summaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
-rw-r--r--.gitignore7
-rw-r--r--.templateconf2
-rw-r--r--Makefile35
-rw-r--r--README.OE-Core.md10
-rw-r--r--SECURITY.md22
-rw-r--r--bitbake/README36
-rw-r--r--bitbake/SECURITY.md24
-rwxr-xr-xbitbake/bin/bitbake5
-rwxr-xr-xbitbake/bin/bitbake-diffsigs13
-rwxr-xr-xbitbake/bin/bitbake-getvar32
-rwxr-xr-xbitbake/bin/bitbake-hashclient244
-rwxr-xr-xbitbake/bin/bitbake-hashserv145
-rwxr-xr-xbitbake/bin/bitbake-layers4
-rwxr-xr-xbitbake/bin/bitbake-prserv94
-rwxr-xr-xbitbake/bin/bitbake-server12
-rwxr-xr-xbitbake/bin/bitbake-worker120
-rwxr-xr-xbitbake/bin/git-make-shallow36
-rwxr-xr-xbitbake/bin/toaster12
-rwxr-xr-xbitbake/bin/toaster-eventreplay80
-rw-r--r--bitbake/contrib/vim/indent/bitbake.vim6
-rw-r--r--bitbake/contrib/vim/syntax/bitbake.vim8
-rw-r--r--bitbake/doc/README4
-rw-r--r--bitbake/doc/_templates/footer.html9
-rw-r--r--bitbake/doc/bitbake-user-manual/bitbake-user-manual-execution.rst69
-rw-r--r--bitbake/doc/bitbake-user-manual/bitbake-user-manual-fetching.rst127
-rw-r--r--bitbake/doc/bitbake-user-manual/bitbake-user-manual-hello.rst137
-rw-r--r--bitbake/doc/bitbake-user-manual/bitbake-user-manual-metadata.rst134
-rw-r--r--bitbake/doc/bitbake-user-manual/bitbake-user-manual-ref-variables-context.rst91
-rw-r--r--bitbake/doc/bitbake-user-manual/bitbake-user-manual-ref-variables.rst131
-rw-r--r--bitbake/doc/index.rst1
-rw-r--r--bitbake/doc/releases.rst82
-rw-r--r--bitbake/lib/bb/COW.py2
-rw-r--r--bitbake/lib/bb/__init__.py17
-rwxr-xr-xbitbake/lib/bb/acl.py215
-rw-r--r--bitbake/lib/bb/asyncrpc/__init__.py37
-rw-r--r--bitbake/lib/bb/asyncrpc/client.py273
-rw-r--r--bitbake/lib/bb/asyncrpc/connection.py146
-rw-r--r--bitbake/lib/bb/asyncrpc/exceptions.py21
-rw-r--r--bitbake/lib/bb/asyncrpc/serv.py385
-rw-r--r--bitbake/lib/bb/build.py177
-rw-r--r--bitbake/lib/bb/cache.py307
-rw-r--r--bitbake/lib/bb/codeparser.py84
-rw-r--r--bitbake/lib/bb/command.py76
-rw-r--r--bitbake/lib/bb/compress/_pipecompress.py2
-rw-r--r--bitbake/lib/bb/compress/lz4.py2
-rw-r--r--bitbake/lib/bb/compress/zstd.py2
-rw-r--r--bitbake/lib/bb/cooker.py522
-rw-r--r--bitbake/lib/bb/cookerdata.py145
-rw-r--r--bitbake/lib/bb/daemonize.py2
-rw-r--r--bitbake/lib/bb/data.py120
-rw-r--r--bitbake/lib/bb/data_smart.py63
-rw-r--r--bitbake/lib/bb/event.py142
-rw-r--r--bitbake/lib/bb/exceptions.py2
-rw-r--r--bitbake/lib/bb/fetch2/__init__.py220
-rw-r--r--bitbake/lib/bb/fetch2/crate.py23
-rw-r--r--bitbake/lib/bb/fetch2/gcp.py101
-rw-r--r--bitbake/lib/bb/fetch2/git.py211
-rw-r--r--bitbake/lib/bb/fetch2/gitsm.py29
-rw-r--r--bitbake/lib/bb/fetch2/hg.py1
-rw-r--r--bitbake/lib/bb/fetch2/local.py16
-rw-r--r--bitbake/lib/bb/fetch2/npm.py13
-rw-r--r--bitbake/lib/bb/fetch2/npmsw.py75
-rw-r--r--bitbake/lib/bb/fetch2/osc.py4
-rw-r--r--bitbake/lib/bb/fetch2/sftp.py2
-rw-r--r--bitbake/lib/bb/fetch2/ssh.py6
-rw-r--r--bitbake/lib/bb/fetch2/wget.py50
-rwxr-xr-xbitbake/lib/bb/main.py400
-rw-r--r--bitbake/lib/bb/monitordisk.py7
-rw-r--r--bitbake/lib/bb/msg.py2
-rw-r--r--bitbake/lib/bb/parse/__init__.py12
-rw-r--r--bitbake/lib/bb/parse/ast.py73
-rw-r--r--bitbake/lib/bb/parse/parse_py/BBHandler.py67
-rw-r--r--bitbake/lib/bb/parse/parse_py/ConfHandler.py31
-rw-r--r--bitbake/lib/bb/persist_data.py21
-rw-r--r--bitbake/lib/bb/process.py2
-rw-r--r--bitbake/lib/bb/runqueue.py638
-rw-r--r--bitbake/lib/bb/server/process.py347
-rw-r--r--bitbake/lib/bb/server/xmlrpcserver.py2
-rw-r--r--bitbake/lib/bb/siggen.py612
-rw-r--r--bitbake/lib/bb/tests/codeparser.py48
-rw-r--r--bitbake/lib/bb/tests/color.py2
-rw-r--r--bitbake/lib/bb/tests/compression.py2
-rw-r--r--bitbake/lib/bb/tests/cooker.py2
-rw-r--r--bitbake/lib/bb/tests/data.py41
-rw-r--r--bitbake/lib/bb/tests/event.py62
-rw-r--r--bitbake/lib/bb/tests/fetch-testdata/software/libxml2/2.10/index.html20
-rw-r--r--bitbake/lib/bb/tests/fetch-testdata/software/libxml2/2.9/index.html40
-rw-r--r--bitbake/lib/bb/tests/fetch-testdata/software/libxml2/index.html19
-rw-r--r--bitbake/lib/bb/tests/fetch.py737
-rw-r--r--bitbake/lib/bb/tests/parse.py159
-rw-r--r--bitbake/lib/bb/tests/runqueue.py2
-rw-r--r--bitbake/lib/bb/tests/siggen.py77
-rw-r--r--bitbake/lib/bb/tinfoil.py18
-rw-r--r--bitbake/lib/bb/ui/buildinfohelper.py13
-rw-r--r--bitbake/lib/bb/ui/eventreplay.py86
-rw-r--r--bitbake/lib/bb/ui/knotty.py91
-rw-r--r--bitbake/lib/bb/ui/ncurses.py3
-rw-r--r--bitbake/lib/bb/ui/taskexp.py2
-rwxr-xr-xbitbake/lib/bb/ui/taskexp_ncurses.py1511
-rw-r--r--bitbake/lib/bb/ui/toasterui.py2
-rw-r--r--bitbake/lib/bb/ui/uievent.py30
-rw-r--r--bitbake/lib/bb/utils.py194
-rwxr-xr-xbitbake/lib/bb/xattr.py126
-rw-r--r--bitbake/lib/bblayers/__init__.py2
-rw-r--r--bitbake/lib/bblayers/action.py13
-rw-r--r--bitbake/lib/bblayers/common.py2
-rw-r--r--bitbake/lib/bblayers/layerindex.py27
-rw-r--r--bitbake/lib/bblayers/query.py50
-rw-r--r--bitbake/lib/bs4/tests/test_tree.py2
-rw-r--r--bitbake/lib/codegen.py12
-rw-r--r--bitbake/lib/hashserv/__init__.py199
-rw-r--r--bitbake/lib/hashserv/client.py307
-rw-r--r--bitbake/lib/hashserv/server.py1003
-rw-r--r--bitbake/lib/hashserv/sqlalchemy.py598
-rw-r--r--bitbake/lib/hashserv/sqlite.py562
-rw-r--r--bitbake/lib/hashserv/tests.py1118
-rw-r--r--bitbake/lib/layerindexlib/__init__.py15
-rw-r--r--bitbake/lib/ply/yacc.py7
-rw-r--r--bitbake/lib/progressbar/progressbar.py2
-rw-r--r--bitbake/lib/prserv/__init__.py8
-rw-r--r--bitbake/lib/prserv/client.py51
-rw-r--r--bitbake/lib/prserv/db.py142
-rw-r--r--bitbake/lib/prserv/serv.py160
-rw-r--r--bitbake/lib/toaster/bldcollector/urls.py2
-rw-r--r--bitbake/lib/toaster/bldcollector/views.py3
-rw-r--r--bitbake/lib/toaster/bldcontrol/models.py4
-rw-r--r--bitbake/lib/toaster/logs/.gitignore1
-rwxr-xr-xbitbake/lib/toaster/manage.py2
-rw-r--r--bitbake/lib/toaster/orm/fixtures/README2
-rwxr-xr-xbitbake/lib/toaster/orm/fixtures/gen_fixtures.py14
-rw-r--r--bitbake/lib/toaster/orm/fixtures/oe-core.xml26
-rw-r--r--bitbake/lib/toaster/orm/fixtures/poky.xml38
-rw-r--r--bitbake/lib/toaster/orm/fixtures/settings.xml2
-rw-r--r--bitbake/lib/toaster/orm/management/commands/lsupdates.py2
-rw-r--r--bitbake/lib/toaster/orm/migrations/0021_eventlogsimports.py22
-rw-r--r--bitbake/lib/toaster/orm/models.py44
-rw-r--r--bitbake/lib/toaster/pytest.ini16
-rw-r--r--bitbake/lib/toaster/tests/browser/selenium_helpers_base.py76
-rw-r--r--bitbake/lib/toaster/tests/browser/test_all_builds_page.py315
-rw-r--r--bitbake/lib/toaster/tests/browser/test_all_projects_page.py162
-rw-r--r--bitbake/lib/toaster/tests/browser/test_builddashboard_page.py15
-rw-r--r--bitbake/lib/toaster/tests/browser/test_builddashboard_page_artifacts.py8
-rw-r--r--bitbake/lib/toaster/tests/browser/test_delete_project.py103
-rw-r--r--bitbake/lib/toaster/tests/browser/test_landing_page.py131
-rw-r--r--bitbake/lib/toaster/tests/browser/test_layerdetails_page.py39
-rw-r--r--bitbake/lib/toaster/tests/browser/test_most_recent_builds_states.py24
-rw-r--r--bitbake/lib/toaster/tests/browser/test_new_custom_image_page.py14
-rw-r--r--bitbake/lib/toaster/tests/browser/test_new_project_page.py16
-rw-r--r--bitbake/lib/toaster/tests/browser/test_project_builds_page.py4
-rw-r--r--bitbake/lib/toaster/tests/browser/test_project_config_page.py33
-rw-r--r--bitbake/lib/toaster/tests/browser/test_sample.py10
-rw-r--r--bitbake/lib/toaster/tests/browser/test_toastertable_ui.py11
-rw-r--r--bitbake/lib/toaster/tests/builds/buildtest.py13
-rw-r--r--bitbake/lib/toaster/tests/builds/test_core_image_min.py20
-rw-r--r--bitbake/lib/toaster/tests/commands/test_loaddata.py4
-rw-r--r--bitbake/lib/toaster/tests/commands/test_lsupdates.py3
-rw-r--r--bitbake/lib/toaster/tests/commands/test_runbuilds.py13
-rw-r--r--bitbake/lib/toaster/tests/db/test_db.py3
-rw-r--r--bitbake/lib/toaster/tests/functional/functional_helpers.py82
-rw-r--r--bitbake/lib/toaster/tests/functional/test_create_new_project.py179
-rw-r--r--bitbake/lib/toaster/tests/functional/test_functional_basic.py195
-rw-r--r--bitbake/lib/toaster/tests/functional/test_project_config.py341
-rw-r--r--bitbake/lib/toaster/tests/functional/test_project_page.py792
-rw-r--r--bitbake/lib/toaster/tests/functional/test_project_page_tab_config.py528
-rw-r--r--bitbake/lib/toaster/tests/functional/utils.py89
-rw-r--r--bitbake/lib/toaster/tests/toaster-tests-requirements.txt8
-rw-r--r--bitbake/lib/toaster/tests/views/test_views.py20
-rw-r--r--bitbake/lib/toaster/toastergui/api.py26
-rw-r--r--bitbake/lib/toaster/toastergui/fixtures/toastergui-unittest-data.xml24
-rw-r--r--bitbake/lib/toaster/toastergui/forms.py14
-rw-r--r--bitbake/lib/toaster/toastergui/static/css/default.css28
-rw-r--r--bitbake/lib/toaster/toastergui/static/css/jquery.dataTables-1.13.8.min.css1
-rw-r--r--bitbake/lib/toaster/toastergui/static/js/bootstrap-3.4.1.js2580
-rw-r--r--bitbake/lib/toaster/toastergui/static/js/bootstrap-3.4.1.min.js6
-rw-r--r--bitbake/lib/toaster/toastergui/static/js/bootstrap.js2363
-rw-r--r--bitbake/lib/toaster/toastergui/static/js/bootstrap.min.js7
-rw-r--r--bitbake/lib/toaster/toastergui/static/js/jquery-3.7.1.min.js2
-rw-r--r--bitbake/lib/toaster/toastergui/static/js/jquery-3.7.1.min.map1
-rw-r--r--bitbake/lib/toaster/toastergui/static/js/jquery.dataTables-1.13.8.min.js4
-rw-r--r--bitbake/lib/toaster/toastergui/static/js/libtoaster.js2
-rw-r--r--bitbake/lib/toaster/toastergui/static/js/projectpage.js2
-rw-r--r--bitbake/lib/toaster/toastergui/templates/base.html9
-rw-r--r--bitbake/lib/toaster/toastergui/templates/base_specific.html4
-rw-r--r--bitbake/lib/toaster/toastergui/templates/command_line_builds.html209
-rw-r--r--bitbake/lib/toaster/toastergui/templates/js-unit-tests.html2
-rw-r--r--bitbake/lib/toaster/toastergui/templates/landing.html12
-rw-r--r--bitbake/lib/toaster/toastergui/templates/mrb_section.html2
-rw-r--r--bitbake/lib/toaster/toastergui/templates/package_built_dependencies.html8
-rw-r--r--bitbake/lib/toaster/toastergui/templates/package_included_dependencies.html8
-rw-r--r--bitbake/lib/toaster/toastergui/templates/package_included_reverse_dependencies.html4
-rw-r--r--bitbake/lib/toaster/toastergui/templates/recipe.html4
-rw-r--r--bitbake/lib/toaster/toastergui/templates/target.html4
-rw-r--r--bitbake/lib/toaster/toastergui/templatetags/projecttags.py4
-rw-r--r--bitbake/lib/toaster/toastergui/urls.py6
-rw-r--r--bitbake/lib/toaster/toastergui/views.py196
-rw-r--r--bitbake/lib/toaster/toastergui/widgets.py10
-rw-r--r--bitbake/lib/toaster/toastermain/logs.py158
-rw-r--r--bitbake/lib/toaster/toastermain/management/commands/buildimport.py2
-rw-r--r--bitbake/lib/toaster/toastermain/management/commands/checksocket.py4
-rw-r--r--bitbake/lib/toaster/toastermain/settings.py82
-rw-r--r--bitbake/lib/toaster/toastermain/settings_test.py4
-rw-r--r--bitbake/lib/toaster/toastermain/urls.py4
-rw-r--r--bitbake/lib/toaster/tox.ini24
-rw-r--r--bitbake/toaster-requirements.txt3
-rw-r--r--documentation/.gitignore4
-rw-r--r--documentation/.vale.ini7
-rw-r--r--documentation/Makefile14
-rw-r--r--documentation/README71
-rw-r--r--documentation/_templates/footer.html2
-rw-r--r--documentation/boilerplate.rst4
-rw-r--r--documentation/brief-yoctoprojectqs/index.rst63
-rw-r--r--documentation/bsp-guide/bsp.rst175
-rw-r--r--documentation/conf.py12
-rw-r--r--documentation/contributor-guide/identify-component.rst31
-rw-r--r--documentation/contributor-guide/index.rst26
-rw-r--r--documentation/contributor-guide/recipe-style-guide.rst411
-rw-r--r--documentation/contributor-guide/report-defect.rst67
-rw-r--r--documentation/contributor-guide/submit-changes.rst827
-rw-r--r--documentation/dev-manual/bmaptool.rst59
-rw-r--r--documentation/dev-manual/build-quality.rst409
-rw-r--r--documentation/dev-manual/building.rst942
-rw-r--r--documentation/dev-manual/common-tasks.rst11782
-rw-r--r--documentation/dev-manual/custom-distribution.rst135
-rw-r--r--documentation/dev-manual/custom-template-configuration-directory.rst52
-rw-r--r--documentation/dev-manual/customizing-images.rst223
-rw-r--r--documentation/dev-manual/debugging.rst1271
-rw-r--r--documentation/dev-manual/development-shell.rst82
-rw-r--r--documentation/dev-manual/device-manager.rst74
-rw-r--r--documentation/dev-manual/disk-space.rst61
-rw-r--r--documentation/dev-manual/efficiently-fetching-sources.rst68
-rw-r--r--documentation/dev-manual/error-reporting-tool.rst84
-rw-r--r--documentation/dev-manual/external-scm.rst67
-rw-r--r--documentation/dev-manual/external-toolchain.rst40
-rw-r--r--documentation/dev-manual/figures/cute-files-npm-example.pngbin26248 -> 73191 bytes
-rw-r--r--documentation/dev-manual/gobject-introspection.rst155
-rw-r--r--documentation/dev-manual/index.rst40
-rw-r--r--documentation/dev-manual/init-manager.rst162
-rw-r--r--documentation/dev-manual/layers.rst919
-rw-r--r--documentation/dev-manual/libraries.rst267
-rw-r--r--documentation/dev-manual/licenses.rst544
-rw-r--r--documentation/dev-manual/new-machine.rst118
-rw-r--r--documentation/dev-manual/new-recipe.rst1639
-rw-r--r--documentation/dev-manual/packages.rst1250
-rw-r--r--documentation/dev-manual/prebuilt-libraries.rst209
-rw-r--r--documentation/dev-manual/python-development-shell.rst50
-rw-r--r--documentation/dev-manual/qemu.rst84
-rw-r--r--documentation/dev-manual/quilt.rst89
-rw-r--r--documentation/dev-manual/read-only-rootfs.rst89
-rw-r--r--documentation/dev-manual/runtime-testing.rst594
-rw-r--r--documentation/dev-manual/sbom.rst83
-rw-r--r--documentation/dev-manual/securing-images.rst156
-rw-r--r--documentation/dev-manual/security-subjects.rst189
-rw-r--r--documentation/dev-manual/speeding-up-build.rst109
-rw-r--r--documentation/dev-manual/start.rst274
-rw-r--r--documentation/dev-manual/temporary-source-code.rst66
-rw-r--r--documentation/dev-manual/upgrading-recipes.rst397
-rw-r--r--documentation/dev-manual/vulnerabilities.rst293
-rw-r--r--documentation/dev-manual/wayland.rst90
-rw-r--r--documentation/dev-manual/wic.rst731
-rw-r--r--documentation/dev-manual/x32-psabi.rst54
-rw-r--r--documentation/genindex.rst2
-rw-r--r--documentation/index.rst1
-rw-r--r--documentation/kernel-dev/advanced.rst51
-rw-r--r--documentation/kernel-dev/common.rst392
-rw-r--r--documentation/kernel-dev/concepts-appx.rst5
-rw-r--r--documentation/kernel-dev/faq.rst4
-rw-r--r--documentation/kernel-dev/intro.rst22
-rw-r--r--documentation/kernel-dev/maint-appx.rst21
-rw-r--r--documentation/migration-guides/index.rst4
-rw-r--r--documentation/migration-guides/migration-1.3.rst23
-rw-r--r--documentation/migration-guides/migration-1.4.rst4
-rw-r--r--documentation/migration-guides/migration-1.5.rst38
-rw-r--r--documentation/migration-guides/migration-1.6.rst53
-rw-r--r--documentation/migration-guides/migration-1.7.rst25
-rw-r--r--documentation/migration-guides/migration-1.8.rst36
-rw-r--r--documentation/migration-guides/migration-2.0.rst4
-rw-r--r--documentation/migration-guides/migration-2.1.rst54
-rw-r--r--documentation/migration-guides/migration-2.2.rst35
-rw-r--r--documentation/migration-guides/migration-2.3.rst43
-rw-r--r--documentation/migration-guides/migration-2.4.rst28
-rw-r--r--documentation/migration-guides/migration-2.5.rst24
-rw-r--r--documentation/migration-guides/migration-2.6.rst48
-rw-r--r--documentation/migration-guides/migration-2.7.rst5
-rw-r--r--documentation/migration-guides/migration-3.0.rst24
-rw-r--r--documentation/migration-guides/migration-3.1.rst12
-rw-r--r--documentation/migration-guides/migration-3.2.rst65
-rw-r--r--documentation/migration-guides/migration-3.3.rst34
-rw-r--r--documentation/migration-guides/migration-3.4.rst26
-rw-r--r--documentation/migration-guides/migration-4.0.rst64
-rw-r--r--documentation/migration-guides/migration-4.1.rst216
-rw-r--r--documentation/migration-guides/migration-4.2.rst276
-rw-r--r--documentation/migration-guides/migration-4.3.rst252
-rw-r--r--documentation/migration-guides/migration-5.0.rst202
-rw-r--r--documentation/migration-guides/migration-general.rst36
-rw-r--r--documentation/migration-guides/release-3.4.rst4
-rw-r--r--documentation/migration-guides/release-4.0.rst19
-rw-r--r--documentation/migration-guides/release-4.1.rst13
-rw-r--r--documentation/migration-guides/release-4.2.rst13
-rw-r--r--documentation/migration-guides/release-4.3.rst13
-rw-r--r--documentation/migration-guides/release-5.0.rst9
-rw-r--r--documentation/migration-guides/release-notes-3.4.1.rst32
-rw-r--r--documentation/migration-guides/release-notes-3.4.2.rst24
-rw-r--r--documentation/migration-guides/release-notes-3.4.3.rst199
-rw-r--r--documentation/migration-guides/release-notes-3.4.4.rst157
-rw-r--r--documentation/migration-guides/release-notes-3.4.rst98
-rw-r--r--documentation/migration-guides/release-notes-4.0.1.rst250
-rw-r--r--documentation/migration-guides/release-notes-4.0.10.rst180
-rw-r--r--documentation/migration-guides/release-notes-4.0.11.rst214
-rw-r--r--documentation/migration-guides/release-notes-4.0.12.rst277
-rw-r--r--documentation/migration-guides/release-notes-4.0.13.rst271
-rw-r--r--documentation/migration-guides/release-notes-4.0.14.rst227
-rw-r--r--documentation/migration-guides/release-notes-4.0.15.rst189
-rw-r--r--documentation/migration-guides/release-notes-4.0.16.rst191
-rw-r--r--documentation/migration-guides/release-notes-4.0.17.rst238
-rw-r--r--documentation/migration-guides/release-notes-4.0.2.rst298
-rw-r--r--documentation/migration-guides/release-notes-4.0.3.rst316
-rw-r--r--documentation/migration-guides/release-notes-4.0.4.rst301
-rw-r--r--documentation/migration-guides/release-notes-4.0.5.rst198
-rw-r--r--documentation/migration-guides/release-notes-4.0.6.rst313
-rw-r--r--documentation/migration-guides/release-notes-4.0.7.rst242
-rw-r--r--documentation/migration-guides/release-notes-4.0.8.rst217
-rw-r--r--documentation/migration-guides/release-notes-4.0.9.rst249
-rw-r--r--documentation/migration-guides/release-notes-4.0.rst90
-rw-r--r--documentation/migration-guides/release-notes-4.1.1.rst319
-rw-r--r--documentation/migration-guides/release-notes-4.1.2.rst286
-rw-r--r--documentation/migration-guides/release-notes-4.1.3.rst317
-rw-r--r--documentation/migration-guides/release-notes-4.1.4.rst254
-rw-r--r--documentation/migration-guides/release-notes-4.1.rst761
-rw-r--r--documentation/migration-guides/release-notes-4.2.1.rst206
-rw-r--r--documentation/migration-guides/release-notes-4.2.2.rst330
-rw-r--r--documentation/migration-guides/release-notes-4.2.3.rst263
-rw-r--r--documentation/migration-guides/release-notes-4.2.4.rst364
-rw-r--r--documentation/migration-guides/release-notes-4.2.rst984
-rw-r--r--documentation/migration-guides/release-notes-4.3.1.rst237
-rw-r--r--documentation/migration-guides/release-notes-4.3.2.rst247
-rw-r--r--documentation/migration-guides/release-notes-4.3.3.rst200
-rw-r--r--documentation/migration-guides/release-notes-4.3.4.rst206
-rw-r--r--documentation/migration-guides/release-notes-4.3.rst965
-rw-r--r--documentation/migration-guides/release-notes-5.0.rst906
-rw-r--r--documentation/overview-manual/concepts.rst517
-rw-r--r--documentation/overview-manual/development-environment.rst90
-rwxr-xr-xdocumentation/overview-manual/figures/index-downloads.pngbin18142 -> 0 bytes
-rw-r--r--documentation/overview-manual/figures/source-repos.pngbin298757 -> 775796 bytes
-rw-r--r--documentation/overview-manual/figures/yp-download.pngbin82939 -> 518257 bytes
-rw-r--r--documentation/overview-manual/intro.rst2
-rw-r--r--documentation/overview-manual/svg/bitbake_tasks_map.svg4
-rw-r--r--documentation/overview-manual/yp-intro.rst75
-rw-r--r--documentation/poky.yaml.in58
-rw-r--r--documentation/profile-manual/intro.rst40
-rw-r--r--documentation/profile-manual/usage.rst891
-rw-r--r--documentation/ref-manual/TODO11
-rw-r--r--documentation/ref-manual/classes.rst2058
-rw-r--r--documentation/ref-manual/devtool-reference.rst34
-rw-r--r--documentation/ref-manual/faq.rst614
-rw-r--r--documentation/ref-manual/features.rst257
-rw-r--r--documentation/ref-manual/images.rst28
-rw-r--r--documentation/ref-manual/kickstart.rst21
-rw-r--r--documentation/ref-manual/qa-checks.rst106
-rw-r--r--documentation/ref-manual/release-process.rst98
-rw-r--r--documentation/ref-manual/resources.rst81
-rw-r--r--documentation/ref-manual/structure.rst162
-rw-r--r--documentation/ref-manual/svg/releases.svg1744
-rw-r--r--documentation/ref-manual/system-requirements.rst315
-rw-r--r--documentation/ref-manual/tasks.rst145
-rw-r--r--documentation/ref-manual/terms.rst175
-rw-r--r--documentation/ref-manual/variables.rst3253
-rw-r--r--documentation/ref-manual/varlocality.rst2
-rw-r--r--documentation/releases.rst248
-rw-r--r--documentation/sdk-manual/appendix-customizing.rst62
-rw-r--r--documentation/sdk-manual/appendix-obtain.rst146
-rw-r--r--documentation/sdk-manual/extensible.rst670
-rw-r--r--documentation/sdk-manual/intro.rst8
-rw-r--r--documentation/sdk-manual/using.rst21
-rw-r--r--documentation/sdk-manual/working-projects.rst74
-rwxr-xr-xdocumentation/set_versions.py89
-rw-r--r--documentation/standards.md83
-rw-r--r--documentation/styles/config/vocabularies/OpenSource/accept.txt20
-rw-r--r--documentation/styles/config/vocabularies/Yocto/accept.txt5
-rw-r--r--documentation/template/template.svg2
-rw-r--r--documentation/test-manual/intro.rst155
-rw-r--r--documentation/test-manual/reproducible-builds.rst25
-rw-r--r--documentation/test-manual/test-process.rst44
-rw-r--r--documentation/test-manual/understand-autobuilder.rst68
-rw-r--r--documentation/test-manual/yocto-project-compatible.rst2
-rw-r--r--documentation/toaster-manual/intro.rst5
-rw-r--r--documentation/toaster-manual/reference.rst28
-rw-r--r--documentation/toaster-manual/setup-and-use.rst23
-rw-r--r--documentation/transitioning-to-a-custom-environment.rst10
-rw-r--r--documentation/what-i-wish-id-known.rst14
-rw-r--r--meta-poky/README.poky.md15
-rw-r--r--meta-poky/classes/poky-bleeding.bbclass20
-rw-r--r--meta-poky/conf/distro/include/gcsections.inc2
-rw-r--r--meta-poky/conf/distro/include/poky-distro-alt-test-config.inc8
-rw-r--r--meta-poky/conf/distro/include/poky-floating-revisions.inc68
-rw-r--r--meta-poky/conf/distro/poky-altcfg.conf6
-rw-r--r--meta-poky/conf/distro/poky-bleeding.conf6
-rw-r--r--meta-poky/conf/distro/poky-tiny.conf11
-rw-r--r--meta-poky/conf/distro/poky.conf46
-rw-r--r--meta-poky/conf/layer.conf2
-rw-r--r--meta-poky/conf/templates/default/bblayers.conf.sample (renamed from meta-poky/conf/bblayers.conf.sample)0
-rw-r--r--meta-poky/conf/templates/default/conf-notes.txt (renamed from meta-poky/conf/conf-notes.txt)0
-rw-r--r--meta-poky/conf/templates/default/conf-summary.txt1
-rw-r--r--meta-poky/conf/templates/default/local.conf.sample (renamed from meta-poky/conf/local.conf.sample)28
-rw-r--r--meta-poky/conf/templates/default/local.conf.sample.extended (renamed from meta-poky/conf/local.conf.sample.extended)0
-rw-r--r--meta-poky/conf/templates/default/site.conf.sample (renamed from meta-poky/conf/site.conf.sample)0
-rw-r--r--meta-poky/recipes-core/base-files/base-files_%.bbappend1
-rw-r--r--meta-poky/recipes-core/base-files/files/poky/motd5
-rw-r--r--meta-selftest/classes/base-do-configure-modified.bbclass3
-rw-r--r--meta-selftest/conf/layer.conf4
-rw-r--r--meta-selftest/conf/multiconfig/muslmc.conf2
-rw-r--r--meta-selftest/files/static-group4
-rw-r--r--meta-selftest/files/static-passwd2
-rw-r--r--meta-selftest/lib/oeqa/runtime/cases/dnf_runtime.py3
-rw-r--r--meta-selftest/lib/oeqa/runtime/cases/virgl.py2
-rw-r--r--meta-selftest/recipes-devtools/mtd/mtd-utils-selftest/0001-tests-Remove-unused-linux-fs.h-header-from-includes.patch31
-rw-r--r--meta-selftest/recipes-devtools/mtd/mtd-utils-selftest_git.bb77
-rw-r--r--meta-selftest/recipes-devtools/python/python-async-test.inc2
-rw-r--r--meta-selftest/recipes-devtools/rust/rust-c-lib-example-bin_git.bb16
-rw-r--r--meta-selftest/recipes-devtools/rust/rust-c-lib-example-crates.inc79
-rw-r--r--meta-selftest/recipes-devtools/rust/rust-c-lib-example_git.bb15
-rw-r--r--meta-selftest/recipes-extended/hello-rs/hello-rs-crates.inc8
-rw-r--r--meta-selftest/recipes-extended/hello-rs/hello-rs/0001-Greet-OE-Core.patch24
-rw-r--r--meta-selftest/recipes-extended/hello-rs/hello-rs_0.1.0.bb22
-rw-r--r--meta-selftest/recipes-extended/sysdig/sysdig-selftest/0055-Add-cstdint-for-uintXX_t-types.patch38
-rw-r--r--meta-selftest/recipes-extended/sysdig/sysdig-selftest/0099-cmake-Pass-PROBE_NAME-via-CFLAGS.patch29
-rw-r--r--meta-selftest/recipes-extended/sysdig/sysdig-selftest_0.28.0.bb66
-rw-r--r--meta-selftest/recipes-test/aspell/aspell_%.bbappend (renamed from meta-selftest/recipes-test/aspell/aspell_0.60.8.bbappend)0
-rw-r--r--meta-selftest/recipes-test/binutils/binutils_%.bbappend2
-rw-r--r--meta-selftest/recipes-test/cpp/.gitignore1
-rw-r--r--meta-selftest/recipes-test/cpp/cmake-example.bb25
-rw-r--r--meta-selftest/recipes-test/cpp/cmake-example/run-ptest10
-rw-r--r--meta-selftest/recipes-test/cpp/cpp-example.inc24
-rw-r--r--meta-selftest/recipes-test/cpp/files/CMakeLists.txt61
-rw-r--r--meta-selftest/recipes-test/cpp/files/cpp-example-lib.cpp33
-rw-r--r--meta-selftest/recipes-test/cpp/files/cpp-example-lib.hpp21
-rw-r--r--meta-selftest/recipes-test/cpp/files/cpp-example.cpp18
-rw-r--r--meta-selftest/recipes-test/cpp/files/meson.build38
-rw-r--r--meta-selftest/recipes-test/cpp/files/meson.options3
-rw-r--r--meta-selftest/recipes-test/cpp/files/test-cpp-example.cpp25
-rw-r--r--meta-selftest/recipes-test/cpp/meson-example.bb27
-rw-r--r--meta-selftest/recipes-test/cpp/meson-example/run-ptest10
-rw-r--r--meta-selftest/recipes-test/devtool/devtool-test-local/file31
-rw-r--r--meta-selftest/recipes-test/devtool/devtool-test-local_6.03.bb3
-rw-r--r--meta-selftest/recipes-test/devtool/devtool-test-localonly.bb3
-rw-r--r--meta-selftest/recipes-test/devtool/devtool-test-localonly/file31
-rw-r--r--meta-selftest/recipes-test/devtool/devtool-upgrade-test2_git.bb2
-rw-r--r--meta-selftest/recipes-test/devtool/devtool-upgrade-test2_git.bb.upgraded2
-rw-r--r--meta-selftest/recipes-test/devtool/devtool-upgrade-test3_1.5.3.bb16
-rw-r--r--meta-selftest/recipes-test/devtool/devtool-upgrade-test3_1.5.3.bb.upgraded15
-rw-r--r--meta-selftest/recipes-test/devtool/devtool-upgrade-test4_1.5.3.bb22
-rw-r--r--meta-selftest/recipes-test/devtool/devtool-upgrade-test4_1.5.3.bb.upgraded19
-rw-r--r--meta-selftest/recipes-test/gcc-source/gcc-source_%.bbappend2
-rw-r--r--meta-selftest/recipes-test/git-submodule-test/git-submodule-test.bb2
-rw-r--r--meta-selftest/recipes-test/gitunpackoffline/gitunpackoffline-fail.bb18
-rw-r--r--meta-selftest/recipes-test/gitunpackoffline/gitunpackoffline.bb4
-rw-r--r--meta-selftest/recipes-test/gitunpackoffline/gitunpackoffline.inc16
-rw-r--r--meta-selftest/recipes-test/images/oe-selftest-image.bb2
-rw-r--r--meta-selftest/recipes-test/license/incompatible-license-alias.bb2
-rw-r--r--meta-selftest/recipes-test/license/incompatible-license.bb2
-rw-r--r--meta-selftest/recipes-test/license/incompatible-licenses.bb2
-rw-r--r--meta-selftest/recipes-test/license/incompatible-nonspdx-license.bb2
-rw-r--r--meta-selftest/recipes-test/multiconfig/multiconfig-image-packager_0.1.bb18
-rw-r--r--meta-selftest/recipes-test/overlayfs-user/overlayfs-user.bb9
-rw-r--r--meta-selftest/recipes-test/packagenameconflict/packagenameconflict.bb10
-rw-r--r--meta-selftest/recipes-test/perlcross/perlcross_%.bbappend2
-rw-r--r--meta-selftest/recipes-test/poison/poison.bb8
-rw-r--r--meta-selftest/recipes-test/selftest-ed/selftest-ed_0.5.bb1
-rw-r--r--meta-selftest/recipes-test/selftest-hello/files/helloworld.c8
-rw-r--r--meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb19
-rw-r--r--meta-selftest/recipes-test/selftest-users/acreategroup.bb32
-rw-r--r--meta-selftest/recipes-test/selftest-users/bcreategroup.bb37
-rw-r--r--meta-selftest/recipes-test/selftest-users/ccreategroup.bb34
-rw-r--r--meta-selftest/recipes-test/selftest-users/creategroup1.bb30
-rw-r--r--meta-selftest/recipes-test/selftest-users/creategroup2.bb32
-rw-r--r--meta-selftest/recipes-test/selftest-users/dcreategroup.bb33
-rw-r--r--meta-selftest/recipes-test/selftest-users/useraddbadtask.bb20
-rw-r--r--meta-selftest/recipes-test/sysroot-test/sysroot-la-test_1.0.bb16
-rw-r--r--meta-selftest/recipes-test/sysroot-test/sysroot-pc-test_1.0.bb12
-rw-r--r--meta-selftest/recipes-test/sysroot-test/sysroot-shebang-test_1.0.bb12
-rw-r--r--meta-selftest/recipes-test/testrpm/files/testfile.txt1
-rw-r--r--meta-selftest/recipes-test/testrpm/testrpm_0.0.1.bb18
-rw-r--r--meta-selftest/recipes-test/wrapper/cmdline-shebang-wrapper-test.bb30
-rw-r--r--meta-selftest/recipes-test/wrapper/files/test.awk2
-rw-r--r--meta-selftest/wic/overlayfs_etc.wks.in4
-rw-r--r--meta-selftest/wic/test_gpt_partition_name.wks7
-rw-r--r--meta-selftest/wic/test_rawcopy_plugin.wks.in2
-rw-r--r--meta-selftest/wic/test_uefikernel.wks5
-rw-r--r--meta-skeleton/conf/layer.conf2
-rw-r--r--meta-skeleton/recipes-baremetal/baremetal-examples/baremetal-helloworld_git.bb53
-rw-r--r--meta-skeleton/recipes-kernel/hello-mod/files/hello.c13
-rw-r--r--meta-skeleton/recipes-kernel/linux/linux-yocto-custom.bb2
-rw-r--r--meta-skeleton/recipes-skeleton/libxpm/libxpm_3.5.6.bb1
-rw-r--r--meta-skeleton/recipes-skeleton/service/service_0.1.bb2
-rw-r--r--meta-skeleton/recipes-skeleton/useradd/useradd-example.bb7
-rw-r--r--meta-yocto-bsp/README.hardware.md201
-rw-r--r--meta-yocto-bsp/conf/layer.conf2
-rw-r--r--meta-yocto-bsp/conf/machine/beaglebone-yocto.conf16
-rw-r--r--meta-yocto-bsp/conf/machine/edgerouter.conf26
-rw-r--r--meta-yocto-bsp/conf/machine/genericarm64.conf60
-rw-r--r--meta-yocto-bsp/conf/machine/genericx86-64.conf1
-rw-r--r--meta-yocto-bsp/conf/machine/genericx86.conf1
-rw-r--r--meta-yocto-bsp/conf/machine/include/genericx86-common.inc6
-rw-r--r--meta-yocto-bsp/lib/oeqa/controllers/edgeroutertarget.py89
-rw-r--r--meta-yocto-bsp/lib/oeqa/runtime/cases/parselogs-ignores-beaglebone-yocto.txt4
-rw-r--r--meta-yocto-bsp/lib/oeqa/runtime/cases/parselogs-ignores-genericx86-64.txt7
-rw-r--r--meta-yocto-bsp/lib/oeqa/selftest/cases/systemd_boot.py11
-rw-r--r--meta-yocto-bsp/recipes-bsp/gma500-gfx-check/gma500-gfx-check/gma500-gfx-check.conf2
-rw-r--r--meta-yocto-bsp/recipes-bsp/gma500-gfx-check/gma500-gfx-check/gma500-gfx-check.sh15
-rw-r--r--meta-yocto-bsp/recipes-bsp/gma500-gfx-check/gma500-gfx-check_1.0.bb18
-rw-r--r--meta-yocto-bsp/recipes-kernel/linux/linux-yocto-dev.bbappend4
-rw-r--r--meta-yocto-bsp/recipes-kernel/linux/linux-yocto_5.10.bbappend23
-rw-r--r--meta-yocto-bsp/recipes-kernel/linux/linux-yocto_5.15.bbappend23
-rw-r--r--meta-yocto-bsp/recipes-kernel/linux/linux-yocto_6.6.bbappend21
-rw-r--r--meta-yocto-bsp/wic/edgerouter.wks4
-rw-r--r--meta-yocto-bsp/wic/genericarm64.wks.in11
-rw-r--r--meta/classes-global/base.bbclass (renamed from meta/classes/base.bbclass)103
-rw-r--r--meta/classes-global/buildstats.bbclass (renamed from meta/classes/buildstats.bbclass)9
-rw-r--r--meta/classes-global/debian.bbclass (renamed from meta/classes/debian.bbclass)21
-rw-r--r--meta/classes-global/devshell.bbclass (renamed from meta/classes/devshell.bbclass)8
-rw-r--r--meta/classes-global/insane.bbclass (renamed from meta/classes/insane.bbclass)406
-rw-r--r--meta/classes-global/license.bbclass (renamed from meta/classes/license.bbclass)18
-rw-r--r--meta/classes-global/logging.bbclass (renamed from meta/classes/logging.bbclass)6
-rw-r--r--meta/classes-global/mirrors.bbclass (renamed from meta/classes/mirrors.bbclass)21
-rw-r--r--meta/classes-global/package.bbclass616
-rw-r--r--meta/classes-global/package_deb.bbclass (renamed from meta/classes/package_deb.bbclass)12
-rw-r--r--meta/classes-global/package_ipk.bbclass (renamed from meta/classes/package_ipk.bbclass)26
-rw-r--r--meta/classes-global/package_pkgdata.bbclass (renamed from meta/classes/package_pkgdata.bbclass)6
-rw-r--r--meta/classes-global/package_rpm.bbclass (renamed from meta/classes/package_rpm.bbclass)119
-rw-r--r--meta/classes-global/packagedata.bbclass (renamed from meta/classes/packagedata.bbclass)6
-rw-r--r--meta/classes-global/patch.bbclass (renamed from meta/classes/patch.bbclass)6
-rw-r--r--meta/classes-global/sanity.bbclass (renamed from meta/classes/sanity.bbclass)95
-rw-r--r--meta/classes-global/sstate.bbclass (renamed from meta/classes/sstate.bbclass)254
-rw-r--r--meta/classes-global/staging.bbclass (renamed from meta/classes/staging.bbclass)31
-rw-r--r--meta/classes-global/uninative.bbclass (renamed from meta/classes/uninative.bbclass)18
-rw-r--r--meta/classes-global/utility-tasks.bbclass (renamed from meta/classes/utility-tasks.bbclass)6
-rw-r--r--meta/classes-global/utils.bbclass (renamed from meta/classes/utils.bbclass)46
-rw-r--r--meta/classes-recipe/allarch.bbclass (renamed from meta/classes/allarch.bbclass)10
-rw-r--r--meta/classes-recipe/autotools-brokensep.bbclass11
-rw-r--r--meta/classes-recipe/autotools.bbclass (renamed from meta/classes/autotools.bbclass)15
-rw-r--r--meta/classes-recipe/baremetal-image.bbclass (renamed from meta/classes/baremetal-image.bbclass)29
-rw-r--r--meta/classes-recipe/bash-completion.bbclass (renamed from meta/classes/bash-completion.bbclass)6
-rw-r--r--meta/classes-recipe/bin_package.bbclass (renamed from meta/classes/bin_package.bbclass)9
-rw-r--r--meta/classes-recipe/binconfig-disabled.bbclass (renamed from meta/classes/binconfig-disabled.bbclass)6
-rw-r--r--meta/classes-recipe/binconfig.bbclass (renamed from meta/classes/binconfig.bbclass)6
-rw-r--r--meta/classes-recipe/cargo-update-recipe-crates.bbclass79
-rw-r--r--meta/classes-recipe/cargo.bbclass93
-rw-r--r--meta/classes-recipe/cargo_c.bbclass41
-rw-r--r--meta/classes-recipe/cargo_common.bbclass238
-rw-r--r--meta/classes-recipe/cmake-qemu.bbclass32
-rw-r--r--meta/classes-recipe/cmake.bbclass247
-rw-r--r--meta/classes-recipe/cml1.bbclass (renamed from meta/classes/cml1.bbclass)36
-rw-r--r--meta/classes-recipe/compress_doc.bbclass (renamed from meta/classes/compress_doc.bbclass)6
-rw-r--r--meta/classes-recipe/core-image.bbclass (renamed from meta/classes/core-image.bbclass)7
-rw-r--r--meta/classes-recipe/cpan-base.bbclass (renamed from meta/classes/cpan-base.bbclass)6
-rw-r--r--meta/classes-recipe/cpan.bbclass (renamed from meta/classes/cpan.bbclass)6
-rw-r--r--meta/classes-recipe/cpan_build.bbclass (renamed from meta/classes/cpan_build.bbclass)6
-rw-r--r--meta/classes-recipe/cross-canadian.bbclass (renamed from meta/classes/cross-canadian.bbclass)4
-rw-r--r--meta/classes-recipe/cross.bbclass (renamed from meta/classes/cross.bbclass)6
-rw-r--r--meta/classes-recipe/crosssdk.bbclass (renamed from meta/classes/crosssdk.bbclass)6
-rw-r--r--meta/classes-recipe/deploy.bbclass (renamed from meta/classes/deploy.bbclass)6
-rw-r--r--meta/classes-recipe/devicetree.bbclass (renamed from meta/classes/devicetree.bbclass)17
-rw-r--r--meta/classes-recipe/devupstream.bbclass (renamed from meta/classes/devupstream.bbclass)8
-rw-r--r--meta/classes-recipe/distro_features_check.bbclass13
-rw-r--r--meta/classes-recipe/dos2unix.bbclass (renamed from meta/classes/dos2unix.bbclass)6
-rw-r--r--meta/classes-recipe/features_check.bbclass (renamed from meta/classes/features_check.bbclass)3
-rw-r--r--meta/classes-recipe/fontcache.bbclass (renamed from meta/classes/fontcache.bbclass)7
-rw-r--r--meta/classes-recipe/fs-uuid.bbclass (renamed from meta/classes/fs-uuid.bbclass)8
-rw-r--r--meta/classes-recipe/gconf.bbclass (renamed from meta/classes/gconf.bbclass)6
-rw-r--r--meta/classes-recipe/gettext.bbclass (renamed from meta/classes/gettext.bbclass)6
-rw-r--r--meta/classes-recipe/gi-docgen.bbclass32
-rw-r--r--meta/classes-recipe/gio-module-cache.bbclass (renamed from meta/classes/gio-module-cache.bbclass)6
-rw-r--r--meta/classes-recipe/github-releases.bbclass3
-rw-r--r--meta/classes-recipe/gnomebase.bbclass (renamed from meta/classes/gnomebase.bbclass)13
-rw-r--r--meta/classes-recipe/go-mod.bbclass (renamed from meta/classes/go-mod.bbclass)12
-rw-r--r--meta/classes-recipe/go-ptest.bbclass (renamed from meta/classes/go-ptest.bbclass)6
-rw-r--r--meta/classes-recipe/go.bbclass (renamed from meta/classes/go.bbclass)16
-rw-r--r--meta/classes-recipe/goarch.bbclass (renamed from meta/classes/goarch.bbclass)48
-rw-r--r--meta/classes-recipe/gobject-introspection-data.bbclass (renamed from meta/classes/gobject-introspection-data.bbclass)6
-rw-r--r--meta/classes-recipe/gobject-introspection.bbclass (renamed from meta/classes/gobject-introspection.bbclass)21
-rw-r--r--meta/classes-recipe/grub-efi-cfg.bbclass (renamed from meta/classes/grub-efi-cfg.bbclass)3
-rw-r--r--meta/classes-recipe/grub-efi.bbclass14
-rw-r--r--meta/classes-recipe/gsettings.bbclass (renamed from meta/classes/gsettings.bbclass)6
-rw-r--r--meta/classes-recipe/gtk-doc.bbclass72
-rw-r--r--meta/classes-recipe/gtk-icon-cache.bbclass (renamed from meta/classes/gtk-icon-cache.bbclass)8
-rw-r--r--meta/classes-recipe/gtk-immodules-cache.bbclass (renamed from meta/classes/gtk-immodules-cache.bbclass)6
-rw-r--r--meta/classes-recipe/image-artifact-names.bbclass41
-rw-r--r--meta/classes-recipe/image-combined-dbg.bbclass15
-rw-r--r--meta/classes-recipe/image-container.bbclass (renamed from meta/classes/image-container.bbclass)6
-rw-r--r--meta/classes-recipe/image-live.bbclass (renamed from meta/classes/image-live.bbclass)10
-rw-r--r--meta/classes-recipe/image-postinst-intercepts.bbclass (renamed from meta/classes/image-postinst-intercepts.bbclass)6
-rw-r--r--meta/classes-recipe/image.bbclass (renamed from meta/classes/image.bbclass)47
-rw-r--r--meta/classes-recipe/image_types.bbclass387
-rw-r--r--meta/classes-recipe/image_types_wic.bbclass (renamed from meta/classes/image_types_wic.bbclass)31
-rw-r--r--meta/classes-recipe/kernel-arch.bbclass (renamed from meta/classes/kernel-arch.bbclass)22
-rw-r--r--meta/classes-recipe/kernel-artifact-names.bbclass (renamed from meta/classes/kernel-artifact-names.bbclass)8
-rw-r--r--meta/classes-recipe/kernel-devicetree.bbclass139
-rw-r--r--meta/classes-recipe/kernel-fitimage.bbclass (renamed from meta/classes/kernel-fitimage.bbclass)220
-rw-r--r--meta/classes-recipe/kernel-grub.bbclass (renamed from meta/classes/kernel-grub.bbclass)6
-rw-r--r--meta/classes-recipe/kernel-module-split.bbclass188
-rw-r--r--meta/classes-recipe/kernel-uboot.bbclass49
-rw-r--r--meta/classes-recipe/kernel-uimage.bbclass (renamed from meta/classes/kernel-uimage.bbclass)8
-rw-r--r--meta/classes-recipe/kernel-yocto.bbclass (renamed from meta/classes/kernel-yocto.bbclass)64
-rw-r--r--meta/classes-recipe/kernel.bbclass (renamed from meta/classes/kernel.bbclass)128
-rw-r--r--meta/classes-recipe/kernelsrc.bbclass17
-rw-r--r--meta/classes-recipe/lib_package.bbclass12
-rw-r--r--meta/classes-recipe/libc-package.bbclass (renamed from meta/classes/libc-package.bbclass)12
-rw-r--r--meta/classes-recipe/license_image.bbclass (renamed from meta/classes/license_image.bbclass)56
-rw-r--r--meta/classes-recipe/linux-dummy.bbclass (renamed from meta/classes/linux-dummy.bbclass)5
-rw-r--r--meta/classes-recipe/linux-kernel-base.bbclass62
-rw-r--r--meta/classes-recipe/linuxloader.bbclass (renamed from meta/classes/linuxloader.bbclass)8
-rw-r--r--meta/classes-recipe/live-vm-common.bbclass (renamed from meta/classes/live-vm-common.bbclass)10
-rw-r--r--meta/classes-recipe/manpages.bbclass41
-rw-r--r--meta/classes-recipe/meson-routines.bbclass (renamed from meta/classes/meson-routines.bbclass)8
-rw-r--r--meta/classes-recipe/meson.bbclass (renamed from meta/classes/meson.bbclass)44
-rw-r--r--meta/classes-recipe/mime-xdg.bbclass (renamed from meta/classes/mime-xdg.bbclass)4
-rw-r--r--meta/classes-recipe/mime.bbclass (renamed from meta/classes/mime.bbclass)6
-rw-r--r--meta/classes-recipe/module-base.bbclass (renamed from meta/classes/module-base.bbclass)7
-rw-r--r--meta/classes-recipe/module.bbclass (renamed from meta/classes/module.bbclass)18
-rw-r--r--meta/classes-recipe/multilib_header.bbclass (renamed from meta/classes/multilib_header.bbclass)6
-rw-r--r--meta/classes-recipe/multilib_script.bbclass41
-rw-r--r--meta/classes-recipe/native.bbclass (renamed from meta/classes/native.bbclass)19
-rw-r--r--meta/classes-recipe/nativesdk.bbclass (renamed from meta/classes/nativesdk.bbclass)12
-rw-r--r--meta/classes-recipe/nopackages.bbclass (renamed from meta/classes/nopackages.bbclass)6
-rw-r--r--meta/classes-recipe/npm.bbclass352
-rw-r--r--meta/classes-recipe/overlayfs-etc.bbclass (renamed from meta/classes/overlayfs-etc.bbclass)16
-rw-r--r--meta/classes-recipe/overlayfs.bbclass (renamed from meta/classes/overlayfs.bbclass)13
-rw-r--r--meta/classes-recipe/packagegroup.bbclass (renamed from meta/classes/packagegroup.bbclass)11
-rw-r--r--meta/classes-recipe/perl-version.bbclass (renamed from meta/classes/perl-version.bbclass)12
-rw-r--r--meta/classes-recipe/perlnative.bbclass9
-rw-r--r--meta/classes-recipe/pixbufcache.bbclass (renamed from meta/classes/pixbufcache.bbclass)6
-rw-r--r--meta/classes-recipe/pkgconfig.bbclass8
-rw-r--r--meta/classes-recipe/populate_sdk.bbclass13
-rw-r--r--meta/classes-recipe/populate_sdk_base.bbclass (renamed from meta/classes/populate_sdk_base.bbclass)59
-rw-r--r--meta/classes-recipe/populate_sdk_ext.bbclass (renamed from meta/classes/populate_sdk_ext.bbclass)120
-rw-r--r--meta/classes-recipe/ptest-cargo.bbclass138
-rw-r--r--meta/classes-recipe/ptest-gnome.bbclass (renamed from meta/classes/ptest-gnome.bbclass)6
-rw-r--r--meta/classes-recipe/ptest-perl.bbclass (renamed from meta/classes/ptest-perl.bbclass)6
-rw-r--r--meta/classes-recipe/ptest.bbclass (renamed from meta/classes/ptest.bbclass)18
-rw-r--r--meta/classes-recipe/pypi.bbclass43
-rw-r--r--meta/classes-recipe/python3-dir.bbclass11
-rw-r--r--meta/classes-recipe/python3native.bbclass (renamed from meta/classes/python3native.bbclass)6
-rw-r--r--meta/classes-recipe/python3targetconfig.bbclass41
-rw-r--r--meta/classes-recipe/python_flit_core.bbclass14
-rw-r--r--meta/classes-recipe/python_hatchling.bbclass27
-rw-r--r--meta/classes-recipe/python_maturin.bbclass17
-rw-r--r--meta/classes-recipe/python_mesonpy.bbclass52
-rw-r--r--meta/classes-recipe/python_pep517.bbclass (renamed from meta/classes/python_pep517.bbclass)19
-rw-r--r--meta/classes-recipe/python_poetry_core.bbclass9
-rw-r--r--meta/classes-recipe/python_pyo3.bbclass (renamed from meta/classes/python_pyo3.bbclass)8
-rw-r--r--meta/classes-recipe/python_setuptools3_rust.bbclass17
-rw-r--r--meta/classes-recipe/python_setuptools_build_meta.bbclass9
-rw-r--r--meta/classes-recipe/qemu.bbclass (renamed from meta/classes/qemu.bbclass)8
-rw-r--r--meta/classes-recipe/qemuboot.bbclass (renamed from meta/classes/qemuboot.bbclass)34
-rw-r--r--meta/classes-recipe/rootfs-postcommands.bbclass549
-rw-r--r--meta/classes-recipe/rootfs_deb.bbclass (renamed from meta/classes/rootfs_deb.bbclass)2
-rw-r--r--meta/classes-recipe/rootfs_ipk.bbclass (renamed from meta/classes/rootfs_ipk.bbclass)8
-rw-r--r--meta/classes-recipe/rootfs_rpm.bbclass (renamed from meta/classes/rootfs_rpm.bbclass)10
-rw-r--r--meta/classes-recipe/rootfsdebugfiles.bbclass (renamed from meta/classes/rootfsdebugfiles.bbclass)8
-rw-r--r--meta/classes-recipe/rust-common.bbclass196
-rw-r--r--meta/classes-recipe/rust-target-config.bbclass437
-rw-r--r--meta/classes-recipe/rust.bbclass (renamed from meta/classes/rust.bbclass)12
-rw-r--r--meta/classes-recipe/scons.bbclass40
-rw-r--r--meta/classes-recipe/setuptools3-base.bbclass30
-rw-r--r--meta/classes-recipe/setuptools3.bbclass38
-rw-r--r--meta/classes-recipe/setuptools3_legacy.bbclass (renamed from meta/classes/setuptools3_legacy.bbclass)16
-rw-r--r--meta/classes-recipe/siteinfo.bbclass (renamed from meta/classes/siteinfo.bbclass)14
-rw-r--r--meta/classes-recipe/syslinux.bbclass (renamed from meta/classes/syslinux.bbclass)4
-rw-r--r--meta/classes-recipe/systemd-boot-cfg.bbclass (renamed from meta/classes/systemd-boot-cfg.bbclass)8
-rw-r--r--meta/classes-recipe/systemd-boot.bbclass (renamed from meta/classes/systemd-boot.bbclass)2
-rw-r--r--meta/classes-recipe/systemd.bbclass (renamed from meta/classes/systemd.bbclass)11
-rw-r--r--meta/classes-recipe/testexport.bbclass (renamed from meta/classes/testexport.bbclass)20
-rw-r--r--meta/classes-recipe/testimage.bbclass (renamed from meta/classes/testimage.bbclass)94
-rw-r--r--meta/classes-recipe/testsdk.bbclass (renamed from meta/classes/testsdk.bbclass)2
-rw-r--r--meta/classes-recipe/texinfo.bbclass (renamed from meta/classes/texinfo.bbclass)6
-rw-r--r--meta/classes-recipe/toolchain-scripts-base.bbclass (renamed from meta/classes/toolchain-scripts-base.bbclass)6
-rw-r--r--meta/classes-recipe/toolchain-scripts.bbclass (renamed from meta/classes/toolchain-scripts.bbclass)50
-rw-r--r--meta/classes-recipe/uboot-config.bbclass (renamed from meta/classes/uboot-config.bbclass)27
-rw-r--r--meta/classes-recipe/uboot-extlinux-config.bbclass (renamed from meta/classes/uboot-extlinux-config.bbclass)27
-rw-r--r--meta/classes-recipe/uboot-sign.bbclass466
-rw-r--r--meta/classes-recipe/update-alternatives.bbclass (renamed from meta/classes/update-alternatives.bbclass)18
-rw-r--r--meta/classes-recipe/update-rc.d.bbclass (renamed from meta/classes/update-rc.d.bbclass)10
-rw-r--r--meta/classes-recipe/upstream-version-is-even.bbclass (renamed from meta/classes/upstream-version-is-even.bbclass)6
-rw-r--r--meta/classes-recipe/vala.bbclass (renamed from meta/classes/vala.bbclass)6
-rw-r--r--meta/classes-recipe/waf.bbclass (renamed from meta/classes/waf.bbclass)23
-rw-r--r--meta/classes-recipe/xmlcatalog.bbclass36
-rw-r--r--meta/classes/archiver.bbclass82
-rw-r--r--meta/classes/autotools-brokensep.bbclass5
-rw-r--r--meta/classes/buildhistory.bbclass58
-rw-r--r--meta/classes/buildstats-summary.bbclass6
-rw-r--r--meta/classes/cargo.bbclass90
-rw-r--r--meta/classes/cargo_common.bbclass124
-rw-r--r--meta/classes/ccache.bbclass17
-rw-r--r--meta/classes/ccmake.bbclass6
-rw-r--r--meta/classes/chrpath.bbclass8
-rw-r--r--meta/classes/cmake.bbclass217
-rw-r--r--meta/classes/copyleft_compliance.bbclass6
-rw-r--r--meta/classes/copyleft_filter.bbclass8
-rw-r--r--meta/classes/create-spdx-2.2.bbclass1158
-rw-r--r--meta/classes/create-spdx.bbclass1015
-rw-r--r--meta/classes/cve-check.bbclass322
-rw-r--r--meta/classes/devtool-source.bbclass9
-rw-r--r--meta/classes/distro_features_check.bbclass7
-rw-r--r--meta/classes/distrooverrides.bbclass6
-rw-r--r--meta/classes/externalsrc.bbclass60
-rw-r--r--meta/classes/extrausers.bbclass8
-rw-r--r--meta/classes/gi-docgen.bbclass24
-rw-r--r--meta/classes/glide.bbclass9
-rw-r--r--meta/classes/go-vendor.bbclass211
-rw-r--r--meta/classes/grub-efi.bbclass8
-rw-r--r--meta/classes/gtk-doc.bbclass83
-rw-r--r--meta/classes/icecc.bbclass63
-rw-r--r--meta/classes/image-artifact-names.bbclass22
-rw-r--r--meta/classes/image-buildinfo.bbclass54
-rw-r--r--meta/classes/image-combined-dbg.bbclass9
-rw-r--r--meta/classes/image_types.bbclass349
-rw-r--r--meta/classes/kernel-devicetree.bbclass113
-rw-r--r--meta/classes/kernel-module-split.bbclass191
-rw-r--r--meta/classes/kernel-uboot.bbclass34
-rw-r--r--meta/classes/kernelsrc.bbclass10
-rw-r--r--meta/classes/lib_package.bbclass7
-rw-r--r--meta/classes/linux-kernel-base.bbclass41
-rw-r--r--meta/classes/manpages.bbclass45
-rw-r--r--meta/classes/mcextend.bbclass6
-rw-r--r--meta/classes/metadata_scm.bbclass48
-rw-r--r--meta/classes/migrate_localcount.bbclass6
-rw-r--r--meta/classes/multilib.bbclass11
-rw-r--r--meta/classes/multilib_global.bbclass7
-rw-r--r--meta/classes/multilib_script.bbclass34
-rw-r--r--meta/classes/npm.bbclass317
-rw-r--r--meta/classes/oelint.bbclass6
-rw-r--r--meta/classes/own-mirrors.bbclass8
-rw-r--r--meta/classes/package.bbclass2522
-rw-r--r--meta/classes/package_tar.bbclass71
-rw-r--r--meta/classes/perlnative.bbclass3
-rw-r--r--meta/classes/pkgconfig.bbclass2
-rw-r--r--meta/classes/populate_sdk.bbclass7
-rw-r--r--meta/classes/prexport.bbclass6
-rw-r--r--meta/classes/primport.bbclass6
-rw-r--r--meta/classes/pypi.bbclass28
-rw-r--r--meta/classes/python3-dir.bbclass5
-rw-r--r--meta/classes/python3targetconfig.bbclass29
-rw-r--r--meta/classes/python_flit_core.bbclass5
-rw-r--r--meta/classes/python_poetry_core.bbclass5
-rw-r--r--meta/classes/python_setuptools3_rust.bbclass11
-rw-r--r--meta/classes/python_setuptools_build_meta.bbclass5
-rw-r--r--meta/classes/recipe_sanity.bbclass8
-rw-r--r--meta/classes/relative_symlinks.bbclass6
-rw-r--r--meta/classes/relocatable.bbclass6
-rw-r--r--meta/classes/remove-libtool.bbclass6
-rw-r--r--meta/classes/report-error.bbclass60
-rw-r--r--meta/classes/rm_work.bbclass122
-rw-r--r--meta/classes/rm_work_and_downloads.bbclass3
-rw-r--r--meta/classes/rootfs-postcommands.bbclass424
-rw-r--r--meta/classes/rust-bin.bbclass149
-rw-r--r--meta/classes/rust-common.bbclass189
-rw-r--r--meta/classes/scons.bbclass28
-rw-r--r--meta/classes/setuptools3-base.bbclass31
-rw-r--r--meta/classes/setuptools3.bbclass33
-rw-r--r--meta/classes/sign_ipk.bbclass6
-rw-r--r--meta/classes/sign_package_feed.bbclass6
-rw-r--r--meta/classes/sign_rpm.bbclass6
-rw-r--r--meta/classes/siteconfig.bbclass6
-rw-r--r--meta/classes/terminal.bbclass6
-rw-r--r--meta/classes/toaster.bbclass2
-rw-r--r--meta/classes/typecheck.bbclass6
-rw-r--r--meta/classes/uboot-sign.bbclass494
-rw-r--r--meta/classes/useradd-staticids.bbclass8
-rw-r--r--meta/classes/useradd.bbclass81
-rw-r--r--meta/classes/useradd_base.bbclass8
-rw-r--r--meta/classes/xmlcatalog.bbclass26
-rw-r--r--meta/classes/yocto-check-layer.bbclass6
-rw-r--r--meta/conf/abi_version.conf2
-rw-r--r--meta/conf/bitbake.conf103
-rw-r--r--meta/conf/ccache.conf1
-rw-r--r--meta/conf/conf-notes.txt19
-rw-r--r--meta/conf/cve-check-map.conf28
-rw-r--r--meta/conf/distro/defaultsetup.conf4
-rw-r--r--meta/conf/distro/include/cve-extra-exclusions.inc112
-rw-r--r--meta/conf/distro/include/default-distrovars.inc13
-rw-r--r--meta/conf/distro/include/default-providers.inc2
-rw-r--r--meta/conf/distro/include/distro_alias.inc1
-rw-r--r--meta/conf/distro/include/init-manager-systemd.inc4
-rw-r--r--meta/conf/distro/include/maintainers.inc198
-rw-r--r--meta/conf/distro/include/no-gplv3.inc30
-rw-r--r--meta/conf/distro/include/no-static-libs.inc2
-rw-r--r--meta/conf/distro/include/ptest-packagelists.inc255
-rw-r--r--meta/conf/distro/include/security_flags.inc2
-rw-r--r--meta/conf/distro/include/tcmode-default.inc25
-rw-r--r--meta/conf/distro/include/time64.inc49
-rw-r--r--meta/conf/distro/include/yocto-uninative.inc10
-rw-r--r--meta/conf/documentation.conf8
-rw-r--r--meta/conf/image-uefi.conf5
-rw-r--r--meta/conf/layer.conf15
-rw-r--r--meta/conf/machine-sdk/aarch64.conf2
-rw-r--r--meta/conf/machine-sdk/i586.conf1
-rw-r--r--meta/conf/machine-sdk/i686.conf2
-rw-r--r--meta/conf/machine-sdk/loongarch64.conf4
-rw-r--r--meta/conf/machine-sdk/ppc64.conf2
-rw-r--r--meta/conf/machine-sdk/ppc64le.conf2
-rw-r--r--meta/conf/machine-sdk/riscv64.conf4
-rw-r--r--meta/conf/machine-sdk/x86_64.conf3
-rw-r--r--meta/conf/machine/include/arm/arch-arm64.inc5
-rw-r--r--meta/conf/machine/include/arm/arch-armv8-1a.inc18
-rw-r--r--meta/conf/machine/include/arm/arch-armv8-2a.inc1
-rw-r--r--meta/conf/machine/include/arm/arch-armv8-3a.inc22
-rw-r--r--meta/conf/machine/include/arm/arch-armv8-4a.inc7
-rw-r--r--meta/conf/machine/include/arm/arch-armv8-5a.inc7
-rw-r--r--meta/conf/machine/include/arm/arch-armv8-6a.inc22
-rw-r--r--meta/conf/machine/include/arm/arch-armv8r.inc1
-rw-r--r--meta/conf/machine/include/arm/arch-armv9a.inc19
-rw-r--r--meta/conf/machine/include/arm/armv7r/tune-cortexr5.inc5
-rw-r--r--meta/conf/machine/include/arm/armv8-1m/tune-cortexm55.inc3
-rw-r--r--meta/conf/machine/include/arm/armv8-2a/tune-cortexa55.inc3
-rw-r--r--meta/conf/machine/include/arm/armv8-2a/tune-cortexa65.inc3
-rw-r--r--meta/conf/machine/include/arm/armv8-2a/tune-cortexa65ae.inc3
-rw-r--r--meta/conf/machine/include/arm/armv8-2a/tune-cortexa75-cortexa55.inc3
-rw-r--r--meta/conf/machine/include/arm/armv8-2a/tune-cortexa75.inc3
-rw-r--r--meta/conf/machine/include/arm/armv8-2a/tune-cortexa76-cortexa55.inc3
-rw-r--r--meta/conf/machine/include/arm/armv8-2a/tune-cortexa76.inc3
-rw-r--r--meta/conf/machine/include/arm/armv8-2a/tune-cortexa76ae.inc3
-rw-r--r--meta/conf/machine/include/arm/armv8-2a/tune-cortexa77.inc3
-rw-r--r--meta/conf/machine/include/arm/armv8-2a/tune-cortexa78.inc17
-rw-r--r--meta/conf/machine/include/arm/armv8-2a/tune-cortexa78ae.inc15
-rw-r--r--meta/conf/machine/include/arm/armv8-2a/tune-cortexa78c.inc15
-rw-r--r--meta/conf/machine/include/arm/armv8-2a/tune-cortexx1.inc15
-rw-r--r--meta/conf/machine/include/arm/armv8-2a/tune-cortexx1c.inc15
-rw-r--r--meta/conf/machine/include/arm/armv8-2a/tune-neoversee1.inc3
-rw-r--r--meta/conf/machine/include/arm/armv8-2a/tune-neoversen1.inc2
-rw-r--r--meta/conf/machine/include/arm/armv8-2a/tune-octeontx2.inc3
-rw-r--r--meta/conf/machine/include/arm/armv8-4a/tune-neoverse512tvb.inc15
-rw-r--r--meta/conf/machine/include/arm/armv8-4a/tune-neoversev1.inc15
-rw-r--r--meta/conf/machine/include/arm/armv8-m/tune-cortexm23.inc3
-rw-r--r--meta/conf/machine/include/arm/armv8-m/tune-cortexm33.inc3
-rw-r--r--meta/conf/machine/include/arm/armv8-m/tune-cortexm35p.inc3
-rw-r--r--meta/conf/machine/include/arm/armv8a/tune-cortexa32.inc3
-rw-r--r--meta/conf/machine/include/arm/armv8a/tune-cortexa34.inc3
-rw-r--r--meta/conf/machine/include/arm/armv8a/tune-cortexa35.inc3
-rw-r--r--meta/conf/machine/include/arm/armv8a/tune-cortexa53.inc3
-rw-r--r--meta/conf/machine/include/arm/armv8a/tune-cortexa57-cortexa53.inc3
-rw-r--r--meta/conf/machine/include/arm/armv8a/tune-cortexa57.inc3
-rw-r--r--meta/conf/machine/include/arm/armv8a/tune-cortexa72-cortexa53.inc3
-rw-r--r--meta/conf/machine/include/arm/armv8a/tune-cortexa72.inc3
-rw-r--r--meta/conf/machine/include/arm/armv8a/tune-cortexa73-cortexa35.inc3
-rw-r--r--meta/conf/machine/include/arm/armv8a/tune-cortexa73-cortexa53.inc3
-rw-r--r--meta/conf/machine/include/arm/armv8a/tune-cortexa73.inc3
-rw-r--r--meta/conf/machine/include/arm/armv8r/tune-cortexr52.inc8
-rw-r--r--meta/conf/machine/include/arm/armv8r/tune-cortexr82.inc15
-rw-r--r--meta/conf/machine/include/arm/armv9a/tune-cortexa510.inc15
-rw-r--r--meta/conf/machine/include/arm/armv9a/tune-cortexa710.inc15
-rw-r--r--meta/conf/machine/include/arm/armv9a/tune-cortexa715.inc15
-rw-r--r--meta/conf/machine/include/arm/armv9a/tune-cortexx2.inc15
-rw-r--r--meta/conf/machine/include/arm/armv9a/tune-cortexx3.inc15
-rw-r--r--meta/conf/machine/include/arm/armv9a/tune-neoversen2.inc11
-rw-r--r--meta/conf/machine/include/arm/armv9a/tune-neoversev2.inc15
-rw-r--r--meta/conf/machine/include/arm/feature-arm-sve.inc8
-rw-r--r--meta/conf/machine/include/arm/feature-arm-vfp.inc3
-rw-r--r--meta/conf/machine/include/loongarch/arch-loongarch.inc7
-rw-r--r--meta/conf/machine/include/loongarch/qemuloongarch.inc35
-rw-r--r--meta/conf/machine/include/loongarch/tune-loongarch.inc13
-rw-r--r--meta/conf/machine/include/microblaze/feature-microblaze-versions.inc2
-rw-r--r--meta/conf/machine/include/mips/arch-mips.inc2
-rw-r--r--meta/conf/machine/include/mips/tune-mips64r2.inc12
-rw-r--r--meta/conf/machine/include/powerpc/tune-power8.inc31
-rw-r--r--meta/conf/machine/include/qemu.inc3
-rw-r--r--meta/conf/machine/include/riscv/qemuriscv.inc10
-rw-r--r--meta/conf/machine/include/riscv/tune-riscv.inc2
-rw-r--r--meta/conf/machine/include/x86/qemuboot-x86.inc10
-rw-r--r--meta/conf/machine/include/x86/tune-core2.inc6
-rw-r--r--meta/conf/machine/include/x86/tune-x86-64-v3.inc31
-rw-r--r--meta/conf/machine/include/x86/x86-base.inc1
-rw-r--r--meta/conf/machine/qemuarm.conf7
-rw-r--r--meta/conf/machine/qemuarm64.conf5
-rw-r--r--meta/conf/machine/qemuarmv5.conf4
-rw-r--r--meta/conf/machine/qemuloongarch64.conf11
-rw-r--r--meta/conf/machine/qemuppc64.conf3
-rw-r--r--meta/conf/machine/qemux86-64.conf2
-rw-r--r--meta/conf/multilib.conf1
-rw-r--r--meta/conf/sanity.conf2
-rw-r--r--meta/conf/templates/default/conf-summary.txt1
-rw-r--r--meta/conf/testexport.conf3
-rw-r--r--meta/files/common-licenses/LGPL-3.0-with-zeromq-exception181
-rw-r--r--meta/files/ext-sdk-prepare.py2
-rw-r--r--meta/files/fs-perms-persistent-log.txt1
-rw-r--r--meta/files/fs-perms.txt1
-rw-r--r--meta/files/layers.example.json48
-rw-r--r--meta/files/layers.schema.json76
-rw-r--r--meta/files/overlayfs-create-dirs.service.in3
-rw-r--r--meta/files/overlayfs-create-dirs.sh11
-rw-r--r--meta/files/overlayfs-etc-preinit.sh.in37
-rw-r--r--meta/files/screenshot-tests/core-image-sato-qemuarm.pngbin0 -> 52173 bytes
-rw-r--r--meta/files/screenshot-tests/core-image-sato-qemuarm64.pngbin0 -> 47282 bytes
-rw-r--r--meta/files/screenshot-tests/core-image-sato-qemumips.pngbin0 -> 47282 bytes
-rw-r--r--meta/files/screenshot-tests/core-image-sato-qemumips64.pngbin0 -> 47282 bytes
-rw-r--r--meta/files/screenshot-tests/core-image-sato-qemuppc.pngbin0 -> 57366 bytes
-rw-r--r--meta/files/screenshot-tests/core-image-sato-qemuriscv64.pngbin0 -> 39012 bytes
-rw-r--r--meta/files/screenshot-tests/core-image-sato-qemux86-64.pngbin0 -> 46324 bytes
-rw-r--r--meta/files/screenshot-tests/core-image-sato-qemux86.pngbin0 -> 46324 bytes
l---------meta/files/screenshot-tests/core-image-sato-sdk-qemuarm.png1
l---------meta/files/screenshot-tests/core-image-sato-sdk-qemuarm64.png1
l---------meta/files/screenshot-tests/core-image-sato-sdk-qemumips.png1
l---------meta/files/screenshot-tests/core-image-sato-sdk-qemumips64.png1
l---------meta/files/screenshot-tests/core-image-sato-sdk-qemuppc.png1
l---------meta/files/screenshot-tests/core-image-sato-sdk-qemuriscv64.png1
l---------meta/files/screenshot-tests/core-image-sato-sdk-qemux86-64.png1
l---------meta/files/screenshot-tests/core-image-sato-sdk-qemux86.png1
-rw-r--r--meta/files/toolchain-shar-relocate.sh13
-rw-r--r--meta/lib/bblayers/buildconf.py84
-rw-r--r--meta/lib/bblayers/create.py15
-rw-r--r--meta/lib/bblayers/makesetup.py102
-rw-r--r--meta/lib/bblayers/setupwriters/oe-setup-layers.py117
-rw-r--r--meta/lib/buildstats.py82
-rw-r--r--meta/lib/oe/__init__.py6
-rw-r--r--meta/lib/oe/buildcfg.py79
-rw-r--r--meta/lib/oe/buildhistory_analysis.py2
-rw-r--r--meta/lib/oe/cachedpath.py2
-rw-r--r--meta/lib/oe/classextend.py9
-rw-r--r--meta/lib/oe/classutils.py2
-rw-r--r--meta/lib/oe/copy_buildsystem.py4
-rw-r--r--meta/lib/oe/cve_check.py108
-rw-r--r--meta/lib/oe/data.py2
-rw-r--r--meta/lib/oe/distro_check.py2
-rw-r--r--meta/lib/oe/elf.py4
-rw-r--r--meta/lib/oe/go.py34
-rw-r--r--meta/lib/oe/gpg_sign.py29
-rw-r--r--meta/lib/oe/license.py2
-rw-r--r--meta/lib/oe/lsb.py2
-rw-r--r--meta/lib/oe/maketype.py2
-rw-r--r--meta/lib/oe/manifest.py2
-rw-r--r--meta/lib/oe/npm_registry.py175
-rw-r--r--meta/lib/oe/overlayfs.py8
-rw-r--r--meta/lib/oe/package.py1742
-rw-r--r--meta/lib/oe/package_manager/__init__.py14
-rw-r--r--meta/lib/oe/package_manager/deb/__init__.py20
-rw-r--r--meta/lib/oe/package_manager/deb/manifest.py2
-rw-r--r--meta/lib/oe/package_manager/deb/rootfs.py2
-rw-r--r--meta/lib/oe/package_manager/deb/sdk.py7
-rw-r--r--meta/lib/oe/package_manager/ipk/__init__.py40
-rw-r--r--meta/lib/oe/package_manager/ipk/manifest.py4
-rw-r--r--meta/lib/oe/package_manager/ipk/rootfs.py4
-rw-r--r--meta/lib/oe/package_manager/ipk/sdk.py11
-rw-r--r--meta/lib/oe/package_manager/rpm/__init__.py44
-rw-r--r--meta/lib/oe/package_manager/rpm/manifest.py2
-rw-r--r--meta/lib/oe/package_manager/rpm/rootfs.py4
-rw-r--r--meta/lib/oe/package_manager/rpm/sdk.py10
-rw-r--r--meta/lib/oe/packagedata.py256
-rw-r--r--meta/lib/oe/packagegroup.py2
-rw-r--r--meta/lib/oe/patch.py237
-rw-r--r--meta/lib/oe/path.py8
-rw-r--r--meta/lib/oe/prservice.py6
-rw-r--r--meta/lib/oe/qa.py19
-rw-r--r--meta/lib/oe/recipeutils.py102
-rw-r--r--meta/lib/oe/reproducible.py9
-rw-r--r--meta/lib/oe/rootfs.py77
-rw-r--r--meta/lib/oe/rust.py8
-rw-r--r--meta/lib/oe/sbom.py50
-rw-r--r--meta/lib/oe/sdk.py4
-rw-r--r--meta/lib/oe/spdx.py17
-rw-r--r--meta/lib/oe/sstatesig.py121
-rw-r--r--meta/lib/oe/terminal.py6
-rw-r--r--meta/lib/oe/types.py2
-rw-r--r--meta/lib/oe/useradd.py2
-rw-r--r--meta/lib/oe/utils.py100
-rw-r--r--meta/lib/oeqa/buildperf/base.py2
-rw-r--r--meta/lib/oeqa/controllers/__init__.py2
-rw-r--r--meta/lib/oeqa/controllers/testtargetloader.py2
-rw-r--r--meta/lib/oeqa/core/context.py2
-rw-r--r--meta/lib/oeqa/core/decorator/data.py34
-rw-r--r--meta/lib/oeqa/core/loader.py12
-rw-r--r--meta/lib/oeqa/core/runner.py12
-rw-r--r--meta/lib/oeqa/core/target/qemu.py15
-rw-r--r--meta/lib/oeqa/core/target/ssh.py96
-rw-r--r--meta/lib/oeqa/core/utils/concurrencytest.py15
-rw-r--r--meta/lib/oeqa/files/maturin/guessing-game/Cargo.toml20
-rw-r--r--meta/lib/oeqa/files/maturin/guessing-game/LICENSE-APACHE201
-rw-r--r--meta/lib/oeqa/files/maturin/guessing-game/LICENSE-MIT25
-rw-r--r--meta/lib/oeqa/files/maturin/guessing-game/pyproject.toml8
-rw-r--r--meta/lib/oeqa/files/maturin/guessing-game/src/lib.rs48
-rw-r--r--meta/lib/oeqa/files/test.rs2
-rw-r--r--meta/lib/oeqa/oetest.py24
-rw-r--r--meta/lib/oeqa/runtime/cases/_qemutiny.py13
-rw-r--r--meta/lib/oeqa/runtime/cases/apt.py6
-rw-r--r--meta/lib/oeqa/runtime/cases/boot.py2
-rw-r--r--meta/lib/oeqa/runtime/cases/buildcpio.py7
-rw-r--r--meta/lib/oeqa/runtime/cases/buildgalculator.py2
-rw-r--r--meta/lib/oeqa/runtime/cases/buildlzip.py2
-rw-r--r--meta/lib/oeqa/runtime/cases/connman.py2
-rw-r--r--meta/lib/oeqa/runtime/cases/date.py2
-rw-r--r--meta/lib/oeqa/runtime/cases/df.py2
-rw-r--r--meta/lib/oeqa/runtime/cases/dnf.py88
-rw-r--r--meta/lib/oeqa/runtime/cases/ethernet_ip_connman.py5
-rw-r--r--meta/lib/oeqa/runtime/cases/gcc.py2
-rw-r--r--meta/lib/oeqa/runtime/cases/gi.py2
-rw-r--r--meta/lib/oeqa/runtime/cases/go.py4
-rw-r--r--meta/lib/oeqa/runtime/cases/gstreamer.py2
-rw-r--r--meta/lib/oeqa/runtime/cases/kernelmodule.py2
-rw-r--r--meta/lib/oeqa/runtime/cases/ksample.py2
-rw-r--r--meta/lib/oeqa/runtime/cases/ldd.py2
-rw-r--r--meta/lib/oeqa/runtime/cases/login.py116
-rw-r--r--meta/lib/oeqa/runtime/cases/logrotate.py2
-rw-r--r--meta/lib/oeqa/runtime/cases/ltp.py28
-rw-r--r--meta/lib/oeqa/runtime/cases/maturin.py58
-rw-r--r--meta/lib/oeqa/runtime/cases/multilib.py2
-rw-r--r--meta/lib/oeqa/runtime/cases/oe_syslog.py4
-rw-r--r--meta/lib/oeqa/runtime/cases/opkg.py2
-rw-r--r--meta/lib/oeqa/runtime/cases/pam.py2
-rw-r--r--meta/lib/oeqa/runtime/cases/parselogs-ignores-common.txt62
-rw-r--r--meta/lib/oeqa/runtime/cases/parselogs-ignores-mipsarch.txt2
-rw-r--r--meta/lib/oeqa/runtime/cases/parselogs-ignores-qemuall.txt27
-rw-r--r--meta/lib/oeqa/runtime/cases/parselogs-ignores-qemuarm64.txt6
-rw-r--r--meta/lib/oeqa/runtime/cases/parselogs-ignores-qemuarmv5.txt19
-rw-r--r--meta/lib/oeqa/runtime/cases/parselogs-ignores-qemuppc.txt6
-rw-r--r--meta/lib/oeqa/runtime/cases/parselogs-ignores-qemuppc64.txt4
-rw-r--r--meta/lib/oeqa/runtime/cases/parselogs-ignores-qemux86.txt2
-rw-r--r--meta/lib/oeqa/runtime/cases/parselogs-ignores-x86.txt10
l---------meta/lib/oeqa/runtime/cases/parselogs-ignores-x86_64.txt1
-rw-r--r--meta/lib/oeqa/runtime/cases/parselogs.py394
-rw-r--r--meta/lib/oeqa/runtime/cases/perl.py2
-rw-r--r--meta/lib/oeqa/runtime/cases/ping.py5
-rw-r--r--meta/lib/oeqa/runtime/cases/ptest.py12
-rw-r--r--meta/lib/oeqa/runtime/cases/python.py2
-rw-r--r--meta/lib/oeqa/runtime/cases/rpm.py43
-rw-r--r--meta/lib/oeqa/runtime/cases/rt.py19
-rw-r--r--meta/lib/oeqa/runtime/cases/rtc.py13
-rw-r--r--meta/lib/oeqa/runtime/cases/runlevel.py5
-rw-r--r--meta/lib/oeqa/runtime/cases/rust.py55
-rw-r--r--meta/lib/oeqa/runtime/cases/scons.py2
-rw-r--r--meta/lib/oeqa/runtime/cases/scp.py4
-rw-r--r--meta/lib/oeqa/runtime/cases/skeletoninit.py4
-rw-r--r--meta/lib/oeqa/runtime/cases/ssh.py5
-rw-r--r--meta/lib/oeqa/runtime/cases/stap.py2
-rw-r--r--meta/lib/oeqa/runtime/cases/storage.py2
-rw-r--r--meta/lib/oeqa/runtime/cases/suspend.py5
-rw-r--r--meta/lib/oeqa/runtime/cases/systemd.py30
-rw-r--r--meta/lib/oeqa/runtime/cases/terminal.py5
-rw-r--r--meta/lib/oeqa/runtime/cases/usb_hid.py5
-rw-r--r--meta/lib/oeqa/runtime/cases/weston.py10
-rw-r--r--meta/lib/oeqa/runtime/cases/x32lib.py2
-rw-r--r--meta/lib/oeqa/runtime/cases/xorg.py2
-rw-r--r--meta/lib/oeqa/runtime/context.py15
-rw-r--r--meta/lib/oeqa/runtime/decorator/package.py5
-rw-r--r--meta/lib/oeqa/sdk/buildtools-cases/build.py2
-rw-r--r--meta/lib/oeqa/sdk/buildtools-cases/gcc.py2
-rw-r--r--meta/lib/oeqa/sdk/buildtools-cases/https.py2
-rw-r--r--meta/lib/oeqa/sdk/buildtools-cases/sanity.py4
-rw-r--r--meta/lib/oeqa/sdk/buildtools-docs-cases/build.py2
-rw-r--r--meta/lib/oeqa/sdk/cases/assimp.py13
-rw-r--r--meta/lib/oeqa/sdk/cases/buildcpio.py9
-rw-r--r--meta/lib/oeqa/sdk/cases/buildepoxy.py7
-rw-r--r--meta/lib/oeqa/sdk/cases/buildgalculator.py5
-rw-r--r--meta/lib/oeqa/sdk/cases/buildlzip.py2
-rw-r--r--meta/lib/oeqa/sdk/cases/gcc.py2
-rw-r--r--meta/lib/oeqa/sdk/cases/maturin.py79
-rw-r--r--meta/lib/oeqa/sdk/cases/perl.py2
-rw-r--r--meta/lib/oeqa/sdk/cases/python.py13
-rw-r--r--meta/lib/oeqa/sdk/cases/rust.py57
-rw-r--r--meta/lib/oeqa/sdk/files/rust/hello/Cargo.toml6
-rw-r--r--meta/lib/oeqa/sdk/files/rust/hello/build.rs3
-rw-r--r--meta/lib/oeqa/sdk/files/rust/hello/src/main.rs3
-rw-r--r--meta/lib/oeqa/sdk/testmetaidesupport.py45
-rw-r--r--meta/lib/oeqa/sdk/testsdk.py14
-rw-r--r--meta/lib/oeqa/sdkext/cases/devtool.py2
-rw-r--r--meta/lib/oeqa/sdkext/testsdk.py3
-rw-r--r--meta/lib/oeqa/selftest/case.py19
-rw-r--r--meta/lib/oeqa/selftest/cases/_sstatetests_noauto.py4
-rw-r--r--meta/lib/oeqa/selftest/cases/archiver.py34
-rw-r--r--meta/lib/oeqa/selftest/cases/baremetal.py14
-rw-r--r--meta/lib/oeqa/selftest/cases/bblayers.py125
-rw-r--r--meta/lib/oeqa/selftest/cases/bblock.py203
-rw-r--r--meta/lib/oeqa/selftest/cases/bblogging.py20
-rw-r--r--meta/lib/oeqa/selftest/cases/bbtests.py56
-rw-r--r--meta/lib/oeqa/selftest/cases/binutils.py11
-rw-r--r--meta/lib/oeqa/selftest/cases/buildhistory.py2
-rw-r--r--meta/lib/oeqa/selftest/cases/buildoptions.py5
-rw-r--r--meta/lib/oeqa/selftest/cases/c_cpp.py60
-rw-r--r--meta/lib/oeqa/selftest/cases/containerimage.py2
-rw-r--r--meta/lib/oeqa/selftest/cases/cve_check.py123
-rw-r--r--meta/lib/oeqa/selftest/cases/debuginfod.py158
-rw-r--r--meta/lib/oeqa/selftest/cases/devtool.py1123
-rw-r--r--meta/lib/oeqa/selftest/cases/distrodata.py44
-rw-r--r--meta/lib/oeqa/selftest/cases/eSDK.py120
-rw-r--r--meta/lib/oeqa/selftest/cases/efibootpartition.py41
-rw-r--r--meta/lib/oeqa/selftest/cases/esdk.py122
-rw-r--r--meta/lib/oeqa/selftest/cases/externalsrc.py44
-rw-r--r--meta/lib/oeqa/selftest/cases/fetch.py6
-rw-r--r--meta/lib/oeqa/selftest/cases/fitimage.py75
-rw-r--r--meta/lib/oeqa/selftest/cases/gcc.py12
-rw-r--r--meta/lib/oeqa/selftest/cases/gdbserver.py67
-rw-r--r--meta/lib/oeqa/selftest/cases/git.py15
-rw-r--r--meta/lib/oeqa/selftest/cases/gitarchivetests.py136
-rw-r--r--meta/lib/oeqa/selftest/cases/glibc.py19
-rw-r--r--meta/lib/oeqa/selftest/cases/gotoolchain.py11
-rw-r--r--meta/lib/oeqa/selftest/cases/image_typedep.py2
-rw-r--r--meta/lib/oeqa/selftest/cases/imagefeatures.py147
-rw-r--r--meta/lib/oeqa/selftest/cases/incompatible_lic.py25
-rw-r--r--meta/lib/oeqa/selftest/cases/intercept.py21
-rw-r--r--meta/lib/oeqa/selftest/cases/kerneldevelopment.py7
-rw-r--r--meta/lib/oeqa/selftest/cases/layerappend.py2
-rw-r--r--meta/lib/oeqa/selftest/cases/liboe.py4
-rw-r--r--meta/lib/oeqa/selftest/cases/lic_checksum.py4
-rw-r--r--meta/lib/oeqa/selftest/cases/locales.py54
-rw-r--r--meta/lib/oeqa/selftest/cases/manifest.py2
-rw-r--r--meta/lib/oeqa/selftest/cases/meta_ide.py21
-rw-r--r--meta/lib/oeqa/selftest/cases/minidebuginfo.py44
-rw-r--r--meta/lib/oeqa/selftest/cases/multiconfig.py15
-rw-r--r--meta/lib/oeqa/selftest/cases/newlib.py2
-rw-r--r--meta/lib/oeqa/selftest/cases/oelib/buildhistory.py26
-rw-r--r--meta/lib/oeqa/selftest/cases/oelib/elf.py2
-rw-r--r--meta/lib/oeqa/selftest/cases/oelib/license.py2
-rw-r--r--meta/lib/oeqa/selftest/cases/oelib/path.py2
-rw-r--r--meta/lib/oeqa/selftest/cases/oelib/types.py2
-rw-r--r--meta/lib/oeqa/selftest/cases/oelib/utils.py2
-rw-r--r--meta/lib/oeqa/selftest/cases/oescripts.py43
-rw-r--r--meta/lib/oeqa/selftest/cases/overlayfs.py163
-rw-r--r--meta/lib/oeqa/selftest/cases/package.py37
-rw-r--r--meta/lib/oeqa/selftest/cases/pkgdata.py11
-rw-r--r--meta/lib/oeqa/selftest/cases/prservice.py33
-rw-r--r--meta/lib/oeqa/selftest/cases/pseudo.py2
-rw-r--r--meta/lib/oeqa/selftest/cases/recipetool.py605
-rw-r--r--meta/lib/oeqa/selftest/cases/recipeutils.py4
-rw-r--r--meta/lib/oeqa/selftest/cases/reproducible.py42
-rw-r--r--meta/lib/oeqa/selftest/cases/resulttooltests.py279
-rw-r--r--meta/lib/oeqa/selftest/cases/rootfspostcommandstests.py97
-rw-r--r--meta/lib/oeqa/selftest/cases/rpmtests.py14
-rw-r--r--meta/lib/oeqa/selftest/cases/runcmd.py6
-rw-r--r--meta/lib/oeqa/selftest/cases/runqemu.py72
-rw-r--r--meta/lib/oeqa/selftest/cases/runtime_test.py59
-rw-r--r--meta/lib/oeqa/selftest/cases/rust.py231
-rw-r--r--meta/lib/oeqa/selftest/cases/selftest.py2
-rw-r--r--meta/lib/oeqa/selftest/cases/signing.py4
-rw-r--r--meta/lib/oeqa/selftest/cases/spdx.py54
-rw-r--r--meta/lib/oeqa/selftest/cases/sstate.py64
-rw-r--r--meta/lib/oeqa/selftest/cases/sstatetests.py613
-rw-r--r--meta/lib/oeqa/selftest/cases/sysroot.py49
-rw-r--r--meta/lib/oeqa/selftest/cases/tinfoil.py39
-rw-r--r--meta/lib/oeqa/selftest/cases/usergrouptests.py57
-rw-r--r--meta/lib/oeqa/selftest/cases/wic.py334
-rw-r--r--meta/lib/oeqa/selftest/cases/wrapper.py16
-rw-r--r--meta/lib/oeqa/selftest/cases/yoctotestresultsquerytests.py39
-rw-r--r--meta/lib/oeqa/selftest/context.py129
-rw-r--r--meta/lib/oeqa/targetcontrol.py14
-rw-r--r--meta/lib/oeqa/utils/__init__.py9
-rw-r--r--meta/lib/oeqa/utils/commands.py36
-rw-r--r--meta/lib/oeqa/utils/decorators.py85
-rw-r--r--meta/lib/oeqa/utils/dump.py45
-rw-r--r--meta/lib/oeqa/utils/ftools.py2
-rw-r--r--meta/lib/oeqa/utils/gitarchive.py56
-rw-r--r--meta/lib/oeqa/utils/httpserver.py29
-rw-r--r--meta/lib/oeqa/utils/logparser.py73
-rw-r--r--meta/lib/oeqa/utils/metadata.py6
-rw-r--r--meta/lib/oeqa/utils/network.py2
-rw-r--r--meta/lib/oeqa/utils/nfs.py10
-rw-r--r--meta/lib/oeqa/utils/package_manager.py2
-rw-r--r--meta/lib/oeqa/utils/postactions.py98
-rw-r--r--meta/lib/oeqa/utils/qemurunner.py333
-rw-r--r--meta/lib/oeqa/utils/subprocesstweak.py2
-rw-r--r--meta/lib/patchtest/README.md20
-rw-r--r--meta/lib/patchtest/data.py86
-rw-r--r--meta/lib/patchtest/patch.py62
-rw-r--r--meta/lib/patchtest/repo.py174
-rw-r--r--meta/lib/patchtest/requirements.txt6
-rw-r--r--meta/lib/patchtest/selftest/files/TestMbox.test_author_valid.1.fail32
-rw-r--r--meta/lib/patchtest/selftest/files/TestMbox.test_author_valid.1.pass31
-rw-r--r--meta/lib/patchtest/selftest/files/TestMbox.test_author_valid.2.fail31
-rw-r--r--meta/lib/patchtest/selftest/files/TestMbox.test_author_valid.2.pass31
-rw-r--r--meta/lib/patchtest/selftest/files/TestMbox.test_bugzilla_entry_format.fail25
-rw-r--r--meta/lib/patchtest/selftest/files/TestMbox.test_bugzilla_entry_format.pass25
-rw-r--r--meta/lib/patchtest/selftest/files/TestMbox.test_commit_message_presence.fail22
-rw-r--r--meta/lib/patchtest/selftest/files/TestMbox.test_commit_message_presence.pass24
-rw-r--r--meta/lib/patchtest/selftest/files/TestMbox.test_mbox_format.1.fail36
-rw-r--r--meta/lib/patchtest/selftest/files/TestMbox.test_mbox_format.2.fail35
-rw-r--r--meta/lib/patchtest/selftest/files/TestMbox.test_mbox_format.pass33
-rw-r--r--meta/lib/patchtest/selftest/files/TestMbox.test_series_merge_on_head.1.skip35
-rw-r--r--meta/lib/patchtest/selftest/files/TestMbox.test_series_merge_on_head.2.skip41
-rw-r--r--meta/lib/patchtest/selftest/files/TestMbox.test_shortlog_format.fail73
-rw-r--r--meta/lib/patchtest/selftest/files/TestMbox.test_shortlog_format.pass73
-rw-r--r--meta/lib/patchtest/selftest/files/TestMbox.test_shortlog_length.fail73
-rw-r--r--meta/lib/patchtest/selftest/files/TestMbox.test_shortlog_length.pass73
-rw-r--r--meta/lib/patchtest/selftest/files/TestMbox.test_signed_off_by_presence.1.fail71
-rw-r--r--meta/lib/patchtest/selftest/files/TestMbox.test_signed_off_by_presence.2.fail72
-rw-r--r--meta/lib/patchtest/selftest/files/TestMbox.test_signed_off_by_presence.pass72
-rw-r--r--meta/lib/patchtest/selftest/files/TestMetadata.test_cve_check_ignore.fail30
-rw-r--r--meta/lib/patchtest/selftest/files/TestMetadata.test_cve_check_ignore.pass31
-rw-r--r--meta/lib/patchtest/selftest/files/TestMetadata.test_lic_files_chksum_modified_not_mentioned.fail37
-rw-r--r--meta/lib/patchtest/selftest/files/TestMetadata.test_lic_files_chksum_modified_not_mentioned.pass39
-rw-r--r--meta/lib/patchtest/selftest/files/TestMetadata.test_lic_files_chksum_presence.fail53
-rw-r--r--meta/lib/patchtest/selftest/files/TestMetadata.test_lic_files_chksum_presence.pass54
-rw-r--r--meta/lib/patchtest/selftest/files/TestMetadata.test_src_uri_left_files.fail35
-rw-r--r--meta/lib/patchtest/selftest/files/TestMetadata.test_src_uri_left_files.pass51
-rw-r--r--meta/lib/patchtest/selftest/files/TestMetadata.test_summary_presence.fail46
-rw-r--r--meta/lib/patchtest/selftest/files/TestMetadata.test_summary_presence.pass49
-rw-r--r--meta/lib/patchtest/selftest/files/TestPatch.test_cve_tag_format.fail73
-rw-r--r--meta/lib/patchtest/selftest/files/TestPatch.test_cve_tag_format.pass73
-rw-r--r--meta/lib/patchtest/selftest/files/TestPatch.test_signed_off_by_presence.fail71
-rw-r--r--meta/lib/patchtest/selftest/files/TestPatch.test_signed_off_by_presence.pass72
-rwxr-xr-xmeta/lib/patchtest/selftest/selftest94
-rw-r--r--meta/lib/patchtest/tests/__init__.py0
-rw-r--r--meta/lib/patchtest/tests/base.py239
-rw-r--r--meta/lib/patchtest/tests/pyparsing/common.py26
-rw-r--r--meta/lib/patchtest/tests/pyparsing/parse_cve_tags.py18
-rw-r--r--meta/lib/patchtest/tests/pyparsing/parse_shortlog.py14
-rw-r--r--meta/lib/patchtest/tests/pyparsing/parse_signed_off_by.py22
-rw-r--r--meta/lib/patchtest/tests/pyparsing/parse_upstream_status.py24
-rw-r--r--meta/lib/patchtest/tests/test_mbox.py159
-rw-r--r--meta/lib/patchtest/tests/test_metadata.py197
-rw-r--r--meta/lib/patchtest/tests/test_patch.py103
-rw-r--r--meta/lib/patchtest/tests/test_python_pylint.py65
-rw-r--r--meta/lib/patchtest/utils.py168
-rw-r--r--meta/lib/rootfspostcommands.py64
-rw-r--r--meta/recipes-bsp/acpid/acpid.inc3
-rw-r--r--meta/recipes-bsp/acpid/acpid/0001-Replace-stat64-with-stat.patch31
-rw-r--r--meta/recipes-bsp/acpid/acpid_2.0.33.bb6
-rw-r--r--meta/recipes-bsp/acpid/acpid_2.0.34.bb6
-rw-r--r--meta/recipes-bsp/alsa-state/alsa-state.bb8
-rwxr-xr-xmeta/recipes-bsp/alsa-state/alsa-state/alsa-state-init3
-rw-r--r--meta/recipes-bsp/apmd/apmd/apmd.service7
-rw-r--r--meta/recipes-bsp/apmd/apmd/apmd_proxy91
-rw-r--r--meta/recipes-bsp/apmd/apmd/apmd_proxy.conf16
-rw-r--r--meta/recipes-bsp/apmd/apmd/default8
-rwxr-xr-xmeta/recipes-bsp/apmd/apmd/init51
-rw-r--r--meta/recipes-bsp/apmd/apmd/legacy.patch133
-rw-r--r--meta/recipes-bsp/apmd/apmd/libtool.patch41
-rw-r--r--meta/recipes-bsp/apmd/apmd/linkage.patch53
-rw-r--r--meta/recipes-bsp/apmd/apmd/unlinux.patch25
-rw-r--r--meta/recipes-bsp/apmd/apmd/wexitcode.patch26
-rw-r--r--meta/recipes-bsp/apmd/apmd_3.2.2-15.bb85
-rw-r--r--meta/recipes-bsp/efibootmgr/efibootmgr/0001-remove-extra-decl.patch31
-rw-r--r--meta/recipes-bsp/efibootmgr/efibootmgr/0001-src-make-compatible-with-efivar-38.patch47
-rw-r--r--meta/recipes-bsp/efibootmgr/efibootmgr/97668ae0bce776a36ea2001dea63d376be8274ac.patch83
-rw-r--r--meta/recipes-bsp/efibootmgr/efibootmgr_17.bb39
-rw-r--r--meta/recipes-bsp/efibootmgr/efibootmgr_18.bb32
-rw-r--r--meta/recipes-bsp/efivar/efivar/0001-src-Makefile-build-util.c-separately-for-makeguids.patch38
-rw-r--r--meta/recipes-bsp/efivar/efivar_38.bb40
-rw-r--r--meta/recipes-bsp/efivar/efivar_39.bb34
-rw-r--r--meta/recipes-bsp/formfactor/formfactor_0.0.bb1
-rw-r--r--meta/recipes-bsp/gnu-efi/gnu-efi/0001-riscv64-adjust-type-definitions.patch34
-rw-r--r--meta/recipes-bsp/gnu-efi/gnu-efi/0001-riscv64-ignore-unknown-relocs.patch32
-rw-r--r--meta/recipes-bsp/gnu-efi/gnu-efi/lib-Makefile-fix-parallel-issue.patch38
-rw-r--r--meta/recipes-bsp/gnu-efi/gnu-efi/no-werror.patch19
-rw-r--r--meta/recipes-bsp/gnu-efi/gnu-efi/parallel-make-archives.patch17
-rw-r--r--meta/recipes-bsp/gnu-efi/gnu-efi_3.0.14.bb70
-rw-r--r--meta/recipes-bsp/gnu-efi/gnu-efi_3.0.17.bb75
-rw-r--r--meta/recipes-bsp/grub/files/0001-Disable-mfpmath-sse-as-well-when-SSE-is-disabled.patch9
-rw-r--r--meta/recipes-bsp/grub/files/0001-RISC-V-Restore-the-typcast-to-long.patch10
-rw-r--r--meta/recipes-bsp/grub/files/0001-configure.ac-Use-_zicsr_zifencei-extentions-on-riscv.patch47
-rw-r--r--meta/recipes-bsp/grub/files/0001-grub.d-10_linux.in-add-oe-s-kernel-name.patch10
-rw-r--r--meta/recipes-bsp/grub/files/CVE-2021-3981-grub-mkconfig-Restore-umask-for-the-grub.cfg.patch49
-rw-r--r--meta/recipes-bsp/grub/files/autogen.sh-exclude-pc.patch10
-rw-r--r--meta/recipes-bsp/grub/files/determinism.patch68
-rw-r--r--meta/recipes-bsp/grub/files/grub-module-explicitly-keeps-symbole-.module_license.patch7
-rw-r--r--meta/recipes-bsp/grub/grub-efi_2.12.bb (renamed from meta/recipes-bsp/grub/grub-efi_2.06.bb)0
-rw-r--r--meta/recipes-bsp/grub/grub2.inc30
-rw-r--r--meta/recipes-bsp/grub/grub_2.12.bb (renamed from meta/recipes-bsp/grub/grub_2.06.bb)0
-rw-r--r--meta/recipes-bsp/keymaps/keymaps_1.0.bb1
-rw-r--r--meta/recipes-bsp/libacpi/libacpi_0.2.bb1
-rw-r--r--meta/recipes-bsp/lrzsz/lrzsz_0.12.20.bb1
-rw-r--r--meta/recipes-bsp/opensbi/opensbi-payloads.inc12
-rw-r--r--meta/recipes-bsp/opensbi/opensbi_1.0.bb48
-rw-r--r--meta/recipes-bsp/opensbi/opensbi_1.4.bb47
-rw-r--r--meta/recipes-bsp/pciutils/pciutils/configure.patch15
-rw-r--r--meta/recipes-bsp/pciutils/pciutils_3.11.1.bb64
-rw-r--r--meta/recipes-bsp/pciutils/pciutils_3.8.0.bb61
-rw-r--r--meta/recipes-bsp/pm-utils/pm-utils_1.4.1.bb8
-rw-r--r--meta/recipes-bsp/setserial/setserial/0001-setserial.c-Add-needed-system-headers-for-ioctl-and-.patch41
-rw-r--r--meta/recipes-bsp/setserial/setserial_2.17.bb5
-rw-r--r--meta/recipes-bsp/u-boot/files/0001-riscv-fix-build-with-binutils-2.38.patch40
-rw-r--r--meta/recipes-bsp/u-boot/files/0001-riscv32-Use-double-float-ABI-for-rv32.patch44
-rw-r--r--meta/recipes-bsp/u-boot/libubootenv_0.3.2.bb26
-rw-r--r--meta/recipes-bsp/u-boot/libubootenv_0.3.5.bb26
-rw-r--r--meta/recipes-bsp/u-boot/u-boot-common.inc6
-rw-r--r--meta/recipes-bsp/u-boot/u-boot-configure.inc3
-rw-r--r--meta/recipes-bsp/u-boot/u-boot-tools.inc19
-rw-r--r--meta/recipes-bsp/u-boot/u-boot-tools_2024.04.bb (renamed from meta/recipes-bsp/u-boot/u-boot-tools_2022.04.bb)0
-rw-r--r--meta/recipes-bsp/u-boot/u-boot.inc21
-rw-r--r--meta/recipes-bsp/u-boot/u-boot_2022.04.bb9
-rw-r--r--meta/recipes-bsp/u-boot/u-boot_2024.04.bb5
-rw-r--r--meta/recipes-bsp/usbinit/usbinit.bb1
-rw-r--r--meta/recipes-bsp/usbutils/usbutils/0001-usbutils.pc.in-Fix-Cflags-entry.patch34
-rw-r--r--meta/recipes-bsp/usbutils/usbutils_014.bb32
-rw-r--r--meta/recipes-bsp/usbutils/usbutils_017.bb33
-rw-r--r--meta/recipes-bsp/v86d/v86d_0.1.10.bb2
-rw-r--r--meta/recipes-connectivity/avahi/avahi_0.8.bb32
-rw-r--r--meta/recipes-connectivity/avahi/files/CVE-2023-1981.patch58
-rw-r--r--meta/recipes-connectivity/avahi/files/CVE-2023-38469-1.patch48
-rw-r--r--meta/recipes-connectivity/avahi/files/CVE-2023-38469-2.patch65
-rw-r--r--meta/recipes-connectivity/avahi/files/CVE-2023-38470-1.patch59
-rw-r--r--meta/recipes-connectivity/avahi/files/CVE-2023-38470-2.patch52
-rw-r--r--meta/recipes-connectivity/avahi/files/CVE-2023-38471-1.patch73
-rw-r--r--meta/recipes-connectivity/avahi/files/CVE-2023-38471-2.patch52
-rw-r--r--meta/recipes-connectivity/avahi/files/CVE-2023-38472.patch46
-rw-r--r--meta/recipes-connectivity/avahi/files/CVE-2023-38473.patch110
-rw-r--r--meta/recipes-connectivity/avahi/files/invalid-service.patch29
-rw-r--r--meta/recipes-connectivity/bind/bind/0001-avoid-start-failure-with-bind-user.patch (renamed from meta/recipes-connectivity/bind/bind-9.18.2/0001-avoid-start-failure-with-bind-user.patch)0
-rw-r--r--meta/recipes-connectivity/bind/bind/0001-named-lwresd-V-and-start-log-hide-build-options.patch (renamed from meta/recipes-connectivity/bind/bind-9.18.2/0001-named-lwresd-V-and-start-log-hide-build-options.patch)0
-rw-r--r--meta/recipes-connectivity/bind/bind/bind-ensure-searching-for-json-headers-searches-sysr.patch (renamed from meta/recipes-connectivity/bind/bind-9.18.2/bind-ensure-searching-for-json-headers-searches-sysr.patch)6
-rw-r--r--meta/recipes-connectivity/bind/bind/bind9 (renamed from meta/recipes-connectivity/bind/bind-9.18.2/bind9)0
-rw-r--r--meta/recipes-connectivity/bind/bind/conf.patch (renamed from meta/recipes-connectivity/bind/bind-9.18.2/conf.patch)0
-rw-r--r--meta/recipes-connectivity/bind/bind/generate-rndc-key.sh (renamed from meta/recipes-connectivity/bind/bind-9.18.2/generate-rndc-key.sh)0
-rw-r--r--meta/recipes-connectivity/bind/bind/init.d-add-support-for-read-only-rootfs.patch (renamed from meta/recipes-connectivity/bind/bind-9.18.2/init.d-add-support-for-read-only-rootfs.patch)0
-rw-r--r--meta/recipes-connectivity/bind/bind/make-etc-initd-bind-stop-work.patch (renamed from meta/recipes-connectivity/bind/bind-9.18.2/make-etc-initd-bind-stop-work.patch)0
-rw-r--r--meta/recipes-connectivity/bind/bind/named.service (renamed from meta/recipes-connectivity/bind/bind-9.18.2/named.service)0
-rw-r--r--meta/recipes-connectivity/bind/bind_9.18.2.bb127
-rw-r--r--meta/recipes-connectivity/bind/bind_9.18.25.bb113
-rw-r--r--meta/recipes-connectivity/bluez5/bluez5.inc14
-rw-r--r--meta/recipes-connectivity/bluez5/bluez5/0001-test-gatt-Fix-hung-issue.patch10
-rw-r--r--meta/recipes-connectivity/bluez5/bluez5/0001-tests-add-a-target-for-building-tests-without-runnin.patch10
-rw-r--r--meta/recipes-connectivity/bluez5/bluez5/0004-src-shared-util.c-include-linux-limits.h.patch27
-rw-r--r--meta/recipes-connectivity/bluez5/bluez5/fix_service.patch30
-rw-r--r--meta/recipes-connectivity/bluez5/bluez5_5.64.bb70
-rw-r--r--meta/recipes-connectivity/bluez5/bluez5_5.72.bb69
-rw-r--r--meta/recipes-connectivity/connman/connman-conf.bb1
-rw-r--r--meta/recipes-connectivity/connman/connman-conf/main.conf2
-rw-r--r--meta/recipes-connectivity/connman/connman.inc12
-rw-r--r--meta/recipes-connectivity/connman/connman/0001-src-log.c-Include-libgen.h-for-basename-API.patch55
-rw-r--r--meta/recipes-connectivity/connman/connman/0001-vpn-Adding-support-for-latest-pppd-2.5.0-release.patch152
-rw-r--r--meta/recipes-connectivity/connman/connman/0002-resolve-musl-does-not-implement-res_ninit.patch8
-rw-r--r--meta/recipes-connectivity/connman/connman/connman39
-rw-r--r--meta/recipes-connectivity/connman/connman_1.41.bb15
-rw-r--r--meta/recipes-connectivity/connman/connman_1.42.bb17
-rw-r--r--meta/recipes-connectivity/dhcpcd/dhcpcd_10.0.6.bb61
-rw-r--r--meta/recipes-connectivity/dhcpcd/dhcpcd_9.4.1.bb60
-rw-r--r--meta/recipes-connectivity/dhcpcd/files/0001-20-resolv.conf-improve-the-sitation-of-working-with-.patch82
-rw-r--r--meta/recipes-connectivity/dhcpcd/files/0001-dhcpcd.8-Fix-conflict-error-when-enable-multilib.patch44
-rw-r--r--meta/recipes-connectivity/dhcpcd/files/0001-remove-INCLUDEDIR-to-prevent-build-issues.patch14
-rw-r--r--meta/recipes-connectivity/inetutils/inetutils/0001-ftpd-telnetd-Fix-multiple-definitions-of-errcatch-an.patch58
-rw-r--r--meta/recipes-connectivity/inetutils/inetutils/fix-buffer-fortify-tfpt.patch25
-rw-r--r--meta/recipes-connectivity/inetutils/inetutils/fix-disable-ipv6.patch85
-rw-r--r--meta/recipes-connectivity/inetutils/inetutils/inetutils-1.8-0001-printf-parse-pull-in-features.h-for-__GLIBC__.patch27
-rw-r--r--meta/recipes-connectivity/inetutils/inetutils/inetutils-1.8-0003-wchar.patch25
-rw-r--r--meta/recipes-connectivity/inetutils/inetutils/inetutils-1.9-PATH_PROCNET_DEV.patch37
-rw-r--r--meta/recipes-connectivity/inetutils/inetutils/inetutils-only-check-pam_appl.h-when-pam-enabled.patch49
-rw-r--r--meta/recipes-connectivity/inetutils/inetutils_2.2.bb211
-rw-r--r--meta/recipes-connectivity/inetutils/inetutils_2.5.bb218
-rw-r--r--meta/recipes-connectivity/iproute2/iproute2.inc91
-rw-r--r--meta/recipes-connectivity/iproute2/iproute2_5.17.0.bb11
-rw-r--r--meta/recipes-connectivity/iproute2/iproute2_6.7.0.bb106
-rw-r--r--meta/recipes-connectivity/iw/iw_5.16.bb31
-rw-r--r--meta/recipes-connectivity/iw/iw_6.7.bb31
-rw-r--r--meta/recipes-connectivity/kea/files/0001-kea-fix-reproducible-build-failure.patch62
-rw-r--r--meta/recipes-connectivity/kea/files/fix-multilib-conflict.patch10
-rw-r--r--meta/recipes-connectivity/kea/files/fix_pid_keactrl.patch6
-rw-r--r--meta/recipes-connectivity/kea/files/kea-dhcp-ddns.service1
-rw-r--r--meta/recipes-connectivity/kea/kea_2.0.2.bb77
-rw-r--r--meta/recipes-connectivity/kea/kea_2.4.1.bb78
-rw-r--r--meta/recipes-connectivity/libpcap/libpcap_1.10.1.bb43
-rw-r--r--meta/recipes-connectivity/libpcap/libpcap_1.10.4.bb43
-rw-r--r--meta/recipes-connectivity/libuv/libuv_1.44.1.bb21
-rw-r--r--meta/recipes-connectivity/libuv/libuv_1.48.0.bb22
-rw-r--r--meta/recipes-connectivity/mobile-broadband-provider-info/mobile-broadband-provider-info_git.bb4
-rw-r--r--meta/recipes-connectivity/neard/neard_0.18.bb51
-rw-r--r--meta/recipes-connectivity/neard/neard_0.19.bb51
-rw-r--r--meta/recipes-connectivity/nfs-utils/nfs-utils/0001-locktest-Makefile.am-Do-not-use-build-flags.patch36
-rw-r--r--meta/recipes-connectivity/nfs-utils/nfs-utils/0001-reexport.h-Include-unistd.h-to-compile-with-musl.patch34
-rw-r--r--meta/recipes-connectivity/nfs-utils/nfs-utils/0001-tools-locktest-Use-intmax_t-to-print-off_t.patch53
-rw-r--r--meta/recipes-connectivity/nfs-utils/nfs-utils/nfs-mountd.service1
-rw-r--r--meta/recipes-connectivity/nfs-utils/nfs-utils/nfs-server.service1
-rw-r--r--meta/recipes-connectivity/nfs-utils/nfs-utils/nfs-statd.service2
-rw-r--r--meta/recipes-connectivity/nfs-utils/nfs-utils_2.6.1.bb145
-rw-r--r--meta/recipes-connectivity/nfs-utils/nfs-utils_2.6.4.bb154
-rw-r--r--meta/recipes-connectivity/ofono/ofono_1.34.bb55
-rw-r--r--meta/recipes-connectivity/ofono/ofono_2.4.bb55
-rw-r--r--meta/recipes-connectivity/openssh/openssh/0001-regress-banner.sh-log-input-and-output-files-on-erro.patch61
-rw-r--r--meta/recipes-connectivity/openssh/openssh/0001-systemd-Add-optional-support-for-systemd-sd_notify.patch96
-rwxr-xr-xmeta/recipes-connectivity/openssh/openssh/run-ptest16
-rw-r--r--meta/recipes-connectivity/openssh/openssh/ssh_config14
-rw-r--r--meta/recipes-connectivity/openssh/openssh/sshd.service17
-rw-r--r--meta/recipes-connectivity/openssh/openssh/sshd_check_keys3
-rw-r--r--meta/recipes-connectivity/openssh/openssh/sshd_config17
-rw-r--r--meta/recipes-connectivity/openssh/openssh_9.0p1.bb183
-rw-r--r--meta/recipes-connectivity/openssh/openssh_9.7p1.bb202
-rw-r--r--meta/recipes-connectivity/openssl/files/environment.d-openssl.sh4
-rw-r--r--meta/recipes-connectivity/openssl/openssl/0001-Added-handshake-history-reporting-when-test-fails.patch374
-rw-r--r--meta/recipes-connectivity/openssl/openssl/0001-Configure-do-not-tweak-mips-cflags.patch11
-rw-r--r--meta/recipes-connectivity/openssl/openssl/0001-buildinfo-strip-sysroot-and-debug-prefix-map-from-co.patch20
-rw-r--r--meta/recipes-connectivity/openssl/openssl/CVE-2024-2511.patch120
-rw-r--r--meta/recipes-connectivity/openssl/openssl/afalg.patch31
-rw-r--r--meta/recipes-connectivity/openssl/openssl/bti.patch58
-rw-r--r--meta/recipes-connectivity/openssl/openssl/run-ptest2
-rw-r--r--meta/recipes-connectivity/openssl/openssl_3.0.3.bb258
-rw-r--r--meta/recipes-connectivity/openssl/openssl_3.2.1.bb264
-rw-r--r--meta/recipes-connectivity/ppp-dialin/ppp-dialin_0.1.bb1
-rw-r--r--meta/recipes-connectivity/ppp/ppp/0001-ppp-fix-build-against-5.15-headers.patch36
-rw-r--r--meta/recipes-connectivity/ppp/ppp/makefix.patch40
-rw-r--r--meta/recipes-connectivity/ppp/ppp_2.4.9.bb99
-rw-r--r--meta/recipes-connectivity/ppp/ppp_2.5.0.bb75
-rw-r--r--meta/recipes-connectivity/resolvconf/resolvconf/0001-avoid-using-m-option-for-readlink.patch37
-rw-r--r--meta/recipes-connectivity/resolvconf/resolvconf_1.91.bb67
-rw-r--r--meta/recipes-connectivity/resolvconf/resolvconf_1.92.bb67
-rw-r--r--meta/recipes-connectivity/slirp/libslirp_git.bb18
-rw-r--r--meta/recipes-connectivity/socat/files/0001-fix-compile-procan.c-failed.patch62
-rw-r--r--meta/recipes-connectivity/socat/socat/0001-configure.ac-check-getprotobynumber_r-with-AC_TRY_LI.patch35
-rw-r--r--meta/recipes-connectivity/socat/socat_1.7.4.3.bb53
-rw-r--r--meta/recipes-connectivity/socat/socat_1.8.0.0.bb53
-rw-r--r--meta/recipes-connectivity/wpa-supplicant/wpa-supplicant/0001-PEAP-client-Update-Phase-2-authentication-requiremen.patch213
-rw-r--r--meta/recipes-connectivity/wpa-supplicant/wpa-supplicant_2.10.bb3
-rw-r--r--meta/recipes-core/base-files/base-files/0001-add-nss-resolve-to-nsswitch.patch31
-rw-r--r--meta/recipes-core/base-files/base-files/hosts2
-rw-r--r--meta/recipes-core/base-files/base-files/profile16
-rw-r--r--meta/recipes-core/base-files/base-files_3.0.14.bb7
-rw-r--r--meta/recipes-core/base-passwd/base-passwd/0001-base-passwd-Add-the-sgx-group.patch30
-rw-r--r--meta/recipes-core/base-passwd/base-passwd/0006-Make-it-possible-to-build-without-debconf-support.patch129
-rw-r--r--meta/recipes-core/base-passwd/base-passwd/0007-Add-wheel-group.patch20
-rw-r--r--meta/recipes-core/base-passwd/base-passwd/0007-Make-it-possible-to-disable-the-generation-of-the-do.patch46
-rw-r--r--meta/recipes-core/base-passwd/base-passwd_3.5.52.bb122
-rw-r--r--meta/recipes-core/base-passwd/base-passwd_3.6.3.bb125
-rw-r--r--meta/recipes-core/busybox/busybox-inittab_1.35.0.bb85
-rw-r--r--meta/recipes-core/busybox/busybox-inittab_1.36.1.bb63
-rw-r--r--meta/recipes-core/busybox/busybox.inc27
-rw-r--r--meta/recipes-core/busybox/busybox/0001-depmod-Ignore-.debug-directories.patch2
-rw-r--r--meta/recipes-core/busybox/busybox/defconfig12
-rw-r--r--meta/recipes-core/busybox/busybox/musl.cfg1
-rw-r--r--meta/recipes-core/busybox/busybox/recognize_connmand.patch10
-rw-r--r--meta/recipes-core/busybox/busybox/sha1sum.cfg1
-rw-r--r--meta/recipes-core/busybox/busybox/sha_accel.cfg2
-rw-r--r--meta/recipes-core/busybox/busybox/start-stop-false.patch35
-rw-r--r--meta/recipes-core/busybox/busybox_1.35.0.bb55
-rw-r--r--meta/recipes-core/busybox/busybox_1.36.1.bb57
-rw-r--r--meta/recipes-core/busybox/files/syslog2
-rw-r--r--meta/recipes-core/coreutils/coreutils/0001-local.mk-fix-cross-compiling-problem.patch12
-rw-r--r--meta/recipes-core/coreutils/coreutils/remove-usr-local-lib-from-m4.patch7
-rw-r--r--meta/recipes-core/coreutils/coreutils_9.1.bb210
-rw-r--r--meta/recipes-core/coreutils/coreutils_9.5.bb219
-rw-r--r--meta/recipes-core/dbus-wait/dbus-wait_git.bb7
-rw-r--r--meta/recipes-core/dbus/dbus_1.14.0.bb183
-rw-r--r--meta/recipes-core/dbus/dbus_1.14.10.bb186
-rw-r--r--meta/recipes-core/dropbear/dropbear.inc126
-rw-r--r--meta/recipes-core/dropbear/dropbear/CVE-2023-36328.patch144
-rw-r--r--meta/recipes-core/dropbear/dropbear_2022.82.bb3
-rw-r--r--meta/recipes-core/dropbear/dropbear_2022.83.bb132
-rw-r--r--meta/recipes-core/ell/ell_0.50.bb23
-rw-r--r--meta/recipes-core/ell/ell_0.64.bb22
-rw-r--r--meta/recipes-core/expat/expat/run-ptest2
-rw-r--r--meta/recipes-core/expat/expat_2.4.8.bb31
-rw-r--r--meta/recipes-core/expat/expat_2.6.2.bb33
-rw-r--r--meta/recipes-core/fts/fts_1.2.7.bb2
-rw-r--r--meta/recipes-core/gettext/gettext-0.21/0001-libtextstyle-fix-builds-with-automake-1.16.4-and-new.patch29
-rw-r--r--meta/recipes-core/gettext/gettext-0.21/0001-msgmerge-29-Add-executable-file-mode-bits.patch23
-rw-r--r--meta/recipes-core/gettext/gettext-0.21/mingw.patch28
-rw-r--r--meta/recipes-core/gettext/gettext-0.21/use-pkgconfig.patch699
-rw-r--r--meta/recipes-core/gettext/gettext-minimal-0.21/Makefile.in.in510
-rw-r--r--meta/recipes-core/gettext/gettext-minimal-0.21/aclocal/gettext.m4386
-rw-r--r--meta/recipes-core/gettext/gettext-minimal-0.21/aclocal/host-cpu-c-abi.m4675
-rw-r--r--meta/recipes-core/gettext/gettext-minimal-0.21/aclocal/iconv.m4288
-rw-r--r--meta/recipes-core/gettext/gettext-minimal-0.21/aclocal/intlmacosx.m465
-rw-r--r--meta/recipes-core/gettext/gettext-minimal-0.21/aclocal/lib-ld.m4168
-rw-r--r--meta/recipes-core/gettext/gettext-minimal-0.21/aclocal/lib-link.m4800
-rw-r--r--meta/recipes-core/gettext/gettext-minimal-0.21/aclocal/lib-prefix.m4320
-rw-r--r--meta/recipes-core/gettext/gettext-minimal-0.21/aclocal/nls.m432
-rw-r--r--meta/recipes-core/gettext/gettext-minimal-0.21/aclocal/po.m4450
-rw-r--r--meta/recipes-core/gettext/gettext-minimal-0.21/aclocal/progtest.m491
-rwxr-xr-xmeta/recipes-core/gettext/gettext-minimal-0.21/config.rpath684
-rw-r--r--meta/recipes-core/gettext/gettext-minimal-0.21/remove-potcdate.sin25
-rw-r--r--meta/recipes-core/gettext/gettext-minimal-native_0.21.bb30
-rw-r--r--meta/recipes-core/gettext/gettext-minimal-native_0.22.5.bb46
-rw-r--r--meta/recipes-core/gettext/gettext-minimal/COPYING (renamed from meta/recipes-core/gettext/gettext-minimal-0.21/COPYING)0
-rw-r--r--meta/recipes-core/gettext/gettext-sources.inc4
-rw-r--r--meta/recipes-core/gettext/gettext/0001-init-env.in-do-not-add-C-CXX-parameters.patch (renamed from meta/recipes-core/gettext/gettext-0.21/0001-init-env.in-do-not-add-C-CXX-parameters.patch)2
-rw-r--r--meta/recipes-core/gettext/gettext/0001-tests-autopoint-3-unset-MAKEFLAGS.patch (renamed from meta/recipes-core/gettext/gettext-0.21/0001-tests-autopoint-3-unset-MAKEFLAGS.patch)6
-rw-r--r--meta/recipes-core/gettext/gettext/parallel.patch (renamed from meta/recipes-core/gettext/gettext-0.21/parallel.patch)7
-rw-r--r--meta/recipes-core/gettext/gettext/run-ptest (renamed from meta/recipes-core/gettext/gettext-0.21/run-ptest)0
-rw-r--r--meta/recipes-core/gettext/gettext/serial-tests-config.patch (renamed from meta/recipes-core/gettext/gettext-0.21/serial-tests-config.patch)9
-rw-r--r--meta/recipes-core/gettext/gettext/use-pkgconfig.patch391
-rw-r--r--meta/recipes-core/gettext/gettext_0.21.bb218
-rw-r--r--meta/recipes-core/gettext/gettext_0.22.5.bb213
-rw-r--r--meta/recipes-core/glib-2.0/glib-2.0/0001-Do-not-ignore-return-value-of-write.patch39
-rw-r--r--meta/recipes-core/glib-2.0/glib-2.0/0001-Do-not-write-bindir-into-pkg-config-files.patch75
-rw-r--r--meta/recipes-core/glib-2.0/glib-2.0/0001-Fix-DATADIRNAME-on-uclibc-Linux.patch4
-rw-r--r--meta/recipes-core/glib-2.0/glib-2.0/0001-Install-gio-querymodules-as-libexec_PROGRAM.patch7
-rw-r--r--meta/recipes-core/glib-2.0/glib-2.0/0001-Remove-the-warning-about-deprecated-paths-in-schemas.patch5
-rw-r--r--meta/recipes-core/glib-2.0/glib-2.0/0001-Set-host_machine-correctly-when-building-with-mingw3.patch43
-rw-r--r--meta/recipes-core/glib-2.0/glib-2.0/0001-Switch-from-the-deprecated-distutils-module-to-the-p.patch34
-rw-r--r--meta/recipes-core/glib-2.0/glib-2.0/0001-gio-tests-resources.c-comment-out-a-build-host-only-.patch7
-rw-r--r--meta/recipes-core/glib-2.0/glib-2.0/0001-meson-Run-atomics-test-on-clang-as-well.patch7
-rw-r--r--meta/recipes-core/glib-2.0/glib-2.0/0001-meson.build-do-not-enable-pidfd-features-on-native-g.patch29
-rw-r--r--meta/recipes-core/glib-2.0/glib-2.0/0010-Do-not-hardcode-python-path-into-various-tools.patch7
-rw-r--r--meta/recipes-core/glib-2.0/glib-2.0/Enable-more-tests-while-cross-compiling.patch123
-rw-r--r--meta/recipes-core/glib-2.0/glib-2.0/fix-regex.patch54
-rw-r--r--meta/recipes-core/glib-2.0/glib-2.0/memory-monitor.patch361
-rw-r--r--meta/recipes-core/glib-2.0/glib-2.0/meson.cross.d/common-glibc1
-rw-r--r--meta/recipes-core/glib-2.0/glib-2.0/relocate-modules.patch46
-rw-r--r--meta/recipes-core/glib-2.0/glib-2.0/run-ptest3
-rw-r--r--meta/recipes-core/glib-2.0/glib-2.0/skip-timeout.patch32
-rw-r--r--meta/recipes-core/glib-2.0/glib-2.0_2.72.1.bb53
-rw-r--r--meta/recipes-core/glib-2.0/glib-2.0_2.78.4.bb57
-rw-r--r--meta/recipes-core/glib-2.0/glib.inc58
-rw-r--r--meta/recipes-core/glib-networking/glib-networking/eagain.patch36
-rw-r--r--meta/recipes-core/glib-networking/glib-networking_2.72.0.bb38
-rw-r--r--meta/recipes-core/glib-networking/glib-networking_2.78.1.bb45
-rw-r--r--meta/recipes-core/glibc/cross-localedef-native_2.35.bb54
-rw-r--r--meta/recipes-core/glibc/cross-localedef-native_2.39.bb54
-rw-r--r--meta/recipes-core/glibc/glibc-common.inc4
-rw-r--r--meta/recipes-core/glibc/glibc-locale.inc29
-rw-r--r--meta/recipes-core/glibc/glibc-locale_2.39.bb (renamed from meta/recipes-core/glibc/glibc-locale_2.35.bb)0
-rw-r--r--meta/recipes-core/glibc/glibc-mtrace_2.39.bb (renamed from meta/recipes-core/glibc/glibc-mtrace_2.35.bb)0
-rw-r--r--meta/recipes-core/glibc/glibc-package.inc10
-rw-r--r--meta/recipes-core/glibc/glibc-scripts_2.39.bb (renamed from meta/recipes-core/glibc/glibc-scripts_2.35.bb)0
-rw-r--r--meta/recipes-core/glibc/glibc-tests_2.35.bb116
-rw-r--r--meta/recipes-core/glibc/glibc-testsuite_2.35.bb35
-rw-r--r--meta/recipes-core/glibc/glibc-testsuite_2.39.bb36
-rw-r--r--meta/recipes-core/glibc/glibc-version.inc12
-rw-r--r--meta/recipes-core/glibc/glibc-y2038-tests_2.39.bb119
-rw-r--r--meta/recipes-core/glibc/glibc.inc2
-rw-r--r--meta/recipes-core/glibc/glibc/0001-localedef-Add-hardlink-resolver-from-util-linux.patch2
-rw-r--r--meta/recipes-core/glibc/glibc/0002-localedef-fix-ups-hardlink-to-make-it-compile.patch2
-rw-r--r--meta/recipes-core/glibc/glibc/0003-nativesdk-glibc-Look-for-host-system-ld.so.cache-as-.patch8
-rw-r--r--meta/recipes-core/glibc/glibc/0004-nativesdk-glibc-Fix-buffer-overrun-with-a-relocated-.patch6
-rw-r--r--meta/recipes-core/glibc/glibc/0005-nativesdk-glibc-Raise-the-size-of-arrays-containing-.patch41
-rw-r--r--meta/recipes-core/glibc/glibc/0006-nativesdk-glibc-Allow-64-bit-atomics-for-x86.patch6
-rw-r--r--meta/recipes-core/glibc/glibc/0007-nativesdk-glibc-Make-relocatable-install-for-locales.patch12
-rw-r--r--meta/recipes-core/glibc/glibc/0008-nativesdk-glibc-Fall-back-to-faccessat-on-faccess2-r.patch4
-rw-r--r--meta/recipes-core/glibc/glibc/0009-yes-within-the-path-sets-wrong-config-variables.patch98
-rw-r--r--meta/recipes-core/glibc/glibc/0010-eglibc-Cross-building-and-testing-instructions.patch2
-rw-r--r--meta/recipes-core/glibc/glibc/0011-eglibc-Help-bootstrap-cross-toolchain.patch8
-rw-r--r--meta/recipes-core/glibc/glibc/0012-eglibc-Resolve-__fpscr_values-on-SH4.patch6
-rw-r--r--meta/recipes-core/glibc/glibc/0013-eglibc-Forward-port-cross-locale-generation-support.patch68
-rw-r--r--meta/recipes-core/glibc/glibc/0014-localedef-add-to-archive-uses-a-hard-coded-locale-pa.patch4
-rw-r--r--meta/recipes-core/glibc/glibc/0015-powerpc-Do-not-ask-compiler-for-finding-arch.patch48
-rw-r--r--meta/recipes-core/glibc/glibc/0016-locale-prevent-maybe-uninitialized-errors-with-Os-BZ.patch53
-rw-r--r--meta/recipes-core/glibc/glibc/0016-wordsize.h-Unify-the-header-between-arm-and-aarch64.patch49
-rw-r--r--meta/recipes-core/glibc/glibc/0017-Replace-echo-with-printf-builtin-in-nscd-init-script.patch79
-rw-r--r--meta/recipes-core/glibc/glibc/0017-readlib-Add-OECORE_KNOWN_INTERPRETER_NAMES-to-known-.patch29
-rw-r--r--meta/recipes-core/glibc/glibc/0018-sysdeps-gnu-configure.ac-Set-libc_cv_rootsbindir-onl.patch41
-rw-r--r--meta/recipes-core/glibc/glibc/0018-wordsize.h-Unify-the-header-between-arm-and-aarch64.patch70
-rw-r--r--meta/recipes-core/glibc/glibc/0019-powerpc-Do-not-ask-compiler-for-finding-arch.patch48
-rw-r--r--meta/recipes-core/glibc/glibc/0019-timezone-Make-shell-interpreter-overridable-in-tzsel.patch47
-rw-r--r--meta/recipes-core/glibc/glibc/0020-tzselect.ksh-Use-bin-sh-default-shell-interpreter.patch27
-rw-r--r--meta/recipes-core/glibc/glibc/0021-Replace-echo-with-printf-builtin-in-nscd-init-script.patch79
-rw-r--r--meta/recipes-core/glibc/glibc/0021-fix-create-thread-failed-in-unprivileged-process-BZ-.patch86
-rw-r--r--meta/recipes-core/glibc/glibc/0022-Avoid-hardcoded-build-time-paths-in-the-output-binar.patch32
-rw-r--r--meta/recipes-core/glibc/glibc/0022-sysdeps-gnu-configure.ac-Set-libc_cv_rootsbindir-onl.patch39
-rw-r--r--meta/recipes-core/glibc/glibc/0023-aarch64-configure-Pass-mcpu-along-with-march-to-dete.patch62
-rw-r--r--meta/recipes-core/glibc/glibc/0023-timezone-Make-shell-interpreter-overridable-in-tzsel.patch47
-rw-r--r--meta/recipes-core/glibc/glibc/0024-fix-create-thread-failed-in-unprivileged-process-BZ-.patch88
-rw-r--r--meta/recipes-core/glibc/glibc/0024-qemu-stale-process.patch45
-rw-r--r--meta/recipes-core/glibc/glibc/check-test-wrapper2
-rwxr-xr-xmeta/recipes-core/glibc/glibc/run-ptest14
-rw-r--r--meta/recipes-core/glibc/glibc_2.35.bb123
-rw-r--r--meta/recipes-core/glibc/glibc_2.39.bb128
-rw-r--r--meta/recipes-core/glibc/ldconfig-native-2.12.1/ldconfig-handle-.dynstr-located-in-separate-segment.patch178
-rw-r--r--meta/recipes-core/glibc/ldconfig-native_2.12.1.bb2
-rw-r--r--meta/recipes-core/ifupdown/files/0001-Define-FNM_EXTMATCH-for-musl.patch8
-rw-r--r--meta/recipes-core/ifupdown/files/0001-Makefile-do-not-use-dpkg-for-determining-OS-type.patch3
-rw-r--r--meta/recipes-core/ifupdown/files/0001-ifupdown-skip-wrong-test-case.patch12
-rw-r--r--meta/recipes-core/ifupdown/files/defn2-c-man-don-t-rely-on-dpkg-architecture-to-set-a.patch10
-rw-r--r--meta/recipes-core/ifupdown/files/tweak-ptest-script.patch19
-rw-r--r--meta/recipes-core/ifupdown/ifupdown_0.8.37.bb57
-rw-r--r--meta/recipes-core/ifupdown/ifupdown_0.8.41.bb62
-rw-r--r--meta/recipes-core/images/build-appliance-image_15.0.0.bb25
-rw-r--r--meta/recipes-core/images/core-image-initramfs-boot.bb22
-rw-r--r--meta/recipes-core/images/core-image-minimal-initramfs.bb7
-rw-r--r--meta/recipes-core/images/core-image-ptest-all.bb34
-rw-r--r--meta/recipes-core/images/core-image-ptest-fast.bb24
-rw-r--r--meta/recipes-core/images/core-image-ptest.bb44
-rw-r--r--meta/recipes-core/images/core-image-tiny-initramfs.bb4
-rw-r--r--meta/recipes-core/init-ifupdown/init-ifupdown-1.0/interfaces6
-rw-r--r--meta/recipes-core/init-ifupdown/init-ifupdown_1.0.bb1
-rw-r--r--meta/recipes-core/initrdscripts/initramfs-boot_1.0.bb1
-rwxr-xr-xmeta/recipes-core/initrdscripts/initramfs-framework/finish9
-rw-r--r--meta/recipes-core/initrdscripts/initramfs-framework/overlayroot14
-rw-r--r--meta/recipes-core/initrdscripts/initramfs-framework_1.0.bb1
-rw-r--r--meta/recipes-core/initrdscripts/initramfs-live-boot-tiny_1.0.bb1
-rw-r--r--meta/recipes-core/initrdscripts/initramfs-live-boot_1.0.bb1
-rw-r--r--meta/recipes-core/initrdscripts/initramfs-live-install-efi_1.0.bb1
-rw-r--r--meta/recipes-core/initrdscripts/initramfs-live-install_1.0.bb1
-rw-r--r--meta/recipes-core/initrdscripts/initramfs-module-install-efi_1.0.bb1
-rw-r--r--meta/recipes-core/initrdscripts/initramfs-module-install_1.0.bb3
-rw-r--r--meta/recipes-core/initrdscripts/initramfs-module-setup-live_1.0.bb1
-rw-r--r--meta/recipes-core/initscripts/init-system-helpers_1.62.bb41
-rw-r--r--meta/recipes-core/initscripts/init-system-helpers_1.66.bb42
-rw-r--r--meta/recipes-core/initscripts/initscripts-1.0/read-only-rootfs-hook.sh4
-rw-r--r--meta/recipes-core/initscripts/initscripts_1.0.bb8
-rw-r--r--meta/recipes-core/kbd/kbd/0001-Remove-non-free-Agafari-fonts.patch73
-rw-r--r--meta/recipes-core/kbd/kbd_2.4.0.bb46
-rw-r--r--meta/recipes-core/kbd/kbd_2.6.4.bb67
-rw-r--r--meta/recipes-core/libcgroup/libcgroup_2.0.2.bb33
-rw-r--r--meta/recipes-core/libcgroup/libcgroup_3.1.0.bb33
-rw-r--r--meta/recipes-core/libxcrypt/libxcrypt-compat_4.4.28.bb18
-rw-r--r--meta/recipes-core/libxcrypt/libxcrypt-compat_4.4.36.bb18
-rw-r--r--meta/recipes-core/libxcrypt/libxcrypt.inc10
-rw-r--r--meta/recipes-core/libxcrypt/libxcrypt_4.4.36.bb (renamed from meta/recipes-core/libxcrypt/libxcrypt_4.4.28.bb)0
-rw-r--r--meta/recipes-core/libxml/libxml2/0001-Make-ptest-run-the-python-tests-if-python-is-enabled.patch86
-rw-r--r--meta/recipes-core/libxml/libxml2/fix-execution-of-ptests.patch33
-rw-r--r--meta/recipes-core/libxml/libxml2/install-tests.patch34
-rw-r--r--meta/recipes-core/libxml/libxml2/libxml-64bit.patch28
-rw-r--r--meta/recipes-core/libxml/libxml2/libxml-m4-use-pkgconfig.patch230
-rw-r--r--meta/recipes-core/libxml/libxml2/python-sitepackages-dir.patch35
-rw-r--r--meta/recipes-core/libxml/libxml2/remove-fuzz-from-ptests.patch44
-rwxr-xr-x[-rw-r--r--]meta/recipes-core/libxml/libxml2/run-ptest18
-rw-r--r--meta/recipes-core/libxml/libxml2/runtest.patch849
-rw-r--r--meta/recipes-core/libxml/libxml2_2.12.6.bb101
-rw-r--r--meta/recipes-core/libxml/libxml2_2.9.14.bb110
-rw-r--r--meta/recipes-core/meta/build-sysroots.bb19
-rw-r--r--meta/recipes-core/meta/buildtools-extended-tarball.bb12
-rw-r--r--meta/recipes-core/meta/buildtools-tarball.bb11
-rw-r--r--meta/recipes-core/meta/cve-update-db-native.bb237
-rw-r--r--meta/recipes-core/meta/cve-update-nvd2-native.bb377
-rw-r--r--meta/recipes-core/meta/meta-environment.bb1
-rw-r--r--meta/recipes-core/meta/meta-ide-support.bb27
-rw-r--r--meta/recipes-core/meta/meta-toolchain.bb1
-rw-r--r--meta/recipes-core/meta/meta-world-pkgdata.bb2
-rw-r--r--meta/recipes-core/meta/nativesdk-buildtools-perl-dummy.bb1
-rw-r--r--meta/recipes-core/meta/signing-keys.bb2
-rw-r--r--meta/recipes-core/meta/testexport-tarball.bb2
-rw-r--r--meta/recipes-core/meta/uninative-tarball.bb11
-rw-r--r--meta/recipes-core/meta/wic-tools.bb3
-rw-r--r--meta/recipes-core/musl/bsd-headers.bb2
-rw-r--r--meta/recipes-core/musl/bsd-headers/sys-cdefs.h8
-rw-r--r--meta/recipes-core/musl/gcompat/0001-Add-fcntl64-wrapper.patch44
-rw-r--r--meta/recipes-core/musl/gcompat_git.bb11
-rw-r--r--meta/recipes-core/musl/libc-test/run-libc-ptests28
-rw-r--r--meta/recipes-core/musl/libc-test/run-ptest3
-rw-r--r--meta/recipes-core/musl/libc-test_git.bb57
-rw-r--r--meta/recipes-core/musl/libssp-nonshared.bb2
-rw-r--r--meta/recipes-core/musl/musl-legacy-error.bb26
-rw-r--r--meta/recipes-core/musl/musl-legacy-error/error.h60
-rw-r--r--meta/recipes-core/musl/musl-locales_git.bb7
-rw-r--r--meta/recipes-core/musl/musl-obstack.bb6
-rw-r--r--meta/recipes-core/musl/musl-utils.bb2
-rw-r--r--meta/recipes-core/musl/musl.inc3
-rw-r--r--meta/recipes-core/musl/musl/0001-Make-dynamic-linker-a-relative-symlink-to-libc.patch18
-rw-r--r--meta/recipes-core/musl/musl/0002-ldso-Use-syslibdir-and-libdir-as-default-pathes-to-l.patch20
-rw-r--r--meta/recipes-core/musl/musl/0003-elf.h-add-typedefs-for-Elf64_Relr-and-Elf32_Relr.patch37
-rw-r--r--meta/recipes-core/musl/musl_git.bb11
-rw-r--r--meta/recipes-core/ncurses/files/0001-Fix-CVE-2023-29491.patch462
-rw-r--r--meta/recipes-core/ncurses/files/0001-Updating-reset-code-ncurses-6.4-patch-20231104.patch499
-rw-r--r--meta/recipes-core/ncurses/files/exit_prototype.patch32
-rw-r--r--meta/recipes-core/ncurses/ncurses.inc10
-rw-r--r--meta/recipes-core/ncurses/ncurses_6.3.bb14
-rw-r--r--meta/recipes-core/ncurses/ncurses_6.4.bb17
-rw-r--r--meta/recipes-core/netbase/netbase_6.3.bb23
-rw-r--r--meta/recipes-core/netbase/netbase_6.4.bb25
-rw-r--r--meta/recipes-core/newlib/libgloss/fix-rs6000-crt0.patch24
-rw-r--r--meta/recipes-core/newlib/libgloss/fix_makefile_include_arm_h.patch30
-rw-r--r--meta/recipes-core/newlib/libgloss/libgloss-build-without-nostdinc.patch30
-rw-r--r--meta/recipes-core/newlib/libgloss_4.2.0.bb33
-rw-r--r--meta/recipes-core/newlib/libgloss_git.bb36
-rw-r--r--meta/recipes-core/newlib/newlib.inc25
-rw-r--r--meta/recipes-core/newlib/newlib_4.2.0.bb20
-rw-r--r--meta/recipes-core/newlib/newlib_git.bb20
-rw-r--r--meta/recipes-core/os-release/os-release.bb20
-rw-r--r--meta/recipes-core/ovmf/ovmf-shell-image.bb2
-rw-r--r--meta/recipes-core/ovmf/ovmf/0001-BaseTools-fix-gcc12-warning-1.patch51
-rw-r--r--meta/recipes-core/ovmf/ovmf/0001-BaseTools-fix-gcc12-warning.patch49
-rw-r--r--meta/recipes-core/ovmf/ovmf/0001-ovmf-update-path-to-native-BaseTools.patch10
-rw-r--r--meta/recipes-core/ovmf/ovmf/0002-BaseTools-makefile-adjust-to-build-in-under-bitbake.patch57
-rw-r--r--meta/recipes-core/ovmf/ovmf/0003-debug-prefix-map.patch104
-rw-r--r--meta/recipes-core/ovmf/ovmf/0003-ovmf-Update-to-latest.patch45
-rw-r--r--meta/recipes-core/ovmf/ovmf/0004-reproducible.patch180
-rw-r--r--meta/recipes-core/ovmf/ovmf/0005-debug-prefix-map.patch104
-rw-r--r--meta/recipes-core/ovmf/ovmf/0006-reproducible.patch180
-rw-r--r--meta/recipes-core/ovmf/ovmf_git.bb27
-rw-r--r--meta/recipes-core/packagegroups/nativesdk-packagegroup-sdk-host.bb4
-rw-r--r--meta/recipes-core/packagegroups/packagegroup-base.bb14
-rw-r--r--meta/recipes-core/packagegroups/packagegroup-core-boot.bb7
-rw-r--r--meta/recipes-core/packagegroups/packagegroup-core-nfs.bb1
-rw-r--r--meta/recipes-core/packagegroups/packagegroup-core-sdk.bb11
-rw-r--r--meta/recipes-core/packagegroups/packagegroup-core-ssh-dropbear.bb2
-rw-r--r--meta/recipes-core/packagegroups/packagegroup-core-ssh-openssh.bb1
-rw-r--r--meta/recipes-core/packagegroups/packagegroup-core-standalone-sdk-target.bb1
-rw-r--r--meta/recipes-core/packagegroups/packagegroup-core-tools-debug.bb1
-rw-r--r--meta/recipes-core/packagegroups/packagegroup-core-tools-profile.bb5
-rw-r--r--meta/recipes-core/packagegroups/packagegroup-core-tools-testapps.bb4
-rw-r--r--meta/recipes-core/packagegroups/packagegroup-go-sdk-target.bb1
-rw-r--r--meta/recipes-core/packagegroups/packagegroup-rust-cross-canadian.bb13
-rw-r--r--meta/recipes-core/packagegroups/packagegroup-rust-sdk-target.bb14
-rw-r--r--meta/recipes-core/packagegroups/packagegroup-self-hosted.bb20
-rw-r--r--meta/recipes-core/psplash/files/psplash-poky-img.h1259
-rw-r--r--meta/recipes-core/psplash/files/psplash-poky-img.pngbin0 -> 13103 bytes
-rw-r--r--meta/recipes-core/psplash/files/psplash-poky-img.svg116
-rw-r--r--meta/recipes-core/psplash/files/psplash-start.service1
-rw-r--r--meta/recipes-core/psplash/files/psplash-systemd.service1
-rw-r--r--meta/recipes-core/psplash/psplash_git.bb15
-rw-r--r--meta/recipes-core/readline/readline.inc2
-rw-r--r--meta/recipes-core/readline/readline/configure-fix.patch35
-rw-r--r--meta/recipes-core/readline/readline/readline82-001.patch45
-rw-r--r--meta/recipes-core/readline/readline/readline82-002.patch51
-rw-r--r--meta/recipes-core/readline/readline/readline82-003.patch46
-rw-r--r--meta/recipes-core/readline/readline/readline82-004.patch68
-rw-r--r--meta/recipes-core/readline/readline/readline82-005.patch53
-rw-r--r--meta/recipes-core/readline/readline/readline82-006.patch102
-rw-r--r--meta/recipes-core/readline/readline/readline82-007.patch51
-rw-r--r--meta/recipes-core/readline/readline/readline82-008.patch80
-rw-r--r--meta/recipes-core/readline/readline/readline82-009.patch76
-rw-r--r--meta/recipes-core/readline/readline/readline82-010.patch70
-rw-r--r--meta/recipes-core/readline/readline_8.1.2.bb7
-rw-r--r--meta/recipes-core/readline/readline_8.2.bb17
-rw-r--r--meta/recipes-core/seatd/seatd_0.6.4.bb35
-rw-r--r--meta/recipes-core/seatd/seatd_0.8.0.bb35
-rw-r--r--meta/recipes-core/sysfsutils/sysfsutils-2.1.0/obsolete_automake_macros.patch15
-rw-r--r--meta/recipes-core/sysfsutils/sysfsutils-2.1.0/separatebuild.patch65
-rw-r--r--meta/recipes-core/sysfsutils/sysfsutils-2.1.0/sysfsutils-2.0.0-class-dup.patch23
-rw-r--r--meta/recipes-core/sysfsutils/sysfsutils_2.1.0.bb31
-rw-r--r--meta/recipes-core/sysfsutils/sysfsutils_2.1.1.bb21
-rw-r--r--meta/recipes-core/systemd/systemd-boot-native_255.4.bb15
-rw-r--r--meta/recipes-core/systemd/systemd-boot_250.5.bb73
-rw-r--r--meta/recipes-core/systemd/systemd-boot_255.4.bb67
-rw-r--r--meta/recipes-core/systemd/systemd-compat-units.bb7
-rw-r--r--meta/recipes-core/systemd/systemd-conf_1.0.bb3
-rw-r--r--meta/recipes-core/systemd/systemd-machine-units_1.0.bb4
-rw-r--r--meta/recipes-core/systemd/systemd-serialgetty.bb4
-rw-r--r--meta/recipes-core/systemd/systemd-systemctl-native.bb1
-rwxr-xr-xmeta/recipes-core/systemd/systemd-systemctl/systemctl40
-rw-r--r--meta/recipes-core/systemd/systemd.inc7
-rw-r--r--meta/recipes-core/systemd/systemd/00-create-volatile.conf1
-rw-r--r--meta/recipes-core/systemd/systemd/0001-Adjust-for-musl-headers.patch525
-rw-r--r--meta/recipes-core/systemd/systemd/0001-binfmt-Don-t-install-dependency-links-at-install-tim.patch81
-rw-r--r--meta/recipes-core/systemd/systemd/0001-missing_type.h-add-comparison_fn_t.patch61
-rw-r--r--meta/recipes-core/systemd/systemd/0001-pass-correct-parameters-to-getdents64.patch48
-rw-r--r--meta/recipes-core/systemd/systemd/0001-resolve-Use-sockaddr-pointer-type-for-bind.patch46
-rw-r--r--meta/recipes-core/systemd/systemd/0001-systemd.pc.in-use-ROOTPREFIX-without-suffixed-slash.patch42
-rw-r--r--meta/recipes-core/systemd/systemd/0001-test-parse-argument-Include-signal.h.patch27
-rw-r--r--meta/recipes-core/systemd/systemd/0002-Add-sys-stat.h-for-S_IFDIR.patch27
-rw-r--r--meta/recipes-core/systemd/systemd/0002-add-fallback-parse_printf_format-implementation.patch434
-rw-r--r--meta/recipes-core/systemd/systemd/0002-binfmt-Don-t-install-dependency-links-at-install-tim.patch79
-rw-r--r--meta/recipes-core/systemd/systemd/0002-don-t-use-glibc-specific-qsort_r.patch163
-rw-r--r--meta/recipes-core/systemd/systemd/0003-implment-systemd-sysv-install-for-OE.patch41
-rw-r--r--meta/recipes-core/systemd/systemd/0003-missing_type.h-add-__compare_fn_t-and-comparison_fn_.patch79
-rw-r--r--meta/recipes-core/systemd/systemd/0003-src-basic-missing.h-check-for-missing-strndupa.patch699
-rw-r--r--meta/recipes-core/systemd/systemd/0004-add-fallback-parse_printf_format-implementation.patch433
-rw-r--r--meta/recipes-core/systemd/systemd/0004-don-t-fail-if-GLOB_BRACE-and-GLOB_ALTDIRFUNC-is-not-.patch156
-rw-r--r--meta/recipes-core/systemd/systemd/0005-add-missing-FTW_-macros-for-musl.patch44
-rw-r--r--meta/recipes-core/systemd/systemd/0005-src-basic-missing.h-check-for-missing-strndupa.patch707
-rw-r--r--meta/recipes-core/systemd/systemd/0006-Use-uintmax_t-for-handling-rlim_t.patch106
-rw-r--r--meta/recipes-core/systemd/systemd/0007-don-t-fail-if-GLOB_BRACE-and-GLOB_ALTDIRFUNC-is-not-.patch153
-rw-r--r--meta/recipes-core/systemd/systemd/0007-don-t-pass-AT_SYMLINK_NOFOLLOW-flag-to-faccessat.patch99
-rw-r--r--meta/recipes-core/systemd/systemd/0008-Define-glibc-compatible-basename-for-non-glibc-syste.patch34
-rw-r--r--meta/recipes-core/systemd/systemd/0008-add-missing-FTW_-macros-for-musl.patch74
-rw-r--r--meta/recipes-core/systemd/systemd/0008-implment-systemd-sysv-install-for-OE.patch43
-rw-r--r--meta/recipes-core/systemd/systemd/0009-Do-not-disable-buffering-when-writing-to-oom_score_a.patch41
-rw-r--r--meta/recipes-core/systemd/systemd/0009-fix-missing-of-__register_atfork-for-non-glibc-build.patch46
-rw-r--r--meta/recipes-core/systemd/systemd/0010-Use-uintmax_t-for-handling-rlim_t.patch104
-rw-r--r--meta/recipes-core/systemd/systemd/0010-distinguish-XSI-compliant-strerror_r-from-GNU-specif.patch76
-rw-r--r--meta/recipes-core/systemd/systemd/0011-avoid-redefinition-of-prctl_mm_map-structure.patch32
-rw-r--r--meta/recipes-core/systemd/systemd/0011-test-sizeof.c-Disable-tests-for-missing-typedefs-in-.patch39
-rw-r--r--meta/recipes-core/systemd/systemd/0012-do-not-disable-buffer-in-writing-files.patch562
-rw-r--r--meta/recipes-core/systemd/systemd/0012-don-t-pass-AT_SYMLINK_NOFOLLOW-flag-to-faccessat.patch97
-rw-r--r--meta/recipes-core/systemd/systemd/0013-Define-glibc-compatible-basename-for-non-glibc-syste.patch32
-rw-r--r--meta/recipes-core/systemd/systemd/0013-Handle-__cpu_mask-usage.patch60
-rw-r--r--meta/recipes-core/systemd/systemd/0014-Do-not-disable-buffering-when-writing-to-oom_score_a.patch39
-rw-r--r--meta/recipes-core/systemd/systemd/0014-Handle-missing-gshadow.patch173
-rw-r--r--meta/recipes-core/systemd/systemd/0015-distinguish-XSI-compliant-strerror_r-from-GNU-specif.patch60
-rw-r--r--meta/recipes-core/systemd/systemd/0015-missing_syscall.h-Define-MIPS-ABI-defines-for-musl.patch49
-rw-r--r--meta/recipes-core/systemd/systemd/0016-pass-correct-parameters-to-getdents64.patch37
-rw-r--r--meta/recipes-core/systemd/systemd/0017-Adjust-for-musl-headers.patch572
-rw-r--r--meta/recipes-core/systemd/systemd/0018-avoid-redefinition-of-prctl_mm_map-structure.patch30
-rw-r--r--meta/recipes-core/systemd/systemd/0018-test-bus-error-strerror-is-assumed-to-be-GNU-specifi.patch52
-rw-r--r--meta/recipes-core/systemd/systemd/0019-errno-util-Make-STRERROR-portable-for-musl.patch42
-rw-r--r--meta/recipes-core/systemd/systemd/0020-sd-event-Make-malloc_trim-conditional-on-glibc.patch39
-rw-r--r--meta/recipes-core/systemd/systemd/0021-shared-Do-not-use-malloc_info-on-musl.patch57
-rw-r--r--meta/recipes-core/systemd/systemd/0021-test-json.c-define-M_PIl.patch35
-rw-r--r--meta/recipes-core/systemd/systemd/0022-avoid-missing-LOCK_EX-declaration.patch43
-rw-r--r--meta/recipes-core/systemd/systemd/0022-do-not-disable-buffer-in-writing-files.patch467
-rw-r--r--meta/recipes-core/systemd/systemd/0025-Handle-__cpu_mask-usage.patch58
-rw-r--r--meta/recipes-core/systemd/systemd/0026-Handle-missing-gshadow.patch171
-rw-r--r--meta/recipes-core/systemd/systemd/0028-missing_syscall.h-Define-MIPS-ABI-defines-for-musl.patch47
-rw-r--r--meta/recipes-core/systemd/systemd_250.5.bb792
-rw-r--r--meta/recipes-core/systemd/systemd_255.4.bb899
-rw-r--r--meta/recipes-core/sysvinit/sysvinit-inittab/start_getty9
-rw-r--r--meta/recipes-core/sysvinit/sysvinit-inittab_2.88dsf.bb43
-rw-r--r--meta/recipes-core/sysvinit/sysvinit/0001-hddown-include-libgen.h-for-basename-API.patch38
-rw-r--r--meta/recipes-core/sysvinit/sysvinit/install.patch2
-rw-r--r--meta/recipes-core/sysvinit/sysvinit/sysvinit_remove_linux_fs.patch17
-rw-r--r--meta/recipes-core/sysvinit/sysvinit_3.04.bb2
-rw-r--r--meta/recipes-core/ttyrun/ttyrun_2.32.0.bb33
-rw-r--r--meta/recipes-core/udev/eudev/0001-build-Remove-dead-g-i-r-configuration.patch155
-rw-r--r--meta/recipes-core/udev/eudev/netifnames.patch17
-rw-r--r--meta/recipes-core/udev/eudev_3.2.11.bb89
-rw-r--r--meta/recipes-core/udev/eudev_3.2.14.bb90
-rw-r--r--meta/recipes-core/udev/udev-extraconf/mount.sh103
-rw-r--r--meta/recipes-core/udev/udev-extraconf_1.1.bb17
-rw-r--r--meta/recipes-core/update-rc.d/update-rc.d_0.8.bb4
-rw-r--r--meta/recipes-core/util-linux/util-linux-libuuid_2.38.bb16
-rw-r--r--meta/recipes-core/util-linux/util-linux-libuuid_2.39.3.bb19
-rw-r--r--meta/recipes-core/util-linux/util-linux.inc13
-rw-r--r--meta/recipes-core/util-linux/util-linux/0001-login-utils-include-libgen.h-for-basename-API.patch60
-rw-r--r--meta/recipes-core/util-linux/util-linux/avoid_parallel_tests.patch4
-rw-r--r--meta/recipes-core/util-linux/util-linux/configure-sbindir.patch19
-rw-r--r--meta/recipes-core/util-linux/util-linux/display_testname_for_subtest.patch12
-rw-r--r--meta/recipes-core/util-linux/util-linux/fcntl-lock.c332
-rw-r--r--meta/recipes-core/util-linux/util-linux/mit-license.patch45
-rw-r--r--meta/recipes-core/util-linux/util-linux/ptest.patch4
-rw-r--r--meta/recipes-core/util-linux/util-linux/run-ptest10
-rw-r--r--meta/recipes-core/util-linux/util-linux_2.38.bb321
-rw-r--r--meta/recipes-core/util-linux/util-linux_2.39.3.bb345
-rw-r--r--meta/recipes-core/volatile-binds/files/volatile-binds.service.in3
-rw-r--r--meta/recipes-core/volatile-binds/volatile-binds.bb18
-rw-r--r--meta/recipes-core/zlib/zlib/0001-Correct-incorrect-inputs-provided-to-the-CRC-functio.patch54
-rw-r--r--meta/recipes-core/zlib/zlib/0001-configure-Pass-LDFLAGS-to-link-tests.patch22
-rw-r--r--meta/recipes-core/zlib/zlib/cc.patch27
-rw-r--r--meta/recipes-core/zlib/zlib/ldflags-tests.patch45
-rw-r--r--meta/recipes-core/zlib/zlib_1.2.12.bb54
-rw-r--r--meta/recipes-core/zlib/zlib_1.3.1.bb50
-rw-r--r--meta/recipes-devtools/apt/apt/0001-Disable-documentation-directory-altogether.patch6
-rw-r--r--meta/recipes-devtools/apt/apt/0001-Do-not-init-tables-from-dpkg-configuration.patch4
-rw-r--r--meta/recipes-devtools/apt/apt/0001-Remove-using-std-binary_function.patch87
-rw-r--r--meta/recipes-devtools/apt/apt/0001-Revert-always-run-dpkg-configure-a-at-the-end-of-our.patch4
-rw-r--r--meta/recipes-devtools/apt/apt/0001-cmake-Do-not-build-po-files.patch9
-rw-r--r--meta/recipes-devtools/apt/apt_2.4.5.bb136
-rw-r--r--meta/recipes-devtools/apt/apt_2.6.1.bb142
-rw-r--r--meta/recipes-devtools/autoconf-archive/autoconf-archive_2022.02.11.bb17
-rw-r--r--meta/recipes-devtools/autoconf-archive/autoconf-archive_2023.02.20.bb17
-rw-r--r--meta/recipes-devtools/autoconf/autoconf/autoreconf-exclude.patch26
-rw-r--r--meta/recipes-devtools/autoconf/autoconf/autotest-automake-result-format.patch22
-rw-r--r--meta/recipes-devtools/autoconf/autoconf/man-host-perl.patch13
-rw-r--r--meta/recipes-devtools/autoconf/autoconf/no-man.patch19
-rw-r--r--meta/recipes-devtools/autoconf/autoconf/preferbash.patch12
-rw-r--r--meta/recipes-devtools/autoconf/autoconf/program_prefix.patch12
-rw-r--r--meta/recipes-devtools/autoconf/autoconf/remove-usr-local-lib-from-m4.patch12
-rw-r--r--meta/recipes-devtools/autoconf/autoconf_2.71.bb81
-rw-r--r--meta/recipes-devtools/autoconf/autoconf_2.72e.bb83
-rw-r--r--meta/recipes-devtools/automake/automake/buildtest.patch2
-rw-r--r--meta/recipes-devtools/binutils/binutils-2.38.inc36
-rw-r--r--meta/recipes-devtools/binutils/binutils-2.42.inc40
-rw-r--r--meta/recipes-devtools/binutils/binutils-cross-canadian.inc6
-rw-r--r--meta/recipes-devtools/binutils/binutils-cross-canadian_2.42.bb (renamed from meta/recipes-devtools/binutils/binutils-cross-canadian_2.38.bb)0
-rw-r--r--meta/recipes-devtools/binutils/binutils-cross-testsuite_2.38.bb81
-rw-r--r--meta/recipes-devtools/binutils/binutils-cross-testsuite_2.42.bb83
-rw-r--r--meta/recipes-devtools/binutils/binutils-cross.inc1
-rw-r--r--meta/recipes-devtools/binutils/binutils-cross_2.42.bb (renamed from meta/recipes-devtools/binutils/binutils-cross_2.38.bb)0
-rw-r--r--meta/recipes-devtools/binutils/binutils-crosssdk_2.38.bb13
-rw-r--r--meta/recipes-devtools/binutils/binutils-crosssdk_2.42.bb11
-rw-r--r--meta/recipes-devtools/binutils/binutils.inc27
-rw-r--r--meta/recipes-devtools/binutils/binutils/0001-binutils-crosssdk-Generate-relocatable-SDKs.patch16
-rw-r--r--meta/recipes-devtools/binutils/binutils/0002-binutils-cross-Do-not-generate-linker-script-directo.patch6
-rw-r--r--meta/recipes-devtools/binutils/binutils/0003-binutils-nativesdk-Search-for-alternative-ld.so.conf.patch28
-rw-r--r--meta/recipes-devtools/binutils/binutils/0004-Point-scripts-location-to-libdir.patch10
-rw-r--r--meta/recipes-devtools/binutils/binutils/0005-Only-generate-an-RPATH-entry-if-LD_RUN_PATH-is-not-e.patch37
-rw-r--r--meta/recipes-devtools/binutils/binutils/0005-don-t-let-the-distro-compiler-point-to-the-wrong-ins.patch32
-rw-r--r--meta/recipes-devtools/binutils/binutils/0006-don-t-let-the-distro-compiler-point-to-the-wrong-ins.patch32
-rw-r--r--meta/recipes-devtools/binutils/binutils/0006-warn-for-uses-of-system-directories-when-cross-linki.patch287
-rw-r--r--meta/recipes-devtools/binutils/binutils/0007-fix-the-incorrect-assembling-for-ppc-wait-mnemonic.patch37
-rw-r--r--meta/recipes-devtools/binutils/binutils/0007-warn-for-uses-of-system-directories-when-cross-linki.patch288
-rw-r--r--meta/recipes-devtools/binutils/binutils/0008-Use-libtool-2.4.patch34406
-rw-r--r--meta/recipes-devtools/binutils/binutils/0008-fix-the-incorrect-assembling-for-ppc-wait-mnemonic.patch37
-rw-r--r--meta/recipes-devtools/binutils/binutils/0009-Fix-rpath-in-libtool-when-sysroot-is-enabled.patch49
-rw-r--r--meta/recipes-devtools/binutils/binutils/0009-Use-libtool-2.4.patch25302
-rw-r--r--meta/recipes-devtools/binutils/binutils/0010-Fix-rpath-in-libtool-when-sysroot-is-enabled.patch49
-rw-r--r--meta/recipes-devtools/binutils/binutils/0010-sync-with-OE-libtool-changes.patch86
-rw-r--r--meta/recipes-devtools/binutils/binutils/0011-Check-for-clang-before-checking-gcc-version.patch45
-rw-r--r--meta/recipes-devtools/binutils/binutils/0011-sync-with-OE-libtool-changes.patch86
-rw-r--r--meta/recipes-devtools/binutils/binutils/0012-Check-for-clang-before-checking-gcc-version.patch45
-rw-r--r--meta/recipes-devtools/binutils/binutils/0012-Only-generate-an-RPATH-entry-if-LD_RUN_PATH-is-not-e.patch38
-rw-r--r--meta/recipes-devtools/binutils/binutils/0013-Avoid-as-info-race-condition.patch75
-rw-r--r--meta/recipes-devtools/binutils/binutils/0013-Define-alignof-using-_Alignof-when-using-C11-or-newe.patch48
-rw-r--r--meta/recipes-devtools/binutils/binutils/0014-Remove-duplicate-pe-dll.o-entry-deom-targ_extra_ofil.patch32
-rw-r--r--meta/recipes-devtools/binutils/binutils/0015-gprofng-change-use-of-bignum-to-bigint.patch17
-rw-r--r--meta/recipes-devtools/binutils/binutils_2.38.bb69
-rw-r--r--meta/recipes-devtools/binutils/binutils_2.42.bb76
-rw-r--r--meta/recipes-devtools/bison/bison/autoconf-2.73.patch24
-rw-r--r--meta/recipes-devtools/bison/bison_3.8.2.bb1
-rw-r--r--meta/recipes-devtools/bootchart2/bootchart2/0001-Do-not-include-linux-fs.h.patch31
-rw-r--r--meta/recipes-devtools/bootchart2/bootchart2/0001-bootchart2-support-usrmerge.patch37
-rw-r--r--meta/recipes-devtools/bootchart2/bootchart2_0.14.9.bb16
-rw-r--r--meta/recipes-devtools/btrfs-tools/btrfs-tools/0001-Add-a-possibility-to-specify-where-python-modules-ar.patch6
-rw-r--r--meta/recipes-devtools/btrfs-tools/btrfs-tools_5.16.2.bb71
-rw-r--r--meta/recipes-devtools/btrfs-tools/btrfs-tools_6.8.bb72
-rw-r--r--meta/recipes-devtools/cargo/cargo-cross-canadian.inc72
-rw-r--r--meta/recipes-devtools/cargo/cargo-cross-canadian_1.60.0.bb6
-rw-r--r--meta/recipes-devtools/cargo/cargo.inc56
-rw-r--r--meta/recipes-devtools/cargo/cargo_1.60.0.bb4
-rw-r--r--meta/recipes-devtools/ccache/ccache_4.6.bb26
-rw-r--r--meta/recipes-devtools/ccache/ccache_4.9.1.bb26
-rw-r--r--meta/recipes-devtools/ccache/files/0001-xxhash.h-Fix-build-with-gcc-12.patch37
-rw-r--r--meta/recipes-devtools/cdrtools/cdrtools-native_3.01.bb9
-rw-r--r--meta/recipes-devtools/cdrtools/cdrtools/riscv64-linux-gcc.rul65
-rw-r--r--meta/recipes-devtools/cmake/cmake-native_3.23.1.bb64
-rw-r--r--meta/recipes-devtools/cmake/cmake-native_3.28.3.bb67
-rw-r--r--meta/recipes-devtools/cmake/cmake.inc10
-rw-r--r--meta/recipes-devtools/cmake/cmake/0001-CMakeDetermineSystem-use-oe-environment-vars-to-load.patch44
-rw-r--r--meta/recipes-devtools/cmake/cmake/0001-CMakeLists.txt-disable-USE_NGHTTP2.patch35
-rw-r--r--meta/recipes-devtools/cmake/cmake/0003-cmake-support-OpenEmbedded-Qt4-tool-binary-names.patch56
-rw-r--r--meta/recipes-devtools/cmake/cmake/0004-Fail-silently-if-system-Qt-installation-is-broken.patch79
-rw-r--r--meta/recipes-devtools/cmake/cmake/OEToolchainConfig.cmake9
-rw-r--r--meta/recipes-devtools/cmake/cmake/environment.d-cmake.sh2
-rw-r--r--meta/recipes-devtools/cmake/cmake_3.23.1.bb67
-rw-r--r--meta/recipes-devtools/cmake/cmake_3.28.3.bb68
-rw-r--r--meta/recipes-devtools/createrepo-c/createrepo-c/0001-include-rpm-rpmstring.h.patch30
-rw-r--r--meta/recipes-devtools/createrepo-c/createrepo-c_0.20.0.bb40
-rw-r--r--meta/recipes-devtools/createrepo-c/createrepo-c_1.1.0.bb41
-rw-r--r--meta/recipes-devtools/debugedit/debugedit_5.0.bb28
-rw-r--r--meta/recipes-devtools/debugedit/files/0002-sepdebugcrcfix.c-do-not-use-64bit-variants.patch56
-rw-r--r--meta/recipes-devtools/debugedit/files/0003-Makefile.am-do-not-update-manual.patch65
-rw-r--r--meta/recipes-devtools/desktop-file-utils/desktop-file-utils_0.26.bb25
-rw-r--r--meta/recipes-devtools/desktop-file-utils/desktop-file-utils_0.27.bb21
-rw-r--r--meta/recipes-devtools/devel-config/nfs-export-root.bb1
-rw-r--r--meta/recipes-devtools/diffstat/diffstat/avoid-check-user-break-cc.patch6
-rw-r--r--meta/recipes-devtools/diffstat/diffstat_1.64.bb27
-rw-r--r--meta/recipes-devtools/diffstat/diffstat_1.66.bb27
-rw-r--r--meta/recipes-devtools/dmidecode/dmidecode/0001-Committing-changes-from-do_unpack_extra.patch25
-rw-r--r--meta/recipes-devtools/dmidecode/dmidecode_3.3.bb24
-rw-r--r--meta/recipes-devtools/dmidecode/dmidecode_3.5.bb21
-rw-r--r--meta/recipes-devtools/dnf/dnf/0001-Corretly-install-tmpfiles.d-configuration.patch8
-rw-r--r--meta/recipes-devtools/dnf/dnf/0001-Do-not-hardcode-etc-and-systemd-unit-directories.patch5
-rw-r--r--meta/recipes-devtools/dnf/dnf/0001-dnf-write-the-log-lock-to-root.patch6
-rw-r--r--meta/recipes-devtools/dnf/dnf/0001-lock.py-fix-Exception-handling.patch62
-rw-r--r--meta/recipes-devtools/dnf/dnf/0001-set-python-path-for-completion_helper.patch7
-rw-r--r--meta/recipes-devtools/dnf/dnf/0005-Do-not-prepend-installroot-to-logdir.patch11
-rw-r--r--meta/recipes-devtools/dnf/dnf/0029-Do-not-set-PYTHON_INSTALL_DIR-by-running-python.patch3
-rw-r--r--meta/recipes-devtools/dnf/dnf/0030-Run-python-scripts-using-env.patch12
-rw-r--r--meta/recipes-devtools/dnf/dnf_4.12.0.bb90
-rw-r--r--meta/recipes-devtools/dnf/dnf_4.19.2.bb97
-rw-r--r--meta/recipes-devtools/docbook-xml/docbook-xml-dtd4/docbook-xml-update-catalog.xml.patch89
-rw-r--r--meta/recipes-devtools/docbook-xml/docbook-xml-dtd4_4.5.bb60
-rw-r--r--meta/recipes-devtools/dosfstools/dosfstools_4.2.bb6
-rw-r--r--meta/recipes-devtools/dpkg/dpkg.inc6
-rw-r--r--meta/recipes-devtools/dpkg/dpkg/0001-Add-support-for-riscv32-CPU.patch13
-rw-r--r--meta/recipes-devtools/dpkg/dpkg/0001-build.c-ignore-return-of-1-from-tar-cf.patch12
-rw-r--r--meta/recipes-devtools/dpkg/dpkg/0001-dpkg-Support-muslx32-build.patch15
-rw-r--r--meta/recipes-devtools/dpkg/dpkg/0002-Adapt-to-linux-wrs-kernel-version-which-has-characte.patch15
-rw-r--r--meta/recipes-devtools/dpkg/dpkg/0003-Our-pre-postinsts-expect-D-to-be-set-when-running-in.patch17
-rw-r--r--meta/recipes-devtools/dpkg/dpkg/0004-The-lutimes-function-doesn-t-work-properly-for-all-s.patch12
-rw-r--r--meta/recipes-devtools/dpkg/dpkg/0006-add-musleabi-to-known-target-tripets.patch18
-rw-r--r--meta/recipes-devtools/dpkg/dpkg/0007-dpkg-deb-build.c-Remove-usage-of-clamp-mtime-in-tar.patch12
-rw-r--r--meta/recipes-devtools/dpkg/dpkg/add_armeb_triplet_entry.patch21
-rw-r--r--meta/recipes-devtools/dpkg/dpkg/arch_pm.patch12
-rw-r--r--meta/recipes-devtools/dpkg/dpkg/noman.patch11
-rw-r--r--meta/recipes-devtools/dpkg/dpkg/pager.patch21
-rw-r--r--meta/recipes-devtools/dpkg/dpkg/remove-tar-no-timestamp.patch13
-rw-r--r--meta/recipes-devtools/dpkg/dpkg_1.21.7.bb23
-rw-r--r--meta/recipes-devtools/dpkg/dpkg_1.22.5.bb22
-rw-r--r--meta/recipes-devtools/e2fsprogs/e2fsprogs.inc4
-rw-r--r--meta/recipes-devtools/e2fsprogs/e2fsprogs/e2fsprogs-fix-missing-check-for-permission-denied.patch4
-rw-r--r--meta/recipes-devtools/e2fsprogs/e2fsprogs/extents.patch56
-rw-r--r--meta/recipes-devtools/e2fsprogs/e2fsprogs/ptest.patch4
-rw-r--r--meta/recipes-devtools/e2fsprogs/e2fsprogs/quiet-debugfs.patch6
-rw-r--r--meta/recipes-devtools/e2fsprogs/e2fsprogs/run-ptest1
-rw-r--r--meta/recipes-devtools/e2fsprogs/e2fsprogs_1.46.5.bb141
-rw-r--r--meta/recipes-devtools/e2fsprogs/e2fsprogs_1.47.0.bb148
-rw-r--r--meta/recipes-devtools/elfutils/elfutils_0.187.bb166
-rw-r--r--meta/recipes-devtools/elfutils/elfutils_0.191.bb177
-rw-r--r--meta/recipes-devtools/elfutils/files/0001-debuginfod-Remove-unused-variable.patch34
-rw-r--r--meta/recipes-devtools/elfutils/files/0001-dso-link-change.patch19
-rw-r--r--meta/recipes-devtools/elfutils/files/0001-libasm-may-link-with-libbz2-if-found.patch15
-rw-r--r--meta/recipes-devtools/elfutils/files/0001-libelf-elf_end.c-check-data_list.data.d.d_buf-before.patch7
-rw-r--r--meta/recipes-devtools/elfutils/files/0001-skip-the-test-when-gcc-not-deployed.patch3
-rw-r--r--meta/recipes-devtools/elfutils/files/0001-srcfiles-fix-unused-variable-BUFFER_SIZE.patch36
-rw-r--r--meta/recipes-devtools/elfutils/files/0001-tests-Makefile.am-compile-test_nlist-with-standard-C.patch9
-rw-r--r--meta/recipes-devtools/elfutils/files/0002-Fix-elf_cvt_gunhash-if-dest-and-src-are-same.patch9
-rw-r--r--meta/recipes-devtools/elfutils/files/0003-fixheadercheck.patch7
-rw-r--r--meta/recipes-devtools/elfutils/files/0003-musl-utils.patch15
-rw-r--r--meta/recipes-devtools/elfutils/files/0006-Fix-build-on-aarch64-musl.patch3
-rw-r--r--meta/recipes-devtools/elfutils/files/0015-config-eu.am-do-not-use-Werror.patch37
-rw-r--r--meta/recipes-devtools/elfutils/files/ptest.patch11
-rw-r--r--meta/recipes-devtools/erofs-utils/erofs-utils/0001-fsck-main.c-add-missing-include.patch26
-rw-r--r--meta/recipes-devtools/erofs-utils/erofs-utils_1.4.bb27
-rw-r--r--meta/recipes-devtools/erofs-utils/erofs-utils_1.7.1.bb27
-rw-r--r--meta/recipes-devtools/expect/expect/0001-Add-prototype-to-function-definitions.patch113
-rwxr-xr-xmeta/recipes-devtools/expect/expect/run-ptest6
-rw-r--r--meta/recipes-devtools/expect/expect_5.45.4.bb11
-rw-r--r--meta/recipes-devtools/fdisk/gptfdisk/0001-gptcurses-correctly-include-curses.h.patch5
-rw-r--r--meta/recipes-devtools/fdisk/gptfdisk_1.0.10.bb35
-rw-r--r--meta/recipes-devtools/fdisk/gptfdisk_1.0.9.bb35
-rw-r--r--meta/recipes-devtools/file/file_5.41.bb54
-rw-r--r--meta/recipes-devtools/file/file_5.45.bb61
-rw-r--r--meta/recipes-devtools/file/files/print_c.patch27
-rw-r--r--meta/recipes-devtools/flex/flex_2.6.4.bb16
-rw-r--r--meta/recipes-devtools/gcc/gcc-12.1.inc114
-rw-r--r--meta/recipes-devtools/gcc/gcc-13.2.inc119
-rw-r--r--meta/recipes-devtools/gcc/gcc-common.inc15
-rw-r--r--meta/recipes-devtools/gcc/gcc-configure-common.inc1
-rw-r--r--meta/recipes-devtools/gcc/gcc-cross-canadian.inc7
-rw-r--r--meta/recipes-devtools/gcc/gcc-cross-canadian_13.2.bb (renamed from meta/recipes-devtools/gcc/gcc-cross-canadian_12.1.bb)0
-rw-r--r--meta/recipes-devtools/gcc/gcc-cross.inc1
-rw-r--r--meta/recipes-devtools/gcc/gcc-cross_13.2.bb (renamed from meta/recipes-devtools/gcc/gcc-cross_12.1.bb)0
-rw-r--r--meta/recipes-devtools/gcc/gcc-crosssdk.inc6
-rw-r--r--meta/recipes-devtools/gcc/gcc-crosssdk_13.2.bb (renamed from meta/recipes-devtools/gcc/gcc-crosssdk_12.1.bb)0
-rw-r--r--meta/recipes-devtools/gcc/gcc-multilib-config.inc2
-rw-r--r--meta/recipes-devtools/gcc/gcc-runtime.inc38
-rw-r--r--meta/recipes-devtools/gcc/gcc-runtime_13.2.bb (renamed from meta/recipes-devtools/gcc/gcc-runtime_12.1.bb)0
-rw-r--r--meta/recipes-devtools/gcc/gcc-sanitizers.inc5
-rw-r--r--meta/recipes-devtools/gcc/gcc-sanitizers_13.2.bb (renamed from meta/recipes-devtools/gcc/gcc-sanitizers_12.1.bb)0
-rw-r--r--meta/recipes-devtools/gcc/gcc-shared-source.inc10
-rw-r--r--meta/recipes-devtools/gcc/gcc-source.inc10
-rw-r--r--meta/recipes-devtools/gcc/gcc-source_13.2.bb (renamed from meta/recipes-devtools/gcc/gcc-source_12.1.bb)0
-rw-r--r--meta/recipes-devtools/gcc/gcc-target.inc2
-rw-r--r--meta/recipes-devtools/gcc/gcc-testsuite.inc9
-rw-r--r--meta/recipes-devtools/gcc/gcc/0001-gcc-4.3.1-ARCH_FLAGS_FOR_TARGET.patch10
-rw-r--r--meta/recipes-devtools/gcc/gcc/0002-gcc-poison-system-directories.patch78
-rw-r--r--meta/recipes-devtools/gcc/gcc/0003-64-bit-multilib-hack.patch103
-rw-r--r--meta/recipes-devtools/gcc/gcc/0004-Pass-CXXFLAGS_FOR_BUILD-in-a-couple-of-places-to-avo.patch16
-rw-r--r--meta/recipes-devtools/gcc/gcc/0005-Use-the-defaults.h-in-B-instead-of-S-and-t-oe-in-B.patch24
-rw-r--r--meta/recipes-devtools/gcc/gcc/0006-cpp-honor-sysroot.patch14
-rw-r--r--meta/recipes-devtools/gcc/gcc/0007-Define-GLIBC_DYNAMIC_LINKER-and-UCLIBC_DYNAMIC_LINKE.patch58
-rw-r--r--meta/recipes-devtools/gcc/gcc/0008-libtool.patch5
-rw-r--r--meta/recipes-devtools/gcc/gcc/0009-gcc-armv4-pass-fix-v4bx-to-linker-to-support-EABI.patch9
-rw-r--r--meta/recipes-devtools/gcc/gcc/0010-Use-the-multilib-config-files-from-B-instead-of-usin.patch14
-rw-r--r--meta/recipes-devtools/gcc/gcc/0011-Avoid-using-libdir-from-.la-which-usually-points-to-.patch28
-rw-r--r--meta/recipes-devtools/gcc/gcc/0011-aarch64-Fix-include-paths-when-S-B.patch55
-rw-r--r--meta/recipes-devtools/gcc/gcc/0012-Avoid-using-libdir-from-.la-which-usually-points-to-.patch28
-rw-r--r--meta/recipes-devtools/gcc/gcc/0012-export-CPP.patch50
-rw-r--r--meta/recipes-devtools/gcc/gcc/0013-Ensure-target-gcc-headers-can-be-included.patch24
-rw-r--r--meta/recipes-devtools/gcc/gcc/0014-Don-t-search-host-directory-during-relink-if-inst_pr.patch2
-rw-r--r--meta/recipes-devtools/gcc/gcc/0015-libcc1-fix-libcc1-s-install-path-and-rpath.patch4
-rw-r--r--meta/recipes-devtools/gcc/gcc/0016-handle-sysroot-support-for-nativesdk-gcc.patch56
-rw-r--r--meta/recipes-devtools/gcc/gcc/0017-Search-target-sysroot-gcc-version-specific-dirs-with.patch11
-rw-r--r--meta/recipes-devtools/gcc/gcc/0018-Add-ssp_nonshared-to-link-commandline-for-musl-targe.patch12
-rw-r--r--meta/recipes-devtools/gcc/gcc/0019-Re-introduce-spe-commandline-options.patch6
-rw-r--r--meta/recipes-devtools/gcc/gcc/0020-libgcc_s-Use-alias-for-__cpu_indicator_init-instead-.patch10
-rw-r--r--meta/recipes-devtools/gcc/gcc/0021-gentypes-genmodes-Do-not-use-__LINE__-for-maintainin.patch38
-rw-r--r--meta/recipes-devtools/gcc/gcc/0022-libatomic-Do-not-enforce-march-on-aarch64.patch42
-rw-r--r--meta/recipes-devtools/gcc/gcc/0022-mingw32-Enable-operation_not_supported.patch26
-rw-r--r--meta/recipes-devtools/gcc/gcc/0023-Fix-install-path-of-linux64.h.patch31
-rw-r--r--meta/recipes-devtools/gcc/gcc/0023-libatomic-Do-not-enforce-march-on-aarch64.patch42
-rw-r--r--meta/recipes-devtools/gcc/gcc/0024-Avoid-hardcoded-build-paths-into-ppc-libgcc.patch28
-rw-r--r--meta/recipes-devtools/gcc/gcc/0024-Fix-install-path-of-linux64.h.patch31
-rw-r--r--meta/recipes-devtools/gcc/gcc/0025-Move-sched.h-include-ahead-of-user-headers.patch56
-rw-r--r--meta/recipes-devtools/gcc/gcc/0025-gcc-testsuite-tweaks-for-mips-OE.patch233
-rw-r--r--meta/recipes-devtools/gcc/gcc/0026-aarch64-Fix-loose-ldpstp-check-PR111411.patch117
-rw-r--r--meta/recipes-devtools/gcc/gcc/0027-Fix-gcc-vect-module-testcases.patch26
-rw-r--r--meta/recipes-devtools/gcc/gcc/CVE-2023-4039.patch3093
-rw-r--r--meta/recipes-devtools/gcc/gcc_13.2.bb (renamed from meta/recipes-devtools/gcc/gcc_12.1.bb)0
-rw-r--r--meta/recipes-devtools/gcc/libgcc-common.inc15
-rw-r--r--meta/recipes-devtools/gcc/libgcc-initial_13.2.bb (renamed from meta/recipes-devtools/gcc/libgcc-initial_12.1.bb)0
-rw-r--r--meta/recipes-devtools/gcc/libgcc_12.1.bb5
-rw-r--r--meta/recipes-devtools/gcc/libgcc_13.2.bb7
-rw-r--r--meta/recipes-devtools/gcc/libgfortran_13.2.bb (renamed from meta/recipes-devtools/gcc/libgfortran_12.1.bb)0
-rw-r--r--meta/recipes-devtools/gdb/gdb-common.inc12
-rw-r--r--meta/recipes-devtools/gdb/gdb-cross-canadian.inc4
-rw-r--r--meta/recipes-devtools/gdb/gdb-cross-canadian_14.2.bb (renamed from meta/recipes-devtools/gdb/gdb-cross-canadian_12.1.bb)0
-rw-r--r--meta/recipes-devtools/gdb/gdb-cross.inc2
-rw-r--r--meta/recipes-devtools/gdb/gdb-cross_14.2.bb (renamed from meta/recipes-devtools/gdb/gdb-cross_12.1.bb)0
-rw-r--r--meta/recipes-devtools/gdb/gdb.inc19
-rw-r--r--meta/recipes-devtools/gdb/gdb/0001-make-man-install-relative-to-DESTDIR.patch28
-rw-r--r--meta/recipes-devtools/gdb/gdb/0001-mips-linux-nat-Define-_ABIO32-if-not-defined.patch32
-rw-r--r--meta/recipes-devtools/gdb/gdb/0002-mips-linux-nat-Define-_ABIO32-if-not-defined.patch35
-rw-r--r--meta/recipes-devtools/gdb/gdb/0002-ppc-ptrace-Define-pt_regs-uapi_pt_regs-on-GLIBC-syst.patch49
-rw-r--r--meta/recipes-devtools/gdb/gdb/0003-Dont-disable-libreadline.a-when-using-disable-static.patch47
-rw-r--r--meta/recipes-devtools/gdb/gdb/0003-ppc-ptrace-Define-pt_regs-uapi_pt_regs-on-GLIBC-syst.patch52
-rw-r--r--meta/recipes-devtools/gdb/gdb/0004-Dont-disable-libreadline.a-when-using-disable-static.patch50
-rw-r--r--meta/recipes-devtools/gdb/gdb/0004-use-asm-sgidefs.h.patch33
-rw-r--r--meta/recipes-devtools/gdb/gdb/0005-Change-order-of-CFLAGS.patch27
-rw-r--r--meta/recipes-devtools/gdb/gdb/0005-use-asm-sgidefs.h.patch36
-rw-r--r--meta/recipes-devtools/gdb/gdb/0006-Change-order-of-CFLAGS.patch30
-rw-r--r--meta/recipes-devtools/gdb/gdb/0006-resolve-restrict-keyword-conflict.patch45
-rw-r--r--meta/recipes-devtools/gdb/gdb/0007-Fix-invalid-sigprocmask-call.patch46
-rw-r--r--meta/recipes-devtools/gdb/gdb/0007-resolve-restrict-keyword-conflict.patch48
-rw-r--r--meta/recipes-devtools/gdb/gdb/0008-Define-alignof-using-_Alignof-when-using-C11-or-newe.patch48
-rw-r--r--meta/recipes-devtools/gdb/gdb/0008-Fix-invalid-sigprocmask-call.patch49
-rw-r--r--meta/recipes-devtools/gdb/gdb/0009-gdbserver-ctrl-c-handling.patch40
-rw-r--r--meta/recipes-devtools/gdb/gdb_14.2.bb (renamed from meta/recipes-devtools/gdb/gdb_12.1.bb)0
-rw-r--r--meta/recipes-devtools/git/git_2.36.1.bb168
-rw-r--r--meta/recipes-devtools/git/git_2.44.0.bb166
-rw-r--r--meta/recipes-devtools/glide/glide_0.13.3.bb43
-rw-r--r--meta/recipes-devtools/gnu-config/gnu-config_git.bb4
-rw-r--r--meta/recipes-devtools/go/go-1.18.2.inc17
-rw-r--r--meta/recipes-devtools/go/go-1.22.2.inc18
-rw-r--r--meta/recipes-devtools/go/go-binary-native_1.18.2.bb46
-rw-r--r--meta/recipes-devtools/go/go-binary-native_1.22.2.bb50
-rw-r--r--meta/recipes-devtools/go/go-common.inc3
-rw-r--r--meta/recipes-devtools/go/go-cross-canadian.inc14
-rw-r--r--meta/recipes-devtools/go/go-cross-canadian_1.22.2.bb (renamed from meta/recipes-devtools/go/go-cross-canadian_1.18.2.bb)0
-rw-r--r--meta/recipes-devtools/go/go-cross_1.22.2.bb (renamed from meta/recipes-devtools/go/go-cross_1.18.2.bb)0
-rw-r--r--meta/recipes-devtools/go/go-crosssdk.inc6
-rw-r--r--meta/recipes-devtools/go/go-crosssdk_1.22.2.bb (renamed from meta/recipes-devtools/go/go-crosssdk_1.18.2.bb)0
-rw-r--r--meta/recipes-devtools/go/go-native_1.18.2.bb58
-rw-r--r--meta/recipes-devtools/go/go-native_1.22.2.bb58
-rw-r--r--meta/recipes-devtools/go/go-runtime.inc4
-rw-r--r--meta/recipes-devtools/go/go-runtime_1.22.2.bb (renamed from meta/recipes-devtools/go/go-runtime_1.18.2.bb)0
-rw-r--r--meta/recipes-devtools/go/go-target.inc6
-rw-r--r--meta/recipes-devtools/go/go/0001-cmd-go-make-content-based-hash-generation-less-pedan.patch92
-rw-r--r--meta/recipes-devtools/go/go/0001-exec.go-do-not-write-linker-flags-into-buildids.patch36
-rw-r--r--meta/recipes-devtools/go/go/0001-src-cmd-dist-buildgo.go-do-not-hardcode-host-compile.patch39
-rw-r--r--meta/recipes-devtools/go/go/0002-cmd-go-Allow-GOTOOLDIR-to-be-overridden-in-the-envir.patch56
-rw-r--r--meta/recipes-devtools/go/go/0003-allow-GOTOOLDIR-to-be-overridden-in-the-environment.patch48
-rw-r--r--meta/recipes-devtools/go/go/0003-ld-add-soname-to-shareable-objects.patch51
-rw-r--r--meta/recipes-devtools/go/go/0004-ld-add-soname-to-shareable-objects.patch45
-rw-r--r--meta/recipes-devtools/go/go/0004-make.bash-override-CC-when-building-dist-and-go_boot.patch45
-rw-r--r--meta/recipes-devtools/go/go/0005-cmd-dist-separate-host-and-target-builds.patch221
-rw-r--r--meta/recipes-devtools/go/go/0005-make.bash-override-CC-when-building-dist-and-go_boot.patch39
-rw-r--r--meta/recipes-devtools/go/go/0006-cmd-dist-separate-host-and-target-builds.patch281
-rw-r--r--meta/recipes-devtools/go/go/0006-cmd-go-make-GOROOT-precious-by-default.patch114
-rw-r--r--meta/recipes-devtools/go/go/0007-cmd-go-make-GOROOT-precious-by-default.patch104
-rw-r--r--meta/recipes-devtools/go/go/0007-exec.go-filter-out-build-specific-paths-from-linker-.patch61
-rw-r--r--meta/recipes-devtools/go/go/0008-src-cmd-dist-buildgo.go-do-not-hardcode-host-compile.patch46
-rw-r--r--meta/recipes-devtools/go/go/0009-go-Filter-build-paths-on-staticly-linked-arches.patch61
-rw-r--r--meta/recipes-devtools/go/go_1.18.2.bb18
-rw-r--r--meta/recipes-devtools/go/go_1.22.2.bb18
-rw-r--r--meta/recipes-devtools/help2man/help2man_1.49.2.bb24
-rw-r--r--meta/recipes-devtools/help2man/help2man_1.49.3.bb24
-rw-r--r--meta/recipes-devtools/icecc-create-env/icecc-create-env_0.1.bb1
-rw-r--r--meta/recipes-devtools/jquery/jquery_3.6.0.bb39
-rw-r--r--meta/recipes-devtools/jquery/jquery_3.7.1.bb38
-rw-r--r--meta/recipes-devtools/json-c/json-c/run-ptest20
-rw-r--r--meta/recipes-devtools/json-c/json-c_0.16.bb18
-rw-r--r--meta/recipes-devtools/json-c/json-c_0.17.bb34
-rw-r--r--meta/recipes-devtools/libcomps/libcomps_0.1.18.bb23
-rw-r--r--meta/recipes-devtools/libcomps/libcomps_0.1.21.bb25
-rw-r--r--meta/recipes-devtools/libdnf/libdnf/0001-FindGtkDoc.cmake-drop-the-requirement-for-GTKDOC_SCA.patch9
-rw-r--r--meta/recipes-devtools/libdnf/libdnf/0001-Get-parameters-for-both-libsolv-and-libsolvext-libdn.patch11
-rw-r--r--meta/recipes-devtools/libdnf/libdnf/0001-drop-FindPythonInstDir.cmake.patch5
-rw-r--r--meta/recipes-devtools/libdnf/libdnf/0001-libdnf-dnf-context.cpp-do-not-try-to-access-BDB-data.patch37
-rw-r--r--meta/recipes-devtools/libdnf/libdnf/0004-Set-libsolv-variables-with-pkg-config-cmake-s-own-mo.patch9
-rw-r--r--meta/recipes-devtools/libdnf/libdnf/armarch.patch50
-rw-r--r--meta/recipes-devtools/libdnf/libdnf/enable_test_data_dir_set.patch15
-rw-r--r--meta/recipes-devtools/libdnf/libdnf_0.67.0.bb36
-rw-r--r--meta/recipes-devtools/libdnf/libdnf_0.73.1.bb36
-rw-r--r--meta/recipes-devtools/libedit/libedit_20210910-3.1.bb24
-rw-r--r--meta/recipes-devtools/libedit/libedit_20230828-3.1.bb24
-rw-r--r--meta/recipes-devtools/libmodulemd/libmodulemd_git.bb4
-rw-r--r--meta/recipes-devtools/librepo/librepo/0001-gpg_gpgme.c-fix-build-errors-with-older-gcc.patch36
-rw-r--r--meta/recipes-devtools/librepo/librepo/0002-Do-not-try-to-obtain-PYTHON_INSTALL_DIR-by-running-p.patch5
-rw-r--r--meta/recipes-devtools/librepo/librepo/0004-Set-gpgme-variables-with-pkg-config-not-with-cmake-m.patch21
-rw-r--r--meta/recipes-devtools/librepo/librepo_1.14.3.bb29
-rw-r--r--meta/recipes-devtools/librepo/librepo_1.17.0.bb31
-rw-r--r--meta/recipes-devtools/libtool/libtool-2.4.7.inc1
-rw-r--r--meta/recipes-devtools/libtool/libtool/0001-ltmain.in-Handle-trailing-slashes-on-install-command.patch4
-rw-r--r--meta/recipes-devtools/libtool/libtool/0001-ltmain.in-Parse-additional-clang-options.patch31
-rw-r--r--meta/recipes-devtools/libtool/libtool/0002-libtool.m4-Rename-the-with-sysroot-option-to-avoid-c.patch19
-rw-r--r--meta/recipes-devtools/libtool/libtool/0003-libtool.m4-Cleanup-sysroot-trailing.patch37
-rw-r--r--meta/recipes-devtools/libtool/libtool/0003-ltmain.in-Add-missing-sysroot-to-library-path.patch4
-rw-r--r--meta/recipes-devtools/libtool/libtool/0004-ltmain.sh-Fix-sysroot-paths-being-encoded-into-RPATH.patch8
-rw-r--r--meta/recipes-devtools/libtool/libtool/0005-ltmain.in-Don-t-encode-RATHS-which-match-default-lin.patch8
-rw-r--r--meta/recipes-devtools/libtool/libtool/0006-libtool.m4-Handle-as-a-sysroot-correctly.patch33
-rw-r--r--meta/recipes-devtools/libtool/libtool/0007-libtool-Fix-support-for-NIOS2-processor.patch81
-rw-r--r--meta/recipes-devtools/libtool/libtool/0008-libtool-Check-for-static-libs-for-internal-compiler-.patch4
-rw-r--r--meta/recipes-devtools/libtool/libtool/0009-Makefile.am-make-sure-autoheader-run-before-autoconf.patch4
-rw-r--r--meta/recipes-devtools/libtool/libtool/0010-Makefile.am-make-sure-autoheader-run-before-automake.patch4
-rw-r--r--meta/recipes-devtools/libtool/libtool/0011-ltmain.in-Handle-prefix-map-compiler-options-correct.patch31
-rw-r--r--meta/recipes-devtools/libtool/libtool/0012-libtool.m4-For-reproducibility-stop-encoding-hostnam.patch6
-rw-r--r--meta/recipes-devtools/libtool/libtool/dont-depend-on-help2man.patch3
-rw-r--r--meta/recipes-devtools/llvm/llvm/0006-llvm-TargetLibraryInfo-Undefine-libc-functions-if-th.patch90
-rw-r--r--meta/recipes-devtools/llvm/llvm/0007-llvm-allow-env-override-of-exe-path.patch18
-rw-r--r--meta/recipes-devtools/llvm/llvm/llvm-config51
-rw-r--r--meta/recipes-devtools/llvm/llvm_git.bb47
-rw-r--r--meta/recipes-devtools/log4cplus/log4cplus_2.0.7.bb19
-rw-r--r--meta/recipes-devtools/log4cplus/log4cplus_2.1.1.bb19
-rw-r--r--meta/recipes-devtools/lua/lua/CVE-2022-28805.patch26
-rw-r--r--meta/recipes-devtools/lua/lua/lua.pc.in5
-rw-r--r--meta/recipes-devtools/lua/lua_5.4.4.bb58
-rw-r--r--meta/recipes-devtools/lua/lua_5.4.6.bb66
-rw-r--r--meta/recipes-devtools/m4/m4-1.4.19.inc11
-rw-r--r--meta/recipes-devtools/m4/m4/0001-Define-alignof_slot-using-_Alignof-when-using-C11-or.patch49
-rw-r--r--meta/recipes-devtools/make/make.inc5
-rw-r--r--meta/recipes-devtools/make/make/0001-makeinst-Do-not-undef-POSIX-on-clang-arm.patch38
-rw-r--r--meta/recipes-devtools/make/make/0001-src-dir.c-fix-buffer-overflow-warning.patch41
-rw-r--r--meta/recipes-devtools/make/make/0002-modules-fcntl-allow-being-detected-by-importing-proj.patch33
-rw-r--r--meta/recipes-devtools/make/make/0002-w32-compat-dirent.c-follow-header.patch36
-rw-r--r--meta/recipes-devtools/make/make/0003-posixfcn-fcntl-gnulib-make-emulated.patch79
-rw-r--r--meta/recipes-devtools/make/make_4.3.bb18
-rw-r--r--meta/recipes-devtools/make/make_4.4.1.bb13
-rw-r--r--meta/recipes-devtools/makedevs/makedevs/COPYING.patch346
-rw-r--r--meta/recipes-devtools/makedevs/makedevs/makedevs.c4
-rw-r--r--meta/recipes-devtools/makedevs/makedevs_1.0.1.bb5
-rw-r--r--meta/recipes-devtools/meson/meson/0001-Check-for-clang-before-guessing-gcc-or-lcc.patch56
-rw-r--r--meta/recipes-devtools/meson/meson/0001-Make-CPU-family-warnings-fatal.patch32
-rw-r--r--meta/recipes-devtools/meson/meson/0001-is_debianlike-always-return-False.patch26
-rw-r--r--meta/recipes-devtools/meson/meson/0001-python-module-do-not-manipulate-the-environment-when.patch48
-rw-r--r--meta/recipes-devtools/meson/meson/0002-Support-building-allarch-recipes-again.patch12
-rw-r--r--meta/recipes-devtools/meson/meson/disable-rpath-handling.patch37
-rwxr-xr-xmeta/recipes-devtools/meson/meson/meson-wrapper22
-rw-r--r--meta/recipes-devtools/meson/meson_0.62.1.bb134
-rw-r--r--meta/recipes-devtools/meson/meson_1.3.1.bb158
-rw-r--r--meta/recipes-devtools/mmc/mmc-utils_git.bb6
-rw-r--r--meta/recipes-devtools/mtd/mtd-utils/add-exclusion-to-mkfs-jffs2-git-2.patch105
-rw-r--r--meta/recipes-devtools/mtd/mtd-utils_git.bb16
-rw-r--r--meta/recipes-devtools/mtools/mtools/disable-hardcoded-configs.patch6
-rw-r--r--meta/recipes-devtools/mtools/mtools_4.0.39.bb49
-rw-r--r--meta/recipes-devtools/mtools/mtools_4.0.43.bb49
-rw-r--r--meta/recipes-devtools/nasm/nasm/0001-stdlib-Add-strlcat.patch18
-rw-r--r--meta/recipes-devtools/nasm/nasm/0002-Add-debug-prefix-map-option.patch74
-rw-r--r--meta/recipes-devtools/nasm/nasm_2.15.05.bb23
-rw-r--r--meta/recipes-devtools/nasm/nasm_2.16.01.bb23
-rw-r--r--meta/recipes-devtools/ninja/ninja_1.10.2.bb31
-rw-r--r--meta/recipes-devtools/ninja/ninja_1.11.1.bb33
-rw-r--r--meta/recipes-devtools/opkg-utils/opkg-utils/0001-opkg-build-remove-numeric-owner-parameter-overzealou.patch34
-rw-r--r--meta/recipes-devtools/opkg-utils/opkg-utils_0.5.0.bb65
-rw-r--r--meta/recipes-devtools/opkg-utils/opkg-utils_0.6.3.bb66
-rw-r--r--meta/recipes-devtools/opkg/opkg-arch-config_1.0.bb3
-rw-r--r--meta/recipes-devtools/opkg/opkg/0001-libopkg-Use-libgen.h-to-provide-basename-API.patch62
-rw-r--r--meta/recipes-devtools/opkg/opkg_0.5.0.bb72
-rw-r--r--meta/recipes-devtools/opkg/opkg_0.6.3.bb85
-rw-r--r--meta/recipes-devtools/orc/orc_0.4.32.bb30
-rw-r--r--meta/recipes-devtools/orc/orc_0.4.38.bb30
-rw-r--r--meta/recipes-devtools/patchelf/patchelf/0001-Set-interpreter-only-when-necessary.patch31
-rw-r--r--meta/recipes-devtools/patchelf/patchelf/0002-align-startOffset-with-p_align-instead-of-pagesize-f.patch42
-rw-r--r--meta/recipes-devtools/patchelf/patchelf/0003-make-LOAD-segment-extensions-based-on-p_align-instea.patch32
-rw-r--r--meta/recipes-devtools/patchelf/patchelf/handle-read-only-files.patch65
-rw-r--r--meta/recipes-devtools/patchelf/patchelf_0.14.5.bb18
-rw-r--r--meta/recipes-devtools/patchelf/patchelf_0.18.0.bb23
-rw-r--r--meta/recipes-devtools/perl-cross/files/0001-Makefile-check-the-file-if-patched-or-not.patch14
-rw-r--r--meta/recipes-devtools/perl-cross/perlcross_1.3.7.bb38
-rw-r--r--meta/recipes-devtools/perl-cross/perlcross_1.5.2.bb38
-rw-r--r--meta/recipes-devtools/perl/files/0001-CheckLib.pm-do-not-attempt-to-run-a-cross-executable.patch24
-rw-r--r--meta/recipes-devtools/perl/files/0001-Fix-intermittent-failure-of-test-t-op-sigsystem.t.patch77
-rw-r--r--meta/recipes-devtools/perl/files/perl-configpm-switch.patch66
-rw-r--r--meta/recipes-devtools/perl/files/perl-dynloader.patch2
-rw-r--r--meta/recipes-devtools/perl/files/perl-rdepends.txt49
-rw-r--r--meta/recipes-devtools/perl/files/run-ptest2
-rw-r--r--meta/recipes-devtools/perl/liberror-perl_0.17029.bb51
-rw-r--r--meta/recipes-devtools/perl/libmodule-build-perl_0.4231.bb123
-rw-r--r--meta/recipes-devtools/perl/libmodule-build-perl_0.4234.bb138
-rw-r--r--meta/recipes-devtools/perl/libtest-fatal-perl_0.017.bb36
-rw-r--r--meta/recipes-devtools/perl/libtest-needs-perl_0.002009.bb29
-rw-r--r--meta/recipes-devtools/perl/libtest-needs-perl_0.002010.bb28
-rw-r--r--meta/recipes-devtools/perl/libtest-warnings-perl_0.033.bb36
-rw-r--r--meta/recipes-devtools/perl/libtry-tiny-perl_0.31.bb36
-rw-r--r--meta/recipes-devtools/perl/liburi-perl/0001-Skip-TODO-test-cases-that-fail.patch110
-rw-r--r--meta/recipes-devtools/perl/liburi-perl_5.08.bb50
-rw-r--r--meta/recipes-devtools/perl/liburi-perl_5.28.bb56
-rw-r--r--meta/recipes-devtools/perl/libxml-parser-perl/0001-Makefile.PL-make-check_lib-cross-friendly.patch28
-rw-r--r--meta/recipes-devtools/perl/libxml-parser-perl_2.46.bb59
-rw-r--r--meta/recipes-devtools/perl/libxml-parser-perl_2.47.bb42
-rw-r--r--meta/recipes-devtools/perl/libxml-perl_0.08.bb3
-rw-r--r--meta/recipes-devtools/perl/perl-ptest.inc6
-rw-r--r--meta/recipes-devtools/perl/perl_5.34.1.bb413
-rw-r--r--meta/recipes-devtools/perl/perl_5.38.2.bb426
-rw-r--r--meta/recipes-devtools/pkgconf/pkgconf_1.8.0.bb67
-rw-r--r--meta/recipes-devtools/pkgconf/pkgconf_2.2.0.bb67
-rw-r--r--meta/recipes-devtools/pkgconfig/pkgconfig/0001-autotools-remove-support-for-the-__int64-type.-See-1.patch144
-rw-r--r--meta/recipes-devtools/pkgconfig/pkgconfig/0001-autotools-use-C99-printf-format-specifiers-on-Window.patch68
-rw-r--r--meta/recipes-devtools/pkgconfig/pkgconfig_git.bb5
-rw-r--r--meta/recipes-devtools/pseudo/files/glibc238.patch59
-rw-r--r--meta/recipes-devtools/pseudo/pseudo_git.bb14
-rw-r--r--meta/recipes-devtools/python/python-async.inc14
-rw-r--r--meta/recipes-devtools/python/python-cython.inc37
-rw-r--r--meta/recipes-devtools/python/python-gitdb.inc23
-rw-r--r--meta/recipes-devtools/python/python-pbr.inc2
-rw-r--r--meta/recipes-devtools/python/python-pyasn1.inc20
-rw-r--r--meta/recipes-devtools/python/python-pycryptodome.inc8
-rw-r--r--meta/recipes-devtools/python/python-six.inc2
-rw-r--r--meta/recipes-devtools/python/python-testtools.inc27
-rw-r--r--meta/recipes-devtools/python/python3-alabaster_0.7.12.bb11
-rw-r--r--meta/recipes-devtools/python/python3-alabaster_0.7.16.bb12
-rw-r--r--meta/recipes-devtools/python/python3-asn1crypto_1.5.1.bb18
-rw-r--r--meta/recipes-devtools/python/python3-async_0.6.2.bb2
-rw-r--r--meta/recipes-devtools/python/python3-atomicwrites/run-ptest2
-rw-r--r--meta/recipes-devtools/python/python3-atomicwrites_1.4.0.bb25
-rw-r--r--meta/recipes-devtools/python/python3-atomicwrites_1.4.1.bb27
-rw-r--r--meta/recipes-devtools/python/python3-attrs/0001-conftest.py-disable-deadline.patch45
-rw-r--r--meta/recipes-devtools/python/python3-attrs/0001-test_funcs-skip-test_unknown-for-pytest-8.patch30
-rw-r--r--meta/recipes-devtools/python/python3-attrs/run-ptest3
-rw-r--r--meta/recipes-devtools/python/python3-attrs_21.4.0.bb19
-rw-r--r--meta/recipes-devtools/python/python3-attrs_23.2.0.bb39
-rw-r--r--meta/recipes-devtools/python/python3-babel_2.10.1.bb26
-rw-r--r--meta/recipes-devtools/python/python3-babel_2.14.0.bb26
-rw-r--r--meta/recipes-devtools/python/python3-bcrypt-crates.inc114
-rw-r--r--meta/recipes-devtools/python/python3-bcrypt/0001-Bump-pyo3-from-0.20.0-to-0.20.3-in-src-_bcrypt-746.patch111
-rw-r--r--meta/recipes-devtools/python/python3-bcrypt/run-ptest2
-rw-r--r--meta/recipes-devtools/python/python3-bcrypt_3.2.2.bb30
-rw-r--r--meta/recipes-devtools/python/python3-bcrypt_4.1.2.bb37
-rw-r--r--meta/recipes-devtools/python/python3-beartype_0.18.2.bb11
-rw-r--r--meta/recipes-devtools/python/python3-booleanpy_4.0.bb13
-rw-r--r--meta/recipes-devtools/python/python3-build_1.2.1.bb32
-rw-r--r--meta/recipes-devtools/python/python3-calver/0001-setup.py-hard-code-version.patch32
-rw-r--r--meta/recipes-devtools/python/python3-calver/run-ptest3
-rw-r--r--meta/recipes-devtools/python/python3-calver_2022.6.26.bb28
-rw-r--r--meta/recipes-devtools/python/python3-certifi_2021.10.8.bb14
-rw-r--r--meta/recipes-devtools/python/python3-certifi_2024.2.2.bb16
-rw-r--r--meta/recipes-devtools/python/python3-cffi_1.15.0.bb18
-rw-r--r--meta/recipes-devtools/python/python3-cffi_1.16.0.bb18
-rw-r--r--meta/recipes-devtools/python/python3-chardet_4.0.0.bb24
-rw-r--r--meta/recipes-devtools/python/python3-chardet_5.2.0.bb24
-rw-r--r--meta/recipes-devtools/python/python3-click/run-ptest3
-rw-r--r--meta/recipes-devtools/python/python3-click_8.1.7.bb39
-rw-r--r--meta/recipes-devtools/python/python3-cryptography-crates.inc102
-rw-r--r--meta/recipes-devtools/python/python3-cryptography-vectors_37.0.2.bb29
-rw-r--r--meta/recipes-devtools/python/python3-cryptography-vectors_42.0.5.bb31
-rw-r--r--meta/recipes-devtools/python/python3-cryptography/0001-Cargo.toml-specify-pem-version.patch29
-rw-r--r--meta/recipes-devtools/python/python3-cryptography/0001-pyproject.toml-remove-benchmark-disable-option.patch15
-rw-r--r--meta/recipes-devtools/python/python3-cryptography/0002-Cargo.toml-edition-2018-2021.patch28
-rwxr-xr-xmeta/recipes-devtools/python/python3-cryptography/check-memfree.py2
-rw-r--r--meta/recipes-devtools/python/python3-cryptography/run-ptest7
-rw-r--r--meta/recipes-devtools/python/python3-cryptography_37.0.2.bb119
-rw-r--r--meta/recipes-devtools/python/python3-cryptography_42.0.5.bb67
-rw-r--r--meta/recipes-devtools/python/python3-cython_0.29.28.bb37
-rw-r--r--meta/recipes-devtools/python/python3-cython_3.0.9.bb37
-rw-r--r--meta/recipes-devtools/python/python3-dbus_1.2.18.bb25
-rw-r--r--meta/recipes-devtools/python/python3-dbus_1.3.2.bb23
-rw-r--r--meta/recipes-devtools/python/python3-dbusmock_0.27.5.bb18
-rw-r--r--meta/recipes-devtools/python/python3-dbusmock_0.31.1.bb22
-rw-r--r--meta/recipes-devtools/python/python3-docutils_0.18.1.bb11
-rw-r--r--meta/recipes-devtools/python/python3-docutils_0.20.1.bb20
-rw-r--r--meta/recipes-devtools/python/python3-dtc/0001-Revert-libfdt-overlay-make-overlay_get_target-public.patch132
-rw-r--r--meta/recipes-devtools/python/python3-dtc_1.7.0.bb25
-rw-r--r--meta/recipes-devtools/python/python3-dtschema_2022.4.bb15
-rw-r--r--meta/recipes-devtools/python/python3-dtschema_2024.2.bb20
-rw-r--r--meta/recipes-devtools/python/python3-editables_0.5.bb15
-rw-r--r--meta/recipes-devtools/python/python3-flit-core_3.7.1.bb33
-rw-r--r--meta/recipes-devtools/python/python3-flit-core_3.9.0.bb37
-rw-r--r--meta/recipes-devtools/python/python3-git_3.1.27.bb32
-rw-r--r--meta/recipes-devtools/python/python3-git_3.1.43.bb32
-rw-r--r--meta/recipes-devtools/python/python3-gitdb_4.0.11.bb22
-rw-r--r--meta/recipes-devtools/python/python3-gitdb_4.0.9.bb3
-rw-r--r--meta/recipes-devtools/python/python3-hatch-fancy-pypi-readme_24.1.0.bb14
-rw-r--r--meta/recipes-devtools/python/python3-hatch-vcs_0.4.0.bb14
-rw-r--r--meta/recipes-devtools/python/python3-hatchling_1.22.4.bb17
-rw-r--r--meta/recipes-devtools/python/python3-hypothesis/run-ptest2
-rw-r--r--meta/recipes-devtools/python/python3-hypothesis_6.46.4.bb38
-rw-r--r--meta/recipes-devtools/python/python3-hypothesis_6.99.4.bb39
-rw-r--r--meta/recipes-devtools/python/python3-idna_3.3.bb19
-rw-r--r--meta/recipes-devtools/python/python3-idna_3.6.bb14
-rw-r--r--meta/recipes-devtools/python/python3-imagesize_1.3.0.bb13
-rw-r--r--meta/recipes-devtools/python/python3-imagesize_1.4.1.bb13
-rw-r--r--meta/recipes-devtools/python/python3-importlib-metadata_4.11.3.bb20
-rw-r--r--meta/recipes-devtools/python/python3-importlib-metadata_7.1.0.bb20
-rw-r--r--meta/recipes-devtools/python/python3-iniconfig_1.1.1.bb12
-rw-r--r--meta/recipes-devtools/python/python3-iniconfig_2.0.0.bb13
-rw-r--r--meta/recipes-devtools/python/python3-installer/interpreter.patch24
-rw-r--r--meta/recipes-devtools/python/python3-installer_0.5.1.bb22
-rw-r--r--meta/recipes-devtools/python/python3-installer_0.7.0.bb34
-rw-r--r--meta/recipes-devtools/python/python3-iso8601_1.0.2.bb15
-rw-r--r--meta/recipes-devtools/python/python3-iso8601_2.1.0.bb15
-rw-r--r--meta/recipes-devtools/python/python3-isodate_0.6.1.bb16
-rw-r--r--meta/recipes-devtools/python/python3-jinja2/run-ptest2
-rw-r--r--meta/recipes-devtools/python/python3-jinja2_3.1.2.bb48
-rw-r--r--meta/recipes-devtools/python/python3-jinja2_3.1.3.bb48
-rw-r--r--meta/recipes-devtools/python/python3-jsonpointer/run-ptest2
-rw-r--r--meta/recipes-devtools/python/python3-jsonpointer_2.3.bb26
-rw-r--r--meta/recipes-devtools/python/python3-jsonpointer_2.4.bb28
-rw-r--r--meta/recipes-devtools/python/python3-jsonschema-specifications_2023.12.1.bb16
-rw-r--r--meta/recipes-devtools/python/python3-jsonschema_4.21.1.bb50
-rw-r--r--meta/recipes-devtools/python/python3-jsonschema_4.5.1.bb48
-rw-r--r--meta/recipes-devtools/python/python3-libarchive-c_4.0.bb21
-rw-r--r--meta/recipes-devtools/python/python3-libarchive-c_5.1.bb21
-rw-r--r--meta/recipes-devtools/python/python3-license-expression/run-ptest3
-rw-r--r--meta/recipes-devtools/python/python3-license-expression_30.3.0.bb38
-rw-r--r--meta/recipes-devtools/python/python3-lxml_5.1.0.bb42
-rw-r--r--meta/recipes-devtools/python/python3-magic_0.4.25.bb22
-rw-r--r--meta/recipes-devtools/python/python3-magic_0.4.27.bb22
-rw-r--r--meta/recipes-devtools/python/python3-mako_1.2.0.bb20
-rw-r--r--meta/recipes-devtools/python/python3-mako_1.3.2.bb20
-rw-r--r--meta/recipes-devtools/python/python3-markdown_3.3.7.bb13
-rw-r--r--meta/recipes-devtools/python/python3-markdown_3.6.bb13
-rw-r--r--meta/recipes-devtools/python/python3-markupsafe/run-ptest2
-rw-r--r--meta/recipes-devtools/python/python3-markupsafe_2.1.1.bb27
-rw-r--r--meta/recipes-devtools/python/python3-markupsafe_2.1.5.bb27
-rw-r--r--meta/recipes-devtools/python/python3-maturin-crates.inc618
-rw-r--r--meta/recipes-devtools/python/python3-maturin/0001-Add-32-bit-RISC-V-support.patch102
-rw-r--r--meta/recipes-devtools/python/python3-maturin_1.4.0.bb43
-rw-r--r--meta/recipes-devtools/python/python3-meson-python_0.15.0.bb27
-rw-r--r--meta/recipes-devtools/python/python3-more-itertools/run-ptest2
-rw-r--r--meta/recipes-devtools/python/python3-more-itertools_10.2.0.bb29
-rw-r--r--meta/recipes-devtools/python/python3-more-itertools_8.13.0.bb27
-rw-r--r--meta/recipes-devtools/python/python3-ndg-httpsclient_0.5.1.bb14
-rw-r--r--meta/recipes-devtools/python/python3-numpy/0001-Don-t-search-usr-and-so-on-for-libraries-by-default-.patch10
-rw-r--r--meta/recipes-devtools/python/python3-numpy/fix_reproducibility.patch33
-rw-r--r--meta/recipes-devtools/python/python3-numpy_1.22.3.bb72
-rw-r--r--meta/recipes-devtools/python/python3-numpy_1.26.4.bb64
-rw-r--r--meta/recipes-devtools/python/python3-packaging_21.3.bb12
-rw-r--r--meta/recipes-devtools/python/python3-packaging_24.0.bb18
-rw-r--r--meta/recipes-devtools/python/python3-pathlib2_2.3.7.bb4
-rw-r--r--meta/recipes-devtools/python/python3-pathspec_0.12.1.bb13
-rw-r--r--meta/recipes-devtools/python/python3-pbr_5.9.0.bb4
-rw-r--r--meta/recipes-devtools/python/python3-pbr_6.0.0.bb4
-rw-r--r--meta/recipes-devtools/python/python3-pip/0001-change-shebang-to-python3.patch115
-rw-r--r--meta/recipes-devtools/python/python3-pip/no_shebang_mangling.patch20
-rw-r--r--meta/recipes-devtools/python/python3-pip/reproducible.patch74
-rw-r--r--meta/recipes-devtools/python/python3-pip_22.1.bb63
-rw-r--r--meta/recipes-devtools/python/python3-pip_24.0.bb60
-rw-r--r--meta/recipes-devtools/python/python3-pluggy/run-ptest2
-rw-r--r--meta/recipes-devtools/python/python3-pluggy_1.0.0.bb26
-rw-r--r--meta/recipes-devtools/python/python3-pluggy_1.4.0.bb29
-rw-r--r--meta/recipes-devtools/python/python3-ply_3.11.bb6
-rw-r--r--meta/recipes-devtools/python/python3-poetry-core_1.0.8.bb44
-rw-r--r--meta/recipes-devtools/python/python3-poetry-core_1.9.0.bb42
-rw-r--r--meta/recipes-devtools/python/python3-psutil/0001-fix-failure-test-cases.patch197
-rw-r--r--meta/recipes-devtools/python/python3-psutil_5.9.0.bb29
-rw-r--r--meta/recipes-devtools/python/python3-psutil_5.9.8.bb41
-rw-r--r--meta/recipes-devtools/python/python3-py_1.11.0.bb4
-rw-r--r--meta/recipes-devtools/python/python3-pyasn1/run-ptest2
-rw-r--r--meta/recipes-devtools/python/python3-pyasn1_0.4.8.bb2
-rw-r--r--meta/recipes-devtools/python/python3-pyasn1_0.6.0.bb3
-rw-r--r--meta/recipes-devtools/python/python3-pycairo_1.21.0.bb26
-rw-r--r--meta/recipes-devtools/python/python3-pycairo_1.26.0.bb26
-rw-r--r--meta/recipes-devtools/python/python3-pycparser_2.21.bb6
-rw-r--r--meta/recipes-devtools/python/python3-pycryptodome_3.14.1.bb5
-rw-r--r--meta/recipes-devtools/python/python3-pycryptodome_3.20.0.bb5
-rw-r--r--meta/recipes-devtools/python/python3-pycryptodomex_3.14.1.bb9
-rw-r--r--meta/recipes-devtools/python/python3-pycryptodomex_3.20.0.bb9
-rw-r--r--meta/recipes-devtools/python/python3-pyelftools_0.28.bb15
-rw-r--r--meta/recipes-devtools/python/python3-pyelftools_0.31.bb15
-rw-r--r--meta/recipes-devtools/python/python3-pygments_2.12.0.bb19
-rw-r--r--meta/recipes-devtools/python/python3-pygments_2.17.2.bb14
-rw-r--r--meta/recipes-devtools/python/python3-pygobject/0001-Do-not-build-tests.patch30
-rw-r--r--meta/recipes-devtools/python/python3-pygobject_3.42.1.bb34
-rw-r--r--meta/recipes-devtools/python/python3-pygobject_3.48.1.bb39
-rw-r--r--meta/recipes-devtools/python/python3-pyopenssl_22.0.0.bb23
-rw-r--r--meta/recipes-devtools/python/python3-pyopenssl_24.1.0.bb23
-rw-r--r--meta/recipes-devtools/python/python3-pyparsing_3.0.9.bb30
-rw-r--r--meta/recipes-devtools/python/python3-pyparsing_3.1.2.bb30
-rw-r--r--meta/recipes-devtools/python/python3-pyproject-hooks_1.0.0.bb26
-rw-r--r--meta/recipes-devtools/python/python3-pyproject-metadata_0.7.1.bb27
-rw-r--r--meta/recipes-devtools/python/python3-pyrsistent_0.18.1.bb14
-rw-r--r--meta/recipes-devtools/python/python3-pyrsistent_0.20.0.bb14
-rw-r--r--meta/recipes-devtools/python/python3-pysocks_1.7.1.bb12
-rw-r--r--meta/recipes-devtools/python/python3-pytest-runner_6.0.0.bb16
-rw-r--r--meta/recipes-devtools/python/python3-pytest-runner_6.0.1.bb16
-rw-r--r--meta/recipes-devtools/python/python3-pytest-subtests_0.12.1.bb20
-rw-r--r--meta/recipes-devtools/python/python3-pytest-subtests_0.7.0.bb20
-rw-r--r--meta/recipes-devtools/python/python3-pytest_7.1.2.bb41
-rw-r--r--meta/recipes-devtools/python/python3-pytest_8.1.1.bb41
-rw-r--r--meta/recipes-devtools/python/python3-pytz/run-ptest2
-rw-r--r--meta/recipes-devtools/python/python3-pytz_2022.1.bb35
-rw-r--r--meta/recipes-devtools/python/python3-pytz_2024.1.bb36
-rw-r--r--meta/recipes-devtools/python/python3-pyyaml/0001-Fix-builds-with-Cython-3.patch54
-rw-r--r--meta/recipes-devtools/python/python3-pyyaml/run-ptest3
-rw-r--r--meta/recipes-devtools/python/python3-pyyaml_6.0.1.bb40
-rw-r--r--meta/recipes-devtools/python/python3-pyyaml_6.0.bb19
-rw-r--r--meta/recipes-devtools/python/python3-rdflib_7.0.0.bb21
-rw-r--r--meta/recipes-devtools/python/python3-referencing_0.34.0.bb14
-rw-r--r--meta/recipes-devtools/python/python3-requests_2.27.1.bb25
-rw-r--r--meta/recipes-devtools/python/python3-requests_2.31.0.bb26
-rw-r--r--meta/recipes-devtools/python/python3-rfc3339-validator_0.1.4.bb6
-rw-r--r--meta/recipes-devtools/python/python3-rfc3986-validator_0.1.1.bb2
-rw-r--r--meta/recipes-devtools/python/python3-rpds-py-crates.inc80
-rw-r--r--meta/recipes-devtools/python/python3-rpds-py_0.18.0.bb15
-rw-r--r--meta/recipes-devtools/python/python3-ruamel-yaml_0.17.21.bb24
-rw-r--r--meta/recipes-devtools/python/python3-ruamel-yaml_0.18.6.bb23
-rw-r--r--meta/recipes-devtools/python/python3-scons_4.3.0.bb36
-rw-r--r--meta/recipes-devtools/python/python3-scons_4.7.0.bb36
-rw-r--r--meta/recipes-devtools/python/python3-semantic-version_2.10.0.bb18
-rw-r--r--meta/recipes-devtools/python/python3-semantic-version_2.9.0.bb14
-rw-r--r--meta/recipes-devtools/python/python3-setuptools-rust_1.3.0.bb28
-rw-r--r--meta/recipes-devtools/python/python3-setuptools-rust_1.9.0.bb35
-rw-r--r--meta/recipes-devtools/python/python3-setuptools-scm_6.4.2.bb28
-rw-r--r--meta/recipes-devtools/python/python3-setuptools-scm_8.0.4.bb31
-rw-r--r--meta/recipes-devtools/python/python3-setuptools/0001-_distutils-sysconfig.py-make-it-possible-to-substite.patch34
-rw-r--r--meta/recipes-devtools/python/python3-setuptools/0001-change-shebang-to-python3.patch25
-rw-r--r--meta/recipes-devtools/python/python3-setuptools/0001-conditionally-do-not-fetch-code-by-easy_install.patch (renamed from meta/recipes-devtools/python/files/0001-conditionally-do-not-fetch-code-by-easy_install.patch)9
-rw-r--r--meta/recipes-devtools/python/python3-setuptools_62.3.1.bb55
-rw-r--r--meta/recipes-devtools/python/python3-setuptools_69.2.0.bb55
-rw-r--r--meta/recipes-devtools/python/python3-smartypants_2.0.0.bb2
-rw-r--r--meta/recipes-devtools/python/python3-smmap_5.0.0.bb19
-rw-r--r--meta/recipes-devtools/python/python3-smmap_6.0.0.bb19
-rw-r--r--meta/recipes-devtools/python/python3-spdx-tools_0.8.2.bb28
-rw-r--r--meta/recipes-devtools/python/python3-sphinx-rtd-theme_1.0.0.bb22
-rw-r--r--meta/recipes-devtools/python/python3-sphinx-rtd-theme_2.0.0.bb26
-rw-r--r--meta/recipes-devtools/python/python3-sphinx_4.5.0.bb28
-rw-r--r--meta/recipes-devtools/python/python3-sphinx_7.2.6.bb35
-rw-r--r--meta/recipes-devtools/python/python3-sphinxcontrib-applehelp_1.0.2.bb12
-rw-r--r--meta/recipes-devtools/python/python3-sphinxcontrib-applehelp_1.0.8.bb12
-rw-r--r--meta/recipes-devtools/python/python3-sphinxcontrib-devhelp_1.0.2.bb12
-rw-r--r--meta/recipes-devtools/python/python3-sphinxcontrib-devhelp_1.0.6.bb12
-rw-r--r--meta/recipes-devtools/python/python3-sphinxcontrib-htmlhelp_2.0.0.bb12
-rw-r--r--meta/recipes-devtools/python/python3-sphinxcontrib-htmlhelp_2.0.5.bb12
-rw-r--r--meta/recipes-devtools/python/python3-sphinxcontrib-jquery_4.1.bb12
-rw-r--r--meta/recipes-devtools/python/python3-sphinxcontrib-jsmath_1.0.1.bb2
-rw-r--r--meta/recipes-devtools/python/python3-sphinxcontrib-qthelp_1.0.3.bb12
-rw-r--r--meta/recipes-devtools/python/python3-sphinxcontrib-qthelp_1.0.7.bb12
-rw-r--r--meta/recipes-devtools/python/python3-sphinxcontrib-serializinghtml_1.1.10.bb12
-rw-r--r--meta/recipes-devtools/python/python3-sphinxcontrib-serializinghtml_1.1.5.bb12
-rw-r--r--meta/recipes-devtools/python/python3-strict-rfc3339/0001-setup.py-use-vendored-_distutils.patch27
-rw-r--r--meta/recipes-devtools/python/python3-strict-rfc3339_0.7.bb11
-rw-r--r--meta/recipes-devtools/python/python3-subunit_1.4.0.bb15
-rw-r--r--meta/recipes-devtools/python/python3-subunit_1.4.4.bb15
-rw-r--r--meta/recipes-devtools/python/python3-testtools_2.5.0.bb2
-rw-r--r--meta/recipes-devtools/python/python3-testtools_2.7.1.bb20
-rw-r--r--meta/recipes-devtools/python/python3-toml_0.10.2.bb2
-rw-r--r--meta/recipes-devtools/python/python3-tomli_2.0.1.bb5
-rw-r--r--meta/recipes-devtools/python/python3-trove-classifiers/run-ptest3
-rw-r--r--meta/recipes-devtools/python/python3-trove-classifiers_2024.3.3.bb26
-rw-r--r--meta/recipes-devtools/python/python3-typing-extensions_4.11.0.bb24
-rw-r--r--meta/recipes-devtools/python/python3-typing-extensions_4.2.0.bb14
-rw-r--r--meta/recipes-devtools/python/python3-typogrify_2.0.7.bb4
-rw-r--r--meta/recipes-devtools/python/python3-unittest-automake-output_0.2.bb13
-rw-r--r--meta/recipes-devtools/python/python3-uritools_4.0.2.bb11
-rw-r--r--meta/recipes-devtools/python/python3-urllib3_1.26.9.bb22
-rw-r--r--meta/recipes-devtools/python/python3-urllib3_2.2.1.bb24
-rw-r--r--meta/recipes-devtools/python/python3-wcwidth/run-ptest2
-rw-r--r--meta/recipes-devtools/python/python3-wcwidth_0.2.13.bb27
-rw-r--r--meta/recipes-devtools/python/python3-wcwidth_0.2.5.bb25
-rw-r--r--meta/recipes-devtools/python/python3-webcolors/run-ptest2
-rw-r--r--meta/recipes-devtools/python/python3-webcolors_1.11.1.bb28
-rw-r--r--meta/recipes-devtools/python/python3-webcolors_1.13.bb28
-rw-r--r--meta/recipes-devtools/python/python3-websockets_12.0.bb16
-rw-r--r--meta/recipes-devtools/python/python3-wheel/0001-Backport-pyproject.toml-from-flit-backend-branch.patch100
-rw-r--r--meta/recipes-devtools/python/python3-wheel_0.37.1.bb17
-rw-r--r--meta/recipes-devtools/python/python3-wheel_0.43.0.bb15
-rw-r--r--meta/recipes-devtools/python/python3-xmltodict/run-ptest3
-rw-r--r--meta/recipes-devtools/python/python3-xmltodict_0.13.0.bb31
-rw-r--r--meta/recipes-devtools/python/python3-yamllint_1.35.1.bb15
-rw-r--r--meta/recipes-devtools/python/python3-zipp_3.18.1.bb18
-rw-r--r--meta/recipes-devtools/python/python3-zipp_3.8.0.bb18
-rw-r--r--meta/recipes-devtools/python/python3/0001-Avoid-shebang-overflow-on-python-config.py.patch30
-rw-r--r--meta/recipes-devtools/python/python3/0001-Do-not-add-usr-lib-termcap-to-linker-flags-to-avoid-.patch25
-rw-r--r--meta/recipes-devtools/python/python3/0001-Do-not-use-the-shell-version-of-python-config-that-w.patch36
-rw-r--r--meta/recipes-devtools/python/python3/0001-Don-t-search-system-for-headers-libraries.patch27
-rw-r--r--meta/recipes-devtools/python/python3/0001-Lib-pty.py-handle-stdin-I-O-errors-same-way-as-maste.patch12
-rw-r--r--meta/recipes-devtools/python/python3/0001-Lib-sysconfig.py-use-prefix-value-from-build-configu.patch30
-rw-r--r--meta/recipes-devtools/python/python3/0001-Makefile-do-not-compile-.pyc-in-parallel.patch65
-rw-r--r--meta/recipes-devtools/python/python3/0001-Makefile.pre-use-qemu-wrapper-when-gathering-profile.patch17
-rw-r--r--meta/recipes-devtools/python/python3/0001-Skip-failing-tests-due-to-load-variability-on-YP-AB.patch50
-rw-r--r--meta/recipes-devtools/python/python3/0001-Update-test_sysconfig-for-posix_user-purelib.patch37
-rw-r--r--meta/recipes-devtools/python/python3/0001-Use-FLAG_REF-always-for-interned-strings.patch33
-rw-r--r--meta/recipes-devtools/python/python3/0001-bpo-36852-proper-detection-of-mips-architecture-for-.patch206
-rw-r--r--meta/recipes-devtools/python/python3/0001-distutils-sysconfig-append-STAGING_LIBDIR-python-sys.patch28
-rw-r--r--meta/recipes-devtools/python/python3/0001-gh-114492-Initialize-struct-termios-before-calling-t.patch26
-rw-r--r--meta/recipes-devtools/python/python3/0001-python3-use-cc_basename-to-replace-CC-for-checking-c.patch67
-rw-r--r--meta/recipes-devtools/python/python3/0001-setup.py-Do-not-detect-multiarch-paths-when-cross-co.patch42
-rw-r--r--meta/recipes-devtools/python/python3/0001-skip-no_stdout_fileno-test-due-to-load-variability.patch29
-rw-r--r--meta/recipes-devtools/python/python3/0001-sysconfig.py-use-platlibdir-also-for-purelib.patch6
-rw-r--r--meta/recipes-devtools/python/python3/0001-test_ctypes.test_find-skip-without-tools-sdk.patch12
-rw-r--r--meta/recipes-devtools/python/python3/0001-test_locale.py-correct-the-test-output-format.patch6
-rw-r--r--meta/recipes-devtools/python/python3/0001-test_storlines-skip-due-to-load-variability.patch30
-rw-r--r--meta/recipes-devtools/python/python3/0017-setup.py-do-not-report-missing-dependencies-for-disa.patch38
-rw-r--r--meta/recipes-devtools/python/python3/0020-configure.ac-setup.py-do-not-add-a-curses-include-pa.patch38
-rw-r--r--meta/recipes-devtools/python/python3/12-distutils-prefix-is-inside-staging-area.patch58
-rw-r--r--meta/recipes-devtools/python/python3/avoid_warning_about_tkinter.patch31
-rw-r--r--meta/recipes-devtools/python/python3/cgi_py.patch4
-rw-r--r--meta/recipes-devtools/python/python3/crosspythonpath.patch22
-rw-r--r--meta/recipes-devtools/python/python3/deterministic_imports.patch40
-rw-r--r--meta/recipes-devtools/python/python3/get_module_deps3.py4
-rw-r--r--meta/recipes-devtools/python/python3/makerace.patch15
-rw-r--r--meta/recipes-devtools/python/python3/python-config.patch55
-rw-r--r--meta/recipes-devtools/python/python3/python3-manifest.json208
-rw-r--r--meta/recipes-devtools/python/python3/run-ptest4
-rw-r--r--meta/recipes-devtools/python/python3_3.10.4.bb422
-rw-r--r--meta/recipes-devtools/python/python3_3.12.3.bb470
-rw-r--r--meta/recipes-devtools/qemu/nativesdk-qemu-helper_1.0.bb10
-rw-r--r--meta/recipes-devtools/qemu/qemu-helper-native_1.0.bb16
-rwxr-xr-xmeta/recipes-devtools/qemu/qemu-helper/qemu-oe-bridge-helper25
-rw-r--r--meta/recipes-devtools/qemu/qemu-helper/qemu-oe-bridge-helper.c40
-rw-r--r--meta/recipes-devtools/qemu/qemu-helper/tunctl.c158
-rw-r--r--meta/recipes-devtools/qemu/qemu-native.inc2
-rw-r--r--meta/recipes-devtools/qemu/qemu-native_7.0.0.bb9
-rw-r--r--meta/recipes-devtools/qemu/qemu-native_8.2.1.bb9
-rw-r--r--meta/recipes-devtools/qemu/qemu-system-native_7.0.0.bb32
-rw-r--r--meta/recipes-devtools/qemu/qemu-system-native_8.2.1.bb31
-rw-r--r--meta/recipes-devtools/qemu/qemu.inc175
-rw-r--r--meta/recipes-devtools/qemu/qemu/0001-linux-user-x86_64-Handle-the-vsyscall-page-in-open_s.patch56
-rw-r--r--meta/recipes-devtools/qemu/qemu/0001-qemu-Add-addition-environment-space-to-boot-loader-q.patch13
-rw-r--r--meta/recipes-devtools/qemu/qemu/0002-chardev-connect-socket-to-a-spawned-command.patch246
-rw-r--r--meta/recipes-devtools/qemu/qemu/0002-linux-user-Replace-use-of-lfs64-related-functions-an.patch355
-rw-r--r--meta/recipes-devtools/qemu/qemu/0002-linux-user-loongarch64-Remove-TARGET_FORCE_SHMLBA.patch43
-rw-r--r--meta/recipes-devtools/qemu/qemu/0003-apic-fixup-fallthrough-to-PIC.patch13
-rw-r--r--meta/recipes-devtools/qemu/qemu/0003-linux-user-Add-strace-for-shmat.patch71
-rw-r--r--meta/recipes-devtools/qemu/qemu/0004-configure-Add-pkg-config-handling-for-libgcrypt.patch17
-rw-r--r--meta/recipes-devtools/qemu/qemu/0004-linux-user-Rewrite-target_shmat.patch236
-rw-r--r--meta/recipes-devtools/qemu/qemu/0005-qemu-Do-not-include-file-if-not-exists.patch13
-rw-r--r--meta/recipes-devtools/qemu/qemu/0005-tests-tcg-Check-that-shmat-does-not-break-proc-self-.patch85
-rw-r--r--meta/recipes-devtools/qemu/qemu/0006-qemu-Add-some-user-space-mmap-tweaks-to-address-musl.patch13
-rw-r--r--meta/recipes-devtools/qemu/qemu/0007-qemu-Determinism-fixes.patch11
-rw-r--r--meta/recipes-devtools/qemu/qemu/0008-tests-meson.build-use-relative-path-to-refer-to-file.patch29
-rw-r--r--meta/recipes-devtools/qemu/qemu/0009-Define-MAP_SYNC-and-MAP_SHARED_VALIDATE-on-needed-li.patch11
-rw-r--r--meta/recipes-devtools/qemu/qemu/0010-hw-pvrdma-Protect-against-buggy-or-malicious-guest-d.patch59
-rw-r--r--meta/recipes-devtools/qemu/qemu/0011-linux-user-workaround-for-missing-MAP_FIXED_NOREPLAC.patch282
-rw-r--r--meta/recipes-devtools/qemu/qemu/0012-linux-user-workaround-for-missing-MAP_SHARED_VALIDAT.patch51
-rw-r--r--meta/recipes-devtools/qemu/qemu/4a8579ad8629b57a43daa62e46cc7af6e1078116.patch60
-rw-r--r--meta/recipes-devtools/qemu/qemu/CVE-2023-6683.patch91
-rw-r--r--meta/recipes-devtools/qemu/qemu/cross.patch41
-rw-r--r--meta/recipes-devtools/qemu/qemu/fixedmeson.patch20
-rw-r--r--meta/recipes-devtools/qemu/qemu/no-pip.patch45
-rw-r--r--meta/recipes-devtools/qemu/qemu/qemu-guest-agent.init75
-rw-r--r--meta/recipes-devtools/qemu/qemu/qemu-guest-agent.udev2
-rw-r--r--meta/recipes-devtools/qemu/qemu_7.0.0.bb27
-rw-r--r--meta/recipes-devtools/qemu/qemu_8.2.1.bb27
-rw-r--r--meta/recipes-devtools/quilt/quilt.inc41
-rw-r--r--meta/recipes-devtools/quilt/quilt/0001-test-Fix-a-race-condition-in-merge.test.patch48
-rw-r--r--meta/recipes-devtools/quilt/quilt/Makefile14
-rw-r--r--meta/recipes-devtools/quilt/quilt/faildiff-order.patch41
-rw-r--r--meta/recipes-devtools/quilt/quilt/fix-grep-3.8.patch144
-rwxr-xr-xmeta/recipes-devtools/quilt/quilt/run-ptest7
-rwxr-xr-xmeta/recipes-devtools/quilt/quilt/test.sh25
-rw-r--r--meta/recipes-devtools/repo/repo/0001-python3-shebang.patch26
-rw-r--r--meta/recipes-devtools/repo/repo_2.25.bb31
-rw-r--r--meta/recipes-devtools/repo/repo_2.44.bb30
-rw-r--r--meta/recipes-devtools/rpm/files/0001-Add-a-color-setting-for-mips64_n32-binaries.patch17
-rw-r--r--meta/recipes-devtools/rpm/files/0001-CMakeLists.txt-look-for-lua-with-pkg-config-rather-t.patch27
-rw-r--r--meta/recipes-devtools/rpm/files/0001-CVE-2021-3521.patch57
-rw-r--r--meta/recipes-devtools/rpm/files/0001-Do-not-add-an-unsatisfiable-dependency-when-building.patch7
-rw-r--r--meta/recipes-devtools/rpm/files/0001-Do-not-hardcode-lib-rpm-as-the-installation-path-for.patch53
-rw-r--r--meta/recipes-devtools/rpm/files/0001-Do-not-read-config-files-from-HOME.patch19
-rw-r--r--meta/recipes-devtools/rpm/files/0001-Do-not-reset-the-PATH-environment-variable-before-ru.patch7
-rw-r--r--meta/recipes-devtools/rpm/files/0001-Rip-out-partial-support-for-unused-MD2-and-RIPEMD160.patch81
-rw-r--r--meta/recipes-devtools/rpm/files/0001-When-cross-installing-execute-package-scriptlets-wit.patch19
-rw-r--r--meta/recipes-devtools/rpm/files/0001-build-pack.c-do-not-insert-payloadflags-into-.rpm-me.patch6
-rw-r--r--meta/recipes-devtools/rpm/files/0001-docs-do-not-build-manpages-requires-pandoc.patch26
-rw-r--r--meta/recipes-devtools/rpm/files/0001-lib-transaction.c-fix-file-conflicts-for-MIPS64-N32.patch9
-rw-r--r--meta/recipes-devtools/rpm/files/0001-perl-disable-auto-reqs.patch25
-rw-r--r--meta/recipes-devtools/rpm/files/0001-tools-Add-error.h-for-non-glibc-case.patch71
-rw-r--r--meta/recipes-devtools/rpm/files/0002-Add-support-for-prefixing-etc-from-RPM_ETCCONFIGDIR-.patch29
-rw-r--r--meta/recipes-devtools/rpm/files/0002-CVE-2021-3521.patch64
-rw-r--r--meta/recipes-devtools/rpm/files/0002-rpmio-rpmglob.c-avoid-using-GLOB_BRACE-if-undefined-.patch34
-rw-r--r--meta/recipes-devtools/rpm/files/0003-CVE-2021-3521.patch329
-rw-r--r--meta/recipes-devtools/rpm/files/0016-rpmscript.c-change-logging-level-around-scriptlets-t.patch12
-rw-r--r--meta/recipes-devtools/rpm/files/environment.d-rpm.sh1
-rw-r--r--meta/recipes-devtools/rpm/rpm_4.17.0.bb208
-rw-r--r--meta/recipes-devtools/rpm/rpm_4.19.1.1.bb197
-rw-r--r--meta/recipes-devtools/rsync/files/0001-Add-missing-prototypes-to-function-declarations.patch170
-rw-r--r--meta/recipes-devtools/rsync/files/makefile-no-rebuild.patch7
-rw-r--r--meta/recipes-devtools/rsync/rsync_3.2.4.bb70
-rw-r--r--meta/recipes-devtools/rsync/rsync_3.3.0.bb67
-rw-r--r--meta/recipes-devtools/ruby/ruby.inc39
-rw-r--r--meta/recipes-devtools/ruby/ruby/0001-extmk-fix-cross-compilation-of-external-gems.patch11
-rw-r--r--meta/recipes-devtools/ruby/ruby/0001-template-Makefile.in-do-not-write-host-cross-cc-item.patch32
-rw-r--r--meta/recipes-devtools/ruby/ruby/0001-vm_dump.c-Define-REG_S1-and-REG_S2-for-musl-riscv.patch7
-rw-r--r--meta/recipes-devtools/ruby/ruby/0002-Obey-LDFLAGS-for-the-link-of-libruby.patch7
-rw-r--r--meta/recipes-devtools/ruby/ruby/0002-template-Makefile.in-filter-out-f-prefix-map.patch42
-rw-r--r--meta/recipes-devtools/ruby/ruby/0003-rdoc-build-reproducible-documentation.patch16
-rw-r--r--meta/recipes-devtools/ruby/ruby/0004-lib-mkmf.rb-sort-list-of-object-files-in-generated-M.patch7
-rw-r--r--meta/recipes-devtools/ruby/ruby/0005-Mark-Gemspec-reproducible-change-fixing-784225-too.patch28
-rw-r--r--meta/recipes-devtools/ruby/ruby/0006-Make-gemspecs-reproducible.patch22
-rw-r--r--meta/recipes-devtools/ruby/ruby/remove_has_include_macros.patch35
-rw-r--r--meta/recipes-devtools/ruby/ruby_3.1.2.bb107
-rw-r--r--meta/recipes-devtools/ruby/ruby_3.3.0.bb140
-rwxr-xr-xmeta/recipes-devtools/run-postinsts/run-postinsts/run-postinsts11
-rw-r--r--meta/recipes-devtools/run-postinsts/run-postinsts/run-postinsts.service2
-rw-r--r--meta/recipes-devtools/run-postinsts/run-postinsts_1.0.bb3
-rw-r--r--meta/recipes-devtools/rust/README-rust.md26
-rw-r--r--meta/recipes-devtools/rust/cargo-c-crates.inc654
-rw-r--r--meta/recipes-devtools/rust/cargo-c-native_0.9.30+cargo-0.77.0.bb17
-rw-r--r--meta/recipes-devtools/rust/cargo_1.75.0.bb73
-rw-r--r--meta/recipes-devtools/rust/files/0001-Handle-vendored-sources-when-remapping-paths.patch46
-rw-r--r--meta/recipes-devtools/rust/files/0001-Revert-Map-source-absolute-paths-to-OUT_DIR-as-relat.patch67
-rw-r--r--meta/recipes-devtools/rust/files/cargo-path.patch37
-rw-r--r--meta/recipes-devtools/rust/files/custom-target-cfg.patch90
-rw-r--r--meta/recipes-devtools/rust/files/hardcodepaths.patch59
-rw-r--r--meta/recipes-devtools/rust/files/repro-issue-fix-with-v175.patch36
-rw-r--r--meta/recipes-devtools/rust/files/rustc-bootstrap.patch21
-rw-r--r--meta/recipes-devtools/rust/files/rv32-cargo-rustix-0.38.19-fix.patch70
-rw-r--r--meta/recipes-devtools/rust/files/rv32-missing-syscalls.patch1503
-rw-r--r--meta/recipes-devtools/rust/files/rv32-rustix-libc-backend.patch32
-rw-r--r--meta/recipes-devtools/rust/files/target-build-value.patch26
-rw-r--r--meta/recipes-devtools/rust/files/target-rust-ccld.c19
-rw-r--r--meta/recipes-devtools/rust/files/zlib-off64_t.patch36
-rw-r--r--meta/recipes-devtools/rust/libstd-rs.inc40
-rw-r--r--meta/recipes-devtools/rust/libstd-rs/0001-Add-400-series-syscalls-to-musl-riscv64-definitions.patch44
-rw-r--r--meta/recipes-devtools/rust/libstd-rs/0001-Update-checksums-for-modified-vendored-libc.patch18
-rw-r--r--meta/recipes-devtools/rust/libstd-rs_1.60.0.bb12
-rw-r--r--meta/recipes-devtools/rust/libstd-rs_1.75.0.bb53
-rw-r--r--meta/recipes-devtools/rust/rust-common.inc363
-rw-r--r--meta/recipes-devtools/rust/rust-cross-canadian-common.inc54
-rw-r--r--meta/recipes-devtools/rust/rust-cross-canadian.inc121
-rw-r--r--meta/recipes-devtools/rust/rust-cross-canadian_1.60.0.bb6
-rw-r--r--meta/recipes-devtools/rust/rust-cross-canadian_1.75.0.bb2
-rw-r--r--meta/recipes-devtools/rust/rust-cross.inc66
-rw-r--r--meta/recipes-devtools/rust/rust-cross_1.60.0.bb8
-rw-r--r--meta/recipes-devtools/rust/rust-crosssdk_1.60.0.bb8
-rw-r--r--meta/recipes-devtools/rust/rust-llvm.inc71
-rw-r--r--meta/recipes-devtools/rust/rust-llvm_1.60.0.bb6
-rw-r--r--meta/recipes-devtools/rust/rust-llvm_1.75.0.bb94
-rw-r--r--meta/recipes-devtools/rust/rust-snapshot.inc68
-rw-r--r--meta/recipes-devtools/rust/rust-source.inc23
-rw-r--r--meta/recipes-devtools/rust/rust-target.inc10
-rw-r--r--meta/recipes-devtools/rust/rust-tools-cross-canadian.inc38
-rw-r--r--meta/recipes-devtools/rust/rust-tools-cross-canadian_1.60.0.bb6
-rw-r--r--meta/recipes-devtools/rust/rust.inc202
-rw-r--r--meta/recipes-devtools/rust/rust_1.60.0.bb21
-rw-r--r--meta/recipes-devtools/rust/rust_1.75.0.bb361
-rw-r--r--meta/recipes-devtools/squashfs-tools/files/0001-install-manpages.sh-do-not-write-original-timestamps.patch30
-rw-r--r--meta/recipes-devtools/squashfs-tools/squashfs-tools_git.bb8
-rw-r--r--meta/recipes-devtools/strace/strace/0001-caps-abbrev.awk-fix-gawk-s-path.patch47
-rw-r--r--meta/recipes-devtools/strace/strace/0001-configure-Use-autoconf-macro-to-detect-largefile-sup.patch27
-rw-r--r--meta/recipes-devtools/strace/strace/0001-landlock-update-expected-string.patch67
-rw-r--r--meta/recipes-devtools/strace/strace/0002-tests-Replace-off64_t-with-off_t.patch52
-rw-r--r--meta/recipes-devtools/strace/strace/ptest-spacesave.patch2
-rwxr-xr-xmeta/recipes-devtools/strace/strace/run-ptest2
-rw-r--r--meta/recipes-devtools/strace/strace/skip-load.patch40
-rw-r--r--meta/recipes-devtools/strace/strace/update-gawk-paths.patch30
-rw-r--r--meta/recipes-devtools/strace/strace_5.17.bb55
-rw-r--r--meta/recipes-devtools/strace/strace_6.7.bb56
-rw-r--r--meta/recipes-devtools/subversion/subversion_1.14.2.bb61
-rw-r--r--meta/recipes-devtools/subversion/subversion_1.14.3.bb61
-rw-r--r--meta/recipes-devtools/swig/swig.inc2
-rw-r--r--meta/recipes-devtools/swig/swig/0001-Use-proc-self-exe-for-swig-swiglib-on-non-Win32-plat.patch11
-rw-r--r--meta/recipes-devtools/swig/swig/0001-configure-use-pkg-config-for-pcre-detection.patch44
-rw-r--r--meta/recipes-devtools/swig/swig/determinism.patch13
-rw-r--r--meta/recipes-devtools/swig/swig_4.0.2.bb7
-rw-r--r--meta/recipes-devtools/swig/swig_4.2.1.bb7
-rw-r--r--meta/recipes-devtools/syslinux/syslinux/0001-install-don-t-install-obsolete-file-com32.ld.patch32
-rw-r--r--meta/recipes-devtools/syslinux/syslinux/0001-linux-syslinux-support-ext2-3-4-device.patch15
-rw-r--r--meta/recipes-devtools/syslinux/syslinux/0002-linux-syslinux-implement-open_ext2_fs.patch21
-rw-r--r--meta/recipes-devtools/syslinux/syslinux/0003-linux-syslinux-implement-install_to_ext2.patch13
-rw-r--r--meta/recipes-devtools/syslinux/syslinux/0004-linux-syslinux-add-ext_file_read-and-ext_file_write.patch13
-rw-r--r--meta/recipes-devtools/syslinux/syslinux/0005-linux-syslinux-implement-handle_adv_on_ext.patch13
-rw-r--r--meta/recipes-devtools/syslinux/syslinux/0006-linux-syslinux-implement-write_to_ext-and-add-syslin.patch21
-rw-r--r--meta/recipes-devtools/syslinux/syslinux/0007-linux-syslinux-implement-ext_construct_sectmap_fs.patch13
-rw-r--r--meta/recipes-devtools/syslinux/syslinux/0008-libinstaller-syslinuxext-implement-syslinux_patch_bo.patch11
-rw-r--r--meta/recipes-devtools/syslinux/syslinux/0009-linux-syslinux-implement-install_bootblock.patch11
-rw-r--r--meta/recipes-devtools/syslinux/syslinux/0010-Workaround-multiple-definition-of-symbol-errors.patch19
-rw-r--r--meta/recipes-devtools/syslinux/syslinux/0011-install-don-t-install-obsolete-file-com32.ld.patch29
-rw-r--r--meta/recipes-devtools/syslinux/syslinux/0012-libinstaller-Fix-build-with-glibc-2.36.patch56
-rw-r--r--meta/recipes-devtools/syslinux/syslinux/0013-remove-clean-script.patch27
-rw-r--r--meta/recipes-devtools/syslinux/syslinux/0014-Fix-reproducibility-issues.patch32
-rw-r--r--meta/recipes-devtools/syslinux/syslinux/determinism.patch22
-rw-r--r--meta/recipes-devtools/syslinux/syslinux/syslinux-remove-clean-script.patch17
-rw-r--r--meta/recipes-devtools/syslinux/syslinux_6.04-pre2.bb29
-rw-r--r--meta/recipes-devtools/systemd-bootchart/systemd-bootchart/0001-architecture-Recognise-RISCV-32-RISCV-64.patch45
-rw-r--r--meta/recipes-devtools/systemd-bootchart/systemd-bootchart/0001-comparison_fn_t-is-glibc-specific-use-raw-signature-.patch2
-rw-r--r--meta/recipes-devtools/systemd-bootchart/systemd-bootchart/0002-musl-does-not-provide-printf-h.patch2
-rw-r--r--meta/recipes-devtools/systemd-bootchart/systemd-bootchart/0003-musl-does-not-provide-canonicalize_file_name.patch2
-rw-r--r--meta/recipes-devtools/systemd-bootchart/systemd-bootchart_234.bb44
-rw-r--r--meta/recipes-devtools/systemd-bootchart/systemd-bootchart_235.bb43
-rw-r--r--meta/recipes-devtools/tcf-agent/tcf-agent/ldflags.patch29
-rw-r--r--meta/recipes-devtools/tcf-agent/tcf-agent_git.bb5
-rw-r--r--meta/recipes-devtools/tcltk/tcl/alter-includedir.patch47
-rw-r--r--meta/recipes-devtools/tcltk/tcl/fix_issue_with_old_distro_glibc.patch39
-rw-r--r--meta/recipes-devtools/tcltk/tcl/fix_non_native_build_issue.patch39
-rw-r--r--meta/recipes-devtools/tcltk/tcl/interp.patch10
-rw-r--r--meta/recipes-devtools/tcltk/tcl/run-ptest26
-rw-r--r--meta/recipes-devtools/tcltk/tcl/tcl-add-soname.patch49
-rw-r--r--meta/recipes-devtools/tcltk/tcl/tcl-remove-hardcoded-install-path.patch17
-rw-r--r--meta/recipes-devtools/tcltk/tcl_8.6.11.bb103
-rw-r--r--meta/recipes-devtools/tcltk/tcl_8.6.14.bb110
-rw-r--r--meta/recipes-devtools/unfs3/unfs3/0001-Add-listen-action-for-a-tcp-socket.patch54
-rw-r--r--meta/recipes-devtools/unfs3/unfs3/0001-Alias-off64_t-to-off_t-on-linux-if-not-defined.patch28
-rw-r--r--meta/recipes-devtools/unfs3/unfs3/0001-attr-fix-utime-for-symlink.patch41
-rw-r--r--meta/recipes-devtools/unfs3/unfs3/0001-daemon.c-Fix-race-window-for-writing-of-the-pid-file.patch68
-rw-r--r--meta/recipes-devtools/unfs3/unfs3/0001-daemon.c-Libtirpc-porting-fixes.patch37
-rw-r--r--meta/recipes-devtools/unfs3/unfs3/0001-fix-building-on-macOS.patch27
-rw-r--r--meta/recipes-devtools/unfs3/unfs3/0001-locate.c-Include-attr.h.patch28
-rw-r--r--meta/recipes-devtools/unfs3/unfs3/alternate_rpc_ports.patch158
-rw-r--r--meta/recipes-devtools/unfs3/unfs3/fix_compile_warning.patch25
-rw-r--r--meta/recipes-devtools/unfs3/unfs3/fix_pid_race_parent_writes_child_pid.patch61
-rw-r--r--meta/recipes-devtools/unfs3/unfs3/no-yywrap.patch14
-rw-r--r--meta/recipes-devtools/unfs3/unfs3/relative_max_socket_path_len.patch74
-rw-r--r--meta/recipes-devtools/unfs3/unfs3/rename_fh_cache.patch64
-rw-r--r--meta/recipes-devtools/unfs3/unfs3/tcp_no_delay.patch56
-rw-r--r--meta/recipes-devtools/unfs3/unfs3/unfs3_parallel_build.patch37
-rw-r--r--meta/recipes-devtools/unfs3/unfs3_git.bb33
-rw-r--r--meta/recipes-devtools/unifdef/unifdef_2.12.bb2
-rw-r--r--meta/recipes-devtools/vala/vala.inc62
-rw-r--r--meta/recipes-devtools/vala/vala_0.56.1.bb3
-rw-r--r--meta/recipes-devtools/vala/vala_0.56.16.bb73
-rw-r--r--meta/recipes-devtools/valgrind/valgrind/0001-Fix-drd-tests-shared_timed_mutex.cpp.patch26
-rw-r--r--meta/recipes-devtools/valgrind/valgrind/0001-Make-local-functions-static-to-avoid-assembler-error.patch182
-rw-r--r--meta/recipes-devtools/valgrind/valgrind/0001-fix-opcode-not-supported-on-mips32-linux.patch82
-rw-r--r--meta/recipes-devtools/valgrind/valgrind/0001-makefiles-Drop-setting-mcpu-to-cortex-a8-on-arm-arch.patch2
-rw-r--r--meta/recipes-devtools/valgrind/valgrind/0001-memcheck-arm64-Define-__THROW-if-not-already-defined.patch32
-rw-r--r--meta/recipes-devtools/valgrind/valgrind/0001-valgrind-monitor.py-regular-expressions-should-use-r.patch64
-rw-r--r--meta/recipes-devtools/valgrind/valgrind/0002-Bug-476548-valgrind-3.22.0-fails-on-assertion-when-l.patch147
-rw-r--r--meta/recipes-devtools/valgrind/valgrind/0002-context-APIs-are-not-available-on-musl.patch92
-rw-r--r--meta/recipes-devtools/valgrind/valgrind/0002-memcheck-x86-Define-__THROW-if-not-defined.patch32
-rw-r--r--meta/recipes-devtools/valgrind/valgrind/0003-Add-fchmodat2-syscall-on-linux.patch221
-rw-r--r--meta/recipes-devtools/valgrind/valgrind/0003-tests-seg_override-Replace-__modify_ldt-with-syscall.patch68
-rw-r--r--meta/recipes-devtools/valgrind/valgrind/0004-Bug-478624-Valgrind-incompatibility-with-binutils-2..patch137
-rw-r--r--meta/recipes-devtools/valgrind/valgrind/0005-Modify-vg_test-wrapper-to-support-PTEST-formats.patch52
-rw-r--r--meta/recipes-devtools/valgrind/valgrind/Added-support-for-PPC-instructions-mfatbu-mfatbl.patch8
-rw-r--r--meta/recipes-devtools/valgrind/valgrind/avoid-neon-for-targets-which-don-t-support-it.patch2
-rw-r--r--meta/recipes-devtools/valgrind/valgrind/fixed-perl-path.patch38
-rw-r--r--meta/recipes-devtools/valgrind/valgrind/remove-for-aarch64243
-rw-r--r--meta/recipes-devtools/valgrind/valgrind/remove-for-all21
-rw-r--r--meta/recipes-devtools/valgrind/valgrind/use-appropriate-march-mcpu-mfpu-for-ARM-test-apps.patch2
-rw-r--r--meta/recipes-devtools/valgrind/valgrind_3.19.0.bb255
-rw-r--r--meta/recipes-devtools/valgrind/valgrind_3.22.0.bb276
-rw-r--r--meta/recipes-devtools/xmlto/xmlto-0.0.28/configure.in-drop-the-test-of-xmllint-and-xsltproc.patch30
-rw-r--r--meta/recipes-devtools/xmlto/xmlto_0.0.28.bb21
-rw-r--r--meta/recipes-example/rust-hello-world/rust-hello-world/0001-enable-LTO.patch24
-rw-r--r--meta/recipes-example/rust-hello-world/rust-hello-world_git.bb19
-rw-r--r--meta/recipes-extended/acpica/acpica_20220331.bb51
-rw-r--r--meta/recipes-extended/acpica/acpica_20240322.bb49
-rw-r--r--meta/recipes-extended/asciidoc/asciidoc_10.1.4.bb28
-rw-r--r--meta/recipes-extended/asciidoc/asciidoc_10.2.0.bb28
-rw-r--r--meta/recipes-extended/at/at_3.2.5.bb8
-rw-r--r--meta/recipes-extended/baremetal-example/baremetal-helloworld_git.bb72
-rw-r--r--meta/recipes-extended/bash/bash.inc16
-rw-r--r--meta/recipes-extended/bash/bash/0001-changes-to-SIGINT-handler-while-waiting-for-a-child-.patch226
-rw-r--r--meta/recipes-extended/bash/bash/execute_cmd.patch28
-rw-r--r--meta/recipes-extended/bash/bash/makerace.patch52
-rw-r--r--meta/recipes-extended/bash/bash/makerace2.patch98
-rw-r--r--meta/recipes-extended/bash/bash/use_aclocal.patch27
-rw-r--r--meta/recipes-extended/bash/bash_5.1.16.bb25
-rw-r--r--meta/recipes-extended/bash/bash_5.2.21.bb23
-rw-r--r--meta/recipes-extended/bc/bc/run-ptest9
-rw-r--r--meta/recipes-extended/bc/bc_1.07.1.bb11
-rw-r--r--meta/recipes-extended/bzip2/bzip2_1.0.8.bb2
-rw-r--r--meta/recipes-extended/cpio/cpio-2.13/0001-Unset-need_charset_alias-when-building-for-musl.patch30
-rw-r--r--meta/recipes-extended/cpio/cpio-2.13/0002-src-global.c-Remove-superfluous-declaration-of-progr.patch28
-rw-r--r--meta/recipes-extended/cpio/cpio-2.13/CVE-2021-38185.patch581
-rw-r--r--meta/recipes-extended/cpio/cpio_2.13.bb55
-rw-r--r--meta/recipes-extended/cpio/cpio_2.15.bb87
-rwxr-xr-xmeta/recipes-extended/cpio/files/run-ptest3
-rw-r--r--meta/recipes-extended/cpio/files/test.sh10
-rw-r--r--meta/recipes-extended/cracklib/cracklib/0001-packlib.c-support-dictionary-byte-order-dependent.patch28
-rw-r--r--meta/recipes-extended/cracklib/cracklib/0002-craklib-fix-testnum-and-teststr-failed.patch53
-rw-r--r--meta/recipes-extended/cracklib/cracklib_2.9.11.bb33
-rw-r--r--meta/recipes-extended/cracklib/cracklib_2.9.7.bb33
-rw-r--r--meta/recipes-extended/cronie/cronie/crond_pam_config.patch10
-rw-r--r--meta/recipes-extended/cronie/cronie_1.6.1.bb83
-rw-r--r--meta/recipes-extended/cronie/cronie_1.7.1.bb85
-rw-r--r--meta/recipes-extended/cups/cups.inc49
-rw-r--r--meta/recipes-extended/cups/cups_2.4.1.bb5
-rw-r--r--meta/recipes-extended/cups/cups_2.4.7.bb5
-rw-r--r--meta/recipes-extended/diffutils/diffutils/0001-Skip-strip-trailing-cr-test-case.patch24
-rw-r--r--meta/recipes-extended/diffutils/diffutils/0001-mcontext-is-not-a-standard-layout-so-glibc-and-musl-.patch33
-rw-r--r--meta/recipes-extended/diffutils/diffutils_3.10.bb43
-rw-r--r--meta/recipes-extended/diffutils/diffutils_3.8.bb44
-rw-r--r--meta/recipes-extended/ed/ed_1.18.bb38
-rw-r--r--meta/recipes-extended/ed/ed_1.20.1.bb38
-rw-r--r--meta/recipes-extended/ethtool/ethtool/avoid_parallel_tests.patch8
-rw-r--r--meta/recipes-extended/ethtool/ethtool_5.17.bb37
-rw-r--r--meta/recipes-extended/ethtool/ethtool_6.7.bb37
-rw-r--r--meta/recipes-extended/findutils/findutils.inc1
-rw-r--r--meta/recipes-extended/findutils/findutils/autoconf-2.73.patch24
-rw-r--r--meta/recipes-extended/findutils/findutils_4.9.0.bb3
-rw-r--r--meta/recipes-extended/gawk/gawk/remove-sensitive-tests.patch24
-rw-r--r--meta/recipes-extended/gawk/gawk/run-ptest7
-rw-r--r--meta/recipes-extended/gawk/gawk_5.1.1.bb64
-rw-r--r--meta/recipes-extended/gawk/gawk_5.3.0.bb87
-rw-r--r--meta/recipes-extended/ghostscript/files/do-not-check-local-libpng-source.patch31
-rw-r--r--meta/recipes-extended/ghostscript/ghostscript/avoid-host-contamination.patch14
-rw-r--r--meta/recipes-extended/ghostscript/ghostscript/base-genht.c-add-a-preprocessor-define-to-allow-fope.patch42
-rw-r--r--meta/recipes-extended/ghostscript/ghostscript/cups-no-gcrypt.patch37
-rw-r--r--meta/recipes-extended/ghostscript/ghostscript/ghostscript-9.15-parallel-make.patch28
-rw-r--r--meta/recipes-extended/ghostscript/ghostscript/ghostscript-9.21-native-fix-disable-system-libtiff.patch47
-rw-r--r--meta/recipes-extended/ghostscript/ghostscript/ghostscript-9.21-prevent_recompiling.patch78
-rw-r--r--meta/recipes-extended/ghostscript/ghostscript/mkdir-p.patch50
-rw-r--r--meta/recipes-extended/ghostscript/ghostscript_10.03.0.bb75
-rw-r--r--meta/recipes-extended/ghostscript/ghostscript_9.56.1.bb129
-rw-r--r--meta/recipes-extended/go-examples/go-helloworld_0.1.bb4
-rw-r--r--meta/recipes-extended/gperf/gperf/0001-Make-the-code-C-17-compliant.patch29
-rw-r--r--meta/recipes-extended/gperf/gperf/1862c6e57a308a05889c80c048dbc58bdc378dcb.patch181
-rw-r--r--meta/recipes-extended/gperf/gperf_3.1.bb7
-rw-r--r--meta/recipes-extended/grep/grep/0001-mcontext-is-not-a-standard-layout-so-glibc-and-musl-.patch32
-rw-r--r--meta/recipes-extended/grep/grep_3.11.bb46
-rw-r--r--meta/recipes-extended/grep/grep_3.7.bb48
-rw-r--r--meta/recipes-extended/groff/files/0001-Include-config.h.patch212
-rw-r--r--meta/recipes-extended/groff/files/0001-Make-manpages-mulitlib-identical.patch22
-rw-r--r--meta/recipes-extended/groff/files/0001-build-Fix-Savannah-64681-webpage.ps-deps.patch51
-rw-r--r--meta/recipes-extended/groff/files/0001-build-meintro_fr.ps-depends-on-tbl.patch31
-rw-r--r--meta/recipes-extended/groff/files/0001-replace-perl-w-with-use-warnings.patch106
-rw-r--r--meta/recipes-extended/groff/files/0001-support-musl.patch41
-rw-r--r--meta/recipes-extended/groff/files/groff-not-search-fonts-on-build-host.patch37
-rw-r--r--meta/recipes-extended/groff/groff_1.22.4.bb80
-rw-r--r--meta/recipes-extended/groff/groff_1.23.0.bb78
-rw-r--r--meta/recipes-extended/gzip/gzip-1.13/wrong-path-fix.patch (renamed from meta/recipes-extended/gzip/gzip-1.12/wrong-path-fix.patch)0
-rw-r--r--meta/recipes-extended/gzip/gzip.inc1
-rw-r--r--meta/recipes-extended/gzip/gzip_1.12.bb41
-rw-r--r--meta/recipes-extended/gzip/gzip_1.13.bb41
-rw-r--r--meta/recipes-extended/hdparm/hdparm_9.63.bb43
-rw-r--r--meta/recipes-extended/hdparm/hdparm_9.65.bb43
-rw-r--r--meta/recipes-extended/images/core-image-full-cmdline.bb4
-rw-r--r--meta/recipes-extended/images/core-image-testcontroller-initramfs.bb3
-rw-r--r--meta/recipes-extended/images/core-image-testcontroller.bb2
-rw-r--r--meta/recipes-extended/iptables/iptables/0001-Makefile.am-do-not-install-etc-ethertypes.patch40
-rw-r--r--meta/recipes-extended/iptables/iptables/0001-configure-Add-option-to-enable-disable-libnfnetlink.patch18
-rw-r--r--meta/recipes-extended/iptables/iptables/0001-iptables-xshared.h-add-missing-sys.types.h-include.patch30
-rw-r--r--meta/recipes-extended/iptables/iptables/0002-configure.ac-only-check-conntrack-when-libnfnetlink-enabled.patch51
-rw-r--r--meta/recipes-extended/iptables/iptables/0002-iptables-xshared.h-add-missing-sys.types.h-include.patch31
-rw-r--r--meta/recipes-extended/iptables/iptables/0004-configure.ac-only-check-conntrack-when-libnfnetlink-.patch49
-rw-r--r--meta/recipes-extended/iptables/iptables/format-security.patch30
-rw-r--r--meta/recipes-extended/iptables/iptables_1.8.10.bb123
-rw-r--r--meta/recipes-extended/iptables/iptables_1.8.8.bb123
-rw-r--r--meta/recipes-extended/iputils/iputils/0001-rarpd-rdisc-Drop-PrivateUsers.patch27
-rw-r--r--meta/recipes-extended/iputils/iputils_20211215.bb66
-rw-r--r--meta/recipes-extended/iputils/iputils_20240117.bb48
-rw-r--r--meta/recipes-extended/less/files/run-ptest3
-rw-r--r--meta/recipes-extended/less/less_600.bb42
-rw-r--r--meta/recipes-extended/less/less_643.bb61
-rw-r--r--meta/recipes-extended/libarchive/libarchive/configurehack.patch49
-rw-r--r--meta/recipes-extended/libarchive/libarchive_3.6.1.bb67
-rw-r--r--meta/recipes-extended/libarchive/libarchive_3.7.2.bb67
-rw-r--r--meta/recipes-extended/libidn/libidn2_2.3.2.bb32
-rw-r--r--meta/recipes-extended/libidn/libidn2_2.3.7.bb33
-rw-r--r--meta/recipes-extended/libmnl/libmnl_1.0.5.bb4
-rw-r--r--meta/recipes-extended/libnsl/libnsl2_git.bb4
-rw-r--r--meta/recipes-extended/libnss-nis/libnss-nis.bb4
-rw-r--r--meta/recipes-extended/libpipeline/libpipeline/autoconf-2.73.patch24
-rw-r--r--meta/recipes-extended/libpipeline/libpipeline_1.5.6.bb14
-rw-r--r--meta/recipes-extended/libpipeline/libpipeline_1.5.7.bb15
-rw-r--r--meta/recipes-extended/libsolv/libsolv_0.7.22.bb33
-rw-r--r--meta/recipes-extended/libsolv/libsolv_0.7.28.bb33
-rw-r--r--meta/recipes-extended/libtirpc/libtirpc/ipv6.patch52
-rw-r--r--meta/recipes-extended/libtirpc/libtirpc_1.3.2.bb25
-rw-r--r--meta/recipes-extended/libtirpc/libtirpc_1.3.4.bb33
-rw-r--r--meta/recipes-extended/lighttpd/lighttpd/lighttpd19
-rw-r--r--meta/recipes-extended/lighttpd/lighttpd/lighttpd.conf38
-rw-r--r--meta/recipes-extended/lighttpd/lighttpd_1.4.64.bb79
-rw-r--r--meta/recipes-extended/lighttpd/lighttpd_1.4.75.bb76
-rwxr-xr-xmeta/recipes-extended/logrotate/logrotate/run-ptest5
-rw-r--r--meta/recipes-extended/logrotate/logrotate_3.19.0.bb91
-rw-r--r--meta/recipes-extended/logrotate/logrotate_3.21.0.bb114
-rw-r--r--meta/recipes-extended/lsb/lsb-release_1.4.bb3
-rw-r--r--meta/recipes-extended/lsof/files/lsof-remove-host-information.patch75
-rw-r--r--meta/recipes-extended/lsof/files/remove-host-information.patch123
-rw-r--r--meta/recipes-extended/lsof/lsof_4.95.0.bb46
-rw-r--r--meta/recipes-extended/lsof/lsof_4.99.3.bb23
-rw-r--r--meta/recipes-extended/ltp/ltp/0001-Remove-OOM-tests-from-runtest-mm.patch6
-rw-r--r--meta/recipes-extended/ltp/ltp/0001-metadata-parse.sh-sort-filelist-for-reproducibility.patch28
-rw-r--r--meta/recipes-extended/ltp/ltp/0001-scenario_groups-default-remove-connectors.patch34
-rw-r--r--meta/recipes-extended/ltp/ltp/disable_hanging_tests.patch27
-rw-r--r--meta/recipes-extended/ltp/ltp_20220121.bb138
-rw-r--r--meta/recipes-extended/ltp/ltp_20240129.bb144
-rw-r--r--meta/recipes-extended/lzip/lzip_1.23.bb42
-rw-r--r--meta/recipes-extended/lzip/lzip_1.24.1.bb42
-rw-r--r--meta/recipes-extended/lzip/lzlib_1.14.bb39
-rw-r--r--meta/recipes-extended/man-db/files/0001-man-Move-local-variable-declaration-to-function-scop.patch9
-rw-r--r--meta/recipes-extended/man-db/files/man_db.conf-avoid-multilib-install-file-conflict.patch16
-rw-r--r--meta/recipes-extended/man-db/man-db_2.10.2.bb68
-rw-r--r--meta/recipes-extended/man-db/man-db_2.12.1.bb72
-rw-r--r--meta/recipes-extended/man-pages/man-pages/0001-GNUmakefile-use-env-from-PATH.patch31
-rw-r--r--meta/recipes-extended/man-pages/man-pages/0001-man.ml-do-not-use-dev-stdin.patch33
-rw-r--r--meta/recipes-extended/man-pages/man-pages_5.13.bb37
-rw-r--r--meta/recipes-extended/man-pages/man-pages_6.06.bb50
-rw-r--r--meta/recipes-extended/mc/files/0001-mc-replace-perl-w-with-use-warnings.patch129
-rw-r--r--meta/recipes-extended/mc/files/nomandate.patch7
-rw-r--r--meta/recipes-extended/mc/mc_4.8.28.bb60
-rw-r--r--meta/recipes-extended/mc/mc_4.8.31.bb57
-rw-r--r--meta/recipes-extended/mdadm/files/0001-Fix-parsing-of-r-in-monitor-manager-mode.patch74
-rw-r--r--meta/recipes-extended/mdadm/files/0001-Fix-the-path-of-corosync-and-dlm-header-files-check.patch7
-rw-r--r--meta/recipes-extended/mdadm/files/0001-Makefile-install-mdcheck.patch9
-rw-r--r--meta/recipes-extended/mdadm/files/0001-Revert-tests-wait-for-complete-rebuild-in-integrity-.patch53
-rw-r--r--meta/recipes-extended/mdadm/files/0001-Use-CC-to-check-for-implicit-fallthrough-warning-sup.patch36
-rw-r--r--meta/recipes-extended/mdadm/files/0001-fix-gcc-8-format-truncation-warning.patch7
-rw-r--r--meta/recipes-extended/mdadm/files/0001-include-libgen.h-for-basename-API.patch56
-rw-r--r--meta/recipes-extended/mdadm/files/0001-mdadm-add-option-y-for-use-syslog-to-recive-event-re.patch9
-rw-r--r--meta/recipes-extended/mdadm/files/0001-mdadm-skip-test-11spare-migration.patch43
-rw-r--r--meta/recipes-extended/mdadm/files/0001-mdadm.h-Undefine-dprintf-before-redefining.patch6
-rw-r--r--meta/recipes-extended/mdadm/files/0001-restripe.c-Use-_FILE_OFFSET_BITS-to-enable-largefile.patch155
-rw-r--r--meta/recipes-extended/mdadm/files/0001-util.c-add-limits.h-include-for-NAME_MAX-definition.patch24
-rw-r--r--meta/recipes-extended/mdadm/files/0002-Create.c-include-linux-falloc.h-for-FALLOC_FL_ZERO_R.patch27
-rw-r--r--meta/recipes-extended/mdadm/files/debian-no-Werror.patch11
-rw-r--r--meta/recipes-extended/mdadm/files/include_sysmacros.patch14
-rw-r--r--meta/recipes-extended/mdadm/files/mdadm-3.3.2_x32_abi_time_t.patch7
-rw-r--r--meta/recipes-extended/mdadm/files/run-ptest11
-rw-r--r--meta/recipes-extended/mdadm/mdadm_4.2.bb108
-rw-r--r--meta/recipes-extended/mdadm/mdadm_4.3.bb127
-rw-r--r--meta/recipes-extended/mingetty/mingetty_1.08.bb1
-rw-r--r--meta/recipes-extended/minicom/minicom/0001-Drop-superfluous-global-variable-definitions.patch35
-rw-r--r--meta/recipes-extended/minicom/minicom/0001-fix-minicom-h-v-return-value-is-not-0.patch33
-rw-r--r--meta/recipes-extended/minicom/minicom/0002-Drop-superfluous-global-variable-definitions.patch37
-rw-r--r--meta/recipes-extended/minicom/minicom/0003-Drop-superfluous-global-variable-definitions.patch42
-rw-r--r--meta/recipes-extended/minicom/minicom/allow.to.disable.lockdev.patch30
-rw-r--r--meta/recipes-extended/minicom/minicom_2.8.bb28
-rw-r--r--meta/recipes-extended/minicom/minicom_2.9.bb25
-rw-r--r--meta/recipes-extended/msmtp/msmtp_1.8.20.bb27
-rw-r--r--meta/recipes-extended/msmtp/msmtp_1.8.25.bb27
-rw-r--r--meta/recipes-extended/newt/files/0001-detect-gold-as-GNU-linker-too.patch14
-rw-r--r--meta/recipes-extended/newt/files/0002-don-t-ignore-CFLAGS-when-building-snack.patch29
-rw-r--r--meta/recipes-extended/newt/libnewt_0.52.21.bb58
-rw-r--r--meta/recipes-extended/newt/libnewt_0.52.24.bb56
-rw-r--r--meta/recipes-extended/packagegroups/packagegroup-core-full-cmdline.bb3
-rw-r--r--meta/recipes-extended/pam/libpam/0001-pam_namespace-include-stdint-h.patch42
-rw-r--r--meta/recipes-extended/pam/libpam/0001-run-xtests.sh-check-whether-files-exist.patch65
-rw-r--r--meta/recipes-extended/pam/libpam/99_pam2
-rw-r--r--meta/recipes-extended/pam/libpam/libpam-xtests.patch22
-rw-r--r--meta/recipes-extended/pam/libpam_1.5.2.bb185
-rw-r--r--meta/recipes-extended/pam/libpam_1.6.0.bb191
-rw-r--r--meta/recipes-extended/parted/files/0001-fs-Add-libuuid-to-linker-flags-for-libparted-fs-resi.patch34
-rw-r--r--meta/recipes-extended/parted/files/autoconf-2.73.patch22
-rw-r--r--meta/recipes-extended/parted/parted_3.5.bb60
-rw-r--r--meta/recipes-extended/parted/parted_3.6.bb62
-rw-r--r--meta/recipes-extended/perl/libconvert-asn1-perl_0.33.bb21
-rw-r--r--meta/recipes-extended/perl/libconvert-asn1-perl_0.34.bb22
-rw-r--r--meta/recipes-extended/perl/libxml-sax-perl_1.02.bb2
-rw-r--r--meta/recipes-extended/pigz/pigz_2.7.bb48
-rw-r--r--meta/recipes-extended/pigz/pigz_2.8.bb48
-rw-r--r--meta/recipes-extended/procps/procps/0001-w.c-correct-musl-builds.patch44
-rw-r--r--meta/recipes-extended/procps/procps/0002-proc-escape.c-add-missing-include.patch23
-rw-r--r--meta/recipes-extended/procps/procps_3.3.17.bb103
-rw-r--r--meta/recipes-extended/procps/procps_4.0.4.bb97
-rw-r--r--meta/recipes-extended/psmisc/psmisc.inc3
-rw-r--r--meta/recipes-extended/psmisc/psmisc/0001-Use-UINTPTR_MAX-instead-of-__WORDSIZE.patch6
-rw-r--r--meta/recipes-extended/psmisc/psmisc_23.5.bb9
-rw-r--r--meta/recipes-extended/psmisc/psmisc_23.7.bb9
-rw-r--r--meta/recipes-extended/quota/quota/0001-quota-Use-realloc-3-instead-of-reallocarray-3.patch34
-rw-r--r--meta/recipes-extended/quota/quota/fcntl.patch27
-rw-r--r--meta/recipes-extended/quota/quota_4.06.bb35
-rw-r--r--meta/recipes-extended/quota/quota_4.09.bb34
-rw-r--r--meta/recipes-extended/rpcbind/rpcbind_1.2.6.bb2
-rw-r--r--meta/recipes-extended/rpcsvc-proto/rpcsvc-proto.bb6
-rw-r--r--meta/recipes-extended/rpcsvc-proto/rpcsvc-proto/0001-Use-cross-compiled-rpcgen.patch11
-rw-r--r--meta/recipes-extended/screen/screen_4.9.0.bb49
-rw-r--r--meta/recipes-extended/screen/screen_4.9.1.bb49
-rw-r--r--meta/recipes-extended/sed/sed_4.8.bb68
-rw-r--r--meta/recipes-extended/sed/sed_4.9.bb67
-rw-r--r--meta/recipes-extended/shadow/files/0001-Disable-use-of-syslog-for-sysroot.patch52
-rw-r--r--meta/recipes-extended/shadow/files/0001-lib-copydir-copy_entry-use-temporary-stat-buffer.patch37
-rw-r--r--meta/recipes-extended/shadow/files/commonio.c-fix-unexpected-open-failure-in-chroot-env.patch13
-rw-r--r--meta/recipes-extended/shadow/files/login.defs_shadow-sysroot1
-rw-r--r--meta/recipes-extended/shadow/files/pam.d/login4
-rw-r--r--meta/recipes-extended/shadow/files/securetty1
-rw-r--r--meta/recipes-extended/shadow/files/shadow-4.1.3-dots-in-usernames.patch27
-rw-r--r--meta/recipes-extended/shadow/files/shadow-relaxed-usernames.patch111
-rw-r--r--meta/recipes-extended/shadow/files/shadow-update-pam-conf.patch83
-rw-r--r--meta/recipes-extended/shadow/shadow-securetty_4.6.bb1
-rw-r--r--meta/recipes-extended/shadow/shadow-sysroot_4.6.bb3
-rw-r--r--meta/recipes-extended/shadow/shadow.inc51
-rw-r--r--meta/recipes-extended/shadow/shadow_4.11.1.bb11
-rw-r--r--meta/recipes-extended/shadow/shadow_4.15.0.bb10
-rw-r--r--meta/recipes-extended/slang/slang/dont-link-to-host.patch23
-rw-r--r--meta/recipes-extended/slang/slang/terminfo_fixes.patch28
-rw-r--r--meta/recipes-extended/slang/slang_2.3.2.bb84
-rw-r--r--meta/recipes-extended/slang/slang_2.3.3.bb83
-rw-r--r--meta/recipes-extended/stress-ng/stress-ng_0.14.01.bb29
-rw-r--r--meta/recipes-extended/stress-ng/stress-ng_0.17.06.bb36
-rw-r--r--meta/recipes-extended/sudo/files/0001-lib-util-mksigname.c-correctly-include-header-for-ou.patch25
-rw-r--r--meta/recipes-extended/sudo/files/0001-sudo.conf.in-fix-conflict-with-multilib.patch21
-rw-r--r--meta/recipes-extended/sudo/sudo.inc9
-rw-r--r--meta/recipes-extended/sudo/sudo_1.9.10.bb62
-rw-r--r--meta/recipes-extended/sudo/sudo_1.9.15p5.bb61
-rw-r--r--meta/recipes-extended/sysklogd/sysklogd_2.3.0.bb56
-rw-r--r--meta/recipes-extended/sysklogd/sysklogd_2.5.2.bb56
-rw-r--r--meta/recipes-extended/sysstat/sysstat.inc71
-rw-r--r--meta/recipes-extended/sysstat/sysstat/0001-configure.in-remove-check-for-chkconfig.patch10
-rw-r--r--meta/recipes-extended/sysstat/sysstat_12.4.5.bb7
-rw-r--r--meta/recipes-extended/sysstat/sysstat_12.7.5.bb80
-rw-r--r--meta/recipes-extended/tar/tar/0001-tests-fix-TESTSUITE_AT.patch228
-rw-r--r--meta/recipes-extended/tar/tar/0002-tests-check-for-recently-fixed-bug.patch60
-rw-r--r--meta/recipes-extended/tar/tar/0003-Exclude-VCS-directory-with-writing-from-an-archive.patch112
-rw-r--r--meta/recipes-extended/tar/tar/run-ptest14
-rw-r--r--meta/recipes-extended/tar/tar_1.34.bb68
-rw-r--r--meta/recipes-extended/tar/tar_1.35.bb102
-rw-r--r--meta/recipes-extended/tcp-wrappers/tcp-wrappers-7.6/0001-Fix-implicit-function-declaration-warnings.patch114
-rw-r--r--meta/recipes-extended/tcp-wrappers/tcp-wrappers_7.6.bb4
-rw-r--r--meta/recipes-extended/texinfo/texinfo/0001-gnulib-Update.patch11765
-rw-r--r--meta/recipes-extended/texinfo/texinfo/0001-texinfo-several-changes-to-build-without-zlib-and-nc.patch55
-rw-r--r--meta/recipes-extended/texinfo/texinfo/0002-dont-depend-on-help2man.patch68
-rw-r--r--meta/recipes-extended/texinfo/texinfo/0003-texinfo-Update-to-5.1.patch28
-rw-r--r--meta/recipes-extended/texinfo/texinfo/disable-native-tools.patch43
-rw-r--r--meta/recipes-extended/texinfo/texinfo/dont-depend-on-help2man.patch66
-rw-r--r--meta/recipes-extended/texinfo/texinfo/link-zip.patch23
-rw-r--r--meta/recipes-extended/texinfo/texinfo/use_host_makedoc.patch17
-rw-r--r--meta/recipes-extended/texinfo/texinfo_6.8.bb90
-rw-r--r--meta/recipes-extended/texinfo/texinfo_7.1.bb90
-rw-r--r--meta/recipes-extended/time/time/0001-include-string.h-for-memset.patch27
-rw-r--r--meta/recipes-extended/time/time_1.9.bb4
-rw-r--r--meta/recipes-extended/timezone/timezone.inc13
-rw-r--r--meta/recipes-extended/timezone/tzcode-native.bb3
-rw-r--r--meta/recipes-extended/timezone/tzdata.bb16
-rw-r--r--meta/recipes-extended/unzip/unzip/0001-configure-Add-correct-system-headers-and-prototypes-.patch112
-rw-r--r--meta/recipes-extended/unzip/unzip/0001-configure-Pass-LDFLAGS-to-tests-doing-link-step.patch2
-rw-r--r--meta/recipes-extended/unzip/unzip/0001-unix-configure-fix-detection-for-cross-compilation.patch103
-rw-r--r--meta/recipes-extended/unzip/unzip/CVE-2021-4217.patch2
-rw-r--r--meta/recipes-extended/unzip/unzip/CVE-2022-0529.patch39
-rw-r--r--meta/recipes-extended/unzip/unzip/CVE-2022-0530.patch33
-rw-r--r--meta/recipes-extended/unzip/unzip/avoid-strip.patch2
-rw-r--r--meta/recipes-extended/unzip/unzip/define-ldflags.patch2
-rw-r--r--meta/recipes-extended/unzip/unzip/fix-security-format.patch2
-rw-r--r--meta/recipes-extended/unzip/unzip/symlink.patch2
-rw-r--r--meta/recipes-extended/unzip/unzip_6.0.bb8
-rw-r--r--meta/recipes-extended/watchdog/watchdog-config.bb7
-rw-r--r--meta/recipes-extended/watchdog/watchdog/0001-shutdown-Do-not-guard-sys-quota.h-sys-swap.h-and-sys.patch37
-rw-r--r--meta/recipes-extended/watchdog/watchdog_5.16.bb6
-rw-r--r--meta/recipes-extended/wget/wget.inc4
-rw-r--r--meta/recipes-extended/wget/wget/0002-improve-reproducibility.patch9
-rw-r--r--meta/recipes-extended/wget/wget_1.21.3.bb7
-rw-r--r--meta/recipes-extended/wget/wget_1.24.5.bb7
-rw-r--r--meta/recipes-extended/which/which_2.21.bb1
-rw-r--r--meta/recipes-extended/xdg-utils/xdg-utils/CVE-2022-4055.patch145
-rw-r--r--meta/recipes-extended/xdg-utils/xdg-utils_1.1.3.bb1
-rw-r--r--meta/recipes-extended/xinetd/xinetd_2.3.15.4.bb9
-rw-r--r--meta/recipes-extended/xz/xz/CVE-2022-1271.patch96
-rw-r--r--meta/recipes-extended/xz/xz/run-ptest26
-rw-r--r--meta/recipes-extended/xz/xz_5.2.5.bb47
-rw-r--r--meta/recipes-extended/xz/xz_5.4.6.bb69
-rw-r--r--meta/recipes-extended/zip/zip-3.0/0001-configure-Specify-correct-function-signatures-and-de.patch134
-rw-r--r--meta/recipes-extended/zip/zip-3.0/0001-configure-Use-CFLAGS-and-LDFLAGS-when-doing-link-tes.patch2
-rw-r--r--meta/recipes-extended/zip/zip-3.0/0001-unix-configure-use-_Static_assert-to-do-correct-dete.patch96
-rw-r--r--meta/recipes-extended/zip/zip-3.0/0002-unix.c-Do-not-redefine-DIR-as-FILE.patch35
-rw-r--r--meta/recipes-extended/zip/zip-3.0/10-remove-build-date.patch2
-rw-r--r--meta/recipes-extended/zip/zip-3.0/fix-security-format.patch2
-rw-r--r--meta/recipes-extended/zip/zip-3.0/zipnote-crashes-with-segfault.patch2
-rw-r--r--meta/recipes-extended/zip/zip_3.0.bb11
-rw-r--r--meta/recipes-extended/zstd/zstd/0001-pzstd-use-directly-for-the-test-c-snippet.patch39
-rw-r--r--meta/recipes-extended/zstd/zstd_1.5.2.bb43
-rw-r--r--meta/recipes-extended/zstd/zstd_1.5.5.bb47
-rw-r--r--meta/recipes-gnome/epiphany/epiphany_42.2.bb43
-rw-r--r--meta/recipes-gnome/epiphany/epiphany_46.0.bb43
-rw-r--r--meta/recipes-gnome/epiphany/files/0002-help-meson.build-disable-the-use-of-yelp.patch3
-rw-r--r--meta/recipes-gnome/epiphany/files/distributor.patch17
-rw-r--r--meta/recipes-gnome/epiphany/files/migrator.patch18
-rw-r--r--meta/recipes-gnome/gcr/gcr/0001-gcr-meson.build-fix-one-parallel-build-failure.patch37
-rw-r--r--meta/recipes-gnome/gcr/gcr/b3ca1d02bb0148ca787ac4aead164d7c8ce2c4d8.patch61
-rw-r--r--meta/recipes-gnome/gcr/gcr_3.40.0.bb44
-rw-r--r--meta/recipes-gnome/gcr/gcr_4.2.1.bb58
-rw-r--r--meta/recipes-gnome/gdk-pixbuf/gdk-pixbuf/0001-Add-use_prebuilt_tools-option.patch171
-rw-r--r--meta/recipes-gnome/gdk-pixbuf/gdk-pixbuf/0001-meson.build-allow-a-subset-of-tests-in-cross-compile.patch66
-rw-r--r--meta/recipes-gnome/gdk-pixbuf/gdk-pixbuf/fatal-loader.patch22
-rw-r--r--meta/recipes-gnome/gdk-pixbuf/gdk-pixbuf_2.42.10.bb119
-rw-r--r--meta/recipes-gnome/gdk-pixbuf/gdk-pixbuf_2.42.8.bb128
-rw-r--r--meta/recipes-gnome/gi-docgen/gi-docgen_2023.3.bb21
-rw-r--r--meta/recipes-gnome/gi-docgen/gi-docgen_git.bb22
-rw-r--r--meta/recipes-gnome/gnome/adwaita-icon-theme/0001-Don-t-use-AC_CANONICAL_HOST.patch27
-rw-r--r--meta/recipes-gnome/gnome/adwaita-icon-theme/0001-Run-installation-commands-as-shell-jobs.patch84
-rw-r--r--meta/recipes-gnome/gnome/adwaita-icon-theme_41.0.bb43
-rw-r--r--meta/recipes-gnome/gnome/adwaita-icon-theme_46.0.bb29
-rw-r--r--meta/recipes-gnome/gnome/gconf_3.2.6.bb3
-rw-r--r--meta/recipes-gnome/gobject-introspection/gobject-introspection/0001-Relocate-the-repository-directory-for-native-builds.patch4
-rw-r--r--meta/recipes-gnome/gobject-introspection/gobject-introspection/0001-g-ir-tool-template.in-fix-girdir-path.patch33
-rw-r--r--meta/recipes-gnome/gobject-introspection/gobject-introspection_1.72.0.bb197
-rw-r--r--meta/recipes-gnome/gobject-introspection/gobject-introspection_1.78.1.bb194
-rw-r--r--meta/recipes-gnome/gsettings-desktop-schemas/gsettings-desktop-schemas_42.0.bb16
-rw-r--r--meta/recipes-gnome/gsettings-desktop-schemas/gsettings-desktop-schemas_46.0.bb15
-rw-r--r--meta/recipes-gnome/gtk+/gtk+3.inc48
-rw-r--r--meta/recipes-gnome/gtk+/gtk+3/0002-Do-not-try-to-initialize-GL-without-libGL.patch57
-rw-r--r--meta/recipes-gnome/gtk+/gtk+3/0003-Add-disable-opengl-configure-option.patch872
-rw-r--r--meta/recipes-gnome/gtk+/gtk+3/link_fribidi.patch19
-rw-r--r--meta/recipes-gnome/gtk+/gtk+3/opengl.patch738
-rw-r--r--meta/recipes-gnome/gtk+/gtk+3_3.24.33.bb17
-rw-r--r--meta/recipes-gnome/gtk+/gtk+3_3.24.41.bb17
-rw-r--r--meta/recipes-gnome/gtk+/gtk4_4.14.1.bb130
-rw-r--r--meta/recipes-gnome/gtk-doc/files/0001-Don-t-use-docdir-from-environment.patch24
-rw-r--r--meta/recipes-gnome/gtk-doc/gtk-doc_1.33.2.bb7
-rw-r--r--meta/recipes-gnome/json-glib/json-glib_1.6.6.bb32
-rw-r--r--meta/recipes-gnome/json-glib/json-glib_1.8.0.bb31
-rw-r--r--meta/recipes-gnome/libadwaita/libadwaita_1.5.0.bb27
-rw-r--r--meta/recipes-gnome/libdazzle/libdazzle_3.44.0.bb1
-rw-r--r--meta/recipes-gnome/libgudev/libgudev/0001-meson-Pass-export-dynamic-option-to-linker.patch38
-rw-r--r--meta/recipes-gnome/libgudev/libgudev_237.bb31
-rw-r--r--meta/recipes-gnome/libgudev/libgudev_238.bb34
-rw-r--r--meta/recipes-gnome/libhandy/libhandy_1.6.2.bb27
-rw-r--r--meta/recipes-gnome/libhandy/libhandy_1.8.3.bb27
-rw-r--r--meta/recipes-gnome/libnotify/libnotify_0.7.12.bb37
-rw-r--r--meta/recipes-gnome/libnotify/libnotify_0.8.3.bb35
-rw-r--r--meta/recipes-gnome/libportal/libportal_0.7.1.bb20
-rw-r--r--meta/recipes-gnome/librsvg/librsvg-crates.inc546
-rw-r--r--meta/recipes-gnome/librsvg/librsvg/0001-Makefile.am-pass-rust-target-to-cargo-also-when-not-.patch13
-rw-r--r--meta/recipes-gnome/librsvg/librsvg/0001-system-deps-src-lib.rs-do-not-probe-into-harcoded-li.patch51
-rw-r--r--meta/recipes-gnome/librsvg/librsvg_2.54.3.bb75
-rw-r--r--meta/recipes-gnome/librsvg/librsvg_2.57.1.bb79
-rw-r--r--meta/recipes-gnome/libsecret/libsecret_0.20.5.bb26
-rw-r--r--meta/recipes-gnome/libsecret/libsecret_0.21.4.bb25
-rw-r--r--meta/recipes-gnome/libxmlb/libxmlb/0001-xb-selftest.c-hardcode-G_TEST_SRCDIR.patch32
-rw-r--r--meta/recipes-gnome/libxmlb/libxmlb/run-ptest3
-rw-r--r--meta/recipes-gnome/libxmlb/libxmlb_0.3.17.bb25
-rw-r--r--meta/recipes-graphics/builder/builder_0.1.bb4
-rw-r--r--meta/recipes-graphics/cairo/cairo/CVE-2018-19876.patch34
-rw-r--r--meta/recipes-graphics/cairo/cairo/CVE-2019-6461.patch19
-rw-r--r--meta/recipes-graphics/cairo/cairo/CVE-2019-6462.patch20
-rw-r--r--meta/recipes-graphics/cairo/cairo/CVE-2020-35492.patch60
-rw-r--r--meta/recipes-graphics/cairo/cairo/cairo-get_bitmap_surface-bsc1036789-CVE-2017-7475.diff2
-rw-r--r--meta/recipes-graphics/cairo/cairo_1.16.0.bb102
-rw-r--r--meta/recipes-graphics/cairo/cairo_1.18.0.bb86
-rw-r--r--meta/recipes-graphics/drm/libdrm_2.4.110.bb60
-rw-r--r--meta/recipes-graphics/drm/libdrm_2.4.120.bb59
-rw-r--r--meta/recipes-graphics/fontconfig/fontconfig_2.14.0.bb70
-rw-r--r--meta/recipes-graphics/fontconfig/fontconfig_2.15.0.bb69
-rw-r--r--meta/recipes-graphics/freetype/freetype_2.12.1.bb44
-rw-r--r--meta/recipes-graphics/freetype/freetype_2.13.2.bb45
-rw-r--r--meta/recipes-graphics/glslang/glslang/0001-generate-glslang-pkg-config.patch19
-rw-r--r--meta/recipes-graphics/glslang/glslang_1.3.211.0.bb32
-rw-r--r--meta/recipes-graphics/glslang/glslang_1.3.280.0.bb37
-rw-r--r--meta/recipes-graphics/graphene/files/float-div.patch28
-rw-r--r--meta/recipes-graphics/graphene/graphene_1.10.8.bb29
-rw-r--r--meta/recipes-graphics/harfbuzz/harfbuzz_4.2.1.bb48
-rw-r--r--meta/recipes-graphics/harfbuzz/harfbuzz_8.3.1.bb48
-rw-r--r--meta/recipes-graphics/igt-gpu-tools/igt-gpu-tools/0001-lib-meson.build-fix-meson-0.60-compatibility.patch24
-rw-r--r--meta/recipes-graphics/igt-gpu-tools/igt-gpu-tools_git.bb12
-rw-r--r--meta/recipes-graphics/jpeg/files/0001-libjpeg-turbo-fix-package_qa-error.patch32
-rw-r--r--meta/recipes-graphics/jpeg/libjpeg-turbo_2.1.3.bb62
-rw-r--r--meta/recipes-graphics/jpeg/libjpeg-turbo_3.0.1.bb58
-rw-r--r--meta/recipes-graphics/kmscube/kmscube/0001-cube-gears-Change-header-file-to-GLES3-gl3.h.patch31
-rw-r--r--meta/recipes-graphics/kmscube/kmscube/0001-texturator-Use-correct-GL-extension-header.patch33
-rw-r--r--meta/recipes-graphics/kmscube/kmscube_git.bb14
-rw-r--r--meta/recipes-graphics/libepoxy/files/0001-dispatch_common.h-define-also-EGL_NO_X11.patch27
-rw-r--r--meta/recipes-graphics/libepoxy/libepoxy_1.5.10.bb30
-rw-r--r--meta/recipes-graphics/libepoxy/libepoxy_1.5.9.bb32
-rw-r--r--meta/recipes-graphics/libfakekey/libfakekey_git.bb4
-rw-r--r--meta/recipes-graphics/libmatchbox/libmatchbox_1.12.bb2
-rw-r--r--meta/recipes-graphics/libsdl2/libsdl2/0001-Disable-libunwind-in-native-OE-builds-by-not-looking.patch36
-rw-r--r--meta/recipes-graphics/libsdl2/libsdl2/0001-video-restore-ability-to-disable-fb-accel-via-hint.patch36
-rw-r--r--meta/recipes-graphics/libsdl2/libsdl2_2.0.22.bb83
-rw-r--r--meta/recipes-graphics/libsdl2/libsdl2_2.30.1.bb85
-rw-r--r--meta/recipes-graphics/libva/libva-initial_2.20.0.bb (renamed from meta/recipes-graphics/libva/libva-initial_2.14.0.bb)0
-rw-r--r--meta/recipes-graphics/libva/libva-utils_2.14.0.bb32
-rw-r--r--meta/recipes-graphics/libva/libva-utils_2.20.1.bb32
-rw-r--r--meta/recipes-graphics/libva/libva.inc8
-rw-r--r--meta/recipes-graphics/libva/libva_2.20.0.bb (renamed from meta/recipes-graphics/libva/libva_2.14.0.bb)0
-rw-r--r--meta/recipes-graphics/matchbox-session/matchbox-session_0.1.bb1
-rw-r--r--meta/recipes-graphics/matchbox-wm/matchbox-wm/0001-Fix-build-with-gcc-10.patch41
-rw-r--r--meta/recipes-graphics/matchbox-wm/matchbox-wm_1.2.2.bb40
-rw-r--r--meta/recipes-graphics/matchbox-wm/matchbox-wm_1.2.3.bb38
-rw-r--r--meta/recipes-graphics/mesa/files/0001-Revert-meson-do-not-pull-in-clc-for-clover.patch53
-rw-r--r--meta/recipes-graphics/mesa/files/0001-drisw-fix-build-without-dri3.patch58
-rw-r--r--meta/recipes-graphics/mesa/files/0001-futex.h-Define-__NR_futex-if-it-does-not-exist.patch34
-rw-r--r--meta/recipes-graphics/mesa/files/0001-meson-misdetects-64bit-atomics-on-mips-clang.patch3
-rw-r--r--meta/recipes-graphics/mesa/files/0001-meson.build-check-for-all-linux-host_os-combinations.patch21
-rw-r--r--meta/recipes-graphics/mesa/files/0001-util-format-Check-for-NEON-before-using-it.patch47
-rw-r--r--meta/recipes-graphics/mesa/files/0002-glxext-don-t-try-zink-if-not-enabled-in-mesa.patch42
-rw-r--r--meta/recipes-graphics/mesa/files/0002-meson.build-make-TLS-ELF-optional.patch61
-rw-r--r--meta/recipes-graphics/mesa/libglu_9.0.2.bb28
-rw-r--r--meta/recipes-graphics/mesa/libglu_9.0.3.bb31
-rw-r--r--meta/recipes-graphics/mesa/mesa-demos/0003-configure-Allow-to-disable-demos-which-require-GLEW-.patch382
-rw-r--r--meta/recipes-graphics/mesa/mesa-demos/0007-Install-few-more-test-programs.patch43
-rw-r--r--meta/recipes-graphics/mesa/mesa-demos/0008-glsl-perf-Add-few-missing-.glsl-.vert-.frag-files-to.patch99
-rw-r--r--meta/recipes-graphics/mesa/mesa-demos/0009-glsl-perf-Install-.glsl-.vert-.frag-files.patch71
-rw-r--r--meta/recipes-graphics/mesa/mesa-demos/0012-mesa-demos-OpenVG-demos-with-single-frame-need-eglSw.patch44
-rw-r--r--meta/recipes-graphics/mesa/mesa-demos_8.4.0.bb57
-rw-r--r--meta/recipes-graphics/mesa/mesa-demos_8.5.0.bb43
-rw-r--r--meta/recipes-graphics/mesa/mesa-gl_22.0.3.bb13
-rw-r--r--meta/recipes-graphics/mesa/mesa-gl_24.0.3.bb15
-rw-r--r--meta/recipes-graphics/mesa/mesa.inc135
-rw-r--r--meta/recipes-graphics/mesa/mesa_24.0.3.bb (renamed from meta/recipes-graphics/mesa/mesa_22.0.3.bb)0
-rw-r--r--meta/recipes-graphics/mini-x-session/mini-x-session_0.1.bb1
-rw-r--r--meta/recipes-graphics/packagegroups/packagegroup-core-weston.bb1
-rw-r--r--meta/recipes-graphics/packagegroups/packagegroup-core-x11-base.bb1
-rw-r--r--meta/recipes-graphics/packagegroups/packagegroup-core-x11-xserver.bb6
-rw-r--r--meta/recipes-graphics/packagegroups/packagegroup-core-x11.bb1
-rw-r--r--meta/recipes-graphics/pango/pango_1.50.7.bb52
-rw-r--r--meta/recipes-graphics/pango/pango_1.52.1.bb53
-rw-r--r--meta/recipes-graphics/piglit/piglit/0001-CMakeLists.txt-add-missing-endian.h-check.patch25
-rw-r--r--meta/recipes-graphics/piglit/piglit/0001-cmake-install-bash-completions-in-the-right-place.patch35
-rw-r--r--meta/recipes-graphics/piglit/piglit/0001-cmake-use-proper-WAYLAND_INCLUDE_DIRS-variable.patch32
-rw-r--r--meta/recipes-graphics/piglit/piglit/0001-tests-Fix-narrowing-errors-seen-with-clang.patch50
-rw-r--r--meta/recipes-graphics/piglit/piglit/0001-utils-Include-libgen.h-on-musl-linux-systems.patch83
-rw-r--r--meta/recipes-graphics/piglit/piglit/0002-cmake-use-proper-WAYLAND_INCLUDE_DIRS-variable.patch29
-rw-r--r--meta/recipes-graphics/piglit/piglit/0002-tests-util-piglit-shader.c-do-not-hardcode-build-pat.patch30
-rw-r--r--meta/recipes-graphics/piglit/piglit/0003-tests-util-piglit-shader.c-do-not-hardcode-build-pat.patch27
-rw-r--r--meta/recipes-graphics/piglit/piglit_git.bb22
-rw-r--r--meta/recipes-graphics/shaderc/files/0001-cmake-disable-building-external-dependencies.patch21
-rw-r--r--meta/recipes-graphics/shaderc/files/0002-libshaderc_util-fix-glslang-header-file-location.patch5
-rw-r--r--meta/recipes-graphics/shaderc/shaderc_2022.1.bb29
-rw-r--r--meta/recipes-graphics/shaderc/shaderc_2024.0.bb29
-rw-r--r--meta/recipes-graphics/spir/spirv-headers_1.3.211.0.bb18
-rw-r--r--meta/recipes-graphics/spir/spirv-headers_1.3.280.0.bb20
-rw-r--r--meta/recipes-graphics/spir/spirv-tools_1.3.211.0.bb41
-rw-r--r--meta/recipes-graphics/spir/spirv-tools_1.3.280.0.bb47
-rw-r--r--meta/recipes-graphics/startup-notification/startup-notification_0.12.bb1
-rw-r--r--meta/recipes-graphics/ttf-fonts/liberation-fonts_2.1.5.bb4
-rw-r--r--meta/recipes-graphics/ttf-fonts/ttf-bitstream-vera_1.10.bb4
-rw-r--r--meta/recipes-graphics/virglrenderer/virglrenderer/0001-meson.build-use-python3-directly-for-python.patch15
-rw-r--r--meta/recipes-graphics/virglrenderer/virglrenderer/cve-2022-0135.patch117
-rw-r--r--meta/recipes-graphics/virglrenderer/virglrenderer/cve-2022-0175.patch107
-rw-r--r--meta/recipes-graphics/virglrenderer/virglrenderer_0.9.1.bb25
-rw-r--r--meta/recipes-graphics/virglrenderer/virglrenderer_1.0.1.bb33
-rw-r--r--meta/recipes-graphics/vulkan/vulkan-headers_1.3.211.0.bb22
-rw-r--r--meta/recipes-graphics/vulkan/vulkan-headers_1.3.280.0.bb28
-rw-r--r--meta/recipes-graphics/vulkan/vulkan-loader_1.3.211.0.bb40
-rw-r--r--meta/recipes-graphics/vulkan/vulkan-loader_1.3.280.0.bb43
-rw-r--r--meta/recipes-graphics/vulkan/vulkan-samples/0001-CMakeLists.txt-do-not-hardcode-lib-as-installation-t.patch29
-rw-r--r--meta/recipes-graphics/vulkan/vulkan-samples/0001-Deprecate-u8string_view.patch59
-rw-r--r--meta/recipes-graphics/vulkan/vulkan-samples/0001-Do-not-use-LFS64-functions-on-linux-musl.patch37
-rw-r--r--meta/recipes-graphics/vulkan/vulkan-samples/0001-vulkan-samples-Fix-reproducibility-issue.patch43
-rw-r--r--meta/recipes-graphics/vulkan/vulkan-samples/32bit.patch101
-rw-r--r--meta/recipes-graphics/vulkan/vulkan-samples/debugfix.patch31
-rw-r--r--meta/recipes-graphics/vulkan/vulkan-samples_git.bb15
-rw-r--r--meta/recipes-graphics/vulkan/vulkan-tools_1.3.211.0.bb32
-rw-r--r--meta/recipes-graphics/vulkan/vulkan-tools_1.3.280.0.bb37
-rw-r--r--meta/recipes-graphics/vulkan/vulkan-utility-libraries_1.3.280.0.bb33
-rw-r--r--meta/recipes-graphics/vulkan/vulkan-validation-layers_1.3.280.0.bb49
-rw-r--r--meta/recipes-graphics/vulkan/vulkan-volk_1.3.280.0.bb37
-rw-r--r--meta/recipes-graphics/waffle/waffle/0001-waffle-do-not-make-core-protocol-into-the-library.patch26
-rw-r--r--meta/recipes-graphics/waffle/waffle_1.7.0.bb50
-rw-r--r--meta/recipes-graphics/waffle/waffle_1.8.0.bb50
-rw-r--r--meta/recipes-graphics/wayland/libinput/determinism.patch21
-rw-r--r--meta/recipes-graphics/wayland/libinput_1.19.4.bb49
-rw-r--r--meta/recipes-graphics/wayland/libinput_1.25.0.bb49
-rw-r--r--meta/recipes-graphics/wayland/mtdev_1.1.6.bb18
-rw-r--r--meta/recipes-graphics/wayland/mtdev_1.1.7.bb17
-rw-r--r--meta/recipes-graphics/wayland/wayland-protocols_1.25.bb26
-rw-r--r--meta/recipes-graphics/wayland/wayland-protocols_1.34.bb25
-rw-r--r--meta/recipes-graphics/wayland/wayland-utils_1.0.0.bb20
-rw-r--r--meta/recipes-graphics/wayland/wayland-utils_1.2.0.bb22
-rw-r--r--meta/recipes-graphics/wayland/wayland/0002-Consider-pkgconfig-sysroot-for-pkgdatadir.patch46
-rw-r--r--meta/recipes-graphics/wayland/wayland/0002-Do-not-hardcode-the-path-to-wayland-scanner.patch27
-rw-r--r--meta/recipes-graphics/wayland/wayland_1.20.0.bb61
-rw-r--r--meta/recipes-graphics/wayland/wayland_1.22.0.bb62
-rw-r--r--meta/recipes-graphics/wayland/weston-init.bb53
-rw-r--r--meta/recipes-graphics/wayland/weston-init/init2
-rwxr-xr-xmeta/recipes-graphics/wayland/weston-init/weston-socket.sh20
-rw-r--r--meta/recipes-graphics/wayland/weston/0001-libweston-tools-Include-libgen.h-for-basename-signat.patch48
-rw-r--r--meta/recipes-graphics/wayland/weston/dont-use-plane-add-prop.patch32
-rw-r--r--meta/recipes-graphics/wayland/weston/xwayland.weston-start3
-rw-r--r--meta/recipes-graphics/wayland/weston_10.0.0.bb144
-rw-r--r--meta/recipes-graphics/wayland/weston_13.0.0.bb146
-rw-r--r--meta/recipes-graphics/x11-common/xserver-nodm-init_3.0.bb1
-rw-r--r--meta/recipes-graphics/xcursor-transparent-theme/xcursor-transparent-theme_git.bb4
-rw-r--r--meta/recipes-graphics/xinput-calibrator/pointercal-xinput_0.0.bb1
-rw-r--r--meta/recipes-graphics/xinput-calibrator/xinput-calibrator_git.bb3
-rw-r--r--meta/recipes-graphics/xorg-app/mkfontscale_1.2.2.bb21
-rw-r--r--meta/recipes-graphics/xorg-app/mkfontscale_1.2.3.bb21
-rw-r--r--meta/recipes-graphics/xorg-app/rgb_1.0.6.bb16
-rw-r--r--meta/recipes-graphics/xorg-app/rgb_1.1.0.bb16
-rw-r--r--meta/recipes-graphics/xorg-app/xauth_1.1.2.bb15
-rw-r--r--meta/recipes-graphics/xorg-app/xauth_1.1.3.bb15
-rw-r--r--meta/recipes-graphics/xorg-app/xdpyinfo_1.3.3.bb20
-rw-r--r--meta/recipes-graphics/xorg-app/xdpyinfo_1.3.4.bb20
-rw-r--r--meta/recipes-graphics/xorg-app/xev/diet-x11.patch114
-rw-r--r--meta/recipes-graphics/xorg-app/xev_1.2.4.bb17
-rw-r--r--meta/recipes-graphics/xorg-app/xev_1.2.6.bb17
-rw-r--r--meta/recipes-graphics/xorg-app/xeyes_1.2.0.bb13
-rw-r--r--meta/recipes-graphics/xorg-app/xeyes_1.3.0.bb14
-rw-r--r--meta/recipes-graphics/xorg-app/xhost_1.0.8.bb20
-rw-r--r--meta/recipes-graphics/xorg-app/xhost_1.0.9.bb21
-rw-r--r--meta/recipes-graphics/xorg-app/xinit_1.4.1.bb25
-rw-r--r--meta/recipes-graphics/xorg-app/xinit_1.4.2.bb26
-rw-r--r--meta/recipes-graphics/xorg-app/xinput_1.6.3.bb12
-rw-r--r--meta/recipes-graphics/xorg-app/xinput_1.6.4.bb13
-rw-r--r--meta/recipes-graphics/xorg-app/xkbcomp_1.4.5.bb18
-rw-r--r--meta/recipes-graphics/xorg-app/xkbcomp_1.4.7.bb19
-rw-r--r--meta/recipes-graphics/xorg-app/xmodmap_1.0.10.bb16
-rw-r--r--meta/recipes-graphics/xorg-app/xmodmap_1.0.11.bb17
-rw-r--r--meta/recipes-graphics/xorg-app/xprop_1.2.5.bb17
-rw-r--r--meta/recipes-graphics/xorg-app/xprop_1.2.7.bb18
-rw-r--r--meta/recipes-graphics/xorg-app/xrandr_1.5.1.bb18
-rw-r--r--meta/recipes-graphics/xorg-app/xrandr_1.5.2.bb17
-rw-r--r--meta/recipes-graphics/xorg-app/xset/disable-xkb.patch23
-rw-r--r--meta/recipes-graphics/xorg-app/xset_1.2.4.bb19
-rw-r--r--meta/recipes-graphics/xorg-app/xset_1.2.5.bb20
-rw-r--r--meta/recipes-graphics/xorg-app/xvinfo_1.1.4.bb14
-rw-r--r--meta/recipes-graphics/xorg-app/xvinfo_1.1.5.bb14
-rw-r--r--meta/recipes-graphics/xorg-app/xwininfo_1.1.5.bb15
-rw-r--r--meta/recipes-graphics/xorg-app/xwininfo_1.1.6.bb16
-rw-r--r--meta/recipes-graphics/xorg-driver/xf86-input-keyboard_1.9.0.bb13
-rw-r--r--meta/recipes-graphics/xorg-driver/xf86-input-libinput_1.2.1.bb12
-rw-r--r--meta/recipes-graphics/xorg-driver/xf86-input-libinput_1.4.0.bb12
-rw-r--r--meta/recipes-graphics/xorg-driver/xf86-input-mouse_1.9.3.bb14
-rw-r--r--meta/recipes-graphics/xorg-driver/xf86-input-mouse_1.9.5.bb14
-rw-r--r--meta/recipes-graphics/xorg-driver/xf86-input-synaptics/64bit_time_t_support.patch51
-rw-r--r--meta/recipes-graphics/xorg-driver/xf86-input-synaptics_1.9.1.bb18
-rw-r--r--meta/recipes-graphics/xorg-driver/xf86-input-synaptics_1.9.2.bb17
-rw-r--r--meta/recipes-graphics/xorg-driver/xf86-input-vmmouse_13.1.0.bb26
-rw-r--r--meta/recipes-graphics/xorg-driver/xf86-input-vmmouse_13.2.0.bb27
-rw-r--r--meta/recipes-graphics/xorg-driver/xf86-video-cirrus_1.5.3.bb13
-rw-r--r--meta/recipes-graphics/xorg-driver/xf86-video-cirrus_1.6.0.bb14
-rw-r--r--meta/recipes-graphics/xorg-driver/xf86-video-intel_git.bb5
-rw-r--r--meta/recipes-graphics/xorg-driver/xf86-video-vesa_2.5.0.bb19
-rw-r--r--meta/recipes-graphics/xorg-driver/xf86-video-vesa_2.6.0.bb20
-rw-r--r--meta/recipes-graphics/xorg-driver/xf86-video-vmware/0002-add-option-for-vmwgfx.patch103
-rw-r--r--meta/recipes-graphics/xorg-driver/xf86-video-vmware_13.3.0.bb19
-rw-r--r--meta/recipes-graphics/xorg-driver/xf86-video-vmware_13.4.0.bb14
-rw-r--r--meta/recipes-graphics/xorg-driver/xorg-driver-common.inc3
-rw-r--r--meta/recipes-graphics/xorg-font/encodings/nocompiler.patch42
-rw-r--r--meta/recipes-graphics/xorg-font/encodings_1.0.5.bb25
-rw-r--r--meta/recipes-graphics/xorg-font/encodings_1.1.0.bb24
-rw-r--r--meta/recipes-graphics/xorg-font/font-alias-1.0.4/nocompiler.patch42
-rw-r--r--meta/recipes-graphics/xorg-font/font-alias_1.0.4.bb23
-rw-r--r--meta/recipes-graphics/xorg-font/font-alias_1.0.5.bb22
-rw-r--r--meta/recipes-graphics/xorg-font/font-util_1.3.2.bb22
-rw-r--r--meta/recipes-graphics/xorg-font/font-util_1.4.1.bb23
-rw-r--r--meta/recipes-graphics/xorg-font/xorg-font-common.inc3
-rw-r--r--meta/recipes-graphics/xorg-font/xorg-minimal-fonts.bb3
-rw-r--r--meta/recipes-graphics/xorg-lib/libdmx_1.1.4.bb20
-rw-r--r--meta/recipes-graphics/xorg-lib/libfontenc_1.1.4.bb17
-rw-r--r--meta/recipes-graphics/xorg-lib/libfontenc_1.1.8.bb16
-rw-r--r--meta/recipes-graphics/xorg-lib/libice_1.0.10.bb28
-rw-r--r--meta/recipes-graphics/xorg-lib/libice_1.1.1.bb27
-rw-r--r--meta/recipes-graphics/xorg-lib/libpciaccess_0.16.bb19
-rw-r--r--meta/recipes-graphics/xorg-lib/libpciaccess_0.18.1.bb26
-rw-r--r--meta/recipes-graphics/xorg-lib/libpthread-stubs_0.4.bb19
-rw-r--r--meta/recipes-graphics/xorg-lib/libpthread-stubs_0.5.bb18
-rw-r--r--meta/recipes-graphics/xorg-lib/libsm_1.2.3.bb31
-rw-r--r--meta/recipes-graphics/xorg-lib/libsm_1.2.4.bb30
-rw-r--r--meta/recipes-graphics/xorg-lib/libx11-compose-data/0001-Drop-x11-dependencies.patch12
-rw-r--r--meta/recipes-graphics/xorg-lib/libx11-compose-data_1.6.8.bb36
-rw-r--r--meta/recipes-graphics/xorg-lib/libx11-compose-data_1.8.4.bb35
-rw-r--r--meta/recipes-graphics/xorg-lib/libx11_1.8.9.bb43
-rw-r--r--meta/recipes-graphics/xorg-lib/libx11_1.8.bb48
-rw-r--r--meta/recipes-graphics/xorg-lib/libxau_1.0.11.bb23
-rw-r--r--meta/recipes-graphics/xorg-lib/libxau_1.0.9.bb24
-rw-r--r--meta/recipes-graphics/xorg-lib/libxcb/0001-use-_Alignof-to-avoid-UB-in-ALIGNOF.patch45
-rw-r--r--meta/recipes-graphics/xorg-lib/libxcb_1.15.bb37
-rw-r--r--meta/recipes-graphics/xorg-lib/libxcb_1.16.1.bb38
-rw-r--r--meta/recipes-graphics/xorg-lib/libxcomposite/change-include-order.patch18
-rw-r--r--meta/recipes-graphics/xorg-lib/libxcomposite_0.4.5.bb28
-rw-r--r--meta/recipes-graphics/xorg-lib/libxcomposite_0.4.6.bb25
-rw-r--r--meta/recipes-graphics/xorg-lib/libxcursor_1.2.1.bb23
-rw-r--r--meta/recipes-graphics/xorg-lib/libxcursor_1.2.2.bb23
-rw-r--r--meta/recipes-graphics/xorg-lib/libxcvt_0.1.1.bb19
-rw-r--r--meta/recipes-graphics/xorg-lib/libxcvt_0.1.2.bb19
-rw-r--r--meta/recipes-graphics/xorg-lib/libxdamage_1.1.5.bb29
-rw-r--r--meta/recipes-graphics/xorg-lib/libxdamage_1.1.6.bb28
-rw-r--r--meta/recipes-graphics/xorg-lib/libxdmcp_1.1.3.bb29
-rw-r--r--meta/recipes-graphics/xorg-lib/libxdmcp_1.1.5.bb28
-rw-r--r--meta/recipes-graphics/xorg-lib/libxext_1.3.4.bb26
-rw-r--r--meta/recipes-graphics/xorg-lib/libxext_1.3.6.bb24
-rw-r--r--meta/recipes-graphics/xorg-lib/libxfixes_6.0.0.bb20
-rw-r--r--meta/recipes-graphics/xorg-lib/libxfixes_6.0.1.bb21
-rw-r--r--meta/recipes-graphics/xorg-lib/libxfont2_2.0.5.bb23
-rw-r--r--meta/recipes-graphics/xorg-lib/libxfont2_2.0.6.bb23
-rw-r--r--meta/recipes-graphics/xorg-lib/libxfont_1.5.4.bb1
-rw-r--r--meta/recipes-graphics/xorg-lib/libxft_2.3.4.bb32
-rw-r--r--meta/recipes-graphics/xorg-lib/libxft_2.3.8.bb32
-rw-r--r--meta/recipes-graphics/xorg-lib/libxi_1.8.1.bb23
-rw-r--r--meta/recipes-graphics/xorg-lib/libxi_1.8.bb22
-rw-r--r--meta/recipes-graphics/xorg-lib/libxinerama_1.1.4.bb22
-rw-r--r--meta/recipes-graphics/xorg-lib/libxinerama_1.1.5.bb20
-rw-r--r--meta/recipes-graphics/xorg-lib/libxkbcommon_1.4.0.bb36
-rw-r--r--meta/recipes-graphics/xorg-lib/libxkbcommon_1.7.0.bb40
-rw-r--r--meta/recipes-graphics/xorg-lib/libxkbfile_1.1.0.bb18
-rw-r--r--meta/recipes-graphics/xorg-lib/libxkbfile_1.1.3.bb16
-rw-r--r--meta/recipes-graphics/xorg-lib/libxmu_1.1.3.bb34
-rw-r--r--meta/recipes-graphics/xorg-lib/libxmu_1.2.0.bb30
-rw-r--r--meta/recipes-graphics/xorg-lib/libxpm_3.5.13.bb27
-rw-r--r--meta/recipes-graphics/xorg-lib/libxpm_3.5.17.bb27
-rw-r--r--meta/recipes-graphics/xorg-lib/libxrandr_1.5.2.bb23
-rw-r--r--meta/recipes-graphics/xorg-lib/libxrandr_1.5.4.bb22
-rw-r--r--meta/recipes-graphics/xorg-lib/libxrender_0.9.10.bb24
-rw-r--r--meta/recipes-graphics/xorg-lib/libxrender_0.9.11.bb22
-rw-r--r--meta/recipes-graphics/xorg-lib/libxres_1.2.1.bb19
-rw-r--r--meta/recipes-graphics/xorg-lib/libxres_1.2.2.bb19
-rw-r--r--meta/recipes-graphics/xorg-lib/libxscrnsaver_1.2.3.bb23
-rw-r--r--meta/recipes-graphics/xorg-lib/libxscrnsaver_1.2.4.bb22
-rw-r--r--meta/recipes-graphics/xorg-lib/libxshmfence_1.3.2.bb20
-rw-r--r--meta/recipes-graphics/xorg-lib/libxshmfence_1.3.bb21
-rw-r--r--meta/recipes-graphics/xorg-lib/libxt_1.2.1.bb31
-rw-r--r--meta/recipes-graphics/xorg-lib/libxt_1.3.0.bb32
-rw-r--r--meta/recipes-graphics/xorg-lib/libxtst_1.2.3.bb22
-rw-r--r--meta/recipes-graphics/xorg-lib/libxtst_1.2.4.bb20
-rw-r--r--meta/recipes-graphics/xorg-lib/libxv_1.0.11.bb19
-rw-r--r--meta/recipes-graphics/xorg-lib/libxv_1.0.12.bb18
-rw-r--r--meta/recipes-graphics/xorg-lib/libxvmc_1.0.13.bb19
-rw-r--r--meta/recipes-graphics/xorg-lib/libxvmc_1.0.14.bb19
-rw-r--r--meta/recipes-graphics/xorg-lib/libxxf86vm_1.1.4.bb22
-rw-r--r--meta/recipes-graphics/xorg-lib/libxxf86vm_1.1.5.bb20
-rw-r--r--meta/recipes-graphics/xorg-lib/pixman_0.40.0.bb41
-rw-r--r--meta/recipes-graphics/xorg-lib/pixman_0.42.2.bb47
-rw-r--r--meta/recipes-graphics/xorg-lib/xcb-util-cursor_0.1.4.bb10
-rw-r--r--meta/recipes-graphics/xorg-lib/xcb-util-image_0.4.0.bb13
-rw-r--r--meta/recipes-graphics/xorg-lib/xcb-util-image_0.4.1.bb12
-rw-r--r--meta/recipes-graphics/xorg-lib/xcb-util-keysyms_0.4.0.bb9
-rw-r--r--meta/recipes-graphics/xorg-lib/xcb-util-keysyms_0.4.1.bb8
-rw-r--r--meta/recipes-graphics/xorg-lib/xcb-util-renderutil_0.3.10.bb9
-rw-r--r--meta/recipes-graphics/xorg-lib/xcb-util-renderutil_0.3.9.bb10
-rw-r--r--meta/recipes-graphics/xorg-lib/xcb-util-wm_0.4.1.bb11
-rw-r--r--meta/recipes-graphics/xorg-lib/xcb-util-wm_0.4.2.bb10
-rw-r--r--meta/recipes-graphics/xorg-lib/xcb-util.inc2
-rw-r--r--meta/recipes-graphics/xorg-lib/xcb-util_0.4.0.bb8
-rw-r--r--meta/recipes-graphics/xorg-lib/xcb-util_0.4.1.bb9
-rw-r--r--meta/recipes-graphics/xorg-lib/xkeyboard-config_2.35.1.bb30
-rw-r--r--meta/recipes-graphics/xorg-lib/xkeyboard-config_2.41.bb32
-rw-r--r--meta/recipes-graphics/xorg-lib/xorg-lib-common.inc3
-rw-r--r--meta/recipes-graphics/xorg-lib/xtrans_1.4.0.bb26
-rw-r--r--meta/recipes-graphics/xorg-lib/xtrans_1.5.0.bb25
-rw-r--r--meta/recipes-graphics/xorg-proto/xcb-proto/0001-Fix-install-conflict-when-enable-multilib.patch32
-rw-r--r--meta/recipes-graphics/xorg-proto/xcb-proto/0001-xcb-proto.pc.in-reinstate-libdir.patch29
-rw-r--r--meta/recipes-graphics/xorg-proto/xcb-proto_1.15.bb28
-rw-r--r--meta/recipes-graphics/xorg-proto/xcb-proto_1.16.0.bb31
-rw-r--r--meta/recipes-graphics/xorg-proto/xorgproto_2022.1.bb25
-rw-r--r--meta/recipes-graphics/xorg-proto/xorgproto_2024.1.bb25
-rw-r--r--meta/recipes-graphics/xorg-util/makedepend_1.0.6.bb21
-rw-r--r--meta/recipes-graphics/xorg-util/makedepend_1.0.9.bb20
-rw-r--r--meta/recipes-graphics/xorg-util/util-macros/0001-xorg-macros.m4.in-do-not-run-AC_CANONICAL_HOST-in-ma.patch28
-rw-r--r--meta/recipes-graphics/xorg-util/util-macros_1.19.3.bb19
-rw-r--r--meta/recipes-graphics/xorg-util/util-macros_1.20.0.bb20
-rw-r--r--meta/recipes-graphics/xorg-xserver/xserver-xf86-config/qemuarm/xorg.conf4
-rw-r--r--meta/recipes-graphics/xorg-xserver/xserver-xf86-config/qemuppc/xorg.conf4
-rw-r--r--meta/recipes-graphics/xorg-xserver/xserver-xf86-config/qemush4/xorg.conf4
-rw-r--r--meta/recipes-graphics/xorg-xserver/xserver-xf86-config/qemux86-64/xorg.conf4
-rw-r--r--meta/recipes-graphics/xorg-xserver/xserver-xf86-config/qemux86/xorg.conf4
-rw-r--r--meta/recipes-graphics/xorg-xserver/xserver-xf86-config_0.1.bb1
-rw-r--r--meta/recipes-graphics/xorg-xserver/xserver-xorg.inc27
-rw-r--r--meta/recipes-graphics/xorg-xserver/xserver-xorg/0001-render-Fix-build-with-gcc-12.patch90
-rw-r--r--meta/recipes-graphics/xorg-xserver/xserver-xorg_21.1.11.bb28
-rw-r--r--meta/recipes-graphics/xorg-xserver/xserver-xorg_21.1.3.bb29
-rw-r--r--meta/recipes-graphics/xrestop/xrestop_0.4.bb1
-rw-r--r--meta/recipes-graphics/xwayland/xwayland_22.1.1.bb45
-rw-r--r--meta/recipes-graphics/xwayland/xwayland_23.2.5.bb45
-rw-r--r--meta/recipes-kernel/blktrace/blktrace_git.bb4
-rw-r--r--meta/recipes-kernel/cryptodev/cryptodev-linux_1.13.bb (renamed from meta/recipes-kernel/cryptodev/cryptodev-linux_1.12.bb)0
-rw-r--r--meta/recipes-kernel/cryptodev/cryptodev-module_1.13.bb (renamed from meta/recipes-kernel/cryptodev/cryptodev-module_1.12.bb)0
-rw-r--r--meta/recipes-kernel/cryptodev/cryptodev-tests_1.12.bb22
-rw-r--r--meta/recipes-kernel/cryptodev/cryptodev-tests_1.13.bb21
-rw-r--r--meta/recipes-kernel/cryptodev/cryptodev.inc3
-rw-r--r--meta/recipes-kernel/cryptodev/files/0001-Add-the-compile-and-install-rules-for-cryptodev-test.patch66
-rw-r--r--meta/recipes-kernel/cryptodev/files/0001-Disable-installing-header-file-provided-by-another-p.patch11
-rw-r--r--meta/recipes-kernel/cryptodev/files/0001-tests-Makefile-do-not-use-Werror.patch3
-rw-r--r--meta/recipes-kernel/dtc/dtc/0001-fdtdump-fix-Werror-int-to-pointer-cast.patch40
-rw-r--r--meta/recipes-kernel/dtc/dtc/0001-meson.build-bump-version-to-1.7.0.patch29
-rw-r--r--meta/recipes-kernel/dtc/dtc/0002-meson-allow-building-from-shallow-clones.patch38
-rw-r--r--meta/recipes-kernel/dtc/dtc_1.6.1.bb30
-rw-r--r--meta/recipes-kernel/dtc/dtc_1.7.0.bb34
-rw-r--r--meta/recipes-kernel/dtc/python3-dtschema-wrapper_2021.10.bb2
-rw-r--r--meta/recipes-kernel/kern-tools/kern-tools-native_git.bb8
-rw-r--r--meta/recipes-kernel/kexec/kexec-tools/0002-purgatory-Pass-r-directly-to-linker.patch7
-rw-r--r--meta/recipes-kernel/kexec/kexec-tools/0003-kexec-ARM-Fix-add_buffer_phys_virt-align-issue.patch11
-rw-r--r--meta/recipes-kernel/kexec/kexec-tools/0005-Disable-PIE-during-link.patch7
-rw-r--r--meta/recipes-kernel/kexec/kexec-tools/Fix-building-on-x86_64-with-binutils-2.41.patch95
-rw-r--r--meta/recipes-kernel/kexec/kexec-tools_2.0.24.bb86
-rw-r--r--meta/recipes-kernel/kexec/kexec-tools_2.0.28.bb86
-rw-r--r--meta/recipes-kernel/kmod/depmodwrapper-cross_1.0.bb16
-rw-r--r--meta/recipes-kernel/kmod/kmod/0001-Use-portable-implementation-for-basename-API.patch136
-rw-r--r--meta/recipes-kernel/kmod/kmod/0001-depmod-Add-support-for-excluding-a-directory.patch172
-rw-r--r--meta/recipes-kernel/kmod/kmod/gtkdocdir.patch33
-rw-r--r--meta/recipes-kernel/kmod/kmod/ptest.patch25
-rw-r--r--meta/recipes-kernel/kmod/kmod_29.bb90
-rw-r--r--meta/recipes-kernel/kmod/kmod_31.bb89
-rw-r--r--meta/recipes-kernel/libtraceevent/libtraceevent/meson.patch45
-rw-r--r--meta/recipes-kernel/libtraceevent/libtraceevent_1.8.2.bb23
-rw-r--r--meta/recipes-kernel/linux-firmware/files/0001-Makefile-replace-mkdir-by-install.patch84
-rw-r--r--meta/recipes-kernel/linux-firmware/linux-firmware_20220509.bb1084
-rw-r--r--meta/recipes-kernel/linux-firmware/linux-firmware_20240312.bb1583
-rw-r--r--meta/recipes-kernel/linux-libc-headers/linux-libc-headers.inc4
-rw-r--r--meta/recipes-kernel/linux-libc-headers/linux-libc-headers/0001-include-linux-stddef.h-in-swab.h-uapi-header.patch42
-rw-r--r--meta/recipes-kernel/linux-libc-headers/linux-libc-headers/0001-scripts-Use-fixed-input-and-output-files-instead-of-.patch67
-rw-r--r--meta/recipes-kernel/linux-libc-headers/linux-libc-headers_5.16.bb20
-rw-r--r--meta/recipes-kernel/linux-libc-headers/linux-libc-headers_6.6.bb17
-rw-r--r--meta/recipes-kernel/linux/cve-exclusion.inc34
-rw-r--r--meta/recipes-kernel/linux/cve-exclusion_6.6.inc5384
-rwxr-xr-xmeta/recipes-kernel/linux/generate-cve-exclusions.py98
-rw-r--r--meta/recipes-kernel/linux/kernel-devsrc.bb423
-rw-r--r--meta/recipes-kernel/linux/linux-dummy.bb1
-rw-r--r--meta/recipes-kernel/linux/linux-yocto-dev.bb22
-rw-r--r--meta/recipes-kernel/linux/linux-yocto-rt_5.10.bb45
-rw-r--r--meta/recipes-kernel/linux/linux-yocto-rt_5.15.bb45
-rw-r--r--meta/recipes-kernel/linux/linux-yocto-rt_6.6.bb48
-rw-r--r--meta/recipes-kernel/linux/linux-yocto-tiny_5.10.bb32
-rw-r--r--meta/recipes-kernel/linux/linux-yocto-tiny_5.15.bb30
-rw-r--r--meta/recipes-kernel/linux/linux-yocto-tiny_6.6.bb33
-rw-r--r--meta/recipes-kernel/linux/linux-yocto.inc17
-rw-r--r--meta/recipes-kernel/linux/linux-yocto_5.10.bb58
-rw-r--r--meta/recipes-kernel/linux/linux-yocto_5.15.bb70
-rw-r--r--meta/recipes-kernel/linux/linux-yocto_6.6.bb72
-rwxr-xr-xmeta/recipes-kernel/lttng/babeltrace2/run-ptest12
-rw-r--r--meta/recipes-kernel/lttng/babeltrace2_2.0.4.bb93
-rw-r--r--meta/recipes-kernel/lttng/babeltrace2_2.0.6.bb95
-rw-r--r--meta/recipes-kernel/lttng/babeltrace_1.5.11.bb98
-rw-r--r--meta/recipes-kernel/lttng/babeltrace_1.5.8.bb98
-rw-r--r--meta/recipes-kernel/lttng/lttng-modules/0001-Fix-ASoC-snd_doc_dapm-on-linux-6.9-rc1.patch93
-rw-r--r--meta/recipes-kernel/lttng/lttng-modules/0001-Fix-compaction-migratepages-event-name.patch37
-rw-r--r--meta/recipes-kernel/lttng/lttng-modules/0001-fix-sched-tracing-Append-prev_state-to-tp-args-inste.patch59
-rw-r--r--meta/recipes-kernel/lttng/lttng-modules/0001-src-Kbuild-change-missing-CONFIG_TRACEPOINTS-to-warn.patch10
-rw-r--r--meta/recipes-kernel/lttng/lttng-modules/0002-Fix-ASoC-add-component-to-set_bias_level-events-in-l.patch132
-rw-r--r--meta/recipes-kernel/lttng/lttng-modules/0002-Fix-tracepoint-event-allow-same-provider-and-event-n.patch48
-rw-r--r--meta/recipes-kernel/lttng/lttng-modules/0003-Fix-mm_compaction_migratepages-changed-in-linux-6.9-.patch81
-rw-r--r--meta/recipes-kernel/lttng/lttng-modules/0003-fix-sched-tracing-Don-t-re-read-p-state-when-emittin.patch183
-rw-r--r--meta/recipes-kernel/lttng/lttng-modules/0004-Fix-dev_base_lock-removed-in-linux-6.9-rc1.patch57
-rw-r--r--meta/recipes-kernel/lttng/lttng-modules/0004-fix-block-remove-genhd.h-v5.18.patch45
-rw-r--r--meta/recipes-kernel/lttng/lttng-modules/0005-fix-scsi-block-Remove-REQ_OP_WRITE_SAME-support-v5.1.patch79
-rw-r--r--meta/recipes-kernel/lttng/lttng-modules/0006-fix-random-remove-unused-tracepoints-v5.18.patch47
-rw-r--r--meta/recipes-kernel/lttng/lttng-modules/0007-fix-kprobes-Use-rethook-for-kretprobe-if-possible-v5.patch72
-rw-r--r--meta/recipes-kernel/lttng/lttng-modules/0008-fix-scsi-core-Remove-scsi-scsi_request.h-v5.18.patch44
-rw-r--r--meta/recipes-kernel/lttng/lttng-modules/0009-Rename-genhd-wrapper-to-blkdev.patch76
-rw-r--r--meta/recipes-kernel/lttng/lttng-modules/0010-fix-mm-compaction-cleanup-the-compaction-trace-event.patch106
-rw-r--r--meta/recipes-kernel/lttng/lttng-modules_2.13.12.bb45
-rw-r--r--meta/recipes-kernel/lttng/lttng-modules_2.13.3.bb52
-rw-r--r--meta/recipes-kernel/lttng/lttng-platforms.inc4
-rw-r--r--meta/recipes-kernel/lttng/lttng-tools/0001-Fix-rotation-destroy-flush-fix-session-daemon-abort-.patch56
-rw-r--r--meta/recipes-kernel/lttng/lttng-tools/0001-compat-Define-off64_t-as-off_t-on-linux.patch74
-rw-r--r--meta/recipes-kernel/lttng/lttng-tools/0001-tests-add-check_skip_kernel_test-to-check-root-user-.patch1246
-rw-r--r--meta/recipes-kernel/lttng/lttng-tools/0001-tests-do-not-strip-a-helper-library.patch9
-rw-r--r--meta/recipes-kernel/lttng/lttng-tools/determinism.patch64
-rwxr-xr-xmeta/recipes-kernel/lttng/lttng-tools/run-ptest44
-rw-r--r--meta/recipes-kernel/lttng/lttng-tools_2.13.13.bb197
-rw-r--r--meta/recipes-kernel/lttng/lttng-tools_2.13.7.bb195
-rw-r--r--meta/recipes-kernel/lttng/lttng-ust/0001-Makefile.am-update-rpath-link.patch12
-rw-r--r--meta/recipes-kernel/lttng/lttng-ust/0001-lttng-ust-common-link-with-liburcu-explicitly.patch7
-rw-r--r--meta/recipes-kernel/lttng/lttng-ust/0001-python-lttngust-Makefile.am-Add-install-lib-to-setup.patch14
-rw-r--r--meta/recipes-kernel/lttng/lttng-ust_2.13.2.bb53
-rw-r--r--meta/recipes-kernel/lttng/lttng-ust_2.13.7.bb53
-rw-r--r--meta/recipes-kernel/make-mod-scripts/make-mod-scripts_1.0.bb11
-rw-r--r--meta/recipes-kernel/modutils-initscripts/modutils-initscripts.bb1
-rw-r--r--meta/recipes-kernel/perf/perf-perl.inc10
-rw-r--r--meta/recipes-kernel/perf/perf.bb115
-rwxr-xr-xmeta/recipes-kernel/perf/perf/sort-pmuevents.py13
-rw-r--r--meta/recipes-kernel/powertop/powertop/0001-src-fix-compatibility-with-ncurses-6.3.patch52
-rw-r--r--meta/recipes-kernel/powertop/powertop_2.14.bb25
-rw-r--r--meta/recipes-kernel/powertop/powertop_2.15.bb24
-rw-r--r--meta/recipes-kernel/systemtap/systemtap-native_git.bb2
-rw-r--r--meta/recipes-kernel/systemtap/systemtap-uprobes_git.bb40
-rw-r--r--meta/recipes-kernel/systemtap/systemtap/0001-Makefile.am-remove-runtime-linux-uprobes-and-runtime.patch40
-rw-r--r--meta/recipes-kernel/systemtap/systemtap/0001-bpf-translate.cxx-fix-build-against-upcoming-gcc-14-.patch40
-rw-r--r--meta/recipes-kernel/systemtap/systemtap/0001-configure.ac-fix-broken-libdebuginfod-library-auto-d.patch51
-rw-r--r--meta/recipes-kernel/systemtap/systemtap/0001-prerelease-datestamp-fixes.patch23
-rw-r--r--meta/recipes-kernel/systemtap/systemtap/0001-staprun-fix-build-against-upcoming-gcc-14-Werror-cal.patch36
-rw-r--r--meta/recipes-kernel/systemtap/systemtap_git.bb8
-rw-r--r--meta/recipes-kernel/systemtap/systemtap_git.inc11
-rw-r--r--meta/recipes-kernel/wireless-regdb/wireless-regdb_2022.04.08.bb43
-rw-r--r--meta/recipes-kernel/wireless-regdb/wireless-regdb_2024.01.23.bb43
-rw-r--r--meta/recipes-multimedia/alsa/alsa-lib/0001-topology-correct-version-script-path.patch38
-rw-r--r--meta/recipes-multimedia/alsa/alsa-lib_1.2.11.bb46
-rw-r--r--meta/recipes-multimedia/alsa/alsa-lib_1.2.6.1.bb44
-rw-r--r--meta/recipes-multimedia/alsa/alsa-plugins/0001-arcam_av.c-Include-missing-string.h.patch25
-rw-r--r--meta/recipes-multimedia/alsa/alsa-plugins_1.2.6.bb174
-rw-r--r--meta/recipes-multimedia/alsa/alsa-plugins_1.2.7.1.bb176
-rw-r--r--meta/recipes-multimedia/alsa/alsa-tools_1.2.11.bb89
-rw-r--r--meta/recipes-multimedia/alsa/alsa-tools_1.2.5.bb89
-rw-r--r--meta/recipes-multimedia/alsa/alsa-ucm-conf_1.2.11.bb24
-rw-r--r--meta/recipes-multimedia/alsa/alsa-ucm-conf_1.2.6.3.bb24
-rw-r--r--meta/recipes-multimedia/alsa/alsa-utils-scripts_1.2.6.bb25
-rw-r--r--meta/recipes-multimedia/alsa/alsa-utils.inc108
-rw-r--r--meta/recipes-multimedia/alsa/alsa-utils_1.2.11.bb124
-rw-r--r--meta/recipes-multimedia/alsa/alsa-utils_1.2.6.bb2
-rw-r--r--meta/recipes-multimedia/ffmpeg/ffmpeg/0001-libavutil-include-assembly-with-full-path-from-sourc.patch112
-rw-r--r--meta/recipes-multimedia/ffmpeg/ffmpeg_5.0.1.bb176
-rw-r--r--meta/recipes-multimedia/ffmpeg/ffmpeg_6.1.1.bb186
-rw-r--r--meta/recipes-multimedia/flac/flac_1.3.4.bb45
-rw-r--r--meta/recipes-multimedia/flac/flac_1.4.3.bb36
-rw-r--r--meta/recipes-multimedia/gstreamer/gst-devtools/0001-connect-has-a-different-signature-on-musl.patch3
-rw-r--r--meta/recipes-multimedia/gstreamer/gst-devtools_1.20.2.bb52
-rw-r--r--meta/recipes-multimedia/gstreamer/gst-devtools_1.22.11.bb52
-rw-r--r--meta/recipes-multimedia/gstreamer/gst-examples/gst-player.desktop2
-rw-r--r--meta/recipes-multimedia/gstreamer/gstreamer1.0-libav_1.20.2.bb28
-rw-r--r--meta/recipes-multimedia/gstreamer/gstreamer1.0-libav_1.22.11.bb28
-rw-r--r--meta/recipes-multimedia/gstreamer/gstreamer1.0-meta-base.bb3
-rw-r--r--meta/recipes-multimedia/gstreamer/gstreamer1.0-omx_1.20.2.bb47
-rw-r--r--meta/recipes-multimedia/gstreamer/gstreamer1.0-omx_1.22.11.bb47
-rw-r--r--meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-bad/0001-fix-maybe-uninitialized-warnings-when-compiling-with.patch2
-rw-r--r--meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-bad/0002-avoid-including-sys-poll.h-directly.patch4
-rw-r--r--meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-bad/0003-ensure-valid-sentinals-for-gst_structure_get-etc.patch86
-rw-r--r--meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-bad/0004-opencv-resolve-missing-opencv-data-dir-in-yocto-buil.patch4
-rw-r--r--meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-bad_1.20.2.bb166
-rw-r--r--meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-bad_1.22.11.bb165
-rw-r--r--meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-base/0001-ENGR00312515-get-caps-from-src-pad-when-query-caps.patch12
-rw-r--r--meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-base/0002-ssaparse-enhance-SSA-text-lines-parsing.patch22
-rw-r--r--meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-base/0003-viv-fb-Make-sure-config.h-is-included.patch10
-rw-r--r--meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-base_1.20.2.bb94
-rw-r--r--meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-base_1.22.11.bb96
-rw-r--r--meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-good/0001-qt-include-ext-qt-gstqtgl.h-instead-of-gst-gl-gstglf.patch10
-rw-r--r--meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-good/0001-v4l2-Define-ioctl_req_t-for-posix-linux-case.patch38
-rw-r--r--meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-good_1.20.2.bb81
-rw-r--r--meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-good_1.22.11.bb81
-rw-r--r--meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-license.inc2
-rw-r--r--meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-packaging.inc4
-rw-r--r--meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-ugly_1.20.2.bb46
-rw-r--r--meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-ugly_1.22.11.bb47
-rw-r--r--meta/recipes-multimedia/gstreamer/gstreamer1.0-python_1.20.2.bb30
-rw-r--r--meta/recipes-multimedia/gstreamer/gstreamer1.0-python_1.22.11.bb30
-rw-r--r--meta/recipes-multimedia/gstreamer/gstreamer1.0-rtsp-server_1.20.2.bb31
-rw-r--r--meta/recipes-multimedia/gstreamer/gstreamer1.0-rtsp-server_1.22.11.bb31
-rw-r--r--meta/recipes-multimedia/gstreamer/gstreamer1.0-vaapi_1.20.2.bb53
-rw-r--r--meta/recipes-multimedia/gstreamer/gstreamer1.0-vaapi_1.22.11.bb53
-rw-r--r--meta/recipes-multimedia/gstreamer/gstreamer1.0/0001-tests-respect-the-idententaion-used-in-meson.patch24
-rw-r--r--meta/recipes-multimedia/gstreamer/gstreamer1.0/0002-tests-add-support-for-install-the-tests.patch56
-rw-r--r--meta/recipes-multimedia/gstreamer/gstreamer1.0/0003-tests-use-a-dictionaries-for-environment.patch20
-rw-r--r--meta/recipes-multimedia/gstreamer/gstreamer1.0/0004-tests-add-helper-script-to-run-the-installed_tests.patch40
-rw-r--r--meta/recipes-multimedia/gstreamer/gstreamer1.0/0005-tests-remove-gstbin-test_watch_for_state_change-test.patch107
-rwxr-xr-xmeta/recipes-multimedia/gstreamer/gstreamer1.0/run-ptest16
-rw-r--r--meta/recipes-multimedia/gstreamer/gstreamer1.0_1.20.2.bb73
-rw-r--r--meta/recipes-multimedia/gstreamer/gstreamer1.0_1.22.11.bb74
-rw-r--r--meta/recipes-multimedia/liba52/liba52_0.7.4.bb1
-rw-r--r--meta/recipes-multimedia/libpng/libpng_1.6.37.bb35
-rw-r--r--meta/recipes-multimedia/libpng/libpng_1.6.43.bb33
-rw-r--r--meta/recipes-multimedia/libsamplerate/libsamplerate0_0.2.2.bb6
-rw-r--r--meta/recipes-multimedia/libsndfile/libsndfile1/cve-2022-33065.patch739
-rw-r--r--meta/recipes-multimedia/libsndfile/libsndfile1_1.1.0.bb32
-rw-r--r--meta/recipes-multimedia/libsndfile/libsndfile1_1.2.2.bb32
-rw-r--r--meta/recipes-multimedia/libtheora/libtheora_1.1.1.bb1
-rw-r--r--meta/recipes-multimedia/libtiff/tiff/0001-tif_jbig.c-fix-crash-when-reading-a-file-with-multip.patch38
-rw-r--r--meta/recipes-multimedia/libtiff/tiff/0001-tiffset-fix-global-buffer-overflow-for-ASCII-tags-wh.patch43
-rw-r--r--meta/recipes-multimedia/libtiff/tiff/0002-tiffcrop-fix-issue-380-and-382-heap-buffer-overflow-.patch219
-rw-r--r--meta/recipes-multimedia/libtiff/tiff/0003-add-checks-for-return-value-of-limitMalloc-392.patch93
-rw-r--r--meta/recipes-multimedia/libtiff/tiff/0004-TIFFFetchNormalTag-avoid-calling-memcpy-with-a-null-.patch33
-rw-r--r--meta/recipes-multimedia/libtiff/tiff/0005-fix-the-FPE-in-tiffcrop-393.patch36
-rw-r--r--meta/recipes-multimedia/libtiff/tiff/0006-fix-heap-buffer-overflow-in-tiffcp-278.patch57
-rw-r--r--meta/recipes-multimedia/libtiff/tiff/561599c99f987dc32ae110370cfdd7df7975586b.patch30
-rw-r--r--meta/recipes-multimedia/libtiff/tiff/CVE-2023-52355-0001.patch238
-rw-r--r--meta/recipes-multimedia/libtiff/tiff/CVE-2023-52355-0002.patch28
-rw-r--r--meta/recipes-multimedia/libtiff/tiff/CVE-2023-52356.patch49
-rw-r--r--meta/recipes-multimedia/libtiff/tiff/CVE-2023-6228.patch31
-rw-r--r--meta/recipes-multimedia/libtiff/tiff/CVE-2023-6277-Apply-1-suggestion-s-to-1-file-s.patch27
-rw-r--r--meta/recipes-multimedia/libtiff/tiff/CVE-2023-6277-At-image-reading-compare-data-size-of-some-tags-data-2.patch36
-rw-r--r--meta/recipes-multimedia/libtiff/tiff/CVE-2023-6277-At-image-reading-compare-data-size-of-some-tags-data.patch162
-rw-r--r--meta/recipes-multimedia/libtiff/tiff/eecb0712f4c3a5b449f70c57988260a667ddbdef.patch32
-rw-r--r--meta/recipes-multimedia/libtiff/tiff_4.3.0.bb67
-rw-r--r--meta/recipes-multimedia/libtiff/tiff_4.6.0.bb68
-rw-r--r--meta/recipes-multimedia/mpg123/mpg123_1.29.3.bb52
-rw-r--r--meta/recipes-multimedia/mpg123/mpg123_1.32.6.bb55
-rw-r--r--meta/recipes-multimedia/pulseaudio/pulseaudio.inc8
-rw-r--r--meta/recipes-multimedia/pulseaudio/pulseaudio/0001-meson-Check-for-__get_cpuid.patch82
-rw-r--r--meta/recipes-multimedia/pulseaudio/pulseaudio_15.0.bb11
-rw-r--r--meta/recipes-multimedia/pulseaudio/pulseaudio_17.0.bb10
-rw-r--r--meta/recipes-multimedia/sbc/sbc/0001-sbc_primitives-Fix-build-on-non-x86.patch45
-rw-r--r--meta/recipes-multimedia/sbc/sbc_1.5.bb24
-rw-r--r--meta/recipes-multimedia/sbc/sbc_2.0.bb22
-rw-r--r--meta/recipes-multimedia/speex/speex/CVE-2020-23903.patch30
-rw-r--r--meta/recipes-multimedia/speex/speex_1.2.0.bb22
-rw-r--r--meta/recipes-multimedia/speex/speex_1.2.1.bb20
-rw-r--r--meta/recipes-multimedia/speex/speexdsp_1.2.0.bb27
-rw-r--r--meta/recipes-multimedia/speex/speexdsp_1.2.1.bb28
-rw-r--r--meta/recipes-multimedia/webp/libwebp_1.2.2.bb55
-rw-r--r--meta/recipes-multimedia/webp/libwebp_1.3.2.bb57
-rw-r--r--meta/recipes-multimedia/x264/x264_git.bb4
-rw-r--r--meta/recipes-rt/README2
-rw-r--r--meta/recipes-rt/rt-tests/files/0001-Makefile-Allow-for-CC-and-AR-to-be-overridden.patch6
-rwxr-xr-xmeta/recipes-rt/rt-tests/files/rt_bmark.py2
-rw-r--r--meta/recipes-rt/rt-tests/rt-tests.inc6
-rw-r--r--meta/recipes-sato/images/core-image-sato.bb1
-rw-r--r--meta/recipes-sato/l3afpad/l3afpad_git.bb2
-rw-r--r--meta/recipes-sato/matchbox-config-gtk/matchbox-config-gtk_0.2.bb8
-rw-r--r--meta/recipes-sato/matchbox-desktop/matchbox-desktop_2.2.bb34
-rw-r--r--meta/recipes-sato/matchbox-desktop/matchbox-desktop_2.3.bb33
-rw-r--r--meta/recipes-sato/matchbox-keyboard/matchbox-keyboard_0.1.1.bb2
-rw-r--r--meta/recipes-sato/matchbox-panel-2/matchbox-panel-2_2.11.bb42
-rw-r--r--meta/recipes-sato/matchbox-panel-2/matchbox-panel-2_2.12.bb39
-rw-r--r--meta/recipes-sato/matchbox-sato/matchbox-session-sato/session1
-rw-r--r--meta/recipes-sato/matchbox-sato/matchbox-session-sato_0.1.bb3
-rw-r--r--meta/recipes-sato/matchbox-terminal/matchbox-terminal_0.2.bb8
-rw-r--r--meta/recipes-sato/matchbox-theme-sato/matchbox-theme-sato_0.2.bb2
-rw-r--r--meta/recipes-sato/packagegroups/packagegroup-core-x11-sato.bb3
-rw-r--r--meta/recipes-sato/puzzles/puzzles_git.bb8
-rw-r--r--meta/recipes-sato/rxvt-unicode/rxvt-unicode.inc7
-rw-r--r--meta/recipes-sato/rxvt-unicode/rxvt-unicode/rxvt.desktop2
-rw-r--r--meta/recipes-sato/rxvt-unicode/rxvt-unicode_9.30.bb8
-rw-r--r--meta/recipes-sato/rxvt-unicode/rxvt-unicode_9.31.bb8
-rw-r--r--meta/recipes-sato/sato-icon-theme/icon-naming-utils_0.8.90.bb29
-rw-r--r--meta/recipes-sato/sato-icon-theme/sato-icon-theme_git.bb29
-rw-r--r--meta/recipes-sato/sato-screenshot/sato-screenshot_0.3.bb2
-rw-r--r--meta/recipes-sato/settings-daemon/settings-daemon_0.0.2.bb2
-rw-r--r--meta/recipes-sato/shutdown-desktop/shutdown-desktop.bb1
-rw-r--r--meta/recipes-sato/webkit/libwpe_1.12.0.bb18
-rw-r--r--meta/recipes-sato/webkit/libwpe_1.16.0.bb18
-rw-r--r--meta/recipes-sato/webkit/webkitgtk/0001-CMake-Add-a-variable-to-control-macro-__PAS_ALWAYS_I.patch73
-rw-r--r--meta/recipes-sato/webkit/webkitgtk/0001-FindGObjectIntrospection.cmake-prefix-variables-obta.patch39
-rw-r--r--meta/recipes-sato/webkit/webkitgtk/0001-Fix-build-without-opengl-or-es.patch133
-rw-r--r--meta/recipes-sato/webkit/webkitgtk/0001-Tweak-gtkdoc-settings-so-that-gtkdoc-generation-work.patch47
-rw-r--r--meta/recipes-sato/webkit/webkitgtk/0001-When-building-introspection-files-do-not-quote-CFLAG.patch78
-rw-r--r--meta/recipes-sato/webkit/webkitgtk/2922af379dc70b4b1a63b01d67179eb431f03ac4.patch38
-rw-r--r--meta/recipes-sato/webkit/webkitgtk/30e1d5e22213fdaca2a29ec3400c927d710a37a8.patch67
-rw-r--r--meta/recipes-sato/webkit/webkitgtk/no-musttail-arm.patch30
-rw-r--r--meta/recipes-sato/webkit/webkitgtk/reproducibility.patch16
-rw-r--r--meta/recipes-sato/webkit/webkitgtk/t6-not-declared.patch37
-rw-r--r--meta/recipes-sato/webkit/webkitgtk_2.36.1.bb167
-rw-r--r--meta/recipes-sato/webkit/webkitgtk_2.44.0.bb187
-rw-r--r--meta/recipes-sato/webkit/wpebackend-fdo_1.12.0.bb24
-rw-r--r--meta/recipes-sato/webkit/wpebackend-fdo_1.14.2.bb24
-rw-r--r--meta/recipes-support/appstream/appstream/0001-remove-hardcoded-path.patch32
-rw-r--r--meta/recipes-support/appstream/appstream_1.0.2.bb44
-rw-r--r--meta/recipes-support/apr/apr-util/0001-Fix-error-handling-in-gdbm.patch134
-rw-r--r--meta/recipes-support/apr/apr-util/0001-test_transformation-Check-if-transform-is-supported-.patch37
-rw-r--r--meta/recipes-support/apr/apr-util_1.6.1.bb98
-rw-r--r--meta/recipes-support/apr/apr-util_1.6.3.bb97
-rw-r--r--meta/recipes-support/apr/apr/0001-Add-option-to-disable-timed-dependant-tests.patch20
-rw-r--r--meta/recipes-support/apr/apr/0001-configure-Remove-runtime-test-for-mmap-that-can-map-.patch58
-rw-r--r--meta/recipes-support/apr/apr/0001-dso-Check-for-NULL-handle-in-apr_dso_sym.patch37
-rw-r--r--meta/recipes-support/apr/apr/0002-apr-Remove-workdir-path-references-from-installed-ap.patch25
-rw-r--r--meta/recipes-support/apr/apr/0003-Makefile.in-configure.in-support-cross-compiling.patch63
-rw-r--r--meta/recipes-support/apr/apr/0006-apr-fix-off_t-size-doesn-t-match-in-glibc-when-cross.patch76
-rw-r--r--meta/recipes-support/apr/apr/CVE-2021-35940.patch58
-rw-r--r--meta/recipes-support/apr/apr/autoconf-2.73.patch26
-rw-r--r--meta/recipes-support/apr/apr/autoconf270.patch22
-rw-r--r--meta/recipes-support/apr/apr/libtoolize_check.patch21
-rw-r--r--meta/recipes-support/apr/apr_1.7.0.bb128
-rw-r--r--meta/recipes-support/apr/apr_1.7.4.bb139
-rw-r--r--meta/recipes-support/argp-standalone/argp-standalone_1.3.bb33
-rw-r--r--meta/recipes-support/argp-standalone/argp-standalone_1.4.1.bb31
-rw-r--r--meta/recipes-support/argp-standalone/files/0001-throw-in-funcdef.patch84
-rw-r--r--meta/recipes-support/argp-standalone/files/0002-isprint.patch51
-rw-r--r--meta/recipes-support/argp-standalone/files/out_of_tree_build.patch2
-rw-r--r--meta/recipes-support/aspell/aspell_0.60.8.1.bb38
-rw-r--r--meta/recipes-support/aspell/aspell_0.60.8.bb41
-rw-r--r--meta/recipes-support/aspell/files/CVE-2019-25051.patch101
-rw-r--r--meta/recipes-support/atk/at-spi2-atk_2.38.0.bb20
-rw-r--r--meta/recipes-support/atk/at-spi2-core_2.44.1.bb39
-rw-r--r--meta/recipes-support/atk/at-spi2-core_2.52.0.bb48
-rw-r--r--meta/recipes-support/atk/atk_2.38.0.bb20
-rw-r--r--meta/recipes-support/attr/acl/0001-test-patch-out-failing-bits.patch5
-rw-r--r--meta/recipes-support/attr/acl/0001-tests-do-not-hardcode-the-build-path-into-a-helper-l.patch6
-rw-r--r--meta/recipes-support/attr/acl/run-ptest6
-rw-r--r--meta/recipes-support/attr/acl_2.3.1.bb78
-rw-r--r--meta/recipes-support/attr/acl_2.3.2.bb85
-rw-r--r--meta/recipes-support/attr/attr.inc9
-rw-r--r--meta/recipes-support/attr/attr/0001-attr.c-Include-libgen.h-for-posix-version-of-basenam.patch35
-rw-r--r--meta/recipes-support/attr/attr/run-ptest7
-rw-r--r--meta/recipes-support/attr/attr_2.5.1.bb5
-rw-r--r--meta/recipes-support/attr/attr_2.5.2.bb5
-rw-r--r--meta/recipes-support/bash-completion/bash-completion_2.11.bb40
-rw-r--r--meta/recipes-support/bash-completion/bash-completion_2.13.0.bb38
-rw-r--r--meta/recipes-support/bmap-tools/bmap-tools_git.bb26
-rw-r--r--meta/recipes-support/bmaptool/bmaptool_git.bb29
-rw-r--r--meta/recipes-support/boost/boost-1.79.0.inc20
-rw-r--r--meta/recipes-support/boost/boost-1.84.0.inc20
-rw-r--r--meta/recipes-support/boost/boost-build-native_1.84.0.bb28
-rw-r--r--meta/recipes-support/boost/boost-build-native_4.4.1.bb27
-rw-r--r--meta/recipes-support/boost/boost.inc2
-rw-r--r--meta/recipes-support/boost/boost/0001-Don-t-set-up-arch-instruction-set-flags-we-do-that-o.patch20
-rw-r--r--meta/recipes-support/boost/boost/0001-dont-setup-compiler-flags-m32-m64.patch15
-rw-r--r--meta/recipes-support/boost/boost/boost-CVE-2012-2677.patch112
-rw-r--r--meta/recipes-support/boost/boost/boost-math-disable-pch-for-gcc.patch9
-rw-r--r--meta/recipes-support/boost/boost_1.79.0.bb8
-rw-r--r--meta/recipes-support/boost/boost_1.84.0.bb7
-rw-r--r--meta/recipes-support/curl/curl/disable-tests41
-rw-r--r--meta/recipes-support/curl/curl/no-test-timeout.patch25
-rw-r--r--meta/recipes-support/curl/curl/run-ptest11
-rw-r--r--meta/recipes-support/curl/curl_7.83.1.bb93
-rw-r--r--meta/recipes-support/curl/curl_8.7.1.bb149
-rw-r--r--meta/recipes-support/db/db_5.3.28.bb1
-rw-r--r--meta/recipes-support/debianutils/debianutils_5.17.bb58
-rw-r--r--meta/recipes-support/debianutils/debianutils_5.7.bb60
-rw-r--r--meta/recipes-support/diffoscope/diffoscope_212.bb30
-rw-r--r--meta/recipes-support/diffoscope/diffoscope_260.bb43
-rw-r--r--meta/recipes-support/dos2unix/dos2unix_7.4.2.bb34
-rw-r--r--meta/recipes-support/dos2unix/dos2unix_7.5.2.bb34
-rw-r--r--meta/recipes-support/enchant/enchant2_2.3.3.bb31
-rw-r--r--meta/recipes-support/enchant/enchant2_2.6.9.bb29
-rw-r--r--meta/recipes-support/fribidi/fribidi_1.0.12.bb22
-rw-r--r--meta/recipes-support/fribidi/fribidi_1.0.13.bb20
-rwxr-xr-xmeta/recipes-support/gdbm/files/run-ptest2
-rw-r--r--meta/recipes-support/gmp/gmp/cve-2021-43618.patch27
-rw-r--r--meta/recipes-support/gmp/gmp_6.2.1.bb47
-rw-r--r--meta/recipes-support/gmp/gmp_6.3.0.bb53
-rw-r--r--meta/recipes-support/gnome-desktop-testing/gnome-desktop-testing_2021.1.bb2
-rw-r--r--meta/recipes-support/gnupg/gnupg/0001-configure.ac-use-a-custom-value-for-the-location-of-.patch6
-rw-r--r--meta/recipes-support/gnupg/gnupg/0002-use-pkgconfig-instead-of-npth-config.patch6
-rw-r--r--meta/recipes-support/gnupg/gnupg/0004-autogen.sh-fix-find-version-for-beta-checking.patch12
-rw-r--r--meta/recipes-support/gnupg/gnupg/relocate.patch18
-rw-r--r--meta/recipes-support/gnupg/gnupg_2.3.6.bb87
-rw-r--r--meta/recipes-support/gnupg/gnupg_2.4.5.bb90
-rw-r--r--meta/recipes-support/gnutls/gnutls/0001-Creating-.hmac-file-should-be-excuted-in-target-envi.patch26
-rw-r--r--meta/recipes-support/gnutls/gnutls/Add-ptest-support.patch57
-rw-r--r--meta/recipes-support/gnutls/gnutls/arm_eabi.patch7
-rw-r--r--meta/recipes-support/gnutls/gnutls/run-ptest100
-rw-r--r--meta/recipes-support/gnutls/gnutls_3.7.5.bb68
-rw-r--r--meta/recipes-support/gnutls/gnutls_3.8.4.bb100
-rw-r--r--meta/recipes-support/gnutls/libtasn1_4.18.0.bb23
-rw-r--r--meta/recipes-support/gnutls/libtasn1_4.19.0.bb23
-rw-r--r--meta/recipes-support/gpgme/gpgme/0001-Revert-build-Make-gpgme.m4-use-gpgrt-config-with-.pc.patch40
-rw-r--r--meta/recipes-support/gpgme/gpgme/0001-autogen.sh-remove-unknown-in-version.patch32
-rw-r--r--meta/recipes-support/gpgme/gpgme/0001-pkgconfig.patch91
-rw-r--r--meta/recipes-support/gpgme/gpgme/0001-posix-io.c-Use-off_t-instead-of-off64_t.patch42
-rw-r--r--meta/recipes-support/gpgme/gpgme/0001-use-closefrom-on-linux-and-glibc-2.34.patch10
-rw-r--r--meta/recipes-support/gpgme/gpgme/0002-gpgme-lang-python-gpg-error-config-should-not-be-use.patch34
-rw-r--r--meta/recipes-support/gpgme/gpgme/0003-Correctly-install-python-modules.patch12
-rw-r--r--meta/recipes-support/gpgme/gpgme/0005-gpgme-config-skip-all-lib-or-usr-lib-directories-in-.patch12
-rw-r--r--meta/recipes-support/gpgme/gpgme/0006-fix-build-path-issue.patch12
-rw-r--r--meta/recipes-support/gpgme/gpgme/0007-python-Add-variables-to-tests.patch52
-rw-r--r--meta/recipes-support/gpgme/gpgme/0008-do-not-auto-check-var-PYTHON.patch13
-rw-r--r--meta/recipes-support/gpgme/gpgme_1.17.1.bb87
-rw-r--r--meta/recipes-support/gpgme/gpgme_1.23.2.bb82
-rw-r--r--meta/recipes-support/icu/icu/0001-Disable-LDFLAGSICUDT-for-Linux.patch11
-rw-r--r--meta/recipes-support/icu/icu/0001-icu-Added-armeb-support.patch9
-rw-r--r--meta/recipes-support/icu/icu/fix-install-manx.patch7
-rw-r--r--meta/recipes-support/icu/icu_71.1.bb150
-rw-r--r--meta/recipes-support/icu/icu_74-2.bb149
-rw-r--r--meta/recipes-support/iso-codes/iso-codes_4.16.0.bb24
-rw-r--r--meta/recipes-support/iso-codes/iso-codes_4.9.0.bb22
-rw-r--r--meta/recipes-support/libassuan/libassuan_2.5.5.bb38
-rw-r--r--meta/recipes-support/libassuan/libassuan_2.5.7.bb38
-rw-r--r--meta/recipes-support/libatomic-ops/libatomic-ops_7.6.12.bb22
-rw-r--r--meta/recipes-support/libatomic-ops/libatomic-ops_7.8.2.bb22
-rw-r--r--meta/recipes-support/libbsd/libbsd_0.11.6.bb43
-rw-r--r--meta/recipes-support/libbsd/libbsd_0.12.2.bb47
-rw-r--r--meta/recipes-support/libcap-ng/files/fix-issues-with-swig-4-2.patch32
-rw-r--r--meta/recipes-support/libcap-ng/libcap-ng-python_0.8.3.bb28
-rw-r--r--meta/recipes-support/libcap-ng/libcap-ng-python_0.8.4.bb29
-rw-r--r--meta/recipes-support/libcap-ng/libcap-ng.inc6
-rw-r--r--meta/recipes-support/libcap-ng/libcap-ng_0.8.3.bb17
-rw-r--r--meta/recipes-support/libcap-ng/libcap-ng_0.8.4.bb17
-rw-r--r--meta/recipes-support/libcap/files/0001-nativesdk-libcap-Raise-the-size-of-arrays-containing.patch2
-rw-r--r--meta/recipes-support/libcap/libcap_2.64.bb80
-rw-r--r--meta/recipes-support/libcap/libcap_2.69.bb79
-rw-r--r--meta/recipes-support/libcheck/libcheck/automake-output.patch82
-rw-r--r--meta/recipes-support/libcheck/libcheck_0.15.2.bb11
-rw-r--r--meta/recipes-support/libcroco/files/CVE-2020-12825.patch192
-rw-r--r--meta/recipes-support/libcroco/libcroco_0.6.13.bb26
-rw-r--r--meta/recipes-support/libdaemon/libdaemon_0.14.bb1
-rw-r--r--meta/recipes-support/libevdev/libevdev_1.12.1.bb17
-rw-r--r--meta/recipes-support/libevdev/libevdev_1.13.1.bb17
-rw-r--r--meta/recipes-support/libevent/libevent/0004-test-retriable-tests-are-marked-failed-only-when-all-a.patch2
-rw-r--r--meta/recipes-support/libevent/libevent_2.1.12.bb9
-rw-r--r--meta/recipes-support/libexif/libexif/0001-Add-serial-tests-config-needed-by-ptest.patch26
-rw-r--r--meta/recipes-support/libexif/libexif/run-ptest3
-rw-r--r--meta/recipes-support/libexif/libexif_0.6.24.bb37
-rw-r--r--meta/recipes-support/libffi/libffi/0001-arm-sysv-reverted-clang-VFP-mitigation.patch104
-rw-r--r--meta/recipes-support/libffi/libffi/not-win32.patch9
-rw-r--r--meta/recipes-support/libffi/libffi_3.4.2.bb36
-rw-r--r--meta/recipes-support/libffi/libffi_3.4.6.bb32
-rw-r--r--meta/recipes-support/libgcrypt/files/0001-libgcrypt-fix-m4-file-for-oe-core.patch6
-rw-r--r--meta/recipes-support/libgcrypt/files/0002-libgcrypt-fix-building-error-with-O2-in-sysroot-path.patch10
-rw-r--r--meta/recipes-support/libgcrypt/files/0003-tests-bench-slope.c-workaround-ICE-failure-on-mips-w.patch79
-rw-r--r--meta/recipes-support/libgcrypt/files/no-bench-slope.patch20
-rw-r--r--meta/recipes-support/libgcrypt/files/no-native-gpg-error.patch18
-rw-r--r--meta/recipes-support/libgcrypt/files/run-ptest8
-rw-r--r--meta/recipes-support/libgcrypt/libgcrypt_1.10.1.bb75
-rw-r--r--meta/recipes-support/libgcrypt/libgcrypt_1.10.3.bb55
-rw-r--r--meta/recipes-support/libgit2/libgit2_1.4.3.bb22
-rw-r--r--meta/recipes-support/libgit2/libgit2_1.7.2.bb21
-rw-r--r--meta/recipes-support/libgpg-error/libgpg-error/0001-Do-not-fail-when-testing-config-scripts.patch7
-rw-r--r--meta/recipes-support/libgpg-error/libgpg-error/pkgconfig.patch55
-rw-r--r--meta/recipes-support/libgpg-error/libgpg-error_1.45.bb53
-rw-r--r--meta/recipes-support/libgpg-error/libgpg-error_1.48.bb53
-rw-r--r--meta/recipes-support/libical/libical_3.0.14.bb55
-rw-r--r--meta/recipes-support/libical/libical_3.0.18.bb54
-rw-r--r--meta/recipes-support/libjitterentropy/libjitterentropy_3.4.0.bb30
-rw-r--r--meta/recipes-support/libjitterentropy/libjitterentropy_3.4.1.bb27
-rw-r--r--meta/recipes-support/libksba/libksba/ksba-add-pkgconfig-support.patch6
-rw-r--r--meta/recipes-support/libksba/libksba_1.6.0.bb34
-rw-r--r--meta/recipes-support/libksba/libksba_1.6.6.bb34
-rw-r--r--meta/recipes-support/libmd/libmd_1.0.4.bb16
-rw-r--r--meta/recipes-support/libmd/libmd_1.1.0.bb16
-rw-r--r--meta/recipes-support/libmicrohttpd/libmicrohttpd_0.9.75.bb30
-rw-r--r--meta/recipes-support/libmicrohttpd/libmicrohttpd_1.0.1.bb27
-rw-r--r--meta/recipes-support/libmpc/libmpc_1.2.1.bb12
-rw-r--r--meta/recipes-support/libmpc/libmpc_1.3.1.bb12
-rw-r--r--meta/recipes-support/libnl/files/enable-serial-tests.patch29
-rw-r--r--meta/recipes-support/libnl/files/fa7f97f8982544c4fcb403893bae6701230d5165.patch48
-rwxr-xr-xmeta/recipes-support/libnl/files/run-ptest17
-rw-r--r--meta/recipes-support/libnl/libnl_3.6.0.bb78
-rw-r--r--meta/recipes-support/libnl/libnl_3.9.0.bb64
-rw-r--r--meta/recipes-support/libpcre/libpcre2_10.40.bb54
-rw-r--r--meta/recipes-support/libpcre/libpcre2_10.43.bb55
-rw-r--r--meta/recipes-support/libproxy/libproxy/determinism.patch26
-rw-r--r--meta/recipes-support/libproxy/libproxy_0.4.17.bb42
-rw-r--r--meta/recipes-support/libproxy/libproxy_0.5.4.bb28
-rw-r--r--meta/recipes-support/libpsl/libpsl_0.21.1.bb25
-rw-r--r--meta/recipes-support/libpsl/libpsl_0.21.5.bb26
-rw-r--r--meta/recipes-support/libseccomp/files/run-ptest3
-rw-r--r--meta/recipes-support/libseccomp/libseccomp_2.5.4.bb54
-rw-r--r--meta/recipes-support/libseccomp/libseccomp_2.5.5.bb62
-rw-r--r--meta/recipes-support/libsoup/libsoup-2.4/0001-Fix-build-with-libxml2-2.12.0-and-clang-17.patch44
-rw-r--r--meta/recipes-support/libsoup/libsoup-2.4_2.74.2.bb46
-rw-r--r--meta/recipes-support/libsoup/libsoup-2.4_2.74.3.bb59
-rw-r--r--meta/recipes-support/libsoup/libsoup_3.0.6.bb44
-rw-r--r--meta/recipes-support/libsoup/libsoup_3.4.4.bb59
-rw-r--r--meta/recipes-support/libssh2/files/0001-Don-t-let-host-enviroment-to-decide-if-a-test-is-bui.patch44
-rw-r--r--meta/recipes-support/libssh2/files/run-ptest9
-rw-r--r--meta/recipes-support/libssh2/libssh2/CVE-2023-48795.patch466
-rw-r--r--meta/recipes-support/libssh2/libssh2/run-ptest8
-rw-r--r--meta/recipes-support/libssh2/libssh2_1.10.0.bb50
-rw-r--r--meta/recipes-support/libssh2/libssh2_1.11.0.bb53
-rw-r--r--meta/recipes-support/libunistring/libunistring_1.0.bb27
-rw-r--r--meta/recipes-support/libunistring/libunistring_1.2.bb27
-rw-r--r--meta/recipes-support/libunwind/libunwind.inc23
-rw-r--r--meta/recipes-support/libunwind/libunwind/0001-src-Gtrace-remove-unguarded-print-calls.patch52
-rw-r--r--meta/recipes-support/libunwind/libunwind/0003-x86-Stub-out-x86_local_resume.patch55
-rw-r--r--meta/recipes-support/libunwind/libunwind/0004-Fix-build-on-mips-musl.patch84
-rw-r--r--meta/recipes-support/libunwind/libunwind/0006-Fix-for-X32.patch29
-rw-r--r--meta/recipes-support/libunwind/libunwind/mips-byte-order.patch35
-rw-r--r--meta/recipes-support/libunwind/libunwind/mips-coredump-register.patch100
-rw-r--r--meta/recipes-support/libunwind/libunwind/musl-header-conflict.patch44
-rw-r--r--meta/recipes-support/libunwind/libunwind_1.6.2.bb34
-rw-r--r--meta/recipes-support/liburcu/liburcu_0.13.1.bb24
-rw-r--r--meta/recipes-support/liburcu/liburcu_0.14.0.bb24
-rw-r--r--meta/recipes-support/libusb/libusb1_1.0.26.bb47
-rw-r--r--meta/recipes-support/libusb/libusb1_1.0.27.bb48
-rw-r--r--meta/recipes-support/libxslt/libxslt_1.1.35.bb56
-rw-r--r--meta/recipes-support/libxslt/libxslt_1.1.39.bb58
-rw-r--r--meta/recipes-support/lz4/files/CVE-2021-3520.patch27
-rw-r--r--meta/recipes-support/lz4/files/run-ptest17
-rw-r--r--meta/recipes-support/lz4/lz4_1.9.3.bb31
-rw-r--r--meta/recipes-support/lz4/lz4_1.9.4.bb46
-rw-r--r--meta/recipes-support/lzo/lzo/0001-Use-memcpy-instead-of-reinventing-it.patch10
-rw-r--r--meta/recipes-support/mpfr/mpfr_4.1.0.bb19
-rw-r--r--meta/recipes-support/mpfr/mpfr_4.2.1.bb19
-rw-r--r--meta/recipes-support/nettle/nettle/dlopen-test.patch29
-rw-r--r--meta/recipes-support/nettle/nettle/run-ptest40
-rw-r--r--meta/recipes-support/nettle/nettle_3.7.3.bb57
-rw-r--r--meta/recipes-support/nettle/nettle_3.9.1.bb61
-rw-r--r--meta/recipes-support/nghttp2/nghttp2/0001-fetch-ocsp-response-use-python3.patch27
-rw-r--r--meta/recipes-support/nghttp2/nghttp2_1.47.0.bb35
-rw-r--r--meta/recipes-support/nghttp2/nghttp2_1.61.0.bb28
-rw-r--r--meta/recipes-support/npth/npth/0001-Revert-Fix-problem-with-regression-tests-on-recent-g.patch43
-rw-r--r--meta/recipes-support/npth/npth/musl-fix.patch37
-rw-r--r--meta/recipes-support/npth/npth/pkgconfig.patch74
-rw-r--r--meta/recipes-support/npth/npth_1.6.bb29
-rw-r--r--meta/recipes-support/npth/npth_1.7.bb25
-rw-r--r--meta/recipes-support/numactl/numactl/0001-define-run-test-target.patch13
-rw-r--r--meta/recipes-support/numactl/numactl/Fix-the-test-output-format.patch17
-rwxr-xr-xmeta/recipes-support/numactl/numactl/run-ptest6
-rw-r--r--meta/recipes-support/numactl/numactl_git.bb6
-rw-r--r--meta/recipes-support/p11-kit/files/fix-parallel-build-failures.patch33
-rw-r--r--meta/recipes-support/p11-kit/p11-kit_0.24.1.bb32
-rw-r--r--meta/recipes-support/p11-kit/p11-kit_0.25.3.bb34
-rw-r--r--meta/recipes-support/pinentry/pinentry-1.2.1/gpg-error_pkconf.patch (renamed from meta/recipes-support/pinentry/pinentry-1.2.0/gpg-error_pkconf.patch)13
-rw-r--r--meta/recipes-support/pinentry/pinentry-1.2.1/libassuan_pkgconf.patch (renamed from meta/recipes-support/pinentry/pinentry-1.2.0/libassuan_pkgconf.patch)0
-rw-r--r--meta/recipes-support/pinentry/pinentry_1.2.0.bb36
-rw-r--r--meta/recipes-support/pinentry/pinentry_1.2.1.bb38
-rw-r--r--meta/recipes-support/popt/popt/0001-popt-test-output-format-for-ptest.patch68
-rw-r--r--meta/recipes-support/popt/popt_1.18.bb33
-rw-r--r--meta/recipes-support/popt/popt_1.19.bb31
-rw-r--r--meta/recipes-support/ptest-runner/ptest-runner_2.4.2.bb34
-rw-r--r--meta/recipes-support/ptest-runner/ptest-runner_2.4.3.bb39
-rw-r--r--meta/recipes-support/re2c/re2c_3.0.bb16
-rw-r--r--meta/recipes-support/re2c/re2c_3.1.bb15
-rw-r--r--meta/recipes-support/rng-tools/rng-tools/rng-tools.service32
-rw-r--r--meta/recipes-support/rng-tools/rng-tools/rngd.service33
-rw-r--r--meta/recipes-support/rng-tools/rng-tools_6.15.bb61
-rw-r--r--meta/recipes-support/rng-tools/rng-tools_6.16.bb69
-rw-r--r--meta/recipes-support/serf/serf/0001-Fix-syntax-of-a-print-in-the-scons-file-to-unbreak-b.patch29
-rw-r--r--meta/recipes-support/serf/serf/0001-buckets-ssl_buckets.c-do-not-use-ERR_GET_FUNC.patch28
-rw-r--r--meta/recipes-support/serf/serf/0004-Follow-up-to-r1811083-fix-building-with-scons-3.0.0-.patch29
-rw-r--r--meta/recipes-support/serf/serf/SConstruct.stop.creating.directories.without.sandbox-install.prefix.patch2
-rw-r--r--meta/recipes-support/serf/serf_1.3.10.bb40
-rw-r--r--meta/recipes-support/serf/serf_1.3.9.bb44
-rw-r--r--meta/recipes-support/shared-mime-info/shared-mime-info/0001-Fix-build-with-libxml2-2.12.0-and-clang-17.patch26
-rw-r--r--meta/recipes-support/shared-mime-info/shared-mime-info/0002-Handle-build-with-older-versions-of-GCC.patch54
-rw-r--r--meta/recipes-support/shared-mime-info/shared-mime-info_2.4.bb31
-rw-r--r--meta/recipes-support/shared-mime-info/shared-mime-info_git.bb29
-rw-r--r--meta/recipes-support/sqlite/sqlite3_3.38.5.bb14
-rw-r--r--meta/recipes-support/sqlite/sqlite3_3.45.1.bb8
-rw-r--r--meta/recipes-support/taglib/taglib_1.12.bb42
-rw-r--r--meta/recipes-support/taglib/taglib_2.0.bb42
-rw-r--r--meta/recipes-support/utfcpp/utfcpp_4.0.5.bb16
-rw-r--r--meta/recipes-support/vim/files/no-path-adjust.patch35
-rw-r--r--meta/recipes-support/vim/files/racefix.patch33
-rw-r--r--meta/recipes-support/vim/vim-tiny_9.1.bb (renamed from meta/recipes-support/vim/vim-tiny_8.2.bb)0
-rw-r--r--meta/recipes-support/vim/vim.inc36
-rw-r--r--meta/recipes-support/vim/vim_8.2.bb19
-rw-r--r--meta/recipes-support/vim/vim_9.1.bb23
-rw-r--r--meta/recipes-support/vte/vte/0001-Add-W_EXITCODE-macro-for-non-glibc-systems.patch35
-rw-r--r--meta/recipes-support/vte/vte/0001-Makefile.docs-correctly-substitute-gtkdoc-qemu-wrapp.patch24
-rw-r--r--meta/recipes-support/vte/vte_0.68.0.bb62
-rw-r--r--meta/recipes-support/vte/vte_0.74.2.bb56
-rw-r--r--meta/recipes-support/xxhash/xxhash_0.8.1.bb25
-rw-r--r--meta/recipes-support/xxhash/xxhash_0.8.2.bb27
-rw-r--r--meta/site/arc-common4
-rw-r--r--meta/site/arm-3225
-rw-r--r--meta/site/arm-6424
-rw-r--r--meta/site/arm-common19
-rw-r--r--meta/site/arm-darwin2
-rw-r--r--meta/site/arm-darwin82
-rw-r--r--meta/site/arm-linux2
-rw-r--r--meta/site/armeb-linux2
-rw-r--r--meta/site/common-glibc14
-rw-r--r--meta/site/common-musl11
-rw-r--r--meta/site/ix86-common32
-rw-r--r--meta/site/loongarch64-linux7
-rw-r--r--meta/site/microblaze-linux23
-rw-r--r--meta/site/mips-common17
-rw-r--r--meta/site/mips-linux18
-rw-r--r--meta/site/mips64-linux22
-rw-r--r--meta/site/mips64el-linux22
-rw-r--r--meta/site/mipsel-linux18
-rw-r--r--meta/site/mipsisa32r6-linux18
-rw-r--r--meta/site/mipsisa32r6el-linux18
-rw-r--r--meta/site/mipsisa64r6-linux22
-rw-r--r--meta/site/mipsisa64r6el-linux22
-rw-r--r--meta/site/nios2-linux35
-rw-r--r--meta/site/powerpc-common14
-rw-r--r--meta/site/powerpc-linux5
-rw-r--r--meta/site/powerpc32-linux27
-rw-r--r--meta/site/riscv32-linux4
-rw-r--r--meta/site/riscv64-linux4
-rw-r--r--meta/site/sh-common33
-rw-r--r--meta/site/sparc-linux6
-rw-r--r--meta/site/x32-linux4
-rw-r--r--meta/site/x86_64-linux32
-rwxr-xr-xoe-init-build-env6
-rw-r--r--scripts/.oe-layers.json7
-rwxr-xr-xscripts/autobuilder-worker-prereq-tests2
-rwxr-xr-xscripts/bblock184
-rwxr-xr-xscripts/bitbake-prserv-tool9
-rwxr-xr-xscripts/bitbake-whatchanged320
-rwxr-xr-xscripts/buildstats-diff2
-rwxr-xr-xscripts/buildstats-summary126
-rwxr-xr-xscripts/combo-layer38
-rwxr-xr-xscripts/combo-layer-hook-default.sh2
-rwxr-xr-xscripts/contrib/bb-perf/bb-matrix-plot.sh4
-rwxr-xr-xscripts/contrib/bbvars.py6
-rwxr-xr-xscripts/contrib/convert-overrides.py111
-rwxr-xr-xscripts/contrib/ddimage2
-rwxr-xr-xscripts/contrib/dialog-power-control2
-rwxr-xr-xscripts/contrib/documentation-audit.sh2
-rwxr-xr-xscripts/contrib/image-manifest2
-rwxr-xr-xscripts/contrib/patchreview.py67
-rwxr-xr-xscripts/contrib/test_build_time_worker.sh2
-rwxr-xr-xscripts/contrib/verify-homepage.py2
-rwxr-xr-xscripts/cp-noerror2
-rwxr-xr-xscripts/create-pull-request9
-rwxr-xr-xscripts/devtool32
l---------scripts/esdk-tools/devtool1
l---------scripts/esdk-tools/oe-find-native-sysroot1
l---------scripts/esdk-tools/recipetool1
l---------scripts/esdk-tools/runqemu1
l---------scripts/esdk-tools/runqemu-addptable2image1
l---------scripts/esdk-tools/runqemu-export-rootfs1
l---------scripts/esdk-tools/runqemu-extract-sdk1
l---------scripts/esdk-tools/runqemu-gen-tapdevs1
l---------scripts/esdk-tools/runqemu-ifdown1
l---------scripts/esdk-tools/runqemu-ifup1
l---------scripts/esdk-tools/wic1
-rwxr-xr-xscripts/gen-lockedsig-cache3
-rwxr-xr-xscripts/git4
-rwxr-xr-xscripts/install-buildtools20
-rw-r--r--scripts/lib/argparse_oe.py2
-rw-r--r--scripts/lib/buildstats.py38
-rw-r--r--scripts/lib/checklayer/__init__.py19
-rw-r--r--scripts/lib/checklayer/cases/bsp.py2
-rw-r--r--scripts/lib/checklayer/cases/common.py18
-rw-r--r--scripts/lib/checklayer/cases/distro.py2
-rw-r--r--scripts/lib/devtool/__init__.py27
-rw-r--r--scripts/lib/devtool/build_sdk.py2
-rw-r--r--scripts/lib/devtool/deploy.py240
-rw-r--r--scripts/lib/devtool/ide_plugins/__init__.py282
-rw-r--r--scripts/lib/devtool/ide_plugins/ide_code.py463
-rw-r--r--scripts/lib/devtool/ide_plugins/ide_none.py53
-rwxr-xr-xscripts/lib/devtool/ide_sdk.py1070
-rw-r--r--scripts/lib/devtool/menuconfig.py4
-rw-r--r--scripts/lib/devtool/sdk.py3
-rw-r--r--scripts/lib/devtool/standard.py467
-rw-r--r--scripts/lib/devtool/upgrade.py171
-rw-r--r--scripts/lib/recipetool/append.py66
-rw-r--r--scripts/lib/recipetool/create.py167
-rw-r--r--scripts/lib/recipetool/create_buildsys.py40
-rw-r--r--scripts/lib/recipetool/create_buildsys_python.py1062
-rw-r--r--scripts/lib/recipetool/create_go.py779
-rw-r--r--scripts/lib/recipetool/create_npm.py43
-rw-r--r--scripts/lib/recipetool/setvar.py1
-rw-r--r--scripts/lib/resulttool/log.py13
-rw-r--r--scripts/lib/resulttool/regression.py281
-rw-r--r--scripts/lib/resulttool/report.py5
-rw-r--r--scripts/lib/resulttool/resultutils.py8
-rw-r--r--scripts/lib/scriptutils.py16
-rw-r--r--scripts/lib/wic/canned-wks/efi-bootdisk.wks.in2
-rw-r--r--scripts/lib/wic/canned-wks/qemuloongarch.wks3
-rw-r--r--scripts/lib/wic/canned-wks/qemux86-directdisk.wks2
-rw-r--r--scripts/lib/wic/filemap.py7
-rw-r--r--scripts/lib/wic/help.py2
-rw-r--r--scripts/lib/wic/ksparser.py6
-rw-r--r--scripts/lib/wic/misc.py9
-rw-r--r--scripts/lib/wic/partition.py47
-rw-r--r--scripts/lib/wic/plugins/imager/direct.py122
-rw-r--r--scripts/lib/wic/plugins/source/bootimg-efi.py175
-rw-r--r--scripts/lib/wic/plugins/source/bootimg-partition.py9
-rw-r--r--scripts/lib/wic/plugins/source/bootimg-pcbios.py6
-rw-r--r--scripts/lib/wic/plugins/source/empty.py59
-rw-r--r--scripts/lib/wic/plugins/source/isoimage-isohybrid.py2
-rw-r--r--scripts/lib/wic/plugins/source/rawcopy.py9
-rw-r--r--scripts/lib/wic/plugins/source/rootfs.py4
-rwxr-xr-xscripts/nativesdk-intercept/chgrp5
-rwxr-xr-xscripts/nativesdk-intercept/chown5
-rwxr-xr-xscripts/oe-buildenv-internal25
-rwxr-xr-xscripts/oe-check-sstate10
-rwxr-xr-xscripts/oe-debuginfod2
-rwxr-xr-xscripts/oe-depends-dot34
-rwxr-xr-xscripts/oe-find-native-sysroot15
-rwxr-xr-xscripts/oe-gnome-terminal-phonehome2
-rwxr-xr-xscripts/oe-pkgdata-browser5
-rwxr-xr-xscripts/oe-pkgdata-util2
-rwxr-xr-xscripts/oe-pylint2
-rwxr-xr-xscripts/oe-setup-build122
-rwxr-xr-xscripts/oe-setup-builddir90
-rwxr-xr-xscripts/oe-setup-layers146
-rwxr-xr-xscripts/oe-setup-vscode93
-rwxr-xr-xscripts/oe-time-dd-test.sh4
-rwxr-xr-xscripts/oe-trim-schemas2
-rwxr-xr-xscripts/oepydevshell-internal.py2
-rwxr-xr-xscripts/opkg-query-helper.py2
-rwxr-xr-xscripts/patchtest232
-rwxr-xr-xscripts/patchtest-get-branch81
-rwxr-xr-xscripts/patchtest-get-series115
-rwxr-xr-xscripts/patchtest-send-results110
-rwxr-xr-xscripts/patchtest-setup-sharedir83
-rw-r--r--scripts/patchtest.README153
-rw-r--r--scripts/postinst-intercepts/update_gtk_icon_cache6
-rw-r--r--scripts/postinst-intercepts/update_mandb18
-rw-r--r--scripts/pybootchartgui/pybootchartgui/draw.py151
-rw-r--r--scripts/pybootchartgui/pybootchartgui/parsing.py35
-rw-r--r--scripts/pybootchartgui/pybootchartgui/samples.py25
-rwxr-xr-xscripts/pythondeps2
-rwxr-xr-xscripts/relocate_sdk.py10
-rwxr-xr-xscripts/rpm2cpio.sh30
-rwxr-xr-xscripts/runqemu383
-rwxr-xr-xscripts/runqemu-export-rootfs25
-rwxr-xr-xscripts/runqemu-extract-sdk2
-rwxr-xr-xscripts/runqemu-gen-tapdevs120
-rwxr-xr-xscripts/runqemu-ifdown41
-rwxr-xr-xscripts/runqemu-ifup65
-rw-r--r--scripts/runqemu.README16
-rwxr-xr-xscripts/sstate-cache-management.py329
-rwxr-xr-xscripts/sstate-cache-management.sh458
-rwxr-xr-xscripts/sstate-diff-machines.sh2
-rwxr-xr-xscripts/sstate-sysroot-cruft.sh2
-rwxr-xr-xscripts/sysroot-relativelinks.py2
-rwxr-xr-xscripts/task-time2
-rwxr-xr-xscripts/verify-bashisms2
-rwxr-xr-xscripts/wic2
-rwxr-xr-xscripts/yocto-check-layer5
-rwxr-xr-xscripts/yocto_testresults_query.py131
4129 files changed, 198502 insertions, 140548 deletions
diff --git a/.gitignore b/.gitignore
index 4bfe022dcd..ec0447356e 100644
--- a/.gitignore
+++ b/.gitignore
@@ -31,4 +31,9 @@ pull-*/
bitbake/lib/toaster/contrib/tts/backlog.txt
bitbake/lib/toaster/contrib/tts/log/*
bitbake/lib/toaster/contrib/tts/.cache/*
-bitbake/lib/bb/tests/runqueue-tests/bitbake-cookerdaemon.log \ No newline at end of file
+bitbake/lib/bb/tests/runqueue-tests/bitbake-cookerdaemon.log
+_toaster_clones/
+downloads/
+sstate-cache/
+toaster.sqlite
+.vscode/
diff --git a/.templateconf b/.templateconf
index 0fe6f82503..faf0348077 100644
--- a/.templateconf
+++ b/.templateconf
@@ -1,2 +1,2 @@
# Template settings
-TEMPLATECONF=${TEMPLATECONF:-meta-poky/conf}
+TEMPLATECONF=${TEMPLATECONF:-meta-poky/conf/templates/default}
diff --git a/Makefile b/Makefile
deleted file mode 100644
index c9518558bb..0000000000
--- a/Makefile
+++ /dev/null
@@ -1,35 +0,0 @@
-# Minimal makefile for Sphinx documentation
-#
-
-# You can set these variables from the command line, and also
-# from the environment for the first two.
-SPHINXOPTS ?=
-SPHINXBUILD ?= sphinx-build
-SOURCEDIR = .
-BUILDDIR = _build
-DESTDIR = final
-
-ifeq ($(shell if which $(SPHINXBUILD) >/dev/null 2>&1; then echo 1; else echo 0; fi),0)
-$(error "The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed")
-endif
-
-# Put it first so that "make" without argument is like "make help".
-help:
- @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
-
-.PHONY: help Makefile.sphinx clean publish
-
-publish: Makefile.sphinx html singlehtml
- rm -rf $(BUILDDIR)/$(DESTDIR)/
- mkdir -p $(BUILDDIR)/$(DESTDIR)/
- cp -r $(BUILDDIR)/html/* $(BUILDDIR)/$(DESTDIR)/
- cp $(BUILDDIR)/singlehtml/index.html $(BUILDDIR)/$(DESTDIR)/singleindex.html
- sed -i -e 's@index.html#@singleindex.html#@g' $(BUILDDIR)/$(DESTDIR)/singleindex.html
-
-clean:
- @rm -rf $(BUILDDIR)
-
-# Catch-all target: route all unknown targets to Sphinx using the new
-# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
-%: Makefile.sphinx
- @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
diff --git a/README.OE-Core.md b/README.OE-Core.md
index 2f2127fb03..687c58e410 100644
--- a/README.OE-Core.md
+++ b/README.OE-Core.md
@@ -16,9 +16,13 @@ which can be found at:
Contributing
------------
-Please refer to
-https://www.openembedded.org/wiki/How_to_submit_a_patch_to_OpenEmbedded
-for guidelines on how to submit patches.
+Please refer to our contributor guide here: https://docs.yoctoproject.org/dev/contributor-guide/
+for full details on how to submit changes.
+
+As a quick guide, patches should be sent to openembedded-core@lists.openembedded.org
+The git command to do that would be:
+
+ git send-email -M -1 --to openembedded-core@lists.openembedded.org
Mailing list:
diff --git a/SECURITY.md b/SECURITY.md
new file mode 100644
index 0000000000..1b63da4f69
--- /dev/null
+++ b/SECURITY.md
@@ -0,0 +1,22 @@
+How to Report a Potential Vulnerability?
+========================================
+
+If you would like to report a public issue (for example, one with a released
+CVE number), please report it using the
+[https://bugzilla.yoctoproject.org/enter_bug.cgi?product=Security Security Bugzilla]
+
+If you are dealing with a not-yet released or urgent issue, please send a
+message to security AT yoctoproject DOT org, including as many details as
+possible: the layer or software module affected, the recipe and its version,
+and any example code, if available.
+
+Branches maintained with security fixes
+---------------------------------------
+
+See [https://wiki.yoctoproject.org/wiki/Stable_Release_and_LTS Stable release and LTS]
+for detailed info regarding the policies and maintenance of Stable branches.
+
+The [https://wiki.yoctoproject.org/wiki/Releases Release page] contains a list of all
+releases of the Yocto Project. Versions in grey are no longer actively maintained with
+security patches, but well-tested patches may still be accepted for them for
+significant issues.
diff --git a/bitbake/README b/bitbake/README
index 80a97118b9..e9f4c858ee 100644
--- a/bitbake/README
+++ b/bitbake/README
@@ -13,19 +13,24 @@ Bitbake plain documentation can be found under the doc directory or its integrat
html version at the Yocto Project website:
https://docs.yoctoproject.org
+Bitbake requires Python version 3.8 or newer.
+
Contributing
------------
-Please refer to
-https://www.openembedded.org/wiki/How_to_submit_a_patch_to_OpenEmbedded
-for guidelines on how to submit patches, just note that the latter documentation is intended
-for OpenEmbedded (and its core) not bitbake patches (bitbake-devel@lists.openembedded.org)
-but in general main guidelines apply. Once the commit(s) have been created, the way to send
-the patch is through git-send-email. For example, to send the last commit (HEAD) on current
-branch, type:
+Please refer to our contributor guide here: https://docs.yoctoproject.org/contributor-guide/
+for full details on how to submit changes.
+
+As a quick guide, patches should be sent to bitbake-devel@lists.openembedded.org
+The git command to do that would be:
git send-email -M -1 --to bitbake-devel@lists.openembedded.org
+If you're sending a patch related to the BitBake manual, make sure you copy
+the Yocto Project documentation mailing list:
+
+ git send-email -M -1 --to bitbake-devel@lists.openembedded.org --cc docs@lists.yoctoproject.org
+
Mailing list:
https://lists.openembedded.org/g/bitbake-devel
@@ -34,10 +39,25 @@ Source code:
https://git.openembedded.org/bitbake/
-Testing:
+Testing
+-------
Bitbake has a testsuite located in lib/bb/tests/ whichs aim to try and prevent regressions.
You can run this with "bitbake-selftest". In particular the fetcher is well covered since
it has so many corner cases. The datastore has many tests too. Testing with the testsuite is
recommended before submitting patches, particularly to the fetcher and datastore. We also
appreciate new test cases and may require them for more obscure issues.
+
+To run the tests "zstd" and "git" must be installed.
+
+The assumption is made that this testsuite is run from an initialized OpenEmbedded build
+environment (i.e. `source oe-init-build-env` is used). If this is not the case, run the
+testsuite as follows:
+
+ export PATH=$(pwd)/bin:$PATH
+ bin/bitbake-selftest
+
+The testsuite can alternatively be executed using pytest, e.g. obtained from PyPI (in this
+case, the PATH is configured automatically):
+
+ pytest
diff --git a/bitbake/SECURITY.md b/bitbake/SECURITY.md
new file mode 100644
index 0000000000..7d2ce1f631
--- /dev/null
+++ b/bitbake/SECURITY.md
@@ -0,0 +1,24 @@
+How to Report a Potential Vulnerability?
+========================================
+
+If you would like to report a public issue (for example, one with a released
+CVE number), please report it using the
+[https://bugzilla.yoctoproject.org/enter_bug.cgi?product=Security Security Bugzilla].
+If you have a patch ready, submit it following the same procedure as any other
+patch as described in README.md.
+
+If you are dealing with a not-yet released or urgent issue, please send a
+message to security AT yoctoproject DOT org, including as many details as
+possible: the layer or software module affected, the recipe and its version,
+and any example code, if available.
+
+Branches maintained with security fixes
+---------------------------------------
+
+See [https://wiki.yoctoproject.org/wiki/Stable_Release_and_LTS Stable release and LTS]
+for detailed info regarding the policies and maintenance of Stable branches.
+
+The [https://wiki.yoctoproject.org/wiki/Releases Release page] contains a list of all
+releases of the Yocto Project. Versions in grey are no longer actively maintained with
+security patches, but well-tested patches may still be accepted for them for
+significant issues.
diff --git a/bitbake/bin/bitbake b/bitbake/bin/bitbake
index 042c91807d..382983e087 100755
--- a/bitbake/bin/bitbake
+++ b/bitbake/bin/bitbake
@@ -25,10 +25,9 @@ except RuntimeError as exc:
from bb import cookerdata
from bb.main import bitbake_main, BitBakeConfigParameters, BBMainException
-if sys.getfilesystemencoding() != "utf-8":
- sys.exit("Please use a locale setting which supports UTF-8 (such as LANG=en_US.UTF-8).\nPython can't change the filesystem locale after loading so we need a UTF-8 when Python starts or things won't work.")
+bb.utils.check_system_locale()
-__version__ = "2.0.0"
+__version__ = "2.9.0"
if __name__ == "__main__":
if __version__ != bb.__version__:
diff --git a/bitbake/bin/bitbake-diffsigs b/bitbake/bin/bitbake-diffsigs
index fe0f33eea1..8202c78623 100755
--- a/bitbake/bin/bitbake-diffsigs
+++ b/bitbake/bin/bitbake-diffsigs
@@ -72,13 +72,16 @@ def find_siginfo_task(bbhandler, pn, taskname, sig1=None, sig2=None):
elif sig2 not in sigfiles:
logger.error('No sigdata files found matching %s %s with signature %s' % (pn, taskname, sig2))
sys.exit(1)
- latestfiles = [sigfiles[sig1], sigfiles[sig2]]
else:
- filedates = find_siginfo(bbhandler, pn, taskname)
- latestfiles = sorted(filedates.keys(), key=lambda f: filedates[f])[-2:]
- if not latestfiles:
+ sigfiles = find_siginfo(bbhandler, pn, taskname)
+ latestsigs = sorted(sigfiles.keys(), key=lambda h: sigfiles[h]['time'])[-2:]
+ if not latestsigs:
logger.error('No sigdata files found matching %s %s' % (pn, taskname))
sys.exit(1)
+ sig1 = latestsigs[0]
+ sig2 = latestsigs[1]
+
+ latestfiles = [sigfiles[sig1]['path'], sigfiles[sig2]['path']]
return latestfiles
@@ -96,7 +99,7 @@ def recursecb(key, hash1, hash2):
elif hash2 not in hashfiles:
recout.append("Unable to find matching sigdata for %s with hash %s" % (key, hash2))
else:
- out2 = bb.siggen.compare_sigfiles(hashfiles[hash1], hashfiles[hash2], recursecb, color=color)
+ out2 = bb.siggen.compare_sigfiles(hashfiles[hash1]['path'], hashfiles[hash2]['path'], recursecb, color=color)
for change in out2:
for line in change.splitlines():
recout.append(' ' + line)
diff --git a/bitbake/bin/bitbake-getvar b/bitbake/bin/bitbake-getvar
index 5435a8d797..8901f99ae2 100755
--- a/bitbake/bin/bitbake-getvar
+++ b/bitbake/bin/bitbake-getvar
@@ -25,26 +25,36 @@ if __name__ == "__main__":
parser.add_argument('-u', '--unexpand', help='Do not expand the value (with --value)', action="store_true")
parser.add_argument('-f', '--flag', help='Specify a variable flag to query (with --value)', default=None)
parser.add_argument('--value', help='Only report the value, no history and no variable name', action="store_true")
+ parser.add_argument('-q', '--quiet', help='Silence bitbake server logging', action="store_true")
+ parser.add_argument('--ignore-undefined', help='Suppress any errors related to undefined variables', action="store_true")
args = parser.parse_args()
- if args.unexpand and not args.value:
- print("--unexpand only makes sense with --value")
- sys.exit(1)
+ if not args.value:
+ if args.unexpand:
+ sys.exit("--unexpand only makes sense with --value")
- if args.flag and not args.value:
- print("--flag only makes sense with --value")
- sys.exit(1)
+ if args.flag:
+ sys.exit("--flag only makes sense with --value")
- with bb.tinfoil.Tinfoil(tracking=True) as tinfoil:
+ quiet = args.quiet or args.value
+ with bb.tinfoil.Tinfoil(tracking=True, setup_logging=not quiet) as tinfoil:
if args.recipe:
- tinfoil.prepare(quiet=2)
+ tinfoil.prepare(quiet=3 if quiet else 2)
d = tinfoil.parse_recipe(args.recipe)
else:
tinfoil.prepare(quiet=2, config_only=True)
d = tinfoil.config_data
+
+ value = None
if args.flag:
- print(str(d.getVarFlag(args.variable, args.flag, expand=(not args.unexpand))))
- elif args.value:
- print(str(d.getVar(args.variable, expand=(not args.unexpand))))
+ value = d.getVarFlag(args.variable, args.flag, expand=not args.unexpand)
+ if value is None and not args.ignore_undefined:
+ sys.exit(f"The flag '{args.flag}' is not defined for variable '{args.variable}'")
+ else:
+ value = d.getVar(args.variable, expand=not args.unexpand)
+ if value is None and not args.ignore_undefined:
+ sys.exit(f"The variable '{args.variable}' is not defined")
+ if args.value:
+ print(str(value if value is not None else ""))
else:
bb.data.emit_var(args.variable, d=d, all=True)
diff --git a/bitbake/bin/bitbake-hashclient b/bitbake/bin/bitbake-hashclient
index 494f17592a..610787ed2b 100755
--- a/bitbake/bin/bitbake-hashclient
+++ b/bitbake/bin/bitbake-hashclient
@@ -14,6 +14,8 @@ import sys
import threading
import time
import warnings
+import netrc
+import json
warnings.simplefilter("default")
try:
@@ -36,18 +38,42 @@ except ImportError:
sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(__file__)), 'lib'))
import hashserv
+import bb.asyncrpc
DEFAULT_ADDRESS = 'unix://./hashserve.sock'
METHOD = 'stress.test.method'
+def print_user(u):
+ print(f"Username: {u['username']}")
+ if "permissions" in u:
+ print("Permissions: " + " ".join(u["permissions"]))
+ if "token" in u:
+ print(f"Token: {u['token']}")
+
def main():
+ def handle_get(args, client):
+ result = client.get_taskhash(args.method, args.taskhash, all_properties=True)
+ if not result:
+ return 0
+
+ print(json.dumps(result, sort_keys=True, indent=4))
+ return 0
+
+ def handle_get_outhash(args, client):
+ result = client.get_outhash(args.method, args.outhash, args.taskhash)
+ if not result:
+ return 0
+
+ print(json.dumps(result, sort_keys=True, indent=4))
+ return 0
+
def handle_stats(args, client):
if args.reset:
s = client.reset_stats()
else:
s = client.get_stats()
- pprint.pprint(s)
+ print(json.dumps(s, sort_keys=True, indent=4))
return 0
def handle_stress(args, client):
@@ -56,25 +82,24 @@ def main():
nonlocal missed_hashes
nonlocal max_time
- client = hashserv.create_client(args.address)
-
- for i in range(args.requests):
- taskhash = hashlib.sha256()
- taskhash.update(args.taskhash_seed.encode('utf-8'))
- taskhash.update(str(i).encode('utf-8'))
+ with hashserv.create_client(args.address) as client:
+ for i in range(args.requests):
+ taskhash = hashlib.sha256()
+ taskhash.update(args.taskhash_seed.encode('utf-8'))
+ taskhash.update(str(i).encode('utf-8'))
- start_time = time.perf_counter()
- l = client.get_unihash(METHOD, taskhash.hexdigest())
- elapsed = time.perf_counter() - start_time
+ start_time = time.perf_counter()
+ l = client.get_unihash(METHOD, taskhash.hexdigest())
+ elapsed = time.perf_counter() - start_time
- with lock:
- if l:
- found_hashes += 1
- else:
- missed_hashes += 1
+ with lock:
+ if l:
+ found_hashes += 1
+ else:
+ missed_hashes += 1
- max_time = max(elapsed, max_time)
- pbar.update()
+ max_time = max(elapsed, max_time)
+ pbar.update()
max_time = 0
found_hashes = 0
@@ -113,12 +138,114 @@ def main():
with lock:
pbar.update()
+ def handle_remove(args, client):
+ where = {k: v for k, v in args.where}
+ if where:
+ result = client.remove(where)
+ print("Removed %d row(s)" % (result["count"]))
+ else:
+ print("No query specified")
+
+ def handle_clean_unused(args, client):
+ result = client.clean_unused(args.max_age)
+ print("Removed %d rows" % (result["count"]))
+ return 0
+
+ def handle_refresh_token(args, client):
+ r = client.refresh_token(args.username)
+ print_user(r)
+
+ def handle_set_user_permissions(args, client):
+ r = client.set_user_perms(args.username, args.permissions)
+ print_user(r)
+
+ def handle_get_user(args, client):
+ r = client.get_user(args.username)
+ print_user(r)
+
+ def handle_get_all_users(args, client):
+ users = client.get_all_users()
+ print("{username:20}| {permissions}".format(username="Username", permissions="Permissions"))
+ print(("-" * 20) + "+" + ("-" * 20))
+ for u in users:
+ print("{username:20}| {permissions}".format(username=u["username"], permissions=" ".join(u["permissions"])))
+
+ def handle_new_user(args, client):
+ r = client.new_user(args.username, args.permissions)
+ print_user(r)
+
+ def handle_delete_user(args, client):
+ r = client.delete_user(args.username)
+ print_user(r)
+
+ def handle_get_db_usage(args, client):
+ usage = client.get_db_usage()
+ print(usage)
+ tables = sorted(usage.keys())
+ print("{name:20}| {rows:20}".format(name="Table name", rows="Rows"))
+ print(("-" * 20) + "+" + ("-" * 20))
+ for t in tables:
+ print("{name:20}| {rows:<20}".format(name=t, rows=usage[t]["rows"]))
+ print()
+
+ total_rows = sum(t["rows"] for t in usage.values())
+ print(f"Total rows: {total_rows}")
+
+ def handle_get_db_query_columns(args, client):
+ columns = client.get_db_query_columns()
+ print("\n".join(sorted(columns)))
+
+ def handle_gc_status(args, client):
+ result = client.gc_status()
+ if not result["mark"]:
+ print("No Garbage collection in progress")
+ return 0
+
+ print("Current Mark: %s" % result["mark"])
+ print("Total hashes to keep: %d" % result["keep"])
+ print("Total hashes to remove: %s" % result["remove"])
+ return 0
+
+ def handle_gc_mark(args, client):
+ where = {k: v for k, v in args.where}
+ result = client.gc_mark(args.mark, where)
+ print("New hashes marked: %d" % result["count"])
+ return 0
+
+ def handle_gc_sweep(args, client):
+ result = client.gc_sweep(args.mark)
+ print("Removed %d rows" % result["count"])
+ return 0
+
+ def handle_unihash_exists(args, client):
+ result = client.unihash_exists(args.unihash)
+ if args.quiet:
+ return 0 if result else 1
+
+ print("true" if result else "false")
+ return 0
+
parser = argparse.ArgumentParser(description='Hash Equivalence Client')
parser.add_argument('--address', default=DEFAULT_ADDRESS, help='Server address (default "%(default)s")')
parser.add_argument('--log', default='WARNING', help='Set logging level')
+ parser.add_argument('--login', '-l', metavar="USERNAME", help="Authenticate as USERNAME")
+ parser.add_argument('--password', '-p', metavar="TOKEN", help="Authenticate using token TOKEN")
+ parser.add_argument('--become', '-b', metavar="USERNAME", help="Impersonate user USERNAME (if allowed) when performing actions")
+ parser.add_argument('--no-netrc', '-n', action="store_false", dest="netrc", help="Do not use .netrc")
subparsers = parser.add_subparsers()
+ get_parser = subparsers.add_parser('get', help="Get the unihash for a taskhash")
+ get_parser.add_argument("method", help="Method to query")
+ get_parser.add_argument("taskhash", help="Task hash to query")
+ get_parser.set_defaults(func=handle_get)
+
+ get_outhash_parser = subparsers.add_parser('get-outhash', help="Get output hash information")
+ get_outhash_parser.add_argument("method", help="Method to query")
+ get_outhash_parser.add_argument("outhash", help="Output hash to query")
+ get_outhash_parser.add_argument("taskhash", help="Task hash to query")
+ get_outhash_parser.set_defaults(func=handle_get_outhash)
+
stats_parser = subparsers.add_parser('stats', help='Show server stats')
stats_parser.add_argument('--reset', action='store_true',
help='Reset server stats')
@@ -137,6 +264,64 @@ def main():
help='Include string in outhash')
stress_parser.set_defaults(func=handle_stress)
+ remove_parser = subparsers.add_parser('remove', help="Remove hash entries")
+ remove_parser.add_argument("--where", "-w", metavar="KEY VALUE", nargs=2, action="append", default=[],
+ help="Remove entries from table where KEY == VALUE")
+ remove_parser.set_defaults(func=handle_remove)
+
+ clean_unused_parser = subparsers.add_parser('clean-unused', help="Remove unused database entries")
+ clean_unused_parser.add_argument("max_age", metavar="SECONDS", type=int, help="Remove unused entries older than SECONDS old")
+ clean_unused_parser.set_defaults(func=handle_clean_unused)
+
+ refresh_token_parser = subparsers.add_parser('refresh-token', help="Refresh auth token")
+ refresh_token_parser.add_argument("--username", "-u", help="Refresh the token for another user (if authorized)")
+ refresh_token_parser.set_defaults(func=handle_refresh_token)
+
+ set_user_perms_parser = subparsers.add_parser('set-user-perms', help="Set new permissions for user")
+ set_user_perms_parser.add_argument("--username", "-u", help="Username", required=True)
+ set_user_perms_parser.add_argument("permissions", metavar="PERM", nargs="*", default=[], help="New permissions")
+ set_user_perms_parser.set_defaults(func=handle_set_user_permissions)
+
+ get_user_parser = subparsers.add_parser('get-user', help="Get user")
+ get_user_parser.add_argument("--username", "-u", help="Username")
+ get_user_parser.set_defaults(func=handle_get_user)
+
+ get_all_users_parser = subparsers.add_parser('get-all-users', help="List all users")
+ get_all_users_parser.set_defaults(func=handle_get_all_users)
+
+ new_user_parser = subparsers.add_parser('new-user', help="Create new user")
+ new_user_parser.add_argument("--username", "-u", help="Username", required=True)
+ new_user_parser.add_argument("permissions", metavar="PERM", nargs="*", default=[], help="New permissions")
+ new_user_parser.set_defaults(func=handle_new_user)
+
+ delete_user_parser = subparsers.add_parser('delete-user', help="Delete user")
+ delete_user_parser.add_argument("--username", "-u", help="Username", required=True)
+ delete_user_parser.set_defaults(func=handle_delete_user)
+
+ db_usage_parser = subparsers.add_parser('get-db-usage', help="Database Usage")
+ db_usage_parser.set_defaults(func=handle_get_db_usage)
+
+ db_query_columns_parser = subparsers.add_parser('get-db-query-columns', help="Show columns that can be used in database queries")
+ db_query_columns_parser.set_defaults(func=handle_get_db_query_columns)
+
+ gc_status_parser = subparsers.add_parser("gc-status", help="Show garbage collection status")
+ gc_status_parser.set_defaults(func=handle_gc_status)
+
+ gc_mark_parser = subparsers.add_parser('gc-mark', help="Mark hashes to be kept for garbage collection")
+ gc_mark_parser.add_argument("mark", help="Mark for this garbage collection operation")
+ gc_mark_parser.add_argument("--where", "-w", metavar="KEY VALUE", nargs=2, action="append", default=[],
+ help="Keep entries in table where KEY == VALUE")
+ gc_mark_parser.set_defaults(func=handle_gc_mark)
+
+ gc_sweep_parser = subparsers.add_parser('gc-sweep', help="Perform garbage collection and delete any entries that are not marked")
+ gc_sweep_parser.add_argument("mark", help="Mark for this garbage collection operation")
+ gc_sweep_parser.set_defaults(func=handle_gc_sweep)
+
+ unihash_exists_parser = subparsers.add_parser('unihash-exists', help="Check if a unihash is known to the server")
+ unihash_exists_parser.add_argument("--quiet", action="store_true", help="Don't print status. Instead, exit with 0 if unihash exists and 1 if it does not")
+ unihash_exists_parser.add_argument("unihash", help="Unihash to check")
+ unihash_exists_parser.set_defaults(func=handle_unihash_exists)
+
args = parser.parse_args()
logger = logging.getLogger('hashserv')
@@ -150,11 +335,30 @@ def main():
console.setLevel(level)
logger.addHandler(console)
+ login = args.login
+ password = args.password
+
+ if login is None and args.netrc:
+ try:
+ n = netrc.netrc()
+ auth = n.authenticators(args.address)
+ if auth is not None:
+ login, _, password = auth
+ except FileNotFoundError:
+ pass
+ except netrc.NetrcParseError as e:
+ sys.stderr.write(f"Error parsing {e.filename}:{e.lineno}: {e.msg}\n")
+
func = getattr(args, 'func', None)
if func:
- client = hashserv.create_client(args.address)
-
- return func(args, client)
+ try:
+ with hashserv.create_client(args.address, login, password) as client:
+ if args.become:
+ client.become_user(args.become)
+ return func(args, client)
+ except bb.asyncrpc.InvokeError as e:
+ print(f"ERROR: {e}")
+ return 1
return 0
diff --git a/bitbake/bin/bitbake-hashserv b/bitbake/bin/bitbake-hashserv
index 00af76b2d1..4bfb7abfbc 100755
--- a/bitbake/bin/bitbake-hashserv
+++ b/bitbake/bin/bitbake-hashserv
@@ -11,56 +11,161 @@ import logging
import argparse
import sqlite3
import warnings
+
warnings.simplefilter("default")
-sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(__file__)), 'lib'))
+sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(__file__)), "lib"))
import hashserv
+from hashserv.server import DEFAULT_ANON_PERMS
VERSION = "1.0.0"
-DEFAULT_BIND = 'unix://./hashserve.sock'
+DEFAULT_BIND = "unix://./hashserve.sock"
def main():
- parser = argparse.ArgumentParser(description='Hash Equivalence Reference Server. Version=%s' % VERSION,
- epilog='''The bind address is the path to a unix domain socket if it is
- prefixed with "unix://". Otherwise, it is an IP address
- and port in form ADDRESS:PORT. To bind to all addresses, leave
- the ADDRESS empty, e.g. "--bind :8686". To bind to a specific
- IPv6 address, enclose the address in "[]", e.g.
- "--bind [::1]:8686"'''
- )
-
- parser.add_argument('-b', '--bind', default=DEFAULT_BIND, help='Bind address (default "%(default)s")')
- parser.add_argument('-d', '--database', default='./hashserv.db', help='Database file (default "%(default)s")')
- parser.add_argument('-l', '--log', default='WARNING', help='Set logging level')
- parser.add_argument('-u', '--upstream', help='Upstream hashserv to pull hashes from')
- parser.add_argument('-r', '--read-only', action='store_true', help='Disallow write operations from clients')
+ parser = argparse.ArgumentParser(
+ description="Hash Equivalence Reference Server. Version=%s" % VERSION,
+ formatter_class=argparse.RawTextHelpFormatter,
+ epilog="""
+The bind address may take one of the following formats:
+ unix://PATH - Bind to unix domain socket at PATH
+ ws://ADDRESS:PORT - Bind to websocket on ADDRESS:PORT
+ ADDRESS:PORT - Bind to raw TCP socket on ADDRESS:PORT
+
+To bind to all addresses, leave the ADDRESS empty, e.g. "--bind :8686" or
+"--bind ws://:8686". To bind to a specific IPv6 address, enclose the address in
+"[]", e.g. "--bind [::1]:8686" or "--bind ws://[::1]:8686"
+
+Note that the default Anonymous permissions are designed to not break existing
+server instances when upgrading, but are not particularly secure defaults. If
+you want to use authentication, it is recommended that you use "--anon-perms
+@read" to only give anonymous users read access, or "--anon-perms @none" to
+give un-authenticated users no access at all.
+
+Setting "--anon-perms @all" or "--anon-perms @user-admin" is not allowed, since
+this would allow anonymous users to manage all users accounts, which is a bad
+idea.
+
+If you are using user authentication, you should run your server in websockets
+mode with an SSL terminating load balancer in front of it (as this server does
+not implement SSL). Otherwise all usernames and passwords will be transmitted
+in the clear. When configured this way, clients can connect using a secure
+websocket, as in "wss://SERVER:PORT"
+
+The following permissions are supported by the server:
+
+ @none - No permissions
+ @read - The ability to read equivalent hashes from the server
+ @report - The ability to report equivalent hashes to the server
+ @db-admin - Manage the hash database(s). This includes cleaning the
+ database, removing hashes, etc.
+ @user-admin - The ability to manage user accounts. This includes, creating
+ users, deleting users, resetting login tokens, and assigning
+ permissions.
+ @all - All possible permissions, including any that may be added
+ in the future
+ """,
+ )
+
+ parser.add_argument(
+ "-b",
+ "--bind",
+ default=os.environ.get("HASHSERVER_BIND", DEFAULT_BIND),
+ help='Bind address (default $HASHSERVER_BIND, "%(default)s")',
+ )
+ parser.add_argument(
+ "-d",
+ "--database",
+ default=os.environ.get("HASHSERVER_DB", "./hashserv.db"),
+ help='Database file (default $HASHSERVER_DB, "%(default)s")',
+ )
+ parser.add_argument(
+ "-l",
+ "--log",
+ default=os.environ.get("HASHSERVER_LOG_LEVEL", "WARNING"),
+ help='Set logging level (default $HASHSERVER_LOG_LEVEL, "%(default)s")',
+ )
+ parser.add_argument(
+ "-u",
+ "--upstream",
+ default=os.environ.get("HASHSERVER_UPSTREAM", None),
+ help="Upstream hashserv to pull hashes from ($HASHSERVER_UPSTREAM)",
+ )
+ parser.add_argument(
+ "-r",
+ "--read-only",
+ action="store_true",
+ help="Disallow write operations from clients ($HASHSERVER_READ_ONLY)",
+ )
+ parser.add_argument(
+ "--db-username",
+ default=os.environ.get("HASHSERVER_DB_USERNAME", None),
+ help="Database username ($HASHSERVER_DB_USERNAME)",
+ )
+ parser.add_argument(
+ "--db-password",
+ default=os.environ.get("HASHSERVER_DB_PASSWORD", None),
+ help="Database password ($HASHSERVER_DB_PASSWORD)",
+ )
+ parser.add_argument(
+ "--anon-perms",
+ metavar="PERM[,PERM[,...]]",
+ default=os.environ.get("HASHSERVER_ANON_PERMS", ",".join(DEFAULT_ANON_PERMS)),
+ help='Permissions to give anonymous users (default $HASHSERVER_ANON_PERMS, "%(default)s")',
+ )
+ parser.add_argument(
+ "--admin-user",
+ default=os.environ.get("HASHSERVER_ADMIN_USER", None),
+ help="Create default admin user with name ADMIN_USER ($HASHSERVER_ADMIN_USER)",
+ )
+ parser.add_argument(
+ "--admin-password",
+ default=os.environ.get("HASHSERVER_ADMIN_PASSWORD", None),
+ help="Create default admin user with password ADMIN_PASSWORD ($HASHSERVER_ADMIN_PASSWORD)",
+ )
args = parser.parse_args()
- logger = logging.getLogger('hashserv')
+ logger = logging.getLogger("hashserv")
level = getattr(logging, args.log.upper(), None)
if not isinstance(level, int):
- raise ValueError('Invalid log level: %s' % args.log)
+ raise ValueError("Invalid log level: %s (Try ERROR/WARNING/INFO/DEBUG)" % args.log)
logger.setLevel(level)
console = logging.StreamHandler()
console.setLevel(level)
logger.addHandler(console)
- server = hashserv.create_server(args.bind, args.database, upstream=args.upstream, read_only=args.read_only)
+ read_only = (os.environ.get("HASHSERVER_READ_ONLY", "0") == "1") or args.read_only
+ if "," in args.anon_perms:
+ anon_perms = args.anon_perms.split(",")
+ else:
+ anon_perms = args.anon_perms.split()
+
+ server = hashserv.create_server(
+ args.bind,
+ args.database,
+ upstream=args.upstream,
+ read_only=read_only,
+ db_username=args.db_username,
+ db_password=args.db_password,
+ anon_perms=anon_perms,
+ admin_username=args.admin_user,
+ admin_password=args.admin_password,
+ )
server.serve_forever()
return 0
-if __name__ == '__main__':
+if __name__ == "__main__":
try:
ret = main()
except Exception:
ret = 1
import traceback
+
traceback.print_exc()
sys.exit(ret)
diff --git a/bitbake/bin/bitbake-layers b/bitbake/bin/bitbake-layers
index 449434d468..d4b1d1aaf2 100755
--- a/bitbake/bin/bitbake-layers
+++ b/bitbake/bin/bitbake-layers
@@ -68,11 +68,11 @@ def main():
registered = False
for plugin in plugins:
+ if hasattr(plugin, 'tinfoil_init'):
+ plugin.tinfoil_init(tinfoil)
if hasattr(plugin, 'register_commands'):
registered = True
plugin.register_commands(subparsers)
- if hasattr(plugin, 'tinfoil_init'):
- plugin.tinfoil_init(tinfoil)
if not registered:
logger.error("No commands registered - missing plugins?")
diff --git a/bitbake/bin/bitbake-prserv b/bitbake/bin/bitbake-prserv
index 323df66dd0..ad0a069401 100755
--- a/bitbake/bin/bitbake-prserv
+++ b/bitbake/bin/bitbake-prserv
@@ -1,53 +1,83 @@
#!/usr/bin/env python3
#
+# Copyright BitBake Contributors
+#
# SPDX-License-Identifier: GPL-2.0-only
#
import os
import sys,logging
-import optparse
+import argparse
import warnings
warnings.simplefilter("default")
-sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(__file__)),'lib'))
+sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(__file__)), "lib"))
import prserv
import prserv.serv
-__version__="1.0.0"
+VERSION = "1.1.0"
-PRHOST_DEFAULT='0.0.0.0'
+PRHOST_DEFAULT="0.0.0.0"
PRPORT_DEFAULT=8585
def main():
- parser = optparse.OptionParser(
- version="Bitbake PR Service Core version %s, %%prog version %s" % (prserv.__version__, __version__),
- usage = "%prog < --start | --stop > [options]")
-
- parser.add_option("-f", "--file", help="database filename(default: prserv.sqlite3)", action="store",
- dest="dbfile", type="string", default="prserv.sqlite3")
- parser.add_option("-l", "--log", help="log filename(default: prserv.log)", action="store",
- dest="logfile", type="string", default="prserv.log")
- parser.add_option("--loglevel", help="logging level, i.e. CRITICAL, ERROR, WARNING, INFO, DEBUG",
- action = "store", type="string", dest="loglevel", default = "INFO")
- parser.add_option("--start", help="start daemon",
- action="store_true", dest="start")
- parser.add_option("--stop", help="stop daemon",
- action="store_true", dest="stop")
- parser.add_option("--host", help="ip address to bind", action="store",
- dest="host", type="string", default=PRHOST_DEFAULT)
- parser.add_option("--port", help="port number(default: 8585)", action="store",
- dest="port", type="int", default=PRPORT_DEFAULT)
- parser.add_option("-r", "--read-only", help="open database in read-only mode",
- action="store_true")
-
- options, args = parser.parse_args(sys.argv)
- prserv.init_logger(os.path.abspath(options.logfile),options.loglevel)
-
- if options.start:
- ret=prserv.serv.start_daemon(options.dbfile, options.host, options.port,os.path.abspath(options.logfile), options.read_only)
- elif options.stop:
- ret=prserv.serv.stop_daemon(options.host, options.port)
+ parser = argparse.ArgumentParser(
+ description="BitBake PR Server. Version=%s" % VERSION,
+ formatter_class=argparse.RawTextHelpFormatter)
+
+ parser.add_argument(
+ "-f",
+ "--file",
+ default="prserv.sqlite3",
+ help="database filename (default: prserv.sqlite3)",
+ )
+ parser.add_argument(
+ "-l",
+ "--log",
+ default="prserv.log",
+ help="log filename(default: prserv.log)",
+ )
+ parser.add_argument(
+ "--loglevel",
+ default="INFO",
+ help="logging level, i.e. CRITICAL, ERROR, WARNING, INFO, DEBUG",
+ )
+ parser.add_argument(
+ "--start",
+ action="store_true",
+ help="start daemon",
+ )
+ parser.add_argument(
+ "--stop",
+ action="store_true",
+ help="stop daemon",
+ )
+ parser.add_argument(
+ "--host",
+ help="ip address to bind",
+ default=PRHOST_DEFAULT,
+ )
+ parser.add_argument(
+ "--port",
+ type=int,
+ default=PRPORT_DEFAULT,
+ help="port number (default: 8585)",
+ )
+ parser.add_argument(
+ "-r",
+ "--read-only",
+ action="store_true",
+ help="open database in read-only mode",
+ )
+
+ args = parser.parse_args()
+ prserv.init_logger(os.path.abspath(args.log), args.loglevel)
+
+ if args.start:
+ ret=prserv.serv.start_daemon(args.file, args.host, args.port, os.path.abspath(args.log), args.read_only)
+ elif args.stop:
+ ret=prserv.serv.stop_daemon(args.host, args.port)
else:
ret=parser.print_help()
return ret
diff --git a/bitbake/bin/bitbake-server b/bitbake/bin/bitbake-server
index f53f88b6b0..454a3919aa 100755
--- a/bitbake/bin/bitbake-server
+++ b/bitbake/bin/bitbake-server
@@ -12,11 +12,12 @@ warnings.simplefilter("default")
import logging
sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(sys.argv[0])), 'lib'))
-if sys.getfilesystemencoding() != "utf-8":
- sys.exit("Please use a locale setting which supports UTF-8 (such as LANG=en_US.UTF-8).\nPython can't change the filesystem locale after loading so we need a UTF-8 when Python starts or things won't work.")
+import bb
+
+bb.utils.check_system_locale()
# Users shouldn't be running this code directly
-if len(sys.argv) != 10 or not sys.argv[1].startswith("decafbad"):
+if len(sys.argv) != 11 or not sys.argv[1].startswith("decafbad"):
print("bitbake-server is meant for internal execution by bitbake itself, please don't use it standalone.")
sys.exit(1)
@@ -28,7 +29,8 @@ logfile = sys.argv[4]
lockname = sys.argv[5]
sockname = sys.argv[6]
timeout = float(sys.argv[7])
-xmlrpcinterface = (sys.argv[8], int(sys.argv[9]))
+profile = bool(int(sys.argv[8]))
+xmlrpcinterface = (sys.argv[9], int(sys.argv[10]))
if xmlrpcinterface[0] == "None":
xmlrpcinterface = (None, xmlrpcinterface[1])
@@ -49,5 +51,5 @@ logger = logging.getLogger("BitBake")
handler = bb.event.LogHandler()
logger.addHandler(handler)
-bb.server.process.execServer(lockfd, readypipeinfd, lockname, sockname, timeout, xmlrpcinterface)
+bb.server.process.execServer(lockfd, readypipeinfd, lockname, sockname, timeout, xmlrpcinterface, profile)
diff --git a/bitbake/bin/bitbake-worker b/bitbake/bin/bitbake-worker
index 9d850ec77c..e8073f2ac3 100755
--- a/bitbake/bin/bitbake-worker
+++ b/bitbake/bin/bitbake-worker
@@ -1,5 +1,7 @@
#!/usr/bin/env python3
#
+# Copyright BitBake Contributors
+#
# SPDX-License-Identifier: GPL-2.0-only
#
@@ -22,8 +24,7 @@ import subprocess
from multiprocessing import Lock
from threading import Thread
-if sys.getfilesystemencoding() != "utf-8":
- sys.exit("Please use a locale setting which supports UTF-8 (such as LANG=en_US.UTF-8).\nPython can't change the filesystem locale after loading so we need a UTF-8 when Python starts or things won't work.")
+bb.utils.check_system_locale()
# Users shouldn't be running this code directly
if len(sys.argv) != 2 or not sys.argv[1].startswith("decafbad"):
@@ -90,19 +91,19 @@ def worker_fire_prepickled(event):
worker_thread_exit = False
def worker_flush(worker_queue):
- worker_queue_int = b""
+ worker_queue_int = bytearray()
global worker_pipe, worker_thread_exit
while True:
try:
- worker_queue_int = worker_queue_int + worker_queue.get(True, 1)
+ worker_queue_int.extend(worker_queue.get(True, 1))
except queue.Empty:
pass
while (worker_queue_int or not worker_queue.empty()):
try:
(_, ready, _) = select.select([], [worker_pipe], [], 1)
if not worker_queue.empty():
- worker_queue_int = worker_queue_int + worker_queue.get()
+ worker_queue_int.extend(worker_queue.get())
written = os.write(worker_pipe, worker_queue_int)
worker_queue_int = worker_queue_int[written:]
except (IOError, OSError) as e:
@@ -120,11 +121,10 @@ def worker_child_fire(event, d):
data = b"<event>" + pickle.dumps(event) + b"</event>"
try:
- worker_pipe_lock.acquire()
- while(len(data)):
- written = worker_pipe.write(data)
- data = data[written:]
- worker_pipe_lock.release()
+ with bb.utils.lock_timeout(worker_pipe_lock):
+ while(len(data)):
+ written = worker_pipe.write(data)
+ data = data[written:]
except IOError:
sigterm_handler(None, None)
raise
@@ -143,7 +143,17 @@ def sigterm_handler(signum, frame):
os.killpg(0, signal.SIGTERM)
sys.exit()
-def fork_off_task(cfg, data, databuilder, workerdata, fn, task, taskname, taskhash, unihash, appends, taskdepdata, extraconfigdata, quieterrors=False, dry_run_exec=False):
+def fork_off_task(cfg, data, databuilder, workerdata, extraconfigdata, runtask):
+
+ fn = runtask['fn']
+ task = runtask['task']
+ taskname = runtask['taskname']
+ taskhash = runtask['taskhash']
+ unihash = runtask['unihash']
+ appends = runtask['appends']
+ layername = runtask['layername']
+ taskdepdata = runtask['taskdepdata']
+ quieterrors = runtask['quieterrors']
# We need to setup the environment BEFORE the fork, since
# a fork() or exec*() activates PSEUDO...
@@ -155,8 +165,7 @@ def fork_off_task(cfg, data, databuilder, workerdata, fn, task, taskname, taskha
uid = os.getuid()
gid = os.getgid()
-
- taskdep = workerdata["taskdeps"][fn]
+ taskdep = runtask['taskdep']
if 'umask' in taskdep and taskname in taskdep['umask']:
umask = taskdep['umask'][taskname]
elif workerdata["umask"]:
@@ -168,25 +177,25 @@ def fork_off_task(cfg, data, databuilder, workerdata, fn, task, taskname, taskha
except TypeError:
pass
- dry_run = cfg.dry_run or dry_run_exec
+ dry_run = cfg.dry_run or runtask['dry_run']
# We can't use the fakeroot environment in a dry run as it possibly hasn't been built
if 'fakeroot' in taskdep and taskname in taskdep['fakeroot'] and not dry_run:
fakeroot = True
- envvars = (workerdata["fakerootenv"][fn] or "").split()
- for key, value in (var.split('=') for var in envvars):
+ envvars = (runtask['fakerootenv'] or "").split()
+ for key, value in (var.split('=',1) for var in envvars):
envbackup[key] = os.environ.get(key)
os.environ[key] = value
fakeenv[key] = value
- fakedirs = (workerdata["fakerootdirs"][fn] or "").split()
+ fakedirs = (runtask['fakerootdirs'] or "").split()
for p in fakedirs:
bb.utils.mkdirhier(p)
logger.debug2('Running %s:%s under fakeroot, fakedirs: %s' %
(fn, taskname, ', '.join(fakedirs)))
else:
- envvars = (workerdata["fakerootnoenv"][fn] or "").split()
- for key, value in (var.split('=') for var in envvars):
+ envvars = (runtask['fakerootnoenv'] or "").split()
+ for key, value in (var.split('=',1) for var in envvars):
envbackup[key] = os.environ.get(key)
os.environ[key] = value
fakeenv[key] = value
@@ -228,15 +237,16 @@ def fork_off_task(cfg, data, databuilder, workerdata, fn, task, taskname, taskha
# Let SIGHUP exit as SIGTERM
signal.signal(signal.SIGHUP, sigterm_handler)
- # No stdin
- newsi = os.open(os.devnull, os.O_RDWR)
- os.dup2(newsi, sys.stdin.fileno())
+ # No stdin & stdout
+ # stdout is used as a status report channel and must not be used by child processes.
+ dumbio = os.open(os.devnull, os.O_RDWR)
+ os.dup2(dumbio, sys.stdin.fileno())
+ os.dup2(dumbio, sys.stdout.fileno())
- if umask:
+ if umask is not None:
os.umask(umask)
try:
- bb_cache = bb.cache.NoCache(databuilder)
(realfn, virtual, mc) = bb.cache.virtualfn2realfn(fn)
the_data = databuilder.mcdata[mc]
the_data.setVar("BB_WORKERCONTEXT", "1")
@@ -255,13 +265,14 @@ def fork_off_task(cfg, data, databuilder, workerdata, fn, task, taskname, taskha
bb.parse.siggen.set_taskhashes(workerdata["newhashes"])
ret = 0
- the_data = bb_cache.loadDataFull(fn, appends)
+ the_data = databuilder.parseRecipe(fn, appends, layername)
the_data.setVar('BB_TASKHASH', taskhash)
the_data.setVar('BB_UNIHASH', unihash)
+ bb.parse.siggen.setup_datacache_from_datastore(fn, the_data)
bb.utils.set_process_name("%s:%s" % (the_data.getVar("PN"), taskname.replace("do_", "")))
- if not the_data.getVarFlag(taskname, 'network', False):
+ if not bb.utils.to_boolean(the_data.getVarFlag(taskname, 'network')):
if bb.utils.is_local_uid(uid):
logger.debug("Attempting to disable network for %s" % taskname)
bb.utils.disable_network(uid, gid)
@@ -296,6 +307,10 @@ def fork_off_task(cfg, data, databuilder, workerdata, fn, task, taskname, taskha
if not quieterrors:
logger.critical(traceback.format_exc())
os._exit(1)
+
+ sys.stdout.flush()
+ sys.stderr.flush()
+
try:
if dry_run:
return 0
@@ -337,12 +352,12 @@ class runQueueWorkerPipe():
if pipeout:
pipeout.close()
bb.utils.nonblockingfd(self.input)
- self.queue = b""
+ self.queue = bytearray()
def read(self):
start = len(self.queue)
try:
- self.queue = self.queue + (self.input.read(102400) or b"")
+ self.queue.extend(self.input.read(102400) or b"")
except (OSError, IOError) as e:
if e.errno != errno.EAGAIN:
raise
@@ -370,7 +385,7 @@ class BitbakeWorker(object):
def __init__(self, din):
self.input = din
bb.utils.nonblockingfd(self.input)
- self.queue = b""
+ self.queue = bytearray()
self.cookercfg = None
self.databuilder = None
self.data = None
@@ -404,7 +419,7 @@ class BitbakeWorker(object):
if len(r) == 0:
# EOF on pipe, server must have terminated
self.sigterm_exception(signal.SIGTERM, None)
- self.queue = self.queue + r
+ self.queue.extend(r)
except (OSError, IOError):
pass
if len(self.queue):
@@ -424,18 +439,30 @@ class BitbakeWorker(object):
while self.process_waitpid():
continue
-
def handle_item(self, item, func):
- if self.queue.startswith(b"<" + item + b">"):
- index = self.queue.find(b"</" + item + b">")
- while index != -1:
- try:
- func(self.queue[(len(item) + 2):index])
- except pickle.UnpicklingError:
- workerlog_write("Unable to unpickle data: %s\n" % ":".join("{:02x}".format(c) for c in self.queue))
- raise
- self.queue = self.queue[(index + len(item) + 3):]
- index = self.queue.find(b"</" + item + b">")
+ opening_tag = b"<" + item + b">"
+ if not self.queue.startswith(opening_tag):
+ return
+
+ tag_len = len(opening_tag)
+ if len(self.queue) < tag_len + 4:
+ # we need to receive more data
+ return
+ header = self.queue[tag_len:tag_len + 4]
+ payload_len = int.from_bytes(header, 'big')
+ # closing tag has length (tag_len + 1)
+ if len(self.queue) < tag_len * 2 + 1 + payload_len:
+ # we need to receive more data
+ return
+
+ index = self.queue.find(b"</" + item + b">")
+ if index != -1:
+ try:
+ func(self.queue[(tag_len + 4):index])
+ except pickle.UnpicklingError:
+ workerlog_write("Unable to unpickle data: %s\n" % ":".join("{:02x}".format(c) for c in self.queue))
+ raise
+ self.queue = self.queue[(index + len(b"</") + len(item) + len(b">")):]
def handle_cookercfg(self, data):
self.cookercfg = pickle.loads(data)
@@ -455,6 +482,7 @@ class BitbakeWorker(object):
for mc in self.databuilder.mcdata:
self.databuilder.mcdata[mc].setVar("PRSERV_HOST", self.workerdata["prhost"])
self.databuilder.mcdata[mc].setVar("BB_HASHSERVE", self.workerdata["hashservaddr"])
+ self.databuilder.mcdata[mc].setVar("__bbclasstype", "recipe")
def handle_newtaskhashes(self, data):
self.workerdata["newhashes"] = pickle.loads(data)
@@ -472,11 +500,15 @@ class BitbakeWorker(object):
sys.exit(0)
def handle_runtask(self, data):
- fn, task, taskname, taskhash, unihash, quieterrors, appends, taskdepdata, dry_run_exec = pickle.loads(data)
- workerlog_write("Handling runtask %s %s %s\n" % (task, fn, taskname))
+ runtask = pickle.loads(data)
- pid, pipein, pipeout = fork_off_task(self.cookercfg, self.data, self.databuilder, self.workerdata, fn, task, taskname, taskhash, unihash, appends, taskdepdata, self.extraconfigdata, quieterrors, dry_run_exec)
+ fn = runtask['fn']
+ task = runtask['task']
+ taskname = runtask['taskname']
+
+ workerlog_write("Handling runtask %s %s %s\n" % (task, fn, taskname))
+ pid, pipein, pipeout = fork_off_task(self.cookercfg, self.data, self.databuilder, self.workerdata, self.extraconfigdata, runtask)
self.build_pids[pid] = task
self.build_pipes[pid] = runQueueWorkerPipe(pipein, pipeout)
diff --git a/bitbake/bin/git-make-shallow b/bitbake/bin/git-make-shallow
index 1d00fbf183..9de557c10e 100755
--- a/bitbake/bin/git-make-shallow
+++ b/bitbake/bin/git-make-shallow
@@ -1,5 +1,7 @@
#!/usr/bin/env python3
#
+# Copyright BitBake Contributors
+#
# SPDX-License-Identifier: GPL-2.0-only
#
@@ -22,15 +24,17 @@ warnings.simplefilter("default")
version = 1.0
+git_cmd = ['git', '-c', 'safe.bareRepository=all']
+
def main():
if sys.version_info < (3, 4, 0):
sys.exit('Python 3.4 or greater is required')
- git_dir = check_output(['git', 'rev-parse', '--git-dir']).rstrip()
+ git_dir = check_output(git_cmd + ['rev-parse', '--git-dir']).rstrip()
shallow_file = os.path.join(git_dir, 'shallow')
if os.path.exists(shallow_file):
try:
- check_output(['git', 'fetch', '--unshallow'])
+ check_output(git_cmd + ['fetch', '--unshallow'])
except subprocess.CalledProcessError:
try:
os.unlink(shallow_file)
@@ -39,21 +43,21 @@ def main():
raise
args = process_args()
- revs = check_output(['git', 'rev-list'] + args.revisions).splitlines()
+ revs = check_output(git_cmd + ['rev-list'] + args.revisions).splitlines()
make_shallow(shallow_file, args.revisions, args.refs)
- ref_revs = check_output(['git', 'rev-list'] + args.refs).splitlines()
+ ref_revs = check_output(git_cmd + ['rev-list'] + args.refs).splitlines()
remaining_history = set(revs) & set(ref_revs)
for rev in remaining_history:
- if check_output(['git', 'rev-parse', '{}^@'.format(rev)]):
+ if check_output(git_cmd + ['rev-parse', '{}^@'.format(rev)]):
sys.exit('Error: %s was not made shallow' % rev)
filter_refs(args.refs)
if args.shrink:
shrink_repo(git_dir)
- subprocess.check_call(['git', 'fsck', '--unreachable'])
+ subprocess.check_call(git_cmd + ['fsck', '--unreachable'])
def process_args():
@@ -70,12 +74,12 @@ def process_args():
args = parser.parse_args()
if args.refs:
- args.refs = check_output(['git', 'rev-parse', '--symbolic-full-name'] + args.refs).splitlines()
+ args.refs = check_output(git_cmd + ['rev-parse', '--symbolic-full-name'] + args.refs).splitlines()
else:
args.refs = get_all_refs(lambda r, t, tt: t == 'commit' or tt == 'commit')
args.refs = list(filter(lambda r: not r.endswith('/HEAD'), args.refs))
- args.revisions = check_output(['git', 'rev-parse'] + ['%s^{}' % i for i in args.revisions]).splitlines()
+ args.revisions = check_output(git_cmd + ['rev-parse'] + ['%s^{}' % i for i in args.revisions]).splitlines()
return args
@@ -93,7 +97,7 @@ def make_shallow(shallow_file, revisions, refs):
def get_all_refs(ref_filter=None):
"""Return all the existing refs in this repository, optionally filtering the refs."""
- ref_output = check_output(['git', 'for-each-ref', '--format=%(refname)\t%(objecttype)\t%(*objecttype)'])
+ ref_output = check_output(git_cmd + ['for-each-ref', '--format=%(refname)\t%(objecttype)\t%(*objecttype)'])
ref_split = [tuple(iter_extend(l.rsplit('\t'), 3)) for l in ref_output.splitlines()]
if ref_filter:
ref_split = (e for e in ref_split if ref_filter(*e))
@@ -111,7 +115,7 @@ def filter_refs(refs):
all_refs = get_all_refs()
to_remove = set(all_refs) - set(refs)
if to_remove:
- check_output(['xargs', '-0', '-n', '1', 'git', 'update-ref', '-d', '--no-deref'],
+ check_output(['xargs', '-0', '-n', '1'] + git_cmd + ['update-ref', '-d', '--no-deref'],
input=''.join(l + '\0' for l in to_remove))
@@ -124,7 +128,7 @@ def follow_history_intersections(revisions, refs):
if rev in seen:
continue
- parents = check_output(['git', 'rev-parse', '%s^@' % rev]).splitlines()
+ parents = check_output(git_cmd + ['rev-parse', '%s^@' % rev]).splitlines()
yield rev
seen.add(rev)
@@ -132,12 +136,12 @@ def follow_history_intersections(revisions, refs):
if not parents:
continue
- check_refs = check_output(['git', 'merge-base', '--independent'] + sorted(refs)).splitlines()
+ check_refs = check_output(git_cmd + ['merge-base', '--independent'] + sorted(refs)).splitlines()
for parent in parents:
for ref in check_refs:
print("Checking %s vs %s" % (parent, ref))
try:
- merge_base = check_output(['git', 'merge-base', parent, ref]).rstrip()
+ merge_base = check_output(git_cmd + ['merge-base', parent, ref]).rstrip()
except subprocess.CalledProcessError:
continue
else:
@@ -157,14 +161,14 @@ def iter_except(func, exception, start=None):
def shrink_repo(git_dir):
"""Shrink the newly shallow repository, removing the unreachable objects."""
- subprocess.check_call(['git', 'reflog', 'expire', '--expire-unreachable=now', '--all'])
- subprocess.check_call(['git', 'repack', '-ad'])
+ subprocess.check_call(git_cmd + ['reflog', 'expire', '--expire-unreachable=now', '--all'])
+ subprocess.check_call(git_cmd + ['repack', '-ad'])
try:
os.unlink(os.path.join(git_dir, 'objects', 'info', 'alternates'))
except OSError as exc:
if exc.errno != errno.ENOENT:
raise
- subprocess.check_call(['git', 'prune', '--expire', 'now'])
+ subprocess.check_call(git_cmd + ['prune', '--expire', 'now'])
if __name__ == '__main__':
diff --git a/bitbake/bin/toaster b/bitbake/bin/toaster
index 558a819570..f002c8c159 100755
--- a/bitbake/bin/toaster
+++ b/bitbake/bin/toaster
@@ -84,7 +84,7 @@ webserverStartAll()
echo "Starting webserver..."
$MANAGE runserver --noreload "$ADDR_PORT" \
- </dev/null >>${BUILDDIR}/toaster_web.log 2>&1 \
+ </dev/null >>${TOASTER_LOGS_DIR}/web.log 2>&1 \
& echo $! >${BUILDDIR}/.toastermain.pid
sleep 1
@@ -181,6 +181,14 @@ WEBSERVER=1
export TOASTER_BUILDSERVER=1
ADDR_PORT="localhost:8000"
TOASTERDIR=`dirname $BUILDDIR`
+# ${BUILDDIR}/toaster_logs/ became the default location for toaster logs
+# This is needed for implemented django-log-viewer: https://pypi.org/project/django-log-viewer/
+# If the directory does not exist, create it.
+TOASTER_LOGS_DIR="${BUILDDIR}/toaster_logs/"
+if [ ! -d $TOASTER_LOGS_DIR ]
+then
+ mkdir $TOASTER_LOGS_DIR
+fi
unset CMD
for param in $*; do
case $param in
@@ -299,7 +307,7 @@ case $CMD in
export BITBAKE_UI='toasterui'
if [ $TOASTER_BUILDSERVER -eq 1 ] ; then
$MANAGE runbuilds \
- </dev/null >>${BUILDDIR}/toaster_runbuilds.log 2>&1 \
+ </dev/null >>${TOASTER_LOGS_DIR}/toaster_runbuilds.log 2>&1 \
& echo $! >${BUILDDIR}/.runbuilds.pid
else
echo "Toaster build server not started."
diff --git a/bitbake/bin/toaster-eventreplay b/bitbake/bin/toaster-eventreplay
index 404b61f516..74a319320e 100755
--- a/bitbake/bin/toaster-eventreplay
+++ b/bitbake/bin/toaster-eventreplay
@@ -30,79 +30,23 @@ sys.path.insert(0, join(dirname(dirname(abspath(__file__))), 'lib'))
import bb.cooker
from bb.ui import toasterui
-
-class EventPlayer:
- """Emulate a connection to a bitbake server."""
-
- def __init__(self, eventfile, variables):
- self.eventfile = eventfile
- self.variables = variables
- self.eventmask = []
-
- def waitEvent(self, _timeout):
- """Read event from the file."""
- line = self.eventfile.readline().strip()
- if not line:
- return
- try:
- event_str = json.loads(line)['vars'].encode('utf-8')
- event = pickle.loads(codecs.decode(event_str, 'base64'))
- event_name = "%s.%s" % (event.__module__, event.__class__.__name__)
- if event_name not in self.eventmask:
- return
- return event
- except ValueError as err:
- print("Failed loading ", line)
- raise err
-
- def runCommand(self, command_line):
- """Emulate running a command on the server."""
- name = command_line[0]
-
- if name == "getVariable":
- var_name = command_line[1]
- variable = self.variables.get(var_name)
- if variable:
- return variable['v'], None
- return None, "Missing variable %s" % var_name
-
- elif name == "getAllKeysWithFlags":
- dump = {}
- flaglist = command_line[1]
- for key, val in self.variables.items():
- try:
- if not key.startswith("__"):
- dump[key] = {
- 'v': val['v'],
- 'history' : val['history'],
- }
- for flag in flaglist:
- dump[key][flag] = val[flag]
- except Exception as err:
- print(err)
- return (dump, None)
-
- elif name == 'setEventMask':
- self.eventmask = command_line[-1]
- return True, None
-
- else:
- raise Exception("Command %s not implemented" % command_line[0])
-
- def getEventHandle(self):
- """
- This method is called by toasterui.
- The return value is passed to self.runCommand but not used there.
- """
- pass
+from bb.ui import eventreplay
def main(argv):
with open(argv[-1]) as eventfile:
# load variables from the first line
- variables = json.loads(eventfile.readline().strip())['allvariables']
-
+ variables = None
+ while line := eventfile.readline().strip():
+ try:
+ variables = json.loads(line)['allvariables']
+ break
+ except (KeyError, json.JSONDecodeError):
+ continue
+ if not variables:
+ sys.exit("Cannot find allvariables entry in event log file %s" % argv[-1])
+ eventfile.seek(0)
params = namedtuple('ConfigParams', ['observe_only'])(True)
- player = EventPlayer(eventfile, variables)
+ player = eventreplay.EventPlayer(eventfile, variables)
return toasterui.main(player, player, params)
diff --git a/bitbake/contrib/vim/indent/bitbake.vim b/bitbake/contrib/vim/indent/bitbake.vim
index 1381034098..7ee9d69938 100644
--- a/bitbake/contrib/vim/indent/bitbake.vim
+++ b/bitbake/contrib/vim/indent/bitbake.vim
@@ -40,7 +40,7 @@ set cpo&vim
let s:maxoff = 50 " maximum number of lines to look backwards for ()
-function GetPythonIndent(lnum)
+function! GetBBPythonIndent(lnum)
" If this line is explicitly joined: If the previous line was also joined,
" line it up with that one, otherwise add two 'shiftwidth'
@@ -257,7 +257,7 @@ let b:did_indent = 1
setlocal indentkeys+=0\"
-function BitbakeIndent(lnum)
+function! BitbakeIndent(lnum)
if !has('syntax_items')
return -1
endif
@@ -315,7 +315,7 @@ function BitbakeIndent(lnum)
endif
if index(["bbPyDefRegion", "bbPyFuncRegion"], name) != -1
- let ret = GetPythonIndent(a:lnum)
+ let ret = GetBBPythonIndent(a:lnum)
" Should normally always be indented by at least one shiftwidth; but allow
" return of -1 (defer to autoindent) or -2 (force indent to 0)
if ret == 0
diff --git a/bitbake/contrib/vim/syntax/bitbake.vim b/bitbake/contrib/vim/syntax/bitbake.vim
index c5ea80fdf2..8f39b8f951 100644
--- a/bitbake/contrib/vim/syntax/bitbake.vim
+++ b/bitbake/contrib/vim/syntax/bitbake.vim
@@ -63,13 +63,14 @@ syn region bbVarFlagFlag matchgroup=bbArrayBrackets start="\[" end="\]\s*
" Includes and requires
syn keyword bbInclude inherit include require contained
-syn match bbIncludeRest ".*$" contained contains=bbString,bbVarDeref
+syn match bbIncludeRest ".*$" contained contains=bbString,bbVarDeref,bbVarPyValue
syn match bbIncludeLine "^\(inherit\|include\|require\)\s\+" contains=bbInclude nextgroup=bbIncludeRest
" Add taks and similar
syn keyword bbStatement addtask deltask addhandler after before EXPORT_FUNCTIONS contained
-syn match bbStatementRest ".*$" skipwhite contained contains=bbStatement
-syn match bbStatementLine "^\(addtask\|deltask\|addhandler\|after\|before\|EXPORT_FUNCTIONS\)\s\+" contains=bbStatement nextgroup=bbStatementRest
+syn match bbStatementRest /[^\\]*$/ skipwhite contained contains=bbStatement,bbVarDeref,bbVarPyValue
+syn region bbStatementRestCont start=/.*\\$/ end=/^[^\\]*$/ contained contains=bbStatement,bbVarDeref,bbVarPyValue,bbContinue keepend
+syn match bbStatementLine "^\(addtask\|deltask\|addhandler\|after\|before\|EXPORT_FUNCTIONS\)\s\+" contains=bbStatement nextgroup=bbStatementRest,bbStatementRestCont
" OE Important Functions
syn keyword bbOEFunctions do_fetch do_unpack do_patch do_configure do_compile do_stage do_install do_package contained
@@ -122,6 +123,7 @@ hi def link bbPyFlag Type
hi def link bbPyDef Statement
hi def link bbStatement Statement
hi def link bbStatementRest Identifier
+hi def link bbStatementRestCont Identifier
hi def link bbOEFunctions Special
hi def link bbVarPyValue PreProc
hi def link bbOverrideOperator Operator
diff --git a/bitbake/doc/README b/bitbake/doc/README
index cdbb23776e..d4f56afa37 100644
--- a/bitbake/doc/README
+++ b/bitbake/doc/README
@@ -47,8 +47,8 @@ To install all required packages run:
To build the documentation locally, run:
- $ cd documentation
- $ make -f Makefile.sphinx html
+ $ cd doc
+ $ make html
The resulting HTML index page will be _build/html/index.html, and you
can browse your own copy of the locally generated documentation with
diff --git a/bitbake/doc/_templates/footer.html b/bitbake/doc/_templates/footer.html
new file mode 100644
index 0000000000..1398f20d7e
--- /dev/null
+++ b/bitbake/doc/_templates/footer.html
@@ -0,0 +1,9 @@
+<footer>
+ <hr/>
+ <div role="contentinfo">
+ <p>&copy; Copyright {{ copyright }}
+ <br>Last updated on {{ last_updated }} from the <a href="https://git.openembedded.org/bitbake/">bitbake</a> git repository.
+ </p>
+ </div>
+</footer>
+
diff --git a/bitbake/doc/bitbake-user-manual/bitbake-user-manual-execution.rst b/bitbake/doc/bitbake-user-manual/bitbake-user-manual-execution.rst
index 7a22e96edf..d58fbb32ea 100644
--- a/bitbake/doc/bitbake-user-manual/bitbake-user-manual-execution.rst
+++ b/bitbake/doc/bitbake-user-manual/bitbake-user-manual-execution.rst
@@ -552,8 +552,8 @@ through dependency chains are more complex and are generally
accomplished with a Python function. The code in
``meta/lib/oe/sstatesig.py`` shows two examples of this and also
illustrates how you can insert your own policy into the system if so
-desired. This file defines the two basic signature generators
-OpenEmbedded-Core uses: "OEBasic" and "OEBasicHash". By default, there
+desired. This file defines the basic signature generator
+OpenEmbedded-Core uses: "OEBasicHash". By default, there
is a dummy "noop" signature handler enabled in BitBake. This means that
behavior is unchanged from previous versions. ``OE-Core`` uses the
"OEBasicHash" signature handler by default through this setting in the
@@ -561,14 +561,13 @@ behavior is unchanged from previous versions. ``OE-Core`` uses the
BB_SIGNATURE_HANDLER ?= "OEBasicHash"
-The "OEBasicHash" :term:`BB_SIGNATURE_HANDLER` is the same as the "OEBasic"
-version but adds the task hash to the stamp files. This results in any
-metadata change that changes the task hash, automatically causing the
-task to be run again. This removes the need to bump
-:term:`PR` values, and changes to metadata automatically
-ripple across the build.
+The main feature of the "OEBasicHash" :term:`BB_SIGNATURE_HANDLER` is that
+it adds the task hash to the stamp files. Thanks to this, any metadata
+change will change the task hash, automatically causing the task to be run
+again. This removes the need to bump :term:`PR` values, and changes to
+metadata automatically ripple across the build.
-It is also worth noting that the end result of these signature
+It is also worth noting that the end result of signature
generators is to make some dependency and hash information available to
the build. This information includes:
@@ -587,10 +586,11 @@ or possibly those defined in the metadata/signature handler itself. The
simplest parameter to pass is "none", which causes a set of signature
information to be written out into ``STAMPS_DIR`` corresponding to the
targets specified. The other currently available parameter is
-"printdiff", which causes BitBake to try to establish the closest
+"printdiff", which causes BitBake to try to establish the most recent
signature match it can (e.g. in the sstate cache) and then run
-``bitbake-diffsigs`` over the matches to determine the stamps and delta
-where these two stamp trees diverge.
+compare the matched signatures to determine the stamps and delta
+where these two stamp trees diverge. This can be used to determine why
+tasks need to be re-run in situations where that is not expected.
.. note::
@@ -657,7 +657,7 @@ builds are when execute, bitbake also supports user defined
configuration of the `Python
logging <https://docs.python.org/3/library/logging.html>`__ facilities
through the :term:`BB_LOGCONFIG` variable. This
-variable defines a json or yaml `logging
+variable defines a JSON or YAML `logging
configuration <https://docs.python.org/3/library/logging.config.html>`__
that will be intelligently merged into the default configuration. The
logging configuration is merged using the following rules:
@@ -691,9 +691,9 @@ logging configuration is merged using the following rules:
adds a filter called ``BitBake.defaultFilter``, both filters will be
applied to the logger
-As an example, consider the following user logging configuration file
-which logs all Hash Equivalence related messages of VERBOSE or higher to
-a file called ``hashequiv.log`` ::
+As a first example, you can create a ``hashequiv.json`` user logging
+configuration file to log all Hash Equivalence related messages of ``VERBOSE``
+or higher priority to a file called ``hashequiv.log``::
{
"version": 1,
@@ -722,3 +722,40 @@ a file called ``hashequiv.log`` ::
}
}
}
+
+Then set the :term:`BB_LOGCONFIG` variable in ``conf/local.conf``::
+
+ BB_LOGCONFIG = "hashequiv.json"
+
+Another example is this ``warn.json`` file to log all ``WARNING`` and
+higher priority messages to a ``warn.log`` file::
+
+ {
+ "version": 1,
+ "formatters": {
+ "warnlogFormatter": {
+ "()": "bb.msg.BBLogFormatter",
+ "format": "%(levelname)s: %(message)s"
+ }
+ },
+
+ "handlers": {
+ "warnlog": {
+ "class": "logging.FileHandler",
+ "formatter": "warnlogFormatter",
+ "level": "WARNING",
+ "filename": "warn.log"
+ }
+ },
+
+ "loggers": {
+ "BitBake": {
+ "handlers": ["warnlog"]
+ }
+ },
+
+ "@disable_existing_loggers": false
+ }
+
+Note that BitBake's helper classes for structured logging are implemented in
+``lib/bb/msg.py``.
diff --git a/bitbake/doc/bitbake-user-manual/bitbake-user-manual-fetching.rst b/bitbake/doc/bitbake-user-manual/bitbake-user-manual-fetching.rst
index f9d9e617f3..fb4f0a23d7 100644
--- a/bitbake/doc/bitbake-user-manual/bitbake-user-manual-fetching.rst
+++ b/bitbake/doc/bitbake-user-manual/bitbake-user-manual-fetching.rst
@@ -424,8 +424,8 @@ This fetcher supports the following parameters:
- *"nobranch":* Tells the fetcher to not check the SHA validation for
the branch when set to "1". The default is "0". Set this option for
- the recipe that refers to the commit that is valid for a tag instead
- of the branch.
+ the recipe that refers to the commit that is valid for any namespace
+ (branch, tag, ...) instead of the branch.
- *"bareclone":* Tells the fetcher to clone a bare clone into the
destination directory without checking out a working tree. Only the
@@ -476,6 +476,14 @@ Here are some example URLs::
easy to share metadata without removing passwords. SSH keys, ``~/.netrc``
and ``~/.ssh/config`` files can be used as alternatives.
+Using tags with the git fetcher may cause surprising behaviour. Bitbake needs to
+resolve the tag to a specific revision and to do that, it has to connect to and use
+the upstream repository. This is because the revision the tags point at can change and
+we've seen cases of this happening in well known public repositories. This can mean
+many more network connections than expected and recipes may be reparsed at every build.
+Source mirrors will also be bypassed as the upstream repository is the only source
+of truth to resolve the revision accurately. For these reasons, whilst the fetcher
+can support tags, we recommend being specific about revisions in recipes.
.. _gitsm-fetcher:
@@ -688,6 +696,43 @@ Here is an example URL::
It can also be used when setting mirrors definitions using the :term:`PREMIRRORS` variable.
+.. _gcp-fetcher:
+
+GCP Fetcher (``gs://``)
+--------------------------
+
+This submodule fetches data from a
+`Google Cloud Storage Bucket <https://cloud.google.com/storage/docs/buckets>`__.
+It uses the `Google Cloud Storage Python Client <https://cloud.google.com/python/docs/reference/storage/latest>`__
+to check the status of objects in the bucket and download them.
+The use of the Python client makes it substantially faster than using command
+line tools such as gsutil.
+
+The fetcher requires the Google Cloud Storage Python Client to be installed, along
+with the gsutil tool.
+
+The fetcher requires that the machine has valid credentials for accessing the
+chosen bucket. Instructions for authentication can be found in the
+`Google Cloud documentation <https://cloud.google.com/docs/authentication/provide-credentials-adc#local-dev>`__.
+
+If it used from the OpenEmbedded build system, the fetcher can be used for
+fetching sstate artifacts from a GCS bucket by specifying the
+``SSTATE_MIRRORS`` variable as shown below::
+
+ SSTATE_MIRRORS ?= "\
+ file://.* gs://<bucket name>/PATH \
+ "
+
+The fetcher can also be used in recipes::
+
+ SRC_URI = "gs://<bucket name>/<foo_container>/<bar_file>"
+
+However, the checksum of the file should be also be provided::
+
+ SRC_URI[sha256sum] = "<sha256 string>"
+
+.. _crate-fetcher:
+
Crate Fetcher (``crate://``)
----------------------------
@@ -704,6 +749,80 @@ Here is an example URL::
SRC_URI = "crate://crates.io/glob/0.2.11"
+.. _npm-fetcher:
+
+NPM Fetcher (``npm://``)
+------------------------
+
+This submodule fetches source code from an
+`NPM <https://en.wikipedia.org/wiki/Npm_(software)>`__
+Javascript package registry.
+
+The format for the :term:`SRC_URI` setting must be::
+
+ SRC_URI = "npm://some.registry.url;ParameterA=xxx;ParameterB=xxx;..."
+
+This fetcher supports the following parameters:
+
+- *"package":* The NPM package name. This is a mandatory parameter.
+
+- *"version":* The NPM package version. This is a mandatory parameter.
+
+- *"downloadfilename":* Specifies the filename used when storing the downloaded file.
+
+- *"destsuffix":* Specifies the directory to use to unpack the package (default: ``npm``).
+
+Note that NPM fetcher only fetches the package source itself. The dependencies
+can be fetched through the `npmsw-fetcher`_.
+
+Here is an example URL with both fetchers::
+
+ SRC_URI = " \
+ npm://registry.npmjs.org/;package=cute-files;version=${PV} \
+ npmsw://${THISDIR}/${BPN}/npm-shrinkwrap.json \
+ "
+
+See :yocto_docs:`Creating Node Package Manager (NPM) Packages
+</dev-manual/packages.html#creating-node-package-manager-npm-packages>`
+in the Yocto Project manual for details about using
+:yocto_docs:`devtool <https://docs.yoctoproject.org/ref-manual/devtool-reference.html>`
+to automatically create a recipe from an NPM URL.
+
+.. _npmsw-fetcher:
+
+NPM shrinkwrap Fetcher (``npmsw://``)
+-------------------------------------
+
+This submodule fetches source code from an
+`NPM shrinkwrap <https://docs.npmjs.com/cli/v8/commands/npm-shrinkwrap>`__
+description file, which lists the dependencies
+of an NPM package while locking their versions.
+
+The format for the :term:`SRC_URI` setting must be::
+
+ SRC_URI = "npmsw://some.registry.url;ParameterA=xxx;ParameterB=xxx;..."
+
+This fetcher supports the following parameters:
+
+- *"dev":* Set this parameter to ``1`` to install "devDependencies".
+
+- *"destsuffix":* Specifies the directory to use to unpack the dependencies
+ (``${S}`` by default).
+
+Note that the shrinkwrap file can also be provided by the recipe for
+the package which has such dependencies, for example::
+
+ SRC_URI = " \
+ npm://registry.npmjs.org/;package=cute-files;version=${PV} \
+ npmsw://${THISDIR}/${BPN}/npm-shrinkwrap.json \
+ "
+
+Such a file can automatically be generated using
+:yocto_docs:`devtool <https://docs.yoctoproject.org/ref-manual/devtool-reference.html>`
+as described in the :yocto_docs:`Creating Node Package Manager (NPM) Packages
+</dev-manual/packages.html#creating-node-package-manager-npm-packages>`
+section of the Yocto Project.
+
Other Fetchers
--------------
@@ -713,10 +832,10 @@ Fetch submodules also exist for the following:
- Mercurial (``hg://``)
-- npm (``npm://``)
-
- OSC (``osc://``)
+- S3 (``s3://``)
+
- Secure FTP (``sftp://``)
- Secure Shell (``ssh://``)
diff --git a/bitbake/doc/bitbake-user-manual/bitbake-user-manual-hello.rst b/bitbake/doc/bitbake-user-manual/bitbake-user-manual-hello.rst
index 722dc5a2cc..654196ca24 100644
--- a/bitbake/doc/bitbake-user-manual/bitbake-user-manual-hello.rst
+++ b/bitbake/doc/bitbake-user-manual/bitbake-user-manual-hello.rst
@@ -18,28 +18,32 @@ it.
Obtaining BitBake
=================
-See the :ref:`bitbake-user-manual/bitbake-user-manual-hello:obtaining bitbake` section for
+See the :ref:`bitbake-user-manual/bitbake-user-manual-intro:obtaining bitbake` section for
information on how to obtain BitBake. Once you have the source code on
your machine, the BitBake directory appears as follows::
$ ls -al
- total 100
- drwxrwxr-x. 9 wmat wmat 4096 Jan 31 13:44 .
- drwxrwxr-x. 3 wmat wmat 4096 Feb 4 10:45 ..
- -rw-rw-r--. 1 wmat wmat 365 Nov 26 04:55 AUTHORS
- drwxrwxr-x. 2 wmat wmat 4096 Nov 26 04:55 bin
- drwxrwxr-x. 4 wmat wmat 4096 Jan 31 13:44 build
- -rw-rw-r--. 1 wmat wmat 16501 Nov 26 04:55 ChangeLog
- drwxrwxr-x. 2 wmat wmat 4096 Nov 26 04:55 classes
- drwxrwxr-x. 2 wmat wmat 4096 Nov 26 04:55 conf
- drwxrwxr-x. 3 wmat wmat 4096 Nov 26 04:55 contrib
- -rw-rw-r--. 1 wmat wmat 17987 Nov 26 04:55 COPYING
- drwxrwxr-x. 3 wmat wmat 4096 Nov 26 04:55 doc
- -rw-rw-r--. 1 wmat wmat 69 Nov 26 04:55 .gitignore
- -rw-rw-r--. 1 wmat wmat 849 Nov 26 04:55 HEADER
- drwxrwxr-x. 5 wmat wmat 4096 Jan 31 13:44 lib
- -rw-rw-r--. 1 wmat wmat 195 Nov 26 04:55 MANIFEST.in
- -rw-rw-r--. 1 wmat wmat 2887 Nov 26 04:55 TODO
+ total 108
+ drwxr-xr-x 9 fawkh 10000 4096 feb 24 12:10 .
+ drwx------ 36 fawkh 10000 4096 mar 2 17:00 ..
+ -rw-r--r-- 1 fawkh 10000 365 feb 24 12:10 AUTHORS
+ drwxr-xr-x 2 fawkh 10000 4096 feb 24 12:10 bin
+ -rw-r--r-- 1 fawkh 10000 16501 feb 24 12:10 ChangeLog
+ drwxr-xr-x 2 fawkh 10000 4096 feb 24 12:10 classes
+ drwxr-xr-x 2 fawkh 10000 4096 feb 24 12:10 conf
+ drwxr-xr-x 5 fawkh 10000 4096 feb 24 12:10 contrib
+ drwxr-xr-x 6 fawkh 10000 4096 feb 24 12:10 doc
+ drwxr-xr-x 8 fawkh 10000 4096 mar 2 16:26 .git
+ -rw-r--r-- 1 fawkh 10000 31 feb 24 12:10 .gitattributes
+ -rw-r--r-- 1 fawkh 10000 392 feb 24 12:10 .gitignore
+ drwxr-xr-x 13 fawkh 10000 4096 feb 24 12:11 lib
+ -rw-r--r-- 1 fawkh 10000 1224 feb 24 12:10 LICENSE
+ -rw-r--r-- 1 fawkh 10000 15394 feb 24 12:10 LICENSE.GPL-2.0-only
+ -rw-r--r-- 1 fawkh 10000 1286 feb 24 12:10 LICENSE.MIT
+ -rw-r--r-- 1 fawkh 10000 229 feb 24 12:10 MANIFEST.in
+ -rw-r--r-- 1 fawkh 10000 2413 feb 24 12:10 README
+ -rw-r--r-- 1 fawkh 10000 43 feb 24 12:10 toaster-requirements.txt
+ -rw-r--r-- 1 fawkh 10000 2887 feb 24 12:10 TODO
At this point, you should have BitBake cloned to a directory that
matches the previous listing except for dates and user names.
@@ -52,7 +56,7 @@ directory to where your local BitBake files are and run the following
command::
$ ./bin/bitbake --version
- BitBake Build Tool Core version 1.23.0, bitbake version 1.23.0
+ BitBake Build Tool Core version 2.3.1
The console output tells you what version
you are running.
@@ -130,23 +134,8 @@ Following is the complete "Hello World" example.
directory. Run the ``bitbake`` command and see what it does::
$ bitbake
- The BBPATH variable is not set and bitbake did not
- find a conf/bblayers.conf file in the expected location.
+ ERROR: The BBPATH variable is not set and bitbake did not find a conf/bblayers.conf file in the expected location.
Maybe you accidentally invoked bitbake from the wrong directory?
- DEBUG: Removed the following variables from the environment:
- GNOME_DESKTOP_SESSION_ID, XDG_CURRENT_DESKTOP,
- GNOME_KEYRING_CONTROL, DISPLAY, SSH_AGENT_PID, LANG, no_proxy,
- XDG_SESSION_PATH, XAUTHORITY, SESSION_MANAGER, SHLVL,
- MANDATORY_PATH, COMPIZ_CONFIG_PROFILE, WINDOWID, EDITOR,
- GPG_AGENT_INFO, SSH_AUTH_SOCK, GDMSESSION, GNOME_KEYRING_PID,
- XDG_SEAT_PATH, XDG_CONFIG_DIRS, LESSOPEN, DBUS_SESSION_BUS_ADDRESS,
- _, XDG_SESSION_COOKIE, DESKTOP_SESSION, LESSCLOSE, DEFAULTS_PATH,
- UBUNTU_MENUPROXY, OLDPWD, XDG_DATA_DIRS, COLORTERM, LS_COLORS
-
- The majority of this output is specific to environment variables that
- are not directly relevant to BitBake. However, the very first
- message regarding the :term:`BBPATH` variable and the
- ``conf/bblayers.conf`` file is relevant.
When you run BitBake, it begins looking for metadata files. The
:term:`BBPATH` variable is what tells BitBake where
@@ -179,20 +168,14 @@ Following is the complete "Hello World" example.
``bitbake`` command again::
$ bitbake
- ERROR: Traceback (most recent call last):
- File "/home/scott-lenovo/bitbake/lib/bb/cookerdata.py", line 163, in wrapped
- return func(fn, *args)
- File "/home/scott-lenovo/bitbake/lib/bb/cookerdata.py", line 173, in parse_config_file
- return bb.parse.handle(fn, data, include)
- File "/home/scott-lenovo/bitbake/lib/bb/parse/__init__.py", line 99, in handle
- return h['handle'](fn, data, include)
- File "/home/scott-lenovo/bitbake/lib/bb/parse/parse_py/ConfHandler.py", line 120, in handle
- abs_fn = resolve_file(fn, data)
- File "/home/scott-lenovo/bitbake/lib/bb/parse/__init__.py", line 117, in resolve_file
- raise IOError("file %s not found in %s" % (fn, bbpath))
- IOError: file conf/bitbake.conf not found in /home/scott-lenovo/hello
-
- ERROR: Unable to parse conf/bitbake.conf: file conf/bitbake.conf not found in /home/scott-lenovo/hello
+ ERROR: Unable to parse /home/scott-lenovo/bitbake/lib/bb/parse/__init__.py
+ Traceback (most recent call last):
+ File "/home/scott-lenovo/bitbake/lib/bb/parse/__init__.py", line 127, in resolve_file(fn='conf/bitbake.conf', d=<bb.data_smart.DataSmart object at 0x7f22919a3df0>):
+ if not newfn:
+ > raise IOError(errno.ENOENT, "file %s not found in %s" % (fn, bbpath))
+ fn = newfn
+ FileNotFoundError: [Errno 2] file conf/bitbake.conf not found in <projectdirectory>
+
This sample output shows that BitBake could not find the
``conf/bitbake.conf`` file in the project directory. This file is
@@ -226,12 +209,12 @@ Following is the complete "Hello World" example.
.. note::
- Without a value for PN , the variables STAMP , T , and B , prevent more
- than one recipe from working. You can fix this by either setting PN to
+ Without a value for :term:`PN`, the variables :term:`STAMP`, :term:`T`, and :term:`B`, prevent more
+ than one recipe from working. You can fix this by either setting :term:`PN` to
have a value similar to what OpenEmbedded and BitBake use in the default
- bitbake.conf file (see previous example). Or, by manually updating each
- recipe to set PN . You will also need to include PN as part of the STAMP
- , T , and B variable definitions in the local.conf file.
+ ``bitbake.conf`` file (see previous example). Or, by manually updating each
+ recipe to set :term:`PN`. You will also need to include :term:`PN` as part of the :term:`STAMP`,
+ :term:`T`, and :term:`B` variable definitions in the ``local.conf`` file.
The ``TMPDIR`` variable establishes a directory that BitBake uses
for build output and intermediate files other than the cached
@@ -254,18 +237,14 @@ Following is the complete "Hello World" example.
exists, you can run the ``bitbake`` command again::
$ bitbake
- ERROR: Traceback (most recent call last):
- File "/home/scott-lenovo/bitbake/lib/bb/cookerdata.py", line 163, in wrapped
- return func(fn, *args)
- File "/home/scott-lenovo/bitbake/lib/bb/cookerdata.py", line 177, in _inherit
- bb.parse.BBHandler.inherit(bbclass, "configuration INHERITs", 0, data)
- File "/home/scott-lenovo/bitbake/lib/bb/parse/parse_py/BBHandler.py", line 92, in inherit
- include(fn, file, lineno, d, "inherit")
- File "/home/scott-lenovo/bitbake/lib/bb/parse/parse_py/ConfHandler.py", line 100, in include
- raise ParseError("Could not %(error_out)s file %(fn)s" % vars(), oldfn, lineno)
- ParseError: ParseError in configuration INHERITs: Could not inherit file classes/base.bbclass
-
- ERROR: Unable to parse base: ParseError in configuration INHERITs: Could not inherit file classes/base.bbclass
+ ERROR: Unable to parse /home/scott-lenovo/bitbake/lib/bb/parse/parse_py/BBHandler.py
+ Traceback (most recent call last):
+ File "/home/scott-lenovo/bitbake/lib/bb/parse/parse_py/BBHandler.py", line 67, in inherit(files=['base'], fn='configuration INHERITs', lineno=0, d=<bb.data_smart.DataSmart object at 0x7fab6815edf0>):
+ if not os.path.exists(file):
+ > raise ParseError("Could not inherit file %s" % (file), fn, lineno)
+
+ bb.parse.ParseError: ParseError in configuration INHERITs: Could not inherit file classes/base.bbclass
+
In the sample output,
BitBake could not find the ``classes/base.bbclass`` file. You need
@@ -284,7 +263,10 @@ Following is the complete "Hello World" example.
$ mkdir classes
Move to the ``classes`` directory and then create the
- ``base.bbclass`` file by inserting this single line: addtask build
+ ``base.bbclass`` file by inserting this single line::
+
+ addtask build
+
The minimal task that BitBake runs is the ``do_build`` task. This is
all the example needs in order to build the project. Of course, the
``base.bbclass`` can have much more depending on which build
@@ -328,10 +310,19 @@ Following is the complete "Hello World" example.
BBFILES += "${LAYERDIR}/*.bb"
BBFILE_COLLECTIONS += "mylayer"
BBFILE_PATTERN_mylayer := "^${LAYERDIR_RE}/"
+ LAYERSERIES_CORENAMES = "hello_world_example"
+ LAYERSERIES_COMPAT_mylayer = "hello_world_example"
For information on these variables, click on :term:`BBFILES`,
- :term:`LAYERDIR`, :term:`BBFILE_COLLECTIONS` or :term:`BBFILE_PATTERN_mylayer <BBFILE_PATTERN>`
- to go to the definitions in the glossary.
+ :term:`LAYERDIR`, :term:`BBFILE_COLLECTIONS`, :term:`BBFILE_PATTERN_mylayer <BBFILE_PATTERN>`
+ or :term:`LAYERSERIES_COMPAT` to go to the definitions in the glossary.
+
+ .. note::
+
+ We are setting both ``LAYERSERIES_CORENAMES`` and :term:`LAYERSERIES_COMPAT` in this particular case, because we
+ are using bitbake without OpenEmbedded.
+ You should usually just use :term:`LAYERSERIES_COMPAT` to specify the OE-Core versions for which your layer
+ is compatible, and add the meta-openembedded layer to your project.
You need to create the recipe file next. Inside your layer at the
top-level, use an editor and create a recipe file named
@@ -389,12 +380,14 @@ Following is the complete "Hello World" example.
target::
$ bitbake printhello
+ Loading cache: 100% |
+ Loaded 0 entries from dependency cache.
Parsing recipes: 100% |##################################################################################|
- Time: 00:00:00
Parsing of 1 .bb files complete (0 cached, 1 parsed). 1 targets, 0 skipped, 0 masked, 0 errors.
NOTE: Resolving any missing task queue dependencies
- NOTE: Preparing RunQueue
- NOTE: Executing RunQueue Tasks
+ Initialising tasks: 100% |###############################################################################|
+ NOTE: No setscene tasks
+ NOTE: Executing Tasks
********************
* *
* Hello, World! *
diff --git a/bitbake/doc/bitbake-user-manual/bitbake-user-manual-metadata.rst b/bitbake/doc/bitbake-user-manual/bitbake-user-manual-metadata.rst
index af4b135867..58975f4c88 100644
--- a/bitbake/doc/bitbake-user-manual/bitbake-user-manual-metadata.rst
+++ b/bitbake/doc/bitbake-user-manual/bitbake-user-manual-metadata.rst
@@ -195,22 +195,45 @@ value. However, if ``A`` is not set, the variable is set to "aval".
Setting a weak default value (??=)
----------------------------------
-It is possible to use a "weaker" assignment than in the previous section
-by using the "??=" operator. This assignment behaves identical to "?="
-except that the assignment is made at the end of the parsing process
-rather than immediately. Consequently, when multiple "??=" assignments
-exist, the last one is used. Also, any "=" or "?=" assignment will
-override the value set with "??=". Here is an example::
+The weak default value of a variable is the value which that variable
+will expand to if no value has been assigned to it via any of the other
+assignment operators. The "??=" operator takes effect immediately, replacing
+any previously defined weak default value. Here is an example::
- A ??= "somevalue"
- A ??= "someothervalue"
+ W ??= "x"
+ A := "${W}" # Immediate variable expansion
+ W ??= "y"
+ B := "${W}" # Immediate variable expansion
+ W ??= "z"
+ C = "${W}"
+ W ?= "i"
-If ``A`` is set before the above statements are
-parsed, the variable retains its value. If ``A`` is not set, the
-variable is set to "someothervalue".
+After parsing we will have::
-Again, this assignment is a "lazy" or "weak" assignment because it does
-not occur until the end of the parsing process.
+ A = "x"
+ B = "y"
+ C = "i"
+ W = "i"
+
+Appending and prepending non-override style will not substitute the weak
+default value, which means that after parsing::
+
+ W ??= "x"
+ W += "y"
+
+we will have::
+
+ W = " y"
+
+On the other hand, override-style appends/prepends/removes are applied after
+any active weak default value has been substituted::
+
+ W ??= "x"
+ W:append = "y"
+
+After parsing we will have::
+
+ W = "xy"
Immediate variable expansion (:=)
---------------------------------
@@ -296,6 +319,10 @@ The variable ``D`` becomes "dvaladditional data".
You must control all spacing when you use the override syntax.
+.. note::
+
+ The overrides are applied in this order, ":append", ":prepend", ":remove".
+
It is also possible to append and prepend to shell functions and
BitBake-style Python functions. See the ":ref:`bitbake-user-manual/bitbake-user-manual-metadata:shell functions`" and ":ref:`bitbake-user-manual/bitbake-user-manual-metadata:bitbake-style python functions`"
sections for examples.
@@ -307,7 +334,8 @@ Removal (Override Style Syntax)
You can remove values from lists using the removal override style
syntax. Specifying a value for removal causes all occurrences of that
-value to be removed from the variable.
+value to be removed from the variable. Unlike ":append" and ":prepend",
+there is no need to add a leading or trailing space to the value.
When you use this syntax, BitBake expects one or more strings.
Surrounding spaces and spacing are preserved. Here is an example::
@@ -328,6 +356,28 @@ The variable ``FOO`` becomes
Like ":append" and ":prepend", ":remove" is applied at variable
expansion time.
+.. note::
+
+ The overrides are applied in this order, ":append", ":prepend", ":remove".
+ This implies it is not possible to re-append previously removed strings.
+ However, one can undo a ":remove" by using an intermediate variable whose
+ content is passed to the ":remove" so that modifying the intermediate
+ variable equals to keeping the string in::
+
+ FOOREMOVE = "123 456 789"
+ FOO:remove = "${FOOREMOVE}"
+ ...
+ FOOREMOVE = "123 789"
+
+ This expands to ``FOO:remove = "123 789"``.
+
+.. note::
+
+ Override application order may not match variable parse history, i.e.
+ the output of ``bitbake -e`` may contain ":remove" before ":append",
+ but the result will be removed string, because ":remove" is handled
+ last.
+
Override Style Operation Advantages
-----------------------------------
@@ -398,6 +448,12 @@ documentation to a BitBake variable as follows::
CACHE[doc] = "The directory holding the cache of the metadata."
+.. note::
+
+ Variable flag names starting with an underscore (``_``) character
+ are allowed but are ignored by ``d.getVarFlags("VAR")``
+ in Python code. Such flag names are used internally by BitBake.
+
Inline Python Variable Expansion
--------------------------------
@@ -1440,12 +1496,35 @@ functionality of the task:
directory listed is used as the current working directory for the
task.
+- ``[file-checksums]``: Controls the file dependencies for a task. The
+ baseline file list is the set of files associated with
+ :term:`SRC_URI`. May be used to set additional dependencies on
+ files not associated with :term:`SRC_URI`.
+
+ The value set to the list is a file-boolean pair where the first
+ value is the file name and the second is whether or not it
+ physically exists on the filesystem. ::
+
+ do_configure[file-checksums] += "${MY_DIRPATH}/my-file.txt:True"
+
+ It is important to record any paths which the task looked at and
+ which didn't exist. This means that if these do exist at a later
+ time, the task can be rerun with the new additional files. The
+ "exists" True or False value after the path allows this to be
+ handled.
+
- ``[lockfiles]``: Specifies one or more lockfiles to lock while the
task executes. Only one task may hold a lockfile, and any task that
attempts to lock an already locked file will block until the lock is
released. You can use this variable flag to accomplish mutual
exclusion.
+- ``[network]``: When set to "1", allows a task to access the network. By
+ default, only the ``do_fetch`` task is granted network access. Recipes
+ shouldn't access the network outside of ``do_fetch`` as it usually
+ undermines fetcher source mirroring, image and licence manifests, software
+ auditing and supply chain security.
+
- ``[noexec]``: When set to "1", marks the task as being empty, with
no execution required. You can use the ``[noexec]`` flag to set up
tasks as dependency placeholders, or to disable tasks defined
@@ -1899,6 +1978,33 @@ looking at the source code of the ``bb`` module, which is in
the commonly used functions ``bb.utils.contains()`` and
``bb.utils.mkdirhier()``, which come with docstrings.
+Extending Python Library Code
+-----------------------------
+
+If you wish to add your own Python library code (e.g. to provide
+functions/classes you can use from Python functions in the metadata)
+you can do so from any layer using the ``addpylib`` directive.
+This directive is typically added to your layer configuration (
+``conf/layer.conf``) although it will be handled in any ``.conf`` file.
+
+Usage is of the form::
+
+ addpylib <directory> <namespace>
+
+Where <directory> specifies the directory to add to the library path.
+The specified <namespace> is imported automatically, and if the imported
+module specifies an attribute named ``BBIMPORTS``, that list of
+sub-modules is iterated and imported too.
+
+Testing and Debugging BitBake Python code
+-----------------------------------------
+
+The OpenEmbedded build system implements a convenient ``pydevshell`` target which
+you can use to access the BitBake datastore and experiment with your own Python
+code. See :yocto_docs:`Using a Python Development Shell
+</dev-manual/python-development-shell.html#using-a-python-development-shell>` in the Yocto
+Project manual for details.
+
Task Checksums and Setscene
===========================
diff --git a/bitbake/doc/bitbake-user-manual/bitbake-user-manual-ref-variables-context.rst b/bitbake/doc/bitbake-user-manual/bitbake-user-manual-ref-variables-context.rst
new file mode 100644
index 0000000000..e9c454ba11
--- /dev/null
+++ b/bitbake/doc/bitbake-user-manual/bitbake-user-manual-ref-variables-context.rst
@@ -0,0 +1,91 @@
+.. SPDX-License-Identifier: CC-BY-2.5
+
+================
+Variable Context
+================
+
+|
+
+Variables might only have an impact or can be used in certain contexts. Some
+should only be used in global files like ``.conf``, while others are intended only
+for local files like ``.bb``. This chapter aims to describe some important variable
+contexts.
+
+.. _ref-varcontext-configuration:
+
+BitBake's own configuration
+===========================
+
+Variables starting with ``BB_`` usually configure the behaviour of BitBake itself.
+For example, one could configure:
+
+- System resources, like disk space to be used (:term:`BB_DISKMON_DIRS`),
+ or the number of tasks to be run in parallel by BitBake (:term:`BB_NUMBER_THREADS`).
+
+- How the fetchers shall behave, e.g., :term:`BB_FETCH_PREMIRRORONLY` is used
+ by BitBake to determine if BitBake's fetcher shall search only
+ :term:`PREMIRRORS` for files.
+
+Those variables are usually configured globally.
+
+BitBake configuration
+=====================
+
+There are variables:
+
+- Like :term:`B` or :term:`T`, that are used to specify directories used by
+ BitBake during the build of a particular recipe. Those variables are
+ specified in ``bitbake.conf``. Some, like :term:`B`, are quite often
+ overwritten in recipes.
+
+- Starting with ``FAKEROOT``, to configure how the ``fakeroot`` command is
+ handled. Those are usually set by ``bitbake.conf`` and might get adapted in a
+ ``bbclass``.
+
+- Detailing where BitBake will store and fetch information from, for
+ data reuse between build runs like :term:`CACHE`, :term:`DL_DIR` or
+ :term:`PERSISTENT_DIR`. Those are usually global.
+
+
+Layers and files
+================
+
+Variables starting with ``LAYER`` configure how BitBake handles layers.
+Additionally, variables starting with ``BB`` configure how layers and files are
+handled. For example:
+
+- :term:`LAYERDEPENDS` is used to configure on which layers a given layer
+ depends.
+
+- The configured layers are contained in :term:`BBLAYERS` and files in
+ :term:`BBFILES`.
+
+Those variables are often used in the files ``layer.conf`` and ``bblayers.conf``.
+
+Recipes and packages
+====================
+
+Variables handling recipes and packages can be split into:
+
+- :term:`PN`, :term:`PV` or :term:`PF` for example, contain information about
+ the name or revision of a recipe or package. Usually, the default set in
+ ``bitbake.conf`` is used, but those are from time to time overwritten in
+ recipes.
+
+- :term:`SUMMARY`, :term:`DESCRIPTION`, :term:`LICENSE` or :term:`HOMEPAGE`
+ contain the expected information and should be set specifically for every
+ recipe.
+
+- In recipes, variables are also used to control build and runtime
+ dependencies between recipes/packages with other recipes/packages. The
+ most common should be: :term:`PROVIDES`, :term:`RPROVIDES`, :term:`DEPENDS`,
+ and :term:`RDEPENDS`.
+
+- There are further variables starting with ``SRC`` that specify the sources in
+ a recipe like :term:`SRC_URI` or :term:`SRCDATE`. Those are also usually set
+ in recipes.
+
+- Which version or provider of a recipe should be given preference when
+ multiple recipes would provide the same item, is controlled by variables
+ starting with ``PREFERRED_``. Those are normally set in the configuration
+ files of a ``MACHINE`` or ``DISTRO``.
diff --git a/bitbake/doc/bitbake-user-manual/bitbake-user-manual-ref-variables.rst b/bitbake/doc/bitbake-user-manual/bitbake-user-manual-ref-variables.rst
index af4ff9805c..899e584f91 100644
--- a/bitbake/doc/bitbake-user-manual/bitbake-user-manual-ref-variables.rst
+++ b/bitbake/doc/bitbake-user-manual/bitbake-user-manual-ref-variables.rst
@@ -40,8 +40,7 @@ overview of their function and contents.
Azure Storage Shared Access Signature, when using the
:ref:`Azure Storage fetcher <bitbake-user-manual/bitbake-user-manual-fetching:fetchers>`
This variable can be defined to be used by the fetcher to authenticate
- and gain access to non-public artifacts.
- ::
+ and gain access to non-public artifacts::
AZ_SAS = ""se=2021-01-01&sp=r&sv=2018-11-09&sr=c&skoid=<skoid>&sig=<signature>""
@@ -100,10 +99,26 @@ overview of their function and contents.
the path of the build. BitBake's output should not (and usually does
not) depend on the directory in which it was built.
+ :term:`BB_CACHEDIR`
+ Specifies the code parser cache directory (distinct from :term:`CACHE`
+ and :term:`PERSISTENT_DIR` although they can be set to the same value
+ if desired). The default value is "${TOPDIR}/cache".
+
:term:`BB_CHECK_SSL_CERTS`
Specifies if SSL certificates should be checked when fetching. The default
value is ``1`` and certificates are not checked if the value is set to ``0``.
+ :term:`BB_HASH_CODEPARSER_VALS`
+ Specifies values for variables to use when populating the codeparser cache.
+ This can be used selectively to set dummy values for variables to avoid
+ the codeparser cache growing on every parse. Variables that would typically
+ be included are those where the value is not significant for where the
+ codeparser cache is used (i.e. when calculating variable dependencies for
+ code fragments.) The value is space-separated without quoting values, for
+ example::
+
+ BB_HASH_CODEPARSER_VALS = "T=/ WORKDIR=/ DATE=1234 TIME=1234"
+
:term:`BB_CONSOLELOG`
Specifies the path to a log file into which BitBake's user interface
writes output during the build.
@@ -344,6 +359,14 @@ overview of their function and contents.
For example usage, see :term:`BB_GIT_SHALLOW`.
+ :term:`BB_GLOBAL_PYMODULES`
+ Specifies the list of Python modules to place in the global namespace.
+ It is intended that only the core layer should set this and it is meant
+ to be a very small list, typically just ``os`` and ``sys``.
+ :term:`BB_GLOBAL_PYMODULES` is expected to be set before the first
+ ``addpylib`` directive.
+ See also ":ref:`bitbake-user-manual/bitbake-user-manual-metadata:extending python library code`".
+
:term:`BB_HASHCHECK_FUNCTION`
Specifies the name of the function to call during the "setscene" part
of the task's execution in order to validate the list of task hashes.
@@ -401,7 +424,7 @@ overview of their function and contents.
Example usage::
- BB_HASHSERVE_UPSTREAM = "typhoon.yocto.io:8687"
+ BB_HASHSERVE_UPSTREAM = "hashserv.yocto.io:8687"
:term:`BB_INVALIDCONF`
Used in combination with the ``ConfigParsed`` event to trigger
@@ -409,6 +432,15 @@ overview of their function and contents.
``ConfigParsed`` event can set the variable to trigger the re-parse.
You must be careful to avoid recursive loops with this functionality.
+ :term:`BB_LOADFACTOR_MAX`
+ Setting this to a value will cause BitBake to check the system load
+ average before executing new tasks. If the load average is above the
+ the number of CPUs multipled by this factor, no new task will be started
+ unless there is no task executing. A value of "1.5" has been found to
+ work reasonably. This is helpful for systems which don't have pressure
+ regulation enabled, which is more granular. Pressure values take
+ precedence over loadfactor.
+
:term:`BB_LOGCONFIG`
Specifies the name of a config file that contains the user logging
configuration. See
@@ -483,13 +515,64 @@ overview of their function and contents.
You must set this variable in the external environment in order
for it to work.
+ :term:`BB_PRESSURE_MAX_CPU`
+ Specifies a maximum CPU pressure threshold, above which BitBake's
+ scheduler will not start new tasks (providing there is at least
+ one active task). If no value is set, CPU pressure is not
+ monitored when starting tasks.
+
+ The pressure data is calculated based upon what Linux kernels since
+ version 4.20 expose under ``/proc/pressure``. The threshold represents
+ the difference in "total" pressure from the previous second. The
+ minimum value is 1.0 (extremely slow builds) and the maximum is
+ 1000000 (a pressure value unlikely to ever be reached).
+
+ This threshold can be set in ``conf/local.conf`` as::
+
+ BB_PRESSURE_MAX_CPU = "500"
+
+ :term:`BB_PRESSURE_MAX_IO`
+ Specifies a maximum I/O pressure threshold, above which BitBake's
+ scheduler will not start new tasks (providing there is at least
+ one active task). If no value is set, I/O pressure is not
+ monitored when starting tasks.
+
+ The pressure data is calculated based upon what Linux kernels since
+ version 4.20 expose under ``/proc/pressure``. The threshold represents
+ the difference in "total" pressure from the previous second. The
+ minimum value is 1.0 (extremely slow builds) and the maximum is
+ 1000000 (a pressure value unlikely to ever be reached).
+
+ At this point in time, experiments show that IO pressure tends to
+ be short-lived and regulating just the CPU with
+ :term:`BB_PRESSURE_MAX_CPU` can help to reduce it.
+
+ :term:`BB_PRESSURE_MAX_MEMORY`
+
+ Specifies a maximum memory pressure threshold, above which BitBake's
+ scheduler will not start new tasks (providing there is at least
+ one active task). If no value is set, memory pressure is not
+ monitored when starting tasks.
+
+ The pressure data is calculated based upon what Linux kernels since
+ version 4.20 expose under ``/proc/pressure``. The threshold represents
+ the difference in "total" pressure from the previous second. The
+ minimum value is 1.0 (extremely slow builds) and the maximum is
+ 1000000 (a pressure value unlikely to ever be reached).
+
+ Memory pressure is experienced when time is spent swapping,
+ refaulting pages from the page cache or performing direct reclaim.
+ This is why memory pressure is rarely seen, but setting this variable
+ might be useful as a last resort to prevent OOM errors if they are
+ occurring during builds.
+
:term:`BB_RUNFMT`
Specifies the name of the executable script files (i.e. run files)
saved into ``${``\ :term:`T`\ ``}``. By default, the
:term:`BB_RUNFMT` variable is undefined and the run filenames get
created using the following form::
- run.{task}.{pid}
+ run.{func}.{pid}
If you want to force run files to take a specific name, you can set this
variable in a configuration file.
@@ -846,9 +929,9 @@ overview of their function and contents.
section.
:term:`BBPATH`
- Used by BitBake to locate class (``.bbclass``) and configuration
- (``.conf``) files. This variable is analogous to the ``PATH``
- variable.
+ A colon-separated list used by BitBake to locate class (``.bbclass``)
+ and configuration (``.conf``) files. This variable is analogous to the
+ ``PATH`` variable.
If you run BitBake from a directory outside of the build directory,
you must be sure to set :term:`BBPATH` to point to the build directory.
@@ -940,7 +1023,7 @@ overview of their function and contents.
``bblayers.conf`` configuration file.
To exclude a recipe from a world build using this variable, set the
- variable to "1" in the recipe.
+ variable to "1" in the recipe. Set it to "0" to add it back to world build.
.. note::
@@ -998,6 +1081,11 @@ overview of their function and contents.
environment variable. The value is a colon-separated list of
directories that are searched left-to-right in order.
+ :term:`FILE_LAYERNAME`
+ During parsing and task execution, this is set to the name of the
+ layer containing the recipe file. Code can use this to identify which
+ layer a recipe is from.
+
:term:`GITDIR`
The directory in which a local copy of a Git repository is stored
when it is cloned.
@@ -1046,6 +1134,29 @@ overview of their function and contents.
variable is not available outside of ``layer.conf`` and references
are expanded immediately when parsing of the file completes.
+ :term:`LAYERSERIES_COMPAT`
+ Lists the versions of the OpenEmbedded-Core (OE-Core) for which
+ a layer is compatible. Using the :term:`LAYERSERIES_COMPAT` variable
+ allows the layer maintainer to indicate which combinations of the
+ layer and OE-Core can be expected to work. The variable gives the
+ system a way to detect when a layer has not been tested with new
+ releases of OE-Core (e.g. the layer is not maintained).
+
+ To specify the OE-Core versions for which a layer is compatible, use
+ this variable in your layer's ``conf/layer.conf`` configuration file.
+ For the list, use the Yocto Project release name (e.g. "kirkstone",
+ "mickledore"). To specify multiple OE-Core versions for the layer, use
+ a space-separated list::
+
+ LAYERSERIES_COMPAT_layer_root_name = "kirkstone mickledore"
+
+ .. note::
+
+ Setting :term:`LAYERSERIES_COMPAT` is required by the Yocto Project
+ Compatible version 2 standard.
+ The OpenEmbedded build system produces a warning if the variable
+ is not set for any given layer.
+
:term:`LAYERVERSION`
Optionally specifies the version of a layer as a single number. You
can use this variable within
@@ -1068,8 +1179,8 @@ overview of their function and contents.
order.
:term:`OVERRIDES`
- BitBake uses :term:`OVERRIDES` to control what variables are overridden
- after BitBake parses recipes and configuration files.
+ A colon-separated list that BitBake uses to control what variables are
+ overridden after BitBake parses recipes and configuration files.
Following is a simple example that uses an overrides list based on
machine architectures: OVERRIDES = "arm:x86:mips:powerpc" You can
diff --git a/bitbake/doc/index.rst b/bitbake/doc/index.rst
index 3ff8b1580f..ee1660ac15 100644
--- a/bitbake/doc/index.rst
+++ b/bitbake/doc/index.rst
@@ -13,6 +13,7 @@ BitBake User Manual
bitbake-user-manual/bitbake-user-manual-intro
bitbake-user-manual/bitbake-user-manual-execution
bitbake-user-manual/bitbake-user-manual-metadata
+ bitbake-user-manual/bitbake-user-manual-ref-variables-context
bitbake-user-manual/bitbake-user-manual-fetching
bitbake-user-manual/bitbake-user-manual-ref-variables
bitbake-user-manual/bitbake-user-manual-hello
diff --git a/bitbake/doc/releases.rst b/bitbake/doc/releases.rst
index 6635032c01..b38b1c0652 100644
--- a/bitbake/doc/releases.rst
+++ b/bitbake/doc/releases.rst
@@ -1,61 +1,63 @@
.. SPDX-License-Identifier: CC-BY-2.5
-===========================
- Supported Release Manuals
-===========================
+=================================
+BitBake Supported Release Manuals
+=================================
-******************************
-Release Series 3.4 (honister)
-******************************
+*******************************
+Release Series 4.2 (mickledore)
+*******************************
-- :yocto_docs:`3.4 BitBake User Manual </3.4/bitbake-user-manual/bitbake-user-manual.html>`
-- :yocto_docs:`3.4.1 BitBake User Manual </3.4.1/bitbake-user-manual/bitbake-user-manual.html>`
-- :yocto_docs:`3.4.2 BitBake User Manual </3.4.2/bitbake-user-manual/bitbake-user-manual.html>`
+- :yocto_docs:`BitBake 2.4 User Manual </bitbake/2.4/>`
******************************
-Release Series 3.3 (hardknott)
+Release Series 4.0 (kirkstone)
******************************
-- :yocto_docs:`3.3 BitBake User Manual </3.3/bitbake-user-manual/bitbake-user-manual.html>`
-- :yocto_docs:`3.3.1 BitBake User Manual </3.3.1/bitbake-user-manual/bitbake-user-manual.html>`
-- :yocto_docs:`3.3.2 BitBake User Manual </3.3.2/bitbake-user-manual/bitbake-user-manual.html>`
-- :yocto_docs:`3.3.3 BitBake User Manual </3.3.3/bitbake-user-manual/bitbake-user-manual.html>`
-- :yocto_docs:`3.3.4 BitBake User Manual </3.3.4/bitbake-user-manual/bitbake-user-manual.html>`
-- :yocto_docs:`3.3.5 BitBake User Manual </3.3.5/bitbake-user-manual/bitbake-user-manual.html>`
+- :yocto_docs:`BitBake 2.0 User Manual </bitbake/2.0/>`
****************************
Release Series 3.1 (dunfell)
****************************
-- :yocto_docs:`3.1 BitBake User Manual </3.1/bitbake-user-manual/bitbake-user-manual.html>`
-- :yocto_docs:`3.1.1 BitBake User Manual </3.1.1/bitbake-user-manual/bitbake-user-manual.html>`
-- :yocto_docs:`3.1.2 BitBake User Manual </3.1.2/bitbake-user-manual/bitbake-user-manual.html>`
-- :yocto_docs:`3.1.3 BitBake User Manual </3.1.3/bitbake-user-manual/bitbake-user-manual.html>`
-- :yocto_docs:`3.1.4 BitBake User Manual </3.1.4/bitbake-user-manual/bitbake-user-manual.html>`
-- :yocto_docs:`3.1.5 BitBake User Manual </3.1.5/bitbake-user-manual/bitbake-user-manual.html>`
-- :yocto_docs:`3.1.6 BitBake User Manual </3.1.6/bitbake-user-manual/bitbake-user-manual.html>`
-- :yocto_docs:`3.1.7 BitBake User Manual </3.1.7/bitbake-user-manual/bitbake-user-manual.html>`
-- :yocto_docs:`3.1.8 BitBake User Manual </3.1.8/bitbake-user-manual/bitbake-user-manual.html>`
-- :yocto_docs:`3.1.9 BitBake User Manual </3.1.9/bitbake-user-manual/bitbake-user-manual.html>`
-- :yocto_docs:`3.1.10 BitBake User Manual </3.1.10/bitbake-user-manual/bitbake-user-manual.html>`
-- :yocto_docs:`3.1.11 BitBake User Manual </3.1.11/bitbake-user-manual/bitbake-user-manual.html>`
-- :yocto_docs:`3.1.12 BitBake User Manual </3.1.12/bitbake-user-manual/bitbake-user-manual.html>`
-- :yocto_docs:`3.1.13 BitBake User Manual </3.1.13/bitbake-user-manual/bitbake-user-manual.html>`
-- :yocto_docs:`3.1.14 BitBake User Manual </3.1.14/bitbake-user-manual/bitbake-user-manual.html>`
-
-==========================
- Outdated Release Manuals
-==========================
+- :yocto_docs:`BitBake 1.46 User Manual </bitbake/1.46/>`
+
+================================
+BitBake Outdated Release Manuals
+================================
+
+*****************************
+Release Series 4.1 (langdale)
+*****************************
+
+- :yocto_docs:`BitBake 2.2 User Manual </bitbake/2.2/>`
+
+******************************
+Release Series 3.4 (honister)
+******************************
+
+- :yocto_docs:`BitBake 1.52 User Manual </bitbake/1.52/>`
+
+******************************
+Release Series 3.3 (hardknott)
+******************************
+
+- :yocto_docs:`BitBake 1.50 User Manual </bitbake/1.50/>`
*******************************
Release Series 3.2 (gatesgarth)
*******************************
-- :yocto_docs:`3.2 BitBake User Manual </3.2/bitbake-user-manual/bitbake-user-manual.html>`
-- :yocto_docs:`3.2.1 BitBake User Manual </3.2.1/bitbake-user-manual/bitbake-user-manual.html>`
-- :yocto_docs:`3.2.2 BitBake User Manual </3.2.2/bitbake-user-manual/bitbake-user-manual.html>`
-- :yocto_docs:`3.2.3 BitBake User Manual </3.2.3/bitbake-user-manual/bitbake-user-manual.html>`
-- :yocto_docs:`3.2.4 BitBake User Manual </3.2.4/bitbake-user-manual/bitbake-user-manual.html>`
+- :yocto_docs:`BitBake 1.48 User Manual </bitbake/1.48/>`
+
+*******************************************
+Release Series 3.1 (dunfell first versions)
+*******************************************
+
+- :yocto_docs:`3.1 BitBake User Manual </3.1/bitbake-user-manual/bitbake-user-manual.html>`
+- :yocto_docs:`3.1.1 BitBake User Manual </3.1.1/bitbake-user-manual/bitbake-user-manual.html>`
+- :yocto_docs:`3.1.2 BitBake User Manual </3.1.2/bitbake-user-manual/bitbake-user-manual.html>`
+- :yocto_docs:`3.1.3 BitBake User Manual </3.1.3/bitbake-user-manual/bitbake-user-manual.html>`
*************************
Release Series 3.0 (zeus)
diff --git a/bitbake/lib/bb/COW.py b/bitbake/lib/bb/COW.py
index 23c22b65ef..76bc08a3ea 100644
--- a/bitbake/lib/bb/COW.py
+++ b/bitbake/lib/bb/COW.py
@@ -3,6 +3,8 @@
#
# Copyright (C) 2006 Tim Ansell
#
+# SPDX-License-Identifier: GPL-2.0-only
+#
# Please Note:
# Be careful when using mutable types (ie Dict and Lists) - operations involving these are SLOW.
# Assign a file to __warn__ to get warnings about slow operations.
diff --git a/bitbake/lib/bb/__init__.py b/bitbake/lib/bb/__init__.py
index b8333bdb81..15013540c2 100644
--- a/bitbake/lib/bb/__init__.py
+++ b/bitbake/lib/bb/__init__.py
@@ -9,12 +9,19 @@
# SPDX-License-Identifier: GPL-2.0-only
#
-__version__ = "2.0.0"
+__version__ = "2.9.0"
import sys
-if sys.version_info < (3, 6, 0):
- raise RuntimeError("Sorry, python 3.6.0 or later is required for this version of bitbake")
+if sys.version_info < (3, 8, 0):
+ raise RuntimeError("Sorry, python 3.8.0 or later is required for this version of bitbake")
+if sys.version_info < (3, 10, 0):
+ # With python 3.8 and 3.9, we see errors of "libgcc_s.so.1 must be installed for pthread_cancel to work"
+ # https://stackoverflow.com/questions/64797838/libgcc-s-so-1-must-be-installed-for-pthread-cancel-to-work
+ # https://bugs.ams1.psf.io/issue42888
+ # so ensure libgcc_s is loaded early on
+ import ctypes
+ libgcc_s = ctypes.CDLL('libgcc_s.so.1')
class BBHandledException(Exception):
"""
@@ -60,6 +67,10 @@ class BBLoggerMixin(object):
return
if loglevel < bb.msg.loggerDefaultLogLevel:
return
+
+ if not isinstance(level, int) or not isinstance(msg, str):
+ mainlogger.warning("Invalid arguments in bbdebug: %s" % repr((level, msg,) + args))
+
return self.log(loglevel, msg, *args, **kwargs)
def plain(self, msg, *args, **kwargs):
diff --git a/bitbake/lib/bb/acl.py b/bitbake/lib/bb/acl.py
new file mode 100755
index 0000000000..0f41b275cf
--- /dev/null
+++ b/bitbake/lib/bb/acl.py
@@ -0,0 +1,215 @@
+#! /usr/bin/env python3
+#
+# Copyright 2023 by Garmin Ltd. or its subsidiaries
+#
+# SPDX-License-Identifier: MIT
+
+
+import sys
+import ctypes
+import os
+import errno
+import pwd
+import grp
+
+libacl = ctypes.CDLL("libacl.so.1", use_errno=True)
+
+
+ACL_TYPE_ACCESS = 0x8000
+ACL_TYPE_DEFAULT = 0x4000
+
+ACL_FIRST_ENTRY = 0
+ACL_NEXT_ENTRY = 1
+
+ACL_UNDEFINED_TAG = 0x00
+ACL_USER_OBJ = 0x01
+ACL_USER = 0x02
+ACL_GROUP_OBJ = 0x04
+ACL_GROUP = 0x08
+ACL_MASK = 0x10
+ACL_OTHER = 0x20
+
+ACL_READ = 0x04
+ACL_WRITE = 0x02
+ACL_EXECUTE = 0x01
+
+acl_t = ctypes.c_void_p
+acl_entry_t = ctypes.c_void_p
+acl_permset_t = ctypes.c_void_p
+acl_perm_t = ctypes.c_uint
+
+acl_tag_t = ctypes.c_int
+
+libacl.acl_free.argtypes = [acl_t]
+
+
+def acl_free(acl):
+ libacl.acl_free(acl)
+
+
+libacl.acl_get_file.restype = acl_t
+libacl.acl_get_file.argtypes = [ctypes.c_char_p, ctypes.c_uint]
+
+
+def acl_get_file(path, typ):
+ acl = libacl.acl_get_file(os.fsencode(path), typ)
+ if acl is None:
+ err = ctypes.get_errno()
+ raise OSError(err, os.strerror(err), str(path))
+
+ return acl
+
+
+libacl.acl_get_entry.argtypes = [acl_t, ctypes.c_int, ctypes.c_void_p]
+
+
+def acl_get_entry(acl, entry_id):
+ entry = acl_entry_t()
+ ret = libacl.acl_get_entry(acl, entry_id, ctypes.byref(entry))
+ if ret < 0:
+ err = ctypes.get_errno()
+ raise OSError(err, os.strerror(err))
+
+ if ret == 0:
+ return None
+
+ return entry
+
+
+libacl.acl_get_tag_type.argtypes = [acl_entry_t, ctypes.c_void_p]
+
+
+def acl_get_tag_type(entry_d):
+ tag = acl_tag_t()
+ ret = libacl.acl_get_tag_type(entry_d, ctypes.byref(tag))
+ if ret < 0:
+ err = ctypes.get_errno()
+ raise OSError(err, os.strerror(err))
+ return tag.value
+
+
+libacl.acl_get_qualifier.restype = ctypes.c_void_p
+libacl.acl_get_qualifier.argtypes = [acl_entry_t]
+
+
+def acl_get_qualifier(entry_d):
+ ret = libacl.acl_get_qualifier(entry_d)
+ if ret is None:
+ err = ctypes.get_errno()
+ raise OSError(err, os.strerror(err))
+ return ctypes.c_void_p(ret)
+
+
+libacl.acl_get_permset.argtypes = [acl_entry_t, ctypes.c_void_p]
+
+
+def acl_get_permset(entry_d):
+ permset = acl_permset_t()
+ ret = libacl.acl_get_permset(entry_d, ctypes.byref(permset))
+ if ret < 0:
+ err = ctypes.get_errno()
+ raise OSError(err, os.strerror(err))
+
+ return permset
+
+
+libacl.acl_get_perm.argtypes = [acl_permset_t, acl_perm_t]
+
+
+def acl_get_perm(permset_d, perm):
+ ret = libacl.acl_get_perm(permset_d, perm)
+ if ret < 0:
+ err = ctypes.get_errno()
+ raise OSError(err, os.strerror(err))
+ return bool(ret)
+
+
+class Entry(object):
+ def __init__(self, tag, qualifier, mode):
+ self.tag = tag
+ self.qualifier = qualifier
+ self.mode = mode
+
+ def __str__(self):
+ typ = ""
+ qual = ""
+ if self.tag == ACL_USER:
+ typ = "user"
+ qual = pwd.getpwuid(self.qualifier).pw_name
+ elif self.tag == ACL_GROUP:
+ typ = "group"
+ qual = grp.getgrgid(self.qualifier).gr_name
+ elif self.tag == ACL_USER_OBJ:
+ typ = "user"
+ elif self.tag == ACL_GROUP_OBJ:
+ typ = "group"
+ elif self.tag == ACL_MASK:
+ typ = "mask"
+ elif self.tag == ACL_OTHER:
+ typ = "other"
+
+ r = "r" if self.mode & ACL_READ else "-"
+ w = "w" if self.mode & ACL_WRITE else "-"
+ x = "x" if self.mode & ACL_EXECUTE else "-"
+
+ return f"{typ}:{qual}:{r}{w}{x}"
+
+
+class ACL(object):
+ def __init__(self, acl):
+ self.acl = acl
+
+ def __del__(self):
+ acl_free(self.acl)
+
+ def entries(self):
+ entry_id = ACL_FIRST_ENTRY
+ while True:
+ entry = acl_get_entry(self.acl, entry_id)
+ if entry is None:
+ break
+
+ permset = acl_get_permset(entry)
+
+ mode = 0
+ for m in (ACL_READ, ACL_WRITE, ACL_EXECUTE):
+ if acl_get_perm(permset, m):
+ mode |= m
+
+ qualifier = None
+ tag = acl_get_tag_type(entry)
+
+ if tag == ACL_USER or tag == ACL_GROUP:
+ qual = acl_get_qualifier(entry)
+ qualifier = ctypes.cast(qual, ctypes.POINTER(ctypes.c_int))[0]
+
+ yield Entry(tag, qualifier, mode)
+
+ entry_id = ACL_NEXT_ENTRY
+
+ @classmethod
+ def from_path(cls, path, typ):
+ acl = acl_get_file(path, typ)
+ return cls(acl)
+
+
+def main():
+ import argparse
+ import pwd
+ import grp
+ from pathlib import Path
+
+ parser = argparse.ArgumentParser()
+ parser.add_argument("path", help="File Path", type=Path)
+
+ args = parser.parse_args()
+
+ acl = ACL.from_path(args.path, ACL_TYPE_ACCESS)
+ for entry in acl.entries():
+ print(str(entry))
+
+ return 0
+
+
+if __name__ == "__main__":
+ sys.exit(main())
diff --git a/bitbake/lib/bb/asyncrpc/__init__.py b/bitbake/lib/bb/asyncrpc/__init__.py
index c2f2b3c00b..639e1607f8 100644
--- a/bitbake/lib/bb/asyncrpc/__init__.py
+++ b/bitbake/lib/bb/asyncrpc/__init__.py
@@ -1,31 +1,16 @@
#
+# Copyright BitBake Contributors
+#
# SPDX-License-Identifier: GPL-2.0-only
#
-import itertools
-import json
-
-# The Python async server defaults to a 64K receive buffer, so we hardcode our
-# maximum chunk size. It would be better if the client and server reported to
-# each other what the maximum chunk sizes were, but that will slow down the
-# connection setup with a round trip delay so I'd rather not do that unless it
-# is necessary
-DEFAULT_MAX_CHUNK = 32 * 1024
-
-
-def chunkify(msg, max_chunk):
- if len(msg) < max_chunk - 1:
- yield ''.join((msg, "\n"))
- else:
- yield ''.join((json.dumps({
- 'chunk-stream': None
- }), "\n"))
-
- args = [iter(msg)] * (max_chunk - 1)
- for m in map(''.join, itertools.zip_longest(*args, fillvalue='')):
- yield ''.join(itertools.chain(m, "\n"))
- yield "\n"
-
-from .client import AsyncClient, Client
-from .serv import AsyncServer, AsyncServerConnection, ClientError, ServerError
+from .client import AsyncClient, Client, ClientPool
+from .serv import AsyncServer, AsyncServerConnection
+from .connection import DEFAULT_MAX_CHUNK
+from .exceptions import (
+ ClientError,
+ ServerError,
+ ConnectionClosedError,
+ InvokeError,
+)
diff --git a/bitbake/lib/bb/asyncrpc/client.py b/bitbake/lib/bb/asyncrpc/client.py
index 34960197d1..a350b4fb12 100644
--- a/bitbake/lib/bb/asyncrpc/client.py
+++ b/bitbake/lib/bb/asyncrpc/client.py
@@ -1,4 +1,6 @@
#
+# Copyright BitBake Contributors
+#
# SPDX-License-Identifier: GPL-2.0-only
#
@@ -8,47 +10,126 @@ import json
import os
import socket
import sys
-from . import chunkify, DEFAULT_MAX_CHUNK
-
+import re
+import contextlib
+from threading import Thread
+from .connection import StreamConnection, WebsocketConnection, DEFAULT_MAX_CHUNK
+from .exceptions import ConnectionClosedError, InvokeError
+
+UNIX_PREFIX = "unix://"
+WS_PREFIX = "ws://"
+WSS_PREFIX = "wss://"
+
+ADDR_TYPE_UNIX = 0
+ADDR_TYPE_TCP = 1
+ADDR_TYPE_WS = 2
+
+def parse_address(addr):
+ if addr.startswith(UNIX_PREFIX):
+ return (ADDR_TYPE_UNIX, (addr[len(UNIX_PREFIX) :],))
+ elif addr.startswith(WS_PREFIX) or addr.startswith(WSS_PREFIX):
+ return (ADDR_TYPE_WS, (addr,))
+ else:
+ m = re.match(r"\[(?P<host>[^\]]*)\]:(?P<port>\d+)$", addr)
+ if m is not None:
+ host = m.group("host")
+ port = m.group("port")
+ else:
+ host, port = addr.split(":")
+
+ return (ADDR_TYPE_TCP, (host, int(port)))
class AsyncClient(object):
- def __init__(self, proto_name, proto_version, logger, timeout=30):
- self.reader = None
- self.writer = None
+ def __init__(
+ self,
+ proto_name,
+ proto_version,
+ logger,
+ timeout=30,
+ server_headers=False,
+ headers={},
+ ):
+ self.socket = None
self.max_chunk = DEFAULT_MAX_CHUNK
self.proto_name = proto_name
self.proto_version = proto_version
self.logger = logger
self.timeout = timeout
+ self.needs_server_headers = server_headers
+ self.server_headers = {}
+ self.headers = headers
async def connect_tcp(self, address, port):
async def connect_sock():
- return await asyncio.open_connection(address, port)
+ reader, writer = await asyncio.open_connection(address, port)
+ return StreamConnection(reader, writer, self.timeout, self.max_chunk)
self._connect_sock = connect_sock
async def connect_unix(self, path):
async def connect_sock():
- return await asyncio.open_unix_connection(path)
+ # AF_UNIX has path length issues so chdir here to workaround
+ cwd = os.getcwd()
+ try:
+ os.chdir(os.path.dirname(path))
+ # The socket must be opened synchronously so that CWD doesn't get
+ # changed out from underneath us so we pass as a sock into asyncio
+ sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM, 0)
+ sock.connect(os.path.basename(path))
+ finally:
+ os.chdir(cwd)
+ reader, writer = await asyncio.open_unix_connection(sock=sock)
+ return StreamConnection(reader, writer, self.timeout, self.max_chunk)
+
+ self._connect_sock = connect_sock
+
+ async def connect_websocket(self, uri):
+ import websockets
+
+ async def connect_sock():
+ websocket = await websockets.connect(uri, ping_interval=None)
+ return WebsocketConnection(websocket, self.timeout)
self._connect_sock = connect_sock
async def setup_connection(self):
- s = '%s %s\n\n' % (self.proto_name, self.proto_version)
- self.writer.write(s.encode("utf-8"))
- await self.writer.drain()
+ # Send headers
+ await self.socket.send("%s %s" % (self.proto_name, self.proto_version))
+ await self.socket.send(
+ "needs-headers: %s" % ("true" if self.needs_server_headers else "false")
+ )
+ for k, v in self.headers.items():
+ await self.socket.send("%s: %s" % (k, v))
+
+ # End of headers
+ await self.socket.send("")
+
+ self.server_headers = {}
+ if self.needs_server_headers:
+ while True:
+ line = await self.socket.recv()
+ if not line:
+ # End headers
+ break
+ tag, value = line.split(":", 1)
+ self.server_headers[tag.lower()] = value.strip()
+
+ async def get_header(self, tag, default):
+ await self.connect()
+ return self.server_headers.get(tag, default)
async def connect(self):
- if self.reader is None or self.writer is None:
- (self.reader, self.writer) = await self._connect_sock()
+ if self.socket is None:
+ self.socket = await self._connect_sock()
await self.setup_connection()
- async def close(self):
- self.reader = None
+ async def disconnect(self):
+ if self.socket is not None:
+ await self.socket.close()
+ self.socket = None
- if self.writer is not None:
- self.writer.close()
- self.writer = None
+ async def close(self):
+ await self.disconnect()
async def _send_wrapper(self, proc):
count = 0
@@ -59,6 +140,7 @@ class AsyncClient(object):
except (
OSError,
ConnectionError,
+ ConnectionClosedError,
json.JSONDecodeError,
UnicodeDecodeError,
) as e:
@@ -70,49 +152,27 @@ class AsyncClient(object):
await self.close()
count += 1
- async def send_message(self, msg):
- async def get_line():
- try:
- line = await asyncio.wait_for(self.reader.readline(), self.timeout)
- except asyncio.TimeoutError:
- raise ConnectionError("Timed out waiting for server")
-
- if not line:
- raise ConnectionError("Connection closed")
-
- line = line.decode("utf-8")
-
- if not line.endswith("\n"):
- raise ConnectionError("Bad message %r" % (line))
-
- return line
+ def check_invoke_error(self, msg):
+ if isinstance(msg, dict) and "invoke-error" in msg:
+ raise InvokeError(msg["invoke-error"]["message"])
+ async def invoke(self, msg):
async def proc():
- for c in chunkify(json.dumps(msg), self.max_chunk):
- self.writer.write(c.encode("utf-8"))
- await self.writer.drain()
+ await self.socket.send_message(msg)
+ return await self.socket.recv_message()
- l = await get_line()
+ result = await self._send_wrapper(proc)
+ self.check_invoke_error(result)
+ return result
- m = json.loads(l)
- if m and "chunk-stream" in m:
- lines = []
- while True:
- l = (await get_line()).rstrip("\n")
- if not l:
- break
- lines.append(l)
-
- m = json.loads("".join(lines))
-
- return m
+ async def ping(self):
+ return await self.invoke({"ping": {}})
- return await self._send_wrapper(proc)
+ async def __aenter__(self):
+ return self
- async def ping(self):
- return await self.send_message(
- {'ping': {}}
- )
+ async def __aexit__(self, exc_type, exc_value, traceback):
+ await self.close()
class Client(object):
@@ -130,7 +190,7 @@ class Client(object):
# required (but harmless) with it.
asyncio.set_event_loop(self.loop)
- self._add_methods('connect_tcp', 'ping')
+ self._add_methods("connect_tcp", "ping")
@abc.abstractmethod
def _get_async_client(self):
@@ -148,14 +208,8 @@ class Client(object):
setattr(self, m, self._get_downcall_wrapper(downcall))
def connect_unix(self, path):
- # AF_UNIX has path length issues so chdir here to workaround
- cwd = os.getcwd()
- try:
- os.chdir(os.path.dirname(path))
- self.loop.run_until_complete(self.client.connect_unix(os.path.basename(path)))
- self.loop.run_until_complete(self.client.connect())
- finally:
- os.chdir(cwd)
+ self.loop.run_until_complete(self.client.connect_unix(path))
+ self.loop.run_until_complete(self.client.connect())
@property
def max_chunk(self):
@@ -165,8 +219,95 @@ class Client(object):
def max_chunk(self, value):
self.client.max_chunk = value
- def close(self):
+ def disconnect(self):
self.loop.run_until_complete(self.client.close())
- if sys.version_info >= (3, 6):
+
+ def close(self):
+ if self.loop:
+ self.loop.run_until_complete(self.client.close())
+ if sys.version_info >= (3, 6):
+ self.loop.run_until_complete(self.loop.shutdown_asyncgens())
+ self.loop.close()
+ self.loop = None
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, exc_type, exc_value, traceback):
+ self.close()
+ return False
+
+
+class ClientPool(object):
+ def __init__(self, max_clients):
+ self.avail_clients = []
+ self.num_clients = 0
+ self.max_clients = max_clients
+ self.loop = None
+ self.client_condition = None
+
+ @abc.abstractmethod
+ async def _new_client(self):
+ raise NotImplementedError("Must be implemented in derived class")
+
+ def close(self):
+ if self.client_condition:
+ self.client_condition = None
+
+ if self.loop:
+ self.loop.run_until_complete(self.__close_clients())
self.loop.run_until_complete(self.loop.shutdown_asyncgens())
- self.loop.close()
+ self.loop.close()
+ self.loop = None
+
+ def run_tasks(self, tasks):
+ if not self.loop:
+ self.loop = asyncio.new_event_loop()
+
+ thread = Thread(target=self.__thread_main, args=(tasks,))
+ thread.start()
+ thread.join()
+
+ @contextlib.asynccontextmanager
+ async def get_client(self):
+ async with self.client_condition:
+ if self.avail_clients:
+ client = self.avail_clients.pop()
+ elif self.num_clients < self.max_clients:
+ self.num_clients += 1
+ client = await self._new_client()
+ else:
+ while not self.avail_clients:
+ await self.client_condition.wait()
+ client = self.avail_clients.pop()
+
+ try:
+ yield client
+ finally:
+ async with self.client_condition:
+ self.avail_clients.append(client)
+ self.client_condition.notify()
+
+ def __thread_main(self, tasks):
+ async def process_task(task):
+ async with self.get_client() as client:
+ await task(client)
+
+ asyncio.set_event_loop(self.loop)
+ if not self.client_condition:
+ self.client_condition = asyncio.Condition()
+ tasks = [process_task(t) for t in tasks]
+ self.loop.run_until_complete(asyncio.gather(*tasks))
+
+ async def __close_clients(self):
+ for c in self.avail_clients:
+ await c.close()
+ self.avail_clients = []
+ self.num_clients = 0
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, exc_type, exc_value, traceback):
+ self.close()
+ return False
diff --git a/bitbake/lib/bb/asyncrpc/connection.py b/bitbake/lib/bb/asyncrpc/connection.py
new file mode 100644
index 0000000000..7f0cf6ba96
--- /dev/null
+++ b/bitbake/lib/bb/asyncrpc/connection.py
@@ -0,0 +1,146 @@
+#
+# Copyright BitBake Contributors
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
+import asyncio
+import itertools
+import json
+from datetime import datetime
+from .exceptions import ClientError, ConnectionClosedError
+
+
+# The Python async server defaults to a 64K receive buffer, so we hardcode our
+# maximum chunk size. It would be better if the client and server reported to
+# each other what the maximum chunk sizes were, but that will slow down the
+# connection setup with a round trip delay so I'd rather not do that unless it
+# is necessary
+DEFAULT_MAX_CHUNK = 32 * 1024
+
+
+def chunkify(msg, max_chunk):
+ if len(msg) < max_chunk - 1:
+ yield "".join((msg, "\n"))
+ else:
+ yield "".join((json.dumps({"chunk-stream": None}), "\n"))
+
+ args = [iter(msg)] * (max_chunk - 1)
+ for m in map("".join, itertools.zip_longest(*args, fillvalue="")):
+ yield "".join(itertools.chain(m, "\n"))
+ yield "\n"
+
+
+def json_serialize(obj):
+ if isinstance(obj, datetime):
+ return obj.isoformat()
+ raise TypeError("Type %s not serializeable" % type(obj))
+
+
+class StreamConnection(object):
+ def __init__(self, reader, writer, timeout, max_chunk=DEFAULT_MAX_CHUNK):
+ self.reader = reader
+ self.writer = writer
+ self.timeout = timeout
+ self.max_chunk = max_chunk
+
+ @property
+ def address(self):
+ return self.writer.get_extra_info("peername")
+
+ async def send_message(self, msg):
+ for c in chunkify(json.dumps(msg, default=json_serialize), self.max_chunk):
+ self.writer.write(c.encode("utf-8"))
+ await self.writer.drain()
+
+ async def recv_message(self):
+ l = await self.recv()
+
+ m = json.loads(l)
+ if not m:
+ return m
+
+ if "chunk-stream" in m:
+ lines = []
+ while True:
+ l = await self.recv()
+ if not l:
+ break
+ lines.append(l)
+
+ m = json.loads("".join(lines))
+
+ return m
+
+ async def send(self, msg):
+ self.writer.write(("%s\n" % msg).encode("utf-8"))
+ await self.writer.drain()
+
+ async def recv(self):
+ if self.timeout < 0:
+ line = await self.reader.readline()
+ else:
+ try:
+ line = await asyncio.wait_for(self.reader.readline(), self.timeout)
+ except asyncio.TimeoutError:
+ raise ConnectionError("Timed out waiting for data")
+
+ if not line:
+ raise ConnectionClosedError("Connection closed")
+
+ line = line.decode("utf-8")
+
+ if not line.endswith("\n"):
+ raise ConnectionError("Bad message %r" % (line))
+
+ return line.rstrip()
+
+ async def close(self):
+ self.reader = None
+ if self.writer is not None:
+ self.writer.close()
+ self.writer = None
+
+
+class WebsocketConnection(object):
+ def __init__(self, socket, timeout):
+ self.socket = socket
+ self.timeout = timeout
+
+ @property
+ def address(self):
+ return ":".join(str(s) for s in self.socket.remote_address)
+
+ async def send_message(self, msg):
+ await self.send(json.dumps(msg, default=json_serialize))
+
+ async def recv_message(self):
+ m = await self.recv()
+ return json.loads(m)
+
+ async def send(self, msg):
+ import websockets.exceptions
+
+ try:
+ await self.socket.send(msg)
+ except websockets.exceptions.ConnectionClosed:
+ raise ConnectionClosedError("Connection closed")
+
+ async def recv(self):
+ import websockets.exceptions
+
+ try:
+ if self.timeout < 0:
+ return await self.socket.recv()
+
+ try:
+ return await asyncio.wait_for(self.socket.recv(), self.timeout)
+ except asyncio.TimeoutError:
+ raise ConnectionError("Timed out waiting for data")
+ except websockets.exceptions.ConnectionClosed:
+ raise ConnectionClosedError("Connection closed")
+
+ async def close(self):
+ if self.socket is not None:
+ await self.socket.close()
+ self.socket = None
diff --git a/bitbake/lib/bb/asyncrpc/exceptions.py b/bitbake/lib/bb/asyncrpc/exceptions.py
new file mode 100644
index 0000000000..ae1043a38b
--- /dev/null
+++ b/bitbake/lib/bb/asyncrpc/exceptions.py
@@ -0,0 +1,21 @@
+#
+# Copyright BitBake Contributors
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
+
+class ClientError(Exception):
+ pass
+
+
+class InvokeError(Exception):
+ pass
+
+
+class ServerError(Exception):
+ pass
+
+
+class ConnectionClosedError(Exception):
+ pass
diff --git a/bitbake/lib/bb/asyncrpc/serv.py b/bitbake/lib/bb/asyncrpc/serv.py
index b4cffff213..a66117acad 100644
--- a/bitbake/lib/bb/asyncrpc/serv.py
+++ b/bitbake/lib/bb/asyncrpc/serv.py
@@ -1,4 +1,6 @@
#
+# Copyright BitBake Contributors
+#
# SPDX-License-Identifier: GPL-2.0-only
#
@@ -10,234 +12,333 @@ import signal
import socket
import sys
import multiprocessing
-from . import chunkify, DEFAULT_MAX_CHUNK
-
+import logging
+from .connection import StreamConnection, WebsocketConnection
+from .exceptions import ClientError, ServerError, ConnectionClosedError, InvokeError
-class ClientError(Exception):
- pass
-
-class ServerError(Exception):
- pass
+class ClientLoggerAdapter(logging.LoggerAdapter):
+ def process(self, msg, kwargs):
+ return f"[Client {self.extra['address']}] {msg}", kwargs
class AsyncServerConnection(object):
- def __init__(self, reader, writer, proto_name, logger):
- self.reader = reader
- self.writer = writer
+ # If a handler returns this object (e.g. `return self.NO_RESPONSE`), no
+ # return message will be automatically be sent back to the client
+ NO_RESPONSE = object()
+
+ def __init__(self, socket, proto_name, logger):
+ self.socket = socket
self.proto_name = proto_name
- self.max_chunk = DEFAULT_MAX_CHUNK
self.handlers = {
- 'chunk-stream': self.handle_chunk,
- 'ping': self.handle_ping,
+ "ping": self.handle_ping,
}
- self.logger = logger
+ self.logger = ClientLoggerAdapter(
+ logger,
+ {
+ "address": socket.address,
+ },
+ )
+ self.client_headers = {}
+
+ async def close(self):
+ await self.socket.close()
+
+ async def handle_headers(self, headers):
+ return {}
async def process_requests(self):
try:
- self.addr = self.writer.get_extra_info('peername')
- self.logger.debug('Client %r connected' % (self.addr,))
+ self.logger.info("Client %r connected" % (self.socket.address,))
# Read protocol and version
- client_protocol = await self.reader.readline()
- if client_protocol is None:
+ client_protocol = await self.socket.recv()
+ if not client_protocol:
return
- (client_proto_name, client_proto_version) = client_protocol.decode('utf-8').rstrip().split()
+ (client_proto_name, client_proto_version) = client_protocol.split()
if client_proto_name != self.proto_name:
- self.logger.debug('Rejecting invalid protocol %s' % (self.proto_name))
+ self.logger.debug("Rejecting invalid protocol %s" % (self.proto_name))
return
- self.proto_version = tuple(int(v) for v in client_proto_version.split('.'))
+ self.proto_version = tuple(int(v) for v in client_proto_version.split("."))
if not self.validate_proto_version():
- self.logger.debug('Rejecting invalid protocol version %s' % (client_proto_version))
+ self.logger.debug(
+ "Rejecting invalid protocol version %s" % (client_proto_version)
+ )
return
- # Read headers. Currently, no headers are implemented, so look for
- # an empty line to signal the end of the headers
+ # Read headers
+ self.client_headers = {}
while True:
- line = await self.reader.readline()
- if line is None:
- return
-
- line = line.decode('utf-8').rstrip()
- if not line:
+ header = await self.socket.recv()
+ if not header:
+ # Empty line. End of headers
break
+ tag, value = header.split(":", 1)
+ self.client_headers[tag.lower()] = value.strip()
+
+ if self.client_headers.get("needs-headers", "false") == "true":
+ for k, v in (await self.handle_headers(self.client_headers)).items():
+ await self.socket.send("%s: %s" % (k, v))
+ await self.socket.send("")
# Handle messages
while True:
- d = await self.read_message()
+ d = await self.socket.recv_message()
if d is None:
break
- await self.dispatch_message(d)
- await self.writer.drain()
- except ClientError as e:
+ try:
+ response = await self.dispatch_message(d)
+ except InvokeError as e:
+ await self.socket.send_message(
+ {"invoke-error": {"message": str(e)}}
+ )
+ break
+
+ if response is not self.NO_RESPONSE:
+ await self.socket.send_message(response)
+
+ except ConnectionClosedError as e:
+ self.logger.info(str(e))
+ except (ClientError, ConnectionError) as e:
self.logger.error(str(e))
finally:
- self.writer.close()
+ await self.close()
async def dispatch_message(self, msg):
for k in self.handlers.keys():
if k in msg:
- self.logger.debug('Handling %s' % k)
- await self.handlers[k](msg[k])
- return
+ self.logger.debug("Handling %s" % k)
+ return await self.handlers[k](msg[k])
raise ClientError("Unrecognized command %r" % msg)
- def write_message(self, msg):
- for c in chunkify(json.dumps(msg), self.max_chunk):
- self.writer.write(c.encode('utf-8'))
+ async def handle_ping(self, request):
+ return {"alive": True}
+
- async def read_message(self):
- l = await self.reader.readline()
- if not l:
- return None
+class StreamServer(object):
+ def __init__(self, handler, logger):
+ self.handler = handler
+ self.logger = logger
+ self.closed = False
- try:
- message = l.decode('utf-8')
+ async def handle_stream_client(self, reader, writer):
+ # writer.transport.set_write_buffer_limits(0)
+ socket = StreamConnection(reader, writer, -1)
+ if self.closed:
+ await socket.close()
+ return
+
+ await self.handler(socket)
+
+ async def stop(self):
+ self.closed = True
+
+
+class TCPStreamServer(StreamServer):
+ def __init__(self, host, port, handler, logger):
+ super().__init__(handler, logger)
+ self.host = host
+ self.port = port
+
+ def start(self, loop):
+ self.server = loop.run_until_complete(
+ asyncio.start_server(self.handle_stream_client, self.host, self.port)
+ )
+
+ for s in self.server.sockets:
+ self.logger.debug("Listening on %r" % (s.getsockname(),))
+ # Newer python does this automatically. Do it manually here for
+ # maximum compatibility
+ s.setsockopt(socket.SOL_TCP, socket.TCP_NODELAY, 1)
+ s.setsockopt(socket.SOL_TCP, socket.TCP_QUICKACK, 1)
+
+ # Enable keep alives. This prevents broken client connections
+ # from persisting on the server for long periods of time.
+ s.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
+ s.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPIDLE, 30)
+ s.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPINTVL, 15)
+ s.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPCNT, 4)
+
+ name = self.server.sockets[0].getsockname()
+ if self.server.sockets[0].family == socket.AF_INET6:
+ self.address = "[%s]:%d" % (name[0], name[1])
+ else:
+ self.address = "%s:%d" % (name[0], name[1])
+
+ return [self.server.wait_closed()]
+
+ async def stop(self):
+ await super().stop()
+ self.server.close()
+
+ def cleanup(self):
+ pass
- if not message.endswith('\n'):
- return None
- return json.loads(message)
- except (json.JSONDecodeError, UnicodeDecodeError) as e:
- self.logger.error('Bad message from client: %r' % message)
- raise e
+class UnixStreamServer(StreamServer):
+ def __init__(self, path, handler, logger):
+ super().__init__(handler, logger)
+ self.path = path
- async def handle_chunk(self, request):
- lines = []
+ def start(self, loop):
+ cwd = os.getcwd()
try:
- while True:
- l = await self.reader.readline()
- l = l.rstrip(b"\n").decode("utf-8")
- if not l:
- break
- lines.append(l)
-
- msg = json.loads(''.join(lines))
- except (json.JSONDecodeError, UnicodeDecodeError) as e:
- self.logger.error('Bad message from client: %r' % lines)
- raise e
+ # Work around path length limits in AF_UNIX
+ os.chdir(os.path.dirname(self.path))
+ self.server = loop.run_until_complete(
+ asyncio.start_unix_server(
+ self.handle_stream_client, os.path.basename(self.path)
+ )
+ )
+ finally:
+ os.chdir(cwd)
- if 'chunk-stream' in msg:
- raise ClientError("Nested chunks are not allowed")
+ self.logger.debug("Listening on %r" % self.path)
+ self.address = "unix://%s" % os.path.abspath(self.path)
+ return [self.server.wait_closed()]
- await self.dispatch_message(msg)
+ async def stop(self):
+ await super().stop()
+ self.server.close()
- async def handle_ping(self, request):
- response = {'alive': True}
- self.write_message(response)
+ def cleanup(self):
+ os.unlink(self.path)
-class AsyncServer(object):
- def __init__(self, logger):
- self._cleanup_socket = None
+class WebsocketsServer(object):
+ def __init__(self, host, port, handler, logger):
+ self.host = host
+ self.port = port
+ self.handler = handler
self.logger = logger
- self.start = None
- self.address = None
- self.loop = None
- def start_tcp_server(self, host, port):
- def start_tcp():
- self.server = self.loop.run_until_complete(
- asyncio.start_server(self.handle_client, host, port)
+ def start(self, loop):
+ import websockets.server
+
+ self.server = loop.run_until_complete(
+ websockets.server.serve(
+ self.client_handler,
+ self.host,
+ self.port,
+ ping_interval=None,
)
+ )
- for s in self.server.sockets:
- self.logger.debug('Listening on %r' % (s.getsockname(),))
- # Newer python does this automatically. Do it manually here for
- # maximum compatibility
- s.setsockopt(socket.SOL_TCP, socket.TCP_NODELAY, 1)
- s.setsockopt(socket.SOL_TCP, socket.TCP_QUICKACK, 1)
+ for s in self.server.sockets:
+ self.logger.debug("Listening on %r" % (s.getsockname(),))
- name = self.server.sockets[0].getsockname()
- if self.server.sockets[0].family == socket.AF_INET6:
- self.address = "[%s]:%d" % (name[0], name[1])
- else:
- self.address = "%s:%d" % (name[0], name[1])
+ # Enable keep alives. This prevents broken client connections
+ # from persisting on the server for long periods of time.
+ s.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
+ s.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPIDLE, 30)
+ s.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPINTVL, 15)
+ s.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPCNT, 4)
- self.start = start_tcp
+ name = self.server.sockets[0].getsockname()
+ if self.server.sockets[0].family == socket.AF_INET6:
+ self.address = "ws://[%s]:%d" % (name[0], name[1])
+ else:
+ self.address = "ws://%s:%d" % (name[0], name[1])
- def start_unix_server(self, path):
- def cleanup():
- os.unlink(path)
+ return [self.server.wait_closed()]
- def start_unix():
- cwd = os.getcwd()
- try:
- # Work around path length limits in AF_UNIX
- os.chdir(os.path.dirname(path))
- self.server = self.loop.run_until_complete(
- asyncio.start_unix_server(self.handle_client, os.path.basename(path))
- )
- finally:
- os.chdir(cwd)
+ async def stop(self):
+ self.server.close()
- self.logger.debug('Listening on %r' % path)
+ def cleanup(self):
+ pass
- self._cleanup_socket = cleanup
- self.address = "unix://%s" % os.path.abspath(path)
+ async def client_handler(self, websocket):
+ socket = WebsocketConnection(websocket, -1)
+ await self.handler(socket)
- self.start = start_unix
- @abc.abstractmethod
- def accept_client(self, reader, writer):
- pass
+class AsyncServer(object):
+ def __init__(self, logger):
+ self.logger = logger
+ self.loop = None
+ self.run_tasks = []
- async def handle_client(self, reader, writer):
- # writer.transport.set_write_buffer_limits(0)
+ def start_tcp_server(self, host, port):
+ self.server = TCPStreamServer(host, port, self._client_handler, self.logger)
+
+ def start_unix_server(self, path):
+ self.server = UnixStreamServer(path, self._client_handler, self.logger)
+
+ def start_websocket_server(self, host, port):
+ self.server = WebsocketsServer(host, port, self._client_handler, self.logger)
+
+ async def _client_handler(self, socket):
+ address = socket.address
try:
- client = self.accept_client(reader, writer)
+ client = self.accept_client(socket)
await client.process_requests()
except Exception as e:
import traceback
- self.logger.error('Error from client: %s' % str(e), exc_info=True)
+
+ self.logger.error(
+ "Error from client %s: %s" % (address, str(e)), exc_info=True
+ )
traceback.print_exc()
- writer.close()
- self.logger.debug('Client disconnected')
+ finally:
+ self.logger.debug("Client %s disconnected", address)
+ await socket.close()
- def run_loop_forever(self):
- try:
- self.loop.run_forever()
- except KeyboardInterrupt:
- pass
+ @abc.abstractmethod
+ def accept_client(self, socket):
+ pass
+
+ async def stop(self):
+ self.logger.debug("Stopping server")
+ await self.server.stop()
+
+ def start(self):
+ tasks = self.server.start(self.loop)
+ self.address = self.server.address
+ return tasks
def signal_handler(self):
self.logger.debug("Got exit signal")
- self.loop.stop()
+ self.loop.create_task(self.stop())
- def _serve_forever(self):
+ def _serve_forever(self, tasks):
try:
self.loop.add_signal_handler(signal.SIGTERM, self.signal_handler)
+ self.loop.add_signal_handler(signal.SIGINT, self.signal_handler)
+ self.loop.add_signal_handler(signal.SIGQUIT, self.signal_handler)
signal.pthread_sigmask(signal.SIG_UNBLOCK, [signal.SIGTERM])
- self.run_loop_forever()
- self.server.close()
+ self.loop.run_until_complete(asyncio.gather(*tasks))
- self.loop.run_until_complete(self.server.wait_closed())
- self.logger.debug('Server shutting down')
+ self.logger.debug("Server shutting down")
finally:
- if self._cleanup_socket is not None:
- self._cleanup_socket()
+ self.server.cleanup()
def serve_forever(self):
"""
Serve requests in the current process
"""
+ self._create_loop()
+ tasks = self.start()
+ self._serve_forever(tasks)
+ self.loop.close()
+
+ def _create_loop(self):
# Create loop and override any loop that may have existed in
# a parent process. It is possible that the usecases of
# serve_forever might be constrained enough to allow using
# get_event_loop here, but better safe than sorry for now.
self.loop = asyncio.new_event_loop()
asyncio.set_event_loop(self.loop)
- self.start()
- self._serve_forever()
- def serve_as_process(self, *, prefunc=None, args=()):
+ def serve_as_process(self, *, prefunc=None, args=(), log_level=None):
"""
Serve requests in a child process
"""
+
def run(queue):
# Create loop and override any loop that may have existed
# in a parent process. Without doing this and instead
@@ -250,18 +351,22 @@ class AsyncServer(object):
# more general, though, as any potential use of asyncio in
# Cooker could create a loop that needs to replaced in this
# new process.
- self.loop = asyncio.new_event_loop()
- asyncio.set_event_loop(self.loop)
+ self._create_loop()
try:
- self.start()
+ self.address = None
+ tasks = self.start()
finally:
+ # Always put the server address to wake up the parent task
queue.put(self.address)
queue.close()
if prefunc is not None:
prefunc(self, *args)
- self._serve_forever()
+ if log_level is not None:
+ self.logger.setLevel(log_level)
+
+ self._serve_forever(tasks)
if sys.version_info >= (3, 6):
self.loop.run_until_complete(self.loop.shutdown_asyncgens())
diff --git a/bitbake/lib/bb/build.py b/bitbake/lib/bb/build.py
index 55f68b98c5..44d08f5c55 100644
--- a/bitbake/lib/bb/build.py
+++ b/bitbake/lib/bb/build.py
@@ -20,10 +20,12 @@ import itertools
import time
import re
import stat
+import datetime
import bb
import bb.msg
import bb.process
import bb.progress
+from io import StringIO
from bb import data, event, utils
bblogger = logging.getLogger('BitBake')
@@ -176,7 +178,9 @@ class StdoutNoopContextManager:
@property
def name(self):
- return sys.stdout.name
+ if "name" in dir(sys.stdout):
+ return sys.stdout.name
+ return "<mem>"
def exec_func(func, d, dirs = None):
@@ -295,9 +299,21 @@ def exec_func_python(func, d, runfile, cwd=None):
lineno = int(d.getVarFlag(func, "lineno", False))
bb.methodpool.insert_method(func, text, fn, lineno - 1)
+ if verboseStdoutLogging:
+ sys.stdout.flush()
+ sys.stderr.flush()
+ currout = sys.stdout
+ currerr = sys.stderr
+ sys.stderr = sys.stdout = execio = StringIO()
comp = utils.better_compile(code, func, "exec_func_python() autogenerated")
utils.better_exec(comp, {"d": d}, code, "exec_func_python() autogenerated")
finally:
+ if verboseStdoutLogging:
+ execio.flush()
+ logger.plain("%s" % execio.getvalue())
+ sys.stdout = currout
+ sys.stderr = currerr
+ execio.close()
# We want any stdout/stderr to be printed before any other log messages to make debugging
# more accurate. In some cases we seem to lose stdout/stderr entirely in logging tests without this.
sys.stdout.flush()
@@ -440,7 +456,11 @@ exit $ret
if fakerootcmd:
cmd = [fakerootcmd, runfile]
- if verboseStdoutLogging:
+ # We only want to output to logger via LogTee if stdout is sys.__stdout__ (which will either
+ # be real stdout or subprocess PIPE or similar). In other cases we are being run "recursively",
+ # ie. inside another function, in which case stdout is already being captured so we don't
+ # want to Tee here as output would be printed twice, and out of order.
+ if verboseStdoutLogging and sys.stdout == sys.__stdout__:
logfile = LogTee(logger, StdoutNoopContextManager())
else:
logfile = StdoutNoopContextManager()
@@ -571,7 +591,6 @@ def _task_data(fn, task, d):
localdata.setVar('BB_FILENAME', fn)
localdata.setVar('OVERRIDES', 'task-%s:%s' %
(task[3:].replace('_', '-'), d.getVar('OVERRIDES', False)))
- localdata.finalize()
bb.data.expandKeys(localdata)
return localdata
@@ -618,7 +637,8 @@ def _exec_task(fn, task, d, quieterr):
logorder = os.path.join(tempdir, 'log.task_order')
try:
with open(logorder, 'a') as logorderfile:
- logorderfile.write('{0} ({1}): {2}\n'.format(task, os.getpid(), logbase))
+ timestamp = datetime.datetime.now().strftime("%Y%m%d-%H%M%S.%f")
+ logorderfile.write('{0} {1} ({2}): {3}\n'.format(timestamp, task, os.getpid(), logbase))
except OSError:
logger.exception("Opening log file '%s'", logorder)
pass
@@ -771,44 +791,7 @@ def exec_task(fn, task, d, profile = False):
event.fire(failedevent, d)
return 1
-def stamp_internal(taskname, d, file_name, baseonly=False, noextra=False):
- """
- Internal stamp helper function
- Makes sure the stamp directory exists
- Returns the stamp path+filename
-
- In the bitbake core, d can be a CacheData and file_name will be set.
- When called in task context, d will be a data store, file_name will not be set
- """
- taskflagname = taskname
- if taskname.endswith("_setscene") and taskname != "do_setscene":
- taskflagname = taskname.replace("_setscene", "")
-
- if file_name:
- stamp = d.stamp[file_name]
- extrainfo = d.stamp_extrainfo[file_name].get(taskflagname) or ""
- else:
- stamp = d.getVar('STAMP')
- file_name = d.getVar('BB_FILENAME')
- extrainfo = d.getVarFlag(taskflagname, 'stamp-extra-info') or ""
-
- if baseonly:
- return stamp
- if noextra:
- extrainfo = ""
-
- if not stamp:
- return
-
- stamp = bb.parse.siggen.stampfile(stamp, file_name, taskname, extrainfo)
-
- stampdir = os.path.dirname(stamp)
- if cached_mtime_noerror(stampdir) == 0:
- bb.utils.mkdirhier(stampdir)
-
- return stamp
-
-def stamp_cleanmask_internal(taskname, d, file_name):
+def _get_cleanmask(taskname, mcfn):
"""
Internal stamp helper function to generate stamp cleaning mask
Returns the stamp path+filename
@@ -816,27 +799,14 @@ def stamp_cleanmask_internal(taskname, d, file_name):
In the bitbake core, d can be a CacheData and file_name will be set.
When called in task context, d will be a data store, file_name will not be set
"""
- taskflagname = taskname
- if taskname.endswith("_setscene") and taskname != "do_setscene":
- taskflagname = taskname.replace("_setscene", "")
-
- if file_name:
- stamp = d.stampclean[file_name]
- extrainfo = d.stamp_extrainfo[file_name].get(taskflagname) or ""
- else:
- stamp = d.getVar('STAMPCLEAN')
- file_name = d.getVar('BB_FILENAME')
- extrainfo = d.getVarFlag(taskflagname, 'stamp-extra-info') or ""
-
- if not stamp:
- return []
-
- cleanmask = bb.parse.siggen.stampcleanmask(stamp, file_name, taskname, extrainfo)
-
- return [cleanmask, cleanmask.replace(taskflagname, taskflagname + "_setscene")]
-
-def clean_stamp(task, d, file_name = None):
- cleanmask = stamp_cleanmask_internal(task, d, file_name)
+ cleanmask = bb.parse.siggen.stampcleanmask_mcfn(taskname, mcfn)
+ taskflagname = taskname.replace("_setscene", "")
+ if cleanmask:
+ return [cleanmask, cleanmask.replace(taskflagname, taskflagname + "_setscene")]
+ return []
+
+def clean_stamp_mcfn(task, mcfn):
+ cleanmask = _get_cleanmask(task, mcfn)
for mask in cleanmask:
for name in glob.glob(mask):
# Preserve sigdata files in the stamps directory
@@ -846,33 +816,46 @@ def clean_stamp(task, d, file_name = None):
if name.endswith('.taint'):
continue
os.unlink(name)
- return
-def make_stamp(task, d, file_name = None):
+def clean_stamp(task, d):
+ mcfn = d.getVar('BB_FILENAME')
+ clean_stamp_mcfn(task, mcfn)
+
+def make_stamp_mcfn(task, mcfn):
+
+ basestamp = bb.parse.siggen.stampfile_mcfn(task, mcfn)
+
+ stampdir = os.path.dirname(basestamp)
+ if cached_mtime_noerror(stampdir) == 0:
+ bb.utils.mkdirhier(stampdir)
+
+ clean_stamp_mcfn(task, mcfn)
+
+ # Remove the file and recreate to force timestamp
+ # change on broken NFS filesystems
+ if basestamp:
+ bb.utils.remove(basestamp)
+ open(basestamp, "w").close()
+
+def make_stamp(task, d):
"""
Creates/updates a stamp for a given task
- (d can be a data dict or dataCache)
"""
- clean_stamp(task, d, file_name)
+ mcfn = d.getVar('BB_FILENAME')
- stamp = stamp_internal(task, d, file_name)
- # Remove the file and recreate to force timestamp
- # change on broken NFS filesystems
- if stamp:
- bb.utils.remove(stamp)
- open(stamp, "w").close()
+ make_stamp_mcfn(task, mcfn)
# If we're in task context, write out a signature file for each task
# as it completes
- if not task.endswith("_setscene") and task != "do_setscene" and not file_name:
- stampbase = stamp_internal(task, d, None, True)
- file_name = d.getVar('BB_FILENAME')
- bb.parse.siggen.dump_sigtask(file_name, task, stampbase, True)
-
-def find_stale_stamps(task, d, file_name=None):
- current = stamp_internal(task, d, file_name)
- current2 = stamp_internal(task + "_setscene", d, file_name)
- cleanmask = stamp_cleanmask_internal(task, d, file_name)
+ if not task.endswith("_setscene"):
+ stampbase = bb.parse.siggen.stampfile_base(mcfn)
+ bb.parse.siggen.dump_sigtask(mcfn, task, stampbase, True)
+
+
+def find_stale_stamps(task, mcfn):
+ current = bb.parse.siggen.stampfile_mcfn(task, mcfn)
+ current2 = bb.parse.siggen.stampfile_mcfn(task + "_setscene", mcfn)
+ cleanmask = _get_cleanmask(task, mcfn)
found = []
for mask in cleanmask:
for name in glob.glob(mask):
@@ -886,38 +869,14 @@ def find_stale_stamps(task, d, file_name=None):
found.append(name)
return found
-def del_stamp(task, d, file_name = None):
- """
- Removes a stamp for a given task
- (d can be a data dict or dataCache)
- """
- stamp = stamp_internal(task, d, file_name)
- bb.utils.remove(stamp)
-
-def write_taint(task, d, file_name = None):
+def write_taint(task, d):
"""
Creates a "taint" file which will force the specified task and its
dependents to be re-run the next time by influencing the value of its
taskhash.
- (d can be a data dict or dataCache)
- """
- import uuid
- if file_name:
- taintfn = d.stamp[file_name] + '.' + task + '.taint'
- else:
- taintfn = d.getVar('STAMP') + '.' + task + '.taint'
- bb.utils.mkdirhier(os.path.dirname(taintfn))
- # The specific content of the taint file is not really important,
- # we just need it to be random, so a random UUID is used
- with open(taintfn, 'w') as taintf:
- taintf.write(str(uuid.uuid4()))
-
-def stampfile(taskname, d, file_name = None, noextra=False):
- """
- Return the stamp for a given task
- (d can be a data dict or dataCache)
"""
- return stamp_internal(taskname, d, file_name, noextra=noextra)
+ mcfn = d.getVar('BB_FILENAME')
+ bb.parse.siggen.invalidate_task(task, mcfn)
def add_tasks(tasklist, d):
task_deps = d.getVar('_task_deps', False)
diff --git a/bitbake/lib/bb/cache.py b/bitbake/lib/bb/cache.py
index 92e9a3ced7..18d5574a31 100644
--- a/bitbake/lib/bb/cache.py
+++ b/bitbake/lib/bb/cache.py
@@ -24,10 +24,11 @@ from collections.abc import Mapping
import bb.utils
from bb import PrefixLoggerAdapter
import re
+import shutil
logger = logging.getLogger("BitBake.Cache")
-__cache_version__ = "154"
+__cache_version__ = "155"
def getCacheFile(path, filename, mc, data_hash):
mcspec = ''
@@ -104,7 +105,7 @@ class CoreRecipeInfo(RecipeInfoCommon):
self.tasks = metadata.getVar('__BBTASKS', False)
- self.basetaskhashes = self.taskvar('BB_BASEHASH', self.tasks, metadata)
+ self.basetaskhashes = metadata.getVar('__siggen_basehashes', False) or {}
self.hashfilename = self.getvar('BB_HASHFILENAME', metadata)
self.task_deps = metadata.getVar('_task_deps', False) or {'tasks': [], 'parents': {}}
@@ -215,7 +216,7 @@ class CoreRecipeInfo(RecipeInfoCommon):
# Collect files we may need for possible world-dep
# calculations
- if not self.not_world:
+ if not bb.utils.to_boolean(self.not_world):
cachedata.possible_world.append(fn)
#else:
# logger.debug2("EXCLUDE FROM WORLD: %s", fn)
@@ -237,15 +238,113 @@ class CoreRecipeInfo(RecipeInfoCommon):
cachedata.fakerootlogs[fn] = self.fakerootlogs
cachedata.extradepsfunc[fn] = self.extradepsfunc
+
+class SiggenRecipeInfo(RecipeInfoCommon):
+ __slots__ = ()
+
+ classname = "SiggenRecipeInfo"
+ cachefile = "bb_cache_" + classname +".dat"
+ # we don't want to show this information in graph files so don't set cachefields
+ #cachefields = []
+
+ def __init__(self, filename, metadata):
+ self.siggen_gendeps = metadata.getVar("__siggen_gendeps", False)
+ self.siggen_varvals = metadata.getVar("__siggen_varvals", False)
+ self.siggen_taskdeps = metadata.getVar("__siggen_taskdeps", False)
+
+ @classmethod
+ def init_cacheData(cls, cachedata):
+ cachedata.siggen_taskdeps = {}
+ cachedata.siggen_gendeps = {}
+ cachedata.siggen_varvals = {}
+
+ def add_cacheData(self, cachedata, fn):
+ cachedata.siggen_gendeps[fn] = self.siggen_gendeps
+ cachedata.siggen_varvals[fn] = self.siggen_varvals
+ cachedata.siggen_taskdeps[fn] = self.siggen_taskdeps
+
+ # The siggen variable data is large and impacts:
+ # - bitbake's overall memory usage
+ # - the amount of data sent over IPC between parsing processes and the server
+ # - the size of the cache files on disk
+ # - the size of "sigdata" hash information files on disk
+ # The data consists of strings (some large) or frozenset lists of variables
+ # As such, we a) deplicate the data here and b) pass references to the object at second
+ # access (e.g. over IPC or saving into pickle).
+
+ store = {}
+ save_map = {}
+ save_count = 1
+ restore_map = {}
+ restore_count = {}
+
+ @classmethod
+ def reset(cls):
+ # Needs to be called before starting new streamed data in a given process
+ # (e.g. writing out the cache again)
+ cls.save_map = {}
+ cls.save_count = 1
+ cls.restore_map = {}
+
+ @classmethod
+ def _save(cls, deps):
+ ret = []
+ if not deps:
+ return deps
+ for dep in deps:
+ fs = deps[dep]
+ if fs is None:
+ ret.append((dep, None, None))
+ elif fs in cls.save_map:
+ ret.append((dep, None, cls.save_map[fs]))
+ else:
+ cls.save_map[fs] = cls.save_count
+ ret.append((dep, fs, cls.save_count))
+ cls.save_count = cls.save_count + 1
+ return ret
+
+ @classmethod
+ def _restore(cls, deps, pid):
+ ret = {}
+ if not deps:
+ return deps
+ if pid not in cls.restore_map:
+ cls.restore_map[pid] = {}
+ map = cls.restore_map[pid]
+ for dep, fs, mapnum in deps:
+ if fs is None and mapnum is None:
+ ret[dep] = None
+ elif fs is None:
+ ret[dep] = map[mapnum]
+ else:
+ try:
+ fs = cls.store[fs]
+ except KeyError:
+ cls.store[fs] = fs
+ map[mapnum] = fs
+ ret[dep] = fs
+ return ret
+
+ def __getstate__(self):
+ ret = {}
+ for key in ["siggen_gendeps", "siggen_taskdeps", "siggen_varvals"]:
+ ret[key] = self._save(self.__dict__[key])
+ ret['pid'] = os.getpid()
+ return ret
+
+ def __setstate__(self, state):
+ pid = state['pid']
+ for key in ["siggen_gendeps", "siggen_taskdeps", "siggen_varvals"]:
+ setattr(self, key, self._restore(state[key], pid))
+
+
def virtualfn2realfn(virtualfn):
"""
Convert a virtual file name to a real one + the associated subclass keyword
"""
mc = ""
if virtualfn.startswith('mc:') and virtualfn.count(':') >= 2:
- elems = virtualfn.split(':')
- mc = elems[1]
- virtualfn = ":".join(elems[2:])
+ (_, mc, virtualfn) = virtualfn.split(':', 2)
fn = virtualfn
cls = ""
@@ -268,7 +367,7 @@ def realfn2virtual(realfn, cls, mc):
def variant2virtual(realfn, variant):
"""
- Convert a real filename + the associated subclass keyword to a virtual filename
+ Convert a real filename + a variant to a virtual filename
"""
if variant == "":
return realfn
@@ -279,75 +378,18 @@ def variant2virtual(realfn, variant):
return "mc:" + elems[1] + ":" + realfn
return "virtual:" + variant + ":" + realfn
-def parse_recipe(bb_data, bbfile, appends, mc=''):
- """
- Parse a recipe
- """
-
- bb_data.setVar("__BBMULTICONFIG", mc)
-
- bbfile_loc = os.path.abspath(os.path.dirname(bbfile))
- bb.parse.cached_mtime_noerror(bbfile_loc)
-
- if appends:
- bb_data.setVar('__BBAPPEND', " ".join(appends))
- bb_data = bb.parse.handle(bbfile, bb_data)
- return bb_data
-
-
-class NoCache(object):
-
- def __init__(self, databuilder):
- self.databuilder = databuilder
- self.data = databuilder.data
-
- def loadDataFull(self, virtualfn, appends):
- """
- Return a complete set of data for fn.
- To do this, we need to parse the file.
- """
- logger.debug("Parsing %s (full)" % virtualfn)
- (fn, virtual, mc) = virtualfn2realfn(virtualfn)
- bb_data = self.load_bbfile(virtualfn, appends, virtonly=True)
- return bb_data[virtual]
-
- def load_bbfile(self, bbfile, appends, virtonly = False, mc=None):
- """
- Load and parse one .bb build file
- Return the data and whether parsing resulted in the file being skipped
- """
-
- if virtonly:
- (bbfile, virtual, mc) = virtualfn2realfn(bbfile)
- bb_data = self.databuilder.mcdata[mc].createCopy()
- bb_data.setVar("__ONLYFINALISE", virtual or "default")
- datastores = parse_recipe(bb_data, bbfile, appends, mc)
- return datastores
-
- if mc is not None:
- bb_data = self.databuilder.mcdata[mc].createCopy()
- return parse_recipe(bb_data, bbfile, appends, mc)
-
- bb_data = self.data.createCopy()
- datastores = parse_recipe(bb_data, bbfile, appends)
-
- for mc in self.databuilder.mcdata:
- if not mc:
- continue
- bb_data = self.databuilder.mcdata[mc].createCopy()
- newstores = parse_recipe(bb_data, bbfile, appends, mc)
- for ns in newstores:
- datastores["mc:%s:%s" % (mc, ns)] = newstores[ns]
-
- return datastores
-
-class Cache(NoCache):
+#
+# Cooker calls cacheValid on its recipe list, then either calls loadCached
+# from it's main thread or parse from separate processes to generate an up to
+# date cache
+#
+class Cache(object):
"""
BitBake Cache implementation
"""
def __init__(self, databuilder, mc, data_hash, caches_array):
- super().__init__(databuilder)
- data = databuilder.data
+ self.databuilder = databuilder
+ self.data = databuilder.data
# Pass caches_array information into Cache Constructor
# It will be used later for deciding whether we
@@ -355,7 +397,7 @@ class Cache(NoCache):
self.mc = mc
self.logger = PrefixLoggerAdapter("Cache: %s: " % (mc if mc else "default"), logger)
self.caches_array = caches_array
- self.cachedir = data.getVar("CACHE")
+ self.cachedir = self.data.getVar("CACHE")
self.clean = set()
self.checked = set()
self.depends_cache = {}
@@ -365,20 +407,12 @@ class Cache(NoCache):
self.filelist_regex = re.compile(r'(?:(?<=:True)|(?<=:False))\s+')
if self.cachedir in [None, '']:
- self.has_cache = False
- self.logger.info("Not using a cache. "
- "Set CACHE = <directory> to enable.")
- return
-
- self.has_cache = True
+ bb.fatal("Please ensure CACHE is set to the cache directory for BitBake to use")
def getCacheFile(self, cachefile):
return getCacheFile(self.cachedir, cachefile, self.mc, self.data_hash)
def prepare_cache(self, progress):
- if not self.has_cache:
- return 0
-
loaded = 0
self.cachefile = self.getCacheFile("bb_cache.dat")
@@ -417,9 +451,6 @@ class Cache(NoCache):
return loaded
def cachesize(self):
- if not self.has_cache:
- return 0
-
cachesize = 0
for cache_class in self.caches_array:
cachefile = self.getCacheFile(cache_class.cachefile)
@@ -481,11 +512,11 @@ class Cache(NoCache):
return len(self.depends_cache)
- def parse(self, filename, appends):
+ def parse(self, filename, appends, layername):
"""Parse the specified filename, returning the recipe information"""
self.logger.debug("Parsing %s", filename)
infos = []
- datastores = self.load_bbfile(filename, appends, mc=self.mc)
+ datastores = self.databuilder.parseRecipeVariants(filename, appends, mc=self.mc, layername=layername)
depends = []
variants = []
# Process the "real" fn last so we can store variants list
@@ -507,43 +538,19 @@ class Cache(NoCache):
return infos
- def load(self, filename, appends):
+ def loadCached(self, filename, appends):
"""Obtain the recipe information for the specified filename,
- using cached values if available, otherwise parsing.
-
- Note that if it does parse to obtain the info, it will not
- automatically add the information to the cache or to your
- CacheData. Use the add or add_info method to do so after
- running this, or use loadData instead."""
- cached = self.cacheValid(filename, appends)
- if cached:
- infos = []
- # info_array item is a list of [CoreRecipeInfo, XXXRecipeInfo]
- info_array = self.depends_cache[filename]
- for variant in info_array[0].variants:
- virtualfn = variant2virtual(filename, variant)
- infos.append((virtualfn, self.depends_cache[virtualfn]))
- else:
- return self.parse(filename, appends, configdata, self.caches_array)
-
- return cached, infos
-
- def loadData(self, fn, appends, cacheData):
- """Load the recipe info for the specified filename,
- parsing and adding to the cache if necessary, and adding
- the recipe information to the supplied CacheData instance."""
- skipped, virtuals = 0, 0
+ using cached values.
+ """
- cached, infos = self.load(fn, appends)
- for virtualfn, info_array in infos:
- if info_array[0].skipped:
- self.logger.debug("Skipping %s: %s", virtualfn, info_array[0].skipreason)
- skipped += 1
- else:
- self.add_info(virtualfn, info_array, cacheData, not cached)
- virtuals += 1
+ infos = []
+ # info_array item is a list of [CoreRecipeInfo, XXXRecipeInfo]
+ info_array = self.depends_cache[filename]
+ for variant in info_array[0].variants:
+ virtualfn = variant2virtual(filename, variant)
+ infos.append((virtualfn, self.depends_cache[virtualfn]))
- return cached, skipped, virtuals
+ return infos
def cacheValid(self, fn, appends):
"""
@@ -552,10 +559,6 @@ class Cache(NoCache):
"""
if fn not in self.checked:
self.cacheValidUpdate(fn, appends)
-
- # Is cache enabled?
- if not self.has_cache:
- return False
if fn in self.clean:
return True
return False
@@ -565,10 +568,6 @@ class Cache(NoCache):
Is the cache valid for fn?
Make thorough (slower) checks including timestamps.
"""
- # Is cache enabled?
- if not self.has_cache:
- return False
-
self.checked.add(fn)
# File isn't in depends_cache
@@ -675,10 +674,6 @@ class Cache(NoCache):
Save the cache
Called from the parser when complete (or exiting)
"""
-
- if not self.has_cache:
- return
-
if self.cacheclean:
self.logger.debug2("Cache is clean, not saving.")
return
@@ -699,6 +694,7 @@ class Cache(NoCache):
p.dump(info)
del self.depends_cache
+ SiggenRecipeInfo.reset()
@staticmethod
def mtime(cachefile):
@@ -721,26 +717,11 @@ class Cache(NoCache):
if watcher:
watcher(info_array[0].file_depends)
- if not self.has_cache:
- return
-
if (info_array[0].skipped or 'SRCREVINACTION' not in info_array[0].pv) and not info_array[0].nocache:
if parsed:
self.cacheclean = False
self.depends_cache[filename] = info_array
- def add(self, file_name, data, cacheData, parsed=None):
- """
- Save data we need into the cache
- """
-
- realfn = virtualfn2realfn(file_name)[0]
-
- info_array = []
- for cache_class in self.caches_array:
- info_array.append(cache_class(realfn, data))
- self.add_info(file_name, info_array, cacheData, parsed)
-
class MulticonfigCache(Mapping):
def __init__(self, databuilder, data_hash, caches_array):
def progress(p):
@@ -777,6 +758,7 @@ class MulticonfigCache(Mapping):
loaded = 0
for c in self.__caches.values():
+ SiggenRecipeInfo.reset()
loaded += c.prepare_cache(progress)
previous_progress = current_progress
@@ -854,11 +836,10 @@ class MultiProcessCache(object):
self.cachedata = self.create_cachedata()
self.cachedata_extras = self.create_cachedata()
- def init_cache(self, d, cache_file_name=None):
- cachedir = (d.getVar("PERSISTENT_DIR") or
- d.getVar("CACHE"))
- if cachedir in [None, '']:
+ def init_cache(self, cachedir, cache_file_name=None):
+ if not cachedir:
return
+
bb.utils.mkdirhier(cachedir)
self.cachefile = os.path.join(cachedir,
cache_file_name or self.__class__.cache_file_name)
@@ -889,6 +870,10 @@ class MultiProcessCache(object):
if not self.cachefile:
return
+ have_data = any(self.cachedata_extras)
+ if not have_data:
+ return
+
glf = bb.utils.lockfile(self.cachefile + ".lock", shared=True)
i = os.getpid()
@@ -923,6 +908,8 @@ class MultiProcessCache(object):
data = self.cachedata
+ have_data = False
+
for f in [y for y in os.listdir(os.path.dirname(self.cachefile)) if y.startswith(os.path.basename(self.cachefile) + '-')]:
f = os.path.join(os.path.dirname(self.cachefile), f)
try:
@@ -937,12 +924,14 @@ class MultiProcessCache(object):
os.unlink(f)
continue
+ have_data = True
self.merge_data(extradata, data)
os.unlink(f)
- with open(self.cachefile, "wb") as f:
- p = pickle.Pickler(f, -1)
- p.dump([data, self.__class__.CACHE_VERSION])
+ if have_data:
+ with open(self.cachefile, "wb") as f:
+ p = pickle.Pickler(f, -1)
+ p.dump([data, self.__class__.CACHE_VERSION])
bb.utils.unlockfile(glf)
@@ -998,3 +987,11 @@ class SimpleCache(object):
p.dump([data, self.cacheversion])
bb.utils.unlockfile(glf)
+
+ def copyfile(self, target):
+ if not self.cachefile:
+ return
+
+ glf = bb.utils.lockfile(self.cachefile + ".lock")
+ shutil.copy(self.cachefile, target)
+ bb.utils.unlockfile(glf)
diff --git a/bitbake/lib/bb/codeparser.py b/bitbake/lib/bb/codeparser.py
index 3b3c3b41ff..2e8b7ced3c 100644
--- a/bitbake/lib/bb/codeparser.py
+++ b/bitbake/lib/bb/codeparser.py
@@ -1,4 +1,6 @@
#
+# Copyright BitBake Contributors
+#
# SPDX-License-Identifier: GPL-2.0-only
#
@@ -25,6 +27,7 @@ import ast
import sys
import codegen
import logging
+import inspect
import bb.pysh as pysh
import bb.utils, bb.data
import hashlib
@@ -56,10 +59,40 @@ def check_indent(codestr):
return codestr
-# A custom getstate/setstate using tuples is actually worth 15% cachesize by
-# avoiding duplication of the attribute names!
+modulecode_deps = {}
+def add_module_functions(fn, functions, namespace):
+ import os
+ fstat = os.stat(fn)
+ fixedhash = fn + ":" + str(fstat.st_size) + ":" + str(fstat.st_mtime)
+ for f in functions:
+ name = "%s.%s" % (namespace, f)
+ parser = PythonParser(name, logger)
+ try:
+ parser.parse_python(None, filename=fn, lineno=1, fixedhash=fixedhash+f)
+ #bb.warn("Cached %s" % f)
+ except KeyError:
+ lines, lineno = inspect.getsourcelines(functions[f])
+ src = "".join(lines)
+ parser.parse_python(src, filename=fn, lineno=lineno, fixedhash=fixedhash+f)
+ #bb.warn("Not cached %s" % f)
+ execs = parser.execs.copy()
+ # Expand internal module exec references
+ for e in parser.execs:
+ if e in functions:
+ execs.remove(e)
+ execs.add(namespace + "." + e)
+ modulecode_deps[name] = [parser.references.copy(), execs, parser.var_execs.copy(), parser.contains.copy()]
+ #bb.warn("%s: %s\nRefs:%s Execs: %s %s %s" % (name, fn, parser.references, parser.execs, parser.var_execs, parser.contains))
+
+def update_module_dependencies(d):
+ for mod in modulecode_deps:
+ excludes = set((d.getVarFlag(mod, "vardepsexclude") or "").split())
+ if excludes:
+ modulecode_deps[mod] = [modulecode_deps[mod][0] - excludes, modulecode_deps[mod][1] - excludes, modulecode_deps[mod][2] - excludes, modulecode_deps[mod][3]]
+# A custom getstate/setstate using tuples is actually worth 15% cachesize by
+# avoiding duplication of the attribute names!
class SetCache(object):
def __init__(self):
self.setcache = {}
@@ -152,12 +185,12 @@ class CodeParserCache(MultiProcessCache):
self.shellcachelines[h] = cacheline
return cacheline
- def init_cache(self, d):
+ def init_cache(self, cachedir):
# Check if we already have the caches
if self.pythoncache:
return
- MultiProcessCache.init_cache(self, d)
+ MultiProcessCache.init_cache(self, cachedir)
# cachedata gets re-assigned in the parent
self.pythoncache = self.cachedata[0]
@@ -169,8 +202,8 @@ class CodeParserCache(MultiProcessCache):
codeparsercache = CodeParserCache()
-def parser_cache_init(d):
- codeparsercache.init_cache(d)
+def parser_cache_init(cachedir):
+ codeparsercache.init_cache(cachedir)
def parser_cache_save():
codeparsercache.save_extras()
@@ -223,19 +256,19 @@ class PythonParser():
def visit_Call(self, node):
name = self.called_node_name(node.func)
if name and (name.endswith(self.getvars) or name.endswith(self.getvarflags) or name in self.containsfuncs or name in self.containsanyfuncs):
- if isinstance(node.args[0], ast.Str):
- varname = node.args[0].s
- if name in self.containsfuncs and isinstance(node.args[1], ast.Str):
+ if isinstance(node.args[0], ast.Constant) and isinstance(node.args[0].value, str):
+ varname = node.args[0].value
+ if name in self.containsfuncs and isinstance(node.args[1], ast.Constant):
if varname not in self.contains:
self.contains[varname] = set()
- self.contains[varname].add(node.args[1].s)
- elif name in self.containsanyfuncs and isinstance(node.args[1], ast.Str):
+ self.contains[varname].add(node.args[1].value)
+ elif name in self.containsanyfuncs and isinstance(node.args[1], ast.Constant):
if varname not in self.contains:
self.contains[varname] = set()
- self.contains[varname].update(node.args[1].s.split())
+ self.contains[varname].update(node.args[1].value.split())
elif name.endswith(self.getvarflags):
- if isinstance(node.args[1], ast.Str):
- self.references.add('%s[%s]' % (varname, node.args[1].s))
+ if isinstance(node.args[1], ast.Constant):
+ self.references.add('%s[%s]' % (varname, node.args[1].value))
else:
self.warn(node.func, node.args[1])
else:
@@ -243,8 +276,8 @@ class PythonParser():
else:
self.warn(node.func, node.args[0])
elif name and name.endswith(".expand"):
- if isinstance(node.args[0], ast.Str):
- value = node.args[0].s
+ if isinstance(node.args[0], ast.Constant):
+ value = node.args[0].value
d = bb.data.init()
parser = d.expandWithRefs(value, self.name)
self.references |= parser.references
@@ -254,8 +287,8 @@ class PythonParser():
self.contains[varname] = set()
self.contains[varname] |= parser.contains[varname]
elif name in self.execfuncs:
- if isinstance(node.args[0], ast.Str):
- self.var_execs.add(node.args[0].s)
+ if isinstance(node.args[0], ast.Constant):
+ self.var_execs.add(node.args[0].value)
else:
self.warn(node.func, node.args[0])
elif name and isinstance(node.func, (ast.Name, ast.Attribute)):
@@ -287,11 +320,17 @@ class PythonParser():
self.unhandled_message = "in call of %s, argument '%s' is not a string literal"
self.unhandled_message = "while parsing %s, %s" % (name, self.unhandled_message)
- def parse_python(self, node, lineno=0, filename="<string>"):
- if not node or not node.strip():
+ # For the python module code it is expensive to have the function text so it is
+ # uses a different fixedhash to cache against. We can take the hit on obtaining the
+ # text if it isn't in the cache.
+ def parse_python(self, node, lineno=0, filename="<string>", fixedhash=None):
+ if not fixedhash and (not node or not node.strip()):
return
- h = bbhash(str(node))
+ if fixedhash:
+ h = fixedhash
+ else:
+ h = bbhash(str(node))
if h in codeparsercache.pythoncache:
self.references = set(codeparsercache.pythoncache[h].refs)
@@ -309,6 +348,9 @@ class PythonParser():
self.contains[i] = set(codeparsercache.pythoncacheextras[h].contains[i])
return
+ if fixedhash and not node:
+ raise KeyError
+
# Need to parse so take the hit on the real log buffer
self.log = BufferedLogger('BitBake.Data.PythonParser', logging.DEBUG, self._log)
diff --git a/bitbake/lib/bb/command.py b/bitbake/lib/bb/command.py
index ec86885220..1fcb9bf14c 100644
--- a/bitbake/lib/bb/command.py
+++ b/bitbake/lib/bb/command.py
@@ -51,20 +51,21 @@ class Command:
"""
A queue of asynchronous commands for bitbake
"""
- def __init__(self, cooker):
+ def __init__(self, cooker, process_server):
self.cooker = cooker
self.cmds_sync = CommandsSync()
self.cmds_async = CommandsAsync()
self.remotedatastores = None
- # FIXME Add lock for this
+ self.process_server = process_server
+ # Access with locking using process_server.{get/set/clear}_async_cmd()
self.currentAsyncCommand = None
- def runCommand(self, commandline, ro_only = False):
+ def runCommand(self, commandline, process_server, ro_only=False):
command = commandline.pop(0)
# Ensure cooker is ready for commands
- if command != "updateConfig" and command != "setFeatures":
+ if command not in ["updateConfig", "setFeatures", "ping"]:
try:
self.cooker.init_configdata()
if not self.remotedatastores:
@@ -84,7 +85,6 @@ class Command:
if not hasattr(command_method, 'readonly') or not getattr(command_method, 'readonly'):
return None, "Not able to execute not readonly commands in readonly mode"
try:
- self.cooker.process_inotify_updates()
if getattr(command_method, 'needconfig', True):
self.cooker.updateCacheSync()
result = command_method(self, commandline)
@@ -99,24 +99,23 @@ class Command:
return None, traceback.format_exc()
else:
return result, None
- if self.currentAsyncCommand is not None:
- return None, "Busy (%s in progress)" % self.currentAsyncCommand[0]
if command not in CommandsAsync.__dict__:
return None, "No such command"
- self.currentAsyncCommand = (command, commandline)
- self.cooker.idleCallBackRegister(self.cooker.runCommands, self.cooker)
+ if not process_server.set_async_cmd((command, commandline)):
+ return None, "Busy (%s in progress)" % self.process_server.get_async_cmd()[0]
+ self.cooker.idleCallBackRegister(self.runAsyncCommand, process_server)
return True, None
- def runAsyncCommand(self):
+ def runAsyncCommand(self, _, process_server, halt):
try:
- self.cooker.process_inotify_updates()
if self.cooker.state in (bb.cooker.state.error, bb.cooker.state.shutdown, bb.cooker.state.forceshutdown):
# updateCache will trigger a shutdown of the parser
# and then raise BBHandledException triggering an exit
self.cooker.updateCache()
- return False
- if self.currentAsyncCommand is not None:
- (command, options) = self.currentAsyncCommand
+ return bb.server.process.idleFinish("Cooker in error state")
+ cmd = process_server.get_async_cmd()
+ if cmd is not None:
+ (command, options) = cmd
commandmethod = getattr(CommandsAsync, command)
needcache = getattr( commandmethod, "needcache" )
if needcache and self.cooker.state != bb.cooker.state.running:
@@ -126,24 +125,21 @@ class Command:
commandmethod(self.cmds_async, self, options)
return False
else:
- return False
+ return bb.server.process.idleFinish("Nothing to do, no async command?")
except KeyboardInterrupt as exc:
- self.finishAsyncCommand("Interrupted")
- return False
+ return bb.server.process.idleFinish("Interrupted")
except SystemExit as exc:
arg = exc.args[0]
if isinstance(arg, str):
- self.finishAsyncCommand(arg)
+ return bb.server.process.idleFinish(arg)
else:
- self.finishAsyncCommand("Exited with %s" % arg)
- return False
+ return bb.server.process.idleFinish("Exited with %s" % arg)
except Exception as exc:
import traceback
if isinstance(exc, bb.BBHandledException):
- self.finishAsyncCommand("")
+ return bb.server.process.idleFinish("")
else:
- self.finishAsyncCommand(traceback.format_exc())
- return False
+ return bb.server.process.idleFinish(traceback.format_exc())
def finishAsyncCommand(self, msg=None, code=None):
if msg or msg == "":
@@ -152,8 +148,8 @@ class Command:
bb.event.fire(CommandExit(code), self.cooker.data)
else:
bb.event.fire(CommandCompleted(), self.cooker.data)
- self.currentAsyncCommand = None
self.cooker.finishcommand()
+ self.process_server.clear_async_cmd()
def reset(self):
if self.remotedatastores:
@@ -166,6 +162,14 @@ class CommandsSync:
These must not influence any running synchronous command.
"""
+ def ping(self, command, params):
+ """
+ Allow a UI to check the server is still alive
+ """
+ return "Still alive!"
+ ping.needconfig = False
+ ping.readonly = True
+
def stateShutdown(self, command, params):
"""
Trigger cooker 'shutdown' mode
@@ -303,6 +307,11 @@ class CommandsSync:
return ret
getLayerPriorities.readonly = True
+ def revalidateCaches(self, command, params):
+ """Called by UI clients when metadata may have changed"""
+ command.cooker.revalidateCaches()
+ parseConfiguration.needconfig = False
+
def getRecipes(self, command, params):
try:
mc = params[0]
@@ -541,8 +550,8 @@ class CommandsSync:
and return a datastore object representing the environment
for the recipe.
"""
- fn = params[0]
- mc = bb.runqueue.mc_from_tid(fn)
+ virtualfn = params[0]
+ (fn, cls, mc) = bb.cache.virtualfn2realfn(virtualfn)
appends = params[1]
appendlist = params[2]
if len(params) > 3:
@@ -557,6 +566,7 @@ class CommandsSync:
appendfiles = command.cooker.collections[mc].get_file_appends(fn)
else:
appendfiles = []
+ layername = command.cooker.collections[mc].calc_bbfile_priority(fn)[2]
# We are calling bb.cache locally here rather than on the server,
# but that's OK because it doesn't actually need anything from
# the server barring the global datastore (which we have a remote
@@ -564,11 +574,10 @@ class CommandsSync:
if config_data:
# We have to use a different function here if we're passing in a datastore
# NOTE: we took a copy above, so we don't do it here again
- envdata = bb.cache.parse_recipe(config_data, fn, appendfiles, mc)['']
+ envdata = command.cooker.databuilder._parse_recipe(config_data, fn, appendfiles, mc, layername)[cls]
else:
# Use the standard path
- parser = bb.cache.NoCache(command.cooker.databuilder)
- envdata = parser.loadDataFull(fn, appendfiles)
+ envdata = command.cooker.databuilder.parseRecipe(virtualfn, appendfiles, layername)
idx = command.remotedatastores.store(envdata)
return DataStoreConnectionHandle(idx)
parseRecipeFile.readonly = True
@@ -741,7 +750,7 @@ class CommandsAsync:
"""
event = params[0]
bb.event.fire(eval(event), command.cooker.data)
- command.currentAsyncCommand = None
+ process_server.clear_async_cmd()
triggerEvent.needcache = False
def resetCooker(self, command, params):
@@ -768,7 +777,14 @@ class CommandsAsync:
(mc, pn) = bb.runqueue.split_mc(params[0])
taskname = params[1]
sigs = params[2]
+ bb.siggen.check_siggen_version(bb.siggen)
res = bb.siggen.find_siginfo(pn, taskname, sigs, command.cooker.databuilder.mcdata[mc])
bb.event.fire(bb.event.FindSigInfoResult(res), command.cooker.databuilder.mcdata[mc])
command.finishAsyncCommand()
findSigInfo.needcache = False
+
+ def getTaskSignatures(self, command, params):
+ res = command.cooker.getTaskSignatures(params[0], params[1])
+ bb.event.fire(bb.event.GetTaskSignatureResult(res), command.cooker.data)
+ command.finishAsyncCommand()
+ getTaskSignatures.needcache = True
diff --git a/bitbake/lib/bb/compress/_pipecompress.py b/bitbake/lib/bb/compress/_pipecompress.py
index 5de17a82e2..4a403d62cf 100644
--- a/bitbake/lib/bb/compress/_pipecompress.py
+++ b/bitbake/lib/bb/compress/_pipecompress.py
@@ -1,4 +1,6 @@
#
+# Copyright BitBake Contributors
+#
# SPDX-License-Identifier: GPL-2.0-only
#
# Helper library to implement streaming compression and decompression using an
diff --git a/bitbake/lib/bb/compress/lz4.py b/bitbake/lib/bb/compress/lz4.py
index 0f6bc51a5b..88b0989322 100644
--- a/bitbake/lib/bb/compress/lz4.py
+++ b/bitbake/lib/bb/compress/lz4.py
@@ -1,4 +1,6 @@
#
+# Copyright BitBake Contributors
+#
# SPDX-License-Identifier: GPL-2.0-only
#
diff --git a/bitbake/lib/bb/compress/zstd.py b/bitbake/lib/bb/compress/zstd.py
index 50c42133fb..cdbbe9d60f 100644
--- a/bitbake/lib/bb/compress/zstd.py
+++ b/bitbake/lib/bb/compress/zstd.py
@@ -1,4 +1,6 @@
#
+# Copyright BitBake Contributors
+#
# SPDX-License-Identifier: GPL-2.0-only
#
diff --git a/bitbake/lib/bb/cooker.py b/bitbake/lib/bb/cooker.py
index 6da9291f9c..c5bfef55d6 100644
--- a/bitbake/lib/bb/cooker.py
+++ b/bitbake/lib/bb/cooker.py
@@ -13,7 +13,6 @@ import sys, os, glob, os.path, re, time
import itertools
import logging
import multiprocessing
-import sre_constants
import threading
from io import StringIO, UnsupportedOperation
from contextlib import closing
@@ -23,7 +22,6 @@ from bb import utils, data, parse, event, cache, providers, taskdata, runqueue,
import queue
import signal
import prserv.serv
-import pyinotify
import json
import pickle
import codecs
@@ -81,7 +79,7 @@ class SkippedPackage:
class CookerFeatures(object):
- _feature_list = [HOB_EXTRA_CACHES, BASEDATASTORE_TRACKING, SEND_SANITYEVENTS] = list(range(3))
+ _feature_list = [HOB_EXTRA_CACHES, BASEDATASTORE_TRACKING, SEND_SANITYEVENTS, RECIPE_SIGGEN_INFO] = list(range(4))
def __init__(self):
self._features=set()
@@ -104,12 +102,15 @@ class CookerFeatures(object):
class EventWriter:
def __init__(self, cooker, eventfile):
- self.file_inited = None
self.cooker = cooker
self.eventfile = eventfile
self.event_queue = []
- def write_event(self, event):
+ def write_variables(self):
+ with open(self.eventfile, "a") as f:
+ f.write("%s\n" % json.dumps({ "allvariables" : self.cooker.getAllKeysWithFlags(["doc", "func"])}))
+
+ def send(self, event):
with open(self.eventfile, "a") as f:
try:
str_event = codecs.encode(pickle.dumps(event), 'base64').decode('utf-8')
@@ -119,28 +120,6 @@ class EventWriter:
import traceback
print(err, traceback.format_exc())
- def send(self, event):
- if self.file_inited:
- # we have the file, just write the event
- self.write_event(event)
- else:
- # init on bb.event.BuildStarted
- name = "%s.%s" % (event.__module__, event.__class__.__name__)
- if name in ("bb.event.BuildStarted", "bb.cooker.CookerExit"):
- with open(self.eventfile, "w") as f:
- f.write("%s\n" % json.dumps({ "allvariables" : self.cooker.getAllKeysWithFlags(["doc", "func"])}))
-
- self.file_inited = True
-
- # write pending events
- for evt in self.event_queue:
- self.write_event(evt)
-
- # also write the current event
- self.write_event(event)
- else:
- # queue all events until the file is inited
- self.event_queue.append(event)
#============================================================================#
# BBCooker
@@ -150,8 +129,10 @@ class BBCooker:
Manages one bitbake build run
"""
- def __init__(self, featureSet=None, idleCallBackRegister=None):
+ def __init__(self, featureSet=None, server=None):
self.recipecaches = None
+ self.baseconfig_valid = False
+ self.parsecache_valid = False
self.eventlog = None
self.skiplist = {}
self.featureset = CookerFeatures()
@@ -164,20 +145,17 @@ class BBCooker:
self.configuration = bb.cookerdata.CookerConfiguration()
- self.idleCallBackRegister = idleCallBackRegister
+ self.process_server = server
+ self.idleCallBackRegister = None
+ self.waitIdle = None
+ if server:
+ self.idleCallBackRegister = server.register_idle_function
+ self.waitIdle = server.wait_for_idle
bb.debug(1, "BBCooker starting %s" % time.time())
- sys.stdout.flush()
-
- self.configwatcher = None
- self.confignotifier = None
-
- self.watchmask = pyinotify.IN_CLOSE_WRITE | pyinotify.IN_CREATE | pyinotify.IN_DELETE | \
- pyinotify.IN_DELETE_SELF | pyinotify.IN_MODIFY | pyinotify.IN_MOVE_SELF | \
- pyinotify.IN_MOVED_FROM | pyinotify.IN_MOVED_TO
- self.watcher = None
- self.notifier = None
+ self.configwatched = {}
+ self.parsewatched = {}
# If being called by something like tinfoil, we need to clean cached data
# which may now be invalid
@@ -188,14 +166,6 @@ class BBCooker:
self.hashserv = None
self.hashservaddr = None
- self.inotify_modified_files = []
-
- def _process_inotify_updates(server, cooker, halt):
- cooker.process_inotify_updates()
- return 1.0
-
- self.idleCallBackRegister(_process_inotify_updates, self)
-
# TOSTOP must not be set or our children will hang when they output
try:
fd = sys.stdout.fileno()
@@ -209,7 +179,7 @@ class BBCooker:
except UnsupportedOperation:
pass
- self.command = bb.command.Command(self)
+ self.command = bb.command.Command(self, self.process_server)
self.state = state.initial
self.parser = None
@@ -219,116 +189,37 @@ class BBCooker:
signal.signal(signal.SIGHUP, self.sigterm_exception)
bb.debug(1, "BBCooker startup complete %s" % time.time())
- sys.stdout.flush()
def init_configdata(self):
if not hasattr(self, "data"):
self.initConfigurationData()
bb.debug(1, "BBCooker parsed base configuration %s" % time.time())
- sys.stdout.flush()
self.handlePRServ()
- def setupConfigWatcher(self):
- if self.configwatcher:
- self.configwatcher.close()
- self.confignotifier = None
- self.configwatcher = None
- self.configwatcher = pyinotify.WatchManager()
- self.configwatcher.bbseen = set()
- self.configwatcher.bbwatchedfiles = set()
- self.confignotifier = pyinotify.Notifier(self.configwatcher, self.config_notifications)
-
- def setupParserWatcher(self):
- if self.watcher:
- self.watcher.close()
- self.notifier = None
- self.watcher = None
- self.watcher = pyinotify.WatchManager()
- self.watcher.bbseen = set()
- self.watcher.bbwatchedfiles = set()
- self.notifier = pyinotify.Notifier(self.watcher, self.notifications)
-
- def process_inotify_updates(self):
- for n in [self.confignotifier, self.notifier]:
- if n and n.check_events(timeout=0):
- # read notified events and enqeue them
- n.read_events()
- n.process_events()
-
- def config_notifications(self, event):
- if event.maskname == "IN_Q_OVERFLOW":
- bb.warn("inotify event queue overflowed, invalidating caches.")
- self.parsecache_valid = False
- self.baseconfig_valid = False
- bb.parse.clear_cache()
- return
- if not event.pathname in self.configwatcher.bbwatchedfiles:
- return
- if "IN_ISDIR" in event.maskname:
- if "IN_CREATE" in event.maskname or "IN_DELETE" in event.maskname:
- if event.pathname in self.configwatcher.bbseen:
- self.configwatcher.bbseen.remove(event.pathname)
- # Could remove all entries starting with the directory but for now...
- bb.parse.clear_cache()
- if not event.pathname in self.inotify_modified_files:
- self.inotify_modified_files.append(event.pathname)
- self.baseconfig_valid = False
-
- def notifications(self, event):
- if event.maskname == "IN_Q_OVERFLOW":
- bb.warn("inotify event queue overflowed, invalidating caches.")
- self.parsecache_valid = False
- bb.parse.clear_cache()
- return
- if event.pathname.endswith("bitbake-cookerdaemon.log") \
- or event.pathname.endswith("bitbake.lock"):
- return
- if "IN_ISDIR" in event.maskname:
- if "IN_CREATE" in event.maskname or "IN_DELETE" in event.maskname:
- if event.pathname in self.watcher.bbseen:
- self.watcher.bbseen.remove(event.pathname)
- # Could remove all entries starting with the directory but for now...
- bb.parse.clear_cache()
- if not event.pathname in self.inotify_modified_files:
- self.inotify_modified_files.append(event.pathname)
- self.parsecache_valid = False
+ def _baseconfig_set(self, value):
+ if value and not self.baseconfig_valid:
+ bb.server.process.serverlog("Base config valid")
+ elif not value and self.baseconfig_valid:
+ bb.server.process.serverlog("Base config invalidated")
+ self.baseconfig_valid = value
+
+ def _parsecache_set(self, value):
+ if value and not self.parsecache_valid:
+ bb.server.process.serverlog("Parse cache valid")
+ elif not value and self.parsecache_valid:
+ bb.server.process.serverlog("Parse cache invalidated")
+ self.parsecache_valid = value
+
+ def add_filewatch(self, deps, configwatcher=False):
+ if configwatcher:
+ watcher = self.configwatched
+ else:
+ watcher = self.parsewatched
- def add_filewatch(self, deps, watcher=None, dirs=False):
- if not watcher:
- watcher = self.watcher
for i in deps:
- watcher.bbwatchedfiles.add(i[0])
- if dirs:
- f = i[0]
- else:
- f = os.path.dirname(i[0])
- if f in watcher.bbseen:
- continue
- watcher.bbseen.add(f)
- watchtarget = None
- while True:
- # We try and add watches for files that don't exist but if they did, would influence
- # the parser. The parent directory of these files may not exist, in which case we need
- # to watch any parent that does exist for changes.
- try:
- watcher.add_watch(f, self.watchmask, quiet=False)
- if watchtarget:
- watcher.bbwatchedfiles.add(watchtarget)
- break
- except pyinotify.WatchManagerError as e:
- if 'ENOENT' in str(e):
- watchtarget = f
- f = os.path.dirname(f)
- if f in watcher.bbseen:
- break
- watcher.bbseen.add(f)
- continue
- if 'ENOSPC' in str(e):
- providerlog.error("No space left on device or exceeds fs.inotify.max_user_watches?")
- providerlog.error("To check max_user_watches: sysctl -n fs.inotify.max_user_watches.")
- providerlog.error("To modify max_user_watches: sysctl -n -w fs.inotify.max_user_watches=<value>.")
- providerlog.error("Root privilege is required to modify max_user_watches.")
- raise
+ f = i[0]
+ mtime = i[1]
+ watcher[f] = mtime
def sigterm_exception(self, signum, stackframe):
if signum == signal.SIGTERM:
@@ -336,6 +227,7 @@ class BBCooker:
elif signum == signal.SIGHUP:
bb.warn("Cooker received SIGHUP, shutting down...")
self.state = state.forceshutdown
+ bb.event._should_exit.set()
def setFeatures(self, features):
# we only accept a new feature set if we're in state initial, so we can reset without problems
@@ -358,7 +250,7 @@ class BBCooker:
if mod not in self.orig_sysmodules:
del sys.modules[mod]
- self.setupConfigWatcher()
+ self.configwatched = {}
# Need to preserve BB_CONSOLELOG over resets
consolelog = None
@@ -368,12 +260,12 @@ class BBCooker:
if CookerFeatures.BASEDATASTORE_TRACKING in self.featureset:
self.enableDataTracking()
- all_extra_cache_names = []
+ caches_name_array = ['bb.cache:CoreRecipeInfo']
# We hardcode all known cache types in a single place, here.
if CookerFeatures.HOB_EXTRA_CACHES in self.featureset:
- all_extra_cache_names.append("bb.cache_extra:HobRecipeInfo")
-
- caches_name_array = ['bb.cache:CoreRecipeInfo'] + all_extra_cache_names
+ caches_name_array.append("bb.cache_extra:HobRecipeInfo")
+ if CookerFeatures.RECIPE_SIGGEN_INFO in self.featureset:
+ caches_name_array.append("bb.cache:SiggenRecipeInfo")
# At least CoreRecipeInfo will be loaded, so caches_array will never be empty!
# This is the entry point, no further check needed!
@@ -392,6 +284,10 @@ class BBCooker:
self.data_hash = self.databuilder.data_hash
self.extraconfigdata = {}
+ eventlog = self.data.getVar("BB_DEFAULT_EVENTLOG")
+ if not self.configuration.writeeventlog and eventlog:
+ self.setupEventLog(eventlog)
+
if consolelog:
self.data.setVar("BB_CONSOLELOG", consolelog)
@@ -401,11 +297,10 @@ class BBCooker:
self.disableDataTracking()
for mc in self.databuilder.mcdata.values():
- mc.renameVar("__depends", "__base_depends")
- self.add_filewatch(mc.getVar("__base_depends", False), self.configwatcher)
+ self.add_filewatch(mc.getVar("__base_depends", False), configwatcher=True)
- self.baseconfig_valid = True
- self.parsecache_valid = False
+ self._baseconfig_set(True)
+ self._parsecache_set(False)
def handlePRServ(self):
# Setup a PR Server based on the new configuration
@@ -425,7 +320,7 @@ class BBCooker:
sock = socket.create_connection(upstream.split(":"), 5)
sock.close()
except socket.error as e:
- bb.warn("BB_HASHSERVE_UPSTREAM is not valid, unable to connect hash equivalence server at '%s': %s"
+ bb.warn("BB_HASHSERVE_UPSTREAM is not valid, unable to connect hash equivalence server at '%s': %s"
% (upstream, repr(e)))
self.hashservaddr = "unix://%s/hashserve.sock" % self.data.getVar("TOPDIR")
@@ -435,11 +330,9 @@ class BBCooker:
sync=False,
upstream=upstream,
)
- self.hashserv.serve_as_process()
- self.data.setVar("BB_HASHSERVE", self.hashservaddr)
- self.databuilder.origdata.setVar("BB_HASHSERVE", self.hashservaddr)
- self.databuilder.data.setVar("BB_HASHSERVE", self.hashservaddr)
+ self.hashserv.serve_as_process(log_level=logging.WARNING)
for mc in self.databuilder.mcdata:
+ self.databuilder.mcorigdata[mc].setVar("BB_HASHSERVE", self.hashservaddr)
self.databuilder.mcdata[mc].setVar("BB_HASHSERVE", self.hashservaddr)
bb.parse.init_parser(self.data)
@@ -454,6 +347,29 @@ class BBCooker:
if hasattr(self, "data"):
self.data.disableTracking()
+ def revalidateCaches(self):
+ bb.parse.clear_cache()
+
+ clean = True
+ for f in self.configwatched:
+ if not bb.parse.check_mtime(f, self.configwatched[f]):
+ bb.server.process.serverlog("Found %s changed, invalid cache" % f)
+ self._baseconfig_set(False)
+ self._parsecache_set(False)
+ clean = False
+ break
+
+ if clean:
+ for f in self.parsewatched:
+ if not bb.parse.check_mtime(f, self.parsewatched[f]):
+ bb.server.process.serverlog("Found %s changed, invalid cache" % f)
+ self._parsecache_set(False)
+ clean = False
+ break
+
+ if not clean:
+ bb.parse.BBHandler.cached_statements = {}
+
def parseConfiguration(self):
self.updateCacheSync()
@@ -472,8 +388,24 @@ class BBCooker:
self.recipecaches[mc] = bb.cache.CacheData(self.caches_array)
self.handleCollections(self.data.getVar("BBFILE_COLLECTIONS"))
-
- self.parsecache_valid = False
+ self.collections = {}
+ for mc in self.multiconfigs:
+ self.collections[mc] = CookerCollectFiles(self.bbfile_config_priorities, mc)
+
+ self._parsecache_set(False)
+
+ def setupEventLog(self, eventlog):
+ if self.eventlog and self.eventlog[0] != eventlog:
+ bb.event.unregister_UIHhandler(self.eventlog[1])
+ self.eventlog = None
+ if not self.eventlog or self.eventlog[0] != eventlog:
+ # we log all events to a file if so directed
+ # register the log file writer as UI Handler
+ if not os.path.exists(os.path.dirname(eventlog)):
+ bb.utils.mkdirhier(os.path.dirname(eventlog))
+ writer = EventWriter(self, eventlog)
+ EventLogWriteHandler = namedtuple('EventLogWriteHandler', ['event'])
+ self.eventlog = (eventlog, bb.event.register_UIHhandler(EventLogWriteHandler(writer)), writer)
def updateConfigOpts(self, options, environment, cmdline):
self.ui_cmdline = cmdline
@@ -494,14 +426,7 @@ class BBCooker:
setattr(self.configuration, o, options[o])
if self.configuration.writeeventlog:
- if self.eventlog and self.eventlog[0] != self.configuration.writeeventlog:
- bb.event.unregister_UIHhandler(self.eventlog[1])
- if not self.eventlog or self.eventlog[0] != self.configuration.writeeventlog:
- # we log all events to a file if so directed
- # register the log file writer as UI Handler
- writer = EventWriter(self, self.configuration.writeeventlog)
- EventLogWriteHandler = namedtuple('EventLogWriteHandler', ['event'])
- self.eventlog = (self.configuration.writeeventlog, bb.event.register_UIHhandler(EventLogWriteHandler(writer)))
+ self.setupEventLog(self.configuration.writeeventlog)
bb.msg.loggerDefaultLogLevel = self.configuration.default_loglevel
bb.msg.loggerDefaultDomains = self.configuration.debug_domains
@@ -531,19 +456,11 @@ class BBCooker:
# Now update all the variables not in the datastore to match
self.configuration.env = environment
+ self.revalidateCaches()
if not clean:
logger.debug("Base environment change, triggering reparse")
self.reset()
- def runCommands(self, server, data, halt):
- """
- Run any queued asynchronous command
- This is done by the idle handler so it runs in true context rather than
- tied to any UI.
- """
-
- return self.command.runAsyncCommand()
-
def showVersions(self):
(latest_versions, preferred_versions, required) = self.findProviders()
@@ -617,14 +534,14 @@ class BBCooker:
if fn:
try:
- bb_caches = bb.cache.MulticonfigCache(self.databuilder, self.data_hash, self.caches_array)
- envdata = bb_caches[mc].loadDataFull(fn, self.collections[mc].get_file_appends(fn))
+ layername = self.collections[mc].calc_bbfile_priority(fn)[2]
+ envdata = self.databuilder.parseRecipe(fn, self.collections[mc].get_file_appends(fn), layername)
except Exception as e:
parselog.exception("Unable to read %s", fn)
raise
else:
if not mc in self.databuilder.mcdata:
- bb.fatal('Not multiconfig named "%s" found' % mc)
+ bb.fatal('No multiconfig named "%s" found' % mc)
envdata = self.databuilder.mcdata[mc]
data.expandKeys(envdata)
parse.ast.runAnonFuncs(envdata)
@@ -1277,15 +1194,15 @@ class BBCooker:
except bb.utils.VersionStringException as vse:
bb.fatal('Error parsing LAYERRECOMMENDS_%s: %s' % (c, str(vse)))
if not res:
- parselog.debug(3,"Layer '%s' recommends version %s of layer '%s', but version %s is currently enabled in your configuration. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, rec, layerver)
+ parselog.debug3("Layer '%s' recommends version %s of layer '%s', but version %s is currently enabled in your configuration. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, rec, layerver)
continue
else:
- parselog.debug(3,"Layer '%s' recommends version %s of layer '%s', which exists in your configuration but does not specify a version. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, rec)
+ parselog.debug3("Layer '%s' recommends version %s of layer '%s', which exists in your configuration but does not specify a version. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, rec)
continue
- parselog.debug(3,"Layer '%s' recommends layer '%s', so we are adding it", c, rec)
+ parselog.debug3("Layer '%s' recommends layer '%s', so we are adding it", c, rec)
collection_depends[c].append(rec)
else:
- parselog.debug(3,"Layer '%s' recommends layer '%s', but this layer is not enabled in your configuration", c, rec)
+ parselog.debug3("Layer '%s' recommends layer '%s', but this layer is not enabled in your configuration", c, rec)
# Recursively work out collection priorities based on dependencies
def calc_layer_priority(collection):
@@ -1297,7 +1214,7 @@ class BBCooker:
if depprio > max_depprio:
max_depprio = depprio
max_depprio += 1
- parselog.debug(1, "Calculated priority of layer %s as %d", collection, max_depprio)
+ parselog.debug("Calculated priority of layer %s as %d", collection, max_depprio)
collection_priorities[collection] = max_depprio
# Calculate all layer priorities using calc_layer_priority and store in bbfile_config_priorities
@@ -1309,7 +1226,7 @@ class BBCooker:
errors = True
continue
elif regex == "":
- parselog.debug(1, "BBFILE_PATTERN_%s is empty" % c)
+ parselog.debug("BBFILE_PATTERN_%s is empty" % c)
cre = re.compile('^NULL$')
errors = False
else:
@@ -1356,8 +1273,8 @@ class BBCooker:
if bf.startswith("/") or bf.startswith("../"):
bf = os.path.abspath(bf)
- self.collections = {mc: CookerCollectFiles(self.bbfile_config_priorities, mc)}
- filelist, masked, searchdirs = self.collections[mc].collect_bbfiles(self.databuilder.mcdata[mc], self.databuilder.mcdata[mc])
+ collections = {mc: CookerCollectFiles(self.bbfile_config_priorities, mc)}
+ filelist, masked, searchdirs = collections[mc].collect_bbfiles(self.databuilder.mcdata[mc], self.databuilder.mcdata[mc])
try:
os.stat(bf)
bf = os.path.abspath(bf)
@@ -1423,7 +1340,8 @@ class BBCooker:
bb_caches = bb.cache.MulticonfigCache(self.databuilder, self.data_hash, self.caches_array)
- infos = bb_caches[mc].parse(fn, self.collections[mc].get_file_appends(fn))
+ layername = self.collections[mc].calc_bbfile_priority(fn)[2]
+ infos = bb_caches[mc].parse(fn, self.collections[mc].get_file_appends(fn), layername)
infos = dict(infos)
fn = bb.cache.realfn2virtual(fn, cls, mc)
@@ -1449,10 +1367,12 @@ class BBCooker:
self.recipecaches[mc].rundeps[fn] = defaultdict(list)
self.recipecaches[mc].runrecs[fn] = defaultdict(list)
+ bb.parse.siggen.setup_datacache(self.recipecaches)
+
# Invalidate task for target if force mode active
if self.configuration.force:
logger.verbose("Invalidate task %s, %s", task, fn)
- bb.parse.siggen.invalidate_task(task, self.recipecaches[mc], fn)
+ bb.parse.siggen.invalidate_task(task, fn)
# Setup taskdata structure
taskdata = {}
@@ -1466,6 +1386,9 @@ class BBCooker:
buildname = self.databuilder.mcdata[mc].getVar("BUILDNAME")
if fireevents:
bb.event.fire(bb.event.BuildStarted(buildname, [item]), self.databuilder.mcdata[mc])
+ if self.eventlog:
+ self.eventlog[2].write_variables()
+ bb.event.enable_heartbeat()
# Execute the runqueue
runlist = [[mc, item, task, fn]]
@@ -1491,28 +1414,58 @@ class BBCooker:
failures += len(exc.args)
retval = False
except SystemExit as exc:
- self.command.finishAsyncCommand(str(exc))
if quietlog:
bb.runqueue.logger.setLevel(rqloglevel)
- return False
+ return bb.server.process.idleFinish(str(exc))
if not retval:
if fireevents:
bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runtaskentries), buildname, item, failures, interrupted), self.databuilder.mcdata[mc])
- self.command.finishAsyncCommand(msg)
+ bb.event.disable_heartbeat()
# We trashed self.recipecaches above
- self.parsecache_valid = False
+ self._parsecache_set(False)
self.configuration.limited_deps = False
bb.parse.siggen.reset(self.data)
if quietlog:
bb.runqueue.logger.setLevel(rqloglevel)
- return False
+ return bb.server.process.idleFinish(msg)
if retval is True:
return True
return retval
self.idleCallBackRegister(buildFileIdle, rq)
+ def getTaskSignatures(self, target, tasks):
+ sig = []
+ getAllTaskSignatures = False
+
+ if not tasks:
+ tasks = ["do_build"]
+ getAllTaskSignatures = True
+
+ for task in tasks:
+ taskdata, runlist = self.buildTaskData(target, task, self.configuration.halt)
+ rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist)
+ rq.rqdata.prepare()
+
+ for l in runlist:
+ mc, pn, taskname, fn = l
+
+ taskdep = rq.rqdata.dataCaches[mc].task_deps[fn]
+ for t in taskdep['tasks']:
+ if t in taskdep['nostamp'] or "setscene" in t:
+ continue
+ tid = bb.runqueue.build_tid(mc, fn, t)
+
+ if t in task or getAllTaskSignatures:
+ try:
+ rq.rqdata.prepare_task_hash(tid)
+ sig.append([pn, t, rq.rqdata.get_task_unihash(tid)])
+ except KeyError:
+ sig.append(self.getTaskSignatures(target, [t])[0])
+
+ return sig
+
def buildTargets(self, targets, task):
"""
Attempt to build the targets specified
@@ -1522,6 +1475,7 @@ class BBCooker:
msg = None
interrupted = 0
if halt or self.state == state.forceshutdown:
+ bb.event._should_exit.set()
rq.finish_runqueue(True)
msg = "Forced shutdown"
interrupted = 2
@@ -1536,16 +1490,16 @@ class BBCooker:
failures += len(exc.args)
retval = False
except SystemExit as exc:
- self.command.finishAsyncCommand(str(exc))
- return False
+ return bb.server.process.idleFinish(str(exc))
if not retval:
try:
for mc in self.multiconfigs:
bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runtaskentries), buildname, targets, failures, interrupted), self.databuilder.mcdata[mc])
finally:
- self.command.finishAsyncCommand(msg)
- return False
+ bb.event.disable_heartbeat()
+ return bb.server.process.idleFinish(msg)
+
if retval is True:
return True
return retval
@@ -1577,6 +1531,9 @@ class BBCooker:
for mc in self.multiconfigs:
bb.event.fire(bb.event.BuildStarted(buildname, ntargets), self.databuilder.mcdata[mc])
+ if self.eventlog:
+ self.eventlog[2].write_variables()
+ bb.event.enable_heartbeat()
rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist)
if 'universe' in targets:
@@ -1586,7 +1543,13 @@ class BBCooker:
def getAllKeysWithFlags(self, flaglist):
+ def dummy_autorev(d):
+ return
+
dump = {}
+ # Horrible but for now we need to avoid any sideeffects of autorev being called
+ saved = bb.fetch2.get_autorev
+ bb.fetch2.get_autorev = dummy_autorev
for k in self.data.keys():
try:
expand = True
@@ -1606,6 +1569,7 @@ class BBCooker:
dump[k][d] = None
except Exception as e:
print(e)
+ bb.fetch2.get_autorev = saved
return dump
@@ -1613,13 +1577,6 @@ class BBCooker:
if self.state == state.running:
return
- # reload files for which we got notifications
- for p in self.inotify_modified_files:
- bb.parse.update_cache(p)
- if p in bb.parse.BBHandler.cached_statements:
- del bb.parse.BBHandler.cached_statements[p]
- self.inotify_modified_files = []
-
if not self.baseconfig_valid:
logger.debug("Reloading base configuration data")
self.initConfigurationData()
@@ -1640,7 +1597,8 @@ class BBCooker:
self.updateCacheSync()
if self.state != state.parsing and not self.parsecache_valid:
- self.setupParserWatcher()
+ bb.server.process.serverlog("Parsing started")
+ self.parsewatched = {}
bb.parse.siggen.reset(self.data)
self.parseConfiguration ()
@@ -1655,30 +1613,27 @@ class BBCooker:
for dep in self.configuration.extra_assume_provided:
self.recipecaches[mc].ignored_dependencies.add(dep)
- self.collections = {}
-
mcfilelist = {}
total_masked = 0
searchdirs = set()
for mc in self.multiconfigs:
- self.collections[mc] = CookerCollectFiles(self.bbfile_config_priorities, mc)
(filelist, masked, search) = self.collections[mc].collect_bbfiles(self.databuilder.mcdata[mc], self.databuilder.mcdata[mc])
mcfilelist[mc] = filelist
total_masked += masked
searchdirs |= set(search)
- # Add inotify watches for directories searched for bb/bbappend files
+ # Add mtimes for directories searched for bb/bbappend files
for dirent in searchdirs:
- self.add_filewatch([[dirent]], dirs=True)
+ self.add_filewatch([(dirent, bb.parse.cached_mtime_noerror(dirent))])
self.parser = CookerParser(self, mcfilelist, total_masked)
- self.parsecache_valid = True
+ self._parsecache_set(True)
self.state = state.parsing
if not self.parser.parse_next():
- collectlog.debug(1, "parsing complete")
+ collectlog.debug("parsing complete")
if self.parser.error:
raise bb.BBHandledException()
self.show_appends_with_no_recipes()
@@ -1723,7 +1678,7 @@ class BBCooker:
if 'universe' in pkgs_to_build:
parselog.verbnote("The \"universe\" target is only intended for testing and may produce errors.")
- parselog.debug(1, "collating packages for \"universe\"")
+ parselog.debug("collating packages for \"universe\"")
pkgs_to_build.remove('universe')
for mc in self.multiconfigs:
for t in self.recipecaches[mc].universe_target:
@@ -1756,22 +1711,28 @@ class BBCooker:
if hasattr(self, "data"):
bb.event.fire(CookerExit(), self.data)
- def shutdown(self, force = False):
+ def shutdown(self, force=False):
if force:
self.state = state.forceshutdown
+ bb.event._should_exit.set()
else:
self.state = state.shutdown
if self.parser:
- self.parser.shutdown(clean=not force)
+ self.parser.shutdown(clean=False)
self.parser.final_cleanup()
def finishcommand(self):
+ if hasattr(self.parser, 'shutdown'):
+ self.parser.shutdown(clean=False)
+ self.parser.final_cleanup()
self.state = state.initial
+ bb.event._should_exit.clear()
def reset(self):
if hasattr(bb.parse, "siggen"):
bb.parse.siggen.exit()
+ self.finishcommand()
self.initConfigurationData()
self.handlePRServ()
@@ -1783,9 +1744,9 @@ class BBCooker:
if hasattr(self, "data"):
self.databuilder.reset()
self.data = self.databuilder.data
- self.parsecache_valid = False
- self.baseconfig_valid = False
-
+ # In theory tinfoil could have modified the base data before parsing,
+ # ideally need to track if anything did modify the datastore
+ self._parsecache_set(False)
class CookerExit(bb.event.Event):
"""
@@ -1806,10 +1767,10 @@ class CookerCollectFiles(object):
self.bbfile_config_priorities = sorted(priorities, key=lambda tup: tup[1], reverse=True)
def calc_bbfile_priority(self, filename):
- for _, _, regex, pri in self.bbfile_config_priorities:
+ for layername, _, regex, pri in self.bbfile_config_priorities:
if regex.match(filename):
- return pri, regex
- return 0, None
+ return pri, regex, layername
+ return 0, None, None
def get_bbfiles(self):
"""Get list of default .bb files by reading out the current directory"""
@@ -1828,7 +1789,7 @@ class CookerCollectFiles(object):
for ignored in ('SCCS', 'CVS', '.svn'):
if ignored in dirs:
dirs.remove(ignored)
- found += [os.path.join(dir, f) for f in files if (f.endswith(['.bb', '.bbappend']))]
+ found += [os.path.join(dir, f) for f in files if (f.endswith(('.bb', '.bbappend')))]
return found
@@ -1836,7 +1797,7 @@ class CookerCollectFiles(object):
"""Collect all available .bb build files"""
masked = 0
- collectlog.debug(1, "collecting .bb files")
+ collectlog.debug("collecting .bb files")
files = (config.getVar( "BBFILES") or "").split()
@@ -1851,7 +1812,7 @@ class CookerCollectFiles(object):
collectlog.error("no recipe files to build, check your BBPATH and BBFILES?")
bb.event.fire(CookerExit(), eventdata)
- # We need to track where we look so that we can add inotify watches. There
+ # We need to track where we look so that we can know when the cache is invalid. There
# is no nice way to do this, this is horrid. We intercept the os.listdir()
# (or os.scandir() for python 3.6+) calls while we run glob().
origlistdir = os.listdir
@@ -1907,7 +1868,7 @@ class CookerCollectFiles(object):
try:
re.compile(mask)
bbmasks.append(mask)
- except sre_constants.error:
+ except re.error:
collectlog.critical("BBMASK contains an invalid regular expression, ignoring: %s" % mask)
# Then validate the combined regular expressions. This should never
@@ -1915,7 +1876,7 @@ class CookerCollectFiles(object):
bbmask = "|".join(bbmasks)
try:
bbmask_compiled = re.compile(bbmask)
- except sre_constants.error:
+ except re.error:
collectlog.critical("BBMASK is not a valid regular expression, ignoring: %s" % bbmask)
bbmask = None
@@ -1923,7 +1884,7 @@ class CookerCollectFiles(object):
bbappend = []
for f in newfiles:
if bbmask and bbmask_compiled.search(f):
- collectlog.debug(1, "skipping masked file %s", f)
+ collectlog.debug("skipping masked file %s", f)
masked += 1
continue
if f.endswith('.bb'):
@@ -1931,7 +1892,7 @@ class CookerCollectFiles(object):
elif f.endswith('.bbappend'):
bbappend.append(f)
else:
- collectlog.debug(1, "skipping %s: unknown file extension", f)
+ collectlog.debug("skipping %s: unknown file extension", f)
# Build a list of .bbappend files for each .bb file
for f in bbappend:
@@ -1982,7 +1943,7 @@ class CookerCollectFiles(object):
# Calculate priorities for each file
for p in pkgfns:
realfn, cls, mc = bb.cache.virtualfn2realfn(p)
- priorities[p], regex = self.calc_bbfile_priority(realfn)
+ priorities[p], regex, _ = self.calc_bbfile_priority(realfn)
if regex in unmatched_regex:
matched_regex.add(regex)
unmatched_regex.remove(regex)
@@ -2092,34 +2053,34 @@ class Parser(multiprocessing.Process):
multiprocessing.util.Finalize(None, bb.fetch.fetcher_parse_save, exitpriority=1)
pending = []
+ havejobs = True
try:
- while True:
- try:
- self.quit.get_nowait()
- except queue.Empty:
- pass
- else:
+ while havejobs or pending:
+ if self.quit.is_set():
break
- if pending:
- result = pending.pop()
- else:
- try:
- job = self.jobs.pop()
- except IndexError:
- break
+ job = None
+ try:
+ job = self.jobs.pop()
+ except IndexError:
+ havejobs = False
+ if job:
result = self.parse(*job)
# Clear the siggen cache after parsing to control memory usage, its huge
bb.parse.siggen.postparsing_clean_cache()
- try:
- self.results.put(result, timeout=0.25)
- except queue.Full:
pending.append(result)
+
+ if pending:
+ try:
+ result = pending.pop()
+ self.results.put(result, timeout=0.05)
+ except queue.Full:
+ pending.append(result)
finally:
self.results.close()
self.results.join_thread()
- def parse(self, mc, cache, filename, appends):
+ def parse(self, mc, cache, filename, appends, layername):
try:
origfilter = bb.event.LogHandler.filter
# Record the filename we're parsing into any events generated
@@ -2133,7 +2094,7 @@ class Parser(multiprocessing.Process):
bb.event.set_class_handlers(self.handlers.copy())
bb.event.LogHandler.filter = parse_filter
- return True, mc, cache.parse(filename, appends)
+ return True, mc, cache.parse(filename, appends, layername)
except Exception as exc:
tb = sys.exc_info()[2]
exc.recipe = filename
@@ -2173,10 +2134,11 @@ class CookerParser(object):
for mc in self.cooker.multiconfigs:
for filename in self.mcfilelist[mc]:
appends = self.cooker.collections[mc].get_file_appends(filename)
+ layername = self.cooker.collections[mc].calc_bbfile_priority(filename)[2]
if not self.bb_caches[mc].cacheValid(filename, appends):
- self.willparse.add((mc, self.bb_caches[mc], filename, appends))
+ self.willparse.add((mc, self.bb_caches[mc], filename, appends, layername))
else:
- self.fromcache.add((mc, self.bb_caches[mc], filename, appends))
+ self.fromcache.add((mc, self.bb_caches[mc], filename, appends, layername))
self.total = len(self.fromcache) + len(self.willparse)
self.toparse = len(self.willparse)
@@ -2185,6 +2147,7 @@ class CookerParser(object):
self.num_processes = min(int(self.cfgdata.getVar("BB_NUMBER_PARSE_THREADS") or
multiprocessing.cpu_count()), self.toparse)
+ bb.cache.SiggenRecipeInfo.reset()
self.start()
self.haveshutdown = False
self.syncthread = None
@@ -2195,7 +2158,7 @@ class CookerParser(object):
if self.toparse:
bb.event.fire(bb.event.ParseStarted(self.toparse), self.cfgdata)
- self.parser_quit = multiprocessing.Queue(maxsize=self.num_processes)
+ self.parser_quit = multiprocessing.Event()
self.result_queue = multiprocessing.Queue()
def chunkify(lst,n):
@@ -2210,7 +2173,7 @@ class CookerParser(object):
self.results = itertools.chain(self.results, self.parse_generator())
- def shutdown(self, clean=True):
+ def shutdown(self, clean=True, eventmsg="Parsing halted due to errors"):
if not self.toparse:
return
if self.haveshutdown:
@@ -2225,11 +2188,9 @@ class CookerParser(object):
bb.event.fire(event, self.cfgdata)
else:
+ bb.event.fire(bb.event.ParseError(eventmsg), self.cfgdata)
bb.error("Parsing halted due to errors, see error messages above")
- for process in self.processes:
- self.parser_quit.put(None)
-
# Cleanup the queue before call process.join(), otherwise there might be
# deadlocks.
while True:
@@ -2238,6 +2199,16 @@ class CookerParser(object):
except queue.Empty:
break
+ def sync_caches():
+ for c in self.bb_caches.values():
+ bb.cache.SiggenRecipeInfo.reset()
+ c.sync()
+
+ self.syncthread = threading.Thread(target=sync_caches, name="SyncThread")
+ self.syncthread.start()
+
+ self.parser_quit.set()
+
for process in self.processes:
process.join(0.5)
@@ -2258,18 +2229,9 @@ class CookerParser(object):
if hasattr(process, "close"):
process.close()
- self.parser_quit.close()
- # Allow data left in the cancel queue to be discarded
- self.parser_quit.cancel_join_thread()
-
- def sync_caches():
- for c in self.bb_caches.values():
- c.sync()
-
- sync = threading.Thread(target=sync_caches, name="SyncThread")
- self.syncthread = sync
- sync.start()
+ bb.codeparser.parser_cache_save()
bb.codeparser.parser_cache_savemerge()
+ bb.cache.SiggenRecipeInfo.reset()
bb.fetch.fetcher_parse_done()
if self.cooker.configuration.profile:
profiles = []
@@ -2287,9 +2249,9 @@ class CookerParser(object):
self.syncthread.join()
def load_cached(self):
- for mc, cache, filename, appends in self.fromcache:
- cached, infos = cache.load(filename, appends)
- yield not cached, mc, infos
+ for mc, cache, filename, appends, layername in self.fromcache:
+ infos = cache.loadCached(filename, appends)
+ yield False, mc, infos
def parse_generator(self):
empty = False
@@ -2344,7 +2306,7 @@ class CookerParser(object):
except bb.parse.ParseError as exc:
self.error += 1
logger.error(str(exc))
- self.shutdown(clean=False)
+ self.shutdown(clean=False, eventmsg=str(exc))
return False
except bb.data_smart.ExpansionError as exc:
self.error += 1
@@ -2387,11 +2349,13 @@ class CookerParser(object):
return True
def reparse(self, filename):
+ bb.cache.SiggenRecipeInfo.reset()
to_reparse = set()
for mc in self.cooker.multiconfigs:
- to_reparse.add((mc, filename, self.cooker.collections[mc].get_file_appends(filename)))
+ layername = self.cooker.collections[mc].calc_bbfile_priority(filename)[2]
+ to_reparse.add((mc, filename, self.cooker.collections[mc].get_file_appends(filename), layername))
- for mc, filename, appends in to_reparse:
- infos = self.bb_caches[mc].parse(filename, appends)
+ for mc, filename, appends, layername in to_reparse:
+ infos = self.bb_caches[mc].parse(filename, appends, layername)
for vfn, info_array in infos:
self.cooker.recipecaches[mc].add_from_recipeinfo(vfn, info_array)
diff --git a/bitbake/lib/bb/cookerdata.py b/bitbake/lib/bb/cookerdata.py
index d54ac932e5..0649e40995 100644
--- a/bitbake/lib/bb/cookerdata.py
+++ b/bitbake/lib/bb/cookerdata.py
@@ -160,12 +160,7 @@ def catch_parse_error(func):
def wrapped(fn, *args):
try:
return func(fn, *args)
- except IOError as exc:
- import traceback
- parselog.critical(traceback.format_exc())
- parselog.critical("Unable to parse %s: %s" % (fn, exc))
- raise bb.BBHandledException()
- except bb.data_smart.ExpansionError as exc:
+ except Exception as exc:
import traceback
bbdir = os.path.dirname(__file__) + os.sep
@@ -177,14 +172,11 @@ def catch_parse_error(func):
break
parselog.critical("Unable to parse %s" % fn, exc_info=(exc_class, exc, tb))
raise bb.BBHandledException()
- except bb.parse.ParseError as exc:
- parselog.critical(str(exc))
- raise bb.BBHandledException()
return wrapped
@catch_parse_error
def parse_config_file(fn, data, include=True):
- return bb.parse.handle(fn, data, include)
+ return bb.parse.handle(fn, data, include, baseconfig=True)
@catch_parse_error
def _inherit(bbclass, data):
@@ -254,6 +246,7 @@ class CookerDataBuilder(object):
filtered_keys = bb.utils.approved_variables()
bb.data.inheritFromOS(self.basedata, self.savedenv, filtered_keys)
self.basedata.setVar("BB_ORIGENV", self.savedenv)
+ self.basedata.setVar("__bbclasstype", "global")
if worker:
self.basedata.setVar("BB_WORKERCONTEXT", "1")
@@ -262,6 +255,7 @@ class CookerDataBuilder(object):
self.mcdata = {}
def parseBaseConfiguration(self, worker=False):
+ mcdata = {}
data_hash = hashlib.sha256()
try:
self.data = self.parseConfigurationFiles(self.prefiles, self.postfiles)
@@ -269,7 +263,6 @@ class CookerDataBuilder(object):
if self.data.getVar("BB_WORKERCONTEXT", False) is None and not worker:
bb.fetch.fetcher_init(self.data)
bb.parse.init_parser(self.data)
- bb.codeparser.parser_cache_init(self.data)
bb.event.fire(bb.event.ConfigParsed(), self.data)
@@ -287,29 +280,25 @@ class CookerDataBuilder(object):
bb.parse.init_parser(self.data)
data_hash.update(self.data.get_hash().encode('utf-8'))
- self.mcdata[''] = self.data
+ mcdata[''] = self.data
multiconfig = (self.data.getVar("BBMULTICONFIG") or "").split()
for config in multiconfig:
if config[0].isdigit():
bb.fatal("Multiconfig name '%s' is invalid as multiconfigs cannot start with a digit" % config)
- mcdata = self.parseConfigurationFiles(self.prefiles, self.postfiles, config)
- bb.event.fire(bb.event.ConfigParsed(), mcdata)
- self.mcdata[config] = mcdata
- data_hash.update(mcdata.get_hash().encode('utf-8'))
+ parsed_mcdata = self.parseConfigurationFiles(self.prefiles, self.postfiles, config)
+ bb.event.fire(bb.event.ConfigParsed(), parsed_mcdata)
+ mcdata[config] = parsed_mcdata
+ data_hash.update(parsed_mcdata.get_hash().encode('utf-8'))
if multiconfig:
- bb.event.fire(bb.event.MultiConfigParsed(self.mcdata), self.data)
+ bb.event.fire(bb.event.MultiConfigParsed(mcdata), self.data)
self.data_hash = data_hash.hexdigest()
- except (SyntaxError, bb.BBHandledException):
- raise bb.BBHandledException()
except bb.data_smart.ExpansionError as e:
logger.error(str(e))
raise bb.BBHandledException()
- except Exception:
- logger.exception("Error parsing configuration files")
- raise bb.BBHandledException()
+ bb.codeparser.update_module_dependencies(self.data)
# Handle obsolete variable names
d = self.data
@@ -330,17 +319,23 @@ class CookerDataBuilder(object):
if issues:
raise bb.BBHandledException()
+ for mc in mcdata:
+ mcdata[mc].renameVar("__depends", "__base_depends")
+ mcdata[mc].setVar("__bbclasstype", "recipe")
+
# Create a copy so we can reset at a later date when UIs disconnect
- self.origdata = self.data
- self.data = bb.data.createCopy(self.origdata)
- self.mcdata[''] = self.data
+ self.mcorigdata = mcdata
+ for mc in mcdata:
+ self.mcdata[mc] = bb.data.createCopy(mcdata[mc])
+ self.data = self.mcdata['']
def reset(self):
# We may not have run parseBaseConfiguration() yet
- if not hasattr(self, 'origdata'):
+ if not hasattr(self, 'mcorigdata'):
return
- self.data = bb.data.createCopy(self.origdata)
- self.mcdata[''] = self.data
+ for mc in self.mcorigdata:
+ self.mcdata[mc] = bb.data.createCopy(self.mcorigdata[mc])
+ self.data = self.mcdata['']
def _findLayerConf(self, data):
return findConfigFile("bblayers.conf", data)
@@ -355,12 +350,17 @@ class CookerDataBuilder(object):
layerconf = self._findLayerConf(data)
if layerconf:
- parselog.debug(2, "Found bblayers.conf (%s)", layerconf)
+ parselog.debug2("Found bblayers.conf (%s)", layerconf)
# By definition bblayers.conf is in conf/ of TOPDIR.
# We may have been called with cwd somewhere else so reset TOPDIR
data.setVar("TOPDIR", os.path.dirname(os.path.dirname(layerconf)))
data = parse_config_file(layerconf, data)
+ if not data.getVar("BB_CACHEDIR"):
+ data.setVar("BB_CACHEDIR", "${TOPDIR}/cache")
+
+ bb.codeparser.parser_cache_init(data.getVar("BB_CACHEDIR"))
+
layers = (data.getVar('BBLAYERS') or "").split()
broken_layers = []
@@ -382,8 +382,10 @@ class CookerDataBuilder(object):
parselog.critical("Please check BBLAYERS in %s" % (layerconf))
raise bb.BBHandledException()
+ layerseries = None
+ compat_entries = {}
for layer in layers:
- parselog.debug(2, "Adding layer %s", layer)
+ parselog.debug2("Adding layer %s", layer)
if 'HOME' in approved and '~' in layer:
layer = os.path.expanduser(layer)
if layer.endswith('/'):
@@ -394,8 +396,27 @@ class CookerDataBuilder(object):
data.expandVarref('LAYERDIR')
data.expandVarref('LAYERDIR_RE')
+ # Sadly we can't have nice things.
+ # Some layers think they're going to be 'clever' and copy the values from
+ # another layer, e.g. using ${LAYERSERIES_COMPAT_core}. The whole point of
+ # this mechanism is to make it clear which releases a layer supports and
+ # show when a layer master branch is bitrotting and is unmaintained.
+ # We therefore avoid people doing this here.
+ collections = (data.getVar('BBFILE_COLLECTIONS') or "").split()
+ for c in collections:
+ compat_entry = data.getVar("LAYERSERIES_COMPAT_%s" % c)
+ if compat_entry:
+ compat_entries[c] = set(compat_entry.split())
+ data.delVar("LAYERSERIES_COMPAT_%s" % c)
+ if not layerseries:
+ layerseries = set((data.getVar("LAYERSERIES_CORENAMES") or "").split())
+ if layerseries:
+ data.delVar("LAYERSERIES_CORENAMES")
+
data.delVar('LAYERDIR_RE')
data.delVar('LAYERDIR')
+ for c in compat_entries:
+ data.setVar("LAYERSERIES_COMPAT_%s" % c, " ".join(sorted(compat_entries[c])))
bbfiles_dynamic = (data.getVar('BBFILES_DYNAMIC') or "").split()
collections = (data.getVar('BBFILE_COLLECTIONS') or "").split()
@@ -414,13 +435,15 @@ class CookerDataBuilder(object):
if invalid:
bb.fatal("BBFILES_DYNAMIC entries must be of the form {!}<collection name>:<filename pattern>, not:\n %s" % "\n ".join(invalid))
- layerseries = set((data.getVar("LAYERSERIES_CORENAMES") or "").split())
collections_tmp = collections[:]
for c in collections:
collections_tmp.remove(c)
if c in collections_tmp:
bb.fatal("Found duplicated BBFILE_COLLECTIONS '%s', check bblayers.conf or layer.conf to fix it." % c)
- compat = set((data.getVar("LAYERSERIES_COMPAT_%s" % c) or "").split())
+
+ compat = set()
+ if c in compat_entries:
+ compat = compat_entries[c]
if compat and not layerseries:
bb.fatal("No core layer found to work with layer '%s'. Missing entry in bblayers.conf?" % c)
if compat and not (compat & layerseries):
@@ -429,16 +452,21 @@ class CookerDataBuilder(object):
elif not compat and not data.getVar("BB_WORKERCONTEXT"):
bb.warn("Layer %s should set LAYERSERIES_COMPAT_%s in its conf/layer.conf file to list the core layer names it is compatible with." % (c, c))
+ data.setVar("LAYERSERIES_CORENAMES", " ".join(sorted(layerseries)))
+
if not data.getVar("BBPATH"):
msg = "The BBPATH variable is not set"
if not layerconf:
msg += (" and bitbake did not find a conf/bblayers.conf file in"
" the expected location.\nMaybe you accidentally"
" invoked bitbake from the wrong directory?")
- raise SystemExit(msg)
+ bb.fatal(msg)
if not data.getVar("TOPDIR"):
data.setVar("TOPDIR", os.path.abspath(os.getcwd()))
+ if not data.getVar("BB_CACHEDIR"):
+ data.setVar("BB_CACHEDIR", "${TOPDIR}/cache")
+ bb.codeparser.parser_cache_init(data.getVar("BB_CACHEDIR"))
data = parse_config_file(os.path.join("conf", "bitbake.conf"), data)
@@ -465,3 +493,54 @@ class CookerDataBuilder(object):
return data
+ @staticmethod
+ def _parse_recipe(bb_data, bbfile, appends, mc, layername):
+ bb_data.setVar("__BBMULTICONFIG", mc)
+ bb_data.setVar("FILE_LAYERNAME", layername)
+
+ bbfile_loc = os.path.abspath(os.path.dirname(bbfile))
+ bb.parse.cached_mtime_noerror(bbfile_loc)
+
+ if appends:
+ bb_data.setVar('__BBAPPEND', " ".join(appends))
+
+ return bb.parse.handle(bbfile, bb_data)
+
+ def parseRecipeVariants(self, bbfile, appends, virtonly=False, mc=None, layername=None):
+ """
+ Load and parse one .bb build file
+ Return the data and whether parsing resulted in the file being skipped
+ """
+
+ if virtonly:
+ (bbfile, virtual, mc) = bb.cache.virtualfn2realfn(bbfile)
+ bb_data = self.mcdata[mc].createCopy()
+ bb_data.setVar("__ONLYFINALISE", virtual or "default")
+ return self._parse_recipe(bb_data, bbfile, appends, mc, layername)
+
+ if mc is not None:
+ bb_data = self.mcdata[mc].createCopy()
+ return self._parse_recipe(bb_data, bbfile, appends, mc, layername)
+
+ bb_data = self.data.createCopy()
+ datastores = self._parse_recipe(bb_data, bbfile, appends, '', layername)
+
+ for mc in self.mcdata:
+ if not mc:
+ continue
+ bb_data = self.mcdata[mc].createCopy()
+ newstores = self._parse_recipe(bb_data, bbfile, appends, mc, layername)
+ for ns in newstores:
+ datastores["mc:%s:%s" % (mc, ns)] = newstores[ns]
+
+ return datastores
+
+ def parseRecipe(self, virtualfn, appends, layername):
+ """
+ Return a complete set of data for fn.
+ To do this, we need to parse the file.
+ """
+ logger.debug("Parsing %s (full)" % virtualfn)
+ (fn, virtual, mc) = bb.cache.virtualfn2realfn(virtualfn)
+ datastores = self.parseRecipeVariants(virtualfn, appends, virtonly=True, layername=layername)
+ return datastores[virtual]
diff --git a/bitbake/lib/bb/daemonize.py b/bitbake/lib/bb/daemonize.py
index 4957bfd4b8..7689404436 100644
--- a/bitbake/lib/bb/daemonize.py
+++ b/bitbake/lib/bb/daemonize.py
@@ -1,4 +1,6 @@
#
+# Copyright BitBake Contributors
+#
# SPDX-License-Identifier: GPL-2.0-only
#
diff --git a/bitbake/lib/bb/data.py b/bitbake/lib/bb/data.py
index c09d9b04bb..505f42950f 100644
--- a/bitbake/lib/bb/data.py
+++ b/bitbake/lib/bb/data.py
@@ -4,14 +4,16 @@ BitBake 'Data' implementations
Functions for interacting with the data structure used by the
BitBake build tools.
-The expandKeys and update_data are the most expensive
-operations. At night the cookie monster came by and
+expandKeys and datastore iteration are the most expensive
+operations. Updating overrides is now "on the fly" but still based
+on the idea of the cookie monster introduced by zecke:
+"At night the cookie monster came by and
suggested 'give me cookies on setting the variables and
things will work out'. Taking this suggestion into account
applying the skills from the not yet passed 'Entwurf und
Analyse von Algorithmen' lecture and the cookie
monster seems to be right. We will track setVar more carefully
-to have faster update_data and expandKeys operations.
+to have faster datastore operations."
This is a trade-off between speed and memory again but
the speed is more critical here.
@@ -26,11 +28,6 @@ the speed is more critical here.
import sys, os, re
import hashlib
-if sys.argv[0][-5:] == "pydoc":
- path = os.path.dirname(os.path.dirname(sys.argv[1]))
-else:
- path = os.path.dirname(os.path.dirname(sys.argv[0]))
-sys.path.insert(0, path)
from itertools import groupby
from bb import data_smart
@@ -70,10 +67,6 @@ def keys(d):
"""Return a list of keys in d"""
return d.keys()
-
-__expand_var_regexp__ = re.compile(r"\${[^{}]+}")
-__expand_python_regexp__ = re.compile(r"\${@.+?}")
-
def expand(s, d, varname = None):
"""Variable expansion using the data store"""
return d.expand(s, varname)
@@ -121,8 +114,8 @@ def emit_var(var, o=sys.__stdout__, d = init(), all=False):
if d.getVarFlag(var, 'python', False) and func:
return False
- export = d.getVarFlag(var, "export", False)
- unexport = d.getVarFlag(var, "unexport", False)
+ export = bb.utils.to_boolean(d.getVarFlag(var, "export"))
+ unexport = bb.utils.to_boolean(d.getVarFlag(var, "unexport"))
if not all and not export and not unexport and not func:
return False
@@ -195,8 +188,8 @@ def emit_env(o=sys.__stdout__, d = init(), all=False):
def exported_keys(d):
return (key for key in d.keys() if not key.startswith('__') and
- d.getVarFlag(key, 'export', False) and
- not d.getVarFlag(key, 'unexport', False))
+ bb.utils.to_boolean(d.getVarFlag(key, 'export')) and
+ not bb.utils.to_boolean(d.getVarFlag(key, 'unexport')))
def exported_vars(d):
k = list(exported_keys(d))
@@ -268,13 +261,41 @@ def emit_func_python(func, o=sys.__stdout__, d = init()):
newdeps |= set((d.getVarFlag(dep, "vardeps") or "").split())
newdeps -= seen
-def update_data(d):
- """Performs final steps upon the datastore, including application of overrides"""
- d.finalize(parent = True)
+def build_dependencies(key, keys, mod_funcs, shelldeps, varflagsexcl, ignored_vars, d, codeparsedata):
+ def handle_contains(value, contains, exclusions, d):
+ newvalue = []
+ if value:
+ newvalue.append(str(value))
+ for k in sorted(contains):
+ if k in exclusions or k in ignored_vars:
+ continue
+ l = (d.getVar(k) or "").split()
+ for item in sorted(contains[k]):
+ for word in item.split():
+ if not word in l:
+ newvalue.append("\n%s{%s} = Unset" % (k, item))
+ break
+ else:
+ newvalue.append("\n%s{%s} = Set" % (k, item))
+ return "".join(newvalue)
+
+ def handle_remove(value, deps, removes, d):
+ for r in sorted(removes):
+ r2 = d.expandWithRefs(r, None)
+ value += "\n_remove of %s" % r
+ deps |= r2.references
+ deps = deps | (keys & r2.execs)
+ value = handle_contains(value, r2.contains, exclusions, d)
+ return value
-def build_dependencies(key, keys, shelldeps, varflagsexcl, ignored_vars, d):
deps = set()
try:
+ if key in mod_funcs:
+ exclusions = set()
+ moddep = bb.codeparser.modulecode_deps[key]
+ value = handle_contains("", moddep[3], exclusions, d)
+ return frozenset((moddep[0] | keys & moddep[1]) - ignored_vars), value
+
if key[-1] == ']':
vf = key[:-1].split('[')
if vf[1] == "vardepvalueexclude":
@@ -282,48 +303,24 @@ def build_dependencies(key, keys, shelldeps, varflagsexcl, ignored_vars, d):
value, parser = d.getVarFlag(vf[0], vf[1], False, retparser=True)
deps |= parser.references
deps = deps | (keys & parser.execs)
- return deps, value
+ deps -= ignored_vars
+ return frozenset(deps), value
varflags = d.getVarFlags(key, ["vardeps", "vardepvalue", "vardepsexclude", "exports", "postfuncs", "prefuncs", "lineno", "filename"]) or {}
vardeps = varflags.get("vardeps")
exclusions = varflags.get("vardepsexclude", "").split()
- def handle_contains(value, contains, exclusions, d):
- newvalue = []
- if value:
- newvalue.append(str(value))
- for k in sorted(contains):
- if k in exclusions or k in ignored_vars:
- continue
- l = (d.getVar(k) or "").split()
- for item in sorted(contains[k]):
- for word in item.split():
- if not word in l:
- newvalue.append("\n%s{%s} = Unset" % (k, item))
- break
- else:
- newvalue.append("\n%s{%s} = Set" % (k, item))
- return "".join(newvalue)
-
- def handle_remove(value, deps, removes, d):
- for r in sorted(removes):
- r2 = d.expandWithRefs(r, None)
- value += "\n_remove of %s" % r
- deps |= r2.references
- deps = deps | (keys & r2.execs)
- return value
-
if "vardepvalue" in varflags:
value = varflags.get("vardepvalue")
elif varflags.get("func"):
if varflags.get("python"):
- value = d.getVarFlag(key, "_content", False)
+ value = codeparsedata.getVarFlag(key, "_content", False)
parser = bb.codeparser.PythonParser(key, logger)
parser.parse_python(value, filename=varflags.get("filename"), lineno=varflags.get("lineno"))
deps = deps | parser.references
deps = deps | (keys & parser.execs)
value = handle_contains(value, parser.contains, exclusions, d)
else:
- value, parsedvar = d.getVarFlag(key, "_content", False, retparser=True)
+ value, parsedvar = codeparsedata.getVarFlag(key, "_content", False, retparser=True)
parser = bb.codeparser.ShellParser(key, logger)
parser.parse_shell(parsedvar.value)
deps = deps | shelldeps
@@ -365,36 +362,43 @@ def build_dependencies(key, keys, shelldeps, varflagsexcl, ignored_vars, d):
deps |= set((vardeps or "").split())
deps -= set(exclusions)
+ deps -= ignored_vars
except bb.parse.SkipRecipe:
raise
except Exception as e:
bb.warn("Exception during build_dependencies for %s" % key)
raise
- return deps, value
+ return frozenset(deps), value
#bb.note("Variable %s references %s and calls %s" % (key, str(deps), str(execs)))
#d.setVarFlag(key, "vardeps", deps)
def generate_dependencies(d, ignored_vars):
- keys = set(key for key in d if not key.startswith("__"))
- shelldeps = set(key for key in d.getVar("__exportlist", False) if d.getVarFlag(key, "export", False) and not d.getVarFlag(key, "unexport", False))
+ mod_funcs = set(bb.codeparser.modulecode_deps.keys())
+ keys = set(key for key in d if not key.startswith("__")) | mod_funcs
+ shelldeps = set(key for key in d.getVar("__exportlist", False) if bb.utils.to_boolean(d.getVarFlag(key, "export")) and not bb.utils.to_boolean(d.getVarFlag(key, "unexport")))
varflagsexcl = d.getVar('BB_SIGNATURE_EXCLUDE_FLAGS')
+ codeparserd = d.createCopy()
+ for forced in (d.getVar('BB_HASH_CODEPARSER_VALS') or "").split():
+ key, value = forced.split("=", 1)
+ codeparserd.setVar(key, value)
+
deps = {}
values = {}
tasklist = d.getVar('__BBTASKS', False) or []
for task in tasklist:
- deps[task], values[task] = build_dependencies(task, keys, shelldeps, varflagsexcl, ignored_vars, d)
+ deps[task], values[task] = build_dependencies(task, keys, mod_funcs, shelldeps, varflagsexcl, ignored_vars, d, codeparserd)
newdeps = deps[task]
seen = set()
while newdeps:
- nextdeps = newdeps - ignored_vars
+ nextdeps = newdeps
seen |= nextdeps
newdeps = set()
for dep in nextdeps:
if dep not in deps:
- deps[dep], values[dep] = build_dependencies(dep, keys, shelldeps, varflagsexcl, ignored_vars, d)
+ deps[dep], values[dep] = build_dependencies(dep, keys, mod_funcs, shelldeps, varflagsexcl, ignored_vars, d, codeparserd)
newdeps |= deps[dep]
newdeps -= seen
#print "For %s: %s" % (task, str(deps[task]))
@@ -413,7 +417,6 @@ def generate_dependency_hash(tasklist, gendeps, lookupcache, ignored_vars, fn):
else:
data = [data]
- gendeps[task] -= ignored_vars
newdeps = gendeps[task]
seen = set()
while newdeps:
@@ -421,9 +424,6 @@ def generate_dependency_hash(tasklist, gendeps, lookupcache, ignored_vars, fn):
seen |= nextdeps
newdeps = set()
for dep in nextdeps:
- if dep in ignored_vars:
- continue
- gendeps[dep] -= ignored_vars
newdeps |= gendeps[dep]
newdeps -= seen
@@ -435,13 +435,13 @@ def generate_dependency_hash(tasklist, gendeps, lookupcache, ignored_vars, fn):
data.append(str(var))
k = fn + ":" + task
basehash[k] = hashlib.sha256("".join(data).encode("utf-8")).hexdigest()
- taskdeps[task] = alldeps
+ taskdeps[task] = frozenset(seen)
return taskdeps, basehash
def inherits_class(klass, d):
val = d.getVar('__inherit_cache', False) or []
- needle = os.path.join('classes', '%s.bbclass' % klass)
+ needle = '/%s.bbclass' % klass
for v in val:
if v.endswith(needle):
return True
diff --git a/bitbake/lib/bb/data_smart.py b/bitbake/lib/bb/data_smart.py
index dd20ca557e..0128a5bb17 100644
--- a/bitbake/lib/bb/data_smart.py
+++ b/bitbake/lib/bb/data_smart.py
@@ -16,7 +16,10 @@ BitBake build tools.
#
# Based on functions from the base bb module, Copyright 2003 Holger Schurig
-import copy, re, sys, traceback
+import builtins
+import copy
+import re
+import sys
from collections.abc import MutableMapping
import logging
import hashlib
@@ -29,7 +32,7 @@ logger = logging.getLogger("BitBake.Data")
__setvar_keyword__ = [":append", ":prepend", ":remove"]
__setvar_regexp__ = re.compile(r'(?P<base>.*?)(?P<keyword>:append|:prepend|:remove)(:(?P<add>[^A-Z]*))?$')
__expand_var_regexp__ = re.compile(r"\${[a-zA-Z0-9\-_+./~:]+?}")
-__expand_python_regexp__ = re.compile(r"\${@.+?}")
+__expand_python_regexp__ = re.compile(r"\${@(?:{.*?}|.)+?}")
__whitespace_split__ = re.compile(r'(\s)')
__override_regexp__ = re.compile(r'[a-z0-9]+')
@@ -92,10 +95,11 @@ def infer_caller_details(loginfo, parent = False, varval = True):
loginfo['func'] = func
class VariableParse:
- def __init__(self, varname, d, val = None):
+ def __init__(self, varname, d, unexpanded_value = None, val = None):
self.varname = varname
self.d = d
self.value = val
+ self.unexpanded_value = unexpanded_value
self.references = set()
self.execs = set()
@@ -119,6 +123,11 @@ class VariableParse:
else:
code = match.group()[3:-1]
+ # Do not run code that contains one or more unexpanded variables
+ # instead return the code with the characters we removed put back
+ if __expand_var_regexp__.findall(code):
+ return "${@" + code + "}"
+
if self.varname:
varname = 'Var <%s>' % self.varname
else:
@@ -144,19 +153,21 @@ class VariableParse:
value = utils.better_eval(codeobj, DataContext(self.d), {'d' : self.d})
return str(value)
-
class DataContext(dict):
+ excluded = set([i for i in dir(builtins) if not i.startswith('_')] + ['oe'])
+
def __init__(self, metadata, **kwargs):
self.metadata = metadata
dict.__init__(self, **kwargs)
self['d'] = metadata
+ self.context = set(bb.utils.get_context())
def __missing__(self, key):
- # Skip commonly accessed invalid variables
- if key in ['bb', 'oe', 'int', 'bool', 'time', 'str', 'os']:
+ if key in self.excluded or key in self.context:
raise KeyError(key)
+
value = self.metadata.getVar(key)
- if value is None or self.metadata.getVarFlag(key, 'func', False):
+ if value is None:
raise KeyError(key)
else:
return value
@@ -442,9 +453,9 @@ class DataSmart(MutableMapping):
def expandWithRefs(self, s, varname):
if not isinstance(s, str): # sanity check
- return VariableParse(varname, self, s)
+ return VariableParse(varname, self, s, s)
- varparse = VariableParse(varname, self)
+ varparse = VariableParse(varname, self, s)
while s.find('${') != -1:
olds = s
@@ -476,24 +487,19 @@ class DataSmart(MutableMapping):
def expand(self, s, varname = None):
return self.expandWithRefs(s, varname).value
- def finalize(self, parent = False):
- return
-
- def internal_finalize(self, parent = False):
- """Performs final steps upon the datastore, including application of overrides"""
- self.overrides = None
-
def need_overrides(self):
if self.overrides is not None:
return
if self.inoverride:
return
+ overrride_stack = []
for count in range(5):
self.inoverride = True
# Can end up here recursively so setup dummy values
self.overrides = []
self.overridesset = set()
self.overrides = (self.getVar("OVERRIDES") or "").split(":") or []
+ overrride_stack.append(self.overrides)
self.overridesset = set(self.overrides)
self.inoverride = False
self.expand_cache = {}
@@ -503,7 +509,7 @@ class DataSmart(MutableMapping):
self.overrides = newoverrides
self.overridesset = set(self.overrides)
else:
- bb.fatal("Overrides could not be expanded into a stable state after 5 iterations, overrides must be being referenced by other overridden variables in some recursive fashion. Please provide your configuration to bitbake-devel so we can laugh, er, I mean try and understand how to make it work.")
+ bb.fatal("Overrides could not be expanded into a stable state after 5 iterations, overrides must be being referenced by other overridden variables in some recursive fashion. Please provide your configuration to bitbake-devel so we can laugh, er, I mean try and understand how to make it work. The list of failing override expansions: %s" % "\n".join(str(s) for s in overrride_stack))
def initVar(self, var):
self.expand_cache = {}
@@ -514,18 +520,18 @@ class DataSmart(MutableMapping):
dest = self.dict
while dest:
if var in dest:
- return dest[var], self.overridedata.get(var, None)
+ return dest[var]
if "_data" not in dest:
break
dest = dest["_data"]
- return None, self.overridedata.get(var, None)
+ return None
def _makeShadowCopy(self, var):
if var in self.dict:
return
- local_var, _ = self._findVar(var)
+ local_var = self._findVar(var)
if local_var:
self.dict[var] = copy.copy(local_var)
@@ -633,7 +639,7 @@ class DataSmart(MutableMapping):
nextnew.update(vardata.references)
nextnew.update(vardata.contains.keys())
new = nextnew
- self.internal_finalize(True)
+ self.overrides = None
def _setvar_update_overrides(self, var, **loginfo):
# aka pay the cookie monster
@@ -720,7 +726,7 @@ class DataSmart(MutableMapping):
if ':' in var:
override = var[var.rfind(':')+1:]
shortvar = var[:var.rfind(':')]
- while override and override.islower():
+ while override and __override_regexp__.match(override):
try:
if shortvar in self.overridedata:
# Force CoW by recreating the list first
@@ -775,13 +781,18 @@ class DataSmart(MutableMapping):
return None
cachename = var + "[" + flag + "]"
+ if not expand and retparser and cachename in self.expand_cache:
+ return self.expand_cache[cachename].unexpanded_value, self.expand_cache[cachename]
+
if expand and cachename in self.expand_cache:
return self.expand_cache[cachename].value
- local_var, overridedata = self._findVar(var)
+ local_var = self._findVar(var)
value = None
removes = set()
- if flag == "_content" and overridedata is not None and not parsing:
+ if flag == "_content" and not parsing:
+ overridedata = self.overridedata.get(var, None)
+ if flag == "_content" and not parsing and overridedata is not None:
match = False
active = {}
self.need_overrides()
@@ -896,7 +907,7 @@ class DataSmart(MutableMapping):
def delVarFlag(self, var, flag, **loginfo):
self.expand_cache = {}
- local_var, _ = self._findVar(var)
+ local_var = self._findVar(var)
if not local_var:
return
if not var in self.dict:
@@ -939,7 +950,7 @@ class DataSmart(MutableMapping):
self.dict[var][i] = flags[i]
def getVarFlags(self, var, expand = False, internalflags=False):
- local_var, _ = self._findVar(var)
+ local_var = self._findVar(var)
flags = {}
if local_var:
diff --git a/bitbake/lib/bb/event.py b/bitbake/lib/bb/event.py
index df020551e3..4761c86880 100644
--- a/bitbake/lib/bb/event.py
+++ b/bitbake/lib/bb/event.py
@@ -68,29 +68,39 @@ _catchall_handlers = {}
_eventfilter = None
_uiready = False
_thread_lock = threading.Lock()
-_thread_lock_enabled = False
-
-if hasattr(__builtins__, '__setitem__'):
- builtins = __builtins__
-else:
- builtins = __builtins__.__dict__
+_heartbeat_enabled = False
+_should_exit = threading.Event()
def enable_threadlock():
- global _thread_lock_enabled
- _thread_lock_enabled = True
+ # Always needed now
+ return
def disable_threadlock():
- global _thread_lock_enabled
- _thread_lock_enabled = False
+ # Always needed now
+ return
+
+def enable_heartbeat():
+ global _heartbeat_enabled
+ _heartbeat_enabled = True
+
+def disable_heartbeat():
+ global _heartbeat_enabled
+ _heartbeat_enabled = False
+
+#
+# In long running code, this function should be called periodically
+# to check if we should exit due to an interuption (.e.g Ctrl+C from the UI)
+#
+def check_for_interrupts(d):
+ global _should_exit
+ if _should_exit.is_set():
+ bb.warn("Exiting due to interrupt.")
+ raise bb.BBHandledException()
def execute_handler(name, handler, event, d):
event.data = d
- addedd = False
- if 'd' not in builtins:
- builtins['d'] = d
- addedd = True
try:
- ret = handler(event)
+ ret = handler(event, d)
except (bb.parse.SkipRecipe, bb.BBHandledException):
raise
except Exception:
@@ -104,8 +114,7 @@ def execute_handler(name, handler, event, d):
raise
finally:
del event.data
- if addedd:
- del builtins['d']
+
def fire_class_handlers(event, d):
if isinstance(event, logging.LogRecord):
@@ -132,8 +141,14 @@ def print_ui_queue():
if not _uiready:
from bb.msg import BBLogFormatter
# Flush any existing buffered content
- sys.stdout.flush()
- sys.stderr.flush()
+ try:
+ sys.stdout.flush()
+ except:
+ pass
+ try:
+ sys.stderr.flush()
+ except:
+ pass
stdout = logging.StreamHandler(sys.stdout)
stderr = logging.StreamHandler(sys.stderr)
formatter = BBLogFormatter("%(levelname)s: %(message)s")
@@ -174,36 +189,30 @@ def print_ui_queue():
def fire_ui_handlers(event, d):
global _thread_lock
- global _thread_lock_enabled
if not _uiready:
# No UI handlers registered yet, queue up the messages
ui_queue.append(event)
return
- if _thread_lock_enabled:
- _thread_lock.acquire()
-
- errors = []
- for h in _ui_handlers:
- #print "Sending event %s" % event
- try:
- if not _ui_logfilters[h].filter(event):
- continue
- # We use pickle here since it better handles object instances
- # which xmlrpc's marshaller does not. Events *must* be serializable
- # by pickle.
- if hasattr(_ui_handlers[h].event, "sendpickle"):
- _ui_handlers[h].event.sendpickle((pickle.dumps(event)))
- else:
- _ui_handlers[h].event.send(event)
- except:
- errors.append(h)
- for h in errors:
- del _ui_handlers[h]
-
- if _thread_lock_enabled:
- _thread_lock.release()
+ with bb.utils.lock_timeout(_thread_lock):
+ errors = []
+ for h in _ui_handlers:
+ #print "Sending event %s" % event
+ try:
+ if not _ui_logfilters[h].filter(event):
+ continue
+ # We use pickle here since it better handles object instances
+ # which xmlrpc's marshaller does not. Events *must* be serializable
+ # by pickle.
+ if hasattr(_ui_handlers[h].event, "sendpickle"):
+ _ui_handlers[h].event.sendpickle((pickle.dumps(event)))
+ else:
+ _ui_handlers[h].event.send(event)
+ except:
+ errors.append(h)
+ for h in errors:
+ del _ui_handlers[h]
def fire(event, d):
"""Fire off an Event"""
@@ -247,15 +256,16 @@ def register(name, handler, mask=None, filename=None, lineno=None, data=None):
if handler is not None:
# handle string containing python code
if isinstance(handler, str):
- tmp = "def %s(e):\n%s" % (name, handler)
+ tmp = "def %s(e, d):\n%s" % (name, handler)
+ # Inject empty lines to make code match lineno in filename
+ if lineno is not None:
+ tmp = "\n" * (lineno-1) + tmp
try:
code = bb.methodpool.compile_cache(tmp)
if not code:
if filename is None:
- filename = "%s(e)" % name
+ filename = "%s(e, d)" % name
code = compile(tmp, filename, "exec", ast.PyCF_ONLY_AST)
- if lineno is not None:
- ast.increment_lineno(code, lineno-1)
code = compile(code, filename, "exec")
bb.methodpool.compile_cache_add(tmp, code)
except SyntaxError:
@@ -317,21 +327,23 @@ def set_eventfilter(func):
_eventfilter = func
def register_UIHhandler(handler, mainui=False):
- bb.event._ui_handler_seq = bb.event._ui_handler_seq + 1
- _ui_handlers[_ui_handler_seq] = handler
- level, debug_domains = bb.msg.constructLogOptions()
- _ui_logfilters[_ui_handler_seq] = UIEventFilter(level, debug_domains)
- if mainui:
- global _uiready
- _uiready = _ui_handler_seq
- return _ui_handler_seq
+ with bb.utils.lock_timeout(_thread_lock):
+ bb.event._ui_handler_seq = bb.event._ui_handler_seq + 1
+ _ui_handlers[_ui_handler_seq] = handler
+ level, debug_domains = bb.msg.constructLogOptions()
+ _ui_logfilters[_ui_handler_seq] = UIEventFilter(level, debug_domains)
+ if mainui:
+ global _uiready
+ _uiready = _ui_handler_seq
+ return _ui_handler_seq
def unregister_UIHhandler(handlerNum, mainui=False):
if mainui:
global _uiready
_uiready = False
- if handlerNum in _ui_handlers:
- del _ui_handlers[handlerNum]
+ with bb.utils.lock_timeout(_thread_lock):
+ if handlerNum in _ui_handlers:
+ del _ui_handlers[handlerNum]
return
def get_uihandler():
@@ -845,3 +857,19 @@ class FindSigInfoResult(Event):
def __init__(self, result):
Event.__init__(self)
self.result = result
+
+class GetTaskSignatureResult(Event):
+ """
+ Event to return results from GetTaskSignatures command
+ """
+ def __init__(self, sig):
+ Event.__init__(self)
+ self.sig = sig
+
+class ParseError(Event):
+ """
+ Event to indicate parse failed
+ """
+ def __init__(self, msg):
+ super().__init__()
+ self._msg = msg
diff --git a/bitbake/lib/bb/exceptions.py b/bitbake/lib/bb/exceptions.py
index ecbad59970..801db9c82f 100644
--- a/bitbake/lib/bb/exceptions.py
+++ b/bitbake/lib/bb/exceptions.py
@@ -1,4 +1,6 @@
#
+# Copyright BitBake Contributors
+#
# SPDX-License-Identifier: GPL-2.0-only
#
diff --git a/bitbake/lib/bb/fetch2/__init__.py b/bitbake/lib/bb/fetch2/__init__.py
index ac557176d7..5bf2c4b8cf 100644
--- a/bitbake/lib/bb/fetch2/__init__.py
+++ b/bitbake/lib/bb/fetch2/__init__.py
@@ -290,12 +290,12 @@ class URI(object):
def _param_str_split(self, string, elmdelim, kvdelim="="):
ret = collections.OrderedDict()
- for k, v in [x.split(kvdelim, 1) for x in string.split(elmdelim) if x]:
+ for k, v in [x.split(kvdelim, 1) if kvdelim in x else (x, None) for x in string.split(elmdelim) if x]:
ret[k] = v
return ret
def _param_str_join(self, dict_, elmdelim, kvdelim="="):
- return elmdelim.join([kvdelim.join([k, v]) for k, v in dict_.items()])
+ return elmdelim.join([kvdelim.join([k, v]) if v else k for k, v in dict_.items()])
@property
def hostport(self):
@@ -388,7 +388,7 @@ def decodeurl(url):
if s:
if not '=' in s:
raise MalformedUrl(url, "The URL: '%s' is invalid: parameter %s does not specify a value (missing '=')" % (url, s))
- s1, s2 = s.split('=')
+ s1, s2 = s.split('=', 1)
p[s1] = s2
return type, host, urllib.parse.unquote(path), user, pswd, p
@@ -469,6 +469,7 @@ def uri_replace(ud, uri_find, uri_replace, replacements, d, mirrortarball=None):
basename = os.path.basename(mirrortarball)
# Kill parameters, they make no sense for mirror tarballs
uri_decoded[5] = {}
+ uri_find_decoded[5] = {}
elif ud.localpath and ud.method.supports_checksum(ud):
basename = os.path.basename(ud.localpath)
if basename:
@@ -517,7 +518,7 @@ def fetcher_init(d):
else:
raise FetchError("Invalid SRCREV cache policy of: %s" % srcrev_policy)
- _checksum_cache.init_cache(d)
+ _checksum_cache.init_cache(d.getVar("BB_CACHEDIR"))
for m in methods:
if hasattr(m, "init"):
@@ -545,7 +546,7 @@ def mirror_from_string(data):
bb.warn('Invalid mirror data %s, should have paired members.' % data)
return list(zip(*[iter(mirrors)]*2))
-def verify_checksum(ud, d, precomputed={}):
+def verify_checksum(ud, d, precomputed={}, localpath=None, fatal_nochecksum=True):
"""
verify the MD5 and SHA256 checksum for downloaded src
@@ -559,17 +560,19 @@ def verify_checksum(ud, d, precomputed={}):
file against those in the recipe each time, rather than only after
downloading. See https://bugzilla.yoctoproject.org/show_bug.cgi?id=5571.
"""
-
if ud.ignore_checksums or not ud.method.supports_checksum(ud):
return {}
+ if localpath is None:
+ localpath = ud.localpath
+
def compute_checksum_info(checksum_id):
checksum_name = getattr(ud, "%s_name" % checksum_id)
if checksum_id in precomputed:
checksum_data = precomputed[checksum_id]
else:
- checksum_data = getattr(bb.utils, "%s_file" % checksum_id)(ud.localpath)
+ checksum_data = getattr(bb.utils, "%s_file" % checksum_id)(localpath)
checksum_expected = getattr(ud, "%s_expected" % checksum_id)
@@ -595,17 +598,13 @@ def verify_checksum(ud, d, precomputed={}):
checksum_lines = ["SRC_URI[%s] = \"%s\"" % (ci["name"], ci["data"])]
# If no checksum has been provided
- if ud.method.recommends_checksum(ud) and all(ci["expected"] is None for ci in checksum_infos):
+ if fatal_nochecksum and ud.method.recommends_checksum(ud) and all(ci["expected"] is None for ci in checksum_infos):
messages = []
strict = d.getVar("BB_STRICT_CHECKSUM") or "0"
# If strict checking enabled and neither sum defined, raise error
if strict == "1":
- messages.append("No checksum specified for '%s', please add at " \
- "least one to the recipe:" % ud.localpath)
- messages.extend(checksum_lines)
- logger.error("\n".join(messages))
- raise NoChecksumError("Missing SRC_URI checksum", ud.url)
+ raise NoChecksumError("\n".join(checksum_lines))
bb.event.fire(MissingChecksumEvent(ud.url, **checksum_event), d)
@@ -627,7 +626,7 @@ def verify_checksum(ud, d, precomputed={}):
for ci in checksum_infos:
if ci["expected"] and ci["expected"] != ci["data"]:
messages.append("File: '%s' has %s checksum '%s' when '%s' was " \
- "expected" % (ud.localpath, ci["id"], ci["data"], ci["expected"]))
+ "expected" % (localpath, ci["id"], ci["data"], ci["expected"]))
bad_checksum = ci["data"]
if bad_checksum:
@@ -745,13 +744,16 @@ def subprocess_setup():
# SIGPIPE errors are known issues with gzip/bash
signal.signal(signal.SIGPIPE, signal.SIG_DFL)
-def get_autorev(d):
- # only not cache src rev in autorev case
+def mark_recipe_nocache(d):
if d.getVar('BB_SRCREV_POLICY') != "cache":
d.setVar('BB_DONT_CACHE', '1')
+
+def get_autorev(d):
+ mark_recipe_nocache(d)
+ d.setVar("__BBAUTOREV_SEEN", True)
return "AUTOINC"
-def get_srcrev(d, method_name='sortable_revision'):
+def _get_srcrev(d, method_name='sortable_revision'):
"""
Return the revision string, usually for use in the version string (PV) of the current package
Most packages usually only have one SCM so we just pass on the call.
@@ -765,13 +767,14 @@ def get_srcrev(d, method_name='sortable_revision'):
that fetcher provides a method with the given name and the same signature as sortable_revision.
"""
- d.setVar("__BBSEENSRCREV", "1")
+ d.setVar("__BBSRCREV_SEEN", "1")
recursion = d.getVar("__BBINSRCREV")
if recursion:
raise FetchError("There are recursive references in fetcher variables, likely through SRC_URI")
d.setVar("__BBINSRCREV", True)
scms = []
+ revs = []
fetcher = Fetch(d.getVar('SRC_URI').split(), d)
urldata = fetcher.ud
for u in urldata:
@@ -779,16 +782,19 @@ def get_srcrev(d, method_name='sortable_revision'):
scms.append(u)
if not scms:
- raise FetchError("SRCREV was used yet no valid SCM was found in SRC_URI")
+ d.delVar("__BBINSRCREV")
+ return "", revs
+
if len(scms) == 1 and len(urldata[scms[0]].names) == 1:
autoinc, rev = getattr(urldata[scms[0]].method, method_name)(urldata[scms[0]], d, urldata[scms[0]].names[0])
+ revs.append(rev)
if len(rev) > 10:
rev = rev[:10]
d.delVar("__BBINSRCREV")
if autoinc:
- return "AUTOINC+" + rev
- return rev
+ return "AUTOINC+" + rev, revs
+ return rev, revs
#
# Mutiple SCMs are in SRC_URI so we resort to SRCREV_FORMAT
@@ -804,6 +810,7 @@ def get_srcrev(d, method_name='sortable_revision'):
ud = urldata[scm]
for name in ud.names:
autoinc, rev = getattr(ud.method, method_name)(ud, d, name)
+ revs.append(rev)
seenautoinc = seenautoinc or autoinc
if len(rev) > 10:
rev = rev[:10]
@@ -821,7 +828,21 @@ def get_srcrev(d, method_name='sortable_revision'):
format = "AUTOINC+" + format
d.delVar("__BBINSRCREV")
- return format
+ return format, revs
+
+def get_hashvalue(d, method_name='sortable_revision'):
+ pkgv, revs = _get_srcrev(d, method_name=method_name)
+ return " ".join(revs)
+
+def get_pkgv_string(d, method_name='sortable_revision'):
+ pkgv, revs = _get_srcrev(d, method_name=method_name)
+ return pkgv
+
+def get_srcrev(d, method_name='sortable_revision'):
+ pkgv, revs = _get_srcrev(d, method_name=method_name)
+ if not pkgv:
+ raise FetchError("SRCREV was used yet no valid SCM was found in SRC_URI")
+ return pkgv
def localpath(url, d):
fetcher = bb.fetch2.Fetch([url], d)
@@ -847,10 +868,17 @@ FETCH_EXPORT_VARS = ['HOME', 'PATH',
'DBUS_SESSION_BUS_ADDRESS',
'P4CONFIG',
'SSL_CERT_FILE',
+ 'NODE_EXTRA_CA_CERTS',
'AWS_PROFILE',
'AWS_ACCESS_KEY_ID',
'AWS_SECRET_ACCESS_KEY',
- 'AWS_DEFAULT_REGION']
+ 'AWS_ROLE_ARN',
+ 'AWS_WEB_IDENTITY_TOKEN_FILE',
+ 'AWS_DEFAULT_REGION',
+ 'AWS_SESSION_TOKEN',
+ 'GIT_CACHE_PATH',
+ 'REMOTE_CONTAINERS_IPC',
+ 'SSL_CERT_DIR']
def get_fetcher_environment(d):
newenv = {}
@@ -915,7 +943,10 @@ def runfetchcmd(cmd, d, quiet=False, cleanup=None, log=None, workdir=None):
elif e.stderr:
output = "output:\n%s" % e.stderr
else:
- output = "no output"
+ if log:
+ output = "see logfile for output"
+ else:
+ output = "no output"
error_message = "Fetch command %s failed with exit code %s, %s" % (e.command, e.exitcode, output)
except bb.process.CmdError as e:
error_message = "Fetch command %s could not be run:\n%s" % (e.command, e.msg)
@@ -977,6 +1008,7 @@ def build_mirroruris(origud, mirrors, ld):
try:
newud = FetchData(newuri, ld)
+ newud.ignore_checksums = True
newud.setup_localpath(ld)
except bb.fetch2.BBFetchException as e:
logger.debug("Mirror fetch failure for url %s (original url: %s)" % (newuri, origud.url))
@@ -1086,7 +1118,8 @@ def try_mirror_url(fetch, origud, ud, ld, check = False):
logger.debug("Mirror fetch failure for url %s (original url: %s)" % (ud.url, origud.url))
logger.debug(str(e))
try:
- ud.method.clean(ud, ld)
+ if ud.method.cleanup_upon_failure():
+ ud.method.clean(ud, ld)
except UnboundLocalError:
pass
return False
@@ -1097,6 +1130,8 @@ def try_mirror_url(fetch, origud, ud, ld, check = False):
def ensure_symlink(target, link_name):
if not os.path.exists(link_name):
+ dirname = os.path.dirname(link_name)
+ bb.utils.mkdirhier(dirname)
if os.path.islink(link_name):
# Broken symbolic link
os.unlink(link_name)
@@ -1209,6 +1244,7 @@ def srcrev_internal_helper(ud, d, name):
if srcrev == "INVALID" or not srcrev:
raise FetchError("Please set a valid SRCREV for url %s (possible key names are %s, or use a ;rev=X URL parameter)" % (str(attempts), ud.url), ud.url)
if srcrev == "AUTOINC":
+ d.setVar("__BBAUTOREV_ACTED_UPON", True)
srcrev = ud.method.latest_revision(ud, d, name)
return srcrev
@@ -1220,23 +1256,21 @@ def get_checksum_file_list(d):
SRC_URI as a space-separated string
"""
fetch = Fetch([], d, cache = False, localonly = True)
-
- dl_dir = d.getVar('DL_DIR')
filelist = []
for u in fetch.urls:
ud = fetch.ud[u]
-
if ud and isinstance(ud.method, local.Local):
- paths = ud.method.localpaths(ud, d)
+ found = False
+ paths = ud.method.localfile_searchpaths(ud, d)
for f in paths:
pth = ud.decodedurl
- if f.startswith(dl_dir):
- # The local fetcher's behaviour is to return a path under DL_DIR if it couldn't find the file anywhere else
- if os.path.exists(f):
- bb.warn("Getting checksum for %s SRC_URI entry %s: file not found except in DL_DIR" % (d.getVar('PN'), os.path.basename(f)))
- else:
- bb.warn("Unable to get checksum for %s SRC_URI entry %s: file could not be found" % (d.getVar('PN'), os.path.basename(f)))
+ if os.path.exists(f):
+ found = True
filelist.append(f + ":" + str(os.path.exists(f)))
+ if not found:
+ bb.fatal(("Unable to get checksum for %s SRC_URI entry %s: file could not be found"
+ "\nThe following paths were searched:"
+ "\n%s") % (d.getVar('PN'), os.path.basename(f), '\n'.join(paths)))
return " ".join(filelist)
@@ -1283,18 +1317,13 @@ class FetchData(object):
if checksum_name in self.parm:
checksum_expected = self.parm[checksum_name]
- elif self.type not in ["http", "https", "ftp", "ftps", "sftp", "s3", "az"]:
+ elif self.type not in ["http", "https", "ftp", "ftps", "sftp", "s3", "az", "crate", "gs"]:
checksum_expected = None
else:
checksum_expected = d.getVarFlag("SRC_URI", checksum_name)
setattr(self, "%s_expected" % checksum_id, checksum_expected)
- for checksum_id in CHECKSUM_LIST:
- configure_checksum(checksum_id)
-
- self.ignore_checksums = False
-
self.names = self.parm.get("name",'default').split(',')
self.method = None
@@ -1316,6 +1345,11 @@ class FetchData(object):
if hasattr(self.method, "urldata_init"):
self.method.urldata_init(self, d)
+ for checksum_id in CHECKSUM_LIST:
+ configure_checksum(checksum_id)
+
+ self.ignore_checksums = False
+
if "localpath" in self.parm:
# if user sets localpath for file, use it instead.
self.localpath = self.parm["localpath"]
@@ -1395,6 +1429,9 @@ class FetchMethod(object):
Is localpath something that can be represented by a checksum?
"""
+ # We cannot compute checksums for None
+ if urldata.localpath is None:
+ return False
# We cannot compute checksums for directories
if os.path.isdir(urldata.localpath):
return False
@@ -1407,6 +1444,12 @@ class FetchMethod(object):
"""
return False
+ def cleanup_upon_failure(self):
+ """
+ When a fetch fails, should clean() be called?
+ """
+ return True
+
def verify_donestamp(self, ud, d):
"""
Verify the donestamp file
@@ -1549,6 +1592,7 @@ class FetchMethod(object):
unpackdir = rootdir
if not unpack or not cmd:
+ urldata.unpack_tracer.unpack("file-copy", unpackdir)
# If file == dest, then avoid any copies, as we already put the file into dest!
dest = os.path.join(unpackdir, os.path.basename(file))
if file != dest and not (os.path.exists(dest) and os.path.samefile(file, dest)):
@@ -1563,6 +1607,8 @@ class FetchMethod(object):
destdir = urlpath.rsplit("/", 1)[0] + '/'
bb.utils.mkdirhier("%s/%s" % (unpackdir, destdir))
cmd = 'cp -fpPRH "%s" "%s"' % (file, destdir)
+ else:
+ urldata.unpack_tracer.unpack("archive-extract", unpackdir)
if not cmd:
return
@@ -1654,6 +1700,55 @@ class FetchMethod(object):
"""
return []
+
+class DummyUnpackTracer(object):
+ """
+ Abstract API definition for a class that traces unpacked source files back
+ to their respective upstream SRC_URI entries, for software composition
+ analysis, license compliance and detailed SBOM generation purposes.
+ User may load their own unpack tracer class (instead of the dummy
+ one) by setting the BB_UNPACK_TRACER_CLASS config parameter.
+ """
+ def start(self, unpackdir, urldata_dict, d):
+ """
+ Start tracing the core Fetch.unpack process, using an index to map
+ unpacked files to each SRC_URI entry.
+ This method is called by Fetch.unpack and it may receive nested calls by
+ gitsm and npmsw fetchers, that expand SRC_URI entries by adding implicit
+ URLs and by recursively calling Fetch.unpack from new (nested) Fetch
+ instances.
+ """
+ return
+ def start_url(self, url):
+ """Start tracing url unpack process.
+ This method is called by Fetch.unpack before the fetcher-specific unpack
+ method starts, and it may receive nested calls by gitsm and npmsw
+ fetchers.
+ """
+ return
+ def unpack(self, unpack_type, destdir):
+ """
+ Set unpack_type and destdir for current url.
+ This method is called by the fetcher-specific unpack method after url
+ tracing started.
+ """
+ return
+ def finish_url(self, url):
+ """Finish tracing url unpack process and update the file index.
+ This method is called by Fetch.unpack after the fetcher-specific unpack
+ method finished its job, and it may receive nested calls by gitsm
+ and npmsw fetchers.
+ """
+ return
+ def complete(self):
+ """
+ Finish tracing the Fetch.unpack process, and check if all nested
+ Fecth.unpack calls (if any) have been completed; if so, save collected
+ metadata.
+ """
+ return
+
+
class Fetch(object):
def __init__(self, urls, d, cache = True, localonly = False, connection_cache = None):
if localonly and cache:
@@ -1674,10 +1769,30 @@ class Fetch(object):
if key in urldata_cache:
self.ud = urldata_cache[key]
+ # the unpack_tracer object needs to be made available to possible nested
+ # Fetch instances (when those are created by gitsm and npmsw fetchers)
+ # so we set it as a global variable
+ global unpack_tracer
+ try:
+ unpack_tracer
+ except NameError:
+ class_path = d.getVar("BB_UNPACK_TRACER_CLASS")
+ if class_path:
+ # use user-defined unpack tracer class
+ import importlib
+ module_name, _, class_name = class_path.rpartition(".")
+ module = importlib.import_module(module_name)
+ class_ = getattr(module, class_name)
+ unpack_tracer = class_()
+ else:
+ # fall back to the dummy/abstract class
+ unpack_tracer = DummyUnpackTracer()
+
for url in urls:
if url not in self.ud:
try:
self.ud[url] = FetchData(url, d, localonly)
+ self.ud[url].unpack_tracer = unpack_tracer
except NonLocalMethod:
if localonly:
self.ud[url] = None
@@ -1716,6 +1831,7 @@ class Fetch(object):
network = self.d.getVar("BB_NO_NETWORK")
premirroronly = bb.utils.to_boolean(self.d.getVar("BB_FETCH_PREMIRRORONLY"))
+ checksum_missing_messages = []
for u in urls:
ud = self.ud[u]
ud.setup_localpath(self.d)
@@ -1727,7 +1843,6 @@ class Fetch(object):
try:
self.d.setVar("BB_NO_NETWORK", network)
-
if m.verify_donestamp(ud, self.d) and not m.need_update(ud, self.d):
done = True
elif m.try_premirror(ud, self.d):
@@ -1780,7 +1895,7 @@ class Fetch(object):
logger.debug(str(e))
firsterr = e
# Remove any incomplete fetch
- if not verified_stamp:
+ if not verified_stamp and m.cleanup_upon_failure():
m.clean(ud, self.d)
logger.debug("Trying MIRRORS")
mirrors = mirror_from_string(self.d.getVar('MIRRORS'))
@@ -1799,13 +1914,20 @@ class Fetch(object):
raise ChecksumError("Stale Error Detected")
except BBFetchException as e:
- if isinstance(e, ChecksumError):
+ if isinstance(e, NoChecksumError):
+ (message, _) = e.args
+ checksum_missing_messages.append(message)
+ continue
+ elif isinstance(e, ChecksumError):
logger.error("Checksum failure fetching %s" % u)
raise
finally:
if ud.lockfile:
bb.utils.unlockfile(lf)
+ if checksum_missing_messages:
+ logger.error("Missing SRC_URI checksum, please add those to the recipe: \n%s", "\n".join(checksum_missing_messages))
+ raise BBFetchException("There was some missing checksums in the recipe")
def checkstatus(self, urls=None):
"""
@@ -1836,7 +1958,7 @@ class Fetch(object):
ret = m.try_mirrors(self, ud, self.d, mirrors, True)
if not ret:
- raise FetchError("URL %s doesn't work" % u, u)
+ raise FetchError("URL doesn't work", u)
def unpack(self, root, urls=None):
"""
@@ -1846,6 +1968,8 @@ class Fetch(object):
if not urls:
urls = self.urls
+ unpack_tracer.start(root, self.ud, self.d)
+
for u in urls:
ud = self.ud[u]
ud.setup_localpath(self.d)
@@ -1853,11 +1977,15 @@ class Fetch(object):
if ud.lockfile:
lf = bb.utils.lockfile(ud.lockfile)
+ unpack_tracer.start_url(u)
ud.method.unpack(ud, root, self.d)
+ unpack_tracer.finish_url(u)
if ud.lockfile:
bb.utils.unlockfile(lf)
+ unpack_tracer.complete()
+
def clean(self, urls=None):
"""
Clean files that the fetcher gets or places
@@ -1959,6 +2087,7 @@ from . import npm
from . import npmsw
from . import az
from . import crate
+from . import gcp
methods.append(local.Local())
methods.append(wget.Wget())
@@ -1980,3 +2109,4 @@ methods.append(npm.Npm())
methods.append(npmsw.NpmShrinkWrap())
methods.append(az.Az())
methods.append(crate.Crate())
+methods.append(gcp.GCP())
diff --git a/bitbake/lib/bb/fetch2/crate.py b/bitbake/lib/bb/fetch2/crate.py
index f4ddc782a9..01d49435c3 100644
--- a/bitbake/lib/bb/fetch2/crate.py
+++ b/bitbake/lib/bb/fetch2/crate.py
@@ -33,7 +33,7 @@ class Crate(Wget):
return ud.type in ['crate']
def recommends_checksum(self, urldata):
- return False
+ return True
def urldata_init(self, ud, d):
"""
@@ -56,12 +56,14 @@ class Crate(Wget):
if len(parts) < 5:
raise bb.fetch2.ParameterError("Invalid URL: Must be crate://HOST/NAME/VERSION", ud.url)
- # last field is version
- version = parts[len(parts) - 1]
+ # version is expected to be the last token
+ # but ignore possible url parameters which will be used
+ # by the top fetcher class
+ version = parts[-1].split(";")[0]
# second to last field is name
- name = parts[len(parts) - 2]
+ name = parts[-2]
# host (this is to allow custom crate registries to be specified
- host = '/'.join(parts[2:len(parts) - 2])
+ host = '/'.join(parts[2:-2])
# if using upstream just fix it up nicely
if host == 'crates.io':
@@ -69,9 +71,10 @@ class Crate(Wget):
ud.url = "https://%s/%s/%s/download" % (host, name, version)
ud.parm['downloadfilename'] = "%s-%s.crate" % (name, version)
- ud.parm['name'] = name
+ if 'name' not in ud.parm:
+ ud.parm['name'] = '%s-%s' % (name, version)
- logger.debug("Fetching %s to %s" % (ud.url, ud.parm['downloadfilename']))
+ logger.debug2("Fetching %s to %s" % (ud.url, ud.parm['downloadfilename']))
def unpack(self, ud, rootdir, d):
"""
@@ -95,11 +98,13 @@ class Crate(Wget):
save_cwd = os.getcwd()
os.chdir(rootdir)
- pn = d.getVar('BPN')
- if pn == ud.parm.get('name'):
+ bp = d.getVar('BP')
+ if bp == ud.parm.get('name'):
cmd = "tar -xz --no-same-owner -f %s" % thefile
+ ud.unpack_tracer.unpack("crate-extract", rootdir)
else:
cargo_bitbake = self._cargo_bitbake_path(rootdir)
+ ud.unpack_tracer.unpack("cargo-extract", cargo_bitbake)
cmd = "tar -xz --no-same-owner -f %s -C %s" % (thefile, cargo_bitbake)
diff --git a/bitbake/lib/bb/fetch2/gcp.py b/bitbake/lib/bb/fetch2/gcp.py
new file mode 100644
index 0000000000..f40ce2eaa5
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/gcp.py
@@ -0,0 +1,101 @@
+"""
+BitBake 'Fetch' implementation for Google Cloup Platform Storage.
+
+Class for fetching files from Google Cloud Storage using the
+Google Cloud Storage Python Client. The GCS Python Client must
+be correctly installed, configured and authenticated prior to use.
+Additionally, gsutil must also be installed.
+
+"""
+
+# Copyright (C) 2023, Snap Inc.
+#
+# Based in part on bb.fetch2.s3:
+# Copyright (C) 2017 Andre McCurdy
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+# Based on functions from the base bb module, Copyright 2003 Holger Schurig
+
+import os
+import bb
+import urllib.parse, urllib.error
+from bb.fetch2 import FetchMethod
+from bb.fetch2 import FetchError
+from bb.fetch2 import logger
+
+class GCP(FetchMethod):
+ """
+ Class to fetch urls via GCP's Python API.
+ """
+ def __init__(self):
+ self.gcp_client = None
+
+ def supports(self, ud, d):
+ """
+ Check to see if a given url can be fetched with GCP.
+ """
+ return ud.type in ['gs']
+
+ def recommends_checksum(self, urldata):
+ return True
+
+ def urldata_init(self, ud, d):
+ if 'downloadfilename' in ud.parm:
+ ud.basename = ud.parm['downloadfilename']
+ else:
+ ud.basename = os.path.basename(ud.path)
+
+ ud.localfile = d.expand(urllib.parse.unquote(ud.basename))
+ ud.basecmd = "gsutil stat"
+
+ def get_gcp_client(self):
+ from google.cloud import storage
+ self.gcp_client = storage.Client(project=None)
+
+ def download(self, ud, d):
+ """
+ Fetch urls using the GCP API.
+ Assumes localpath was called first.
+ """
+ logger.debug2(f"Trying to download gs://{ud.host}{ud.path} to {ud.localpath}")
+ if self.gcp_client is None:
+ self.get_gcp_client()
+
+ bb.fetch2.check_network_access(d, ud.basecmd, f"gs://{ud.host}{ud.path}")
+ runfetchcmd("%s %s" % (ud.basecmd, f"gs://{ud.host}{ud.path}"), d)
+
+ # Path sometimes has leading slash, so strip it
+ path = ud.path.lstrip("/")
+ blob = self.gcp_client.bucket(ud.host).blob(path)
+ blob.download_to_filename(ud.localpath)
+
+ # Additional sanity checks copied from the wget class (although there
+ # are no known issues which mean these are required, treat the GCP API
+ # tool with a little healthy suspicion).
+ if not os.path.exists(ud.localpath):
+ raise FetchError(f"The GCP API returned success for gs://{ud.host}{ud.path} but {ud.localpath} doesn't exist?!")
+
+ if os.path.getsize(ud.localpath) == 0:
+ os.remove(ud.localpath)
+ raise FetchError(f"The downloaded file for gs://{ud.host}{ud.path} resulted in a zero size file?! Deleting and failing since this isn't right.")
+
+ return True
+
+ def checkstatus(self, fetch, ud, d):
+ """
+ Check the status of a URL.
+ """
+ logger.debug2(f"Checking status of gs://{ud.host}{ud.path}")
+ if self.gcp_client is None:
+ self.get_gcp_client()
+
+ bb.fetch2.check_network_access(d, ud.basecmd, f"gs://{ud.host}{ud.path}")
+ runfetchcmd("%s %s" % (ud.basecmd, f"gs://{ud.host}{ud.path}"), d)
+
+ # Path sometimes has leading slash, so strip it
+ path = ud.path.lstrip("/")
+ if self.gcp_client.bucket(ud.host).blob(path).exists() == False:
+ raise FetchError(f"The GCP API reported that gs://{ud.host}{ud.path} does not exist")
+ else:
+ return True
diff --git a/bitbake/lib/bb/fetch2/git.py b/bitbake/lib/bb/fetch2/git.py
index 23f8c0da8f..c7ff769fdf 100644
--- a/bitbake/lib/bb/fetch2/git.py
+++ b/bitbake/lib/bb/fetch2/git.py
@@ -44,13 +44,27 @@ Supported SRC_URI options are:
- nobranch
Don't check the SHA validation for branch. set this option for the recipe
- referring to commit which is valid in tag instead of branch.
+ referring to commit which is valid in any namespace (branch, tag, ...)
+ instead of branch.
The default is "0", set nobranch=1 if needed.
+- subpath
+ Limit the checkout to a specific subpath of the tree.
+ By default, checkout the whole tree, set subpath=<path> if needed
+
+- destsuffix
+ The name of the path in which to place the checkout.
+ By default, the path is git/, set destsuffix=<suffix> if needed
+
- usehead
For local git:// urls to use the current branch HEAD as the revision for use with
AUTOREV. Implies nobranch.
+- lfs
+ Enable the checkout to use LFS for large files. This will download all LFS files
+ in the download step, as the unpack step does not have network access.
+ The default is "1", set lfs=0 to skip.
+
"""
# Copyright (C) 2005 Richard Purdie
@@ -64,6 +78,7 @@ import fnmatch
import os
import re
import shlex
+import shutil
import subprocess
import tempfile
import bb
@@ -72,6 +87,7 @@ from contextlib import contextmanager
from bb.fetch2 import FetchMethod
from bb.fetch2 import runfetchcmd
from bb.fetch2 import logger
+from bb.fetch2 import trusted_network
sha1_re = re.compile(r'^[0-9a-f]{40}$')
@@ -134,6 +150,9 @@ class Git(FetchMethod):
def supports_checksum(self, urldata):
return False
+ def cleanup_upon_failure(self):
+ return False
+
def urldata_init(self, ud, d):
"""
init git specific variable within url data
@@ -243,7 +262,7 @@ class Git(FetchMethod):
for name in ud.names:
ud.unresolvedrev[name] = 'HEAD'
- ud.basecmd = d.getVar("FETCHCMD_git") or "git -c core.fsyncobjectfiles=0 -c gc.autoDetach=false -c core.pager=cat"
+ ud.basecmd = d.getVar("FETCHCMD_git") or "git -c gc.autoDetach=false -c core.pager=cat -c safe.bareRepository=all"
write_tarballs = d.getVar("BB_GENERATE_MIRROR_TARBALLS") or "0"
ud.write_tarballs = write_tarballs != "0" or ud.rebaseable
@@ -258,7 +277,7 @@ class Git(FetchMethod):
ud.unresolvedrev[name] = ud.revisions[name]
ud.revisions[name] = self.latest_revision(ud, d, name)
- gitsrcname = '%s%s' % (ud.host.replace(':', '.'), ud.path.replace('/', '.').replace('*', '.').replace(' ','_'))
+ gitsrcname = '%s%s' % (ud.host.replace(':', '.'), ud.path.replace('/', '.').replace('*', '.').replace(' ','_').replace('(', '_').replace(')', '_'))
if gitsrcname.startswith('.'):
gitsrcname = gitsrcname[1:]
@@ -309,7 +328,10 @@ class Git(FetchMethod):
return ud.clonedir
def need_update(self, ud, d):
- return self.clonedir_need_update(ud, d) or self.shallow_tarball_need_update(ud) or self.tarball_need_update(ud)
+ return self.clonedir_need_update(ud, d) \
+ or self.shallow_tarball_need_update(ud) \
+ or self.tarball_need_update(ud) \
+ or self.lfs_need_update(ud, d)
def clonedir_need_update(self, ud, d):
if not os.path.exists(ud.clonedir):
@@ -321,6 +343,15 @@ class Git(FetchMethod):
return True
return False
+ def lfs_need_update(self, ud, d):
+ if self.clonedir_need_update(ud, d):
+ return True
+
+ for name in ud.names:
+ if not self._lfs_objects_downloaded(ud, d, name, ud.clonedir):
+ return True
+ return False
+
def clonedir_need_shallow_revs(self, ud, d):
for rev in ud.shallow_revs:
try:
@@ -340,6 +371,16 @@ class Git(FetchMethod):
# is not possible
if bb.utils.to_boolean(d.getVar("BB_FETCH_PREMIRRORONLY")):
return True
+ # If the url is not in trusted network, that is, BB_NO_NETWORK is set to 0
+ # and BB_ALLOWED_NETWORKS does not contain the host that ud.url uses, then
+ # we need to try premirrors first as using upstream is destined to fail.
+ if not trusted_network(d, ud.url):
+ return True
+ # the following check is to ensure incremental fetch in downloads, this is
+ # because the premirror might be old and does not contain the new rev required,
+ # and this will cause a total removal and new clone. So if we can reach to
+ # network, we prefer upstream over premirror, though the premirror might contain
+ # the new rev.
if os.path.exists(ud.clonedir):
return False
return True
@@ -353,17 +394,54 @@ class Git(FetchMethod):
if ud.shallow and os.path.exists(ud.fullshallow) and self.need_update(ud, d):
ud.localpath = ud.fullshallow
return
- elif os.path.exists(ud.fullmirror) and not os.path.exists(ud.clonedir):
- bb.utils.mkdirhier(ud.clonedir)
- runfetchcmd("tar -xzf %s" % ud.fullmirror, d, workdir=ud.clonedir)
-
+ elif os.path.exists(ud.fullmirror) and self.need_update(ud, d):
+ if not os.path.exists(ud.clonedir):
+ bb.utils.mkdirhier(ud.clonedir)
+ runfetchcmd("tar -xzf %s" % ud.fullmirror, d, workdir=ud.clonedir)
+ else:
+ tmpdir = tempfile.mkdtemp(dir=d.getVar('DL_DIR'))
+ runfetchcmd("tar -xzf %s" % ud.fullmirror, d, workdir=tmpdir)
+ output = runfetchcmd("%s remote" % ud.basecmd, d, quiet=True, workdir=ud.clonedir)
+ if 'mirror' in output:
+ runfetchcmd("%s remote rm mirror" % ud.basecmd, d, workdir=ud.clonedir)
+ runfetchcmd("%s remote add --mirror=fetch mirror %s" % (ud.basecmd, tmpdir), d, workdir=ud.clonedir)
+ fetch_cmd = "LANG=C %s fetch -f --update-head-ok --progress mirror " % (ud.basecmd)
+ runfetchcmd(fetch_cmd, d, workdir=ud.clonedir)
repourl = self._get_repo_url(ud)
+ needs_clone = False
+ if os.path.exists(ud.clonedir):
+ # The directory may exist, but not be the top level of a bare git
+ # repository in which case it needs to be deleted and re-cloned.
+ try:
+ # Since clones can be bare, use --absolute-git-dir instead of --show-toplevel
+ output = runfetchcmd("LANG=C %s rev-parse --absolute-git-dir" % ud.basecmd, d, workdir=ud.clonedir)
+ toplevel = output.rstrip()
+
+ if not bb.utils.path_is_descendant(toplevel, ud.clonedir):
+ logger.warning("Top level directory '%s' is not a descendant of '%s'. Re-cloning", toplevel, ud.clonedir)
+ needs_clone = True
+ except bb.fetch2.FetchError as e:
+ logger.warning("Unable to get top level for %s (not a git directory?): %s", ud.clonedir, e)
+ needs_clone = True
+ except FileNotFoundError as e:
+ logger.warning("%s", e)
+ needs_clone = True
+
+ if needs_clone:
+ shutil.rmtree(ud.clonedir)
+ else:
+ needs_clone = True
+
# If the repo still doesn't exist, fallback to cloning it
- if not os.path.exists(ud.clonedir):
- # We do this since git will use a "-l" option automatically for local urls where possible
+ if needs_clone:
+ # We do this since git will use a "-l" option automatically for local urls where possible,
+ # but it doesn't work when git/objects is a symlink, only works when it is a directory.
if repourl.startswith("file://"):
- repourl = repourl[7:]
+ repourl_path = repourl[7:]
+ objects = os.path.join(repourl_path, 'objects')
+ if os.path.isdir(objects) and not os.path.islink(objects):
+ repourl = repourl_path
clone_cmd = "LANG=C %s clone --bare --mirror %s %s --progress" % (ud.basecmd, shlex.quote(repourl), ud.clonedir)
if ud.proto.lower() != 'file':
bb.fetch2.check_network_access(d, clone_cmd, ud.url)
@@ -377,7 +455,11 @@ class Git(FetchMethod):
runfetchcmd("%s remote rm origin" % ud.basecmd, d, workdir=ud.clonedir)
runfetchcmd("%s remote add --mirror=fetch origin %s" % (ud.basecmd, shlex.quote(repourl)), d, workdir=ud.clonedir)
- fetch_cmd = "LANG=C %s fetch -f --progress %s refs/*:refs/*" % (ud.basecmd, shlex.quote(repourl))
+
+ if ud.nobranch:
+ fetch_cmd = "LANG=C %s fetch -f --progress %s refs/*:refs/*" % (ud.basecmd, shlex.quote(repourl))
+ else:
+ fetch_cmd = "LANG=C %s fetch -f --progress %s refs/heads/*:refs/heads/* refs/tags/*:refs/tags/*" % (ud.basecmd, shlex.quote(repourl))
if ud.proto.lower() != 'file':
bb.fetch2.check_network_access(d, fetch_cmd, ud.url)
progresshandler = GitProgressHandler(d)
@@ -400,15 +482,14 @@ class Git(FetchMethod):
if missing_rev:
raise bb.fetch2.FetchError("Unable to find revision %s even from upstream" % missing_rev)
- if self._contains_lfs(ud, d, ud.clonedir) and self._need_lfs(ud):
+ if self.lfs_need_update(ud, d):
# Unpack temporary working copy, use it to run 'git checkout' to force pre-fetching
# of all LFS blobs needed at the srcrev.
#
# It would be nice to just do this inline here by running 'git-lfs fetch'
# on the bare clonedir, but that operation requires a working copy on some
# releases of Git LFS.
- tmpdir = tempfile.mkdtemp(dir=d.getVar('DL_DIR'))
- try:
+ with tempfile.TemporaryDirectory(dir=d.getVar('DL_DIR')) as tmpdir:
# Do the checkout. This implicitly involves a Git LFS fetch.
Git.unpack(self, ud, tmpdir, d)
@@ -424,10 +505,8 @@ class Git(FetchMethod):
# Only do this if the unpack resulted in a .git/lfs directory being
# created; this only happens if at least one blob needed to be
# downloaded.
- if os.path.exists(os.path.join(tmpdir, "git", ".git", "lfs")):
- runfetchcmd("tar -cf - lfs | tar -xf - -C %s" % ud.clonedir, d, workdir="%s/git/.git" % tmpdir)
- finally:
- bb.utils.remove(tmpdir, recurse=True)
+ if os.path.exists(os.path.join(ud.destdir, ".git", "lfs")):
+ runfetchcmd("tar -cf - lfs | tar -xf - -C %s" % ud.clonedir, d, workdir="%s/.git" % ud.destdir)
def build_mirror_data(self, ud, d):
@@ -465,7 +544,7 @@ class Git(FetchMethod):
logger.info("Creating tarball of git repository")
with create_atomic(ud.fullmirror) as tfile:
- mtime = runfetchcmd("git log --all -1 --format=%cD", d,
+ mtime = runfetchcmd("{} log --all -1 --format=%cD".format(ud.basecmd), d,
quiet=True, workdir=ud.clonedir)
runfetchcmd("tar -czf %s --owner oe:0 --group oe:0 --mtime \"%s\" ."
% (tfile, mtime), d, workdir=ud.clonedir)
@@ -553,6 +632,8 @@ class Git(FetchMethod):
destdir = ud.destdir = os.path.join(destdir, destsuffix)
if os.path.exists(destdir):
bb.utils.prunedir(destdir)
+ if not ud.bareclone:
+ ud.unpack_tracer.unpack("git", destdir)
need_lfs = self._need_lfs(ud)
@@ -562,13 +643,12 @@ class Git(FetchMethod):
source_found = False
source_error = []
- if not source_found:
- clonedir_is_up_to_date = not self.clonedir_need_update(ud, d)
- if clonedir_is_up_to_date:
- runfetchcmd("%s clone %s %s/ %s" % (ud.basecmd, ud.cloneflags, ud.clonedir, destdir), d)
- source_found = True
- else:
- source_error.append("clone directory not available or not up to date: " + ud.clonedir)
+ clonedir_is_up_to_date = not self.clonedir_need_update(ud, d)
+ if clonedir_is_up_to_date:
+ runfetchcmd("%s clone %s %s/ %s" % (ud.basecmd, ud.cloneflags, ud.clonedir, destdir), d)
+ source_found = True
+ else:
+ source_error.append("clone directory not available or not up to date: " + ud.clonedir)
if not source_found:
if ud.shallow:
@@ -592,6 +672,8 @@ class Git(FetchMethod):
raise bb.fetch2.FetchError("Repository %s has LFS content, install git-lfs on host to download (or set lfs=0 to ignore it)" % (repourl))
elif not need_lfs:
bb.note("Repository %s has LFS content but it is not being fetched" % (repourl))
+ else:
+ runfetchcmd("%s lfs install --local" % ud.basecmd, d, workdir=destdir)
if not ud.nocheckout:
if subpath:
@@ -643,6 +725,35 @@ class Git(FetchMethod):
raise bb.fetch2.FetchError("The command '%s' gave output with more then 1 line unexpectedly, output: '%s'" % (cmd, output))
return output.split()[0] != "0"
+ def _lfs_objects_downloaded(self, ud, d, name, wd):
+ """
+ Verifies whether the LFS objects for requested revisions have already been downloaded
+ """
+ # Bail out early if this repository doesn't use LFS
+ if not self._need_lfs(ud) or not self._contains_lfs(ud, d, wd):
+ return True
+
+ # The Git LFS specification specifies ([1]) the LFS folder layout so it should be safe to check for file
+ # existence.
+ # [1] https://github.com/git-lfs/git-lfs/blob/main/docs/spec.md#intercepting-git
+ cmd = "%s lfs ls-files -l %s" \
+ % (ud.basecmd, ud.revisions[name])
+ output = runfetchcmd(cmd, d, quiet=True, workdir=wd).rstrip()
+ # Do not do any further matching if no objects are managed by LFS
+ if not output:
+ return True
+
+ # Match all lines beginning with the hexadecimal OID
+ oid_regex = re.compile("^(([a-fA-F0-9]{2})([a-fA-F0-9]{2})[A-Fa-f0-9]+)")
+ for line in output.split("\n"):
+ oid = re.search(oid_regex, line)
+ if not oid:
+ bb.warn("git lfs ls-files output '%s' did not match expected format." % line)
+ if not os.path.exists(os.path.join(wd, "lfs", "objects", oid.group(2), oid.group(3), oid.group(1))):
+ return False
+
+ return True
+
def _need_lfs(self, ud):
return ud.parm.get("lfs", "1") == "1"
@@ -651,13 +762,11 @@ class Git(FetchMethod):
Check if the repository has 'lfs' (large file) content
"""
- if not ud.nobranch:
- branchname = ud.branches[ud.names[0]]
- else:
- branchname = "master"
-
- # The bare clonedir doesn't use the remote names; it has the branch immediately.
- if wd == ud.clonedir:
+ if ud.nobranch:
+ # If no branch is specified, use the current git commit
+ refname = self._build_revision(ud, d, ud.names[0])
+ elif wd == ud.clonedir:
+ # The bare clonedir doesn't use the remote names; it has the branch immediately.
refname = ud.branches[ud.names[0]]
else:
refname = "origin/%s" % ud.branches[ud.names[0]]
@@ -732,11 +841,11 @@ class Git(FetchMethod):
"""
Compute the HEAD revision for the url
"""
- if not d.getVar("__BBSEENSRCREV"):
- raise bb.fetch2.FetchError("Recipe uses a floating tag/branch without a fixed SRCREV yet doesn't call bb.fetch2.get_srcrev() (use SRCPV in PV for OE).")
+ if not d.getVar("__BBSRCREV_SEEN"):
+ raise bb.fetch2.FetchError("Recipe uses a floating tag/branch '%s' for repo '%s' without a fixed SRCREV yet doesn't call bb.fetch2.get_srcrev() (use SRCPV in PV for OE)." % (ud.unresolvedrev[name], ud.host+ud.path))
# Ensure we mark as not cached
- bb.fetch2.get_autorev(d)
+ bb.fetch2.mark_recipe_nocache(d)
output = self._lsremote(ud, d, "")
# Tags of the form ^{} may not work, need to fallback to other form
@@ -762,38 +871,42 @@ class Git(FetchMethod):
"""
pupver = ('', '')
- tagregex = re.compile(d.getVar('UPSTREAM_CHECK_GITTAGREGEX') or r"(?P<pver>([0-9][\.|_]?)+)")
try:
output = self._lsremote(ud, d, "refs/tags/*")
except (bb.fetch2.FetchError, bb.fetch2.NetworkAccess) as e:
bb.note("Could not list remote: %s" % str(e))
return pupver
+ rev_tag_re = re.compile(r"([0-9a-f]{40})\s+refs/tags/(.*)")
+ pver_re = re.compile(d.getVar('UPSTREAM_CHECK_GITTAGREGEX') or r"(?P<pver>([0-9][\.|_]?)+)")
+ nonrel_re = re.compile(r"(alpha|beta|rc|final)+")
+
verstring = ""
- revision = ""
for line in output.split("\n"):
if not line:
break
- tag_head = line.split("/")[-1]
+ m = rev_tag_re.match(line)
+ if not m:
+ continue
+
+ (revision, tag) = m.groups()
+
# Ignore non-released branches
- m = re.search(r"(alpha|beta|rc|final)+", tag_head)
- if m:
+ if nonrel_re.search(tag):
continue
# search for version in the line
- tag = tagregex.search(tag_head)
- if tag is None:
+ m = pver_re.search(tag)
+ if not m:
continue
- tag = tag.group('pver')
- tag = tag.replace("_", ".")
+ pver = m.group('pver').replace("_", ".")
- if verstring and bb.utils.vercmp(("0", tag, ""), ("0", verstring, "")) < 0:
+ if verstring and bb.utils.vercmp(("0", pver, ""), ("0", verstring, "")) < 0:
continue
- verstring = tag
- revision = line.split()[0]
+ verstring = pver
pupver = (verstring, revision)
return pupver
diff --git a/bitbake/lib/bb/fetch2/gitsm.py b/bitbake/lib/bb/fetch2/gitsm.py
index c5c23d5260..f7f3af7212 100644
--- a/bitbake/lib/bb/fetch2/gitsm.py
+++ b/bitbake/lib/bb/fetch2/gitsm.py
@@ -88,9 +88,9 @@ class GitSM(Git):
subrevision[m] = module_hash.split()[2]
# Convert relative to absolute uri based on parent uri
- if uris[m].startswith('..'):
+ if uris[m].startswith('..') or uris[m].startswith('./'):
newud = copy.copy(ud)
- newud.path = os.path.realpath(os.path.join(newud.path, uris[m]))
+ newud.path = os.path.normpath(os.path.join(newud.path, uris[m]))
uris[m] = Git._get_repo_url(self, newud)
for module in submodules:
@@ -115,10 +115,21 @@ class GitSM(Git):
# This has to be a file reference
proto = "file"
url = "gitsm://" + uris[module]
+ if url.endswith("{}{}".format(ud.host, ud.path)):
+ raise bb.fetch2.FetchError("Submodule refers to the parent repository. This will cause deadlock situation in current version of Bitbake." \
+ "Consider using git fetcher instead.")
url += ';protocol=%s' % proto
url += ";name=%s" % module
url += ";subpath=%s" % module
+ url += ";nobranch=1"
+ url += ";lfs=%s" % self._need_lfs(ud)
+ # Note that adding "user=" here to give credentials to the
+ # submodule is not supported. Since using SRC_URI to give git://
+ # URL a password is not supported, one have to use one of the
+ # recommended way (eg. ~/.netrc or SSH config) which does specify
+ # the user (See comment in git.py).
+ # So, we will not take patches adding "user=" support here.
ld = d.createCopy()
# Not necessary to set SRC_URI, since we're passing the URI to
@@ -207,6 +218,10 @@ class GitSM(Git):
try:
newfetch = Fetch([url], d, cache=False)
+ # modpath is needed by unpack tracer to calculate submodule
+ # checkout dir
+ new_ud = newfetch.ud[url]
+ new_ud.modpath = modpath
newfetch.unpack(root=os.path.dirname(os.path.join(repo_conf, 'modules', module)))
except Exception as e:
logger.error('gitsm: submodule unpack failed: %s %s' % (type(e).__name__, str(e)))
@@ -232,10 +247,12 @@ class GitSM(Git):
ret = self.process_submodules(ud, ud.destdir, unpack_submodules, d)
if not ud.bareclone and ret:
- # All submodules should already be downloaded and configured in the tree. This simply sets
- # up the configuration and checks out the files. The main project config should remain
- # unmodified, and no download from the internet should occur.
- runfetchcmd("%s submodule update --recursive --no-fetch" % (ud.basecmd), d, quiet=True, workdir=ud.destdir)
+ # All submodules should already be downloaded and configured in the tree. This simply
+ # sets up the configuration and checks out the files. The main project config should
+ # remain unmodified, and no download from the internet should occur. As such, lfs smudge
+ # should also be skipped as these files were already smudged in the fetch stage if lfs
+ # was enabled.
+ runfetchcmd("GIT_LFS_SKIP_SMUDGE=1 %s submodule update --recursive --no-fetch" % (ud.basecmd), d, quiet=True, workdir=ud.destdir)
def implicit_urldata(self, ud, d):
import shutil, subprocess, tempfile
diff --git a/bitbake/lib/bb/fetch2/hg.py b/bitbake/lib/bb/fetch2/hg.py
index 063e13008a..cbff8c490c 100644
--- a/bitbake/lib/bb/fetch2/hg.py
+++ b/bitbake/lib/bb/fetch2/hg.py
@@ -242,6 +242,7 @@ class Hg(FetchMethod):
revflag = "-r %s" % ud.revision
subdir = ud.parm.get("destsuffix", ud.module)
codir = "%s/%s" % (destdir, subdir)
+ ud.unpack_tracer.unpack("hg", codir)
scmdata = ud.parm.get("scmdata", "")
if scmdata != "nokeep":
diff --git a/bitbake/lib/bb/fetch2/local.py b/bitbake/lib/bb/fetch2/local.py
index e7d1c8c58f..7d7668110e 100644
--- a/bitbake/lib/bb/fetch2/local.py
+++ b/bitbake/lib/bb/fetch2/local.py
@@ -41,9 +41,9 @@ class Local(FetchMethod):
"""
Return the local filename of a given url assuming a successful fetch.
"""
- return self.localpaths(urldata, d)[-1]
+ return self.localfile_searchpaths(urldata, d)[-1]
- def localpaths(self, urldata, d):
+ def localfile_searchpaths(self, urldata, d):
"""
Return the local filename of a given url assuming a successful fetch.
"""
@@ -51,18 +51,14 @@ class Local(FetchMethod):
path = urldata.decodedurl
newpath = path
if path[0] == "/":
+ logger.debug2("Using absolute %s" % (path))
return [path]
filespath = d.getVar('FILESPATH')
if filespath:
logger.debug2("Searching for %s in paths:\n %s" % (path, "\n ".join(filespath.split(":"))))
newpath, hist = bb.utils.which(filespath, path, history=True)
+ logger.debug2("Using %s for %s" % (newpath, path))
searched.extend(hist)
- if not os.path.exists(newpath):
- dldirfile = os.path.join(d.getVar("DL_DIR"), path)
- logger.debug2("Defaulting to %s for %s" % (dldirfile, path))
- bb.utils.mkdirhier(os.path.dirname(dldirfile))
- searched.append(dldirfile)
- return searched
return searched
def need_update(self, ud, d):
@@ -78,9 +74,7 @@ class Local(FetchMethod):
filespath = d.getVar('FILESPATH')
if filespath:
locations = filespath.split(":")
- locations.append(d.getVar("DL_DIR"))
-
- msg = "Unable to find file " + urldata.url + " anywhere. The paths that were searched were:\n " + "\n ".join(locations)
+ msg = "Unable to find file " + urldata.url + " anywhere to download to " + urldata.localpath + ". The paths that were searched were:\n " + "\n ".join(locations)
raise FetchError(msg)
return True
diff --git a/bitbake/lib/bb/fetch2/npm.py b/bitbake/lib/bb/fetch2/npm.py
index 8f7c10ac9b..15f3f19bc8 100644
--- a/bitbake/lib/bb/fetch2/npm.py
+++ b/bitbake/lib/bb/fetch2/npm.py
@@ -44,9 +44,12 @@ def npm_package(package):
"""Convert the npm package name to remove unsupported character"""
# Scoped package names (with the @) use the same naming convention
# as the 'npm pack' command.
- if package.startswith("@"):
- return re.sub("/", "-", package[1:])
- return package
+ name = re.sub("/", "-", package)
+ name = name.lower()
+ name = re.sub(r"[^\-a-z0-9]", "", name)
+ name = name.strip("-")
+ return name
+
def npm_filename(package, version):
"""Get the filename of a npm package"""
@@ -103,6 +106,7 @@ class NpmEnvironment(object):
"""Run npm command in a controlled environment"""
with tempfile.TemporaryDirectory() as tmpdir:
d = bb.data.createCopy(self.d)
+ d.setVar("PATH", d.getVar("PATH")) # PATH might contain $HOME - evaluate it before patching
d.setVar("HOME", tmpdir)
if not workdir:
@@ -156,7 +160,7 @@ class Npm(FetchMethod):
raise ParameterError("Invalid 'version' parameter", ud.url)
# Extract the 'registry' part of the url
- ud.registry = re.sub(r"^npm://", "http://", ud.url.split(";")[0])
+ ud.registry = re.sub(r"^npm://", "https://", ud.url.split(";")[0])
# Using the 'downloadfilename' parameter as local filename
# or the npm package name.
@@ -294,6 +298,7 @@ class Npm(FetchMethod):
destsuffix = ud.parm.get("destsuffix", "npm")
destdir = os.path.join(rootdir, destsuffix)
npm_unpack(ud.localpath, destdir, d)
+ ud.unpack_tracer.unpack("npm", destdir)
def clean(self, ud, d):
"""Clean any existing full or partial download"""
diff --git a/bitbake/lib/bb/fetch2/npmsw.py b/bitbake/lib/bb/fetch2/npmsw.py
index a8c4d3528f..ff5f8dc755 100644
--- a/bitbake/lib/bb/fetch2/npmsw.py
+++ b/bitbake/lib/bb/fetch2/npmsw.py
@@ -41,8 +41,9 @@ def foreach_dependencies(shrinkwrap, callback=None, dev=False):
with:
name = the package name (string)
params = the package parameters (dictionary)
- deptree = the package dependency tree (array of strings)
+ destdir = the destination of the package (string)
"""
+ # For handling old style dependencies entries in shinkwrap files
def _walk_deps(deps, deptree):
for name in deps:
subtree = [*deptree, name]
@@ -52,9 +53,22 @@ def foreach_dependencies(shrinkwrap, callback=None, dev=False):
continue
elif deps[name].get("bundled", False):
continue
- callback(name, deps[name], subtree)
-
- _walk_deps(shrinkwrap.get("dependencies", {}), [])
+ destsubdirs = [os.path.join("node_modules", dep) for dep in subtree]
+ destsuffix = os.path.join(*destsubdirs)
+ callback(name, deps[name], destsuffix)
+
+ # packages entry means new style shrinkwrap file, else use dependencies
+ packages = shrinkwrap.get("packages", None)
+ if packages is not None:
+ for package in packages:
+ if package != "":
+ name = package.split('node_modules/')[-1]
+ package_infos = packages.get(package, {})
+ if dev == False and package_infos.get("dev", False):
+ continue
+ callback(name, package_infos, package)
+ else:
+ _walk_deps(shrinkwrap.get("dependencies", {}), [])
class NpmShrinkWrap(FetchMethod):
"""Class to fetch all package from a shrinkwrap file"""
@@ -75,12 +89,10 @@ class NpmShrinkWrap(FetchMethod):
# Resolve the dependencies
ud.deps = []
- def _resolve_dependency(name, params, deptree):
+ def _resolve_dependency(name, params, destsuffix):
url = None
localpath = None
extrapaths = []
- destsubdirs = [os.path.join("node_modules", dep) for dep in deptree]
- destsuffix = os.path.join(*destsubdirs)
unpack = True
integrity = params.get("integrity", None)
@@ -129,10 +141,28 @@ class NpmShrinkWrap(FetchMethod):
localpath = os.path.join(d.getVar("DL_DIR"), localfile)
+ # Handle local tarball and link sources
+ elif version.startswith("file"):
+ localpath = version[5:]
+ if not version.endswith(".tgz"):
+ unpack = False
+
# Handle git sources
- elif version.startswith("git"):
+ elif version.startswith(("git", "bitbucket","gist")) or (
+ not version.endswith((".tgz", ".tar", ".tar.gz"))
+ and not version.startswith((".", "@", "/"))
+ and "/" in version
+ ):
if version.startswith("github:"):
version = "git+https://github.com/" + version[len("github:"):]
+ elif version.startswith("gist:"):
+ version = "git+https://gist.github.com/" + version[len("gist:"):]
+ elif version.startswith("bitbucket:"):
+ version = "git+https://bitbucket.org/" + version[len("bitbucket:"):]
+ elif version.startswith("gitlab:"):
+ version = "git+https://gitlab.com/" + version[len("gitlab:"):]
+ elif not version.startswith(("git+","git:")):
+ version = "git+https://github.com/" + version
regex = re.compile(r"""
^
git\+
@@ -158,16 +188,12 @@ class NpmShrinkWrap(FetchMethod):
url = str(uri)
- # Handle local tarball and link sources
- elif version.startswith("file"):
- localpath = version[5:]
- if not version.endswith(".tgz"):
- unpack = False
-
else:
raise ParameterError("Unsupported dependency: %s" % name, ud.url)
+ # name is needed by unpack tracer for module mapping
ud.deps.append({
+ "name": name,
"url": url,
"localpath": localpath,
"extrapaths": extrapaths,
@@ -193,19 +219,23 @@ class NpmShrinkWrap(FetchMethod):
# This fetcher resolves multiple URIs from a shrinkwrap file and then
# forwards it to a proxy fetcher. The management of the donestamp file,
# the lockfile and the checksums are forwarded to the proxy fetcher.
- ud.proxy = Fetch([dep["url"] for dep in ud.deps if dep["url"]], data)
+ shrinkwrap_urls = [dep["url"] for dep in ud.deps if dep["url"]]
+ if shrinkwrap_urls:
+ ud.proxy = Fetch(shrinkwrap_urls, data)
ud.needdonestamp = False
@staticmethod
def _foreach_proxy_method(ud, handle):
returns = []
- for proxy_url in ud.proxy.urls:
- proxy_ud = ud.proxy.ud[proxy_url]
- proxy_d = ud.proxy.d
- proxy_ud.setup_localpath(proxy_d)
- lf = lockfile(proxy_ud.lockfile)
- returns.append(handle(proxy_ud.method, proxy_ud, proxy_d))
- unlockfile(lf)
+ #Check if there are dependencies before try to fetch them
+ if len(ud.deps) > 0:
+ for proxy_url in ud.proxy.urls:
+ proxy_ud = ud.proxy.ud[proxy_url]
+ proxy_d = ud.proxy.d
+ proxy_ud.setup_localpath(proxy_d)
+ lf = lockfile(proxy_ud.lockfile)
+ returns.append(handle(proxy_ud.method, proxy_ud, proxy_d))
+ unlockfile(lf)
return returns
def verify_donestamp(self, ud, d):
@@ -242,6 +272,7 @@ class NpmShrinkWrap(FetchMethod):
destsuffix = ud.parm.get("destsuffix")
if destsuffix:
destdir = os.path.join(rootdir, destsuffix)
+ ud.unpack_tracer.unpack("npm-shrinkwrap", destdir)
bb.utils.mkdirhier(destdir)
bb.utils.copyfile(ud.shrinkwrap_file,
diff --git a/bitbake/lib/bb/fetch2/osc.py b/bitbake/lib/bb/fetch2/osc.py
index dd02f03780..495ac8a30a 100644
--- a/bitbake/lib/bb/fetch2/osc.py
+++ b/bitbake/lib/bb/fetch2/osc.py
@@ -1,4 +1,6 @@
#
+# Copyright BitBake Contributors
+#
# SPDX-License-Identifier: GPL-2.0-only
#
"""
@@ -90,7 +92,7 @@ class Osc(FetchMethod):
api_source_cmd = self._buildosccommand(ud, d, "api_source")
output = runfetchcmd(api_source_cmd, d)
- match = re.match('<directory ?.* rev="(\d+)".*>', output)
+ match = re.match(r'<directory ?.* rev="(\d+)".*>', output)
if match is None:
raise FetchError("Unable to parse osc response", ud.url)
return match.groups()[0]
diff --git a/bitbake/lib/bb/fetch2/sftp.py b/bitbake/lib/bb/fetch2/sftp.py
index f87f292e5d..7884cce949 100644
--- a/bitbake/lib/bb/fetch2/sftp.py
+++ b/bitbake/lib/bb/fetch2/sftp.py
@@ -103,7 +103,7 @@ class SFTP(FetchMethod):
if path[:3] == '/~/':
path = path[3:]
- remote = '%s%s:%s' % (user, urlo.hostname, path)
+ remote = '"%s%s:%s"' % (user, urlo.hostname, path)
cmd = '%s %s %s %s' % (basecmd, port, remote, lpath)
diff --git a/bitbake/lib/bb/fetch2/ssh.py b/bitbake/lib/bb/fetch2/ssh.py
index 8d082b38c1..0cbb2a6f25 100644
--- a/bitbake/lib/bb/fetch2/ssh.py
+++ b/bitbake/lib/bb/fetch2/ssh.py
@@ -150,8 +150,6 @@ class SSH(FetchMethod):
)
check_network_access(d, cmd, urldata.url)
+ runfetchcmd(cmd, d)
- if runfetchcmd(cmd, d):
- return True
-
- return False
+ return True
diff --git a/bitbake/lib/bb/fetch2/wget.py b/bitbake/lib/bb/fetch2/wget.py
index b3a3de571a..fbfa6938ac 100644
--- a/bitbake/lib/bb/fetch2/wget.py
+++ b/bitbake/lib/bb/fetch2/wget.py
@@ -26,7 +26,6 @@ from bb.fetch2 import FetchMethod
from bb.fetch2 import FetchError
from bb.fetch2 import logger
from bb.fetch2 import runfetchcmd
-from bb.utils import export_proxies
from bs4 import BeautifulSoup
from bs4 import SoupStrainer
@@ -88,7 +87,10 @@ class Wget(FetchMethod):
if not ud.localfile:
ud.localfile = d.expand(urllib.parse.unquote(ud.host + ud.path).replace("/", "."))
- self.basecmd = d.getVar("FETCHCMD_wget") or "/usr/bin/env wget -t 2 -T 30 --passive-ftp"
+ self.basecmd = d.getVar("FETCHCMD_wget") or "/usr/bin/env wget -t 2 -T 30"
+
+ if ud.type == 'ftp' or ud.type == 'ftps':
+ self.basecmd += " --passive-ftp"
if not self.check_certs(d):
self.basecmd += " --no-check-certificate"
@@ -106,10 +108,9 @@ class Wget(FetchMethod):
fetchcmd = self.basecmd
- if 'downloadfilename' in ud.parm:
- localpath = os.path.join(d.getVar("DL_DIR"), ud.localfile)
- bb.utils.mkdirhier(os.path.dirname(localpath))
- fetchcmd += " -O %s" % shlex.quote(localpath)
+ localpath = os.path.join(d.getVar("DL_DIR"), ud.localfile) + ".tmp"
+ bb.utils.mkdirhier(os.path.dirname(localpath))
+ fetchcmd += " -O %s" % shlex.quote(localpath)
if ud.user and ud.pswd:
fetchcmd += " --auth-no-challenge"
@@ -133,6 +134,15 @@ class Wget(FetchMethod):
self._runwget(ud, d, fetchcmd, False)
+ # Try and verify any checksum now, meaning if it isn't correct, we don't remove the
+ # original file, which might be a race (imagine two recipes referencing the same
+ # source, one with an incorrect checksum)
+ bb.fetch2.verify_checksum(ud, d, localpath=localpath, fatal_nochecksum=False)
+
+ # Remove the ".tmp" and move the file into position atomically
+ # Our lock prevents multiple writers but mirroring code may grab incomplete files
+ os.rename(localpath, localpath[:-4])
+
# Sanity check since wget can pretend it succeed when it didn't
# Also, this used to happen if sourceforge sent us to the mirror page
if not os.path.exists(ud.localpath):
@@ -333,7 +343,8 @@ class Wget(FetchMethod):
opener = urllib.request.build_opener(*handlers)
try:
- uri = ud.url.split(";")[0]
+ uri_base = ud.url.split(";")[0]
+ uri = "{}://{}{}".format(urllib.parse.urlparse(uri_base).scheme, ud.host, ud.path)
r = urllib.request.Request(uri)
r.get_method = lambda: "HEAD"
# Some servers (FusionForge, as used on Alioth) require that the
@@ -352,29 +363,22 @@ class Wget(FetchMethod):
try:
import netrc
- n = netrc.netrc()
- login, unused, password = n.authenticators(urllib.parse.urlparse(uri).hostname)
- add_basic_auth("%s:%s" % (login, password), r)
- except (TypeError, ImportError, IOError, netrc.NetrcParseError):
+ auth_data = netrc.netrc().authenticators(urllib.parse.urlparse(uri).hostname)
+ if auth_data:
+ login, _, password = auth_data
+ add_basic_auth("%s:%s" % (login, password), r)
+ except (FileNotFoundError, netrc.NetrcParseError):
pass
with opener.open(r, timeout=30) as response:
pass
- except urllib.error.URLError as e:
- if try_again:
- logger.debug2("checkstatus: trying again")
- return self.checkstatus(fetch, ud, d, False)
- else:
- # debug for now to avoid spamming the logs in e.g. remote sstate searches
- logger.debug2("checkstatus() urlopen failed: %s" % e)
- return False
- except ConnectionResetError as e:
+ except (urllib.error.URLError, ConnectionResetError, TimeoutError) as e:
if try_again:
logger.debug2("checkstatus: trying again")
return self.checkstatus(fetch, ud, d, False)
else:
# debug for now to avoid spamming the logs in e.g. remote sstate searches
- logger.debug2("checkstatus() urlopen failed: %s" % e)
+ logger.debug2("checkstatus() urlopen failed for %s: %s" % (uri,e))
return False
return True
@@ -636,10 +640,10 @@ class Wget(FetchMethod):
# search for version matches on folders inside the path, like:
# "5.7" in http://download.gnome.org/sources/${PN}/5.7/${PN}-${PV}.tar.gz
dirver_regex = re.compile(r"(?P<dirver>[^/]*(\d+\.)*\d+([-_]r\d+)*)/")
- m = dirver_regex.search(path)
+ m = dirver_regex.findall(path)
if m:
pn = d.getVar('PN')
- dirver = m.group('dirver')
+ dirver = m[-1][0]
dirver_pn_regex = re.compile(r"%s\d?" % (re.escape(pn)))
if not dirver_pn_regex.search(dirver):
diff --git a/bitbake/lib/bb/main.py b/bitbake/lib/bb/main.py
index 93eda3632e..bca8ebfa09 100755
--- a/bitbake/lib/bb/main.py
+++ b/bitbake/lib/bb/main.py
@@ -12,11 +12,12 @@
import os
import sys
import logging
-import optparse
+import argparse
import warnings
import fcntl
import time
import traceback
+import datetime
import bb
from bb import event
@@ -43,18 +44,18 @@ def present_options(optionlist):
else:
return optionlist[0]
-class BitbakeHelpFormatter(optparse.IndentedHelpFormatter):
- def format_option(self, option):
+class BitbakeHelpFormatter(argparse.HelpFormatter):
+ def _get_help_string(self, action):
# We need to do this here rather than in the text we supply to
# add_option() because we don't want to call list_extension_modules()
# on every execution (since it imports all of the modules)
# Note also that we modify option.help rather than the returned text
# - this is so that we don't have to re-format the text ourselves
- if option.dest == 'ui':
+ if action.dest == 'ui':
valid_uis = list_extension_modules(bb.ui, 'main')
- option.help = option.help.replace('@CHOICES@', present_options(valid_uis))
+ return action.help.replace('@CHOICES@', present_options(valid_uis))
- return optparse.IndentedHelpFormatter.format_option(self, option)
+ return action.help
def list_extension_modules(pkg, checkattr):
"""
@@ -114,180 +115,207 @@ def _showwarning(message, category, filename, lineno, file=None, line=None):
warnings.showwarning = _showwarning
def create_bitbake_parser():
- parser = optparse.OptionParser(
- formatter=BitbakeHelpFormatter(),
- version="BitBake Build Tool Core version %s" % bb.__version__,
- usage="""%prog [options] [recipename/target recipe:do_task ...]
-
- Executes the specified task (default is 'build') for a given set of target recipes (.bb files).
- It is assumed there is a conf/bblayers.conf available in cwd or in BBPATH which
- will provide the layer, BBFILES and other configuration information.""")
-
- parser.add_option("-b", "--buildfile", action="store", dest="buildfile", default=None,
- help="Execute tasks from a specific .bb recipe directly. WARNING: Does "
- "not handle any dependencies from other recipes.")
-
- parser.add_option("-k", "--continue", action="store_false", dest="halt", default=True,
- help="Continue as much as possible after an error. While the target that "
- "failed and anything depending on it cannot be built, as much as "
- "possible will be built before stopping.")
-
- parser.add_option("-f", "--force", action="store_true", dest="force", default=False,
- help="Force the specified targets/task to run (invalidating any "
- "existing stamp file).")
-
- parser.add_option("-c", "--cmd", action="store", dest="cmd",
- help="Specify the task to execute. The exact options available "
- "depend on the metadata. Some examples might be 'compile'"
- " or 'populate_sysroot' or 'listtasks' may give a list of "
- "the tasks available.")
-
- parser.add_option("-C", "--clear-stamp", action="store", dest="invalidate_stamp",
- help="Invalidate the stamp for the specified task such as 'compile' "
- "and then run the default task for the specified target(s).")
-
- parser.add_option("-r", "--read", action="append", dest="prefile", default=[],
- help="Read the specified file before bitbake.conf.")
-
- parser.add_option("-R", "--postread", action="append", dest="postfile", default=[],
- help="Read the specified file after bitbake.conf.")
-
- parser.add_option("-v", "--verbose", action="store_true", dest="verbose", default=False,
- help="Enable tracing of shell tasks (with 'set -x'). "
- "Also print bb.note(...) messages to stdout (in "
- "addition to writing them to ${T}/log.do_<task>).")
-
- parser.add_option("-D", "--debug", action="count", dest="debug", default=0,
- help="Increase the debug level. You can specify this "
- "more than once. -D sets the debug level to 1, "
- "where only bb.debug(1, ...) messages are printed "
- "to stdout; -DD sets the debug level to 2, where "
- "both bb.debug(1, ...) and bb.debug(2, ...) "
- "messages are printed; etc. Without -D, no debug "
- "messages are printed. Note that -D only affects "
- "output to stdout. All debug messages are written "
- "to ${T}/log.do_taskname, regardless of the debug "
- "level.")
-
- parser.add_option("-q", "--quiet", action="count", dest="quiet", default=0,
- help="Output less log message data to the terminal. You can specify this more than once.")
-
- parser.add_option("-n", "--dry-run", action="store_true", dest="dry_run", default=False,
- help="Don't execute, just go through the motions.")
-
- parser.add_option("-S", "--dump-signatures", action="append", dest="dump_signatures",
- default=[], metavar="SIGNATURE_HANDLER",
- help="Dump out the signature construction information, with no task "
- "execution. The SIGNATURE_HANDLER parameter is passed to the "
- "handler. Two common values are none and printdiff but the handler "
- "may define more/less. none means only dump the signature, printdiff"
- " means compare the dumped signature with the cached one.")
-
- parser.add_option("-p", "--parse-only", action="store_true",
- dest="parse_only", default=False,
- help="Quit after parsing the BB recipes.")
-
- parser.add_option("-s", "--show-versions", action="store_true",
- dest="show_versions", default=False,
- help="Show current and preferred versions of all recipes.")
-
- parser.add_option("-e", "--environment", action="store_true",
- dest="show_environment", default=False,
- help="Show the global or per-recipe environment complete with information"
- " about where variables were set/changed.")
-
- parser.add_option("-g", "--graphviz", action="store_true", dest="dot_graph", default=False,
- help="Save dependency tree information for the specified "
- "targets in the dot syntax.")
-
- parser.add_option("-I", "--ignore-deps", action="append",
- dest="extra_assume_provided", default=[],
- help="Assume these dependencies don't exist and are already provided "
- "(equivalent to ASSUME_PROVIDED). Useful to make dependency "
- "graphs more appealing")
-
- parser.add_option("-l", "--log-domains", action="append", dest="debug_domains", default=[],
- help="Show debug logging for the specified logging domains")
-
- parser.add_option("-P", "--profile", action="store_true", dest="profile", default=False,
- help="Profile the command and save reports.")
+ parser = argparse.ArgumentParser(
+ description="""\
+ It is assumed there is a conf/bblayers.conf available in cwd or in BBPATH which
+ will provide the layer, BBFILES and other configuration information.
+ """,
+ formatter_class=BitbakeHelpFormatter,
+ allow_abbrev=False,
+ add_help=False, # help is manually added below in a specific argument group
+ )
+
+ general_group = parser.add_argument_group('General options')
+ task_group = parser.add_argument_group('Task control options')
+ exec_group = parser.add_argument_group('Execution control options')
+ logging_group = parser.add_argument_group('Logging/output control options')
+ server_group = parser.add_argument_group('Server options')
+ config_group = parser.add_argument_group('Configuration options')
+
+ general_group.add_argument("targets", nargs="*", metavar="recipename/target",
+ help="Execute the specified task (default is 'build') for these target "
+ "recipes (.bb files).")
+
+ general_group.add_argument("-s", "--show-versions", action="store_true",
+ help="Show current and preferred versions of all recipes.")
+
+ general_group.add_argument("-e", "--environment", action="store_true",
+ dest="show_environment",
+ help="Show the global or per-recipe environment complete with information"
+ " about where variables were set/changed.")
+
+ general_group.add_argument("-g", "--graphviz", action="store_true", dest="dot_graph",
+ help="Save dependency tree information for the specified "
+ "targets in the dot syntax.")
# @CHOICES@ is substituted out by BitbakeHelpFormatter above
- parser.add_option("-u", "--ui", action="store", dest="ui",
- default=os.environ.get('BITBAKE_UI', 'knotty'),
- help="The user interface to use (@CHOICES@ - default %default).")
-
- parser.add_option("", "--token", action="store", dest="xmlrpctoken",
- default=os.environ.get("BBTOKEN"),
- help="Specify the connection token to be used when connecting "
- "to a remote server.")
-
- parser.add_option("", "--revisions-changed", action="store_true",
- dest="revisions_changed", default=False,
- help="Set the exit code depending on whether upstream floating "
- "revisions have changed or not.")
-
- parser.add_option("", "--server-only", action="store_true",
- dest="server_only", default=False,
- help="Run bitbake without a UI, only starting a server "
- "(cooker) process.")
-
- parser.add_option("-B", "--bind", action="store", dest="bind", default=False,
- help="The name/address for the bitbake xmlrpc server to bind to.")
-
- parser.add_option("-T", "--idle-timeout", type=float, dest="server_timeout",
- default=os.getenv("BB_SERVER_TIMEOUT"),
- help="Set timeout to unload bitbake server due to inactivity, "
- "set to -1 means no unload, "
- "default: Environment variable BB_SERVER_TIMEOUT.")
-
- parser.add_option("", "--no-setscene", action="store_true",
- dest="nosetscene", default=False,
- help="Do not run any setscene tasks. sstate will be ignored and "
- "everything needed, built.")
-
- parser.add_option("", "--skip-setscene", action="store_true",
- dest="skipsetscene", default=False,
- help="Skip setscene tasks if they would be executed. Tasks previously "
- "restored from sstate will be kept, unlike --no-setscene")
-
- parser.add_option("", "--setscene-only", action="store_true",
- dest="setsceneonly", default=False,
- help="Only run setscene tasks, don't run any real tasks.")
-
- parser.add_option("", "--remote-server", action="store", dest="remote_server",
- default=os.environ.get("BBSERVER"),
- help="Connect to the specified server.")
-
- parser.add_option("-m", "--kill-server", action="store_true",
- dest="kill_server", default=False,
- help="Terminate any running bitbake server.")
-
- parser.add_option("", "--observe-only", action="store_true",
- dest="observe_only", default=False,
- help="Connect to a server as an observing-only client.")
-
- parser.add_option("", "--status-only", action="store_true",
- dest="status_only", default=False,
- help="Check the status of the remote bitbake server.")
-
- parser.add_option("-w", "--write-log", action="store", dest="writeeventlog",
- default=os.environ.get("BBEVENTLOG"),
- help="Writes the event log of the build to a bitbake event json file. "
- "Use '' (empty string) to assign the name automatically.")
-
- parser.add_option("", "--runall", action="append", dest="runall",
- help="Run the specified task for any recipe in the taskgraph of the specified target (even if it wouldn't otherwise have run).")
-
- parser.add_option("", "--runonly", action="append", dest="runonly",
- help="Run only the specified task within the taskgraph of the specified targets (and any task dependencies those tasks may have).")
+ general_group.add_argument("-u", "--ui",
+ default=os.environ.get('BITBAKE_UI', 'knotty'),
+ help="The user interface to use (@CHOICES@ - default %(default)s).")
+
+ general_group.add_argument("--version", action="store_true",
+ help="Show programs version and exit.")
+
+ general_group.add_argument('-h', '--help', action='help',
+ help='Show this help message and exit.')
+
+
+ task_group.add_argument("-f", "--force", action="store_true",
+ help="Force the specified targets/task to run (invalidating any "
+ "existing stamp file).")
+
+ task_group.add_argument("-c", "--cmd",
+ help="Specify the task to execute. The exact options available "
+ "depend on the metadata. Some examples might be 'compile'"
+ " or 'populate_sysroot' or 'listtasks' may give a list of "
+ "the tasks available.")
+
+ task_group.add_argument("-C", "--clear-stamp", dest="invalidate_stamp",
+ help="Invalidate the stamp for the specified task such as 'compile' "
+ "and then run the default task for the specified target(s).")
+
+ task_group.add_argument("--runall", action="append", default=[],
+ help="Run the specified task for any recipe in the taskgraph of the "
+ "specified target (even if it wouldn't otherwise have run).")
+
+ task_group.add_argument("--runonly", action="append",
+ help="Run only the specified task within the taskgraph of the "
+ "specified targets (and any task dependencies those tasks may have).")
+
+ task_group.add_argument("--no-setscene", action="store_true",
+ dest="nosetscene",
+ help="Do not run any setscene tasks. sstate will be ignored and "
+ "everything needed, built.")
+
+ task_group.add_argument("--skip-setscene", action="store_true",
+ dest="skipsetscene",
+ help="Skip setscene tasks if they would be executed. Tasks previously "
+ "restored from sstate will be kept, unlike --no-setscene.")
+
+ task_group.add_argument("--setscene-only", action="store_true",
+ dest="setsceneonly",
+ help="Only run setscene tasks, don't run any real tasks.")
+
+
+ exec_group.add_argument("-n", "--dry-run", action="store_true",
+ help="Don't execute, just go through the motions.")
+
+ exec_group.add_argument("-p", "--parse-only", action="store_true",
+ help="Quit after parsing the BB recipes.")
+
+ exec_group.add_argument("-k", "--continue", action="store_false", dest="halt",
+ help="Continue as much as possible after an error. While the target that "
+ "failed and anything depending on it cannot be built, as much as "
+ "possible will be built before stopping.")
+
+ exec_group.add_argument("-P", "--profile", action="store_true",
+ help="Profile the command and save reports.")
+
+ exec_group.add_argument("-S", "--dump-signatures", action="append",
+ default=[], metavar="SIGNATURE_HANDLER",
+ help="Dump out the signature construction information, with no task "
+ "execution. The SIGNATURE_HANDLER parameter is passed to the "
+ "handler. Two common values are none and printdiff but the handler "
+ "may define more/less. none means only dump the signature, printdiff"
+ " means recursively compare the dumped signature with the most recent"
+ " one in a local build or sstate cache (can be used to find out why tasks re-run"
+ " when that is not expected)")
+
+ exec_group.add_argument("--revisions-changed", action="store_true",
+ help="Set the exit code depending on whether upstream floating "
+ "revisions have changed or not.")
+
+ exec_group.add_argument("-b", "--buildfile",
+ help="Execute tasks from a specific .bb recipe directly. WARNING: Does "
+ "not handle any dependencies from other recipes.")
+
+ logging_group.add_argument("-D", "--debug", action="count", default=0,
+ help="Increase the debug level. You can specify this "
+ "more than once. -D sets the debug level to 1, "
+ "where only bb.debug(1, ...) messages are printed "
+ "to stdout; -DD sets the debug level to 2, where "
+ "both bb.debug(1, ...) and bb.debug(2, ...) "
+ "messages are printed; etc. Without -D, no debug "
+ "messages are printed. Note that -D only affects "
+ "output to stdout. All debug messages are written "
+ "to ${T}/log.do_taskname, regardless of the debug "
+ "level.")
+
+ logging_group.add_argument("-l", "--log-domains", action="append", dest="debug_domains",
+ default=[],
+ help="Show debug logging for the specified logging domains.")
+
+ logging_group.add_argument("-v", "--verbose", action="store_true",
+ help="Enable tracing of shell tasks (with 'set -x'). "
+ "Also print bb.note(...) messages to stdout (in "
+ "addition to writing them to ${T}/log.do_<task>).")
+
+ logging_group.add_argument("-q", "--quiet", action="count", default=0,
+ help="Output less log message data to the terminal. You can specify this "
+ "more than once.")
+
+ logging_group.add_argument("-w", "--write-log", dest="writeeventlog",
+ default=os.environ.get("BBEVENTLOG"),
+ help="Writes the event log of the build to a bitbake event json file. "
+ "Use '' (empty string) to assign the name automatically.")
+
+
+ server_group.add_argument("-B", "--bind", default=False,
+ help="The name/address for the bitbake xmlrpc server to bind to.")
+
+ server_group.add_argument("-T", "--idle-timeout", type=float, dest="server_timeout",
+ default=os.getenv("BB_SERVER_TIMEOUT"),
+ help="Set timeout to unload bitbake server due to inactivity, "
+ "set to -1 means no unload, "
+ "default: Environment variable BB_SERVER_TIMEOUT.")
+
+ server_group.add_argument("--remote-server",
+ default=os.environ.get("BBSERVER"),
+ help="Connect to the specified server.")
+
+ server_group.add_argument("-m", "--kill-server", action="store_true",
+ help="Terminate any running bitbake server.")
+
+ server_group.add_argument("--token", dest="xmlrpctoken",
+ default=os.environ.get("BBTOKEN"),
+ help="Specify the connection token to be used when connecting "
+ "to a remote server.")
+
+ server_group.add_argument("--observe-only", action="store_true",
+ help="Connect to a server as an observing-only client.")
+
+ server_group.add_argument("--status-only", action="store_true",
+ help="Check the status of the remote bitbake server.")
+
+ server_group.add_argument("--server-only", action="store_true",
+ help="Run bitbake without a UI, only starting a server "
+ "(cooker) process.")
+
+
+ config_group.add_argument("-r", "--read", action="append", dest="prefile", default=[],
+ help="Read the specified file before bitbake.conf.")
+
+ config_group.add_argument("-R", "--postread", action="append", dest="postfile", default=[],
+ help="Read the specified file after bitbake.conf.")
+
+
+ config_group.add_argument("-I", "--ignore-deps", action="append",
+ dest="extra_assume_provided", default=[],
+ help="Assume these dependencies don't exist and are already provided "
+ "(equivalent to ASSUME_PROVIDED). Useful to make dependency "
+ "graphs more appealing.")
+
return parser
class BitBakeConfigParameters(cookerdata.ConfigParameters):
def parseCommandLine(self, argv=sys.argv):
parser = create_bitbake_parser()
- options, targets = parser.parse_args(argv)
+ options = parser.parse_intermixed_args(argv[1:])
+
+ if options.version:
+ print("BitBake Build Tool Core version %s" % bb.__version__)
+ sys.exit(0)
if options.quiet and options.verbose:
parser.error("options --quiet and --verbose are mutually exclusive")
@@ -319,7 +347,7 @@ class BitBakeConfigParameters(cookerdata.ConfigParameters):
else:
options.xmlrpcinterface = (None, 0)
- return options, targets[1:]
+ return options, options.targets
def bitbake_main(configParams, configuration):
@@ -384,6 +412,9 @@ def bitbake_main(configParams, configuration):
return 1
+def timestamp():
+ return datetime.datetime.now().strftime('%H:%M:%S.%f')
+
def setup_bitbake(configParams, extrafeatures=None):
# Ensure logging messages get sent to the UI as events
handler = bb.event.LogHandler()
@@ -391,6 +422,11 @@ def setup_bitbake(configParams, extrafeatures=None):
# In status only mode there are no logs and no UI
logger.addHandler(handler)
+ if configParams.dump_signatures:
+ if extrafeatures is None:
+ extrafeatures = []
+ extrafeatures.append(bb.cooker.CookerFeatures.RECIPE_SIGGEN_INFO)
+
if configParams.server_only:
featureset = []
ui_module = None
@@ -418,7 +454,7 @@ def setup_bitbake(configParams, extrafeatures=None):
retries = 8
while retries:
try:
- topdir, lock = lockBitbake()
+ topdir, lock, lockfile = lockBitbake()
sockname = topdir + "/bitbake.sock"
if lock:
if configParams.status_only or configParams.kill_server:
@@ -429,18 +465,22 @@ def setup_bitbake(configParams, extrafeatures=None):
logger.info("Starting bitbake server...")
# Clear the event queue since we already displayed messages
bb.event.ui_queue = []
- server = bb.server.process.BitBakeServer(lock, sockname, featureset, configParams.server_timeout, configParams.xmlrpcinterface)
+ server = bb.server.process.BitBakeServer(lock, sockname, featureset, configParams.server_timeout, configParams.xmlrpcinterface, configParams.profile)
else:
logger.info("Reconnecting to bitbake server...")
if not os.path.exists(sockname):
- logger.info("Previous bitbake instance shutting down?, waiting to retry...")
+ logger.info("Previous bitbake instance shutting down?, waiting to retry... (%s)" % timestamp())
+ procs = bb.server.process.get_lockfile_process_msg(lockfile)
+ if procs:
+ logger.info("Processes holding bitbake.lock (missing socket %s):\n%s" % (sockname, procs))
+ logger.info("Directory listing: %s" % (str(os.listdir(topdir))))
i = 0
lock = None
# Wait for 5s or until we can get the lock
while not lock and i < 50:
time.sleep(0.1)
- _, lock = lockBitbake()
+ _, lock, _ = lockBitbake()
i += 1
if lock:
bb.utils.unlockfile(lock)
@@ -459,9 +499,9 @@ def setup_bitbake(configParams, extrafeatures=None):
retries -= 1
tryno = 8 - retries
if isinstance(e, (bb.server.process.ProcessTimeout, BrokenPipeError, EOFError, SystemExit)):
- logger.info("Retrying server connection (#%d)..." % tryno)
+ logger.info("Retrying server connection (#%d)... (%s)" % (tryno, timestamp()))
else:
- logger.info("Retrying server connection (#%d)... (%s)" % (tryno, traceback.format_exc()))
+ logger.info("Retrying server connection (#%d)... (%s, %s)" % (tryno, traceback.format_exc(), timestamp()))
if not retries:
bb.fatal("Unable to connect to bitbake server, or start one (server startup failures would be in bitbake-cookerdaemon.log).")
@@ -490,5 +530,5 @@ def lockBitbake():
bb.error("Unable to find conf/bblayers.conf or conf/bitbake.conf. BBPATH is unset and/or not in a build directory?")
raise BBMainFatal
lockfile = topdir + "/bitbake.lock"
- return topdir, bb.utils.lockfile(lockfile, False, False)
+ return topdir, bb.utils.lockfile(lockfile, False, False), lockfile
diff --git a/bitbake/lib/bb/monitordisk.py b/bitbake/lib/bb/monitordisk.py
index a1b910007d..f928210351 100644
--- a/bitbake/lib/bb/monitordisk.py
+++ b/bitbake/lib/bb/monitordisk.py
@@ -234,9 +234,10 @@ class diskMonitor:
freeInode = st.f_favail
if minInode and freeInode < minInode:
- # Some filesystems use dynamic inodes so can't run out
- # (e.g. btrfs). This is reported by the inode count being 0.
- if st.f_files == 0:
+ # Some filesystems use dynamic inodes so can't run out.
+ # This is reported by the inode count being 0 (btrfs) or the free
+ # inode count being -1 (cephfs).
+ if st.f_files == 0 or st.f_favail == -1:
self.devDict[k][2] = None
continue
# Always show warning, the self.checked would always be False if the action is WARN
diff --git a/bitbake/lib/bb/msg.py b/bitbake/lib/bb/msg.py
index 93575d89c4..3e18596faa 100644
--- a/bitbake/lib/bb/msg.py
+++ b/bitbake/lib/bb/msg.py
@@ -230,7 +230,7 @@ def logger_create(name, output=sys.stderr, level=logging.INFO, preserve_handlers
console = logging.StreamHandler(output)
console.addFilter(bb.msg.LogFilterShowOnce())
format = bb.msg.BBLogFormatter("%(levelname)s: %(message)s")
- if color == 'always' or (color == 'auto' and output.isatty()):
+ if color == 'always' or (color == 'auto' and output.isatty() and os.environ.get('NO_COLOR', '') == ''):
format.enable_color()
console.setFormatter(format)
if preserve_handlers:
diff --git a/bitbake/lib/bb/parse/__init__.py b/bitbake/lib/bb/parse/__init__.py
index 347609513b..a4358f1374 100644
--- a/bitbake/lib/bb/parse/__init__.py
+++ b/bitbake/lib/bb/parse/__init__.py
@@ -60,6 +60,14 @@ def cached_mtime_noerror(f):
return 0
return __mtime_cache[f]
+def check_mtime(f, mtime):
+ try:
+ current_mtime = os.stat(f)[stat.ST_MTIME]
+ __mtime_cache[f] = current_mtime
+ except OSError:
+ current_mtime = 0
+ return current_mtime == mtime
+
def update_mtime(f):
try:
__mtime_cache[f] = os.stat(f)[stat.ST_MTIME]
@@ -99,12 +107,12 @@ def supports(fn, data):
return 1
return 0
-def handle(fn, data, include = 0):
+def handle(fn, data, include=0, baseconfig=False):
"""Call the handler that is appropriate for this file"""
for h in handlers:
if h['supports'](fn, data):
with data.inchistory.include(fn):
- return h['handle'](fn, data, include)
+ return h['handle'](fn, data, include, baseconfig)
raise ParseError("not a BitBake file", fn)
def init(fn, data):
diff --git a/bitbake/lib/bb/parse/ast.py b/bitbake/lib/bb/parse/ast.py
index 9e0a0f5c98..7581d003fd 100644
--- a/bitbake/lib/bb/parse/ast.py
+++ b/bitbake/lib/bb/parse/ast.py
@@ -9,6 +9,7 @@
# SPDX-License-Identifier: GPL-2.0-only
#
+import sys
import bb
from bb import methodpool
from bb.parse import logger
@@ -210,10 +211,12 @@ class ExportFuncsNode(AstNode):
def eval(self, data):
+ sentinel = " # Export function set\n"
for func in self.n:
calledfunc = self.classname + "_" + func
- if data.getVar(func, False) and not data.getVarFlag(func, 'export_func', False):
+ basevar = data.getVar(func, False)
+ if basevar and sentinel not in basevar:
continue
if data.getVar(func, False):
@@ -230,12 +233,11 @@ class ExportFuncsNode(AstNode):
data.setVarFlag(func, "lineno", 1)
if data.getVarFlag(calledfunc, "python", False):
- data.setVar(func, " bb.build.exec_func('" + calledfunc + "', d)\n", parsing=True)
+ data.setVar(func, sentinel + " bb.build.exec_func('" + calledfunc + "', d)\n", parsing=True)
else:
if "-" in self.classname:
bb.fatal("The classname %s contains a dash character and is calling an sh function %s using EXPORT_FUNCTIONS. Since a dash is illegal in sh function names, this cannot work, please rename the class or don't use EXPORT_FUNCTIONS." % (self.classname, calledfunc))
- data.setVar(func, " " + calledfunc + "\n", parsing=True)
- data.setVarFlag(func, 'export_func', '1')
+ data.setVar(func, sentinel + " " + calledfunc + "\n", parsing=True)
class AddTaskNode(AstNode):
def __init__(self, filename, lineno, func, before, after):
@@ -269,6 +271,41 @@ class BBHandlerNode(AstNode):
data.setVarFlag(h, "handler", 1)
data.setVar('__BBHANDLERS', bbhands)
+class PyLibNode(AstNode):
+ def __init__(self, filename, lineno, libdir, namespace):
+ AstNode.__init__(self, filename, lineno)
+ self.libdir = libdir
+ self.namespace = namespace
+
+ def eval(self, data):
+ global_mods = (data.getVar("BB_GLOBAL_PYMODULES") or "").split()
+ for m in global_mods:
+ if m not in bb.utils._context:
+ bb.utils._context[m] = __import__(m)
+
+ libdir = data.expand(self.libdir)
+ if libdir not in sys.path:
+ sys.path.append(libdir)
+ try:
+ bb.utils._context[self.namespace] = __import__(self.namespace)
+ toimport = getattr(bb.utils._context[self.namespace], "BBIMPORTS", [])
+ for i in toimport:
+ bb.utils._context[self.namespace] = __import__(self.namespace + "." + i)
+ mod = getattr(bb.utils._context[self.namespace], i)
+ fn = getattr(mod, "__file__")
+ funcs = {}
+ for f in dir(mod):
+ if f.startswith("_"):
+ continue
+ fcall = getattr(mod, f)
+ if not callable(fcall):
+ continue
+ funcs[f] = fcall
+ bb.codeparser.add_module_functions(fn, funcs, "%s.%s" % (self.namespace, i))
+
+ except AttributeError as e:
+ bb.error("Error importing OE modules: %s" % str(e))
+
class InheritNode(AstNode):
def __init__(self, filename, lineno, classes):
AstNode.__init__(self, filename, lineno)
@@ -277,6 +314,16 @@ class InheritNode(AstNode):
def eval(self, data):
bb.parse.BBHandler.inherit(self.classes, self.filename, self.lineno, data)
+class InheritDeferredNode(AstNode):
+ def __init__(self, filename, lineno, classes):
+ AstNode.__init__(self, filename, lineno)
+ self.inherit = (classes, filename, lineno)
+
+ def eval(self, data):
+ inherits = data.getVar('__BBDEFINHERITS', False) or []
+ inherits.append(self.inherit)
+ data.setVar('__BBDEFINHERITS', inherits)
+
def handleInclude(statements, filename, lineno, m, force):
statements.append(IncludeNode(filename, lineno, m.group(1), force))
@@ -320,10 +367,17 @@ def handleDelTask(statements, filename, lineno, m):
def handleBBHandlers(statements, filename, lineno, m):
statements.append(BBHandlerNode(filename, lineno, m.group(1)))
+def handlePyLib(statements, filename, lineno, m):
+ statements.append(PyLibNode(filename, lineno, m.group(1), m.group(2)))
+
def handleInherit(statements, filename, lineno, m):
classes = m.group(1)
statements.append(InheritNode(filename, lineno, classes))
+def handleInheritDeferred(statements, filename, lineno, m):
+ classes = m.group(1)
+ statements.append(InheritDeferredNode(filename, lineno, classes))
+
def runAnonFuncs(d):
code = []
for funcname in d.getVar("__BBANONFUNCS", False) or []:
@@ -361,6 +415,9 @@ def finalize(fn, d, variant = None):
d.setVar('BBINCLUDED', bb.parse.get_file_depends(d))
+ if d.getVar('__BBAUTOREV_SEEN') and d.getVar('__BBSRCREV_SEEN') and not d.getVar("__BBAUTOREV_ACTED_UPON"):
+ bb.fatal("AUTOREV/SRCPV set too late for the fetcher to work properly, please set the variables earlier in parsing. Erroring instead of later obtuse build failures.")
+
bb.event.fire(bb.event.RecipeParsed(fn), d)
finally:
bb.event.set_handlers(saved_handlers)
@@ -387,6 +444,14 @@ def multi_finalize(fn, d):
logger.debug("Appending .bbappend file %s to %s", append, fn)
bb.parse.BBHandler.handle(append, d, True)
+ while True:
+ inherits = d.getVar('__BBDEFINHERITS', False) or []
+ if not inherits:
+ break
+ inherit, filename, lineno = inherits.pop(0)
+ d.setVar('__BBDEFINHERITS', inherits)
+ bb.parse.BBHandler.inherit(inherit, filename, lineno, d, deferred=True)
+
onlyfinalise = d.getVar("__ONLYFINALISE", False)
safe_d = d
diff --git a/bitbake/lib/bb/parse/parse_py/BBHandler.py b/bitbake/lib/bb/parse/parse_py/BBHandler.py
index ee9bd760ce..c13e4b9755 100644
--- a/bitbake/lib/bb/parse/parse_py/BBHandler.py
+++ b/bitbake/lib/bb/parse/parse_py/BBHandler.py
@@ -21,6 +21,7 @@ from .ConfHandler import include, init
__func_start_regexp__ = re.compile(r"(((?P<py>python(?=(\s|\()))|(?P<fr>fakeroot(?=\s)))\s*)*(?P<func>[\w\.\-\+\{\}\$:]+)?\s*\(\s*\)\s*{$" )
__inherit_regexp__ = re.compile(r"inherit\s+(.+)" )
+__inherit_def_regexp__ = re.compile(r"inherit_defer\s+(.+)" )
__export_func_regexp__ = re.compile(r"EXPORT_FUNCTIONS\s+(.+)" )
__addtask_regexp__ = re.compile(r"addtask\s+(?P<func>\w+)\s*((before\s*(?P<before>((.*(?=after))|(.*))))|(after\s*(?P<after>((.*(?=before))|(.*)))))*")
__deltask_regexp__ = re.compile(r"deltask\s+(.+)")
@@ -33,6 +34,7 @@ __infunc__ = []
__inpython__ = False
__body__ = []
__classname__ = ""
+__residue__ = []
cached_statements = {}
@@ -40,31 +42,46 @@ def supports(fn, d):
"""Return True if fn has a supported extension"""
return os.path.splitext(fn)[-1] in [".bb", ".bbclass", ".inc"]
-def inherit(files, fn, lineno, d):
+def inherit(files, fn, lineno, d, deferred=False):
__inherit_cache = d.getVar('__inherit_cache', False) or []
+ #if "${" in files and not deferred:
+ # bb.warn("%s:%s has non deferred conditional inherit" % (fn, lineno))
files = d.expand(files).split()
for file in files:
- if not os.path.isabs(file) and not file.endswith(".bbclass"):
- file = os.path.join('classes', '%s.bbclass' % file)
-
- if not os.path.isabs(file):
- bbpath = d.getVar("BBPATH")
- abs_fn, attempts = bb.utils.which(bbpath, file, history=True)
- for af in attempts:
- if af != abs_fn:
- bb.parse.mark_dependency(d, af)
- if abs_fn:
- file = abs_fn
+ classtype = d.getVar("__bbclasstype", False)
+ origfile = file
+ for t in ["classes-" + classtype, "classes"]:
+ file = origfile
+ if not os.path.isabs(file) and not file.endswith(".bbclass"):
+ file = os.path.join(t, '%s.bbclass' % file)
+
+ if not os.path.isabs(file):
+ bbpath = d.getVar("BBPATH")
+ abs_fn, attempts = bb.utils.which(bbpath, file, history=True)
+ for af in attempts:
+ if af != abs_fn:
+ bb.parse.mark_dependency(d, af)
+ if abs_fn:
+ file = abs_fn
+
+ if os.path.exists(file):
+ break
+
+ if not os.path.exists(file):
+ raise ParseError("Could not inherit file %s" % (file), fn, lineno)
if not file in __inherit_cache:
logger.debug("Inheriting %s (from %s:%d)" % (file, fn, lineno))
__inherit_cache.append( file )
d.setVar('__inherit_cache', __inherit_cache)
- include(fn, file, lineno, d, "inherit")
+ try:
+ bb.parse.handle(file, d, True)
+ except (IOError, OSError) as exc:
+ raise ParseError("Could not inherit file %s: %s" % (fn, exc.strerror), fn, lineno)
__inherit_cache = d.getVar('__inherit_cache', False) or []
def get_statements(filename, absolute_filename, base_name):
- global cached_statements
+ global cached_statements, __residue__, __body__
try:
return cached_statements[absolute_filename]
@@ -84,12 +101,17 @@ def get_statements(filename, absolute_filename, base_name):
# add a blank line to close out any python definition
feeder(lineno, "", filename, base_name, statements, eof=True)
+ if __residue__:
+ raise ParseError("Unparsed lines %s: %s" % (filename, str(__residue__)), filename, lineno)
+ if __body__:
+ raise ParseError("Unparsed lines from unclosed function %s: %s" % (filename, str(__body__)), filename, lineno)
+
if filename.endswith(".bbclass") or filename.endswith(".inc"):
cached_statements[absolute_filename] = statements
return statements
-def handle(fn, d, include):
- global __func_start_regexp__, __inherit_regexp__, __export_func_regexp__, __addtask_regexp__, __addhandler_regexp__, __infunc__, __body__, __residue__, __classname__
+def handle(fn, d, include, baseconfig=False):
+ global __infunc__, __body__, __residue__, __classname__
__body__ = []
__infunc__ = []
__classname__ = ""
@@ -141,7 +163,7 @@ def handle(fn, d, include):
return d
def feeder(lineno, s, fn, root, statements, eof=False):
- global __func_start_regexp__, __inherit_regexp__, __export_func_regexp__, __addtask_regexp__, __addhandler_regexp__, __def_regexp__, __python_func_regexp__, __inpython__, __infunc__, __body__, bb, __residue__, __classname__
+ global __inpython__, __infunc__, __body__, __residue__, __classname__
# Check tabs in python functions:
# - def py_funcname(): covered by __inpython__
@@ -178,10 +200,10 @@ def feeder(lineno, s, fn, root, statements, eof=False):
if s and s[0] == '#':
if len(__residue__) != 0 and __residue__[0][0] != "#":
- bb.fatal("There is a comment on line %s of file %s (%s) which is in the middle of a multiline expression.\nBitbake used to ignore these but no longer does so, please fix your metadata as errors are likely as a result of this change." % (lineno, fn, s))
+ bb.fatal("There is a comment on line %s of file %s:\n'''\n%s\n'''\nwhich is in the middle of a multiline expression. This syntax is invalid, please correct it." % (lineno, fn, s))
if len(__residue__) != 0 and __residue__[0][0] == "#" and (not s or s[0] != "#"):
- bb.fatal("There is a confusing multiline, partially commented expression on line %s of file %s (%s).\nPlease clarify whether this is all a comment or should be parsed." % (lineno, fn, s))
+ bb.fatal("There is a confusing multiline partially commented expression on line %s of file %s:\n%s\nPlease clarify whether this is all a comment or should be parsed." % (lineno - len(__residue__), fn, "\n".join(__residue__)))
if s and s[-1] == '\\':
__residue__.append(s[:-1])
@@ -252,7 +274,12 @@ def feeder(lineno, s, fn, root, statements, eof=False):
ast.handleInherit(statements, fn, lineno, m)
return
- return ConfHandler.feeder(lineno, s, fn, statements)
+ m = __inherit_def_regexp__.match(s)
+ if m:
+ ast.handleInheritDeferred(statements, fn, lineno, m)
+ return
+
+ return ConfHandler.feeder(lineno, s, fn, statements, conffile=False)
# Add us to the handlers list
from .. import handlers
diff --git a/bitbake/lib/bb/parse/parse_py/ConfHandler.py b/bitbake/lib/bb/parse/parse_py/ConfHandler.py
index b895d5b5ef..7826dee7d3 100644
--- a/bitbake/lib/bb/parse/parse_py/ConfHandler.py
+++ b/bitbake/lib/bb/parse/parse_py/ConfHandler.py
@@ -21,7 +21,7 @@ __config_regexp__ = re.compile( r"""
^
(?P<exp>export\s+)?
(?P<var>[a-zA-Z0-9\-_+.${}/~:]+?)
- (\[(?P<flag>[a-zA-Z0-9\-_+.]+)\])?
+ (\[(?P<flag>[a-zA-Z0-9\-_+.][a-zA-Z0-9\-_+.@]*)\])?
\s* (
(?P<colon>:=) |
@@ -45,7 +45,8 @@ __include_regexp__ = re.compile( r"include\s+(.+)" )
__require_regexp__ = re.compile( r"require\s+(.+)" )
__export_regexp__ = re.compile( r"export\s+([a-zA-Z0-9\-_+.${}/~]+)$" )
__unset_regexp__ = re.compile( r"unset\s+([a-zA-Z0-9\-_+.${}/~]+)$" )
-__unset_flag_regexp__ = re.compile( r"unset\s+([a-zA-Z0-9\-_+.${}/~]+)\[([a-zA-Z0-9\-_+.]+)\]$" )
+__unset_flag_regexp__ = re.compile( r"unset\s+([a-zA-Z0-9\-_+.${}/~]+)\[([a-zA-Z0-9\-_+.][a-zA-Z0-9\-_+.@]+)\]$" )
+__addpylib_regexp__ = re.compile(r"addpylib\s+(.+)\s+(.+)" )
def init(data):
return
@@ -102,12 +103,12 @@ def include_single_file(parentfn, fn, lineno, data, error_out):
# We have an issue where a UI might want to enforce particular settings such as
# an empty DISTRO variable. If configuration files do something like assigning
# a weak default, it turns out to be very difficult to filter out these changes,
-# particularly when the weak default might appear half way though parsing a chain
+# particularly when the weak default might appear half way though parsing a chain
# of configuration files. We therefore let the UIs hook into configuration file
# parsing. This turns out to be a hard problem to solve any other way.
confFilters = []
-def handle(fn, data, include):
+def handle(fn, data, include, baseconfig=False):
init(data)
if include == 0:
@@ -125,21 +126,26 @@ def handle(fn, data, include):
s = f.readline()
if not s:
break
+ origlineno = lineno
+ origline = s
w = s.strip()
# skip empty lines
if not w:
continue
s = s.rstrip()
while s[-1] == '\\':
- s2 = f.readline().rstrip()
+ line = f.readline()
+ origline += line
+ s2 = line.rstrip()
lineno = lineno + 1
if (not s2 or s2 and s2[0] != "#") and s[0] == "#" :
- bb.fatal("There is a confusing multiline, partially commented expression on line %s of file %s (%s).\nPlease clarify whether this is all a comment or should be parsed." % (lineno, fn, s))
+ bb.fatal("There is a confusing multiline, partially commented expression starting on line %s of file %s:\n%s\nPlease clarify whether this is all a comment or should be parsed." % (origlineno, fn, origline))
+
s = s[:-1] + s2
# skip comments
if s[0] == '#':
continue
- feeder(lineno, s, abs_fn, statements)
+ feeder(lineno, s, abs_fn, statements, baseconfig=baseconfig)
# DONE WITH PARSING... time to evaluate
data.setVar('FILE', abs_fn)
@@ -147,14 +153,14 @@ def handle(fn, data, include):
if oldfile:
data.setVar('FILE', oldfile)
- f.close()
-
for f in confFilters:
f(fn, data)
return data
-def feeder(lineno, s, fn, statements):
+# baseconfig is set for the bblayers/layer.conf cookerdata config parsing
+# The function is also used by BBHandler, conffile would be False
+def feeder(lineno, s, fn, statements, baseconfig=False, conffile=True):
m = __config_regexp__.match(s)
if m:
groupd = m.groupdict()
@@ -186,6 +192,11 @@ def feeder(lineno, s, fn, statements):
ast.handleUnsetFlag(statements, fn, lineno, m)
return
+ m = __addpylib_regexp__.match(s)
+ if baseconfig and conffile and m:
+ ast.handlePyLib(statements, fn, lineno, m)
+ return
+
raise ParseError("unparsed line: '%s'" % s, fn, lineno);
# Add us to the handlers list
diff --git a/bitbake/lib/bb/persist_data.py b/bitbake/lib/bb/persist_data.py
index ce84a15825..bcca791edf 100644
--- a/bitbake/lib/bb/persist_data.py
+++ b/bitbake/lib/bb/persist_data.py
@@ -249,4 +249,23 @@ def persist(domain, d):
bb.utils.mkdirhier(cachedir)
cachefile = os.path.join(cachedir, "bb_persist_data.sqlite3")
- return SQLTable(cachefile, domain)
+
+ try:
+ return SQLTable(cachefile, domain)
+ except sqlite3.OperationalError:
+ # Sqlite fails to open database when its path is too long.
+ # After testing, 504 is the biggest path length that can be opened by
+ # sqlite.
+ # Note: This code is called before sanity.bbclass and its path length
+ # check
+ max_len = 504
+ if len(cachefile) > max_len:
+ logger.critical("The path of the cache file is too long "
+ "({0} chars > {1}) to be opened by sqlite! "
+ "Your cache file is \"{2}\"".format(
+ len(cachefile),
+ max_len,
+ cachefile))
+ sys.exit(1)
+ else:
+ raise
diff --git a/bitbake/lib/bb/process.py b/bitbake/lib/bb/process.py
index be2c15a188..4c7b6d39df 100644
--- a/bitbake/lib/bb/process.py
+++ b/bitbake/lib/bb/process.py
@@ -1,4 +1,6 @@
#
+# Copyright BitBake Contributors
+#
# SPDX-License-Identifier: GPL-2.0-only
#
diff --git a/bitbake/lib/bb/runqueue.py b/bitbake/lib/bb/runqueue.py
index f34f1568e2..bc7e18175d 100644
--- a/bitbake/lib/bb/runqueue.py
+++ b/bitbake/lib/bb/runqueue.py
@@ -24,6 +24,7 @@ import pickle
from multiprocessing import Process
import shlex
import pprint
+import time
bblogger = logging.getLogger("BitBake")
logger = logging.getLogger("BitBake.RunQueue")
@@ -154,11 +155,82 @@ class RunQueueScheduler(object):
self.stamps = {}
for tid in self.rqdata.runtaskentries:
(mc, fn, taskname, taskfn) = split_tid_mcfn(tid)
- self.stamps[tid] = bb.build.stampfile(taskname, self.rqdata.dataCaches[mc], taskfn, noextra=True)
+ self.stamps[tid] = bb.parse.siggen.stampfile_mcfn(taskname, taskfn, extrainfo=False)
if tid in self.rq.runq_buildable:
- self.buildable.append(tid)
+ self.buildable.add(tid)
self.rev_prio_map = None
+ self.is_pressure_usable()
+
+ def is_pressure_usable(self):
+ """
+ If monitoring pressure, return True if pressure files can be open and read. For example
+ openSUSE /proc/pressure/* files have readable file permissions but when read the error EOPNOTSUPP (Operation not supported)
+ is returned.
+ """
+ if self.rq.max_cpu_pressure or self.rq.max_io_pressure or self.rq.max_memory_pressure:
+ try:
+ with open("/proc/pressure/cpu") as cpu_pressure_fds, \
+ open("/proc/pressure/io") as io_pressure_fds, \
+ open("/proc/pressure/memory") as memory_pressure_fds:
+
+ self.prev_cpu_pressure = cpu_pressure_fds.readline().split()[4].split("=")[1]
+ self.prev_io_pressure = io_pressure_fds.readline().split()[4].split("=")[1]
+ self.prev_memory_pressure = memory_pressure_fds.readline().split()[4].split("=")[1]
+ self.prev_pressure_time = time.time()
+ self.check_pressure = True
+ except:
+ bb.note("The /proc/pressure files can't be read. Continuing build without monitoring pressure")
+ self.check_pressure = False
+ else:
+ self.check_pressure = False
+
+ def exceeds_max_pressure(self):
+ """
+ Monitor the difference in total pressure at least once per second, if
+ BB_PRESSURE_MAX_{CPU|IO|MEMORY} are set, return True if above threshold.
+ """
+ if self.check_pressure:
+ with open("/proc/pressure/cpu") as cpu_pressure_fds, \
+ open("/proc/pressure/io") as io_pressure_fds, \
+ open("/proc/pressure/memory") as memory_pressure_fds:
+ # extract "total" from /proc/pressure/{cpu|io}
+ curr_cpu_pressure = cpu_pressure_fds.readline().split()[4].split("=")[1]
+ curr_io_pressure = io_pressure_fds.readline().split()[4].split("=")[1]
+ curr_memory_pressure = memory_pressure_fds.readline().split()[4].split("=")[1]
+ now = time.time()
+ tdiff = now - self.prev_pressure_time
+ psi_accumulation_interval = 1.0
+ cpu_pressure = (float(curr_cpu_pressure) - float(self.prev_cpu_pressure)) / tdiff
+ io_pressure = (float(curr_io_pressure) - float(self.prev_io_pressure)) / tdiff
+ memory_pressure = (float(curr_memory_pressure) - float(self.prev_memory_pressure)) / tdiff
+ exceeds_cpu_pressure = self.rq.max_cpu_pressure and cpu_pressure > self.rq.max_cpu_pressure
+ exceeds_io_pressure = self.rq.max_io_pressure and io_pressure > self.rq.max_io_pressure
+ exceeds_memory_pressure = self.rq.max_memory_pressure and memory_pressure > self.rq.max_memory_pressure
+
+ if tdiff > psi_accumulation_interval:
+ self.prev_cpu_pressure = curr_cpu_pressure
+ self.prev_io_pressure = curr_io_pressure
+ self.prev_memory_pressure = curr_memory_pressure
+ self.prev_pressure_time = now
+
+ pressure_state = (exceeds_cpu_pressure, exceeds_io_pressure, exceeds_memory_pressure)
+ pressure_values = (round(cpu_pressure,1), self.rq.max_cpu_pressure, round(io_pressure,1), self.rq.max_io_pressure, round(memory_pressure,1), self.rq.max_memory_pressure)
+ if hasattr(self, "pressure_state") and pressure_state != self.pressure_state:
+ bb.note("Pressure status changed to CPU: %s, IO: %s, Mem: %s (CPU: %s/%s, IO: %s/%s, Mem: %s/%s) - using %s/%s bitbake threads" % (pressure_state + pressure_values + (len(self.rq.runq_running.difference(self.rq.runq_complete)), self.rq.number_tasks)))
+ self.pressure_state = pressure_state
+ return (exceeds_cpu_pressure or exceeds_io_pressure or exceeds_memory_pressure)
+ elif self.rq.max_loadfactor:
+ limit = False
+ loadfactor = float(os.getloadavg()[0]) / os.cpu_count()
+ # bb.warn("Comparing %s to %s" % (loadfactor, self.rq.max_loadfactor))
+ if loadfactor > self.rq.max_loadfactor:
+ limit = True
+ if hasattr(self, "loadfactor_limit") and limit != self.loadfactor_limit:
+ bb.note("Load average limiting set to %s as load average: %s - using %s/%s bitbake threads" % (limit, loadfactor, len(self.rq.runq_running.difference(self.rq.runq_complete)), self.rq.number_tasks))
+ self.loadfactor_limit = limit
+ return limit
+ return False
def next_buildable_task(self):
"""
@@ -172,6 +244,12 @@ class RunQueueScheduler(object):
if not buildable:
return None
+ # Bitbake requires that at least one task be active. Only check for pressure if
+ # this is the case, otherwise the pressure limitation could result in no tasks
+ # being active and no new tasks started thereby, at times, breaking the scheduler.
+ if self.rq.stats.active and self.exceeds_max_pressure():
+ return None
+
# Filter out tasks that have a max number of threads that have been exceeded
skip_buildable = {}
for running in self.rq.runq_running.difference(self.rq.runq_complete):
@@ -202,11 +280,11 @@ class RunQueueScheduler(object):
best = None
bestprio = None
for tid in buildable:
- taskname = taskname_from_tid(tid)
- if taskname in skip_buildable and skip_buildable[taskname] >= int(self.skip_maxthread[taskname]):
- continue
prio = self.rev_prio_map[tid]
if bestprio is None or bestprio > prio:
+ taskname = taskname_from_tid(tid)
+ if taskname in skip_buildable and skip_buildable[taskname] >= int(self.skip_maxthread[taskname]):
+ continue
stamp = self.stamps[tid]
if stamp in self.rq.build_stamps.values():
continue
@@ -595,8 +673,11 @@ class RunQueueData:
# Nothing to do
return 0
+ bb.parse.siggen.setup_datacache(self.dataCaches)
+
self.init_progress_reporter.start()
self.init_progress_reporter.next_stage()
+ bb.event.check_for_interrupts(self.cooker.data)
# Step A - Work out a list of tasks to run
#
@@ -642,6 +723,8 @@ class RunQueueData:
frommc = mcdependency[1]
mcdep = mcdependency[2]
deptask = mcdependency[4]
+ if mcdep not in taskData:
+ bb.fatal("Multiconfig '%s' is referenced in multiconfig dependency '%s' but not enabled in BBMULTICONFIG?" % (mcdep, dep))
if mc == frommc:
fn = taskData[mcdep].build_targets[pn][0]
newdep = '%s:%s' % (fn,deptask)
@@ -743,6 +826,7 @@ class RunQueueData:
#self.dump_data()
self.init_progress_reporter.next_stage()
+ bb.event.check_for_interrupts(self.cooker.data)
# Resolve recursive 'recrdeptask' dependencies (Part B)
#
@@ -839,6 +923,7 @@ class RunQueueData:
self.runtaskentries[tid].depends.difference_update(recursivetasksselfref)
self.init_progress_reporter.next_stage()
+ bb.event.check_for_interrupts(self.cooker.data)
#self.dump_data()
@@ -877,7 +962,7 @@ class RunQueueData:
bb.debug(1, "Task %s is marked nostamp, cannot invalidate this task" % taskname)
else:
logger.verbose("Invalidate task %s, %s", taskname, fn)
- bb.parse.siggen.invalidate_task(taskname, self.dataCaches[mc], taskfn)
+ bb.parse.siggen.invalidate_task(taskname, taskfn)
self.target_tids = []
for (mc, target, task, fn) in self.targets:
@@ -920,6 +1005,7 @@ class RunQueueData:
mark_active(tid, 1)
self.init_progress_reporter.next_stage()
+ bb.event.check_for_interrupts(self.cooker.data)
# Step C - Prune all inactive tasks
#
@@ -928,25 +1014,32 @@ class RunQueueData:
# Handle --runall
if self.cooker.configuration.runall:
# re-run the mark_active and then drop unused tasks from new list
- reduced_tasklist = set(self.runtaskentries.keys())
- for tid in list(self.runtaskentries.keys()):
- if tid not in runq_build:
- reduced_tasklist.remove(tid)
- runq_build = {}
- for task in self.cooker.configuration.runall:
- if not task.startswith("do_"):
- task = "do_{0}".format(task)
- runall_tids = set()
- for tid in reduced_tasklist:
- wanttid = "{0}:{1}".format(fn_from_tid(tid), task)
- if wanttid in self.runtaskentries:
- runall_tids.add(wanttid)
+ runall_tids = set()
+ added = True
+ while added:
+ reduced_tasklist = set(self.runtaskentries.keys())
+ for tid in list(self.runtaskentries.keys()):
+ if tid not in runq_build:
+ reduced_tasklist.remove(tid)
+ runq_build = {}
- for tid in list(runall_tids):
- mark_active(tid, 1)
- if self.cooker.configuration.force:
- invalidate_task(tid, False)
+ orig = runall_tids
+ runall_tids = set()
+ for task in self.cooker.configuration.runall:
+ if not task.startswith("do_"):
+ task = "do_{0}".format(task)
+ for tid in reduced_tasklist:
+ wanttid = "{0}:{1}".format(fn_from_tid(tid), task)
+ if wanttid in self.runtaskentries:
+ runall_tids.add(wanttid)
+
+ for tid in list(runall_tids):
+ mark_active(tid, 1)
+ self.target_tids.append(tid)
+ if self.cooker.configuration.force:
+ invalidate_task(tid, False)
+ added = runall_tids - orig
delcount = set()
for tid in list(self.runtaskentries.keys()):
@@ -959,6 +1052,7 @@ class RunQueueData:
bb.msg.fatal("RunQueue", "Could not find any tasks with the tasknames %s to run within the recipes of the taskgraphs of the targets %s" % (str(self.cooker.configuration.runall), str(self.targets)))
self.init_progress_reporter.next_stage()
+ bb.event.check_for_interrupts(self.cooker.data)
# Handle runonly
if self.cooker.configuration.runonly:
@@ -999,6 +1093,7 @@ class RunQueueData:
logger.verbose("Assign Weightings")
self.init_progress_reporter.next_stage()
+ bb.event.check_for_interrupts(self.cooker.data)
# Generate a list of reverse dependencies to ease future calculations
for tid in self.runtaskentries:
@@ -1006,6 +1101,7 @@ class RunQueueData:
self.runtaskentries[dep].revdeps.add(tid)
self.init_progress_reporter.next_stage()
+ bb.event.check_for_interrupts(self.cooker.data)
# Identify tasks at the end of dependency chains
# Error on circular dependency loops (length two)
@@ -1022,12 +1118,14 @@ class RunQueueData:
logger.verbose("Compute totals (have %s endpoint(s))", len(endpoints))
self.init_progress_reporter.next_stage()
+ bb.event.check_for_interrupts(self.cooker.data)
# Calculate task weights
# Check of higher length circular dependencies
self.runq_weight = self.calculate_task_weights(endpoints)
self.init_progress_reporter.next_stage()
+ bb.event.check_for_interrupts(self.cooker.data)
# Sanity Check - Check for multiple tasks building the same provider
for mc in self.dataCaches:
@@ -1128,6 +1226,7 @@ class RunQueueData:
self.init_progress_reporter.next_stage()
self.init_progress_reporter.next_stage()
+ bb.event.check_for_interrupts(self.cooker.data)
# Iterate over the task list looking for tasks with a 'setscene' function
self.runq_setscene_tids = set()
@@ -1140,6 +1239,7 @@ class RunQueueData:
self.runq_setscene_tids.add(tid)
self.init_progress_reporter.next_stage()
+ bb.event.check_for_interrupts(self.cooker.data)
# Invalidate task if force mode active
if self.cooker.configuration.force:
@@ -1156,6 +1256,7 @@ class RunQueueData:
invalidate_task(fn + ":" + st, True)
self.init_progress_reporter.next_stage()
+ bb.event.check_for_interrupts(self.cooker.data)
# Create and print to the logs a virtual/xxxx -> PN (fn) table
for mc in taskData:
@@ -1168,6 +1269,7 @@ class RunQueueData:
bb.parse.siggen.tasks_resolved(virtmap, virtpnmap, self.dataCaches[mc])
self.init_progress_reporter.next_stage()
+ bb.event.check_for_interrupts(self.cooker.data)
bb.parse.siggen.set_setscene_tasks(self.runq_setscene_tids)
@@ -1180,6 +1282,7 @@ class RunQueueData:
dealtwith.add(tid)
todeal.remove(tid)
self.prepare_task_hash(tid)
+ bb.event.check_for_interrupts(self.cooker.data)
bb.parse.siggen.writeout_file_checksum_cache()
@@ -1187,9 +1290,8 @@ class RunQueueData:
return len(self.runtaskentries)
def prepare_task_hash(self, tid):
- dc = bb.parse.siggen.get_data_caches(self.dataCaches, mc_from_tid(tid))
- bb.parse.siggen.prep_taskhash(tid, self.runtaskentries[tid].depends, dc)
- self.runtaskentries[tid].hash = bb.parse.siggen.get_taskhash(tid, self.runtaskentries[tid].depends, dc)
+ bb.parse.siggen.prep_taskhash(tid, self.runtaskentries[tid].depends, self.dataCaches)
+ self.runtaskentries[tid].hash = bb.parse.siggen.get_taskhash(tid, self.runtaskentries[tid].depends, self.dataCaches)
self.runtaskentries[tid].unihash = bb.parse.siggen.get_unihash(tid)
def dump_data(self):
@@ -1233,32 +1335,40 @@ class RunQueue:
self.worker = {}
self.fakeworker = {}
+ @staticmethod
+ def send_pickled_data(worker, data, name):
+ msg = bytearray()
+ msg.extend(b"<" + name.encode() + b">")
+ pickled_data = pickle.dumps(data)
+ msg.extend(len(pickled_data).to_bytes(4, 'big'))
+ msg.extend(pickled_data)
+ msg.extend(b"</" + name.encode() + b">")
+ worker.stdin.write(msg)
+
def _start_worker(self, mc, fakeroot = False, rqexec = None):
logger.debug("Starting bitbake-worker")
magic = "decafbad"
if self.cooker.configuration.profile:
magic = "decafbadbad"
fakerootlogs = None
+
+ workerscript = os.path.realpath(os.path.dirname(__file__) + "/../../bin/bitbake-worker")
if fakeroot:
magic = magic + "beef"
mcdata = self.cooker.databuilder.mcdata[mc]
fakerootcmd = shlex.split(mcdata.getVar("FAKEROOTCMD"))
fakerootenv = (mcdata.getVar("FAKEROOTBASEENV") or "").split()
env = os.environ.copy()
- for key, value in (var.split('=') for var in fakerootenv):
+ for key, value in (var.split('=',1) for var in fakerootenv):
env[key] = value
- worker = subprocess.Popen(fakerootcmd + ["bitbake-worker", magic], stdout=subprocess.PIPE, stdin=subprocess.PIPE, env=env)
+ worker = subprocess.Popen(fakerootcmd + [sys.executable, workerscript, magic], stdout=subprocess.PIPE, stdin=subprocess.PIPE, env=env)
fakerootlogs = self.rqdata.dataCaches[mc].fakerootlogs
else:
- worker = subprocess.Popen(["bitbake-worker", magic], stdout=subprocess.PIPE, stdin=subprocess.PIPE)
+ worker = subprocess.Popen([sys.executable, workerscript, magic], stdout=subprocess.PIPE, stdin=subprocess.PIPE)
bb.utils.nonblockingfd(worker.stdout)
workerpipe = runQueuePipe(worker.stdout, None, self.cfgData, self, rqexec, fakerootlogs=fakerootlogs)
workerdata = {
- "taskdeps" : self.rqdata.dataCaches[mc].task_deps,
- "fakerootenv" : self.rqdata.dataCaches[mc].fakerootenv,
- "fakerootdirs" : self.rqdata.dataCaches[mc].fakerootdirs,
- "fakerootnoenv" : self.rqdata.dataCaches[mc].fakerootnoenv,
"sigdata" : bb.parse.siggen.get_taskdata(),
"logdefaultlevel" : bb.msg.loggerDefaultLogLevel,
"build_verbose_shell" : self.cooker.configuration.build_verbose_shell,
@@ -1272,9 +1382,9 @@ class RunQueue:
"umask" : self.cfgData.getVar("BB_DEFAULT_UMASK"),
}
- worker.stdin.write(b"<cookerconfig>" + pickle.dumps(self.cooker.configuration) + b"</cookerconfig>")
- worker.stdin.write(b"<extraconfigdata>" + pickle.dumps(self.cooker.extraconfigdata) + b"</extraconfigdata>")
- worker.stdin.write(b"<workerdata>" + pickle.dumps(workerdata) + b"</workerdata>")
+ RunQueue.send_pickled_data(worker, self.cooker.configuration, "cookerconfig")
+ RunQueue.send_pickled_data(worker, self.cooker.extraconfigdata, "extraconfigdata")
+ RunQueue.send_pickled_data(worker, workerdata, "workerdata")
worker.stdin.flush()
return RunQueueWorker(worker, workerpipe)
@@ -1284,7 +1394,7 @@ class RunQueue:
return
logger.debug("Teardown for bitbake-worker")
try:
- worker.process.stdin.write(b"<quit></quit>")
+ RunQueue.send_pickled_data(worker.process, b"", "quit")
worker.process.stdin.flush()
worker.process.stdin.close()
except IOError:
@@ -1296,12 +1406,12 @@ class RunQueue:
continue
worker.pipe.close()
- def start_worker(self):
+ def start_worker(self, rqexec):
if self.worker:
self.teardown_workers()
self.teardown = False
for mc in self.rqdata.dataCaches:
- self.worker[mc] = self._start_worker(mc)
+ self.worker[mc] = self._start_worker(mc, False, rqexec)
def start_fakeworker(self, rqexec, mc):
if not mc in self.fakeworker:
@@ -1343,7 +1453,7 @@ class RunQueue:
if taskname is None:
taskname = tn
- stampfile = bb.build.stampfile(taskname, self.rqdata.dataCaches[mc], taskfn)
+ stampfile = bb.parse.siggen.stampfile_mcfn(taskname, taskfn)
# If the stamp is missing, it's not current
if not os.access(stampfile, os.F_OK):
@@ -1355,7 +1465,7 @@ class RunQueue:
logger.debug2("%s.%s is nostamp\n", fn, taskname)
return False
- if taskname != "do_setscene" and taskname.endswith("_setscene"):
+ if taskname.endswith("_setscene"):
return True
if cache is None:
@@ -1366,8 +1476,8 @@ class RunQueue:
for dep in self.rqdata.runtaskentries[tid].depends:
if iscurrent:
(mc2, fn2, taskname2, taskfn2) = split_tid_mcfn(dep)
- stampfile2 = bb.build.stampfile(taskname2, self.rqdata.dataCaches[mc2], taskfn2)
- stampfile3 = bb.build.stampfile(taskname2 + "_setscene", self.rqdata.dataCaches[mc2], taskfn2)
+ stampfile2 = bb.parse.siggen.stampfile_mcfn(taskname2, taskfn2)
+ stampfile3 = bb.parse.siggen.stampfile_mcfn(taskname2 + "_setscene", taskfn2)
t2 = get_timestamp(stampfile2)
t3 = get_timestamp(stampfile3)
if t3 and not t2:
@@ -1428,6 +1538,7 @@ class RunQueue:
"""
retval = True
+ bb.event.check_for_interrupts(self.cooker.data)
if self.state is runQueuePrepare:
# NOTE: if you add, remove or significantly refactor the stages of this
@@ -1456,10 +1567,13 @@ class RunQueue:
if not self.dm_event_handler_registered:
res = bb.event.register(self.dm_event_handler_name,
- lambda x: self.dm.check(self) if self.state in [runQueueRunning, runQueueCleanUp] else False,
+ lambda x, y: self.dm.check(self) if self.state in [runQueueRunning, runQueueCleanUp] else False,
('bb.event.HeartbeatEvent',), data=self.cfgData)
self.dm_event_handler_registered = True
+ self.rqdata.init_progress_reporter.next_stage()
+ self.rqexe = RunQueueExecute(self)
+
dump = self.cooker.configuration.dump_signatures
if dump:
self.rqdata.init_progress_reporter.finish()
@@ -1471,10 +1585,8 @@ class RunQueue:
self.state = runQueueComplete
if self.state is runQueueSceneInit:
- self.rqdata.init_progress_reporter.next_stage()
- self.start_worker()
- self.rqdata.init_progress_reporter.next_stage()
- self.rqexe = RunQueueExecute(self)
+ self.start_worker(self.rqexe)
+ self.rqdata.init_progress_reporter.finish()
# If we don't have any setscene functions, skip execution
if not self.rqdata.runq_setscene_tids:
@@ -1553,29 +1665,28 @@ class RunQueue:
else:
self.rqexe.finish()
- def rq_dump_sigfn(self, fn, options):
- bb_cache = bb.cache.NoCache(self.cooker.databuilder)
- mc = bb.runqueue.mc_from_tid(fn)
- the_data = bb_cache.loadDataFull(fn, self.cooker.collections[mc].get_file_appends(fn))
- siggen = bb.parse.siggen
- dataCaches = self.rqdata.dataCaches
- siggen.dump_sigfn(fn, dataCaches, options)
+ def _rq_dump_sigtid(self, tids):
+ for tid in tids:
+ (mc, fn, taskname, taskfn) = split_tid_mcfn(tid)
+ dataCaches = self.rqdata.dataCaches
+ bb.parse.siggen.dump_sigtask(taskfn, taskname, dataCaches[mc].stamp[taskfn], True)
def dump_signatures(self, options):
- fns = set()
- bb.note("Reparsing files to collect dependency data")
+ if bb.cooker.CookerFeatures.RECIPE_SIGGEN_INFO not in self.cooker.featureset:
+ bb.fatal("The dump signatures functionality needs the RECIPE_SIGGEN_INFO feature enabled")
- for tid in self.rqdata.runtaskentries:
- fn = fn_from_tid(tid)
- fns.add(fn)
+ bb.note("Writing task signature files")
max_process = int(self.cfgData.getVar("BB_NUMBER_PARSE_THREADS") or os.cpu_count() or 1)
+ def chunkify(l, n):
+ return [l[i::n] for i in range(n)]
+ tids = chunkify(list(self.rqdata.runtaskentries), max_process)
# We cannot use the real multiprocessing.Pool easily due to some local data
# that can't be pickled. This is a cheap multi-process solution.
launched = []
- while fns:
+ while tids:
if len(launched) < max_process:
- p = Process(target=self.rq_dump_sigfn, args=(fns.pop(), options))
+ p = Process(target=self._rq_dump_sigtid, args=(tids.pop(), ))
p.start()
launched.append(p)
for q in launched:
@@ -1590,6 +1701,17 @@ class RunQueue:
return
def print_diffscenetasks(self):
+ def get_root_invalid_tasks(task, taskdepends, valid, noexec, visited_invalid):
+ invalidtasks = []
+ for t in taskdepends[task].depends:
+ if t not in valid and t not in visited_invalid:
+ invalidtasks.extend(get_root_invalid_tasks(t, taskdepends, valid, noexec, visited_invalid))
+ visited_invalid.add(t)
+
+ direct_invalid = [t for t in taskdepends[task].depends if t not in valid]
+ if not direct_invalid and task not in noexec:
+ invalidtasks = [task]
+ return invalidtasks
noexec = []
tocheck = set()
@@ -1623,46 +1745,49 @@ class RunQueue:
valid_new.add(dep)
invalidtasks = set()
- for tid in self.rqdata.runtaskentries:
- if tid not in valid_new and tid not in noexec:
- invalidtasks.add(tid)
- found = set()
- processed = set()
- for tid in invalidtasks:
+ toptasks = set(["{}:{}".format(t[3], t[2]) for t in self.rqdata.targets])
+ for tid in toptasks:
toprocess = set([tid])
while toprocess:
next = set()
+ visited_invalid = set()
for t in toprocess:
- for dep in self.rqdata.runtaskentries[t].depends:
- if dep in invalidtasks:
- found.add(tid)
- if dep not in processed:
- processed.add(dep)
+ if t not in valid_new and t not in noexec:
+ invalidtasks.update(get_root_invalid_tasks(t, self.rqdata.runtaskentries, valid_new, noexec, visited_invalid))
+ continue
+ if t in self.rqdata.runq_setscene_tids:
+ for dep in self.rqexe.sqdata.sq_deps[t]:
next.add(dep)
+ continue
+
+ for dep in self.rqdata.runtaskentries[t].depends:
+ next.add(dep)
+
toprocess = next
- if tid in found:
- toprocess = set()
tasklist = []
- for tid in invalidtasks.difference(found):
+ for tid in invalidtasks:
tasklist.append(tid)
if tasklist:
bb.plain("The differences between the current build and any cached tasks start at the following tasks:\n" + "\n".join(tasklist))
- return invalidtasks.difference(found)
+ return invalidtasks
def write_diffscenetasks(self, invalidtasks):
+ bb.siggen.check_siggen_version(bb.siggen)
# Define recursion callback
def recursecb(key, hash1, hash2):
hashes = [hash1, hash2]
+ bb.debug(1, "Recursively looking for recipe {} hashes {}".format(key, hashes))
hashfiles = bb.siggen.find_siginfo(key, None, hashes, self.cfgData)
+ bb.debug(1, "Found hashfiles:\n{}".format(hashfiles))
recout = []
if len(hashfiles) == 2:
- out2 = bb.siggen.compare_sigfiles(hashfiles[hash1], hashfiles[hash2], recursecb)
+ out2 = bb.siggen.compare_sigfiles(hashfiles[hash1]['path'], hashfiles[hash2]['path'], recursecb)
recout.extend(list(' ' + l for l in out2))
else:
recout.append("Unable to find matching sigdata for %s with hashes %s or %s" % (key, hash1, hash2))
@@ -1673,20 +1798,25 @@ class RunQueue:
for tid in invalidtasks:
(mc, fn, taskname, taskfn) = split_tid_mcfn(tid)
pn = self.rqdata.dataCaches[mc].pkg_fn[taskfn]
- h = self.rqdata.runtaskentries[tid].hash
+ h = self.rqdata.runtaskentries[tid].unihash
+ bb.debug(1, "Looking for recipe {} task {}".format(pn, taskname))
matches = bb.siggen.find_siginfo(pn, taskname, [], self.cooker.databuilder.mcdata[mc])
+ bb.debug(1, "Found hashfiles:\n{}".format(matches))
match = None
- for m in matches:
- if h in m:
- match = m
+ for m in matches.values():
+ if h in m['path']:
+ match = m['path']
if match is None:
- bb.fatal("Can't find a task we're supposed to have written out? (hash: %s)?" % h)
+ bb.fatal("Can't find a task we're supposed to have written out? (hash: %s tid: %s)?" % (h, tid))
matches = {k : v for k, v in iter(matches.items()) if h not in k}
+ matches_local = {k : v for k, v in iter(matches.items()) if h not in k and not v['sstate']}
+ if matches_local:
+ matches = matches_local
if matches:
- latestmatch = sorted(matches.keys(), key=lambda f: matches[f])[-1]
+ latestmatch = matches[sorted(matches.keys(), key=lambda h: matches[h]['time'])[-1]]['path']
prevh = __find_sha256__.search(latestmatch).group(0)
output = bb.siggen.compare_sigfiles(latestmatch, match, recursecb)
- bb.plain("\nTask %s:%s couldn't be used from the cache because:\n We need hash %s, closest matching task was %s\n " % (pn, taskname, h, prevh) + '\n '.join(output))
+ bb.plain("\nTask %s:%s couldn't be used from the cache because:\n We need hash %s, most recent matching task was %s\n " % (pn, taskname, h, prevh) + '\n '.join(output))
class RunQueueExecute:
@@ -1699,6 +1829,10 @@ class RunQueueExecute:
self.number_tasks = int(self.cfgData.getVar("BB_NUMBER_THREADS") or 1)
self.scheduler = self.cfgData.getVar("BB_SCHEDULER") or "speed"
+ self.max_cpu_pressure = self.cfgData.getVar("BB_PRESSURE_MAX_CPU")
+ self.max_io_pressure = self.cfgData.getVar("BB_PRESSURE_MAX_IO")
+ self.max_memory_pressure = self.cfgData.getVar("BB_PRESSURE_MAX_MEMORY")
+ self.max_loadfactor = self.cfgData.getVar("BB_LOADFACTOR_MAX")
self.sq_buildable = set()
self.sq_running = set()
@@ -1716,6 +1850,8 @@ class RunQueueExecute:
self.build_stamps2 = []
self.failed_tids = []
self.sq_deferred = {}
+ self.sq_needed_harddeps = set()
+ self.sq_harddep_deferred = set()
self.stampcache = {}
@@ -1725,14 +1861,37 @@ class RunQueueExecute:
self.stats = RunQueueStats(len(self.rqdata.runtaskentries), len(self.rqdata.runq_setscene_tids))
- for mc in rq.worker:
- rq.worker[mc].pipe.setrunqueueexec(self)
- for mc in rq.fakeworker:
- rq.fakeworker[mc].pipe.setrunqueueexec(self)
-
if self.number_tasks <= 0:
bb.fatal("Invalid BB_NUMBER_THREADS %s" % self.number_tasks)
+ lower_limit = 1.0
+ upper_limit = 1000000.0
+ if self.max_cpu_pressure:
+ self.max_cpu_pressure = float(self.max_cpu_pressure)
+ if self.max_cpu_pressure < lower_limit:
+ bb.fatal("Invalid BB_PRESSURE_MAX_CPU %s, minimum value is %s." % (self.max_cpu_pressure, lower_limit))
+ if self.max_cpu_pressure > upper_limit:
+ bb.warn("Your build will be largely unregulated since BB_PRESSURE_MAX_CPU is set to %s. It is very unlikely that such high pressure will be experienced." % (self.max_cpu_pressure))
+
+ if self.max_io_pressure:
+ self.max_io_pressure = float(self.max_io_pressure)
+ if self.max_io_pressure < lower_limit:
+ bb.fatal("Invalid BB_PRESSURE_MAX_IO %s, minimum value is %s." % (self.max_io_pressure, lower_limit))
+ if self.max_io_pressure > upper_limit:
+ bb.warn("Your build will be largely unregulated since BB_PRESSURE_MAX_IO is set to %s. It is very unlikely that such high pressure will be experienced." % (self.max_io_pressure))
+
+ if self.max_memory_pressure:
+ self.max_memory_pressure = float(self.max_memory_pressure)
+ if self.max_memory_pressure < lower_limit:
+ bb.fatal("Invalid BB_PRESSURE_MAX_MEMORY %s, minimum value is %s." % (self.max_memory_pressure, lower_limit))
+ if self.max_memory_pressure > upper_limit:
+ bb.warn("Your build will be largely unregulated since BB_PRESSURE_MAX_MEMORY is set to %s. It is very unlikely that such high pressure will be experienced." % (self.max_io_pressure))
+
+ if self.max_loadfactor:
+ self.max_loadfactor = float(self.max_loadfactor)
+ if self.max_loadfactor <= 0:
+ bb.fatal("Invalid BB_LOADFACTOR_MAX %s, needs to be greater than zero." % (self.max_loadfactor))
+
# List of setscene tasks which we've covered
self.scenequeue_covered = set()
# List of tasks which are covered (including setscene ones)
@@ -1742,11 +1901,6 @@ class RunQueueExecute:
self.tasks_notcovered = set()
self.scenequeue_notneeded = set()
- # We can't skip specified target tasks which aren't setscene tasks
- self.cantskip = set(self.rqdata.target_tids)
- self.cantskip.difference_update(self.rqdata.runq_setscene_tids)
- self.cantskip.intersection_update(self.rqdata.runtaskentries)
-
schedulers = self.get_schedulers()
for scheduler in schedulers:
if self.scheduler == scheduler.name:
@@ -1759,7 +1913,25 @@ class RunQueueExecute:
#if self.rqdata.runq_setscene_tids:
self.sqdata = SQData()
- build_scenequeue_data(self.sqdata, self.rqdata, self.rq, self.cooker, self.stampcache, self)
+ build_scenequeue_data(self.sqdata, self.rqdata, self)
+
+ update_scenequeue_data(self.sqdata.sq_revdeps, self.sqdata, self.rqdata, self.rq, self.cooker, self.stampcache, self, summary=True)
+
+ # Compute a list of 'stale' sstate tasks where the current hash does not match the one
+ # in any stamp files. Pass the list out to metadata as an event.
+ found = {}
+ for tid in self.rqdata.runq_setscene_tids:
+ (mc, fn, taskname, taskfn) = split_tid_mcfn(tid)
+ stamps = bb.build.find_stale_stamps(taskname, taskfn)
+ if stamps:
+ if mc not in found:
+ found[mc] = {}
+ found[mc][tid] = stamps
+ for mc in found:
+ event = bb.event.StaleSetSceneTasks(found[mc])
+ bb.event.fire(event, self.cooker.databuilder.mcdata[mc])
+
+ self.build_taskdepdata_cache()
def runqueue_process_waitpid(self, task, status, fakerootlog=None):
@@ -1785,14 +1957,14 @@ class RunQueueExecute:
def finish_now(self):
for mc in self.rq.worker:
try:
- self.rq.worker[mc].process.stdin.write(b"<finishnow></finishnow>")
+ RunQueue.send_pickled_data(self.rq.worker[mc].process, b"", "finishnow")
self.rq.worker[mc].process.stdin.flush()
except IOError:
# worker must have died?
pass
for mc in self.rq.fakeworker:
try:
- self.rq.fakeworker[mc].process.stdin.write(b"<finishnow></finishnow>")
+ RunQueue.send_pickled_data(self.rq.fakeworker[mc].process, b"", "finishnow")
self.rq.fakeworker[mc].process.stdin.flush()
except IOError:
# worker must have died?
@@ -1861,8 +2033,7 @@ class RunQueueExecute:
try:
module = __import__(modname, fromlist=(name,))
except ImportError as exc:
- logger.critical("Unable to import scheduler '%s' from '%s': %s" % (name, modname, exc))
- raise SystemExit(1)
+ bb.fatal("Unable to import scheduler '%s' from '%s': %s" % (name, modname, exc))
else:
schedulers.add(getattr(module, name))
return schedulers
@@ -1892,11 +2063,19 @@ class RunQueueExecute:
self.setbuildable(revdep)
logger.debug("Marking task %s as buildable", revdep)
- for t in self.sq_deferred.copy():
+ found = None
+ for t in sorted(self.sq_deferred.copy()):
if self.sq_deferred[t] == task:
- logger.debug2("Deferred task %s now buildable" % t)
- del self.sq_deferred[t]
- update_scenequeue_data([t], self.sqdata, self.rqdata, self.rq, self.cooker, self.stampcache, self, summary=False)
+ # Allow the next deferred task to run. Any other deferred tasks should be deferred after that task.
+ # We shouldn't allow all to run at once as it is prone to races.
+ if not found:
+ bb.debug(1, "Deferred task %s now buildable" % t)
+ del self.sq_deferred[t]
+ update_scenequeue_data([t], self.sqdata, self.rqdata, self.rq, self.cooker, self.stampcache, self, summary=False)
+ found = t
+ else:
+ bb.debug(1, "Deferring %s after %s" % (t, found))
+ self.sq_deferred[t] = found
def task_complete(self, task):
self.stats.taskCompleted()
@@ -2001,8 +2180,11 @@ class RunQueueExecute:
if not self.sqdone and self.can_start_task():
# Find the next setscene to run
for nexttask in self.sorted_setscene_tids:
- if nexttask in self.sq_buildable and nexttask not in self.sq_running and self.sqdata.stamps[nexttask] not in self.build_stamps.values():
- if nexttask not in self.sqdata.unskippable and self.sqdata.sq_revdeps[nexttask] and self.sqdata.sq_revdeps[nexttask].issubset(self.scenequeue_covered) and self.check_dependencies(nexttask, self.sqdata.sq_revdeps[nexttask]):
+ if nexttask in self.sq_buildable and nexttask not in self.sq_running and self.sqdata.stamps[nexttask] not in self.build_stamps.values() and nexttask not in self.sq_harddep_deferred:
+ if nexttask not in self.sqdata.unskippable and self.sqdata.sq_revdeps[nexttask] and \
+ nexttask not in self.sq_needed_harddeps and \
+ self.sqdata.sq_revdeps[nexttask].issubset(self.scenequeue_covered) and \
+ self.check_dependencies(nexttask, self.sqdata.sq_revdeps[nexttask]):
if nexttask not in self.rqdata.target_tids:
logger.debug2("Skipping setscene for task %s" % nexttask)
self.sq_task_skip(nexttask)
@@ -2010,6 +2192,19 @@ class RunQueueExecute:
if nexttask in self.sq_deferred:
del self.sq_deferred[nexttask]
return True
+ if nexttask in self.sqdata.sq_harddeps_rev and not self.sqdata.sq_harddeps_rev[nexttask].issubset(self.scenequeue_covered | self.scenequeue_notcovered):
+ logger.debug2("Deferring %s due to hard dependencies" % nexttask)
+ updated = False
+ for dep in self.sqdata.sq_harddeps_rev[nexttask]:
+ if dep not in self.sq_needed_harddeps:
+ logger.debug2("Enabling task %s as it is a hard dependency" % dep)
+ self.sq_buildable.add(dep)
+ self.sq_needed_harddeps.add(dep)
+ updated = True
+ self.sq_harddep_deferred.add(nexttask)
+ if updated:
+ return True
+ continue
# If covered tasks are running, need to wait for them to complete
for t in self.sqdata.sq_covered_tasks[nexttask]:
if t in self.runq_running and t not in self.runq_complete:
@@ -2058,21 +2253,35 @@ class RunQueueExecute:
startevent = sceneQueueTaskStarted(task, self.stats, self.rq)
bb.event.fire(startevent, self.cfgData)
- taskdepdata = self.sq_build_taskdepdata(task)
-
taskdep = self.rqdata.dataCaches[mc].task_deps[taskfn]
- taskhash = self.rqdata.get_task_hash(task)
- unihash = self.rqdata.get_task_unihash(task)
+ realfn = bb.cache.virtualfn2realfn(taskfn)[0]
+ runtask = {
+ 'fn' : taskfn,
+ 'task' : task,
+ 'taskname' : taskname,
+ 'taskhash' : self.rqdata.get_task_hash(task),
+ 'unihash' : self.rqdata.get_task_unihash(task),
+ 'quieterrors' : True,
+ 'appends' : self.cooker.collections[mc].get_file_appends(taskfn),
+ 'layername' : self.cooker.collections[mc].calc_bbfile_priority(realfn)[2],
+ 'taskdepdata' : self.sq_build_taskdepdata(task),
+ 'dry_run' : False,
+ 'taskdep': taskdep,
+ 'fakerootenv' : self.rqdata.dataCaches[mc].fakerootenv[taskfn],
+ 'fakerootdirs' : self.rqdata.dataCaches[mc].fakerootdirs[taskfn],
+ 'fakerootnoenv' : self.rqdata.dataCaches[mc].fakerootnoenv[taskfn]
+ }
+
if 'fakeroot' in taskdep and taskname in taskdep['fakeroot'] and not self.cooker.configuration.dry_run:
if not mc in self.rq.fakeworker:
self.rq.start_fakeworker(self, mc)
- self.rq.fakeworker[mc].process.stdin.write(b"<runtask>" + pickle.dumps((taskfn, task, taskname, taskhash, unihash, True, self.cooker.collections[mc].get_file_appends(taskfn), taskdepdata, False)) + b"</runtask>")
+ RunQueue.send_pickled_data(self.rq.fakeworker[mc].process, runtask, "runtask")
self.rq.fakeworker[mc].process.stdin.flush()
else:
- self.rq.worker[mc].process.stdin.write(b"<runtask>" + pickle.dumps((taskfn, task, taskname, taskhash, unihash, True, self.cooker.collections[mc].get_file_appends(taskfn), taskdepdata, False)) + b"</runtask>")
+ RunQueue.send_pickled_data(self.rq.worker[mc].process, runtask, "runtask")
self.rq.worker[mc].process.stdin.flush()
- self.build_stamps[task] = bb.build.stampfile(taskname, self.rqdata.dataCaches[mc], taskfn, noextra=True)
+ self.build_stamps[task] = bb.parse.siggen.stampfile_mcfn(taskname, taskfn, extrainfo=False)
self.build_stamps2.append(self.build_stamps[task])
self.sq_running.add(task)
self.sq_live.add(task)
@@ -2132,18 +2341,32 @@ class RunQueueExecute:
self.runq_running.add(task)
self.stats.taskActive()
if not (self.cooker.configuration.dry_run or self.rqdata.setscene_enforce):
- bb.build.make_stamp(taskname, self.rqdata.dataCaches[mc], taskfn)
+ bb.build.make_stamp_mcfn(taskname, taskfn)
self.task_complete(task)
return True
else:
startevent = runQueueTaskStarted(task, self.stats, self.rq)
bb.event.fire(startevent, self.cfgData)
- taskdepdata = self.build_taskdepdata(task)
-
taskdep = self.rqdata.dataCaches[mc].task_deps[taskfn]
- taskhash = self.rqdata.get_task_hash(task)
- unihash = self.rqdata.get_task_unihash(task)
+ realfn = bb.cache.virtualfn2realfn(taskfn)[0]
+ runtask = {
+ 'fn' : taskfn,
+ 'task' : task,
+ 'taskname' : taskname,
+ 'taskhash' : self.rqdata.get_task_hash(task),
+ 'unihash' : self.rqdata.get_task_unihash(task),
+ 'quieterrors' : False,
+ 'appends' : self.cooker.collections[mc].get_file_appends(taskfn),
+ 'layername' : self.cooker.collections[mc].calc_bbfile_priority(realfn)[2],
+ 'taskdepdata' : self.build_taskdepdata(task),
+ 'dry_run' : self.rqdata.setscene_enforce,
+ 'taskdep': taskdep,
+ 'fakerootenv' : self.rqdata.dataCaches[mc].fakerootenv[taskfn],
+ 'fakerootdirs' : self.rqdata.dataCaches[mc].fakerootdirs[taskfn],
+ 'fakerootnoenv' : self.rqdata.dataCaches[mc].fakerootnoenv[taskfn]
+ }
+
if 'fakeroot' in taskdep and taskname in taskdep['fakeroot'] and not (self.cooker.configuration.dry_run or self.rqdata.setscene_enforce):
if not mc in self.rq.fakeworker:
try:
@@ -2153,13 +2376,13 @@ class RunQueueExecute:
self.rq.state = runQueueFailed
self.stats.taskFailed()
return True
- self.rq.fakeworker[mc].process.stdin.write(b"<runtask>" + pickle.dumps((taskfn, task, taskname, taskhash, unihash, False, self.cooker.collections[mc].get_file_appends(taskfn), taskdepdata, self.rqdata.setscene_enforce)) + b"</runtask>")
+ RunQueue.send_pickled_data(self.rq.fakeworker[mc].process, runtask, "runtask")
self.rq.fakeworker[mc].process.stdin.flush()
else:
- self.rq.worker[mc].process.stdin.write(b"<runtask>" + pickle.dumps((taskfn, task, taskname, taskhash, unihash, False, self.cooker.collections[mc].get_file_appends(taskfn), taskdepdata, self.rqdata.setscene_enforce)) + b"</runtask>")
+ RunQueue.send_pickled_data(self.rq.worker[mc].process, runtask, "runtask")
self.rq.worker[mc].process.stdin.flush()
- self.build_stamps[task] = bb.build.stampfile(taskname, self.rqdata.dataCaches[mc], taskfn, noextra=True)
+ self.build_stamps[task] = bb.parse.siggen.stampfile_mcfn(taskname, taskfn, extrainfo=False)
self.build_stamps2.append(self.build_stamps[task])
self.runq_running.add(task)
self.stats.taskActive()
@@ -2172,10 +2395,9 @@ class RunQueueExecute:
# No more tasks can be run. If we have deferred setscene tasks we should run them.
if self.sq_deferred:
- tid = self.sq_deferred.pop(list(self.sq_deferred.keys())[0])
- logger.warning("Runqeueue deadlocked on deferred tasks, forcing task %s" % tid)
- if tid not in self.runq_complete:
- self.sq_task_failoutright(tid)
+ deferred_tid = list(self.sq_deferred.keys())[0]
+ blocking_tid = self.sq_deferred.pop(deferred_tid)
+ logger.warning("Runqueue deadlocked on deferred tasks, forcing task %s blocked by %s" % (deferred_tid, blocking_tid))
return True
if self.failed_tids:
@@ -2211,6 +2433,22 @@ class RunQueueExecute:
ret.add(dep)
return ret
+ # Build the individual cache entries in advance once to save time
+ def build_taskdepdata_cache(self):
+ taskdepdata_cache = {}
+ for task in self.rqdata.runtaskentries:
+ (mc, fn, taskname, taskfn) = split_tid_mcfn(task)
+ pn = self.rqdata.dataCaches[mc].pkg_fn[taskfn]
+ deps = self.rqdata.runtaskentries[task].depends
+ provides = self.rqdata.dataCaches[mc].fn_provides[taskfn]
+ taskhash = self.rqdata.runtaskentries[task].hash
+ unihash = self.rqdata.runtaskentries[task].unihash
+ deps = self.filtermcdeps(task, mc, deps)
+ hashfn = self.rqdata.dataCaches[mc].hashfn[taskfn]
+ taskdepdata_cache[task] = [pn, taskname, fn, deps, provides, taskhash, unihash, hashfn]
+
+ self.taskdepdata_cache = taskdepdata_cache
+
# We filter out multiconfig dependencies from taskdepdata we pass to the tasks
# as most code can't handle them
def build_taskdepdata(self, task):
@@ -2222,15 +2460,9 @@ class RunQueueExecute:
while next:
additional = []
for revdep in next:
- (mc, fn, taskname, taskfn) = split_tid_mcfn(revdep)
- pn = self.rqdata.dataCaches[mc].pkg_fn[taskfn]
- deps = self.rqdata.runtaskentries[revdep].depends
- provides = self.rqdata.dataCaches[mc].fn_provides[taskfn]
- taskhash = self.rqdata.runtaskentries[revdep].hash
- unihash = self.rqdata.runtaskentries[revdep].unihash
- deps = self.filtermcdeps(task, mc, deps)
- taskdepdata[revdep] = [pn, taskname, fn, deps, provides, taskhash, unihash]
- for revdep2 in deps:
+ self.taskdepdata_cache[revdep][6] = self.rqdata.runtaskentries[revdep].unihash
+ taskdepdata[revdep] = self.taskdepdata_cache[revdep]
+ for revdep2 in self.taskdepdata_cache[revdep][3]:
if revdep2 not in taskdepdata:
additional.append(revdep2)
next = additional
@@ -2244,7 +2476,7 @@ class RunQueueExecute:
return
notcovered = set(self.scenequeue_notcovered)
- notcovered |= self.cantskip
+ notcovered |= self.sqdata.cantskip
for tid in self.scenequeue_notcovered:
notcovered |= self.sqdata.sq_covered_tasks[tid]
notcovered |= self.sqdata.unskippable.difference(self.rqdata.runq_setscene_tids)
@@ -2299,6 +2531,9 @@ class RunQueueExecute:
self.rqdata.runtaskentries[hashtid].unihash = unihash
bb.parse.siggen.set_unihash(hashtid, unihash)
toprocess.add(hashtid)
+ if torehash:
+ # Need to save after set_unihash above
+ bb.parse.siggen.save_unitaskhashes()
# Work out all tasks which depend upon these
total = set()
@@ -2329,8 +2564,7 @@ class RunQueueExecute:
if self.rqdata.runtaskentries[p].depends and not self.rqdata.runtaskentries[tid].depends.isdisjoint(total):
continue
orighash = self.rqdata.runtaskentries[tid].hash
- dc = bb.parse.siggen.get_data_caches(self.rqdata.dataCaches, mc_from_tid(tid))
- newhash = bb.parse.siggen.get_taskhash(tid, self.rqdata.runtaskentries[tid].depends, dc)
+ newhash = bb.parse.siggen.get_taskhash(tid, self.rqdata.runtaskentries[tid].depends, self.rqdata.dataCaches)
origuni = self.rqdata.runtaskentries[tid].unihash
newuni = bb.parse.siggen.get_unihash(tid)
# FIXME, need to check it can come from sstate at all for determinism?
@@ -2356,9 +2590,9 @@ class RunQueueExecute:
if changed:
for mc in self.rq.worker:
- self.rq.worker[mc].process.stdin.write(b"<newtaskhashes>" + pickle.dumps(bb.parse.siggen.get_taskhashes()) + b"</newtaskhashes>")
+ RunQueue.send_pickled_data(self.rq.worker[mc].process, bb.parse.siggen.get_taskhashes(), "newtaskhashes")
for mc in self.rq.fakeworker:
- self.rq.fakeworker[mc].process.stdin.write(b"<newtaskhashes>" + pickle.dumps(bb.parse.siggen.get_taskhashes()) + b"</newtaskhashes>")
+ RunQueue.send_pickled_data(self.rq.fakeworker[mc].process, bb.parse.siggen.get_taskhashes(), "newtaskhashes")
hashequiv_logger.debug(pprint.pformat("Tasks changed:\n%s" % (changed)))
@@ -2405,17 +2639,6 @@ class RunQueueExecute:
self.sq_buildable.remove(tid)
if tid in self.sq_running:
self.sq_running.remove(tid)
- harddepfail = False
- for t in self.sqdata.sq_harddeps:
- if tid in self.sqdata.sq_harddeps[t] and t in self.scenequeue_notcovered:
- harddepfail = True
- break
- if not harddepfail and self.sqdata.sq_revdeps[tid].issubset(self.scenequeue_covered | self.scenequeue_notcovered):
- if tid not in self.sq_buildable:
- self.sq_buildable.add(tid)
- if not self.sqdata.sq_revdeps[tid]:
- self.sq_buildable.add(tid)
-
if tid in self.sqdata.outrightfail:
self.sqdata.outrightfail.remove(tid)
if tid in self.scenequeue_notcovered:
@@ -2426,7 +2649,7 @@ class RunQueueExecute:
self.scenequeue_notneeded.remove(tid)
(mc, fn, taskname, taskfn) = split_tid_mcfn(tid)
- self.sqdata.stamps[tid] = bb.build.stampfile(taskname + "_setscene", self.rqdata.dataCaches[mc], taskfn, noextra=True)
+ self.sqdata.stamps[tid] = bb.parse.siggen.stampfile_mcfn(taskname, taskfn, extrainfo=False)
if tid in self.stampcache:
del self.stampcache[tid]
@@ -2434,31 +2657,52 @@ class RunQueueExecute:
if tid in self.build_stamps:
del self.build_stamps[tid]
- update_tasks.append((tid, harddepfail, tid in self.sqdata.valid))
+ update_tasks.append(tid)
- if update_tasks:
+ update_tasks2 = []
+ for tid in update_tasks:
+ harddepfail = False
+ for t in self.sqdata.sq_harddeps_rev[tid]:
+ if t in self.scenequeue_notcovered:
+ harddepfail = True
+ break
+ if not harddepfail and self.sqdata.sq_revdeps[tid].issubset(self.scenequeue_covered | self.scenequeue_notcovered):
+ if tid not in self.sq_buildable:
+ self.sq_buildable.add(tid)
+ if not self.sqdata.sq_revdeps[tid]:
+ self.sq_buildable.add(tid)
+
+ update_tasks2.append((tid, harddepfail, tid in self.sqdata.valid))
+
+ if update_tasks2:
self.sqdone = False
- for tid in [t[0] for t in update_tasks]:
- h = pending_hash_index(tid, self.rqdata)
- if h in self.sqdata.hashes and tid != self.sqdata.hashes[h]:
- self.sq_deferred[tid] = self.sqdata.hashes[h]
- bb.note("Deferring %s after %s" % (tid, self.sqdata.hashes[h]))
- update_scenequeue_data([t[0] for t in update_tasks], self.sqdata, self.rqdata, self.rq, self.cooker, self.stampcache, self, summary=False)
-
- for (tid, harddepfail, origvalid) in update_tasks:
+ for mc in sorted(self.sqdata.multiconfigs):
+ for tid in sorted([t[0] for t in update_tasks2]):
+ if mc_from_tid(tid) != mc:
+ continue
+ h = pending_hash_index(tid, self.rqdata)
+ if h in self.sqdata.hashes and tid != self.sqdata.hashes[h]:
+ self.sq_deferred[tid] = self.sqdata.hashes[h]
+ bb.note("Deferring %s after %s" % (tid, self.sqdata.hashes[h]))
+ update_scenequeue_data([t[0] for t in update_tasks2], self.sqdata, self.rqdata, self.rq, self.cooker, self.stampcache, self, summary=False)
+
+ for (tid, harddepfail, origvalid) in update_tasks2:
if tid in self.sqdata.valid and not origvalid:
hashequiv_logger.verbose("Setscene task %s became valid" % tid)
if harddepfail:
+ logger.debug2("%s has an unavailable hard dependency so skipping" % (tid))
self.sq_task_failoutright(tid)
if changed:
self.stats.updateCovered(len(self.scenequeue_covered), len(self.scenequeue_notcovered))
+ self.sq_needed_harddeps = set()
+ self.sq_harddep_deferred = set()
self.holdoff_need_update = True
def scenequeue_updatecounters(self, task, fail=False):
- for dep in sorted(self.sqdata.sq_deps[task]):
- if fail and task in self.sqdata.sq_harddeps and dep in self.sqdata.sq_harddeps[task]:
+ if fail and task in self.sqdata.sq_harddeps:
+ for dep in sorted(self.sqdata.sq_harddeps[task]):
if dep in self.scenequeue_covered or dep in self.scenequeue_notcovered:
# dependency could be already processed, e.g. noexec setscene task
continue
@@ -2468,6 +2712,7 @@ class RunQueueExecute:
logger.debug2("%s was unavailable and is a hard dependency of %s so skipping" % (task, dep))
self.sq_task_failoutright(dep)
continue
+ for dep in sorted(self.sqdata.sq_deps[task]):
if self.sqdata.sq_revdeps[dep].issubset(self.scenequeue_covered | self.scenequeue_notcovered):
if dep not in self.sq_buildable:
self.sq_buildable.add(dep)
@@ -2486,6 +2731,13 @@ class RunQueueExecute:
new.add(dep)
next = new
+ # If this task was one which other setscene tasks have a hard dependency upon, we need
+ # to walk through the hard dependencies and allow execution of those which have completed dependencies.
+ if task in self.sqdata.sq_harddeps:
+ for dep in self.sq_harddep_deferred.copy():
+ if self.sqdata.sq_harddeps_rev[dep].issubset(self.scenequeue_covered | self.scenequeue_notcovered):
+ self.sq_harddep_deferred.remove(dep)
+
self.stats.updateCovered(len(self.scenequeue_covered), len(self.scenequeue_notcovered))
self.holdoff_need_update = True
@@ -2559,7 +2811,8 @@ class RunQueueExecute:
provides = self.rqdata.dataCaches[mc].fn_provides[taskfn]
taskhash = self.rqdata.runtaskentries[revdep].hash
unihash = self.rqdata.runtaskentries[revdep].unihash
- taskdepdata[revdep] = [pn, taskname, fn, deps, provides, taskhash, unihash]
+ hashfn = self.rqdata.dataCaches[mc].hashfn[taskfn]
+ taskdepdata[revdep] = [pn, taskname, fn, deps, provides, taskhash, unihash, hashfn]
for revdep2 in deps:
if revdep2 not in taskdepdata:
additional.append(revdep2)
@@ -2603,6 +2856,7 @@ class SQData(object):
self.sq_revdeps = {}
# Injected inter-setscene task dependencies
self.sq_harddeps = {}
+ self.sq_harddeps_rev = {}
# Cache of stamp files so duplicates can't run in parallel
self.stamps = {}
# Setscene tasks directly depended upon by the build
@@ -2612,12 +2866,17 @@ class SQData(object):
# A list of normal tasks a setscene task covers
self.sq_covered_tasks = {}
-def build_scenequeue_data(sqdata, rqdata, rq, cooker, stampcache, sqrq):
+def build_scenequeue_data(sqdata, rqdata, sqrq):
sq_revdeps = {}
sq_revdeps_squash = {}
sq_collated_deps = {}
+ # We can't skip specified target tasks which aren't setscene tasks
+ sqdata.cantskip = set(rqdata.target_tids)
+ sqdata.cantskip.difference_update(rqdata.runq_setscene_tids)
+ sqdata.cantskip.intersection_update(rqdata.runtaskentries)
+
# We need to construct a dependency graph for the setscene functions. Intermediate
# dependencies between the setscene tasks only complicate the code. This code
# therefore aims to collapse the huge runqueue dependency tree into a smaller one
@@ -2686,7 +2945,7 @@ def build_scenequeue_data(sqdata, rqdata, rq, cooker, stampcache, sqrq):
for tid in rqdata.runtaskentries:
if not rqdata.runtaskentries[tid].revdeps:
sqdata.unskippable.add(tid)
- sqdata.unskippable |= sqrq.cantskip
+ sqdata.unskippable |= sqdata.cantskip
while new:
new = False
orig = sqdata.unskippable.copy()
@@ -2723,7 +2982,9 @@ def build_scenequeue_data(sqdata, rqdata, rq, cooker, stampcache, sqrq):
(mc, fn, taskname, taskfn) = split_tid_mcfn(tid)
realtid = tid + "_setscene"
idepends = rqdata.taskData[mc].taskentries[realtid].idepends
- sqdata.stamps[tid] = bb.build.stampfile(taskname + "_setscene", rqdata.dataCaches[mc], taskfn, noextra=True)
+ sqdata.stamps[tid] = bb.parse.siggen.stampfile_mcfn(taskname, taskfn, extrainfo=False)
+
+ sqdata.sq_harddeps_rev[tid] = set()
for (depname, idependtask) in idepends:
if depname not in rqdata.taskData[mc].build_targets:
@@ -2736,20 +2997,15 @@ def build_scenequeue_data(sqdata, rqdata, rq, cooker, stampcache, sqrq):
if deptid not in rqdata.runtaskentries:
bb.msg.fatal("RunQueue", "Task %s depends upon non-existent task %s:%s" % (realtid, depfn, idependtask))
+ logger.debug2("Adding hard setscene dependency %s for %s" % (deptid, tid))
+
if not deptid in sqdata.sq_harddeps:
sqdata.sq_harddeps[deptid] = set()
sqdata.sq_harddeps[deptid].add(tid)
-
- sq_revdeps_squash[tid].add(deptid)
- # Have to zero this to avoid circular dependencies
- sq_revdeps_squash[deptid] = set()
+ sqdata.sq_harddeps_rev[tid].add(deptid)
rqdata.init_progress_reporter.next_stage()
- for task in sqdata.sq_harddeps:
- for dep in sqdata.sq_harddeps[task]:
- sq_revdeps_squash[dep].add(task)
-
rqdata.init_progress_reporter.next_stage()
#for tid in sq_revdeps_squash:
@@ -2776,7 +3032,7 @@ def build_scenequeue_data(sqdata, rqdata, rq, cooker, stampcache, sqrq):
if not sqdata.sq_revdeps[tid]:
sqrq.sq_buildable.add(tid)
- rqdata.init_progress_reporter.finish()
+ rqdata.init_progress_reporter.next_stage()
sqdata.noexec = set()
sqdata.stamppresent = set()
@@ -2793,23 +3049,7 @@ def build_scenequeue_data(sqdata, rqdata, rq, cooker, stampcache, sqrq):
sqdata.hashes[h] = tid
else:
sqrq.sq_deferred[tid] = sqdata.hashes[h]
- bb.note("Deferring %s after %s" % (tid, sqdata.hashes[h]))
-
- update_scenequeue_data(sqdata.sq_revdeps, sqdata, rqdata, rq, cooker, stampcache, sqrq, summary=True)
-
- # Compute a list of 'stale' sstate tasks where the current hash does not match the one
- # in any stamp files. Pass the list out to metadata as an event.
- found = {}
- for tid in rqdata.runq_setscene_tids:
- (mc, fn, taskname, taskfn) = split_tid_mcfn(tid)
- stamps = bb.build.find_stale_stamps(taskname, rqdata.dataCaches[mc], taskfn)
- if stamps:
- if mc not in found:
- found[mc] = {}
- found[mc][tid] = stamps
- for mc in found:
- event = bb.event.StaleSetSceneTasks(found[mc])
- bb.event.fire(event, cooker.databuilder.mcdata[mc])
+ bb.debug(1, "Deferring %s after %s" % (tid, sqdata.hashes[h]))
def check_setscene_stamps(tid, rqdata, rq, stampcache, noexecstamp=False):
@@ -2818,7 +3058,7 @@ def check_setscene_stamps(tid, rqdata, rq, stampcache, noexecstamp=False):
taskdep = rqdata.dataCaches[mc].task_deps[taskfn]
if 'noexec' in taskdep and taskname in taskdep['noexec']:
- bb.build.make_stamp(taskname + "_setscene", rqdata.dataCaches[mc], taskfn)
+ bb.build.make_stamp_mcfn(taskname + "_setscene", taskfn)
return True, False
if rq.check_stamp_task(tid, taskname + "_setscene", cache=stampcache):
@@ -2848,11 +3088,13 @@ def update_scenequeue_data(tids, sqdata, rqdata, rq, cooker, stampcache, sqrq, s
if noexec:
sqdata.noexec.add(tid)
sqrq.sq_task_skip(tid)
+ logger.debug2("%s is noexec so skipping setscene" % (tid))
continue
if stamppresent:
sqdata.stamppresent.add(tid)
sqrq.sq_task_skip(tid)
+ logger.debug2("%s has a valid stamp, skipping" % (tid))
continue
tocheck.add(tid)
@@ -2873,6 +3115,7 @@ def update_scenequeue_data(tids, sqdata, rqdata, rq, cooker, stampcache, sqrq, s
if tid in sqrq.sq_deferred:
continue
sqdata.outrightfail.add(tid)
+ logger.debug2("%s already handled (fallthrough), skipping" % (tid))
class TaskFailure(Exception):
"""
@@ -3002,15 +3245,12 @@ class runQueuePipe():
if pipeout:
pipeout.close()
bb.utils.nonblockingfd(self.input)
- self.queue = b""
+ self.queue = bytearray()
self.d = d
self.rq = rq
self.rqexec = rqexec
self.fakerootlogs = fakerootlogs
- def setrunqueueexec(self, rqexec):
- self.rqexec = rqexec
-
def read(self):
for workers, name in [(self.rq.worker, "Worker"), (self.rq.fakeworker, "Fakeroot")]:
for worker in workers.values():
@@ -3021,7 +3261,7 @@ class runQueuePipe():
start = len(self.queue)
try:
- self.queue = self.queue + (self.input.read(102400) or b"")
+ self.queue.extend(self.input.read(102400) or b"")
except (OSError, IOError) as e:
if e.errno != errno.EAGAIN:
raise
diff --git a/bitbake/lib/bb/server/process.py b/bitbake/lib/bb/server/process.py
index 613956f30f..76b189291d 100644
--- a/bitbake/lib/bb/server/process.py
+++ b/bitbake/lib/bb/server/process.py
@@ -28,6 +28,7 @@ import datetime
import pickle
import traceback
import gc
+import stat
import bb.server.xmlrpcserver
from bb import daemonize
from multiprocessing import queues
@@ -37,9 +38,46 @@ logger = logging.getLogger('BitBake')
class ProcessTimeout(SystemExit):
pass
+def currenttime():
+ return datetime.datetime.now().strftime('%H:%M:%S.%f')
+
def serverlog(msg):
- print(str(os.getpid()) + " " + datetime.datetime.now().strftime('%H:%M:%S.%f') + " " + msg)
- sys.stdout.flush()
+ print(str(os.getpid()) + " " + currenttime() + " " + msg)
+ #Seems a flush here triggers filesytem sync like behaviour and long hangs in the server
+ #sys.stdout.flush()
+
+#
+# When we have lockfile issues, try and find infomation about which process is
+# using the lockfile
+#
+def get_lockfile_process_msg(lockfile):
+ # Some systems may not have lsof available
+ procs = None
+ try:
+ procs = subprocess.check_output(["lsof", '-w', lockfile], stderr=subprocess.STDOUT)
+ except subprocess.CalledProcessError:
+ # File was deleted?
+ pass
+ except OSError as e:
+ if e.errno != errno.ENOENT:
+ raise
+ if procs is None:
+ # Fall back to fuser if lsof is unavailable
+ try:
+ procs = subprocess.check_output(["fuser", '-v', lockfile], stderr=subprocess.STDOUT)
+ except subprocess.CalledProcessError:
+ # File was deleted?
+ pass
+ except OSError as e:
+ if e.errno != errno.ENOENT:
+ raise
+ if procs:
+ return procs.decode("utf-8")
+ return None
+
+class idleFinish():
+ def __init__(self, msg):
+ self.msg = msg
class ProcessServer():
profile_filename = "profile.log"
@@ -58,12 +96,19 @@ class ProcessServer():
self.maxuiwait = 30
self.xmlrpc = False
+ self.idle = None
+ # Need a lock for _idlefuns changes
self._idlefuns = {}
+ self._idlefuncsLock = threading.Lock()
+ self.idle_cond = threading.Condition(self._idlefuncsLock)
self.bitbake_lock = lock
self.bitbake_lock_name = lockname
self.sock = sock
self.sockname = sockname
+ # It is possible the directory may be renamed. Cache the inode of the socket file
+ # so we can tell if things changed.
+ self.sockinode = os.stat(self.sockname)[stat.ST_INO]
self.server_timeout = server_timeout
self.timeout = self.server_timeout
@@ -72,7 +117,9 @@ class ProcessServer():
def register_idle_function(self, function, data):
"""Register a function to be called while the server is idle"""
assert hasattr(function, '__call__')
- self._idlefuns[function] = data
+ with bb.utils.lock_timeout(self._idlefuncsLock):
+ self._idlefuns[function] = data
+ serverlog("Registering idle function %s" % str(function))
def run(self):
@@ -111,6 +158,31 @@ class ProcessServer():
return ret
+ def _idle_check(self):
+ return len(self._idlefuns) == 0 and self.cooker.command.currentAsyncCommand is None
+
+ def wait_for_idle(self, timeout=30):
+ # Wait for the idle loop to have cleared
+ with bb.utils.lock_timeout(self._idlefuncsLock):
+ return self.idle_cond.wait_for(self._idle_check, timeout) is not False
+
+ def set_async_cmd(self, cmd):
+ with bb.utils.lock_timeout(self._idlefuncsLock):
+ ret = self.idle_cond.wait_for(self._idle_check, 30)
+ if ret is False:
+ return False
+ self.cooker.command.currentAsyncCommand = cmd
+ return True
+
+ def clear_async_cmd(self):
+ with bb.utils.lock_timeout(self._idlefuncsLock):
+ self.cooker.command.currentAsyncCommand = None
+ self.idle_cond.notify_all()
+
+ def get_async_cmd(self):
+ with bb.utils.lock_timeout(self._idlefuncsLock):
+ return self.cooker.command.currentAsyncCommand
+
def main(self):
self.cooker.pre_serve()
@@ -125,14 +197,19 @@ class ProcessServer():
fds.append(self.xmlrpc)
seendata = False
serverlog("Entering server connection loop")
+ serverlog("Lockfile is: %s\nSocket is %s (%s)" % (self.bitbake_lock_name, self.sockname, os.path.exists(self.sockname)))
def disconnect_client(self, fds):
- serverlog("Disconnecting Client")
+ serverlog("Disconnecting Client (socket: %s)" % os.path.exists(self.sockname))
if self.controllersock:
fds.remove(self.controllersock)
self.controllersock.close()
self.controllersock = False
if self.haveui:
+ # Wait for the idle loop to have cleared (30s max)
+ if not self.wait_for_idle(30):
+ serverlog("Idle loop didn't finish queued commands after 30s, exiting.")
+ self.quit = True
fds.remove(self.command_channel)
bb.event.unregister_UIHhandler(self.event_handle, True)
self.command_channel_reply.writer.close()
@@ -144,7 +221,7 @@ class ProcessServer():
self.cooker.clientComplete()
self.haveui = False
ready = select.select(fds,[],[],0)[0]
- if newconnections:
+ if newconnections and not self.quit:
serverlog("Starting new client")
conn = newconnections.pop(-1)
fds.append(conn)
@@ -216,8 +293,10 @@ class ProcessServer():
continue
try:
serverlog("Running command %s" % command)
- self.command_channel_reply.send(self.cooker.command.runCommand(command))
- serverlog("Command Completed")
+ reply = self.cooker.command.runCommand(command, self)
+ serverlog("Sending reply %s" % repr(reply))
+ self.command_channel_reply.send(reply)
+ serverlog("Command Completed (socket: %s)" % os.path.exists(self.sockname))
except Exception as e:
stack = traceback.format_exc()
serverlog('Exception in server main event loop running command %s (%s)' % (command, stack))
@@ -244,16 +323,25 @@ class ProcessServer():
ready = self.idle_commands(.1, fds)
- serverlog("Exiting")
+ if self.idle:
+ self.idle.join()
+
+ serverlog("Exiting (socket: %s)" % os.path.exists(self.sockname))
# Remove the socket file so we don't get any more connections to avoid races
+ # The build directory could have been renamed so if the file isn't the one we created
+ # we shouldn't delete it.
try:
- os.unlink(self.sockname)
- except:
- pass
+ sockinode = os.stat(self.sockname)[stat.ST_INO]
+ if sockinode == self.sockinode:
+ os.unlink(self.sockname)
+ else:
+ serverlog("bitbake.sock inode mismatch (%s vs %s), not deleting." % (sockinode, self.sockinode))
+ except Exception as err:
+ serverlog("Removing socket file '%s' failed (%s)" % (self.sockname, err))
self.sock.close()
try:
- self.cooker.shutdown(True)
+ self.cooker.shutdown(True, idle=False)
self.cooker.notifier.stop()
self.cooker.confignotifier.stop()
except:
@@ -279,20 +367,21 @@ class ProcessServer():
except FileNotFoundError:
return None
- lockcontents = get_lock_contents(lockfile)
- serverlog("Original lockfile contents: " + str(lockcontents))
-
lock.close()
lock = None
while not lock:
i = 0
lock = None
+ if not os.path.exists(os.path.basename(lockfile)):
+ serverlog("Lockfile directory gone, exiting.")
+ return
+
while not lock and i < 30:
lock = bb.utils.lockfile(lockfile, shared=False, retry=False, block=False)
if not lock:
newlockcontents = get_lock_contents(lockfile)
- if newlockcontents != lockcontents:
+ if not newlockcontents[0].startswith([f"{os.getpid()}\n", f"{os.getpid()} "]):
# A new server was started, the lockfile contents changed, we can exit
serverlog("Lockfile now contains different contents, exiting: " + str(newlockcontents))
return
@@ -306,80 +395,108 @@ class ProcessServer():
return
if not lock:
- # Some systems may not have lsof available
- procs = None
- try:
- procs = subprocess.check_output(["lsof", '-w', lockfile], stderr=subprocess.STDOUT)
- except subprocess.CalledProcessError:
- # File was deleted?
- continue
- except OSError as e:
- if e.errno != errno.ENOENT:
- raise
- if procs is None:
- # Fall back to fuser if lsof is unavailable
- try:
- procs = subprocess.check_output(["fuser", '-v', lockfile], stderr=subprocess.STDOUT)
- except subprocess.CalledProcessError:
- # File was deleted?
- continue
- except OSError as e:
- if e.errno != errno.ENOENT:
- raise
-
+ procs = get_lockfile_process_msg(lockfile)
msg = ["Delaying shutdown due to active processes which appear to be holding bitbake.lock"]
if procs:
- msg.append(":\n%s" % str(procs.decode("utf-8")))
+ msg.append(":\n%s" % procs)
serverlog("".join(msg))
- def idle_commands(self, delay, fds=None):
- nextsleep = delay
- if not fds:
- fds = []
-
- for function, data in list(self._idlefuns.items()):
+ def idle_thread(self):
+ if self.cooker.configuration.profile:
try:
- retval = function(self, data, False)
- if retval is False:
- del self._idlefuns[function]
- nextsleep = None
- elif retval is True:
- nextsleep = None
- elif isinstance(retval, float) and nextsleep:
- if (retval < nextsleep):
- nextsleep = retval
- elif nextsleep is None:
- continue
- else:
- fds = fds + retval
- except SystemExit:
- raise
- except Exception as exc:
- if not isinstance(exc, bb.BBHandledException):
- logger.exception('Running idle function')
+ import cProfile as profile
+ except:
+ import profile
+ prof = profile.Profile()
+
+ ret = profile.Profile.runcall(prof, self.idle_thread_internal)
+
+ prof.dump_stats("profile-mainloop.log")
+ bb.utils.process_profilelog("profile-mainloop.log")
+ serverlog("Raw profiling information saved to profile-mainloop.log and processed statistics to profile-mainloop.log.processed")
+ else:
+ self.idle_thread_internal()
+
+ def idle_thread_internal(self):
+ def remove_idle_func(function):
+ with bb.utils.lock_timeout(self._idlefuncsLock):
del self._idlefuns[function]
- self.quit = True
+ self.idle_cond.notify_all()
- # Create new heartbeat event?
- now = time.time()
- if now >= self.next_heartbeat:
- # We might have missed heartbeats. Just trigger once in
- # that case and continue after the usual delay.
- self.next_heartbeat += self.heartbeat_seconds
- if self.next_heartbeat <= now:
- self.next_heartbeat = now + self.heartbeat_seconds
- if hasattr(self.cooker, "data"):
- heartbeat = bb.event.HeartbeatEvent(now)
+ while not self.quit:
+ nextsleep = 0.1
+ fds = []
+
+ with bb.utils.lock_timeout(self._idlefuncsLock):
+ items = list(self._idlefuns.items())
+
+ for function, data in items:
try:
- bb.event.fire(heartbeat, self.cooker.data)
+ retval = function(self, data, False)
+ if isinstance(retval, idleFinish):
+ serverlog("Removing idle function %s at idleFinish" % str(function))
+ remove_idle_func(function)
+ self.cooker.command.finishAsyncCommand(retval.msg)
+ nextsleep = None
+ elif retval is False:
+ serverlog("Removing idle function %s" % str(function))
+ remove_idle_func(function)
+ nextsleep = None
+ elif retval is True:
+ nextsleep = None
+ elif isinstance(retval, float) and nextsleep:
+ if (retval < nextsleep):
+ nextsleep = retval
+ elif nextsleep is None:
+ continue
+ else:
+ fds = fds + retval
+ except SystemExit:
+ raise
except Exception as exc:
if not isinstance(exc, bb.BBHandledException):
- logger.exception('Running heartbeat function')
+ logger.exception('Running idle function')
+ remove_idle_func(function)
+ serverlog("Exception %s broke the idle_thread, exiting" % traceback.format_exc())
self.quit = True
- if nextsleep and now + nextsleep > self.next_heartbeat:
- # Shorten timeout so that we we wake up in time for
- # the heartbeat.
- nextsleep = self.next_heartbeat - now
+
+ # Create new heartbeat event?
+ now = time.time()
+ if bb.event._heartbeat_enabled and now >= self.next_heartbeat:
+ # We might have missed heartbeats. Just trigger once in
+ # that case and continue after the usual delay.
+ self.next_heartbeat += self.heartbeat_seconds
+ if self.next_heartbeat <= now:
+ self.next_heartbeat = now + self.heartbeat_seconds
+ if hasattr(self.cooker, "data"):
+ heartbeat = bb.event.HeartbeatEvent(now)
+ try:
+ bb.event.fire(heartbeat, self.cooker.data)
+ except Exception as exc:
+ if not isinstance(exc, bb.BBHandledException):
+ logger.exception('Running heartbeat function')
+ serverlog("Exception %s broke in idle_thread, exiting" % traceback.format_exc())
+ self.quit = True
+ if nextsleep and bb.event._heartbeat_enabled and now + nextsleep > self.next_heartbeat:
+ # Shorten timeout so that we we wake up in time for
+ # the heartbeat.
+ nextsleep = self.next_heartbeat - now
+
+ if nextsleep is not None:
+ select.select(fds,[],[],nextsleep)[0]
+
+ def idle_commands(self, delay, fds=None):
+ nextsleep = delay
+ if not fds:
+ fds = []
+
+ if not self.idle:
+ self.idle = threading.Thread(target=self.idle_thread)
+ self.idle.start()
+ elif self.idle and not self.idle.is_alive():
+ serverlog("Idle thread terminated, main thread exiting too")
+ bb.error("Idle thread terminated, main thread exiting too")
+ self.quit = True
if nextsleep is not None:
if self.xmlrpc:
@@ -399,12 +516,18 @@ class ServerCommunicator():
self.recv = recv
def runCommand(self, command):
- self.connection.send(command)
+ try:
+ self.connection.send(command)
+ except BrokenPipeError as e:
+ raise BrokenPipeError("bitbake-server might have died or been forcibly stopped, ie. OOM killed") from e
if not self.recv.poll(30):
- logger.info("No reply from server in 30s")
+ logger.info("No reply from server in 30s (for command %s at %s)" % (command[0], currenttime()))
if not self.recv.poll(30):
- raise ProcessTimeout("Timeout while waiting for a reply from the bitbake server (60s)")
- ret, exc = self.recv.get()
+ raise ProcessTimeout("Timeout while waiting for a reply from the bitbake server (60s at %s)" % currenttime())
+ try:
+ ret, exc = self.recv.get()
+ except EOFError as e:
+ raise EOFError("bitbake-server might have died or been forcibly stopped, ie. OOM killed") from e
# Should probably turn all exceptions in exc back into exceptions?
# For now, at least handle BBHandledException
if exc and ("BBHandledException" in exc or "SystemExit" in exc):
@@ -437,6 +560,7 @@ class BitBakeProcessServerConnection(object):
self.socket_connection = sock
def terminate(self):
+ self.events.close()
self.socket_connection.close()
self.connection.connection.close()
self.connection.recv.close()
@@ -447,13 +571,14 @@ start_log_datetime_format = '%Y-%m-%d %H:%M:%S.%f'
class BitBakeServer(object):
- def __init__(self, lock, sockname, featureset, server_timeout, xmlrpcinterface):
+ def __init__(self, lock, sockname, featureset, server_timeout, xmlrpcinterface, profile):
self.server_timeout = server_timeout
self.xmlrpcinterface = xmlrpcinterface
self.featureset = featureset
self.sockname = sockname
self.bitbake_lock = lock
+ self.profile = profile
self.readypipe, self.readypipein = os.pipe()
# Place the log in the builddirectory alongside the lock file
@@ -517,9 +642,9 @@ class BitBakeServer(object):
os.set_inheritable(self.bitbake_lock.fileno(), True)
os.set_inheritable(self.readypipein, True)
serverscript = os.path.realpath(os.path.dirname(__file__) + "/../../../bin/bitbake-server")
- os.execl(sys.executable, "bitbake-server", serverscript, "decafbad", str(self.bitbake_lock.fileno()), str(self.readypipein), self.logfile, self.bitbake_lock.name, self.sockname, str(self.server_timeout or 0), str(self.xmlrpcinterface[0]), str(self.xmlrpcinterface[1]))
+ os.execl(sys.executable, sys.executable, serverscript, "decafbad", str(self.bitbake_lock.fileno()), str(self.readypipein), self.logfile, self.bitbake_lock.name, self.sockname, str(self.server_timeout or 0), str(int(self.profile)), str(self.xmlrpcinterface[0]), str(self.xmlrpcinterface[1]))
-def execServer(lockfd, readypipeinfd, lockname, sockname, server_timeout, xmlrpcinterface):
+def execServer(lockfd, readypipeinfd, lockname, sockname, server_timeout, xmlrpcinterface, profile):
import bb.cookerdata
import bb.cooker
@@ -531,6 +656,7 @@ def execServer(lockfd, readypipeinfd, lockname, sockname, server_timeout, xmlrpc
# Create server control socket
if os.path.exists(sockname):
+ serverlog("WARNING: removing existing socket file '%s'" % sockname)
os.unlink(sockname)
sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
@@ -547,7 +673,8 @@ def execServer(lockfd, readypipeinfd, lockname, sockname, server_timeout, xmlrpc
writer = ConnectionWriter(readypipeinfd)
try:
featureset = []
- cooker = bb.cooker.BBCooker(featureset, server.register_idle_function)
+ cooker = bb.cooker.BBCooker(featureset, server)
+ cooker.configuration.profile = profile
except bb.BBHandledException:
return None
writer.send("r")
@@ -662,23 +789,18 @@ class BBUIEventQueue:
self.reader = ConnectionReader(readfd)
self.t = threading.Thread()
- self.t.daemon = True
self.t.run = self.startCallbackHandler
self.t.start()
def getEvent(self):
- self.eventQueueLock.acquire()
-
- if len(self.eventQueue) == 0:
- self.eventQueueLock.release()
- return None
-
- item = self.eventQueue.pop(0)
+ with bb.utils.lock_timeout(self.eventQueueLock):
+ if len(self.eventQueue) == 0:
+ return None
- if len(self.eventQueue) == 0:
- self.eventQueueNotify.clear()
+ item = self.eventQueue.pop(0)
+ if len(self.eventQueue) == 0:
+ self.eventQueueNotify.clear()
- self.eventQueueLock.release()
return item
def waitEvent(self, delay):
@@ -686,10 +808,9 @@ class BBUIEventQueue:
return self.getEvent()
def queue_event(self, event):
- self.eventQueueLock.acquire()
- self.eventQueue.append(event)
- self.eventQueueNotify.set()
- self.eventQueueLock.release()
+ with bb.utils.lock_timeout(self.eventQueueLock):
+ self.eventQueue.append(event)
+ self.eventQueueNotify.set()
def send_event(self, event):
self.queue_event(pickle.loads(event))
@@ -698,13 +819,17 @@ class BBUIEventQueue:
bb.utils.set_process_name("UIEventQueue")
while True:
try:
- self.reader.wait()
- event = self.reader.get()
- self.queue_event(event)
- except EOFError:
+ ready = self.reader.wait(0.25)
+ if ready:
+ event = self.reader.get()
+ self.queue_event(event)
+ except (EOFError, OSError, TypeError):
# Easiest way to exit is to close the file descriptor to cause an exit
break
+
+ def close(self):
self.reader.close()
+ self.t.join()
class ConnectionReader(object):
@@ -719,7 +844,7 @@ class ConnectionReader(object):
return self.reader.poll(timeout)
def get(self):
- with self.rlock:
+ with bb.utils.lock_timeout(self.rlock):
res = self.reader.recv_bytes()
return multiprocessing.reduction.ForkingPickler.loads(res)
@@ -740,7 +865,7 @@ class ConnectionWriter(object):
def _send(self, obj):
gc.disable()
- with self.wlock:
+ with bb.utils.lock_timeout(self.wlock):
self.writer.send_bytes(obj)
gc.enable()
@@ -753,11 +878,13 @@ class ConnectionWriter(object):
# pthread_sigmask block/unblock would be nice but doesn't work, https://bugs.python.org/issue47139
process = multiprocessing.current_process()
if process and hasattr(process, "queue_signals"):
- with process.signal_threadlock:
+ with bb.utils.lock_timeout(process.signal_threadlock):
process.queue_signals = True
self._send(obj)
process.queue_signals = False
- for sig in process.signal_received.pop():
+
+ while len(process.signal_received) > 0:
+ sig = process.signal_received.pop()
process.handle_sig(sig, None)
else:
self._send(obj)
diff --git a/bitbake/lib/bb/server/xmlrpcserver.py b/bitbake/lib/bb/server/xmlrpcserver.py
index 01f55538ae..04b0b17db1 100644
--- a/bitbake/lib/bb/server/xmlrpcserver.py
+++ b/bitbake/lib/bb/server/xmlrpcserver.py
@@ -118,7 +118,7 @@ class BitBakeXMLRPCServerCommands():
"""
Run a cooker command on the server
"""
- return self.server.cooker.command.runCommand(command, self.server.readonly)
+ return self.server.cooker.command.runCommand(command, self.server.parent, self.server.readonly)
def getEventHandle(self):
return self.event_handle
diff --git a/bitbake/lib/bb/siggen.py b/bitbake/lib/bb/siggen.py
index 08eca7860e..8ab08ec961 100644
--- a/bitbake/lib/bb/siggen.py
+++ b/bitbake/lib/bb/siggen.py
@@ -1,4 +1,6 @@
#
+# Copyright BitBake Contributors
+#
# SPDX-License-Identifier: GPL-2.0-only
#
@@ -12,6 +14,8 @@ import bb.data
import difflib
import simplediff
import json
+import types
+from contextlib import contextmanager
import bb.compress.zstd
from bb.checksum import FileChecksumCache
from bb import runqueue
@@ -21,15 +25,33 @@ import hashserv.client
logger = logging.getLogger('BitBake.SigGen')
hashequiv_logger = logging.getLogger('BitBake.SigGen.HashEquiv')
+#find_siginfo and find_siginfo_version are set by the metadata siggen
+# The minimum version of the find_siginfo function we need
+find_siginfo_minversion = 2
+
+HASHSERV_ENVVARS = [
+ "SSL_CERT_DIR",
+ "SSL_CERT_FILE",
+ "NO_PROXY",
+ "HTTPS_PROXY",
+ "HTTP_PROXY"
+]
+
+def check_siggen_version(siggen):
+ if not hasattr(siggen, "find_siginfo_version"):
+ bb.fatal("Siggen from metadata (OE-Core?) is too old, please update it (no version found)")
+ if siggen.find_siginfo_version < siggen.find_siginfo_minversion:
+ bb.fatal("Siggen from metadata (OE-Core?) is too old, please update it (%s vs %s)" % (siggen.find_siginfo_version, siggen.find_siginfo_minversion))
+
class SetEncoder(json.JSONEncoder):
def default(self, obj):
- if isinstance(obj, set):
+ if isinstance(obj, set) or isinstance(obj, frozenset):
return dict(_set_object=list(sorted(obj)))
return json.JSONEncoder.default(self, obj)
def SetDecoder(dct):
if '_set_object' in dct:
- return set(dct['_set_object'])
+ return frozenset(dct['_set_object'])
return dct
def init(d):
@@ -51,11 +73,6 @@ class SignatureGenerator(object):
"""
name = "noop"
- # If the derived class supports multiconfig datacaches, set this to True
- # The default is False for backward compatibility with derived signature
- # generators that do not understand multiconfig caches
- supports_multiconfig_datacaches = False
-
def __init__(self, data):
self.basehash = {}
self.taskhash = {}
@@ -73,9 +90,39 @@ class SignatureGenerator(object):
def postparsing_clean_cache(self):
return
+ def setup_datacache(self, datacaches):
+ self.datacaches = datacaches
+
+ def setup_datacache_from_datastore(self, mcfn, d):
+ # In task context we have no cache so setup internal data structures
+ # from the fully parsed data store provided
+
+ mc = d.getVar("__BBMULTICONFIG", False) or ""
+ tasks = d.getVar('__BBTASKS', False)
+
+ self.datacaches = {}
+ self.datacaches[mc] = types.SimpleNamespace()
+ setattr(self.datacaches[mc], "stamp", {})
+ self.datacaches[mc].stamp[mcfn] = d.getVar('STAMP')
+ setattr(self.datacaches[mc], "stamp_extrainfo", {})
+ self.datacaches[mc].stamp_extrainfo[mcfn] = {}
+ for t in tasks:
+ flag = d.getVarFlag(t, "stamp-extra-info")
+ if flag:
+ self.datacaches[mc].stamp_extrainfo[mcfn][t] = flag
+
+ def get_cached_unihash(self, tid):
+ return None
+
def get_unihash(self, tid):
+ unihash = self.get_cached_unihash(tid)
+ if unihash:
+ return unihash
return self.taskhash[tid]
+ def get_unihashes(self, tids):
+ return {tid: self.get_unihash(tid) for tid in tids}
+
def prep_taskhash(self, tid, deps, dataCaches):
return
@@ -87,17 +134,51 @@ class SignatureGenerator(object):
"""Write/update the file checksum cache onto disk"""
return
+ def stampfile_base(self, mcfn):
+ mc = bb.runqueue.mc_from_tid(mcfn)
+ return self.datacaches[mc].stamp[mcfn]
+
+ def stampfile_mcfn(self, taskname, mcfn, extrainfo=True):
+ mc = bb.runqueue.mc_from_tid(mcfn)
+ stamp = self.datacaches[mc].stamp[mcfn]
+ if not stamp:
+ return
+
+ stamp_extrainfo = ""
+ if extrainfo:
+ taskflagname = taskname
+ if taskname.endswith("_setscene"):
+ taskflagname = taskname.replace("_setscene", "")
+ stamp_extrainfo = self.datacaches[mc].stamp_extrainfo[mcfn].get(taskflagname) or ""
+
+ return self.stampfile(stamp, mcfn, taskname, stamp_extrainfo)
+
def stampfile(self, stampbase, file_name, taskname, extrainfo):
return ("%s.%s.%s" % (stampbase, taskname, extrainfo)).rstrip('.')
+ def stampcleanmask_mcfn(self, taskname, mcfn):
+ mc = bb.runqueue.mc_from_tid(mcfn)
+ stamp = self.datacaches[mc].stamp[mcfn]
+ if not stamp:
+ return []
+
+ taskflagname = taskname
+ if taskname.endswith("_setscene"):
+ taskflagname = taskname.replace("_setscene", "")
+ stamp_extrainfo = self.datacaches[mc].stamp_extrainfo[mcfn].get(taskflagname) or ""
+
+ return self.stampcleanmask(stamp, mcfn, taskname, stamp_extrainfo)
+
def stampcleanmask(self, stampbase, file_name, taskname, extrainfo):
return ("%s.%s.%s" % (stampbase, taskname, extrainfo)).rstrip('.')
- def dump_sigtask(self, fn, task, stampbase, runtime):
+ def dump_sigtask(self, mcfn, task, stampbase, runtime):
return
- def invalidate_task(self, task, d, fn):
- bb.build.del_stamp(task, d, fn)
+ def invalidate_task(self, task, mcfn):
+ mc = bb.runqueue.mc_from_tid(mcfn)
+ stamp = self.datacaches[mc].stamp[mcfn]
+ bb.utils.remove(stamp)
def dump_sigs(self, dataCache, options):
return
@@ -120,44 +201,20 @@ class SignatureGenerator(object):
def save_unitaskhashes(self):
return
- def set_setscene_tasks(self, setscene_tasks):
+ def copy_unitaskhashes(self, targetdir):
return
- @classmethod
- def get_data_caches(cls, dataCaches, mc):
- """
- This function returns the datacaches that should be passed to signature
- generator functions. If the signature generator supports multiconfig
- caches, the entire dictionary of data caches is sent, otherwise a
- special proxy is sent that support both index access to all
- multiconfigs, and also direct access for the default multiconfig.
-
- The proxy class allows code in this class itself to always use
- multiconfig aware code (to ease maintenance), but derived classes that
- are unaware of multiconfig data caches can still access the default
- multiconfig as expected.
-
- Do not override this function in derived classes; it will be removed in
- the future when support for multiconfig data caches is mandatory
- """
- class DataCacheProxy(object):
- def __init__(self):
- pass
-
- def __getitem__(self, key):
- return dataCaches[key]
-
- def __getattr__(self, name):
- return getattr(dataCaches[mc], name)
-
- if cls.supports_multiconfig_datacaches:
- return dataCaches
-
- return DataCacheProxy()
+ def set_setscene_tasks(self, setscene_tasks):
+ return
def exit(self):
return
+def build_pnid(mc, pn, taskname):
+ if mc:
+ return "mc:" + mc + ":" + pn + ":" + taskname
+ return pn + ":" + taskname
+
class SignatureGeneratorBasic(SignatureGenerator):
"""
"""
@@ -167,12 +224,9 @@ class SignatureGeneratorBasic(SignatureGenerator):
self.basehash = {}
self.taskhash = {}
self.unihash = {}
- self.taskdeps = {}
self.runtaskdeps = {}
self.file_checksum_values = {}
self.taints = {}
- self.gendeps = {}
- self.lookupcache = {}
self.setscenetasks = set()
self.basehash_ignore_vars = set((data.getVar("BB_BASEHASH_IGNORE_VARS") or "").split())
self.taskhash_ignore_tasks = None
@@ -196,15 +250,15 @@ class SignatureGeneratorBasic(SignatureGenerator):
else:
self.twl = None
- def _build_data(self, fn, d):
+ def _build_data(self, mcfn, d):
ignore_mismatch = ((d.getVar("BB_HASH_IGNORE_MISMATCH") or '') == '1')
tasklist, gendeps, lookupcache = bb.data.generate_dependencies(d, self.basehash_ignore_vars)
- taskdeps, basehash = bb.data.generate_dependency_hash(tasklist, gendeps, lookupcache, self.basehash_ignore_vars, fn)
+ taskdeps, basehash = bb.data.generate_dependency_hash(tasklist, gendeps, lookupcache, self.basehash_ignore_vars, mcfn)
for task in tasklist:
- tid = fn + ":" + task
+ tid = mcfn + ":" + task
if not ignore_mismatch and tid in self.basehash and self.basehash[tid] != basehash[tid]:
bb.error("When reparsing %s, the basehash value changed from %s to %s. The metadata is not deterministic and this needs to be fixed." % (tid, self.basehash[tid], basehash[tid]))
bb.error("The following commands may help:")
@@ -215,11 +269,7 @@ class SignatureGeneratorBasic(SignatureGenerator):
bb.error("%s -Sprintdiff\n" % cmd)
self.basehash[tid] = basehash[tid]
- self.taskdeps[fn] = taskdeps
- self.gendeps[fn] = gendeps
- self.lookupcache[fn] = lookupcache
-
- return taskdeps
+ return taskdeps, gendeps, lookupcache
def set_setscene_tasks(self, setscene_tasks):
self.setscenetasks = set(setscene_tasks)
@@ -227,31 +277,42 @@ class SignatureGeneratorBasic(SignatureGenerator):
def finalise(self, fn, d, variant):
mc = d.getVar("__BBMULTICONFIG", False) or ""
+ mcfn = fn
if variant or mc:
- fn = bb.cache.realfn2virtual(fn, variant, mc)
+ mcfn = bb.cache.realfn2virtual(fn, variant, mc)
try:
- taskdeps = self._build_data(fn, d)
+ taskdeps, gendeps, lookupcache = self._build_data(mcfn, d)
except bb.parse.SkipRecipe:
raise
except:
- bb.warn("Error during finalise of %s" % fn)
+ bb.warn("Error during finalise of %s" % mcfn)
raise
+ basehashes = {}
+ for task in taskdeps:
+ basehashes[task] = self.basehash[mcfn + ":" + task]
+
+ d.setVar("__siggen_basehashes", basehashes)
+ d.setVar("__siggen_gendeps", gendeps)
+ d.setVar("__siggen_varvals", lookupcache)
+ d.setVar("__siggen_taskdeps", taskdeps)
+
#Slow but can be useful for debugging mismatched basehashes
- #for task in self.taskdeps[fn]:
- # self.dump_sigtask(fn, task, d.getVar("STAMP"), False)
+ #self.setup_datacache_from_datastore(mcfn, d)
+ #for task in taskdeps:
+ # self.dump_sigtask(mcfn, task, d.getVar("STAMP"), False)
- for task in taskdeps:
- d.setVar("BB_BASEHASH:task-%s" % task, self.basehash[fn + ":" + task])
+ def setup_datacache_from_datastore(self, mcfn, d):
+ super().setup_datacache_from_datastore(mcfn, d)
- def postparsing_clean_cache(self):
- #
- # After parsing we can remove some things from memory to reduce our memory footprint
- #
- self.gendeps = {}
- self.lookupcache = {}
- self.taskdeps = {}
+ mc = bb.runqueue.mc_from_tid(mcfn)
+ for attr in ["siggen_varvals", "siggen_taskdeps", "siggen_gendeps"]:
+ if not hasattr(self.datacaches[mc], attr):
+ setattr(self.datacaches[mc], attr, {})
+ self.datacaches[mc].siggen_varvals[mcfn] = d.getVar("__siggen_varvals")
+ self.datacaches[mc].siggen_taskdeps[mcfn] = d.getVar("__siggen_taskdeps")
+ self.datacaches[mc].siggen_gendeps[mcfn] = d.getVar("__siggen_gendeps")
def rundep_check(self, fn, recipename, task, dep, depname, dataCaches):
# Return True if we should keep the dependency, False to drop it
@@ -274,38 +335,37 @@ class SignatureGeneratorBasic(SignatureGenerator):
def prep_taskhash(self, tid, deps, dataCaches):
- (mc, _, task, fn) = bb.runqueue.split_tid_mcfn(tid)
+ (mc, _, task, mcfn) = bb.runqueue.split_tid_mcfn(tid)
self.basehash[tid] = dataCaches[mc].basetaskhash[tid]
self.runtaskdeps[tid] = []
self.file_checksum_values[tid] = []
- recipename = dataCaches[mc].pkg_fn[fn]
+ recipename = dataCaches[mc].pkg_fn[mcfn]
self.tidtopn[tid] = recipename
+ # save hashfn for deps into siginfo?
+ for dep in deps:
+ (depmc, _, deptask, depmcfn) = bb.runqueue.split_tid_mcfn(dep)
+ dep_pn = dataCaches[depmc].pkg_fn[depmcfn]
- for dep in sorted(deps, key=clean_basepath):
- (depmc, _, _, depmcfn) = bb.runqueue.split_tid_mcfn(dep)
- depname = dataCaches[depmc].pkg_fn[depmcfn]
- if not self.supports_multiconfig_datacaches and mc != depmc:
- # If the signature generator doesn't understand multiconfig
- # data caches, any dependency not in the same multiconfig must
- # be skipped for backward compatibility
- continue
- if not self.rundep_check(fn, recipename, task, dep, depname, dataCaches):
+ if not self.rundep_check(mcfn, recipename, task, dep, dep_pn, dataCaches):
continue
+
if dep not in self.taskhash:
bb.fatal("%s is not in taskhash, caller isn't calling in dependency order?" % dep)
- self.runtaskdeps[tid].append(dep)
- if task in dataCaches[mc].file_checksums[fn]:
+ dep_pnid = build_pnid(depmc, dep_pn, deptask)
+ self.runtaskdeps[tid].append((dep_pnid, dep))
+
+ if task in dataCaches[mc].file_checksums[mcfn]:
if self.checksum_cache:
- checksums = self.checksum_cache.get_checksums(dataCaches[mc].file_checksums[fn][task], recipename, self.localdirsexclude)
+ checksums = self.checksum_cache.get_checksums(dataCaches[mc].file_checksums[mcfn][task], recipename, self.localdirsexclude)
else:
- checksums = bb.fetch2.get_file_checksums(dataCaches[mc].file_checksums[fn][task], recipename, self.localdirsexclude)
+ checksums = bb.fetch2.get_file_checksums(dataCaches[mc].file_checksums[mcfn][task], recipename, self.localdirsexclude)
for (f,cs) in checksums:
self.file_checksum_values[tid].append((f,cs))
- taskdep = dataCaches[mc].task_deps[fn]
+ taskdep = dataCaches[mc].task_deps[mcfn]
if 'nostamp' in taskdep and task in taskdep['nostamp']:
# Nostamp tasks need an implicit taint so that they force any dependent tasks to run
if tid in self.taints and self.taints[tid].startswith("nostamp:"):
@@ -316,7 +376,7 @@ class SignatureGeneratorBasic(SignatureGenerator):
taint = str(uuid.uuid4())
self.taints[tid] = "nostamp:" + taint
- taint = self.read_taint(fn, task, dataCaches[mc].stamp[fn])
+ taint = self.read_taint(mcfn, task, dataCaches[mc].stamp[mcfn])
if taint:
self.taints[tid] = taint
logger.warning("%s is tainted from a forced run" % tid)
@@ -326,20 +386,20 @@ class SignatureGeneratorBasic(SignatureGenerator):
def get_taskhash(self, tid, deps, dataCaches):
data = self.basehash[tid]
- for dep in self.runtaskdeps[tid]:
- data = data + self.get_unihash(dep)
+ for dep in sorted(self.runtaskdeps[tid]):
+ data += self.get_unihash(dep[1])
- for (f, cs) in self.file_checksum_values[tid]:
+ for (f, cs) in sorted(self.file_checksum_values[tid], key=clean_checksum_file_path):
if cs:
if "/./" in f:
- data = data + "./" + f.split("/./")[1]
- data = data + cs
+ data += "./" + f.split("/./")[1]
+ data += cs
if tid in self.taints:
if self.taints[tid].startswith("nostamp:"):
- data = data + self.taints[tid][8:]
+ data += self.taints[tid][8:]
else:
- data = data + self.taints[tid]
+ data += self.taints[tid]
h = hashlib.sha256(data.encode("utf-8")).hexdigest()
self.taskhash[tid] = h
@@ -358,9 +418,12 @@ class SignatureGeneratorBasic(SignatureGenerator):
def save_unitaskhashes(self):
self.unihash_cache.save(self.unitaskhashes)
- def dump_sigtask(self, fn, task, stampbase, runtime):
+ def copy_unitaskhashes(self, targetdir):
+ self.unihash_cache.copyfile(targetdir)
- tid = fn + ":" + task
+ def dump_sigtask(self, mcfn, task, stampbase, runtime):
+ tid = mcfn + ":" + task
+ mc = bb.runqueue.mc_from_tid(mcfn)
referencestamp = stampbase
if isinstance(runtime, str) and runtime.startswith("customfile"):
sigfile = stampbase
@@ -377,32 +440,32 @@ class SignatureGeneratorBasic(SignatureGenerator):
data['task'] = task
data['basehash_ignore_vars'] = self.basehash_ignore_vars
data['taskhash_ignore_tasks'] = self.taskhash_ignore_tasks
- data['taskdeps'] = self.taskdeps[fn][task]
+ data['taskdeps'] = self.datacaches[mc].siggen_taskdeps[mcfn][task]
data['basehash'] = self.basehash[tid]
data['gendeps'] = {}
data['varvals'] = {}
- data['varvals'][task] = self.lookupcache[fn][task]
- for dep in self.taskdeps[fn][task]:
+ data['varvals'][task] = self.datacaches[mc].siggen_varvals[mcfn][task]
+ for dep in self.datacaches[mc].siggen_taskdeps[mcfn][task]:
if dep in self.basehash_ignore_vars:
continue
- data['gendeps'][dep] = self.gendeps[fn][dep]
- data['varvals'][dep] = self.lookupcache[fn][dep]
+ data['gendeps'][dep] = self.datacaches[mc].siggen_gendeps[mcfn][dep]
+ data['varvals'][dep] = self.datacaches[mc].siggen_varvals[mcfn][dep]
if runtime and tid in self.taskhash:
- data['runtaskdeps'] = self.runtaskdeps[tid]
+ data['runtaskdeps'] = [dep[0] for dep in sorted(self.runtaskdeps[tid])]
data['file_checksum_values'] = []
- for f,cs in self.file_checksum_values[tid]:
+ for f,cs in sorted(self.file_checksum_values[tid], key=clean_checksum_file_path):
if "/./" in f:
data['file_checksum_values'].append(("./" + f.split("/./")[1], cs))
else:
data['file_checksum_values'].append((os.path.basename(f), cs))
data['runtaskhashes'] = {}
- for dep in data['runtaskdeps']:
- data['runtaskhashes'][dep] = self.get_unihash(dep)
+ for dep in self.runtaskdeps[tid]:
+ data['runtaskhashes'][dep[0]] = self.get_unihash(dep[1])
data['taskhash'] = self.taskhash[tid]
data['unihash'] = self.get_unihash(tid)
- taint = self.read_taint(fn, task, referencestamp)
+ taint = self.read_taint(mcfn, task, referencestamp)
if taint:
data['taint'] = taint
@@ -419,7 +482,7 @@ class SignatureGeneratorBasic(SignatureGenerator):
bb.error("Taskhash mismatch %s versus %s for %s" % (computed_taskhash, self.taskhash[tid], tid))
sigfile = sigfile.replace(self.taskhash[tid], computed_taskhash)
- fd, tmpfile = tempfile.mkstemp(dir=os.path.dirname(sigfile), prefix="sigtask.")
+ fd, tmpfile = bb.utils.mkstemp(dir=os.path.dirname(sigfile), prefix="sigtask.")
try:
with bb.compress.zstd.open(fd, "wt", encoding="utf-8", num_threads=1) as f:
json.dump(data, f, sort_keys=True, separators=(",", ":"), cls=SetEncoder)
@@ -433,18 +496,6 @@ class SignatureGeneratorBasic(SignatureGenerator):
pass
raise err
- def dump_sigfn(self, fn, dataCaches, options):
- if fn in self.taskdeps:
- for task in self.taskdeps[fn]:
- tid = fn + ":" + task
- mc = bb.runqueue.mc_from_tid(tid)
- if tid not in self.taskhash:
- continue
- if dataCaches[mc].basetaskhash[tid] != self.basehash[tid]:
- bb.error("Bitbake's cached basehash does not match the one we just generated (%s)!" % tid)
- bb.error("The mismatched hashes were %s and %s" % (dataCaches[mc].basetaskhash[tid], self.basehash[tid]))
- self.dump_sigtask(fn, task, dataCaches[mc].stamp[fn], True)
-
class SignatureGeneratorBasicHash(SignatureGeneratorBasic):
name = "basichash"
@@ -455,11 +506,11 @@ class SignatureGeneratorBasicHash(SignatureGeneratorBasic):
# If task is not in basehash, then error
return self.basehash[tid]
- def stampfile(self, stampbase, fn, taskname, extrainfo, clean=False):
- if taskname != "do_setscene" and taskname.endswith("_setscene"):
- tid = fn + ":" + taskname[:-9]
+ def stampfile(self, stampbase, mcfn, taskname, extrainfo, clean=False):
+ if taskname.endswith("_setscene"):
+ tid = mcfn + ":" + taskname[:-9]
else:
- tid = fn + ":" + taskname
+ tid = mcfn + ":" + taskname
if clean:
h = "*"
else:
@@ -467,42 +518,107 @@ class SignatureGeneratorBasicHash(SignatureGeneratorBasic):
return ("%s.%s.%s.%s" % (stampbase, taskname, h, extrainfo)).rstrip('.')
- def stampcleanmask(self, stampbase, fn, taskname, extrainfo):
- return self.stampfile(stampbase, fn, taskname, extrainfo, clean=True)
+ def stampcleanmask(self, stampbase, mcfn, taskname, extrainfo):
+ return self.stampfile(stampbase, mcfn, taskname, extrainfo, clean=True)
+
+ def invalidate_task(self, task, mcfn):
+ bb.note("Tainting hash to force rebuild of task %s, %s" % (mcfn, task))
- def invalidate_task(self, task, d, fn):
- bb.note("Tainting hash to force rebuild of task %s, %s" % (fn, task))
- bb.build.write_taint(task, d, fn)
+ mc = bb.runqueue.mc_from_tid(mcfn)
+ stamp = self.datacaches[mc].stamp[mcfn]
+
+ taintfn = stamp + '.' + task + '.taint'
+
+ import uuid
+ bb.utils.mkdirhier(os.path.dirname(taintfn))
+ # The specific content of the taint file is not really important,
+ # we just need it to be random, so a random UUID is used
+ with open(taintfn, 'w') as taintf:
+ taintf.write(str(uuid.uuid4()))
class SignatureGeneratorUniHashMixIn(object):
def __init__(self, data):
self.extramethod = {}
+ # NOTE: The cache only tracks hashes that exist. Hashes that don't
+ # exist are always queries from the server since it is possible for
+ # hashes to appear over time, but much less likely for them to
+ # disappear
+ self.unihash_exists_cache = set()
+ self.username = None
+ self.password = None
+ self.env = {}
+
+ origenv = data.getVar("BB_ORIGENV")
+ for e in HASHSERV_ENVVARS:
+ value = data.getVar(e)
+ if not value and origenv:
+ value = origenv.getVar(e)
+ if value:
+ self.env[e] = value
super().__init__(data)
def get_taskdata(self):
- return (self.server, self.method, self.extramethod) + super().get_taskdata()
+ return (self.server, self.method, self.extramethod, self.max_parallel, self.username, self.password, self.env) + super().get_taskdata()
def set_taskdata(self, data):
- self.server, self.method, self.extramethod = data[:3]
- super().set_taskdata(data[3:])
+ self.server, self.method, self.extramethod, self.max_parallel, self.username, self.password, self.env = data[:7]
+ super().set_taskdata(data[7:])
+
+ def get_hashserv_creds(self):
+ if self.username and self.password:
+ return {
+ "username": self.username,
+ "password": self.password,
+ }
+
+ return {}
+
+ @contextmanager
+ def _client_env(self):
+ orig_env = os.environ.copy()
+ try:
+ for k, v in self.env.items():
+ os.environ[k] = v
+
+ yield
+ finally:
+ for k, v in self.env.items():
+ if k in orig_env:
+ os.environ[k] = orig_env[k]
+ else:
+ del os.environ[k]
+ @contextmanager
def client(self):
- if getattr(self, '_client', None) is None:
- self._client = hashserv.create_client(self.server)
- return self._client
+ with self._client_env():
+ if getattr(self, '_client', None) is None:
+ self._client = hashserv.create_client(self.server, **self.get_hashserv_creds())
+ yield self._client
+
+ @contextmanager
+ def client_pool(self):
+ with self._client_env():
+ if getattr(self, '_client_pool', None) is None:
+ self._client_pool = hashserv.client.ClientPool(self.server, self.max_parallel, **self.get_hashserv_creds())
+ yield self._client_pool
def reset(self, data):
- if getattr(self, '_client', None) is not None:
- self._client.close()
- self._client = None
+ self.__close_clients()
return super().reset(data)
def exit(self):
- if getattr(self, '_client', None) is not None:
- self._client.close()
- self._client = None
+ self.__close_clients()
return super().exit()
+ def __close_clients(self):
+ with self._client_env():
+ if getattr(self, '_client', None) is not None:
+ self._client.close()
+ self._client = None
+ if getattr(self, '_client_pool', None) is not None:
+ self._client_pool.close()
+ self._client_pool = None
+
def get_stampfile_hash(self, tid):
if tid in self.taskhash:
# If a unique hash is reported, use it as the stampfile hash. This
@@ -534,7 +650,7 @@ class SignatureGeneratorUniHashMixIn(object):
return None
return unihash
- def get_unihash(self, tid):
+ def get_cached_unihash(self, tid):
taskhash = self.taskhash[tid]
# If its not a setscene task we can return
@@ -549,40 +665,105 @@ class SignatureGeneratorUniHashMixIn(object):
self.unihash[tid] = unihash
return unihash
- # In the absence of being able to discover a unique hash from the
- # server, make it be equivalent to the taskhash. The unique "hash" only
- # really needs to be a unique string (not even necessarily a hash), but
- # making it match the taskhash has a few advantages:
- #
- # 1) All of the sstate code that assumes hashes can be the same
- # 2) It provides maximal compatibility with builders that don't use
- # an equivalency server
- # 3) The value is easy for multiple independent builders to derive the
- # same unique hash from the same input. This means that if the
- # independent builders find the same taskhash, but it isn't reported
- # to the server, there is a better chance that they will agree on
- # the unique hash.
- unihash = taskhash
+ return None
- try:
- method = self.method
- if tid in self.extramethod:
- method = method + self.extramethod[tid]
- data = self.client().get_unihash(method, self.taskhash[tid])
- if data:
- unihash = data
+ def _get_method(self, tid):
+ method = self.method
+ if tid in self.extramethod:
+ method = method + self.extramethod[tid]
+
+ return method
+
+ def unihashes_exist(self, query):
+ if len(query) == 0:
+ return {}
+
+ uncached_query = {}
+ result = {}
+ for key, unihash in query.items():
+ if unihash in self.unihash_exists_cache:
+ result[key] = True
+ else:
+ uncached_query[key] = unihash
+
+ if self.max_parallel <= 1 or len(uncached_query) <= 1:
+ # No parallelism required. Make the query serially with the single client
+ with self.client() as client:
+ uncached_result = {
+ key: client.unihash_exists(value) for key, value in uncached_query.items()
+ }
+ else:
+ with self.client_pool() as client_pool:
+ uncached_result = client_pool.unihashes_exist(uncached_query)
+
+ for key, exists in uncached_result.items():
+ if exists:
+ self.unihash_exists_cache.add(query[key])
+ result[key] = exists
+
+ return result
+
+ def get_unihash(self, tid):
+ return self.get_unihashes([tid])[tid]
+
+ def get_unihashes(self, tids):
+ """
+ For a iterable of tids, returns a dictionary that maps each tid to a
+ unihash
+ """
+ result = {}
+ queries = {}
+ query_result = {}
+
+ for tid in tids:
+ unihash = self.get_cached_unihash(tid)
+ if unihash:
+ result[tid] = unihash
+ else:
+ queries[tid] = (self._get_method(tid), self.taskhash[tid])
+
+ if len(queries) == 0:
+ return result
+
+ if self.max_parallel <= 1 or len(queries) <= 1:
+ # No parallelism required. Make the query serially with the single client
+ with self.client() as client:
+ for tid, args in queries.items():
+ query_result[tid] = client.get_unihash(*args)
+ else:
+ with self.client_pool() as client_pool:
+ query_result = client_pool.get_unihashes(queries)
+
+ for tid, unihash in query_result.items():
+ # In the absence of being able to discover a unique hash from the
+ # server, make it be equivalent to the taskhash. The unique "hash" only
+ # really needs to be a unique string (not even necessarily a hash), but
+ # making it match the taskhash has a few advantages:
+ #
+ # 1) All of the sstate code that assumes hashes can be the same
+ # 2) It provides maximal compatibility with builders that don't use
+ # an equivalency server
+ # 3) The value is easy for multiple independent builders to derive the
+ # same unique hash from the same input. This means that if the
+ # independent builders find the same taskhash, but it isn't reported
+ # to the server, there is a better chance that they will agree on
+ # the unique hash.
+ taskhash = self.taskhash[tid]
+ if unihash:
# A unique hash equal to the taskhash is not very interesting,
# so it is reported it at debug level 2. If they differ, that
# is much more interesting, so it is reported at debug level 1
- hashequiv_logger.debug((1, 2)[unihash == taskhash], 'Found unihash %s in place of %s for %s from %s' % (unihash, taskhash, tid, self.server))
+ hashequiv_logger.bbdebug((1, 2)[unihash == taskhash], 'Found unihash %s in place of %s for %s from %s' % (unihash, taskhash, tid, self.server))
else:
hashequiv_logger.debug2('No reported unihash for %s:%s from %s' % (tid, taskhash, self.server))
- except ConnectionError as e:
- bb.warn('Error contacting Hash Equivalence Server %s: %s' % (self.server, str(e)))
+ unihash = taskhash
- self.set_unihash(tid, unihash)
- self.unihash[tid] = unihash
- return unihash
+
+ self.set_unihash(tid, unihash)
+ self.unihash[tid] = unihash
+ result[tid] = unihash
+
+ return result
def report_unihash(self, path, task, d):
import importlib
@@ -591,8 +772,8 @@ class SignatureGeneratorUniHashMixIn(object):
unihash = d.getVar('BB_UNIHASH')
report_taskdata = d.getVar('SSTATE_HASHEQUIV_REPORT_TASKDATA') == '1'
tempdir = d.getVar('T')
- fn = d.getVar('BB_FILENAME')
- tid = fn + ':do_' + task
+ mcfn = d.getVar('BB_FILENAME')
+ tid = mcfn + ':do_' + task
key = tid + ':' + taskhash
if self.setscenetasks and tid not in self.setscenetasks:
@@ -646,12 +827,14 @@ class SignatureGeneratorUniHashMixIn(object):
if tid in self.extramethod:
method = method + self.extramethod[tid]
- data = self.client().report_unihash(taskhash, method, outhash, unihash, extra_data)
+ with self.client() as client:
+ data = client.report_unihash(taskhash, method, outhash, unihash, extra_data)
+
new_unihash = data['unihash']
if new_unihash != unihash:
hashequiv_logger.debug('Task %s unihash changed %s -> %s by server %s' % (taskhash, unihash, new_unihash, self.server))
- bb.event.fire(bb.runqueue.taskUniHashUpdate(fn + ':do_' + task, new_unihash), d)
+ bb.event.fire(bb.runqueue.taskUniHashUpdate(mcfn + ':do_' + task, new_unihash), d)
self.set_unihash(tid, new_unihash)
d.setVar('BB_UNIHASH', new_unihash)
else:
@@ -677,7 +860,9 @@ class SignatureGeneratorUniHashMixIn(object):
if tid in self.extramethod:
method = method + self.extramethod[tid]
- data = self.client().report_unihash_equiv(taskhash, method, wanted_unihash, extra_data)
+ with self.client() as client:
+ data = client.report_unihash_equiv(taskhash, method, wanted_unihash, extra_data)
+
hashequiv_logger.verbose('Reported task %s as unihash %s to %s (%s)' % (tid, wanted_unihash, self.server, str(data)))
if data is None:
@@ -710,20 +895,20 @@ class SignatureGeneratorTestEquivHash(SignatureGeneratorUniHashMixIn, SignatureG
super().init_rundepcheck(data)
self.server = data.getVar('BB_HASHSERVE')
self.method = "sstate_output_hash"
+ self.max_parallel = 1
-#
-# Dummy class used for bitbake-selftest
-#
-class SignatureGeneratorTestMulticonfigDepends(SignatureGeneratorBasicHash):
- name = "TestMulticonfigDepends"
- supports_multiconfig_datacaches = True
+def clean_checksum_file_path(file_checksum_tuple):
+ f, cs = file_checksum_tuple
+ if "/./" in f:
+ return "./" + f.split("/./")[1]
+ return f
def dump_this_task(outfile, d):
import bb.parse
- fn = d.getVar("BB_FILENAME")
+ mcfn = d.getVar("BB_FILENAME")
task = "do_" + d.getVar("BB_CURRENTTASK")
- referencestamp = bb.build.stamp_internal(task, d, None, True)
- bb.parse.siggen.dump_sigtask(fn, task, outfile, "customfile:" + referencestamp)
+ referencestamp = bb.parse.siggen.stampfile_base(mcfn)
+ bb.parse.siggen.dump_sigtask(mcfn, task, outfile, "customfile:" + referencestamp)
def init_colors(enable_color):
"""Initialise colour dict for passing to compare_sigfiles()"""
@@ -776,39 +961,6 @@ def list_inline_diff(oldlist, newlist, colors=None):
ret.append(item)
return '[%s]' % (', '.join(ret))
-def clean_basepath(basepath):
- basepath, dir, recipe_task = basepath.rsplit("/", 2)
- cleaned = dir + '/' + recipe_task
-
- if basepath[0] == '/':
- return cleaned
-
- if basepath.startswith("mc:") and basepath.count(':') >= 2:
- mc, mc_name, basepath = basepath.split(":", 2)
- mc_suffix = ':mc:' + mc_name
- else:
- mc_suffix = ''
-
- # mc stuff now removed from basepath. Whatever was next, if present will be the first
- # suffix. ':/', recipe path start, marks the end of this. Something like
- # 'virtual:a[:b[:c]]:/path...' (b and c being optional)
- if basepath[0] != '/':
- cleaned += ':' + basepath.split(':/', 1)[0]
-
- return cleaned + mc_suffix
-
-def clean_basepaths(a):
- b = {}
- for x in a:
- b[clean_basepath(x)] = a[x]
- return b
-
-def clean_basepaths_list(a):
- b = []
- for x in a:
- b.append(clean_basepath(x))
- return b
-
# Handled renamed fields
def handle_renames(data):
if 'basewhitelist' in data:
@@ -839,10 +991,18 @@ def compare_sigfiles(a, b, recursecb=None, color=False, collapsed=False):
formatparams.update(values)
return formatstr.format(**formatparams)
- with bb.compress.zstd.open(a, "rt", encoding="utf-8", num_threads=1) as f:
- a_data = json.load(f, object_hook=SetDecoder)
- with bb.compress.zstd.open(b, "rt", encoding="utf-8", num_threads=1) as f:
- b_data = json.load(f, object_hook=SetDecoder)
+ try:
+ with bb.compress.zstd.open(a, "rt", encoding="utf-8", num_threads=1) as f:
+ a_data = json.load(f, object_hook=SetDecoder)
+ except (TypeError, OSError) as err:
+ bb.error("Failed to open sigdata file '%s': %s" % (a, str(err)))
+ raise err
+ try:
+ with bb.compress.zstd.open(b, "rt", encoding="utf-8", num_threads=1) as f:
+ b_data = json.load(f, object_hook=SetDecoder)
+ except (TypeError, OSError) as err:
+ bb.error("Failed to open sigdata file '%s': %s" % (b, str(err)))
+ raise err
for data in [a_data, b_data]:
handle_renames(data)
@@ -977,11 +1137,11 @@ def compare_sigfiles(a, b, recursecb=None, color=False, collapsed=False):
a = a_data['runtaskdeps'][idx]
b = b_data['runtaskdeps'][idx]
if a_data['runtaskhashes'][a] != b_data['runtaskhashes'][b] and not collapsed:
- changed.append("%s with hash %s\n changed to\n%s with hash %s" % (clean_basepath(a), a_data['runtaskhashes'][a], clean_basepath(b), b_data['runtaskhashes'][b]))
+ changed.append("%s with hash %s\n changed to\n%s with hash %s" % (a, a_data['runtaskhashes'][a], b, b_data['runtaskhashes'][b]))
if changed:
- clean_a = clean_basepaths_list(a_data['runtaskdeps'])
- clean_b = clean_basepaths_list(b_data['runtaskdeps'])
+ clean_a = a_data['runtaskdeps']
+ clean_b = b_data['runtaskdeps']
if clean_a != clean_b:
output.append(color_format("{color_title}runtaskdeps changed:{color_default}\n%s") % list_inline_diff(clean_a, clean_b, colors))
else:
@@ -990,8 +1150,8 @@ def compare_sigfiles(a, b, recursecb=None, color=False, collapsed=False):
if 'runtaskhashes' in a_data and 'runtaskhashes' in b_data:
- a = clean_basepaths(a_data['runtaskhashes'])
- b = clean_basepaths(b_data['runtaskhashes'])
+ a = a_data['runtaskhashes']
+ b = b_data['runtaskhashes']
changed, added, removed = dict_diff(a, b)
if added:
for dep in sorted(added):
@@ -1048,7 +1208,7 @@ def calc_basehash(sigdata):
basedata = ''
alldeps = sigdata['taskdeps']
- for dep in alldeps:
+ for dep in sorted(alldeps):
basedata = basedata + dep
val = sigdata['varvals'][dep]
if val is not None:
@@ -1080,8 +1240,12 @@ def calc_taskhash(sigdata):
def dump_sigfile(a):
output = []
- with bb.compress.zstd.open(a, "rt", encoding="utf-8", num_threads=1) as f:
- a_data = json.load(f, object_hook=SetDecoder)
+ try:
+ with bb.compress.zstd.open(a, "rt", encoding="utf-8", num_threads=1) as f:
+ a_data = json.load(f, object_hook=SetDecoder)
+ except (TypeError, OSError) as err:
+ bb.error("Failed to open sigdata file '%s': %s" % (a, str(err)))
+ raise err
handle_renames(a_data)
diff --git a/bitbake/lib/bb/tests/codeparser.py b/bitbake/lib/bb/tests/codeparser.py
index 71ed382ab8..f6585fb3aa 100644
--- a/bitbake/lib/bb/tests/codeparser.py
+++ b/bitbake/lib/bb/tests/codeparser.py
@@ -44,6 +44,7 @@ class VariableReferenceTest(ReferenceTest):
def parseExpression(self, exp):
parsedvar = self.d.expandWithRefs(exp, None)
self.references = parsedvar.references
+ self.execs = parsedvar.execs
def test_simple_reference(self):
self.setEmptyVars(["FOO"])
@@ -61,6 +62,11 @@ class VariableReferenceTest(ReferenceTest):
self.parseExpression("${@d.getVar('BAR') + 'foo'}")
self.assertReferences(set(["BAR"]))
+ def test_python_exec_reference(self):
+ self.parseExpression("${@eval('3 * 5')}")
+ self.assertReferences(set())
+ self.assertExecs(set(["eval"]))
+
class ShellReferenceTest(ReferenceTest):
def parseExpression(self, exp):
@@ -318,7 +324,7 @@ d.getVar(a(), False)
"filename": "example.bb",
})
- deps, values = bb.data.build_dependencies("FOO", set(self.d.keys()), set(), set(), set(), self.d)
+ deps, values = bb.data.build_dependencies("FOO", set(self.d.keys()), set(), set(), set(), set(), self.d, self.d)
self.assertEqual(deps, set(["somevar", "bar", "something", "inexpand", "test", "test2", "a"]))
@@ -365,7 +371,7 @@ esac
self.d.setVarFlags("FOO", {"func": True})
self.setEmptyVars(execs)
- deps, values = bb.data.build_dependencies("FOO", set(self.d.keys()), set(), set(), set(), self.d)
+ deps, values = bb.data.build_dependencies("FOO", set(self.d.keys()), set(), set(), set(), set(), self.d, self.d)
self.assertEqual(deps, set(["somevar", "inverted"] + execs))
@@ -375,7 +381,7 @@ esac
self.d.setVar("FOO", "foo=oe_libinstall; eval $foo")
self.d.setVarFlag("FOO", "vardeps", "oe_libinstall")
- deps, values = bb.data.build_dependencies("FOO", set(self.d.keys()), set(), set(), set(), self.d)
+ deps, values = bb.data.build_dependencies("FOO", set(self.d.keys()), set(), set(), set(), set(), self.d, self.d)
self.assertEqual(deps, set(["oe_libinstall"]))
@@ -384,7 +390,7 @@ esac
self.d.setVar("FOO", "foo=oe_libinstall; eval $foo")
self.d.setVarFlag("FOO", "vardeps", "${@'oe_libinstall'}")
- deps, values = bb.data.build_dependencies("FOO", set(self.d.keys()), set(), set(), set(), self.d)
+ deps, values = bb.data.build_dependencies("FOO", set(self.d.keys()), set(), set(), set(), set(), self.d, self.d)
self.assertEqual(deps, set(["oe_libinstall"]))
@@ -399,7 +405,7 @@ esac
# Check dependencies
self.d.setVar('ANOTHERVAR', expr)
self.d.setVar('TESTVAR', 'anothervalue testval testval2')
- deps, values = bb.data.build_dependencies("ANOTHERVAR", set(self.d.keys()), set(), set(), set(), self.d)
+ deps, values = bb.data.build_dependencies("ANOTHERVAR", set(self.d.keys()), set(), set(), set(), set(), self.d, self.d)
self.assertEqual(sorted(values.splitlines()),
sorted([expr,
'TESTVAR{anothervalue} = Set',
@@ -418,23 +424,49 @@ esac
self.d.setVar('ANOTHERVAR', varval)
self.d.setVar('TESTVAR', 'anothervalue testval testval2')
self.d.setVar('TESTVAR2', 'testval3')
- deps, values = bb.data.build_dependencies("ANOTHERVAR", set(self.d.keys()), set(), set(), set(["TESTVAR"]), self.d)
+ deps, values = bb.data.build_dependencies("ANOTHERVAR", set(self.d.keys()), set(), set(), set(), set(["TESTVAR"]), self.d, self.d)
self.assertEqual(sorted(values.splitlines()), sorted([varval]))
self.assertEqual(deps, set(["TESTVAR2"]))
self.assertEqual(self.d.getVar('ANOTHERVAR').split(), ['testval3', 'anothervalue'])
# Check the vardepsexclude flag is handled by contains functionality
self.d.setVarFlag('ANOTHERVAR', 'vardepsexclude', 'TESTVAR')
- deps, values = bb.data.build_dependencies("ANOTHERVAR", set(self.d.keys()), set(), set(), set(), self.d)
+ deps, values = bb.data.build_dependencies("ANOTHERVAR", set(self.d.keys()), set(), set(), set(), set(), self.d, self.d)
self.assertEqual(sorted(values.splitlines()), sorted([varval]))
self.assertEqual(deps, set(["TESTVAR2"]))
self.assertEqual(self.d.getVar('ANOTHERVAR').split(), ['testval3', 'anothervalue'])
+ def test_contains_vardeps_override_operators(self):
+ # Check override operators handle dependencies correctly with the contains functionality
+ expr_plain = 'testval'
+ expr_prepend = '${@bb.utils.filter("TESTVAR1", "testval1", d)} '
+ expr_append = ' ${@bb.utils.filter("TESTVAR2", "testval2", d)}'
+ expr_remove = '${@bb.utils.contains("TESTVAR3", "no-testval", "testval", "", d)}'
+ # Check dependencies
+ self.d.setVar('ANOTHERVAR', expr_plain)
+ self.d.prependVar('ANOTHERVAR', expr_prepend)
+ self.d.appendVar('ANOTHERVAR', expr_append)
+ self.d.setVar('ANOTHERVAR:remove', expr_remove)
+ self.d.setVar('TESTVAR1', 'blah')
+ self.d.setVar('TESTVAR2', 'testval2')
+ self.d.setVar('TESTVAR3', 'no-testval')
+ deps, values = bb.data.build_dependencies("ANOTHERVAR", set(self.d.keys()), set(), set(), set(), set(), self.d, self.d)
+ self.assertEqual(sorted(values.splitlines()),
+ sorted([
+ expr_prepend + expr_plain + expr_append,
+ '_remove of ' + expr_remove,
+ 'TESTVAR1{testval1} = Unset',
+ 'TESTVAR2{testval2} = Set',
+ 'TESTVAR3{no-testval} = Set',
+ ]))
+ # Check final value
+ self.assertEqual(self.d.getVar('ANOTHERVAR').split(), ['testval2'])
+
#Currently no wildcard support
#def test_vardeps_wildcards(self):
# self.d.setVar("oe_libinstall", "echo test")
# self.d.setVar("FOO", "foo=oe_libinstall; eval $foo")
# self.d.setVarFlag("FOO", "vardeps", "oe_*")
- # self.assertEquals(deps, set(["oe_libinstall"]))
+ # self.assertEqual(deps, set(["oe_libinstall"]))
diff --git a/bitbake/lib/bb/tests/color.py b/bitbake/lib/bb/tests/color.py
index 88dd278006..bb70cb393d 100644
--- a/bitbake/lib/bb/tests/color.py
+++ b/bitbake/lib/bb/tests/color.py
@@ -20,7 +20,7 @@ class ProgressWatcher:
def __init__(self):
self._reports = []
- def handle_event(self, event):
+ def handle_event(self, event, d):
self._reports.append((event.progress, event.rate))
def reports(self):
diff --git a/bitbake/lib/bb/tests/compression.py b/bitbake/lib/bb/tests/compression.py
index d3ddf67f1c..95af3f96d7 100644
--- a/bitbake/lib/bb/tests/compression.py
+++ b/bitbake/lib/bb/tests/compression.py
@@ -1,4 +1,6 @@
#
+# Copyright BitBake Contributors
+#
# SPDX-License-Identifier: GPL-2.0-only
#
diff --git a/bitbake/lib/bb/tests/cooker.py b/bitbake/lib/bb/tests/cooker.py
index c82d4b7b81..9e524ae345 100644
--- a/bitbake/lib/bb/tests/cooker.py
+++ b/bitbake/lib/bb/tests/cooker.py
@@ -1,6 +1,8 @@
#
# BitBake Tests for cooker.py
#
+# Copyright BitBake Contributors
+#
# SPDX-License-Identifier: GPL-2.0-only
#
diff --git a/bitbake/lib/bb/tests/data.py b/bitbake/lib/bb/tests/data.py
index e667c7c7d3..cbc7c1ecd4 100644
--- a/bitbake/lib/bb/tests/data.py
+++ b/bitbake/lib/bb/tests/data.py
@@ -60,6 +60,15 @@ class DataExpansions(unittest.TestCase):
val = self.d.expand("${@5*12}")
self.assertEqual(str(val), "60")
+ def test_python_snippet_w_dict(self):
+ val = self.d.expand("${@{ 'green': 1, 'blue': 2 }['green']}")
+ self.assertEqual(str(val), "1")
+
+ def test_python_unexpanded_multi(self):
+ self.d.setVar("bar", "${unsetvar}")
+ val = self.d.expand("${@2*2},${foo},${@d.getVar('foo') + ' ${bar}'},${foo}")
+ self.assertEqual(str(val), "4,value_of_foo,${@d.getVar('foo') + ' ${unsetvar}'},value_of_foo")
+
def test_expand_in_python_snippet(self):
val = self.d.expand("${@'boo ' + '${foo}'}")
self.assertEqual(str(val), "boo value_of_foo")
@@ -68,6 +77,18 @@ class DataExpansions(unittest.TestCase):
val = self.d.expand("${@d.getVar('foo') + ' ${bar}'}")
self.assertEqual(str(val), "value_of_foo value_of_bar")
+ def test_python_snippet_function_reference(self):
+ self.d.setVar("TESTVAL", "testvalue")
+ self.d.setVar("testfunc", 'd.getVar("TESTVAL")')
+ context = bb.utils.get_context()
+ context["testfunc"] = lambda d: d.getVar("TESTVAL")
+ val = self.d.expand("${@testfunc(d)}")
+ self.assertEqual(str(val), "testvalue")
+
+ def test_python_snippet_builtin_metadata(self):
+ self.d.setVar("eval", "INVALID")
+ self.d.expand("${@eval('3')}")
+
def test_python_unexpanded(self):
self.d.setVar("bar", "${unsetvar}")
val = self.d.expand("${@d.getVar('foo') + ' ${bar}'}")
@@ -374,6 +395,16 @@ class TestOverrides(unittest.TestCase):
self.d.setVar("OVERRIDES", "foo:bar:some_val")
self.assertEqual(self.d.getVar("TEST"), "testvalue3")
+ # Test an override with _<numeric> in it based on a real world OE issue
+ def test_underscore_override_2(self):
+ self.d.setVar("TARGET_ARCH", "x86_64")
+ self.d.setVar("PN", "test-${TARGET_ARCH}")
+ self.d.setVar("VERSION", "1")
+ self.d.setVar("VERSION:pn-test-${TARGET_ARCH}", "2")
+ self.d.setVar("OVERRIDES", "pn-${PN}")
+ bb.data.expandKeys(self.d)
+ self.assertEqual(self.d.getVar("VERSION"), "2")
+
def test_remove_with_override(self):
self.d.setVar("TEST:bar", "testvalue2")
self.d.setVar("TEST:some_val", "testvalue3 testvalue5")
@@ -395,16 +426,6 @@ class TestOverrides(unittest.TestCase):
self.d.setVar("TEST:bar:append", "testvalue2")
self.assertEqual(self.d.getVar("TEST"), "testvalue2")
- # Test an override with _<numeric> in it based on a real world OE issue
- def test_underscore_override(self):
- self.d.setVar("TARGET_ARCH", "x86_64")
- self.d.setVar("PN", "test-${TARGET_ARCH}")
- self.d.setVar("VERSION", "1")
- self.d.setVar("VERSION:pn-test-${TARGET_ARCH}", "2")
- self.d.setVar("OVERRIDES", "pn-${PN}")
- bb.data.expandKeys(self.d)
- self.assertEqual(self.d.getVar("VERSION"), "2")
-
def test_append_and_unused_override(self):
# Had a bug where an unused override append could return "" instead of None
self.d.setVar("BAR:append:unusedoverride", "testvalue2")
diff --git a/bitbake/lib/bb/tests/event.py b/bitbake/lib/bb/tests/event.py
index 9ca7e9bc8e..ef61891d30 100644
--- a/bitbake/lib/bb/tests/event.py
+++ b/bitbake/lib/bb/tests/event.py
@@ -13,6 +13,7 @@ import pickle
import threading
import time
import unittest
+import tempfile
from unittest.mock import Mock
from unittest.mock import call
@@ -157,7 +158,7 @@ class EventHandlingTest(unittest.TestCase):
self._test_process.event_handler,
event,
None)
- self._test_process.event_handler.assert_called_once_with(event)
+ self._test_process.event_handler.assert_called_once_with(event, None)
def test_fire_class_handlers(self):
""" Test fire_class_handlers method """
@@ -175,10 +176,10 @@ class EventHandlingTest(unittest.TestCase):
bb.event.fire_class_handlers(event1, None)
bb.event.fire_class_handlers(event2, None)
bb.event.fire_class_handlers(event2, None)
- expected_event_handler1 = [call(event1)]
- expected_event_handler2 = [call(event1),
- call(event2),
- call(event2)]
+ expected_event_handler1 = [call(event1, None)]
+ expected_event_handler2 = [call(event1, None),
+ call(event2, None),
+ call(event2, None)]
self.assertEqual(self._test_process.event_handler1.call_args_list,
expected_event_handler1)
self.assertEqual(self._test_process.event_handler2.call_args_list,
@@ -205,7 +206,7 @@ class EventHandlingTest(unittest.TestCase):
bb.event.fire_class_handlers(event2, None)
bb.event.fire_class_handlers(event2, None)
expected_event_handler1 = []
- expected_event_handler2 = [call(event1)]
+ expected_event_handler2 = [call(event1, None)]
self.assertEqual(self._test_process.event_handler1.call_args_list,
expected_event_handler1)
self.assertEqual(self._test_process.event_handler2.call_args_list,
@@ -223,7 +224,7 @@ class EventHandlingTest(unittest.TestCase):
self.assertEqual(result, bb.event.Registered)
bb.event.fire_class_handlers(event1, None)
bb.event.fire_class_handlers(event2, None)
- expected = [call(event1), call(event2)]
+ expected = [call(event1, None), call(event2, None)]
self.assertEqual(self._test_process.event_handler1.call_args_list,
expected)
@@ -237,7 +238,7 @@ class EventHandlingTest(unittest.TestCase):
self.assertEqual(result, bb.event.Registered)
bb.event.fire_class_handlers(event1, None)
bb.event.fire_class_handlers(event2, None)
- expected = [call(event1), call(event2), call(event1)]
+ expected = [call(event1, None), call(event2, None), call(event1, None)]
self.assertEqual(self._test_process.event_handler1.call_args_list,
expected)
@@ -251,7 +252,7 @@ class EventHandlingTest(unittest.TestCase):
self.assertEqual(result, bb.event.Registered)
bb.event.fire_class_handlers(event1, None)
bb.event.fire_class_handlers(event2, None)
- expected = [call(event1), call(event2), call(event1), call(event2)]
+ expected = [call(event1,None), call(event2, None), call(event1, None), call(event2, None)]
self.assertEqual(self._test_process.event_handler1.call_args_list,
expected)
@@ -359,9 +360,10 @@ class EventHandlingTest(unittest.TestCase):
event1 = bb.event.ConfigParsed()
bb.event.fire(event1, None)
- expected = [call(event1)]
+ expected = [call(event1, None)]
self.assertEqual(self._test_process.event_handler1.call_args_list,
expected)
+ expected = [call(event1)]
self.assertEqual(self._test_ui1.event.send.call_args_list,
expected)
@@ -450,10 +452,9 @@ class EventHandlingTest(unittest.TestCase):
and disable threadlocks tests """
bb.event.fire(bb.event.OperationStarted(), None)
- def test_enable_threadlock(self):
+ def test_event_threadlock(self):
""" Test enable_threadlock method """
self._set_threadlock_test_mockups()
- bb.event.enable_threadlock()
self._set_and_run_threadlock_test_workers()
# Calls to UI handlers should be in order as all the registered
# handlers for the event coming from the first worker should be
@@ -461,20 +462,6 @@ class EventHandlingTest(unittest.TestCase):
self.assertEqual(self._threadlock_test_calls,
["w1_ui1", "w1_ui2", "w2_ui1", "w2_ui2"])
-
- def test_disable_threadlock(self):
- """ Test disable_threadlock method """
- self._set_threadlock_test_mockups()
- bb.event.disable_threadlock()
- self._set_and_run_threadlock_test_workers()
- # Calls to UI handlers should be intertwined together. Thanks to the
- # delay in the registered handlers for the event coming from the first
- # worker, the event coming from the second worker starts being
- # processed before finishing handling the first worker event.
- self.assertEqual(self._threadlock_test_calls,
- ["w1_ui1", "w2_ui1", "w1_ui2", "w2_ui2"])
-
-
class EventClassesTest(unittest.TestCase):
""" Event classes test class """
@@ -482,6 +469,8 @@ class EventClassesTest(unittest.TestCase):
def setUp(self):
bb.event.worker_pid = EventClassesTest._worker_pid
+ self.d = bb.data.init()
+ bb.parse.siggen = bb.siggen.init(self.d)
def test_Event(self):
""" Test the Event base class """
@@ -964,3 +953,24 @@ class EventClassesTest(unittest.TestCase):
event = bb.event.FindSigInfoResult(result)
self.assertEqual(event.result, result)
self.assertEqual(event.pid, EventClassesTest._worker_pid)
+
+ def test_lineno_in_eventhandler(self):
+ # The error lineno is 5, not 4 since the first line is '\n'
+ error_line = """
+# Comment line1
+# Comment line2
+python test_lineno_in_eventhandler() {
+ This is an error line
+}
+addhandler test_lineno_in_eventhandler
+test_lineno_in_eventhandler[eventmask] = "bb.event.ConfigParsed"
+"""
+
+ with self.assertLogs() as logs:
+ f = tempfile.NamedTemporaryFile(suffix = '.bb')
+ f.write(bytes(error_line, "utf-8"))
+ f.flush()
+ d = bb.parse.handle(f.name, self.d)['']
+
+ output = "".join(logs.output)
+ self.assertTrue(" line 5\n" in output)
diff --git a/bitbake/lib/bb/tests/fetch-testdata/software/libxml2/2.10/index.html b/bitbake/lib/bb/tests/fetch-testdata/software/libxml2/2.10/index.html
new file mode 100644
index 0000000000..4e41af6d6a
--- /dev/null
+++ b/bitbake/lib/bb/tests/fetch-testdata/software/libxml2/2.10/index.html
@@ -0,0 +1,20 @@
+<!DOCTYPE html><html><head><meta http-equiv="content-type" content="text/html; charset=utf-8"><meta name="viewport" content="width=device-width"><style type="text/css">body,html {background:#fff;font-family:"Bitstream Vera Sans","Lucida Grande","Lucida Sans Unicode",Lucidux,Verdana,Lucida,sans-serif;}tr:nth-child(even) {background:#f4f4f4;}th,td {padding:0.1em 0.5em;}th {text-align:left;font-weight:bold;background:#eee;border-bottom:1px solid #aaa;}#list {border:1px solid #aaa;width:100%;}a {color:#a33;}a:hover {color:#e33;}</style>
+
+<title>Index of /sources/libxml2/2.10/</title>
+</head><body><h1>Index of /sources/libxml2/2.10/</h1>
+<table id="list"><thead><tr><th style="width:55%"><a href="?C=N&amp;O=A">File Name</a>&nbsp;<a href="?C=N&amp;O=D">&nbsp;&darr;&nbsp;</a></th><th style="width:20%"><a href="?C=S&amp;O=A">File Size</a>&nbsp;<a href="?C=S&amp;O=D">&nbsp;&darr;&nbsp;</a></th><th style="width:25%"><a href="?C=M&amp;O=A">Date</a>&nbsp;<a href="?C=M&amp;O=D">&nbsp;&darr;&nbsp;</a></th></tr></thead>
+<tbody><tr><td class="link"><a href="../">Parent directory/</a></td><td class="size">-</td><td class="date">-</td></tr>
+<tr><td class="link"><a href="LATEST-IS-2.10.3" title="LATEST-IS-2.10.3">LATEST-IS-2.10.3</a></td><td class="size">2.5 MiB</td><td class="date">2022-Oct-14 12:55</td></tr>
+<tr><td class="link"><a href="libxml2-2.10.0.news" title="libxml2-2.10.0.news">libxml2-2.10.0.news</a></td><td class="size">7.1 KiB</td><td class="date">2022-Aug-17 11:55</td></tr>
+<tr><td class="link"><a href="libxml2-2.10.0.sha256sum" title="libxml2-2.10.0.sha256sum">libxml2-2.10.0.sha256sum</a></td><td class="size">174 B</td><td class="date">2022-Aug-17 11:55</td></tr>
+<tr><td class="link"><a href="libxml2-2.10.0.tar.xz" title="libxml2-2.10.0.tar.xz">libxml2-2.10.0.tar.xz</a></td><td class="size">2.6 MiB</td><td class="date">2022-Aug-17 11:55</td></tr>
+<tr><td class="link"><a href="libxml2-2.10.1.news" title="libxml2-2.10.1.news">libxml2-2.10.1.news</a></td><td class="size">455 B</td><td class="date">2022-Aug-25 11:33</td></tr>
+<tr><td class="link"><a href="libxml2-2.10.1.sha256sum" title="libxml2-2.10.1.sha256sum">libxml2-2.10.1.sha256sum</a></td><td class="size">174 B</td><td class="date">2022-Aug-25 11:33</td></tr>
+<tr><td class="link"><a href="libxml2-2.10.1.tar.xz" title="libxml2-2.10.1.tar.xz">libxml2-2.10.1.tar.xz</a></td><td class="size">2.6 MiB</td><td class="date">2022-Aug-25 11:33</td></tr>
+<tr><td class="link"><a href="libxml2-2.10.2.news" title="libxml2-2.10.2.news">libxml2-2.10.2.news</a></td><td class="size">309 B</td><td class="date">2022-Aug-29 14:56</td></tr>
+<tr><td class="link"><a href="libxml2-2.10.2.sha256sum" title="libxml2-2.10.2.sha256sum">libxml2-2.10.2.sha256sum</a></td><td class="size">174 B</td><td class="date">2022-Aug-29 14:56</td></tr>
+<tr><td class="link"><a href="libxml2-2.10.2.tar.xz" title="libxml2-2.10.2.tar.xz">libxml2-2.10.2.tar.xz</a></td><td class="size">2.5 MiB</td><td class="date">2022-Aug-29 14:56</td></tr>
+<tr><td class="link"><a href="libxml2-2.10.3.news" title="libxml2-2.10.3.news">libxml2-2.10.3.news</a></td><td class="size">294 B</td><td class="date">2022-Oct-14 12:55</td></tr>
+<tr><td class="link"><a href="libxml2-2.10.3.sha256sum" title="libxml2-2.10.3.sha256sum">libxml2-2.10.3.sha256sum</a></td><td class="size">174 B</td><td class="date">2022-Oct-14 12:55</td></tr>
+<tr><td class="link"><a href="libxml2-2.10.3.tar.xz" title="libxml2-2.10.3.tar.xz">libxml2-2.10.3.tar.xz</a></td><td class="size">2.5 MiB</td><td class="date">2022-Oct-14 12:55</td></tr>
+</tbody></table></body></html>
diff --git a/bitbake/lib/bb/tests/fetch-testdata/software/libxml2/2.9/index.html b/bitbake/lib/bb/tests/fetch-testdata/software/libxml2/2.9/index.html
new file mode 100644
index 0000000000..abdfdd0fa2
--- /dev/null
+++ b/bitbake/lib/bb/tests/fetch-testdata/software/libxml2/2.9/index.html
@@ -0,0 +1,40 @@
+<!DOCTYPE html><html><head><meta http-equiv="content-type" content="text/html; charset=utf-8"><meta name="viewport" content="width=device-width"><style type="text/css">body,html {background:#fff;font-family:"Bitstream Vera Sans","Lucida Grande","Lucida Sans Unicode",Lucidux,Verdana,Lucida,sans-serif;}tr:nth-child(even) {background:#f4f4f4;}th,td {padding:0.1em 0.5em;}th {text-align:left;font-weight:bold;background:#eee;border-bottom:1px solid #aaa;}#list {border:1px solid #aaa;width:100%;}a {color:#a33;}a:hover {color:#e33;}</style>
+
+<title>Index of /sources/libxml2/2.9/</title>
+</head><body><h1>Index of /sources/libxml2/2.9/</h1>
+<table id="list"><thead><tr><th style="width:55%"><a href="?C=N&amp;O=A">File Name</a>&nbsp;<a href="?C=N&amp;O=D">&nbsp;&darr;&nbsp;</a></th><th style="width:20%"><a href="?C=S&amp;O=A">File Size</a>&nbsp;<a href="?C=S&amp;O=D">&nbsp;&darr;&nbsp;</a></th><th style="width:25%"><a href="?C=M&amp;O=A">Date</a>&nbsp;<a href="?C=M&amp;O=D">&nbsp;&darr;&nbsp;</a></th></tr></thead>
+<tbody><tr><td class="link"><a href="../">Parent directory/</a></td><td class="size">-</td><td class="date">-</td></tr>
+<tr><td class="link"><a href="LATEST-IS-2.9.14" title="LATEST-IS-2.9.14">LATEST-IS-2.9.14</a></td><td class="size">3.0 MiB</td><td class="date">2022-May-02 12:03</td></tr>
+<tr><td class="link"><a href="libxml2-2.9.0.sha256sum" title="libxml2-2.9.0.sha256sum">libxml2-2.9.0.sha256sum</a></td><td class="size">87 B</td><td class="date">2022-Feb-14 18:27</td></tr>
+<tr><td class="link"><a href="libxml2-2.9.0.tar.xz" title="libxml2-2.9.0.tar.xz">libxml2-2.9.0.tar.xz</a></td><td class="size">3.0 MiB</td><td class="date">2022-Feb-14 18:27</td></tr>
+<tr><td class="link"><a href="libxml2-2.9.1.sha256sum" title="libxml2-2.9.1.sha256sum">libxml2-2.9.1.sha256sum</a></td><td class="size">87 B</td><td class="date">2022-Feb-14 18:28</td></tr>
+<tr><td class="link"><a href="libxml2-2.9.1.tar.xz" title="libxml2-2.9.1.tar.xz">libxml2-2.9.1.tar.xz</a></td><td class="size">3.0 MiB</td><td class="date">2022-Feb-14 18:28</td></tr>
+<tr><td class="link"><a href="libxml2-2.9.10.sha256sum" title="libxml2-2.9.10.sha256sum">libxml2-2.9.10.sha256sum</a></td><td class="size">88 B</td><td class="date">2022-Feb-14 18:42</td></tr>
+<tr><td class="link"><a href="libxml2-2.9.10.tar.xz" title="libxml2-2.9.10.tar.xz">libxml2-2.9.10.tar.xz</a></td><td class="size">3.2 MiB</td><td class="date">2022-Feb-14 18:42</td></tr>
+<tr><td class="link"><a href="libxml2-2.9.11.sha256sum" title="libxml2-2.9.11.sha256sum">libxml2-2.9.11.sha256sum</a></td><td class="size">88 B</td><td class="date">2022-Feb-14 18:43</td></tr>
+<tr><td class="link"><a href="libxml2-2.9.11.tar.xz" title="libxml2-2.9.11.tar.xz">libxml2-2.9.11.tar.xz</a></td><td class="size">3.2 MiB</td><td class="date">2022-Feb-14 18:43</td></tr>
+<tr><td class="link"><a href="libxml2-2.9.12.sha256sum" title="libxml2-2.9.12.sha256sum">libxml2-2.9.12.sha256sum</a></td><td class="size">88 B</td><td class="date">2022-Feb-14 18:45</td></tr>
+<tr><td class="link"><a href="libxml2-2.9.12.tar.xz" title="libxml2-2.9.12.tar.xz">libxml2-2.9.12.tar.xz</a></td><td class="size">3.2 MiB</td><td class="date">2022-Feb-14 18:45</td></tr>
+<tr><td class="link"><a href="libxml2-2.9.13.news" title="libxml2-2.9.13.news">libxml2-2.9.13.news</a></td><td class="size">26.6 KiB</td><td class="date">2022-Feb-20 12:42</td></tr>
+<tr><td class="link"><a href="libxml2-2.9.13.sha256sum" title="libxml2-2.9.13.sha256sum">libxml2-2.9.13.sha256sum</a></td><td class="size">174 B</td><td class="date">2022-Feb-20 12:42</td></tr>
+<tr><td class="link"><a href="libxml2-2.9.13.tar.xz" title="libxml2-2.9.13.tar.xz">libxml2-2.9.13.tar.xz</a></td><td class="size">3.1 MiB</td><td class="date">2022-Feb-20 12:42</td></tr>
+<tr><td class="link"><a href="libxml2-2.9.14.news" title="libxml2-2.9.14.news">libxml2-2.9.14.news</a></td><td class="size">1.0 KiB</td><td class="date">2022-May-02 12:03</td></tr>
+<tr><td class="link"><a href="libxml2-2.9.14.sha256sum" title="libxml2-2.9.14.sha256sum">libxml2-2.9.14.sha256sum</a></td><td class="size">174 B</td><td class="date">2022-May-02 12:03</td></tr>
+<tr><td class="link"><a href="libxml2-2.9.14.tar.xz" title="libxml2-2.9.14.tar.xz">libxml2-2.9.14.tar.xz</a></td><td class="size">3.0 MiB</td><td class="date">2022-May-02 12:03</td></tr>
+<tr><td class="link"><a href="libxml2-2.9.2.sha256sum" title="libxml2-2.9.2.sha256sum">libxml2-2.9.2.sha256sum</a></td><td class="size">87 B</td><td class="date">2022-Feb-14 18:30</td></tr>
+<tr><td class="link"><a href="libxml2-2.9.2.tar.xz" title="libxml2-2.9.2.tar.xz">libxml2-2.9.2.tar.xz</a></td><td class="size">3.2 MiB</td><td class="date">2022-Feb-14 18:30</td></tr>
+<tr><td class="link"><a href="libxml2-2.9.3.sha256sum" title="libxml2-2.9.3.sha256sum">libxml2-2.9.3.sha256sum</a></td><td class="size">87 B</td><td class="date">2022-Feb-14 18:31</td></tr>
+<tr><td class="link"><a href="libxml2-2.9.3.tar.xz" title="libxml2-2.9.3.tar.xz">libxml2-2.9.3.tar.xz</a></td><td class="size">3.2 MiB</td><td class="date">2022-Feb-14 18:31</td></tr>
+<tr><td class="link"><a href="libxml2-2.9.4.sha256sum" title="libxml2-2.9.4.sha256sum">libxml2-2.9.4.sha256sum</a></td><td class="size">87 B</td><td class="date">2022-Feb-14 18:33</td></tr>
+<tr><td class="link"><a href="libxml2-2.9.4.tar.xz" title="libxml2-2.9.4.tar.xz">libxml2-2.9.4.tar.xz</a></td><td class="size">2.9 MiB</td><td class="date">2022-Feb-14 18:33</td></tr>
+<tr><td class="link"><a href="libxml2-2.9.5.sha256sum" title="libxml2-2.9.5.sha256sum">libxml2-2.9.5.sha256sum</a></td><td class="size">87 B</td><td class="date">2022-Feb-14 18:35</td></tr>
+<tr><td class="link"><a href="libxml2-2.9.5.tar.xz" title="libxml2-2.9.5.tar.xz">libxml2-2.9.5.tar.xz</a></td><td class="size">3.0 MiB</td><td class="date">2022-Feb-14 18:35</td></tr>
+<tr><td class="link"><a href="libxml2-2.9.6.sha256sum" title="libxml2-2.9.6.sha256sum">libxml2-2.9.6.sha256sum</a></td><td class="size">87 B</td><td class="date">2022-Feb-14 18:36</td></tr>
+<tr><td class="link"><a href="libxml2-2.9.6.tar.xz" title="libxml2-2.9.6.tar.xz">libxml2-2.9.6.tar.xz</a></td><td class="size">3.0 MiB</td><td class="date">2022-Feb-14 18:36</td></tr>
+<tr><td class="link"><a href="libxml2-2.9.7.sha256sum" title="libxml2-2.9.7.sha256sum">libxml2-2.9.7.sha256sum</a></td><td class="size">87 B</td><td class="date">2022-Feb-14 18:37</td></tr>
+<tr><td class="link"><a href="libxml2-2.9.7.tar.xz" title="libxml2-2.9.7.tar.xz">libxml2-2.9.7.tar.xz</a></td><td class="size">3.0 MiB</td><td class="date">2022-Feb-14 18:37</td></tr>
+<tr><td class="link"><a href="libxml2-2.9.8.sha256sum" title="libxml2-2.9.8.sha256sum">libxml2-2.9.8.sha256sum</a></td><td class="size">87 B</td><td class="date">2022-Feb-14 18:39</td></tr>
+<tr><td class="link"><a href="libxml2-2.9.8.tar.xz" title="libxml2-2.9.8.tar.xz">libxml2-2.9.8.tar.xz</a></td><td class="size">3.0 MiB</td><td class="date">2022-Feb-14 18:39</td></tr>
+<tr><td class="link"><a href="libxml2-2.9.9.sha256sum" title="libxml2-2.9.9.sha256sum">libxml2-2.9.9.sha256sum</a></td><td class="size">87 B</td><td class="date">2022-Feb-14 18:40</td></tr>
+<tr><td class="link"><a href="libxml2-2.9.9.tar.xz" title="libxml2-2.9.9.tar.xz">libxml2-2.9.9.tar.xz</a></td><td class="size">3.0 MiB</td><td class="date">2022-Feb-14 18:40</td></tr>
+</tbody></table></body></html>
diff --git a/bitbake/lib/bb/tests/fetch-testdata/software/libxml2/index.html b/bitbake/lib/bb/tests/fetch-testdata/software/libxml2/index.html
new file mode 100644
index 0000000000..c183e06a55
--- /dev/null
+++ b/bitbake/lib/bb/tests/fetch-testdata/software/libxml2/index.html
@@ -0,0 +1,19 @@
+<!DOCTYPE html><html><head><meta http-equiv="content-type" content="text/html; charset=utf-8"><meta name="viewport" content="width=device-width"><style type="text/css">body,html {background:#fff;font-family:"Bitstream Vera Sans","Lucida Grande","Lucida Sans Unicode",Lucidux,Verdana,Lucida,sans-serif;}tr:nth-child(even) {background:#f4f4f4;}th,td {padding:0.1em 0.5em;}th {text-align:left;font-weight:bold;background:#eee;border-bottom:1px solid #aaa;}#list {border:1px solid #aaa;width:100%;}a {color:#a33;}a:hover {color:#e33;}</style>
+
+<title>Index of /sources/libxml2/</title>
+</head><body><h1>Index of /sources/libxml2/</h1>
+<table id="list"><thead><tr><th style="width:55%"><a href="?C=N&amp;O=A">File Name</a>&nbsp;<a href="?C=N&amp;O=D">&nbsp;&darr;&nbsp;</a></th><th style="width:20%"><a href="?C=S&amp;O=A">File Size</a>&nbsp;<a href="?C=S&amp;O=D">&nbsp;&darr;&nbsp;</a></th><th style="width:25%"><a href="?C=M&amp;O=A">Date</a>&nbsp;<a href="?C=M&amp;O=D">&nbsp;&darr;&nbsp;</a></th></tr></thead>
+<tbody><tr><td class="link"><a href="../">Parent directory/</a></td><td class="size">-</td><td class="date">-</td></tr>
+<tr><td class="link"><a href="2.0/" title="2.0">2.0/</a></td><td class="size">-</td><td class="date">2009-Jul-14 13:04</td></tr>
+<tr><td class="link"><a href="2.1/" title="2.1">2.1/</a></td><td class="size">-</td><td class="date">2009-Jul-14 13:04</td></tr>
+<tr><td class="link"><a href="2.10/" title="2.10">2.10/</a></td><td class="size">-</td><td class="date">2022-Oct-14 12:55</td></tr>
+<tr><td class="link"><a href="2.2/" title="2.2">2.2/</a></td><td class="size">-</td><td class="date">2009-Jul-14 13:04</td></tr>
+<tr><td class="link"><a href="2.3/" title="2.3">2.3/</a></td><td class="size">-</td><td class="date">2009-Jul-14 13:05</td></tr>
+<tr><td class="link"><a href="2.4/" title="2.4">2.4/</a></td><td class="size">-</td><td class="date">2009-Jul-14 13:05</td></tr>
+<tr><td class="link"><a href="2.5/" title="2.5">2.5/</a></td><td class="size">-</td><td class="date">2009-Jul-14 13:05</td></tr>
+<tr><td class="link"><a href="2.6/" title="2.6">2.6/</a></td><td class="size">-</td><td class="date">2009-Jul-14 13:05</td></tr>
+<tr><td class="link"><a href="2.7/" title="2.7">2.7/</a></td><td class="size">-</td><td class="date">2022-Feb-14 18:24</td></tr>
+<tr><td class="link"><a href="2.8/" title="2.8">2.8/</a></td><td class="size">-</td><td class="date">2022-Feb-14 18:26</td></tr>
+<tr><td class="link"><a href="2.9/" title="2.9">2.9/</a></td><td class="size">-</td><td class="date">2022-May-02 12:04</td></tr>
+<tr><td class="link"><a href="cache.json" title="cache.json">cache.json</a></td><td class="size">22.8 KiB</td><td class="date">2022-Oct-14 12:55</td></tr>
+</tbody></table></body></html>
diff --git a/bitbake/lib/bb/tests/fetch.py b/bitbake/lib/bb/tests/fetch.py
index 1152e89c0d..85c1f79ff3 100644
--- a/bitbake/lib/bb/tests/fetch.py
+++ b/bitbake/lib/bb/tests/fetch.py
@@ -6,11 +6,13 @@
# SPDX-License-Identifier: GPL-2.0-only
#
+import contextlib
import unittest
import hashlib
import tempfile
import collections
import os
+import signal
import tarfile
from bb.fetch2 import URI
from bb.fetch2 import FetchMethod
@@ -22,6 +24,25 @@ def skipIfNoNetwork():
return unittest.skip("network test")
return lambda f: f
+class TestTimeout(Exception):
+ # Indicate to pytest that this is not a test suite
+ __test__ = False
+
+class Timeout():
+
+ def __init__(self, seconds):
+ self.seconds = seconds
+
+ def handle_timeout(self, signum, frame):
+ raise TestTimeout("Test failed: timeout reached")
+
+ def __enter__(self):
+ signal.signal(signal.SIGALRM, self.handle_timeout)
+ signal.alarm(self.seconds)
+
+ def __exit__(self, exc_type, exc_val, exc_tb):
+ signal.alarm(0)
+
class URITest(unittest.TestCase):
test_uris = {
"http://www.google.com/index.html" : {
@@ -287,6 +308,21 @@ class URITest(unittest.TestCase):
'params': {"someparam" : "1"},
'query': {},
'relative': True
+ },
+ "https://www.innodisk.com/Download_file?9BE0BF6657;downloadfilename=EGPL-T101.zip": {
+ 'uri': 'https://www.innodisk.com/Download_file?9BE0BF6657;downloadfilename=EGPL-T101.zip',
+ 'scheme': 'https',
+ 'hostname': 'www.innodisk.com',
+ 'port': None,
+ 'hostport': 'www.innodisk.com',
+ 'path': '/Download_file',
+ 'userinfo': '',
+ 'userinfo': '',
+ 'username': '',
+ 'password': '',
+ 'params': {"downloadfilename" : "EGPL-T101.zip"},
+ 'query': {"9BE0BF6657": None},
+ 'relative': False
}
}
@@ -396,18 +432,28 @@ class FetcherTest(unittest.TestCase):
def git(self, cmd, cwd=None):
if isinstance(cmd, str):
- cmd = 'git ' + cmd
+ cmd = 'git -c safe.bareRepository=all ' + cmd
else:
- cmd = ['git'] + cmd
+ cmd = ['git', '-c', 'safe.bareRepository=all'] + cmd
if cwd is None:
cwd = self.gitdir
return bb.process.run(cmd, cwd=cwd)[0]
def git_init(self, cwd=None):
self.git('init', cwd=cwd)
- if not self.git(['config', 'user.email'], cwd=cwd):
+ # Explicitly set initial branch to master as
+ # a common setup is to use other default
+ # branch than master.
+ self.git(['checkout', '-b', 'master'], cwd=cwd)
+
+ try:
+ self.git(['config', 'user.email'], cwd=cwd)
+ except bb.process.ExecutionError:
self.git(['config', 'user.email', 'you@example.com'], cwd=cwd)
- if not self.git(['config', 'user.name'], cwd=cwd):
+
+ try:
+ self.git(['config', 'user.name'], cwd=cwd)
+ except bb.process.ExecutionError:
self.git(['config', 'user.name', 'Your Name'], cwd=cwd)
class MirrorUriTest(FetcherTest):
@@ -468,6 +514,7 @@ class MirrorUriTest(FetcherTest):
"http://.*/.* file:///someotherpath/downloads/"
def test_urireplace(self):
+ self.d.setVar("FILESPATH", ".")
for k, v in self.replaceuris.items():
ud = bb.fetch.FetchData(k[0], self.d)
ud.setup_localpath(self.d)
@@ -515,7 +562,7 @@ class MirrorUriTest(FetcherTest):
class GitDownloadDirectoryNamingTest(FetcherTest):
def setUp(self):
super(GitDownloadDirectoryNamingTest, self).setUp()
- self.recipe_url = "git://git.openembedded.org/bitbake;branch=master"
+ self.recipe_url = "git://git.openembedded.org/bitbake;branch=master;protocol=https"
self.recipe_dir = "git.openembedded.org.bitbake"
self.mirror_url = "git://github.com/openembedded/bitbake.git;protocol=https;branch=master"
self.mirror_dir = "github.com.openembedded.bitbake.git"
@@ -563,7 +610,7 @@ class GitDownloadDirectoryNamingTest(FetcherTest):
class TarballNamingTest(FetcherTest):
def setUp(self):
super(TarballNamingTest, self).setUp()
- self.recipe_url = "git://git.openembedded.org/bitbake;branch=master"
+ self.recipe_url = "git://git.openembedded.org/bitbake;branch=master;protocol=https"
self.recipe_tarball = "git2_git.openembedded.org.bitbake.tar.gz"
self.mirror_url = "git://github.com/openembedded/bitbake.git;protocol=https;branch=master"
self.mirror_tarball = "git2_github.com.openembedded.bitbake.git.tar.gz"
@@ -597,7 +644,7 @@ class TarballNamingTest(FetcherTest):
class GitShallowTarballNamingTest(FetcherTest):
def setUp(self):
super(GitShallowTarballNamingTest, self).setUp()
- self.recipe_url = "git://git.openembedded.org/bitbake;branch=master"
+ self.recipe_url = "git://git.openembedded.org/bitbake;branch=master;protocol=https"
self.recipe_tarball = "gitshallow_git.openembedded.org.bitbake_82ea737-1_master.tar.gz"
self.mirror_url = "git://github.com/openembedded/bitbake.git;protocol=https;branch=master"
self.mirror_tarball = "gitshallow_github.com.openembedded.bitbake.git_82ea737-1_master.tar.gz"
@@ -632,7 +679,7 @@ class GitShallowTarballNamingTest(FetcherTest):
class CleanTarballTest(FetcherTest):
def setUp(self):
super(CleanTarballTest, self).setUp()
- self.recipe_url = "git://git.openembedded.org/bitbake"
+ self.recipe_url = "git://git.openembedded.org/bitbake;protocol=https"
self.recipe_tarball = "git2_git.openembedded.org.bitbake.tar.gz"
self.d.setVar('BB_GENERATE_MIRROR_TARBALLS', '1')
@@ -653,11 +700,13 @@ class CleanTarballTest(FetcherTest):
archive = tarfile.open(os.path.join(self.dldir, self.recipe_tarball))
self.assertNotEqual(len(archive.members), 0)
for member in archive.members:
- self.assertEqual(member.uname, 'oe')
- self.assertEqual(member.uid, 0)
- self.assertEqual(member.gname, 'oe')
- self.assertEqual(member.gid, 0)
- self.assertEqual(member.mtime, mtime)
+ if member.name == ".":
+ continue
+ self.assertEqual(member.uname, 'oe', "user name for %s differs" % member.name)
+ self.assertEqual(member.uid, 0, "uid for %s differs" % member.name)
+ self.assertEqual(member.gname, 'oe', "group name for %s differs" % member.name)
+ self.assertEqual(member.gid, 0, "gid for %s differs" % member.name)
+ self.assertEqual(member.mtime, mtime, "mtime for %s differs" % member.name)
class FetcherLocalTest(FetcherTest):
@@ -693,6 +742,11 @@ class FetcherLocalTest(FetcherTest):
flst.sort()
return flst
+ def test_local_checksum_fails_no_file(self):
+ self.d.setVar("SRC_URI", "file://404")
+ with self.assertRaises(bb.BBHandledException):
+ bb.fetch.get_checksum_file_list(self.d)
+
def test_local(self):
tree = self.fetchUnpack(['file://a', 'file://dir/c'])
self.assertEqual(tree, ['a', 'dir/c'])
@@ -760,7 +814,7 @@ class FetcherLocalTest(FetcherTest):
# Fetch and check revision
self.d.setVar("SRCREV", "AUTOINC")
- self.d.setVar("__BBSEENSRCREV", "1")
+ self.d.setVar("__BBSRCREV_SEEN", "1")
url = "git://" + self.gitdir + ";branch=master;protocol=file;" + suffix
fetcher = bb.fetch.Fetch([url], self.d)
fetcher.download()
@@ -920,6 +974,7 @@ class FetcherNetworkTest(FetcherTest):
@skipIfNoNetwork()
def test_fetch_file_mirror_of_mirror(self):
+ self.d.setVar("FILESPATH", ".")
self.d.setVar("MIRRORS", "http://.*/.* file:///some1where/ file:///some1where/.* file://some2where/ file://some2where/.* https://downloads.yoctoproject.org/releases/bitbake")
fetcher = bb.fetch.Fetch(["http://invalid.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz"], self.d)
os.mkdir(self.dldir + "/some2where")
@@ -985,25 +1040,25 @@ class FetcherNetworkTest(FetcherTest):
@skipIfNoNetwork()
def test_gitfetch(self):
- url1 = url2 = "git://git.openembedded.org/bitbake;branch=master"
+ url1 = url2 = "git://git.openembedded.org/bitbake;branch=master;protocol=https"
self.gitfetcher(url1, url2)
@skipIfNoNetwork()
def test_gitfetch_goodsrcrev(self):
# SRCREV is set but matches rev= parameter
- url1 = url2 = "git://git.openembedded.org/bitbake;rev=270a05b0b4ba0959fe0624d2a4885d7b70426da5;branch=master"
+ url1 = url2 = "git://git.openembedded.org/bitbake;rev=270a05b0b4ba0959fe0624d2a4885d7b70426da5;branch=master;protocol=https"
self.gitfetcher(url1, url2)
@skipIfNoNetwork()
def test_gitfetch_badsrcrev(self):
# SRCREV is set but does not match rev= parameter
- url1 = url2 = "git://git.openembedded.org/bitbake;rev=dead05b0b4ba0959fe0624d2a4885d7b70426da5;branch=master"
+ url1 = url2 = "git://git.openembedded.org/bitbake;rev=dead05b0b4ba0959fe0624d2a4885d7b70426da5;branch=master;protocol=https"
self.assertRaises(bb.fetch.FetchError, self.gitfetcher, url1, url2)
@skipIfNoNetwork()
def test_gitfetch_tagandrev(self):
# SRCREV is set but does not match rev= parameter
- url1 = url2 = "git://git.openembedded.org/bitbake;rev=270a05b0b4ba0959fe0624d2a4885d7b70426da5;tag=270a05b0b4ba0959fe0624d2a4885d7b70426da5"
+ url1 = url2 = "git://git.openembedded.org/bitbake;rev=270a05b0b4ba0959fe0624d2a4885d7b70426da5;tag=270a05b0b4ba0959fe0624d2a4885d7b70426da5;protocol=https"
self.assertRaises(bb.fetch.FetchError, self.gitfetcher, url1, url2)
@skipIfNoNetwork()
@@ -1012,7 +1067,7 @@ class FetcherNetworkTest(FetcherTest):
# `usehead=1' and instead fetch the specified SRCREV. See
# test_local_gitfetch_usehead() for a positive use of the usehead
# feature.
- url = "git://git.openembedded.org/bitbake;usehead=1;branch=master"
+ url = "git://git.openembedded.org/bitbake;usehead=1;branch=master;protocol=https"
self.assertRaises(bb.fetch.ParameterError, self.gitfetcher, url, url)
@skipIfNoNetwork()
@@ -1021,26 +1076,26 @@ class FetcherNetworkTest(FetcherTest):
# `usehead=1' and instead fetch the specified SRCREV. See
# test_local_gitfetch_usehead() for a positive use of the usehead
# feature.
- url = "git://git.openembedded.org/bitbake;usehead=1;name=newName;branch=master"
+ url = "git://git.openembedded.org/bitbake;usehead=1;name=newName;branch=master;protocol=https"
self.assertRaises(bb.fetch.ParameterError, self.gitfetcher, url, url)
@skipIfNoNetwork()
def test_gitfetch_finds_local_tarball_for_mirrored_url_when_previous_downloaded_by_the_recipe_url(self):
- recipeurl = "git://git.openembedded.org/bitbake;branch=master"
- mirrorurl = "git://someserver.org/bitbake;branch=master"
+ recipeurl = "git://git.openembedded.org/bitbake;branch=master;protocol=https"
+ mirrorurl = "git://someserver.org/bitbake;branch=master;protocol=https"
self.d.setVar("PREMIRRORS", "git://someserver.org/bitbake git://git.openembedded.org/bitbake")
self.gitfetcher(recipeurl, mirrorurl)
@skipIfNoNetwork()
def test_gitfetch_finds_local_tarball_when_previous_downloaded_from_a_premirror(self):
- recipeurl = "git://someserver.org/bitbake;branch=master"
+ recipeurl = "git://someserver.org/bitbake;branch=master;protocol=https"
self.d.setVar("PREMIRRORS", "git://someserver.org/bitbake git://git.openembedded.org/bitbake")
self.gitfetcher(recipeurl, recipeurl)
@skipIfNoNetwork()
def test_gitfetch_finds_local_repository_when_premirror_rewrites_the_recipe_url(self):
- realurl = "git://git.openembedded.org/bitbake"
- recipeurl = "git://someserver.org/bitbake"
+ realurl = "https://git.openembedded.org/bitbake"
+ recipeurl = "git://someserver.org/bitbake;protocol=https"
self.sourcedir = self.unpackdir.replace("unpacked", "sourcemirror.git")
os.chdir(self.tempdir)
self.git(['clone', realurl, self.sourcedir], cwd=self.tempdir)
@@ -1050,9 +1105,9 @@ class FetcherNetworkTest(FetcherTest):
@skipIfNoNetwork()
def test_git_submodule(self):
# URL with ssh submodules
- url = "gitsm://git.yoctoproject.org/git-submodule-test;branch=ssh-gitsm-tests;rev=049da4a6cb198d7c0302e9e8b243a1443cb809a7;branch=master"
+ url = "gitsm://git.yoctoproject.org/git-submodule-test;branch=ssh-gitsm-tests;rev=049da4a6cb198d7c0302e9e8b243a1443cb809a7;branch=master;protocol=https"
# Original URL (comment this if you have ssh access to git.yoctoproject.org)
- url = "gitsm://git.yoctoproject.org/git-submodule-test;branch=master;rev=a2885dd7d25380d23627e7544b7bbb55014b16ee;branch=master"
+ url = "gitsm://git.yoctoproject.org/git-submodule-test;branch=master;rev=a2885dd7d25380d23627e7544b7bbb55014b16ee;branch=master;protocol=https"
fetcher = bb.fetch.Fetch([url], self.d)
fetcher.download()
# Previous cwd has been deleted
@@ -1069,6 +1124,25 @@ class FetcherNetworkTest(FetcherTest):
self.assertTrue(os.path.exists(os.path.join(repo_path, 'bitbake-gitsm-test1', 'bitbake')), msg='submodule of submodule missing')
@skipIfNoNetwork()
+ def test_git_submodule_restricted_network_premirrors(self):
+ # this test is to ensure that premirrors will be tried in restricted network
+ # that is, BB_ALLOWED_NETWORKS does not contain the domain the url uses
+ url = "gitsm://github.com/grpc/grpc.git;protocol=https;name=grpc;branch=v1.60.x;rev=0ef13a7555dbaadd4633399242524129eef5e231"
+ # create a download directory to be used as premirror later
+ tempdir = tempfile.mkdtemp(prefix="bitbake-fetch-")
+ dl_premirror = os.path.join(tempdir, "download-premirror")
+ os.mkdir(dl_premirror)
+ self.d.setVar("DL_DIR", dl_premirror)
+ fetcher = bb.fetch.Fetch([url], self.d)
+ fetcher.download()
+ # now use the premirror in restricted network
+ self.d.setVar("DL_DIR", self.dldir)
+ self.d.setVar("PREMIRRORS", "gitsm://.*/.* gitsm://%s/git2/MIRRORNAME;protocol=file" % dl_premirror)
+ self.d.setVar("BB_ALLOWED_NETWORKS", "*.some.domain")
+ fetcher = bb.fetch.Fetch([url], self.d)
+ fetcher.download()
+
+ @skipIfNoNetwork()
def test_git_submodule_dbus_broker(self):
# The following external repositories have show failures in fetch and unpack operations
# We want to avoid regressions!
@@ -1165,6 +1239,15 @@ class FetcherNetworkTest(FetcherTest):
self.assertTrue(os.path.exists(os.path.join(repo_path, 'edgelet/hsm-sys/azure-iot-hsm-c/deps/utpm/deps/c-utility/testtools/umock-c/deps/ctest/README.md')), msg='Missing submodule checkout')
self.assertTrue(os.path.exists(os.path.join(repo_path, 'edgelet/hsm-sys/azure-iot-hsm-c/deps/utpm/deps/c-utility/testtools/umock-c/deps/testrunner/readme.md')), msg='Missing submodule checkout')
+ @skipIfNoNetwork()
+ def test_git_submodule_reference_to_parent(self):
+ self.recipe_url = "gitsm://github.com/gflags/gflags.git;protocol=https;branch=master"
+ self.d.setVar("SRCREV", "14e1138441bbbb584160cb1c0a0426ec1bac35f1")
+ with Timeout(60):
+ fetcher = bb.fetch.Fetch([self.recipe_url], self.d)
+ with self.assertRaises(bb.fetch2.FetchError):
+ fetcher.download()
+
class SVNTest(FetcherTest):
def skipIfNoSvn():
import shutil
@@ -1199,8 +1282,9 @@ class SVNTest(FetcherTest):
cwd=repo_dir)
bb.process.run("svn co %s svnfetch_co" % self.repo_url, cwd=self.tempdir)
- # Github will emulate SVN. Use this to check if we're downloding...
- bb.process.run("svn propset svn:externals 'bitbake https://github.com/PhilipHazel/pcre2.git' .",
+ # Github won't emulate SVN anymore (see https://github.blog/2023-01-20-sunsetting-subversion-support/)
+ # Use still accessible svn repo (only trunk to avoid longer downloads)
+ bb.process.run("svn propset svn:externals 'bitbake https://svn.apache.org/repos/asf/serf/trunk' .",
cwd=os.path.join(self.tempdir, 'svnfetch_co', 'trunk'))
bb.process.run("svn commit --non-interactive -m 'Add external'",
cwd=os.path.join(self.tempdir, 'svnfetch_co', 'trunk'))
@@ -1228,8 +1312,8 @@ class SVNTest(FetcherTest):
self.assertTrue(os.path.exists(os.path.join(self.unpackdir, 'trunk')), msg="Missing trunk")
self.assertTrue(os.path.exists(os.path.join(self.unpackdir, 'trunk', 'README.md')), msg="Missing contents")
- self.assertFalse(os.path.exists(os.path.join(self.unpackdir, 'trunk/bitbake/trunk')), msg="External dir should NOT exist")
- self.assertFalse(os.path.exists(os.path.join(self.unpackdir, 'trunk/bitbake/trunk', 'README')), msg="External README should NOT exit")
+ self.assertFalse(os.path.exists(os.path.join(self.unpackdir, 'trunk/bitbake/protocols')), msg="External dir should NOT exist")
+ self.assertFalse(os.path.exists(os.path.join(self.unpackdir, 'trunk/bitbake/protocols', 'fcgi_buckets.h')), msg="External fcgi_buckets.h should NOT exit")
@skipIfNoSvn()
def test_external_svn(self):
@@ -1242,8 +1326,8 @@ class SVNTest(FetcherTest):
self.assertTrue(os.path.exists(os.path.join(self.unpackdir, 'trunk')), msg="Missing trunk")
self.assertTrue(os.path.exists(os.path.join(self.unpackdir, 'trunk', 'README.md')), msg="Missing contents")
- self.assertTrue(os.path.exists(os.path.join(self.unpackdir, 'trunk/bitbake/trunk')), msg="External dir should exist")
- self.assertTrue(os.path.exists(os.path.join(self.unpackdir, 'trunk/bitbake/trunk', 'README')), msg="External README should exit")
+ self.assertTrue(os.path.exists(os.path.join(self.unpackdir, 'trunk/bitbake/protocols')), msg="External dir should exist")
+ self.assertTrue(os.path.exists(os.path.join(self.unpackdir, 'trunk/bitbake/protocols', 'fcgi_buckets.h')), msg="External fcgi_buckets.h should exit")
class TrustedNetworksTest(FetcherTest):
def test_trusted_network(self):
@@ -1294,14 +1378,17 @@ class URLHandle(unittest.TestCase):
"http://www.google.com/index.html" : ('http', 'www.google.com', '/index.html', '', '', {}),
"cvs://anoncvs@cvs.handhelds.org/cvs;module=familiar/dist/ipkg" : ('cvs', 'cvs.handhelds.org', '/cvs', 'anoncvs', '', {'module': 'familiar/dist/ipkg'}),
"cvs://anoncvs:anonymous@cvs.handhelds.org/cvs;tag=V0-99-81;module=familiar/dist/ipkg" : ('cvs', 'cvs.handhelds.org', '/cvs', 'anoncvs', 'anonymous', collections.OrderedDict([('tag', 'V0-99-81'), ('module', 'familiar/dist/ipkg')])),
- "git://git.openembedded.org/bitbake;branch=@foo" : ('git', 'git.openembedded.org', '/bitbake', '', '', {'branch': '@foo'}),
+ "git://git.openembedded.org/bitbake;branch=@foo;protocol=https" : ('git', 'git.openembedded.org', '/bitbake', '', '', {'branch': '@foo', 'protocol' : 'https'}),
"file://somelocation;someparam=1": ('file', '', 'somelocation', '', '', {'someparam': '1'}),
+ "https://somesite.com/somerepo.git;user=anyUser:idtoken=1234" : ('https', 'somesite.com', '/somerepo.git', '', '', {'user': 'anyUser:idtoken=1234'}),
+ r'git://s.o-me_ONE:!#$%^&*()-_={}[]\|:?,.<>~`@git.openembedded.org/bitbake;branch=main;protocol=https': ('git', 'git.openembedded.org', '/bitbake', 's.o-me_ONE', r'!#$%^&*()-_={}[]\|:?,.<>~`', {'branch': 'main', 'protocol' : 'https'}),
}
# we require a pathname to encodeurl but users can still pass such urls to
# decodeurl and we need to handle them
decodedata = datatable.copy()
decodedata.update({
"http://somesite.net;someparam=1": ('http', 'somesite.net', '/', '', '', {'someparam': '1'}),
+ "npmsw://some.registry.url;package=@pkg;version=latest": ('npmsw', 'some.registry.url', '/', '', '', {'package': '@pkg', 'version': 'latest'}),
})
def test_decodeurl(self):
@@ -1318,37 +1405,39 @@ class FetchLatestVersionTest(FetcherTest):
test_git_uris = {
# version pattern "X.Y.Z"
- ("mx-1.0", "git://github.com/clutter-project/mx.git;branch=mx-1.4;protocol=https", "9b1db6b8060bd00b121a692f942404a24ae2960f", "")
+ ("mx-1.0", "git://github.com/clutter-project/mx.git;branch=mx-1.4;protocol=https", "9b1db6b8060bd00b121a692f942404a24ae2960f", "", "")
: "1.99.4",
# version pattern "vX.Y"
# mirror of git.infradead.org since network issues interfered with testing
- ("mtd-utils", "git://git.yoctoproject.org/mtd-utils.git;branch=master", "ca39eb1d98e736109c64ff9c1aa2a6ecca222d8f", "")
+ ("mtd-utils", "git://git.yoctoproject.org/mtd-utils.git;branch=master;protocol=https", "ca39eb1d98e736109c64ff9c1aa2a6ecca222d8f", "", "")
: "1.5.0",
# version pattern "pkg_name-X.Y"
# mirror of git://anongit.freedesktop.org/git/xorg/proto/presentproto since network issues interfered with testing
- ("presentproto", "git://git.yoctoproject.org/bbfetchtests-presentproto;branch=master", "24f3a56e541b0a9e6c6ee76081f441221a120ef9", "")
+ ("presentproto", "git://git.yoctoproject.org/bbfetchtests-presentproto;branch=master;protocol=https", "24f3a56e541b0a9e6c6ee76081f441221a120ef9", "", "")
: "1.0",
# version pattern "pkg_name-vX.Y.Z"
- ("dtc", "git://git.yoctoproject.org/bbfetchtests-dtc.git;branch=master", "65cc4d2748a2c2e6f27f1cf39e07a5dbabd80ebf", "")
+ ("dtc", "git://git.yoctoproject.org/bbfetchtests-dtc.git;branch=master;protocol=https", "65cc4d2748a2c2e6f27f1cf39e07a5dbabd80ebf", "", "")
: "1.4.0",
# combination version pattern
- ("sysprof", "git://gitlab.gnome.org/GNOME/sysprof.git;protocol=https;branch=master", "cd44ee6644c3641507fb53b8a2a69137f2971219", "")
+ ("sysprof", "git://gitlab.gnome.org/GNOME/sysprof.git;protocol=https;branch=master", "cd44ee6644c3641507fb53b8a2a69137f2971219", "", "")
: "1.2.0",
- ("u-boot-mkimage", "git://git.denx.de/u-boot.git;branch=master;protocol=git", "62c175fbb8a0f9a926c88294ea9f7e88eb898f6c", "")
+ ("u-boot-mkimage", "git://git.denx.de/u-boot.git;branch=master;protocol=git", "62c175fbb8a0f9a926c88294ea9f7e88eb898f6c", "", "")
: "2014.01",
# version pattern "yyyymmdd"
- ("mobile-broadband-provider-info", "git://gitlab.gnome.org/GNOME/mobile-broadband-provider-info.git;protocol=https;branch=master", "4ed19e11c2975105b71b956440acdb25d46a347d", "")
+ ("mobile-broadband-provider-info", "git://gitlab.gnome.org/GNOME/mobile-broadband-provider-info.git;protocol=https;branch=master", "4ed19e11c2975105b71b956440acdb25d46a347d", "", "")
: "20120614",
# packages with a valid UPSTREAM_CHECK_GITTAGREGEX
# mirror of git://anongit.freedesktop.org/xorg/driver/xf86-video-omap since network issues interfered with testing
- ("xf86-video-omap", "git://git.yoctoproject.org/bbfetchtests-xf86-video-omap;branch=master", "ae0394e687f1a77e966cf72f895da91840dffb8f", r"(?P<pver>(\d+\.(\d\.?)*))")
+ ("xf86-video-omap", "git://git.yoctoproject.org/bbfetchtests-xf86-video-omap;branch=master;protocol=https", "ae0394e687f1a77e966cf72f895da91840dffb8f", r"(?P<pver>(\d+\.(\d\.?)*))", "")
: "0.4.3",
- ("build-appliance-image", "git://git.yoctoproject.org/poky;branch=master", "b37dd451a52622d5b570183a81583cc34c2ff555", r"(?P<pver>(([0-9][\.|_]?)+[0-9]))")
+ ("build-appliance-image", "git://git.yoctoproject.org/poky;branch=master;protocol=https", "b37dd451a52622d5b570183a81583cc34c2ff555", r"(?P<pver>(([0-9][\.|_]?)+[0-9]))", "")
: "11.0.0",
- ("chkconfig-alternatives-native", "git://github.com/kergoth/chkconfig;branch=sysroot;protocol=https", "cd437ecbd8986c894442f8fce1e0061e20f04dee", r"chkconfig\-(?P<pver>((\d+[\.\-_]*)+))")
+ ("chkconfig-alternatives-native", "git://github.com/kergoth/chkconfig;branch=sysroot;protocol=https", "cd437ecbd8986c894442f8fce1e0061e20f04dee", r"chkconfig\-(?P<pver>((\d+[\.\-_]*)+))", "")
: "1.3.59",
- ("remake", "git://github.com/rocky/remake.git;protocol=https;branch=master", "f05508e521987c8494c92d9c2871aec46307d51d", r"(?P<pver>(\d+\.(\d+\.)*\d*(\+dbg\d+(\.\d+)*)*))")
+ ("remake", "git://github.com/rocky/remake.git;protocol=https;branch=master", "f05508e521987c8494c92d9c2871aec46307d51d", r"(?P<pver>(\d+\.(\d+\.)*\d*(\+dbg\d+(\.\d+)*)*))", "")
: "3.82+dbg0.9",
+ ("sysdig", "git://github.com/draios/sysdig.git;branch=dev;protocol=https", "4fb6288275f567f63515df0ff0a6518043ecfa9b", r"^(?P<pver>\d+(\.\d+)+)", "10.0.0")
+ : "0.28.0",
}
test_wget_uris = {
@@ -1364,6 +1453,9 @@ class FetchLatestVersionTest(FetcherTest):
# http://www.cmake.org/files/v2.8/cmake-2.8.12.1.tar.gz
("cmake", "/files/v2.8/cmake-2.8.12.1.tar.gz", "", "")
: "2.8.12.1",
+ # https://download.gnome.org/sources/libxml2/2.9/libxml2-2.9.14.tar.xz
+ ("libxml2", "/software/libxml2/2.9/libxml2-2.9.14.tar.xz", "", "")
+ : "2.10.3",
#
# packages with versions only in current directory
#
@@ -1413,6 +1505,9 @@ class FetchLatestVersionTest(FetcherTest):
self.assertTrue(verstring, msg="Could not find upstream version for %s" % k[0])
r = bb.utils.vercmp_string(v, verstring)
self.assertTrue(r == -1 or r == 0, msg="Package %s, version: %s <= %s" % (k[0], v, verstring))
+ if k[4]:
+ r = bb.utils.vercmp_string(verstring, k[4])
+ self.assertTrue(r == -1 or r == 0, msg="Package %s, version: %s <= %s" % (k[0], verstring, k[4]))
def test_wget_latest_versionstring(self):
testdata = os.path.dirname(os.path.abspath(__file__)) + "/fetch-testdata"
@@ -1614,7 +1709,7 @@ class GitShallowTest(FetcherTest):
self.d.setVar('BB_GIT_SHALLOW', '1')
self.d.setVar('BB_GENERATE_MIRROR_TARBALLS', '0')
self.d.setVar('BB_GENERATE_SHALLOW_TARBALLS', '1')
- self.d.setVar("__BBSEENSRCREV", "1")
+ self.d.setVar("__BBSRCREV_SEEN", "1")
def assertRefs(self, expected_refs, cwd=None):
if cwd is None:
@@ -1834,7 +1929,7 @@ class GitShallowTest(FetcherTest):
self.add_empty_file('bsub', cwd=smdir)
self.git('submodule init', cwd=self.srcdir)
- self.git('submodule add file://%s' % smdir, cwd=self.srcdir)
+ self.git('-c protocol.file.allow=always submodule add file://%s' % smdir, cwd=self.srcdir)
self.git('submodule update', cwd=self.srcdir)
self.git('commit -m submodule -a', cwd=self.srcdir)
@@ -1864,7 +1959,7 @@ class GitShallowTest(FetcherTest):
self.add_empty_file('bsub', cwd=smdir)
self.git('submodule init', cwd=self.srcdir)
- self.git('submodule add file://%s' % smdir, cwd=self.srcdir)
+ self.git('-c protocol.file.allow=always submodule add file://%s' % smdir, cwd=self.srcdir)
self.git('submodule update', cwd=self.srcdir)
self.git('commit -m submodule -a', cwd=self.srcdir)
@@ -2149,7 +2244,7 @@ class GitShallowTest(FetcherTest):
self.d.setVar('SRCREV', 'e5939ff608b95cdd4d0ab0e1935781ab9a276ac0')
self.d.setVar('BB_GIT_SHALLOW', '1')
self.d.setVar('BB_GENERATE_SHALLOW_TARBALLS', '1')
- fetcher = bb.fetch.Fetch(["git://git.yoctoproject.org/fstests;branch=master"], self.d)
+ fetcher = bb.fetch.Fetch(["git://git.yoctoproject.org/fstests;branch=master;protocol=https"], self.d)
fetcher.download()
bb.utils.remove(self.dldir + "/*.tar.gz")
@@ -2159,6 +2254,12 @@ class GitShallowTest(FetcherTest):
self.assertIn("fstests.doap", dir)
class GitLfsTest(FetcherTest):
+ def skipIfNoGitLFS():
+ import shutil
+ if not shutil.which('git-lfs'):
+ return unittest.skip('git-lfs not installed')
+ return lambda f: f
+
def setUp(self):
FetcherTest.setUp(self)
@@ -2172,14 +2273,18 @@ class GitLfsTest(FetcherTest):
self.d.setVar('SRCREV', '${AUTOREV}')
self.d.setVar('AUTOREV', '${@bb.fetch2.get_autorev(d)}')
- self.d.setVar("__BBSEENSRCREV", "1")
+ self.d.setVar("__BBSRCREV_SEEN", "1")
bb.utils.mkdirhier(self.srcdir)
self.git_init(cwd=self.srcdir)
- with open(os.path.join(self.srcdir, '.gitattributes'), 'wt') as attrs:
- attrs.write('*.mp3 filter=lfs -text')
- self.git(['add', '.gitattributes'], cwd=self.srcdir)
- self.git(['commit', '-m', "attributes", '.gitattributes'], cwd=self.srcdir)
+ self.commit_file('.gitattributes', '*.mp3 filter=lfs -text')
+
+ def commit_file(self, filename, content):
+ with open(os.path.join(self.srcdir, filename), "w") as f:
+ f.write(content)
+ self.git(["add", filename], cwd=self.srcdir)
+ self.git(["commit", "-m", "Change"], cwd=self.srcdir)
+ return self.git(["rev-parse", "HEAD"], cwd=self.srcdir).strip()
def fetch(self, uri=None, download=True):
uris = self.d.getVar('SRC_URI').split()
@@ -2192,55 +2297,148 @@ class GitLfsTest(FetcherTest):
ud = fetcher.ud[uri]
return fetcher, ud
+ def get_real_git_lfs_file(self):
+ self.d.setVar('PATH', os.environ.get('PATH'))
+ fetcher, ud = self.fetch()
+ fetcher.unpack(self.d.getVar('WORKDIR'))
+ unpacked_lfs_file = os.path.join(self.d.getVar('WORKDIR'), 'git', "Cat_poster_1.jpg")
+ return unpacked_lfs_file
+
+ @skipIfNoGitLFS()
+ def test_fetch_lfs_on_srcrev_change(self):
+ """Test if fetch downloads missing LFS objects when a different revision within an existing repository is requested"""
+ self.git(["lfs", "install", "--local"], cwd=self.srcdir)
+
+ @contextlib.contextmanager
+ def hide_upstream_repository():
+ """Hide the upstream repository to make sure that git lfs cannot pull from it"""
+ temp_name = self.srcdir + ".bak"
+ os.rename(self.srcdir, temp_name)
+ try:
+ yield
+ finally:
+ os.rename(temp_name, self.srcdir)
+
+ def fetch_and_verify(revision, filename, content):
+ self.d.setVar('SRCREV', revision)
+ fetcher, ud = self.fetch()
+
+ with hide_upstream_repository():
+ workdir = self.d.getVar('WORKDIR')
+ fetcher.unpack(workdir)
+
+ with open(os.path.join(workdir, "git", filename)) as f:
+ self.assertEqual(f.read(), content)
+
+ commit_1 = self.commit_file("a.mp3", "version 1")
+ commit_2 = self.commit_file("a.mp3", "version 2")
+
+ self.d.setVar('SRC_URI', "git://%s;protocol=file;lfs=1;branch=master" % self.srcdir)
+
+ # Seed the local download folder by fetching the latest commit and verifying that the LFS contents are
+ # available even when the upstream repository disappears.
+ fetch_and_verify(commit_2, "a.mp3", "version 2")
+ # Verify that even when an older revision is fetched, the needed LFS objects are fetched into the download
+ # folder.
+ fetch_and_verify(commit_1, "a.mp3", "version 1")
+
+ @skipIfNoGitLFS()
+ @skipIfNoNetwork()
+ def test_real_git_lfs_repo_succeeds_without_lfs_param(self):
+ self.d.setVar('SRC_URI', "git://gitlab.com/gitlab-examples/lfs.git;protocol=https;branch=master")
+ f = self.get_real_git_lfs_file()
+ self.assertTrue(os.path.exists(f))
+ self.assertEqual("c0baab607a97839c9a328b4310713307", bb.utils.md5_file(f))
+
+ @skipIfNoGitLFS()
+ @skipIfNoNetwork()
+ def test_real_git_lfs_repo_succeeds(self):
+ self.d.setVar('SRC_URI', "git://gitlab.com/gitlab-examples/lfs.git;protocol=https;branch=master;lfs=1")
+ f = self.get_real_git_lfs_file()
+ self.assertTrue(os.path.exists(f))
+ self.assertEqual("c0baab607a97839c9a328b4310713307", bb.utils.md5_file(f))
+
+ @skipIfNoGitLFS()
+ @skipIfNoNetwork()
+ def test_real_git_lfs_repo_skips(self):
+ self.d.setVar('SRC_URI', "git://gitlab.com/gitlab-examples/lfs.git;protocol=https;branch=master;lfs=0")
+ f = self.get_real_git_lfs_file()
+ # This is the actual non-smudged placeholder file on the repo if git-lfs does not run
+ lfs_file = (
+ 'version https://git-lfs.github.com/spec/v1\n'
+ 'oid sha256:34be66b1a39a1955b46a12588df9d5f6fc1da790e05cf01f3c7422f4bbbdc26b\n'
+ 'size 11423554\n'
+ )
+
+ with open(f) as fh:
+ self.assertEqual(lfs_file, fh.read())
+
+ @skipIfNoGitLFS()
def test_lfs_enabled(self):
import shutil
uri = 'git://%s;protocol=file;lfs=1;branch=master' % self.srcdir
self.d.setVar('SRC_URI', uri)
- # Careful: suppress initial attempt at downloading until
- # we know whether git-lfs is installed.
- fetcher, ud = self.fetch(uri=None, download=False)
- self.assertIsNotNone(ud.method._find_git_lfs)
-
- # If git-lfs can be found, the unpack should be successful. Only
- # attempt this with the real live copy of git-lfs installed.
- if ud.method._find_git_lfs(self.d):
- fetcher.download()
- shutil.rmtree(self.gitdir, ignore_errors=True)
- fetcher.unpack(self.d.getVar('WORKDIR'))
-
- # If git-lfs cannot be found, the unpack should throw an error
- with self.assertRaises(bb.fetch2.FetchError):
- fetcher.download()
- ud.method._find_git_lfs = lambda d: False
- shutil.rmtree(self.gitdir, ignore_errors=True)
- fetcher.unpack(self.d.getVar('WORKDIR'))
+ # With git-lfs installed, test that we can fetch and unpack
+ fetcher, ud = self.fetch()
+ shutil.rmtree(self.gitdir, ignore_errors=True)
+ fetcher.unpack(self.d.getVar('WORKDIR'))
+ @skipIfNoGitLFS()
def test_lfs_disabled(self):
import shutil
uri = 'git://%s;protocol=file;lfs=0;branch=master' % self.srcdir
self.d.setVar('SRC_URI', uri)
- # In contrast to test_lfs_enabled(), allow the implicit download
- # done by self.fetch() to occur here. The point of this test case
- # is to verify that the fetcher can survive even if the source
+ # Verify that the fetcher can survive even if the source
# repository has Git LFS usage configured.
fetcher, ud = self.fetch()
- self.assertIsNotNone(ud.method._find_git_lfs)
-
- # If git-lfs can be found, the unpack should be successful. A
- # live copy of git-lfs is not required for this case, so
- # unconditionally forge its presence.
- ud.method._find_git_lfs = lambda d: True
- shutil.rmtree(self.gitdir, ignore_errors=True)
fetcher.unpack(self.d.getVar('WORKDIR'))
- # If git-lfs cannot be found, the unpack should be successful
- ud.method._find_git_lfs = lambda d: False
- shutil.rmtree(self.gitdir, ignore_errors=True)
- fetcher.unpack(self.d.getVar('WORKDIR'))
+ def test_lfs_enabled_not_installed(self):
+ import shutil
+
+ uri = 'git://%s;protocol=file;lfs=1;branch=master' % self.srcdir
+ self.d.setVar('SRC_URI', uri)
+
+ # Careful: suppress initial attempt at downloading
+ fetcher, ud = self.fetch(uri=None, download=False)
+
+ # Artificially assert that git-lfs is not installed, so
+ # we can verify a failure to unpack in it's absence.
+ old_find_git_lfs = ud.method._find_git_lfs
+ try:
+ # If git-lfs cannot be found, the unpack should throw an error
+ with self.assertRaises(bb.fetch2.FetchError):
+ fetcher.download()
+ ud.method._find_git_lfs = lambda d: False
+ shutil.rmtree(self.gitdir, ignore_errors=True)
+ fetcher.unpack(self.d.getVar('WORKDIR'))
+ finally:
+ ud.method._find_git_lfs = old_find_git_lfs
+
+ def test_lfs_disabled_not_installed(self):
+ import shutil
+
+ uri = 'git://%s;protocol=file;lfs=0;branch=master' % self.srcdir
+ self.d.setVar('SRC_URI', uri)
+
+ # Careful: suppress initial attempt at downloading
+ fetcher, ud = self.fetch(uri=None, download=False)
+
+ # Artificially assert that git-lfs is not installed, so
+ # we can verify a failure to unpack in it's absence.
+ old_find_git_lfs = ud.method._find_git_lfs
+ try:
+ # Even if git-lfs cannot be found, the unpack should be successful
+ fetcher.download()
+ ud.method._find_git_lfs = lambda d: False
+ shutil.rmtree(self.gitdir, ignore_errors=True)
+ fetcher.unpack(self.d.getVar('WORKDIR'))
+ finally:
+ ud.method._find_git_lfs = old_find_git_lfs
class GitURLWithSpacesTest(FetcherTest):
test_git_urls = {
@@ -2285,6 +2483,13 @@ class CrateTest(FetcherTest):
d = self.d
fetcher = bb.fetch2.Fetch(uris, self.d)
+ ud = fetcher.ud[fetcher.urls[0]]
+
+ self.assertIn("name", ud.parm)
+ self.assertEqual(ud.parm["name"], "glob-0.2.11")
+ self.assertIn("downloadfilename", ud.parm)
+ self.assertEqual(ud.parm["downloadfilename"], "glob-0.2.11.crate")
+
fetcher.download()
fetcher.unpack(self.tempdir)
self.assertEqual(sorted(os.listdir(self.tempdir)), ['cargo_home', 'download' , 'unpacked'])
@@ -2293,6 +2498,55 @@ class CrateTest(FetcherTest):
self.assertTrue(os.path.exists(self.tempdir + "/cargo_home/bitbake/glob-0.2.11/src/lib.rs"))
@skipIfNoNetwork()
+ def test_crate_url_matching_recipe(self):
+
+ self.d.setVar('BP', 'glob-0.2.11')
+
+ uri = "crate://crates.io/glob/0.2.11"
+ self.d.setVar('SRC_URI', uri)
+
+ uris = self.d.getVar('SRC_URI').split()
+ d = self.d
+
+ fetcher = bb.fetch2.Fetch(uris, self.d)
+ ud = fetcher.ud[fetcher.urls[0]]
+
+ self.assertIn("name", ud.parm)
+ self.assertEqual(ud.parm["name"], "glob-0.2.11")
+ self.assertIn("downloadfilename", ud.parm)
+ self.assertEqual(ud.parm["downloadfilename"], "glob-0.2.11.crate")
+
+ fetcher.download()
+ fetcher.unpack(self.tempdir)
+ self.assertEqual(sorted(os.listdir(self.tempdir)), ['download', 'glob-0.2.11', 'unpacked'])
+ self.assertEqual(sorted(os.listdir(self.tempdir + "/download")), ['glob-0.2.11.crate', 'glob-0.2.11.crate.done'])
+ self.assertTrue(os.path.exists(self.tempdir + "/glob-0.2.11/src/lib.rs"))
+
+ @skipIfNoNetwork()
+ def test_crate_url_params(self):
+
+ uri = "crate://crates.io/aho-corasick/0.7.20;name=aho-corasick-renamed"
+ self.d.setVar('SRC_URI', uri)
+
+ uris = self.d.getVar('SRC_URI').split()
+ d = self.d
+
+ fetcher = bb.fetch2.Fetch(uris, self.d)
+ ud = fetcher.ud[fetcher.urls[0]]
+
+ self.assertIn("name", ud.parm)
+ self.assertEqual(ud.parm["name"], "aho-corasick-renamed")
+ self.assertIn("downloadfilename", ud.parm)
+ self.assertEqual(ud.parm["downloadfilename"], "aho-corasick-0.7.20.crate")
+
+ fetcher.download()
+ fetcher.unpack(self.tempdir)
+ self.assertEqual(sorted(os.listdir(self.tempdir)), ['cargo_home', 'download' , 'unpacked'])
+ self.assertEqual(sorted(os.listdir(self.tempdir + "/download")), ['aho-corasick-0.7.20.crate', 'aho-corasick-0.7.20.crate.done'])
+ self.assertTrue(os.path.exists(self.tempdir + "/cargo_home/bitbake/aho-corasick-0.7.20/.cargo-checksum.json"))
+ self.assertTrue(os.path.exists(self.tempdir + "/cargo_home/bitbake/aho-corasick-0.7.20/src/lib.rs"))
+
+ @skipIfNoNetwork()
def test_crate_url_multi(self):
uri = "crate://crates.io/glob/0.2.11 crate://crates.io/time/0.1.35"
@@ -2302,6 +2556,19 @@ class CrateTest(FetcherTest):
d = self.d
fetcher = bb.fetch2.Fetch(uris, self.d)
+ ud = fetcher.ud[fetcher.urls[0]]
+
+ self.assertIn("name", ud.parm)
+ self.assertEqual(ud.parm["name"], "glob-0.2.11")
+ self.assertIn("downloadfilename", ud.parm)
+ self.assertEqual(ud.parm["downloadfilename"], "glob-0.2.11.crate")
+
+ ud = fetcher.ud[fetcher.urls[1]]
+ self.assertIn("name", ud.parm)
+ self.assertEqual(ud.parm["name"], "time-0.1.35")
+ self.assertIn("downloadfilename", ud.parm)
+ self.assertEqual(ud.parm["downloadfilename"], "time-0.1.35.crate")
+
fetcher.download()
fetcher.unpack(self.tempdir)
self.assertEqual(sorted(os.listdir(self.tempdir)), ['cargo_home', 'download' , 'unpacked'])
@@ -2311,6 +2578,18 @@ class CrateTest(FetcherTest):
self.assertTrue(os.path.exists(self.tempdir + "/cargo_home/bitbake/time-0.1.35/.cargo-checksum.json"))
self.assertTrue(os.path.exists(self.tempdir + "/cargo_home/bitbake/time-0.1.35/src/lib.rs"))
+ @skipIfNoNetwork()
+ def test_crate_incorrect_cksum(self):
+ uri = "crate://crates.io/aho-corasick/0.7.20"
+ self.d.setVar('SRC_URI', uri)
+ self.d.setVarFlag("SRC_URI", "aho-corasick-0.7.20.sha256sum", hashlib.sha256("Invalid".encode("utf-8")).hexdigest())
+
+ uris = self.d.getVar('SRC_URI').split()
+
+ fetcher = bb.fetch2.Fetch(uris, self.d)
+ with self.assertRaisesRegex(bb.fetch2.FetchError, "Fetcher failure for URL"):
+ fetcher.download()
+
class NPMTest(FetcherTest):
def skipIfNoNpm():
import shutil
@@ -2574,6 +2853,45 @@ class NPMTest(FetcherTest):
@skipIfNoNpm()
@skipIfNoNetwork()
+ def test_npmsw_git(self):
+ swfile = self.create_shrinkwrap_file({
+ 'dependencies': {
+ 'cookie': {
+ 'version': 'github:jshttp/cookie.git#aec1177c7da67e3b3273df96cf476824dbc9ae09',
+ 'from': 'github:jshttp/cookie.git'
+ }
+ }
+ })
+ fetcher = bb.fetch.Fetch(['npmsw://' + swfile], self.d)
+ fetcher.download()
+ self.assertTrue(os.path.exists(os.path.join(self.dldir, 'git2', 'github.com.jshttp.cookie.git')))
+
+ swfile = self.create_shrinkwrap_file({
+ 'dependencies': {
+ 'cookie': {
+ 'version': 'jshttp/cookie.git#aec1177c7da67e3b3273df96cf476824dbc9ae09',
+ 'from': 'jshttp/cookie.git'
+ }
+ }
+ })
+ fetcher = bb.fetch.Fetch(['npmsw://' + swfile], self.d)
+ fetcher.download()
+ self.assertTrue(os.path.exists(os.path.join(self.dldir, 'git2', 'github.com.jshttp.cookie.git')))
+
+ swfile = self.create_shrinkwrap_file({
+ 'dependencies': {
+ 'nodejs': {
+ 'version': 'gitlab:gitlab-examples/nodejs.git#892a1f16725e56cc3a2cb0d677be42935c8fc262',
+ 'from': 'gitlab:gitlab-examples/nodejs'
+ }
+ }
+ })
+ fetcher = bb.fetch.Fetch(['npmsw://' + swfile], self.d)
+ fetcher.download()
+ self.assertTrue(os.path.exists(os.path.join(self.dldir, 'git2', 'gitlab.com.gitlab-examples.nodejs.git')))
+
+ @skipIfNoNpm()
+ @skipIfNoNetwork()
def test_npmsw_dev(self):
swfile = self.create_shrinkwrap_file({
'dependencies': {
@@ -2779,9 +3097,9 @@ class NPMTest(FetcherTest):
class GitSharedTest(FetcherTest):
def setUp(self):
super(GitSharedTest, self).setUp()
- self.recipe_url = "git://git.openembedded.org/bitbake;branch=master"
+ self.recipe_url = "git://git.openembedded.org/bitbake;branch=master;protocol=https"
self.d.setVar('SRCREV', '82ea737a0b42a8b53e11c9cde141e9e9c0bd8c40')
- self.d.setVar("__BBSEENSRCREV", "1")
+ self.d.setVar("__BBSRCREV_SEEN", "1")
@skipIfNoNetwork()
def test_shared_unpack(self):
@@ -2802,3 +3120,246 @@ class GitSharedTest(FetcherTest):
fetcher.unpack(self.unpackdir)
alt = os.path.join(self.unpackdir, 'git/.git/objects/info/alternates')
self.assertFalse(os.path.exists(alt))
+
+
+class FetchPremirroronlyLocalTest(FetcherTest):
+
+ def setUp(self):
+ super(FetchPremirroronlyLocalTest, self).setUp()
+ self.mirrordir = os.path.join(self.tempdir, "mirrors")
+ os.mkdir(self.mirrordir)
+ self.reponame = "bitbake"
+ self.gitdir = os.path.join(self.tempdir, "git", self.reponame)
+ self.recipe_url = "git://git.fake.repo/bitbake;branch=master;protocol=https"
+ self.d.setVar("BB_FETCH_PREMIRRORONLY", "1")
+ self.d.setVar("BB_NO_NETWORK", "1")
+ self.d.setVar("PREMIRRORS", self.recipe_url + " " + "file://{}".format(self.mirrordir) + " \n")
+ self.mirrorname = "git2_git.fake.repo.bitbake.tar.gz"
+ self.mirrorfile = os.path.join(self.mirrordir, self.mirrorname)
+ self.testfilename = "bitbake-fetch.test"
+
+ def make_git_repo(self):
+ recipeurl = "git:/git.fake.repo/bitbake"
+ os.makedirs(self.gitdir)
+ self.git_init(cwd=self.gitdir)
+ for i in range(0):
+ self.git_new_commit()
+ bb.process.run('tar -czvf {} .'.format(os.path.join(self.mirrordir, self.mirrorname)), cwd = self.gitdir)
+
+ def git_new_commit(self):
+ import random
+ os.unlink(os.path.join(self.mirrordir, self.mirrorname))
+ branch = self.git("branch --show-current", self.gitdir).split()
+ with open(os.path.join(self.gitdir, self.testfilename), "w") as testfile:
+ testfile.write("File {} from branch {}; Useless random data {}".format(self.testfilename, branch, random.random()))
+ self.git("add {}".format(self.testfilename), self.gitdir)
+ self.git("commit -a -m \"This random commit {} in branch {}. I'm useless.\"".format(random.random(), branch), self.gitdir)
+ bb.process.run('tar -czvf {} .'.format(os.path.join(self.mirrordir, self.mirrorname)), cwd = self.gitdir)
+ return self.git("rev-parse HEAD", self.gitdir).strip()
+
+ def git_new_branch(self, name):
+ self.git_new_commit()
+ head = self.git("rev-parse HEAD", self.gitdir).strip()
+ self.git("checkout -b {}".format(name), self.gitdir)
+ newrev = self.git_new_commit()
+ self.git("checkout {}".format(head), self.gitdir)
+ return newrev
+
+ def test_mirror_multiple_fetches(self):
+ self.make_git_repo()
+ self.d.setVar("SRCREV", self.git_new_commit())
+ fetcher = bb.fetch.Fetch([self.recipe_url], self.d)
+ fetcher.download()
+ fetcher.unpack(self.unpackdir)
+ ## New commit in premirror. it's not in the download_dir
+ self.d.setVar("SRCREV", self.git_new_commit())
+ fetcher2 = bb.fetch.Fetch([self.recipe_url], self.d)
+ fetcher2.download()
+ fetcher2.unpack(self.unpackdir)
+ ## New commit in premirror. it's not in the download_dir
+ self.d.setVar("SRCREV", self.git_new_commit())
+ fetcher3 = bb.fetch.Fetch([self.recipe_url], self.d)
+ fetcher3.download()
+ fetcher3.unpack(self.unpackdir)
+
+
+ def test_mirror_commit_nonexistent(self):
+ self.make_git_repo()
+ self.d.setVar("SRCREV", "0"*40)
+ fetcher = bb.fetch.Fetch([self.recipe_url], self.d)
+ with self.assertRaises(bb.fetch2.NetworkAccess):
+ fetcher.download()
+
+ def test_mirror_commit_exists(self):
+ self.make_git_repo()
+ self.d.setVar("SRCREV", self.git_new_commit())
+ fetcher = bb.fetch.Fetch([self.recipe_url], self.d)
+ fetcher.download()
+ fetcher.unpack(self.unpackdir)
+
+ def test_mirror_tarball_nonexistent(self):
+ self.d.setVar("SRCREV", "0"*40)
+ fetcher = bb.fetch.Fetch([self.recipe_url], self.d)
+ with self.assertRaises(bb.fetch2.NetworkAccess):
+ fetcher.download()
+
+ def test_mirror_tarball_multiple_branches(self):
+ """
+ test if PREMIRRORS can handle multiple name/branches correctly
+ both branches have required revisions
+ """
+ self.make_git_repo()
+ branch1rev = self.git_new_branch("testbranch1")
+ branch2rev = self.git_new_branch("testbranch2")
+ self.recipe_url = "git://git.fake.repo/bitbake;branch=testbranch1,testbranch2;protocol=https;name=branch1,branch2"
+ self.d.setVar("SRCREV_branch1", branch1rev)
+ self.d.setVar("SRCREV_branch2", branch2rev)
+ fetcher = bb.fetch.Fetch([self.recipe_url], self.d)
+ self.assertTrue(os.path.exists(self.mirrorfile), "Mirror file doesn't exist")
+ fetcher.download()
+ fetcher.unpack(os.path.join(self.tempdir, "unpacked"))
+ unpacked = os.path.join(self.tempdir, "unpacked", "git", self.testfilename)
+ self.assertTrue(os.path.exists(unpacked), "Repo has not been unpackaged properly!")
+ with open(unpacked, 'r') as f:
+ content = f.read()
+ ## We expect to see testbranch1 in the file, not master, not testbranch2
+ self.assertTrue(content.find("testbranch1") != -1, "Wrong branch has been checked out!")
+
+ def test_mirror_tarball_multiple_branches_nobranch(self):
+ """
+ test if PREMIRRORS can handle multiple name/branches correctly
+ Unbalanced name/branches raises ParameterError
+ """
+ self.make_git_repo()
+ branch1rev = self.git_new_branch("testbranch1")
+ branch2rev = self.git_new_branch("testbranch2")
+ self.recipe_url = "git://git.fake.repo/bitbake;branch=testbranch1;protocol=https;name=branch1,branch2"
+ self.d.setVar("SRCREV_branch1", branch1rev)
+ self.d.setVar("SRCREV_branch2", branch2rev)
+ with self.assertRaises(bb.fetch2.ParameterError):
+ fetcher = bb.fetch.Fetch([self.recipe_url], self.d)
+
+ def test_mirror_tarball_multiple_branches_norev(self):
+ """
+ test if PREMIRRORS can handle multiple name/branches correctly
+ one of the branches specifies non existing SRCREV
+ """
+ self.make_git_repo()
+ branch1rev = self.git_new_branch("testbranch1")
+ branch2rev = self.git_new_branch("testbranch2")
+ self.recipe_url = "git://git.fake.repo/bitbake;branch=testbranch1,testbranch2;protocol=https;name=branch1,branch2"
+ self.d.setVar("SRCREV_branch1", branch1rev)
+ self.d.setVar("SRCREV_branch2", "0"*40)
+ fetcher = bb.fetch.Fetch([self.recipe_url], self.d)
+ self.assertTrue(os.path.exists(self.mirrorfile), "Mirror file doesn't exist")
+ with self.assertRaises(bb.fetch2.NetworkAccess):
+ fetcher.download()
+
+
+class FetchPremirroronlyNetworkTest(FetcherTest):
+
+ def setUp(self):
+ super(FetchPremirroronlyNetworkTest, self).setUp()
+ self.mirrordir = os.path.join(self.tempdir, "mirrors")
+ os.mkdir(self.mirrordir)
+ self.reponame = "fstests"
+ self.clonedir = os.path.join(self.tempdir, "git")
+ self.gitdir = os.path.join(self.tempdir, "git", "{}.git".format(self.reponame))
+ self.recipe_url = "git://git.yoctoproject.org/fstests;protocol=https"
+ self.d.setVar("BB_FETCH_PREMIRRORONLY", "1")
+ self.d.setVar("BB_NO_NETWORK", "0")
+ self.d.setVar("PREMIRRORS", self.recipe_url + " " + "file://{}".format(self.mirrordir) + " \n")
+
+ def make_git_repo(self):
+ import shutil
+ self.mirrorname = "git2_git.yoctoproject.org.fstests.tar.gz"
+ os.makedirs(self.clonedir)
+ self.git("clone --bare --shallow-since=\"01.01.2013\" {}".format(self.recipe_url), self.clonedir)
+ bb.process.run('tar -czvf {} .'.format(os.path.join(self.mirrordir, self.mirrorname)), cwd = self.gitdir)
+ shutil.rmtree(self.clonedir)
+
+ @skipIfNoNetwork()
+ def test_mirror_tarball_updated(self):
+ self.make_git_repo()
+ ## Upstream commit is in the mirror
+ self.d.setVar("SRCREV", "49d65d53c2bf558ae6e9185af0f3af7b79d255ec")
+ fetcher = bb.fetch.Fetch([self.recipe_url], self.d)
+ fetcher.download()
+
+ @skipIfNoNetwork()
+ def test_mirror_tarball_outdated(self):
+ self.make_git_repo()
+ ## Upstream commit not in the mirror
+ self.d.setVar("SRCREV", "15413486df1f5a5b5af699b6f3ba5f0984e52a9f")
+ fetcher = bb.fetch.Fetch([self.recipe_url], self.d)
+ with self.assertRaises(bb.fetch2.NetworkAccess):
+ fetcher.download()
+
+class FetchPremirroronlyMercurialTest(FetcherTest):
+ """ Test for premirrors with mercurial repos
+ the test covers also basic hg:// clone (see fetch_and_create_tarball
+ """
+ def skipIfNoHg():
+ import shutil
+ if not shutil.which('hg'):
+ return unittest.skip('Mercurial not installed')
+ return lambda f: f
+
+ def setUp(self):
+ super(FetchPremirroronlyMercurialTest, self).setUp()
+ self.mirrordir = os.path.join(self.tempdir, "mirrors")
+ os.mkdir(self.mirrordir)
+ self.reponame = "libgnt"
+ self.clonedir = os.path.join(self.tempdir, "hg")
+ self.recipe_url = "hg://keep.imfreedom.org/libgnt;module=libgnt"
+ self.d.setVar("SRCREV", "53e8b422faaf")
+ self.mirrorname = "hg_libgnt_keep.imfreedom.org_.libgnt.tar.gz"
+
+ def fetch_and_create_tarball(self):
+ """
+ Ask bitbake to download repo and prepare mirror tarball for us
+ """
+ self.d.setVar("BB_GENERATE_MIRROR_TARBALLS", "1")
+ fetcher = bb.fetch.Fetch([self.recipe_url], self.d)
+ fetcher.download()
+ mirrorfile = os.path.join(self.d.getVar("DL_DIR"), self.mirrorname)
+ self.assertTrue(os.path.exists(mirrorfile), "Mirror tarball {} has not been created".format(mirrorfile))
+ ## moving tarball to mirror directory
+ os.rename(mirrorfile, os.path.join(self.mirrordir, self.mirrorname))
+ self.d.setVar("BB_GENERATE_MIRROR_TARBALLS", "0")
+
+
+ @skipIfNoNetwork()
+ @skipIfNoHg()
+ def test_premirror_mercurial(self):
+ self.fetch_and_create_tarball()
+ self.d.setVar("PREMIRRORS", self.recipe_url + " " + "file://{}".format(self.mirrordir) + " \n")
+ self.d.setVar("BB_FETCH_PREMIRRORONLY", "1")
+ self.d.setVar("BB_NO_NETWORK", "1")
+ fetcher = bb.fetch.Fetch([self.recipe_url], self.d)
+ fetcher.download()
+
+class FetchPremirroronlyBrokenTarball(FetcherTest):
+
+ def setUp(self):
+ super(FetchPremirroronlyBrokenTarball, self).setUp()
+ self.mirrordir = os.path.join(self.tempdir, "mirrors")
+ os.mkdir(self.mirrordir)
+ self.reponame = "bitbake"
+ self.gitdir = os.path.join(self.tempdir, "git", self.reponame)
+ self.recipe_url = "git://git.fake.repo/bitbake;protocol=https"
+ self.d.setVar("BB_FETCH_PREMIRRORONLY", "1")
+ self.d.setVar("BB_NO_NETWORK", "1")
+ self.d.setVar("PREMIRRORS", self.recipe_url + " " + "file://{}".format(self.mirrordir) + " \n")
+ self.mirrorname = "git2_git.fake.repo.bitbake.tar.gz"
+ with open(os.path.join(self.mirrordir, self.mirrorname), 'w') as targz:
+ targz.write("This is not tar.gz file!")
+
+ def test_mirror_broken_download(self):
+ import sys
+ self.d.setVar("SRCREV", "0"*40)
+ fetcher = bb.fetch.Fetch([self.recipe_url], self.d)
+ with self.assertRaises(bb.fetch2.FetchError), self.assertLogs() as logs:
+ fetcher.download()
+ output = "".join(logs.output)
+ self.assertFalse(" not a git repository (or any parent up to mount point /)" in output)
diff --git a/bitbake/lib/bb/tests/parse.py b/bitbake/lib/bb/tests/parse.py
index 2898f9bb14..72d1962e7e 100644
--- a/bitbake/lib/bb/tests/parse.py
+++ b/bitbake/lib/bb/tests/parse.py
@@ -164,6 +164,7 @@ python () {
# become unset/disappear.
#
def test_parse_classextend_contamination(self):
+ self.d.setVar("__bbclasstype", "recipe")
cls = self.parsehelper(self.classextend_bbclass, suffix=".bbclass")
#clsname = os.path.basename(cls.name).replace(".bbclass", "")
self.classextend = self.classextend.replace("###CLASS###", cls.name)
@@ -185,12 +186,158 @@ deltask ${EMPTYVAR}
"""
def test_parse_addtask_deltask(self):
import sys
- f = self.parsehelper(self.addtask_deltask)
+
+ with self.assertLogs() as logs:
+ f = self.parsehelper(self.addtask_deltask)
+ d = bb.parse.handle(f.name, self.d)['']
+
+ output = "".join(logs.output)
+ self.assertTrue("addtask contained multiple 'before' keywords" in output)
+ self.assertTrue("addtask contained multiple 'after' keywords" in output)
+ self.assertTrue('addtask ignored: " do_patch"' in output)
+ #self.assertTrue('dependent task do_foo for do_patch does not exist' in output)
+
+ broken_multiline_comment = """
+# First line of comment \\
+# Second line of comment \\
+
+"""
+ def test_parse_broken_multiline_comment(self):
+ f = self.parsehelper(self.broken_multiline_comment)
+ with self.assertRaises(bb.BBHandledException):
+ d = bb.parse.handle(f.name, self.d)['']
+
+
+ comment_in_var = """
+VAR = " \\
+ SOMEVAL \\
+# some comment \\
+ SOMEOTHERVAL \\
+"
+"""
+ def test_parse_comment_in_var(self):
+ f = self.parsehelper(self.comment_in_var)
+ with self.assertRaises(bb.BBHandledException):
+ d = bb.parse.handle(f.name, self.d)['']
+
+
+ at_sign_in_var_flag = """
+A[flag@.service] = "nonet"
+B[flag@.target] = "ntb"
+C[f] = "flag"
+
+unset A[flag@.service]
+"""
+ def test_parse_at_sign_in_var_flag(self):
+ f = self.parsehelper(self.at_sign_in_var_flag)
d = bb.parse.handle(f.name, self.d)['']
+ self.assertEqual(d.getVar("A"), None)
+ self.assertEqual(d.getVar("B"), None)
+ self.assertEqual(d.getVarFlag("A","flag@.service"), None)
+ self.assertEqual(d.getVarFlag("B","flag@.target"), "ntb")
+ self.assertEqual(d.getVarFlag("C","f"), "flag")
+
+ def test_parse_invalid_at_sign_in_var_flag(self):
+ invalid_at_sign = self.at_sign_in_var_flag.replace("B[f", "B[@f")
+ f = self.parsehelper(invalid_at_sign)
+ with self.assertRaises(bb.parse.ParseError):
+ d = bb.parse.handle(f.name, self.d)['']
+
+ export_function_recipe = """
+inherit someclass
+"""
+
+ export_function_recipe2 = """
+inherit someclass
+
+do_compile () {
+ false
+}
+
+python do_compilepython () {
+ bb.note("Something else")
+}
+
+"""
+ export_function_class = """
+someclass_do_compile() {
+ true
+}
+
+python someclass_do_compilepython () {
+ bb.note("Something")
+}
+
+EXPORT_FUNCTIONS do_compile do_compilepython
+"""
+
+ export_function_class2 = """
+secondclass_do_compile() {
+ true
+}
+
+python secondclass_do_compilepython () {
+ bb.note("Something")
+}
+
+EXPORT_FUNCTIONS do_compile do_compilepython
+"""
- stdout = sys.stdout.getvalue()
- self.assertTrue("addtask contained multiple 'before' keywords" in stdout)
- self.assertTrue("addtask contained multiple 'after' keywords" in stdout)
- self.assertTrue('addtask ignored: " do_patch"' in stdout)
- #self.assertTrue('dependent task do_foo for do_patch does not exist' in stdout)
+ def test_parse_export_functions(self):
+ def check_function_flags(d):
+ self.assertEqual(d.getVarFlag("do_compile", "func"), 1)
+ self.assertEqual(d.getVarFlag("do_compilepython", "func"), 1)
+ self.assertEqual(d.getVarFlag("do_compile", "python"), None)
+ self.assertEqual(d.getVarFlag("do_compilepython", "python"), "1")
+
+ with tempfile.TemporaryDirectory() as tempdir:
+ self.d.setVar("__bbclasstype", "recipe")
+ recipename = tempdir + "/recipe.bb"
+ os.makedirs(tempdir + "/classes")
+ with open(tempdir + "/classes/someclass.bbclass", "w") as f:
+ f.write(self.export_function_class)
+ f.flush()
+ with open(tempdir + "/classes/secondclass.bbclass", "w") as f:
+ f.write(self.export_function_class2)
+ f.flush()
+
+ with open(recipename, "w") as f:
+ f.write(self.export_function_recipe)
+ f.flush()
+ os.chdir(tempdir)
+ d = bb.parse.handle(recipename, bb.data.createCopy(self.d))['']
+ self.assertIn("someclass_do_compile", d.getVar("do_compile"))
+ self.assertIn("someclass_do_compilepython", d.getVar("do_compilepython"))
+ check_function_flags(d)
+
+ recipename2 = tempdir + "/recipe2.bb"
+ with open(recipename2, "w") as f:
+ f.write(self.export_function_recipe2)
+ f.flush()
+
+ d = bb.parse.handle(recipename2, bb.data.createCopy(self.d))['']
+ self.assertNotIn("someclass_do_compile", d.getVar("do_compile"))
+ self.assertNotIn("someclass_do_compilepython", d.getVar("do_compilepython"))
+ self.assertIn("false", d.getVar("do_compile"))
+ self.assertIn("else", d.getVar("do_compilepython"))
+ check_function_flags(d)
+
+ with open(recipename, "a+") as f:
+ f.write("\ninherit secondclass\n")
+ f.flush()
+ with open(recipename2, "a+") as f:
+ f.write("\ninherit secondclass\n")
+ f.flush()
+
+ d = bb.parse.handle(recipename, bb.data.createCopy(self.d))['']
+ self.assertIn("secondclass_do_compile", d.getVar("do_compile"))
+ self.assertIn("secondclass_do_compilepython", d.getVar("do_compilepython"))
+ check_function_flags(d)
+
+ d = bb.parse.handle(recipename2, bb.data.createCopy(self.d))['']
+ self.assertNotIn("someclass_do_compile", d.getVar("do_compile"))
+ self.assertNotIn("someclass_do_compilepython", d.getVar("do_compilepython"))
+ self.assertIn("false", d.getVar("do_compile"))
+ self.assertIn("else", d.getVar("do_compilepython"))
+ check_function_flags(d)
diff --git a/bitbake/lib/bb/tests/runqueue.py b/bitbake/lib/bb/tests/runqueue.py
index 061a5a1f80..cc87e8d6a8 100644
--- a/bitbake/lib/bb/tests/runqueue.py
+++ b/bitbake/lib/bb/tests/runqueue.py
@@ -288,7 +288,7 @@ class RunQueueTests(unittest.TestCase):
with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir:
extraenv = {
"BBMULTICONFIG" : "mc-1 mc_2",
- "BB_SIGNATURE_HANDLER" : "TestMulticonfigDepends",
+ "BB_SIGNATURE_HANDLER" : "basichash",
"EXTRA_BBFILES": "${COREBASE}/recipes/fails-mc/*.bb",
}
tasks = self.run_bitbakecmd(["bitbake", "mc:mc-1:f1"], tempdir, "", extraenv=extraenv, cleanup=True)
diff --git a/bitbake/lib/bb/tests/siggen.py b/bitbake/lib/bb/tests/siggen.py
index c21ab4e4fb..0dc67e6cc2 100644
--- a/bitbake/lib/bb/tests/siggen.py
+++ b/bitbake/lib/bb/tests/siggen.py
@@ -17,75 +17,12 @@ import bb.siggen
class SiggenTest(unittest.TestCase):
- def test_clean_basepath_simple_target_basepath(self):
- basepath = '/full/path/to/poky/meta/recipes-whatever/helloworld/helloworld_1.2.3.bb:do_sometask'
- expected_cleaned = 'helloworld/helloworld_1.2.3.bb:do_sometask'
+ def test_build_pnid(self):
+ tests = {
+ ('', 'helloworld', 'do_sometask') : 'helloworld:do_sometask',
+ ('XX', 'helloworld', 'do_sometask') : 'mc:XX:helloworld:do_sometask',
+ }
- actual_cleaned = bb.siggen.clean_basepath(basepath)
+ for t in tests:
+ self.assertEqual(bb.siggen.build_pnid(*t), tests[t])
- self.assertEqual(actual_cleaned, expected_cleaned)
-
- def test_clean_basepath_basic_virtual_basepath(self):
- basepath = 'virtual:something:/full/path/to/poky/meta/recipes-whatever/helloworld/helloworld_1.2.3.bb:do_sometask'
- expected_cleaned = 'helloworld/helloworld_1.2.3.bb:do_sometask:virtual:something'
-
- actual_cleaned = bb.siggen.clean_basepath(basepath)
-
- self.assertEqual(actual_cleaned, expected_cleaned)
-
- def test_clean_basepath_mc_basepath(self):
- basepath = 'mc:somemachine:/full/path/to/poky/meta/recipes-whatever/helloworld/helloworld_1.2.3.bb:do_sometask'
- expected_cleaned = 'helloworld/helloworld_1.2.3.bb:do_sometask:mc:somemachine'
-
- actual_cleaned = bb.siggen.clean_basepath(basepath)
-
- self.assertEqual(actual_cleaned, expected_cleaned)
-
- def test_clean_basepath_virtual_long_prefix_basepath(self):
- basepath = 'virtual:something:A:B:C:/full/path/to/poky/meta/recipes-whatever/helloworld/helloworld_1.2.3.bb:do_sometask'
- expected_cleaned = 'helloworld/helloworld_1.2.3.bb:do_sometask:virtual:something:A:B:C'
-
- actual_cleaned = bb.siggen.clean_basepath(basepath)
-
- self.assertEqual(actual_cleaned, expected_cleaned)
-
- def test_clean_basepath_mc_virtual_basepath(self):
- basepath = 'mc:somemachine:virtual:something:/full/path/to/poky/meta/recipes-whatever/helloworld/helloworld_1.2.3.bb:do_sometask'
- expected_cleaned = 'helloworld/helloworld_1.2.3.bb:do_sometask:virtual:something:mc:somemachine'
-
- actual_cleaned = bb.siggen.clean_basepath(basepath)
-
- self.assertEqual(actual_cleaned, expected_cleaned)
-
- def test_clean_basepath_mc_virtual_long_prefix_basepath(self):
- basepath = 'mc:X:virtual:something:C:B:A:/full/path/to/poky/meta/recipes-whatever/helloworld/helloworld_1.2.3.bb:do_sometask'
- expected_cleaned = 'helloworld/helloworld_1.2.3.bb:do_sometask:virtual:something:C:B:A:mc:X'
-
- actual_cleaned = bb.siggen.clean_basepath(basepath)
-
- self.assertEqual(actual_cleaned, expected_cleaned)
-
-
- # def test_clean_basepath_performance(self):
- # input_basepaths = [
- # 'mc:X:/full/path/to/poky/meta/recipes-whatever/helloworld/helloworld_1.2.3.bb:do_sometask',
- # 'mc:X:virtual:something:C:B:A:/full/path/to/poky/meta/recipes-whatever/helloworld/helloworld_1.2.3.bb:do_sometask',
- # 'virtual:something:C:B:A:/different/path/to/poky/meta/recipes-whatever/helloworld/helloworld_1.2.3.bb:do_sometask',
- # 'virtual:something:A:/full/path/to/poky/meta/recipes-whatever/helloworld/helloworld_1.2.3.bb:do_sometask',
- # '/this/is/most/common/input/recipes-whatever/helloworld/helloworld_1.2.3.bb:do_sometask',
- # '/and/should/be/tested/with/recipes-whatever/helloworld/helloworld_1.2.3.bb:do_sometask',
- # '/more/weight/recipes-whatever/helloworld/helloworld_1.2.3.bb:do_sometask',
- # ]
-
- # time_start = time.time()
-
- # i = 2000000
- # while i >= 0:
- # for basepath in input_basepaths:
- # bb.siggen.clean_basepath(basepath)
- # i -= 1
-
- # elapsed = time.time() - time_start
- # print('{} ({}s)'.format(self.id(), round(elapsed, 3)))
-
- # self.assertTrue(False)
diff --git a/bitbake/lib/bb/tinfoil.py b/bitbake/lib/bb/tinfoil.py
index e68a3b879a..dcd3910cc4 100644
--- a/bitbake/lib/bb/tinfoil.py
+++ b/bitbake/lib/bb/tinfoil.py
@@ -10,6 +10,7 @@
import logging
import os
import sys
+import time
import atexit
import re
from collections import OrderedDict, defaultdict
@@ -324,11 +325,11 @@ class Tinfoil:
self.recipes_parsed = False
self.quiet = 0
self.oldhandlers = self.logger.handlers[:]
+ self.localhandlers = []
if setup_logging:
# This is the *client-side* logger, nothing to do with
# logging messages from the server
bb.msg.logger_create('BitBake', output)
- self.localhandlers = []
for handler in self.logger.handlers:
if handler not in self.oldhandlers:
self.localhandlers.append(handler)
@@ -448,6 +449,12 @@ class Tinfoil:
self.run_actions(config_params)
self.recipes_parsed = True
+ def modified_files(self):
+ """
+ Notify the server it needs to revalidate it's caches since the client has modified files
+ """
+ self.run_command("revalidateCaches")
+
def run_command(self, command, *params, handle_events=True):
"""
Run a command on the server (as implemented in bb.command).
@@ -729,6 +736,7 @@ class Tinfoil:
ret = self.run_command('buildTargets', targets, task)
if handle_events:
+ lastevent = time.time()
result = False
# Borrowed from knotty, instead somewhat hackily we use the helper
# as the object to store "shutdown" on
@@ -741,6 +749,7 @@ class Tinfoil:
try:
event = self.wait_event(0.25)
if event:
+ lastevent = time.time()
if event_callback and event_callback(event):
continue
if helper.eventHandler(event):
@@ -773,7 +782,7 @@ class Tinfoil:
if isinstance(event, bb.command.CommandCompleted):
result = True
break
- if isinstance(event, bb.command.CommandFailed):
+ if isinstance(event, (bb.command.CommandFailed, bb.command.CommandExit)):
self.logger.error(str(event))
result = False
break
@@ -785,10 +794,13 @@ class Tinfoil:
self.logger.error(str(event))
result = False
break
-
elif helper.shutdown > 1:
break
termfilter.updateFooter()
+ if time.time() > (lastevent + (3*60)):
+ if not self.run_command('ping', handle_events=False):
+ print("\nUnable to ping server and no events, closing down...\n")
+ return False
except KeyboardInterrupt:
termfilter.clearFooter()
if helper.shutdown == 1:
diff --git a/bitbake/lib/bb/ui/buildinfohelper.py b/bitbake/lib/bb/ui/buildinfohelper.py
index 129bb329c3..8b212b7803 100644
--- a/bitbake/lib/bb/ui/buildinfohelper.py
+++ b/bitbake/lib/bb/ui/buildinfohelper.py
@@ -1746,7 +1746,6 @@ class BuildInfoHelper(object):
buildname = self.server.runCommand(['getVariable', 'BUILDNAME'])[0]
machine = self.server.runCommand(['getVariable', 'MACHINE'])[0]
- image_name = self.server.runCommand(['getVariable', 'IMAGE_NAME'])[0]
# location of the manifest files for this build;
# note that this file is only produced if an image is produced
@@ -1767,6 +1766,18 @@ class BuildInfoHelper(object):
# filter out anything which isn't an image target
image_targets = [target for target in targets if target.is_image]
+ if len(image_targets) > 0:
+ #if there are image targets retrieve image_name
+ image_name = self.server.runCommand(['getVariable', 'IMAGE_NAME'])[0]
+ if not image_name:
+ #When build target is an image and image_name is not found as an environment variable
+ logger.info("IMAGE_NAME not found, extracting from bitbake command")
+ cmd = self.server.runCommand(['getVariable','BB_CMDLINE'])[0]
+ #filter out tokens that are command line options
+ cmd = [token for token in cmd if not token.startswith('-')]
+ image_name = cmd[1].split(':', 1)[0] # remove everything after : in image name
+ logger.info("IMAGE_NAME found as : %s " % image_name)
+
for image_target in image_targets:
# this is set to True if we find at least one file relating to
# this target; if this remains False after the scan, we copy the
diff --git a/bitbake/lib/bb/ui/eventreplay.py b/bitbake/lib/bb/ui/eventreplay.py
new file mode 100644
index 0000000000..d62ecbfa56
--- /dev/null
+++ b/bitbake/lib/bb/ui/eventreplay.py
@@ -0,0 +1,86 @@
+#!/usr/bin/env python3
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+# This file re-uses code spread throughout other Bitbake source files.
+# As such, all other copyrights belong to their own right holders.
+#
+
+
+import os
+import sys
+import json
+import pickle
+import codecs
+
+
+class EventPlayer:
+ """Emulate a connection to a bitbake server."""
+
+ def __init__(self, eventfile, variables):
+ self.eventfile = eventfile
+ self.variables = variables
+ self.eventmask = []
+
+ def waitEvent(self, _timeout):
+ """Read event from the file."""
+ line = self.eventfile.readline().strip()
+ if not line:
+ return
+ try:
+ decodedline = json.loads(line)
+ if 'allvariables' in decodedline:
+ self.variables = decodedline['allvariables']
+ return
+ if not 'vars' in decodedline:
+ raise ValueError
+ event_str = decodedline['vars'].encode('utf-8')
+ event = pickle.loads(codecs.decode(event_str, 'base64'))
+ event_name = "%s.%s" % (event.__module__, event.__class__.__name__)
+ if event_name not in self.eventmask:
+ return
+ return event
+ except ValueError as err:
+ print("Failed loading ", line)
+ raise err
+
+ def runCommand(self, command_line):
+ """Emulate running a command on the server."""
+ name = command_line[0]
+
+ if name == "getVariable":
+ var_name = command_line[1]
+ variable = self.variables.get(var_name)
+ if variable:
+ return variable['v'], None
+ return None, "Missing variable %s" % var_name
+
+ elif name == "getAllKeysWithFlags":
+ dump = {}
+ flaglist = command_line[1]
+ for key, val in self.variables.items():
+ try:
+ if not key.startswith("__"):
+ dump[key] = {
+ 'v': val['v'],
+ 'history' : val['history'],
+ }
+ for flag in flaglist:
+ dump[key][flag] = val[flag]
+ except Exception as err:
+ print(err)
+ return (dump, None)
+
+ elif name == 'setEventMask':
+ self.eventmask = command_line[-1]
+ return True, None
+
+ else:
+ raise Exception("Command %s not implemented" % command_line[0])
+
+ def getEventHandle(self):
+ """
+ This method is called by toasterui.
+ The return value is passed to self.runCommand but not used there.
+ """
+ pass
diff --git a/bitbake/lib/bb/ui/knotty.py b/bitbake/lib/bb/ui/knotty.py
index 61cf0a37f4..f86999bb09 100644
--- a/bitbake/lib/bb/ui/knotty.py
+++ b/bitbake/lib/bb/ui/knotty.py
@@ -179,7 +179,7 @@ class TerminalFilter(object):
new[3] = new[3] & ~termios.ECHO
termios.tcsetattr(fd, termios.TCSADRAIN, new)
curses.setupterm()
- if curses.tigetnum("colors") > 2:
+ if curses.tigetnum("colors") > 2 and os.environ.get('NO_COLOR', '') == '':
for h in handlers:
try:
h.formatter.enable_color()
@@ -420,6 +420,11 @@ def main(server, eventHandler, params, tf = TerminalFilter):
except bb.BBHandledException:
drain_events_errorhandling(eventHandler)
return 1
+ except Exception as e:
+ # bitbake-server comms failure
+ early_logger = bb.msg.logger_create('bitbake', sys.stdout)
+ early_logger.fatal("Attempting to set server environment: %s", e)
+ return 1
if params.options.quiet == 0:
console_loglevel = loglevel
@@ -585,7 +590,12 @@ def main(server, eventHandler, params, tf = TerminalFilter):
return
llevel, debug_domains = bb.msg.constructLogOptions()
- server.runCommand(["setEventMask", server.getEventHandle(), llevel, debug_domains, _evt_list])
+ try:
+ server.runCommand(["setEventMask", server.getEventHandle(), llevel, debug_domains, _evt_list])
+ except (BrokenPipeError, EOFError) as e:
+ # bitbake-server comms failure
+ logger.fatal("Attempting to set event mask: %s", e)
+ return 1
# The logging_tree module is *extremely* helpful in debugging logging
# domains. Uncomment here to dump the logging tree when bitbake starts
@@ -594,7 +604,11 @@ def main(server, eventHandler, params, tf = TerminalFilter):
universe = False
if not params.observe_only:
- params.updateFromServer(server)
+ try:
+ params.updateFromServer(server)
+ except Exception as e:
+ logger.fatal("Fetching command line: %s", e)
+ return 1
cmdline = params.parseActions()
if not cmdline:
print("Nothing to do. Use 'bitbake world' to build everything, or run 'bitbake --help' for usage information.")
@@ -605,7 +619,12 @@ def main(server, eventHandler, params, tf = TerminalFilter):
if cmdline['action'][0] == "buildTargets" and "universe" in cmdline['action'][1]:
universe = True
- ret, error = server.runCommand(cmdline['action'])
+ try:
+ ret, error = server.runCommand(cmdline['action'])
+ except (BrokenPipeError, EOFError) as e:
+ # bitbake-server comms failure
+ logger.fatal("Command '{}' failed: %s".format(cmdline), e)
+ return 1
if error:
logger.error("Command '%s' failed: %s" % (cmdline, error))
return 1
@@ -625,25 +644,38 @@ def main(server, eventHandler, params, tf = TerminalFilter):
printintervaldelta = 10 * 60 # 10 minutes
printinterval = printintervaldelta
- lastprint = time.time()
+ pinginterval = 1 * 60 # 1 minute
+ lastevent = lastprint = time.time()
termfilter = tf(main, helper, console_handlers, params.options.quiet)
atexit.register(termfilter.finish)
- while True:
+ # shutdown levels
+ # 0 - normal operation
+ # 1 - no new task execution, let current running tasks finish
+ # 2 - interrupting currently executing tasks
+ # 3 - we're done, exit
+ while main.shutdown < 3:
try:
if (lastprint + printinterval) <= time.time():
termfilter.keepAlive(printinterval)
printinterval += printintervaldelta
event = eventHandler.waitEvent(0)
if event is None:
- if main.shutdown > 1:
- break
+ if (lastevent + pinginterval) <= time.time():
+ ret, error = server.runCommand(["ping"])
+ if error or not ret:
+ termfilter.clearFooter()
+ print("No reply after pinging server (%s, %s), exiting." % (str(error), str(ret)))
+ return_value = 3
+ main.shutdown = 3
+ lastevent = time.time()
if not parseprogress:
termfilter.updateFooter()
event = eventHandler.waitEvent(0.25)
if event is None:
continue
+ lastevent = time.time()
helper.eventHandler(event)
if isinstance(event, bb.runqueue.runQueueExitWait):
if not main.shutdown:
@@ -748,15 +780,15 @@ def main(server, eventHandler, params, tf = TerminalFilter):
if event.error:
errors = errors + 1
logger.error(str(event))
- main.shutdown = 2
+ main.shutdown = 3
continue
if isinstance(event, bb.command.CommandExit):
if not return_value:
return_value = event.exitcode
- main.shutdown = 2
+ main.shutdown = 3
continue
if isinstance(event, (bb.command.CommandCompleted, bb.cooker.CookerExit)):
- main.shutdown = 2
+ main.shutdown = 3
continue
if isinstance(event, bb.event.MultipleProviders):
logger.info(str(event))
@@ -841,15 +873,26 @@ def main(server, eventHandler, params, tf = TerminalFilter):
logger.error("Unknown event: %s", event)
+ except (BrokenPipeError, EOFError) as e:
+ # bitbake-server comms failure, don't attempt further comms and exit
+ logger.fatal("Executing event: %s", e)
+ return_value = 1
+ errors = errors + 1
+ main.shutdown = 3
except EnvironmentError as ioerror:
termfilter.clearFooter()
# ignore interrupted io
if ioerror.args[0] == 4:
continue
sys.stderr.write(str(ioerror))
- if not params.observe_only:
- _, error = server.runCommand(["stateForceShutdown"])
main.shutdown = 2
+ if not params.observe_only:
+ try:
+ _, error = server.runCommand(["stateForceShutdown"])
+ except (BrokenPipeError, EOFError) as e:
+ # bitbake-server comms failure, don't attempt further comms and exit
+ logger.fatal("Unable to force shutdown: %s", e)
+ main.shutdown = 3
except KeyboardInterrupt:
termfilter.clearFooter()
if params.observe_only:
@@ -858,9 +901,13 @@ def main(server, eventHandler, params, tf = TerminalFilter):
def state_force_shutdown():
print("\nSecond Keyboard Interrupt, stopping...\n")
- _, error = server.runCommand(["stateForceShutdown"])
- if error:
- logger.error("Unable to cleanly stop: %s" % error)
+ try:
+ _, error = server.runCommand(["stateForceShutdown"])
+ if error:
+ logger.error("Unable to cleanly stop: %s" % error)
+ except (BrokenPipeError, EOFError) as e:
+ # bitbake-server comms failure
+ logger.fatal("Unable to cleanly stop: %s", e)
if not params.observe_only and main.shutdown == 1:
state_force_shutdown()
@@ -873,6 +920,9 @@ def main(server, eventHandler, params, tf = TerminalFilter):
_, error = server.runCommand(["stateShutdown"])
if error:
logger.error("Unable to cleanly shutdown: %s" % error)
+ except (BrokenPipeError, EOFError) as e:
+ # bitbake-server comms failure
+ logger.fatal("Unable to cleanly shutdown: %s", e)
except KeyboardInterrupt:
state_force_shutdown()
@@ -880,9 +930,14 @@ def main(server, eventHandler, params, tf = TerminalFilter):
except Exception as e:
import traceback
sys.stderr.write(traceback.format_exc())
- if not params.observe_only:
- _, error = server.runCommand(["stateForceShutdown"])
main.shutdown = 2
+ if not params.observe_only:
+ try:
+ _, error = server.runCommand(["stateForceShutdown"])
+ except (BrokenPipeError, EOFError) as e:
+ # bitbake-server comms failure, don't attempt further comms and exit
+ logger.fatal("Unable to force shutdown: %s", e)
+ main.shudown = 3
return_value = 1
try:
termfilter.clearFooter()
diff --git a/bitbake/lib/bb/ui/ncurses.py b/bitbake/lib/bb/ui/ncurses.py
index cf1c876a51..18a706547a 100644
--- a/bitbake/lib/bb/ui/ncurses.py
+++ b/bitbake/lib/bb/ui/ncurses.py
@@ -227,6 +227,9 @@ class NCursesUI:
shutdown = 0
try:
+ if not params.observe_only:
+ params.updateToServer(server, os.environ.copy())
+
params.updateFromServer(server)
cmdline = params.parseActions()
if not cmdline:
diff --git a/bitbake/lib/bb/ui/taskexp.py b/bitbake/lib/bb/ui/taskexp.py
index c00eaf6638..bedfd69b09 100644
--- a/bitbake/lib/bb/ui/taskexp.py
+++ b/bitbake/lib/bb/ui/taskexp.py
@@ -177,7 +177,7 @@ class gtkthread(threading.Thread):
quit = threading.Event()
def __init__(self, shutdown):
threading.Thread.__init__(self)
- self.setDaemon(True)
+ self.daemon = True
self.shutdown = shutdown
if not Gtk.init_check()[0]:
sys.stderr.write("Gtk+ init failed. Make sure DISPLAY variable is set.\n")
diff --git a/bitbake/lib/bb/ui/taskexp_ncurses.py b/bitbake/lib/bb/ui/taskexp_ncurses.py
new file mode 100755
index 0000000000..ea94a4987f
--- /dev/null
+++ b/bitbake/lib/bb/ui/taskexp_ncurses.py
@@ -0,0 +1,1511 @@
+#
+# BitBake Graphical ncurses-based Dependency Explorer
+# * Based on the GTK implementation
+# * Intended to run on any Linux host
+#
+# Copyright (C) 2007 Ross Burton
+# Copyright (C) 2007 - 2008 Richard Purdie
+# Copyright (C) 2022 - 2024 David Reyna
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
+#
+# Execution example:
+# $ bitbake -g -u taskexp_ncurses zlib acl
+#
+# Self-test example (executes a script of GUI actions):
+# $ TASK_EXP_UNIT_TEST=1 bitbake -g -u taskexp_ncurses zlib acl
+# ...
+# $ echo $?
+# 0
+# $ TASK_EXP_UNIT_TEST=1 bitbake -g -u taskexp_ncurses zlib acl foo
+# ERROR: Nothing PROVIDES 'foo'. Close matches:
+# ofono
+# $ echo $?
+# 1
+#
+# Self-test with no terminal example (only tests dependency fetch from bitbake):
+# $ TASK_EXP_UNIT_TEST_NOTERM=1 bitbake -g -u taskexp_ncurses quilt
+# $ echo $?
+# 0
+#
+# Features:
+# * Ncurses is used for the presentation layer. Only the 'curses'
+# library is used (none of the extension libraries), plus only
+# one main screen is used (no sub-windows)
+# * Uses the 'generateDepTreeEvent' bitbake event to fetch the
+# dynamic dependency data based on passed recipes
+# * Computes and provides reverse dependencies
+# * Supports task sorting on:
+# (a) Task dependency order within each recipe
+# (b) Pure alphabetical order
+# (c) Provisions for third sort order (bitbake order?)
+# * The 'Filter' does a "*string*" wildcard filter on tasks in the
+# main window, dynamically re-ordering and re-centering the content
+# * A 'Print' function exports the selected task or its whole recipe
+# task set to the default file "taskdep.txt"
+# * Supports a progress bar for bitbake loads and file printing
+# * Line art for box drawing supported, ASCII art an alernative
+# * No horizontal scrolling support. Selected task's full name
+# shown in bottom bar
+# * Dynamically catches terminals that are (or become) too small
+# * Exception to insure return to normal terminal on errors
+# * Debugging support, self test option
+#
+
+import sys
+import traceback
+import curses
+import re
+import time
+
+# Bitbake server support
+import threading
+from xmlrpc import client
+import bb
+import bb.event
+
+# Dependency indexes (depends_model)
+(TYPE_DEP, TYPE_RDEP) = (0, 1)
+DEPENDS_TYPE = 0
+DEPENDS_TASK = 1
+DEPENDS_DEPS = 2
+# Task indexes (task_list)
+TASK_NAME = 0
+TASK_PRIMARY = 1
+TASK_SORT_ALPHA = 2
+TASK_SORT_DEPS = 3
+TASK_SORT_BITBAKE = 4
+# Sort options (default is SORT_DEPS)
+SORT_ALPHA = 0
+SORT_DEPS = 1
+SORT_BITBAKE_ENABLE = False # NOTE: future sort
+SORT_BITBAKE = 2
+sort_model = SORT_DEPS
+# Print options
+PRINT_MODEL_1 = 0
+PRINT_MODEL_2 = 1
+print_model = PRINT_MODEL_2
+print_file_name = "taskdep_print.log"
+print_file_backup_name = "taskdep_print_backup.log"
+is_printed = False
+is_filter = False
+
+# Standard (and backup) key mappings
+CHAR_NUL = 0 # Used as self-test nop char
+CHAR_BS_H = 8 # Alternate backspace key
+CHAR_TAB = 9
+CHAR_RETURN = 10
+CHAR_ESCAPE = 27
+CHAR_UP = ord('{') # Used as self-test ASCII char
+CHAR_DOWN = ord('}') # Used as self-test ASCII char
+
+# Color_pair IDs
+CURSES_NORMAL = 0
+CURSES_HIGHLIGHT = 1
+CURSES_WARNING = 2
+
+
+#################################################
+### Debugging support
+###
+
+verbose = False
+
+# Debug: message display slow-step through display update issues
+def alert(msg,screen):
+ if msg:
+ screen.addstr(0, 10, '[%-4s]' % msg)
+ screen.refresh();
+ curses.napms(2000)
+ else:
+ if do_line_art:
+ for i in range(10, 24):
+ screen.addch(0, i, curses.ACS_HLINE)
+ else:
+ screen.addstr(0, 10, '-' * 14)
+ screen.refresh();
+
+# Debug: display edge conditions on frame movements
+def debug_frame(nbox_ojb):
+ if verbose:
+ nbox_ojb.screen.addstr(0, 50, '[I=%2d,O=%2d,S=%3s,H=%2d,M=%4d]' % (
+ nbox_ojb.cursor_index,
+ nbox_ojb.cursor_offset,
+ nbox_ojb.scroll_offset,
+ nbox_ojb.inside_height,
+ len(nbox_ojb.task_list),
+ ))
+ nbox_ojb.screen.refresh();
+
+#
+# Unit test (assumes that 'quilt-native' is always present)
+#
+
+unit_test = os.environ.get('TASK_EXP_UNIT_TEST')
+unit_test_cmnds=[
+ '# Default selected task in primary box',
+ 'tst_selected=<TASK>.do_recipe_qa',
+ '# Default selected task in deps',
+ 'tst_entry=<TAB>',
+ 'tst_selected=',
+ '# Default selected task in rdeps',
+ 'tst_entry=<TAB>',
+ 'tst_selected=<TASK>.do_fetch',
+ "# Test 'select' back to primary box",
+ 'tst_entry=<CR>',
+ '#tst_entry=<DOWN>', # optional injected error
+ 'tst_selected=<TASK>.do_fetch',
+ '# Check filter',
+ 'tst_entry=/uilt-nativ/',
+ 'tst_selected=quilt-native.do_recipe_qa',
+ '# Check print',
+ 'tst_entry=p',
+ 'tst_printed=quilt-native.do_fetch',
+ '#tst_printed=quilt-foo.do_nothing', # optional injected error
+ '# Done!',
+ 'tst_entry=q',
+]
+unit_test_idx=0
+unit_test_command_chars=''
+unit_test_results=[]
+def unit_test_action(active_package):
+ global unit_test_idx
+ global unit_test_command_chars
+ global unit_test_results
+ ret = CHAR_NUL
+ if unit_test_command_chars:
+ ch = unit_test_command_chars[0]
+ unit_test_command_chars = unit_test_command_chars[1:]
+ time.sleep(0.5)
+ ret = ord(ch)
+ else:
+ line = unit_test_cmnds[unit_test_idx]
+ unit_test_idx += 1
+ line = re.sub('#.*', '', line).strip()
+ line = line.replace('<TASK>',active_package.primary[0])
+ line = line.replace('<TAB>','\t').replace('<CR>','\n')
+ line = line.replace('<UP>','{').replace('<DOWN>','}')
+ if not line: line = 'nop=nop'
+ cmnd,value = line.split('=')
+ if cmnd == 'tst_entry':
+ unit_test_command_chars = value
+ elif cmnd == 'tst_selected':
+ active_selected = active_package.get_selected()
+ if active_selected != value:
+ unit_test_results.append("ERROR:SELFTEST:expected '%s' but got '%s' (NOTE:bitbake may have changed)" % (value,active_selected))
+ ret = ord('Q')
+ else:
+ unit_test_results.append("Pass:SELFTEST:found '%s'" % (value))
+ elif cmnd == 'tst_printed':
+ result = os.system('grep %s %s' % (value,print_file_name))
+ if result:
+ unit_test_results.append("ERROR:PRINTTEST:expected '%s' in '%s'" % (value,print_file_name))
+ ret = ord('Q')
+ else:
+ unit_test_results.append("Pass:PRINTTEST:found '%s'" % (value))
+ # Return the action (CHAR_NUL for no action til next round)
+ return(ret)
+
+# Unit test without an interative terminal (e.g. ptest)
+unit_test_noterm = os.environ.get('TASK_EXP_UNIT_TEST_NOTERM')
+
+
+#################################################
+### Window frame rendering
+###
+### By default, use the normal line art. Since
+### these extended characters are not ASCII, one
+### must use the ncursus API to render them
+### The alternate ASCII line art set is optionally
+### available via the 'do_line_art' flag
+
+# By default, render frames using line art
+do_line_art = True
+
+# ASCII render set option
+CHAR_HBAR = '-'
+CHAR_VBAR = '|'
+CHAR_UL_CORNER = '/'
+CHAR_UR_CORNER = '\\'
+CHAR_LL_CORNER = '\\'
+CHAR_LR_CORNER = '/'
+
+# Box frame drawing with line-art
+def line_art_frame(box):
+ x = box.base_x
+ y = box.base_y
+ w = box.width
+ h = box.height + 1
+
+ if do_line_art:
+ for i in range(1, w - 1):
+ box.screen.addch(y, x + i, curses.ACS_HLINE, box.color)
+ box.screen.addch(y + h - 1, x + i, curses.ACS_HLINE, box.color)
+ body_line = "%s" % (' ' * (w - 2))
+ for i in range(1, h - 1):
+ box.screen.addch(y + i, x, curses.ACS_VLINE, box.color)
+ box.screen.addstr(y + i, x + 1, body_line, box.color)
+ box.screen.addch(y + i, x + w - 1, curses.ACS_VLINE, box.color)
+ box.screen.addch(y, x, curses.ACS_ULCORNER, box.color)
+ box.screen.addch(y, x + w - 1, curses.ACS_URCORNER, box.color)
+ box.screen.addch(y + h - 1, x, curses.ACS_LLCORNER, box.color)
+ box.screen.addch(y + h - 1, x + w - 1, curses.ACS_LRCORNER, box.color)
+ else:
+ top_line = "%s%s%s" % (CHAR_UL_CORNER,CHAR_HBAR * (w - 2),CHAR_UR_CORNER)
+ body_line = "%s%s%s" % (CHAR_VBAR,' ' * (w - 2),CHAR_VBAR)
+ bot_line = "%s%s%s" % (CHAR_UR_CORNER,CHAR_HBAR * (w - 2),CHAR_UL_CORNER)
+ tag_line = "%s%s%s" % ('[',CHAR_HBAR * (w - 2),']')
+ # Top bar
+ box.screen.addstr(y, x, top_line)
+ # Middle frame
+ for i in range(1, (h - 1)):
+ box.screen.addstr(y+i, x, body_line)
+ # Bottom bar
+ box.screen.addstr(y + (h - 1), x, bot_line)
+
+# Connect the separate boxes
+def line_art_fixup(box):
+ if do_line_art:
+ box.screen.addch(box.base_y+2, box.base_x, curses.ACS_LTEE, box.color)
+ box.screen.addch(box.base_y+2, box.base_x+box.width-1, curses.ACS_RTEE, box.color)
+
+
+#################################################
+### Ncurses box object : box frame object to display
+### and manage a sub-window's display elements
+### using basic ncurses
+###
+### Supports:
+### * Frame drawing, content (re)drawing
+### * Content scrolling via ArrowUp, ArrowDn, PgUp, PgDN,
+### * Highlighting for active selected item
+### * Content sorting based on selected sort model
+###
+
+class NBox():
+ def __init__(self, screen, label, primary, base_x, base_y, width, height):
+ # Box description
+ self.screen = screen
+ self.label = label
+ self.primary = primary
+ self.color = curses.color_pair(CURSES_NORMAL) if screen else None
+ # Box boundaries
+ self.base_x = base_x
+ self.base_y = base_y
+ self.width = width
+ self.height = height
+ # Cursor/scroll management
+ self.cursor_enable = False
+ self.cursor_index = 0 # Absolute offset
+ self.cursor_offset = 0 # Frame centric offset
+ self.scroll_offset = 0 # Frame centric offset
+ # Box specific content
+ # Format of each entry is [package_name,is_primary_recipe,alpha_sort_key,deps_sort_key]
+ self.task_list = []
+
+ @property
+ def inside_width(self):
+ return(self.width-2)
+
+ @property
+ def inside_height(self):
+ return(self.height-2)
+
+ # Populate the box's content, include the sort mappings and is_primary flag
+ def task_list_append(self,task_name,dep):
+ task_sort_alpha = task_name
+ task_sort_deps = dep.get_dep_sort(task_name)
+ is_primary = False
+ for primary in self.primary:
+ if task_name.startswith(primary+'.'):
+ is_primary = True
+ if SORT_BITBAKE_ENABLE:
+ task_sort_bitbake = dep.get_bb_sort(task_name)
+ self.task_list.append([task_name,is_primary,task_sort_alpha,task_sort_deps,task_sort_bitbake])
+ else:
+ self.task_list.append([task_name,is_primary,task_sort_alpha,task_sort_deps])
+
+ def reset(self):
+ self.task_list = []
+ self.cursor_index = 0 # Absolute offset
+ self.cursor_offset = 0 # Frame centric offset
+ self.scroll_offset = 0 # Frame centric offset
+
+ # Sort the box's content based on the current sort model
+ def sort(self):
+ if SORT_ALPHA == sort_model:
+ self.task_list.sort(key = lambda x: x[TASK_SORT_ALPHA])
+ elif SORT_DEPS == sort_model:
+ self.task_list.sort(key = lambda x: x[TASK_SORT_DEPS])
+ elif SORT_BITBAKE == sort_model:
+ self.task_list.sort(key = lambda x: x[TASK_SORT_BITBAKE])
+
+ # The target package list (to hightlight), from the command line
+ def set_primary(self,primary):
+ self.primary = primary
+
+ # Draw the box's outside frame
+ def draw_frame(self):
+ line_art_frame(self)
+ # Title
+ self.screen.addstr(self.base_y,
+ (self.base_x + (self.width//2))-((len(self.label)+2)//2),
+ '['+self.label+']')
+ self.screen.refresh()
+
+ # Draw the box's inside text content
+ def redraw(self):
+ task_list_len = len(self.task_list)
+ # Middle frame
+ body_line = "%s" % (' ' * (self.inside_width-1) )
+ for i in range(0,self.inside_height+1):
+ if i < (task_list_len + self.scroll_offset):
+ str_ctl = "%%-%ss" % (self.width-3)
+ # Safety assert
+ if (i + self.scroll_offset) >= task_list_len:
+ alert("REDRAW:%2d,%4d,%4d" % (i,self.scroll_offset,task_list_len),self.screen)
+ break
+
+ task_obj = self.task_list[i + self.scroll_offset]
+ task = task_obj[TASK_NAME][:self.inside_width-1]
+ task_primary = task_obj[TASK_PRIMARY]
+
+ if task_primary:
+ line = str_ctl % task[:self.inside_width-1]
+ self.screen.addstr(self.base_y+1+i, self.base_x+2, line, curses.A_BOLD)
+ else:
+ line = str_ctl % task[:self.inside_width-1]
+ self.screen.addstr(self.base_y+1+i, self.base_x+2, line)
+ else:
+ line = "%s" % (' ' * (self.inside_width-1) )
+ self.screen.addstr(self.base_y+1+i, self.base_x+2, line)
+ self.screen.refresh()
+
+ # Show the current selected task over the bottom of the frame
+ def show_selected(self,selected_task):
+ if not selected_task:
+ selected_task = self.get_selected()
+ tag_line = "%s%s%s" % ('[',CHAR_HBAR * (self.width-2),']')
+ self.screen.addstr(self.base_y + self.height, self.base_x, tag_line)
+ self.screen.addstr(self.base_y + self.height,
+ (self.base_x + (self.width//2))-((len(selected_task)+2)//2),
+ '['+selected_task+']')
+ self.screen.refresh()
+
+ # Load box with new table of content
+ def update_content(self,task_list):
+ self.task_list = task_list
+ if self.cursor_enable:
+ cursor_update(turn_on=False)
+ self.cursor_index = 0
+ self.cursor_offset = 0
+ self.scroll_offset = 0
+ self.redraw()
+ if self.cursor_enable:
+ cursor_update(turn_on=True)
+
+ # Manage the box's highlighted task and blinking cursor character
+ def cursor_on(self,is_on):
+ self.cursor_enable = is_on
+ self.cursor_update(is_on)
+
+ # High-light the current pointed package, normal for released packages
+ def cursor_update(self,turn_on=True):
+ str_ctl = "%%-%ss" % (self.inside_width-1)
+ try:
+ if len(self.task_list):
+ task_obj = self.task_list[self.cursor_index]
+ task = task_obj[TASK_NAME][:self.inside_width-1]
+ task_primary = task_obj[TASK_PRIMARY]
+ task_font = curses.A_BOLD if task_primary else 0
+ else:
+ task = ''
+ task_font = 0
+ except Exception as e:
+ alert("CURSOR_UPDATE:%s" % (e),self.screen)
+ return
+ if turn_on:
+ self.screen.addstr(self.base_y+1+self.cursor_offset,self.base_x+1,">", curses.color_pair(CURSES_HIGHLIGHT) | curses.A_BLINK)
+ self.screen.addstr(self.base_y+1+self.cursor_offset,self.base_x+2,str_ctl % task, curses.color_pair(CURSES_HIGHLIGHT) | task_font)
+ else:
+ self.screen.addstr(self.base_y+1+self.cursor_offset,self.base_x+1," ")
+ self.screen.addstr(self.base_y+1+self.cursor_offset,self.base_x+2,str_ctl % task, task_font)
+
+ # Down arrow
+ def line_down(self):
+ if len(self.task_list) <= (self.cursor_index+1):
+ return
+ self.cursor_update(turn_on=False)
+ self.cursor_index += 1
+ self.cursor_offset += 1
+ if self.cursor_offset > (self.inside_height):
+ self.cursor_offset -= 1
+ self.scroll_offset += 1
+ self.redraw()
+ self.cursor_update(turn_on=True)
+ debug_frame(self)
+
+ # Up arrow
+ def line_up(self):
+ if 0 > (self.cursor_index-1):
+ return
+ self.cursor_update(turn_on=False)
+ self.cursor_index -= 1
+ self.cursor_offset -= 1
+ if self.cursor_offset < 0:
+ self.cursor_offset += 1
+ self.scroll_offset -= 1
+ self.redraw()
+ self.cursor_update(turn_on=True)
+ debug_frame(self)
+
+ # Page down
+ def page_down(self):
+ max_task = len(self.task_list)-1
+ if max_task < self.inside_height:
+ return
+ self.cursor_update(turn_on=False)
+ self.cursor_index += 10
+ self.cursor_index = min(self.cursor_index,max_task)
+ self.cursor_offset = min(self.inside_height,self.cursor_index)
+ self.scroll_offset = self.cursor_index - self.cursor_offset
+ self.redraw()
+ self.cursor_update(turn_on=True)
+ debug_frame(self)
+
+ # Page up
+ def page_up(self):
+ max_task = len(self.task_list)-1
+ if max_task < self.inside_height:
+ return
+ self.cursor_update(turn_on=False)
+ self.cursor_index -= 10
+ self.cursor_index = max(self.cursor_index,0)
+ self.cursor_offset = max(0, self.inside_height - (max_task - self.cursor_index))
+ self.scroll_offset = self.cursor_index - self.cursor_offset
+ self.redraw()
+ self.cursor_update(turn_on=True)
+ debug_frame(self)
+
+ # Return the currently selected task name for this box
+ def get_selected(self):
+ if self.task_list:
+ return(self.task_list[self.cursor_index][TASK_NAME])
+ else:
+ return('')
+
+#################################################
+### The helper sub-windows
+###
+
+# Show persistent help at the top of the screen
+class HelpBarView(NBox):
+ def __init__(self, screen, label, primary, base_x, base_y, width, height):
+ super(HelpBarView, self).__init__(screen, label, primary, base_x, base_y, width, height)
+
+ def show_help(self,show):
+ self.screen.addstr(self.base_y,self.base_x, "%s" % (' ' * self.inside_width))
+ if show:
+ help = "Help='?' Filter='/' NextBox=<Tab> Select=<Enter> Print='p','P' Quit='q'"
+ bar_size = self.inside_width - 5 - len(help)
+ self.screen.addstr(self.base_y,self.base_x+((self.inside_width-len(help))//2), help)
+ self.screen.refresh()
+
+# Pop up a detailed Help box
+class HelpBoxView(NBox):
+ def __init__(self, screen, label, primary, base_x, base_y, width, height, dep):
+ super(HelpBoxView, self).__init__(screen, label, primary, base_x, base_y, width, height)
+ self.x_pos = 0
+ self.y_pos = 0
+ self.dep = dep
+
+ # Instantial the pop-up help box
+ def show_help(self,show):
+ self.x_pos = self.base_x + 4
+ self.y_pos = self.base_y + 2
+
+ def add_line(line):
+ if line:
+ self.screen.addstr(self.y_pos,self.x_pos,line)
+ self.y_pos += 1
+
+ # Gather some statisics
+ dep_count = 0
+ rdep_count = 0
+ for task_obj in self.dep.depends_model:
+ if TYPE_DEP == task_obj[DEPENDS_TYPE]:
+ dep_count += 1
+ elif TYPE_RDEP == task_obj[DEPENDS_TYPE]:
+ rdep_count += 1
+
+ self.draw_frame()
+ line_art_fixup(self.dep)
+ add_line("Quit : 'q' ")
+ add_line("Filter task names : '/'")
+ add_line("Tab to next box : <Tab>")
+ add_line("Select a task : <Enter>")
+ add_line("Print task's deps : 'p'")
+ add_line("Print recipe's deps : 'P'")
+ add_line(" -> '%s'" % print_file_name)
+ add_line("Sort toggle : 's'")
+ add_line(" %s Recipe inner-depends order" % ('->' if (SORT_DEPS == sort_model) else '- '))
+ add_line(" %s Alpha-numeric order" % ('->' if (SORT_ALPHA == sort_model) else '- '))
+ if SORT_BITBAKE_ENABLE:
+ add_line(" %s Bitbake order" % ('->' if (TASK_SORT_BITBAKE == sort_model) else '- '))
+ add_line("Alternate backspace : <CTRL-H>")
+ add_line("")
+ add_line("Primary recipes = %s" % ','.join(self.primary))
+ add_line("Task count = %4d" % len(self.dep.pkg_model))
+ add_line("Deps count = %4d" % dep_count)
+ add_line("RDeps count = %4d" % rdep_count)
+ add_line("")
+ self.screen.addstr(self.y_pos,self.x_pos+7,"<Press any key>", curses.color_pair(CURSES_HIGHLIGHT))
+ self.screen.refresh()
+ c = self.screen.getch()
+
+# Show a progress bar
+class ProgressView(NBox):
+ def __init__(self, screen, label, primary, base_x, base_y, width, height):
+ super(ProgressView, self).__init__(screen, label, primary, base_x, base_y, width, height)
+
+ def progress(self,title,current,max):
+ if title:
+ self.label = title
+ else:
+ title = self.label
+ if max <=0: max = 10
+ bar_size = self.width - 7 - len(title)
+ bar_done = int( (float(current)/float(max)) * float(bar_size) )
+ self.screen.addstr(self.base_y,self.base_x, " %s:[%s%s]" % (title,'*' * bar_done,' ' * (bar_size-bar_done)))
+ self.screen.refresh()
+ return(current+1)
+
+ def clear(self):
+ self.screen.addstr(self.base_y,self.base_x, "%s" % (' ' * self.width))
+ self.screen.refresh()
+
+# Implement a task filter bar
+class FilterView(NBox):
+ SEARCH_NOP = 0
+ SEARCH_GO = 1
+ SEARCH_CANCEL = 2
+
+ def __init__(self, screen, label, primary, base_x, base_y, width, height):
+ super(FilterView, self).__init__(screen, label, primary, base_x, base_y, width, height)
+ self.do_show = False
+ self.filter_str = ""
+
+ def clear(self,enable_show=True):
+ self.filter_str = ""
+
+ def show(self,enable_show=True):
+ self.do_show = enable_show
+ if self.do_show:
+ self.screen.addstr(self.base_y,self.base_x, "[ Filter: %-25s ] '/'=cancel, format='abc' " % self.filter_str[0:25])
+ else:
+ self.screen.addstr(self.base_y,self.base_x, "%s" % (' ' * self.width))
+ self.screen.refresh()
+
+ def show_prompt(self):
+ self.screen.addstr(self.base_y,self.base_x + 10 + len(self.filter_str), " ")
+ self.screen.addstr(self.base_y,self.base_x + 10 + len(self.filter_str), "")
+
+ # Keys specific to the filter box (start/stop filter keys are in the main loop)
+ def input(self,c,ch):
+ ret = self.SEARCH_GO
+ if c in (curses.KEY_BACKSPACE,CHAR_BS_H):
+ # Backspace
+ if self.filter_str:
+ self.filter_str = self.filter_str[0:-1]
+ self.show()
+ elif ((ch >= 'a') and (ch <= 'z')) or ((ch >= 'A') and (ch <= 'Z')) or ((ch >= '0') and (ch <= '9')) or (ch in (' ','_','.','-')):
+ # The isalnum() acts strangly with keypad(True), so explicit bounds
+ self.filter_str += ch
+ self.show()
+ else:
+ ret = self.SEARCH_NOP
+ return(ret)
+
+
+#################################################
+### The primary dependency windows
+###
+
+# The main list of package tasks
+class PackageView(NBox):
+ def __init__(self, screen, label, primary, base_x, base_y, width, height):
+ super(PackageView, self).__init__(screen, label, primary, base_x, base_y, width, height)
+
+ # Find and verticaly center a selected task (from filter or from dependent box)
+ # The 'task_filter_str' can be a full or a partial (filter) task name
+ def find(self,task_filter_str):
+ found = False
+ max = self.height-2
+ if not task_filter_str:
+ return(found)
+ for i,task_obj in enumerate(self.task_list):
+ task = task_obj[TASK_NAME]
+ if task.startswith(task_filter_str):
+ self.cursor_on(False)
+ self.cursor_index = i
+
+ # Position selected at vertical center
+ vcenter = self.inside_height // 2
+ if self.cursor_index <= vcenter:
+ self.scroll_offset = 0
+ self.cursor_offset = self.cursor_index
+ elif self.cursor_index >= (len(self.task_list) - vcenter - 1):
+ self.cursor_offset = self.inside_height-1
+ self.scroll_offset = self.cursor_index - self.cursor_offset
+ else:
+ self.cursor_offset = vcenter
+ self.scroll_offset = self.cursor_index - self.cursor_offset
+
+ self.redraw()
+ self.cursor_on(True)
+ found = True
+ break
+ return(found)
+
+# The view of dependent packages
+class PackageDepView(NBox):
+ def __init__(self, screen, label, primary, base_x, base_y, width, height):
+ super(PackageDepView, self).__init__(screen, label, primary, base_x, base_y, width, height)
+
+# The view of reverse-dependent packages
+class PackageReverseDepView(NBox):
+ def __init__(self, screen, label, primary, base_x, base_y, width, height):
+ super(PackageReverseDepView, self).__init__(screen, label, primary, base_x, base_y, width, height)
+
+
+#################################################
+### DepExplorer : The parent frame and object
+###
+
+class DepExplorer(NBox):
+ def __init__(self,screen):
+ title = "Task Dependency Explorer"
+ super(DepExplorer, self).__init__(screen, 'Task Dependency Explorer','',0,0,80,23)
+
+ self.screen = screen
+ self.pkg_model = []
+ self.depends_model = []
+ self.dep_sort_map = {}
+ self.bb_sort_map = {}
+ self.filter_str = ''
+ self.filter_prev = 'deadbeef'
+
+ if self.screen:
+ self.help_bar_view = HelpBarView(screen, "Help",'',1,1,79,1)
+ self.help_box_view = HelpBoxView(screen, "Help",'',0,2,40,20,self)
+ self.progress_view = ProgressView(screen, "Progress",'',2,1,76,1)
+ self.filter_view = FilterView(screen, "Filter",'',2,1,76,1)
+ self.package_view = PackageView(screen, "Package",'alpha', 0,2,40,20)
+ self.dep_view = PackageDepView(screen, "Dependencies",'beta',40,2,40,10)
+ self.reverse_view = PackageReverseDepView(screen, "Dependent Tasks",'gamma',40,13,40,9)
+ self.draw_frames()
+
+ # Draw this main window's frame and all sub-windows
+ def draw_frames(self):
+ self.draw_frame()
+ self.package_view.draw_frame()
+ self.dep_view.draw_frame()
+ self.reverse_view.draw_frame()
+ if is_filter:
+ self.filter_view.show(True)
+ self.filter_view.show_prompt()
+ else:
+ self.help_bar_view.show_help(True)
+ self.package_view.redraw()
+ self.dep_view.redraw()
+ self.reverse_view.redraw()
+ self.show_selected(self.package_view.get_selected())
+ line_art_fixup(self)
+
+ # Parse the bitbake dependency event object
+ def parse(self, depgraph):
+ for task in depgraph["tdepends"]:
+ self.pkg_model.insert(0, task)
+ for depend in depgraph["tdepends"][task]:
+ self.depends_model.insert (0, (TYPE_DEP, task, depend))
+ self.depends_model.insert (0, (TYPE_RDEP, depend, task))
+ if self.screen:
+ self.dep_sort_prep()
+
+ # Prepare the dependency sort order keys
+ # This method creates sort keys per recipe tasks in
+ # the order of each recipe's internal dependecies
+ # Method:
+ # Filter the tasks in dep order in dep_sort_map = {}
+ # (a) Find a task that has no dependecies
+ # Ignore non-recipe specific tasks
+ # (b) Add it to the sort mapping dict with
+ # key of "<task_group>_<order>"
+ # (c) Remove it as a dependency from the other tasks
+ # (d) Repeat till all tasks are mapped
+ # Use placeholders to insure each sub-dict is instantiated
+ def dep_sort_prep(self):
+ self.progress_view.progress('DepSort',0,4)
+ # Init the task base entries
+ self.progress_view.progress('DepSort',1,4)
+ dep_table = {}
+ bb_index = 0
+ for task in self.pkg_model:
+ # First define the incoming bitbake sort order
+ self.bb_sort_map[task] = "%04d" % (bb_index)
+ bb_index += 1
+ task_group = task[0:task.find('.')]
+ if task_group not in dep_table:
+ dep_table[task_group] = {}
+ dep_table[task_group]['-'] = {} # Placeholder
+ if task not in dep_table[task_group]:
+ dep_table[task_group][task] = {}
+ dep_table[task_group][task]['-'] = {} # Placeholder
+ # Add the task dependecy entries
+ self.progress_view.progress('DepSort',2,4)
+ for task_obj in self.depends_model:
+ if task_obj[DEPENDS_TYPE] != TYPE_DEP:
+ continue
+ task = task_obj[DEPENDS_TASK]
+ task_dep = task_obj[DEPENDS_DEPS]
+ task_group = task[0:task.find('.')]
+ # Only track depends within same group
+ if task_dep.startswith(task_group+'.'):
+ dep_table[task_group][task][task_dep] = 1
+ self.progress_view.progress('DepSort',3,4)
+ for task_group in dep_table:
+ dep_index = 0
+ # Whittle down the tasks of each group
+ this_pass = 1
+ do_loop = True
+ while (len(dep_table[task_group]) > 1) and do_loop:
+ this_pass += 1
+ is_change = False
+ delete_list = []
+ for task in dep_table[task_group]:
+ if '-' == task:
+ continue
+ if 1 == len(dep_table[task_group][task]):
+ is_change = True
+ # No more deps, so collect this task...
+ self.dep_sort_map[task] = "%s_%04d" % (task_group,dep_index)
+ dep_index += 1
+ # ... remove it from other lists as resolved ...
+ for dep_task in dep_table[task_group]:
+ if task in dep_table[task_group][dep_task]:
+ del dep_table[task_group][dep_task][task]
+ # ... and remove it from from the task group
+ delete_list.append(task)
+ for task in delete_list:
+ del dep_table[task_group][task]
+ if not is_change:
+ alert("ERROR:DEP_SIEVE_NO_CHANGE:%s" % task_group,self.screen)
+ do_loop = False
+ continue
+ self.progress_view.progress('',4,4)
+ self.progress_view.clear()
+ self.help_bar_view.show_help(True)
+ if len(self.dep_sort_map) != len(self.pkg_model):
+ alert("ErrorDepSort:%d/%d" % (len(self.dep_sort_map),len(self.pkg_model)),self.screen)
+
+ # Look up a dep sort order key
+ def get_dep_sort(self,key):
+ if key in self.dep_sort_map:
+ return(self.dep_sort_map[key])
+ else:
+ return(key)
+
+ # Look up a bitbake sort order key
+ def get_bb_sort(self,key):
+ if key in self.bb_sort_map:
+ return(self.bb_sort_map[key])
+ else:
+ return(key)
+
+ # Find the selected package in the main frame, update the dependency frames content accordingly
+ def select(self, package_name, only_update_dependents=False):
+ if not package_name:
+ package_name = self.package_view.get_selected()
+ # alert("SELECT:%s:" % package_name,self.screen)
+
+ if self.filter_str != self.filter_prev:
+ self.package_view.cursor_on(False)
+ # Fill of the main package task list using new filter
+ self.package_view.task_list = []
+ for package in self.pkg_model:
+ if self.filter_str:
+ if self.filter_str in package:
+ self.package_view.task_list_append(package,self)
+ else:
+ self.package_view.task_list_append(package,self)
+ self.package_view.sort()
+ self.filter_prev = self.filter_str
+
+ # Old position is lost, assert new position of previous task (if still filtered in)
+ self.package_view.cursor_index = 0
+ self.package_view.cursor_offset = 0
+ self.package_view.scroll_offset = 0
+ self.package_view.redraw()
+ self.package_view.cursor_on(True)
+
+ # Make sure the selected package is in view, with implicit redraw()
+ if (not only_update_dependents):
+ self.package_view.find(package_name)
+ # In case selected name change (i.e. filter removed previous)
+ package_name = self.package_view.get_selected()
+
+ # Filter the package's dependent list to the dependent view
+ self.dep_view.reset()
+ for package_def in self.depends_model:
+ if (package_def[DEPENDS_TYPE] == TYPE_DEP) and (package_def[DEPENDS_TASK] == package_name):
+ self.dep_view.task_list_append(package_def[DEPENDS_DEPS],self)
+ self.dep_view.sort()
+ self.dep_view.redraw()
+ # Filter the package's dependent list to the reverse dependent view
+ self.reverse_view.reset()
+ for package_def in self.depends_model:
+ if (package_def[DEPENDS_TYPE] == TYPE_RDEP) and (package_def[DEPENDS_TASK] == package_name):
+ self.reverse_view.task_list_append(package_def[DEPENDS_DEPS],self)
+ self.reverse_view.sort()
+ self.reverse_view.redraw()
+ self.show_selected(package_name)
+ self.screen.refresh()
+
+ # The print-to-file method
+ def print_deps(self,whole_group=False):
+ global is_printed
+ # Print the selected deptree(s) to a file
+ if not is_printed:
+ try:
+ # Move to backup any exiting file before first write
+ if os.path.isfile(print_file_name):
+ os.system('mv -f %s %s' % (print_file_name,print_file_backup_name))
+ except Exception as e:
+ alert(e,self.screen)
+ alert('',self.screen)
+ print_list = []
+ selected_task = self.package_view.get_selected()
+ if not selected_task:
+ return
+ if not whole_group:
+ print_list.append(selected_task)
+ else:
+ # Use the presorted task_group order from 'package_view'
+ task_group = selected_task[0:selected_task.find('.')+1]
+ for task_obj in self.package_view.task_list:
+ task = task_obj[TASK_NAME]
+ if task.startswith(task_group):
+ print_list.append(task)
+ with open(print_file_name, "a") as fd:
+ print_max = len(print_list)
+ print_count = 1
+ self.progress_view.progress('Write "%s"' % print_file_name,0,print_max)
+ for task in print_list:
+ print_count = self.progress_view.progress('',print_count,print_max)
+ self.select(task)
+ self.screen.refresh();
+ # Utilize the current print output model
+ if print_model == PRINT_MODEL_1:
+ print("=== Dependendency Snapshot ===",file=fd)
+ print(" = Package =",file=fd)
+ print(' '+task,file=fd)
+ # Fill in the matching dependencies
+ print(" = Dependencies =",file=fd)
+ for task_obj in self.dep_view.task_list:
+ print(' '+ task_obj[TASK_NAME],file=fd)
+ print(" = Dependent Tasks =",file=fd)
+ for task_obj in self.reverse_view.task_list:
+ print(' '+ task_obj[TASK_NAME],file=fd)
+ if print_model == PRINT_MODEL_2:
+ print("=== Dependendency Snapshot ===",file=fd)
+ dep_count = len(self.dep_view.task_list) - 1
+ for i,task_obj in enumerate(self.dep_view.task_list):
+ print('%s%s' % ("Dep =" if (i==dep_count) else " ",task_obj[TASK_NAME]),file=fd)
+ if not self.dep_view.task_list:
+ print('Dep =',file=fd)
+ print("Package=%s" % task,file=fd)
+ for i,task_obj in enumerate(self.reverse_view.task_list):
+ print('%s%s' % ("RDep =" if (i==0) else " ",task_obj[TASK_NAME]),file=fd)
+ if not self.reverse_view.task_list:
+ print('RDep =',file=fd)
+ curses.napms(2000)
+ self.progress_view.clear()
+ self.help_bar_view.show_help(True)
+ print('',file=fd)
+ # Restore display to original selected task
+ self.select(selected_task)
+ is_printed = True
+
+#################################################
+### Load bitbake data
+###
+
+def bitbake_load(server, eventHandler, params, dep, curses_off, screen):
+ global bar_len_old
+ bar_len_old = 0
+
+ # Support no screen
+ def progress(msg,count,max):
+ global bar_len_old
+ if screen:
+ dep.progress_view.progress(msg,count,max)
+ else:
+ if msg:
+ if bar_len_old:
+ bar_len_old = 0
+ print("\n")
+ print(f"{msg}: ({count} of {max})")
+ else:
+ bar_len = int((count*40)/max)
+ if bar_len_old != bar_len:
+ print(f"{'*' * (bar_len-bar_len_old)}",end='',flush=True)
+ bar_len_old = bar_len
+ def clear():
+ if screen:
+ dep.progress_view.clear()
+ def clear_curses(screen):
+ if screen:
+ curses_off(screen)
+
+ #
+ # Trigger bitbake "generateDepTreeEvent"
+ #
+
+ cmdline = ''
+ try:
+ params.updateToServer(server, os.environ.copy())
+ params.updateFromServer(server)
+ cmdline = params.parseActions()
+ if not cmdline:
+ clear_curses(screen)
+ print("ERROR: nothing to do. Use 'bitbake world' to build everything, or run 'bitbake --help' for usage information.")
+ return 1,cmdline
+ if 'msg' in cmdline and cmdline['msg']:
+ clear_curses(screen)
+ print('ERROR: ' + cmdline['msg'])
+ return 1,cmdline
+ cmdline = cmdline['action']
+ if not cmdline or cmdline[0] != "generateDotGraph":
+ clear_curses(screen)
+ print("ERROR: This UI requires the -g option")
+ return 1,cmdline
+ ret, error = server.runCommand(["generateDepTreeEvent", cmdline[1], cmdline[2]])
+ if error:
+ clear_curses(screen)
+ print("ERROR: running command '%s': %s" % (cmdline, error))
+ return 1,cmdline
+ elif not ret:
+ clear_curses(screen)
+ print("ERROR: running command '%s': returned %s" % (cmdline, ret))
+ return 1,cmdline
+ except client.Fault as x:
+ clear_curses(screen)
+ print("ERROR: XMLRPC Fault getting commandline:\n %s" % x)
+ return 1,cmdline
+ except Exception as e:
+ clear_curses(screen)
+ print("ERROR: in startup:\n %s" % traceback.format_exc())
+ return 1,cmdline
+
+ #
+ # Receive data from bitbake
+ #
+
+ progress_total = 0
+ load_bitbake = True
+ quit = False
+ try:
+ while load_bitbake:
+ try:
+ event = eventHandler.waitEvent(0.25)
+ if quit:
+ _, error = server.runCommand(["stateForceShutdown"])
+ clear_curses(screen)
+ if error:
+ print('Unable to cleanly stop: %s' % error)
+ break
+
+ if event is None:
+ continue
+
+ if isinstance(event, bb.event.CacheLoadStarted):
+ progress_total = event.total
+ progress('Loading Cache',0,progress_total)
+ continue
+
+ if isinstance(event, bb.event.CacheLoadProgress):
+ x = event.current
+ progress('',x,progress_total)
+ continue
+
+ if isinstance(event, bb.event.CacheLoadCompleted):
+ clear()
+ progress('Bitbake... ',1,2)
+ continue
+
+ if isinstance(event, bb.event.ParseStarted):
+ progress_total = event.total
+ progress('Processing recipes',0,progress_total)
+ if progress_total == 0:
+ continue
+
+ if isinstance(event, bb.event.ParseProgress):
+ x = event.current
+ progress('',x,progress_total)
+ continue
+
+ if isinstance(event, bb.event.ParseCompleted):
+ progress('Generating dependency tree',0,3)
+ continue
+
+ if isinstance(event, bb.event.DepTreeGenerated):
+ progress('Generating dependency tree',1,3)
+ dep.parse(event._depgraph)
+ progress('Generating dependency tree',2,3)
+
+ if isinstance(event, bb.command.CommandCompleted):
+ load_bitbake = False
+ progress('Generating dependency tree',3,3)
+ clear()
+ if screen:
+ dep.help_bar_view.show_help(True)
+ continue
+
+ if isinstance(event, bb.event.NoProvider):
+ clear_curses(screen)
+ print('ERROR: %s' % event)
+
+ _, error = server.runCommand(["stateShutdown"])
+ if error:
+ print('ERROR: Unable to cleanly shutdown: %s' % error)
+ return 1,cmdline
+
+ if isinstance(event, bb.command.CommandFailed):
+ clear_curses(screen)
+ print('ERROR: ' + str(event))
+ return event.exitcode,cmdline
+
+ if isinstance(event, bb.command.CommandExit):
+ clear_curses(screen)
+ return event.exitcode,cmdline
+
+ if isinstance(event, bb.cooker.CookerExit):
+ break
+
+ continue
+ except EnvironmentError as ioerror:
+ # ignore interrupted io
+ if ioerror.args[0] == 4:
+ pass
+ except KeyboardInterrupt:
+ if shutdown == 2:
+ clear_curses(screen)
+ print("\nThird Keyboard Interrupt, exit.\n")
+ break
+ if shutdown == 1:
+ clear_curses(screen)
+ print("\nSecond Keyboard Interrupt, stopping...\n")
+ _, error = server.runCommand(["stateForceShutdown"])
+ if error:
+ print('Unable to cleanly stop: %s' % error)
+ if shutdown == 0:
+ clear_curses(screen)
+ print("\nKeyboard Interrupt, closing down...\n")
+ _, error = server.runCommand(["stateShutdown"])
+ if error:
+ print('Unable to cleanly shutdown: %s' % error)
+ shutdown = shutdown + 1
+ pass
+ except Exception as e:
+ # Safe exit on error
+ clear_curses(screen)
+ print("Exception : %s" % e)
+ print("Exception in startup:\n %s" % traceback.format_exc())
+
+ return 0,cmdline
+
+#################################################
+### main
+###
+
+SCREEN_COL_MIN = 83
+SCREEN_ROW_MIN = 26
+
+def main(server, eventHandler, params):
+ global verbose
+ global sort_model
+ global print_model
+ global is_printed
+ global is_filter
+ global screen_too_small
+
+ shutdown = 0
+ screen_too_small = False
+ quit = False
+
+ # Unit test with no terminal?
+ if unit_test_noterm:
+ # Load bitbake, test that there is valid dependency data, then exit
+ screen = None
+ print("* UNIT TEST:START")
+ dep = DepExplorer(screen)
+ print("* UNIT TEST:BITBAKE FETCH")
+ ret,cmdline = bitbake_load(server, eventHandler, params, dep, None, screen)
+ if ret:
+ print("* UNIT TEST: BITBAKE FAILED")
+ return ret
+ # Test the acquired dependency data
+ quilt_native_deps = 0
+ quilt_native_rdeps = 0
+ quilt_deps = 0
+ quilt_rdeps = 0
+ for i,task_obj in enumerate(dep.depends_model):
+ if TYPE_DEP == task_obj[0]:
+ task = task_obj[1]
+ if task.startswith('quilt-native'):
+ quilt_native_deps += 1
+ elif task.startswith('quilt'):
+ quilt_deps += 1
+ elif TYPE_RDEP == task_obj[0]:
+ task = task_obj[1]
+ if task.startswith('quilt-native'):
+ quilt_native_rdeps += 1
+ elif task.startswith('quilt'):
+ quilt_rdeps += 1
+ # Print results
+ failed = False
+ if 0 < len(dep.depends_model):
+ print(f"Pass:Bitbake dependency count = {len(dep.depends_model)}")
+ else:
+ failed = True
+ print(f"FAIL:Bitbake dependency count = 0")
+ if quilt_native_deps:
+ print(f"Pass:Quilt-native depends count = {quilt_native_deps}")
+ else:
+ failed = True
+ print(f"FAIL:Quilt-native depends count = 0")
+ if quilt_native_rdeps:
+ print(f"Pass:Quilt-native rdepends count = {quilt_native_rdeps}")
+ else:
+ failed = True
+ print(f"FAIL:Quilt-native rdepends count = 0")
+ if quilt_deps:
+ print(f"Pass:Quilt depends count = {quilt_deps}")
+ else:
+ failed = True
+ print(f"FAIL:Quilt depends count = 0")
+ if quilt_rdeps:
+ print(f"Pass:Quilt rdepends count = {quilt_rdeps}")
+ else:
+ failed = True
+ print(f"FAIL:Quilt rdepends count = 0")
+ print("* UNIT TEST:STOP")
+ return failed
+
+ # Help method to dynamically test parent window too small
+ def check_screen_size(dep, active_package):
+ global screen_too_small
+ rows, cols = screen.getmaxyx()
+ if (rows >= SCREEN_ROW_MIN) and (cols >= SCREEN_COL_MIN):
+ if screen_too_small:
+ # Now big enough, remove error message and redraw screen
+ dep.draw_frames()
+ active_package.cursor_on(True)
+ screen_too_small = False
+ return True
+ # Test on App init
+ if not dep:
+ # Do not start this app if screen not big enough
+ curses.endwin()
+ print("")
+ print("ERROR(Taskexp_cli): Mininal screen size is %dx%d" % (SCREEN_COL_MIN,SCREEN_ROW_MIN))
+ print("Current screen is Cols=%s,Rows=%d" % (cols,rows))
+ return False
+ # First time window too small
+ if not screen_too_small:
+ active_package.cursor_on(False)
+ dep.screen.addstr(0,2,'[BIGGER WINDOW PLEASE]', curses.color_pair(CURSES_WARNING) | curses.A_BLINK)
+ screen_too_small = True
+ return False
+
+ # Helper method to turn off curses mode
+ def curses_off(screen):
+ if not screen: return
+ # Safe error exit
+ screen.keypad(False)
+ curses.echo()
+ curses.curs_set(1)
+ curses.endwin()
+
+ if unit_test_results:
+ print('\nUnit Test Results:')
+ for line in unit_test_results:
+ print(" %s" % line)
+
+ #
+ # Initialize the ncurse environment
+ #
+
+ screen = curses.initscr()
+ try:
+ if not check_screen_size(None, None):
+ exit(1)
+ try:
+ curses.start_color()
+ curses.use_default_colors();
+ curses.init_pair(0xFF, curses.COLOR_BLACK, curses.COLOR_WHITE);
+ curses.init_pair(CURSES_NORMAL, curses.COLOR_WHITE, curses.COLOR_BLACK)
+ curses.init_pair(CURSES_HIGHLIGHT, curses.COLOR_WHITE, curses.COLOR_BLUE)
+ curses.init_pair(CURSES_WARNING, curses.COLOR_WHITE, curses.COLOR_RED)
+ except:
+ curses.endwin()
+ print("")
+ print("ERROR(Taskexp_cli): Requires 256 colors. Please use this or the equivalent:")
+ print(" $ export TERM='xterm-256color'")
+ exit(1)
+
+ screen.keypad(True)
+ curses.noecho()
+ curses.curs_set(0)
+ screen.refresh();
+ except Exception as e:
+ # Safe error exit
+ curses_off(screen)
+ print("Exception : %s" % e)
+ print("Exception in startup:\n %s" % traceback.format_exc())
+ exit(1)
+
+ try:
+ #
+ # Instantiate the presentation layers
+ #
+
+ dep = DepExplorer(screen)
+
+ #
+ # Prepare bitbake
+ #
+
+ # Fetch bitbake dependecy data
+ ret,cmdline = bitbake_load(server, eventHandler, params, dep, curses_off, screen)
+ if ret: return ret
+
+ #
+ # Preset the views
+ #
+
+ # Cmdline example = ['generateDotGraph', ['acl', 'zlib'], 'build']
+ primary_packages = cmdline[1]
+ dep.package_view.set_primary(primary_packages)
+ dep.dep_view.set_primary(primary_packages)
+ dep.reverse_view.set_primary(primary_packages)
+ dep.help_box_view.set_primary(primary_packages)
+ dep.help_bar_view.show_help(True)
+ active_package = dep.package_view
+ active_package.cursor_on(True)
+ dep.select(primary_packages[0]+'.')
+ if unit_test:
+ alert('UNIT_TEST',screen)
+
+ # Help method to start/stop the filter feature
+ def filter_mode(new_filter_status):
+ global is_filter
+ if is_filter == new_filter_status:
+ # Ignore no changes
+ return
+ if not new_filter_status:
+ # Turn off
+ curses.curs_set(0)
+ #active_package.cursor_on(False)
+ active_package = dep.package_view
+ active_package.cursor_on(True)
+ is_filter = False
+ dep.help_bar_view.show_help(True)
+ dep.filter_str = ''
+ dep.select('')
+ else:
+ # Turn on
+ curses.curs_set(1)
+ dep.help_bar_view.show_help(False)
+ dep.filter_view.clear()
+ dep.filter_view.show(True)
+ dep.filter_view.show_prompt()
+ is_filter = True
+
+ #
+ # Main user loop
+ #
+
+ while not quit:
+ if is_filter:
+ dep.filter_view.show_prompt()
+ if unit_test:
+ c = unit_test_action(active_package)
+ else:
+ c = screen.getch()
+ ch = chr(c)
+
+ # Do not draw if window now too small
+ if not check_screen_size(dep,active_package):
+ continue
+
+ if verbose:
+ if c == CHAR_RETURN:
+ screen.addstr(0, 4, "|%3d,CR |" % (c))
+ else:
+ screen.addstr(0, 4, "|%3d,%3s|" % (c,chr(c)))
+
+ # pre-map alternate filter close keys
+ if is_filter and (c == CHAR_ESCAPE):
+ # Alternate exit from filter
+ ch = '/'
+ c = ord(ch)
+
+ # Filter and non-filter mode command keys
+ # https://docs.python.org/3/library/curses.html
+ if c in (curses.KEY_UP,CHAR_UP):
+ active_package.line_up()
+ if active_package == dep.package_view:
+ dep.select('',only_update_dependents=True)
+ elif c in (curses.KEY_DOWN,CHAR_DOWN):
+ active_package.line_down()
+ if active_package == dep.package_view:
+ dep.select('',only_update_dependents=True)
+ elif curses.KEY_PPAGE == c:
+ active_package.page_up()
+ if active_package == dep.package_view:
+ dep.select('',only_update_dependents=True)
+ elif curses.KEY_NPAGE == c:
+ active_package.page_down()
+ if active_package == dep.package_view:
+ dep.select('',only_update_dependents=True)
+ elif CHAR_TAB == c:
+ # Tab between boxes
+ active_package.cursor_on(False)
+ if active_package == dep.package_view:
+ active_package = dep.dep_view
+ elif active_package == dep.dep_view:
+ active_package = dep.reverse_view
+ else:
+ active_package = dep.package_view
+ active_package.cursor_on(True)
+ elif curses.KEY_BTAB == c:
+ # Shift-Tab reverse between boxes
+ active_package.cursor_on(False)
+ if active_package == dep.package_view:
+ active_package = dep.reverse_view
+ elif active_package == dep.reverse_view:
+ active_package = dep.dep_view
+ else:
+ active_package = dep.package_view
+ active_package.cursor_on(True)
+ elif (CHAR_RETURN == c):
+ # CR to select
+ selected = active_package.get_selected()
+ if selected:
+ active_package.cursor_on(False)
+ active_package = dep.package_view
+ filter_mode(False)
+ dep.select(selected)
+ else:
+ filter_mode(False)
+ dep.select(primary_packages[0]+'.')
+
+ elif '/' == ch: # Enter/exit dep.filter_view
+ if is_filter:
+ filter_mode(False)
+ else:
+ filter_mode(True)
+ elif is_filter:
+ # If in filter mode, re-direct all these other keys to the filter box
+ result = dep.filter_view.input(c,ch)
+ dep.filter_str = dep.filter_view.filter_str
+ dep.select('')
+
+ # Non-filter mode command keys
+ elif 'p' == ch:
+ dep.print_deps(whole_group=False)
+ elif 'P' == ch:
+ dep.print_deps(whole_group=True)
+ elif 'w' == ch:
+ # Toggle the print model
+ if print_model == PRINT_MODEL_1:
+ print_model = PRINT_MODEL_2
+ else:
+ print_model = PRINT_MODEL_1
+ elif 's' == ch:
+ # Toggle the sort model
+ if sort_model == SORT_DEPS:
+ sort_model = SORT_ALPHA
+ elif sort_model == SORT_ALPHA:
+ if SORT_BITBAKE_ENABLE:
+ sort_model = TASK_SORT_BITBAKE
+ else:
+ sort_model = SORT_DEPS
+ else:
+ sort_model = SORT_DEPS
+ active_package.cursor_on(False)
+ current_task = active_package.get_selected()
+ dep.package_view.sort()
+ dep.dep_view.sort()
+ dep.reverse_view.sort()
+ active_package = dep.package_view
+ active_package.cursor_on(True)
+ dep.select(current_task)
+ # Announce the new sort model
+ alert("SORT=%s" % ("ALPHA" if (sort_model == SORT_ALPHA) else "DEPS"),screen)
+ alert('',screen)
+
+ elif 'q' == ch:
+ quit = True
+ elif ch in ('h','?'):
+ dep.help_box_view.show_help(True)
+ dep.select(active_package.get_selected())
+
+ #
+ # Debugging commands
+ #
+
+ elif 'V' == ch:
+ verbose = not verbose
+ alert('Verbose=%s' % str(verbose),screen)
+ alert('',screen)
+ elif 'R' == ch:
+ screen.refresh()
+ elif 'B' == ch:
+ # Progress bar unit test
+ dep.progress_view.progress('Test',0,40)
+ curses.napms(1000)
+ dep.progress_view.progress('',10,40)
+ curses.napms(1000)
+ dep.progress_view.progress('',20,40)
+ curses.napms(1000)
+ dep.progress_view.progress('',30,40)
+ curses.napms(1000)
+ dep.progress_view.progress('',40,40)
+ curses.napms(1000)
+ dep.progress_view.clear()
+ dep.help_bar_view.show_help(True)
+ elif 'Q' == ch:
+ # Simulated error
+ curses_off(screen)
+ print('ERROR: simulated error exit')
+ return 1
+
+ # Safe exit
+ curses_off(screen)
+ except Exception as e:
+ # Safe exit on error
+ curses_off(screen)
+ print("Exception : %s" % e)
+ print("Exception in startup:\n %s" % traceback.format_exc())
+
+ # Reminder to pick up your printed results
+ if is_printed:
+ print("")
+ print("You have output ready!")
+ print(" * Your printed dependency file is: %s" % print_file_name)
+ print(" * Your previous results saved in: %s" % print_file_backup_name)
+ print("")
diff --git a/bitbake/lib/bb/ui/toasterui.py b/bitbake/lib/bb/ui/toasterui.py
index ec5bd4f105..6bd21f1844 100644
--- a/bitbake/lib/bb/ui/toasterui.py
+++ b/bitbake/lib/bb/ui/toasterui.py
@@ -385,7 +385,7 @@ def main(server, eventHandler, params):
main.shutdown = 1
logger.info("ToasterUI build done, brbe: %s", brbe)
- continue
+ break
if isinstance(event, (bb.command.CommandCompleted,
bb.command.CommandFailed,
diff --git a/bitbake/lib/bb/ui/uievent.py b/bitbake/lib/bb/ui/uievent.py
index d595f172ac..c2f830d530 100644
--- a/bitbake/lib/bb/ui/uievent.py
+++ b/bitbake/lib/bb/ui/uievent.py
@@ -65,35 +65,27 @@ class BBUIEventQueue:
self.server = server
self.t = threading.Thread()
- self.t.setDaemon(True)
+ self.t.daemon = True
self.t.run = self.startCallbackHandler
self.t.start()
def getEvent(self):
-
- self.eventQueueLock.acquire()
-
- if not self.eventQueue:
- self.eventQueueLock.release()
- return None
-
- item = self.eventQueue.pop(0)
-
- if not self.eventQueue:
- self.eventQueueNotify.clear()
-
- self.eventQueueLock.release()
- return item
+ with bb.utils.lock_timeout(self.eventQueueLock):
+ if not self.eventQueue:
+ return None
+ item = self.eventQueue.pop(0)
+ if not self.eventQueue:
+ self.eventQueueNotify.clear()
+ return item
def waitEvent(self, delay):
self.eventQueueNotify.wait(delay)
return self.getEvent()
def queue_event(self, event):
- self.eventQueueLock.acquire()
- self.eventQueue.append(event)
- self.eventQueueNotify.set()
- self.eventQueueLock.release()
+ with bb.utils.lock_timeout(self.eventQueueLock):
+ self.eventQueue.append(event)
+ self.eventQueueNotify.set()
def send_event(self, event):
self.queue_event(pickle.loads(event))
diff --git a/bitbake/lib/bb/utils.py b/bitbake/lib/bb/utils.py
index d11da978d7..ebee65d3dd 100644
--- a/bitbake/lib/bb/utils.py
+++ b/bitbake/lib/bb/utils.py
@@ -13,6 +13,7 @@ import errno
import logging
import bb
import bb.msg
+import locale
import multiprocessing
import fcntl
import importlib
@@ -28,6 +29,10 @@ import signal
import collections
import copy
import ctypes
+import random
+import socket
+import struct
+import tempfile
from subprocess import getstatusoutput
from contextlib import contextmanager
from ctypes import cdll
@@ -45,7 +50,7 @@ def clean_context():
def get_context():
return _context
-
+
def set_context(ctx):
_context = ctx
@@ -207,8 +212,8 @@ def explode_dep_versions2(s, *, sort=True):
inversion = True
# This list is based on behavior and supported comparisons from deb, opkg and rpm.
#
- # Even though =<, <<, ==, !=, =>, and >> may not be supported,
- # we list each possibly valid item.
+ # Even though =<, <<, ==, !=, =>, and >> may not be supported,
+ # we list each possibly valid item.
# The build system is responsible for validation of what it supports.
if i.startswith(('<=', '=<', '<<', '==', '!=', '>=', '=>', '>>')):
lastcmp = i[0:2]
@@ -342,7 +347,7 @@ def _print_exception(t, value, tb, realfile, text, context):
exception = traceback.format_exception_only(t, value)
error.append('Error executing a python function in %s:\n' % realfile)
- # Strip 'us' from the stack (better_exec call) unless that was where the
+ # Strip 'us' from the stack (better_exec call) unless that was where the
# error came from
if tb.tb_next is not None:
tb = tb.tb_next
@@ -429,12 +434,14 @@ def better_eval(source, locals, extraglobals = None):
return eval(source, ctx, locals)
@contextmanager
-def fileslocked(files):
+def fileslocked(files, *args, **kwargs):
"""Context manager for locking and unlocking file locks."""
locks = []
if files:
for lockfile in files:
- locks.append(bb.utils.lockfile(lockfile))
+ l = bb.utils.lockfile(lockfile, *args, **kwargs)
+ if l is not None:
+ locks.append(l)
try:
yield
@@ -541,7 +548,12 @@ def md5_file(filename):
Return the hex string representation of the MD5 checksum of filename.
"""
import hashlib
- return _hasher(hashlib.new('MD5', usedforsecurity=False), filename)
+ try:
+ sig = hashlib.new('MD5', usedforsecurity=False)
+ except TypeError:
+ # Some configurations don't appear to support two arguments
+ sig = hashlib.new('MD5')
+ return _hasher(sig, filename)
def sha256_file(filename):
"""
@@ -592,11 +604,25 @@ def preserved_envvars():
v = [
'BBPATH',
'BB_PRESERVE_ENV',
- 'BB_ENV_PASSTHROUGH',
'BB_ENV_PASSTHROUGH_ADDITIONS',
]
return v + preserved_envvars_exported()
+def check_system_locale():
+ """Make sure the required system locale are available and configured"""
+ default_locale = locale.getlocale(locale.LC_CTYPE)
+
+ try:
+ locale.setlocale(locale.LC_CTYPE, ("en_US", "UTF-8"))
+ except:
+ sys.exit("Please make sure locale 'en_US.UTF-8' is available on your system")
+ else:
+ locale.setlocale(locale.LC_CTYPE, default_locale)
+
+ if sys.getfilesystemencoding() != "utf-8":
+ sys.exit("Please use a locale setting which supports UTF-8 (such as LANG=en_US.UTF-8).\n"
+ "Python can't change the filesystem locale after loading so we need a UTF-8 when Python starts or things won't work.")
+
def filter_environment(good_vars):
"""
Create a pristine environment for bitbake. This will remove variables that
@@ -692,8 +718,8 @@ def remove(path, recurse=False, ionice=False):
return
if recurse:
for name in glob.glob(path):
- if _check_unsafe_delete_path(path):
- raise Exception('bb.utils.remove: called with dangerous path "%s" and recurse=True, refusing to delete!' % path)
+ if _check_unsafe_delete_path(name):
+ raise Exception('bb.utils.remove: called with dangerous path "%s" and recurse=True, refusing to delete!' % name)
# shutil.rmtree(name) would be ideal but its too slow
cmd = []
if ionice:
@@ -719,9 +745,9 @@ def prunedir(topdir, ionice=False):
# but thats possibly insane and suffixes is probably going to be small
#
def prune_suffix(var, suffixes, d):
- """
+ """
See if var ends with any of the suffixes listed and
- remove it if found
+ remove it if found
"""
for suffix in suffixes:
if suffix and var.endswith(suffix):
@@ -732,7 +758,8 @@ def mkdirhier(directory):
"""Create a directory like 'mkdir -p', but does not complain if
directory already exists like os.makedirs
"""
-
+ if '${' in str(directory):
+ bb.fatal("Directory name {} contains unexpanded bitbake variable. This may cause build failures and WORKDIR polution.".format(directory))
try:
os.makedirs(directory)
except OSError as e:
@@ -751,7 +778,7 @@ def movefile(src, dest, newmtime = None, sstat = None):
if not sstat:
sstat = os.lstat(src)
except Exception as e:
- print("movefile: Stating source file failed...", e)
+ logger.warning("movefile: Stating source file failed...", e)
return None
destexists = 1
@@ -779,7 +806,7 @@ def movefile(src, dest, newmtime = None, sstat = None):
os.unlink(src)
return os.lstat(dest)
except Exception as e:
- print("movefile: failed to properly create symlink:", dest, "->", target, e)
+ logger.warning("movefile: failed to properly create symlink:", dest, "->", target, e)
return None
renamefailed = 1
@@ -796,7 +823,7 @@ def movefile(src, dest, newmtime = None, sstat = None):
except Exception as e:
if e.errno != errno.EXDEV:
# Some random error.
- print("movefile: Failed to move", src, "to", dest, e)
+ logger.warning("movefile: Failed to move", src, "to", dest, e)
return None
# Invalid cross-device-link 'bind' mounted or actually Cross-Device
@@ -808,13 +835,13 @@ def movefile(src, dest, newmtime = None, sstat = None):
bb.utils.rename(destpath + "#new", destpath)
didcopy = 1
except Exception as e:
- print('movefile: copy', src, '->', dest, 'failed.', e)
+ logger.warning('movefile: copy', src, '->', dest, 'failed.', e)
return None
else:
#we don't yet handle special, so we need to fall back to /bin/mv
a = getstatusoutput("/bin/mv -f " + "'" + src + "' '" + dest + "'")
if a[0] != 0:
- print("movefile: Failed to move special file:" + src + "' to '" + dest + "'", a)
+ logger.warning("movefile: Failed to move special file:" + src + "' to '" + dest + "'", a)
return None # failure
try:
if didcopy:
@@ -822,7 +849,7 @@ def movefile(src, dest, newmtime = None, sstat = None):
os.chmod(destpath, stat.S_IMODE(sstat[stat.ST_MODE])) # Sticky is reset on chown
os.unlink(src)
except Exception as e:
- print("movefile: Failed to chown/chmod/unlink", dest, e)
+ logger.warning("movefile: Failed to chown/chmod/unlink", dest, e)
return None
if newmtime:
@@ -974,13 +1001,16 @@ def umask(new_mask):
os.umask(current_mask)
def to_boolean(string, default=None):
- """
+ """
Check input string and return boolean value True/False/None
- depending upon the checks
+ depending upon the checks
"""
if not string:
return default
+ if isinstance(string, int):
+ return string != 0
+
normalized = string.lower()
if normalized in ("y", "yes", "1", "true"):
return True
@@ -1112,7 +1142,10 @@ def get_referenced_vars(start_expr, d):
def cpu_count():
- return multiprocessing.cpu_count()
+ try:
+ return len(os.sched_getaffinity(0))
+ except OSError:
+ return multiprocessing.cpu_count()
def nonblockingfd(fd):
fcntl.fcntl(fd, fcntl.F_SETFL, fcntl.fcntl(fd, fcntl.F_GETFL) | os.O_NONBLOCK)
@@ -1599,6 +1632,44 @@ def set_process_name(name):
except:
pass
+def enable_loopback_networking():
+ # From bits/ioctls.h
+ SIOCGIFFLAGS = 0x8913
+ SIOCSIFFLAGS = 0x8914
+ SIOCSIFADDR = 0x8916
+ SIOCSIFNETMASK = 0x891C
+
+ # if.h
+ IFF_UP = 0x1
+ IFF_RUNNING = 0x40
+
+ # bits/socket.h
+ AF_INET = 2
+
+ # char ifr_name[IFNAMSIZ=16]
+ ifr_name = struct.pack("@16s", b"lo")
+ def netdev_req(fd, req, data = b""):
+ # Pad and add interface name
+ data = ifr_name + data + (b'\x00' * (16 - len(data)))
+ # Return all data after interface name
+ return fcntl.ioctl(fd, req, data)[16:]
+
+ with socket.socket(socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_IP) as sock:
+ fd = sock.fileno()
+
+ # struct sockaddr_in ifr_addr { unsigned short family; uint16_t sin_port ; uint32_t in_addr; }
+ req = struct.pack("@H", AF_INET) + struct.pack("=H4B", 0, 127, 0, 0, 1)
+ netdev_req(fd, SIOCSIFADDR, req)
+
+ # short ifr_flags
+ flags = struct.unpack_from('@h', netdev_req(fd, SIOCGIFFLAGS))[0]
+ flags |= IFF_UP | IFF_RUNNING
+ netdev_req(fd, SIOCSIFFLAGS, struct.pack('@h', flags))
+
+ # struct sockaddr_in ifr_netmask
+ req = struct.pack("@H", AF_INET) + struct.pack("=H4B", 0, 255, 0, 0, 0)
+ netdev_req(fd, SIOCSIFNETMASK, req)
+
def disable_network(uid=None, gid=None):
"""
Disable networking in the current process if the kernel supports it, else
@@ -1620,7 +1691,7 @@ def disable_network(uid=None, gid=None):
ret = libc.unshare(CLONE_NEWNET | CLONE_NEWUSER)
if ret != 0:
- logger.debug("System doesn't suport disabling network without admin privs")
+ logger.debug("System doesn't support disabling network without admin privs")
return
with open("/proc/self/uid_map", "w") as f:
f.write("%s %s 1" % (uid, uid))
@@ -1630,25 +1701,11 @@ def disable_network(uid=None, gid=None):
f.write("%s %s 1" % (gid, gid))
def export_proxies(d):
+ from bb.fetch2 import get_fetcher_environment
""" export common proxies variables from datastore to environment """
- import os
-
- variables = ['http_proxy', 'HTTP_PROXY', 'https_proxy', 'HTTPS_PROXY',
- 'ftp_proxy', 'FTP_PROXY', 'no_proxy', 'NO_PROXY',
- 'GIT_PROXY_COMMAND']
- exported = False
-
- for v in variables:
- if v in os.environ.keys():
- exported = True
- else:
- v_proxy = d.getVar(v)
- if v_proxy is not None:
- os.environ[v] = v_proxy
- exported = True
-
- return exported
-
+ newenv = get_fetcher_environment(d)
+ for v in newenv:
+ os.environ[v] = newenv[v]
def load_plugins(logger, plugins, pluginpath):
def load_plugin(name):
@@ -1754,3 +1811,58 @@ def is_local_uid(uid=''):
if str(uid) == line_split[2]:
return True
return False
+
+def mkstemp(suffix=None, prefix=None, dir=None, text=False):
+ """
+ Generates a unique filename, independent of time.
+
+ mkstemp() in glibc (at least) generates unique file names based on the
+ current system time. When combined with highly parallel builds, and
+ operating over NFS (e.g. shared sstate/downloads) this can result in
+ conflicts and race conditions.
+
+ This function adds additional entropy to the file name so that a collision
+ is independent of time and thus extremely unlikely.
+ """
+ entropy = "".join(random.choices("abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890", k=20))
+ if prefix:
+ prefix = prefix + entropy
+ else:
+ prefix = tempfile.gettempprefix() + entropy
+ return tempfile.mkstemp(suffix=suffix, prefix=prefix, dir=dir, text=text)
+
+def path_is_descendant(descendant, ancestor):
+ """
+ Returns True if the path `descendant` is a descendant of `ancestor`
+ (including being equivalent to `ancestor` itself). Otherwise returns False.
+ Correctly accounts for symlinks, bind mounts, etc. by using
+ os.path.samestat() to compare paths
+
+ May raise any exception that os.stat() raises
+ """
+
+ ancestor_stat = os.stat(ancestor)
+
+ # Recurse up each directory component of the descendant to see if it is
+ # equivalent to the ancestor
+ check_dir = os.path.abspath(descendant).rstrip("/")
+ while check_dir:
+ check_stat = os.stat(check_dir)
+ if os.path.samestat(check_stat, ancestor_stat):
+ return True
+ check_dir = os.path.dirname(check_dir).rstrip("/")
+
+ return False
+
+# If we don't have a timeout of some kind and a process/thread exits badly (for example
+# OOM killed) and held a lock, we'd just hang in the lock futex forever. It is better
+# we exit at some point than hang. 5 minutes with no progress means we're probably deadlocked.
+@contextmanager
+def lock_timeout(lock):
+ held = lock.acquire(timeout=5*60)
+ try:
+ if not held:
+ os._exit(1)
+ yield held
+ finally:
+ lock.release()
diff --git a/bitbake/lib/bb/xattr.py b/bitbake/lib/bb/xattr.py
new file mode 100755
index 0000000000..7b634944a4
--- /dev/null
+++ b/bitbake/lib/bb/xattr.py
@@ -0,0 +1,126 @@
+#! /usr/bin/env python3
+#
+# Copyright 2023 by Garmin Ltd. or its subsidiaries
+#
+# SPDX-License-Identifier: MIT
+
+import sys
+import ctypes
+import os
+import errno
+
+libc = ctypes.CDLL("libc.so.6", use_errno=True)
+fsencoding = sys.getfilesystemencoding()
+
+
+libc.listxattr.argtypes = [ctypes.c_char_p, ctypes.c_char_p, ctypes.c_size_t]
+libc.llistxattr.argtypes = [ctypes.c_char_p, ctypes.c_char_p, ctypes.c_size_t]
+
+
+def listxattr(path, follow=True):
+ func = libc.listxattr if follow else libc.llistxattr
+
+ os_path = os.fsencode(path)
+
+ while True:
+ length = func(os_path, None, 0)
+
+ if length < 0:
+ err = ctypes.get_errno()
+ raise OSError(err, os.strerror(err), str(path))
+
+ if length == 0:
+ return []
+
+ arr = ctypes.create_string_buffer(length)
+
+ read_length = func(os_path, arr, length)
+ if read_length != length:
+ # Race!
+ continue
+
+ return [a.decode(fsencoding) for a in arr.raw.split(b"\x00") if a]
+
+
+libc.getxattr.argtypes = [
+ ctypes.c_char_p,
+ ctypes.c_char_p,
+ ctypes.c_char_p,
+ ctypes.c_size_t,
+]
+libc.lgetxattr.argtypes = [
+ ctypes.c_char_p,
+ ctypes.c_char_p,
+ ctypes.c_char_p,
+ ctypes.c_size_t,
+]
+
+
+def getxattr(path, name, follow=True):
+ func = libc.getxattr if follow else libc.lgetxattr
+
+ os_path = os.fsencode(path)
+ os_name = os.fsencode(name)
+
+ while True:
+ length = func(os_path, os_name, None, 0)
+
+ if length < 0:
+ err = ctypes.get_errno()
+ if err == errno.ENODATA:
+ return None
+ raise OSError(err, os.strerror(err), str(path))
+
+ if length == 0:
+ return ""
+
+ arr = ctypes.create_string_buffer(length)
+
+ read_length = func(os_path, os_name, arr, length)
+ if read_length != length:
+ # Race!
+ continue
+
+ return arr.raw
+
+
+def get_all_xattr(path, follow=True):
+ attrs = {}
+
+ names = listxattr(path, follow)
+
+ for name in names:
+ value = getxattr(path, name, follow)
+ if value is None:
+ # This can happen if a value is erased after listxattr is called,
+ # so ignore it
+ continue
+ attrs[name] = value
+
+ return attrs
+
+
+def main():
+ import argparse
+ from pathlib import Path
+
+ parser = argparse.ArgumentParser()
+ parser.add_argument("path", help="File Path", type=Path)
+
+ args = parser.parse_args()
+
+ attrs = get_all_xattr(args.path)
+
+ for name, value in attrs.items():
+ try:
+ value = value.decode(fsencoding)
+ except UnicodeDecodeError:
+ pass
+
+ print(f"{name} = {value}")
+
+ return 0
+
+
+if __name__ == "__main__":
+ sys.exit(main())
diff --git a/bitbake/lib/bblayers/__init__.py b/bitbake/lib/bblayers/__init__.py
index 4e7c09da04..78efd29750 100644
--- a/bitbake/lib/bblayers/__init__.py
+++ b/bitbake/lib/bblayers/__init__.py
@@ -1,4 +1,6 @@
#
+# Copyright BitBake Contributors
+#
# SPDX-License-Identifier: GPL-2.0-only
#
diff --git a/bitbake/lib/bblayers/action.py b/bitbake/lib/bblayers/action.py
index 6723e2c605..a8f2699335 100644
--- a/bitbake/lib/bblayers/action.py
+++ b/bitbake/lib/bblayers/action.py
@@ -1,4 +1,6 @@
#
+# Copyright BitBake Contributors
+#
# SPDX-License-Identifier: GPL-2.0-only
#
@@ -9,6 +11,7 @@ import shutil
import sys
import tempfile
+from bb.cookerdata import findTopdir
import bb.utils
from bblayers.common import LayerPlugin
@@ -35,7 +38,7 @@ class ActionPlugin(LayerPlugin):
sys.stderr.write("Specified layer directory %s doesn't contain a conf/layer.conf file\n" % layerdir)
return 1
- bblayers_conf = os.path.join('conf', 'bblayers.conf')
+ bblayers_conf = os.path.join(findTopdir(),'conf', 'bblayers.conf')
if not os.path.exists(bblayers_conf):
sys.stderr.write("Unable to find bblayers.conf\n")
return 1
@@ -47,12 +50,14 @@ class ActionPlugin(LayerPlugin):
try:
notadded, _ = bb.utils.edit_bblayers_conf(bblayers_conf, layerdirs, None)
+ self.tinfoil.modified_files()
if not (args.force or notadded):
try:
self.tinfoil.run_command('parseConfiguration')
except (bb.tinfoil.TinfoilUIException, bb.BBHandledException):
# Restore the back up copy of bblayers.conf
shutil.copy2(backup, bblayers_conf)
+ self.tinfoil.modified_files()
bb.fatal("Parse failure with the specified layer added, exiting.")
else:
for item in notadded:
@@ -63,7 +68,7 @@ class ActionPlugin(LayerPlugin):
def do_remove_layer(self, args):
"""Remove one or more layers from bblayers.conf."""
- bblayers_conf = os.path.join('conf', 'bblayers.conf')
+ bblayers_conf = os.path.join(findTopdir() ,'conf', 'bblayers.conf')
if not os.path.exists(bblayers_conf):
sys.stderr.write("Unable to find bblayers.conf\n")
return 1
@@ -78,6 +83,7 @@ class ActionPlugin(LayerPlugin):
layerdir = os.path.abspath(item)
layerdirs.append(layerdir)
(_, notremoved) = bb.utils.edit_bblayers_conf(bblayers_conf, None, layerdirs)
+ self.tinfoil.modified_files()
if notremoved:
for item in notremoved:
sys.stderr.write("No layers matching %s found in BBLAYERS\n" % item)
@@ -237,6 +243,9 @@ build results (as the layer priority order has effectively changed).
if not entry_found:
logger.warning("File %s does not match the flattened layer's BBFILES setting, you may need to edit conf/layer.conf or move the file elsewhere" % f1full)
+ self.tinfoil.modified_files()
+
+
def get_file_layer(self, filename):
layerdir = self.get_file_layerdir(filename)
if layerdir:
diff --git a/bitbake/lib/bblayers/common.py b/bitbake/lib/bblayers/common.py
index 6c76ef3505..f7b9cee371 100644
--- a/bitbake/lib/bblayers/common.py
+++ b/bitbake/lib/bblayers/common.py
@@ -1,4 +1,6 @@
#
+# Copyright BitBake Contributors
+#
# SPDX-License-Identifier: GPL-2.0-only
#
diff --git a/bitbake/lib/bblayers/layerindex.py b/bitbake/lib/bblayers/layerindex.py
index 7936516209..ba91fac669 100644
--- a/bitbake/lib/bblayers/layerindex.py
+++ b/bitbake/lib/bblayers/layerindex.py
@@ -1,4 +1,6 @@
#
+# Copyright BitBake Contributors
+#
# SPDX-License-Identifier: GPL-2.0-only
#
@@ -47,6 +49,31 @@ class LayerIndexPlugin(ActionPlugin):
else:
logger.plain("Repository %s needs to be fetched" % url)
return subdir, layername, layerdir
+ elif os.path.exists(repodir) and branch:
+ """
+ If the repo is already cloned, ensure it is on the correct branch,
+ switching branches if necessary and possible.
+ """
+ base_cmd = ['git', '--git-dir=%s/.git' % repodir, '--work-tree=%s' % repodir]
+ cmd = base_cmd + ['branch']
+ completed_proc = subprocess.run(cmd, text=True, capture_output=True)
+ if completed_proc.returncode:
+ logger.error("Unable to validate repo %s (%s)" % (repodir, stderr))
+ return None, None, None
+ else:
+ if branch != completed_proc.stdout[2:-1]:
+ cmd = base_cmd + ['status', '--short']
+ completed_proc = subprocess.run(cmd, text=True, capture_output=True)
+ if completed_proc.stdout.count('\n') != 0:
+ logger.warning("There are uncommitted changes in repo %s" % repodir)
+ cmd = base_cmd + ['checkout', branch]
+ completed_proc = subprocess.run(cmd, text=True, capture_output=True)
+ if completed_proc.returncode:
+ # Could be due to original shallow clone on a different branch for example
+ logger.error("Unable to automatically switch %s to desired branch '%s' (%s)"
+ % (repodir, branch, completed_proc.stderr))
+ return None, None, None
+ return subdir, layername, layerdir
elif os.path.exists(layerdir):
return subdir, layername, layerdir
else:
diff --git a/bitbake/lib/bblayers/query.py b/bitbake/lib/bblayers/query.py
index 525d4f0d47..bfc18a7593 100644
--- a/bitbake/lib/bblayers/query.py
+++ b/bitbake/lib/bblayers/query.py
@@ -1,4 +1,6 @@
#
+# Copyright BitBake Contributors
+#
# SPDX-License-Identifier: GPL-2.0-only
#
@@ -27,12 +29,12 @@ class QueryPlugin(LayerPlugin):
def do_show_layers(self, args):
"""show current configured layers."""
- logger.plain("%s %s %s" % ("layer".ljust(20), "path".ljust(40), "priority"))
- logger.plain('=' * 74)
+ logger.plain("%s %s %s" % ("layer".ljust(20), "path".ljust(70), "priority"))
+ logger.plain('=' * 104)
for layer, _, regex, pri in self.tinfoil.cooker.bbfile_config_priorities:
layerdir = self.bbfile_collections.get(layer, None)
- layername = self.get_layer_name(layerdir)
- logger.plain("%s %s %d" % (layername.ljust(20), layerdir.ljust(40), pri))
+ layername = layer
+ logger.plain("%s %s %s" % (layername.ljust(20), layerdir.ljust(70), pri))
def version_str(self, pe, pv, pr = None):
verstr = "%s" % pv
@@ -55,11 +57,12 @@ are overlayed will also be listed, with a " (skipped)" suffix.
# Check for overlayed .bbclass files
classes = collections.defaultdict(list)
for layerdir in self.bblayers:
- classdir = os.path.join(layerdir, 'classes')
- if os.path.exists(classdir):
- for classfile in os.listdir(classdir):
- if os.path.splitext(classfile)[1] == '.bbclass':
- classes[classfile].append(classdir)
+ for c in ["classes-global", "classes-recipe", "classes"]:
+ classdir = os.path.join(layerdir, c)
+ if os.path.exists(classdir):
+ for classfile in os.listdir(classdir):
+ if os.path.splitext(classfile)[1] == '.bbclass':
+ classes[classfile].append(classdir)
# Locating classes and other files is a bit more complicated than recipes -
# layer priority is not a factor; instead BitBake uses the first matching
@@ -122,9 +125,14 @@ skipped recipes will also be listed, with a " (skipped)" suffix.
if inherits:
bbpath = str(self.tinfoil.config_data.getVar('BBPATH'))
for classname in inherits:
- classfile = 'classes/%s.bbclass' % classname
- if not bb.utils.which(bbpath, classfile, history=False):
- logger.error('No class named %s found in BBPATH', classfile)
+ found = False
+ for c in ["classes-global", "classes-recipe", "classes"]:
+ cfile = c + '/%s.bbclass' % classname
+ if bb.utils.which(bbpath, cfile, history=False):
+ found = True
+ break
+ if not found:
+ logger.error('No class named %s found in BBPATH', classname)
sys.exit(1)
pkg_pn = self.tinfoil.cooker.recipecaches[mc].pkg_pn
@@ -172,7 +180,7 @@ skipped recipes will also be listed, with a " (skipped)" suffix.
logger.plain(" %s %s%s", layer.ljust(20), ver, skipped)
global_inherit = (self.tinfoil.config_data.getVar('INHERIT') or "").split()
- cls_re = re.compile('classes/')
+ cls_re = re.compile('classes.*/')
preffiles = []
show_unique_pn = []
@@ -274,7 +282,10 @@ Lists recipes with the bbappends that apply to them as subitems.
else:
logger.plain('=== Appended recipes ===')
- pnlist = list(self.tinfoil.cooker_data.pkg_pn.keys())
+
+ cooker_data = self.tinfoil.cooker.recipecaches[args.mc]
+
+ pnlist = list(cooker_data.pkg_pn.keys())
pnlist.sort()
appends = False
for pn in pnlist:
@@ -287,7 +298,7 @@ Lists recipes with the bbappends that apply to them as subitems.
if not found:
continue
- if self.show_appends_for_pn(pn):
+ if self.show_appends_for_pn(pn, cooker_data, args.mc):
appends = True
if not args.pnspec and self.show_appends_for_skipped():
@@ -296,8 +307,10 @@ Lists recipes with the bbappends that apply to them as subitems.
if not appends:
logger.plain('No append files found')
- def show_appends_for_pn(self, pn):
- filenames = self.tinfoil.cooker_data.pkg_pn[pn]
+ def show_appends_for_pn(self, pn, cooker_data, mc):
+ filenames = cooker_data.pkg_pn[pn]
+ if mc:
+ pn = "mc:%s:%s" % (mc, pn)
best = self.tinfoil.find_best_provider(pn)
best_filename = os.path.basename(best[3])
@@ -405,7 +418,7 @@ NOTE: .bbappend files can impact the dependencies.
self.check_cross_depends("RRECOMMENDS", layername, f, best, args.filenames, ignore_layers)
# The inherit class
- cls_re = re.compile('classes/')
+ cls_re = re.compile('classes.*/')
if f in self.tinfoil.cooker_data.inherits:
inherits = self.tinfoil.cooker_data.inherits[f]
for cls in inherits:
@@ -522,6 +535,7 @@ NOTE: .bbappend files can impact the dependencies.
parser_show_appends = self.add_command(sp, 'show-appends', self.do_show_appends)
parser_show_appends.add_argument('pnspec', nargs='*', help='optional recipe name specification (wildcards allowed, enclose in quotes to avoid shell expansion)')
+ parser_show_appends.add_argument('--mc', help='use specified multiconfig', default='')
parser_show_cross_depends = self.add_command(sp, 'show-cross-depends', self.do_show_cross_depends)
parser_show_cross_depends.add_argument('-f', '--filenames', help='show full file path', action='store_true')
diff --git a/bitbake/lib/bs4/tests/test_tree.py b/bitbake/lib/bs4/tests/test_tree.py
index 8e5c66426e..cf0f1abe0c 100644
--- a/bitbake/lib/bs4/tests/test_tree.py
+++ b/bitbake/lib/bs4/tests/test_tree.py
@@ -585,7 +585,7 @@ class SiblingTest(TreeTest):
</html>'''
# All that whitespace looks good but makes the tests more
# difficult. Get rid of it.
- markup = re.compile("\n\s*").sub("", markup)
+ markup = re.compile(r"\n\s*").sub("", markup)
self.tree = self.soup(markup)
diff --git a/bitbake/lib/codegen.py b/bitbake/lib/codegen.py
index 6955a7ada5..018b283177 100644
--- a/bitbake/lib/codegen.py
+++ b/bitbake/lib/codegen.py
@@ -392,19 +392,7 @@ class SourceGenerator(NodeVisitor):
def visit_Name(self, node):
self.write(node.id)
- def visit_Str(self, node):
- self.write(repr(node.s))
-
- def visit_Bytes(self, node):
- self.write(repr(node.s))
-
- def visit_Num(self, node):
- self.write(repr(node.n))
-
def visit_Constant(self, node):
- # Python 3.8 deprecated visit_Num(), visit_Str(), visit_Bytes(),
- # visit_NameConstant() and visit_Ellipsis(). They can be removed once we
- # require 3.8+.
self.write(repr(node.value))
def visit_Tuple(self, node):
diff --git a/bitbake/lib/hashserv/__init__.py b/bitbake/lib/hashserv/__init__.py
index 9cb3fd57a5..74367eb6b4 100644
--- a/bitbake/lib/hashserv/__init__.py
+++ b/bitbake/lib/hashserv/__init__.py
@@ -5,151 +5,102 @@
import asyncio
from contextlib import closing
-import re
-import sqlite3
import itertools
import json
+from collections import namedtuple
+from urllib.parse import urlparse
+from bb.asyncrpc.client import parse_address, ADDR_TYPE_UNIX, ADDR_TYPE_WS
+
+User = namedtuple("User", ("username", "permissions"))
+
+def create_server(
+ addr,
+ dbname,
+ *,
+ sync=True,
+ upstream=None,
+ read_only=False,
+ db_username=None,
+ db_password=None,
+ anon_perms=None,
+ admin_username=None,
+ admin_password=None,
+):
+ def sqlite_engine():
+ from .sqlite import DatabaseEngine
+
+ return DatabaseEngine(dbname, sync)
+
+ def sqlalchemy_engine():
+ from .sqlalchemy import DatabaseEngine
+
+ return DatabaseEngine(dbname, db_username, db_password)
-UNIX_PREFIX = "unix://"
-
-ADDR_TYPE_UNIX = 0
-ADDR_TYPE_TCP = 1
-
-# The Python async server defaults to a 64K receive buffer, so we hardcode our
-# maximum chunk size. It would be better if the client and server reported to
-# each other what the maximum chunk sizes were, but that will slow down the
-# connection setup with a round trip delay so I'd rather not do that unless it
-# is necessary
-DEFAULT_MAX_CHUNK = 32 * 1024
-
-UNIHASH_TABLE_DEFINITION = (
- ("method", "TEXT NOT NULL", "UNIQUE"),
- ("taskhash", "TEXT NOT NULL", "UNIQUE"),
- ("unihash", "TEXT NOT NULL", ""),
-)
-
-UNIHASH_TABLE_COLUMNS = tuple(name for name, _, _ in UNIHASH_TABLE_DEFINITION)
-
-OUTHASH_TABLE_DEFINITION = (
- ("method", "TEXT NOT NULL", "UNIQUE"),
- ("taskhash", "TEXT NOT NULL", "UNIQUE"),
- ("outhash", "TEXT NOT NULL", "UNIQUE"),
- ("created", "DATETIME", ""),
-
- # Optional fields
- ("owner", "TEXT", ""),
- ("PN", "TEXT", ""),
- ("PV", "TEXT", ""),
- ("PR", "TEXT", ""),
- ("task", "TEXT", ""),
- ("outhash_siginfo", "TEXT", ""),
-)
-
-OUTHASH_TABLE_COLUMNS = tuple(name for name, _, _ in OUTHASH_TABLE_DEFINITION)
-
-def _make_table(cursor, name, definition):
- cursor.execute('''
- CREATE TABLE IF NOT EXISTS {name} (
- id INTEGER PRIMARY KEY AUTOINCREMENT,
- {fields}
- UNIQUE({unique})
- )
- '''.format(
- name=name,
- fields=" ".join("%s %s," % (name, typ) for name, typ, _ in definition),
- unique=", ".join(name for name, _, flags in definition if "UNIQUE" in flags)
- ))
-
-
-def setup_database(database, sync=True):
- db = sqlite3.connect(database)
- db.row_factory = sqlite3.Row
-
- with closing(db.cursor()) as cursor:
- _make_table(cursor, "unihashes_v2", UNIHASH_TABLE_DEFINITION)
- _make_table(cursor, "outhashes_v2", OUTHASH_TABLE_DEFINITION)
-
- cursor.execute('PRAGMA journal_mode = WAL')
- cursor.execute('PRAGMA synchronous = %s' % ('NORMAL' if sync else 'OFF'))
-
- # Drop old indexes
- cursor.execute('DROP INDEX IF EXISTS taskhash_lookup')
- cursor.execute('DROP INDEX IF EXISTS outhash_lookup')
- cursor.execute('DROP INDEX IF EXISTS taskhash_lookup_v2')
- cursor.execute('DROP INDEX IF EXISTS outhash_lookup_v2')
-
- # TODO: Upgrade from tasks_v2?
- cursor.execute('DROP TABLE IF EXISTS tasks_v2')
-
- # Create new indexes
- cursor.execute('CREATE INDEX IF NOT EXISTS taskhash_lookup_v3 ON unihashes_v2 (method, taskhash)')
- cursor.execute('CREATE INDEX IF NOT EXISTS outhash_lookup_v3 ON outhashes_v2 (method, outhash)')
-
- return db
-
-
-def parse_address(addr):
- if addr.startswith(UNIX_PREFIX):
- return (ADDR_TYPE_UNIX, (addr[len(UNIX_PREFIX):],))
- else:
- m = re.match(r'\[(?P<host>[^\]]*)\]:(?P<port>\d+)$', addr)
- if m is not None:
- host = m.group('host')
- port = m.group('port')
- else:
- host, port = addr.split(':')
-
- return (ADDR_TYPE_TCP, (host, int(port)))
-
+ from . import server
-def chunkify(msg, max_chunk):
- if len(msg) < max_chunk - 1:
- yield ''.join((msg, "\n"))
+ if "://" in dbname:
+ db_engine = sqlalchemy_engine()
else:
- yield ''.join((json.dumps({
- 'chunk-stream': None
- }), "\n"))
+ db_engine = sqlite_engine()
- args = [iter(msg)] * (max_chunk - 1)
- for m in map(''.join, itertools.zip_longest(*args, fillvalue='')):
- yield ''.join(itertools.chain(m, "\n"))
- yield "\n"
+ if anon_perms is None:
+ anon_perms = server.DEFAULT_ANON_PERMS
-
-def create_server(addr, dbname, *, sync=True, upstream=None, read_only=False):
- from . import server
- db = setup_database(dbname, sync=sync)
- s = server.Server(db, upstream=upstream, read_only=read_only)
+ s = server.Server(
+ db_engine,
+ upstream=upstream,
+ read_only=read_only,
+ anon_perms=anon_perms,
+ admin_username=admin_username,
+ admin_password=admin_password,
+ )
(typ, a) = parse_address(addr)
if typ == ADDR_TYPE_UNIX:
s.start_unix_server(*a)
+ elif typ == ADDR_TYPE_WS:
+ url = urlparse(a[0])
+ s.start_websocket_server(url.hostname, url.port)
else:
s.start_tcp_server(*a)
return s
-def create_client(addr):
+def create_client(addr, username=None, password=None):
from . import client
- c = client.Client()
- (typ, a) = parse_address(addr)
- if typ == ADDR_TYPE_UNIX:
- c.connect_unix(*a)
- else:
- c.connect_tcp(*a)
+ c = client.Client(username, password)
+
+ try:
+ (typ, a) = parse_address(addr)
+ if typ == ADDR_TYPE_UNIX:
+ c.connect_unix(*a)
+ elif typ == ADDR_TYPE_WS:
+ c.connect_websocket(*a)
+ else:
+ c.connect_tcp(*a)
+ return c
+ except Exception as e:
+ c.close()
+ raise e
- return c
-async def create_async_client(addr):
+async def create_async_client(addr, username=None, password=None):
from . import client
- c = client.AsyncClient()
- (typ, a) = parse_address(addr)
- if typ == ADDR_TYPE_UNIX:
- await c.connect_unix(*a)
- else:
- await c.connect_tcp(*a)
+ c = client.AsyncClient(username, password)
+
+ try:
+ (typ, a) = parse_address(addr)
+ if typ == ADDR_TYPE_UNIX:
+ await c.connect_unix(*a)
+ elif typ == ADDR_TYPE_WS:
+ await c.connect_websocket(*a)
+ else:
+ await c.connect_tcp(*a)
- return c
+ return c
+ except Exception as e:
+ await c.close()
+ raise e
diff --git a/bitbake/lib/hashserv/client.py b/bitbake/lib/hashserv/client.py
index b2aa1026ac..0b254beddd 100644
--- a/bitbake/lib/hashserv/client.py
+++ b/bitbake/lib/hashserv/client.py
@@ -6,6 +6,7 @@
import logging
import socket
import bb.asyncrpc
+import json
from . import create_async_client
@@ -15,107 +16,331 @@ logger = logging.getLogger("hashserv.client")
class AsyncClient(bb.asyncrpc.AsyncClient):
MODE_NORMAL = 0
MODE_GET_STREAM = 1
+ MODE_EXIST_STREAM = 2
- def __init__(self):
- super().__init__('OEHASHEQUIV', '1.1', logger)
+ def __init__(self, username=None, password=None):
+ super().__init__("OEHASHEQUIV", "1.1", logger)
self.mode = self.MODE_NORMAL
+ self.username = username
+ self.password = password
+ self.saved_become_user = None
async def setup_connection(self):
await super().setup_connection()
- cur_mode = self.mode
self.mode = self.MODE_NORMAL
- await self._set_mode(cur_mode)
+ if self.username:
+ # Save off become user temporarily because auth() resets it
+ become = self.saved_become_user
+ await self.auth(self.username, self.password)
- async def send_stream(self, msg):
+ if become:
+ await self.become_user(become)
+
+ async def send_stream(self, mode, msg):
async def proc():
- self.writer.write(("%s\n" % msg).encode("utf-8"))
- await self.writer.drain()
- l = await self.reader.readline()
- if not l:
- raise ConnectionError("Connection closed")
- return l.decode("utf-8").rstrip()
+ await self._set_mode(mode)
+ await self.socket.send(msg)
+ return await self.socket.recv()
return await self._send_wrapper(proc)
+ async def invoke(self, *args, **kwargs):
+ # It's OK if connection errors cause a failure here, because the mode
+ # is also reset to normal on a new connection
+ await self._set_mode(self.MODE_NORMAL)
+ return await super().invoke(*args, **kwargs)
+
async def _set_mode(self, new_mode):
- if new_mode == self.MODE_NORMAL and self.mode == self.MODE_GET_STREAM:
- r = await self.send_stream("END")
+ async def stream_to_normal():
+ await self.socket.send("END")
+ return await self.socket.recv()
+
+ async def normal_to_stream(command):
+ r = await self.invoke({command: None})
if r != "ok":
- raise ConnectionError("Bad response from server %r" % r)
- elif new_mode == self.MODE_GET_STREAM and self.mode == self.MODE_NORMAL:
- r = await self.send_message({"get-stream": None})
+ raise ConnectionError(
+ f"Unable to transition to stream mode: Bad response from server {r!r}"
+ )
+
+ self.logger.debug("Mode is now %s", command)
+
+ if new_mode == self.mode:
+ return
+
+ self.logger.debug("Transitioning mode %s -> %s", self.mode, new_mode)
+
+ # Always transition to normal mode before switching to any other mode
+ if self.mode != self.MODE_NORMAL:
+ r = await self._send_wrapper(stream_to_normal)
if r != "ok":
- raise ConnectionError("Bad response from server %r" % r)
- elif new_mode != self.mode:
- raise Exception(
- "Undefined mode transition %r -> %r" % (self.mode, new_mode)
- )
+ self.check_invoke_error(r)
+ raise ConnectionError(
+ f"Unable to transition to normal mode: Bad response from server {r!r}"
+ )
+ self.logger.debug("Mode is now normal")
+
+ if new_mode == self.MODE_GET_STREAM:
+ await normal_to_stream("get-stream")
+ elif new_mode == self.MODE_EXIST_STREAM:
+ await normal_to_stream("exists-stream")
+ elif new_mode != self.MODE_NORMAL:
+ raise Exception("Undefined mode transition {self.mode!r} -> {new_mode!r}")
self.mode = new_mode
async def get_unihash(self, method, taskhash):
- await self._set_mode(self.MODE_GET_STREAM)
- r = await self.send_stream("%s %s" % (method, taskhash))
+ r = await self.send_stream(self.MODE_GET_STREAM, "%s %s" % (method, taskhash))
if not r:
return None
return r
async def report_unihash(self, taskhash, method, outhash, unihash, extra={}):
- await self._set_mode(self.MODE_NORMAL)
m = extra.copy()
m["taskhash"] = taskhash
m["method"] = method
m["outhash"] = outhash
m["unihash"] = unihash
- return await self.send_message({"report": m})
+ return await self.invoke({"report": m})
async def report_unihash_equiv(self, taskhash, method, unihash, extra={}):
- await self._set_mode(self.MODE_NORMAL)
m = extra.copy()
m["taskhash"] = taskhash
m["method"] = method
m["unihash"] = unihash
- return await self.send_message({"report-equiv": m})
+ return await self.invoke({"report-equiv": m})
async def get_taskhash(self, method, taskhash, all_properties=False):
- await self._set_mode(self.MODE_NORMAL)
- return await self.send_message(
+ return await self.invoke(
{"get": {"taskhash": taskhash, "method": method, "all": all_properties}}
)
- async def get_outhash(self, method, outhash, taskhash):
- await self._set_mode(self.MODE_NORMAL)
- return await self.send_message(
- {"get-outhash": {"outhash": outhash, "taskhash": taskhash, "method": method}}
+ async def unihash_exists(self, unihash):
+ r = await self.send_stream(self.MODE_EXIST_STREAM, unihash)
+ return r == "true"
+
+ async def get_outhash(self, method, outhash, taskhash, with_unihash=True):
+ return await self.invoke(
+ {
+ "get-outhash": {
+ "outhash": outhash,
+ "taskhash": taskhash,
+ "method": method,
+ "with_unihash": with_unihash,
+ }
+ }
)
async def get_stats(self):
- await self._set_mode(self.MODE_NORMAL)
- return await self.send_message({"get-stats": None})
+ return await self.invoke({"get-stats": None})
async def reset_stats(self):
- await self._set_mode(self.MODE_NORMAL)
- return await self.send_message({"reset-stats": None})
+ return await self.invoke({"reset-stats": None})
async def backfill_wait(self):
- await self._set_mode(self.MODE_NORMAL)
- return (await self.send_message({"backfill-wait": None}))["tasks"]
+ return (await self.invoke({"backfill-wait": None}))["tasks"]
+
+ async def remove(self, where):
+ return await self.invoke({"remove": {"where": where}})
+
+ async def clean_unused(self, max_age):
+ return await self.invoke({"clean-unused": {"max_age_seconds": max_age}})
+
+ async def auth(self, username, token):
+ result = await self.invoke({"auth": {"username": username, "token": token}})
+ self.username = username
+ self.password = token
+ self.saved_become_user = None
+ return result
+
+ async def refresh_token(self, username=None):
+ m = {}
+ if username:
+ m["username"] = username
+ result = await self.invoke({"refresh-token": m})
+ if (
+ self.username
+ and not self.saved_become_user
+ and result["username"] == self.username
+ ):
+ self.password = result["token"]
+ return result
+
+ async def set_user_perms(self, username, permissions):
+ return await self.invoke(
+ {"set-user-perms": {"username": username, "permissions": permissions}}
+ )
+
+ async def get_user(self, username=None):
+ m = {}
+ if username:
+ m["username"] = username
+ return await self.invoke({"get-user": m})
+
+ async def get_all_users(self):
+ return (await self.invoke({"get-all-users": {}}))["users"]
+
+ async def new_user(self, username, permissions):
+ return await self.invoke(
+ {"new-user": {"username": username, "permissions": permissions}}
+ )
+
+ async def delete_user(self, username):
+ return await self.invoke({"delete-user": {"username": username}})
+
+ async def become_user(self, username):
+ result = await self.invoke({"become-user": {"username": username}})
+ if username == self.username:
+ self.saved_become_user = None
+ else:
+ self.saved_become_user = username
+ return result
+
+ async def get_db_usage(self):
+ return (await self.invoke({"get-db-usage": {}}))["usage"]
+
+ async def get_db_query_columns(self):
+ return (await self.invoke({"get-db-query-columns": {}}))["columns"]
+
+ async def gc_status(self):
+ return await self.invoke({"gc-status": {}})
+
+ async def gc_mark(self, mark, where):
+ """
+ Starts a new garbage collection operation identified by "mark". If
+ garbage collection is already in progress with "mark", the collection
+ is continued.
+
+ All unihash entries that match the "where" clause are marked to be
+ kept. In addition, any new entries added to the database after this
+ command will be automatically marked with "mark"
+ """
+ return await self.invoke({"gc-mark": {"mark": mark, "where": where}})
+
+ async def gc_sweep(self, mark):
+ """
+ Finishes garbage collection for "mark". All unihash entries that have
+ not been marked will be deleted.
+
+ It is recommended to clean unused outhash entries after running this to
+ cleanup any dangling outhashes
+ """
+ return await self.invoke({"gc-sweep": {"mark": mark}})
class Client(bb.asyncrpc.Client):
- def __init__(self):
+ def __init__(self, username=None, password=None):
+ self.username = username
+ self.password = password
+
super().__init__()
self._add_methods(
"connect_tcp",
+ "connect_websocket",
"get_unihash",
"report_unihash",
"report_unihash_equiv",
"get_taskhash",
+ "unihash_exists",
"get_outhash",
"get_stats",
"reset_stats",
"backfill_wait",
+ "remove",
+ "clean_unused",
+ "auth",
+ "refresh_token",
+ "set_user_perms",
+ "get_user",
+ "get_all_users",
+ "new_user",
+ "delete_user",
+ "become_user",
+ "get_db_usage",
+ "get_db_query_columns",
+ "gc_status",
+ "gc_mark",
+ "gc_sweep",
)
def _get_async_client(self):
- return AsyncClient()
+ return AsyncClient(self.username, self.password)
+
+
+class ClientPool(bb.asyncrpc.ClientPool):
+ def __init__(
+ self,
+ address,
+ max_clients,
+ *,
+ username=None,
+ password=None,
+ become=None,
+ ):
+ super().__init__(max_clients)
+ self.address = address
+ self.username = username
+ self.password = password
+ self.become = become
+
+ async def _new_client(self):
+ client = await create_async_client(
+ self.address,
+ username=self.username,
+ password=self.password,
+ )
+ if self.become:
+ await client.become_user(self.become)
+ return client
+
+ def _run_key_tasks(self, queries, call):
+ results = {key: None for key in queries.keys()}
+
+ def make_task(key, args):
+ async def task(client):
+ nonlocal results
+ unihash = await call(client, args)
+ results[key] = unihash
+
+ return task
+
+ def gen_tasks():
+ for key, args in queries.items():
+ yield make_task(key, args)
+
+ self.run_tasks(gen_tasks())
+ return results
+
+ def get_unihashes(self, queries):
+ """
+ Query multiple unihashes in parallel.
+
+ The queries argument is a dictionary with arbitrary key. The values
+ must be a tuple of (method, taskhash).
+
+ Returns a dictionary with a corresponding key for each input key, and
+ the value is the queried unihash (which might be none if the query
+ failed)
+ """
+
+ async def call(client, args):
+ method, taskhash = args
+ return await client.get_unihash(method, taskhash)
+
+ return self._run_key_tasks(queries, call)
+
+ def unihashes_exist(self, queries):
+ """
+ Query multiple unihash existence checks in parallel.
+
+ The queries argument is a dictionary with arbitrary key. The values
+ must be a unihash.
+
+ Returns a dictionary with a corresponding key for each input key, and
+ the value is True or False if the unihash is known by the server (or
+ None if there was a failure)
+ """
+
+ async def call(client, unihash):
+ return await client.unihash_exists(unihash)
+
+ return self._run_key_tasks(queries, call)
diff --git a/bitbake/lib/hashserv/server.py b/bitbake/lib/hashserv/server.py
index d40a2ab8f8..68f64f983b 100644
--- a/bitbake/lib/hashserv/server.py
+++ b/bitbake/lib/hashserv/server.py
@@ -3,18 +3,51 @@
# SPDX-License-Identifier: GPL-2.0-only
#
-from contextlib import closing, contextmanager
-from datetime import datetime
-import enum
+from datetime import datetime, timedelta
import asyncio
import logging
import math
import time
-from . import create_async_client, UNIHASH_TABLE_COLUMNS, OUTHASH_TABLE_COLUMNS
+import os
+import base64
+import hashlib
+from . import create_async_client
import bb.asyncrpc
+logger = logging.getLogger("hashserv.server")
-logger = logging.getLogger('hashserv.server')
+
+# This permission only exists to match nothing
+NONE_PERM = "@none"
+
+READ_PERM = "@read"
+REPORT_PERM = "@report"
+DB_ADMIN_PERM = "@db-admin"
+USER_ADMIN_PERM = "@user-admin"
+ALL_PERM = "@all"
+
+ALL_PERMISSIONS = {
+ READ_PERM,
+ REPORT_PERM,
+ DB_ADMIN_PERM,
+ USER_ADMIN_PERM,
+ ALL_PERM,
+}
+
+DEFAULT_ANON_PERMS = (
+ READ_PERM,
+ REPORT_PERM,
+ DB_ADMIN_PERM,
+)
+
+TOKEN_ALGORITHM = "sha256"
+
+# 48 bytes of random data will result in 64 characters when base64
+# encoded. This number also ensures that the base64 encoding won't have any
+# trailing '=' characters.
+TOKEN_SIZE = 48
+
+SALT_SIZE = 8
class Measurement(object):
@@ -104,459 +137,745 @@ class Stats(object):
return math.sqrt(self.s / (self.num - 1))
def todict(self):
- return {k: getattr(self, k) for k in ('num', 'total_time', 'max_time', 'average', 'stdev')}
-
-
-@enum.unique
-class Resolve(enum.Enum):
- FAIL = enum.auto()
- IGNORE = enum.auto()
- REPLACE = enum.auto()
-
-
-def insert_table(cursor, table, data, on_conflict):
- resolve = {
- Resolve.FAIL: "",
- Resolve.IGNORE: " OR IGNORE",
- Resolve.REPLACE: " OR REPLACE",
- }[on_conflict]
-
- keys = sorted(data.keys())
- query = 'INSERT{resolve} INTO {table} ({fields}) VALUES({values})'.format(
- resolve=resolve,
- table=table,
- fields=", ".join(keys),
- values=", ".join(":" + k for k in keys),
- )
- prevrowid = cursor.lastrowid
- cursor.execute(query, data)
- logging.debug(
- "Inserting %r into %s, %s",
- data,
- table,
- on_conflict
- )
- return (cursor.lastrowid, cursor.lastrowid != prevrowid)
-
-def insert_unihash(cursor, data, on_conflict):
- return insert_table(cursor, "unihashes_v2", data, on_conflict)
-
-def insert_outhash(cursor, data, on_conflict):
- return insert_table(cursor, "outhashes_v2", data, on_conflict)
-
-async def copy_unihash_from_upstream(client, db, method, taskhash):
- d = await client.get_taskhash(method, taskhash)
- if d is not None:
- with closing(db.cursor()) as cursor:
- insert_unihash(
- cursor,
- {k: v for k, v in d.items() if k in UNIHASH_TABLE_COLUMNS},
- Resolve.IGNORE,
- )
- db.commit()
- return d
+ return {
+ k: getattr(self, k)
+ for k in ("num", "total_time", "max_time", "average", "stdev")
+ }
-class ServerCursor(object):
- def __init__(self, db, cursor, upstream):
- self.db = db
- self.cursor = cursor
- self.upstream = upstream
+token_refresh_semaphore = asyncio.Lock()
+
+
+async def new_token():
+ # Prevent malicious users from using this API to deduce the entropy
+ # pool on the server and thus be able to guess a token. *All* token
+ # refresh requests lock the same global semaphore and then sleep for a
+ # short time. The effectively rate limits the total number of requests
+ # than can be made across all clients to 10/second, which should be enough
+ # since you have to be an authenticated users to make the request in the
+ # first place
+ async with token_refresh_semaphore:
+ await asyncio.sleep(0.1)
+ raw = os.getrandom(TOKEN_SIZE, os.GRND_NONBLOCK)
+
+ return base64.b64encode(raw, b"._").decode("utf-8")
+
+
+def new_salt():
+ return os.getrandom(SALT_SIZE, os.GRND_NONBLOCK).hex()
+
+
+def hash_token(algo, salt, token):
+ h = hashlib.new(algo)
+ h.update(salt.encode("utf-8"))
+ h.update(token.encode("utf-8"))
+ return ":".join([algo, salt, h.hexdigest()])
+
+
+def permissions(*permissions, allow_anon=True, allow_self_service=False):
+ """
+ Function decorator that can be used to decorate an RPC function call and
+ check that the current users permissions match the require permissions.
+
+ If allow_anon is True, the user will also be allowed to make the RPC call
+ if the anonymous user permissions match the permissions.
+
+ If allow_self_service is True, and the "username" property in the request
+ is the currently logged in user, or not specified, the user will also be
+ allowed to make the request. This allows users to access normal privileged
+ API, as long as they are only modifying their own user properties (e.g.
+ users can be allowed to reset their own token without @user-admin
+ permissions, but not the token for any other user.
+ """
+
+ def wrapper(func):
+ async def wrap(self, request):
+ if allow_self_service and self.user is not None:
+ username = request.get("username", self.user.username)
+ if username == self.user.username:
+ request["username"] = self.user.username
+ return await func(self, request)
+
+ if not self.user_has_permissions(*permissions, allow_anon=allow_anon):
+ if not self.user:
+ username = "Anonymous user"
+ user_perms = self.server.anon_perms
+ else:
+ username = self.user.username
+ user_perms = self.user.permissions
+
+ self.logger.info(
+ "User %s with permissions %r denied from calling %s. Missing permissions(s) %r",
+ username,
+ ", ".join(user_perms),
+ func.__name__,
+ ", ".join(permissions),
+ )
+ raise bb.asyncrpc.InvokeError(
+ f"{username} is not allowed to access permissions(s) {', '.join(permissions)}"
+ )
+
+ return await func(self, request)
+
+ return wrap
+
+ return wrapper
class ServerClient(bb.asyncrpc.AsyncServerConnection):
- def __init__(self, reader, writer, db, request_stats, backfill_queue, upstream, read_only):
- super().__init__(reader, writer, 'OEHASHEQUIV', logger)
- self.db = db
- self.request_stats = request_stats
+ def __init__(self, socket, server):
+ super().__init__(socket, "OEHASHEQUIV", server.logger)
+ self.server = server
self.max_chunk = bb.asyncrpc.DEFAULT_MAX_CHUNK
- self.backfill_queue = backfill_queue
- self.upstream = upstream
+ self.user = None
- self.handlers.update({
- 'get': self.handle_get,
- 'get-outhash': self.handle_get_outhash,
- 'get-stream': self.handle_get_stream,
- 'get-stats': self.handle_get_stats,
- })
-
- if not read_only:
- self.handlers.update({
- 'report': self.handle_report,
- 'report-equiv': self.handle_equivreport,
- 'reset-stats': self.handle_reset_stats,
- 'backfill-wait': self.handle_backfill_wait,
- })
+ self.handlers.update(
+ {
+ "get": self.handle_get,
+ "get-outhash": self.handle_get_outhash,
+ "get-stream": self.handle_get_stream,
+ "exists-stream": self.handle_exists_stream,
+ "get-stats": self.handle_get_stats,
+ "get-db-usage": self.handle_get_db_usage,
+ "get-db-query-columns": self.handle_get_db_query_columns,
+ # Not always read-only, but internally checks if the server is
+ # read-only
+ "report": self.handle_report,
+ "auth": self.handle_auth,
+ "get-user": self.handle_get_user,
+ "get-all-users": self.handle_get_all_users,
+ "become-user": self.handle_become_user,
+ }
+ )
+
+ if not self.server.read_only:
+ self.handlers.update(
+ {
+ "report-equiv": self.handle_equivreport,
+ "reset-stats": self.handle_reset_stats,
+ "backfill-wait": self.handle_backfill_wait,
+ "remove": self.handle_remove,
+ "gc-mark": self.handle_gc_mark,
+ "gc-sweep": self.handle_gc_sweep,
+ "gc-status": self.handle_gc_status,
+ "clean-unused": self.handle_clean_unused,
+ "refresh-token": self.handle_refresh_token,
+ "set-user-perms": self.handle_set_perms,
+ "new-user": self.handle_new_user,
+ "delete-user": self.handle_delete_user,
+ }
+ )
+
+ def raise_no_user_error(self, username):
+ raise bb.asyncrpc.InvokeError(f"No user named '{username}' exists")
+
+ def user_has_permissions(self, *permissions, allow_anon=True):
+ permissions = set(permissions)
+ if allow_anon:
+ if ALL_PERM in self.server.anon_perms:
+ return True
+
+ if not permissions - self.server.anon_perms:
+ return True
+
+ if self.user is None:
+ return False
+
+ if ALL_PERM in self.user.permissions:
+ return True
+
+ if not permissions - self.user.permissions:
+ return True
+
+ return False
def validate_proto_version(self):
- return (self.proto_version > (1, 0) and self.proto_version <= (1, 1))
+ return self.proto_version > (1, 0) and self.proto_version <= (1, 1)
async def process_requests(self):
- if self.upstream is not None:
- self.upstream_client = await create_async_client(self.upstream)
- else:
- self.upstream_client = None
-
- await super().process_requests()
+ async with self.server.db_engine.connect(self.logger) as db:
+ self.db = db
+ if self.server.upstream is not None:
+ self.upstream_client = await create_async_client(self.server.upstream)
+ else:
+ self.upstream_client = None
- if self.upstream_client is not None:
- await self.upstream_client.close()
+ try:
+ await super().process_requests()
+ finally:
+ if self.upstream_client is not None:
+ await self.upstream_client.close()
async def dispatch_message(self, msg):
for k in self.handlers.keys():
if k in msg:
- logger.debug('Handling %s' % k)
- if 'stream' in k:
- await self.handlers[k](msg[k])
+ self.logger.debug("Handling %s" % k)
+ if "stream" in k:
+ return await self.handlers[k](msg[k])
else:
- with self.request_stats.start_sample() as self.request_sample, \
- self.request_sample.measure():
- await self.handlers[k](msg[k])
- return
+ with self.server.request_stats.start_sample() as self.request_sample, self.request_sample.measure():
+ return await self.handlers[k](msg[k])
raise bb.asyncrpc.ClientError("Unrecognized command %r" % msg)
+ @permissions(READ_PERM)
async def handle_get(self, request):
- method = request['method']
- taskhash = request['taskhash']
- fetch_all = request.get('all', False)
+ method = request["method"]
+ taskhash = request["taskhash"]
+ fetch_all = request.get("all", False)
- with closing(self.db.cursor()) as cursor:
- d = await self.get_unihash(cursor, method, taskhash, fetch_all)
+ return await self.get_unihash(method, taskhash, fetch_all)
- self.write_message(d)
-
- async def get_unihash(self, cursor, method, taskhash, fetch_all=False):
+ async def get_unihash(self, method, taskhash, fetch_all=False):
d = None
if fetch_all:
- cursor.execute(
- '''
- SELECT *, unihashes_v2.unihash AS unihash FROM outhashes_v2
- INNER JOIN unihashes_v2 ON unihashes_v2.method=outhashes_v2.method AND unihashes_v2.taskhash=outhashes_v2.taskhash
- WHERE outhashes_v2.method=:method AND outhashes_v2.taskhash=:taskhash
- ORDER BY outhashes_v2.created ASC
- LIMIT 1
- ''',
- {
- 'method': method,
- 'taskhash': taskhash,
- }
-
- )
- row = cursor.fetchone()
-
+ row = await self.db.get_unihash_by_taskhash_full(method, taskhash)
if row is not None:
d = {k: row[k] for k in row.keys()}
elif self.upstream_client is not None:
d = await self.upstream_client.get_taskhash(method, taskhash, True)
- self.update_unified(cursor, d)
- self.db.commit()
+ await self.update_unified(d)
else:
- row = self.query_equivalent(cursor, method, taskhash)
+ row = await self.db.get_equivalent(method, taskhash)
if row is not None:
d = {k: row[k] for k in row.keys()}
elif self.upstream_client is not None:
d = await self.upstream_client.get_taskhash(method, taskhash)
- d = {k: v for k, v in d.items() if k in UNIHASH_TABLE_COLUMNS}
- insert_unihash(cursor, d, Resolve.IGNORE)
- self.db.commit()
+ await self.db.insert_unihash(d["method"], d["taskhash"], d["unihash"])
return d
+ @permissions(READ_PERM)
async def handle_get_outhash(self, request):
- method = request['method']
- outhash = request['outhash']
- taskhash = request['taskhash']
+ method = request["method"]
+ outhash = request["outhash"]
+ taskhash = request["taskhash"]
+ with_unihash = request.get("with_unihash", True)
- with closing(self.db.cursor()) as cursor:
- d = await self.get_outhash(cursor, method, outhash, taskhash)
+ return await self.get_outhash(method, outhash, taskhash, with_unihash)
- self.write_message(d)
-
- async def get_outhash(self, cursor, method, outhash, taskhash):
+ async def get_outhash(self, method, outhash, taskhash, with_unihash=True):
d = None
- cursor.execute(
- '''
- SELECT *, unihashes_v2.unihash AS unihash FROM outhashes_v2
- INNER JOIN unihashes_v2 ON unihashes_v2.method=outhashes_v2.method AND unihashes_v2.taskhash=outhashes_v2.taskhash
- WHERE outhashes_v2.method=:method AND outhashes_v2.outhash=:outhash
- ORDER BY outhashes_v2.created ASC
- LIMIT 1
- ''',
- {
- 'method': method,
- 'outhash': outhash,
- }
- )
- row = cursor.fetchone()
+ if with_unihash:
+ row = await self.db.get_unihash_by_outhash(method, outhash)
+ else:
+ row = await self.db.get_outhash(method, outhash)
if row is not None:
d = {k: row[k] for k in row.keys()}
elif self.upstream_client is not None:
d = await self.upstream_client.get_outhash(method, outhash, taskhash)
- self.update_unified(cursor, d)
- self.db.commit()
+ await self.update_unified(d)
return d
- def update_unified(self, cursor, data):
+ async def update_unified(self, data):
if data is None:
return
- insert_unihash(
- cursor,
- {k: v for k, v in data.items() if k in UNIHASH_TABLE_COLUMNS},
- Resolve.IGNORE
- )
- insert_outhash(
- cursor,
- {k: v for k, v in data.items() if k in OUTHASH_TABLE_COLUMNS},
- Resolve.IGNORE
- )
+ await self.db.insert_unihash(data["method"], data["taskhash"], data["unihash"])
+ await self.db.insert_outhash(data)
- async def handle_get_stream(self, request):
- self.write_message('ok')
+ async def _stream_handler(self, handler):
+ await self.socket.send_message("ok")
while True:
upstream = None
- l = await self.reader.readline()
+ l = await self.socket.recv()
if not l:
- return
+ break
try:
# This inner loop is very sensitive and must be as fast as
# possible (which is why the request sample is handled manually
# instead of using 'with', and also why logging statements are
# commented out.
- self.request_sample = self.request_stats.start_sample()
+ self.request_sample = self.server.request_stats.start_sample()
request_measure = self.request_sample.measure()
request_measure.start()
- l = l.decode('utf-8').rstrip()
- if l == 'END':
- self.writer.write('ok\n'.encode('utf-8'))
- return
-
- (method, taskhash) = l.split()
- #logger.debug('Looking up %s %s' % (method, taskhash))
- cursor = self.db.cursor()
- try:
- row = self.query_equivalent(cursor, method, taskhash)
- finally:
- cursor.close()
-
- if row is not None:
- msg = ('%s\n' % row['unihash']).encode('utf-8')
- #logger.debug('Found equivalent task %s -> %s', (row['taskhash'], row['unihash']))
- elif self.upstream_client is not None:
- upstream = await self.upstream_client.get_unihash(method, taskhash)
- if upstream:
- msg = ("%s\n" % upstream).encode("utf-8")
- else:
- msg = "\n".encode("utf-8")
- else:
- msg = '\n'.encode('utf-8')
+ if l == "END":
+ break
- self.writer.write(msg)
+ msg = await handler(l)
+ await self.socket.send(msg)
finally:
request_measure.end()
self.request_sample.end()
- await self.writer.drain()
+ await self.socket.send("ok")
+ return self.NO_RESPONSE
- # Post to the backfill queue after writing the result to minimize
- # the turn around time on a request
- if upstream is not None:
- await self.backfill_queue.put((method, taskhash))
+ @permissions(READ_PERM)
+ async def handle_get_stream(self, request):
+ async def handler(l):
+ (method, taskhash) = l.split()
+ # self.logger.debug('Looking up %s %s' % (method, taskhash))
+ row = await self.db.get_equivalent(method, taskhash)
- async def handle_report(self, data):
- with closing(self.db.cursor()) as cursor:
- outhash_data = {
- 'method': data['method'],
- 'outhash': data['outhash'],
- 'taskhash': data['taskhash'],
- 'created': datetime.now()
- }
+ if row is not None:
+ # self.logger.debug('Found equivalent task %s -> %s', (row['taskhash'], row['unihash']))
+ return row["unihash"]
- for k in ('owner', 'PN', 'PV', 'PR', 'task', 'outhash_siginfo'):
- if k in data:
- outhash_data[k] = data[k]
-
- # Insert the new entry, unless it already exists
- (rowid, inserted) = insert_outhash(cursor, outhash_data, Resolve.IGNORE)
-
- if inserted:
- # If this row is new, check if it is equivalent to another
- # output hash
- cursor.execute(
- '''
- SELECT outhashes_v2.taskhash AS taskhash, unihashes_v2.unihash AS unihash FROM outhashes_v2
- INNER JOIN unihashes_v2 ON unihashes_v2.method=outhashes_v2.method AND unihashes_v2.taskhash=outhashes_v2.taskhash
- -- Select any matching output hash except the one we just inserted
- WHERE outhashes_v2.method=:method AND outhashes_v2.outhash=:outhash AND outhashes_v2.taskhash!=:taskhash
- -- Pick the oldest hash
- ORDER BY outhashes_v2.created ASC
- LIMIT 1
- ''',
- {
- 'method': data['method'],
- 'outhash': data['outhash'],
- 'taskhash': data['taskhash'],
- }
- )
- row = cursor.fetchone()
+ if self.upstream_client is not None:
+ upstream = await self.upstream_client.get_unihash(method, taskhash)
+ if upstream:
+ await self.server.backfill_queue.put((method, taskhash))
+ return upstream
- if row is not None:
- # A matching output hash was found. Set our taskhash to the
- # same unihash since they are equivalent
- unihash = row['unihash']
- resolve = Resolve.IGNORE
- else:
- # No matching output hash was found. This is probably the
- # first outhash to be added.
- unihash = data['unihash']
- resolve = Resolve.IGNORE
-
- # Query upstream to see if it has a unihash we can use
- if self.upstream_client is not None:
- upstream_data = await self.upstream_client.get_outhash(data['method'], data['outhash'], data['taskhash'])
- if upstream_data is not None:
- unihash = upstream_data['unihash']
-
-
- insert_unihash(
- cursor,
- {
- 'method': data['method'],
- 'taskhash': data['taskhash'],
- 'unihash': unihash,
- },
- resolve
- )
+ return ""
- unihash_data = await self.get_unihash(cursor, data['method'], data['taskhash'])
- if unihash_data is not None:
- unihash = unihash_data['unihash']
- else:
- unihash = data['unihash']
+ return await self._stream_handler(handler)
- self.db.commit()
+ @permissions(READ_PERM)
+ async def handle_exists_stream(self, request):
+ async def handler(l):
+ if await self.db.unihash_exists(l):
+ return "true"
- d = {
- 'taskhash': data['taskhash'],
- 'method': data['method'],
- 'unihash': unihash,
- }
+ if self.upstream_client is not None:
+ if await self.upstream_client.unihash_exists(l):
+ return "true"
- self.write_message(d)
+ return "false"
- async def handle_equivreport(self, data):
- with closing(self.db.cursor()) as cursor:
- insert_data = {
- 'method': data['method'],
- 'taskhash': data['taskhash'],
- 'unihash': data['unihash'],
- }
- insert_unihash(cursor, insert_data, Resolve.IGNORE)
- self.db.commit()
+ return await self._stream_handler(handler)
- # Fetch the unihash that will be reported for the taskhash. If the
- # unihash matches, it means this row was inserted (or the mapping
- # was already valid)
- row = self.query_equivalent(cursor, data['method'], data['taskhash'])
+ async def report_readonly(self, data):
+ method = data["method"]
+ outhash = data["outhash"]
+ taskhash = data["taskhash"]
- if row['unihash'] == data['unihash']:
- logger.info('Adding taskhash equivalence for %s with unihash %s',
- data['taskhash'], row['unihash'])
+ info = await self.get_outhash(method, outhash, taskhash)
+ if info:
+ unihash = info["unihash"]
+ else:
+ unihash = data["unihash"]
- d = {k: row[k] for k in ('taskhash', 'method', 'unihash')}
+ return {
+ "taskhash": taskhash,
+ "method": method,
+ "unihash": unihash,
+ }
- self.write_message(d)
+ # Since this can be called either read only or to report, the check to
+ # report is made inside the function
+ @permissions(READ_PERM)
+ async def handle_report(self, data):
+ if self.server.read_only or not self.user_has_permissions(REPORT_PERM):
+ return await self.report_readonly(data)
+
+ outhash_data = {
+ "method": data["method"],
+ "outhash": data["outhash"],
+ "taskhash": data["taskhash"],
+ "created": datetime.now(),
+ }
+ for k in ("owner", "PN", "PV", "PR", "task", "outhash_siginfo"):
+ if k in data:
+ outhash_data[k] = data[k]
- async def handle_get_stats(self, request):
- d = {
- 'requests': self.request_stats.todict(),
+ if self.user:
+ outhash_data["owner"] = self.user.username
+
+ # Insert the new entry, unless it already exists
+ if await self.db.insert_outhash(outhash_data):
+ # If this row is new, check if it is equivalent to another
+ # output hash
+ row = await self.db.get_equivalent_for_outhash(
+ data["method"], data["outhash"], data["taskhash"]
+ )
+
+ if row is not None:
+ # A matching output hash was found. Set our taskhash to the
+ # same unihash since they are equivalent
+ unihash = row["unihash"]
+ else:
+ # No matching output hash was found. This is probably the
+ # first outhash to be added.
+ unihash = data["unihash"]
+
+ # Query upstream to see if it has a unihash we can use
+ if self.upstream_client is not None:
+ upstream_data = await self.upstream_client.get_outhash(
+ data["method"], data["outhash"], data["taskhash"]
+ )
+ if upstream_data is not None:
+ unihash = upstream_data["unihash"]
+
+ await self.db.insert_unihash(data["method"], data["taskhash"], unihash)
+
+ unihash_data = await self.get_unihash(data["method"], data["taskhash"])
+ if unihash_data is not None:
+ unihash = unihash_data["unihash"]
+ else:
+ unihash = data["unihash"]
+
+ return {
+ "taskhash": data["taskhash"],
+ "method": data["method"],
+ "unihash": unihash,
}
- self.write_message(d)
+ @permissions(READ_PERM, REPORT_PERM)
+ async def handle_equivreport(self, data):
+ await self.db.insert_unihash(data["method"], data["taskhash"], data["unihash"])
+
+ # Fetch the unihash that will be reported for the taskhash. If the
+ # unihash matches, it means this row was inserted (or the mapping
+ # was already valid)
+ row = await self.db.get_equivalent(data["method"], data["taskhash"])
+
+ if row["unihash"] == data["unihash"]:
+ self.logger.info(
+ "Adding taskhash equivalence for %s with unihash %s",
+ data["taskhash"],
+ row["unihash"],
+ )
+ return {k: row[k] for k in ("taskhash", "method", "unihash")}
+
+ @permissions(READ_PERM)
+ async def handle_get_stats(self, request):
+ return {
+ "requests": self.server.request_stats.todict(),
+ }
+
+ @permissions(DB_ADMIN_PERM)
async def handle_reset_stats(self, request):
d = {
- 'requests': self.request_stats.todict(),
+ "requests": self.server.request_stats.todict(),
}
- self.request_stats.reset()
- self.write_message(d)
+ self.server.request_stats.reset()
+ return d
+ @permissions(READ_PERM)
async def handle_backfill_wait(self, request):
d = {
- 'tasks': self.backfill_queue.qsize(),
+ "tasks": self.server.backfill_queue.qsize(),
}
- await self.backfill_queue.join()
- self.write_message(d)
+ await self.server.backfill_queue.join()
+ return d
- def query_equivalent(self, cursor, method, taskhash):
- # This is part of the inner loop and must be as fast as possible
- cursor.execute(
- 'SELECT taskhash, method, unihash FROM unihashes_v2 WHERE method=:method AND taskhash=:taskhash',
- {
- 'method': method,
- 'taskhash': taskhash,
- }
+ @permissions(DB_ADMIN_PERM)
+ async def handle_remove(self, request):
+ condition = request["where"]
+ if not isinstance(condition, dict):
+ raise TypeError("Bad condition type %s" % type(condition))
+
+ return {"count": await self.db.remove(condition)}
+
+ @permissions(DB_ADMIN_PERM)
+ async def handle_gc_mark(self, request):
+ condition = request["where"]
+ mark = request["mark"]
+
+ if not isinstance(condition, dict):
+ raise TypeError("Bad condition type %s" % type(condition))
+
+ if not isinstance(mark, str):
+ raise TypeError("Bad mark type %s" % type(mark))
+
+ return {"count": await self.db.gc_mark(mark, condition)}
+
+ @permissions(DB_ADMIN_PERM)
+ async def handle_gc_sweep(self, request):
+ mark = request["mark"]
+
+ if not isinstance(mark, str):
+ raise TypeError("Bad mark type %s" % type(mark))
+
+ current_mark = await self.db.get_current_gc_mark()
+
+ if not current_mark or mark != current_mark:
+ raise bb.asyncrpc.InvokeError(
+ f"'{mark}' is not the current mark. Refusing to sweep"
+ )
+
+ count = await self.db.gc_sweep()
+
+ return {"count": count}
+
+ @permissions(DB_ADMIN_PERM)
+ async def handle_gc_status(self, request):
+ (keep_rows, remove_rows, current_mark) = await self.db.gc_status()
+ return {
+ "keep": keep_rows,
+ "remove": remove_rows,
+ "mark": current_mark,
+ }
+
+ @permissions(DB_ADMIN_PERM)
+ async def handle_clean_unused(self, request):
+ max_age = request["max_age_seconds"]
+ oldest = datetime.now() - timedelta(seconds=-max_age)
+ return {"count": await self.db.clean_unused(oldest)}
+
+ @permissions(DB_ADMIN_PERM)
+ async def handle_get_db_usage(self, request):
+ return {"usage": await self.db.get_usage()}
+
+ @permissions(DB_ADMIN_PERM)
+ async def handle_get_db_query_columns(self, request):
+ return {"columns": await self.db.get_query_columns()}
+
+ # The authentication API is always allowed
+ async def handle_auth(self, request):
+ username = str(request["username"])
+ token = str(request["token"])
+
+ async def fail_auth():
+ nonlocal username
+ # Rate limit bad login attempts
+ await asyncio.sleep(1)
+ raise bb.asyncrpc.InvokeError(f"Unable to authenticate as {username}")
+
+ user, db_token = await self.db.lookup_user_token(username)
+
+ if not user or not db_token:
+ await fail_auth()
+
+ try:
+ algo, salt, _ = db_token.split(":")
+ except ValueError:
+ await fail_auth()
+
+ if hash_token(algo, salt, token) != db_token:
+ await fail_auth()
+
+ self.user = user
+
+ self.logger.info("Authenticated as %s", username)
+
+ return {
+ "result": True,
+ "username": self.user.username,
+ "permissions": sorted(list(self.user.permissions)),
+ }
+
+ @permissions(USER_ADMIN_PERM, allow_self_service=True, allow_anon=False)
+ async def handle_refresh_token(self, request):
+ username = str(request["username"])
+
+ token = await new_token()
+
+ updated = await self.db.set_user_token(
+ username,
+ hash_token(TOKEN_ALGORITHM, new_salt(), token),
)
- return cursor.fetchone()
+ if not updated:
+ self.raise_no_user_error(username)
+
+ return {"username": username, "token": token}
+
+ def get_perm_arg(self, arg):
+ if not isinstance(arg, list):
+ raise bb.asyncrpc.InvokeError("Unexpected type for permissions")
+
+ arg = set(arg)
+ try:
+ arg.remove(NONE_PERM)
+ except KeyError:
+ pass
+
+ unknown_perms = arg - ALL_PERMISSIONS
+ if unknown_perms:
+ raise bb.asyncrpc.InvokeError(
+ "Unknown permissions %s" % ", ".join(sorted(list(unknown_perms)))
+ )
+
+ return sorted(list(arg))
+
+ def return_perms(self, permissions):
+ if ALL_PERM in permissions:
+ return sorted(list(ALL_PERMISSIONS))
+ return sorted(list(permissions))
+
+ @permissions(USER_ADMIN_PERM, allow_anon=False)
+ async def handle_set_perms(self, request):
+ username = str(request["username"])
+ permissions = self.get_perm_arg(request["permissions"])
+
+ if not await self.db.set_user_perms(username, permissions):
+ self.raise_no_user_error(username)
+
+ return {
+ "username": username,
+ "permissions": self.return_perms(permissions),
+ }
+
+ @permissions(USER_ADMIN_PERM, allow_self_service=True, allow_anon=False)
+ async def handle_get_user(self, request):
+ username = str(request["username"])
+
+ user = await self.db.lookup_user(username)
+ if user is None:
+ return None
+
+ return {
+ "username": user.username,
+ "permissions": self.return_perms(user.permissions),
+ }
+
+ @permissions(USER_ADMIN_PERM, allow_anon=False)
+ async def handle_get_all_users(self, request):
+ users = await self.db.get_all_users()
+ return {
+ "users": [
+ {
+ "username": u.username,
+ "permissions": self.return_perms(u.permissions),
+ }
+ for u in users
+ ]
+ }
+
+ @permissions(USER_ADMIN_PERM, allow_anon=False)
+ async def handle_new_user(self, request):
+ username = str(request["username"])
+ permissions = self.get_perm_arg(request["permissions"])
+
+ token = await new_token()
+
+ inserted = await self.db.new_user(
+ username,
+ permissions,
+ hash_token(TOKEN_ALGORITHM, new_salt(), token),
+ )
+ if not inserted:
+ raise bb.asyncrpc.InvokeError(f"Cannot create new user '{username}'")
+
+ return {
+ "username": username,
+ "permissions": self.return_perms(permissions),
+ "token": token,
+ }
+
+ @permissions(USER_ADMIN_PERM, allow_self_service=True, allow_anon=False)
+ async def handle_delete_user(self, request):
+ username = str(request["username"])
+
+ if not await self.db.delete_user(username):
+ self.raise_no_user_error(username)
+
+ return {"username": username}
+
+ @permissions(USER_ADMIN_PERM, allow_anon=False)
+ async def handle_become_user(self, request):
+ username = str(request["username"])
+
+ user = await self.db.lookup_user(username)
+ if user is None:
+ raise bb.asyncrpc.InvokeError(f"User {username} doesn't exist")
+
+ self.user = user
+
+ self.logger.info("Became user %s", username)
+
+ return {
+ "username": self.user.username,
+ "permissions": self.return_perms(self.user.permissions),
+ }
class Server(bb.asyncrpc.AsyncServer):
- def __init__(self, db, upstream=None, read_only=False):
+ def __init__(
+ self,
+ db_engine,
+ upstream=None,
+ read_only=False,
+ anon_perms=DEFAULT_ANON_PERMS,
+ admin_username=None,
+ admin_password=None,
+ ):
if upstream and read_only:
- raise bb.asyncrpc.ServerError("Read-only hashserv cannot pull from an upstream server")
+ raise bb.asyncrpc.ServerError(
+ "Read-only hashserv cannot pull from an upstream server"
+ )
+
+ disallowed_perms = set(anon_perms) - set(
+ [NONE_PERM, READ_PERM, REPORT_PERM, DB_ADMIN_PERM]
+ )
+
+ if disallowed_perms:
+ raise bb.asyncrpc.ServerError(
+ f"Permission(s) {' '.join(disallowed_perms)} are not allowed for anonymous users"
+ )
super().__init__(logger)
self.request_stats = Stats()
- self.db = db
+ self.db_engine = db_engine
self.upstream = upstream
self.read_only = read_only
+ self.backfill_queue = None
+ self.anon_perms = set(anon_perms)
+ self.admin_username = admin_username
+ self.admin_password = admin_password
- def accept_client(self, reader, writer):
- return ServerClient(reader, writer, self.db, self.request_stats, self.backfill_queue, self.upstream, self.read_only)
+ self.logger.info(
+ "Anonymous user permissions are: %s", ", ".join(self.anon_perms)
+ )
- @contextmanager
- def _backfill_worker(self):
- async def backfill_worker_task():
- client = await create_async_client(self.upstream)
- try:
- while True:
- item = await self.backfill_queue.get()
- if item is None:
- self.backfill_queue.task_done()
- break
- method, taskhash = item
- await copy_unihash_from_upstream(client, self.db, method, taskhash)
+ def accept_client(self, socket):
+ return ServerClient(socket, self)
+
+ async def create_admin_user(self):
+ admin_permissions = (ALL_PERM,)
+ async with self.db_engine.connect(self.logger) as db:
+ added = await db.new_user(
+ self.admin_username,
+ admin_permissions,
+ hash_token(TOKEN_ALGORITHM, new_salt(), self.admin_password),
+ )
+ if added:
+ self.logger.info("Created admin user '%s'", self.admin_username)
+ else:
+ await db.set_user_perms(
+ self.admin_username,
+ admin_permissions,
+ )
+ await db.set_user_token(
+ self.admin_username,
+ hash_token(TOKEN_ALGORITHM, new_salt(), self.admin_password),
+ )
+ self.logger.info("Admin user '%s' updated", self.admin_username)
+
+ async def backfill_worker_task(self):
+ async with await create_async_client(
+ self.upstream
+ ) as client, self.db_engine.connect(self.logger) as db:
+ while True:
+ item = await self.backfill_queue.get()
+ if item is None:
self.backfill_queue.task_done()
- finally:
- await client.close()
+ break
- async def join_worker(worker):
- await self.backfill_queue.put(None)
- await worker
+ method, taskhash = item
+ d = await client.get_taskhash(method, taskhash)
+ if d is not None:
+ await db.insert_unihash(d["method"], d["taskhash"], d["unihash"])
+ self.backfill_queue.task_done()
- if self.upstream is not None:
- worker = asyncio.ensure_future(backfill_worker_task())
- try:
- yield
- finally:
- self.loop.run_until_complete(join_worker(worker))
- else:
- yield
+ def start(self):
+ tasks = super().start()
+ if self.upstream:
+ self.backfill_queue = asyncio.Queue()
+ tasks += [self.backfill_worker_task()]
+
+ self.loop.run_until_complete(self.db_engine.create())
- def run_loop_forever(self):
- self.backfill_queue = asyncio.Queue()
+ if self.admin_username:
+ self.loop.run_until_complete(self.create_admin_user())
- with self._backfill_worker():
- super().run_loop_forever()
+ return tasks
+
+ async def stop(self):
+ if self.backfill_queue is not None:
+ await self.backfill_queue.put(None)
+ await super().stop()
diff --git a/bitbake/lib/hashserv/sqlalchemy.py b/bitbake/lib/hashserv/sqlalchemy.py
new file mode 100644
index 0000000000..f7b0226a7a
--- /dev/null
+++ b/bitbake/lib/hashserv/sqlalchemy.py
@@ -0,0 +1,598 @@
+#! /usr/bin/env python3
+#
+# Copyright (C) 2023 Garmin Ltd.
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
+import logging
+from datetime import datetime
+from . import User
+
+from sqlalchemy.ext.asyncio import create_async_engine
+from sqlalchemy.pool import NullPool
+from sqlalchemy import (
+ MetaData,
+ Column,
+ Table,
+ Text,
+ Integer,
+ UniqueConstraint,
+ DateTime,
+ Index,
+ select,
+ insert,
+ exists,
+ literal,
+ and_,
+ delete,
+ update,
+ func,
+ inspect,
+)
+import sqlalchemy.engine
+from sqlalchemy.orm import declarative_base
+from sqlalchemy.exc import IntegrityError
+from sqlalchemy.dialects.postgresql import insert as postgres_insert
+
+Base = declarative_base()
+
+
+class UnihashesV3(Base):
+ __tablename__ = "unihashes_v3"
+ id = Column(Integer, primary_key=True, autoincrement=True)
+ method = Column(Text, nullable=False)
+ taskhash = Column(Text, nullable=False)
+ unihash = Column(Text, nullable=False)
+ gc_mark = Column(Text, nullable=False)
+
+ __table_args__ = (
+ UniqueConstraint("method", "taskhash"),
+ Index("taskhash_lookup_v4", "method", "taskhash"),
+ Index("unihash_lookup_v1", "unihash"),
+ )
+
+
+class OuthashesV2(Base):
+ __tablename__ = "outhashes_v2"
+ id = Column(Integer, primary_key=True, autoincrement=True)
+ method = Column(Text, nullable=False)
+ taskhash = Column(Text, nullable=False)
+ outhash = Column(Text, nullable=False)
+ created = Column(DateTime)
+ owner = Column(Text)
+ PN = Column(Text)
+ PV = Column(Text)
+ PR = Column(Text)
+ task = Column(Text)
+ outhash_siginfo = Column(Text)
+
+ __table_args__ = (
+ UniqueConstraint("method", "taskhash", "outhash"),
+ Index("outhash_lookup_v3", "method", "outhash"),
+ )
+
+
+class Users(Base):
+ __tablename__ = "users"
+ id = Column(Integer, primary_key=True, autoincrement=True)
+ username = Column(Text, nullable=False)
+ token = Column(Text, nullable=False)
+ permissions = Column(Text)
+
+ __table_args__ = (UniqueConstraint("username"),)
+
+
+class Config(Base):
+ __tablename__ = "config"
+ id = Column(Integer, primary_key=True, autoincrement=True)
+ name = Column(Text, nullable=False)
+ value = Column(Text)
+ __table_args__ = (
+ UniqueConstraint("name"),
+ Index("config_lookup", "name"),
+ )
+
+
+#
+# Old table versions
+#
+DeprecatedBase = declarative_base()
+
+
+class UnihashesV2(DeprecatedBase):
+ __tablename__ = "unihashes_v2"
+ id = Column(Integer, primary_key=True, autoincrement=True)
+ method = Column(Text, nullable=False)
+ taskhash = Column(Text, nullable=False)
+ unihash = Column(Text, nullable=False)
+
+ __table_args__ = (
+ UniqueConstraint("method", "taskhash"),
+ Index("taskhash_lookup_v3", "method", "taskhash"),
+ )
+
+
+class DatabaseEngine(object):
+ def __init__(self, url, username=None, password=None):
+ self.logger = logging.getLogger("hashserv.sqlalchemy")
+ self.url = sqlalchemy.engine.make_url(url)
+
+ if username is not None:
+ self.url = self.url.set(username=username)
+
+ if password is not None:
+ self.url = self.url.set(password=password)
+
+ async def create(self):
+ def check_table_exists(conn, name):
+ return inspect(conn).has_table(name)
+
+ self.logger.info("Using database %s", self.url)
+ if self.url.drivername == 'postgresql+psycopg':
+ # Psygopg 3 (psygopg) driver can handle async connection pooling
+ self.engine = create_async_engine(self.url, max_overflow=-1)
+ else:
+ self.engine = create_async_engine(self.url, poolclass=NullPool)
+
+ async with self.engine.begin() as conn:
+ # Create tables
+ self.logger.info("Creating tables...")
+ await conn.run_sync(Base.metadata.create_all)
+
+ if await conn.run_sync(check_table_exists, UnihashesV2.__tablename__):
+ self.logger.info("Upgrading Unihashes V2 -> V3...")
+ statement = insert(UnihashesV3).from_select(
+ ["id", "method", "unihash", "taskhash", "gc_mark"],
+ select(
+ UnihashesV2.id,
+ UnihashesV2.method,
+ UnihashesV2.unihash,
+ UnihashesV2.taskhash,
+ literal("").label("gc_mark"),
+ ),
+ )
+ self.logger.debug("%s", statement)
+ await conn.execute(statement)
+
+ await conn.run_sync(Base.metadata.drop_all, [UnihashesV2.__table__])
+ self.logger.info("Upgrade complete")
+
+ def connect(self, logger):
+ return Database(self.engine, logger)
+
+
+def map_row(row):
+ if row is None:
+ return None
+ return dict(**row._mapping)
+
+
+def map_user(row):
+ if row is None:
+ return None
+ return User(
+ username=row.username,
+ permissions=set(row.permissions.split()),
+ )
+
+
+def _make_condition_statement(table, condition):
+ where = {}
+ for c in table.__table__.columns:
+ if c.key in condition and condition[c.key] is not None:
+ where[c] = condition[c.key]
+
+ return [(k == v) for k, v in where.items()]
+
+
+class Database(object):
+ def __init__(self, engine, logger):
+ self.engine = engine
+ self.db = None
+ self.logger = logger
+
+ async def __aenter__(self):
+ self.db = await self.engine.connect()
+ return self
+
+ async def __aexit__(self, exc_type, exc_value, traceback):
+ await self.close()
+
+ async def close(self):
+ await self.db.close()
+ self.db = None
+
+ async def _execute(self, statement):
+ self.logger.debug("%s", statement)
+ return await self.db.execute(statement)
+
+ async def _set_config(self, name, value):
+ while True:
+ result = await self._execute(
+ update(Config).where(Config.name == name).values(value=value)
+ )
+
+ if result.rowcount == 0:
+ self.logger.debug("Config '%s' not found. Adding it", name)
+ try:
+ await self._execute(insert(Config).values(name=name, value=value))
+ except IntegrityError:
+ # Race. Try again
+ continue
+
+ break
+
+ def _get_config_subquery(self, name, default=None):
+ if default is not None:
+ return func.coalesce(
+ select(Config.value).where(Config.name == name).scalar_subquery(),
+ default,
+ )
+ return select(Config.value).where(Config.name == name).scalar_subquery()
+
+ async def _get_config(self, name):
+ result = await self._execute(select(Config.value).where(Config.name == name))
+ row = result.first()
+ if row is None:
+ return None
+ return row.value
+
+ async def get_unihash_by_taskhash_full(self, method, taskhash):
+ async with self.db.begin():
+ result = await self._execute(
+ select(
+ OuthashesV2,
+ UnihashesV3.unihash.label("unihash"),
+ )
+ .join(
+ UnihashesV3,
+ and_(
+ UnihashesV3.method == OuthashesV2.method,
+ UnihashesV3.taskhash == OuthashesV2.taskhash,
+ ),
+ )
+ .where(
+ OuthashesV2.method == method,
+ OuthashesV2.taskhash == taskhash,
+ )
+ .order_by(
+ OuthashesV2.created.asc(),
+ )
+ .limit(1)
+ )
+ return map_row(result.first())
+
+ async def get_unihash_by_outhash(self, method, outhash):
+ async with self.db.begin():
+ result = await self._execute(
+ select(OuthashesV2, UnihashesV3.unihash.label("unihash"))
+ .join(
+ UnihashesV3,
+ and_(
+ UnihashesV3.method == OuthashesV2.method,
+ UnihashesV3.taskhash == OuthashesV2.taskhash,
+ ),
+ )
+ .where(
+ OuthashesV2.method == method,
+ OuthashesV2.outhash == outhash,
+ )
+ .order_by(
+ OuthashesV2.created.asc(),
+ )
+ .limit(1)
+ )
+ return map_row(result.first())
+
+ async def unihash_exists(self, unihash):
+ async with self.db.begin():
+ result = await self._execute(
+ select(UnihashesV3).where(UnihashesV3.unihash == unihash).limit(1)
+ )
+
+ return result.first() is not None
+
+ async def get_outhash(self, method, outhash):
+ async with self.db.begin():
+ result = await self._execute(
+ select(OuthashesV2)
+ .where(
+ OuthashesV2.method == method,
+ OuthashesV2.outhash == outhash,
+ )
+ .order_by(
+ OuthashesV2.created.asc(),
+ )
+ .limit(1)
+ )
+ return map_row(result.first())
+
+ async def get_equivalent_for_outhash(self, method, outhash, taskhash):
+ async with self.db.begin():
+ result = await self._execute(
+ select(
+ OuthashesV2.taskhash.label("taskhash"),
+ UnihashesV3.unihash.label("unihash"),
+ )
+ .join(
+ UnihashesV3,
+ and_(
+ UnihashesV3.method == OuthashesV2.method,
+ UnihashesV3.taskhash == OuthashesV2.taskhash,
+ ),
+ )
+ .where(
+ OuthashesV2.method == method,
+ OuthashesV2.outhash == outhash,
+ OuthashesV2.taskhash != taskhash,
+ )
+ .order_by(
+ OuthashesV2.created.asc(),
+ )
+ .limit(1)
+ )
+ return map_row(result.first())
+
+ async def get_equivalent(self, method, taskhash):
+ async with self.db.begin():
+ result = await self._execute(
+ select(
+ UnihashesV3.unihash,
+ UnihashesV3.method,
+ UnihashesV3.taskhash,
+ ).where(
+ UnihashesV3.method == method,
+ UnihashesV3.taskhash == taskhash,
+ )
+ )
+ return map_row(result.first())
+
+ async def remove(self, condition):
+ async def do_remove(table):
+ where = _make_condition_statement(table, condition)
+ if where:
+ async with self.db.begin():
+ result = await self._execute(delete(table).where(*where))
+ return result.rowcount
+
+ return 0
+
+ count = 0
+ count += await do_remove(UnihashesV3)
+ count += await do_remove(OuthashesV2)
+
+ return count
+
+ async def get_current_gc_mark(self):
+ async with self.db.begin():
+ return await self._get_config("gc-mark")
+
+ async def gc_status(self):
+ async with self.db.begin():
+ gc_mark_subquery = self._get_config_subquery("gc-mark", "")
+
+ result = await self._execute(
+ select(func.count())
+ .select_from(UnihashesV3)
+ .where(UnihashesV3.gc_mark == gc_mark_subquery)
+ )
+ keep_rows = result.scalar()
+
+ result = await self._execute(
+ select(func.count())
+ .select_from(UnihashesV3)
+ .where(UnihashesV3.gc_mark != gc_mark_subquery)
+ )
+ remove_rows = result.scalar()
+
+ return (keep_rows, remove_rows, await self._get_config("gc-mark"))
+
+ async def gc_mark(self, mark, condition):
+ async with self.db.begin():
+ await self._set_config("gc-mark", mark)
+
+ where = _make_condition_statement(UnihashesV3, condition)
+ if not where:
+ return 0
+
+ result = await self._execute(
+ update(UnihashesV3)
+ .values(gc_mark=self._get_config_subquery("gc-mark", ""))
+ .where(*where)
+ )
+ return result.rowcount
+
+ async def gc_sweep(self):
+ async with self.db.begin():
+ result = await self._execute(
+ delete(UnihashesV3).where(
+ # A sneaky conditional that provides some errant use
+ # protection: If the config mark is NULL, this will not
+ # match any rows because No default is specified in the
+ # select statement
+ UnihashesV3.gc_mark
+ != self._get_config_subquery("gc-mark")
+ )
+ )
+ await self._set_config("gc-mark", None)
+
+ return result.rowcount
+
+ async def clean_unused(self, oldest):
+ async with self.db.begin():
+ result = await self._execute(
+ delete(OuthashesV2).where(
+ OuthashesV2.created < oldest,
+ ~(
+ select(UnihashesV3.id)
+ .where(
+ UnihashesV3.method == OuthashesV2.method,
+ UnihashesV3.taskhash == OuthashesV2.taskhash,
+ )
+ .limit(1)
+ .exists()
+ ),
+ )
+ )
+ return result.rowcount
+
+ async def insert_unihash(self, method, taskhash, unihash):
+ # Postgres specific ignore on insert duplicate
+ if self.engine.name == "postgresql":
+ statement = (
+ postgres_insert(UnihashesV3)
+ .values(
+ method=method,
+ taskhash=taskhash,
+ unihash=unihash,
+ gc_mark=self._get_config_subquery("gc-mark", ""),
+ )
+ .on_conflict_do_nothing(index_elements=("method", "taskhash"))
+ )
+ else:
+ statement = insert(UnihashesV3).values(
+ method=method,
+ taskhash=taskhash,
+ unihash=unihash,
+ gc_mark=self._get_config_subquery("gc-mark", ""),
+ )
+
+ try:
+ async with self.db.begin():
+ result = await self._execute(statement)
+ return result.rowcount != 0
+ except IntegrityError:
+ self.logger.debug(
+ "%s, %s, %s already in unihash database", method, taskhash, unihash
+ )
+ return False
+
+ async def insert_outhash(self, data):
+ outhash_columns = set(c.key for c in OuthashesV2.__table__.columns)
+
+ data = {k: v for k, v in data.items() if k in outhash_columns}
+
+ if "created" in data and not isinstance(data["created"], datetime):
+ data["created"] = datetime.fromisoformat(data["created"])
+
+ # Postgres specific ignore on insert duplicate
+ if self.engine.name == "postgresql":
+ statement = (
+ postgres_insert(OuthashesV2)
+ .values(**data)
+ .on_conflict_do_nothing(
+ index_elements=("method", "taskhash", "outhash")
+ )
+ )
+ else:
+ statement = insert(OuthashesV2).values(**data)
+
+ try:
+ async with self.db.begin():
+ result = await self._execute(statement)
+ return result.rowcount != 0
+ except IntegrityError:
+ self.logger.debug(
+ "%s, %s already in outhash database", data["method"], data["outhash"]
+ )
+ return False
+
+ async def _get_user(self, username):
+ async with self.db.begin():
+ result = await self._execute(
+ select(
+ Users.username,
+ Users.permissions,
+ Users.token,
+ ).where(
+ Users.username == username,
+ )
+ )
+ return result.first()
+
+ async def lookup_user_token(self, username):
+ row = await self._get_user(username)
+ if not row:
+ return None, None
+ return map_user(row), row.token
+
+ async def lookup_user(self, username):
+ return map_user(await self._get_user(username))
+
+ async def set_user_token(self, username, token):
+ async with self.db.begin():
+ result = await self._execute(
+ update(Users)
+ .where(
+ Users.username == username,
+ )
+ .values(
+ token=token,
+ )
+ )
+ return result.rowcount != 0
+
+ async def set_user_perms(self, username, permissions):
+ async with self.db.begin():
+ result = await self._execute(
+ update(Users)
+ .where(Users.username == username)
+ .values(permissions=" ".join(permissions))
+ )
+ return result.rowcount != 0
+
+ async def get_all_users(self):
+ async with self.db.begin():
+ result = await self._execute(
+ select(
+ Users.username,
+ Users.permissions,
+ )
+ )
+ return [map_user(row) for row in result]
+
+ async def new_user(self, username, permissions, token):
+ try:
+ async with self.db.begin():
+ await self._execute(
+ insert(Users).values(
+ username=username,
+ permissions=" ".join(permissions),
+ token=token,
+ )
+ )
+ return True
+ except IntegrityError as e:
+ self.logger.debug("Cannot create new user %s: %s", username, e)
+ return False
+
+ async def delete_user(self, username):
+ async with self.db.begin():
+ result = await self._execute(
+ delete(Users).where(Users.username == username)
+ )
+ return result.rowcount != 0
+
+ async def get_usage(self):
+ usage = {}
+ async with self.db.begin() as session:
+ for name, table in Base.metadata.tables.items():
+ result = await self._execute(
+ statement=select(func.count()).select_from(table)
+ )
+ usage[name] = {
+ "rows": result.scalar(),
+ }
+
+ return usage
+
+ async def get_query_columns(self):
+ columns = set()
+ for table in (UnihashesV3, OuthashesV2):
+ for c in table.__table__.columns:
+ if not isinstance(c.type, Text):
+ continue
+ columns.add(c.key)
+
+ return list(columns)
diff --git a/bitbake/lib/hashserv/sqlite.py b/bitbake/lib/hashserv/sqlite.py
new file mode 100644
index 0000000000..da2e844a03
--- /dev/null
+++ b/bitbake/lib/hashserv/sqlite.py
@@ -0,0 +1,562 @@
+#! /usr/bin/env python3
+#
+# Copyright (C) 2023 Garmin Ltd.
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+import sqlite3
+import logging
+from contextlib import closing
+from . import User
+
+logger = logging.getLogger("hashserv.sqlite")
+
+UNIHASH_TABLE_DEFINITION = (
+ ("method", "TEXT NOT NULL", "UNIQUE"),
+ ("taskhash", "TEXT NOT NULL", "UNIQUE"),
+ ("unihash", "TEXT NOT NULL", ""),
+ ("gc_mark", "TEXT NOT NULL", ""),
+)
+
+UNIHASH_TABLE_COLUMNS = tuple(name for name, _, _ in UNIHASH_TABLE_DEFINITION)
+
+OUTHASH_TABLE_DEFINITION = (
+ ("method", "TEXT NOT NULL", "UNIQUE"),
+ ("taskhash", "TEXT NOT NULL", "UNIQUE"),
+ ("outhash", "TEXT NOT NULL", "UNIQUE"),
+ ("created", "DATETIME", ""),
+ # Optional fields
+ ("owner", "TEXT", ""),
+ ("PN", "TEXT", ""),
+ ("PV", "TEXT", ""),
+ ("PR", "TEXT", ""),
+ ("task", "TEXT", ""),
+ ("outhash_siginfo", "TEXT", ""),
+)
+
+OUTHASH_TABLE_COLUMNS = tuple(name for name, _, _ in OUTHASH_TABLE_DEFINITION)
+
+USERS_TABLE_DEFINITION = (
+ ("username", "TEXT NOT NULL", "UNIQUE"),
+ ("token", "TEXT NOT NULL", ""),
+ ("permissions", "TEXT NOT NULL", ""),
+)
+
+USERS_TABLE_COLUMNS = tuple(name for name, _, _ in USERS_TABLE_DEFINITION)
+
+
+CONFIG_TABLE_DEFINITION = (
+ ("name", "TEXT NOT NULL", "UNIQUE"),
+ ("value", "TEXT", ""),
+)
+
+CONFIG_TABLE_COLUMNS = tuple(name for name, _, _ in CONFIG_TABLE_DEFINITION)
+
+
+def _make_table(cursor, name, definition):
+ cursor.execute(
+ """
+ CREATE TABLE IF NOT EXISTS {name} (
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
+ {fields}
+ UNIQUE({unique})
+ )
+ """.format(
+ name=name,
+ fields=" ".join("%s %s," % (name, typ) for name, typ, _ in definition),
+ unique=", ".join(
+ name for name, _, flags in definition if "UNIQUE" in flags
+ ),
+ )
+ )
+
+
+def map_user(row):
+ if row is None:
+ return None
+ return User(
+ username=row["username"],
+ permissions=set(row["permissions"].split()),
+ )
+
+
+def _make_condition_statement(columns, condition):
+ where = {}
+ for c in columns:
+ if c in condition and condition[c] is not None:
+ where[c] = condition[c]
+
+ return where, " AND ".join("%s=:%s" % (k, k) for k in where.keys())
+
+
+def _get_sqlite_version(cursor):
+ cursor.execute("SELECT sqlite_version()")
+
+ version = []
+ for v in cursor.fetchone()[0].split("."):
+ try:
+ version.append(int(v))
+ except ValueError:
+ version.append(v)
+
+ return tuple(version)
+
+
+def _schema_table_name(version):
+ if version >= (3, 33):
+ return "sqlite_schema"
+
+ return "sqlite_master"
+
+
+class DatabaseEngine(object):
+ def __init__(self, dbname, sync):
+ self.dbname = dbname
+ self.logger = logger
+ self.sync = sync
+
+ async def create(self):
+ db = sqlite3.connect(self.dbname)
+ db.row_factory = sqlite3.Row
+
+ with closing(db.cursor()) as cursor:
+ _make_table(cursor, "unihashes_v3", UNIHASH_TABLE_DEFINITION)
+ _make_table(cursor, "outhashes_v2", OUTHASH_TABLE_DEFINITION)
+ _make_table(cursor, "users", USERS_TABLE_DEFINITION)
+ _make_table(cursor, "config", CONFIG_TABLE_DEFINITION)
+
+ cursor.execute("PRAGMA journal_mode = WAL")
+ cursor.execute(
+ "PRAGMA synchronous = %s" % ("NORMAL" if self.sync else "OFF")
+ )
+
+ # Drop old indexes
+ cursor.execute("DROP INDEX IF EXISTS taskhash_lookup")
+ cursor.execute("DROP INDEX IF EXISTS outhash_lookup")
+ cursor.execute("DROP INDEX IF EXISTS taskhash_lookup_v2")
+ cursor.execute("DROP INDEX IF EXISTS outhash_lookup_v2")
+ cursor.execute("DROP INDEX IF EXISTS taskhash_lookup_v3")
+
+ # TODO: Upgrade from tasks_v2?
+ cursor.execute("DROP TABLE IF EXISTS tasks_v2")
+
+ # Create new indexes
+ cursor.execute(
+ "CREATE INDEX IF NOT EXISTS taskhash_lookup_v4 ON unihashes_v3 (method, taskhash)"
+ )
+ cursor.execute(
+ "CREATE INDEX IF NOT EXISTS unihash_lookup_v1 ON unihashes_v3 (unihash)"
+ )
+ cursor.execute(
+ "CREATE INDEX IF NOT EXISTS outhash_lookup_v3 ON outhashes_v2 (method, outhash)"
+ )
+ cursor.execute("CREATE INDEX IF NOT EXISTS config_lookup ON config (name)")
+
+ sqlite_version = _get_sqlite_version(cursor)
+
+ cursor.execute(
+ f"""
+ SELECT name FROM {_schema_table_name(sqlite_version)} WHERE type = 'table' AND name = 'unihashes_v2'
+ """
+ )
+ if cursor.fetchone():
+ self.logger.info("Upgrading Unihashes V2 -> V3...")
+ cursor.execute(
+ """
+ INSERT INTO unihashes_v3 (id, method, unihash, taskhash, gc_mark)
+ SELECT id, method, unihash, taskhash, '' FROM unihashes_v2
+ """
+ )
+ cursor.execute("DROP TABLE unihashes_v2")
+ db.commit()
+ self.logger.info("Upgrade complete")
+
+ def connect(self, logger):
+ return Database(logger, self.dbname, self.sync)
+
+
+class Database(object):
+ def __init__(self, logger, dbname, sync):
+ self.dbname = dbname
+ self.logger = logger
+
+ self.db = sqlite3.connect(self.dbname)
+ self.db.row_factory = sqlite3.Row
+
+ with closing(self.db.cursor()) as cursor:
+ cursor.execute("PRAGMA journal_mode = WAL")
+ cursor.execute(
+ "PRAGMA synchronous = %s" % ("NORMAL" if sync else "OFF")
+ )
+
+ self.sqlite_version = _get_sqlite_version(cursor)
+
+ async def __aenter__(self):
+ return self
+
+ async def __aexit__(self, exc_type, exc_value, traceback):
+ await self.close()
+
+ async def _set_config(self, cursor, name, value):
+ cursor.execute(
+ """
+ INSERT OR REPLACE INTO config (id, name, value) VALUES
+ ((SELECT id FROM config WHERE name=:name), :name, :value)
+ """,
+ {
+ "name": name,
+ "value": value,
+ },
+ )
+
+ async def _get_config(self, cursor, name):
+ cursor.execute(
+ "SELECT value FROM config WHERE name=:name",
+ {
+ "name": name,
+ },
+ )
+ row = cursor.fetchone()
+ if row is None:
+ return None
+ return row["value"]
+
+ async def close(self):
+ self.db.close()
+
+ async def get_unihash_by_taskhash_full(self, method, taskhash):
+ with closing(self.db.cursor()) as cursor:
+ cursor.execute(
+ """
+ SELECT *, unihashes_v3.unihash AS unihash FROM outhashes_v2
+ INNER JOIN unihashes_v3 ON unihashes_v3.method=outhashes_v2.method AND unihashes_v3.taskhash=outhashes_v2.taskhash
+ WHERE outhashes_v2.method=:method AND outhashes_v2.taskhash=:taskhash
+ ORDER BY outhashes_v2.created ASC
+ LIMIT 1
+ """,
+ {
+ "method": method,
+ "taskhash": taskhash,
+ },
+ )
+ return cursor.fetchone()
+
+ async def get_unihash_by_outhash(self, method, outhash):
+ with closing(self.db.cursor()) as cursor:
+ cursor.execute(
+ """
+ SELECT *, unihashes_v3.unihash AS unihash FROM outhashes_v2
+ INNER JOIN unihashes_v3 ON unihashes_v3.method=outhashes_v2.method AND unihashes_v3.taskhash=outhashes_v2.taskhash
+ WHERE outhashes_v2.method=:method AND outhashes_v2.outhash=:outhash
+ ORDER BY outhashes_v2.created ASC
+ LIMIT 1
+ """,
+ {
+ "method": method,
+ "outhash": outhash,
+ },
+ )
+ return cursor.fetchone()
+
+ async def unihash_exists(self, unihash):
+ with closing(self.db.cursor()) as cursor:
+ cursor.execute(
+ """
+ SELECT * FROM unihashes_v3 WHERE unihash=:unihash
+ LIMIT 1
+ """,
+ {
+ "unihash": unihash,
+ },
+ )
+ return cursor.fetchone() is not None
+
+ async def get_outhash(self, method, outhash):
+ with closing(self.db.cursor()) as cursor:
+ cursor.execute(
+ """
+ SELECT * FROM outhashes_v2
+ WHERE outhashes_v2.method=:method AND outhashes_v2.outhash=:outhash
+ ORDER BY outhashes_v2.created ASC
+ LIMIT 1
+ """,
+ {
+ "method": method,
+ "outhash": outhash,
+ },
+ )
+ return cursor.fetchone()
+
+ async def get_equivalent_for_outhash(self, method, outhash, taskhash):
+ with closing(self.db.cursor()) as cursor:
+ cursor.execute(
+ """
+ SELECT outhashes_v2.taskhash AS taskhash, unihashes_v3.unihash AS unihash FROM outhashes_v2
+ INNER JOIN unihashes_v3 ON unihashes_v3.method=outhashes_v2.method AND unihashes_v3.taskhash=outhashes_v2.taskhash
+ -- Select any matching output hash except the one we just inserted
+ WHERE outhashes_v2.method=:method AND outhashes_v2.outhash=:outhash AND outhashes_v2.taskhash!=:taskhash
+ -- Pick the oldest hash
+ ORDER BY outhashes_v2.created ASC
+ LIMIT 1
+ """,
+ {
+ "method": method,
+ "outhash": outhash,
+ "taskhash": taskhash,
+ },
+ )
+ return cursor.fetchone()
+
+ async def get_equivalent(self, method, taskhash):
+ with closing(self.db.cursor()) as cursor:
+ cursor.execute(
+ "SELECT taskhash, method, unihash FROM unihashes_v3 WHERE method=:method AND taskhash=:taskhash",
+ {
+ "method": method,
+ "taskhash": taskhash,
+ },
+ )
+ return cursor.fetchone()
+
+ async def remove(self, condition):
+ def do_remove(columns, table_name, cursor):
+ where, clause = _make_condition_statement(columns, condition)
+ if where:
+ query = f"DELETE FROM {table_name} WHERE {clause}"
+ cursor.execute(query, where)
+ return cursor.rowcount
+
+ return 0
+
+ count = 0
+ with closing(self.db.cursor()) as cursor:
+ count += do_remove(OUTHASH_TABLE_COLUMNS, "outhashes_v2", cursor)
+ count += do_remove(UNIHASH_TABLE_COLUMNS, "unihashes_v3", cursor)
+ self.db.commit()
+
+ return count
+
+ async def get_current_gc_mark(self):
+ with closing(self.db.cursor()) as cursor:
+ return await self._get_config(cursor, "gc-mark")
+
+ async def gc_status(self):
+ with closing(self.db.cursor()) as cursor:
+ cursor.execute(
+ """
+ SELECT COUNT() FROM unihashes_v3 WHERE
+ gc_mark=COALESCE((SELECT value FROM config WHERE name='gc-mark'), '')
+ """
+ )
+ keep_rows = cursor.fetchone()[0]
+
+ cursor.execute(
+ """
+ SELECT COUNT() FROM unihashes_v3 WHERE
+ gc_mark!=COALESCE((SELECT value FROM config WHERE name='gc-mark'), '')
+ """
+ )
+ remove_rows = cursor.fetchone()[0]
+
+ current_mark = await self._get_config(cursor, "gc-mark")
+
+ return (keep_rows, remove_rows, current_mark)
+
+ async def gc_mark(self, mark, condition):
+ with closing(self.db.cursor()) as cursor:
+ await self._set_config(cursor, "gc-mark", mark)
+
+ where, clause = _make_condition_statement(UNIHASH_TABLE_COLUMNS, condition)
+
+ new_rows = 0
+ if where:
+ cursor.execute(
+ f"""
+ UPDATE unihashes_v3 SET
+ gc_mark=COALESCE((SELECT value FROM config WHERE name='gc-mark'), '')
+ WHERE {clause}
+ """,
+ where,
+ )
+ new_rows = cursor.rowcount
+
+ self.db.commit()
+ return new_rows
+
+ async def gc_sweep(self):
+ with closing(self.db.cursor()) as cursor:
+ # NOTE: COALESCE is not used in this query so that if the current
+ # mark is NULL, nothing will happen
+ cursor.execute(
+ """
+ DELETE FROM unihashes_v3 WHERE
+ gc_mark!=(SELECT value FROM config WHERE name='gc-mark')
+ """
+ )
+ count = cursor.rowcount
+ await self._set_config(cursor, "gc-mark", None)
+
+ self.db.commit()
+ return count
+
+ async def clean_unused(self, oldest):
+ with closing(self.db.cursor()) as cursor:
+ cursor.execute(
+ """
+ DELETE FROM outhashes_v2 WHERE created<:oldest AND NOT EXISTS (
+ SELECT unihashes_v3.id FROM unihashes_v3 WHERE unihashes_v3.method=outhashes_v2.method AND unihashes_v3.taskhash=outhashes_v2.taskhash LIMIT 1
+ )
+ """,
+ {
+ "oldest": oldest,
+ },
+ )
+ self.db.commit()
+ return cursor.rowcount
+
+ async def insert_unihash(self, method, taskhash, unihash):
+ with closing(self.db.cursor()) as cursor:
+ prevrowid = cursor.lastrowid
+ cursor.execute(
+ """
+ INSERT OR IGNORE INTO unihashes_v3 (method, taskhash, unihash, gc_mark) VALUES
+ (
+ :method,
+ :taskhash,
+ :unihash,
+ COALESCE((SELECT value FROM config WHERE name='gc-mark'), '')
+ )
+ """,
+ {
+ "method": method,
+ "taskhash": taskhash,
+ "unihash": unihash,
+ },
+ )
+ self.db.commit()
+ return cursor.lastrowid != prevrowid
+
+ async def insert_outhash(self, data):
+ data = {k: v for k, v in data.items() if k in OUTHASH_TABLE_COLUMNS}
+ keys = sorted(data.keys())
+ query = "INSERT OR IGNORE INTO outhashes_v2 ({fields}) VALUES({values})".format(
+ fields=", ".join(keys),
+ values=", ".join(":" + k for k in keys),
+ )
+ with closing(self.db.cursor()) as cursor:
+ prevrowid = cursor.lastrowid
+ cursor.execute(query, data)
+ self.db.commit()
+ return cursor.lastrowid != prevrowid
+
+ def _get_user(self, username):
+ with closing(self.db.cursor()) as cursor:
+ cursor.execute(
+ """
+ SELECT username, permissions, token FROM users WHERE username=:username
+ """,
+ {
+ "username": username,
+ },
+ )
+ return cursor.fetchone()
+
+ async def lookup_user_token(self, username):
+ row = self._get_user(username)
+ if row is None:
+ return None, None
+ return map_user(row), row["token"]
+
+ async def lookup_user(self, username):
+ return map_user(self._get_user(username))
+
+ async def set_user_token(self, username, token):
+ with closing(self.db.cursor()) as cursor:
+ cursor.execute(
+ """
+ UPDATE users SET token=:token WHERE username=:username
+ """,
+ {
+ "username": username,
+ "token": token,
+ },
+ )
+ self.db.commit()
+ return cursor.rowcount != 0
+
+ async def set_user_perms(self, username, permissions):
+ with closing(self.db.cursor()) as cursor:
+ cursor.execute(
+ """
+ UPDATE users SET permissions=:permissions WHERE username=:username
+ """,
+ {
+ "username": username,
+ "permissions": " ".join(permissions),
+ },
+ )
+ self.db.commit()
+ return cursor.rowcount != 0
+
+ async def get_all_users(self):
+ with closing(self.db.cursor()) as cursor:
+ cursor.execute("SELECT username, permissions FROM users")
+ return [map_user(r) for r in cursor.fetchall()]
+
+ async def new_user(self, username, permissions, token):
+ with closing(self.db.cursor()) as cursor:
+ try:
+ cursor.execute(
+ """
+ INSERT INTO users (username, token, permissions) VALUES (:username, :token, :permissions)
+ """,
+ {
+ "username": username,
+ "token": token,
+ "permissions": " ".join(permissions),
+ },
+ )
+ self.db.commit()
+ return True
+ except sqlite3.IntegrityError:
+ return False
+
+ async def delete_user(self, username):
+ with closing(self.db.cursor()) as cursor:
+ cursor.execute(
+ """
+ DELETE FROM users WHERE username=:username
+ """,
+ {
+ "username": username,
+ },
+ )
+ self.db.commit()
+ return cursor.rowcount != 0
+
+ async def get_usage(self):
+ usage = {}
+ with closing(self.db.cursor()) as cursor:
+ cursor.execute(
+ f"""
+ SELECT name FROM {_schema_table_name(self.sqlite_version)} WHERE type = 'table' AND name NOT LIKE 'sqlite_%'
+ """
+ )
+ for row in cursor.fetchall():
+ cursor.execute(
+ """
+ SELECT COUNT() FROM %s
+ """
+ % row["name"],
+ )
+ usage[row["name"]] = {
+ "rows": cursor.fetchone()[0],
+ }
+ return usage
+
+ async def get_query_columns(self):
+ columns = set()
+ for name, typ, _ in UNIHASH_TABLE_DEFINITION + OUTHASH_TABLE_DEFINITION:
+ if typ.startswith("TEXT"):
+ columns.add(name)
+ return list(columns)
diff --git a/bitbake/lib/hashserv/tests.py b/bitbake/lib/hashserv/tests.py
index f6b85aed85..0809453cf8 100644
--- a/bitbake/lib/hashserv/tests.py
+++ b/bitbake/lib/hashserv/tests.py
@@ -6,6 +6,9 @@
#
from . import create_server, create_client
+from .server import DEFAULT_ANON_PERMS, ALL_PERMISSIONS
+from bb.asyncrpc import InvokeError
+from .client import ClientPool
import hashlib
import logging
import multiprocessing
@@ -17,6 +20,14 @@ import unittest
import socket
import time
import signal
+import subprocess
+import json
+import re
+from pathlib import Path
+
+
+THIS_DIR = Path(__file__).parent
+BIN_DIR = THIS_DIR.parent.parent / "bin"
def server_prefunc(server, idx):
logging.basicConfig(level=logging.DEBUG, filename='bbhashserv-%d.log' % idx, filemode='w',
@@ -29,11 +40,12 @@ class HashEquivalenceTestSetup(object):
METHOD = 'TestMethod'
server_index = 0
+ client_index = 0
- def start_server(self, dbpath=None, upstream=None, read_only=False, prefunc=server_prefunc):
+ def start_server(self, dbpath=None, upstream=None, read_only=False, prefunc=server_prefunc, anon_perms=DEFAULT_ANON_PERMS, admin_username=None, admin_password=None):
self.server_index += 1
if dbpath is None:
- dbpath = os.path.join(self.temp_dir.name, "db%d.sqlite" % self.server_index)
+ dbpath = self.make_dbpath()
def cleanup_server(server):
if server.process.exitcode is not None:
@@ -45,19 +57,41 @@ class HashEquivalenceTestSetup(object):
server = create_server(self.get_server_addr(self.server_index),
dbpath,
upstream=upstream,
- read_only=read_only)
+ read_only=read_only,
+ anon_perms=anon_perms,
+ admin_username=admin_username,
+ admin_password=admin_password)
server.dbpath = dbpath
server.serve_as_process(prefunc=prefunc, args=(self.server_index,))
self.addCleanup(cleanup_server, server)
+ return server
+
+ def make_dbpath(self):
+ return os.path.join(self.temp_dir.name, "db%d.sqlite" % self.server_index)
+
+ def start_client(self, server_address, username=None, password=None):
def cleanup_client(client):
client.close()
- client = create_client(server.address)
+ client = create_client(server_address, username=username, password=password)
self.addCleanup(cleanup_client, client)
- return (client, server)
+ return client
+
+ def start_test_server(self):
+ self.server = self.start_server()
+ return self.server.address
+
+ def start_auth_server(self):
+ auth_server = self.start_server(self.server.dbpath, anon_perms=[], admin_username="admin", admin_password="password")
+ self.auth_server_address = auth_server.address
+ self.admin_client = self.start_client(auth_server.address, username="admin", password="password")
+ return self.admin_client
+
+ def auth_client(self, user):
+ return self.start_client(self.auth_server_address, user["username"], user["token"])
def setUp(self):
if sys.version_info < (3, 5, 0):
@@ -66,24 +100,82 @@ class HashEquivalenceTestSetup(object):
self.temp_dir = tempfile.TemporaryDirectory(prefix='bb-hashserv')
self.addCleanup(self.temp_dir.cleanup)
- (self.client, self.server) = self.start_server()
+ self.server_address = self.start_test_server()
+
+ self.client = self.start_client(self.server_address)
def assertClientGetHash(self, client, taskhash, unihash):
result = client.get_unihash(self.METHOD, taskhash)
self.assertEqual(result, unihash)
+ def assertUserPerms(self, user, permissions):
+ with self.auth_client(user) as client:
+ info = client.get_user()
+ self.assertEqual(info, {
+ "username": user["username"],
+ "permissions": permissions,
+ })
-class HashEquivalenceCommonTests(object):
- def test_create_hash(self):
+ def assertUserCanAuth(self, user):
+ with self.start_client(self.auth_server_address) as client:
+ client.auth(user["username"], user["token"])
+
+ def assertUserCannotAuth(self, user):
+ with self.start_client(self.auth_server_address) as client, self.assertRaises(InvokeError):
+ client.auth(user["username"], user["token"])
+
+ def create_test_hash(self, client):
# Simple test that hashes can be created
taskhash = '35788efcb8dfb0a02659d81cf2bfd695fb30faf9'
outhash = '2765d4a5884be49b28601445c2760c5f21e7e5c0ee2b7e3fce98fd7e5970796f'
unihash = 'f46d3fbb439bd9b921095da657a4de906510d2cd'
- self.assertClientGetHash(self.client, taskhash, None)
+ self.assertClientGetHash(client, taskhash, None)
- result = self.client.report_unihash(taskhash, self.METHOD, outhash, unihash)
+ result = client.report_unihash(taskhash, self.METHOD, outhash, unihash)
self.assertEqual(result['unihash'], unihash, 'Server returned bad unihash')
+ return taskhash, outhash, unihash
+
+ def run_hashclient(self, args, **kwargs):
+ try:
+ p = subprocess.run(
+ [BIN_DIR / "bitbake-hashclient"] + args,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.STDOUT,
+ encoding="utf-8",
+ **kwargs
+ )
+ except subprocess.CalledProcessError as e:
+ print(e.output)
+ raise e
+
+ print(p.stdout)
+ return p
+
+
+class HashEquivalenceCommonTests(object):
+ def auth_perms(self, *permissions):
+ self.client_index += 1
+ user = self.create_user(f"user-{self.client_index}", permissions)
+ return self.auth_client(user)
+
+ def create_user(self, username, permissions, *, client=None):
+ def remove_user(username):
+ try:
+ self.admin_client.delete_user(username)
+ except bb.asyncrpc.InvokeError:
+ pass
+
+ if client is None:
+ client = self.admin_client
+
+ user = client.new_user(username, permissions)
+ self.addCleanup(remove_user, username)
+
+ return user
+
+ def test_create_hash(self):
+ return self.create_test_hash(self.client)
def test_create_equivalent(self):
# Tests that a second reported task with the same outhash will be
@@ -125,6 +217,57 @@ class HashEquivalenceCommonTests(object):
self.assertClientGetHash(self.client, taskhash, unihash)
+ def test_remove_taskhash(self):
+ taskhash, outhash, unihash = self.create_test_hash(self.client)
+ result = self.client.remove({"taskhash": taskhash})
+ self.assertGreater(result["count"], 0)
+ self.assertClientGetHash(self.client, taskhash, None)
+
+ result_outhash = self.client.get_outhash(self.METHOD, outhash, taskhash)
+ self.assertIsNone(result_outhash)
+
+ def test_remove_unihash(self):
+ taskhash, outhash, unihash = self.create_test_hash(self.client)
+ result = self.client.remove({"unihash": unihash})
+ self.assertGreater(result["count"], 0)
+ self.assertClientGetHash(self.client, taskhash, None)
+
+ def test_remove_outhash(self):
+ taskhash, outhash, unihash = self.create_test_hash(self.client)
+ result = self.client.remove({"outhash": outhash})
+ self.assertGreater(result["count"], 0)
+
+ result_outhash = self.client.get_outhash(self.METHOD, outhash, taskhash)
+ self.assertIsNone(result_outhash)
+
+ def test_remove_method(self):
+ taskhash, outhash, unihash = self.create_test_hash(self.client)
+ result = self.client.remove({"method": self.METHOD})
+ self.assertGreater(result["count"], 0)
+ self.assertClientGetHash(self.client, taskhash, None)
+
+ result_outhash = self.client.get_outhash(self.METHOD, outhash, taskhash)
+ self.assertIsNone(result_outhash)
+
+ def test_clean_unused(self):
+ taskhash, outhash, unihash = self.create_test_hash(self.client)
+
+ # Clean the database, which should not remove anything because all hashes an in-use
+ result = self.client.clean_unused(0)
+ self.assertEqual(result["count"], 0)
+ self.assertClientGetHash(self.client, taskhash, unihash)
+
+ # Remove the unihash. The row in the outhash table should still be present
+ self.client.remove({"unihash": unihash})
+ result_outhash = self.client.get_outhash(self.METHOD, outhash, taskhash, False)
+ self.assertIsNotNone(result_outhash)
+
+ # Now clean with no minimum age which will remove the outhash
+ result = self.client.clean_unused(0)
+ self.assertEqual(result["count"], 1)
+ result_outhash = self.client.get_outhash(self.METHOD, outhash, taskhash, False)
+ self.assertIsNone(result_outhash)
+
def test_huge_message(self):
# Simple test that hashes can be created
taskhash = 'c665584ee6817aa99edfc77a44dd853828279370'
@@ -154,7 +297,7 @@ class HashEquivalenceCommonTests(object):
def test_stress(self):
def query_server(failures):
- client = Client(self.server.address)
+ client = Client(self.server_address)
try:
for i in range(1000):
taskhash = hashlib.sha256()
@@ -193,8 +336,10 @@ class HashEquivalenceCommonTests(object):
# the side client. It also verifies that the results are pulled into
# the downstream database by checking that the downstream and side servers
# match after the downstream is done waiting for all backfill tasks
- (down_client, down_server) = self.start_server(upstream=self.server.address)
- (side_client, side_server) = self.start_server(dbpath=down_server.dbpath)
+ down_server = self.start_server(upstream=self.server_address)
+ down_client = self.start_client(down_server.address)
+ side_server = self.start_server(dbpath=down_server.dbpath)
+ side_client = self.start_client(side_server.address)
def check_hash(taskhash, unihash, old_sidehash):
nonlocal down_client
@@ -298,15 +443,24 @@ class HashEquivalenceCommonTests(object):
self.assertEqual(result['taskhash'], taskhash9, 'Server failed to copy unihash from upstream')
self.assertEqual(result['method'], self.METHOD)
+ def test_unihash_exsits(self):
+ taskhash, outhash, unihash = self.create_test_hash(self.client)
+ self.assertTrue(self.client.unihash_exists(unihash))
+ self.assertFalse(self.client.unihash_exists('6662e699d6e3d894b24408ff9a4031ef9b038ee8'))
+
def test_ro_server(self):
- (ro_client, ro_server) = self.start_server(dbpath=self.server.dbpath, read_only=True)
+ rw_server = self.start_server()
+ rw_client = self.start_client(rw_server.address)
+
+ ro_server = self.start_server(dbpath=rw_server.dbpath, read_only=True)
+ ro_client = self.start_client(ro_server.address)
# Report a hash via the read-write server
taskhash = '35788efcb8dfb0a02659d81cf2bfd695fb30faf9'
outhash = '2765d4a5884be49b28601445c2760c5f21e7e5c0ee2b7e3fce98fd7e5970796f'
unihash = 'f46d3fbb439bd9b921095da657a4de906510d2cd'
- result = self.client.report_unihash(taskhash, self.METHOD, outhash, unihash)
+ result = rw_client.report_unihash(taskhash, self.METHOD, outhash, unihash)
self.assertEqual(result['unihash'], unihash, 'Server returned bad unihash')
# Check the hash via the read-only server
@@ -317,11 +471,11 @@ class HashEquivalenceCommonTests(object):
outhash2 = '3c979c3db45c569f51ab7626a4651074be3a9d11a84b1db076f5b14f7d39db44'
unihash2 = '90e9bc1d1f094c51824adca7f8ea79a048d68824'
- with self.assertRaises(ConnectionError):
- ro_client.report_unihash(taskhash2, self.METHOD, outhash2, unihash2)
+ result = ro_client.report_unihash(taskhash2, self.METHOD, outhash2, unihash2)
+ self.assertEqual(result['unihash'], unihash2)
# Ensure that the database was not modified
- self.assertClientGetHash(self.client, taskhash2, None)
+ self.assertClientGetHash(rw_client, taskhash2, None)
def test_slow_server_start(self):
@@ -341,7 +495,7 @@ class HashEquivalenceCommonTests(object):
old_signal = signal.signal(signal.SIGTERM, do_nothing)
self.addCleanup(signal.signal, signal.SIGTERM, old_signal)
- _, server = self.start_server(prefunc=prefunc)
+ server = self.start_server(prefunc=prefunc)
server.process.terminate()
time.sleep(30)
event.set()
@@ -401,6 +555,858 @@ class HashEquivalenceCommonTests(object):
# shares a taskhash with Task 2
self.assertClientGetHash(self.client, taskhash2, unihash2)
+
+ def test_client_pool_get_unihashes(self):
+ TEST_INPUT = (
+ # taskhash outhash unihash
+ ('8aa96fcffb5831b3c2c0cb75f0431e3f8b20554a', 'afe240a439959ce86f5e322f8c208e1fedefea9e813f2140c81af866cc9edf7e','218e57509998197d570e2c98512d0105985dffc9'),
+ # Duplicated taskhash with multiple output hashes and unihashes.
+ ('8aa96fcffb5831b3c2c0cb75f0431e3f8b20554a', '0904a7fe3dc712d9fd8a74a616ddca2a825a8ee97adf0bd3fc86082c7639914d', 'ae9a7d252735f0dafcdb10e2e02561ca3a47314c'),
+ # Equivalent hash
+ ("044c2ec8aaf480685a00ff6ff49e6162e6ad34e1", '0904a7fe3dc712d9fd8a74a616ddca2a825a8ee97adf0bd3fc86082c7639914d', "def64766090d28f627e816454ed46894bb3aab36"),
+ ("e3da00593d6a7fb435c7e2114976c59c5fd6d561", "1cf8713e645f491eb9c959d20b5cae1c47133a292626dda9b10709857cbe688a", "3b5d3d83f07f259e9086fcb422c855286e18a57d"),
+ ('35788efcb8dfb0a02659d81cf2bfd695fb30faf9', '2765d4a5884be49b28601445c2760c5f21e7e5c0ee2b7e3fce98fd7e5970796f', 'f46d3fbb439bd9b921095da657a4de906510d2cd'),
+ ('35788efcb8dfb0a02659d81cf2bfd695fb30fafa', '2765d4a5884be49b28601445c2760c5f21e7e5c0ee2b7e3fce98fd7e5970796f', 'f46d3fbb439bd9b921095da657a4de906510d2ce'),
+ ('9d81d76242cc7cfaf7bf74b94b9cd2e29324ed74', '8470d56547eea6236d7c81a644ce74670ca0bbda998e13c629ef6bb3f0d60b69', '05d2a63c81e32f0a36542ca677e8ad852365c538'),
+ )
+ EXTRA_QUERIES = (
+ "6b6be7a84ab179b4240c4302518dc3f6",
+ )
+
+ with ClientPool(self.server_address, 10) as client_pool:
+ for taskhash, outhash, unihash in TEST_INPUT:
+ self.client.report_unihash(taskhash, self.METHOD, outhash, unihash)
+
+ query = {idx: (self.METHOD, data[0]) for idx, data in enumerate(TEST_INPUT)}
+ for idx, taskhash in enumerate(EXTRA_QUERIES):
+ query[idx + len(TEST_INPUT)] = (self.METHOD, taskhash)
+
+ result = client_pool.get_unihashes(query)
+
+ self.assertDictEqual(result, {
+ 0: "218e57509998197d570e2c98512d0105985dffc9",
+ 1: "218e57509998197d570e2c98512d0105985dffc9",
+ 2: "218e57509998197d570e2c98512d0105985dffc9",
+ 3: "3b5d3d83f07f259e9086fcb422c855286e18a57d",
+ 4: "f46d3fbb439bd9b921095da657a4de906510d2cd",
+ 5: "f46d3fbb439bd9b921095da657a4de906510d2cd",
+ 6: "05d2a63c81e32f0a36542ca677e8ad852365c538",
+ 7: None,
+ })
+
+ def test_client_pool_unihash_exists(self):
+ TEST_INPUT = (
+ # taskhash outhash unihash
+ ('8aa96fcffb5831b3c2c0cb75f0431e3f8b20554a', 'afe240a439959ce86f5e322f8c208e1fedefea9e813f2140c81af866cc9edf7e','218e57509998197d570e2c98512d0105985dffc9'),
+ # Duplicated taskhash with multiple output hashes and unihashes.
+ ('8aa96fcffb5831b3c2c0cb75f0431e3f8b20554a', '0904a7fe3dc712d9fd8a74a616ddca2a825a8ee97adf0bd3fc86082c7639914d', 'ae9a7d252735f0dafcdb10e2e02561ca3a47314c'),
+ # Equivalent hash
+ ("044c2ec8aaf480685a00ff6ff49e6162e6ad34e1", '0904a7fe3dc712d9fd8a74a616ddca2a825a8ee97adf0bd3fc86082c7639914d', "def64766090d28f627e816454ed46894bb3aab36"),
+ ("e3da00593d6a7fb435c7e2114976c59c5fd6d561", "1cf8713e645f491eb9c959d20b5cae1c47133a292626dda9b10709857cbe688a", "3b5d3d83f07f259e9086fcb422c855286e18a57d"),
+ ('35788efcb8dfb0a02659d81cf2bfd695fb30faf9', '2765d4a5884be49b28601445c2760c5f21e7e5c0ee2b7e3fce98fd7e5970796f', 'f46d3fbb439bd9b921095da657a4de906510d2cd'),
+ ('35788efcb8dfb0a02659d81cf2bfd695fb30fafa', '2765d4a5884be49b28601445c2760c5f21e7e5c0ee2b7e3fce98fd7e5970796f', 'f46d3fbb439bd9b921095da657a4de906510d2ce'),
+ ('9d81d76242cc7cfaf7bf74b94b9cd2e29324ed74', '8470d56547eea6236d7c81a644ce74670ca0bbda998e13c629ef6bb3f0d60b69', '05d2a63c81e32f0a36542ca677e8ad852365c538'),
+ )
+ EXTRA_QUERIES = (
+ "6b6be7a84ab179b4240c4302518dc3f6",
+ )
+
+ result_unihashes = set()
+
+
+ with ClientPool(self.server_address, 10) as client_pool:
+ for taskhash, outhash, unihash in TEST_INPUT:
+ result = self.client.report_unihash(taskhash, self.METHOD, outhash, unihash)
+ result_unihashes.add(result["unihash"])
+
+ query = {}
+ expected = {}
+
+ for _, _, unihash in TEST_INPUT:
+ idx = len(query)
+ query[idx] = unihash
+ expected[idx] = unihash in result_unihashes
+
+
+ for unihash in EXTRA_QUERIES:
+ idx = len(query)
+ query[idx] = unihash
+ expected[idx] = False
+
+ result = client_pool.unihashes_exist(query)
+ self.assertDictEqual(result, expected)
+
+
+ def test_auth_read_perms(self):
+ admin_client = self.start_auth_server()
+
+ # Create hashes with non-authenticated server
+ taskhash, outhash, unihash = self.create_test_hash(self.client)
+
+ # Validate hash can be retrieved using authenticated client
+ with self.auth_perms("@read") as client:
+ self.assertClientGetHash(client, taskhash, unihash)
+
+ with self.auth_perms() as client, self.assertRaises(InvokeError):
+ self.assertClientGetHash(client, taskhash, unihash)
+
+ def test_auth_report_perms(self):
+ admin_client = self.start_auth_server()
+
+ # Without read permission, the user is completely denied
+ with self.auth_perms() as client, self.assertRaises(InvokeError):
+ self.create_test_hash(client)
+
+ # Read permission allows the call to succeed, but it doesn't record
+ # anythin in the database
+ with self.auth_perms("@read") as client:
+ taskhash, outhash, unihash = self.create_test_hash(client)
+ self.assertClientGetHash(client, taskhash, None)
+
+ # Report permission alone is insufficient
+ with self.auth_perms("@report") as client, self.assertRaises(InvokeError):
+ self.create_test_hash(client)
+
+ # Read and report permission actually modify the database
+ with self.auth_perms("@read", "@report") as client:
+ taskhash, outhash, unihash = self.create_test_hash(client)
+ self.assertClientGetHash(client, taskhash, unihash)
+
+ def test_auth_no_token_refresh_from_anon_user(self):
+ self.start_auth_server()
+
+ with self.start_client(self.auth_server_address) as client, self.assertRaises(InvokeError):
+ client.refresh_token()
+
+ def test_auth_self_token_refresh(self):
+ admin_client = self.start_auth_server()
+
+ # Create a new user with no permissions
+ user = self.create_user("test-user", [])
+
+ with self.auth_client(user) as client:
+ new_user = client.refresh_token()
+
+ self.assertEqual(user["username"], new_user["username"])
+ self.assertNotEqual(user["token"], new_user["token"])
+ self.assertUserCanAuth(new_user)
+ self.assertUserCannotAuth(user)
+
+ # Explicitly specifying with your own username is fine also
+ with self.auth_client(new_user) as client:
+ new_user2 = client.refresh_token(user["username"])
+
+ self.assertEqual(user["username"], new_user2["username"])
+ self.assertNotEqual(user["token"], new_user2["token"])
+ self.assertUserCanAuth(new_user2)
+ self.assertUserCannotAuth(new_user)
+ self.assertUserCannotAuth(user)
+
+ def test_auth_token_refresh(self):
+ admin_client = self.start_auth_server()
+
+ user = self.create_user("test-user", [])
+
+ with self.auth_perms() as client, self.assertRaises(InvokeError):
+ client.refresh_token(user["username"])
+
+ with self.auth_perms("@user-admin") as client:
+ new_user = client.refresh_token(user["username"])
+
+ self.assertEqual(user["username"], new_user["username"])
+ self.assertNotEqual(user["token"], new_user["token"])
+ self.assertUserCanAuth(new_user)
+ self.assertUserCannotAuth(user)
+
+ def test_auth_self_get_user(self):
+ admin_client = self.start_auth_server()
+
+ user = self.create_user("test-user", [])
+ user_info = user.copy()
+ del user_info["token"]
+
+ with self.auth_client(user) as client:
+ info = client.get_user()
+ self.assertEqual(info, user_info)
+
+ # Explicitly asking for your own username is fine also
+ info = client.get_user(user["username"])
+ self.assertEqual(info, user_info)
+
+ def test_auth_get_user(self):
+ admin_client = self.start_auth_server()
+
+ user = self.create_user("test-user", [])
+ user_info = user.copy()
+ del user_info["token"]
+
+ with self.auth_perms() as client, self.assertRaises(InvokeError):
+ client.get_user(user["username"])
+
+ with self.auth_perms("@user-admin") as client:
+ info = client.get_user(user["username"])
+ self.assertEqual(info, user_info)
+
+ info = client.get_user("nonexist-user")
+ self.assertIsNone(info)
+
+ def test_auth_reconnect(self):
+ admin_client = self.start_auth_server()
+
+ user = self.create_user("test-user", [])
+ user_info = user.copy()
+ del user_info["token"]
+
+ with self.auth_client(user) as client:
+ info = client.get_user()
+ self.assertEqual(info, user_info)
+
+ client.disconnect()
+
+ info = client.get_user()
+ self.assertEqual(info, user_info)
+
+ def test_auth_delete_user(self):
+ admin_client = self.start_auth_server()
+
+ user = self.create_user("test-user", [])
+
+ # self service
+ with self.auth_client(user) as client:
+ client.delete_user(user["username"])
+
+ self.assertIsNone(admin_client.get_user(user["username"]))
+ user = self.create_user("test-user", [])
+
+ with self.auth_perms() as client, self.assertRaises(InvokeError):
+ client.delete_user(user["username"])
+
+ with self.auth_perms("@user-admin") as client:
+ client.delete_user(user["username"])
+
+ # User doesn't exist, so even though the permission is correct, it's an
+ # error
+ with self.auth_perms("@user-admin") as client, self.assertRaises(InvokeError):
+ client.delete_user(user["username"])
+
+ def test_auth_set_user_perms(self):
+ admin_client = self.start_auth_server()
+
+ user = self.create_user("test-user", [])
+
+ self.assertUserPerms(user, [])
+
+ # No self service to change permissions
+ with self.auth_client(user) as client, self.assertRaises(InvokeError):
+ client.set_user_perms(user["username"], ["@all"])
+ self.assertUserPerms(user, [])
+
+ with self.auth_perms() as client, self.assertRaises(InvokeError):
+ client.set_user_perms(user["username"], ["@all"])
+ self.assertUserPerms(user, [])
+
+ with self.auth_perms("@user-admin") as client:
+ client.set_user_perms(user["username"], ["@all"])
+ self.assertUserPerms(user, sorted(list(ALL_PERMISSIONS)))
+
+ # Bad permissions
+ with self.auth_perms("@user-admin") as client, self.assertRaises(InvokeError):
+ client.set_user_perms(user["username"], ["@this-is-not-a-permission"])
+ self.assertUserPerms(user, sorted(list(ALL_PERMISSIONS)))
+
+ def test_auth_get_all_users(self):
+ admin_client = self.start_auth_server()
+
+ user = self.create_user("test-user", [])
+
+ with self.auth_client(user) as client, self.assertRaises(InvokeError):
+ client.get_all_users()
+
+ # Give the test user the correct permission
+ admin_client.set_user_perms(user["username"], ["@user-admin"])
+
+ with self.auth_client(user) as client:
+ all_users = client.get_all_users()
+
+ # Convert to a dictionary for easier comparison
+ all_users = {u["username"]: u for u in all_users}
+
+ self.assertEqual(all_users,
+ {
+ "admin": {
+ "username": "admin",
+ "permissions": sorted(list(ALL_PERMISSIONS)),
+ },
+ "test-user": {
+ "username": "test-user",
+ "permissions": ["@user-admin"],
+ }
+ }
+ )
+
+ def test_auth_new_user(self):
+ self.start_auth_server()
+
+ permissions = ["@read", "@report", "@db-admin", "@user-admin"]
+ permissions.sort()
+
+ with self.auth_perms() as client, self.assertRaises(InvokeError):
+ self.create_user("test-user", permissions, client=client)
+
+ with self.auth_perms("@user-admin") as client:
+ user = self.create_user("test-user", permissions, client=client)
+ self.assertIn("token", user)
+ self.assertEqual(user["username"], "test-user")
+ self.assertEqual(user["permissions"], permissions)
+
+ def test_auth_become_user(self):
+ admin_client = self.start_auth_server()
+
+ user = self.create_user("test-user", ["@read", "@report"])
+ user_info = user.copy()
+ del user_info["token"]
+
+ with self.auth_perms() as client, self.assertRaises(InvokeError):
+ client.become_user(user["username"])
+
+ with self.auth_perms("@user-admin") as client:
+ become = client.become_user(user["username"])
+ self.assertEqual(become, user_info)
+
+ info = client.get_user()
+ self.assertEqual(info, user_info)
+
+ # Verify become user is preserved across disconnect
+ client.disconnect()
+
+ info = client.get_user()
+ self.assertEqual(info, user_info)
+
+ # test-user doesn't have become_user permissions, so this should
+ # not work
+ with self.assertRaises(InvokeError):
+ client.become_user(user["username"])
+
+ # No self-service of become
+ with self.auth_client(user) as client, self.assertRaises(InvokeError):
+ client.become_user(user["username"])
+
+ # Give test user permissions to become
+ admin_client.set_user_perms(user["username"], ["@user-admin"])
+
+ # It's possible to become yourself (effectively a noop)
+ with self.auth_perms("@user-admin") as client:
+ become = client.become_user(client.username)
+
+ def test_auth_gc(self):
+ admin_client = self.start_auth_server()
+
+ with self.auth_perms() as client, self.assertRaises(InvokeError):
+ client.gc_mark("ABC", {"unihash": "123"})
+
+ with self.auth_perms() as client, self.assertRaises(InvokeError):
+ client.gc_status()
+
+ with self.auth_perms() as client, self.assertRaises(InvokeError):
+ client.gc_sweep("ABC")
+
+ with self.auth_perms("@db-admin") as client:
+ client.gc_mark("ABC", {"unihash": "123"})
+
+ with self.auth_perms("@db-admin") as client:
+ client.gc_status()
+
+ with self.auth_perms("@db-admin") as client:
+ client.gc_sweep("ABC")
+
+ def test_get_db_usage(self):
+ usage = self.client.get_db_usage()
+
+ self.assertTrue(isinstance(usage, dict))
+ for name in usage.keys():
+ self.assertTrue(isinstance(usage[name], dict))
+ self.assertIn("rows", usage[name])
+ self.assertTrue(isinstance(usage[name]["rows"], int))
+
+ def test_get_db_query_columns(self):
+ columns = self.client.get_db_query_columns()
+
+ self.assertTrue(isinstance(columns, list))
+ self.assertTrue(len(columns) > 0)
+
+ for col in columns:
+ self.client.remove({col: ""})
+
+ def test_auth_is_owner(self):
+ admin_client = self.start_auth_server()
+
+ user = self.create_user("test-user", ["@read", "@report"])
+ with self.auth_client(user) as client:
+ taskhash, outhash, unihash = self.create_test_hash(client)
+ data = client.get_taskhash(self.METHOD, taskhash, True)
+ self.assertEqual(data["owner"], user["username"])
+
+ def test_gc(self):
+ taskhash = '53b8dce672cb6d0c73170be43f540460bfc347b4'
+ outhash = '5a9cb1649625f0bf41fc7791b635cd9c2d7118c7f021ba87dcd03f72b67ce7a8'
+ unihash = 'f37918cc02eb5a520b1aff86faacbc0a38124646'
+
+ result = self.client.report_unihash(taskhash, self.METHOD, outhash, unihash)
+ self.assertEqual(result['unihash'], unihash, 'Server returned bad unihash')
+
+ taskhash2 = '3bf6f1e89d26205aec90da04854fbdbf73afe6b4'
+ outhash2 = '77623a549b5b1a31e3732dfa8fe61d7ce5d44b3370f253c5360e136b852967b4'
+ unihash2 = 'af36b199320e611fbb16f1f277d3ee1d619ca58b'
+
+ result = self.client.report_unihash(taskhash2, self.METHOD, outhash2, unihash2)
+ self.assertClientGetHash(self.client, taskhash2, unihash2)
+
+ # Mark the first unihash to be kept
+ ret = self.client.gc_mark("ABC", {"unihash": unihash, "method": self.METHOD})
+ self.assertEqual(ret, {"count": 1})
+
+ ret = self.client.gc_status()
+ self.assertEqual(ret, {"mark": "ABC", "keep": 1, "remove": 1})
+
+ # Second hash is still there; mark doesn't delete hashes
+ self.assertClientGetHash(self.client, taskhash2, unihash2)
+
+ ret = self.client.gc_sweep("ABC")
+ self.assertEqual(ret, {"count": 1})
+
+ # Hash is gone. Taskhash is returned for second hash
+ self.assertClientGetHash(self.client, taskhash2, None)
+ # First hash is still present
+ self.assertClientGetHash(self.client, taskhash, unihash)
+
+ def test_gc_switch_mark(self):
+ taskhash = '53b8dce672cb6d0c73170be43f540460bfc347b4'
+ outhash = '5a9cb1649625f0bf41fc7791b635cd9c2d7118c7f021ba87dcd03f72b67ce7a8'
+ unihash = 'f37918cc02eb5a520b1aff86faacbc0a38124646'
+
+ result = self.client.report_unihash(taskhash, self.METHOD, outhash, unihash)
+ self.assertEqual(result['unihash'], unihash, 'Server returned bad unihash')
+
+ taskhash2 = '3bf6f1e89d26205aec90da04854fbdbf73afe6b4'
+ outhash2 = '77623a549b5b1a31e3732dfa8fe61d7ce5d44b3370f253c5360e136b852967b4'
+ unihash2 = 'af36b199320e611fbb16f1f277d3ee1d619ca58b'
+
+ result = self.client.report_unihash(taskhash2, self.METHOD, outhash2, unihash2)
+ self.assertClientGetHash(self.client, taskhash2, unihash2)
+
+ # Mark the first unihash to be kept
+ ret = self.client.gc_mark("ABC", {"unihash": unihash, "method": self.METHOD})
+ self.assertEqual(ret, {"count": 1})
+
+ ret = self.client.gc_status()
+ self.assertEqual(ret, {"mark": "ABC", "keep": 1, "remove": 1})
+
+ # Second hash is still there; mark doesn't delete hashes
+ self.assertClientGetHash(self.client, taskhash2, unihash2)
+
+ # Switch to a different mark and mark the second hash. This will start
+ # a new collection cycle
+ ret = self.client.gc_mark("DEF", {"unihash": unihash2, "method": self.METHOD})
+ self.assertEqual(ret, {"count": 1})
+
+ ret = self.client.gc_status()
+ self.assertEqual(ret, {"mark": "DEF", "keep": 1, "remove": 1})
+
+ # Both hashes are still present
+ self.assertClientGetHash(self.client, taskhash2, unihash2)
+ self.assertClientGetHash(self.client, taskhash, unihash)
+
+ # Sweep with the new mark
+ ret = self.client.gc_sweep("DEF")
+ self.assertEqual(ret, {"count": 1})
+
+ # First hash is gone, second is kept
+ self.assertClientGetHash(self.client, taskhash2, unihash2)
+ self.assertClientGetHash(self.client, taskhash, None)
+
+ def test_gc_switch_sweep_mark(self):
+ taskhash = '53b8dce672cb6d0c73170be43f540460bfc347b4'
+ outhash = '5a9cb1649625f0bf41fc7791b635cd9c2d7118c7f021ba87dcd03f72b67ce7a8'
+ unihash = 'f37918cc02eb5a520b1aff86faacbc0a38124646'
+
+ result = self.client.report_unihash(taskhash, self.METHOD, outhash, unihash)
+ self.assertEqual(result['unihash'], unihash, 'Server returned bad unihash')
+
+ taskhash2 = '3bf6f1e89d26205aec90da04854fbdbf73afe6b4'
+ outhash2 = '77623a549b5b1a31e3732dfa8fe61d7ce5d44b3370f253c5360e136b852967b4'
+ unihash2 = 'af36b199320e611fbb16f1f277d3ee1d619ca58b'
+
+ result = self.client.report_unihash(taskhash2, self.METHOD, outhash2, unihash2)
+ self.assertClientGetHash(self.client, taskhash2, unihash2)
+
+ # Mark the first unihash to be kept
+ ret = self.client.gc_mark("ABC", {"unihash": unihash, "method": self.METHOD})
+ self.assertEqual(ret, {"count": 1})
+
+ ret = self.client.gc_status()
+ self.assertEqual(ret, {"mark": "ABC", "keep": 1, "remove": 1})
+
+ # Sweeping with a different mark raises an error
+ with self.assertRaises(InvokeError):
+ self.client.gc_sweep("DEF")
+
+ # Both hashes are present
+ self.assertClientGetHash(self.client, taskhash2, unihash2)
+ self.assertClientGetHash(self.client, taskhash, unihash)
+
+ def test_gc_new_hashes(self):
+ taskhash = '53b8dce672cb6d0c73170be43f540460bfc347b4'
+ outhash = '5a9cb1649625f0bf41fc7791b635cd9c2d7118c7f021ba87dcd03f72b67ce7a8'
+ unihash = 'f37918cc02eb5a520b1aff86faacbc0a38124646'
+
+ result = self.client.report_unihash(taskhash, self.METHOD, outhash, unihash)
+ self.assertEqual(result['unihash'], unihash, 'Server returned bad unihash')
+
+ # Start a new garbage collection
+ ret = self.client.gc_mark("ABC", {"unihash": unihash, "method": self.METHOD})
+ self.assertEqual(ret, {"count": 1})
+
+ ret = self.client.gc_status()
+ self.assertEqual(ret, {"mark": "ABC", "keep": 1, "remove": 0})
+
+ # Add second hash. It should inherit the mark from the current garbage
+ # collection operation
+
+ taskhash2 = '3bf6f1e89d26205aec90da04854fbdbf73afe6b4'
+ outhash2 = '77623a549b5b1a31e3732dfa8fe61d7ce5d44b3370f253c5360e136b852967b4'
+ unihash2 = 'af36b199320e611fbb16f1f277d3ee1d619ca58b'
+
+ result = self.client.report_unihash(taskhash2, self.METHOD, outhash2, unihash2)
+ self.assertClientGetHash(self.client, taskhash2, unihash2)
+
+ # Sweep should remove nothing
+ ret = self.client.gc_sweep("ABC")
+ self.assertEqual(ret, {"count": 0})
+
+ # Both hashes are present
+ self.assertClientGetHash(self.client, taskhash2, unihash2)
+ self.assertClientGetHash(self.client, taskhash, unihash)
+
+
+class TestHashEquivalenceClient(HashEquivalenceTestSetup, unittest.TestCase):
+ def get_server_addr(self, server_idx):
+ return "unix://" + os.path.join(self.temp_dir.name, 'sock%d' % server_idx)
+
+ def test_get(self):
+ taskhash, outhash, unihash = self.create_test_hash(self.client)
+
+ p = self.run_hashclient(["--address", self.server_address, "get", self.METHOD, taskhash])
+ data = json.loads(p.stdout)
+ self.assertEqual(data["unihash"], unihash)
+ self.assertEqual(data["outhash"], outhash)
+ self.assertEqual(data["taskhash"], taskhash)
+ self.assertEqual(data["method"], self.METHOD)
+
+ def test_get_outhash(self):
+ taskhash, outhash, unihash = self.create_test_hash(self.client)
+
+ p = self.run_hashclient(["--address", self.server_address, "get-outhash", self.METHOD, outhash, taskhash])
+ data = json.loads(p.stdout)
+ self.assertEqual(data["unihash"], unihash)
+ self.assertEqual(data["outhash"], outhash)
+ self.assertEqual(data["taskhash"], taskhash)
+ self.assertEqual(data["method"], self.METHOD)
+
+ def test_stats(self):
+ p = self.run_hashclient(["--address", self.server_address, "stats"], check=True)
+ json.loads(p.stdout)
+
+ def test_stress(self):
+ self.run_hashclient(["--address", self.server_address, "stress"], check=True)
+
+ def test_unihash_exsits(self):
+ taskhash, outhash, unihash = self.create_test_hash(self.client)
+
+ p = self.run_hashclient([
+ "--address", self.server_address,
+ "unihash-exists", unihash,
+ ], check=True)
+ self.assertEqual(p.stdout.strip(), "true")
+
+ p = self.run_hashclient([
+ "--address", self.server_address,
+ "unihash-exists", '6662e699d6e3d894b24408ff9a4031ef9b038ee8',
+ ], check=True)
+ self.assertEqual(p.stdout.strip(), "false")
+
+ def test_unihash_exsits_quiet(self):
+ taskhash, outhash, unihash = self.create_test_hash(self.client)
+
+ p = self.run_hashclient([
+ "--address", self.server_address,
+ "unihash-exists", unihash,
+ "--quiet",
+ ])
+ self.assertEqual(p.returncode, 0)
+ self.assertEqual(p.stdout.strip(), "")
+
+ p = self.run_hashclient([
+ "--address", self.server_address,
+ "unihash-exists", '6662e699d6e3d894b24408ff9a4031ef9b038ee8',
+ "--quiet",
+ ])
+ self.assertEqual(p.returncode, 1)
+ self.assertEqual(p.stdout.strip(), "")
+
+ def test_remove_taskhash(self):
+ taskhash, outhash, unihash = self.create_test_hash(self.client)
+ self.run_hashclient([
+ "--address", self.server_address,
+ "remove",
+ "--where", "taskhash", taskhash,
+ ], check=True)
+ self.assertClientGetHash(self.client, taskhash, None)
+
+ result_outhash = self.client.get_outhash(self.METHOD, outhash, taskhash)
+ self.assertIsNone(result_outhash)
+
+ def test_remove_unihash(self):
+ taskhash, outhash, unihash = self.create_test_hash(self.client)
+ self.run_hashclient([
+ "--address", self.server_address,
+ "remove",
+ "--where", "unihash", unihash,
+ ], check=True)
+ self.assertClientGetHash(self.client, taskhash, None)
+
+ def test_remove_outhash(self):
+ taskhash, outhash, unihash = self.create_test_hash(self.client)
+ self.run_hashclient([
+ "--address", self.server_address,
+ "remove",
+ "--where", "outhash", outhash,
+ ], check=True)
+
+ result_outhash = self.client.get_outhash(self.METHOD, outhash, taskhash)
+ self.assertIsNone(result_outhash)
+
+ def test_remove_method(self):
+ taskhash, outhash, unihash = self.create_test_hash(self.client)
+ self.run_hashclient([
+ "--address", self.server_address,
+ "remove",
+ "--where", "method", self.METHOD,
+ ], check=True)
+ self.assertClientGetHash(self.client, taskhash, None)
+
+ result_outhash = self.client.get_outhash(self.METHOD, outhash, taskhash)
+ self.assertIsNone(result_outhash)
+
+ def test_clean_unused(self):
+ taskhash, outhash, unihash = self.create_test_hash(self.client)
+
+ # Clean the database, which should not remove anything because all hashes an in-use
+ self.run_hashclient([
+ "--address", self.server_address,
+ "clean-unused", "0",
+ ], check=True)
+ self.assertClientGetHash(self.client, taskhash, unihash)
+
+ # Remove the unihash. The row in the outhash table should still be present
+ self.run_hashclient([
+ "--address", self.server_address,
+ "remove",
+ "--where", "unihash", unihash,
+ ], check=True)
+ result_outhash = self.client.get_outhash(self.METHOD, outhash, taskhash, False)
+ self.assertIsNotNone(result_outhash)
+
+ # Now clean with no minimum age which will remove the outhash
+ self.run_hashclient([
+ "--address", self.server_address,
+ "clean-unused", "0",
+ ], check=True)
+ result_outhash = self.client.get_outhash(self.METHOD, outhash, taskhash, False)
+ self.assertIsNone(result_outhash)
+
+ def test_refresh_token(self):
+ admin_client = self.start_auth_server()
+
+ user = admin_client.new_user("test-user", ["@read", "@report"])
+
+ p = self.run_hashclient([
+ "--address", self.auth_server_address,
+ "--login", user["username"],
+ "--password", user["token"],
+ "refresh-token"
+ ], check=True)
+
+ new_token = None
+ for l in p.stdout.splitlines():
+ l = l.rstrip()
+ m = re.match(r'Token: +(.*)$', l)
+ if m is not None:
+ new_token = m.group(1)
+
+ self.assertTrue(new_token)
+
+ print("New token is %r" % new_token)
+
+ self.run_hashclient([
+ "--address", self.auth_server_address,
+ "--login", user["username"],
+ "--password", new_token,
+ "get-user"
+ ], check=True)
+
+ def test_set_user_perms(self):
+ admin_client = self.start_auth_server()
+
+ user = admin_client.new_user("test-user", ["@read"])
+
+ self.run_hashclient([
+ "--address", self.auth_server_address,
+ "--login", admin_client.username,
+ "--password", admin_client.password,
+ "set-user-perms",
+ "-u", user["username"],
+ "@read", "@report",
+ ], check=True)
+
+ new_user = admin_client.get_user(user["username"])
+
+ self.assertEqual(set(new_user["permissions"]), {"@read", "@report"})
+
+ def test_get_user(self):
+ admin_client = self.start_auth_server()
+
+ user = admin_client.new_user("test-user", ["@read"])
+
+ p = self.run_hashclient([
+ "--address", self.auth_server_address,
+ "--login", admin_client.username,
+ "--password", admin_client.password,
+ "get-user",
+ "-u", user["username"],
+ ], check=True)
+
+ self.assertIn("Username:", p.stdout)
+ self.assertIn("Permissions:", p.stdout)
+
+ p = self.run_hashclient([
+ "--address", self.auth_server_address,
+ "--login", user["username"],
+ "--password", user["token"],
+ "get-user",
+ ], check=True)
+
+ self.assertIn("Username:", p.stdout)
+ self.assertIn("Permissions:", p.stdout)
+
+ def test_get_all_users(self):
+ admin_client = self.start_auth_server()
+
+ admin_client.new_user("test-user1", ["@read"])
+ admin_client.new_user("test-user2", ["@read"])
+
+ p = self.run_hashclient([
+ "--address", self.auth_server_address,
+ "--login", admin_client.username,
+ "--password", admin_client.password,
+ "get-all-users",
+ ], check=True)
+
+ self.assertIn("admin", p.stdout)
+ self.assertIn("test-user1", p.stdout)
+ self.assertIn("test-user2", p.stdout)
+
+ def test_new_user(self):
+ admin_client = self.start_auth_server()
+
+ p = self.run_hashclient([
+ "--address", self.auth_server_address,
+ "--login", admin_client.username,
+ "--password", admin_client.password,
+ "new-user",
+ "-u", "test-user",
+ "@read", "@report",
+ ], check=True)
+
+ new_token = None
+ for l in p.stdout.splitlines():
+ l = l.rstrip()
+ m = re.match(r'Token: +(.*)$', l)
+ if m is not None:
+ new_token = m.group(1)
+
+ self.assertTrue(new_token)
+
+ user = {
+ "username": "test-user",
+ "token": new_token,
+ }
+
+ self.assertUserPerms(user, ["@read", "@report"])
+
+ def test_delete_user(self):
+ admin_client = self.start_auth_server()
+
+ user = admin_client.new_user("test-user", ["@read"])
+
+ p = self.run_hashclient([
+ "--address", self.auth_server_address,
+ "--login", admin_client.username,
+ "--password", admin_client.password,
+ "delete-user",
+ "-u", user["username"],
+ ], check=True)
+
+ self.assertIsNone(admin_client.get_user(user["username"]))
+
+ def test_get_db_usage(self):
+ p = self.run_hashclient([
+ "--address", self.server_address,
+ "get-db-usage",
+ ], check=True)
+
+ def test_get_db_query_columns(self):
+ p = self.run_hashclient([
+ "--address", self.server_address,
+ "get-db-query-columns",
+ ], check=True)
+
+ def test_gc(self):
+ taskhash = '53b8dce672cb6d0c73170be43f540460bfc347b4'
+ outhash = '5a9cb1649625f0bf41fc7791b635cd9c2d7118c7f021ba87dcd03f72b67ce7a8'
+ unihash = 'f37918cc02eb5a520b1aff86faacbc0a38124646'
+
+ result = self.client.report_unihash(taskhash, self.METHOD, outhash, unihash)
+ self.assertEqual(result['unihash'], unihash, 'Server returned bad unihash')
+
+ taskhash2 = '3bf6f1e89d26205aec90da04854fbdbf73afe6b4'
+ outhash2 = '77623a549b5b1a31e3732dfa8fe61d7ce5d44b3370f253c5360e136b852967b4'
+ unihash2 = 'af36b199320e611fbb16f1f277d3ee1d619ca58b'
+
+ result = self.client.report_unihash(taskhash2, self.METHOD, outhash2, unihash2)
+ self.assertClientGetHash(self.client, taskhash2, unihash2)
+
+ # Mark the first unihash to be kept
+ self.run_hashclient([
+ "--address", self.server_address,
+ "gc-mark", "ABC",
+ "--where", "unihash", unihash,
+ "--where", "method", self.METHOD
+ ], check=True)
+
+ # Second hash is still there; mark doesn't delete hashes
+ self.assertClientGetHash(self.client, taskhash2, unihash2)
+
+ self.run_hashclient([
+ "--address", self.server_address,
+ "gc-sweep", "ABC",
+ ], check=True)
+
+ # Hash is gone. Taskhash is returned for second hash
+ self.assertClientGetHash(self.client, taskhash2, None)
+ # First hash is still present
+ self.assertClientGetHash(self.client, taskhash, unihash)
+
+
class TestHashEquivalenceUnixServer(HashEquivalenceTestSetup, HashEquivalenceCommonTests, unittest.TestCase):
def get_server_addr(self, server_idx):
return "unix://" + os.path.join(self.temp_dir.name, 'sock%d' % server_idx)
@@ -431,3 +1437,77 @@ class TestHashEquivalenceTCPServer(HashEquivalenceTestSetup, HashEquivalenceComm
# If IPv6 is enabled, it should be safe to use localhost directly, in general
# case it is more reliable to resolve the IP address explicitly.
return socket.gethostbyname("localhost") + ":0"
+
+
+class TestHashEquivalenceWebsocketServer(HashEquivalenceTestSetup, HashEquivalenceCommonTests, unittest.TestCase):
+ def setUp(self):
+ try:
+ import websockets
+ except ImportError as e:
+ self.skipTest(str(e))
+
+ super().setUp()
+
+ def get_server_addr(self, server_idx):
+ # Some hosts cause asyncio module to misbehave, when IPv6 is not enabled.
+ # If IPv6 is enabled, it should be safe to use localhost directly, in general
+ # case it is more reliable to resolve the IP address explicitly.
+ host = socket.gethostbyname("localhost")
+ return "ws://%s:0" % host
+
+
+class TestHashEquivalenceWebsocketsSQLAlchemyServer(TestHashEquivalenceWebsocketServer):
+ def setUp(self):
+ try:
+ import sqlalchemy
+ import aiosqlite
+ except ImportError as e:
+ self.skipTest(str(e))
+
+ super().setUp()
+
+ def make_dbpath(self):
+ return "sqlite+aiosqlite:///%s" % os.path.join(self.temp_dir.name, "db%d.sqlite" % self.server_index)
+
+
+class TestHashEquivalenceExternalServer(HashEquivalenceTestSetup, HashEquivalenceCommonTests, unittest.TestCase):
+ def get_env(self, name):
+ v = os.environ.get(name)
+ if not v:
+ self.skipTest(f'{name} not defined to test an external server')
+ return v
+
+ def start_test_server(self):
+ return self.get_env('BB_TEST_HASHSERV')
+
+ def start_server(self, *args, **kwargs):
+ self.skipTest('Cannot start local server when testing external servers')
+
+ def start_auth_server(self):
+
+ self.auth_server_address = self.server_address
+ self.admin_client = self.start_client(
+ self.server_address,
+ username=self.get_env('BB_TEST_HASHSERV_USERNAME'),
+ password=self.get_env('BB_TEST_HASHSERV_PASSWORD'),
+ )
+ return self.admin_client
+
+ def setUp(self):
+ super().setUp()
+ if "BB_TEST_HASHSERV_USERNAME" in os.environ:
+ self.client = self.start_client(
+ self.server_address,
+ username=os.environ["BB_TEST_HASHSERV_USERNAME"],
+ password=os.environ["BB_TEST_HASHSERV_PASSWORD"],
+ )
+ self.client.remove({"method": self.METHOD})
+
+ def tearDown(self):
+ self.client.remove({"method": self.METHOD})
+ super().tearDown()
+
+
+ def test_auth_get_all_users(self):
+ self.skipTest("Cannot test all users with external server")
+
diff --git a/bitbake/lib/layerindexlib/__init__.py b/bitbake/lib/layerindexlib/__init__.py
index ac03d89876..c3265ddaa1 100644
--- a/bitbake/lib/layerindexlib/__init__.py
+++ b/bitbake/lib/layerindexlib/__init__.py
@@ -178,9 +178,9 @@ class LayerIndex():
'''Load the layerindex.
indexURI - An index to load. (Use multiple calls to load multiple indexes)
-
+
reload - If reload is True, then any previously loaded indexes will be forgotten.
-
+
load - List of elements to load. Default loads all items.
Note: plugs may ignore this.
@@ -383,7 +383,14 @@ layerBranches set. If not, they are effectively blank.'''
# Get a list of dependencies and then recursively process them
for layerdependency in layerbranch.index.layerDependencies_layerBranchId[layerbranch.id]:
- deplayerbranch = layerdependency.dependency_layerBranch
+ try:
+ deplayerbranch = layerdependency.dependency_layerBranch
+ except AttributeError as e:
+ logger.error('LayerBranch does not exist for dependent layer {}:{}\n' \
+ ' Cannot continue successfully.\n' \
+ ' You might be able to resolve this by checking out the layer locally.\n' \
+ ' Consider reaching out the to the layer maintainers or the layerindex admins' \
+ .format(layerdependency.dependency.name, layerbranch.branch.name))
if ignores and deplayerbranch.layer.name in ignores:
continue
@@ -846,7 +853,7 @@ class LayerIndexObj():
continue
for layerdependency in layerbranch.index.layerDependencies_layerBranchId[layerbranch.id]:
- deplayerbranch = layerdependency.dependency_layerBranch
+ deplayerbranch = layerdependency.dependency_layerBranch or None
if ignores and deplayerbranch.layer.name in ignores:
continue
diff --git a/bitbake/lib/ply/yacc.py b/bitbake/lib/ply/yacc.py
index 767c4e4674..381b50cf0b 100644
--- a/bitbake/lib/ply/yacc.py
+++ b/bitbake/lib/ply/yacc.py
@@ -2798,7 +2798,14 @@ class ParserReflect(object):
def signature(self):
try:
import hashlib
+ except ImportError:
+ raise RuntimeError("Unable to import hashlib")
+ try:
sig = hashlib.new('MD5', usedforsecurity=False)
+ except TypeError:
+ # Some configurations don't appear to support two arguments
+ sig = hashlib.new('MD5')
+ try:
if self.start:
sig.update(self.start.encode('latin-1'))
if self.prec:
diff --git a/bitbake/lib/progressbar/progressbar.py b/bitbake/lib/progressbar/progressbar.py
index e2b6ba1083..d4da10ab75 100644
--- a/bitbake/lib/progressbar/progressbar.py
+++ b/bitbake/lib/progressbar/progressbar.py
@@ -253,7 +253,7 @@ class ProgressBar(object):
if (self.maxval is not UnknownLength
and not 0 <= value <= self.maxval):
- raise ValueError('Value out of range')
+ self.maxval = value
self.currval = value
diff --git a/bitbake/lib/prserv/__init__.py b/bitbake/lib/prserv/__init__.py
index 9961040b58..0e0aa34d0e 100644
--- a/bitbake/lib/prserv/__init__.py
+++ b/bitbake/lib/prserv/__init__.py
@@ -1,17 +1,19 @@
#
+# Copyright BitBake Contributors
+#
# SPDX-License-Identifier: GPL-2.0-only
#
__version__ = "1.0.0"
import os, time
-import sys,logging
+import sys, logging
def init_logger(logfile, loglevel):
numeric_level = getattr(logging, loglevel.upper(), None)
if not isinstance(numeric_level, int):
- raise ValueError('Invalid log level: %s' % loglevel)
- FORMAT = '%(asctime)-15s %(message)s'
+ raise ValueError("Invalid log level: %s" % loglevel)
+ FORMAT = "%(asctime)-15s %(message)s"
logging.basicConfig(level=numeric_level, filename=logfile, format=FORMAT)
class NotFoundError(Exception):
diff --git a/bitbake/lib/prserv/client.py b/bitbake/lib/prserv/client.py
index a3f19ddafc..8471ee3046 100644
--- a/bitbake/lib/prserv/client.py
+++ b/bitbake/lib/prserv/client.py
@@ -1,4 +1,6 @@
#
+# Copyright BitBake Contributors
+#
# SPDX-License-Identifier: GPL-2.0-only
#
@@ -9,40 +11,61 @@ logger = logging.getLogger("BitBake.PRserv")
class PRAsyncClient(bb.asyncrpc.AsyncClient):
def __init__(self):
- super().__init__('PRSERVICE', '1.0', logger)
+ super().__init__("PRSERVICE", "1.0", logger)
async def getPR(self, version, pkgarch, checksum):
- response = await self.send_message(
- {'get-pr': {'version': version, 'pkgarch': pkgarch, 'checksum': checksum}}
+ response = await self.invoke(
+ {"get-pr": {"version": version, "pkgarch": pkgarch, "checksum": checksum}}
+ )
+ if response:
+ return response["value"]
+
+ async def test_pr(self, version, pkgarch, checksum):
+ response = await self.invoke(
+ {"test-pr": {"version": version, "pkgarch": pkgarch, "checksum": checksum}}
+ )
+ if response:
+ return response["value"]
+
+ async def test_package(self, version, pkgarch):
+ response = await self.invoke(
+ {"test-package": {"version": version, "pkgarch": pkgarch}}
+ )
+ if response:
+ return response["value"]
+
+ async def max_package_pr(self, version, pkgarch):
+ response = await self.invoke(
+ {"max-package-pr": {"version": version, "pkgarch": pkgarch}}
)
if response:
- return response['value']
+ return response["value"]
async def importone(self, version, pkgarch, checksum, value):
- response = await self.send_message(
- {'import-one': {'version': version, 'pkgarch': pkgarch, 'checksum': checksum, 'value': value}}
+ response = await self.invoke(
+ {"import-one": {"version": version, "pkgarch": pkgarch, "checksum": checksum, "value": value}}
)
if response:
- return response['value']
+ return response["value"]
async def export(self, version, pkgarch, checksum, colinfo):
- response = await self.send_message(
- {'export': {'version': version, 'pkgarch': pkgarch, 'checksum': checksum, 'colinfo': colinfo}}
+ response = await self.invoke(
+ {"export": {"version": version, "pkgarch": pkgarch, "checksum": checksum, "colinfo": colinfo}}
)
if response:
- return (response['metainfo'], response['datainfo'])
+ return (response["metainfo"], response["datainfo"])
async def is_readonly(self):
- response = await self.send_message(
- {'is-readonly': {}}
+ response = await self.invoke(
+ {"is-readonly": {}}
)
if response:
- return response['readonly']
+ return response["readonly"]
class PRClient(bb.asyncrpc.Client):
def __init__(self):
super().__init__()
- self._add_methods('getPR', 'importone', 'export', 'is_readonly')
+ self._add_methods("getPR", "test_pr", "test_package", "importone", "export", "is_readonly")
def _get_async_client(self):
return PRAsyncClient()
diff --git a/bitbake/lib/prserv/db.py b/bitbake/lib/prserv/db.py
index 2710d4a225..eb41508198 100644
--- a/bitbake/lib/prserv/db.py
+++ b/bitbake/lib/prserv/db.py
@@ -1,4 +1,6 @@
#
+# Copyright BitBake Contributors
+#
# SPDX-License-Identifier: GPL-2.0-only
#
@@ -36,9 +38,9 @@ class PRTable(object):
self.read_only = read_only
self.dirty = False
if nohist:
- self.table = "%s_nohist" % table
+ self.table = "%s_nohist" % table
else:
- self.table = "%s_hist" % table
+ self.table = "%s_hist" % table
if self.read_only:
table_exists = self._execute(
@@ -62,7 +64,7 @@ class PRTable(object):
try:
return self.conn.execute(*query)
except sqlite3.OperationalError as exc:
- if 'is locked' in str(exc) and end > time.time():
+ if "is locked" in str(exc) and end > time.time():
continue
raise exc
@@ -76,7 +78,53 @@ class PRTable(object):
self.sync()
self.dirty = False
- def _getValueHist(self, version, pkgarch, checksum):
+ def test_package(self, version, pkgarch):
+ """Returns whether the specified package version is found in the database for the specified architecture"""
+
+ # Just returns the value if found or None otherwise
+ data=self._execute("SELECT value FROM %s WHERE version=? AND pkgarch=?;" % self.table,
+ (version, pkgarch))
+ row=data.fetchone()
+ if row is not None:
+ return True
+ else:
+ return False
+
+ def test_value(self, version, pkgarch, value):
+ """Returns whether the specified value is found in the database for the specified package and architecture"""
+
+ # Just returns the value if found or None otherwise
+ data=self._execute("SELECT value FROM %s WHERE version=? AND pkgarch=? and value=?;" % self.table,
+ (version, pkgarch, value))
+ row=data.fetchone()
+ if row is not None:
+ return True
+ else:
+ return False
+
+ def find_value(self, version, pkgarch, checksum):
+ """Returns the value for the specified checksum if found or None otherwise."""
+
+ data=self._execute("SELECT value FROM %s WHERE version=? AND pkgarch=? AND checksum=?;" % self.table,
+ (version, pkgarch, checksum))
+ row=data.fetchone()
+ if row is not None:
+ return row[0]
+ else:
+ return None
+
+ def find_max_value(self, version, pkgarch):
+ """Returns the greatest value for (version, pkgarch), or None if not found. Doesn't create a new value"""
+
+ data = self._execute("SELECT max(value) FROM %s where version=? AND pkgarch=?;" % (self.table),
+ (version, pkgarch))
+ row = data.fetchone()
+ if row is not None:
+ return row[0]
+ else:
+ return None
+
+ def _get_value_hist(self, version, pkgarch, checksum):
data=self._execute("SELECT value FROM %s WHERE version=? AND pkgarch=? AND checksum=?;" % self.table,
(version, pkgarch, checksum))
row=data.fetchone()
@@ -85,7 +133,7 @@ class PRTable(object):
else:
#no value found, try to insert
if self.read_only:
- data = self._execute("SELECT ifnull(max(value)+1,0) FROM %s where version=? AND pkgarch=?;" % (self.table),
+ data = self._execute("SELECT ifnull(max(value)+1, 0) FROM %s where version=? AND pkgarch=?;" % (self.table),
(version, pkgarch))
row = data.fetchone()
if row is not None:
@@ -94,9 +142,9 @@ class PRTable(object):
return 0
try:
- self._execute("INSERT INTO %s VALUES (?, ?, ?, (select ifnull(max(value)+1,0) from %s where version=? AND pkgarch=?));"
- % (self.table,self.table),
- (version,pkgarch, checksum,version, pkgarch))
+ self._execute("INSERT INTO %s VALUES (?, ?, ?, (select ifnull(max(value)+1, 0) from %s where version=? AND pkgarch=?));"
+ % (self.table, self.table),
+ (version, pkgarch, checksum, version, pkgarch))
except sqlite3.IntegrityError as exc:
logger.error(str(exc))
@@ -110,10 +158,10 @@ class PRTable(object):
else:
raise prserv.NotFoundError
- def _getValueNohist(self, version, pkgarch, checksum):
+ def _get_value_no_hist(self, version, pkgarch, checksum):
data=self._execute("SELECT value FROM %s \
WHERE version=? AND pkgarch=? AND checksum=? AND \
- value >= (select max(value) from %s where version=? AND pkgarch=?);"
+ value >= (select max(value) from %s where version=? AND pkgarch=?);"
% (self.table, self.table),
(version, pkgarch, checksum, version, pkgarch))
row=data.fetchone()
@@ -122,17 +170,13 @@ class PRTable(object):
else:
#no value found, try to insert
if self.read_only:
- data = self._execute("SELECT ifnull(max(value)+1,0) FROM %s where version=? AND pkgarch=?;" % (self.table),
+ data = self._execute("SELECT ifnull(max(value)+1, 0) FROM %s where version=? AND pkgarch=?;" % (self.table),
(version, pkgarch))
- row = data.fetchone()
- if row is not None:
- return row[0]
- else:
- return 0
+ return data.fetchone()[0]
try:
- self._execute("INSERT OR REPLACE INTO %s VALUES (?, ?, ?, (select ifnull(max(value)+1,0) from %s where version=? AND pkgarch=?));"
- % (self.table,self.table),
+ self._execute("INSERT OR REPLACE INTO %s VALUES (?, ?, ?, (select ifnull(max(value)+1, 0) from %s where version=? AND pkgarch=?));"
+ % (self.table, self.table),
(version, pkgarch, checksum, version, pkgarch))
except sqlite3.IntegrityError as exc:
logger.error(str(exc))
@@ -148,17 +192,17 @@ class PRTable(object):
else:
raise prserv.NotFoundError
- def getValue(self, version, pkgarch, checksum):
+ def get_value(self, version, pkgarch, checksum):
if self.nohist:
- return self._getValueNohist(version, pkgarch, checksum)
+ return self._get_value_no_hist(version, pkgarch, checksum)
else:
- return self._getValueHist(version, pkgarch, checksum)
+ return self._get_value_hist(version, pkgarch, checksum)
- def _importHist(self, version, pkgarch, checksum, value):
+ def _import_hist(self, version, pkgarch, checksum, value):
if self.read_only:
return None
- val = None
+ val = None
data = self._execute("SELECT value FROM %s WHERE version=? AND pkgarch=? AND checksum=?;" % self.table,
(version, pkgarch, checksum))
row = data.fetchone()
@@ -181,27 +225,27 @@ class PRTable(object):
val = row[0]
return val
- def _importNohist(self, version, pkgarch, checksum, value):
+ def _import_no_hist(self, version, pkgarch, checksum, value):
if self.read_only:
return None
try:
#try to insert
self._execute("INSERT INTO %s VALUES (?, ?, ?, ?);" % (self.table),
- (version, pkgarch, checksum,value))
+ (version, pkgarch, checksum, value))
except sqlite3.IntegrityError as exc:
#already have the record, try to update
try:
- self._execute("UPDATE %s SET value=? WHERE version=? AND pkgarch=? AND checksum=? AND value<?"
+ self._execute("UPDATE %s SET value=? WHERE version=? AND pkgarch=? AND checksum=? AND value<?"
% (self.table),
- (value,version,pkgarch,checksum,value))
+ (value, version, pkgarch, checksum, value))
except sqlite3.IntegrityError as exc:
logger.error(str(exc))
self.dirty = True
data = self._execute("SELECT value FROM %s WHERE version=? AND pkgarch=? AND checksum=? AND value>=?;" % self.table,
- (version,pkgarch,checksum,value))
+ (version, pkgarch, checksum, value))
row=data.fetchone()
if row is not None:
return row[0]
@@ -210,33 +254,33 @@ class PRTable(object):
def importone(self, version, pkgarch, checksum, value):
if self.nohist:
- return self._importNohist(version, pkgarch, checksum, value)
+ return self._import_no_hist(version, pkgarch, checksum, value)
else:
- return self._importHist(version, pkgarch, checksum, value)
+ return self._import_hist(version, pkgarch, checksum, value)
def export(self, version, pkgarch, checksum, colinfo):
metainfo = {}
- #column info
+ #column info
if colinfo:
- metainfo['tbl_name'] = self.table
- metainfo['core_ver'] = prserv.__version__
- metainfo['col_info'] = []
+ metainfo["tbl_name"] = self.table
+ metainfo["core_ver"] = prserv.__version__
+ metainfo["col_info"] = []
data = self._execute("PRAGMA table_info(%s);" % self.table)
for row in data:
col = {}
- col['name'] = row['name']
- col['type'] = row['type']
- col['notnull'] = row['notnull']
- col['dflt_value'] = row['dflt_value']
- col['pk'] = row['pk']
- metainfo['col_info'].append(col)
+ col["name"] = row["name"]
+ col["type"] = row["type"]
+ col["notnull"] = row["notnull"]
+ col["dflt_value"] = row["dflt_value"]
+ col["pk"] = row["pk"]
+ metainfo["col_info"].append(col)
#data info
datainfo = []
if self.nohist:
sqlstmt = "SELECT T1.version, T1.pkgarch, T1.checksum, T1.value FROM %s as T1, \
- (SELECT version,pkgarch,max(value) as maxvalue FROM %s GROUP BY version,pkgarch) as T2 \
+ (SELECT version, pkgarch, max(value) as maxvalue FROM %s GROUP BY version, pkgarch) as T2 \
WHERE T1.version=T2.version AND T1.pkgarch=T2.pkgarch AND T1.value=T2.maxvalue " % (self.table, self.table)
else:
sqlstmt = "SELECT * FROM %s as T1 WHERE 1=1 " % self.table
@@ -259,12 +303,12 @@ class PRTable(object):
else:
data = self._execute(sqlstmt)
for row in data:
- if row['version']:
+ if row["version"]:
col = {}
- col['version'] = row['version']
- col['pkgarch'] = row['pkgarch']
- col['checksum'] = row['checksum']
- col['value'] = row['value']
+ col["version"] = row["version"]
+ col["pkgarch"] = row["pkgarch"]
+ col["checksum"] = row["checksum"]
+ col["value"] = row["value"]
datainfo.append(col)
return (metainfo, datainfo)
@@ -273,7 +317,7 @@ class PRTable(object):
for line in self.conn.iterdump():
writeCount = writeCount + len(line) + 1
fd.write(line)
- fd.write('\n')
+ fd.write("\n")
return writeCount
class PRData(object):
@@ -300,7 +344,7 @@ class PRData(object):
def disconnect(self):
self.connection.close()
- def __getitem__(self,tblname):
+ def __getitem__(self, tblname):
if not isinstance(tblname, str):
raise TypeError("tblname argument must be a string, not '%s'" %
type(tblname))
@@ -314,4 +358,4 @@ class PRData(object):
if tblname in self._tables:
del self._tables[tblname]
logger.info("drop table %s" % (tblname))
- self.connection.execute("DROP TABLE IF EXISTS %s;" % tblname)
+ self.connection.execute("DROP TABLE IF EXISTS %s;" % tblname)
diff --git a/bitbake/lib/prserv/serv.py b/bitbake/lib/prserv/serv.py
index 0a20b927c7..dc4be5b620 100644
--- a/bitbake/lib/prserv/serv.py
+++ b/bitbake/lib/prserv/serv.py
@@ -1,4 +1,6 @@
#
+# Copyright BitBake Contributors
+#
# SPDX-License-Identifier: GPL-2.0-only
#
@@ -18,77 +20,101 @@ PIDPREFIX = "/tmp/PRServer_%s_%s.pid"
singleton = None
class PRServerClient(bb.asyncrpc.AsyncServerConnection):
- def __init__(self, reader, writer, table, read_only):
- super().__init__(reader, writer, 'PRSERVICE', logger)
+ def __init__(self, socket, server):
+ super().__init__(socket, "PRSERVICE", server.logger)
+ self.server = server
+
self.handlers.update({
- 'get-pr': self.handle_get_pr,
- 'import-one': self.handle_import_one,
- 'export': self.handle_export,
- 'is-readonly': self.handle_is_readonly,
+ "get-pr": self.handle_get_pr,
+ "test-pr": self.handle_test_pr,
+ "test-package": self.handle_test_package,
+ "max-package-pr": self.handle_max_package_pr,
+ "import-one": self.handle_import_one,
+ "export": self.handle_export,
+ "is-readonly": self.handle_is_readonly,
})
- self.table = table
- self.read_only = read_only
def validate_proto_version(self):
return (self.proto_version == (1, 0))
async def dispatch_message(self, msg):
try:
- await super().dispatch_message(msg)
+ return await super().dispatch_message(msg)
except:
- self.table.sync()
+ self.server.table.sync()
raise
+ else:
+ self.server.table.sync_if_dirty()
- self.table.sync_if_dirty()
+ async def handle_test_pr(self, request):
+ '''Finds the PR value corresponding to the request. If not found, returns None and doesn't insert a new value'''
+ version = request["version"]
+ pkgarch = request["pkgarch"]
+ checksum = request["checksum"]
+
+ value = self.server.table.find_value(version, pkgarch, checksum)
+ return {"value": value}
+
+ async def handle_test_package(self, request):
+ '''Tells whether there are entries for (version, pkgarch) in the db. Returns True or False'''
+ version = request["version"]
+ pkgarch = request["pkgarch"]
+
+ value = self.server.table.test_package(version, pkgarch)
+ return {"value": value}
+
+ async def handle_max_package_pr(self, request):
+ '''Finds the greatest PR value for (version, pkgarch) in the db. Returns None if no entry was found'''
+ version = request["version"]
+ pkgarch = request["pkgarch"]
+
+ value = self.server.table.find_max_value(version, pkgarch)
+ return {"value": value}
async def handle_get_pr(self, request):
- version = request['version']
- pkgarch = request['pkgarch']
- checksum = request['checksum']
+ version = request["version"]
+ pkgarch = request["pkgarch"]
+ checksum = request["checksum"]
response = None
try:
- value = self.table.getValue(version, pkgarch, checksum)
- response = {'value': value}
+ value = self.server.table.get_value(version, pkgarch, checksum)
+ response = {"value": value}
except prserv.NotFoundError:
- logger.error("can not find value for (%s, %s)",version, checksum)
- except sqlite3.Error as exc:
- logger.error(str(exc))
+ self.logger.error("failure storing value in database for (%s, %s)",version, checksum)
- self.write_message(response)
+ return response
async def handle_import_one(self, request):
response = None
- if not self.read_only:
- version = request['version']
- pkgarch = request['pkgarch']
- checksum = request['checksum']
- value = request['value']
+ if not self.server.read_only:
+ version = request["version"]
+ pkgarch = request["pkgarch"]
+ checksum = request["checksum"]
+ value = request["value"]
- value = self.table.importone(version, pkgarch, checksum, value)
+ value = self.server.table.importone(version, pkgarch, checksum, value)
if value is not None:
- response = {'value': value}
+ response = {"value": value}
- self.write_message(response)
+ return response
async def handle_export(self, request):
- version = request['version']
- pkgarch = request['pkgarch']
- checksum = request['checksum']
- colinfo = request['colinfo']
+ version = request["version"]
+ pkgarch = request["pkgarch"]
+ checksum = request["checksum"]
+ colinfo = request["colinfo"]
try:
- (metainfo, datainfo) = self.table.export(version, pkgarch, checksum, colinfo)
+ (metainfo, datainfo) = self.server.table.export(version, pkgarch, checksum, colinfo)
except sqlite3.Error as exc:
- logger.error(str(exc))
+ self.logger.error(str(exc))
metainfo = datainfo = None
- response = {'metainfo': metainfo, 'datainfo': datainfo}
- self.write_message(response)
+ return {"metainfo": metainfo, "datainfo": datainfo}
async def handle_is_readonly(self, request):
- response = {'readonly': self.read_only}
- self.write_message(response)
+ return {"readonly": self.server.read_only}
class PRServer(bb.asyncrpc.AsyncServer):
def __init__(self, dbfile, read_only=False):
@@ -97,20 +123,23 @@ class PRServer(bb.asyncrpc.AsyncServer):
self.table = None
self.read_only = read_only
- def accept_client(self, reader, writer):
- return PRServerClient(reader, writer, self.table, self.read_only)
+ def accept_client(self, socket):
+ return PRServerClient(socket, self)
- def _serve_forever(self):
+ def start(self):
+ tasks = super().start()
self.db = prserv.db.PRData(self.dbfile, read_only=self.read_only)
self.table = self.db["PRMAIN"]
- logger.info("Started PRServer with DBfile: %s, Address: %s, PID: %s" %
+ self.logger.info("Started PRServer with DBfile: %s, Address: %s, PID: %s" %
(self.dbfile, self.address, str(os.getpid())))
- super()._serve_forever()
+ return tasks
+ async def stop(self):
self.table.sync_if_dirty()
self.db.disconnect()
+ await super().stop()
def signal_handler(self):
super().signal_handler()
@@ -127,12 +156,12 @@ class PRServSingleton(object):
def start(self):
self.prserv = PRServer(self.dbfile)
self.prserv.start_tcp_server(socket.gethostbyname(self.host), self.port)
- self.process = self.prserv.serve_as_process()
+ self.process = self.prserv.serve_as_process(log_level=logging.WARNING)
if not self.prserv.address:
raise PRServiceConfigError
if not self.port:
- self.port = int(self.prserv.address.rsplit(':', 1)[1])
+ self.port = int(self.prserv.address.rsplit(":", 1)[1])
def run_as_daemon(func, pidfile, logfile):
"""
@@ -168,18 +197,18 @@ def run_as_daemon(func, pidfile, logfile):
# stdout/stderr or it could be 'real' unix fd forking where we need
# to physically close the fds to prevent the program launching us from
# potentially hanging on a pipe. Handle both cases.
- si = open('/dev/null', 'r')
+ si = open("/dev/null", "r")
try:
- os.dup2(si.fileno(),sys.stdin.fileno())
+ os.dup2(si.fileno(), sys.stdin.fileno())
except (AttributeError, io.UnsupportedOperation):
sys.stdin = si
- so = open(logfile, 'a+')
+ so = open(logfile, "a+")
try:
- os.dup2(so.fileno(),sys.stdout.fileno())
+ os.dup2(so.fileno(), sys.stdout.fileno())
except (AttributeError, io.UnsupportedOperation):
sys.stdout = so
try:
- os.dup2(so.fileno(),sys.stderr.fileno())
+ os.dup2(so.fileno(), sys.stderr.fileno())
except (AttributeError, io.UnsupportedOperation):
sys.stderr = so
@@ -197,7 +226,7 @@ def run_as_daemon(func, pidfile, logfile):
# write pidfile
pid = str(os.getpid())
- with open(pidfile, 'w') as pf:
+ with open(pidfile, "w") as pf:
pf.write("%s\n" % pid)
func()
@@ -242,15 +271,15 @@ def stop_daemon(host, port):
# so at least advise the user which ports the corresponding server is listening
ports = []
portstr = ""
- for pf in glob.glob(PIDPREFIX % (ip,'*')):
+ for pf in glob.glob(PIDPREFIX % (ip, "*")):
bn = os.path.basename(pf)
root, _ = os.path.splitext(bn)
- ports.append(root.split('_')[-1])
+ ports.append(root.split("_")[-1])
if len(ports):
- portstr = "Wrong port? Other ports listening at %s: %s" % (host, ' '.join(ports))
+ portstr = "Wrong port? Other ports listening at %s: %s" % (host, " ".join(ports))
sys.stderr.write("pidfile %s does not exist. Daemon not running? %s\n"
- % (pidfile,portstr))
+ % (pidfile, portstr))
return 1
try:
@@ -259,8 +288,11 @@ def stop_daemon(host, port):
os.kill(pid, signal.SIGTERM)
time.sleep(0.1)
- if os.path.exists(pidfile):
+ try:
os.remove(pidfile)
+ except FileNotFoundError:
+ # The PID file might have been removed by the exiting process
+ pass
except OSError as e:
err = str(e)
@@ -278,7 +310,7 @@ def is_running(pid):
return True
def is_local_special(host, port):
- if (host == 'localhost' or host == '127.0.0.1') and not port:
+ if (host == "localhost" or host == "127.0.0.1") and not port:
return True
else:
return False
@@ -289,7 +321,7 @@ class PRServiceConfigError(Exception):
def auto_start(d):
global singleton
- host_params = list(filter(None, (d.getVar('PRSERV_HOST') or '').split(':')))
+ host_params = list(filter(None, (d.getVar("PRSERV_HOST") or "").split(":")))
if not host_params:
# Shutdown any existing PR Server
auto_shutdown()
@@ -298,7 +330,7 @@ def auto_start(d):
if len(host_params) != 2:
# Shutdown any existing PR Server
auto_shutdown()
- logger.critical('\n'.join(['PRSERV_HOST: incorrect format',
+ logger.critical("\n".join(["PRSERV_HOST: incorrect format",
'Usage: PRSERV_HOST = "<hostname>:<port>"']))
raise PRServiceConfigError
@@ -342,17 +374,17 @@ def auto_shutdown():
def ping(host, port):
from . import client
- conn = client.PRClient()
- conn.connect_tcp(host, port)
- return conn.ping()
+ with client.PRClient() as conn:
+ conn.connect_tcp(host, port)
+ return conn.ping()
def connect(host, port):
from . import client
global singleton
- if host.strip().lower() == 'localhost' and not port:
- host = 'localhost'
+ if host.strip().lower() == "localhost" and not port:
+ host = "localhost"
port = singleton.port
conn = client.PRClient()
diff --git a/bitbake/lib/toaster/bldcollector/urls.py b/bitbake/lib/toaster/bldcollector/urls.py
index efd67a81a5..3c34070351 100644
--- a/bitbake/lib/toaster/bldcollector/urls.py
+++ b/bitbake/lib/toaster/bldcollector/urls.py
@@ -6,7 +6,7 @@
# SPDX-License-Identifier: GPL-2.0-only
#
-from django.conf.urls import url
+from django.urls import re_path as url
import bldcollector.views
diff --git a/bitbake/lib/toaster/bldcollector/views.py b/bitbake/lib/toaster/bldcollector/views.py
index 04cd8b3dd4..bdf38ae6e8 100644
--- a/bitbake/lib/toaster/bldcollector/views.py
+++ b/bitbake/lib/toaster/bldcollector/views.py
@@ -14,8 +14,11 @@ import subprocess
import toastermain
from django.views.decorators.csrf import csrf_exempt
+from toastermain.logs import log_view_mixin
+
@csrf_exempt
+@log_view_mixin
def eventfile(request):
""" Receives a file by POST, and runs toaster-eventreply on this file """
if request.method != "POST":
diff --git a/bitbake/lib/toaster/bldcontrol/models.py b/bitbake/lib/toaster/bldcontrol/models.py
index c2f302da24..42750e7180 100644
--- a/bitbake/lib/toaster/bldcontrol/models.py
+++ b/bitbake/lib/toaster/bldcontrol/models.py
@@ -4,7 +4,7 @@
from __future__ import unicode_literals
from django.db import models
-from django.utils.encoding import force_text
+from django.utils.encoding import force_str
from orm.models import Project, Build, Layer_Version
import logging
@@ -124,7 +124,7 @@ class BuildRequest(models.Model):
return self.brvariable_set.get(name="MACHINE").value
def __str__(self):
- return force_text('%s %s' % (self.project, self.get_state_display()))
+ return force_str('%s %s' % (self.project, self.get_state_display()))
# These tables specify the settings for running an actual build.
# They MUST be kept in sync with the tables in orm.models.Project*
diff --git a/bitbake/lib/toaster/logs/.gitignore b/bitbake/lib/toaster/logs/.gitignore
new file mode 100644
index 0000000000..e5ebf25a49
--- /dev/null
+++ b/bitbake/lib/toaster/logs/.gitignore
@@ -0,0 +1 @@
+*.log*
diff --git a/bitbake/lib/toaster/manage.py b/bitbake/lib/toaster/manage.py
index ae32619d12..f8de49c264 100755
--- a/bitbake/lib/toaster/manage.py
+++ b/bitbake/lib/toaster/manage.py
@@ -1,5 +1,7 @@
#!/usr/bin/env python3
#
+# Copyright BitBake Contributors
+#
# SPDX-License-Identifier: GPL-2.0-only
#
diff --git a/bitbake/lib/toaster/orm/fixtures/README b/bitbake/lib/toaster/orm/fixtures/README
index 1b1c660aac..7cd745e26b 100644
--- a/bitbake/lib/toaster/orm/fixtures/README
+++ b/bitbake/lib/toaster/orm/fixtures/README
@@ -27,4 +27,4 @@ Data can be provided in XML, JSON and if installed YAML formats.
Use the django management command manage.py loaddata <your fixture file>
For further information see the Django command documentation at:
-https://docs.djangoproject.com/en/1.8/ref/django-admin/#django-admin-loaddata
+https://docs.djangoproject.com/en/3.2/ref/django-admin/#django-admin-loaddata
diff --git a/bitbake/lib/toaster/orm/fixtures/gen_fixtures.py b/bitbake/lib/toaster/orm/fixtures/gen_fixtures.py
index 0d5f4533bf..71afe3914e 100755
--- a/bitbake/lib/toaster/orm/fixtures/gen_fixtures.py
+++ b/bitbake/lib/toaster/orm/fixtures/gen_fixtures.py
@@ -35,17 +35,19 @@ verbose = False
# [Codename, Yocto Project Version, Release Date, Current Version, Support Level, Poky Version, BitBake branch]
current_releases = [
# Release slot #1
- ['Kirkstone','3.5','April 2022','','Future - Long Term Support (until Apr. 2024)','27.0','1.54'],
-# ['Dunfell','3.1','April 2021','3.1.5 (March 2022)','Stable - Support for 13 months (until Apr. 2022)','23.0','1.46'],
+ ['Kirkstone','4.0','April 2022','4.0.8 (March 2023)','Stable - Long Term Support (until Apr. 2024)','','2.0'],
# Release slot #2 'local'
['HEAD','HEAD','','Local Yocto Project','HEAD','','HEAD'],
# Release slot #3 'master'
['Master','master','','Yocto Project master','master','','master'],
# Release slot #4
- ['Honister','3.4','October 2021','3.4.2 (February 2022)','Support for 7 months (until May 2022)','26.0','1.52'],
-# ['Gatesgarth','3.2','Oct 2020','3.2.4 (May 2021)','EOL','24.0','1.48'],
- # Optional Release slot #4
- ['Hardknott','3.3','April 2021','3.3.5 (March 2022)','Stable - Support for 13 months (until Apr. 2022)','25.0','1.50'],
+ ['Mickledore','4.2','April 2023','4.2.0 (April 2023)','Support for 7 months (until October 2023)','','2.4'],
+# ['Langdale','4.1','October 2022','4.1.2 (January 2023)','Support for 7 months (until May 2023)','','2.2'],
+# ['Honister','3.4','October 2021','3.4.2 (February 2022)','Support for 7 months (until May 2022)','26.0','1.52'],
+# ['Hardknott','3.3','April 2021','3.3.5 (March 2022)','Stable - Support for 13 months (until Apr. 2022)','25.0','1.50'],
+# ['Gatesgarth','3.2','Oct 2020','3.2.4 (May 2021)','EOL','24.0','1.48'],
+ # Optional Release slot #5
+ ['Dunfell','3.1','April 2020','3.1.23 (February 2023)','Stable - Long Term Support (until Apr. 2024)','23.0','1.46'],
]
default_poky_layers = [
diff --git a/bitbake/lib/toaster/orm/fixtures/oe-core.xml b/bitbake/lib/toaster/orm/fixtures/oe-core.xml
index 450e7a2f85..950f2a98af 100644
--- a/bitbake/lib/toaster/orm/fixtures/oe-core.xml
+++ b/bitbake/lib/toaster/orm/fixtures/oe-core.xml
@@ -10,7 +10,7 @@
<object model="orm.bitbakeversion" pk="1">
<field type="CharField" name="name">kirkstone</field>
<field type="CharField" name="giturl">git://git.openembedded.org/bitbake</field>
- <field type="CharField" name="branch">1.54</field>
+ <field type="CharField" name="branch">2.0</field>
</object>
<object model="orm.bitbakeversion" pk="2">
<field type="CharField" name="name">HEAD</field>
@@ -23,14 +23,14 @@
<field type="CharField" name="branch">master</field>
</object>
<object model="orm.bitbakeversion" pk="4">
- <field type="CharField" name="name">honister</field>
+ <field type="CharField" name="name">mickledore</field>
<field type="CharField" name="giturl">git://git.openembedded.org/bitbake</field>
- <field type="CharField" name="branch">1.52</field>
+ <field type="CharField" name="branch">2.4</field>
</object>
<object model="orm.bitbakeversion" pk="5">
- <field type="CharField" name="name">hardknott</field>
+ <field type="CharField" name="name">dunfell</field>
<field type="CharField" name="giturl">git://git.openembedded.org/bitbake</field>
- <field type="CharField" name="branch">1.50</field>
+ <field type="CharField" name="branch">1.46</field>
</object>
<!-- Releases available -->
@@ -56,18 +56,18 @@
<field type="TextField" name="helptext">Toaster will run your builds using the tip of the &lt;a href=\"https://cgit.openembedded.org/openembedded-core/log/\"&gt;OpenEmbedded master&lt;/a&gt; branch.</field>
</object>
<object model="orm.release" pk="4">
- <field type="CharField" name="name">honister</field>
- <field type="CharField" name="description">Openembedded Honister</field>
+ <field type="CharField" name="name">mickledore</field>
+ <field type="CharField" name="description">Openembedded Mickledore</field>
<field rel="ManyToOneRel" to="orm.bitbakeversion" name="bitbake_version">4</field>
- <field type="CharField" name="branch_name">honister</field>
- <field type="TextField" name="helptext">Toaster will run your builds using the tip of the &lt;a href=\"https://cgit.openembedded.org/openembedded-core/log/?h=honister\"&gt;OpenEmbedded Honister&lt;/a&gt; branch.</field>
+ <field type="CharField" name="branch_name">mickledore</field>
+ <field type="TextField" name="helptext">Toaster will run your builds using the tip of the &lt;a href=\"https://cgit.openembedded.org/openembedded-core/log/?h=mickledore\"&gt;OpenEmbedded Mickledore&lt;/a&gt; branch.</field>
</object>
<object model="orm.release" pk="5">
- <field type="CharField" name="name">hardknott</field>
- <field type="CharField" name="description">Openembedded Hardknott</field>
+ <field type="CharField" name="name">dunfell</field>
+ <field type="CharField" name="description">Openembedded Dunfell</field>
<field rel="ManyToOneRel" to="orm.bitbakeversion" name="bitbake_version">5</field>
- <field type="CharField" name="branch_name">hardknott</field>
- <field type="TextField" name="helptext">Toaster will run your builds using the tip of the &lt;a href=\"https://cgit.openembedded.org/openembedded-core/log/?h=hardknott\"&gt;OpenEmbedded Hardknott&lt;/a&gt; branch.</field>
+ <field type="CharField" name="branch_name">dunfell</field>
+ <field type="TextField" name="helptext">Toaster will run your builds using the tip of the &lt;a href=\"https://cgit.openembedded.org/openembedded-core/log/?h=dunfell\"&gt;OpenEmbedded Dunfell&lt;/a&gt; branch.</field>
</object>
<!-- Default layers for each release -->
diff --git a/bitbake/lib/toaster/orm/fixtures/poky.xml b/bitbake/lib/toaster/orm/fixtures/poky.xml
index ed86114ebe..121e52fd45 100644
--- a/bitbake/lib/toaster/orm/fixtures/poky.xml
+++ b/bitbake/lib/toaster/orm/fixtures/poky.xml
@@ -26,15 +26,15 @@
<field type="CharField" name="dirpath">bitbake</field>
</object>
<object model="orm.bitbakeversion" pk="4">
- <field type="CharField" name="name">honister</field>
+ <field type="CharField" name="name">mickledore</field>
<field type="CharField" name="giturl">git://git.yoctoproject.org/poky</field>
- <field type="CharField" name="branch">honister</field>
+ <field type="CharField" name="branch">mickledore</field>
<field type="CharField" name="dirpath">bitbake</field>
</object>
<object model="orm.bitbakeversion" pk="5">
- <field type="CharField" name="name">hardknott</field>
+ <field type="CharField" name="name">dunfell</field>
<field type="CharField" name="giturl">git://git.yoctoproject.org/poky</field>
- <field type="CharField" name="branch">hardknott</field>
+ <field type="CharField" name="branch">dunfell</field>
<field type="CharField" name="dirpath">bitbake</field>
</object>
@@ -42,7 +42,7 @@
<!-- Releases available -->
<object model="orm.release" pk="1">
<field type="CharField" name="name">kirkstone</field>
- <field type="CharField" name="description">Yocto Project 3.5 "Kirkstone"</field>
+ <field type="CharField" name="description">Yocto Project 4.0 "Kirkstone"</field>
<field rel="ManyToOneRel" to="orm.bitbakeversion" name="bitbake_version">1</field>
<field type="CharField" name="branch_name">kirkstone</field>
<field type="TextField" name="helptext">Toaster will run your builds using the tip of the &lt;a href="https://git.yoctoproject.org/cgit/cgit.cgi/poky/log/?h=kirkstone"&gt;Yocto Project Kirkstone branch&lt;/a&gt;.</field>
@@ -62,18 +62,18 @@
<field type="TextField" name="helptext">Toaster will run your builds using the tip of the &lt;a href="https://git.yoctoproject.org/cgit/cgit.cgi/poky/log/"&gt;Yocto Project Master branch&lt;/a&gt;.</field>
</object>
<object model="orm.release" pk="4">
- <field type="CharField" name="name">honister</field>
- <field type="CharField" name="description">Yocto Project 3.4 "Honister"</field>
+ <field type="CharField" name="name">mickledore</field>
+ <field type="CharField" name="description">Yocto Project 4.2 "Mickledore"</field>
<field rel="ManyToOneRel" to="orm.bitbakeversion" name="bitbake_version">4</field>
- <field type="CharField" name="branch_name">honister</field>
- <field type="TextField" name="helptext">Toaster will run your builds using the tip of the &lt;a href="https://git.yoctoproject.org/cgit/cgit.cgi/poky/log/?h=honister"&gt;Yocto Project Honister branch&lt;/a&gt;.</field>
+ <field type="CharField" name="branch_name">mickledore</field>
+ <field type="TextField" name="helptext">Toaster will run your builds using the tip of the &lt;a href="https://git.yoctoproject.org/cgit/cgit.cgi/poky/log/?h=mickledore"&gt;Yocto Project Mickledore branch&lt;/a&gt;.</field>
</object>
<object model="orm.release" pk="5">
- <field type="CharField" name="name">hardknott</field>
- <field type="CharField" name="description">Yocto Project 3.3 "Hardknott"</field>
+ <field type="CharField" name="name">dunfell</field>
+ <field type="CharField" name="description">Yocto Project 3.1 "Dunfell"</field>
<field rel="ManyToOneRel" to="orm.bitbakeversion" name="bitbake_version">5</field>
- <field type="CharField" name="branch_name">hardknott</field>
- <field type="TextField" name="helptext">Toaster will run your builds using the tip of the &lt;a href="https://git.yoctoproject.org/cgit/cgit.cgi/poky/log/?h=hardknott"&gt;Yocto Project Hardknott branch&lt;/a&gt;.</field>
+ <field type="CharField" name="branch_name">dunfell</field>
+ <field type="TextField" name="helptext">Toaster will run your builds using the tip of the &lt;a href="https://git.yoctoproject.org/cgit/cgit.cgi/poky/log/?h=dunfell"&gt;Yocto Project Dunfell branch&lt;/a&gt;.</field>
</object>
<!-- Default project layers for each release -->
@@ -177,14 +177,14 @@
<field rel="ManyToOneRel" to="orm.layer" name="layer">1</field>
<field type="IntegerField" name="layer_source">0</field>
<field rel="ManyToOneRel" to="orm.release" name="release">4</field>
- <field type="CharField" name="branch">honister</field>
+ <field type="CharField" name="branch">mickledore</field>
<field type="CharField" name="dirpath">meta</field>
</object>
<object model="orm.layer_version" pk="5">
<field rel="ManyToOneRel" to="orm.layer" name="layer">1</field>
<field type="IntegerField" name="layer_source">0</field>
<field rel="ManyToOneRel" to="orm.release" name="release">5</field>
- <field type="CharField" name="branch">hardknott</field>
+ <field type="CharField" name="branch">dunfell</field>
<field type="CharField" name="dirpath">meta</field>
</object>
@@ -222,14 +222,14 @@
<field rel="ManyToOneRel" to="orm.layer" name="layer">2</field>
<field type="IntegerField" name="layer_source">0</field>
<field rel="ManyToOneRel" to="orm.release" name="release">4</field>
- <field type="CharField" name="branch">honister</field>
+ <field type="CharField" name="branch">mickledore</field>
<field type="CharField" name="dirpath">meta-poky</field>
</object>
<object model="orm.layer_version" pk="10">
<field rel="ManyToOneRel" to="orm.layer" name="layer">2</field>
<field type="IntegerField" name="layer_source">0</field>
<field rel="ManyToOneRel" to="orm.release" name="release">5</field>
- <field type="CharField" name="branch">hardknott</field>
+ <field type="CharField" name="branch">dunfell</field>
<field type="CharField" name="dirpath">meta-poky</field>
</object>
@@ -267,14 +267,14 @@
<field rel="ManyToOneRel" to="orm.layer" name="layer">3</field>
<field type="IntegerField" name="layer_source">0</field>
<field rel="ManyToOneRel" to="orm.release" name="release">4</field>
- <field type="CharField" name="branch">honister</field>
+ <field type="CharField" name="branch">mickledore</field>
<field type="CharField" name="dirpath">meta-yocto-bsp</field>
</object>
<object model="orm.layer_version" pk="15">
<field rel="ManyToOneRel" to="orm.layer" name="layer">3</field>
<field type="IntegerField" name="layer_source">0</field>
<field rel="ManyToOneRel" to="orm.release" name="release">5</field>
- <field type="CharField" name="branch">hardknott</field>
+ <field type="CharField" name="branch">dunfell</field>
<field type="CharField" name="dirpath">meta-yocto-bsp</field>
</object>
</django-objects>
diff --git a/bitbake/lib/toaster/orm/fixtures/settings.xml b/bitbake/lib/toaster/orm/fixtures/settings.xml
index ab3ea021f5..02c26a6974 100644
--- a/bitbake/lib/toaster/orm/fixtures/settings.xml
+++ b/bitbake/lib/toaster/orm/fixtures/settings.xml
@@ -12,7 +12,7 @@
</object>
<object model="orm.toastersetting" pk="4">
<field type="CharField" name="name">DEFCONF_MACHINE</field>
- <field type="CharField" name="value">qemux86</field>
+ <field type="CharField" name="value">qemux86-64</field>
</object>
<object model="orm.toastersetting" pk="5">
<field type="CharField" name="name">DEFCONF_SSTATE_DIR</field>
diff --git a/bitbake/lib/toaster/orm/management/commands/lsupdates.py b/bitbake/lib/toaster/orm/management/commands/lsupdates.py
index eb097555e2..6d64830ebd 100644
--- a/bitbake/lib/toaster/orm/management/commands/lsupdates.py
+++ b/bitbake/lib/toaster/orm/management/commands/lsupdates.py
@@ -40,7 +40,7 @@ class Spinner(threading.Thread):
""" A simple progress spinner to indicate download/parsing is happening"""
def __init__(self, *args, **kwargs):
super(Spinner, self).__init__(*args, **kwargs)
- self.setDaemon(True)
+ self.daemon = True
self.signal = True
def run(self):
diff --git a/bitbake/lib/toaster/orm/migrations/0021_eventlogsimports.py b/bitbake/lib/toaster/orm/migrations/0021_eventlogsimports.py
new file mode 100644
index 0000000000..328eb5753c
--- /dev/null
+++ b/bitbake/lib/toaster/orm/migrations/0021_eventlogsimports.py
@@ -0,0 +1,22 @@
+# Generated by Django 4.2.5 on 2023-11-23 18:44
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('orm', '0020_models_bigautofield'),
+ ]
+
+ operations = [
+ migrations.CreateModel(
+ name='EventLogsImports',
+ fields=[
+ ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+ ('name', models.CharField(max_length=255)),
+ ('imported', models.BooleanField(default=False)),
+ ('build_id', models.IntegerField(blank=True, null=True)),
+ ],
+ ),
+ ]
diff --git a/bitbake/lib/toaster/orm/models.py b/bitbake/lib/toaster/orm/models.py
index 2cb7d7e049..19c9686206 100644
--- a/bitbake/lib/toaster/orm/models.py
+++ b/bitbake/lib/toaster/orm/models.py
@@ -107,7 +107,7 @@ class ToasterSetting(models.Model):
class ProjectManager(models.Manager):
- def create_project(self, name, release, existing_project=None):
+ def create_project(self, name, release, existing_project=None, imported=False):
if existing_project and (release is not None):
prj = existing_project
prj.bitbake_version = release.bitbake_version
@@ -134,19 +134,19 @@ class ProjectManager(models.Manager):
if release is None:
return prj
-
- for rdl in release.releasedefaultlayer_set.all():
- lv = Layer_Version.objects.filter(
- layer__name=rdl.layer_name,
- release=release).first()
-
- if lv:
- ProjectLayer.objects.create(project=prj,
- layercommit=lv,
- optional=False)
- else:
- logger.warning("Default project layer %s not found" %
- rdl.layer_name)
+ if not imported:
+ for rdl in release.releasedefaultlayer_set.all():
+ lv = Layer_Version.objects.filter(
+ layer__name=rdl.layer_name,
+ release=release).first()
+
+ if lv:
+ ProjectLayer.objects.create(project=prj,
+ layercommit=lv,
+ optional=False)
+ else:
+ logger.warning("Default project layer %s not found" %
+ rdl.layer_name)
return prj
@@ -1389,9 +1389,6 @@ class Machine(models.Model):
return "Machine " + self.name + "(" + self.description + ")"
-
-
-
class BitbakeVersion(models.Model):
name = models.CharField(max_length=32, unique = True)
@@ -1733,7 +1730,7 @@ class CustomImageRecipe(Recipe):
packages_conf += "\""
base_recipe_path = self.get_base_recipe_file()
- if base_recipe_path:
+ if base_recipe_path and os.path.isfile(base_recipe_path):
base_recipe = open(base_recipe_path, 'r').read()
else:
# Pass back None to trigger error message to user
@@ -1853,6 +1850,8 @@ def signal_runbuilds():
os.kill(int(pidf.read()), SIGUSR1)
except FileNotFoundError:
logger.info("Stopping existing runbuilds: no current process found")
+ except ProcessLookupError:
+ logger.warning("Stopping existing runbuilds: process lookup not found")
class Distro(models.Model):
search_allowed_fields = ["name", "description", "layer_version__layer__name"]
@@ -1869,6 +1868,15 @@ class Distro(models.Model):
def __unicode__(self):
return "Distro " + self.name + "(" + self.description + ")"
+class EventLogsImports(models.Model):
+ name = models.CharField(max_length=255)
+ imported = models.BooleanField(default=False)
+ build_id = models.IntegerField(blank=True, null=True)
+
+ def __str__(self):
+ return self.name
+
+
django.db.models.signals.post_save.connect(invalidate_cache)
django.db.models.signals.post_delete.connect(invalidate_cache)
django.db.models.signals.m2m_changed.connect(invalidate_cache)
diff --git a/bitbake/lib/toaster/pytest.ini b/bitbake/lib/toaster/pytest.ini
new file mode 100644
index 0000000000..071c65fcd5
--- /dev/null
+++ b/bitbake/lib/toaster/pytest.ini
@@ -0,0 +1,16 @@
+# -- FILE: pytest.ini (or tox.ini)
+[pytest]
+# --create-db - force re creation of the test database
+# https://pytest-django.readthedocs.io/en/latest/database.html#create-db-force-re-creation-of-the-test-database
+
+# --html=report.html --self-contained-html
+# https://docs.pytest.org/en/latest/usage.html#creating-html-reports
+# https://pytest-html.readthedocs.io/en/latest/user_guide.html#creating-a-self-contained-report
+addopts = --create-db --html="Toaster Tests Report.html" --self-contained-html
+
+# Define environment variables using pytest-env
+# A pytest plugin that enables you to set environment variables in the pytest.ini file.
+# https://pypi.org/project/pytest-env/
+env =
+ TOASTER_BUILDSERVER=1
+ DJANGO_SETTINGS_MODULE=toastermain.settings_test
diff --git a/bitbake/lib/toaster/tests/browser/selenium_helpers_base.py b/bitbake/lib/toaster/tests/browser/selenium_helpers_base.py
index 644d45fe58..393be75496 100644
--- a/bitbake/lib/toaster/tests/browser/selenium_helpers_base.py
+++ b/bitbake/lib/toaster/tests/browser/selenium_helpers_base.py
@@ -19,11 +19,15 @@ import os
import time
import unittest
+import pytest
from selenium import webdriver
+from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.support.ui import WebDriverWait
+from selenium.webdriver.common.by import By
from selenium.webdriver.common.desired_capabilities import DesiredCapabilities
from selenium.common.exceptions import NoSuchElementException, \
- StaleElementReferenceException, TimeoutException
+ StaleElementReferenceException, TimeoutException, \
+ SessionNotCreatedException
def create_selenium_driver(cls,browser='chrome'):
# set default browser string based on env (if available)
@@ -32,9 +36,32 @@ def create_selenium_driver(cls,browser='chrome'):
browser = env_browser
if browser == 'chrome':
- return webdriver.Chrome(
- service_args=["--verbose", "--log-path=selenium.log"]
- )
+ options = webdriver.ChromeOptions()
+ options.add_argument('--headless')
+ options.add_argument('--disable-infobars')
+ options.add_argument('--disable-dev-shm-usage')
+ options.add_argument('--no-sandbox')
+ options.add_argument('--remote-debugging-port=9222')
+ try:
+ return webdriver.Chrome(options=options)
+ except SessionNotCreatedException as e:
+ exit_message = "Halting tests prematurely to avoid cascading errors."
+ # check if chrome / chromedriver exists
+ chrome_path = os.popen("find ~/.cache/selenium/chrome/ -name 'chrome' -type f -print -quit").read().strip()
+ if not chrome_path:
+ pytest.exit(f"Failed to install/find chrome.\n{exit_message}")
+ chromedriver_path = os.popen("find ~/.cache/selenium/chromedriver/ -name 'chromedriver' -type f -print -quit").read().strip()
+ if not chromedriver_path:
+ pytest.exit(f"Failed to install/find chromedriver.\n{exit_message}")
+ # check if depends on each are fulfilled
+ depends_chrome = os.popen(f"ldd {chrome_path} | grep 'not found'").read().strip()
+ if depends_chrome:
+ pytest.exit(f"Missing chrome dependencies.\n{depends_chrome}\n{exit_message}")
+ depends_chromedriver = os.popen(f"ldd {chromedriver_path} | grep 'not found'").read().strip()
+ if depends_chromedriver:
+ pytest.exit(f"Missing chromedriver dependencies.\n{depends_chromedriver}\n{exit_message}")
+ # print original error otherwise
+ pytest.exit(f"Failed to start chromedriver.\n{e}\n{exit_message}")
elif browser == 'firefox':
return webdriver.Firefox()
elif browser == 'marionette':
@@ -66,7 +93,9 @@ class Wait(WebDriverWait):
_TIMEOUT = 10
_POLL_FREQUENCY = 0.5
- def __init__(self, driver):
+ def __init__(self, driver, timeout=_TIMEOUT, poll=_POLL_FREQUENCY):
+ self._TIMEOUT = timeout
+ self._POLL_FREQUENCY = poll
super(Wait, self).__init__(driver, self._TIMEOUT, self._POLL_FREQUENCY)
def until(self, method, message=''):
@@ -138,6 +167,8 @@ class SeleniumTestCaseBase(unittest.TestCase):
""" Clean up webdriver driver """
cls.driver.quit()
+ # Allow driver resources to be properly freed before proceeding with further tests
+ time.sleep(5)
super(SeleniumTestCaseBase, cls).tearDownClass()
def get(self, url):
@@ -151,13 +182,20 @@ class SeleniumTestCaseBase(unittest.TestCase):
abs_url = '%s%s' % (self.live_server_url, url)
self.driver.get(abs_url)
+ try: # Ensure page is loaded before proceeding
+ self.wait_until_visible("#global-nav", poll=3)
+ except NoSuchElementException:
+ self.driver.implicitly_wait(3)
+ except TimeoutException:
+ self.driver.implicitly_wait(3)
+
def find(self, selector):
""" Find single element by CSS selector """
- return self.driver.find_element_by_css_selector(selector)
+ return self.driver.find_element(By.CSS_SELECTOR, selector)
def find_all(self, selector):
""" Find all elements matching CSS selector """
- return self.driver.find_elements_by_css_selector(selector)
+ return self.driver.find_elements(By.CSS_SELECTOR, selector)
def element_exists(self, selector):
"""
@@ -170,18 +208,34 @@ class SeleniumTestCaseBase(unittest.TestCase):
""" Return the element which currently has focus on the page """
return self.driver.switch_to.active_element
- def wait_until_present(self, selector):
+ def wait_until_present(self, selector, poll=0.5):
""" Wait until element matching CSS selector is on the page """
is_present = lambda driver: self.find(selector)
msg = 'An element matching "%s" should be on the page' % selector
- element = Wait(self.driver).until(is_present, msg)
+ element = Wait(self.driver, poll=poll).until(is_present, msg)
+ if poll > 2:
+ time.sleep(poll) # element need more delay to be present
return element
- def wait_until_visible(self, selector):
+ def wait_until_visible(self, selector, poll=1):
""" Wait until element matching CSS selector is visible on the page """
is_visible = lambda driver: self.find(selector).is_displayed()
msg = 'An element matching "%s" should be visible' % selector
- Wait(self.driver).until(is_visible, msg)
+ Wait(self.driver, poll=poll).until(is_visible, msg)
+ time.sleep(poll) # wait for visibility to settle
+ return self.find(selector)
+
+ def wait_until_clickable(self, selector, poll=1):
+ """ Wait until element matching CSS selector is visible on the page """
+ WebDriverWait(
+ self.driver,
+ Wait._TIMEOUT,
+ poll_frequency=poll
+ ).until(
+ EC.element_to_be_clickable((By.ID, selector.removeprefix('#')
+ )
+ )
+ )
return self.find(selector)
def wait_until_focused(self, selector):
diff --git a/bitbake/lib/toaster/tests/browser/test_all_builds_page.py b/bitbake/lib/toaster/tests/browser/test_all_builds_page.py
index 8423d3dab2..b9356a0344 100644
--- a/bitbake/lib/toaster/tests/browser/test_all_builds_page.py
+++ b/bitbake/lib/toaster/tests/browser/test_all_builds_page.py
@@ -7,13 +7,18 @@
# SPDX-License-Identifier: GPL-2.0-only
#
+import os
import re
from django.urls import reverse
+from selenium.webdriver.support.select import Select
from django.utils import timezone
+from bldcontrol.models import BuildRequest
from tests.browser.selenium_helpers import SeleniumTestCase
-from orm.models import BitbakeVersion, Release, Project, Build, Target
+from orm.models import BitbakeVersion, Layer, Layer_Version, Recipe, Release, Project, Build, Target, Task
+
+from selenium.webdriver.common.by import By
class TestAllBuildsPage(SeleniumTestCase):
@@ -23,7 +28,8 @@ class TestAllBuildsPage(SeleniumTestCase):
CLI_BUILDS_PROJECT_NAME = 'command line builds'
def setUp(self):
- bbv = BitbakeVersion.objects.create(name='bbv1', giturl='/tmp/',
+ builldir = os.environ.get('BUILDDIR', './')
+ bbv = BitbakeVersion.objects.create(name='bbv1', giturl=f'{builldir}/',
branch='master', dirpath='')
release = Release.objects.create(name='release1',
bitbake_version=bbv)
@@ -69,7 +75,7 @@ class TestAllBuildsPage(SeleniumTestCase):
'[data-role="data-recent-build-buildtime-field"]' % build.id
# because this loads via Ajax, wait for it to be visible
- self.wait_until_present(selector)
+ self.wait_until_visible(selector)
build_time_spans = self.find_all(selector)
@@ -79,7 +85,7 @@ class TestAllBuildsPage(SeleniumTestCase):
def _get_row_for_build(self, build):
""" Get the table row for the build from the all builds table """
- self.wait_until_present('#allbuildstable')
+ self.wait_until_visible('#allbuildstable')
rows = self.find_all('#allbuildstable tr')
@@ -91,7 +97,7 @@ class TestAllBuildsPage(SeleniumTestCase):
found_row = None
for row in rows:
- outcome_links = row.find_elements_by_css_selector(selector)
+ outcome_links = row.find_elements(By.CSS_SELECTOR, selector)
if len(outcome_links) == 1:
found_row = row
break
@@ -100,6 +106,66 @@ class TestAllBuildsPage(SeleniumTestCase):
return found_row
+ def _get_create_builds(self, **kwargs):
+ """ Create a build and return the build object """
+ build1 = Build.objects.create(**self.project1_build_success)
+ build2 = Build.objects.create(**self.project1_build_failure)
+
+ # add some targets to these builds so they have recipe links
+ # (and so we can find the row in the ToasterTable corresponding to
+ # a particular build)
+ Target.objects.create(build=build1, target='foo')
+ Target.objects.create(build=build2, target='bar')
+
+ if kwargs:
+ # Create kwargs.get('success') builds with success status with target
+ # and kwargs.get('failure') builds with failure status with target
+ for i in range(kwargs.get('success', 0)):
+ now = timezone.now()
+ self.project1_build_success['started_on'] = now
+ self.project1_build_success[
+ 'completed_on'] = now - timezone.timedelta(days=i)
+ build = Build.objects.create(**self.project1_build_success)
+ Target.objects.create(build=build,
+ target=f'{i}_success_recipe',
+ task=f'{i}_success_task')
+
+ self._set_buildRequest_and_task_on_build(build)
+ for i in range(kwargs.get('failure', 0)):
+ now = timezone.now()
+ self.project1_build_failure['started_on'] = now
+ self.project1_build_failure[
+ 'completed_on'] = now - timezone.timedelta(days=i)
+ build = Build.objects.create(**self.project1_build_failure)
+ Target.objects.create(build=build,
+ target=f'{i}_fail_recipe',
+ task=f'{i}_fail_task')
+ self._set_buildRequest_and_task_on_build(build)
+ return build1, build2
+
+ def _create_recipe(self):
+ """ Add a recipe to the database and return it """
+ layer = Layer.objects.create()
+ layer_version = Layer_Version.objects.create(layer=layer)
+ return Recipe.objects.create(name='recipe_foo', layer_version=layer_version)
+
+ def _set_buildRequest_and_task_on_build(self, build):
+ """ Set buildRequest and task on build """
+ build.recipes_parsed = 1
+ build.save()
+ buildRequest = BuildRequest.objects.create(
+ build=build,
+ project=self.project1,
+ state=BuildRequest.REQ_COMPLETED)
+ build.build_request = buildRequest
+ recipe = self._create_recipe()
+ task = Task.objects.create(build=build,
+ recipe=recipe,
+ task_name='task',
+ outcome=Task.OUTCOME_SUCCESS)
+ task.save()
+ build.save()
+
def test_show_tasks_with_suffix(self):
""" Task should be shown as suffix on build name """
build = Build.objects.create(**self.project1_build_success)
@@ -109,7 +175,7 @@ class TestAllBuildsPage(SeleniumTestCase):
url = reverse('all-builds')
self.get(url)
- self.wait_until_present('td[class="target"]')
+ self.wait_until_visible('td[class="target"]')
cell = self.find('td[class="target"]')
content = cell.get_attribute('innerHTML')
@@ -126,23 +192,25 @@ class TestAllBuildsPage(SeleniumTestCase):
but should be shown for other builds
"""
build1 = Build.objects.create(**self.project1_build_success)
- default_build = Build.objects.create(**self.default_project_build_success)
+ default_build = Build.objects.create(
+ **self.default_project_build_success)
url = reverse('all-builds')
self.get(url)
- # shouldn't see a rebuild button for command-line builds
- selector = 'div[data-latest-build-result="%s"] .rebuild-btn' % default_build.id
- run_again_button = self.find_all(selector)
- self.assertEqual(len(run_again_button), 0,
- 'should not see a rebuild button for cli builds')
-
# should see a rebuild button for non-command-line builds
+ self.wait_until_visible('#allbuildstable tbody tr')
selector = 'div[data-latest-build-result="%s"] .rebuild-btn' % build1.id
run_again_button = self.find_all(selector)
self.assertEqual(len(run_again_button), 1,
'should see a rebuild button for non-cli builds')
+ # shouldn't see a rebuild button for command-line builds
+ selector = 'div[data-latest-build-result="%s"] .rebuild-btn' % default_build.id
+ run_again_button = self.find_all(selector)
+ self.assertEqual(len(run_again_button), 0,
+ 'should not see a rebuild button for cli builds')
+
def test_tooltips_on_project_name(self):
"""
Test tooltips shown next to project name in the main table
@@ -156,6 +224,7 @@ class TestAllBuildsPage(SeleniumTestCase):
url = reverse('all-builds')
self.get(url)
+ self.wait_until_visible('#allbuildstable', poll=3)
# get the project name cells from the table
cells = self.find_all('#allbuildstable td[class="project"]')
@@ -164,7 +233,7 @@ class TestAllBuildsPage(SeleniumTestCase):
for cell in cells:
content = cell.get_attribute('innerHTML')
- help_icons = cell.find_elements_by_css_selector(selector)
+ help_icons = cell.find_elements(By.CSS_SELECTOR, selector)
if re.search(self.PROJECT_NAME, content):
# no help icon next to non-cli project name
@@ -184,38 +253,224 @@ class TestAllBuildsPage(SeleniumTestCase):
recent builds area; failed builds should not have links on the time column,
or in the recent builds area
"""
- build1 = Build.objects.create(**self.project1_build_success)
- build2 = Build.objects.create(**self.project1_build_failure)
-
- # add some targets to these builds so they have recipe links
- # (and so we can find the row in the ToasterTable corresponding to
- # a particular build)
- Target.objects.create(build=build1, target='foo')
- Target.objects.create(build=build2, target='bar')
+ build1, build2 = self._get_create_builds()
url = reverse('all-builds')
self.get(url)
+ self.wait_until_visible('#allbuildstable', poll=3)
# test recent builds area for successful build
element = self._get_build_time_element(build1)
- links = element.find_elements_by_css_selector('a')
+ links = element.find_elements(By.CSS_SELECTOR, 'a')
msg = 'should be a link on the build time for a successful recent build'
- self.assertEquals(len(links), 1, msg)
+ self.assertEqual(len(links), 1, msg)
# test recent builds area for failed build
element = self._get_build_time_element(build2)
- links = element.find_elements_by_css_selector('a')
+ links = element.find_elements(By.CSS_SELECTOR, 'a')
msg = 'should not be a link on the build time for a failed recent build'
- self.assertEquals(len(links), 0, msg)
+ self.assertEqual(len(links), 0, msg)
# test the time column for successful build
build1_row = self._get_row_for_build(build1)
- links = build1_row.find_elements_by_css_selector('td.time a')
+ links = build1_row.find_elements(By.CSS_SELECTOR, 'td.time a')
msg = 'should be a link on the build time for a successful build'
- self.assertEquals(len(links), 1, msg)
+ self.assertEqual(len(links), 1, msg)
# test the time column for failed build
build2_row = self._get_row_for_build(build2)
- links = build2_row.find_elements_by_css_selector('td.time a')
+ links = build2_row.find_elements(By.CSS_SELECTOR, 'td.time a')
msg = 'should not be a link on the build time for a failed build'
- self.assertEquals(len(links), 0, msg)
+ self.assertEqual(len(links), 0, msg)
+
+ def test_builds_table_search_box(self):
+ """ Test the search box in the builds table on the all builds page """
+ self._get_create_builds()
+
+ url = reverse('all-builds')
+ self.get(url)
+
+ # Check search box is present and works
+ self.wait_until_visible('#allbuildstable tbody tr')
+ search_box = self.find('#search-input-allbuildstable')
+ self.assertTrue(search_box.is_displayed())
+
+ # Check that we can search for a build by recipe name
+ search_box.send_keys('foo')
+ search_btn = self.find('#search-submit-allbuildstable')
+ search_btn.click()
+ self.wait_until_visible('#allbuildstable tbody tr')
+ rows = self.find_all('#allbuildstable tbody tr')
+ self.assertTrue(len(rows) >= 1)
+
+ def test_filtering_on_failure_tasks_column(self):
+ """ Test the filtering on failure tasks column in the builds table on the all builds page """
+ def _check_if_filter_failed_tasks_column_is_visible():
+ # check if failed tasks filter column is visible, if not click on it
+ # Check edit column
+ edit_column = self.find('#edit-columns-button')
+ self.assertTrue(edit_column.is_displayed())
+ edit_column.click()
+ # Check dropdown is visible
+ self.wait_until_visible('ul.dropdown-menu.editcol')
+ filter_fails_task_checkbox = self.find('#checkbox-failed_tasks')
+ if not filter_fails_task_checkbox.is_selected():
+ filter_fails_task_checkbox.click()
+ edit_column.click()
+
+ self._get_create_builds(success=10, failure=10)
+
+ url = reverse('all-builds')
+ self.get(url)
+
+ # Check filtering on failure tasks column
+ self.wait_until_visible('#allbuildstable tbody tr')
+ _check_if_filter_failed_tasks_column_is_visible()
+ failed_tasks_filter = self.find('#failed_tasks_filter')
+ failed_tasks_filter.click()
+ # Check popup is visible
+ self.wait_until_visible('#filter-modal-allbuildstable')
+ self.assertTrue(
+ self.find('#filter-modal-allbuildstable').is_displayed())
+ # Check that we can filter by failure tasks
+ build_without_failure_tasks = self.find(
+ '#failed_tasks_filter\\:without_failed_tasks')
+ build_without_failure_tasks.click()
+ # click on apply button
+ self.find('#filter-modal-allbuildstable .btn-primary').click()
+ self.wait_until_visible('#allbuildstable tbody tr')
+ # Check if filter is applied, by checking if failed_tasks_filter has btn-primary class
+ self.assertTrue(self.find('#failed_tasks_filter').get_attribute(
+ 'class').find('btn-primary') != -1)
+
+ def test_filtering_on_completedOn_column(self):
+ """ Test the filtering on completed_on column in the builds table on the all builds page """
+ self._get_create_builds(success=10, failure=10)
+
+ url = reverse('all-builds')
+ self.get(url)
+
+ # Check filtering on failure tasks column
+ self.wait_until_visible('#allbuildstable tbody tr')
+ completed_on_filter = self.find('#completed_on_filter')
+ completed_on_filter.click()
+ # Check popup is visible
+ self.wait_until_visible('#filter-modal-allbuildstable')
+ self.assertTrue(
+ self.find('#filter-modal-allbuildstable').is_displayed())
+ # Check that we can filter by failure tasks
+ build_without_failure_tasks = self.find(
+ '#completed_on_filter\\:date_range')
+ build_without_failure_tasks.click()
+ # click on apply button
+ self.find('#filter-modal-allbuildstable .btn-primary').click()
+ self.wait_until_visible('#allbuildstable tbody tr')
+ # Check if filter is applied, by checking if completed_on_filter has btn-primary class
+ self.assertTrue(self.find('#completed_on_filter').get_attribute(
+ 'class').find('btn-primary') != -1)
+
+ # Filter by date range
+ self.find('#completed_on_filter').click()
+ self.wait_until_visible('#filter-modal-allbuildstable')
+ date_ranges = self.driver.find_elements(
+ By.XPATH, '//input[@class="form-control hasDatepicker"]')
+ today = timezone.now()
+ yestersday = today - timezone.timedelta(days=1)
+ date_ranges[0].send_keys(yestersday.strftime('%Y-%m-%d'))
+ date_ranges[1].send_keys(today.strftime('%Y-%m-%d'))
+ self.find('#filter-modal-allbuildstable .btn-primary').click()
+ self.wait_until_visible('#allbuildstable tbody tr')
+ self.assertTrue(self.find('#completed_on_filter').get_attribute(
+ 'class').find('btn-primary') != -1)
+ # Check if filter is applied, number of builds displayed should be 6
+ self.assertTrue(len(self.find_all('#allbuildstable tbody tr')) >= 4)
+
+ def test_builds_table_editColumn(self):
+ """ Test the edit column feature in the builds table on the all builds page """
+ self._get_create_builds(success=10, failure=10)
+
+ def test_edit_column(check_box_id):
+ # Check that we can hide/show table column
+ check_box = self.find(f'#{check_box_id}')
+ th_class = str(check_box_id).replace('checkbox-', '')
+ if check_box.is_selected():
+ # check if column is visible in table
+ self.assertTrue(
+ self.find(
+ f'#allbuildstable thead th.{th_class}'
+ ).is_displayed(),
+ f"The {th_class} column is checked in EditColumn dropdown, but it's not visible in table"
+ )
+ check_box.click()
+ # check if column is hidden in table
+ self.assertFalse(
+ self.find(
+ f'#allbuildstable thead th.{th_class}'
+ ).is_displayed(),
+ f"The {th_class} column is unchecked in EditColumn dropdown, but it's visible in table"
+ )
+ else:
+ # check if column is hidden in table
+ self.assertFalse(
+ self.find(
+ f'#allbuildstable thead th.{th_class}'
+ ).is_displayed(),
+ f"The {th_class} column is unchecked in EditColumn dropdown, but it's visible in table"
+ )
+ check_box.click()
+ # check if column is visible in table
+ self.assertTrue(
+ self.find(
+ f'#allbuildstable thead th.{th_class}'
+ ).is_displayed(),
+ f"The {th_class} column is checked in EditColumn dropdown, but it's not visible in table"
+ )
+ url = reverse('all-builds')
+ self.get(url)
+ self.wait_until_visible('#allbuildstable tbody tr')
+
+ # Check edit column
+ edit_column = self.find('#edit-columns-button')
+ self.assertTrue(edit_column.is_displayed())
+ edit_column.click()
+ # Check dropdown is visible
+ self.wait_until_visible('ul.dropdown-menu.editcol')
+
+ # Check that we can hide the edit column
+ test_edit_column('checkbox-errors_no')
+ test_edit_column('checkbox-failed_tasks')
+ test_edit_column('checkbox-image_files')
+ test_edit_column('checkbox-project')
+ test_edit_column('checkbox-started_on')
+ test_edit_column('checkbox-time')
+ test_edit_column('checkbox-warnings_no')
+
+ def test_builds_table_show_rows(self):
+ """ Test the show rows feature in the builds table on the all builds page """
+ self._get_create_builds(success=100, failure=100)
+
+ def test_show_rows(row_to_show, show_row_link):
+ # Check that we can show rows == row_to_show
+ show_row_link.select_by_value(str(row_to_show))
+ self.wait_until_visible('#allbuildstable tbody tr', poll=3)
+ # check at least some rows are visible
+ self.assertTrue(
+ len(self.find_all('#allbuildstable tbody tr')) > 0
+ )
+
+ url = reverse('all-builds')
+ self.get(url)
+ self.wait_until_visible('#allbuildstable tbody tr')
+
+ show_rows = self.driver.find_elements(
+ By.XPATH,
+ '//select[@class="form-control pagesize-allbuildstable"]'
+ )
+ # Check show rows
+ for show_row_link in show_rows:
+ show_row_link = Select(show_row_link)
+ test_show_rows(10, show_row_link)
+ test_show_rows(25, show_row_link)
+ test_show_rows(50, show_row_link)
+ test_show_rows(100, show_row_link)
+ test_show_rows(150, show_row_link)
diff --git a/bitbake/lib/toaster/tests/browser/test_all_projects_page.py b/bitbake/lib/toaster/tests/browser/test_all_projects_page.py
index 15b03400f9..9ed1901cc9 100644
--- a/bitbake/lib/toaster/tests/browser/test_all_projects_page.py
+++ b/bitbake/lib/toaster/tests/browser/test_all_projects_page.py
@@ -7,15 +7,20 @@
# SPDX-License-Identifier: GPL-2.0-only
#
+import os
import re
from django.urls import reverse
from django.utils import timezone
+from selenium.webdriver.support.select import Select
from tests.browser.selenium_helpers import SeleniumTestCase
from orm.models import BitbakeVersion, Release, Project, Build
from orm.models import ProjectVariable
+from selenium.webdriver.common.by import By
+
+
class TestAllProjectsPage(SeleniumTestCase):
""" Browser tests for projects page /projects/ """
@@ -25,7 +30,8 @@ class TestAllProjectsPage(SeleniumTestCase):
def setUp(self):
""" Add default project manually """
- project = Project.objects.create_project(self.CLI_BUILDS_PROJECT_NAME, None)
+ project = Project.objects.create_project(
+ self.CLI_BUILDS_PROJECT_NAME, None)
self.default_project = project
self.default_project.is_default = True
self.default_project.save()
@@ -35,6 +41,17 @@ class TestAllProjectsPage(SeleniumTestCase):
self.release = None
+ def _create_projects(self, nb_project=10):
+ projects = []
+ for i in range(1, nb_project + 1):
+ projects.append(
+ Project(
+ name='test project {}'.format(i),
+ release=self.release,
+ )
+ )
+ Project.objects.bulk_create(projects)
+
def _add_build_to_default_project(self):
""" Add a build to the default project (not used in all tests) """
now = timezone.now()
@@ -45,12 +62,14 @@ class TestAllProjectsPage(SeleniumTestCase):
def _add_non_default_project(self):
""" Add another project """
- bbv = BitbakeVersion.objects.create(name='test bbv', giturl='/tmp/',
+ builldir = os.environ.get('BUILDDIR', './')
+ bbv = BitbakeVersion.objects.create(name='test bbv', giturl=f'{builldir}/',
branch='master', dirpath='')
self.release = Release.objects.create(name='test release',
branch_name='master',
bitbake_version=bbv)
- self.project = Project.objects.create_project(self.PROJECT_NAME, self.release)
+ self.project = Project.objects.create_project(
+ self.PROJECT_NAME, self.release)
self.project.is_default = False
self.project.save()
@@ -62,7 +81,7 @@ class TestAllProjectsPage(SeleniumTestCase):
def _get_row_for_project(self, project_name):
""" Get the HTML row for a project, or None if not found """
- self.wait_until_present('#projectstable tbody tr')
+ self.wait_until_visible('#projectstable tbody tr', poll=3)
rows = self.find_all('#projectstable tbody tr')
# find the row with a project name matching the one supplied
@@ -93,7 +112,8 @@ class TestAllProjectsPage(SeleniumTestCase):
url = reverse('all-projects')
self.get(url)
- default_project_row = self._get_row_for_project(self.default_project.name)
+ default_project_row = self._get_row_for_project(
+ self.default_project.name)
self.assertNotEqual(default_project_row, None,
'default project "cli builds" should be in page')
@@ -113,11 +133,12 @@ class TestAllProjectsPage(SeleniumTestCase):
self.wait_until_visible("#projectstable tr")
# find the row for the default project
- default_project_row = self._get_row_for_project(self.default_project.name)
+ default_project_row = self._get_row_for_project(
+ self.default_project.name)
# check the release text for the default project
selector = 'span[data-project-field="release"] span.text-muted'
- element = default_project_row.find_element_by_css_selector(selector)
+ element = default_project_row.find_element(By.CSS_SELECTOR, selector)
text = element.text.strip()
self.assertEqual(text, 'Not applicable',
'release should be "not applicable" for default project')
@@ -127,7 +148,7 @@ class TestAllProjectsPage(SeleniumTestCase):
# check the link in the release cell for the other project
selector = 'span[data-project-field="release"]'
- element = other_project_row.find_element_by_css_selector(selector)
+ element = other_project_row.find_element(By.CSS_SELECTOR, selector)
text = element.text.strip()
self.assertEqual(text, self.release.name,
'release name should be shown for non-default project')
@@ -148,11 +169,12 @@ class TestAllProjectsPage(SeleniumTestCase):
self.wait_until_visible("#projectstable tr")
# find the row for the default project
- default_project_row = self._get_row_for_project(self.default_project.name)
+ default_project_row = self._get_row_for_project(
+ self.default_project.name)
# check the machine cell for the default project
selector = 'span[data-project-field="machine"] span.text-muted'
- element = default_project_row.find_element_by_css_selector(selector)
+ element = default_project_row.find_element(By.CSS_SELECTOR, selector)
text = element.text.strip()
self.assertEqual(text, 'Not applicable',
'machine should be not applicable for default project')
@@ -162,7 +184,7 @@ class TestAllProjectsPage(SeleniumTestCase):
# check the link in the machine cell for the other project
selector = 'span[data-project-field="machine"]'
- element = other_project_row.find_element_by_css_selector(selector)
+ element = other_project_row.find_element(By.CSS_SELECTOR, selector)
text = element.text.strip()
self.assertEqual(text, self.MACHINE_NAME,
'machine name should be shown for non-default project')
@@ -183,13 +205,15 @@ class TestAllProjectsPage(SeleniumTestCase):
self.get(reverse('all-projects'))
# find the row for the default project
- default_project_row = self._get_row_for_project(self.default_project.name)
+ default_project_row = self._get_row_for_project(
+ self.default_project.name)
# check the link on the name field
selector = 'span[data-project-field="name"] a'
- element = default_project_row.find_element_by_css_selector(selector)
+ element = default_project_row.find_element(By.CSS_SELECTOR, selector)
link_url = element.get_attribute('href').strip()
- expected_url = reverse('projectbuilds', args=(self.default_project.id,))
+ expected_url = reverse(
+ 'projectbuilds', args=(self.default_project.id,))
msg = 'link on default project name should point to builds but was %s' % link_url
self.assertTrue(link_url.endswith(expected_url), msg)
@@ -198,8 +222,116 @@ class TestAllProjectsPage(SeleniumTestCase):
# check the link for the other project
selector = 'span[data-project-field="name"] a'
- element = other_project_row.find_element_by_css_selector(selector)
+ element = other_project_row.find_element(By.CSS_SELECTOR, selector)
link_url = element.get_attribute('href').strip()
expected_url = reverse('project', args=(self.project.id,))
msg = 'link on project name should point to configuration but was %s' % link_url
self.assertTrue(link_url.endswith(expected_url), msg)
+
+ def test_allProject_table_search_box(self):
+ """ Test the search box in the all project table on the all projects page """
+ self._create_projects()
+
+ url = reverse('all-projects')
+ self.get(url)
+
+ # Chseck search box is present and works
+ self.wait_until_visible('#projectstable tbody tr', poll=3)
+ search_box = self.find('#search-input-projectstable')
+ self.assertTrue(search_box.is_displayed())
+
+ # Check that we can search for a project by project name
+ search_box.send_keys('test project 10')
+ search_btn = self.find('#search-submit-projectstable')
+ search_btn.click()
+ self.wait_until_visible('#projectstable tbody tr', poll=3)
+ rows = self.find_all('#projectstable tbody tr')
+ self.assertTrue(len(rows) == 1)
+
+ def test_allProject_table_editColumn(self):
+ """ Test the edit column feature in the projects table on the all projects page """
+ self._create_projects()
+
+ def test_edit_column(check_box_id):
+ # Check that we can hide/show table column
+ check_box = self.find(f'#{check_box_id}')
+ th_class = str(check_box_id).replace('checkbox-', '')
+ if check_box.is_selected():
+ # check if column is visible in table
+ self.assertTrue(
+ self.find(
+ f'#projectstable thead th.{th_class}'
+ ).is_displayed(),
+ f"The {th_class} column is checked in EditColumn dropdown, but it's not visible in table"
+ )
+ check_box.click()
+ # check if column is hidden in table
+ self.assertFalse(
+ self.find(
+ f'#projectstable thead th.{th_class}'
+ ).is_displayed(),
+ f"The {th_class} column is unchecked in EditColumn dropdown, but it's visible in table"
+ )
+ else:
+ # check if column is hidden in table
+ self.assertFalse(
+ self.find(
+ f'#projectstable thead th.{th_class}'
+ ).is_displayed(),
+ f"The {th_class} column is unchecked in EditColumn dropdown, but it's visible in table"
+ )
+ check_box.click()
+ # check if column is visible in table
+ self.assertTrue(
+ self.find(
+ f'#projectstable thead th.{th_class}'
+ ).is_displayed(),
+ f"The {th_class} column is checked in EditColumn dropdown, but it's not visible in table"
+ )
+ url = reverse('all-projects')
+ self.get(url)
+ self.wait_until_visible('#projectstable tbody tr', poll=3)
+
+ # Check edit column
+ edit_column = self.find('#edit-columns-button')
+ self.assertTrue(edit_column.is_displayed())
+ edit_column.click()
+ # Check dropdown is visible
+ self.wait_until_visible('ul.dropdown-menu.editcol')
+
+ # Check that we can hide the edit column
+ test_edit_column('checkbox-errors')
+ test_edit_column('checkbox-image_files')
+ test_edit_column('checkbox-last_build_outcome')
+ test_edit_column('checkbox-recipe_name')
+ test_edit_column('checkbox-warnings')
+
+ def test_allProject_table_show_rows(self):
+ """ Test the show rows feature in the projects table on the all projects page """
+ self._create_projects(nb_project=200)
+
+ def test_show_rows(row_to_show, show_row_link):
+ # Check that we can show rows == row_to_show
+ show_row_link.select_by_value(str(row_to_show))
+ self.wait_until_visible('#projectstable tbody tr', poll=3)
+ # check at least some rows are visible
+ self.assertTrue(
+ len(self.find_all('#projectstable tbody tr')) > 0
+ )
+
+ url = reverse('all-projects')
+ self.get(url)
+ self.wait_until_visible('#projectstable tbody tr', poll=3)
+
+ show_rows = self.driver.find_elements(
+ By.XPATH,
+ '//select[@class="form-control pagesize-projectstable"]'
+ )
+ # Check show rows
+ for show_row_link in show_rows:
+ show_row_link = Select(show_row_link)
+ test_show_rows(10, show_row_link)
+ test_show_rows(25, show_row_link)
+ test_show_rows(50, show_row_link)
+ test_show_rows(100, show_row_link)
+ test_show_rows(150, show_row_link)
diff --git a/bitbake/lib/toaster/tests/browser/test_builddashboard_page.py b/bitbake/lib/toaster/tests/browser/test_builddashboard_page.py
index efcd89b346..d838ce363a 100644
--- a/bitbake/lib/toaster/tests/browser/test_builddashboard_page.py
+++ b/bitbake/lib/toaster/tests/browser/test_builddashboard_page.py
@@ -7,6 +7,7 @@
# SPDX-License-Identifier: GPL-2.0-only
#
+import os
from django.urls import reverse
from django.utils import timezone
@@ -15,11 +16,14 @@ from tests.browser.selenium_helpers import SeleniumTestCase
from orm.models import Project, Release, BitbakeVersion, Build, LogMessage
from orm.models import Layer, Layer_Version, Recipe, CustomImageRecipe, Variable
+from selenium.webdriver.common.by import By
+
class TestBuildDashboardPage(SeleniumTestCase):
""" Tests for the build dashboard /build/X """
def setUp(self):
- bbv = BitbakeVersion.objects.create(name='bbv1', giturl='/tmp/',
+ builldir = os.environ.get('BUILDDIR', './')
+ bbv = BitbakeVersion.objects.create(name='bbv1', giturl=f'{builldir}/',
branch='master', dirpath="")
release = Release.objects.create(name='release1',
bitbake_version=bbv)
@@ -158,6 +162,7 @@ class TestBuildDashboardPage(SeleniumTestCase):
"""
url = reverse('builddashboard', args=(build.id,))
self.get(url)
+ self.wait_until_visible('#global-nav', poll=3)
def _get_build_dashboard_errors(self, build):
"""
@@ -183,7 +188,7 @@ class TestBuildDashboardPage(SeleniumTestCase):
found = False
for element in message_elements:
- log_message_text = element.find_element_by_tag_name('pre').text.strip()
+ log_message_text = element.find_element(By.TAG_NAME, 'pre').text.strip()
text_matches = (log_message_text == expected_text)
log_message_pk = element.get_attribute('data-log-message-id')
@@ -213,7 +218,7 @@ class TestBuildDashboardPage(SeleniumTestCase):
the WebElement modal match the list of text values in expected
"""
# labels containing the radio buttons we're testing for
- labels = modal.find_elements_by_css_selector(".radio")
+ labels = modal.find_elements(By.CSS_SELECTOR,".radio")
labels_text = [lab.text for lab in labels]
self.assertEqual(len(labels_text), len(expected))
@@ -248,7 +253,7 @@ class TestBuildDashboardPage(SeleniumTestCase):
selector = '[data-role="edit-custom-image-trigger"]'
self.click(selector)
- modal = self.driver.find_element_by_id('edit-custom-image-modal')
+ modal = self.driver.find_element(By.ID, 'edit-custom-image-modal')
self.wait_until_visible("#edit-custom-image-modal")
# recipes we expect to see in the edit custom image modal
@@ -270,7 +275,7 @@ class TestBuildDashboardPage(SeleniumTestCase):
selector = '[data-role="new-custom-image-trigger"]'
self.click(selector)
- modal = self.driver.find_element_by_id('new-custom-image-modal')
+ modal = self.driver.find_element(By.ID,'new-custom-image-modal')
self.wait_until_visible("#new-custom-image-modal")
# recipes we expect to see in the new custom image modal
diff --git a/bitbake/lib/toaster/tests/browser/test_builddashboard_page_artifacts.py b/bitbake/lib/toaster/tests/browser/test_builddashboard_page_artifacts.py
index c6226d60eb..675825bd40 100644
--- a/bitbake/lib/toaster/tests/browser/test_builddashboard_page_artifacts.py
+++ b/bitbake/lib/toaster/tests/browser/test_builddashboard_page_artifacts.py
@@ -7,6 +7,7 @@
# SPDX-License-Identifier: GPL-2.0-only
#
+import os
from django.urls import reverse
from django.utils import timezone
@@ -20,7 +21,8 @@ class TestBuildDashboardPageArtifacts(SeleniumTestCase):
""" Tests for artifacts on the build dashboard /build/X """
def setUp(self):
- bbv = BitbakeVersion.objects.create(name='bbv1', giturl='/tmp/',
+ builldir = os.environ.get('BUILDDIR', './')
+ bbv = BitbakeVersion.objects.create(name='bbv1', giturl=f'{builldir}/',
branch='master', dirpath="")
release = Release.objects.create(name='release1',
bitbake_version=bbv)
@@ -197,12 +199,12 @@ class TestBuildDashboardPageArtifacts(SeleniumTestCase):
# check package count and size, link on target name
selector = '[data-value="target-package-count"]'
element = self.find(selector)
- self.assertEquals(element.text, '1',
+ self.assertEqual(element.text, '1',
'package count should be shown for image builds')
selector = '[data-value="target-package-size"]'
element = self.find(selector)
- self.assertEquals(element.text, '1.0 KB',
+ self.assertEqual(element.text, '1.0 KB',
'package size should be shown for image builds')
selector = '[data-link="target-packages"]'
diff --git a/bitbake/lib/toaster/tests/browser/test_delete_project.py b/bitbake/lib/toaster/tests/browser/test_delete_project.py
new file mode 100644
index 0000000000..1941777ccc
--- /dev/null
+++ b/bitbake/lib/toaster/tests/browser/test_delete_project.py
@@ -0,0 +1,103 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+# BitBake Toaster UI tests implementation
+#
+# Copyright (C) 2023 Savoir-faire Linux Inc
+#
+# SPDX-License-Identifier: GPL-2.0-only
+
+import pytest
+from django.urls import reverse
+from selenium.webdriver.support.ui import Select
+from tests.browser.selenium_helpers import SeleniumTestCase
+from orm.models import BitbakeVersion, Project, Release
+from selenium.webdriver.common.by import By
+
+class TestDeleteProject(SeleniumTestCase):
+
+ def setUp(self):
+ bitbake, _ = BitbakeVersion.objects.get_or_create(
+ name="master",
+ giturl="git://master",
+ branch="master",
+ dirpath="master")
+
+ self.release, _ = Release.objects.get_or_create(
+ name="master",
+ description="Yocto Project master",
+ branch_name="master",
+ helptext="latest",
+ bitbake_version=bitbake)
+
+ Release.objects.get_or_create(
+ name="foo",
+ description="Yocto Project foo",
+ branch_name="foo",
+ helptext="latest",
+ bitbake_version=bitbake)
+
+ @pytest.mark.django_db
+ def test_delete_project(self):
+ """ Test delete a project
+ - Check delete modal is visible
+ - Check delete modal has right text
+ - Confirm delete
+ - Check project is deleted
+ """
+ project_name = "project_to_delete"
+ url = reverse('newproject')
+ self.get(url)
+ self.enter_text('#new-project-name', project_name)
+ select = Select(self.find('#projectversion'))
+ select.select_by_value(str(self.release.pk))
+ self.click("#create-project-button")
+ # We should get redirected to the new project's page with the
+ # notification at the top
+ element = self.wait_until_visible('#project-created-notification')
+ self.assertTrue(project_name in element.text,
+ "New project name not in new project notification")
+ self.assertTrue(Project.objects.filter(name=project_name).count(),
+ "New project not found in database")
+
+ # Delete project
+ delete_project_link = self.driver.find_element(
+ By.XPATH, '//a[@href="#delete-project-modal"]')
+ delete_project_link.click()
+
+ # Check delete modal is visible
+ self.wait_until_visible('#delete-project-modal')
+
+ # Check delete modal has right text
+ modal_header_text = self.find('#delete-project-modal .modal-header').text
+ self.assertTrue(
+ "Are you sure you want to delete this project?" in modal_header_text,
+ "Delete project modal header text is wrong")
+
+ modal_body_text = self.find('#delete-project-modal .modal-body').text
+ self.assertTrue(
+ "Cancel its builds currently in progress" in modal_body_text,
+ "Modal body doesn't contain: Cancel its builds currently in progress")
+ self.assertTrue(
+ "Remove its configuration information" in modal_body_text,
+ "Modal body doesn't contain: Remove its configuration information")
+ self.assertTrue(
+ "Remove its imported layers" in modal_body_text,
+ "Modal body doesn't contain: Remove its imported layers")
+ self.assertTrue(
+ "Remove its custom images" in modal_body_text,
+ "Modal body doesn't contain: Remove its custom images")
+ self.assertTrue(
+ "Remove all its build information" in modal_body_text,
+ "Modal body doesn't contain: Remove all its build information")
+
+ # Confirm delete
+ delete_btn = self.find('#delete-project-confirmed')
+ delete_btn.click()
+
+ # Check project is deleted
+ self.wait_until_visible('#change-notification')
+ delete_notification = self.find('#change-notification-msg')
+ self.assertTrue("You have deleted 1 project:" in delete_notification.text)
+ self.assertTrue(project_name in delete_notification.text)
+ self.assertFalse(Project.objects.filter(name=project_name).exists(),
+ "Project not deleted from database")
diff --git a/bitbake/lib/toaster/tests/browser/test_landing_page.py b/bitbake/lib/toaster/tests/browser/test_landing_page.py
index 8bb64b9f3e..8fe5fea467 100644
--- a/bitbake/lib/toaster/tests/browser/test_landing_page.py
+++ b/bitbake/lib/toaster/tests/browser/test_landing_page.py
@@ -10,8 +10,10 @@
from django.urls import reverse
from django.utils import timezone
from tests.browser.selenium_helpers import SeleniumTestCase
+from selenium.webdriver.common.by import By
+
+from orm.models import Layer, Layer_Version, Project, Build
-from orm.models import Project, Build
class TestLandingPage(SeleniumTestCase):
""" Tests for redirects on the landing page """
@@ -29,6 +31,130 @@ class TestLandingPage(SeleniumTestCase):
self.project.is_default = True
self.project.save()
+ def test_icon_info_visible_and_clickable(self):
+ """ Test that the information icon is visible and clickable """
+ self.get(reverse('landing'))
+ info_sign = self.find('#toaster-version-info-sign')
+
+ # check that the info sign is visible
+ self.assertTrue(info_sign.is_displayed())
+
+ # check that the info sign is clickable
+ # and info modal is appearing when clicking on the info sign
+ info_sign.click() # click on the info sign make attribute 'aria-describedby' visible
+ info_model_id = info_sign.get_attribute('aria-describedby')
+ info_modal = self.find(f'#{info_model_id}')
+ self.assertTrue(info_modal.is_displayed())
+ self.assertTrue("Toaster version information" in info_modal.text)
+
+ def test_documentation_link_displayed(self):
+ """ Test that the documentation link is displayed """
+ self.get(reverse('landing'))
+ documentation_link = self.find('#navbar-docs > a')
+
+ # check that the documentation link is visible
+ self.assertTrue(documentation_link.is_displayed())
+
+ # check browser open new tab toaster manual when clicking on the documentation link
+ self.assertEqual(documentation_link.get_attribute('target'), '_blank')
+ self.assertEqual(
+ documentation_link.get_attribute('href'),
+ 'http://docs.yoctoproject.org/toaster-manual/index.html#toaster-user-manual')
+ self.assertTrue("Documentation" in documentation_link.text)
+
+ def test_openembedded_jumbotron_link_visible_and_clickable(self):
+ """ Test OpenEmbedded link jumbotron is visible and clickable: """
+ self.get(reverse('landing'))
+ jumbotron = self.find('.jumbotron')
+
+ # check OpenEmbedded
+ openembedded = jumbotron.find_element(By.LINK_TEXT, 'OpenEmbedded')
+ self.assertTrue(openembedded.is_displayed())
+ openembedded.click()
+ self.assertTrue("openembedded.org" in self.driver.current_url)
+
+ def test_bitbake_jumbotron_link_visible_and_clickable(self):
+ """ Test BitBake link jumbotron is visible and clickable: """
+ self.get(reverse('landing'))
+ jumbotron = self.find('.jumbotron')
+
+ # check BitBake
+ bitbake = jumbotron.find_element(By.LINK_TEXT, 'BitBake')
+ self.assertTrue(bitbake.is_displayed())
+ bitbake.click()
+ self.assertTrue(
+ "docs.yoctoproject.org/bitbake.html" in self.driver.current_url)
+
+ def test_yoctoproject_jumbotron_link_visible_and_clickable(self):
+ """ Test Yocto Project link jumbotron is visible and clickable: """
+ self.get(reverse('landing'))
+ jumbotron = self.find('.jumbotron')
+
+ # check Yocto Project
+ yoctoproject = jumbotron.find_element(By.LINK_TEXT, 'Yocto Project')
+ self.assertTrue(yoctoproject.is_displayed())
+ yoctoproject.click()
+ self.assertTrue("yoctoproject.org" in self.driver.current_url)
+
+ def test_link_setup_using_toaster_visible_and_clickable(self):
+ """ Test big magenta button setting up and using toaster link in jumbotron
+ if visible and clickable
+ """
+ self.get(reverse('landing'))
+ jumbotron = self.find('.jumbotron')
+
+ # check Big magenta button
+ big_magenta_button = jumbotron.find_element(By.LINK_TEXT,
+ 'Toaster is ready to capture your command line builds'
+ )
+ self.assertTrue(big_magenta_button.is_displayed())
+ big_magenta_button.click()
+ self.assertTrue(
+ "docs.yoctoproject.org/toaster-manual/setup-and-use.html#setting-up-and-using-toaster" in self.driver.current_url)
+
+ def test_link_create_new_project_in_jumbotron_visible_and_clickable(self):
+ """ Test big blue button create new project jumbotron if visible and clickable """
+ # Create a layer and a layer version to make visible the big blue button
+ layer = Layer.objects.create(name='bar')
+ Layer_Version.objects.create(layer=layer)
+
+ self.get(reverse('landing'))
+ jumbotron = self.find('.jumbotron')
+
+ # check Big Blue button
+ big_blue_button = jumbotron.find_element(By.LINK_TEXT,
+ 'Create your first Toaster project to run manage builds'
+ )
+ self.assertTrue(big_blue_button.is_displayed())
+ big_blue_button.click()
+ self.assertTrue("toastergui/newproject/" in self.driver.current_url)
+
+ def test_toaster_manual_link_visible_and_clickable(self):
+ """ Test Read the Toaster manual link jumbotron is visible and clickable: """
+ self.get(reverse('landing'))
+ jumbotron = self.find('.jumbotron')
+
+ # check Read the Toaster manual
+ toaster_manual = jumbotron.find_element(
+ By.LINK_TEXT, 'Read the Toaster manual')
+ self.assertTrue(toaster_manual.is_displayed())
+ toaster_manual.click()
+ self.assertTrue(
+ "https://docs.yoctoproject.org/toaster-manual/index.html#toaster-user-manual" in self.driver.current_url)
+
+ def test_contrib_to_toaster_link_visible_and_clickable(self):
+ """ Test Contribute to Toaster link jumbotron is visible and clickable: """
+ self.get(reverse('landing'))
+ jumbotron = self.find('.jumbotron')
+
+ # check Contribute to Toaster
+ contribute_to_toaster = jumbotron.find_element(
+ By.LINK_TEXT, 'Contribute to Toaster')
+ self.assertTrue(contribute_to_toaster.is_displayed())
+ contribute_to_toaster.click()
+ self.assertTrue(
+ "wiki.yoctoproject.org/wiki/contribute_to_toaster" in str(self.driver.current_url).lower())
+
def test_only_default_project(self):
"""
No projects except default
@@ -87,10 +213,9 @@ class TestLandingPage(SeleniumTestCase):
self.get(reverse('landing'))
+ self.wait_until_visible("#latest-builds", poll=3)
elements = self.find_all('#allbuildstable')
self.assertEqual(len(elements), 1, 'should redirect to builds')
content = self.get_page_source()
self.assertTrue(self.PROJECT_NAME in content,
'should show builds for project %s' % self.PROJECT_NAME)
- self.assertFalse(self.CLI_BUILDS_PROJECT_NAME in content,
- 'should not show builds for cli project')
diff --git a/bitbake/lib/toaster/tests/browser/test_layerdetails_page.py b/bitbake/lib/toaster/tests/browser/test_layerdetails_page.py
index 71bdd2aafd..5c29548b78 100644
--- a/bitbake/lib/toaster/tests/browser/test_layerdetails_page.py
+++ b/bitbake/lib/toaster/tests/browser/test_layerdetails_page.py
@@ -8,6 +8,7 @@
#
from django.urls import reverse
+from selenium.common.exceptions import ElementClickInterceptedException, TimeoutException
from tests.browser.selenium_helpers import SeleniumTestCase
from orm.models import Layer, Layer_Version, Project, LayerSource, Release
@@ -63,11 +64,12 @@ class TestLayerDetailsPage(SeleniumTestCase):
args=(self.project.pk,
self.imported_layer_version.pk))
- def test_edit_layerdetails(self):
+ def _edit_layerdetails(self):
""" Edit all the editable fields for the layer refresh the page and
check that the new values exist"""
self.get(self.url)
+ self.wait_until_visible("#add-remove-layer-btn")
self.click("#add-remove-layer-btn")
self.click("#edit-layer-source")
@@ -97,13 +99,26 @@ class TestLayerDetailsPage(SeleniumTestCase):
"Expecting any of \"%s\"but got \"%s\"" %
(self.initial_values, value))
+ # Make sure the input visible beofre sending keys
+ self.wait_until_visible("#layer-git input[type=text]")
inputs.send_keys("-edited")
# Save the new values
for save_btn in self.find_all(".change-btn"):
save_btn.click()
- self.click("#save-changes-for-switch")
+ try:
+ self.wait_until_visible("#save-changes-for-switch", poll=3)
+ btn_save_chg_for_switch = self.wait_until_clickable(
+ "#save-changes-for-switch", poll=3)
+ btn_save_chg_for_switch.click()
+ except ElementClickInterceptedException:
+ self.skipTest(
+ "save-changes-for-switch click intercepted. Element not visible or maybe covered by another element.")
+ except TimeoutException:
+ self.skipTest(
+ "save-changes-for-switch is not clickable within the specified timeout.")
+
self.wait_until_visible("#edit-layer-source")
# Refresh the page to see if the new values are returned
@@ -132,7 +147,18 @@ class TestLayerDetailsPage(SeleniumTestCase):
new_dir = "/home/test/my-meta-dir"
dir_input.send_keys(new_dir)
- self.click("#save-changes-for-switch")
+ try:
+ self.wait_until_visible("#save-changes-for-switch", poll=3)
+ btn_save_chg_for_switch = self.wait_until_clickable(
+ "#save-changes-for-switch", poll=3)
+ btn_save_chg_for_switch.click()
+ except ElementClickInterceptedException:
+ self.skipTest(
+ "save-changes-for-switch click intercepted. Element not properly visible or maybe behind another element.")
+ except TimeoutException:
+ self.skipTest(
+ "save-changes-for-switch is not clickable within the specified timeout.")
+
self.wait_until_visible("#edit-layer-source")
# Refresh the page to see if the new values are returned
@@ -142,6 +168,13 @@ class TestLayerDetailsPage(SeleniumTestCase):
"Expected %s in the dir value for layer directory" %
new_dir)
+ def test_edit_layerdetails_page(self):
+ try:
+ self._edit_layerdetails()
+ except ElementClickInterceptedException:
+ self.skipTest(
+ "ElementClickInterceptedException occured. Element not visible or maybe covered by another element.")
+
def test_delete_layer(self):
""" Delete the layer """
diff --git a/bitbake/lib/toaster/tests/browser/test_most_recent_builds_states.py b/bitbake/lib/toaster/tests/browser/test_most_recent_builds_states.py
index 7844aaa395..d7a4c34532 100644
--- a/bitbake/lib/toaster/tests/browser/test_most_recent_builds_states.py
+++ b/bitbake/lib/toaster/tests/browser/test_most_recent_builds_states.py
@@ -6,7 +6,6 @@
#
# Copyright (C) 2013-2016 Intel Corporation
#
-
from django.urls import reverse
from django.utils import timezone
from tests.browser.selenium_helpers import SeleniumTestCase
@@ -14,6 +13,8 @@ from tests.browser.selenium_helpers_base import Wait
from orm.models import Project, Build, Task, Recipe, Layer, Layer_Version
from bldcontrol.models import BuildRequest
+from selenium.webdriver.common.by import By
+
class TestMostRecentBuildsStates(SeleniumTestCase):
""" Test states update correctly in most recent builds area """
@@ -45,13 +46,14 @@ class TestMostRecentBuildsStates(SeleniumTestCase):
# build queued; check shown as queued
selector = base_selector + '[data-build-state="Queued"]'
element = self.wait_until_visible(selector)
- self.assertRegexpMatches(element.get_attribute('innerHTML'),
+ self.assertRegex(element.get_attribute('innerHTML'),
'Build queued', 'build should show queued status')
# waiting for recipes to be parsed
build.outcome = Build.IN_PROGRESS
build.recipes_to_parse = recipes_to_parse
build.recipes_parsed = 0
+ build.save()
build_request.state = BuildRequest.REQ_INPROGRESS
build_request.save()
@@ -62,7 +64,7 @@ class TestMostRecentBuildsStates(SeleniumTestCase):
element = self.wait_until_visible(selector)
bar_selector = '#recipes-parsed-percentage-bar-%s' % build.id
- bar_element = element.find_element_by_css_selector(bar_selector)
+ bar_element = element.find_element(By.CSS_SELECTOR, bar_selector)
self.assertEqual(bar_element.value_of_css_property('width'), '0px',
'recipe parse progress should be at 0')
@@ -73,7 +75,7 @@ class TestMostRecentBuildsStates(SeleniumTestCase):
self.get(url)
element = self.wait_until_visible(selector)
- bar_element = element.find_element_by_css_selector(bar_selector)
+ bar_element = element.find_element(By.CSS_SELECTOR, bar_selector)
recipe_bar_updated = lambda driver: \
bar_element.get_attribute('style') == 'width: 50%;'
msg = 'recipe parse progress bar should update to 50%'
@@ -94,11 +96,11 @@ class TestMostRecentBuildsStates(SeleniumTestCase):
selector = base_selector + '[data-build-state="Starting"]'
element = self.wait_until_visible(selector)
- self.assertRegexpMatches(element.get_attribute('innerHTML'),
+ self.assertRegex(element.get_attribute('innerHTML'),
'Tasks starting', 'build should show "tasks starting" status')
# first task finished; check tasks progress bar
- task1.order = 1
+ task1.outcome = Task.OUTCOME_SUCCESS
task1.save()
self.get(url)
@@ -107,7 +109,7 @@ class TestMostRecentBuildsStates(SeleniumTestCase):
element = self.wait_until_visible(selector)
bar_selector = '#build-pc-done-bar-%s' % build.id
- bar_element = element.find_element_by_css_selector(bar_selector)
+ bar_element = element.find_element(By.CSS_SELECTOR, bar_selector)
task_bar_updated = lambda driver: \
bar_element.get_attribute('style') == 'width: 50%;'
@@ -115,13 +117,13 @@ class TestMostRecentBuildsStates(SeleniumTestCase):
element = Wait(self.driver).until(task_bar_updated, msg)
# last task finished; check tasks progress bar updates
- task2.order = 2
+ task2.outcome = Task.OUTCOME_SUCCESS
task2.save()
self.get(url)
element = self.wait_until_visible(selector)
- bar_element = element.find_element_by_css_selector(bar_selector)
+ bar_element = element.find_element(By.CSS_SELECTOR, bar_selector)
task_bar_updated = lambda driver: \
bar_element.get_attribute('style') == 'width: 100%;'
msg = 'tasks progress bar should update to 100%'
@@ -183,7 +185,7 @@ class TestMostRecentBuildsStates(SeleniumTestCase):
selector = '[data-latest-build-result="%s"] ' \
'[data-build-state="Cancelling"]' % build.id
element = self.wait_until_visible(selector)
- self.assertRegexpMatches(element.get_attribute('innerHTML'),
+ self.assertRegex(element.get_attribute('innerHTML'),
'Cancelling the build', 'build should show "cancelling" status')
# check cancelled state
@@ -195,5 +197,5 @@ class TestMostRecentBuildsStates(SeleniumTestCase):
selector = '[data-latest-build-result="%s"] ' \
'[data-build-state="Cancelled"]' % build.id
element = self.wait_until_visible(selector)
- self.assertRegexpMatches(element.get_attribute('innerHTML'),
+ self.assertRegex(element.get_attribute('innerHTML'),
'Build cancelled', 'build should show "cancelled" status')
diff --git a/bitbake/lib/toaster/tests/browser/test_new_custom_image_page.py b/bitbake/lib/toaster/tests/browser/test_new_custom_image_page.py
index 9906ae42a9..9f0b6397fe 100644
--- a/bitbake/lib/toaster/tests/browser/test_new_custom_image_page.py
+++ b/bitbake/lib/toaster/tests/browser/test_new_custom_image_page.py
@@ -6,6 +6,7 @@
#
# SPDX-License-Identifier: GPL-2.0-only
#
+from bldcontrol.models import BuildEnvironment
from django.urls import reverse
from tests.browser.selenium_helpers import SeleniumTestCase
@@ -18,6 +19,9 @@ class TestNewCustomImagePage(SeleniumTestCase):
CUSTOM_IMAGE_NAME = 'roopa-doopa'
def setUp(self):
+ BuildEnvironment.objects.get_or_create(
+ betype=BuildEnvironment.TYPE_LOCAL,
+ )
release = Release.objects.create(
name='baz',
bitbake_version=BitbakeVersion.objects.create(name='v1')
@@ -41,11 +45,16 @@ class TestNewCustomImagePage(SeleniumTestCase):
)
# add a fake image recipe to the layer that can be customised
+ builldir = os.environ.get('BUILDDIR', './')
self.recipe = Recipe.objects.create(
name='core-image-minimal',
layer_version=layer_version,
+ file_path=f'{builldir}/core-image-minimal.bb',
is_image=True
)
+ # create a tmp file for the recipe
+ with open(self.recipe.file_path, 'w') as f:
+ f.write('foo')
# another project with a custom image already in it
project2 = Project.objects.create(name='whoop', release=release)
@@ -81,6 +90,7 @@ class TestNewCustomImagePage(SeleniumTestCase):
"""
url = reverse('newcustomimage', args=(self.project.id,))
self.get(url)
+ self.wait_until_visible('#global-nav', poll=3)
self.click('button[data-recipe="%s"]' % self.recipe.id)
@@ -128,7 +138,7 @@ class TestNewCustomImagePage(SeleniumTestCase):
"""
self._create_custom_image(self.recipe.name)
element = self.wait_until_visible('#invalid-name-help')
- self.assertRegexpMatches(element.text.strip(),
+ self.assertRegex(element.text.strip(),
'image with this name already exists')
def test_new_duplicates_project_image(self):
@@ -146,4 +156,4 @@ class TestNewCustomImagePage(SeleniumTestCase):
self._create_custom_image(custom_image_name)
element = self.wait_until_visible('#invalid-name-help')
expected = 'An image with this name already exists in this project'
- self.assertRegexpMatches(element.text.strip(), expected)
+ self.assertRegex(element.text.strip(), expected)
diff --git a/bitbake/lib/toaster/tests/browser/test_new_project_page.py b/bitbake/lib/toaster/tests/browser/test_new_project_page.py
index e20a1f686e..458bb6538d 100644
--- a/bitbake/lib/toaster/tests/browser/test_new_project_page.py
+++ b/bitbake/lib/toaster/tests/browser/test_new_project_page.py
@@ -6,11 +6,11 @@
#
# SPDX-License-Identifier: GPL-2.0-only
#
-
from django.urls import reverse
from tests.browser.selenium_helpers import SeleniumTestCase
from selenium.webdriver.support.ui import Select
from selenium.common.exceptions import InvalidElementStateException
+from selenium.webdriver.common.by import By
from orm.models import Project, Release, BitbakeVersion
@@ -47,7 +47,7 @@ class TestNewProjectPage(SeleniumTestCase):
url = reverse('newproject')
self.get(url)
-
+ self.wait_until_visible('#new-project-name', poll=3)
self.enter_text('#new-project-name', project_name)
select = Select(self.find('#projectversion'))
@@ -57,7 +57,8 @@ class TestNewProjectPage(SeleniumTestCase):
# We should get redirected to the new project's page with the
# notification at the top
- element = self.wait_until_visible('#project-created-notification')
+ element = self.wait_until_visible(
+ '#project-created-notification', poll=3)
self.assertTrue(project_name in element.text,
"New project name not in new project notification")
@@ -78,13 +79,20 @@ class TestNewProjectPage(SeleniumTestCase):
url = reverse('newproject')
self.get(url)
+ self.wait_until_visible('#new-project-name', poll=3)
self.enter_text('#new-project-name', project_name)
select = Select(self.find('#projectversion'))
select.select_by_value(str(self.release.pk))
- element = self.wait_until_visible('#hint-error-project-name')
+ radio = self.driver.find_element(By.ID, 'type-new')
+ radio.click()
+
+ self.click("#create-project-button")
+
+ self.wait_until_present('#hint-error-project-name', poll=3)
+ element = self.find('#hint-error-project-name')
self.assertTrue(("Project names must be unique" in element.text),
"Did not find unique project name error message")
diff --git a/bitbake/lib/toaster/tests/browser/test_project_builds_page.py b/bitbake/lib/toaster/tests/browser/test_project_builds_page.py
index 51717e72d4..0dba33b9c8 100644
--- a/bitbake/lib/toaster/tests/browser/test_project_builds_page.py
+++ b/bitbake/lib/toaster/tests/browser/test_project_builds_page.py
@@ -7,6 +7,7 @@
# SPDX-License-Identifier: GPL-2.0-only
#
+import os
import re
from django.urls import reverse
@@ -22,7 +23,8 @@ class TestProjectBuildsPage(SeleniumTestCase):
CLI_BUILDS_PROJECT_NAME = 'command line builds'
def setUp(self):
- bbv = BitbakeVersion.objects.create(name='bbv1', giturl='/tmp/',
+ builldir = os.environ.get('BUILDDIR', './')
+ bbv = BitbakeVersion.objects.create(name='bbv1', giturl=f'{builldir}/',
branch='master', dirpath='')
release = Release.objects.create(name='release1',
bitbake_version=bbv)
diff --git a/bitbake/lib/toaster/tests/browser/test_project_config_page.py b/bitbake/lib/toaster/tests/browser/test_project_config_page.py
index 944bcb2631..b9de541efa 100644
--- a/bitbake/lib/toaster/tests/browser/test_project_config_page.py
+++ b/bitbake/lib/toaster/tests/browser/test_project_config_page.py
@@ -7,10 +7,12 @@
# SPDX-License-Identifier: GPL-2.0-only
#
+import os
from django.urls import reverse
from tests.browser.selenium_helpers import SeleniumTestCase
from orm.models import BitbakeVersion, Release, Project, ProjectVariable
+from selenium.webdriver.common.by import By
class TestProjectConfigsPage(SeleniumTestCase):
""" Test data at /project/X/builds is displayed correctly """
@@ -21,7 +23,8 @@ class TestProjectConfigsPage(SeleniumTestCase):
'any of these characters'
def setUp(self):
- bbv = BitbakeVersion.objects.create(name='bbv1', giturl='/tmp/',
+ builldir = os.environ.get('BUILDDIR', './')
+ bbv = BitbakeVersion.objects.create(name='bbv1', giturl=f'{builldir}/',
branch='master', dirpath='')
release = Release.objects.create(name='release1',
bitbake_version=bbv)
@@ -66,7 +69,7 @@ class TestProjectConfigsPage(SeleniumTestCase):
self.enter_text('#new-imagefs_types', imagefs_type)
- checkboxes = self.driver.find_elements_by_xpath("//input[@class='fs-checkbox-fstypes']")
+ checkboxes = self.driver.find_elements(By.XPATH, "//input[@class='fs-checkbox-fstypes']")
for checkbox in checkboxes:
if checkbox.get_attribute("value") == "btrfs":
@@ -95,7 +98,7 @@ class TestProjectConfigsPage(SeleniumTestCase):
for checkbox in checkboxes:
if checkbox.get_attribute("value") == "cpio":
checkbox.click()
- element = self.driver.find_element_by_id('new-imagefs_types')
+ element = self.driver.find_element(By.ID, 'new-imagefs_types')
self.wait_until_visible('#new-imagefs_types')
@@ -129,7 +132,7 @@ class TestProjectConfigsPage(SeleniumTestCase):
self.assertTrue((self.INVALID_PATH_START_TEXT in element.text), msg)
# downloads dir path has a space
- self.driver.find_element_by_id('new-dl_dir').clear()
+ self.driver.find_element(By.ID, 'new-dl_dir').clear()
self.enter_text('#new-dl_dir', '/foo/bar a')
element = self.wait_until_visible('#hintError-dl_dir')
@@ -137,7 +140,7 @@ class TestProjectConfigsPage(SeleniumTestCase):
self.assertTrue((self.INVALID_PATH_CHAR_TEXT in element.text), msg)
# downloads dir path starts with ${...} but has a space
- self.driver.find_element_by_id('new-dl_dir').clear()
+ self.driver.find_element(By.ID,'new-dl_dir').clear()
self.enter_text('#new-dl_dir', '${TOPDIR}/down foo')
element = self.wait_until_visible('#hintError-dl_dir')
@@ -145,18 +148,18 @@ class TestProjectConfigsPage(SeleniumTestCase):
self.assertTrue((self.INVALID_PATH_CHAR_TEXT in element.text), msg)
# downloads dir path starts with /
- self.driver.find_element_by_id('new-dl_dir').clear()
+ self.driver.find_element(By.ID,'new-dl_dir').clear()
self.enter_text('#new-dl_dir', '/bar/foo')
- hidden_element = self.driver.find_element_by_id('hintError-dl_dir')
+ hidden_element = self.driver.find_element(By.ID,'hintError-dl_dir')
self.assertEqual(hidden_element.is_displayed(), False,
'downloads directory path valid but treated as invalid')
# downloads dir path starts with ${...}
- self.driver.find_element_by_id('new-dl_dir').clear()
+ self.driver.find_element(By.ID,'new-dl_dir').clear()
self.enter_text('#new-dl_dir', '${TOPDIR}/down')
- hidden_element = self.driver.find_element_by_id('hintError-dl_dir')
+ hidden_element = self.driver.find_element(By.ID,'hintError-dl_dir')
self.assertEqual(hidden_element.is_displayed(), False,
'downloads directory path valid but treated as invalid')
@@ -184,7 +187,7 @@ class TestProjectConfigsPage(SeleniumTestCase):
self.assertTrue((self.INVALID_PATH_START_TEXT in element.text), msg)
# path has a space
- self.driver.find_element_by_id('new-sstate_dir').clear()
+ self.driver.find_element(By.ID, 'new-sstate_dir').clear()
self.enter_text('#new-sstate_dir', '/foo/bar a')
element = self.wait_until_visible('#hintError-sstate_dir')
@@ -192,7 +195,7 @@ class TestProjectConfigsPage(SeleniumTestCase):
self.assertTrue((self.INVALID_PATH_CHAR_TEXT in element.text), msg)
# path starts with ${...} but has a space
- self.driver.find_element_by_id('new-sstate_dir').clear()
+ self.driver.find_element(By.ID,'new-sstate_dir').clear()
self.enter_text('#new-sstate_dir', '${TOPDIR}/down foo')
element = self.wait_until_visible('#hintError-sstate_dir')
@@ -200,18 +203,18 @@ class TestProjectConfigsPage(SeleniumTestCase):
self.assertTrue((self.INVALID_PATH_CHAR_TEXT in element.text), msg)
# path starts with /
- self.driver.find_element_by_id('new-sstate_dir').clear()
+ self.driver.find_element(By.ID,'new-sstate_dir').clear()
self.enter_text('#new-sstate_dir', '/bar/foo')
- hidden_element = self.driver.find_element_by_id('hintError-sstate_dir')
+ hidden_element = self.driver.find_element(By.ID, 'hintError-sstate_dir')
self.assertEqual(hidden_element.is_displayed(), False,
'sstate directory path valid but treated as invalid')
# paths starts with ${...}
- self.driver.find_element_by_id('new-sstate_dir').clear()
+ self.driver.find_element(By.ID, 'new-sstate_dir').clear()
self.enter_text('#new-sstate_dir', '${TOPDIR}/down')
- hidden_element = self.driver.find_element_by_id('hintError-sstate_dir')
+ hidden_element = self.driver.find_element(By.ID, 'hintError-sstate_dir')
self.assertEqual(hidden_element.is_displayed(), False,
'sstate directory path valid but treated as invalid')
diff --git a/bitbake/lib/toaster/tests/browser/test_sample.py b/bitbake/lib/toaster/tests/browser/test_sample.py
index b0067c21cd..f04f1d9a16 100644
--- a/bitbake/lib/toaster/tests/browser/test_sample.py
+++ b/bitbake/lib/toaster/tests/browser/test_sample.py
@@ -27,3 +27,13 @@ class TestSample(SeleniumTestCase):
self.get(url)
brand_link = self.find('.toaster-navbar-brand a.brand')
self.assertEqual(brand_link.text.strip(), 'Toaster')
+
+ def test_no_builds_message(self):
+ """ Test that a message is shown when there are no builds """
+ url = reverse('all-builds')
+ self.get(url)
+ self.wait_until_visible('#empty-state-allbuildstable') # wait for the empty state div to appear
+ div_msg = self.find('#empty-state-allbuildstable .alert-info')
+
+ msg = 'Sorry - no data found'
+ self.assertEqual(div_msg.text, msg)
diff --git a/bitbake/lib/toaster/tests/browser/test_toastertable_ui.py b/bitbake/lib/toaster/tests/browser/test_toastertable_ui.py
index e82d5ec654..691aca1ef0 100644
--- a/bitbake/lib/toaster/tests/browser/test_toastertable_ui.py
+++ b/bitbake/lib/toaster/tests/browser/test_toastertable_ui.py
@@ -8,11 +8,13 @@
#
from datetime import datetime
+import os
from django.urls import reverse
from django.utils import timezone
from tests.browser.selenium_helpers import SeleniumTestCase
from orm.models import BitbakeVersion, Release, Project, Build
+from selenium.webdriver.common.by import By
class TestToasterTableUI(SeleniumTestCase):
"""
@@ -33,7 +35,7 @@ class TestToasterTableUI(SeleniumTestCase):
table: WebElement for a ToasterTable
"""
selector = 'thead a.sorted'
- heading = table.find_element_by_css_selector(selector)
+ heading = table.find_element(By.CSS_SELECTOR, selector)
return heading.get_attribute('innerHTML').strip()
def _get_datetime_from_cell(self, row, selector):
@@ -45,7 +47,7 @@ class TestToasterTableUI(SeleniumTestCase):
selector: CSS selector to use to find the cell containing the date time
string
"""
- cell = row.find_element_by_css_selector(selector)
+ cell = row.find_element(By.CSS_SELECTOR, selector)
cell_text = cell.get_attribute('innerHTML').strip()
return datetime.strptime(cell_text, '%d/%m/%y %H:%M')
@@ -58,7 +60,8 @@ class TestToasterTableUI(SeleniumTestCase):
later = now + timezone.timedelta(hours=1)
even_later = later + timezone.timedelta(hours=1)
- bbv = BitbakeVersion.objects.create(name='test bbv', giturl='/tmp/',
+ builldir = os.environ.get('BUILDDIR', './')
+ bbv = BitbakeVersion.objects.create(name='test bbv', giturl=f'{builldir}/',
branch='master', dirpath='')
release = Release.objects.create(name='test release',
branch_name='master',
@@ -105,7 +108,7 @@ class TestToasterTableUI(SeleniumTestCase):
self.click('#checkbox-started_on')
# sort by started_on column
- links = table.find_elements_by_css_selector('th.started_on a')
+ links = table.find_elements(By.CSS_SELECTOR, 'th.started_on a')
for link in links:
if link.get_attribute('innerHTML').strip() == 'Started on':
link.click()
diff --git a/bitbake/lib/toaster/tests/builds/buildtest.py b/bitbake/lib/toaster/tests/builds/buildtest.py
index 13b51fb0d8..cacfccd4d3 100644
--- a/bitbake/lib/toaster/tests/builds/buildtest.py
+++ b/bitbake/lib/toaster/tests/builds/buildtest.py
@@ -88,7 +88,7 @@ def load_build_environment():
class BuildTest(unittest.TestCase):
PROJECT_NAME = "Testbuild"
- BUILDDIR = "/tmp/build/"
+ BUILDDIR = os.environ.get("BUILDDIR")
def build(self, target):
# So that the buildinfo helper uses the test database'
@@ -116,10 +116,19 @@ class BuildTest(unittest.TestCase):
project = Project.objects.create_project(name=BuildTest.PROJECT_NAME,
release=release)
+ passthrough_variable_names = ["SSTATE_DIR", "DL_DIR", "SSTATE_MIRRORS", "BB_HASHSERVE", "BB_HASHSERVE_UPSTREAM"]
+ for variable_name in passthrough_variable_names:
+ current_variable = os.environ.get(variable_name)
+ if current_variable:
+ ProjectVariable.objects.get_or_create(
+ name=variable_name,
+ value=current_variable,
+ project=project)
+
if os.environ.get("TOASTER_TEST_USE_SSTATE_MIRROR"):
ProjectVariable.objects.get_or_create(
name="SSTATE_MIRRORS",
- value="file://.* http://sstate.yoctoproject.org/PATH;downloadfilename=PATH",
+ value="file://.* http://cdn.jsdelivr.net/yocto/sstate/all/PATH;downloadfilename=PATH",
project=project)
ProjectTarget.objects.create(project=project,
diff --git a/bitbake/lib/toaster/tests/builds/test_core_image_min.py b/bitbake/lib/toaster/tests/builds/test_core_image_min.py
index 44b6cbec7b..c5bfdbfbb5 100644
--- a/bitbake/lib/toaster/tests/builds/test_core_image_min.py
+++ b/bitbake/lib/toaster/tests/builds/test_core_image_min.py
@@ -10,6 +10,7 @@
# Ionut Chisanovici, Paul Eggleton and Cristian Iorga
import os
+import pytest
from django.db.models import Q
@@ -20,12 +21,13 @@ from orm.models import CustomImagePackage
from tests.builds.buildtest import BuildTest
-
+@pytest.mark.order(4)
+@pytest.mark.django_db(True)
class BuildCoreImageMinimal(BuildTest):
"""Build core-image-minimal and test the results"""
def setUp(self):
- self.completed_build = self.build("core-image-minimal")
+ self.completed_build = self.target_already_built("core-image-minimal")
# Check if build name is unique - tc_id=795
def test_Build_Unique_Name(self):
@@ -44,17 +46,6 @@ class BuildCoreImageMinimal(BuildTest):
total_builds,
msg='Build cooker log path is not unique')
- # Check if task order is unique for one build - tc=824
- def test_Task_Unique_Order(self):
- total_task_order = Task.objects.filter(
- build=self.built).values('order').count()
- distinct_task_order = Task.objects.filter(
- build=self.completed_build).values('order').distinct().count()
-
- self.assertEqual(total_task_order,
- distinct_task_order,
- msg='Errors task order is not unique')
-
# Check task order sequence for one build - tc=825
def test_Task_Order_Sequence(self):
cnt_err = []
@@ -98,7 +89,6 @@ class BuildCoreImageMinimal(BuildTest):
'task_name',
'sstate_result')
cnt_err = []
-
for task in tasks:
if (task['sstate_result'] != Task.SSTATE_NA and
task['sstate_result'] != Task.SSTATE_MISS):
@@ -221,6 +211,7 @@ class BuildCoreImageMinimal(BuildTest):
# orm_build.outcome=0 then if the file exists and its size matches
# the file_size value. Need to add the tc in the test run
def test_Target_File_Name_Populated(self):
+ cnt_err = []
builds = Build.objects.filter(outcome=0).values('id')
for build in builds:
targets = Target.objects.filter(
@@ -230,7 +221,6 @@ class BuildCoreImageMinimal(BuildTest):
target_id=target['id']).values('id',
'file_name',
'file_size')
- cnt_err = []
for file_info in target_files:
target_id = file_info['id']
target_file_name = file_info['file_name']
diff --git a/bitbake/lib/toaster/tests/commands/test_loaddata.py b/bitbake/lib/toaster/tests/commands/test_loaddata.py
index 9e8d5553cf..7d04f030ee 100644
--- a/bitbake/lib/toaster/tests/commands/test_loaddata.py
+++ b/bitbake/lib/toaster/tests/commands/test_loaddata.py
@@ -6,13 +6,13 @@
#
# SPDX-License-Identifier: GPL-2.0-only
#
-
+import pytest
from django.test import TestCase
from django.core import management
from orm.models import Layer_Version, Layer, Release, ToasterSetting
-
+@pytest.mark.order(2)
class TestLoadDataFixtures(TestCase):
""" Test loading our 3 provided fixtures """
def test_run_loaddata_poky_command(self):
diff --git a/bitbake/lib/toaster/tests/commands/test_lsupdates.py b/bitbake/lib/toaster/tests/commands/test_lsupdates.py
index 3c4fbe0550..30c6eeb4ac 100644
--- a/bitbake/lib/toaster/tests/commands/test_lsupdates.py
+++ b/bitbake/lib/toaster/tests/commands/test_lsupdates.py
@@ -7,12 +7,13 @@
# SPDX-License-Identifier: GPL-2.0-only
#
+import pytest
from django.test import TestCase
from django.core import management
from orm.models import Layer_Version, Machine, Recipe
-
+@pytest.mark.order(3)
class TestLayerIndexUpdater(TestCase):
def test_run_lsupdates_command(self):
# Load some release information for us to fetch from the layer index
diff --git a/bitbake/lib/toaster/tests/commands/test_runbuilds.py b/bitbake/lib/toaster/tests/commands/test_runbuilds.py
index e223b95fcb..849c227edc 100644
--- a/bitbake/lib/toaster/tests/commands/test_runbuilds.py
+++ b/bitbake/lib/toaster/tests/commands/test_runbuilds.py
@@ -19,12 +19,14 @@ import time
import subprocess
import signal
+import logging
+
class KillRunbuilds(threading.Thread):
""" Kill the runbuilds process after an amount of time """
def __init__(self, *args, **kwargs):
super(KillRunbuilds, self).__init__(*args, **kwargs)
- self.setDaemon(True)
+ self.daemon = True
def run(self):
time.sleep(5)
@@ -34,9 +36,12 @@ class KillRunbuilds(threading.Thread):
pidfile_path = os.path.join(os.environ.get("BUILDDIR", "."),
".runbuilds.pid")
- with open(pidfile_path) as pidfile:
- pid = pidfile.read()
- os.kill(int(pid), signal.SIGTERM)
+ try:
+ with open(pidfile_path) as pidfile:
+ pid = pidfile.read()
+ os.kill(int(pid), signal.SIGTERM)
+ except ProcessLookupError:
+ logging.warning("Runbuilds not running or already killed")
class TestCommands(TestCase):
diff --git a/bitbake/lib/toaster/tests/db/test_db.py b/bitbake/lib/toaster/tests/db/test_db.py
index 0410422276..072ab94363 100644
--- a/bitbake/lib/toaster/tests/db/test_db.py
+++ b/bitbake/lib/toaster/tests/db/test_db.py
@@ -23,6 +23,7 @@
# SOFTWARE.
import sys
+import pytest
try:
from StringIO import StringIO
@@ -47,7 +48,7 @@ def capture(command, *args, **kwargs):
def makemigrations():
management.call_command('makemigrations')
-
+@pytest.mark.order(1)
class MigrationTest(TestCase):
def testPendingMigration(self):
diff --git a/bitbake/lib/toaster/tests/functional/functional_helpers.py b/bitbake/lib/toaster/tests/functional/functional_helpers.py
index 5c4ea71794..7c20437d14 100644
--- a/bitbake/lib/toaster/tests/functional/functional_helpers.py
+++ b/bitbake/lib/toaster/tests/functional/functional_helpers.py
@@ -11,35 +11,55 @@ import os
import logging
import subprocess
import signal
-import time
import re
from tests.browser.selenium_helpers_base import SeleniumTestCaseBase
-from tests.builds.buildtest import load_build_environment
+from selenium.webdriver.common.by import By
+from selenium.common.exceptions import NoSuchElementException
logger = logging.getLogger("toaster")
+toaster_processes = []
class SeleniumFunctionalTestCase(SeleniumTestCaseBase):
- wait_toaster_time = 5
+ wait_toaster_time = 10
@classmethod
def setUpClass(cls):
# So that the buildinfo helper uses the test database'
if os.environ.get('DJANGO_SETTINGS_MODULE', '') != \
'toastermain.settings_test':
- raise RuntimeError("Please initialise django with the tests settings: " \
+ raise RuntimeError("Please initialise django with the tests settings: "
"DJANGO_SETTINGS_MODULE='toastermain.settings_test'")
- load_build_environment()
+ # Wait for any known toaster processes to exit
+ global toaster_processes
+ for toaster_process in toaster_processes:
+ try:
+ os.waitpid(toaster_process, os.WNOHANG)
+ except ChildProcessError:
+ pass
# start toaster
cmd = "bash -c 'source toaster start'"
- p = subprocess.Popen(
+ start_process = subprocess.Popen(
cmd,
cwd=os.environ.get("BUILDDIR"),
shell=True)
- if p.wait() != 0:
- raise RuntimeError("Can't initialize toaster")
+ toaster_processes = [start_process.pid]
+ if start_process.wait() != 0:
+ port_use = os.popen("lsof -i -P -n | grep '8000 (LISTEN)'").read().strip()
+ message = ''
+ if port_use:
+ process_id = port_use.split()[1]
+ process = os.popen(f"ps -o cmd= -p {process_id}").read().strip()
+ message = f"Port 8000 occupied by {process}"
+ raise RuntimeError(f"Can't initialize toaster. {message}")
+
+ builddir = os.environ.get("BUILDDIR")
+ with open(os.path.join(builddir, '.toastermain.pid'), 'r') as f:
+ toaster_processes.append(int(f.read()))
+ with open(os.path.join(builddir, '.runbuilds.pid'), 'r') as f:
+ toaster_processes.append(int(f.read()))
super(SeleniumFunctionalTestCase, cls).setUpClass()
cls.live_server_url = 'http://localhost:8000/'
@@ -48,22 +68,30 @@ class SeleniumFunctionalTestCase(SeleniumTestCaseBase):
def tearDownClass(cls):
super(SeleniumFunctionalTestCase, cls).tearDownClass()
- # XXX: source toaster stop gets blocked, to review why?
- # from now send SIGTERM by hand
- time.sleep(cls.wait_toaster_time)
- builddir = os.environ.get("BUILDDIR")
+ global toaster_processes
- with open(os.path.join(builddir, '.toastermain.pid'), 'r') as f:
- toastermain_pid = int(f.read())
- os.kill(toastermain_pid, signal.SIGTERM)
- with open(os.path.join(builddir, '.runbuilds.pid'), 'r') as f:
- runbuilds_pid = int(f.read())
- os.kill(runbuilds_pid, signal.SIGTERM)
+ cmd = "bash -c 'source toaster stop'"
+ stop_process = subprocess.Popen(
+ cmd,
+ cwd=os.environ.get("BUILDDIR"),
+ shell=True)
+ # Toaster stop has been known to hang in these tests so force kill if it stalls
+ try:
+ if stop_process.wait(cls.wait_toaster_time) != 0:
+ raise Exception('Toaster stop process failed')
+ except Exception as e:
+ if e is subprocess.TimeoutExpired:
+ print('Toaster stop process took too long. Force killing toaster...')
+ else:
+ print('Toaster stop process failed. Force killing toaster...')
+ stop_process.kill()
+ for toaster_process in toaster_processes:
+ os.kill(toaster_process, signal.SIGTERM)
def get_URL(self):
rc=self.get_page_source()
- project_url=re.search("(projectPageUrl\s:\s\")(.*)(\",)",rc)
+ project_url=re.search(r"(projectPageUrl\s:\s\")(.*)(\",)",rc)
return project_url.group(2)
@@ -74,8 +102,8 @@ class SeleniumFunctionalTestCase(SeleniumTestCaseBase):
"""
try:
table_element = self.get_table_element(table_id)
- element = table_element.find_element_by_link_text(link_text)
- except self.NoSuchElementException:
+ element = table_element.find_element(By.LINK_TEXT, link_text)
+ except NoSuchElementException:
print('no element found')
raise
return element
@@ -85,8 +113,8 @@ class SeleniumFunctionalTestCase(SeleniumTestCaseBase):
#return whole-table element
element_xpath = "//*[@id='" + table_id + "']"
try:
- element = self.driver.find_element_by_xpath(element_xpath)
- except self.NoSuchElementException:
+ element = self.driver.find_element(By.XPATH, element_xpath)
+ except NoSuchElementException:
raise
return element
row = coordinate[0]
@@ -95,8 +123,8 @@ class SeleniumFunctionalTestCase(SeleniumTestCaseBase):
#return whole-row element
element_xpath = "//*[@id='" + table_id + "']/tbody/tr[" + str(row) + "]"
try:
- element = self.driver.find_element_by_xpath(element_xpath)
- except self.NoSuchElementException:
+ element = self.driver.find_element(By.XPATH, element_xpath)
+ except NoSuchElementException:
return False
return element
#now we are looking for an element with specified X and Y
@@ -104,7 +132,7 @@ class SeleniumFunctionalTestCase(SeleniumTestCaseBase):
element_xpath = "//*[@id='" + table_id + "']/tbody/tr[" + str(row) + "]/td[" + str(column) + "]"
try:
- element = self.driver.find_element_by_xpath(element_xpath)
- except self.NoSuchElementException:
+ element = self.driver.find_element(By.XPATH, element_xpath)
+ except NoSuchElementException:
return False
return element
diff --git a/bitbake/lib/toaster/tests/functional/test_create_new_project.py b/bitbake/lib/toaster/tests/functional/test_create_new_project.py
new file mode 100644
index 0000000000..94d90459e1
--- /dev/null
+++ b/bitbake/lib/toaster/tests/functional/test_create_new_project.py
@@ -0,0 +1,179 @@
+#! /usr/bin/env python3
+# BitBake Toaster UI tests implementation
+#
+# Copyright (C) 2023 Savoir-faire Linux
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
+import re
+import pytest
+from django.urls import reverse
+from selenium.webdriver.support.select import Select
+from tests.functional.functional_helpers import SeleniumFunctionalTestCase
+from orm.models import Project
+from selenium.webdriver.common.by import By
+
+
+@pytest.mark.django_db
+@pytest.mark.order("last")
+class TestCreateNewProject(SeleniumFunctionalTestCase):
+
+ def _create_test_new_project(
+ self,
+ project_name,
+ release,
+ release_title,
+ merge_toaster_settings,
+ ):
+ """ Create/Test new project using:
+ - Project Name: Any string
+ - Release: Any string
+ - Merge Toaster settings: True or False
+ """
+ self.get(reverse('newproject'))
+ self.wait_until_visible('#new-project-name', poll=3)
+ self.driver.find_element(By.ID,
+ "new-project-name").send_keys(project_name)
+
+ select = Select(self.find('#projectversion'))
+ select.select_by_value(release)
+
+ # check merge toaster settings
+ checkbox = self.find('.checkbox-mergeattr')
+ if merge_toaster_settings:
+ if not checkbox.is_selected():
+ checkbox.click()
+ else:
+ if checkbox.is_selected():
+ checkbox.click()
+
+ self.driver.find_element(By.ID, "create-project-button").click()
+
+ element = self.wait_until_visible('#project-created-notification', poll=3)
+ self.assertTrue(
+ self.element_exists('#project-created-notification'),
+ f"Project:{project_name} creation notification not shown"
+ )
+ self.assertTrue(
+ project_name in element.text,
+ f"New project name:{project_name} not in new project notification"
+ )
+ self.assertTrue(
+ Project.objects.filter(name=project_name).count(),
+ f"New project:{project_name} not found in database"
+ )
+
+ # check release
+ self.assertTrue(re.search(
+ release_title,
+ self.driver.find_element(By.XPATH,
+ "//span[@id='project-release-title']"
+ ).text),
+ 'The project release is not defined')
+
+ def test_create_new_project_master(self):
+ """ Test create new project using:
+ - Project Name: Any string
+ - Release: Yocto Project master (option value: 3)
+ - Merge Toaster settings: False
+ """
+ release = '3'
+ release_title = 'Yocto Project master'
+ project_name = 'projectmaster'
+ self._create_test_new_project(
+ project_name,
+ release,
+ release_title,
+ False,
+ )
+
+ def test_create_new_project_kirkstone(self):
+ """ Test create new project using:
+ - Project Name: Any string
+ - Release: Yocto Project 4.0 "Kirkstone" (option value: 1)
+ - Merge Toaster settings: True
+ """
+ release = '1'
+ release_title = 'Yocto Project 4.0 "Kirkstone"'
+ project_name = 'projectkirkstone'
+ self._create_test_new_project(
+ project_name,
+ release,
+ release_title,
+ True,
+ )
+
+ def test_create_new_project_dunfell(self):
+ """ Test create new project using:
+ - Project Name: Any string
+ - Release: Yocto Project 3.1 "Dunfell" (option value: 5)
+ - Merge Toaster settings: False
+ """
+ release = '5'
+ release_title = 'Yocto Project 3.1 "Dunfell"'
+ project_name = 'projectdunfell'
+ self._create_test_new_project(
+ project_name,
+ release,
+ release_title,
+ False,
+ )
+
+ def test_create_new_project_local(self):
+ """ Test create new project using:
+ - Project Name: Any string
+ - Release: Local Yocto Project (option value: 2)
+ - Merge Toaster settings: True
+ """
+ release = '2'
+ release_title = 'Local Yocto Project'
+ project_name = 'projectlocal'
+ self._create_test_new_project(
+ project_name,
+ release,
+ release_title,
+ True,
+ )
+
+ def test_create_new_project_without_name(self):
+ """ Test create new project without project name """
+ self.get(reverse('newproject'))
+
+ select = Select(self.find('#projectversion'))
+ select.select_by_value(str(3))
+
+ # Check input name has required attribute
+ input_name = self.driver.find_element(By.ID, "new-project-name")
+ self.assertIsNotNone(input_name.get_attribute('required'),
+ 'Input name has not required attribute')
+
+ # Check create button is disabled
+ create_btn = self.driver.find_element(By.ID, "create-project-button")
+ self.assertIsNotNone(create_btn.get_attribute('disabled'),
+ 'Create button is not disabled')
+
+ def test_import_new_project(self):
+ """ Test import new project using:
+ - Project Name: Any string
+ - Project type: select (Import command line project)
+ - Import existing project directory: Wrong Path
+ """
+ project_name = 'projectimport'
+ self.get(reverse('newproject'))
+ self.driver.find_element(By.ID,
+ "new-project-name").send_keys(project_name)
+ # select import project
+ self.find('#type-import').click()
+
+ # set wrong path
+ wrong_path = '/wrongpath'
+ self.driver.find_element(By.ID,
+ "import-project-dir").send_keys(wrong_path)
+ self.driver.find_element(By.ID, "create-project-button").click()
+
+ # check error message
+ self.assertTrue(self.element_exists('.alert-danger'),
+ 'Allert message not shown')
+ self.assertTrue(wrong_path in self.find('.alert-danger').text,
+ "Wrong path not in alert message")
diff --git a/bitbake/lib/toaster/tests/functional/test_functional_basic.py b/bitbake/lib/toaster/tests/functional/test_functional_basic.py
index 5683e3873e..e4070fbb88 100644
--- a/bitbake/lib/toaster/tests/functional/test_functional_basic.py
+++ b/bitbake/lib/toaster/tests/functional/test_functional_basic.py
@@ -8,104 +8,129 @@
#
import re
+from django.urls import reverse
+import pytest
from tests.functional.functional_helpers import SeleniumFunctionalTestCase
from orm.models import Project
+from selenium.webdriver.common.by import By
+from tests.functional.utils import get_projectId_from_url
+
+
+@pytest.mark.django_db
+@pytest.mark.order("second_to_last")
class FuntionalTestBasic(SeleniumFunctionalTestCase):
+ """Basic functional tests for Toaster"""
+ project_id = None
+
+ def setUp(self):
+ super(FuntionalTestBasic, self).setUp()
+ if not FuntionalTestBasic.project_id:
+ self._create_slenium_project()
+ current_url = self.driver.current_url
+ FuntionalTestBasic.project_id = get_projectId_from_url(current_url)
# testcase (1514)
- def test_create_slenium_project(self):
+ def _create_slenium_project(self):
project_name = 'selenium-project'
- self.get('')
- self.driver.find_element_by_link_text("To start building, create your first Toaster project").click()
- self.driver.find_element_by_id("new-project-name").send_keys(project_name)
- self.driver.find_element_by_id('projectversion').click()
- self.driver.find_element_by_id("create-project-button").click()
- element = self.wait_until_visible('#project-created-notification')
+ self.get(reverse('newproject'))
+ self.wait_until_visible('#new-project-name', poll=3)
+ self.driver.find_element(By.ID, "new-project-name").send_keys(project_name)
+ self.driver.find_element(By.ID, 'projectversion').click()
+ self.driver.find_element(By.ID, "create-project-button").click()
+ element = self.wait_until_visible('#project-created-notification', poll=10)
self.assertTrue(self.element_exists('#project-created-notification'),'Project creation notification not shown')
self.assertTrue(project_name in element.text,
"New project name not in new project notification")
self.assertTrue(Project.objects.filter(name=project_name).count(),
"New project not found in database")
+ return Project.objects.last().id
# testcase (1515)
def test_verify_left_bar_menu(self):
- self.get('')
- self.wait_until_visible('#projectstable')
+ self.get(reverse('all-projects'))
+ self.wait_until_present('#projectstable', poll=10)
self.find_element_by_link_text_in_table('projectstable', 'selenium-project').click()
+ self.wait_until_present('#config-nav', poll=10)
self.assertTrue(self.element_exists('#config-nav'),'Configuration Tab does not exist')
project_URL=self.get_URL()
- self.driver.find_element_by_xpath('//a[@href="'+project_URL+'"]').click()
+ self.driver.find_element(By.XPATH, '//a[@href="'+project_URL+'"]').click()
+ self.wait_until_present('#config-nav', poll=10)
try:
- self.driver.find_element_by_xpath("//*[@id='config-nav']/ul/li/a[@href="+'"'+project_URL+'customimages/"'+"]").click()
- self.assertTrue(re.search("Custom images",self.driver.find_element_by_xpath("//div[@class='col-md-10']").text),'Custom images information is not loading properly')
+ self.driver.find_element(By.XPATH, "//*[@id='config-nav']/ul/li/a[@href="+'"'+project_URL+'customimages/"'+"]").click()
+ self.wait_until_present('#config-nav', poll=10)
+ self.assertTrue(re.search("Custom images",self.driver.find_element(By.XPATH, "//div[@class='col-md-10']").text),'Custom images information is not loading properly')
except:
self.fail(msg='No Custom images tab available')
try:
- self.driver.find_element_by_xpath("//*[@id='config-nav']/ul/li/a[@href="+'"'+project_URL+'images/"'+"]").click()
- self.assertTrue(re.search("Compatible image recipes",self.driver.find_element_by_xpath("//div[@class='col-md-10']").text),'The Compatible image recipes information is not loading properly')
+ self.driver.find_element(By.XPATH, "//*[@id='config-nav']/ul/li/a[@href="+'"'+project_URL+'images/"'+"]").click()
+ self.wait_until_present('#config-nav', poll=10)
+ self.assertTrue(re.search("Compatible image recipes",self.driver.find_element(By.XPATH, "//div[@class='col-md-10']").text),'The Compatible image recipes information is not loading properly')
except:
self.fail(msg='No Compatible image tab available')
try:
- self.driver.find_element_by_xpath("//*[@id='config-nav']/ul/li/a[@href="+'"'+project_URL+'softwarerecipes/"'+"]").click()
- self.assertTrue(re.search("Compatible software recipes",self.driver.find_element_by_xpath("//div[@class='col-md-10']").text),'The Compatible software recipe information is not loading properly')
+ self.driver.find_element(By.XPATH, "//*[@id='config-nav']/ul/li/a[@href="+'"'+project_URL+'softwarerecipes/"'+"]").click()
+ self.wait_until_present('#config-nav', poll=10)
+ self.assertTrue(re.search("Compatible software recipes",self.driver.find_element(By.XPATH, "//div[@class='col-md-10']").text),'The Compatible software recipe information is not loading properly')
except:
self.fail(msg='No Compatible software recipe tab available')
try:
- self.driver.find_element_by_xpath("//*[@id='config-nav']/ul/li/a[@href="+'"'+project_URL+'machines/"'+"]").click()
- self.assertTrue(re.search("Compatible machines",self.driver.find_element_by_xpath("//div[@class='col-md-10']").text),'The Compatible machine information is not loading properly')
+ self.driver.find_element(By.XPATH, "//*[@id='config-nav']/ul/li/a[@href="+'"'+project_URL+'machines/"'+"]").click()
+ self.wait_until_present('#config-nav', poll=10)
+ self.assertTrue(re.search("Compatible machines",self.driver.find_element(By.XPATH, "//div[@class='col-md-10']").text),'The Compatible machine information is not loading properly')
except:
self.fail(msg='No Compatible machines tab available')
try:
- self.driver.find_element_by_xpath("//*[@id='config-nav']/ul/li/a[@href="+'"'+project_URL+'layers/"'+"]").click()
- self.assertTrue(re.search("Compatible layers",self.driver.find_element_by_xpath("//div[@class='col-md-10']").text),'The Compatible layer information is not loading properly')
+ self.driver.find_element(By.XPATH, "//*[@id='config-nav']/ul/li/a[@href="+'"'+project_URL+'layers/"'+"]").click()
+ self.wait_until_present('#config-nav', poll=10)
+ self.assertTrue(re.search("Compatible layers",self.driver.find_element(By.XPATH, "//div[@class='col-md-10']").text),'The Compatible layer information is not loading properly')
except:
self.fail(msg='No Compatible layers tab available')
try:
- self.driver.find_element_by_xpath("//*[@id='config-nav']/ul/li/a[@href="+'"'+project_URL+'configuration"'+"]").click()
- self.assertTrue(re.search("Bitbake variables",self.driver.find_element_by_xpath("//div[@class='col-md-10']").text),'The Bitbake variables information is not loading properly')
+ self.driver.find_element(By.XPATH, "//*[@id='config-nav']/ul/li/a[@href="+'"'+project_URL+'configuration"'+"]").click()
+ self.wait_until_present('#config-nav', poll=10)
+ self.assertTrue(re.search("Bitbake variables",self.driver.find_element(By.XPATH, "//div[@class='col-md-10']").text),'The Bitbake variables information is not loading properly')
except:
self.fail(msg='No Bitbake variables tab available')
# testcase (1516)
def test_review_configuration_information(self):
- self.get('')
- self.driver.find_element_by_xpath("//div[@id='global-nav']/ul/li/a[@href="+'"'+'/toastergui/projects/'+'"'+"]").click()
- self.wait_until_visible('#projectstable')
+ self.get(reverse('all-projects'))
+ self.wait_until_present('#projectstable', poll=10)
self.find_element_by_link_text_in_table('projectstable', 'selenium-project').click()
project_URL=self.get_URL()
-
+ self.wait_until_present('#config-nav', poll=10)
try:
self.assertTrue(self.element_exists('#machine-section'),'Machine section for the project configuration page does not exist')
- self.assertTrue(re.search("qemux86",self.driver.find_element_by_xpath("//span[@id='project-machine-name']").text),'The machine type is not assigned')
- self.driver.find_element_by_xpath("//span[@id='change-machine-toggle']").click()
- self.wait_until_visible('#select-machine-form')
- self.wait_until_visible('#cancel-machine-change')
- self.driver.find_element_by_xpath("//form[@id='select-machine-form']/a[@id='cancel-machine-change']").click()
+ self.assertTrue(re.search("qemux86-64",self.driver.find_element(By.XPATH, "//span[@id='project-machine-name']").text),'The machine type is not assigned')
+ self.driver.find_element(By.XPATH, "//span[@id='change-machine-toggle']").click()
+ self.wait_until_visible('#select-machine-form', poll=10)
+ self.wait_until_visible('#cancel-machine-change', poll=10)
+ self.driver.find_element(By.XPATH, "//form[@id='select-machine-form']/a[@id='cancel-machine-change']").click()
except:
self.fail(msg='The machine information is wrong in the configuration page')
try:
- self.driver.find_element_by_id('no-most-built')
+ self.driver.find_element(By.ID, 'no-most-built')
except:
self.fail(msg='No Most built information in project detail page')
try:
- self.assertTrue(re.search("Yocto Project master",self.driver.find_element_by_xpath("//span[@id='project-release-title']").text),'The project release is not defined')
+ self.assertTrue(re.search("Yocto Project master",self.driver.find_element(By.XPATH, "//span[@id='project-release-title']").text),'The project release is not defined')
except:
self.fail(msg='No project release title information in project detail page')
try:
- self.driver.find_element_by_xpath("//div[@id='layer-container']")
- self.assertTrue(re.search("3",self.driver.find_element_by_id("project-layers-count").text),'There should be 3 layers listed in the layer count')
- layer_list = self.driver.find_element_by_id("layers-in-project-list")
- layers = layer_list.find_elements_by_tag_name("li")
+ self.driver.find_element(By.XPATH, "//div[@id='layer-container']")
+ self.assertTrue(re.search("3",self.driver.find_element(By.ID, "project-layers-count").text),'There should be 3 layers listed in the layer count')
+ layer_list = self.driver.find_element(By.ID, "layers-in-project-list")
+ layers = layer_list.find_elements(By.TAG_NAME, "li")
for layer in layers:
if re.match ("openembedded-core",layer.text):
print ("openembedded-core layer is a default layer in the project configuration")
@@ -120,61 +145,60 @@ class FuntionalTestBasic(SeleniumFunctionalTestCase):
# testcase (1517)
def test_verify_machine_information(self):
- self.get('')
- self.driver.find_element_by_xpath("//div[@id='global-nav']/ul/li/a[@href="+'"'+'/toastergui/projects/'+'"'+"]").click()
- self.wait_until_visible('#projectstable')
+ self.get(reverse('all-projects'))
+ self.wait_until_present('#projectstable', poll=10)
self.find_element_by_link_text_in_table('projectstable', 'selenium-project').click()
+ self.wait_until_present('#config-nav', poll=10)
try:
self.assertTrue(self.element_exists('#machine-section'),'Machine section for the project configuration page does not exist')
- self.assertTrue(re.search("qemux86",self.driver.find_element_by_id("project-machine-name").text),'The machine type is not assigned')
- self.driver.find_element_by_id("change-machine-toggle").click()
- self.wait_until_visible('#select-machine-form')
- self.wait_until_visible('#cancel-machine-change')
- self.driver.find_element_by_id("cancel-machine-change").click()
+ self.assertTrue(re.search("qemux86-64",self.driver.find_element(By.ID, "project-machine-name").text),'The machine type is not assigned')
+ self.driver.find_element(By.ID, "change-machine-toggle").click()
+ self.wait_until_visible('#select-machine-form', poll=10)
+ self.wait_until_visible('#cancel-machine-change', poll=10)
+ self.driver.find_element(By.ID, "cancel-machine-change").click()
except:
self.fail(msg='The machine information is wrong in the configuration page')
# testcase (1518)
def test_verify_most_built_recipes_information(self):
- self.get('')
- self.driver.find_element_by_xpath("//div[@id='global-nav']/ul/li/a[@href="+'"'+'/toastergui/projects/'+'"'+"]").click()
- self.wait_until_visible('#projectstable')
+ self.get(reverse('all-projects'))
+ self.wait_until_present('#projectstable', poll=10)
self.find_element_by_link_text_in_table('projectstable', 'selenium-project').click()
+ self.wait_until_present('#config-nav', poll=10)
project_URL=self.get_URL()
-
try:
- self.assertTrue(re.search("You haven't built any recipes yet",self.driver.find_element_by_id("no-most-built").text),'Default message of no builds is not present')
- self.driver.find_element_by_xpath("//div[@id='no-most-built']/p/a[@href="+'"'+project_URL+'images/"'+"]").click()
- self.assertTrue(re.search("Compatible image recipes",self.driver.find_element_by_xpath("//div[@class='col-md-10']").text),'The Choose a recipe to build link is not working properly')
+ self.assertTrue(re.search("You haven't built any recipes yet",self.driver.find_element(By.ID, "no-most-built").text),'Default message of no builds is not present')
+ self.driver.find_element(By.XPATH, "//div[@id='no-most-built']/p/a[@href="+'"'+project_URL+'images/"'+"]").click()
+ self.wait_until_present('#config-nav', poll=10)
+ self.assertTrue(re.search("Compatible image recipes",self.driver.find_element(By.XPATH, "//div[@class='col-md-10']").text),'The Choose a recipe to build link is not working properly')
except:
self.fail(msg='No Most built information in project detail page')
# testcase (1519)
def test_verify_project_release_information(self):
- self.get('')
- self.driver.find_element_by_xpath("//div[@id='global-nav']/ul/li/a[@href="+'"'+'/toastergui/projects/'+'"'+"]").click()
- self.wait_until_visible('#projectstable')
+ self.get(reverse('all-projects'))
+ self.wait_until_present('#projectstable', poll=10)
self.find_element_by_link_text_in_table('projectstable', 'selenium-project').click()
+ self.wait_until_present('#config-nav', poll=10)
try:
- self.assertTrue(re.search("Yocto Project master",self.driver.find_element_by_id("project-release-title").text),'The project release is not defined')
+ self.assertTrue(re.search("Yocto Project master",self.driver.find_element(By.ID, "project-release-title").text),'The project release is not defined')
except:
self.fail(msg='No project release title information in project detail page')
# testcase (1520)
def test_verify_layer_information(self):
- self.get('')
- self.driver.find_element_by_xpath("//div[@id='global-nav']/ul/li/a[@href="+'"'+'/toastergui/projects/'+'"'+"]").click()
- self.wait_until_visible('#projectstable')
+ self.get(reverse('all-projects'))
+ self.wait_until_present('#projectstable', poll=10)
self.find_element_by_link_text_in_table('projectstable', 'selenium-project').click()
+ self.wait_until_present('#config-nav', poll=10)
project_URL=self.get_URL()
-
try:
- self.driver.find_element_by_xpath("//div[@id='layer-container']")
- self.assertTrue(re.search("3",self.driver.find_element_by_id("project-layers-count").text),'There should be 3 layers listed in the layer count')
- layer_list = self.driver.find_element_by_id("layers-in-project-list")
- layers = layer_list.find_elements_by_tag_name("li")
+ self.driver.find_element(By.XPATH, "//div[@id='layer-container']")
+ self.assertTrue(re.search("3",self.driver.find_element(By.ID, "project-layers-count").text),'There should be 3 layers listed in the layer count')
+ layer_list = self.driver.find_element(By.ID, "layers-in-project-list")
+ layers = layer_list.find_elements(By.TAG_NAME, "li")
for layer in layers:
if re.match ("openembedded-core",layer.text):
@@ -186,43 +210,46 @@ class FuntionalTestBasic(SeleniumFunctionalTestCase):
else:
self.fail(msg='default layers are missing from the project configuration')
- self.driver.find_element_by_xpath("//input[@id='layer-add-input']")
- self.driver.find_element_by_xpath("//button[@id='add-layer-btn']")
- self.driver.find_element_by_xpath("//div[@id='layer-container']/form[@class='form-inline']/p/a[@id='view-compatible-layers']")
- self.driver.find_element_by_xpath("//div[@id='layer-container']/form[@class='form-inline']/p/a[@href="+'"'+project_URL+'importlayer"'+"]")
+ self.driver.find_element(By.XPATH, "//input[@id='layer-add-input']")
+ self.driver.find_element(By.XPATH, "//button[@id='add-layer-btn']")
+ self.driver.find_element(By.XPATH, "//div[@id='layer-container']/form[@class='form-inline']/p/a[@id='view-compatible-layers']")
+ self.driver.find_element(By.XPATH, "//div[@id='layer-container']/form[@class='form-inline']/p/a[@href="+'"'+project_URL+'importlayer"'+"]")
except:
self.fail(msg='No Layer information in project detail page')
# testcase (1521)
def test_verify_project_detail_links(self):
- self.get('')
- self.driver.find_element_by_xpath("//div[@id='global-nav']/ul/li/a[@href="+'"'+'/toastergui/projects/'+'"'+"]").click()
- self.wait_until_visible('#projectstable')
+ self.get(reverse('all-projects'))
+ self.wait_until_present('#projectstable', poll=10)
self.find_element_by_link_text_in_table('projectstable', 'selenium-project').click()
+ self.wait_until_present('#config-nav', poll=10)
project_URL=self.get_URL()
-
- self.driver.find_element_by_xpath("//div[@id='project-topbar']/ul[@class='nav nav-tabs']/li[@id='topbar-configuration-tab']/a[@href="+'"'+project_URL+'"'+"]").click()
- self.assertTrue(re.search("Configuration",self.driver.find_element_by_xpath("//div[@id='project-topbar']/ul[@class='nav nav-tabs']/li[@id='topbar-configuration-tab']/a[@href="+'"'+project_URL+'"'+"]").text), 'Configuration tab in project topbar is misspelled')
+ self.driver.find_element(By.XPATH, "//div[@id='project-topbar']/ul[@class='nav nav-tabs']/li[@id='topbar-configuration-tab']/a[@href="+'"'+project_URL+'"'+"]").click()
+ self.wait_until_present('#config-nav', poll=10)
+ self.assertTrue(re.search("Configuration",self.driver.find_element(By.XPATH, "//div[@id='project-topbar']/ul[@class='nav nav-tabs']/li[@id='topbar-configuration-tab']/a[@href="+'"'+project_URL+'"'+"]").text), 'Configuration tab in project topbar is misspelled')
try:
- self.driver.find_element_by_xpath("//div[@id='project-topbar']/ul[@class='nav nav-tabs']/li/a[@href="+'"'+project_URL+'builds/"'+"]").click()
- self.assertTrue(re.search("Builds",self.driver.find_element_by_xpath("//div[@id='project-topbar']/ul[@class='nav nav-tabs']/li/a[@href="+'"'+project_URL+'builds/"'+"]").text), 'Builds tab in project topbar is misspelled')
- self.driver.find_element_by_xpath("//div[@id='empty-state-projectbuildstable']")
+ self.driver.find_element(By.XPATH, "//div[@id='project-topbar']/ul[@class='nav nav-tabs']/li/a[@href="+'"'+project_URL+'builds/"'+"]").click()
+ self.wait_until_visible('#project-topbar', poll=10)
+ self.assertTrue(re.search("Builds",self.driver.find_element(By.XPATH, "//div[@id='project-topbar']/ul[@class='nav nav-tabs']/li/a[@href="+'"'+project_URL+'builds/"'+"]").text), 'Builds tab in project topbar is misspelled')
+ self.driver.find_element(By.XPATH, "//div[@id='empty-state-projectbuildstable']")
except:
self.fail(msg='Builds tab information is not present')
try:
- self.driver.find_element_by_xpath("//div[@id='project-topbar']/ul[@class='nav nav-tabs']/li/a[@href="+'"'+project_URL+'importlayer"'+"]").click()
- self.assertTrue(re.search("Import layer",self.driver.find_element_by_xpath("//div[@id='project-topbar']/ul[@class='nav nav-tabs']/li/a[@href="+'"'+project_URL+'importlayer"'+"]").text), 'Import layer tab in project topbar is misspelled')
- self.driver.find_element_by_xpath("//fieldset[@id='repo-select']")
- self.driver.find_element_by_xpath("//fieldset[@id='git-repo']")
+ self.driver.find_element(By.XPATH, "//div[@id='project-topbar']/ul[@class='nav nav-tabs']/li/a[@href="+'"'+project_URL+'importlayer"'+"]").click()
+ self.wait_until_visible('#project-topbar', poll=10)
+ self.assertTrue(re.search("Import layer",self.driver.find_element(By.XPATH, "//div[@id='project-topbar']/ul[@class='nav nav-tabs']/li/a[@href="+'"'+project_URL+'importlayer"'+"]").text), 'Import layer tab in project topbar is misspelled')
+ self.driver.find_element(By.XPATH, "//fieldset[@id='repo-select']")
+ self.driver.find_element(By.XPATH, "//fieldset[@id='git-repo']")
except:
self.fail(msg='Import layer tab not loading properly')
try:
- self.driver.find_element_by_xpath("//div[@id='project-topbar']/ul[@class='nav nav-tabs']/li/a[@href="+'"'+project_URL+'newcustomimage/"'+"]").click()
- self.assertTrue(re.search("New custom image",self.driver.find_element_by_xpath("//div[@id='project-topbar']/ul[@class='nav nav-tabs']/li/a[@href="+'"'+project_URL+'newcustomimage/"'+"]").text), 'New custom image tab in project topbar is misspelled')
- self.assertTrue(re.search("Select the image recipe you want to customise",self.driver.find_element_by_xpath("//div[@class='col-md-12']/h2").text),'The new custom image tab is not loading correctly')
+ self.driver.find_element(By.XPATH, "//div[@id='project-topbar']/ul[@class='nav nav-tabs']/li/a[@href="+'"'+project_URL+'newcustomimage/"'+"]").click()
+ self.wait_until_visible('#project-topbar', poll=10)
+ self.assertTrue(re.search("New custom image",self.driver.find_element(By.XPATH, "//div[@id='project-topbar']/ul[@class='nav nav-tabs']/li/a[@href="+'"'+project_URL+'newcustomimage/"'+"]").text), 'New custom image tab in project topbar is misspelled')
+ self.assertTrue(re.search("Select the image recipe you want to customise",self.driver.find_element(By.XPATH, "//div[@class='col-md-12']/h2").text),'The new custom image tab is not loading correctly')
except:
self.fail(msg='New custom image tab not loading properly')
diff --git a/bitbake/lib/toaster/tests/functional/test_project_config.py b/bitbake/lib/toaster/tests/functional/test_project_config.py
new file mode 100644
index 0000000000..dbee36aa4e
--- /dev/null
+++ b/bitbake/lib/toaster/tests/functional/test_project_config.py
@@ -0,0 +1,341 @@
+#! /usr/bin/env python3 #
+# BitBake Toaster UI tests implementation
+#
+# Copyright (C) 2023 Savoir-faire Linux
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
+import string
+import random
+import pytest
+from django.urls import reverse
+from selenium.webdriver import Keys
+from selenium.webdriver.support.select import Select
+from selenium.common.exceptions import TimeoutException
+from tests.functional.functional_helpers import SeleniumFunctionalTestCase
+from selenium.webdriver.common.by import By
+
+from .utils import get_projectId_from_url
+
+
+@pytest.mark.django_db
+@pytest.mark.order("last")
+class TestProjectConfig(SeleniumFunctionalTestCase):
+ project_id = None
+ PROJECT_NAME = 'TestProjectConfig'
+ INVALID_PATH_START_TEXT = 'The directory path should either start with a /'
+ INVALID_PATH_CHAR_TEXT = 'The directory path cannot include spaces or ' \
+ 'any of these characters'
+
+ def _create_project(self, project_name):
+ """ Create/Test new project using:
+ - Project Name: Any string
+ - Release: Any string
+ - Merge Toaster settings: True or False
+ """
+ self.get(reverse('newproject'))
+ self.wait_until_visible('#new-project-name', poll=2)
+ self.find("#new-project-name").send_keys(project_name)
+ select = Select(self.find("#projectversion"))
+ select.select_by_value('3')
+
+ # check merge toaster settings
+ checkbox = self.find('.checkbox-mergeattr')
+ if not checkbox.is_selected():
+ checkbox.click()
+
+ if self.PROJECT_NAME != 'TestProjectConfig':
+ # Reset project name if it's not the default one
+ self.PROJECT_NAME = 'TestProjectConfig'
+
+ self.find("#create-project-button").click()
+
+ try:
+ self.wait_until_visible('#hint-error-project-name', poll=2)
+ url = reverse('project', args=(TestProjectConfig.project_id, ))
+ self.get(url)
+ self.wait_until_visible('#config-nav', poll=3)
+ except TimeoutException:
+ self.wait_until_visible('#config-nav', poll=3)
+
+ def _random_string(self, length):
+ return ''.join(
+ random.choice(string.ascii_letters) for _ in range(length)
+ )
+
+ def _get_config_nav_item(self, index):
+ config_nav = self.find('#config-nav')
+ return config_nav.find_elements(By.TAG_NAME, 'li')[index]
+
+ def _navigate_bbv_page(self):
+ """ Navigate to project BitBake variables page """
+ # check if the menu is displayed
+ if TestProjectConfig.project_id is None:
+ self._create_project(project_name=self._random_string(10))
+ current_url = self.driver.current_url
+ TestProjectConfig.project_id = get_projectId_from_url(current_url)
+ else:
+ url = reverse('projectconf', args=(TestProjectConfig.project_id,))
+ self.get(url)
+ self.wait_until_visible('#config-nav', poll=3)
+ bbv_page_link = self._get_config_nav_item(9)
+ bbv_page_link.click()
+ self.wait_until_visible('#config-nav', poll=3)
+
+ def test_no_underscore_iamgefs_type(self):
+ """
+ Should not accept IMAGEFS_TYPE with an underscore
+ """
+ self._navigate_bbv_page()
+ imagefs_type = "foo_bar"
+
+ self.wait_until_visible('#change-image_fstypes-icon', poll=2)
+
+ self.click('#change-image_fstypes-icon')
+
+ self.enter_text('#new-imagefs_types', imagefs_type)
+
+ element = self.wait_until_visible('#hintError-image-fs_type', poll=2)
+
+ self.assertTrue(("A valid image type cannot include underscores" in element.text),
+ "Did not find underscore error message")
+
+ def test_checkbox_verification(self):
+ """
+ Should automatically check the checkbox if user enters value
+ text box, if value is there in the checkbox.
+ """
+ self._navigate_bbv_page()
+
+ imagefs_type = "btrfs"
+
+ self.wait_until_visible('#change-image_fstypes-icon', poll=2)
+
+ self.click('#change-image_fstypes-icon')
+
+ self.enter_text('#new-imagefs_types', imagefs_type)
+
+ checkboxes = self.driver.find_elements(By.XPATH, "//input[@class='fs-checkbox-fstypes']")
+
+ for checkbox in checkboxes:
+ if checkbox.get_attribute("value") == "btrfs":
+ self.assertEqual(checkbox.is_selected(), True)
+
+ def test_textbox_with_checkbox_verification(self):
+ """
+ Should automatically add or remove value in textbox, if user checks
+ or unchecks checkboxes.
+ """
+ self._navigate_bbv_page()
+
+ self.wait_until_visible('#change-image_fstypes-icon', poll=2)
+
+ self.click('#change-image_fstypes-icon')
+
+ checkboxes_selector = '.fs-checkbox-fstypes'
+
+ self.wait_until_visible(checkboxes_selector, poll=2)
+ checkboxes = self.find_all(checkboxes_selector)
+
+ for checkbox in checkboxes:
+ if checkbox.get_attribute("value") == "cpio":
+ checkbox.click()
+ element = self.driver.find_element(By.ID, 'new-imagefs_types')
+
+ self.wait_until_visible('#new-imagefs_types', poll=2)
+
+ self.assertTrue(("cpio" in element.get_attribute('value'),
+ "Imagefs not added into the textbox"))
+ checkbox.click()
+ self.assertTrue(("cpio" not in element.text),
+ "Image still present in the textbox")
+
+ def test_set_download_dir(self):
+ """
+ Validate the allowed and disallowed types in the directory field for
+ DL_DIR
+ """
+ self._navigate_bbv_page()
+
+ # activate the input to edit download dir
+ try:
+ change_dl_dir_btn = self.wait_until_visible('#change-dl_dir-icon', poll=2)
+ except TimeoutException:
+ # If download dir is not displayed, test is skipped
+ change_dl_dir_btn = None
+
+ if change_dl_dir_btn:
+ change_dl_dir_btn = self.wait_until_visible('#change-dl_dir-icon', poll=2)
+ change_dl_dir_btn.click()
+
+ # downloads dir path doesn't start with / or ${...}
+ input_field = self.wait_until_visible('#new-dl_dir', poll=2)
+ input_field.clear()
+ self.enter_text('#new-dl_dir', 'home/foo')
+ element = self.wait_until_visible('#hintError-initialChar-dl_dir', poll=2)
+
+ msg = 'downloads directory path starts with invalid character but ' \
+ 'treated as valid'
+ self.assertTrue((self.INVALID_PATH_START_TEXT in element.text), msg)
+
+ # downloads dir path has a space
+ self.driver.find_element(By.ID, 'new-dl_dir').clear()
+ self.enter_text('#new-dl_dir', '/foo/bar a')
+
+ element = self.wait_until_visible('#hintError-dl_dir', poll=2)
+ msg = 'downloads directory path characters invalid but treated as valid'
+ self.assertTrue((self.INVALID_PATH_CHAR_TEXT in element.text), msg)
+
+ # downloads dir path starts with ${...} but has a space
+ self.driver.find_element(By.ID,'new-dl_dir').clear()
+ self.enter_text('#new-dl_dir', '${TOPDIR}/down foo')
+
+ element = self.wait_until_visible('#hintError-dl_dir', poll=2)
+ msg = 'downloads directory path characters invalid but treated as valid'
+ self.assertTrue((self.INVALID_PATH_CHAR_TEXT in element.text), msg)
+
+ # downloads dir path starts with /
+ self.driver.find_element(By.ID,'new-dl_dir').clear()
+ self.enter_text('#new-dl_dir', '/bar/foo')
+
+ hidden_element = self.driver.find_element(By.ID,'hintError-dl_dir')
+ self.assertEqual(hidden_element.is_displayed(), False,
+ 'downloads directory path valid but treated as invalid')
+
+ # downloads dir path starts with ${...}
+ self.driver.find_element(By.ID,'new-dl_dir').clear()
+ self.enter_text('#new-dl_dir', '${TOPDIR}/down')
+
+ hidden_element = self.driver.find_element(By.ID,'hintError-dl_dir')
+ self.assertEqual(hidden_element.is_displayed(), False,
+ 'downloads directory path valid but treated as invalid')
+
+ def test_set_sstate_dir(self):
+ """
+ Validate the allowed and disallowed types in the directory field for
+ SSTATE_DIR
+ """
+ self._navigate_bbv_page()
+
+ try:
+ btn_chg_sstate_dir = self.wait_until_visible(
+ '#change-sstate_dir-icon',
+ poll=2
+ )
+ self.click('#change-sstate_dir-icon')
+ except TimeoutException:
+ # If sstate_dir is not displayed, test is skipped
+ btn_chg_sstate_dir = None
+
+ if btn_chg_sstate_dir: # Skip continuation if sstate_dir is not displayed
+ # path doesn't start with / or ${...}
+ input_field = self.wait_until_visible('#new-sstate_dir', poll=2)
+ input_field.clear()
+ self.enter_text('#new-sstate_dir', 'home/foo')
+ element = self.wait_until_visible('#hintError-initialChar-sstate_dir', poll=2)
+
+ msg = 'sstate directory path starts with invalid character but ' \
+ 'treated as valid'
+ self.assertTrue((self.INVALID_PATH_START_TEXT in element.text), msg)
+
+ # path has a space
+ self.driver.find_element(By.ID, 'new-sstate_dir').clear()
+ self.enter_text('#new-sstate_dir', '/foo/bar a')
+
+ element = self.wait_until_visible('#hintError-sstate_dir', poll=2)
+ msg = 'sstate directory path characters invalid but treated as valid'
+ self.assertTrue((self.INVALID_PATH_CHAR_TEXT in element.text), msg)
+
+ # path starts with ${...} but has a space
+ self.driver.find_element(By.ID,'new-sstate_dir').clear()
+ self.enter_text('#new-sstate_dir', '${TOPDIR}/down foo')
+
+ element = self.wait_until_visible('#hintError-sstate_dir', poll=2)
+ msg = 'sstate directory path characters invalid but treated as valid'
+ self.assertTrue((self.INVALID_PATH_CHAR_TEXT in element.text), msg)
+
+ # path starts with /
+ self.driver.find_element(By.ID,'new-sstate_dir').clear()
+ self.enter_text('#new-sstate_dir', '/bar/foo')
+
+ hidden_element = self.driver.find_element(By.ID, 'hintError-sstate_dir')
+ self.assertEqual(hidden_element.is_displayed(), False,
+ 'sstate directory path valid but treated as invalid')
+
+ # paths starts with ${...}
+ self.driver.find_element(By.ID, 'new-sstate_dir').clear()
+ self.enter_text('#new-sstate_dir', '${TOPDIR}/down')
+
+ hidden_element = self.driver.find_element(By.ID, 'hintError-sstate_dir')
+ self.assertEqual(hidden_element.is_displayed(), False,
+ 'sstate directory path valid but treated as invalid')
+
+ def _change_bbv_value(self, **kwargs):
+ var_name, field, btn_id, input_id, value, save_btn, *_ = kwargs.values()
+ """ Change bitbake variable value """
+ self._navigate_bbv_page()
+ self.wait_until_visible(f'#{btn_id}', poll=2)
+ if kwargs.get('new_variable'):
+ self.find(f"#{btn_id}").clear()
+ self.enter_text(f"#{btn_id}", f"{var_name}")
+ else:
+ self.click(f'#{btn_id}')
+ self.wait_until_visible(f'#{input_id}', poll=2)
+
+ if kwargs.get('is_select'):
+ select = Select(self.find(f'#{input_id}'))
+ select.select_by_visible_text(value)
+ else:
+ self.find(f"#{input_id}").clear()
+ self.enter_text(f'#{input_id}', f'{value}')
+ self.click(f'#{save_btn}')
+ value_displayed = str(self.wait_until_visible(f'#{field}').text).lower()
+ msg = f'{var_name} variable not changed'
+ self.assertTrue(str(value).lower() in value_displayed, msg)
+
+ def test_change_distro_var(self):
+ """ Test changing distro variable """
+ self._change_bbv_value(
+ var_name='DISTRO',
+ field='distro',
+ btn_id='change-distro-icon',
+ input_id='new-distro',
+ value='poky-changed',
+ save_btn="apply-change-distro",
+ )
+
+ def test_set_image_install_append_var(self):
+ """ Test setting IMAGE_INSTALL:append variable """
+ self._change_bbv_value(
+ var_name='IMAGE_INSTALL:append',
+ field='image_install',
+ btn_id='change-image_install-icon',
+ input_id='new-image_install',
+ value='bash, apt, busybox',
+ save_btn="apply-change-image_install",
+ )
+
+ def test_set_package_classes_var(self):
+ """ Test setting PACKAGE_CLASSES variable """
+ self._change_bbv_value(
+ var_name='PACKAGE_CLASSES',
+ field='package_classes',
+ btn_id='change-package_classes-icon',
+ input_id='package_classes-select',
+ value='package_deb',
+ save_btn="apply-change-package_classes",
+ is_select=True,
+ )
+
+ def test_create_new_bbv(self):
+ """ Test creating new bitbake variable """
+ self._change_bbv_value(
+ var_name='New_Custom_Variable',
+ field='configvar-list',
+ btn_id='variable',
+ input_id='value',
+ value='new variable value',
+ save_btn="add-configvar-button",
+ new_variable=True
+ )
diff --git a/bitbake/lib/toaster/tests/functional/test_project_page.py b/bitbake/lib/toaster/tests/functional/test_project_page.py
new file mode 100644
index 0000000000..adbe3587e4
--- /dev/null
+++ b/bitbake/lib/toaster/tests/functional/test_project_page.py
@@ -0,0 +1,792 @@
+#! /usr/bin/env python3 #
+# BitBake Toaster UI tests implementation
+#
+# Copyright (C) 2023 Savoir-faire Linux
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
+import os
+import random
+import string
+from unittest import skip
+import pytest
+from django.urls import reverse
+from django.utils import timezone
+from selenium.webdriver.common.keys import Keys
+from selenium.webdriver.support.select import Select
+from selenium.common.exceptions import TimeoutException
+from tests.functional.functional_helpers import SeleniumFunctionalTestCase
+from orm.models import Build, Project, Target
+from selenium.webdriver.common.by import By
+
+from .utils import get_projectId_from_url, wait_until_build, wait_until_build_cancelled
+
+
+@pytest.mark.django_db
+@pytest.mark.order("last")
+class TestProjectPage(SeleniumFunctionalTestCase):
+ project_id = None
+ PROJECT_NAME = 'TestProjectPage'
+
+ def _create_project(self, project_name):
+ """ Create/Test new project using:
+ - Project Name: Any string
+ - Release: Any string
+ - Merge Toaster settings: True or False
+ """
+ self.get(reverse('newproject'))
+ self.wait_until_visible('#new-project-name')
+ self.find("#new-project-name").send_keys(project_name)
+ select = Select(self.find("#projectversion"))
+ select.select_by_value('3')
+
+ # check merge toaster settings
+ checkbox = self.find('.checkbox-mergeattr')
+ if not checkbox.is_selected():
+ checkbox.click()
+
+ if self.PROJECT_NAME != 'TestProjectPage':
+ # Reset project name if it's not the default one
+ self.PROJECT_NAME = 'TestProjectPage'
+
+ self.find("#create-project-button").click()
+
+ try:
+ self.wait_until_visible('#hint-error-project-name')
+ url = reverse('project', args=(TestProjectPage.project_id, ))
+ self.get(url)
+ self.wait_until_visible('#config-nav', poll=3)
+ except TimeoutException:
+ self.wait_until_visible('#config-nav', poll=3)
+
+ def _random_string(self, length):
+ return ''.join(
+ random.choice(string.ascii_letters) for _ in range(length)
+ )
+
+ def _navigate_to_project_page(self):
+ # Navigate to project page
+ if TestProjectPage.project_id is None:
+ self._create_project(project_name=self._random_string(10))
+ current_url = self.driver.current_url
+ TestProjectPage.project_id = get_projectId_from_url(current_url)
+ else:
+ url = reverse('project', args=(TestProjectPage.project_id,))
+ self.get(url)
+ self.wait_until_visible('#config-nav')
+
+ def _get_create_builds(self, **kwargs):
+ """ Create a build and return the build object """
+ # parameters for builds to associate with the projects
+ now = timezone.now()
+ self.project1_build_success = {
+ 'project': Project.objects.get(id=TestProjectPage.project_id),
+ 'started_on': now,
+ 'completed_on': now,
+ 'outcome': Build.SUCCEEDED
+ }
+
+ self.project1_build_failure = {
+ 'project': Project.objects.get(id=TestProjectPage.project_id),
+ 'started_on': now,
+ 'completed_on': now,
+ 'outcome': Build.FAILED
+ }
+ build1 = Build.objects.create(**self.project1_build_success)
+ build2 = Build.objects.create(**self.project1_build_failure)
+
+ # add some targets to these builds so they have recipe links
+ # (and so we can find the row in the ToasterTable corresponding to
+ # a particular build)
+ Target.objects.create(build=build1, target='foo')
+ Target.objects.create(build=build2, target='bar')
+
+ if kwargs:
+ # Create kwargs.get('success') builds with success status with target
+ # and kwargs.get('failure') builds with failure status with target
+ for i in range(kwargs.get('success', 0)):
+ now = timezone.now()
+ self.project1_build_success['started_on'] = now
+ self.project1_build_success[
+ 'completed_on'] = now - timezone.timedelta(days=i)
+ build = Build.objects.create(**self.project1_build_success)
+ Target.objects.create(build=build,
+ target=f'{i}_success_recipe',
+ task=f'{i}_success_task')
+
+ for i in range(kwargs.get('failure', 0)):
+ now = timezone.now()
+ self.project1_build_failure['started_on'] = now
+ self.project1_build_failure[
+ 'completed_on'] = now - timezone.timedelta(days=i)
+ build = Build.objects.create(**self.project1_build_failure)
+ Target.objects.create(build=build,
+ target=f'{i}_fail_recipe',
+ task=f'{i}_fail_task')
+ return build1, build2
+
+ def _mixin_test_table_edit_column(
+ self,
+ table_id,
+ edit_btn_id,
+ list_check_box_id: list
+ ):
+ # Check edit column
+ edit_column = self.find(f'#{edit_btn_id}')
+ self.assertTrue(edit_column.is_displayed())
+ edit_column.click()
+ # Check dropdown is visible
+ self.wait_until_visible('ul.dropdown-menu.editcol')
+ for check_box_id in list_check_box_id:
+ # Check that we can hide/show table column
+ check_box = self.find(f'#{check_box_id}')
+ th_class = str(check_box_id).replace('checkbox-', '')
+ if check_box.is_selected():
+ # check if column is visible in table
+ self.assertTrue(
+ self.find(
+ f'#{table_id} thead th.{th_class}'
+ ).is_displayed(),
+ f"The {th_class} column is checked in EditColumn dropdown, but it's not visible in table"
+ )
+ check_box.click()
+ # check if column is hidden in table
+ self.assertFalse(
+ self.find(
+ f'#{table_id} thead th.{th_class}'
+ ).is_displayed(),
+ f"The {th_class} column is unchecked in EditColumn dropdown, but it's visible in table"
+ )
+ else:
+ # check if column is hidden in table
+ self.assertFalse(
+ self.find(
+ f'#{table_id} thead th.{th_class}'
+ ).is_displayed(),
+ f"The {th_class} column is unchecked in EditColumn dropdown, but it's visible in table"
+ )
+ check_box.click()
+ # check if column is visible in table
+ self.assertTrue(
+ self.find(
+ f'#{table_id} thead th.{th_class}'
+ ).is_displayed(),
+ f"The {th_class} column is checked in EditColumn dropdown, but it's not visible in table"
+ )
+
+ def _get_config_nav_item(self, index):
+ config_nav = self.find('#config-nav')
+ return config_nav.find_elements(By.TAG_NAME, 'li')[index]
+
+ def _navigate_to_config_nav(self, nav_id, nav_index):
+ # navigate to the project page
+ self._navigate_to_project_page()
+ # click on "Software recipe" tab
+ soft_recipe = self._get_config_nav_item(nav_index)
+ soft_recipe.click()
+ self.wait_until_visible(f'#{nav_id}')
+
+ def _mixin_test_table_show_rows(self, table_selector, **kwargs):
+ """ Test the show rows feature in the builds table on the all builds page """
+ def test_show_rows(row_to_show, show_row_link):
+ # Check that we can show rows == row_to_show
+ show_row_link.select_by_value(str(row_to_show))
+ self.wait_until_visible(f'#{table_selector} tbody tr', poll=3)
+ # check at least some rows are visible
+ self.assertTrue(
+ len(self.find_all(f'#{table_selector} tbody tr')) > 0
+ )
+ self.wait_until_present(f'#{table_selector} tbody tr')
+ show_rows = self.driver.find_elements(
+ By.XPATH,
+ f'//select[@class="form-control pagesize-{table_selector}"]'
+ )
+ rows_to_show = [10, 25, 50, 100, 150]
+ to_skip = kwargs.get('to_skip', [])
+ # Check show rows
+ for show_row_link in show_rows:
+ show_row_link = Select(show_row_link)
+ for row_to_show in rows_to_show:
+ if row_to_show not in to_skip:
+ test_show_rows(row_to_show, show_row_link)
+
+ def _mixin_test_table_search_input(self, **kwargs):
+ input_selector, input_text, searchBtn_selector, table_selector, *_ = kwargs.values()
+ # Test search input
+ self.wait_until_visible(f'#{input_selector}')
+ recipe_input = self.find(f'#{input_selector}')
+ recipe_input.send_keys(input_text)
+ self.find(f'#{searchBtn_selector}').click()
+ self.wait_until_visible(f'#{table_selector} tbody tr')
+ rows = self.find_all(f'#{table_selector} tbody tr')
+ self.assertTrue(len(rows) > 0)
+
+ def test_create_project(self):
+ """ Create/Test new project using:
+ - Project Name: Any string
+ - Release: Any string
+ - Merge Toaster settings: True or False
+ """
+ self._create_project(project_name=self.PROJECT_NAME)
+
+ def test_image_recipe_editColumn(self):
+ """ Test the edit column feature in image recipe table on project page """
+ self._get_create_builds(success=10, failure=10)
+
+ url = reverse('projectimagerecipes', args=(TestProjectPage.project_id,))
+ self.get(url)
+ self.wait_until_present('#imagerecipestable tbody tr')
+
+ column_list = [
+ 'get_description_or_summary', 'layer_version__get_vcs_reference',
+ 'layer_version__layer__name', 'license', 'recipe-file', 'section',
+ 'version'
+ ]
+
+ # Check that we can hide the edit column
+ self._mixin_test_table_edit_column(
+ 'imagerecipestable',
+ 'edit-columns-button',
+ [f'checkbox-{column}' for column in column_list]
+ )
+
+ def test_page_header_on_project_page(self):
+ """ Check page header in project page:
+ - AT LEFT -> Logo of Yocto project, displayed, clickable
+ - "Toaster"+" Information icon", displayed, clickable
+ - "Server Icon" + "All builds", displayed, clickable
+ - "Directory Icon" + "All projects", displayed, clickable
+ - "Book Icon" + "Documentation", displayed, clickable
+ - AT RIGHT -> button "New project", displayed, clickable
+ """
+ # navigate to the project page
+ self._navigate_to_project_page()
+
+ # check page header
+ # AT LEFT -> Logo of Yocto project
+ logo = self.driver.find_element(
+ By.XPATH,
+ "//div[@class='toaster-navbar-brand']",
+ )
+ logo_img = logo.find_element(By.TAG_NAME, 'img')
+ self.assertTrue(logo_img.is_displayed(),
+ 'Logo of Yocto project not found')
+ self.assertTrue(
+ '/static/img/logo.png' in str(logo_img.get_attribute('src')),
+ 'Logo of Yocto project not found'
+ )
+ # "Toaster"+" Information icon", clickable
+ toaster = self.driver.find_element(
+ By.XPATH,
+ "//div[@class='toaster-navbar-brand']//a[@class='brand']",
+ )
+ self.assertTrue(toaster.is_displayed(), 'Toaster not found')
+ self.assertTrue(toaster.text == 'Toaster')
+ info_sign = self.find('.glyphicon-info-sign')
+ self.assertTrue(info_sign.is_displayed())
+
+ # "Server Icon" + "All builds"
+ all_builds = self.find('#navbar-all-builds')
+ all_builds_link = all_builds.find_element(By.TAG_NAME, 'a')
+ self.assertTrue("All builds" in all_builds_link.text)
+ self.assertTrue(
+ '/toastergui/builds/' in str(all_builds_link.get_attribute('href'))
+ )
+ server_icon = all_builds.find_element(By.TAG_NAME, 'i')
+ self.assertTrue(
+ server_icon.get_attribute('class') == 'glyphicon glyphicon-tasks'
+ )
+ self.assertTrue(server_icon.is_displayed())
+
+ # "Directory Icon" + "All projects"
+ all_projects = self.find('#navbar-all-projects')
+ all_projects_link = all_projects.find_element(By.TAG_NAME, 'a')
+ self.assertTrue("All projects" in all_projects_link.text)
+ self.assertTrue(
+ '/toastergui/projects/' in str(all_projects_link.get_attribute(
+ 'href'))
+ )
+ dir_icon = all_projects.find_element(By.TAG_NAME, 'i')
+ self.assertTrue(
+ dir_icon.get_attribute('class') == 'icon-folder-open'
+ )
+ self.assertTrue(dir_icon.is_displayed())
+
+ # "Book Icon" + "Documentation"
+ toaster_docs_link = self.find('#navbar-docs')
+ toaster_docs_link_link = toaster_docs_link.find_element(By.TAG_NAME,
+ 'a')
+ self.assertTrue("Documentation" in toaster_docs_link_link.text)
+ self.assertTrue(
+ toaster_docs_link_link.get_attribute('href') == 'http://docs.yoctoproject.org/toaster-manual/index.html#toaster-user-manual'
+ )
+ book_icon = toaster_docs_link.find_element(By.TAG_NAME, 'i')
+ self.assertTrue(
+ book_icon.get_attribute('class') == 'glyphicon glyphicon-book'
+ )
+ self.assertTrue(book_icon.is_displayed())
+
+ # AT RIGHT -> button "New project"
+ new_project_button = self.find('#new-project-button')
+ self.assertTrue(new_project_button.is_displayed())
+ self.assertTrue(new_project_button.text == 'New project')
+ new_project_button.click()
+ self.assertTrue(
+ '/toastergui/newproject/' in str(self.driver.current_url)
+ )
+
+ def test_edit_project_name(self):
+ """ Test edit project name:
+ - Click on "Edit" icon button
+ - Change project name
+ - Click on "Save" button
+ - Check project name is changed
+ """
+ # navigate to the project page
+ self._navigate_to_project_page()
+
+ # click on "Edit" icon button
+ self.wait_until_visible('#project-name-container')
+ edit_button = self.find('#project-change-form-toggle')
+ edit_button.click()
+ project_name_input = self.find('#project-name-change-input')
+ self.assertTrue(project_name_input.is_displayed())
+ project_name_input.clear()
+ project_name_input.send_keys('New Name')
+ self.find('#project-name-change-btn').click()
+
+ # check project name is changed
+ self.wait_until_visible('#project-name-container')
+ self.assertTrue(
+ 'New Name' in str(self.find('#project-name-container').text)
+ )
+
+ def test_project_page_tabs(self):
+ """ Test project tabs:
+ - "configuration" tab
+ - "Builds" tab
+ - "Import layers" tab
+ - "New custom image" tab
+ Check search box used to build recipes
+ """
+ # navigate to the project page
+ self._navigate_to_project_page()
+
+ # check "configuration" tab
+ self.wait_until_visible('#topbar-configuration-tab')
+ config_tab = self.find('#topbar-configuration-tab')
+ self.assertTrue(config_tab.get_attribute('class') == 'active')
+ self.assertTrue('Configuration' in str(config_tab.text))
+ self.assertTrue(
+ f"/toastergui/project/{TestProjectPage.project_id}" in str(self.driver.current_url)
+ )
+
+ def get_tabs():
+ # tabs links list
+ return self.driver.find_elements(
+ By.XPATH,
+ '//div[@id="project-topbar"]//li'
+ )
+
+ def check_tab_link(tab_index, tab_name, url):
+ tab = get_tabs()[tab_index]
+ tab_link = tab.find_element(By.TAG_NAME, 'a')
+ self.assertTrue(url in tab_link.get_attribute('href'))
+ self.assertTrue(tab_name in tab_link.text)
+ self.assertTrue(tab.get_attribute('class') == 'active')
+
+ # check "Builds" tab
+ builds_tab = get_tabs()[1]
+ builds_tab.find_element(By.TAG_NAME, 'a').click()
+ check_tab_link(
+ 1,
+ 'Builds',
+ f"/toastergui/project/{TestProjectPage.project_id}/builds"
+ )
+
+ # check "Import layers" tab
+ import_layers_tab = get_tabs()[2]
+ import_layers_tab.find_element(By.TAG_NAME, 'a').click()
+ check_tab_link(
+ 2,
+ 'Import layer',
+ f"/toastergui/project/{TestProjectPage.project_id}/importlayer"
+ )
+
+ # check "New custom image" tab
+ new_custom_image_tab = get_tabs()[3]
+ new_custom_image_tab.find_element(By.TAG_NAME, 'a').click()
+ check_tab_link(
+ 3,
+ 'New custom image',
+ f"/toastergui/project/{TestProjectPage.project_id}/newcustomimage"
+ )
+
+ # check search box can be use to build recipes
+ search_box = self.find('#build-input')
+ search_box.send_keys('core-image-minimal')
+ self.find('#build-button').click()
+ self.wait_until_visible('#latest-builds')
+ lastest_builds = self.driver.find_elements(
+ By.XPATH,
+ '//div[@id="latest-builds"]',
+ )
+ last_build = lastest_builds[0]
+ self.assertTrue(
+ 'core-image-minimal' in str(last_build.text)
+ )
+
+ def test_softwareRecipe_page(self):
+ """ Test software recipe page
+ - Check title "Compatible software recipes" is displayed
+ - Check search input
+ - Check "build recipe" button works
+ - Check software recipe table feature(show/hide column, pagination)
+ """
+ self._navigate_to_config_nav('softwarerecipestable', 4)
+ # check title "Compatible software recipes" is displayed
+ self.assertTrue("Compatible software recipes" in self.get_page_source())
+ # Test search input
+ self._mixin_test_table_search_input(
+ input_selector='search-input-softwarerecipestable',
+ input_text='busybox',
+ searchBtn_selector='search-submit-softwarerecipestable',
+ table_selector='softwarerecipestable'
+ )
+ # check "build recipe" button works
+ rows = self.find_all('#softwarerecipestable tbody tr')
+ image_to_build = rows[0]
+ build_btn = image_to_build.find_element(
+ By.XPATH,
+ '//td[@class="add-del-layers"]//a[1]'
+ )
+ build_btn.click()
+ build_state = wait_until_build(self, 'queued cloning starting parsing failed')
+ lastest_builds = self.driver.find_elements(
+ By.XPATH,
+ '//div[@id="latest-builds"]/div'
+ )
+ self.assertTrue(len(lastest_builds) > 0)
+ last_build = lastest_builds[0]
+ cancel_button = last_build.find_element(
+ By.XPATH,
+ '//span[@class="cancel-build-btn pull-right alert-link"]',
+ )
+ cancel_button.click()
+ if 'starting' not in build_state: # change build state when cancelled in starting state
+ wait_until_build_cancelled(self)
+
+ # check software recipe table feature(show/hide column, pagination)
+ self._navigate_to_config_nav('softwarerecipestable', 4)
+ column_list = [
+ 'get_description_or_summary',
+ 'layer_version__get_vcs_reference',
+ 'layer_version__layer__name',
+ 'license',
+ 'recipe-file',
+ 'section',
+ 'version',
+ ]
+ self._mixin_test_table_edit_column(
+ 'softwarerecipestable',
+ 'edit-columns-button',
+ [f'checkbox-{column}' for column in column_list]
+ )
+ self._navigate_to_config_nav('softwarerecipestable', 4)
+ # check show rows(pagination)
+ self._mixin_test_table_show_rows(
+ table_selector='softwarerecipestable',
+ to_skip=[150],
+ )
+
+ def test_machines_page(self):
+ """ Test Machine page
+ - Check if title "Compatible machines" is displayed
+ - Check search input
+ - Check "Select machine" button works
+ - Check "Add layer" button works
+ - Check Machine table feature(show/hide column, pagination)
+ """
+ self._navigate_to_config_nav('machinestable', 5)
+ # check title "Compatible software recipes" is displayed
+ self.assertTrue("Compatible machines" in self.get_page_source())
+ # Test search input
+ self._mixin_test_table_search_input(
+ input_selector='search-input-machinestable',
+ input_text='qemux86-64',
+ searchBtn_selector='search-submit-machinestable',
+ table_selector='machinestable'
+ )
+ # check "Select machine" button works
+ rows = self.find_all('#machinestable tbody tr')
+ machine_to_select = rows[0]
+ select_btn = machine_to_select.find_element(
+ By.XPATH,
+ '//td[@class="add-del-layers"]//a[1]'
+ )
+ select_btn.send_keys(Keys.RETURN)
+ self.wait_until_visible('#config-nav')
+ project_machine_name = self.find('#project-machine-name')
+ self.assertTrue(
+ 'qemux86-64' in project_machine_name.text
+ )
+ # check "Add layer" button works
+ self._navigate_to_config_nav('machinestable', 5)
+ # Search for a machine whit layer not in project
+ self._mixin_test_table_search_input(
+ input_selector='search-input-machinestable',
+ input_text='qemux86-64-tpm2',
+ searchBtn_selector='search-submit-machinestable',
+ table_selector='machinestable'
+ )
+ self.wait_until_visible('#machinestable tbody tr', poll=3)
+ rows = self.find_all('#machinestable tbody tr')
+ machine_to_add = rows[0]
+ add_btn = machine_to_add.find_element(By.XPATH, '//td[@class="add-del-layers"]')
+ add_btn.click()
+ self.wait_until_visible('#change-notification')
+ change_notification = self.find('#change-notification')
+ self.assertTrue(
+ f'You have added 1 layer to your project' in str(change_notification.text)
+ )
+ # check Machine table feature(show/hide column, pagination)
+ self._navigate_to_config_nav('machinestable', 5)
+ column_list = [
+ 'description',
+ 'layer_version__get_vcs_reference',
+ 'layer_version__layer__name',
+ 'machinefile',
+ ]
+ self._mixin_test_table_edit_column(
+ 'machinestable',
+ 'edit-columns-button',
+ [f'checkbox-{column}' for column in column_list]
+ )
+ self._navigate_to_config_nav('machinestable', 5)
+ # check show rows(pagination)
+ self._mixin_test_table_show_rows(
+ table_selector='machinestable',
+ to_skip=[150],
+ )
+
+ def test_layers_page(self):
+ """ Test layers page
+ - Check if title "Compatible layerss" is displayed
+ - Check search input
+ - Check "Add layer" button works
+ - Check "Remove layer" button works
+ - Check layers table feature(show/hide column, pagination)
+ """
+ self._navigate_to_config_nav('layerstable', 6)
+ # check title "Compatible layers" is displayed
+ self.assertTrue("Compatible layers" in self.get_page_source())
+ # Test search input
+ input_text='meta-tanowrt'
+ self._mixin_test_table_search_input(
+ input_selector='search-input-layerstable',
+ input_text=input_text,
+ searchBtn_selector='search-submit-layerstable',
+ table_selector='layerstable'
+ )
+ # check "Add layer" button works
+ self.wait_until_visible('#layerstable tbody tr', poll=3)
+ rows = self.find_all('#layerstable tbody tr')
+ layer_to_add = rows[0]
+ add_btn = layer_to_add.find_element(
+ By.XPATH,
+ '//td[@class="add-del-layers"]'
+ )
+ add_btn.click()
+ # check modal is displayed
+ self.wait_until_visible('#dependencies-modal', poll=3)
+ list_dependencies = self.find_all('#dependencies-list li')
+ # click on add-layers button
+ add_layers_btn = self.driver.find_element(
+ By.XPATH,
+ '//form[@id="dependencies-modal-form"]//button[@class="btn btn-primary"]'
+ )
+ add_layers_btn.click()
+ self.wait_until_visible('#change-notification')
+ change_notification = self.find('#change-notification')
+ self.assertTrue(
+ f'You have added {len(list_dependencies)+1} layers to your project: {input_text} and its dependencies' in str(change_notification.text)
+ )
+ # check "Remove layer" button works
+ self.wait_until_visible('#layerstable tbody tr', poll=3)
+ rows = self.find_all('#layerstable tbody tr')
+ layer_to_remove = rows[0]
+ remove_btn = layer_to_remove.find_element(
+ By.XPATH,
+ '//td[@class="add-del-layers"]'
+ )
+ remove_btn.click()
+ self.wait_until_visible('#change-notification', poll=2)
+ change_notification = self.find('#change-notification')
+ self.assertTrue(
+ f'You have removed 1 layer from your project: {input_text}' in str(change_notification.text)
+ )
+ # check layers table feature(show/hide column, pagination)
+ self._navigate_to_config_nav('layerstable', 6)
+ column_list = [
+ 'dependencies',
+ 'revision',
+ 'layer__vcs_url',
+ 'git_subdir',
+ 'layer__summary',
+ ]
+ self._mixin_test_table_edit_column(
+ 'layerstable',
+ 'edit-columns-button',
+ [f'checkbox-{column}' for column in column_list]
+ )
+ self._navigate_to_config_nav('layerstable', 6)
+ # check show rows(pagination)
+ self._mixin_test_table_show_rows(
+ table_selector='layerstable',
+ to_skip=[150],
+ )
+
+ def test_distro_page(self):
+ """ Test distros page
+ - Check if title "Compatible distros" is displayed
+ - Check search input
+ - Check "Add layer" button works
+ - Check distro table feature(show/hide column, pagination)
+ """
+ self._navigate_to_config_nav('distrostable', 7)
+ # check title "Compatible distros" is displayed
+ self.assertTrue("Compatible Distros" in self.get_page_source())
+ # Test search input
+ input_text='poky-altcfg'
+ self._mixin_test_table_search_input(
+ input_selector='search-input-distrostable',
+ input_text=input_text,
+ searchBtn_selector='search-submit-distrostable',
+ table_selector='distrostable'
+ )
+ # check "Add distro" button works
+ rows = self.find_all('#distrostable tbody tr')
+ distro_to_add = rows[0]
+ add_btn = distro_to_add.find_element(
+ By.XPATH,
+ '//td[@class="add-del-layers"]//a[1]'
+ )
+ add_btn.click()
+ self.wait_until_visible('#change-notification', poll=2)
+ change_notification = self.find('#change-notification')
+ self.assertTrue(
+ f'You have changed the distro to: {input_text}' in str(change_notification.text)
+ )
+ # check distro table feature(show/hide column, pagination)
+ self._navigate_to_config_nav('distrostable', 7)
+ column_list = [
+ 'description',
+ 'templatefile',
+ 'layer_version__get_vcs_reference',
+ 'layer_version__layer__name',
+ ]
+ self._mixin_test_table_edit_column(
+ 'distrostable',
+ 'edit-columns-button',
+ [f'checkbox-{column}' for column in column_list]
+ )
+ self._navigate_to_config_nav('distrostable', 7)
+ # check show rows(pagination)
+ self._mixin_test_table_show_rows(
+ table_selector='distrostable',
+ to_skip=[150],
+ )
+
+ def test_single_layer_page(self):
+ """ Test layer page
+ - Check if title is displayed
+ - Check add/remove layer button works
+ - Check tabs(layers, recipes, machines) are displayed
+ - Check left section is displayed
+ - Check layer name
+ - Check layer summary
+ - Check layer description
+ """
+ url = reverse("layerdetails", args=(TestProjectPage.project_id, 8))
+ self.get(url)
+ self.wait_until_visible('.page-header')
+ # check title is displayed
+ self.assertTrue(self.find('.page-header h1').is_displayed())
+
+ # check add layer button works
+ remove_layer_btn = self.find('#add-remove-layer-btn')
+ remove_layer_btn.click()
+ self.wait_until_visible('#change-notification', poll=2)
+ change_notification = self.find('#change-notification')
+ self.assertTrue(
+ f'You have removed 1 layer from your project' in str(change_notification.text)
+ )
+ # check add layer button works, 18 is the random layer id
+ add_layer_btn = self.find('#add-remove-layer-btn')
+ add_layer_btn.click()
+ self.wait_until_visible('#change-notification')
+ change_notification = self.find('#change-notification')
+ self.assertTrue(
+ f'You have added 1 layer to your project' in str(change_notification.text)
+ )
+ # check tabs(layers, recipes, machines) are displayed
+ tabs = self.find_all('.nav-tabs li')
+ self.assertEqual(len(tabs), 3)
+ # Check first tab
+ tabs[0].click()
+ self.assertTrue(
+ 'active' in str(self.find('#information').get_attribute('class'))
+ )
+ # Check second tab
+ tabs[1].click()
+ self.assertTrue(
+ 'active' in str(self.find('#recipes').get_attribute('class'))
+ )
+ # Check third tab
+ tabs[2].click()
+ self.assertTrue(
+ 'active' in str(self.find('#machines').get_attribute('class'))
+ )
+ # Check left section is displayed
+ section = self.find('.well')
+ # Check layer name
+ self.assertTrue(
+ section.find_element(By.XPATH, '//h2[1]').is_displayed()
+ )
+ # Check layer summary
+ self.assertTrue("Summary" in section.text)
+ # Check layer description
+ self.assertTrue("Description" in section.text)
+
+ def test_single_recipe_page(self):
+ """ Test recipe page
+ - Check if title is displayed
+ - Check add recipe layer displayed
+ - Check left section is displayed
+ - Check recipe: name, summary, description, Version, Section,
+ License, Approx. packages included, Approx. size, Recipe file
+ """
+ url = reverse("recipedetails", args=(TestProjectPage.project_id, 53428))
+ self.get(url)
+ self.wait_until_visible('.page-header')
+ # check title is displayed
+ self.assertTrue(self.find('.page-header h1').is_displayed())
+ # check add recipe layer displayed
+ add_recipe_layer_btn = self.find('#add-layer-btn')
+ self.assertTrue(add_recipe_layer_btn.is_displayed())
+ # check left section is displayed
+ section = self.find('.well')
+ # Check recipe name
+ self.assertTrue(
+ section.find_element(By.XPATH, '//h2[1]').is_displayed()
+ )
+ # Check recipe sections details info are displayed
+ self.assertTrue("Summary" in section.text)
+ self.assertTrue("Description" in section.text)
+ self.assertTrue("Version" in section.text)
+ self.assertTrue("Section" in section.text)
+ self.assertTrue("License" in section.text)
+ self.assertTrue("Approx. packages included" in section.text)
+ self.assertTrue("Approx. package size" in section.text)
+ self.assertTrue("Recipe file" in section.text)
diff --git a/bitbake/lib/toaster/tests/functional/test_project_page_tab_config.py b/bitbake/lib/toaster/tests/functional/test_project_page_tab_config.py
new file mode 100644
index 0000000000..eb905ddf3f
--- /dev/null
+++ b/bitbake/lib/toaster/tests/functional/test_project_page_tab_config.py
@@ -0,0 +1,528 @@
+#! /usr/bin/env python3 #
+# BitBake Toaster UI tests implementation
+#
+# Copyright (C) 2023 Savoir-faire Linux
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
+import string
+import random
+import pytest
+from django.urls import reverse
+from selenium.webdriver import Keys
+from selenium.webdriver.support.select import Select
+from selenium.common.exceptions import ElementClickInterceptedException, NoSuchElementException, TimeoutException
+from orm.models import Project
+from tests.functional.functional_helpers import SeleniumFunctionalTestCase
+from selenium.webdriver.common.by import By
+
+from .utils import get_projectId_from_url, wait_until_build, wait_until_build_cancelled
+
+
+@pytest.mark.django_db
+@pytest.mark.order("last")
+class TestProjectConfigTab(SeleniumFunctionalTestCase):
+ PROJECT_NAME = 'TestProjectConfigTab'
+ project_id = None
+
+ def _create_project(self, project_name, **kwargs):
+ """ Create/Test new project using:
+ - Project Name: Any string
+ - Release: Any string
+ - Merge Toaster settings: True or False
+ """
+ release = kwargs.get('release', '3')
+ self.get(reverse('newproject'))
+ self.wait_until_visible('#new-project-name')
+ self.find("#new-project-name").send_keys(project_name)
+ select = Select(self.find("#projectversion"))
+ select.select_by_value(release)
+
+ # check merge toaster settings
+ checkbox = self.find('.checkbox-mergeattr')
+ if not checkbox.is_selected():
+ checkbox.click()
+
+ if self.PROJECT_NAME != 'TestProjectConfigTab':
+ # Reset project name if it's not the default one
+ self.PROJECT_NAME = 'TestProjectConfigTab'
+
+ self.find("#create-project-button").click()
+
+ try:
+ self.wait_until_visible('#hint-error-project-name', poll=3)
+ url = reverse('project', args=(TestProjectConfigTab.project_id, ))
+ self.get(url)
+ self.wait_until_visible('#config-nav', poll=3)
+ except TimeoutException:
+ self.wait_until_visible('#config-nav', poll=3)
+
+ def _random_string(self, length):
+ return ''.join(
+ random.choice(string.ascii_letters) for _ in range(length)
+ )
+
+ def _navigate_to_project_page(self):
+ # Navigate to project page
+ if TestProjectConfigTab.project_id is None:
+ self._create_project(project_name=self._random_string(10))
+ current_url = self.driver.current_url
+ TestProjectConfigTab.project_id = get_projectId_from_url(
+ current_url)
+ else:
+ url = reverse('project', args=(TestProjectConfigTab.project_id,))
+ self.get(url)
+ self.wait_until_visible('#config-nav')
+
+ def _create_builds(self):
+ # check search box can be use to build recipes
+ search_box = self.find('#build-input')
+ search_box.send_keys('foo')
+ self.find('#build-button').click()
+ self.wait_until_present('#latest-builds')
+ # loop until reach the parsing state
+ wait_until_build(self, 'queued cloning starting parsing failed')
+ lastest_builds = self.driver.find_elements(
+ By.XPATH,
+ '//div[@id="latest-builds"]/div',
+ )
+ last_build = lastest_builds[0]
+ self.assertTrue(
+ 'foo' in str(last_build.text)
+ )
+ last_build = lastest_builds[0]
+ try:
+ cancel_button = last_build.find_element(
+ By.XPATH,
+ '//span[@class="cancel-build-btn pull-right alert-link"]',
+ )
+ cancel_button.click()
+ except NoSuchElementException:
+ # Skip if the build is already cancelled
+ pass
+ wait_until_build_cancelled(self)
+
+ def _get_tabs(self):
+ # tabs links list
+ return self.driver.find_elements(
+ By.XPATH,
+ '//div[@id="project-topbar"]//li'
+ )
+
+ def _get_config_nav_item(self, index):
+ config_nav = self.find('#config-nav')
+ return config_nav.find_elements(By.TAG_NAME, 'li')[index]
+
+ def test_project_config_nav(self):
+ """ Test project config tab navigation:
+ - Check if the menu is displayed and contains the right elements:
+ - Configuration
+ - COMPATIBLE METADATA
+ - Custom images
+ - Image recipes
+ - Software recipes
+ - Machines
+ - Layers
+ - Distro
+ - EXTRA CONFIGURATION
+ - Bitbake variables
+ - Actions
+ - Delete project
+ """
+ self._navigate_to_project_page()
+
+ def _get_config_nav_item(index):
+ config_nav = self.find('#config-nav')
+ return config_nav.find_elements(By.TAG_NAME, 'li')[index]
+
+ def check_config_nav_item(index, item_name, url):
+ item = _get_config_nav_item(index)
+ self.assertTrue(item_name in item.text)
+ self.assertTrue(item.get_attribute('class') == 'active')
+ self.assertTrue(url in self.driver.current_url)
+
+ # check if the menu contains the right elements
+ # COMPATIBLE METADATA
+ compatible_metadata = _get_config_nav_item(1)
+ self.assertTrue(
+ "compatible metadata" in compatible_metadata.text.lower()
+ )
+ # EXTRA CONFIGURATION
+ extra_configuration = _get_config_nav_item(8)
+ self.assertTrue(
+ "extra configuration" in extra_configuration.text.lower()
+ )
+ # Actions
+ actions = _get_config_nav_item(10)
+ self.assertTrue("actions" in str(actions.text).lower())
+
+ conf_nav_list = [
+ # config
+ [0, 'Configuration',
+ f"/toastergui/project/{TestProjectConfigTab.project_id}"],
+ # custom images
+ [2, 'Custom images',
+ f"/toastergui/project/{TestProjectConfigTab.project_id}/customimages"],
+ # image recipes
+ [3, 'Image recipes',
+ f"/toastergui/project/{TestProjectConfigTab.project_id}/images"],
+ # software recipes
+ [4, 'Software recipes',
+ f"/toastergui/project/{TestProjectConfigTab.project_id}/softwarerecipes"],
+ # machines
+ [5, 'Machines',
+ f"/toastergui/project/{TestProjectConfigTab.project_id}/machines"],
+ # layers
+ [6, 'Layers',
+ f"/toastergui/project/{TestProjectConfigTab.project_id}/layers"],
+ # distro
+ [7, 'Distros',
+ f"/toastergui/project/{TestProjectConfigTab.project_id}/distros"],
+ # [9, 'BitBake variables', f"/toastergui/project/{TestProjectConfigTab.project_id}/configuration"], # bitbake variables
+ ]
+ for index, item_name, url in conf_nav_list:
+ item = _get_config_nav_item(index)
+ if item.get_attribute('class') != 'active':
+ item.click()
+ check_config_nav_item(index, item_name, url)
+
+ def test_image_recipe_editColumn(self):
+ """ Test the edit column feature in image recipe table on project page """
+ def test_edit_column(check_box_id):
+ # Check that we can hide/show table column
+ check_box = self.find(f'#{check_box_id}')
+ th_class = str(check_box_id).replace('checkbox-', '')
+ if check_box.is_selected():
+ # check if column is visible in table
+ self.assertTrue(
+ self.find(
+ f'#imagerecipestable thead th.{th_class}'
+ ).is_displayed(),
+ f"The {th_class} column is checked in EditColumn dropdown, but it's not visible in table"
+ )
+ check_box.click()
+ # check if column is hidden in table
+ self.assertFalse(
+ self.find(
+ f'#imagerecipestable thead th.{th_class}'
+ ).is_displayed(),
+ f"The {th_class} column is unchecked in EditColumn dropdown, but it's visible in table"
+ )
+ else:
+ # check if column is hidden in table
+ self.assertFalse(
+ self.find(
+ f'#imagerecipestable thead th.{th_class}'
+ ).is_displayed(),
+ f"The {th_class} column is unchecked in EditColumn dropdown, but it's visible in table"
+ )
+ check_box.click()
+ # check if column is visible in table
+ self.assertTrue(
+ self.find(
+ f'#imagerecipestable thead th.{th_class}'
+ ).is_displayed(),
+ f"The {th_class} column is checked in EditColumn dropdown, but it's not visible in table"
+ )
+
+ self._navigate_to_project_page()
+ # navigate to project image recipe page
+ recipe_image_page_link = self._get_config_nav_item(3)
+ recipe_image_page_link.click()
+ self.wait_until_present('#imagerecipestable tbody tr')
+
+ # Check edit column
+ edit_column = self.find('#edit-columns-button')
+ self.assertTrue(edit_column.is_displayed())
+ edit_column.click()
+ # Check dropdown is visible
+ self.wait_until_visible('ul.dropdown-menu.editcol')
+
+ # Check that we can hide the edit column
+ test_edit_column('checkbox-get_description_or_summary')
+ test_edit_column('checkbox-layer_version__get_vcs_reference')
+ test_edit_column('checkbox-layer_version__layer__name')
+ test_edit_column('checkbox-license')
+ test_edit_column('checkbox-recipe-file')
+ test_edit_column('checkbox-section')
+ test_edit_column('checkbox-version')
+
+ def test_image_recipe_show_rows(self):
+ """ Test the show rows feature in image recipe table on project page """
+ def test_show_rows(row_to_show, show_row_link):
+ # Check that we can show rows == row_to_show
+ show_row_link.select_by_value(str(row_to_show))
+ self.wait_until_visible('#imagerecipestable tbody tr', poll=3)
+ # check at least some rows are visible
+ self.assertTrue(
+ len(self.find_all('#imagerecipestable tbody tr')) > 0
+ )
+
+ self._navigate_to_project_page()
+ # navigate to project image recipe page
+ recipe_image_page_link = self._get_config_nav_item(3)
+ recipe_image_page_link.click()
+ self.wait_until_present('#imagerecipestable tbody tr')
+
+ show_rows = self.driver.find_elements(
+ By.XPATH,
+ '//select[@class="form-control pagesize-imagerecipestable"]'
+ )
+ # Check show rows
+ for show_row_link in show_rows:
+ show_row_link = Select(show_row_link)
+ test_show_rows(10, show_row_link)
+ test_show_rows(25, show_row_link)
+ test_show_rows(50, show_row_link)
+ test_show_rows(100, show_row_link)
+ test_show_rows(150, show_row_link)
+
+ def test_project_config_tab_right_section(self):
+ """ Test project config tab right section contains five blocks:
+ - Machine:
+ - check 'Machine' is displayed
+ - check can change Machine
+ - Distro:
+ - check 'Distro' is displayed
+ - check can change Distro
+ - Most built recipes:
+ - check 'Most built recipes' is displayed
+ - check can select a recipe and build it
+ - Project release:
+ - check 'Project release' is displayed
+ - check project has right release displayed
+ - Layers:
+ - check can add a layer if exists
+ - check at least three layers are displayed
+ - openembedded-core
+ - meta-poky
+ - meta-yocto-bsp
+ """
+ # Create a new project for this test
+ project_name = self._random_string(10)
+ self._create_project(project_name=project_name)
+ # check if the menu is displayed
+ self.wait_until_visible('#project-page')
+ block_l = self.driver.find_element(
+ By.XPATH, '//*[@id="project-page"]/div[2]')
+ project_release = self.driver.find_element(
+ By.XPATH, '//*[@id="project-page"]/div[1]/div[4]')
+ layers = block_l.find_element(By.ID, 'layer-container')
+
+ def check_machine_distro(self, item_name, new_item_name, block_id):
+ block = self.find(f'#{block_id}')
+ title = block.find_element(By.TAG_NAME, 'h3')
+ self.assertTrue(item_name.capitalize() in title.text)
+ edit_btn = self.find(f'#change-{item_name}-toggle')
+ edit_btn.click()
+ self.wait_until_visible(f'#{item_name}-change-input')
+ name_input = self.find(f'#{item_name}-change-input')
+ name_input.clear()
+ name_input.send_keys(new_item_name)
+ change_btn = self.find(f'#{item_name}-change-btn')
+ change_btn.click()
+ self.wait_until_visible(f'#project-{item_name}-name')
+ project_name = self.find(f'#project-{item_name}-name')
+ self.assertTrue(new_item_name in project_name.text)
+ # check change notificaiton is displayed
+ change_notification = self.find('#change-notification')
+ self.assertTrue(
+ f'You have changed the {item_name} to: {new_item_name}' in change_notification.text
+ )
+
+ # Machine
+ check_machine_distro(self, 'machine', 'qemux86-64', 'machine-section')
+ # Distro
+ check_machine_distro(self, 'distro', 'poky-altcfg', 'distro-section')
+
+ # Project release
+ title = project_release.find_element(By.TAG_NAME, 'h3')
+ self.assertTrue("Project release" in title.text)
+ self.assertTrue(
+ "Yocto Project master" in self.find('#project-release-title').text
+ )
+ # Layers
+ title = layers.find_element(By.TAG_NAME, 'h3')
+ self.assertTrue("Layers" in title.text)
+ # check at least three layers are displayed
+ # openembedded-core
+ # meta-poky
+ # meta-yocto-bsp
+ layers_list = layers.find_element(By.ID, 'layers-in-project-list')
+ layers_list_items = layers_list.find_elements(By.TAG_NAME, 'li')
+ # remove all layers except the first three layers
+ for i in range(3, len(layers_list_items)):
+ layers_list_items[i].find_element(By.TAG_NAME, 'span').click()
+ # check can add a layer if exists
+ add_layer_input = layers.find_element(By.ID, 'layer-add-input')
+ add_layer_input.send_keys('meta-oe')
+ self.wait_until_visible('#layer-container > form > div > span > div')
+ dropdown_item = self.driver.find_element(
+ By.XPATH,
+ '//*[@id="layer-container"]/form/div/span/div'
+ )
+ try:
+ dropdown_item.click()
+ except ElementClickInterceptedException:
+ self.skipTest(
+ "layer-container dropdown item click intercepted. Element not properly visible.")
+ add_layer_btn = layers.find_element(By.ID, 'add-layer-btn')
+ add_layer_btn.click()
+ self.wait_until_visible('#layers-in-project-list')
+ # check layer is added
+ layers_list_items = layers_list.find_elements(By.TAG_NAME, 'li')
+ self.assertTrue(len(layers_list_items) == 4)
+
+ def test_most_build_recipes(self):
+ """ Test most build recipes block contains"""
+ def rebuild_from_most_build_recipes(recipe_list_items):
+ checkbox = recipe_list_items[0].find_element(By.TAG_NAME, 'input')
+ checkbox.click()
+ build_btn = self.find('#freq-build-btn')
+ build_btn.click()
+ self.wait_until_visible('#latest-builds')
+ wait_until_build(self, 'queued cloning starting parsing failed')
+ lastest_builds = self.driver.find_elements(
+ By.XPATH,
+ '//div[@id="latest-builds"]/div'
+ )
+ self.assertTrue(len(lastest_builds) >= 2)
+ last_build = lastest_builds[0]
+ try:
+ cancel_button = last_build.find_element(
+ By.XPATH,
+ '//span[@class="cancel-build-btn pull-right alert-link"]',
+ )
+ cancel_button.click()
+ except NoSuchElementException:
+ # Skip if the build is already cancelled
+ pass
+ wait_until_build_cancelled(self)
+ # Create a new project for remaining asserts
+ project_name = self._random_string(10)
+ self._create_project(project_name=project_name, release='2')
+ current_url = self.driver.current_url
+ TestProjectConfigTab.project_id = get_projectId_from_url(current_url)
+ url = current_url.split('?')[0]
+
+ # Create a new builds
+ self._create_builds()
+
+ # back to project page
+ self.driver.get(url)
+
+ self.wait_until_visible('#project-page', poll=3)
+
+ # Most built recipes
+ most_built_recipes = self.driver.find_element(
+ By.XPATH, '//*[@id="project-page"]/div[1]/div[3]')
+ title = most_built_recipes.find_element(By.TAG_NAME, 'h3')
+ self.assertTrue("Most built recipes" in title.text)
+ # check can select a recipe and build it
+ self.wait_until_visible('#freq-build-list', poll=3)
+ recipe_list = self.find('#freq-build-list')
+ recipe_list_items = recipe_list.find_elements(By.TAG_NAME, 'li')
+ self.assertTrue(
+ len(recipe_list_items) > 0,
+ msg="Any recipes found in the most built recipes list",
+ )
+ rebuild_from_most_build_recipes(recipe_list_items)
+ TestProjectConfigTab.project_id = None # reset project id
+
+ def test_project_page_tab_importlayer(self):
+ """ Test project page tab import layer """
+ self._navigate_to_project_page()
+ # navigate to "Import layers" tab
+ import_layers_tab = self._get_tabs()[2]
+ import_layers_tab.find_element(By.TAG_NAME, 'a').click()
+ self.wait_until_visible('#layer-git-repo-url')
+
+ # Check git repo radio button
+ git_repo_radio = self.find('#git-repo-radio')
+ git_repo_radio.click()
+
+ # Set git repo url
+ input_repo_url = self.find('#layer-git-repo-url')
+ input_repo_url.send_keys('git://git.yoctoproject.org/meta-fake')
+ # Blur the input to trigger the validation
+ input_repo_url.send_keys(Keys.TAB)
+
+ # Check name is set
+ input_layer_name = self.find('#import-layer-name')
+ self.assertTrue(input_layer_name.get_attribute('value') == 'meta-fake')
+
+ # Set branch
+ input_branch = self.find('#layer-git-ref')
+ input_branch.send_keys('master')
+
+ # Import layer
+ self.find('#import-and-add-btn').click()
+
+ # Check layer is added
+ self.wait_until_visible('#layer-container')
+ block_l = self.driver.find_element(
+ By.XPATH, '//*[@id="project-page"]/div[2]')
+ layers = block_l.find_element(By.ID, 'layer-container')
+ layers_list = layers.find_element(By.ID, 'layers-in-project-list')
+ layers_list_items = layers_list.find_elements(By.TAG_NAME, 'li')
+ self.assertTrue(
+ 'meta-fake' in str(layers_list_items[-1].text)
+ )
+
+ def test_project_page_custom_image_no_image(self):
+ """ Test project page tab "New custom image" when no custom image """
+ project_name = self._random_string(10)
+ self._create_project(project_name=project_name)
+ current_url = self.driver.current_url
+ TestProjectConfigTab.project_id = get_projectId_from_url(current_url)
+ # navigate to "Custom image" tab
+ custom_image_section = self._get_config_nav_item(2)
+ custom_image_section.click()
+ self.wait_until_visible('#empty-state-customimagestable')
+
+ # Check message when no custom image
+ self.assertTrue(
+ "You have not created any custom images yet." in str(
+ self.find('#empty-state-customimagestable').text
+ )
+ )
+ div_empty_msg = self.find('#empty-state-customimagestable')
+ link_create_custom_image = div_empty_msg.find_element(
+ By.TAG_NAME, 'a')
+ self.assertTrue(TestProjectConfigTab.project_id is not None)
+ self.assertTrue(
+ f"/toastergui/project/{TestProjectConfigTab.project_id}/newcustomimage" in str(
+ link_create_custom_image.get_attribute('href')
+ )
+ )
+ self.assertTrue(
+ "Create your first custom image" in str(
+ link_create_custom_image.text
+ )
+ )
+ TestProjectConfigTab.project_id = None # reset project id
+
+ def test_project_page_image_recipe(self):
+ """ Test project page section images
+ - Check image recipes are displayed
+ - Check search input
+ - Check image recipe build button works
+ - Check image recipe table features(show/hide column, pagination)
+ """
+ self._navigate_to_project_page()
+ # navigate to "Images section"
+ images_section = self._get_config_nav_item(3)
+ images_section.click()
+ self.wait_until_visible('#imagerecipestable')
+ rows = self.find_all('#imagerecipestable tbody tr')
+ self.assertTrue(len(rows) > 0)
+
+ # Test search input
+ self.wait_until_visible('#search-input-imagerecipestable')
+ recipe_input = self.find('#search-input-imagerecipestable')
+ recipe_input.send_keys('core-image-minimal')
+ self.find('#search-submit-imagerecipestable').click()
+ self.wait_until_visible('#imagerecipestable tbody tr')
+ rows = self.find_all('#imagerecipestable tbody tr')
+ self.assertTrue(len(rows) > 0)
diff --git a/bitbake/lib/toaster/tests/functional/utils.py b/bitbake/lib/toaster/tests/functional/utils.py
new file mode 100644
index 0000000000..7269fa1805
--- /dev/null
+++ b/bitbake/lib/toaster/tests/functional/utils.py
@@ -0,0 +1,89 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+# BitBake Toaster UI tests implementation
+#
+# Copyright (C) 2023 Savoir-faire Linux
+#
+# SPDX-License-Identifier: GPL-2.0-only
+
+
+from time import sleep
+from selenium.common.exceptions import NoSuchElementException, StaleElementReferenceException, TimeoutException
+from selenium.webdriver.common.by import By
+
+from orm.models import Build
+
+
+def wait_until_build(test_instance, state):
+ timeout = 60
+ start_time = 0
+ build_state = ''
+ while True:
+ try:
+ if start_time > timeout:
+ raise TimeoutException(
+ f'Build did not reach {state} state within {timeout} seconds'
+ )
+ last_build_state = test_instance.driver.find_element(
+ By.XPATH,
+ '//*[@id="latest-builds"]/div[1]//div[@class="build-state"]',
+ )
+ build_state = last_build_state.get_attribute(
+ 'data-build-state')
+ state_text = state.lower().split()
+ if any(x in str(build_state).lower() for x in state_text):
+ return str(build_state).lower()
+ if 'failed' in str(build_state).lower():
+ break
+ except NoSuchElementException:
+ continue
+ except TimeoutException:
+ break
+ start_time += 1
+ sleep(1) # take a breath and try again
+
+def wait_until_build_cancelled(test_instance):
+ """ Cancel build take a while sometime, the method is to wait driver action
+ until build being cancelled
+ """
+ timeout = 30
+ start_time = 0
+ build = None
+ while True:
+ try:
+ if start_time > timeout:
+ raise TimeoutException(
+ f'Build did not reach cancelled state within {timeout} seconds'
+ )
+ last_build_state = test_instance.driver.find_element(
+ By.XPATH,
+ '//*[@id="latest-builds"]/div[1]//div[@class="build-state"]',
+ )
+ build_state = last_build_state.get_attribute(
+ 'data-build-state')
+ if 'failed' in str(build_state).lower():
+ break
+ if 'cancelling' in str(build_state).lower():
+ # Change build state to cancelled
+ if not build: # get build object only once
+ build = Build.objects.last()
+ build.outcome = Build.CANCELLED
+ build.save()
+ if 'cancelled' in str(build_state).lower():
+ break
+ except NoSuchElementException:
+ continue
+ except StaleElementReferenceException:
+ continue
+ except TimeoutException:
+ break
+ start_time += 1
+ sleep(1) # take a breath and try again
+
+def get_projectId_from_url(url):
+ # url = 'http://domainename.com/toastergui/project/1656/whatever
+ # or url = 'http://domainename.com/toastergui/project/1/
+ # or url = 'http://domainename.com/toastergui/project/186
+ assert '/toastergui/project/' in url, "URL is not valid"
+ url_to_list = url.split('/toastergui/project/')
+ return int(url_to_list[1].split('/')[0]) # project_id
diff --git a/bitbake/lib/toaster/tests/toaster-tests-requirements.txt b/bitbake/lib/toaster/tests/toaster-tests-requirements.txt
index 4f9fcc46d2..71cc083436 100644
--- a/bitbake/lib/toaster/tests/toaster-tests-requirements.txt
+++ b/bitbake/lib/toaster/tests/toaster-tests-requirements.txt
@@ -1 +1,7 @@
-selenium==2.49.2
+selenium>=4.13.0
+pytest==7.4.2
+pytest-django==4.5.2
+pytest-env==1.1.0
+pytest-html==4.0.2
+pytest-metadata==3.0.0
+pytest-order==1.1.0
diff --git a/bitbake/lib/toaster/tests/views/test_views.py b/bitbake/lib/toaster/tests/views/test_views.py
index 735d596bcc..e1adfcf86a 100644
--- a/bitbake/lib/toaster/tests/views/test_views.py
+++ b/bitbake/lib/toaster/tests/views/test_views.py
@@ -9,6 +9,8 @@
"""Test cases for Toaster GUI and ReST."""
+import os
+import pytest
from django.test import TestCase
from django.test.client import RequestFactory
from django.urls import reverse
@@ -19,6 +21,7 @@ from orm.models import Layer_Version, Recipe
from orm.models import CustomImageRecipe
from orm.models import CustomImagePackage
+from bldcontrol.models import BuildEnvironment
import inspect
import toastergui
@@ -32,19 +35,32 @@ PROJECT_NAME2 = "test project 2"
CLI_BUILDS_PROJECT_NAME = 'Command line builds'
+
class ViewTests(TestCase):
"""Tests to verify view APIs."""
fixtures = ['toastergui-unittest-data']
+ builldir = os.environ.get('BUILDDIR')
def setUp(self):
self.project = Project.objects.first()
+
self.recipe1 = Recipe.objects.get(pk=2)
+ # create a file and to recipe1 file_path
+ file_path = f"{self.builldir}/{self.recipe1.name.strip().replace(' ', '-')}.bb"
+ with open(file_path, 'w') as f:
+ f.write('foo')
+ self.recipe1.file_path = file_path
+ self.recipe1.save()
+
self.customr = CustomImageRecipe.objects.first()
self.cust_package = CustomImagePackage.objects.first()
self.package = Package.objects.first()
self.lver = Layer_Version.objects.first()
+ if BuildEnvironment.objects.count() == 0:
+ BuildEnvironment.objects.create(betype=BuildEnvironment.TYPE_LOCAL)
+
def test_get_base_call_returns_html(self):
"""Basic test for all-projects view"""
@@ -226,7 +242,7 @@ class ViewTests(TestCase):
recipe = CustomImageRecipe.objects.create(
name=name, project=self.project,
base_recipe=self.recipe1,
- file_path="/tmp/testing",
+ file_path=f"{self.builldir}/testing",
layer_version=self.customr.layer_version)
url = reverse('xhr_customrecipe_id', args=(recipe.id,))
response = self.client.delete(url)
@@ -297,7 +313,7 @@ class ViewTests(TestCase):
"""Download the recipe file generated for the custom image"""
# Create a dummy recipe file for the custom image generation to read
- open("/tmp/a_recipe.bb", 'a').close()
+ open(f"{self.builldir}/a_recipe.bb", 'a').close()
response = self.client.get(reverse('customrecipedownload',
args=(self.project.id,
self.customr.id)))
diff --git a/bitbake/lib/toaster/toastergui/api.py b/bitbake/lib/toaster/toastergui/api.py
index b4cdc335ef..e367bd910e 100644
--- a/bitbake/lib/toaster/toastergui/api.py
+++ b/bitbake/lib/toaster/toastergui/api.py
@@ -11,7 +11,7 @@ import os
import re
import logging
import json
-import subprocess
+import glob
from collections import Counter
from orm.models import Project, ProjectTarget, Build, Layer_Version
@@ -227,20 +227,18 @@ class XhrSetDefaultImageUrl(View):
# same logical name
# * Each project that uses a layer will have its own
# LayerVersion and Project Layer for it
-# * During the Paroject delete process, when the last
+# * During the Project delete process, when the last
# LayerVersion for a 'local_source_dir' layer is deleted
# then the Layer record is deleted to remove orphans
#
def scan_layer_content(layer,layer_version):
# if this is a local layer directory, we can immediately scan its content
- if layer.local_source_dir:
+ if os.path.isdir(layer.local_source_dir):
try:
# recipes-*/*/*.bb
- cmd = '%s %s' % ('ls', os.path.join(layer.local_source_dir,'recipes-*/*/*.bb'))
- recipes_list = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE,stderr=subprocess.STDOUT).stdout.read()
- recipes_list = recipes_list.decode("utf-8").strip()
- if recipes_list and 'No such' not in recipes_list:
+ recipes_list = glob.glob(os.path.join(layer.local_source_dir, 'recipes-*/*/*.bb'))
+ for recipe in recipes_list:
for recipe in recipes_list.split('\n'):
recipe_path = recipe[recipe.rfind('recipes-'):]
recipe_name = recipe[recipe.rfind('/')+1:].replace('.bb','')
@@ -260,6 +258,9 @@ def scan_layer_content(layer,layer_version):
except Exception as e:
logger.warning("ERROR:scan_layer_content: %s" % e)
+ else:
+ logger.warning("ERROR: wrong path given")
+ raise KeyError("local_source_dir")
class XhrLayer(View):
""" Delete, Get, Add and Update Layer information
@@ -456,15 +457,18 @@ class XhrLayer(View):
'layerdetailurl':
layer_dep.get_detailspage_url(project.pk)})
- # Scan the layer's content and update components
- scan_layer_content(layer,layer_version)
+ # Only scan_layer_content if layer is local
+ if layer_data.get('local_source_dir', None):
+ # Scan the layer's content and update components
+ scan_layer_content(layer,layer_version)
except Layer_Version.DoesNotExist:
return error_response("layer-dep-not-found")
except Project.DoesNotExist:
return error_response("project-not-found")
- except KeyError:
- return error_response("incorrect-parameters")
+ except KeyError as e:
+ _log("KeyError: %s" % e)
+ return error_response(f"incorrect-parameters")
return JsonResponse({'error': "ok",
'imported_layer': {
diff --git a/bitbake/lib/toaster/toastergui/fixtures/toastergui-unittest-data.xml b/bitbake/lib/toaster/toastergui/fixtures/toastergui-unittest-data.xml
index 4517ed1765..f626572fd1 100644
--- a/bitbake/lib/toaster/toastergui/fixtures/toastergui-unittest-data.xml
+++ b/bitbake/lib/toaster/toastergui/fixtures/toastergui-unittest-data.xml
@@ -6,10 +6,22 @@
<field type="CharField" name="dirpath">b</field>
<field type="CharField" name="branch">a</field>
</object>
+ <object pk="1" model="orm.distro">
+ <field type="DateTimeField" name="up_date"><None></None></field>
+ <field to="orm.layer_version" name="layer_version" rel="ManyToOneRel">1</field>
+ <field type="CharField" name="name">poky_distro1</field>
+ <field type="CharField" name="description">poky_distro1 description</field>
+ </object>
+ <object pk="2" model="orm.distro">
+ <field type="DateTimeField" name="up_date"><None></None></field>
+ <field to="orm.layer_version" name="layer_version" rel="ManyToOneRel">2</field>
+ <field type="CharField" name="name">poky_distro2</field>
+ <field type="CharField" name="description">poky_distro2 description</field>
+ </object>
<object pk="1" model="orm.release">
- <field type="CharField" name="name">master</field>
+ <field type="CharField" name="name">foo_master</field>
<field type="CharField" name="description">master project</field>
- <field to="orm.bitbake_version" name="bitbake_version">1</field>
+ <field to="orm.bitbakeversion" name="bitbake_version">1</field>
</object>
<object pk="1" model="orm.project">
<field type="CharField" name="name">a test project</field>
@@ -34,12 +46,12 @@
<object pk="1" model="orm.ProjectVariable">
<field to="orm.project" name="project" rel="ManyToOneRel">1</field>
<field type="CharField" name="name">MACHINE</field>
- <field type="TextField" name="value">qemux86</field>
+ <field type="TextField" name="value">qemux86-64</field>
</object>
<object pk="2" model="orm.ProjectVariable">
<field to="orm.project" name="project" rel="ManyToOneRel">2</field>
<field type="CharField" name="name">MACHINE</field>
- <field type="TextField" name="value">qemux86</field>
+ <field type="TextField" name="value">qemux86-64</field>
</object>
<object pk="1" model="orm.build">
<field to="orm.project" name="project" rel="ManyToOneRel">1</field>
@@ -67,7 +79,7 @@
</object>
<object pk="3" model="orm.build">
<field to="orm.project" name="project" rel="ManyToOneRel">1</field>
- <field type="CharField" name="machine">qemux86</field>
+ <field type="CharField" name="machine">qemux86-64</field>
<field type="CharField" name="distro"></field>
<field type="CharField" name="distro_version"></field>
<field type="DateTimeField" name="started_on">2016-02-12T18:46:20.114530+00:00</field>
@@ -79,7 +91,7 @@
</object>
<object pk="4" model="orm.build">
<field to="orm.project" name="project" rel="ManyToOneRel">2</field>
- <field type="CharField" name="machine">qemux86</field>
+ <field type="CharField" name="machine">qemux86-64</field>
<field type="CharField" name="distro"></field>
<field type="CharField" name="distro_version"></field>
<field type="DateTimeField" name="started_on">2016-02-11T18:46:20.114530+00:00</field>
diff --git a/bitbake/lib/toaster/toastergui/forms.py b/bitbake/lib/toaster/toastergui/forms.py
new file mode 100644
index 0000000000..0f279e06c5
--- /dev/null
+++ b/bitbake/lib/toaster/toastergui/forms.py
@@ -0,0 +1,14 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+# BitBake Toaster UI tests implementation
+#
+# Copyright (C) 2023 Savoir-faire Linux
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
+from django import forms
+from django.core.validators import FileExtensionValidator
+
+class LoadFileForm(forms.Form):
+ eventlog_file = forms.FileField(widget=forms.FileInput(attrs={'accept': '.json'}))
diff --git a/bitbake/lib/toaster/toastergui/static/css/default.css b/bitbake/lib/toaster/toastergui/static/css/default.css
index 5cd7e211a0..284355e70b 100644
--- a/bitbake/lib/toaster/toastergui/static/css/default.css
+++ b/bitbake/lib/toaster/toastergui/static/css/default.css
@@ -367,3 +367,31 @@ h2.panel-title { font-size: 30px; }
}
}
/* End copied in from newer version of Font-Awesome 4.3.0 */
+
+
+#overlay {
+ display: flex;
+ position: fixed;
+ top: 0;
+ left: 0;
+ width: 100%;
+ height: 100%;
+ background-color: rgba(0, 0, 0, 0.7);
+ align-items: center;
+ justify-content: center;
+ z-index: 999;
+}
+
+.spinner {
+ border: 6px solid rgba(255, 255, 255, 0.3);
+ border-radius: 50%;
+ border-top: 6px solid #3498db;
+ width: 50px;
+ height: 50px;
+ animation: spin 1s linear infinite;
+}
+
+@keyframes spin {
+ 0% { transform: rotate(0deg); }
+ 100% { transform: rotate(360deg); }
+}
diff --git a/bitbake/lib/toaster/toastergui/static/css/jquery.dataTables-1.13.8.min.css b/bitbake/lib/toaster/toastergui/static/css/jquery.dataTables-1.13.8.min.css
new file mode 100644
index 0000000000..c0a442ce07
--- /dev/null
+++ b/bitbake/lib/toaster/toastergui/static/css/jquery.dataTables-1.13.8.min.css
@@ -0,0 +1 @@
+:root{--dt-row-selected: 13, 110, 253;--dt-row-selected-text: 255, 255, 255;--dt-row-selected-link: 9, 10, 11;--dt-row-stripe: 0, 0, 0;--dt-row-hover: 0, 0, 0;--dt-column-ordering: 0, 0, 0;--dt-html-background: white}:root.dark{--dt-html-background: rgb(33, 37, 41)}table.dataTable td.dt-control{text-align:center;cursor:pointer}table.dataTable td.dt-control:before{display:inline-block;color:rgba(0, 0, 0, 0.5);content:"▶"}table.dataTable tr.dt-hasChild td.dt-control:before{content:"▼"}html.dark table.dataTable td.dt-control:before{color:rgba(255, 255, 255, 0.5)}html.dark table.dataTable tr.dt-hasChild td.dt-control:before{color:rgba(255, 255, 255, 0.5)}table.dataTable thead>tr>th.sorting,table.dataTable thead>tr>th.sorting_asc,table.dataTable thead>tr>th.sorting_desc,table.dataTable thead>tr>th.sorting_asc_disabled,table.dataTable thead>tr>th.sorting_desc_disabled,table.dataTable thead>tr>td.sorting,table.dataTable thead>tr>td.sorting_asc,table.dataTable thead>tr>td.sorting_desc,table.dataTable thead>tr>td.sorting_asc_disabled,table.dataTable thead>tr>td.sorting_desc_disabled{cursor:pointer;position:relative;padding-right:26px}table.dataTable thead>tr>th.sorting:before,table.dataTable thead>tr>th.sorting:after,table.dataTable thead>tr>th.sorting_asc:before,table.dataTable thead>tr>th.sorting_asc:after,table.dataTable thead>tr>th.sorting_desc:before,table.dataTable thead>tr>th.sorting_desc:after,table.dataTable thead>tr>th.sorting_asc_disabled:before,table.dataTable thead>tr>th.sorting_asc_disabled:after,table.dataTable thead>tr>th.sorting_desc_disabled:before,table.dataTable thead>tr>th.sorting_desc_disabled:after,table.dataTable thead>tr>td.sorting:before,table.dataTable thead>tr>td.sorting:after,table.dataTable thead>tr>td.sorting_asc:before,table.dataTable thead>tr>td.sorting_asc:after,table.dataTable thead>tr>td.sorting_desc:before,table.dataTable thead>tr>td.sorting_desc:after,table.dataTable thead>tr>td.sorting_asc_disabled:before,table.dataTable thead>tr>td.sorting_asc_disabled:after,table.dataTable thead>tr>td.sorting_desc_disabled:before,table.dataTable thead>tr>td.sorting_desc_disabled:after{position:absolute;display:block;opacity:.125;right:10px;line-height:9px;font-size:.8em}table.dataTable thead>tr>th.sorting:before,table.dataTable thead>tr>th.sorting_asc:before,table.dataTable thead>tr>th.sorting_desc:before,table.dataTable thead>tr>th.sorting_asc_disabled:before,table.dataTable thead>tr>th.sorting_desc_disabled:before,table.dataTable thead>tr>td.sorting:before,table.dataTable thead>tr>td.sorting_asc:before,table.dataTable thead>tr>td.sorting_desc:before,table.dataTable thead>tr>td.sorting_asc_disabled:before,table.dataTable thead>tr>td.sorting_desc_disabled:before{bottom:50%;content:"▲";content:"▲"/""}table.dataTable thead>tr>th.sorting:after,table.dataTable thead>tr>th.sorting_asc:after,table.dataTable thead>tr>th.sorting_desc:after,table.dataTable thead>tr>th.sorting_asc_disabled:after,table.dataTable thead>tr>th.sorting_desc_disabled:after,table.dataTable thead>tr>td.sorting:after,table.dataTable thead>tr>td.sorting_asc:after,table.dataTable thead>tr>td.sorting_desc:after,table.dataTable thead>tr>td.sorting_asc_disabled:after,table.dataTable thead>tr>td.sorting_desc_disabled:after{top:50%;content:"▼";content:"▼"/""}table.dataTable thead>tr>th.sorting_asc:before,table.dataTable thead>tr>th.sorting_desc:after,table.dataTable thead>tr>td.sorting_asc:before,table.dataTable thead>tr>td.sorting_desc:after{opacity:.6}table.dataTable thead>tr>th.sorting_desc_disabled:after,table.dataTable thead>tr>th.sorting_asc_disabled:before,table.dataTable thead>tr>td.sorting_desc_disabled:after,table.dataTable thead>tr>td.sorting_asc_disabled:before{display:none}table.dataTable thead>tr>th:active,table.dataTable thead>tr>td:active{outline:none}div.dataTables_scrollBody>table.dataTable>thead>tr>th:before,div.dataTables_scrollBody>table.dataTable>thead>tr>th:after,div.dataTables_scrollBody>table.dataTable>thead>tr>td:before,div.dataTables_scrollBody>table.dataTable>thead>tr>td:after{display:none}div.dataTables_processing{position:absolute;top:50%;left:50%;width:200px;margin-left:-100px;margin-top:-26px;text-align:center;padding:2px;z-index:10}div.dataTables_processing>div:last-child{position:relative;width:80px;height:15px;margin:1em auto}div.dataTables_processing>div:last-child>div{position:absolute;top:0;width:13px;height:13px;border-radius:50%;background:rgb(13, 110, 253);background:rgb(var(--dt-row-selected));animation-timing-function:cubic-bezier(0, 1, 1, 0)}div.dataTables_processing>div:last-child>div:nth-child(1){left:8px;animation:datatables-loader-1 .6s infinite}div.dataTables_processing>div:last-child>div:nth-child(2){left:8px;animation:datatables-loader-2 .6s infinite}div.dataTables_processing>div:last-child>div:nth-child(3){left:32px;animation:datatables-loader-2 .6s infinite}div.dataTables_processing>div:last-child>div:nth-child(4){left:56px;animation:datatables-loader-3 .6s infinite}@keyframes datatables-loader-1{0%{transform:scale(0)}100%{transform:scale(1)}}@keyframes datatables-loader-3{0%{transform:scale(1)}100%{transform:scale(0)}}@keyframes datatables-loader-2{0%{transform:translate(0, 0)}100%{transform:translate(24px, 0)}}table.dataTable.nowrap th,table.dataTable.nowrap td{white-space:nowrap}table.dataTable th.dt-left,table.dataTable td.dt-left{text-align:left}table.dataTable th.dt-center,table.dataTable td.dt-center,table.dataTable td.dataTables_empty{text-align:center}table.dataTable th.dt-right,table.dataTable td.dt-right{text-align:right}table.dataTable th.dt-justify,table.dataTable td.dt-justify{text-align:justify}table.dataTable th.dt-nowrap,table.dataTable td.dt-nowrap{white-space:nowrap}table.dataTable thead th,table.dataTable thead td,table.dataTable tfoot th,table.dataTable tfoot td{text-align:left}table.dataTable thead th.dt-head-left,table.dataTable thead td.dt-head-left,table.dataTable tfoot th.dt-head-left,table.dataTable tfoot td.dt-head-left{text-align:left}table.dataTable thead th.dt-head-center,table.dataTable thead td.dt-head-center,table.dataTable tfoot th.dt-head-center,table.dataTable tfoot td.dt-head-center{text-align:center}table.dataTable thead th.dt-head-right,table.dataTable thead td.dt-head-right,table.dataTable tfoot th.dt-head-right,table.dataTable tfoot td.dt-head-right{text-align:right}table.dataTable thead th.dt-head-justify,table.dataTable thead td.dt-head-justify,table.dataTable tfoot th.dt-head-justify,table.dataTable tfoot td.dt-head-justify{text-align:justify}table.dataTable thead th.dt-head-nowrap,table.dataTable thead td.dt-head-nowrap,table.dataTable tfoot th.dt-head-nowrap,table.dataTable tfoot td.dt-head-nowrap{white-space:nowrap}table.dataTable tbody th.dt-body-left,table.dataTable tbody td.dt-body-left{text-align:left}table.dataTable tbody th.dt-body-center,table.dataTable tbody td.dt-body-center{text-align:center}table.dataTable tbody th.dt-body-right,table.dataTable tbody td.dt-body-right{text-align:right}table.dataTable tbody th.dt-body-justify,table.dataTable tbody td.dt-body-justify{text-align:justify}table.dataTable tbody th.dt-body-nowrap,table.dataTable tbody td.dt-body-nowrap{white-space:nowrap}table.dataTable{width:100%;margin:0 auto;clear:both;border-collapse:separate;border-spacing:0}table.dataTable thead th,table.dataTable tfoot th{font-weight:bold}table.dataTable>thead>tr>th,table.dataTable>thead>tr>td{padding:10px;border-bottom:1px solid rgba(0, 0, 0, 0.3)}table.dataTable>thead>tr>th:active,table.dataTable>thead>tr>td:active{outline:none}table.dataTable>tfoot>tr>th,table.dataTable>tfoot>tr>td{padding:10px 10px 6px 10px;border-top:1px solid rgba(0, 0, 0, 0.3)}table.dataTable tbody tr{background-color:transparent}table.dataTable tbody tr.selected>*{box-shadow:inset 0 0 0 9999px rgba(13, 110, 253, 0.9);box-shadow:inset 0 0 0 9999px rgba(var(--dt-row-selected), 0.9);color:rgb(255, 255, 255);color:rgb(var(--dt-row-selected-text))}table.dataTable tbody tr.selected a{color:rgb(9, 10, 11);color:rgb(var(--dt-row-selected-link))}table.dataTable tbody th,table.dataTable tbody td{padding:8px 10px}table.dataTable.row-border>tbody>tr>th,table.dataTable.row-border>tbody>tr>td,table.dataTable.display>tbody>tr>th,table.dataTable.display>tbody>tr>td{border-top:1px solid rgba(0, 0, 0, 0.15)}table.dataTable.row-border>tbody>tr:first-child>th,table.dataTable.row-border>tbody>tr:first-child>td,table.dataTable.display>tbody>tr:first-child>th,table.dataTable.display>tbody>tr:first-child>td{border-top:none}table.dataTable.row-border>tbody>tr.selected+tr.selected>td,table.dataTable.display>tbody>tr.selected+tr.selected>td{border-top-color:#0262ef}table.dataTable.cell-border>tbody>tr>th,table.dataTable.cell-border>tbody>tr>td{border-top:1px solid rgba(0, 0, 0, 0.15);border-right:1px solid rgba(0, 0, 0, 0.15)}table.dataTable.cell-border>tbody>tr>th:first-child,table.dataTable.cell-border>tbody>tr>td:first-child{border-left:1px solid rgba(0, 0, 0, 0.15)}table.dataTable.cell-border>tbody>tr:first-child>th,table.dataTable.cell-border>tbody>tr:first-child>td{border-top:none}table.dataTable.stripe>tbody>tr.odd>*,table.dataTable.display>tbody>tr.odd>*{box-shadow:inset 0 0 0 9999px rgba(0, 0, 0, 0.023);box-shadow:inset 0 0 0 9999px rgba(var(--dt-row-stripe), 0.023)}table.dataTable.stripe>tbody>tr.odd.selected>*,table.dataTable.display>tbody>tr.odd.selected>*{box-shadow:inset 0 0 0 9999px rgba(13, 110, 253, 0.923);box-shadow:inset 0 0 0 9999px rgba(var(--dt-row-selected), 0.923)}table.dataTable.hover>tbody>tr:hover>*,table.dataTable.display>tbody>tr:hover>*{box-shadow:inset 0 0 0 9999px rgba(0, 0, 0, 0.035);box-shadow:inset 0 0 0 9999px rgba(var(--dt-row-hover), 0.035)}table.dataTable.hover>tbody>tr.selected:hover>*,table.dataTable.display>tbody>tr.selected:hover>*{box-shadow:inset 0 0 0 9999px #0d6efd !important;box-shadow:inset 0 0 0 9999px rgba(var(--dt-row-selected), 1) !important}table.dataTable.order-column>tbody tr>.sorting_1,table.dataTable.order-column>tbody tr>.sorting_2,table.dataTable.order-column>tbody tr>.sorting_3,table.dataTable.display>tbody tr>.sorting_1,table.dataTable.display>tbody tr>.sorting_2,table.dataTable.display>tbody tr>.sorting_3{box-shadow:inset 0 0 0 9999px rgba(0, 0, 0, 0.019);box-shadow:inset 0 0 0 9999px rgba(var(--dt-column-ordering), 0.019)}table.dataTable.order-column>tbody tr.selected>.sorting_1,table.dataTable.order-column>tbody tr.selected>.sorting_2,table.dataTable.order-column>tbody tr.selected>.sorting_3,table.dataTable.display>tbody tr.selected>.sorting_1,table.dataTable.display>tbody tr.selected>.sorting_2,table.dataTable.display>tbody tr.selected>.sorting_3{box-shadow:inset 0 0 0 9999px rgba(13, 110, 253, 0.919);box-shadow:inset 0 0 0 9999px rgba(var(--dt-row-selected), 0.919)}table.dataTable.display>tbody>tr.odd>.sorting_1,table.dataTable.order-column.stripe>tbody>tr.odd>.sorting_1{box-shadow:inset 0 0 0 9999px rgba(0, 0, 0, 0.054);box-shadow:inset 0 0 0 9999px rgba(var(--dt-column-ordering), 0.054)}table.dataTable.display>tbody>tr.odd>.sorting_2,table.dataTable.order-column.stripe>tbody>tr.odd>.sorting_2{box-shadow:inset 0 0 0 9999px rgba(0, 0, 0, 0.047);box-shadow:inset 0 0 0 9999px rgba(var(--dt-column-ordering), 0.047)}table.dataTable.display>tbody>tr.odd>.sorting_3,table.dataTable.order-column.stripe>tbody>tr.odd>.sorting_3{box-shadow:inset 0 0 0 9999px rgba(0, 0, 0, 0.039);box-shadow:inset 0 0 0 9999px rgba(var(--dt-column-ordering), 0.039)}table.dataTable.display>tbody>tr.odd.selected>.sorting_1,table.dataTable.order-column.stripe>tbody>tr.odd.selected>.sorting_1{box-shadow:inset 0 0 0 9999px rgba(13, 110, 253, 0.954);box-shadow:inset 0 0 0 9999px rgba(var(--dt-row-selected), 0.954)}table.dataTable.display>tbody>tr.odd.selected>.sorting_2,table.dataTable.order-column.stripe>tbody>tr.odd.selected>.sorting_2{box-shadow:inset 0 0 0 9999px rgba(13, 110, 253, 0.947);box-shadow:inset 0 0 0 9999px rgba(var(--dt-row-selected), 0.947)}table.dataTable.display>tbody>tr.odd.selected>.sorting_3,table.dataTable.order-column.stripe>tbody>tr.odd.selected>.sorting_3{box-shadow:inset 0 0 0 9999px rgba(13, 110, 253, 0.939);box-shadow:inset 0 0 0 9999px rgba(var(--dt-row-selected), 0.939)}table.dataTable.display>tbody>tr.even>.sorting_1,table.dataTable.order-column.stripe>tbody>tr.even>.sorting_1{box-shadow:inset 0 0 0 9999px rgba(0, 0, 0, 0.019);box-shadow:inset 0 0 0 9999px rgba(var(--dt-row-selected), 0.019)}table.dataTable.display>tbody>tr.even>.sorting_2,table.dataTable.order-column.stripe>tbody>tr.even>.sorting_2{box-shadow:inset 0 0 0 9999px rgba(0, 0, 0, 0.011);box-shadow:inset 0 0 0 9999px rgba(var(--dt-row-selected), 0.011)}table.dataTable.display>tbody>tr.even>.sorting_3,table.dataTable.order-column.stripe>tbody>tr.even>.sorting_3{box-shadow:inset 0 0 0 9999px rgba(0, 0, 0, 0.003);box-shadow:inset 0 0 0 9999px rgba(var(--dt-row-selected), 0.003)}table.dataTable.display>tbody>tr.even.selected>.sorting_1,table.dataTable.order-column.stripe>tbody>tr.even.selected>.sorting_1{box-shadow:inset 0 0 0 9999px rgba(13, 110, 253, 0.919);box-shadow:inset 0 0 0 9999px rgba(var(--dt-row-selected), 0.919)}table.dataTable.display>tbody>tr.even.selected>.sorting_2,table.dataTable.order-column.stripe>tbody>tr.even.selected>.sorting_2{box-shadow:inset 0 0 0 9999px rgba(13, 110, 253, 0.911);box-shadow:inset 0 0 0 9999px rgba(var(--dt-row-selected), 0.911)}table.dataTable.display>tbody>tr.even.selected>.sorting_3,table.dataTable.order-column.stripe>tbody>tr.even.selected>.sorting_3{box-shadow:inset 0 0 0 9999px rgba(13, 110, 253, 0.903);box-shadow:inset 0 0 0 9999px rgba(var(--dt-row-selected), 0.903)}table.dataTable.display tbody tr:hover>.sorting_1,table.dataTable.order-column.hover tbody tr:hover>.sorting_1{box-shadow:inset 0 0 0 9999px rgba(0, 0, 0, 0.082);box-shadow:inset 0 0 0 9999px rgba(var(--dt-row-hover), 0.082)}table.dataTable.display tbody tr:hover>.sorting_2,table.dataTable.order-column.hover tbody tr:hover>.sorting_2{box-shadow:inset 0 0 0 9999px rgba(0, 0, 0, 0.074);box-shadow:inset 0 0 0 9999px rgba(var(--dt-row-hover), 0.074)}table.dataTable.display tbody tr:hover>.sorting_3,table.dataTable.order-column.hover tbody tr:hover>.sorting_3{box-shadow:inset 0 0 0 9999px rgba(0, 0, 0, 0.062);box-shadow:inset 0 0 0 9999px rgba(var(--dt-row-hover), 0.062)}table.dataTable.display tbody tr:hover.selected>.sorting_1,table.dataTable.order-column.hover tbody tr:hover.selected>.sorting_1{box-shadow:inset 0 0 0 9999px rgba(13, 110, 253, 0.982);box-shadow:inset 0 0 0 9999px rgba(var(--dt-row-selected), 0.982)}table.dataTable.display tbody tr:hover.selected>.sorting_2,table.dataTable.order-column.hover tbody tr:hover.selected>.sorting_2{box-shadow:inset 0 0 0 9999px rgba(13, 110, 253, 0.974);box-shadow:inset 0 0 0 9999px rgba(var(--dt-row-selected), 0.974)}table.dataTable.display tbody tr:hover.selected>.sorting_3,table.dataTable.order-column.hover tbody tr:hover.selected>.sorting_3{box-shadow:inset 0 0 0 9999px rgba(13, 110, 253, 0.962);box-shadow:inset 0 0 0 9999px rgba(var(--dt-row-selected), 0.962)}table.dataTable.no-footer{border-bottom:1px solid rgba(0, 0, 0, 0.3)}table.dataTable.compact thead th,table.dataTable.compact thead td,table.dataTable.compact tfoot th,table.dataTable.compact tfoot td,table.dataTable.compact tbody th,table.dataTable.compact tbody td{padding:4px}table.dataTable th,table.dataTable td{box-sizing:content-box}.dataTables_wrapper{position:relative;clear:both}.dataTables_wrapper .dataTables_length{float:left}.dataTables_wrapper .dataTables_length select{border:1px solid #aaa;border-radius:3px;padding:5px;background-color:transparent;color:inherit;padding:4px}.dataTables_wrapper .dataTables_filter{float:right;text-align:right}.dataTables_wrapper .dataTables_filter input{border:1px solid #aaa;border-radius:3px;padding:5px;background-color:transparent;color:inherit;margin-left:3px}.dataTables_wrapper .dataTables_info{clear:both;float:left;padding-top:.755em}.dataTables_wrapper .dataTables_paginate{float:right;text-align:right;padding-top:.25em}.dataTables_wrapper .dataTables_paginate .paginate_button{box-sizing:border-box;display:inline-block;min-width:1.5em;padding:.5em 1em;margin-left:2px;text-align:center;text-decoration:none !important;cursor:pointer;color:inherit !important;border:1px solid transparent;border-radius:2px;background:transparent}.dataTables_wrapper .dataTables_paginate .paginate_button.current,.dataTables_wrapper .dataTables_paginate .paginate_button.current:hover{color:inherit !important;border:1px solid rgba(0, 0, 0, 0.3);background-color:rgba(0, 0, 0, 0.05);background:-webkit-gradient(linear, left top, left bottom, color-stop(0%, rgba(230, 230, 230, 0.05)), color-stop(100%, rgba(0, 0, 0, 0.05)));background:-webkit-linear-gradient(top, rgba(230, 230, 230, 0.05) 0%, rgba(0, 0, 0, 0.05) 100%);background:-moz-linear-gradient(top, rgba(230, 230, 230, 0.05) 0%, rgba(0, 0, 0, 0.05) 100%);background:-ms-linear-gradient(top, rgba(230, 230, 230, 0.05) 0%, rgba(0, 0, 0, 0.05) 100%);background:-o-linear-gradient(top, rgba(230, 230, 230, 0.05) 0%, rgba(0, 0, 0, 0.05) 100%);background:linear-gradient(to bottom, rgba(230, 230, 230, 0.05) 0%, rgba(0, 0, 0, 0.05) 100%)}.dataTables_wrapper .dataTables_paginate .paginate_button.disabled,.dataTables_wrapper .dataTables_paginate .paginate_button.disabled:hover,.dataTables_wrapper .dataTables_paginate .paginate_button.disabled:active{cursor:default;color:#666 !important;border:1px solid transparent;background:transparent;box-shadow:none}.dataTables_wrapper .dataTables_paginate .paginate_button:hover{color:white !important;border:1px solid #111;background-color:#111;background:-webkit-gradient(linear, left top, left bottom, color-stop(0%, #585858), color-stop(100%, #111));background:-webkit-linear-gradient(top, #585858 0%, #111 100%);background:-moz-linear-gradient(top, #585858 0%, #111 100%);background:-ms-linear-gradient(top, #585858 0%, #111 100%);background:-o-linear-gradient(top, #585858 0%, #111 100%);background:linear-gradient(to bottom, #585858 0%, #111 100%)}.dataTables_wrapper .dataTables_paginate .paginate_button:active{outline:none;background-color:#0c0c0c;background:-webkit-gradient(linear, left top, left bottom, color-stop(0%, #2b2b2b), color-stop(100%, #0c0c0c));background:-webkit-linear-gradient(top, #2b2b2b 0%, #0c0c0c 100%);background:-moz-linear-gradient(top, #2b2b2b 0%, #0c0c0c 100%);background:-ms-linear-gradient(top, #2b2b2b 0%, #0c0c0c 100%);background:-o-linear-gradient(top, #2b2b2b 0%, #0c0c0c 100%);background:linear-gradient(to bottom, #2b2b2b 0%, #0c0c0c 100%);box-shadow:inset 0 0 3px #111}.dataTables_wrapper .dataTables_paginate .ellipsis{padding:0 1em}.dataTables_wrapper .dataTables_length,.dataTables_wrapper .dataTables_filter,.dataTables_wrapper .dataTables_info,.dataTables_wrapper .dataTables_processing,.dataTables_wrapper .dataTables_paginate{color:inherit}.dataTables_wrapper .dataTables_scroll{clear:both}.dataTables_wrapper .dataTables_scroll div.dataTables_scrollBody{-webkit-overflow-scrolling:touch}.dataTables_wrapper .dataTables_scroll div.dataTables_scrollBody>table>thead>tr>th,.dataTables_wrapper .dataTables_scroll div.dataTables_scrollBody>table>thead>tr>td,.dataTables_wrapper .dataTables_scroll div.dataTables_scrollBody>table>tbody>tr>th,.dataTables_wrapper .dataTables_scroll div.dataTables_scrollBody>table>tbody>tr>td{vertical-align:middle}.dataTables_wrapper .dataTables_scroll div.dataTables_scrollBody>table>thead>tr>th>div.dataTables_sizing,.dataTables_wrapper .dataTables_scroll div.dataTables_scrollBody>table>thead>tr>td>div.dataTables_sizing,.dataTables_wrapper .dataTables_scroll div.dataTables_scrollBody>table>tbody>tr>th>div.dataTables_sizing,.dataTables_wrapper .dataTables_scroll div.dataTables_scrollBody>table>tbody>tr>td>div.dataTables_sizing{height:0;overflow:hidden;margin:0 !important;padding:0 !important}.dataTables_wrapper.no-footer .dataTables_scrollBody{border-bottom:1px solid rgba(0, 0, 0, 0.3)}.dataTables_wrapper.no-footer div.dataTables_scrollHead table.dataTable,.dataTables_wrapper.no-footer div.dataTables_scrollBody>table{border-bottom:none}.dataTables_wrapper:after{visibility:hidden;display:block;content:"";clear:both;height:0}@media screen and (max-width: 767px){.dataTables_wrapper .dataTables_info,.dataTables_wrapper .dataTables_paginate{float:none;text-align:center}.dataTables_wrapper .dataTables_paginate{margin-top:.5em}}@media screen and (max-width: 640px){.dataTables_wrapper .dataTables_length,.dataTables_wrapper .dataTables_filter{float:none;text-align:center}.dataTables_wrapper .dataTables_filter{margin-top:.5em}}html.dark{--dt-row-hover: 255, 255, 255;--dt-row-stripe: 255, 255, 255;--dt-column-ordering: 255, 255, 255}html.dark table.dataTable>thead>tr>th,html.dark table.dataTable>thead>tr>td{border-bottom:1px solid rgb(89, 91, 94)}html.dark table.dataTable>thead>tr>th:active,html.dark table.dataTable>thead>tr>td:active{outline:none}html.dark table.dataTable>tfoot>tr>th,html.dark table.dataTable>tfoot>tr>td{border-top:1px solid rgb(89, 91, 94)}html.dark table.dataTable.row-border>tbody>tr>th,html.dark table.dataTable.row-border>tbody>tr>td,html.dark table.dataTable.display>tbody>tr>th,html.dark table.dataTable.display>tbody>tr>td{border-top:1px solid rgb(64, 67, 70)}html.dark table.dataTable.row-border>tbody>tr.selected+tr.selected>td,html.dark table.dataTable.display>tbody>tr.selected+tr.selected>td{border-top-color:#0257d5}html.dark table.dataTable.cell-border>tbody>tr>th,html.dark table.dataTable.cell-border>tbody>tr>td{border-top:1px solid rgb(64, 67, 70);border-right:1px solid rgb(64, 67, 70)}html.dark table.dataTable.cell-border>tbody>tr>th:first-child,html.dark table.dataTable.cell-border>tbody>tr>td:first-child{border-left:1px solid rgb(64, 67, 70)}html.dark .dataTables_wrapper .dataTables_filter input,html.dark .dataTables_wrapper .dataTables_length select{border:1px solid rgba(255, 255, 255, 0.2);background-color:var(--dt-html-background)}html.dark .dataTables_wrapper .dataTables_paginate .paginate_button.current,html.dark .dataTables_wrapper .dataTables_paginate .paginate_button.current:hover{border:1px solid rgb(89, 91, 94);background:rgba(255, 255, 255, 0.15)}html.dark .dataTables_wrapper .dataTables_paginate .paginate_button.disabled,html.dark .dataTables_wrapper .dataTables_paginate .paginate_button.disabled:hover,html.dark .dataTables_wrapper .dataTables_paginate .paginate_button.disabled:active{color:#666 !important}html.dark .dataTables_wrapper .dataTables_paginate .paginate_button:hover{border:1px solid rgb(53, 53, 53);background:rgb(53, 53, 53)}html.dark .dataTables_wrapper .dataTables_paginate .paginate_button:active{background:#3a3a3a}
diff --git a/bitbake/lib/toaster/toastergui/static/js/bootstrap-3.4.1.js b/bitbake/lib/toaster/toastergui/static/js/bootstrap-3.4.1.js
new file mode 100644
index 0000000000..170bd608f7
--- /dev/null
+++ b/bitbake/lib/toaster/toastergui/static/js/bootstrap-3.4.1.js
@@ -0,0 +1,2580 @@
+/*!
+ * Bootstrap v3.4.1 (https://getbootstrap.com/)
+ * Copyright 2011-2019 Twitter, Inc.
+ * Licensed under the MIT license
+ */
+
+if (typeof jQuery === 'undefined') {
+ throw new Error('Bootstrap\'s JavaScript requires jQuery')
+}
+
++function ($) {
+ 'use strict';
+ var version = $.fn.jquery.split(' ')[0].split('.')
+ if ((version[0] < 2 && version[1] < 9) || (version[0] == 1 && version[1] == 9 && version[2] < 1) || (version[0] > 3)) {
+ throw new Error('Bootstrap\'s JavaScript requires jQuery version 1.9.1 or higher, but lower than version 4')
+ }
+}(jQuery);
+
+/* ========================================================================
+ * Bootstrap: transition.js v3.4.1
+ * https://getbootstrap.com/docs/3.4/javascript/#transitions
+ * ========================================================================
+ * Copyright 2011-2019 Twitter, Inc.
+ * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE)
+ * ======================================================================== */
+
+
++function ($) {
+ 'use strict';
+
+ // CSS TRANSITION SUPPORT (Shoutout: https://modernizr.com/)
+ // ============================================================
+
+ function transitionEnd() {
+ var el = document.createElement('bootstrap')
+
+ var transEndEventNames = {
+ WebkitTransition : 'webkitTransitionEnd',
+ MozTransition : 'transitionend',
+ OTransition : 'oTransitionEnd otransitionend',
+ transition : 'transitionend'
+ }
+
+ for (var name in transEndEventNames) {
+ if (el.style[name] !== undefined) {
+ return { end: transEndEventNames[name] }
+ }
+ }
+
+ return false // explicit for ie8 ( ._.)
+ }
+
+ // https://blog.alexmaccaw.com/css-transitions
+ $.fn.emulateTransitionEnd = function (duration) {
+ var called = false
+ var $el = this
+ $(this).one('bsTransitionEnd', function () { called = true })
+ var callback = function () { if (!called) $($el).trigger($.support.transition.end) }
+ setTimeout(callback, duration)
+ return this
+ }
+
+ $(function () {
+ $.support.transition = transitionEnd()
+
+ if (!$.support.transition) return
+
+ $.event.special.bsTransitionEnd = {
+ bindType: $.support.transition.end,
+ delegateType: $.support.transition.end,
+ handle: function (e) {
+ if ($(e.target).is(this)) return e.handleObj.handler.apply(this, arguments)
+ }
+ }
+ })
+
+}(jQuery);
+
+/* ========================================================================
+ * Bootstrap: alert.js v3.4.1
+ * https://getbootstrap.com/docs/3.4/javascript/#alerts
+ * ========================================================================
+ * Copyright 2011-2019 Twitter, Inc.
+ * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE)
+ * ======================================================================== */
+
+
++function ($) {
+ 'use strict';
+
+ // ALERT CLASS DEFINITION
+ // ======================
+
+ var dismiss = '[data-dismiss="alert"]'
+ var Alert = function (el) {
+ $(el).on('click', dismiss, this.close)
+ }
+
+ Alert.VERSION = '3.4.1'
+
+ Alert.TRANSITION_DURATION = 150
+
+ Alert.prototype.close = function (e) {
+ var $this = $(this)
+ var selector = $this.attr('data-target')
+
+ if (!selector) {
+ selector = $this.attr('href')
+ selector = selector && selector.replace(/.*(?=#[^\s]*$)/, '') // strip for ie7
+ }
+
+ selector = selector === '#' ? [] : selector
+ var $parent = $(document).find(selector)
+
+ if (e) e.preventDefault()
+
+ if (!$parent.length) {
+ $parent = $this.closest('.alert')
+ }
+
+ $parent.trigger(e = $.Event('close.bs.alert'))
+
+ if (e.isDefaultPrevented()) return
+
+ $parent.removeClass('in')
+
+ function removeElement() {
+ // detach from parent, fire event then clean up data
+ $parent.detach().trigger('closed.bs.alert').remove()
+ }
+
+ $.support.transition && $parent.hasClass('fade') ?
+ $parent
+ .one('bsTransitionEnd', removeElement)
+ .emulateTransitionEnd(Alert.TRANSITION_DURATION) :
+ removeElement()
+ }
+
+
+ // ALERT PLUGIN DEFINITION
+ // =======================
+
+ function Plugin(option) {
+ return this.each(function () {
+ var $this = $(this)
+ var data = $this.data('bs.alert')
+
+ if (!data) $this.data('bs.alert', (data = new Alert(this)))
+ if (typeof option == 'string') data[option].call($this)
+ })
+ }
+
+ var old = $.fn.alert
+
+ $.fn.alert = Plugin
+ $.fn.alert.Constructor = Alert
+
+
+ // ALERT NO CONFLICT
+ // =================
+
+ $.fn.alert.noConflict = function () {
+ $.fn.alert = old
+ return this
+ }
+
+
+ // ALERT DATA-API
+ // ==============
+
+ $(document).on('click.bs.alert.data-api', dismiss, Alert.prototype.close)
+
+}(jQuery);
+
+/* ========================================================================
+ * Bootstrap: button.js v3.4.1
+ * https://getbootstrap.com/docs/3.4/javascript/#buttons
+ * ========================================================================
+ * Copyright 2011-2019 Twitter, Inc.
+ * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE)
+ * ======================================================================== */
+
+
++function ($) {
+ 'use strict';
+
+ // BUTTON PUBLIC CLASS DEFINITION
+ // ==============================
+
+ var Button = function (element, options) {
+ this.$element = $(element)
+ this.options = $.extend({}, Button.DEFAULTS, options)
+ this.isLoading = false
+ }
+
+ Button.VERSION = '3.4.1'
+
+ Button.DEFAULTS = {
+ loadingText: 'loading...'
+ }
+
+ Button.prototype.setState = function (state) {
+ var d = 'disabled'
+ var $el = this.$element
+ var val = $el.is('input') ? 'val' : 'html'
+ var data = $el.data()
+
+ state += 'Text'
+
+ if (data.resetText == null) $el.data('resetText', $el[val]())
+
+ // push to event loop to allow forms to submit
+ setTimeout($.proxy(function () {
+ $el[val](data[state] == null ? this.options[state] : data[state])
+
+ if (state == 'loadingText') {
+ this.isLoading = true
+ $el.addClass(d).attr(d, d).prop(d, true)
+ } else if (this.isLoading) {
+ this.isLoading = false
+ $el.removeClass(d).removeAttr(d).prop(d, false)
+ }
+ }, this), 0)
+ }
+
+ Button.prototype.toggle = function () {
+ var changed = true
+ var $parent = this.$element.closest('[data-toggle="buttons"]')
+
+ if ($parent.length) {
+ var $input = this.$element.find('input')
+ if ($input.prop('type') == 'radio') {
+ if ($input.prop('checked')) changed = false
+ $parent.find('.active').removeClass('active')
+ this.$element.addClass('active')
+ } else if ($input.prop('type') == 'checkbox') {
+ if (($input.prop('checked')) !== this.$element.hasClass('active')) changed = false
+ this.$element.toggleClass('active')
+ }
+ $input.prop('checked', this.$element.hasClass('active'))
+ if (changed) $input.trigger('change')
+ } else {
+ this.$element.attr('aria-pressed', !this.$element.hasClass('active'))
+ this.$element.toggleClass('active')
+ }
+ }
+
+
+ // BUTTON PLUGIN DEFINITION
+ // ========================
+
+ function Plugin(option) {
+ return this.each(function () {
+ var $this = $(this)
+ var data = $this.data('bs.button')
+ var options = typeof option == 'object' && option
+
+ if (!data) $this.data('bs.button', (data = new Button(this, options)))
+
+ if (option == 'toggle') data.toggle()
+ else if (option) data.setState(option)
+ })
+ }
+
+ var old = $.fn.button
+
+ $.fn.button = Plugin
+ $.fn.button.Constructor = Button
+
+
+ // BUTTON NO CONFLICT
+ // ==================
+
+ $.fn.button.noConflict = function () {
+ $.fn.button = old
+ return this
+ }
+
+
+ // BUTTON DATA-API
+ // ===============
+
+ $(document)
+ .on('click.bs.button.data-api', '[data-toggle^="button"]', function (e) {
+ var $btn = $(e.target).closest('.btn')
+ Plugin.call($btn, 'toggle')
+ if (!($(e.target).is('input[type="radio"], input[type="checkbox"]'))) {
+ // Prevent double click on radios, and the double selections (so cancellation) on checkboxes
+ e.preventDefault()
+ // The target component still receive the focus
+ if ($btn.is('input,button')) $btn.trigger('focus')
+ else $btn.find('input:visible,button:visible').first().trigger('focus')
+ }
+ })
+ .on('focus.bs.button.data-api blur.bs.button.data-api', '[data-toggle^="button"]', function (e) {
+ $(e.target).closest('.btn').toggleClass('focus', /^focus(in)?$/.test(e.type))
+ })
+
+}(jQuery);
+
+/* ========================================================================
+ * Bootstrap: carousel.js v3.4.1
+ * https://getbootstrap.com/docs/3.4/javascript/#carousel
+ * ========================================================================
+ * Copyright 2011-2019 Twitter, Inc.
+ * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE)
+ * ======================================================================== */
+
+
++function ($) {
+ 'use strict';
+
+ // CAROUSEL CLASS DEFINITION
+ // =========================
+
+ var Carousel = function (element, options) {
+ this.$element = $(element)
+ this.$indicators = this.$element.find('.carousel-indicators')
+ this.options = options
+ this.paused = null
+ this.sliding = null
+ this.interval = null
+ this.$active = null
+ this.$items = null
+
+ this.options.keyboard && this.$element.on('keydown.bs.carousel', $.proxy(this.keydown, this))
+
+ this.options.pause == 'hover' && !('ontouchstart' in document.documentElement) && this.$element
+ .on('mouseenter.bs.carousel', $.proxy(this.pause, this))
+ .on('mouseleave.bs.carousel', $.proxy(this.cycle, this))
+ }
+
+ Carousel.VERSION = '3.4.1'
+
+ Carousel.TRANSITION_DURATION = 600
+
+ Carousel.DEFAULTS = {
+ interval: 5000,
+ pause: 'hover',
+ wrap: true,
+ keyboard: true
+ }
+
+ Carousel.prototype.keydown = function (e) {
+ if (/input|textarea/i.test(e.target.tagName)) return
+ switch (e.which) {
+ case 37: this.prev(); break
+ case 39: this.next(); break
+ default: return
+ }
+
+ e.preventDefault()
+ }
+
+ Carousel.prototype.cycle = function (e) {
+ e || (this.paused = false)
+
+ this.interval && clearInterval(this.interval)
+
+ this.options.interval
+ && !this.paused
+ && (this.interval = setInterval($.proxy(this.next, this), this.options.interval))
+
+ return this
+ }
+
+ Carousel.prototype.getItemIndex = function (item) {
+ this.$items = item.parent().children('.item')
+ return this.$items.index(item || this.$active)
+ }
+
+ Carousel.prototype.getItemForDirection = function (direction, active) {
+ var activeIndex = this.getItemIndex(active)
+ var willWrap = (direction == 'prev' && activeIndex === 0)
+ || (direction == 'next' && activeIndex == (this.$items.length - 1))
+ if (willWrap && !this.options.wrap) return active
+ var delta = direction == 'prev' ? -1 : 1
+ var itemIndex = (activeIndex + delta) % this.$items.length
+ return this.$items.eq(itemIndex)
+ }
+
+ Carousel.prototype.to = function (pos) {
+ var that = this
+ var activeIndex = this.getItemIndex(this.$active = this.$element.find('.item.active'))
+
+ if (pos > (this.$items.length - 1) || pos < 0) return
+
+ if (this.sliding) return this.$element.one('slid.bs.carousel', function () { that.to(pos) }) // yes, "slid"
+ if (activeIndex == pos) return this.pause().cycle()
+
+ return this.slide(pos > activeIndex ? 'next' : 'prev', this.$items.eq(pos))
+ }
+
+ Carousel.prototype.pause = function (e) {
+ e || (this.paused = true)
+
+ if (this.$element.find('.next, .prev').length && $.support.transition) {
+ this.$element.trigger($.support.transition.end)
+ this.cycle(true)
+ }
+
+ this.interval = clearInterval(this.interval)
+
+ return this
+ }
+
+ Carousel.prototype.next = function () {
+ if (this.sliding) return
+ return this.slide('next')
+ }
+
+ Carousel.prototype.prev = function () {
+ if (this.sliding) return
+ return this.slide('prev')
+ }
+
+ Carousel.prototype.slide = function (type, next) {
+ var $active = this.$element.find('.item.active')
+ var $next = next || this.getItemForDirection(type, $active)
+ var isCycling = this.interval
+ var direction = type == 'next' ? 'left' : 'right'
+ var that = this
+
+ if ($next.hasClass('active')) return (this.sliding = false)
+
+ var relatedTarget = $next[0]
+ var slideEvent = $.Event('slide.bs.carousel', {
+ relatedTarget: relatedTarget,
+ direction: direction
+ })
+ this.$element.trigger(slideEvent)
+ if (slideEvent.isDefaultPrevented()) return
+
+ this.sliding = true
+
+ isCycling && this.pause()
+
+ if (this.$indicators.length) {
+ this.$indicators.find('.active').removeClass('active')
+ var $nextIndicator = $(this.$indicators.children()[this.getItemIndex($next)])
+ $nextIndicator && $nextIndicator.addClass('active')
+ }
+
+ var slidEvent = $.Event('slid.bs.carousel', { relatedTarget: relatedTarget, direction: direction }) // yes, "slid"
+ if ($.support.transition && this.$element.hasClass('slide')) {
+ $next.addClass(type)
+ if (typeof $next === 'object' && $next.length) {
+ $next[0].offsetWidth // force reflow
+ }
+ $active.addClass(direction)
+ $next.addClass(direction)
+ $active
+ .one('bsTransitionEnd', function () {
+ $next.removeClass([type, direction].join(' ')).addClass('active')
+ $active.removeClass(['active', direction].join(' '))
+ that.sliding = false
+ setTimeout(function () {
+ that.$element.trigger(slidEvent)
+ }, 0)
+ })
+ .emulateTransitionEnd(Carousel.TRANSITION_DURATION)
+ } else {
+ $active.removeClass('active')
+ $next.addClass('active')
+ this.sliding = false
+ this.$element.trigger(slidEvent)
+ }
+
+ isCycling && this.cycle()
+
+ return this
+ }
+
+
+ // CAROUSEL PLUGIN DEFINITION
+ // ==========================
+
+ function Plugin(option) {
+ return this.each(function () {
+ var $this = $(this)
+ var data = $this.data('bs.carousel')
+ var options = $.extend({}, Carousel.DEFAULTS, $this.data(), typeof option == 'object' && option)
+ var action = typeof option == 'string' ? option : options.slide
+
+ if (!data) $this.data('bs.carousel', (data = new Carousel(this, options)))
+ if (typeof option == 'number') data.to(option)
+ else if (action) data[action]()
+ else if (options.interval) data.pause().cycle()
+ })
+ }
+
+ var old = $.fn.carousel
+
+ $.fn.carousel = Plugin
+ $.fn.carousel.Constructor = Carousel
+
+
+ // CAROUSEL NO CONFLICT
+ // ====================
+
+ $.fn.carousel.noConflict = function () {
+ $.fn.carousel = old
+ return this
+ }
+
+
+ // CAROUSEL DATA-API
+ // =================
+
+ var clickHandler = function (e) {
+ var $this = $(this)
+ var href = $this.attr('href')
+ if (href) {
+ href = href.replace(/.*(?=#[^\s]+$)/, '') // strip for ie7
+ }
+
+ var target = $this.attr('data-target') || href
+ var $target = $(document).find(target)
+
+ if (!$target.hasClass('carousel')) return
+
+ var options = $.extend({}, $target.data(), $this.data())
+ var slideIndex = $this.attr('data-slide-to')
+ if (slideIndex) options.interval = false
+
+ Plugin.call($target, options)
+
+ if (slideIndex) {
+ $target.data('bs.carousel').to(slideIndex)
+ }
+
+ e.preventDefault()
+ }
+
+ $(document)
+ .on('click.bs.carousel.data-api', '[data-slide]', clickHandler)
+ .on('click.bs.carousel.data-api', '[data-slide-to]', clickHandler)
+
+ $(window).on('load', function () {
+ $('[data-ride="carousel"]').each(function () {
+ var $carousel = $(this)
+ Plugin.call($carousel, $carousel.data())
+ })
+ })
+
+}(jQuery);
+
+/* ========================================================================
+ * Bootstrap: collapse.js v3.4.1
+ * https://getbootstrap.com/docs/3.4/javascript/#collapse
+ * ========================================================================
+ * Copyright 2011-2019 Twitter, Inc.
+ * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE)
+ * ======================================================================== */
+
+/* jshint latedef: false */
+
++function ($) {
+ 'use strict';
+
+ // COLLAPSE PUBLIC CLASS DEFINITION
+ // ================================
+
+ var Collapse = function (element, options) {
+ this.$element = $(element)
+ this.options = $.extend({}, Collapse.DEFAULTS, options)
+ this.$trigger = $('[data-toggle="collapse"][href="#' + element.id + '"],' +
+ '[data-toggle="collapse"][data-target="#' + element.id + '"]')
+ this.transitioning = null
+
+ if (this.options.parent) {
+ this.$parent = this.getParent()
+ } else {
+ this.addAriaAndCollapsedClass(this.$element, this.$trigger)
+ }
+
+ if (this.options.toggle) this.toggle()
+ }
+
+ Collapse.VERSION = '3.4.1'
+
+ Collapse.TRANSITION_DURATION = 350
+
+ Collapse.DEFAULTS = {
+ toggle: true
+ }
+
+ Collapse.prototype.dimension = function () {
+ var hasWidth = this.$element.hasClass('width')
+ return hasWidth ? 'width' : 'height'
+ }
+
+ Collapse.prototype.show = function () {
+ if (this.transitioning || this.$element.hasClass('in')) return
+
+ var activesData
+ var actives = this.$parent && this.$parent.children('.panel').children('.in, .collapsing')
+
+ if (actives && actives.length) {
+ activesData = actives.data('bs.collapse')
+ if (activesData && activesData.transitioning) return
+ }
+
+ var startEvent = $.Event('show.bs.collapse')
+ this.$element.trigger(startEvent)
+ if (startEvent.isDefaultPrevented()) return
+
+ if (actives && actives.length) {
+ Plugin.call(actives, 'hide')
+ activesData || actives.data('bs.collapse', null)
+ }
+
+ var dimension = this.dimension()
+
+ this.$element
+ .removeClass('collapse')
+ .addClass('collapsing')[dimension](0)
+ .attr('aria-expanded', true)
+
+ this.$trigger
+ .removeClass('collapsed')
+ .attr('aria-expanded', true)
+
+ this.transitioning = 1
+
+ var complete = function () {
+ this.$element
+ .removeClass('collapsing')
+ .addClass('collapse in')[dimension]('')
+ this.transitioning = 0
+ this.$element
+ .trigger('shown.bs.collapse')
+ }
+
+ if (!$.support.transition) return complete.call(this)
+
+ var scrollSize = $.camelCase(['scroll', dimension].join('-'))
+
+ this.$element
+ .one('bsTransitionEnd', $.proxy(complete, this))
+ .emulateTransitionEnd(Collapse.TRANSITION_DURATION)[dimension](this.$element[0][scrollSize])
+ }
+
+ Collapse.prototype.hide = function () {
+ if (this.transitioning || !this.$element.hasClass('in')) return
+
+ var startEvent = $.Event('hide.bs.collapse')
+ this.$element.trigger(startEvent)
+ if (startEvent.isDefaultPrevented()) return
+
+ var dimension = this.dimension()
+
+ this.$element[dimension](this.$element[dimension]())[0].offsetHeight
+
+ this.$element
+ .addClass('collapsing')
+ .removeClass('collapse in')
+ .attr('aria-expanded', false)
+
+ this.$trigger
+ .addClass('collapsed')
+ .attr('aria-expanded', false)
+
+ this.transitioning = 1
+
+ var complete = function () {
+ this.transitioning = 0
+ this.$element
+ .removeClass('collapsing')
+ .addClass('collapse')
+ .trigger('hidden.bs.collapse')
+ }
+
+ if (!$.support.transition) return complete.call(this)
+
+ this.$element
+ [dimension](0)
+ .one('bsTransitionEnd', $.proxy(complete, this))
+ .emulateTransitionEnd(Collapse.TRANSITION_DURATION)
+ }
+
+ Collapse.prototype.toggle = function () {
+ this[this.$element.hasClass('in') ? 'hide' : 'show']()
+ }
+
+ Collapse.prototype.getParent = function () {
+ return $(document).find(this.options.parent)
+ .find('[data-toggle="collapse"][data-parent="' + this.options.parent + '"]')
+ .each($.proxy(function (i, element) {
+ var $element = $(element)
+ this.addAriaAndCollapsedClass(getTargetFromTrigger($element), $element)
+ }, this))
+ .end()
+ }
+
+ Collapse.prototype.addAriaAndCollapsedClass = function ($element, $trigger) {
+ var isOpen = $element.hasClass('in')
+
+ $element.attr('aria-expanded', isOpen)
+ $trigger
+ .toggleClass('collapsed', !isOpen)
+ .attr('aria-expanded', isOpen)
+ }
+
+ function getTargetFromTrigger($trigger) {
+ var href
+ var target = $trigger.attr('data-target')
+ || (href = $trigger.attr('href')) && href.replace(/.*(?=#[^\s]+$)/, '') // strip for ie7
+
+ return $(document).find(target)
+ }
+
+
+ // COLLAPSE PLUGIN DEFINITION
+ // ==========================
+
+ function Plugin(option) {
+ return this.each(function () {
+ var $this = $(this)
+ var data = $this.data('bs.collapse')
+ var options = $.extend({}, Collapse.DEFAULTS, $this.data(), typeof option == 'object' && option)
+
+ if (!data && options.toggle && /show|hide/.test(option)) options.toggle = false
+ if (!data) $this.data('bs.collapse', (data = new Collapse(this, options)))
+ if (typeof option == 'string') data[option]()
+ })
+ }
+
+ var old = $.fn.collapse
+
+ $.fn.collapse = Plugin
+ $.fn.collapse.Constructor = Collapse
+
+
+ // COLLAPSE NO CONFLICT
+ // ====================
+
+ $.fn.collapse.noConflict = function () {
+ $.fn.collapse = old
+ return this
+ }
+
+
+ // COLLAPSE DATA-API
+ // =================
+
+ $(document).on('click.bs.collapse.data-api', '[data-toggle="collapse"]', function (e) {
+ var $this = $(this)
+
+ if (!$this.attr('data-target')) e.preventDefault()
+
+ var $target = getTargetFromTrigger($this)
+ var data = $target.data('bs.collapse')
+ var option = data ? 'toggle' : $this.data()
+
+ Plugin.call($target, option)
+ })
+
+}(jQuery);
+
+/* ========================================================================
+ * Bootstrap: dropdown.js v3.4.1
+ * https://getbootstrap.com/docs/3.4/javascript/#dropdowns
+ * ========================================================================
+ * Copyright 2011-2019 Twitter, Inc.
+ * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE)
+ * ======================================================================== */
+
+
++function ($) {
+ 'use strict';
+
+ // DROPDOWN CLASS DEFINITION
+ // =========================
+
+ var backdrop = '.dropdown-backdrop'
+ var toggle = '[data-toggle="dropdown"]'
+ var Dropdown = function (element) {
+ $(element).on('click.bs.dropdown', this.toggle)
+ }
+
+ Dropdown.VERSION = '3.4.1'
+
+ function getParent($this) {
+ var selector = $this.attr('data-target')
+
+ if (!selector) {
+ selector = $this.attr('href')
+ selector = selector && /#[A-Za-z]/.test(selector) && selector.replace(/.*(?=#[^\s]*$)/, '') // strip for ie7
+ }
+
+ var $parent = selector !== '#' ? $(document).find(selector) : null
+
+ return $parent && $parent.length ? $parent : $this.parent()
+ }
+
+ function clearMenus(e) {
+ if (e && e.which === 3) return
+ $(backdrop).remove()
+ $(toggle).each(function () {
+ var $this = $(this)
+ var $parent = getParent($this)
+ var relatedTarget = { relatedTarget: this }
+
+ if (!$parent.hasClass('open')) return
+
+ if (e && e.type == 'click' && /input|textarea/i.test(e.target.tagName) && $.contains($parent[0], e.target)) return
+
+ $parent.trigger(e = $.Event('hide.bs.dropdown', relatedTarget))
+
+ if (e.isDefaultPrevented()) return
+
+ $this.attr('aria-expanded', 'false')
+ $parent.removeClass('open').trigger($.Event('hidden.bs.dropdown', relatedTarget))
+ })
+ }
+
+ Dropdown.prototype.toggle = function (e) {
+ var $this = $(this)
+
+ if ($this.is('.disabled, :disabled')) return
+
+ var $parent = getParent($this)
+ var isActive = $parent.hasClass('open')
+
+ clearMenus()
+
+ if (!isActive) {
+ if ('ontouchstart' in document.documentElement && !$parent.closest('.navbar-nav').length) {
+ // if mobile we use a backdrop because click events don't delegate
+ $(document.createElement('div'))
+ .addClass('dropdown-backdrop')
+ .insertAfter($(this))
+ .on('click', clearMenus)
+ }
+
+ var relatedTarget = { relatedTarget: this }
+ $parent.trigger(e = $.Event('show.bs.dropdown', relatedTarget))
+
+ if (e.isDefaultPrevented()) return
+
+ $this
+ .trigger('focus')
+ .attr('aria-expanded', 'true')
+
+ $parent
+ .toggleClass('open')
+ .trigger($.Event('shown.bs.dropdown', relatedTarget))
+ }
+
+ return false
+ }
+
+ Dropdown.prototype.keydown = function (e) {
+ if (!/(38|40|27|32)/.test(e.which) || /input|textarea/i.test(e.target.tagName)) return
+
+ var $this = $(this)
+
+ e.preventDefault()
+ e.stopPropagation()
+
+ if ($this.is('.disabled, :disabled')) return
+
+ var $parent = getParent($this)
+ var isActive = $parent.hasClass('open')
+
+ if (!isActive && e.which != 27 || isActive && e.which == 27) {
+ if (e.which == 27) $parent.find(toggle).trigger('focus')
+ return $this.trigger('click')
+ }
+
+ var desc = ' li:not(.disabled):visible a'
+ var $items = $parent.find('.dropdown-menu' + desc)
+
+ if (!$items.length) return
+
+ var index = $items.index(e.target)
+
+ if (e.which == 38 && index > 0) index-- // up
+ if (e.which == 40 && index < $items.length - 1) index++ // down
+ if (!~index) index = 0
+
+ $items.eq(index).trigger('focus')
+ }
+
+
+ // DROPDOWN PLUGIN DEFINITION
+ // ==========================
+
+ function Plugin(option) {
+ return this.each(function () {
+ var $this = $(this)
+ var data = $this.data('bs.dropdown')
+
+ if (!data) $this.data('bs.dropdown', (data = new Dropdown(this)))
+ if (typeof option == 'string') data[option].call($this)
+ })
+ }
+
+ var old = $.fn.dropdown
+
+ $.fn.dropdown = Plugin
+ $.fn.dropdown.Constructor = Dropdown
+
+
+ // DROPDOWN NO CONFLICT
+ // ====================
+
+ $.fn.dropdown.noConflict = function () {
+ $.fn.dropdown = old
+ return this
+ }
+
+
+ // APPLY TO STANDARD DROPDOWN ELEMENTS
+ // ===================================
+
+ $(document)
+ .on('click.bs.dropdown.data-api', clearMenus)
+ .on('click.bs.dropdown.data-api', '.dropdown form', function (e) { e.stopPropagation() })
+ .on('click.bs.dropdown.data-api', toggle, Dropdown.prototype.toggle)
+ .on('keydown.bs.dropdown.data-api', toggle, Dropdown.prototype.keydown)
+ .on('keydown.bs.dropdown.data-api', '.dropdown-menu', Dropdown.prototype.keydown)
+
+}(jQuery);
+
+/* ========================================================================
+ * Bootstrap: modal.js v3.4.1
+ * https://getbootstrap.com/docs/3.4/javascript/#modals
+ * ========================================================================
+ * Copyright 2011-2019 Twitter, Inc.
+ * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE)
+ * ======================================================================== */
+
+
++function ($) {
+ 'use strict';
+
+ // MODAL CLASS DEFINITION
+ // ======================
+
+ var Modal = function (element, options) {
+ this.options = options
+ this.$body = $(document.body)
+ this.$element = $(element)
+ this.$dialog = this.$element.find('.modal-dialog')
+ this.$backdrop = null
+ this.isShown = null
+ this.originalBodyPad = null
+ this.scrollbarWidth = 0
+ this.ignoreBackdropClick = false
+ this.fixedContent = '.navbar-fixed-top, .navbar-fixed-bottom'
+
+ if (this.options.remote) {
+ this.$element
+ .find('.modal-content')
+ .load(this.options.remote, $.proxy(function () {
+ this.$element.trigger('loaded.bs.modal')
+ }, this))
+ }
+ }
+
+ Modal.VERSION = '3.4.1'
+
+ Modal.TRANSITION_DURATION = 300
+ Modal.BACKDROP_TRANSITION_DURATION = 150
+
+ Modal.DEFAULTS = {
+ backdrop: true,
+ keyboard: true,
+ show: true
+ }
+
+ Modal.prototype.toggle = function (_relatedTarget) {
+ return this.isShown ? this.hide() : this.show(_relatedTarget)
+ }
+
+ Modal.prototype.show = function (_relatedTarget) {
+ var that = this
+ var e = $.Event('show.bs.modal', { relatedTarget: _relatedTarget })
+
+ this.$element.trigger(e)
+
+ if (this.isShown || e.isDefaultPrevented()) return
+
+ this.isShown = true
+
+ this.checkScrollbar()
+ this.setScrollbar()
+ this.$body.addClass('modal-open')
+
+ this.escape()
+ this.resize()
+
+ this.$element.on('click.dismiss.bs.modal', '[data-dismiss="modal"]', $.proxy(this.hide, this))
+
+ this.$dialog.on('mousedown.dismiss.bs.modal', function () {
+ that.$element.one('mouseup.dismiss.bs.modal', function (e) {
+ if ($(e.target).is(that.$element)) that.ignoreBackdropClick = true
+ })
+ })
+
+ this.backdrop(function () {
+ var transition = $.support.transition && that.$element.hasClass('fade')
+
+ if (!that.$element.parent().length) {
+ that.$element.appendTo(that.$body) // don't move modals dom position
+ }
+
+ that.$element
+ .show()
+ .scrollTop(0)
+
+ that.adjustDialog()
+
+ if (transition) {
+ that.$element[0].offsetWidth // force reflow
+ }
+
+ that.$element.addClass('in')
+
+ that.enforceFocus()
+
+ var e = $.Event('shown.bs.modal', { relatedTarget: _relatedTarget })
+
+ transition ?
+ that.$dialog // wait for modal to slide in
+ .one('bsTransitionEnd', function () {
+ that.$element.trigger('focus').trigger(e)
+ })
+ .emulateTransitionEnd(Modal.TRANSITION_DURATION) :
+ that.$element.trigger('focus').trigger(e)
+ })
+ }
+
+ Modal.prototype.hide = function (e) {
+ if (e) e.preventDefault()
+
+ e = $.Event('hide.bs.modal')
+
+ this.$element.trigger(e)
+
+ if (!this.isShown || e.isDefaultPrevented()) return
+
+ this.isShown = false
+
+ this.escape()
+ this.resize()
+
+ $(document).off('focusin.bs.modal')
+
+ this.$element
+ .removeClass('in')
+ .off('click.dismiss.bs.modal')
+ .off('mouseup.dismiss.bs.modal')
+
+ this.$dialog.off('mousedown.dismiss.bs.modal')
+
+ $.support.transition && this.$element.hasClass('fade') ?
+ this.$element
+ .one('bsTransitionEnd', $.proxy(this.hideModal, this))
+ .emulateTransitionEnd(Modal.TRANSITION_DURATION) :
+ this.hideModal()
+ }
+
+ Modal.prototype.enforceFocus = function () {
+ $(document)
+ .off('focusin.bs.modal') // guard against infinite focus loop
+ .on('focusin.bs.modal', $.proxy(function (e) {
+ if (document !== e.target &&
+ this.$element[0] !== e.target &&
+ !this.$element.has(e.target).length) {
+ this.$element.trigger('focus')
+ }
+ }, this))
+ }
+
+ Modal.prototype.escape = function () {
+ if (this.isShown && this.options.keyboard) {
+ this.$element.on('keydown.dismiss.bs.modal', $.proxy(function (e) {
+ e.which == 27 && this.hide()
+ }, this))
+ } else if (!this.isShown) {
+ this.$element.off('keydown.dismiss.bs.modal')
+ }
+ }
+
+ Modal.prototype.resize = function () {
+ if (this.isShown) {
+ $(window).on('resize.bs.modal', $.proxy(this.handleUpdate, this))
+ } else {
+ $(window).off('resize.bs.modal')
+ }
+ }
+
+ Modal.prototype.hideModal = function () {
+ var that = this
+ this.$element.hide()
+ this.backdrop(function () {
+ that.$body.removeClass('modal-open')
+ that.resetAdjustments()
+ that.resetScrollbar()
+ that.$element.trigger('hidden.bs.modal')
+ })
+ }
+
+ Modal.prototype.removeBackdrop = function () {
+ this.$backdrop && this.$backdrop.remove()
+ this.$backdrop = null
+ }
+
+ Modal.prototype.backdrop = function (callback) {
+ var that = this
+ var animate = this.$element.hasClass('fade') ? 'fade' : ''
+
+ if (this.isShown && this.options.backdrop) {
+ var doAnimate = $.support.transition && animate
+
+ this.$backdrop = $(document.createElement('div'))
+ .addClass('modal-backdrop ' + animate)
+ .appendTo(this.$body)
+
+ this.$element.on('click.dismiss.bs.modal', $.proxy(function (e) {
+ if (this.ignoreBackdropClick) {
+ this.ignoreBackdropClick = false
+ return
+ }
+ if (e.target !== e.currentTarget) return
+ this.options.backdrop == 'static'
+ ? this.$element[0].focus()
+ : this.hide()
+ }, this))
+
+ if (doAnimate) this.$backdrop[0].offsetWidth // force reflow
+
+ this.$backdrop.addClass('in')
+
+ if (!callback) return
+
+ doAnimate ?
+ this.$backdrop
+ .one('bsTransitionEnd', callback)
+ .emulateTransitionEnd(Modal.BACKDROP_TRANSITION_DURATION) :
+ callback()
+
+ } else if (!this.isShown && this.$backdrop) {
+ this.$backdrop.removeClass('in')
+
+ var callbackRemove = function () {
+ that.removeBackdrop()
+ callback && callback()
+ }
+ $.support.transition && this.$element.hasClass('fade') ?
+ this.$backdrop
+ .one('bsTransitionEnd', callbackRemove)
+ .emulateTransitionEnd(Modal.BACKDROP_TRANSITION_DURATION) :
+ callbackRemove()
+
+ } else if (callback) {
+ callback()
+ }
+ }
+
+ // these following methods are used to handle overflowing modals
+
+ Modal.prototype.handleUpdate = function () {
+ this.adjustDialog()
+ }
+
+ Modal.prototype.adjustDialog = function () {
+ var modalIsOverflowing = this.$element[0].scrollHeight > document.documentElement.clientHeight
+
+ this.$element.css({
+ paddingLeft: !this.bodyIsOverflowing && modalIsOverflowing ? this.scrollbarWidth : '',
+ paddingRight: this.bodyIsOverflowing && !modalIsOverflowing ? this.scrollbarWidth : ''
+ })
+ }
+
+ Modal.prototype.resetAdjustments = function () {
+ this.$element.css({
+ paddingLeft: '',
+ paddingRight: ''
+ })
+ }
+
+ Modal.prototype.checkScrollbar = function () {
+ var fullWindowWidth = window.innerWidth
+ if (!fullWindowWidth) { // workaround for missing window.innerWidth in IE8
+ var documentElementRect = document.documentElement.getBoundingClientRect()
+ fullWindowWidth = documentElementRect.right - Math.abs(documentElementRect.left)
+ }
+ this.bodyIsOverflowing = document.body.clientWidth < fullWindowWidth
+ this.scrollbarWidth = this.measureScrollbar()
+ }
+
+ Modal.prototype.setScrollbar = function () {
+ var bodyPad = parseInt((this.$body.css('padding-right') || 0), 10)
+ this.originalBodyPad = document.body.style.paddingRight || ''
+ var scrollbarWidth = this.scrollbarWidth
+ if (this.bodyIsOverflowing) {
+ this.$body.css('padding-right', bodyPad + scrollbarWidth)
+ $(this.fixedContent).each(function (index, element) {
+ var actualPadding = element.style.paddingRight
+ var calculatedPadding = $(element).css('padding-right')
+ $(element)
+ .data('padding-right', actualPadding)
+ .css('padding-right', parseFloat(calculatedPadding) + scrollbarWidth + 'px')
+ })
+ }
+ }
+
+ Modal.prototype.resetScrollbar = function () {
+ this.$body.css('padding-right', this.originalBodyPad)
+ $(this.fixedContent).each(function (index, element) {
+ var padding = $(element).data('padding-right')
+ $(element).removeData('padding-right')
+ element.style.paddingRight = padding ? padding : ''
+ })
+ }
+
+ Modal.prototype.measureScrollbar = function () { // thx walsh
+ var scrollDiv = document.createElement('div')
+ scrollDiv.className = 'modal-scrollbar-measure'
+ this.$body.append(scrollDiv)
+ var scrollbarWidth = scrollDiv.offsetWidth - scrollDiv.clientWidth
+ this.$body[0].removeChild(scrollDiv)
+ return scrollbarWidth
+ }
+
+
+ // MODAL PLUGIN DEFINITION
+ // =======================
+
+ function Plugin(option, _relatedTarget) {
+ return this.each(function () {
+ var $this = $(this)
+ var data = $this.data('bs.modal')
+ var options = $.extend({}, Modal.DEFAULTS, $this.data(), typeof option == 'object' && option)
+
+ if (!data) $this.data('bs.modal', (data = new Modal(this, options)))
+ if (typeof option == 'string') data[option](_relatedTarget)
+ else if (options.show) data.show(_relatedTarget)
+ })
+ }
+
+ var old = $.fn.modal
+
+ $.fn.modal = Plugin
+ $.fn.modal.Constructor = Modal
+
+
+ // MODAL NO CONFLICT
+ // =================
+
+ $.fn.modal.noConflict = function () {
+ $.fn.modal = old
+ return this
+ }
+
+
+ // MODAL DATA-API
+ // ==============
+
+ $(document).on('click.bs.modal.data-api', '[data-toggle="modal"]', function (e) {
+ var $this = $(this)
+ var href = $this.attr('href')
+ var target = $this.attr('data-target') ||
+ (href && href.replace(/.*(?=#[^\s]+$)/, '')) // strip for ie7
+
+ var $target = $(document).find(target)
+ var option = $target.data('bs.modal') ? 'toggle' : $.extend({ remote: !/#/.test(href) && href }, $target.data(), $this.data())
+
+ if ($this.is('a')) e.preventDefault()
+
+ $target.one('show.bs.modal', function (showEvent) {
+ if (showEvent.isDefaultPrevented()) return // only register focus restorer if modal will actually get shown
+ $target.one('hidden.bs.modal', function () {
+ $this.is(':visible') && $this.trigger('focus')
+ })
+ })
+ Plugin.call($target, option, this)
+ })
+
+}(jQuery);
+
+/* ========================================================================
+ * Bootstrap: tooltip.js v3.4.1
+ * https://getbootstrap.com/docs/3.4/javascript/#tooltip
+ * Inspired by the original jQuery.tipsy by Jason Frame
+ * ========================================================================
+ * Copyright 2011-2019 Twitter, Inc.
+ * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE)
+ * ======================================================================== */
+
++function ($) {
+ 'use strict';
+
+ var DISALLOWED_ATTRIBUTES = ['sanitize', 'whiteList', 'sanitizeFn']
+
+ var uriAttrs = [
+ 'background',
+ 'cite',
+ 'href',
+ 'itemtype',
+ 'longdesc',
+ 'poster',
+ 'src',
+ 'xlink:href'
+ ]
+
+ var ARIA_ATTRIBUTE_PATTERN = /^aria-[\w-]*$/i
+
+ var DefaultWhitelist = {
+ // Global attributes allowed on any supplied element below.
+ '*': ['class', 'dir', 'id', 'lang', 'role', ARIA_ATTRIBUTE_PATTERN],
+ a: ['target', 'href', 'title', 'rel'],
+ area: [],
+ b: [],
+ br: [],
+ col: [],
+ code: [],
+ div: [],
+ em: [],
+ hr: [],
+ h1: [],
+ h2: [],
+ h3: [],
+ h4: [],
+ h5: [],
+ h6: [],
+ i: [],
+ img: ['src', 'alt', 'title', 'width', 'height'],
+ li: [],
+ ol: [],
+ p: [],
+ pre: [],
+ s: [],
+ small: [],
+ span: [],
+ sub: [],
+ sup: [],
+ strong: [],
+ u: [],
+ ul: []
+ }
+
+ /**
+ * A pattern that recognizes a commonly useful subset of URLs that are safe.
+ *
+ * Shoutout to Angular 7 https://github.com/angular/angular/blob/7.2.4/packages/core/src/sanitization/url_sanitizer.ts
+ */
+ var SAFE_URL_PATTERN = /^(?:(?:https?|mailto|ftp|tel|file):|[^&:/?#]*(?:[/?#]|$))/gi
+
+ /**
+ * A pattern that matches safe data URLs. Only matches image, video and audio types.
+ *
+ * Shoutout to Angular 7 https://github.com/angular/angular/blob/7.2.4/packages/core/src/sanitization/url_sanitizer.ts
+ */
+ var DATA_URL_PATTERN = /^data:(?:image\/(?:bmp|gif|jpeg|jpg|png|tiff|webp)|video\/(?:mpeg|mp4|ogg|webm)|audio\/(?:mp3|oga|ogg|opus));base64,[a-z0-9+/]+=*$/i
+
+ function allowedAttribute(attr, allowedAttributeList) {
+ var attrName = attr.nodeName.toLowerCase()
+
+ if ($.inArray(attrName, allowedAttributeList) !== -1) {
+ if ($.inArray(attrName, uriAttrs) !== -1) {
+ return Boolean(attr.nodeValue.match(SAFE_URL_PATTERN) || attr.nodeValue.match(DATA_URL_PATTERN))
+ }
+
+ return true
+ }
+
+ var regExp = $(allowedAttributeList).filter(function (index, value) {
+ return value instanceof RegExp
+ })
+
+ // Check if a regular expression validates the attribute.
+ for (var i = 0, l = regExp.length; i < l; i++) {
+ if (attrName.match(regExp[i])) {
+ return true
+ }
+ }
+
+ return false
+ }
+
+ function sanitizeHtml(unsafeHtml, whiteList, sanitizeFn) {
+ if (unsafeHtml.length === 0) {
+ return unsafeHtml
+ }
+
+ if (sanitizeFn && typeof sanitizeFn === 'function') {
+ return sanitizeFn(unsafeHtml)
+ }
+
+ // IE 8 and below don't support createHTMLDocument
+ if (!document.implementation || !document.implementation.createHTMLDocument) {
+ return unsafeHtml
+ }
+
+ var createdDocument = document.implementation.createHTMLDocument('sanitization')
+ createdDocument.body.innerHTML = unsafeHtml
+
+ var whitelistKeys = $.map(whiteList, function (el, i) { return i })
+ var elements = $(createdDocument.body).find('*')
+
+ for (var i = 0, len = elements.length; i < len; i++) {
+ var el = elements[i]
+ var elName = el.nodeName.toLowerCase()
+
+ if ($.inArray(elName, whitelistKeys) === -1) {
+ el.parentNode.removeChild(el)
+
+ continue
+ }
+
+ var attributeList = $.map(el.attributes, function (el) { return el })
+ var whitelistedAttributes = [].concat(whiteList['*'] || [], whiteList[elName] || [])
+
+ for (var j = 0, len2 = attributeList.length; j < len2; j++) {
+ if (!allowedAttribute(attributeList[j], whitelistedAttributes)) {
+ el.removeAttribute(attributeList[j].nodeName)
+ }
+ }
+ }
+
+ return createdDocument.body.innerHTML
+ }
+
+ // TOOLTIP PUBLIC CLASS DEFINITION
+ // ===============================
+
+ var Tooltip = function (element, options) {
+ this.type = null
+ this.options = null
+ this.enabled = null
+ this.timeout = null
+ this.hoverState = null
+ this.$element = null
+ this.inState = null
+
+ this.init('tooltip', element, options)
+ }
+
+ Tooltip.VERSION = '3.4.1'
+
+ Tooltip.TRANSITION_DURATION = 150
+
+ Tooltip.DEFAULTS = {
+ animation: true,
+ placement: 'top',
+ selector: false,
+ template: '<div class="tooltip" role="tooltip"><div class="tooltip-arrow"></div><div class="tooltip-inner"></div></div>',
+ trigger: 'hover focus',
+ title: '',
+ delay: 0,
+ html: false,
+ container: false,
+ viewport: {
+ selector: 'body',
+ padding: 0
+ },
+ sanitize : true,
+ sanitizeFn : null,
+ whiteList : DefaultWhitelist
+ }
+
+ Tooltip.prototype.init = function (type, element, options) {
+ this.enabled = true
+ this.type = type
+ this.$element = $(element)
+ this.options = this.getOptions(options)
+ this.$viewport = this.options.viewport && $(document).find($.isFunction(this.options.viewport) ? this.options.viewport.call(this, this.$element) : (this.options.viewport.selector || this.options.viewport))
+ this.inState = { click: false, hover: false, focus: false }
+
+ if (this.$element[0] instanceof document.constructor && !this.options.selector) {
+ throw new Error('`selector` option must be specified when initializing ' + this.type + ' on the window.document object!')
+ }
+
+ var triggers = this.options.trigger.split(' ')
+
+ for (var i = triggers.length; i--;) {
+ var trigger = triggers[i]
+
+ if (trigger == 'click') {
+ this.$element.on('click.' + this.type, this.options.selector, $.proxy(this.toggle, this))
+ } else if (trigger != 'manual') {
+ var eventIn = trigger == 'hover' ? 'mouseenter' : 'focusin'
+ var eventOut = trigger == 'hover' ? 'mouseleave' : 'focusout'
+
+ this.$element.on(eventIn + '.' + this.type, this.options.selector, $.proxy(this.enter, this))
+ this.$element.on(eventOut + '.' + this.type, this.options.selector, $.proxy(this.leave, this))
+ }
+ }
+
+ this.options.selector ?
+ (this._options = $.extend({}, this.options, { trigger: 'manual', selector: '' })) :
+ this.fixTitle()
+ }
+
+ Tooltip.prototype.getDefaults = function () {
+ return Tooltip.DEFAULTS
+ }
+
+ Tooltip.prototype.getOptions = function (options) {
+ var dataAttributes = this.$element.data()
+
+ for (var dataAttr in dataAttributes) {
+ if (dataAttributes.hasOwnProperty(dataAttr) && $.inArray(dataAttr, DISALLOWED_ATTRIBUTES) !== -1) {
+ delete dataAttributes[dataAttr]
+ }
+ }
+
+ options = $.extend({}, this.getDefaults(), dataAttributes, options)
+
+ if (options.delay && typeof options.delay == 'number') {
+ options.delay = {
+ show: options.delay,
+ hide: options.delay
+ }
+ }
+
+ if (options.sanitize) {
+ options.template = sanitizeHtml(options.template, options.whiteList, options.sanitizeFn)
+ }
+
+ return options
+ }
+
+ Tooltip.prototype.getDelegateOptions = function () {
+ var options = {}
+ var defaults = this.getDefaults()
+
+ this._options && $.each(this._options, function (key, value) {
+ if (defaults[key] != value) options[key] = value
+ })
+
+ return options
+ }
+
+ Tooltip.prototype.enter = function (obj) {
+ var self = obj instanceof this.constructor ?
+ obj : $(obj.currentTarget).data('bs.' + this.type)
+
+ if (!self) {
+ self = new this.constructor(obj.currentTarget, this.getDelegateOptions())
+ $(obj.currentTarget).data('bs.' + this.type, self)
+ }
+
+ if (obj instanceof $.Event) {
+ self.inState[obj.type == 'focusin' ? 'focus' : 'hover'] = true
+ }
+
+ if (self.tip().hasClass('in') || self.hoverState == 'in') {
+ self.hoverState = 'in'
+ return
+ }
+
+ clearTimeout(self.timeout)
+
+ self.hoverState = 'in'
+
+ if (!self.options.delay || !self.options.delay.show) return self.show()
+
+ self.timeout = setTimeout(function () {
+ if (self.hoverState == 'in') self.show()
+ }, self.options.delay.show)
+ }
+
+ Tooltip.prototype.isInStateTrue = function () {
+ for (var key in this.inState) {
+ if (this.inState[key]) return true
+ }
+
+ return false
+ }
+
+ Tooltip.prototype.leave = function (obj) {
+ var self = obj instanceof this.constructor ?
+ obj : $(obj.currentTarget).data('bs.' + this.type)
+
+ if (!self) {
+ self = new this.constructor(obj.currentTarget, this.getDelegateOptions())
+ $(obj.currentTarget).data('bs.' + this.type, self)
+ }
+
+ if (obj instanceof $.Event) {
+ self.inState[obj.type == 'focusout' ? 'focus' : 'hover'] = false
+ }
+
+ if (self.isInStateTrue()) return
+
+ clearTimeout(self.timeout)
+
+ self.hoverState = 'out'
+
+ if (!self.options.delay || !self.options.delay.hide) return self.hide()
+
+ self.timeout = setTimeout(function () {
+ if (self.hoverState == 'out') self.hide()
+ }, self.options.delay.hide)
+ }
+
+ Tooltip.prototype.show = function () {
+ var e = $.Event('show.bs.' + this.type)
+
+ if (this.hasContent() && this.enabled) {
+ this.$element.trigger(e)
+
+ var inDom = $.contains(this.$element[0].ownerDocument.documentElement, this.$element[0])
+ if (e.isDefaultPrevented() || !inDom) return
+ var that = this
+
+ var $tip = this.tip()
+
+ var tipId = this.getUID(this.type)
+
+ this.setContent()
+ $tip.attr('id', tipId)
+ this.$element.attr('aria-describedby', tipId)
+
+ if (this.options.animation) $tip.addClass('fade')
+
+ var placement = typeof this.options.placement == 'function' ?
+ this.options.placement.call(this, $tip[0], this.$element[0]) :
+ this.options.placement
+
+ var autoToken = /\s?auto?\s?/i
+ var autoPlace = autoToken.test(placement)
+ if (autoPlace) placement = placement.replace(autoToken, '') || 'top'
+
+ $tip
+ .detach()
+ .css({ top: 0, left: 0, display: 'block' })
+ .addClass(placement)
+ .data('bs.' + this.type, this)
+
+ this.options.container ? $tip.appendTo($(document).find(this.options.container)) : $tip.insertAfter(this.$element)
+ this.$element.trigger('inserted.bs.' + this.type)
+
+ var pos = this.getPosition()
+ var actualWidth = $tip[0].offsetWidth
+ var actualHeight = $tip[0].offsetHeight
+
+ if (autoPlace) {
+ var orgPlacement = placement
+ var viewportDim = this.getPosition(this.$viewport)
+
+ placement = placement == 'bottom' && pos.bottom + actualHeight > viewportDim.bottom ? 'top' :
+ placement == 'top' && pos.top - actualHeight < viewportDim.top ? 'bottom' :
+ placement == 'right' && pos.right + actualWidth > viewportDim.width ? 'left' :
+ placement == 'left' && pos.left - actualWidth < viewportDim.left ? 'right' :
+ placement
+
+ $tip
+ .removeClass(orgPlacement)
+ .addClass(placement)
+ }
+
+ var calculatedOffset = this.getCalculatedOffset(placement, pos, actualWidth, actualHeight)
+
+ this.applyPlacement(calculatedOffset, placement)
+
+ var complete = function () {
+ var prevHoverState = that.hoverState
+ that.$element.trigger('shown.bs.' + that.type)
+ that.hoverState = null
+
+ if (prevHoverState == 'out') that.leave(that)
+ }
+
+ $.support.transition && this.$tip.hasClass('fade') ?
+ $tip
+ .one('bsTransitionEnd', complete)
+ .emulateTransitionEnd(Tooltip.TRANSITION_DURATION) :
+ complete()
+ }
+ }
+
+ Tooltip.prototype.applyPlacement = function (offset, placement) {
+ var $tip = this.tip()
+ var width = $tip[0].offsetWidth
+ var height = $tip[0].offsetHeight
+
+ // manually read margins because getBoundingClientRect includes difference
+ var marginTop = parseInt($tip.css('margin-top'), 10)
+ var marginLeft = parseInt($tip.css('margin-left'), 10)
+
+ // we must check for NaN for ie 8/9
+ if (isNaN(marginTop)) marginTop = 0
+ if (isNaN(marginLeft)) marginLeft = 0
+
+ offset.top += marginTop
+ offset.left += marginLeft
+
+ // $.fn.offset doesn't round pixel values
+ // so we use setOffset directly with our own function B-0
+ $.offset.setOffset($tip[0], $.extend({
+ using: function (props) {
+ $tip.css({
+ top: Math.round(props.top),
+ left: Math.round(props.left)
+ })
+ }
+ }, offset), 0)
+
+ $tip.addClass('in')
+
+ // check to see if placing tip in new offset caused the tip to resize itself
+ var actualWidth = $tip[0].offsetWidth
+ var actualHeight = $tip[0].offsetHeight
+
+ if (placement == 'top' && actualHeight != height) {
+ offset.top = offset.top + height - actualHeight
+ }
+
+ var delta = this.getViewportAdjustedDelta(placement, offset, actualWidth, actualHeight)
+
+ if (delta.left) offset.left += delta.left
+ else offset.top += delta.top
+
+ var isVertical = /top|bottom/.test(placement)
+ var arrowDelta = isVertical ? delta.left * 2 - width + actualWidth : delta.top * 2 - height + actualHeight
+ var arrowOffsetPosition = isVertical ? 'offsetWidth' : 'offsetHeight'
+
+ $tip.offset(offset)
+ this.replaceArrow(arrowDelta, $tip[0][arrowOffsetPosition], isVertical)
+ }
+
+ Tooltip.prototype.replaceArrow = function (delta, dimension, isVertical) {
+ this.arrow()
+ .css(isVertical ? 'left' : 'top', 50 * (1 - delta / dimension) + '%')
+ .css(isVertical ? 'top' : 'left', '')
+ }
+
+ Tooltip.prototype.setContent = function () {
+ var $tip = this.tip()
+ var title = this.getTitle()
+
+ if (this.options.html) {
+ if (this.options.sanitize) {
+ title = sanitizeHtml(title, this.options.whiteList, this.options.sanitizeFn)
+ }
+
+ $tip.find('.tooltip-inner').html(title)
+ } else {
+ $tip.find('.tooltip-inner').text(title)
+ }
+
+ $tip.removeClass('fade in top bottom left right')
+ }
+
+ Tooltip.prototype.hide = function (callback) {
+ var that = this
+ var $tip = $(this.$tip)
+ var e = $.Event('hide.bs.' + this.type)
+
+ function complete() {
+ if (that.hoverState != 'in') $tip.detach()
+ if (that.$element) { // TODO: Check whether guarding this code with this `if` is really necessary.
+ that.$element
+ .removeAttr('aria-describedby')
+ .trigger('hidden.bs.' + that.type)
+ }
+ callback && callback()
+ }
+
+ this.$element.trigger(e)
+
+ if (e.isDefaultPrevented()) return
+
+ $tip.removeClass('in')
+
+ $.support.transition && $tip.hasClass('fade') ?
+ $tip
+ .one('bsTransitionEnd', complete)
+ .emulateTransitionEnd(Tooltip.TRANSITION_DURATION) :
+ complete()
+
+ this.hoverState = null
+
+ return this
+ }
+
+ Tooltip.prototype.fixTitle = function () {
+ var $e = this.$element
+ if ($e.attr('title') || typeof $e.attr('data-original-title') != 'string') {
+ $e.attr('data-original-title', $e.attr('title') || '').attr('title', '')
+ }
+ }
+
+ Tooltip.prototype.hasContent = function () {
+ return this.getTitle()
+ }
+
+ Tooltip.prototype.getPosition = function ($element) {
+ $element = $element || this.$element
+
+ var el = $element[0]
+ var isBody = el.tagName == 'BODY'
+
+ var elRect = el.getBoundingClientRect()
+ if (elRect.width == null) {
+ // width and height are missing in IE8, so compute them manually; see https://github.com/twbs/bootstrap/issues/14093
+ elRect = $.extend({}, elRect, { width: elRect.right - elRect.left, height: elRect.bottom - elRect.top })
+ }
+ var isSvg = window.SVGElement && el instanceof window.SVGElement
+ // Avoid using $.offset() on SVGs since it gives incorrect results in jQuery 3.
+ // See https://github.com/twbs/bootstrap/issues/20280
+ var elOffset = isBody ? { top: 0, left: 0 } : (isSvg ? null : $element.offset())
+ var scroll = { scroll: isBody ? document.documentElement.scrollTop || document.body.scrollTop : $element.scrollTop() }
+ var outerDims = isBody ? { width: $(window).width(), height: $(window).height() } : null
+
+ return $.extend({}, elRect, scroll, outerDims, elOffset)
+ }
+
+ Tooltip.prototype.getCalculatedOffset = function (placement, pos, actualWidth, actualHeight) {
+ return placement == 'bottom' ? { top: pos.top + pos.height, left: pos.left + pos.width / 2 - actualWidth / 2 } :
+ placement == 'top' ? { top: pos.top - actualHeight, left: pos.left + pos.width / 2 - actualWidth / 2 } :
+ placement == 'left' ? { top: pos.top + pos.height / 2 - actualHeight / 2, left: pos.left - actualWidth } :
+ /* placement == 'right' */ { top: pos.top + pos.height / 2 - actualHeight / 2, left: pos.left + pos.width }
+
+ }
+
+ Tooltip.prototype.getViewportAdjustedDelta = function (placement, pos, actualWidth, actualHeight) {
+ var delta = { top: 0, left: 0 }
+ if (!this.$viewport) return delta
+
+ var viewportPadding = this.options.viewport && this.options.viewport.padding || 0
+ var viewportDimensions = this.getPosition(this.$viewport)
+
+ if (/right|left/.test(placement)) {
+ var topEdgeOffset = pos.top - viewportPadding - viewportDimensions.scroll
+ var bottomEdgeOffset = pos.top + viewportPadding - viewportDimensions.scroll + actualHeight
+ if (topEdgeOffset < viewportDimensions.top) { // top overflow
+ delta.top = viewportDimensions.top - topEdgeOffset
+ } else if (bottomEdgeOffset > viewportDimensions.top + viewportDimensions.height) { // bottom overflow
+ delta.top = viewportDimensions.top + viewportDimensions.height - bottomEdgeOffset
+ }
+ } else {
+ var leftEdgeOffset = pos.left - viewportPadding
+ var rightEdgeOffset = pos.left + viewportPadding + actualWidth
+ if (leftEdgeOffset < viewportDimensions.left) { // left overflow
+ delta.left = viewportDimensions.left - leftEdgeOffset
+ } else if (rightEdgeOffset > viewportDimensions.right) { // right overflow
+ delta.left = viewportDimensions.left + viewportDimensions.width - rightEdgeOffset
+ }
+ }
+
+ return delta
+ }
+
+ Tooltip.prototype.getTitle = function () {
+ var title
+ var $e = this.$element
+ var o = this.options
+
+ title = $e.attr('data-original-title')
+ || (typeof o.title == 'function' ? o.title.call($e[0]) : o.title)
+
+ return title
+ }
+
+ Tooltip.prototype.getUID = function (prefix) {
+ do prefix += ~~(Math.random() * 1000000)
+ while (document.getElementById(prefix))
+ return prefix
+ }
+
+ Tooltip.prototype.tip = function () {
+ if (!this.$tip) {
+ this.$tip = $(this.options.template)
+ if (this.$tip.length != 1) {
+ throw new Error(this.type + ' `template` option must consist of exactly 1 top-level element!')
+ }
+ }
+ return this.$tip
+ }
+
+ Tooltip.prototype.arrow = function () {
+ return (this.$arrow = this.$arrow || this.tip().find('.tooltip-arrow'))
+ }
+
+ Tooltip.prototype.enable = function () {
+ this.enabled = true
+ }
+
+ Tooltip.prototype.disable = function () {
+ this.enabled = false
+ }
+
+ Tooltip.prototype.toggleEnabled = function () {
+ this.enabled = !this.enabled
+ }
+
+ Tooltip.prototype.toggle = function (e) {
+ var self = this
+ if (e) {
+ self = $(e.currentTarget).data('bs.' + this.type)
+ if (!self) {
+ self = new this.constructor(e.currentTarget, this.getDelegateOptions())
+ $(e.currentTarget).data('bs.' + this.type, self)
+ }
+ }
+
+ if (e) {
+ self.inState.click = !self.inState.click
+ if (self.isInStateTrue()) self.enter(self)
+ else self.leave(self)
+ } else {
+ self.tip().hasClass('in') ? self.leave(self) : self.enter(self)
+ }
+ }
+
+ Tooltip.prototype.destroy = function () {
+ var that = this
+ clearTimeout(this.timeout)
+ this.hide(function () {
+ that.$element.off('.' + that.type).removeData('bs.' + that.type)
+ if (that.$tip) {
+ that.$tip.detach()
+ }
+ that.$tip = null
+ that.$arrow = null
+ that.$viewport = null
+ that.$element = null
+ })
+ }
+
+ Tooltip.prototype.sanitizeHtml = function (unsafeHtml) {
+ return sanitizeHtml(unsafeHtml, this.options.whiteList, this.options.sanitizeFn)
+ }
+
+ // TOOLTIP PLUGIN DEFINITION
+ // =========================
+
+ function Plugin(option) {
+ return this.each(function () {
+ var $this = $(this)
+ var data = $this.data('bs.tooltip')
+ var options = typeof option == 'object' && option
+
+ if (!data && /destroy|hide/.test(option)) return
+ if (!data) $this.data('bs.tooltip', (data = new Tooltip(this, options)))
+ if (typeof option == 'string') data[option]()
+ })
+ }
+
+ var old = $.fn.tooltip
+
+ $.fn.tooltip = Plugin
+ $.fn.tooltip.Constructor = Tooltip
+
+
+ // TOOLTIP NO CONFLICT
+ // ===================
+
+ $.fn.tooltip.noConflict = function () {
+ $.fn.tooltip = old
+ return this
+ }
+
+}(jQuery);
+
+/* ========================================================================
+ * Bootstrap: popover.js v3.4.1
+ * https://getbootstrap.com/docs/3.4/javascript/#popovers
+ * ========================================================================
+ * Copyright 2011-2019 Twitter, Inc.
+ * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE)
+ * ======================================================================== */
+
+
++function ($) {
+ 'use strict';
+
+ // POPOVER PUBLIC CLASS DEFINITION
+ // ===============================
+
+ var Popover = function (element, options) {
+ this.init('popover', element, options)
+ }
+
+ if (!$.fn.tooltip) throw new Error('Popover requires tooltip.js')
+
+ Popover.VERSION = '3.4.1'
+
+ Popover.DEFAULTS = $.extend({}, $.fn.tooltip.Constructor.DEFAULTS, {
+ placement: 'right',
+ trigger: 'click',
+ content: '',
+ template: '<div class="popover" role="tooltip"><div class="arrow"></div><h3 class="popover-title"></h3><div class="popover-content"></div></div>'
+ })
+
+
+ // NOTE: POPOVER EXTENDS tooltip.js
+ // ================================
+
+ Popover.prototype = $.extend({}, $.fn.tooltip.Constructor.prototype)
+
+ Popover.prototype.constructor = Popover
+
+ Popover.prototype.getDefaults = function () {
+ return Popover.DEFAULTS
+ }
+
+ Popover.prototype.setContent = function () {
+ var $tip = this.tip()
+ var title = this.getTitle()
+ var content = this.getContent()
+
+ if (this.options.html) {
+ var typeContent = typeof content
+
+ if (this.options.sanitize) {
+ title = this.sanitizeHtml(title)
+
+ if (typeContent === 'string') {
+ content = this.sanitizeHtml(content)
+ }
+ }
+
+ $tip.find('.popover-title').html(title)
+ $tip.find('.popover-content').children().detach().end()[
+ typeContent === 'string' ? 'html' : 'append'
+ ](content)
+ } else {
+ $tip.find('.popover-title').text(title)
+ $tip.find('.popover-content').children().detach().end().text(content)
+ }
+
+ $tip.removeClass('fade top bottom left right in')
+
+ // IE8 doesn't accept hiding via the `:empty` pseudo selector, we have to do
+ // this manually by checking the contents.
+ if (!$tip.find('.popover-title').html()) $tip.find('.popover-title').hide()
+ }
+
+ Popover.prototype.hasContent = function () {
+ return this.getTitle() || this.getContent()
+ }
+
+ Popover.prototype.getContent = function () {
+ var $e = this.$element
+ var o = this.options
+
+ return $e.attr('data-content')
+ || (typeof o.content == 'function' ?
+ o.content.call($e[0]) :
+ o.content)
+ }
+
+ Popover.prototype.arrow = function () {
+ return (this.$arrow = this.$arrow || this.tip().find('.arrow'))
+ }
+
+
+ // POPOVER PLUGIN DEFINITION
+ // =========================
+
+ function Plugin(option) {
+ return this.each(function () {
+ var $this = $(this)
+ var data = $this.data('bs.popover')
+ var options = typeof option == 'object' && option
+
+ if (!data && /destroy|hide/.test(option)) return
+ if (!data) $this.data('bs.popover', (data = new Popover(this, options)))
+ if (typeof option == 'string') data[option]()
+ })
+ }
+
+ var old = $.fn.popover
+
+ $.fn.popover = Plugin
+ $.fn.popover.Constructor = Popover
+
+
+ // POPOVER NO CONFLICT
+ // ===================
+
+ $.fn.popover.noConflict = function () {
+ $.fn.popover = old
+ return this
+ }
+
+}(jQuery);
+
+/* ========================================================================
+ * Bootstrap: scrollspy.js v3.4.1
+ * https://getbootstrap.com/docs/3.4/javascript/#scrollspy
+ * ========================================================================
+ * Copyright 2011-2019 Twitter, Inc.
+ * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE)
+ * ======================================================================== */
+
+
++function ($) {
+ 'use strict';
+
+ // SCROLLSPY CLASS DEFINITION
+ // ==========================
+
+ function ScrollSpy(element, options) {
+ this.$body = $(document.body)
+ this.$scrollElement = $(element).is(document.body) ? $(window) : $(element)
+ this.options = $.extend({}, ScrollSpy.DEFAULTS, options)
+ this.selector = (this.options.target || '') + ' .nav li > a'
+ this.offsets = []
+ this.targets = []
+ this.activeTarget = null
+ this.scrollHeight = 0
+
+ this.$scrollElement.on('scroll.bs.scrollspy', $.proxy(this.process, this))
+ this.refresh()
+ this.process()
+ }
+
+ ScrollSpy.VERSION = '3.4.1'
+
+ ScrollSpy.DEFAULTS = {
+ offset: 10
+ }
+
+ ScrollSpy.prototype.getScrollHeight = function () {
+ return this.$scrollElement[0].scrollHeight || Math.max(this.$body[0].scrollHeight, document.documentElement.scrollHeight)
+ }
+
+ ScrollSpy.prototype.refresh = function () {
+ var that = this
+ var offsetMethod = 'offset'
+ var offsetBase = 0
+
+ this.offsets = []
+ this.targets = []
+ this.scrollHeight = this.getScrollHeight()
+
+ if (!$.isWindow(this.$scrollElement[0])) {
+ offsetMethod = 'position'
+ offsetBase = this.$scrollElement.scrollTop()
+ }
+
+ this.$body
+ .find(this.selector)
+ .map(function () {
+ var $el = $(this)
+ var href = $el.data('target') || $el.attr('href')
+ var $href = /^#./.test(href) && $(href)
+
+ return ($href
+ && $href.length
+ && $href.is(':visible')
+ && [[$href[offsetMethod]().top + offsetBase, href]]) || null
+ })
+ .sort(function (a, b) { return a[0] - b[0] })
+ .each(function () {
+ that.offsets.push(this[0])
+ that.targets.push(this[1])
+ })
+ }
+
+ ScrollSpy.prototype.process = function () {
+ var scrollTop = this.$scrollElement.scrollTop() + this.options.offset
+ var scrollHeight = this.getScrollHeight()
+ var maxScroll = this.options.offset + scrollHeight - this.$scrollElement.height()
+ var offsets = this.offsets
+ var targets = this.targets
+ var activeTarget = this.activeTarget
+ var i
+
+ if (this.scrollHeight != scrollHeight) {
+ this.refresh()
+ }
+
+ if (scrollTop >= maxScroll) {
+ return activeTarget != (i = targets[targets.length - 1]) && this.activate(i)
+ }
+
+ if (activeTarget && scrollTop < offsets[0]) {
+ this.activeTarget = null
+ return this.clear()
+ }
+
+ for (i = offsets.length; i--;) {
+ activeTarget != targets[i]
+ && scrollTop >= offsets[i]
+ && (offsets[i + 1] === undefined || scrollTop < offsets[i + 1])
+ && this.activate(targets[i])
+ }
+ }
+
+ ScrollSpy.prototype.activate = function (target) {
+ this.activeTarget = target
+
+ this.clear()
+
+ var selector = this.selector +
+ '[data-target="' + target + '"],' +
+ this.selector + '[href="' + target + '"]'
+
+ var active = $(selector)
+ .parents('li')
+ .addClass('active')
+
+ if (active.parent('.dropdown-menu').length) {
+ active = active
+ .closest('li.dropdown')
+ .addClass('active')
+ }
+
+ active.trigger('activate.bs.scrollspy')
+ }
+
+ ScrollSpy.prototype.clear = function () {
+ $(this.selector)
+ .parentsUntil(this.options.target, '.active')
+ .removeClass('active')
+ }
+
+
+ // SCROLLSPY PLUGIN DEFINITION
+ // ===========================
+
+ function Plugin(option) {
+ return this.each(function () {
+ var $this = $(this)
+ var data = $this.data('bs.scrollspy')
+ var options = typeof option == 'object' && option
+
+ if (!data) $this.data('bs.scrollspy', (data = new ScrollSpy(this, options)))
+ if (typeof option == 'string') data[option]()
+ })
+ }
+
+ var old = $.fn.scrollspy
+
+ $.fn.scrollspy = Plugin
+ $.fn.scrollspy.Constructor = ScrollSpy
+
+
+ // SCROLLSPY NO CONFLICT
+ // =====================
+
+ $.fn.scrollspy.noConflict = function () {
+ $.fn.scrollspy = old
+ return this
+ }
+
+
+ // SCROLLSPY DATA-API
+ // ==================
+
+ $(window).on('load.bs.scrollspy.data-api', function () {
+ $('[data-spy="scroll"]').each(function () {
+ var $spy = $(this)
+ Plugin.call($spy, $spy.data())
+ })
+ })
+
+}(jQuery);
+
+/* ========================================================================
+ * Bootstrap: tab.js v3.4.1
+ * https://getbootstrap.com/docs/3.4/javascript/#tabs
+ * ========================================================================
+ * Copyright 2011-2019 Twitter, Inc.
+ * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE)
+ * ======================================================================== */
+
+
++function ($) {
+ 'use strict';
+
+ // TAB CLASS DEFINITION
+ // ====================
+
+ var Tab = function (element) {
+ // jscs:disable requireDollarBeforejQueryAssignment
+ this.element = $(element)
+ // jscs:enable requireDollarBeforejQueryAssignment
+ }
+
+ Tab.VERSION = '3.4.1'
+
+ Tab.TRANSITION_DURATION = 150
+
+ Tab.prototype.show = function () {
+ var $this = this.element
+ var $ul = $this.closest('ul:not(.dropdown-menu)')
+ var selector = $this.data('target')
+
+ if (!selector) {
+ selector = $this.attr('href')
+ selector = selector && selector.replace(/.*(?=#[^\s]*$)/, '') // strip for ie7
+ }
+
+ if ($this.parent('li').hasClass('active')) return
+
+ var $previous = $ul.find('.active:last a')
+ var hideEvent = $.Event('hide.bs.tab', {
+ relatedTarget: $this[0]
+ })
+ var showEvent = $.Event('show.bs.tab', {
+ relatedTarget: $previous[0]
+ })
+
+ $previous.trigger(hideEvent)
+ $this.trigger(showEvent)
+
+ if (showEvent.isDefaultPrevented() || hideEvent.isDefaultPrevented()) return
+
+ var $target = $(document).find(selector)
+
+ this.activate($this.closest('li'), $ul)
+ this.activate($target, $target.parent(), function () {
+ $previous.trigger({
+ type: 'hidden.bs.tab',
+ relatedTarget: $this[0]
+ })
+ $this.trigger({
+ type: 'shown.bs.tab',
+ relatedTarget: $previous[0]
+ })
+ })
+ }
+
+ Tab.prototype.activate = function (element, container, callback) {
+ var $active = container.find('> .active')
+ var transition = callback
+ && $.support.transition
+ && ($active.length && $active.hasClass('fade') || !!container.find('> .fade').length)
+
+ function next() {
+ $active
+ .removeClass('active')
+ .find('> .dropdown-menu > .active')
+ .removeClass('active')
+ .end()
+ .find('[data-toggle="tab"]')
+ .attr('aria-expanded', false)
+
+ element
+ .addClass('active')
+ .find('[data-toggle="tab"]')
+ .attr('aria-expanded', true)
+
+ if (transition) {
+ element[0].offsetWidth // reflow for transition
+ element.addClass('in')
+ } else {
+ element.removeClass('fade')
+ }
+
+ if (element.parent('.dropdown-menu').length) {
+ element
+ .closest('li.dropdown')
+ .addClass('active')
+ .end()
+ .find('[data-toggle="tab"]')
+ .attr('aria-expanded', true)
+ }
+
+ callback && callback()
+ }
+
+ $active.length && transition ?
+ $active
+ .one('bsTransitionEnd', next)
+ .emulateTransitionEnd(Tab.TRANSITION_DURATION) :
+ next()
+
+ $active.removeClass('in')
+ }
+
+
+ // TAB PLUGIN DEFINITION
+ // =====================
+
+ function Plugin(option) {
+ return this.each(function () {
+ var $this = $(this)
+ var data = $this.data('bs.tab')
+
+ if (!data) $this.data('bs.tab', (data = new Tab(this)))
+ if (typeof option == 'string') data[option]()
+ })
+ }
+
+ var old = $.fn.tab
+
+ $.fn.tab = Plugin
+ $.fn.tab.Constructor = Tab
+
+
+ // TAB NO CONFLICT
+ // ===============
+
+ $.fn.tab.noConflict = function () {
+ $.fn.tab = old
+ return this
+ }
+
+
+ // TAB DATA-API
+ // ============
+
+ var clickHandler = function (e) {
+ e.preventDefault()
+ Plugin.call($(this), 'show')
+ }
+
+ $(document)
+ .on('click.bs.tab.data-api', '[data-toggle="tab"]', clickHandler)
+ .on('click.bs.tab.data-api', '[data-toggle="pill"]', clickHandler)
+
+}(jQuery);
+
+/* ========================================================================
+ * Bootstrap: affix.js v3.4.1
+ * https://getbootstrap.com/docs/3.4/javascript/#affix
+ * ========================================================================
+ * Copyright 2011-2019 Twitter, Inc.
+ * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE)
+ * ======================================================================== */
+
+
++function ($) {
+ 'use strict';
+
+ // AFFIX CLASS DEFINITION
+ // ======================
+
+ var Affix = function (element, options) {
+ this.options = $.extend({}, Affix.DEFAULTS, options)
+
+ var target = this.options.target === Affix.DEFAULTS.target ? $(this.options.target) : $(document).find(this.options.target)
+
+ this.$target = target
+ .on('scroll.bs.affix.data-api', $.proxy(this.checkPosition, this))
+ .on('click.bs.affix.data-api', $.proxy(this.checkPositionWithEventLoop, this))
+
+ this.$element = $(element)
+ this.affixed = null
+ this.unpin = null
+ this.pinnedOffset = null
+
+ this.checkPosition()
+ }
+
+ Affix.VERSION = '3.4.1'
+
+ Affix.RESET = 'affix affix-top affix-bottom'
+
+ Affix.DEFAULTS = {
+ offset: 0,
+ target: window
+ }
+
+ Affix.prototype.getState = function (scrollHeight, height, offsetTop, offsetBottom) {
+ var scrollTop = this.$target.scrollTop()
+ var position = this.$element.offset()
+ var targetHeight = this.$target.height()
+
+ if (offsetTop != null && this.affixed == 'top') return scrollTop < offsetTop ? 'top' : false
+
+ if (this.affixed == 'bottom') {
+ if (offsetTop != null) return (scrollTop + this.unpin <= position.top) ? false : 'bottom'
+ return (scrollTop + targetHeight <= scrollHeight - offsetBottom) ? false : 'bottom'
+ }
+
+ var initializing = this.affixed == null
+ var colliderTop = initializing ? scrollTop : position.top
+ var colliderHeight = initializing ? targetHeight : height
+
+ if (offsetTop != null && scrollTop <= offsetTop) return 'top'
+ if (offsetBottom != null && (colliderTop + colliderHeight >= scrollHeight - offsetBottom)) return 'bottom'
+
+ return false
+ }
+
+ Affix.prototype.getPinnedOffset = function () {
+ if (this.pinnedOffset) return this.pinnedOffset
+ this.$element.removeClass(Affix.RESET).addClass('affix')
+ var scrollTop = this.$target.scrollTop()
+ var position = this.$element.offset()
+ return (this.pinnedOffset = position.top - scrollTop)
+ }
+
+ Affix.prototype.checkPositionWithEventLoop = function () {
+ setTimeout($.proxy(this.checkPosition, this), 1)
+ }
+
+ Affix.prototype.checkPosition = function () {
+ if (!this.$element.is(':visible')) return
+
+ var height = this.$element.height()
+ var offset = this.options.offset
+ var offsetTop = offset.top
+ var offsetBottom = offset.bottom
+ var scrollHeight = Math.max($(document).height(), $(document.body).height())
+
+ if (typeof offset != 'object') offsetBottom = offsetTop = offset
+ if (typeof offsetTop == 'function') offsetTop = offset.top(this.$element)
+ if (typeof offsetBottom == 'function') offsetBottom = offset.bottom(this.$element)
+
+ var affix = this.getState(scrollHeight, height, offsetTop, offsetBottom)
+
+ if (this.affixed != affix) {
+ if (this.unpin != null) this.$element.css('top', '')
+
+ var affixType = 'affix' + (affix ? '-' + affix : '')
+ var e = $.Event(affixType + '.bs.affix')
+
+ this.$element.trigger(e)
+
+ if (e.isDefaultPrevented()) return
+
+ this.affixed = affix
+ this.unpin = affix == 'bottom' ? this.getPinnedOffset() : null
+
+ this.$element
+ .removeClass(Affix.RESET)
+ .addClass(affixType)
+ .trigger(affixType.replace('affix', 'affixed') + '.bs.affix')
+ }
+
+ if (affix == 'bottom') {
+ this.$element.offset({
+ top: scrollHeight - height - offsetBottom
+ })
+ }
+ }
+
+
+ // AFFIX PLUGIN DEFINITION
+ // =======================
+
+ function Plugin(option) {
+ return this.each(function () {
+ var $this = $(this)
+ var data = $this.data('bs.affix')
+ var options = typeof option == 'object' && option
+
+ if (!data) $this.data('bs.affix', (data = new Affix(this, options)))
+ if (typeof option == 'string') data[option]()
+ })
+ }
+
+ var old = $.fn.affix
+
+ $.fn.affix = Plugin
+ $.fn.affix.Constructor = Affix
+
+
+ // AFFIX NO CONFLICT
+ // =================
+
+ $.fn.affix.noConflict = function () {
+ $.fn.affix = old
+ return this
+ }
+
+
+ // AFFIX DATA-API
+ // ==============
+
+ $(window).on('load', function () {
+ $('[data-spy="affix"]').each(function () {
+ var $spy = $(this)
+ var data = $spy.data()
+
+ data.offset = data.offset || {}
+
+ if (data.offsetBottom != null) data.offset.bottom = data.offsetBottom
+ if (data.offsetTop != null) data.offset.top = data.offsetTop
+
+ Plugin.call($spy, data)
+ })
+ })
+
+}(jQuery);
diff --git a/bitbake/lib/toaster/toastergui/static/js/bootstrap-3.4.1.min.js b/bitbake/lib/toaster/toastergui/static/js/bootstrap-3.4.1.min.js
new file mode 100644
index 0000000000..eb0a8b410f
--- /dev/null
+++ b/bitbake/lib/toaster/toastergui/static/js/bootstrap-3.4.1.min.js
@@ -0,0 +1,6 @@
+/*!
+ * Bootstrap v3.4.1 (https://getbootstrap.com/)
+ * Copyright 2011-2019 Twitter, Inc.
+ * Licensed under the MIT license
+ */
+if("undefined"==typeof jQuery)throw new Error("Bootstrap's JavaScript requires jQuery");!function(t){"use strict";var e=jQuery.fn.jquery.split(" ")[0].split(".");if(e[0]<2&&e[1]<9||1==e[0]&&9==e[1]&&e[2]<1||3<e[0])throw new Error("Bootstrap's JavaScript requires jQuery version 1.9.1 or higher, but lower than version 4")}(),function(n){"use strict";n.fn.emulateTransitionEnd=function(t){var e=!1,i=this;n(this).one("bsTransitionEnd",function(){e=!0});return setTimeout(function(){e||n(i).trigger(n.support.transition.end)},t),this},n(function(){n.support.transition=function o(){var t=document.createElement("bootstrap"),e={WebkitTransition:"webkitTransitionEnd",MozTransition:"transitionend",OTransition:"oTransitionEnd otransitionend",transition:"transitionend"};for(var i in e)if(t.style[i]!==undefined)return{end:e[i]};return!1}(),n.support.transition&&(n.event.special.bsTransitionEnd={bindType:n.support.transition.end,delegateType:n.support.transition.end,handle:function(t){if(n(t.target).is(this))return t.handleObj.handler.apply(this,arguments)}})})}(jQuery),function(s){"use strict";var e='[data-dismiss="alert"]',a=function(t){s(t).on("click",e,this.close)};a.VERSION="3.4.1",a.TRANSITION_DURATION=150,a.prototype.close=function(t){var e=s(this),i=e.attr("data-target");i||(i=(i=e.attr("href"))&&i.replace(/.*(?=#[^\s]*$)/,"")),i="#"===i?[]:i;var o=s(document).find(i);function n(){o.detach().trigger("closed.bs.alert").remove()}t&&t.preventDefault(),o.length||(o=e.closest(".alert")),o.trigger(t=s.Event("close.bs.alert")),t.isDefaultPrevented()||(o.removeClass("in"),s.support.transition&&o.hasClass("fade")?o.one("bsTransitionEnd",n).emulateTransitionEnd(a.TRANSITION_DURATION):n())};var t=s.fn.alert;s.fn.alert=function o(i){return this.each(function(){var t=s(this),e=t.data("bs.alert");e||t.data("bs.alert",e=new a(this)),"string"==typeof i&&e[i].call(t)})},s.fn.alert.Constructor=a,s.fn.alert.noConflict=function(){return s.fn.alert=t,this},s(document).on("click.bs.alert.data-api",e,a.prototype.close)}(jQuery),function(s){"use strict";var n=function(t,e){this.$element=s(t),this.options=s.extend({},n.DEFAULTS,e),this.isLoading=!1};function i(o){return this.each(function(){var t=s(this),e=t.data("bs.button"),i="object"==typeof o&&o;e||t.data("bs.button",e=new n(this,i)),"toggle"==o?e.toggle():o&&e.setState(o)})}n.VERSION="3.4.1",n.DEFAULTS={loadingText:"loading..."},n.prototype.setState=function(t){var e="disabled",i=this.$element,o=i.is("input")?"val":"html",n=i.data();t+="Text",null==n.resetText&&i.data("resetText",i[o]()),setTimeout(s.proxy(function(){i[o](null==n[t]?this.options[t]:n[t]),"loadingText"==t?(this.isLoading=!0,i.addClass(e).attr(e,e).prop(e,!0)):this.isLoading&&(this.isLoading=!1,i.removeClass(e).removeAttr(e).prop(e,!1))},this),0)},n.prototype.toggle=function(){var t=!0,e=this.$element.closest('[data-toggle="buttons"]');if(e.length){var i=this.$element.find("input");"radio"==i.prop("type")?(i.prop("checked")&&(t=!1),e.find(".active").removeClass("active"),this.$element.addClass("active")):"checkbox"==i.prop("type")&&(i.prop("checked")!==this.$element.hasClass("active")&&(t=!1),this.$element.toggleClass("active")),i.prop("checked",this.$element.hasClass("active")),t&&i.trigger("change")}else this.$element.attr("aria-pressed",!this.$element.hasClass("active")),this.$element.toggleClass("active")};var t=s.fn.button;s.fn.button=i,s.fn.button.Constructor=n,s.fn.button.noConflict=function(){return s.fn.button=t,this},s(document).on("click.bs.button.data-api",'[data-toggle^="button"]',function(t){var e=s(t.target).closest(".btn");i.call(e,"toggle"),s(t.target).is('input[type="radio"], input[type="checkbox"]')||(t.preventDefault(),e.is("input,button")?e.trigger("focus"):e.find("input:visible,button:visible").first().trigger("focus"))}).on("focus.bs.button.data-api blur.bs.button.data-api",'[data-toggle^="button"]',function(t){s(t.target).closest(".btn").toggleClass("focus",/^focus(in)?$/.test(t.type))})}(jQuery),function(p){"use strict";var c=function(t,e){this.$element=p(t),this.$indicators=this.$element.find(".carousel-indicators"),this.options=e,this.paused=null,this.sliding=null,this.interval=null,this.$active=null,this.$items=null,this.options.keyboard&&this.$element.on("keydown.bs.carousel",p.proxy(this.keydown,this)),"hover"==this.options.pause&&!("ontouchstart"in document.documentElement)&&this.$element.on("mouseenter.bs.carousel",p.proxy(this.pause,this)).on("mouseleave.bs.carousel",p.proxy(this.cycle,this))};function r(n){return this.each(function(){var t=p(this),e=t.data("bs.carousel"),i=p.extend({},c.DEFAULTS,t.data(),"object"==typeof n&&n),o="string"==typeof n?n:i.slide;e||t.data("bs.carousel",e=new c(this,i)),"number"==typeof n?e.to(n):o?e[o]():i.interval&&e.pause().cycle()})}c.VERSION="3.4.1",c.TRANSITION_DURATION=600,c.DEFAULTS={interval:5e3,pause:"hover",wrap:!0,keyboard:!0},c.prototype.keydown=function(t){if(!/input|textarea/i.test(t.target.tagName)){switch(t.which){case 37:this.prev();break;case 39:this.next();break;default:return}t.preventDefault()}},c.prototype.cycle=function(t){return t||(this.paused=!1),this.interval&&clearInterval(this.interval),this.options.interval&&!this.paused&&(this.interval=setInterval(p.proxy(this.next,this),this.options.interval)),this},c.prototype.getItemIndex=function(t){return this.$items=t.parent().children(".item"),this.$items.index(t||this.$active)},c.prototype.getItemForDirection=function(t,e){var i=this.getItemIndex(e);if(("prev"==t&&0===i||"next"==t&&i==this.$items.length-1)&&!this.options.wrap)return e;var o=(i+("prev"==t?-1:1))%this.$items.length;return this.$items.eq(o)},c.prototype.to=function(t){var e=this,i=this.getItemIndex(this.$active=this.$element.find(".item.active"));if(!(t>this.$items.length-1||t<0))return this.sliding?this.$element.one("slid.bs.carousel",function(){e.to(t)}):i==t?this.pause().cycle():this.slide(i<t?"next":"prev",this.$items.eq(t))},c.prototype.pause=function(t){return t||(this.paused=!0),this.$element.find(".next, .prev").length&&p.support.transition&&(this.$element.trigger(p.support.transition.end),this.cycle(!0)),this.interval=clearInterval(this.interval),this},c.prototype.next=function(){if(!this.sliding)return this.slide("next")},c.prototype.prev=function(){if(!this.sliding)return this.slide("prev")},c.prototype.slide=function(t,e){var i=this.$element.find(".item.active"),o=e||this.getItemForDirection(t,i),n=this.interval,s="next"==t?"left":"right",a=this;if(o.hasClass("active"))return this.sliding=!1;var r=o[0],l=p.Event("slide.bs.carousel",{relatedTarget:r,direction:s});if(this.$element.trigger(l),!l.isDefaultPrevented()){if(this.sliding=!0,n&&this.pause(),this.$indicators.length){this.$indicators.find(".active").removeClass("active");var h=p(this.$indicators.children()[this.getItemIndex(o)]);h&&h.addClass("active")}var d=p.Event("slid.bs.carousel",{relatedTarget:r,direction:s});return p.support.transition&&this.$element.hasClass("slide")?(o.addClass(t),"object"==typeof o&&o.length&&o[0].offsetWidth,i.addClass(s),o.addClass(s),i.one("bsTransitionEnd",function(){o.removeClass([t,s].join(" ")).addClass("active"),i.removeClass(["active",s].join(" ")),a.sliding=!1,setTimeout(function(){a.$element.trigger(d)},0)}).emulateTransitionEnd(c.TRANSITION_DURATION)):(i.removeClass("active"),o.addClass("active"),this.sliding=!1,this.$element.trigger(d)),n&&this.cycle(),this}};var t=p.fn.carousel;p.fn.carousel=r,p.fn.carousel.Constructor=c,p.fn.carousel.noConflict=function(){return p.fn.carousel=t,this};var e=function(t){var e=p(this),i=e.attr("href");i&&(i=i.replace(/.*(?=#[^\s]+$)/,""));var o=e.attr("data-target")||i,n=p(document).find(o);if(n.hasClass("carousel")){var s=p.extend({},n.data(),e.data()),a=e.attr("data-slide-to");a&&(s.interval=!1),r.call(n,s),a&&n.data("bs.carousel").to(a),t.preventDefault()}};p(document).on("click.bs.carousel.data-api","[data-slide]",e).on("click.bs.carousel.data-api","[data-slide-to]",e),p(window).on("load",function(){p('[data-ride="carousel"]').each(function(){var t=p(this);r.call(t,t.data())})})}(jQuery),function(a){"use strict";var r=function(t,e){this.$element=a(t),this.options=a.extend({},r.DEFAULTS,e),this.$trigger=a('[data-toggle="collapse"][href="#'+t.id+'"],[data-toggle="collapse"][data-target="#'+t.id+'"]'),this.transitioning=null,this.options.parent?this.$parent=this.getParent():this.addAriaAndCollapsedClass(this.$element,this.$trigger),this.options.toggle&&this.toggle()};function n(t){var e,i=t.attr("data-target")||(e=t.attr("href"))&&e.replace(/.*(?=#[^\s]+$)/,"");return a(document).find(i)}function l(o){return this.each(function(){var t=a(this),e=t.data("bs.collapse"),i=a.extend({},r.DEFAULTS,t.data(),"object"==typeof o&&o);!e&&i.toggle&&/show|hide/.test(o)&&(i.toggle=!1),e||t.data("bs.collapse",e=new r(this,i)),"string"==typeof o&&e[o]()})}r.VERSION="3.4.1",r.TRANSITION_DURATION=350,r.DEFAULTS={toggle:!0},r.prototype.dimension=function(){return this.$element.hasClass("width")?"width":"height"},r.prototype.show=function(){if(!this.transitioning&&!this.$element.hasClass("in")){var t,e=this.$parent&&this.$parent.children(".panel").children(".in, .collapsing");if(!(e&&e.length&&(t=e.data("bs.collapse"))&&t.transitioning)){var i=a.Event("show.bs.collapse");if(this.$element.trigger(i),!i.isDefaultPrevented()){e&&e.length&&(l.call(e,"hide"),t||e.data("bs.collapse",null));var o=this.dimension();this.$element.removeClass("collapse").addClass("collapsing")[o](0).attr("aria-expanded",!0),this.$trigger.removeClass("collapsed").attr("aria-expanded",!0),this.transitioning=1;var n=function(){this.$element.removeClass("collapsing").addClass("collapse in")[o](""),this.transitioning=0,this.$element.trigger("shown.bs.collapse")};if(!a.support.transition)return n.call(this);var s=a.camelCase(["scroll",o].join("-"));this.$element.one("bsTransitionEnd",a.proxy(n,this)).emulateTransitionEnd(r.TRANSITION_DURATION)[o](this.$element[0][s])}}}},r.prototype.hide=function(){if(!this.transitioning&&this.$element.hasClass("in")){var t=a.Event("hide.bs.collapse");if(this.$element.trigger(t),!t.isDefaultPrevented()){var e=this.dimension();this.$element[e](this.$element[e]())[0].offsetHeight,this.$element.addClass("collapsing").removeClass("collapse in").attr("aria-expanded",!1),this.$trigger.addClass("collapsed").attr("aria-expanded",!1),this.transitioning=1;var i=function(){this.transitioning=0,this.$element.removeClass("collapsing").addClass("collapse").trigger("hidden.bs.collapse")};if(!a.support.transition)return i.call(this);this.$element[e](0).one("bsTransitionEnd",a.proxy(i,this)).emulateTransitionEnd(r.TRANSITION_DURATION)}}},r.prototype.toggle=function(){this[this.$element.hasClass("in")?"hide":"show"]()},r.prototype.getParent=function(){return a(document).find(this.options.parent).find('[data-toggle="collapse"][data-parent="'+this.options.parent+'"]').each(a.proxy(function(t,e){var i=a(e);this.addAriaAndCollapsedClass(n(i),i)},this)).end()},r.prototype.addAriaAndCollapsedClass=function(t,e){var i=t.hasClass("in");t.attr("aria-expanded",i),e.toggleClass("collapsed",!i).attr("aria-expanded",i)};var t=a.fn.collapse;a.fn.collapse=l,a.fn.collapse.Constructor=r,a.fn.collapse.noConflict=function(){return a.fn.collapse=t,this},a(document).on("click.bs.collapse.data-api",'[data-toggle="collapse"]',function(t){var e=a(this);e.attr("data-target")||t.preventDefault();var i=n(e),o=i.data("bs.collapse")?"toggle":e.data();l.call(i,o)})}(jQuery),function(a){"use strict";var r='[data-toggle="dropdown"]',o=function(t){a(t).on("click.bs.dropdown",this.toggle)};function l(t){var e=t.attr("data-target");e||(e=(e=t.attr("href"))&&/#[A-Za-z]/.test(e)&&e.replace(/.*(?=#[^\s]*$)/,""));var i="#"!==e?a(document).find(e):null;return i&&i.length?i:t.parent()}function s(o){o&&3===o.which||(a(".dropdown-backdrop").remove(),a(r).each(function(){var t=a(this),e=l(t),i={relatedTarget:this};e.hasClass("open")&&(o&&"click"==o.type&&/input|textarea/i.test(o.target.tagName)&&a.contains(e[0],o.target)||(e.trigger(o=a.Event("hide.bs.dropdown",i)),o.isDefaultPrevented()||(t.attr("aria-expanded","false"),e.removeClass("open").trigger(a.Event("hidden.bs.dropdown",i)))))}))}o.VERSION="3.4.1",o.prototype.toggle=function(t){var e=a(this);if(!e.is(".disabled, :disabled")){var i=l(e),o=i.hasClass("open");if(s(),!o){"ontouchstart"in document.documentElement&&!i.closest(".navbar-nav").length&&a(document.createElement("div")).addClass("dropdown-backdrop").insertAfter(a(this)).on("click",s);var n={relatedTarget:this};if(i.trigger(t=a.Event("show.bs.dropdown",n)),t.isDefaultPrevented())return;e.trigger("focus").attr("aria-expanded","true"),i.toggleClass("open").trigger(a.Event("shown.bs.dropdown",n))}return!1}},o.prototype.keydown=function(t){if(/(38|40|27|32)/.test(t.which)&&!/input|textarea/i.test(t.target.tagName)){var e=a(this);if(t.preventDefault(),t.stopPropagation(),!e.is(".disabled, :disabled")){var i=l(e),o=i.hasClass("open");if(!o&&27!=t.which||o&&27==t.which)return 27==t.which&&i.find(r).trigger("focus"),e.trigger("click");var n=i.find(".dropdown-menu li:not(.disabled):visible a");if(n.length){var s=n.index(t.target);38==t.which&&0<s&&s--,40==t.which&&s<n.length-1&&s++,~s||(s=0),n.eq(s).trigger("focus")}}}};var t=a.fn.dropdown;a.fn.dropdown=function e(i){return this.each(function(){var t=a(this),e=t.data("bs.dropdown");e||t.data("bs.dropdown",e=new o(this)),"string"==typeof i&&e[i].call(t)})},a.fn.dropdown.Constructor=o,a.fn.dropdown.noConflict=function(){return a.fn.dropdown=t,this},a(document).on("click.bs.dropdown.data-api",s).on("click.bs.dropdown.data-api",".dropdown form",function(t){t.stopPropagation()}).on("click.bs.dropdown.data-api",r,o.prototype.toggle).on("keydown.bs.dropdown.data-api",r,o.prototype.keydown).on("keydown.bs.dropdown.data-api",".dropdown-menu",o.prototype.keydown)}(jQuery),function(a){"use strict";var s=function(t,e){this.options=e,this.$body=a(document.body),this.$element=a(t),this.$dialog=this.$element.find(".modal-dialog"),this.$backdrop=null,this.isShown=null,this.originalBodyPad=null,this.scrollbarWidth=0,this.ignoreBackdropClick=!1,this.fixedContent=".navbar-fixed-top, .navbar-fixed-bottom",this.options.remote&&this.$element.find(".modal-content").load(this.options.remote,a.proxy(function(){this.$element.trigger("loaded.bs.modal")},this))};function r(o,n){return this.each(function(){var t=a(this),e=t.data("bs.modal"),i=a.extend({},s.DEFAULTS,t.data(),"object"==typeof o&&o);e||t.data("bs.modal",e=new s(this,i)),"string"==typeof o?e[o](n):i.show&&e.show(n)})}s.VERSION="3.4.1",s.TRANSITION_DURATION=300,s.BACKDROP_TRANSITION_DURATION=150,s.DEFAULTS={backdrop:!0,keyboard:!0,show:!0},s.prototype.toggle=function(t){return this.isShown?this.hide():this.show(t)},s.prototype.show=function(i){var o=this,t=a.Event("show.bs.modal",{relatedTarget:i});this.$element.trigger(t),this.isShown||t.isDefaultPrevented()||(this.isShown=!0,this.checkScrollbar(),this.setScrollbar(),this.$body.addClass("modal-open"),this.escape(),this.resize(),this.$element.on("click.dismiss.bs.modal",'[data-dismiss="modal"]',a.proxy(this.hide,this)),this.$dialog.on("mousedown.dismiss.bs.modal",function(){o.$element.one("mouseup.dismiss.bs.modal",function(t){a(t.target).is(o.$element)&&(o.ignoreBackdropClick=!0)})}),this.backdrop(function(){var t=a.support.transition&&o.$element.hasClass("fade");o.$element.parent().length||o.$element.appendTo(o.$body),o.$element.show().scrollTop(0),o.adjustDialog(),t&&o.$element[0].offsetWidth,o.$element.addClass("in"),o.enforceFocus();var e=a.Event("shown.bs.modal",{relatedTarget:i});t?o.$dialog.one("bsTransitionEnd",function(){o.$element.trigger("focus").trigger(e)}).emulateTransitionEnd(s.TRANSITION_DURATION):o.$element.trigger("focus").trigger(e)}))},s.prototype.hide=function(t){t&&t.preventDefault(),t=a.Event("hide.bs.modal"),this.$element.trigger(t),this.isShown&&!t.isDefaultPrevented()&&(this.isShown=!1,this.escape(),this.resize(),a(document).off("focusin.bs.modal"),this.$element.removeClass("in").off("click.dismiss.bs.modal").off("mouseup.dismiss.bs.modal"),this.$dialog.off("mousedown.dismiss.bs.modal"),a.support.transition&&this.$element.hasClass("fade")?this.$element.one("bsTransitionEnd",a.proxy(this.hideModal,this)).emulateTransitionEnd(s.TRANSITION_DURATION):this.hideModal())},s.prototype.enforceFocus=function(){a(document).off("focusin.bs.modal").on("focusin.bs.modal",a.proxy(function(t){document===t.target||this.$element[0]===t.target||this.$element.has(t.target).length||this.$element.trigger("focus")},this))},s.prototype.escape=function(){this.isShown&&this.options.keyboard?this.$element.on("keydown.dismiss.bs.modal",a.proxy(function(t){27==t.which&&this.hide()},this)):this.isShown||this.$element.off("keydown.dismiss.bs.modal")},s.prototype.resize=function(){this.isShown?a(window).on("resize.bs.modal",a.proxy(this.handleUpdate,this)):a(window).off("resize.bs.modal")},s.prototype.hideModal=function(){var t=this;this.$element.hide(),this.backdrop(function(){t.$body.removeClass("modal-open"),t.resetAdjustments(),t.resetScrollbar(),t.$element.trigger("hidden.bs.modal")})},s.prototype.removeBackdrop=function(){this.$backdrop&&this.$backdrop.remove(),this.$backdrop=null},s.prototype.backdrop=function(t){var e=this,i=this.$element.hasClass("fade")?"fade":"";if(this.isShown&&this.options.backdrop){var o=a.support.transition&&i;if(this.$backdrop=a(document.createElement("div")).addClass("modal-backdrop "+i).appendTo(this.$body),this.$element.on("click.dismiss.bs.modal",a.proxy(function(t){this.ignoreBackdropClick?this.ignoreBackdropClick=!1:t.target===t.currentTarget&&("static"==this.options.backdrop?this.$element[0].focus():this.hide())},this)),o&&this.$backdrop[0].offsetWidth,this.$backdrop.addClass("in"),!t)return;o?this.$backdrop.one("bsTransitionEnd",t).emulateTransitionEnd(s.BACKDROP_TRANSITION_DURATION):t()}else if(!this.isShown&&this.$backdrop){this.$backdrop.removeClass("in");var n=function(){e.removeBackdrop(),t&&t()};a.support.transition&&this.$element.hasClass("fade")?this.$backdrop.one("bsTransitionEnd",n).emulateTransitionEnd(s.BACKDROP_TRANSITION_DURATION):n()}else t&&t()},s.prototype.handleUpdate=function(){this.adjustDialog()},s.prototype.adjustDialog=function(){var t=this.$element[0].scrollHeight>document.documentElement.clientHeight;this.$element.css({paddingLeft:!this.bodyIsOverflowing&&t?this.scrollbarWidth:"",paddingRight:this.bodyIsOverflowing&&!t?this.scrollbarWidth:""})},s.prototype.resetAdjustments=function(){this.$element.css({paddingLeft:"",paddingRight:""})},s.prototype.checkScrollbar=function(){var t=window.innerWidth;if(!t){var e=document.documentElement.getBoundingClientRect();t=e.right-Math.abs(e.left)}this.bodyIsOverflowing=document.body.clientWidth<t,this.scrollbarWidth=this.measureScrollbar()},s.prototype.setScrollbar=function(){var t=parseInt(this.$body.css("padding-right")||0,10);this.originalBodyPad=document.body.style.paddingRight||"";var n=this.scrollbarWidth;this.bodyIsOverflowing&&(this.$body.css("padding-right",t+n),a(this.fixedContent).each(function(t,e){var i=e.style.paddingRight,o=a(e).css("padding-right");a(e).data("padding-right",i).css("padding-right",parseFloat(o)+n+"px")}))},s.prototype.resetScrollbar=function(){this.$body.css("padding-right",this.originalBodyPad),a(this.fixedContent).each(function(t,e){var i=a(e).data("padding-right");a(e).removeData("padding-right"),e.style.paddingRight=i||""})},s.prototype.measureScrollbar=function(){var t=document.createElement("div");t.className="modal-scrollbar-measure",this.$body.append(t);var e=t.offsetWidth-t.clientWidth;return this.$body[0].removeChild(t),e};var t=a.fn.modal;a.fn.modal=r,a.fn.modal.Constructor=s,a.fn.modal.noConflict=function(){return a.fn.modal=t,this},a(document).on("click.bs.modal.data-api",'[data-toggle="modal"]',function(t){var e=a(this),i=e.attr("href"),o=e.attr("data-target")||i&&i.replace(/.*(?=#[^\s]+$)/,""),n=a(document).find(o),s=n.data("bs.modal")?"toggle":a.extend({remote:!/#/.test(i)&&i},n.data(),e.data());e.is("a")&&t.preventDefault(),n.one("show.bs.modal",function(t){t.isDefaultPrevented()||n.one("hidden.bs.modal",function(){e.is(":visible")&&e.trigger("focus")})}),r.call(n,s,this)})}(jQuery),function(g){"use strict";var o=["sanitize","whiteList","sanitizeFn"],a=["background","cite","href","itemtype","longdesc","poster","src","xlink:href"],t={"*":["class","dir","id","lang","role",/^aria-[\w-]*$/i],a:["target","href","title","rel"],area:[],b:[],br:[],col:[],code:[],div:[],em:[],hr:[],h1:[],h2:[],h3:[],h4:[],h5:[],h6:[],i:[],img:["src","alt","title","width","height"],li:[],ol:[],p:[],pre:[],s:[],small:[],span:[],sub:[],sup:[],strong:[],u:[],ul:[]},r=/^(?:(?:https?|mailto|ftp|tel|file):|[^&:/?#]*(?:[/?#]|$))/gi,l=/^data:(?:image\/(?:bmp|gif|jpeg|jpg|png|tiff|webp)|video\/(?:mpeg|mp4|ogg|webm)|audio\/(?:mp3|oga|ogg|opus));base64,[a-z0-9+/]+=*$/i;function u(t,e){var i=t.nodeName.toLowerCase();if(-1!==g.inArray(i,e))return-1===g.inArray(i,a)||Boolean(t.nodeValue.match(r)||t.nodeValue.match(l));for(var o=g(e).filter(function(t,e){return e instanceof RegExp}),n=0,s=o.length;n<s;n++)if(i.match(o[n]))return!0;return!1}function n(t,e,i){if(0===t.length)return t;if(i&&"function"==typeof i)return i(t);if(!document.implementation||!document.implementation.createHTMLDocument)return t;var o=document.implementation.createHTMLDocument("sanitization");o.body.innerHTML=t;for(var n=g.map(e,function(t,e){return e}),s=g(o.body).find("*"),a=0,r=s.length;a<r;a++){var l=s[a],h=l.nodeName.toLowerCase();if(-1!==g.inArray(h,n))for(var d=g.map(l.attributes,function(t){return t}),p=[].concat(e["*"]||[],e[h]||[]),c=0,f=d.length;c<f;c++)u(d[c],p)||l.removeAttribute(d[c].nodeName);else l.parentNode.removeChild(l)}return o.body.innerHTML}var m=function(t,e){this.type=null,this.options=null,this.enabled=null,this.timeout=null,this.hoverState=null,this.$element=null,this.inState=null,this.init("tooltip",t,e)};m.VERSION="3.4.1",m.TRANSITION_DURATION=150,m.DEFAULTS={animation:!0,placement:"top",selector:!1,template:'<div class="tooltip" role="tooltip"><div class="tooltip-arrow"></div><div class="tooltip-inner"></div></div>',trigger:"hover focus",title:"",delay:0,html:!1,container:!1,viewport:{selector:"body",padding:0},sanitize:!0,sanitizeFn:null,whiteList:t},m.prototype.init=function(t,e,i){if(this.enabled=!0,this.type=t,this.$element=g(e),this.options=this.getOptions(i),this.$viewport=this.options.viewport&&g(document).find(g.isFunction(this.options.viewport)?this.options.viewport.call(this,this.$element):this.options.viewport.selector||this.options.viewport),this.inState={click:!1,hover:!1,focus:!1},this.$element[0]instanceof document.constructor&&!this.options.selector)throw new Error("`selector` option must be specified when initializing "+this.type+" on the window.document object!");for(var o=this.options.trigger.split(" "),n=o.length;n--;){var s=o[n];if("click"==s)this.$element.on("click."+this.type,this.options.selector,g.proxy(this.toggle,this));else if("manual"!=s){var a="hover"==s?"mouseenter":"focusin",r="hover"==s?"mouseleave":"focusout";this.$element.on(a+"."+this.type,this.options.selector,g.proxy(this.enter,this)),this.$element.on(r+"."+this.type,this.options.selector,g.proxy(this.leave,this))}}this.options.selector?this._options=g.extend({},this.options,{trigger:"manual",selector:""}):this.fixTitle()},m.prototype.getDefaults=function(){return m.DEFAULTS},m.prototype.getOptions=function(t){var e=this.$element.data();for(var i in e)e.hasOwnProperty(i)&&-1!==g.inArray(i,o)&&delete e[i];return(t=g.extend({},this.getDefaults(),e,t)).delay&&"number"==typeof t.delay&&(t.delay={show:t.delay,hide:t.delay}),t.sanitize&&(t.template=n(t.template,t.whiteList,t.sanitizeFn)),t},m.prototype.getDelegateOptions=function(){var i={},o=this.getDefaults();return this._options&&g.each(this._options,function(t,e){o[t]!=e&&(i[t]=e)}),i},m.prototype.enter=function(t){var e=t instanceof this.constructor?t:g(t.currentTarget).data("bs."+this.type);if(e||(e=new this.constructor(t.currentTarget,this.getDelegateOptions()),g(t.currentTarget).data("bs."+this.type,e)),t instanceof g.Event&&(e.inState["focusin"==t.type?"focus":"hover"]=!0),e.tip().hasClass("in")||"in"==e.hoverState)e.hoverState="in";else{if(clearTimeout(e.timeout),e.hoverState="in",!e.options.delay||!e.options.delay.show)return e.show();e.timeout=setTimeout(function(){"in"==e.hoverState&&e.show()},e.options.delay.show)}},m.prototype.isInStateTrue=function(){for(var t in this.inState)if(this.inState[t])return!0;return!1},m.prototype.leave=function(t){var e=t instanceof this.constructor?t:g(t.currentTarget).data("bs."+this.type);if(e||(e=new this.constructor(t.currentTarget,this.getDelegateOptions()),g(t.currentTarget).data("bs."+this.type,e)),t instanceof g.Event&&(e.inState["focusout"==t.type?"focus":"hover"]=!1),!e.isInStateTrue()){if(clearTimeout(e.timeout),e.hoverState="out",!e.options.delay||!e.options.delay.hide)return e.hide();e.timeout=setTimeout(function(){"out"==e.hoverState&&e.hide()},e.options.delay.hide)}},m.prototype.show=function(){var t=g.Event("show.bs."+this.type);if(this.hasContent()&&this.enabled){this.$element.trigger(t);var e=g.contains(this.$element[0].ownerDocument.documentElement,this.$element[0]);if(t.isDefaultPrevented()||!e)return;var i=this,o=this.tip(),n=this.getUID(this.type);this.setContent(),o.attr("id",n),this.$element.attr("aria-describedby",n),this.options.animation&&o.addClass("fade");var s="function"==typeof this.options.placement?this.options.placement.call(this,o[0],this.$element[0]):this.options.placement,a=/\s?auto?\s?/i,r=a.test(s);r&&(s=s.replace(a,"")||"top"),o.detach().css({top:0,left:0,display:"block"}).addClass(s).data("bs."+this.type,this),this.options.container?o.appendTo(g(document).find(this.options.container)):o.insertAfter(this.$element),this.$element.trigger("inserted.bs."+this.type);var l=this.getPosition(),h=o[0].offsetWidth,d=o[0].offsetHeight;if(r){var p=s,c=this.getPosition(this.$viewport);s="bottom"==s&&l.bottom+d>c.bottom?"top":"top"==s&&l.top-d<c.top?"bottom":"right"==s&&l.right+h>c.width?"left":"left"==s&&l.left-h<c.left?"right":s,o.removeClass(p).addClass(s)}var f=this.getCalculatedOffset(s,l,h,d);this.applyPlacement(f,s);var u=function(){var t=i.hoverState;i.$element.trigger("shown.bs."+i.type),i.hoverState=null,"out"==t&&i.leave(i)};g.support.transition&&this.$tip.hasClass("fade")?o.one("bsTransitionEnd",u).emulateTransitionEnd(m.TRANSITION_DURATION):u()}},m.prototype.applyPlacement=function(t,e){var i=this.tip(),o=i[0].offsetWidth,n=i[0].offsetHeight,s=parseInt(i.css("margin-top"),10),a=parseInt(i.css("margin-left"),10);isNaN(s)&&(s=0),isNaN(a)&&(a=0),t.top+=s,t.left+=a,g.offset.setOffset(i[0],g.extend({using:function(t){i.css({top:Math.round(t.top),left:Math.round(t.left)})}},t),0),i.addClass("in");var r=i[0].offsetWidth,l=i[0].offsetHeight;"top"==e&&l!=n&&(t.top=t.top+n-l);var h=this.getViewportAdjustedDelta(e,t,r,l);h.left?t.left+=h.left:t.top+=h.top;var d=/top|bottom/.test(e),p=d?2*h.left-o+r:2*h.top-n+l,c=d?"offsetWidth":"offsetHeight";i.offset(t),this.replaceArrow(p,i[0][c],d)},m.prototype.replaceArrow=function(t,e,i){this.arrow().css(i?"left":"top",50*(1-t/e)+"%").css(i?"top":"left","")},m.prototype.setContent=function(){var t=this.tip(),e=this.getTitle();this.options.html?(this.options.sanitize&&(e=n(e,this.options.whiteList,this.options.sanitizeFn)),t.find(".tooltip-inner").html(e)):t.find(".tooltip-inner").text(e),t.removeClass("fade in top bottom left right")},m.prototype.hide=function(t){var e=this,i=g(this.$tip),o=g.Event("hide.bs."+this.type);function n(){"in"!=e.hoverState&&i.detach(),e.$element&&e.$element.removeAttr("aria-describedby").trigger("hidden.bs."+e.type),t&&t()}if(this.$element.trigger(o),!o.isDefaultPrevented())return i.removeClass("in"),g.support.transition&&i.hasClass("fade")?i.one("bsTransitionEnd",n).emulateTransitionEnd(m.TRANSITION_DURATION):n(),this.hoverState=null,this},m.prototype.fixTitle=function(){var t=this.$element;(t.attr("title")||"string"!=typeof t.attr("data-original-title"))&&t.attr("data-original-title",t.attr("title")||"").attr("title","")},m.prototype.hasContent=function(){return this.getTitle()},m.prototype.getPosition=function(t){var e=(t=t||this.$element)[0],i="BODY"==e.tagName,o=e.getBoundingClientRect();null==o.width&&(o=g.extend({},o,{width:o.right-o.left,height:o.bottom-o.top}));var n=window.SVGElement&&e instanceof window.SVGElement,s=i?{top:0,left:0}:n?null:t.offset(),a={scroll:i?document.documentElement.scrollTop||document.body.scrollTop:t.scrollTop()},r=i?{width:g(window).width(),height:g(window).height()}:null;return g.extend({},o,a,r,s)},m.prototype.getCalculatedOffset=function(t,e,i,o){return"bottom"==t?{top:e.top+e.height,left:e.left+e.width/2-i/2}:"top"==t?{top:e.top-o,left:e.left+e.width/2-i/2}:"left"==t?{top:e.top+e.height/2-o/2,left:e.left-i}:{top:e.top+e.height/2-o/2,left:e.left+e.width}},m.prototype.getViewportAdjustedDelta=function(t,e,i,o){var n={top:0,left:0};if(!this.$viewport)return n;var s=this.options.viewport&&this.options.viewport.padding||0,a=this.getPosition(this.$viewport);if(/right|left/.test(t)){var r=e.top-s-a.scroll,l=e.top+s-a.scroll+o;r<a.top?n.top=a.top-r:l>a.top+a.height&&(n.top=a.top+a.height-l)}else{var h=e.left-s,d=e.left+s+i;h<a.left?n.left=a.left-h:d>a.right&&(n.left=a.left+a.width-d)}return n},m.prototype.getTitle=function(){var t=this.$element,e=this.options;return t.attr("data-original-title")||("function"==typeof e.title?e.title.call(t[0]):e.title)},m.prototype.getUID=function(t){for(;t+=~~(1e6*Math.random()),document.getElementById(t););return t},m.prototype.tip=function(){if(!this.$tip&&(this.$tip=g(this.options.template),1!=this.$tip.length))throw new Error(this.type+" `template` option must consist of exactly 1 top-level element!");return this.$tip},m.prototype.arrow=function(){return this.$arrow=this.$arrow||this.tip().find(".tooltip-arrow")},m.prototype.enable=function(){this.enabled=!0},m.prototype.disable=function(){this.enabled=!1},m.prototype.toggleEnabled=function(){this.enabled=!this.enabled},m.prototype.toggle=function(t){var e=this;t&&((e=g(t.currentTarget).data("bs."+this.type))||(e=new this.constructor(t.currentTarget,this.getDelegateOptions()),g(t.currentTarget).data("bs."+this.type,e))),t?(e.inState.click=!e.inState.click,e.isInStateTrue()?e.enter(e):e.leave(e)):e.tip().hasClass("in")?e.leave(e):e.enter(e)},m.prototype.destroy=function(){var t=this;clearTimeout(this.timeout),this.hide(function(){t.$element.off("."+t.type).removeData("bs."+t.type),t.$tip&&t.$tip.detach(),t.$tip=null,t.$arrow=null,t.$viewport=null,t.$element=null})},m.prototype.sanitizeHtml=function(t){return n(t,this.options.whiteList,this.options.sanitizeFn)};var e=g.fn.tooltip;g.fn.tooltip=function i(o){return this.each(function(){var t=g(this),e=t.data("bs.tooltip"),i="object"==typeof o&&o;!e&&/destroy|hide/.test(o)||(e||t.data("bs.tooltip",e=new m(this,i)),"string"==typeof o&&e[o]())})},g.fn.tooltip.Constructor=m,g.fn.tooltip.noConflict=function(){return g.fn.tooltip=e,this}}(jQuery),function(n){"use strict";var s=function(t,e){this.init("popover",t,e)};if(!n.fn.tooltip)throw new Error("Popover requires tooltip.js");s.VERSION="3.4.1",s.DEFAULTS=n.extend({},n.fn.tooltip.Constructor.DEFAULTS,{placement:"right",trigger:"click",content:"",template:'<div class="popover" role="tooltip"><div class="arrow"></div><h3 class="popover-title"></h3><div class="popover-content"></div></div>'}),((s.prototype=n.extend({},n.fn.tooltip.Constructor.prototype)).constructor=s).prototype.getDefaults=function(){return s.DEFAULTS},s.prototype.setContent=function(){var t=this.tip(),e=this.getTitle(),i=this.getContent();if(this.options.html){var o=typeof i;this.options.sanitize&&(e=this.sanitizeHtml(e),"string"===o&&(i=this.sanitizeHtml(i))),t.find(".popover-title").html(e),t.find(".popover-content").children().detach().end()["string"===o?"html":"append"](i)}else t.find(".popover-title").text(e),t.find(".popover-content").children().detach().end().text(i);t.removeClass("fade top bottom left right in"),t.find(".popover-title").html()||t.find(".popover-title").hide()},s.prototype.hasContent=function(){return this.getTitle()||this.getContent()},s.prototype.getContent=function(){var t=this.$element,e=this.options;return t.attr("data-content")||("function"==typeof e.content?e.content.call(t[0]):e.content)},s.prototype.arrow=function(){return this.$arrow=this.$arrow||this.tip().find(".arrow")};var t=n.fn.popover;n.fn.popover=function e(o){return this.each(function(){var t=n(this),e=t.data("bs.popover"),i="object"==typeof o&&o;!e&&/destroy|hide/.test(o)||(e||t.data("bs.popover",e=new s(this,i)),"string"==typeof o&&e[o]())})},n.fn.popover.Constructor=s,n.fn.popover.noConflict=function(){return n.fn.popover=t,this}}(jQuery),function(s){"use strict";function n(t,e){this.$body=s(document.body),this.$scrollElement=s(t).is(document.body)?s(window):s(t),this.options=s.extend({},n.DEFAULTS,e),this.selector=(this.options.target||"")+" .nav li > a",this.offsets=[],this.targets=[],this.activeTarget=null,this.scrollHeight=0,this.$scrollElement.on("scroll.bs.scrollspy",s.proxy(this.process,this)),this.refresh(),this.process()}function e(o){return this.each(function(){var t=s(this),e=t.data("bs.scrollspy"),i="object"==typeof o&&o;e||t.data("bs.scrollspy",e=new n(this,i)),"string"==typeof o&&e[o]()})}n.VERSION="3.4.1",n.DEFAULTS={offset:10},n.prototype.getScrollHeight=function(){return this.$scrollElement[0].scrollHeight||Math.max(this.$body[0].scrollHeight,document.documentElement.scrollHeight)},n.prototype.refresh=function(){var t=this,o="offset",n=0;this.offsets=[],this.targets=[],this.scrollHeight=this.getScrollHeight(),s.isWindow(this.$scrollElement[0])||(o="position",n=this.$scrollElement.scrollTop()),this.$body.find(this.selector).map(function(){var t=s(this),e=t.data("target")||t.attr("href"),i=/^#./.test(e)&&s(e);return i&&i.length&&i.is(":visible")&&[[i[o]().top+n,e]]||null}).sort(function(t,e){return t[0]-e[0]}).each(function(){t.offsets.push(this[0]),t.targets.push(this[1])})},n.prototype.process=function(){var t,e=this.$scrollElement.scrollTop()+this.options.offset,i=this.getScrollHeight(),o=this.options.offset+i-this.$scrollElement.height(),n=this.offsets,s=this.targets,a=this.activeTarget;if(this.scrollHeight!=i&&this.refresh(),o<=e)return a!=(t=s[s.length-1])&&this.activate(t);if(a&&e<n[0])return this.activeTarget=null,this.clear();for(t=n.length;t--;)a!=s[t]&&e>=n[t]&&(n[t+1]===undefined||e<n[t+1])&&this.activate(s[t])},n.prototype.activate=function(t){this.activeTarget=t,this.clear();var e=this.selector+'[data-target="'+t+'"],'+this.selector+'[href="'+t+'"]',i=s(e).parents("li").addClass("active");i.parent(".dropdown-menu").length&&(i=i.closest("li.dropdown").addClass("active")),i.trigger("activate.bs.scrollspy")},n.prototype.clear=function(){s(this.selector).parentsUntil(this.options.target,".active").removeClass("active")};var t=s.fn.scrollspy;s.fn.scrollspy=e,s.fn.scrollspy.Constructor=n,s.fn.scrollspy.noConflict=function(){return s.fn.scrollspy=t,this},s(window).on("load.bs.scrollspy.data-api",function(){s('[data-spy="scroll"]').each(function(){var t=s(this);e.call(t,t.data())})})}(jQuery),function(r){"use strict";var a=function(t){this.element=r(t)};function e(i){return this.each(function(){var t=r(this),e=t.data("bs.tab");e||t.data("bs.tab",e=new a(this)),"string"==typeof i&&e[i]()})}a.VERSION="3.4.1",a.TRANSITION_DURATION=150,a.prototype.show=function(){var t=this.element,e=t.closest("ul:not(.dropdown-menu)"),i=t.data("target");if(i||(i=(i=t.attr("href"))&&i.replace(/.*(?=#[^\s]*$)/,"")),!t.parent("li").hasClass("active")){var o=e.find(".active:last a"),n=r.Event("hide.bs.tab",{relatedTarget:t[0]}),s=r.Event("show.bs.tab",{relatedTarget:o[0]});if(o.trigger(n),t.trigger(s),!s.isDefaultPrevented()&&!n.isDefaultPrevented()){var a=r(document).find(i);this.activate(t.closest("li"),e),this.activate(a,a.parent(),function(){o.trigger({type:"hidden.bs.tab",relatedTarget:t[0]}),t.trigger({type:"shown.bs.tab",relatedTarget:o[0]})})}}},a.prototype.activate=function(t,e,i){var o=e.find("> .active"),n=i&&r.support.transition&&(o.length&&o.hasClass("fade")||!!e.find("> .fade").length);function s(){o.removeClass("active").find("> .dropdown-menu > .active").removeClass("active").end().find('[data-toggle="tab"]').attr("aria-expanded",!1),t.addClass("active").find('[data-toggle="tab"]').attr("aria-expanded",!0),n?(t[0].offsetWidth,t.addClass("in")):t.removeClass("fade"),t.parent(".dropdown-menu").length&&t.closest("li.dropdown").addClass("active").end().find('[data-toggle="tab"]').attr("aria-expanded",!0),i&&i()}o.length&&n?o.one("bsTransitionEnd",s).emulateTransitionEnd(a.TRANSITION_DURATION):s(),o.removeClass("in")};var t=r.fn.tab;r.fn.tab=e,r.fn.tab.Constructor=a,r.fn.tab.noConflict=function(){return r.fn.tab=t,this};var i=function(t){t.preventDefault(),e.call(r(this),"show")};r(document).on("click.bs.tab.data-api",'[data-toggle="tab"]',i).on("click.bs.tab.data-api",'[data-toggle="pill"]',i)}(jQuery),function(l){"use strict";var h=function(t,e){this.options=l.extend({},h.DEFAULTS,e);var i=this.options.target===h.DEFAULTS.target?l(this.options.target):l(document).find(this.options.target);this.$target=i.on("scroll.bs.affix.data-api",l.proxy(this.checkPosition,this)).on("click.bs.affix.data-api",l.proxy(this.checkPositionWithEventLoop,this)),this.$element=l(t),this.affixed=null,this.unpin=null,this.pinnedOffset=null,this.checkPosition()};function i(o){return this.each(function(){var t=l(this),e=t.data("bs.affix"),i="object"==typeof o&&o;e||t.data("bs.affix",e=new h(this,i)),"string"==typeof o&&e[o]()})}h.VERSION="3.4.1",h.RESET="affix affix-top affix-bottom",h.DEFAULTS={offset:0,target:window},h.prototype.getState=function(t,e,i,o){var n=this.$target.scrollTop(),s=this.$element.offset(),a=this.$target.height();if(null!=i&&"top"==this.affixed)return n<i&&"top";if("bottom"==this.affixed)return null!=i?!(n+this.unpin<=s.top)&&"bottom":!(n+a<=t-o)&&"bottom";var r=null==this.affixed,l=r?n:s.top;return null!=i&&n<=i?"top":null!=o&&t-o<=l+(r?a:e)&&"bottom"},h.prototype.getPinnedOffset=function(){if(this.pinnedOffset)return this.pinnedOffset;this.$element.removeClass(h.RESET).addClass("affix");var t=this.$target.scrollTop(),e=this.$element.offset();return this.pinnedOffset=e.top-t},h.prototype.checkPositionWithEventLoop=function(){setTimeout(l.proxy(this.checkPosition,this),1)},h.prototype.checkPosition=function(){if(this.$element.is(":visible")){var t=this.$element.height(),e=this.options.offset,i=e.top,o=e.bottom,n=Math.max(l(document).height(),l(document.body).height());"object"!=typeof e&&(o=i=e),"function"==typeof i&&(i=e.top(this.$element)),"function"==typeof o&&(o=e.bottom(this.$element));var s=this.getState(n,t,i,o);if(this.affixed!=s){null!=this.unpin&&this.$element.css("top","");var a="affix"+(s?"-"+s:""),r=l.Event(a+".bs.affix");if(this.$element.trigger(r),r.isDefaultPrevented())return;this.affixed=s,this.unpin="bottom"==s?this.getPinnedOffset():null,this.$element.removeClass(h.RESET).addClass(a).trigger(a.replace("affix","affixed")+".bs.affix")}"bottom"==s&&this.$element.offset({top:n-t-o})}};var t=l.fn.affix;l.fn.affix=i,l.fn.affix.Constructor=h,l.fn.affix.noConflict=function(){return l.fn.affix=t,this},l(window).on("load",function(){l('[data-spy="affix"]').each(function(){var t=l(this),e=t.data();e.offset=e.offset||{},null!=e.offsetBottom&&(e.offset.bottom=e.offsetBottom),null!=e.offsetTop&&(e.offset.top=e.offsetTop),i.call(t,e)})})}(jQuery); \ No newline at end of file
diff --git a/bitbake/lib/toaster/toastergui/static/js/bootstrap.js b/bitbake/lib/toaster/toastergui/static/js/bootstrap.js
deleted file mode 100644
index d47d640feb..0000000000
--- a/bitbake/lib/toaster/toastergui/static/js/bootstrap.js
+++ /dev/null
@@ -1,2363 +0,0 @@
-/*!
- * Bootstrap v3.3.6 (http://getbootstrap.com)
- * Copyright 2011-2016 Twitter, Inc.
- * Licensed under the MIT license
- */
-
-if (typeof jQuery === 'undefined') {
- throw new Error('Bootstrap\'s JavaScript requires jQuery')
-}
-
-+function ($) {
- 'use strict';
- var version = $.fn.jquery.split(' ')[0].split('.')
- if ((version[0] < 2 && version[1] < 9) || (version[0] == 1 && version[1] == 9 && version[2] < 1) || (version[0] > 2)) {
- throw new Error('Bootstrap\'s JavaScript requires jQuery version 1.9.1 or higher, but lower than version 3')
- }
-}(jQuery);
-
-/* ========================================================================
- * Bootstrap: transition.js v3.3.6
- * http://getbootstrap.com/javascript/#transitions
- * ========================================================================
- * Copyright 2011-2015 Twitter, Inc.
- * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE)
- * ======================================================================== */
-
-
-+function ($) {
- 'use strict';
-
- // CSS TRANSITION SUPPORT (Shoutout: http://www.modernizr.com/)
- // ============================================================
-
- function transitionEnd() {
- var el = document.createElement('bootstrap')
-
- var transEndEventNames = {
- WebkitTransition : 'webkitTransitionEnd',
- MozTransition : 'transitionend',
- OTransition : 'oTransitionEnd otransitionend',
- transition : 'transitionend'
- }
-
- for (var name in transEndEventNames) {
- if (el.style[name] !== undefined) {
- return { end: transEndEventNames[name] }
- }
- }
-
- return false // explicit for ie8 ( ._.)
- }
-
- // http://blog.alexmaccaw.com/css-transitions
- $.fn.emulateTransitionEnd = function (duration) {
- var called = false
- var $el = this
- $(this).one('bsTransitionEnd', function () { called = true })
- var callback = function () { if (!called) $($el).trigger($.support.transition.end) }
- setTimeout(callback, duration)
- return this
- }
-
- $(function () {
- $.support.transition = transitionEnd()
-
- if (!$.support.transition) return
-
- $.event.special.bsTransitionEnd = {
- bindType: $.support.transition.end,
- delegateType: $.support.transition.end,
- handle: function (e) {
- if ($(e.target).is(this)) return e.handleObj.handler.apply(this, arguments)
- }
- }
- })
-
-}(jQuery);
-
-/* ========================================================================
- * Bootstrap: alert.js v3.3.6
- * http://getbootstrap.com/javascript/#alerts
- * ========================================================================
- * Copyright 2011-2015 Twitter, Inc.
- * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE)
- * ======================================================================== */
-
-
-+function ($) {
- 'use strict';
-
- // ALERT CLASS DEFINITION
- // ======================
-
- var dismiss = '[data-dismiss="alert"]'
- var Alert = function (el) {
- $(el).on('click', dismiss, this.close)
- }
-
- Alert.VERSION = '3.3.6'
-
- Alert.TRANSITION_DURATION = 150
-
- Alert.prototype.close = function (e) {
- var $this = $(this)
- var selector = $this.attr('data-target')
-
- if (!selector) {
- selector = $this.attr('href')
- selector = selector && selector.replace(/.*(?=#[^\s]*$)/, '') // strip for ie7
- }
-
- var $parent = $(selector)
-
- if (e) e.preventDefault()
-
- if (!$parent.length) {
- $parent = $this.closest('.alert')
- }
-
- $parent.trigger(e = $.Event('close.bs.alert'))
-
- if (e.isDefaultPrevented()) return
-
- $parent.removeClass('in')
-
- function removeElement() {
- // detach from parent, fire event then clean up data
- $parent.detach().trigger('closed.bs.alert').remove()
- }
-
- $.support.transition && $parent.hasClass('fade') ?
- $parent
- .one('bsTransitionEnd', removeElement)
- .emulateTransitionEnd(Alert.TRANSITION_DURATION) :
- removeElement()
- }
-
-
- // ALERT PLUGIN DEFINITION
- // =======================
-
- function Plugin(option) {
- return this.each(function () {
- var $this = $(this)
- var data = $this.data('bs.alert')
-
- if (!data) $this.data('bs.alert', (data = new Alert(this)))
- if (typeof option == 'string') data[option].call($this)
- })
- }
-
- var old = $.fn.alert
-
- $.fn.alert = Plugin
- $.fn.alert.Constructor = Alert
-
-
- // ALERT NO CONFLICT
- // =================
-
- $.fn.alert.noConflict = function () {
- $.fn.alert = old
- return this
- }
-
-
- // ALERT DATA-API
- // ==============
-
- $(document).on('click.bs.alert.data-api', dismiss, Alert.prototype.close)
-
-}(jQuery);
-
-/* ========================================================================
- * Bootstrap: button.js v3.3.6
- * http://getbootstrap.com/javascript/#buttons
- * ========================================================================
- * Copyright 2011-2015 Twitter, Inc.
- * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE)
- * ======================================================================== */
-
-
-+function ($) {
- 'use strict';
-
- // BUTTON PUBLIC CLASS DEFINITION
- // ==============================
-
- var Button = function (element, options) {
- this.$element = $(element)
- this.options = $.extend({}, Button.DEFAULTS, options)
- this.isLoading = false
- }
-
- Button.VERSION = '3.3.6'
-
- Button.DEFAULTS = {
- loadingText: 'loading...'
- }
-
- Button.prototype.setState = function (state) {
- var d = 'disabled'
- var $el = this.$element
- var val = $el.is('input') ? 'val' : 'html'
- var data = $el.data()
-
- state += 'Text'
-
- if (data.resetText == null) $el.data('resetText', $el[val]())
-
- // push to event loop to allow forms to submit
- setTimeout($.proxy(function () {
- $el[val](data[state] == null ? this.options[state] : data[state])
-
- if (state == 'loadingText') {
- this.isLoading = true
- $el.addClass(d).attr(d, d)
- } else if (this.isLoading) {
- this.isLoading = false
- $el.removeClass(d).removeAttr(d)
- }
- }, this), 0)
- }
-
- Button.prototype.toggle = function () {
- var changed = true
- var $parent = this.$element.closest('[data-toggle="buttons"]')
-
- if ($parent.length) {
- var $input = this.$element.find('input')
- if ($input.prop('type') == 'radio') {
- if ($input.prop('checked')) changed = false
- $parent.find('.active').removeClass('active')
- this.$element.addClass('active')
- } else if ($input.prop('type') == 'checkbox') {
- if (($input.prop('checked')) !== this.$element.hasClass('active')) changed = false
- this.$element.toggleClass('active')
- }
- $input.prop('checked', this.$element.hasClass('active'))
- if (changed) $input.trigger('change')
- } else {
- this.$element.attr('aria-pressed', !this.$element.hasClass('active'))
- this.$element.toggleClass('active')
- }
- }
-
-
- // BUTTON PLUGIN DEFINITION
- // ========================
-
- function Plugin(option) {
- return this.each(function () {
- var $this = $(this)
- var data = $this.data('bs.button')
- var options = typeof option == 'object' && option
-
- if (!data) $this.data('bs.button', (data = new Button(this, options)))
-
- if (option == 'toggle') data.toggle()
- else if (option) data.setState(option)
- })
- }
-
- var old = $.fn.button
-
- $.fn.button = Plugin
- $.fn.button.Constructor = Button
-
-
- // BUTTON NO CONFLICT
- // ==================
-
- $.fn.button.noConflict = function () {
- $.fn.button = old
- return this
- }
-
-
- // BUTTON DATA-API
- // ===============
-
- $(document)
- .on('click.bs.button.data-api', '[data-toggle^="button"]', function (e) {
- var $btn = $(e.target)
- if (!$btn.hasClass('btn')) $btn = $btn.closest('.btn')
- Plugin.call($btn, 'toggle')
- if (!($(e.target).is('input[type="radio"]') || $(e.target).is('input[type="checkbox"]'))) e.preventDefault()
- })
- .on('focus.bs.button.data-api blur.bs.button.data-api', '[data-toggle^="button"]', function (e) {
- $(e.target).closest('.btn').toggleClass('focus', /^focus(in)?$/.test(e.type))
- })
-
-}(jQuery);
-
-/* ========================================================================
- * Bootstrap: carousel.js v3.3.6
- * http://getbootstrap.com/javascript/#carousel
- * ========================================================================
- * Copyright 2011-2015 Twitter, Inc.
- * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE)
- * ======================================================================== */
-
-
-+function ($) {
- 'use strict';
-
- // CAROUSEL CLASS DEFINITION
- // =========================
-
- var Carousel = function (element, options) {
- this.$element = $(element)
- this.$indicators = this.$element.find('.carousel-indicators')
- this.options = options
- this.paused = null
- this.sliding = null
- this.interval = null
- this.$active = null
- this.$items = null
-
- this.options.keyboard && this.$element.on('keydown.bs.carousel', $.proxy(this.keydown, this))
-
- this.options.pause == 'hover' && !('ontouchstart' in document.documentElement) && this.$element
- .on('mouseenter.bs.carousel', $.proxy(this.pause, this))
- .on('mouseleave.bs.carousel', $.proxy(this.cycle, this))
- }
-
- Carousel.VERSION = '3.3.6'
-
- Carousel.TRANSITION_DURATION = 600
-
- Carousel.DEFAULTS = {
- interval: 5000,
- pause: 'hover',
- wrap: true,
- keyboard: true
- }
-
- Carousel.prototype.keydown = function (e) {
- if (/input|textarea/i.test(e.target.tagName)) return
- switch (e.which) {
- case 37: this.prev(); break
- case 39: this.next(); break
- default: return
- }
-
- e.preventDefault()
- }
-
- Carousel.prototype.cycle = function (e) {
- e || (this.paused = false)
-
- this.interval && clearInterval(this.interval)
-
- this.options.interval
- && !this.paused
- && (this.interval = setInterval($.proxy(this.next, this), this.options.interval))
-
- return this
- }
-
- Carousel.prototype.getItemIndex = function (item) {
- this.$items = item.parent().children('.item')
- return this.$items.index(item || this.$active)
- }
-
- Carousel.prototype.getItemForDirection = function (direction, active) {
- var activeIndex = this.getItemIndex(active)
- var willWrap = (direction == 'prev' && activeIndex === 0)
- || (direction == 'next' && activeIndex == (this.$items.length - 1))
- if (willWrap && !this.options.wrap) return active
- var delta = direction == 'prev' ? -1 : 1
- var itemIndex = (activeIndex + delta) % this.$items.length
- return this.$items.eq(itemIndex)
- }
-
- Carousel.prototype.to = function (pos) {
- var that = this
- var activeIndex = this.getItemIndex(this.$active = this.$element.find('.item.active'))
-
- if (pos > (this.$items.length - 1) || pos < 0) return
-
- if (this.sliding) return this.$element.one('slid.bs.carousel', function () { that.to(pos) }) // yes, "slid"
- if (activeIndex == pos) return this.pause().cycle()
-
- return this.slide(pos > activeIndex ? 'next' : 'prev', this.$items.eq(pos))
- }
-
- Carousel.prototype.pause = function (e) {
- e || (this.paused = true)
-
- if (this.$element.find('.next, .prev').length && $.support.transition) {
- this.$element.trigger($.support.transition.end)
- this.cycle(true)
- }
-
- this.interval = clearInterval(this.interval)
-
- return this
- }
-
- Carousel.prototype.next = function () {
- if (this.sliding) return
- return this.slide('next')
- }
-
- Carousel.prototype.prev = function () {
- if (this.sliding) return
- return this.slide('prev')
- }
-
- Carousel.prototype.slide = function (type, next) {
- var $active = this.$element.find('.item.active')
- var $next = next || this.getItemForDirection(type, $active)
- var isCycling = this.interval
- var direction = type == 'next' ? 'left' : 'right'
- var that = this
-
- if ($next.hasClass('active')) return (this.sliding = false)
-
- var relatedTarget = $next[0]
- var slideEvent = $.Event('slide.bs.carousel', {
- relatedTarget: relatedTarget,
- direction: direction
- })
- this.$element.trigger(slideEvent)
- if (slideEvent.isDefaultPrevented()) return
-
- this.sliding = true
-
- isCycling && this.pause()
-
- if (this.$indicators.length) {
- this.$indicators.find('.active').removeClass('active')
- var $nextIndicator = $(this.$indicators.children()[this.getItemIndex($next)])
- $nextIndicator && $nextIndicator.addClass('active')
- }
-
- var slidEvent = $.Event('slid.bs.carousel', { relatedTarget: relatedTarget, direction: direction }) // yes, "slid"
- if ($.support.transition && this.$element.hasClass('slide')) {
- $next.addClass(type)
- $next[0].offsetWidth // force reflow
- $active.addClass(direction)
- $next.addClass(direction)
- $active
- .one('bsTransitionEnd', function () {
- $next.removeClass([type, direction].join(' ')).addClass('active')
- $active.removeClass(['active', direction].join(' '))
- that.sliding = false
- setTimeout(function () {
- that.$element.trigger(slidEvent)
- }, 0)
- })
- .emulateTransitionEnd(Carousel.TRANSITION_DURATION)
- } else {
- $active.removeClass('active')
- $next.addClass('active')
- this.sliding = false
- this.$element.trigger(slidEvent)
- }
-
- isCycling && this.cycle()
-
- return this
- }
-
-
- // CAROUSEL PLUGIN DEFINITION
- // ==========================
-
- function Plugin(option) {
- return this.each(function () {
- var $this = $(this)
- var data = $this.data('bs.carousel')
- var options = $.extend({}, Carousel.DEFAULTS, $this.data(), typeof option == 'object' && option)
- var action = typeof option == 'string' ? option : options.slide
-
- if (!data) $this.data('bs.carousel', (data = new Carousel(this, options)))
- if (typeof option == 'number') data.to(option)
- else if (action) data[action]()
- else if (options.interval) data.pause().cycle()
- })
- }
-
- var old = $.fn.carousel
-
- $.fn.carousel = Plugin
- $.fn.carousel.Constructor = Carousel
-
-
- // CAROUSEL NO CONFLICT
- // ====================
-
- $.fn.carousel.noConflict = function () {
- $.fn.carousel = old
- return this
- }
-
-
- // CAROUSEL DATA-API
- // =================
-
- var clickHandler = function (e) {
- var href
- var $this = $(this)
- var $target = $($this.attr('data-target') || (href = $this.attr('href')) && href.replace(/.*(?=#[^\s]+$)/, '')) // strip for ie7
- if (!$target.hasClass('carousel')) return
- var options = $.extend({}, $target.data(), $this.data())
- var slideIndex = $this.attr('data-slide-to')
- if (slideIndex) options.interval = false
-
- Plugin.call($target, options)
-
- if (slideIndex) {
- $target.data('bs.carousel').to(slideIndex)
- }
-
- e.preventDefault()
- }
-
- $(document)
- .on('click.bs.carousel.data-api', '[data-slide]', clickHandler)
- .on('click.bs.carousel.data-api', '[data-slide-to]', clickHandler)
-
- $(window).on('load', function () {
- $('[data-ride="carousel"]').each(function () {
- var $carousel = $(this)
- Plugin.call($carousel, $carousel.data())
- })
- })
-
-}(jQuery);
-
-/* ========================================================================
- * Bootstrap: collapse.js v3.3.6
- * http://getbootstrap.com/javascript/#collapse
- * ========================================================================
- * Copyright 2011-2015 Twitter, Inc.
- * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE)
- * ======================================================================== */
-
-
-+function ($) {
- 'use strict';
-
- // COLLAPSE PUBLIC CLASS DEFINITION
- // ================================
-
- var Collapse = function (element, options) {
- this.$element = $(element)
- this.options = $.extend({}, Collapse.DEFAULTS, options)
- this.$trigger = $('[data-toggle="collapse"][href="#' + element.id + '"],' +
- '[data-toggle="collapse"][data-target="#' + element.id + '"]')
- this.transitioning = null
-
- if (this.options.parent) {
- this.$parent = this.getParent()
- } else {
- this.addAriaAndCollapsedClass(this.$element, this.$trigger)
- }
-
- if (this.options.toggle) this.toggle()
- }
-
- Collapse.VERSION = '3.3.6'
-
- Collapse.TRANSITION_DURATION = 350
-
- Collapse.DEFAULTS = {
- toggle: true
- }
-
- Collapse.prototype.dimension = function () {
- var hasWidth = this.$element.hasClass('width')
- return hasWidth ? 'width' : 'height'
- }
-
- Collapse.prototype.show = function () {
- if (this.transitioning || this.$element.hasClass('in')) return
-
- var activesData
- var actives = this.$parent && this.$parent.children('.panel').children('.in, .collapsing')
-
- if (actives && actives.length) {
- activesData = actives.data('bs.collapse')
- if (activesData && activesData.transitioning) return
- }
-
- var startEvent = $.Event('show.bs.collapse')
- this.$element.trigger(startEvent)
- if (startEvent.isDefaultPrevented()) return
-
- if (actives && actives.length) {
- Plugin.call(actives, 'hide')
- activesData || actives.data('bs.collapse', null)
- }
-
- var dimension = this.dimension()
-
- this.$element
- .removeClass('collapse')
- .addClass('collapsing')[dimension](0)
- .attr('aria-expanded', true)
-
- this.$trigger
- .removeClass('collapsed')
- .attr('aria-expanded', true)
-
- this.transitioning = 1
-
- var complete = function () {
- this.$element
- .removeClass('collapsing')
- .addClass('collapse in')[dimension]('')
- this.transitioning = 0
- this.$element
- .trigger('shown.bs.collapse')
- }
-
- if (!$.support.transition) return complete.call(this)
-
- var scrollSize = $.camelCase(['scroll', dimension].join('-'))
-
- this.$element
- .one('bsTransitionEnd', $.proxy(complete, this))
- .emulateTransitionEnd(Collapse.TRANSITION_DURATION)[dimension](this.$element[0][scrollSize])
- }
-
- Collapse.prototype.hide = function () {
- if (this.transitioning || !this.$element.hasClass('in')) return
-
- var startEvent = $.Event('hide.bs.collapse')
- this.$element.trigger(startEvent)
- if (startEvent.isDefaultPrevented()) return
-
- var dimension = this.dimension()
-
- this.$element[dimension](this.$element[dimension]())[0].offsetHeight
-
- this.$element
- .addClass('collapsing')
- .removeClass('collapse in')
- .attr('aria-expanded', false)
-
- this.$trigger
- .addClass('collapsed')
- .attr('aria-expanded', false)
-
- this.transitioning = 1
-
- var complete = function () {
- this.transitioning = 0
- this.$element
- .removeClass('collapsing')
- .addClass('collapse')
- .trigger('hidden.bs.collapse')
- }
-
- if (!$.support.transition) return complete.call(this)
-
- this.$element
- [dimension](0)
- .one('bsTransitionEnd', $.proxy(complete, this))
- .emulateTransitionEnd(Collapse.TRANSITION_DURATION)
- }
-
- Collapse.prototype.toggle = function () {
- this[this.$element.hasClass('in') ? 'hide' : 'show']()
- }
-
- Collapse.prototype.getParent = function () {
- return $(this.options.parent)
- .find('[data-toggle="collapse"][data-parent="' + this.options.parent + '"]')
- .each($.proxy(function (i, element) {
- var $element = $(element)
- this.addAriaAndCollapsedClass(getTargetFromTrigger($element), $element)
- }, this))
- .end()
- }
-
- Collapse.prototype.addAriaAndCollapsedClass = function ($element, $trigger) {
- var isOpen = $element.hasClass('in')
-
- $element.attr('aria-expanded', isOpen)
- $trigger
- .toggleClass('collapsed', !isOpen)
- .attr('aria-expanded', isOpen)
- }
-
- function getTargetFromTrigger($trigger) {
- var href
- var target = $trigger.attr('data-target')
- || (href = $trigger.attr('href')) && href.replace(/.*(?=#[^\s]+$)/, '') // strip for ie7
-
- return $(target)
- }
-
-
- // COLLAPSE PLUGIN DEFINITION
- // ==========================
-
- function Plugin(option) {
- return this.each(function () {
- var $this = $(this)
- var data = $this.data('bs.collapse')
- var options = $.extend({}, Collapse.DEFAULTS, $this.data(), typeof option == 'object' && option)
-
- if (!data && options.toggle && /show|hide/.test(option)) options.toggle = false
- if (!data) $this.data('bs.collapse', (data = new Collapse(this, options)))
- if (typeof option == 'string') data[option]()
- })
- }
-
- var old = $.fn.collapse
-
- $.fn.collapse = Plugin
- $.fn.collapse.Constructor = Collapse
-
-
- // COLLAPSE NO CONFLICT
- // ====================
-
- $.fn.collapse.noConflict = function () {
- $.fn.collapse = old
- return this
- }
-
-
- // COLLAPSE DATA-API
- // =================
-
- $(document).on('click.bs.collapse.data-api', '[data-toggle="collapse"]', function (e) {
- var $this = $(this)
-
- if (!$this.attr('data-target')) e.preventDefault()
-
- var $target = getTargetFromTrigger($this)
- var data = $target.data('bs.collapse')
- var option = data ? 'toggle' : $this.data()
-
- Plugin.call($target, option)
- })
-
-}(jQuery);
-
-/* ========================================================================
- * Bootstrap: dropdown.js v3.3.6
- * http://getbootstrap.com/javascript/#dropdowns
- * ========================================================================
- * Copyright 2011-2015 Twitter, Inc.
- * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE)
- * ======================================================================== */
-
-
-+function ($) {
- 'use strict';
-
- // DROPDOWN CLASS DEFINITION
- // =========================
-
- var backdrop = '.dropdown-backdrop'
- var toggle = '[data-toggle="dropdown"]'
- var Dropdown = function (element) {
- $(element).on('click.bs.dropdown', this.toggle)
- }
-
- Dropdown.VERSION = '3.3.6'
-
- function getParent($this) {
- var selector = $this.attr('data-target')
-
- if (!selector) {
- selector = $this.attr('href')
- selector = selector && /#[A-Za-z]/.test(selector) && selector.replace(/.*(?=#[^\s]*$)/, '') // strip for ie7
- }
-
- var $parent = selector && $(selector)
-
- return $parent && $parent.length ? $parent : $this.parent()
- }
-
- function clearMenus(e) {
- if (e && e.which === 3) return
- $(backdrop).remove()
- $(toggle).each(function () {
- var $this = $(this)
- var $parent = getParent($this)
- var relatedTarget = { relatedTarget: this }
-
- if (!$parent.hasClass('open')) return
-
- if (e && e.type == 'click' && /input|textarea/i.test(e.target.tagName) && $.contains($parent[0], e.target)) return
-
- $parent.trigger(e = $.Event('hide.bs.dropdown', relatedTarget))
-
- if (e.isDefaultPrevented()) return
-
- $this.attr('aria-expanded', 'false')
- $parent.removeClass('open').trigger($.Event('hidden.bs.dropdown', relatedTarget))
- })
- }
-
- Dropdown.prototype.toggle = function (e) {
- var $this = $(this)
-
- if ($this.is('.disabled, :disabled')) return
-
- var $parent = getParent($this)
- var isActive = $parent.hasClass('open')
-
- clearMenus()
-
- if (!isActive) {
- if ('ontouchstart' in document.documentElement && !$parent.closest('.navbar-nav').length) {
- // if mobile we use a backdrop because click events don't delegate
- $(document.createElement('div'))
- .addClass('dropdown-backdrop')
- .insertAfter($(this))
- .on('click', clearMenus)
- }
-
- var relatedTarget = { relatedTarget: this }
- $parent.trigger(e = $.Event('show.bs.dropdown', relatedTarget))
-
- if (e.isDefaultPrevented()) return
-
- $this
- .trigger('focus')
- .attr('aria-expanded', 'true')
-
- $parent
- .toggleClass('open')
- .trigger($.Event('shown.bs.dropdown', relatedTarget))
- }
-
- return false
- }
-
- Dropdown.prototype.keydown = function (e) {
- if (!/(38|40|27|32)/.test(e.which) || /input|textarea/i.test(e.target.tagName)) return
-
- var $this = $(this)
-
- e.preventDefault()
- e.stopPropagation()
-
- if ($this.is('.disabled, :disabled')) return
-
- var $parent = getParent($this)
- var isActive = $parent.hasClass('open')
-
- if (!isActive && e.which != 27 || isActive && e.which == 27) {
- if (e.which == 27) $parent.find(toggle).trigger('focus')
- return $this.trigger('click')
- }
-
- var desc = ' li:not(.disabled):visible a'
- var $items = $parent.find('.dropdown-menu' + desc)
-
- if (!$items.length) return
-
- var index = $items.index(e.target)
-
- if (e.which == 38 && index > 0) index-- // up
- if (e.which == 40 && index < $items.length - 1) index++ // down
- if (!~index) index = 0
-
- $items.eq(index).trigger('focus')
- }
-
-
- // DROPDOWN PLUGIN DEFINITION
- // ==========================
-
- function Plugin(option) {
- return this.each(function () {
- var $this = $(this)
- var data = $this.data('bs.dropdown')
-
- if (!data) $this.data('bs.dropdown', (data = new Dropdown(this)))
- if (typeof option == 'string') data[option].call($this)
- })
- }
-
- var old = $.fn.dropdown
-
- $.fn.dropdown = Plugin
- $.fn.dropdown.Constructor = Dropdown
-
-
- // DROPDOWN NO CONFLICT
- // ====================
-
- $.fn.dropdown.noConflict = function () {
- $.fn.dropdown = old
- return this
- }
-
-
- // APPLY TO STANDARD DROPDOWN ELEMENTS
- // ===================================
-
- $(document)
- .on('click.bs.dropdown.data-api', clearMenus)
- .on('click.bs.dropdown.data-api', '.dropdown form', function (e) { e.stopPropagation() })
- .on('click.bs.dropdown.data-api', toggle, Dropdown.prototype.toggle)
- .on('keydown.bs.dropdown.data-api', toggle, Dropdown.prototype.keydown)
- .on('keydown.bs.dropdown.data-api', '.dropdown-menu', Dropdown.prototype.keydown)
-
-}(jQuery);
-
-/* ========================================================================
- * Bootstrap: modal.js v3.3.6
- * http://getbootstrap.com/javascript/#modals
- * ========================================================================
- * Copyright 2011-2015 Twitter, Inc.
- * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE)
- * ======================================================================== */
-
-
-+function ($) {
- 'use strict';
-
- // MODAL CLASS DEFINITION
- // ======================
-
- var Modal = function (element, options) {
- this.options = options
- this.$body = $(document.body)
- this.$element = $(element)
- this.$dialog = this.$element.find('.modal-dialog')
- this.$backdrop = null
- this.isShown = null
- this.originalBodyPad = null
- this.scrollbarWidth = 0
- this.ignoreBackdropClick = false
-
- if (this.options.remote) {
- this.$element
- .find('.modal-content')
- .load(this.options.remote, $.proxy(function () {
- this.$element.trigger('loaded.bs.modal')
- }, this))
- }
- }
-
- Modal.VERSION = '3.3.6'
-
- Modal.TRANSITION_DURATION = 300
- Modal.BACKDROP_TRANSITION_DURATION = 150
-
- Modal.DEFAULTS = {
- backdrop: true,
- keyboard: true,
- show: true
- }
-
- Modal.prototype.toggle = function (_relatedTarget) {
- return this.isShown ? this.hide() : this.show(_relatedTarget)
- }
-
- Modal.prototype.show = function (_relatedTarget) {
- var that = this
- var e = $.Event('show.bs.modal', { relatedTarget: _relatedTarget })
-
- this.$element.trigger(e)
-
- if (this.isShown || e.isDefaultPrevented()) return
-
- this.isShown = true
-
- this.checkScrollbar()
- this.setScrollbar()
- this.$body.addClass('modal-open')
-
- this.escape()
- this.resize()
-
- this.$element.on('click.dismiss.bs.modal', '[data-dismiss="modal"]', $.proxy(this.hide, this))
-
- this.$dialog.on('mousedown.dismiss.bs.modal', function () {
- that.$element.one('mouseup.dismiss.bs.modal', function (e) {
- if ($(e.target).is(that.$element)) that.ignoreBackdropClick = true
- })
- })
-
- this.backdrop(function () {
- var transition = $.support.transition && that.$element.hasClass('fade')
-
- if (!that.$element.parent().length) {
- that.$element.appendTo(that.$body) // don't move modals dom position
- }
-
- that.$element
- .show()
- .scrollTop(0)
-
- that.adjustDialog()
-
- if (transition) {
- that.$element[0].offsetWidth // force reflow
- }
-
- that.$element.addClass('in')
-
- that.enforceFocus()
-
- var e = $.Event('shown.bs.modal', { relatedTarget: _relatedTarget })
-
- transition ?
- that.$dialog // wait for modal to slide in
- .one('bsTransitionEnd', function () {
- that.$element.trigger('focus').trigger(e)
- })
- .emulateTransitionEnd(Modal.TRANSITION_DURATION) :
- that.$element.trigger('focus').trigger(e)
- })
- }
-
- Modal.prototype.hide = function (e) {
- if (e) e.preventDefault()
-
- e = $.Event('hide.bs.modal')
-
- this.$element.trigger(e)
-
- if (!this.isShown || e.isDefaultPrevented()) return
-
- this.isShown = false
-
- this.escape()
- this.resize()
-
- $(document).off('focusin.bs.modal')
-
- this.$element
- .removeClass('in')
- .off('click.dismiss.bs.modal')
- .off('mouseup.dismiss.bs.modal')
-
- this.$dialog.off('mousedown.dismiss.bs.modal')
-
- $.support.transition && this.$element.hasClass('fade') ?
- this.$element
- .one('bsTransitionEnd', $.proxy(this.hideModal, this))
- .emulateTransitionEnd(Modal.TRANSITION_DURATION) :
- this.hideModal()
- }
-
- Modal.prototype.enforceFocus = function () {
- $(document)
- .off('focusin.bs.modal') // guard against infinite focus loop
- .on('focusin.bs.modal', $.proxy(function (e) {
- if (this.$element[0] !== e.target && !this.$element.has(e.target).length) {
- this.$element.trigger('focus')
- }
- }, this))
- }
-
- Modal.prototype.escape = function () {
- if (this.isShown && this.options.keyboard) {
- this.$element.on('keydown.dismiss.bs.modal', $.proxy(function (e) {
- e.which == 27 && this.hide()
- }, this))
- } else if (!this.isShown) {
- this.$element.off('keydown.dismiss.bs.modal')
- }
- }
-
- Modal.prototype.resize = function () {
- if (this.isShown) {
- $(window).on('resize.bs.modal', $.proxy(this.handleUpdate, this))
- } else {
- $(window).off('resize.bs.modal')
- }
- }
-
- Modal.prototype.hideModal = function () {
- var that = this
- this.$element.hide()
- this.backdrop(function () {
- that.$body.removeClass('modal-open')
- that.resetAdjustments()
- that.resetScrollbar()
- that.$element.trigger('hidden.bs.modal')
- })
- }
-
- Modal.prototype.removeBackdrop = function () {
- this.$backdrop && this.$backdrop.remove()
- this.$backdrop = null
- }
-
- Modal.prototype.backdrop = function (callback) {
- var that = this
- var animate = this.$element.hasClass('fade') ? 'fade' : ''
-
- if (this.isShown && this.options.backdrop) {
- var doAnimate = $.support.transition && animate
-
- this.$backdrop = $(document.createElement('div'))
- .addClass('modal-backdrop ' + animate)
- .appendTo(this.$body)
-
- this.$element.on('click.dismiss.bs.modal', $.proxy(function (e) {
- if (this.ignoreBackdropClick) {
- this.ignoreBackdropClick = false
- return
- }
- if (e.target !== e.currentTarget) return
- this.options.backdrop == 'static'
- ? this.$element[0].focus()
- : this.hide()
- }, this))
-
- if (doAnimate) this.$backdrop[0].offsetWidth // force reflow
-
- this.$backdrop.addClass('in')
-
- if (!callback) return
-
- doAnimate ?
- this.$backdrop
- .one('bsTransitionEnd', callback)
- .emulateTransitionEnd(Modal.BACKDROP_TRANSITION_DURATION) :
- callback()
-
- } else if (!this.isShown && this.$backdrop) {
- this.$backdrop.removeClass('in')
-
- var callbackRemove = function () {
- that.removeBackdrop()
- callback && callback()
- }
- $.support.transition && this.$element.hasClass('fade') ?
- this.$backdrop
- .one('bsTransitionEnd', callbackRemove)
- .emulateTransitionEnd(Modal.BACKDROP_TRANSITION_DURATION) :
- callbackRemove()
-
- } else if (callback) {
- callback()
- }
- }
-
- // these following methods are used to handle overflowing modals
-
- Modal.prototype.handleUpdate = function () {
- this.adjustDialog()
- }
-
- Modal.prototype.adjustDialog = function () {
- var modalIsOverflowing = this.$element[0].scrollHeight > document.documentElement.clientHeight
-
- this.$element.css({
- paddingLeft: !this.bodyIsOverflowing && modalIsOverflowing ? this.scrollbarWidth : '',
- paddingRight: this.bodyIsOverflowing && !modalIsOverflowing ? this.scrollbarWidth : ''
- })
- }
-
- Modal.prototype.resetAdjustments = function () {
- this.$element.css({
- paddingLeft: '',
- paddingRight: ''
- })
- }
-
- Modal.prototype.checkScrollbar = function () {
- var fullWindowWidth = window.innerWidth
- if (!fullWindowWidth) { // workaround for missing window.innerWidth in IE8
- var documentElementRect = document.documentElement.getBoundingClientRect()
- fullWindowWidth = documentElementRect.right - Math.abs(documentElementRect.left)
- }
- this.bodyIsOverflowing = document.body.clientWidth < fullWindowWidth
- this.scrollbarWidth = this.measureScrollbar()
- }
-
- Modal.prototype.setScrollbar = function () {
- var bodyPad = parseInt((this.$body.css('padding-right') || 0), 10)
- this.originalBodyPad = document.body.style.paddingRight || ''
- if (this.bodyIsOverflowing) this.$body.css('padding-right', bodyPad + this.scrollbarWidth)
- }
-
- Modal.prototype.resetScrollbar = function () {
- this.$body.css('padding-right', this.originalBodyPad)
- }
-
- Modal.prototype.measureScrollbar = function () { // thx walsh
- var scrollDiv = document.createElement('div')
- scrollDiv.className = 'modal-scrollbar-measure'
- this.$body.append(scrollDiv)
- var scrollbarWidth = scrollDiv.offsetWidth - scrollDiv.clientWidth
- this.$body[0].removeChild(scrollDiv)
- return scrollbarWidth
- }
-
-
- // MODAL PLUGIN DEFINITION
- // =======================
-
- function Plugin(option, _relatedTarget) {
- return this.each(function () {
- var $this = $(this)
- var data = $this.data('bs.modal')
- var options = $.extend({}, Modal.DEFAULTS, $this.data(), typeof option == 'object' && option)
-
- if (!data) $this.data('bs.modal', (data = new Modal(this, options)))
- if (typeof option == 'string') data[option](_relatedTarget)
- else if (options.show) data.show(_relatedTarget)
- })
- }
-
- var old = $.fn.modal
-
- $.fn.modal = Plugin
- $.fn.modal.Constructor = Modal
-
-
- // MODAL NO CONFLICT
- // =================
-
- $.fn.modal.noConflict = function () {
- $.fn.modal = old
- return this
- }
-
-
- // MODAL DATA-API
- // ==============
-
- $(document).on('click.bs.modal.data-api', '[data-toggle="modal"]', function (e) {
- var $this = $(this)
- var href = $this.attr('href')
- var $target = $($this.attr('data-target') || (href && href.replace(/.*(?=#[^\s]+$)/, ''))) // strip for ie7
- var option = $target.data('bs.modal') ? 'toggle' : $.extend({ remote: !/#/.test(href) && href }, $target.data(), $this.data())
-
- if ($this.is('a')) e.preventDefault()
-
- $target.one('show.bs.modal', function (showEvent) {
- if (showEvent.isDefaultPrevented()) return // only register focus restorer if modal will actually get shown
- $target.one('hidden.bs.modal', function () {
- $this.is(':visible') && $this.trigger('focus')
- })
- })
- Plugin.call($target, option, this)
- })
-
-}(jQuery);
-
-/* ========================================================================
- * Bootstrap: tooltip.js v3.3.6
- * http://getbootstrap.com/javascript/#tooltip
- * Inspired by the original jQuery.tipsy by Jason Frame
- * ========================================================================
- * Copyright 2011-2015 Twitter, Inc.
- * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE)
- * ======================================================================== */
-
-
-+function ($) {
- 'use strict';
-
- // TOOLTIP PUBLIC CLASS DEFINITION
- // ===============================
-
- var Tooltip = function (element, options) {
- this.type = null
- this.options = null
- this.enabled = null
- this.timeout = null
- this.hoverState = null
- this.$element = null
- this.inState = null
-
- this.init('tooltip', element, options)
- }
-
- Tooltip.VERSION = '3.3.6'
-
- Tooltip.TRANSITION_DURATION = 150
-
- Tooltip.DEFAULTS = {
- animation: true,
- placement: 'top',
- selector: false,
- template: '<div class="tooltip" role="tooltip"><div class="tooltip-arrow"></div><div class="tooltip-inner"></div></div>',
- trigger: 'hover focus',
- title: '',
- delay: 0,
- html: false,
- container: false,
- viewport: {
- selector: 'body',
- padding: 0
- }
- }
-
- Tooltip.prototype.init = function (type, element, options) {
- this.enabled = true
- this.type = type
- this.$element = $(element)
- this.options = this.getOptions(options)
- this.$viewport = this.options.viewport && $($.isFunction(this.options.viewport) ? this.options.viewport.call(this, this.$element) : (this.options.viewport.selector || this.options.viewport))
- this.inState = { click: false, hover: false, focus: false }
-
- if (this.$element[0] instanceof document.constructor && !this.options.selector) {
- throw new Error('`selector` option must be specified when initializing ' + this.type + ' on the window.document object!')
- }
-
- var triggers = this.options.trigger.split(' ')
-
- for (var i = triggers.length; i--;) {
- var trigger = triggers[i]
-
- if (trigger == 'click') {
- this.$element.on('click.' + this.type, this.options.selector, $.proxy(this.toggle, this))
- } else if (trigger != 'manual') {
- var eventIn = trigger == 'hover' ? 'mouseenter' : 'focusin'
- var eventOut = trigger == 'hover' ? 'mouseleave' : 'focusout'
-
- this.$element.on(eventIn + '.' + this.type, this.options.selector, $.proxy(this.enter, this))
- this.$element.on(eventOut + '.' + this.type, this.options.selector, $.proxy(this.leave, this))
- }
- }
-
- this.options.selector ?
- (this._options = $.extend({}, this.options, { trigger: 'manual', selector: '' })) :
- this.fixTitle()
- }
-
- Tooltip.prototype.getDefaults = function () {
- return Tooltip.DEFAULTS
- }
-
- Tooltip.prototype.getOptions = function (options) {
- options = $.extend({}, this.getDefaults(), this.$element.data(), options)
-
- if (options.delay && typeof options.delay == 'number') {
- options.delay = {
- show: options.delay,
- hide: options.delay
- }
- }
-
- return options
- }
-
- Tooltip.prototype.getDelegateOptions = function () {
- var options = {}
- var defaults = this.getDefaults()
-
- this._options && $.each(this._options, function (key, value) {
- if (defaults[key] != value) options[key] = value
- })
-
- return options
- }
-
- Tooltip.prototype.enter = function (obj) {
- var self = obj instanceof this.constructor ?
- obj : $(obj.currentTarget).data('bs.' + this.type)
-
- if (!self) {
- self = new this.constructor(obj.currentTarget, this.getDelegateOptions())
- $(obj.currentTarget).data('bs.' + this.type, self)
- }
-
- if (obj instanceof $.Event) {
- self.inState[obj.type == 'focusin' ? 'focus' : 'hover'] = true
- }
-
- if (self.tip().hasClass('in') || self.hoverState == 'in') {
- self.hoverState = 'in'
- return
- }
-
- clearTimeout(self.timeout)
-
- self.hoverState = 'in'
-
- if (!self.options.delay || !self.options.delay.show) return self.show()
-
- self.timeout = setTimeout(function () {
- if (self.hoverState == 'in') self.show()
- }, self.options.delay.show)
- }
-
- Tooltip.prototype.isInStateTrue = function () {
- for (var key in this.inState) {
- if (this.inState[key]) return true
- }
-
- return false
- }
-
- Tooltip.prototype.leave = function (obj) {
- var self = obj instanceof this.constructor ?
- obj : $(obj.currentTarget).data('bs.' + this.type)
-
- if (!self) {
- self = new this.constructor(obj.currentTarget, this.getDelegateOptions())
- $(obj.currentTarget).data('bs.' + this.type, self)
- }
-
- if (obj instanceof $.Event) {
- self.inState[obj.type == 'focusout' ? 'focus' : 'hover'] = false
- }
-
- if (self.isInStateTrue()) return
-
- clearTimeout(self.timeout)
-
- self.hoverState = 'out'
-
- if (!self.options.delay || !self.options.delay.hide) return self.hide()
-
- self.timeout = setTimeout(function () {
- if (self.hoverState == 'out') self.hide()
- }, self.options.delay.hide)
- }
-
- Tooltip.prototype.show = function () {
- var e = $.Event('show.bs.' + this.type)
-
- if (this.hasContent() && this.enabled) {
- this.$element.trigger(e)
-
- var inDom = $.contains(this.$element[0].ownerDocument.documentElement, this.$element[0])
- if (e.isDefaultPrevented() || !inDom) return
- var that = this
-
- var $tip = this.tip()
-
- var tipId = this.getUID(this.type)
-
- this.setContent()
- $tip.attr('id', tipId)
- this.$element.attr('aria-describedby', tipId)
-
- if (this.options.animation) $tip.addClass('fade')
-
- var placement = typeof this.options.placement == 'function' ?
- this.options.placement.call(this, $tip[0], this.$element[0]) :
- this.options.placement
-
- var autoToken = /\s?auto?\s?/i
- var autoPlace = autoToken.test(placement)
- if (autoPlace) placement = placement.replace(autoToken, '') || 'top'
-
- $tip
- .detach()
- .css({ top: 0, left: 0, display: 'block' })
- .addClass(placement)
- .data('bs.' + this.type, this)
-
- this.options.container ? $tip.appendTo(this.options.container) : $tip.insertAfter(this.$element)
- this.$element.trigger('inserted.bs.' + this.type)
-
- var pos = this.getPosition()
- var actualWidth = $tip[0].offsetWidth
- var actualHeight = $tip[0].offsetHeight
-
- if (autoPlace) {
- var orgPlacement = placement
- var viewportDim = this.getPosition(this.$viewport)
-
- placement = placement == 'bottom' && pos.bottom + actualHeight > viewportDim.bottom ? 'top' :
- placement == 'top' && pos.top - actualHeight < viewportDim.top ? 'bottom' :
- placement == 'right' && pos.right + actualWidth > viewportDim.width ? 'left' :
- placement == 'left' && pos.left - actualWidth < viewportDim.left ? 'right' :
- placement
-
- $tip
- .removeClass(orgPlacement)
- .addClass(placement)
- }
-
- var calculatedOffset = this.getCalculatedOffset(placement, pos, actualWidth, actualHeight)
-
- this.applyPlacement(calculatedOffset, placement)
-
- var complete = function () {
- var prevHoverState = that.hoverState
- that.$element.trigger('shown.bs.' + that.type)
- that.hoverState = null
-
- if (prevHoverState == 'out') that.leave(that)
- }
-
- $.support.transition && this.$tip.hasClass('fade') ?
- $tip
- .one('bsTransitionEnd', complete)
- .emulateTransitionEnd(Tooltip.TRANSITION_DURATION) :
- complete()
- }
- }
-
- Tooltip.prototype.applyPlacement = function (offset, placement) {
- var $tip = this.tip()
- var width = $tip[0].offsetWidth
- var height = $tip[0].offsetHeight
-
- // manually read margins because getBoundingClientRect includes difference
- var marginTop = parseInt($tip.css('margin-top'), 10)
- var marginLeft = parseInt($tip.css('margin-left'), 10)
-
- // we must check for NaN for ie 8/9
- if (isNaN(marginTop)) marginTop = 0
- if (isNaN(marginLeft)) marginLeft = 0
-
- offset.top += marginTop
- offset.left += marginLeft
-
- // $.fn.offset doesn't round pixel values
- // so we use setOffset directly with our own function B-0
- $.offset.setOffset($tip[0], $.extend({
- using: function (props) {
- $tip.css({
- top: Math.round(props.top),
- left: Math.round(props.left)
- })
- }
- }, offset), 0)
-
- $tip.addClass('in')
-
- // check to see if placing tip in new offset caused the tip to resize itself
- var actualWidth = $tip[0].offsetWidth
- var actualHeight = $tip[0].offsetHeight
-
- if (placement == 'top' && actualHeight != height) {
- offset.top = offset.top + height - actualHeight
- }
-
- var delta = this.getViewportAdjustedDelta(placement, offset, actualWidth, actualHeight)
-
- if (delta.left) offset.left += delta.left
- else offset.top += delta.top
-
- var isVertical = /top|bottom/.test(placement)
- var arrowDelta = isVertical ? delta.left * 2 - width + actualWidth : delta.top * 2 - height + actualHeight
- var arrowOffsetPosition = isVertical ? 'offsetWidth' : 'offsetHeight'
-
- $tip.offset(offset)
- this.replaceArrow(arrowDelta, $tip[0][arrowOffsetPosition], isVertical)
- }
-
- Tooltip.prototype.replaceArrow = function (delta, dimension, isVertical) {
- this.arrow()
- .css(isVertical ? 'left' : 'top', 50 * (1 - delta / dimension) + '%')
- .css(isVertical ? 'top' : 'left', '')
- }
-
- Tooltip.prototype.setContent = function () {
- var $tip = this.tip()
- var title = this.getTitle()
-
- $tip.find('.tooltip-inner')[this.options.html ? 'html' : 'text'](title)
- $tip.removeClass('fade in top bottom left right')
- }
-
- Tooltip.prototype.hide = function (callback) {
- var that = this
- var $tip = $(this.$tip)
- var e = $.Event('hide.bs.' + this.type)
-
- function complete() {
- if (that.hoverState != 'in') $tip.detach()
- that.$element
- .removeAttr('aria-describedby')
- .trigger('hidden.bs.' + that.type)
- callback && callback()
- }
-
- this.$element.trigger(e)
-
- if (e.isDefaultPrevented()) return
-
- $tip.removeClass('in')
-
- $.support.transition && $tip.hasClass('fade') ?
- $tip
- .one('bsTransitionEnd', complete)
- .emulateTransitionEnd(Tooltip.TRANSITION_DURATION) :
- complete()
-
- this.hoverState = null
-
- return this
- }
-
- Tooltip.prototype.fixTitle = function () {
- var $e = this.$element
- if ($e.attr('title') || typeof $e.attr('data-original-title') != 'string') {
- $e.attr('data-original-title', $e.attr('title') || '').attr('title', '')
- }
- }
-
- Tooltip.prototype.hasContent = function () {
- return this.getTitle()
- }
-
- Tooltip.prototype.getPosition = function ($element) {
- $element = $element || this.$element
-
- var el = $element[0]
- var isBody = el.tagName == 'BODY'
-
- var elRect = el.getBoundingClientRect()
- if (elRect.width == null) {
- // width and height are missing in IE8, so compute them manually; see https://github.com/twbs/bootstrap/issues/14093
- elRect = $.extend({}, elRect, { width: elRect.right - elRect.left, height: elRect.bottom - elRect.top })
- }
- var elOffset = isBody ? { top: 0, left: 0 } : $element.offset()
- var scroll = { scroll: isBody ? document.documentElement.scrollTop || document.body.scrollTop : $element.scrollTop() }
- var outerDims = isBody ? { width: $(window).width(), height: $(window).height() } : null
-
- return $.extend({}, elRect, scroll, outerDims, elOffset)
- }
-
- Tooltip.prototype.getCalculatedOffset = function (placement, pos, actualWidth, actualHeight) {
- return placement == 'bottom' ? { top: pos.top + pos.height, left: pos.left + pos.width / 2 - actualWidth / 2 } :
- placement == 'top' ? { top: pos.top - actualHeight, left: pos.left + pos.width / 2 - actualWidth / 2 } :
- placement == 'left' ? { top: pos.top + pos.height / 2 - actualHeight / 2, left: pos.left - actualWidth } :
- /* placement == 'right' */ { top: pos.top + pos.height / 2 - actualHeight / 2, left: pos.left + pos.width }
-
- }
-
- Tooltip.prototype.getViewportAdjustedDelta = function (placement, pos, actualWidth, actualHeight) {
- var delta = { top: 0, left: 0 }
- if (!this.$viewport) return delta
-
- var viewportPadding = this.options.viewport && this.options.viewport.padding || 0
- var viewportDimensions = this.getPosition(this.$viewport)
-
- if (/right|left/.test(placement)) {
- var topEdgeOffset = pos.top - viewportPadding - viewportDimensions.scroll
- var bottomEdgeOffset = pos.top + viewportPadding - viewportDimensions.scroll + actualHeight
- if (topEdgeOffset < viewportDimensions.top) { // top overflow
- delta.top = viewportDimensions.top - topEdgeOffset
- } else if (bottomEdgeOffset > viewportDimensions.top + viewportDimensions.height) { // bottom overflow
- delta.top = viewportDimensions.top + viewportDimensions.height - bottomEdgeOffset
- }
- } else {
- var leftEdgeOffset = pos.left - viewportPadding
- var rightEdgeOffset = pos.left + viewportPadding + actualWidth
- if (leftEdgeOffset < viewportDimensions.left) { // left overflow
- delta.left = viewportDimensions.left - leftEdgeOffset
- } else if (rightEdgeOffset > viewportDimensions.right) { // right overflow
- delta.left = viewportDimensions.left + viewportDimensions.width - rightEdgeOffset
- }
- }
-
- return delta
- }
-
- Tooltip.prototype.getTitle = function () {
- var title
- var $e = this.$element
- var o = this.options
-
- title = $e.attr('data-original-title')
- || (typeof o.title == 'function' ? o.title.call($e[0]) : o.title)
-
- return title
- }
-
- Tooltip.prototype.getUID = function (prefix) {
- do prefix += ~~(Math.random() * 1000000)
- while (document.getElementById(prefix))
- return prefix
- }
-
- Tooltip.prototype.tip = function () {
- if (!this.$tip) {
- this.$tip = $(this.options.template)
- if (this.$tip.length != 1) {
- throw new Error(this.type + ' `template` option must consist of exactly 1 top-level element!')
- }
- }
- return this.$tip
- }
-
- Tooltip.prototype.arrow = function () {
- return (this.$arrow = this.$arrow || this.tip().find('.tooltip-arrow'))
- }
-
- Tooltip.prototype.enable = function () {
- this.enabled = true
- }
-
- Tooltip.prototype.disable = function () {
- this.enabled = false
- }
-
- Tooltip.prototype.toggleEnabled = function () {
- this.enabled = !this.enabled
- }
-
- Tooltip.prototype.toggle = function (e) {
- var self = this
- if (e) {
- self = $(e.currentTarget).data('bs.' + this.type)
- if (!self) {
- self = new this.constructor(e.currentTarget, this.getDelegateOptions())
- $(e.currentTarget).data('bs.' + this.type, self)
- }
- }
-
- if (e) {
- self.inState.click = !self.inState.click
- if (self.isInStateTrue()) self.enter(self)
- else self.leave(self)
- } else {
- self.tip().hasClass('in') ? self.leave(self) : self.enter(self)
- }
- }
-
- Tooltip.prototype.destroy = function () {
- var that = this
- clearTimeout(this.timeout)
- this.hide(function () {
- that.$element.off('.' + that.type).removeData('bs.' + that.type)
- if (that.$tip) {
- that.$tip.detach()
- }
- that.$tip = null
- that.$arrow = null
- that.$viewport = null
- })
- }
-
-
- // TOOLTIP PLUGIN DEFINITION
- // =========================
-
- function Plugin(option) {
- return this.each(function () {
- var $this = $(this)
- var data = $this.data('bs.tooltip')
- var options = typeof option == 'object' && option
-
- if (!data && /destroy|hide/.test(option)) return
- if (!data) $this.data('bs.tooltip', (data = new Tooltip(this, options)))
- if (typeof option == 'string') data[option]()
- })
- }
-
- var old = $.fn.tooltip
-
- $.fn.tooltip = Plugin
- $.fn.tooltip.Constructor = Tooltip
-
-
- // TOOLTIP NO CONFLICT
- // ===================
-
- $.fn.tooltip.noConflict = function () {
- $.fn.tooltip = old
- return this
- }
-
-}(jQuery);
-
-/* ========================================================================
- * Bootstrap: popover.js v3.3.6
- * http://getbootstrap.com/javascript/#popovers
- * ========================================================================
- * Copyright 2011-2015 Twitter, Inc.
- * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE)
- * ======================================================================== */
-
-
-+function ($) {
- 'use strict';
-
- // POPOVER PUBLIC CLASS DEFINITION
- // ===============================
-
- var Popover = function (element, options) {
- this.init('popover', element, options)
- }
-
- if (!$.fn.tooltip) throw new Error('Popover requires tooltip.js')
-
- Popover.VERSION = '3.3.6'
-
- Popover.DEFAULTS = $.extend({}, $.fn.tooltip.Constructor.DEFAULTS, {
- placement: 'right',
- trigger: 'click',
- content: '',
- template: '<div class="popover" role="tooltip"><div class="arrow"></div><h3 class="popover-title"></h3><div class="popover-content"></div></div>'
- })
-
-
- // NOTE: POPOVER EXTENDS tooltip.js
- // ================================
-
- Popover.prototype = $.extend({}, $.fn.tooltip.Constructor.prototype)
-
- Popover.prototype.constructor = Popover
-
- Popover.prototype.getDefaults = function () {
- return Popover.DEFAULTS
- }
-
- Popover.prototype.setContent = function () {
- var $tip = this.tip()
- var title = this.getTitle()
- var content = this.getContent()
-
- $tip.find('.popover-title')[this.options.html ? 'html' : 'text'](title)
- $tip.find('.popover-content').children().detach().end()[ // we use append for html objects to maintain js events
- this.options.html ? (typeof content == 'string' ? 'html' : 'append') : 'text'
- ](content)
-
- $tip.removeClass('fade top bottom left right in')
-
- // IE8 doesn't accept hiding via the `:empty` pseudo selector, we have to do
- // this manually by checking the contents.
- if (!$tip.find('.popover-title').html()) $tip.find('.popover-title').hide()
- }
-
- Popover.prototype.hasContent = function () {
- return this.getTitle() || this.getContent()
- }
-
- Popover.prototype.getContent = function () {
- var $e = this.$element
- var o = this.options
-
- return $e.attr('data-content')
- || (typeof o.content == 'function' ?
- o.content.call($e[0]) :
- o.content)
- }
-
- Popover.prototype.arrow = function () {
- return (this.$arrow = this.$arrow || this.tip().find('.arrow'))
- }
-
-
- // POPOVER PLUGIN DEFINITION
- // =========================
-
- function Plugin(option) {
- return this.each(function () {
- var $this = $(this)
- var data = $this.data('bs.popover')
- var options = typeof option == 'object' && option
-
- if (!data && /destroy|hide/.test(option)) return
- if (!data) $this.data('bs.popover', (data = new Popover(this, options)))
- if (typeof option == 'string') data[option]()
- })
- }
-
- var old = $.fn.popover
-
- $.fn.popover = Plugin
- $.fn.popover.Constructor = Popover
-
-
- // POPOVER NO CONFLICT
- // ===================
-
- $.fn.popover.noConflict = function () {
- $.fn.popover = old
- return this
- }
-
-}(jQuery);
-
-/* ========================================================================
- * Bootstrap: scrollspy.js v3.3.6
- * http://getbootstrap.com/javascript/#scrollspy
- * ========================================================================
- * Copyright 2011-2015 Twitter, Inc.
- * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE)
- * ======================================================================== */
-
-
-+function ($) {
- 'use strict';
-
- // SCROLLSPY CLASS DEFINITION
- // ==========================
-
- function ScrollSpy(element, options) {
- this.$body = $(document.body)
- this.$scrollElement = $(element).is(document.body) ? $(window) : $(element)
- this.options = $.extend({}, ScrollSpy.DEFAULTS, options)
- this.selector = (this.options.target || '') + ' .nav li > a'
- this.offsets = []
- this.targets = []
- this.activeTarget = null
- this.scrollHeight = 0
-
- this.$scrollElement.on('scroll.bs.scrollspy', $.proxy(this.process, this))
- this.refresh()
- this.process()
- }
-
- ScrollSpy.VERSION = '3.3.6'
-
- ScrollSpy.DEFAULTS = {
- offset: 10
- }
-
- ScrollSpy.prototype.getScrollHeight = function () {
- return this.$scrollElement[0].scrollHeight || Math.max(this.$body[0].scrollHeight, document.documentElement.scrollHeight)
- }
-
- ScrollSpy.prototype.refresh = function () {
- var that = this
- var offsetMethod = 'offset'
- var offsetBase = 0
-
- this.offsets = []
- this.targets = []
- this.scrollHeight = this.getScrollHeight()
-
- if (!$.isWindow(this.$scrollElement[0])) {
- offsetMethod = 'position'
- offsetBase = this.$scrollElement.scrollTop()
- }
-
- this.$body
- .find(this.selector)
- .map(function () {
- var $el = $(this)
- var href = $el.data('target') || $el.attr('href')
- var $href = /^#./.test(href) && $(href)
-
- return ($href
- && $href.length
- && $href.is(':visible')
- && [[$href[offsetMethod]().top + offsetBase, href]]) || null
- })
- .sort(function (a, b) { return a[0] - b[0] })
- .each(function () {
- that.offsets.push(this[0])
- that.targets.push(this[1])
- })
- }
-
- ScrollSpy.prototype.process = function () {
- var scrollTop = this.$scrollElement.scrollTop() + this.options.offset
- var scrollHeight = this.getScrollHeight()
- var maxScroll = this.options.offset + scrollHeight - this.$scrollElement.height()
- var offsets = this.offsets
- var targets = this.targets
- var activeTarget = this.activeTarget
- var i
-
- if (this.scrollHeight != scrollHeight) {
- this.refresh()
- }
-
- if (scrollTop >= maxScroll) {
- return activeTarget != (i = targets[targets.length - 1]) && this.activate(i)
- }
-
- if (activeTarget && scrollTop < offsets[0]) {
- this.activeTarget = null
- return this.clear()
- }
-
- for (i = offsets.length; i--;) {
- activeTarget != targets[i]
- && scrollTop >= offsets[i]
- && (offsets[i + 1] === undefined || scrollTop < offsets[i + 1])
- && this.activate(targets[i])
- }
- }
-
- ScrollSpy.prototype.activate = function (target) {
- this.activeTarget = target
-
- this.clear()
-
- var selector = this.selector +
- '[data-target="' + target + '"],' +
- this.selector + '[href="' + target + '"]'
-
- var active = $(selector)
- .parents('li')
- .addClass('active')
-
- if (active.parent('.dropdown-menu').length) {
- active = active
- .closest('li.dropdown')
- .addClass('active')
- }
-
- active.trigger('activate.bs.scrollspy')
- }
-
- ScrollSpy.prototype.clear = function () {
- $(this.selector)
- .parentsUntil(this.options.target, '.active')
- .removeClass('active')
- }
-
-
- // SCROLLSPY PLUGIN DEFINITION
- // ===========================
-
- function Plugin(option) {
- return this.each(function () {
- var $this = $(this)
- var data = $this.data('bs.scrollspy')
- var options = typeof option == 'object' && option
-
- if (!data) $this.data('bs.scrollspy', (data = new ScrollSpy(this, options)))
- if (typeof option == 'string') data[option]()
- })
- }
-
- var old = $.fn.scrollspy
-
- $.fn.scrollspy = Plugin
- $.fn.scrollspy.Constructor = ScrollSpy
-
-
- // SCROLLSPY NO CONFLICT
- // =====================
-
- $.fn.scrollspy.noConflict = function () {
- $.fn.scrollspy = old
- return this
- }
-
-
- // SCROLLSPY DATA-API
- // ==================
-
- $(window).on('load.bs.scrollspy.data-api', function () {
- $('[data-spy="scroll"]').each(function () {
- var $spy = $(this)
- Plugin.call($spy, $spy.data())
- })
- })
-
-}(jQuery);
-
-/* ========================================================================
- * Bootstrap: tab.js v3.3.6
- * http://getbootstrap.com/javascript/#tabs
- * ========================================================================
- * Copyright 2011-2015 Twitter, Inc.
- * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE)
- * ======================================================================== */
-
-
-+function ($) {
- 'use strict';
-
- // TAB CLASS DEFINITION
- // ====================
-
- var Tab = function (element) {
- // jscs:disable requireDollarBeforejQueryAssignment
- this.element = $(element)
- // jscs:enable requireDollarBeforejQueryAssignment
- }
-
- Tab.VERSION = '3.3.6'
-
- Tab.TRANSITION_DURATION = 150
-
- Tab.prototype.show = function () {
- var $this = this.element
- var $ul = $this.closest('ul:not(.dropdown-menu)')
- var selector = $this.data('target')
-
- if (!selector) {
- selector = $this.attr('href')
- selector = selector && selector.replace(/.*(?=#[^\s]*$)/, '') // strip for ie7
- }
-
- if ($this.parent('li').hasClass('active')) return
-
- var $previous = $ul.find('.active:last a')
- var hideEvent = $.Event('hide.bs.tab', {
- relatedTarget: $this[0]
- })
- var showEvent = $.Event('show.bs.tab', {
- relatedTarget: $previous[0]
- })
-
- $previous.trigger(hideEvent)
- $this.trigger(showEvent)
-
- if (showEvent.isDefaultPrevented() || hideEvent.isDefaultPrevented()) return
-
- var $target = $(selector)
-
- this.activate($this.closest('li'), $ul)
- this.activate($target, $target.parent(), function () {
- $previous.trigger({
- type: 'hidden.bs.tab',
- relatedTarget: $this[0]
- })
- $this.trigger({
- type: 'shown.bs.tab',
- relatedTarget: $previous[0]
- })
- })
- }
-
- Tab.prototype.activate = function (element, container, callback) {
- var $active = container.find('> .active')
- var transition = callback
- && $.support.transition
- && ($active.length && $active.hasClass('fade') || !!container.find('> .fade').length)
-
- function next() {
- $active
- .removeClass('active')
- .find('> .dropdown-menu > .active')
- .removeClass('active')
- .end()
- .find('[data-toggle="tab"]')
- .attr('aria-expanded', false)
-
- element
- .addClass('active')
- .find('[data-toggle="tab"]')
- .attr('aria-expanded', true)
-
- if (transition) {
- element[0].offsetWidth // reflow for transition
- element.addClass('in')
- } else {
- element.removeClass('fade')
- }
-
- if (element.parent('.dropdown-menu').length) {
- element
- .closest('li.dropdown')
- .addClass('active')
- .end()
- .find('[data-toggle="tab"]')
- .attr('aria-expanded', true)
- }
-
- callback && callback()
- }
-
- $active.length && transition ?
- $active
- .one('bsTransitionEnd', next)
- .emulateTransitionEnd(Tab.TRANSITION_DURATION) :
- next()
-
- $active.removeClass('in')
- }
-
-
- // TAB PLUGIN DEFINITION
- // =====================
-
- function Plugin(option) {
- return this.each(function () {
- var $this = $(this)
- var data = $this.data('bs.tab')
-
- if (!data) $this.data('bs.tab', (data = new Tab(this)))
- if (typeof option == 'string') data[option]()
- })
- }
-
- var old = $.fn.tab
-
- $.fn.tab = Plugin
- $.fn.tab.Constructor = Tab
-
-
- // TAB NO CONFLICT
- // ===============
-
- $.fn.tab.noConflict = function () {
- $.fn.tab = old
- return this
- }
-
-
- // TAB DATA-API
- // ============
-
- var clickHandler = function (e) {
- e.preventDefault()
- Plugin.call($(this), 'show')
- }
-
- $(document)
- .on('click.bs.tab.data-api', '[data-toggle="tab"]', clickHandler)
- .on('click.bs.tab.data-api', '[data-toggle="pill"]', clickHandler)
-
-}(jQuery);
-
-/* ========================================================================
- * Bootstrap: affix.js v3.3.6
- * http://getbootstrap.com/javascript/#affix
- * ========================================================================
- * Copyright 2011-2015 Twitter, Inc.
- * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE)
- * ======================================================================== */
-
-
-+function ($) {
- 'use strict';
-
- // AFFIX CLASS DEFINITION
- // ======================
-
- var Affix = function (element, options) {
- this.options = $.extend({}, Affix.DEFAULTS, options)
-
- this.$target = $(this.options.target)
- .on('scroll.bs.affix.data-api', $.proxy(this.checkPosition, this))
- .on('click.bs.affix.data-api', $.proxy(this.checkPositionWithEventLoop, this))
-
- this.$element = $(element)
- this.affixed = null
- this.unpin = null
- this.pinnedOffset = null
-
- this.checkPosition()
- }
-
- Affix.VERSION = '3.3.6'
-
- Affix.RESET = 'affix affix-top affix-bottom'
-
- Affix.DEFAULTS = {
- offset: 0,
- target: window
- }
-
- Affix.prototype.getState = function (scrollHeight, height, offsetTop, offsetBottom) {
- var scrollTop = this.$target.scrollTop()
- var position = this.$element.offset()
- var targetHeight = this.$target.height()
-
- if (offsetTop != null && this.affixed == 'top') return scrollTop < offsetTop ? 'top' : false
-
- if (this.affixed == 'bottom') {
- if (offsetTop != null) return (scrollTop + this.unpin <= position.top) ? false : 'bottom'
- return (scrollTop + targetHeight <= scrollHeight - offsetBottom) ? false : 'bottom'
- }
-
- var initializing = this.affixed == null
- var colliderTop = initializing ? scrollTop : position.top
- var colliderHeight = initializing ? targetHeight : height
-
- if (offsetTop != null && scrollTop <= offsetTop) return 'top'
- if (offsetBottom != null && (colliderTop + colliderHeight >= scrollHeight - offsetBottom)) return 'bottom'
-
- return false
- }
-
- Affix.prototype.getPinnedOffset = function () {
- if (this.pinnedOffset) return this.pinnedOffset
- this.$element.removeClass(Affix.RESET).addClass('affix')
- var scrollTop = this.$target.scrollTop()
- var position = this.$element.offset()
- return (this.pinnedOffset = position.top - scrollTop)
- }
-
- Affix.prototype.checkPositionWithEventLoop = function () {
- setTimeout($.proxy(this.checkPosition, this), 1)
- }
-
- Affix.prototype.checkPosition = function () {
- if (!this.$element.is(':visible')) return
-
- var height = this.$element.height()
- var offset = this.options.offset
- var offsetTop = offset.top
- var offsetBottom = offset.bottom
- var scrollHeight = Math.max($(document).height(), $(document.body).height())
-
- if (typeof offset != 'object') offsetBottom = offsetTop = offset
- if (typeof offsetTop == 'function') offsetTop = offset.top(this.$element)
- if (typeof offsetBottom == 'function') offsetBottom = offset.bottom(this.$element)
-
- var affix = this.getState(scrollHeight, height, offsetTop, offsetBottom)
-
- if (this.affixed != affix) {
- if (this.unpin != null) this.$element.css('top', '')
-
- var affixType = 'affix' + (affix ? '-' + affix : '')
- var e = $.Event(affixType + '.bs.affix')
-
- this.$element.trigger(e)
-
- if (e.isDefaultPrevented()) return
-
- this.affixed = affix
- this.unpin = affix == 'bottom' ? this.getPinnedOffset() : null
-
- this.$element
- .removeClass(Affix.RESET)
- .addClass(affixType)
- .trigger(affixType.replace('affix', 'affixed') + '.bs.affix')
- }
-
- if (affix == 'bottom') {
- this.$element.offset({
- top: scrollHeight - height - offsetBottom
- })
- }
- }
-
-
- // AFFIX PLUGIN DEFINITION
- // =======================
-
- function Plugin(option) {
- return this.each(function () {
- var $this = $(this)
- var data = $this.data('bs.affix')
- var options = typeof option == 'object' && option
-
- if (!data) $this.data('bs.affix', (data = new Affix(this, options)))
- if (typeof option == 'string') data[option]()
- })
- }
-
- var old = $.fn.affix
-
- $.fn.affix = Plugin
- $.fn.affix.Constructor = Affix
-
-
- // AFFIX NO CONFLICT
- // =================
-
- $.fn.affix.noConflict = function () {
- $.fn.affix = old
- return this
- }
-
-
- // AFFIX DATA-API
- // ==============
-
- $(window).on('load', function () {
- $('[data-spy="affix"]').each(function () {
- var $spy = $(this)
- var data = $spy.data()
-
- data.offset = data.offset || {}
-
- if (data.offsetBottom != null) data.offset.bottom = data.offsetBottom
- if (data.offsetTop != null) data.offset.top = data.offsetTop
-
- Plugin.call($spy, data)
- })
- })
-
-}(jQuery);
diff --git a/bitbake/lib/toaster/toastergui/static/js/bootstrap.min.js b/bitbake/lib/toaster/toastergui/static/js/bootstrap.min.js
deleted file mode 100644
index c4a924160d..0000000000
--- a/bitbake/lib/toaster/toastergui/static/js/bootstrap.min.js
+++ /dev/null
@@ -1,7 +0,0 @@
-/*!
- * Bootstrap v3.3.6 (http://getbootstrap.com)
- * Copyright 2011-2016 Twitter, Inc.
- * Licensed under the MIT license
- */
-if("undefined"==typeof jQuery)throw new Error("Bootstrap's JavaScript requires jQuery");+function(a){"use strict";var b=a.fn.jquery.split(" ")[0].split(".");if(b[0]<2&&b[1]<9||1==b[0]&&9==b[1]&&b[2]<1||b[0]>2)throw new Error("Bootstrap's JavaScript requires jQuery version 1.9.1 or higher, but lower than version 3")}(jQuery),+function(a){"use strict";function b(){var a=document.createElement("bootstrap"),b={WebkitTransition:"webkitTransitionEnd",MozTransition:"transitionend",OTransition:"oTransitionEnd otransitionend",transition:"transitionend"};for(var c in b)if(void 0!==a.style[c])return{end:b[c]};return!1}a.fn.emulateTransitionEnd=function(b){var c=!1,d=this;a(this).one("bsTransitionEnd",function(){c=!0});var e=function(){c||a(d).trigger(a.support.transition.end)};return setTimeout(e,b),this},a(function(){a.support.transition=b(),a.support.transition&&(a.event.special.bsTransitionEnd={bindType:a.support.transition.end,delegateType:a.support.transition.end,handle:function(b){return a(b.target).is(this)?b.handleObj.handler.apply(this,arguments):void 0}})})}(jQuery),+function(a){"use strict";function b(b){return this.each(function(){var c=a(this),e=c.data("bs.alert");e||c.data("bs.alert",e=new d(this)),"string"==typeof b&&e[b].call(c)})}var c='[data-dismiss="alert"]',d=function(b){a(b).on("click",c,this.close)};d.VERSION="3.3.6",d.TRANSITION_DURATION=150,d.prototype.close=function(b){function c(){g.detach().trigger("closed.bs.alert").remove()}var e=a(this),f=e.attr("data-target");f||(f=e.attr("href"),f=f&&f.replace(/.*(?=#[^\s]*$)/,""));var g=a(f);b&&b.preventDefault(),g.length||(g=e.closest(".alert")),g.trigger(b=a.Event("close.bs.alert")),b.isDefaultPrevented()||(g.removeClass("in"),a.support.transition&&g.hasClass("fade")?g.one("bsTransitionEnd",c).emulateTransitionEnd(d.TRANSITION_DURATION):c())};var e=a.fn.alert;a.fn.alert=b,a.fn.alert.Constructor=d,a.fn.alert.noConflict=function(){return a.fn.alert=e,this},a(document).on("click.bs.alert.data-api",c,d.prototype.close)}(jQuery),+function(a){"use strict";function b(b){return this.each(function(){var d=a(this),e=d.data("bs.button"),f="object"==typeof b&&b;e||d.data("bs.button",e=new c(this,f)),"toggle"==b?e.toggle():b&&e.setState(b)})}var c=function(b,d){this.$element=a(b),this.options=a.extend({},c.DEFAULTS,d),this.isLoading=!1};c.VERSION="3.3.6",c.DEFAULTS={loadingText:"loading..."},c.prototype.setState=function(b){var c="disabled",d=this.$element,e=d.is("input")?"val":"html",f=d.data();b+="Text",null==f.resetText&&d.data("resetText",d[e]()),setTimeout(a.proxy(function(){d[e](null==f[b]?this.options[b]:f[b]),"loadingText"==b?(this.isLoading=!0,d.addClass(c).attr(c,c)):this.isLoading&&(this.isLoading=!1,d.removeClass(c).removeAttr(c))},this),0)},c.prototype.toggle=function(){var a=!0,b=this.$element.closest('[data-toggle="buttons"]');if(b.length){var c=this.$element.find("input");"radio"==c.prop("type")?(c.prop("checked")&&(a=!1),b.find(".active").removeClass("active"),this.$element.addClass("active")):"checkbox"==c.prop("type")&&(c.prop("checked")!==this.$element.hasClass("active")&&(a=!1),this.$element.toggleClass("active")),c.prop("checked",this.$element.hasClass("active")),a&&c.trigger("change")}else this.$element.attr("aria-pressed",!this.$element.hasClass("active")),this.$element.toggleClass("active")};var d=a.fn.button;a.fn.button=b,a.fn.button.Constructor=c,a.fn.button.noConflict=function(){return a.fn.button=d,this},a(document).on("click.bs.button.data-api",'[data-toggle^="button"]',function(c){var d=a(c.target);d.hasClass("btn")||(d=d.closest(".btn")),b.call(d,"toggle"),a(c.target).is('input[type="radio"]')||a(c.target).is('input[type="checkbox"]')||c.preventDefault()}).on("focus.bs.button.data-api blur.bs.button.data-api",'[data-toggle^="button"]',function(b){a(b.target).closest(".btn").toggleClass("focus",/^focus(in)?$/.test(b.type))})}(jQuery),+function(a){"use strict";function b(b){return this.each(function(){var d=a(this),e=d.data("bs.carousel"),f=a.extend({},c.DEFAULTS,d.data(),"object"==typeof b&&b),g="string"==typeof b?b:f.slide;e||d.data("bs.carousel",e=new c(this,f)),"number"==typeof b?e.to(b):g?e[g]():f.interval&&e.pause().cycle()})}var c=function(b,c){this.$element=a(b),this.$indicators=this.$element.find(".carousel-indicators"),this.options=c,this.paused=null,this.sliding=null,this.interval=null,this.$active=null,this.$items=null,this.options.keyboard&&this.$element.on("keydown.bs.carousel",a.proxy(this.keydown,this)),"hover"==this.options.pause&&!("ontouchstart"in document.documentElement)&&this.$element.on("mouseenter.bs.carousel",a.proxy(this.pause,this)).on("mouseleave.bs.carousel",a.proxy(this.cycle,this))};c.VERSION="3.3.6",c.TRANSITION_DURATION=600,c.DEFAULTS={interval:5e3,pause:"hover",wrap:!0,keyboard:!0},c.prototype.keydown=function(a){if(!/input|textarea/i.test(a.target.tagName)){switch(a.which){case 37:this.prev();break;case 39:this.next();break;default:return}a.preventDefault()}},c.prototype.cycle=function(b){return b||(this.paused=!1),this.interval&&clearInterval(this.interval),this.options.interval&&!this.paused&&(this.interval=setInterval(a.proxy(this.next,this),this.options.interval)),this},c.prototype.getItemIndex=function(a){return this.$items=a.parent().children(".item"),this.$items.index(a||this.$active)},c.prototype.getItemForDirection=function(a,b){var c=this.getItemIndex(b),d="prev"==a&&0===c||"next"==a&&c==this.$items.length-1;if(d&&!this.options.wrap)return b;var e="prev"==a?-1:1,f=(c+e)%this.$items.length;return this.$items.eq(f)},c.prototype.to=function(a){var b=this,c=this.getItemIndex(this.$active=this.$element.find(".item.active"));return a>this.$items.length-1||0>a?void 0:this.sliding?this.$element.one("slid.bs.carousel",function(){b.to(a)}):c==a?this.pause().cycle():this.slide(a>c?"next":"prev",this.$items.eq(a))},c.prototype.pause=function(b){return b||(this.paused=!0),this.$element.find(".next, .prev").length&&a.support.transition&&(this.$element.trigger(a.support.transition.end),this.cycle(!0)),this.interval=clearInterval(this.interval),this},c.prototype.next=function(){return this.sliding?void 0:this.slide("next")},c.prototype.prev=function(){return this.sliding?void 0:this.slide("prev")},c.prototype.slide=function(b,d){var e=this.$element.find(".item.active"),f=d||this.getItemForDirection(b,e),g=this.interval,h="next"==b?"left":"right",i=this;if(f.hasClass("active"))return this.sliding=!1;var j=f[0],k=a.Event("slide.bs.carousel",{relatedTarget:j,direction:h});if(this.$element.trigger(k),!k.isDefaultPrevented()){if(this.sliding=!0,g&&this.pause(),this.$indicators.length){this.$indicators.find(".active").removeClass("active");var l=a(this.$indicators.children()[this.getItemIndex(f)]);l&&l.addClass("active")}var m=a.Event("slid.bs.carousel",{relatedTarget:j,direction:h});return a.support.transition&&this.$element.hasClass("slide")?(f.addClass(b),f[0].offsetWidth,e.addClass(h),f.addClass(h),e.one("bsTransitionEnd",function(){f.removeClass([b,h].join(" ")).addClass("active"),e.removeClass(["active",h].join(" ")),i.sliding=!1,setTimeout(function(){i.$element.trigger(m)},0)}).emulateTransitionEnd(c.TRANSITION_DURATION)):(e.removeClass("active"),f.addClass("active"),this.sliding=!1,this.$element.trigger(m)),g&&this.cycle(),this}};var d=a.fn.carousel;a.fn.carousel=b,a.fn.carousel.Constructor=c,a.fn.carousel.noConflict=function(){return a.fn.carousel=d,this};var e=function(c){var d,e=a(this),f=a(e.attr("data-target")||(d=e.attr("href"))&&d.replace(/.*(?=#[^\s]+$)/,""));if(f.hasClass("carousel")){var g=a.extend({},f.data(),e.data()),h=e.attr("data-slide-to");h&&(g.interval=!1),b.call(f,g),h&&f.data("bs.carousel").to(h),c.preventDefault()}};a(document).on("click.bs.carousel.data-api","[data-slide]",e).on("click.bs.carousel.data-api","[data-slide-to]",e),a(window).on("load",function(){a('[data-ride="carousel"]').each(function(){var c=a(this);b.call(c,c.data())})})}(jQuery),+function(a){"use strict";function b(b){var c,d=b.attr("data-target")||(c=b.attr("href"))&&c.replace(/.*(?=#[^\s]+$)/,"");return a(d)}function c(b){return this.each(function(){var c=a(this),e=c.data("bs.collapse"),f=a.extend({},d.DEFAULTS,c.data(),"object"==typeof b&&b);!e&&f.toggle&&/show|hide/.test(b)&&(f.toggle=!1),e||c.data("bs.collapse",e=new d(this,f)),"string"==typeof b&&e[b]()})}var d=function(b,c){this.$element=a(b),this.options=a.extend({},d.DEFAULTS,c),this.$trigger=a('[data-toggle="collapse"][href="#'+b.id+'"],[data-toggle="collapse"][data-target="#'+b.id+'"]'),this.transitioning=null,this.options.parent?this.$parent=this.getParent():this.addAriaAndCollapsedClass(this.$element,this.$trigger),this.options.toggle&&this.toggle()};d.VERSION="3.3.6",d.TRANSITION_DURATION=350,d.DEFAULTS={toggle:!0},d.prototype.dimension=function(){var a=this.$element.hasClass("width");return a?"width":"height"},d.prototype.show=function(){if(!this.transitioning&&!this.$element.hasClass("in")){var b,e=this.$parent&&this.$parent.children(".panel").children(".in, .collapsing");if(!(e&&e.length&&(b=e.data("bs.collapse"),b&&b.transitioning))){var f=a.Event("show.bs.collapse");if(this.$element.trigger(f),!f.isDefaultPrevented()){e&&e.length&&(c.call(e,"hide"),b||e.data("bs.collapse",null));var g=this.dimension();this.$element.removeClass("collapse").addClass("collapsing")[g](0).attr("aria-expanded",!0),this.$trigger.removeClass("collapsed").attr("aria-expanded",!0),this.transitioning=1;var h=function(){this.$element.removeClass("collapsing").addClass("collapse in")[g](""),this.transitioning=0,this.$element.trigger("shown.bs.collapse")};if(!a.support.transition)return h.call(this);var i=a.camelCase(["scroll",g].join("-"));this.$element.one("bsTransitionEnd",a.proxy(h,this)).emulateTransitionEnd(d.TRANSITION_DURATION)[g](this.$element[0][i])}}}},d.prototype.hide=function(){if(!this.transitioning&&this.$element.hasClass("in")){var b=a.Event("hide.bs.collapse");if(this.$element.trigger(b),!b.isDefaultPrevented()){var c=this.dimension();this.$element[c](this.$element[c]())[0].offsetHeight,this.$element.addClass("collapsing").removeClass("collapse in").attr("aria-expanded",!1),this.$trigger.addClass("collapsed").attr("aria-expanded",!1),this.transitioning=1;var e=function(){this.transitioning=0,this.$element.removeClass("collapsing").addClass("collapse").trigger("hidden.bs.collapse")};return a.support.transition?void this.$element[c](0).one("bsTransitionEnd",a.proxy(e,this)).emulateTransitionEnd(d.TRANSITION_DURATION):e.call(this)}}},d.prototype.toggle=function(){this[this.$element.hasClass("in")?"hide":"show"]()},d.prototype.getParent=function(){return a(this.options.parent).find('[data-toggle="collapse"][data-parent="'+this.options.parent+'"]').each(a.proxy(function(c,d){var e=a(d);this.addAriaAndCollapsedClass(b(e),e)},this)).end()},d.prototype.addAriaAndCollapsedClass=function(a,b){var c=a.hasClass("in");a.attr("aria-expanded",c),b.toggleClass("collapsed",!c).attr("aria-expanded",c)};var e=a.fn.collapse;a.fn.collapse=c,a.fn.collapse.Constructor=d,a.fn.collapse.noConflict=function(){return a.fn.collapse=e,this},a(document).on("click.bs.collapse.data-api",'[data-toggle="collapse"]',function(d){var e=a(this);e.attr("data-target")||d.preventDefault();var f=b(e),g=f.data("bs.collapse"),h=g?"toggle":e.data();c.call(f,h)})}(jQuery),+function(a){"use strict";function b(b){var c=b.attr("data-target");c||(c=b.attr("href"),c=c&&/#[A-Za-z]/.test(c)&&c.replace(/.*(?=#[^\s]*$)/,""));var d=c&&a(c);return d&&d.length?d:b.parent()}function c(c){c&&3===c.which||(a(e).remove(),a(f).each(function(){var d=a(this),e=b(d),f={relatedTarget:this};e.hasClass("open")&&(c&&"click"==c.type&&/input|textarea/i.test(c.target.tagName)&&a.contains(e[0],c.target)||(e.trigger(c=a.Event("hide.bs.dropdown",f)),c.isDefaultPrevented()||(d.attr("aria-expanded","false"),e.removeClass("open").trigger(a.Event("hidden.bs.dropdown",f)))))}))}function d(b){return this.each(function(){var c=a(this),d=c.data("bs.dropdown");d||c.data("bs.dropdown",d=new g(this)),"string"==typeof b&&d[b].call(c)})}var e=".dropdown-backdrop",f='[data-toggle="dropdown"]',g=function(b){a(b).on("click.bs.dropdown",this.toggle)};g.VERSION="3.3.6",g.prototype.toggle=function(d){var e=a(this);if(!e.is(".disabled, :disabled")){var f=b(e),g=f.hasClass("open");if(c(),!g){"ontouchstart"in document.documentElement&&!f.closest(".navbar-nav").length&&a(document.createElement("div")).addClass("dropdown-backdrop").insertAfter(a(this)).on("click",c);var h={relatedTarget:this};if(f.trigger(d=a.Event("show.bs.dropdown",h)),d.isDefaultPrevented())return;e.trigger("focus").attr("aria-expanded","true"),f.toggleClass("open").trigger(a.Event("shown.bs.dropdown",h))}return!1}},g.prototype.keydown=function(c){if(/(38|40|27|32)/.test(c.which)&&!/input|textarea/i.test(c.target.tagName)){var d=a(this);if(c.preventDefault(),c.stopPropagation(),!d.is(".disabled, :disabled")){var e=b(d),g=e.hasClass("open");if(!g&&27!=c.which||g&&27==c.which)return 27==c.which&&e.find(f).trigger("focus"),d.trigger("click");var h=" li:not(.disabled):visible a",i=e.find(".dropdown-menu"+h);if(i.length){var j=i.index(c.target);38==c.which&&j>0&&j--,40==c.which&&j<i.length-1&&j++,~j||(j=0),i.eq(j).trigger("focus")}}}};var h=a.fn.dropdown;a.fn.dropdown=d,a.fn.dropdown.Constructor=g,a.fn.dropdown.noConflict=function(){return a.fn.dropdown=h,this},a(document).on("click.bs.dropdown.data-api",c).on("click.bs.dropdown.data-api",".dropdown form",function(a){a.stopPropagation()}).on("click.bs.dropdown.data-api",f,g.prototype.toggle).on("keydown.bs.dropdown.data-api",f,g.prototype.keydown).on("keydown.bs.dropdown.data-api",".dropdown-menu",g.prototype.keydown)}(jQuery),+function(a){"use strict";function b(b,d){return this.each(function(){var e=a(this),f=e.data("bs.modal"),g=a.extend({},c.DEFAULTS,e.data(),"object"==typeof b&&b);f||e.data("bs.modal",f=new c(this,g)),"string"==typeof b?f[b](d):g.show&&f.show(d)})}var c=function(b,c){this.options=c,this.$body=a(document.body),this.$element=a(b),this.$dialog=this.$element.find(".modal-dialog"),this.$backdrop=null,this.isShown=null,this.originalBodyPad=null,this.scrollbarWidth=0,this.ignoreBackdropClick=!1,this.options.remote&&this.$element.find(".modal-content").load(this.options.remote,a.proxy(function(){this.$element.trigger("loaded.bs.modal")},this))};c.VERSION="3.3.6",c.TRANSITION_DURATION=300,c.BACKDROP_TRANSITION_DURATION=150,c.DEFAULTS={backdrop:!0,keyboard:!0,show:!0},c.prototype.toggle=function(a){return this.isShown?this.hide():this.show(a)},c.prototype.show=function(b){var d=this,e=a.Event("show.bs.modal",{relatedTarget:b});this.$element.trigger(e),this.isShown||e.isDefaultPrevented()||(this.isShown=!0,this.checkScrollbar(),this.setScrollbar(),this.$body.addClass("modal-open"),this.escape(),this.resize(),this.$element.on("click.dismiss.bs.modal",'[data-dismiss="modal"]',a.proxy(this.hide,this)),this.$dialog.on("mousedown.dismiss.bs.modal",function(){d.$element.one("mouseup.dismiss.bs.modal",function(b){a(b.target).is(d.$element)&&(d.ignoreBackdropClick=!0)})}),this.backdrop(function(){var e=a.support.transition&&d.$element.hasClass("fade");d.$element.parent().length||d.$element.appendTo(d.$body),d.$element.show().scrollTop(0),d.adjustDialog(),e&&d.$element[0].offsetWidth,d.$element.addClass("in"),d.enforceFocus();var f=a.Event("shown.bs.modal",{relatedTarget:b});e?d.$dialog.one("bsTransitionEnd",function(){d.$element.trigger("focus").trigger(f)}).emulateTransitionEnd(c.TRANSITION_DURATION):d.$element.trigger("focus").trigger(f)}))},c.prototype.hide=function(b){b&&b.preventDefault(),b=a.Event("hide.bs.modal"),this.$element.trigger(b),this.isShown&&!b.isDefaultPrevented()&&(this.isShown=!1,this.escape(),this.resize(),a(document).off("focusin.bs.modal"),this.$element.removeClass("in").off("click.dismiss.bs.modal").off("mouseup.dismiss.bs.modal"),this.$dialog.off("mousedown.dismiss.bs.modal"),a.support.transition&&this.$element.hasClass("fade")?this.$element.one("bsTransitionEnd",a.proxy(this.hideModal,this)).emulateTransitionEnd(c.TRANSITION_DURATION):this.hideModal())},c.prototype.enforceFocus=function(){a(document).off("focusin.bs.modal").on("focusin.bs.modal",a.proxy(function(a){this.$element[0]===a.target||this.$element.has(a.target).length||this.$element.trigger("focus")},this))},c.prototype.escape=function(){this.isShown&&this.options.keyboard?this.$element.on("keydown.dismiss.bs.modal",a.proxy(function(a){27==a.which&&this.hide()},this)):this.isShown||this.$element.off("keydown.dismiss.bs.modal")},c.prototype.resize=function(){this.isShown?a(window).on("resize.bs.modal",a.proxy(this.handleUpdate,this)):a(window).off("resize.bs.modal")},c.prototype.hideModal=function(){var a=this;this.$element.hide(),this.backdrop(function(){a.$body.removeClass("modal-open"),a.resetAdjustments(),a.resetScrollbar(),a.$element.trigger("hidden.bs.modal")})},c.prototype.removeBackdrop=function(){this.$backdrop&&this.$backdrop.remove(),this.$backdrop=null},c.prototype.backdrop=function(b){var d=this,e=this.$element.hasClass("fade")?"fade":"";if(this.isShown&&this.options.backdrop){var f=a.support.transition&&e;if(this.$backdrop=a(document.createElement("div")).addClass("modal-backdrop "+e).appendTo(this.$body),this.$element.on("click.dismiss.bs.modal",a.proxy(function(a){return this.ignoreBackdropClick?void(this.ignoreBackdropClick=!1):void(a.target===a.currentTarget&&("static"==this.options.backdrop?this.$element[0].focus():this.hide()))},this)),f&&this.$backdrop[0].offsetWidth,this.$backdrop.addClass("in"),!b)return;f?this.$backdrop.one("bsTransitionEnd",b).emulateTransitionEnd(c.BACKDROP_TRANSITION_DURATION):b()}else if(!this.isShown&&this.$backdrop){this.$backdrop.removeClass("in");var g=function(){d.removeBackdrop(),b&&b()};a.support.transition&&this.$element.hasClass("fade")?this.$backdrop.one("bsTransitionEnd",g).emulateTransitionEnd(c.BACKDROP_TRANSITION_DURATION):g()}else b&&b()},c.prototype.handleUpdate=function(){this.adjustDialog()},c.prototype.adjustDialog=function(){var a=this.$element[0].scrollHeight>document.documentElement.clientHeight;this.$element.css({paddingLeft:!this.bodyIsOverflowing&&a?this.scrollbarWidth:"",paddingRight:this.bodyIsOverflowing&&!a?this.scrollbarWidth:""})},c.prototype.resetAdjustments=function(){this.$element.css({paddingLeft:"",paddingRight:""})},c.prototype.checkScrollbar=function(){var a=window.innerWidth;if(!a){var b=document.documentElement.getBoundingClientRect();a=b.right-Math.abs(b.left)}this.bodyIsOverflowing=document.body.clientWidth<a,this.scrollbarWidth=this.measureScrollbar()},c.prototype.setScrollbar=function(){var a=parseInt(this.$body.css("padding-right")||0,10);this.originalBodyPad=document.body.style.paddingRight||"",this.bodyIsOverflowing&&this.$body.css("padding-right",a+this.scrollbarWidth)},c.prototype.resetScrollbar=function(){this.$body.css("padding-right",this.originalBodyPad)},c.prototype.measureScrollbar=function(){var a=document.createElement("div");a.className="modal-scrollbar-measure",this.$body.append(a);var b=a.offsetWidth-a.clientWidth;return this.$body[0].removeChild(a),b};var d=a.fn.modal;a.fn.modal=b,a.fn.modal.Constructor=c,a.fn.modal.noConflict=function(){return a.fn.modal=d,this},a(document).on("click.bs.modal.data-api",'[data-toggle="modal"]',function(c){var d=a(this),e=d.attr("href"),f=a(d.attr("data-target")||e&&e.replace(/.*(?=#[^\s]+$)/,"")),g=f.data("bs.modal")?"toggle":a.extend({remote:!/#/.test(e)&&e},f.data(),d.data());d.is("a")&&c.preventDefault(),f.one("show.bs.modal",function(a){a.isDefaultPrevented()||f.one("hidden.bs.modal",function(){d.is(":visible")&&d.trigger("focus")})}),b.call(f,g,this)})}(jQuery),+function(a){"use strict";function b(b){return this.each(function(){var d=a(this),e=d.data("bs.tooltip"),f="object"==typeof b&&b;!e&&/destroy|hide/.test(b)||(e||d.data("bs.tooltip",e=new c(this,f)),"string"==typeof b&&e[b]())})}var c=function(a,b){this.type=null,this.options=null,this.enabled=null,this.timeout=null,this.hoverState=null,this.$element=null,this.inState=null,this.init("tooltip",a,b)};c.VERSION="3.3.6",c.TRANSITION_DURATION=150,c.DEFAULTS={animation:!0,placement:"top",selector:!1,template:'<div class="tooltip" role="tooltip"><div class="tooltip-arrow"></div><div class="tooltip-inner"></div></div>',trigger:"hover focus",title:"",delay:0,html:!1,container:!1,viewport:{selector:"body",padding:0}},c.prototype.init=function(b,c,d){if(this.enabled=!0,this.type=b,this.$element=a(c),this.options=this.getOptions(d),this.$viewport=this.options.viewport&&a(a.isFunction(this.options.viewport)?this.options.viewport.call(this,this.$element):this.options.viewport.selector||this.options.viewport),this.inState={click:!1,hover:!1,focus:!1},this.$element[0]instanceof document.constructor&&!this.options.selector)throw new Error("`selector` option must be specified when initializing "+this.type+" on the window.document object!");for(var e=this.options.trigger.split(" "),f=e.length;f--;){var g=e[f];if("click"==g)this.$element.on("click."+this.type,this.options.selector,a.proxy(this.toggle,this));else if("manual"!=g){var h="hover"==g?"mouseenter":"focusin",i="hover"==g?"mouseleave":"focusout";this.$element.on(h+"."+this.type,this.options.selector,a.proxy(this.enter,this)),this.$element.on(i+"."+this.type,this.options.selector,a.proxy(this.leave,this))}}this.options.selector?this._options=a.extend({},this.options,{trigger:"manual",selector:""}):this.fixTitle()},c.prototype.getDefaults=function(){return c.DEFAULTS},c.prototype.getOptions=function(b){return b=a.extend({},this.getDefaults(),this.$element.data(),b),b.delay&&"number"==typeof b.delay&&(b.delay={show:b.delay,hide:b.delay}),b},c.prototype.getDelegateOptions=function(){var b={},c=this.getDefaults();return this._options&&a.each(this._options,function(a,d){c[a]!=d&&(b[a]=d)}),b},c.prototype.enter=function(b){var c=b instanceof this.constructor?b:a(b.currentTarget).data("bs."+this.type);return c||(c=new this.constructor(b.currentTarget,this.getDelegateOptions()),a(b.currentTarget).data("bs."+this.type,c)),b instanceof a.Event&&(c.inState["focusin"==b.type?"focus":"hover"]=!0),c.tip().hasClass("in")||"in"==c.hoverState?void(c.hoverState="in"):(clearTimeout(c.timeout),c.hoverState="in",c.options.delay&&c.options.delay.show?void(c.timeout=setTimeout(function(){"in"==c.hoverState&&c.show()},c.options.delay.show)):c.show())},c.prototype.isInStateTrue=function(){for(var a in this.inState)if(this.inState[a])return!0;return!1},c.prototype.leave=function(b){var c=b instanceof this.constructor?b:a(b.currentTarget).data("bs."+this.type);return c||(c=new this.constructor(b.currentTarget,this.getDelegateOptions()),a(b.currentTarget).data("bs."+this.type,c)),b instanceof a.Event&&(c.inState["focusout"==b.type?"focus":"hover"]=!1),c.isInStateTrue()?void 0:(clearTimeout(c.timeout),c.hoverState="out",c.options.delay&&c.options.delay.hide?void(c.timeout=setTimeout(function(){"out"==c.hoverState&&c.hide()},c.options.delay.hide)):c.hide())},c.prototype.show=function(){var b=a.Event("show.bs."+this.type);if(this.hasContent()&&this.enabled){this.$element.trigger(b);var d=a.contains(this.$element[0].ownerDocument.documentElement,this.$element[0]);if(b.isDefaultPrevented()||!d)return;var e=this,f=this.tip(),g=this.getUID(this.type);this.setContent(),f.attr("id",g),this.$element.attr("aria-describedby",g),this.options.animation&&f.addClass("fade");var h="function"==typeof this.options.placement?this.options.placement.call(this,f[0],this.$element[0]):this.options.placement,i=/\s?auto?\s?/i,j=i.test(h);j&&(h=h.replace(i,"")||"top"),f.detach().css({top:0,left:0,display:"block"}).addClass(h).data("bs."+this.type,this),this.options.container?f.appendTo(this.options.container):f.insertAfter(this.$element),this.$element.trigger("inserted.bs."+this.type);var k=this.getPosition(),l=f[0].offsetWidth,m=f[0].offsetHeight;if(j){var n=h,o=this.getPosition(this.$viewport);h="bottom"==h&&k.bottom+m>o.bottom?"top":"top"==h&&k.top-m<o.top?"bottom":"right"==h&&k.right+l>o.width?"left":"left"==h&&k.left-l<o.left?"right":h,f.removeClass(n).addClass(h)}var p=this.getCalculatedOffset(h,k,l,m);this.applyPlacement(p,h);var q=function(){var a=e.hoverState;e.$element.trigger("shown.bs."+e.type),e.hoverState=null,"out"==a&&e.leave(e)};a.support.transition&&this.$tip.hasClass("fade")?f.one("bsTransitionEnd",q).emulateTransitionEnd(c.TRANSITION_DURATION):q()}},c.prototype.applyPlacement=function(b,c){var d=this.tip(),e=d[0].offsetWidth,f=d[0].offsetHeight,g=parseInt(d.css("margin-top"),10),h=parseInt(d.css("margin-left"),10);isNaN(g)&&(g=0),isNaN(h)&&(h=0),b.top+=g,b.left+=h,a.offset.setOffset(d[0],a.extend({using:function(a){d.css({top:Math.round(a.top),left:Math.round(a.left)})}},b),0),d.addClass("in");var i=d[0].offsetWidth,j=d[0].offsetHeight;"top"==c&&j!=f&&(b.top=b.top+f-j);var k=this.getViewportAdjustedDelta(c,b,i,j);k.left?b.left+=k.left:b.top+=k.top;var l=/top|bottom/.test(c),m=l?2*k.left-e+i:2*k.top-f+j,n=l?"offsetWidth":"offsetHeight";d.offset(b),this.replaceArrow(m,d[0][n],l)},c.prototype.replaceArrow=function(a,b,c){this.arrow().css(c?"left":"top",50*(1-a/b)+"%").css(c?"top":"left","")},c.prototype.setContent=function(){var a=this.tip(),b=this.getTitle();a.find(".tooltip-inner")[this.options.html?"html":"text"](b),a.removeClass("fade in top bottom left right")},c.prototype.hide=function(b){function d(){"in"!=e.hoverState&&f.detach(),e.$element.removeAttr("aria-describedby").trigger("hidden.bs."+e.type),b&&b()}var e=this,f=a(this.$tip),g=a.Event("hide.bs."+this.type);return this.$element.trigger(g),g.isDefaultPrevented()?void 0:(f.removeClass("in"),a.support.transition&&f.hasClass("fade")?f.one("bsTransitionEnd",d).emulateTransitionEnd(c.TRANSITION_DURATION):d(),this.hoverState=null,this)},c.prototype.fixTitle=function(){var a=this.$element;(a.attr("title")||"string"!=typeof a.attr("data-original-title"))&&a.attr("data-original-title",a.attr("title")||"").attr("title","")},c.prototype.hasContent=function(){return this.getTitle()},c.prototype.getPosition=function(b){b=b||this.$element;var c=b[0],d="BODY"==c.tagName,e=c.getBoundingClientRect();null==e.width&&(e=a.extend({},e,{width:e.right-e.left,height:e.bottom-e.top}));var f=d?{top:0,left:0}:b.offset(),g={scroll:d?document.documentElement.scrollTop||document.body.scrollTop:b.scrollTop()},h=d?{width:a(window).width(),height:a(window).height()}:null;return a.extend({},e,g,h,f)},c.prototype.getCalculatedOffset=function(a,b,c,d){return"bottom"==a?{top:b.top+b.height,left:b.left+b.width/2-c/2}:"top"==a?{top:b.top-d,left:b.left+b.width/2-c/2}:"left"==a?{top:b.top+b.height/2-d/2,left:b.left-c}:{top:b.top+b.height/2-d/2,left:b.left+b.width}},c.prototype.getViewportAdjustedDelta=function(a,b,c,d){var e={top:0,left:0};if(!this.$viewport)return e;var f=this.options.viewport&&this.options.viewport.padding||0,g=this.getPosition(this.$viewport);if(/right|left/.test(a)){var h=b.top-f-g.scroll,i=b.top+f-g.scroll+d;h<g.top?e.top=g.top-h:i>g.top+g.height&&(e.top=g.top+g.height-i)}else{var j=b.left-f,k=b.left+f+c;j<g.left?e.left=g.left-j:k>g.right&&(e.left=g.left+g.width-k)}return e},c.prototype.getTitle=function(){var a,b=this.$element,c=this.options;return a=b.attr("data-original-title")||("function"==typeof c.title?c.title.call(b[0]):c.title)},c.prototype.getUID=function(a){do a+=~~(1e6*Math.random());while(document.getElementById(a));return a},c.prototype.tip=function(){if(!this.$tip&&(this.$tip=a(this.options.template),1!=this.$tip.length))throw new Error(this.type+" `template` option must consist of exactly 1 top-level element!");return this.$tip},c.prototype.arrow=function(){return this.$arrow=this.$arrow||this.tip().find(".tooltip-arrow")},c.prototype.enable=function(){this.enabled=!0},c.prototype.disable=function(){this.enabled=!1},c.prototype.toggleEnabled=function(){this.enabled=!this.enabled},c.prototype.toggle=function(b){var c=this;b&&(c=a(b.currentTarget).data("bs."+this.type),c||(c=new this.constructor(b.currentTarget,this.getDelegateOptions()),a(b.currentTarget).data("bs."+this.type,c))),b?(c.inState.click=!c.inState.click,c.isInStateTrue()?c.enter(c):c.leave(c)):c.tip().hasClass("in")?c.leave(c):c.enter(c)},c.prototype.destroy=function(){var a=this;clearTimeout(this.timeout),this.hide(function(){a.$element.off("."+a.type).removeData("bs."+a.type),a.$tip&&a.$tip.detach(),a.$tip=null,a.$arrow=null,a.$viewport=null})};var d=a.fn.tooltip;a.fn.tooltip=b,a.fn.tooltip.Constructor=c,a.fn.tooltip.noConflict=function(){return a.fn.tooltip=d,this}}(jQuery),+function(a){"use strict";function b(b){return this.each(function(){var d=a(this),e=d.data("bs.popover"),f="object"==typeof b&&b;!e&&/destroy|hide/.test(b)||(e||d.data("bs.popover",e=new c(this,f)),"string"==typeof b&&e[b]())})}var c=function(a,b){this.init("popover",a,b)};if(!a.fn.tooltip)throw new Error("Popover requires tooltip.js");c.VERSION="3.3.6",c.DEFAULTS=a.extend({},a.fn.tooltip.Constructor.DEFAULTS,{placement:"right",trigger:"click",content:"",template:'<div class="popover" role="tooltip"><div class="arrow"></div><h3 class="popover-title"></h3><div class="popover-content"></div></div>'}),c.prototype=a.extend({},a.fn.tooltip.Constructor.prototype),c.prototype.constructor=c,c.prototype.getDefaults=function(){return c.DEFAULTS},c.prototype.setContent=function(){var a=this.tip(),b=this.getTitle(),c=this.getContent();a.find(".popover-title")[this.options.html?"html":"text"](b),a.find(".popover-content").children().detach().end()[this.options.html?"string"==typeof c?"html":"append":"text"](c),a.removeClass("fade top bottom left right in"),a.find(".popover-title").html()||a.find(".popover-title").hide()},c.prototype.hasContent=function(){return this.getTitle()||this.getContent()},c.prototype.getContent=function(){var a=this.$element,b=this.options;return a.attr("data-content")||("function"==typeof b.content?b.content.call(a[0]):b.content)},c.prototype.arrow=function(){return this.$arrow=this.$arrow||this.tip().find(".arrow")};var d=a.fn.popover;a.fn.popover=b,a.fn.popover.Constructor=c,a.fn.popover.noConflict=function(){return a.fn.popover=d,this}}(jQuery),+function(a){"use strict";function b(c,d){this.$body=a(document.body),this.$scrollElement=a(a(c).is(document.body)?window:c),this.options=a.extend({},b.DEFAULTS,d),this.selector=(this.options.target||"")+" .nav li > a",this.offsets=[],this.targets=[],this.activeTarget=null,this.scrollHeight=0,this.$scrollElement.on("scroll.bs.scrollspy",a.proxy(this.process,this)),this.refresh(),this.process()}function c(c){return this.each(function(){var d=a(this),e=d.data("bs.scrollspy"),f="object"==typeof c&&c;e||d.data("bs.scrollspy",e=new b(this,f)),"string"==typeof c&&e[c]()})}b.VERSION="3.3.6",b.DEFAULTS={offset:10},b.prototype.getScrollHeight=function(){return this.$scrollElement[0].scrollHeight||Math.max(this.$body[0].scrollHeight,document.documentElement.scrollHeight)},b.prototype.refresh=function(){var b=this,c="offset",d=0;this.offsets=[],this.targets=[],this.scrollHeight=this.getScrollHeight(),a.isWindow(this.$scrollElement[0])||(c="position",d=this.$scrollElement.scrollTop()),this.$body.find(this.selector).map(function(){var b=a(this),e=b.data("target")||b.attr("href"),f=/^#./.test(e)&&a(e);return f&&f.length&&f.is(":visible")&&[[f[c]().top+d,e]]||null}).sort(function(a,b){return a[0]-b[0]}).each(function(){b.offsets.push(this[0]),b.targets.push(this[1])})},b.prototype.process=function(){var a,b=this.$scrollElement.scrollTop()+this.options.offset,c=this.getScrollHeight(),d=this.options.offset+c-this.$scrollElement.height(),e=this.offsets,f=this.targets,g=this.activeTarget;if(this.scrollHeight!=c&&this.refresh(),b>=d)return g!=(a=f[f.length-1])&&this.activate(a);if(g&&b<e[0])return this.activeTarget=null,this.clear();for(a=e.length;a--;)g!=f[a]&&b>=e[a]&&(void 0===e[a+1]||b<e[a+1])&&this.activate(f[a])},b.prototype.activate=function(b){this.activeTarget=b,this.clear();var c=this.selector+'[data-target="'+b+'"],'+this.selector+'[href="'+b+'"]',d=a(c).parents("li").addClass("active");d.parent(".dropdown-menu").length&&(d=d.closest("li.dropdown").addClass("active")),
-d.trigger("activate.bs.scrollspy")},b.prototype.clear=function(){a(this.selector).parentsUntil(this.options.target,".active").removeClass("active")};var d=a.fn.scrollspy;a.fn.scrollspy=c,a.fn.scrollspy.Constructor=b,a.fn.scrollspy.noConflict=function(){return a.fn.scrollspy=d,this},a(window).on("load.bs.scrollspy.data-api",function(){a('[data-spy="scroll"]').each(function(){var b=a(this);c.call(b,b.data())})})}(jQuery),+function(a){"use strict";function b(b){return this.each(function(){var d=a(this),e=d.data("bs.tab");e||d.data("bs.tab",e=new c(this)),"string"==typeof b&&e[b]()})}var c=function(b){this.element=a(b)};c.VERSION="3.3.6",c.TRANSITION_DURATION=150,c.prototype.show=function(){var b=this.element,c=b.closest("ul:not(.dropdown-menu)"),d=b.data("target");if(d||(d=b.attr("href"),d=d&&d.replace(/.*(?=#[^\s]*$)/,"")),!b.parent("li").hasClass("active")){var e=c.find(".active:last a"),f=a.Event("hide.bs.tab",{relatedTarget:b[0]}),g=a.Event("show.bs.tab",{relatedTarget:e[0]});if(e.trigger(f),b.trigger(g),!g.isDefaultPrevented()&&!f.isDefaultPrevented()){var h=a(d);this.activate(b.closest("li"),c),this.activate(h,h.parent(),function(){e.trigger({type:"hidden.bs.tab",relatedTarget:b[0]}),b.trigger({type:"shown.bs.tab",relatedTarget:e[0]})})}}},c.prototype.activate=function(b,d,e){function f(){g.removeClass("active").find("> .dropdown-menu > .active").removeClass("active").end().find('[data-toggle="tab"]').attr("aria-expanded",!1),b.addClass("active").find('[data-toggle="tab"]').attr("aria-expanded",!0),h?(b[0].offsetWidth,b.addClass("in")):b.removeClass("fade"),b.parent(".dropdown-menu").length&&b.closest("li.dropdown").addClass("active").end().find('[data-toggle="tab"]').attr("aria-expanded",!0),e&&e()}var g=d.find("> .active"),h=e&&a.support.transition&&(g.length&&g.hasClass("fade")||!!d.find("> .fade").length);g.length&&h?g.one("bsTransitionEnd",f).emulateTransitionEnd(c.TRANSITION_DURATION):f(),g.removeClass("in")};var d=a.fn.tab;a.fn.tab=b,a.fn.tab.Constructor=c,a.fn.tab.noConflict=function(){return a.fn.tab=d,this};var e=function(c){c.preventDefault(),b.call(a(this),"show")};a(document).on("click.bs.tab.data-api",'[data-toggle="tab"]',e).on("click.bs.tab.data-api",'[data-toggle="pill"]',e)}(jQuery),+function(a){"use strict";function b(b){return this.each(function(){var d=a(this),e=d.data("bs.affix"),f="object"==typeof b&&b;e||d.data("bs.affix",e=new c(this,f)),"string"==typeof b&&e[b]()})}var c=function(b,d){this.options=a.extend({},c.DEFAULTS,d),this.$target=a(this.options.target).on("scroll.bs.affix.data-api",a.proxy(this.checkPosition,this)).on("click.bs.affix.data-api",a.proxy(this.checkPositionWithEventLoop,this)),this.$element=a(b),this.affixed=null,this.unpin=null,this.pinnedOffset=null,this.checkPosition()};c.VERSION="3.3.6",c.RESET="affix affix-top affix-bottom",c.DEFAULTS={offset:0,target:window},c.prototype.getState=function(a,b,c,d){var e=this.$target.scrollTop(),f=this.$element.offset(),g=this.$target.height();if(null!=c&&"top"==this.affixed)return c>e?"top":!1;if("bottom"==this.affixed)return null!=c?e+this.unpin<=f.top?!1:"bottom":a-d>=e+g?!1:"bottom";var h=null==this.affixed,i=h?e:f.top,j=h?g:b;return null!=c&&c>=e?"top":null!=d&&i+j>=a-d?"bottom":!1},c.prototype.getPinnedOffset=function(){if(this.pinnedOffset)return this.pinnedOffset;this.$element.removeClass(c.RESET).addClass("affix");var a=this.$target.scrollTop(),b=this.$element.offset();return this.pinnedOffset=b.top-a},c.prototype.checkPositionWithEventLoop=function(){setTimeout(a.proxy(this.checkPosition,this),1)},c.prototype.checkPosition=function(){if(this.$element.is(":visible")){var b=this.$element.height(),d=this.options.offset,e=d.top,f=d.bottom,g=Math.max(a(document).height(),a(document.body).height());"object"!=typeof d&&(f=e=d),"function"==typeof e&&(e=d.top(this.$element)),"function"==typeof f&&(f=d.bottom(this.$element));var h=this.getState(g,b,e,f);if(this.affixed!=h){null!=this.unpin&&this.$element.css("top","");var i="affix"+(h?"-"+h:""),j=a.Event(i+".bs.affix");if(this.$element.trigger(j),j.isDefaultPrevented())return;this.affixed=h,this.unpin="bottom"==h?this.getPinnedOffset():null,this.$element.removeClass(c.RESET).addClass(i).trigger(i.replace("affix","affixed")+".bs.affix")}"bottom"==h&&this.$element.offset({top:g-b-f})}};var d=a.fn.affix;a.fn.affix=b,a.fn.affix.Constructor=c,a.fn.affix.noConflict=function(){return a.fn.affix=d,this},a(window).on("load",function(){a('[data-spy="affix"]').each(function(){var c=a(this),d=c.data();d.offset=d.offset||{},null!=d.offsetBottom&&(d.offset.bottom=d.offsetBottom),null!=d.offsetTop&&(d.offset.top=d.offsetTop),b.call(c,d)})})}(jQuery); \ No newline at end of file
diff --git a/bitbake/lib/toaster/toastergui/static/js/jquery-3.7.1.min.js b/bitbake/lib/toaster/toastergui/static/js/jquery-3.7.1.min.js
new file mode 100644
index 0000000000..7f37b5d991
--- /dev/null
+++ b/bitbake/lib/toaster/toastergui/static/js/jquery-3.7.1.min.js
@@ -0,0 +1,2 @@
+/*! jQuery v3.7.1 | (c) OpenJS Foundation and other contributors | jquery.org/license */
+!function(e,t){"use strict";"object"==typeof module&&"object"==typeof module.exports?module.exports=e.document?t(e,!0):function(e){if(!e.document)throw new Error("jQuery requires a window with a document");return t(e)}:t(e)}("undefined"!=typeof window?window:this,function(ie,e){"use strict";var oe=[],r=Object.getPrototypeOf,ae=oe.slice,g=oe.flat?function(e){return oe.flat.call(e)}:function(e){return oe.concat.apply([],e)},s=oe.push,se=oe.indexOf,n={},i=n.toString,ue=n.hasOwnProperty,o=ue.toString,a=o.call(Object),le={},v=function(e){return"function"==typeof e&&"number"!=typeof e.nodeType&&"function"!=typeof e.item},y=function(e){return null!=e&&e===e.window},C=ie.document,u={type:!0,src:!0,nonce:!0,noModule:!0};function m(e,t,n){var r,i,o=(n=n||C).createElement("script");if(o.text=e,t)for(r in u)(i=t[r]||t.getAttribute&&t.getAttribute(r))&&o.setAttribute(r,i);n.head.appendChild(o).parentNode.removeChild(o)}function x(e){return null==e?e+"":"object"==typeof e||"function"==typeof e?n[i.call(e)]||"object":typeof e}var t="3.7.1",l=/HTML$/i,ce=function(e,t){return new ce.fn.init(e,t)};function c(e){var t=!!e&&"length"in e&&e.length,n=x(e);return!v(e)&&!y(e)&&("array"===n||0===t||"number"==typeof t&&0<t&&t-1 in e)}function fe(e,t){return e.nodeName&&e.nodeName.toLowerCase()===t.toLowerCase()}ce.fn=ce.prototype={jquery:t,constructor:ce,length:0,toArray:function(){return ae.call(this)},get:function(e){return null==e?ae.call(this):e<0?this[e+this.length]:this[e]},pushStack:function(e){var t=ce.merge(this.constructor(),e);return t.prevObject=this,t},each:function(e){return ce.each(this,e)},map:function(n){return this.pushStack(ce.map(this,function(e,t){return n.call(e,t,e)}))},slice:function(){return this.pushStack(ae.apply(this,arguments))},first:function(){return this.eq(0)},last:function(){return this.eq(-1)},even:function(){return this.pushStack(ce.grep(this,function(e,t){return(t+1)%2}))},odd:function(){return this.pushStack(ce.grep(this,function(e,t){return t%2}))},eq:function(e){var t=this.length,n=+e+(e<0?t:0);return this.pushStack(0<=n&&n<t?[this[n]]:[])},end:function(){return this.prevObject||this.constructor()},push:s,sort:oe.sort,splice:oe.splice},ce.extend=ce.fn.extend=function(){var e,t,n,r,i,o,a=arguments[0]||{},s=1,u=arguments.length,l=!1;for("boolean"==typeof a&&(l=a,a=arguments[s]||{},s++),"object"==typeof a||v(a)||(a={}),s===u&&(a=this,s--);s<u;s++)if(null!=(e=arguments[s]))for(t in e)r=e[t],"__proto__"!==t&&a!==r&&(l&&r&&(ce.isPlainObject(r)||(i=Array.isArray(r)))?(n=a[t],o=i&&!Array.isArray(n)?[]:i||ce.isPlainObject(n)?n:{},i=!1,a[t]=ce.extend(l,o,r)):void 0!==r&&(a[t]=r));return a},ce.extend({expando:"jQuery"+(t+Math.random()).replace(/\D/g,""),isReady:!0,error:function(e){throw new Error(e)},noop:function(){},isPlainObject:function(e){var t,n;return!(!e||"[object Object]"!==i.call(e))&&(!(t=r(e))||"function"==typeof(n=ue.call(t,"constructor")&&t.constructor)&&o.call(n)===a)},isEmptyObject:function(e){var t;for(t in e)return!1;return!0},globalEval:function(e,t,n){m(e,{nonce:t&&t.nonce},n)},each:function(e,t){var n,r=0;if(c(e)){for(n=e.length;r<n;r++)if(!1===t.call(e[r],r,e[r]))break}else for(r in e)if(!1===t.call(e[r],r,e[r]))break;return e},text:function(e){var t,n="",r=0,i=e.nodeType;if(!i)while(t=e[r++])n+=ce.text(t);return 1===i||11===i?e.textContent:9===i?e.documentElement.textContent:3===i||4===i?e.nodeValue:n},makeArray:function(e,t){var n=t||[];return null!=e&&(c(Object(e))?ce.merge(n,"string"==typeof e?[e]:e):s.call(n,e)),n},inArray:function(e,t,n){return null==t?-1:se.call(t,e,n)},isXMLDoc:function(e){var t=e&&e.namespaceURI,n=e&&(e.ownerDocument||e).documentElement;return!l.test(t||n&&n.nodeName||"HTML")},merge:function(e,t){for(var n=+t.length,r=0,i=e.length;r<n;r++)e[i++]=t[r];return e.length=i,e},grep:function(e,t,n){for(var r=[],i=0,o=e.length,a=!n;i<o;i++)!t(e[i],i)!==a&&r.push(e[i]);return r},map:function(e,t,n){var r,i,o=0,a=[];if(c(e))for(r=e.length;o<r;o++)null!=(i=t(e[o],o,n))&&a.push(i);else for(o in e)null!=(i=t(e[o],o,n))&&a.push(i);return g(a)},guid:1,support:le}),"function"==typeof Symbol&&(ce.fn[Symbol.iterator]=oe[Symbol.iterator]),ce.each("Boolean Number String Function Array Date RegExp Object Error Symbol".split(" "),function(e,t){n["[object "+t+"]"]=t.toLowerCase()});var pe=oe.pop,de=oe.sort,he=oe.splice,ge="[\\x20\\t\\r\\n\\f]",ve=new RegExp("^"+ge+"+|((?:^|[^\\\\])(?:\\\\.)*)"+ge+"+$","g");ce.contains=function(e,t){var n=t&&t.parentNode;return e===n||!(!n||1!==n.nodeType||!(e.contains?e.contains(n):e.compareDocumentPosition&&16&e.compareDocumentPosition(n)))};var f=/([\0-\x1f\x7f]|^-?\d)|^-$|[^\x80-\uFFFF\w-]/g;function p(e,t){return t?"\0"===e?"\ufffd":e.slice(0,-1)+"\\"+e.charCodeAt(e.length-1).toString(16)+" ":"\\"+e}ce.escapeSelector=function(e){return(e+"").replace(f,p)};var ye=C,me=s;!function(){var e,b,w,o,a,T,r,C,d,i,k=me,S=ce.expando,E=0,n=0,s=W(),c=W(),u=W(),h=W(),l=function(e,t){return e===t&&(a=!0),0},f="checked|selected|async|autofocus|autoplay|controls|defer|disabled|hidden|ismap|loop|multiple|open|readonly|required|scoped",t="(?:\\\\[\\da-fA-F]{1,6}"+ge+"?|\\\\[^\\r\\n\\f]|[\\w-]|[^\0-\\x7f])+",p="\\["+ge+"*("+t+")(?:"+ge+"*([*^$|!~]?=)"+ge+"*(?:'((?:\\\\.|[^\\\\'])*)'|\"((?:\\\\.|[^\\\\\"])*)\"|("+t+"))|)"+ge+"*\\]",g=":("+t+")(?:\\((('((?:\\\\.|[^\\\\'])*)'|\"((?:\\\\.|[^\\\\\"])*)\")|((?:\\\\.|[^\\\\()[\\]]|"+p+")*)|.*)\\)|)",v=new RegExp(ge+"+","g"),y=new RegExp("^"+ge+"*,"+ge+"*"),m=new RegExp("^"+ge+"*([>+~]|"+ge+")"+ge+"*"),x=new RegExp(ge+"|>"),j=new RegExp(g),A=new RegExp("^"+t+"$"),D={ID:new RegExp("^#("+t+")"),CLASS:new RegExp("^\\.("+t+")"),TAG:new RegExp("^("+t+"|[*])"),ATTR:new RegExp("^"+p),PSEUDO:new RegExp("^"+g),CHILD:new RegExp("^:(only|first|last|nth|nth-last)-(child|of-type)(?:\\("+ge+"*(even|odd|(([+-]|)(\\d*)n|)"+ge+"*(?:([+-]|)"+ge+"*(\\d+)|))"+ge+"*\\)|)","i"),bool:new RegExp("^(?:"+f+")$","i"),needsContext:new RegExp("^"+ge+"*[>+~]|:(even|odd|eq|gt|lt|nth|first|last)(?:\\("+ge+"*((?:-\\d)?\\d*)"+ge+"*\\)|)(?=[^-]|$)","i")},N=/^(?:input|select|textarea|button)$/i,q=/^h\d$/i,L=/^(?:#([\w-]+)|(\w+)|\.([\w-]+))$/,H=/[+~]/,O=new RegExp("\\\\[\\da-fA-F]{1,6}"+ge+"?|\\\\([^\\r\\n\\f])","g"),P=function(e,t){var n="0x"+e.slice(1)-65536;return t||(n<0?String.fromCharCode(n+65536):String.fromCharCode(n>>10|55296,1023&n|56320))},M=function(){V()},R=J(function(e){return!0===e.disabled&&fe(e,"fieldset")},{dir:"parentNode",next:"legend"});try{k.apply(oe=ae.call(ye.childNodes),ye.childNodes),oe[ye.childNodes.length].nodeType}catch(e){k={apply:function(e,t){me.apply(e,ae.call(t))},call:function(e){me.apply(e,ae.call(arguments,1))}}}function I(t,e,n,r){var i,o,a,s,u,l,c,f=e&&e.ownerDocument,p=e?e.nodeType:9;if(n=n||[],"string"!=typeof t||!t||1!==p&&9!==p&&11!==p)return n;if(!r&&(V(e),e=e||T,C)){if(11!==p&&(u=L.exec(t)))if(i=u[1]){if(9===p){if(!(a=e.getElementById(i)))return n;if(a.id===i)return k.call(n,a),n}else if(f&&(a=f.getElementById(i))&&I.contains(e,a)&&a.id===i)return k.call(n,a),n}else{if(u[2])return k.apply(n,e.getElementsByTagName(t)),n;if((i=u[3])&&e.getElementsByClassName)return k.apply(n,e.getElementsByClassName(i)),n}if(!(h[t+" "]||d&&d.test(t))){if(c=t,f=e,1===p&&(x.test(t)||m.test(t))){(f=H.test(t)&&U(e.parentNode)||e)==e&&le.scope||((s=e.getAttribute("id"))?s=ce.escapeSelector(s):e.setAttribute("id",s=S)),o=(l=Y(t)).length;while(o--)l[o]=(s?"#"+s:":scope")+" "+Q(l[o]);c=l.join(",")}try{return k.apply(n,f.querySelectorAll(c)),n}catch(e){h(t,!0)}finally{s===S&&e.removeAttribute("id")}}}return re(t.replace(ve,"$1"),e,n,r)}function W(){var r=[];return function e(t,n){return r.push(t+" ")>b.cacheLength&&delete e[r.shift()],e[t+" "]=n}}function F(e){return e[S]=!0,e}function $(e){var t=T.createElement("fieldset");try{return!!e(t)}catch(e){return!1}finally{t.parentNode&&t.parentNode.removeChild(t),t=null}}function B(t){return function(e){return fe(e,"input")&&e.type===t}}function _(t){return function(e){return(fe(e,"input")||fe(e,"button"))&&e.type===t}}function z(t){return function(e){return"form"in e?e.parentNode&&!1===e.disabled?"label"in e?"label"in e.parentNode?e.parentNode.disabled===t:e.disabled===t:e.isDisabled===t||e.isDisabled!==!t&&R(e)===t:e.disabled===t:"label"in e&&e.disabled===t}}function X(a){return F(function(o){return o=+o,F(function(e,t){var n,r=a([],e.length,o),i=r.length;while(i--)e[n=r[i]]&&(e[n]=!(t[n]=e[n]))})})}function U(e){return e&&"undefined"!=typeof e.getElementsByTagName&&e}function V(e){var t,n=e?e.ownerDocument||e:ye;return n!=T&&9===n.nodeType&&n.documentElement&&(r=(T=n).documentElement,C=!ce.isXMLDoc(T),i=r.matches||r.webkitMatchesSelector||r.msMatchesSelector,r.msMatchesSelector&&ye!=T&&(t=T.defaultView)&&t.top!==t&&t.addEventListener("unload",M),le.getById=$(function(e){return r.appendChild(e).id=ce.expando,!T.getElementsByName||!T.getElementsByName(ce.expando).length}),le.disconnectedMatch=$(function(e){return i.call(e,"*")}),le.scope=$(function(){return T.querySelectorAll(":scope")}),le.cssHas=$(function(){try{return T.querySelector(":has(*,:jqfake)"),!1}catch(e){return!0}}),le.getById?(b.filter.ID=function(e){var t=e.replace(O,P);return function(e){return e.getAttribute("id")===t}},b.find.ID=function(e,t){if("undefined"!=typeof t.getElementById&&C){var n=t.getElementById(e);return n?[n]:[]}}):(b.filter.ID=function(e){var n=e.replace(O,P);return function(e){var t="undefined"!=typeof e.getAttributeNode&&e.getAttributeNode("id");return t&&t.value===n}},b.find.ID=function(e,t){if("undefined"!=typeof t.getElementById&&C){var n,r,i,o=t.getElementById(e);if(o){if((n=o.getAttributeNode("id"))&&n.value===e)return[o];i=t.getElementsByName(e),r=0;while(o=i[r++])if((n=o.getAttributeNode("id"))&&n.value===e)return[o]}return[]}}),b.find.TAG=function(e,t){return"undefined"!=typeof t.getElementsByTagName?t.getElementsByTagName(e):t.querySelectorAll(e)},b.find.CLASS=function(e,t){if("undefined"!=typeof t.getElementsByClassName&&C)return t.getElementsByClassName(e)},d=[],$(function(e){var t;r.appendChild(e).innerHTML="<a id='"+S+"' href='' disabled='disabled'></a><select id='"+S+"-\r\\' disabled='disabled'><option selected=''></option></select>",e.querySelectorAll("[selected]").length||d.push("\\["+ge+"*(?:value|"+f+")"),e.querySelectorAll("[id~="+S+"-]").length||d.push("~="),e.querySelectorAll("a#"+S+"+*").length||d.push(".#.+[+~]"),e.querySelectorAll(":checked").length||d.push(":checked"),(t=T.createElement("input")).setAttribute("type","hidden"),e.appendChild(t).setAttribute("name","D"),r.appendChild(e).disabled=!0,2!==e.querySelectorAll(":disabled").length&&d.push(":enabled",":disabled"),(t=T.createElement("input")).setAttribute("name",""),e.appendChild(t),e.querySelectorAll("[name='']").length||d.push("\\["+ge+"*name"+ge+"*="+ge+"*(?:''|\"\")")}),le.cssHas||d.push(":has"),d=d.length&&new RegExp(d.join("|")),l=function(e,t){if(e===t)return a=!0,0;var n=!e.compareDocumentPosition-!t.compareDocumentPosition;return n||(1&(n=(e.ownerDocument||e)==(t.ownerDocument||t)?e.compareDocumentPosition(t):1)||!le.sortDetached&&t.compareDocumentPosition(e)===n?e===T||e.ownerDocument==ye&&I.contains(ye,e)?-1:t===T||t.ownerDocument==ye&&I.contains(ye,t)?1:o?se.call(o,e)-se.call(o,t):0:4&n?-1:1)}),T}for(e in I.matches=function(e,t){return I(e,null,null,t)},I.matchesSelector=function(e,t){if(V(e),C&&!h[t+" "]&&(!d||!d.test(t)))try{var n=i.call(e,t);if(n||le.disconnectedMatch||e.document&&11!==e.document.nodeType)return n}catch(e){h(t,!0)}return 0<I(t,T,null,[e]).length},I.contains=function(e,t){return(e.ownerDocument||e)!=T&&V(e),ce.contains(e,t)},I.attr=function(e,t){(e.ownerDocument||e)!=T&&V(e);var n=b.attrHandle[t.toLowerCase()],r=n&&ue.call(b.attrHandle,t.toLowerCase())?n(e,t,!C):void 0;return void 0!==r?r:e.getAttribute(t)},I.error=function(e){throw new Error("Syntax error, unrecognized expression: "+e)},ce.uniqueSort=function(e){var t,n=[],r=0,i=0;if(a=!le.sortStable,o=!le.sortStable&&ae.call(e,0),de.call(e,l),a){while(t=e[i++])t===e[i]&&(r=n.push(i));while(r--)he.call(e,n[r],1)}return o=null,e},ce.fn.uniqueSort=function(){return this.pushStack(ce.uniqueSort(ae.apply(this)))},(b=ce.expr={cacheLength:50,createPseudo:F,match:D,attrHandle:{},find:{},relative:{">":{dir:"parentNode",first:!0}," ":{dir:"parentNode"},"+":{dir:"previousSibling",first:!0},"~":{dir:"previousSibling"}},preFilter:{ATTR:function(e){return e[1]=e[1].replace(O,P),e[3]=(e[3]||e[4]||e[5]||"").replace(O,P),"~="===e[2]&&(e[3]=" "+e[3]+" "),e.slice(0,4)},CHILD:function(e){return e[1]=e[1].toLowerCase(),"nth"===e[1].slice(0,3)?(e[3]||I.error(e[0]),e[4]=+(e[4]?e[5]+(e[6]||1):2*("even"===e[3]||"odd"===e[3])),e[5]=+(e[7]+e[8]||"odd"===e[3])):e[3]&&I.error(e[0]),e},PSEUDO:function(e){var t,n=!e[6]&&e[2];return D.CHILD.test(e[0])?null:(e[3]?e[2]=e[4]||e[5]||"":n&&j.test(n)&&(t=Y(n,!0))&&(t=n.indexOf(")",n.length-t)-n.length)&&(e[0]=e[0].slice(0,t),e[2]=n.slice(0,t)),e.slice(0,3))}},filter:{TAG:function(e){var t=e.replace(O,P).toLowerCase();return"*"===e?function(){return!0}:function(e){return fe(e,t)}},CLASS:function(e){var t=s[e+" "];return t||(t=new RegExp("(^|"+ge+")"+e+"("+ge+"|$)"))&&s(e,function(e){return t.test("string"==typeof e.className&&e.className||"undefined"!=typeof e.getAttribute&&e.getAttribute("class")||"")})},ATTR:function(n,r,i){return function(e){var t=I.attr(e,n);return null==t?"!="===r:!r||(t+="","="===r?t===i:"!="===r?t!==i:"^="===r?i&&0===t.indexOf(i):"*="===r?i&&-1<t.indexOf(i):"$="===r?i&&t.slice(-i.length)===i:"~="===r?-1<(" "+t.replace(v," ")+" ").indexOf(i):"|="===r&&(t===i||t.slice(0,i.length+1)===i+"-"))}},CHILD:function(d,e,t,h,g){var v="nth"!==d.slice(0,3),y="last"!==d.slice(-4),m="of-type"===e;return 1===h&&0===g?function(e){return!!e.parentNode}:function(e,t,n){var r,i,o,a,s,u=v!==y?"nextSibling":"previousSibling",l=e.parentNode,c=m&&e.nodeName.toLowerCase(),f=!n&&!m,p=!1;if(l){if(v){while(u){o=e;while(o=o[u])if(m?fe(o,c):1===o.nodeType)return!1;s=u="only"===d&&!s&&"nextSibling"}return!0}if(s=[y?l.firstChild:l.lastChild],y&&f){p=(a=(r=(i=l[S]||(l[S]={}))[d]||[])[0]===E&&r[1])&&r[2],o=a&&l.childNodes[a];while(o=++a&&o&&o[u]||(p=a=0)||s.pop())if(1===o.nodeType&&++p&&o===e){i[d]=[E,a,p];break}}else if(f&&(p=a=(r=(i=e[S]||(e[S]={}))[d]||[])[0]===E&&r[1]),!1===p)while(o=++a&&o&&o[u]||(p=a=0)||s.pop())if((m?fe(o,c):1===o.nodeType)&&++p&&(f&&((i=o[S]||(o[S]={}))[d]=[E,p]),o===e))break;return(p-=g)===h||p%h==0&&0<=p/h}}},PSEUDO:function(e,o){var t,a=b.pseudos[e]||b.setFilters[e.toLowerCase()]||I.error("unsupported pseudo: "+e);return a[S]?a(o):1<a.length?(t=[e,e,"",o],b.setFilters.hasOwnProperty(e.toLowerCase())?F(function(e,t){var n,r=a(e,o),i=r.length;while(i--)e[n=se.call(e,r[i])]=!(t[n]=r[i])}):function(e){return a(e,0,t)}):a}},pseudos:{not:F(function(e){var r=[],i=[],s=ne(e.replace(ve,"$1"));return s[S]?F(function(e,t,n,r){var i,o=s(e,null,r,[]),a=e.length;while(a--)(i=o[a])&&(e[a]=!(t[a]=i))}):function(e,t,n){return r[0]=e,s(r,null,n,i),r[0]=null,!i.pop()}}),has:F(function(t){return function(e){return 0<I(t,e).length}}),contains:F(function(t){return t=t.replace(O,P),function(e){return-1<(e.textContent||ce.text(e)).indexOf(t)}}),lang:F(function(n){return A.test(n||"")||I.error("unsupported lang: "+n),n=n.replace(O,P).toLowerCase(),function(e){var t;do{if(t=C?e.lang:e.getAttribute("xml:lang")||e.getAttribute("lang"))return(t=t.toLowerCase())===n||0===t.indexOf(n+"-")}while((e=e.parentNode)&&1===e.nodeType);return!1}}),target:function(e){var t=ie.location&&ie.location.hash;return t&&t.slice(1)===e.id},root:function(e){return e===r},focus:function(e){return e===function(){try{return T.activeElement}catch(e){}}()&&T.hasFocus()&&!!(e.type||e.href||~e.tabIndex)},enabled:z(!1),disabled:z(!0),checked:function(e){return fe(e,"input")&&!!e.checked||fe(e,"option")&&!!e.selected},selected:function(e){return e.parentNode&&e.parentNode.selectedIndex,!0===e.selected},empty:function(e){for(e=e.firstChild;e;e=e.nextSibling)if(e.nodeType<6)return!1;return!0},parent:function(e){return!b.pseudos.empty(e)},header:function(e){return q.test(e.nodeName)},input:function(e){return N.test(e.nodeName)},button:function(e){return fe(e,"input")&&"button"===e.type||fe(e,"button")},text:function(e){var t;return fe(e,"input")&&"text"===e.type&&(null==(t=e.getAttribute("type"))||"text"===t.toLowerCase())},first:X(function(){return[0]}),last:X(function(e,t){return[t-1]}),eq:X(function(e,t,n){return[n<0?n+t:n]}),even:X(function(e,t){for(var n=0;n<t;n+=2)e.push(n);return e}),odd:X(function(e,t){for(var n=1;n<t;n+=2)e.push(n);return e}),lt:X(function(e,t,n){var r;for(r=n<0?n+t:t<n?t:n;0<=--r;)e.push(r);return e}),gt:X(function(e,t,n){for(var r=n<0?n+t:n;++r<t;)e.push(r);return e})}}).pseudos.nth=b.pseudos.eq,{radio:!0,checkbox:!0,file:!0,password:!0,image:!0})b.pseudos[e]=B(e);for(e in{submit:!0,reset:!0})b.pseudos[e]=_(e);function G(){}function Y(e,t){var n,r,i,o,a,s,u,l=c[e+" "];if(l)return t?0:l.slice(0);a=e,s=[],u=b.preFilter;while(a){for(o in n&&!(r=y.exec(a))||(r&&(a=a.slice(r[0].length)||a),s.push(i=[])),n=!1,(r=m.exec(a))&&(n=r.shift(),i.push({value:n,type:r[0].replace(ve," ")}),a=a.slice(n.length)),b.filter)!(r=D[o].exec(a))||u[o]&&!(r=u[o](r))||(n=r.shift(),i.push({value:n,type:o,matches:r}),a=a.slice(n.length));if(!n)break}return t?a.length:a?I.error(e):c(e,s).slice(0)}function Q(e){for(var t=0,n=e.length,r="";t<n;t++)r+=e[t].value;return r}function J(a,e,t){var s=e.dir,u=e.next,l=u||s,c=t&&"parentNode"===l,f=n++;return e.first?function(e,t,n){while(e=e[s])if(1===e.nodeType||c)return a(e,t,n);return!1}:function(e,t,n){var r,i,o=[E,f];if(n){while(e=e[s])if((1===e.nodeType||c)&&a(e,t,n))return!0}else while(e=e[s])if(1===e.nodeType||c)if(i=e[S]||(e[S]={}),u&&fe(e,u))e=e[s]||e;else{if((r=i[l])&&r[0]===E&&r[1]===f)return o[2]=r[2];if((i[l]=o)[2]=a(e,t,n))return!0}return!1}}function K(i){return 1<i.length?function(e,t,n){var r=i.length;while(r--)if(!i[r](e,t,n))return!1;return!0}:i[0]}function Z(e,t,n,r,i){for(var o,a=[],s=0,u=e.length,l=null!=t;s<u;s++)(o=e[s])&&(n&&!n(o,r,i)||(a.push(o),l&&t.push(s)));return a}function ee(d,h,g,v,y,e){return v&&!v[S]&&(v=ee(v)),y&&!y[S]&&(y=ee(y,e)),F(function(e,t,n,r){var i,o,a,s,u=[],l=[],c=t.length,f=e||function(e,t,n){for(var r=0,i=t.length;r<i;r++)I(e,t[r],n);return n}(h||"*",n.nodeType?[n]:n,[]),p=!d||!e&&h?f:Z(f,u,d,n,r);if(g?g(p,s=y||(e?d:c||v)?[]:t,n,r):s=p,v){i=Z(s,l),v(i,[],n,r),o=i.length;while(o--)(a=i[o])&&(s[l[o]]=!(p[l[o]]=a))}if(e){if(y||d){if(y){i=[],o=s.length;while(o--)(a=s[o])&&i.push(p[o]=a);y(null,s=[],i,r)}o=s.length;while(o--)(a=s[o])&&-1<(i=y?se.call(e,a):u[o])&&(e[i]=!(t[i]=a))}}else s=Z(s===t?s.splice(c,s.length):s),y?y(null,t,s,r):k.apply(t,s)})}function te(e){for(var i,t,n,r=e.length,o=b.relative[e[0].type],a=o||b.relative[" "],s=o?1:0,u=J(function(e){return e===i},a,!0),l=J(function(e){return-1<se.call(i,e)},a,!0),c=[function(e,t,n){var r=!o&&(n||t!=w)||((i=t).nodeType?u(e,t,n):l(e,t,n));return i=null,r}];s<r;s++)if(t=b.relative[e[s].type])c=[J(K(c),t)];else{if((t=b.filter[e[s].type].apply(null,e[s].matches))[S]){for(n=++s;n<r;n++)if(b.relative[e[n].type])break;return ee(1<s&&K(c),1<s&&Q(e.slice(0,s-1).concat({value:" "===e[s-2].type?"*":""})).replace(ve,"$1"),t,s<n&&te(e.slice(s,n)),n<r&&te(e=e.slice(n)),n<r&&Q(e))}c.push(t)}return K(c)}function ne(e,t){var n,v,y,m,x,r,i=[],o=[],a=u[e+" "];if(!a){t||(t=Y(e)),n=t.length;while(n--)(a=te(t[n]))[S]?i.push(a):o.push(a);(a=u(e,(v=o,m=0<(y=i).length,x=0<v.length,r=function(e,t,n,r,i){var o,a,s,u=0,l="0",c=e&&[],f=[],p=w,d=e||x&&b.find.TAG("*",i),h=E+=null==p?1:Math.random()||.1,g=d.length;for(i&&(w=t==T||t||i);l!==g&&null!=(o=d[l]);l++){if(x&&o){a=0,t||o.ownerDocument==T||(V(o),n=!C);while(s=v[a++])if(s(o,t||T,n)){k.call(r,o);break}i&&(E=h)}m&&((o=!s&&o)&&u--,e&&c.push(o))}if(u+=l,m&&l!==u){a=0;while(s=y[a++])s(c,f,t,n);if(e){if(0<u)while(l--)c[l]||f[l]||(f[l]=pe.call(r));f=Z(f)}k.apply(r,f),i&&!e&&0<f.length&&1<u+y.length&&ce.uniqueSort(r)}return i&&(E=h,w=p),c},m?F(r):r))).selector=e}return a}function re(e,t,n,r){var i,o,a,s,u,l="function"==typeof e&&e,c=!r&&Y(e=l.selector||e);if(n=n||[],1===c.length){if(2<(o=c[0]=c[0].slice(0)).length&&"ID"===(a=o[0]).type&&9===t.nodeType&&C&&b.relative[o[1].type]){if(!(t=(b.find.ID(a.matches[0].replace(O,P),t)||[])[0]))return n;l&&(t=t.parentNode),e=e.slice(o.shift().value.length)}i=D.needsContext.test(e)?0:o.length;while(i--){if(a=o[i],b.relative[s=a.type])break;if((u=b.find[s])&&(r=u(a.matches[0].replace(O,P),H.test(o[0].type)&&U(t.parentNode)||t))){if(o.splice(i,1),!(e=r.length&&Q(o)))return k.apply(n,r),n;break}}}return(l||ne(e,c))(r,t,!C,n,!t||H.test(e)&&U(t.parentNode)||t),n}G.prototype=b.filters=b.pseudos,b.setFilters=new G,le.sortStable=S.split("").sort(l).join("")===S,V(),le.sortDetached=$(function(e){return 1&e.compareDocumentPosition(T.createElement("fieldset"))}),ce.find=I,ce.expr[":"]=ce.expr.pseudos,ce.unique=ce.uniqueSort,I.compile=ne,I.select=re,I.setDocument=V,I.tokenize=Y,I.escape=ce.escapeSelector,I.getText=ce.text,I.isXML=ce.isXMLDoc,I.selectors=ce.expr,I.support=ce.support,I.uniqueSort=ce.uniqueSort}();var d=function(e,t,n){var r=[],i=void 0!==n;while((e=e[t])&&9!==e.nodeType)if(1===e.nodeType){if(i&&ce(e).is(n))break;r.push(e)}return r},h=function(e,t){for(var n=[];e;e=e.nextSibling)1===e.nodeType&&e!==t&&n.push(e);return n},b=ce.expr.match.needsContext,w=/^<([a-z][^\/\0>:\x20\t\r\n\f]*)[\x20\t\r\n\f]*\/?>(?:<\/\1>|)$/i;function T(e,n,r){return v(n)?ce.grep(e,function(e,t){return!!n.call(e,t,e)!==r}):n.nodeType?ce.grep(e,function(e){return e===n!==r}):"string"!=typeof n?ce.grep(e,function(e){return-1<se.call(n,e)!==r}):ce.filter(n,e,r)}ce.filter=function(e,t,n){var r=t[0];return n&&(e=":not("+e+")"),1===t.length&&1===r.nodeType?ce.find.matchesSelector(r,e)?[r]:[]:ce.find.matches(e,ce.grep(t,function(e){return 1===e.nodeType}))},ce.fn.extend({find:function(e){var t,n,r=this.length,i=this;if("string"!=typeof e)return this.pushStack(ce(e).filter(function(){for(t=0;t<r;t++)if(ce.contains(i[t],this))return!0}));for(n=this.pushStack([]),t=0;t<r;t++)ce.find(e,i[t],n);return 1<r?ce.uniqueSort(n):n},filter:function(e){return this.pushStack(T(this,e||[],!1))},not:function(e){return this.pushStack(T(this,e||[],!0))},is:function(e){return!!T(this,"string"==typeof e&&b.test(e)?ce(e):e||[],!1).length}});var k,S=/^(?:\s*(<[\w\W]+>)[^>]*|#([\w-]+))$/;(ce.fn.init=function(e,t,n){var r,i;if(!e)return this;if(n=n||k,"string"==typeof e){if(!(r="<"===e[0]&&">"===e[e.length-1]&&3<=e.length?[null,e,null]:S.exec(e))||!r[1]&&t)return!t||t.jquery?(t||n).find(e):this.constructor(t).find(e);if(r[1]){if(t=t instanceof ce?t[0]:t,ce.merge(this,ce.parseHTML(r[1],t&&t.nodeType?t.ownerDocument||t:C,!0)),w.test(r[1])&&ce.isPlainObject(t))for(r in t)v(this[r])?this[r](t[r]):this.attr(r,t[r]);return this}return(i=C.getElementById(r[2]))&&(this[0]=i,this.length=1),this}return e.nodeType?(this[0]=e,this.length=1,this):v(e)?void 0!==n.ready?n.ready(e):e(ce):ce.makeArray(e,this)}).prototype=ce.fn,k=ce(C);var E=/^(?:parents|prev(?:Until|All))/,j={children:!0,contents:!0,next:!0,prev:!0};function A(e,t){while((e=e[t])&&1!==e.nodeType);return e}ce.fn.extend({has:function(e){var t=ce(e,this),n=t.length;return this.filter(function(){for(var e=0;e<n;e++)if(ce.contains(this,t[e]))return!0})},closest:function(e,t){var n,r=0,i=this.length,o=[],a="string"!=typeof e&&ce(e);if(!b.test(e))for(;r<i;r++)for(n=this[r];n&&n!==t;n=n.parentNode)if(n.nodeType<11&&(a?-1<a.index(n):1===n.nodeType&&ce.find.matchesSelector(n,e))){o.push(n);break}return this.pushStack(1<o.length?ce.uniqueSort(o):o)},index:function(e){return e?"string"==typeof e?se.call(ce(e),this[0]):se.call(this,e.jquery?e[0]:e):this[0]&&this[0].parentNode?this.first().prevAll().length:-1},add:function(e,t){return this.pushStack(ce.uniqueSort(ce.merge(this.get(),ce(e,t))))},addBack:function(e){return this.add(null==e?this.prevObject:this.prevObject.filter(e))}}),ce.each({parent:function(e){var t=e.parentNode;return t&&11!==t.nodeType?t:null},parents:function(e){return d(e,"parentNode")},parentsUntil:function(e,t,n){return d(e,"parentNode",n)},next:function(e){return A(e,"nextSibling")},prev:function(e){return A(e,"previousSibling")},nextAll:function(e){return d(e,"nextSibling")},prevAll:function(e){return d(e,"previousSibling")},nextUntil:function(e,t,n){return d(e,"nextSibling",n)},prevUntil:function(e,t,n){return d(e,"previousSibling",n)},siblings:function(e){return h((e.parentNode||{}).firstChild,e)},children:function(e){return h(e.firstChild)},contents:function(e){return null!=e.contentDocument&&r(e.contentDocument)?e.contentDocument:(fe(e,"template")&&(e=e.content||e),ce.merge([],e.childNodes))}},function(r,i){ce.fn[r]=function(e,t){var n=ce.map(this,i,e);return"Until"!==r.slice(-5)&&(t=e),t&&"string"==typeof t&&(n=ce.filter(t,n)),1<this.length&&(j[r]||ce.uniqueSort(n),E.test(r)&&n.reverse()),this.pushStack(n)}});var D=/[^\x20\t\r\n\f]+/g;function N(e){return e}function q(e){throw e}function L(e,t,n,r){var i;try{e&&v(i=e.promise)?i.call(e).done(t).fail(n):e&&v(i=e.then)?i.call(e,t,n):t.apply(void 0,[e].slice(r))}catch(e){n.apply(void 0,[e])}}ce.Callbacks=function(r){var e,n;r="string"==typeof r?(e=r,n={},ce.each(e.match(D)||[],function(e,t){n[t]=!0}),n):ce.extend({},r);var i,t,o,a,s=[],u=[],l=-1,c=function(){for(a=a||r.once,o=i=!0;u.length;l=-1){t=u.shift();while(++l<s.length)!1===s[l].apply(t[0],t[1])&&r.stopOnFalse&&(l=s.length,t=!1)}r.memory||(t=!1),i=!1,a&&(s=t?[]:"")},f={add:function(){return s&&(t&&!i&&(l=s.length-1,u.push(t)),function n(e){ce.each(e,function(e,t){v(t)?r.unique&&f.has(t)||s.push(t):t&&t.length&&"string"!==x(t)&&n(t)})}(arguments),t&&!i&&c()),this},remove:function(){return ce.each(arguments,function(e,t){var n;while(-1<(n=ce.inArray(t,s,n)))s.splice(n,1),n<=l&&l--}),this},has:function(e){return e?-1<ce.inArray(e,s):0<s.length},empty:function(){return s&&(s=[]),this},disable:function(){return a=u=[],s=t="",this},disabled:function(){return!s},lock:function(){return a=u=[],t||i||(s=t=""),this},locked:function(){return!!a},fireWith:function(e,t){return a||(t=[e,(t=t||[]).slice?t.slice():t],u.push(t),i||c()),this},fire:function(){return f.fireWith(this,arguments),this},fired:function(){return!!o}};return f},ce.extend({Deferred:function(e){var o=[["notify","progress",ce.Callbacks("memory"),ce.Callbacks("memory"),2],["resolve","done",ce.Callbacks("once memory"),ce.Callbacks("once memory"),0,"resolved"],["reject","fail",ce.Callbacks("once memory"),ce.Callbacks("once memory"),1,"rejected"]],i="pending",a={state:function(){return i},always:function(){return s.done(arguments).fail(arguments),this},"catch":function(e){return a.then(null,e)},pipe:function(){var i=arguments;return ce.Deferred(function(r){ce.each(o,function(e,t){var n=v(i[t[4]])&&i[t[4]];s[t[1]](function(){var e=n&&n.apply(this,arguments);e&&v(e.promise)?e.promise().progress(r.notify).done(r.resolve).fail(r.reject):r[t[0]+"With"](this,n?[e]:arguments)})}),i=null}).promise()},then:function(t,n,r){var u=0;function l(i,o,a,s){return function(){var n=this,r=arguments,e=function(){var e,t;if(!(i<u)){if((e=a.apply(n,r))===o.promise())throw new TypeError("Thenable self-resolution");t=e&&("object"==typeof e||"function"==typeof e)&&e.then,v(t)?s?t.call(e,l(u,o,N,s),l(u,o,q,s)):(u++,t.call(e,l(u,o,N,s),l(u,o,q,s),l(u,o,N,o.notifyWith))):(a!==N&&(n=void 0,r=[e]),(s||o.resolveWith)(n,r))}},t=s?e:function(){try{e()}catch(e){ce.Deferred.exceptionHook&&ce.Deferred.exceptionHook(e,t.error),u<=i+1&&(a!==q&&(n=void 0,r=[e]),o.rejectWith(n,r))}};i?t():(ce.Deferred.getErrorHook?t.error=ce.Deferred.getErrorHook():ce.Deferred.getStackHook&&(t.error=ce.Deferred.getStackHook()),ie.setTimeout(t))}}return ce.Deferred(function(e){o[0][3].add(l(0,e,v(r)?r:N,e.notifyWith)),o[1][3].add(l(0,e,v(t)?t:N)),o[2][3].add(l(0,e,v(n)?n:q))}).promise()},promise:function(e){return null!=e?ce.extend(e,a):a}},s={};return ce.each(o,function(e,t){var n=t[2],r=t[5];a[t[1]]=n.add,r&&n.add(function(){i=r},o[3-e][2].disable,o[3-e][3].disable,o[0][2].lock,o[0][3].lock),n.add(t[3].fire),s[t[0]]=function(){return s[t[0]+"With"](this===s?void 0:this,arguments),this},s[t[0]+"With"]=n.fireWith}),a.promise(s),e&&e.call(s,s),s},when:function(e){var n=arguments.length,t=n,r=Array(t),i=ae.call(arguments),o=ce.Deferred(),a=function(t){return function(e){r[t]=this,i[t]=1<arguments.length?ae.call(arguments):e,--n||o.resolveWith(r,i)}};if(n<=1&&(L(e,o.done(a(t)).resolve,o.reject,!n),"pending"===o.state()||v(i[t]&&i[t].then)))return o.then();while(t--)L(i[t],a(t),o.reject);return o.promise()}});var H=/^(Eval|Internal|Range|Reference|Syntax|Type|URI)Error$/;ce.Deferred.exceptionHook=function(e,t){ie.console&&ie.console.warn&&e&&H.test(e.name)&&ie.console.warn("jQuery.Deferred exception: "+e.message,e.stack,t)},ce.readyException=function(e){ie.setTimeout(function(){throw e})};var O=ce.Deferred();function P(){C.removeEventListener("DOMContentLoaded",P),ie.removeEventListener("load",P),ce.ready()}ce.fn.ready=function(e){return O.then(e)["catch"](function(e){ce.readyException(e)}),this},ce.extend({isReady:!1,readyWait:1,ready:function(e){(!0===e?--ce.readyWait:ce.isReady)||(ce.isReady=!0)!==e&&0<--ce.readyWait||O.resolveWith(C,[ce])}}),ce.ready.then=O.then,"complete"===C.readyState||"loading"!==C.readyState&&!C.documentElement.doScroll?ie.setTimeout(ce.ready):(C.addEventListener("DOMContentLoaded",P),ie.addEventListener("load",P));var M=function(e,t,n,r,i,o,a){var s=0,u=e.length,l=null==n;if("object"===x(n))for(s in i=!0,n)M(e,t,s,n[s],!0,o,a);else if(void 0!==r&&(i=!0,v(r)||(a=!0),l&&(a?(t.call(e,r),t=null):(l=t,t=function(e,t,n){return l.call(ce(e),n)})),t))for(;s<u;s++)t(e[s],n,a?r:r.call(e[s],s,t(e[s],n)));return i?e:l?t.call(e):u?t(e[0],n):o},R=/^-ms-/,I=/-([a-z])/g;function W(e,t){return t.toUpperCase()}function F(e){return e.replace(R,"ms-").replace(I,W)}var $=function(e){return 1===e.nodeType||9===e.nodeType||!+e.nodeType};function B(){this.expando=ce.expando+B.uid++}B.uid=1,B.prototype={cache:function(e){var t=e[this.expando];return t||(t={},$(e)&&(e.nodeType?e[this.expando]=t:Object.defineProperty(e,this.expando,{value:t,configurable:!0}))),t},set:function(e,t,n){var r,i=this.cache(e);if("string"==typeof t)i[F(t)]=n;else for(r in t)i[F(r)]=t[r];return i},get:function(e,t){return void 0===t?this.cache(e):e[this.expando]&&e[this.expando][F(t)]},access:function(e,t,n){return void 0===t||t&&"string"==typeof t&&void 0===n?this.get(e,t):(this.set(e,t,n),void 0!==n?n:t)},remove:function(e,t){var n,r=e[this.expando];if(void 0!==r){if(void 0!==t){n=(t=Array.isArray(t)?t.map(F):(t=F(t))in r?[t]:t.match(D)||[]).length;while(n--)delete r[t[n]]}(void 0===t||ce.isEmptyObject(r))&&(e.nodeType?e[this.expando]=void 0:delete e[this.expando])}},hasData:function(e){var t=e[this.expando];return void 0!==t&&!ce.isEmptyObject(t)}};var _=new B,z=new B,X=/^(?:\{[\w\W]*\}|\[[\w\W]*\])$/,U=/[A-Z]/g;function V(e,t,n){var r,i;if(void 0===n&&1===e.nodeType)if(r="data-"+t.replace(U,"-$&").toLowerCase(),"string"==typeof(n=e.getAttribute(r))){try{n="true"===(i=n)||"false"!==i&&("null"===i?null:i===+i+""?+i:X.test(i)?JSON.parse(i):i)}catch(e){}z.set(e,t,n)}else n=void 0;return n}ce.extend({hasData:function(e){return z.hasData(e)||_.hasData(e)},data:function(e,t,n){return z.access(e,t,n)},removeData:function(e,t){z.remove(e,t)},_data:function(e,t,n){return _.access(e,t,n)},_removeData:function(e,t){_.remove(e,t)}}),ce.fn.extend({data:function(n,e){var t,r,i,o=this[0],a=o&&o.attributes;if(void 0===n){if(this.length&&(i=z.get(o),1===o.nodeType&&!_.get(o,"hasDataAttrs"))){t=a.length;while(t--)a[t]&&0===(r=a[t].name).indexOf("data-")&&(r=F(r.slice(5)),V(o,r,i[r]));_.set(o,"hasDataAttrs",!0)}return i}return"object"==typeof n?this.each(function(){z.set(this,n)}):M(this,function(e){var t;if(o&&void 0===e)return void 0!==(t=z.get(o,n))?t:void 0!==(t=V(o,n))?t:void 0;this.each(function(){z.set(this,n,e)})},null,e,1<arguments.length,null,!0)},removeData:function(e){return this.each(function(){z.remove(this,e)})}}),ce.extend({queue:function(e,t,n){var r;if(e)return t=(t||"fx")+"queue",r=_.get(e,t),n&&(!r||Array.isArray(n)?r=_.access(e,t,ce.makeArray(n)):r.push(n)),r||[]},dequeue:function(e,t){t=t||"fx";var n=ce.queue(e,t),r=n.length,i=n.shift(),o=ce._queueHooks(e,t);"inprogress"===i&&(i=n.shift(),r--),i&&("fx"===t&&n.unshift("inprogress"),delete o.stop,i.call(e,function(){ce.dequeue(e,t)},o)),!r&&o&&o.empty.fire()},_queueHooks:function(e,t){var n=t+"queueHooks";return _.get(e,n)||_.access(e,n,{empty:ce.Callbacks("once memory").add(function(){_.remove(e,[t+"queue",n])})})}}),ce.fn.extend({queue:function(t,n){var e=2;return"string"!=typeof t&&(n=t,t="fx",e--),arguments.length<e?ce.queue(this[0],t):void 0===n?this:this.each(function(){var e=ce.queue(this,t,n);ce._queueHooks(this,t),"fx"===t&&"inprogress"!==e[0]&&ce.dequeue(this,t)})},dequeue:function(e){return this.each(function(){ce.dequeue(this,e)})},clearQueue:function(e){return this.queue(e||"fx",[])},promise:function(e,t){var n,r=1,i=ce.Deferred(),o=this,a=this.length,s=function(){--r||i.resolveWith(o,[o])};"string"!=typeof e&&(t=e,e=void 0),e=e||"fx";while(a--)(n=_.get(o[a],e+"queueHooks"))&&n.empty&&(r++,n.empty.add(s));return s(),i.promise(t)}});var G=/[+-]?(?:\d*\.|)\d+(?:[eE][+-]?\d+|)/.source,Y=new RegExp("^(?:([+-])=|)("+G+")([a-z%]*)$","i"),Q=["Top","Right","Bottom","Left"],J=C.documentElement,K=function(e){return ce.contains(e.ownerDocument,e)},Z={composed:!0};J.getRootNode&&(K=function(e){return ce.contains(e.ownerDocument,e)||e.getRootNode(Z)===e.ownerDocument});var ee=function(e,t){return"none"===(e=t||e).style.display||""===e.style.display&&K(e)&&"none"===ce.css(e,"display")};function te(e,t,n,r){var i,o,a=20,s=r?function(){return r.cur()}:function(){return ce.css(e,t,"")},u=s(),l=n&&n[3]||(ce.cssNumber[t]?"":"px"),c=e.nodeType&&(ce.cssNumber[t]||"px"!==l&&+u)&&Y.exec(ce.css(e,t));if(c&&c[3]!==l){u/=2,l=l||c[3],c=+u||1;while(a--)ce.style(e,t,c+l),(1-o)*(1-(o=s()/u||.5))<=0&&(a=0),c/=o;c*=2,ce.style(e,t,c+l),n=n||[]}return n&&(c=+c||+u||0,i=n[1]?c+(n[1]+1)*n[2]:+n[2],r&&(r.unit=l,r.start=c,r.end=i)),i}var ne={};function re(e,t){for(var n,r,i,o,a,s,u,l=[],c=0,f=e.length;c<f;c++)(r=e[c]).style&&(n=r.style.display,t?("none"===n&&(l[c]=_.get(r,"display")||null,l[c]||(r.style.display="")),""===r.style.display&&ee(r)&&(l[c]=(u=a=o=void 0,a=(i=r).ownerDocument,s=i.nodeName,(u=ne[s])||(o=a.body.appendChild(a.createElement(s)),u=ce.css(o,"display"),o.parentNode.removeChild(o),"none"===u&&(u="block"),ne[s]=u)))):"none"!==n&&(l[c]="none",_.set(r,"display",n)));for(c=0;c<f;c++)null!=l[c]&&(e[c].style.display=l[c]);return e}ce.fn.extend({show:function(){return re(this,!0)},hide:function(){return re(this)},toggle:function(e){return"boolean"==typeof e?e?this.show():this.hide():this.each(function(){ee(this)?ce(this).show():ce(this).hide()})}});var xe,be,we=/^(?:checkbox|radio)$/i,Te=/<([a-z][^\/\0>\x20\t\r\n\f]*)/i,Ce=/^$|^module$|\/(?:java|ecma)script/i;xe=C.createDocumentFragment().appendChild(C.createElement("div")),(be=C.createElement("input")).setAttribute("type","radio"),be.setAttribute("checked","checked"),be.setAttribute("name","t"),xe.appendChild(be),le.checkClone=xe.cloneNode(!0).cloneNode(!0).lastChild.checked,xe.innerHTML="<textarea>x</textarea>",le.noCloneChecked=!!xe.cloneNode(!0).lastChild.defaultValue,xe.innerHTML="<option></option>",le.option=!!xe.lastChild;var ke={thead:[1,"<table>","</table>"],col:[2,"<table><colgroup>","</colgroup></table>"],tr:[2,"<table><tbody>","</tbody></table>"],td:[3,"<table><tbody><tr>","</tr></tbody></table>"],_default:[0,"",""]};function Se(e,t){var n;return n="undefined"!=typeof e.getElementsByTagName?e.getElementsByTagName(t||"*"):"undefined"!=typeof e.querySelectorAll?e.querySelectorAll(t||"*"):[],void 0===t||t&&fe(e,t)?ce.merge([e],n):n}function Ee(e,t){for(var n=0,r=e.length;n<r;n++)_.set(e[n],"globalEval",!t||_.get(t[n],"globalEval"))}ke.tbody=ke.tfoot=ke.colgroup=ke.caption=ke.thead,ke.th=ke.td,le.option||(ke.optgroup=ke.option=[1,"<select multiple='multiple'>","</select>"]);var je=/<|&#?\w+;/;function Ae(e,t,n,r,i){for(var o,a,s,u,l,c,f=t.createDocumentFragment(),p=[],d=0,h=e.length;d<h;d++)if((o=e[d])||0===o)if("object"===x(o))ce.merge(p,o.nodeType?[o]:o);else if(je.test(o)){a=a||f.appendChild(t.createElement("div")),s=(Te.exec(o)||["",""])[1].toLowerCase(),u=ke[s]||ke._default,a.innerHTML=u[1]+ce.htmlPrefilter(o)+u[2],c=u[0];while(c--)a=a.lastChild;ce.merge(p,a.childNodes),(a=f.firstChild).textContent=""}else p.push(t.createTextNode(o));f.textContent="",d=0;while(o=p[d++])if(r&&-1<ce.inArray(o,r))i&&i.push(o);else if(l=K(o),a=Se(f.appendChild(o),"script"),l&&Ee(a),n){c=0;while(o=a[c++])Ce.test(o.type||"")&&n.push(o)}return f}var De=/^([^.]*)(?:\.(.+)|)/;function Ne(){return!0}function qe(){return!1}function Le(e,t,n,r,i,o){var a,s;if("object"==typeof t){for(s in"string"!=typeof n&&(r=r||n,n=void 0),t)Le(e,s,n,r,t[s],o);return e}if(null==r&&null==i?(i=n,r=n=void 0):null==i&&("string"==typeof n?(i=r,r=void 0):(i=r,r=n,n=void 0)),!1===i)i=qe;else if(!i)return e;return 1===o&&(a=i,(i=function(e){return ce().off(e),a.apply(this,arguments)}).guid=a.guid||(a.guid=ce.guid++)),e.each(function(){ce.event.add(this,t,i,r,n)})}function He(e,r,t){t?(_.set(e,r,!1),ce.event.add(e,r,{namespace:!1,handler:function(e){var t,n=_.get(this,r);if(1&e.isTrigger&&this[r]){if(n)(ce.event.special[r]||{}).delegateType&&e.stopPropagation();else if(n=ae.call(arguments),_.set(this,r,n),this[r](),t=_.get(this,r),_.set(this,r,!1),n!==t)return e.stopImmediatePropagation(),e.preventDefault(),t}else n&&(_.set(this,r,ce.event.trigger(n[0],n.slice(1),this)),e.stopPropagation(),e.isImmediatePropagationStopped=Ne)}})):void 0===_.get(e,r)&&ce.event.add(e,r,Ne)}ce.event={global:{},add:function(t,e,n,r,i){var o,a,s,u,l,c,f,p,d,h,g,v=_.get(t);if($(t)){n.handler&&(n=(o=n).handler,i=o.selector),i&&ce.find.matchesSelector(J,i),n.guid||(n.guid=ce.guid++),(u=v.events)||(u=v.events=Object.create(null)),(a=v.handle)||(a=v.handle=function(e){return"undefined"!=typeof ce&&ce.event.triggered!==e.type?ce.event.dispatch.apply(t,arguments):void 0}),l=(e=(e||"").match(D)||[""]).length;while(l--)d=g=(s=De.exec(e[l])||[])[1],h=(s[2]||"").split(".").sort(),d&&(f=ce.event.special[d]||{},d=(i?f.delegateType:f.bindType)||d,f=ce.event.special[d]||{},c=ce.extend({type:d,origType:g,data:r,handler:n,guid:n.guid,selector:i,needsContext:i&&ce.expr.match.needsContext.test(i),namespace:h.join(".")},o),(p=u[d])||((p=u[d]=[]).delegateCount=0,f.setup&&!1!==f.setup.call(t,r,h,a)||t.addEventListener&&t.addEventListener(d,a)),f.add&&(f.add.call(t,c),c.handler.guid||(c.handler.guid=n.guid)),i?p.splice(p.delegateCount++,0,c):p.push(c),ce.event.global[d]=!0)}},remove:function(e,t,n,r,i){var o,a,s,u,l,c,f,p,d,h,g,v=_.hasData(e)&&_.get(e);if(v&&(u=v.events)){l=(t=(t||"").match(D)||[""]).length;while(l--)if(d=g=(s=De.exec(t[l])||[])[1],h=(s[2]||"").split(".").sort(),d){f=ce.event.special[d]||{},p=u[d=(r?f.delegateType:f.bindType)||d]||[],s=s[2]&&new RegExp("(^|\\.)"+h.join("\\.(?:.*\\.|)")+"(\\.|$)"),a=o=p.length;while(o--)c=p[o],!i&&g!==c.origType||n&&n.guid!==c.guid||s&&!s.test(c.namespace)||r&&r!==c.selector&&("**"!==r||!c.selector)||(p.splice(o,1),c.selector&&p.delegateCount--,f.remove&&f.remove.call(e,c));a&&!p.length&&(f.teardown&&!1!==f.teardown.call(e,h,v.handle)||ce.removeEvent(e,d,v.handle),delete u[d])}else for(d in u)ce.event.remove(e,d+t[l],n,r,!0);ce.isEmptyObject(u)&&_.remove(e,"handle events")}},dispatch:function(e){var t,n,r,i,o,a,s=new Array(arguments.length),u=ce.event.fix(e),l=(_.get(this,"events")||Object.create(null))[u.type]||[],c=ce.event.special[u.type]||{};for(s[0]=u,t=1;t<arguments.length;t++)s[t]=arguments[t];if(u.delegateTarget=this,!c.preDispatch||!1!==c.preDispatch.call(this,u)){a=ce.event.handlers.call(this,u,l),t=0;while((i=a[t++])&&!u.isPropagationStopped()){u.currentTarget=i.elem,n=0;while((o=i.handlers[n++])&&!u.isImmediatePropagationStopped())u.rnamespace&&!1!==o.namespace&&!u.rnamespace.test(o.namespace)||(u.handleObj=o,u.data=o.data,void 0!==(r=((ce.event.special[o.origType]||{}).handle||o.handler).apply(i.elem,s))&&!1===(u.result=r)&&(u.preventDefault(),u.stopPropagation()))}return c.postDispatch&&c.postDispatch.call(this,u),u.result}},handlers:function(e,t){var n,r,i,o,a,s=[],u=t.delegateCount,l=e.target;if(u&&l.nodeType&&!("click"===e.type&&1<=e.button))for(;l!==this;l=l.parentNode||this)if(1===l.nodeType&&("click"!==e.type||!0!==l.disabled)){for(o=[],a={},n=0;n<u;n++)void 0===a[i=(r=t[n]).selector+" "]&&(a[i]=r.needsContext?-1<ce(i,this).index(l):ce.find(i,this,null,[l]).length),a[i]&&o.push(r);o.length&&s.push({elem:l,handlers:o})}return l=this,u<t.length&&s.push({elem:l,handlers:t.slice(u)}),s},addProp:function(t,e){Object.defineProperty(ce.Event.prototype,t,{enumerable:!0,configurable:!0,get:v(e)?function(){if(this.originalEvent)return e(this.originalEvent)}:function(){if(this.originalEvent)return this.originalEvent[t]},set:function(e){Object.defineProperty(this,t,{enumerable:!0,configurable:!0,writable:!0,value:e})}})},fix:function(e){return e[ce.expando]?e:new ce.Event(e)},special:{load:{noBubble:!0},click:{setup:function(e){var t=this||e;return we.test(t.type)&&t.click&&fe(t,"input")&&He(t,"click",!0),!1},trigger:function(e){var t=this||e;return we.test(t.type)&&t.click&&fe(t,"input")&&He(t,"click"),!0},_default:function(e){var t=e.target;return we.test(t.type)&&t.click&&fe(t,"input")&&_.get(t,"click")||fe(t,"a")}},beforeunload:{postDispatch:function(e){void 0!==e.result&&e.originalEvent&&(e.originalEvent.returnValue=e.result)}}}},ce.removeEvent=function(e,t,n){e.removeEventListener&&e.removeEventListener(t,n)},ce.Event=function(e,t){if(!(this instanceof ce.Event))return new ce.Event(e,t);e&&e.type?(this.originalEvent=e,this.type=e.type,this.isDefaultPrevented=e.defaultPrevented||void 0===e.defaultPrevented&&!1===e.returnValue?Ne:qe,this.target=e.target&&3===e.target.nodeType?e.target.parentNode:e.target,this.currentTarget=e.currentTarget,this.relatedTarget=e.relatedTarget):this.type=e,t&&ce.extend(this,t),this.timeStamp=e&&e.timeStamp||Date.now(),this[ce.expando]=!0},ce.Event.prototype={constructor:ce.Event,isDefaultPrevented:qe,isPropagationStopped:qe,isImmediatePropagationStopped:qe,isSimulated:!1,preventDefault:function(){var e=this.originalEvent;this.isDefaultPrevented=Ne,e&&!this.isSimulated&&e.preventDefault()},stopPropagation:function(){var e=this.originalEvent;this.isPropagationStopped=Ne,e&&!this.isSimulated&&e.stopPropagation()},stopImmediatePropagation:function(){var e=this.originalEvent;this.isImmediatePropagationStopped=Ne,e&&!this.isSimulated&&e.stopImmediatePropagation(),this.stopPropagation()}},ce.each({altKey:!0,bubbles:!0,cancelable:!0,changedTouches:!0,ctrlKey:!0,detail:!0,eventPhase:!0,metaKey:!0,pageX:!0,pageY:!0,shiftKey:!0,view:!0,"char":!0,code:!0,charCode:!0,key:!0,keyCode:!0,button:!0,buttons:!0,clientX:!0,clientY:!0,offsetX:!0,offsetY:!0,pointerId:!0,pointerType:!0,screenX:!0,screenY:!0,targetTouches:!0,toElement:!0,touches:!0,which:!0},ce.event.addProp),ce.each({focus:"focusin",blur:"focusout"},function(r,i){function o(e){if(C.documentMode){var t=_.get(this,"handle"),n=ce.event.fix(e);n.type="focusin"===e.type?"focus":"blur",n.isSimulated=!0,t(e),n.target===n.currentTarget&&t(n)}else ce.event.simulate(i,e.target,ce.event.fix(e))}ce.event.special[r]={setup:function(){var e;if(He(this,r,!0),!C.documentMode)return!1;(e=_.get(this,i))||this.addEventListener(i,o),_.set(this,i,(e||0)+1)},trigger:function(){return He(this,r),!0},teardown:function(){var e;if(!C.documentMode)return!1;(e=_.get(this,i)-1)?_.set(this,i,e):(this.removeEventListener(i,o),_.remove(this,i))},_default:function(e){return _.get(e.target,r)},delegateType:i},ce.event.special[i]={setup:function(){var e=this.ownerDocument||this.document||this,t=C.documentMode?this:e,n=_.get(t,i);n||(C.documentMode?this.addEventListener(i,o):e.addEventListener(r,o,!0)),_.set(t,i,(n||0)+1)},teardown:function(){var e=this.ownerDocument||this.document||this,t=C.documentMode?this:e,n=_.get(t,i)-1;n?_.set(t,i,n):(C.documentMode?this.removeEventListener(i,o):e.removeEventListener(r,o,!0),_.remove(t,i))}}}),ce.each({mouseenter:"mouseover",mouseleave:"mouseout",pointerenter:"pointerover",pointerleave:"pointerout"},function(e,i){ce.event.special[e]={delegateType:i,bindType:i,handle:function(e){var t,n=e.relatedTarget,r=e.handleObj;return n&&(n===this||ce.contains(this,n))||(e.type=r.origType,t=r.handler.apply(this,arguments),e.type=i),t}}}),ce.fn.extend({on:function(e,t,n,r){return Le(this,e,t,n,r)},one:function(e,t,n,r){return Le(this,e,t,n,r,1)},off:function(e,t,n){var r,i;if(e&&e.preventDefault&&e.handleObj)return r=e.handleObj,ce(e.delegateTarget).off(r.namespace?r.origType+"."+r.namespace:r.origType,r.selector,r.handler),this;if("object"==typeof e){for(i in e)this.off(i,t,e[i]);return this}return!1!==t&&"function"!=typeof t||(n=t,t=void 0),!1===n&&(n=qe),this.each(function(){ce.event.remove(this,e,n,t)})}});var Oe=/<script|<style|<link/i,Pe=/checked\s*(?:[^=]|=\s*.checked.)/i,Me=/^\s*<!\[CDATA\[|\]\]>\s*$/g;function Re(e,t){return fe(e,"table")&&fe(11!==t.nodeType?t:t.firstChild,"tr")&&ce(e).children("tbody")[0]||e}function Ie(e){return e.type=(null!==e.getAttribute("type"))+"/"+e.type,e}function We(e){return"true/"===(e.type||"").slice(0,5)?e.type=e.type.slice(5):e.removeAttribute("type"),e}function Fe(e,t){var n,r,i,o,a,s;if(1===t.nodeType){if(_.hasData(e)&&(s=_.get(e).events))for(i in _.remove(t,"handle events"),s)for(n=0,r=s[i].length;n<r;n++)ce.event.add(t,i,s[i][n]);z.hasData(e)&&(o=z.access(e),a=ce.extend({},o),z.set(t,a))}}function $e(n,r,i,o){r=g(r);var e,t,a,s,u,l,c=0,f=n.length,p=f-1,d=r[0],h=v(d);if(h||1<f&&"string"==typeof d&&!le.checkClone&&Pe.test(d))return n.each(function(e){var t=n.eq(e);h&&(r[0]=d.call(this,e,t.html())),$e(t,r,i,o)});if(f&&(t=(e=Ae(r,n[0].ownerDocument,!1,n,o)).firstChild,1===e.childNodes.length&&(e=t),t||o)){for(s=(a=ce.map(Se(e,"script"),Ie)).length;c<f;c++)u=e,c!==p&&(u=ce.clone(u,!0,!0),s&&ce.merge(a,Se(u,"script"))),i.call(n[c],u,c);if(s)for(l=a[a.length-1].ownerDocument,ce.map(a,We),c=0;c<s;c++)u=a[c],Ce.test(u.type||"")&&!_.access(u,"globalEval")&&ce.contains(l,u)&&(u.src&&"module"!==(u.type||"").toLowerCase()?ce._evalUrl&&!u.noModule&&ce._evalUrl(u.src,{nonce:u.nonce||u.getAttribute("nonce")},l):m(u.textContent.replace(Me,""),u,l))}return n}function Be(e,t,n){for(var r,i=t?ce.filter(t,e):e,o=0;null!=(r=i[o]);o++)n||1!==r.nodeType||ce.cleanData(Se(r)),r.parentNode&&(n&&K(r)&&Ee(Se(r,"script")),r.parentNode.removeChild(r));return e}ce.extend({htmlPrefilter:function(e){return e},clone:function(e,t,n){var r,i,o,a,s,u,l,c=e.cloneNode(!0),f=K(e);if(!(le.noCloneChecked||1!==e.nodeType&&11!==e.nodeType||ce.isXMLDoc(e)))for(a=Se(c),r=0,i=(o=Se(e)).length;r<i;r++)s=o[r],u=a[r],void 0,"input"===(l=u.nodeName.toLowerCase())&&we.test(s.type)?u.checked=s.checked:"input"!==l&&"textarea"!==l||(u.defaultValue=s.defaultValue);if(t)if(n)for(o=o||Se(e),a=a||Se(c),r=0,i=o.length;r<i;r++)Fe(o[r],a[r]);else Fe(e,c);return 0<(a=Se(c,"script")).length&&Ee(a,!f&&Se(e,"script")),c},cleanData:function(e){for(var t,n,r,i=ce.event.special,o=0;void 0!==(n=e[o]);o++)if($(n)){if(t=n[_.expando]){if(t.events)for(r in t.events)i[r]?ce.event.remove(n,r):ce.removeEvent(n,r,t.handle);n[_.expando]=void 0}n[z.expando]&&(n[z.expando]=void 0)}}}),ce.fn.extend({detach:function(e){return Be(this,e,!0)},remove:function(e){return Be(this,e)},text:function(e){return M(this,function(e){return void 0===e?ce.text(this):this.empty().each(function(){1!==this.nodeType&&11!==this.nodeType&&9!==this.nodeType||(this.textContent=e)})},null,e,arguments.length)},append:function(){return $e(this,arguments,function(e){1!==this.nodeType&&11!==this.nodeType&&9!==this.nodeType||Re(this,e).appendChild(e)})},prepend:function(){return $e(this,arguments,function(e){if(1===this.nodeType||11===this.nodeType||9===this.nodeType){var t=Re(this,e);t.insertBefore(e,t.firstChild)}})},before:function(){return $e(this,arguments,function(e){this.parentNode&&this.parentNode.insertBefore(e,this)})},after:function(){return $e(this,arguments,function(e){this.parentNode&&this.parentNode.insertBefore(e,this.nextSibling)})},empty:function(){for(var e,t=0;null!=(e=this[t]);t++)1===e.nodeType&&(ce.cleanData(Se(e,!1)),e.textContent="");return this},clone:function(e,t){return e=null!=e&&e,t=null==t?e:t,this.map(function(){return ce.clone(this,e,t)})},html:function(e){return M(this,function(e){var t=this[0]||{},n=0,r=this.length;if(void 0===e&&1===t.nodeType)return t.innerHTML;if("string"==typeof e&&!Oe.test(e)&&!ke[(Te.exec(e)||["",""])[1].toLowerCase()]){e=ce.htmlPrefilter(e);try{for(;n<r;n++)1===(t=this[n]||{}).nodeType&&(ce.cleanData(Se(t,!1)),t.innerHTML=e);t=0}catch(e){}}t&&this.empty().append(e)},null,e,arguments.length)},replaceWith:function(){var n=[];return $e(this,arguments,function(e){var t=this.parentNode;ce.inArray(this,n)<0&&(ce.cleanData(Se(this)),t&&t.replaceChild(e,this))},n)}}),ce.each({appendTo:"append",prependTo:"prepend",insertBefore:"before",insertAfter:"after",replaceAll:"replaceWith"},function(e,a){ce.fn[e]=function(e){for(var t,n=[],r=ce(e),i=r.length-1,o=0;o<=i;o++)t=o===i?this:this.clone(!0),ce(r[o])[a](t),s.apply(n,t.get());return this.pushStack(n)}});var _e=new RegExp("^("+G+")(?!px)[a-z%]+$","i"),ze=/^--/,Xe=function(e){var t=e.ownerDocument.defaultView;return t&&t.opener||(t=ie),t.getComputedStyle(e)},Ue=function(e,t,n){var r,i,o={};for(i in t)o[i]=e.style[i],e.style[i]=t[i];for(i in r=n.call(e),t)e.style[i]=o[i];return r},Ve=new RegExp(Q.join("|"),"i");function Ge(e,t,n){var r,i,o,a,s=ze.test(t),u=e.style;return(n=n||Xe(e))&&(a=n.getPropertyValue(t)||n[t],s&&a&&(a=a.replace(ve,"$1")||void 0),""!==a||K(e)||(a=ce.style(e,t)),!le.pixelBoxStyles()&&_e.test(a)&&Ve.test(t)&&(r=u.width,i=u.minWidth,o=u.maxWidth,u.minWidth=u.maxWidth=u.width=a,a=n.width,u.width=r,u.minWidth=i,u.maxWidth=o)),void 0!==a?a+"":a}function Ye(e,t){return{get:function(){if(!e())return(this.get=t).apply(this,arguments);delete this.get}}}!function(){function e(){if(l){u.style.cssText="position:absolute;left:-11111px;width:60px;margin-top:1px;padding:0;border:0",l.style.cssText="position:relative;display:block;box-sizing:border-box;overflow:scroll;margin:auto;border:1px;padding:1px;width:60%;top:1%",J.appendChild(u).appendChild(l);var e=ie.getComputedStyle(l);n="1%"!==e.top,s=12===t(e.marginLeft),l.style.right="60%",o=36===t(e.right),r=36===t(e.width),l.style.position="absolute",i=12===t(l.offsetWidth/3),J.removeChild(u),l=null}}function t(e){return Math.round(parseFloat(e))}var n,r,i,o,a,s,u=C.createElement("div"),l=C.createElement("div");l.style&&(l.style.backgroundClip="content-box",l.cloneNode(!0).style.backgroundClip="",le.clearCloneStyle="content-box"===l.style.backgroundClip,ce.extend(le,{boxSizingReliable:function(){return e(),r},pixelBoxStyles:function(){return e(),o},pixelPosition:function(){return e(),n},reliableMarginLeft:function(){return e(),s},scrollboxSize:function(){return e(),i},reliableTrDimensions:function(){var e,t,n,r;return null==a&&(e=C.createElement("table"),t=C.createElement("tr"),n=C.createElement("div"),e.style.cssText="position:absolute;left:-11111px;border-collapse:separate",t.style.cssText="box-sizing:content-box;border:1px solid",t.style.height="1px",n.style.height="9px",n.style.display="block",J.appendChild(e).appendChild(t).appendChild(n),r=ie.getComputedStyle(t),a=parseInt(r.height,10)+parseInt(r.borderTopWidth,10)+parseInt(r.borderBottomWidth,10)===t.offsetHeight,J.removeChild(e)),a}}))}();var Qe=["Webkit","Moz","ms"],Je=C.createElement("div").style,Ke={};function Ze(e){var t=ce.cssProps[e]||Ke[e];return t||(e in Je?e:Ke[e]=function(e){var t=e[0].toUpperCase()+e.slice(1),n=Qe.length;while(n--)if((e=Qe[n]+t)in Je)return e}(e)||e)}var et=/^(none|table(?!-c[ea]).+)/,tt={position:"absolute",visibility:"hidden",display:"block"},nt={letterSpacing:"0",fontWeight:"400"};function rt(e,t,n){var r=Y.exec(t);return r?Math.max(0,r[2]-(n||0))+(r[3]||"px"):t}function it(e,t,n,r,i,o){var a="width"===t?1:0,s=0,u=0,l=0;if(n===(r?"border":"content"))return 0;for(;a<4;a+=2)"margin"===n&&(l+=ce.css(e,n+Q[a],!0,i)),r?("content"===n&&(u-=ce.css(e,"padding"+Q[a],!0,i)),"margin"!==n&&(u-=ce.css(e,"border"+Q[a]+"Width",!0,i))):(u+=ce.css(e,"padding"+Q[a],!0,i),"padding"!==n?u+=ce.css(e,"border"+Q[a]+"Width",!0,i):s+=ce.css(e,"border"+Q[a]+"Width",!0,i));return!r&&0<=o&&(u+=Math.max(0,Math.ceil(e["offset"+t[0].toUpperCase()+t.slice(1)]-o-u-s-.5))||0),u+l}function ot(e,t,n){var r=Xe(e),i=(!le.boxSizingReliable()||n)&&"border-box"===ce.css(e,"boxSizing",!1,r),o=i,a=Ge(e,t,r),s="offset"+t[0].toUpperCase()+t.slice(1);if(_e.test(a)){if(!n)return a;a="auto"}return(!le.boxSizingReliable()&&i||!le.reliableTrDimensions()&&fe(e,"tr")||"auto"===a||!parseFloat(a)&&"inline"===ce.css(e,"display",!1,r))&&e.getClientRects().length&&(i="border-box"===ce.css(e,"boxSizing",!1,r),(o=s in e)&&(a=e[s])),(a=parseFloat(a)||0)+it(e,t,n||(i?"border":"content"),o,r,a)+"px"}function at(e,t,n,r,i){return new at.prototype.init(e,t,n,r,i)}ce.extend({cssHooks:{opacity:{get:function(e,t){if(t){var n=Ge(e,"opacity");return""===n?"1":n}}}},cssNumber:{animationIterationCount:!0,aspectRatio:!0,borderImageSlice:!0,columnCount:!0,flexGrow:!0,flexShrink:!0,fontWeight:!0,gridArea:!0,gridColumn:!0,gridColumnEnd:!0,gridColumnStart:!0,gridRow:!0,gridRowEnd:!0,gridRowStart:!0,lineHeight:!0,opacity:!0,order:!0,orphans:!0,scale:!0,widows:!0,zIndex:!0,zoom:!0,fillOpacity:!0,floodOpacity:!0,stopOpacity:!0,strokeMiterlimit:!0,strokeOpacity:!0},cssProps:{},style:function(e,t,n,r){if(e&&3!==e.nodeType&&8!==e.nodeType&&e.style){var i,o,a,s=F(t),u=ze.test(t),l=e.style;if(u||(t=Ze(s)),a=ce.cssHooks[t]||ce.cssHooks[s],void 0===n)return a&&"get"in a&&void 0!==(i=a.get(e,!1,r))?i:l[t];"string"===(o=typeof n)&&(i=Y.exec(n))&&i[1]&&(n=te(e,t,i),o="number"),null!=n&&n==n&&("number"!==o||u||(n+=i&&i[3]||(ce.cssNumber[s]?"":"px")),le.clearCloneStyle||""!==n||0!==t.indexOf("background")||(l[t]="inherit"),a&&"set"in a&&void 0===(n=a.set(e,n,r))||(u?l.setProperty(t,n):l[t]=n))}},css:function(e,t,n,r){var i,o,a,s=F(t);return ze.test(t)||(t=Ze(s)),(a=ce.cssHooks[t]||ce.cssHooks[s])&&"get"in a&&(i=a.get(e,!0,n)),void 0===i&&(i=Ge(e,t,r)),"normal"===i&&t in nt&&(i=nt[t]),""===n||n?(o=parseFloat(i),!0===n||isFinite(o)?o||0:i):i}}),ce.each(["height","width"],function(e,u){ce.cssHooks[u]={get:function(e,t,n){if(t)return!et.test(ce.css(e,"display"))||e.getClientRects().length&&e.getBoundingClientRect().width?ot(e,u,n):Ue(e,tt,function(){return ot(e,u,n)})},set:function(e,t,n){var r,i=Xe(e),o=!le.scrollboxSize()&&"absolute"===i.position,a=(o||n)&&"border-box"===ce.css(e,"boxSizing",!1,i),s=n?it(e,u,n,a,i):0;return a&&o&&(s-=Math.ceil(e["offset"+u[0].toUpperCase()+u.slice(1)]-parseFloat(i[u])-it(e,u,"border",!1,i)-.5)),s&&(r=Y.exec(t))&&"px"!==(r[3]||"px")&&(e.style[u]=t,t=ce.css(e,u)),rt(0,t,s)}}}),ce.cssHooks.marginLeft=Ye(le.reliableMarginLeft,function(e,t){if(t)return(parseFloat(Ge(e,"marginLeft"))||e.getBoundingClientRect().left-Ue(e,{marginLeft:0},function(){return e.getBoundingClientRect().left}))+"px"}),ce.each({margin:"",padding:"",border:"Width"},function(i,o){ce.cssHooks[i+o]={expand:function(e){for(var t=0,n={},r="string"==typeof e?e.split(" "):[e];t<4;t++)n[i+Q[t]+o]=r[t]||r[t-2]||r[0];return n}},"margin"!==i&&(ce.cssHooks[i+o].set=rt)}),ce.fn.extend({css:function(e,t){return M(this,function(e,t,n){var r,i,o={},a=0;if(Array.isArray(t)){for(r=Xe(e),i=t.length;a<i;a++)o[t[a]]=ce.css(e,t[a],!1,r);return o}return void 0!==n?ce.style(e,t,n):ce.css(e,t)},e,t,1<arguments.length)}}),((ce.Tween=at).prototype={constructor:at,init:function(e,t,n,r,i,o){this.elem=e,this.prop=n,this.easing=i||ce.easing._default,this.options=t,this.start=this.now=this.cur(),this.end=r,this.unit=o||(ce.cssNumber[n]?"":"px")},cur:function(){var e=at.propHooks[this.prop];return e&&e.get?e.get(this):at.propHooks._default.get(this)},run:function(e){var t,n=at.propHooks[this.prop];return this.options.duration?this.pos=t=ce.easing[this.easing](e,this.options.duration*e,0,1,this.options.duration):this.pos=t=e,this.now=(this.end-this.start)*t+this.start,this.options.step&&this.options.step.call(this.elem,this.now,this),n&&n.set?n.set(this):at.propHooks._default.set(this),this}}).init.prototype=at.prototype,(at.propHooks={_default:{get:function(e){var t;return 1!==e.elem.nodeType||null!=e.elem[e.prop]&&null==e.elem.style[e.prop]?e.elem[e.prop]:(t=ce.css(e.elem,e.prop,""))&&"auto"!==t?t:0},set:function(e){ce.fx.step[e.prop]?ce.fx.step[e.prop](e):1!==e.elem.nodeType||!ce.cssHooks[e.prop]&&null==e.elem.style[Ze(e.prop)]?e.elem[e.prop]=e.now:ce.style(e.elem,e.prop,e.now+e.unit)}}}).scrollTop=at.propHooks.scrollLeft={set:function(e){e.elem.nodeType&&e.elem.parentNode&&(e.elem[e.prop]=e.now)}},ce.easing={linear:function(e){return e},swing:function(e){return.5-Math.cos(e*Math.PI)/2},_default:"swing"},ce.fx=at.prototype.init,ce.fx.step={};var st,ut,lt,ct,ft=/^(?:toggle|show|hide)$/,pt=/queueHooks$/;function dt(){ut&&(!1===C.hidden&&ie.requestAnimationFrame?ie.requestAnimationFrame(dt):ie.setTimeout(dt,ce.fx.interval),ce.fx.tick())}function ht(){return ie.setTimeout(function(){st=void 0}),st=Date.now()}function gt(e,t){var n,r=0,i={height:e};for(t=t?1:0;r<4;r+=2-t)i["margin"+(n=Q[r])]=i["padding"+n]=e;return t&&(i.opacity=i.width=e),i}function vt(e,t,n){for(var r,i=(yt.tweeners[t]||[]).concat(yt.tweeners["*"]),o=0,a=i.length;o<a;o++)if(r=i[o].call(n,t,e))return r}function yt(o,e,t){var n,a,r=0,i=yt.prefilters.length,s=ce.Deferred().always(function(){delete u.elem}),u=function(){if(a)return!1;for(var e=st||ht(),t=Math.max(0,l.startTime+l.duration-e),n=1-(t/l.duration||0),r=0,i=l.tweens.length;r<i;r++)l.tweens[r].run(n);return s.notifyWith(o,[l,n,t]),n<1&&i?t:(i||s.notifyWith(o,[l,1,0]),s.resolveWith(o,[l]),!1)},l=s.promise({elem:o,props:ce.extend({},e),opts:ce.extend(!0,{specialEasing:{},easing:ce.easing._default},t),originalProperties:e,originalOptions:t,startTime:st||ht(),duration:t.duration,tweens:[],createTween:function(e,t){var n=ce.Tween(o,l.opts,e,t,l.opts.specialEasing[e]||l.opts.easing);return l.tweens.push(n),n},stop:function(e){var t=0,n=e?l.tweens.length:0;if(a)return this;for(a=!0;t<n;t++)l.tweens[t].run(1);return e?(s.notifyWith(o,[l,1,0]),s.resolveWith(o,[l,e])):s.rejectWith(o,[l,e]),this}}),c=l.props;for(!function(e,t){var n,r,i,o,a;for(n in e)if(i=t[r=F(n)],o=e[n],Array.isArray(o)&&(i=o[1],o=e[n]=o[0]),n!==r&&(e[r]=o,delete e[n]),(a=ce.cssHooks[r])&&"expand"in a)for(n in o=a.expand(o),delete e[r],o)n in e||(e[n]=o[n],t[n]=i);else t[r]=i}(c,l.opts.specialEasing);r<i;r++)if(n=yt.prefilters[r].call(l,o,c,l.opts))return v(n.stop)&&(ce._queueHooks(l.elem,l.opts.queue).stop=n.stop.bind(n)),n;return ce.map(c,vt,l),v(l.opts.start)&&l.opts.start.call(o,l),l.progress(l.opts.progress).done(l.opts.done,l.opts.complete).fail(l.opts.fail).always(l.opts.always),ce.fx.timer(ce.extend(u,{elem:o,anim:l,queue:l.opts.queue})),l}ce.Animation=ce.extend(yt,{tweeners:{"*":[function(e,t){var n=this.createTween(e,t);return te(n.elem,e,Y.exec(t),n),n}]},tweener:function(e,t){v(e)?(t=e,e=["*"]):e=e.match(D);for(var n,r=0,i=e.length;r<i;r++)n=e[r],yt.tweeners[n]=yt.tweeners[n]||[],yt.tweeners[n].unshift(t)},prefilters:[function(e,t,n){var r,i,o,a,s,u,l,c,f="width"in t||"height"in t,p=this,d={},h=e.style,g=e.nodeType&&ee(e),v=_.get(e,"fxshow");for(r in n.queue||(null==(a=ce._queueHooks(e,"fx")).unqueued&&(a.unqueued=0,s=a.empty.fire,a.empty.fire=function(){a.unqueued||s()}),a.unqueued++,p.always(function(){p.always(function(){a.unqueued--,ce.queue(e,"fx").length||a.empty.fire()})})),t)if(i=t[r],ft.test(i)){if(delete t[r],o=o||"toggle"===i,i===(g?"hide":"show")){if("show"!==i||!v||void 0===v[r])continue;g=!0}d[r]=v&&v[r]||ce.style(e,r)}if((u=!ce.isEmptyObject(t))||!ce.isEmptyObject(d))for(r in f&&1===e.nodeType&&(n.overflow=[h.overflow,h.overflowX,h.overflowY],null==(l=v&&v.display)&&(l=_.get(e,"display")),"none"===(c=ce.css(e,"display"))&&(l?c=l:(re([e],!0),l=e.style.display||l,c=ce.css(e,"display"),re([e]))),("inline"===c||"inline-block"===c&&null!=l)&&"none"===ce.css(e,"float")&&(u||(p.done(function(){h.display=l}),null==l&&(c=h.display,l="none"===c?"":c)),h.display="inline-block")),n.overflow&&(h.overflow="hidden",p.always(function(){h.overflow=n.overflow[0],h.overflowX=n.overflow[1],h.overflowY=n.overflow[2]})),u=!1,d)u||(v?"hidden"in v&&(g=v.hidden):v=_.access(e,"fxshow",{display:l}),o&&(v.hidden=!g),g&&re([e],!0),p.done(function(){for(r in g||re([e]),_.remove(e,"fxshow"),d)ce.style(e,r,d[r])})),u=vt(g?v[r]:0,r,p),r in v||(v[r]=u.start,g&&(u.end=u.start,u.start=0))}],prefilter:function(e,t){t?yt.prefilters.unshift(e):yt.prefilters.push(e)}}),ce.speed=function(e,t,n){var r=e&&"object"==typeof e?ce.extend({},e):{complete:n||!n&&t||v(e)&&e,duration:e,easing:n&&t||t&&!v(t)&&t};return ce.fx.off?r.duration=0:"number"!=typeof r.duration&&(r.duration in ce.fx.speeds?r.duration=ce.fx.speeds[r.duration]:r.duration=ce.fx.speeds._default),null!=r.queue&&!0!==r.queue||(r.queue="fx"),r.old=r.complete,r.complete=function(){v(r.old)&&r.old.call(this),r.queue&&ce.dequeue(this,r.queue)},r},ce.fn.extend({fadeTo:function(e,t,n,r){return this.filter(ee).css("opacity",0).show().end().animate({opacity:t},e,n,r)},animate:function(t,e,n,r){var i=ce.isEmptyObject(t),o=ce.speed(e,n,r),a=function(){var e=yt(this,ce.extend({},t),o);(i||_.get(this,"finish"))&&e.stop(!0)};return a.finish=a,i||!1===o.queue?this.each(a):this.queue(o.queue,a)},stop:function(i,e,o){var a=function(e){var t=e.stop;delete e.stop,t(o)};return"string"!=typeof i&&(o=e,e=i,i=void 0),e&&this.queue(i||"fx",[]),this.each(function(){var e=!0,t=null!=i&&i+"queueHooks",n=ce.timers,r=_.get(this);if(t)r[t]&&r[t].stop&&a(r[t]);else for(t in r)r[t]&&r[t].stop&&pt.test(t)&&a(r[t]);for(t=n.length;t--;)n[t].elem!==this||null!=i&&n[t].queue!==i||(n[t].anim.stop(o),e=!1,n.splice(t,1));!e&&o||ce.dequeue(this,i)})},finish:function(a){return!1!==a&&(a=a||"fx"),this.each(function(){var e,t=_.get(this),n=t[a+"queue"],r=t[a+"queueHooks"],i=ce.timers,o=n?n.length:0;for(t.finish=!0,ce.queue(this,a,[]),r&&r.stop&&r.stop.call(this,!0),e=i.length;e--;)i[e].elem===this&&i[e].queue===a&&(i[e].anim.stop(!0),i.splice(e,1));for(e=0;e<o;e++)n[e]&&n[e].finish&&n[e].finish.call(this);delete t.finish})}}),ce.each(["toggle","show","hide"],function(e,r){var i=ce.fn[r];ce.fn[r]=function(e,t,n){return null==e||"boolean"==typeof e?i.apply(this,arguments):this.animate(gt(r,!0),e,t,n)}}),ce.each({slideDown:gt("show"),slideUp:gt("hide"),slideToggle:gt("toggle"),fadeIn:{opacity:"show"},fadeOut:{opacity:"hide"},fadeToggle:{opacity:"toggle"}},function(e,r){ce.fn[e]=function(e,t,n){return this.animate(r,e,t,n)}}),ce.timers=[],ce.fx.tick=function(){var e,t=0,n=ce.timers;for(st=Date.now();t<n.length;t++)(e=n[t])()||n[t]!==e||n.splice(t--,1);n.length||ce.fx.stop(),st=void 0},ce.fx.timer=function(e){ce.timers.push(e),ce.fx.start()},ce.fx.interval=13,ce.fx.start=function(){ut||(ut=!0,dt())},ce.fx.stop=function(){ut=null},ce.fx.speeds={slow:600,fast:200,_default:400},ce.fn.delay=function(r,e){return r=ce.fx&&ce.fx.speeds[r]||r,e=e||"fx",this.queue(e,function(e,t){var n=ie.setTimeout(e,r);t.stop=function(){ie.clearTimeout(n)}})},lt=C.createElement("input"),ct=C.createElement("select").appendChild(C.createElement("option")),lt.type="checkbox",le.checkOn=""!==lt.value,le.optSelected=ct.selected,(lt=C.createElement("input")).value="t",lt.type="radio",le.radioValue="t"===lt.value;var mt,xt=ce.expr.attrHandle;ce.fn.extend({attr:function(e,t){return M(this,ce.attr,e,t,1<arguments.length)},removeAttr:function(e){return this.each(function(){ce.removeAttr(this,e)})}}),ce.extend({attr:function(e,t,n){var r,i,o=e.nodeType;if(3!==o&&8!==o&&2!==o)return"undefined"==typeof e.getAttribute?ce.prop(e,t,n):(1===o&&ce.isXMLDoc(e)||(i=ce.attrHooks[t.toLowerCase()]||(ce.expr.match.bool.test(t)?mt:void 0)),void 0!==n?null===n?void ce.removeAttr(e,t):i&&"set"in i&&void 0!==(r=i.set(e,n,t))?r:(e.setAttribute(t,n+""),n):i&&"get"in i&&null!==(r=i.get(e,t))?r:null==(r=ce.find.attr(e,t))?void 0:r)},attrHooks:{type:{set:function(e,t){if(!le.radioValue&&"radio"===t&&fe(e,"input")){var n=e.value;return e.setAttribute("type",t),n&&(e.value=n),t}}}},removeAttr:function(e,t){var n,r=0,i=t&&t.match(D);if(i&&1===e.nodeType)while(n=i[r++])e.removeAttribute(n)}}),mt={set:function(e,t,n){return!1===t?ce.removeAttr(e,n):e.setAttribute(n,n),n}},ce.each(ce.expr.match.bool.source.match(/\w+/g),function(e,t){var a=xt[t]||ce.find.attr;xt[t]=function(e,t,n){var r,i,o=t.toLowerCase();return n||(i=xt[o],xt[o]=r,r=null!=a(e,t,n)?o:null,xt[o]=i),r}});var bt=/^(?:input|select|textarea|button)$/i,wt=/^(?:a|area)$/i;function Tt(e){return(e.match(D)||[]).join(" ")}function Ct(e){return e.getAttribute&&e.getAttribute("class")||""}function kt(e){return Array.isArray(e)?e:"string"==typeof e&&e.match(D)||[]}ce.fn.extend({prop:function(e,t){return M(this,ce.prop,e,t,1<arguments.length)},removeProp:function(e){return this.each(function(){delete this[ce.propFix[e]||e]})}}),ce.extend({prop:function(e,t,n){var r,i,o=e.nodeType;if(3!==o&&8!==o&&2!==o)return 1===o&&ce.isXMLDoc(e)||(t=ce.propFix[t]||t,i=ce.propHooks[t]),void 0!==n?i&&"set"in i&&void 0!==(r=i.set(e,n,t))?r:e[t]=n:i&&"get"in i&&null!==(r=i.get(e,t))?r:e[t]},propHooks:{tabIndex:{get:function(e){var t=ce.find.attr(e,"tabindex");return t?parseInt(t,10):bt.test(e.nodeName)||wt.test(e.nodeName)&&e.href?0:-1}}},propFix:{"for":"htmlFor","class":"className"}}),le.optSelected||(ce.propHooks.selected={get:function(e){var t=e.parentNode;return t&&t.parentNode&&t.parentNode.selectedIndex,null},set:function(e){var t=e.parentNode;t&&(t.selectedIndex,t.parentNode&&t.parentNode.selectedIndex)}}),ce.each(["tabIndex","readOnly","maxLength","cellSpacing","cellPadding","rowSpan","colSpan","useMap","frameBorder","contentEditable"],function(){ce.propFix[this.toLowerCase()]=this}),ce.fn.extend({addClass:function(t){var e,n,r,i,o,a;return v(t)?this.each(function(e){ce(this).addClass(t.call(this,e,Ct(this)))}):(e=kt(t)).length?this.each(function(){if(r=Ct(this),n=1===this.nodeType&&" "+Tt(r)+" "){for(o=0;o<e.length;o++)i=e[o],n.indexOf(" "+i+" ")<0&&(n+=i+" ");a=Tt(n),r!==a&&this.setAttribute("class",a)}}):this},removeClass:function(t){var e,n,r,i,o,a;return v(t)?this.each(function(e){ce(this).removeClass(t.call(this,e,Ct(this)))}):arguments.length?(e=kt(t)).length?this.each(function(){if(r=Ct(this),n=1===this.nodeType&&" "+Tt(r)+" "){for(o=0;o<e.length;o++){i=e[o];while(-1<n.indexOf(" "+i+" "))n=n.replace(" "+i+" "," ")}a=Tt(n),r!==a&&this.setAttribute("class",a)}}):this:this.attr("class","")},toggleClass:function(t,n){var e,r,i,o,a=typeof t,s="string"===a||Array.isArray(t);return v(t)?this.each(function(e){ce(this).toggleClass(t.call(this,e,Ct(this),n),n)}):"boolean"==typeof n&&s?n?this.addClass(t):this.removeClass(t):(e=kt(t),this.each(function(){if(s)for(o=ce(this),i=0;i<e.length;i++)r=e[i],o.hasClass(r)?o.removeClass(r):o.addClass(r);else void 0!==t&&"boolean"!==a||((r=Ct(this))&&_.set(this,"__className__",r),this.setAttribute&&this.setAttribute("class",r||!1===t?"":_.get(this,"__className__")||""))}))},hasClass:function(e){var t,n,r=0;t=" "+e+" ";while(n=this[r++])if(1===n.nodeType&&-1<(" "+Tt(Ct(n))+" ").indexOf(t))return!0;return!1}});var St=/\r/g;ce.fn.extend({val:function(n){var r,e,i,t=this[0];return arguments.length?(i=v(n),this.each(function(e){var t;1===this.nodeType&&(null==(t=i?n.call(this,e,ce(this).val()):n)?t="":"number"==typeof t?t+="":Array.isArray(t)&&(t=ce.map(t,function(e){return null==e?"":e+""})),(r=ce.valHooks[this.type]||ce.valHooks[this.nodeName.toLowerCase()])&&"set"in r&&void 0!==r.set(this,t,"value")||(this.value=t))})):t?(r=ce.valHooks[t.type]||ce.valHooks[t.nodeName.toLowerCase()])&&"get"in r&&void 0!==(e=r.get(t,"value"))?e:"string"==typeof(e=t.value)?e.replace(St,""):null==e?"":e:void 0}}),ce.extend({valHooks:{option:{get:function(e){var t=ce.find.attr(e,"value");return null!=t?t:Tt(ce.text(e))}},select:{get:function(e){var t,n,r,i=e.options,o=e.selectedIndex,a="select-one"===e.type,s=a?null:[],u=a?o+1:i.length;for(r=o<0?u:a?o:0;r<u;r++)if(((n=i[r]).selected||r===o)&&!n.disabled&&(!n.parentNode.disabled||!fe(n.parentNode,"optgroup"))){if(t=ce(n).val(),a)return t;s.push(t)}return s},set:function(e,t){var n,r,i=e.options,o=ce.makeArray(t),a=i.length;while(a--)((r=i[a]).selected=-1<ce.inArray(ce.valHooks.option.get(r),o))&&(n=!0);return n||(e.selectedIndex=-1),o}}}}),ce.each(["radio","checkbox"],function(){ce.valHooks[this]={set:function(e,t){if(Array.isArray(t))return e.checked=-1<ce.inArray(ce(e).val(),t)}},le.checkOn||(ce.valHooks[this].get=function(e){return null===e.getAttribute("value")?"on":e.value})});var Et=ie.location,jt={guid:Date.now()},At=/\?/;ce.parseXML=function(e){var t,n;if(!e||"string"!=typeof e)return null;try{t=(new ie.DOMParser).parseFromString(e,"text/xml")}catch(e){}return n=t&&t.getElementsByTagName("parsererror")[0],t&&!n||ce.error("Invalid XML: "+(n?ce.map(n.childNodes,function(e){return e.textContent}).join("\n"):e)),t};var Dt=/^(?:focusinfocus|focusoutblur)$/,Nt=function(e){e.stopPropagation()};ce.extend(ce.event,{trigger:function(e,t,n,r){var i,o,a,s,u,l,c,f,p=[n||C],d=ue.call(e,"type")?e.type:e,h=ue.call(e,"namespace")?e.namespace.split("."):[];if(o=f=a=n=n||C,3!==n.nodeType&&8!==n.nodeType&&!Dt.test(d+ce.event.triggered)&&(-1<d.indexOf(".")&&(d=(h=d.split(".")).shift(),h.sort()),u=d.indexOf(":")<0&&"on"+d,(e=e[ce.expando]?e:new ce.Event(d,"object"==typeof e&&e)).isTrigger=r?2:3,e.namespace=h.join("."),e.rnamespace=e.namespace?new RegExp("(^|\\.)"+h.join("\\.(?:.*\\.|)")+"(\\.|$)"):null,e.result=void 0,e.target||(e.target=n),t=null==t?[e]:ce.makeArray(t,[e]),c=ce.event.special[d]||{},r||!c.trigger||!1!==c.trigger.apply(n,t))){if(!r&&!c.noBubble&&!y(n)){for(s=c.delegateType||d,Dt.test(s+d)||(o=o.parentNode);o;o=o.parentNode)p.push(o),a=o;a===(n.ownerDocument||C)&&p.push(a.defaultView||a.parentWindow||ie)}i=0;while((o=p[i++])&&!e.isPropagationStopped())f=o,e.type=1<i?s:c.bindType||d,(l=(_.get(o,"events")||Object.create(null))[e.type]&&_.get(o,"handle"))&&l.apply(o,t),(l=u&&o[u])&&l.apply&&$(o)&&(e.result=l.apply(o,t),!1===e.result&&e.preventDefault());return e.type=d,r||e.isDefaultPrevented()||c._default&&!1!==c._default.apply(p.pop(),t)||!$(n)||u&&v(n[d])&&!y(n)&&((a=n[u])&&(n[u]=null),ce.event.triggered=d,e.isPropagationStopped()&&f.addEventListener(d,Nt),n[d](),e.isPropagationStopped()&&f.removeEventListener(d,Nt),ce.event.triggered=void 0,a&&(n[u]=a)),e.result}},simulate:function(e,t,n){var r=ce.extend(new ce.Event,n,{type:e,isSimulated:!0});ce.event.trigger(r,null,t)}}),ce.fn.extend({trigger:function(e,t){return this.each(function(){ce.event.trigger(e,t,this)})},triggerHandler:function(e,t){var n=this[0];if(n)return ce.event.trigger(e,t,n,!0)}});var qt=/\[\]$/,Lt=/\r?\n/g,Ht=/^(?:submit|button|image|reset|file)$/i,Ot=/^(?:input|select|textarea|keygen)/i;function Pt(n,e,r,i){var t;if(Array.isArray(e))ce.each(e,function(e,t){r||qt.test(n)?i(n,t):Pt(n+"["+("object"==typeof t&&null!=t?e:"")+"]",t,r,i)});else if(r||"object"!==x(e))i(n,e);else for(t in e)Pt(n+"["+t+"]",e[t],r,i)}ce.param=function(e,t){var n,r=[],i=function(e,t){var n=v(t)?t():t;r[r.length]=encodeURIComponent(e)+"="+encodeURIComponent(null==n?"":n)};if(null==e)return"";if(Array.isArray(e)||e.jquery&&!ce.isPlainObject(e))ce.each(e,function(){i(this.name,this.value)});else for(n in e)Pt(n,e[n],t,i);return r.join("&")},ce.fn.extend({serialize:function(){return ce.param(this.serializeArray())},serializeArray:function(){return this.map(function(){var e=ce.prop(this,"elements");return e?ce.makeArray(e):this}).filter(function(){var e=this.type;return this.name&&!ce(this).is(":disabled")&&Ot.test(this.nodeName)&&!Ht.test(e)&&(this.checked||!we.test(e))}).map(function(e,t){var n=ce(this).val();return null==n?null:Array.isArray(n)?ce.map(n,function(e){return{name:t.name,value:e.replace(Lt,"\r\n")}}):{name:t.name,value:n.replace(Lt,"\r\n")}}).get()}});var Mt=/%20/g,Rt=/#.*$/,It=/([?&])_=[^&]*/,Wt=/^(.*?):[ \t]*([^\r\n]*)$/gm,Ft=/^(?:GET|HEAD)$/,$t=/^\/\//,Bt={},_t={},zt="*/".concat("*"),Xt=C.createElement("a");function Ut(o){return function(e,t){"string"!=typeof e&&(t=e,e="*");var n,r=0,i=e.toLowerCase().match(D)||[];if(v(t))while(n=i[r++])"+"===n[0]?(n=n.slice(1)||"*",(o[n]=o[n]||[]).unshift(t)):(o[n]=o[n]||[]).push(t)}}function Vt(t,i,o,a){var s={},u=t===_t;function l(e){var r;return s[e]=!0,ce.each(t[e]||[],function(e,t){var n=t(i,o,a);return"string"!=typeof n||u||s[n]?u?!(r=n):void 0:(i.dataTypes.unshift(n),l(n),!1)}),r}return l(i.dataTypes[0])||!s["*"]&&l("*")}function Gt(e,t){var n,r,i=ce.ajaxSettings.flatOptions||{};for(n in t)void 0!==t[n]&&((i[n]?e:r||(r={}))[n]=t[n]);return r&&ce.extend(!0,e,r),e}Xt.href=Et.href,ce.extend({active:0,lastModified:{},etag:{},ajaxSettings:{url:Et.href,type:"GET",isLocal:/^(?:about|app|app-storage|.+-extension|file|res|widget):$/.test(Et.protocol),global:!0,processData:!0,async:!0,contentType:"application/x-www-form-urlencoded; charset=UTF-8",accepts:{"*":zt,text:"text/plain",html:"text/html",xml:"application/xml, text/xml",json:"application/json, text/javascript"},contents:{xml:/\bxml\b/,html:/\bhtml/,json:/\bjson\b/},responseFields:{xml:"responseXML",text:"responseText",json:"responseJSON"},converters:{"* text":String,"text html":!0,"text json":JSON.parse,"text xml":ce.parseXML},flatOptions:{url:!0,context:!0}},ajaxSetup:function(e,t){return t?Gt(Gt(e,ce.ajaxSettings),t):Gt(ce.ajaxSettings,e)},ajaxPrefilter:Ut(Bt),ajaxTransport:Ut(_t),ajax:function(e,t){"object"==typeof e&&(t=e,e=void 0),t=t||{};var c,f,p,n,d,r,h,g,i,o,v=ce.ajaxSetup({},t),y=v.context||v,m=v.context&&(y.nodeType||y.jquery)?ce(y):ce.event,x=ce.Deferred(),b=ce.Callbacks("once memory"),w=v.statusCode||{},a={},s={},u="canceled",T={readyState:0,getResponseHeader:function(e){var t;if(h){if(!n){n={};while(t=Wt.exec(p))n[t[1].toLowerCase()+" "]=(n[t[1].toLowerCase()+" "]||[]).concat(t[2])}t=n[e.toLowerCase()+" "]}return null==t?null:t.join(", ")},getAllResponseHeaders:function(){return h?p:null},setRequestHeader:function(e,t){return null==h&&(e=s[e.toLowerCase()]=s[e.toLowerCase()]||e,a[e]=t),this},overrideMimeType:function(e){return null==h&&(v.mimeType=e),this},statusCode:function(e){var t;if(e)if(h)T.always(e[T.status]);else for(t in e)w[t]=[w[t],e[t]];return this},abort:function(e){var t=e||u;return c&&c.abort(t),l(0,t),this}};if(x.promise(T),v.url=((e||v.url||Et.href)+"").replace($t,Et.protocol+"//"),v.type=t.method||t.type||v.method||v.type,v.dataTypes=(v.dataType||"*").toLowerCase().match(D)||[""],null==v.crossDomain){r=C.createElement("a");try{r.href=v.url,r.href=r.href,v.crossDomain=Xt.protocol+"//"+Xt.host!=r.protocol+"//"+r.host}catch(e){v.crossDomain=!0}}if(v.data&&v.processData&&"string"!=typeof v.data&&(v.data=ce.param(v.data,v.traditional)),Vt(Bt,v,t,T),h)return T;for(i in(g=ce.event&&v.global)&&0==ce.active++&&ce.event.trigger("ajaxStart"),v.type=v.type.toUpperCase(),v.hasContent=!Ft.test(v.type),f=v.url.replace(Rt,""),v.hasContent?v.data&&v.processData&&0===(v.contentType||"").indexOf("application/x-www-form-urlencoded")&&(v.data=v.data.replace(Mt,"+")):(o=v.url.slice(f.length),v.data&&(v.processData||"string"==typeof v.data)&&(f+=(At.test(f)?"&":"?")+v.data,delete v.data),!1===v.cache&&(f=f.replace(It,"$1"),o=(At.test(f)?"&":"?")+"_="+jt.guid+++o),v.url=f+o),v.ifModified&&(ce.lastModified[f]&&T.setRequestHeader("If-Modified-Since",ce.lastModified[f]),ce.etag[f]&&T.setRequestHeader("If-None-Match",ce.etag[f])),(v.data&&v.hasContent&&!1!==v.contentType||t.contentType)&&T.setRequestHeader("Content-Type",v.contentType),T.setRequestHeader("Accept",v.dataTypes[0]&&v.accepts[v.dataTypes[0]]?v.accepts[v.dataTypes[0]]+("*"!==v.dataTypes[0]?", "+zt+"; q=0.01":""):v.accepts["*"]),v.headers)T.setRequestHeader(i,v.headers[i]);if(v.beforeSend&&(!1===v.beforeSend.call(y,T,v)||h))return T.abort();if(u="abort",b.add(v.complete),T.done(v.success),T.fail(v.error),c=Vt(_t,v,t,T)){if(T.readyState=1,g&&m.trigger("ajaxSend",[T,v]),h)return T;v.async&&0<v.timeout&&(d=ie.setTimeout(function(){T.abort("timeout")},v.timeout));try{h=!1,c.send(a,l)}catch(e){if(h)throw e;l(-1,e)}}else l(-1,"No Transport");function l(e,t,n,r){var i,o,a,s,u,l=t;h||(h=!0,d&&ie.clearTimeout(d),c=void 0,p=r||"",T.readyState=0<e?4:0,i=200<=e&&e<300||304===e,n&&(s=function(e,t,n){var r,i,o,a,s=e.contents,u=e.dataTypes;while("*"===u[0])u.shift(),void 0===r&&(r=e.mimeType||t.getResponseHeader("Content-Type"));if(r)for(i in s)if(s[i]&&s[i].test(r)){u.unshift(i);break}if(u[0]in n)o=u[0];else{for(i in n){if(!u[0]||e.converters[i+" "+u[0]]){o=i;break}a||(a=i)}o=o||a}if(o)return o!==u[0]&&u.unshift(o),n[o]}(v,T,n)),!i&&-1<ce.inArray("script",v.dataTypes)&&ce.inArray("json",v.dataTypes)<0&&(v.converters["text script"]=function(){}),s=function(e,t,n,r){var i,o,a,s,u,l={},c=e.dataTypes.slice();if(c[1])for(a in e.converters)l[a.toLowerCase()]=e.converters[a];o=c.shift();while(o)if(e.responseFields[o]&&(n[e.responseFields[o]]=t),!u&&r&&e.dataFilter&&(t=e.dataFilter(t,e.dataType)),u=o,o=c.shift())if("*"===o)o=u;else if("*"!==u&&u!==o){if(!(a=l[u+" "+o]||l["* "+o]))for(i in l)if((s=i.split(" "))[1]===o&&(a=l[u+" "+s[0]]||l["* "+s[0]])){!0===a?a=l[i]:!0!==l[i]&&(o=s[0],c.unshift(s[1]));break}if(!0!==a)if(a&&e["throws"])t=a(t);else try{t=a(t)}catch(e){return{state:"parsererror",error:a?e:"No conversion from "+u+" to "+o}}}return{state:"success",data:t}}(v,s,T,i),i?(v.ifModified&&((u=T.getResponseHeader("Last-Modified"))&&(ce.lastModified[f]=u),(u=T.getResponseHeader("etag"))&&(ce.etag[f]=u)),204===e||"HEAD"===v.type?l="nocontent":304===e?l="notmodified":(l=s.state,o=s.data,i=!(a=s.error))):(a=l,!e&&l||(l="error",e<0&&(e=0))),T.status=e,T.statusText=(t||l)+"",i?x.resolveWith(y,[o,l,T]):x.rejectWith(y,[T,l,a]),T.statusCode(w),w=void 0,g&&m.trigger(i?"ajaxSuccess":"ajaxError",[T,v,i?o:a]),b.fireWith(y,[T,l]),g&&(m.trigger("ajaxComplete",[T,v]),--ce.active||ce.event.trigger("ajaxStop")))}return T},getJSON:function(e,t,n){return ce.get(e,t,n,"json")},getScript:function(e,t){return ce.get(e,void 0,t,"script")}}),ce.each(["get","post"],function(e,i){ce[i]=function(e,t,n,r){return v(t)&&(r=r||n,n=t,t=void 0),ce.ajax(ce.extend({url:e,type:i,dataType:r,data:t,success:n},ce.isPlainObject(e)&&e))}}),ce.ajaxPrefilter(function(e){var t;for(t in e.headers)"content-type"===t.toLowerCase()&&(e.contentType=e.headers[t]||"")}),ce._evalUrl=function(e,t,n){return ce.ajax({url:e,type:"GET",dataType:"script",cache:!0,async:!1,global:!1,converters:{"text script":function(){}},dataFilter:function(e){ce.globalEval(e,t,n)}})},ce.fn.extend({wrapAll:function(e){var t;return this[0]&&(v(e)&&(e=e.call(this[0])),t=ce(e,this[0].ownerDocument).eq(0).clone(!0),this[0].parentNode&&t.insertBefore(this[0]),t.map(function(){var e=this;while(e.firstElementChild)e=e.firstElementChild;return e}).append(this)),this},wrapInner:function(n){return v(n)?this.each(function(e){ce(this).wrapInner(n.call(this,e))}):this.each(function(){var e=ce(this),t=e.contents();t.length?t.wrapAll(n):e.append(n)})},wrap:function(t){var n=v(t);return this.each(function(e){ce(this).wrapAll(n?t.call(this,e):t)})},unwrap:function(e){return this.parent(e).not("body").each(function(){ce(this).replaceWith(this.childNodes)}),this}}),ce.expr.pseudos.hidden=function(e){return!ce.expr.pseudos.visible(e)},ce.expr.pseudos.visible=function(e){return!!(e.offsetWidth||e.offsetHeight||e.getClientRects().length)},ce.ajaxSettings.xhr=function(){try{return new ie.XMLHttpRequest}catch(e){}};var Yt={0:200,1223:204},Qt=ce.ajaxSettings.xhr();le.cors=!!Qt&&"withCredentials"in Qt,le.ajax=Qt=!!Qt,ce.ajaxTransport(function(i){var o,a;if(le.cors||Qt&&!i.crossDomain)return{send:function(e,t){var n,r=i.xhr();if(r.open(i.type,i.url,i.async,i.username,i.password),i.xhrFields)for(n in i.xhrFields)r[n]=i.xhrFields[n];for(n in i.mimeType&&r.overrideMimeType&&r.overrideMimeType(i.mimeType),i.crossDomain||e["X-Requested-With"]||(e["X-Requested-With"]="XMLHttpRequest"),e)r.setRequestHeader(n,e[n]);o=function(e){return function(){o&&(o=a=r.onload=r.onerror=r.onabort=r.ontimeout=r.onreadystatechange=null,"abort"===e?r.abort():"error"===e?"number"!=typeof r.status?t(0,"error"):t(r.status,r.statusText):t(Yt[r.status]||r.status,r.statusText,"text"!==(r.responseType||"text")||"string"!=typeof r.responseText?{binary:r.response}:{text:r.responseText},r.getAllResponseHeaders()))}},r.onload=o(),a=r.onerror=r.ontimeout=o("error"),void 0!==r.onabort?r.onabort=a:r.onreadystatechange=function(){4===r.readyState&&ie.setTimeout(function(){o&&a()})},o=o("abort");try{r.send(i.hasContent&&i.data||null)}catch(e){if(o)throw e}},abort:function(){o&&o()}}}),ce.ajaxPrefilter(function(e){e.crossDomain&&(e.contents.script=!1)}),ce.ajaxSetup({accepts:{script:"text/javascript, application/javascript, application/ecmascript, application/x-ecmascript"},contents:{script:/\b(?:java|ecma)script\b/},converters:{"text script":function(e){return ce.globalEval(e),e}}}),ce.ajaxPrefilter("script",function(e){void 0===e.cache&&(e.cache=!1),e.crossDomain&&(e.type="GET")}),ce.ajaxTransport("script",function(n){var r,i;if(n.crossDomain||n.scriptAttrs)return{send:function(e,t){r=ce("<script>").attr(n.scriptAttrs||{}).prop({charset:n.scriptCharset,src:n.url}).on("load error",i=function(e){r.remove(),i=null,e&&t("error"===e.type?404:200,e.type)}),C.head.appendChild(r[0])},abort:function(){i&&i()}}});var Jt,Kt=[],Zt=/(=)\?(?=&|$)|\?\?/;ce.ajaxSetup({jsonp:"callback",jsonpCallback:function(){var e=Kt.pop()||ce.expando+"_"+jt.guid++;return this[e]=!0,e}}),ce.ajaxPrefilter("json jsonp",function(e,t,n){var r,i,o,a=!1!==e.jsonp&&(Zt.test(e.url)?"url":"string"==typeof e.data&&0===(e.contentType||"").indexOf("application/x-www-form-urlencoded")&&Zt.test(e.data)&&"data");if(a||"jsonp"===e.dataTypes[0])return r=e.jsonpCallback=v(e.jsonpCallback)?e.jsonpCallback():e.jsonpCallback,a?e[a]=e[a].replace(Zt,"$1"+r):!1!==e.jsonp&&(e.url+=(At.test(e.url)?"&":"?")+e.jsonp+"="+r),e.converters["script json"]=function(){return o||ce.error(r+" was not called"),o[0]},e.dataTypes[0]="json",i=ie[r],ie[r]=function(){o=arguments},n.always(function(){void 0===i?ce(ie).removeProp(r):ie[r]=i,e[r]&&(e.jsonpCallback=t.jsonpCallback,Kt.push(r)),o&&v(i)&&i(o[0]),o=i=void 0}),"script"}),le.createHTMLDocument=((Jt=C.implementation.createHTMLDocument("").body).innerHTML="<form></form><form></form>",2===Jt.childNodes.length),ce.parseHTML=function(e,t,n){return"string"!=typeof e?[]:("boolean"==typeof t&&(n=t,t=!1),t||(le.createHTMLDocument?((r=(t=C.implementation.createHTMLDocument("")).createElement("base")).href=C.location.href,t.head.appendChild(r)):t=C),o=!n&&[],(i=w.exec(e))?[t.createElement(i[1])]:(i=Ae([e],t,o),o&&o.length&&ce(o).remove(),ce.merge([],i.childNodes)));var r,i,o},ce.fn.load=function(e,t,n){var r,i,o,a=this,s=e.indexOf(" ");return-1<s&&(r=Tt(e.slice(s)),e=e.slice(0,s)),v(t)?(n=t,t=void 0):t&&"object"==typeof t&&(i="POST"),0<a.length&&ce.ajax({url:e,type:i||"GET",dataType:"html",data:t}).done(function(e){o=arguments,a.html(r?ce("<div>").append(ce.parseHTML(e)).find(r):e)}).always(n&&function(e,t){a.each(function(){n.apply(this,o||[e.responseText,t,e])})}),this},ce.expr.pseudos.animated=function(t){return ce.grep(ce.timers,function(e){return t===e.elem}).length},ce.offset={setOffset:function(e,t,n){var r,i,o,a,s,u,l=ce.css(e,"position"),c=ce(e),f={};"static"===l&&(e.style.position="relative"),s=c.offset(),o=ce.css(e,"top"),u=ce.css(e,"left"),("absolute"===l||"fixed"===l)&&-1<(o+u).indexOf("auto")?(a=(r=c.position()).top,i=r.left):(a=parseFloat(o)||0,i=parseFloat(u)||0),v(t)&&(t=t.call(e,n,ce.extend({},s))),null!=t.top&&(f.top=t.top-s.top+a),null!=t.left&&(f.left=t.left-s.left+i),"using"in t?t.using.call(e,f):c.css(f)}},ce.fn.extend({offset:function(t){if(arguments.length)return void 0===t?this:this.each(function(e){ce.offset.setOffset(this,t,e)});var e,n,r=this[0];return r?r.getClientRects().length?(e=r.getBoundingClientRect(),n=r.ownerDocument.defaultView,{top:e.top+n.pageYOffset,left:e.left+n.pageXOffset}):{top:0,left:0}:void 0},position:function(){if(this[0]){var e,t,n,r=this[0],i={top:0,left:0};if("fixed"===ce.css(r,"position"))t=r.getBoundingClientRect();else{t=this.offset(),n=r.ownerDocument,e=r.offsetParent||n.documentElement;while(e&&(e===n.body||e===n.documentElement)&&"static"===ce.css(e,"position"))e=e.parentNode;e&&e!==r&&1===e.nodeType&&((i=ce(e).offset()).top+=ce.css(e,"borderTopWidth",!0),i.left+=ce.css(e,"borderLeftWidth",!0))}return{top:t.top-i.top-ce.css(r,"marginTop",!0),left:t.left-i.left-ce.css(r,"marginLeft",!0)}}},offsetParent:function(){return this.map(function(){var e=this.offsetParent;while(e&&"static"===ce.css(e,"position"))e=e.offsetParent;return e||J})}}),ce.each({scrollLeft:"pageXOffset",scrollTop:"pageYOffset"},function(t,i){var o="pageYOffset"===i;ce.fn[t]=function(e){return M(this,function(e,t,n){var r;if(y(e)?r=e:9===e.nodeType&&(r=e.defaultView),void 0===n)return r?r[i]:e[t];r?r.scrollTo(o?r.pageXOffset:n,o?n:r.pageYOffset):e[t]=n},t,e,arguments.length)}}),ce.each(["top","left"],function(e,n){ce.cssHooks[n]=Ye(le.pixelPosition,function(e,t){if(t)return t=Ge(e,n),_e.test(t)?ce(e).position()[n]+"px":t})}),ce.each({Height:"height",Width:"width"},function(a,s){ce.each({padding:"inner"+a,content:s,"":"outer"+a},function(r,o){ce.fn[o]=function(e,t){var n=arguments.length&&(r||"boolean"!=typeof e),i=r||(!0===e||!0===t?"margin":"border");return M(this,function(e,t,n){var r;return y(e)?0===o.indexOf("outer")?e["inner"+a]:e.document.documentElement["client"+a]:9===e.nodeType?(r=e.documentElement,Math.max(e.body["scroll"+a],r["scroll"+a],e.body["offset"+a],r["offset"+a],r["client"+a])):void 0===n?ce.css(e,t,i):ce.style(e,t,n,i)},s,n?e:void 0,n)}})}),ce.each(["ajaxStart","ajaxStop","ajaxComplete","ajaxError","ajaxSuccess","ajaxSend"],function(e,t){ce.fn[t]=function(e){return this.on(t,e)}}),ce.fn.extend({bind:function(e,t,n){return this.on(e,null,t,n)},unbind:function(e,t){return this.off(e,null,t)},delegate:function(e,t,n,r){return this.on(t,e,n,r)},undelegate:function(e,t,n){return 1===arguments.length?this.off(e,"**"):this.off(t,e||"**",n)},hover:function(e,t){return this.on("mouseenter",e).on("mouseleave",t||e)}}),ce.each("blur focus focusin focusout resize scroll click dblclick mousedown mouseup mousemove mouseover mouseout mouseenter mouseleave change select submit keydown keypress keyup contextmenu".split(" "),function(e,n){ce.fn[n]=function(e,t){return 0<arguments.length?this.on(n,null,e,t):this.trigger(n)}});var en=/^[\s\uFEFF\xA0]+|([^\s\uFEFF\xA0])[\s\uFEFF\xA0]+$/g;ce.proxy=function(e,t){var n,r,i;if("string"==typeof t&&(n=e[t],t=e,e=n),v(e))return r=ae.call(arguments,2),(i=function(){return e.apply(t||this,r.concat(ae.call(arguments)))}).guid=e.guid=e.guid||ce.guid++,i},ce.holdReady=function(e){e?ce.readyWait++:ce.ready(!0)},ce.isArray=Array.isArray,ce.parseJSON=JSON.parse,ce.nodeName=fe,ce.isFunction=v,ce.isWindow=y,ce.camelCase=F,ce.type=x,ce.now=Date.now,ce.isNumeric=function(e){var t=ce.type(e);return("number"===t||"string"===t)&&!isNaN(e-parseFloat(e))},ce.trim=function(e){return null==e?"":(e+"").replace(en,"$1")},"function"==typeof define&&define.amd&&define("jquery",[],function(){return ce});var tn=ie.jQuery,nn=ie.$;return ce.noConflict=function(e){return ie.$===ce&&(ie.$=nn),e&&ie.jQuery===ce&&(ie.jQuery=tn),ce},"undefined"==typeof e&&(ie.jQuery=ie.$=ce),ce});
diff --git a/bitbake/lib/toaster/toastergui/static/js/jquery-3.7.1.min.map b/bitbake/lib/toaster/toastergui/static/js/jquery-3.7.1.min.map
new file mode 100644
index 0000000000..db38af5893
--- /dev/null
+++ b/bitbake/lib/toaster/toastergui/static/js/jquery-3.7.1.min.map
@@ -0,0 +1 @@
+{"version":3,"sources":["jquery-3.7.1.js"],"names":["global","factory","module","exports","document","w","Error","window","this","noGlobal","arr","getProto","Object","getPrototypeOf","slice","flat","array","call","concat","apply","push","indexOf","class2type","toString","hasOwn","hasOwnProperty","fnToString","ObjectFunctionString","support","isFunction","obj","nodeType","item","isWindow","preservedScriptAttributes","type","src","nonce","noModule","DOMEval","code","node","doc","i","val","script","createElement","text","getAttribute","setAttribute","head","appendChild","parentNode","removeChild","toType","version","rhtmlSuffix","jQuery","selector","context","fn","init","isArrayLike","length","nodeName","elem","name","toLowerCase","prototype","jquery","constructor","toArray","get","num","pushStack","elems","ret","merge","prevObject","each","callback","map","arguments","first","eq","last","even","grep","_elem","odd","len","j","end","sort","splice","extend","options","copy","copyIsArray","clone","target","deep","isPlainObject","Array","isArray","undefined","expando","Math","random","replace","isReady","error","msg","noop","proto","Ctor","isEmptyObject","globalEval","textContent","documentElement","nodeValue","makeArray","results","inArray","isXMLDoc","namespace","namespaceURI","docElem","ownerDocument","test","second","invert","matches","callbackExpect","arg","value","guid","Symbol","iterator","split","_i","pop","whitespace","rtrimCSS","RegExp","contains","a","b","bup","compareDocumentPosition","rcssescape","fcssescape","ch","asCodePoint","charCodeAt","escapeSelector","sel","preferredDoc","pushNative","Expr","outermostContext","sortInput","hasDuplicate","documentIsHTML","rbuggyQSA","dirruns","done","classCache","createCache","tokenCache","compilerCache","nonnativeSelectorCache","sortOrder","booleans","identifier","attributes","pseudos","rwhitespace","rcomma","rleadingCombinator","rdescend","rpseudo","ridentifier","matchExpr","ID","CLASS","TAG","ATTR","PSEUDO","CHILD","bool","needsContext","rinputs","rheader","rquickExpr","rsibling","runescape","funescape","escape","nonHex","high","String","fromCharCode","unloadHandler","setDocument","inDisabledFieldset","addCombinator","disabled","dir","next","childNodes","e","els","find","seed","m","nid","match","groups","newSelector","newContext","exec","getElementById","id","getElementsByTagName","getElementsByClassName","testContext","scope","tokenize","toSelector","join","querySelectorAll","qsaError","removeAttribute","select","keys","cache","key","cacheLength","shift","markFunction","assert","el","createInputPseudo","createButtonPseudo","createDisabledPseudo","isDisabled","createPositionalPseudo","argument","matchIndexes","subWindow","webkitMatchesSelector","msMatchesSelector","defaultView","top","addEventListener","getById","getElementsByName","disconnectedMatch","cssHas","querySelector","filter","attrId","getAttributeNode","tag","className","input","innerHTML","compare","sortDetached","expr","elements","matchesSelector","attr","attrHandle","uniqueSort","duplicates","sortStable","createPseudo","relative",">"," ","+","~","preFilter","excess","unquoted","nodeNameSelector","expectedNodeName","pattern","operator","check","result","what","_argument","simple","forward","ofType","_context","xml","outerCache","nodeIndex","start","parent","useCache","diff","firstChild","lastChild","pseudo","args","setFilters","idx","matched","not","matcher","compile","unmatched","has","lang","elemLang","hash","location","root","focus","activeElement","err","safeActiveElement","hasFocus","href","tabIndex","enabled","checked","selected","selectedIndex","empty","nextSibling","header","button","_matchIndexes","lt","gt","nth","radio","checkbox","file","password","image","submit","reset","parseOnly","tokens","soFar","preFilters","cached","combinator","base","skip","checkNonElements","doneName","oldCache","newCache","elementMatcher","matchers","condense","newUnmatched","mapped","setMatcher","postFilter","postFinder","postSelector","temp","matcherOut","preMap","postMap","preexisting","contexts","multipleContexts","matcherIn","matcherFromTokens","checkContext","leadingRelative","implicitRelative","matchContext","matchAnyContext","elementMatchers","setMatchers","bySet","byElement","superMatcher","outermost","matchedCount","setMatched","contextBackup","dirrunsUnique","token","compiled","filters","unique","getText","isXML","selectors","until","truncate","is","siblings","n","rneedsContext","rsingleTag","winnow","qualifier","self","rootjQuery","parseHTML","ready","rparentsprev","guaranteedUnique","children","contents","prev","sibling","cur","targets","l","closest","index","prevAll","add","addBack","parents","parentsUntil","nextAll","nextUntil","prevUntil","contentDocument","content","reverse","rnothtmlwhite","Identity","v","Thrower","ex","adoptValue","resolve","reject","noValue","method","promise","fail","then","Callbacks","object","_","flag","firing","memory","fired","locked","list","queue","firingIndex","fire","once","stopOnFalse","remove","disable","lock","fireWith","Deferred","func","tuples","state","always","deferred","catch","pipe","fns","newDefer","tuple","returned","progress","notify","onFulfilled","onRejected","onProgress","maxDepth","depth","handler","special","that","mightThrow","TypeError","notifyWith","resolveWith","process","exceptionHook","rejectWith","getErrorHook","getStackHook","setTimeout","stateString","when","singleValue","remaining","resolveContexts","resolveValues","primary","updateFunc","rerrorNames","asyncError","console","warn","message","stack","readyException","readyList","completed","removeEventListener","readyWait","wait","readyState","doScroll","access","chainable","emptyGet","raw","bulk","_key","rmsPrefix","rdashAlpha","fcamelCase","_all","letter","toUpperCase","camelCase","string","acceptData","owner","Data","uid","defineProperty","configurable","set","data","prop","hasData","dataPriv","dataUser","rbrace","rmultiDash","dataAttr","JSON","parse","removeData","_data","_removeData","attrs","dequeue","startLength","hooks","_queueHooks","unshift","stop","setter","clearQueue","tmp","count","defer","pnum","source","rcssNum","cssExpand","isAttached","composed","getRootNode","isHiddenWithinTree","style","display","css","adjustCSS","valueParts","tween","adjusted","scale","maxIterations","currentValue","initial","unit","cssNumber","initialInUnit","defaultDisplayMap","showHide","show","values","body","hide","toggle","div","rcheckableType","rtagName","rscriptType","createDocumentFragment","checkClone","cloneNode","noCloneChecked","defaultValue","option","wrapMap","thead","col","tr","td","_default","getAll","setGlobalEval","refElements","tbody","tfoot","colgroup","caption","th","optgroup","rhtml","buildFragment","scripts","selection","ignored","wrap","attached","fragment","nodes","htmlPrefilter","createTextNode","rtypenamespace","returnTrue","returnFalse","on","types","one","origFn","event","off","leverageNative","isSetup","saved","isTrigger","delegateType","stopPropagation","stopImmediatePropagation","preventDefault","trigger","isImmediatePropagationStopped","handleObjIn","eventHandle","events","t","handleObj","handlers","namespaces","origType","elemData","create","handle","triggered","dispatch","bindType","delegateCount","setup","mappedTypes","origCount","teardown","removeEvent","nativeEvent","handlerQueue","fix","delegateTarget","preDispatch","isPropagationStopped","currentTarget","rnamespace","postDispatch","matchedHandlers","matchedSelectors","addProp","hook","Event","enumerable","originalEvent","writable","load","noBubble","click","beforeunload","returnValue","props","isDefaultPrevented","defaultPrevented","relatedTarget","timeStamp","Date","now","isSimulated","altKey","bubbles","cancelable","changedTouches","ctrlKey","detail","eventPhase","metaKey","pageX","pageY","shiftKey","view","char","charCode","keyCode","buttons","clientX","clientY","offsetX","offsetY","pointerId","pointerType","screenX","screenY","targetTouches","toElement","touches","which","blur","focusMappedHandler","documentMode","simulate","attaches","dataHolder","mouseenter","mouseleave","pointerenter","pointerleave","orig","related","rnoInnerhtml","rchecked","rcleanScript","manipulationTarget","disableScript","restoreScript","cloneCopyEvent","dest","udataOld","udataCur","domManip","collection","hasScripts","iNoClone","valueIsFunction","html","_evalUrl","keepData","cleanData","dataAndEvents","deepDataAndEvents","srcElements","destElements","inPage","detach","append","prepend","insertBefore","before","after","replaceWith","replaceChild","appendTo","prependTo","insertAfter","replaceAll","original","insert","rnumnonpx","rcustomProp","getStyles","opener","getComputedStyle","swap","old","rboxStyle","curCSS","computed","width","minWidth","maxWidth","isCustomProp","getPropertyValue","pixelBoxStyles","addGetHookIf","conditionFn","hookFn","computeStyleTests","container","cssText","divStyle","pixelPositionVal","reliableMarginLeftVal","roundPixelMeasures","marginLeft","right","pixelBoxStylesVal","boxSizingReliableVal","position","scrollboxSizeVal","offsetWidth","measure","round","parseFloat","reliableTrDimensionsVal","backgroundClip","clearCloneStyle","boxSizingReliable","pixelPosition","reliableMarginLeft","scrollboxSize","reliableTrDimensions","table","trChild","trStyle","height","parseInt","borderTopWidth","borderBottomWidth","offsetHeight","cssPrefixes","emptyStyle","vendorProps","finalPropName","final","cssProps","capName","vendorPropName","rdisplayswap","cssShow","visibility","cssNormalTransform","letterSpacing","fontWeight","setPositiveNumber","subtract","max","boxModelAdjustment","dimension","box","isBorderBox","styles","computedVal","extra","delta","marginDelta","ceil","getWidthOrHeight","valueIsBorderBox","offsetProp","getClientRects","Tween","easing","cssHooks","opacity","animationIterationCount","aspectRatio","borderImageSlice","columnCount","flexGrow","flexShrink","gridArea","gridColumn","gridColumnEnd","gridColumnStart","gridRow","gridRowEnd","gridRowStart","lineHeight","order","orphans","widows","zIndex","zoom","fillOpacity","floodOpacity","stopOpacity","strokeMiterlimit","strokeOpacity","origName","setProperty","isFinite","getBoundingClientRect","scrollboxSizeBuggy","left","margin","padding","border","prefix","suffix","expand","expanded","parts","propHooks","run","percent","eased","duration","pos","step","fx","scrollTop","scrollLeft","linear","p","swing","cos","PI","fxNow","inProgress","opt","rfxtypes","rrun","schedule","hidden","requestAnimationFrame","interval","tick","createFxNow","genFx","includeWidth","createTween","animation","Animation","tweeners","properties","stopped","prefilters","currentTime","startTime","tweens","opts","specialEasing","originalProperties","originalOptions","gotoEnd","propFilter","bind","complete","timer","anim","*","tweener","oldfire","propTween","restoreDisplay","isBox","dataShow","unqueued","overflow","overflowX","overflowY","prefilter","speed","speeds","fadeTo","to","animate","optall","doAnimation","finish","stopQueue","timers","cssFn","slideDown","slideUp","slideToggle","fadeIn","fadeOut","fadeToggle","slow","fast","delay","time","timeout","clearTimeout","checkOn","optSelected","radioValue","boolHook","removeAttr","nType","attrHooks","attrNames","getter","lowercaseName","rfocusable","rclickable","stripAndCollapse","getClass","classesToArray","removeProp","propFix","tabindex","for","class","addClass","classNames","curValue","finalValue","removeClass","toggleClass","stateVal","isValidValue","hasClass","rreturn","valHooks","optionSet","rquery","parseXML","parserErrorElem","DOMParser","parseFromString","rfocusMorph","stopPropagationCallback","onlyHandlers","bubbleType","ontype","lastElement","eventPath","parentWindow","triggerHandler","rbracket","rCRLF","rsubmitterTypes","rsubmittable","buildParams","traditional","param","s","valueOrFunction","encodeURIComponent","serialize","serializeArray","r20","rhash","rantiCache","rheaders","rnoContent","rprotocol","transports","allTypes","originAnchor","addToPrefiltersOrTransports","structure","dataTypeExpression","dataType","dataTypes","inspectPrefiltersOrTransports","jqXHR","inspected","seekingTransport","inspect","prefilterOrFactory","dataTypeOrTransport","ajaxExtend","flatOptions","ajaxSettings","active","lastModified","etag","url","isLocal","protocol","processData","async","contentType","accepts","json","responseFields","converters","* text","text html","text json","text xml","ajaxSetup","settings","ajaxPrefilter","ajaxTransport","ajax","transport","cacheURL","responseHeadersString","responseHeaders","timeoutTimer","urlAnchor","fireGlobals","uncached","callbackContext","globalEventContext","completeDeferred","statusCode","requestHeaders","requestHeadersNames","strAbort","getResponseHeader","getAllResponseHeaders","setRequestHeader","overrideMimeType","mimeType","status","abort","statusText","finalText","crossDomain","host","hasContent","ifModified","headers","beforeSend","success","send","nativeStatusText","responses","isSuccess","response","modified","ct","finalDataType","firstDataType","ajaxHandleResponses","conv2","current","conv","dataFilter","throws","ajaxConvert","getJSON","getScript","text script","wrapAll","firstElementChild","wrapInner","htmlIsFunction","unwrap","visible","xhr","XMLHttpRequest","xhrSuccessStatus","0","1223","xhrSupported","cors","errorCallback","open","username","xhrFields","onload","onerror","onabort","ontimeout","onreadystatechange","responseType","responseText","binary","scriptAttrs","charset","scriptCharset","evt","oldCallbacks","rjsonp","jsonp","jsonpCallback","originalSettings","callbackName","overwritten","responseContainer","jsonProp","createHTMLDocument","implementation","keepScripts","parsed","params","animated","offset","setOffset","curPosition","curLeft","curCSSTop","curTop","curOffset","curCSSLeft","curElem","using","rect","win","pageYOffset","pageXOffset","offsetParent","parentOffset","scrollTo","Height","Width","","defaultExtra","funcName","unbind","delegate","undelegate","hover","fnOver","fnOut","rtrim","proxy","holdReady","hold","parseJSON","isNumeric","isNaN","trim","define","amd","_jQuery","_$","$","noConflict"],"mappings":";CAUA,SAAYA,EAAQC,GAEnB,aAEuB,iBAAXC,QAAiD,iBAAnBA,OAAOC,QAShDD,OAAOC,QAAUH,EAAOI,SACvBH,EAASD,GAAQ,GACjB,SAAUK,GACT,IAAMA,EAAED,SACP,MAAM,IAAIE,MAAO,4CAElB,OAAOL,EAASI,IAGlBJ,EAASD,GAtBX,CA0BuB,oBAAXO,OAAyBA,OAASC,KAAM,SAAUD,GAAQE,GAMtE,aAEA,IAAIC,GAAM,GAENC,EAAWC,OAAOC,eAElBC,GAAQJ,GAAII,MAEZC,EAAOL,GAAIK,KAAO,SAAUC,GAC/B,OAAON,GAAIK,KAAKE,KAAMD,IACnB,SAAUA,GACb,OAAON,GAAIQ,OAAOC,MAAO,GAAIH,IAI1BI,EAAOV,GAAIU,KAEXC,GAAUX,GAAIW,QAEdC,EAAa,GAEbC,EAAWD,EAAWC,SAEtBC,GAASF,EAAWG,eAEpBC,EAAaF,GAAOD,SAEpBI,EAAuBD,EAAWT,KAAML,QAExCgB,GAAU,GAEVC,EAAa,SAAqBC,GASpC,MAAsB,mBAARA,GAA8C,iBAAjBA,EAAIC,UAC1B,mBAAbD,EAAIE,MAIVC,EAAW,SAAmBH,GAChC,OAAc,MAAPA,GAAeA,IAAQA,EAAIvB,QAIhCH,EAAWG,GAAOH,SAIjB8B,EAA4B,CAC/BC,MAAM,EACNC,KAAK,EACLC,OAAO,EACPC,UAAU,GAGX,SAASC,EAASC,EAAMC,EAAMC,GAG7B,IAAIC,EAAGC,EACNC,GAHDH,EAAMA,GAAOtC,GAGC0C,cAAe,UAG7B,GADAD,EAAOE,KAAOP,EACTC,EACJ,IAAME,KAAKT,GAYVU,EAAMH,EAAME,IAAOF,EAAKO,cAAgBP,EAAKO,aAAcL,KAE1DE,EAAOI,aAAcN,EAAGC,GAI3BF,EAAIQ,KAAKC,YAAaN,GAASO,WAAWC,YAAaR,GAIzD,SAASS,EAAQxB,GAChB,OAAY,MAAPA,EACGA,EAAM,GAIQ,iBAARA,GAAmC,mBAARA,EACxCR,EAAYC,EAASN,KAAMa,KAAW,gBAC/BA,EAQT,IAAIyB,EAAU,QAEbC,EAAc,SAGdC,GAAS,SAAUC,EAAUC,GAI5B,OAAO,IAAIF,GAAOG,GAAGC,KAAMH,EAAUC,IAmYvC,SAASG,EAAahC,GAMrB,IAAIiC,IAAWjC,GAAO,WAAYA,GAAOA,EAAIiC,OAC5C5B,EAAOmB,EAAQxB,GAEhB,OAAKD,EAAYC,KAASG,EAAUH,KAIpB,UAATK,GAA+B,IAAX4B,GACR,iBAAXA,GAAgC,EAATA,GAAgBA,EAAS,KAAOjC,GAIhE,SAASkC,GAAUC,EAAMC,GAExB,OAAOD,EAAKD,UAAYC,EAAKD,SAASG,gBAAkBD,EAAKC,cApZ9DV,GAAOG,GAAKH,GAAOW,UAAY,CAG9BC,OAAQd,EAERe,YAAab,GAGbM,OAAQ,EAERQ,QAAS,WACR,OAAOzD,GAAMG,KAAMT,OAKpBgE,IAAK,SAAUC,GAGd,OAAY,MAAPA,EACG3D,GAAMG,KAAMT,MAIbiE,EAAM,EAAIjE,KAAMiE,EAAMjE,KAAKuD,QAAWvD,KAAMiE,IAKpDC,UAAW,SAAUC,GAGpB,IAAIC,EAAMnB,GAAOoB,MAAOrE,KAAK8D,cAAeK,GAM5C,OAHAC,EAAIE,WAAatE,KAGVoE,GAIRG,KAAM,SAAUC,GACf,OAAOvB,GAAOsB,KAAMvE,KAAMwE,IAG3BC,IAAK,SAAUD,GACd,OAAOxE,KAAKkE,UAAWjB,GAAOwB,IAAKzE,KAAM,SAAUyD,EAAMtB,GACxD,OAAOqC,EAAS/D,KAAMgD,EAAMtB,EAAGsB,OAIjCnD,MAAO,WACN,OAAON,KAAKkE,UAAW5D,GAAMK,MAAOX,KAAM0E,aAG3CC,MAAO,WACN,OAAO3E,KAAK4E,GAAI,IAGjBC,KAAM,WACL,OAAO7E,KAAK4E,IAAK,IAGlBE,KAAM,WACL,OAAO9E,KAAKkE,UAAWjB,GAAO8B,KAAM/E,KAAM,SAAUgF,EAAO7C,GAC1D,OAASA,EAAI,GAAM,MAIrB8C,IAAK,WACJ,OAAOjF,KAAKkE,UAAWjB,GAAO8B,KAAM/E,KAAM,SAAUgF,EAAO7C,GAC1D,OAAOA,EAAI,MAIbyC,GAAI,SAAUzC,GACb,IAAI+C,EAAMlF,KAAKuD,OACd4B,GAAKhD,GAAMA,EAAI,EAAI+C,EAAM,GAC1B,OAAOlF,KAAKkE,UAAgB,GAALiB,GAAUA,EAAID,EAAM,CAAElF,KAAMmF,IAAQ,KAG5DC,IAAK,WACJ,OAAOpF,KAAKsE,YAActE,KAAK8D,eAKhClD,KAAMA,EACNyE,KAAMnF,GAAImF,KACVC,OAAQpF,GAAIoF,QAGbrC,GAAOsC,OAAStC,GAAOG,GAAGmC,OAAS,WAClC,IAAIC,EAAS9B,EAAM9B,EAAK6D,EAAMC,EAAaC,EAC1CC,EAASlB,UAAW,IAAO,GAC3BvC,EAAI,EACJoB,EAASmB,UAAUnB,OACnBsC,GAAO,EAsBR,IAnBuB,kBAAXD,IACXC,EAAOD,EAGPA,EAASlB,UAAWvC,IAAO,GAC3BA,KAIsB,iBAAXyD,GAAwBvE,EAAYuE,KAC/CA,EAAS,IAILzD,IAAMoB,IACVqC,EAAS5F,KACTmC,KAGOA,EAAIoB,EAAQpB,IAGnB,GAAqC,OAA9BqD,EAAUd,UAAWvC,IAG3B,IAAMuB,KAAQ8B,EACbC,EAAOD,EAAS9B,GAIF,cAATA,GAAwBkC,IAAWH,IAKnCI,GAAQJ,IAAUxC,GAAO6C,cAAeL,KAC1CC,EAAcK,MAAMC,QAASP,MAC/B7D,EAAMgE,EAAQlC,GAIbiC,EADID,IAAgBK,MAAMC,QAASpE,GAC3B,GACI8D,GAAgBzC,GAAO6C,cAAelE,GAG1CA,EAFA,GAIT8D,GAAc,EAGdE,EAAQlC,GAAST,GAAOsC,OAAQM,EAAMF,EAAOF,SAGzBQ,IAATR,IACXG,EAAQlC,GAAS+B,IAOrB,OAAOG,GAGR3C,GAAOsC,OAAQ,CAGdW,QAAS,UAAanD,EAAUoD,KAAKC,UAAWC,QAAS,MAAO,IAGhEC,SAAS,EAETC,MAAO,SAAUC,GAChB,MAAM,IAAI1G,MAAO0G,IAGlBC,KAAM,aAENX,cAAe,SAAUxE,GACxB,IAAIoF,EAAOC,EAIX,SAAMrF,GAAgC,oBAAzBP,EAASN,KAAMa,QAI5BoF,EAAQvG,EAAUmB,KASK,mBADvBqF,EAAO3F,GAAOP,KAAMiG,EAAO,gBAAmBA,EAAM5C,cACf5C,EAAWT,KAAMkG,KAAWxF,IAGlEyF,cAAe,SAAUtF,GACxB,IAAIoC,EAEJ,IAAMA,KAAQpC,EACb,OAAO,EAER,OAAO,GAKRuF,WAAY,SAAU7E,EAAMwD,EAAStD,GACpCH,EAASC,EAAM,CAAEH,MAAO2D,GAAWA,EAAQ3D,OAASK,IAGrDqC,KAAM,SAAUjD,EAAKkD,GACpB,IAAIjB,EAAQpB,EAAI,EAEhB,GAAKmB,EAAahC,IAEjB,IADAiC,EAASjC,EAAIiC,OACLpB,EAAIoB,EAAQpB,IACnB,IAAgD,IAA3CqC,EAAS/D,KAAMa,EAAKa,GAAKA,EAAGb,EAAKa,IACrC,WAIF,IAAMA,KAAKb,EACV,IAAgD,IAA3CkD,EAAS/D,KAAMa,EAAKa,GAAKA,EAAGb,EAAKa,IACrC,MAKH,OAAOb,GAKRiB,KAAM,SAAUkB,GACf,IAAIxB,EACHmC,EAAM,GACNjC,EAAI,EACJZ,EAAWkC,EAAKlC,SAEjB,IAAMA,EAGL,MAAUU,EAAOwB,EAAMtB,KAGtBiC,GAAOnB,GAAOV,KAAMN,GAGtB,OAAkB,IAAbV,GAA+B,KAAbA,EACfkC,EAAKqD,YAEK,IAAbvF,EACGkC,EAAKsD,gBAAgBD,YAEX,IAAbvF,GAA+B,IAAbA,EACfkC,EAAKuD,UAKN5C,GAIR6C,UAAW,SAAU/G,EAAKgH,GACzB,IAAI9C,EAAM8C,GAAW,GAarB,OAXY,MAAPhH,IACCoD,EAAalD,OAAQF,IACzB+C,GAAOoB,MAAOD,EACE,iBAARlE,EACN,CAAEA,GAAQA,GAGZU,EAAKH,KAAM2D,EAAKlE,IAIXkE,GAGR+C,QAAS,SAAU1D,EAAMvD,EAAKiC,GAC7B,OAAc,MAAPjC,GAAe,EAAIW,GAAQJ,KAAMP,EAAKuD,EAAMtB,IAGpDiF,SAAU,SAAU3D,GACnB,IAAI4D,EAAY5D,GAAQA,EAAK6D,aAC5BC,EAAU9D,IAAUA,EAAK+D,eAAiB/D,GAAOsD,gBAIlD,OAAQ/D,EAAYyE,KAAMJ,GAAaE,GAAWA,EAAQ/D,UAAY,SAKvEa,MAAO,SAAUM,EAAO+C,GAKvB,IAJA,IAAIxC,GAAOwC,EAAOnE,OACjB4B,EAAI,EACJhD,EAAIwC,EAAMpB,OAEH4B,EAAID,EAAKC,IAChBR,EAAOxC,KAAQuF,EAAQvC,GAKxB,OAFAR,EAAMpB,OAASpB,EAERwC,GAGRI,KAAM,SAAUZ,EAAOK,EAAUmD,GAShC,IARA,IACCC,EAAU,GACVzF,EAAI,EACJoB,EAASY,EAAMZ,OACfsE,GAAkBF,EAIXxF,EAAIoB,EAAQpB,KACAqC,EAAUL,EAAOhC,GAAKA,KAChB0F,GACxBD,EAAQhH,KAAMuD,EAAOhC,IAIvB,OAAOyF,GAIRnD,IAAK,SAAUN,EAAOK,EAAUsD,GAC/B,IAAIvE,EAAQwE,EACX5F,EAAI,EACJiC,EAAM,GAGP,GAAKd,EAAaa,GAEjB,IADAZ,EAASY,EAAMZ,OACPpB,EAAIoB,EAAQpB,IAGL,OAFd4F,EAAQvD,EAAUL,EAAOhC,GAAKA,EAAG2F,KAGhC1D,EAAIxD,KAAMmH,QAMZ,IAAM5F,KAAKgC,EAGI,OAFd4D,EAAQvD,EAAUL,EAAOhC,GAAKA,EAAG2F,KAGhC1D,EAAIxD,KAAMmH,GAMb,OAAOxH,EAAM6D,IAId4D,KAAM,EAIN5G,QAASA,KAGa,mBAAX6G,SACXhF,GAAOG,GAAI6E,OAAOC,UAAahI,GAAK+H,OAAOC,WAI5CjF,GAAOsB,KAAM,uEAAuE4D,MAAO,KAC1F,SAAUC,EAAI1E,GACb5C,EAAY,WAAa4C,EAAO,KAAQA,EAAKC,gBA0B/C,IAAI0E,GAAMnI,GAAImI,IAGVhD,GAAOnF,GAAImF,KAGXC,GAASpF,GAAIoF,OAGbgD,GAAa,sBAGbC,GAAW,IAAIC,OAClB,IAAMF,GAAa,8BAAgCA,GAAa,KAChE,KAODrF,GAAOwF,SAAW,SAAUC,EAAGC,GAC9B,IAAIC,EAAMD,GAAKA,EAAE/F,WAEjB,OAAO8F,IAAME,MAAWA,GAAwB,IAAjBA,EAAIrH,YAIlCmH,EAAED,SACDC,EAAED,SAAUG,GACZF,EAAEG,yBAA8D,GAAnCH,EAAEG,wBAAyBD,MAS3D,IAAIE,EAAa,+CAEjB,SAASC,EAAYC,EAAIC,GACxB,OAAKA,EAGQ,OAAPD,EACG,SAIDA,EAAG1I,MAAO,GAAI,GAAM,KAAO0I,EAAGE,WAAYF,EAAGzF,OAAS,GAAIxC,SAAU,IAAO,IAI5E,KAAOiI,EAGf/F,GAAOkG,eAAiB,SAAUC,GACjC,OAASA,EAAM,IAAK/C,QAASyC,EAAYC,IAM1C,IAAIM,GAAezJ,EAClB0J,GAAa1I,GAEd,WAEA,IAAIuB,EACHoH,EACAC,EACAC,EACAC,EAIA9J,EACAmH,EACA4C,EACAC,EACAhC,EAPAhH,EAAO0I,GAUPpD,EAAUjD,GAAOiD,QACjB2D,EAAU,EACVC,EAAO,EACPC,EAAaC,IACbC,EAAaD,IACbE,EAAgBF,IAChBG,EAAyBH,IACzBI,EAAY,SAAU1B,EAAGC,GAIxB,OAHKD,IAAMC,IACVe,GAAe,GAET,GAGRW,EAAW,6HAMXC,EAAa,0BAA4BhC,GACxC,0CAGDiC,EAAa,MAAQjC,GAAa,KAAOgC,EAAa,OAAShC,GAG9D,gBAAkBA,GAGlB,2DAA6DgC,EAAa,OAC1EhC,GAAa,OAEdkC,EAAU,KAAOF,EAAa,wFAOAC,EAAa,eAO3CE,EAAc,IAAIjC,OAAQF,GAAa,IAAK,KAE5CoC,EAAS,IAAIlC,OAAQ,IAAMF,GAAa,KAAOA,GAAa,KAC5DqC,EAAqB,IAAInC,OAAQ,IAAMF,GAAa,WAAaA,GAAa,IAC7EA,GAAa,KACdsC,EAAW,IAAIpC,OAAQF,GAAa,MAEpCuC,EAAU,IAAIrC,OAAQgC,GACtBM,EAAc,IAAItC,OAAQ,IAAM8B,EAAa,KAE7CS,EAAY,CACXC,GAAI,IAAIxC,OAAQ,MAAQ8B,EAAa,KACrCW,MAAO,IAAIzC,OAAQ,QAAU8B,EAAa,KAC1CY,IAAK,IAAI1C,OAAQ,KAAO8B,EAAa,SACrCa,KAAM,IAAI3C,OAAQ,IAAM+B,GACxBa,OAAQ,IAAI5C,OAAQ,IAAMgC,GAC1Ba,MAAO,IAAI7C,OACV,yDACCF,GAAa,+BAAiCA,GAAa,cAC3DA,GAAa,aAAeA,GAAa,SAAU,KACrDgD,KAAM,IAAI9C,OAAQ,OAAS6B,EAAW,KAAM,KAI5CkB,aAAc,IAAI/C,OAAQ,IAAMF,GAC/B,mDAAqDA,GACrD,mBAAqBA,GAAa,mBAAoB,MAGxDkD,EAAU,sCACVC,EAAU,SAGVC,EAAa,mCAEbC,EAAW,OAIXC,EAAY,IAAIpD,OAAQ,uBAAyBF,GAChD,uBAAwB,KACzBuD,EAAY,SAAUC,EAAQC,GAC7B,IAAIC,EAAO,KAAOF,EAAOxL,MAAO,GAAM,MAEtC,OAAKyL,IAUEC,EAAO,EACbC,OAAOC,aAAcF,EAAO,OAC5BC,OAAOC,aAAcF,GAAQ,GAAK,MAAe,KAAPA,EAAe,SAO3DG,EAAgB,WACfC,KAGDC,EAAqBC,EACpB,SAAU7I,GACT,OAAyB,IAAlBA,EAAK8I,UAAqB/I,GAAUC,EAAM,aAElD,CAAE+I,IAAK,aAAcC,KAAM,WAa7B,IACC7L,EAAKD,MACFT,GAAMI,GAAMG,KAAM4I,GAAaqD,YACjCrD,GAAaqD,YAMdxM,GAAKmJ,GAAaqD,WAAWnJ,QAAShC,SACrC,MAAQoL,GACT/L,EAAO,CACND,MAAO,SAAUiF,EAAQgH,GACxBtD,GAAW3I,MAAOiF,EAAQtF,GAAMG,KAAMmM,KAEvCnM,KAAM,SAAUmF,GACf0D,GAAW3I,MAAOiF,EAAQtF,GAAMG,KAAMiE,UAAW,MAKpD,SAASmI,EAAM3J,EAAUC,EAAS+D,EAAS4F,GAC1C,IAAIC,EAAG5K,EAAGsB,EAAMuJ,EAAKC,EAAOC,EAAQC,EACnCC,EAAajK,GAAWA,EAAQqE,cAGhCjG,EAAW4B,EAAUA,EAAQ5B,SAAW,EAKzC,GAHA2F,EAAUA,GAAW,GAGI,iBAAbhE,IAA0BA,GACxB,IAAb3B,GAA+B,IAAbA,GAA+B,KAAbA,EAEpC,OAAO2F,EAIR,IAAM4F,IACLV,EAAajJ,GACbA,EAAUA,GAAWvD,EAEhB+J,GAAiB,CAIrB,GAAkB,KAAbpI,IAAqB0L,EAAQvB,EAAW2B,KAAMnK,IAGlD,GAAO6J,EAAIE,EAAO,IAGjB,GAAkB,IAAb1L,EAAiB,CACrB,KAAOkC,EAAON,EAAQmK,eAAgBP,IASrC,OAAO7F,EALP,GAAKzD,EAAK8J,KAAOR,EAEhB,OADAnM,EAAKH,KAAMyG,EAASzD,GACbyD,OAWT,GAAKkG,IAAgB3J,EAAO2J,EAAWE,eAAgBP,KACtDF,EAAKpE,SAAUtF,EAASM,IACxBA,EAAK8J,KAAOR,EAGZ,OADAnM,EAAKH,KAAMyG,EAASzD,GACbyD,MAKH,CAAA,GAAK+F,EAAO,GAElB,OADArM,EAAKD,MAAOuG,EAAS/D,EAAQqK,qBAAsBtK,IAC5CgE,EAGD,IAAO6F,EAAIE,EAAO,KAAS9J,EAAQsK,uBAEzC,OADA7M,EAAKD,MAAOuG,EAAS/D,EAAQsK,uBAAwBV,IAC9C7F,EAKT,KAAMiD,EAAwBjH,EAAW,MACrC0G,GAAcA,EAAUnC,KAAMvE,IAAe,CAYhD,GAVAiK,EAAcjK,EACdkK,EAAajK,EASK,IAAb5B,IACFqJ,EAASnD,KAAMvE,IAAcyH,EAAmBlD,KAAMvE,IAAe,EAGvEkK,EAAazB,EAASlE,KAAMvE,IAAcwK,EAAavK,EAAQP,aAC9DO,IAQkBA,GAAY/B,GAAQuM,SAG/BX,EAAM7J,EAAQX,aAAc,OAClCwK,EAAM/J,GAAOkG,eAAgB6D,GAE7B7J,EAAQV,aAAc,KAAQuK,EAAM9G,IAMtC/D,GADA+K,EAASU,EAAU1K,IACRK,OACX,MAAQpB,IACP+K,EAAQ/K,IAAQ6K,EAAM,IAAMA,EAAM,UAAa,IAC9Ca,EAAYX,EAAQ/K,IAEtBgL,EAAcD,EAAOY,KAAM,KAG5B,IAIC,OAHAlN,EAAKD,MAAOuG,EACXkG,EAAWW,iBAAkBZ,IAEvBjG,EACN,MAAQ8G,GACT7D,EAAwBjH,GAAU,GACjC,QACI8J,IAAQ9G,GACZ/C,EAAQ8K,gBAAiB,QAQ9B,OAAOC,GAAQhL,EAASmD,QAASkC,GAAU,MAAQpF,EAAS+D,EAAS4F,GAStE,SAAS9C,IACR,IAAImE,EAAO,GAaX,OAXA,SAASC,EAAOC,EAAKtG,GASpB,OALKoG,EAAKvN,KAAMyN,EAAM,KAAQ9E,EAAK+E,oBAG3BF,EAAOD,EAAKI,SAEXH,EAAOC,EAAM,KAAQtG,GAShC,SAASyG,EAAcpL,GAEtB,OADAA,EAAI8C,IAAY,EACT9C,EAOR,SAASqL,EAAQrL,GAChB,IAAIsL,EAAK9O,EAAS0C,cAAe,YAEjC,IACC,QAASc,EAAIsL,GACZ,MAAQ/B,GACT,OAAO,EACN,QAGI+B,EAAG9L,YACP8L,EAAG9L,WAAWC,YAAa6L,GAI5BA,EAAK,MAQP,SAASC,EAAmBhN,GAC3B,OAAO,SAAU8B,GAChB,OAAOD,GAAUC,EAAM,UAAaA,EAAK9B,OAASA,GAQpD,SAASiN,EAAoBjN,GAC5B,OAAO,SAAU8B,GAChB,OAASD,GAAUC,EAAM,UAAaD,GAAUC,EAAM,YACrDA,EAAK9B,OAASA,GAQjB,SAASkN,EAAsBtC,GAG9B,OAAO,SAAU9I,GAKhB,MAAK,SAAUA,EASTA,EAAKb,aAAgC,IAAlBa,EAAK8I,SAGvB,UAAW9I,EACV,UAAWA,EAAKb,WACba,EAAKb,WAAW2J,WAAaA,EAE7B9I,EAAK8I,WAAaA,EAMpB9I,EAAKqL,aAAevC,GAG1B9I,EAAKqL,cAAgBvC,GACpBF,EAAoB5I,KAAW8I,EAG3B9I,EAAK8I,WAAaA,EAKd,UAAW9I,GACfA,EAAK8I,WAAaA,GAY5B,SAASwC,EAAwB3L,GAChC,OAAOoL,EAAc,SAAUQ,GAE9B,OADAA,GAAYA,EACLR,EAAc,SAAU1B,EAAMlF,GACpC,IAAIzC,EACH8J,EAAe7L,EAAI,GAAI0J,EAAKvJ,OAAQyL,GACpC7M,EAAI8M,EAAa1L,OAGlB,MAAQpB,IACF2K,EAAQ3H,EAAI8J,EAAc9M,MAC9B2K,EAAM3H,KAASyC,EAASzC,GAAM2H,EAAM3H,SAYzC,SAASuI,EAAavK,GACrB,OAAOA,GAAmD,oBAAjCA,EAAQqK,sBAAwCrK,EAQ1E,SAASiJ,EAAanK,GACrB,IAAIiN,EACHhN,EAAMD,EAAOA,EAAKuF,eAAiBvF,EAAOoH,GAO3C,OAAKnH,GAAOtC,GAA6B,IAAjBsC,EAAIX,UAAmBW,EAAI6E,kBAMnDA,GADAnH,EAAWsC,GACgB6E,gBAC3B4C,GAAkB1G,GAAOmE,SAAUxH,GAInCgI,EAAUb,EAAgBa,SACzBb,EAAgBoI,uBAChBpI,EAAgBqI,kBAOZrI,EAAgBqI,mBAMpB/F,IAAgBzJ,IACdsP,EAAYtP,EAASyP,cAAiBH,EAAUI,MAAQJ,GAG1DA,EAAUK,iBAAkB,SAAUpD,GAOvC/K,GAAQoO,QAAUf,EAAQ,SAAUC,GAEnC,OADA3H,EAAgBpE,YAAa+L,GAAKnB,GAAKtK,GAAOiD,SACtCtG,EAAS6P,oBACf7P,EAAS6P,kBAAmBxM,GAAOiD,SAAU3C,SAMhDnC,GAAQsO,kBAAoBjB,EAAQ,SAAUC,GAC7C,OAAO9G,EAAQnH,KAAMiO,EAAI,OAK1BtN,GAAQuM,MAAQc,EAAQ,WACvB,OAAO7O,EAASmO,iBAAkB,YAYnC3M,GAAQuO,OAASlB,EAAQ,WACxB,IAEC,OADA7O,EAASgQ,cAAe,oBACjB,EACN,MAAQjD,GACT,OAAO,KAKJvL,GAAQoO,SACZjG,EAAKsG,OAAO7E,GAAK,SAAUuC,GAC1B,IAAIuC,EAASvC,EAAGlH,QAASuF,EAAWC,GACpC,OAAO,SAAUpI,GAChB,OAAOA,EAAKjB,aAAc,QAAWsN,IAGvCvG,EAAKsD,KAAK7B,GAAK,SAAUuC,EAAIpK,GAC5B,GAAuC,oBAA3BA,EAAQmK,gBAAkC3D,EAAiB,CACtE,IAAIlG,EAAON,EAAQmK,eAAgBC,GACnC,OAAO9J,EAAO,CAAEA,GAAS,OAI3B8F,EAAKsG,OAAO7E,GAAM,SAAUuC,GAC3B,IAAIuC,EAASvC,EAAGlH,QAASuF,EAAWC,GACpC,OAAO,SAAUpI,GAChB,IAAIxB,EAAwC,oBAA1BwB,EAAKsM,kBACtBtM,EAAKsM,iBAAkB,MACxB,OAAO9N,GAAQA,EAAK8F,QAAU+H,IAMhCvG,EAAKsD,KAAK7B,GAAK,SAAUuC,EAAIpK,GAC5B,GAAuC,oBAA3BA,EAAQmK,gBAAkC3D,EAAiB,CACtE,IAAI1H,EAAME,EAAGgC,EACZV,EAAON,EAAQmK,eAAgBC,GAEhC,GAAK9J,EAAO,CAIX,IADAxB,EAAOwB,EAAKsM,iBAAkB,QACjB9N,EAAK8F,QAAUwF,EAC3B,MAAO,CAAE9J,GAIVU,EAAQhB,EAAQsM,kBAAmBlC,GACnCpL,EAAI,EACJ,MAAUsB,EAAOU,EAAOhC,KAEvB,IADAF,EAAOwB,EAAKsM,iBAAkB,QACjB9N,EAAK8F,QAAUwF,EAC3B,MAAO,CAAE9J,GAKZ,MAAO,MAMV8F,EAAKsD,KAAK3B,IAAM,SAAU8E,EAAK7M,GAC9B,MAA6C,oBAAjCA,EAAQqK,qBACZrK,EAAQqK,qBAAsBwC,GAI9B7M,EAAQ4K,iBAAkBiC,IAKnCzG,EAAKsD,KAAK5B,MAAQ,SAAUgF,EAAW9M,GACtC,GAA+C,oBAAnCA,EAAQsK,wBAA0C9D,EAC7D,OAAOxG,EAAQsK,uBAAwBwC,IASzCrG,EAAY,GAIZ6E,EAAQ,SAAUC,GAEjB,IAAIwB,EAEJnJ,EAAgBpE,YAAa+L,GAAKyB,UACjC,UAAYjK,EAAU,iDACLA,EAAU,oEAKtBwI,EAAGX,iBAAkB,cAAexK,QACzCqG,EAAUhJ,KAAM,MAAQ0H,GAAa,aAAe+B,EAAW,KAI1DqE,EAAGX,iBAAkB,QAAU7H,EAAU,MAAO3C,QACrDqG,EAAUhJ,KAAM,MAMX8N,EAAGX,iBAAkB,KAAO7H,EAAU,MAAO3C,QAClDqG,EAAUhJ,KAAM,YAOX8N,EAAGX,iBAAkB,YAAaxK,QACvCqG,EAAUhJ,KAAM,aAKjBsP,EAAQtQ,EAAS0C,cAAe,UAC1BG,aAAc,OAAQ,UAC5BiM,EAAG/L,YAAauN,GAAQzN,aAAc,OAAQ,KAQ9CsE,EAAgBpE,YAAa+L,GAAKnC,UAAW,EACM,IAA9CmC,EAAGX,iBAAkB,aAAcxK,QACvCqG,EAAUhJ,KAAM,WAAY,cAQ7BsP,EAAQtQ,EAAS0C,cAAe,UAC1BG,aAAc,OAAQ,IAC5BiM,EAAG/L,YAAauN,GACVxB,EAAGX,iBAAkB,aAAcxK,QACxCqG,EAAUhJ,KAAM,MAAQ0H,GAAa,QAAUA,GAAa,KAC3DA,GAAa,kBAIVlH,GAAQuO,QAQb/F,EAAUhJ,KAAM,QAGjBgJ,EAAYA,EAAUrG,QAAU,IAAIiF,OAAQoB,EAAUkE,KAAM,MAM5D1D,EAAY,SAAU1B,EAAGC,GAGxB,GAAKD,IAAMC,EAEV,OADAe,GAAe,EACR,EAIR,IAAI0G,GAAW1H,EAAEG,yBAA2BF,EAAEE,wBAC9C,OAAKuH,IAgBU,GAPfA,GAAY1H,EAAElB,eAAiBkB,KAASC,EAAEnB,eAAiBmB,GAC1DD,EAAEG,wBAAyBF,GAG3B,KAIGvH,GAAQiP,cAAgB1H,EAAEE,wBAAyBH,KAAQ0H,EAOzD1H,IAAM9I,GAAY8I,EAAElB,eAAiB6B,IACzCwD,EAAKpE,SAAUY,GAAcX,IACrB,EAOJC,IAAM/I,GAAY+I,EAAEnB,eAAiB6B,IACzCwD,EAAKpE,SAAUY,GAAcV,GACtB,EAIDc,EACJ5I,GAAQJ,KAAMgJ,EAAWf,GAAM7H,GAAQJ,KAAMgJ,EAAWd,GAC1D,EAGe,EAAVyH,GAAe,EAAI,KAGpBxQ,EAqpBR,IAAMuC,KAlpBN0K,EAAKjF,QAAU,SAAU0I,EAAMC,GAC9B,OAAO1D,EAAMyD,EAAM,KAAM,KAAMC,IAGhC1D,EAAK2D,gBAAkB,SAAU/M,EAAM6M,GAGtC,GAFAlE,EAAa3I,GAERkG,IACHQ,EAAwBmG,EAAO,QAC7B1G,IAAcA,EAAUnC,KAAM6I,IAEjC,IACC,IAAIlM,EAAMwD,EAAQnH,KAAMgD,EAAM6M,GAG9B,GAAKlM,GAAOhD,GAAQsO,mBAIlBjM,EAAK7D,UAAuC,KAA3B6D,EAAK7D,SAAS2B,SAChC,OAAO6C,EAEP,MAAQuI,GACTxC,EAAwBmG,GAAM,GAIhC,OAAuD,EAAhDzD,EAAMyD,EAAM1Q,EAAU,KAAM,CAAE6D,IAASF,QAG/CsJ,EAAKpE,SAAW,SAAUtF,EAASM,GAUlC,OAHON,EAAQqE,eAAiBrE,IAAavD,GAC5CwM,EAAajJ,GAEPF,GAAOwF,SAAUtF,EAASM,IAIlCoJ,EAAK4D,KAAO,SAAUhN,EAAMC,IAOpBD,EAAK+D,eAAiB/D,IAAU7D,GACtCwM,EAAa3I,GAGd,IAAIL,EAAKmG,EAAKmH,WAAYhN,EAAKC,eAG9BvB,EAAMgB,GAAMpC,GAAOP,KAAM8I,EAAKmH,WAAYhN,EAAKC,eAC9CP,EAAIK,EAAMC,GAAOiG,QACjB1D,EAEF,YAAaA,IAAR7D,EACGA,EAGDqB,EAAKjB,aAAckB,IAG3BmJ,EAAKtG,MAAQ,SAAUC,GACtB,MAAM,IAAI1G,MAAO,0CAA4C0G,IAO9DvD,GAAO0N,WAAa,SAAUzJ,GAC7B,IAAIzD,EACHmN,EAAa,GACbzL,EAAI,EACJhD,EAAI,EAWL,GAJAuH,GAAgBtI,GAAQyP,WACxBpH,GAAarI,GAAQyP,YAAcvQ,GAAMG,KAAMyG,EAAS,GACxD7B,GAAK5E,KAAMyG,EAASkD,GAEfV,EAAe,CACnB,MAAUjG,EAAOyD,EAAS/E,KACpBsB,IAASyD,EAAS/E,KACtBgD,EAAIyL,EAAWhQ,KAAMuB,IAGvB,MAAQgD,IACPG,GAAO7E,KAAMyG,EAAS0J,EAAYzL,GAAK,GAQzC,OAFAsE,EAAY,KAELvC,GAGRjE,GAAOG,GAAGuN,WAAa,WACtB,OAAO3Q,KAAKkE,UAAWjB,GAAO0N,WAAYrQ,GAAMK,MAAOX,UAGxDuJ,EAAOtG,GAAOqN,KAAO,CAGpBhC,YAAa,GAEbwC,aAActC,EAEdvB,MAAOlC,EAEP2F,WAAY,GAEZ7D,KAAM,GAENkE,SAAU,CACTC,IAAK,CAAExE,IAAK,aAAc7H,OAAO,GACjCsM,IAAK,CAAEzE,IAAK,cACZ0E,IAAK,CAAE1E,IAAK,kBAAmB7H,OAAO,GACtCwM,IAAK,CAAE3E,IAAK,oBAGb4E,UAAW,CACVjG,KAAM,SAAU8B,GAWf,OAVAA,EAAO,GAAMA,EAAO,GAAI5G,QAASuF,EAAWC,GAG5CoB,EAAO,IAAQA,EAAO,IAAOA,EAAO,IAAOA,EAAO,IAAO,IACvD5G,QAASuF,EAAWC,GAEF,OAAfoB,EAAO,KACXA,EAAO,GAAM,IAAMA,EAAO,GAAM,KAG1BA,EAAM3M,MAAO,EAAG,IAGxB+K,MAAO,SAAU4B,GAkChB,OAtBAA,EAAO,GAAMA,EAAO,GAAItJ,cAEU,QAA7BsJ,EAAO,GAAI3M,MAAO,EAAG,IAGnB2M,EAAO,IACZJ,EAAKtG,MAAO0G,EAAO,IAKpBA,EAAO,KAASA,EAAO,GACtBA,EAAO,IAAQA,EAAO,IAAO,GAC7B,GAAqB,SAAfA,EAAO,IAAiC,QAAfA,EAAO,KAEvCA,EAAO,KAAWA,EAAO,GAAMA,EAAO,IAAwB,QAAfA,EAAO,KAG3CA,EAAO,IAClBJ,EAAKtG,MAAO0G,EAAO,IAGbA,GAGR7B,OAAQ,SAAU6B,GACjB,IAAIoE,EACHC,GAAYrE,EAAO,IAAOA,EAAO,GAElC,OAAKlC,EAAUM,MAAM5D,KAAMwF,EAAO,IAC1B,MAIHA,EAAO,GACXA,EAAO,GAAMA,EAAO,IAAOA,EAAO,IAAO,GAG9BqE,GAAYzG,EAAQpD,KAAM6J,KAGnCD,EAASzD,EAAU0D,GAAU,MAG7BD,EAASC,EAASzQ,QAAS,IAAKyQ,EAAS/N,OAAS8N,GAAWC,EAAS/N,UAGxE0J,EAAO,GAAMA,EAAO,GAAI3M,MAAO,EAAG+Q,GAClCpE,EAAO,GAAMqE,EAAShR,MAAO,EAAG+Q,IAI1BpE,EAAM3M,MAAO,EAAG,MAIzBuP,OAAQ,CAEP3E,IAAK,SAAUqG,GACd,IAAIC,EAAmBD,EAAiBlL,QAASuF,EAAWC,GAAYlI,cACxE,MAA4B,MAArB4N,EACN,WACC,OAAO,GAER,SAAU9N,GACT,OAAOD,GAAUC,EAAM+N,KAI1BvG,MAAO,SAAUgF,GAChB,IAAIwB,EAAU1H,EAAYkG,EAAY,KAEtC,OAAOwB,IACJA,EAAU,IAAIjJ,OAAQ,MAAQF,GAAa,IAAM2H,EAClD,IAAM3H,GAAa,SACpByB,EAAYkG,EAAW,SAAUxM,GAChC,OAAOgO,EAAQhK,KACY,iBAAnBhE,EAAKwM,WAA0BxM,EAAKwM,WACb,oBAAtBxM,EAAKjB,cACXiB,EAAKjB,aAAc,UACpB,OAKL2I,KAAM,SAAUzH,EAAMgO,EAAUC,GAC/B,OAAO,SAAUlO,GAChB,IAAImO,EAAS/E,EAAK4D,KAAMhN,EAAMC,GAE9B,OAAe,MAAVkO,EACgB,OAAbF,GAEFA,IAINE,GAAU,GAEQ,MAAbF,EACGE,IAAWD,EAED,OAAbD,EACGE,IAAWD,EAED,OAAbD,EACGC,GAAqC,IAA5BC,EAAO/Q,QAAS8Q,GAEf,OAAbD,EACGC,IAAoC,EAA3BC,EAAO/Q,QAAS8Q,GAEf,OAAbD,EACGC,GAASC,EAAOtR,OAAQqR,EAAMpO,UAAaoO,EAEjC,OAAbD,GAEkB,GADb,IAAME,EAAOvL,QAASoE,EAAa,KAAQ,KAClD5J,QAAS8Q,GAEM,OAAbD,IACGE,IAAWD,GAASC,EAAOtR,MAAO,EAAGqR,EAAMpO,OAAS,KAAQoO,EAAQ,QAO9EtG,MAAO,SAAU1J,EAAMkQ,EAAMC,EAAWnN,EAAOE,GAC9C,IAAIkN,EAAgC,QAAvBpQ,EAAKrB,MAAO,EAAG,GAC3B0R,EAA+B,SAArBrQ,EAAKrB,OAAQ,GACvB2R,EAAkB,YAATJ,EAEV,OAAiB,IAAVlN,GAAwB,IAATE,EAGrB,SAAUpB,GACT,QAASA,EAAKb,YAGf,SAAUa,EAAMyO,EAAUC,GACzB,IAAI/D,EAAOgE,EAAYnQ,EAAMoQ,EAAWC,EACvC9F,EAAMuF,IAAWC,EAAU,cAAgB,kBAC3CO,EAAS9O,EAAKb,WACdc,EAAOuO,GAAUxO,EAAKD,SAASG,cAC/B6O,GAAYL,IAAQF,EACpBQ,GAAO,EAER,GAAKF,EAAS,CAGb,GAAKR,EAAS,CACb,MAAQvF,EAAM,CACbvK,EAAOwB,EACP,MAAUxB,EAAOA,EAAMuK,GACtB,GAAKyF,EACJzO,GAAUvB,EAAMyB,GACE,IAAlBzB,EAAKV,SAEL,OAAO,EAKT+Q,EAAQ9F,EAAe,SAAT7K,IAAoB2Q,GAAS,cAE5C,OAAO,EAMR,GAHAA,EAAQ,CAAEN,EAAUO,EAAOG,WAAaH,EAAOI,WAG1CX,GAAWQ,EAAW,CAM1BC,GADAJ,GADAjE,GADAgE,EAAaG,EAAQrM,KAAeqM,EAAQrM,GAAY,KACpCvE,IAAU,IACX,KAAQkI,GAAWuE,EAAO,KACzBA,EAAO,GAC3BnM,EAAOoQ,GAAaE,EAAO7F,WAAY2F,GAEvC,MAAUpQ,IAASoQ,GAAapQ,GAAQA,EAAMuK,KAG3CiG,EAAOJ,EAAY,IAAOC,EAAMjK,MAGlC,GAAuB,IAAlBpG,EAAKV,YAAoBkR,GAAQxQ,IAASwB,EAAO,CACrD2O,EAAYzQ,GAAS,CAAEkI,EAASwI,EAAWI,GAC3C,YAgBF,GATKD,IAIJC,EADAJ,GADAjE,GADAgE,EAAa3O,EAAMyC,KAAezC,EAAMyC,GAAY,KAChCvE,IAAU,IACX,KAAQkI,GAAWuE,EAAO,KAMhC,IAATqE,EAGJ,MAAUxQ,IAASoQ,GAAapQ,GAAQA,EAAMuK,KAC3CiG,EAAOJ,EAAY,IAAOC,EAAMjK,MAElC,IAAO4J,EACNzO,GAAUvB,EAAMyB,GACE,IAAlBzB,EAAKV,aACHkR,IAGGD,KACJJ,EAAanQ,EAAMiE,KAChBjE,EAAMiE,GAAY,KACTvE,GAAS,CAAEkI,EAAS4I,IAG5BxQ,IAASwB,GACb,MASL,OADAgP,GAAQ5N,KACQF,GAAW8N,EAAO9N,GAAU,GAAqB,GAAhB8N,EAAO9N,KAK5DyG,OAAQ,SAAUwH,EAAQ5D,GAMzB,IAAI6D,EACHzP,EAAKmG,EAAKiB,QAASoI,IAAYrJ,EAAKuJ,WAAYF,EAAOjP,gBACtDkJ,EAAKtG,MAAO,uBAAyBqM,GAKvC,OAAKxP,EAAI8C,GACD9C,EAAI4L,GAIK,EAAZ5L,EAAGG,QACPsP,EAAO,CAAED,EAAQA,EAAQ,GAAI5D,GACtBzF,EAAKuJ,WAAW7R,eAAgB2R,EAAOjP,eAC7C6K,EAAc,SAAU1B,EAAMlF,GAC7B,IAAImL,EACHC,EAAU5P,EAAI0J,EAAMkC,GACpB7M,EAAI6Q,EAAQzP,OACb,MAAQpB,IAEP2K,EADAiG,EAAMlS,GAAQJ,KAAMqM,EAAMkG,EAAS7Q,OAClByF,EAASmL,GAAQC,EAAS7Q,MAG7C,SAAUsB,GACT,OAAOL,EAAIK,EAAM,EAAGoP,KAIhBzP,IAIToH,QAAS,CAGRyI,IAAKzE,EAAc,SAAUtL,GAK5B,IAAIgN,EAAQ,GACXhJ,EAAU,GACVgM,EAAUC,GAASjQ,EAASmD,QAASkC,GAAU,OAEhD,OAAO2K,EAAShN,GACfsI,EAAc,SAAU1B,EAAMlF,EAASsK,EAAUC,GAChD,IAAI1O,EACH2P,EAAYF,EAASpG,EAAM,KAAMqF,EAAK,IACtChQ,EAAI2K,EAAKvJ,OAGV,MAAQpB,KACAsB,EAAO2P,EAAWjR,MACxB2K,EAAM3K,KAASyF,EAASzF,GAAMsB,MAIjC,SAAUA,EAAMyO,EAAUC,GAOzB,OANAjC,EAAO,GAAMzM,EACbyP,EAAShD,EAAO,KAAMiC,EAAKjL,GAI3BgJ,EAAO,GAAM,MACLhJ,EAAQmB,SAInBgL,IAAK7E,EAAc,SAAUtL,GAC5B,OAAO,SAAUO,GAChB,OAAuC,EAAhCoJ,EAAM3J,EAAUO,GAAOF,UAIhCkF,SAAU+F,EAAc,SAAUjM,GAEjC,OADAA,EAAOA,EAAK8D,QAASuF,EAAWC,GACzB,SAAUpI,GAChB,OAAsE,GAA7DA,EAAKqD,aAAe7D,GAAOV,KAAMkB,IAAS5C,QAAS0B,MAW9D+Q,KAAM9E,EAAc,SAAU8E,GAO7B,OAJMxI,EAAYrD,KAAM6L,GAAQ,KAC/BzG,EAAKtG,MAAO,qBAAuB+M,GAEpCA,EAAOA,EAAKjN,QAASuF,EAAWC,GAAYlI,cACrC,SAAUF,GAChB,IAAI8P,EACJ,GACC,GAAOA,EAAW5J,EACjBlG,EAAK6P,KACL7P,EAAKjB,aAAc,aAAgBiB,EAAKjB,aAAc,QAGtD,OADA+Q,EAAWA,EAAS5P,iBACA2P,GAA2C,IAAnCC,EAAS1S,QAASyS,EAAO,YAE3C7P,EAAOA,EAAKb,aAAkC,IAAlBa,EAAKlC,UAC7C,OAAO,KAKTqE,OAAQ,SAAUnC,GACjB,IAAI+P,EAAOzT,GAAO0T,UAAY1T,GAAO0T,SAASD,KAC9C,OAAOA,GAAQA,EAAKlT,MAAO,KAAQmD,EAAK8J,IAGzCmG,KAAM,SAAUjQ,GACf,OAAOA,IAASsD,GAGjB4M,MAAO,SAAUlQ,GAChB,OAAOA,IA5oCV,WACC,IACC,OAAO7D,EAASgU,cACf,MAAQC,KAyoCQC,IACflU,EAASmU,eACLtQ,EAAK9B,MAAQ8B,EAAKuQ,OAASvQ,EAAKwQ,WAItCC,QAASrF,GAAsB,GAC/BtC,SAAUsC,GAAsB,GAEhCsF,QAAS,SAAU1Q,GAIlB,OAASD,GAAUC,EAAM,YAAeA,EAAK0Q,SAC1C3Q,GAAUC,EAAM,aAAgBA,EAAK2Q,UAGzCA,SAAU,SAAU3Q,GAWnB,OALKA,EAAKb,YAETa,EAAKb,WAAWyR,eAGQ,IAAlB5Q,EAAK2Q,UAIbE,MAAO,SAAU7Q,GAMhB,IAAMA,EAAOA,EAAKiP,WAAYjP,EAAMA,EAAOA,EAAK8Q,YAC/C,GAAK9Q,EAAKlC,SAAW,EACpB,OAAO,EAGT,OAAO,GAGRgR,OAAQ,SAAU9O,GACjB,OAAQ8F,EAAKiB,QAAQ8J,MAAO7Q,IAI7B+Q,OAAQ,SAAU/Q,GACjB,OAAOgI,EAAQhE,KAAMhE,EAAKD,WAG3B0M,MAAO,SAAUzM,GAChB,OAAO+H,EAAQ/D,KAAMhE,EAAKD,WAG3BiR,OAAQ,SAAUhR,GACjB,OAAOD,GAAUC,EAAM,UAA2B,WAAdA,EAAK9B,MACxC6B,GAAUC,EAAM,WAGlBlB,KAAM,SAAUkB,GACf,IAAIgN,EACJ,OAAOjN,GAAUC,EAAM,UAA2B,SAAdA,EAAK9B,OAKI,OAAxC8O,EAAOhN,EAAKjB,aAAc,UACN,SAAvBiO,EAAK9M,gBAIRgB,MAAOoK,EAAwB,WAC9B,MAAO,CAAE,KAGVlK,KAAMkK,EAAwB,SAAU2F,EAAenR,GACtD,MAAO,CAAEA,EAAS,KAGnBqB,GAAImK,EAAwB,SAAU2F,EAAenR,EAAQyL,GAC5D,MAAO,CAAEA,EAAW,EAAIA,EAAWzL,EAASyL,KAG7ClK,KAAMiK,EAAwB,SAAUE,EAAc1L,GAErD,IADA,IAAIpB,EAAI,EACAA,EAAIoB,EAAQpB,GAAK,EACxB8M,EAAarO,KAAMuB,GAEpB,OAAO8M,IAGRhK,IAAK8J,EAAwB,SAAUE,EAAc1L,GAEpD,IADA,IAAIpB,EAAI,EACAA,EAAIoB,EAAQpB,GAAK,EACxB8M,EAAarO,KAAMuB,GAEpB,OAAO8M,IAGR0F,GAAI5F,EAAwB,SAAUE,EAAc1L,EAAQyL,GAC3D,IAAI7M,EAUJ,IAPCA,EADI6M,EAAW,EACXA,EAAWzL,EACOA,EAAXyL,EACPzL,EAEAyL,EAGU,KAAL7M,GACT8M,EAAarO,KAAMuB,GAEpB,OAAO8M,IAGR2F,GAAI7F,EAAwB,SAAUE,EAAc1L,EAAQyL,GAE3D,IADA,IAAI7M,EAAI6M,EAAW,EAAIA,EAAWzL,EAASyL,IACjC7M,EAAIoB,GACb0L,EAAarO,KAAMuB,GAEpB,OAAO8M,OAKLzE,QAAQqK,IAAMtL,EAAKiB,QAAQ5F,GAGrB,CAAEkQ,OAAO,EAAMC,UAAU,EAAMC,MAAM,EAAMC,UAAU,EAAMC,OAAO,GAC5E3L,EAAKiB,QAASrI,GAAMwM,EAAmBxM,GAExC,IAAMA,IAAK,CAAEgT,QAAQ,EAAMC,OAAO,GACjC7L,EAAKiB,QAASrI,GAAMyM,EAAoBzM,GAIzC,SAAS2Q,KAIT,SAASlF,EAAU1K,EAAUmS,GAC5B,IAAIrC,EAAS/F,EAAOqI,EAAQ3T,EAC3B4T,EAAOrI,EAAQsI,EACfC,EAASxL,EAAY/G,EAAW,KAEjC,GAAKuS,EACJ,OAAOJ,EAAY,EAAII,EAAOnV,MAAO,GAGtCiV,EAAQrS,EACRgK,EAAS,GACTsI,EAAajM,EAAK6H,UAElB,MAAQmE,EAAQ,CA2Bf,IAAM5T,KAxBAqR,KAAa/F,EAAQvC,EAAO2C,KAAMkI,MAClCtI,IAGJsI,EAAQA,EAAMjV,MAAO2M,EAAO,GAAI1J,SAAYgS,GAE7CrI,EAAOtM,KAAQ0U,EAAS,KAGzBtC,GAAU,GAGH/F,EAAQtC,EAAmB0C,KAAMkI,MACvCvC,EAAU/F,EAAMsB,QAChB+G,EAAO1U,KAAM,CACZmH,MAAOiL,EAGPrR,KAAMsL,EAAO,GAAI5G,QAASkC,GAAU,OAErCgN,EAAQA,EAAMjV,MAAO0S,EAAQzP,SAIhBgG,EAAKsG,SACX5C,EAAQlC,EAAWpJ,GAAO0L,KAAMkI,KAAgBC,EAAY7T,MAChEsL,EAAQuI,EAAY7T,GAAQsL,MAC9B+F,EAAU/F,EAAMsB,QAChB+G,EAAO1U,KAAM,CACZmH,MAAOiL,EACPrR,KAAMA,EACNiG,QAASqF,IAEVsI,EAAQA,EAAMjV,MAAO0S,EAAQzP,SAI/B,IAAMyP,EACL,MAOF,OAAKqC,EACGE,EAAMhS,OAGPgS,EACN1I,EAAKtG,MAAOrD,GAGZ+G,EAAY/G,EAAUgK,GAAS5M,MAAO,GAGxC,SAASuN,EAAYyH,GAIpB,IAHA,IAAInT,EAAI,EACP+C,EAAMoQ,EAAO/R,OACbL,EAAW,GACJf,EAAI+C,EAAK/C,IAChBe,GAAYoS,EAAQnT,GAAI4F,MAEzB,OAAO7E,EAGR,SAASoJ,EAAe4G,EAASwC,EAAYC,GAC5C,IAAInJ,EAAMkJ,EAAWlJ,IACpBoJ,EAAOF,EAAWjJ,KAClB4B,EAAMuH,GAAQpJ,EACdqJ,EAAmBF,GAAgB,eAARtH,EAC3ByH,EAAWhM,IAEZ,OAAO4L,EAAW/Q,MAGjB,SAAUlB,EAAMN,EAASgP,GACxB,MAAU1O,EAAOA,EAAM+I,GACtB,GAAuB,IAAlB/I,EAAKlC,UAAkBsU,EAC3B,OAAO3C,EAASzP,EAAMN,EAASgP,GAGjC,OAAO,GAIR,SAAU1O,EAAMN,EAASgP,GACxB,IAAI4D,EAAU3D,EACb4D,EAAW,CAAEnM,EAASiM,GAGvB,GAAK3D,GACJ,MAAU1O,EAAOA,EAAM+I,GACtB,IAAuB,IAAlB/I,EAAKlC,UAAkBsU,IACtB3C,EAASzP,EAAMN,EAASgP,GAC5B,OAAO,OAKV,MAAU1O,EAAOA,EAAM+I,GACtB,GAAuB,IAAlB/I,EAAKlC,UAAkBsU,EAG3B,GAFAzD,EAAa3O,EAAMyC,KAAezC,EAAMyC,GAAY,IAE/C0P,GAAQpS,GAAUC,EAAMmS,GAC5BnS,EAAOA,EAAM+I,IAAS/I,MAChB,CAAA,IAAOsS,EAAW3D,EAAY/D,KACpC0H,EAAU,KAAQlM,GAAWkM,EAAU,KAAQD,EAG/C,OAASE,EAAU,GAAMD,EAAU,GAOnC,IAHA3D,EAAY/D,GAAQ2H,GAGH,GAAM9C,EAASzP,EAAMN,EAASgP,GAC9C,OAAO,EAMZ,OAAO,GAIV,SAAS8D,EAAgBC,GACxB,OAAyB,EAAlBA,EAAS3S,OACf,SAAUE,EAAMN,EAASgP,GACxB,IAAIhQ,EAAI+T,EAAS3S,OACjB,MAAQpB,IACP,IAAM+T,EAAU/T,GAAKsB,EAAMN,EAASgP,GACnC,OAAO,EAGT,OAAO,GAER+D,EAAU,GAYZ,SAASC,EAAU/C,EAAW3O,EAAKoL,EAAQ1M,EAASgP,GAOnD,IANA,IAAI1O,EACH2S,EAAe,GACfjU,EAAI,EACJ+C,EAAMkO,EAAU7P,OAChB8S,EAAgB,MAAP5R,EAEFtC,EAAI+C,EAAK/C,KACTsB,EAAO2P,EAAWjR,MAClB0N,IAAUA,EAAQpM,EAAMN,EAASgP,KACtCiE,EAAaxV,KAAM6C,GACd4S,GACJ5R,EAAI7D,KAAMuB,KAMd,OAAOiU,EAGR,SAASE,GAAYlF,EAAWlO,EAAUgQ,EAASqD,EAAYC,EAAYC,GAO1E,OANKF,IAAeA,EAAYrQ,KAC/BqQ,EAAaD,GAAYC,IAErBC,IAAeA,EAAYtQ,KAC/BsQ,EAAaF,GAAYE,EAAYC,IAE/BjI,EAAc,SAAU1B,EAAM5F,EAAS/D,EAASgP,GACtD,IAAIuE,EAAMvU,EAAGsB,EAAMkT,EAClBC,EAAS,GACTC,EAAU,GACVC,EAAc5P,EAAQ3D,OAGtBY,EAAQ2I,GA5CX,SAA2B5J,EAAU6T,EAAU7P,GAG9C,IAFA,IAAI/E,EAAI,EACP+C,EAAM6R,EAASxT,OACRpB,EAAI+C,EAAK/C,IAChB0K,EAAM3J,EAAU6T,EAAU5U,GAAK+E,GAEhC,OAAOA,EAuCJ8P,CAAkB9T,GAAY,IAC7BC,EAAQ5B,SAAW,CAAE4B,GAAYA,EAAS,IAG5C8T,GAAY7F,IAAetE,GAAS5J,EAEnCiB,EADAgS,EAAUhS,EAAOyS,EAAQxF,EAAWjO,EAASgP,GAsB/C,GAnBKe,EAaJA,EAAS+D,EATTN,EAAaH,IAAgB1J,EAAOsE,EAAY0F,GAAeP,GAG9D,GAGArP,EAG+B/D,EAASgP,GAEzCwE,EAAaM,EAITV,EAAa,CACjBG,EAAOP,EAAUQ,EAAYE,GAC7BN,EAAYG,EAAM,GAAIvT,EAASgP,GAG/BhQ,EAAIuU,EAAKnT,OACT,MAAQpB,KACAsB,EAAOiT,EAAMvU,MACnBwU,EAAYE,EAAS1U,MAAW8U,EAAWJ,EAAS1U,IAAQsB,IAK/D,GAAKqJ,GACJ,GAAK0J,GAAcpF,EAAY,CAC9B,GAAKoF,EAAa,CAGjBE,EAAO,GACPvU,EAAIwU,EAAWpT,OACf,MAAQpB,KACAsB,EAAOkT,EAAYxU,KAGzBuU,EAAK9V,KAAQqW,EAAW9U,GAAMsB,GAGhC+S,EAAY,KAAQG,EAAa,GAAMD,EAAMvE,GAI9ChQ,EAAIwU,EAAWpT,OACf,MAAQpB,KACAsB,EAAOkT,EAAYxU,MAC2C,GAAlEuU,EAAOF,EAAa3V,GAAQJ,KAAMqM,EAAMrJ,GAASmT,EAAQzU,MAE3D2K,EAAM4J,KAAYxP,EAASwP,GAASjT,UAOvCkT,EAAaR,EACZQ,IAAezP,EACdyP,EAAWrR,OAAQwR,EAAaH,EAAWpT,QAC3CoT,GAEGH,EACJA,EAAY,KAAMtP,EAASyP,EAAYxE,GAEvCvR,EAAKD,MAAOuG,EAASyP,KAMzB,SAASO,GAAmB5B,GA+B3B,IA9BA,IAAI6B,EAAcjE,EAAS/N,EAC1BD,EAAMoQ,EAAO/R,OACb6T,EAAkB7N,EAAKwH,SAAUuE,EAAQ,GAAI3T,MAC7C0V,EAAmBD,GAAmB7N,EAAKwH,SAAU,KACrD5O,EAAIiV,EAAkB,EAAI,EAG1BE,EAAehL,EAAe,SAAU7I,GACvC,OAAOA,IAAS0T,GACdE,GAAkB,GACrBE,EAAkBjL,EAAe,SAAU7I,GAC1C,OAA6C,EAAtC5C,GAAQJ,KAAM0W,EAAc1T,IACjC4T,GAAkB,GACrBnB,EAAW,CAAE,SAAUzS,EAAMN,EAASgP,GAMrC,IAAI/N,GAASgT,IAAqBjF,GAAOhP,GAAWqG,MACjD2N,EAAehU,GAAU5B,SAC1B+V,EAAc7T,EAAMN,EAASgP,GAC7BoF,EAAiB9T,EAAMN,EAASgP,IAKlC,OADAgF,EAAe,KACR/S,IAGDjC,EAAI+C,EAAK/C,IAChB,GAAO+Q,EAAU3J,EAAKwH,SAAUuE,EAAQnT,GAAIR,MAC3CuU,EAAW,CAAE5J,EAAe2J,EAAgBC,GAAYhD,QAClD,CAIN,IAHAA,EAAU3J,EAAKsG,OAAQyF,EAAQnT,GAAIR,MAAOhB,MAAO,KAAM2U,EAAQnT,GAAIyF,UAGrD1B,GAAY,CAIzB,IADAf,IAAMhD,EACEgD,EAAID,EAAKC,IAChB,GAAKoE,EAAKwH,SAAUuE,EAAQnQ,GAAIxD,MAC/B,MAGF,OAAO2U,GACF,EAAJnU,GAAS8T,EAAgBC,GACrB,EAAJ/T,GAAS0L,EAGRyH,EAAOhV,MAAO,EAAG6B,EAAI,GACnBzB,OAAQ,CAAEqH,MAAgC,MAAzBuN,EAAQnT,EAAI,GAAIR,KAAe,IAAM,MACvD0E,QAASkC,GAAU,MACrB2K,EACA/Q,EAAIgD,GAAK+R,GAAmB5B,EAAOhV,MAAO6B,EAAGgD,IAC7CA,EAAID,GAAOgS,GAAqB5B,EAASA,EAAOhV,MAAO6E,IACvDA,EAAID,GAAO2I,EAAYyH,IAGzBY,EAAStV,KAAMsS,GAIjB,OAAO+C,EAAgBC,GAiIxB,SAAS/C,GAASjQ,EAAU+J,GAC3B,IAAI9K,EA/H8BqV,EAAiBC,EAC/CC,EACHC,EACAC,EA6HAH,EAAc,GACdD,EAAkB,GAClB/B,EAASvL,EAAehH,EAAW,KAEpC,IAAMuS,EAAS,CAGRxI,IACLA,EAAQW,EAAU1K,IAEnBf,EAAI8K,EAAM1J,OACV,MAAQpB,KACPsT,EAASyB,GAAmBjK,EAAO9K,KACtB+D,GACZuR,EAAY7W,KAAM6U,GAElB+B,EAAgB5W,KAAM6U,IAKxBA,EAASvL,EAAehH,GArJSsU,EAsJNA,EArJxBE,EAA6B,GADkBD,EAsJNA,GArJrBlU,OACvBoU,EAAqC,EAAzBH,EAAgBjU,OAC5BqU,EAAe,SAAU9K,EAAM3J,EAASgP,EAAKjL,EAAS2Q,GACrD,IAAIpU,EAAM0B,EAAG+N,EACZ4E,EAAe,EACf3V,EAAI,IACJiR,EAAYtG,GAAQ,GACpBiL,EAAa,GACbC,EAAgBxO,EAGhBrF,EAAQ2I,GAAQ6K,GAAapO,EAAKsD,KAAK3B,IAAK,IAAK2M,GAGjDI,EAAkBpO,GAA4B,MAAjBmO,EAAwB,EAAI7R,KAAKC,UAAY,GAC1ElB,EAAMf,EAAMZ,OAeb,IAbKsU,IAMJrO,EAAmBrG,GAAWvD,GAAYuD,GAAW0U,GAO9C1V,IAAM+C,GAAgC,OAAvBzB,EAAOU,EAAOhC,IAAeA,IAAM,CACzD,GAAKwV,GAAalU,EAAO,CACxB0B,EAAI,EAMEhC,GAAWM,EAAK+D,eAAiB5H,IACtCwM,EAAa3I,GACb0O,GAAOxI,GAER,MAAUuJ,EAAUsE,EAAiBrS,KACpC,GAAK+N,EAASzP,EAAMN,GAAWvD,EAAUuS,GAAQ,CAChDvR,EAAKH,KAAMyG,EAASzD,GACpB,MAGGoU,IACJhO,EAAUoO,GAKPP,KAGGjU,GAAQyP,GAAWzP,IACzBqU,IAIIhL,GACJsG,EAAUxS,KAAM6C,IAgBnB,GATAqU,GAAgB3V,EASXuV,GAASvV,IAAM2V,EAAe,CAClC3S,EAAI,EACJ,MAAU+N,EAAUuE,EAAatS,KAChC+N,EAASE,EAAW2E,EAAY5U,EAASgP,GAG1C,GAAKrF,EAAO,CAGX,GAAoB,EAAfgL,EACJ,MAAQ3V,IACCiR,EAAWjR,IAAO4V,EAAY5V,KACrC4V,EAAY5V,GAAMkG,GAAI5H,KAAMyG,IAM/B6Q,EAAa5B,EAAU4B,GAIxBnX,EAAKD,MAAOuG,EAAS6Q,GAGhBF,IAAc/K,GAA4B,EAApBiL,EAAWxU,QACG,EAAtCuU,EAAeL,EAAYlU,QAE7BN,GAAO0N,WAAYzJ,GAUrB,OALK2Q,IACJhO,EAAUoO,EACVzO,EAAmBwO,GAGb5E,GAGFsE,EACNlJ,EAAcoJ,GACdA,KA8BO1U,SAAWA,EAEnB,OAAOuS,EAYR,SAASvH,GAAQhL,EAAUC,EAAS+D,EAAS4F,GAC5C,IAAI3K,EAAGmT,EAAQ4C,EAAOvW,EAAMkL,EAC3BsL,EAA+B,mBAAbjV,GAA2BA,EAC7C+J,GAASH,GAAQc,EAAY1K,EAAWiV,EAASjV,UAAYA,GAM9D,GAJAgE,EAAUA,GAAW,GAIC,IAAjB+F,EAAM1J,OAAe,CAIzB,GAAqB,GADrB+R,EAASrI,EAAO,GAAMA,EAAO,GAAI3M,MAAO,IAC5BiD,QAA+C,QAA/B2U,EAAQ5C,EAAQ,IAAM3T,MAC3B,IAArBwB,EAAQ5B,UAAkBoI,GAAkBJ,EAAKwH,SAAUuE,EAAQ,GAAI3T,MAAS,CAMjF,KAJAwB,GAAYoG,EAAKsD,KAAK7B,GACrBkN,EAAMtQ,QAAS,GAAIvB,QAASuF,EAAWC,GACvC1I,IACI,IAAM,IAEV,OAAO+D,EAGIiR,IACXhV,EAAUA,EAAQP,YAGnBM,EAAWA,EAAS5C,MAAOgV,EAAO/G,QAAQxG,MAAMxE,QAIjDpB,EAAI4I,EAAUQ,aAAa9D,KAAMvE,GAAa,EAAIoS,EAAO/R,OACzD,MAAQpB,IAAM,CAIb,GAHA+V,EAAQ5C,EAAQnT,GAGXoH,EAAKwH,SAAYpP,EAAOuW,EAAMvW,MAClC,MAED,IAAOkL,EAAOtD,EAAKsD,KAAMlL,MAGjBmL,EAAOD,EACbqL,EAAMtQ,QAAS,GAAIvB,QAASuF,EAAWC,GACvCF,EAASlE,KAAM6N,EAAQ,GAAI3T,OAC1B+L,EAAavK,EAAQP,aAAgBO,IACjC,CAKL,GAFAmS,EAAOhQ,OAAQnD,EAAG,KAClBe,EAAW4J,EAAKvJ,QAAUsK,EAAYyH,IAGrC,OADA1U,EAAKD,MAAOuG,EAAS4F,GACd5F,EAGR,QAeJ,OAPEiR,GAAYhF,GAASjQ,EAAU+J,IAChCH,EACA3J,GACCwG,EACDzC,GACC/D,GAAWwI,EAASlE,KAAMvE,IAAcwK,EAAavK,EAAQP,aAAgBO,GAExE+D,EArlBR4L,EAAWlP,UAAY2F,EAAK6O,QAAU7O,EAAKiB,QAC3CjB,EAAKuJ,WAAa,IAAIA,EA2lBtB1R,GAAQyP,WAAa3K,EAAQiC,MAAO,IAAK9C,KAAM+E,GAAY0D,KAAM,MAAS5H,EAG1EkG,IAIAhL,GAAQiP,aAAe5B,EAAQ,SAAUC,GAGxC,OAA4E,EAArEA,EAAG7F,wBAAyBjJ,EAAS0C,cAAe,eAG5DW,GAAO4J,KAAOA,EAGd5J,GAAOqN,KAAM,KAAQrN,GAAOqN,KAAK9F,QACjCvH,GAAOoV,OAASpV,GAAO0N,WAIvB9D,EAAKsG,QAAUA,GACftG,EAAKqB,OAASA,GACdrB,EAAKT,YAAcA,EACnBS,EAAKe,SAAWA,EAEhBf,EAAKf,OAAS7I,GAAOkG,eACrB0D,EAAKyL,QAAUrV,GAAOV,KACtBsK,EAAK0L,MAAQtV,GAAOmE,SACpByF,EAAK2L,UAAYvV,GAAOqN,KACxBzD,EAAKzL,QAAU6B,GAAO7B,QACtByL,EAAK8D,WAAa1N,GAAO0N,WAniEzB,GA0iEA,IAAInE,EAAM,SAAU/I,EAAM+I,EAAKiM,GAC9B,IAAIzF,EAAU,GACb0F,OAAqBzS,IAAVwS,EAEZ,OAAUhV,EAAOA,EAAM+I,KAA6B,IAAlB/I,EAAKlC,SACtC,GAAuB,IAAlBkC,EAAKlC,SAAiB,CAC1B,GAAKmX,GAAYzV,GAAQQ,GAAOkV,GAAIF,GACnC,MAEDzF,EAAQpS,KAAM6C,GAGhB,OAAOuP,GAIJ4F,EAAW,SAAUC,EAAGpV,GAG3B,IAFA,IAAIuP,EAAU,GAEN6F,EAAGA,EAAIA,EAAEtE,YACI,IAAfsE,EAAEtX,UAAkBsX,IAAMpV,GAC9BuP,EAAQpS,KAAMiY,GAIhB,OAAO7F,GAIJ8F,EAAgB7V,GAAOqN,KAAKrD,MAAM1B,aAElCwN,EAAa,kEAKjB,SAASC,EAAQzI,EAAU0I,EAAWhG,GACrC,OAAK5R,EAAY4X,GACThW,GAAO8B,KAAMwL,EAAU,SAAU9M,EAAMtB,GAC7C,QAAS8W,EAAUxY,KAAMgD,EAAMtB,EAAGsB,KAAWwP,IAK1CgG,EAAU1X,SACP0B,GAAO8B,KAAMwL,EAAU,SAAU9M,GACvC,OAASA,IAASwV,IAAgBhG,IAKV,iBAAdgG,EACJhW,GAAO8B,KAAMwL,EAAU,SAAU9M,GACvC,OAA4C,EAAnC5C,GAAQJ,KAAMwY,EAAWxV,KAAkBwP,IAK/ChQ,GAAO4M,OAAQoJ,EAAW1I,EAAU0C,GAG5ChQ,GAAO4M,OAAS,SAAUS,EAAMnM,EAAO8O,GACtC,IAAIxP,EAAOU,EAAO,GAMlB,OAJK8O,IACJ3C,EAAO,QAAUA,EAAO,KAGH,IAAjBnM,EAAMZ,QAAkC,IAAlBE,EAAKlC,SACxB0B,GAAO4J,KAAK2D,gBAAiB/M,EAAM6M,GAAS,CAAE7M,GAAS,GAGxDR,GAAO4J,KAAKjF,QAAS0I,EAAMrN,GAAO8B,KAAMZ,EAAO,SAAUV,GAC/D,OAAyB,IAAlBA,EAAKlC,aAId0B,GAAOG,GAAGmC,OAAQ,CACjBsH,KAAM,SAAU3J,GACf,IAAIf,EAAGiC,EACNc,EAAMlF,KAAKuD,OACX2V,EAAOlZ,KAER,GAAyB,iBAAbkD,EACX,OAAOlD,KAAKkE,UAAWjB,GAAQC,GAAW2M,OAAQ,WACjD,IAAM1N,EAAI,EAAGA,EAAI+C,EAAK/C,IACrB,GAAKc,GAAOwF,SAAUyQ,EAAM/W,GAAKnC,MAChC,OAAO,KAQX,IAFAoE,EAAMpE,KAAKkE,UAAW,IAEhB/B,EAAI,EAAGA,EAAI+C,EAAK/C,IACrBc,GAAO4J,KAAM3J,EAAUgW,EAAM/W,GAAKiC,GAGnC,OAAa,EAANc,EAAUjC,GAAO0N,WAAYvM,GAAQA,GAE7CyL,OAAQ,SAAU3M,GACjB,OAAOlD,KAAKkE,UAAW8U,EAAQhZ,KAAMkD,GAAY,IAAI,KAEtD+P,IAAK,SAAU/P,GACd,OAAOlD,KAAKkE,UAAW8U,EAAQhZ,KAAMkD,GAAY,IAAI,KAEtDyV,GAAI,SAAUzV,GACb,QAAS8V,EACRhZ,KAIoB,iBAAbkD,GAAyB4V,EAAcrR,KAAMvE,GACnDD,GAAQC,GACRA,GAAY,IACb,GACCK,UASJ,IAAI4V,EAMHzN,EAAa,uCAENzI,GAAOG,GAAGC,KAAO,SAAUH,EAAUC,EAASuQ,GACpD,IAAIzG,EAAOxJ,EAGX,IAAMP,EACL,OAAOlD,KAQR,GAHA0T,EAAOA,GAAQyF,EAGU,iBAAbjW,EAAwB,CAanC,KAPC+J,EALsB,MAAlB/J,EAAU,IACsB,MAApCA,EAAUA,EAASK,OAAS,IACT,GAAnBL,EAASK,OAGD,CAAE,KAAML,EAAU,MAGlBwI,EAAW2B,KAAMnK,MAIV+J,EAAO,IAAQ9J,EA6CxB,OAAMA,GAAWA,EAAQU,QACtBV,GAAWuQ,GAAO7G,KAAM3J,GAK1BlD,KAAK8D,YAAaX,GAAU0J,KAAM3J,GAhDzC,GAAK+J,EAAO,GAAM,CAYjB,GAXA9J,EAAUA,aAAmBF,GAASE,EAAS,GAAMA,EAIrDF,GAAOoB,MAAOrE,KAAMiD,GAAOmW,UAC1BnM,EAAO,GACP9J,GAAWA,EAAQ5B,SAAW4B,EAAQqE,eAAiBrE,EAAUvD,GACjE,IAIImZ,EAAWtR,KAAMwF,EAAO,KAAShK,GAAO6C,cAAe3C,GAC3D,IAAM8J,KAAS9J,EAGT9B,EAAYrB,KAAMiN,IACtBjN,KAAMiN,GAAS9J,EAAS8J,IAIxBjN,KAAKyQ,KAAMxD,EAAO9J,EAAS8J,IAK9B,OAAOjN,KAYP,OARAyD,EAAO7D,EAAS0N,eAAgBL,EAAO,OAKtCjN,KAAM,GAAMyD,EACZzD,KAAKuD,OAAS,GAERvD,KAcH,OAAKkD,EAAS3B,UACpBvB,KAAM,GAAMkD,EACZlD,KAAKuD,OAAS,EACPvD,MAIIqB,EAAY6B,QACD+C,IAAfyN,EAAK2F,MACX3F,EAAK2F,MAAOnW,GAGZA,EAAUD,IAGLA,GAAOgE,UAAW/D,EAAUlD,QAIhC4D,UAAYX,GAAOG,GAGxB+V,EAAalW,GAAQrD,GAGrB,IAAI0Z,EAAe,iCAGlBC,EAAmB,CAClBC,UAAU,EACVC,UAAU,EACVhN,MAAM,EACNiN,MAAM,GAoFR,SAASC,EAASC,EAAKpN,GACtB,OAAUoN,EAAMA,EAAKpN,KAA4B,IAAjBoN,EAAIrY,UACpC,OAAOqY,EAnFR3W,GAAOG,GAAGmC,OAAQ,CACjB8N,IAAK,SAAUzN,GACd,IAAIiU,EAAU5W,GAAQ2C,EAAQ5F,MAC7B8Z,EAAID,EAAQtW,OAEb,OAAOvD,KAAK6P,OAAQ,WAEnB,IADA,IAAI1N,EAAI,EACAA,EAAI2X,EAAG3X,IACd,GAAKc,GAAOwF,SAAUzI,KAAM6Z,EAAS1X,IACpC,OAAO,KAMX4X,QAAS,SAAUvB,EAAWrV,GAC7B,IAAIyW,EACHzX,EAAI,EACJ2X,EAAI9Z,KAAKuD,OACTyP,EAAU,GACV6G,EAA+B,iBAAdrB,GAA0BvV,GAAQuV,GAGpD,IAAMM,EAAcrR,KAAM+Q,GACzB,KAAQrW,EAAI2X,EAAG3X,IACd,IAAMyX,EAAM5Z,KAAMmC,GAAKyX,GAAOA,IAAQzW,EAASyW,EAAMA,EAAIhX,WAGxD,GAAKgX,EAAIrY,SAAW,KAAQsY,GACH,EAAxBA,EAAQG,MAAOJ,GAGE,IAAjBA,EAAIrY,UACH0B,GAAO4J,KAAK2D,gBAAiBoJ,EAAKpB,IAAgB,CAEnDxF,EAAQpS,KAAMgZ,GACd,MAMJ,OAAO5Z,KAAKkE,UAA4B,EAAjB8O,EAAQzP,OAAaN,GAAO0N,WAAYqC,GAAYA,IAI5EgH,MAAO,SAAUvW,GAGhB,OAAMA,EAKe,iBAATA,EACJ5C,GAAQJ,KAAMwC,GAAQQ,GAAQzD,KAAM,IAIrCa,GAAQJ,KAAMT,KAGpByD,EAAKI,OAASJ,EAAM,GAAMA,GAZjBzD,KAAM,IAAOA,KAAM,GAAI4C,WAAe5C,KAAK2E,QAAQsV,UAAU1W,QAAU,GAgBlF2W,IAAK,SAAUhX,EAAUC,GACxB,OAAOnD,KAAKkE,UACXjB,GAAO0N,WACN1N,GAAOoB,MAAOrE,KAAKgE,MAAOf,GAAQC,EAAUC,OAK/CgX,QAAS,SAAUjX,GAClB,OAAOlD,KAAKka,IAAiB,MAAZhX,EAChBlD,KAAKsE,WAAatE,KAAKsE,WAAWuL,OAAQ3M,OAU7CD,GAAOsB,KAAM,CACZgO,OAAQ,SAAU9O,GACjB,IAAI8O,EAAS9O,EAAKb,WAClB,OAAO2P,GAA8B,KAApBA,EAAOhR,SAAkBgR,EAAS,MAEpD6H,QAAS,SAAU3W,GAClB,OAAO+I,EAAK/I,EAAM,eAEnB4W,aAAc,SAAU5W,EAAM2E,EAAIqQ,GACjC,OAAOjM,EAAK/I,EAAM,aAAcgV,IAEjChM,KAAM,SAAUhJ,GACf,OAAOkW,EAASlW,EAAM,gBAEvBiW,KAAM,SAAUjW,GACf,OAAOkW,EAASlW,EAAM,oBAEvB6W,QAAS,SAAU7W,GAClB,OAAO+I,EAAK/I,EAAM,gBAEnBwW,QAAS,SAAUxW,GAClB,OAAO+I,EAAK/I,EAAM,oBAEnB8W,UAAW,SAAU9W,EAAM2E,EAAIqQ,GAC9B,OAAOjM,EAAK/I,EAAM,cAAegV,IAElC+B,UAAW,SAAU/W,EAAM2E,EAAIqQ,GAC9B,OAAOjM,EAAK/I,EAAM,kBAAmBgV,IAEtCG,SAAU,SAAUnV,GACnB,OAAOmV,GAAYnV,EAAKb,YAAc,IAAK8P,WAAYjP,IAExD+V,SAAU,SAAU/V,GACnB,OAAOmV,EAAUnV,EAAKiP,aAEvB+G,SAAU,SAAUhW,GACnB,OAA6B,MAAxBA,EAAKgX,iBAKTta,EAAUsD,EAAKgX,iBAERhX,EAAKgX,iBAMRjX,GAAUC,EAAM,cACpBA,EAAOA,EAAKiX,SAAWjX,GAGjBR,GAAOoB,MAAO,GAAIZ,EAAKiJ,eAE7B,SAAUhJ,EAAMN,GAClBH,GAAOG,GAAIM,GAAS,SAAU+U,EAAOvV,GACpC,IAAI8P,EAAU/P,GAAOwB,IAAKzE,KAAMoD,EAAIqV,GAuBpC,MArB0B,UAArB/U,EAAKpD,OAAQ,KACjB4C,EAAWuV,GAGPvV,GAAgC,iBAAbA,IACvB8P,EAAU/P,GAAO4M,OAAQ3M,EAAU8P,IAGjB,EAAdhT,KAAKuD,SAGHgW,EAAkB7V,IACvBT,GAAO0N,WAAYqC,GAIfsG,EAAa7R,KAAM/D,IACvBsP,EAAQ2H,WAIH3a,KAAKkE,UAAW8O,MAGzB,IAAI4H,EAAgB,oBAsOpB,SAASC,EAAUC,GAClB,OAAOA,EAER,SAASC,EAASC,GACjB,MAAMA,EAGP,SAASC,EAAYlT,EAAOmT,EAASC,EAAQC,GAC5C,IAAIC,EAEJ,IAGMtT,GAAS1G,EAAcga,EAAStT,EAAMuT,SAC1CD,EAAO5a,KAAMsH,GAAQ+B,KAAMoR,GAAUK,KAAMJ,GAGhCpT,GAAS1G,EAAcga,EAAStT,EAAMyT,MACjDH,EAAO5a,KAAMsH,EAAOmT,EAASC,GAQ7BD,EAAQva,WAAOsF,EAAW,CAAE8B,GAAQzH,MAAO8a,IAM3C,MAAQrT,GAIToT,EAAOxa,WAAOsF,EAAW,CAAE8B,KAvO7B9E,GAAOwY,UAAY,SAAUjW,GA9B7B,IAAwBA,EACnBkW,EAiCJlW,EAA6B,iBAAZA,GAlCMA,EAmCPA,EAlCZkW,EAAS,GACbzY,GAAOsB,KAAMiB,EAAQyH,MAAO2N,IAAmB,GAAI,SAAUe,EAAGC,GAC/DF,EAAQE,IAAS,IAEXF,GA+BNzY,GAAOsC,OAAQ,GAAIC,GAEpB,IACCqW,EAGAC,EAGAC,EAGAC,EAGAC,EAAO,GAGPC,EAAQ,GAGRC,GAAe,EAGfC,EAAO,WAQN,IALAJ,EAASA,GAAUxW,EAAQ6W,KAI3BN,EAAQF,GAAS,EACTK,EAAM3Y,OAAQ4Y,GAAe,EAAI,CACxCL,EAASI,EAAM3N,QACf,QAAU4N,EAAcF,EAAK1Y,QAGmC,IAA1D0Y,EAAME,GAAcxb,MAAOmb,EAAQ,GAAKA,EAAQ,KACpDtW,EAAQ8W,cAGRH,EAAcF,EAAK1Y,OACnBuY,GAAS,GAMNtW,EAAQsW,SACbA,GAAS,GAGVD,GAAS,EAGJG,IAIHC,EADIH,EACG,GAIA,KAMV5C,EAAO,CAGNgB,IAAK,WA2BJ,OA1BK+B,IAGCH,IAAWD,IACfM,EAAcF,EAAK1Y,OAAS,EAC5B2Y,EAAMtb,KAAMkb,IAGb,SAAW5B,EAAKrH,GACf5P,GAAOsB,KAAMsO,EAAM,SAAU8I,EAAG7T,GAC1BzG,EAAYyG,GACVtC,EAAQ6S,QAAWa,EAAK7F,IAAKvL,IAClCmU,EAAKrb,KAAMkH,GAEDA,GAAOA,EAAIvE,QAA4B,WAAlBT,EAAQgF,IAGxCoS,EAAKpS,KATR,CAYKpD,WAEAoX,IAAWD,GACfO,KAGKpc,MAIRuc,OAAQ,WAYP,OAXAtZ,GAAOsB,KAAMG,UAAW,SAAUiX,EAAG7T,GACpC,IAAIkS,EACJ,OAA0D,GAAhDA,EAAQ/W,GAAOkE,QAASW,EAAKmU,EAAMjC,IAC5CiC,EAAK3W,OAAQ0U,EAAO,GAGfA,GAASmC,GACbA,MAIInc,MAKRqT,IAAK,SAAUjQ,GACd,OAAOA,GACwB,EAA9BH,GAAOkE,QAAS/D,EAAI6Y,GACN,EAAdA,EAAK1Y,QAIP+Q,MAAO,WAIN,OAHK2H,IACJA,EAAO,IAEDjc,MAMRwc,QAAS,WAGR,OAFAR,EAASE,EAAQ,GACjBD,EAAOH,EAAS,GACT9b,MAERuM,SAAU,WACT,OAAQ0P,GAMTQ,KAAM,WAKL,OAJAT,EAASE,EAAQ,GACXJ,GAAWD,IAChBI,EAAOH,EAAS,IAEV9b,MAERgc,OAAQ,WACP,QAASA,GAIVU,SAAU,SAAUvZ,EAAS0P,GAS5B,OARMmJ,IAELnJ,EAAO,CAAE1P,GADT0P,EAAOA,GAAQ,IACQvS,MAAQuS,EAAKvS,QAAUuS,GAC9CqJ,EAAMtb,KAAMiS,GACNgJ,GACLO,KAGKpc,MAIRoc,KAAM,WAEL,OADAlD,EAAKwD,SAAU1c,KAAM0E,WACd1E,MAIR+b,MAAO,WACN,QAASA,IAIZ,OAAO7C,GA4CRjW,GAAOsC,OAAQ,CAEdoX,SAAU,SAAUC,GACnB,IAAIC,EAAS,CAIX,CAAE,SAAU,WAAY5Z,GAAOwY,UAAW,UACzCxY,GAAOwY,UAAW,UAAY,GAC/B,CAAE,UAAW,OAAQxY,GAAOwY,UAAW,eACtCxY,GAAOwY,UAAW,eAAiB,EAAG,YACvC,CAAE,SAAU,OAAQxY,GAAOwY,UAAW,eACrCxY,GAAOwY,UAAW,eAAiB,EAAG,aAExCqB,EAAQ,UACRxB,EAAU,CACTwB,MAAO,WACN,OAAOA,GAERC,OAAQ,WAEP,OADAC,EAASlT,KAAMpF,WAAY6W,KAAM7W,WAC1B1E,MAERid,QAAS,SAAU7Z,GAClB,OAAOkY,EAAQE,KAAM,KAAMpY,IAI5B8Z,KAAM,WACL,IAAIC,EAAMzY,UAEV,OAAOzB,GAAO0Z,SAAU,SAAUS,GACjCna,GAAOsB,KAAMsY,EAAQ,SAAUzU,EAAIiV,GAGlC,IAAIja,EAAK/B,EAAY8b,EAAKE,EAAO,MAAWF,EAAKE,EAAO,IAKxDL,EAAUK,EAAO,IAAO,WACvB,IAAIC,EAAWla,GAAMA,EAAGzC,MAAOX,KAAM0E,WAChC4Y,GAAYjc,EAAYic,EAAShC,SACrCgC,EAAShC,UACPiC,SAAUH,EAASI,QACnB1T,KAAMsT,EAASlC,SACfK,KAAM6B,EAASjC,QAEjBiC,EAAUC,EAAO,GAAM,QACtBrd,KACAoD,EAAK,CAAEka,GAAa5Y,eAKxByY,EAAM,OACH7B,WAELE,KAAM,SAAUiC,EAAaC,EAAYC,GACxC,IAAIC,EAAW,EACf,SAAS1C,EAAS2C,EAAOb,EAAUc,EAASC,GAC3C,OAAO,WACN,IAAIC,EAAOhe,KACV6S,EAAOnO,UACPuZ,EAAa,WACZ,IAAIX,EAAU9B,EAKd,KAAKqC,EAAQD,GAAb,CAQA,IAJAN,EAAWQ,EAAQnd,MAAOqd,EAAMnL,MAIdmK,EAAS1B,UAC1B,MAAM,IAAI4C,UAAW,4BAOtB1C,EAAO8B,IAKgB,iBAAbA,GACY,mBAAbA,IACRA,EAAS9B,KAGLna,EAAYma,GAGXuC,EACJvC,EAAK/a,KACJ6c,EACApC,EAAS0C,EAAUZ,EAAUnC,EAAUkD,GACvC7C,EAAS0C,EAAUZ,EAAUjC,EAASgD,KAOvCH,IAEApC,EAAK/a,KACJ6c,EACApC,EAAS0C,EAAUZ,EAAUnC,EAAUkD,GACvC7C,EAAS0C,EAAUZ,EAAUjC,EAASgD,GACtC7C,EAAS0C,EAAUZ,EAAUnC,EAC5BmC,EAASmB,eASPL,IAAYjD,IAChBmD,OAAO/X,EACP4M,EAAO,CAAEyK,KAKRS,GAAWf,EAASoB,aAAeJ,EAAMnL,MAK7CwL,EAAUN,EACTE,EACA,WACC,IACCA,IACC,MAAQtR,GAEJ1J,GAAO0Z,SAAS2B,eACpBrb,GAAO0Z,SAAS2B,cAAe3R,EAC9B0R,EAAQ9X,OAMQqX,GAAbC,EAAQ,IAIPC,IAAY/C,IAChBiD,OAAO/X,EACP4M,EAAO,CAAElG,IAGVqQ,EAASuB,WAAYP,EAAMnL,MAS3BgL,EACJQ,KAKKpb,GAAO0Z,SAAS6B,aACpBH,EAAQ9X,MAAQtD,GAAO0Z,SAAS6B,eAMrBvb,GAAO0Z,SAAS8B,eAC3BJ,EAAQ9X,MAAQtD,GAAO0Z,SAAS8B,gBAEjC1e,GAAO2e,WAAYL,KAKtB,OAAOpb,GAAO0Z,SAAU,SAAUS,GAGjCP,EAAQ,GAAK,GAAI3C,IAChBgB,EACC,EACAkC,EACA/b,EAAYsc,GACXA,EACA9C,EACDuC,EAASe,aAKXtB,EAAQ,GAAK,GAAI3C,IAChBgB,EACC,EACAkC,EACA/b,EAAYoc,GACXA,EACA5C,IAKHgC,EAAQ,GAAK,GAAI3C,IAChBgB,EACC,EACAkC,EACA/b,EAAYqc,GACXA,EACA3C,MAGAO,WAKLA,QAAS,SAAUha,GAClB,OAAc,MAAPA,EAAc2B,GAAOsC,OAAQjE,EAAKga,GAAYA,IAGvD0B,EAAW,GAkEZ,OA/DA/Z,GAAOsB,KAAMsY,EAAQ,SAAU1a,EAAGkb,GACjC,IAAIpB,EAAOoB,EAAO,GACjBsB,EAActB,EAAO,GAKtB/B,EAAS+B,EAAO,IAAQpB,EAAK/B,IAGxByE,GACJ1C,EAAK/B,IACJ,WAIC4C,EAAQ6B,GAKT9B,EAAQ,EAAI1a,GAAK,GAAIqa,QAIrBK,EAAQ,EAAI1a,GAAK,GAAIqa,QAGrBK,EAAQ,GAAK,GAAIJ,KAGjBI,EAAQ,GAAK,GAAIJ,MAOnBR,EAAK/B,IAAKmD,EAAO,GAAIjB,MAKrBY,EAAUK,EAAO,IAAQ,WAExB,OADAL,EAAUK,EAAO,GAAM,QAAUrd,OAASgd,OAAW/W,EAAYjG,KAAM0E,WAChE1E,MAMRgd,EAAUK,EAAO,GAAM,QAAWpB,EAAKS,WAIxCpB,EAAQA,QAAS0B,GAGZJ,GACJA,EAAKnc,KAAMuc,EAAUA,GAIfA,GAIR4B,KAAM,SAAUC,GACf,IAGCC,EAAYpa,UAAUnB,OAGtBpB,EAAI2c,EAGJC,EAAkBhZ,MAAO5D,GACzB6c,EAAgB1e,GAAMG,KAAMiE,WAG5Bua,EAAUhc,GAAO0Z,WAGjBuC,EAAa,SAAU/c,GACtB,OAAO,SAAU4F,GAChBgX,EAAiB5c,GAAMnC,KACvBgf,EAAe7c,GAAyB,EAAnBuC,UAAUnB,OAAajD,GAAMG,KAAMiE,WAAcqD,IAC5D+W,GACTG,EAAQb,YAAaW,EAAiBC,KAM1C,GAAKF,GAAa,IACjB7D,EAAY4D,EAAaI,EAAQnV,KAAMoV,EAAY/c,IAAM+Y,QAAS+D,EAAQ9D,QACxE2D,GAGuB,YAApBG,EAAQnC,SACZzb,EAAY2d,EAAe7c,IAAO6c,EAAe7c,GAAIqZ,OAErD,OAAOyD,EAAQzD,OAKjB,MAAQrZ,IACP8Y,EAAY+D,EAAe7c,GAAK+c,EAAY/c,GAAK8c,EAAQ9D,QAG1D,OAAO8D,EAAQ3D,aAOjB,IAAI6D,EAAc,yDAKlBlc,GAAO0Z,SAAS2B,cAAgB,SAAU/X,EAAO6Y,GAI3Crf,GAAOsf,SAAWtf,GAAOsf,QAAQC,MAAQ/Y,GAAS4Y,EAAY1X,KAAMlB,EAAM7C,OAC9E3D,GAAOsf,QAAQC,KAAM,8BAAgC/Y,EAAMgZ,QAC1DhZ,EAAMiZ,MAAOJ,IAOhBnc,GAAOwc,eAAiB,SAAUlZ,GACjCxG,GAAO2e,WAAY,WAClB,MAAMnY,KAQR,IAAImZ,EAAYzc,GAAO0Z,WAkDvB,SAASgD,IACR/f,EAASggB,oBAAqB,mBAAoBD,GAClD5f,GAAO6f,oBAAqB,OAAQD,GACpC1c,GAAOoW,QAnDRpW,GAAOG,GAAGiW,MAAQ,SAAUjW,GAY3B,OAVAsc,EACElE,KAAMpY,GAKN6Z,SAAO,SAAU1W,GACjBtD,GAAOwc,eAAgBlZ,KAGlBvG,MAGRiD,GAAOsC,OAAQ,CAGde,SAAS,EAITuZ,UAAW,EAGXxG,MAAO,SAAUyG,KAGF,IAATA,IAAkB7c,GAAO4c,UAAY5c,GAAOqD,WAKjDrD,GAAOqD,SAAU,KAGZwZ,GAAsC,IAAnB7c,GAAO4c,WAK/BH,EAAUtB,YAAaxe,EAAU,CAAEqD,QAIrCA,GAAOoW,MAAMmC,KAAOkE,EAAUlE,KAaD,aAAxB5b,EAASmgB,YACa,YAAxBngB,EAASmgB,aAA6BngB,EAASmH,gBAAgBiZ,SAGjEjgB,GAAO2e,WAAYzb,GAAOoW,QAK1BzZ,EAAS2P,iBAAkB,mBAAoBoQ,GAG/C5f,GAAOwP,iBAAkB,OAAQoQ,IAQlC,IAAIM,EAAS,SAAU9b,EAAOf,EAAIiL,EAAKtG,EAAOmY,EAAWC,EAAUC,GAClE,IAAIje,EAAI,EACP+C,EAAMf,EAAMZ,OACZ8c,EAAc,MAAPhS,EAGR,GAAuB,WAAlBvL,EAAQuL,GAEZ,IAAMlM,KADN+d,GAAY,EACD7R,EACV4R,EAAQ9b,EAAOf,EAAIjB,EAAGkM,EAAKlM,IAAK,EAAMge,EAAUC,QAI3C,QAAena,IAAV8B,IACXmY,GAAY,EAEN7e,EAAY0G,KACjBqY,GAAM,GAGFC,IAGCD,GACJhd,EAAG3C,KAAM0D,EAAO4D,GAChB3E,EAAK,OAILid,EAAOjd,EACPA,EAAK,SAAUK,EAAM6c,EAAMvY,GAC1B,OAAOsY,EAAK5f,KAAMwC,GAAQQ,GAAQsE,MAKhC3E,GACJ,KAAQjB,EAAI+C,EAAK/C,IAChBiB,EACCe,EAAOhC,GAAKkM,EAAK+R,EAChBrY,EACAA,EAAMtH,KAAM0D,EAAOhC,GAAKA,EAAGiB,EAAIe,EAAOhC,GAAKkM,KAMhD,OAAK6R,EACG/b,EAIHkc,EACGjd,EAAG3C,KAAM0D,GAGVe,EAAM9B,EAAIe,EAAO,GAAKkK,GAAQ8R,GAKlCI,EAAY,QACfC,EAAa,YAGd,SAASC,EAAYC,EAAMC,GAC1B,OAAOA,EAAOC,cAMf,SAASC,EAAWC,GACnB,OAAOA,EAAOza,QAASka,EAAW,OAAQla,QAASma,EAAYC,GAEhE,IAAIM,EAAa,SAAUC,GAQ1B,OAA0B,IAAnBA,EAAMzf,UAAqC,IAAnByf,EAAMzf,YAAsByf,EAAMzf,UAMlE,SAAS0f,IACRjhB,KAAKkG,QAAUjD,GAAOiD,QAAU+a,EAAKC,MAGtCD,EAAKC,IAAM,EAEXD,EAAKrd,UAAY,CAEhBwK,MAAO,SAAU4S,GAGhB,IAAIjZ,EAAQiZ,EAAOhhB,KAAKkG,SA4BxB,OAzBM6B,IACLA,EAAQ,GAKHgZ,EAAYC,KAIXA,EAAMzf,SACVyf,EAAOhhB,KAAKkG,SAAY6B,EAMxB3H,OAAO+gB,eAAgBH,EAAOhhB,KAAKkG,QAAS,CAC3C6B,MAAOA,EACPqZ,cAAc,MAMXrZ,GAERsZ,IAAK,SAAUL,EAAOM,EAAMvZ,GAC3B,IAAIwZ,EACHnT,EAAQpO,KAAKoO,MAAO4S,GAIrB,GAAqB,iBAATM,EACXlT,EAAOyS,EAAWS,IAAWvZ,OAM7B,IAAMwZ,KAAQD,EACblT,EAAOyS,EAAWU,IAAWD,EAAMC,GAGrC,OAAOnT,GAERpK,IAAK,SAAUgd,EAAO3S,GACrB,YAAepI,IAARoI,EACNrO,KAAKoO,MAAO4S,GAGZA,EAAOhhB,KAAKkG,UAAa8a,EAAOhhB,KAAKkG,SAAW2a,EAAWxS,KAE7D4R,OAAQ,SAAUe,EAAO3S,EAAKtG,GAa7B,YAAa9B,IAARoI,GACCA,GAAsB,iBAARA,QAAgCpI,IAAV8B,EAElC/H,KAAKgE,IAAKgd,EAAO3S,IASzBrO,KAAKqhB,IAAKL,EAAO3S,EAAKtG,QAIL9B,IAAV8B,EAAsBA,EAAQsG,IAEtCkO,OAAQ,SAAUyE,EAAO3S,GACxB,IAAIlM,EACHiM,EAAQ4S,EAAOhhB,KAAKkG,SAErB,QAAeD,IAAVmI,EAAL,CAIA,QAAanI,IAARoI,EAAoB,CAkBxBlM,GAXCkM,EAJItI,MAAMC,QAASqI,GAIbA,EAAI5J,IAAKoc,IAEfxS,EAAMwS,EAAWxS,MAIJD,EACZ,CAAEC,GACAA,EAAIpB,MAAO2N,IAAmB,IAG1BrX,OAER,MAAQpB,WACAiM,EAAOC,EAAKlM,UAKR8D,IAARoI,GAAqBpL,GAAO2D,cAAewH,MAM1C4S,EAAMzf,SACVyf,EAAOhhB,KAAKkG,cAAYD,SAEjB+a,EAAOhhB,KAAKkG,YAItBsb,QAAS,SAAUR,GAClB,IAAI5S,EAAQ4S,EAAOhhB,KAAKkG,SACxB,YAAiBD,IAAVmI,IAAwBnL,GAAO2D,cAAewH,KAGvD,IAAIqT,EAAW,IAAIR,EAEfS,EAAW,IAAIT,EAcfU,EAAS,gCACZC,EAAa,SA2Bd,SAASC,EAAUpe,EAAM4K,EAAKiT,GAC7B,IAAI5d,EA1Ba4d,EA8BjB,QAAcrb,IAATqb,GAAwC,IAAlB7d,EAAKlC,SAI/B,GAHAmC,EAAO,QAAU2K,EAAIhI,QAASub,EAAY,OAAQje,cAG7B,iBAFrB2d,EAAO7d,EAAKjB,aAAckB,IAEM,CAC/B,IACC4d,EAnCW,UADGA,EAoCEA,IA/BL,UAATA,IAIS,SAATA,EACG,KAIHA,KAAUA,EAAO,IACbA,EAGJK,EAAOla,KAAM6Z,GACVQ,KAAKC,MAAOT,GAGbA,GAeH,MAAQ3U,IAGV+U,EAASL,IAAK5d,EAAM4K,EAAKiT,QAEzBA,OAAOrb,EAGT,OAAOqb,EAGRre,GAAOsC,OAAQ,CACdic,QAAS,SAAU/d,GAClB,OAAOie,EAASF,QAAS/d,IAAUge,EAASD,QAAS/d,IAGtD6d,KAAM,SAAU7d,EAAMC,EAAM4d,GAC3B,OAAOI,EAASzB,OAAQxc,EAAMC,EAAM4d,IAGrCU,WAAY,SAAUve,EAAMC,GAC3Bge,EAASnF,OAAQ9Y,EAAMC,IAKxBue,MAAO,SAAUxe,EAAMC,EAAM4d,GAC5B,OAAOG,EAASxB,OAAQxc,EAAMC,EAAM4d,IAGrCY,YAAa,SAAUze,EAAMC,GAC5B+d,EAASlF,OAAQ9Y,EAAMC,MAIzBT,GAAOG,GAAGmC,OAAQ,CACjB+b,KAAM,SAAUjT,EAAKtG,GACpB,IAAI5F,EAAGuB,EAAM4d,EACZ7d,EAAOzD,KAAM,GACbmiB,EAAQ1e,GAAQA,EAAK8G,WAGtB,QAAatE,IAARoI,EAAoB,CACxB,GAAKrO,KAAKuD,SACT+d,EAAOI,EAAS1d,IAAKP,GAEE,IAAlBA,EAAKlC,WAAmBkgB,EAASzd,IAAKP,EAAM,iBAAmB,CACnEtB,EAAIggB,EAAM5e,OACV,MAAQpB,IAIFggB,EAAOhgB,IAEsB,KADjCuB,EAAOye,EAAOhgB,GAAIuB,MACR7C,QAAS,WAClB6C,EAAOmd,EAAWnd,EAAKpD,MAAO,IAC9BuhB,EAAUpe,EAAMC,EAAM4d,EAAM5d,KAI/B+d,EAASJ,IAAK5d,EAAM,gBAAgB,GAItC,OAAO6d,EAIR,MAAoB,iBAARjT,EACJrO,KAAKuE,KAAM,WACjBmd,EAASL,IAAKrhB,KAAMqO,KAIf4R,EAAQjgB,KAAM,SAAU+H,GAC9B,IAAIuZ,EAOJ,GAAK7d,QAAkBwC,IAAV8B,EAKZ,YAAc9B,KADdqb,EAAOI,EAAS1d,IAAKP,EAAM4K,IAEnBiT,OAMMrb,KADdqb,EAAOO,EAAUpe,EAAM4K,IAEfiT,OAIR,EAIDthB,KAAKuE,KAAM,WAGVmd,EAASL,IAAKrhB,KAAMqO,EAAKtG,MAExB,KAAMA,EAA0B,EAAnBrD,UAAUnB,OAAY,MAAM,IAG7Cye,WAAY,SAAU3T,GACrB,OAAOrO,KAAKuE,KAAM,WACjBmd,EAASnF,OAAQvc,KAAMqO,QAM1BpL,GAAOsC,OAAQ,CACd2W,MAAO,SAAUzY,EAAM9B,EAAM2f,GAC5B,IAAIpF,EAEJ,GAAKzY,EAYJ,OAXA9B,GAASA,GAAQ,MAAS,QAC1Bua,EAAQuF,EAASzd,IAAKP,EAAM9B,GAGvB2f,KACEpF,GAASnW,MAAMC,QAASsb,GAC7BpF,EAAQuF,EAASxB,OAAQxc,EAAM9B,EAAMsB,GAAOgE,UAAWqa,IAEvDpF,EAAMtb,KAAM0gB,IAGPpF,GAAS,IAIlBkG,QAAS,SAAU3e,EAAM9B,GACxBA,EAAOA,GAAQ,KAEf,IAAIua,EAAQjZ,GAAOiZ,MAAOzY,EAAM9B,GAC/B0gB,EAAcnG,EAAM3Y,OACpBH,EAAK8Y,EAAM3N,QACX+T,EAAQrf,GAAOsf,YAAa9e,EAAM9B,GAMvB,eAAPyB,IACJA,EAAK8Y,EAAM3N,QACX8T,KAGIjf,IAIU,OAATzB,GACJua,EAAMsG,QAAS,qBAITF,EAAMG,KACbrf,EAAG3C,KAAMgD,EApBF,WACNR,GAAOmf,QAAS3e,EAAM9B,IAmBF2gB,KAGhBD,GAAeC,GACpBA,EAAMhO,MAAM8H,QAKdmG,YAAa,SAAU9e,EAAM9B,GAC5B,IAAI0M,EAAM1M,EAAO,aACjB,OAAO8f,EAASzd,IAAKP,EAAM4K,IAASoT,EAASxB,OAAQxc,EAAM4K,EAAK,CAC/DiG,MAAOrR,GAAOwY,UAAW,eAAgBvB,IAAK,WAC7CuH,EAASlF,OAAQ9Y,EAAM,CAAE9B,EAAO,QAAS0M,WAM7CpL,GAAOG,GAAGmC,OAAQ,CACjB2W,MAAO,SAAUva,EAAM2f,GACtB,IAAIoB,EAAS,EAQb,MANqB,iBAAT/gB,IACX2f,EAAO3f,EACPA,EAAO,KACP+gB,KAGIhe,UAAUnB,OAASmf,EAChBzf,GAAOiZ,MAAOlc,KAAM,GAAK2B,QAGjBsE,IAATqb,EACNthB,KACAA,KAAKuE,KAAM,WACV,IAAI2X,EAAQjZ,GAAOiZ,MAAOlc,KAAM2B,EAAM2f,GAGtCre,GAAOsf,YAAaviB,KAAM2B,GAEZ,OAATA,GAAgC,eAAfua,EAAO,IAC5BjZ,GAAOmf,QAASpiB,KAAM2B,MAI1BygB,QAAS,SAAUzgB,GAClB,OAAO3B,KAAKuE,KAAM,WACjBtB,GAAOmf,QAASpiB,KAAM2B,MAGxBghB,WAAY,SAAUhhB,GACrB,OAAO3B,KAAKkc,MAAOva,GAAQ,KAAM,KAKlC2Z,QAAS,SAAU3Z,EAAML,GACxB,IAAIshB,EACHC,EAAQ,EACRC,EAAQ7f,GAAO0Z,WACfpM,EAAWvQ,KACXmC,EAAInC,KAAKuD,OACT2X,EAAU,aACC2H,GACTC,EAAM1E,YAAa7N,EAAU,CAAEA,KAIb,iBAAT5O,IACXL,EAAMK,EACNA,OAAOsE,GAERtE,EAAOA,GAAQ,KAEf,MAAQQ,KACPygB,EAAMnB,EAASzd,IAAKuM,EAAUpO,GAAKR,EAAO,gBAC9BihB,EAAItO,QACfuO,IACAD,EAAItO,MAAM4F,IAAKgB,IAIjB,OADAA,IACO4H,EAAMxH,QAASha,MAGxB,IAAIyhB,EAAO,sCAA0CC,OAEjDC,EAAU,IAAIza,OAAQ,iBAAmBua,EAAO,cAAe,KAG/DG,EAAY,CAAE,MAAO,QAAS,SAAU,QAExCnc,EAAkBnH,EAASmH,gBAI1Boc,EAAa,SAAU1f,GACzB,OAAOR,GAAOwF,SAAUhF,EAAK+D,cAAe/D,IAE7C2f,EAAW,CAAEA,UAAU,GAOnBrc,EAAgBsc,cACpBF,EAAa,SAAU1f,GACtB,OAAOR,GAAOwF,SAAUhF,EAAK+D,cAAe/D,IAC3CA,EAAK4f,YAAaD,KAAe3f,EAAK+D,gBAG1C,IAAI8b,GAAqB,SAAU7f,EAAMiL,GAOvC,MAA8B,UAH9BjL,EAAOiL,GAAMjL,GAGD8f,MAAMC,SACM,KAAvB/f,EAAK8f,MAAMC,SAMXL,EAAY1f,IAEsB,SAAlCR,GAAOwgB,IAAKhgB,EAAM,YAKrB,SAASigB,GAAWjgB,EAAM8d,EAAMoC,EAAYC,GAC3C,IAAIC,EAAUC,EACbC,EAAgB,GAChBC,EAAeJ,EACd,WACC,OAAOA,EAAMhK,OAEd,WACC,OAAO3W,GAAOwgB,IAAKhgB,EAAM8d,EAAM,KAEjC0C,EAAUD,IACVE,EAAOP,GAAcA,EAAY,KAAS1gB,GAAOkhB,UAAW5C,GAAS,GAAK,MAG1E6C,EAAgB3gB,EAAKlC,WAClB0B,GAAOkhB,UAAW5C,IAAmB,OAAT2C,IAAkBD,IAChDhB,EAAQ5V,KAAMpK,GAAOwgB,IAAKhgB,EAAM8d,IAElC,GAAK6C,GAAiBA,EAAe,KAAQF,EAAO,CAInDD,GAAoB,EAGpBC,EAAOA,GAAQE,EAAe,GAG9BA,GAAiBH,GAAW,EAE5B,MAAQF,IAIP9gB,GAAOsgB,MAAO9f,EAAM8d,EAAM6C,EAAgBF,IACnC,EAAIJ,IAAY,GAAMA,EAAQE,IAAiBC,GAAW,MAAW,IAC3EF,EAAgB,GAEjBK,GAAgCN,EAIjCM,GAAgC,EAChCnhB,GAAOsgB,MAAO9f,EAAM8d,EAAM6C,EAAgBF,GAG1CP,EAAaA,GAAc,GAgB5B,OAbKA,IACJS,GAAiBA,IAAkBH,GAAW,EAG9CJ,EAAWF,EAAY,GACtBS,GAAkBT,EAAY,GAAM,GAAMA,EAAY,IACrDA,EAAY,GACTC,IACJA,EAAMM,KAAOA,EACbN,EAAMtR,MAAQ8R,EACdR,EAAMxe,IAAMye,IAGPA,EAIR,IAAIQ,GAAoB,GAyBxB,SAASC,GAAU/T,EAAUgU,GAO5B,IANA,IAAIf,EAAS/f,EAxBcA,EACvBiT,EACHxU,EACAsB,EACAggB,EAqBAgB,EAAS,GACTxK,EAAQ,EACRzW,EAASgN,EAAShN,OAGXyW,EAAQzW,EAAQyW,KACvBvW,EAAO8M,EAAUyJ,IACNuJ,QAIXC,EAAU/f,EAAK8f,MAAMC,QAChBe,GAKa,SAAZf,IACJgB,EAAQxK,GAAUyH,EAASzd,IAAKP,EAAM,YAAe,KAC/C+gB,EAAQxK,KACbvW,EAAK8f,MAAMC,QAAU,KAGK,KAAvB/f,EAAK8f,MAAMC,SAAkBF,GAAoB7f,KACrD+gB,EAAQxK,IA7CVwJ,EAFAthB,EADGwU,OAAAA,EACHxU,GAF0BuB,EAiDaA,GA/C5B+D,cACXhE,EAAWC,EAAKD,UAChBggB,EAAUa,GAAmB7gB,MAM9BkT,EAAOxU,EAAIuiB,KAAK9hB,YAAaT,EAAII,cAAekB,IAChDggB,EAAUvgB,GAAOwgB,IAAK/M,EAAM,WAE5BA,EAAK9T,WAAWC,YAAa6T,GAEZ,SAAZ8M,IACJA,EAAU,SAEXa,GAAmB7gB,GAAaggB,MAkCb,SAAZA,IACJgB,EAAQxK,GAAU,OAGlByH,EAASJ,IAAK5d,EAAM,UAAW+f,KAMlC,IAAMxJ,EAAQ,EAAGA,EAAQzW,EAAQyW,IACR,MAAnBwK,EAAQxK,KACZzJ,EAAUyJ,GAAQuJ,MAAMC,QAAUgB,EAAQxK,IAI5C,OAAOzJ,EAGRtN,GAAOG,GAAGmC,OAAQ,CACjBgf,KAAM,WACL,OAAOD,GAAUtkB,MAAM,IAExB0kB,KAAM,WACL,OAAOJ,GAAUtkB,OAElB2kB,OAAQ,SAAU7H,GACjB,MAAsB,kBAAVA,EACJA,EAAQ9c,KAAKukB,OAASvkB,KAAK0kB,OAG5B1kB,KAAKuE,KAAM,WACZ+e,GAAoBtjB,MACxBiD,GAAQjD,MAAOukB,OAEfthB,GAAQjD,MAAO0kB,YAKnB,IAUEE,GACA1U,GAXE2U,GAAiB,wBAEjBC,GAAW,iCAEXC,GAAc,qCAMhBH,GADchlB,EAASolB,yBACRriB,YAAa/C,EAAS0C,cAAe,SACpD4N,GAAQtQ,EAAS0C,cAAe,UAM3BG,aAAc,OAAQ,SAC5ByN,GAAMzN,aAAc,UAAW,WAC/ByN,GAAMzN,aAAc,OAAQ,KAE5BmiB,GAAIjiB,YAAauN,IAIjB9O,GAAQ6jB,WAAaL,GAAIM,WAAW,GAAOA,WAAW,GAAOvS,UAAUwB,QAIvEyQ,GAAIzU,UAAY,yBAChB/O,GAAQ+jB,iBAAmBP,GAAIM,WAAW,GAAOvS,UAAUyS,aAK3DR,GAAIzU,UAAY,oBAChB/O,GAAQikB,SAAWT,GAAIjS,UAKxB,IAAI2S,GAAU,CAKbC,MAAO,CAAE,EAAG,UAAW,YACvBC,IAAK,CAAE,EAAG,oBAAqB,uBAC/BC,GAAI,CAAE,EAAG,iBAAkB,oBAC3BC,GAAI,CAAE,EAAG,qBAAsB,yBAE/BC,SAAU,CAAE,EAAG,GAAI,KAYpB,SAASC,GAAQziB,EAAS6M,GAIzB,IAAI5L,EAYJ,OATCA,EAD4C,oBAAjCjB,EAAQqK,qBACbrK,EAAQqK,qBAAsBwC,GAAO,KAEI,oBAA7B7M,EAAQ4K,iBACpB5K,EAAQ4K,iBAAkBiC,GAAO,KAGjC,QAGM/J,IAAR+J,GAAqBA,GAAOxM,GAAUL,EAAS6M,GAC5C/M,GAAOoB,MAAO,CAAElB,GAAWiB,GAG5BA,EAKR,SAASyhB,GAAe1hB,EAAO2hB,GAI9B,IAHA,IAAI3jB,EAAI,EACP2X,EAAI3V,EAAMZ,OAEHpB,EAAI2X,EAAG3X,IACdsf,EAASJ,IACRld,EAAOhC,GACP,cACC2jB,GAAerE,EAASzd,IAAK8hB,EAAa3jB,GAAK,eA1CnDmjB,GAAQS,MAAQT,GAAQU,MAAQV,GAAQW,SAAWX,GAAQY,QAAUZ,GAAQC,MAC7ED,GAAQa,GAAKb,GAAQI,GAGftkB,GAAQikB,SACbC,GAAQc,SAAWd,GAAQD,OAAS,CAAE,EAAG,+BAAgC,cA2C1E,IAAIgB,GAAQ,YAEZ,SAASC,GAAeniB,EAAOhB,EAASojB,EAASC,EAAWC,GAO3D,IANA,IAAIhjB,EAAMmf,EAAK5S,EAAK0W,EAAMC,EAAUxhB,EACnCyhB,EAAWzjB,EAAQ6hB,yBACnB6B,EAAQ,GACR1kB,EAAI,EACJ2X,EAAI3V,EAAMZ,OAEHpB,EAAI2X,EAAG3X,IAGd,IAFAsB,EAAOU,EAAOhC,KAEQ,IAATsB,EAGZ,GAAwB,WAAnBX,EAAQW,GAIZR,GAAOoB,MAAOwiB,EAAOpjB,EAAKlC,SAAW,CAAEkC,GAASA,QAG1C,GAAM4iB,GAAM5e,KAAMhE,GAIlB,CACNmf,EAAMA,GAAOgE,EAASjkB,YAAaQ,EAAQb,cAAe,QAG1D0N,GAAQ8U,GAASzX,KAAM5J,IAAU,CAAE,GAAI,KAAQ,GAAIE,cACnD+iB,EAAOpB,GAAStV,IAASsV,GAAQK,SACjC/C,EAAIzS,UAAYuW,EAAM,GAAMzjB,GAAO6jB,cAAerjB,GAASijB,EAAM,GAGjEvhB,EAAIuhB,EAAM,GACV,MAAQvhB,IACPyd,EAAMA,EAAIjQ,UAKX1P,GAAOoB,MAAOwiB,EAAOjE,EAAIlW,aAGzBkW,EAAMgE,EAASlU,YAGX5L,YAAc,QAzBlB+f,EAAMjmB,KAAMuC,EAAQ4jB,eAAgBtjB,IA+BvCmjB,EAAS9f,YAAc,GAEvB3E,EAAI,EACJ,MAAUsB,EAAOojB,EAAO1kB,KAGvB,GAAKqkB,IAAkD,EAArCvjB,GAAOkE,QAAS1D,EAAM+iB,GAClCC,GACJA,EAAQ7lB,KAAM6C,QAgBhB,GAXAkjB,EAAWxD,EAAY1f,GAGvBmf,EAAMgD,GAAQgB,EAASjkB,YAAac,GAAQ,UAGvCkjB,GACJd,GAAejD,GAIX2D,EAAU,CACdphB,EAAI,EACJ,MAAU1B,EAAOmf,EAAKzd,KAChB4f,GAAYtd,KAAMhE,EAAK9B,MAAQ,KACnC4kB,EAAQ3lB,KAAM6C,GAMlB,OAAOmjB,EAIR,IAAII,GAAiB,sBAErB,SAASC,KACR,OAAO,EAGR,SAASC,KACR,OAAO,EAGR,SAASC,GAAI1jB,EAAM2jB,EAAOlkB,EAAUoe,EAAMle,EAAIikB,GAC7C,IAAIC,EAAQ3lB,EAGZ,GAAsB,iBAAVylB,EAAqB,CAShC,IAAMzlB,IANmB,iBAAbuB,IAGXoe,EAAOA,GAAQpe,EACfA,OAAW+C,GAEEmhB,EACbD,GAAI1jB,EAAM9B,EAAMuB,EAAUoe,EAAM8F,EAAOzlB,GAAQ0lB,GAEhD,OAAO5jB,EAsBR,GAnBa,MAAR6d,GAAsB,MAANle,GAGpBA,EAAKF,EACLoe,EAAOpe,OAAW+C,GACD,MAAN7C,IACc,iBAAbF,GAGXE,EAAKke,EACLA,OAAOrb,IAIP7C,EAAKke,EACLA,EAAOpe,EACPA,OAAW+C,KAGD,IAAP7C,EACJA,EAAK8jB,QACC,IAAM9jB,EACZ,OAAOK,EAeR,OAZa,IAAR4jB,IACJC,EAASlkB,GACTA,EAAK,SAAUmkB,GAId,OADAtkB,KAASukB,IAAKD,GACPD,EAAO3mB,MAAOX,KAAM0E,aAIzBsD,KAAOsf,EAAOtf,OAAUsf,EAAOtf,KAAO/E,GAAO+E,SAE1CvE,EAAKc,KAAM,WACjBtB,GAAOskB,MAAMrN,IAAKla,KAAMonB,EAAOhkB,EAAIke,EAAMpe,KA+a3C,SAASukB,GAAgB/Y,EAAI/M,EAAM+lB,GAG5BA,GAQNjG,EAASJ,IAAK3S,EAAI/M,GAAM,GACxBsB,GAAOskB,MAAMrN,IAAKxL,EAAI/M,EAAM,CAC3B0F,WAAW,EACXyW,QAAS,SAAUyJ,GAClB,IAAI3V,EACH+V,EAAQlG,EAASzd,IAAKhE,KAAM2B,GAE7B,GAAyB,EAAlB4lB,EAAMK,WAAmB5nB,KAAM2B,IAGrC,GAAMgmB,GA4BQ1kB,GAAOskB,MAAMxJ,QAASpc,IAAU,IAAKkmB,cAClDN,EAAMO,uBAhBN,GARAH,EAAQrnB,GAAMG,KAAMiE,WACpB+c,EAASJ,IAAKrhB,KAAM2B,EAAMgmB,GAG1B3nB,KAAM2B,KACNiQ,EAAS6P,EAASzd,IAAKhE,KAAM2B,GAC7B8f,EAASJ,IAAKrhB,KAAM2B,GAAM,GAErBgmB,IAAU/V,EAMd,OAHA2V,EAAMQ,2BACNR,EAAMS,iBAECpW,OAeE+V,IAGXlG,EAASJ,IAAKrhB,KAAM2B,EAAMsB,GAAOskB,MAAMU,QACtCN,EAAO,GACPA,EAAMrnB,MAAO,GACbN,OAWDunB,EAAMO,kBACNP,EAAMW,8BAAgCjB,aArENhhB,IAA7Bwb,EAASzd,IAAK0K,EAAI/M,IACtBsB,GAAOskB,MAAMrN,IAAKxL,EAAI/M,EAAMslB,IA5a/BhkB,GAAOskB,MAAQ,CAEd/nB,OAAQ,GAER0a,IAAK,SAAUzW,EAAM2jB,EAAOtJ,EAASwD,EAAMpe,GAE1C,IAAIilB,EAAaC,EAAaxF,EAC7ByF,EAAQC,EAAGC,EACXxK,EAASyK,EAAU7mB,EAAM8mB,EAAYC,EACrCC,EAAWlH,EAASzd,IAAKP,GAG1B,GAAMsd,EAAYtd,GAAlB,CAKKqa,EAAQA,UAEZA,GADAqK,EAAcrK,GACQA,QACtB5a,EAAWilB,EAAYjlB,UAKnBA,GACJD,GAAO4J,KAAK2D,gBAAiBzJ,EAAiB7D,GAIzC4a,EAAQ9V,OACb8V,EAAQ9V,KAAO/E,GAAO+E,SAIfqgB,EAASM,EAASN,UACzBA,EAASM,EAASN,OAASjoB,OAAOwoB,OAAQ,QAEnCR,EAAcO,EAASE,UAC9BT,EAAcO,EAASE,OAAS,SAAUlc,GAIzC,MAAyB,oBAAX1J,IAA0BA,GAAOskB,MAAMuB,YAAcnc,EAAEhL,KACpEsB,GAAOskB,MAAMwB,SAASpoB,MAAO8C,EAAMiB,gBAAcuB,IAMpDqiB,GADAlB,GAAUA,GAAS,IAAKna,MAAO2N,IAAmB,CAAE,KAC1CrX,OACV,MAAQ+kB,IAEP3mB,EAAO+mB,GADP9F,EAAMoE,GAAe3Z,KAAM+Z,EAAOkB,KAAS,IACpB,GACvBG,GAAe7F,EAAK,IAAO,IAAKza,MAAO,KAAM9C,OAGvC1D,IAKNoc,EAAU9a,GAAOskB,MAAMxJ,QAASpc,IAAU,GAG1CA,GAASuB,EAAW6a,EAAQ8J,aAAe9J,EAAQiL,WAAcrnB,EAGjEoc,EAAU9a,GAAOskB,MAAMxJ,QAASpc,IAAU,GAG1C4mB,EAAYtlB,GAAOsC,OAAQ,CAC1B5D,KAAMA,EACN+mB,SAAUA,EACVpH,KAAMA,EACNxD,QAASA,EACT9V,KAAM8V,EAAQ9V,KACd9E,SAAUA,EACVqI,aAAcrI,GAAYD,GAAOqN,KAAKrD,MAAM1B,aAAa9D,KAAMvE,GAC/DmE,UAAWohB,EAAW3a,KAAM,MAC1Bqa,IAGKK,EAAWH,EAAQ1mB,OAC1B6mB,EAAWH,EAAQ1mB,GAAS,IACnBsnB,cAAgB,EAGnBlL,EAAQmL,QACiD,IAA9DnL,EAAQmL,MAAMzoB,KAAMgD,EAAM6d,EAAMmH,EAAYL,IAEvC3kB,EAAK8L,kBACT9L,EAAK8L,iBAAkB5N,EAAMymB,IAK3BrK,EAAQ7D,MACZ6D,EAAQ7D,IAAIzZ,KAAMgD,EAAM8kB,GAElBA,EAAUzK,QAAQ9V,OACvBugB,EAAUzK,QAAQ9V,KAAO8V,EAAQ9V,OAK9B9E,EACJslB,EAASljB,OAAQkjB,EAASS,gBAAiB,EAAGV,GAE9CC,EAAS5nB,KAAM2nB,GAIhBtlB,GAAOskB,MAAM/nB,OAAQmC,IAAS,KAMhC4a,OAAQ,SAAU9Y,EAAM2jB,EAAOtJ,EAAS5a,EAAUimB,GAEjD,IAAIhkB,EAAGikB,EAAWxG,EACjByF,EAAQC,EAAGC,EACXxK,EAASyK,EAAU7mB,EAAM8mB,EAAYC,EACrCC,EAAWlH,EAASD,QAAS/d,IAAUge,EAASzd,IAAKP,GAEtD,GAAMklB,IAAeN,EAASM,EAASN,QAAvC,CAMAC,GADAlB,GAAUA,GAAS,IAAKna,MAAO2N,IAAmB,CAAE,KAC1CrX,OACV,MAAQ+kB,IAMP,GAJA3mB,EAAO+mB,GADP9F,EAAMoE,GAAe3Z,KAAM+Z,EAAOkB,KAAS,IACpB,GACvBG,GAAe7F,EAAK,IAAO,IAAKza,MAAO,KAAM9C,OAGvC1D,EAAN,CAOAoc,EAAU9a,GAAOskB,MAAMxJ,QAASpc,IAAU,GAE1C6mB,EAAWH,EADX1mB,GAASuB,EAAW6a,EAAQ8J,aAAe9J,EAAQiL,WAAcrnB,IACpC,GAC7BihB,EAAMA,EAAK,IACV,IAAIpa,OAAQ,UAAYigB,EAAW3a,KAAM,iBAAoB,WAG9Dsb,EAAYjkB,EAAIqjB,EAASjlB,OACzB,MAAQ4B,IACPojB,EAAYC,EAAUrjB,IAEfgkB,GAAeT,IAAaH,EAAUG,UACzC5K,GAAWA,EAAQ9V,OAASugB,EAAUvgB,MACtC4a,IAAOA,EAAInb,KAAM8gB,EAAUlhB,YAC3BnE,GAAYA,IAAaqlB,EAAUrlB,WACxB,OAAbA,IAAqBqlB,EAAUrlB,YAChCslB,EAASljB,OAAQH,EAAG,GAEfojB,EAAUrlB,UACdslB,EAASS,gBAELlL,EAAQxB,QACZwB,EAAQxB,OAAO9b,KAAMgD,EAAM8kB,IAOzBa,IAAcZ,EAASjlB,SACrBwa,EAAQsL,WACkD,IAA/DtL,EAAQsL,SAAS5oB,KAAMgD,EAAMglB,EAAYE,EAASE,SAElD5lB,GAAOqmB,YAAa7lB,EAAM9B,EAAMgnB,EAASE,eAGnCR,EAAQ1mB,SA1Cf,IAAMA,KAAQ0mB,EACbplB,GAAOskB,MAAMhL,OAAQ9Y,EAAM9B,EAAOylB,EAAOkB,GAAKxK,EAAS5a,GAAU,GA8C/DD,GAAO2D,cAAeyhB,IAC1B5G,EAASlF,OAAQ9Y,EAAM,mBAIzBslB,SAAU,SAAUQ,GAEnB,IAAIpnB,EAAGgD,EAAGf,EAAK4O,EAASuV,EAAWiB,EAClC3W,EAAO,IAAI9M,MAAOrB,UAAUnB,QAG5BgkB,EAAQtkB,GAAOskB,MAAMkC,IAAKF,GAE1Bf,GACC/G,EAASzd,IAAKhE,KAAM,WAAcI,OAAOwoB,OAAQ,OAC/CrB,EAAM5lB,OAAU,GACnBoc,EAAU9a,GAAOskB,MAAMxJ,QAASwJ,EAAM5lB,OAAU,GAKjD,IAFAkR,EAAM,GAAM0U,EAENplB,EAAI,EAAGA,EAAIuC,UAAUnB,OAAQpB,IAClC0Q,EAAM1Q,GAAMuC,UAAWvC,GAMxB,GAHAolB,EAAMmC,eAAiB1pB,MAGlB+d,EAAQ4L,cAA2D,IAA5C5L,EAAQ4L,YAAYlpB,KAAMT,KAAMunB,GAA5D,CAKAiC,EAAevmB,GAAOskB,MAAMiB,SAAS/nB,KAAMT,KAAMunB,EAAOiB,GAGxDrmB,EAAI,EACJ,OAAU6Q,EAAUwW,EAAcrnB,QAAYolB,EAAMqC,uBAAyB,CAC5ErC,EAAMsC,cAAgB7W,EAAQvP,KAE9B0B,EAAI,EACJ,OAAUojB,EAAYvV,EAAQwV,SAAUrjB,QACtCoiB,EAAMW,gCAIDX,EAAMuC,aAAsC,IAAxBvB,EAAUlhB,YACnCkgB,EAAMuC,WAAWriB,KAAM8gB,EAAUlhB,aAEjCkgB,EAAMgB,UAAYA,EAClBhB,EAAMjG,KAAOiH,EAAUjH,UAKVrb,KAHb7B,IAAUnB,GAAOskB,MAAMxJ,QAASwK,EAAUG,WAAc,IAAKG,QAC5DN,EAAUzK,SAAUnd,MAAOqS,EAAQvP,KAAMoP,MAGT,KAAzB0U,EAAM3V,OAASxN,KACrBmjB,EAAMS,iBACNT,EAAMO,oBAYX,OAJK/J,EAAQgM,cACZhM,EAAQgM,aAAatpB,KAAMT,KAAMunB,GAG3BA,EAAM3V,SAGd4W,SAAU,SAAUjB,EAAOiB,GAC1B,IAAIrmB,EAAGomB,EAAWnf,EAAK4gB,EAAiBC,EACvCT,EAAe,GACfP,EAAgBT,EAASS,cACzBrP,EAAM2N,EAAM3hB,OAGb,GAAKqjB,GAIJrP,EAAIrY,YAOc,UAAfgmB,EAAM5lB,MAAoC,GAAhB4lB,EAAM9S,QAEnC,KAAQmF,IAAQ5Z,KAAM4Z,EAAMA,EAAIhX,YAAc5C,KAI7C,GAAsB,IAAjB4Z,EAAIrY,WAAoC,UAAfgmB,EAAM5lB,OAAqC,IAAjBiY,EAAIrN,UAAsB,CAGjF,IAFAyd,EAAkB,GAClBC,EAAmB,GACb9nB,EAAI,EAAGA,EAAI8mB,EAAe9mB,SAME8D,IAA5BgkB,EAFL7gB,GAHAmf,EAAYC,EAAUrmB,IAGNe,SAAW,OAG1B+mB,EAAkB7gB,GAAQmf,EAAUhd,cACC,EAApCtI,GAAQmG,EAAKpJ,MAAOga,MAAOJ,GAC3B3W,GAAO4J,KAAMzD,EAAKpJ,KAAM,KAAM,CAAE4Z,IAAQrW,QAErC0mB,EAAkB7gB,IACtB4gB,EAAgBppB,KAAM2nB,GAGnByB,EAAgBzmB,QACpBimB,EAAa5oB,KAAM,CAAE6C,KAAMmW,EAAK4O,SAAUwB,IAY9C,OALApQ,EAAM5Z,KACDipB,EAAgBT,EAASjlB,QAC7BimB,EAAa5oB,KAAM,CAAE6C,KAAMmW,EAAK4O,SAAUA,EAASloB,MAAO2oB,KAGpDO,GAGRU,QAAS,SAAUxmB,EAAMymB,GACxB/pB,OAAO+gB,eAAgBle,GAAOmnB,MAAMxmB,UAAWF,EAAM,CACpD2mB,YAAY,EACZjJ,cAAc,EAEdpd,IAAK3C,EAAY8oB,GAChB,WACC,GAAKnqB,KAAKsqB,cACT,OAAOH,EAAMnqB,KAAKsqB,gBAGpB,WACC,GAAKtqB,KAAKsqB,cACT,OAAOtqB,KAAKsqB,cAAe5mB,IAI9B2d,IAAK,SAAUtZ,GACd3H,OAAO+gB,eAAgBnhB,KAAM0D,EAAM,CAClC2mB,YAAY,EACZjJ,cAAc,EACdmJ,UAAU,EACVxiB,MAAOA,QAMX0hB,IAAK,SAAUa,GACd,OAAOA,EAAernB,GAAOiD,SAC5BokB,EACA,IAAIrnB,GAAOmnB,MAAOE,IAGpBvM,QAAS,CACRyM,KAAM,CAGLC,UAAU,GAEXC,MAAO,CAGNxB,MAAO,SAAU5H,GAIhB,IAAI5S,EAAK1O,MAAQshB,EAWjB,OARKuD,GAAepd,KAAMiH,EAAG/M,OAC5B+M,EAAGgc,OAASlnB,GAAUkL,EAAI,UAG1B+Y,GAAgB/Y,EAAI,SAAS,IAIvB,GAERuZ,QAAS,SAAU3G,GAIlB,IAAI5S,EAAK1O,MAAQshB,EAUjB,OAPKuD,GAAepd,KAAMiH,EAAG/M,OAC5B+M,EAAGgc,OAASlnB,GAAUkL,EAAI,UAE1B+Y,GAAgB/Y,EAAI,UAId,GAKRiX,SAAU,SAAU4B,GACnB,IAAI3hB,EAAS2hB,EAAM3hB,OACnB,OAAOif,GAAepd,KAAM7B,EAAOjE,OAClCiE,EAAO8kB,OAASlnB,GAAUoC,EAAQ,UAClC6b,EAASzd,IAAK4B,EAAQ,UACtBpC,GAAUoC,EAAQ,OAIrB+kB,aAAc,CACbZ,aAAc,SAAUxC,QAIDthB,IAAjBshB,EAAM3V,QAAwB2V,EAAM+C,gBACxC/C,EAAM+C,cAAcM,YAAcrD,EAAM3V,YA0F7C3O,GAAOqmB,YAAc,SAAU7lB,EAAM9B,EAAMknB,GAGrCplB,EAAKmc,qBACTnc,EAAKmc,oBAAqBje,EAAMknB,IAIlC5lB,GAAOmnB,MAAQ,SAAUxoB,EAAKipB,GAG7B,KAAQ7qB,gBAAgBiD,GAAOmnB,OAC9B,OAAO,IAAInnB,GAAOmnB,MAAOxoB,EAAKipB,GAI1BjpB,GAAOA,EAAID,MACf3B,KAAKsqB,cAAgB1oB,EACrB5B,KAAK2B,KAAOC,EAAID,KAIhB3B,KAAK8qB,mBAAqBlpB,EAAImpB,uBACH9kB,IAAzBrE,EAAImpB,mBAGgB,IAApBnpB,EAAIgpB,YACL3D,GACAC,GAKDlnB,KAAK4F,OAAWhE,EAAIgE,QAAkC,IAAxBhE,EAAIgE,OAAOrE,SACxCK,EAAIgE,OAAOhD,WACXhB,EAAIgE,OAEL5F,KAAK6pB,cAAgBjoB,EAAIioB,cACzB7pB,KAAKgrB,cAAgBppB,EAAIopB,eAIzBhrB,KAAK2B,KAAOC,EAIRipB,GACJ5nB,GAAOsC,OAAQvF,KAAM6qB,GAItB7qB,KAAKirB,UAAYrpB,GAAOA,EAAIqpB,WAAaC,KAAKC,MAG9CnrB,KAAMiD,GAAOiD,UAAY,GAK1BjD,GAAOmnB,MAAMxmB,UAAY,CACxBE,YAAab,GAAOmnB,MACpBU,mBAAoB5D,GACpB0C,qBAAsB1C,GACtBgB,8BAA+BhB,GAC/BkE,aAAa,EAEbpD,eAAgB,WACf,IAAIrb,EAAI3M,KAAKsqB,cAEbtqB,KAAK8qB,mBAAqB7D,GAErBta,IAAM3M,KAAKorB,aACfze,EAAEqb,kBAGJF,gBAAiB,WAChB,IAAInb,EAAI3M,KAAKsqB,cAEbtqB,KAAK4pB,qBAAuB3C,GAEvBta,IAAM3M,KAAKorB,aACfze,EAAEmb,mBAGJC,yBAA0B,WACzB,IAAIpb,EAAI3M,KAAKsqB,cAEbtqB,KAAKkoB,8BAAgCjB,GAEhCta,IAAM3M,KAAKorB,aACfze,EAAEob,2BAGH/nB,KAAK8nB,oBAKP7kB,GAAOsB,KAAM,CACZ8mB,QAAQ,EACRC,SAAS,EACTC,YAAY,EACZC,gBAAgB,EAChBC,SAAS,EACTC,QAAQ,EACRC,YAAY,EACZC,SAAS,EACTC,OAAO,EACPC,OAAO,EACPC,UAAU,EACVC,MAAM,EACNC,QAAQ,EACRjqB,MAAM,EACNkqB,UAAU,EACV7d,KAAK,EACL8d,SAAS,EACT1X,QAAQ,EACR2X,SAAS,EACTC,SAAS,EACTC,SAAS,EACTC,SAAS,EACTC,SAAS,EACTC,WAAW,EACXC,aAAa,EACbC,SAAS,EACTC,SAAS,EACTC,eAAe,EACfC,WAAW,EACXC,SAAS,EACTC,OAAO,GACL/pB,GAAOskB,MAAM2C,SAEhBjnB,GAAOsB,KAAM,CAAEoP,MAAO,UAAWsZ,KAAM,YAAc,SAAUtrB,EAAMkmB,GAEpE,SAASqF,EAAoB3D,GAC5B,GAAK3pB,EAASutB,aAAe,CAS5B,IAAItE,EAASpH,EAASzd,IAAKhE,KAAM,UAChCunB,EAAQtkB,GAAOskB,MAAMkC,IAAKF,GAC3BhC,EAAM5lB,KAA4B,YAArB4nB,EAAY5nB,KAAqB,QAAU,OACxD4lB,EAAM6D,aAAc,EAGpBvC,EAAQU,GAMHhC,EAAM3hB,SAAW2hB,EAAMsC,eAK3BhB,EAAQtB,QAMTtkB,GAAOskB,MAAM6F,SAAUvF,EAAc0B,EAAY3jB,OAChD3C,GAAOskB,MAAMkC,IAAKF,IAIrBtmB,GAAOskB,MAAMxJ,QAASpc,GAAS,CAG9BunB,MAAO,WAEN,IAAImE,EAOJ,GAFA5F,GAAgBznB,KAAM2B,GAAM,IAEvB/B,EAASutB,aAcb,OAAO,GARPE,EAAW5L,EAASzd,IAAKhE,KAAM6nB,KAE9B7nB,KAAKuP,iBAAkBsY,EAAcqF,GAEtCzL,EAASJ,IAAKrhB,KAAM6nB,GAAgBwF,GAAY,GAAM,IAOxDpF,QAAS,WAMR,OAHAR,GAAgBznB,KAAM2B,IAGf,GAGR0nB,SAAU,WACT,IAAIgE,EAEJ,IAAKztB,EAASutB,aAWb,OAAO,GAVPE,EAAW5L,EAASzd,IAAKhE,KAAM6nB,GAAiB,GAK/CpG,EAASJ,IAAKrhB,KAAM6nB,EAAcwF,IAHlCrtB,KAAK4f,oBAAqBiI,EAAcqF,GACxCzL,EAASlF,OAAQvc,KAAM6nB,KAa1BlC,SAAU,SAAU4B,GACnB,OAAO9F,EAASzd,IAAKujB,EAAM3hB,OAAQjE,IAGpCkmB,aAAcA,GAef5kB,GAAOskB,MAAMxJ,QAAS8J,GAAiB,CACtCqB,MAAO,WAIN,IAAIhnB,EAAMlC,KAAKwH,eAAiBxH,KAAKJ,UAAYI,KAChDstB,EAAa1tB,EAASutB,aAAentB,KAAOkC,EAC5CmrB,EAAW5L,EAASzd,IAAKspB,EAAYzF,GAMhCwF,IACAztB,EAASutB,aACbntB,KAAKuP,iBAAkBsY,EAAcqF,GAErChrB,EAAIqN,iBAAkB5N,EAAMurB,GAAoB,IAGlDzL,EAASJ,IAAKiM,EAAYzF,GAAgBwF,GAAY,GAAM,IAE7DhE,SAAU,WACT,IAAInnB,EAAMlC,KAAKwH,eAAiBxH,KAAKJ,UAAYI,KAChDstB,EAAa1tB,EAASutB,aAAentB,KAAOkC,EAC5CmrB,EAAW5L,EAASzd,IAAKspB,EAAYzF,GAAiB,EAEjDwF,EAQL5L,EAASJ,IAAKiM,EAAYzF,EAAcwF,IAPnCztB,EAASutB,aACbntB,KAAK4f,oBAAqBiI,EAAcqF,GAExChrB,EAAI0d,oBAAqBje,EAAMurB,GAAoB,GAEpDzL,EAASlF,OAAQ+Q,EAAYzF,QAgBjC5kB,GAAOsB,KAAM,CACZgpB,WAAY,YACZC,WAAY,WACZC,aAAc,cACdC,aAAc,cACZ,SAAUC,EAAMlE,GAClBxmB,GAAOskB,MAAMxJ,QAAS4P,GAAS,CAC9B9F,aAAc4B,EACdT,SAAUS,EAEVZ,OAAQ,SAAUtB,GACjB,IAAInjB,EAEHwpB,EAAUrG,EAAMyD,cAChBzC,EAAYhB,EAAMgB,UASnB,OALMqF,IAAaA,IANT5tB,MAMgCiD,GAAOwF,SANvCzI,KAMyD4tB,MAClErG,EAAM5lB,KAAO4mB,EAAUG,SACvBtkB,EAAMmkB,EAAUzK,QAAQnd,MAAOX,KAAM0E,WACrC6iB,EAAM5lB,KAAO8nB,GAEPrlB,MAKVnB,GAAOG,GAAGmC,OAAQ,CAEjB4hB,GAAI,SAAUC,EAAOlkB,EAAUoe,EAAMle,GACpC,OAAO+jB,GAAInnB,KAAMonB,EAAOlkB,EAAUoe,EAAMle,IAEzCikB,IAAK,SAAUD,EAAOlkB,EAAUoe,EAAMle,GACrC,OAAO+jB,GAAInnB,KAAMonB,EAAOlkB,EAAUoe,EAAMle,EAAI,IAE7CokB,IAAK,SAAUJ,EAAOlkB,EAAUE,GAC/B,IAAImlB,EAAW5mB,EACf,GAAKylB,GAASA,EAAMY,gBAAkBZ,EAAMmB,UAW3C,OARAA,EAAYnB,EAAMmB,UAClBtlB,GAAQmkB,EAAMsC,gBAAiBlC,IAC9Be,EAAUlhB,UACTkhB,EAAUG,SAAW,IAAMH,EAAUlhB,UACrCkhB,EAAUG,SACXH,EAAUrlB,SACVqlB,EAAUzK,SAEJ9d,KAER,GAAsB,iBAAVonB,EAAqB,CAGhC,IAAMzlB,KAAQylB,EACbpnB,KAAKwnB,IAAK7lB,EAAMuB,EAAUkkB,EAAOzlB,IAElC,OAAO3B,KAWR,OATkB,IAAbkD,GAA0C,mBAAbA,IAGjCE,EAAKF,EACLA,OAAW+C,IAEA,IAAP7C,IACJA,EAAK8jB,IAEClnB,KAAKuE,KAAM,WACjBtB,GAAOskB,MAAMhL,OAAQvc,KAAMonB,EAAOhkB,EAAIF,QAMzC,IAKC2qB,GAAe,wBAGfC,GAAW,oCAEXC,GAAe,6BAGhB,SAASC,GAAoBvqB,EAAMiX,GAClC,OAAKlX,GAAUC,EAAM,UACpBD,GAA+B,KAArBkX,EAAQnZ,SAAkBmZ,EAAUA,EAAQhI,WAAY,OAE3DzP,GAAQQ,GAAO+V,SAAU,SAAW,IAGrC/V,EAIR,SAASwqB,GAAexqB,GAEvB,OADAA,EAAK9B,MAAyC,OAAhC8B,EAAKjB,aAAc,SAAsB,IAAMiB,EAAK9B,KAC3D8B,EAER,SAASyqB,GAAezqB,GAOvB,MAN2C,WAApCA,EAAK9B,MAAQ,IAAKrB,MAAO,EAAG,GAClCmD,EAAK9B,KAAO8B,EAAK9B,KAAKrB,MAAO,GAE7BmD,EAAKwK,gBAAiB,QAGhBxK,EAGR,SAAS0qB,GAAgBvsB,EAAKwsB,GAC7B,IAAIjsB,EAAG2X,EAAGnY,EAAgB0sB,EAAUC,EAAUjG,EAE9C,GAAuB,IAAlB+F,EAAK7sB,SAAV,CAKA,GAAKkgB,EAASD,QAAS5f,KAEtBymB,EADW5G,EAASzd,IAAKpC,GACPymB,QAKjB,IAAM1mB,KAFN8f,EAASlF,OAAQ6R,EAAM,iBAET/F,EACb,IAAMlmB,EAAI,EAAG2X,EAAIuO,EAAQ1mB,GAAO4B,OAAQpB,EAAI2X,EAAG3X,IAC9Cc,GAAOskB,MAAMrN,IAAKkU,EAAMzsB,EAAM0mB,EAAQ1mB,GAAQQ,IAO7Cuf,EAASF,QAAS5f,KACtBysB,EAAW3M,EAASzB,OAAQre,GAC5B0sB,EAAWrrB,GAAOsC,OAAQ,GAAI8oB,GAE9B3M,EAASL,IAAK+M,EAAME,KAkBtB,SAASC,GAAUC,EAAY3b,EAAMrO,EAAUiiB,GAG9C5T,EAAOtS,EAAMsS,GAEb,IAAI+T,EAAUjiB,EAAO4hB,EAASkI,EAAYxsB,EAAMC,EAC/CC,EAAI,EACJ2X,EAAI0U,EAAWjrB,OACfmrB,EAAW5U,EAAI,EACf/R,EAAQ8K,EAAM,GACd8b,EAAkBttB,EAAY0G,GAG/B,GAAK4mB,GACG,EAAJ7U,GAA0B,iBAAV/R,IAChB3G,GAAQ6jB,YAAc6I,GAASrmB,KAAMM,GACxC,OAAOymB,EAAWjqB,KAAM,SAAUyV,GACjC,IAAId,EAAOsV,EAAW5pB,GAAIoV,GACrB2U,IACJ9b,EAAM,GAAM9K,EAAMtH,KAAMT,KAAMga,EAAOd,EAAK0V,SAE3CL,GAAUrV,EAAMrG,EAAMrO,EAAUiiB,KAIlC,GAAK3M,IAEJnV,GADAiiB,EAAWN,GAAezT,EAAM2b,EAAY,GAAIhnB,eAAe,EAAOgnB,EAAY/H,IACjE/T,WAEmB,IAA/BkU,EAASla,WAAWnJ,SACxBqjB,EAAWjiB,GAIPA,GAAS8hB,GAAU,CAOvB,IALAgI,GADAlI,EAAUtjB,GAAOwB,IAAKmhB,GAAQgB,EAAU,UAAYqH,KAC/B1qB,OAKbpB,EAAI2X,EAAG3X,IACdF,EAAO2kB,EAEFzkB,IAAMusB,IACVzsB,EAAOgB,GAAO0C,MAAO1D,GAAM,GAAM,GAG5BwsB,GAIJxrB,GAAOoB,MAAOkiB,EAASX,GAAQ3jB,EAAM,YAIvCuC,EAAS/D,KAAM+tB,EAAYrsB,GAAKF,EAAME,GAGvC,GAAKssB,EAOJ,IANAvsB,EAAMqkB,EAASA,EAAQhjB,OAAS,GAAIiE,cAGpCvE,GAAOwB,IAAK8hB,EAAS2H,IAGf/rB,EAAI,EAAGA,EAAIssB,EAAYtsB,IAC5BF,EAAOskB,EAASpkB,GACX4iB,GAAYtd,KAAMxF,EAAKN,MAAQ,MAClC8f,EAASxB,OAAQhe,EAAM,eACxBgB,GAAOwF,SAAUvG,EAAKD,KAEjBA,EAAKL,KAA8C,YAArCK,EAAKN,MAAQ,IAAKgC,cAG/BV,GAAO4rB,WAAa5sB,EAAKH,UAC7BmB,GAAO4rB,SAAU5sB,EAAKL,IAAK,CAC1BC,MAAOI,EAAKJ,OAASI,EAAKO,aAAc,UACtCN,GASJH,EAASE,EAAK6E,YAAYT,QAAS0nB,GAAc,IAAM9rB,EAAMC,IAQnE,OAAOssB,EAGR,SAASjS,GAAQ9Y,EAAMP,EAAU4rB,GAKhC,IAJA,IAAI7sB,EACH4kB,EAAQ3jB,EAAWD,GAAO4M,OAAQ3M,EAAUO,GAASA,EACrDtB,EAAI,EAE4B,OAAvBF,EAAO4kB,EAAO1kB,IAAeA,IAChC2sB,GAA8B,IAAlB7sB,EAAKV,UACtB0B,GAAO8rB,UAAWnJ,GAAQ3jB,IAGtBA,EAAKW,aACJksB,GAAY3L,EAAYlhB,IAC5B4jB,GAAeD,GAAQ3jB,EAAM,WAE9BA,EAAKW,WAAWC,YAAaZ,IAI/B,OAAOwB,EAGRR,GAAOsC,OAAQ,CACduhB,cAAe,SAAU8H,GACxB,OAAOA,GAGRjpB,MAAO,SAAUlC,EAAMurB,EAAeC,GACrC,IAAI9sB,EAAG2X,EAAGoV,EAAaC,EA1INvtB,EAAKwsB,EACnB5qB,EA0IFmC,EAAQlC,EAAKyhB,WAAW,GACxBkK,EAASjM,EAAY1f,GAGtB,KAAMrC,GAAQ+jB,gBAAsC,IAAlB1hB,EAAKlC,UAAoC,KAAlBkC,EAAKlC,UAC3D0B,GAAOmE,SAAU3D,IAOnB,IAHA0rB,EAAevJ,GAAQjgB,GAGjBxD,EAAI,EAAG2X,GAFboV,EAActJ,GAAQniB,IAEOF,OAAQpB,EAAI2X,EAAG3X,IAvJ5BP,EAwJLstB,EAAa/sB,GAxJHisB,EAwJQe,EAAchtB,QAvJzCqB,EAGc,WAHdA,EAAW4qB,EAAK5qB,SAASG,gBAGAkhB,GAAepd,KAAM7F,EAAID,MACrDysB,EAAKja,QAAUvS,EAAIuS,QAGK,UAAb3Q,GAAqC,aAAbA,IACnC4qB,EAAKhJ,aAAexjB,EAAIwjB,cAoJxB,GAAK4J,EACJ,GAAKC,EAIJ,IAHAC,EAAcA,GAAetJ,GAAQniB,GACrC0rB,EAAeA,GAAgBvJ,GAAQjgB,GAEjCxD,EAAI,EAAG2X,EAAIoV,EAAY3rB,OAAQpB,EAAI2X,EAAG3X,IAC3CgsB,GAAgBe,EAAa/sB,GAAKgtB,EAAchtB,SAGjDgsB,GAAgB1qB,EAAMkC,GAWxB,OAL2B,GAD3BwpB,EAAevJ,GAAQjgB,EAAO,WACZpC,QACjBsiB,GAAesJ,GAAeC,GAAUxJ,GAAQniB,EAAM,WAIhDkC,GAGRopB,UAAW,SAAU5qB,GAKpB,IAJA,IAAImd,EAAM7d,EAAM9B,EACfoc,EAAU9a,GAAOskB,MAAMxJ,QACvB5b,EAAI,OAE6B8D,KAAxBxC,EAAOU,EAAOhC,IAAqBA,IAC5C,GAAK4e,EAAYtd,GAAS,CACzB,GAAO6d,EAAO7d,EAAMge,EAASvb,SAAc,CAC1C,GAAKob,EAAK+G,OACT,IAAM1mB,KAAQ2f,EAAK+G,OACbtK,EAASpc,GACbsB,GAAOskB,MAAMhL,OAAQ9Y,EAAM9B,GAI3BsB,GAAOqmB,YAAa7lB,EAAM9B,EAAM2f,EAAKuH,QAOxCplB,EAAMge,EAASvb,cAAYD,EAEvBxC,EAAMie,EAASxb,WAInBzC,EAAMie,EAASxb,cAAYD,OAOhChD,GAAOG,GAAGmC,OAAQ,CACjB8pB,OAAQ,SAAUnsB,GACjB,OAAOqZ,GAAQvc,KAAMkD,GAAU,IAGhCqZ,OAAQ,SAAUrZ,GACjB,OAAOqZ,GAAQvc,KAAMkD,IAGtBX,KAAM,SAAUwF,GACf,OAAOkY,EAAQjgB,KAAM,SAAU+H,GAC9B,YAAiB9B,IAAV8B,EACN9E,GAAOV,KAAMvC,MACbA,KAAKsU,QAAQ/P,KAAM,WACK,IAAlBvE,KAAKuB,UAAoC,KAAlBvB,KAAKuB,UAAqC,IAAlBvB,KAAKuB,WACxDvB,KAAK8G,YAAciB,MAGpB,KAAMA,EAAOrD,UAAUnB,SAG3B+rB,OAAQ,WACP,OAAOf,GAAUvuB,KAAM0E,UAAW,SAAUjB,GACpB,IAAlBzD,KAAKuB,UAAoC,KAAlBvB,KAAKuB,UAAqC,IAAlBvB,KAAKuB,UAC3CysB,GAAoBhuB,KAAMyD,GAChCd,YAAac,MAKvB8rB,QAAS,WACR,OAAOhB,GAAUvuB,KAAM0E,UAAW,SAAUjB,GAC3C,GAAuB,IAAlBzD,KAAKuB,UAAoC,KAAlBvB,KAAKuB,UAAqC,IAAlBvB,KAAKuB,SAAiB,CACzE,IAAIqE,EAASooB,GAAoBhuB,KAAMyD,GACvCmC,EAAO4pB,aAAc/rB,EAAMmC,EAAO8M,gBAKrC+c,OAAQ,WACP,OAAOlB,GAAUvuB,KAAM0E,UAAW,SAAUjB,GACtCzD,KAAK4C,YACT5C,KAAK4C,WAAW4sB,aAAc/rB,EAAMzD,SAKvC0vB,MAAO,WACN,OAAOnB,GAAUvuB,KAAM0E,UAAW,SAAUjB,GACtCzD,KAAK4C,YACT5C,KAAK4C,WAAW4sB,aAAc/rB,EAAMzD,KAAKuU,gBAK5CD,MAAO,WAIN,IAHA,IAAI7Q,EACHtB,EAAI,EAE2B,OAAtBsB,EAAOzD,KAAMmC,IAAeA,IACd,IAAlBsB,EAAKlC,WAGT0B,GAAO8rB,UAAWnJ,GAAQniB,GAAM,IAGhCA,EAAKqD,YAAc,IAIrB,OAAO9G,MAGR2F,MAAO,SAAUqpB,EAAeC,GAI/B,OAHAD,EAAiC,MAAjBA,GAAgCA,EAChDC,EAAyC,MAArBA,EAA4BD,EAAgBC,EAEzDjvB,KAAKyE,IAAK,WAChB,OAAOxB,GAAO0C,MAAO3F,KAAMgvB,EAAeC,MAI5CL,KAAM,SAAU7mB,GACf,OAAOkY,EAAQjgB,KAAM,SAAU+H,GAC9B,IAAItE,EAAOzD,KAAM,IAAO,GACvBmC,EAAI,EACJ2X,EAAI9Z,KAAKuD,OAEV,QAAe0C,IAAV8B,GAAyC,IAAlBtE,EAAKlC,SAChC,OAAOkC,EAAK0M,UAIb,GAAsB,iBAAVpI,IAAuB8lB,GAAapmB,KAAMM,KACpDud,IAAWR,GAASzX,KAAMtF,IAAW,CAAE,GAAI,KAAQ,GAAIpE,eAAkB,CAE1EoE,EAAQ9E,GAAO6jB,cAAe/e,GAE9B,IACC,KAAQ5F,EAAI2X,EAAG3X,IAIS,KAHvBsB,EAAOzD,KAAMmC,IAAO,IAGVZ,WACT0B,GAAO8rB,UAAWnJ,GAAQniB,GAAM,IAChCA,EAAK0M,UAAYpI,GAInBtE,EAAO,EAGN,MAAQkJ,KAGNlJ,GACJzD,KAAKsU,QAAQgb,OAAQvnB,IAEpB,KAAMA,EAAOrD,UAAUnB,SAG3BosB,YAAa,WACZ,IAAIlJ,EAAU,GAGd,OAAO8H,GAAUvuB,KAAM0E,UAAW,SAAUjB,GAC3C,IAAI8O,EAASvS,KAAK4C,WAEbK,GAAOkE,QAASnH,KAAMymB,GAAY,IACtCxjB,GAAO8rB,UAAWnJ,GAAQ5lB,OACrBuS,GACJA,EAAOqd,aAAcnsB,EAAMzD,QAK3BymB,MAILxjB,GAAOsB,KAAM,CACZsrB,SAAU,SACVC,UAAW,UACXN,aAAc,SACdO,YAAa,QACbC,WAAY,eACV,SAAUtsB,EAAMusB,GAClBhtB,GAAOG,GAAIM,GAAS,SAAUR,GAO7B,IANA,IAAIiB,EACHC,EAAM,GACN8rB,EAASjtB,GAAQC,GACjB2B,EAAOqrB,EAAO3sB,OAAS,EACvBpB,EAAI,EAEGA,GAAK0C,EAAM1C,IAClBgC,EAAQhC,IAAM0C,EAAO7E,KAAOA,KAAK2F,OAAO,GACxC1C,GAAQitB,EAAQ/tB,IAAO8tB,GAAY9rB,GAInCvD,EAAKD,MAAOyD,EAAKD,EAAMH,OAGxB,OAAOhE,KAAKkE,UAAWE,MAGzB,IAAI+rB,GAAY,IAAI3nB,OAAQ,KAAOua,EAAO,kBAAmB,KAEzDqN,GAAc,MAGdC,GAAY,SAAU5sB,GAKxB,IAAIuoB,EAAOvoB,EAAK+D,cAAc6H,YAM9B,OAJM2c,GAASA,EAAKsE,SACnBtE,EAAOjsB,IAGDisB,EAAKuE,iBAAkB9sB,IAG5B+sB,GAAO,SAAU/sB,EAAM+B,EAAShB,GACnC,IAAIJ,EAAKV,EACR+sB,EAAM,GAGP,IAAM/sB,KAAQ8B,EACbirB,EAAK/sB,GAASD,EAAK8f,MAAO7f,GAC1BD,EAAK8f,MAAO7f,GAAS8B,EAAS9B,GAM/B,IAAMA,KAHNU,EAAMI,EAAS/D,KAAMgD,GAGP+B,EACb/B,EAAK8f,MAAO7f,GAAS+sB,EAAK/sB,GAG3B,OAAOU,GAIJssB,GAAY,IAAIloB,OAAQ0a,EAAUpV,KAAM,KAAO,KAiJnD,SAAS6iB,GAAQltB,EAAMC,EAAMktB,GAC5B,IAAIC,EAAOC,EAAUC,EAAU3sB,EAC9B4sB,EAAeZ,GAAY3oB,KAAM/D,GAMjC6f,EAAQ9f,EAAK8f,MAoEd,OAlEAqN,EAAWA,GAAYP,GAAW5sB,MAgBjCW,EAAMwsB,EAASK,iBAAkBvtB,IAAUktB,EAAUltB,GAEhDstB,GAAgB5sB,IAkBpBA,EAAMA,EAAIiC,QAASkC,GAAU,YAAUtC,GAG3B,KAAR7B,GAAe+e,EAAY1f,KAC/BW,EAAMnB,GAAOsgB,MAAO9f,EAAMC,KAQrBtC,GAAQ8vB,kBAAoBf,GAAU1oB,KAAMrD,IAASssB,GAAUjpB,KAAM/D,KAG1EmtB,EAAQtN,EAAMsN,MACdC,EAAWvN,EAAMuN,SACjBC,EAAWxN,EAAMwN,SAGjBxN,EAAMuN,SAAWvN,EAAMwN,SAAWxN,EAAMsN,MAAQzsB,EAChDA,EAAMwsB,EAASC,MAGftN,EAAMsN,MAAQA,EACdtN,EAAMuN,SAAWA,EACjBvN,EAAMwN,SAAWA,SAIJ9qB,IAAR7B,EAINA,EAAM,GACNA,EAIF,SAAS+sB,GAAcC,EAAaC,GAGnC,MAAO,CACNrtB,IAAK,WACJ,IAAKotB,IASL,OAASpxB,KAAKgE,IAAMqtB,GAAS1wB,MAAOX,KAAM0E,kBALlC1E,KAAKgE,OA3OhB,WAIC,SAASstB,IAGR,GAAM1M,EAAN,CAIA2M,EAAUhO,MAAMiO,QAAU,+EAE1B5M,EAAIrB,MAAMiO,QACT,4HAGDzqB,EAAgBpE,YAAa4uB,GAAY5uB,YAAaiiB,GAEtD,IAAI6M,EAAW1xB,GAAOwwB,iBAAkB3L,GACxC8M,EAAoC,OAAjBD,EAASniB,IAG5BqiB,EAAsE,KAA9CC,EAAoBH,EAASI,YAIrDjN,EAAIrB,MAAMuO,MAAQ,MAClBC,EAA6D,KAAzCH,EAAoBH,EAASK,OAIjDE,EAAgE,KAAzCJ,EAAoBH,EAASZ,OAMpDjM,EAAIrB,MAAM0O,SAAW,WACrBC,EAAiE,KAA9CN,EAAoBhN,EAAIuN,YAAc,GAEzDprB,EAAgBlE,YAAa0uB,GAI7B3M,EAAM,MAGP,SAASgN,EAAoBQ,GAC5B,OAAOjsB,KAAKksB,MAAOC,WAAYF,IAGhC,IAAIV,EAAkBM,EAAsBE,EAAkBH,EAC7DQ,EAAyBZ,EACzBJ,EAAY3xB,EAAS0C,cAAe,OACpCsiB,EAAMhlB,EAAS0C,cAAe,OAGzBsiB,EAAIrB,QAMVqB,EAAIrB,MAAMiP,eAAiB,cAC3B5N,EAAIM,WAAW,GAAO3B,MAAMiP,eAAiB,GAC7CpxB,GAAQqxB,gBAA+C,gBAA7B7N,EAAIrB,MAAMiP,eAEpCvvB,GAAOsC,OAAQnE,GAAS,CACvBsxB,kBAAmB,WAElB,OADApB,IACOU,GAERd,eAAgB,WAEf,OADAI,IACOS,GAERY,cAAe,WAEd,OADArB,IACOI,GAERkB,mBAAoB,WAEnB,OADAtB,IACOK,GAERkB,cAAe,WAEd,OADAvB,IACOY,GAYRY,qBAAsB,WACrB,IAAIC,EAAOtN,EAAIuN,EAASC,EAmCxB,OAlCgC,MAA3BV,IACJQ,EAAQnzB,EAAS0C,cAAe,SAChCmjB,EAAK7lB,EAAS0C,cAAe,MAC7B0wB,EAAUpzB,EAAS0C,cAAe,OAElCywB,EAAMxP,MAAMiO,QAAU,2DACtB/L,EAAGlC,MAAMiO,QAAU,0CAKnB/L,EAAGlC,MAAM2P,OAAS,MAClBF,EAAQzP,MAAM2P,OAAS,MAQvBF,EAAQzP,MAAMC,QAAU,QAExBzc,EACEpE,YAAaowB,GACbpwB,YAAa8iB,GACb9iB,YAAaqwB,GAEfC,EAAUlzB,GAAOwwB,iBAAkB9K,GACnC8M,EAA4BY,SAAUF,EAAQC,OAAQ,IACrDC,SAAUF,EAAQG,eAAgB,IAClCD,SAAUF,EAAQI,kBAAmB,MAAW5N,EAAG6N,aAEpDvsB,EAAgBlE,YAAakwB,IAEvBR,MAvIV,GAsPA,IAAIgB,GAAc,CAAE,SAAU,MAAO,MACpCC,GAAa5zB,EAAS0C,cAAe,OAAQihB,MAC7CkQ,GAAc,GAkBf,SAASC,GAAehwB,GACvB,IAAIiwB,EAAQ1wB,GAAO2wB,SAAUlwB,IAAU+vB,GAAa/vB,GAEpD,OAAKiwB,IAGAjwB,KAAQ8vB,GACL9vB,EAED+vB,GAAa/vB,GAxBrB,SAAyBA,GAGxB,IAAImwB,EAAUnwB,EAAM,GAAIkd,cAAgBld,EAAKpD,MAAO,GACnD6B,EAAIoxB,GAAYhwB,OAEjB,MAAQpB,IAEP,IADAuB,EAAO6vB,GAAapxB,GAAM0xB,KACbL,GACZ,OAAO9vB,EAeoBowB,CAAgBpwB,IAAUA,GAIxD,IAKCqwB,GAAe,4BACfC,GAAU,CAAE/B,SAAU,WAAYgC,WAAY,SAAUzQ,QAAS,SACjE0Q,GAAqB,CACpBC,cAAe,IACfC,WAAY,OAGd,SAASC,GAAmBrvB,EAAO+C,EAAOusB,GAIzC,IAAI1sB,EAAUqb,EAAQ5V,KAAMtF,GAC5B,OAAOH,EAGNzB,KAAKouB,IAAK,EAAG3sB,EAAS,IAAQ0sB,GAAY,KAAU1sB,EAAS,IAAO,MACpEG,EAGF,SAASysB,GAAoB/wB,EAAMgxB,EAAWC,EAAKC,EAAaC,EAAQC,GACvE,IAAI1yB,EAAkB,UAAdsyB,EAAwB,EAAI,EACnCK,EAAQ,EACRC,EAAQ,EACRC,EAAc,EAGf,GAAKN,KAAUC,EAAc,SAAW,WACvC,OAAO,EAGR,KAAQxyB,EAAI,EAAGA,GAAK,EAKN,WAARuyB,IACJM,GAAe/xB,GAAOwgB,IAAKhgB,EAAMixB,EAAMxR,EAAW/gB,IAAK,EAAMyyB,IAIxDD,GAmBQ,YAARD,IACJK,GAAS9xB,GAAOwgB,IAAKhgB,EAAM,UAAYyf,EAAW/gB,IAAK,EAAMyyB,IAIjD,WAARF,IACJK,GAAS9xB,GAAOwgB,IAAKhgB,EAAM,SAAWyf,EAAW/gB,GAAM,SAAS,EAAMyyB,MAtBvEG,GAAS9xB,GAAOwgB,IAAKhgB,EAAM,UAAYyf,EAAW/gB,IAAK,EAAMyyB,GAGhD,YAARF,EACJK,GAAS9xB,GAAOwgB,IAAKhgB,EAAM,SAAWyf,EAAW/gB,GAAM,SAAS,EAAMyyB,GAItEE,GAAS7xB,GAAOwgB,IAAKhgB,EAAM,SAAWyf,EAAW/gB,GAAM,SAAS,EAAMyyB,IAoCzE,OAhBMD,GAA8B,GAAfE,IAIpBE,GAAS5uB,KAAKouB,IAAK,EAAGpuB,KAAK8uB,KAC1BxxB,EAAM,SAAWgxB,EAAW,GAAI7T,cAAgB6T,EAAUn0B,MAAO,IACjEu0B,EACAE,EACAD,EACA,MAIM,GAGDC,EAAQC,EAGhB,SAASE,GAAkBzxB,EAAMgxB,EAAWK,GAG3C,IAAIF,EAASvE,GAAW5sB,GAKvBkxB,IADmBvzB,GAAQsxB,qBAAuBoC,IAEE,eAAnD7xB,GAAOwgB,IAAKhgB,EAAM,aAAa,EAAOmxB,GACvCO,EAAmBR,EAEnBvyB,EAAMuuB,GAAQltB,EAAMgxB,EAAWG,GAC/BQ,EAAa,SAAWX,EAAW,GAAI7T,cAAgB6T,EAAUn0B,MAAO,GAIzE,GAAK6vB,GAAU1oB,KAAMrF,GAAQ,CAC5B,IAAM0yB,EACL,OAAO1yB,EAERA,EAAM,OAyCP,QAlCQhB,GAAQsxB,qBAAuBiC,IAMrCvzB,GAAQ0xB,wBAA0BtvB,GAAUC,EAAM,OAI3C,SAARrB,IAICkwB,WAAYlwB,IAA0D,WAAjDa,GAAOwgB,IAAKhgB,EAAM,WAAW,EAAOmxB,KAG1DnxB,EAAK4xB,iBAAiB9xB,SAEtBoxB,EAAiE,eAAnD1xB,GAAOwgB,IAAKhgB,EAAM,aAAa,EAAOmxB,IAKpDO,EAAmBC,KAAc3xB,KAEhCrB,EAAMqB,EAAM2xB,MAKdhzB,EAAMkwB,WAAYlwB,IAAS,GAI1BoyB,GACC/wB,EACAgxB,EACAK,IAAWH,EAAc,SAAW,WACpCQ,EACAP,EAGAxyB,GAEE,KAwTL,SAASkzB,GAAO7xB,EAAM+B,EAAS+b,EAAMnc,EAAKmwB,GACzC,OAAO,IAAID,GAAM1xB,UAAUP,KAAMI,EAAM+B,EAAS+b,EAAMnc,EAAKmwB,GAtT5DtyB,GAAOsC,OAAQ,CAIdiwB,SAAU,CACTC,QAAS,CACRzxB,IAAK,SAAUP,EAAMmtB,GACpB,GAAKA,EAAW,CAGf,IAAIxsB,EAAMusB,GAAQltB,EAAM,WACxB,MAAe,KAARW,EAAa,IAAMA,MAO9B+f,UAAW,CACVuR,yBAAyB,EACzBC,aAAa,EACbC,kBAAkB,EAClBC,aAAa,EACbC,UAAU,EACVC,YAAY,EACZ3B,YAAY,EACZ4B,UAAU,EACVC,YAAY,EACZC,eAAe,EACfC,iBAAiB,EACjBC,SAAS,EACTC,YAAY,EACZC,cAAc,EACdC,YAAY,EACZd,SAAS,EACTe,OAAO,EACPC,SAAS,EACT3S,OAAO,EACP4S,QAAQ,EACRC,QAAQ,EACRC,MAAM,EAGNC,aAAa,EACbC,cAAc,EACdC,aAAa,EACbC,kBAAkB,EAClBC,eAAe,GAKhBrD,SAAU,GAGVrQ,MAAO,SAAU9f,EAAMC,EAAMqE,EAAO+sB,GAGnC,GAAMrxB,GAA0B,IAAlBA,EAAKlC,UAAoC,IAAlBkC,EAAKlC,UAAmBkC,EAAK8f,MAAlE,CAKA,IAAInf,EAAKzC,EAAM2gB,EACd4U,EAAWrW,EAAWnd,GACtBstB,EAAeZ,GAAY3oB,KAAM/D,GACjC6f,EAAQ9f,EAAK8f,MAad,GARMyN,IACLttB,EAAOgwB,GAAewD,IAIvB5U,EAAQrf,GAAOuyB,SAAU9xB,IAAUT,GAAOuyB,SAAU0B,QAGrCjxB,IAAV8B,EA0CJ,OAAKua,GAAS,QAASA,QACwBrc,KAA5C7B,EAAMke,EAAMte,IAAKP,GAAM,EAAOqxB,IAEzB1wB,EAIDmf,EAAO7f,GA7CA,YAHd/B,SAAcoG,KAGc3D,EAAM6e,EAAQ5V,KAAMtF,KAAa3D,EAAK,KACjE2D,EAAQ2b,GAAWjgB,EAAMC,EAAMU,GAG/BzC,EAAO,UAIM,MAAToG,GAAiBA,GAAUA,IAOlB,WAATpG,GAAsBqvB,IAC1BjpB,GAAS3D,GAAOA,EAAK,KAASnB,GAAOkhB,UAAW+S,GAAa,GAAK,OAI7D91B,GAAQqxB,iBAA6B,KAAV1qB,GAAiD,IAAjCrE,EAAK7C,QAAS,gBAC9D0iB,EAAO7f,GAAS,WAIX4e,GAAY,QAASA,QACsBrc,KAA9C8B,EAAQua,EAAMjB,IAAK5d,EAAMsE,EAAO+sB,MAE7B9D,EACJzN,EAAM4T,YAAazzB,EAAMqE,GAEzBwb,EAAO7f,GAASqE,MAkBpB0b,IAAK,SAAUhgB,EAAMC,EAAMoxB,EAAOF,GACjC,IAAIxyB,EAAK6B,EAAKqe,EACb4U,EAAWrW,EAAWnd,GA6BvB,OA5BgB0sB,GAAY3oB,KAAM/D,KAMjCA,EAAOgwB,GAAewD,KAIvB5U,EAAQrf,GAAOuyB,SAAU9xB,IAAUT,GAAOuyB,SAAU0B,KAGtC,QAAS5U,IACtBlgB,EAAMkgB,EAAMte,IAAKP,GAAM,EAAMqxB,SAIjB7uB,IAAR7D,IACJA,EAAMuuB,GAAQltB,EAAMC,EAAMkxB,IAId,WAARxyB,GAAoBsB,KAAQwwB,KAChC9xB,EAAM8xB,GAAoBxwB,IAIZ,KAAVoxB,GAAgBA,GACpB7wB,EAAMquB,WAAYlwB,IACD,IAAV0yB,GAAkBsC,SAAUnzB,GAAQA,GAAO,EAAI7B,GAGhDA,KAITa,GAAOsB,KAAM,CAAE,SAAU,SAAW,SAAU6D,EAAIqsB,GACjDxxB,GAAOuyB,SAAUf,GAAc,CAC9BzwB,IAAK,SAAUP,EAAMmtB,EAAUkE,GAC9B,GAAKlE,EAIJ,OAAOmD,GAAatsB,KAAMxE,GAAOwgB,IAAKhgB,EAAM,aAQxCA,EAAK4xB,iBAAiB9xB,QAAWE,EAAK4zB,wBAAwBxG,MAIjEqE,GAAkBzxB,EAAMgxB,EAAWK,GAHnCtE,GAAM/sB,EAAMuwB,GAAS,WACpB,OAAOkB,GAAkBzxB,EAAMgxB,EAAWK,MAM9CzT,IAAK,SAAU5d,EAAMsE,EAAO+sB,GAC3B,IAAIltB,EACHgtB,EAASvE,GAAW5sB,GAIpB6zB,GAAsBl2B,GAAQyxB,iBACT,aAApB+B,EAAO3C,SAIR0C,GADkB2C,GAAsBxC,IAEY,eAAnD7xB,GAAOwgB,IAAKhgB,EAAM,aAAa,EAAOmxB,GACvCN,EAAWQ,EACVN,GACC/wB,EACAgxB,EACAK,EACAH,EACAC,GAED,EAqBF,OAjBKD,GAAe2C,IACnBhD,GAAYnuB,KAAK8uB,KAChBxxB,EAAM,SAAWgxB,EAAW,GAAI7T,cAAgB6T,EAAUn0B,MAAO,IACjEgyB,WAAYsC,EAAQH,IACpBD,GAAoB/wB,EAAMgxB,EAAW,UAAU,EAAOG,GACtD,KAKGN,IAAc1sB,EAAUqb,EAAQ5V,KAAMtF,KACb,QAA3BH,EAAS,IAAO,QAElBnE,EAAK8f,MAAOkR,GAAc1sB,EAC1BA,EAAQ9E,GAAOwgB,IAAKhgB,EAAMgxB,IAGpBJ,GAAmB5wB,EAAMsE,EAAOusB,OAK1CrxB,GAAOuyB,SAAS3D,WAAaV,GAAc/vB,GAAQwxB,mBAClD,SAAUnvB,EAAMmtB,GACf,GAAKA,EACJ,OAAS0B,WAAY3B,GAAQltB,EAAM,gBAClCA,EAAK4zB,wBAAwBE,KAC5B/G,GAAM/sB,EAAM,CAAEouB,WAAY,GAAK,WAC9B,OAAOpuB,EAAK4zB,wBAAwBE,QAEnC,OAMPt0B,GAAOsB,KAAM,CACZizB,OAAQ,GACRC,QAAS,GACTC,OAAQ,SACN,SAAUC,EAAQC,GACpB30B,GAAOuyB,SAAUmC,EAASC,GAAW,CACpCC,OAAQ,SAAU9vB,GAOjB,IANA,IAAI5F,EAAI,EACP21B,EAAW,GAGXC,EAAyB,iBAAVhwB,EAAqBA,EAAMI,MAAO,KAAQ,CAAEJ,GAEpD5F,EAAI,EAAGA,IACd21B,EAAUH,EAASzU,EAAW/gB,GAAMy1B,GACnCG,EAAO51B,IAAO41B,EAAO51B,EAAI,IAAO41B,EAAO,GAGzC,OAAOD,IAIO,WAAXH,IACJ10B,GAAOuyB,SAAUmC,EAASC,GAASvW,IAAMgT,MAI3CpxB,GAAOG,GAAGmC,OAAQ,CACjBke,IAAK,SAAU/f,EAAMqE,GACpB,OAAOkY,EAAQjgB,KAAM,SAAUyD,EAAMC,EAAMqE,GAC1C,IAAI6sB,EAAQ1vB,EACXT,EAAM,GACNtC,EAAI,EAEL,GAAK4D,MAAMC,QAAStC,GAAS,CAI5B,IAHAkxB,EAASvE,GAAW5sB,GACpByB,EAAMxB,EAAKH,OAEHpB,EAAI+C,EAAK/C,IAChBsC,EAAKf,EAAMvB,IAAQc,GAAOwgB,IAAKhgB,EAAMC,EAAMvB,IAAK,EAAOyyB,GAGxD,OAAOnwB,EAGR,YAAiBwB,IAAV8B,EACN9E,GAAOsgB,MAAO9f,EAAMC,EAAMqE,GAC1B9E,GAAOwgB,IAAKhgB,EAAMC,IACjBA,EAAMqE,EAA0B,EAAnBrD,UAAUnB,aAQ5BN,GAAOqyB,MAAQA,IAET1xB,UAAY,CACjBE,YAAawxB,GACbjyB,KAAM,SAAUI,EAAM+B,EAAS+b,EAAMnc,EAAKmwB,EAAQrR,GACjDlkB,KAAKyD,KAAOA,EACZzD,KAAKuhB,KAAOA,EACZvhB,KAAKu1B,OAASA,GAAUtyB,GAAOsyB,OAAO5P,SACtC3lB,KAAKwF,QAAUA,EACfxF,KAAKsS,MAAQtS,KAAKmrB,IAAMnrB,KAAK4Z,MAC7B5Z,KAAKoF,IAAMA,EACXpF,KAAKkkB,KAAOA,IAAUjhB,GAAOkhB,UAAW5C,GAAS,GAAK,OAEvD3H,IAAK,WACJ,IAAI0I,EAAQgT,GAAM0C,UAAWh4B,KAAKuhB,MAElC,OAAOe,GAASA,EAAMte,IACrBse,EAAMte,IAAKhE,MACXs1B,GAAM0C,UAAUrS,SAAS3hB,IAAKhE,OAEhCi4B,IAAK,SAAUC,GACd,IAAIC,EACH7V,EAAQgT,GAAM0C,UAAWh4B,KAAKuhB,MAoB/B,OAlBKvhB,KAAKwF,QAAQ4yB,SACjBp4B,KAAKq4B,IAAMF,EAAQl1B,GAAOsyB,OAAQv1B,KAAKu1B,QACtC2C,EAASl4B,KAAKwF,QAAQ4yB,SAAWF,EAAS,EAAG,EAAGl4B,KAAKwF,QAAQ4yB,UAG9Dp4B,KAAKq4B,IAAMF,EAAQD,EAEpBl4B,KAAKmrB,KAAQnrB,KAAKoF,IAAMpF,KAAKsS,OAAU6lB,EAAQn4B,KAAKsS,MAE/CtS,KAAKwF,QAAQ8yB,MACjBt4B,KAAKwF,QAAQ8yB,KAAK73B,KAAMT,KAAKyD,KAAMzD,KAAKmrB,IAAKnrB,MAGzCsiB,GAASA,EAAMjB,IACnBiB,EAAMjB,IAAKrhB,MAEXs1B,GAAM0C,UAAUrS,SAAStE,IAAKrhB,MAExBA,QAIOqD,KAAKO,UAAY0xB,GAAM1xB,WAEvC0xB,GAAM0C,UAAY,CACjBrS,SAAU,CACT3hB,IAAK,SAAU4f,GACd,IAAIhS,EAIJ,OAA6B,IAAxBgS,EAAMngB,KAAKlC,UACa,MAA5BqiB,EAAMngB,KAAMmgB,EAAMrC,OAAoD,MAAlCqC,EAAMngB,KAAK8f,MAAOK,EAAMrC,MACrDqC,EAAMngB,KAAMmgB,EAAMrC,OAO1B3P,EAAS3O,GAAOwgB,IAAKG,EAAMngB,KAAMmgB,EAAMrC,KAAM,MAGhB,SAAX3P,EAAwBA,EAAJ,GAEvCyP,IAAK,SAAUuC,GAKT3gB,GAAOs1B,GAAGD,KAAM1U,EAAMrC,MAC1Bte,GAAOs1B,GAAGD,KAAM1U,EAAMrC,MAAQqC,GACK,IAAxBA,EAAMngB,KAAKlC,WACtB0B,GAAOuyB,SAAU5R,EAAMrC,OAC6B,MAAnDqC,EAAMngB,KAAK8f,MAAOmQ,GAAe9P,EAAMrC,OAGxCqC,EAAMngB,KAAMmgB,EAAMrC,MAASqC,EAAMuH,IAFjCloB,GAAOsgB,MAAOK,EAAMngB,KAAMmgB,EAAMrC,KAAMqC,EAAMuH,IAAMvH,EAAMM,UAU5CsU,UAAYlD,GAAM0C,UAAUS,WAAa,CACxDpX,IAAK,SAAUuC,GACTA,EAAMngB,KAAKlC,UAAYqiB,EAAMngB,KAAKb,aACtCghB,EAAMngB,KAAMmgB,EAAMrC,MAASqC,EAAMuH,OAKpCloB,GAAOsyB,OAAS,CACfmD,OAAQ,SAAUC,GACjB,OAAOA,GAERC,MAAO,SAAUD,GAChB,MAAO,GAAMxyB,KAAK0yB,IAAKF,EAAIxyB,KAAK2yB,IAAO,GAExCnT,SAAU,SAGX1iB,GAAOs1B,GAAKjD,GAAM1xB,UAAUP,KAG5BJ,GAAOs1B,GAAGD,KAAO,GAKjB,IACCS,GAAOC,GAkrBH9oB,GAEH+oB,GAnrBDC,GAAW,yBACXC,GAAO,cAER,SAASC,KACHJ,MACqB,IAApBp5B,EAASy5B,QAAoBt5B,GAAOu5B,sBACxCv5B,GAAOu5B,sBAAuBF,IAE9Br5B,GAAO2e,WAAY0a,GAAUn2B,GAAOs1B,GAAGgB,UAGxCt2B,GAAOs1B,GAAGiB,QAKZ,SAASC,KAIR,OAHA15B,GAAO2e,WAAY,WAClBqa,QAAQ9yB,IAEA8yB,GAAQ7N,KAAKC,MAIvB,SAASuO,GAAO/3B,EAAMg4B,GACrB,IAAI3M,EACH7qB,EAAI,EACJggB,EAAQ,CAAE+Q,OAAQvxB,GAKnB,IADAg4B,EAAeA,EAAe,EAAI,EAC1Bx3B,EAAI,EAAGA,GAAK,EAAIw3B,EAEvBxX,EAAO,UADP6K,EAAQ9J,EAAW/gB,KACSggB,EAAO,UAAY6K,GAAUrrB,EAO1D,OAJKg4B,IACJxX,EAAMsT,QAAUtT,EAAM0O,MAAQlvB,GAGxBwgB,EAGR,SAASyX,GAAa7xB,EAAOwZ,EAAMsY,GAKlC,IAJA,IAAIjW,EACH4K,GAAesL,GAAUC,SAAUxY,IAAU,IAAK7gB,OAAQo5B,GAAUC,SAAU,MAC9E/f,EAAQ,EACRzW,EAASirB,EAAWjrB,OACbyW,EAAQzW,EAAQyW,IACvB,GAAO4J,EAAQ4K,EAAYxU,GAAQvZ,KAAMo5B,EAAWtY,EAAMxZ,GAGzD,OAAO6b,EAsNV,SAASkW,GAAWr2B,EAAMu2B,EAAYx0B,GACrC,IAAIoM,EACHqoB,EACAjgB,EAAQ,EACRzW,EAASu2B,GAAUI,WAAW32B,OAC9ByZ,EAAW/Z,GAAO0Z,WAAWI,OAAQ,kBAG7Byc,EAAK/1B,OAEb+1B,EAAO,WACN,GAAKS,EACJ,OAAO,EAYR,IAVA,IAAIE,EAAcpB,IAASU,KAC1B3a,EAAY3Y,KAAKouB,IAAK,EAAGsF,EAAUO,UAAYP,EAAUzB,SAAW+B,GAKpEjC,EAAU,GADHpZ,EAAY+a,EAAUzB,UAAY,GAEzCpe,EAAQ,EACRzW,EAASs2B,EAAUQ,OAAO92B,OAEnByW,EAAQzW,EAAQyW,IACvB6f,EAAUQ,OAAQrgB,GAAQie,IAAKC,GAMhC,OAHAlb,EAASmB,WAAY1a,EAAM,CAAEo2B,EAAW3B,EAASpZ,IAG5CoZ,EAAU,GAAK30B,EACZub,GAIFvb,GACLyZ,EAASmB,WAAY1a,EAAM,CAAEo2B,EAAW,EAAG,IAI5C7c,EAASoB,YAAa3a,EAAM,CAAEo2B,KACvB,IAERA,EAAY7c,EAAS1B,QAAS,CAC7B7X,KAAMA,EACNonB,MAAO5nB,GAAOsC,OAAQ,GAAIy0B,GAC1BM,KAAMr3B,GAAOsC,QAAQ,EAAM,CAC1Bg1B,cAAe,GACfhF,OAAQtyB,GAAOsyB,OAAO5P,UACpBngB,GACHg1B,mBAAoBR,EACpBS,gBAAiBj1B,EACjB40B,UAAWrB,IAASU,KACpBrB,SAAU5yB,EAAQ4yB,SAClBiC,OAAQ,GACRT,YAAa,SAAUrY,EAAMnc,GAC5B,IAAIwe,EAAQ3gB,GAAOqyB,MAAO7xB,EAAMo2B,EAAUS,KAAM/Y,EAAMnc,EACrDy0B,EAAUS,KAAKC,cAAehZ,IAAUsY,EAAUS,KAAK/E,QAExD,OADAsE,EAAUQ,OAAOz5B,KAAMgjB,GAChBA,GAERnB,KAAM,SAAUiY,GACf,IAAI1gB,EAAQ,EAIXzW,EAASm3B,EAAUb,EAAUQ,OAAO92B,OAAS,EAC9C,GAAK02B,EACJ,OAAOj6B,KAGR,IADAi6B,GAAU,EACFjgB,EAAQzW,EAAQyW,IACvB6f,EAAUQ,OAAQrgB,GAAQie,IAAK,GAUhC,OANKyC,GACJ1d,EAASmB,WAAY1a,EAAM,CAAEo2B,EAAW,EAAG,IAC3C7c,EAASoB,YAAa3a,EAAM,CAAEo2B,EAAWa,KAEzC1d,EAASuB,WAAY9a,EAAM,CAAEo2B,EAAWa,IAElC16B,QAGT6qB,EAAQgP,EAAUhP,MAInB,KA/HD,SAAqBA,EAAO0P,GAC3B,IAAIvgB,EAAOtW,EAAM6xB,EAAQxtB,EAAOua,EAGhC,IAAMtI,KAAS6Q,EAed,GAbA0K,EAASgF,EADT72B,EAAOmd,EAAW7G,IAElBjS,EAAQ8iB,EAAO7Q,GACVjU,MAAMC,QAAS+B,KACnBwtB,EAASxtB,EAAO,GAChBA,EAAQ8iB,EAAO7Q,GAAUjS,EAAO,IAG5BiS,IAAUtW,IACdmnB,EAAOnnB,GAASqE,SACT8iB,EAAO7Q,KAGfsI,EAAQrf,GAAOuyB,SAAU9xB,KACX,WAAY4e,EAMzB,IAAMtI,KALNjS,EAAQua,EAAMuV,OAAQ9vB,UACf8iB,EAAOnnB,GAICqE,EACNiS,KAAS6Q,IAChBA,EAAO7Q,GAAUjS,EAAOiS,GACxBugB,EAAevgB,GAAUub,QAI3BgF,EAAe72B,GAAS6xB,EA6F1BoF,CAAY9P,EAAOgP,EAAUS,KAAKC,eAE1BvgB,EAAQzW,EAAQyW,IAEvB,GADApI,EAASkoB,GAAUI,WAAYlgB,GAAQvZ,KAAMo5B,EAAWp2B,EAAMonB,EAAOgP,EAAUS,MAM9E,OAJKj5B,EAAYuQ,EAAO6Q,QACvBxf,GAAOsf,YAAasX,EAAUp2B,KAAMo2B,EAAUS,KAAKpe,OAAQuG,KAC1D7Q,EAAO6Q,KAAKmY,KAAMhpB,IAEbA,EAyBT,OArBA3O,GAAOwB,IAAKomB,EAAO+O,GAAaC,GAE3Bx4B,EAAYw4B,EAAUS,KAAKhoB,QAC/BunB,EAAUS,KAAKhoB,MAAM7R,KAAMgD,EAAMo2B,GAIlCA,EACEtc,SAAUsc,EAAUS,KAAK/c,UACzBzT,KAAM+vB,EAAUS,KAAKxwB,KAAM+vB,EAAUS,KAAKO,UAC1Ctf,KAAMse,EAAUS,KAAK/e,MACrBwB,OAAQ8c,EAAUS,KAAKvd,QAEzB9Z,GAAOs1B,GAAGuC,MACT73B,GAAOsC,OAAQi0B,EAAM,CACpB/1B,KAAMA,EACNs3B,KAAMlB,EACN3d,MAAO2d,EAAUS,KAAKpe,SAIjB2d,EAGR52B,GAAO62B,UAAY72B,GAAOsC,OAAQu0B,GAAW,CAE5CC,SAAU,CACTiB,IAAK,CAAE,SAAUzZ,EAAMxZ,GACtB,IAAI6b,EAAQ5jB,KAAK45B,YAAarY,EAAMxZ,GAEpC,OADA2b,GAAWE,EAAMngB,KAAM8d,EAAM0B,EAAQ5V,KAAMtF,GAAS6b,GAC7CA,KAITqX,QAAS,SAAUpQ,EAAOrmB,GACpBnD,EAAYwpB,IAChBrmB,EAAWqmB,EACXA,EAAQ,CAAE,MAEVA,EAAQA,EAAM5d,MAAO2N,GAOtB,IAJA,IAAI2G,EACHvH,EAAQ,EACRzW,EAASsnB,EAAMtnB,OAERyW,EAAQzW,EAAQyW,IACvBuH,EAAOsJ,EAAO7Q,GACd8f,GAAUC,SAAUxY,GAASuY,GAAUC,SAAUxY,IAAU,GAC3DuY,GAAUC,SAAUxY,GAAOiB,QAAShe,IAItC01B,WAAY,CA3Wb,SAA2Bz2B,EAAMonB,EAAOyP,GACvC,IAAI/Y,EAAMxZ,EAAO4c,EAAQrC,EAAO4Y,EAASC,EAAWC,EAAgB5X,EACnE6X,EAAQ,UAAWxQ,GAAS,WAAYA,EACxCkQ,EAAO/6B,KACP2tB,EAAO,GACPpK,EAAQ9f,EAAK8f,MACb8V,EAAS51B,EAAKlC,UAAY+hB,GAAoB7f,GAC9C63B,EAAW7Z,EAASzd,IAAKP,EAAM,UA6BhC,IAAM8d,KA1BA+Y,EAAKpe,QAEa,OADvBoG,EAAQrf,GAAOsf,YAAa9e,EAAM,OACvB83B,WACVjZ,EAAMiZ,SAAW,EACjBL,EAAU5Y,EAAMhO,MAAM8H,KACtBkG,EAAMhO,MAAM8H,KAAO,WACZkG,EAAMiZ,UACXL,MAIH5Y,EAAMiZ,WAENR,EAAKhe,OAAQ,WAGZge,EAAKhe,OAAQ,WACZuF,EAAMiZ,WACAt4B,GAAOiZ,MAAOzY,EAAM,MAAOF,QAChC+e,EAAMhO,MAAM8H,YAOFyO,EAEb,GADA9iB,EAAQ8iB,EAAOtJ,GACV2X,GAASzxB,KAAMM,GAAU,CAG7B,UAFO8iB,EAAOtJ,GACdoD,EAASA,GAAoB,WAAV5c,EACdA,KAAYsxB,EAAS,OAAS,QAAW,CAI7C,GAAe,SAAVtxB,IAAoBuzB,QAAiCr1B,IAArBq1B,EAAU/Z,GAK9C,SAJA8X,GAAS,EAOX1L,EAAMpM,GAAS+Z,GAAYA,EAAU/Z,IAAUte,GAAOsgB,MAAO9f,EAAM8d,GAMrE,IADA4Z,GAAal4B,GAAO2D,cAAeikB,MAChB5nB,GAAO2D,cAAe+mB,GA8DzC,IAAMpM,KAzDD8Z,GAA2B,IAAlB53B,EAAKlC,WAMlB+4B,EAAKkB,SAAW,CAAEjY,EAAMiY,SAAUjY,EAAMkY,UAAWlY,EAAMmY,WAIlC,OADvBN,EAAiBE,GAAYA,EAAS9X,WAErC4X,EAAiB3Z,EAASzd,IAAKP,EAAM,YAGrB,UADjB+f,EAAUvgB,GAAOwgB,IAAKhgB,EAAM,cAEtB23B,EACJ5X,EAAU4X,GAIV9W,GAAU,CAAE7gB,IAAQ,GACpB23B,EAAiB33B,EAAK8f,MAAMC,SAAW4X,EACvC5X,EAAUvgB,GAAOwgB,IAAKhgB,EAAM,WAC5B6gB,GAAU,CAAE7gB,OAKG,WAAZ+f,GAAoC,iBAAZA,GAAgD,MAAlB4X,IACrB,SAAhCn4B,GAAOwgB,IAAKhgB,EAAM,WAGhB03B,IACLJ,EAAKjxB,KAAM,WACVyZ,EAAMC,QAAU4X,IAEM,MAAlBA,IACJ5X,EAAUD,EAAMC,QAChB4X,EAA6B,SAAZ5X,EAAqB,GAAKA,IAG7CD,EAAMC,QAAU,iBAKd8W,EAAKkB,WACTjY,EAAMiY,SAAW,SACjBT,EAAKhe,OAAQ,WACZwG,EAAMiY,SAAWlB,EAAKkB,SAAU,GAChCjY,EAAMkY,UAAYnB,EAAKkB,SAAU,GACjCjY,EAAMmY,UAAYpB,EAAKkB,SAAU,MAKnCL,GAAY,EACExN,EAGPwN,IACAG,EACC,WAAYA,IAChBjC,EAASiC,EAASjC,QAGnBiC,EAAW7Z,EAASxB,OAAQxc,EAAM,SAAU,CAAE+f,QAAS4X,IAInDzW,IACJ2W,EAASjC,QAAUA,GAIfA,GACJ/U,GAAU,CAAE7gB,IAAQ,GAKrBs3B,EAAKjxB,KAAM,WASV,IAAMyX,KAJA8X,GACL/U,GAAU,CAAE7gB,IAEbge,EAASlF,OAAQ9Y,EAAM,UACTkqB,EACb1qB,GAAOsgB,MAAO9f,EAAM8d,EAAMoM,EAAMpM,OAMnC4Z,EAAYvB,GAAaP,EAASiC,EAAU/Z,GAAS,EAAGA,EAAMwZ,GACtDxZ,KAAQ+Z,IACfA,EAAU/Z,GAAS4Z,EAAU7oB,MACxB+mB,IACJ8B,EAAU/1B,IAAM+1B,EAAU7oB,MAC1B6oB,EAAU7oB,MAAQ,MAuMrBqpB,UAAW,SAAUn3B,EAAU+qB,GACzBA,EACJuK,GAAUI,WAAW1X,QAAShe,GAE9Bs1B,GAAUI,WAAWt5B,KAAM4D,MAK9BvB,GAAO24B,MAAQ,SAAUA,EAAOrG,EAAQnyB,GACvC,IAAI61B,EAAM2C,GAA0B,iBAAVA,EAAqB34B,GAAOsC,OAAQ,GAAIq2B,GAAU,CAC3Ef,SAAUz3B,IAAOA,GAAMmyB,GACtBl0B,EAAYu6B,IAAWA,EACxBxD,SAAUwD,EACVrG,OAAQnyB,GAAMmyB,GAAUA,IAAWl0B,EAAYk0B,IAAYA,GAoC5D,OAhCKtyB,GAAOs1B,GAAG/Q,IACdyR,EAAIb,SAAW,EAGc,iBAAjBa,EAAIb,WACVa,EAAIb,YAAYn1B,GAAOs1B,GAAGsD,OAC9B5C,EAAIb,SAAWn1B,GAAOs1B,GAAGsD,OAAQ5C,EAAIb,UAGrCa,EAAIb,SAAWn1B,GAAOs1B,GAAGsD,OAAOlW,UAMjB,MAAbsT,EAAI/c,QAA+B,IAAd+c,EAAI/c,QAC7B+c,EAAI/c,MAAQ,MAIb+c,EAAIxI,IAAMwI,EAAI4B,SAEd5B,EAAI4B,SAAW,WACTx5B,EAAY43B,EAAIxI,MACpBwI,EAAIxI,IAAIhwB,KAAMT,MAGVi5B,EAAI/c,OACRjZ,GAAOmf,QAASpiB,KAAMi5B,EAAI/c,QAIrB+c,GAGRh2B,GAAOG,GAAGmC,OAAQ,CACjBu2B,OAAQ,SAAUF,EAAOG,EAAIxG,EAAQ/wB,GAGpC,OAAOxE,KAAK6P,OAAQyT,IAAqBG,IAAK,UAAW,GAAIc,OAG3Dnf,MAAM42B,QAAS,CAAEvG,QAASsG,GAAMH,EAAOrG,EAAQ/wB,IAElDw3B,QAAS,SAAUza,EAAMqa,EAAOrG,EAAQ/wB,GACvC,IAAI8P,EAAQrR,GAAO2D,cAAe2a,GACjC0a,EAASh5B,GAAO24B,MAAOA,EAAOrG,EAAQ/wB,GACtC03B,EAAc,WAGb,IAAInB,EAAOjB,GAAW95B,KAAMiD,GAAOsC,OAAQ,GAAIgc,GAAQ0a,IAGlD3nB,GAASmN,EAASzd,IAAKhE,KAAM,YACjC+6B,EAAKtY,MAAM,IAMd,OAFAyZ,EAAYC,OAASD,EAEd5nB,IAA0B,IAAjB2nB,EAAO/f,MACtBlc,KAAKuE,KAAM23B,GACXl8B,KAAKkc,MAAO+f,EAAO/f,MAAOggB,IAE5BzZ,KAAM,SAAU9gB,EAAMghB,EAAY+X,GACjC,IAAI0B,EAAY,SAAU9Z,GACzB,IAAIG,EAAOH,EAAMG,YACVH,EAAMG,KACbA,EAAMiY,IAYP,MATqB,iBAAT/4B,IACX+4B,EAAU/X,EACVA,EAAahhB,EACbA,OAAOsE,GAEH0c,GACJ3iB,KAAKkc,MAAOva,GAAQ,KAAM,IAGpB3B,KAAKuE,KAAM,WACjB,IAAI6d,GAAU,EACbpI,EAAgB,MAARrY,GAAgBA,EAAO,aAC/B06B,EAASp5B,GAAOo5B,OAChB/a,EAAOG,EAASzd,IAAKhE,MAEtB,GAAKga,EACCsH,EAAMtH,IAAWsH,EAAMtH,GAAQyI,MACnC2Z,EAAW9a,EAAMtH,SAGlB,IAAMA,KAASsH,EACTA,EAAMtH,IAAWsH,EAAMtH,GAAQyI,MAAQ0W,GAAK1xB,KAAMuS,IACtDoiB,EAAW9a,EAAMtH,IAKpB,IAAMA,EAAQqiB,EAAO94B,OAAQyW,KACvBqiB,EAAQriB,GAAQvW,OAASzD,MACnB,MAAR2B,GAAgB06B,EAAQriB,GAAQkC,QAAUva,IAE5C06B,EAAQriB,GAAQ+gB,KAAKtY,KAAMiY,GAC3BtY,GAAU,EACVia,EAAO/2B,OAAQ0U,EAAO,KAOnBoI,GAAYsY,GAChBz3B,GAAOmf,QAASpiB,KAAM2B,MAIzBw6B,OAAQ,SAAUx6B,GAIjB,OAHc,IAATA,IACJA,EAAOA,GAAQ,MAET3B,KAAKuE,KAAM,WACjB,IAAIyV,EACHsH,EAAOG,EAASzd,IAAKhE,MACrBkc,EAAQoF,EAAM3f,EAAO,SACrB2gB,EAAQhB,EAAM3f,EAAO,cACrB06B,EAASp5B,GAAOo5B,OAChB94B,EAAS2Y,EAAQA,EAAM3Y,OAAS,EAajC,IAVA+d,EAAK6a,QAAS,EAGdl5B,GAAOiZ,MAAOlc,KAAM2B,EAAM,IAErB2gB,GAASA,EAAMG,MACnBH,EAAMG,KAAKhiB,KAAMT,MAAM,GAIlBga,EAAQqiB,EAAO94B,OAAQyW,KACvBqiB,EAAQriB,GAAQvW,OAASzD,MAAQq8B,EAAQriB,GAAQkC,QAAUva,IAC/D06B,EAAQriB,GAAQ+gB,KAAKtY,MAAM,GAC3B4Z,EAAO/2B,OAAQ0U,EAAO,IAKxB,IAAMA,EAAQ,EAAGA,EAAQzW,EAAQyW,IAC3BkC,EAAOlC,IAAWkC,EAAOlC,GAAQmiB,QACrCjgB,EAAOlC,GAAQmiB,OAAO17B,KAAMT,aAKvBshB,EAAK6a,YAKfl5B,GAAOsB,KAAM,CAAE,SAAU,OAAQ,QAAU,SAAU6D,EAAI1E,GACxD,IAAI44B,EAAQr5B,GAAOG,GAAIM,GACvBT,GAAOG,GAAIM,GAAS,SAAUk4B,EAAOrG,EAAQ/wB,GAC5C,OAAgB,MAATo3B,GAAkC,kBAAVA,EAC9BU,EAAM37B,MAAOX,KAAM0E,WACnB1E,KAAKg8B,QAAStC,GAAOh2B,GAAM,GAAQk4B,EAAOrG,EAAQ/wB,MAKrDvB,GAAOsB,KAAM,CACZg4B,UAAW7C,GAAO,QAClB8C,QAAS9C,GAAO,QAChB+C,YAAa/C,GAAO,UACpBgD,OAAQ,CAAEjH,QAAS,QACnBkH,QAAS,CAAElH,QAAS,QACpBmH,WAAY,CAAEnH,QAAS,WACrB,SAAU/xB,EAAMmnB,GAClB5nB,GAAOG,GAAIM,GAAS,SAAUk4B,EAAOrG,EAAQ/wB,GAC5C,OAAOxE,KAAKg8B,QAASnR,EAAO+Q,EAAOrG,EAAQ/wB,MAI7CvB,GAAOo5B,OAAS,GAChBp5B,GAAOs1B,GAAGiB,KAAO,WAChB,IAAIsB,EACH34B,EAAI,EACJk6B,EAASp5B,GAAOo5B,OAIjB,IAFAtD,GAAQ7N,KAAKC,MAELhpB,EAAIk6B,EAAO94B,OAAQpB,KAC1B24B,EAAQuB,EAAQl6B,OAGCk6B,EAAQl6B,KAAQ24B,GAChCuB,EAAO/2B,OAAQnD,IAAK,GAIhBk6B,EAAO94B,QACZN,GAAOs1B,GAAG9V,OAEXsW,QAAQ9yB,GAGThD,GAAOs1B,GAAGuC,MAAQ,SAAUA,GAC3B73B,GAAOo5B,OAAOz7B,KAAMk6B,GACpB73B,GAAOs1B,GAAGjmB,SAGXrP,GAAOs1B,GAAGgB,SAAW,GACrBt2B,GAAOs1B,GAAGjmB,MAAQ,WACZ0mB,KAILA,IAAa,EACbI,OAGDn2B,GAAOs1B,GAAG9V,KAAO,WAChBuW,GAAa,MAGd/1B,GAAOs1B,GAAGsD,OAAS,CAClBgB,KAAM,IACNC,KAAM,IAGNnX,SAAU,KAKX1iB,GAAOG,GAAG25B,MAAQ,SAAUC,EAAMr7B,GAIjC,OAHAq7B,EAAO/5B,GAAOs1B,IAAKt1B,GAAOs1B,GAAGsD,OAAQmB,IAAiBA,EACtDr7B,EAAOA,GAAQ,KAER3B,KAAKkc,MAAOva,EAAM,SAAU8K,EAAM6V,GACxC,IAAI2a,EAAUl9B,GAAO2e,WAAYjS,EAAMuwB,GACvC1a,EAAMG,KAAO,WACZ1iB,GAAOm9B,aAAcD,OAOnB/sB,GAAQtQ,EAAS0C,cAAe,SAEnC22B,GADSr5B,EAAS0C,cAAe,UACpBK,YAAa/C,EAAS0C,cAAe,WAEnD4N,GAAMvO,KAAO,WAIbP,GAAQ+7B,QAA0B,KAAhBjtB,GAAMnI,MAIxB3G,GAAQg8B,YAAcnE,GAAI7kB,UAI1BlE,GAAQtQ,EAAS0C,cAAe,UAC1ByF,MAAQ,IACdmI,GAAMvO,KAAO,QACbP,GAAQi8B,WAA6B,MAAhBntB,GAAMnI,MAI5B,IAAIu1B,GACH5sB,GAAazN,GAAOqN,KAAKI,WAE1BzN,GAAOG,GAAGmC,OAAQ,CACjBkL,KAAM,SAAU/M,EAAMqE,GACrB,OAAOkY,EAAQjgB,KAAMiD,GAAOwN,KAAM/M,EAAMqE,EAA0B,EAAnBrD,UAAUnB,SAG1Dg6B,WAAY,SAAU75B,GACrB,OAAO1D,KAAKuE,KAAM,WACjBtB,GAAOs6B,WAAYv9B,KAAM0D,QAK5BT,GAAOsC,OAAQ,CACdkL,KAAM,SAAUhN,EAAMC,EAAMqE,GAC3B,IAAI3D,EAAKke,EACRkb,EAAQ/5B,EAAKlC,SAGd,GAAe,IAAVi8B,GAAyB,IAAVA,GAAyB,IAAVA,EAKnC,MAAkC,oBAAtB/5B,EAAKjB,aACTS,GAAOse,KAAM9d,EAAMC,EAAMqE,IAKlB,IAAVy1B,GAAgBv6B,GAAOmE,SAAU3D,KACrC6e,EAAQrf,GAAOw6B,UAAW/5B,EAAKC,iBAC5BV,GAAOqN,KAAKrD,MAAM3B,KAAK7D,KAAM/D,GAAS45B,QAAWr3B,SAGtCA,IAAV8B,EACW,OAAVA,OACJ9E,GAAOs6B,WAAY95B,EAAMC,GAIrB4e,GAAS,QAASA,QACuBrc,KAA3C7B,EAAMke,EAAMjB,IAAK5d,EAAMsE,EAAOrE,IACzBU,GAGRX,EAAKhB,aAAciB,EAAMqE,EAAQ,IAC1BA,GAGHua,GAAS,QAASA,GAA+C,QAApCle,EAAMke,EAAMte,IAAKP,EAAMC,IACjDU,EAMM,OAHdA,EAAMnB,GAAO4J,KAAK4D,KAAMhN,EAAMC,SAGTuC,EAAY7B,IAGlCq5B,UAAW,CACV97B,KAAM,CACL0f,IAAK,SAAU5d,EAAMsE,GACpB,IAAM3G,GAAQi8B,YAAwB,UAAVt1B,GAC3BvE,GAAUC,EAAM,SAAY,CAC5B,IAAIrB,EAAMqB,EAAKsE,MAKf,OAJAtE,EAAKhB,aAAc,OAAQsF,GACtB3F,IACJqB,EAAKsE,MAAQ3F,GAEP2F,MAMXw1B,WAAY,SAAU95B,EAAMsE,GAC3B,IAAIrE,EACHvB,EAAI,EAIJu7B,EAAY31B,GAASA,EAAMkF,MAAO2N,GAEnC,GAAK8iB,GAA+B,IAAlBj6B,EAAKlC,SACtB,MAAUmC,EAAOg6B,EAAWv7B,KAC3BsB,EAAKwK,gBAAiBvK,MAO1B45B,GAAW,CACVjc,IAAK,SAAU5d,EAAMsE,EAAOrE,GAQ3B,OAPe,IAAVqE,EAGJ9E,GAAOs6B,WAAY95B,EAAMC,GAEzBD,EAAKhB,aAAciB,EAAMA,GAEnBA,IAITT,GAAOsB,KAAMtB,GAAOqN,KAAKrD,MAAM3B,KAAK0X,OAAO/V,MAAO,QAAU,SAAU7E,EAAI1E,GACzE,IAAIi6B,EAASjtB,GAAYhN,IAAUT,GAAO4J,KAAK4D,KAE/CC,GAAYhN,GAAS,SAAUD,EAAMC,EAAM6U,GAC1C,IAAInU,EAAKykB,EACR+U,EAAgBl6B,EAAKC,cAYtB,OAVM4U,IAGLsQ,EAASnY,GAAYktB,GACrBltB,GAAYktB,GAAkBx5B,EAC9BA,EAAqC,MAA/Bu5B,EAAQl6B,EAAMC,EAAM6U,GACzBqlB,EACA,KACDltB,GAAYktB,GAAkB/U,GAExBzkB,KAOT,IAAIy5B,GAAa,sCAChBC,GAAa,gBAwIb,SAASC,GAAkBh2B,GAE1B,OADaA,EAAMkF,MAAO2N,IAAmB,IAC/B9M,KAAM,KAItB,SAASkwB,GAAUv6B,GAClB,OAAOA,EAAKjB,cAAgBiB,EAAKjB,aAAc,UAAa,GAG7D,SAASy7B,GAAgBl2B,GACxB,OAAKhC,MAAMC,QAAS+B,GACZA,EAEc,iBAAVA,GACJA,EAAMkF,MAAO2N,IAEd,GAvJR3X,GAAOG,GAAGmC,OAAQ,CACjBgc,KAAM,SAAU7d,EAAMqE,GACrB,OAAOkY,EAAQjgB,KAAMiD,GAAOse,KAAM7d,EAAMqE,EAA0B,EAAnBrD,UAAUnB,SAG1D26B,WAAY,SAAUx6B,GACrB,OAAO1D,KAAKuE,KAAM,kBACVvE,KAAMiD,GAAOk7B,QAASz6B,IAAUA,QAK1CT,GAAOsC,OAAQ,CACdgc,KAAM,SAAU9d,EAAMC,EAAMqE,GAC3B,IAAI3D,EAAKke,EACRkb,EAAQ/5B,EAAKlC,SAGd,GAAe,IAAVi8B,GAAyB,IAAVA,GAAyB,IAAVA,EAWnC,OAPe,IAAVA,GAAgBv6B,GAAOmE,SAAU3D,KAGrCC,EAAOT,GAAOk7B,QAASz6B,IAAUA,EACjC4e,EAAQrf,GAAO+0B,UAAWt0B,SAGZuC,IAAV8B,EACCua,GAAS,QAASA,QACuBrc,KAA3C7B,EAAMke,EAAMjB,IAAK5d,EAAMsE,EAAOrE,IACzBU,EAGCX,EAAMC,GAASqE,EAGpBua,GAAS,QAASA,GAA+C,QAApCle,EAAMke,EAAMte,IAAKP,EAAMC,IACjDU,EAGDX,EAAMC,IAGds0B,UAAW,CACV/jB,SAAU,CACTjQ,IAAK,SAAUP,GAMd,IAAI26B,EAAWn7B,GAAO4J,KAAK4D,KAAMhN,EAAM,YAEvC,OAAK26B,EACGjL,SAAUiL,EAAU,IAI3BP,GAAWp2B,KAAMhE,EAAKD,WACtBs6B,GAAWr2B,KAAMhE,EAAKD,WACtBC,EAAKuQ,KAEE,GAGA,KAKXmqB,QAAS,CACRE,MAAO,UACPC,QAAS,eAYLl9B,GAAQg8B,cACbn6B,GAAO+0B,UAAU5jB,SAAW,CAC3BpQ,IAAK,SAAUP,GAId,IAAI8O,EAAS9O,EAAKb,WAIlB,OAHK2P,GAAUA,EAAO3P,YACrB2P,EAAO3P,WAAWyR,cAEZ,MAERgN,IAAK,SAAU5d,GAId,IAAI8O,EAAS9O,EAAKb,WACb2P,IACJA,EAAO8B,cAEF9B,EAAO3P,YACX2P,EAAO3P,WAAWyR,kBAOvBpR,GAAOsB,KAAM,CACZ,WACA,WACA,YACA,cACA,cACA,UACA,UACA,SACA,cACA,mBACE,WACFtB,GAAOk7B,QAASn+B,KAAK2D,eAAkB3D,OA4BxCiD,GAAOG,GAAGmC,OAAQ,CACjBg5B,SAAU,SAAUx2B,GACnB,IAAIy2B,EAAY5kB,EAAK6kB,EAAUxuB,EAAW9N,EAAGu8B,EAE7C,OAAKr9B,EAAY0G,GACT/H,KAAKuE,KAAM,SAAUY,GAC3BlC,GAAQjD,MAAOu+B,SAAUx2B,EAAMtH,KAAMT,KAAMmF,EAAG64B,GAAUh+B,WAI1Dw+B,EAAaP,GAAgBl2B,IAEbxE,OACRvD,KAAKuE,KAAM,WAIjB,GAHAk6B,EAAWT,GAAUh+B,MACrB4Z,EAAwB,IAAlB5Z,KAAKuB,UAAoB,IAAMw8B,GAAkBU,GAAa,IAEzD,CACV,IAAMt8B,EAAI,EAAGA,EAAIq8B,EAAWj7B,OAAQpB,IACnC8N,EAAYuuB,EAAYr8B,GACnByX,EAAI/Y,QAAS,IAAMoP,EAAY,KAAQ,IAC3C2J,GAAO3J,EAAY,KAKrByuB,EAAaX,GAAkBnkB,GAC1B6kB,IAAaC,GACjB1+B,KAAKyC,aAAc,QAASi8B,MAMzB1+B,MAGR2+B,YAAa,SAAU52B,GACtB,IAAIy2B,EAAY5kB,EAAK6kB,EAAUxuB,EAAW9N,EAAGu8B,EAE7C,OAAKr9B,EAAY0G,GACT/H,KAAKuE,KAAM,SAAUY,GAC3BlC,GAAQjD,MAAO2+B,YAAa52B,EAAMtH,KAAMT,KAAMmF,EAAG64B,GAAUh+B,UAIvD0E,UAAUnB,QAIhBi7B,EAAaP,GAAgBl2B,IAEbxE,OACRvD,KAAKuE,KAAM,WAMjB,GALAk6B,EAAWT,GAAUh+B,MAGrB4Z,EAAwB,IAAlB5Z,KAAKuB,UAAoB,IAAMw8B,GAAkBU,GAAa,IAEzD,CACV,IAAMt8B,EAAI,EAAGA,EAAIq8B,EAAWj7B,OAAQpB,IAAM,CACzC8N,EAAYuuB,EAAYr8B,GAGxB,OAAgD,EAAxCyX,EAAI/Y,QAAS,IAAMoP,EAAY,KACtC2J,EAAMA,EAAIvT,QAAS,IAAM4J,EAAY,IAAK,KAK5CyuB,EAAaX,GAAkBnkB,GAC1B6kB,IAAaC,GACjB1+B,KAAKyC,aAAc,QAASi8B,MAMzB1+B,KA/BCA,KAAKyQ,KAAM,QAAS,KAkC7BmuB,YAAa,SAAU72B,EAAO82B,GAC7B,IAAIL,EAAYvuB,EAAW9N,EAAG+W,EAC7BvX,SAAcoG,EACd+2B,EAAwB,WAATn9B,GAAqBoE,MAAMC,QAAS+B,GAEpD,OAAK1G,EAAY0G,GACT/H,KAAKuE,KAAM,SAAUpC,GAC3Bc,GAAQjD,MAAO4+B,YACd72B,EAAMtH,KAAMT,KAAMmC,EAAG67B,GAAUh+B,MAAQ6+B,GACvCA,KAKsB,kBAAbA,GAA0BC,EAC9BD,EAAW7+B,KAAKu+B,SAAUx2B,GAAU/H,KAAK2+B,YAAa52B,IAG9Dy2B,EAAaP,GAAgBl2B,GAEtB/H,KAAKuE,KAAM,WACjB,GAAKu6B,EAKJ,IAFA5lB,EAAOjW,GAAQjD,MAETmC,EAAI,EAAGA,EAAIq8B,EAAWj7B,OAAQpB,IACnC8N,EAAYuuB,EAAYr8B,GAGnB+W,EAAK6lB,SAAU9uB,GACnBiJ,EAAKylB,YAAa1uB,GAElBiJ,EAAKqlB,SAAUtuB,aAKIhK,IAAV8B,GAAgC,YAATpG,KAClCsO,EAAY+tB,GAAUh+B,QAIrByhB,EAASJ,IAAKrhB,KAAM,gBAAiBiQ,GAOjCjQ,KAAKyC,cACTzC,KAAKyC,aAAc,QAClBwN,IAAuB,IAAVlI,EACZ,GACA0Z,EAASzd,IAAKhE,KAAM,kBAAqB,SAO/C++B,SAAU,SAAU77B,GACnB,IAAI+M,EAAWxM,EACdtB,EAAI,EAEL8N,EAAY,IAAM/M,EAAW,IAC7B,MAAUO,EAAOzD,KAAMmC,KACtB,GAAuB,IAAlBsB,EAAKlC,WACoE,GAA3E,IAAMw8B,GAAkBC,GAAUv6B,IAAW,KAAM5C,QAASoP,GAC9D,OAAO,EAIT,OAAO,KAOT,IAAI+uB,GAAU,MAEd/7B,GAAOG,GAAGmC,OAAQ,CACjBnD,IAAK,SAAU2F,GACd,IAAIua,EAAOle,EAAKuqB,EACflrB,EAAOzD,KAAM,GAEd,OAAM0E,UAAUnB,QA0BhBorB,EAAkBttB,EAAY0G,GAEvB/H,KAAKuE,KAAM,SAAUpC,GAC3B,IAAIC,EAEmB,IAAlBpC,KAAKuB,WAWE,OANXa,EADIusB,EACE5mB,EAAMtH,KAAMT,KAAMmC,EAAGc,GAAQjD,MAAOoC,OAEpC2F,GAKN3F,EAAM,GAEoB,iBAARA,EAClBA,GAAO,GAEI2D,MAAMC,QAAS5D,KAC1BA,EAAMa,GAAOwB,IAAKrC,EAAK,SAAU2F,GAChC,OAAgB,MAATA,EAAgB,GAAKA,EAAQ,OAItCua,EAAQrf,GAAOg8B,SAAUj/B,KAAK2B,OAAUsB,GAAOg8B,SAAUj/B,KAAKwD,SAASG,iBAGrD,QAAS2e,QAA+Crc,IAApCqc,EAAMjB,IAAKrhB,KAAMoC,EAAK,WAC3DpC,KAAK+H,MAAQ3F,OAzDTqB,GACJ6e,EAAQrf,GAAOg8B,SAAUx7B,EAAK9B,OAC7BsB,GAAOg8B,SAAUx7B,EAAKD,SAASG,iBAG/B,QAAS2e,QACgCrc,KAAvC7B,EAAMke,EAAMte,IAAKP,EAAM,UAElBW,EAMY,iBAHpBA,EAAMX,EAAKsE,OAIH3D,EAAIiC,QAAS24B,GAAS,IAIhB,MAAP56B,EAAc,GAAKA,OAG3B,KAyCHnB,GAAOsC,OAAQ,CACd05B,SAAU,CACT5Z,OAAQ,CACPrhB,IAAK,SAAUP,GAEd,IAAIrB,EAAMa,GAAO4J,KAAK4D,KAAMhN,EAAM,SAClC,OAAc,MAAPrB,EACNA,EAMA27B,GAAkB96B,GAAOV,KAAMkB,MAGlCyK,OAAQ,CACPlK,IAAK,SAAUP,GACd,IAAIsE,EAAOsd,EAAQljB,EAClBqD,EAAU/B,EAAK+B,QACfwU,EAAQvW,EAAK4Q,cACbgT,EAAoB,eAAd5jB,EAAK9B,KACX6iB,EAAS6C,EAAM,KAAO,GACtBkN,EAAMlN,EAAMrN,EAAQ,EAAIxU,EAAQjC,OAUjC,IAPCpB,EADI6X,EAAQ,EACRua,EAGAlN,EAAMrN,EAAQ,EAIX7X,EAAIoyB,EAAKpyB,IAKhB,KAJAkjB,EAAS7f,EAASrD,IAIJiS,UAAYjS,IAAM6X,KAG7BqL,EAAO9Y,YACL8Y,EAAOziB,WAAW2J,WACnB/I,GAAU6hB,EAAOziB,WAAY,aAAiB,CAMjD,GAHAmF,EAAQ9E,GAAQoiB,GAASjjB,MAGpBilB,EACJ,OAAOtf,EAIRyc,EAAO5jB,KAAMmH,GAIf,OAAOyc,GAGRnD,IAAK,SAAU5d,EAAMsE,GACpB,IAAIm3B,EAAW7Z,EACd7f,EAAU/B,EAAK+B,QACfgf,EAASvhB,GAAOgE,UAAWc,GAC3B5F,EAAIqD,EAAQjC,OAEb,MAAQpB,MACPkjB,EAAS7f,EAASrD,IAINiS,UACuD,EAAlEnR,GAAOkE,QAASlE,GAAOg8B,SAAS5Z,OAAOrhB,IAAKqhB,GAAUb,MAEtD0a,GAAY,GAUd,OAHMA,IACLz7B,EAAK4Q,eAAiB,GAEhBmQ,OAOXvhB,GAAOsB,KAAM,CAAE,QAAS,YAAc,WACrCtB,GAAOg8B,SAAUj/B,MAAS,CACzBqhB,IAAK,SAAU5d,EAAMsE,GACpB,GAAKhC,MAAMC,QAAS+B,GACnB,OAAStE,EAAK0Q,SAA2D,EAAjDlR,GAAOkE,QAASlE,GAAQQ,GAAOrB,MAAO2F,KAI3D3G,GAAQ+7B,UACbl6B,GAAOg8B,SAAUj/B,MAAOgE,IAAM,SAAUP,GACvC,OAAwC,OAAjCA,EAAKjB,aAAc,SAAqB,KAAOiB,EAAKsE,UAS9D,IAAI0L,GAAW1T,GAAO0T,SAElB5R,GAAQ,CAAEmG,KAAMkjB,KAAKC,OAErBgU,GAAS,KAKbl8B,GAAOm8B,SAAW,SAAU9d,GAC3B,IAAInP,EAAKktB,EACT,IAAM/d,GAAwB,iBAATA,EACpB,OAAO,KAKR,IACCnP,GAAM,IAAMpS,GAAOu/B,WAAcC,gBAAiBje,EAAM,YACvD,MAAQ3U,IAYV,OAVA0yB,EAAkBltB,GAAOA,EAAI3E,qBAAsB,eAAiB,GAC9D2E,IAAOktB,GACZp8B,GAAOsD,MAAO,iBACb84B,EACCp8B,GAAOwB,IAAK46B,EAAgB3yB,WAAY,SAAUgC,GACjD,OAAOA,EAAG5H,cACPgH,KAAM,MACVwT,IAGInP,GAIR,IAAIqtB,GAAc,kCACjBC,GAA0B,SAAU9yB,GACnCA,EAAEmb,mBAGJ7kB,GAAOsC,OAAQtC,GAAOskB,MAAO,CAE5BU,QAAS,SAAUV,EAAOjG,EAAM7d,EAAMi8B,GAErC,IAAIv9B,EAAGyX,EAAKgJ,EAAK+c,EAAYC,EAAQ/W,EAAQ9K,EAAS8hB,EACrDC,EAAY,CAAEr8B,GAAQ7D,GACtB+B,EAAOX,GAAOP,KAAM8mB,EAAO,QAAWA,EAAM5lB,KAAO4lB,EACnDkB,EAAaznB,GAAOP,KAAM8mB,EAAO,aAAgBA,EAAMlgB,UAAUc,MAAO,KAAQ,GAKjF,GAHAyR,EAAMimB,EAAcjd,EAAMnf,EAAOA,GAAQ7D,EAGlB,IAAlB6D,EAAKlC,UAAoC,IAAlBkC,EAAKlC,WAK5Bi+B,GAAY/3B,KAAM9F,EAAOsB,GAAOskB,MAAMuB,cAIf,EAAvBnnB,EAAKd,QAAS,OAIlBc,GADA8mB,EAAa9mB,EAAKwG,MAAO,MACPoG,QAClBka,EAAWpjB,QAEZu6B,EAASj+B,EAAKd,QAAS,KAAQ,GAAK,KAAOc,GAG3C4lB,EAAQA,EAAOtkB,GAAOiD,SACrBqhB,EACA,IAAItkB,GAAOmnB,MAAOzoB,EAAuB,iBAAV4lB,GAAsBA,IAGhDK,UAAY8X,EAAe,EAAI,EACrCnY,EAAMlgB,UAAYohB,EAAW3a,KAAM,KACnCyZ,EAAMuC,WAAavC,EAAMlgB,UACxB,IAAImB,OAAQ,UAAYigB,EAAW3a,KAAM,iBAAoB,WAC7D,KAGDyZ,EAAM3V,YAAS3L,EACTshB,EAAM3hB,SACX2hB,EAAM3hB,OAASnC,GAIhB6d,EAAe,MAARA,EACN,CAAEiG,GACFtkB,GAAOgE,UAAWqa,EAAM,CAAEiG,IAG3BxJ,EAAU9a,GAAOskB,MAAMxJ,QAASpc,IAAU,GACpC+9B,IAAgB3hB,EAAQkK,UAAmD,IAAxClK,EAAQkK,QAAQtnB,MAAO8C,EAAM6d,IAAtE,CAMA,IAAMoe,IAAiB3hB,EAAQ0M,WAAahpB,EAAUgC,GAAS,CAM9D,IAJAk8B,EAAa5hB,EAAQ8J,cAAgBlmB,EAC/B69B,GAAY/3B,KAAMk4B,EAAah+B,KACpCiY,EAAMA,EAAIhX,YAEHgX,EAAKA,EAAMA,EAAIhX,WACtBk9B,EAAUl/B,KAAMgZ,GAChBgJ,EAAMhJ,EAIFgJ,KAAUnf,EAAK+D,eAAiB5H,IACpCkgC,EAAUl/B,KAAMgiB,EAAIvT,aAAeuT,EAAImd,cAAgBhgC,IAKzDoC,EAAI,EACJ,OAAUyX,EAAMkmB,EAAW39B,QAAYolB,EAAMqC,uBAC5CiW,EAAcjmB,EACd2N,EAAM5lB,KAAW,EAAJQ,EACZw9B,EACA5hB,EAAQiL,UAAYrnB,GAGrBknB,GAAWpH,EAASzd,IAAK4V,EAAK,WAAcxZ,OAAOwoB,OAAQ,OAAUrB,EAAM5lB,OAC1E8f,EAASzd,IAAK4V,EAAK,YAEnBiP,EAAOloB,MAAOiZ,EAAK0H,IAIpBuH,EAAS+W,GAAUhmB,EAAKgmB,KACT/W,EAAOloB,OAASogB,EAAYnH,KAC1C2N,EAAM3V,OAASiX,EAAOloB,MAAOiZ,EAAK0H,IACZ,IAAjBiG,EAAM3V,QACV2V,EAAMS,kBA8CT,OA1CAT,EAAM5lB,KAAOA,EAGP+9B,GAAiBnY,EAAMuD,sBAEpB/M,EAAQ4H,WACqC,IAApD5H,EAAQ4H,SAAShlB,MAAOm/B,EAAUz3B,MAAOiZ,KACzCP,EAAYtd,IAIPm8B,GAAUv+B,EAAYoC,EAAM9B,MAAaF,EAAUgC,MAGvDmf,EAAMnf,EAAMm8B,MAGXn8B,EAAMm8B,GAAW,MAIlB38B,GAAOskB,MAAMuB,UAAYnnB,EAEpB4lB,EAAMqC,wBACViW,EAAYtwB,iBAAkB5N,EAAM89B,IAGrCh8B,EAAM9B,KAED4lB,EAAMqC,wBACViW,EAAYjgB,oBAAqBje,EAAM89B,IAGxCx8B,GAAOskB,MAAMuB,eAAY7iB,EAEpB2c,IACJnf,EAAMm8B,GAAWhd,IAMd2E,EAAM3V,SAKdwb,SAAU,SAAUzrB,EAAM8B,EAAM8jB,GAC/B,IAAI5a,EAAI1J,GAAOsC,OACd,IAAItC,GAAOmnB,MACX7C,EACA,CACC5lB,KAAMA,EACNypB,aAAa,IAIfnoB,GAAOskB,MAAMU,QAAStb,EAAG,KAAMlJ,MAKjCR,GAAOG,GAAGmC,OAAQ,CAEjB0iB,QAAS,SAAUtmB,EAAM2f,GACxB,OAAOthB,KAAKuE,KAAM,WACjBtB,GAAOskB,MAAMU,QAAStmB,EAAM2f,EAAMthB,SAGpCggC,eAAgB,SAAUr+B,EAAM2f,GAC/B,IAAI7d,EAAOzD,KAAM,GACjB,GAAKyD,EACJ,OAAOR,GAAOskB,MAAMU,QAAStmB,EAAM2f,EAAM7d,GAAM,MAMlD,IACCw8B,GAAW,QACXC,GAAQ,SACRC,GAAkB,wCAClBC,GAAe,qCAEhB,SAASC,GAAa1I,EAAQr2B,EAAKg/B,EAAapmB,GAC/C,IAAIxW,EAEJ,GAAKqC,MAAMC,QAAS1E,GAGnB2B,GAAOsB,KAAMjD,EAAK,SAAUa,EAAG2Y,GACzBwlB,GAAeL,GAASx4B,KAAMkwB,GAGlCzd,EAAKyd,EAAQ7c,GAKbulB,GACC1I,EAAS,KAAqB,iBAAN7c,GAAuB,MAALA,EAAY3Y,EAAI,IAAO,IACjE2Y,EACAwlB,EACApmB,UAKG,GAAMomB,GAAiC,WAAlBx9B,EAAQxB,GAUnC4Y,EAAKyd,EAAQr2B,QAPb,IAAMoC,KAAQpC,EACb++B,GAAa1I,EAAS,IAAMj0B,EAAO,IAAKpC,EAAKoC,GAAQ48B,EAAapmB,GAYrEjX,GAAOs9B,MAAQ,SAAU73B,EAAG43B,GAC3B,IAAI3I,EACH6I,EAAI,GACJtmB,EAAM,SAAU7L,EAAKoyB,GAGpB,IAAI14B,EAAQ1G,EAAYo/B,GACvBA,IACAA,EAEDD,EAAGA,EAAEj9B,QAAWm9B,mBAAoBryB,GAAQ,IAC3CqyB,mBAA6B,MAAT34B,EAAgB,GAAKA,IAG5C,GAAU,MAALW,EACJ,MAAO,GAIR,GAAK3C,MAAMC,QAAS0C,IAASA,EAAE7E,SAAWZ,GAAO6C,cAAe4C,GAG/DzF,GAAOsB,KAAMmE,EAAG,WACfwR,EAAKla,KAAK0D,KAAM1D,KAAK+H,cAOtB,IAAM4vB,KAAUjvB,EACf23B,GAAa1I,EAAQjvB,EAAGivB,GAAU2I,EAAapmB,GAKjD,OAAOsmB,EAAE1yB,KAAM,MAGhB7K,GAAOG,GAAGmC,OAAQ,CACjBo7B,UAAW,WACV,OAAO19B,GAAOs9B,MAAOvgC,KAAK4gC,mBAE3BA,eAAgB,WACf,OAAO5gC,KAAKyE,IAAK,WAGhB,IAAI8L,EAAWtN,GAAOse,KAAMvhB,KAAM,YAClC,OAAOuQ,EAAWtN,GAAOgE,UAAWsJ,GAAavQ,OAC9C6P,OAAQ,WACX,IAAIlO,EAAO3B,KAAK2B,KAGhB,OAAO3B,KAAK0D,OAAST,GAAQjD,MAAO2Y,GAAI,cACvCynB,GAAa34B,KAAMzH,KAAKwD,YAAe28B,GAAgB14B,KAAM9F,KAC3D3B,KAAKmU,UAAY0Q,GAAepd,KAAM9F,MACtC8C,IAAK,SAAU2D,EAAI3E,GACtB,IAAIrB,EAAMa,GAAQjD,MAAOoC,MAEzB,OAAY,MAAPA,EACG,KAGH2D,MAAMC,QAAS5D,GACZa,GAAOwB,IAAKrC,EAAK,SAAUA,GACjC,MAAO,CAAEsB,KAAMD,EAAKC,KAAMqE,MAAO3F,EAAIiE,QAAS65B,GAAO,WAIhD,CAAEx8B,KAAMD,EAAKC,KAAMqE,MAAO3F,EAAIiE,QAAS65B,GAAO,WAClDl8B,SAKN,IACC68B,GAAM,OACNC,GAAQ,OACRC,GAAa,gBACbC,GAAW,6BAIXC,GAAa,iBACbC,GAAY,QAWZhH,GAAa,GAObiH,GAAa,GAGbC,GAAW,KAAK1gC,OAAQ,KAGxB2gC,GAAezhC,EAAS0C,cAAe,KAKxC,SAASg/B,GAA6BC,GAGrC,OAAO,SAAUC,EAAoB5kB,GAED,iBAAvB4kB,IACX5kB,EAAO4kB,EACPA,EAAqB,KAGtB,IAAIC,EACHt/B,EAAI,EACJu/B,EAAYF,EAAmB79B,cAAcsJ,MAAO2N,IAAmB,GAExE,GAAKvZ,EAAYub,GAGhB,MAAU6kB,EAAWC,EAAWv/B,KAGR,MAAlBs/B,EAAU,IACdA,EAAWA,EAASnhC,MAAO,IAAO,KAChCihC,EAAWE,GAAaF,EAAWE,IAAc,IAAKjf,QAAS5F,KAI/D2kB,EAAWE,GAAaF,EAAWE,IAAc,IAAK7gC,KAAMgc,IAQnE,SAAS+kB,GAA+BJ,EAAW/7B,EAASi1B,EAAiBmH,GAE5E,IAAIC,EAAY,GACfC,EAAqBP,IAAcJ,GAEpC,SAASY,EAASN,GACjB,IAAIrtB,EAcJ,OAbAytB,EAAWJ,IAAa,EACxBx+B,GAAOsB,KAAMg9B,EAAWE,IAAc,GAAI,SAAU9lB,EAAGqmB,GACtD,IAAIC,EAAsBD,EAAoBx8B,EAASi1B,EAAiBmH,GACxE,MAAoC,iBAAxBK,GACVH,GAAqBD,EAAWI,GAKtBH,IACD1tB,EAAW6tB,QADf,GAHNz8B,EAAQk8B,UAAUlf,QAASyf,GAC3BF,EAASE,IACF,KAKF7tB,EAGR,OAAO2tB,EAASv8B,EAAQk8B,UAAW,MAAUG,EAAW,MAASE,EAAS,KAM3E,SAASG,GAAYt8B,EAAQhE,GAC5B,IAAIyM,EAAKxI,EACRs8B,EAAcl/B,GAAOm/B,aAAaD,aAAe,GAElD,IAAM9zB,KAAOzM,OACQqE,IAAfrE,EAAKyM,MACP8zB,EAAa9zB,GAAQzI,EAAWC,IAAUA,EAAO,KAAUwI,GAAQzM,EAAKyM,IAO5E,OAJKxI,GACJ5C,GAAOsC,QAAQ,EAAMK,EAAQC,GAGvBD,EA/ERy7B,GAAartB,KAAOP,GAASO,KAgP7B/Q,GAAOsC,OAAQ,CAGd88B,OAAQ,EAGRC,aAAc,GACdC,KAAM,GAENH,aAAc,CACbI,IAAK/uB,GAASO,KACdrS,KAAM,MACN8gC,QAxRgB,4DAwRQh7B,KAAMgM,GAASivB,UACvCljC,QAAQ,EACRmjC,aAAa,EACbC,OAAO,EACPC,YAAa,mDAcbC,QAAS,CACR9H,IAAKoG,GACL7+B,KAAM,aACNqsB,KAAM,YACNzc,IAAK,4BACL4wB,KAAM,qCAGPtpB,SAAU,CACTtH,IAAK,UACLyc,KAAM,SACNmU,KAAM,YAGPC,eAAgB,CACf7wB,IAAK,cACL5P,KAAM,eACNwgC,KAAM,gBAKPE,WAAY,CAGXC,SAAUj3B,OAGVk3B,aAAa,EAGbC,YAAathB,KAAKC,MAGlBshB,WAAYpgC,GAAOm8B,UAOpB+C,YAAa,CACZK,KAAK,EACLr/B,SAAS,IAOXmgC,UAAW,SAAU19B,EAAQ29B,GAC5B,OAAOA,EAGNrB,GAAYA,GAAYt8B,EAAQ3C,GAAOm/B,cAAgBmB,GAGvDrB,GAAYj/B,GAAOm/B,aAAcx8B,IAGnC49B,cAAelC,GAA6BpH,IAC5CuJ,cAAenC,GAA6BH,IAG5CuC,KAAM,SAAUlB,EAAKh9B,GAGA,iBAARg9B,IACXh9B,EAAUg9B,EACVA,OAAMv8B,GAIPT,EAAUA,GAAW,GAErB,IAAIm+B,EAGHC,EAGAC,EACAC,EAGAC,EAGAC,EAGArkB,EAGAskB,EAGA9hC,EAGA+hC,EAGA1D,EAAIv9B,GAAOqgC,UAAW,GAAI99B,GAG1B2+B,EAAkB3D,EAAEr9B,SAAWq9B,EAG/B4D,EAAqB5D,EAAEr9B,UACpBghC,EAAgB5iC,UAAY4iC,EAAgBtgC,QAC9CZ,GAAQkhC,GACRlhC,GAAOskB,MAGRvK,EAAW/Z,GAAO0Z,WAClB0nB,EAAmBphC,GAAOwY,UAAW,eAGrC6oB,EAAa9D,EAAE8D,YAAc,GAG7BC,EAAiB,GACjBC,EAAsB,GAGtBC,EAAW,WAGX7C,EAAQ,CACP7hB,WAAY,EAGZ2kB,kBAAmB,SAAUr2B,GAC5B,IAAIpB,EACJ,GAAK0S,EAAY,CAChB,IAAMmkB,EAAkB,CACvBA,EAAkB,GAClB,MAAU72B,EAAQ+zB,GAAS3zB,KAAMw2B,GAChCC,EAAiB72B,EAAO,GAAItJ,cAAgB,MACzCmgC,EAAiB72B,EAAO,GAAItJ,cAAgB,MAAS,IACrDjD,OAAQuM,EAAO,IAGpBA,EAAQ62B,EAAiBz1B,EAAI1K,cAAgB,KAE9C,OAAgB,MAATsJ,EAAgB,KAAOA,EAAMa,KAAM,OAI3C62B,sBAAuB,WACtB,OAAOhlB,EAAYkkB,EAAwB,MAI5Ce,iBAAkB,SAAUlhC,EAAMqE,GAMjC,OALkB,MAAb4X,IACJjc,EAAO8gC,EAAqB9gC,EAAKC,eAChC6gC,EAAqB9gC,EAAKC,gBAAmBD,EAC9C6gC,EAAgB7gC,GAASqE,GAEnB/H,MAIR6kC,iBAAkB,SAAUljC,GAI3B,OAHkB,MAAbge,IACJ6gB,EAAEsE,SAAWnjC,GAEP3B,MAIRskC,WAAY,SAAU7/B,GACrB,IAAIzC,EACJ,GAAKyC,EACJ,GAAKkb,EAGJiiB,EAAM7kB,OAAQtY,EAAKm9B,EAAMmD,cAIzB,IAAM/iC,KAAQyC,EACb6/B,EAAYtiC,GAAS,CAAEsiC,EAAYtiC,GAAQyC,EAAKzC,IAInD,OAAOhC,MAIRglC,MAAO,SAAUC,GAChB,IAAIC,EAAYD,GAAcR,EAK9B,OAJKd,GACJA,EAAUqB,MAAOE,GAElBp7B,EAAM,EAAGo7B,GACFllC,OAoBV,GAfAgd,EAAS1B,QAASsmB,GAKlBpB,EAAEgC,MAAUA,GAAOhC,EAAEgC,KAAO/uB,GAASO,MAAS,IAC5C3N,QAAS66B,GAAWztB,GAASivB,SAAW,MAG1ClC,EAAE7+B,KAAO6D,EAAQ6V,QAAU7V,EAAQ7D,MAAQ6+B,EAAEnlB,QAAUmlB,EAAE7+B,KAGzD6+B,EAAEkB,WAAclB,EAAEiB,UAAY,KAAM99B,cAAcsJ,MAAO2N,IAAmB,CAAE,IAGxD,MAAjB4lB,EAAE2E,YAAsB,CAC5BnB,EAAYpkC,EAAS0C,cAAe,KAKpC,IACC0hC,EAAUhwB,KAAOwsB,EAAEgC,IAInBwB,EAAUhwB,KAAOgwB,EAAUhwB,KAC3BwsB,EAAE2E,YAAc9D,GAAaqB,SAAW,KAAOrB,GAAa+D,MAC3DpB,EAAUtB,SAAW,KAAOsB,EAAUoB,KACtC,MAAQz4B,GAIT6zB,EAAE2E,aAAc,GAalB,GARK3E,EAAElf,MAAQkf,EAAEmC,aAAiC,iBAAXnC,EAAElf,OACxCkf,EAAElf,KAAOre,GAAOs9B,MAAOC,EAAElf,KAAMkf,EAAEF,cAIlCqB,GAA+BzH,GAAYsG,EAAGh7B,EAASo8B,GAGlDjiB,EACJ,OAAOiiB,EA8ER,IAAMz/B,KAzEN8hC,EAAchhC,GAAOskB,OAASiZ,EAAEhhC,SAGQ,GAApByD,GAAOo/B,UAC1Bp/B,GAAOskB,MAAMU,QAAS,aAIvBuY,EAAE7+B,KAAO6+B,EAAE7+B,KAAKif,cAGhB4f,EAAE6E,YAAcpE,GAAWx5B,KAAM+4B,EAAE7+B,MAKnCiiC,EAAWpD,EAAEgC,IAAIn8B,QAASy6B,GAAO,IAG3BN,EAAE6E,WAwBI7E,EAAElf,MAAQkf,EAAEmC,aACoD,KAAzEnC,EAAEqC,aAAe,IAAKhiC,QAAS,uCACjC2/B,EAAElf,KAAOkf,EAAElf,KAAKjb,QAASw6B,GAAK,OAvB9BqD,EAAW1D,EAAEgC,IAAIliC,MAAOsjC,EAASrgC,QAG5Bi9B,EAAElf,OAAUkf,EAAEmC,aAAiC,iBAAXnC,EAAElf,QAC1CsiB,IAAczE,GAAO13B,KAAMm8B,GAAa,IAAM,KAAQpD,EAAElf,YAGjDkf,EAAElf,OAIO,IAAZkf,EAAEpyB,QACNw1B,EAAWA,EAASv9B,QAAS06B,GAAY,MACzCmD,GAAa/E,GAAO13B,KAAMm8B,GAAa,IAAM,KAAQ,KAAS/hC,GAAMmG,OACnEk8B,GAIF1D,EAAEgC,IAAMoB,EAAWM,GASf1D,EAAE8E,aACDriC,GAAOq/B,aAAcsB,IACzBhC,EAAMgD,iBAAkB,oBAAqB3hC,GAAOq/B,aAAcsB,IAE9D3gC,GAAOs/B,KAAMqB,IACjBhC,EAAMgD,iBAAkB,gBAAiB3hC,GAAOs/B,KAAMqB,MAKnDpD,EAAElf,MAAQkf,EAAE6E,aAAgC,IAAlB7E,EAAEqC,aAAyBr9B,EAAQq9B,cACjEjB,EAAMgD,iBAAkB,eAAgBpE,EAAEqC,aAI3CjB,EAAMgD,iBACL,SACApE,EAAEkB,UAAW,IAAOlB,EAAEsC,QAAStC,EAAEkB,UAAW,IAC3ClB,EAAEsC,QAAStC,EAAEkB,UAAW,KACA,MAArBlB,EAAEkB,UAAW,GAAc,KAAON,GAAW,WAAa,IAC7DZ,EAAEsC,QAAS,MAIFtC,EAAE+E,QACZ3D,EAAMgD,iBAAkBziC,EAAGq+B,EAAE+E,QAASpjC,IAIvC,GAAKq+B,EAAEgF,cAC+C,IAAnDhF,EAAEgF,WAAW/kC,KAAM0jC,EAAiBvC,EAAOpB,IAAiB7gB,GAG9D,OAAOiiB,EAAMoD,QAed,GAXAP,EAAW,QAGXJ,EAAiBnqB,IAAKsmB,EAAE3F,UACxB+G,EAAM93B,KAAM02B,EAAEiF,SACd7D,EAAMrmB,KAAMilB,EAAEj6B,OAGdo9B,EAAYhC,GAA+BR,GAAYX,EAAGh7B,EAASo8B,GAK5D,CASN,GARAA,EAAM7hB,WAAa,EAGdkkB,GACJG,EAAmBnc,QAAS,WAAY,CAAE2Z,EAAOpB,IAI7C7gB,EACJ,OAAOiiB,EAIHpB,EAAEoC,OAAqB,EAAZpC,EAAEvD,UACjB8G,EAAehkC,GAAO2e,WAAY,WACjCkjB,EAAMoD,MAAO,YACXxE,EAAEvD,UAGN,IACCtd,GAAY,EACZgkB,EAAU+B,KAAMnB,EAAgBz6B,GAC/B,MAAQ6C,GAGT,GAAKgT,EACJ,MAAMhT,EAIP7C,GAAO,EAAG6C,SAhCX7C,GAAO,EAAG,gBAqCX,SAASA,EAAMi7B,EAAQY,EAAkBC,EAAWL,GACnD,IAAIM,EAAWJ,EAASl/B,EAAOu/B,EAAUC,EACxCd,EAAaU,EAGThmB,IAILA,GAAY,EAGPokB,GACJhkC,GAAOm9B,aAAc6G,GAKtBJ,OAAY19B,EAGZ49B,EAAwB0B,GAAW,GAGnC3D,EAAM7hB,WAAsB,EAATglB,EAAa,EAAI,EAGpCc,EAAsB,KAAVd,GAAiBA,EAAS,KAAkB,MAAXA,EAGxCa,IACJE,EA7lBJ,SAA8BtF,EAAGoB,EAAOgE,GAEvC,IAAII,EAAIrkC,EAAMskC,EAAeC,EAC5BzsB,EAAW+mB,EAAE/mB,SACbioB,EAAYlB,EAAEkB,UAGf,MAA2B,MAAnBA,EAAW,GAClBA,EAAUnzB,aACEtI,IAAP+/B,IACJA,EAAKxF,EAAEsE,UAAYlD,EAAM8C,kBAAmB,iBAK9C,GAAKsB,EACJ,IAAMrkC,KAAQ8X,EACb,GAAKA,EAAU9X,IAAU8X,EAAU9X,GAAO8F,KAAMu+B,GAAO,CACtDtE,EAAUlf,QAAS7gB,GACnB,MAMH,GAAK+/B,EAAW,KAAOkE,EACtBK,EAAgBvE,EAAW,OACrB,CAGN,IAAM//B,KAAQikC,EAAY,CACzB,IAAMlE,EAAW,IAAOlB,EAAEyC,WAAYthC,EAAO,IAAM+/B,EAAW,IAAQ,CACrEuE,EAAgBtkC,EAChB,MAEKukC,IACLA,EAAgBvkC,GAKlBskC,EAAgBA,GAAiBC,EAMlC,GAAKD,EAIJ,OAHKA,IAAkBvE,EAAW,IACjCA,EAAUlf,QAASyjB,GAEbL,EAAWK,GA0iBLE,CAAqB3F,EAAGoB,EAAOgE,KAIrCC,IACsC,EAA3C5iC,GAAOkE,QAAS,SAAUq5B,EAAEkB,YAC5Bz+B,GAAOkE,QAAS,OAAQq5B,EAAEkB,WAAc,IACxClB,EAAEyC,WAAY,eAAkB,cAIjC6C,EA9iBH,SAAsBtF,EAAGsF,EAAUlE,EAAOiE,GACzC,IAAIO,EAAOC,EAASC,EAAM1jB,EAAKlJ,EAC9BupB,EAAa,GAGbvB,EAAYlB,EAAEkB,UAAUphC,QAGzB,GAAKohC,EAAW,GACf,IAAM4E,KAAQ9F,EAAEyC,WACfA,EAAYqD,EAAK3iC,eAAkB68B,EAAEyC,WAAYqD,GAInDD,EAAU3E,EAAUnzB,QAGpB,MAAQ83B,EAcP,GAZK7F,EAAEwC,eAAgBqD,KACtBzE,EAAOpB,EAAEwC,eAAgBqD,IAAcP,IAIlCpsB,GAAQmsB,GAAarF,EAAE+F,aAC5BT,EAAWtF,EAAE+F,WAAYT,EAAUtF,EAAEiB,WAGtC/nB,EAAO2sB,EACPA,EAAU3E,EAAUnzB,QAKnB,GAAiB,MAAZ83B,EAEJA,EAAU3sB,OAGJ,GAAc,MAATA,GAAgBA,IAAS2sB,EAAU,CAM9C,KAHAC,EAAOrD,EAAYvpB,EAAO,IAAM2sB,IAAapD,EAAY,KAAOoD,IAI/D,IAAMD,KAASnD,EAId,IADArgB,EAAMwjB,EAAMj+B,MAAO,MACT,KAAQk+B,IAGjBC,EAAOrD,EAAYvpB,EAAO,IAAMkJ,EAAK,KACpCqgB,EAAY,KAAOrgB,EAAK,KACb,EAGG,IAAT0jB,EACJA,EAAOrD,EAAYmD,IAGgB,IAAxBnD,EAAYmD,KACvBC,EAAUzjB,EAAK,GACf8e,EAAUlf,QAASI,EAAK,KAEzB,MAOJ,IAAc,IAAT0jB,EAGJ,GAAKA,GAAQ9F,EAAEgG,UACdV,EAAWQ,EAAMR,QAEjB,IACCA,EAAWQ,EAAMR,GAChB,MAAQn5B,GACT,MAAO,CACNmQ,MAAO,cACPvW,MAAO+/B,EAAO35B,EAAI,sBAAwB+M,EAAO,OAAS2sB,IASjE,MAAO,CAAEvpB,MAAO,UAAWwE,KAAMwkB,GAidpBW,CAAajG,EAAGsF,EAAUlE,EAAOiE,GAGvCA,GAGCrF,EAAE8E,cACNS,EAAWnE,EAAM8C,kBAAmB,oBAEnCzhC,GAAOq/B,aAAcsB,GAAamC,IAEnCA,EAAWnE,EAAM8C,kBAAmB,WAEnCzhC,GAAOs/B,KAAMqB,GAAamC,IAKZ,MAAXhB,GAA6B,SAAXvE,EAAE7+B,KACxBsjC,EAAa,YAGS,MAAXF,EACXE,EAAa,eAIbA,EAAaa,EAAShpB,MACtB2oB,EAAUK,EAASxkB,KAEnBukB,IADAt/B,EAAQu/B,EAASv/B,UAMlBA,EAAQ0+B,GACHF,GAAWE,IACfA,EAAa,QACRF,EAAS,IACbA,EAAS,KAMZnD,EAAMmD,OAASA,EACfnD,EAAMqD,YAAeU,GAAoBV,GAAe,GAGnDY,EACJ7oB,EAASoB,YAAa+lB,EAAiB,CAAEsB,EAASR,EAAYrD,IAE9D5kB,EAASuB,WAAY4lB,EAAiB,CAAEvC,EAAOqD,EAAY1+B,IAI5Dq7B,EAAM0C,WAAYA,GAClBA,OAAar+B,EAERg+B,GACJG,EAAmBnc,QAAS4d,EAAY,cAAgB,YACvD,CAAEjE,EAAOpB,EAAGqF,EAAYJ,EAAUl/B,IAIpC89B,EAAiB3nB,SAAUynB,EAAiB,CAAEvC,EAAOqD,IAEhDhB,IACJG,EAAmBnc,QAAS,eAAgB,CAAE2Z,EAAOpB,MAG3Cv9B,GAAOo/B,QAChBp/B,GAAOskB,MAAMU,QAAS,cAKzB,OAAO2Z,GAGR8E,QAAS,SAAUlE,EAAKlhB,EAAM9c,GAC7B,OAAOvB,GAAOe,IAAKw+B,EAAKlhB,EAAM9c,EAAU,SAGzCmiC,UAAW,SAAUnE,EAAKh+B,GACzB,OAAOvB,GAAOe,IAAKw+B,OAAKv8B,EAAWzB,EAAU,aAI/CvB,GAAOsB,KAAM,CAAE,MAAO,QAAU,SAAU6D,EAAIiT,GAC7CpY,GAAQoY,GAAW,SAAUmnB,EAAKlhB,EAAM9c,EAAU7C,GAUjD,OAPKN,EAAYigB,KAChB3f,EAAOA,GAAQ6C,EACfA,EAAW8c,EACXA,OAAOrb,GAIDhD,GAAOygC,KAAMzgC,GAAOsC,OAAQ,CAClCi9B,IAAKA,EACL7gC,KAAM0Z,EACNomB,SAAU9/B,EACV2f,KAAMA,EACNmkB,QAASjhC,GACPvB,GAAO6C,cAAe08B,IAASA,OAIpCv/B,GAAOugC,cAAe,SAAUhD,GAC/B,IAAIr+B,EACJ,IAAMA,KAAKq+B,EAAE+E,QACa,iBAApBpjC,EAAEwB,gBACN68B,EAAEqC,YAAcrC,EAAE+E,QAASpjC,IAAO,MAMrCc,GAAO4rB,SAAW,SAAU2T,EAAKh9B,EAAStD,GACzC,OAAOe,GAAOygC,KAAM,CACnBlB,IAAKA,EAGL7gC,KAAM,MACN8/B,SAAU,SACVrzB,OAAO,EACPw0B,OAAO,EACPpjC,QAAQ,EAKRyjC,WAAY,CACX2D,cAAe,cAEhBL,WAAY,SAAUT,GACrB7iC,GAAO4D,WAAYi/B,EAAUtgC,EAAStD,OAMzCe,GAAOG,GAAGmC,OAAQ,CACjBshC,QAAS,SAAUjY,GAClB,IAAIlI,EAyBJ,OAvBK1mB,KAAM,KACLqB,EAAYutB,KAChBA,EAAOA,EAAKnuB,KAAMT,KAAM,KAIzB0mB,EAAOzjB,GAAQ2rB,EAAM5uB,KAAM,GAAIwH,eAAgB5C,GAAI,GAAIe,OAAO,GAEzD3F,KAAM,GAAI4C,YACd8jB,EAAK8I,aAAcxvB,KAAM,IAG1B0mB,EAAKjiB,IAAK,WACT,IAAIhB,EAAOzD,KAEX,MAAQyD,EAAKqjC,kBACZrjC,EAAOA,EAAKqjC,kBAGb,OAAOrjC,IACJ6rB,OAAQtvB,OAGNA,MAGR+mC,UAAW,SAAUnY,GACpB,OAAKvtB,EAAYutB,GACT5uB,KAAKuE,KAAM,SAAUpC,GAC3Bc,GAAQjD,MAAO+mC,UAAWnY,EAAKnuB,KAAMT,KAAMmC,MAItCnC,KAAKuE,KAAM,WACjB,IAAI2U,EAAOjW,GAAQjD,MAClByZ,EAAWP,EAAKO,WAEZA,EAASlW,OACbkW,EAASotB,QAASjY,GAGlB1V,EAAKoW,OAAQV,MAKhBlI,KAAM,SAAUkI,GACf,IAAIoY,EAAiB3lC,EAAYutB,GAEjC,OAAO5uB,KAAKuE,KAAM,SAAUpC,GAC3Bc,GAAQjD,MAAO6mC,QAASG,EAAiBpY,EAAKnuB,KAAMT,KAAMmC,GAAMysB,MAIlEqY,OAAQ,SAAU/jC,GAIjB,OAHAlD,KAAKuS,OAAQrP,GAAW+P,IAAK,QAAS1O,KAAM,WAC3CtB,GAAQjD,MAAO2vB,YAAa3vB,KAAK0M,cAE3B1M,QAKTiD,GAAOqN,KAAK9F,QAAQ6uB,OAAS,SAAU51B,GACtC,OAAQR,GAAOqN,KAAK9F,QAAQ08B,QAASzjC,IAEtCR,GAAOqN,KAAK9F,QAAQ08B,QAAU,SAAUzjC,GACvC,SAAWA,EAAK0uB,aAAe1uB,EAAK6vB,cAAgB7vB,EAAK4xB,iBAAiB9xB,SAM3EN,GAAOm/B,aAAa+E,IAAM,WACzB,IACC,OAAO,IAAIpnC,GAAOqnC,eACjB,MAAQz6B,MAGX,IAAI06B,GAAmB,CAGrBC,EAAG,IAIHC,KAAM,KAEPC,GAAevkC,GAAOm/B,aAAa+E,MAEpC/lC,GAAQqmC,OAASD,IAAkB,oBAAqBA,GACxDpmC,GAAQsiC,KAAO8D,KAAiBA,GAEhCvkC,GAAOwgC,cAAe,SAAUj+B,GAC/B,IAAIhB,EAAUkjC,EAGd,GAAKtmC,GAAQqmC,MAAQD,KAAiBhiC,EAAQ2/B,YAC7C,MAAO,CACNO,KAAM,SAAUH,EAAS1K,GACxB,IAAI14B,EACHglC,EAAM3hC,EAAQ2hC,MAWf,GATAA,EAAIQ,KACHniC,EAAQ7D,KACR6D,EAAQg9B,IACRh9B,EAAQo9B,MACRp9B,EAAQoiC,SACRpiC,EAAQyP,UAIJzP,EAAQqiC,UACZ,IAAM1lC,KAAKqD,EAAQqiC,UAClBV,EAAKhlC,GAAMqD,EAAQqiC,UAAW1lC,GAmBhC,IAAMA,KAdDqD,EAAQs/B,UAAYqC,EAAItC,kBAC5BsC,EAAItC,iBAAkBr/B,EAAQs/B,UAQzBt/B,EAAQ2/B,aAAgBI,EAAS,sBACtCA,EAAS,oBAAuB,kBAItBA,EACV4B,EAAIvC,iBAAkBziC,EAAGojC,EAASpjC,IAInCqC,EAAW,SAAU7C,GACpB,OAAO,WACD6C,IACJA,EAAWkjC,EAAgBP,EAAIW,OAC9BX,EAAIY,QAAUZ,EAAIa,QAAUb,EAAIc,UAC/Bd,EAAIe,mBAAqB,KAEb,UAATvmC,EACJwlC,EAAInC,QACgB,UAATrjC,EAKgB,iBAAfwlC,EAAIpC,OACflK,EAAU,EAAG,SAEbA,EAGCsM,EAAIpC,OACJoC,EAAIlC,YAINpK,EACCwM,GAAkBF,EAAIpC,SAAYoC,EAAIpC,OACtCoC,EAAIlC,WAK+B,UAAjCkC,EAAIgB,cAAgB,SACM,iBAArBhB,EAAIiB,aACV,CAAEC,OAAQlB,EAAIrB,UACd,CAAEvjC,KAAM4kC,EAAIiB,cACbjB,EAAIxC,4BAQTwC,EAAIW,OAAStjC,IACbkjC,EAAgBP,EAAIY,QAAUZ,EAAIc,UAAYzjC,EAAU,cAKnCyB,IAAhBkhC,EAAIa,QACRb,EAAIa,QAAUN,EAEdP,EAAIe,mBAAqB,WAGA,IAAnBf,EAAIpnB,YAMRhgB,GAAO2e,WAAY,WACbla,GACJkjC,OAQLljC,EAAWA,EAAU,SAErB,IAGC2iC,EAAIzB,KAAMlgC,EAAQ6/B,YAAc7/B,EAAQ8b,MAAQ,MAC/C,MAAQ3U,GAGT,GAAKnI,EACJ,MAAMmI,IAKTq4B,MAAO,WACDxgC,GACJA,QAWLvB,GAAOugC,cAAe,SAAUhD,GAC1BA,EAAE2E,cACN3E,EAAE/mB,SAASpX,QAAS,KAKtBY,GAAOqgC,UAAW,CACjBR,QAAS,CACRzgC,OAAQ,6FAGToX,SAAU,CACTpX,OAAQ,2BAET4gC,WAAY,CACX2D,cAAe,SAAUrkC,GAExB,OADAU,GAAO4D,WAAYtE,GACZA,MAMVU,GAAOugC,cAAe,SAAU,SAAUhD,QACxBv6B,IAAZu6B,EAAEpyB,QACNoyB,EAAEpyB,OAAQ,GAENoyB,EAAE2E,cACN3E,EAAE7+B,KAAO,SAKXsB,GAAOwgC,cAAe,SAAU,SAAUjD,GAIxC,IAAIn+B,EAAQmC,EADb,GAAKg8B,EAAE2E,aAAe3E,EAAE8H,YAEvB,MAAO,CACN5C,KAAM,SAAU/pB,EAAGkf,GAClBx4B,EAASY,GAAQ,YACfwN,KAAM+vB,EAAE8H,aAAe,IACvB/mB,KAAM,CAAEgnB,QAAS/H,EAAEgI,cAAe5mC,IAAK4+B,EAAEgC,MACzCrb,GAAI,aAAc3iB,EAAW,SAAUikC,GACvCpmC,EAAOka,SACP/X,EAAW,KACNikC,GACJ5N,EAAuB,UAAb4N,EAAI9mC,KAAmB,IAAM,IAAK8mC,EAAI9mC,QAKnD/B,EAAS8C,KAAKC,YAAaN,EAAQ,KAEpC2iC,MAAO,WACDxgC,GACJA,QAUL,IAqGKigB,GArGDikB,GAAe,GAClBC,GAAS,oBAGV1lC,GAAOqgC,UAAW,CACjBsF,MAAO,WACPC,cAAe,WACd,IAAIrkC,EAAWkkC,GAAargC,OAAWpF,GAAOiD,QAAU,IAAQrE,GAAMmG,OAEtE,OADAhI,KAAMwE,IAAa,EACZA,KAKTvB,GAAOugC,cAAe,aAAc,SAAUhD,EAAGsI,EAAkBlH,GAElE,IAAImH,EAAcC,EAAaC,EAC9BC,GAAuB,IAAZ1I,EAAEoI,QAAqBD,GAAOlhC,KAAM+4B,EAAEgC,KAChD,MACkB,iBAAXhC,EAAElf,MAE6C,KADnDkf,EAAEqC,aAAe,IACjBhiC,QAAS,sCACX8nC,GAAOlhC,KAAM+4B,EAAElf,OAAU,QAI5B,GAAK4nB,GAAiC,UAArB1I,EAAEkB,UAAW,GA8D7B,OA3DAqH,EAAevI,EAAEqI,cAAgBxnC,EAAYm/B,EAAEqI,eAC9CrI,EAAEqI,gBACFrI,EAAEqI,cAGEK,EACJ1I,EAAG0I,GAAa1I,EAAG0I,GAAW7iC,QAASsiC,GAAQ,KAAOI,IAC/B,IAAZvI,EAAEoI,QACbpI,EAAEgC,MAASrD,GAAO13B,KAAM+4B,EAAEgC,KAAQ,IAAM,KAAQhC,EAAEoI,MAAQ,IAAMG,GAIjEvI,EAAEyC,WAAY,eAAkB,WAI/B,OAHMgG,GACLhmC,GAAOsD,MAAOwiC,EAAe,mBAEvBE,EAAmB,IAI3BzI,EAAEkB,UAAW,GAAM,OAGnBsH,EAAcjpC,GAAQgpC,GACtBhpC,GAAQgpC,GAAiB,WACxBE,EAAoBvkC,WAIrBk9B,EAAM7kB,OAAQ,gBAGQ9W,IAAhB+iC,EACJ/lC,GAAQlD,IAASm+B,WAAY6K,GAI7BhpC,GAAQgpC,GAAiBC,EAIrBxI,EAAGuI,KAGPvI,EAAEqI,cAAgBC,EAAiBD,cAGnCH,GAAa9nC,KAAMmoC,IAIfE,GAAqB5nC,EAAY2nC,IACrCA,EAAaC,EAAmB,IAGjCA,EAAoBD,OAAc/iC,IAI5B,WAYT7E,GAAQ+nC,qBACH1kB,GAAO7kB,EAASwpC,eAAeD,mBAAoB,IAAK1kB,MACvDtU,UAAY,6BACiB,IAA3BsU,GAAK/X,WAAWnJ,QAQxBN,GAAOmW,UAAY,SAAUkI,EAAMne,EAASkmC,GAC3C,MAAqB,iBAAT/nB,EACJ,IAEgB,kBAAZne,IACXkmC,EAAclmC,EACdA,GAAU,GAKLA,IAIA/B,GAAQ+nC,qBAMZxzB,GALAxS,EAAUvD,EAASwpC,eAAeD,mBAAoB,KAKvC7mC,cAAe,SACzB0R,KAAOpU,EAAS6T,SAASO,KAC9B7Q,EAAQT,KAAKC,YAAagT,IAE1BxS,EAAUvD,GAKZ2mB,GAAW8iB,GAAe,IAD1BC,EAASvwB,EAAW1L,KAAMiU,IAKlB,CAAEne,EAAQb,cAAegnC,EAAQ,MAGzCA,EAAShjB,GAAe,CAAEhF,GAAQne,EAASojB,GAEtCA,GAAWA,EAAQhjB,QACvBN,GAAQsjB,GAAUhK,SAGZtZ,GAAOoB,MAAO,GAAIilC,EAAO58B,cAlChC,IAAIiJ,EAAM2zB,EAAQ/iB,GAyCnBtjB,GAAOG,GAAGonB,KAAO,SAAUgY,EAAK+G,EAAQ/kC,GACvC,IAAItB,EAAUvB,EAAMmkC,EACnB5sB,EAAOlZ,KACPwnB,EAAMgb,EAAI3hC,QAAS,KAsDpB,OApDY,EAAP2mB,IACJtkB,EAAW66B,GAAkByE,EAAIliC,MAAOknB,IACxCgb,EAAMA,EAAIliC,MAAO,EAAGknB,IAIhBnmB,EAAYkoC,IAGhB/kC,EAAW+kC,EACXA,OAAStjC,GAGEsjC,GAA4B,iBAAXA,IAC5B5nC,EAAO,QAIW,EAAduX,EAAK3V,QACTN,GAAOygC,KAAM,CACZlB,IAAKA,EAKL7gC,KAAMA,GAAQ,MACd8/B,SAAU,OACVngB,KAAMioB,IACHz/B,KAAM,SAAUs+B,GAGnBtC,EAAWphC,UAEXwU,EAAK0V,KAAM1rB,EAIVD,GAAQ,SAAUqsB,OAAQrsB,GAAOmW,UAAWgvB,IAAiBv7B,KAAM3J,GAGnEklC,KAKErrB,OAAQvY,GAAY,SAAUo9B,EAAOmD,GACxC7rB,EAAK3U,KAAM,WACVC,EAAS7D,MAAOX,KAAM8lC,GAAY,CAAElE,EAAMwG,aAAcrD,EAAQnD,QAK5D5hC,MAMRiD,GAAOqN,KAAK9F,QAAQg/B,SAAW,SAAU/lC,GACxC,OAAOR,GAAO8B,KAAM9B,GAAOo5B,OAAQ,SAAUj5B,GAC5C,OAAOK,IAASL,EAAGK,OAChBF,QAMLN,GAAOwmC,OAAS,CACfC,UAAW,SAAUjmC,EAAM+B,EAASrD,GACnC,IAAIwnC,EAAaC,EAASC,EAAWC,EAAQC,EAAWC,EACvD/X,EAAWhvB,GAAOwgB,IAAKhgB,EAAM,YAC7BwmC,EAAUhnC,GAAQQ,GAClBonB,EAAQ,GAGS,WAAboH,IACJxuB,EAAK8f,MAAM0O,SAAW,YAGvB8X,EAAYE,EAAQR,SACpBI,EAAY5mC,GAAOwgB,IAAKhgB,EAAM,OAC9BumC,EAAa/mC,GAAOwgB,IAAKhgB,EAAM,SACI,aAAbwuB,GAAwC,UAAbA,KACA,GAA9C4X,EAAYG,GAAanpC,QAAS,SAMpCipC,GADAH,EAAcM,EAAQhY,YACD3iB,IACrBs6B,EAAUD,EAAYpS,OAGtBuS,EAASxX,WAAYuX,IAAe,EACpCD,EAAUtX,WAAY0X,IAAgB,GAGlC3oC,EAAYmE,KAGhBA,EAAUA,EAAQ/E,KAAMgD,EAAMtB,EAAGc,GAAOsC,OAAQ,GAAIwkC,KAGjC,MAAfvkC,EAAQ8J,MACZub,EAAMvb,IAAQ9J,EAAQ8J,IAAMy6B,EAAUz6B,IAAQw6B,GAE1B,MAAhBtkC,EAAQ+xB,OACZ1M,EAAM0M,KAAS/xB,EAAQ+xB,KAAOwS,EAAUxS,KAASqS,GAG7C,UAAWpkC,EACfA,EAAQ0kC,MAAMzpC,KAAMgD,EAAMonB,GAG1Bof,EAAQxmB,IAAKoH,KAKhB5nB,GAAOG,GAAGmC,OAAQ,CAGjBkkC,OAAQ,SAAUjkC,GAGjB,GAAKd,UAAUnB,OACd,YAAmB0C,IAAZT,EACNxF,KACAA,KAAKuE,KAAM,SAAUpC,GACpBc,GAAOwmC,OAAOC,UAAW1pC,KAAMwF,EAASrD,KAI3C,IAAIgoC,EAAMC,EACT3mC,EAAOzD,KAAM,GAEd,OAAMyD,EAQAA,EAAK4xB,iBAAiB9xB,QAK5B4mC,EAAO1mC,EAAK4zB,wBACZ+S,EAAM3mC,EAAK+D,cAAc6H,YAClB,CACNC,IAAK66B,EAAK76B,IAAM86B,EAAIC,YACpB9S,KAAM4S,EAAK5S,KAAO6S,EAAIE,cARf,CAAEh7B,IAAK,EAAGioB,KAAM,QATxB,GAuBDtF,SAAU,WACT,GAAMjyB,KAAM,GAAZ,CAIA,IAAIuqC,EAAcd,EAAQvnC,EACzBuB,EAAOzD,KAAM,GACbwqC,EAAe,CAAEl7B,IAAK,EAAGioB,KAAM,GAGhC,GAAwC,UAAnCt0B,GAAOwgB,IAAKhgB,EAAM,YAGtBgmC,EAAShmC,EAAK4zB,4BAER,CACNoS,EAASzpC,KAAKypC,SAIdvnC,EAAMuB,EAAK+D,cACX+iC,EAAe9mC,EAAK8mC,cAAgBroC,EAAI6E,gBACxC,MAAQwjC,IACLA,IAAiBroC,EAAIuiB,MAAQ8lB,IAAiBroC,EAAI6E,kBACT,WAA3C9D,GAAOwgB,IAAK8mB,EAAc,YAE1BA,EAAeA,EAAa3nC,WAExB2nC,GAAgBA,IAAiB9mC,GAAkC,IAA1B8mC,EAAahpC,YAG1DipC,EAAevnC,GAAQsnC,GAAed,UACzBn6B,KAAOrM,GAAOwgB,IAAK8mB,EAAc,kBAAkB,GAChEC,EAAajT,MAAQt0B,GAAOwgB,IAAK8mB,EAAc,mBAAmB,IAKpE,MAAO,CACNj7B,IAAKm6B,EAAOn6B,IAAMk7B,EAAal7B,IAAMrM,GAAOwgB,IAAKhgB,EAAM,aAAa,GACpE8zB,KAAMkS,EAAOlS,KAAOiT,EAAajT,KAAOt0B,GAAOwgB,IAAKhgB,EAAM,cAAc,MAc1E8mC,aAAc,WACb,OAAOvqC,KAAKyE,IAAK,WAChB,IAAI8lC,EAAevqC,KAAKuqC,aAExB,MAAQA,GAA2D,WAA3CtnC,GAAOwgB,IAAK8mB,EAAc,YACjDA,EAAeA,EAAaA,aAG7B,OAAOA,GAAgBxjC,OAM1B9D,GAAOsB,KAAM,CAAEk0B,WAAY,cAAeD,UAAW,eAAiB,SAAUnd,EAAQkG,GACvF,IAAIjS,EAAM,gBAAkBiS,EAE5Bte,GAAOG,GAAIiY,GAAW,SAAUjZ,GAC/B,OAAO6d,EAAQjgB,KAAM,SAAUyD,EAAM4X,EAAQjZ,GAG5C,IAAIgoC,EAOJ,GANK3oC,EAAUgC,GACd2mC,EAAM3mC,EACuB,IAAlBA,EAAKlC,WAChB6oC,EAAM3mC,EAAK4L,kBAGCpJ,IAAR7D,EACJ,OAAOgoC,EAAMA,EAAK7oB,GAAS9d,EAAM4X,GAG7B+uB,EACJA,EAAIK,SACFn7B,EAAY86B,EAAIE,YAAVloC,EACPkN,EAAMlN,EAAMgoC,EAAIC,aAIjB5mC,EAAM4X,GAAWjZ,GAEhBiZ,EAAQjZ,EAAKsC,UAAUnB,WAU5BN,GAAOsB,KAAM,CAAE,MAAO,QAAU,SAAU6D,EAAImZ,GAC7Cte,GAAOuyB,SAAUjU,GAAS4P,GAAc/vB,GAAQuxB,cAC/C,SAAUlvB,EAAMmtB,GACf,GAAKA,EAIJ,OAHAA,EAAWD,GAAQltB,EAAM8d,GAGlB4O,GAAU1oB,KAAMmpB,GACtB3tB,GAAQQ,GAAOwuB,WAAY1Q,GAAS,KACpCqP,MAQL3tB,GAAOsB,KAAM,CAAEmmC,OAAQ,SAAUC,MAAO,SAAW,SAAUjnC,EAAM/B,GAClEsB,GAAOsB,KAAM,CACZkzB,QAAS,QAAU/zB,EACnBgX,QAAS/Y,EACTipC,GAAI,QAAUlnC,GACZ,SAAUmnC,EAAcC,GAG1B7nC,GAAOG,GAAI0nC,GAAa,SAAUtT,EAAQzvB,GACzC,IAAImY,EAAYxb,UAAUnB,SAAYsnC,GAAkC,kBAAXrT,GAC5D1C,EAAQ+V,KAA6B,IAAXrT,IAA6B,IAAVzvB,EAAiB,SAAW,UAE1E,OAAOkY,EAAQjgB,KAAM,SAAUyD,EAAM9B,EAAMoG,GAC1C,IAAI7F,EAEJ,OAAKT,EAAUgC,GAGyB,IAAhCqnC,EAASjqC,QAAS,SACxB4C,EAAM,QAAUC,GAChBD,EAAK7D,SAASmH,gBAAiB,SAAWrD,GAIrB,IAAlBD,EAAKlC,UACTW,EAAMuB,EAAKsD,gBAIJZ,KAAKouB,IACX9wB,EAAKghB,KAAM,SAAW/gB,GAAQxB,EAAK,SAAWwB,GAC9CD,EAAKghB,KAAM,SAAW/gB,GAAQxB,EAAK,SAAWwB,GAC9CxB,EAAK,SAAWwB,UAIDuC,IAAV8B,EAGN9E,GAAOwgB,IAAKhgB,EAAM9B,EAAMmzB,GAGxB7xB,GAAOsgB,MAAO9f,EAAM9B,EAAMoG,EAAO+sB,IAChCnzB,EAAMue,EAAYsX,OAASvxB,EAAWia,QAM5Cjd,GAAOsB,KAAM,CACZ,YACA,WACA,eACA,YACA,cACA,YACE,SAAU6D,EAAIzG,GAChBsB,GAAOG,GAAIzB,GAAS,SAAUyB,GAC7B,OAAOpD,KAAKmnB,GAAIxlB,EAAMyB,MAOxBH,GAAOG,GAAGmC,OAAQ,CAEjBq1B,KAAM,SAAUxT,EAAO9F,EAAMle,GAC5B,OAAOpD,KAAKmnB,GAAIC,EAAO,KAAM9F,EAAMle,IAEpC2nC,OAAQ,SAAU3jB,EAAOhkB,GACxB,OAAOpD,KAAKwnB,IAAKJ,EAAO,KAAMhkB,IAG/B4nC,SAAU,SAAU9nC,EAAUkkB,EAAO9F,EAAMle,GAC1C,OAAOpD,KAAKmnB,GAAIC,EAAOlkB,EAAUoe,EAAMle,IAExC6nC,WAAY,SAAU/nC,EAAUkkB,EAAOhkB,GAGtC,OAA4B,IAArBsB,UAAUnB,OAChBvD,KAAKwnB,IAAKtkB,EAAU,MACpBlD,KAAKwnB,IAAKJ,EAAOlkB,GAAY,KAAME,IAGrC8nC,MAAO,SAAUC,EAAQC,GACxB,OAAOprC,KACLmnB,GAAI,aAAcgkB,GAClBhkB,GAAI,aAAcikB,GAASD,MAI/BloC,GAAOsB,KACN,wLAE4D4D,MAAO,KACnE,SAAUC,EAAI1E,GAGbT,GAAOG,GAAIM,GAAS,SAAU4d,EAAMle,GACnC,OAA0B,EAAnBsB,UAAUnB,OAChBvD,KAAKmnB,GAAIzjB,EAAM,KAAM4d,EAAMle,GAC3BpD,KAAKioB,QAASvkB,MAYlB,IAAI2nC,GAAQ,sDAMZpoC,GAAOqoC,MAAQ,SAAUloC,EAAID,GAC5B,IAAIyf,EAAK/P,EAAMy4B,EAUf,GARwB,iBAAZnoC,IACXyf,EAAMxf,EAAID,GACVA,EAAUC,EACVA,EAAKwf,GAKAvhB,EAAY+B,GAalB,OARAyP,EAAOvS,GAAMG,KAAMiE,UAAW,IAC9B4mC,EAAQ,WACP,OAAOloC,EAAGzC,MAAOwC,GAAWnD,KAAM6S,EAAKnS,OAAQJ,GAAMG,KAAMiE,eAItDsD,KAAO5E,EAAG4E,KAAO5E,EAAG4E,MAAQ/E,GAAO+E,OAElCsjC,GAGRroC,GAAOsoC,UAAY,SAAUC,GACvBA,EACJvoC,GAAO4c,YAEP5c,GAAOoW,OAAO,IAGhBpW,GAAO+C,QAAUD,MAAMC,QACvB/C,GAAOwoC,UAAY3pB,KAAKC,MACxB9e,GAAOO,SAAWA,GAClBP,GAAO5B,WAAaA,EACpB4B,GAAOxB,SAAWA,EAClBwB,GAAO4d,UAAYA,EACnB5d,GAAOtB,KAAOmB,EAEdG,GAAOkoB,IAAMD,KAAKC,IAElBloB,GAAOyoC,UAAY,SAAUpqC,GAK5B,IAAIK,EAAOsB,GAAOtB,KAAML,GACxB,OAAkB,WAATK,GAA8B,WAATA,KAK5BgqC,MAAOrqC,EAAMgxB,WAAYhxB,KAG5B2B,GAAO2oC,KAAO,SAAUrpC,GACvB,OAAe,MAARA,EACN,IACEA,EAAO,IAAK8D,QAASglC,GAAO,OAkBT,mBAAXQ,QAAyBA,OAAOC,KAC3CD,OAAQ,SAAU,GAAI,WACrB,OAAO5oC,KAOT,IAGC8oC,GAAUhsC,GAAOkD,OAGjB+oC,GAAKjsC,GAAOksC,EAwBb,OAtBAhpC,GAAOipC,WAAa,SAAUrmC,GAS7B,OARK9F,GAAOksC,IAAMhpC,KACjBlD,GAAOksC,EAAID,IAGPnmC,GAAQ9F,GAAOkD,SAAWA,KAC9BlD,GAAOkD,OAAS8oC,IAGV9oC,IAMiB,oBAAbhD,IACXF,GAAOkD,OAASlD,GAAOksC,EAAIhpC,IAMrBA","file":"jquery-3.7.1.min.js"} \ No newline at end of file
diff --git a/bitbake/lib/toaster/toastergui/static/js/jquery.dataTables-1.13.8.min.js b/bitbake/lib/toaster/toastergui/static/js/jquery.dataTables-1.13.8.min.js
new file mode 100644
index 0000000000..b6d9aa8c79
--- /dev/null
+++ b/bitbake/lib/toaster/toastergui/static/js/jquery.dataTables-1.13.8.min.js
@@ -0,0 +1,4 @@
+/*! DataTables 1.13.8
+ * ©2008-2023 SpryMedia Ltd - datatables.net/license
+ */
+!function(n){"use strict";var a;"function"==typeof define&&define.amd?define(["jquery"],function(t){return n(t,window,document)}):"object"==typeof exports?(a=require("jquery"),"undefined"==typeof window?module.exports=function(t,e){return t=t||window,e=e||a(t),n(e,t,t.document)}:module.exports=n(a,window,window.document)):window.DataTable=n(jQuery,window,document)}(function(P,j,v,H){"use strict";function d(t){var e=parseInt(t,10);return!isNaN(e)&&isFinite(t)?e:null}function l(t,e,n){var a=typeof t,r="string"==a;return"number"==a||"bigint"==a||!!h(t)||(e&&r&&(t=$(t,e)),n&&r&&(t=t.replace(q,"")),!isNaN(parseFloat(t))&&isFinite(t))}function a(t,e,n){var a;return!!h(t)||(h(a=t)||"string"==typeof a)&&!!l(t.replace(V,"").replace(/<script/i,""),e,n)||null}function m(t,e,n,a){var r=[],o=0,i=e.length;if(a!==H)for(;o<i;o++)t[e[o]][n]&&r.push(t[e[o]][n][a]);else for(;o<i;o++)r.push(t[e[o]][n]);return r}function f(t,e){var n,a=[];e===H?(e=0,n=t):(n=e,e=t);for(var r=e;r<n;r++)a.push(r);return a}function _(t){for(var e=[],n=0,a=t.length;n<a;n++)t[n]&&e.push(t[n]);return e}function s(t,e){return-1!==this.indexOf(t,e=e===H?0:e)}var p,e,t,w=function(t,v){if(w.factory(t,v))return w;if(this instanceof w)return P(t).DataTable(v);v=t,this.$=function(t,e){return this.api(!0).$(t,e)},this._=function(t,e){return this.api(!0).rows(t,e).data()},this.api=function(t){return new B(t?ge(this[p.iApiIndex]):this)},this.fnAddData=function(t,e){var n=this.api(!0),t=(Array.isArray(t)&&(Array.isArray(t[0])||P.isPlainObject(t[0]))?n.rows:n.row).add(t);return e!==H&&!e||n.draw(),t.flatten().toArray()},this.fnAdjustColumnSizing=function(t){var e=this.api(!0).columns.adjust(),n=e.settings()[0],a=n.oScroll;t===H||t?e.draw(!1):""===a.sX&&""===a.sY||Qt(n)},this.fnClearTable=function(t){var e=this.api(!0).clear();t!==H&&!t||e.draw()},this.fnClose=function(t){this.api(!0).row(t).child.hide()},this.fnDeleteRow=function(t,e,n){var a=this.api(!0),t=a.rows(t),r=t.settings()[0],o=r.aoData[t[0][0]];return t.remove(),e&&e.call(this,r,o),n!==H&&!n||a.draw(),o},this.fnDestroy=function(t){this.api(!0).destroy(t)},this.fnDraw=function(t){this.api(!0).draw(t)},this.fnFilter=function(t,e,n,a,r,o){var i=this.api(!0);(null===e||e===H?i:i.column(e)).search(t,n,a,o),i.draw()},this.fnGetData=function(t,e){var n,a=this.api(!0);return t!==H?(n=t.nodeName?t.nodeName.toLowerCase():"",e!==H||"td"==n||"th"==n?a.cell(t,e).data():a.row(t).data()||null):a.data().toArray()},this.fnGetNodes=function(t){var e=this.api(!0);return t!==H?e.row(t).node():e.rows().nodes().flatten().toArray()},this.fnGetPosition=function(t){var e=this.api(!0),n=t.nodeName.toUpperCase();return"TR"==n?e.row(t).index():"TD"==n||"TH"==n?[(n=e.cell(t).index()).row,n.columnVisible,n.column]:null},this.fnIsOpen=function(t){return this.api(!0).row(t).child.isShown()},this.fnOpen=function(t,e,n){return this.api(!0).row(t).child(e,n).show().child()[0]},this.fnPageChange=function(t,e){t=this.api(!0).page(t);e!==H&&!e||t.draw(!1)},this.fnSetColumnVis=function(t,e,n){t=this.api(!0).column(t).visible(e);n!==H&&!n||t.columns.adjust().draw()},this.fnSettings=function(){return ge(this[p.iApiIndex])},this.fnSort=function(t){this.api(!0).order(t).draw()},this.fnSortListener=function(t,e,n){this.api(!0).order.listener(t,e,n)},this.fnUpdate=function(t,e,n,a,r){var o=this.api(!0);return(n===H||null===n?o.row(e):o.cell(e,n)).data(t),r!==H&&!r||o.columns.adjust(),a!==H&&!a||o.draw(),0},this.fnVersionCheck=p.fnVersionCheck;var e,y=this,D=v===H,_=this.length;for(e in D&&(v={}),this.oApi=this.internal=p.internal,w.ext.internal)e&&(this[e]=$e(e));return this.each(function(){var r=1<_?be({},v,!0):v,o=0,t=this.getAttribute("id"),i=!1,e=w.defaults,l=P(this);if("table"!=this.nodeName.toLowerCase())W(null,0,"Non-table node initialisation ("+this.nodeName+")",2);else{K(e),Q(e.column),C(e,e,!0),C(e.column,e.column,!0),C(e,P.extend(r,l.data()),!0);for(var n=w.settings,o=0,s=n.length;o<s;o++){var a=n[o];if(a.nTable==this||a.nTHead&&a.nTHead.parentNode==this||a.nTFoot&&a.nTFoot.parentNode==this){var u=(r.bRetrieve!==H?r:e).bRetrieve,c=(r.bDestroy!==H?r:e).bDestroy;if(D||u)return a.oInstance;if(c){a.oInstance.fnDestroy();break}return void W(a,0,"Cannot reinitialise DataTable",3)}if(a.sTableId==this.id){n.splice(o,1);break}}null!==t&&""!==t||(t="DataTables_Table_"+w.ext._unique++,this.id=t);var f,d,h=P.extend(!0,{},w.models.oSettings,{sDestroyWidth:l[0].style.width,sInstance:t,sTableId:t}),p=(h.nTable=this,h.oApi=y.internal,h.oInit=r,n.push(h),h.oInstance=1===y.length?y:l.dataTable(),K(r),Z(r.oLanguage),r.aLengthMenu&&!r.iDisplayLength&&(r.iDisplayLength=(Array.isArray(r.aLengthMenu[0])?r.aLengthMenu[0]:r.aLengthMenu)[0]),r=be(P.extend(!0,{},e),r),F(h.oFeatures,r,["bPaginate","bLengthChange","bFilter","bSort","bSortMulti","bInfo","bProcessing","bAutoWidth","bSortClasses","bServerSide","bDeferRender"]),F(h,r,["asStripeClasses","ajax","fnServerData","fnFormatNumber","sServerMethod","aaSorting","aaSortingFixed","aLengthMenu","sPaginationType","sAjaxSource","sAjaxDataProp","iStateDuration","sDom","bSortCellsTop","iTabIndex","fnStateLoadCallback","fnStateSaveCallback","renderer","searchDelay","rowId",["iCookieDuration","iStateDuration"],["oSearch","oPreviousSearch"],["aoSearchCols","aoPreSearchCols"],["iDisplayLength","_iDisplayLength"]]),F(h.oScroll,r,[["sScrollX","sX"],["sScrollXInner","sXInner"],["sScrollY","sY"],["bScrollCollapse","bCollapse"]]),F(h.oLanguage,r,"fnInfoCallback"),L(h,"aoDrawCallback",r.fnDrawCallback,"user"),L(h,"aoServerParams",r.fnServerParams,"user"),L(h,"aoStateSaveParams",r.fnStateSaveParams,"user"),L(h,"aoStateLoadParams",r.fnStateLoadParams,"user"),L(h,"aoStateLoaded",r.fnStateLoaded,"user"),L(h,"aoRowCallback",r.fnRowCallback,"user"),L(h,"aoRowCreatedCallback",r.fnCreatedRow,"user"),L(h,"aoHeaderCallback",r.fnHeaderCallback,"user"),L(h,"aoFooterCallback",r.fnFooterCallback,"user"),L(h,"aoInitComplete",r.fnInitComplete,"user"),L(h,"aoPreDrawCallback",r.fnPreDrawCallback,"user"),h.rowIdFn=A(r.rowId),tt(h),h.oClasses),g=(P.extend(p,w.ext.classes,r.oClasses),l.addClass(p.sTable),h.iInitDisplayStart===H&&(h.iInitDisplayStart=r.iDisplayStart,h._iDisplayStart=r.iDisplayStart),null!==r.iDeferLoading&&(h.bDeferLoading=!0,t=Array.isArray(r.iDeferLoading),h._iRecordsDisplay=t?r.iDeferLoading[0]:r.iDeferLoading,h._iRecordsTotal=t?r.iDeferLoading[1]:r.iDeferLoading),h.oLanguage),t=(P.extend(!0,g,r.oLanguage),g.sUrl?(P.ajax({dataType:"json",url:g.sUrl,success:function(t){C(e.oLanguage,t),Z(t),P.extend(!0,g,t,h.oInit.oLanguage),R(h,null,"i18n",[h]),Jt(h)},error:function(){Jt(h)}}),i=!0):R(h,null,"i18n",[h]),null===r.asStripeClasses&&(h.asStripeClasses=[p.sStripeOdd,p.sStripeEven]),h.asStripeClasses),b=l.children("tbody").find("tr").eq(0),m=(-1!==P.inArray(!0,P.map(t,function(t,e){return b.hasClass(t)}))&&(P("tbody tr",this).removeClass(t.join(" ")),h.asDestroyStripes=t.slice()),[]),t=this.getElementsByTagName("thead");if(0!==t.length&&(wt(h.aoHeader,t[0]),m=Ct(h)),null===r.aoColumns)for(f=[],o=0,s=m.length;o<s;o++)f.push(null);else f=r.aoColumns;for(o=0,s=f.length;o<s;o++)nt(h,m?m[o]:null);st(h,r.aoColumnDefs,f,function(t,e){at(h,t,e)}),b.length&&(d=function(t,e){return null!==t.getAttribute("data-"+e)?e:null},P(b[0]).children("th, td").each(function(t,e){var n,a=h.aoColumns[t];a||W(h,0,"Incorrect column count",18),a.mData===t&&(n=d(e,"sort")||d(e,"order"),e=d(e,"filter")||d(e,"search"),null===n&&null===e||(a.mData={_:t+".display",sort:null!==n?t+".@data-"+n:H,type:null!==n?t+".@data-"+n:H,filter:null!==e?t+".@data-"+e:H},a._isArrayHost=!0,at(h,t)))}));var S=h.oFeatures,t=function(){if(r.aaSorting===H){var t=h.aaSorting;for(o=0,s=t.length;o<s;o++)t[o][1]=h.aoColumns[o].asSorting[0]}ce(h),S.bSort&&L(h,"aoDrawCallback",function(){var t,n;h.bSorted&&(t=I(h),n={},P.each(t,function(t,e){n[e.src]=e.dir}),R(h,null,"order",[h,t,n]),le(h))}),L(h,"aoDrawCallback",function(){(h.bSorted||"ssp"===E(h)||S.bDeferRender)&&ce(h)},"sc");var e=l.children("caption").each(function(){this._captionSide=P(this).css("caption-side")}),n=l.children("thead"),a=(0===n.length&&(n=P("<thead/>").appendTo(l)),h.nTHead=n[0],l.children("tbody")),n=(0===a.length&&(a=P("<tbody/>").insertAfter(n)),h.nTBody=a[0],l.children("tfoot"));if(0===(n=0===n.length&&0<e.length&&(""!==h.oScroll.sX||""!==h.oScroll.sY)?P("<tfoot/>").appendTo(l):n).length||0===n.children().length?l.addClass(p.sNoFooter):0<n.length&&(h.nTFoot=n[0],wt(h.aoFooter,h.nTFoot)),r.aaData)for(o=0;o<r.aaData.length;o++)x(h,r.aaData[o]);else!h.bDeferLoading&&"dom"!=E(h)||ut(h,P(h.nTBody).children("tr"));h.aiDisplay=h.aiDisplayMaster.slice(),!(h.bInitialised=!0)===i&&Jt(h)};L(h,"aoDrawCallback",de,"state_save"),r.bStateSave?(S.bStateSave=!0,he(h,0,t)):t()}}),y=null,this},c={},U=/[\r\n\u2028]/g,V=/<.*?>/g,X=/^\d{2,4}[\.\/\-]\d{1,2}[\.\/\-]\d{1,2}([T ]{1}\d{1,2}[:\.]\d{2}([\.:]\d{2})?)?$/,J=new RegExp("(\\"+["/",".","*","+","?","|","(",")","[","]","{","}","\\","$","^","-"].join("|\\")+")","g"),q=/['\u00A0,$£€¥%\u2009\u202F\u20BD\u20a9\u20BArfkɃΞ]/gi,h=function(t){return!t||!0===t||"-"===t},$=function(t,e){return c[e]||(c[e]=new RegExp(Ot(e),"g")),"string"==typeof t&&"."!==e?t.replace(/\./g,"").replace(c[e],"."):t},N=function(t,e,n){var a=[],r=0,o=t.length;if(n!==H)for(;r<o;r++)t[r]&&t[r][e]&&a.push(t[r][e][n]);else for(;r<o;r++)t[r]&&a.push(t[r][e]);return a},G=function(t){if(!(t.length<2))for(var e=t.slice().sort(),n=e[0],a=1,r=e.length;a<r;a++){if(e[a]===n)return!1;n=e[a]}return!0},z=function(t){if(G(t))return t.slice();var e,n,a,r=[],o=t.length,i=0;t:for(n=0;n<o;n++){for(e=t[n],a=0;a<i;a++)if(r[a]===e)continue t;r.push(e),i++}return r},Y=function(t,e){if(Array.isArray(e))for(var n=0;n<e.length;n++)Y(t,e[n]);else t.push(e);return t};function i(n){var a,r,o={};P.each(n,function(t,e){(a=t.match(/^([^A-Z]+?)([A-Z])/))&&-1!=="a aa ai ao as b fn i m o s ".indexOf(a[1]+" ")&&(r=t.replace(a[0],a[2].toLowerCase()),o[r]=t,"o"===a[1])&&i(n[t])}),n._hungarianMap=o}function C(n,a,r){var o;n._hungarianMap||i(n),P.each(a,function(t,e){(o=n._hungarianMap[t])===H||!r&&a[o]!==H||("o"===o.charAt(0)?(a[o]||(a[o]={}),P.extend(!0,a[o],a[t]),C(n[o],a[o],r)):a[o]=a[t])})}function Z(t){var e,n=w.defaults.oLanguage,a=n.sDecimal;a&&Me(a),t&&(e=t.sZeroRecords,!t.sEmptyTable&&e&&"No data available in table"===n.sEmptyTable&&F(t,t,"sZeroRecords","sEmptyTable"),!t.sLoadingRecords&&e&&"Loading..."===n.sLoadingRecords&&F(t,t,"sZeroRecords","sLoadingRecords"),t.sInfoThousands&&(t.sThousands=t.sInfoThousands),e=t.sDecimal)&&a!==e&&Me(e)}Array.isArray||(Array.isArray=function(t){return"[object Array]"===Object.prototype.toString.call(t)}),Array.prototype.includes||(Array.prototype.includes=s),String.prototype.trim||(String.prototype.trim=function(){return this.replace(/^[\s\uFEFF\xA0]+|[\s\uFEFF\xA0]+$/g,"")}),String.prototype.includes||(String.prototype.includes=s),w.util={throttle:function(a,t){var r,o,i=t!==H?t:200;return function(){var t=this,e=+new Date,n=arguments;r&&e<r+i?(clearTimeout(o),o=setTimeout(function(){r=H,a.apply(t,n)},i)):(r=e,a.apply(t,n))}},escapeRegex:function(t){return t.replace(J,"\\$1")},set:function(a){var d;return P.isPlainObject(a)?w.util.set(a._):null===a?function(){}:"function"==typeof a?function(t,e,n){a(t,"set",e,n)}:"string"!=typeof a||-1===a.indexOf(".")&&-1===a.indexOf("[")&&-1===a.indexOf("(")?function(t,e){t[a]=e}:(d=function(t,e,n){for(var a,r,o,i,l=dt(n),n=l[l.length-1],s=0,u=l.length-1;s<u;s++){if("__proto__"===l[s]||"constructor"===l[s])throw new Error("Cannot set prototype values");if(a=l[s].match(ft),r=l[s].match(g),a){if(l[s]=l[s].replace(ft,""),t[l[s]]=[],(a=l.slice()).splice(0,s+1),i=a.join("."),Array.isArray(e))for(var c=0,f=e.length;c<f;c++)d(o={},e[c],i),t[l[s]].push(o);else t[l[s]]=e;return}r&&(l[s]=l[s].replace(g,""),t=t[l[s]](e)),null!==t[l[s]]&&t[l[s]]!==H||(t[l[s]]={}),t=t[l[s]]}n.match(g)?t[n.replace(g,"")](e):t[n.replace(ft,"")]=e},function(t,e){return d(t,e,a)})},get:function(r){var o,d;return P.isPlainObject(r)?(o={},P.each(r,function(t,e){e&&(o[t]=w.util.get(e))}),function(t,e,n,a){var r=o[e]||o._;return r!==H?r(t,e,n,a):t}):null===r?function(t){return t}:"function"==typeof r?function(t,e,n,a){return r(t,e,n,a)}:"string"!=typeof r||-1===r.indexOf(".")&&-1===r.indexOf("[")&&-1===r.indexOf("(")?function(t,e){return t[r]}:(d=function(t,e,n){var a,r,o;if(""!==n)for(var i=dt(n),l=0,s=i.length;l<s;l++){if(f=i[l].match(ft),a=i[l].match(g),f){if(i[l]=i[l].replace(ft,""),""!==i[l]&&(t=t[i[l]]),r=[],i.splice(0,l+1),o=i.join("."),Array.isArray(t))for(var u=0,c=t.length;u<c;u++)r.push(d(t[u],e,o));var f=f[0].substring(1,f[0].length-1);t=""===f?r:r.join(f);break}if(a)i[l]=i[l].replace(g,""),t=t[i[l]]();else{if(null===t||null===t[i[l]])return null;if(t===H||t[i[l]]===H)return H;t=t[i[l]]}}return t},function(t,e){return d(t,e,r)})}};var r=function(t,e,n){t[e]!==H&&(t[n]=t[e])};function K(t){r(t,"ordering","bSort"),r(t,"orderMulti","bSortMulti"),r(t,"orderClasses","bSortClasses"),r(t,"orderCellsTop","bSortCellsTop"),r(t,"order","aaSorting"),r(t,"orderFixed","aaSortingFixed"),r(t,"paging","bPaginate"),r(t,"pagingType","sPaginationType"),r(t,"pageLength","iDisplayLength"),r(t,"searching","bFilter"),"boolean"==typeof t.sScrollX&&(t.sScrollX=t.sScrollX?"100%":""),"boolean"==typeof t.scrollX&&(t.scrollX=t.scrollX?"100%":"");var e=t.aoSearchCols;if(e)for(var n=0,a=e.length;n<a;n++)e[n]&&C(w.models.oSearch,e[n])}function Q(t){r(t,"orderable","bSortable"),r(t,"orderData","aDataSort"),r(t,"orderSequence","asSorting"),r(t,"orderDataType","sortDataType");var e=t.aDataSort;"number"!=typeof e||Array.isArray(e)||(t.aDataSort=[e])}function tt(t){var e,n,a,r;w.__browser||(w.__browser=e={},r=(a=(n=P("<div/>").css({position:"fixed",top:0,left:-1*P(j).scrollLeft(),height:1,width:1,overflow:"hidden"}).append(P("<div/>").css({position:"absolute",top:1,left:1,width:100,overflow:"scroll"}).append(P("<div/>").css({width:"100%",height:10}))).appendTo("body")).children()).children(),e.barWidth=a[0].offsetWidth-a[0].clientWidth,e.bScrollOversize=100===r[0].offsetWidth&&100!==a[0].clientWidth,e.bScrollbarLeft=1!==Math.round(r.offset().left),e.bBounding=!!n[0].getBoundingClientRect().width,n.remove()),P.extend(t.oBrowser,w.__browser),t.oScroll.iBarWidth=w.__browser.barWidth}function et(t,e,n,a,r,o){var i,l=a,s=!1;for(n!==H&&(i=n,s=!0);l!==r;)t.hasOwnProperty(l)&&(i=s?e(i,t[l],l,t):t[l],s=!0,l+=o);return i}function nt(t,e){var n=w.defaults.column,a=t.aoColumns.length,n=P.extend({},w.models.oColumn,n,{nTh:e||v.createElement("th"),sTitle:n.sTitle||(e?e.innerHTML:""),aDataSort:n.aDataSort||[a],mData:n.mData||a,idx:a}),n=(t.aoColumns.push(n),t.aoPreSearchCols);n[a]=P.extend({},w.models.oSearch,n[a]),at(t,a,P(e).data())}function at(t,e,n){function a(t){return"string"==typeof t&&-1!==t.indexOf("@")}var e=t.aoColumns[e],r=t.oClasses,o=P(e.nTh),i=(!e.sWidthOrig&&(e.sWidthOrig=o.attr("width")||null,u=(o.attr("style")||"").match(/width:\s*(\d+[pxem%]+)/))&&(e.sWidthOrig=u[1]),n!==H&&null!==n&&(Q(n),C(w.defaults.column,n,!0),n.mDataProp===H||n.mData||(n.mData=n.mDataProp),n.sType&&(e._sManualType=n.sType),n.className&&!n.sClass&&(n.sClass=n.className),n.sClass&&o.addClass(n.sClass),u=e.sClass,P.extend(e,n),F(e,n,"sWidth","sWidthOrig"),u!==e.sClass&&(e.sClass=u+" "+e.sClass),n.iDataSort!==H&&(e.aDataSort=[n.iDataSort]),F(e,n,"aDataSort"),e.ariaTitle||(e.ariaTitle=o.attr("aria-label"))),e.mData),l=A(i),s=e.mRender?A(e.mRender):null,u=(e._bAttrSrc=P.isPlainObject(i)&&(a(i.sort)||a(i.type)||a(i.filter)),e._setter=null,e.fnGetData=function(t,e,n){var a=l(t,e,H,n);return s&&e?s(a,e,t,n):a},e.fnSetData=function(t,e,n){return b(i)(t,e,n)},"number"==typeof i||e._isArrayHost||(t._rowReadObject=!0),t.oFeatures.bSort||(e.bSortable=!1,o.addClass(r.sSortableNone)),-1!==P.inArray("asc",e.asSorting)),n=-1!==P.inArray("desc",e.asSorting);e.bSortable&&(u||n)?u&&!n?(e.sSortingClass=r.sSortableAsc,e.sSortingClassJUI=r.sSortJUIAscAllowed):!u&&n?(e.sSortingClass=r.sSortableDesc,e.sSortingClassJUI=r.sSortJUIDescAllowed):(e.sSortingClass=r.sSortable,e.sSortingClassJUI=r.sSortJUI):(e.sSortingClass=r.sSortableNone,e.sSortingClassJUI="")}function O(t){if(!1!==t.oFeatures.bAutoWidth){var e=t.aoColumns;ee(t);for(var n=0,a=e.length;n<a;n++)e[n].nTh.style.width=e[n].sWidth}var r=t.oScroll;""===r.sY&&""===r.sX||Qt(t),R(t,null,"column-sizing",[t])}function rt(t,e){t=it(t,"bVisible");return"number"==typeof t[e]?t[e]:null}function ot(t,e){t=it(t,"bVisible"),e=P.inArray(e,t);return-1!==e?e:null}function T(t){var n=0;return P.each(t.aoColumns,function(t,e){e.bVisible&&"none"!==P(e.nTh).css("display")&&n++}),n}function it(t,n){var a=[];return P.map(t.aoColumns,function(t,e){t[n]&&a.push(e)}),a}function lt(t){for(var e,n,a,r,o,i,l,s=t.aoColumns,u=t.aoData,c=w.ext.type.detect,f=0,d=s.length;f<d;f++)if(l=[],!(o=s[f]).sType&&o._sManualType)o.sType=o._sManualType;else if(!o.sType){for(e=0,n=c.length;e<n;e++){for(a=0,r=u.length;a<r&&(l[a]===H&&(l[a]=S(t,a,f,"type")),(i=c[e](l[a],t))||e===c.length-1)&&("html"!==i||h(l[a]));a++);if(i){o.sType=i;break}}o.sType||(o.sType="string")}}function st(t,e,n,a){var r,o,i,l,s=t.aoColumns;if(e)for(r=e.length-1;0<=r;r--)for(var u,c=(u=e[r]).target!==H?u.target:u.targets!==H?u.targets:u.aTargets,f=0,d=(c=Array.isArray(c)?c:[c]).length;f<d;f++)if("number"==typeof c[f]&&0<=c[f]){for(;s.length<=c[f];)nt(t);a(c[f],u)}else if("number"==typeof c[f]&&c[f]<0)a(s.length+c[f],u);else if("string"==typeof c[f])for(i=0,l=s.length;i<l;i++)"_all"!=c[f]&&!P(s[i].nTh).hasClass(c[f])||a(i,u);if(n)for(r=0,o=n.length;r<o;r++)a(r,n[r])}function x(t,e,n,a){for(var r=t.aoData.length,o=P.extend(!0,{},w.models.oRow,{src:n?"dom":"data",idx:r}),i=(o._aData=e,t.aoData.push(o),t.aoColumns),l=0,s=i.length;l<s;l++)i[l].sType=null;t.aiDisplayMaster.push(r);e=t.rowIdFn(e);return e!==H&&(t.aIds[e]=o),!n&&t.oFeatures.bDeferRender||St(t,r,n,a),r}function ut(n,t){var a;return(t=t instanceof P?t:P(t)).map(function(t,e){return a=mt(n,e),x(n,a.data,e,a.cells)})}function S(t,e,n,a){"search"===a?a="filter":"order"===a&&(a="sort");var r=t.iDraw,o=t.aoColumns[n],i=t.aoData[e]._aData,l=o.sDefaultContent,s=o.fnGetData(i,a,{settings:t,row:e,col:n});if(s===H)return t.iDrawError!=r&&null===l&&(W(t,0,"Requested unknown parameter "+("function"==typeof o.mData?"{function}":"'"+o.mData+"'")+" for row "+e+", column "+n,4),t.iDrawError=r),l;if(s!==i&&null!==s||null===l||a===H){if("function"==typeof s)return s.call(i)}else s=l;return null===s&&"display"===a?"":"filter"===a&&(e=w.ext.type.search)[o.sType]?e[o.sType](s):s}function ct(t,e,n,a){var r=t.aoColumns[n],o=t.aoData[e]._aData;r.fnSetData(o,a,{settings:t,row:e,col:n})}var ft=/\[.*?\]$/,g=/\(\)$/;function dt(t){return P.map(t.match(/(\\.|[^\.])+/g)||[""],function(t){return t.replace(/\\\./g,".")})}var A=w.util.get,b=w.util.set;function ht(t){return N(t.aoData,"_aData")}function pt(t){t.aoData.length=0,t.aiDisplayMaster.length=0,t.aiDisplay.length=0,t.aIds={}}function gt(t,e,n){for(var a=-1,r=0,o=t.length;r<o;r++)t[r]==e?a=r:t[r]>e&&t[r]--;-1!=a&&n===H&&t.splice(a,1)}function bt(n,a,t,e){function r(t,e){for(;t.childNodes.length;)t.removeChild(t.firstChild);t.innerHTML=S(n,a,e,"display")}var o,i,l=n.aoData[a];if("dom"!==t&&(t&&"auto"!==t||"dom"!==l.src)){var s=l.anCells;if(s)if(e!==H)r(s[e],e);else for(o=0,i=s.length;o<i;o++)r(s[o],o)}else l._aData=mt(n,l,e,e===H?H:l._aData).data;l._aSortData=null,l._aFilterData=null;var u=n.aoColumns;if(e!==H)u[e].sType=null;else{for(o=0,i=u.length;o<i;o++)u[o].sType=null;vt(n,l)}}function mt(t,e,n,a){function r(t,e){var n;"string"==typeof t&&-1!==(n=t.indexOf("@"))&&(n=t.substring(n+1),b(t)(a,e.getAttribute(n)))}function o(t){n!==H&&n!==f||(l=d[f],s=t.innerHTML.trim(),l&&l._bAttrSrc?(b(l.mData._)(a,s),r(l.mData.sort,t),r(l.mData.type,t),r(l.mData.filter,t)):h?(l._setter||(l._setter=b(l.mData)),l._setter(a,s)):a[f]=s),f++}var i,l,s,u=[],c=e.firstChild,f=0,d=t.aoColumns,h=t._rowReadObject;a=a!==H?a:h?{}:[];if(c)for(;c;)"TD"!=(i=c.nodeName.toUpperCase())&&"TH"!=i||(o(c),u.push(c)),c=c.nextSibling;else for(var p=0,g=(u=e.anCells).length;p<g;p++)o(u[p]);var e=e.firstChild?e:e.nTr;return e&&(e=e.getAttribute("id"))&&b(t.rowId)(a,e),{data:a,cells:u}}function St(t,e,n,a){var r,o,i,l,s,u,c=t.aoData[e],f=c._aData,d=[];if(null===c.nTr){for(r=n||v.createElement("tr"),c.nTr=r,c.anCells=d,r._DT_RowIndex=e,vt(t,c),l=0,s=t.aoColumns.length;l<s;l++)i=t.aoColumns[l],(o=(u=!n)?v.createElement(i.sCellType):a[l])||W(t,0,"Incorrect column count",18),o._DT_CellIndex={row:e,column:l},d.push(o),!u&&(!i.mRender&&i.mData===l||P.isPlainObject(i.mData)&&i.mData._===l+".display")||(o.innerHTML=S(t,e,l,"display")),i.sClass&&(o.className+=" "+i.sClass),i.bVisible&&!n?r.appendChild(o):!i.bVisible&&n&&o.parentNode.removeChild(o),i.fnCreatedCell&&i.fnCreatedCell.call(t.oInstance,o,S(t,e,l),f,e,l);R(t,"aoRowCreatedCallback",null,[r,f,e,d])}}function vt(t,e){var n=e.nTr,a=e._aData;n&&((t=t.rowIdFn(a))&&(n.id=t),a.DT_RowClass&&(t=a.DT_RowClass.split(" "),e.__rowc=e.__rowc?z(e.__rowc.concat(t)):t,P(n).removeClass(e.__rowc.join(" ")).addClass(a.DT_RowClass)),a.DT_RowAttr&&P(n).attr(a.DT_RowAttr),a.DT_RowData)&&P(n).data(a.DT_RowData)}function yt(t){var e,n,a,r=t.nTHead,o=t.nTFoot,i=0===P("th, td",r).length,l=t.oClasses,s=t.aoColumns;for(i&&(n=P("<tr/>").appendTo(r)),c=0,f=s.length;c<f;c++)a=s[c],e=P(a.nTh).addClass(a.sClass),i&&e.appendTo(n),t.oFeatures.bSort&&(e.addClass(a.sSortingClass),!1!==a.bSortable)&&(e.attr("tabindex",t.iTabIndex).attr("aria-controls",t.sTableId),ue(t,a.nTh,c)),a.sTitle!=e[0].innerHTML&&e.html(a.sTitle),ve(t,"header")(t,e,a,l);if(i&&wt(t.aoHeader,r),P(r).children("tr").children("th, td").addClass(l.sHeaderTH),P(o).children("tr").children("th, td").addClass(l.sFooterTH),null!==o)for(var u=t.aoFooter[0],c=0,f=u.length;c<f;c++)(a=s[c])?(a.nTf=u[c].cell,a.sClass&&P(a.nTf).addClass(a.sClass)):W(t,0,"Incorrect column count",18)}function Dt(t,e,n){var a,r,o,i,l,s,u,c,f,d=[],h=[],p=t.aoColumns.length;if(e){for(n===H&&(n=!1),a=0,r=e.length;a<r;a++){for(d[a]=e[a].slice(),d[a].nTr=e[a].nTr,o=p-1;0<=o;o--)t.aoColumns[o].bVisible||n||d[a].splice(o,1);h.push([])}for(a=0,r=d.length;a<r;a++){if(u=d[a].nTr)for(;s=u.firstChild;)u.removeChild(s);for(o=0,i=d[a].length;o<i;o++)if(f=c=1,h[a][o]===H){for(u.appendChild(d[a][o].cell),h[a][o]=1;d[a+c]!==H&&d[a][o].cell==d[a+c][o].cell;)h[a+c][o]=1,c++;for(;d[a][o+f]!==H&&d[a][o].cell==d[a][o+f].cell;){for(l=0;l<c;l++)h[a+l][o+f]=1;f++}P(d[a][o].cell).attr("rowspan",c).attr("colspan",f)}}}}function y(t,e){n="ssp"==E(s=t),(l=s.iInitDisplayStart)!==H&&-1!==l&&(s._iDisplayStart=!n&&l>=s.fnRecordsDisplay()?0:l,s.iInitDisplayStart=-1);var n=R(t,"aoPreDrawCallback","preDraw",[t]);if(-1!==P.inArray(!1,n))D(t,!1);else{var a=[],r=0,o=t.asStripeClasses,i=o.length,l=t.oLanguage,s="ssp"==E(t),u=t.aiDisplay,n=t._iDisplayStart,c=t.fnDisplayEnd();if(t.bDrawing=!0,t.bDeferLoading)t.bDeferLoading=!1,t.iDraw++,D(t,!1);else if(s){if(!t.bDestroying&&!e)return void xt(t)}else t.iDraw++;if(0!==u.length)for(var f=s?t.aoData.length:c,d=s?0:n;d<f;d++){var h,p=u[d],g=t.aoData[p],b=(null===g.nTr&&St(t,p),g.nTr);0!==i&&(h=o[r%i],g._sRowStripe!=h)&&(P(b).removeClass(g._sRowStripe).addClass(h),g._sRowStripe=h),R(t,"aoRowCallback",null,[b,g._aData,r,d,p]),a.push(b),r++}else{e=l.sZeroRecords;1==t.iDraw&&"ajax"==E(t)?e=l.sLoadingRecords:l.sEmptyTable&&0===t.fnRecordsTotal()&&(e=l.sEmptyTable),a[0]=P("<tr/>",{class:i?o[0]:""}).append(P("<td />",{valign:"top",colSpan:T(t),class:t.oClasses.sRowEmpty}).html(e))[0]}R(t,"aoHeaderCallback","header",[P(t.nTHead).children("tr")[0],ht(t),n,c,u]),R(t,"aoFooterCallback","footer",[P(t.nTFoot).children("tr")[0],ht(t),n,c,u]);s=P(t.nTBody);s.children().detach(),s.append(P(a)),R(t,"aoDrawCallback","draw",[t]),t.bSorted=!1,t.bFiltered=!1,t.bDrawing=!1}}function u(t,e){var n=t.oFeatures,a=n.bSort,n=n.bFilter;a&&ie(t),n?Rt(t,t.oPreviousSearch):t.aiDisplay=t.aiDisplayMaster.slice(),!0!==e&&(t._iDisplayStart=0),t._drawHold=e,y(t),t._drawHold=!1}function _t(t){for(var e,n,a,r,o,i,l,s=t.oClasses,u=P(t.nTable),u=P("<div/>").insertBefore(u),c=t.oFeatures,f=P("<div/>",{id:t.sTableId+"_wrapper",class:s.sWrapper+(t.nTFoot?"":" "+s.sNoFooter)}),d=(t.nHolding=u[0],t.nTableWrapper=f[0],t.nTableReinsertBefore=t.nTable.nextSibling,t.sDom.split("")),h=0;h<d.length;h++){if(e=null,"<"==(n=d[h])){if(a=P("<div/>")[0],"'"==(r=d[h+1])||'"'==r){for(o="",i=2;d[h+i]!=r;)o+=d[h+i],i++;"H"==o?o=s.sJUIHeader:"F"==o&&(o=s.sJUIFooter),-1!=o.indexOf(".")?(l=o.split("."),a.id=l[0].substr(1,l[0].length-1),a.className=l[1]):"#"==o.charAt(0)?a.id=o.substr(1,o.length-1):a.className=o,h+=i}f.append(a),f=P(a)}else if(">"==n)f=f.parent();else if("l"==n&&c.bPaginate&&c.bLengthChange)e=Gt(t);else if("f"==n&&c.bFilter)e=Lt(t);else if("r"==n&&c.bProcessing)e=Zt(t);else if("t"==n)e=Kt(t);else if("i"==n&&c.bInfo)e=Ut(t);else if("p"==n&&c.bPaginate)e=zt(t);else if(0!==w.ext.feature.length)for(var p=w.ext.feature,g=0,b=p.length;g<b;g++)if(n==p[g].cFeature){e=p[g].fnInit(t);break}e&&((l=t.aanFeatures)[n]||(l[n]=[]),l[n].push(e),f.append(e))}u.replaceWith(f),t.nHolding=null}function wt(t,e){var n,a,r,o,i,l,s,u,c,f,d=P(e).children("tr");for(t.splice(0,t.length),r=0,l=d.length;r<l;r++)t.push([]);for(r=0,l=d.length;r<l;r++)for(a=(n=d[r]).firstChild;a;){if("TD"==a.nodeName.toUpperCase()||"TH"==a.nodeName.toUpperCase())for(u=(u=+a.getAttribute("colspan"))&&0!=u&&1!=u?u:1,c=(c=+a.getAttribute("rowspan"))&&0!=c&&1!=c?c:1,s=function(t,e,n){for(var a=t[e];a[n];)n++;return n}(t,r,0),f=1==u,i=0;i<u;i++)for(o=0;o<c;o++)t[r+o][s+i]={cell:a,unique:f},t[r+o].nTr=n;a=a.nextSibling}}function Ct(t,e,n){var a=[];n||(n=t.aoHeader,e&&wt(n=[],e));for(var r=0,o=n.length;r<o;r++)for(var i=0,l=n[r].length;i<l;i++)!n[r][i].unique||a[i]&&t.bSortCellsTop||(a[i]=n[r][i].cell);return a}function Tt(r,t,n){function e(t){var e=r.jqXHR?r.jqXHR.status:null;(null===t||"number"==typeof e&&204==e)&&Ft(r,t={},[]),(e=t.error||t.sError)&&W(r,0,e),r.json=t,R(r,null,"xhr",[r,t,r.jqXHR]),n(t)}R(r,"aoServerParams","serverParams",[t]),t&&Array.isArray(t)&&(a={},o=/(.*?)\[\]$/,P.each(t,function(t,e){var n=e.name.match(o);n?(n=n[0],a[n]||(a[n]=[]),a[n].push(e.value)):a[e.name]=e.value}),t=a);var a,o,i,l=r.ajax,s=r.oInstance,u=(P.isPlainObject(l)&&l.data&&(u="function"==typeof(i=l.data)?i(t,r):i,t="function"==typeof i&&u?u:P.extend(!0,t,u),delete l.data),{data:t,success:e,dataType:"json",cache:!1,type:r.sServerMethod,error:function(t,e,n){var a=R(r,null,"xhr",[r,null,r.jqXHR]);-1===P.inArray(!0,a)&&("parsererror"==e?W(r,0,"Invalid JSON response",1):4===t.readyState&&W(r,0,"Ajax error",7)),D(r,!1)}});r.oAjaxData=t,R(r,null,"preXhr",[r,t]),r.fnServerData?r.fnServerData.call(s,r.sAjaxSource,P.map(t,function(t,e){return{name:e,value:t}}),e,r):r.sAjaxSource||"string"==typeof l?r.jqXHR=P.ajax(P.extend(u,{url:l||r.sAjaxSource})):"function"==typeof l?r.jqXHR=l.call(s,t,e,r):(r.jqXHR=P.ajax(P.extend(u,l)),l.data=i)}function xt(e){e.iDraw++,D(e,!0);var n=e._drawHold;Tt(e,At(e),function(t){e._drawHold=n,It(e,t),e._drawHold=!1})}function At(t){for(var e,n,a,r=t.aoColumns,o=r.length,i=t.oFeatures,l=t.oPreviousSearch,s=t.aoPreSearchCols,u=[],c=I(t),f=t._iDisplayStart,d=!1!==i.bPaginate?t._iDisplayLength:-1,h=function(t,e){u.push({name:t,value:e})},p=(h("sEcho",t.iDraw),h("iColumns",o),h("sColumns",N(r,"sName").join(",")),h("iDisplayStart",f),h("iDisplayLength",d),{draw:t.iDraw,columns:[],order:[],start:f,length:d,search:{value:l.sSearch,regex:l.bRegex}}),g=0;g<o;g++)n=r[g],a=s[g],e="function"==typeof n.mData?"function":n.mData,p.columns.push({data:e,name:n.sName,searchable:n.bSearchable,orderable:n.bSortable,search:{value:a.sSearch,regex:a.bRegex}}),h("mDataProp_"+g,e),i.bFilter&&(h("sSearch_"+g,a.sSearch),h("bRegex_"+g,a.bRegex),h("bSearchable_"+g,n.bSearchable)),i.bSort&&h("bSortable_"+g,n.bSortable);i.bFilter&&(h("sSearch",l.sSearch),h("bRegex",l.bRegex)),i.bSort&&(P.each(c,function(t,e){p.order.push({column:e.col,dir:e.dir}),h("iSortCol_"+t,e.col),h("sSortDir_"+t,e.dir)}),h("iSortingCols",c.length));f=w.ext.legacy.ajax;return null===f?t.sAjaxSource?u:p:f?u:p}function It(t,n){function e(t,e){return n[t]!==H?n[t]:n[e]}var a=Ft(t,n),r=e("sEcho","draw"),o=e("iTotalRecords","recordsTotal"),i=e("iTotalDisplayRecords","recordsFiltered");if(r!==H){if(+r<t.iDraw)return;t.iDraw=+r}a=a||[],pt(t),t._iRecordsTotal=parseInt(o,10),t._iRecordsDisplay=parseInt(i,10);for(var l=0,s=a.length;l<s;l++)x(t,a[l]);t.aiDisplay=t.aiDisplayMaster.slice(),y(t,!0),t._bInitComplete||qt(t,n),D(t,!1)}function Ft(t,e,n){t=P.isPlainObject(t.ajax)&&t.ajax.dataSrc!==H?t.ajax.dataSrc:t.sAjaxDataProp;if(!n)return"data"===t?e.aaData||e[t]:""!==t?A(t)(e):e;b(t)(e,n)}function Lt(n){function e(t){i.f;var e=this.value||"";o.return&&"Enter"!==t.key||e!=o.sSearch&&(Rt(n,{sSearch:e,bRegex:o.bRegex,bSmart:o.bSmart,bCaseInsensitive:o.bCaseInsensitive,return:o.return}),n._iDisplayStart=0,y(n))}var t=n.oClasses,a=n.sTableId,r=n.oLanguage,o=n.oPreviousSearch,i=n.aanFeatures,l='<input type="search" class="'+t.sFilterInput+'"/>',s=(s=r.sSearch).match(/_INPUT_/)?s.replace("_INPUT_",l):s+l,l=P("<div/>",{id:i.f?null:a+"_filter",class:t.sFilter}).append(P("<label/>").append(s)),t=null!==n.searchDelay?n.searchDelay:"ssp"===E(n)?400:0,u=P("input",l).val(o.sSearch).attr("placeholder",r.sSearchPlaceholder).on("keyup.DT search.DT input.DT paste.DT cut.DT",t?ne(e,t):e).on("mouseup.DT",function(t){setTimeout(function(){e.call(u[0],t)},10)}).on("keypress.DT",function(t){if(13==t.keyCode)return!1}).attr("aria-controls",a);return P(n.nTable).on("search.dt.DT",function(t,e){if(n===e)try{u[0]!==v.activeElement&&u.val(o.sSearch)}catch(t){}}),l[0]}function Rt(t,e,n){function a(t){o.sSearch=t.sSearch,o.bRegex=t.bRegex,o.bSmart=t.bSmart,o.bCaseInsensitive=t.bCaseInsensitive,o.return=t.return}function r(t){return t.bEscapeRegex!==H?!t.bEscapeRegex:t.bRegex}var o=t.oPreviousSearch,i=t.aoPreSearchCols;if(lt(t),"ssp"!=E(t)){Ht(t,e.sSearch,n,r(e),e.bSmart,e.bCaseInsensitive),a(e);for(var l=0;l<i.length;l++)jt(t,i[l].sSearch,l,r(i[l]),i[l].bSmart,i[l].bCaseInsensitive);Pt(t)}else a(e);t.bFiltered=!0,R(t,null,"search",[t])}function Pt(t){for(var e,n,a=w.ext.search,r=t.aiDisplay,o=0,i=a.length;o<i;o++){for(var l=[],s=0,u=r.length;s<u;s++)n=r[s],e=t.aoData[n],a[o](t,e._aFilterData,n,e._aData,s)&&l.push(n);r.length=0,P.merge(r,l)}}function jt(t,e,n,a,r,o){if(""!==e){for(var i,l=[],s=t.aiDisplay,u=Nt(e,a,r,o),c=0;c<s.length;c++)i=t.aoData[s[c]]._aFilterData[n],u.test(i)&&l.push(s[c]);t.aiDisplay=l}}function Ht(t,e,n,a,r,o){var i,l,s,u=Nt(e,a,r,o),r=t.oPreviousSearch.sSearch,o=t.aiDisplayMaster,c=[];if(0!==w.ext.search.length&&(n=!0),l=Wt(t),e.length<=0)t.aiDisplay=o.slice();else{for((l||n||a||r.length>e.length||0!==e.indexOf(r)||t.bSorted)&&(t.aiDisplay=o.slice()),i=t.aiDisplay,s=0;s<i.length;s++)u.test(t.aoData[i[s]]._sFilterRow)&&c.push(i[s]);t.aiDisplay=c}}function Nt(t,e,n,a){return t=e?t:Ot(t),n&&(t="^(?=.*?"+P.map(t.match(/["\u201C][^"\u201D]+["\u201D]|[^ ]+/g)||[""],function(t){var e;return'"'===t.charAt(0)?t=(e=t.match(/^"(.*)"$/))?e[1]:t:"“"===t.charAt(0)&&(t=(e=t.match(/^\u201C(.*)\u201D$/))?e[1]:t),t.replace('"',"")}).join(")(?=.*?")+").*$"),new RegExp(t,a?"i":"")}var Ot=w.util.escapeRegex,kt=P("<div>")[0],Mt=kt.textContent!==H;function Wt(t){for(var e,n,a,r,o,i=t.aoColumns,l=!1,s=0,u=t.aoData.length;s<u;s++)if(!(o=t.aoData[s])._aFilterData){for(a=[],e=0,n=i.length;e<n;e++)i[e].bSearchable?"string"!=typeof(r=null===(r=S(t,s,e,"filter"))?"":r)&&r.toString&&(r=r.toString()):r="",r.indexOf&&-1!==r.indexOf("&")&&(kt.innerHTML=r,r=Mt?kt.textContent:kt.innerText),r.replace&&(r=r.replace(/[\r\n\u2028]/g,"")),a.push(r);o._aFilterData=a,o._sFilterRow=a.join(" "),l=!0}return l}function Et(t){return{search:t.sSearch,smart:t.bSmart,regex:t.bRegex,caseInsensitive:t.bCaseInsensitive}}function Bt(t){return{sSearch:t.search,bSmart:t.smart,bRegex:t.regex,bCaseInsensitive:t.caseInsensitive}}function Ut(t){var e=t.sTableId,n=t.aanFeatures.i,a=P("<div/>",{class:t.oClasses.sInfo,id:n?null:e+"_info"});return n||(t.aoDrawCallback.push({fn:Vt,sName:"information"}),a.attr("role","status").attr("aria-live","polite"),P(t.nTable).attr("aria-describedby",e+"_info")),a[0]}function Vt(t){var e,n,a,r,o,i,l=t.aanFeatures.i;0!==l.length&&(i=t.oLanguage,e=t._iDisplayStart+1,n=t.fnDisplayEnd(),a=t.fnRecordsTotal(),o=(r=t.fnRecordsDisplay())?i.sInfo:i.sInfoEmpty,r!==a&&(o+=" "+i.sInfoFiltered),o=Xt(t,o+=i.sInfoPostFix),null!==(i=i.fnInfoCallback)&&(o=i.call(t.oInstance,t,e,n,a,r,o)),P(l).html(o))}function Xt(t,e){var n=t.fnFormatNumber,a=t._iDisplayStart+1,r=t._iDisplayLength,o=t.fnRecordsDisplay(),i=-1===r;return e.replace(/_START_/g,n.call(t,a)).replace(/_END_/g,n.call(t,t.fnDisplayEnd())).replace(/_MAX_/g,n.call(t,t.fnRecordsTotal())).replace(/_TOTAL_/g,n.call(t,o)).replace(/_PAGE_/g,n.call(t,i?1:Math.ceil(a/r))).replace(/_PAGES_/g,n.call(t,i?1:Math.ceil(o/r)))}function Jt(n){var a,t,e,r=n.iInitDisplayStart,o=n.aoColumns,i=n.oFeatures,l=n.bDeferLoading;if(n.bInitialised){for(_t(n),yt(n),Dt(n,n.aoHeader),Dt(n,n.aoFooter),D(n,!0),i.bAutoWidth&&ee(n),a=0,t=o.length;a<t;a++)(e=o[a]).sWidth&&(e.nTh.style.width=M(e.sWidth));R(n,null,"preInit",[n]),u(n);i=E(n);"ssp"==i&&!l||("ajax"==i?Tt(n,[],function(t){var e=Ft(n,t);for(a=0;a<e.length;a++)x(n,e[a]);n.iInitDisplayStart=r,u(n),D(n,!1),qt(n,t)}):(D(n,!1),qt(n)))}else setTimeout(function(){Jt(n)},200)}function qt(t,e){t._bInitComplete=!0,(e||t.oInit.aaData)&&O(t),R(t,null,"plugin-init",[t,e]),R(t,"aoInitComplete","init",[t,e])}function $t(t,e){e=parseInt(e,10);t._iDisplayLength=e,Se(t),R(t,null,"length",[t,e])}function Gt(a){for(var t=a.oClasses,e=a.sTableId,n=a.aLengthMenu,r=Array.isArray(n[0]),o=r?n[0]:n,i=r?n[1]:n,l=P("<select/>",{name:e+"_length","aria-controls":e,class:t.sLengthSelect}),s=0,u=o.length;s<u;s++)l[0][s]=new Option("number"==typeof i[s]?a.fnFormatNumber(i[s]):i[s],o[s]);var c=P("<div><label/></div>").addClass(t.sLength);return a.aanFeatures.l||(c[0].id=e+"_length"),c.children().append(a.oLanguage.sLengthMenu.replace("_MENU_",l[0].outerHTML)),P("select",c).val(a._iDisplayLength).on("change.DT",function(t){$t(a,P(this).val()),y(a)}),P(a.nTable).on("length.dt.DT",function(t,e,n){a===e&&P("select",c).val(n)}),c[0]}function zt(t){function c(t){y(t)}var e=t.sPaginationType,f=w.ext.pager[e],d="function"==typeof f,e=P("<div/>").addClass(t.oClasses.sPaging+e)[0],h=t.aanFeatures;return d||f.fnInit(t,e,c),h.p||(e.id=t.sTableId+"_paginate",t.aoDrawCallback.push({fn:function(t){if(d)for(var e=t._iDisplayStart,n=t._iDisplayLength,a=t.fnRecordsDisplay(),r=-1===n,o=r?0:Math.ceil(e/n),i=r?1:Math.ceil(a/n),l=f(o,i),s=0,u=h.p.length;s<u;s++)ve(t,"pageButton")(t,h.p[s],s,l,o,i);else f.fnUpdate(t,c)},sName:"pagination"})),e}function Yt(t,e,n){var a=t._iDisplayStart,r=t._iDisplayLength,o=t.fnRecordsDisplay(),o=(0===o||-1===r?a=0:"number"==typeof e?o<(a=e*r)&&(a=0):"first"==e?a=0:"previous"==e?(a=0<=r?a-r:0)<0&&(a=0):"next"==e?a+r<o&&(a+=r):"last"==e?a=Math.floor((o-1)/r)*r:W(t,0,"Unknown paging action: "+e,5),t._iDisplayStart!==a);return t._iDisplayStart=a,o?(R(t,null,"page",[t]),n&&y(t)):R(t,null,"page-nc",[t]),o}function Zt(t){return P("<div/>",{id:t.aanFeatures.r?null:t.sTableId+"_processing",class:t.oClasses.sProcessing,role:"status"}).html(t.oLanguage.sProcessing).append("<div><div></div><div></div><div></div><div></div></div>").insertBefore(t.nTable)[0]}function D(t,e){t.oFeatures.bProcessing&&P(t.aanFeatures.r).css("display",e?"block":"none"),R(t,null,"processing",[t,e])}function Kt(t){var e,n,a,r,o,i,l,s,u,c,f,d,h=P(t.nTable),p=t.oScroll;return""===p.sX&&""===p.sY?t.nTable:(e=p.sX,n=p.sY,a=t.oClasses,o=(r=h.children("caption")).length?r[0]._captionSide:null,s=P(h[0].cloneNode(!1)),i=P(h[0].cloneNode(!1)),u=function(t){return t?M(t):null},(l=h.children("tfoot")).length||(l=null),s=P(f="<div/>",{class:a.sScrollWrapper}).append(P(f,{class:a.sScrollHead}).css({overflow:"hidden",position:"relative",border:0,width:e?u(e):"100%"}).append(P(f,{class:a.sScrollHeadInner}).css({"box-sizing":"content-box",width:p.sXInner||"100%"}).append(s.removeAttr("id").css("margin-left",0).append("top"===o?r:null).append(h.children("thead"))))).append(P(f,{class:a.sScrollBody}).css({position:"relative",overflow:"auto",width:u(e)}).append(h)),l&&s.append(P(f,{class:a.sScrollFoot}).css({overflow:"hidden",border:0,width:e?u(e):"100%"}).append(P(f,{class:a.sScrollFootInner}).append(i.removeAttr("id").css("margin-left",0).append("bottom"===o?r:null).append(h.children("tfoot"))))),u=s.children(),c=u[0],f=u[1],d=l?u[2]:null,e&&P(f).on("scroll.DT",function(t){var e=this.scrollLeft;c.scrollLeft=e,l&&(d.scrollLeft=e)}),P(f).css("max-height",n),p.bCollapse||P(f).css("height",n),t.nScrollHead=c,t.nScrollBody=f,t.nScrollFoot=d,t.aoDrawCallback.push({fn:Qt,sName:"scrolling"}),s[0])}function Qt(n){function t(t){(t=t.style).paddingTop="0",t.paddingBottom="0",t.borderTopWidth="0",t.borderBottomWidth="0",t.height=0}var e,a,r,o,i,l=n.oScroll,s=l.sX,u=l.sXInner,c=l.sY,l=l.iBarWidth,f=P(n.nScrollHead),d=f[0].style,h=f.children("div"),p=h[0].style,h=h.children("table"),g=n.nScrollBody,b=P(g),m=g.style,S=P(n.nScrollFoot).children("div"),v=S.children("table"),y=P(n.nTHead),D=P(n.nTable),_=D[0],w=_.style,C=n.nTFoot?P(n.nTFoot):null,T=n.oBrowser,x=T.bScrollOversize,A=(N(n.aoColumns,"nTh"),[]),I=[],F=[],L=[],R=g.scrollHeight>g.clientHeight;n.scrollBarVis!==R&&n.scrollBarVis!==H?(n.scrollBarVis=R,O(n)):(n.scrollBarVis=R,D.children("thead, tfoot").remove(),C&&(R=C.clone().prependTo(D),i=C.find("tr"),a=R.find("tr"),R.find("[id]").removeAttr("id")),R=y.clone().prependTo(D),y=y.find("tr"),e=R.find("tr"),R.find("th, td").removeAttr("tabindex"),R.find("[id]").removeAttr("id"),s||(m.width="100%",f[0].style.width="100%"),P.each(Ct(n,R),function(t,e){r=rt(n,t),e.style.width=n.aoColumns[r].sWidth}),C&&k(function(t){t.style.width=""},a),f=D.outerWidth(),""===s?(w.width="100%",x&&(D.find("tbody").height()>g.offsetHeight||"scroll"==b.css("overflow-y"))&&(w.width=M(D.outerWidth()-l)),f=D.outerWidth()):""!==u&&(w.width=M(u),f=D.outerWidth()),k(t,e),k(function(t){var e=j.getComputedStyle?j.getComputedStyle(t).width:M(P(t).width());F.push(t.innerHTML),A.push(e)},e),k(function(t,e){t.style.width=A[e]},y),P(e).css("height",0),C&&(k(t,a),k(function(t){L.push(t.innerHTML),I.push(M(P(t).css("width")))},a),k(function(t,e){t.style.width=I[e]},i),P(a).height(0)),k(function(t,e){t.innerHTML='<div class="dataTables_sizing">'+F[e]+"</div>",t.childNodes[0].style.height="0",t.childNodes[0].style.overflow="hidden",t.style.width=A[e]},e),C&&k(function(t,e){t.innerHTML='<div class="dataTables_sizing">'+L[e]+"</div>",t.childNodes[0].style.height="0",t.childNodes[0].style.overflow="hidden",t.style.width=I[e]},a),Math.round(D.outerWidth())<Math.round(f)?(o=g.scrollHeight>g.offsetHeight||"scroll"==b.css("overflow-y")?f+l:f,x&&(g.scrollHeight>g.offsetHeight||"scroll"==b.css("overflow-y"))&&(w.width=M(o-l)),""!==s&&""===u||W(n,1,"Possible column misalignment",6)):o="100%",m.width=M(o),d.width=M(o),C&&(n.nScrollFoot.style.width=M(o)),c||x&&(m.height=M(_.offsetHeight+l)),R=D.outerWidth(),h[0].style.width=M(R),p.width=M(R),y=D.height()>g.clientHeight||"scroll"==b.css("overflow-y"),p[i="padding"+(T.bScrollbarLeft?"Left":"Right")]=y?l+"px":"0px",C&&(v[0].style.width=M(R),S[0].style.width=M(R),S[0].style[i]=y?l+"px":"0px"),D.children("colgroup").insertBefore(D.children("thead")),b.trigger("scroll"),!n.bSorted&&!n.bFiltered||n._drawHold||(g.scrollTop=0))}function k(t,e,n){for(var a,r,o=0,i=0,l=e.length;i<l;){for(a=e[i].firstChild,r=n?n[i].firstChild:null;a;)1===a.nodeType&&(n?t(a,r,o):t(a,o),o++),a=a.nextSibling,r=n?r.nextSibling:null;i++}}var te=/<.*?>/g;function ee(t){var e,n,a=t.nTable,r=t.aoColumns,o=t.oScroll,i=o.sY,l=o.sX,o=o.sXInner,s=r.length,u=it(t,"bVisible"),c=P("th",t.nTHead),f=a.getAttribute("width"),d=a.parentNode,h=!1,p=t.oBrowser,g=p.bScrollOversize,b=a.style.width,m=(b&&-1!==b.indexOf("%")&&(f=b),ae(N(r,"sWidthOrig"),d));for(_=0;_<u.length;_++)null!==(e=r[u[_]]).sWidth&&(e.sWidth=m[_],h=!0);if(g||!h&&!l&&!i&&s==T(t)&&s==c.length)for(_=0;_<s;_++){var S=rt(t,_);null!==S&&(r[S].sWidth=M(c.eq(_).width()))}else{var b=P(a).clone().css("visibility","hidden").removeAttr("id"),v=(b.find("tbody tr").remove(),P("<tr/>").appendTo(b.find("tbody")));for(b.find("thead, tfoot").remove(),b.append(P(t.nTHead).clone()).append(P(t.nTFoot).clone()),b.find("tfoot th, tfoot td").css("width",""),c=Ct(t,b.find("thead")[0]),_=0;_<u.length;_++)e=r[u[_]],c[_].style.width=null!==e.sWidthOrig&&""!==e.sWidthOrig?M(e.sWidthOrig):"",e.sWidthOrig&&l&&P(c[_]).append(P("<div/>").css({width:e.sWidthOrig,margin:0,padding:0,border:0,height:1}));if(t.aoData.length)for(_=0;_<u.length;_++)e=r[n=u[_]],P(re(t,n)).clone(!1).append(e.sContentPadding).appendTo(v);P("[name]",b).removeAttr("name");for(var y=P("<div/>").css(l||i?{position:"absolute",top:0,left:0,height:1,right:0,overflow:"hidden"}:{}).append(b).appendTo(d),D=(l&&o?b.width(o):l?(b.css("width","auto"),b.removeAttr("width"),b.width()<d.clientWidth&&f&&b.width(d.clientWidth)):i?b.width(d.clientWidth):f&&b.width(f),0),_=0;_<u.length;_++){var w=P(c[_]),C=w.outerWidth()-w.width(),w=p.bBounding?Math.ceil(c[_].getBoundingClientRect().width):w.outerWidth();D+=w,r[u[_]].sWidth=M(w-C)}a.style.width=M(D),y.remove()}f&&(a.style.width=M(f)),!f&&!l||t._reszEvt||(o=function(){P(j).on("resize.DT-"+t.sInstance,ne(function(){O(t)}))},g?setTimeout(o,1e3):o(),t._reszEvt=!0)}var ne=w.util.throttle;function ae(t,e){for(var n=[],a=[],r=0;r<t.length;r++)t[r]?n.push(P("<div/>").css("width",M(t[r])).appendTo(e||v.body)):n.push(null);for(r=0;r<t.length;r++)a.push(n[r]?n[r][0].offsetWidth:null);return P(n).remove(),a}function re(t,e){var n,a=oe(t,e);return a<0?null:(n=t.aoData[a]).nTr?n.anCells[e]:P("<td/>").html(S(t,a,e,"display"))[0]}function oe(t,e){for(var n,a=-1,r=-1,o=0,i=t.aoData.length;o<i;o++)(n=(n=(n=S(t,o,e,"display")+"").replace(te,"")).replace(/&nbsp;/g," ")).length>a&&(a=n.length,r=o);return r}function M(t){return null===t?"0px":"number"==typeof t?t<0?"0px":t+"px":t.match(/\d$/)?t+"px":t}function I(t){function e(t){t.length&&!Array.isArray(t[0])?h.push(t):P.merge(h,t)}var n,a,r,o,i,l,s,u=[],c=t.aoColumns,f=t.aaSortingFixed,d=P.isPlainObject(f),h=[];for(Array.isArray(f)&&e(f),d&&f.pre&&e(f.pre),e(t.aaSorting),d&&f.post&&e(f.post),n=0;n<h.length;n++)for(r=(o=c[s=h[n][a=0]].aDataSort).length;a<r;a++)l=c[i=o[a]].sType||"string",h[n]._idx===H&&(h[n]._idx=P.inArray(h[n][1],c[i].asSorting)),u.push({src:s,col:i,dir:h[n][1],index:h[n]._idx,type:l,formatter:w.ext.type.order[l+"-pre"]});return u}function ie(t){var e,n,a,r,c,f=[],u=w.ext.type.order,d=t.aoData,o=(t.aoColumns,0),i=t.aiDisplayMaster;for(lt(t),e=0,n=(c=I(t)).length;e<n;e++)(r=c[e]).formatter&&o++,fe(t,r.col);if("ssp"!=E(t)&&0!==c.length){for(e=0,a=i.length;e<a;e++)f[i[e]]=e;o===c.length?i.sort(function(t,e){for(var n,a,r,o,i=c.length,l=d[t]._aSortData,s=d[e]._aSortData,u=0;u<i;u++)if(0!=(r=(n=l[(o=c[u]).col])<(a=s[o.col])?-1:a<n?1:0))return"asc"===o.dir?r:-r;return(n=f[t])<(a=f[e])?-1:a<n?1:0}):i.sort(function(t,e){for(var n,a,r,o=c.length,i=d[t]._aSortData,l=d[e]._aSortData,s=0;s<o;s++)if(n=i[(r=c[s]).col],a=l[r.col],0!==(r=(u[r.type+"-"+r.dir]||u["string-"+r.dir])(n,a)))return r;return(n=f[t])<(a=f[e])?-1:a<n?1:0})}t.bSorted=!0}function le(t){for(var e=t.aoColumns,n=I(t),a=t.oLanguage.oAria,r=0,o=e.length;r<o;r++){var i=e[r],l=i.asSorting,s=i.ariaTitle||i.sTitle.replace(/<.*?>/g,""),u=i.nTh;u.removeAttribute("aria-sort"),i=i.bSortable?s+("asc"===(0<n.length&&n[0].col==r&&(u.setAttribute("aria-sort","asc"==n[0].dir?"ascending":"descending"),l[n[0].index+1])||l[0])?a.sSortAscending:a.sSortDescending):s,u.setAttribute("aria-label",i)}}function se(t,e,n,a){function r(t,e){var n=t._idx;return(n=n===H?P.inArray(t[1],s):n)+1<s.length?n+1:e?null:0}var o,i=t.aoColumns[e],l=t.aaSorting,s=i.asSorting;"number"==typeof l[0]&&(l=t.aaSorting=[l]),n&&t.oFeatures.bSortMulti?-1!==(i=P.inArray(e,N(l,"0")))?null===(o=null===(o=r(l[i],!0))&&1===l.length?0:o)?l.splice(i,1):(l[i][1]=s[o],l[i]._idx=o):(l.push([e,s[0],0]),l[l.length-1]._idx=0):l.length&&l[0][0]==e?(o=r(l[0]),l.length=1,l[0][1]=s[o],l[0]._idx=o):(l.length=0,l.push([e,s[0]]),l[0]._idx=0),u(t),"function"==typeof a&&a(t)}function ue(e,t,n,a){var r=e.aoColumns[n];me(t,{},function(t){!1!==r.bSortable&&(e.oFeatures.bProcessing?(D(e,!0),setTimeout(function(){se(e,n,t.shiftKey,a),"ssp"!==E(e)&&D(e,!1)},0)):se(e,n,t.shiftKey,a))})}function ce(t){var e,n,a,r=t.aLastSort,o=t.oClasses.sSortColumn,i=I(t),l=t.oFeatures;if(l.bSort&&l.bSortClasses){for(e=0,n=r.length;e<n;e++)a=r[e].src,P(N(t.aoData,"anCells",a)).removeClass(o+(e<2?e+1:3));for(e=0,n=i.length;e<n;e++)a=i[e].src,P(N(t.aoData,"anCells",a)).addClass(o+(e<2?e+1:3))}t.aLastSort=i}function fe(t,e){for(var n,a,r,o=t.aoColumns[e],i=w.ext.order[o.sSortDataType],l=(i&&(n=i.call(t.oInstance,t,e,ot(t,e))),w.ext.type.order[o.sType+"-pre"]),s=0,u=t.aoData.length;s<u;s++)(a=t.aoData[s])._aSortData||(a._aSortData=[]),a._aSortData[e]&&!i||(r=i?n[s]:S(t,s,e,"sort"),a._aSortData[e]=l?l(r):r)}function de(n){var t;n._bLoadingState||(t={time:+new Date,start:n._iDisplayStart,length:n._iDisplayLength,order:P.extend(!0,[],n.aaSorting),search:Et(n.oPreviousSearch),columns:P.map(n.aoColumns,function(t,e){return{visible:t.bVisible,search:Et(n.aoPreSearchCols[e])}})},n.oSavedState=t,R(n,"aoStateSaveParams","stateSaveParams",[n,t]),n.oFeatures.bStateSave&&!n.bDestroying&&n.fnStateSaveCallback.call(n.oInstance,n,t))}function he(e,t,n){var a;if(e.oFeatures.bStateSave)return(a=e.fnStateLoadCallback.call(e.oInstance,e,function(t){pe(e,t,n)}))!==H&&pe(e,a,n),!0;n()}function pe(n,t,e){var a,r,o=n.aoColumns,i=(n._bLoadingState=!0,n._bInitComplete?new w.Api(n):null);if(t&&t.time){var l=R(n,"aoStateLoadParams","stateLoadParams",[n,t]);if(-1!==P.inArray(!1,l))n._bLoadingState=!1;else{l=n.iStateDuration;if(0<l&&t.time<+new Date-1e3*l)n._bLoadingState=!1;else if(t.columns&&o.length!==t.columns.length)n._bLoadingState=!1;else{if(n.oLoadedState=P.extend(!0,{},t),t.length!==H&&(i?i.page.len(t.length):n._iDisplayLength=t.length),t.start!==H&&(null===i?(n._iDisplayStart=t.start,n.iInitDisplayStart=t.start):Yt(n,t.start/n._iDisplayLength)),t.order!==H&&(n.aaSorting=[],P.each(t.order,function(t,e){n.aaSorting.push(e[0]>=o.length?[0,e[1]]:e)})),t.search!==H&&P.extend(n.oPreviousSearch,Bt(t.search)),t.columns){for(a=0,r=t.columns.length;a<r;a++){var s=t.columns[a];s.visible!==H&&(i?i.column(a).visible(s.visible,!1):o[a].bVisible=s.visible),s.search!==H&&P.extend(n.aoPreSearchCols[a],Bt(s.search))}i&&i.columns.adjust()}n._bLoadingState=!1,R(n,"aoStateLoaded","stateLoaded",[n,t])}}}else n._bLoadingState=!1;e()}function ge(t){var e=w.settings,t=P.inArray(t,N(e,"nTable"));return-1!==t?e[t]:null}function W(t,e,n,a){if(n="DataTables warning: "+(t?"table id="+t.sTableId+" - ":"")+n,a&&(n+=". For more information about this error, please see https://datatables.net/tn/"+a),e)j.console&&console.log&&console.log(n);else{e=w.ext,e=e.sErrMode||e.errMode;if(t&&R(t,null,"error",[t,a,n]),"alert"==e)alert(n);else{if("throw"==e)throw new Error(n);"function"==typeof e&&e(t,a,n)}}}function F(n,a,t,e){Array.isArray(t)?P.each(t,function(t,e){Array.isArray(e)?F(n,a,e[0],e[1]):F(n,a,e)}):(e===H&&(e=t),a[t]!==H&&(n[e]=a[t]))}function be(t,e,n){var a,r;for(r in e)e.hasOwnProperty(r)&&(a=e[r],P.isPlainObject(a)?(P.isPlainObject(t[r])||(t[r]={}),P.extend(!0,t[r],a)):n&&"data"!==r&&"aaData"!==r&&Array.isArray(a)?t[r]=a.slice():t[r]=a);return t}function me(e,t,n){P(e).on("click.DT",t,function(t){P(e).trigger("blur"),n(t)}).on("keypress.DT",t,function(t){13===t.which&&(t.preventDefault(),n(t))}).on("selectstart.DT",function(){return!1})}function L(t,e,n,a){n&&t[e].push({fn:n,sName:a})}function R(n,t,e,a){var r=[];return t&&(r=P.map(n[t].slice().reverse(),function(t,e){return t.fn.apply(n.oInstance,a)})),null!==e&&(t=P.Event(e+".dt"),(e=P(n.nTable)).trigger(t,a),0===e.parents("body").length&&P("body").trigger(t,a),r.push(t.result)),r}function Se(t){var e=t._iDisplayStart,n=t.fnDisplayEnd(),a=t._iDisplayLength;n<=e&&(e=n-a),e-=e%a,t._iDisplayStart=e=-1===a||e<0?0:e}function ve(t,e){var t=t.renderer,n=w.ext.renderer[e];return P.isPlainObject(t)&&t[e]?n[t[e]]||n._:"string"==typeof t&&n[t]||n._}function E(t){return t.oFeatures.bServerSide?"ssp":t.ajax||t.sAjaxSource?"ajax":"dom"}function ye(t,n){var a;return Array.isArray(t)?P.map(t,function(t){return ye(t,n)}):"number"==typeof t?[n[t]]:(a=P.map(n,function(t,e){return t.nTable}),P(a).filter(t).map(function(t){var e=P.inArray(this,a);return n[e]}).toArray())}function De(r,o,t){var e,n;t&&(e=new B(r)).one("draw",function(){t(e.ajax.json())}),"ssp"==E(r)?u(r,o):(D(r,!0),(n=r.jqXHR)&&4!==n.readyState&&n.abort(),Tt(r,[],function(t){pt(r);for(var e=Ft(r,t),n=0,a=e.length;n<a;n++)x(r,e[n]);u(r,o),D(r,!1)}))}function _e(t,e,n,a,r){for(var o,i,l,s,u=[],c=typeof e,f=0,d=(e=e&&"string"!=c&&"function"!=c&&e.length!==H?e:[e]).length;f<d;f++)for(l=0,s=(i=e[f]&&e[f].split&&!e[f].match(/[\[\(:]/)?e[f].split(","):[e[f]]).length;l<s;l++)(o=n("string"==typeof i[l]?i[l].trim():i[l]))&&o.length&&(u=u.concat(o));var h=p.selector[t];if(h.length)for(f=0,d=h.length;f<d;f++)u=h[f](a,r,u);return z(u)}function we(t){return(t=t||{}).filter&&t.search===H&&(t.search=t.filter),P.extend({search:"none",order:"current",page:"all"},t)}function Ce(t){for(var e=0,n=t.length;e<n;e++)if(0<t[e].length)return t[0]=t[e],t[0].length=1,t.length=1,t.context=[t.context[e]],t;return t.length=0,t}function Te(o,t,e,n){function i(t,e){var n;if(Array.isArray(t)||t instanceof P)for(var a=0,r=t.length;a<r;a++)i(t[a],e);else t.nodeName&&"tr"===t.nodeName.toLowerCase()?l.push(t):(n=P("<tr><td></td></tr>").addClass(e),P("td",n).addClass(e).html(t)[0].colSpan=T(o),l.push(n[0]))}var l=[];i(e,n),t._details&&t._details.detach(),t._details=P(l),t._detailsShow&&t._details.insertAfter(t.nTr)}function xe(t,e){var n=t.context;if(n.length&&t.length){var a=n[0].aoData[t[0]];if(a._details){(a._detailsShow=e)?(a._details.insertAfter(a.nTr),P(a.nTr).addClass("dt-hasChild")):(a._details.detach(),P(a.nTr).removeClass("dt-hasChild")),R(n[0],null,"childRow",[e,t.row(t[0])]);var s=n[0],r=new B(s),a=".dt.DT_details",e="draw"+a,t="column-sizing"+a,a="destroy"+a,u=s.aoData;if(r.off(e+" "+t+" "+a),N(u,"_details").length>0){r.on(e,function(t,e){if(s!==e)return;r.rows({page:"current"}).eq(0).each(function(t){var e=u[t];if(e._detailsShow)e._details.insertAfter(e.nTr)})});r.on(t,function(t,e,n,a){if(s!==e)return;var r,o=T(e);for(var i=0,l=u.length;i<l;i++){r=u[i];if(r._details)r._details.each(function(){var t=P(this).children("td");if(t.length==1)t.attr("colspan",o)})}});r.on(a,function(t,e){if(s!==e)return;for(var n=0,a=u.length;n<a;n++)if(u[n]._details)Re(r,n)})}Le(n)}}}function Ae(t,e,n,a,r){for(var o=[],i=0,l=r.length;i<l;i++)o.push(S(t,r[i],e));return o}var Ie=[],o=Array.prototype,B=function(t,e){if(!(this instanceof B))return new B(t,e);function n(t){var e,n,a,r;t=t,a=w.settings,r=P.map(a,function(t,e){return t.nTable}),(t=t?t.nTable&&t.oApi?[t]:t.nodeName&&"table"===t.nodeName.toLowerCase()?-1!==(e=P.inArray(t,r))?[a[e]]:null:t&&"function"==typeof t.settings?t.settings().toArray():("string"==typeof t?n=P(t):t instanceof P&&(n=t),n?n.map(function(t){return-1!==(e=P.inArray(this,r))?a[e]:null}).toArray():void 0):[])&&o.push.apply(o,t)}var o=[];if(Array.isArray(t))for(var a=0,r=t.length;a<r;a++)n(t[a]);else n(t);this.context=z(o),e&&P.merge(this,e),this.selector={rows:null,cols:null,opts:null},B.extend(this,this,Ie)},Fe=(w.Api=B,P.extend(B.prototype,{any:function(){return 0!==this.count()},concat:o.concat,context:[],count:function(){return this.flatten().length},each:function(t){for(var e=0,n=this.length;e<n;e++)t.call(this,this[e],e,this);return this},eq:function(t){var e=this.context;return e.length>t?new B(e[t],this[t]):null},filter:function(t){var e=[];if(o.filter)e=o.filter.call(this,t,this);else for(var n=0,a=this.length;n<a;n++)t.call(this,this[n],n,this)&&e.push(this[n]);return new B(this.context,e)},flatten:function(){var t=[];return new B(this.context,t.concat.apply(t,this.toArray()))},join:o.join,indexOf:o.indexOf||function(t,e){for(var n=e||0,a=this.length;n<a;n++)if(this[n]===t)return n;return-1},iterator:function(t,e,n,a){var r,o,i,l,s,u,c,f,d=[],h=this.context,p=this.selector;for("string"==typeof t&&(a=n,n=e,e=t,t=!1),o=0,i=h.length;o<i;o++){var g=new B(h[o]);if("table"===e)(r=n.call(g,h[o],o))!==H&&d.push(r);else if("columns"===e||"rows"===e)(r=n.call(g,h[o],this[o],o))!==H&&d.push(r);else if("column"===e||"column-rows"===e||"row"===e||"cell"===e)for(c=this[o],"column-rows"===e&&(u=Fe(h[o],p.opts)),l=0,s=c.length;l<s;l++)f=c[l],(r="cell"===e?n.call(g,h[o],f.row,f.column,o,l):n.call(g,h[o],f,o,l,u))!==H&&d.push(r)}return d.length||a?((t=(a=new B(h,t?d.concat.apply([],d):d)).selector).rows=p.rows,t.cols=p.cols,t.opts=p.opts,a):this},lastIndexOf:o.lastIndexOf||function(t,e){return this.indexOf.apply(this.toArray.reverse(),arguments)},length:0,map:function(t){var e=[];if(o.map)e=o.map.call(this,t,this);else for(var n=0,a=this.length;n<a;n++)e.push(t.call(this,this[n],n));return new B(this.context,e)},pluck:function(t){var e=w.util.get(t);return this.map(function(t){return e(t)})},pop:o.pop,push:o.push,reduce:o.reduce||function(t,e){return et(this,t,e,0,this.length,1)},reduceRight:o.reduceRight||function(t,e){return et(this,t,e,this.length-1,-1,-1)},reverse:o.reverse,selector:null,shift:o.shift,slice:function(){return new B(this.context,this)},sort:o.sort,splice:o.splice,toArray:function(){return o.slice.call(this)},to$:function(){return P(this)},toJQuery:function(){return P(this)},unique:function(){return new B(this.context,z(this))},unshift:o.unshift}),B.extend=function(t,e,n){if(n.length&&e&&(e instanceof B||e.__dt_wrapper))for(var a,r=0,o=n.length;r<o;r++)e[(a=n[r]).name]="function"===a.type?function(e,n,a){return function(){var t=n.apply(e,arguments);return B.extend(t,t,a.methodExt),t}}(t,a.val,a):"object"===a.type?{}:a.val,e[a.name].__dt_wrapper=!0,B.extend(t,e[a.name],a.propExt)},B.register=e=function(t,e){if(Array.isArray(t))for(var n=0,a=t.length;n<a;n++)B.register(t[n],e);else for(var r=t.split("."),o=Ie,i=0,l=r.length;i<l;i++){var s,u,c=function(t,e){for(var n=0,a=t.length;n<a;n++)if(t[n].name===e)return t[n];return null}(o,u=(s=-1!==r[i].indexOf("()"))?r[i].replace("()",""):r[i]);c||o.push(c={name:u,val:{},methodExt:[],propExt:[],type:"object"}),i===l-1?(c.val=e,c.type="function"==typeof e?"function":P.isPlainObject(e)?"object":"other"):o=s?c.methodExt:c.propExt}},B.registerPlural=t=function(t,e,n){B.register(t,n),B.register(e,function(){var t=n.apply(this,arguments);return t===this?this:t instanceof B?t.length?Array.isArray(t[0])?new B(t.context,t[0]):t[0]:H:t})},e("tables()",function(t){return t!==H&&null!==t?new B(ye(t,this.context)):this}),e("table()",function(t){var t=this.tables(t),e=t.context;return e.length?new B(e[0]):t}),t("tables().nodes()","table().node()",function(){return this.iterator("table",function(t){return t.nTable},1)}),t("tables().body()","table().body()",function(){return this.iterator("table",function(t){return t.nTBody},1)}),t("tables().header()","table().header()",function(){return this.iterator("table",function(t){return t.nTHead},1)}),t("tables().footer()","table().footer()",function(){return this.iterator("table",function(t){return t.nTFoot},1)}),t("tables().containers()","table().container()",function(){return this.iterator("table",function(t){return t.nTableWrapper},1)}),e("draw()",function(e){return this.iterator("table",function(t){"page"===e?y(t):u(t,!1===(e="string"==typeof e?"full-hold"!==e:e))})}),e("page()",function(e){return e===H?this.page.info().page:this.iterator("table",function(t){Yt(t,e)})}),e("page.info()",function(t){var e,n,a,r,o;return 0===this.context.length?H:(n=(e=this.context[0])._iDisplayStart,a=e.oFeatures.bPaginate?e._iDisplayLength:-1,r=e.fnRecordsDisplay(),{page:(o=-1===a)?0:Math.floor(n/a),pages:o?1:Math.ceil(r/a),start:n,end:e.fnDisplayEnd(),length:a,recordsTotal:e.fnRecordsTotal(),recordsDisplay:r,serverSide:"ssp"===E(e)})}),e("page.len()",function(e){return e===H?0!==this.context.length?this.context[0]._iDisplayLength:H:this.iterator("table",function(t){$t(t,e)})}),e("ajax.json()",function(){var t=this.context;if(0<t.length)return t[0].json}),e("ajax.params()",function(){var t=this.context;if(0<t.length)return t[0].oAjaxData}),e("ajax.reload()",function(e,n){return this.iterator("table",function(t){De(t,!1===n,e)})}),e("ajax.url()",function(e){var t=this.context;return e===H?0===t.length?H:(t=t[0]).ajax?P.isPlainObject(t.ajax)?t.ajax.url:t.ajax:t.sAjaxSource:this.iterator("table",function(t){P.isPlainObject(t.ajax)?t.ajax.url=e:t.ajax=e})}),e("ajax.url().load()",function(e,n){return this.iterator("table",function(t){De(t,!1===n,e)})}),function(t,e){var n,a=[],r=t.aiDisplay,o=t.aiDisplayMaster,i=e.search,l=e.order,e=e.page;if("ssp"==E(t))return"removed"===i?[]:f(0,o.length);if("current"==e)for(u=t._iDisplayStart,c=t.fnDisplayEnd();u<c;u++)a.push(r[u]);else if("current"==l||"applied"==l){if("none"==i)a=o.slice();else if("applied"==i)a=r.slice();else if("removed"==i){for(var s={},u=0,c=r.length;u<c;u++)s[r[u]]=null;a=P.map(o,function(t){return s.hasOwnProperty(t)?null:t})}}else if("index"==l||"original"==l)for(u=0,c=t.aoData.length;u<c;u++)("none"==i||-1===(n=P.inArray(u,r))&&"removed"==i||0<=n&&"applied"==i)&&a.push(u);return a}),Le=(e("rows()",function(e,n){e===H?e="":P.isPlainObject(e)&&(n=e,e=""),n=we(n);var t=this.iterator("table",function(t){return _e("row",e,function(n){var t=d(n),a=r.aoData;if(null!==t&&!o)return[t];if(i=i||Fe(r,o),null!==t&&-1!==P.inArray(t,i))return[t];if(null===n||n===H||""===n)return i;if("function"==typeof n)return P.map(i,function(t){var e=a[t];return n(t,e._aData,e.nTr)?t:null});if(n.nodeName)return t=n._DT_RowIndex,e=n._DT_CellIndex,t!==H?a[t]&&a[t].nTr===n?[t]:[]:e?a[e.row]&&a[e.row].nTr===n.parentNode?[e.row]:[]:(t=P(n).closest("*[data-dt-row]")).length?[t.data("dt-row")]:[];if("string"==typeof n&&"#"===n.charAt(0)){var e=r.aIds[n.replace(/^#/,"")];if(e!==H)return[e.idx]}t=_(m(r.aoData,i,"nTr"));return P(t).filter(n).map(function(){return this._DT_RowIndex}).toArray()},r=t,o=n);var r,o,i},1);return t.selector.rows=e,t.selector.opts=n,t}),e("rows().nodes()",function(){return this.iterator("row",function(t,e){return t.aoData[e].nTr||H},1)}),e("rows().data()",function(){return this.iterator(!0,"rows",function(t,e){return m(t.aoData,e,"_aData")},1)}),t("rows().cache()","row().cache()",function(n){return this.iterator("row",function(t,e){t=t.aoData[e];return"search"===n?t._aFilterData:t._aSortData},1)}),t("rows().invalidate()","row().invalidate()",function(n){return this.iterator("row",function(t,e){bt(t,e,n)})}),t("rows().indexes()","row().index()",function(){return this.iterator("row",function(t,e){return e},1)}),t("rows().ids()","row().id()",function(t){for(var e=[],n=this.context,a=0,r=n.length;a<r;a++)for(var o=0,i=this[a].length;o<i;o++){var l=n[a].rowIdFn(n[a].aoData[this[a][o]]._aData);e.push((!0===t?"#":"")+l)}return new B(n,e)}),t("rows().remove()","row().remove()",function(){var f=this;return this.iterator("row",function(t,e,n){var a,r,o,i,l,s,u=t.aoData,c=u[e];for(u.splice(e,1),a=0,r=u.length;a<r;a++)if(s=(l=u[a]).anCells,null!==l.nTr&&(l.nTr._DT_RowIndex=a),null!==s)for(o=0,i=s.length;o<i;o++)s[o]._DT_CellIndex.row=a;gt(t.aiDisplayMaster,e),gt(t.aiDisplay,e),gt(f[n],e,!1),0<t._iRecordsDisplay&&t._iRecordsDisplay--,Se(t);n=t.rowIdFn(c._aData);n!==H&&delete t.aIds[n]}),this.iterator("table",function(t){for(var e=0,n=t.aoData.length;e<n;e++)t.aoData[e].idx=e}),this}),e("rows.add()",function(o){var t=this.iterator("table",function(t){for(var e,n=[],a=0,r=o.length;a<r;a++)(e=o[a]).nodeName&&"TR"===e.nodeName.toUpperCase()?n.push(ut(t,e)[0]):n.push(x(t,e));return n},1),e=this.rows(-1);return e.pop(),P.merge(e,t),e}),e("row()",function(t,e){return Ce(this.rows(t,e))}),e("row().data()",function(t){var e,n=this.context;return t===H?n.length&&this.length?n[0].aoData[this[0]]._aData:H:((e=n[0].aoData[this[0]])._aData=t,Array.isArray(t)&&e.nTr&&e.nTr.id&&b(n[0].rowId)(t,e.nTr.id),bt(n[0],this[0],"data"),this)}),e("row().node()",function(){var t=this.context;return t.length&&this.length&&t[0].aoData[this[0]].nTr||null}),e("row.add()",function(e){e instanceof P&&e.length&&(e=e[0]);var t=this.iterator("table",function(t){return e.nodeName&&"TR"===e.nodeName.toUpperCase()?ut(t,e)[0]:x(t,e)});return this.row(t[0])}),P(v).on("plugin-init.dt",function(t,e){var n=new B(e),a="on-plugin-init",r="stateSaveParams."+a,o="destroy. "+a,a=(n.on(r,function(t,e,n){for(var a=e.rowIdFn,r=e.aoData,o=[],i=0;i<r.length;i++)r[i]._detailsShow&&o.push("#"+a(r[i]._aData));n.childRows=o}),n.on(o,function(){n.off(r+" "+o)}),n.state.loaded());a&&a.childRows&&n.rows(P.map(a.childRows,function(t){return t.replace(/:/g,"\\:")})).every(function(){R(e,null,"requestChild",[this])})}),w.util.throttle(function(t){de(t[0])},500)),Re=function(t,e){var n=t.context;n.length&&(e=n[0].aoData[e!==H?e:t[0]])&&e._details&&(e._details.remove(),e._detailsShow=H,e._details=H,P(e.nTr).removeClass("dt-hasChild"),Le(n))},Pe="row().child",je=Pe+"()",He=(e(je,function(t,e){var n=this.context;return t===H?n.length&&this.length?n[0].aoData[this[0]]._details:H:(!0===t?this.child.show():!1===t?Re(this):n.length&&this.length&&Te(n[0],n[0].aoData[this[0]],t,e),this)}),e([Pe+".show()",je+".show()"],function(t){return xe(this,!0),this}),e([Pe+".hide()",je+".hide()"],function(){return xe(this,!1),this}),e([Pe+".remove()",je+".remove()"],function(){return Re(this),this}),e(Pe+".isShown()",function(){var t=this.context;return t.length&&this.length&&t[0].aoData[this[0]]._detailsShow||!1}),/^([^:]+):(name|visIdx|visible)$/),Ne=(e("columns()",function(n,a){n===H?n="":P.isPlainObject(n)&&(a=n,n=""),a=we(a);var t=this.iterator("table",function(t){return e=n,l=a,s=(i=t).aoColumns,u=N(s,"sName"),c=N(s,"nTh"),_e("column",e,function(n){var a,t=d(n);if(""===n)return f(s.length);if(null!==t)return[0<=t?t:s.length+t];if("function"==typeof n)return a=Fe(i,l),P.map(s,function(t,e){return n(e,Ae(i,e,0,0,a),c[e])?e:null});var r="string"==typeof n?n.match(He):"";if(r)switch(r[2]){case"visIdx":case"visible":var e,o=parseInt(r[1],10);return o<0?[(e=P.map(s,function(t,e){return t.bVisible?e:null}))[e.length+o]]:[rt(i,o)];case"name":return P.map(u,function(t,e){return t===r[1]?e:null});default:return[]}return n.nodeName&&n._DT_CellIndex?[n._DT_CellIndex.column]:(t=P(c).filter(n).map(function(){return P.inArray(this,c)}).toArray()).length||!n.nodeName?t:(t=P(n).closest("*[data-dt-column]")).length?[t.data("dt-column")]:[]},i,l);var i,e,l,s,u,c},1);return t.selector.cols=n,t.selector.opts=a,t}),t("columns().header()","column().header()",function(t,e){return this.iterator("column",function(t,e){return t.aoColumns[e].nTh},1)}),t("columns().footer()","column().footer()",function(t,e){return this.iterator("column",function(t,e){return t.aoColumns[e].nTf},1)}),t("columns().data()","column().data()",function(){return this.iterator("column-rows",Ae,1)}),t("columns().dataSrc()","column().dataSrc()",function(){return this.iterator("column",function(t,e){return t.aoColumns[e].mData},1)}),t("columns().cache()","column().cache()",function(o){return this.iterator("column-rows",function(t,e,n,a,r){return m(t.aoData,r,"search"===o?"_aFilterData":"_aSortData",e)},1)}),t("columns().nodes()","column().nodes()",function(){return this.iterator("column-rows",function(t,e,n,a,r){return m(t.aoData,r,"anCells",e)},1)}),t("columns().visible()","column().visible()",function(f,n){var e=this,t=this.iterator("column",function(t,e){if(f===H)return t.aoColumns[e].bVisible;var n,a,r=e,e=f,o=t.aoColumns,i=o[r],l=t.aoData;if(e===H)i.bVisible;else if(i.bVisible!==e){if(e)for(var s=P.inArray(!0,N(o,"bVisible"),r+1),u=0,c=l.length;u<c;u++)a=l[u].nTr,n=l[u].anCells,a&&a.insertBefore(n[r],n[s]||null);else P(N(t.aoData,"anCells",r)).detach();i.bVisible=e}});return f!==H&&this.iterator("table",function(t){Dt(t,t.aoHeader),Dt(t,t.aoFooter),t.aiDisplay.length||P(t.nTBody).find("td[colspan]").attr("colspan",T(t)),de(t),e.iterator("column",function(t,e){R(t,null,"column-visibility",[t,e,f,n])}),n!==H&&!n||e.columns.adjust()}),t}),t("columns().indexes()","column().index()",function(n){return this.iterator("column",function(t,e){return"visible"===n?ot(t,e):e},1)}),e("columns.adjust()",function(){return this.iterator("table",function(t){O(t)},1)}),e("column.index()",function(t,e){var n;if(0!==this.context.length)return n=this.context[0],"fromVisible"===t||"toData"===t?rt(n,e):"fromData"===t||"toVisible"===t?ot(n,e):void 0}),e("column()",function(t,e){return Ce(this.columns(t,e))}),e("cells()",function(g,t,b){var a,r,o,i,l,s,e;return P.isPlainObject(g)&&(g.row===H?(b=g,g=null):(b=t,t=null)),P.isPlainObject(t)&&(b=t,t=null),null===t||t===H?this.iterator("table",function(t){return a=t,t=g,e=we(b),f=a.aoData,d=Fe(a,e),n=_(m(f,d,"anCells")),h=P(Y([],n)),p=a.aoColumns.length,_e("cell",t,function(t){var e,n="function"==typeof t;if(null===t||t===H||n){for(o=[],i=0,l=d.length;i<l;i++)for(r=d[i],s=0;s<p;s++)u={row:r,column:s},(!n||(c=f[r],t(u,S(a,r,s),c.anCells?c.anCells[s]:null)))&&o.push(u);return o}return P.isPlainObject(t)?t.column!==H&&t.row!==H&&-1!==P.inArray(t.row,d)?[t]:[]:(e=h.filter(t).map(function(t,e){return{row:e._DT_CellIndex.row,column:e._DT_CellIndex.column}}).toArray()).length||!t.nodeName?e:(c=P(t).closest("*[data-dt-row]")).length?[{row:c.data("dt-row"),column:c.data("dt-column")}]:[]},a,e);var a,e,r,o,i,l,s,u,c,f,d,n,h,p}):(e=b?{page:b.page,order:b.order,search:b.search}:{},a=this.columns(t,e),r=this.rows(g,e),e=this.iterator("table",function(t,e){var n=[];for(o=0,i=r[e].length;o<i;o++)for(l=0,s=a[e].length;l<s;l++)n.push({row:r[e][o],column:a[e][l]});return n},1),e=b&&b.selected?this.cells(e,b):e,P.extend(e.selector,{cols:t,rows:g,opts:b}),e)}),t("cells().nodes()","cell().node()",function(){return this.iterator("cell",function(t,e,n){t=t.aoData[e];return t&&t.anCells?t.anCells[n]:H},1)}),e("cells().data()",function(){return this.iterator("cell",function(t,e,n){return S(t,e,n)},1)}),t("cells().cache()","cell().cache()",function(a){return a="search"===a?"_aFilterData":"_aSortData",this.iterator("cell",function(t,e,n){return t.aoData[e][a][n]},1)}),t("cells().render()","cell().render()",function(a){return this.iterator("cell",function(t,e,n){return S(t,e,n,a)},1)}),t("cells().indexes()","cell().index()",function(){return this.iterator("cell",function(t,e,n){return{row:e,column:n,columnVisible:ot(t,n)}},1)}),t("cells().invalidate()","cell().invalidate()",function(a){return this.iterator("cell",function(t,e,n){bt(t,e,a,n)})}),e("cell()",function(t,e,n){return Ce(this.cells(t,e,n))}),e("cell().data()",function(t){var e=this.context,n=this[0];return t===H?e.length&&n.length?S(e[0],n[0].row,n[0].column):H:(ct(e[0],n[0].row,n[0].column,t),bt(e[0],n[0].row,"data",n[0].column),this)}),e("order()",function(e,t){var n=this.context;return e===H?0!==n.length?n[0].aaSorting:H:("number"==typeof e?e=[[e,t]]:e.length&&!Array.isArray(e[0])&&(e=Array.prototype.slice.call(arguments)),this.iterator("table",function(t){t.aaSorting=e.slice()}))}),e("order.listener()",function(e,n,a){return this.iterator("table",function(t){ue(t,e,n,a)})}),e("order.fixed()",function(e){var t;return e?this.iterator("table",function(t){t.aaSortingFixed=P.extend(!0,{},e)}):(t=(t=this.context).length?t[0].aaSortingFixed:H,Array.isArray(t)?{pre:t}:t)}),e(["columns().order()","column().order()"],function(a){var r=this;return this.iterator("table",function(t,e){var n=[];P.each(r[e],function(t,e){n.push([e,a])}),t.aaSorting=n})}),e("search()",function(e,n,a,r){var t=this.context;return e===H?0!==t.length?t[0].oPreviousSearch.sSearch:H:this.iterator("table",function(t){t.oFeatures.bFilter&&Rt(t,P.extend({},t.oPreviousSearch,{sSearch:e+"",bRegex:null!==n&&n,bSmart:null===a||a,bCaseInsensitive:null===r||r}),1)})}),t("columns().search()","column().search()",function(a,r,o,i){return this.iterator("column",function(t,e){var n=t.aoPreSearchCols;if(a===H)return n[e].sSearch;t.oFeatures.bFilter&&(P.extend(n[e],{sSearch:a+"",bRegex:null!==r&&r,bSmart:null===o||o,bCaseInsensitive:null===i||i}),Rt(t,t.oPreviousSearch,1))})}),e("state()",function(){return this.context.length?this.context[0].oSavedState:null}),e("state.clear()",function(){return this.iterator("table",function(t){t.fnStateSaveCallback.call(t.oInstance,t,{})})}),e("state.loaded()",function(){return this.context.length?this.context[0].oLoadedState:null}),e("state.save()",function(){return this.iterator("table",function(t){de(t)})}),w.use=function(t,e){"lib"===e||t.fn?P=t:"win"==e||t.document?v=(j=t).document:"datetime"!==e&&"DateTime"!==t.type||(w.DateTime=t)},w.factory=function(t,e){var n=!1;return t&&t.document&&(v=(j=t).document),e&&e.fn&&e.fn.jquery&&(P=e,n=!0),n},w.versionCheck=w.fnVersionCheck=function(t){for(var e,n,a=w.version.split("."),r=t.split("."),o=0,i=r.length;o<i;o++)if((e=parseInt(a[o],10)||0)!==(n=parseInt(r[o],10)||0))return n<e;return!0},w.isDataTable=w.fnIsDataTable=function(t){var r=P(t).get(0),o=!1;return t instanceof w.Api||(P.each(w.settings,function(t,e){var n=e.nScrollHead?P("table",e.nScrollHead)[0]:null,a=e.nScrollFoot?P("table",e.nScrollFoot)[0]:null;e.nTable!==r&&n!==r&&a!==r||(o=!0)}),o)},w.tables=w.fnTables=function(e){var t=!1,n=(P.isPlainObject(e)&&(t=e.api,e=e.visible),P.map(w.settings,function(t){if(!e||P(t.nTable).is(":visible"))return t.nTable}));return t?new B(n):n},w.camelToHungarian=C,e("$()",function(t,e){e=this.rows(e).nodes(),e=P(e);return P([].concat(e.filter(t).toArray(),e.find(t).toArray()))}),P.each(["on","one","off"],function(t,n){e(n+"()",function(){var t=Array.prototype.slice.call(arguments),e=(t[0]=P.map(t[0].split(/\s/),function(t){return t.match(/\.dt\b/)?t:t+".dt"}).join(" "),P(this.tables().nodes()));return e[n].apply(e,t),this})}),e("clear()",function(){return this.iterator("table",function(t){pt(t)})}),e("settings()",function(){return new B(this.context,this.context)}),e("init()",function(){var t=this.context;return t.length?t[0].oInit:null}),e("data()",function(){return this.iterator("table",function(t){return N(t.aoData,"_aData")}).flatten()}),e("destroy()",function(c){return c=c||!1,this.iterator("table",function(e){var n,t=e.oClasses,a=e.nTable,r=e.nTBody,o=e.nTHead,i=e.nTFoot,l=P(a),r=P(r),s=P(e.nTableWrapper),u=P.map(e.aoData,function(t){return t.nTr}),i=(e.bDestroying=!0,R(e,"aoDestroyCallback","destroy",[e]),c||new B(e).columns().visible(!0),s.off(".DT").find(":not(tbody *)").off(".DT"),P(j).off(".DT-"+e.sInstance),a!=o.parentNode&&(l.children("thead").detach(),l.append(o)),i&&a!=i.parentNode&&(l.children("tfoot").detach(),l.append(i)),e.aaSorting=[],e.aaSortingFixed=[],ce(e),P(u).removeClass(e.asStripeClasses.join(" ")),P("th, td",o).removeClass(t.sSortable+" "+t.sSortableAsc+" "+t.sSortableDesc+" "+t.sSortableNone),r.children().detach(),r.append(u),e.nTableWrapper.parentNode),o=c?"remove":"detach",u=(l[o](),s[o](),!c&&i&&(i.insertBefore(a,e.nTableReinsertBefore),l.css("width",e.sDestroyWidth).removeClass(t.sTable),n=e.asDestroyStripes.length)&&r.children().each(function(t){P(this).addClass(e.asDestroyStripes[t%n])}),P.inArray(e,w.settings));-1!==u&&w.settings.splice(u,1)})}),P.each(["column","row","cell"],function(t,s){e(s+"s().every()",function(o){var i=this.selector.opts,l=this;return this.iterator(s,function(t,e,n,a,r){o.call(l[s](e,"cell"===s?n:i,"cell"===s?i:H),e,n,a,r)})})}),e("i18n()",function(t,e,n){var a=this.context[0],t=A(t)(a.oLanguage);return t===H&&(t=e),"string"==typeof(t=n!==H&&P.isPlainObject(t)?t[n]!==H?t[n]:t._:t)?t.replace("%d",n):t}),w.version="1.13.8",w.settings=[],w.models={},w.models.oSearch={bCaseInsensitive:!0,sSearch:"",bRegex:!1,bSmart:!0,return:!1},w.models.oRow={nTr:null,anCells:null,_aData:[],_aSortData:null,_aFilterData:null,_sFilterRow:null,_sRowStripe:"",src:null,idx:-1},w.models.oColumn={idx:null,aDataSort:null,asSorting:null,bSearchable:null,bSortable:null,bVisible:null,_sManualType:null,_bAttrSrc:!1,fnCreatedCell:null,fnGetData:null,fnSetData:null,mData:null,mRender:null,nTh:null,nTf:null,sClass:null,sContentPadding:null,sDefaultContent:null,sName:null,sSortDataType:"std",sSortingClass:null,sSortingClassJUI:null,sTitle:null,sType:null,sWidth:null,sWidthOrig:null},w.defaults={aaData:null,aaSorting:[[0,"asc"]],aaSortingFixed:[],ajax:null,aLengthMenu:[10,25,50,100],aoColumns:null,aoColumnDefs:null,aoSearchCols:[],asStripeClasses:null,bAutoWidth:!0,bDeferRender:!1,bDestroy:!1,bFilter:!0,bInfo:!0,bLengthChange:!0,bPaginate:!0,bProcessing:!1,bRetrieve:!1,bScrollCollapse:!1,bServerSide:!1,bSort:!0,bSortMulti:!0,bSortCellsTop:!1,bSortClasses:!0,bStateSave:!1,fnCreatedRow:null,fnDrawCallback:null,fnFooterCallback:null,fnFormatNumber:function(t){return t.toString().replace(/\B(?=(\d{3})+(?!\d))/g,this.oLanguage.sThousands)},fnHeaderCallback:null,fnInfoCallback:null,fnInitComplete:null,fnPreDrawCallback:null,fnRowCallback:null,fnServerData:null,fnServerParams:null,fnStateLoadCallback:function(t){try{return JSON.parse((-1===t.iStateDuration?sessionStorage:localStorage).getItem("DataTables_"+t.sInstance+"_"+location.pathname))}catch(t){return{}}},fnStateLoadParams:null,fnStateLoaded:null,fnStateSaveCallback:function(t,e){try{(-1===t.iStateDuration?sessionStorage:localStorage).setItem("DataTables_"+t.sInstance+"_"+location.pathname,JSON.stringify(e))}catch(t){}},fnStateSaveParams:null,iStateDuration:7200,iDeferLoading:null,iDisplayLength:10,iDisplayStart:0,iTabIndex:0,oClasses:{},oLanguage:{oAria:{sSortAscending:": activate to sort column ascending",sSortDescending:": activate to sort column descending"},oPaginate:{sFirst:"First",sLast:"Last",sNext:"Next",sPrevious:"Previous"},sEmptyTable:"No data available in table",sInfo:"Showing _START_ to _END_ of _TOTAL_ entries",sInfoEmpty:"Showing 0 to 0 of 0 entries",sInfoFiltered:"(filtered from _MAX_ total entries)",sInfoPostFix:"",sDecimal:"",sThousands:",",sLengthMenu:"Show _MENU_ entries",sLoadingRecords:"Loading...",sProcessing:"",sSearch:"Search:",sSearchPlaceholder:"",sUrl:"",sZeroRecords:"No matching records found"},oSearch:P.extend({},w.models.oSearch),sAjaxDataProp:"data",sAjaxSource:null,sDom:"lfrtip",searchDelay:null,sPaginationType:"simple_numbers",sScrollX:"",sScrollXInner:"",sScrollY:"",sServerMethod:"GET",renderer:null,rowId:"DT_RowId"},i(w.defaults),w.defaults.column={aDataSort:null,iDataSort:-1,asSorting:["asc","desc"],bSearchable:!0,bSortable:!0,bVisible:!0,fnCreatedCell:null,mData:null,mRender:null,sCellType:"td",sClass:"",sContentPadding:"",sDefaultContent:null,sName:"",sSortDataType:"std",sTitle:null,sType:null,sWidth:null},i(w.defaults.column),w.models.oSettings={oFeatures:{bAutoWidth:null,bDeferRender:null,bFilter:null,bInfo:null,bLengthChange:null,bPaginate:null,bProcessing:null,bServerSide:null,bSort:null,bSortMulti:null,bSortClasses:null,bStateSave:null},oScroll:{bCollapse:null,iBarWidth:0,sX:null,sXInner:null,sY:null},oLanguage:{fnInfoCallback:null},oBrowser:{bScrollOversize:!1,bScrollbarLeft:!1,bBounding:!1,barWidth:0},ajax:null,aanFeatures:[],aoData:[],aiDisplay:[],aiDisplayMaster:[],aIds:{},aoColumns:[],aoHeader:[],aoFooter:[],oPreviousSearch:{},aoPreSearchCols:[],aaSorting:null,aaSortingFixed:[],asStripeClasses:null,asDestroyStripes:[],sDestroyWidth:0,aoRowCallback:[],aoHeaderCallback:[],aoFooterCallback:[],aoDrawCallback:[],aoRowCreatedCallback:[],aoPreDrawCallback:[],aoInitComplete:[],aoStateSaveParams:[],aoStateLoadParams:[],aoStateLoaded:[],sTableId:"",nTable:null,nTHead:null,nTFoot:null,nTBody:null,nTableWrapper:null,bDeferLoading:!1,bInitialised:!1,aoOpenRows:[],sDom:null,searchDelay:null,sPaginationType:"two_button",iStateDuration:0,aoStateSave:[],aoStateLoad:[],oSavedState:null,oLoadedState:null,sAjaxSource:null,sAjaxDataProp:null,jqXHR:null,json:H,oAjaxData:H,fnServerData:null,aoServerParams:[],sServerMethod:null,fnFormatNumber:null,aLengthMenu:null,iDraw:0,bDrawing:!1,iDrawError:-1,_iDisplayLength:10,_iDisplayStart:0,_iRecordsTotal:0,_iRecordsDisplay:0,oClasses:{},bFiltered:!1,bSorted:!1,bSortCellsTop:null,oInit:null,aoDestroyCallback:[],fnRecordsTotal:function(){return"ssp"==E(this)?+this._iRecordsTotal:this.aiDisplayMaster.length},fnRecordsDisplay:function(){return"ssp"==E(this)?+this._iRecordsDisplay:this.aiDisplay.length},fnDisplayEnd:function(){var t=this._iDisplayLength,e=this._iDisplayStart,n=e+t,a=this.aiDisplay.length,r=this.oFeatures,o=r.bPaginate;return r.bServerSide?!1===o||-1===t?e+a:Math.min(e+t,this._iRecordsDisplay):!o||a<n||-1===t?a:n},oInstance:null,sInstance:null,iTabIndex:0,nScrollHead:null,nScrollFoot:null,aLastSort:[],oPlugins:{},rowIdFn:null,rowId:null},w.ext=p={buttons:{},classes:{},builder:"-source-",errMode:"alert",feature:[],search:[],selector:{cell:[],column:[],row:[]},internal:{},legacy:{ajax:null},pager:{},renderer:{pageButton:{},header:{}},order:{},type:{detect:[],search:{},order:{}},_unique:0,fnVersionCheck:w.fnVersionCheck,iApiIndex:0,oJUIClasses:{},sVersion:w.version},P.extend(p,{afnFiltering:p.search,aTypes:p.type.detect,ofnSearch:p.type.search,oSort:p.type.order,afnSortData:p.order,aoFeatures:p.feature,oApi:p.internal,oStdClasses:p.classes,oPagination:p.pager}),P.extend(w.ext.classes,{sTable:"dataTable",sNoFooter:"no-footer",sPageButton:"paginate_button",sPageButtonActive:"current",sPageButtonDisabled:"disabled",sStripeOdd:"odd",sStripeEven:"even",sRowEmpty:"dataTables_empty",sWrapper:"dataTables_wrapper",sFilter:"dataTables_filter",sInfo:"dataTables_info",sPaging:"dataTables_paginate paging_",sLength:"dataTables_length",sProcessing:"dataTables_processing",sSortAsc:"sorting_asc",sSortDesc:"sorting_desc",sSortable:"sorting",sSortableAsc:"sorting_desc_disabled",sSortableDesc:"sorting_asc_disabled",sSortableNone:"sorting_disabled",sSortColumn:"sorting_",sFilterInput:"",sLengthSelect:"",sScrollWrapper:"dataTables_scroll",sScrollHead:"dataTables_scrollHead",sScrollHeadInner:"dataTables_scrollHeadInner",sScrollBody:"dataTables_scrollBody",sScrollFoot:"dataTables_scrollFoot",sScrollFootInner:"dataTables_scrollFootInner",sHeaderTH:"",sFooterTH:"",sSortJUIAsc:"",sSortJUIDesc:"",sSortJUI:"",sSortJUIAscAllowed:"",sSortJUIDescAllowed:"",sSortJUIWrapper:"",sSortIcon:"",sJUIHeader:"",sJUIFooter:""}),w.ext.pager);function Oe(t,e){var n=[],a=Ne.numbers_length,r=Math.floor(a/2);return e<=a?n=f(0,e):t<=r?((n=f(0,a-2)).push("ellipsis"),n.push(e-1)):((e-1-r<=t?n=f(e-(a-2),e):((n=f(t-r+2,t+r-1)).push("ellipsis"),n.push(e-1),n)).splice(0,0,"ellipsis"),n.splice(0,0,0)),n.DT_el="span",n}P.extend(Ne,{simple:function(t,e){return["previous","next"]},full:function(t,e){return["first","previous","next","last"]},numbers:function(t,e){return[Oe(t,e)]},simple_numbers:function(t,e){return["previous",Oe(t,e),"next"]},full_numbers:function(t,e){return["first","previous",Oe(t,e),"next","last"]},first_last_numbers:function(t,e){return["first",Oe(t,e),"last"]},_numbers:Oe,numbers_length:7}),P.extend(!0,w.ext.renderer,{pageButton:{_:function(u,t,c,e,f,d){function h(t,e){for(var n,a=b.sPageButtonDisabled,r=function(t){Yt(u,t.data.action,!0)},o=0,i=e.length;o<i;o++)if(n=e[o],Array.isArray(n)){var l=P("<"+(n.DT_el||"div")+"/>").appendTo(t);h(l,n)}else{var s=!1;switch(p=null,g=n){case"ellipsis":t.append('<span class="ellipsis">&#x2026;</span>');break;case"first":p=m.sFirst,0===f&&(s=!0);break;case"previous":p=m.sPrevious,0===f&&(s=!0);break;case"next":p=m.sNext,0!==d&&f!==d-1||(s=!0);break;case"last":p=m.sLast,0!==d&&f!==d-1||(s=!0);break;default:p=u.fnFormatNumber(n+1),g=f===n?b.sPageButtonActive:""}null!==p&&(l=u.oInit.pagingTag||"a",s&&(g+=" "+a),me(P("<"+l+">",{class:b.sPageButton+" "+g,"aria-controls":u.sTableId,"aria-disabled":s?"true":null,"aria-label":S[n],role:"link","aria-current":g===b.sPageButtonActive?"page":null,"data-dt-idx":n,tabindex:s?-1:u.iTabIndex,id:0===c&&"string"==typeof n?u.sTableId+"_"+n:null}).html(p).appendTo(t),{action:n},r))}}var p,g,n,b=u.oClasses,m=u.oLanguage.oPaginate,S=u.oLanguage.oAria.paginate||{};try{n=P(t).find(v.activeElement).data("dt-idx")}catch(t){}h(P(t).empty(),e),n!==H&&P(t).find("[data-dt-idx="+n+"]").trigger("focus")}}}),P.extend(w.ext.type.detect,[function(t,e){e=e.oLanguage.sDecimal;return l(t,e)?"num"+e:null},function(t,e){var n;return(!t||t instanceof Date||X.test(t))&&(null!==(n=Date.parse(t))&&!isNaN(n)||h(t))?"date":null},function(t,e){e=e.oLanguage.sDecimal;return l(t,e,!0)?"num-fmt"+e:null},function(t,e){e=e.oLanguage.sDecimal;return a(t,e)?"html-num"+e:null},function(t,e){e=e.oLanguage.sDecimal;return a(t,e,!0)?"html-num-fmt"+e:null},function(t,e){return h(t)||"string"==typeof t&&-1!==t.indexOf("<")?"html":null}]),P.extend(w.ext.type.search,{html:function(t){return h(t)?t:"string"==typeof t?t.replace(U," ").replace(V,""):""},string:function(t){return!h(t)&&"string"==typeof t?t.replace(U," "):t}});function ke(t,e,n,a){var r;return 0===t||t&&"-"!==t?"number"==(r=typeof t)||"bigint"==r?t:+(t=(t=e?$(t,e):t).replace&&(n&&(t=t.replace(n,"")),a)?t.replace(a,""):t):-1/0}function Me(n){P.each({num:function(t){return ke(t,n)},"num-fmt":function(t){return ke(t,n,q)},"html-num":function(t){return ke(t,n,V)},"html-num-fmt":function(t){return ke(t,n,V,q)}},function(t,e){p.type.order[t+n+"-pre"]=e,t.match(/^html\-/)&&(p.type.search[t+n]=p.type.search.html)})}P.extend(p.type.order,{"date-pre":function(t){t=Date.parse(t);return isNaN(t)?-1/0:t},"html-pre":function(t){return h(t)?"":t.replace?t.replace(/<.*?>/g,"").toLowerCase():t+""},"string-pre":function(t){return h(t)?"":"string"==typeof t?t.toLowerCase():t.toString?t.toString():""},"string-asc":function(t,e){return t<e?-1:e<t?1:0},"string-desc":function(t,e){return t<e?1:e<t?-1:0}}),Me(""),P.extend(!0,w.ext.renderer,{header:{_:function(r,o,i,l){P(r.nTable).on("order.dt.DT",function(t,e,n,a){r===e&&(e=i.idx,o.removeClass(l.sSortAsc+" "+l.sSortDesc).addClass("asc"==a[e]?l.sSortAsc:"desc"==a[e]?l.sSortDesc:i.sSortingClass))})},jqueryui:function(r,o,i,l){P("<div/>").addClass(l.sSortJUIWrapper).append(o.contents()).append(P("<span/>").addClass(l.sSortIcon+" "+i.sSortingClassJUI)).appendTo(o),P(r.nTable).on("order.dt.DT",function(t,e,n,a){r===e&&(e=i.idx,o.removeClass(l.sSortAsc+" "+l.sSortDesc).addClass("asc"==a[e]?l.sSortAsc:"desc"==a[e]?l.sSortDesc:i.sSortingClass),o.find("span."+l.sSortIcon).removeClass(l.sSortJUIAsc+" "+l.sSortJUIDesc+" "+l.sSortJUI+" "+l.sSortJUIAscAllowed+" "+l.sSortJUIDescAllowed).addClass("asc"==a[e]?l.sSortJUIAsc:"desc"==a[e]?l.sSortJUIDesc:i.sSortingClassJUI))})}}});function We(t){return"string"==typeof(t=Array.isArray(t)?t.join(","):t)?t.replace(/&/g,"&amp;").replace(/</g,"&lt;").replace(/>/g,"&gt;").replace(/"/g,"&quot;"):t}function Ee(t,e,n,a,r){return j.moment?t[e](r):j.luxon?t[n](r):a?t[a](r):t}var Be=!1;function Ue(t,e,n){var a;if(j.moment){if(!(a=j.moment.utc(t,e,n,!0)).isValid())return null}else if(j.luxon){if(!(a=e&&"string"==typeof t?j.luxon.DateTime.fromFormat(t,e):j.luxon.DateTime.fromISO(t)).isValid)return null;a.setLocale(n)}else e?(Be||alert("DataTables warning: Formatted date without Moment.js or Luxon - https://datatables.net/tn/17"),Be=!0):a=new Date(t);return a}function Ve(s){return function(a,r,o,i){0===arguments.length?(o="en",a=r=null):1===arguments.length?(o="en",r=a,a=null):2===arguments.length&&(o=r,r=a,a=null);var l="datetime-"+r;return w.ext.type.order[l]||(w.ext.type.detect.unshift(function(t){return t===l&&l}),w.ext.type.order[l+"-asc"]=function(t,e){t=t.valueOf(),e=e.valueOf();return t===e?0:t<e?-1:1},w.ext.type.order[l+"-desc"]=function(t,e){t=t.valueOf(),e=e.valueOf();return t===e?0:e<t?-1:1}),function(t,e){var n;return null!==t&&t!==H||(t="--now"===i?(n=new Date,new Date(Date.UTC(n.getFullYear(),n.getMonth(),n.getDate(),n.getHours(),n.getMinutes(),n.getSeconds()))):""),"type"===e?l:""===t?"sort"!==e?"":Ue("0000-01-01 00:00:00",null,o):!(null===r||a!==r||"sort"===e||"type"===e||t instanceof Date)||null===(n=Ue(t,a,o))?t:"sort"===e?n:(t=null===r?Ee(n,"toDate","toJSDate","")[s]():Ee(n,"format","toFormat","toISOString",r),"display"===e?We(t):t)}}}var Xe=",",Je=".";if(j.Intl!==H)try{for(var qe=(new Intl.NumberFormat).formatToParts(100000.1),n=0;n<qe.length;n++)"group"===qe[n].type?Xe=qe[n].value:"decimal"===qe[n].type&&(Je=qe[n].value)}catch(t){}function $e(e){return function(){var t=[ge(this[w.ext.iApiIndex])].concat(Array.prototype.slice.call(arguments));return w.ext.internal[e].apply(this,t)}}return w.datetime=function(n,a){var r="datetime-detect-"+n;a=a||"en",w.ext.type.order[r]||(w.ext.type.detect.unshift(function(t){var e=Ue(t,n,a);return!(""!==t&&!e)&&r}),w.ext.type.order[r+"-pre"]=function(t){return Ue(t,n,a)||0})},w.render={date:Ve("toLocaleDateString"),datetime:Ve("toLocaleString"),time:Ve("toLocaleTimeString"),number:function(a,r,o,i,l){return null!==a&&a!==H||(a=Xe),null!==r&&r!==H||(r=Je),{display:function(t){if("number"!=typeof t&&"string"!=typeof t)return t;if(""===t||null===t)return t;var e=t<0?"-":"",n=parseFloat(t);if(isNaN(n))return We(t);n=n.toFixed(o),t=Math.abs(n);n=parseInt(t,10),t=o?r+(t-n).toFixed(o).substring(2):"";return(e=0===n&&0===parseFloat(t)?"":e)+(i||"")+n.toString().replace(/\B(?=(\d{3})+(?!\d))/g,a)+t+(l||"")}}},text:function(){return{display:We,filter:We}}},P.extend(w.ext.internal,{_fnExternApiFunc:$e,_fnBuildAjax:Tt,_fnAjaxUpdate:xt,_fnAjaxParameters:At,_fnAjaxUpdateDraw:It,_fnAjaxDataSrc:Ft,_fnAddColumn:nt,_fnColumnOptions:at,_fnAdjustColumnSizing:O,_fnVisibleToColumnIndex:rt,_fnColumnIndexToVisible:ot,_fnVisbleColumns:T,_fnGetColumns:it,_fnColumnTypes:lt,_fnApplyColumnDefs:st,_fnHungarianMap:i,_fnCamelToHungarian:C,_fnLanguageCompat:Z,_fnBrowserDetect:tt,_fnAddData:x,_fnAddTr:ut,_fnNodeToDataIndex:function(t,e){return e._DT_RowIndex!==H?e._DT_RowIndex:null},_fnNodeToColumnIndex:function(t,e,n){return P.inArray(n,t.aoData[e].anCells)},_fnGetCellData:S,_fnSetCellData:ct,_fnSplitObjNotation:dt,_fnGetObjectDataFn:A,_fnSetObjectDataFn:b,_fnGetDataMaster:ht,_fnClearTable:pt,_fnDeleteIndex:gt,_fnInvalidate:bt,_fnGetRowElements:mt,_fnCreateTr:St,_fnBuildHead:yt,_fnDrawHead:Dt,_fnDraw:y,_fnReDraw:u,_fnAddOptionsHtml:_t,_fnDetectHeader:wt,_fnGetUniqueThs:Ct,_fnFeatureHtmlFilter:Lt,_fnFilterComplete:Rt,_fnFilterCustom:Pt,_fnFilterColumn:jt,_fnFilter:Ht,_fnFilterCreateSearch:Nt,_fnEscapeRegex:Ot,_fnFilterData:Wt,_fnFeatureHtmlInfo:Ut,_fnUpdateInfo:Vt,_fnInfoMacros:Xt,_fnInitialise:Jt,_fnInitComplete:qt,_fnLengthChange:$t,_fnFeatureHtmlLength:Gt,_fnFeatureHtmlPaginate:zt,_fnPageChange:Yt,_fnFeatureHtmlProcessing:Zt,_fnProcessingDisplay:D,_fnFeatureHtmlTable:Kt,_fnScrollDraw:Qt,_fnApplyToChildren:k,_fnCalculateColumnWidths:ee,_fnThrottle:ne,_fnConvertToWidth:ae,_fnGetWidestNode:re,_fnGetMaxLenString:oe,_fnStringToCss:M,_fnSortFlatten:I,_fnSort:ie,_fnSortAria:le,_fnSortListener:se,_fnSortAttachListener:ue,_fnSortingClasses:ce,_fnSortData:fe,_fnSaveState:de,_fnLoadState:he,_fnImplementState:pe,_fnSettingsFromNode:ge,_fnLog:W,_fnMap:F,_fnBindAction:me,_fnCallbackReg:L,_fnCallbackFire:R,_fnLengthOverflow:Se,_fnRenderer:ve,_fnDataSource:E,_fnRowAttributes:vt,_fnExtend:be,_fnCalculateEnd:function(){}}),((P.fn.dataTable=w).$=P).fn.dataTableSettings=w.settings,P.fn.dataTableExt=w.ext,P.fn.DataTable=function(t){return P(this).dataTable(t).api()},P.each(w,function(t,e){P.fn.DataTable[t]=e}),w}); \ No newline at end of file
diff --git a/bitbake/lib/toaster/toastergui/static/js/libtoaster.js b/bitbake/lib/toaster/toastergui/static/js/libtoaster.js
index f2c45c833e..d4ac31234c 100644
--- a/bitbake/lib/toaster/toastergui/static/js/libtoaster.js
+++ b/bitbake/lib/toaster/toastergui/static/js/libtoaster.js
@@ -657,7 +657,7 @@ $(document).ready(function() {
hljs.initHighlightingOnLoad();
// Prevent invalid links from jumping page scroll
- $('a[href=#]').click(function() {
+ $('a[href="#"]').click(function() {
return false;
});
diff --git a/bitbake/lib/toaster/toastergui/static/js/projectpage.js b/bitbake/lib/toaster/toastergui/static/js/projectpage.js
index 506471e091..a3c95810a7 100644
--- a/bitbake/lib/toaster/toastergui/static/js/projectpage.js
+++ b/bitbake/lib/toaster/toastergui/static/js/projectpage.js
@@ -61,7 +61,7 @@ function projectPageInit(ctx) {
distroChangeInput.val(urlParams.setDistro);
distroChangeBtn.click();
} else {
- updateDistroName(prjInfo.distro.name);
+ updateDistroName(prjInfo.distro?.name);
}
/* Now we're really ready show the page */
diff --git a/bitbake/lib/toaster/toastergui/templates/base.html b/bitbake/lib/toaster/toastergui/templates/base.html
index 2b3054936a..e90be69620 100644
--- a/bitbake/lib/toaster/toastergui/templates/base.html
+++ b/bitbake/lib/toaster/toastergui/templates/base.html
@@ -14,11 +14,11 @@
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<meta http-equiv="Content-Type" content="text/html;charset=UTF-8" />
- <script src="{% static 'js/jquery-2.0.3.min.js' %}">
+ <script src="{% static 'js/jquery-3.7.1.min.js' %}">
</script>
<script src="{% static 'js/jquery.cookie.js' %}">
</script>
- <script src="{% static 'js/bootstrap.min.js' %}">
+ <script src="{% static 'js/bootstrap-3.4.1.min.js' %}">
</script>
<script src="{% static 'js/typeahead.jquery.js' %}">
</script>
@@ -94,7 +94,7 @@
</a>
<a class="brand" href="/">Toaster</a>
{% if DEBUG %}
- <span class="glyphicon glyphicon-info-sign" title="<strong>Toaster version information</strong>" data-content="<dl><dt>Git branch</dt><dd>{{TOASTER_BRANCH}}</dd><dt>Git revision</dt><dd>{{TOASTER_REVISION}}</dd></dl>"></i>
+ <span id="toaster-version-info-sign" class="glyphicon glyphicon-info-sign" title="<strong>Toaster version information</strong>" data-content="<dl><dt>Git branch</dt><dd>{{TOASTER_BRANCH}}</dd><dt>Git revision</dt><dd>{{TOASTER_REVISION}}</dd></dl>"></i>
{% endif %}
</div>
</div>
@@ -132,7 +132,8 @@
{% if project_enable %}
<a class="btn btn-default navbar-btn navbar-right" id="new-project-button" href="{% url 'newproject' %}">New project</a>
{% endif %}
- </div>
+ <a class="btn btn-default navbar-btn navbar-right" id="import_page" style="margin-right: 5px !important" id="import-cmdline-button" href="{% url 'cmdlines' %}">Import command line builds</a>
+ </div>
</div>
</nav>
diff --git a/bitbake/lib/toaster/toastergui/templates/base_specific.html b/bitbake/lib/toaster/toastergui/templates/base_specific.html
index e377cadd73..425f7ed73d 100644
--- a/bitbake/lib/toaster/toastergui/templates/base_specific.html
+++ b/bitbake/lib/toaster/toastergui/templates/base_specific.html
@@ -14,11 +14,11 @@
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<meta http-equiv="Content-Type" content="text/html;charset=UTF-8" />
- <script src="{% static 'js/jquery-2.0.3.min.js' %}">
+ <script src="{% static 'js/jquery-3.7.1.min.js' %}">
</script>
<script src="{% static 'js/jquery.cookie.js' %}">
</script>
- <script src="{% static 'js/bootstrap.min.js' %}">
+ <script src="{% static 'js/bootstrap-3.4.1.min.js' %}">
</script>
<script src="{% static 'js/typeahead.jquery.js' %}">
</script>
diff --git a/bitbake/lib/toaster/toastergui/templates/command_line_builds.html b/bitbake/lib/toaster/toastergui/templates/command_line_builds.html
new file mode 100644
index 0000000000..05db6727e7
--- /dev/null
+++ b/bitbake/lib/toaster/toastergui/templates/command_line_builds.html
@@ -0,0 +1,209 @@
+{% extends "base.html" %}
+{% load projecttags %}
+{% load humanize %}
+{% load static %}
+
+{% block title %} Import Builds from eventlogs - Toaster {% endblock %}
+
+{% block pagecontent %}
+
+<div class="container-fluid">
+ <div id="overlay" class="hide">
+ <div class="spinner">
+ <div class="fa-spin">
+ </div>
+ </div>
+ </div>
+ <div class="row">
+ <div class="col-md-12">
+ <div class="page-header">
+ <div class="row">
+ <div class="col-md-6">
+ <h1>Import command line builds</h1>
+ </div>
+ {% if import_all %}
+ <div class="col-md-6">
+ <button id="import_all" type="button" class="btn btn-primary navbar-btn navbar-right">
+ <span class="glyphicon glyphicon-upload" style="vertical-align: top;"></span> Import All
+ </button>
+ </div>
+ {% endif %}
+ </div>
+ </div>
+ {% if messages %}
+ <div class="row-fluid" id="empty-state-{{table_name}}">
+ {% for message in messages %}
+ <div class="alert alert-danger">{{message}}</div>
+ {%endfor%}
+ </div>
+ {% endif %}
+ <div class="row">
+ <h4 style="margin-left: 15px;"><strong>Import eventlog file</strong></h4>
+ <form method="POST" enctype="multipart/form-data" action="{% url 'cmdlines' %}" id="form_file">
+ {% csrf_token %}
+ <div class="col-md-6" style="padding-left: 20px;">
+ <div class="row">
+ <input type="hidden" value="{{dir}}" name="dir">
+ <div class="col-md-3"> {{ form.eventlog_file}} </div>
+ </div>
+ <div class="row" style="padding-top: 10px;">
+ <div class="col-md-6">
+ <button id="file_import" type="submit" disabled="disabled" class="btn btn-default navbar-btn" >
+ <span class="glyphicon glyphicon-upload" style="vertical-align: top;"></span> Import
+ </button>
+ </div>
+ </div>
+ </div>
+ </form>
+ </div>
+
+ <div class="row" style="padding-top: 20px;">
+ <div class="col-md-8 ">
+ <h4><strong>Eventlogs from existing build directory: </strong>
+ <a href="#" data-toggle="tooltip" title="{{dir}}">
+ <svg xmlns="http://www.w3.org/2000/svg" width="16" height="16" fill="currentColor" class="bi bi-info-circle" viewBox="0 0 16 16" data-toggle="tooltip">
+ <path d="M8 15A7 7 0 1 1 8 1a7 7 0 0 1 0 14m0 1A8 8 0 1 0 8 0a8 8 0 0 0 0 16"/>
+ <path d="m8.93 6.588-2.29.287-.082.38.45.083c.294.07.352.176.288.469l-.738 3.468c-.194.897.105 1.319.808 1.319.545 0 1.178-.252 1.465-.598l.088-.416c-.2.176-.492.246-.686.246-.275 0-.375-.193-.304-.533zM9 4.5a1 1 0 1 1-2 0 1 1 0 0 1 2 0"/>
+ </svg>
+ </a>
+ </h4>
+ {% if files %}
+ <div class="table-responsive">
+ <table class="table col-md-6 table-bordered table-hover" id="eventlog-table" style="border-collapse: collapse;">
+ <thead>
+ <tr class="row">
+ <th scope="col">Name</th>
+ <th scope="col">Size</th>
+ <th scope="col">Action</th>
+ </tr>
+ </thead>
+ <tbody>
+ {% for file in files %}
+ <tr class="row" style="height: 48px;">
+ <th scope="row" class="col-md-4" style="vertical-align: middle;">
+ <input type="hidden" value="{{file.name}}" name="{{file.name}}">{{file.name}}
+ </th>
+ <td class="col-md-4 align-middle" style="vertical-align: middle;">{{file.size|filesizeformat}}</td>
+ <td class="col-md-4 align-middle" style="vertical-align: middle;">
+ {% if file.imported == True and file.build_id is not None %}
+ <a href="{% url 'builddashboard' file.build_id %}">Build Details</a>
+ {% elif request.session.file == file.name or request.session.all_builds %}
+ <a data-toggle="tooltip" title="Build in progress">
+ <span class="glyphicon glyphicon-upload" style="font-size: 18px; color:grey"></span>
+ </a>
+ {%else%}
+ <a onclick="_ajax_update('{{file.name}}', false, '{{dir}}')" data-toggle="tooltip" title="Import File">
+ <span class="glyphicon glyphicon-upload" style="font-size: 18px;"></span>
+ </a>
+ {%endif%}
+ </td>
+ </tr>
+ {% endfor%}
+ </tbody>
+ </table>
+ </div>
+ {% else %}
+ <div class="row-fluid" id="empty-state-{{table_name}}">
+ <div class="alert alert-info">Sorry - no files found</div>
+ </div>
+ {%endif%}
+ </div>
+ </div>
+ </div>
+ </div>
+</div>
+
+<link rel="stylesheet" href="{% static 'css/jquery.dataTables-1.13.8.min.css' %}" type='text/css'/>
+<script src="{% static 'js/jquery.dataTables-1.13.8.min.js' %}"> </script>
+<script>
+
+function _ajax_update(file, all, dir){
+ function getCookie(name) {
+ var cookieValue = null;
+ if (document.cookie && document.cookie !== '') {
+ var cookies = document.cookie.split(';');
+ for (var i = 0; i < cookies.length; i++) {
+ var cookie = jQuery.trim(cookies[i]);
+ // Does this cookie string begin with the name we want?
+ if (cookie.substring(0, name.length + 1) === (name + '=')) {
+ cookieValue = decodeURIComponent(cookie.substring(name.length + 1));
+ break;
+ }
+ }
+ }
+ return cookieValue;
+ }
+ var csrftoken = getCookie('csrftoken');
+
+ function csrfSafeMethod(method) {
+ // these HTTP methods do not require CSRF protection
+ return (/^(GET|HEAD|OPTIONS|TRACE)$/.test(method));
+ }
+ $.ajaxSetup({
+ beforeSend: function (xhr, settings) {
+ if (!csrfSafeMethod(settings.type) && !this.crossDomain) {
+ xhr.setRequestHeader("X-CSRFToken", csrftoken);
+ }
+ }
+ });
+
+ $.ajax({
+ url:'/toastergui/cmdline/',
+ type: "POST",
+ data: {file: file, all: all, dir: dir},
+ success:function(data){
+ if (data['response']=='building'){
+ location.reload()
+ } else {
+ window.location = '/toastergui/builds/'
+ }
+ },
+ complete:function(data){
+ },
+ error:function (xhr, textStatus, thrownError){
+ console.log('fail');
+ }
+ });
+}
+
+$('#import_all').on('click', function(){
+ _ajax_update("{{files | safe}}", true, "{{dir | safe}}");
+});
+
+
+$('#import_page').hide();
+
+$(function () {
+ $('[data-toggle="tooltip"]').tooltip()
+})
+
+
+$("#id_eventlog_file").change(function(){
+ $('#file_import').prop("disabled", false);
+ $('#file_import').addClass('btn-primary')
+ $('#file_import').removeClass('btn-default')
+})
+
+$(document).ajaxStart(function(){
+ $('#overlay').removeClass('hide');
+ window.setTimeout(
+ function() {
+ window.location = '/toastergui/builds/'
+ }, 10000)
+});
+
+$( "#form_file").on( "submit", function( event ) {
+ $('#overlay').removeClass('hide');
+ window.setTimeout(
+ function() {
+ window.location = '/toastergui/builds/'
+ }, 10000)
+});
+
+$(document).ready( function () {
+ $('#eventlog-table').DataTable({order: [[0, 'desc']], "pageLength": 50});
+});
+
+</script>
+
+{% endblock %}
diff --git a/bitbake/lib/toaster/toastergui/templates/js-unit-tests.html b/bitbake/lib/toaster/toastergui/templates/js-unit-tests.html
index ca248962f0..41553c4f9d 100644
--- a/bitbake/lib/toaster/toastergui/templates/js-unit-tests.html
+++ b/bitbake/lib/toaster/toastergui/templates/js-unit-tests.html
@@ -11,7 +11,7 @@
<script src="{% static 'js/layerDepsModal.js' %}"></script>
<script src="{% static 'js/projectpage.js' %}"></script>
-<script src="{% static 'js/bootstrap.min.js' %}"></script>
+<script src="{% static 'js/bootstrap-3.4.1.min.js' %}"></script>
<script src="{% static 'js/filtersnippet.js' %}"></script>
<script src="{% static 'js/importlayer.js' %}"></script>
<script src="{% static 'js/highlight.pack.js' %}"></script>
diff --git a/bitbake/lib/toaster/toastergui/templates/landing.html b/bitbake/lib/toaster/toastergui/templates/landing.html
index 08b40fb2f2..589ee22634 100644
--- a/bitbake/lib/toaster/toastergui/templates/landing.html
+++ b/bitbake/lib/toaster/toastergui/templates/landing.html
@@ -12,10 +12,10 @@
<div class="col-md-6">
<h1>This is Toaster</h1>
- <p>A web interface to <a href="https://www.openembedded.org">OpenEmbedded</a> and <a href="https://www.yoctoproject.org/tools-resources/projects/bitbake">BitBake</a>, the <a href="https://www.yoctoproject.org">Yocto Project</a> build system.</p>
+ <p>A web interface to <a href="https://www.openembedded.org">OpenEmbedded</a> and <a href="https://docs.yoctoproject.org/bitbake.html">BitBake</a>, the <a href="https://www.yoctoproject.org">Yocto Project</a> build system.</p>
<p class="top-air">
- <a class="btn btn-info btn-lg" href="http://docs.yoctoproject.org/toaster-manual/setup-and-use.html#setting-up-and-using-toaster">
+ <a class="btn btn-info btn-lg" href="http://docs.yoctoproject.org/toaster-manual/setup-and-use.html#setting-up-and-using-toaster" style="min-width: 460px;">
Toaster is ready to capture your command line builds
</a>
</p>
@@ -23,7 +23,7 @@
{% if lvs_nos %}
{% if project_enable %}
<p class="top-air">
- <a class="btn btn-primary btn-lg" href="{% url 'newproject' %}">
+ <a class="btn btn-primary btn-lg" href="{% url 'newproject' %}" style="min-width: 460px;">
Create your first Toaster project to run manage builds
</a>
</p>
@@ -42,6 +42,12 @@
</div>
{% endif %}
+ <p class="top-air">
+ <a class="btn btn-info btn-lg" href="{% url 'cmdlines' %}" style="min-width: 460px;">
+ Import command line event logs from build directory
+ </a>
+ </p>
+
<ul class="list-unstyled lead">
<li>
<a href="http://docs.yoctoproject.org/toaster-manual/index.html#toaster-user-manual">
diff --git a/bitbake/lib/toaster/toastergui/templates/mrb_section.html b/bitbake/lib/toaster/toastergui/templates/mrb_section.html
index 98d9fac822..9fc7dfaee4 100644
--- a/bitbake/lib/toaster/toastergui/templates/mrb_section.html
+++ b/bitbake/lib/toaster/toastergui/templates/mrb_section.html
@@ -63,7 +63,7 @@
<%/if%>
</div>
- <div data-build-state="<%:state%>">
+ <div class="build-state" data-build-state="<%:state%>">
<%if state == 'Cloning'%>
<%include tmpl='#cloning-repos-build-template'/%>
<%else state == 'Parsing'%>
diff --git a/bitbake/lib/toaster/toastergui/templates/package_built_dependencies.html b/bitbake/lib/toaster/toastergui/templates/package_built_dependencies.html
index a5d5893571..2493954deb 100644
--- a/bitbake/lib/toaster/toastergui/templates/package_built_dependencies.html
+++ b/bitbake/lib/toaster/toastergui/templates/package_built_dependencies.html
@@ -18,7 +18,7 @@
</ul>
<div class="tab-content">
<div class="tab-pane active" id="dependencies">
- {% ifequal runtime_deps|length 0 %}
+ {% if runtime_deps|length == 0 %}
<div class="alert alert-info">
<strong>{{package.fullpackagespec}}</strong> has no runtime dependencies.
</div>
@@ -54,8 +54,8 @@
{% endfor %}
</tbody>
</table>
- {% endifequal %}
- {% ifnotequal other_deps|length 0 %}
+ {% endif %}
+ {% if other_deps|length != 0 %}
<h3>Other runtime relationships</h3>
<table class="table table-bordered table-hover">
<thead>
@@ -93,7 +93,7 @@
{% endfor %}
</tbody>
</table>
- {% endifnotequal %}
+ {% endif %}
</div> <!-- tab-pane -->
</div> <!-- tab-content -->
{% endblock tabcontent %}
diff --git a/bitbake/lib/toaster/toastergui/templates/package_included_dependencies.html b/bitbake/lib/toaster/toastergui/templates/package_included_dependencies.html
index 95e56ded26..1f5ed6d913 100644
--- a/bitbake/lib/toaster/toastergui/templates/package_included_dependencies.html
+++ b/bitbake/lib/toaster/toastergui/templates/package_included_dependencies.html
@@ -14,7 +14,7 @@
{% include "package_included_tabs.html" with active_tab="dependencies" %}
<div class="tab-content">
<div class="tab-pane active" id="dependencies">
- {% ifnotequal runtime_deps|length 0 %}
+ {% if runtime_deps|length != 0 %}
<table class="table table-bordered table-hover">
<thead>
<tr>
@@ -48,9 +48,9 @@
<div class="alert alert-info">
<strong>{{package.fullpackagespec}}</strong> has no runtime dependencies.
</div>
- {% endifnotequal %}
+ {% endif %}
- {% ifnotequal other_deps|length 0 %}
+ {% if other_deps|length != 0 %}
<h3>Other runtime relationships</h3>
<table class="table table-bordered table-hover">
<thead>
@@ -103,7 +103,7 @@
{% endfor %}
</tbody>
</table>
- {% endifnotequal %}
+ {% endif %}
</div> <!-- end tab-pane -->
</div> <!-- end tab content -->
{% endwith %}
diff --git a/bitbake/lib/toaster/toastergui/templates/package_included_reverse_dependencies.html b/bitbake/lib/toaster/toastergui/templates/package_included_reverse_dependencies.html
index fb310c7fc7..dae4549e21 100644
--- a/bitbake/lib/toaster/toastergui/templates/package_included_reverse_dependencies.html
+++ b/bitbake/lib/toaster/toastergui/templates/package_included_reverse_dependencies.html
@@ -15,7 +15,7 @@
<div class="tab-content">
<div class="tab-pane active" id="brought-in-by">
- {% ifequal reverse_count 0 %}
+ {% if reverse_count == 0 %}
<div class="alert alert-info">
<strong>{{package.fullpackagespec}}</strong> has no reverse runtime dependencies.
</div>
@@ -43,7 +43,7 @@
{% endfor %}
</tbody>
</table>
- {% endifequal %}
+ {% endif %}
</div> <!-- end tab-pane -->
</div> <!-- end tab content -->
{% endwith %}
diff --git a/bitbake/lib/toaster/toastergui/templates/recipe.html b/bitbake/lib/toaster/toastergui/templates/recipe.html
index 3f76e656fe..4b5301b548 100644
--- a/bitbake/lib/toaster/toastergui/templates/recipe.html
+++ b/bitbake/lib/toaster/toastergui/templates/recipe.html
@@ -186,9 +186,9 @@
<i class="icon-question-sign get-help hover-help" title="{{task.get_outcome_help}}"></i>
</td>
<td>
- {% ifnotequal task.sstate_result task.SSTATE_NA %}
+ {% if task.sstate_result != task.SSTATE_NA %}
{{task.get_sstate_result_display}}
- {% endifnotequal %}
+ {% endif %}
</td>
</tr>
diff --git a/bitbake/lib/toaster/toastergui/templates/target.html b/bitbake/lib/toaster/toastergui/templates/target.html
index 1924a0dad7..d5f60e77a8 100644
--- a/bitbake/lib/toaster/toastergui/templates/target.html
+++ b/bitbake/lib/toaster/toastergui/templates/target.html
@@ -8,11 +8,11 @@
{% block nav-target %}
{% for t in build.get_sorted_target_list %}
- {% ifequal target.pk t.pk %}
+ {% if target.pk == t.pk %}
<li class="active"><a href="{% url 'target' build.pk t.pk %}">{{t.target}}</a><li>
{% else %}
<li><a href="{% url 'target' build.pk t.pk %}">{{t.target}}</a><li>
- {% endifequal %}
+ {% endif %}
{% endfor %}
{% endblock %}
diff --git a/bitbake/lib/toaster/toastergui/templatetags/projecttags.py b/bitbake/lib/toaster/toastergui/templatetags/projecttags.py
index c432f59a78..bd398f0012 100644
--- a/bitbake/lib/toaster/toastergui/templatetags/projecttags.py
+++ b/bitbake/lib/toaster/toastergui/templatetags/projecttags.py
@@ -167,8 +167,8 @@ def check_filter_status(options, filter):
def variable_parent_name(value):
""" filter extended variable names to the parent name
"""
- value=re.sub('_\$.*', '', value)
- return re.sub('_[a-z].*', '', value)
+ value = re.sub(r'_\$.*', '', value)
+ return re.sub(r'_[a-z].*', '', value)
@register.filter
def filter_setin_files(file_list, matchstr):
diff --git a/bitbake/lib/toaster/toastergui/urls.py b/bitbake/lib/toaster/toastergui/urls.py
index d2df4e6048..7f8489d3aa 100644
--- a/bitbake/lib/toaster/toastergui/urls.py
+++ b/bitbake/lib/toaster/toastergui/urls.py
@@ -6,7 +6,7 @@
# SPDX-License-Identifier: GPL-2.0-only
#
-from django.conf.urls import url
+from django.urls import re_path as url
from django.views.generic import RedirectView
from toastergui import tables
@@ -95,6 +95,7 @@ urlpatterns = [
# project URLs
url(r'^newproject/$', views.newproject, name='newproject'),
+ url(r'^cmdline/$', views.CommandLineBuilds.as_view(), name='cmdlines'),
url(r'^projects/$',
tables.ProjectsTable.as_view(template_name="projects-toastertable.html"),
name='all-projects'),
@@ -206,8 +207,7 @@ urlpatterns = [
url(r'^js-unit-tests/$', views.jsunittests, name='js-unit-tests'),
# image customisation functionality
- url(r'^xhr_customrecipe/(?P<recipe_id>\d+)'
- '/packages/(?P<package_id>\d+|)$',
+ url(r'^xhr_customrecipe/(?P<recipe_id>\d+)/packages/(?P<package_id>\d+|)$',
api.XhrCustomRecipePackages.as_view(),
name='xhr_customrecipe_packages'),
diff --git a/bitbake/lib/toaster/toastergui/views.py b/bitbake/lib/toaster/toastergui/views.py
index a571b8cc18..40aed265dc 100644
--- a/bitbake/lib/toaster/toastergui/views.py
+++ b/bitbake/lib/toaster/toastergui/views.py
@@ -6,24 +6,36 @@
# SPDX-License-Identifier: GPL-2.0-only
#
+import ast
import re
+import subprocess
+import sys
+
+import bb.cooker
+from bb.ui import toasterui
+from bb.ui import eventreplay
from django.db.models import F, Q, Sum
from django.db import IntegrityError
-from django.shortcuts import render, redirect, get_object_or_404
+from django.shortcuts import render, redirect, get_object_or_404, HttpResponseRedirect
from django.utils.http import urlencode
from orm.models import Build, Target, Task, Layer, Layer_Version, Recipe
from orm.models import LogMessage, Variable, Package_Dependency, Package
from orm.models import Task_Dependency, Package_File
from orm.models import Target_Installed_Package, Target_File
from orm.models import TargetKernelFile, TargetSDKFile, Target_Image_File
-from orm.models import BitbakeVersion, CustomImageRecipe
+from orm.models import BitbakeVersion, CustomImageRecipe, EventLogsImports
from django.urls import reverse, resolve
+from django.contrib import messages
+
from django.core.exceptions import ObjectDoesNotExist
+from django.core.files.storage import FileSystemStorage
+from django.core.files.uploadedfile import InMemoryUploadedFile, TemporaryUploadedFile
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
from django.http import HttpResponseNotFound, JsonResponse
from django.utils import timezone
+from django.views.generic import TemplateView
from datetime import timedelta, datetime
from toastergui.templatetags.projecttags import json as jsonfilter
from decimal import Decimal
@@ -32,13 +44,20 @@ import os
from os.path import dirname
import mimetypes
+from toastergui.forms import LoadFileForm
+
+from collections import namedtuple
+
import logging
+from toastermain.logs import log_view_mixin
+
logger = logging.getLogger("toaster")
# Project creation and managed build enable
project_enable = ('1' == os.environ.get('TOASTER_BUILDSERVER'))
is_project_specific = ('1' == os.environ.get('TOASTER_PROJECTSPECIFIC'))
+import_page = False
class MimeTypeFinder(object):
# setting this to False enables additional non-standard mimetypes
@@ -56,6 +75,7 @@ class MimeTypeFinder(object):
return guessed_type
# single point to add global values into the context before rendering
+@log_view_mixin
def toaster_render(request, page, context):
context['project_enable'] = project_enable
context['project_specific'] = is_project_specific
@@ -665,16 +685,17 @@ def recipe_packages(request, build_id, recipe_id):
return response
from django.http import HttpResponse
+@log_view_mixin
def xhr_dirinfo(request, build_id, target_id):
top = request.GET.get('start', '/')
return HttpResponse(_get_dir_entries(build_id, target_id, top), content_type = "application/json")
from django.utils.functional import Promise
-from django.utils.encoding import force_text
+from django.utils.encoding import force_str
class LazyEncoder(json.JSONEncoder):
def default(self, obj):
if isinstance(obj, Promise):
- return force_text(obj)
+ return force_str(obj)
return super(LazyEncoder, self).default(obj)
from toastergui.templatetags.projecttags import filtered_filesizeformat
@@ -1404,7 +1425,7 @@ if True:
if not os.path.isdir('%s/conf' % request.POST['importdir']):
raise BadParameterException("Bad path or missing 'conf' directory (%s)" % request.POST['importdir'])
from django.core import management
- management.call_command('buildimport', '--command=import', '--name=%s' % request.POST['projectname'], '--path=%s' % request.POST['importdir'], interactive=False)
+ management.call_command('buildimport', '--command=import', '--name=%s' % request.POST['projectname'], '--path=%s' % request.POST['importdir'])
prj = Project.objects.get(name = request.POST['projectname'])
prj.merged_attr = True
prj.save()
@@ -1606,12 +1627,13 @@ if True:
# make sure we have a machine set for this project
ProjectVariable.objects.get_or_create(project=new_project,
name="MACHINE",
- value="qemux86")
+ value="qemux86-64")
context = {'project': new_project}
return toaster_render(request, "js-unit-tests.html", context)
from django.views.decorators.csrf import csrf_exempt
@csrf_exempt
+ @log_view_mixin
def xhr_testreleasechange(request, pid):
def response(data):
return HttpResponse(jsonfilter(data),
@@ -1648,6 +1670,7 @@ if True:
except Exception as e:
return response({"error": str(e) })
+ @log_view_mixin
def xhr_configvaredit(request, pid):
try:
prj = Project.objects.get(id = pid)
@@ -1726,6 +1749,7 @@ if True:
return HttpResponse(json.dumps({"error":str(e) + "\n" + traceback.format_exc()}), content_type = "application/json")
+ @log_view_mixin
def customrecipe_download(request, pid, recipe_id):
recipe = get_object_or_404(CustomImageRecipe, pk=recipe_id)
@@ -1933,3 +1957,163 @@ if True:
except (ObjectDoesNotExist, IOError):
return toaster_render(request, "unavailable_artifact.html")
+
+class CommandLineBuilds(TemplateView):
+ model = EventLogsImports
+ template_name = 'command_line_builds.html'
+
+ def get_context_data(self, **kwargs):
+ context = super(CommandLineBuilds, self).get_context_data(**kwargs)
+ #get value from BB_DEFAULT_EVENTLOG defined in bitbake.conf
+ eventlog = subprocess.check_output(['bitbake-getvar', 'BB_DEFAULT_EVENTLOG', '--value'])
+ if eventlog:
+ logs_dir = os.path.dirname(eventlog.decode().strip('\n'))
+ files = os.listdir(logs_dir)
+ imported_files = EventLogsImports.objects.all()
+ files_list = []
+
+ # Filter files that end with ".json"
+ event_files = []
+ for file in files:
+ if file.endswith(".json"):
+ # because BB_DEFAULT_EVENTLOG is a directory, we need to check if the file is a valid eventlog
+ with open("{}/{}".format(logs_dir, file)) as efile:
+ content = efile.read()
+ if 'allvariables' in content:
+ event_files.append(file)
+
+ #build dict for template using db data
+ for event_file in event_files:
+ if imported_files.filter(name=event_file):
+ files_list.append({
+ 'name': event_file,
+ 'imported': True,
+ 'build_id': imported_files.filter(name=event_file)[0].build_id,
+ 'size': os.path.getsize("{}/{}".format(logs_dir, event_file))
+ })
+ else:
+ files_list.append({
+ 'name': event_file,
+ 'imported': False,
+ 'build_id': None,
+ 'size': os.path.getsize("{}/{}".format(logs_dir, event_file))
+ })
+ context['import_all'] = True
+
+ context['files'] = files_list
+ context['dir'] = logs_dir
+ else:
+ context['files'] = []
+ context['dir'] = ''
+
+ # enable session variable
+ if not self.request.session.get('file'):
+ self.request.session['file'] = ""
+
+ context['form'] = LoadFileForm()
+ context['project_enable'] = project_enable
+ return context
+
+ def post(self, request, **kwargs):
+ logs_dir = request.POST.get('dir')
+ all_files = request.POST.get('all')
+
+ # check if a build is already in progress
+ if Build.objects.filter(outcome=Build.IN_PROGRESS):
+ messages.add_message(
+ self.request,
+ messages.ERROR,
+ "A build is already in progress. Please wait for it to complete before starting a new build."
+ )
+ return JsonResponse({'response': 'building'})
+ imported_files = EventLogsImports.objects.all()
+ try:
+ if all_files == 'true':
+ # use of session variable to deactivate icon for builds in progress
+ request.session['all_builds'] = True
+ request.session.modified = True
+ request.session.save()
+
+ files = ast.literal_eval(request.POST.get('file'))
+ for file in files:
+ if imported_files.filter(name=file.get('name')).exists():
+ imported_files.filter(name=file.get('name'))[0].imported = True
+ else:
+ with open("{}/{}".format(logs_dir, file.get('name'))) as eventfile:
+ # load variables from the first line
+ variables = None
+ while line := eventfile.readline().strip():
+ try:
+ variables = json.loads(line)['allvariables']
+ break
+ except (KeyError, json.JSONDecodeError):
+ continue
+ if not variables:
+ raise Exception("File content missing build variables")
+ eventfile.seek(0)
+ params = namedtuple('ConfigParams', ['observe_only'])(True)
+ player = eventreplay.EventPlayer(eventfile, variables)
+
+ toasterui.main(player, player, params)
+ event_log_import = EventLogsImports.objects.create(name=file.get('name'), imported=True)
+ event_log_import.build_id = Build.objects.last().id
+ event_log_import.save()
+ else:
+ if self.request.FILES.get('eventlog_file'):
+ file = self.request.FILES['eventlog_file']
+ else:
+ file = request.POST.get('file')
+ # use of session variable to deactivate icon for build in progress
+ request.session['file'] = file
+ request.session['all_builds'] = False
+ request.session.modified = True
+ request.session.save()
+
+ if imported_files.filter(name=file).exists():
+ imported_files.filter(name=file)[0].imported = True
+ else:
+ if isinstance(file, InMemoryUploadedFile) or isinstance(file, TemporaryUploadedFile):
+ variables = None
+ while line := file.readline().strip():
+ try:
+ variables = json.loads(line)['allvariables']
+ break
+ except (KeyError, json.JSONDecodeError):
+ continue
+ if not variables:
+ raise Exception("File content missing build variables")
+ file.seek(0)
+ params = namedtuple('ConfigParams', ['observe_only'])(True)
+ player = eventreplay.EventPlayer(file, variables)
+ if not os.path.exists('{}/{}'.format(logs_dir, file.name)):
+ fs = FileSystemStorage(location=logs_dir)
+ fs.save(file.name, file)
+ toasterui.main(player, player, params)
+ else:
+ with open("{}/{}".format(logs_dir, file)) as eventfile:
+ # load variables from the first line
+ variables = None
+ while line := eventfile.readline().strip():
+ try:
+ variables = json.loads(line)['allvariables']
+ break
+ except (KeyError, json.JSONDecodeError):
+ continue
+ if not variables:
+ raise Exception("File content missing build variables")
+ eventfile.seek(0)
+ params = namedtuple('ConfigParams', ['observe_only'])(True)
+ player = eventreplay.EventPlayer(eventfile, variables)
+ toasterui.main(player, player, params)
+ event_log_import = EventLogsImports.objects.create(name=file, imported=True)
+ event_log_import.build_id = Build.objects.last().id
+ event_log_import.save()
+ request.session['file'] = ""
+ except Exception:
+ messages.add_message(
+ self.request,
+ messages.ERROR,
+ "The file content is not in the correct format. Update file content or upload a different file."
+ )
+ return HttpResponseRedirect("/toastergui/cmdline/")
+ return HttpResponseRedirect('/toastergui/builds/')
diff --git a/bitbake/lib/toaster/toastergui/widgets.py b/bitbake/lib/toaster/toastergui/widgets.py
index ceff52942e..b32abf40b3 100644
--- a/bitbake/lib/toaster/toastergui/widgets.py
+++ b/bitbake/lib/toaster/toastergui/widgets.py
@@ -7,6 +7,7 @@
#
from django.views.generic import View, TemplateView
+from django.utils.decorators import method_decorator
from django.views.decorators.cache import cache_control
from django.shortcuts import HttpResponse
from django.core.cache import cache
@@ -31,6 +32,7 @@ import re
import os
from toastergui.tablefilter import TableFilterMap
+from toastermain.logs import log_view_mixin
try:
from urllib import unquote_plus
@@ -63,8 +65,8 @@ class ToasterTable(TemplateView):
self.default_orderby = ""
# prevent HTTP caching of table data
- @cache_control(must_revalidate=True,
- max_age=0, no_store=True, no_cache=True)
+ @method_decorator(cache_control(must_revalidate=True,
+ max_age=0, no_store=True, no_cache=True))
def dispatch(self, *args, **kwargs):
return super(ToasterTable, self).dispatch(*args, **kwargs)
@@ -83,6 +85,7 @@ class ToasterTable(TemplateView):
return context
+ @log_view_mixin
def get(self, request, *args, **kwargs):
if request.GET.get('format', None) == 'json':
@@ -304,6 +307,7 @@ class ToasterTable(TemplateView):
self.setup_columns(**kwargs)
+ self.apply_orderby('pk')
if search:
self.apply_search(search)
if filters:
@@ -413,6 +417,7 @@ class ToasterTypeAhead(View):
def __init__(self, *args, **kwargs):
super(ToasterTypeAhead, self).__init__()
+ @log_view_mixin
def get(self, request, *args, **kwargs):
def response(data):
return HttpResponse(json.dumps(data,
@@ -468,6 +473,7 @@ class MostRecentBuildsView(View):
return False
+ @log_view_mixin
def get(self, request, *args, **kwargs):
"""
Returns a list of builds in JSON format.
diff --git a/bitbake/lib/toaster/toastermain/logs.py b/bitbake/lib/toaster/toastermain/logs.py
new file mode 100644
index 0000000000..62d871963a
--- /dev/null
+++ b/bitbake/lib/toaster/toastermain/logs.py
@@ -0,0 +1,158 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+
+import os
+import logging
+import json
+from pathlib import Path
+from django.http import HttpRequest
+
+BUILDDIR = Path(os.environ.get('BUILDDIR', '/tmp'))
+
+def log_api_request(request, response, view, logger_name='api'):
+ """Helper function for LogAPIMixin"""
+
+ repjson = {
+ 'view': view,
+ 'path': request.path,
+ 'method': request.method,
+ 'status': response.status_code
+ }
+
+ logger = logging.getLogger(logger_name)
+ logger.info(
+ json.dumps(repjson, indent=4, separators=(", ", " : "))
+ )
+
+
+def log_view_mixin(view):
+ def log_view_request(*args, **kwargs):
+ # get request from args else kwargs
+ request = None
+ if len(args) > 0:
+ for req in args:
+ if isinstance(req, HttpRequest):
+ request = req
+ break
+ elif request is None:
+ request = kwargs.get('request')
+
+ response = view(*args, **kwargs)
+ view_name = 'unknown'
+ if hasattr(request, 'resolver_match'):
+ if hasattr(request.resolver_match, 'view_name'):
+ view_name = request.resolver_match.view_name
+
+ log_api_request(
+ request, response, view_name, 'toaster')
+ return response
+ return log_view_request
+
+
+
+class LogAPIMixin:
+ """Logs API requests
+
+ tested with:
+ - APIView
+ - ModelViewSet
+ - ReadOnlyModelViewSet
+ - GenericAPIView
+
+ Note: you can set `view_name` attribute in View to override get_view_name()
+ """
+
+ def get_view_name(self):
+ if hasattr(self, 'view_name'):
+ return self.view_name
+ return super().get_view_name()
+
+ def finalize_response(self, request, response, *args, **kwargs):
+ log_api_request(request, response, self.get_view_name())
+ return super().finalize_response(request, response, *args, **kwargs)
+
+
+LOGGING_SETTINGS = {
+ 'version': 1,
+ 'disable_existing_loggers': False,
+ 'filters': {
+ 'require_debug_false': {
+ '()': 'django.utils.log.RequireDebugFalse'
+ }
+ },
+ 'formatters': {
+ 'datetime': {
+ 'format': '%(asctime)s %(levelname)s %(message)s'
+ },
+ 'verbose': {
+ 'format': '{levelname} {asctime} {module} {name}.{funcName} {process:d} {thread:d} {message}',
+ 'datefmt': "%d/%b/%Y %H:%M:%S",
+ 'style': '{',
+ },
+ 'api': {
+ 'format': '\n{levelname} {asctime} {name}.{funcName}:\n{message}',
+ 'style': '{'
+ }
+ },
+ 'handlers': {
+ 'mail_admins': {
+ 'level': 'ERROR',
+ 'filters': ['require_debug_false'],
+ 'class': 'django.utils.log.AdminEmailHandler'
+ },
+ 'console': {
+ 'level': 'DEBUG',
+ 'class': 'logging.StreamHandler',
+ 'formatter': 'datetime',
+ },
+ 'file_django': {
+ 'level': 'INFO',
+ 'class': 'logging.handlers.TimedRotatingFileHandler',
+ 'filename': BUILDDIR / 'toaster_logs/django.log',
+ 'when': 'D', # interval type
+ 'interval': 1, # defaults to 1
+ 'backupCount': 10, # how many files to keep
+ 'formatter': 'verbose',
+ },
+ 'file_api': {
+ 'level': 'INFO',
+ 'class': 'logging.handlers.TimedRotatingFileHandler',
+ 'filename': BUILDDIR / 'toaster_logs/api.log',
+ 'when': 'D',
+ 'interval': 1,
+ 'backupCount': 10,
+ 'formatter': 'verbose',
+ },
+ 'file_toaster': {
+ 'level': 'INFO',
+ 'class': 'logging.handlers.TimedRotatingFileHandler',
+ 'filename': BUILDDIR / 'toaster_logs/web.log',
+ 'when': 'D',
+ 'interval': 1,
+ 'backupCount': 10,
+ 'formatter': 'verbose',
+ },
+ },
+ 'loggers': {
+ 'django.request': {
+ 'handlers': ['file_django', 'console'],
+ 'level': 'WARN',
+ 'propagate': True,
+ },
+ 'django': {
+ 'handlers': ['file_django', 'console'],
+ 'level': 'WARNING',
+ 'propogate': True,
+ },
+ 'toaster': {
+ 'handlers': ['file_toaster'],
+ 'level': 'INFO',
+ 'propagate': False,
+ },
+ 'api': {
+ 'handlers': ['file_api'],
+ 'level': 'INFO',
+ 'propagate': False,
+ }
+ }
+}
diff --git a/bitbake/lib/toaster/toastermain/management/commands/buildimport.py b/bitbake/lib/toaster/toastermain/management/commands/buildimport.py
index e25b55e5ab..f7139aa041 100644
--- a/bitbake/lib/toaster/toastermain/management/commands/buildimport.py
+++ b/bitbake/lib/toaster/toastermain/management/commands/buildimport.py
@@ -545,7 +545,7 @@ class Command(BaseCommand):
# Find the directory's release, and promote to default_release if local paths
release = self.find_import_release(layers_list,lv_dict,default_release)
# create project, SANITY: reuse any project of same name
- project = Project.objects.create_project(project_name,release,project)
+ project = Project.objects.create_project(project_name,release,project, imported=True)
# Apply any new layers or variables
self.apply_conf_variables(project,layers_list,lv_dict,release)
# WORKAROUND: since we now derive the release, redirect 'newproject_specific' to 'project_specific'
diff --git a/bitbake/lib/toaster/toastermain/management/commands/checksocket.py b/bitbake/lib/toaster/toastermain/management/commands/checksocket.py
index 811fd5d516..b2c002da7a 100644
--- a/bitbake/lib/toaster/toastermain/management/commands/checksocket.py
+++ b/bitbake/lib/toaster/toastermain/management/commands/checksocket.py
@@ -13,7 +13,7 @@ import errno
import socket
from django.core.management.base import BaseCommand, CommandError
-from django.utils.encoding import force_text
+from django.utils.encoding import force_str
DEFAULT_ADDRPORT = "0.0.0.0:8000"
@@ -51,7 +51,7 @@ class Command(BaseCommand):
if hasattr(err, 'errno') and err.errno in errors:
errtext = errors[err.errno]
else:
- errtext = force_text(err)
+ errtext = force_str(err)
raise CommandError(errtext)
self.stdout.write("OK")
diff --git a/bitbake/lib/toaster/toastermain/settings.py b/bitbake/lib/toaster/toastermain/settings.py
index 609c85d9d8..e06adc5a93 100644
--- a/bitbake/lib/toaster/toastermain/settings.py
+++ b/bitbake/lib/toaster/toastermain/settings.py
@@ -9,6 +9,8 @@
# Django settings for Toaster project.
import os
+from pathlib import Path
+from toastermain.logs import LOGGING_SETTINGS
DEBUG = True
@@ -87,14 +89,17 @@ else:
from pytz.exceptions import UnknownTimeZoneError
try:
if pytz.timezone(zonename) is not None:
- zonefilelist[hashlib.md5(open(filepath, 'rb').read()).hexdigest()] = zonename
+ with open(filepath, 'rb') as f:
+ zonefilelist[hashlib.md5(f.read()).hexdigest()] = zonename
except UnknownTimeZoneError as ValueError:
# we expect timezone failures here, just move over
pass
except ImportError:
- zonefilelist[hashlib.md5(open(filepath, 'rb').read()).hexdigest()] = zonename
+ with open(filepath, 'rb') as f:
+ zonefilelist[hashlib.md5(f.read()).hexdigest()] = zonename
- TIME_ZONE = zonefilelist[hashlib.md5(open('/etc/localtime', 'rb').read()).hexdigest()]
+ with open('/etc/localtime', 'rb') as f:
+ TIME_ZONE = zonefilelist[hashlib.md5(f.read()).hexdigest()]
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
@@ -106,10 +111,6 @@ SITE_ID = 1
# to load the internationalization machinery.
USE_I18N = True
-# If you set this to False, Django will not format dates, numbers and
-# calendars according to the current locale.
-USE_L10N = True
-
# If you set this to False, Django will not use timezone-aware datetimes.
USE_TZ = True
@@ -150,6 +151,8 @@ STATICFILES_FINDERS = (
# Make this unique, and don't share it with anybody.
SECRET_KEY = 'NOT_SUITABLE_FOR_HOSTED_DEPLOYMENT'
+TMPDIR = os.environ.get('TOASTER_DJANGO_TMPDIR', '/tmp')
+
class InvalidString(str):
def __mod__(self, other):
from django.template.base import TemplateSyntaxError
@@ -186,7 +189,13 @@ TEMPLATES = [
'django.template.loaders.app_directories.Loader',
#'django.template.loaders.eggs.Loader',
],
- 'string_if_invalid': InvalidString("%s"),
+ # https://docs.djangoproject.com/en/4.2/ref/templates/api/#how-invalid-variables-are-handled
+ # Generally, string_if_invalid should only be enabled in order to debug
+ # a specific template problem, then cleared once debugging is complete.
+ # If you assign a value other than '' to string_if_invalid,
+ # you will experience rendering problems with these templates and sites.
+ # 'string_if_invalid': InvalidString("%s"),
+ 'string_if_invalid': "",
'debug': DEBUG,
},
},
@@ -210,7 +219,7 @@ CACHES = {
# },
'default': {
'BACKEND': 'django.core.cache.backends.filebased.FileBasedCache',
- 'LOCATION': '/tmp/toaster_cache_%d' % os.getuid(),
+ 'LOCATION': '%s/toaster_cache_%d' % (TMPDIR, os.getuid()),
'TIMEOUT': 1,
}
}
@@ -242,6 +251,9 @@ INSTALLED_APPS = (
'django.contrib.humanize',
'bldcollector',
'toastermain',
+
+ # 3rd-lib
+ "log_viewer",
)
@@ -302,43 +314,21 @@ for t in os.walk(os.path.dirname(currentdir)):
# the site admins on every HTTP 500 error when DEBUG=False.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
-LOGGING = {
- 'version': 1,
- 'disable_existing_loggers': False,
- 'filters': {
- 'require_debug_false': {
- '()': 'django.utils.log.RequireDebugFalse'
- }
- },
- 'formatters': {
- 'datetime': {
- 'format': '%(asctime)s %(levelname)s %(message)s'
- }
- },
- 'handlers': {
- 'mail_admins': {
- 'level': 'ERROR',
- 'filters': ['require_debug_false'],
- 'class': 'django.utils.log.AdminEmailHandler'
- },
- 'console': {
- 'level': 'DEBUG',
- 'class': 'logging.StreamHandler',
- 'formatter': 'datetime',
- }
- },
- 'loggers': {
- 'toaster' : {
- 'handlers': ['console'],
- 'level': 'DEBUG',
- },
- 'django.request': {
- 'handlers': ['console'],
- 'level': 'WARN',
- 'propagate': True,
- },
- }
-}
+LOGGING = LOGGING_SETTINGS
+
+# Build paths inside the project like this: BASE_DIR / 'subdir'.
+BUILDDIR = os.environ.get("BUILDDIR", TMPDIR)
+
+# LOG VIEWER
+# https://pypi.org/project/django-log-viewer/
+LOG_VIEWER_FILES_PATTERN = '*.log*'
+LOG_VIEWER_FILES_DIR = os.path.join(BUILDDIR, "toaster_logs/")
+LOG_VIEWER_PAGE_LENGTH = 25 # total log lines per-page
+LOG_VIEWER_MAX_READ_LINES = 100000 # total log lines will be read
+LOG_VIEWER_PATTERNS = ['INFO', 'DEBUG', 'WARNING', 'ERROR', 'CRITICAL']
+
+# Optionally you can set the next variables in order to customize the admin:
+LOG_VIEWER_FILE_LIST_TITLE = "Logs list"
if DEBUG and SQL_DEBUG:
LOGGING['loggers']['django.db.backends'] = {
diff --git a/bitbake/lib/toaster/toastermain/settings_test.py b/bitbake/lib/toaster/toastermain/settings_test.py
index 6538d9e453..74def2d240 100644
--- a/bitbake/lib/toaster/toastermain/settings_test.py
+++ b/bitbake/lib/toaster/toastermain/settings_test.py
@@ -19,10 +19,10 @@ TEMPLATE_DEBUG = DEBUG
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
- 'NAME': '/tmp/toaster-test-db.sqlite',
+ 'NAME': '%s/toaster-test-db.sqlite' % TMPDIR,
'TEST': {
'ENGINE': 'django.db.backends.sqlite3',
- 'NAME': '/tmp/toaster-test-db.sqlite',
+ 'NAME': '%s/toaster-test-db.sqlite' % TMPDIR,
}
}
}
diff --git a/bitbake/lib/toaster/toastermain/urls.py b/bitbake/lib/toaster/toastermain/urls.py
index 5fb520b384..3be46fcf0c 100644
--- a/bitbake/lib/toaster/toastermain/urls.py
+++ b/bitbake/lib/toaster/toastermain/urls.py
@@ -6,7 +6,7 @@
# SPDX-License-Identifier: GPL-2.0-only
#
-from django.conf.urls import include, url
+from django.urls import re_path as url, include
from django.views.generic import RedirectView, TemplateView
from django.views.decorators.cache import never_cache
import bldcollector.views
@@ -28,6 +28,8 @@ urlpatterns = [
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
+ url(r'^logs/', include('log_viewer.urls')),
+
# This is here to maintain backward compatibility and will be deprecated
# in the future.
url(r'^orm/eventfile$', bldcollector.views.eventfile),
diff --git a/bitbake/lib/toaster/tox.ini b/bitbake/lib/toaster/tox.ini
new file mode 100644
index 0000000000..1516a527ae
--- /dev/null
+++ b/bitbake/lib/toaster/tox.ini
@@ -0,0 +1,24 @@
+[tox]
+envlist = py38, py39, py310, py311, py312
+skipsdist = True
+toxworkdir = {env:TOX_WORKDIR:.tox}
+passenv = *
+
+[testenv]
+passenv =
+ SSTATE_DIR
+ DL_DIR
+ TOASTER_DJANGO_TMPDIR
+setenv =
+ DJANGO_SETTINGS_MODULE=toastermain.settings_test
+ TOASTER_BUILDSERVER=1
+ BUILDDIR = {env:BUILDDIR}
+ EVENTREPLAY_DIR = {env:EVENTREPLAY_DIR:BUILDDIR}
+commands =
+ python3 {toxinidir}/manage.py test tests.db tests.commands tests.builds tests.browser tests.functional tests.views
+deps =
+ -r {toxinidir}/../../toaster-requirements.txt
+ -r {toxinidir}/tests/toaster-tests-requirements.txt
+
+[testenv:chrome]
+commands={[testenv]commands} --splinter-webdriver=chrome \ No newline at end of file
diff --git a/bitbake/toaster-requirements.txt b/bitbake/toaster-requirements.txt
index dedd423556..d8e48b7f3a 100644
--- a/bitbake/toaster-requirements.txt
+++ b/bitbake/toaster-requirements.txt
@@ -1,3 +1,4 @@
-Django>3.2,<3.3
+Django>4.2,<4.3
beautifulsoup4>=4.4.0
pytz
+django-log-viewer==1.1.7
diff --git a/documentation/.gitignore b/documentation/.gitignore
index 096b97ec28..b23d598054 100644
--- a/documentation/.gitignore
+++ b/documentation/.gitignore
@@ -1,7 +1,11 @@
+sphinx/__pycache__
_build/
Pipfile.lock
poky.yaml
sphinx-static/switchers.js
+releases.rst
.vscode/
*/svg/*.png
*/svg/*.pdf
+styles/*
+!styles/config
diff --git a/documentation/.vale.ini b/documentation/.vale.ini
new file mode 100644
index 0000000000..02042bb632
--- /dev/null
+++ b/documentation/.vale.ini
@@ -0,0 +1,7 @@
+StylesPath = styles
+MinAlertLevel = suggestion
+Packages = RedHat, proselint, write-good, alex, Readability, Joblint
+Vocab = Yocto, OpenSource
+[*.rst]
+BasedOnStyles = Vale, RedHat, proselint, write-good, alex, Readability, Joblint
+
diff --git a/documentation/Makefile b/documentation/Makefile
index 9fb6814c8f..189bd1dfac 100644
--- a/documentation/Makefile
+++ b/documentation/Makefile
@@ -5,6 +5,9 @@
# from the environment for the first two.
SPHINXOPTS ?= -W --keep-going -j auto
SPHINXBUILD ?= sphinx-build
+# Release notes are excluded because they contain contributor names and commit messages which can't be modified
+VALEOPTS ?= --no-wrap --glob '!migration-guides/release-notes-*.rst'
+VALEDOCS ?= .
SOURCEDIR = .
IMAGEDIRS = */svg
BUILDDIR = _build
@@ -20,7 +23,7 @@ endif
help:
@$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
-.PHONY: all help Makefile clean publish epub latexpdf
+.PHONY: all help Makefile clean stylecheck publish epub latexpdf
publish: Makefile html singlehtml
rm -rf $(BUILDDIR)/$(DESTDIR)/
@@ -44,7 +47,14 @@ PNGs := $(foreach dir, $(IMAGEDIRS), $(patsubst %.svg,%.png,$(wildcard $(SOURCED
$(SVG2PNG) --export-filename=$@ $<
clean:
- @rm -rf $(BUILDDIR) $(PNGs) $(PDFs) poky.yaml sphinx-static/switchers.js
+ @rm -rf $(BUILDDIR) $(PNGs) $(PDFs) poky.yaml sphinx-static/switchers.js releases.rst
+
+stylecheck:
+ vale sync
+ vale $(VALEOPTS) $(VALEDOCS)
+
+sphinx-lint:
+ sphinx-lint $(SOURCEDIR)
epub: $(PNGs)
$(SOURCEDIR)/set_versions.py
diff --git a/documentation/README b/documentation/README
index 6f6a8ec842..b60472fcbf 100644
--- a/documentation/README
+++ b/documentation/README
@@ -34,16 +34,18 @@ Manual Organization
Here the folders corresponding to individual manuals:
+* brief-yoctoprojectqs - Yocto Project Quick Start
* overview-manual - Yocto Project Overview and Concepts Manual
-* sdk-manual - Yocto Project Software Development Kit (SDK) Developer's Guide.
+* contributor-guide - Yocto Project and OpenEmbedded Contributor Guide
+* ref-manual - Yocto Project Reference Manual
* bsp-guide - Yocto Project Board Support Package (BSP) Developer's Guide
* dev-manual - Yocto Project Development Tasks Manual
* kernel-dev - Yocto Project Linux Kernel Development Manual
-* ref-manual - Yocto Project Reference Manual
-* brief-yoctoprojectqs - Yocto Project Quick Start
* profile-manual - Yocto Project Profiling and Tracing Manual
+* sdk-manual - Yocto Project Software Development Kit (SDK) Developer's Guide.
* toaster-manual - Toaster User Manual
* test-manual - Yocto Project Test Environment Manual
+* migration-guides - Yocto Project Release and Migration Notes
Each folder is self-contained regarding content and figures.
@@ -129,6 +131,10 @@ Also install the "inkscape" package from your distribution.
Inkscape is need to convert SVG graphics to PNG (for EPUB
export) and to PDF (for PDF export).
+Additionally install "fncychap.sty" TeX font if you want to build PDFs. Debian
+and Ubuntu have it in "texlive-latex-extra" package while RedHat distributions
+and OpenSUSE have it in "texlive-fncychap" package for example.
+
To build the documentation locally, run:
$ cd documentation
@@ -145,6 +151,34 @@ dependencies in a virtual environment:
$ pipenv install
$ pipenv run make html
+Style checking the Yocto Project documentation
+==============================================
+
+The project is starting to use Vale (https://vale.sh/)
+to validate the text style.
+
+To install Vale:
+
+ $ pip install vale
+
+To run Vale:
+
+ $ make stylecheck
+
+Link checking the Yocto Project documentation
+=============================================
+
+To fix errors which are not reported by Sphinx itself,
+the project uses sphinx-lint (https://github.com/sphinx-contrib/sphinx-lint).
+
+To install sphinx-lint:
+
+ $ pip install sphinx-lint
+
+To run sphinx-lint:
+
+ $ make sphinx-lint
+
Sphinx theme and CSS customization
==================================
@@ -271,6 +305,19 @@ websites.
More information can be found here:
https://sublime-and-sphinx-guide.readthedocs.io/en/latest/references.html.
+For external links, we use this syntax:
+`link text <link URL>`__
+
+instead of:
+`link text <link URL>`_
+
+Both syntaxes work, but the latter also creates a "link text" reference
+target which could conflict with other references with the same name.
+So, only use this variant when you wish to make multiple references
+to this link, reusing only the target name.
+
+See https://stackoverflow.com/questions/27420317/restructured-text-rst-http-links-underscore-vs-use
+
Anchor (<#link>) links are forbidden as they are not checked by Sphinx during
the build and may be broken without knowing about it.
@@ -340,13 +387,16 @@ The sphinx.ext.intersphinx extension is enabled by default
so that we can cross reference content from other Sphinx based
documentation projects, such as the BitBake manual.
-References to the BitBake manual can be done:
+References to the BitBake manual can directly be done:
- With a specific description instead of the section name:
- :ref:`Azure Storage fetcher (az://) <bitbake:bitbake-user-manual/bitbake-user-manual-fetching:fetchers>`
+ :ref:`Azure Storage fetcher (az://) <bitbake-user-manual/bitbake-user-manual-fetching:fetchers>`
- With the section name:
- :ref:`bitbake:bitbake-user-manual/bitbake-user-manual-intro:usage and syntax` option
- - Linking to the entire BitBake manual:
- :doc:`BitBake User Manual <bitbake:index>`
+ :ref:`bitbake-user-manual/bitbake-user-manual-intro:usage and syntax` option
+
+If you want to refer to an entire document (or chapter) in the BitBake manual,
+you have to use the ":doc:" macro with the "bitbake:" prefix:
+ - :doc:`BitBake User Manual <bitbake:index>`
+ - :doc:`bitbake:bitbake-user-manual/bitbake-user-manual-metadata`" chapter
Note that a reference to a variable (:term:`VARIABLE`) automatically points to
the BitBake manual if the variable is not described in the Reference Manual's Variable Glossary.
@@ -355,6 +405,11 @@ BitBake manual as follows:
:term:`bitbake:BB_NUMBER_PARSE_THREADS`
+This would be the same if we had identical document filenames in
+both the Yocto Project and BitBake manuals:
+
+ :ref:`bitbake:directory/file:section title`
+
Submitting documentation changes
================================
diff --git a/documentation/_templates/footer.html b/documentation/_templates/footer.html
index 508129ede4..fb3c58d845 100644
--- a/documentation/_templates/footer.html
+++ b/documentation/_templates/footer.html
@@ -5,7 +5,7 @@
<br> All Rights Reserved. Linux Foundation&reg; and Yocto Project&reg; are registered trademarks of the Linux Foundation.
<br>Linux&reg; is a registered trademark of Linus Torvalds.
<br>&copy; Copyright {{ copyright }}
- <br>Last updated on {{ last_updated }}
+ <br>Last updated on {{ last_updated }} from the <a href="https://git.yoctoproject.org/yocto-docs/">yocto-docs</a> git repository.
</p>
</div>
</footer>
diff --git a/documentation/boilerplate.rst b/documentation/boilerplate.rst
index 9b64d91efd..6b7f956707 100644
--- a/documentation/boilerplate.rst
+++ b/documentation/boilerplate.rst
@@ -1,3 +1,5 @@
+.. SPDX-License-Identifier: CC-BY-SA-2.0-UK
+
.. include:: <xhtml1-lat1.txt>
.. include:: <xhtml1-symbol.txt>
@@ -8,7 +10,7 @@
Permission is granted to copy, distribute and/or modify this document under the
terms of the `Creative Commons Attribution-Share Alike 2.0 UK: England & Wales
-<https://creativecommons.org/licenses/by-sa/2.0/uk/>`_ as published by Creative
+<https://creativecommons.org/licenses/by-sa/2.0/uk/>`__ as published by Creative
Commons.
To report any inaccuracies or problems with this (or any other Yocto Project)
diff --git a/documentation/brief-yoctoprojectqs/index.rst b/documentation/brief-yoctoprojectqs/index.rst
index 7179f25022..61c5cbec36 100644
--- a/documentation/brief-yoctoprojectqs/index.rst
+++ b/documentation/brief-yoctoprojectqs/index.rst
@@ -25,18 +25,11 @@ build a reference embedded OS called Poky.
in the Yocto Project Development Tasks Manual for more
information.
- - You may use Windows Subsystem For Linux v2 to set up a build host
- using Windows 10.
-
- .. note::
-
- The Yocto Project is not compatible with WSLv1, it is
- compatible but not officially supported nor validated with
- WSLv2, if you still decide to use WSL please upgrade to WSLv2.
-
- See the :ref:`dev-manual/start:setting up to use windows
- subsystem for linux (wslv2)` section in the Yocto Project Development
- Tasks Manual for more information.
+ - You may use version 2 of Windows Subsystem For Linux (WSL 2) to set
+ up a build host using Windows 10 or later, Windows Server 2019 or later.
+ See the :ref:`dev-manual/start:setting up to use windows subsystem for
+ linux (wsl 2)` section in the Yocto Project Development Tasks Manual
+ for more information.
If you want more conceptual or background information on the Yocto
Project, see the :doc:`/overview-manual/index`.
@@ -47,7 +40,13 @@ Compatible Linux Distribution
Make sure your :term:`Build Host` meets the
following requirements:
-- 50 Gbytes of free disk space
+- At least &MIN_DISK_SPACE; Gbytes of free disk space, though
+ much more will help to run multiple builds and increase
+ performance by reusing build artifacts.
+
+- At least &MIN_RAM; Gbytes of RAM, though a modern modern build host with as
+ much RAM and as many CPU cores as possible is strongly recommended to
+ maximize build performance.
- Runs a supported Linux distribution (i.e. recent releases of Fedora,
openSUSE, CentOS, Debian, or Ubuntu). For a list of Linux
@@ -64,11 +63,12 @@ following requirements:
- tar &MIN_TAR_VERSION; or greater
- Python &MIN_PYTHON_VERSION; or greater.
- gcc &MIN_GCC_VERSION; or greater.
+ - GNU make &MIN_MAKE_VERSION; or greater
If your build host does not meet any of these three listed version
requirements, you can take steps to prepare the system so that you
can still use the Yocto Project. See the
-:ref:`ref-manual/system-requirements:required git, tar, python and gcc versions`
+:ref:`ref-manual/system-requirements:required git, tar, python, make and gcc versions`
section in the Yocto Project Reference Manual for information.
Build Host Packages
@@ -76,11 +76,9 @@ Build Host Packages
You must install essential host packages on your build host. The
following command installs the host packages based on an Ubuntu
-distribution:
-
-.. code-block:: shell
+distribution::
- $ sudo apt install &UBUNTU_HOST_PACKAGES_ESSENTIAL;
+ $ sudo apt install &UBUNTU_HOST_PACKAGES_ESSENTIAL;
.. note::
@@ -230,13 +228,13 @@ an entire Linux distribution, including the toolchain, from source.
Among other things, the script creates the :term:`Build Directory`, which is
``build`` in this case and is located in the :term:`Source Directory`. After
- the script runs, your current working directory is set to the Build
- Directory. Later, when the build completes, the Build Directory contains all the
- files created during the build.
+ the script runs, your current working directory is set to the
+ :term:`Build Directory`. Later, when the build completes, the
+ :term:`Build Directory` contains all the files created during the build.
#. **Examine Your Local Configuration File:** When you set up the build
environment, a local configuration file named ``local.conf`` becomes
- available in a ``conf`` subdirectory of the Build Directory. For this
+ available in a ``conf`` subdirectory of the :term:`Build Directory`. For this
example, the defaults are set to build for a ``qemux86`` target,
which is suitable for emulation. The package manager used is set to
the RPM package manager.
@@ -253,15 +251,10 @@ an entire Linux distribution, including the toolchain, from source.
To use such mirrors, uncomment the below lines in your ``conf/local.conf``
file in the :term:`Build Directory`::
- BB_SIGNATURE_HANDLER = "OEEquivHash"
+ BB_HASHSERVE_UPSTREAM = "hashserv.yocto.io:8687"
+ SSTATE_MIRRORS ?= "file://.* http://cdn.jsdelivr.net/yocto/sstate/all/PATH;downloadfilename=PATH"
BB_HASHSERVE = "auto"
- BB_HASHSERVE_UPSTREAM = "typhoon.yocto.io:8687"
- SSTATE_MIRRORS ?= "file://.* https://sstate.yoctoproject.org/&YOCTO_DOC_VERSION;/PATH;downloadfilename=PATH"
-
- The above settings assumed the use of Yocto Project &YOCTO_DOC_VERSION;.
- If you are using the development version instead, set :term:`SSTATE_MIRRORS` as follows::
-
- SSTATE_MIRRORS ?= "file://.* https://sstate.yoctoproject.org/dev/PATH;downloadfilename=PATH"
+ BB_SIGNATURE_HANDLER = "OEEquivHash"
#. **Start the Build:** Continue with the following command to build an OS
image for the target, which is ``core-image-sato`` in this example:
@@ -273,7 +266,7 @@ an entire Linux distribution, including the toolchain, from source.
For information on using the ``bitbake`` command, see the
:ref:`overview-manual/concepts:bitbake` section in the Yocto Project Overview and
Concepts Manual, or see
- :ref:`bitbake:bitbake-user-manual/bitbake-user-manual-intro:the bitbake command`
+ :ref:`bitbake-user-manual/bitbake-user-manual-intro:the bitbake command`
in the BitBake User Manual.
#. **Simulate Your Image Using QEMU:** Once this particular image is
@@ -356,9 +349,7 @@ Follow these steps to add a hardware layer:
#. **Add Your Layer to the Layer Configuration File:** Before you can use
a layer during a build, you must add it to your ``bblayers.conf``
- file, which is found in the
- :term:`Build Directory` ``conf``
- directory.
+ file, which is found in the :term:`Build Directory` ``conf`` directory.
Use the ``bitbake-layers add-layer`` command to add the layer to the
configuration file:
@@ -374,7 +365,7 @@ Follow these steps to add a hardware layer:
You can find
more information on adding layers in the
- :ref:`dev-manual/common-tasks:adding a layer using the \`\`bitbake-layers\`\` script`
+ :ref:`dev-manual/layers:adding a layer using the \`\`bitbake-layers\`\` script`
section.
Completing these steps has added the ``meta-altera`` layer to your Yocto
@@ -409,7 +400,7 @@ The following commands run the tool to create a layer named
For more information
on layers and how to create them, see the
-:ref:`dev-manual/common-tasks:creating a general layer using the \`\`bitbake-layers\`\` script`
+:ref:`dev-manual/layers:creating a general layer using the \`\`bitbake-layers\`\` script`
section in the Yocto Project Development Tasks Manual.
Where To Go Next
diff --git a/documentation/bsp-guide/bsp.rst b/documentation/bsp-guide/bsp.rst
index 280b160807..11ca5d8b76 100644
--- a/documentation/bsp-guide/bsp.rst
+++ b/documentation/bsp-guide/bsp.rst
@@ -1,8 +1,8 @@
.. SPDX-License-Identifier: CC-BY-SA-2.0-UK
-************************************************
-Board Support Packages (BSP) - Developer's Guide
-************************************************
+**************************************************
+Board Support Packages (BSP) --- Developer's Guide
+**************************************************
A Board Support Package (BSP) is a collection of information that
defines how to support a particular hardware device, set of devices, or
@@ -64,8 +64,8 @@ Here is an example that clones the Raspberry Pi BSP layer::
In addition to BSP layers, the ``meta-yocto-bsp`` layer is part of the
shipped ``poky`` repository. The ``meta-yocto-bsp`` layer maintains
-several "reference" BSPs including the ARM-based Beaglebone, MIPS-based
-EdgeRouter, and generic versions of both 32-bit and 64-bit IA machines.
+several "reference" BSPs including the ARM-based Beaglebone and generic
+versions of both 32-bit and 64-bit IA machines.
For information on typical BSP development workflow, see the
:ref:`bsp-guide/bsp:developing a board support package (bsp)`
@@ -109,8 +109,7 @@ them to the "Dependencies" section.
Some layers function as a layer to hold other BSP layers. These layers
are known as ":term:`container layers <Container Layer>`". An example of
-this type of layer is OpenEmbedded's
-`meta-openembedded <https://github.com/openembedded/meta-openembedded>`__
+this type of layer is OpenEmbedded's :oe_git:`meta-openembedded </meta-openembedded>`
layer. The ``meta-openembedded`` layer contains many ``meta-*`` layers.
In cases like this, you need to include the names of the actual layers
you want to work with, such as::
@@ -128,7 +127,7 @@ you want to work with, such as::
and so on.
For more information on layers, see the
-":ref:`dev-manual/common-tasks:understanding and creating layers`"
+":ref:`dev-manual/layers:understanding and creating layers`"
section of the Yocto Project Development Tasks Manual.
Preparing Your Build Host to Work With BSP Layers
@@ -337,7 +336,7 @@ the :yocto_git:`Source Repositories <>`:
meta-raspberrypi/recipes-bsp/formfactor/formfactor
meta-raspberrypi/recipes-bsp/formfactor/formfactor/raspberrypi
meta-raspberrypi/recipes-bsp/formfactor/formfactor/raspberrypi/machconfig
- meta-raspberrypi/recipes-bsp/formfactor/formfactor_0.0.bbappend
+ meta-raspberrypi/recipes-bsp/formfactor/formfactor_%.bbappend
meta-raspberrypi/recipes-bsp/rpi-u-boot-src
meta-raspberrypi/recipes-bsp/rpi-u-boot-src/files
meta-raspberrypi/recipes-bsp/rpi-u-boot-src/files/boot.cmd.in
@@ -464,7 +463,7 @@ requirements are handled with the ``COPYING.MIT`` file.
Licensing files can be MIT, BSD, GPLv*, and so forth. These files are
recommended for the BSP but are optional and totally up to the BSP
developer. For information on how to maintain license compliance, see
-the ":ref:`dev-manual/common-tasks:maintaining open source license compliance during your product's lifecycle`"
+the ":ref:`dev-manual/licenses:maintaining open source license compliance during your product's lifecycle`"
section in the Yocto Project Development Tasks Manual.
README File
@@ -590,7 +589,7 @@ filenames correspond to the values to which users have set the
These files define things such as the kernel package to use
(:term:`PREFERRED_PROVIDER` of
-:ref:`virtual/kernel <dev-manual/common-tasks:using virtual providers>`),
+:ref:`virtual/kernel <dev-manual/new-recipe:using virtual providers>`),
the hardware drivers to include in different types of images, any
special software components that are needed, any bootloader information,
and also any special image format requirements.
@@ -621,7 +620,7 @@ You can find these files in the BSP Layer at::
This optional directory contains miscellaneous recipe files for the BSP.
Most notably would be the formfactor files. For example, in the
-Raspberry Pi BSP, there is the ``formfactor_0.0.bbappend`` file, which
+Raspberry Pi BSP, there is the ``formfactor_%.bbappend`` file, which
is an append file used to augment the recipe that starts the build.
Furthermore, there are machine-specific settings used during the build
that are defined by the ``machconfig`` file further down in the
@@ -758,36 +757,20 @@ workflow.
OpenEmbedded build system knows about. For more information on
layers, see the ":ref:`overview-manual/yp-intro:the yocto project layer model`"
section in the Yocto Project Overview and Concepts Manual. You can also
- reference the ":ref:`dev-manual/common-tasks:understanding and creating layers`"
+ reference the ":ref:`dev-manual/layers:understanding and creating layers`"
section in the Yocto Project Development Tasks Manual. For more
information on BSP layers, see the ":ref:`bsp-guide/bsp:bsp layers`"
section.
.. note::
- - There are four hardware reference BSPs in the Yocto
+ - There are three hardware reference BSPs in the Yocto
Project release, located in the ``poky/meta-yocto-bsp``
BSP layer:
- Texas Instruments Beaglebone (``beaglebone-yocto``)
- - Ubiquiti Networks EdgeRouter Lite (``edgerouter``)
-
- - Two general IA platforms (``genericx86`` and ``genericx86-64``)
-
- - There are three core Intel BSPs in the Yocto Project
- release, in the ``meta-intel`` layer:
-
- - ``intel-core2-32``, which is a BSP optimized for the Core2
- family of CPUs as well as all CPUs prior to the Silvermont
- core.
-
- - ``intel-corei7-64``, which is a BSP optimized for Nehalem
- and later Core and Xeon CPUs as well as Silvermont and later
- Atom CPUs, such as the Baytrail SoCs.
-
- - ``intel-quark``, which is a BSP optimized for the Intel
- Galileo gen1 & gen2 development boards.
+ - Two generic IA platforms (``genericx86`` and ``genericx86-64``)
When you set up a layer for a new BSP, you should follow a standard
layout. This layout is described in the ":ref:`bsp-guide/bsp:example filesystem layout`"
@@ -817,7 +800,7 @@ workflow.
key configuration files are configured appropriately: the
``conf/local.conf`` and the ``conf/bblayers.conf`` file. You must
make the OpenEmbedded build system aware of your new layer. See the
- ":ref:`dev-manual/common-tasks:enabling your layer`"
+ ":ref:`dev-manual/layers:enabling your layer`"
section in the Yocto Project Development Tasks Manual for information
on how to let the build system know about your new layer.
@@ -846,7 +829,7 @@ Before looking at BSP requirements, you should consider the following:
layer that can be added to the Yocto Project. For guidelines on
creating a layer that meets these base requirements, see the
":ref:`bsp-guide/bsp:bsp layers`" section in this manual and the
- ":ref:`dev-manual/common-tasks:understanding and creating layers`"
+ ":ref:`dev-manual/layers:understanding and creating layers`"
section in the Yocto Project Development Tasks Manual.
- The requirements in this section apply regardless of how you package
@@ -868,8 +851,7 @@ Before looking at BSP requirements, you should consider the following:
dictating that a specific kernel or kernel version be used in a given
BSP.
-Following are the requirements for a released BSP that conform to the
-Yocto Project:
+The requirements for a released BSP that conform to the Yocto Project are:
- *Layer Name:* The BSP must have a layer name that follows the Yocto
Project standards. For information on BSP layer names, see the
@@ -894,8 +876,8 @@ Yocto Project:
``recipes-*`` subdirectories specific to the recipe's function, or
within a subdirectory containing a set of closely-related recipes.
The recipes themselves should follow the general guidelines for
- recipes used in the Yocto Project found in the ":oe_wiki:`OpenEmbedded
- Style Guide </Styleguide>`".
+ recipes found in the ":doc:`../contributor-guide/recipe-style-guide`"
+ in the Yocto Project and OpenEmbedded Contributor Guide.
- *License File:* You must include a license file in the
``meta-bsp_root_name`` directory. This license covers the BSP
@@ -928,8 +910,8 @@ Yocto Project:
- The name and contact information for the BSP layer maintainer.
This is the person to whom patches and questions should be sent.
For information on how to find the right person, see the
- ":ref:`dev-manual/common-tasks:submitting a change to the yocto project`"
- section in the Yocto Project Development Tasks Manual.
+ :doc:`../contributor-guide/submit-changes` section in the Yocto Project and
+ OpenEmbedded Contributor Guide.
- Instructions on how to build the BSP using the BSP layer.
@@ -973,7 +955,7 @@ Yocto Project:
Released BSP Recommendations
----------------------------
-Following are recommendations for released BSPs that conform to the
+Here are recommendations for released BSPs that conform to the
Yocto Project:
- *Bootable Images:* Released BSPs can contain one or more bootable
@@ -1014,7 +996,7 @@ the following:
- Create a ``*.bbappend`` file for the modified recipe. For information on using
append files, see the
- ":ref:`dev-manual/common-tasks:appending other layers metadata with your layer`"
+ ":ref:`dev-manual/layers:appending other layers metadata with your layer`"
section in the Yocto Project Development Tasks Manual.
- Ensure your directory structure in the BSP layer that supports your
@@ -1035,7 +1017,7 @@ the following:
that additional hierarchy and the files would obviously not be able
to reside in a machine-specific directory.
-Following is a specific example to help you better understand the
+Here is a specific example to help you better understand the
process. This example customizes a recipe by adding a
BSP-specific configuration file named ``interfaces`` to the
``init-ifupdown_1.0.bb`` recipe for machine "xyz" where the BSP layer
@@ -1118,7 +1100,7 @@ list describes them in order of preference:
Specifying the matching license string signifies that you agree to
the license. Thus, the build system can build the corresponding
recipe and include the component in the image. See the
- ":ref:`dev-manual/common-tasks:enabling commercially licensed recipes`"
+ ":ref:`dev-manual/licenses:enabling commercially licensed recipes`"
section in the Yocto Project Development Tasks Manual for details on
how to use these variables.
@@ -1170,7 +1152,7 @@ Use these steps to create a BSP layer:
``create-layer`` subcommand to create a new general layer. For
instructions on how to create a general layer using the
``bitbake-layers`` script, see the
- ":ref:`dev-manual/common-tasks:creating a general layer using the \`\`bitbake-layers\`\` script`"
+ ":ref:`dev-manual/layers:creating a general layer using the \`\`bitbake-layers\`\` script`"
section in the Yocto Project Development Tasks Manual.
- *Create a Layer Configuration File:* Every layer needs a layer
@@ -1180,14 +1162,14 @@ Use these steps to create a BSP layer:
:yocto_git:`Source Repositories <>`. To get examples of what you need
in your configuration file, locate a layer (e.g. "meta-ti") and
examine the
- :yocto_git:`local.conf </meta-ti/tree/conf/layer.conf>`
+ :yocto_git:`local.conf </meta-ti/tree/meta-ti-bsp/conf/layer.conf>`
file.
- *Create a Machine Configuration File:* Create a
``conf/machine/bsp_root_name.conf`` file. See
:yocto_git:`meta-yocto-bsp/conf/machine </poky/tree/meta-yocto-bsp/conf/machine>`
for sample ``bsp_root_name.conf`` files. There are other samples such as
- :yocto_git:`meta-ti </meta-ti/tree/conf/machine>`
+ :yocto_git:`meta-ti </meta-ti/tree/meta-ti-bsp/conf/machine>`
and
:yocto_git:`meta-freescale </meta-freescale/tree/conf/machine>`
from other vendors that have more specific machine and tuning
@@ -1195,7 +1177,7 @@ Use these steps to create a BSP layer:
- *Create a Kernel Recipe:* Create a kernel recipe in
``recipes-kernel/linux`` by either using a kernel append file or a
- new custom kernel recipe file (e.g. ``yocto-linux_4.12.bb``). The BSP
+ new custom kernel recipe file (e.g. ``linux-yocto_4.12.bb``). The BSP
layers mentioned in the previous step also contain different kernel
examples. See the ":ref:`kernel-dev/common:modifying an existing recipe`"
section in the Yocto Project Linux Kernel Development Manual for
@@ -1210,7 +1192,7 @@ BSP Layer Configuration Example
-------------------------------
The layer's ``conf`` directory contains the ``layer.conf`` configuration
-file. In this example, the ``conf/layer.conf`` is the following::
+file. In this example, the ``conf/layer.conf`` file is the following::
# We have a conf and classes directory, add to BBPATH
BBPATH .= ":${LAYERDIR}"
@@ -1230,7 +1212,7 @@ configuration files is to examine various files for BSP from the
:yocto_git:`Source Repositories <>`.
For a detailed description of this particular layer configuration file,
-see ":ref:`step 3 <dev-manual/common-tasks:creating your own layer>`"
+see ":ref:`step 3 <dev-manual/layers:creating your own layer>`"
in the discussion that describes how to create layers in the Yocto
Project Development Tasks Manual.
@@ -1251,21 +1233,18 @@ There are one or more machine configuration files in the
For example, the machine configuration file for the `BeagleBone and
BeagleBone Black development boards <https://beagleboard.org/bone>`__ is
-located in the layer ``poky/meta-yocto-bsp/conf/machine`` and is named
-``beaglebone-yocto.conf``::
+located in :yocto_git:`poky/meta-yocto-bsp/conf/machine/beaglebone-yocto.conf
+</poky/tree/meta-yocto-bsp/conf/machine/beaglebone-yocto.conf>`::
#@TYPE: Machine
#@NAME: Beaglebone-yocto machine
#@DESCRIPTION: Reference machine configuration for http://beagleboard.org/bone and http://beagleboard.org/black boards
PREFERRED_PROVIDER_virtual/xserver ?= "xserver-xorg"
- XSERVER ?= "xserver-xorg \
- xf86-video-modesetting \
- "
MACHINE_EXTRA_RRECOMMENDS = "kernel-modules kernel-devicetree"
- EXTRA_IMAGEDEPENDS += "u-boot"
+ EXTRA_IMAGEDEPENDS += "virtual/bootloader"
DEFAULTTUNE ?= "cortexa8hf-neon"
include conf/machine/include/arm/armv7a/tune-cortexa8.inc
@@ -1273,19 +1252,20 @@ located in the layer ``poky/meta-yocto-bsp/conf/machine`` and is named
IMAGE_FSTYPES += "tar.bz2 jffs2 wic wic.bmap"
EXTRA_IMAGECMD:jffs2 = "-lnp "
WKS_FILE ?= "beaglebone-yocto.wks"
- IMAGE_INSTALL:append = " kernel-devicetree kernel-image-zimage"
- do_image_wic[depends] += "mtools-native:do_populate_sysroot dosfstools-native:do_populate_sysroot"
+ MACHINE_ESSENTIAL_EXTRA_RDEPENDS += "kernel-image kernel-devicetree"
+ do_image_wic[depends] += "mtools-native:do_populate_sysroot dosfstools-native:do_populate_sysroot virtual/bootloader:do_deploy"
- SERIAL_CONSOLES ?= "115200;ttyS0 115200;ttyO0"
- SERIAL_CONSOLES_CHECK = "${SERIAL_CONSOLES}"
+ SERIAL_CONSOLES ?= "115200;ttyS0 115200;ttyO0 115200;ttyAMA0"
PREFERRED_PROVIDER_virtual/kernel ?= "linux-yocto"
- PREFERRED_VERSION_linux-yocto ?= "5.0%"
+ PREFERRED_VERSION_linux-yocto ?= "6.1%"
KERNEL_IMAGETYPE = "zImage"
KERNEL_DEVICETREE = "am335x-bone.dtb am335x-boneblack.dtb am335x-bonegreen.dtb"
KERNEL_EXTRA_ARGS += "LOADADDR=${UBOOT_ENTRYPOINT}"
+ PREFERRED_PROVIDER_virtual/bootloader ?= "u-boot"
+
SPL_BINARY = "MLO"
UBOOT_SUFFIX = "img"
UBOOT_MACHINE = "am335x_evm_defconfig"
@@ -1294,7 +1274,24 @@ located in the layer ``poky/meta-yocto-bsp/conf/machine`` and is named
MACHINE_FEATURES = "usbgadget usbhost vfat alsa"
- IMAGE_BOOT_FILES ?= "u-boot.${UBOOT_SUFFIX} MLO zImage am335x-bone.dtb am335x-boneblack.dtb am335x-bonegreen.dtb"
+ IMAGE_BOOT_FILES ?= "u-boot.${UBOOT_SUFFIX} ${SPL_BINARY} ${KERNEL_IMAGETYPE} ${KERNEL_DEVICETREE}"
+
+ # support runqemu
+ EXTRA_IMAGEDEPENDS += "qemu-native qemu-helper-native"
+ IMAGE_CLASSES += "qemuboot"
+ QB_DEFAULT_FSTYPE = "wic"
+ QB_FSINFO = "wic:no-kernel-in-fs"
+ QB_KERNEL_ROOT = "/dev/vda2"
+ QB_SYSTEM_NAME = "qemu-system-arm"
+ QB_MACHINE = "-machine virt"
+ QB_CPU = "-cpu cortex-a15"
+ QB_KERNEL_CMDLINE_APPEND = "console=ttyAMA0 systemd.mask=systemd-networkd"
+ QB_OPT_APPEND = "-device virtio-rng-device"
+ QB_TAP_OPT = "-netdev tap,id=net0,ifname=@TAP@,script=no,downscript=no"
+ QB_NETWORK_DEVICE = "-device virtio-net-device,netdev=net0,mac=@MAC@"
+ QB_ROOTFS_OPT = "-drive id=disk0,file=@ROOTFS@,if=none,format=raw -device virtio-blk-device,drive=disk0"
+ QB_SERIAL_OPT = ""
+ QB_TCPSERIAL_OPT = "-device virtio-serial-device -chardev socket,id=virtcon,port=@PORT@,host=127.0.0.1 -device virtconsole,chardev=virtcon"
The variables used to configure the machine define machine-specific properties; for
example, machine-dependent packages, machine tunings, the type of kernel
@@ -1314,11 +1311,6 @@ Project Reference Manual.
"virtual/xserver" is "xserver-xorg", available in
``poky/meta/recipes-graphics/xorg-xserver``.
-- :term:`XSERVER`: The packages that
- should be installed to provide an X server and drivers for the
- machine. In this example, the "xserver-xorg" and
- "xf86-video-modesetting" are installed.
-
- :term:`MACHINE_EXTRA_RRECOMMENDS`:
A list of machine-dependent packages not essential for booting the
image. Thus, the build does not fail if the packages do not exist.
@@ -1336,12 +1328,15 @@ Project Reference Manual.
needed in the root filesystem. In this case, the U-Boot recipe must
be built for the image.
+ At the end of the file, we also use this setings to implement
+ ``runqemu`` support on the host machine.
+
- :term:`DEFAULTTUNE`: Machines
use tunings to optimize machine, CPU, and application performance.
These features, which are collectively known as "tuning features",
- are set in the :term:`OpenEmbedded-Core (OE-Core)` layer (e.g.
- ``poky/meta/conf/machine/include``). In this example, the default
- tuning file is ``cortexa8hf-neon``.
+ are set in the :term:`OpenEmbedded-Core (OE-Core)` layer. In this
+ example, the default tuning file is :oe_git:`tune-cortexa8
+ </openembedded-core/tree/meta/conf/machine/include/arm/armv7a/tune-cortexa8.inc>`.
.. note::
@@ -1357,17 +1352,11 @@ Project Reference Manual.
- :term:`EXTRA_IMAGECMD`:
Specifies additional options for image creation commands. In this
example, the "-lnp " option is used when creating the
- `JFFS2 <https://en.wikipedia.org/wiki/JFFS2>`__ image.
+ :wikipedia:`JFFS2 <JFFS2>` image.
- :term:`WKS_FILE`: The location of
the :ref:`Wic kickstart <ref-manual/kickstart:openembedded kickstart (\`\`.wks\`\`) reference>` file used
- by the OpenEmbedded build system to create a partitioned image
- (image.wic).
-
-- :term:`IMAGE_INSTALL`:
- Specifies packages to install into an image through the
- :ref:`image <ref-classes-image>` class. Recipes
- use the :term:`IMAGE_INSTALL` variable.
+ by the OpenEmbedded build system to create a partitioned image.
- ``do_image_wic[depends]``: A task that is constructed during the
build. In this example, the task depends on specific tools in order
@@ -1385,7 +1374,7 @@ Project Reference Manual.
- :term:`PREFERRED_VERSION_linux-yocto <PREFERRED_VERSION>`:
Defines the version of the recipe used to build the kernel, which is
- "5.0" in this case.
+ "6.1" in this case.
- :term:`KERNEL_IMAGETYPE`:
The type of kernel to build for the device. In this case, the
@@ -1450,39 +1439,35 @@ The kernel recipe used to build the kernel image for the BeagleBone
device was established in the machine configuration::
PREFERRED_PROVIDER_virtual/kernel ?= "linux-yocto"
- PREFERRED_VERSION_linux-yocto ?= "5.0%"
+ PREFERRED_VERSION_linux-yocto ?= "6.1%"
The ``meta-yocto-bsp/recipes-kernel/linux`` directory in the layer contains
metadata used to build the kernel. In this case, a kernel append file
-(i.e. ``linux-yocto_5.0.bbappend``) is used to override an established
-kernel recipe (i.e. ``linux-yocto_5.0.bb``), which is located in
+(i.e. ``linux-yocto_6.1.bbappend``) is used to override an established
+kernel recipe (i.e. ``linux-yocto_6.1.bb``), which is located in
:yocto_git:`/poky/tree/meta/recipes-kernel/linux`.
-Following is the contents of the append file::
+The contents of the append file are::
- KBRANCH:genericx86 = "v5.0/standard/base"
- KBRANCH:genericx86-64 = "v5.0/standard/base"
- KBRANCH:edgerouter = "v5.0/standard/edgerouter"
- KBRANCH:beaglebone-yocto = "v5.0/standard/beaglebone"
+ KBRANCH:genericx86 = "v6.1/standard/base"
+ KBRANCH:genericx86-64 = "v6.1/standard/base"
+ KBRANCH:beaglebone-yocto = "v6.1/standard/beaglebone"
KMACHINE:genericx86 ?= "common-pc"
KMACHINE:genericx86-64 ?= "common-pc-64"
KMACHINE:beaglebone-yocto ?= "beaglebone"
- SRCREV_machine:genericx86 ?= "3df4aae6074e94e794e27fe7f17451d9353cdf3d"
- SRCREV_machine:genericx86-64 ?= "3df4aae6074e94e794e27fe7f17451d9353cdf3d"
- SRCREV_machine:edgerouter ?= "3df4aae6074e94e794e27fe7f17451d9353cdf3d"
- SRCREV_machine:beaglebone-yocto ?= "3df4aae6074e94e794e27fe7f17451d9353cdf3d"
+ SRCREV_machine:genericx86 ?= "6ec439b4b456ce929c4c07fe457b5d6a4b468e86"
+ SRCREV_machine:genericx86-64 ?= "6ec439b4b456ce929c4c07fe457b5d6a4b468e86"
+ SRCREV_machine:beaglebone-yocto ?= "423e1996694b61fbfc8ec3bf062fc6461d64fde1"
COMPATIBLE_MACHINE:genericx86 = "genericx86"
COMPATIBLE_MACHINE:genericx86-64 = "genericx86-64"
- COMPATIBLE_MACHINE:edgerouter = "edgerouter"
COMPATIBLE_MACHINE:beaglebone-yocto = "beaglebone-yocto"
- LINUX_VERSION:genericx86 = "5.0.3"
- LINUX_VERSION:genericx86-64 = "5.0.3"
- LINUX_VERSION:edgerouter = "5.0.3"
- LINUX_VERSION:beaglebone-yocto = "5.0.3"
+ LINUX_VERSION:genericx86 = "6.1.30"
+ LINUX_VERSION:genericx86-64 = "6.1.30"
+ LINUX_VERSION:beaglebone-yocto = "6.1.20"
This particular append file works for all the machines that are
part of the ``meta-yocto-bsp`` layer. The relevant statements are
diff --git a/documentation/conf.py b/documentation/conf.py
index baf550e3e3..35c5c14535 100644
--- a/documentation/conf.py
+++ b/documentation/conf.py
@@ -53,7 +53,7 @@ author = 'The Linux Foundation'
# -- General configuration ---------------------------------------------------
# Prevent building with an outdated version of sphinx
-needs_sphinx = "3.1"
+needs_sphinx = "4.0"
# to load local extension from the folder 'sphinx'
sys.path.insert(0, os.path.abspath('sphinx'))
@@ -90,7 +90,8 @@ rst_prolog = """
# external links and substitutions
extlinks = {
- 'cve': ('https://nvd.nist.gov/vuln/detail/CVE-%s', 'CVE-'),
+ 'cve': ('https://nvd.nist.gov/vuln/detail/CVE-%s', 'CVE-%s'),
+ 'cve_mitre': ('https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-%s', 'CVE-%s'),
'yocto_home': ('https://www.yoctoproject.org%s', None),
'yocto_wiki': ('https://wiki.yoctoproject.org/wiki%s', None),
'yocto_dl': ('https://downloads.yoctoproject.org%s', None),
@@ -98,7 +99,7 @@ extlinks = {
'yocto_bugs': ('https://bugzilla.yoctoproject.org%s', None),
'yocto_ab': ('https://autobuilder.yoctoproject.org%s', None),
'yocto_docs': ('https://docs.yoctoproject.org%s', None),
- 'yocto_git': ('https://git.yoctoproject.org/cgit/cgit.cgi%s', None),
+ 'yocto_git': ('https://git.yoctoproject.org%s', None),
'yocto_sstate': ('http://sstate.yoctoproject.org%s', None),
'oe_home': ('https://www.openembedded.org%s', None),
'oe_lists': ('https://lists.openembedded.org%s', None),
@@ -106,6 +107,7 @@ extlinks = {
'oe_wiki': ('https://www.openembedded.org/wiki%s', None),
'oe_layerindex': ('https://layers.openembedded.org%s', None),
'oe_layer': ('https://layers.openembedded.org/layerindex/branch/master/layer%s', None),
+ 'wikipedia': ('https://en.wikipedia.org/wiki/%s', None),
}
# Intersphinx config to use cross reference with BitBake user manual
@@ -157,8 +159,8 @@ html_last_updated_fmt = '%b %d, %Y'
html_secnumber_suffix = " "
latex_elements = {
- 'passoptionstopackages': '\PassOptionsToPackage{bookmarksdepth=5}{hyperref}',
- 'preamble': '\setcounter{tocdepth}{2}',
+ 'passoptionstopackages': '\\PassOptionsToPackage{bookmarksdepth=5}{hyperref}',
+ 'preamble': '\\setcounter{tocdepth}{2}',
}
# Make the EPUB builder prefer PNG to SVG because of issues rendering Inkscape SVG
diff --git a/documentation/contributor-guide/identify-component.rst b/documentation/contributor-guide/identify-component.rst
new file mode 100644
index 0000000000..a28391a66a
--- /dev/null
+++ b/documentation/contributor-guide/identify-component.rst
@@ -0,0 +1,31 @@
+.. SPDX-License-Identifier: CC-BY-SA-2.0-UK
+
+Identify the component
+**********************
+
+The Yocto Project and OpenEmbedded ecosystem is built of :term:`layers <Layer>`
+so the first step is to identify the component where the issue likely lies.
+For example, if you have a hardware issue, it is likely related to the BSP
+you are using and the best place to seek advice would be from the BSP provider
+or :term:`layer`. If the issue is a build/configuration one and a distro is in
+use, they would likely be the first place to ask questions. If the issue is a
+generic one and/or in the core classes or metadata, the core layer or BitBake
+might be the appropriate component.
+
+Each metadata layer being used should contain a ``README`` file and that should
+explain where to report issues, where to send changes and how to contact the
+maintainers.
+
+If the issue is in the core metadata layer (OpenEmbedded-Core) or in BitBake,
+issues can be reported in the :yocto_bugs:`Yocto Project Bugzilla <>`. The
+:yocto_lists:`yocto </g/yocto>` mailing list is a general “catch-all” location
+where questions can be sent if you can’t work out where something should go.
+
+:term:`Poky` is a commonly used “combination” repository where multiple
+components have been combined (:oe_git:`bitbake </bitbake>`,
+:oe_git:`openembedded-core </openembedded-core>`,
+:yocto_git:`meta-yocto </meta-yocto>` and
+:yocto_git:`yocto-docs </yocto-docs>`). Patches should be submitted against the
+appropriate individual component rather than :term:`Poky` itself as detailed in
+the appropriate ``README`` file.
+
diff --git a/documentation/contributor-guide/index.rst b/documentation/contributor-guide/index.rst
new file mode 100644
index 0000000000..a832169455
--- /dev/null
+++ b/documentation/contributor-guide/index.rst
@@ -0,0 +1,26 @@
+.. SPDX-License-Identifier: CC-BY-SA-2.0-UK
+
+================================================
+Yocto Project and OpenEmbedded Contributor Guide
+================================================
+
+The Yocto Project and OpenEmbedded are open-source, community-based projects so
+contributions are very welcome, it is how the code evolves and everyone can
+effect change. Contributions take different forms, if you have a fix for an
+issue you’ve run into, a patch is the most appropriate way to contribute it.
+If you run into an issue but don’t have a solution, opening a defect in
+:yocto_bugs:`Bugzilla <>` or asking questions on the mailing lists might be
+more appropriate. This guide intends to point you in the right direction to
+this.
+
+
+.. toctree::
+ :caption: Table of Contents
+ :numbered:
+
+ identify-component
+ report-defect
+ recipe-style-guide
+ submit-changes
+
+.. include:: /boilerplate.rst
diff --git a/documentation/contributor-guide/recipe-style-guide.rst b/documentation/contributor-guide/recipe-style-guide.rst
new file mode 100644
index 0000000000..08d8fb4259
--- /dev/null
+++ b/documentation/contributor-guide/recipe-style-guide.rst
@@ -0,0 +1,411 @@
+.. SPDX-License-Identifier: CC-BY-SA-2.0-UK
+
+Recipe Style Guide
+******************
+
+Recipe Naming Conventions
+=========================
+
+In general, most recipes should follow the naming convention
+``recipes-category/recipename/recipename_version.bb``. Recipes for related
+projects may share the same recipe directory. ``recipename`` and ``category``
+may contain hyphens, but hyphens are not allowed in ``version``.
+
+If the recipe is tracking a Git revision that does not correspond to a released
+version of the software, ``version`` may be ``git`` (e.g. ``recipename_git.bb``)
+and the recipe would set :term:`PV`.
+
+Version Policy
+==============
+
+Our versions follow the form ``<epoch>:<version>-<revision>``
+or in BitBake variable terms ${:term:`PE`}:${:term:`PV`}-${:term:`PR`}. We
+generally follow the `Debian <https://www.debian.org/doc/debian-policy/ch-controlfields.html#version>`__
+version policy which defines these terms.
+
+In most cases the version :term:`PV` will be set automatically from the recipe
+file name. It is recommended to use released versions of software as these are
+revisions that upstream are expecting people to use.
+
+Recipe versions should always compare and sort correctly so that upgrades work
+as expected. With conventional versions such as ``1.4`` upgrading ``to 1.5``
+this happens naturally, but some versions don't sort. For example,
+``1.5 Release Candidate 2`` could be written as ``1.5rc2`` but this sorts after
+``1.5``, so upgrades from feeds won't happen correctly.
+
+Instead the tilde (``~``) operator can be used, which sorts before the empty
+string so ``1.5~rc2`` comes before ``1.5``. There is a historical syntax which
+may be found where :term:`PV` is set as a combination of the prior version
+``+`` the pre-release version, for example ``PV=1.4+1.5rc2``. This is a valid
+syntax but the tilde form is preferred.
+
+For version comparisons, the ``opkg-compare-versions`` program from
+``opkg-utils`` can be useful when attempting to determine how two version
+numbers compare to each other. Our definitive version comparison algorithm is
+the one within bitbake which aims to match those of the package managers and
+Debian policy closely.
+
+When a recipe references a git revision that does not correspond to a released
+version of software (e.g. is not a tagged version), the :term:`PV` variable
+should include the Git revision using the following to make the
+version clear::
+
+ PV = "<version>+git${SRCPV}"
+
+In this case, ``<version>`` should be the most recently released version of the
+software from the current source revision (``git describe`` can be useful for
+determining this). Whilst not recommended for published layers, this format is
+also useful when using :term:`AUTOREV` to set the recipe to increment source
+control revisions automatically, which can be useful during local development.
+
+Version Number Changes
+======================
+
+The :term:`PR` variable is used to indicate different revisions of a recipe
+that reference the same upstream source version. It can be used to force a
+new version of a recipe to be installed onto a device from a package feed.
+These once had to be set manually but in most cases these can now be set and
+incremented automatically by a PR Server connected with a package feed.
+
+When :term:`PV` increases, any existing :term:`PR` value can and should be
+removed.
+
+If :term:`PV` changes in such a way that it does not increase with respect to
+the previous value, you need to increase :term:`PE` to ensure package managers
+will upgrade it correctly. If unset you should set :term:`PE` to "1" since
+the default of empty is easily confused with "0" depending on the package
+manager. :term:`PE` can only have an integer value.
+
+Recipe formatting
+=================
+
+Variable Formatting
+-------------------
+
+- Variable assignment should a space around each side of the operator, e.g.
+ ``FOO = "bar"``, not ``FOO="bar"``.
+
+- Double quotes should be used on the right-hand side of the assignment,
+ e.g. ``FOO = "bar"`` not ``FOO = 'bar'``
+
+- Spaces should be used for indenting variables, with 4 spaces per tab
+
+- Long variables should be split over multiple lines when possible by using
+ the continuation character (``\``)
+
+- When splitting a long variable over multiple lines, all continuation lines
+ should be indented (with spaces) to align with the start of the quote on the
+ first line::
+
+ FOO = "this line is \
+ long \
+ "
+
+ Instead of::
+
+ FOO = "this line is \
+ long \
+ "
+
+Python Function formatting
+--------------------------
+
+- Spaces must be used for indenting Python code, with 4 spaces per tab
+
+Shell Function formatting
+-------------------------
+
+- The formatting of shell functions should be consistent within layers.
+ Some use tabs, some use spaces.
+
+Recipe metadata
+===============
+
+Required Variables
+------------------
+
+The following variables should be included in all recipes:
+
+- :term:`SUMMARY`: a one line description of the upstream project
+
+- :term:`DESCRIPTION`: an extended description of the upstream project,
+ possibly with multiple lines. If no reasonable description can be written,
+ this may be omitted as it defaults to :term:`SUMMARY`.
+
+- :term:`HOMEPAGE`: the URL to the upstream projects homepage.
+
+- :term:`BUGTRACKER`: the URL upstream projects bug tracking website,
+ if applicable.
+
+Recipe Ordering
+---------------
+
+When a variable is defined in recipes and classes, variables should follow the
+general order when possible:
+
+- :term:`SUMMARY`
+- :term:`DESCRIPTION`
+- :term:`HOMEPAGE`
+- :term:`BUGTRACKER`
+- :term:`SECTION`
+- :term:`LICENSE`
+- :term:`LIC_FILES_CHKSUM`
+- :term:`DEPENDS`
+- :term:`PROVIDES`
+- :term:`PV`
+- :term:`SRC_URI`
+- :term:`SRCREV`
+- :term:`S`
+- ``inherit ...``
+- :term:`PACKAGECONFIG`
+- Build class specific variables such as ``EXTRA_QMAKEVARS_POST`` and :term:`EXTRA_OECONF`
+- Tasks such as :ref:`ref-tasks-configure`
+- :term:`PACKAGE_ARCH`
+- :term:`PACKAGES`
+- :term:`FILES`
+- :term:`RDEPENDS`
+- :term:`RRECOMMENDS`
+- :term:`RSUGGESTS`
+- :term:`RPROVIDES`
+- :term:`RCONFLICTS`
+- :term:`BBCLASSEXTEND`
+
+There are some cases where ordering is important and these cases would override
+this default order. Examples include:
+
+- :term:`PACKAGE_ARCH` needing to be set before ``inherit packagegroup``
+
+Tasks should be ordered based on the order they generally execute. For commonly
+used tasks this would be:
+
+- :ref:`ref-tasks-fetch`
+- :ref:`ref-tasks-unpack`
+- :ref:`ref-tasks-patch`
+- :ref:`ref-tasks-prepare_recipe_sysroot`
+- :ref:`ref-tasks-configure`
+- :ref:`ref-tasks-compile`
+- :ref:`ref-tasks-install`
+- :ref:`ref-tasks-populate_sysroot`
+- :ref:`ref-tasks-package`
+
+Custom tasks should be sorted similarly.
+
+Package specific variables are typically grouped together, e.g.::
+
+ RDEPENDS:${PN} = “foo”
+ RDEPENDS:${PN}-libs = “bar”
+
+ RRECOMMENDS:${PN} = “one”
+ RRECOMMENDS:${PN}-libs = “two”
+
+Recipe License Fields
+---------------------
+
+Recipes need to define both the :term:`LICENSE` and
+:term:`LIC_FILES_CHKSUM` variables:
+
+- :term:`LICENSE`: This variable specifies the license for the software.
+ If you do not know the license under which the software you are
+ building is distributed, you should go to the source code and look
+ for that information. Typical files containing this information
+ include ``COPYING``, :term:`LICENSE`, and ``README`` files. You could
+ also find the information near the top of a source file. For example,
+ given a piece of software licensed under the GNU General Public
+ License version 2, you would set :term:`LICENSE` as follows::
+
+ LICENSE = "GPL-2.0-only"
+
+ The licenses you specify within :term:`LICENSE` can have any name as long
+ as you do not use spaces, since spaces are used as separators between
+ license names. For standard licenses, use the names of the files in
+ ``meta/files/common-licenses/`` or the :term:`SPDXLICENSEMAP` flag names
+ defined in ``meta/conf/licenses.conf``.
+
+- :term:`LIC_FILES_CHKSUM`: The OpenEmbedded build system uses this
+ variable to make sure the license text has not changed. If it has,
+ the build produces an error and it affords you the chance to figure
+ it out and correct the problem.
+
+ You need to specify all applicable licensing files for the software.
+ At the end of the configuration step, the build process will compare
+ the checksums of the files to be sure the text has not changed. Any
+ differences result in an error with the message containing the
+ current checksum. For more explanation and examples of how to set the
+ :term:`LIC_FILES_CHKSUM` variable, see the
+ ":ref:`dev-manual/licenses:tracking license changes`" section.
+
+ To determine the correct checksum string, you can list the
+ appropriate files in the :term:`LIC_FILES_CHKSUM` variable with incorrect
+ md5 strings, attempt to build the software, and then note the
+ resulting error messages that will report the correct md5 strings.
+ See the ":ref:`dev-manual/new-recipe:fetching code`" section for
+ additional information.
+
+ Here is an example that assumes the software has a ``COPYING`` file::
+
+ LIC_FILES_CHKSUM = "file://COPYING;md5=xxx"
+
+ When you try to build the
+ software, the build system will produce an error and give you the
+ correct string that you can substitute into the recipe file for a
+ subsequent build.
+
+License Updates
+~~~~~~~~~~~~~~~
+
+When you change the :term:`LICENSE` or :term:`LIC_FILES_CHKSUM` in the recipe
+you need to briefly explain the reason for the change via a ``License-Update:``
+tag. Often it's quite trivial, such as::
+
+ License-Update: copyright years refreshed
+
+Less often, the actual licensing terms themselves will have changed. If so, do
+try to link to upstream making/justifying that decision.
+
+Tips and Guidelines for Writing Recipes
+---------------------------------------
+
+- Use :term:`BBCLASSEXTEND` instead of creating separate recipes such as ``-native``
+ and ``-nativesdk`` ones, whenever possible. This avoids having to maintain multiple
+ recipe files at the same time.
+
+- Recipes should have tasks which are idempotent, i.e. that executing a given task
+ multiple times shouldn't change the end result. The build environment is built upon
+ this assumption and breaking it can cause obscure build failures.
+
+- For idempotence when modifying files in tasks, it is usually best to:
+
+ - copy a file ``X`` to ``X.orig`` (only if it doesn't exist already)
+ - then, copy ``X.orig`` back to ``X``,
+ - and, finally, modify ``X``.
+
+ This ensures if rerun the task always has the same end result and the
+ original file can be preserved to reuse. It also guards against an
+ interrupted build corrupting the file.
+
+Patch Upstream Status
+=====================
+
+In order to keep track of patches applied by recipes and ultimately reduce the
+number of patches that need maintaining, the OpenEmbedded build system
+requires information about the upstream status of each patch.
+
+In its description, each patch should provide detailed information about the
+bug that it addresses, such as the URL in a bug tracking system and links
+to relevant mailing list archives.
+
+Then, you should also add an ``Upstream-Status:`` tag containing one of the
+following status strings:
+
+``Pending``
+ No determination has been made yet, or patch has not yet been submitted to
+ upstream.
+
+ Keep in mind that every patch submitted upstream reduces the maintainance
+ burden in OpenEmbedded and Yocto Project in the long run, so this patch
+ status should only be used in exceptional cases if there are genuine
+ obstacles to submitting a patch upstream; the reason for that should be
+ included in the patch.
+
+``Submitted [where]``
+ Submitted to upstream, waiting for approval. Optionally include where
+ it was submitted, such as the author, mailing list, etc.
+
+``Backport [version]``
+ Accepted upstream and included in the next release, or backported from newer
+ upstream version, because we are at a fixed version.
+ Include upstream version info (e.g. commit ID or next expected version).
+
+``Denied``
+ Not accepted by upstream, include reason in patch.
+
+``Inactive-Upstream [lastcommit: when (and/or) lastrelease: when]``
+ The upstream is no longer available. This typically means a defunct project
+ where no activity has happened for a long time --- measured in years. To make
+ that judgement, it is recommended to look at not only when the last release
+ happened, but also when the last commit happened, and whether newly made bug
+ reports and merge requests since that time receive no reaction. It is also
+ recommended to add to the patch description any relevant links where the
+ inactivity can be clearly seen.
+
+``Inappropriate [reason]``
+ The patch is not appropriate for upstream, include a brief reason on the
+ same line enclosed with ``[]``. In the past, there were several different
+ reasons not to submit patches upstream, but we have to consider that every
+ non-upstreamed patch means a maintainance burden for recipe maintainers.
+ Currently, the only reasons to mark patches as inappropriate for upstream
+ submission are:
+
+ - ``oe specific``: the issue is specific to how OpenEmbedded performs builds
+ or sets things up at runtime, and can be resolved only with a patch that
+ is not however relevant or appropriate for general upstream submission.
+ - ``upstream ticket <link>``: the issue is not specific to Open-Embedded
+ and should be fixed upstream, but the patch in its current form is not
+ suitable for merging upstream, and the author lacks sufficient expertise
+ to develop a proper patch. Instead the issue is handled via a bug report
+ (include link).
+
+Of course, if another person later takes care of submitting this patch upstream,
+the status should be changed to ``Submitted [where]``, and an additional
+``Signed-off-by:`` line should be added to the patch by the person claiming
+responsibility for upstreaming.
+
+Examples
+--------
+
+Here's an example of a patch that has been submitted upstream::
+
+ rpm: Adjusted the foo setting in bar
+
+ [RPM Ticket #65] -- http://rpm5.org/cvs/tktview?tn=65,5
+
+ The foo setting in bar was decreased from X to X-50% in order to
+ ensure we don't exhaust all system memory with foobar threads.
+
+ Upstream-Status: Submitted [rpm5-devel@rpm5.org]
+
+ Signed-off-by: Joe Developer <joe.developer@example.com>
+
+A future update can change the value to ``Backport`` or ``Denied`` as
+appropriate.
+
+Another example of a patch that is specific to OpenEmbedded::
+
+ Do not treat warnings as errors
+
+ There are additional warnings found with musl which are
+ treated as errors and fails the build, we have more combinations
+ than upstream supports to handle.
+
+ Upstream-Status: Inappropriate [oe specific]
+
+Here's a patch that has been backported from an upstream commit::
+
+ include missing sys/file.h for LOCK_EX
+
+ Upstream-Status: Backport [https://github.com/systemd/systemd/commit/ac8db36cbc26694ee94beecc8dca208ec4b5fd45]
+
+CVE patches
+===========
+
+In order to have a better control of vulnerabilities, patches that fix CVEs must
+contain a ``CVE:`` tag. This tag list all CVEs fixed by the patch. If more than
+one CVE is fixed, separate them using spaces.
+
+CVE Examples
+------------
+
+This should be the header of patch that fixes :cve:`2015-8370` in GRUB2::
+
+ grub2: Fix CVE-2015-8370
+
+ [No upstream tracking] -- https://bugzilla.redhat.com/show_bug.cgi?id=1286966
+
+ Back to 28; Grub2 Authentication
+
+ Two functions suffer from integer underflow fault; the grub_username_get() and grub_password_get()located in
+ grub-core/normal/auth.c and lib/crypto.c respectively. This can be exploited to obtain a Grub rescue shell.
+
+ Upstream-Status: Backport [http://git.savannah.gnu.org/cgit/grub.git/commit/?id=451d80e52d851432e109771bb8febafca7a5f1f2]
+ CVE: CVE-2015-8370
+ Signed-off-by: Joe Developer <joe.developer@example.com>
diff --git a/documentation/contributor-guide/report-defect.rst b/documentation/contributor-guide/report-defect.rst
new file mode 100644
index 0000000000..8ef133b842
--- /dev/null
+++ b/documentation/contributor-guide/report-defect.rst
@@ -0,0 +1,67 @@
+.. SPDX-License-Identifier: CC-BY-SA-2.0-UK
+
+Reporting a Defect Against the Yocto Project and OpenEmbedded
+**************************************************************
+
+You can use the Yocto Project instance of
+`Bugzilla <https://www.bugzilla.org/about/>`__ to submit a defect (bug)
+against BitBake, OpenEmbedded-Core, against any other Yocto Project component
+or for tool issues. For additional information on this implementation of
+Bugzilla see the ":ref:`Yocto Project Bugzilla <resources-bugtracker>`" section
+in the Yocto Project Reference Manual. For more detail on any of the following
+steps, see the Yocto Project
+:yocto_wiki:`Bugzilla wiki page </Bugzilla_Configuration_and_Bug_Tracking>`.
+
+Use the following general steps to submit a bug:
+
+#. Open the Yocto Project implementation of :yocto_bugs:`Bugzilla <>`.
+
+#. Click "File a Bug" to enter a new bug.
+
+#. Choose the appropriate "Classification", "Product", and "Component"
+ for which the bug was found. Bugs for the Yocto Project fall into
+ one of several classifications, which in turn break down into
+ several products and components. For example, for a bug against the
+ ``meta-intel`` layer, you would choose "Build System, Metadata &
+ Runtime", "BSPs", and "bsps-meta-intel", respectively.
+
+#. Choose the "Version" of the Yocto Project for which you found the
+ bug (e.g. &DISTRO;).
+
+#. Determine and select the "Severity" of the bug. The severity
+ indicates how the bug impacted your work.
+
+#. Choose the "Hardware" that the bug impacts.
+
+#. Choose the "Architecture" that the bug impacts.
+
+#. Choose a "Documentation change" item for the bug. Fixing a bug might
+ or might not affect the Yocto Project documentation. If you are
+ unsure of the impact to the documentation, select "Don't Know".
+
+#. Provide a brief "Summary" of the bug. Try to limit your summary to
+ just a line or two and be sure to capture the essence of the bug.
+
+#. Provide a detailed "Description" of the bug. You should provide as
+ much detail as you can about the context, behavior, output, and so
+ forth that surrounds the bug. You can even attach supporting files
+ for output from logs by using the "Add an attachment" button.
+
+#. Click the "Submit Bug" button submit the bug. A new Bugzilla number
+ is assigned to the bug and the defect is logged in the bug tracking
+ system.
+
+Once you file a bug, the bug is processed by the Yocto Project Bug
+Triage Team and further details concerning the bug are assigned (e.g.
+priority and owner). You are the "Submitter" of the bug and any further
+categorization, progress, or comments on the bug result in Bugzilla
+sending you an automated email concerning the particular change or
+progress to the bug.
+
+There are no guarantees about if or when a bug might be worked on since an
+open-source project has no dedicated engineering resources. However, the
+project does have a good track record of resolving common issues over the
+medium and long term. We do encourage people to file bugs so issues are
+at least known about. It helps other users when they find somebody having
+the same issue as they do, and an issue that is unknown is much less likely
+to ever be fixed!
diff --git a/documentation/contributor-guide/submit-changes.rst b/documentation/contributor-guide/submit-changes.rst
new file mode 100644
index 0000000000..47a416b245
--- /dev/null
+++ b/documentation/contributor-guide/submit-changes.rst
@@ -0,0 +1,827 @@
+.. SPDX-License-Identifier: CC-BY-SA-2.0-UK
+
+Contributing Changes to a Component
+************************************
+
+Contributions to the Yocto Project and OpenEmbedded are very welcome.
+Because the system is extremely configurable and flexible, we recognize
+that developers will want to extend, configure or optimize it for their
+specific uses.
+
+.. _ref-why-mailing-lists:
+
+Contributing through mailing lists --- Why not using web-based workflows?
+=========================================================================
+
+Both Yocto Project and OpenEmbedded have many key components that are
+maintained by patches being submitted on mailing lists. We appreciate this
+approach does look a little old fashioned when other workflows are available
+through web technology such as GitHub, GitLab and others. Since we are often
+asked this question, we’ve decided to document the reasons for using mailing
+lists.
+
+One significant factor is that we value peer review. When a change is proposed
+to many of the core pieces of the project, it helps to have many eyes of review
+go over them. Whilst there is ultimately one maintainer who needs to make the
+final call on accepting or rejecting a patch, the review is made by many eyes
+and the exact people reviewing it are likely unknown to the maintainer. It is
+often the surprise reviewer that catches the most interesting issues!
+
+This is in contrast to the "GitHub" style workflow where either just a
+maintainer makes that review, or review is specifically requested from
+nominated people. We believe there is significant value added to the codebase
+by this peer review and that moving away from mailing lists would be to the
+detriment of our code.
+
+We also need to acknowledge that many of our developers are used to this
+mailing list workflow and have worked with it for years, with tools and
+processes built around it. Changing away from this would result in a loss
+of key people from the project, which would again be to its detriment.
+
+The projects are acutely aware that potential new contributors find the
+mailing list approach off-putting and would prefer a web-based GUI.
+Since we don’t believe that can work for us, the project is aiming to ensure
+`patchwork <https://patchwork.yoctoproject.org/>`__ is available to help track
+patch status and also looking at how tooling can provide more feedback to users
+about patch status. We are looking at improving tools such as ``patchtest`` to
+test user contributions before they hit the mailing lists and also at better
+documenting how to use such workflows since we recognise that whilst this was
+common knowledge a decade ago, it might not be as familiar now.
+
+Preparing Changes for Submission
+================================
+
+Set up Git
+----------
+
+The first thing to do is to install Git packages. Here is an example
+on Debian and Ubuntu::
+
+ sudo apt install git-core git-email
+
+Then, you need to set a name and e-mail address that Git will
+use to identify your commits::
+
+ git config --global user.name "Ada Lovelace"
+ git config --global user.email "ada.lovelace@gmail.com"
+
+Clone the Git repository for the component to modify
+----------------------------------------------------
+
+After identifying the component to modify as described in the
+":doc:`../contributor-guide/identify-component`" section, clone the
+corresponding Git repository. Here is an example for OpenEmbedded-Core::
+
+ git clone https://git.openembedded.org/openembedded-core
+ cd openembedded-core
+
+Create a new branch
+-------------------
+
+Then, create a new branch in your local Git repository
+for your changes, starting from the reference branch in the upstream
+repository (often called ``master``)::
+
+ $ git checkout <ref-branch>
+ $ git checkout -b my-changes
+
+If you have completely unrelated sets of changes to submit, you should even
+create one branch for each set.
+
+Implement and commit changes
+----------------------------
+
+In each branch, you should group your changes into small, controlled and
+isolated ones. Keeping changes small and isolated aids review, makes
+merging/rebasing easier and keeps the change history clean should anyone need
+to refer to it in future.
+
+To this purpose, you should create *one Git commit per change*,
+corresponding to each of the patches you will eventually submit.
+See `further guidance <https://www.kernel.org/doc/html/latest/process/submitting-patches.html#separate-your-changes>`__
+in the Linux kernel documentation if needed.
+
+For example, when you intend to add multiple new recipes, each recipe
+should be added in a separate commit. For upgrades to existing recipes,
+the previous version should usually be deleted as part of the same commit
+to add the upgraded version.
+
+#. *Stage Your Changes:* Stage your changes by using the ``git add``
+ command on each file you modified. If you want to stage all the
+ files you modified, you can even use the ``git add -A`` command.
+
+#. *Commit Your Changes:* This is when you can create separate commits. For
+ each commit to create, use the ``git commit -s`` command with the files
+ or directories you want to include in the commit::
+
+ $ git commit -s file1 file2 dir1 dir2 ...
+
+ To include **a**\ ll staged files::
+
+ $ git commit -sa
+
+ - The ``-s`` option of ``git commit`` adds a "Signed-off-by:" line
+ to your commit message. There is the same requirement for contributing
+ to the Linux kernel. Adding such a line signifies that you, the
+ submitter, have agreed to the `Developer's Certificate of Origin 1.1
+ <https://www.kernel.org/doc/html/latest/process/submitting-patches.html#sign-your-work-the-developer-s-certificate-of-origin>`__
+ as follows:
+
+ .. code-block:: none
+
+ Developer's Certificate of Origin 1.1
+
+ By making a contribution to this project, I certify that:
+
+ (a) The contribution was created in whole or in part by me and I
+ have the right to submit it under the open source license
+ indicated in the file; or
+
+ (b) The contribution is based upon previous work that, to the best
+ of my knowledge, is covered under an appropriate open source
+ license and I have the right under that license to submit that
+ work with modifications, whether created in whole or in part
+ by me, under the same open source license (unless I am
+ permitted to submit under a different license), as indicated
+ in the file; or
+
+ (c) The contribution was provided directly to me by some other
+ person who certified (a), (b) or (c) and I have not modified
+ it.
+
+ (d) I understand and agree that this project and the contribution
+ are public and that a record of the contribution (including all
+ personal information I submit with it, including my sign-off) is
+ maintained indefinitely and may be redistributed consistent with
+ this project or the open source license(s) involved.
+
+ - Provide a single-line summary of the change and, if more
+ explanation is needed, provide more detail in the body of the
+ commit. This summary is typically viewable in the "shortlist" of
+ changes. Thus, providing something short and descriptive that
+ gives the reader a summary of the change is useful when viewing a
+ list of many commits. You should prefix this short description
+ with the recipe name (if changing a recipe), or else with the
+ short form path to the file being changed.
+
+ .. note::
+
+ To find a suitable prefix for the commit summary, a good idea
+ is to look for prefixes used in previous commits touching the
+ same files or directories::
+
+ git log --oneline <paths>
+
+ - For the body of the commit message, provide detailed information
+ that describes what you changed, why you made the change, and the
+ approach you used. It might also be helpful if you mention how you
+ tested the change. Provide as much detail as you can in the body
+ of the commit message.
+
+ .. note::
+
+ If the single line summary is enough to describe a simple
+ change, the body of the commit message can be left empty.
+
+ - If the change addresses a specific bug or issue that is associated
+ with a bug-tracking ID, include a reference to that ID in your
+ detailed description. For example, the Yocto Project uses a
+ specific convention for bug references --- any commit that addresses
+ a specific bug should use the following form for the detailed
+ description. Be sure to use the actual bug-tracking ID from
+ Bugzilla for bug-id::
+
+ Fixes [YOCTO #bug-id]
+
+ detailed description of change
+
+#. *Crediting contributors:* By using the ``git commit --amend`` command,
+ you can add some tags to the commit description to credit other contributors
+ to the change:
+
+ - ``Reported-by``: name and email of a person reporting a bug
+ that your commit is trying to fix. This is a good practice
+ to encourage people to go on reporting bugs and let them
+ know that their reports are taken into account.
+
+ - ``Suggested-by``: name and email of a person to credit for the
+ idea of making the change.
+
+ - ``Tested-by``, ``Reviewed-by``: name and email for people having
+ tested your changes or reviewed their code. These fields are
+ usually added by the maintainer accepting a patch, or by
+ yourself if you submitted your patches to early reviewers,
+ or are submitting an unmodified patch again as part of a
+ new iteration of your patch series.
+
+ - ``CC:`` Name and email of people you want to send a copy
+ of your changes to. This field will be used by ``git send-email``.
+
+ See `more guidance about using such tags
+ <https://www.kernel.org/doc/html/latest/process/submitting-patches.html#using-reported-by-tested-by-reviewed-by-suggested-by-and-fixes>`__
+ in the Linux kernel documentation.
+
+Test your changes
+-----------------
+
+For each contributions you make, you should test your changes as well.
+For this the Yocto Project offers several types of tests. Those tests cover
+different areas and it depends on your changes which are feasible. For example run:
+
+ - For changes that affect the build environment:
+
+ - ``bitbake-selftest``: for changes within BitBake
+
+ - ``oe-selftest``: to test combinations of BitBake runs
+
+ - ``oe-build-perf-test``: to test the performance of common build scenarios
+
+ - For changes in a recipe:
+
+ - ``ptest``: run package specific tests, if they exist
+
+ - ``testimage``: build an image, boot it and run testcases on it
+
+ - If applicable, ensure also the ``native`` and ``nativesdk`` variants builds
+
+ - For changes relating to the SDK:
+
+ - ``testsdk``: to build, install and run tests against a SDK
+
+ - ``testsdk_ext``: to build, install and run tests against an extended SDK
+
+Note that this list just gives suggestions and is not exhaustive. More details can
+be found here: :ref:`test-manual/intro:Yocto Project Tests --- Types of Testing Overview`.
+
+Creating Patches
+================
+
+Here is the general procedure on how to create patches to be sent through email:
+
+#. *Describe the Changes in your Branch:* If you have more than one commit
+ in your branch, it's recommended to provide a cover letter describing
+ the series of patches you are about to send.
+
+ For this purpose, a good solution is to store the cover letter contents
+ in the branch itself::
+
+ git branch --edit-description
+
+ This will open a text editor to fill in the description for your
+ changes. This description can be updated when necessary and will
+ be used by Git to create the cover letter together with the patches.
+
+ It is recommended to start this description with a title line which
+ will serve a the subject line for the cover letter.
+
+#. *Generate Patches for your Branch:* The ``git format-patch`` command will
+ generate patch files for each of the commits in your branch. You need
+ to pass the reference branch your branch starts from.
+
+ If you branch didn't need a description in the previous step::
+
+ $ git format-patch <ref-branch>
+
+ If you filled a description for your branch, you will want to generate
+ a cover letter too::
+
+ $ git format-patch --cover-letter --cover-from-description=auto <ref-branch>
+
+ After the command is run, the current directory contains numbered
+ ``.patch`` files for the commits in your branch. If you have a cover
+ letter, it will be in the ``0000-cover-letter.patch``.
+
+ .. note::
+
+ The ``--cover-from-description=auto`` option makes ``git format-patch``
+ use the first paragraph of the branch description as the cover
+ letter title. Another possibility, which is easier to remember, is to pass
+ only the ``--cover-letter`` option, but you will have to edit the
+ subject line manually every time you generate the patches.
+
+ See the `git format-patch manual page <https://git-scm.com/docs/git-format-patch>`__
+ for details.
+
+#. *Review each of the Patch Files:* This final review of the patches
+ before sending them often allows to view your changes from a different
+ perspective and discover defects such as typos, spacing issues or lines
+ or even files that you didn't intend to modify. This review should
+ include the cover letter patch too.
+
+ If necessary, rework your commits as described in
+ ":ref:`contributor-guide/submit-changes:taking patch review into account`".
+
+Validating Patches with Patchtest
+=================================
+
+``patchtest`` is available in ``openembedded-core`` as a tool for making
+sure that your patches are well-formatted and contain important info for
+maintenance purposes, such as ``Signed-off-by`` and ``Upstream-Status``
+tags. Note that no functional testing of the changes will be performed by ``patchtest``.
+Currently, it only supports testing patches for ``openembedded-core`` branches.
+To setup, perform the following::
+
+ pip install -r meta/lib/patchtest/requirements.txt
+ source oe-init-build-env
+ bitbake-layers add-layer ../meta-selftest
+
+Once these steps are complete and you have generated your patch files,
+you can run ``patchtest`` like so::
+
+ patchtest --patch <patch_name>
+
+Alternatively, if you want ``patchtest`` to iterate over and test
+multiple patches stored in a directory, you can use::
+
+ patchtest --directory <directory_name>
+
+By default, ``patchtest`` uses its own modules' file paths to determine what
+repository and test suite to check patches against. If you wish to test
+patches against a repository other than ``openembedded-core`` and/or use
+a different set of tests, you can use the ``--repodir`` and ``--testdir``
+flags::
+
+ patchtest --patch <patch_name> --repodir <path/to/repo> --testdir <path/to/testdir>
+
+Finally, note that ``patchtest`` is designed to test patches in a standalone
+way, so if your patches are meant to apply on top of changes made by
+previous patches in a series, it is possible that ``patchtest`` will report
+false failures regarding the "merge on head" test.
+
+Using ``patchtest`` in this manner provides a final check for the overall
+quality of your changes before they are submitted for review by the
+maintainers.
+
+Sending the Patches via Email
+=============================
+
+Using Git to Send Patches
+-------------------------
+
+To submit patches through email, it is very important that you send them
+without any whitespace or HTML formatting that either you or your mailer
+introduces. The maintainer that receives your patches needs to be able
+to save and apply them directly from your emails, using the ``git am``
+command.
+
+Using the ``git send-email`` command is the only error-proof way of sending
+your patches using email since there is no risk of compromising whitespace
+in the body of the message, which can occur when you use your own mail
+client. It will also properly include your patches as *inline attachments*,
+which is not easy to do with standard e-mail clients without breaking lines.
+If you used your regular e-mail client and shared your patches as regular
+attachments, reviewers wouldn't be able to quote specific sections of your
+changes and make comments about them.
+
+Setting up Git to Send Email
+----------------------------
+
+The ``git send-email`` command can send email by using a local or remote
+Mail Transport Agent (MTA) such as ``msmtp``, ``sendmail``, or
+through a direct SMTP configuration in your Git ``~/.gitconfig`` file.
+
+Here are the settings for letting ``git send-email`` send e-mail through your
+regular STMP server, using a Google Mail account as an example::
+
+ git config --global sendemail.smtpserver smtp.gmail.com
+ git config --global sendemail.smtpserverport 587
+ git config --global sendemail.smtpencryption tls
+ git config --global sendemail.smtpuser ada.lovelace@gmail.com
+ git config --global sendemail.smtppass = XXXXXXXX
+
+These settings will appear in the ``.gitconfig`` file in your home directory.
+
+If you neither can use a local MTA nor SMTP, make sure you use an email client
+that does not touch the message (turning spaces in tabs, wrapping lines, etc.).
+A good mail client to do so is Pine (or Alpine) or Mutt. For more
+information about suitable clients, see `Email clients info for Linux
+<https://www.kernel.org/doc/html/latest/process/email-clients.html>`__
+in the Linux kernel sources.
+
+If you use such clients, just include the patch in the body of your email.
+
+Finding a Suitable Mailing List
+-------------------------------
+
+You should send patches to the appropriate mailing list so that they can be
+reviewed by the right contributors and merged by the appropriate maintainer.
+The specific mailing list you need to use depends on the location of the code
+you are changing.
+
+If people have concerns with any of the patches, they will usually voice
+their concern over the mailing list. If patches do not receive any negative
+reviews, the maintainer of the affected layer typically takes them, tests them,
+and then based on successful testing, merges them.
+
+In general, each component (e.g. layer) should have a ``README`` file
+that indicates where to send the changes and which process to follow.
+
+The "poky" repository, which is the Yocto Project's reference build
+environment, is a hybrid repository that contains several individual
+pieces (e.g. BitBake, Metadata, documentation, and so forth) built using
+the combo-layer tool. The upstream location used for submitting changes
+varies by component:
+
+- *Core Metadata:* Send your patches to the
+ :oe_lists:`openembedded-core </g/openembedded-core>`
+ mailing list. For example, a change to anything under the ``meta`` or
+ ``scripts`` directories should be sent to this mailing list.
+
+- *BitBake:* For changes to BitBake (i.e. anything under the
+ ``bitbake`` directory), send your patches to the
+ :oe_lists:`bitbake-devel </g/bitbake-devel>`
+ mailing list.
+
+- *meta-poky* and *meta-yocto-bsp* trees: These trees contain Metadata. Use the
+ :yocto_lists:`poky </g/poky>` mailing list.
+
+- *Documentation*: For changes to the Yocto Project documentation, use the
+ :yocto_lists:`docs </g/docs>` mailing list.
+
+For changes to other layers and tools hosted in the Yocto Project source
+repositories (i.e. :yocto_git:`git.yoctoproject.org <>`), use the
+:yocto_lists:`yocto-patches </g/yocto-patches/>` general mailing list.
+
+For changes to other layers hosted in the OpenEmbedded source
+repositories (i.e. :oe_git:`git.openembedded.org <>`), use
+the :oe_lists:`openembedded-devel </g/openembedded-devel>`
+mailing list, unless specified otherwise in the layer's ``README`` file.
+
+If you intend to submit a new recipe that neither fits into the core Metadata,
+nor into :oe_git:`meta-openembedded </meta-openembedded/>`, you should
+look for a suitable layer in https://layers.openembedded.org. If similar
+recipes can be expected, you may consider :ref:`dev-manual/layers:creating your own layer`.
+
+If in doubt, please ask on the :yocto_lists:`yocto </g/yocto/>` general mailing list
+or on the :oe_lists:`openembedded-devel </g/openembedded-devel>` mailing list.
+
+Subscribing to the Mailing List
+-------------------------------
+
+After identifying the right mailing list to use, you will have to subscribe to
+it if you haven't done it yet.
+
+If you attempt to send patches to a list you haven't subscribed to, your email
+will be returned as undelivered.
+
+However, if you don't want to be receive all the messages sent to a mailing list,
+you can set your subscription to "no email". You will still be a subscriber able
+to send messages, but you won't receive any e-mail. If people reply to your message,
+their e-mail clients will default to including your email address in the
+conversation anyway.
+
+Anyway, you'll also be able to access the new messages on mailing list archives,
+either through a web browser, or for the lists archived on https://lore.kernel.org,
+through an individual newsgroup feed or a git repository.
+
+Sending Patches via Email
+-------------------------
+
+At this stage, you are ready to send your patches via email. Here's the
+typical usage of ``git send-email``::
+
+ git send-email --to <mailing-list-address> *.patch
+
+Then, review each subject line and list of recipients carefully, and then
+and then allow the command to send each message.
+
+You will see that ``git send-email`` will automatically copy the people listed
+in any commit tags such as ``Signed-off-by`` or ``Reported-by``.
+
+In case you are sending patches for :oe_git:`meta-openembedded </meta-openembedded/>`
+or any layer other than :oe_git:`openembedded-core </openembedded-core/>`,
+please add the appropriate prefix so that it is clear which layer the patch is intended
+to be applied to::
+
+ git format-patch --subject-prefix="meta-oe][PATCH" ...
+
+.. note::
+
+ It is actually possible to send patches without generating them
+ first. However, make sure you have reviewed your changes carefully
+ because ``git send-email`` will just show you the title lines of
+ each patch.
+
+ Here's a command you can use if you just have one patch in your
+ branch::
+
+ git send-email --to <mailing-list-address> -1
+
+ If you have multiple patches and a cover letter, you can send
+ patches for all the commits between the reference branch
+ and the tip of your branch::
+
+ git send-email --cover-letter --cover-from-description=auto --to <mailing-list-address> -M <ref-branch>
+
+See the `git send-email manual page <https://git-scm.com/docs/git-send-email>`__
+for details.
+
+Troubleshooting Email Issues
+----------------------------
+
+Fixing your From identity
+~~~~~~~~~~~~~~~~~~~~~~~~~
+
+We have a frequent issue with contributors whose patches are received through
+a ``From`` field which doesn't match the ``Signed-off-by`` information. Here is
+a typical example for people sending from a domain name with :wikipedia:`DMARC`::
+
+ From: "Linus Torvalds via lists.openembedded.org <linus.torvalds=kernel.org@lists.openembedded.org>"
+
+This ``From`` field is used by ``git am`` to recreate commits with the right
+author name. The following will ensure that your e-mails have an additional
+``From`` field at the beginning of the Email body, and therefore that
+maintainers accepting your patches don't have to fix commit author information
+manually::
+
+ git config --global sendemail.from "linus.torvalds@kernel.org"
+
+The ``sendemail.from`` should match your ``user.email`` setting,
+which appears in the ``Signed-off-by`` line of your commits.
+
+Streamlining git send-email usage
+---------------------------------
+
+If you want to save time and not be forced to remember the right options to use
+with ``git send-email``, you can use Git configuration settings.
+
+- To set the right mailing list address for a given repository::
+
+ git config --local sendemail.to openembedded-devel@lists.openembedded.org
+
+- If the mailing list requires a subject prefix for the layer
+ (this only works when the repository only contains one layer)::
+
+ git config --local format.subjectprefix "meta-something][PATCH"
+
+Using Scripts to Push a Change Upstream and Request a Pull
+==========================================================
+
+For larger patch series it is preferable to send a pull request which not
+only includes the patch but also a pointer to a branch that can be pulled
+from. This involves making a local branch for your changes, pushing this
+branch to an accessible repository and then using the ``create-pull-request``
+and ``send-pull-request`` scripts from openembedded-core to create and send a
+patch series with a link to the branch for review.
+
+Follow this procedure to push a change to an upstream "contrib" Git
+repository once the steps in
+":ref:`contributor-guide/submit-changes:preparing changes for submission`"
+have been followed:
+
+.. note::
+
+ You can find general Git information on how to push a change upstream
+ in the
+ `Git Community Book <https://git-scm.com/book/en/v2/Distributed-Git-Distributed-Workflows>`__.
+
+#. *Request Push Access to an "Upstream" Contrib Repository:* Send an email to
+ ``helpdesk@yoctoproject.org``:
+
+ - Attach your SSH public key which usually named ``id_rsa.pub.``.
+ If you don't have one generate it by running ``ssh-keygen -t rsa -b 4096 -C "your_email@example.com"``.
+
+ - List the repositories you're planning to contribute to.
+
+ - Include your preferred branch prefix for ``-contrib`` repositories.
+
+#. *Push Your Commits to the "Contrib" Upstream:* Push your
+ changes to that repository::
+
+ $ git push upstream_remote_repo local_branch_name
+
+ For example, suppose you have permissions to push
+ into the upstream ``meta-intel-contrib`` repository and you are
+ working in a local branch named `your_name`\ ``/README``. The following
+ command pushes your local commits to the ``meta-intel-contrib``
+ upstream repository and puts the commit in a branch named
+ `your_name`\ ``/README``::
+
+ $ git push meta-intel-contrib your_name/README
+
+#. *Determine Who to Notify:* Determine the maintainer or the mailing
+ list that you need to notify for the change.
+
+ Before submitting any change, you need to be sure who the maintainer
+ is or what mailing list that you need to notify. Use either these
+ methods to find out:
+
+ - *Maintenance File:* Examine the ``maintainers.inc`` file, which is
+ located in the :term:`Source Directory` at
+ ``meta/conf/distro/include``, to see who is responsible for code.
+
+ - *Search by File:* Using :ref:`overview-manual/development-environment:git`, you can
+ enter the following command to bring up a short list of all
+ commits against a specific file::
+
+ git shortlog -- filename
+
+ Just provide the name of the file for which you are interested. The
+ information returned is not ordered by history but does include a
+ list of everyone who has committed grouped by name. From the list,
+ you can see who is responsible for the bulk of the changes against
+ the file.
+
+ - *Find the Mailing List to Use:* See the
+ ":ref:`contributor-guide/submit-changes:finding a suitable mailing list`"
+ section above.
+
+#. *Make a Pull Request:* Notify the maintainer or the mailing list that
+ you have pushed a change by making a pull request.
+
+ The Yocto Project provides two scripts that conveniently let you
+ generate and send pull requests to the Yocto Project. These scripts
+ are ``create-pull-request`` and ``send-pull-request``. You can find
+ these scripts in the ``scripts`` directory within the
+ :term:`Source Directory` (e.g.
+ ``poky/scripts``).
+
+ Using these scripts correctly formats the requests without
+ introducing any whitespace or HTML formatting. The maintainer that
+ receives your patches either directly or through the mailing list
+ needs to be able to save and apply them directly from your emails.
+ Using these scripts is the preferred method for sending patches.
+
+ First, create the pull request. For example, the following command
+ runs the script, specifies the upstream repository in the contrib
+ directory into which you pushed the change, and provides a subject
+ line in the created patch files::
+
+ $ poky/scripts/create-pull-request -u meta-intel-contrib -s "Updated Manual Section Reference in README"
+
+ Running this script forms ``*.patch`` files in a folder named
+ ``pull-``\ `PID` in the current directory. One of the patch files is a
+ cover letter.
+
+ Before running the ``send-pull-request`` script, you must edit the
+ cover letter patch to insert information about your change. After
+ editing the cover letter, send the pull request. For example, the
+ following command runs the script and specifies the patch directory
+ and email address. In this example, the email address is a mailing
+ list::
+
+ $ poky/scripts/send-pull-request -p ~/meta-intel/pull-10565 -t meta-intel@lists.yoctoproject.org
+
+ You need to follow the prompts as the script is interactive.
+
+ .. note::
+
+ For help on using these scripts, simply provide the ``-h``
+ argument as follows::
+
+ $ poky/scripts/create-pull-request -h
+ $ poky/scripts/send-pull-request -h
+
+Submitting Changes to Stable Release Branches
+=============================================
+
+The process for proposing changes to a Yocto Project stable branch differs
+from the steps described above. Changes to a stable branch must address
+identified bugs or CVEs and should be made carefully in order to avoid the
+risk of introducing new bugs or breaking backwards compatibility. Typically
+bug fixes must already be accepted into the master branch before they can be
+backported to a stable branch unless the bug in question does not affect the
+master branch or the fix on the master branch is unsuitable for backporting.
+
+The list of stable branches along with the status and maintainer for each
+branch can be obtained from the
+:yocto_wiki:`Releases wiki page </Releases>`.
+
+.. note::
+
+ Changes will not typically be accepted for branches which are marked as
+ End-Of-Life (EOL).
+
+With this in mind, the steps to submit a change for a stable branch are as
+follows:
+
+#. *Identify the bug or CVE to be fixed:* This information should be
+ collected so that it can be included in your submission.
+
+ See :ref:`dev-manual/vulnerabilities:checking for vulnerabilities`
+ for details about CVE tracking.
+
+#. *Check if the fix is already present in the master branch:* This will
+ result in the most straightforward path into the stable branch for the
+ fix.
+
+ #. *If the fix is present in the master branch --- submit a backport request
+ by email:* You should send an email to the relevant stable branch
+ maintainer and the mailing list with details of the bug or CVE to be
+ fixed, the commit hash on the master branch that fixes the issue and
+ the stable branches which you would like this fix to be backported to.
+
+ #. *If the fix is not present in the master branch --- submit the fix to the
+ master branch first:* This will ensure that the fix passes through the
+ project's usual patch review and test processes before being accepted.
+ It will also ensure that bugs are not left unresolved in the master
+ branch itself. Once the fix is accepted in the master branch a backport
+ request can be submitted as above.
+
+ #. *If the fix is unsuitable for the master branch --- submit a patch
+ directly for the stable branch:* This method should be considered as a
+ last resort. It is typically necessary when the master branch is using
+ a newer version of the software which includes an upstream fix for the
+ issue or when the issue has been fixed on the master branch in a way
+ that introduces backwards incompatible changes. In this case follow the
+ steps in ":ref:`contributor-guide/submit-changes:preparing changes for submission`"
+ and in the following sections but modify the subject header of your patch
+ email to include the name of the stable branch which you are
+ targetting. This can be done using the ``--subject-prefix`` argument to
+ ``git format-patch``, for example to submit a patch to the
+ "&DISTRO_NAME_NO_CAP_MINUS_ONE;" branch use::
+
+ git format-patch --subject-prefix='&DISTRO_NAME_NO_CAP_MINUS_ONE;][PATCH' ...
+
+Taking Patch Review into Account
+================================
+
+You may get feedback on your submitted patches from other community members
+or from the automated patchtest service. If issues are identified in your
+patches then it is usually necessary to address these before the patches are
+accepted into the project. In this case you should your commits according
+to the feedback and submit an updated version to the relevant mailing list.
+
+In any case, never fix reported issues by fixing them in new commits
+on the tip of your branch. Always come up with a new series of commits
+without the reported issues.
+
+.. note::
+
+ It is a good idea to send a copy to the reviewers who provided feedback
+ to the previous version of the patch. You can make sure this happens
+ by adding a ``CC`` tag to the commit description::
+
+ CC: William Shakespeare <bill@yoctoproject.org>
+
+A single patch can be amended using ``git commit --amend``, and multiple
+patches can be easily reworked and reordered through an interactive Git rebase::
+
+ git rebase -i <ref-branch>
+
+See `this tutorial <https://hackernoon.com/beginners-guide-to-interactive-rebasing-346a3f9c3a6d>`__
+for practical guidance about using Git interactive rebasing.
+
+You should also modify the ``[PATCH]`` tag in the email subject line when
+sending the revised patch to mark the new iteration as ``[PATCH v2]``,
+``[PATCH v3]``, etc as appropriate. This can be done by passing the ``-v``
+argument to ``git format-patch`` with a version number::
+
+ git format-patch -v2 <ref-branch>
+
+Lastly please ensure that you also test your revised changes. In particular
+please don't just edit the patch file written out by ``git format-patch`` and
+resend it.
+
+Tracking the Status of Patches
+==============================
+
+The Yocto Project uses a `Patchwork instance <https://patchwork.yoctoproject.org/>`__
+to track the status of patches submitted to the various mailing lists and to
+support automated patch testing. Each submitted patch is checked for common
+mistakes and deviations from the expected patch format and submitters are
+notified by ``patchtest`` if such mistakes are found. This process helps to
+reduce the burden of patch review on maintainers.
+
+.. note::
+
+ This system is imperfect and changes can sometimes get lost in the flow.
+ Asking about the status of a patch or change is reasonable if the change
+ has been idle for a while with no feedback.
+
+If your patches have not had any feedback in a few days, they may have already
+been merged. You can run ``git pull`` branch to check this. Note that many if
+not most layer maintainers do not send out acknowledgement emails when they
+accept patches. Alternatively, if there is no response or merge after a few days
+the patch may have been missed or the appropriate reviewers may not currently be
+around. It is then perfectly fine to reply to it yourself with a reminder asking
+for feedback.
+
+.. note::
+
+ Patch reviews for feature and recipe upgrade patches are likely be delayed
+ during a feature freeze because these types of patches aren't merged during
+ at that time --- you may have to wait until after the freeze is lifted.
+
+Maintainers also commonly use ``-next`` branches to test submissions prior to
+merging patches. Thus, you can get an idea of the status of a patch based on
+whether the patch has been merged into one of these branches. The commonly
+used testing branches for OpenEmbedded-Core are as follows:
+
+- *openembedded-core "master-next" branch:* This branch is part of the
+ :oe_git:`openembedded-core </openembedded-core/>` repository and contains
+ proposed changes to the core metadata.
+
+- *poky "master-next" branch:* This branch is part of the
+ :yocto_git:`poky </poky/>` repository and combines proposed
+ changes to BitBake, the core metadata and the poky distro.
+
+Similarly, stable branches maintained by the project may have corresponding
+``-next`` branches which collect proposed changes. For example,
+``&DISTRO_NAME_NO_CAP;-next`` and ``&DISTRO_NAME_NO_CAP_MINUS_ONE;-next``
+branches in both the "openembdedded-core" and "poky" repositories.
+
+Other layers may have similar testing branches but there is no formal
+requirement or standard for these so please check the documentation for the
+layers you are contributing to.
+
diff --git a/documentation/dev-manual/bmaptool.rst b/documentation/dev-manual/bmaptool.rst
new file mode 100644
index 0000000000..f6f0e6afaf
--- /dev/null
+++ b/documentation/dev-manual/bmaptool.rst
@@ -0,0 +1,59 @@
+.. SPDX-License-Identifier: CC-BY-SA-2.0-UK
+
+Flashing Images Using ``bmaptool``
+**********************************
+
+A fast and easy way to flash an image to a bootable device is to use
+bmaptool, which is integrated into the OpenEmbedded build system.
+bmaptool is a generic tool that creates a file's block map (bmap) and
+then uses that map to copy the file. As compared to traditional tools
+such as dd or cp, bmaptool can copy (or flash) large files like raw
+system image files much faster.
+
+.. note::
+
+ - If you are using Ubuntu or Debian distributions, you can install
+ the ``bmap-tools`` package using the following command and then
+ use the tool without specifying ``PATH`` even from the root
+ account::
+
+ $ sudo apt install bmap-tools
+
+ - If you are unable to install the ``bmap-tools`` package, you will
+ need to build bmaptool before using it. Use the following command::
+
+ $ bitbake bmaptool-native
+
+Following, is an example that shows how to flash a Wic image. Realize
+that while this example uses a Wic image, you can use bmaptool to flash
+any type of image. Use these steps to flash an image using bmaptool:
+
+#. *Update your local.conf File:* You need to have the following set
+ in your ``local.conf`` file before building your image::
+
+ IMAGE_FSTYPES += "wic wic.bmap"
+
+#. *Get Your Image:* Either have your image ready (pre-built with the
+ :term:`IMAGE_FSTYPES`
+ setting previously mentioned) or take the step to build the image::
+
+ $ bitbake image
+
+#. *Flash the Device:* Flash the device with the image by using bmaptool
+ depending on your particular setup. The following commands assume the
+ image resides in the :term:`Build Directory`'s ``deploy/images/`` area:
+
+ - If you have write access to the media, use this command form::
+
+ $ oe-run-native bmaptool-native bmaptool copy build-directory/tmp/deploy/images/machine/image.wic /dev/sdX
+
+ - If you do not have write access to the media, set your permissions
+ first and then use the same command form::
+
+ $ sudo chmod 666 /dev/sdX
+ $ oe-run-native bmaptool-native bmaptool copy build-directory/tmp/deploy/images/machine/image.wic /dev/sdX
+
+For help on the ``bmaptool`` command, use the following command::
+
+ $ bmaptool --help
+
diff --git a/documentation/dev-manual/build-quality.rst b/documentation/dev-manual/build-quality.rst
new file mode 100644
index 0000000000..713ea3a48e
--- /dev/null
+++ b/documentation/dev-manual/build-quality.rst
@@ -0,0 +1,409 @@
+.. SPDX-License-Identifier: CC-BY-SA-2.0-UK
+
+Maintaining Build Output Quality
+********************************
+
+Many factors can influence the quality of a build. For example, if you
+upgrade a recipe to use a new version of an upstream software package or
+you experiment with some new configuration options, subtle changes can
+occur that you might not detect until later. Consider the case where
+your recipe is using a newer version of an upstream package. In this
+case, a new version of a piece of software might introduce an optional
+dependency on another library, which is auto-detected. If that library
+has already been built when the software is building, the software will
+link to the built library and that library will be pulled into your
+image along with the new software even if you did not want the library.
+
+The :ref:`ref-classes-buildhistory` class helps you maintain the quality of
+your build output. You can use the class to highlight unexpected and possibly
+unwanted changes in the build output. When you enable build history, it records
+information about the contents of each package and image and then commits that
+information to a local Git repository where you can examine the information.
+
+The remainder of this section describes the following:
+
+- :ref:`How you can enable and disable build history <dev-manual/build-quality:enabling and disabling build history>`
+
+- :ref:`How to understand what the build history contains <dev-manual/build-quality:understanding what the build history contains>`
+
+- :ref:`How to limit the information used for build history <dev-manual/build-quality:using build history to gather image information only>`
+
+- :ref:`How to examine the build history from both a command-line and web interface <dev-manual/build-quality:examining build history information>`
+
+Enabling and Disabling Build History
+====================================
+
+Build history is disabled by default. To enable it, add the following
+:term:`INHERIT` statement and set the :term:`BUILDHISTORY_COMMIT` variable to
+"1" at the end of your ``conf/local.conf`` file found in the
+:term:`Build Directory`::
+
+ INHERIT += "buildhistory"
+ BUILDHISTORY_COMMIT = "1"
+
+Enabling build history as
+previously described causes the OpenEmbedded build system to collect
+build output information and commit it as a single commit to a local
+:ref:`overview-manual/development-environment:git` repository.
+
+.. note::
+
+ Enabling build history increases your build times slightly,
+ particularly for images, and increases the amount of disk space used
+ during the build.
+
+You can disable build history by removing the previous statements from
+your ``conf/local.conf`` file.
+
+Understanding What the Build History Contains
+=============================================
+
+Build history information is kept in ``${``\ :term:`TOPDIR`\ ``}/buildhistory``
+in the :term:`Build Directory` as defined by the :term:`BUILDHISTORY_DIR`
+variable. Here is an example abbreviated listing:
+
+.. image:: figures/buildhistory.png
+ :align: center
+ :width: 50%
+
+At the top level, there is a ``metadata-revs`` file that lists the
+revisions of the repositories for the enabled layers when the build was
+produced. The rest of the data splits into separate ``packages``,
+``images`` and ``sdk`` directories, the contents of which are described
+as follows.
+
+Build History Package Information
+---------------------------------
+
+The history for each package contains a text file that has name-value
+pairs with information about the package. For example,
+``buildhistory/packages/i586-poky-linux/busybox/busybox/latest``
+contains the following:
+
+.. code-block:: none
+
+ PV = 1.22.1
+ PR = r32
+ RPROVIDES =
+ RDEPENDS = glibc (>= 2.20) update-alternatives-opkg
+ RRECOMMENDS = busybox-syslog busybox-udhcpc update-rc.d
+ PKGSIZE = 540168
+ FILES = /usr/bin/* /usr/sbin/* /usr/lib/busybox/* /usr/lib/lib*.so.* \
+ /etc /com /var /bin/* /sbin/* /lib/*.so.* /lib/udev/rules.d \
+ /usr/lib/udev/rules.d /usr/share/busybox /usr/lib/busybox/* \
+ /usr/share/pixmaps /usr/share/applications /usr/share/idl \
+ /usr/share/omf /usr/share/sounds /usr/lib/bonobo/servers
+ FILELIST = /bin/busybox /bin/busybox.nosuid /bin/busybox.suid /bin/sh \
+ /etc/busybox.links.nosuid /etc/busybox.links.suid
+
+Most of these
+name-value pairs correspond to variables used to produce the package.
+The exceptions are ``FILELIST``, which is the actual list of files in
+the package, and ``PKGSIZE``, which is the total size of files in the
+package in bytes.
+
+There is also a file that corresponds to the recipe from which the package
+came (e.g. ``buildhistory/packages/i586-poky-linux/busybox/latest``):
+
+.. code-block:: none
+
+ PV = 1.22.1
+ PR = r32
+ DEPENDS = initscripts kern-tools-native update-rc.d-native \
+ virtual/i586-poky-linux-compilerlibs virtual/i586-poky-linux-gcc \
+ virtual/libc virtual/update-alternatives
+ PACKAGES = busybox-ptest busybox-httpd busybox-udhcpd busybox-udhcpc \
+ busybox-syslog busybox-mdev busybox-hwclock busybox-dbg \
+ busybox-staticdev busybox-dev busybox-doc busybox-locale busybox
+
+Finally, for those recipes fetched from a version control system (e.g.,
+Git), there is a file that lists source revisions that are specified in
+the recipe and the actual revisions used during the build. Listed
+and actual revisions might differ when
+:term:`SRCREV` is set to
+${:term:`AUTOREV`}. Here is an
+example assuming
+``buildhistory/packages/qemux86-poky-linux/linux-yocto/latest_srcrev``)::
+
+ # SRCREV_machine = "38cd560d5022ed2dbd1ab0dca9642e47c98a0aa1"
+ SRCREV_machine = "38cd560d5022ed2dbd1ab0dca9642e47c98a0aa1"
+ # SRCREV_meta = "a227f20eff056e511d504b2e490f3774ab260d6f"
+ SRCREV_meta ="a227f20eff056e511d504b2e490f3774ab260d6f"
+
+You can use the
+``buildhistory-collect-srcrevs`` command with the ``-a`` option to
+collect the stored :term:`SRCREV` values from build history and report them
+in a format suitable for use in global configuration (e.g.,
+``local.conf`` or a distro include file) to override floating
+:term:`AUTOREV` values to a fixed set of revisions. Here is some example
+output from this command::
+
+ $ buildhistory-collect-srcrevs -a
+ # all-poky-linux
+ SRCREV:pn-ca-certificates = "07de54fdcc5806bde549e1edf60738c6bccf50e8"
+ SRCREV:pn-update-rc.d = "8636cf478d426b568c1be11dbd9346f67e03adac"
+ # core2-64-poky-linux
+ SRCREV:pn-binutils = "87d4632d36323091e731eb07b8aa65f90293da66"
+ SRCREV:pn-btrfs-tools = "8ad326b2f28c044cb6ed9016d7c3285e23b673c8"
+ SRCREV_bzip2-tests:pn-bzip2 = "f9061c030a25de5b6829e1abf373057309c734c0"
+ SRCREV:pn-e2fsprogs = "02540dedd3ddc52c6ae8aaa8a95ce75c3f8be1c0"
+ SRCREV:pn-file = "504206e53a89fd6eed71aeaf878aa3512418eab1"
+ SRCREV_glibc:pn-glibc = "24962427071fa532c3c48c918e9d64d719cc8a6c"
+ SRCREV:pn-gnome-desktop-testing = "e346cd4ed2e2102c9b195b614f3c642d23f5f6e7"
+ SRCREV:pn-init-system-helpers = "dbd9197569c0935029acd5c9b02b84c68fd937ee"
+ SRCREV:pn-kmod = "b6ecfc916a17eab8f93be5b09f4e4f845aabd3d1"
+ SRCREV:pn-libnsl2 = "82245c0c58add79a8e34ab0917358217a70e5100"
+ SRCREV:pn-libseccomp = "57357d2741a3b3d3e8425889a6b79a130e0fa2f3"
+ SRCREV:pn-libxcrypt = "50cf2b6dd4fdf04309445f2eec8de7051d953abf"
+ SRCREV:pn-ncurses = "51d0fd9cc3edb975f04224f29f777f8f448e8ced"
+ SRCREV:pn-procps = "19a508ea121c0c4ac6d0224575a036de745eaaf8"
+ SRCREV:pn-psmisc = "5fab6b7ab385080f1db725d6803136ec1841a15f"
+ SRCREV:pn-ptest-runner = "bcb82804daa8f725b6add259dcef2067e61a75aa"
+ SRCREV:pn-shared-mime-info = "18e558fa1c8b90b86757ade09a4ba4d6a6cf8f70"
+ SRCREV:pn-zstd = "e47e674cd09583ff0503f0f6defd6d23d8b718d3"
+ # qemux86_64-poky-linux
+ SRCREV_machine:pn-linux-yocto = "20301aeb1a64164b72bc72af58802b315e025c9c"
+ SRCREV_meta:pn-linux-yocto = "2d38a472b21ae343707c8bd64ac68a9eaca066a0"
+ # x86_64-linux
+ SRCREV:pn-binutils-cross-x86_64 = "87d4632d36323091e731eb07b8aa65f90293da66"
+ SRCREV_glibc:pn-cross-localedef-native = "24962427071fa532c3c48c918e9d64d719cc8a6c"
+ SRCREV_localedef:pn-cross-localedef-native = "794da69788cbf9bf57b59a852f9f11307663fa87"
+ SRCREV:pn-debianutils-native = "de14223e5bffe15e374a441302c528ffc1cbed57"
+ SRCREV:pn-libmodulemd-native = "ee80309bc766d781a144e6879419b29f444d94eb"
+ SRCREV:pn-virglrenderer-native = "363915595e05fb252e70d6514be2f0c0b5ca312b"
+ SRCREV:pn-zstd-native = "e47e674cd09583ff0503f0f6defd6d23d8b718d3"
+
+.. note::
+
+ Here are some notes on using the ``buildhistory-collect-srcrevs`` command:
+
+ - By default, only values where the :term:`SRCREV` was not hardcoded
+ (usually when :term:`AUTOREV` is used) are reported. Use the ``-a``
+ option to see all :term:`SRCREV` values.
+
+ - The output statements might not have any effect if overrides are
+ applied elsewhere in the build system configuration. Use the
+ ``-f`` option to add the ``forcevariable`` override to each output
+ line if you need to work around this restriction.
+
+ - The script does apply special handling when building for multiple
+ machines. However, the script does place a comment before each set
+ of values that specifies which triplet to which they belong as
+ previously shown (e.g., ``i586-poky-linux``).
+
+Build History Image Information
+-------------------------------
+
+The files produced for each image are as follows:
+
+- ``image-files:`` A directory containing selected files from the root
+ filesystem. The files are defined by
+ :term:`BUILDHISTORY_IMAGE_FILES`.
+
+- ``build-id.txt:`` Human-readable information about the build
+ configuration and metadata source revisions. This file contains the
+ full build header as printed by BitBake.
+
+- ``*.dot:`` Dependency graphs for the image that are compatible with
+ ``graphviz``.
+
+- ``files-in-image.txt:`` A list of files in the image with
+ permissions, owner, group, size, and symlink information.
+
+- ``image-info.txt:`` A text file containing name-value pairs with
+ information about the image. See the following listing example for
+ more information.
+
+- ``installed-package-names.txt:`` A list of installed packages by name
+ only.
+
+- ``installed-package-sizes.txt:`` A list of installed packages ordered
+ by size.
+
+- ``installed-packages.txt:`` A list of installed packages with full
+ package filenames.
+
+.. note::
+
+ Installed package information is able to be gathered and produced
+ even if package management is disabled for the final image.
+
+Here is an example of ``image-info.txt``:
+
+.. code-block:: none
+
+ DISTRO = poky
+ DISTRO_VERSION = 3.4+snapshot-a0245d7be08f3d24ea1875e9f8872aa6bbff93be
+ USER_CLASSES = buildstats
+ IMAGE_CLASSES = qemuboot qemuboot license_image
+ IMAGE_FEATURES = debug-tweaks
+ IMAGE_LINGUAS =
+ IMAGE_INSTALL = packagegroup-core-boot speex speexdsp
+ BAD_RECOMMENDATIONS =
+ NO_RECOMMENDATIONS =
+ PACKAGE_EXCLUDE =
+ ROOTFS_POSTPROCESS_COMMAND = write_package_manifest; license_create_manifest; cve_check_write_rootfs_manifest; ssh_allow_empty_password; ssh_allow_root_login; postinst_enable_logging; rootfs_update_timestamp; write_image_test_data; empty_var_volatile; sort_passwd; rootfs_reproducible;
+ IMAGE_POSTPROCESS_COMMAND = buildhistory_get_imageinfo ;
+ IMAGESIZE = 9265
+
+Other than ``IMAGESIZE``,
+which is the total size of the files in the image in Kbytes, the
+name-value pairs are variables that may have influenced the content of
+the image. This information is often useful when you are trying to
+determine why a change in the package or file listings has occurred.
+
+Using Build History to Gather Image Information Only
+----------------------------------------------------
+
+As you can see, build history produces image information, including
+dependency graphs, so you can see why something was pulled into the
+image. If you are just interested in this information and not interested
+in collecting specific package or SDK information, you can enable
+writing only image information without any history by adding the
+following to your ``conf/local.conf`` file found in the
+:term:`Build Directory`::
+
+ INHERIT += "buildhistory"
+ BUILDHISTORY_COMMIT = "0"
+ BUILDHISTORY_FEATURES = "image"
+
+Here, you set the
+:term:`BUILDHISTORY_FEATURES`
+variable to use the image feature only.
+
+Build History SDK Information
+-----------------------------
+
+Build history collects similar information on the contents of SDKs (e.g.
+``bitbake -c populate_sdk imagename``) as compared to information it
+collects for images. Furthermore, this information differs depending on
+whether an extensible or standard SDK is being produced.
+
+The following list shows the files produced for SDKs:
+
+- ``files-in-sdk.txt:`` A list of files in the SDK with permissions,
+ owner, group, size, and symlink information. This list includes both
+ the host and target parts of the SDK.
+
+- ``sdk-info.txt:`` A text file containing name-value pairs with
+ information about the SDK. See the following listing example for more
+ information.
+
+- ``sstate-task-sizes.txt:`` A text file containing name-value pairs
+ with information about task group sizes (e.g. :ref:`ref-tasks-populate_sysroot`
+ tasks have a total size). The ``sstate-task-sizes.txt`` file exists
+ only when an extensible SDK is created.
+
+- ``sstate-package-sizes.txt:`` A text file containing name-value pairs
+ with information for the shared-state packages and sizes in the SDK.
+ The ``sstate-package-sizes.txt`` file exists only when an extensible
+ SDK is created.
+
+- ``sdk-files:`` A folder that contains copies of the files mentioned
+ in ``BUILDHISTORY_SDK_FILES`` if the files are present in the output.
+ Additionally, the default value of ``BUILDHISTORY_SDK_FILES`` is
+ specific to the extensible SDK although you can set it differently if
+ you would like to pull in specific files from the standard SDK.
+
+ The default files are ``conf/local.conf``, ``conf/bblayers.conf``,
+ ``conf/auto.conf``, ``conf/locked-sigs.inc``, and
+ ``conf/devtool.conf``. Thus, for an extensible SDK, these files get
+ copied into the ``sdk-files`` directory.
+
+- The following information appears under each of the ``host`` and
+ ``target`` directories for the portions of the SDK that run on the
+ host and on the target, respectively:
+
+ .. note::
+
+ The following files for the most part are empty when producing an
+ extensible SDK because this type of SDK is not constructed from
+ packages as is the standard SDK.
+
+ - ``depends.dot:`` Dependency graph for the SDK that is compatible
+ with ``graphviz``.
+
+ - ``installed-package-names.txt:`` A list of installed packages by
+ name only.
+
+ - ``installed-package-sizes.txt:`` A list of installed packages
+ ordered by size.
+
+ - ``installed-packages.txt:`` A list of installed packages with full
+ package filenames.
+
+Here is an example of ``sdk-info.txt``:
+
+.. code-block:: none
+
+ DISTRO = poky
+ DISTRO_VERSION = 1.3+snapshot-20130327
+ SDK_NAME = poky-glibc-i686-arm
+ SDK_VERSION = 1.3+snapshot
+ SDKMACHINE =
+ SDKIMAGE_FEATURES = dev-pkgs dbg-pkgs
+ BAD_RECOMMENDATIONS =
+ SDKSIZE = 352712
+
+Other than ``SDKSIZE``, which is
+the total size of the files in the SDK in Kbytes, the name-value pairs
+are variables that might have influenced the content of the SDK. This
+information is often useful when you are trying to determine why a
+change in the package or file listings has occurred.
+
+Examining Build History Information
+-----------------------------------
+
+You can examine build history output from the command line or from a web
+interface.
+
+To see any changes that have occurred (assuming you have
+:term:`BUILDHISTORY_COMMIT` = "1"),
+you can simply use any Git command that allows you to view the history
+of a repository. Here is one method::
+
+ $ git log -p
+
+You need to realize,
+however, that this method does show changes that are not significant
+(e.g. a package's size changing by a few bytes).
+
+There is a command-line tool called ``buildhistory-diff``, though,
+that queries the Git repository and prints just the differences that
+might be significant in human-readable form. Here is an example::
+
+ $ poky/poky/scripts/buildhistory-diff . HEAD^
+ Changes to images/qemux86_64/glibc/core-image-minimal (files-in-image.txt):
+ /etc/anotherpkg.conf was added
+ /sbin/anotherpkg was added
+ * (installed-package-names.txt):
+ * anotherpkg was added
+ Changes to images/qemux86_64/glibc/core-image-minimal (installed-package-names.txt):
+ anotherpkg was added
+ packages/qemux86_64-poky-linux/v86d: PACKAGES: added "v86d-extras"
+ * PR changed from "r0" to "r1"
+ * PV changed from "0.1.10" to "0.1.12"
+ packages/qemux86_64-poky-linux/v86d/v86d: PKGSIZE changed from 110579 to 144381 (+30%)
+ * PR changed from "r0" to "r1"
+ * PV changed from "0.1.10" to "0.1.12"
+
+.. note::
+
+ The ``buildhistory-diff`` tool requires the ``GitPython``
+ package. Be sure to install it using Pip3 as follows::
+
+ $ pip3 install GitPython --user
+
+
+ Alternatively, you can install ``python3-git`` using the appropriate
+ distribution package manager (e.g. ``apt``, ``dnf``, or ``zipper``).
+
+To see changes to the build history using a web interface, follow the
+instruction in the ``README`` file
+:yocto_git:`here </buildhistory-web/>`.
+
+Here is a sample screenshot of the interface:
+
+.. image:: figures/buildhistory-web.png
+ :width: 100%
+
diff --git a/documentation/dev-manual/building.rst b/documentation/dev-manual/building.rst
new file mode 100644
index 0000000000..fe502690dd
--- /dev/null
+++ b/documentation/dev-manual/building.rst
@@ -0,0 +1,942 @@
+.. SPDX-License-Identifier: CC-BY-SA-2.0-UK
+
+Building
+********
+
+This section describes various build procedures, such as the steps
+needed for a simple build, building a target for multiple configurations,
+generating an image for more than one machine, and so forth.
+
+Building a Simple Image
+=======================
+
+In the development environment, you need to build an image whenever you
+change hardware support, add or change system libraries, or add or
+change services that have dependencies. There are several methods that allow
+you to build an image within the Yocto Project. This section presents
+the basic steps you need to build a simple image using BitBake from a
+build host running Linux.
+
+.. note::
+
+ - For information on how to build an image using
+ :term:`Toaster`, see the
+ :doc:`/toaster-manual/index`.
+
+ - For information on how to use ``devtool`` to build images, see the
+ ":ref:`sdk-manual/extensible:using \`\`devtool\`\` in your sdk workflow`"
+ section in the Yocto Project Application Development and the
+ Extensible Software Development Kit (eSDK) manual.
+
+ - For a quick example on how to build an image using the
+ OpenEmbedded build system, see the
+ :doc:`/brief-yoctoprojectqs/index` document.
+
+ - You can also use the `Yocto Project BitBake
+ <https://marketplace.visualstudio.com/items?itemName=yocto-project.yocto-bitbake>`__
+ extension for Visual Studio Code to build images.
+
+The build process creates an entire Linux distribution from source and
+places it in your :term:`Build Directory` under ``tmp/deploy/images``. For
+detailed information on the build process using BitBake, see the
+":ref:`overview-manual/concepts:images`" section in the Yocto Project Overview
+and Concepts Manual.
+
+The following figure and list overviews the build process:
+
+.. image:: figures/bitbake-build-flow.png
+ :width: 100%
+
+#. *Set up Your Host Development System to Support Development Using the
+ Yocto Project*: See the ":doc:`start`" section for options on how to get a
+ build host ready to use the Yocto Project.
+
+#. *Initialize the Build Environment:* Initialize the build environment
+ by sourcing the build environment script (i.e.
+ :ref:`structure-core-script`)::
+
+ $ source oe-init-build-env [build_dir]
+
+ When you use the initialization script, the OpenEmbedded build system
+ uses ``build`` as the default :term:`Build Directory` in your current work
+ directory. You can use a `build_dir` argument with the script to
+ specify a different :term:`Build Directory`.
+
+ .. note::
+
+ A common practice is to use a different :term:`Build Directory` for
+ different targets; for example, ``~/build/x86`` for a ``qemux86``
+ target, and ``~/build/arm`` for a ``qemuarm`` target. In any
+ event, it's typically cleaner to locate the :term:`Build Directory`
+ somewhere outside of your source directory.
+
+#. *Make Sure Your* ``local.conf`` *File is Correct*: Ensure the
+ ``conf/local.conf`` configuration file, which is found in the
+ :term:`Build Directory`, is set up how you want it. This file defines many
+ aspects of the build environment including the target machine architecture
+ through the :term:`MACHINE` variable, the packaging format used during
+ the build (:term:`PACKAGE_CLASSES`), and a centralized tarball download
+ directory through the :term:`DL_DIR` variable.
+
+#. *Build the Image:* Build the image using the ``bitbake`` command::
+
+ $ bitbake target
+
+ .. note::
+
+ For information on BitBake, see the :doc:`bitbake:index`.
+
+ The target is the name of the recipe you want to build. Common
+ targets are the images in ``meta/recipes-core/images``,
+ ``meta/recipes-sato/images``, and so forth all found in the
+ :term:`Source Directory`. Alternatively, the target
+ can be the name of a recipe for a specific piece of software such as
+ BusyBox. For more details about the images the OpenEmbedded build
+ system supports, see the
+ ":ref:`ref-manual/images:Images`" chapter in the Yocto
+ Project Reference Manual.
+
+ As an example, the following command builds the
+ ``core-image-minimal`` image::
+
+ $ bitbake core-image-minimal
+
+ Once an
+ image has been built, it often needs to be installed. The images and
+ kernels built by the OpenEmbedded build system are placed in the
+ :term:`Build Directory` in ``tmp/deploy/images``. For information on how to
+ run pre-built images such as ``qemux86`` and ``qemuarm``, see the
+ :doc:`/sdk-manual/index` manual. For
+ information about how to install these images, see the documentation
+ for your particular board or machine.
+
+Building Images for Multiple Targets Using Multiple Configurations
+==================================================================
+
+You can use a single ``bitbake`` command to build multiple images or
+packages for different targets where each image or package requires a
+different configuration (multiple configuration builds). The builds, in
+this scenario, are sometimes referred to as "multiconfigs", and this
+section uses that term throughout.
+
+This section describes how to set up for multiple configuration builds
+and how to account for cross-build dependencies between the
+multiconfigs.
+
+Setting Up and Running a Multiple Configuration Build
+-----------------------------------------------------
+
+To accomplish a multiple configuration build, you must define each
+target's configuration separately using a parallel configuration file in
+the :term:`Build Directory` or configuration directory within a layer, and you
+must follow a required file hierarchy. Additionally, you must enable the
+multiple configuration builds in your ``local.conf`` file.
+
+Follow these steps to set up and execute multiple configuration builds:
+
+- *Create Separate Configuration Files*: You need to create a single
+ configuration file for each build target (each multiconfig).
+ The configuration definitions are implementation dependent but often
+ each configuration file will define the machine and the
+ temporary directory BitBake uses for the build. Whether the same
+ temporary directory (:term:`TMPDIR`) can be shared will depend on what is
+ similar and what is different between the configurations. Multiple MACHINE
+ targets can share the same (:term:`TMPDIR`) as long as the rest of the
+ configuration is the same, multiple :term:`DISTRO` settings would need separate
+ (:term:`TMPDIR`) directories.
+
+ For example, consider a scenario with two different multiconfigs for the same
+ :term:`MACHINE`: "qemux86" built
+ for two distributions such as "poky" and "poky-lsb". In this case,
+ you would need to use the different :term:`TMPDIR`.
+
+ Here is an example showing the minimal statements needed in a
+ configuration file for a "qemux86" target whose temporary build
+ directory is ``tmpmultix86``::
+
+ MACHINE = "qemux86"
+ TMPDIR = "${TOPDIR}/tmpmultix86"
+
+ The location for these multiconfig configuration files is specific.
+ They must reside in the current :term:`Build Directory` in a sub-directory of
+ ``conf`` named ``multiconfig`` or within a layer's ``conf`` directory
+ under a directory named ``multiconfig``. Here is an example that defines
+ two configuration files for the "x86" and "arm" multiconfigs:
+
+ .. image:: figures/multiconfig_files.png
+ :align: center
+ :width: 50%
+
+ The usual :term:`BBPATH` search path is used to locate multiconfig files in
+ a similar way to other conf files.
+
+- *Add the BitBake Multi-configuration Variable to the Local
+ Configuration File*: Use the
+ :term:`BBMULTICONFIG`
+ variable in your ``conf/local.conf`` configuration file to specify
+ each multiconfig. Continuing with the example from the previous
+ figure, the :term:`BBMULTICONFIG` variable needs to enable two
+ multiconfigs: "x86" and "arm" by specifying each configuration file::
+
+ BBMULTICONFIG = "x86 arm"
+
+ .. note::
+
+ A "default" configuration already exists by definition. This
+ configuration is named: "" (i.e. empty string) and is defined by
+ the variables coming from your ``local.conf``
+ file. Consequently, the previous example actually adds two
+ additional configurations to your build: "arm" and "x86" along
+ with "".
+
+- *Launch BitBake*: Use the following BitBake command form to launch
+ the multiple configuration build::
+
+ $ bitbake [mc:multiconfigname:]target [[[mc:multiconfigname:]target] ... ]
+
+ For the example in this section, the following command applies::
+
+ $ bitbake mc:x86:core-image-minimal mc:arm:core-image-sato mc::core-image-base
+
+ The previous BitBake command builds a ``core-image-minimal`` image
+ that is configured through the ``x86.conf`` configuration file, a
+ ``core-image-sato`` image that is configured through the ``arm.conf``
+ configuration file and a ``core-image-base`` that is configured
+ through your ``local.conf`` configuration file.
+
+.. note::
+
+ Support for multiple configuration builds in the Yocto Project &DISTRO;
+ (&DISTRO_NAME;) Release does not include Shared State (sstate)
+ optimizations. Consequently, if a build uses the same object twice
+ in, for example, two different :term:`TMPDIR`
+ directories, the build either loads from an existing sstate cache for
+ that build at the start or builds the object fresh.
+
+Enabling Multiple Configuration Build Dependencies
+--------------------------------------------------
+
+Sometimes dependencies can exist between targets (multiconfigs) in a
+multiple configuration build. For example, suppose that in order to
+build a ``core-image-sato`` image for an "x86" multiconfig, the root
+filesystem of an "arm" multiconfig must exist. This dependency is
+essentially that the
+:ref:`ref-tasks-image` task in the
+``core-image-sato`` recipe depends on the completion of the
+:ref:`ref-tasks-rootfs` task of the
+``core-image-minimal`` recipe.
+
+To enable dependencies in a multiple configuration build, you must
+declare the dependencies in the recipe using the following statement
+form::
+
+ task_or_package[mcdepends] = "mc:from_multiconfig:to_multiconfig:recipe_name:task_on_which_to_depend"
+
+To better show how to use this statement, consider the example scenario
+from the first paragraph of this section. The following statement needs
+to be added to the recipe that builds the ``core-image-sato`` image::
+
+ do_image[mcdepends] = "mc:x86:arm:core-image-minimal:do_rootfs"
+
+In this example, the `from_multiconfig` is "x86". The `to_multiconfig` is "arm". The
+task on which the :ref:`ref-tasks-image` task in the recipe depends is the
+:ref:`ref-tasks-rootfs` task from the ``core-image-minimal`` recipe associated
+with the "arm" multiconfig.
+
+Once you set up this dependency, you can build the "x86" multiconfig
+using a BitBake command as follows::
+
+ $ bitbake mc:x86:core-image-sato
+
+This command executes all the tasks needed to create the
+``core-image-sato`` image for the "x86" multiconfig. Because of the
+dependency, BitBake also executes through the :ref:`ref-tasks-rootfs` task for the
+"arm" multiconfig build.
+
+Having a recipe depend on the root filesystem of another build might not
+seem that useful. Consider this change to the statement in the
+``core-image-sato`` recipe::
+
+ do_image[mcdepends] = "mc:x86:arm:core-image-minimal:do_image"
+
+In this case, BitBake must
+create the ``core-image-minimal`` image for the "arm" build since the
+"x86" build depends on it.
+
+Because "x86" and "arm" are enabled for multiple configuration builds
+and have separate configuration files, BitBake places the artifacts for
+each build in the respective temporary build directories (i.e.
+:term:`TMPDIR`).
+
+Building an Initial RAM Filesystem (Initramfs) Image
+====================================================
+
+An initial RAM filesystem (:term:`Initramfs`) image provides a temporary root
+filesystem used for early system initialization, typically providing tools and
+loading modules needed to locate and mount the final root filesystem.
+
+Follow these steps to create an :term:`Initramfs` image:
+
+#. *Create the Initramfs Image Recipe:* You can reference the
+ ``core-image-minimal-initramfs.bb`` recipe found in the
+ ``meta/recipes-core`` directory of the :term:`Source Directory`
+ as an example from which to work.
+
+#. *Decide if You Need to Bundle the Initramfs Image Into the Kernel
+ Image:* If you want the :term:`Initramfs` image that is built to be bundled
+ in with the kernel image, set the :term:`INITRAMFS_IMAGE_BUNDLE`
+ variable to ``"1"`` in your ``local.conf`` configuration file and set the
+ :term:`INITRAMFS_IMAGE` variable in the recipe that builds the kernel image.
+
+ Setting the :term:`INITRAMFS_IMAGE_BUNDLE` flag causes the :term:`Initramfs`
+ image to be unpacked into the ``${B}/usr/`` directory. The unpacked
+ :term:`Initramfs` image is then passed to the kernel's ``Makefile`` using the
+ :term:`CONFIG_INITRAMFS_SOURCE` variable, allowing the :term:`Initramfs`
+ image to be built into the kernel normally.
+
+#. *Optionally Add Items to the Initramfs Image Through the Initramfs
+ Image Recipe:* If you add items to the :term:`Initramfs` image by way of its
+ recipe, you should use :term:`PACKAGE_INSTALL` rather than
+ :term:`IMAGE_INSTALL`. :term:`PACKAGE_INSTALL` gives more direct control of
+ what is added to the image as compared to the defaults you might not
+ necessarily want that are set by the :ref:`ref-classes-image`
+ or :ref:`ref-classes-core-image` classes.
+
+#. *Build the Kernel Image and the Initramfs Image:* Build your kernel
+ image using BitBake. Because the :term:`Initramfs` image recipe is a
+ dependency of the kernel image, the :term:`Initramfs` image is built as well
+ and bundled with the kernel image if you used the
+ :term:`INITRAMFS_IMAGE_BUNDLE` variable described earlier.
+
+Bundling an Initramfs Image From a Separate Multiconfig
+-------------------------------------------------------
+
+There may be a case where we want to build an :term:`Initramfs` image which does not
+inherit the same distro policy as our main image, for example, we may want
+our main image to use ``TCLIBC="glibc"``, but to use ``TCLIBC="musl"`` in our :term:`Initramfs`
+image to keep a smaller footprint. However, by performing the steps mentioned
+above the :term:`Initramfs` image will inherit ``TCLIBC="glibc"`` without allowing us
+to override it.
+
+To achieve this, you need to perform some additional steps:
+
+#. *Create a multiconfig for your Initramfs image:* You can perform the steps
+ on ":ref:`dev-manual/building:building images for multiple targets using multiple configurations`" to create a separate multiconfig.
+ For the sake of simplicity let's assume such multiconfig is called: ``initramfscfg.conf`` and
+ contains the variables::
+
+ TMPDIR="${TOPDIR}/tmp-initramfscfg"
+ TCLIBC="musl"
+
+#. *Set additional Initramfs variables on your main configuration:*
+ Additionally, on your main configuration (``local.conf``) you need to set the
+ variables::
+
+ INITRAMFS_MULTICONFIG = "initramfscfg"
+ INITRAMFS_DEPLOY_DIR_IMAGE = "${TOPDIR}/tmp-initramfscfg/deploy/images/${MACHINE}"
+
+ The variables :term:`INITRAMFS_MULTICONFIG` and :term:`INITRAMFS_DEPLOY_DIR_IMAGE`
+ are used to create a multiconfig dependency from the kernel to the :term:`INITRAMFS_IMAGE`
+ to be built coming from the ``initramfscfg`` multiconfig, and to let the
+ buildsystem know where the :term:`INITRAMFS_IMAGE` will be located.
+
+ Building a system with such configuration will build the kernel using the
+ main configuration but the :ref:`ref-tasks-bundle_initramfs` task will grab the
+ selected :term:`INITRAMFS_IMAGE` from :term:`INITRAMFS_DEPLOY_DIR_IMAGE`
+ instead, resulting in a musl based :term:`Initramfs` image bundled in the kernel
+ but a glibc based main image.
+
+ The same is applicable to avoid inheriting :term:`DISTRO_FEATURES` on :term:`INITRAMFS_IMAGE`
+ or to build a different :term:`DISTRO` for it such as ``poky-tiny``.
+
+
+Building a Tiny System
+======================
+
+Very small distributions have some significant advantages such as
+requiring less on-die or in-package memory (cheaper), better performance
+through efficient cache usage, lower power requirements due to less
+memory, faster boot times, and reduced development overhead. Some
+real-world examples where a very small distribution gives you distinct
+advantages are digital cameras, medical devices, and small headless
+systems.
+
+This section presents information that shows you how you can trim your
+distribution to even smaller sizes than the ``poky-tiny`` distribution,
+which is around 5 Mbytes, that can be built out-of-the-box using the
+Yocto Project.
+
+Tiny System Overview
+--------------------
+
+The following list presents the overall steps you need to consider and
+perform to create distributions with smaller root filesystems, achieve
+faster boot times, maintain your critical functionality, and avoid
+initial RAM disks:
+
+- :ref:`Determine your goals and guiding principles
+ <dev-manual/building:goals and guiding principles>`
+
+- :ref:`dev-manual/building:understand what contributes to your image size`
+
+- :ref:`Reduce the size of the root filesystem
+ <dev-manual/building:trim the root filesystem>`
+
+- :ref:`Reduce the size of the kernel <dev-manual/building:trim the kernel>`
+
+- :ref:`dev-manual/building:remove package management requirements`
+
+- :ref:`dev-manual/building:look for other ways to minimize size`
+
+- :ref:`dev-manual/building:iterate on the process`
+
+Goals and Guiding Principles
+----------------------------
+
+Before you can reach your destination, you need to know where you are
+going. Here is an example list that you can use as a guide when creating
+very small distributions:
+
+- Determine how much space you need (e.g. a kernel that is 1 Mbyte or
+ less and a root filesystem that is 3 Mbytes or less).
+
+- Find the areas that are currently taking 90% of the space and
+ concentrate on reducing those areas.
+
+- Do not create any difficult "hacks" to achieve your goals.
+
+- Leverage the device-specific options.
+
+- Work in a separate layer so that you keep changes isolated. For
+ information on how to create layers, see the
+ ":ref:`dev-manual/layers:understanding and creating layers`" section.
+
+Understand What Contributes to Your Image Size
+----------------------------------------------
+
+It is easiest to have something to start with when creating your own
+distribution. You can use the Yocto Project out-of-the-box to create the
+``poky-tiny`` distribution. Ultimately, you will want to make changes in
+your own distribution that are likely modeled after ``poky-tiny``.
+
+.. note::
+
+ To use ``poky-tiny`` in your build, set the :term:`DISTRO` variable in your
+ ``local.conf`` file to "poky-tiny" as described in the
+ ":ref:`dev-manual/custom-distribution:creating your own distribution`"
+ section.
+
+Understanding some memory concepts will help you reduce the system size.
+Memory consists of static, dynamic, and temporary memory. Static memory
+is the TEXT (code), DATA (initialized data in the code), and BSS
+(uninitialized data) sections. Dynamic memory represents memory that is
+allocated at runtime: stacks, hash tables, and so forth. Temporary
+memory is recovered after the boot process. This memory consists of
+memory used for decompressing the kernel and for the ``__init__``
+functions.
+
+To help you see where you currently are with kernel and root filesystem
+sizes, you can use two tools found in the :term:`Source Directory`
+in the
+``scripts/tiny/`` directory:
+
+- ``ksize.py``: Reports component sizes for the kernel build objects.
+
+- ``dirsize.py``: Reports component sizes for the root filesystem.
+
+This next tool and command help you organize configuration fragments and
+view file dependencies in a human-readable form:
+
+- ``merge_config.sh``: Helps you manage configuration files and
+ fragments within the kernel. With this tool, you can merge individual
+ configuration fragments together. The tool allows you to make
+ overrides and warns you of any missing configuration options. The
+ tool is ideal for allowing you to iterate on configurations, create
+ minimal configurations, and create configuration files for different
+ machines without having to duplicate your process.
+
+ The ``merge_config.sh`` script is part of the Linux Yocto kernel Git
+ repositories (i.e. ``linux-yocto-3.14``, ``linux-yocto-3.10``,
+ ``linux-yocto-3.8``, and so forth) in the ``scripts/kconfig``
+ directory.
+
+ For more information on configuration fragments, see the
+ ":ref:`kernel-dev/common:creating configuration fragments`"
+ section in the Yocto Project Linux Kernel Development Manual.
+
+- ``bitbake -u taskexp -g bitbake_target``: Using the BitBake command
+ with these options brings up a Dependency Explorer from which you can
+ view file dependencies. Understanding these dependencies allows you
+ to make informed decisions when cutting out various pieces of the
+ kernel and root filesystem.
+
+Trim the Root Filesystem
+------------------------
+
+The root filesystem is made up of packages for booting, libraries, and
+applications. To change things, you can configure how the packaging
+happens, which changes the way you build them. You can also modify the
+filesystem itself or select a different filesystem.
+
+First, find out what is hogging your root filesystem by running the
+``dirsize.py`` script from your root directory::
+
+ $ cd root-directory-of-image
+ $ dirsize.py 100000 > dirsize-100k.log
+ $ cat dirsize-100k.log
+
+You can apply a filter to the script to ignore files
+under a certain size. The previous example filters out any files below
+100 Kbytes. The sizes reported by the tool are uncompressed, and thus
+will be smaller by a relatively constant factor in a compressed root
+filesystem. When you examine your log file, you can focus on areas of
+the root filesystem that take up large amounts of memory.
+
+You need to be sure that what you eliminate does not cripple the
+functionality you need. One way to see how packages relate to each other
+is by using the Dependency Explorer UI with the BitBake command::
+
+ $ cd image-directory
+ $ bitbake -u taskexp -g image
+
+Use the interface to
+select potential packages you wish to eliminate and see their dependency
+relationships.
+
+When deciding how to reduce the size, get rid of packages that result in
+minimal impact on the feature set. For example, you might not need a VGA
+display. Or, you might be able to get by with ``devtmpfs`` and ``mdev``
+instead of ``udev``.
+
+Use your ``local.conf`` file to make changes. For example, to eliminate
+``udev`` and ``glib``, set the following in the local configuration
+file::
+
+ VIRTUAL-RUNTIME_dev_manager = ""
+
+Finally, you should consider exactly the type of root filesystem you
+need to meet your needs while also reducing its size. For example,
+consider ``cramfs``, ``squashfs``, ``ubifs``, ``ext2``, or an
+:term:`Initramfs` using ``initramfs``. Be aware that ``ext3`` requires a 1
+Mbyte journal. If you are okay with running read-only, you do not need
+this journal.
+
+.. note::
+
+ After each round of elimination, you need to rebuild your system and
+ then use the tools to see the effects of your reductions.
+
+Trim the Kernel
+---------------
+
+The kernel is built by including policies for hardware-independent
+aspects. What subsystems do you enable? For what architecture are you
+building? Which drivers do you build by default?
+
+.. note::
+
+ You can modify the kernel source if you want to help with boot time.
+
+Run the ``ksize.py`` script from the top-level Linux build directory to
+get an idea of what is making up the kernel::
+
+ $ cd top-level-linux-build-directory
+ $ ksize.py > ksize.log
+ $ cat ksize.log
+
+When you examine the log, you will see how much space is taken up with
+the built-in ``.o`` files for drivers, networking, core kernel files,
+filesystem, sound, and so forth. The sizes reported by the tool are
+uncompressed, and thus will be smaller by a relatively constant factor
+in a compressed kernel image. Look to reduce the areas that are large
+and taking up around the "90% rule."
+
+To examine, or drill down, into any particular area, use the ``-d``
+option with the script::
+
+ $ ksize.py -d > ksize.log
+
+Using this option
+breaks out the individual file information for each area of the kernel
+(e.g. drivers, networking, and so forth).
+
+Use your log file to see what you can eliminate from the kernel based on
+features you can let go. For example, if you are not going to need
+sound, you do not need any drivers that support sound.
+
+After figuring out what to eliminate, you need to reconfigure the kernel
+to reflect those changes during the next build. You could run
+``menuconfig`` and make all your changes at once. However, that makes it
+difficult to see the effects of your individual eliminations and also
+makes it difficult to replicate the changes for perhaps another target
+device. A better method is to start with no configurations using
+``allnoconfig``, create configuration fragments for individual changes,
+and then manage the fragments into a single configuration file using
+``merge_config.sh``. The tool makes it easy for you to iterate using the
+configuration change and build cycle.
+
+Each time you make configuration changes, you need to rebuild the kernel
+and check to see what impact your changes had on the overall size.
+
+Remove Package Management Requirements
+--------------------------------------
+
+Packaging requirements add size to the image. One way to reduce the size
+of the image is to remove all the packaging requirements from the image.
+This reduction includes both removing the package manager and its unique
+dependencies as well as removing the package management data itself.
+
+To eliminate all the packaging requirements for an image, be sure that
+"package-management" is not part of your
+:term:`IMAGE_FEATURES`
+statement for the image. When you remove this feature, you are removing
+the package manager as well as its dependencies from the root
+filesystem.
+
+Look for Other Ways to Minimize Size
+------------------------------------
+
+Depending on your particular circumstances, other areas that you can
+trim likely exist. The key to finding these areas is through tools and
+methods described here combined with experimentation and iteration. Here
+are a couple of areas to experiment with:
+
+- ``glibc``: In general, follow this process:
+
+ #. Remove ``glibc`` features from
+ :term:`DISTRO_FEATURES`
+ that you think you do not need.
+
+ #. Build your distribution.
+
+ #. If the build fails due to missing symbols in a package, determine
+ if you can reconfigure the package to not need those features. For
+ example, change the configuration to not support wide character
+ support as is done for ``ncurses``. Or, if support for those
+ characters is needed, determine what ``glibc`` features provide
+ the support and restore the configuration.
+
+ 4. Rebuild and repeat the process.
+
+- ``busybox``: For BusyBox, use a process similar as described for
+ ``glibc``. A difference is you will need to boot the resulting system
+ to see if you are able to do everything you expect from the running
+ system. You need to be sure to integrate configuration fragments into
+ Busybox because BusyBox handles its own core features and then allows
+ you to add configuration fragments on top.
+
+Iterate on the Process
+----------------------
+
+If you have not reached your goals on system size, you need to iterate
+on the process. The process is the same. Use the tools and see just what
+is taking up 90% of the root filesystem and the kernel. Decide what you
+can eliminate without limiting your device beyond what you need.
+
+Depending on your system, a good place to look might be Busybox, which
+provides a stripped down version of Unix tools in a single, executable
+file. You might be able to drop virtual terminal services or perhaps
+ipv6.
+
+Building Images for More than One Machine
+=========================================
+
+A common scenario developers face is creating images for several
+different machines that use the same software environment. In this
+situation, it is tempting to set the tunings and optimization flags for
+each build specifically for the targeted hardware (i.e. "maxing out" the
+tunings). Doing so can considerably add to build times and package feed
+maintenance collectively for the machines. For example, selecting tunes
+that are extremely specific to a CPU core used in a system might enable
+some micro optimizations in GCC for that particular system but would
+otherwise not gain you much of a performance difference across the other
+systems as compared to using a more general tuning across all the builds
+(e.g. setting :term:`DEFAULTTUNE`
+specifically for each machine's build). Rather than "max out" each
+build's tunings, you can take steps that cause the OpenEmbedded build
+system to reuse software across the various machines where it makes
+sense.
+
+If build speed and package feed maintenance are considerations, you
+should consider the points in this section that can help you optimize
+your tunings to best consider build times and package feed maintenance.
+
+- *Share the :term:`Build Directory`:* If at all possible, share the
+ :term:`TMPDIR` across builds. The Yocto Project supports switching between
+ different :term:`MACHINE` values in the same :term:`TMPDIR`. This practice
+ is well supported and regularly used by developers when building for
+ multiple machines. When you use the same :term:`TMPDIR` for multiple
+ machine builds, the OpenEmbedded build system can reuse the existing native
+ and often cross-recipes for multiple machines. Thus, build time decreases.
+
+ .. note::
+
+ If :term:`DISTRO` settings change or fundamental configuration settings
+ such as the filesystem layout, you need to work with a clean :term:`TMPDIR`.
+ Sharing :term:`TMPDIR` under these circumstances might work but since it is
+ not guaranteed, you should use a clean :term:`TMPDIR`.
+
+- *Enable the Appropriate Package Architecture:* By default, the
+ OpenEmbedded build system enables three levels of package
+ architectures: "all", "tune" or "package", and "machine". Any given
+ recipe usually selects one of these package architectures (types) for
+ its output. Depending for what a given recipe creates packages,
+ making sure you enable the appropriate package architecture can
+ directly impact the build time.
+
+ A recipe that just generates scripts can enable "all" architecture
+ because there are no binaries to build. To specifically enable "all"
+ architecture, be sure your recipe inherits the
+ :ref:`ref-classes-allarch` class.
+ This class is useful for "all" architectures because it configures
+ many variables so packages can be used across multiple architectures.
+
+ If your recipe needs to generate packages that are machine-specific
+ or when one of the build or runtime dependencies is already
+ machine-architecture dependent, which makes your recipe also
+ machine-architecture dependent, make sure your recipe enables the
+ "machine" package architecture through the
+ :term:`MACHINE_ARCH`
+ variable::
+
+ PACKAGE_ARCH = "${MACHINE_ARCH}"
+
+ When you do not
+ specifically enable a package architecture through the
+ :term:`PACKAGE_ARCH`, The
+ OpenEmbedded build system defaults to the
+ :term:`TUNE_PKGARCH` setting::
+
+ PACKAGE_ARCH = "${TUNE_PKGARCH}"
+
+- *Choose a Generic Tuning File if Possible:* Some tunes are more
+ generic and can run on multiple targets (e.g. an ``armv5`` set of
+ packages could run on ``armv6`` and ``armv7`` processors in most
+ cases). Similarly, ``i486`` binaries could work on ``i586`` and
+ higher processors. You should realize, however, that advances on
+ newer processor versions would not be used.
+
+ If you select the same tune for several different machines, the
+ OpenEmbedded build system reuses software previously built, thus
+ speeding up the overall build time. Realize that even though a new
+ sysroot for each machine is generated, the software is not recompiled
+ and only one package feed exists.
+
+- *Manage Granular Level Packaging:* Sometimes there are cases where
+ injecting another level of package architecture beyond the three
+ higher levels noted earlier can be useful. For example, consider how
+ NXP (formerly Freescale) allows for the easy reuse of binary packages
+ in their layer
+ :yocto_git:`meta-freescale </meta-freescale/>`.
+ In this example, the
+ :yocto_git:`fsl-dynamic-packagearch </meta-freescale/tree/classes/fsl-dynamic-packagearch.bbclass>`
+ class shares GPU packages for i.MX53 boards because all boards share
+ the AMD GPU. The i.MX6-based boards can do the same because all
+ boards share the Vivante GPU. This class inspects the BitBake
+ datastore to identify if the package provides or depends on one of
+ the sub-architecture values. If so, the class sets the
+ :term:`PACKAGE_ARCH` value
+ based on the ``MACHINE_SUBARCH`` value. If the package does not
+ provide or depend on one of the sub-architecture values but it
+ matches a value in the machine-specific filter, it sets
+ :term:`MACHINE_ARCH`. This
+ behavior reduces the number of packages built and saves build time by
+ reusing binaries.
+
+- *Use Tools to Debug Issues:* Sometimes you can run into situations
+ where software is being rebuilt when you think it should not be. For
+ example, the OpenEmbedded build system might not be using shared
+ state between machines when you think it should be. These types of
+ situations are usually due to references to machine-specific
+ variables such as :term:`MACHINE`,
+ :term:`SERIAL_CONSOLES`,
+ :term:`XSERVER`,
+ :term:`MACHINE_FEATURES`,
+ and so forth in code that is supposed to only be tune-specific or
+ when the recipe depends
+ (:term:`DEPENDS`,
+ :term:`RDEPENDS`,
+ :term:`RRECOMMENDS`,
+ :term:`RSUGGESTS`, and so forth)
+ on some other recipe that already has
+ :term:`PACKAGE_ARCH` defined
+ as "${MACHINE_ARCH}".
+
+ .. note::
+
+ Patches to fix any issues identified are most welcome as these
+ issues occasionally do occur.
+
+ For such cases, you can use some tools to help you sort out the
+ situation:
+
+ - ``state-diff-machines.sh``*:* You can find this tool in the
+ ``scripts`` directory of the Source Repositories. See the comments
+ in the script for information on how to use the tool.
+
+ - *BitBake's "-S printdiff" Option:* Using this option causes
+ BitBake to try to establish the most recent signature match
+ (e.g. in the shared state cache) and then compare matched signatures
+ to determine the stamps and delta where these two stamp trees diverge.
+
+Building Software from an External Source
+=========================================
+
+By default, the OpenEmbedded build system uses the :term:`Build Directory`
+when building source code. The build process involves fetching the source
+files, unpacking them, and then patching them if necessary before the build
+takes place.
+
+There are situations where you might want to build software from source
+files that are external to and thus outside of the OpenEmbedded build
+system. For example, suppose you have a project that includes a new BSP
+with a heavily customized kernel. And, you want to minimize exposing the
+build system to the development team so that they can focus on their
+project and maintain everyone's workflow as much as possible. In this
+case, you want a kernel source directory on the development machine
+where the development occurs. You want the recipe's
+:term:`SRC_URI` variable to point to
+the external directory and use it as is, not copy it.
+
+To build from software that comes from an external source, all you need to do
+is inherit the :ref:`ref-classes-externalsrc` class and then set
+the :term:`EXTERNALSRC` variable to point to your external source code. Here
+are the statements to put in your ``local.conf`` file::
+
+ INHERIT += "externalsrc"
+ EXTERNALSRC:pn-myrecipe = "path-to-your-source-tree"
+
+This next example shows how to accomplish the same thing by setting
+:term:`EXTERNALSRC` in the recipe itself or in the recipe's append file::
+
+ EXTERNALSRC = "path"
+ EXTERNALSRC_BUILD = "path"
+
+.. note::
+
+ In order for these settings to take effect, you must globally or
+ locally inherit the :ref:`ref-classes-externalsrc` class.
+
+By default, :ref:`ref-classes-externalsrc` builds the source code in a
+directory separate from the external source directory as specified by
+:term:`EXTERNALSRC`. If you need
+to have the source built in the same directory in which it resides, or
+some other nominated directory, you can set
+:term:`EXTERNALSRC_BUILD`
+to point to that directory::
+
+ EXTERNALSRC_BUILD:pn-myrecipe = "path-to-your-source-tree"
+
+Replicating a Build Offline
+===========================
+
+It can be useful to take a "snapshot" of upstream sources used in a
+build and then use that "snapshot" later to replicate the build offline.
+To do so, you need to first prepare and populate your downloads
+directory your "snapshot" of files. Once your downloads directory is
+ready, you can use it at any time and from any machine to replicate your
+build.
+
+Follow these steps to populate your Downloads directory:
+
+#. *Create a Clean Downloads Directory:* Start with an empty downloads
+ directory (:term:`DL_DIR`). You
+ start with an empty downloads directory by either removing the files
+ in the existing directory or by setting :term:`DL_DIR` to point to either
+ an empty location or one that does not yet exist.
+
+#. *Generate Tarballs of the Source Git Repositories:* Edit your
+ ``local.conf`` configuration file as follows::
+
+ DL_DIR = "/home/your-download-dir/"
+ BB_GENERATE_MIRROR_TARBALLS = "1"
+
+ During
+ the fetch process in the next step, BitBake gathers the source files
+ and creates tarballs in the directory pointed to by :term:`DL_DIR`. See
+ the
+ :term:`BB_GENERATE_MIRROR_TARBALLS`
+ variable for more information.
+
+#. *Populate Your Downloads Directory Without Building:* Use BitBake to
+ fetch your sources but inhibit the build::
+
+ $ bitbake target --runonly=fetch
+
+ The downloads directory (i.e. ``${DL_DIR}``) now has
+ a "snapshot" of the source files in the form of tarballs, which can
+ be used for the build.
+
+#. *Optionally Remove Any Git or other SCM Subdirectories From the
+ Downloads Directory:* If you want, you can clean up your downloads
+ directory by removing any Git or other Source Control Management
+ (SCM) subdirectories such as ``${DL_DIR}/git2/*``. The tarballs
+ already contain these subdirectories.
+
+Once your downloads directory has everything it needs regarding source
+files, you can create your "own-mirror" and build your target.
+Understand that you can use the files to build the target offline from
+any machine and at any time.
+
+Follow these steps to build your target using the files in the downloads
+directory:
+
+#. *Using Local Files Only:* Inside your ``local.conf`` file, add the
+ :term:`SOURCE_MIRROR_URL` variable, inherit the
+ :ref:`ref-classes-own-mirrors` class, and use the
+ :term:`BB_NO_NETWORK` variable to your ``local.conf``::
+
+ SOURCE_MIRROR_URL ?= "file:///home/your-download-dir/"
+ INHERIT += "own-mirrors"
+ BB_NO_NETWORK = "1"
+
+ The :term:`SOURCE_MIRROR_URL` and :ref:`ref-classes-own-mirrors`
+ class set up the system to use the downloads directory as your "own
+ mirror". Using the :term:`BB_NO_NETWORK` variable makes sure that
+ BitBake's fetching process in step 3 stays local, which means files
+ from your "own-mirror" are used.
+
+#. *Start With a Clean Build:* You can start with a clean build by
+ removing the ``${``\ :term:`TMPDIR`\ ``}`` directory or using a new
+ :term:`Build Directory`.
+
+#. *Build Your Target:* Use BitBake to build your target::
+
+ $ bitbake target
+
+ The build completes using the known local "snapshot" of source
+ files from your mirror. The resulting tarballs for your "snapshot" of
+ source files are in the downloads directory.
+
+ .. note::
+
+ The offline build does not work if recipes attempt to find the
+ latest version of software by setting
+ :term:`SRCREV` to
+ ``${``\ :term:`AUTOREV`\ ``}``::
+
+ SRCREV = "${AUTOREV}"
+
+ When a recipe sets :term:`SRCREV` to
+ ``${``\ :term:`AUTOREV`\ ``}``, the build system accesses the network in an
+ attempt to determine the latest version of software from the SCM.
+ Typically, recipes that use :term:`AUTOREV` are custom or modified
+ recipes. Recipes that reside in public repositories usually do not
+ use :term:`AUTOREV`.
+
+ If you do have recipes that use :term:`AUTOREV`, you can take steps to
+ still use the recipes in an offline build. Do the following:
+
+ #. Use a configuration generated by enabling :ref:`build
+ history <dev-manual/build-quality:maintaining build output quality>`.
+
+ #. Use the ``buildhistory-collect-srcrevs`` command to collect the
+ stored :term:`SRCREV` values from the build's history. For more
+ information on collecting these values, see the
+ ":ref:`dev-manual/build-quality:build history package information`"
+ section.
+
+ #. Once you have the correct source revisions, you can modify
+ those recipes to set :term:`SRCREV` to specific versions of the
+ software.
+
diff --git a/documentation/dev-manual/common-tasks.rst b/documentation/dev-manual/common-tasks.rst
deleted file mode 100644
index d5584d6c49..0000000000
--- a/documentation/dev-manual/common-tasks.rst
+++ /dev/null
@@ -1,11782 +0,0 @@
-.. SPDX-License-Identifier: CC-BY-SA-2.0-UK
-
-************
-Common Tasks
-************
-
-This chapter describes fundamental procedures such as creating layers,
-adding new software packages, extending or customizing images, porting
-work to new hardware (adding a new machine), and so forth. You will find
-that the procedures documented here occur often in the development cycle
-using the Yocto Project.
-
-Understanding and Creating Layers
-=================================
-
-The OpenEmbedded build system supports organizing
-:term:`Metadata` into multiple layers.
-Layers allow you to isolate different types of customizations from each
-other. For introductory information on the Yocto Project Layer Model,
-see the
-":ref:`overview-manual/yp-intro:the yocto project layer model`"
-section in the Yocto Project Overview and Concepts Manual.
-
-Creating Your Own Layer
------------------------
-
-.. note::
-
- It is very easy to create your own layers to use with the OpenEmbedded
- build system, as the Yocto Project ships with tools that speed up creating
- layers. This section describes the steps you perform by hand to create
- layers so that you can better understand them. For information about the
- layer-creation tools, see the
- ":ref:`bsp-guide/bsp:creating a new bsp layer using the \`\`bitbake-layers\`\` script`"
- section in the Yocto Project Board Support Package (BSP) Developer's
- Guide and the ":ref:`dev-manual/common-tasks:creating a general layer using the \`\`bitbake-layers\`\` script`"
- section further down in this manual.
-
-Follow these general steps to create your layer without using tools:
-
-1. *Check Existing Layers:* Before creating a new layer, you should be
- sure someone has not already created a layer containing the Metadata
- you need. You can see the :oe_layerindex:`OpenEmbedded Metadata Index <>`
- for a list of layers from the OpenEmbedded community that can be used in
- the Yocto Project. You could find a layer that is identical or close
- to what you need.
-
-2. *Create a Directory:* Create the directory for your layer. When you
- create the layer, be sure to create the directory in an area not
- associated with the Yocto Project :term:`Source Directory`
- (e.g. the cloned ``poky`` repository).
-
- While not strictly required, prepend the name of the directory with
- the string "meta-". For example::
-
- meta-mylayer
- meta-GUI_xyz
- meta-mymachine
-
- With rare exceptions, a layer's name follows this form::
-
- meta-root_name
-
- Following this layer naming convention can save
- you trouble later when tools, components, or variables "assume" your
- layer name begins with "meta-". A notable example is in configuration
- files as shown in the following step where layer names without the
- "meta-" string are appended to several variables used in the
- configuration.
-
-3. *Create a Layer Configuration File:* Inside your new layer folder,
- you need to create a ``conf/layer.conf`` file. It is easiest to take
- an existing layer configuration file and copy that to your layer's
- ``conf`` directory and then modify the file as needed.
-
- The ``meta-yocto-bsp/conf/layer.conf`` file in the Yocto Project
- :yocto_git:`Source Repositories </poky/tree/meta-yocto-bsp/conf>`
- demonstrates the required syntax. For your layer, you need to replace
- "yoctobsp" with a unique identifier for your layer (e.g. "machinexyz"
- for a layer named "meta-machinexyz")::
-
- # We have a conf and classes directory, add to BBPATH
- BBPATH .= ":${LAYERDIR}"
-
- # We have recipes-* directories, add to BBFILES
- BBFILES += "${LAYERDIR}/recipes-*/*/*.bb \
- ${LAYERDIR}/recipes-*/*/*.bbappend"
-
- BBFILE_COLLECTIONS += "yoctobsp"
- BBFILE_PATTERN_yoctobsp = "^${LAYERDIR}/"
- BBFILE_PRIORITY_yoctobsp = "5"
- LAYERVERSION_yoctobsp = "4"
- LAYERSERIES_COMPAT_yoctobsp = "dunfell"
-
- Following is an explanation of the layer configuration file:
-
- - :term:`BBPATH`: Adds the layer's
- root directory to BitBake's search path. Through the use of the
- :term:`BBPATH` variable, BitBake locates class files (``.bbclass``),
- configuration files, and files that are included with ``include``
- and ``require`` statements. For these cases, BitBake uses the
- first file that matches the name found in :term:`BBPATH`. This is
- similar to the way the ``PATH`` variable is used for binaries. It
- is recommended, therefore, that you use unique class and
- configuration filenames in your custom layer.
-
- - :term:`BBFILES`: Defines the
- location for all recipes in the layer.
-
- - :term:`BBFILE_COLLECTIONS`:
- Establishes the current layer through a unique identifier that is
- used throughout the OpenEmbedded build system to refer to the
- layer. In this example, the identifier "yoctobsp" is the
- representation for the container layer named "meta-yocto-bsp".
-
- - :term:`BBFILE_PATTERN`:
- Expands immediately during parsing to provide the directory of the
- layer.
-
- - :term:`BBFILE_PRIORITY`:
- Establishes a priority to use for recipes in the layer when the
- OpenEmbedded build finds recipes of the same name in different
- layers.
-
- - :term:`LAYERVERSION`:
- Establishes a version number for the layer. You can use this
- version number to specify this exact version of the layer as a
- dependency when using the
- :term:`LAYERDEPENDS`
- variable.
-
- - :term:`LAYERDEPENDS`:
- Lists all layers on which this layer depends (if any).
-
- - :term:`LAYERSERIES_COMPAT`:
- Lists the :yocto_wiki:`Yocto Project </Releases>`
- releases for which the current version is compatible. This
- variable is a good way to indicate if your particular layer is
- current.
-
-4. *Add Content:* Depending on the type of layer, add the content. If
- the layer adds support for a machine, add the machine configuration
- in a ``conf/machine/`` file within the layer. If the layer adds
- distro policy, add the distro configuration in a ``conf/distro/``
- file within the layer. If the layer introduces new recipes, put the
- recipes you need in ``recipes-*`` subdirectories within the layer.
-
- .. note::
-
- For an explanation of layer hierarchy that is compliant with the
- Yocto Project, see the ":ref:`bsp-guide/bsp:example filesystem layout`"
- section in the Yocto Project Board Support Package (BSP) Developer's Guide.
-
-5. *Optionally Test for Compatibility:* If you want permission to use
- the Yocto Project Compatibility logo with your layer or application
- that uses your layer, perform the steps to apply for compatibility.
- See the
- ":ref:`dev-manual/common-tasks:making sure your layer is compatible with yocto project`"
- section for more information.
-
-Following Best Practices When Creating Layers
----------------------------------------------
-
-To create layers that are easier to maintain and that will not impact
-builds for other machines, you should consider the information in the
-following list:
-
-- *Avoid "Overlaying" Entire Recipes from Other Layers in Your
- Configuration:* In other words, do not copy an entire recipe into
- your layer and then modify it. Rather, use an append file
- (``.bbappend``) to override only those parts of the original recipe
- you need to modify.
-
-- *Avoid Duplicating Include Files:* Use append files (``.bbappend``)
- for each recipe that uses an include file. Or, if you are introducing
- a new recipe that requires the included file, use the path relative
- to the original layer directory to refer to the file. For example,
- use ``require recipes-core/``\ `package`\ ``/``\ `file`\ ``.inc`` instead
- of ``require`` `file`\ ``.inc``. If you're finding you have to overlay
- the include file, it could indicate a deficiency in the include file
- in the layer to which it originally belongs. If this is the case, you
- should try to address that deficiency instead of overlaying the
- include file. For example, you could address this by getting the
- maintainer of the include file to add a variable or variables to make
- it easy to override the parts needing to be overridden.
-
-- *Structure Your Layers:* Proper use of overrides within append files
- and placement of machine-specific files within your layer can ensure
- that a build is not using the wrong Metadata and negatively impacting
- a build for a different machine. Following are some examples:
-
- - *Modify Variables to Support a Different Machine:* Suppose you
- have a layer named ``meta-one`` that adds support for building
- machine "one". To do so, you use an append file named
- ``base-files.bbappend`` and create a dependency on "foo" by
- altering the :term:`DEPENDS`
- variable::
-
- DEPENDS = "foo"
-
- The dependency is created during any
- build that includes the layer ``meta-one``. However, you might not
- want this dependency for all machines. For example, suppose you
- are building for machine "two" but your ``bblayers.conf`` file has
- the ``meta-one`` layer included. During the build, the
- ``base-files`` for machine "two" will also have the dependency on
- ``foo``.
-
- To make sure your changes apply only when building machine "one",
- use a machine override with the :term:`DEPENDS` statement::
-
- DEPENDS:one = "foo"
-
- You should follow the same strategy when using ``:append``
- and ``:prepend`` operations::
-
- DEPENDS:append:one = " foo"
- DEPENDS:prepend:one = "foo "
-
- As an actual example, here's a
- snippet from the generic kernel include file ``linux-yocto.inc``,
- wherein the kernel compile and link options are adjusted in the
- case of a subset of the supported architectures::
-
- DEPENDS:append:aarch64 = " libgcc"
- KERNEL_CC:append:aarch64 = " ${TOOLCHAIN_OPTIONS}"
- KERNEL_LD:append:aarch64 = " ${TOOLCHAIN_OPTIONS}"
-
- DEPENDS:append:nios2 = " libgcc"
- KERNEL_CC:append:nios2 = " ${TOOLCHAIN_OPTIONS}"
- KERNEL_LD:append:nios2 = " ${TOOLCHAIN_OPTIONS}"
-
- DEPENDS:append:arc = " libgcc"
- KERNEL_CC:append:arc = " ${TOOLCHAIN_OPTIONS}"
- KERNEL_LD:append:arc = " ${TOOLCHAIN_OPTIONS}"
-
- KERNEL_FEATURES:append:qemuall=" features/debug/printk.scc"
-
- - *Place Machine-Specific Files in Machine-Specific Locations:* When
- you have a base recipe, such as ``base-files.bb``, that contains a
- :term:`SRC_URI` statement to a
- file, you can use an append file to cause the build to use your
- own version of the file. For example, an append file in your layer
- at ``meta-one/recipes-core/base-files/base-files.bbappend`` could
- extend :term:`FILESPATH` using :term:`FILESEXTRAPATHS` as follows::
-
- FILESEXTRAPATHS:prepend := "${THISDIR}/${BPN}:"
-
- The build for machine "one" will pick up your machine-specific file as
- long as you have the file in
- ``meta-one/recipes-core/base-files/base-files/``. However, if you
- are building for a different machine and the ``bblayers.conf``
- file includes the ``meta-one`` layer and the location of your
- machine-specific file is the first location where that file is
- found according to :term:`FILESPATH`, builds for all machines will
- also use that machine-specific file.
-
- You can make sure that a machine-specific file is used for a
- particular machine by putting the file in a subdirectory specific
- to the machine. For example, rather than placing the file in
- ``meta-one/recipes-core/base-files/base-files/`` as shown above,
- put it in ``meta-one/recipes-core/base-files/base-files/one/``.
- Not only does this make sure the file is used only when building
- for machine "one", but the build process locates the file more
- quickly.
-
- In summary, you need to place all files referenced from
- :term:`SRC_URI` in a machine-specific subdirectory within the layer in
- order to restrict those files to machine-specific builds.
-
-- *Perform Steps to Apply for Yocto Project Compatibility:* If you want
- permission to use the Yocto Project Compatibility logo with your
- layer or application that uses your layer, perform the steps to apply
- for compatibility. See the
- ":ref:`dev-manual/common-tasks:making sure your layer is compatible with yocto project`"
- section for more information.
-
-- *Follow the Layer Naming Convention:* Store custom layers in a Git
- repository that use the ``meta-layer_name`` format.
-
-- *Group Your Layers Locally:* Clone your repository alongside other
- cloned ``meta`` directories from the :term:`Source Directory`.
-
-Making Sure Your Layer is Compatible With Yocto Project
--------------------------------------------------------
-
-When you create a layer used with the Yocto Project, it is advantageous
-to make sure that the layer interacts well with existing Yocto Project
-layers (i.e. the layer is compatible with the Yocto Project). Ensuring
-compatibility makes the layer easy to be consumed by others in the Yocto
-Project community and could allow you permission to use the Yocto
-Project Compatible Logo.
-
-.. note::
-
- Only Yocto Project member organizations are permitted to use the
- Yocto Project Compatible Logo. The logo is not available for general
- use. For information on how to become a Yocto Project member
- organization, see the :yocto_home:`Yocto Project Website <>`.
-
-The Yocto Project Compatibility Program consists of a layer application
-process that requests permission to use the Yocto Project Compatibility
-Logo for your layer and application. The process consists of two parts:
-
-1. Successfully passing a script (``yocto-check-layer``) that when run
- against your layer, tests it against constraints based on experiences
- of how layers have worked in the real world and where pitfalls have
- been found. Getting a "PASS" result from the script is required for
- successful compatibility registration.
-
-2. Completion of an application acceptance form, which you can find at
- :yocto_home:`/webform/yocto-project-compatible-registration`.
-
-To be granted permission to use the logo, you need to satisfy the
-following:
-
-- Be able to check the box indicating that you got a "PASS" when
- running the script against your layer.
-
-- Answer "Yes" to the questions on the form or have an acceptable
- explanation for any questions answered "No".
-
-- Be a Yocto Project Member Organization.
-
-The remainder of this section presents information on the registration
-form and on the ``yocto-check-layer`` script.
-
-Yocto Project Compatible Program Application
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-Use the form to apply for your layer's approval. Upon successful
-application, you can use the Yocto Project Compatibility Logo with your
-layer and the application that uses your layer.
-
-To access the form, use this link:
-:yocto_home:`/webform/yocto-project-compatible-registration`.
-Follow the instructions on the form to complete your application.
-
-The application consists of the following sections:
-
-- *Contact Information:* Provide your contact information as the fields
- require. Along with your information, provide the released versions
- of the Yocto Project for which your layer is compatible.
-
-- *Acceptance Criteria:* Provide "Yes" or "No" answers for each of the
- items in the checklist. There is space at the bottom of the form for
- any explanations for items for which you answered "No".
-
-- *Recommendations:* Provide answers for the questions regarding Linux
- kernel use and build success.
-
-``yocto-check-layer`` Script
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-The ``yocto-check-layer`` script provides you a way to assess how
-compatible your layer is with the Yocto Project. You should run this
-script prior to using the form to apply for compatibility as described
-in the previous section. You need to achieve a "PASS" result in order to
-have your application form successfully processed.
-
-The script divides tests into three areas: COMMON, BSP, and DISTRO. For
-example, given a distribution layer (DISTRO), the layer must pass both
-the COMMON and DISTRO related tests. Furthermore, if your layer is a BSP
-layer, the layer must pass the COMMON and BSP set of tests.
-
-To execute the script, enter the following commands from your build
-directory::
-
- $ source oe-init-build-env
- $ yocto-check-layer your_layer_directory
-
-Be sure to provide the actual directory for your
-layer as part of the command.
-
-Entering the command causes the script to determine the type of layer
-and then to execute a set of specific tests against the layer. The
-following list overviews the test:
-
-- ``common.test_readme``: Tests if a ``README`` file exists in the
- layer and the file is not empty.
-
-- ``common.test_parse``: Tests to make sure that BitBake can parse the
- files without error (i.e. ``bitbake -p``).
-
-- ``common.test_show_environment``: Tests that the global or per-recipe
- environment is in order without errors (i.e. ``bitbake -e``).
-
-- ``common.test_world``: Verifies that ``bitbake world`` works.
-
-- ``common.test_signatures``: Tests to be sure that BSP and DISTRO
- layers do not come with recipes that change signatures.
-
-- ``common.test_layerseries_compat``: Verifies layer compatibility is
- set properly.
-
-- ``bsp.test_bsp_defines_machines``: Tests if a BSP layer has machine
- configurations.
-
-- ``bsp.test_bsp_no_set_machine``: Tests to ensure a BSP layer does not
- set the machine when the layer is added.
-
-- ``bsp.test_machine_world``: Verifies that ``bitbake world`` works
- regardless of which machine is selected.
-
-- ``bsp.test_machine_signatures``: Verifies that building for a
- particular machine affects only the signature of tasks specific to
- that machine.
-
-- ``distro.test_distro_defines_distros``: Tests if a DISTRO layer has
- distro configurations.
-
-- ``distro.test_distro_no_set_distros``: Tests to ensure a DISTRO layer
- does not set the distribution when the layer is added.
-
-Enabling Your Layer
--------------------
-
-Before the OpenEmbedded build system can use your new layer, you need to
-enable it. To enable your layer, simply add your layer's path to the
-:term:`BBLAYERS` variable in your ``conf/bblayers.conf`` file, which is
-found in the :term:`Build Directory`.
-The following example shows how to enable your new
-``meta-mylayer`` layer (note how your new layer exists outside of
-the official ``poky`` repository which you would have checked out earlier)::
-
- # POKY_BBLAYERS_CONF_VERSION is increased each time build/conf/bblayers.conf
- # changes incompatibly
- POKY_BBLAYERS_CONF_VERSION = "2"
- BBPATH = "${TOPDIR}"
- BBFILES ?= ""
- BBLAYERS ?= " \
- /home/user/poky/meta \
- /home/user/poky/meta-poky \
- /home/user/poky/meta-yocto-bsp \
- /home/user/mystuff/meta-mylayer \
- "
-
-BitBake parses each ``conf/layer.conf`` file from the top down as
-specified in the :term:`BBLAYERS` variable within the ``conf/bblayers.conf``
-file. During the processing of each ``conf/layer.conf`` file, BitBake
-adds the recipes, classes and configurations contained within the
-particular layer to the source directory.
-
-Appending Other Layers Metadata With Your Layer
------------------------------------------------
-
-A recipe that appends Metadata to another recipe is called a BitBake
-append file. A BitBake append file uses the ``.bbappend`` file type
-suffix, while the corresponding recipe to which Metadata is being
-appended uses the ``.bb`` file type suffix.
-
-You can use a ``.bbappend`` file in your layer to make additions or
-changes to the content of another layer's recipe without having to copy
-the other layer's recipe into your layer. Your ``.bbappend`` file
-resides in your layer, while the main ``.bb`` recipe file to which you
-are appending Metadata resides in a different layer.
-
-Being able to append information to an existing recipe not only avoids
-duplication, but also automatically applies recipe changes from a
-different layer into your layer. If you were copying recipes, you would
-have to manually merge changes as they occur.
-
-When you create an append file, you must use the same root name as the
-corresponding recipe file. For example, the append file
-``someapp_3.1.bbappend`` must apply to ``someapp_3.1.bb``. This
-means the original recipe and append filenames are version
-number-specific. If the corresponding recipe is renamed to update to a
-newer version, you must also rename and possibly update the
-corresponding ``.bbappend`` as well. During the build process, BitBake
-displays an error on starting if it detects a ``.bbappend`` file that
-does not have a corresponding recipe with a matching name. See the
-:term:`BB_DANGLINGAPPENDS_WARNONLY`
-variable for information on how to handle this error.
-
-Overlaying a File Using Your Layer
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-As an example, consider the main formfactor recipe and a corresponding
-formfactor append file both from the :term:`Source Directory`.
-Here is the main
-formfactor recipe, which is named ``formfactor_0.0.bb`` and located in
-the "meta" layer at ``meta/recipes-bsp/formfactor``::
-
- SUMMARY = "Device formfactor information"
- DESCRIPTION = "A formfactor configuration file provides information about the \
- target hardware for which the image is being built and information that the \
- build system cannot obtain from other sources such as the kernel."
- SECTION = "base"
- LICENSE = "MIT"
- LIC_FILES_CHKSUM = "file://${COREBASE}/meta/COPYING.MIT;md5=3da9cfbcb788c80a0384361b4de20420"
- PR = "r45"
-
- SRC_URI = "file://config file://machconfig"
- S = "${WORKDIR}"
-
- PACKAGE_ARCH = "${MACHINE_ARCH}"
- INHIBIT_DEFAULT_DEPS = "1"
-
- do_install() {
- # Install file only if it has contents
- install -d ${D}${sysconfdir}/formfactor/
- install -m 0644 ${S}/config ${D}${sysconfdir}/formfactor/
- if [ -s "${S}/machconfig" ]; then
- install -m 0644 ${S}/machconfig ${D}${sysconfdir}/formfactor/
- fi
- }
-
-In the main recipe, note the :term:`SRC_URI`
-variable, which tells the OpenEmbedded build system where to find files
-during the build.
-
-Following is the append file, which is named ``formfactor_0.0.bbappend``
-and is from the Raspberry Pi BSP Layer named ``meta-raspberrypi``. The
-file is in the layer at ``recipes-bsp/formfactor``::
-
- FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"
-
-By default, the build system uses the
-:term:`FILESPATH` variable to
-locate files. This append file extends the locations by setting the
-:term:`FILESEXTRAPATHS`
-variable. Setting this variable in the ``.bbappend`` file is the most
-reliable and recommended method for adding directories to the search
-path used by the build system to find files.
-
-The statement in this example extends the directories to include
-``${``\ :term:`THISDIR`\ ``}/${``\ :term:`PN`\ ``}``,
-which resolves to a directory named ``formfactor`` in the same directory
-in which the append file resides (i.e.
-``meta-raspberrypi/recipes-bsp/formfactor``. This implies that you must
-have the supporting directory structure set up that will contain any
-files or patches you will be including from the layer.
-
-Using the immediate expansion assignment operator ``:=`` is important
-because of the reference to :term:`THISDIR`. The trailing colon character is
-important as it ensures that items in the list remain colon-separated.
-
-.. note::
-
- BitBake automatically defines the :term:`THISDIR` variable. You should
- never set this variable yourself. Using ":prepend" as part of the
- :term:`FILESEXTRAPATHS` ensures your path will be searched prior to other
- paths in the final list.
-
- Also, not all append files add extra files. Many append files simply
- allow to add build options (e.g. ``systemd``). For these cases, your
- append file would not even use the :term:`FILESEXTRAPATHS` statement.
-
-The end result of this ``.bbappend`` file is that on a Raspberry Pi, where
-``rpi`` will exist in the list of :term:`OVERRIDES`, the file
-``meta-raspberrypi/recipes-bsp/formfactor/formfactor/rpi/machconfig`` will be
-used during :ref:`ref-tasks-fetch` and the test for a non-zero file size in
-:ref:`ref-tasks-install` will return true, and the file will be installed.
-
-Installing Additional Files Using Your Layer
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-As another example, consider the main ``xserver-xf86-config`` recipe and a
-corresponding ``xserver-xf86-config`` append file both from the :term:`Source
-Directory`. Here is the main ``xserver-xf86-config`` recipe, which is named
-``xserver-xf86-config_0.1.bb`` and located in the "meta" layer at
-``meta/recipes-graphics/xorg-xserver``::
-
- SUMMARY = "X.Org X server configuration file"
- HOMEPAGE = "http://www.x.org"
- SECTION = "x11/base"
- LICENSE = "MIT"
- LIC_FILES_CHKSUM = "file://${COREBASE}/meta/COPYING.MIT;md5=3da9cfbcb788c80a0384361b4de20420"
- PR = "r33"
-
- SRC_URI = "file://xorg.conf"
-
- S = "${WORKDIR}"
-
- CONFFILES:${PN} = "${sysconfdir}/X11/xorg.conf"
-
- PACKAGE_ARCH = "${MACHINE_ARCH}"
- ALLOW_EMPTY:${PN} = "1"
-
- do_install () {
- if test -s ${WORKDIR}/xorg.conf; then
- install -d ${D}/${sysconfdir}/X11
- install -m 0644 ${WORKDIR}/xorg.conf ${D}/${sysconfdir}/X11/
- fi
- }
-
-Following is the append file, which is named ``xserver-xf86-config_%.bbappend``
-and is from the Raspberry Pi BSP Layer named ``meta-raspberrypi``. The
-file is in the layer at ``recipes-graphics/xorg-xserver``::
-
- FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"
-
- SRC_URI:append:rpi = " \
- file://xorg.conf.d/98-pitft.conf \
- file://xorg.conf.d/99-calibration.conf \
- "
- do_install:append:rpi () {
- PITFT="${@bb.utils.contains("MACHINE_FEATURES", "pitft", "1", "0", d)}"
- if [ "${PITFT}" = "1" ]; then
- install -d ${D}/${sysconfdir}/X11/xorg.conf.d/
- install -m 0644 ${WORKDIR}/xorg.conf.d/98-pitft.conf ${D}/${sysconfdir}/X11/xorg.conf.d/
- install -m 0644 ${WORKDIR}/xorg.conf.d/99-calibration.conf ${D}/${sysconfdir}/X11/xorg.conf.d/
- fi
- }
-
- FILES:${PN}:append:rpi = " ${sysconfdir}/X11/xorg.conf.d/*"
-
-Building off of the previous example, we once again are setting the
-:term:`FILESEXTRAPATHS` variable. In this case we are also using
-:term:`SRC_URI` to list additional source files to use when ``rpi`` is found in
-the list of :term:`OVERRIDES`. The :ref:`ref-tasks-install` task will then perform a
-check for an additional :term:`MACHINE_FEATURES` that if set will cause these
-additional files to be installed. These additional files are listed in
-:term:`FILES` so that they will be packaged.
-
-Prioritizing Your Layer
------------------------
-
-Each layer is assigned a priority value. Priority values control which
-layer takes precedence if there are recipe files with the same name in
-multiple layers. For these cases, the recipe file from the layer with a
-higher priority number takes precedence. Priority values also affect the
-order in which multiple ``.bbappend`` files for the same recipe are
-applied. You can either specify the priority manually, or allow the
-build system to calculate it based on the layer's dependencies.
-
-To specify the layer's priority manually, use the
-:term:`BBFILE_PRIORITY`
-variable and append the layer's root name::
-
- BBFILE_PRIORITY_mylayer = "1"
-
-.. note::
-
- It is possible for a recipe with a lower version number
- :term:`PV` in a layer that has a higher
- priority to take precedence.
-
- Also, the layer priority does not currently affect the precedence
- order of ``.conf`` or ``.bbclass`` files. Future versions of BitBake
- might address this.
-
-Managing Layers
----------------
-
-You can use the BitBake layer management tool ``bitbake-layers`` to
-provide a view into the structure of recipes across a multi-layer
-project. Being able to generate output that reports on configured layers
-with their paths and priorities and on ``.bbappend`` files and their
-applicable recipes can help to reveal potential problems.
-
-For help on the BitBake layer management tool, use the following
-command::
-
- $ bitbake-layers --help
- NOTE: Starting bitbake server...
- usage: bitbake-layers [-d] [-q] [-F] [--color COLOR] [-h] <subcommand> ...
-
- BitBake layers utility
-
- optional arguments:
- -d, --debug Enable debug output
- -q, --quiet Print only errors
- -F, --force Force add without recipe parse verification
- --color COLOR Colorize output (where COLOR is auto, always, never)
- -h, --help show this help message and exit
-
- subcommands:
- <subcommand>
- layerindex-fetch Fetches a layer from a layer index along with its
- dependent layers, and adds them to conf/bblayers.conf.
- layerindex-show-depends
- Find layer dependencies from layer index.
- add-layer Add one or more layers to bblayers.conf.
- remove-layer Remove one or more layers from bblayers.conf.
- flatten flatten layer configuration into a separate output
- directory.
- show-layers show current configured layers.
- show-overlayed list overlayed recipes (where the same recipe exists
- in another layer)
- show-recipes list available recipes, showing the layer they are
- provided by
- show-appends list bbappend files and recipe files they apply to
- show-cross-depends Show dependencies between recipes that cross layer
- boundaries.
- create-layer Create a basic layer
-
- Use bitbake-layers <subcommand> --help to get help on a specific command
-
-The following list describes the available commands:
-
-- ``help:`` Displays general help or help on a specified command.
-
-- ``show-layers:`` Shows the current configured layers.
-
-- ``show-overlayed:`` Lists overlayed recipes. A recipe is overlayed
- when a recipe with the same name exists in another layer that has a
- higher layer priority.
-
-- ``show-recipes:`` Lists available recipes and the layers that
- provide them.
-
-- ``show-appends:`` Lists ``.bbappend`` files and the recipe files to
- which they apply.
-
-- ``show-cross-depends:`` Lists dependency relationships between
- recipes that cross layer boundaries.
-
-- ``add-layer:`` Adds a layer to ``bblayers.conf``.
-
-- ``remove-layer:`` Removes a layer from ``bblayers.conf``
-
-- ``flatten:`` Flattens the layer configuration into a separate
- output directory. Flattening your layer configuration builds a
- "flattened" directory that contains the contents of all layers, with
- any overlayed recipes removed and any ``.bbappend`` files appended to
- the corresponding recipes. You might have to perform some manual
- cleanup of the flattened layer as follows:
-
- - Non-recipe files (such as patches) are overwritten. The flatten
- command shows a warning for these files.
-
- - Anything beyond the normal layer setup has been added to the
- ``layer.conf`` file. Only the lowest priority layer's
- ``layer.conf`` is used.
-
- - Overridden and appended items from ``.bbappend`` files need to be
- cleaned up. The contents of each ``.bbappend`` end up in the
- flattened recipe. However, if there are appended or changed
- variable values, you need to tidy these up yourself. Consider the
- following example. Here, the ``bitbake-layers`` command adds the
- line ``#### bbappended ...`` so that you know where the following
- lines originate::
-
- ...
- DESCRIPTION = "A useful utility"
- ...
- EXTRA_OECONF = "--enable-something"
- ...
-
- #### bbappended from meta-anotherlayer ####
-
- DESCRIPTION = "Customized utility"
- EXTRA_OECONF += "--enable-somethingelse"
-
-
- Ideally, you would tidy up these utilities as follows::
-
- ...
- DESCRIPTION = "Customized utility"
- ...
- EXTRA_OECONF = "--enable-something --enable-somethingelse"
- ...
-
-- ``layerindex-fetch``: Fetches a layer from a layer index, along
- with its dependent layers, and adds the layers to the
- ``conf/bblayers.conf`` file.
-
-- ``layerindex-show-depends``: Finds layer dependencies from the
- layer index.
-
-- ``create-layer``: Creates a basic layer.
-
-Creating a General Layer Using the ``bitbake-layers`` Script
-------------------------------------------------------------
-
-The ``bitbake-layers`` script with the ``create-layer`` subcommand
-simplifies creating a new general layer.
-
-.. note::
-
- - For information on BSP layers, see the ":ref:`bsp-guide/bsp:bsp layers`"
- section in the Yocto
- Project Board Specific (BSP) Developer's Guide.
-
- - In order to use a layer with the OpenEmbedded build system, you
- need to add the layer to your ``bblayers.conf`` configuration
- file. See the ":ref:`dev-manual/common-tasks:adding a layer using the \`\`bitbake-layers\`\` script`"
- section for more information.
-
-The default mode of the script's operation with this subcommand is to
-create a layer with the following:
-
-- A layer priority of 6.
-
-- A ``conf`` subdirectory that contains a ``layer.conf`` file.
-
-- A ``recipes-example`` subdirectory that contains a further
- subdirectory named ``example``, which contains an ``example.bb``
- recipe file.
-
-- A ``COPYING.MIT``, which is the license statement for the layer. The
- script assumes you want to use the MIT license, which is typical for
- most layers, for the contents of the layer itself.
-
-- A ``README`` file, which is a file describing the contents of your
- new layer.
-
-In its simplest form, you can use the following command form to create a
-layer. The command creates a layer whose name corresponds to
-"your_layer_name" in the current directory::
-
- $ bitbake-layers create-layer your_layer_name
-
-As an example, the following command creates a layer named ``meta-scottrif``
-in your home directory::
-
- $ cd /usr/home
- $ bitbake-layers create-layer meta-scottrif
- NOTE: Starting bitbake server...
- Add your new layer with 'bitbake-layers add-layer meta-scottrif'
-
-If you want to set the priority of the layer to other than the default
-value of "6", you can either use the ``--priority`` option or you
-can edit the
-:term:`BBFILE_PRIORITY` value
-in the ``conf/layer.conf`` after the script creates it. Furthermore, if
-you want to give the example recipe file some name other than the
-default, you can use the ``--example-recipe-name`` option.
-
-The easiest way to see how the ``bitbake-layers create-layer`` command
-works is to experiment with the script. You can also read the usage
-information by entering the following::
-
- $ bitbake-layers create-layer --help
- NOTE: Starting bitbake server...
- usage: bitbake-layers create-layer [-h] [--priority PRIORITY]
- [--example-recipe-name EXAMPLERECIPE]
- layerdir
-
- Create a basic layer
-
- positional arguments:
- layerdir Layer directory to create
-
- optional arguments:
- -h, --help show this help message and exit
- --priority PRIORITY, -p PRIORITY
- Layer directory to create
- --example-recipe-name EXAMPLERECIPE, -e EXAMPLERECIPE
- Filename of the example recipe
-
-Adding a Layer Using the ``bitbake-layers`` Script
---------------------------------------------------
-
-Once you create your general layer, you must add it to your
-``bblayers.conf`` file. Adding the layer to this configuration file
-makes the OpenEmbedded build system aware of your layer so that it can
-search it for metadata.
-
-Add your layer by using the ``bitbake-layers add-layer`` command::
-
- $ bitbake-layers add-layer your_layer_name
-
-Here is an example that adds a
-layer named ``meta-scottrif`` to the configuration file. Following the
-command that adds the layer is another ``bitbake-layers`` command that
-shows the layers that are in your ``bblayers.conf`` file::
-
- $ bitbake-layers add-layer meta-scottrif
- NOTE: Starting bitbake server...
- Parsing recipes: 100% |##########################################################| Time: 0:00:49
- Parsing of 1441 .bb files complete (0 cached, 1441 parsed). 2055 targets, 56 skipped, 0 masked, 0 errors.
- $ bitbake-layers show-layers
- NOTE: Starting bitbake server...
- layer path priority
- ==========================================================================
- meta /home/scottrif/poky/meta 5
- meta-poky /home/scottrif/poky/meta-poky 5
- meta-yocto-bsp /home/scottrif/poky/meta-yocto-bsp 5
- workspace /home/scottrif/poky/build/workspace 99
- meta-scottrif /home/scottrif/poky/build/meta-scottrif 6
-
-
-Adding the layer to this file
-enables the build system to locate the layer during the build.
-
-.. note::
-
- During a build, the OpenEmbedded build system looks in the layers
- from the top of the list down to the bottom in that order.
-
-Customizing Images
-==================
-
-You can customize images to satisfy particular requirements. This
-section describes several methods and provides guidelines for each.
-
-Customizing Images Using ``local.conf``
----------------------------------------
-
-Probably the easiest way to customize an image is to add a package by
-way of the ``local.conf`` configuration file. Because it is limited to
-local use, this method generally only allows you to add packages and is
-not as flexible as creating your own customized image. When you add
-packages using local variables this way, you need to realize that these
-variable changes are in effect for every build and consequently affect
-all images, which might not be what you require.
-
-To add a package to your image using the local configuration file, use
-the :term:`IMAGE_INSTALL` variable with the ``:append`` operator::
-
- IMAGE_INSTALL:append = " strace"
-
-Use of the syntax is important; specifically, the leading space
-after the opening quote and before the package name, which is
-``strace`` in this example. This space is required since the ``:append``
-operator does not add the space.
-
-Furthermore, you must use ``:append`` instead of the ``+=`` operator if
-you want to avoid ordering issues. The reason for this is because doing
-so unconditionally appends to the variable and avoids ordering problems
-due to the variable being set in image recipes and ``.bbclass`` files
-with operators like ``?=``. Using ``:append`` ensures the operation
-takes effect.
-
-As shown in its simplest use, ``IMAGE_INSTALL:append`` affects all
-images. It is possible to extend the syntax so that the variable applies
-to a specific image only. Here is an example::
-
- IMAGE_INSTALL:append:pn-core-image-minimal = " strace"
-
-This example adds ``strace`` to the ``core-image-minimal`` image only.
-
-You can add packages using a similar approach through the
-:term:`CORE_IMAGE_EXTRA_INSTALL` variable. If you use this variable, only
-``core-image-*`` images are affected.
-
-Customizing Images Using Custom ``IMAGE_FEATURES`` and ``EXTRA_IMAGE_FEATURES``
--------------------------------------------------------------------------------
-
-Another method for customizing your image is to enable or disable
-high-level image features by using the
-:term:`IMAGE_FEATURES` and
-:term:`EXTRA_IMAGE_FEATURES`
-variables. Although the functions for both variables are nearly
-equivalent, best practices dictate using :term:`IMAGE_FEATURES` from within
-a recipe and using :term:`EXTRA_IMAGE_FEATURES` from within your
-``local.conf`` file, which is found in the
-:term:`Build Directory`.
-
-To understand how these features work, the best reference is
-:ref:`meta/classes/image.bbclass <ref-classes-image>`.
-This class lists out the available
-:term:`IMAGE_FEATURES` of which most map to package groups while some, such
-as ``debug-tweaks`` and ``read-only-rootfs``, resolve as general
-configuration settings.
-
-In summary, the file looks at the contents of the :term:`IMAGE_FEATURES`
-variable and then maps or configures the feature accordingly. Based on
-this information, the build system automatically adds the appropriate
-packages or configurations to the
-:term:`IMAGE_INSTALL` variable.
-Effectively, you are enabling extra features by extending the class or
-creating a custom class for use with specialized image ``.bb`` files.
-
-Use the :term:`EXTRA_IMAGE_FEATURES` variable from within your local
-configuration file. Using a separate area from which to enable features
-with this variable helps you avoid overwriting the features in the image
-recipe that are enabled with :term:`IMAGE_FEATURES`. The value of
-:term:`EXTRA_IMAGE_FEATURES` is added to :term:`IMAGE_FEATURES` within
-``meta/conf/bitbake.conf``.
-
-To illustrate how you can use these variables to modify your image,
-consider an example that selects the SSH server. The Yocto Project ships
-with two SSH servers you can use with your images: Dropbear and OpenSSH.
-Dropbear is a minimal SSH server appropriate for resource-constrained
-environments, while OpenSSH is a well-known standard SSH server
-implementation. By default, the ``core-image-sato`` image is configured
-to use Dropbear. The ``core-image-full-cmdline`` and ``core-image-lsb``
-images both include OpenSSH. The ``core-image-minimal`` image does not
-contain an SSH server.
-
-You can customize your image and change these defaults. Edit the
-:term:`IMAGE_FEATURES` variable in your recipe or use the
-:term:`EXTRA_IMAGE_FEATURES` in your ``local.conf`` file so that it
-configures the image you are working with to include
-``ssh-server-dropbear`` or ``ssh-server-openssh``.
-
-.. note::
-
- See the ":ref:`ref-manual/features:image features`" section in the Yocto
- Project Reference Manual for a complete list of image features that ship
- with the Yocto Project.
-
-Customizing Images Using Custom .bb Files
------------------------------------------
-
-You can also customize an image by creating a custom recipe that defines
-additional software as part of the image. The following example shows
-the form for the two lines you need::
-
- IMAGE_INSTALL = "packagegroup-core-x11-base package1 package2"
- inherit core-image
-
-Defining the software using a custom recipe gives you total control over
-the contents of the image. It is important to use the correct names of
-packages in the :term:`IMAGE_INSTALL` variable. You must use the
-OpenEmbedded notation and not the Debian notation for the names (e.g.
-``glibc-dev`` instead of ``libc6-dev``).
-
-The other method for creating a custom image is to base it on an
-existing image. For example, if you want to create an image based on
-``core-image-sato`` but add the additional package ``strace`` to the
-image, copy the ``meta/recipes-sato/images/core-image-sato.bb`` to a new
-``.bb`` and add the following line to the end of the copy::
-
- IMAGE_INSTALL += "strace"
-
-Customizing Images Using Custom Package Groups
-----------------------------------------------
-
-For complex custom images, the best approach for customizing an image is
-to create a custom package group recipe that is used to build the image
-or images. A good example of a package group recipe is
-``meta/recipes-core/packagegroups/packagegroup-base.bb``.
-
-If you examine that recipe, you see that the :term:`PACKAGES` variable lists
-the package group packages to produce. The ``inherit packagegroup``
-statement sets appropriate default values and automatically adds
-``-dev``, ``-dbg``, and ``-ptest`` complementary packages for each
-package specified in the :term:`PACKAGES` statement.
-
-.. note::
-
- The ``inherit packagegroup`` line should be located near the top of the
- recipe, certainly before the :term:`PACKAGES` statement.
-
-For each package you specify in :term:`PACKAGES`, you can use :term:`RDEPENDS`
-and :term:`RRECOMMENDS` entries to provide a list of packages the parent
-task package should contain. You can see examples of these further down
-in the ``packagegroup-base.bb`` recipe.
-
-Here is a short, fabricated example showing the same basic pieces for a
-hypothetical packagegroup defined in ``packagegroup-custom.bb``, where
-the variable :term:`PN` is the standard way to abbreviate the reference to
-the full packagegroup name ``packagegroup-custom``::
-
- DESCRIPTION = "My Custom Package Groups"
-
- inherit packagegroup
-
- PACKAGES = "\
- ${PN}-apps \
- ${PN}-tools \
- "
-
- RDEPENDS:${PN}-apps = "\
- dropbear \
- portmap \
- psplash"
-
- RDEPENDS:${PN}-tools = "\
- oprofile \
- oprofileui-server \
- lttng-tools"
-
- RRECOMMENDS:${PN}-tools = "\
- kernel-module-oprofile"
-
-In the previous example, two package group packages are created with
-their dependencies and their recommended package dependencies listed:
-``packagegroup-custom-apps``, and ``packagegroup-custom-tools``. To
-build an image using these package group packages, you need to add
-``packagegroup-custom-apps`` and/or ``packagegroup-custom-tools`` to
-:term:`IMAGE_INSTALL`. For other forms of image dependencies see the other
-areas of this section.
-
-Customizing an Image Hostname
------------------------------
-
-By default, the configured hostname (i.e. ``/etc/hostname``) in an image
-is the same as the machine name. For example, if
-:term:`MACHINE` equals "qemux86", the
-configured hostname written to ``/etc/hostname`` is "qemux86".
-
-You can customize this name by altering the value of the "hostname"
-variable in the ``base-files`` recipe using either an append file or a
-configuration file. Use the following in an append file::
-
- hostname = "myhostname"
-
-Use the following in a configuration file::
-
- hostname:pn-base-files = "myhostname"
-
-Changing the default value of the variable "hostname" can be useful in
-certain situations. For example, suppose you need to do extensive
-testing on an image and you would like to easily identify the image
-under test from existing images with typical default hostnames. In this
-situation, you could change the default hostname to "testme", which
-results in all the images using the name "testme". Once testing is
-complete and you do not need to rebuild the image for test any longer,
-you can easily reset the default hostname.
-
-Another point of interest is that if you unset the variable, the image
-will have no default hostname in the filesystem. Here is an example that
-unsets the variable in a configuration file::
-
- hostname:pn-base-files = ""
-
-Having no default hostname in the filesystem is suitable for
-environments that use dynamic hostnames such as virtual machines.
-
-Writing a New Recipe
-====================
-
-Recipes (``.bb`` files) are fundamental components in the Yocto Project
-environment. Each software component built by the OpenEmbedded build
-system requires a recipe to define the component. This section describes
-how to create, write, and test a new recipe.
-
-.. note::
-
- For information on variables that are useful for recipes and for
- information about recipe naming issues, see the
- ":ref:`ref-manual/varlocality:recipes`" section of the Yocto Project
- Reference Manual.
-
-Overview
---------
-
-The following figure shows the basic process for creating a new recipe.
-The remainder of the section provides details for the steps.
-
-.. image:: figures/recipe-workflow.png
- :align: center
- :width: 50%
-
-Locate or Automatically Create a Base Recipe
---------------------------------------------
-
-You can always write a recipe from scratch. However, there are three choices
-that can help you quickly get started with a new recipe:
-
-- ``devtool add``: A command that assists in creating a recipe and an
- environment conducive to development.
-
-- ``recipetool create``: A command provided by the Yocto Project that
- automates creation of a base recipe based on the source files.
-
-- *Existing Recipes:* Location and modification of an existing recipe
- that is similar in function to the recipe you need.
-
-.. note::
-
- For information on recipe syntax, see the
- ":ref:`dev-manual/common-tasks:recipe syntax`" section.
-
-Creating the Base Recipe Using ``devtool add``
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-The ``devtool add`` command uses the same logic for auto-creating the
-recipe as ``recipetool create``, which is listed below. Additionally,
-however, ``devtool add`` sets up an environment that makes it easy for
-you to patch the source and to make changes to the recipe as is often
-necessary when adding a recipe to build a new piece of software to be
-included in a build.
-
-You can find a complete description of the ``devtool add`` command in
-the ":ref:`sdk-manual/extensible:a closer look at \`\`devtool add\`\``" section
-in the Yocto Project Application Development and the Extensible Software
-Development Kit (eSDK) manual.
-
-Creating the Base Recipe Using ``recipetool create``
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-``recipetool create`` automates creation of a base recipe given a set of
-source code files. As long as you can extract or point to the source
-files, the tool will construct a recipe and automatically configure all
-pre-build information into the recipe. For example, suppose you have an
-application that builds using Autotools. Creating the base recipe using
-``recipetool`` results in a recipe that has the pre-build dependencies,
-license requirements, and checksums configured.
-
-To run the tool, you just need to be in your
-:term:`Build Directory` and have sourced the
-build environment setup script (i.e.
-:ref:`structure-core-script`).
-To get help on the tool, use the following command::
-
- $ recipetool -h
- NOTE: Starting bitbake server...
- usage: recipetool [-d] [-q] [--color COLOR] [-h] <subcommand> ...
-
- OpenEmbedded recipe tool
-
- options:
- -d, --debug Enable debug output
- -q, --quiet Print only errors
- --color COLOR Colorize output (where COLOR is auto, always, never)
- -h, --help show this help message and exit
-
- subcommands:
- create Create a new recipe
- newappend Create a bbappend for the specified target in the specified
- layer
- setvar Set a variable within a recipe
- appendfile Create/update a bbappend to replace a target file
- appendsrcfiles Create/update a bbappend to add or replace source files
- appendsrcfile Create/update a bbappend to add or replace a source file
- Use recipetool <subcommand> --help to get help on a specific command
-
-Running ``recipetool create -o OUTFILE`` creates the base recipe and
-locates it properly in the layer that contains your source files.
-Following are some syntax examples:
-
- - Use this syntax to generate a recipe based on source. Once generated,
- the recipe resides in the existing source code layer::
-
- recipetool create -o OUTFILE source
-
- - Use this syntax to generate a recipe using code that
- you extract from source. The extracted code is placed in its own layer
- defined by :term:`EXTERNALSRC`.
- ::
-
- recipetool create -o OUTFILE -x EXTERNALSRC source
-
- - Use this syntax to generate a recipe based on source. The options
- direct ``recipetool`` to generate debugging information. Once generated,
- the recipe resides in the existing source code layer::
-
- recipetool create -d -o OUTFILE source
-
-Locating and Using a Similar Recipe
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-Before writing a recipe from scratch, it is often useful to discover
-whether someone else has already written one that meets (or comes close
-to meeting) your needs. The Yocto Project and OpenEmbedded communities
-maintain many recipes that might be candidates for what you are doing.
-You can find a good central index of these recipes in the
-:oe_layerindex:`OpenEmbedded Layer Index <>`.
-
-Working from an existing recipe or a skeleton recipe is the best way to
-get started. Here are some points on both methods:
-
-- *Locate and modify a recipe that is close to what you want to do:*
- This method works when you are familiar with the current recipe
- space. The method does not work so well for those new to the Yocto
- Project or writing recipes.
-
- Some risks associated with this method are using a recipe that has
- areas totally unrelated to what you are trying to accomplish with
- your recipe, not recognizing areas of the recipe that you might have
- to add from scratch, and so forth. All these risks stem from
- unfamiliarity with the existing recipe space.
-
-- *Use and modify the following skeleton recipe:* If for some reason
- you do not want to use ``recipetool`` and you cannot find an existing
- recipe that is close to meeting your needs, you can use the following
- structure to provide the fundamental areas of a new recipe.
- ::
-
- DESCRIPTION = ""
- HOMEPAGE = ""
- LICENSE = ""
- SECTION = ""
- DEPENDS = ""
- LIC_FILES_CHKSUM = ""
-
- SRC_URI = ""
-
-Storing and Naming the Recipe
------------------------------
-
-Once you have your base recipe, you should put it in your own layer and
-name it appropriately. Locating it correctly ensures that the
-OpenEmbedded build system can find it when you use BitBake to process
-the recipe.
-
-- *Storing Your Recipe:* The OpenEmbedded build system locates your
- recipe through the layer's ``conf/layer.conf`` file and the
- :term:`BBFILES` variable. This
- variable sets up a path from which the build system can locate
- recipes. Here is the typical use::
-
- BBFILES += "${LAYERDIR}/recipes-*/*/*.bb \
- ${LAYERDIR}/recipes-*/*/*.bbappend"
-
- Consequently, you need to be sure you locate your new recipe inside
- your layer such that it can be found.
-
- You can find more information on how layers are structured in the
- ":ref:`dev-manual/common-tasks:understanding and creating layers`" section.
-
-- *Naming Your Recipe:* When you name your recipe, you need to follow
- this naming convention::
-
- basename_version.bb
-
- Use lower-cased characters and do not include the reserved suffixes
- ``-native``, ``-cross``, ``-initial``, or ``-dev`` casually (i.e. do not use
- them as part of your recipe name unless the string applies). Here are some
- examples:
-
- .. code-block:: none
-
- cups_1.7.0.bb
- gawk_4.0.2.bb
- irssi_0.8.16-rc1.bb
-
-Running a Build on the Recipe
------------------------------
-
-Creating a new recipe is usually an iterative process that requires
-using BitBake to process the recipe multiple times in order to
-progressively discover and add information to the recipe file.
-
-Assuming you have sourced the build environment setup script (i.e.
-:ref:`structure-core-script`) and you are in
-the :term:`Build Directory`, use
-BitBake to process your recipe. All you need to provide is the
-``basename`` of the recipe as described in the previous section::
-
- $ bitbake basename
-
-During the build, the OpenEmbedded build system creates a temporary work
-directory for each recipe
-(``${``\ :term:`WORKDIR`\ ``}``)
-where it keeps extracted source files, log files, intermediate
-compilation and packaging files, and so forth.
-
-The path to the per-recipe temporary work directory depends on the
-context in which it is being built. The quickest way to find this path
-is to have BitBake return it by running the following::
-
- $ bitbake -e basename | grep ^WORKDIR=
-
-As an example, assume a Source Directory
-top-level folder named ``poky``, a default Build Directory at
-``poky/build``, and a ``qemux86-poky-linux`` machine target system.
-Furthermore, suppose your recipe is named ``foo_1.3.0.bb``. In this
-case, the work directory the build system uses to build the package
-would be as follows::
-
- poky/build/tmp/work/qemux86-poky-linux/foo/1.3.0-r0
-
-Inside this directory you can find sub-directories such as ``image``,
-``packages-split``, and ``temp``. After the build, you can examine these
-to determine how well the build went.
-
-.. note::
-
- You can find log files for each task in the recipe's ``temp``
- directory (e.g. ``poky/build/tmp/work/qemux86-poky-linux/foo/1.3.0-r0/temp``).
- Log files are named ``log.taskname`` (e.g. ``log.do_configure``,
- ``log.do_fetch``, and ``log.do_compile``).
-
-You can find more information about the build process in
-":doc:`/overview-manual/development-environment`"
-chapter of the Yocto Project Overview and Concepts Manual.
-
-Fetching Code
--------------
-
-The first thing your recipe must do is specify how to fetch the source
-files. Fetching is controlled mainly through the
-:term:`SRC_URI` variable. Your recipe
-must have a :term:`SRC_URI` variable that points to where the source is
-located. For a graphical representation of source locations, see the
-":ref:`overview-manual/concepts:sources`" section in
-the Yocto Project Overview and Concepts Manual.
-
-The :ref:`ref-tasks-fetch` task uses
-the prefix of each entry in the :term:`SRC_URI` variable value to determine
-which :ref:`fetcher <bitbake:bitbake-user-manual/bitbake-user-manual-fetching:fetchers>` to use to get your
-source files. It is the :term:`SRC_URI` variable that triggers the fetcher.
-The :ref:`ref-tasks-patch` task uses
-the variable after source is fetched to apply patches. The OpenEmbedded
-build system uses
-:term:`FILESOVERRIDES` for
-scanning directory locations for local files in :term:`SRC_URI`.
-
-The :term:`SRC_URI` variable in your recipe must define each unique location
-for your source files. It is good practice to not hard-code version
-numbers in a URL used in :term:`SRC_URI`. Rather than hard-code these
-values, use ``${``\ :term:`PV`\ ``}``,
-which causes the fetch process to use the version specified in the
-recipe filename. Specifying the version in this manner means that
-upgrading the recipe to a future version is as simple as renaming the
-recipe to match the new version.
-
-Here is a simple example from the
-``meta/recipes-devtools/strace/strace_5.5.bb`` recipe where the source
-comes from a single tarball. Notice the use of the
-:term:`PV` variable::
-
- SRC_URI = "https://strace.io/files/${PV}/strace-${PV}.tar.xz \
-
-Files mentioned in :term:`SRC_URI` whose names end in a typical archive
-extension (e.g. ``.tar``, ``.tar.gz``, ``.tar.bz2``, ``.zip``, and so
-forth), are automatically extracted during the
-:ref:`ref-tasks-unpack` task. For
-another example that specifies these types of files, see the
-":ref:`dev-manual/common-tasks:autotooled package`" section.
-
-Another way of specifying source is from an SCM. For Git repositories,
-you must specify :term:`SRCREV` and you should specify :term:`PV` to include
-the revision with :term:`SRCPV`. Here is an example from the recipe
-``meta/recipes-core/musl/gcompat_git.bb``::
-
- SRC_URI = "git://git.adelielinux.org/adelie/gcompat.git;protocol=https;branch=current"
-
- PV = "1.0.0+1.1+git${SRCPV}"
- SRCREV = "af5a49e489fdc04b9cf02547650d7aeaccd43793"
-
-If your :term:`SRC_URI` statement includes URLs pointing to individual files
-fetched from a remote server other than a version control system,
-BitBake attempts to verify the files against checksums defined in your
-recipe to ensure they have not been tampered with or otherwise modified
-since the recipe was written. Two checksums are used:
-``SRC_URI[md5sum]`` and ``SRC_URI[sha256sum]``.
-
-If your :term:`SRC_URI` variable points to more than a single URL (excluding
-SCM URLs), you need to provide the ``md5`` and ``sha256`` checksums for
-each URL. For these cases, you provide a name for each URL as part of
-the :term:`SRC_URI` and then reference that name in the subsequent checksum
-statements. Here is an example combining lines from the files
-``git.inc`` and ``git_2.24.1.bb``::
-
- SRC_URI = "${KERNELORG_MIRROR}/software/scm/git/git-${PV}.tar.gz;name=tarball \
- ${KERNELORG_MIRROR}/software/scm/git/git-manpages-${PV}.tar.gz;name=manpages"
-
- SRC_URI[tarball.md5sum] = "166bde96adbbc11c8843d4f8f4f9811b"
- SRC_URI[tarball.sha256sum] = "ad5334956301c86841eb1e5b1bb20884a6bad89a10a6762c958220c7cf64da02"
- SRC_URI[manpages.md5sum] = "31c2272a8979022497ba3d4202df145d"
- SRC_URI[manpages.sha256sum] = "9a7ae3a093bea39770eb96ca3e5b40bff7af0b9f6123f089d7821d0e5b8e1230"
-
-Proper values for ``md5`` and ``sha256`` checksums might be available
-with other signatures on the download page for the upstream source (e.g.
-``md5``, ``sha1``, ``sha256``, ``GPG``, and so forth). Because the
-OpenEmbedded build system only deals with ``sha256sum`` and ``md5sum``,
-you should verify all the signatures you find by hand.
-
-If no :term:`SRC_URI` checksums are specified when you attempt to build the
-recipe, or you provide an incorrect checksum, the build will produce an
-error for each missing or incorrect checksum. As part of the error
-message, the build system provides the checksum string corresponding to
-the fetched file. Once you have the correct checksums, you can copy and
-paste them into your recipe and then run the build again to continue.
-
-.. note::
-
- As mentioned, if the upstream source provides signatures for
- verifying the downloaded source code, you should verify those
- manually before setting the checksum values in the recipe and
- continuing with the build.
-
-This final example is a bit more complicated and is from the
-``meta/recipes-sato/rxvt-unicode/rxvt-unicode_9.20.bb`` recipe. The
-example's :term:`SRC_URI` statement identifies multiple files as the source
-files for the recipe: a tarball, a patch file, a desktop file, and an
-icon.
-::
-
- SRC_URI = "http://dist.schmorp.de/rxvt-unicode/Attic/rxvt-unicode-${PV}.tar.bz2 \
- file://xwc.patch \
- file://rxvt.desktop \
- file://rxvt.png"
-
-When you specify local files using the ``file://`` URI protocol, the
-build system fetches files from the local machine. The path is relative
-to the :term:`FILESPATH` variable
-and searches specific directories in a certain order:
-``${``\ :term:`BP`\ ``}``,
-``${``\ :term:`BPN`\ ``}``, and
-``files``. The directories are assumed to be subdirectories of the
-directory in which the recipe or append file resides. For another
-example that specifies these types of files, see the
-":ref:`dev-manual/common-tasks:single .c file package (hello world!)`" section.
-
-The previous example also specifies a patch file. Patch files are files
-whose names usually end in ``.patch`` or ``.diff`` but can end with
-compressed suffixes such as ``diff.gz`` and ``patch.bz2``, for example.
-The build system automatically applies patches as described in the
-":ref:`dev-manual/common-tasks:patching code`" section.
-
-Fetching Code Through Firewalls
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-Some users are behind firewalls and need to fetch code through a proxy.
-See the ":doc:`/ref-manual/faq`" chapter for advice.
-
-Limiting the Number of Parallel Connections
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-Some users are behind firewalls or use servers where the number of parallel
-connections is limited. In such cases, you can limit the number of fetch
-tasks being run in parallel by adding the following to your ``local.conf``
-file::
-
- do_fetch[number_threads] = "4"
-
-Unpacking Code
---------------
-
-During the build, the
-:ref:`ref-tasks-unpack` task unpacks
-the source with ``${``\ :term:`S`\ ``}``
-pointing to where it is unpacked.
-
-If you are fetching your source files from an upstream source archived
-tarball and the tarball's internal structure matches the common
-convention of a top-level subdirectory named
-``${``\ :term:`BPN`\ ``}-${``\ :term:`PV`\ ``}``,
-then you do not need to set :term:`S`. However, if :term:`SRC_URI` specifies to
-fetch source from an archive that does not use this convention, or from
-an SCM like Git or Subversion, your recipe needs to define :term:`S`.
-
-If processing your recipe using BitBake successfully unpacks the source
-files, you need to be sure that the directory pointed to by ``${S}``
-matches the structure of the source.
-
-Patching Code
--------------
-
-Sometimes it is necessary to patch code after it has been fetched. Any
-files mentioned in :term:`SRC_URI` whose names end in ``.patch`` or
-``.diff`` or compressed versions of these suffixes (e.g. ``diff.gz`` are
-treated as patches. The
-:ref:`ref-tasks-patch` task
-automatically applies these patches.
-
-The build system should be able to apply patches with the "-p1" option
-(i.e. one directory level in the path will be stripped off). If your
-patch needs to have more directory levels stripped off, specify the
-number of levels using the "striplevel" option in the :term:`SRC_URI` entry
-for the patch. Alternatively, if your patch needs to be applied in a
-specific subdirectory that is not specified in the patch file, use the
-"patchdir" option in the entry.
-
-As with all local files referenced in
-:term:`SRC_URI` using ``file://``,
-you should place patch files in a directory next to the recipe either
-named the same as the base name of the recipe
-(:term:`BP` and
-:term:`BPN`) or "files".
-
-Licensing
----------
-
-Your recipe needs to have both the
-:term:`LICENSE` and
-:term:`LIC_FILES_CHKSUM`
-variables:
-
-- :term:`LICENSE`: This variable specifies the license for the software.
- If you do not know the license under which the software you are
- building is distributed, you should go to the source code and look
- for that information. Typical files containing this information
- include ``COPYING``, :term:`LICENSE`, and ``README`` files. You could
- also find the information near the top of a source file. For example,
- given a piece of software licensed under the GNU General Public
- License version 2, you would set :term:`LICENSE` as follows::
-
- LICENSE = "GPL-2.0-only"
-
- The licenses you specify within :term:`LICENSE` can have any name as long
- as you do not use spaces, since spaces are used as separators between
- license names. For standard licenses, use the names of the files in
- ``meta/files/common-licenses/`` or the :term:`SPDXLICENSEMAP` flag names
- defined in ``meta/conf/licenses.conf``.
-
-- :term:`LIC_FILES_CHKSUM`: The OpenEmbedded build system uses this
- variable to make sure the license text has not changed. If it has,
- the build produces an error and it affords you the chance to figure
- it out and correct the problem.
-
- You need to specify all applicable licensing files for the software.
- At the end of the configuration step, the build process will compare
- the checksums of the files to be sure the text has not changed. Any
- differences result in an error with the message containing the
- current checksum. For more explanation and examples of how to set the
- :term:`LIC_FILES_CHKSUM` variable, see the
- ":ref:`dev-manual/common-tasks:tracking license changes`" section.
-
- To determine the correct checksum string, you can list the
- appropriate files in the :term:`LIC_FILES_CHKSUM` variable with incorrect
- md5 strings, attempt to build the software, and then note the
- resulting error messages that will report the correct md5 strings.
- See the ":ref:`dev-manual/common-tasks:fetching code`" section for
- additional information.
-
- Here is an example that assumes the software has a ``COPYING`` file::
-
- LIC_FILES_CHKSUM = "file://COPYING;md5=xxx"
-
- When you try to build the
- software, the build system will produce an error and give you the
- correct string that you can substitute into the recipe file for a
- subsequent build.
-
-Dependencies
-------------
-
-Most software packages have a short list of other packages that they
-require, which are called dependencies. These dependencies fall into two
-main categories: build-time dependencies, which are required when the
-software is built; and runtime dependencies, which are required to be
-installed on the target in order for the software to run.
-
-Within a recipe, you specify build-time dependencies using the
-:term:`DEPENDS` variable. Although there are nuances,
-items specified in :term:`DEPENDS` should be names of other
-recipes. It is important that you specify all build-time dependencies
-explicitly.
-
-Another consideration is that configure scripts might automatically
-check for optional dependencies and enable corresponding functionality
-if those dependencies are found. If you wish to make a recipe that is
-more generally useful (e.g. publish the recipe in a layer for others to
-use), instead of hard-disabling the functionality, you can use the
-:term:`PACKAGECONFIG` variable to allow functionality and the
-corresponding dependencies to be enabled and disabled easily by other
-users of the recipe.
-
-Similar to build-time dependencies, you specify runtime dependencies
-through a variable -
-:term:`RDEPENDS`, which is
-package-specific. All variables that are package-specific need to have
-the name of the package added to the end as an override. Since the main
-package for a recipe has the same name as the recipe, and the recipe's
-name can be found through the
-``${``\ :term:`PN`\ ``}`` variable, then
-you specify the dependencies for the main package by setting
-``RDEPENDS:${PN}``. If the package were named ``${PN}-tools``, then you
-would set ``RDEPENDS:${PN}-tools``, and so forth.
-
-Some runtime dependencies will be set automatically at packaging time.
-These dependencies include any shared library dependencies (i.e. if a
-package "example" contains "libexample" and another package "mypackage"
-contains a binary that links to "libexample" then the OpenEmbedded build
-system will automatically add a runtime dependency to "mypackage" on
-"example"). See the
-":ref:`overview-manual/concepts:automatically added runtime dependencies`"
-section in the Yocto Project Overview and Concepts Manual for further
-details.
-
-Configuring the Recipe
-----------------------
-
-Most software provides some means of setting build-time configuration
-options before compilation. Typically, setting these options is
-accomplished by running a configure script with options, or by modifying
-a build configuration file.
-
-.. note::
-
- As of Yocto Project Release 1.7, some of the core recipes that
- package binary configuration scripts now disable the scripts due to
- the scripts previously requiring error-prone path substitution. The
- OpenEmbedded build system uses ``pkg-config`` now, which is much more
- robust. You can find a list of the ``*-config`` scripts that are disabled
- in the ":ref:`migration-1.7-binary-configuration-scripts-disabled`" section
- in the Yocto Project Reference Manual.
-
-A major part of build-time configuration is about checking for
-build-time dependencies and possibly enabling optional functionality as
-a result. You need to specify any build-time dependencies for the
-software you are building in your recipe's
-:term:`DEPENDS` value, in terms of
-other recipes that satisfy those dependencies. You can often find
-build-time or runtime dependencies described in the software's
-documentation.
-
-The following list provides configuration items of note based on how
-your software is built:
-
-- *Autotools:* If your source files have a ``configure.ac`` file, then
- your software is built using Autotools. If this is the case, you just
- need to modify the configuration.
-
- When using Autotools, your recipe needs to inherit the
- :ref:`autotools <ref-classes-autotools>` class
- and your recipe does not have to contain a
- :ref:`ref-tasks-configure` task.
- However, you might still want to make some adjustments. For example,
- you can set
- :term:`EXTRA_OECONF` or
- :term:`PACKAGECONFIG_CONFARGS`
- to pass any needed configure options that are specific to the recipe.
-
-- *CMake:* If your source files have a ``CMakeLists.txt`` file, then
- your software is built using CMake. If this is the case, you just
- need to modify the configuration.
-
- When you use CMake, your recipe needs to inherit the
- :ref:`cmake <ref-classes-cmake>` class and your
- recipe does not have to contain a
- :ref:`ref-tasks-configure` task.
- You can make some adjustments by setting
- :term:`EXTRA_OECMAKE` to
- pass any needed configure options that are specific to the recipe.
-
- .. note::
-
- If you need to install one or more custom CMake toolchain files
- that are supplied by the application you are building, install the
- files to ``${D}${datadir}/cmake/Modules`` during ``do_install``.
-
-- *Other:* If your source files do not have a ``configure.ac`` or
- ``CMakeLists.txt`` file, then your software is built using some
- method other than Autotools or CMake. If this is the case, you
- normally need to provide a
- :ref:`ref-tasks-configure` task
- in your recipe unless, of course, there is nothing to configure.
-
- Even if your software is not being built by Autotools or CMake, you
- still might not need to deal with any configuration issues. You need
- to determine if configuration is even a required step. You might need
- to modify a Makefile or some configuration file used for the build to
- specify necessary build options. Or, perhaps you might need to run a
- provided, custom configure script with the appropriate options.
-
- For the case involving a custom configure script, you would run
- ``./configure --help`` and look for the options you need to set.
-
-Once configuration succeeds, it is always good practice to look at the
-``log.do_configure`` file to ensure that the appropriate options have
-been enabled and no additional build-time dependencies need to be added
-to :term:`DEPENDS`. For example, if the configure script reports that it
-found something not mentioned in :term:`DEPENDS`, or that it did not find
-something that it needed for some desired optional functionality, then
-you would need to add those to :term:`DEPENDS`. Looking at the log might
-also reveal items being checked for, enabled, or both that you do not
-want, or items not being found that are in :term:`DEPENDS`, in which case
-you would need to look at passing extra options to the configure script
-as needed. For reference information on configure options specific to
-the software you are building, you can consult the output of the
-``./configure --help`` command within ``${S}`` or consult the software's
-upstream documentation.
-
-Using Headers to Interface with Devices
----------------------------------------
-
-If your recipe builds an application that needs to communicate with some
-device or needs an API into a custom kernel, you will need to provide
-appropriate header files. Under no circumstances should you ever modify
-the existing
-``meta/recipes-kernel/linux-libc-headers/linux-libc-headers.inc`` file.
-These headers are used to build ``libc`` and must not be compromised
-with custom or machine-specific header information. If you customize
-``libc`` through modified headers all other applications that use
-``libc`` thus become affected.
-
-.. note::
-
- Never copy and customize the ``libc`` header file (i.e.
- ``meta/recipes-kernel/linux-libc-headers/linux-libc-headers.inc``).
-
-The correct way to interface to a device or custom kernel is to use a
-separate package that provides the additional headers for the driver or
-other unique interfaces. When doing so, your application also becomes
-responsible for creating a dependency on that specific provider.
-
-Consider the following:
-
-- Never modify ``linux-libc-headers.inc``. Consider that file to be
- part of the ``libc`` system, and not something you use to access the
- kernel directly. You should access ``libc`` through specific ``libc``
- calls.
-
-- Applications that must talk directly to devices should either provide
- necessary headers themselves, or establish a dependency on a special
- headers package that is specific to that driver.
-
-For example, suppose you want to modify an existing header that adds I/O
-control or network support. If the modifications are used by a small
-number programs, providing a unique version of a header is easy and has
-little impact. When doing so, bear in mind the guidelines in the
-previous list.
-
-.. note::
-
- If for some reason your changes need to modify the behavior of the ``libc``,
- and subsequently all other applications on the system, use a ``.bbappend``
- to modify the ``linux-kernel-headers.inc`` file. However, take care to not
- make the changes machine specific.
-
-Consider a case where your kernel is older and you need an older
-``libc`` ABI. The headers installed by your recipe should still be a
-standard mainline kernel, not your own custom one.
-
-When you use custom kernel headers you need to get them from
-:term:`STAGING_KERNEL_DIR`,
-which is the directory with kernel headers that are required to build
-out-of-tree modules. Your recipe will also need the following::
-
- do_configure[depends] += "virtual/kernel:do_shared_workdir"
-
-Compilation
------------
-
-During a build, the ``do_compile`` task happens after source is fetched,
-unpacked, and configured. If the recipe passes through ``do_compile``
-successfully, nothing needs to be done.
-
-However, if the compile step fails, you need to diagnose the failure.
-Here are some common issues that cause failures.
-
-.. note::
-
- For cases where improper paths are detected for configuration files
- or for when libraries/headers cannot be found, be sure you are using
- the more robust ``pkg-config``. See the note in section
- ":ref:`dev-manual/common-tasks:Configuring the Recipe`" for additional information.
-
-- *Parallel build failures:* These failures manifest themselves as
- intermittent errors, or errors reporting that a file or directory
- that should be created by some other part of the build process could
- not be found. This type of failure can occur even if, upon
- inspection, the file or directory does exist after the build has
- failed, because that part of the build process happened in the wrong
- order.
-
- To fix the problem, you need to either satisfy the missing dependency
- in the Makefile or whatever script produced the Makefile, or (as a
- workaround) set :term:`PARALLEL_MAKE` to an empty string::
-
- PARALLEL_MAKE = ""
-
- For information on parallel Makefile issues, see the
- ":ref:`dev-manual/common-tasks:debugging parallel make races`" section.
-
-- *Improper host path usage:* This failure applies to recipes building
- for the target or ``nativesdk`` only. The failure occurs when the
- compilation process uses improper headers, libraries, or other files
- from the host system when cross-compiling for the target.
-
- To fix the problem, examine the ``log.do_compile`` file to identify
- the host paths being used (e.g. ``/usr/include``, ``/usr/lib``, and
- so forth) and then either add configure options, apply a patch, or do
- both.
-
-- *Failure to find required libraries/headers:* If a build-time
- dependency is missing because it has not been declared in
- :term:`DEPENDS`, or because the
- dependency exists but the path used by the build process to find the
- file is incorrect and the configure step did not detect it, the
- compilation process could fail. For either of these failures, the
- compilation process notes that files could not be found. In these
- cases, you need to go back and add additional options to the
- configure script as well as possibly add additional build-time
- dependencies to :term:`DEPENDS`.
-
- Occasionally, it is necessary to apply a patch to the source to
- ensure the correct paths are used. If you need to specify paths to
- find files staged into the sysroot from other recipes, use the
- variables that the OpenEmbedded build system provides (e.g.
- :term:`STAGING_BINDIR`, :term:`STAGING_INCDIR`, :term:`STAGING_DATADIR`, and so
- forth).
-
-Installing
-----------
-
-During ``do_install``, the task copies the built files along with their
-hierarchy to locations that would mirror their locations on the target
-device. The installation process copies files from the
-``${``\ :term:`S`\ ``}``,
-``${``\ :term:`B`\ ``}``, and
-``${``\ :term:`WORKDIR`\ ``}``
-directories to the ``${``\ :term:`D`\ ``}``
-directory to create the structure as it should appear on the target
-system.
-
-How your software is built affects what you must do to be sure your
-software is installed correctly. The following list describes what you
-must do for installation depending on the type of build system used by
-the software being built:
-
-- *Autotools and CMake:* If the software your recipe is building uses
- Autotools or CMake, the OpenEmbedded build system understands how to
- install the software. Consequently, you do not have to have a
- ``do_install`` task as part of your recipe. You just need to make
- sure the install portion of the build completes with no issues.
- However, if you wish to install additional files not already being
- installed by ``make install``, you should do this using a
- ``do_install:append`` function using the install command as described
- in the "Manual" bulleted item later in this list.
-
-- *Other (using* ``make install``\ *)*: You need to define a ``do_install``
- function in your recipe. The function should call
- ``oe_runmake install`` and will likely need to pass in the
- destination directory as well. How you pass that path is dependent on
- how the ``Makefile`` being run is written (e.g. ``DESTDIR=${D}``,
- ``PREFIX=${D}``, ``INSTALLROOT=${D}``, and so forth).
-
- For an example recipe using ``make install``, see the
- ":ref:`dev-manual/common-tasks:makefile-based package`" section.
-
-- *Manual:* You need to define a ``do_install`` function in your
- recipe. The function must first use ``install -d`` to create the
- directories under
- ``${``\ :term:`D`\ ``}``. Once the
- directories exist, your function can use ``install`` to manually
- install the built software into the directories.
-
- You can find more information on ``install`` at
- https://www.gnu.org/software/coreutils/manual/html_node/install-invocation.html.
-
-For the scenarios that do not use Autotools or CMake, you need to track
-the installation and diagnose and fix any issues until everything
-installs correctly. You need to look in the default location of
-``${D}``, which is ``${WORKDIR}/image``, to be sure your files have been
-installed correctly.
-
-.. note::
-
- - During the installation process, you might need to modify some of
- the installed files to suit the target layout. For example, you
- might need to replace hard-coded paths in an initscript with
- values of variables provided by the build system, such as
- replacing ``/usr/bin/`` with ``${bindir}``. If you do perform such
- modifications during ``do_install``, be sure to modify the
- destination file after copying rather than before copying.
- Modifying after copying ensures that the build system can
- re-execute ``do_install`` if needed.
-
- - ``oe_runmake install``, which can be run directly or can be run
- indirectly by the
- :ref:`autotools <ref-classes-autotools>` and
- :ref:`cmake <ref-classes-cmake>` classes,
- runs ``make install`` in parallel. Sometimes, a Makefile can have
- missing dependencies between targets that can result in race
- conditions. If you experience intermittent failures during
- ``do_install``, you might be able to work around them by disabling
- parallel Makefile installs by adding the following to the recipe::
-
- PARALLEL_MAKEINST = ""
-
- See :term:`PARALLEL_MAKEINST` for additional information.
-
- - If you need to install one or more custom CMake toolchain files
- that are supplied by the application you are building, install the
- files to ``${D}${datadir}/cmake/Modules`` during
- :ref:`ref-tasks-install`.
-
-Enabling System Services
-------------------------
-
-If you want to install a service, which is a process that usually starts
-on boot and runs in the background, then you must include some
-additional definitions in your recipe.
-
-If you are adding services and the service initialization script or the
-service file itself is not installed, you must provide for that
-installation in your recipe using a ``do_install:append`` function. If
-your recipe already has a ``do_install`` function, update the function
-near its end rather than adding an additional ``do_install:append``
-function.
-
-When you create the installation for your services, you need to
-accomplish what is normally done by ``make install``. In other words,
-make sure your installation arranges the output similar to how it is
-arranged on the target system.
-
-The OpenEmbedded build system provides support for starting services two
-different ways:
-
-- *SysVinit:* SysVinit is a system and service manager that manages the
- init system used to control the very basic functions of your system.
- The init program is the first program started by the Linux kernel
- when the system boots. Init then controls the startup, running and
- shutdown of all other programs.
-
- To enable a service using SysVinit, your recipe needs to inherit the
- :ref:`update-rc.d <ref-classes-update-rc.d>`
- class. The class helps facilitate safely installing the package on
- the target.
-
- You will need to set the
- :term:`INITSCRIPT_PACKAGES`,
- :term:`INITSCRIPT_NAME`,
- and
- :term:`INITSCRIPT_PARAMS`
- variables within your recipe.
-
-- *systemd:* System Management Daemon (systemd) was designed to replace
- SysVinit and to provide enhanced management of services. For more
- information on systemd, see the systemd homepage at
- https://freedesktop.org/wiki/Software/systemd/.
-
- To enable a service using systemd, your recipe needs to inherit the
- :ref:`systemd <ref-classes-systemd>` class. See
- the ``systemd.bbclass`` file located in your :term:`Source Directory`
- section for
- more information.
-
-Packaging
----------
-
-Successful packaging is a combination of automated processes performed
-by the OpenEmbedded build system and some specific steps you need to
-take. The following list describes the process:
-
-- *Splitting Files*: The ``do_package`` task splits the files produced
- by the recipe into logical components. Even software that produces a
- single binary might still have debug symbols, documentation, and
- other logical components that should be split out. The ``do_package``
- task ensures that files are split up and packaged correctly.
-
-- *Running QA Checks*: The
- :ref:`insane <ref-classes-insane>` class adds a
- step to the package generation process so that output quality
- assurance checks are generated by the OpenEmbedded build system. This
- step performs a range of checks to be sure the build's output is free
- of common problems that show up during runtime. For information on
- these checks, see the
- :ref:`insane <ref-classes-insane>` class and
- the ":ref:`ref-manual/qa-checks:qa error and warning messages`"
- chapter in the Yocto Project Reference Manual.
-
-- *Hand-Checking Your Packages*: After you build your software, you
- need to be sure your packages are correct. Examine the
- ``${``\ :term:`WORKDIR`\ ``}/packages-split``
- directory and make sure files are where you expect them to be. If you
- discover problems, you can set
- :term:`PACKAGES`,
- :term:`FILES`,
- ``do_install(:append)``, and so forth as needed.
-
-- *Splitting an Application into Multiple Packages*: If you need to
- split an application into several packages, see the
- ":ref:`dev-manual/common-tasks:splitting an application into multiple packages`"
- section for an example.
-
-- *Installing a Post-Installation Script*: For an example showing how
- to install a post-installation script, see the
- ":ref:`dev-manual/common-tasks:post-installation scripts`" section.
-
-- *Marking Package Architecture*: Depending on what your recipe is
- building and how it is configured, it might be important to mark the
- packages produced as being specific to a particular machine, or to
- mark them as not being specific to a particular machine or
- architecture at all.
-
- By default, packages apply to any machine with the same architecture
- as the target machine. When a recipe produces packages that are
- machine-specific (e.g. the
- :term:`MACHINE` value is passed
- into the configure script or a patch is applied only for a particular
- machine), you should mark them as such by adding the following to the
- recipe::
-
- PACKAGE_ARCH = "${MACHINE_ARCH}"
-
- On the other hand, if the recipe produces packages that do not
- contain anything specific to the target machine or architecture at
- all (e.g. recipes that simply package script files or configuration
- files), you should use the
- :ref:`allarch <ref-classes-allarch>` class to
- do this for you by adding this to your recipe::
-
- inherit allarch
-
- Ensuring that the package architecture is correct is not critical
- while you are doing the first few builds of your recipe. However, it
- is important in order to ensure that your recipe rebuilds (or does
- not rebuild) appropriately in response to changes in configuration,
- and to ensure that you get the appropriate packages installed on the
- target machine, particularly if you run separate builds for more than
- one target machine.
-
-Sharing Files Between Recipes
------------------------------
-
-Recipes often need to use files provided by other recipes on the build
-host. For example, an application linking to a common library needs
-access to the library itself and its associated headers. The way this
-access is accomplished is by populating a sysroot with files. Each
-recipe has two sysroots in its work directory, one for target files
-(``recipe-sysroot``) and one for files that are native to the build host
-(``recipe-sysroot-native``).
-
-.. note::
-
- You could find the term "staging" used within the Yocto project
- regarding files populating sysroots (e.g. the :term:`STAGING_DIR`
- variable).
-
-Recipes should never populate the sysroot directly (i.e. write files
-into sysroot). Instead, files should be installed into standard
-locations during the
-:ref:`ref-tasks-install` task within
-the ``${``\ :term:`D`\ ``}`` directory. The
-reason for this limitation is that almost all files that populate the
-sysroot are cataloged in manifests in order to ensure the files can be
-removed later when a recipe is either modified or removed. Thus, the
-sysroot is able to remain free from stale files.
-
-A subset of the files installed by the :ref:`ref-tasks-install` task are
-used by the :ref:`ref-tasks-populate_sysroot` task as defined by the the
-:term:`SYSROOT_DIRS` variable to automatically populate the sysroot. It
-is possible to modify the list of directories that populate the sysroot.
-The following example shows how you could add the ``/opt`` directory to
-the list of directories within a recipe::
-
- SYSROOT_DIRS += "/opt"
-
-.. note::
-
- The `/sysroot-only` is to be used by recipes that generate artifacts
- that are not included in the target filesystem, allowing them to share
- these artifacts without needing to use the :term:`DEPLOY_DIR`.
-
-For a more complete description of the :ref:`ref-tasks-populate_sysroot`
-task and its associated functions, see the
-:ref:`staging <ref-classes-staging>` class.
-
-Using Virtual Providers
------------------------
-
-Prior to a build, if you know that several different recipes provide the
-same functionality, you can use a virtual provider (i.e. ``virtual/*``)
-as a placeholder for the actual provider. The actual provider is
-determined at build-time.
-
-A common scenario where a virtual provider is used would be for the
-kernel recipe. Suppose you have three kernel recipes whose
-:term:`PN` values map to ``kernel-big``,
-``kernel-mid``, and ``kernel-small``. Furthermore, each of these recipes
-in some way uses a :term:`PROVIDES`
-statement that essentially identifies itself as being able to provide
-``virtual/kernel``. Here is one way through the
-:ref:`kernel <ref-classes-kernel>` class::
-
- PROVIDES += "virtual/kernel"
-
-Any recipe that inherits the :ref:`kernel <ref-classes-kernel>` class is
-going to utilize a :term:`PROVIDES` statement that identifies that recipe as
-being able to provide the ``virtual/kernel`` item.
-
-Now comes the time to actually build an image and you need a kernel
-recipe, but which one? You can configure your build to call out the
-kernel recipe you want by using the :term:`PREFERRED_PROVIDER` variable. As
-an example, consider the :yocto_git:`x86-base.inc
-</poky/tree/meta/conf/machine/include/x86/x86-base.inc>` include file, which is a
-machine (i.e. :term:`MACHINE`) configuration file. This include file is the
-reason all x86-based machines use the ``linux-yocto`` kernel. Here are the
-relevant lines from the include file::
-
- PREFERRED_PROVIDER_virtual/kernel ??= "linux-yocto"
- PREFERRED_VERSION_linux-yocto ??= "4.15%"
-
-When you use a virtual provider, you do not have to "hard code" a recipe
-name as a build dependency. You can use the
-:term:`DEPENDS` variable to state the
-build is dependent on ``virtual/kernel`` for example::
-
- DEPENDS = "virtual/kernel"
-
-During the build, the OpenEmbedded build system picks
-the correct recipe needed for the ``virtual/kernel`` dependency based on
-the :term:`PREFERRED_PROVIDER` variable. If you want to use the small kernel
-mentioned at the beginning of this section, configure your build as
-follows::
-
- PREFERRED_PROVIDER_virtual/kernel ??= "kernel-small"
-
-.. note::
-
- Any recipe that :term:`PROVIDES` a ``virtual/*`` item that is ultimately not
- selected through :term:`PREFERRED_PROVIDER` does not get built. Preventing these
- recipes from building is usually the desired behavior since this mechanism's
- purpose is to select between mutually exclusive alternative providers.
-
-The following lists specific examples of virtual providers:
-
-- ``virtual/kernel``: Provides the name of the kernel recipe to use
- when building a kernel image.
-
-- ``virtual/bootloader``: Provides the name of the bootloader to use
- when building an image.
-
-- ``virtual/libgbm``: Provides ``gbm.pc``.
-
-- ``virtual/egl``: Provides ``egl.pc`` and possibly ``wayland-egl.pc``.
-
-- ``virtual/libgl``: Provides ``gl.pc`` (i.e. libGL).
-
-- ``virtual/libgles1``: Provides ``glesv1_cm.pc`` (i.e. libGLESv1_CM).
-
-- ``virtual/libgles2``: Provides ``glesv2.pc`` (i.e. libGLESv2).
-
-.. note::
-
- Virtual providers only apply to build time dependencies specified with
- :term:`PROVIDES` and :term:`DEPENDS`. They do not apply to runtime
- dependencies specified with :term:`RPROVIDES` and :term:`RDEPENDS`.
-
-Properly Versioning Pre-Release Recipes
----------------------------------------
-
-Sometimes the name of a recipe can lead to versioning problems when the
-recipe is upgraded to a final release. For example, consider the
-``irssi_0.8.16-rc1.bb`` recipe file in the list of example recipes in
-the ":ref:`dev-manual/common-tasks:storing and naming the recipe`" section.
-This recipe is at a release candidate stage (i.e. "rc1"). When the recipe is
-released, the recipe filename becomes ``irssi_0.8.16.bb``. The version
-change from ``0.8.16-rc1`` to ``0.8.16`` is seen as a decrease by the
-build system and package managers, so the resulting packages will not
-correctly trigger an upgrade.
-
-In order to ensure the versions compare properly, the recommended
-convention is to set :term:`PV` within the
-recipe to "previous_version+current_version". You can use an additional
-variable so that you can use the current version elsewhere. Here is an
-example::
-
- REALPV = "0.8.16-rc1"
- PV = "0.8.15+${REALPV}"
-
-Post-Installation Scripts
--------------------------
-
-Post-installation scripts run immediately after installing a package on
-the target or during image creation when a package is included in an
-image. To add a post-installation script to a package, add a
-``pkg_postinst:``\ `PACKAGENAME`\ ``()`` function to the recipe file
-(``.bb``) and replace `PACKAGENAME` with the name of the package you want
-to attach to the ``postinst`` script. To apply the post-installation
-script to the main package for the recipe, which is usually what is
-required, specify
-``${``\ :term:`PN`\ ``}`` in place of
-PACKAGENAME.
-
-A post-installation function has the following structure::
-
- pkg_postinst:PACKAGENAME() {
- # Commands to carry out
- }
-
-The script defined in the post-installation function is called when the
-root filesystem is created. If the script succeeds, the package is
-marked as installed.
-
-.. note::
-
- Any RPM post-installation script that runs on the target should
- return a 0 exit code. RPM does not allow non-zero exit codes for
- these scripts, and the RPM package manager will cause the package to
- fail installation on the target.
-
-Sometimes it is necessary for the execution of a post-installation
-script to be delayed until the first boot. For example, the script might
-need to be executed on the device itself. To delay script execution
-until boot time, you must explicitly mark post installs to defer to the
-target. You can use ``pkg_postinst_ontarget()`` or call
-``postinst_intercept delay_to_first_boot`` from ``pkg_postinst()``. Any
-failure of a ``pkg_postinst()`` script (including exit 1) triggers an
-error during the
-:ref:`ref-tasks-rootfs` task.
-
-If you have recipes that use ``pkg_postinst`` function and they require
-the use of non-standard native tools that have dependencies during
-root filesystem construction, you need to use the
-:term:`PACKAGE_WRITE_DEPS`
-variable in your recipe to list these tools. If you do not use this
-variable, the tools might be missing and execution of the
-post-installation script is deferred until first boot. Deferring the
-script to the first boot is undesirable and impossible for read-only
-root filesystems.
-
-.. note::
-
- There is equivalent support for pre-install, pre-uninstall, and post-uninstall
- scripts by way of ``pkg_preinst``, ``pkg_prerm``, and ``pkg_postrm``,
- respectively. These scrips work in exactly the same way as does
- ``pkg_postinst`` with the exception that they run at different times. Also,
- because of when they run, they are not applicable to being run at image
- creation time like ``pkg_postinst``.
-
-Testing
--------
-
-The final step for completing your recipe is to be sure that the
-software you built runs correctly. To accomplish runtime testing, add
-the build's output packages to your image and test them on the target.
-
-For information on how to customize your image by adding specific
-packages, see ":ref:`dev-manual/common-tasks:customizing images`" section.
-
-Examples
---------
-
-To help summarize how to write a recipe, this section provides some
-examples given various scenarios:
-
-- Recipes that use local files
-
-- Using an Autotooled package
-
-- Using a Makefile-based package
-
-- Splitting an application into multiple packages
-
-- Adding binaries to an image
-
-Single .c File Package (Hello World!)
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-Building an application from a single file that is stored locally (e.g.
-under ``files``) requires a recipe that has the file listed in the
-:term:`SRC_URI` variable. Additionally, you need to manually write the
-``do_compile`` and ``do_install`` tasks. The :term:`S` variable defines the
-directory containing the source code, which is set to
-:term:`WORKDIR` in this case - the
-directory BitBake uses for the build.
-::
-
- SUMMARY = "Simple helloworld application"
- SECTION = "examples"
- LICENSE = "MIT"
- LIC_FILES_CHKSUM = "file://${COMMON_LICENSE_DIR}/MIT;md5=0835ade698e0bcf8506ecda2f7b4f302"
-
- SRC_URI = "file://helloworld.c"
-
- S = "${WORKDIR}"
-
- do_compile() {
- ${CC} ${LDFLAGS} helloworld.c -o helloworld
- }
-
- do_install() {
- install -d ${D}${bindir}
- install -m 0755 helloworld ${D}${bindir}
- }
-
-By default, the ``helloworld``, ``helloworld-dbg``, and
-``helloworld-dev`` packages are built. For information on how to
-customize the packaging process, see the
-":ref:`dev-manual/common-tasks:splitting an application into multiple packages`"
-section.
-
-Autotooled Package
-~~~~~~~~~~~~~~~~~~
-
-Applications that use Autotools such as ``autoconf`` and ``automake``
-require a recipe that has a source archive listed in :term:`SRC_URI` and
-also inherit the
-:ref:`autotools <ref-classes-autotools>` class,
-which contains the definitions of all the steps needed to build an
-Autotool-based application. The result of the build is automatically
-packaged. And, if the application uses NLS for localization, packages
-with local information are generated (one package per language).
-Following is one example: (``hello_2.3.bb``)
-::
-
- SUMMARY = "GNU Helloworld application"
- SECTION = "examples"
- LICENSE = "GPL-2.0-or-later"
- LIC_FILES_CHKSUM = "file://COPYING;md5=751419260aa954499f7abaabaa882bbe"
-
- SRC_URI = "${GNU_MIRROR}/hello/hello-${PV}.tar.gz"
-
- inherit autotools gettext
-
-The variable :term:`LIC_FILES_CHKSUM` is used to track source license
-changes as described in the
-":ref:`dev-manual/common-tasks:tracking license changes`" section in
-the Yocto Project Overview and Concepts Manual. You can quickly create
-Autotool-based recipes in a manner similar to the previous example.
-
-Makefile-Based Package
-~~~~~~~~~~~~~~~~~~~~~~
-
-Applications that use GNU ``make`` also require a recipe that has the
-source archive listed in :term:`SRC_URI`. You do not need to add a
-``do_compile`` step since by default BitBake starts the ``make`` command
-to compile the application. If you need additional ``make`` options, you
-should store them in the
-:term:`EXTRA_OEMAKE` or
-:term:`PACKAGECONFIG_CONFARGS`
-variables. BitBake passes these options into the GNU ``make``
-invocation. Note that a ``do_install`` task is still required.
-Otherwise, BitBake runs an empty ``do_install`` task by default.
-
-Some applications might require extra parameters to be passed to the
-compiler. For example, the application might need an additional header
-path. You can accomplish this by adding to the :term:`CFLAGS` variable. The
-following example shows this::
-
- CFLAGS:prepend = "-I ${S}/include "
-
-In the following example, ``lz4`` is a makefile-based package::
-
- SUMMARY = "Extremely Fast Compression algorithm"
- DESCRIPTION = "LZ4 is a very fast lossless compression algorithm, providing compression speed at 400 MB/s per core, scalable with multi-cores CPU. It also features an extremely fast decoder, with speed in multiple GB/s per core, typically reaching RAM speed limits on multi-core systems."
- HOMEPAGE = "https://github.com/lz4/lz4"
-
- LICENSE = "BSD-2-Clause | GPL-2.0-only"
- LIC_FILES_CHKSUM = "file://lib/LICENSE;md5=ebc2ea4814a64de7708f1571904b32cc \
- file://programs/COPYING;md5=b234ee4d69f5fce4486a80fdaf4a4263 \
- file://LICENSE;md5=d57c0d21cb917fb4e0af2454aa48b956 \
- "
-
- PE = "1"
-
- SRCREV = "d44371841a2f1728a3f36839fd4b7e872d0927d3"
-
- SRC_URI = "git://github.com/lz4/lz4.git;branch=release;protocol=https \
- file://CVE-2021-3520.patch \
- "
- UPSTREAM_CHECK_GITTAGREGEX = "v(?P<pver>.*)"
-
- S = "${WORKDIR}/git"
-
- # Fixed in r118, which is larger than the current version.
- CVE_CHECK_IGNORE += "CVE-2014-4715"
-
- EXTRA_OEMAKE = "PREFIX=${prefix} CC='${CC}' CFLAGS='${CFLAGS}' DESTDIR=${D} LIBDIR=${libdir} INCLUDEDIR=${includedir} BUILD_STATIC=no"
-
- do_install() {
- oe_runmake install
- }
-
- BBCLASSEXTEND = "native nativesdk"
-
-Splitting an Application into Multiple Packages
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-You can use the variables :term:`PACKAGES` and :term:`FILES` to split an
-application into multiple packages.
-
-Following is an example that uses the ``libxpm`` recipe. By default,
-this recipe generates a single package that contains the library along
-with a few binaries. You can modify the recipe to split the binaries
-into separate packages::
-
- require xorg-lib-common.inc
-
- SUMMARY = "Xpm: X Pixmap extension library"
- LICENSE = "MIT"
- LIC_FILES_CHKSUM = "file://COPYING;md5=51f4270b012ecd4ab1a164f5f4ed6cf7"
- DEPENDS += "libxext libsm libxt"
- PE = "1"
-
- XORG_PN = "libXpm"
-
- PACKAGES =+ "sxpm cxpm"
- FILES:cxpm = "${bindir}/cxpm"
- FILES:sxpm = "${bindir}/sxpm"
-
-In the previous example, we want to ship the ``sxpm`` and ``cxpm``
-binaries in separate packages. Since ``bindir`` would be packaged into
-the main :term:`PN` package by default, we prepend the :term:`PACKAGES` variable
-so additional package names are added to the start of list. This results
-in the extra ``FILES:*`` variables then containing information that
-define which files and directories go into which packages. Files
-included by earlier packages are skipped by latter packages. Thus, the
-main :term:`PN` package does not include the above listed files.
-
-Packaging Externally Produced Binaries
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-Sometimes, you need to add pre-compiled binaries to an image. For
-example, suppose that there are binaries for proprietary code,
-created by a particular division of a company. Your part of the company
-needs to use those binaries as part of an image that you are building
-using the OpenEmbedded build system. Since you only have the binaries
-and not the source code, you cannot use a typical recipe that expects to
-fetch the source specified in
-:term:`SRC_URI` and then compile it.
-
-One method is to package the binaries and then install them as part of
-the image. Generally, it is not a good idea to package binaries since,
-among other things, it can hinder the ability to reproduce builds and
-could lead to compatibility problems with ABI in the future. However,
-sometimes you have no choice.
-
-The easiest solution is to create a recipe that uses the
-:ref:`bin_package <ref-classes-bin-package>` class
-and to be sure that you are using default locations for build artifacts.
-In most cases, the :ref:`bin_package <ref-classes-bin-package>` class handles "skipping" the
-configure and compile steps as well as sets things up to grab packages
-from the appropriate area. In particular, this class sets ``noexec`` on
-both the :ref:`ref-tasks-configure`
-and :ref:`ref-tasks-compile` tasks,
-sets ``FILES:${PN}`` to "/" so that it picks up all files, and sets up a
-:ref:`ref-tasks-install` task, which
-effectively copies all files from ``${S}`` to ``${D}``. The
-:ref:`bin_package <ref-classes-bin-package>` class works well when the files extracted into ``${S}``
-are already laid out in the way they should be laid out on the target.
-For more information on these variables, see the
-:term:`FILES`,
-:term:`PN`,
-:term:`S`, and
-:term:`D` variables in the Yocto Project
-Reference Manual's variable glossary.
-
-.. note::
-
- - Using :term:`DEPENDS` is a good
- idea even for components distributed in binary form, and is often
- necessary for shared libraries. For a shared library, listing the
- library dependencies in :term:`DEPENDS` makes sure that the libraries
- are available in the staging sysroot when other recipes link
- against the library, which might be necessary for successful
- linking.
-
- - Using :term:`DEPENDS` also allows runtime dependencies between
- packages to be added automatically. See the
- ":ref:`overview-manual/concepts:automatically added runtime dependencies`"
- section in the Yocto Project Overview and Concepts Manual for more
- information.
-
-If you cannot use the :ref:`bin_package <ref-classes-bin-package>` class, you need to be sure you are
-doing the following:
-
-- Create a recipe where the
- :ref:`ref-tasks-configure` and
- :ref:`ref-tasks-compile` tasks do
- nothing: It is usually sufficient to just not define these tasks in
- the recipe, because the default implementations do nothing unless a
- Makefile is found in
- ``${``\ :term:`S`\ ``}``.
-
- If ``${S}`` might contain a Makefile, or if you inherit some class
- that replaces ``do_configure`` and ``do_compile`` with custom
- versions, then you can use the
- ``[``\ :ref:`noexec <bitbake-user-manual/bitbake-user-manual-metadata:variable flags>`\ ``]``
- flag to turn the tasks into no-ops, as follows::
-
- do_configure[noexec] = "1"
- do_compile[noexec] = "1"
-
- Unlike
- :ref:`bitbake:bitbake-user-manual/bitbake-user-manual-metadata:deleting a task`,
- using the flag preserves the dependency chain from the
- :ref:`ref-tasks-fetch`,
- :ref:`ref-tasks-unpack`, and
- :ref:`ref-tasks-patch` tasks to the
- :ref:`ref-tasks-install` task.
-
-- Make sure your ``do_install`` task installs the binaries
- appropriately.
-
-- Ensure that you set up :term:`FILES`
- (usually
- ``FILES:${``\ :term:`PN`\ ``}``) to
- point to the files you have installed, which of course depends on
- where you have installed them and whether those files are in
- different locations than the defaults.
-
-Following Recipe Style Guidelines
----------------------------------
-
-When writing recipes, it is good to conform to existing style
-guidelines. The :oe_wiki:`OpenEmbedded Styleguide </Styleguide>` wiki page
-provides rough guidelines for preferred recipe style.
-
-It is common for existing recipes to deviate a bit from this style.
-However, aiming for at least a consistent style is a good idea. Some
-practices, such as omitting spaces around ``=`` operators in assignments
-or ordering recipe components in an erratic way, are widely seen as poor
-style.
-
-Recipe Syntax
--------------
-
-Understanding recipe file syntax is important for writing recipes. The
-following list overviews the basic items that make up a BitBake recipe
-file. For more complete BitBake syntax descriptions, see the
-":doc:`bitbake-user-manual/bitbake-user-manual-metadata`"
-chapter of the BitBake User Manual.
-
-- *Variable Assignments and Manipulations:* Variable assignments allow
- a value to be assigned to a variable. The assignment can be static
- text or might include the contents of other variables. In addition to
- the assignment, appending and prepending operations are also
- supported.
-
- The following example shows some of the ways you can use variables in
- recipes::
-
- S = "${WORKDIR}/postfix-${PV}"
- CFLAGS += "-DNO_ASM"
- SRC_URI:append = " file://fixup.patch"
-
-- *Functions:* Functions provide a series of actions to be performed.
- You usually use functions to override the default implementation of a
- task function or to complement a default function (i.e. append or
- prepend to an existing function). Standard functions use ``sh`` shell
- syntax, although access to OpenEmbedded variables and internal
- methods are also available.
-
- Here is an example function from the ``sed`` recipe::
-
- do_install () {
- autotools_do_install
- install -d ${D}${base_bindir}
- mv ${D}${bindir}/sed ${D}${base_bindir}/sed
- rmdir ${D}${bindir}/
- }
-
- It is
- also possible to implement new functions that are called between
- existing tasks as long as the new functions are not replacing or
- complementing the default functions. You can implement functions in
- Python instead of shell. Both of these options are not seen in the
- majority of recipes.
-
-- *Keywords:* BitBake recipes use only a few keywords. You use keywords
- to include common functions (``inherit``), load parts of a recipe
- from other files (``include`` and ``require``) and export variables
- to the environment (``export``).
-
- The following example shows the use of some of these keywords::
-
- export POSTCONF = "${STAGING_BINDIR}/postconf"
- inherit autoconf
- require otherfile.inc
-
-- *Comments (#):* Any lines that begin with the hash character (``#``)
- are treated as comment lines and are ignored::
-
- # This is a comment
-
-This next list summarizes the most important and most commonly used
-parts of the recipe syntax. For more information on these parts of the
-syntax, you can reference the
-:doc:`bitbake:bitbake-user-manual/bitbake-user-manual-metadata` chapter
-in the BitBake User Manual.
-
-- *Line Continuation (\\):* Use the backward slash (``\``) character to
- split a statement over multiple lines. Place the slash character at
- the end of the line that is to be continued on the next line::
-
- VAR = "A really long \
- line"
-
- .. note::
-
- You cannot have any characters including spaces or tabs after the
- slash character.
-
-- *Using Variables (${VARNAME}):* Use the ``${VARNAME}`` syntax to
- access the contents of a variable::
-
- SRC_URI = "${SOURCEFORGE_MIRROR}/libpng/zlib-${PV}.tar.gz"
-
- .. note::
-
- It is important to understand that the value of a variable
- expressed in this form does not get substituted automatically. The
- expansion of these expressions happens on-demand later (e.g.
- usually when a function that makes reference to the variable
- executes). This behavior ensures that the values are most
- appropriate for the context in which they are finally used. On the
- rare occasion that you do need the variable expression to be
- expanded immediately, you can use the
- :=
- operator instead of
- =
- when you make the assignment, but this is not generally needed.
-
-- *Quote All Assignments ("value"):* Use double quotes around values in
- all variable assignments (e.g. ``"value"``). Following is an example::
-
- VAR1 = "${OTHERVAR}"
- VAR2 = "The version is ${PV}"
-
-- *Conditional Assignment (?=):* Conditional assignment is used to
- assign a value to a variable, but only when the variable is currently
- unset. Use the question mark followed by the equal sign (``?=``) to
- make a "soft" assignment used for conditional assignment. Typically,
- "soft" assignments are used in the ``local.conf`` file for variables
- that are allowed to come through from the external environment.
-
- Here is an example where ``VAR1`` is set to "New value" if it is
- currently empty. However, if ``VAR1`` has already been set, it
- remains unchanged::
-
- VAR1 ?= "New value"
-
- In this next example, ``VAR1`` is left with the value "Original value"::
-
- VAR1 = "Original value"
- VAR1 ?= "New value"
-
-- *Appending (+=):* Use the plus character followed by the equals sign
- (``+=``) to append values to existing variables.
-
- .. note::
-
- This operator adds a space between the existing content of the
- variable and the new content.
-
- Here is an example::
-
- SRC_URI += "file://fix-makefile.patch"
-
-- *Prepending (=+):* Use the equals sign followed by the plus character
- (``=+``) to prepend values to existing variables.
-
- .. note::
-
- This operator adds a space between the new content and the
- existing content of the variable.
-
- Here is an example::
-
- VAR =+ "Starts"
-
-- *Appending (:append):* Use the ``:append`` operator to append values
- to existing variables. This operator does not add any additional
- space. Also, the operator is applied after all the ``+=``, and ``=+``
- operators have been applied and after all ``=`` assignments have
- occurred.
-
- The following example shows the space being explicitly added to the
- start to ensure the appended value is not merged with the existing
- value::
-
- SRC_URI:append = " file://fix-makefile.patch"
-
- You can also use
- the ``:append`` operator with overrides, which results in the actions
- only being performed for the specified target or machine::
-
- SRC_URI:append:sh4 = " file://fix-makefile.patch"
-
-- *Prepending (:prepend):* Use the ``:prepend`` operator to prepend
- values to existing variables. This operator does not add any
- additional space. Also, the operator is applied after all the ``+=``,
- and ``=+`` operators have been applied and after all ``=``
- assignments have occurred.
-
- The following example shows the space being explicitly added to the
- end to ensure the prepended value is not merged with the existing
- value::
-
- CFLAGS:prepend = "-I${S}/myincludes "
-
- You can also use the
- ``:prepend`` operator with overrides, which results in the actions
- only being performed for the specified target or machine::
-
- CFLAGS:prepend:sh4 = "-I${S}/myincludes "
-
-- *Overrides:* You can use overrides to set a value conditionally,
- typically based on how the recipe is being built. For example, to set
- the :term:`KBRANCH` variable's
- value to "standard/base" for any target
- :term:`MACHINE`, except for
- qemuarm where it should be set to "standard/arm-versatile-926ejs",
- you would do the following::
-
- KBRANCH = "standard/base"
- KBRANCH:qemuarm = "standard/arm-versatile-926ejs"
-
- Overrides are also used to separate
- alternate values of a variable in other situations. For example, when
- setting variables such as
- :term:`FILES` and
- :term:`RDEPENDS` that are
- specific to individual packages produced by a recipe, you should
- always use an override that specifies the name of the package.
-
-- *Indentation:* Use spaces for indentation rather than tabs. For
- shell functions, both currently work. However, it is a policy
- decision of the Yocto Project to use tabs in shell functions. Realize
- that some layers have a policy to use spaces for all indentation.
-
-- *Using Python for Complex Operations:* For more advanced processing,
- it is possible to use Python code during variable assignments (e.g.
- search and replacement on a variable).
-
- You indicate Python code using the ``${@python_code}`` syntax for the
- variable assignment::
-
- SRC_URI = "ftp://ftp.info-zip.org/pub/infozip/src/zip${@d.getVar('PV',1).replace('.', '')}.tgz
-
-- *Shell Function Syntax:* Write shell functions as if you were writing
- a shell script when you describe a list of actions to take. You
- should ensure that your script works with a generic ``sh`` and that
- it does not require any ``bash`` or other shell-specific
- functionality. The same considerations apply to various system
- utilities (e.g. ``sed``, ``grep``, ``awk``, and so forth) that you
- might wish to use. If in doubt, you should check with multiple
- implementations - including those from BusyBox.
-
-Adding a New Machine
-====================
-
-Adding a new machine to the Yocto Project is a straightforward process.
-This section describes how to add machines that are similar to those
-that the Yocto Project already supports.
-
-.. note::
-
- Although well within the capabilities of the Yocto Project, adding a
- totally new architecture might require changes to ``gcc``/``glibc``
- and to the site information, which is beyond the scope of this
- manual.
-
-For a complete example that shows how to add a new machine, see the
-":ref:`bsp-guide/bsp:creating a new bsp layer using the \`\`bitbake-layers\`\` script`"
-section in the Yocto Project Board Support Package (BSP) Developer's
-Guide.
-
-Adding the Machine Configuration File
--------------------------------------
-
-To add a new machine, you need to add a new machine configuration file
-to the layer's ``conf/machine`` directory. This configuration file
-provides details about the device you are adding.
-
-The OpenEmbedded build system uses the root name of the machine
-configuration file to reference the new machine. For example, given a
-machine configuration file named ``crownbay.conf``, the build system
-recognizes the machine as "crownbay".
-
-The most important variables you must set in your machine configuration
-file or include from a lower-level configuration file are as follows:
-
-- :term:`TARGET_ARCH` (e.g. "arm")
-
-- ``PREFERRED_PROVIDER_virtual/kernel``
-
-- :term:`MACHINE_FEATURES` (e.g. "apm screen wifi")
-
-You might also need these variables:
-
-- :term:`SERIAL_CONSOLES` (e.g. "115200;ttyS0 115200;ttyS1")
-
-- :term:`KERNEL_IMAGETYPE` (e.g. "zImage")
-
-- :term:`IMAGE_FSTYPES` (e.g. "tar.gz jffs2")
-
-You can find full details on these variables in the reference section.
-You can leverage existing machine ``.conf`` files from
-``meta-yocto-bsp/conf/machine/``.
-
-Adding a Kernel for the Machine
--------------------------------
-
-The OpenEmbedded build system needs to be able to build a kernel for the
-machine. You need to either create a new kernel recipe for this machine,
-or extend an existing kernel recipe. You can find several kernel recipe
-examples in the Source Directory at ``meta/recipes-kernel/linux`` that
-you can use as references.
-
-If you are creating a new kernel recipe, normal recipe-writing rules
-apply for setting up a :term:`SRC_URI`. Thus, you need to specify any
-necessary patches and set :term:`S` to point at the source code. You need to
-create a ``do_configure`` task that configures the unpacked kernel with
-a ``defconfig`` file. You can do this by using a ``make defconfig``
-command or, more commonly, by copying in a suitable ``defconfig`` file
-and then running ``make oldconfig``. By making use of ``inherit kernel``
-and potentially some of the ``linux-*.inc`` files, most other
-functionality is centralized and the defaults of the class normally work
-well.
-
-If you are extending an existing kernel recipe, it is usually a matter
-of adding a suitable ``defconfig`` file. The file needs to be added into
-a location similar to ``defconfig`` files used for other machines in a
-given kernel recipe. A possible way to do this is by listing the file in
-the :term:`SRC_URI` and adding the machine to the expression in
-:term:`COMPATIBLE_MACHINE`::
-
- COMPATIBLE_MACHINE = '(qemux86|qemumips)'
-
-For more information on ``defconfig`` files, see the
-":ref:`kernel-dev/common:changing the configuration`"
-section in the Yocto Project Linux Kernel Development Manual.
-
-Adding a Formfactor Configuration File
---------------------------------------
-
-A formfactor configuration file provides information about the target
-hardware for which the image is being built and information that the
-build system cannot obtain from other sources such as the kernel. Some
-examples of information contained in a formfactor configuration file
-include framebuffer orientation, whether or not the system has a
-keyboard, the positioning of the keyboard in relation to the screen, and
-the screen resolution.
-
-The build system uses reasonable defaults in most cases. However, if
-customization is necessary, you need to create a ``machconfig`` file in
-the ``meta/recipes-bsp/formfactor/files`` directory. This directory
-contains directories for specific machines such as ``qemuarm`` and
-``qemux86``. For information about the settings available and the
-defaults, see the ``meta/recipes-bsp/formfactor/files/config`` file
-found in the same area.
-
-Following is an example for "qemuarm" machine::
-
- HAVE_TOUCHSCREEN=1
- HAVE_KEYBOARD=1
- DISPLAY_CAN_ROTATE=0
- DISPLAY_ORIENTATION=0
- #DISPLAY_WIDTH_PIXELS=640
- #DISPLAY_HEIGHT_PIXELS=480
- #DISPLAY_BPP=16
- DISPLAY_DPI=150
- DISPLAY_SUBPIXEL_ORDER=vrgb
-
-Upgrading Recipes
-=================
-
-Over time, upstream developers publish new versions for software built
-by layer recipes. It is recommended to keep recipes up-to-date with
-upstream version releases.
-
-While there are several methods to upgrade a recipe, you might
-consider checking on the upgrade status of a recipe first. You can do so
-using the ``devtool check-upgrade-status`` command. See the
-":ref:`devtool-checking-on-the-upgrade-status-of-a-recipe`"
-section in the Yocto Project Reference Manual for more information.
-
-The remainder of this section describes three ways you can upgrade a
-recipe. You can use the Automated Upgrade Helper (AUH) to set up
-automatic version upgrades. Alternatively, you can use
-``devtool upgrade`` to set up semi-automatic version upgrades. Finally,
-you can manually upgrade a recipe by editing the recipe itself.
-
-Using the Auto Upgrade Helper (AUH)
------------------------------------
-
-The AUH utility works in conjunction with the OpenEmbedded build system
-in order to automatically generate upgrades for recipes based on new
-versions being published upstream. Use AUH when you want to create a
-service that performs the upgrades automatically and optionally sends
-you an email with the results.
-
-AUH allows you to update several recipes with a single use. You can also
-optionally perform build and integration tests using images with the
-results saved to your hard drive and emails of results optionally sent
-to recipe maintainers. Finally, AUH creates Git commits with appropriate
-commit messages in the layer's tree for the changes made to recipes.
-
-.. note::
-
- In some conditions, you should not use AUH to upgrade recipes
- and should instead use either ``devtool upgrade`` or upgrade your
- recipes manually:
-
- - When AUH cannot complete the upgrade sequence. This situation
- usually results because custom patches carried by the recipe
- cannot be automatically rebased to the new version. In this case,
- ``devtool upgrade`` allows you to manually resolve conflicts.
-
- - When for any reason you want fuller control over the upgrade
- process. For example, when you want special arrangements for
- testing.
-
-The following steps describe how to set up the AUH utility:
-
-1. *Be Sure the Development Host is Set Up:* You need to be sure that
- your development host is set up to use the Yocto Project. For
- information on how to set up your host, see the
- ":ref:`dev-manual/start:Preparing the Build Host`" section.
-
-2. *Make Sure Git is Configured:* The AUH utility requires Git to be
- configured because AUH uses Git to save upgrades. Thus, you must have
- Git user and email configured. The following command shows your
- configurations::
-
- $ git config --list
-
- If you do not have the user and
- email configured, you can use the following commands to do so::
-
- $ git config --global user.name some_name
- $ git config --global user.email username@domain.com
-
-3. *Clone the AUH Repository:* To use AUH, you must clone the repository
- onto your development host. The following command uses Git to create
- a local copy of the repository on your system::
-
- $ git clone git://git.yoctoproject.org/auto-upgrade-helper
- Cloning into 'auto-upgrade-helper'... remote: Counting objects: 768, done.
- remote: Compressing objects: 100% (300/300), done.
- remote: Total 768 (delta 499), reused 703 (delta 434)
- Receiving objects: 100% (768/768), 191.47 KiB | 98.00 KiB/s, done.
- Resolving deltas: 100% (499/499), done.
- Checking connectivity... done.
-
- AUH is not part of the :term:`OpenEmbedded-Core (OE-Core)` or
- :term:`Poky` repositories.
-
-4. *Create a Dedicated Build Directory:* Run the
- :ref:`structure-core-script`
- script to create a fresh build directory that you use exclusively for
- running the AUH utility::
-
- $ cd poky
- $ source oe-init-build-env your_AUH_build_directory
-
- Re-using an existing build directory and its configurations is not
- recommended as existing settings could cause AUH to fail or behave
- undesirably.
-
-5. *Make Configurations in Your Local Configuration File:* Several
- settings are needed in the ``local.conf`` file in the build
- directory you just created for AUH. Make these following
- configurations:
-
- - If you want to enable :ref:`Build
- History <dev-manual/common-tasks:maintaining build output quality>`,
- which is optional, you need the following lines in the
- ``conf/local.conf`` file::
-
- INHERIT =+ "buildhistory"
- BUILDHISTORY_COMMIT = "1"
-
- With this configuration and a successful
- upgrade, a build history "diff" file appears in the
- ``upgrade-helper/work/recipe/buildhistory-diff.txt`` file found in
- your build directory.
-
- - If you want to enable testing through the
- :ref:`testimage <ref-classes-testimage*>`
- class, which is optional, you need to have the following set in
- your ``conf/local.conf`` file::
-
- INHERIT += "testimage"
-
- .. note::
-
- If your distro does not enable by default ptest, which Poky
- does, you need the following in your ``local.conf`` file::
-
- DISTRO_FEATURES:append = " ptest"
-
-
-6. *Optionally Start a vncserver:* If you are running in a server
- without an X11 session, you need to start a vncserver::
-
- $ vncserver :1
- $ export DISPLAY=:1
-
-7. *Create and Edit an AUH Configuration File:* You need to have the
- ``upgrade-helper/upgrade-helper.conf`` configuration file in your
- build directory. You can find a sample configuration file in the
- :yocto_git:`AUH source repository </auto-upgrade-helper/tree/>`.
-
- Read through the sample file and make configurations as needed. For
- example, if you enabled build history in your ``local.conf`` as
- described earlier, you must enable it in ``upgrade-helper.conf``.
-
- Also, if you are using the default ``maintainers.inc`` file supplied
- with Poky and located in ``meta-yocto`` and you do not set a
- "maintainers_whitelist" or "global_maintainer_override" in the
- ``upgrade-helper.conf`` configuration, and you specify "-e all" on
- the AUH command-line, the utility automatically sends out emails to
- all the default maintainers. Please avoid this.
-
-This next set of examples describes how to use the AUH:
-
-- *Upgrading a Specific Recipe:* To upgrade a specific recipe, use the
- following form::
-
- $ upgrade-helper.py recipe_name
-
- For example, this command upgrades the ``xmodmap`` recipe::
-
- $ upgrade-helper.py xmodmap
-
-- *Upgrading a Specific Recipe to a Particular Version:* To upgrade a
- specific recipe to a particular version, use the following form::
-
- $ upgrade-helper.py recipe_name -t version
-
- For example, this command upgrades the ``xmodmap`` recipe to version 1.2.3::
-
- $ upgrade-helper.py xmodmap -t 1.2.3
-
-- *Upgrading all Recipes to the Latest Versions and Suppressing Email
- Notifications:* To upgrade all recipes to their most recent versions
- and suppress the email notifications, use the following command::
-
- $ upgrade-helper.py all
-
-- *Upgrading all Recipes to the Latest Versions and Send Email
- Notifications:* To upgrade all recipes to their most recent versions
- and send email messages to maintainers for each attempted recipe as
- well as a status email, use the following command::
-
- $ upgrade-helper.py -e all
-
-Once you have run the AUH utility, you can find the results in the AUH
-build directory::
-
- ${BUILDDIR}/upgrade-helper/timestamp
-
-The AUH utility
-also creates recipe update commits from successful upgrade attempts in
-the layer tree.
-
-You can easily set up to run the AUH utility on a regular basis by using
-a cron job. See the
-:yocto_git:`weeklyjob.sh </auto-upgrade-helper/tree/weeklyjob.sh>`
-file distributed with the utility for an example.
-
-Using ``devtool upgrade``
--------------------------
-
-As mentioned earlier, an alternative method for upgrading recipes to
-newer versions is to use
-:doc:`devtool upgrade </ref-manual/devtool-reference>`.
-You can read about ``devtool upgrade`` in general in the
-":ref:`sdk-manual/extensible:use \`\`devtool upgrade\`\` to create a version of the recipe that supports a newer version of the software`"
-section in the Yocto Project Application Development and the Extensible
-Software Development Kit (eSDK) Manual.
-
-To see all the command-line options available with ``devtool upgrade``,
-use the following help command::
-
- $ devtool upgrade -h
-
-If you want to find out what version a recipe is currently at upstream
-without any attempt to upgrade your local version of the recipe, you can
-use the following command::
-
- $ devtool latest-version recipe_name
-
-As mentioned in the previous section describing AUH, ``devtool upgrade``
-works in a less-automated manner than AUH. Specifically,
-``devtool upgrade`` only works on a single recipe that you name on the
-command line, cannot perform build and integration testing using images,
-and does not automatically generate commits for changes in the source
-tree. Despite all these "limitations", ``devtool upgrade`` updates the
-recipe file to the new upstream version and attempts to rebase custom
-patches contained by the recipe as needed.
-
-.. note::
-
- AUH uses much of ``devtool upgrade`` behind the scenes making AUH somewhat
- of a "wrapper" application for ``devtool upgrade``.
-
-A typical scenario involves having used Git to clone an upstream
-repository that you use during build operations. Because you have built the
-recipe in the past, the layer is likely added to your
-configuration already. If for some reason, the layer is not added, you
-could add it easily using the
-":ref:`bitbake-layers <bsp-guide/bsp:creating a new bsp layer using the \`\`bitbake-layers\`\` script>`"
-script. For example, suppose you use the ``nano.bb`` recipe from the
-``meta-oe`` layer in the ``meta-openembedded`` repository. For this
-example, assume that the layer has been cloned into following area::
-
- /home/scottrif/meta-openembedded
-
-The following command from your
-:term:`Build Directory` adds the layer to
-your build configuration (i.e. ``${BUILDDIR}/conf/bblayers.conf``)::
-
- $ bitbake-layers add-layer /home/scottrif/meta-openembedded/meta-oe
- NOTE: Starting bitbake server...
- Parsing recipes: 100% |##########################################| Time: 0:00:55
- Parsing of 1431 .bb files complete (0 cached, 1431 parsed). 2040 targets, 56 skipped, 0 masked, 0 errors.
- Removing 12 recipes from the x86_64 sysroot: 100% |##############| Time: 0:00:00
- Removing 1 recipes from the x86_64_i586 sysroot: 100% |##########| Time: 0:00:00
- Removing 5 recipes from the i586 sysroot: 100% |#################| Time: 0:00:00
- Removing 5 recipes from the qemux86 sysroot: 100% |##############| Time: 0:00:00
-
-For this example, assume that the ``nano.bb`` recipe that
-is upstream has a 2.9.3 version number. However, the version in the
-local repository is 2.7.4. The following command from your build
-directory automatically upgrades the recipe for you:
-
-.. note::
-
- Using the ``-V`` option is not necessary. Omitting the version number causes
- ``devtool upgrade`` to upgrade the recipe to the most recent version.
-
-::
-
- $ devtool upgrade nano -V 2.9.3
- NOTE: Starting bitbake server...
- NOTE: Creating workspace layer in /home/scottrif/poky/build/workspace
- Parsing recipes: 100% |##########################################| Time: 0:00:46
- Parsing of 1431 .bb files complete (0 cached, 1431 parsed). 2040 targets, 56 skipped, 0 masked, 0 errors.
- NOTE: Extracting current version source...
- NOTE: Resolving any missing task queue dependencies
- .
- .
- .
- NOTE: Executing SetScene Tasks
- NOTE: Executing RunQueue Tasks
- NOTE: Tasks Summary: Attempted 74 tasks of which 72 didn't need to be rerun and all succeeded.
- Adding changed files: 100% |#####################################| Time: 0:00:00
- NOTE: Upgraded source extracted to /home/scottrif/poky/build/workspace/sources/nano
- NOTE: New recipe is /home/scottrif/poky/build/workspace/recipes/nano/nano_2.9.3.bb
-
-Continuing with this example, you can use ``devtool build`` to build the
-newly upgraded recipe::
-
- $ devtool build nano
- NOTE: Starting bitbake server...
- Loading cache: 100% |################################################################################################| Time: 0:00:01
- Loaded 2040 entries from dependency cache.
- Parsing recipes: 100% |##############################################################################################| Time: 0:00:00
- Parsing of 1432 .bb files complete (1431 cached, 1 parsed). 2041 targets, 56 skipped, 0 masked, 0 errors.
- NOTE: Resolving any missing task queue dependencies
- .
- .
- .
- NOTE: Executing SetScene Tasks
- NOTE: Executing RunQueue Tasks
- NOTE: nano: compiling from external source tree /home/scottrif/poky/build/workspace/sources/nano
- NOTE: Tasks Summary: Attempted 520 tasks of which 304 didn't need to be rerun and all succeeded.
-
-Within the ``devtool upgrade`` workflow, you can
-deploy and test your rebuilt software. For this example,
-however, running ``devtool finish`` cleans up the workspace once the
-source in your workspace is clean. This usually means using Git to stage
-and submit commits for the changes generated by the upgrade process.
-
-Once the tree is clean, you can clean things up in this example with the
-following command from the ``${BUILDDIR}/workspace/sources/nano``
-directory::
-
- $ devtool finish nano meta-oe
- NOTE: Starting bitbake server...
- Loading cache: 100% |################################################################################################| Time: 0:00:00
- Loaded 2040 entries from dependency cache.
- Parsing recipes: 100% |##############################################################################################| Time: 0:00:01
- Parsing of 1432 .bb files complete (1431 cached, 1 parsed). 2041 targets, 56 skipped, 0 masked, 0 errors.
- NOTE: Adding new patch 0001-nano.bb-Stuff-I-changed-when-upgrading-nano.bb.patch
- NOTE: Updating recipe nano_2.9.3.bb
- NOTE: Removing file /home/scottrif/meta-openembedded/meta-oe/recipes-support/nano/nano_2.7.4.bb
- NOTE: Moving recipe file to /home/scottrif/meta-openembedded/meta-oe/recipes-support/nano
- NOTE: Leaving source tree /home/scottrif/poky/build/workspace/sources/nano as-is; if you no longer need it then please delete it manually
-
-
-Using the ``devtool finish`` command cleans up the workspace and creates a patch
-file based on your commits. The tool puts all patch files back into the
-source directory in a sub-directory named ``nano`` in this case.
-
-Manually Upgrading a Recipe
----------------------------
-
-If for some reason you choose not to upgrade recipes using
-:ref:`dev-manual/common-tasks:Using the Auto Upgrade Helper (AUH)` or
-by :ref:`dev-manual/common-tasks:Using \`\`devtool upgrade\`\``,
-you can manually edit the recipe files to upgrade the versions.
-
-.. note::
-
- Manually updating multiple recipes scales poorly and involves many
- steps. The recommendation to upgrade recipe versions is through AUH
- or ``devtool upgrade``, both of which automate some steps and provide
- guidance for others needed for the manual process.
-
-To manually upgrade recipe versions, follow these general steps:
-
-1. *Change the Version:* Rename the recipe such that the version (i.e.
- the :term:`PV` part of the recipe name)
- changes appropriately. If the version is not part of the recipe name,
- change the value as it is set for :term:`PV` within the recipe itself.
-
-2. *Update* :term:`SRCREV` *if Needed*: If the source code your recipe builds
- is fetched from Git or some other version control system, update
- :term:`SRCREV` to point to the
- commit hash that matches the new version.
-
-3. *Build the Software:* Try to build the recipe using BitBake. Typical
- build failures include the following:
-
- - License statements were updated for the new version. For this
- case, you need to review any changes to the license and update the
- values of :term:`LICENSE` and
- :term:`LIC_FILES_CHKSUM`
- as needed.
-
- .. note::
-
- License changes are often inconsequential. For example, the
- license text's copyright year might have changed.
-
- - Custom patches carried by the older version of the recipe might
- fail to apply to the new version. For these cases, you need to
- review the failures. Patches might not be necessary for the new
- version of the software if the upgraded version has fixed those
- issues. If a patch is necessary and failing, you need to rebase it
- into the new version.
-
-4. *Optionally Attempt to Build for Several Architectures:* Once you
- successfully build the new software for a given architecture, you
- could test the build for other architectures by changing the
- :term:`MACHINE` variable and
- rebuilding the software. This optional step is especially important
- if the recipe is to be released publicly.
-
-5. *Check the Upstream Change Log or Release Notes:* Checking both these
- reveals if there are new features that could break
- backwards-compatibility. If so, you need to take steps to mitigate or
- eliminate that situation.
-
-6. *Optionally Create a Bootable Image and Test:* If you want, you can
- test the new software by booting it onto actual hardware.
-
-7. *Create a Commit with the Change in the Layer Repository:* After all
- builds work and any testing is successful, you can create commits for
- any changes in the layer holding your upgraded recipe.
-
-Finding Temporary Source Code
-=============================
-
-You might find it helpful during development to modify the temporary
-source code used by recipes to build packages. For example, suppose you
-are developing a patch and you need to experiment a bit to figure out
-your solution. After you have initially built the package, you can
-iteratively tweak the source code, which is located in the
-:term:`Build Directory`, and then you can
-force a re-compile and quickly test your altered code. Once you settle
-on a solution, you can then preserve your changes in the form of
-patches.
-
-During a build, the unpacked temporary source code used by recipes to
-build packages is available in the Build Directory as defined by the
-:term:`S` variable. Below is the default
-value for the :term:`S` variable as defined in the
-``meta/conf/bitbake.conf`` configuration file in the
-:term:`Source Directory`::
-
- S = "${WORKDIR}/${BP}"
-
-You should be aware that many recipes override the
-:term:`S` variable. For example, recipes that fetch their source from Git
-usually set :term:`S` to ``${WORKDIR}/git``.
-
-.. note::
-
- The :term:`BP` represents the base recipe name, which consists of the name
- and version::
-
- BP = "${BPN}-${PV}"
-
-
-The path to the work directory for the recipe
-(:term:`WORKDIR`) is defined as
-follows::
-
- ${TMPDIR}/work/${MULTIMACH_TARGET_SYS}/${PN}/${EXTENDPE}${PV}-${PR}
-
-The actual directory depends on several things:
-
-- :term:`TMPDIR`: The top-level build
- output directory.
-
-- :term:`MULTIMACH_TARGET_SYS`:
- The target system identifier.
-
-- :term:`PN`: The recipe name.
-
-- :term:`EXTENDPE`: The epoch - (if
- :term:`PE` is not specified, which is
- usually the case for most recipes, then :term:`EXTENDPE` is blank).
-
-- :term:`PV`: The recipe version.
-
-- :term:`PR`: The recipe revision.
-
-As an example, assume a Source Directory top-level folder named
-``poky``, a default Build Directory at ``poky/build``, and a
-``qemux86-poky-linux`` machine target system. Furthermore, suppose your
-recipe is named ``foo_1.3.0.bb``. In this case, the work directory the
-build system uses to build the package would be as follows::
-
- poky/build/tmp/work/qemux86-poky-linux/foo/1.3.0-r0
-
-Using Quilt in Your Workflow
-============================
-
-`Quilt <https://savannah.nongnu.org/projects/quilt>`__ is a powerful tool
-that allows you to capture source code changes without having a clean
-source tree. This section outlines the typical workflow you can use to
-modify source code, test changes, and then preserve the changes in the
-form of a patch all using Quilt.
-
-.. note::
-
- With regard to preserving changes to source files, if you clean a
- recipe or have ``rm_work`` enabled, the
- :ref:`devtool workflow <sdk-manual/extensible:using \`\`devtool\`\` in your sdk workflow>`
- as described in the Yocto Project Application Development and the
- Extensible Software Development Kit (eSDK) manual is a safer
- development flow than the flow that uses Quilt.
-
-Follow these general steps:
-
-1. *Find the Source Code:* Temporary source code used by the
- OpenEmbedded build system is kept in the
- :term:`Build Directory`. See the
- ":ref:`dev-manual/common-tasks:finding temporary source code`" section to
- learn how to locate the directory that has the temporary source code for a
- particular package.
-
-2. *Change Your Working Directory:* You need to be in the directory that
- has the temporary source code. That directory is defined by the
- :term:`S` variable.
-
-3. *Create a New Patch:* Before modifying source code, you need to
- create a new patch. To create a new patch file, use ``quilt new`` as
- below::
-
- $ quilt new my_changes.patch
-
-4. *Notify Quilt and Add Files:* After creating the patch, you need to
- notify Quilt about the files you plan to edit. You notify Quilt by
- adding the files to the patch you just created::
-
- $ quilt add file1.c file2.c file3.c
-
-5. *Edit the Files:* Make your changes in the source code to the files
- you added to the patch.
-
-6. *Test Your Changes:* Once you have modified the source code, the
- easiest way to test your changes is by calling the ``do_compile``
- task as shown in the following example::
-
- $ bitbake -c compile -f package
-
- The ``-f`` or ``--force`` option forces the specified task to
- execute. If you find problems with your code, you can just keep
- editing and re-testing iteratively until things work as expected.
-
- .. note::
-
- All the modifications you make to the temporary source code disappear
- once you run the ``do_clean`` or ``do_cleanall`` tasks using BitBake
- (i.e. ``bitbake -c clean package`` and ``bitbake -c cleanall package``).
- Modifications will also disappear if you use the ``rm_work`` feature as
- described in the
- ":ref:`dev-manual/common-tasks:conserving disk space during builds`"
- section.
-
-7. *Generate the Patch:* Once your changes work as expected, you need to
- use Quilt to generate the final patch that contains all your
- modifications.
- ::
-
- $ quilt refresh
-
- At this point, the
- ``my_changes.patch`` file has all your edits made to the ``file1.c``,
- ``file2.c``, and ``file3.c`` files.
-
- You can find the resulting patch file in the ``patches/``
- subdirectory of the source (:term:`S`) directory.
-
-8. *Copy the Patch File:* For simplicity, copy the patch file into a
- directory named ``files``, which you can create in the same directory
- that holds the recipe (``.bb``) file or the append (``.bbappend``)
- file. Placing the patch here guarantees that the OpenEmbedded build
- system will find the patch. Next, add the patch into the :term:`SRC_URI`
- of the recipe. Here is an example::
-
- SRC_URI += "file://my_changes.patch"
-
-Using a Development Shell
-=========================
-
-When debugging certain commands or even when just editing packages,
-``devshell`` can be a useful tool. When you invoke ``devshell``, all
-tasks up to and including
-:ref:`ref-tasks-patch` are run for the
-specified target. Then, a new terminal is opened and you are placed in
-``${``\ :term:`S`\ ``}``, the source
-directory. In the new terminal, all the OpenEmbedded build-related
-environment variables are still defined so you can use commands such as
-``configure`` and ``make``. The commands execute just as if the
-OpenEmbedded build system were executing them. Consequently, working
-this way can be helpful when debugging a build or preparing software to
-be used with the OpenEmbedded build system.
-
-Following is an example that uses ``devshell`` on a target named
-``matchbox-desktop``::
-
- $ bitbake matchbox-desktop -c devshell
-
-This command spawns a terminal with a shell prompt within the
-OpenEmbedded build environment. The
-:term:`OE_TERMINAL` variable
-controls what type of shell is opened.
-
-For spawned terminals, the following occurs:
-
-- The ``PATH`` variable includes the cross-toolchain.
-
-- The ``pkgconfig`` variables find the correct ``.pc`` files.
-
-- The ``configure`` command finds the Yocto Project site files as well
- as any other necessary files.
-
-Within this environment, you can run configure or compile commands as if
-they were being run by the OpenEmbedded build system itself. As noted
-earlier, the working directory also automatically changes to the Source
-Directory (:term:`S`).
-
-To manually run a specific task using ``devshell``, run the
-corresponding ``run.*`` script in the
-``${``\ :term:`WORKDIR`\ ``}/temp``
-directory (e.g., ``run.do_configure.``\ `pid`). If a task's script does
-not exist, which would be the case if the task was skipped by way of the
-sstate cache, you can create the task by first running it outside of the
-``devshell``::
-
- $ bitbake -c task
-
-.. note::
-
- - Execution of a task's ``run.*`` script and BitBake's execution of
- a task are identical. In other words, running the script re-runs
- the task just as it would be run using the ``bitbake -c`` command.
-
- - Any ``run.*`` file that does not have a ``.pid`` extension is a
- symbolic link (symlink) to the most recent version of that file.
-
-Remember, that the ``devshell`` is a mechanism that allows you to get
-into the BitBake task execution environment. And as such, all commands
-must be called just as BitBake would call them. That means you need to
-provide the appropriate options for cross-compilation and so forth as
-applicable.
-
-When you are finished using ``devshell``, exit the shell or close the
-terminal window.
-
-.. note::
-
- - It is worth remembering that when using ``devshell`` you need to
- use the full compiler name such as ``arm-poky-linux-gnueabi-gcc``
- instead of just using ``gcc``. The same applies to other
- applications such as ``binutils``, ``libtool`` and so forth.
- BitBake sets up environment variables such as :term:`CC` to assist
- applications, such as ``make`` to find the correct tools.
-
- - It is also worth noting that ``devshell`` still works over X11
- forwarding and similar situations.
-
-Using a Python Development Shell
-================================
-
-Similar to working within a development shell as described in the
-previous section, you can also spawn and work within an interactive
-Python development shell. When debugging certain commands or even when
-just editing packages, ``pydevshell`` can be a useful tool. When you
-invoke the ``pydevshell`` task, all tasks up to and including
-:ref:`ref-tasks-patch` are run for the
-specified target. Then a new terminal is opened. Additionally, key
-Python objects and code are available in the same way they are to
-BitBake tasks, in particular, the data store 'd'. So, commands such as
-the following are useful when exploring the data store and running
-functions::
-
- pydevshell> d.getVar("STAGING_DIR")
- '/media/build1/poky/build/tmp/sysroots'
- pydevshell> d.getVar("STAGING_DIR", False)
- '${TMPDIR}/sysroots'
- pydevshell> d.setVar("FOO", "bar")
- pydevshell> d.getVar("FOO")
- 'bar'
- pydevshell> d.delVar("FOO")
- pydevshell> d.getVar("FOO")
- pydevshell> bb.build.exec_func("do_unpack", d)
- pydevshell>
-
-The commands execute just as if the OpenEmbedded build
-system were executing them. Consequently, working this way can be
-helpful when debugging a build or preparing software to be used with the
-OpenEmbedded build system.
-
-Following is an example that uses ``pydevshell`` on a target named
-``matchbox-desktop``::
-
- $ bitbake matchbox-desktop -c pydevshell
-
-This command spawns a terminal and places you in an interactive Python
-interpreter within the OpenEmbedded build environment. The
-:term:`OE_TERMINAL` variable
-controls what type of shell is opened.
-
-When you are finished using ``pydevshell``, you can exit the shell
-either by using Ctrl+d or closing the terminal window.
-
-Building
-========
-
-This section describes various build procedures, such as the steps
-needed for a simple build, building a target for multiple configurations,
-generating an image for more than one machine, and so forth.
-
-Building a Simple Image
------------------------
-
-In the development environment, you need to build an image whenever you
-change hardware support, add or change system libraries, or add or
-change services that have dependencies. There are several methods that allow
-you to build an image within the Yocto Project. This section presents
-the basic steps you need to build a simple image using BitBake from a
-build host running Linux.
-
-.. note::
-
- - For information on how to build an image using
- :term:`Toaster`, see the
- :doc:`/toaster-manual/index`.
-
- - For information on how to use ``devtool`` to build images, see the
- ":ref:`sdk-manual/extensible:using \`\`devtool\`\` in your sdk workflow`"
- section in the Yocto Project Application Development and the
- Extensible Software Development Kit (eSDK) manual.
-
- - For a quick example on how to build an image using the
- OpenEmbedded build system, see the
- :doc:`/brief-yoctoprojectqs/index` document.
-
-The build process creates an entire Linux distribution from source and
-places it in your :term:`Build Directory` under
-``tmp/deploy/images``. For detailed information on the build process
-using BitBake, see the ":ref:`overview-manual/concepts:images`" section in the
-Yocto Project Overview and Concepts Manual.
-
-The following figure and list overviews the build process:
-
-.. image:: figures/bitbake-build-flow.png
- :width: 100%
-
-1. *Set up Your Host Development System to Support Development Using the
- Yocto Project*: See the ":doc:`start`" section for options on how to get a
- build host ready to use the Yocto Project.
-
-2. *Initialize the Build Environment:* Initialize the build environment
- by sourcing the build environment script (i.e.
- :ref:`structure-core-script`)::
-
- $ source oe-init-build-env [build_dir]
-
- When you use the initialization script, the OpenEmbedded build system
- uses ``build`` as the default :term:`Build Directory` in your current work
- directory. You can use a `build_dir` argument with the script to
- specify a different build directory.
-
- .. note::
-
- A common practice is to use a different Build Directory for
- different targets; for example, ``~/build/x86`` for a ``qemux86``
- target, and ``~/build/arm`` for a ``qemuarm`` target. In any
- event, it's typically cleaner to locate the build directory
- somewhere outside of your source directory.
-
-3. *Make Sure Your* ``local.conf`` *File is Correct*: Ensure the
- ``conf/local.conf`` configuration file, which is found in the Build
- Directory, is set up how you want it. This file defines many aspects
- of the build environment including the target machine architecture
- through the :term:`MACHINE` variable, the packaging format used during
- the build
- (:term:`PACKAGE_CLASSES`),
- and a centralized tarball download directory through the
- :term:`DL_DIR` variable.
-
-4. *Build the Image:* Build the image using the ``bitbake`` command::
-
- $ bitbake target
-
- .. note::
-
- For information on BitBake, see the :doc:`bitbake:index`.
-
- The target is the name of the recipe you want to build. Common
- targets are the images in ``meta/recipes-core/images``,
- ``meta/recipes-sato/images``, and so forth all found in the
- :term:`Source Directory`. Alternatively, the target
- can be the name of a recipe for a specific piece of software such as
- BusyBox. For more details about the images the OpenEmbedded build
- system supports, see the
- ":ref:`ref-manual/images:Images`" chapter in the Yocto
- Project Reference Manual.
-
- As an example, the following command builds the
- ``core-image-minimal`` image::
-
- $ bitbake core-image-minimal
-
- Once an
- image has been built, it often needs to be installed. The images and
- kernels built by the OpenEmbedded build system are placed in the
- Build Directory in ``tmp/deploy/images``. For information on how to
- run pre-built images such as ``qemux86`` and ``qemuarm``, see the
- :doc:`/sdk-manual/index` manual. For
- information about how to install these images, see the documentation
- for your particular board or machine.
-
-Building Images for Multiple Targets Using Multiple Configurations
-------------------------------------------------------------------
-
-You can use a single ``bitbake`` command to build multiple images or
-packages for different targets where each image or package requires a
-different configuration (multiple configuration builds). The builds, in
-this scenario, are sometimes referred to as "multiconfigs", and this
-section uses that term throughout.
-
-This section describes how to set up for multiple configuration builds
-and how to account for cross-build dependencies between the
-multiconfigs.
-
-Setting Up and Running a Multiple Configuration Build
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-To accomplish a multiple configuration build, you must define each
-target's configuration separately using a parallel configuration file in
-the :term:`Build Directory`, and you
-must follow a required file hierarchy. Additionally, you must enable the
-multiple configuration builds in your ``local.conf`` file.
-
-Follow these steps to set up and execute multiple configuration builds:
-
-- *Create Separate Configuration Files*: You need to create a single
- configuration file for each build target (each multiconfig).
- Minimally, each configuration file must define the machine and the
- temporary directory BitBake uses for the build. Suggested practice
- dictates that you do not overlap the temporary directories used
- during the builds. However, it is possible that you can share the
- temporary directory
- (:term:`TMPDIR`). For example,
- consider a scenario with two different multiconfigs for the same
- :term:`MACHINE`: "qemux86" built
- for two distributions such as "poky" and "poky-lsb". In this case,
- you might want to use the same :term:`TMPDIR`.
-
- Here is an example showing the minimal statements needed in a
- configuration file for a "qemux86" target whose temporary build
- directory is ``tmpmultix86``::
-
- MACHINE = "qemux86"
- TMPDIR = "${TOPDIR}/tmpmultix86"
-
- The location for these multiconfig configuration files is specific.
- They must reside in the current build directory in a sub-directory of
- ``conf`` named ``multiconfig``. Following is an example that defines
- two configuration files for the "x86" and "arm" multiconfigs:
-
- .. image:: figures/multiconfig_files.png
- :align: center
- :width: 50%
-
- The reason for this required file hierarchy is because the :term:`BBPATH`
- variable is not constructed until the layers are parsed.
- Consequently, using the configuration file as a pre-configuration
- file is not possible unless it is located in the current working
- directory.
-
-- *Add the BitBake Multi-configuration Variable to the Local
- Configuration File*: Use the
- :term:`BBMULTICONFIG`
- variable in your ``conf/local.conf`` configuration file to specify
- each multiconfig. Continuing with the example from the previous
- figure, the :term:`BBMULTICONFIG` variable needs to enable two
- multiconfigs: "x86" and "arm" by specifying each configuration file::
-
- BBMULTICONFIG = "x86 arm"
-
- .. note::
-
- A "default" configuration already exists by definition. This
- configuration is named: "" (i.e. empty string) and is defined by
- the variables coming from your ``local.conf``
- file. Consequently, the previous example actually adds two
- additional configurations to your build: "arm" and "x86" along
- with "".
-
-- *Launch BitBake*: Use the following BitBake command form to launch
- the multiple configuration build::
-
- $ bitbake [mc:multiconfigname:]target [[[mc:multiconfigname:]target] ... ]
-
- For the example in this section, the following command applies::
-
- $ bitbake mc:x86:core-image-minimal mc:arm:core-image-sato mc::core-image-base
-
- The previous BitBake command builds a ``core-image-minimal`` image
- that is configured through the ``x86.conf`` configuration file, a
- ``core-image-sato`` image that is configured through the ``arm.conf``
- configuration file and a ``core-image-base`` that is configured
- through your ``local.conf`` configuration file.
-
-.. note::
-
- Support for multiple configuration builds in the Yocto Project &DISTRO;
- (&DISTRO_NAME;) Release does not include Shared State (sstate)
- optimizations. Consequently, if a build uses the same object twice
- in, for example, two different :term:`TMPDIR`
- directories, the build either loads from an existing sstate cache for
- that build at the start or builds the object fresh.
-
-Enabling Multiple Configuration Build Dependencies
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-Sometimes dependencies can exist between targets (multiconfigs) in a
-multiple configuration build. For example, suppose that in order to
-build a ``core-image-sato`` image for an "x86" multiconfig, the root
-filesystem of an "arm" multiconfig must exist. This dependency is
-essentially that the
-:ref:`ref-tasks-image` task in the
-``core-image-sato`` recipe depends on the completion of the
-:ref:`ref-tasks-rootfs` task of the
-``core-image-minimal`` recipe.
-
-To enable dependencies in a multiple configuration build, you must
-declare the dependencies in the recipe using the following statement
-form::
-
- task_or_package[mcdepends] = "mc:from_multiconfig:to_multiconfig:recipe_name:task_on_which_to_depend"
-
-To better show how to use this statement, consider the example scenario
-from the first paragraph of this section. The following statement needs
-to be added to the recipe that builds the ``core-image-sato`` image::
-
- do_image[mcdepends] = "mc:x86:arm:core-image-minimal:do_rootfs"
-
-In this example, the `from_multiconfig` is "x86". The `to_multiconfig` is "arm". The
-task on which the ``do_image`` task in the recipe depends is the
-``do_rootfs`` task from the ``core-image-minimal`` recipe associated
-with the "arm" multiconfig.
-
-Once you set up this dependency, you can build the "x86" multiconfig
-using a BitBake command as follows::
-
- $ bitbake mc:x86:core-image-sato
-
-This command executes all the tasks needed to create the
-``core-image-sato`` image for the "x86" multiconfig. Because of the
-dependency, BitBake also executes through the ``do_rootfs`` task for the
-"arm" multiconfig build.
-
-Having a recipe depend on the root filesystem of another build might not
-seem that useful. Consider this change to the statement in the
-``core-image-sato`` recipe::
-
- do_image[mcdepends] = "mc:x86:arm:core-image-minimal:do_image"
-
-In this case, BitBake must
-create the ``core-image-minimal`` image for the "arm" build since the
-"x86" build depends on it.
-
-Because "x86" and "arm" are enabled for multiple configuration builds
-and have separate configuration files, BitBake places the artifacts for
-each build in the respective temporary build directories (i.e.
-:term:`TMPDIR`).
-
-Building an Initial RAM Filesystem (initramfs) Image
-----------------------------------------------------
-
-An initial RAM filesystem (initramfs) image provides a temporary root
-filesystem used for early system initialization (e.g. loading of modules
-needed to locate and mount the "real" root filesystem).
-
-.. note::
-
- The initramfs image is the successor of initial RAM disk (initrd). It
- is a "copy in and out" (cpio) archive of the initial filesystem that
- gets loaded into memory during the Linux startup process. Because
- Linux uses the contents of the archive during initialization, the
- initramfs image needs to contain all of the device drivers and tools
- needed to mount the final root filesystem.
-
-Follow these steps to create an initramfs image:
-
-1. *Create the initramfs Image Recipe:* You can reference the
- ``core-image-minimal-initramfs.bb`` recipe found in the
- ``meta/recipes-core`` directory of the :term:`Source Directory`
- as an example
- from which to work.
-
-2. *Decide if You Need to Bundle the initramfs Image Into the Kernel
- Image:* If you want the initramfs image that is built to be bundled
- in with the kernel image, set the
- :term:`INITRAMFS_IMAGE_BUNDLE`
- variable to "1" in your ``local.conf`` configuration file and set the
- :term:`INITRAMFS_IMAGE`
- variable in the recipe that builds the kernel image.
-
- .. note::
-
- It is recommended that you bundle the initramfs image with the
- kernel image to avoid circular dependencies between the kernel
- recipe and the initramfs recipe should the initramfs image include
- kernel modules.
-
- Setting the :term:`INITRAMFS_IMAGE_BUNDLE` flag causes the initramfs
- image to be unpacked into the ``${B}/usr/`` directory. The unpacked
- initramfs image is then passed to the kernel's ``Makefile`` using the
- :term:`CONFIG_INITRAMFS_SOURCE`
- variable, allowing the initramfs image to be built into the kernel
- normally.
-
- .. note::
-
- Bundling the initramfs with the kernel conflates the code in the initramfs
- with the GPLv2 licensed Linux kernel binary. Thus only GPLv2 compatible
- software may be part of a bundled initramfs.
-
- .. note::
-
- If you choose to not bundle the initramfs image with the kernel
- image, you are essentially using an
- `Initial RAM Disk (initrd) <https://en.wikipedia.org/wiki/Initrd>`__.
- Creating an initrd is handled primarily through the :term:`INITRD_IMAGE`,
- ``INITRD_LIVE``, and ``INITRD_IMAGE_LIVE`` variables. For more
- information, see the :ref:`ref-classes-image-live` file.
-
-3. *Optionally Add Items to the initramfs Image Through the initramfs
- Image Recipe:* If you add items to the initramfs image by way of its
- recipe, you should use
- :term:`PACKAGE_INSTALL`
- rather than
- :term:`IMAGE_INSTALL`.
- :term:`PACKAGE_INSTALL` gives more direct control of what is added to the
- image as compared to the defaults you might not necessarily want that
- are set by the :ref:`image <ref-classes-image>`
- or :ref:`core-image <ref-classes-core-image>`
- classes.
-
-4. *Build the Kernel Image and the initramfs Image:* Build your kernel
- image using BitBake. Because the initramfs image recipe is a
- dependency of the kernel image, the initramfs image is built as well
- and bundled with the kernel image if you used the
- :term:`INITRAMFS_IMAGE_BUNDLE`
- variable described earlier.
-
-Bundling an Initramfs Image From a Separate Multiconfig
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-There may be a case where we want to build an initramfs image which does not
-inherit the same distro policy as our main image, for example, we may want
-our main image to use ``TCLIBC="glibc"``, but to use ``TCLIBC="musl"`` in our initramfs
-image to keep a smaller footprint. However, by performing the steps mentioned
-above the initramfs image will inherit ``TCLIBC="glibc"`` without allowing us
-to override it.
-
-To achieve this, you need to perform some additional steps:
-
-1. *Create a multiconfig for your initramfs image:* You can perform the steps
- on ":ref:`dev-manual/common-tasks:building images for multiple targets using multiple configurations`" to create a separate multiconfig.
- For the sake of simplicity let's assume such multiconfig is called: ``initramfscfg.conf`` and
- contains the variables::
-
- TMPDIR="${TOPDIR}/tmp-initramfscfg"
- TCLIBC="musl"
-
-2. *Set additional initramfs variables on your main configuration:*
- Additionally, on your main configuration (``local.conf``) you need to set the
- variables::
-
- INITRAMFS_MULTICONFIG = "initramfscfg"
- INITRAMFS_DEPLOY_DIR_IMAGE = "${TOPDIR}/tmp-initramfscfg/deploy/images/${MACHINE}"
-
- The variables :term:`INITRAMFS_MULTICONFIG` and :term:`INITRAMFS_DEPLOY_DIR_IMAGE`
- are used to create a multiconfig dependency from the kernel to the :term:`INITRAMFS_IMAGE`
- to be built coming from the ``initramfscfg`` multiconfig, and to let the
- buildsystem know where the :term:`INITRAMFS_IMAGE` will be located.
-
- Building a system with such configuration will build the kernel using the
- main configuration but the ``do_bundle_initramfs`` task will grab the
- selected :term:`INITRAMFS_IMAGE` from :term:`INITRAMFS_DEPLOY_DIR_IMAGE`
- instead, resulting in a musl based initramfs image bundled in the kernel
- but a glibc based main image.
-
- The same is applicable to avoid inheriting :term:`DISTRO_FEATURES` on :term:`INITRAMFS_IMAGE`
- or to build a different :term:`DISTRO` for it such as ``poky-tiny``.
-
-
-Building a Tiny System
-----------------------
-
-Very small distributions have some significant advantages such as
-requiring less on-die or in-package memory (cheaper), better performance
-through efficient cache usage, lower power requirements due to less
-memory, faster boot times, and reduced development overhead. Some
-real-world examples where a very small distribution gives you distinct
-advantages are digital cameras, medical devices, and small headless
-systems.
-
-This section presents information that shows you how you can trim your
-distribution to even smaller sizes than the ``poky-tiny`` distribution,
-which is around 5 Mbytes, that can be built out-of-the-box using the
-Yocto Project.
-
-Tiny System Overview
-~~~~~~~~~~~~~~~~~~~~
-
-The following list presents the overall steps you need to consider and
-perform to create distributions with smaller root filesystems, achieve
-faster boot times, maintain your critical functionality, and avoid
-initial RAM disks:
-
-- :ref:`Determine your goals and guiding principles
- <dev-manual/common-tasks:goals and guiding principles>`
-
-- :ref:`dev-manual/common-tasks:understand what contributes to your image size`
-
-- :ref:`Reduce the size of the root filesystem
- <dev-manual/common-tasks:trim the root filesystem>`
-
-- :ref:`Reduce the size of the kernel <dev-manual/common-tasks:trim the kernel>`
-
-- :ref:`dev-manual/common-tasks:remove package management requirements`
-
-- :ref:`dev-manual/common-tasks:look for other ways to minimize size`
-
-- :ref:`dev-manual/common-tasks:iterate on the process`
-
-Goals and Guiding Principles
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-Before you can reach your destination, you need to know where you are
-going. Here is an example list that you can use as a guide when creating
-very small distributions:
-
-- Determine how much space you need (e.g. a kernel that is 1 Mbyte or
- less and a root filesystem that is 3 Mbytes or less).
-
-- Find the areas that are currently taking 90% of the space and
- concentrate on reducing those areas.
-
-- Do not create any difficult "hacks" to achieve your goals.
-
-- Leverage the device-specific options.
-
-- Work in a separate layer so that you keep changes isolated. For
- information on how to create layers, see the
- ":ref:`dev-manual/common-tasks:understanding and creating layers`" section.
-
-Understand What Contributes to Your Image Size
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-It is easiest to have something to start with when creating your own
-distribution. You can use the Yocto Project out-of-the-box to create the
-``poky-tiny`` distribution. Ultimately, you will want to make changes in
-your own distribution that are likely modeled after ``poky-tiny``.
-
-.. note::
-
- To use ``poky-tiny`` in your build, set the :term:`DISTRO` variable in your
- ``local.conf`` file to "poky-tiny" as described in the
- ":ref:`dev-manual/common-tasks:creating your own distribution`"
- section.
-
-Understanding some memory concepts will help you reduce the system size.
-Memory consists of static, dynamic, and temporary memory. Static memory
-is the TEXT (code), DATA (initialized data in the code), and BSS
-(uninitialized data) sections. Dynamic memory represents memory that is
-allocated at runtime: stacks, hash tables, and so forth. Temporary
-memory is recovered after the boot process. This memory consists of
-memory used for decompressing the kernel and for the ``__init__``
-functions.
-
-To help you see where you currently are with kernel and root filesystem
-sizes, you can use two tools found in the :term:`Source Directory`
-in the
-``scripts/tiny/`` directory:
-
-- ``ksize.py``: Reports component sizes for the kernel build objects.
-
-- ``dirsize.py``: Reports component sizes for the root filesystem.
-
-This next tool and command help you organize configuration fragments and
-view file dependencies in a human-readable form:
-
-- ``merge_config.sh``: Helps you manage configuration files and
- fragments within the kernel. With this tool, you can merge individual
- configuration fragments together. The tool allows you to make
- overrides and warns you of any missing configuration options. The
- tool is ideal for allowing you to iterate on configurations, create
- minimal configurations, and create configuration files for different
- machines without having to duplicate your process.
-
- The ``merge_config.sh`` script is part of the Linux Yocto kernel Git
- repositories (i.e. ``linux-yocto-3.14``, ``linux-yocto-3.10``,
- ``linux-yocto-3.8``, and so forth) in the ``scripts/kconfig``
- directory.
-
- For more information on configuration fragments, see the
- ":ref:`kernel-dev/common:creating configuration fragments`"
- section in the Yocto Project Linux Kernel Development Manual.
-
-- ``bitbake -u taskexp -g bitbake_target``: Using the BitBake command
- with these options brings up a Dependency Explorer from which you can
- view file dependencies. Understanding these dependencies allows you
- to make informed decisions when cutting out various pieces of the
- kernel and root filesystem.
-
-Trim the Root Filesystem
-~~~~~~~~~~~~~~~~~~~~~~~~
-
-The root filesystem is made up of packages for booting, libraries, and
-applications. To change things, you can configure how the packaging
-happens, which changes the way you build them. You can also modify the
-filesystem itself or select a different filesystem.
-
-First, find out what is hogging your root filesystem by running the
-``dirsize.py`` script from your root directory::
-
- $ cd root-directory-of-image
- $ dirsize.py 100000 > dirsize-100k.log
- $ cat dirsize-100k.log
-
-You can apply a filter to the script to ignore files
-under a certain size. The previous example filters out any files below
-100 Kbytes. The sizes reported by the tool are uncompressed, and thus
-will be smaller by a relatively constant factor in a compressed root
-filesystem. When you examine your log file, you can focus on areas of
-the root filesystem that take up large amounts of memory.
-
-You need to be sure that what you eliminate does not cripple the
-functionality you need. One way to see how packages relate to each other
-is by using the Dependency Explorer UI with the BitBake command::
-
- $ cd image-directory
- $ bitbake -u taskexp -g image
-
-Use the interface to
-select potential packages you wish to eliminate and see their dependency
-relationships.
-
-When deciding how to reduce the size, get rid of packages that result in
-minimal impact on the feature set. For example, you might not need a VGA
-display. Or, you might be able to get by with ``devtmpfs`` and ``mdev``
-instead of ``udev``.
-
-Use your ``local.conf`` file to make changes. For example, to eliminate
-``udev`` and ``glib``, set the following in the local configuration
-file::
-
- VIRTUAL-RUNTIME_dev_manager = ""
-
-Finally, you should consider exactly the type of root filesystem you
-need to meet your needs while also reducing its size. For example,
-consider ``cramfs``, ``squashfs``, ``ubifs``, ``ext2``, or an
-``initramfs`` using ``initramfs``. Be aware that ``ext3`` requires a 1
-Mbyte journal. If you are okay with running read-only, you do not need
-this journal.
-
-.. note::
-
- After each round of elimination, you need to rebuild your system and
- then use the tools to see the effects of your reductions.
-
-Trim the Kernel
-~~~~~~~~~~~~~~~
-
-The kernel is built by including policies for hardware-independent
-aspects. What subsystems do you enable? For what architecture are you
-building? Which drivers do you build by default?
-
-.. note::
-
- You can modify the kernel source if you want to help with boot time.
-
-Run the ``ksize.py`` script from the top-level Linux build directory to
-get an idea of what is making up the kernel::
-
- $ cd top-level-linux-build-directory
- $ ksize.py > ksize.log
- $ cat ksize.log
-
-When you examine the log, you will see how much space is taken up with
-the built-in ``.o`` files for drivers, networking, core kernel files,
-filesystem, sound, and so forth. The sizes reported by the tool are
-uncompressed, and thus will be smaller by a relatively constant factor
-in a compressed kernel image. Look to reduce the areas that are large
-and taking up around the "90% rule."
-
-To examine, or drill down, into any particular area, use the ``-d``
-option with the script::
-
- $ ksize.py -d > ksize.log
-
-Using this option
-breaks out the individual file information for each area of the kernel
-(e.g. drivers, networking, and so forth).
-
-Use your log file to see what you can eliminate from the kernel based on
-features you can let go. For example, if you are not going to need
-sound, you do not need any drivers that support sound.
-
-After figuring out what to eliminate, you need to reconfigure the kernel
-to reflect those changes during the next build. You could run
-``menuconfig`` and make all your changes at once. However, that makes it
-difficult to see the effects of your individual eliminations and also
-makes it difficult to replicate the changes for perhaps another target
-device. A better method is to start with no configurations using
-``allnoconfig``, create configuration fragments for individual changes,
-and then manage the fragments into a single configuration file using
-``merge_config.sh``. The tool makes it easy for you to iterate using the
-configuration change and build cycle.
-
-Each time you make configuration changes, you need to rebuild the kernel
-and check to see what impact your changes had on the overall size.
-
-Remove Package Management Requirements
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-Packaging requirements add size to the image. One way to reduce the size
-of the image is to remove all the packaging requirements from the image.
-This reduction includes both removing the package manager and its unique
-dependencies as well as removing the package management data itself.
-
-To eliminate all the packaging requirements for an image, be sure that
-"package-management" is not part of your
-:term:`IMAGE_FEATURES`
-statement for the image. When you remove this feature, you are removing
-the package manager as well as its dependencies from the root
-filesystem.
-
-Look for Other Ways to Minimize Size
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-Depending on your particular circumstances, other areas that you can
-trim likely exist. The key to finding these areas is through tools and
-methods described here combined with experimentation and iteration. Here
-are a couple of areas to experiment with:
-
-- ``glibc``: In general, follow this process:
-
- 1. Remove ``glibc`` features from
- :term:`DISTRO_FEATURES`
- that you think you do not need.
-
- 2. Build your distribution.
-
- 3. If the build fails due to missing symbols in a package, determine
- if you can reconfigure the package to not need those features. For
- example, change the configuration to not support wide character
- support as is done for ``ncurses``. Or, if support for those
- characters is needed, determine what ``glibc`` features provide
- the support and restore the configuration.
-
- 4. Rebuild and repeat the process.
-
-- ``busybox``: For BusyBox, use a process similar as described for
- ``glibc``. A difference is you will need to boot the resulting system
- to see if you are able to do everything you expect from the running
- system. You need to be sure to integrate configuration fragments into
- Busybox because BusyBox handles its own core features and then allows
- you to add configuration fragments on top.
-
-Iterate on the Process
-~~~~~~~~~~~~~~~~~~~~~~
-
-If you have not reached your goals on system size, you need to iterate
-on the process. The process is the same. Use the tools and see just what
-is taking up 90% of the root filesystem and the kernel. Decide what you
-can eliminate without limiting your device beyond what you need.
-
-Depending on your system, a good place to look might be Busybox, which
-provides a stripped down version of Unix tools in a single, executable
-file. You might be able to drop virtual terminal services or perhaps
-ipv6.
-
-Building Images for More than One Machine
------------------------------------------
-
-A common scenario developers face is creating images for several
-different machines that use the same software environment. In this
-situation, it is tempting to set the tunings and optimization flags for
-each build specifically for the targeted hardware (i.e. "maxing out" the
-tunings). Doing so can considerably add to build times and package feed
-maintenance collectively for the machines. For example, selecting tunes
-that are extremely specific to a CPU core used in a system might enable
-some micro optimizations in GCC for that particular system but would
-otherwise not gain you much of a performance difference across the other
-systems as compared to using a more general tuning across all the builds
-(e.g. setting :term:`DEFAULTTUNE`
-specifically for each machine's build). Rather than "max out" each
-build's tunings, you can take steps that cause the OpenEmbedded build
-system to reuse software across the various machines where it makes
-sense.
-
-If build speed and package feed maintenance are considerations, you
-should consider the points in this section that can help you optimize
-your tunings to best consider build times and package feed maintenance.
-
-- *Share the Build Directory:* If at all possible, share the
- :term:`TMPDIR` across builds. The
- Yocto Project supports switching between different
- :term:`MACHINE` values in the same
- :term:`TMPDIR`. This practice is well supported and regularly used by
- developers when building for multiple machines. When you use the same
- :term:`TMPDIR` for multiple machine builds, the OpenEmbedded build system
- can reuse the existing native and often cross-recipes for multiple
- machines. Thus, build time decreases.
-
- .. note::
-
- If :term:`DISTRO` settings change or fundamental configuration settings
- such as the filesystem layout, you need to work with a clean :term:`TMPDIR`.
- Sharing :term:`TMPDIR` under these circumstances might work but since it is
- not guaranteed, you should use a clean :term:`TMPDIR`.
-
-- *Enable the Appropriate Package Architecture:* By default, the
- OpenEmbedded build system enables three levels of package
- architectures: "all", "tune" or "package", and "machine". Any given
- recipe usually selects one of these package architectures (types) for
- its output. Depending for what a given recipe creates packages,
- making sure you enable the appropriate package architecture can
- directly impact the build time.
-
- A recipe that just generates scripts can enable "all" architecture
- because there are no binaries to build. To specifically enable "all"
- architecture, be sure your recipe inherits the
- :ref:`allarch <ref-classes-allarch>` class.
- This class is useful for "all" architectures because it configures
- many variables so packages can be used across multiple architectures.
-
- If your recipe needs to generate packages that are machine-specific
- or when one of the build or runtime dependencies is already
- machine-architecture dependent, which makes your recipe also
- machine-architecture dependent, make sure your recipe enables the
- "machine" package architecture through the
- :term:`MACHINE_ARCH`
- variable::
-
- PACKAGE_ARCH = "${MACHINE_ARCH}"
-
- When you do not
- specifically enable a package architecture through the
- :term:`PACKAGE_ARCH`, The
- OpenEmbedded build system defaults to the
- :term:`TUNE_PKGARCH` setting::
-
- PACKAGE_ARCH = "${TUNE_PKGARCH}"
-
-- *Choose a Generic Tuning File if Possible:* Some tunes are more
- generic and can run on multiple targets (e.g. an ``armv5`` set of
- packages could run on ``armv6`` and ``armv7`` processors in most
- cases). Similarly, ``i486`` binaries could work on ``i586`` and
- higher processors. You should realize, however, that advances on
- newer processor versions would not be used.
-
- If you select the same tune for several different machines, the
- OpenEmbedded build system reuses software previously built, thus
- speeding up the overall build time. Realize that even though a new
- sysroot for each machine is generated, the software is not recompiled
- and only one package feed exists.
-
-- *Manage Granular Level Packaging:* Sometimes there are cases where
- injecting another level of package architecture beyond the three
- higher levels noted earlier can be useful. For example, consider how
- NXP (formerly Freescale) allows for the easy reuse of binary packages
- in their layer
- :yocto_git:`meta-freescale </meta-freescale/>`.
- In this example, the
- :yocto_git:`fsl-dynamic-packagearch </meta-freescale/tree/classes/fsl-dynamic-packagearch.bbclass>`
- class shares GPU packages for i.MX53 boards because all boards share
- the AMD GPU. The i.MX6-based boards can do the same because all
- boards share the Vivante GPU. This class inspects the BitBake
- datastore to identify if the package provides or depends on one of
- the sub-architecture values. If so, the class sets the
- :term:`PACKAGE_ARCH` value
- based on the ``MACHINE_SUBARCH`` value. If the package does not
- provide or depend on one of the sub-architecture values but it
- matches a value in the machine-specific filter, it sets
- :term:`MACHINE_ARCH`. This
- behavior reduces the number of packages built and saves build time by
- reusing binaries.
-
-- *Use Tools to Debug Issues:* Sometimes you can run into situations
- where software is being rebuilt when you think it should not be. For
- example, the OpenEmbedded build system might not be using shared
- state between machines when you think it should be. These types of
- situations are usually due to references to machine-specific
- variables such as :term:`MACHINE`,
- :term:`SERIAL_CONSOLES`,
- :term:`XSERVER`,
- :term:`MACHINE_FEATURES`,
- and so forth in code that is supposed to only be tune-specific or
- when the recipe depends
- (:term:`DEPENDS`,
- :term:`RDEPENDS`,
- :term:`RRECOMMENDS`,
- :term:`RSUGGESTS`, and so forth)
- on some other recipe that already has
- :term:`PACKAGE_ARCH` defined
- as "${MACHINE_ARCH}".
-
- .. note::
-
- Patches to fix any issues identified are most welcome as these
- issues occasionally do occur.
-
- For such cases, you can use some tools to help you sort out the
- situation:
-
- - ``state-diff-machines.sh``*:* You can find this tool in the
- ``scripts`` directory of the Source Repositories. See the comments
- in the script for information on how to use the tool.
-
- - *BitBake's "-S printdiff" Option:* Using this option causes
- BitBake to try to establish the closest signature match it can
- (e.g. in the shared state cache) and then run ``bitbake-diffsigs``
- over the matches to determine the stamps and delta where these two
- stamp trees diverge.
-
-Building Software from an External Source
------------------------------------------
-
-By default, the OpenEmbedded build system uses the
-:term:`Build Directory` when building source
-code. The build process involves fetching the source files, unpacking
-them, and then patching them if necessary before the build takes place.
-
-There are situations where you might want to build software from source
-files that are external to and thus outside of the OpenEmbedded build
-system. For example, suppose you have a project that includes a new BSP
-with a heavily customized kernel. And, you want to minimize exposing the
-build system to the development team so that they can focus on their
-project and maintain everyone's workflow as much as possible. In this
-case, you want a kernel source directory on the development machine
-where the development occurs. You want the recipe's
-:term:`SRC_URI` variable to point to
-the external directory and use it as is, not copy it.
-
-To build from software that comes from an external source, all you need
-to do is inherit the
-:ref:`externalsrc <ref-classes-externalsrc>` class
-and then set the
-:term:`EXTERNALSRC` variable to
-point to your external source code. Here are the statements to put in
-your ``local.conf`` file::
-
- INHERIT += "externalsrc"
- EXTERNALSRC:pn-myrecipe = "path-to-your-source-tree"
-
-This next example shows how to accomplish the same thing by setting
-:term:`EXTERNALSRC` in the recipe itself or in the recipe's append file::
-
- EXTERNALSRC = "path"
- EXTERNALSRC_BUILD = "path"
-
-.. note::
-
- In order for these settings to take effect, you must globally or
- locally inherit the :ref:`externalsrc <ref-classes-externalsrc>`
- class.
-
-By default, :ref:`ref-classes-externalsrc` builds the source code in a
-directory separate from the external source directory as specified by
-:term:`EXTERNALSRC`. If you need
-to have the source built in the same directory in which it resides, or
-some other nominated directory, you can set
-:term:`EXTERNALSRC_BUILD`
-to point to that directory::
-
- EXTERNALSRC_BUILD:pn-myrecipe = "path-to-your-source-tree"
-
-Replicating a Build Offline
----------------------------
-
-It can be useful to take a "snapshot" of upstream sources used in a
-build and then use that "snapshot" later to replicate the build offline.
-To do so, you need to first prepare and populate your downloads
-directory your "snapshot" of files. Once your downloads directory is
-ready, you can use it at any time and from any machine to replicate your
-build.
-
-Follow these steps to populate your Downloads directory:
-
-1. *Create a Clean Downloads Directory:* Start with an empty downloads
- directory (:term:`DL_DIR`). You
- start with an empty downloads directory by either removing the files
- in the existing directory or by setting :term:`DL_DIR` to point to either
- an empty location or one that does not yet exist.
-
-2. *Generate Tarballs of the Source Git Repositories:* Edit your
- ``local.conf`` configuration file as follows::
-
- DL_DIR = "/home/your-download-dir/"
- BB_GENERATE_MIRROR_TARBALLS = "1"
-
- During
- the fetch process in the next step, BitBake gathers the source files
- and creates tarballs in the directory pointed to by :term:`DL_DIR`. See
- the
- :term:`BB_GENERATE_MIRROR_TARBALLS`
- variable for more information.
-
-3. *Populate Your Downloads Directory Without Building:* Use BitBake to
- fetch your sources but inhibit the build::
-
- $ bitbake target --runonly=fetch
-
- The downloads directory (i.e. ``${DL_DIR}``) now has
- a "snapshot" of the source files in the form of tarballs, which can
- be used for the build.
-
-4. *Optionally Remove Any Git or other SCM Subdirectories From the
- Downloads Directory:* If you want, you can clean up your downloads
- directory by removing any Git or other Source Control Management
- (SCM) subdirectories such as ``${DL_DIR}/git2/*``. The tarballs
- already contain these subdirectories.
-
-Once your downloads directory has everything it needs regarding source
-files, you can create your "own-mirror" and build your target.
-Understand that you can use the files to build the target offline from
-any machine and at any time.
-
-Follow these steps to build your target using the files in the downloads
-directory:
-
-1. *Using Local Files Only:* Inside your ``local.conf`` file, add the
- :term:`SOURCE_MIRROR_URL` variable, inherit the
- :ref:`own-mirrors <ref-classes-own-mirrors>` class, and use the
- :term:`BB_NO_NETWORK` variable to your ``local.conf``.
- ::
-
- SOURCE_MIRROR_URL ?= "file:///home/your-download-dir/"
- INHERIT += "own-mirrors"
- BB_NO_NETWORK = "1"
-
- The :term:`SOURCE_MIRROR_URL` and :ref:`own-mirrors <ref-classes-own-mirrors>`
- class set up the system to use the downloads directory as your "own
- mirror". Using the :term:`BB_NO_NETWORK` variable makes sure that
- BitBake's fetching process in step 3 stays local, which means files
- from your "own-mirror" are used.
-
-2. *Start With a Clean Build:* You can start with a clean build by
- removing the
- ``${``\ :term:`TMPDIR`\ ``}``
- directory or using a new :term:`Build Directory`.
-
-3. *Build Your Target:* Use BitBake to build your target::
-
- $ bitbake target
-
- The build completes using the known local "snapshot" of source
- files from your mirror. The resulting tarballs for your "snapshot" of
- source files are in the downloads directory.
-
- .. note::
-
- The offline build does not work if recipes attempt to find the
- latest version of software by setting
- :term:`SRCREV` to
- ``${``\ :term:`AUTOREV`\ ``}``::
-
- SRCREV = "${AUTOREV}"
-
- When a recipe sets :term:`SRCREV` to
- ``${``\ :term:`AUTOREV`\ ``}``, the build system accesses the network in an
- attempt to determine the latest version of software from the SCM.
- Typically, recipes that use :term:`AUTOREV` are custom or modified
- recipes. Recipes that reside in public repositories usually do not
- use :term:`AUTOREV`.
-
- If you do have recipes that use :term:`AUTOREV`, you can take steps to
- still use the recipes in an offline build. Do the following:
-
- 1. Use a configuration generated by enabling :ref:`build
- history <dev-manual/common-tasks:maintaining build output quality>`.
-
- 2. Use the ``buildhistory-collect-srcrevs`` command to collect the
- stored :term:`SRCREV` values from the build's history. For more
- information on collecting these values, see the
- ":ref:`dev-manual/common-tasks:build history package information`"
- section.
-
- 3. Once you have the correct source revisions, you can modify
- those recipes to set :term:`SRCREV` to specific versions of the
- software.
-
-Speeding Up a Build
-===================
-
-Build time can be an issue. By default, the build system uses simple
-controls to try and maximize build efficiency. In general, the default
-settings for all the following variables result in the most efficient
-build times when dealing with single socket systems (i.e. a single CPU).
-If you have multiple CPUs, you might try increasing the default values
-to gain more speed. See the descriptions in the glossary for each
-variable for more information:
-
-- :term:`BB_NUMBER_THREADS`:
- The maximum number of threads BitBake simultaneously executes.
-
-- :term:`BB_NUMBER_PARSE_THREADS`:
- The number of threads BitBake uses during parsing.
-
-- :term:`PARALLEL_MAKE`: Extra
- options passed to the ``make`` command during the
- :ref:`ref-tasks-compile` task in
- order to specify parallel compilation on the local build host.
-
-- :term:`PARALLEL_MAKEINST`:
- Extra options passed to the ``make`` command during the
- :ref:`ref-tasks-install` task in
- order to specify parallel installation on the local build host.
-
-As mentioned, these variables all scale to the number of processor cores
-available on the build system. For single socket systems, this
-auto-scaling ensures that the build system fundamentally takes advantage
-of potential parallel operations during the build based on the build
-machine's capabilities.
-
-Following are additional factors that can affect build speed:
-
-- File system type: The file system type that the build is being
- performed on can also influence performance. Using ``ext4`` is
- recommended as compared to ``ext2`` and ``ext3`` due to ``ext4``
- improved features such as extents.
-
-- Disabling the updating of access time using ``noatime``: The
- ``noatime`` mount option prevents the build system from updating file
- and directory access times.
-
-- Setting a longer commit: Using the "commit=" mount option increases
- the interval in seconds between disk cache writes. Changing this
- interval from the five second default to something longer increases
- the risk of data loss but decreases the need to write to the disk,
- thus increasing the build performance.
-
-- Choosing the packaging backend: Of the available packaging backends,
- IPK is the fastest. Additionally, selecting a singular packaging
- backend also helps.
-
-- Using ``tmpfs`` for :term:`TMPDIR`
- as a temporary file system: While this can help speed up the build,
- the benefits are limited due to the compiler using ``-pipe``. The
- build system goes to some lengths to avoid ``sync()`` calls into the
- file system on the principle that if there was a significant failure,
- the :term:`Build Directory`
- contents could easily be rebuilt.
-
-- Inheriting the
- :ref:`rm_work <ref-classes-rm-work>` class:
- Inheriting this class has shown to speed up builds due to
- significantly lower amounts of data stored in the data cache as well
- as on disk. Inheriting this class also makes cleanup of
- :term:`TMPDIR` faster, at the
- expense of being easily able to dive into the source code. File
- system maintainers have recommended that the fastest way to clean up
- large numbers of files is to reformat partitions rather than delete
- files due to the linear nature of partitions. This, of course,
- assumes you structure the disk partitions and file systems in a way
- that this is practical.
-
-Aside from the previous list, you should keep some trade offs in mind
-that can help you speed up the build:
-
-- Remove items from
- :term:`DISTRO_FEATURES`
- that you might not need.
-
-- Exclude debug symbols and other debug information: If you do not need
- these symbols and other debug information, disabling the ``*-dbg``
- package generation can speed up the build. You can disable this
- generation by setting the
- :term:`INHIBIT_PACKAGE_DEBUG_SPLIT`
- variable to "1".
-
-- Disable static library generation for recipes derived from
- ``autoconf`` or ``libtool``: Following is an example showing how to
- disable static libraries and still provide an override to handle
- exceptions::
-
- STATICLIBCONF = "--disable-static"
- STATICLIBCONF:sqlite3-native = ""
- EXTRA_OECONF += "${STATICLIBCONF}"
-
- .. note::
-
- - Some recipes need static libraries in order to work correctly
- (e.g. ``pseudo-native`` needs ``sqlite3-native``). Overrides,
- as in the previous example, account for these kinds of
- exceptions.
-
- - Some packages have packaging code that assumes the presence of
- the static libraries. If so, you might need to exclude them as
- well.
-
-Working With Libraries
-======================
-
-Libraries are an integral part of your system. This section describes
-some common practices you might find helpful when working with libraries
-to build your system:
-
-- :ref:`How to include static library files
- <dev-manual/common-tasks:including static library files>`
-
-- :ref:`How to use the Multilib feature to combine multiple versions of
- library files into a single image
- <dev-manual/common-tasks:combining multiple versions of library files into one image>`
-
-- :ref:`How to install multiple versions of the same library in parallel on
- the same system
- <dev-manual/common-tasks:installing multiple versions of the same library>`
-
-Including Static Library Files
-------------------------------
-
-If you are building a library and the library offers static linking, you
-can control which static library files (``*.a`` files) get included in
-the built library.
-
-The :term:`PACKAGES` and
-:term:`FILES:* <FILES>` variables in the
-``meta/conf/bitbake.conf`` configuration file define how files installed
-by the ``do_install`` task are packaged. By default, the :term:`PACKAGES`
-variable includes ``${PN}-staticdev``, which represents all static
-library files.
-
-.. note::
-
- Some previously released versions of the Yocto Project defined the
- static library files through ``${PN}-dev``.
-
-Following is part of the BitBake configuration file, where you can see
-how the static library files are defined::
-
- PACKAGE_BEFORE_PN ?= ""
- PACKAGES = "${PN}-src ${PN}-dbg ${PN}-staticdev ${PN}-dev ${PN}-doc ${PN}-locale ${PACKAGE_BEFORE_PN} ${PN}"
- PACKAGES_DYNAMIC = "^${PN}-locale-.*"
- FILES = ""
-
- FILES:${PN} = "${bindir}/* ${sbindir}/* ${libexecdir}/* ${libdir}/lib*${SOLIBS} \
- ${sysconfdir} ${sharedstatedir} ${localstatedir} \
- ${base_bindir}/* ${base_sbindir}/* \
- ${base_libdir}/*${SOLIBS} \
- ${base_prefix}/lib/udev ${prefix}/lib/udev \
- ${base_libdir}/udev ${libdir}/udev \
- ${datadir}/${BPN} ${libdir}/${BPN}/* \
- ${datadir}/pixmaps ${datadir}/applications \
- ${datadir}/idl ${datadir}/omf ${datadir}/sounds \
- ${libdir}/bonobo/servers"
-
- FILES:${PN}-bin = "${bindir}/* ${sbindir}/*"
-
- FILES:${PN}-doc = "${docdir} ${mandir} ${infodir} ${datadir}/gtk-doc \
- ${datadir}/gnome/help"
- SECTION:${PN}-doc = "doc"
-
- FILES_SOLIBSDEV ?= "${base_libdir}/lib*${SOLIBSDEV} ${libdir}/lib*${SOLIBSDEV}"
- FILES:${PN}-dev = "${includedir} ${FILES_SOLIBSDEV} ${libdir}/*.la \
- ${libdir}/*.o ${libdir}/pkgconfig ${datadir}/pkgconfig \
- ${datadir}/aclocal ${base_libdir}/*.o \
- ${libdir}/${BPN}/*.la ${base_libdir}/*.la \
- ${libdir}/cmake ${datadir}/cmake"
- SECTION:${PN}-dev = "devel"
- ALLOW_EMPTY:${PN}-dev = "1"
- RDEPENDS:${PN}-dev = "${PN} (= ${EXTENDPKGV})"
-
- FILES:${PN}-staticdev = "${libdir}/*.a ${base_libdir}/*.a ${libdir}/${BPN}/*.a"
- SECTION:${PN}-staticdev = "devel"
- RDEPENDS:${PN}-staticdev = "${PN}-dev (= ${EXTENDPKGV})"
-
-Combining Multiple Versions of Library Files into One Image
------------------------------------------------------------
-
-The build system offers the ability to build libraries with different
-target optimizations or architecture formats and combine these together
-into one system image. You can link different binaries in the image
-against the different libraries as needed for specific use cases. This
-feature is called "Multilib".
-
-An example would be where you have most of a system compiled in 32-bit
-mode using 32-bit libraries, but you have something large, like a
-database engine, that needs to be a 64-bit application and uses 64-bit
-libraries. Multilib allows you to get the best of both 32-bit and 64-bit
-libraries.
-
-While the Multilib feature is most commonly used for 32 and 64-bit
-differences, the approach the build system uses facilitates different
-target optimizations. You could compile some binaries to use one set of
-libraries and other binaries to use a different set of libraries. The
-libraries could differ in architecture, compiler options, or other
-optimizations.
-
-There are several examples in the ``meta-skeleton`` layer found in the
-:term:`Source Directory`:
-
-- :oe_git:`conf/multilib-example.conf </openembedded-core/tree/meta-skeleton/conf/multilib-example.conf>`
- configuration file.
-
-- :oe_git:`conf/multilib-example2.conf </openembedded-core/tree/meta-skeleton/conf/multilib-example2.conf>`
- configuration file.
-
-- :oe_git:`recipes-multilib/images/core-image-multilib-example.bb </openembedded-core/tree/meta-skeleton/recipes-multilib/images/core-image-multilib-example.bb>`
- recipe
-
-Preparing to Use Multilib
-~~~~~~~~~~~~~~~~~~~~~~~~~
-
-User-specific requirements drive the Multilib feature. Consequently,
-there is no one "out-of-the-box" configuration that would
-meet your needs.
-
-In order to enable Multilib, you first need to ensure your recipe is
-extended to support multiple libraries. Many standard recipes are
-already extended and support multiple libraries. You can check in the
-``meta/conf/multilib.conf`` configuration file in the
-:term:`Source Directory` to see how this is
-done using the
-:term:`BBCLASSEXTEND` variable.
-Eventually, all recipes will be covered and this list will not be
-needed.
-
-For the most part, the :ref:`Multilib <ref-classes-multilib*>`
-class extension works automatically to
-extend the package name from ``${PN}`` to ``${MLPREFIX}${PN}``, where
-:term:`MLPREFIX` is the particular multilib (e.g. "lib32-" or "lib64-").
-Standard variables such as
-:term:`DEPENDS`,
-:term:`RDEPENDS`,
-:term:`RPROVIDES`,
-:term:`RRECOMMENDS`,
-:term:`PACKAGES`, and
-:term:`PACKAGES_DYNAMIC` are
-automatically extended by the system. If you are extending any manual
-code in the recipe, you can use the ``${MLPREFIX}`` variable to ensure
-those names are extended correctly.
-
-Using Multilib
-~~~~~~~~~~~~~~
-
-After you have set up the recipes, you need to define the actual
-combination of multiple libraries you want to build. You accomplish this
-through your ``local.conf`` configuration file in the
-:term:`Build Directory`. An example
-configuration would be as follows::
-
- MACHINE = "qemux86-64"
- require conf/multilib.conf
- MULTILIBS = "multilib:lib32"
- DEFAULTTUNE:virtclass-multilib-lib32 = "x86"
- IMAGE_INSTALL:append = "lib32-glib-2.0"
-
-This example enables an additional library named
-``lib32`` alongside the normal target packages. When combining these
-"lib32" alternatives, the example uses "x86" for tuning. For information
-on this particular tuning, see
-``meta/conf/machine/include/ia32/arch-ia32.inc``.
-
-The example then includes ``lib32-glib-2.0`` in all the images, which
-illustrates one method of including a multiple library dependency. You
-can use a normal image build to include this dependency, for example::
-
- $ bitbake core-image-sato
-
-You can also build Multilib packages
-specifically with a command like this::
-
- $ bitbake lib32-glib-2.0
-
-Additional Implementation Details
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-There are generic implementation details as well as details that are specific to
-package management systems. Following are implementation details
-that exist regardless of the package management system:
-
-- The typical convention used for the class extension code as used by
- Multilib assumes that all package names specified in
- :term:`PACKAGES` that contain
- ``${PN}`` have ``${PN}`` at the start of the name. When that
- convention is not followed and ``${PN}`` appears at the middle or the
- end of a name, problems occur.
-
-- The :term:`TARGET_VENDOR`
- value under Multilib will be extended to "-vendormlmultilib" (e.g.
- "-pokymllib32" for a "lib32" Multilib with Poky). The reason for this
- slightly unwieldy contraction is that any "-" characters in the
- vendor string presently break Autoconf's ``config.sub``, and other
- separators are problematic for different reasons.
-
-Here are the implementation details for the RPM Package Management System:
-
-- A unique architecture is defined for the Multilib packages, along
- with creating a unique deploy folder under ``tmp/deploy/rpm`` in the
- :term:`Build Directory`. For
- example, consider ``lib32`` in a ``qemux86-64`` image. The possible
- architectures in the system are "all", "qemux86_64",
- "lib32:qemux86_64", and "lib32:x86".
-
-- The ``${MLPREFIX}`` variable is stripped from ``${PN}`` during RPM
- packaging. The naming for a normal RPM package and a Multilib RPM
- package in a ``qemux86-64`` system resolves to something similar to
- ``bash-4.1-r2.x86_64.rpm`` and ``bash-4.1.r2.lib32_x86.rpm``,
- respectively.
-
-- When installing a Multilib image, the RPM backend first installs the
- base image and then installs the Multilib libraries.
-
-- The build system relies on RPM to resolve the identical files in the
- two (or more) Multilib packages.
-
-Here are the implementation details for the IPK Package Management System:
-
-- The ``${MLPREFIX}`` is not stripped from ``${PN}`` during IPK
- packaging. The naming for a normal RPM package and a Multilib IPK
- package in a ``qemux86-64`` system resolves to something like
- ``bash_4.1-r2.x86_64.ipk`` and ``lib32-bash_4.1-rw:x86.ipk``,
- respectively.
-
-- The IPK deploy folder is not modified with ``${MLPREFIX}`` because
- packages with and without the Multilib feature can exist in the same
- folder due to the ``${PN}`` differences.
-
-- IPK defines a sanity check for Multilib installation using certain
- rules for file comparison, overridden, etc.
-
-Installing Multiple Versions of the Same Library
-------------------------------------------------
-
-There are be situations where you need to install and use multiple versions
-of the same library on the same system at the same time. This
-almost always happens when a library API changes and you have
-multiple pieces of software that depend on the separate versions of the
-library. To accommodate these situations, you can install multiple
-versions of the same library in parallel on the same system.
-
-The process is straightforward as long as the libraries use proper
-versioning. With properly versioned libraries, all you need to do to
-individually specify the libraries is create separate, appropriately
-named recipes where the :term:`PN` part of
-the name includes a portion that differentiates each library version
-(e.g. the major part of the version number). Thus, instead of having a
-single recipe that loads one version of a library (e.g. ``clutter``),
-you provide multiple recipes that result in different versions of the
-libraries you want. As an example, the following two recipes would allow
-the two separate versions of the ``clutter`` library to co-exist on the
-same system:
-
-.. code-block:: none
-
- clutter-1.6_1.6.20.bb
- clutter-1.8_1.8.4.bb
-
-Additionally, if
-you have other recipes that depend on a given library, you need to use
-the :term:`DEPENDS` variable to
-create the dependency. Continuing with the same example, if you want to
-have a recipe depend on the 1.8 version of the ``clutter`` library, use
-the following in your recipe::
-
- DEPENDS = "clutter-1.8"
-
-Working with Pre-Built Libraries
-================================
-
-Introduction
--------------
-
-Some library vendors do not release source code for their software but do
-release pre-built binaries. When shared libraries are built, they should
-be versioned (see `this article
-<https://tldp.org/HOWTO/Program-Library-HOWTO/shared-libraries.html>`__
-for some background), but sometimes this is not done.
-
-To summarize, a versioned library must meet two conditions:
-
-#. The filename must have the version appended, for example: ``libfoo.so.1.2.3``.
-#. The library must have the ELF tag ``SONAME`` set to the major version
- of the library, for example: ``libfoo.so.1``. You can check this by
- running ``readelf -d filename | grep SONAME``.
-
-This section shows how to deal with both versioned and unversioned
-pre-built libraries.
-
-Versioned Libraries
--------------------
-
-In this example we work with pre-built libraries for the FT4222H USB I/O chip.
-Libraries are built for several target architecture variants and packaged in
-an archive as follows::
-
- ├── build-arm-hisiv300
- │   └── libft4222.so.1.4.4.44
- ├── build-arm-v5-sf
- │   └── libft4222.so.1.4.4.44
- ├── build-arm-v6-hf
- │   └── libft4222.so.1.4.4.44
- ├── build-arm-v7-hf
- │   └── libft4222.so.1.4.4.44
- ├── build-arm-v8
- │   └── libft4222.so.1.4.4.44
- ├── build-i386
- │   └── libft4222.so.1.4.4.44
- ├── build-i486
- │   └── libft4222.so.1.4.4.44
- ├── build-mips-eglibc-hf
- │   └── libft4222.so.1.4.4.44
- ├── build-pentium
- │   └── libft4222.so.1.4.4.44
- ├── build-x86_64
- │   └── libft4222.so.1.4.4.44
- ├── examples
- │   ├── get-version.c
- │   ├── i2cm.c
- │   ├── spim.c
- │   └── spis.c
- ├── ftd2xx.h
- ├── install4222.sh
- ├── libft4222.h
- ├── ReadMe.txt
- └── WinTypes.h
-
-To write a recipe to use such a library in your system:
-
-- The vendor will probably have a proprietary licence, so set
- :term:`LICENSE_FLAGS` in your recipe.
-- The vendor provides a tarball containing libraries so set :term:`SRC_URI`
- appropriately.
-- Set :term:`COMPATIBLE_HOST` so that the recipe cannot be used with an
- unsupported architecture. In the following example, we only support the 32
- and 64 bit variants of the ``x86`` architecture.
-- As the vendor provides versioned libraries, we can use ``oe_soinstall``
- from :ref:`ref-classes-utils` to install the shared library and create
- symbolic links. If the vendor does not do this, we need to follow the
- non-versioned library guidelines in the next section.
-- As the vendor likely used :term:`LDFLAGS` different from those in your Yocto
- Project build, disable the corresponding checks by adding ``ldflags``
- to :term:`INSANE_SKIP`.
-- The vendor will typically ship release builds without debugging symbols.
- Avoid errors by preventing the packaging task from stripping out the symbols
- and adding them to a separate debug package. This is done by setting the
- ``INHIBIT_`` flags shown below.
-
-The complete recipe would look like this::
-
- SUMMARY = "FTDI FT4222H Library"
- SECTION = "libs"
- LICENSE_FLAGS = "ftdi"
- LICENSE = "CLOSED"
-
- COMPATIBLE_HOST = "(i.86|x86_64).*-linux"
-
- # Sources available in a .tgz file in .zip archive
- # at https://ftdichip.com/wp-content/uploads/2021/01/libft4222-linux-1.4.4.44.zip
- # Found on https://ftdichip.com/software-examples/ft4222h-software-examples/
- # Since dealing with this particular type of archive is out of topic here,
- # we use a local link.
- SRC_URI = "file://libft4222-linux-${PV}.tgz"
-
- S = "${WORKDIR}"
-
- ARCH_DIR:x86-64 = "build-x86_64"
- ARCH_DIR:i586 = "build-i386"
- ARCH_DIR:i686 = "build-i386"
-
- INSANE_SKIP:${PN} = "ldflags"
- INHIBIT_PACKAGE_STRIP = "1"
- INHIBIT_SYSROOT_STRIP = "1"
- INHIBIT_PACKAGE_DEBUG_SPLIT = "1"
-
- do_install () {
- install -m 0755 -d ${D}${libdir}
- oe_soinstall ${S}/${ARCH_DIR}/libft4222.so.${PV} ${D}${libdir}
- install -d ${D}${includedir}
- install -m 0755 ${S}/*.h ${D}${includedir}
- }
-
-If the precompiled binaries are not statically linked and have dependencies on
-other libraries, then by adding those libraries to :term:`DEPENDS`, the linking
-can be examined and the appropriate :term:`RDEPENDS` automatically added.
-
-Non-Versioned Libraries
------------------------
-
-Some Background
-~~~~~~~~~~~~~~~
-
-Libraries in Linux systems are generally versioned so that it is possible
-to have multiple versions of the same library installed, which eases upgrades
-and support for older software. For example, suppose that in a versioned
-library, an actual library is called ``libfoo.so.1.2``, a symbolic link named
-``libfoo.so.1`` points to ``libfoo.so.1.2``, and a symbolic link named
-``libfoo.so`` points to ``libfoo.so.1.2``. Given these conditions, when you
-link a binary against a library, you typically provide the unversioned file
-name (i.e. ``-lfoo`` to the linker). However, the linker follows the symbolic
-link and actually links against the versioned filename. The unversioned symbolic
-link is only used at development time. Consequently, the library is packaged
-along with the headers in the development package ``${PN}-dev`` along with the
-actual library and versioned symbolic links in ``${PN}``. Because versioned
-libraries are far more common than unversioned libraries, the default packaging
-rules assume versioned libraries.
-
-Yocto Library Packaging Overview
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-It follows that packaging an unversioned library requires a bit of work in the
-recipe. By default, ``libfoo.so`` gets packaged into ``${PN}-dev``, which
-triggers a QA warning that a non-symlink library is in a ``-dev`` package,
-and binaries in the same recipe link to the library in ``${PN}-dev``,
-which triggers more QA warnings. To solve this problem, you need to package the
-unversioned library into ``${PN}`` where it belongs. The following are the abridged
-default :term:`FILES` variables in ``bitbake.conf``::
-
- SOLIBS = ".so.*"
- SOLIBSDEV = ".so"
- FILES_${PN} = "... ${libdir}/lib*${SOLIBS} ..."
- FILES_SOLIBSDEV ?= "... ${libdir}/lib*${SOLIBSDEV} ..."
- FILES_${PN}-dev = "... ${FILES_SOLIBSDEV} ..."
-
-:term:`SOLIBS` defines a pattern that matches real shared object libraries.
-:term:`SOLIBSDEV` matches the development form (unversioned symlink). These two
-variables are then used in ``FILES:${PN}`` and ``FILES:${PN}-dev``, which puts
-the real libraries into ``${PN}`` and the unversioned symbolic link into ``${PN}-dev``.
-To package unversioned libraries, you need to modify the variables in the recipe
-as follows::
-
- SOLIBS = ".so"
- FILES_SOLIBSDEV = ""
-
-The modifications cause the ``.so`` file to be the real library
-and unset :term:`FILES_SOLIBSDEV` so that no libraries get packaged into
-``${PN}-dev``. The changes are required because unless :term:`PACKAGES` is changed,
-``${PN}-dev`` collects files before `${PN}`. ``${PN}-dev`` must not collect any of
-the files you want in ``${PN}``.
-
-Finally, loadable modules, essentially unversioned libraries that are linked
-at runtime using ``dlopen()`` instead of at build time, should generally be
-installed in a private directory. However, if they are installed in ``${libdir}``,
-then the modules can be treated as unversioned libraries.
-
-Example
-~~~~~~~
-
-The example below installs an unversioned x86-64 pre-built library named
-``libfoo.so``. The :term:`COMPATIBLE_HOST` variable limits recipes to the
-x86-64 architecture while the :term:`INSANE_SKIP`, :term:`INHIBIT_PACKAGE_STRIP`
-and :term:`INHIBIT_SYSROOT_STRIP` variables are all set as in the above
-versioned library example. The "magic" is setting the :term:`SOLIBS` and
-:term:`FILES_SOLIBSDEV` variables as explained above::
-
- SUMMARY = "libfoo sample recipe"
- SECTION = "libs"
- LICENSE = "CLOSED"
-
- SRC_URI = "file://libfoo.so"
-
- COMPATIBLE_HOST = "x86_64.*-linux"
-
- INSANE_SKIP:${PN} = "ldflags"
- INHIBIT_PACKAGE_STRIP = "1"
- INHIBIT_SYSROOT_STRIP = "1"
- SOLIBS = ".so"
- FILES_SOLIBSDEV = ""
-
- do_install () {
- install -d ${D}${libdir}
- install -m 0755 ${WORKDIR}/libfoo.so ${D}${libdir}
- }
-
-Using x32 psABI
-===============
-
-x32 processor-specific Application Binary Interface (`x32
-psABI <https://software.intel.com/en-us/node/628948>`__) is a native
-32-bit processor-specific ABI for Intel 64 (x86-64) architectures. An
-ABI defines the calling conventions between functions in a processing
-environment. The interface determines what registers are used and what
-the sizes are for various C data types.
-
-Some processing environments prefer using 32-bit applications even when
-running on Intel 64-bit platforms. Consider the i386 psABI, which is a
-very old 32-bit ABI for Intel 64-bit platforms. The i386 psABI does not
-provide efficient use and access of the Intel 64-bit processor
-resources, leaving the system underutilized. Now consider the x86_64
-psABI. This ABI is newer and uses 64-bits for data sizes and program
-pointers. The extra bits increase the footprint size of the programs,
-libraries, and also increases the memory and file system size
-requirements. Executing under the x32 psABI enables user programs to
-utilize CPU and system resources more efficiently while keeping the
-memory footprint of the applications low. Extra bits are used for
-registers but not for addressing mechanisms.
-
-The Yocto Project supports the final specifications of x32 psABI as
-follows:
-
-- You can create packages and images in x32 psABI format on x86_64
- architecture targets.
-
-- You can successfully build recipes with the x32 toolchain.
-
-- You can create and boot ``core-image-minimal`` and
- ``core-image-sato`` images.
-
-- There is RPM Package Manager (RPM) support for x32 binaries.
-
-- There is support for large images.
-
-To use the x32 psABI, you need to edit your ``conf/local.conf``
-configuration file as follows::
-
- MACHINE = "qemux86-64"
- DEFAULTTUNE = "x86-64-x32"
- baselib = "${@d.getVar('BASE_LIB:tune-' + (d.getVar('DEFAULTTUNE') \
- or 'INVALID')) or 'lib'}"
-
-Once you have set
-up your configuration file, use BitBake to build an image that supports
-the x32 psABI. Here is an example::
-
- $ bitbake core-image-sato
-
-Enabling GObject Introspection Support
-======================================
-
-`GObject introspection <https://gi.readthedocs.io/en/latest/>`__
-is the standard mechanism for accessing GObject-based software from
-runtime environments. GObject is a feature of the GLib library that
-provides an object framework for the GNOME desktop and related software.
-GObject Introspection adds information to GObject that allows objects
-created within it to be represented across different programming
-languages. If you want to construct GStreamer pipelines using Python, or
-control UPnP infrastructure using Javascript and GUPnP, GObject
-introspection is the only way to do it.
-
-This section describes the Yocto Project support for generating and
-packaging GObject introspection data. GObject introspection data is a
-description of the API provided by libraries built on top of the GLib
-framework, and, in particular, that framework's GObject mechanism.
-GObject Introspection Repository (GIR) files go to ``-dev`` packages,
-``typelib`` files go to main packages as they are packaged together with
-libraries that are introspected.
-
-The data is generated when building such a library, by linking the
-library with a small executable binary that asks the library to describe
-itself, and then executing the binary and processing its output.
-
-Generating this data in a cross-compilation environment is difficult
-because the library is produced for the target architecture, but its
-code needs to be executed on the build host. This problem is solved with
-the OpenEmbedded build system by running the code through QEMU, which
-allows precisely that. Unfortunately, QEMU does not always work
-perfectly as mentioned in the ":ref:`dev-manual/common-tasks:known issues`"
-section.
-
-Enabling the Generation of Introspection Data
----------------------------------------------
-
-Enabling the generation of introspection data (GIR files) in your
-library package involves the following:
-
-1. Inherit the
- :ref:`gobject-introspection <ref-classes-gobject-introspection>`
- class.
-
-2. Make sure introspection is not disabled anywhere in the recipe or
- from anything the recipe includes. Also, make sure that
- "gobject-introspection-data" is not in
- :term:`DISTRO_FEATURES_BACKFILL_CONSIDERED`
- and that "qemu-usermode" is not in
- :term:`MACHINE_FEATURES_BACKFILL_CONSIDERED`.
- In either of these conditions, nothing will happen.
-
-3. Try to build the recipe. If you encounter build errors that look like
- something is unable to find ``.so`` libraries, check where these
- libraries are located in the source tree and add the following to the
- recipe::
-
- GIR_EXTRA_LIBS_PATH = "${B}/something/.libs"
-
- .. note::
-
- See recipes in the ``oe-core`` repository that use that
- :term:`GIR_EXTRA_LIBS_PATH` variable as an example.
-
-4. Look for any other errors, which probably mean that introspection
- support in a package is not entirely standard, and thus breaks down
- in a cross-compilation environment. For such cases, custom-made fixes
- are needed. A good place to ask and receive help in these cases is
- the :ref:`Yocto Project mailing
- lists <resources-mailinglist>`.
-
-.. note::
-
- Using a library that no longer builds against the latest Yocto
- Project release and prints introspection related errors is a good
- candidate for the previous procedure.
-
-Disabling the Generation of Introspection Data
-----------------------------------------------
-
-You might find that you do not want to generate introspection data. Or,
-perhaps QEMU does not work on your build host and target architecture
-combination. If so, you can use either of the following methods to
-disable GIR file generations:
-
-- Add the following to your distro configuration::
-
- DISTRO_FEATURES_BACKFILL_CONSIDERED = "gobject-introspection-data"
-
- Adding this statement disables generating introspection data using
- QEMU but will still enable building introspection tools and libraries
- (i.e. building them does not require the use of QEMU).
-
-- Add the following to your machine configuration::
-
- MACHINE_FEATURES_BACKFILL_CONSIDERED = "qemu-usermode"
-
- Adding this statement disables the use of QEMU when building packages for your
- machine. Currently, this feature is used only by introspection
- recipes and has the same effect as the previously described option.
-
- .. note::
-
- Future releases of the Yocto Project might have other features
- affected by this option.
-
-If you disable introspection data, you can still obtain it through other
-means such as copying the data from a suitable sysroot, or by generating
-it on the target hardware. The OpenEmbedded build system does not
-currently provide specific support for these techniques.
-
-Testing that Introspection Works in an Image
---------------------------------------------
-
-Use the following procedure to test if generating introspection data is
-working in an image:
-
-1. Make sure that "gobject-introspection-data" is not in
- :term:`DISTRO_FEATURES_BACKFILL_CONSIDERED`
- and that "qemu-usermode" is not in
- :term:`MACHINE_FEATURES_BACKFILL_CONSIDERED`.
-
-2. Build ``core-image-sato``.
-
-3. Launch a Terminal and then start Python in the terminal.
-
-4. Enter the following in the terminal::
-
- >>> from gi.repository import GLib
- >>> GLib.get_host_name()
-
-5. For something a little more advanced, enter the following see:
- https://python-gtk-3-tutorial.readthedocs.io/en/latest/introduction.html
-
-Known Issues
-------------
-
-Here are know issues in GObject Introspection Support:
-
-- ``qemu-ppc64`` immediately crashes. Consequently, you cannot build
- introspection data on that architecture.
-
-- x32 is not supported by QEMU. Consequently, introspection data is
- disabled.
-
-- musl causes transient GLib binaries to crash on assertion failures.
- Consequently, generating introspection data is disabled.
-
-- Because QEMU is not able to run the binaries correctly, introspection
- is disabled for some specific packages under specific architectures
- (e.g. ``gcr``, ``libsecret``, and ``webkit``).
-
-- QEMU usermode might not work properly when running 64-bit binaries
- under 32-bit host machines. In particular, "qemumips64" is known to
- not work under i686.
-
-Optionally Using an External Toolchain
-======================================
-
-You might want to use an external toolchain as part of your development.
-If this is the case, the fundamental steps you need to accomplish are as
-follows:
-
-- Understand where the installed toolchain resides. For cases where you
- need to build the external toolchain, you would need to take separate
- steps to build and install the toolchain.
-
-- Make sure you add the layer that contains the toolchain to your
- ``bblayers.conf`` file through the
- :term:`BBLAYERS` variable.
-
-- Set the ``EXTERNAL_TOOLCHAIN`` variable in your ``local.conf`` file
- to the location in which you installed the toolchain.
-
-A good example of an external toolchain used with the Yocto Project is
-Mentor Graphics Sourcery G++ Toolchain. You can see information on how
-to use that particular layer in the ``README`` file at
-https://github.com/MentorEmbedded/meta-sourcery/. You can find
-further information by reading about the
-:term:`TCMODE` variable in the Yocto
-Project Reference Manual's variable glossary.
-
-Creating Partitioned Images Using Wic
-=====================================
-
-Creating an image for a particular hardware target using the
-OpenEmbedded build system does not necessarily mean you can boot that
-image as is on your device. Physical devices accept and boot images in
-various ways depending on the specifics of the device. Usually,
-information about the hardware can tell you what image format the device
-requires. Should your device require multiple partitions on an SD card,
-flash, or an HDD, you can use the OpenEmbedded Image Creator, Wic, to
-create the properly partitioned image.
-
-The ``wic`` command generates partitioned images from existing
-OpenEmbedded build artifacts. Image generation is driven by partitioning
-commands contained in an OpenEmbedded kickstart file (``.wks``)
-specified either directly on the command line or as one of a selection
-of canned kickstart files as shown with the ``wic list images`` command
-in the
-":ref:`dev-manual/common-tasks:generate an image using an existing kickstart file`"
-section. When you apply the command to a given set of build artifacts, the
-result is an image or set of images that can be directly written onto media and
-used on a particular system.
-
-.. note::
-
- For a kickstart file reference, see the
- ":ref:`ref-manual/kickstart:openembedded kickstart (\`\`.wks\`\`) reference`"
- Chapter in the Yocto Project Reference Manual.
-
-The ``wic`` command and the infrastructure it is based on is by
-definition incomplete. The purpose of the command is to allow the
-generation of customized images, and as such, was designed to be
-completely extensible through a plugin interface. See the
-":ref:`dev-manual/common-tasks:using the wic plugin interface`" section
-for information on these plugins.
-
-This section provides some background information on Wic, describes what
-you need to have in place to run the tool, provides instruction on how
-to use the Wic utility, provides information on using the Wic plugins
-interface, and provides several examples that show how to use Wic.
-
-Background
-----------
-
-This section provides some background on the Wic utility. While none of
-this information is required to use Wic, you might find it interesting.
-
-- The name "Wic" is derived from OpenEmbedded Image Creator (oeic). The
- "oe" diphthong in "oeic" was promoted to the letter "w", because
- "oeic" is both difficult to remember and to pronounce.
-
-- Wic is loosely based on the Meego Image Creator (``mic``) framework.
- The Wic implementation has been heavily modified to make direct use
- of OpenEmbedded build artifacts instead of package installation and
- configuration, which are already incorporated within the OpenEmbedded
- artifacts.
-
-- Wic is a completely independent standalone utility that initially
- provides easier-to-use and more flexible replacements for an existing
- functionality in OE-Core's
- :ref:`image-live <ref-classes-image-live>`
- class. The difference between Wic and those examples is that with Wic
- the functionality of those scripts is implemented by a
- general-purpose partitioning language, which is based on Redhat
- kickstart syntax.
-
-Requirements
-------------
-
-In order to use the Wic utility with the OpenEmbedded Build system, your
-system needs to meet the following requirements:
-
-- The Linux distribution on your development host must support the
- Yocto Project. See the ":ref:`detailed-supported-distros`"
- section in the Yocto Project Reference Manual for the list of
- distributions that support the Yocto Project.
-
-- The standard system utilities, such as ``cp``, must be installed on
- your development host system.
-
-- You must have sourced the build environment setup script (i.e.
- :ref:`structure-core-script`) found in the
- :term:`Build Directory`.
-
-- You need to have the build artifacts already available, which
- typically means that you must have already created an image using the
- OpenEmbedded build system (e.g. ``core-image-minimal``). While it
- might seem redundant to generate an image in order to create an image
- using Wic, the current version of Wic requires the artifacts in the
- form generated by the OpenEmbedded build system.
-
-- You must build several native tools, which are built to run on the
- build system::
-
- $ bitbake parted-native dosfstools-native mtools-native
-
-- Include "wic" as part of the
- :term:`IMAGE_FSTYPES`
- variable.
-
-- Include the name of the :ref:`wic kickstart file <openembedded-kickstart-wks-reference>`
- as part of the :term:`WKS_FILE` variable
-
-Getting Help
-------------
-
-You can get general help for the ``wic`` command by entering the ``wic``
-command by itself or by entering the command with a help argument as
-follows::
-
- $ wic -h
- $ wic --help
- $ wic help
-
-Currently, Wic supports seven commands: ``cp``, ``create``, ``help``,
-``list``, ``ls``, ``rm``, and ``write``. You can get help for all these
-commands except "help" by using the following form::
-
- $ wic help command
-
-For example, the following command returns help for the ``write``
-command::
-
- $ wic help write
-
-Wic supports help for three topics: ``overview``, ``plugins``, and
-``kickstart``. You can get help for any topic using the following form::
-
- $ wic help topic
-
-For example, the following returns overview help for Wic::
-
- $ wic help overview
-
-There is one additional level of help for Wic. You can get help on
-individual images through the ``list`` command. You can use the ``list``
-command to return the available Wic images as follows::
-
- $ wic list images
- genericx86 Create an EFI disk image for genericx86*
- edgerouter Create SD card image for Edgerouter
- beaglebone-yocto Create SD card image for Beaglebone
- qemux86-directdisk Create a qemu machine 'pcbios' direct disk image
- systemd-bootdisk Create an EFI disk image with systemd-boot
- mkhybridiso Create a hybrid ISO image
- mkefidisk Create an EFI disk image
- sdimage-bootpart Create SD card image with a boot partition
- directdisk-multi-rootfs Create multi rootfs image using rootfs plugin
- directdisk Create a 'pcbios' direct disk image
- directdisk-bootloader-config Create a 'pcbios' direct disk image with custom bootloader config
- qemuriscv Create qcow2 image for RISC-V QEMU machines
- directdisk-gpt Create a 'pcbios' direct disk image
- efi-bootdisk
-
-Once you know the list of available
-Wic images, you can use ``help`` with the command to get help on a
-particular image. For example, the following command returns help on the
-"beaglebone-yocto" image::
-
- $ wic list beaglebone-yocto help
-
- Creates a partitioned SD card image for Beaglebone.
- Boot files are located in the first vfat partition.
-
-Operational Modes
------------------
-
-You can use Wic in two different modes, depending on how much control
-you need for specifying the OpenEmbedded build artifacts that are used
-for creating the image: Raw and Cooked:
-
-- *Raw Mode:* You explicitly specify build artifacts through Wic
- command-line arguments.
-
-- *Cooked Mode:* The current
- :term:`MACHINE` setting and image
- name are used to automatically locate and provide the build
- artifacts. You just supply a kickstart file and the name of the image
- from which to use artifacts.
-
-Regardless of the mode you use, you need to have the build artifacts
-ready and available.
-
-Raw Mode
-~~~~~~~~
-
-Running Wic in raw mode allows you to specify all the partitions through
-the ``wic`` command line. The primary use for raw mode is if you have
-built your kernel outside of the Yocto Project
-:term:`Build Directory`. In other words, you
-can point to arbitrary kernel, root filesystem locations, and so forth.
-Contrast this behavior with cooked mode where Wic looks in the Build
-Directory (e.g. ``tmp/deploy/images/``\ machine).
-
-The general form of the ``wic`` command in raw mode is::
-
- $ wic create wks_file options ...
-
- Where:
-
- wks_file:
- An OpenEmbedded kickstart file. You can provide
- your own custom file or use a file from a set of
- existing files as described by further options.
-
- optional arguments:
- -h, --help show this help message and exit
- -o OUTDIR, --outdir OUTDIR
- name of directory to create image in
- -e IMAGE_NAME, --image-name IMAGE_NAME
- name of the image to use the artifacts from e.g. core-
- image-sato
- -r ROOTFS_DIR, --rootfs-dir ROOTFS_DIR
- path to the /rootfs dir to use as the .wks rootfs
- source
- -b BOOTIMG_DIR, --bootimg-dir BOOTIMG_DIR
- path to the dir containing the boot artifacts (e.g.
- /EFI or /syslinux dirs) to use as the .wks bootimg
- source
- -k KERNEL_DIR, --kernel-dir KERNEL_DIR
- path to the dir containing the kernel to use in the
- .wks bootimg
- -n NATIVE_SYSROOT, --native-sysroot NATIVE_SYSROOT
- path to the native sysroot containing the tools to use
- to build the image
- -s, --skip-build-check
- skip the build check
- -f, --build-rootfs build rootfs
- -c {gzip,bzip2,xz}, --compress-with {gzip,bzip2,xz}
- compress image with specified compressor
- -m, --bmap generate .bmap
- --no-fstab-update Do not change fstab file.
- -v VARS_DIR, --vars VARS_DIR
- directory with <image>.env files that store bitbake
- variables
- -D, --debug output debug information
-
-.. note::
-
- You do not need root privileges to run Wic. In fact, you should not
- run as root when using the utility.
-
-Cooked Mode
-~~~~~~~~~~~
-
-Running Wic in cooked mode leverages off artifacts in the Build
-Directory. In other words, you do not have to specify kernel or root
-filesystem locations as part of the command. All you need to provide is
-a kickstart file and the name of the image from which to use artifacts
-by using the "-e" option. Wic looks in the Build Directory (e.g.
-``tmp/deploy/images/``\ machine) for artifacts.
-
-The general form of the ``wic`` command using Cooked Mode is as follows::
-
- $ wic create wks_file -e IMAGE_NAME
-
- Where:
-
- wks_file:
- An OpenEmbedded kickstart file. You can provide
- your own custom file or use a file from a set of
- existing files provided with the Yocto Project
- release.
-
- required argument:
- -e IMAGE_NAME, --image-name IMAGE_NAME
- name of the image to use the artifacts from e.g. core-
- image-sato
-
-Using an Existing Kickstart File
---------------------------------
-
-If you do not want to create your own kickstart file, you can use an
-existing file provided by the Wic installation. As shipped, kickstart
-files can be found in the :ref:`overview-manual/development-environment:yocto project source repositories` in the
-following two locations::
-
- poky/meta-yocto-bsp/wic
- poky/scripts/lib/wic/canned-wks
-
-Use the following command to list the available kickstart files::
-
- $ wic list images
- genericx86 Create an EFI disk image for genericx86*
- beaglebone-yocto Create SD card image for Beaglebone
- edgerouter Create SD card image for Edgerouter
- qemux86-directdisk Create a QEMU machine 'pcbios' direct disk image
- directdisk-gpt Create a 'pcbios' direct disk image
- mkefidisk Create an EFI disk image
- directdisk Create a 'pcbios' direct disk image
- systemd-bootdisk Create an EFI disk image with systemd-boot
- mkhybridiso Create a hybrid ISO image
- sdimage-bootpart Create SD card image with a boot partition
- directdisk-multi-rootfs Create multi rootfs image using rootfs plugin
- directdisk-bootloader-config Create a 'pcbios' direct disk image with custom bootloader config
-
-When you use an existing file, you
-do not have to use the ``.wks`` extension. Here is an example in Raw
-Mode that uses the ``directdisk`` file::
-
- $ wic create directdisk -r rootfs_dir -b bootimg_dir \
- -k kernel_dir -n native_sysroot
-
-Here are the actual partition language commands used in the
-``genericx86.wks`` file to generate an image::
-
- # short-description: Create an EFI disk image for genericx86*
- # long-description: Creates a partitioned EFI disk image for genericx86* machines
- part /boot --source bootimg-efi --sourceparams="loader=grub-efi" --ondisk sda --label msdos --active --align 1024
- part / --source rootfs --ondisk sda --fstype=ext4 --label platform --align 1024 --use-uuid
- part swap --ondisk sda --size 44 --label swap1 --fstype=swap
-
- bootloader --ptable gpt --timeout=5 --append="rootfstype=ext4 console=ttyS0,115200 console=tty0"
-
-Using the Wic Plugin Interface
-------------------------------
-
-You can extend and specialize Wic functionality by using Wic plugins.
-This section explains the Wic plugin interface.
-
-.. note::
-
- Wic plugins consist of "source" and "imager" plugins. Imager plugins
- are beyond the scope of this section.
-
-Source plugins provide a mechanism to customize partition content during
-the Wic image generation process. You can use source plugins to map
-values that you specify using ``--source`` commands in kickstart files
-(i.e. ``*.wks``) to a plugin implementation used to populate a given
-partition.
-
-.. note::
-
- If you use plugins that have build-time dependencies (e.g. native
- tools, bootloaders, and so forth) when building a Wic image, you need
- to specify those dependencies using the :term:`WKS_FILE_DEPENDS`
- variable.
-
-Source plugins are subclasses defined in plugin files. As shipped, the
-Yocto Project provides several plugin files. You can see the source
-plugin files that ship with the Yocto Project
-:yocto_git:`here </poky/tree/scripts/lib/wic/plugins/source>`.
-Each of these plugin files contains source plugins that are designed to
-populate a specific Wic image partition.
-
-Source plugins are subclasses of the ``SourcePlugin`` class, which is
-defined in the ``poky/scripts/lib/wic/pluginbase.py`` file. For example,
-the ``BootimgEFIPlugin`` source plugin found in the ``bootimg-efi.py``
-file is a subclass of the ``SourcePlugin`` class, which is found in the
-``pluginbase.py`` file.
-
-You can also implement source plugins in a layer outside of the Source
-Repositories (external layer). To do so, be sure that your plugin files
-are located in a directory whose path is
-``scripts/lib/wic/plugins/source/`` within your external layer. When the
-plugin files are located there, the source plugins they contain are made
-available to Wic.
-
-When the Wic implementation needs to invoke a partition-specific
-implementation, it looks for the plugin with the same name as the
-``--source`` parameter used in the kickstart file given to that
-partition. For example, if the partition is set up using the following
-command in a kickstart file::
-
- part /boot --source bootimg-pcbios --ondisk sda --label boot --active --align 1024
-
-The methods defined as class
-members of the matching source plugin (i.e. ``bootimg-pcbios``) in the
-``bootimg-pcbios.py`` plugin file are used.
-
-To be more concrete, here is the corresponding plugin definition from
-the ``bootimg-pcbios.py`` file for the previous command along with an
-example method called by the Wic implementation when it needs to prepare
-a partition using an implementation-specific function::
-
- .
- .
- .
- class BootimgPcbiosPlugin(SourcePlugin):
- """
- Create MBR boot partition and install syslinux on it.
- """
-
- name = 'bootimg-pcbios'
- .
- .
- .
- @classmethod
- def do_prepare_partition(cls, part, source_params, creator, cr_workdir,
- oe_builddir, bootimg_dir, kernel_dir,
- rootfs_dir, native_sysroot):
- """
- Called to do the actual content population for a partition i.e. it
- 'prepares' the partition to be incorporated into the image.
- In this case, prepare content for legacy bios boot partition.
- """
- .
- .
- .
-
-If a
-subclass (plugin) itself does not implement a particular function, Wic
-locates and uses the default version in the superclass. It is for this
-reason that all source plugins are derived from the ``SourcePlugin``
-class.
-
-The ``SourcePlugin`` class defined in the ``pluginbase.py`` file defines
-a set of methods that source plugins can implement or override. Any
-plugins (subclass of ``SourcePlugin``) that do not implement a
-particular method inherit the implementation of the method from the
-``SourcePlugin`` class. For more information, see the ``SourcePlugin``
-class in the ``pluginbase.py`` file for details:
-
-The following list describes the methods implemented in the
-``SourcePlugin`` class:
-
-- ``do_prepare_partition()``: Called to populate a partition with
- actual content. In other words, the method prepares the final
- partition image that is incorporated into the disk image.
-
-- ``do_configure_partition()``: Called before
- ``do_prepare_partition()`` to create custom configuration files for a
- partition (e.g. syslinux or grub configuration files).
-
-- ``do_install_disk()``: Called after all partitions have been
- prepared and assembled into a disk image. This method provides a hook
- to allow finalization of a disk image (e.g. writing an MBR).
-
-- ``do_stage_partition()``: Special content-staging hook called
- before ``do_prepare_partition()``. This method is normally empty.
-
- Typically, a partition just uses the passed-in parameters (e.g. the
- unmodified value of ``bootimg_dir``). However, in some cases, things
- might need to be more tailored. As an example, certain files might
- additionally need to be taken from ``bootimg_dir + /boot``. This hook
- allows those files to be staged in a customized fashion.
-
- .. note::
-
- ``get_bitbake_var()`` allows you to access non-standard variables that
- you might want to use for this behavior.
-
-You can extend the source plugin mechanism. To add more hooks, create
-more source plugin methods within ``SourcePlugin`` and the corresponding
-derived subclasses. The code that calls the plugin methods uses the
-``plugin.get_source_plugin_methods()`` function to find the method or
-methods needed by the call. Retrieval of those methods is accomplished
-by filling up a dict with keys that contain the method names of
-interest. On success, these will be filled in with the actual methods.
-See the Wic implementation for examples and details.
-
-Wic Examples
-------------
-
-This section provides several examples that show how to use the Wic
-utility. All the examples assume the list of requirements in the
-":ref:`dev-manual/common-tasks:requirements`" section have been met. The
-examples assume the previously generated image is
-``core-image-minimal``.
-
-Generate an Image using an Existing Kickstart File
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-This example runs in Cooked Mode and uses the ``mkefidisk`` kickstart
-file::
-
- $ wic create mkefidisk -e core-image-minimal
- INFO: Building wic-tools...
- .
- .
- .
- INFO: The new image(s) can be found here:
- ./mkefidisk-201804191017-sda.direct
-
- The following build artifacts were used to create the image(s):
- ROOTFS_DIR: /home/stephano/yocto/build/tmp-glibc/work/qemux86-oe-linux/core-image-minimal/1.0-r0/rootfs
- BOOTIMG_DIR: /home/stephano/yocto/build/tmp-glibc/work/qemux86-oe-linux/core-image-minimal/1.0-r0/recipe-sysroot/usr/share
- KERNEL_DIR: /home/stephano/yocto/build/tmp-glibc/deploy/images/qemux86
- NATIVE_SYSROOT: /home/stephano/yocto/build/tmp-glibc/work/i586-oe-linux/wic-tools/1.0-r0/recipe-sysroot-native
-
- INFO: The image(s) were created using OE kickstart file:
- /home/stephano/yocto/openembedded-core/scripts/lib/wic/canned-wks/mkefidisk.wks
-
-The previous example shows the easiest way to create an image by running
-in cooked mode and supplying a kickstart file and the "-e" option to
-point to the existing build artifacts. Your ``local.conf`` file needs to
-have the :term:`MACHINE` variable set
-to the machine you are using, which is "qemux86" in this example.
-
-Once the image builds, the output provides image location, artifact use,
-and kickstart file information.
-
-.. note::
-
- You should always verify the details provided in the output to make
- sure that the image was indeed created exactly as expected.
-
-Continuing with the example, you can now write the image from the Build
-Directory onto a USB stick, or whatever media for which you built your
-image, and boot from the media. You can write the image by using
-``bmaptool`` or ``dd``::
-
- $ oe-run-native bmaptool copy mkefidisk-201804191017-sda.direct /dev/sdX
-
-or ::
-
- $ sudo dd if=mkefidisk-201804191017-sda.direct of=/dev/sdX
-
-.. note::
-
- For more information on how to use the ``bmaptool``
- to flash a device with an image, see the
- ":ref:`dev-manual/common-tasks:flashing images using \`\`bmaptool\`\``"
- section.
-
-Using a Modified Kickstart File
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-Because partitioned image creation is driven by the kickstart file, it
-is easy to affect image creation by changing the parameters in the file.
-This next example demonstrates that through modification of the
-``directdisk-gpt`` kickstart file.
-
-As mentioned earlier, you can use the command ``wic list images`` to
-show the list of existing kickstart files. The directory in which the
-``directdisk-gpt.wks`` file resides is
-``scripts/lib/image/canned-wks/``, which is located in the
-:term:`Source Directory` (e.g. ``poky``).
-Because available files reside in this directory, you can create and add
-your own custom files to the directory. Subsequent use of the
-``wic list images`` command would then include your kickstart files.
-
-In this example, the existing ``directdisk-gpt`` file already does most
-of what is needed. However, for the hardware in this example, the image
-will need to boot from ``sdb`` instead of ``sda``, which is what the
-``directdisk-gpt`` kickstart file uses.
-
-The example begins by making a copy of the ``directdisk-gpt.wks`` file
-in the ``scripts/lib/image/canned-wks`` directory and then by changing
-the lines that specify the target disk from which to boot.
-::
-
- $ cp /home/stephano/yocto/poky/scripts/lib/wic/canned-wks/directdisk-gpt.wks \
- /home/stephano/yocto/poky/scripts/lib/wic/canned-wks/directdisksdb-gpt.wks
-
-Next, the example modifies the ``directdisksdb-gpt.wks`` file and
-changes all instances of "``--ondisk sda``" to "``--ondisk sdb``". The
-example changes the following two lines and leaves the remaining lines
-untouched::
-
- part /boot --source bootimg-pcbios --ondisk sdb --label boot --active --align 1024
- part / --source rootfs --ondisk sdb --fstype=ext4 --label platform --align 1024 --use-uuid
-
-Once the lines are changed, the
-example generates the ``directdisksdb-gpt`` image. The command points
-the process at the ``core-image-minimal`` artifacts for the Next Unit of
-Computing (nuc) :term:`MACHINE` the
-``local.conf``.
-::
-
- $ wic create directdisksdb-gpt -e core-image-minimal
- INFO: Building wic-tools...
- .
- .
- .
- Initialising tasks: 100% |#######################################| Time: 0:00:01
- NOTE: Executing SetScene Tasks
- NOTE: Executing RunQueue Tasks
- NOTE: Tasks Summary: Attempted 1161 tasks of which 1157 didn't need to be rerun and all succeeded.
- INFO: Creating image(s)...
-
- INFO: The new image(s) can be found here:
- ./directdisksdb-gpt-201710090938-sdb.direct
-
- The following build artifacts were used to create the image(s):
- ROOTFS_DIR: /home/stephano/yocto/build/tmp-glibc/work/qemux86-oe-linux/core-image-minimal/1.0-r0/rootfs
- BOOTIMG_DIR: /home/stephano/yocto/build/tmp-glibc/work/qemux86-oe-linux/core-image-minimal/1.0-r0/recipe-sysroot/usr/share
- KERNEL_DIR: /home/stephano/yocto/build/tmp-glibc/deploy/images/qemux86
- NATIVE_SYSROOT: /home/stephano/yocto/build/tmp-glibc/work/i586-oe-linux/wic-tools/1.0-r0/recipe-sysroot-native
-
- INFO: The image(s) were created using OE kickstart file:
- /home/stephano/yocto/poky/scripts/lib/wic/canned-wks/directdisksdb-gpt.wks
-
-Continuing with the example, you can now directly ``dd`` the image to a
-USB stick, or whatever media for which you built your image, and boot
-the resulting media::
-
- $ sudo dd if=directdisksdb-gpt-201710090938-sdb.direct of=/dev/sdb
- 140966+0 records in
- 140966+0 records out
- 72174592 bytes (72 MB, 69 MiB) copied, 78.0282 s, 925 kB/s
- $ sudo eject /dev/sdb
-
-Using a Modified Kickstart File and Running in Raw Mode
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-This next example manually specifies each build artifact (runs in Raw
-Mode) and uses a modified kickstart file. The example also uses the
-``-o`` option to cause Wic to create the output somewhere other than the
-default output directory, which is the current directory::
-
- $ wic create test.wks -o /home/stephano/testwic \
- --rootfs-dir /home/stephano/yocto/build/tmp/work/qemux86-poky-linux/core-image-minimal/1.0-r0/rootfs \
- --bootimg-dir /home/stephano/yocto/build/tmp/work/qemux86-poky-linux/core-image-minimal/1.0-r0/recipe-sysroot/usr/share \
- --kernel-dir /home/stephano/yocto/build/tmp/deploy/images/qemux86 \
- --native-sysroot /home/stephano/yocto/build/tmp/work/i586-poky-linux/wic-tools/1.0-r0/recipe-sysroot-native
-
- INFO: Creating image(s)...
-
- INFO: The new image(s) can be found here:
- /home/stephano/testwic/test-201710091445-sdb.direct
-
- The following build artifacts were used to create the image(s):
- ROOTFS_DIR: /home/stephano/yocto/build/tmp-glibc/work/qemux86-oe-linux/core-image-minimal/1.0-r0/rootfs
- BOOTIMG_DIR: /home/stephano/yocto/build/tmp-glibc/work/qemux86-oe-linux/core-image-minimal/1.0-r0/recipe-sysroot/usr/share
- KERNEL_DIR: /home/stephano/yocto/build/tmp-glibc/deploy/images/qemux86
- NATIVE_SYSROOT: /home/stephano/yocto/build/tmp-glibc/work/i586-oe-linux/wic-tools/1.0-r0/recipe-sysroot-native
-
- INFO: The image(s) were created using OE kickstart file:
- test.wks
-
-For this example,
-:term:`MACHINE` did not have to be
-specified in the ``local.conf`` file since the artifact is manually
-specified.
-
-Using Wic to Manipulate an Image
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-Wic image manipulation allows you to shorten turnaround time during
-image development. For example, you can use Wic to delete the kernel
-partition of a Wic image and then insert a newly built kernel. This
-saves you time from having to rebuild the entire image each time you
-modify the kernel.
-
-.. note::
-
- In order to use Wic to manipulate a Wic image as in this example,
- your development machine must have the ``mtools`` package installed.
-
-The following example examines the contents of the Wic image, deletes
-the existing kernel, and then inserts a new kernel:
-
-1. *List the Partitions:* Use the ``wic ls`` command to list all the
- partitions in the Wic image::
-
- $ wic ls tmp/deploy/images/qemux86/core-image-minimal-qemux86.wic
- Num Start End Size Fstype
- 1 1048576 25041919 23993344 fat16
- 2 25165824 72157183 46991360 ext4
-
- The previous output shows two partitions in the
- ``core-image-minimal-qemux86.wic`` image.
-
-2. *Examine a Particular Partition:* Use the ``wic ls`` command again
- but in a different form to examine a particular partition.
-
- .. note::
-
- You can get command usage on any Wic command using the following
- form::
-
- $ wic help command
-
-
- For example, the following command shows you the various ways to
- use the
- wic ls
- command::
-
- $ wic help ls
-
-
- The following command shows what is in partition one::
-
- $ wic ls tmp/deploy/images/qemux86/core-image-minimal-qemux86.wic:1
- Volume in drive : is boot
- Volume Serial Number is E894-1809
- Directory for ::/
-
- libcom32 c32 186500 2017-10-09 16:06
- libutil c32 24148 2017-10-09 16:06
- syslinux cfg 220 2017-10-09 16:06
- vesamenu c32 27104 2017-10-09 16:06
- vmlinuz 6904608 2017-10-09 16:06
- 5 files 7 142 580 bytes
- 16 582 656 bytes free
-
- The previous output shows five files, with the
- ``vmlinuz`` being the kernel.
-
- .. note::
-
- If you see the following error, you need to update or create a
- ``~/.mtoolsrc`` file and be sure to have the line "mtools_skip_check=1"
- in the file. Then, run the Wic command again::
-
- ERROR: _exec_cmd: /usr/bin/mdir -i /tmp/wic-parttfokuwra ::/ returned '1' instead of 0
- output: Total number of sectors (47824) not a multiple of sectors per track (32)!
- Add mtools_skip_check=1 to your .mtoolsrc file to skip this test
-
-
-3. *Remove the Old Kernel:* Use the ``wic rm`` command to remove the
- ``vmlinuz`` file (kernel)::
-
- $ wic rm tmp/deploy/images/qemux86/core-image-minimal-qemux86.wic:1/vmlinuz
-
-4. *Add In the New Kernel:* Use the ``wic cp`` command to add the
- updated kernel to the Wic image. Depending on how you built your
- kernel, it could be in different places. If you used ``devtool`` and
- an SDK to build your kernel, it resides in the ``tmp/work`` directory
- of the extensible SDK. If you used ``make`` to build the kernel, the
- kernel will be in the ``workspace/sources`` area.
-
- The following example assumes ``devtool`` was used to build the
- kernel::
-
- $ wic cp poky_sdk/tmp/work/qemux86-poky-linux/linux-yocto/4.12.12+git999-r0/linux-yocto-4.12.12+git999/arch/x86/boot/bzImage \
- poky/build/tmp/deploy/images/qemux86/core-image-minimal-qemux86.wic:1/vmlinuz
-
- Once the new kernel is added back into the image, you can use the
- ``dd`` command or :ref:`bmaptool
- <dev-manual/common-tasks:flashing images using \`\`bmaptool\`\`>`
- to flash your wic image onto an SD card or USB stick and test your
- target.
-
- .. note::
-
- Using ``bmaptool`` is generally 10 to 20 times faster than using ``dd``.
-
-Flashing Images Using ``bmaptool``
-==================================
-
-A fast and easy way to flash an image to a bootable device is to use
-Bmaptool, which is integrated into the OpenEmbedded build system.
-Bmaptool is a generic tool that creates a file's block map (bmap) and
-then uses that map to copy the file. As compared to traditional tools
-such as dd or cp, Bmaptool can copy (or flash) large files like raw
-system image files much faster.
-
-.. note::
-
- - If you are using Ubuntu or Debian distributions, you can install
- the ``bmap-tools`` package using the following command and then
- use the tool without specifying ``PATH`` even from the root
- account::
-
- $ sudo apt install bmap-tools
-
- - If you are unable to install the ``bmap-tools`` package, you will
- need to build Bmaptool before using it. Use the following command::
-
- $ bitbake bmap-tools-native
-
-Following, is an example that shows how to flash a Wic image. Realize
-that while this example uses a Wic image, you can use Bmaptool to flash
-any type of image. Use these steps to flash an image using Bmaptool:
-
-1. *Update your local.conf File:* You need to have the following set
- in your ``local.conf`` file before building your image::
-
- IMAGE_FSTYPES += "wic wic.bmap"
-
-2. *Get Your Image:* Either have your image ready (pre-built with the
- :term:`IMAGE_FSTYPES`
- setting previously mentioned) or take the step to build the image::
-
- $ bitbake image
-
-3. *Flash the Device:* Flash the device with the image by using Bmaptool
- depending on your particular setup. The following commands assume the
- image resides in the Build Directory's ``deploy/images/`` area:
-
- - If you have write access to the media, use this command form::
-
- $ oe-run-native bmap-tools-native bmaptool copy build-directory/tmp/deploy/images/machine/image.wic /dev/sdX
-
- - If you do not have write access to the media, set your permissions
- first and then use the same command form::
-
- $ sudo chmod 666 /dev/sdX
- $ oe-run-native bmap-tools-native bmaptool copy build-directory/tmp/deploy/images/machine/image.wic /dev/sdX
-
-For help on the ``bmaptool`` command, use the following command::
-
- $ bmaptool --help
-
-Making Images More Secure
-=========================
-
-Security is of increasing concern for embedded devices. Consider the
-issues and problems discussed in just this sampling of work found across
-the Internet:
-
-- *"*\ `Security Risks of Embedded
- Systems <https://www.schneier.com/blog/archives/2014/01/security_risks_9.html>`__\ *"*
- by Bruce Schneier
-
-- *"*\ `Internet Census
- 2012 <http://census2012.sourceforge.net/paper.html>`__\ *"* by Carna
- Botnet
-
-- *"*\ `Security Issues for Embedded
- Devices <https://elinux.org/images/6/6f/Security-issues.pdf>`__\ *"*
- by Jake Edge
-
-When securing your image is of concern, there are steps, tools, and
-variables that you can consider to help you reach the security goals you
-need for your particular device. Not all situations are identical when
-it comes to making an image secure. Consequently, this section provides
-some guidance and suggestions for consideration when you want to make
-your image more secure.
-
-.. note::
-
- Because the security requirements and risks are different for every
- type of device, this section cannot provide a complete reference on
- securing your custom OS. It is strongly recommended that you also
- consult other sources of information on embedded Linux system
- hardening and on security.
-
-General Considerations
-----------------------
-
-There are general considerations that help you create more secure images.
-You should consider the following suggestions to make your device
-more secure:
-
-- Scan additional code you are adding to the system (e.g. application
- code) by using static analysis tools. Look for buffer overflows and
- other potential security problems.
-
-- Pay particular attention to the security for any web-based
- administration interface.
-
- Web interfaces typically need to perform administrative functions and
- tend to need to run with elevated privileges. Thus, the consequences
- resulting from the interface's security becoming compromised can be
- serious. Look for common web vulnerabilities such as
- cross-site-scripting (XSS), unvalidated inputs, and so forth.
-
- As with system passwords, the default credentials for accessing a
- web-based interface should not be the same across all devices. This
- is particularly true if the interface is enabled by default as it can
- be assumed that many end-users will not change the credentials.
-
-- Ensure you can update the software on the device to mitigate
- vulnerabilities discovered in the future. This consideration
- especially applies when your device is network-enabled.
-
-- Ensure you remove or disable debugging functionality before producing
- the final image. For information on how to do this, see the
- ":ref:`dev-manual/common-tasks:considerations specific to the openembedded build system`"
- section.
-
-- Ensure you have no network services listening that are not needed.
-
-- Remove any software from the image that is not needed.
-
-- Enable hardware support for secure boot functionality when your
- device supports this functionality.
-
-Security Flags
---------------
-
-The Yocto Project has security flags that you can enable that help make
-your build output more secure. The security flags are in the
-``meta/conf/distro/include/security_flags.inc`` file in your
-:term:`Source Directory` (e.g. ``poky``).
-
-.. note::
-
- Depending on the recipe, certain security flags are enabled and
- disabled by default.
-
-Use the following line in your ``local.conf`` file or in your custom
-distribution configuration file to enable the security compiler and
-linker flags for your build::
-
- require conf/distro/include/security_flags.inc
-
-Considerations Specific to the OpenEmbedded Build System
---------------------------------------------------------
-
-You can take some steps that are specific to the OpenEmbedded build
-system to make your images more secure:
-
-- Ensure "debug-tweaks" is not one of your selected
- :term:`IMAGE_FEATURES`.
- When creating a new project, the default is to provide you with an
- initial ``local.conf`` file that enables this feature using the
- :term:`EXTRA_IMAGE_FEATURES`
- variable with the line::
-
- EXTRA_IMAGE_FEATURES = "debug-tweaks"
-
- To disable that feature, simply comment out that line in your
- ``local.conf`` file, or make sure :term:`IMAGE_FEATURES` does not contain
- "debug-tweaks" before producing your final image. Among other things,
- leaving this in place sets the root password as blank, which makes
- logging in for debugging or inspection easy during development but
- also means anyone can easily log in during production.
-
-- It is possible to set a root password for the image and also to set
- passwords for any extra users you might add (e.g. administrative or
- service type users). When you set up passwords for multiple images or
- users, you should not duplicate passwords.
-
- To set up passwords, use the
- :ref:`extrausers <ref-classes-extrausers>`
- class, which is the preferred method. For an example on how to set up
- both root and user passwords, see the
- ":ref:`ref-classes-extrausers`" section.
-
- .. note::
-
- When adding extra user accounts or setting a root password, be
- cautious about setting the same password on every device. If you
- do this, and the password you have set is exposed, then every
- device is now potentially compromised. If you need this access but
- want to ensure security, consider setting a different, random
- password for each device. Typically, you do this as a separate
- step after you deploy the image onto the device.
-
-- Consider enabling a Mandatory Access Control (MAC) framework such as
- SMACK or SELinux and tuning it appropriately for your device's usage.
- You can find more information in the
- :yocto_git:`meta-selinux </meta-selinux/>` layer.
-
-Tools for Hardening Your Image
-------------------------------
-
-The Yocto Project provides tools for making your image more secure. You
-can find these tools in the ``meta-security`` layer of the
-:yocto_git:`Yocto Project Source Repositories <>`.
-
-Creating Your Own Distribution
-==============================
-
-When you build an image using the Yocto Project and do not alter any
-distribution :term:`Metadata`, you are
-creating a Poky distribution. If you wish to gain more control over
-package alternative selections, compile-time options, and other
-low-level configurations, you can create your own distribution.
-
-To create your own distribution, the basic steps consist of creating
-your own distribution layer, creating your own distribution
-configuration file, and then adding any needed code and Metadata to the
-layer. The following steps provide some more detail:
-
-- *Create a layer for your new distro:* Create your distribution layer
- so that you can keep your Metadata and code for the distribution
- separate. It is strongly recommended that you create and use your own
- layer for configuration and code. Using your own layer as compared to
- just placing configurations in a ``local.conf`` configuration file
- makes it easier to reproduce the same build configuration when using
- multiple build machines. See the
- ":ref:`dev-manual/common-tasks:creating a general layer using the \`\`bitbake-layers\`\` script`"
- section for information on how to quickly set up a layer.
-
-- *Create the distribution configuration file:* The distribution
- configuration file needs to be created in the ``conf/distro``
- directory of your layer. You need to name it using your distribution
- name (e.g. ``mydistro.conf``).
-
- .. note::
-
- The :term:`DISTRO` variable in your ``local.conf`` file determines the
- name of your distribution.
-
- You can split out parts of your configuration file into include files
- and then "require" them from within your distribution configuration
- file. Be sure to place the include files in the
- ``conf/distro/include`` directory of your layer. A common example
- usage of include files would be to separate out the selection of
- desired version and revisions for individual recipes.
-
- Your configuration file needs to set the following required
- variables:
-
- - :term:`DISTRO_NAME`
-
- - :term:`DISTRO_VERSION`
-
- These following variables are optional and you typically set them
- from the distribution configuration file:
-
- - :term:`DISTRO_FEATURES`
-
- - :term:`DISTRO_EXTRA_RDEPENDS`
-
- - :term:`DISTRO_EXTRA_RRECOMMENDS`
-
- - :term:`TCLIBC`
-
- .. tip::
-
- If you want to base your distribution configuration file on the
- very basic configuration from OE-Core, you can use
- ``conf/distro/defaultsetup.conf`` as a reference and just include
- variables that differ as compared to ``defaultsetup.conf``.
- Alternatively, you can create a distribution configuration file
- from scratch using the ``defaultsetup.conf`` file or configuration files
- from another distribution such as Poky as a reference.
-
-- *Provide miscellaneous variables:* Be sure to define any other
- variables for which you want to create a default or enforce as part
- of the distribution configuration. You can include nearly any
- variable from the ``local.conf`` file. The variables you use are not
- limited to the list in the previous bulleted item.
-
-- *Point to Your distribution configuration file:* In your
- ``local.conf`` file in the :term:`Build Directory`,
- set your
- :term:`DISTRO` variable to point to
- your distribution's configuration file. For example, if your
- distribution's configuration file is named ``mydistro.conf``, then
- you point to it as follows::
-
- DISTRO = "mydistro"
-
-- *Add more to the layer if necessary:* Use your layer to hold other
- information needed for the distribution:
-
- - Add recipes for installing distro-specific configuration files
- that are not already installed by another recipe. If you have
- distro-specific configuration files that are included by an
- existing recipe, you should add an append file (``.bbappend``) for
- those. For general information and recommendations on how to add
- recipes to your layer, see the
- ":ref:`dev-manual/common-tasks:creating your own layer`" and
- ":ref:`dev-manual/common-tasks:following best practices when creating layers`"
- sections.
-
- - Add any image recipes that are specific to your distribution.
-
- - Add a ``psplash`` append file for a branded splash screen. For
- information on append files, see the
- ":ref:`dev-manual/common-tasks:appending other layers metadata with your layer`"
- section.
-
- - Add any other append files to make custom changes that are
- specific to individual recipes.
-
-Creating a Custom Template Configuration Directory
-==================================================
-
-If you are producing your own customized version of the build system for
-use by other users, you might want to customize the message shown by the
-setup script or you might want to change the template configuration
-files (i.e. ``local.conf`` and ``bblayers.conf``) that are created in a
-new build directory.
-
-The OpenEmbedded build system uses the environment variable
-:term:`TEMPLATECONF` to locate the directory from which it gathers
-configuration information that ultimately ends up in the
-:term:`Build Directory` ``conf`` directory.
-By default, :term:`TEMPLATECONF` is set as follows in the ``poky``
-repository::
-
- TEMPLATECONF=${TEMPLATECONF:-meta-poky/conf}
-
-This is the
-directory used by the build system to find templates from which to build
-some key configuration files. If you look at this directory, you will
-see the ``bblayers.conf.sample``, ``local.conf.sample``, and
-``conf-notes.txt`` files. The build system uses these files to form the
-respective ``bblayers.conf`` file, ``local.conf`` file, and display the
-list of BitBake targets when running the setup script.
-
-To override these default configuration files with configurations you
-want used within every new Build Directory, simply set the
-:term:`TEMPLATECONF` variable to your directory. The :term:`TEMPLATECONF`
-variable is set in the ``.templateconf`` file, which is in the top-level
-:term:`Source Directory` folder
-(e.g. ``poky``). Edit the ``.templateconf`` so that it can locate your
-directory.
-
-Best practices dictate that you should keep your template configuration
-directory in your custom distribution layer. For example, suppose you
-have a layer named ``meta-mylayer`` located in your home directory and
-you want your template configuration directory named ``myconf``.
-Changing the ``.templateconf`` as follows causes the OpenEmbedded build
-system to look in your directory and base its configuration files on the
-``*.sample`` configuration files it finds. The final configuration files
-(i.e. ``local.conf`` and ``bblayers.conf`` ultimately still end up in
-your Build Directory, but they are based on your ``*.sample`` files.
-::
-
- TEMPLATECONF=${TEMPLATECONF:-meta-mylayer/myconf}
-
-Aside from the ``*.sample`` configuration files, the ``conf-notes.txt``
-also resides in the default ``meta-poky/conf`` directory. The script
-that sets up the build environment (i.e.
-:ref:`structure-core-script`) uses this file to
-display BitBake targets as part of the script output. Customizing this
-``conf-notes.txt`` file is a good way to make sure your list of custom
-targets appears as part of the script's output.
-
-Here is the default list of targets displayed as a result of running
-either of the setup scripts::
-
- You can now run 'bitbake <target>'
-
- Common targets are:
- core-image-minimal
- core-image-sato
- meta-toolchain
- meta-ide-support
-
-Changing the listed common targets is as easy as editing your version of
-``conf-notes.txt`` in your custom template configuration directory and
-making sure you have :term:`TEMPLATECONF` set to your directory.
-
-Conserving Disk Space
-=====================
-
-Conserving Disk Space During Builds
------------------------------------
-
-To help conserve disk space during builds, you can add the following
-statement to your project's ``local.conf`` configuration file found in
-the :term:`Build Directory`::
-
- INHERIT += "rm_work"
-
-Adding this statement deletes the work directory used for
-building a recipe once the recipe is built. For more information on
-"rm_work", see the
-:ref:`rm_work <ref-classes-rm-work>` class in the
-Yocto Project Reference Manual.
-
-Purging Duplicate Shared State Cache Files
--------------------------------------------
-
-After multiple build iterations, the Shared State (sstate) cache can contain
-duplicate cache files for a given package, while only the most recent one
-is likely to be reusable. The following command purges all but the
-newest sstate cache file for each package::
-
- sstate-cache-management.sh --remove-duplicated --cache-dir=build/sstate-cache
-
-This command will ask you to confirm the deletions it identifies.
-
-Note::
-
- The duplicated sstate cache files of one package must have the same
- architecture, which means that sstate cache files with multiple
- architectures are not considered as duplicate.
-
-Run ``sstate-cache-management.sh`` for more details about this script.
-
-Working with Packages
-=====================
-
-This section describes a few tasks that involve packages:
-
-- :ref:`dev-manual/common-tasks:excluding packages from an image`
-
-- :ref:`dev-manual/common-tasks:incrementing a package version`
-
-- :ref:`dev-manual/common-tasks:handling optional module packaging`
-
-- :ref:`dev-manual/common-tasks:using runtime package management`
-
-- :ref:`dev-manual/common-tasks:generating and using signed packages`
-
-- :ref:`Setting up and running package test
- (ptest) <dev-manual/common-tasks:testing packages with ptest>`
-
-- :ref:`dev-manual/common-tasks:creating node package manager (npm) packages`
-
-- :ref:`dev-manual/common-tasks:adding custom metadata to packages`
-
-Excluding Packages from an Image
---------------------------------
-
-You might find it necessary to prevent specific packages from being
-installed into an image. If so, you can use several variables to direct
-the build system to essentially ignore installing recommended packages
-or to not install a package at all.
-
-The following list introduces variables you can use to prevent packages
-from being installed into your image. Each of these variables only works
-with IPK and RPM package types, not for Debian packages.
-Also, you can use these variables from your ``local.conf`` file
-or attach them to a specific image recipe by using a recipe name
-override. For more detail on the variables, see the descriptions in the
-Yocto Project Reference Manual's glossary chapter.
-
-- :term:`BAD_RECOMMENDATIONS`:
- Use this variable to specify "recommended-only" packages that you do
- not want installed.
-
-- :term:`NO_RECOMMENDATIONS`:
- Use this variable to prevent all "recommended-only" packages from
- being installed.
-
-- :term:`PACKAGE_EXCLUDE`:
- Use this variable to prevent specific packages from being installed
- regardless of whether they are "recommended-only" or not. You need to
- realize that the build process could fail with an error when you
- prevent the installation of a package whose presence is required by
- an installed package.
-
-Incrementing a Package Version
-------------------------------
-
-This section provides some background on how binary package versioning
-is accomplished and presents some of the services, variables, and
-terminology involved.
-
-In order to understand binary package versioning, you need to consider
-the following:
-
-- Binary Package: The binary package that is eventually built and
- installed into an image.
-
-- Binary Package Version: The binary package version is composed of two
- components - a version and a revision.
-
- .. note::
-
- Technically, a third component, the "epoch" (i.e. :term:`PE`) is involved
- but this discussion for the most part ignores :term:`PE`.
-
- The version and revision are taken from the
- :term:`PV` and
- :term:`PR` variables, respectively.
-
-- :term:`PV`: The recipe version. :term:`PV` represents the version of the
- software being packaged. Do not confuse :term:`PV` with the binary
- package version.
-
-- :term:`PR`: The recipe revision.
-
-- :term:`SRCPV`: The OpenEmbedded
- build system uses this string to help define the value of :term:`PV` when
- the source code revision needs to be included in it.
-
-- :yocto_wiki:`PR Service </PR_Service>`: A
- network-based service that helps automate keeping package feeds
- compatible with existing package manager applications such as RPM,
- APT, and OPKG.
-
-Whenever the binary package content changes, the binary package version
-must change. Changing the binary package version is accomplished by
-changing or "bumping" the :term:`PR` and/or :term:`PV` values. Increasing these
-values occurs one of two ways:
-
-- Automatically using a Package Revision Service (PR Service).
-
-- Manually incrementing the :term:`PR` and/or :term:`PV` variables.
-
-Given a primary challenge of any build system and its users is how to
-maintain a package feed that is compatible with existing package manager
-applications such as RPM, APT, and OPKG, using an automated system is
-much preferred over a manual system. In either system, the main
-requirement is that binary package version numbering increases in a
-linear fashion and that there is a number of version components that
-support that linear progression. For information on how to ensure
-package revisioning remains linear, see the
-":ref:`dev-manual/common-tasks:automatically incrementing a package version number`"
-section.
-
-The following three sections provide related information on the PR
-Service, the manual method for "bumping" :term:`PR` and/or :term:`PV`, and on
-how to ensure binary package revisioning remains linear.
-
-Working With a PR Service
-~~~~~~~~~~~~~~~~~~~~~~~~~
-
-As mentioned, attempting to maintain revision numbers in the
-:term:`Metadata` is error prone, inaccurate,
-and causes problems for people submitting recipes. Conversely, the PR
-Service automatically generates increasing numbers, particularly the
-revision field, which removes the human element.
-
-.. note::
-
- For additional information on using a PR Service, you can see the
- :yocto_wiki:`PR Service </PR_Service>` wiki page.
-
-The Yocto Project uses variables in order of decreasing priority to
-facilitate revision numbering (i.e.
-:term:`PE`,
-:term:`PV`, and
-:term:`PR` for epoch, version, and
-revision, respectively). The values are highly dependent on the policies
-and procedures of a given distribution and package feed.
-
-Because the OpenEmbedded build system uses
-":ref:`signatures <overview-manual/concepts:checksums (signatures)>`", which are
-unique to a given build, the build system knows when to rebuild
-packages. All the inputs into a given task are represented by a
-signature, which can trigger a rebuild when different. Thus, the build
-system itself does not rely on the :term:`PR`, :term:`PV`, and :term:`PE` numbers to
-trigger a rebuild. The signatures, however, can be used to generate
-these values.
-
-The PR Service works with both ``OEBasic`` and ``OEBasicHash``
-generators. The value of :term:`PR` bumps when the checksum changes and the
-different generator mechanisms change signatures under different
-circumstances.
-
-As implemented, the build system includes values from the PR Service
-into the :term:`PR` field as an addition using the form "``.x``" so ``r0``
-becomes ``r0.1``, ``r0.2`` and so forth. This scheme allows existing
-:term:`PR` values to be used for whatever reasons, which include manual
-:term:`PR` bumps, should it be necessary.
-
-By default, the PR Service is not enabled or running. Thus, the packages
-generated are just "self consistent". The build system adds and removes
-packages and there are no guarantees about upgrade paths but images will
-be consistent and correct with the latest changes.
-
-The simplest form for a PR Service is for a single host
-development system that builds the package feed (building system). For
-this scenario, you can enable a local PR Service by setting
-:term:`PRSERV_HOST` in your
-``local.conf`` file in the :term:`Build Directory`::
-
- PRSERV_HOST = "localhost:0"
-
-Once the service is started, packages will automatically
-get increasing :term:`PR` values and BitBake takes care of starting and
-stopping the server.
-
-If you have a more complex setup where multiple host development systems
-work against a common, shared package feed, you have a single PR Service
-running and it is connected to each building system. For this scenario,
-you need to start the PR Service using the ``bitbake-prserv`` command::
-
- bitbake-prserv --host ip --port port --start
-
-In addition to
-hand-starting the service, you need to update the ``local.conf`` file of
-each building system as described earlier so each system points to the
-server and port.
-
-It is also recommended you use build history, which adds some sanity
-checks to binary package versions, in conjunction with the server that
-is running the PR Service. To enable build history, add the following to
-each building system's ``local.conf`` file::
-
- # It is recommended to activate "buildhistory" for testing the PR service
- INHERIT += "buildhistory"
- BUILDHISTORY_COMMIT = "1"
-
-For information on build
-history, see the
-":ref:`dev-manual/common-tasks:maintaining build output quality`" section.
-
-.. note::
-
- The OpenEmbedded build system does not maintain :term:`PR` information as
- part of the shared state (sstate) packages. If you maintain an sstate
- feed, it's expected that either all your building systems that
- contribute to the sstate feed use a shared PR Service, or you do not
- run a PR Service on any of your building systems. Having some systems
- use a PR Service while others do not leads to obvious problems.
-
- For more information on shared state, see the
- ":ref:`overview-manual/concepts:shared state cache`"
- section in the Yocto Project Overview and Concepts Manual.
-
-Manually Bumping PR
-~~~~~~~~~~~~~~~~~~~
-
-The alternative to setting up a PR Service is to manually "bump" the
-:term:`PR` variable.
-
-If a committed change results in changing the package output, then the
-value of the PR variable needs to be increased (or "bumped") as part of
-that commit. For new recipes you should add the :term:`PR` variable and set
-its initial value equal to "r0", which is the default. Even though the
-default value is "r0", the practice of adding it to a new recipe makes
-it harder to forget to bump the variable when you make changes to the
-recipe in future.
-
-If you are sharing a common ``.inc`` file with multiple recipes, you can
-also use the :term:`INC_PR` variable to ensure that the recipes sharing the
-``.inc`` file are rebuilt when the ``.inc`` file itself is changed. The
-``.inc`` file must set :term:`INC_PR` (initially to "r0"), and all recipes
-referring to it should set :term:`PR` to "${INC_PR}.0" initially,
-incrementing the last number when the recipe is changed. If the ``.inc``
-file is changed then its :term:`INC_PR` should be incremented.
-
-When upgrading the version of a binary package, assuming the :term:`PV`
-changes, the :term:`PR` variable should be reset to "r0" (or "${INC_PR}.0"
-if you are using :term:`INC_PR`).
-
-Usually, version increases occur only to binary packages. However, if
-for some reason :term:`PV` changes but does not increase, you can increase
-the :term:`PE` variable (Package Epoch). The :term:`PE` variable defaults to
-"0".
-
-Binary package version numbering strives to follow the `Debian Version
-Field Policy
-Guidelines <https://www.debian.org/doc/debian-policy/ch-controlfields.html>`__.
-These guidelines define how versions are compared and what "increasing"
-a version means.
-
-Automatically Incrementing a Package Version Number
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-When fetching a repository, BitBake uses the
-:term:`SRCREV` variable to determine
-the specific source code revision from which to build. You set the
-:term:`SRCREV` variable to
-:term:`AUTOREV` to cause the
-OpenEmbedded build system to automatically use the latest revision of
-the software::
-
- SRCREV = "${AUTOREV}"
-
-Furthermore, you need to reference :term:`SRCPV` in :term:`PV` in order to
-automatically update the version whenever the revision of the source
-code changes. Here is an example::
-
- PV = "1.0+git${SRCPV}"
-
-The OpenEmbedded build system substitutes :term:`SRCPV` with the following:
-
-.. code-block:: none
-
- AUTOINC+source_code_revision
-
-The build system replaces the ``AUTOINC``
-with a number. The number used depends on the state of the PR Service:
-
-- If PR Service is enabled, the build system increments the number,
- which is similar to the behavior of
- :term:`PR`. This behavior results in
- linearly increasing package versions, which is desirable. Here is an
- example:
-
- .. code-block:: none
-
- hello-world-git_0.0+git0+b6558dd387-r0.0_armv7a-neon.ipk
- hello-world-git_0.0+git1+dd2f5c3565-r0.0_armv7a-neon.ipk
-
-- If PR Service is not enabled, the build system replaces the
- ``AUTOINC`` placeholder with zero (i.e. "0"). This results in
- changing the package version since the source revision is included.
- However, package versions are not increased linearly. Here is an
- example:
-
- .. code-block:: none
-
- hello-world-git_0.0+git0+b6558dd387-r0.0_armv7a-neon.ipk
- hello-world-git_0.0+git0+dd2f5c3565-r0.0_armv7a-neon.ipk
-
-In summary, the OpenEmbedded build system does not track the history of
-binary package versions for this purpose. ``AUTOINC``, in this case, is
-comparable to :term:`PR`. If PR server is not enabled, ``AUTOINC`` in the
-package version is simply replaced by "0". If PR server is enabled, the
-build system keeps track of the package versions and bumps the number
-when the package revision changes.
-
-Handling Optional Module Packaging
-----------------------------------
-
-Many pieces of software split functionality into optional modules (or
-plugins) and the plugins that are built might depend on configuration
-options. To avoid having to duplicate the logic that determines what
-modules are available in your recipe or to avoid having to package each
-module by hand, the OpenEmbedded build system provides functionality to
-handle module packaging dynamically.
-
-To handle optional module packaging, you need to do two things:
-
-- Ensure the module packaging is actually done.
-
-- Ensure that any dependencies on optional modules from other recipes
- are satisfied by your recipe.
-
-Making Sure the Packaging is Done
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-To ensure the module packaging actually gets done, you use the
-``do_split_packages`` function within the ``populate_packages`` Python
-function in your recipe. The ``do_split_packages`` function searches for
-a pattern of files or directories under a specified path and creates a
-package for each one it finds by appending to the
-:term:`PACKAGES` variable and
-setting the appropriate values for ``FILES:packagename``,
-``RDEPENDS:packagename``, ``DESCRIPTION:packagename``, and so forth.
-Here is an example from the ``lighttpd`` recipe::
-
- python populate_packages:prepend () {
- lighttpd_libdir = d.expand('${libdir}')
- do_split_packages(d, lighttpd_libdir, '^mod_(.*).so$',
- 'lighttpd-module-%s', 'Lighttpd module for %s',
- extra_depends='')
- }
-
-The previous example specifies a number of things in the call to
-``do_split_packages``.
-
-- A directory within the files installed by your recipe through
- ``do_install`` in which to search.
-
-- A regular expression used to match module files in that directory. In
- the example, note the parentheses () that mark the part of the
- expression from which the module name should be derived.
-
-- A pattern to use for the package names.
-
-- A description for each package.
-
-- An empty string for ``extra_depends``, which disables the default
- dependency on the main ``lighttpd`` package. Thus, if a file in
- ``${libdir}`` called ``mod_alias.so`` is found, a package called
- ``lighttpd-module-alias`` is created for it and the
- :term:`DESCRIPTION` is set to
- "Lighttpd module for alias".
-
-Often, packaging modules is as simple as the previous example. However,
-there are more advanced options that you can use within
-``do_split_packages`` to modify its behavior. And, if you need to, you
-can add more logic by specifying a hook function that is called for each
-package. It is also perfectly acceptable to call ``do_split_packages``
-multiple times if you have more than one set of modules to package.
-
-For more examples that show how to use ``do_split_packages``, see the
-``connman.inc`` file in the ``meta/recipes-connectivity/connman/``
-directory of the ``poky`` :ref:`source repository <overview-manual/development-environment:yocto project source repositories>`. You can
-also find examples in ``meta/classes/kernel.bbclass``.
-
-Following is a reference that shows ``do_split_packages`` mandatory and
-optional arguments::
-
- Mandatory arguments
-
- root
- The path in which to search
- file_regex
- Regular expression to match searched files.
- Use parentheses () to mark the part of this
- expression that should be used to derive the
- module name (to be substituted where %s is
- used in other function arguments as noted below)
- output_pattern
- Pattern to use for the package names. Must
- include %s.
- description
- Description to set for each package. Must
- include %s.
-
- Optional arguments
-
- postinst
- Postinstall script to use for all packages
- (as a string)
- recursive
- True to perform a recursive search - default
- False
- hook
- A hook function to be called for every match.
- The function will be called with the following
- arguments (in the order listed):
-
- f
- Full path to the file/directory match
- pkg
- The package name
- file_regex
- As above
- output_pattern
- As above
- modulename
- The module name derived using file_regex
- extra_depends
- Extra runtime dependencies (RDEPENDS) to be
- set for all packages. The default value of None
- causes a dependency on the main package
- (${PN}) - if you do not want this, pass empty
- string '' for this parameter.
- aux_files_pattern
- Extra item(s) to be added to FILES for each
- package. Can be a single string item or a list
- of strings for multiple items. Must include %s.
- postrm
- postrm script to use for all packages (as a
- string)
- allow_dirs
- True to allow directories to be matched -
- default False
- prepend
- If True, prepend created packages to PACKAGES
- instead of the default False which appends them
- match_path
- match file_regex on the whole relative path to
- the root rather than just the filename
- aux_files_pattern_verbatim
- Extra item(s) to be added to FILES for each
- package, using the actual derived module name
- rather than converting it to something legal
- for a package name. Can be a single string item
- or a list of strings for multiple items. Must
- include %s.
- allow_links
- True to allow symlinks to be matched - default
- False
- summary
- Summary to set for each package. Must include %s;
- defaults to description if not set.
-
-
-
-Satisfying Dependencies
-~~~~~~~~~~~~~~~~~~~~~~~
-
-The second part for handling optional module packaging is to ensure that
-any dependencies on optional modules from other recipes are satisfied by
-your recipe. You can be sure these dependencies are satisfied by using
-the :term:`PACKAGES_DYNAMIC`
-variable. Here is an example that continues with the ``lighttpd`` recipe
-shown earlier::
-
- PACKAGES_DYNAMIC = "lighttpd-module-.*"
-
-The name
-specified in the regular expression can of course be anything. In this
-example, it is ``lighttpd-module-`` and is specified as the prefix to
-ensure that any :term:`RDEPENDS` and
-:term:`RRECOMMENDS` on a package
-name starting with the prefix are satisfied during build time. If you
-are using ``do_split_packages`` as described in the previous section,
-the value you put in :term:`PACKAGES_DYNAMIC` should correspond to the name
-pattern specified in the call to ``do_split_packages``.
-
-Using Runtime Package Management
---------------------------------
-
-During a build, BitBake always transforms a recipe into one or more
-packages. For example, BitBake takes the ``bash`` recipe and produces a
-number of packages (e.g. ``bash``, ``bash-bashbug``,
-``bash-completion``, ``bash-completion-dbg``, ``bash-completion-dev``,
-``bash-completion-extra``, ``bash-dbg``, and so forth). Not all
-generated packages are included in an image.
-
-In several situations, you might need to update, add, remove, or query
-the packages on a target device at runtime (i.e. without having to
-generate a new image). Examples of such situations include:
-
-- You want to provide in-the-field updates to deployed devices (e.g.
- security updates).
-
-- You want to have a fast turn-around development cycle for one or more
- applications that run on your device.
-
-- You want to temporarily install the "debug" packages of various
- applications on your device so that debugging can be greatly improved
- by allowing access to symbols and source debugging.
-
-- You want to deploy a more minimal package selection of your device
- but allow in-the-field updates to add a larger selection for
- customization.
-
-In all these situations, you have something similar to a more
-traditional Linux distribution in that in-field devices are able to
-receive pre-compiled packages from a server for installation or update.
-Being able to install these packages on a running, in-field device is
-what is termed "runtime package management".
-
-In order to use runtime package management, you need a host or server
-machine that serves up the pre-compiled packages plus the required
-metadata. You also need package manipulation tools on the target. The
-build machine is a likely candidate to act as the server. However, that
-machine does not necessarily have to be the package server. The build
-machine could push its artifacts to another machine that acts as the
-server (e.g. Internet-facing). In fact, doing so is advantageous for a
-production environment as getting the packages away from the development
-system's build directory prevents accidental overwrites.
-
-A simple build that targets just one device produces more than one
-package database. In other words, the packages produced by a build are
-separated out into a couple of different package groupings based on
-criteria such as the target's CPU architecture, the target board, or the
-C library used on the target. For example, a build targeting the
-``qemux86`` device produces the following three package databases:
-``noarch``, ``i586``, and ``qemux86``. If you wanted your ``qemux86``
-device to be aware of all the packages that were available to it, you
-would need to point it to each of these databases individually. In a
-similar way, a traditional Linux distribution usually is configured to
-be aware of a number of software repositories from which it retrieves
-packages.
-
-Using runtime package management is completely optional and not required
-for a successful build or deployment in any way. But if you want to make
-use of runtime package management, you need to do a couple things above
-and beyond the basics. The remainder of this section describes what you
-need to do.
-
-Build Considerations
-~~~~~~~~~~~~~~~~~~~~
-
-This section describes build considerations of which you need to be
-aware in order to provide support for runtime package management.
-
-When BitBake generates packages, it needs to know what format or formats
-to use. In your configuration, you use the
-:term:`PACKAGE_CLASSES`
-variable to specify the format:
-
-1. Open the ``local.conf`` file inside your
- :term:`Build Directory` (e.g.
- ``poky/build/conf/local.conf``).
-
-2. Select the desired package format as follows::
-
- PACKAGE_CLASSES ?= "package_packageformat"
-
- where packageformat can be "ipk", "rpm",
- "deb", or "tar" which are the supported package formats.
-
- .. note::
-
- Because the Yocto Project supports four different package formats,
- you can set the variable with more than one argument. However, the
- OpenEmbedded build system only uses the first argument when
- creating an image or Software Development Kit (SDK).
-
-If you would like your image to start off with a basic package database
-containing the packages in your current build as well as to have the
-relevant tools available on the target for runtime package management,
-you can include "package-management" in the
-:term:`IMAGE_FEATURES`
-variable. Including "package-management" in this configuration variable
-ensures that when the image is assembled for your target, the image
-includes the currently-known package databases as well as the
-target-specific tools required for runtime package management to be
-performed on the target. However, this is not strictly necessary. You
-could start your image off without any databases but only include the
-required on-target package tool(s). As an example, you could include
-"opkg" in your
-:term:`IMAGE_INSTALL` variable
-if you are using the IPK package format. You can then initialize your
-target's package database(s) later once your image is up and running.
-
-Whenever you perform any sort of build step that can potentially
-generate a package or modify existing package, it is always a good idea
-to re-generate the package index after the build by using the following
-command::
-
- $ bitbake package-index
-
-It might be tempting to build the
-package and the package index at the same time with a command such as
-the following::
-
- $ bitbake some-package package-index
-
-Do not do this as
-BitBake does not schedule the package index for after the completion of
-the package you are building. Consequently, you cannot be sure of the
-package index including information for the package you just built.
-Thus, be sure to run the package update step separately after building
-any packages.
-
-You can use the
-:term:`PACKAGE_FEED_ARCHS`,
-:term:`PACKAGE_FEED_BASE_PATHS`,
-and
-:term:`PACKAGE_FEED_URIS`
-variables to pre-configure target images to use a package feed. If you
-do not define these variables, then manual steps as described in the
-subsequent sections are necessary to configure the target. You should
-set these variables before building the image in order to produce a
-correctly configured image.
-
-When your build is complete, your packages reside in the
-``${TMPDIR}/deploy/packageformat`` directory. For example, if
-``${``\ :term:`TMPDIR`\ ``}`` is
-``tmp`` and your selected package type is RPM, then your RPM packages
-are available in ``tmp/deploy/rpm``.
-
-Host or Server Machine Setup
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-Although other protocols are possible, a server using HTTP typically
-serves packages. If you want to use HTTP, then set up and configure a
-web server such as Apache 2, lighttpd, or Python web server on the
-machine serving the packages.
-
-To keep things simple, this section describes how to set up a
-Python web server to share package feeds from the developer's
-machine. Although this server might not be the best for a production
-environment, the setup is simple and straight forward. Should you want
-to use a different server more suited for production (e.g. Apache 2,
-Lighttpd, or Nginx), take the appropriate steps to do so.
-
-From within the build directory where you have built an image based on
-your packaging choice (i.e. the
-:term:`PACKAGE_CLASSES`
-setting), simply start the server. The following example assumes a build
-directory of ``poky/build/tmp/deploy/rpm`` and a :term:`PACKAGE_CLASSES`
-setting of "package_rpm"::
-
- $ cd poky/build/tmp/deploy/rpm
- $ python3 -m http.server
-
-Target Setup
-~~~~~~~~~~~~
-
-Setting up the target differs depending on the package management
-system. This section provides information for RPM, IPK, and DEB.
-
-Using RPM
-^^^^^^^^^
-
-The `Dandified Packaging
-Tool <https://en.wikipedia.org/wiki/DNF_(software)>`__ (DNF) performs
-runtime package management of RPM packages. In order to use DNF for
-runtime package management, you must perform an initial setup on the
-target machine for cases where the ``PACKAGE_FEED_*`` variables were not
-set as part of the image that is running on the target. This means if
-you built your image and did not use these variables as part of the
-build and your image is now running on the target, you need to perform
-the steps in this section if you want to use runtime package management.
-
-.. note::
-
- For information on the ``PACKAGE_FEED_*`` variables, see
- :term:`PACKAGE_FEED_ARCHS`, :term:`PACKAGE_FEED_BASE_PATHS`, and
- :term:`PACKAGE_FEED_URIS` in the Yocto Project Reference Manual variables
- glossary.
-
-On the target, you must inform DNF that package databases are available.
-You do this by creating a file named
-``/etc/yum.repos.d/oe-packages.repo`` and defining the ``oe-packages``.
-
-As an example, assume the target is able to use the following package
-databases: ``all``, ``i586``, and ``qemux86`` from a server named
-``my.server``. The specifics for setting up the web server are up to
-you. The critical requirement is that the URIs in the target repository
-configuration point to the correct remote location for the feeds.
-
-.. note::
-
- For development purposes, you can point the web server to the build
- system's ``deploy`` directory. However, for production use, it is better to
- copy the package directories to a location outside of the build area and use
- that location. Doing so avoids situations where the build system
- overwrites or changes the ``deploy`` directory.
-
-When telling DNF where to look for the package databases, you must
-declare individual locations per architecture or a single location used
-for all architectures. You cannot do both:
-
-- *Create an Explicit List of Architectures:* Define individual base
- URLs to identify where each package database is located:
-
- .. code-block:: none
-
- [oe-packages]
- baseurl=http://my.server/rpm/i586 http://my.server/rpm/qemux86 http://my.server/rpm/all
-
- This example
- informs DNF about individual package databases for all three
- architectures.
-
-- *Create a Single (Full) Package Index:* Define a single base URL that
- identifies where a full package database is located::
-
- [oe-packages]
- baseurl=http://my.server/rpm
-
- This example informs DNF about a single
- package database that contains all the package index information for
- all supported architectures.
-
-Once you have informed DNF where to find the package databases, you need
-to fetch them:
-
-.. code-block:: none
-
- # dnf makecache
-
-DNF is now able to find, install, and
-upgrade packages from the specified repository or repositories.
-
-.. note::
-
- See the `DNF documentation <https://dnf.readthedocs.io/en/latest/>`__ for
- additional information.
-
-Using IPK
-^^^^^^^^^
-
-The ``opkg`` application performs runtime package management of IPK
-packages. You must perform an initial setup for ``opkg`` on the target
-machine if the
-:term:`PACKAGE_FEED_ARCHS`,
-:term:`PACKAGE_FEED_BASE_PATHS`,
-and
-:term:`PACKAGE_FEED_URIS`
-variables have not been set or the target image was built before the
-variables were set.
-
-The ``opkg`` application uses configuration files to find available
-package databases. Thus, you need to create a configuration file inside
-the ``/etc/opkg/`` directory, which informs ``opkg`` of any repository
-you want to use.
-
-As an example, suppose you are serving packages from a ``ipk/``
-directory containing the ``i586``, ``all``, and ``qemux86`` databases
-through an HTTP server named ``my.server``. On the target, create a
-configuration file (e.g. ``my_repo.conf``) inside the ``/etc/opkg/``
-directory containing the following:
-
-.. code-block:: none
-
- src/gz all http://my.server/ipk/all
- src/gz i586 http://my.server/ipk/i586
- src/gz qemux86 http://my.server/ipk/qemux86
-
-Next, instruct ``opkg`` to fetch the
-repository information:
-
-.. code-block:: none
-
- # opkg update
-
-The ``opkg`` application is now able to find, install, and upgrade packages
-from the specified repository.
-
-Using DEB
-^^^^^^^^^
-
-The ``apt`` application performs runtime package management of DEB
-packages. This application uses a source list file to find available
-package databases. You must perform an initial setup for ``apt`` on the
-target machine if the
-:term:`PACKAGE_FEED_ARCHS`,
-:term:`PACKAGE_FEED_BASE_PATHS`,
-and
-:term:`PACKAGE_FEED_URIS`
-variables have not been set or the target image was built before the
-variables were set.
-
-To inform ``apt`` of the repository you want to use, you might create a
-list file (e.g. ``my_repo.list``) inside the
-``/etc/apt/sources.list.d/`` directory. As an example, suppose you are
-serving packages from a ``deb/`` directory containing the ``i586``,
-``all``, and ``qemux86`` databases through an HTTP server named
-``my.server``. The list file should contain:
-
-.. code-block:: none
-
- deb http://my.server/deb/all ./
- deb http://my.server/deb/i586 ./
- deb http://my.server/deb/qemux86 ./
-
-Next, instruct the ``apt`` application
-to fetch the repository information:
-
-.. code-block:: none
-
- $ sudo apt update
-
-After this step,
-``apt`` is able to find, install, and upgrade packages from the
-specified repository.
-
-Generating and Using Signed Packages
-------------------------------------
-
-In order to add security to RPM packages used during a build, you can
-take steps to securely sign them. Once a signature is verified, the
-OpenEmbedded build system can use the package in the build. If security
-fails for a signed package, the build system stops the build.
-
-This section describes how to sign RPM packages during a build and how
-to use signed package feeds (repositories) when doing a build.
-
-Signing RPM Packages
-~~~~~~~~~~~~~~~~~~~~
-
-To enable signing RPM packages, you must set up the following
-configurations in either your ``local.config`` or ``distro.config``
-file::
-
- # Inherit sign_rpm.bbclass to enable signing functionality
- INHERIT += " sign_rpm"
- # Define the GPG key that will be used for signing.
- RPM_GPG_NAME = "key_name"
- # Provide passphrase for the key
- RPM_GPG_PASSPHRASE = "passphrase"
-
-.. note::
-
- Be sure to supply appropriate values for both `key_name` and
- `passphrase`.
-
-Aside from the ``RPM_GPG_NAME`` and ``RPM_GPG_PASSPHRASE`` variables in
-the previous example, two optional variables related to signing are available:
-
-- *GPG_BIN:* Specifies a ``gpg`` binary/wrapper that is executed
- when the package is signed.
-
-- *GPG_PATH:* Specifies the ``gpg`` home directory used when the
- package is signed.
-
-Processing Package Feeds
-~~~~~~~~~~~~~~~~~~~~~~~~
-
-In addition to being able to sign RPM packages, you can also enable
-signed package feeds for IPK and RPM packages.
-
-The steps you need to take to enable signed package feed use are similar
-to the steps used to sign RPM packages. You must define the following in
-your ``local.config`` or ``distro.config`` file::
-
- INHERIT += "sign_package_feed"
- PACKAGE_FEED_GPG_NAME = "key_name"
- PACKAGE_FEED_GPG_PASSPHRASE_FILE = "path_to_file_containing_passphrase"
-
-For signed package feeds, the passphrase must be specified in a separate file,
-which is pointed to by the ``PACKAGE_FEED_GPG_PASSPHRASE_FILE``
-variable. Regarding security, keeping a plain text passphrase out of the
-configuration is more secure.
-
-Aside from the ``PACKAGE_FEED_GPG_NAME`` and
-``PACKAGE_FEED_GPG_PASSPHRASE_FILE`` variables, three optional variables
-related to signed package feeds are available:
-
-- *GPG_BIN* Specifies a ``gpg`` binary/wrapper that is executed
- when the package is signed.
-
-- *GPG_PATH:* Specifies the ``gpg`` home directory used when the
- package is signed.
-
-- *PACKAGE_FEED_GPG_SIGNATURE_TYPE:* Specifies the type of ``gpg``
- signature. This variable applies only to RPM and IPK package feeds.
- Allowable values for the ``PACKAGE_FEED_GPG_SIGNATURE_TYPE`` are
- "ASC", which is the default and specifies ascii armored, and "BIN",
- which specifies binary.
-
-Testing Packages With ptest
----------------------------
-
-A Package Test (ptest) runs tests against packages built by the
-OpenEmbedded build system on the target machine. A ptest contains at
-least two items: the actual test, and a shell script (``run-ptest``)
-that starts the test. The shell script that starts the test must not
-contain the actual test - the script only starts the test. On the other
-hand, the test can be anything from a simple shell script that runs a
-binary and checks the output to an elaborate system of test binaries and
-data files.
-
-The test generates output in the format used by Automake::
-
- result: testname
-
-where the result can be ``PASS``, ``FAIL``, or ``SKIP``, and
-the testname can be any identifying string.
-
-For a list of Yocto Project recipes that are already enabled with ptest,
-see the :yocto_wiki:`Ptest </Ptest>` wiki page.
-
-.. note::
-
- A recipe is "ptest-enabled" if it inherits the
- :ref:`ptest <ref-classes-ptest>` class.
-
-Adding ptest to Your Build
-~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-To add package testing to your build, add the
-:term:`DISTRO_FEATURES` and
-:term:`EXTRA_IMAGE_FEATURES`
-variables to your ``local.conf`` file, which is found in the
-:term:`Build Directory`::
-
- DISTRO_FEATURES:append = " ptest"
- EXTRA_IMAGE_FEATURES += "ptest-pkgs"
-
-Once your build is complete, the ptest files are installed into the
-``/usr/lib/package/ptest`` directory within the image, where ``package``
-is the name of the package.
-
-Running ptest
-~~~~~~~~~~~~~
-
-The ``ptest-runner`` package installs a shell script that loops through
-all installed ptest test suites and runs them in sequence. Consequently,
-you might want to add this package to your image.
-
-Getting Your Package Ready
-~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-In order to enable a recipe to run installed ptests on target hardware,
-you need to prepare the recipes that build the packages you want to
-test. Here is what you have to do for each recipe:
-
-- *Be sure the recipe inherits
- the* :ref:`ptest <ref-classes-ptest>` *class:*
- Include the following line in each recipe::
-
- inherit ptest
-
-- *Create run-ptest:* This script starts your test. Locate the
- script where you will refer to it using
- :term:`SRC_URI`. Here is an
- example that starts a test for ``dbus``::
-
- #!/bin/sh
- cd test
- make -k runtest-TESTS
-
-- *Ensure dependencies are met:* If the test adds build or runtime
- dependencies that normally do not exist for the package (such as
- requiring "make" to run the test suite), use the
- :term:`DEPENDS` and
- :term:`RDEPENDS` variables in
- your recipe in order for the package to meet the dependencies. Here
- is an example where the package has a runtime dependency on "make"::
-
- RDEPENDS:${PN}-ptest += "make"
-
-- *Add a function to build the test suite:* Not many packages support
- cross-compilation of their test suites. Consequently, you usually
- need to add a cross-compilation function to the package.
-
- Many packages based on Automake compile and run the test suite by
- using a single command such as ``make check``. However, the host
- ``make check`` builds and runs on the same computer, while
- cross-compiling requires that the package is built on the host but
- executed for the target architecture (though often, as in the case
- for ptest, the execution occurs on the host). The built version of
- Automake that ships with the Yocto Project includes a patch that
- separates building and execution. Consequently, packages that use the
- unaltered, patched version of ``make check`` automatically
- cross-compiles.
-
- Regardless, you still must add a ``do_compile_ptest`` function to
- build the test suite. Add a function similar to the following to your
- recipe::
-
- do_compile_ptest() {
- oe_runmake buildtest-TESTS
- }
-
-- *Ensure special configurations are set:* If the package requires
- special configurations prior to compiling the test code, you must
- insert a ``do_configure_ptest`` function into the recipe.
-
-- *Install the test suite:* The ``ptest`` class automatically copies
- the file ``run-ptest`` to the target and then runs make
- ``install-ptest`` to run the tests. If this is not enough, you need
- to create a ``do_install_ptest`` function and make sure it gets
- called after the "make install-ptest" completes.
-
-Creating Node Package Manager (NPM) Packages
---------------------------------------------
-
-`NPM <https://en.wikipedia.org/wiki/Npm_(software)>`__ is a package
-manager for the JavaScript programming language. The Yocto Project
-supports the NPM :ref:`fetcher <bitbake:bitbake-user-manual/bitbake-user-manual-fetching:fetchers>`. You can
-use this fetcher in combination with
-:doc:`devtool </ref-manual/devtool-reference>` to create
-recipes that produce NPM packages.
-
-There are two workflows that allow you to create NPM packages using
-``devtool``: the NPM registry modules method and the NPM project code
-method.
-
-.. note::
-
- While it is possible to create NPM recipes manually, using
- ``devtool`` is far simpler.
-
-Additionally, some requirements and caveats exist.
-
-Requirements and Caveats
-~~~~~~~~~~~~~~~~~~~~~~~~
-
-You need to be aware of the following before using ``devtool`` to create
-NPM packages:
-
-- Of the two methods that you can use ``devtool`` to create NPM
- packages, the registry approach is slightly simpler. However, you
- might consider the project approach because you do not have to
- publish your module in the NPM registry
- (`npm-registry <https://docs.npmjs.com/misc/registry>`_), which
- is NPM's public registry.
-
-- Be familiar with
- :doc:`devtool </ref-manual/devtool-reference>`.
-
-- The NPM host tools need the native ``nodejs-npm`` package, which is
- part of the OpenEmbedded environment. You need to get the package by
- cloning the https://github.com/openembedded/meta-openembedded
- repository out of GitHub. Be sure to add the path to your local copy
- to your ``bblayers.conf`` file.
-
-- ``devtool`` cannot detect native libraries in module dependencies.
- Consequently, you must manually add packages to your recipe.
-
-- While deploying NPM packages, ``devtool`` cannot determine which
- dependent packages are missing on the target (e.g. the node runtime
- ``nodejs``). Consequently, you need to find out what files are
- missing and be sure they are on the target.
-
-- Although you might not need NPM to run your node package, it is
- useful to have NPM on your target. The NPM package name is
- ``nodejs-npm``.
-
-Using the Registry Modules Method
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-This section presents an example that uses the ``cute-files`` module,
-which is a file browser web application.
-
-.. note::
-
- You must know the ``cute-files`` module version.
-
-The first thing you need to do is use ``devtool`` and the NPM fetcher to
-create the recipe::
-
- $ devtool add "npm://registry.npmjs.org;package=cute-files;version=1.0.2"
-
-The
-``devtool add`` command runs ``recipetool create`` and uses the same
-fetch URI to download each dependency and capture license details where
-possible. The result is a generated recipe.
-
-The recipe file is fairly simple and contains every license that
-``recipetool`` finds and includes the licenses in the recipe's
-:term:`LIC_FILES_CHKSUM`
-variables. You need to examine the variables and look for those with
-"unknown" in the :term:`LICENSE`
-field. You need to track down the license information for "unknown"
-modules and manually add the information to the recipe.
-
-``recipetool`` creates a "shrinkwrap" file for your recipe. Shrinkwrap
-files capture the version of all dependent modules. Many packages do not
-provide shrinkwrap files. ``recipetool`` create a shrinkwrap file as it
-runs.
-
-.. note::
-
- A package is created for each sub-module. This policy is the only
- practical way to have the licenses for all of the dependencies
- represented in the license manifest of the image.
-
-The ``devtool edit-recipe`` command lets you take a look at the recipe::
-
- $ devtool edit-recipe cute-files
- SUMMARY = "Turn any folder on your computer into a cute file browser, available on the local network."
- LICENSE = "MIT & ISC & Unknown"
- LIC_FILES_CHKSUM = "file://LICENSE;md5=71d98c0a1db42956787b1909c74a86ca \
- file://node_modules/toidentifier/LICENSE;md5=1a261071a044d02eb6f2bb47f51a3502 \
- file://node_modules/debug/LICENSE;md5=ddd815a475e7338b0be7a14d8ee35a99 \
- ...
- SRC_URI = " \
- npm://registry.npmjs.org/;package=cute-files;version=${PV} \
- npmsw://${THISDIR}/${BPN}/npm-shrinkwrap.json \
- "
- S = "${WORKDIR}/npm"
- inherit npm
- LICENSE:${PN} = "MIT"
- LICENSE:${PN}-accepts = "MIT"
- LICENSE:${PN}-array-flatten = "MIT"
- ...
- LICENSE:${PN}-vary = "MIT"
-
-Here are three key points in the previous example:
-
-- :term:`SRC_URI` uses the NPM
- scheme so that the NPM fetcher is used.
-
-- ``recipetool`` collects all the license information. If a
- sub-module's license is unavailable, the sub-module's name appears in
- the comments.
-
-- The ``inherit npm`` statement causes the
- :ref:`npm <ref-classes-npm>` class to package
- up all the modules.
-
-You can run the following command to build the ``cute-files`` package::
-
- $ devtool build cute-files
-
-Remember that ``nodejs`` must be installed on
-the target before your package.
-
-Assuming 192.168.7.2 for the target's IP address, use the following
-command to deploy your package::
-
- $ devtool deploy-target -s cute-files root@192.168.7.2
-
-Once the package is installed on the target, you can
-test the application:
-
-.. note::
-
- Because of a known issue, you cannot simply run ``cute-files`` as you would
- if you had run ``npm install``.
-
-::
-
- $ cd /usr/lib/node_modules/cute-files
- $ node cute-files.js
-
-On a browser,
-go to ``http://192.168.7.2:3000`` and you see the following:
-
-.. image:: figures/cute-files-npm-example.png
- :width: 100%
-
-You can find the recipe in ``workspace/recipes/cute-files``. You can use
-the recipe in any layer you choose.
-
-Using the NPM Projects Code Method
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-Although it is useful to package modules already in the NPM registry,
-adding ``node.js`` projects under development is a more common developer
-use case.
-
-This section covers the NPM projects code method, which is very similar
-to the "registry" approach described in the previous section. In the NPM
-projects method, you provide ``devtool`` with an URL that points to the
-source files.
-
-Replicating the same example, (i.e. ``cute-files``) use the following
-command::
-
- $ devtool add https://github.com/martinaglv/cute-files.git
-
-The
-recipe this command generates is very similar to the recipe created in
-the previous section. However, the :term:`SRC_URI` looks like the following::
-
- SRC_URI = " \
- git://github.com/martinaglv/cute-files.git;protocol=https \
- npmsw://${THISDIR}/${BPN}/npm-shrinkwrap.json \
- "
-
-In this example,
-the main module is taken from the Git repository and dependencies are
-taken from the NPM registry. Other than those differences, the recipe is
-basically the same between the two methods. You can build and deploy the
-package exactly as described in the previous section that uses the
-registry modules method.
-
-Adding custom metadata to packages
-----------------------------------
-
-The variable
-:term:`PACKAGE_ADD_METADATA`
-can be used to add additional metadata to packages. This is reflected in
-the package control/spec file. To take the ipk format for example, the
-CONTROL file stored inside would contain the additional metadata as
-additional lines.
-
-The variable can be used in multiple ways, including using suffixes to
-set it for a specific package type and/or package. Note that the order
-of precedence is the same as this list:
-
-- ``PACKAGE_ADD_METADATA_<PKGTYPE>:<PN>``
-
-- ``PACKAGE_ADD_METADATA_<PKGTYPE>``
-
-- ``PACKAGE_ADD_METADATA:<PN>``
-
-- :term:`PACKAGE_ADD_METADATA`
-
-`<PKGTYPE>` is a parameter and expected to be a distinct name of specific
-package type:
-
-- IPK for .ipk packages
-
-- DEB for .deb packages
-
-- RPM for .rpm packages
-
-`<PN>` is a parameter and expected to be a package name.
-
-The variable can contain multiple [one-line] metadata fields separated
-by the literal sequence '\\n'. The separator can be redefined using the
-variable flag ``separator``.
-
-Here is an example that adds two custom fields for ipk
-packages::
-
- PACKAGE_ADD_METADATA_IPK = "Vendor: CustomIpk\nGroup:Applications/Spreadsheets"
-
-Efficiently Fetching Source Files During a Build
-================================================
-
-The OpenEmbedded build system works with source files located through
-the :term:`SRC_URI` variable. When
-you build something using BitBake, a big part of the operation is
-locating and downloading all the source tarballs. For images,
-downloading all the source for various packages can take a significant
-amount of time.
-
-This section shows you how you can use mirrors to speed up fetching
-source files and how you can pre-fetch files all of which leads to more
-efficient use of resources and time.
-
-Setting up Effective Mirrors
-----------------------------
-
-A good deal that goes into a Yocto Project build is simply downloading
-all of the source tarballs. Maybe you have been working with another
-build system for which you have built up a
-sizable directory of source tarballs. Or, perhaps someone else has such
-a directory for which you have read access. If so, you can save time by
-adding statements to your configuration file so that the build process
-checks local directories first for existing tarballs before checking the
-Internet.
-
-Here is an efficient way to set it up in your ``local.conf`` file::
-
- SOURCE_MIRROR_URL ?= "file:///home/you/your-download-dir/"
- INHERIT += "own-mirrors"
- BB_GENERATE_MIRROR_TARBALLS = "1"
- # BB_NO_NETWORK = "1"
-
-In the previous example, the
-:term:`BB_GENERATE_MIRROR_TARBALLS`
-variable causes the OpenEmbedded build system to generate tarballs of
-the Git repositories and store them in the
-:term:`DL_DIR` directory. Due to
-performance reasons, generating and storing these tarballs is not the
-build system's default behavior.
-
-You can also use the
-:term:`PREMIRRORS` variable. For
-an example, see the variable's glossary entry in the Yocto Project
-Reference Manual.
-
-Getting Source Files and Suppressing the Build
-----------------------------------------------
-
-Another technique you can use to ready yourself for a successive string
-of build operations, is to pre-fetch all the source files without
-actually starting a build. This technique lets you work through any
-download issues and ultimately gathers all the source files into your
-download directory :ref:`structure-build-downloads`,
-which is located with :term:`DL_DIR`.
-
-Use the following BitBake command form to fetch all the necessary
-sources without starting the build::
-
- $ bitbake target --runall=fetch
-
-This
-variation of the BitBake command guarantees that you have all the
-sources for that BitBake target should you disconnect from the Internet
-and want to do the build later offline.
-
-Selecting an Initialization Manager
-===================================
-
-By default, the Yocto Project uses SysVinit as the initialization
-manager. However, there is also support for systemd, which is a full
-replacement for init with parallel starting of services, reduced shell
-overhead and other features that are used by many distributions.
-
-Within the system, SysVinit treats system components as services. These
-services are maintained as shell scripts stored in the ``/etc/init.d/``
-directory. Services organize into different run levels. This
-organization is maintained by putting links to the services in the
-``/etc/rcN.d/`` directories, where `N/` is one of the following options:
-"S", "0", "1", "2", "3", "4", "5", or "6".
-
-.. note::
-
- Each runlevel has a dependency on the previous runlevel. This
- dependency allows the services to work properly.
-
-In comparison, systemd treats components as units. Using units is a
-broader concept as compared to using a service. A unit includes several
-different types of entities. Service is one of the types of entities.
-The runlevel concept in SysVinit corresponds to the concept of a target
-in systemd, where target is also a type of supported unit.
-
-In a SysVinit-based system, services load sequentially (i.e. one by one)
-during init and parallelization is not supported. With systemd, services
-start in parallel. Needless to say, the method can have an impact on
-system startup performance.
-
-If you want to use SysVinit, you do not have to do anything. But, if you
-want to use systemd, you must take some steps as described in the
-following sections.
-
-Using systemd Exclusively
--------------------------
-
-Set these variables in your distribution configuration file as follows::
-
- DISTRO_FEATURES:append = " systemd"
- VIRTUAL-RUNTIME_init_manager = "systemd"
-
-You can also prevent the SysVinit distribution feature from
-being automatically enabled as follows::
-
- DISTRO_FEATURES_BACKFILL_CONSIDERED = "sysvinit"
-
-Doing so removes any
-redundant SysVinit scripts.
-
-To remove initscripts from your image altogether, set this variable
-also::
-
- VIRTUAL-RUNTIME_initscripts = ""
-
-For information on the backfill variable, see
-:term:`DISTRO_FEATURES_BACKFILL_CONSIDERED`.
-
-Using systemd for the Main Image and Using SysVinit for the Rescue Image
-------------------------------------------------------------------------
-
-Set these variables in your distribution configuration file as follows::
-
- DISTRO_FEATURES:append = " systemd"
- VIRTUAL-RUNTIME_init_manager = "systemd"
-
-Doing so causes your main image to use the
-``packagegroup-core-boot.bb`` recipe and systemd. The rescue/minimal
-image cannot use this package group. However, it can install SysVinit
-and the appropriate packages will have support for both systemd and
-SysVinit.
-
-Using systemd-journald without a traditional syslog daemon
-----------------------------------------------------------
-
-Counter-intuitively, ``systemd-journald`` is not a syslog runtime or provider,
-and the proper way to use systemd-journald as your sole logging mechanism is to
-effectively disable syslog entirely by setting these variables in your distribution
-configuration file::
-
- VIRTUAL-RUNTIME_syslog = ""
- VIRTUAL-RUNTIME_base-utils-syslog = ""
-
-Doing so will prevent ``rsyslog`` / ``busybox-syslog`` from being pulled in by
-default, leaving only ``journald``.
-
-Selecting a Device Manager
-==========================
-
-The Yocto Project provides multiple ways to manage the device manager
-(``/dev``):
-
-- Persistent and Pre-Populated ``/dev``: For this case, the ``/dev``
- directory is persistent and the required device nodes are created
- during the build.
-
-- Use ``devtmpfs`` with a Device Manager: For this case, the ``/dev``
- directory is provided by the kernel as an in-memory file system and
- is automatically populated by the kernel at runtime. Additional
- configuration of device nodes is done in user space by a device
- manager like ``udev`` or ``busybox-mdev``.
-
-Using Persistent and Pre-Populated ``/dev``
---------------------------------------------
-
-To use the static method for device population, you need to set the
-:term:`USE_DEVFS` variable to "0"
-as follows::
-
- USE_DEVFS = "0"
-
-The content of the resulting ``/dev`` directory is defined in a Device
-Table file. The
-:term:`IMAGE_DEVICE_TABLES`
-variable defines the Device Table to use and should be set in the
-machine or distro configuration file. Alternatively, you can set this
-variable in your ``local.conf`` configuration file.
-
-If you do not define the :term:`IMAGE_DEVICE_TABLES` variable, the default
-``device_table-minimal.txt`` is used::
-
- IMAGE_DEVICE_TABLES = "device_table-mymachine.txt"
-
-The population is handled by the ``makedevs`` utility during image
-creation:
-
-Using ``devtmpfs`` and a Device Manager
----------------------------------------
-
-To use the dynamic method for device population, you need to use (or be
-sure to set) the :term:`USE_DEVFS`
-variable to "1", which is the default::
-
- USE_DEVFS = "1"
-
-With this
-setting, the resulting ``/dev`` directory is populated by the kernel
-using ``devtmpfs``. Make sure the corresponding kernel configuration
-variable ``CONFIG_DEVTMPFS`` is set when building you build a Linux
-kernel.
-
-All devices created by ``devtmpfs`` will be owned by ``root`` and have
-permissions ``0600``.
-
-To have more control over the device nodes, you can use a device manager
-like ``udev`` or ``busybox-mdev``. You choose the device manager by
-defining the ``VIRTUAL-RUNTIME_dev_manager`` variable in your machine or
-distro configuration file. Alternatively, you can set this variable in
-your ``local.conf`` configuration file::
-
- VIRTUAL-RUNTIME_dev_manager = "udev"
-
- # Some alternative values
- # VIRTUAL-RUNTIME_dev_manager = "busybox-mdev"
- # VIRTUAL-RUNTIME_dev_manager = "systemd"
-
-Using an External SCM
-=====================
-
-If you're working on a recipe that pulls from an external Source Code
-Manager (SCM), it is possible to have the OpenEmbedded build system
-notice new recipe changes added to the SCM and then build the resulting
-packages that depend on the new recipes by using the latest versions.
-This only works for SCMs from which it is possible to get a sensible
-revision number for changes. Currently, you can do this with Apache
-Subversion (SVN), Git, and Bazaar (BZR) repositories.
-
-To enable this behavior, the :term:`PV` of
-the recipe needs to reference
-:term:`SRCPV`. Here is an example::
-
- PV = "1.2.3+git${SRCPV}"
-
-Then, you can add the following to your
-``local.conf``::
-
- SRCREV:pn-PN = "${AUTOREV}"
-
-:term:`PN` is the name of the recipe for
-which you want to enable automatic source revision updating.
-
-If you do not want to update your local configuration file, you can add
-the following directly to the recipe to finish enabling the feature::
-
- SRCREV = "${AUTOREV}"
-
-The Yocto Project provides a distribution named ``poky-bleeding``, whose
-configuration file contains the line::
-
- require conf/distro/include/poky-floating-revisions.inc
-
-This line pulls in the
-listed include file that contains numerous lines of exactly that form::
-
- #SRCREV:pn-opkg-native ?= "${AUTOREV}"
- #SRCREV:pn-opkg-sdk ?= "${AUTOREV}"
- #SRCREV:pn-opkg ?= "${AUTOREV}"
- #SRCREV:pn-opkg-utils-native ?= "${AUTOREV}"
- #SRCREV:pn-opkg-utils ?= "${AUTOREV}"
- SRCREV:pn-gconf-dbus ?= "${AUTOREV}"
- SRCREV:pn-matchbox-common ?= "${AUTOREV}"
- SRCREV:pn-matchbox-config-gtk ?= "${AUTOREV}"
- SRCREV:pn-matchbox-desktop ?= "${AUTOREV}"
- SRCREV:pn-matchbox-keyboard ?= "${AUTOREV}"
- SRCREV:pn-matchbox-panel-2 ?= "${AUTOREV}"
- SRCREV:pn-matchbox-themes-extra ?= "${AUTOREV}"
- SRCREV:pn-matchbox-terminal ?= "${AUTOREV}"
- SRCREV:pn-matchbox-wm ?= "${AUTOREV}"
- SRCREV:pn-settings-daemon ?= "${AUTOREV}"
- SRCREV:pn-screenshot ?= "${AUTOREV}"
- . . .
-
-These lines allow you to
-experiment with building a distribution that tracks the latest
-development source for numerous packages.
-
-.. note::
-
- The ``poky-bleeding`` distribution is not tested on a regular basis. Keep
- this in mind if you use it.
-
-Creating a Read-Only Root Filesystem
-====================================
-
-Suppose, for security reasons, you need to disable your target device's
-root filesystem's write permissions (i.e. you need a read-only root
-filesystem). Or, perhaps you are running the device's operating system
-from a read-only storage device. For either case, you can customize your
-image for that behavior.
-
-.. note::
-
- Supporting a read-only root filesystem requires that the system and
- applications do not try to write to the root filesystem. You must
- configure all parts of the target system to write elsewhere, or to
- gracefully fail in the event of attempting to write to the root
- filesystem.
-
-Creating the Root Filesystem
-----------------------------
-
-To create the read-only root filesystem, simply add the
-"read-only-rootfs" feature to your image, normally in one of two ways.
-The first way is to add the "read-only-rootfs" image feature in the
-image's recipe file via the :term:`IMAGE_FEATURES` variable::
-
- IMAGE_FEATURES += "read-only-rootfs"
-
-As an alternative, you can add the same feature
-from within your build directory's ``local.conf`` file with the
-associated :term:`EXTRA_IMAGE_FEATURES` variable, as in::
-
- EXTRA_IMAGE_FEATURES = "read-only-rootfs"
-
-For more information on how to use these variables, see the
-":ref:`dev-manual/common-tasks:Customizing Images Using Custom \`\`IMAGE_FEATURES\`\` and \`\`EXTRA_IMAGE_FEATURES\`\``"
-section. For information on the variables, see
-:term:`IMAGE_FEATURES` and
-:term:`EXTRA_IMAGE_FEATURES`.
-
-Post-Installation Scripts and Read-Only Root Filesystem
--------------------------------------------------------
-
-It is very important that you make sure all post-Installation
-(``pkg_postinst``) scripts for packages that are installed into the
-image can be run at the time when the root filesystem is created during
-the build on the host system. These scripts cannot attempt to run during
-the first boot on the target device. With the "read-only-rootfs" feature
-enabled, the build system makes sure that all post-installation scripts
-succeed at file system creation time. If any of these scripts
-still need to be run after the root filesystem is created, the build
-immediately fails. These build-time checks ensure that the build fails
-rather than the target device fails later during its initial boot
-operation.
-
-Most of the common post-installation scripts generated by the build
-system for the out-of-the-box Yocto Project are engineered so that they
-can run during root filesystem creation (e.g. post-installation scripts
-for caching fonts). However, if you create and add custom scripts, you
-need to be sure they can be run during this file system creation.
-
-Here are some common problems that prevent post-installation scripts
-from running during root filesystem creation:
-
-- *Not using $D in front of absolute paths:* The build system defines
- ``$``\ :term:`D` when the root
- filesystem is created. Furthermore, ``$D`` is blank when the script
- is run on the target device. This implies two purposes for ``$D``:
- ensuring paths are valid in both the host and target environments,
- and checking to determine which environment is being used as a method
- for taking appropriate actions.
-
-- *Attempting to run processes that are specific to or dependent on the
- target architecture:* You can work around these attempts by using
- native tools, which run on the host system, to accomplish the same
- tasks, or by alternatively running the processes under QEMU, which
- has the ``qemu_run_binary`` function. For more information, see the
- :ref:`qemu <ref-classes-qemu>` class.
-
-Areas With Write Access
------------------------
-
-With the "read-only-rootfs" feature enabled, any attempt by the target
-to write to the root filesystem at runtime fails. Consequently, you must
-make sure that you configure processes and applications that attempt
-these types of writes do so to directories with write access (e.g.
-``/tmp`` or ``/var/run``).
-
-Maintaining Build Output Quality
-================================
-
-Many factors can influence the quality of a build. For example, if you
-upgrade a recipe to use a new version of an upstream software package or
-you experiment with some new configuration options, subtle changes can
-occur that you might not detect until later. Consider the case where
-your recipe is using a newer version of an upstream package. In this
-case, a new version of a piece of software might introduce an optional
-dependency on another library, which is auto-detected. If that library
-has already been built when the software is building, the software will
-link to the built library and that library will be pulled into your
-image along with the new software even if you did not want the library.
-
-The :ref:`buildhistory <ref-classes-buildhistory>`
-class helps you maintain the quality of your build output. You
-can use the class to highlight unexpected and possibly unwanted changes
-in the build output. When you enable build history, it records
-information about the contents of each package and image and then
-commits that information to a local Git repository where you can examine
-the information.
-
-The remainder of this section describes the following:
-
-- :ref:`How you can enable and disable build history <dev-manual/common-tasks:enabling and disabling build history>`
-
-- :ref:`How to understand what the build history contains <dev-manual/common-tasks:understanding what the build history contains>`
-
-- :ref:`How to limit the information used for build history <dev-manual/common-tasks:using build history to gather image information only>`
-
-- :ref:`How to examine the build history from both a command-line and web interface <dev-manual/common-tasks:examining build history information>`
-
-Enabling and Disabling Build History
-------------------------------------
-
-Build history is disabled by default. To enable it, add the following
-:term:`INHERIT` statement and set the
-:term:`BUILDHISTORY_COMMIT`
-variable to "1" at the end of your ``conf/local.conf`` file found in the
-:term:`Build Directory`::
-
- INHERIT += "buildhistory"
- BUILDHISTORY_COMMIT = "1"
-
-Enabling build history as
-previously described causes the OpenEmbedded build system to collect
-build output information and commit it as a single commit to a local
-:ref:`overview-manual/development-environment:git` repository.
-
-.. note::
-
- Enabling build history increases your build times slightly,
- particularly for images, and increases the amount of disk space used
- during the build.
-
-You can disable build history by removing the previous statements from
-your ``conf/local.conf`` file.
-
-Understanding What the Build History Contains
----------------------------------------------
-
-Build history information is kept in
-``${``\ :term:`TOPDIR`\ ``}/buildhistory``
-in the Build Directory as defined by the
-:term:`BUILDHISTORY_DIR`
-variable. Here is an example abbreviated listing:
-
-.. image:: figures/buildhistory.png
- :align: center
- :width: 50%
-
-At the top level, there is a ``metadata-revs`` file that lists the
-revisions of the repositories for the enabled layers when the build was
-produced. The rest of the data splits into separate ``packages``,
-``images`` and ``sdk`` directories, the contents of which are described
-as follows.
-
-Build History Package Information
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-The history for each package contains a text file that has name-value
-pairs with information about the package. For example,
-``buildhistory/packages/i586-poky-linux/busybox/busybox/latest``
-contains the following:
-
-.. code-block:: none
-
- PV = 1.22.1
- PR = r32
- RPROVIDES =
- RDEPENDS = glibc (>= 2.20) update-alternatives-opkg
- RRECOMMENDS = busybox-syslog busybox-udhcpc update-rc.d
- PKGSIZE = 540168
- FILES = /usr/bin/* /usr/sbin/* /usr/lib/busybox/* /usr/lib/lib*.so.* \
- /etc /com /var /bin/* /sbin/* /lib/*.so.* /lib/udev/rules.d \
- /usr/lib/udev/rules.d /usr/share/busybox /usr/lib/busybox/* \
- /usr/share/pixmaps /usr/share/applications /usr/share/idl \
- /usr/share/omf /usr/share/sounds /usr/lib/bonobo/servers
- FILELIST = /bin/busybox /bin/busybox.nosuid /bin/busybox.suid /bin/sh \
- /etc/busybox.links.nosuid /etc/busybox.links.suid
-
-Most of these
-name-value pairs correspond to variables used to produce the package.
-The exceptions are ``FILELIST``, which is the actual list of files in
-the package, and ``PKGSIZE``, which is the total size of files in the
-package in bytes.
-
-There is also a file that corresponds to the recipe from which the package
-came (e.g. ``buildhistory/packages/i586-poky-linux/busybox/latest``):
-
-.. code-block:: none
-
- PV = 1.22.1
- PR = r32
- DEPENDS = initscripts kern-tools-native update-rc.d-native \
- virtual/i586-poky-linux-compilerlibs virtual/i586-poky-linux-gcc \
- virtual/libc virtual/update-alternatives
- PACKAGES = busybox-ptest busybox-httpd busybox-udhcpd busybox-udhcpc \
- busybox-syslog busybox-mdev busybox-hwclock busybox-dbg \
- busybox-staticdev busybox-dev busybox-doc busybox-locale busybox
-
-Finally, for those recipes fetched from a version control system (e.g.,
-Git), there is a file that lists source revisions that are specified in
-the recipe and the actual revisions used during the build. Listed
-and actual revisions might differ when
-:term:`SRCREV` is set to
-${:term:`AUTOREV`}. Here is an
-example assuming
-``buildhistory/packages/qemux86-poky-linux/linux-yocto/latest_srcrev``)::
-
- # SRCREV_machine = "38cd560d5022ed2dbd1ab0dca9642e47c98a0aa1"
- SRCREV_machine = "38cd560d5022ed2dbd1ab0dca9642e47c98a0aa1"
- # SRCREV_meta = "a227f20eff056e511d504b2e490f3774ab260d6f"
- SRCREV_meta ="a227f20eff056e511d504b2e490f3774ab260d6f"
-
-You can use the
-``buildhistory-collect-srcrevs`` command with the ``-a`` option to
-collect the stored :term:`SRCREV` values from build history and report them
-in a format suitable for use in global configuration (e.g.,
-``local.conf`` or a distro include file) to override floating
-:term:`AUTOREV` values to a fixed set of revisions. Here is some example
-output from this command::
-
- $ buildhistory-collect-srcrevs -a
- # all-poky-linux
- SRCREV:pn-ca-certificates = "07de54fdcc5806bde549e1edf60738c6bccf50e8"
- SRCREV:pn-update-rc.d = "8636cf478d426b568c1be11dbd9346f67e03adac"
- # core2-64-poky-linux
- SRCREV:pn-binutils = "87d4632d36323091e731eb07b8aa65f90293da66"
- SRCREV:pn-btrfs-tools = "8ad326b2f28c044cb6ed9016d7c3285e23b673c8"
- SRCREV_bzip2-tests:pn-bzip2 = "f9061c030a25de5b6829e1abf373057309c734c0"
- SRCREV:pn-e2fsprogs = "02540dedd3ddc52c6ae8aaa8a95ce75c3f8be1c0"
- SRCREV:pn-file = "504206e53a89fd6eed71aeaf878aa3512418eab1"
- SRCREV_glibc:pn-glibc = "24962427071fa532c3c48c918e9d64d719cc8a6c"
- SRCREV:pn-gnome-desktop-testing = "e346cd4ed2e2102c9b195b614f3c642d23f5f6e7"
- SRCREV:pn-init-system-helpers = "dbd9197569c0935029acd5c9b02b84c68fd937ee"
- SRCREV:pn-kmod = "b6ecfc916a17eab8f93be5b09f4e4f845aabd3d1"
- SRCREV:pn-libnsl2 = "82245c0c58add79a8e34ab0917358217a70e5100"
- SRCREV:pn-libseccomp = "57357d2741a3b3d3e8425889a6b79a130e0fa2f3"
- SRCREV:pn-libxcrypt = "50cf2b6dd4fdf04309445f2eec8de7051d953abf"
- SRCREV:pn-ncurses = "51d0fd9cc3edb975f04224f29f777f8f448e8ced"
- SRCREV:pn-procps = "19a508ea121c0c4ac6d0224575a036de745eaaf8"
- SRCREV:pn-psmisc = "5fab6b7ab385080f1db725d6803136ec1841a15f"
- SRCREV:pn-ptest-runner = "bcb82804daa8f725b6add259dcef2067e61a75aa"
- SRCREV:pn-shared-mime-info = "18e558fa1c8b90b86757ade09a4ba4d6a6cf8f70"
- SRCREV:pn-zstd = "e47e674cd09583ff0503f0f6defd6d23d8b718d3"
- # qemux86_64-poky-linux
- SRCREV_machine:pn-linux-yocto = "20301aeb1a64164b72bc72af58802b315e025c9c"
- SRCREV_meta:pn-linux-yocto = "2d38a472b21ae343707c8bd64ac68a9eaca066a0"
- # x86_64-linux
- SRCREV:pn-binutils-cross-x86_64 = "87d4632d36323091e731eb07b8aa65f90293da66"
- SRCREV_glibc:pn-cross-localedef-native = "24962427071fa532c3c48c918e9d64d719cc8a6c"
- SRCREV_localedef:pn-cross-localedef-native = "794da69788cbf9bf57b59a852f9f11307663fa87"
- SRCREV:pn-debianutils-native = "de14223e5bffe15e374a441302c528ffc1cbed57"
- SRCREV:pn-libmodulemd-native = "ee80309bc766d781a144e6879419b29f444d94eb"
- SRCREV:pn-virglrenderer-native = "363915595e05fb252e70d6514be2f0c0b5ca312b"
- SRCREV:pn-zstd-native = "e47e674cd09583ff0503f0f6defd6d23d8b718d3"
-
-.. note::
-
- Here are some notes on using the ``buildhistory-collect-srcrevs`` command:
-
- - By default, only values where the :term:`SRCREV` was not hardcoded
- (usually when :term:`AUTOREV` is used) are reported. Use the ``-a``
- option to see all :term:`SRCREV` values.
-
- - The output statements might not have any effect if overrides are
- applied elsewhere in the build system configuration. Use the
- ``-f`` option to add the ``forcevariable`` override to each output
- line if you need to work around this restriction.
-
- - The script does apply special handling when building for multiple
- machines. However, the script does place a comment before each set
- of values that specifies which triplet to which they belong as
- previously shown (e.g., ``i586-poky-linux``).
-
-Build History Image Information
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-The files produced for each image are as follows:
-
-- ``image-files:`` A directory containing selected files from the root
- filesystem. The files are defined by
- :term:`BUILDHISTORY_IMAGE_FILES`.
-
-- ``build-id.txt:`` Human-readable information about the build
- configuration and metadata source revisions. This file contains the
- full build header as printed by BitBake.
-
-- ``*.dot:`` Dependency graphs for the image that are compatible with
- ``graphviz``.
-
-- ``files-in-image.txt:`` A list of files in the image with
- permissions, owner, group, size, and symlink information.
-
-- ``image-info.txt:`` A text file containing name-value pairs with
- information about the image. See the following listing example for
- more information.
-
-- ``installed-package-names.txt:`` A list of installed packages by name
- only.
-
-- ``installed-package-sizes.txt:`` A list of installed packages ordered
- by size.
-
-- ``installed-packages.txt:`` A list of installed packages with full
- package filenames.
-
-.. note::
-
- Installed package information is able to be gathered and produced
- even if package management is disabled for the final image.
-
-Here is an example of ``image-info.txt``:
-
-.. code-block:: none
-
- DISTRO = poky
- DISTRO_VERSION = 3.4+snapshot-a0245d7be08f3d24ea1875e9f8872aa6bbff93be
- USER_CLASSES = buildstats
- IMAGE_CLASSES = qemuboot qemuboot license_image
- IMAGE_FEATURES = debug-tweaks
- IMAGE_LINGUAS =
- IMAGE_INSTALL = packagegroup-core-boot speex speexdsp
- BAD_RECOMMENDATIONS =
- NO_RECOMMENDATIONS =
- PACKAGE_EXCLUDE =
- ROOTFS_POSTPROCESS_COMMAND = write_package_manifest; license_create_manifest; cve_check_write_rootfs_manifest; ssh_allow_empty_password; ssh_allow_root_login; postinst_enable_logging; rootfs_update_timestamp; write_image_test_data; empty_var_volatile; sort_passwd; rootfs_reproducible;
- IMAGE_POSTPROCESS_COMMAND = buildhistory_get_imageinfo ;
- IMAGESIZE = 9265
-
-Other than ``IMAGESIZE``,
-which is the total size of the files in the image in Kbytes, the
-name-value pairs are variables that may have influenced the content of
-the image. This information is often useful when you are trying to
-determine why a change in the package or file listings has occurred.
-
-Using Build History to Gather Image Information Only
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-As you can see, build history produces image information, including
-dependency graphs, so you can see why something was pulled into the
-image. If you are just interested in this information and not interested
-in collecting specific package or SDK information, you can enable
-writing only image information without any history by adding the
-following to your ``conf/local.conf`` file found in the
-:term:`Build Directory`::
-
- INHERIT += "buildhistory"
- BUILDHISTORY_COMMIT = "0"
- BUILDHISTORY_FEATURES = "image"
-
-Here, you set the
-:term:`BUILDHISTORY_FEATURES`
-variable to use the image feature only.
-
-Build History SDK Information
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-Build history collects similar information on the contents of SDKs (e.g.
-``bitbake -c populate_sdk imagename``) as compared to information it
-collects for images. Furthermore, this information differs depending on
-whether an extensible or standard SDK is being produced.
-
-The following list shows the files produced for SDKs:
-
-- ``files-in-sdk.txt:`` A list of files in the SDK with permissions,
- owner, group, size, and symlink information. This list includes both
- the host and target parts of the SDK.
-
-- ``sdk-info.txt:`` A text file containing name-value pairs with
- information about the SDK. See the following listing example for more
- information.
-
-- ``sstate-task-sizes.txt:`` A text file containing name-value pairs
- with information about task group sizes (e.g. ``do_populate_sysroot``
- tasks have a total size). The ``sstate-task-sizes.txt`` file exists
- only when an extensible SDK is created.
-
-- ``sstate-package-sizes.txt:`` A text file containing name-value pairs
- with information for the shared-state packages and sizes in the SDK.
- The ``sstate-package-sizes.txt`` file exists only when an extensible
- SDK is created.
-
-- ``sdk-files:`` A folder that contains copies of the files mentioned
- in ``BUILDHISTORY_SDK_FILES`` if the files are present in the output.
- Additionally, the default value of ``BUILDHISTORY_SDK_FILES`` is
- specific to the extensible SDK although you can set it differently if
- you would like to pull in specific files from the standard SDK.
-
- The default files are ``conf/local.conf``, ``conf/bblayers.conf``,
- ``conf/auto.conf``, ``conf/locked-sigs.inc``, and
- ``conf/devtool.conf``. Thus, for an extensible SDK, these files get
- copied into the ``sdk-files`` directory.
-
-- The following information appears under each of the ``host`` and
- ``target`` directories for the portions of the SDK that run on the
- host and on the target, respectively:
-
- .. note::
-
- The following files for the most part are empty when producing an
- extensible SDK because this type of SDK is not constructed from
- packages as is the standard SDK.
-
- - ``depends.dot:`` Dependency graph for the SDK that is compatible
- with ``graphviz``.
-
- - ``installed-package-names.txt:`` A list of installed packages by
- name only.
-
- - ``installed-package-sizes.txt:`` A list of installed packages
- ordered by size.
-
- - ``installed-packages.txt:`` A list of installed packages with full
- package filenames.
-
-Here is an example of ``sdk-info.txt``:
-
-.. code-block:: none
-
- DISTRO = poky
- DISTRO_VERSION = 1.3+snapshot-20130327
- SDK_NAME = poky-glibc-i686-arm
- SDK_VERSION = 1.3+snapshot
- SDKMACHINE =
- SDKIMAGE_FEATURES = dev-pkgs dbg-pkgs
- BAD_RECOMMENDATIONS =
- SDKSIZE = 352712
-
-Other than ``SDKSIZE``, which is
-the total size of the files in the SDK in Kbytes, the name-value pairs
-are variables that might have influenced the content of the SDK. This
-information is often useful when you are trying to determine why a
-change in the package or file listings has occurred.
-
-Examining Build History Information
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-You can examine build history output from the command line or from a web
-interface.
-
-To see any changes that have occurred (assuming you have
-:term:`BUILDHISTORY_COMMIT` = "1"),
-you can simply use any Git command that allows you to view the history
-of a repository. Here is one method::
-
- $ git log -p
-
-You need to realize,
-however, that this method does show changes that are not significant
-(e.g. a package's size changing by a few bytes).
-
-There is a command-line tool called ``buildhistory-diff``, though,
-that queries the Git repository and prints just the differences that
-might be significant in human-readable form. Here is an example::
-
- $ poky/poky/scripts/buildhistory-diff . HEAD^
- Changes to images/qemux86_64/glibc/core-image-minimal (files-in-image.txt):
- /etc/anotherpkg.conf was added
- /sbin/anotherpkg was added
- * (installed-package-names.txt):
- * anotherpkg was added
- Changes to images/qemux86_64/glibc/core-image-minimal (installed-package-names.txt):
- anotherpkg was added
- packages/qemux86_64-poky-linux/v86d: PACKAGES: added "v86d-extras"
- * PR changed from "r0" to "r1"
- * PV changed from "0.1.10" to "0.1.12"
- packages/qemux86_64-poky-linux/v86d/v86d: PKGSIZE changed from 110579 to 144381 (+30%)
- * PR changed from "r0" to "r1"
- * PV changed from "0.1.10" to "0.1.12"
-
-.. note::
-
- The ``buildhistory-diff`` tool requires the ``GitPython``
- package. Be sure to install it using Pip3 as follows::
-
- $ pip3 install GitPython --user
-
-
- Alternatively, you can install ``python3-git`` using the appropriate
- distribution package manager (e.g. ``apt``, ``dnf``, or ``zipper``).
-
-To see changes to the build history using a web interface, follow the
-instruction in the ``README`` file
-:yocto_git:`here </buildhistory-web/>`.
-
-Here is a sample screenshot of the interface:
-
-.. image:: figures/buildhistory-web.png
- :width: 100%
-
-Performing Automated Runtime Testing
-====================================
-
-The OpenEmbedded build system makes available a series of automated
-tests for images to verify runtime functionality. You can run these
-tests on either QEMU or actual target hardware. Tests are written in
-Python making use of the ``unittest`` module, and the majority of them
-run commands on the target system over SSH. This section describes how
-you set up the environment to use these tests, run available tests, and
-write and add your own tests.
-
-For information on the test and QA infrastructure available within the
-Yocto Project, see the ":ref:`ref-manual/release-process:testing and quality assurance`"
-section in the Yocto Project Reference Manual.
-
-Enabling Tests
---------------
-
-Depending on whether you are planning to run tests using QEMU or on the
-hardware, you have to take different steps to enable the tests. See the
-following subsections for information on how to enable both types of
-tests.
-
-Enabling Runtime Tests on QEMU
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-In order to run tests, you need to do the following:
-
-- *Set up to avoid interaction with sudo for networking:* To
- accomplish this, you must do one of the following:
-
- - Add ``NOPASSWD`` for your user in ``/etc/sudoers`` either for all
- commands or just for ``runqemu-ifup``. You must provide the full
- path as that can change if you are using multiple clones of the
- source repository.
-
- .. note::
-
- On some distributions, you also need to comment out "Defaults
- requiretty" in ``/etc/sudoers``.
-
- - Manually configure a tap interface for your system.
-
- - Run as root the script in ``scripts/runqemu-gen-tapdevs``, which
- should generate a list of tap devices. This is the option
- typically chosen for Autobuilder-type environments.
-
- .. note::
-
- - Be sure to use an absolute path when calling this script
- with sudo.
-
- - The package recipe ``qemu-helper-native`` is required to run
- this script. Build the package using the following command::
-
- $ bitbake qemu-helper-native
-
-- *Set the DISPLAY variable:* You need to set this variable so that
- you have an X server available (e.g. start ``vncserver`` for a
- headless machine).
-
-- *Be sure your host's firewall accepts incoming connections from
- 192.168.7.0/24:* Some of the tests (in particular DNF tests) start an
- HTTP server on a random high number port, which is used to serve
- files to the target. The DNF module serves
- ``${WORKDIR}/oe-rootfs-repo`` so it can run DNF channel commands.
- That means your host's firewall must accept incoming connections from
- 192.168.7.0/24, which is the default IP range used for tap devices by
- ``runqemu``.
-
-- *Be sure your host has the correct packages installed:* Depending
- your host's distribution, you need to have the following packages
- installed:
-
- - Ubuntu and Debian: ``sysstat`` and ``iproute2``
-
- - openSUSE: ``sysstat`` and ``iproute2``
-
- - Fedora: ``sysstat`` and ``iproute``
-
- - CentOS: ``sysstat`` and ``iproute``
-
-Once you start running the tests, the following happens:
-
-1. A copy of the root filesystem is written to ``${WORKDIR}/testimage``.
-
-2. The image is booted under QEMU using the standard ``runqemu`` script.
-
-3. A default timeout of 500 seconds occurs to allow for the boot process
- to reach the login prompt. You can change the timeout period by
- setting
- :term:`TEST_QEMUBOOT_TIMEOUT`
- in the ``local.conf`` file.
-
-4. Once the boot process is reached and the login prompt appears, the
- tests run. The full boot log is written to
- ``${WORKDIR}/testimage/qemu_boot_log``.
-
-5. Each test module loads in the order found in :term:`TEST_SUITES`. You can
- find the full output of the commands run over SSH in
- ``${WORKDIR}/testimgage/ssh_target_log``.
-
-6. If no failures occur, the task running the tests ends successfully.
- You can find the output from the ``unittest`` in the task log at
- ``${WORKDIR}/temp/log.do_testimage``.
-
-Enabling Runtime Tests on Hardware
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-The OpenEmbedded build system can run tests on real hardware, and for
-certain devices it can also deploy the image to be tested onto the
-device beforehand.
-
-For automated deployment, a "controller image" is installed onto the
-hardware once as part of setup. Then, each time tests are to be run, the
-following occurs:
-
-1. The controller image is booted into and used to write the image to be
- tested to a second partition.
-
-2. The device is then rebooted using an external script that you need to
- provide.
-
-3. The device boots into the image to be tested.
-
-When running tests (independent of whether the image has been deployed
-automatically or not), the device is expected to be connected to a
-network on a pre-determined IP address. You can either use static IP
-addresses written into the image, or set the image to use DHCP and have
-your DHCP server on the test network assign a known IP address based on
-the MAC address of the device.
-
-In order to run tests on hardware, you need to set :term:`TEST_TARGET` to an
-appropriate value. For QEMU, you do not have to change anything, the
-default value is "qemu". For running tests on hardware, the following
-options are available:
-
-- *"simpleremote":* Choose "simpleremote" if you are going to run tests
- on a target system that is already running the image to be tested and
- is available on the network. You can use "simpleremote" in
- conjunction with either real hardware or an image running within a
- separately started QEMU or any other virtual machine manager.
-
-- *"SystemdbootTarget":* Choose "SystemdbootTarget" if your hardware is
- an EFI-based machine with ``systemd-boot`` as bootloader and
- ``core-image-testmaster`` (or something similar) is installed. Also,
- your hardware under test must be in a DHCP-enabled network that gives
- it the same IP address for each reboot.
-
- If you choose "SystemdbootTarget", there are additional requirements
- and considerations. See the
- ":ref:`dev-manual/common-tasks:selecting systemdboottarget`" section, which
- follows, for more information.
-
-- *"BeagleBoneTarget":* Choose "BeagleBoneTarget" if you are deploying
- images and running tests on the BeagleBone "Black" or original
- "White" hardware. For information on how to use these tests, see the
- comments at the top of the BeagleBoneTarget
- ``meta-yocto-bsp/lib/oeqa/controllers/beaglebonetarget.py`` file.
-
-- *"EdgeRouterTarget":* Choose "EdgeRouterTarget" if you are deploying
- images and running tests on the Ubiquiti Networks EdgeRouter Lite.
- For information on how to use these tests, see the comments at the
- top of the EdgeRouterTarget
- ``meta-yocto-bsp/lib/oeqa/controllers/edgeroutertarget.py`` file.
-
-- *"GrubTarget":* Choose "GrubTarget" if you are deploying images and running
- tests on any generic PC that boots using GRUB. For information on how
- to use these tests, see the comments at the top of the GrubTarget
- ``meta-yocto-bsp/lib/oeqa/controllers/grubtarget.py`` file.
-
-- *"your-target":* Create your own custom target if you want to run
- tests when you are deploying images and running tests on a custom
- machine within your BSP layer. To do this, you need to add a Python
- unit that defines the target class under ``lib/oeqa/controllers/``
- within your layer. You must also provide an empty ``__init__.py``.
- For examples, see files in ``meta-yocto-bsp/lib/oeqa/controllers/``.
-
-Selecting SystemdbootTarget
-~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-If you did not set :term:`TEST_TARGET` to "SystemdbootTarget", then you do
-not need any information in this section. You can skip down to the
-":ref:`dev-manual/common-tasks:running tests`" section.
-
-If you did set :term:`TEST_TARGET` to "SystemdbootTarget", you also need to
-perform a one-time setup of your controller image by doing the following:
-
-1. *Set EFI_PROVIDER:* Be sure that :term:`EFI_PROVIDER` is as follows::
-
- EFI_PROVIDER = "systemd-boot"
-
-2. *Build the controller image:* Build the ``core-image-testmaster`` image.
- The ``core-image-testmaster`` recipe is provided as an example for a
- "controller" image and you can customize the image recipe as you would
- any other recipe.
-
- Here are the image recipe requirements:
-
- - Inherits ``core-image`` so that kernel modules are installed.
-
- - Installs normal linux utilities not BusyBox ones (e.g. ``bash``,
- ``coreutils``, ``tar``, ``gzip``, and ``kmod``).
-
- - Uses a custom Initial RAM Disk (initramfs) image with a custom
- installer. A normal image that you can install usually creates a
- single root filesystem partition. This image uses another installer that
- creates a specific partition layout. Not all Board Support
- Packages (BSPs) can use an installer. For such cases, you need to
- manually create the following partition layout on the target:
-
- - First partition mounted under ``/boot``, labeled "boot".
-
- - The main root filesystem partition where this image gets installed,
- which is mounted under ``/``.
-
- - Another partition labeled "testrootfs" where test images get
- deployed.
-
-3. *Install image:* Install the image that you just built on the target
- system.
-
-The final thing you need to do when setting :term:`TEST_TARGET` to
-"SystemdbootTarget" is to set up the test image:
-
-1. *Set up your local.conf file:* Make sure you have the following
- statements in your ``local.conf`` file::
-
- IMAGE_FSTYPES += "tar.gz"
- INHERIT += "testimage"
- TEST_TARGET = "SystemdbootTarget"
- TEST_TARGET_IP = "192.168.2.3"
-
-2. *Build your test image:* Use BitBake to build the image::
-
- $ bitbake core-image-sato
-
-Power Control
-~~~~~~~~~~~~~
-
-For most hardware targets other than "simpleremote", you can control
-power:
-
-- You can use :term:`TEST_POWERCONTROL_CMD` together with
- :term:`TEST_POWERCONTROL_EXTRA_ARGS` as a command that runs on the host
- and does power cycling. The test code passes one argument to that
- command: off, on or cycle (off then on). Here is an example that
- could appear in your ``local.conf`` file::
-
- TEST_POWERCONTROL_CMD = "powercontrol.exp test 10.11.12.1 nuc1"
-
- In this example, the expect
- script does the following:
-
- .. code-block:: shell
-
- ssh test@10.11.12.1 "pyctl nuc1 arg"
-
- It then runs a Python script that controls power for a label called
- ``nuc1``.
-
- .. note::
-
- You need to customize :term:`TEST_POWERCONTROL_CMD` and
- :term:`TEST_POWERCONTROL_EXTRA_ARGS` for your own setup. The one requirement
- is that it accepts "on", "off", and "cycle" as the last argument.
-
-- When no command is defined, it connects to the device over SSH and
- uses the classic reboot command to reboot the device. Classic reboot
- is fine as long as the machine actually reboots (i.e. the SSH test
- has not failed). It is useful for scenarios where you have a simple
- setup, typically with a single board, and where some manual
- interaction is okay from time to time.
-
-If you have no hardware to automatically perform power control but still
-wish to experiment with automated hardware testing, you can use the
-``dialog-power-control`` script that shows a dialog prompting you to perform
-the required power action. This script requires either KDialog or Zenity
-to be installed. To use this script, set the
-:term:`TEST_POWERCONTROL_CMD`
-variable as follows::
-
- TEST_POWERCONTROL_CMD = "${COREBASE}/scripts/contrib/dialog-power-control"
-
-Serial Console Connection
-~~~~~~~~~~~~~~~~~~~~~~~~~
-
-For test target classes requiring a serial console to interact with the
-bootloader (e.g. BeagleBoneTarget, EdgeRouterTarget, and GrubTarget),
-you need to specify a command to use to connect to the serial console of
-the target machine by using the
-:term:`TEST_SERIALCONTROL_CMD`
-variable and optionally the
-:term:`TEST_SERIALCONTROL_EXTRA_ARGS`
-variable.
-
-These cases could be a serial terminal program if the machine is
-connected to a local serial port, or a ``telnet`` or ``ssh`` command
-connecting to a remote console server. Regardless of the case, the
-command simply needs to connect to the serial console and forward that
-connection to standard input and output as any normal terminal program
-does. For example, to use the picocom terminal program on serial device
-``/dev/ttyUSB0`` at 115200bps, you would set the variable as follows::
-
- TEST_SERIALCONTROL_CMD = "picocom /dev/ttyUSB0 -b 115200"
-
-For local
-devices where the serial port device disappears when the device reboots,
-an additional "serdevtry" wrapper script is provided. To use this
-wrapper, simply prefix the terminal command with
-``${COREBASE}/scripts/contrib/serdevtry``::
-
- TEST_SERIALCONTROL_CMD = "${COREBASE}/scripts/contrib/serdevtry picocom -b 115200 /dev/ttyUSB0"
-
-Running Tests
--------------
-
-You can start the tests automatically or manually:
-
-- *Automatically running tests:* To run the tests automatically after
- the OpenEmbedded build system successfully creates an image, first
- set the
- :term:`TESTIMAGE_AUTO`
- variable to "1" in your ``local.conf`` file in the
- :term:`Build Directory`::
-
- TESTIMAGE_AUTO = "1"
-
- Next, build your image. If the image successfully builds, the
- tests run::
-
- bitbake core-image-sato
-
-- *Manually running tests:* To manually run the tests, first globally
- inherit the
- :ref:`testimage <ref-classes-testimage*>` class
- by editing your ``local.conf`` file::
-
- INHERIT += "testimage"
-
- Next, use BitBake to run the tests::
-
- bitbake -c testimage image
-
-All test files reside in ``meta/lib/oeqa/runtime`` in the
-:term:`Source Directory`. A test name maps
-directly to a Python module. Each test module may contain a number of
-individual tests. Tests are usually grouped together by the area tested
-(e.g tests for systemd reside in ``meta/lib/oeqa/runtime/systemd.py``).
-
-You can add tests to any layer provided you place them in the proper
-area and you extend :term:`BBPATH` in
-the ``local.conf`` file as normal. Be sure that tests reside in
-``layer/lib/oeqa/runtime``.
-
-.. note::
-
- Be sure that module names do not collide with module names used in
- the default set of test modules in ``meta/lib/oeqa/runtime``.
-
-You can change the set of tests run by appending or overriding
-:term:`TEST_SUITES` variable in
-``local.conf``. Each name in :term:`TEST_SUITES` represents a required test
-for the image. Test modules named within :term:`TEST_SUITES` cannot be
-skipped even if a test is not suitable for an image (e.g. running the
-RPM tests on an image without ``rpm``). Appending "auto" to
-:term:`TEST_SUITES` causes the build system to try to run all tests that are
-suitable for the image (i.e. each test module may elect to skip itself).
-
-The order you list tests in :term:`TEST_SUITES` is important and influences
-test dependencies. Consequently, tests that depend on other tests should
-be added after the test on which they depend. For example, since the
-``ssh`` test depends on the ``ping`` test, "ssh" needs to come after
-"ping" in the list. The test class provides no re-ordering or dependency
-handling.
-
-.. note::
-
- Each module can have multiple classes with multiple test methods.
- And, Python ``unittest`` rules apply.
-
-Here are some things to keep in mind when running tests:
-
-- The default tests for the image are defined as::
-
- DEFAULT_TEST_SUITES:pn-image = "ping ssh df connman syslog xorg scp vnc date rpm dnf dmesg"
-
-- Add your own test to the list of the by using the following::
-
- TEST_SUITES:append = " mytest"
-
-- Run a specific list of tests as follows::
-
- TEST_SUITES = "test1 test2 test3"
-
- Remember, order is important. Be sure to place a test that is
- dependent on another test later in the order.
-
-Exporting Tests
----------------
-
-You can export tests so that they can run independently of the build
-system. Exporting tests is required if you want to be able to hand the
-test execution off to a scheduler. You can only export tests that are
-defined in :term:`TEST_SUITES`.
-
-If your image is already built, make sure the following are set in your
-``local.conf`` file::
-
- INHERIT += "testexport"
- TEST_TARGET_IP = "IP-address-for-the-test-target"
- TEST_SERVER_IP = "IP-address-for-the-test-server"
-
-You can then export the tests with the
-following BitBake command form::
-
- $ bitbake image -c testexport
-
-Exporting the tests places them in the
-:term:`Build Directory` in
-``tmp/testexport/``\ image, which is controlled by the
-:term:`TEST_EXPORT_DIR` variable.
-
-You can now run the tests outside of the build environment::
-
- $ cd tmp/testexport/image
- $ ./runexported.py testdata.json
-
-Here is a complete example that shows IP addresses and uses the
-``core-image-sato`` image::
-
- INHERIT += "testexport"
- TEST_TARGET_IP = "192.168.7.2"
- TEST_SERVER_IP = "192.168.7.1"
-
-Use BitBake to export the tests::
-
- $ bitbake core-image-sato -c testexport
-
-Run the tests outside of
-the build environment using the following::
-
- $ cd tmp/testexport/core-image-sato
- $ ./runexported.py testdata.json
-
-Writing New Tests
------------------
-
-As mentioned previously, all new test files need to be in the proper
-place for the build system to find them. New tests for additional
-functionality outside of the core should be added to the layer that adds
-the functionality, in ``layer/lib/oeqa/runtime`` (as long as
-:term:`BBPATH` is extended in the
-layer's ``layer.conf`` file as normal). Just remember the following:
-
-- Filenames need to map directly to test (module) names.
-
-- Do not use module names that collide with existing core tests.
-
-- Minimally, an empty ``__init__.py`` file must be present in the runtime
- directory.
-
-To create a new test, start by copying an existing module (e.g.
-``syslog.py`` or ``gcc.py`` are good ones to use). Test modules can use
-code from ``meta/lib/oeqa/utils``, which are helper classes.
-
-.. note::
-
- Structure shell commands such that you rely on them and they return a
- single code for success. Be aware that sometimes you will need to
- parse the output. See the ``df.py`` and ``date.py`` modules for examples.
-
-You will notice that all test classes inherit ``oeRuntimeTest``, which
-is found in ``meta/lib/oetest.py``. This base class offers some helper
-attributes, which are described in the following sections:
-
-Class Methods
-~~~~~~~~~~~~~
-
-Class methods are as follows:
-
-- *hasPackage(pkg):* Returns "True" if ``pkg`` is in the installed
- package list of the image, which is based on the manifest file that
- is generated during the ``do_rootfs`` task.
-
-- *hasFeature(feature):* Returns "True" if the feature is in
- :term:`IMAGE_FEATURES` or
- :term:`DISTRO_FEATURES`.
-
-Class Attributes
-~~~~~~~~~~~~~~~~
-
-Class attributes are as follows:
-
-- *pscmd:* Equals "ps -ef" if ``procps`` is installed in the image.
- Otherwise, ``pscmd`` equals "ps" (busybox).
-
-- *tc:* The called test context, which gives access to the
- following attributes:
-
- - *d:* The BitBake datastore, which allows you to use stuff such
- as ``oeRuntimeTest.tc.d.getVar("VIRTUAL-RUNTIME_init_manager")``.
-
- - *testslist and testsrequired:* Used internally. The tests
- do not need these.
-
- - *filesdir:* The absolute path to
- ``meta/lib/oeqa/runtime/files``, which contains helper files for
- tests meant for copying on the target such as small files written
- in C for compilation.
-
- - *target:* The target controller object used to deploy and
- start an image on a particular target (e.g. Qemu, SimpleRemote,
- and SystemdbootTarget). Tests usually use the following:
-
- - *ip:* The target's IP address.
-
- - *server_ip:* The host's IP address, which is usually used
- by the DNF test suite.
-
- - *run(cmd, timeout=None):* The single, most used method.
- This command is a wrapper for: ``ssh root@host "cmd"``. The
- command returns a tuple: (status, output), which are what their
- names imply - the return code of "cmd" and whatever output it
- produces. The optional timeout argument represents the number
- of seconds the test should wait for "cmd" to return. If the
- argument is "None", the test uses the default instance's
- timeout period, which is 300 seconds. If the argument is "0",
- the test runs until the command returns.
-
- - *copy_to(localpath, remotepath):*
- ``scp localpath root@ip:remotepath``.
-
- - *copy_from(remotepath, localpath):*
- ``scp root@host:remotepath localpath``.
-
-Instance Attributes
-~~~~~~~~~~~~~~~~~~~
-
-There is a single instance attribute, which is ``target``. The ``target``
-instance attribute is identical to the class attribute of the same name,
-which is described in the previous section. This attribute exists as
-both an instance and class attribute so tests can use
-``self.target.run(cmd)`` in instance methods instead of
-``oeRuntimeTest.tc.target.run(cmd)``.
-
-Installing Packages in the DUT Without the Package Manager
-----------------------------------------------------------
-
-When a test requires a package built by BitBake, it is possible to
-install that package. Installing the package does not require a package
-manager be installed in the device under test (DUT). It does, however,
-require an SSH connection and the target must be using the
-``sshcontrol`` class.
-
-.. note::
-
- This method uses ``scp`` to copy files from the host to the target, which
- causes permissions and special attributes to be lost.
-
-A JSON file is used to define the packages needed by a test. This file
-must be in the same path as the file used to define the tests.
-Furthermore, the filename must map directly to the test module name with
-a ``.json`` extension.
-
-The JSON file must include an object with the test name as keys of an
-object or an array. This object (or array of objects) uses the following
-data:
-
-- "pkg" - A mandatory string that is the name of the package to be
- installed.
-
-- "rm" - An optional boolean, which defaults to "false", that specifies
- to remove the package after the test.
-
-- "extract" - An optional boolean, which defaults to "false", that
- specifies if the package must be extracted from the package format.
- When set to "true", the package is not automatically installed into
- the DUT.
-
-Following is an example JSON file that handles test "foo" installing
-package "bar" and test "foobar" installing packages "foo" and "bar".
-Once the test is complete, the packages are removed from the DUT.
-::
-
- {
- "foo": {
- "pkg": "bar"
- },
- "foobar": [
- {
- "pkg": "foo",
- "rm": true
- },
- {
- "pkg": "bar",
- "rm": true
- }
- ]
- }
-
-Debugging Tools and Techniques
-==============================
-
-The exact method for debugging build failures depends on the nature of
-the problem and on the system's area from which the bug originates.
-Standard debugging practices such as comparison against the last known
-working version with examination of the changes and the re-application
-of steps to identify the one causing the problem are valid for the Yocto
-Project just as they are for any other system. Even though it is
-impossible to detail every possible potential failure, this section
-provides some general tips to aid in debugging given a variety of
-situations.
-
-.. note::
-
- A useful feature for debugging is the error reporting tool.
- Configuring the Yocto Project to use this tool causes the
- OpenEmbedded build system to produce error reporting commands as part
- of the console output. You can enter the commands after the build
- completes to log error information into a common database, that can
- help you figure out what might be going wrong. For information on how
- to enable and use this feature, see the
- ":ref:`dev-manual/common-tasks:using the error reporting tool`"
- section.
-
-The following list shows the debugging topics in the remainder of this
-section:
-
-- ":ref:`dev-manual/common-tasks:viewing logs from failed tasks`" describes
- how to find and view logs from tasks that failed during the build
- process.
-
-- ":ref:`dev-manual/common-tasks:viewing variable values`" describes how to
- use the BitBake ``-e`` option to examine variable values after a
- recipe has been parsed.
-
-- ":ref:`dev-manual/common-tasks:viewing package information with \`\`oe-pkgdata-util\`\``"
- describes how to use the ``oe-pkgdata-util`` utility to query
- :term:`PKGDATA_DIR` and
- display package-related information for built packages.
-
-- ":ref:`dev-manual/common-tasks:viewing dependencies between recipes and tasks`"
- describes how to use the BitBake ``-g`` option to display recipe
- dependency information used during the build.
-
-- ":ref:`dev-manual/common-tasks:viewing task variable dependencies`" describes
- how to use the ``bitbake-dumpsig`` command in conjunction with key
- subdirectories in the
- :term:`Build Directory` to determine
- variable dependencies.
-
-- ":ref:`dev-manual/common-tasks:running specific tasks`" describes
- how to use several BitBake options (e.g. ``-c``, ``-C``, and ``-f``)
- to run specific tasks in the build chain. It can be useful to run
- tasks "out-of-order" when trying isolate build issues.
-
-- ":ref:`dev-manual/common-tasks:general BitBake problems`" describes how
- to use BitBake's ``-D`` debug output option to reveal more about what
- BitBake is doing during the build.
-
-- ":ref:`dev-manual/common-tasks:building with no dependencies`"
- describes how to use the BitBake ``-b`` option to build a recipe
- while ignoring dependencies.
-
-- ":ref:`dev-manual/common-tasks:recipe logging mechanisms`"
- describes how to use the many recipe logging functions to produce
- debugging output and report errors and warnings.
-
-- ":ref:`dev-manual/common-tasks:debugging parallel make races`"
- describes how to debug situations where the build consists of several
- parts that are run simultaneously and when the output or result of
- one part is not ready for use with a different part of the build that
- depends on that output.
-
-- ":ref:`dev-manual/common-tasks:debugging with the gnu project debugger (gdb) remotely`"
- describes how to use GDB to allow you to examine running programs, which can
- help you fix problems.
-
-- ":ref:`dev-manual/common-tasks:debugging with the gnu project debugger (gdb) on the target`"
- describes how to use GDB directly on target hardware for debugging.
-
-- ":ref:`dev-manual/common-tasks:other debugging tips`" describes
- miscellaneous debugging tips that can be useful.
-
-Viewing Logs from Failed Tasks
-------------------------------
-
-You can find the log for a task in the file
-``${``\ :term:`WORKDIR`\ ``}/temp/log.do_``\ `taskname`.
-For example, the log for the
-:ref:`ref-tasks-compile` task of the
-QEMU minimal image for the x86 machine (``qemux86``) might be in
-``tmp/work/qemux86-poky-linux/core-image-minimal/1.0-r0/temp/log.do_compile``.
-To see the commands :term:`BitBake` ran
-to generate a log, look at the corresponding ``run.do_``\ `taskname` file
-in the same directory.
-
-``log.do_``\ `taskname` and ``run.do_``\ `taskname` are actually symbolic
-links to ``log.do_``\ `taskname`\ ``.``\ `pid` and
-``log.run_``\ `taskname`\ ``.``\ `pid`, where `pid` is the PID the task had
-when it ran. The symlinks always point to the files corresponding to the
-most recent run.
-
-Viewing Variable Values
------------------------
-
-Sometimes you need to know the value of a variable as a result of
-BitBake's parsing step. This could be because some unexpected behavior
-occurred in your project. Perhaps an attempt to :ref:`modify a variable
-<bitbake:bitbake-user-manual/bitbake-user-manual-metadata:modifying existing
-variables>` did not work out as expected.
-
-BitBake's ``-e`` option is used to display variable values after
-parsing. The following command displays the variable values after the
-configuration files (i.e. ``local.conf``, ``bblayers.conf``,
-``bitbake.conf`` and so forth) have been parsed::
-
- $ bitbake -e
-
-The following command displays variable values after a specific recipe has
-been parsed. The variables include those from the configuration as well::
-
- $ bitbake -e recipename
-
-.. note::
-
- Each recipe has its own private set of variables (datastore).
- Internally, after parsing the configuration, a copy of the resulting
- datastore is made prior to parsing each recipe. This copying implies
- that variables set in one recipe will not be visible to other
- recipes.
-
- Likewise, each task within a recipe gets a private datastore based on
- the recipe datastore, which means that variables set within one task
- will not be visible to other tasks.
-
-In the output of ``bitbake -e``, each variable is preceded by a
-description of how the variable got its value, including temporary
-values that were later overridden. This description also includes
-variable flags (varflags) set on the variable. The output can be very
-helpful during debugging.
-
-Variables that are exported to the environment are preceded by
-``export`` in the output of ``bitbake -e``. See the following example::
-
- export CC="i586-poky-linux-gcc -m32 -march=i586 --sysroot=/home/ulf/poky/build/tmp/sysroots/qemux86"
-
-In addition to variable values, the output of the ``bitbake -e`` and
-``bitbake -e`` recipe commands includes the following information:
-
-- The output starts with a tree listing all configuration files and
- classes included globally, recursively listing the files they include
- or inherit in turn. Much of the behavior of the OpenEmbedded build
- system (including the behavior of the :ref:`ref-manual/tasks:normal recipe build tasks`) is
- implemented in the
- :ref:`base <ref-classes-base>` class and the
- classes it inherits, rather than being built into BitBake itself.
-
-- After the variable values, all functions appear in the output. For
- shell functions, variables referenced within the function body are
- expanded. If a function has been modified using overrides or using
- override-style operators like ``:append`` and ``:prepend``, then the
- final assembled function body appears in the output.
-
-Viewing Package Information with ``oe-pkgdata-util``
-----------------------------------------------------
-
-You can use the ``oe-pkgdata-util`` command-line utility to query
-:term:`PKGDATA_DIR` and display
-various package-related information. When you use the utility, you must
-use it to view information on packages that have already been built.
-
-Following are a few of the available ``oe-pkgdata-util`` subcommands.
-
-.. note::
-
- You can use the standard \* and ? globbing wildcards as part of
- package names and paths.
-
-- ``oe-pkgdata-util list-pkgs [pattern]``: Lists all packages
- that have been built, optionally limiting the match to packages that
- match pattern.
-
-- ``oe-pkgdata-util list-pkg-files package ...``: Lists the
- files and directories contained in the given packages.
-
- .. note::
-
- A different way to view the contents of a package is to look at
- the
- ``${``\ :term:`WORKDIR`\ ``}/packages-split``
- directory of the recipe that generates the package. This directory
- is created by the
- :ref:`ref-tasks-package` task
- and has one subdirectory for each package the recipe generates,
- which contains the files stored in that package.
-
- If you want to inspect the ``${WORKDIR}/packages-split``
- directory, make sure that
- :ref:`rm_work <ref-classes-rm-work>` is not
- enabled when you build the recipe.
-
-- ``oe-pkgdata-util find-path path ...``: Lists the names of
- the packages that contain the given paths. For example, the following
- tells us that ``/usr/share/man/man1/make.1`` is contained in the
- ``make-doc`` package::
-
- $ oe-pkgdata-util find-path /usr/share/man/man1/make.1
- make-doc: /usr/share/man/man1/make.1
-
-- ``oe-pkgdata-util lookup-recipe package ...``: Lists the name
- of the recipes that produce the given packages.
-
-For more information on the ``oe-pkgdata-util`` command, use the help
-facility::
-
- $ oe-pkgdata-util --help
- $ oe-pkgdata-util subcommand --help
-
-Viewing Dependencies Between Recipes and Tasks
-----------------------------------------------
-
-Sometimes it can be hard to see why BitBake wants to build other recipes
-before the one you have specified. Dependency information can help you
-understand why a recipe is built.
-
-To generate dependency information for a recipe, run the following
-command::
-
- $ bitbake -g recipename
-
-This command writes the following files in the current directory:
-
-- ``pn-buildlist``: A list of recipes/targets involved in building
- `recipename`. "Involved" here means that at least one task from the
- recipe needs to run when building `recipename` from scratch. Targets
- that are in
- :term:`ASSUME_PROVIDED`
- are not listed.
-
-- ``task-depends.dot``: A graph showing dependencies between tasks.
-
-The graphs are in
-`DOT <https://en.wikipedia.org/wiki/DOT_%28graph_description_language%29>`__
-format and can be converted to images (e.g. using the ``dot`` tool from
-`Graphviz <https://www.graphviz.org/>`__).
-
-.. note::
-
- - DOT files use a plain text format. The graphs generated using the
- ``bitbake -g`` command are often so large as to be difficult to
- read without special pruning (e.g. with BitBake's ``-I`` option)
- and processing. Despite the form and size of the graphs, the
- corresponding ``.dot`` files can still be possible to read and
- provide useful information.
-
- As an example, the ``task-depends.dot`` file contains lines such
- as the following::
-
- "libxslt.do_configure" -> "libxml2.do_populate_sysroot"
-
- The above example line reveals that the
- :ref:`ref-tasks-configure`
- task in ``libxslt`` depends on the
- :ref:`ref-tasks-populate_sysroot`
- task in ``libxml2``, which is a normal
- :term:`DEPENDS` dependency
- between the two recipes.
-
- - For an example of how ``.dot`` files can be processed, see the
- ``scripts/contrib/graph-tool`` Python script, which finds and
- displays paths between graph nodes.
-
-You can use a different method to view dependency information by using
-the following command::
-
- $ bitbake -g -u taskexp recipename
-
-This command
-displays a GUI window from which you can view build-time and runtime
-dependencies for the recipes involved in building recipename.
-
-Viewing Task Variable Dependencies
-----------------------------------
-
-As mentioned in the
-":ref:`bitbake:bitbake-user-manual/bitbake-user-manual-execution:checksums (signatures)`" section of the BitBake
-User Manual, BitBake tries to automatically determine what variables a
-task depends on so that it can rerun the task if any values of the
-variables change. This determination is usually reliable. However, if
-you do things like construct variable names at runtime, then you might
-have to manually declare dependencies on those variables using
-``vardeps`` as described in the
-":ref:`bitbake:bitbake-user-manual/bitbake-user-manual-metadata:variable flags`" section of the BitBake
-User Manual.
-
-If you are unsure whether a variable dependency is being picked up
-automatically for a given task, you can list the variable dependencies
-BitBake has determined by doing the following:
-
-1. Build the recipe containing the task::
-
- $ bitbake recipename
-
-2. Inside the :term:`STAMPS_DIR`
- directory, find the signature data (``sigdata``) file that
- corresponds to the task. The ``sigdata`` files contain a pickled
- Python database of all the metadata that went into creating the input
- checksum for the task. As an example, for the
- :ref:`ref-tasks-fetch` task of the
- ``db`` recipe, the ``sigdata`` file might be found in the following
- location::
-
- ${BUILDDIR}/tmp/stamps/i586-poky-linux/db/6.0.30-r1.do_fetch.sigdata.7c048c18222b16ff0bcee2000ef648b1
-
- For tasks that are accelerated through the shared state
- (:ref:`sstate <overview-manual/concepts:shared state cache>`) cache, an
- additional ``siginfo`` file is written into
- :term:`SSTATE_DIR` along with
- the cached task output. The ``siginfo`` files contain exactly the
- same information as ``sigdata`` files.
-
-3. Run ``bitbake-dumpsig`` on the ``sigdata`` or ``siginfo`` file. Here
- is an example::
-
- $ bitbake-dumpsig ${BUILDDIR}/tmp/stamps/i586-poky-linux/db/6.0.30-r1.do_fetch.sigdata.7c048c18222b16ff0bcee2000ef648b1
-
- In the output of the above command, you will find a line like the
- following, which lists all the (inferred) variable dependencies for
- the task. This list also includes indirect dependencies from
- variables depending on other variables, recursively.
- ::
-
- Task dependencies: ['PV', 'SRCREV', 'SRC_URI', 'SRC_URI[md5sum]', 'SRC_URI[sha256sum]', 'base_do_fetch']
-
- .. note::
-
- Functions (e.g. ``base_do_fetch``) also count as variable dependencies.
- These functions in turn depend on the variables they reference.
-
- The output of ``bitbake-dumpsig`` also includes the value each
- variable had, a list of dependencies for each variable, and
- :term:`BB_BASEHASH_IGNORE_VARS`
- information.
-
-There is also a ``bitbake-diffsigs`` command for comparing two
-``siginfo`` or ``sigdata`` files. This command can be helpful when
-trying to figure out what changed between two versions of a task. If you
-call ``bitbake-diffsigs`` with just one file, the command behaves like
-``bitbake-dumpsig``.
-
-You can also use BitBake to dump out the signature construction
-information without executing tasks by using either of the following
-BitBake command-line options::
-
- ‐‐dump-signatures=SIGNATURE_HANDLER
- -S SIGNATURE_HANDLER
-
-
-.. note::
-
- Two common values for `SIGNATURE_HANDLER` are "none" and "printdiff", which
- dump only the signature or compare the dumped signature with the cached one,
- respectively.
-
-Using BitBake with either of these options causes BitBake to dump out
-``sigdata`` files in the ``stamps`` directory for every task it would
-have executed instead of building the specified target package.
-
-Viewing Metadata Used to Create the Input Signature of a Shared State Task
---------------------------------------------------------------------------
-
-Seeing what metadata went into creating the input signature of a shared
-state (sstate) task can be a useful debugging aid. This information is
-available in signature information (``siginfo``) files in
-:term:`SSTATE_DIR`. For
-information on how to view and interpret information in ``siginfo``
-files, see the
-":ref:`dev-manual/common-tasks:viewing task variable dependencies`" section.
-
-For conceptual information on shared state, see the
-":ref:`overview-manual/concepts:shared state`"
-section in the Yocto Project Overview and Concepts Manual.
-
-Invalidating Shared State to Force a Task to Run
-------------------------------------------------
-
-The OpenEmbedded build system uses
-:ref:`checksums <overview-manual/concepts:checksums (signatures)>` and
-:ref:`overview-manual/concepts:shared state` cache to avoid unnecessarily
-rebuilding tasks. Collectively, this scheme is known as "shared state
-code".
-
-As with all schemes, this one has some drawbacks. It is possible that
-you could make implicit changes to your code that the checksum
-calculations do not take into account. These implicit changes affect a
-task's output but do not trigger the shared state code into rebuilding a
-recipe. Consider an example during which a tool changes its output.
-Assume that the output of ``rpmdeps`` changes. The result of the change
-should be that all the ``package`` and ``package_write_rpm`` shared
-state cache items become invalid. However, because the change to the
-output is external to the code and therefore implicit, the associated
-shared state cache items do not become invalidated. In this case, the
-build process uses the cached items rather than running the task again.
-Obviously, these types of implicit changes can cause problems.
-
-To avoid these problems during the build, you need to understand the
-effects of any changes you make. Realize that changes you make directly
-to a function are automatically factored into the checksum calculation.
-Thus, these explicit changes invalidate the associated area of shared
-state cache. However, you need to be aware of any implicit changes that
-are not obvious changes to the code and could affect the output of a
-given task.
-
-When you identify an implicit change, you can easily take steps to
-invalidate the cache and force the tasks to run. The steps you can take
-are as simple as changing a function's comments in the source code. For
-example, to invalidate package shared state files, change the comment
-statements of
-:ref:`ref-tasks-package` or the
-comments of one of the functions it calls. Even though the change is
-purely cosmetic, it causes the checksum to be recalculated and forces
-the build system to run the task again.
-
-.. note::
-
- For an example of a commit that makes a cosmetic change to invalidate
- shared state, see this
- :yocto_git:`commit </poky/commit/meta/classes/package.bbclass?id=737f8bbb4f27b4837047cb9b4fbfe01dfde36d54>`.
-
-Running Specific Tasks
-----------------------
-
-Any given recipe consists of a set of tasks. The standard BitBake
-behavior in most cases is: ``do_fetch``, ``do_unpack``, ``do_patch``,
-``do_configure``, ``do_compile``, ``do_install``, ``do_package``,
-``do_package_write_*``, and ``do_build``. The default task is
-``do_build`` and any tasks on which it depends build first. Some tasks,
-such as ``do_devshell``, are not part of the default build chain. If you
-wish to run a task that is not part of the default build chain, you can
-use the ``-c`` option in BitBake. Here is an example::
-
- $ bitbake matchbox-desktop -c devshell
-
-The ``-c`` option respects task dependencies, which means that all other
-tasks (including tasks from other recipes) that the specified task
-depends on will be run before the task. Even when you manually specify a
-task to run with ``-c``, BitBake will only run the task if it considers
-it "out of date". See the
-":ref:`overview-manual/concepts:stamp files and the rerunning of tasks`"
-section in the Yocto Project Overview and Concepts Manual for how
-BitBake determines whether a task is "out of date".
-
-If you want to force an up-to-date task to be rerun (e.g. because you
-made manual modifications to the recipe's
-:term:`WORKDIR` that you want to try
-out), then you can use the ``-f`` option.
-
-.. note::
-
- The reason ``-f`` is never required when running the
- :ref:`ref-tasks-devshell` task is because the
- [\ :ref:`nostamp <bitbake:bitbake-user-manual/bitbake-user-manual-metadata:variable flags>`\ ]
- variable flag is already set for the task.
-
-The following example shows one way you can use the ``-f`` option::
-
- $ bitbake matchbox-desktop
- .
- .
- make some changes to the source code in the work directory
- .
- .
- $ bitbake matchbox-desktop -c compile -f
- $ bitbake matchbox-desktop
-
-This sequence first builds and then recompiles ``matchbox-desktop``. The
-last command reruns all tasks (basically the packaging tasks) after the
-compile. BitBake recognizes that the ``do_compile`` task was rerun and
-therefore understands that the other tasks also need to be run again.
-
-Another, shorter way to rerun a task and all
-:ref:`ref-manual/tasks:normal recipe build tasks`
-that depend on it is to use the ``-C`` option.
-
-.. note::
-
- This option is upper-cased and is separate from the ``-c``
- option, which is lower-cased.
-
-Using this option invalidates the given task and then runs the
-:ref:`ref-tasks-build` task, which is
-the default task if no task is given, and the tasks on which it depends.
-You could replace the final two commands in the previous example with
-the following single command::
-
- $ bitbake matchbox-desktop -C compile
-
-Internally, the ``-f`` and ``-C`` options work by tainting (modifying)
-the input checksum of the specified task. This tainting indirectly
-causes the task and its dependent tasks to be rerun through the normal
-task dependency mechanisms.
-
-.. note::
-
- BitBake explicitly keeps track of which tasks have been tainted in
- this fashion, and will print warnings such as the following for
- builds involving such tasks:
-
- .. code-block:: none
-
- WARNING: /home/ulf/poky/meta/recipes-sato/matchbox-desktop/matchbox-desktop_2.1.bb.do_compile is tainted from a forced run
-
-
- The purpose of the warning is to let you know that the work directory
- and build output might not be in the clean state they would be in for
- a "normal" build, depending on what actions you took. To get rid of
- such warnings, you can remove the work directory and rebuild the
- recipe, as follows::
-
- $ bitbake matchbox-desktop -c clean
- $ bitbake matchbox-desktop
-
-
-You can view a list of tasks in a given package by running the
-``do_listtasks`` task as follows::
-
- $ bitbake matchbox-desktop -c listtasks
-
-The results appear as output to the console and are also in
-the file ``${WORKDIR}/temp/log.do_listtasks``.
-
-General BitBake Problems
-------------------------
-
-You can see debug output from BitBake by using the ``-D`` option. The
-debug output gives more information about what BitBake is doing and the
-reason behind it. Each ``-D`` option you use increases the logging
-level. The most common usage is ``-DDD``.
-
-The output from ``bitbake -DDD -v targetname`` can reveal why BitBake
-chose a certain version of a package or why BitBake picked a certain
-provider. This command could also help you in a situation where you
-think BitBake did something unexpected.
-
-Building with No Dependencies
------------------------------
-
-To build a specific recipe (``.bb`` file), you can use the following
-command form::
-
- $ bitbake -b somepath/somerecipe.bb
-
-This command form does
-not check for dependencies. Consequently, you should use it only when
-you know existing dependencies have been met.
-
-.. note::
-
- You can also specify fragments of the filename. In this case, BitBake
- checks for a unique match.
-
-Recipe Logging Mechanisms
--------------------------
-
-The Yocto Project provides several logging functions for producing
-debugging output and reporting errors and warnings. For Python
-functions, the following logging functions are available. All of these functions
-log to ``${T}/log.do_``\ `task`, and can also log to standard output
-(stdout) with the right settings:
-
-- ``bb.plain(msg)``: Writes msg as is to the log while also
- logging to stdout.
-
-- ``bb.note(msg)``: Writes "NOTE: msg" to the log. Also logs to
- stdout if BitBake is called with "-v".
-
-- ``bb.debug(level, msg)``: Writes "DEBUG: msg" to the
- log. Also logs to stdout if the log level is greater than or equal to
- level. See the ":ref:`bitbake:bitbake-user-manual/bitbake-user-manual-intro:usage and syntax`" option
- in the BitBake User Manual for more information.
-
-- ``bb.warn(msg)``: Writes "WARNING: msg" to the log while also
- logging to stdout.
-
-- ``bb.error(msg)``: Writes "ERROR: msg" to the log while also
- logging to standard out (stdout).
-
- .. note::
-
- Calling this function does not cause the task to fail.
-
-- ``bb.fatal(msg)``: This logging function is similar to
- ``bb.error(msg)`` but also causes the calling task to fail.
-
- .. note::
-
- ``bb.fatal()`` raises an exception, which means you do not need to put a
- "return" statement after the function.
-
-The same logging functions are also available in shell functions, under
-the names ``bbplain``, ``bbnote``, ``bbdebug``, ``bbwarn``, ``bberror``,
-and ``bbfatal``. The
-:ref:`logging <ref-classes-logging>` class
-implements these functions. See that class in the ``meta/classes``
-folder of the :term:`Source Directory` for information.
-
-Logging With Python
-~~~~~~~~~~~~~~~~~~~
-
-When creating recipes using Python and inserting code that handles build
-logs, keep in mind the goal is to have informative logs while keeping
-the console as "silent" as possible. Also, if you want status messages
-in the log, use the "debug" loglevel.
-
-Following is an example written in Python. The code handles logging for
-a function that determines the number of tasks needed to be run. See the
-":ref:`ref-tasks-listtasks`"
-section for additional information::
-
- python do_listtasks() {
- bb.debug(2, "Starting to figure out the task list")
- if noteworthy_condition:
- bb.note("There are 47 tasks to run")
- bb.debug(2, "Got to point xyz")
- if warning_trigger:
- bb.warn("Detected warning_trigger, this might be a problem later.")
- if recoverable_error:
- bb.error("Hit recoverable_error, you really need to fix this!")
- if fatal_error:
- bb.fatal("fatal_error detected, unable to print the task list")
- bb.plain("The tasks present are abc")
- bb.debug(2, "Finished figuring out the tasklist")
- }
-
-Logging With Bash
-~~~~~~~~~~~~~~~~~
-
-When creating recipes using Bash and inserting code that handles build
-logs, you have the same goals - informative with minimal console output.
-The syntax you use for recipes written in Bash is similar to that of
-recipes written in Python described in the previous section.
-
-Following is an example written in Bash. The code logs the progress of
-the ``do_my_function`` function.
-::
-
- do_my_function() {
- bbdebug 2 "Running do_my_function"
- if [ exceptional_condition ]; then
- bbnote "Hit exceptional_condition"
- fi
- bbdebug 2 "Got to point xyz"
- if [ warning_trigger ]; then
- bbwarn "Detected warning_trigger, this might cause a problem later."
- fi
- if [ recoverable_error ]; then
- bberror "Hit recoverable_error, correcting"
- fi
- if [ fatal_error ]; then
- bbfatal "fatal_error detected"
- fi
- bbdebug 2 "Completed do_my_function"
- }
-
-
-Debugging Parallel Make Races
------------------------------
-
-A parallel ``make`` race occurs when the build consists of several parts
-that are run simultaneously and a situation occurs when the output or
-result of one part is not ready for use with a different part of the
-build that depends on that output. Parallel make races are annoying and
-can sometimes be difficult to reproduce and fix. However, there are some simple
-tips and tricks that can help you debug and fix them. This section
-presents a real-world example of an error encountered on the Yocto
-Project autobuilder and the process used to fix it.
-
-.. note::
-
- If you cannot properly fix a ``make`` race condition, you can work around it
- by clearing either the :term:`PARALLEL_MAKE` or :term:`PARALLEL_MAKEINST`
- variables.
-
-The Failure
-~~~~~~~~~~~
-
-For this example, assume that you are building an image that depends on
-the "neard" package. And, during the build, BitBake runs into problems
-and creates the following output.
-
-.. note::
-
- This example log file has longer lines artificially broken to make
- the listing easier to read.
-
-If you examine the output or the log file, you see the failure during
-``make``:
-
-.. code-block:: none
-
- | DEBUG: SITE files ['endian-little', 'bit-32', 'ix86-common', 'common-linux', 'common-glibc', 'i586-linux', 'common']
- | DEBUG: Executing shell function do_compile
- | NOTE: make -j 16
- | make --no-print-directory all-am
- | /bin/mkdir -p include/near
- | /bin/mkdir -p include/near
- | /bin/mkdir -p include/near
- | ln -s /home/pokybuild/yocto-autobuilder/nightly-x86/build/build/tmp/work/i586-poky-linux/neard/
- 0.14-r0/neard-0.14/include/types.h include/near/types.h
- | ln -s /home/pokybuild/yocto-autobuilder/nightly-x86/build/build/tmp/work/i586-poky-linux/neard/
- 0.14-r0/neard-0.14/include/log.h include/near/log.h
- | ln -s /home/pokybuild/yocto-autobuilder/nightly-x86/build/build/tmp/work/i586-poky-linux/neard/
- 0.14-r0/neard-0.14/include/plugin.h include/near/plugin.h
- | /bin/mkdir -p include/near
- | /bin/mkdir -p include/near
- | /bin/mkdir -p include/near
- | ln -s /home/pokybuild/yocto-autobuilder/nightly-x86/build/build/tmp/work/i586-poky-linux/neard/
- 0.14-r0/neard-0.14/include/tag.h include/near/tag.h
- | /bin/mkdir -p include/near
- | ln -s /home/pokybuild/yocto-autobuilder/nightly-x86/build/build/tmp/work/i586-poky-linux/neard/
- 0.14-r0/neard-0.14/include/adapter.h include/near/adapter.h
- | /bin/mkdir -p include/near
- | ln -s /home/pokybuild/yocto-autobuilder/nightly-x86/build/build/tmp/work/i586-poky-linux/neard/
- 0.14-r0/neard-0.14/include/ndef.h include/near/ndef.h
- | ln -s /home/pokybuild/yocto-autobuilder/nightly-x86/build/build/tmp/work/i586-poky-linux/neard/
- 0.14-r0/neard-0.14/include/tlv.h include/near/tlv.h
- | /bin/mkdir -p include/near
- | /bin/mkdir -p include/near
- | ln -s /home/pokybuild/yocto-autobuilder/nightly-x86/build/build/tmp/work/i586-poky-linux/neard/
- 0.14-r0/neard-0.14/include/setting.h include/near/setting.h
- | /bin/mkdir -p include/near
- | /bin/mkdir -p include/near
- | /bin/mkdir -p include/near
- | ln -s /home/pokybuild/yocto-autobuilder/nightly-x86/build/build/tmp/work/i586-poky-linux/neard/
- 0.14-r0/neard-0.14/include/device.h include/near/device.h
- | ln -s /home/pokybuild/yocto-autobuilder/nightly-x86/build/build/tmp/work/i586-poky-linux/neard/
- 0.14-r0/neard-0.14/include/nfc_copy.h include/near/nfc_copy.h
- | ln -s /home/pokybuild/yocto-autobuilder/nightly-x86/build/build/tmp/work/i586-poky-linux/neard/
- 0.14-r0/neard-0.14/include/snep.h include/near/snep.h
- | ln -s /home/pokybuild/yocto-autobuilder/nightly-x86/build/build/tmp/work/i586-poky-linux/neard/
- 0.14-r0/neard-0.14/include/version.h include/near/version.h
- | ln -s /home/pokybuild/yocto-autobuilder/nightly-x86/build/build/tmp/work/i586-poky-linux/neard/
- 0.14-r0/neard-0.14/include/dbus.h include/near/dbus.h
- | ./src/genbuiltin nfctype1 nfctype2 nfctype3 nfctype4 p2p > src/builtin.h
- | i586-poky-linux-gcc -m32 -march=i586 --sysroot=/home/pokybuild/yocto-autobuilder/nightly-x86/
- build/build/tmp/sysroots/qemux86 -DHAVE_CONFIG_H -I. -I./include -I./src -I./gdbus -I/home/pokybuild/
- yocto-autobuilder/nightly-x86/build/build/tmp/sysroots/qemux86/usr/include/glib-2.0
- -I/home/pokybuild/yocto-autobuilder/nightly-x86/build/build/tmp/sysroots/qemux86/usr/
- lib/glib-2.0/include -I/home/pokybuild/yocto-autobuilder/nightly-x86/build/build/
- tmp/sysroots/qemux86/usr/include/dbus-1.0 -I/home/pokybuild/yocto-autobuilder/
- nightly-x86/build/build/tmp/sysroots/qemux86/usr/lib/dbus-1.0/include -I/home/pokybuild/yocto-autobuilder/
- nightly-x86/build/build/tmp/sysroots/qemux86/usr/include/libnl3
- -DNEAR_PLUGIN_BUILTIN -DPLUGINDIR=\""/usr/lib/near/plugins"\"
- -DCONFIGDIR=\""/etc/neard\"" -O2 -pipe -g -feliminate-unused-debug-types -c
- -o tools/snep-send.o tools/snep-send.c
- | In file included from tools/snep-send.c:16:0:
- | tools/../src/near.h:41:23: fatal error: near/dbus.h: No such file or directory
- | #include <near/dbus.h>
- | ^
- | compilation terminated.
- | make[1]: *** [tools/snep-send.o] Error 1
- | make[1]: *** Waiting for unfinished jobs....
- | make: *** [all] Error 2
- | ERROR: oe_runmake failed
-
-Reproducing the Error
-~~~~~~~~~~~~~~~~~~~~~
-
-Because race conditions are intermittent, they do not manifest
-themselves every time you do the build. In fact, most times the build
-will complete without problems even though the potential race condition
-exists. Thus, once the error surfaces, you need a way to reproduce it.
-
-In this example, compiling the "neard" package is causing the problem.
-So the first thing to do is build "neard" locally. Before you start the
-build, set the
-:term:`PARALLEL_MAKE` variable
-in your ``local.conf`` file to a high number (e.g. "-j 20"). Using a
-high value for :term:`PARALLEL_MAKE` increases the chances of the race
-condition showing up::
-
- $ bitbake neard
-
-Once the local build for "neard" completes, start a ``devshell`` build::
-
- $ bitbake neard -c devshell
-
-For information on how to use a ``devshell``, see the
-":ref:`dev-manual/common-tasks:using a development shell`" section.
-
-In the ``devshell``, do the following::
-
- $ make clean
- $ make tools/snep-send.o
-
-The ``devshell`` commands cause the failure to clearly
-be visible. In this case, there is a missing dependency for the ``neard``
-Makefile target. Here is some abbreviated, sample output with the
-missing dependency clearly visible at the end::
-
- i586-poky-linux-gcc -m32 -march=i586 --sysroot=/home/scott-lenovo/......
- .
- .
- .
- tools/snep-send.c
- In file included from tools/snep-send.c:16:0:
- tools/../src/near.h:41:23: fatal error: near/dbus.h: No such file or directory
- #include <near/dbus.h>
- ^
- compilation terminated.
- make: *** [tools/snep-send.o] Error 1
- $
-
-
-Creating a Patch for the Fix
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-Because there is a missing dependency for the Makefile target, you need
-to patch the ``Makefile.am`` file, which is generated from
-``Makefile.in``. You can use Quilt to create the patch::
-
- $ quilt new parallelmake.patch
- Patch patches/parallelmake.patch is now on top
- $ quilt add Makefile.am
- File Makefile.am added to patch patches/parallelmake.patch
-
-For more information on using Quilt, see the
-":ref:`dev-manual/common-tasks:using quilt in your workflow`" section.
-
-At this point you need to make the edits to ``Makefile.am`` to add the
-missing dependency. For our example, you have to add the following line
-to the file::
-
- tools/snep-send.$(OBJEXT): include/near/dbus.h
-
-Once you have edited the file, use the ``refresh`` command to create the
-patch::
-
- $ quilt refresh
- Refreshed patch patches/parallelmake.patch
-
-Once the patch file is created, you need to add it back to the originating
-recipe folder. Here is an example assuming a top-level
-:term:`Source Directory` named ``poky``::
-
- $ cp patches/parallelmake.patch poky/meta/recipes-connectivity/neard/neard
-
-The final thing you need to do to implement the fix in the build is to
-update the "neard" recipe (i.e. ``neard-0.14.bb``) so that the
-:term:`SRC_URI` statement includes
-the patch file. The recipe file is in the folder above the patch. Here
-is what the edited :term:`SRC_URI` statement would look like::
-
- SRC_URI = "${KERNELORG_MIRROR}/linux/network/nfc/${BPN}-${PV}.tar.xz \
- file://neard.in \
- file://neard.service.in \
- file://parallelmake.patch \
- "
-
-With the patch complete and moved to the correct folder and the
-:term:`SRC_URI` statement updated, you can exit the ``devshell``::
-
- $ exit
-
-Testing the Build
-~~~~~~~~~~~~~~~~~
-
-With everything in place, you can get back to trying the build again
-locally::
-
- $ bitbake neard
-
-This build should succeed.
-
-Now you can open up a ``devshell`` again and repeat the clean and make
-operations as follows::
-
- $ bitbake neard -c devshell
- $ make clean
- $ make tools/snep-send.o
-
-The build should work without issue.
-
-As with all solved problems, if they originated upstream, you need to
-submit the fix for the recipe in OE-Core and upstream so that the
-problem is taken care of at its source. See the
-":ref:`dev-manual/common-tasks:submitting a change to the yocto project`"
-section for more information.
-
-Debugging With the GNU Project Debugger (GDB) Remotely
-------------------------------------------------------
-
-GDB allows you to examine running programs, which in turn helps you to
-understand and fix problems. It also allows you to perform post-mortem
-style analysis of program crashes. GDB is available as a package within
-the Yocto Project and is installed in SDK images by default. See the
-":ref:`ref-manual/images:Images`" chapter in the Yocto
-Project Reference Manual for a description of these images. You can find
-information on GDB at https://sourceware.org/gdb/.
-
-.. note::
-
- For best results, install debug (``-dbg``) packages for the applications you
- are going to debug. Doing so makes extra debug symbols available that give
- you more meaningful output.
-
-Sometimes, due to memory or disk space constraints, it is not possible
-to use GDB directly on the remote target to debug applications. These
-constraints arise because GDB needs to load the debugging information
-and the binaries of the process being debugged. Additionally, GDB needs
-to perform many computations to locate information such as function
-names, variable names and values, stack traces and so forth - even
-before starting the debugging process. These extra computations place
-more load on the target system and can alter the characteristics of the
-program being debugged.
-
-To help get past the previously mentioned constraints, there are two
-methods you can use: running a debuginfod server and using gdbserver.
-
-Using the debuginfod server method
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-``debuginfod`` from ``elfutils`` is a way to distribute ``debuginfo`` files.
-Running a ``debuginfod`` server makes debug symbols readily available,
-which means you don't need to download debugging information
-and the binaries of the process being debugged. You can just fetch
-debug symbols from the server.
-
-To run a ``debuginfod`` server, you need to do the following:
-
-- Ensure that ``debuginfod`` is present in :term:`DISTRO_FEATURES`
- (it already is in ``OpenEmbedded-core`` defaults and ``poky`` reference distribution).
- If not, set in your distro config file or in ``local.conf``::
-
- DISTRO_FEATURES:append = " debuginfod"
-
- This distro feature enables the server and client library in ``elfutils``,
- and enables ``debuginfod`` support in clients (at the moment, ``gdb`` and ``binutils``).
-
-- Run the following commands to launch the ``debuginfod`` server on the host::
-
- $ oe-debuginfod
-
-- To use ``debuginfod`` on the target, you need to know the ip:port where
- ``debuginfod`` is listening on the host (port defaults to 8002), and export
- that into the shell environment, for example in ``qemu``::
-
- root@qemux86-64:~# export DEBUGINFOD_URLS="http://192.168.7.1:8002/"
-
-- Then debug info fetching should simply work when running the target ``gdb``,
- ``readelf`` or ``objdump``, for example::
-
- root@qemux86-64:~# gdb /bin/cat
- ...
- Reading symbols from /bin/cat...
- Downloading separate debug info for /bin/cat...
- Reading symbols from /home/root/.cache/debuginfod_client/923dc4780cfbc545850c616bffa884b6b5eaf322/debuginfo...
-
-- It's also possible to use ``debuginfod-find`` to just query the server::
-
- root@qemux86-64:~# debuginfod-find debuginfo /bin/ls
- /home/root/.cache/debuginfod_client/356edc585f7f82d46f94fcb87a86a3fe2d2e60bd/debuginfo
-
-
-Using the gdbserver method
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-gdbserver, which runs on the remote target and does not load any
-debugging information from the debugged process. Instead, a GDB instance
-processes the debugging information that is run on a remote computer -
-the host GDB. The host GDB then sends control commands to gdbserver to
-make it stop or start the debugged program, as well as read or write
-memory regions of that debugged program. All the debugging information
-loaded and processed as well as all the heavy debugging is done by the
-host GDB. Offloading these processes gives the gdbserver running on the
-target a chance to remain small and fast.
-
-Because the host GDB is responsible for loading the debugging
-information and for doing the necessary processing to make actual
-debugging happen, you have to make sure the host can access the
-unstripped binaries complete with their debugging information and also
-be sure the target is compiled with no optimizations. The host GDB must
-also have local access to all the libraries used by the debugged
-program. Because gdbserver does not need any local debugging
-information, the binaries on the remote target can remain stripped.
-However, the binaries must also be compiled without optimization so they
-match the host's binaries.
-
-To remain consistent with GDB documentation and terminology, the binary
-being debugged on the remote target machine is referred to as the
-"inferior" binary. For documentation on GDB see the `GDB
-site <https://sourceware.org/gdb/documentation/>`__.
-
-The following steps show you how to debug using the GNU project
-debugger.
-
-1. *Configure your build system to construct the companion debug
- filesystem:*
-
- In your ``local.conf`` file, set the following::
-
- IMAGE_GEN_DEBUGFS = "1"
- IMAGE_FSTYPES_DEBUGFS = "tar.bz2"
-
- These options cause the
- OpenEmbedded build system to generate a special companion filesystem
- fragment, which contains the matching source and debug symbols to
- your deployable filesystem. The build system does this by looking at
- what is in the deployed filesystem, and pulling the corresponding
- ``-dbg`` packages.
-
- The companion debug filesystem is not a complete filesystem, but only
- contains the debug fragments. This filesystem must be combined with
- the full filesystem for debugging. Subsequent steps in this procedure
- show how to combine the partial filesystem with the full filesystem.
-
-2. *Configure the system to include gdbserver in the target filesystem:*
-
- Make the following addition in your ``local.conf`` file::
-
- EXTRA_IMAGE_FEATURES:append = " tools-debug"
-
- The change makes
- sure the ``gdbserver`` package is included.
-
-3. *Build the environment:*
-
- Use the following command to construct the image and the companion
- Debug Filesystem::
-
- $ bitbake image
-
- Build the cross GDB component and
- make it available for debugging. Build the SDK that matches the
- image. Building the SDK is best for a production build that can be
- used later for debugging, especially during long term maintenance::
-
- $ bitbake -c populate_sdk image
-
- Alternatively, you can build the minimal toolchain components that
- match the target. Doing so creates a smaller than typical SDK and
- only contains a minimal set of components with which to build simple
- test applications, as well as run the debugger::
-
- $ bitbake meta-toolchain
-
- A final method is to build Gdb itself within the build system::
-
- $ bitbake gdb-cross-<architecture>
-
- Doing so produces a temporary copy of
- ``cross-gdb`` you can use for debugging during development. While
- this is the quickest approach, the two previous methods in this step
- are better when considering long-term maintenance strategies.
-
- .. note::
-
- If you run ``bitbake gdb-cross``, the OpenEmbedded build system suggests
- the actual image (e.g. ``gdb-cross-i586``). The suggestion is usually the
- actual name you want to use.
-
-4. *Set up the* ``debugfs``\ *:*
-
- Run the following commands to set up the ``debugfs``::
-
- $ mkdir debugfs
- $ cd debugfs
- $ tar xvfj build-dir/tmp/deploy/images/machine/image.rootfs.tar.bz2
- $ tar xvfj build-dir/tmp/deploy/images/machine/image-dbg.rootfs.tar.bz2
-
-5. *Set up GDB:*
-
- Install the SDK (if you built one) and then source the correct
- environment file. Sourcing the environment file puts the SDK in your
- ``PATH`` environment variable and sets ``$GDB`` to the SDK's debugger.
-
- If you are using the build system, Gdb is located in
- `build-dir`\ ``/tmp/sysroots/``\ `host`\ ``/usr/bin/``\ `architecture`\ ``/``\ `architecture`\ ``-gdb``
-
-6. *Boot the target:*
-
- For information on how to run QEMU, see the `QEMU
- Documentation <https://wiki.qemu.org/Documentation/GettingStartedDevelopers>`__.
-
- .. note::
-
- Be sure to verify that your host can access the target via TCP.
-
-7. *Debug a program:*
-
- Debugging a program involves running gdbserver on the target and then
- running Gdb on the host. The example in this step debugs ``gzip``:
-
- .. code-block:: shell
-
- root@qemux86:~# gdbserver localhost:1234 /bin/gzip —help
-
- For
- additional gdbserver options, see the `GDB Server
- Documentation <https://www.gnu.org/software/gdb/documentation/>`__.
-
- After running gdbserver on the target, you need to run Gdb on the
- host and configure it and connect to the target. Use these commands::
-
- $ cd directory-holding-the-debugfs-directory
- $ arch-gdb
- (gdb) set sysroot debugfs
- (gdb) set substitute-path /usr/src/debug debugfs/usr/src/debug
- (gdb) target remote IP-of-target:1234
-
- At this
- point, everything should automatically load (i.e. matching binaries,
- symbols and headers).
-
- .. note::
-
- The Gdb ``set`` commands in the previous example can be placed into the
- users ``~/.gdbinit`` file. Upon starting, Gdb automatically runs whatever
- commands are in that file.
-
-8. *Deploying without a full image rebuild:*
-
- In many cases, during development you want a quick method to deploy a
- new binary to the target and debug it, without waiting for a full
- image build.
-
- One approach to solving this situation is to just build the component
- you want to debug. Once you have built the component, copy the
- executable directly to both the target and the host ``debugfs``.
-
- If the binary is processed through the debug splitting in
- OpenEmbedded, you should also copy the debug items (i.e. ``.debug``
- contents and corresponding ``/usr/src/debug`` files) from the work
- directory. Here is an example::
-
- $ bitbake bash
- $ bitbake -c devshell bash
- $ cd ..
- $ scp packages-split/bash/bin/bash target:/bin/bash
- $ cp -a packages-split/bash-dbg/\* path/debugfs
-
-Debugging with the GNU Project Debugger (GDB) on the Target
------------------------------------------------------------
-
-The previous section addressed using GDB remotely for debugging
-purposes, which is the most usual case due to the inherent hardware
-limitations on many embedded devices. However, debugging in the target
-hardware itself is also possible with more powerful devices. This
-section describes what you need to do in order to support using GDB to
-debug on the target hardware.
-
-To support this kind of debugging, you need do the following:
-
-- Ensure that GDB is on the target. You can do this by making
- the following addition to your ``local.conf`` file::
-
- EXTRA_IMAGE_FEATURES:append = " tools-debug"
-
-- Ensure that debug symbols are present. You can do so by adding the
- corresponding ``-dbg`` package to :term:`IMAGE_INSTALL`::
-
- IMAGE_INSTALL:append = " packagename-dbg"
-
- Alternatively, you can add the following to ``local.conf`` to include
- all the debug symbols::
-
- EXTRA_IMAGE_FEATURES:append = " dbg-pkgs"
-
-.. note::
-
- To improve the debug information accuracy, you can reduce the level
- of optimization used by the compiler. For example, when adding the
- following line to your ``local.conf`` file, you will reduce optimization
- from :term:`FULL_OPTIMIZATION` of "-O2" to :term:`DEBUG_OPTIMIZATION`
- of "-O -fno-omit-frame-pointer"::
-
- DEBUG_BUILD = "1"
-
- Consider that this will reduce the application's performance and is
- recommended only for debugging purposes.
-
-Other Debugging Tips
---------------------
-
-Here are some other tips that you might find useful:
-
-- When adding new packages, it is worth watching for undesirable items
- making their way into compiler command lines. For example, you do not
- want references to local system files like ``/usr/lib/`` or
- ``/usr/include/``.
-
-- If you want to remove the ``psplash`` boot splashscreen, add
- ``psplash=false`` to the kernel command line. Doing so prevents
- ``psplash`` from loading and thus allows you to see the console. It
- is also possible to switch out of the splashscreen by switching the
- virtual console (e.g. Fn+Left or Fn+Right on a Zaurus).
-
-- Removing :term:`TMPDIR` (usually
- ``tmp/``, within the
- :term:`Build Directory`) can often fix
- temporary build issues. Removing :term:`TMPDIR` is usually a relatively
- cheap operation, because task output will be cached in
- :term:`SSTATE_DIR` (usually
- ``sstate-cache/``, which is also in the Build Directory).
-
- .. note::
-
- Removing :term:`TMPDIR` might be a workaround rather than a fix.
- Consequently, trying to determine the underlying cause of an issue before
- removing the directory is a good idea.
-
-- Understanding how a feature is used in practice within existing
- recipes can be very helpful. It is recommended that you configure
- some method that allows you to quickly search through files.
-
- Using GNU Grep, you can use the following shell function to
- recursively search through common recipe-related files, skipping
- binary files, ``.git`` directories, and the Build Directory (assuming
- its name starts with "build")::
-
- g() {
- grep -Ir \
- --exclude-dir=.git \
- --exclude-dir='build*' \
- --include='*.bb*' \
- --include='*.inc*' \
- --include='*.conf*' \
- --include='*.py*' \
- "$@"
- }
-
- Following are some usage examples::
-
- $ g FOO # Search recursively for "FOO"
- $ g -i foo # Search recursively for "foo", ignoring case
- $ g -w FOO # Search recursively for "FOO" as a word, ignoring e.g. "FOOBAR"
-
- If figuring
- out how some feature works requires a lot of searching, it might
- indicate that the documentation should be extended or improved. In
- such cases, consider filing a documentation bug using the Yocto
- Project implementation of
- :yocto_bugs:`Bugzilla <>`. For information on
- how to submit a bug against the Yocto Project, see the Yocto Project
- Bugzilla :yocto_wiki:`wiki page </Bugzilla_Configuration_and_Bug_Tracking>`
- and the
- ":ref:`dev-manual/common-tasks:submitting a defect against the yocto project`"
- section.
-
- .. note::
-
- The manuals might not be the right place to document variables
- that are purely internal and have a limited scope (e.g. internal
- variables used to implement a single ``.bbclass`` file).
-
-Making Changes to the Yocto Project
-===================================
-
-Because the Yocto Project is an open-source, community-based project,
-you can effect changes to the project. This section presents procedures
-that show you how to submit a defect against the project and how to
-submit a change.
-
-Submitting a Defect Against the Yocto Project
----------------------------------------------
-
-Use the Yocto Project implementation of
-`Bugzilla <https://www.bugzilla.org/about/>`__ to submit a defect (bug)
-against the Yocto Project. For additional information on this
-implementation of Bugzilla see the ":ref:`Yocto Project
-Bugzilla <resources-bugtracker>`" section in the
-Yocto Project Reference Manual. For more detail on any of the following
-steps, see the Yocto Project
-:yocto_wiki:`Bugzilla wiki page </Bugzilla_Configuration_and_Bug_Tracking>`.
-
-Use the following general steps to submit a bug:
-
-1. Open the Yocto Project implementation of :yocto_bugs:`Bugzilla <>`.
-
-2. Click "File a Bug" to enter a new bug.
-
-3. Choose the appropriate "Classification", "Product", and "Component"
- for which the bug was found. Bugs for the Yocto Project fall into
- one of several classifications, which in turn break down into
- several products and components. For example, for a bug against the
- ``meta-intel`` layer, you would choose "Build System, Metadata &
- Runtime", "BSPs", and "bsps-meta-intel", respectively.
-
-4. Choose the "Version" of the Yocto Project for which you found the
- bug (e.g. &DISTRO;).
-
-5. Determine and select the "Severity" of the bug. The severity
- indicates how the bug impacted your work.
-
-6. Choose the "Hardware" that the bug impacts.
-
-7. Choose the "Architecture" that the bug impacts.
-
-8. Choose a "Documentation change" item for the bug. Fixing a bug might
- or might not affect the Yocto Project documentation. If you are
- unsure of the impact to the documentation, select "Don't Know".
-
-9. Provide a brief "Summary" of the bug. Try to limit your summary to
- just a line or two and be sure to capture the essence of the bug.
-
-10. Provide a detailed "Description" of the bug. You should provide as
- much detail as you can about the context, behavior, output, and so
- forth that surrounds the bug. You can even attach supporting files
- for output from logs by using the "Add an attachment" button.
-
-11. Click the "Submit Bug" button submit the bug. A new Bugzilla number
- is assigned to the bug and the defect is logged in the bug tracking
- system.
-
-Once you file a bug, the bug is processed by the Yocto Project Bug
-Triage Team and further details concerning the bug are assigned (e.g.
-priority and owner). You are the "Submitter" of the bug and any further
-categorization, progress, or comments on the bug result in Bugzilla
-sending you an automated email concerning the particular change or
-progress to the bug.
-
-Submitting a Change to the Yocto Project
-----------------------------------------
-
-Contributions to the Yocto Project and OpenEmbedded are very welcome.
-Because the system is extremely configurable and flexible, we recognize
-that developers will want to extend, configure or optimize it for their
-specific uses.
-
-The Yocto Project uses a mailing list and a patch-based workflow that is
-similar to the Linux kernel but contains important differences. In
-general, there is a mailing list through which you can submit patches. You
-should send patches to the appropriate mailing list so that they can be
-reviewed and merged by the appropriate maintainer. The specific mailing
-list you need to use depends on the location of the code you are
-changing. Each component (e.g. layer) should have a ``README`` file that
-indicates where to send the changes and which process to follow.
-
-You can send the patch to the mailing list using whichever approach you
-feel comfortable with to generate the patch. Once sent, the patch is
-usually reviewed by the community at large. If somebody has concerns
-with the patch, they will usually voice their concern over the mailing
-list. If a patch does not receive any negative reviews, the maintainer
-of the affected layer typically takes the patch, tests it, and then
-based on successful testing, merges the patch.
-
-The "poky" repository, which is the Yocto Project's reference build
-environment, is a hybrid repository that contains several individual
-pieces (e.g. BitBake, Metadata, documentation, and so forth) built using
-the combo-layer tool. The upstream location used for submitting changes
-varies by component:
-
-- *Core Metadata:* Send your patch to the
- :oe_lists:`openembedded-core </g/openembedded-core>`
- mailing list. For example, a change to anything under the ``meta`` or
- ``scripts`` directories should be sent to this mailing list.
-
-- *BitBake:* For changes to BitBake (i.e. anything under the
- ``bitbake`` directory), send your patch to the
- :oe_lists:`bitbake-devel </g/bitbake-devel>`
- mailing list.
-
-- *"meta-\*" trees:* These trees contain Metadata. Use the
- :yocto_lists:`poky </g/poky>` mailing list.
-
-- *Documentation*: For changes to the Yocto Project documentation, use the
- :yocto_lists:`docs </g/docs>` mailing list.
-
-For changes to other layers hosted in the Yocto Project source
-repositories (i.e. ``yoctoproject.org``) and tools use the
-:yocto_lists:`Yocto Project </g/yocto/>` general mailing list.
-
-.. note::
-
- Sometimes a layer's documentation specifies to use a particular
- mailing list. If so, use that list.
-
-For additional recipes that do not fit into the core Metadata, you
-should determine which layer the recipe should go into and submit the
-change in the manner recommended by the documentation (e.g. the
-``README`` file) supplied with the layer. If in doubt, please ask on the
-Yocto general mailing list or on the openembedded-devel mailing list.
-
-You can also push a change upstream and request a maintainer to pull the
-change into the component's upstream repository. You do this by pushing
-to a contribution repository that is upstream. See the
-":ref:`overview-manual/development-environment:git workflows and the yocto project`"
-section in the Yocto Project Overview and Concepts Manual for additional
-concepts on working in the Yocto Project development environment.
-
-Maintainers commonly use ``-next`` branches to test submissions prior to
-merging patches. Thus, you can get an idea of the status of a patch based on
-whether the patch has been merged into one of these branches. The commonly
-used testing branches for OpenEmbedded-Core are as follows:
-
-- *openembedded-core "master-next" branch:* This branch is part of the
- :oe_git:`openembedded-core </openembedded-core/>` repository and contains
- proposed changes to the core metadata.
-
-- *poky "master-next" branch:* This branch is part of the
- :yocto_git:`poky </poky/>` repository and combines proposed
- changes to BitBake, the core metadata and the poky distro.
-
-Similarly, stable branches maintained by the project may have corresponding
-``-next`` branches which collect proposed changes. For example,
-``&DISTRO_NAME_NO_CAP;-next`` and ``&DISTRO_NAME_NO_CAP_MINUS_ONE;-next``
-branches in both the "openembdedded-core" and "poky" repositories.
-
-Other layers may have similar testing branches but there is no formal
-requirement or standard for these so please check the documentation for the
-layers you are contributing to.
-
-The following sections provide procedures for submitting a change.
-
-Preparing Changes for Submission
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-1. *Make Your Changes Locally:* Make your changes in your local Git
- repository. You should make small, controlled, isolated changes.
- Keeping changes small and isolated aids review, makes
- merging/rebasing easier and keeps the change history clean should
- anyone need to refer to it in future.
-
-2. *Stage Your Changes:* Stage your changes by using the ``git add``
- command on each file you changed.
-
-3. *Commit Your Changes:* Commit the change by using the ``git commit``
- command. Make sure your commit information follows standards by
- following these accepted conventions:
-
- - Be sure to include a "Signed-off-by:" line in the same style as
- required by the Linux kernel. This can be done by using the
- ``git commit -s`` command. Adding this line signifies that you,
- the submitter, have agreed to the Developer's Certificate of
- Origin 1.1 as follows:
-
- .. code-block:: none
-
- Developer's Certificate of Origin 1.1
-
- By making a contribution to this project, I certify that:
-
- (a) The contribution was created in whole or in part by me and I
- have the right to submit it under the open source license
- indicated in the file; or
-
- (b) The contribution is based upon previous work that, to the best
- of my knowledge, is covered under an appropriate open source
- license and I have the right under that license to submit that
- work with modifications, whether created in whole or in part
- by me, under the same open source license (unless I am
- permitted to submit under a different license), as indicated
- in the file; or
-
- (c) The contribution was provided directly to me by some other
- person who certified (a), (b) or (c) and I have not modified
- it.
-
- (d) I understand and agree that this project and the contribution
- are public and that a record of the contribution (including all
- personal information I submit with it, including my sign-off) is
- maintained indefinitely and may be redistributed consistent with
- this project or the open source license(s) involved.
-
- - Provide a single-line summary of the change and, if more
- explanation is needed, provide more detail in the body of the
- commit. This summary is typically viewable in the "shortlist" of
- changes. Thus, providing something short and descriptive that
- gives the reader a summary of the change is useful when viewing a
- list of many commits. You should prefix this short description
- with the recipe name (if changing a recipe), or else with the
- short form path to the file being changed.
-
- - For the body of the commit message, provide detailed information
- that describes what you changed, why you made the change, and the
- approach you used. It might also be helpful if you mention how you
- tested the change. Provide as much detail as you can in the body
- of the commit message.
-
- .. note::
-
- You do not need to provide a more detailed explanation of a
- change if the change is minor to the point of the single line
- summary providing all the information.
-
- - If the change addresses a specific bug or issue that is associated
- with a bug-tracking ID, include a reference to that ID in your
- detailed description. For example, the Yocto Project uses a
- specific convention for bug references - any commit that addresses
- a specific bug should use the following form for the detailed
- description. Be sure to use the actual bug-tracking ID from
- Bugzilla for bug-id::
-
- Fixes [YOCTO #bug-id]
-
- detailed description of change
-
-Using Email to Submit a Patch
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-Depending on the components changed, you need to submit the email to a
-specific mailing list. For some guidance on which mailing list to use,
-see the
-:ref:`list <dev-manual/common-tasks:submitting a change to the yocto project>`
-at the beginning of this section. For a description of all the available
-mailing lists, see the ":ref:`Mailing Lists <resources-mailinglist>`" section in the
-Yocto Project Reference Manual.
-
-Here is the general procedure on how to submit a patch through email
-without using the scripts once the steps in
-:ref:`dev-manual/common-tasks:preparing changes for submission` have been followed:
-
-1. *Format the Commit:* Format the commit into an email message. To
- format commits, use the ``git format-patch`` command. When you
- provide the command, you must include a revision list or a number of
- patches as part of the command. For example, either of these two
- commands takes your most recent single commit and formats it as an
- email message in the current directory::
-
- $ git format-patch -1
-
- or ::
-
- $ git format-patch HEAD~
-
- After the command is run, the current directory contains a numbered
- ``.patch`` file for the commit.
-
- If you provide several commits as part of the command, the
- ``git format-patch`` command produces a series of numbered files in
- the current directory – one for each commit. If you have more than
- one patch, you should also use the ``--cover`` option with the
- command, which generates a cover letter as the first "patch" in the
- series. You can then edit the cover letter to provide a description
- for the series of patches. For information on the
- ``git format-patch`` command, see ``GIT_FORMAT_PATCH(1)`` displayed
- using the ``man git-format-patch`` command.
-
- .. note::
-
- If you are or will be a frequent contributor to the Yocto Project
- or to OpenEmbedded, you might consider requesting a contrib area
- and the necessary associated rights.
-
-2. *Send the patches via email:* Send the patches to the recipients and
- relevant mailing lists by using the ``git send-email`` command.
-
- .. note::
-
- In order to use ``git send-email``, you must have the proper Git packages
- installed on your host.
- For Ubuntu, Debian, and Fedora the package is ``git-email``.
-
- The ``git send-email`` command sends email by using a local or remote
- Mail Transport Agent (MTA) such as ``msmtp``, ``sendmail``, or
- through a direct ``smtp`` configuration in your Git ``~/.gitconfig``
- file. If you are submitting patches through email only, it is very
- important that you submit them without any whitespace or HTML
- formatting that either you or your mailer introduces. The maintainer
- that receives your patches needs to be able to save and apply them
- directly from your emails. A good way to verify that what you are
- sending will be applicable by the maintainer is to do a dry run and
- send them to yourself and then save and apply them as the maintainer
- would.
-
- The ``git send-email`` command is the preferred method for sending
- your patches using email since there is no risk of compromising
- whitespace in the body of the message, which can occur when you use
- your own mail client. The command also has several options that let
- you specify recipients and perform further editing of the email
- message. For information on how to use the ``git send-email``
- command, see ``GIT-SEND-EMAIL(1)`` displayed using the
- ``man git-send-email`` command.
-
-The Yocto Project uses a `Patchwork instance <https://patchwork.openembedded.org/>`__
-to track the status of patches submitted to the various mailing lists and to
-support automated patch testing. Each submitted patch is checked for common
-mistakes and deviations from the expected patch format and submitters are
-notified by patchtest if such mistakes are found. This process helps to
-reduce the burden of patch review on maintainers.
-
-.. note::
-
- This system is imperfect and changes can sometimes get lost in the flow.
- Asking about the status of a patch or change is reasonable if the change
- has been idle for a while with no feedback.
-
-Using Scripts to Push a Change Upstream and Request a Pull
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-For larger patch series it is preferable to send a pull request which not
-only includes the patch but also a pointer to a branch that can be pulled
-from. This involves making a local branch for your changes, pushing this
-branch to an accessible repository and then using the ``create-pull-request``
-and ``send-pull-request`` scripts from openembedded-core to create and send a
-patch series with a link to the branch for review.
-
-Follow this procedure to push a change to an upstream "contrib" Git
-repository once the steps in :ref:`dev-manual/common-tasks:preparing changes for submission` have
-been followed:
-
-.. note::
-
- You can find general Git information on how to push a change upstream
- in the
- `Git Community Book <https://git-scm.com/book/en/v2/Distributed-Git-Distributed-Workflows>`__.
-
-1. *Push Your Commits to a "Contrib" Upstream:* If you have arranged for
- permissions to push to an upstream contrib repository, push the
- change to that repository::
-
- $ git push upstream_remote_repo local_branch_name
-
- For example, suppose you have permissions to push
- into the upstream ``meta-intel-contrib`` repository and you are
- working in a local branch named `your_name`\ ``/README``. The following
- command pushes your local commits to the ``meta-intel-contrib``
- upstream repository and puts the commit in a branch named
- `your_name`\ ``/README``::
-
- $ git push meta-intel-contrib your_name/README
-
-2. *Determine Who to Notify:* Determine the maintainer or the mailing
- list that you need to notify for the change.
-
- Before submitting any change, you need to be sure who the maintainer
- is or what mailing list that you need to notify. Use either these
- methods to find out:
-
- - *Maintenance File:* Examine the ``maintainers.inc`` file, which is
- located in the :term:`Source Directory` at
- ``meta/conf/distro/include``, to see who is responsible for code.
-
- - *Search by File:* Using :ref:`overview-manual/development-environment:git`, you can
- enter the following command to bring up a short list of all
- commits against a specific file::
-
- git shortlog -- filename
-
- Just provide the name of the file for which you are interested. The
- information returned is not ordered by history but does include a
- list of everyone who has committed grouped by name. From the list,
- you can see who is responsible for the bulk of the changes against
- the file.
-
- - *Examine the List of Mailing Lists:* For a list of the Yocto
- Project and related mailing lists, see the ":ref:`Mailing
- lists <resources-mailinglist>`" section in
- the Yocto Project Reference Manual.
-
-3. *Make a Pull Request:* Notify the maintainer or the mailing list that
- you have pushed a change by making a pull request.
-
- The Yocto Project provides two scripts that conveniently let you
- generate and send pull requests to the Yocto Project. These scripts
- are ``create-pull-request`` and ``send-pull-request``. You can find
- these scripts in the ``scripts`` directory within the
- :term:`Source Directory` (e.g.
- ``poky/scripts``).
-
- Using these scripts correctly formats the requests without
- introducing any whitespace or HTML formatting. The maintainer that
- receives your patches either directly or through the mailing list
- needs to be able to save and apply them directly from your emails.
- Using these scripts is the preferred method for sending patches.
-
- First, create the pull request. For example, the following command
- runs the script, specifies the upstream repository in the contrib
- directory into which you pushed the change, and provides a subject
- line in the created patch files::
-
- $ poky/scripts/create-pull-request -u meta-intel-contrib -s "Updated Manual Section Reference in README"
-
- Running this script forms ``*.patch`` files in a folder named
- ``pull-``\ `PID` in the current directory. One of the patch files is a
- cover letter.
-
- Before running the ``send-pull-request`` script, you must edit the
- cover letter patch to insert information about your change. After
- editing the cover letter, send the pull request. For example, the
- following command runs the script and specifies the patch directory
- and email address. In this example, the email address is a mailing
- list::
-
- $ poky/scripts/send-pull-request -p ~/meta-intel/pull-10565 -t meta-intel@lists.yoctoproject.org
-
- You need to follow the prompts as the script is interactive.
-
- .. note::
-
- For help on using these scripts, simply provide the ``-h``
- argument as follows::
-
- $ poky/scripts/create-pull-request -h
- $ poky/scripts/send-pull-request -h
-
-Responding to Patch Review
-~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-You may get feedback on your submitted patches from other community members
-or from the automated patchtest service. If issues are identified in your
-patch then it is usually necessary to address these before the patch will be
-accepted into the project. In this case you should amend the patch according
-to the feedback and submit an updated version to the relevant mailing list,
-copying in the reviewers who provided feedback to the previous version of the
-patch.
-
-The patch should be amended using ``git commit --amend`` or perhaps ``git
-rebase`` for more expert git users. You should also modify the ``[PATCH]``
-tag in the email subject line when sending the revised patch to mark the new
-iteration as ``[PATCH v2]``, ``[PATCH v3]``, etc as appropriate. This can be
-done by passing the ``-v`` argument to ``git format-patch`` with a version
-number.
-
-Lastly please ensure that you also test your revised changes. In particular
-please don't just edit the patch file written out by ``git format-patch`` and
-resend it.
-
-Submitting Changes to Stable Release Branches
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-The process for proposing changes to a Yocto Project stable branch differs
-from the steps described above. Changes to a stable branch must address
-identified bugs or CVEs and should be made carefully in order to avoid the
-risk of introducing new bugs or breaking backwards compatibility. Typically
-bug fixes must already be accepted into the master branch before they can be
-backported to a stable branch unless the bug in question does not affect the
-master branch or the fix on the master branch is unsuitable for backporting.
-
-The list of stable branches along with the status and maintainer for each
-branch can be obtained from the
-:yocto_wiki:`Releases wiki page </Releases>`.
-
-.. note::
-
- Changes will not typically be accepted for branches which are marked as
- End-Of-Life (EOL).
-
-With this in mind, the steps to submit a change for a stable branch are as
-follows:
-
-1. *Identify the bug or CVE to be fixed:* This information should be
- collected so that it can be included in your submission.
-
- See :ref:`dev-manual/common-tasks:checking for vulnerabilities`
- for details about CVE tracking.
-
-2. *Check if the fix is already present in the master branch:* This will
- result in the most straightforward path into the stable branch for the
- fix.
-
- a. *If the fix is present in the master branch - Submit a backport request
- by email:* You should send an email to the relevant stable branch
- maintainer and the mailing list with details of the bug or CVE to be
- fixed, the commit hash on the master branch that fixes the issue and
- the stable branches which you would like this fix to be backported to.
-
- b. *If the fix is not present in the master branch - Submit the fix to the
- master branch first:* This will ensure that the fix passes through the
- project's usual patch review and test processes before being accepted.
- It will also ensure that bugs are not left unresolved in the master
- branch itself. Once the fix is accepted in the master branch a backport
- request can be submitted as above.
-
- c. *If the fix is unsuitable for the master branch - Submit a patch
- directly for the stable branch:* This method should be considered as a
- last resort. It is typically necessary when the master branch is using
- a newer version of the software which includes an upstream fix for the
- issue or when the issue has been fixed on the master branch in a way
- that introduces backwards incompatible changes. In this case follow the
- steps in :ref:`dev-manual/common-tasks:preparing changes for submission` and
- :ref:`dev-manual/common-tasks:using email to submit a patch` but modify the subject header of your patch
- email to include the name of the stable branch which you are
- targetting. This can be done using the ``--subject-prefix`` argument to
- ``git format-patch``, for example to submit a patch to the dunfell
- branch use
- ``git format-patch --subject-prefix='&DISTRO_NAME_NO_CAP_MINUS_ONE;][PATCH' ...``.
-
-Working With Licenses
-=====================
-
-As mentioned in the ":ref:`overview-manual/development-environment:licensing`"
-section in the Yocto Project Overview and Concepts Manual, open source
-projects are open to the public and they consequently have different
-licensing structures in place. This section describes the mechanism by
-which the :term:`OpenEmbedded Build System`
-tracks changes to
-licensing text and covers how to maintain open source license compliance
-during your project's lifecycle. The section also describes how to
-enable commercially licensed recipes, which by default are disabled.
-
-Tracking License Changes
-------------------------
-
-The license of an upstream project might change in the future. In order
-to prevent these changes going unnoticed, the
-:term:`LIC_FILES_CHKSUM`
-variable tracks changes to the license text. The checksums are validated
-at the end of the configure step, and if the checksums do not match, the
-build will fail.
-
-Specifying the ``LIC_FILES_CHKSUM`` Variable
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-The :term:`LIC_FILES_CHKSUM` variable contains checksums of the license text
-in the source code for the recipe. Following is an example of how to
-specify :term:`LIC_FILES_CHKSUM`::
-
- LIC_FILES_CHKSUM = "file://COPYING;md5=xxxx \
- file://licfile1.txt;beginline=5;endline=29;md5=yyyy \
- file://licfile2.txt;endline=50;md5=zzzz \
- ..."
-
-.. note::
-
- - When using "beginline" and "endline", realize that line numbering
- begins with one and not zero. Also, the included lines are
- inclusive (i.e. lines five through and including 29 in the
- previous example for ``licfile1.txt``).
-
- - When a license check fails, the selected license text is included
- as part of the QA message. Using this output, you can determine
- the exact start and finish for the needed license text.
-
-The build system uses the :term:`S`
-variable as the default directory when searching files listed in
-:term:`LIC_FILES_CHKSUM`. The previous example employs the default
-directory.
-
-Consider this next example::
-
- LIC_FILES_CHKSUM = "file://src/ls.c;beginline=5;endline=16;\
- md5=bb14ed3c4cda583abc85401304b5cd4e"
- LIC_FILES_CHKSUM = "file://${WORKDIR}/license.html;md5=5c94767cedb5d6987c902ac850ded2c6"
-
-The first line locates a file in ``${S}/src/ls.c`` and isolates lines
-five through 16 as license text. The second line refers to a file in
-:term:`WORKDIR`.
-
-Note that :term:`LIC_FILES_CHKSUM` variable is mandatory for all recipes,
-unless the :term:`LICENSE` variable is set to "CLOSED".
-
-Explanation of Syntax
-~~~~~~~~~~~~~~~~~~~~~
-
-As mentioned in the previous section, the :term:`LIC_FILES_CHKSUM` variable
-lists all the important files that contain the license text for the
-source code. It is possible to specify a checksum for an entire file, or
-a specific section of a file (specified by beginning and ending line
-numbers with the "beginline" and "endline" parameters, respectively).
-The latter is useful for source files with a license notice header,
-README documents, and so forth. If you do not use the "beginline"
-parameter, then it is assumed that the text begins on the first line of
-the file. Similarly, if you do not use the "endline" parameter, it is
-assumed that the license text ends with the last line of the file.
-
-The "md5" parameter stores the md5 checksum of the license text. If the
-license text changes in any way as compared to this parameter then a
-mismatch occurs. This mismatch triggers a build failure and notifies the
-developer. Notification allows the developer to review and address the
-license text changes. Also note that if a mismatch occurs during the
-build, the correct md5 checksum is placed in the build log and can be
-easily copied to the recipe.
-
-There is no limit to how many files you can specify using the
-:term:`LIC_FILES_CHKSUM` variable. Generally, however, every project
-requires a few specifications for license tracking. Many projects have a
-"COPYING" file that stores the license information for all the source
-code files. This practice allows you to just track the "COPYING" file as
-long as it is kept up to date.
-
-.. note::
-
- - If you specify an empty or invalid "md5" parameter,
- :term:`BitBake` returns an md5
- mis-match error and displays the correct "md5" parameter value
- during the build. The correct parameter is also captured in the
- build log.
-
- - If the whole file contains only license text, you do not need to
- use the "beginline" and "endline" parameters.
-
-Enabling Commercially Licensed Recipes
---------------------------------------
-
-By default, the OpenEmbedded build system disables components that have
-commercial or other special licensing requirements. Such requirements
-are defined on a recipe-by-recipe basis through the
-:term:`LICENSE_FLAGS` variable
-definition in the affected recipe. For instance, the
-``poky/meta/recipes-multimedia/gstreamer/gst-plugins-ugly`` recipe
-contains the following statement::
-
- LICENSE_FLAGS = "commercial"
-
-Here is a
-slightly more complicated example that contains both an explicit recipe
-name and version (after variable expansion)::
-
- LICENSE_FLAGS = "license_${PN}_${PV}"
-
-In order for a component restricted by a
-:term:`LICENSE_FLAGS` definition to be enabled and included in an image, it
-needs to have a matching entry in the global
-:term:`LICENSE_FLAGS_ACCEPTED`
-variable, which is a variable typically defined in your ``local.conf``
-file. For example, to enable the
-``poky/meta/recipes-multimedia/gstreamer/gst-plugins-ugly`` package, you
-could add either the string "commercial_gst-plugins-ugly" or the more
-general string "commercial" to :term:`LICENSE_FLAGS_ACCEPTED`. See the
-":ref:`dev-manual/common-tasks:license flag matching`" section for a full
-explanation of how :term:`LICENSE_FLAGS` matching works. Here is the
-example::
-
- LICENSE_FLAGS_ACCEPTED = "commercial_gst-plugins-ugly"
-
-Likewise, to additionally enable the package built from the recipe
-containing ``LICENSE_FLAGS = "license_${PN}_${PV}"``, and assuming that
-the actual recipe name was ``emgd_1.10.bb``, the following string would
-enable that package as well as the original ``gst-plugins-ugly``
-package::
-
- LICENSE_FLAGS_ACCEPTED = "commercial_gst-plugins-ugly license_emgd_1.10"
-
-As a convenience, you do not need to specify the
-complete license string for every package. You can use
-an abbreviated form, which consists of just the first portion or
-portions of the license string before the initial underscore character
-or characters. A partial string will match any license that contains the
-given string as the first portion of its license. For example, the
-following value will also match both of the packages
-previously mentioned as well as any other packages that have licenses
-starting with "commercial" or "license".
-::
-
- LICENSE_FLAGS_ACCEPTED = "commercial license"
-
-License Flag Matching
-~~~~~~~~~~~~~~~~~~~~~
-
-License flag matching allows you to control what recipes the
-OpenEmbedded build system includes in the build. Fundamentally, the
-build system attempts to match :term:`LICENSE_FLAGS` strings found in
-recipes against strings found in :term:`LICENSE_FLAGS_ACCEPTED`.
-A match causes the build system to include a recipe in the
-build, while failure to find a match causes the build system to exclude
-a recipe.
-
-In general, license flag matching is simple. However, understanding some
-concepts will help you correctly and effectively use matching.
-
-Before a flag defined by a particular recipe is tested against the
-entries of :term:`LICENSE_FLAGS_ACCEPTED`, the expanded
-string ``_${PN}`` is appended to the flag. This expansion makes each
-:term:`LICENSE_FLAGS` value recipe-specific. After expansion, the
-string is then matched against the entries. Thus, specifying
-``LICENSE_FLAGS = "commercial"`` in recipe "foo", for example, results
-in the string ``"commercial_foo"``. And, to create a match, that string
-must appear among the entries of :term:`LICENSE_FLAGS_ACCEPTED`.
-
-Judicious use of the :term:`LICENSE_FLAGS` strings and the contents of the
-:term:`LICENSE_FLAGS_ACCEPTED` variable allows you a lot of flexibility for
-including or excluding recipes based on licensing. For example, you can
-broaden the matching capabilities by using license flags string subsets
-in :term:`LICENSE_FLAGS_ACCEPTED`.
-
-.. note::
-
- When using a string subset, be sure to use the part of the expanded
- string that precedes the appended underscore character (e.g.
- ``usethispart_1.3``, ``usethispart_1.4``, and so forth).
-
-For example, simply specifying the string "commercial" in the
-:term:`LICENSE_FLAGS_ACCEPTED` variable matches any expanded
-:term:`LICENSE_FLAGS` definition that starts with the string
-"commercial" such as "commercial_foo" and "commercial_bar", which
-are the strings the build system automatically generates for
-hypothetical recipes named "foo" and "bar" assuming those recipes simply
-specify the following::
-
- LICENSE_FLAGS = "commercial"
-
-Thus, you can choose to exhaustively enumerate each license flag in the
-list and allow only specific recipes into the image, or you can use a
-string subset that causes a broader range of matches to allow a range of
-recipes into the image.
-
-This scheme works even if the :term:`LICENSE_FLAGS` string already has
-``_${PN}`` appended. For example, the build system turns the license
-flag "commercial_1.2_foo" into "commercial_1.2_foo_foo" and would match
-both the general "commercial" and the specific "commercial_1.2_foo"
-strings found in the :term:`LICENSE_FLAGS_ACCEPTED` variable, as expected.
-
-Here are some other scenarios:
-
-- You can specify a versioned string in the recipe such as
- "commercial_foo_1.2" in a "foo" recipe. The build system expands this
- string to "commercial_foo_1.2_foo". Combine this license flag with a
- :term:`LICENSE_FLAGS_ACCEPTED` variable that has the string
- "commercial" and you match the flag along with any other flag that
- starts with the string "commercial".
-
-- Under the same circumstances, you can add "commercial_foo" in the
- :term:`LICENSE_FLAGS_ACCEPTED` variable and the build system not only
- matches "commercial_foo_1.2" but also matches any license flag with
- the string "commercial_foo", regardless of the version.
-
-- You can be very specific and use both the package and version parts
- in the :term:`LICENSE_FLAGS_ACCEPTED` list (e.g.
- "commercial_foo_1.2") to specifically match a versioned recipe.
-
-Other Variables Related to Commercial Licenses
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-There are other helpful variables related to commercial license handling,
-defined in the
-``poky/meta/conf/distro/include/default-distrovars.inc`` file::
-
- COMMERCIAL_AUDIO_PLUGINS ?= ""
- COMMERCIAL_VIDEO_PLUGINS ?= ""
-
-If you
-want to enable these components, you can do so by making sure you have
-statements similar to the following in your ``local.conf`` configuration
-file::
-
- COMMERCIAL_AUDIO_PLUGINS = "gst-plugins-ugly-mad \
- gst-plugins-ugly-mpegaudioparse"
- COMMERCIAL_VIDEO_PLUGINS = "gst-plugins-ugly-mpeg2dec \
- gst-plugins-ugly-mpegstream gst-plugins-bad-mpegvideoparse"
- LICENSE_FLAGS_ACCEPTED = "commercial_gst-plugins-ugly commercial_gst-plugins-bad commercial_qmmp"
-
-
-Of course, you could also create a matching list for those
-components using the more general "commercial" in the
-:term:`LICENSE_FLAGS_ACCEPTED` variable, but that would also enable all
-the other packages with :term:`LICENSE_FLAGS`
-containing "commercial", which you may or may not want::
-
- LICENSE_FLAGS_ACCEPTED = "commercial"
-
-Specifying audio and video plugins as part of the
-``COMMERCIAL_AUDIO_PLUGINS`` and ``COMMERCIAL_VIDEO_PLUGINS`` statements
-(along with the enabling :term:`LICENSE_FLAGS_ACCEPTED`) includes the
-plugins or components into built images, thus adding support for media
-formats or components.
-
-Maintaining Open Source License Compliance During Your Product's Lifecycle
---------------------------------------------------------------------------
-
-One of the concerns for a development organization using open source
-software is how to maintain compliance with various open source
-licensing during the lifecycle of the product. While this section does
-not provide legal advice or comprehensively cover all scenarios, it does
-present methods that you can use to assist you in meeting the compliance
-requirements during a software release.
-
-With hundreds of different open source licenses that the Yocto Project
-tracks, it is difficult to know the requirements of each and every
-license. However, the requirements of the major FLOSS licenses can begin
-to be covered by assuming that there are three main areas of concern:
-
-- Source code must be provided.
-
-- License text for the software must be provided.
-
-- Compilation scripts and modifications to the source code must be
- provided.
-
-- spdx files can be provided.
-
-There are other requirements beyond the scope of these three and the
-methods described in this section (e.g. the mechanism through which
-source code is distributed).
-
-As different organizations have different methods of complying with open
-source licensing, this section is not meant to imply that there is only
-one single way to meet your compliance obligations, but rather to
-describe one method of achieving compliance. The remainder of this
-section describes methods supported to meet the previously mentioned
-three requirements. Once you take steps to meet these requirements, and
-prior to releasing images, sources, and the build system, you should
-audit all artifacts to ensure completeness.
-
-.. note::
-
- The Yocto Project generates a license manifest during image creation
- that is located in ``${DEPLOY_DIR}/licenses/``\ `image_name`\ ``-``\ `datestamp`
- to assist with any audits.
-
-Providing the Source Code
-~~~~~~~~~~~~~~~~~~~~~~~~~
-
-Compliance activities should begin before you generate the final image.
-The first thing you should look at is the requirement that tops the list
-for most compliance groups - providing the source. The Yocto Project has
-a few ways of meeting this requirement.
-
-One of the easiest ways to meet this requirement is to provide the
-entire :term:`DL_DIR` used by the
-build. This method, however, has a few issues. The most obvious is the
-size of the directory since it includes all sources used in the build
-and not just the source used in the released image. It will include
-toolchain source, and other artifacts, which you would not generally
-release. However, the more serious issue for most companies is
-accidental release of proprietary software. The Yocto Project provides
-an :ref:`archiver <ref-classes-archiver>` class to
-help avoid some of these concerns.
-
-Before you employ :term:`DL_DIR` or the :ref:`archiver <ref-classes-archiver>` class, you need to
-decide how you choose to provide source. The source ``archiver`` class
-can generate tarballs and SRPMs and can create them with various levels
-of compliance in mind.
-
-One way of doing this (but certainly not the only way) is to release
-just the source as a tarball. You can do this by adding the following to
-the ``local.conf`` file found in the
-:term:`Build Directory`::
-
- INHERIT += "archiver"
- ARCHIVER_MODE[src] = "original"
-
-During the creation of your
-image, the source from all recipes that deploy packages to the image is
-placed within subdirectories of ``DEPLOY_DIR/sources`` based on the
-:term:`LICENSE` for each recipe.
-Releasing the entire directory enables you to comply with requirements
-concerning providing the unmodified source. It is important to note that
-the size of the directory can get large.
-
-A way to help mitigate the size issue is to only release tarballs for
-licenses that require the release of source. Let us assume you are only
-concerned with GPL code as identified by running the following script:
-
-.. code-block:: shell
-
- # Script to archive a subset of packages matching specific license(s)
- # Source and license files are copied into sub folders of package folder
- # Must be run from build folder
- #!/bin/bash
- src_release_dir="source-release"
- mkdir -p $src_release_dir
- for a in tmp/deploy/sources/*; do
- for d in $a/*; do
- # Get package name from path
- p=`basename $d`
- p=${p%-*}
- p=${p%-*}
- # Only archive GPL packages (update *GPL* regex for your license check)
- numfiles=`ls tmp/deploy/licenses/$p/*GPL* 2> /dev/null | wc -l`
- if [ $numfiles -ge 1 ]; then
- echo Archiving $p
- mkdir -p $src_release_dir/$p/source
- cp $d/* $src_release_dir/$p/source 2> /dev/null
- mkdir -p $src_release_dir/$p/license
- cp tmp/deploy/licenses/$p/* $src_release_dir/$p/license 2> /dev/null
- fi
- done
- done
-
-At this point, you
-could create a tarball from the ``gpl_source_release`` directory and
-provide that to the end user. This method would be a step toward
-achieving compliance with section 3a of GPLv2 and with section 6 of
-GPLv3.
-
-Providing License Text
-~~~~~~~~~~~~~~~~~~~~~~
-
-One requirement that is often overlooked is inclusion of license text.
-This requirement also needs to be dealt with prior to generating the
-final image. Some licenses require the license text to accompany the
-binary. You can achieve this by adding the following to your
-``local.conf`` file::
-
- COPY_LIC_MANIFEST = "1"
- COPY_LIC_DIRS = "1"
- LICENSE_CREATE_PACKAGE = "1"
-
-Adding these statements to the
-configuration file ensures that the licenses collected during package
-generation are included on your image.
-
-.. note::
-
- Setting all three variables to "1" results in the image having two
- copies of the same license file. One copy resides in
- ``/usr/share/common-licenses`` and the other resides in
- ``/usr/share/license``.
-
- The reason for this behavior is because
- :term:`COPY_LIC_DIRS` and
- :term:`COPY_LIC_MANIFEST`
- add a copy of the license when the image is built but do not offer a
- path for adding licenses for newly installed packages to an image.
- :term:`LICENSE_CREATE_PACKAGE`
- adds a separate package and an upgrade path for adding licenses to an
- image.
-
-As the source ``archiver`` class has already archived the original
-unmodified source that contains the license files, you would have
-already met the requirements for inclusion of the license information
-with source as defined by the GPL and other open source licenses.
-
-Providing Compilation Scripts and Source Code Modifications
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-At this point, we have addressed all we need to prior to generating the
-image. The next two requirements are addressed during the final
-packaging of the release.
-
-By releasing the version of the OpenEmbedded build system and the layers
-used during the build, you will be providing both compilation scripts
-and the source code modifications in one step.
-
-If the deployment team has a :ref:`overview-manual/concepts:bsp layer`
-and a distro layer, and those
-those layers are used to patch, compile, package, or modify (in any way)
-any open source software included in your released images, you might be
-required to release those layers under section 3 of GPLv2 or section 1
-of GPLv3. One way of doing that is with a clean checkout of the version
-of the Yocto Project and layers used during your build. Here is an
-example:
-
-.. code-block:: shell
-
- # We built using the dunfell branch of the poky repo
- $ git clone -b dunfell git://git.yoctoproject.org/poky
- $ cd poky
- # We built using the release_branch for our layers
- $ git clone -b release_branch git://git.mycompany.com/meta-my-bsp-layer
- $ git clone -b release_branch git://git.mycompany.com/meta-my-software-layer
- # clean up the .git repos
- $ find . -name ".git" -type d -exec rm -rf {} \;
-
-One
-thing a development organization might want to consider for end-user
-convenience is to modify ``meta-poky/conf/bblayers.conf.sample`` to
-ensure that when the end user utilizes the released build system to
-build an image, the development organization's layers are included in
-the ``bblayers.conf`` file automatically::
-
- # POKY_BBLAYERS_CONF_VERSION is increased each time build/conf/bblayers.conf
- # changes incompatibly
- POKY_BBLAYERS_CONF_VERSION = "2"
-
- BBPATH = "${TOPDIR}"
- BBFILES ?= ""
-
- BBLAYERS ?= " \
- ##OEROOT##/meta \
- ##OEROOT##/meta-poky \
- ##OEROOT##/meta-yocto-bsp \
- ##OEROOT##/meta-mylayer \
- "
-
-Creating and
-providing an archive of the :term:`Metadata`
-layers (recipes, configuration files, and so forth) enables you to meet
-your requirements to include the scripts to control compilation as well
-as any modifications to the original source.
-
-Providing spdx files
-~~~~~~~~~~~~~~~~~~~~~~~~~
-
-The spdx module has been integrated to a layer named meta-spdxscanner.
-meta-spdxscanner provides several kinds of scanner. If you want to enable
-this function, you have to follow the following steps:
-
-1. Add meta-spdxscanner layer into ``bblayers.conf``.
-
-2. Refer to the README in meta-spdxscanner to setup the environment (e.g,
- setup a fossology server) needed for the scanner.
-
-3. Meta-spdxscanner provides several methods within the bbclass to create spdx files.
- Please choose one that you want to use and enable the spdx task. You have to
- add some config options in ``local.conf`` file in your :term:`Build
- Directory`. Here is an example showing how to generate spdx files
- during BitBake using the fossology-python.bbclass::
-
- # Select fossology-python.bbclass.
- INHERIT += "fossology-python"
- # For fossology-python.bbclass, TOKEN is necessary, so, after setup a
- # Fossology server, you have to create a token.
- TOKEN = "eyJ0eXAiO..."
- # The fossology server is necessary for fossology-python.bbclass.
- FOSSOLOGY_SERVER = "http://xx.xx.xx.xx:8081/repo"
- # If you want to upload the source code to a special folder:
- FOLDER_NAME = "xxxx" //Optional
- # If you don't want to put spdx files in tmp/deploy/spdx, you can enable:
- SPDX_DEPLOY_DIR = "${DEPLOY_DIR}" //Optional
-
-For more usage information refer to :yocto_git:`the meta-spdxscanner repository
-</meta-spdxscanner/>`.
-
-Compliance Limitations with Executables Built from Static Libraries
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-When package A is added to an image via the :term:`RDEPENDS` or :term:`RRECOMMENDS`
-mechanisms as well as explicitly included in the image recipe with
-:term:`IMAGE_INSTALL`, and depends on a static linked library recipe B
-(``DEPENDS += "B"``), package B will neither appear in the generated license
-manifest nor in the generated source tarballs. This occurs as the
-:ref:`license <ref-classes-license>` and :ref:`archiver <ref-classes-archiver>`
-classes assume that only packages included via :term:`RDEPENDS` or :term:`RRECOMMENDS`
-end up in the image.
-
-As a result, potential obligations regarding license compliance for package B
-may not be met.
-
-The Yocto Project doesn't enable static libraries by default, in part because
-of this issue. Before a solution to this limitation is found, you need to
-keep in mind that if your root filesystem is built from static libraries,
-you will need to manually ensure that your deliveries are compliant
-with the licenses of these libraries.
-
-Copying Non Standard Licenses
------------------------------
-
-Some packages, such as the linux-firmware package, have many licenses
-that are not in any way common. You can avoid adding a lot of these
-types of common license files, which are only applicable to a specific
-package, by using the
-:term:`NO_GENERIC_LICENSE`
-variable. Using this variable also avoids QA errors when you use a
-non-common, non-CLOSED license in a recipe.
-
-Here is an example that uses the ``LICENSE.Abilis.txt`` file as
-the license from the fetched source::
-
- NO_GENERIC_LICENSE[Firmware-Abilis] = "LICENSE.Abilis.txt"
-
-Checking for Vulnerabilities
-============================
-
-Vulnerabilities in images
--------------------------
-
-The Yocto Project has an infrastructure to track and address unfixed
-known security vulnerabilities, as tracked by the public
-`Common Vulnerabilities and Exposures (CVE) <https://en.wikipedia.org/wiki/Common_Vulnerabilities_and_Exposures>`__
-database.
-
-To know which packages are vulnerable to known security vulnerabilities,
-add the following setting to your configuration::
-
- INHERIT += "cve-check"
-
-This way, at build time, BitBake will warn you about known CVEs
-as in the example below::
-
- WARNING: flex-2.6.4-r0 do_cve_check: Found unpatched CVE (CVE-2019-6293), for more information check /poky/build/tmp/work/core2-64-poky-linux/flex/2.6.4-r0/temp/cve.log
- WARNING: libarchive-3.5.1-r0 do_cve_check: Found unpatched CVE (CVE-2021-36976), for more information check /poky/build/tmp/work/core2-64-poky-linux/libarchive/3.5.1-r0/temp/cve.log
-
-It is also possible to check the CVE status of individual packages as follows::
-
- bitbake -c cve_check flex libarchive
-
-Note that OpenEmbedded-Core keeps a list of known unfixed CVE issues which can
-be ignored. You can pass this list to the check as follows::
-
- bitbake -c cve_check libarchive -R conf/distro/include/cve-extra-exclusions.inc
-
-Enabling vulnerabily tracking in recipes
-----------------------------------------
-
-The :term:`CVE_PRODUCT` variable defines the name used to match the recipe name
-against the name in the upstream `NIST CVE database <https://nvd.nist.gov/>`__.
-
-Editing recipes to fix vulnerabilities
---------------------------------------
-
-To fix a given known vulnerability, you need to add a patch file to your recipe. Here's
-an example from the :oe_layerindex:`ffmpeg recipe</layerindex/recipe/47350>`::
-
- SRC_URI = "https://www.ffmpeg.org/releases/${BP}.tar.xz \
- file://0001-libavutil-include-assembly-with-full-path-from-sourc.patch \
- file://fix-CVE-2020-20446.patch \
- file://fix-CVE-2020-20453.patch \
- file://fix-CVE-2020-22015.patch \
- file://fix-CVE-2020-22021.patch \
- file://fix-CVE-2020-22033-CVE-2020-22019.patch \
- file://fix-CVE-2021-33815.patch \
-
-The :ref:`cve-check <ref-classes-cve-check>` class defines two ways of
-supplying a patch for a given CVE. The first
-way is to use a patch filename that matches the below pattern::
-
- cve_file_name_match = re.compile(".*([Cc][Vv][Ee]\-\d{4}\-\d+)")
-
-As shown in the example above, multiple CVE IDs can appear in a patch filename,
-but the :ref:`cve-check <ref-classes-cve-check>` class will only consider
-the last CVE ID in the filename as patched.
-
-The second way to recognize a patched CVE ID is when a line matching the
-below pattern is found in any patch file provided by the recipe::
-
- cve_match = re.compile("CVE:( CVE\-\d{4}\-\d+)+")
-
-This allows a single patch file to address multiple CVE IDs at the same time.
-
-Of course, another way to fix vulnerabilities is to upgrade to a version
-of the package which is not impacted, typically a more recent one.
-The NIST database knows which versions are vulnerable and which ones
-are not.
-
-Last but not least, you can choose to ignore vulnerabilities through
-the :term:`CVE_CHECK_SKIP_RECIPE` and :term:`CVE_CHECK_IGNORE`
-variables.
-
-Implementation details
-----------------------
-
-Here's what the :ref:`cve-check <ref-classes-cve-check>` class does to
-find unpatched CVE IDs.
-
-First the code goes through each patch file provided by a recipe. If a valid CVE ID
-is found in the name of the file, the corresponding CVE is considered as patched.
-Don't forget that if multiple CVE IDs are found in the filename, only the last
-one is considered. Then, the code looks for ``CVE: CVE-ID`` lines in the patch
-file. The found CVE IDs are also considered as patched.
-
-Then, the code looks up all the CVE IDs in the NIST database for all the
-products defined in :term:`CVE_PRODUCT`. Then, for each found CVE:
-
- - If the package name (:term:`PN`) is part of
- :term:`CVE_CHECK_SKIP_RECIPE`, it is considered as patched.
-
- - If the CVE ID is part of :term:`CVE_CHECK_IGNORE`, it is
- considered as patched too.
-
- - If the CVE ID is part of the patched CVE for the recipe, it is
- already considered as patched.
-
- - Otherwise, the code checks whether the recipe version (:term:`PV`)
- is within the range of versions impacted by the CVE. If so, the CVE
- is considered as unpatched.
-
-The CVE database is stored in :term:`DL_DIR` and can be inspected using
-``sqlite3`` command as follows::
-
- sqlite3 downloads/CVE_CHECK/nvdcve_1.1.db .dump | grep CVE-2021-37462
-
-Using the Error Reporting Tool
-==============================
-
-The error reporting tool allows you to submit errors encountered during
-builds to a central database. Outside of the build environment, you can
-use a web interface to browse errors, view statistics, and query for
-errors. The tool works using a client-server system where the client
-portion is integrated with the installed Yocto Project
-:term:`Source Directory` (e.g. ``poky``).
-The server receives the information collected and saves it in a
-database.
-
-There is a live instance of the error reporting server at
-https://errors.yoctoproject.org.
-When you want to get help with build failures, you can submit all of the
-information on the failure easily and then point to the URL in your bug
-report or send an email to the mailing list.
-
-.. note::
-
- If you send error reports to this server, the reports become publicly
- visible.
-
-Enabling and Using the Tool
----------------------------
-
-By default, the error reporting tool is disabled. You can enable it by
-inheriting the
-:ref:`report-error <ref-classes-report-error>`
-class by adding the following statement to the end of your
-``local.conf`` file in your
-:term:`Build Directory`.
-::
-
- INHERIT += "report-error"
-
-By default, the error reporting feature stores information in
-``${``\ :term:`LOG_DIR`\ ``}/error-report``.
-However, you can specify a directory to use by adding the following to
-your ``local.conf`` file::
-
- ERR_REPORT_DIR = "path"
-
-Enabling error
-reporting causes the build process to collect the errors and store them
-in a file as previously described. When the build system encounters an
-error, it includes a command as part of the console output. You can run
-the command to send the error file to the server. For example, the
-following command sends the errors to an upstream server::
-
- $ send-error-report /home/brandusa/project/poky/build/tmp/log/error-report/error_report_201403141617.txt
-
-In the previous example, the errors are sent to a public database
-available at https://errors.yoctoproject.org, which is used by the
-entire community. If you specify a particular server, you can send the
-errors to a different database. Use the following command for more
-information on available options::
-
- $ send-error-report --help
-
-When sending the error file, you are prompted to review the data being
-sent as well as to provide a name and optional email address. Once you
-satisfy these prompts, the command returns a link from the server that
-corresponds to your entry in the database. For example, here is a
-typical link: https://errors.yoctoproject.org/Errors/Details/9522/
-
-Following the link takes you to a web interface where you can browse,
-query the errors, and view statistics.
-
-Disabling the Tool
-------------------
-
-To disable the error reporting feature, simply remove or comment out the
-following statement from the end of your ``local.conf`` file in your
-:term:`Build Directory`.
-::
-
- INHERIT += "report-error"
-
-Setting Up Your Own Error Reporting Server
-------------------------------------------
-
-If you want to set up your own error reporting server, you can obtain
-the code from the Git repository at :yocto_git:`/error-report-web/`.
-Instructions on how to set it up are in the README document.
-
-Using Wayland and Weston
-========================
-
-`Wayland <https://en.wikipedia.org/wiki/Wayland_(display_server_protocol)>`__
-is a computer display server protocol that provides a method for
-compositing window managers to communicate directly with applications
-and video hardware and expects them to communicate with input hardware
-using other libraries. Using Wayland with supporting targets can result
-in better control over graphics frame rendering than an application
-might otherwise achieve.
-
-The Yocto Project provides the Wayland protocol libraries and the
-reference
-`Weston <https://en.wikipedia.org/wiki/Wayland_(display_server_protocol)#Weston>`__
-compositor as part of its release. You can find the integrated packages
-in the ``meta`` layer of the :term:`Source Directory`.
-Specifically, you
-can find the recipes that build both Wayland and Weston at
-``meta/recipes-graphics/wayland``.
-
-You can build both the Wayland and Weston packages for use only with
-targets that accept the `Mesa 3D and Direct Rendering
-Infrastructure <https://en.wikipedia.org/wiki/Mesa_(computer_graphics)>`__,
-which is also known as Mesa DRI. This implies that you cannot build and
-use the packages if your target uses, for example, the Intel Embedded
-Media and Graphics Driver (Intel EMGD) that overrides Mesa DRI.
-
-.. note::
-
- Due to lack of EGL support, Weston 1.0.3 will not run directly on the
- emulated QEMU hardware. However, this version of Weston will run
- under X emulation without issues.
-
-This section describes what you need to do to implement Wayland and use
-the Weston compositor when building an image for a supporting target.
-
-Enabling Wayland in an Image
-----------------------------
-
-To enable Wayland, you need to enable it to be built and enable it to be
-included (installed) in the image.
-
-Building Wayland
-~~~~~~~~~~~~~~~~
-
-To cause Mesa to build the ``wayland-egl`` platform and Weston to build
-Wayland with Kernel Mode Setting
-(`KMS <https://wiki.archlinux.org/index.php/Kernel_Mode_Setting>`__)
-support, include the "wayland" flag in the
-:term:`DISTRO_FEATURES`
-statement in your ``local.conf`` file::
-
- DISTRO_FEATURES:append = " wayland"
-
-.. note::
-
- If X11 has been enabled elsewhere, Weston will build Wayland with X11
- support
-
-Installing Wayland and Weston
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-To install the Wayland feature into an image, you must include the
-following
-:term:`CORE_IMAGE_EXTRA_INSTALL`
-statement in your ``local.conf`` file::
-
- CORE_IMAGE_EXTRA_INSTALL += "wayland weston"
-
-Running Weston
---------------
-
-To run Weston inside X11, enabling it as described earlier and building
-a Sato image is sufficient. If you are running your image under Sato, a
-Weston Launcher appears in the "Utility" category.
-
-Alternatively, you can run Weston through the command-line interpretor
-(CLI), which is better suited for development work. To run Weston under
-the CLI, you need to do the following after your image is built:
-
-1. Run these commands to export ``XDG_RUNTIME_DIR``::
-
- mkdir -p /tmp/$USER-weston
- chmod 0700 /tmp/$USER-weston
- export XDG_RUNTIME_DIR=/tmp/$USER-weston
-
-2. Launch Weston in the shell::
-
- weston
diff --git a/documentation/dev-manual/custom-distribution.rst b/documentation/dev-manual/custom-distribution.rst
new file mode 100644
index 0000000000..0bc386d606
--- /dev/null
+++ b/documentation/dev-manual/custom-distribution.rst
@@ -0,0 +1,135 @@
+.. SPDX-License-Identifier: CC-BY-SA-2.0-UK
+
+Creating Your Own Distribution
+******************************
+
+When you build an image using the Yocto Project and do not alter any
+distribution :term:`Metadata`, you are using the Poky distribution.
+Poky is explicitly a *reference* distribution for testing and
+development purposes. It enables most hardware and software features
+so that they can be tested, but this also means that from a security
+point of view the attack surface is very large. Additionally, at some
+point it is likely that you will want to gain more control over package
+alternative selections, compile-time options, and other low-level
+configurations. For both of these reasons, if you are using the Yocto
+Project for production use then you are strongly encouraged to create
+your own distribution.
+
+To create your own distribution, the basic steps consist of creating
+your own distribution layer, creating your own distribution
+configuration file, and then adding any needed code and Metadata to the
+layer. The following steps provide some more detail:
+
+- *Create a layer for your new distro:* Create your distribution layer
+ so that you can keep your Metadata and code for the distribution
+ separate. It is strongly recommended that you create and use your own
+ layer for configuration and code. Using your own layer as compared to
+ just placing configurations in a ``local.conf`` configuration file
+ makes it easier to reproduce the same build configuration when using
+ multiple build machines. See the
+ ":ref:`dev-manual/layers:creating a general layer using the \`\`bitbake-layers\`\` script`"
+ section for information on how to quickly set up a layer.
+
+- *Create the distribution configuration file:* The distribution
+ configuration file needs to be created in the ``conf/distro``
+ directory of your layer. You need to name it using your distribution
+ name (e.g. ``mydistro.conf``).
+
+ .. note::
+
+ The :term:`DISTRO` variable in your ``local.conf`` file determines the
+ name of your distribution.
+
+ You can split out parts of your configuration file into include files
+ and then "require" them from within your distribution configuration
+ file. Be sure to place the include files in the
+ ``conf/distro/include`` directory of your layer. A common example
+ usage of include files would be to separate out the selection of
+ desired version and revisions for individual recipes.
+
+ Your configuration file needs to set the following required
+ variables:
+
+ - :term:`DISTRO_NAME`
+
+ - :term:`DISTRO_VERSION`
+
+ These following variables are optional and you typically set them
+ from the distribution configuration file:
+
+ - :term:`DISTRO_FEATURES`
+
+ - :term:`DISTRO_EXTRA_RDEPENDS`
+
+ - :term:`DISTRO_EXTRA_RRECOMMENDS`
+
+ - :term:`TCLIBC`
+
+ .. tip::
+
+ If you want to base your distribution configuration file on the
+ very basic configuration from OE-Core, you can use
+ ``conf/distro/defaultsetup.conf`` as a reference and just include
+ variables that differ as compared to ``defaultsetup.conf``.
+ Alternatively, you can create a distribution configuration file
+ from scratch using the ``defaultsetup.conf`` file or configuration files
+ from another distribution such as Poky as a reference.
+
+- *Provide miscellaneous variables:* Be sure to define any other
+ variables for which you want to create a default or enforce as part
+ of the distribution configuration. You can include nearly any
+ variable from the ``local.conf`` file. The variables you use are not
+ limited to the list in the previous bulleted item.
+
+- *Point to Your distribution configuration file:* In your ``local.conf``
+ file in the :term:`Build Directory`, set your :term:`DISTRO` variable to
+ point to your distribution's configuration file. For example, if your
+ distribution's configuration file is named ``mydistro.conf``, then
+ you point to it as follows::
+
+ DISTRO = "mydistro"
+
+- *Add more to the layer if necessary:* Use your layer to hold other
+ information needed for the distribution:
+
+ - Add recipes for installing distro-specific configuration files
+ that are not already installed by another recipe. If you have
+ distro-specific configuration files that are included by an
+ existing recipe, you should add an append file (``.bbappend``) for
+ those. For general information and recommendations on how to add
+ recipes to your layer, see the
+ ":ref:`dev-manual/layers:creating your own layer`" and
+ ":ref:`dev-manual/layers:following best practices when creating layers`"
+ sections.
+
+ - Add any image recipes that are specific to your distribution.
+
+ - Add a ``psplash`` append file for a branded splash screen, using
+ the :term:`SPLASH_IMAGES` variable.
+
+ - Add any other append files to make custom changes that are
+ specific to individual recipes.
+
+ For information on append files, see the
+ ":ref:`dev-manual/layers:appending other layers metadata with your layer`"
+ section.
+
+Copying and modifying the Poky distribution
+===========================================
+
+Instead of creating a custom distribution from scratch as per above, you may
+wish to start your custom distribution configuration by copying the Poky
+distribution provided within the ``meta-poky`` layer and then modifying it.
+This is fine, however if you do this you should keep the following in mind:
+
+- Every reference to Poky needs to be updated in your copy so that it
+ will still apply. This includes override usage within files (e.g. ``:poky``)
+ and in directory names. This is a good opportunity to evaluate each one of
+ these customizations to see if they are needed for your use case.
+
+- Unless you also intend to use them, the ``poky-tiny``, ``poky-altcfg`` and
+ ``poky-bleeding`` variants and any references to them can be removed.
+
+- More generally, the Poky distribution configuration enables a lot more
+ than you likely need for your production use case. You should evaluate *every*
+ configuration choice made in your copy to determine if it is needed.
diff --git a/documentation/dev-manual/custom-template-configuration-directory.rst b/documentation/dev-manual/custom-template-configuration-directory.rst
new file mode 100644
index 0000000000..06fcada822
--- /dev/null
+++ b/documentation/dev-manual/custom-template-configuration-directory.rst
@@ -0,0 +1,52 @@
+.. SPDX-License-Identifier: CC-BY-SA-2.0-UK
+
+Creating a Custom Template Configuration Directory
+**************************************************
+
+If you are producing your own customized version of the build system for
+use by other users, you might want to provide a custom build configuration
+that includes all the necessary settings and layers (i.e. ``local.conf`` and
+``bblayers.conf`` that are created in a new :term:`Build Directory`) and a custom
+message that is shown when setting up the build. This can be done by
+creating one or more template configuration directories in your
+custom distribution layer.
+
+This can be done by using ``bitbake-layers save-build-conf``::
+
+ $ bitbake-layers save-build-conf ../../meta-alex/ test-1
+ NOTE: Starting bitbake server...
+ NOTE: Configuration template placed into /srv/work/alex/meta-alex/conf/templates/test-1
+ Please review the files in there, and particularly provide a configuration description in /srv/work/alex/meta-alex/conf/templates/test-1/conf-notes.txt
+ You can try out the configuration with
+ TEMPLATECONF=/srv/work/alex/meta-alex/conf/templates/test-1 . /srv/work/alex/poky/oe-init-build-env build-try-test-1
+
+The above command takes the config files from the currently active :term:`Build Directory` under ``conf``,
+replaces site-specific paths in ``bblayers.conf`` with ``##OECORE##``-relative paths, and copies
+the config files into a specified layer under a specified template name.
+
+To use those saved templates as a starting point for a build, users should point
+to one of them with :term:`TEMPLATECONF` environment variable::
+
+ TEMPLATECONF=/srv/work/alex/meta-alex/conf/templates/test-1 . /srv/work/alex/poky/oe-init-build-env build-try-test-1
+
+The OpenEmbedded build system uses the environment variable
+:term:`TEMPLATECONF` to locate the directory from which it gathers
+configuration information that ultimately ends up in the
+:term:`Build Directory` ``conf`` directory.
+
+If :term:`TEMPLATECONF` is not set, the default value is obtained
+from ``.templateconf`` file that is read from the same directory as
+``oe-init-build-env`` script. For the Poky reference distribution this
+would be::
+
+ TEMPLATECONF=${TEMPLATECONF:-meta-poky/conf/templates/default}
+
+If you look at a configuration template directory, you will
+see the ``bblayers.conf.sample``, ``local.conf.sample``, ``conf-summary.txt`` and
+``conf-notes.txt`` files. The build system uses these files to form the
+respective ``bblayers.conf`` file, ``local.conf`` file, and show
+users usage information about the build they're setting up
+when running the ``oe-init-build-env`` setup script. These can be
+edited further if needed to improve or change the build configurations
+available to the users, and provide useful summaries and detailed usage notes.
+
diff --git a/documentation/dev-manual/customizing-images.rst b/documentation/dev-manual/customizing-images.rst
new file mode 100644
index 0000000000..5b18958ade
--- /dev/null
+++ b/documentation/dev-manual/customizing-images.rst
@@ -0,0 +1,223 @@
+.. SPDX-License-Identifier: CC-BY-SA-2.0-UK
+
+Customizing Images
+******************
+
+You can customize images to satisfy particular requirements. This
+section describes several methods and provides guidelines for each.
+
+Customizing Images Using ``local.conf``
+=======================================
+
+Probably the easiest way to customize an image is to add a package by
+way of the ``local.conf`` configuration file. Because it is limited to
+local use, this method generally only allows you to add packages and is
+not as flexible as creating your own customized image. When you add
+packages using local variables this way, you need to realize that these
+variable changes are in effect for every build and consequently affect
+all images, which might not be what you require.
+
+To add a package to your image using the local configuration file, use
+the :term:`IMAGE_INSTALL` variable with the ``:append`` operator::
+
+ IMAGE_INSTALL:append = " strace"
+
+Use of the syntax is important; specifically, the leading space
+after the opening quote and before the package name, which is
+``strace`` in this example. This space is required since the ``:append``
+operator does not add the space.
+
+Furthermore, you must use ``:append`` instead of the ``+=`` operator if
+you want to avoid ordering issues. The reason for this is because doing
+so unconditionally appends to the variable and avoids ordering problems
+due to the variable being set in image recipes and ``.bbclass`` files
+with operators like ``?=``. Using ``:append`` ensures the operation
+takes effect.
+
+As shown in its simplest use, ``IMAGE_INSTALL:append`` affects all
+images. It is possible to extend the syntax so that the variable applies
+to a specific image only. Here is an example::
+
+ IMAGE_INSTALL:append:pn-core-image-minimal = " strace"
+
+This example adds ``strace`` to the ``core-image-minimal`` image only.
+
+You can add packages using a similar approach through the
+:term:`CORE_IMAGE_EXTRA_INSTALL` variable. If you use this variable, only
+``core-image-*`` images are affected.
+
+Customizing Images Using Custom ``IMAGE_FEATURES`` and ``EXTRA_IMAGE_FEATURES``
+===============================================================================
+
+Another method for customizing your image is to enable or disable
+high-level image features by using the
+:term:`IMAGE_FEATURES` and
+:term:`EXTRA_IMAGE_FEATURES`
+variables. Although the functions for both variables are nearly
+equivalent, best practices dictate using :term:`IMAGE_FEATURES` from within
+a recipe and using :term:`EXTRA_IMAGE_FEATURES` from within your
+``local.conf`` file, which is found in the :term:`Build Directory`.
+
+To understand how these features work, the best reference is
+:ref:`meta/classes-recipe/image.bbclass <ref-classes-image>`.
+This class lists out the available
+:term:`IMAGE_FEATURES` of which most map to package groups while some, such
+as ``debug-tweaks`` and ``read-only-rootfs``, resolve as general
+configuration settings.
+
+In summary, the file looks at the contents of the :term:`IMAGE_FEATURES`
+variable and then maps or configures the feature accordingly. Based on
+this information, the build system automatically adds the appropriate
+packages or configurations to the
+:term:`IMAGE_INSTALL` variable.
+Effectively, you are enabling extra features by extending the class or
+creating a custom class for use with specialized image ``.bb`` files.
+
+Use the :term:`EXTRA_IMAGE_FEATURES` variable from within your local
+configuration file. Using a separate area from which to enable features
+with this variable helps you avoid overwriting the features in the image
+recipe that are enabled with :term:`IMAGE_FEATURES`. The value of
+:term:`EXTRA_IMAGE_FEATURES` is added to :term:`IMAGE_FEATURES` within
+``meta/conf/bitbake.conf``.
+
+To illustrate how you can use these variables to modify your image,
+consider an example that selects the SSH server. The Yocto Project ships
+with two SSH servers you can use with your images: Dropbear and OpenSSH.
+Dropbear is a minimal SSH server appropriate for resource-constrained
+environments, while OpenSSH is a well-known standard SSH server
+implementation. By default, the ``core-image-sato`` image is configured
+to use Dropbear. The ``core-image-full-cmdline`` and ``core-image-lsb``
+images both include OpenSSH. The ``core-image-minimal`` image does not
+contain an SSH server.
+
+You can customize your image and change these defaults. Edit the
+:term:`IMAGE_FEATURES` variable in your recipe or use the
+:term:`EXTRA_IMAGE_FEATURES` in your ``local.conf`` file so that it
+configures the image you are working with to include
+``ssh-server-dropbear`` or ``ssh-server-openssh``.
+
+.. note::
+
+ See the ":ref:`ref-manual/features:image features`" section in the Yocto
+ Project Reference Manual for a complete list of image features that ship
+ with the Yocto Project.
+
+Customizing Images Using Custom .bb Files
+=========================================
+
+You can also customize an image by creating a custom recipe that defines
+additional software as part of the image. The following example shows
+the form for the two lines you need::
+
+ IMAGE_INSTALL = "packagegroup-core-x11-base package1 package2"
+ inherit core-image
+
+Defining the software using a custom recipe gives you total control over
+the contents of the image. It is important to use the correct names of
+packages in the :term:`IMAGE_INSTALL` variable. You must use the
+OpenEmbedded notation and not the Debian notation for the names (e.g.
+``glibc-dev`` instead of ``libc6-dev``).
+
+The other method for creating a custom image is to base it on an
+existing image. For example, if you want to create an image based on
+``core-image-sato`` but add the additional package ``strace`` to the
+image, copy the ``meta/recipes-sato/images/core-image-sato.bb`` to a new
+``.bb`` and add the following line to the end of the copy::
+
+ IMAGE_INSTALL += "strace"
+
+Customizing Images Using Custom Package Groups
+==============================================
+
+For complex custom images, the best approach for customizing an image is
+to create a custom package group recipe that is used to build the image
+or images. A good example of a package group recipe is
+``meta/recipes-core/packagegroups/packagegroup-base.bb``.
+
+If you examine that recipe, you see that the :term:`PACKAGES` variable lists
+the package group packages to produce. The ``inherit packagegroup``
+statement sets appropriate default values and automatically adds
+``-dev``, ``-dbg``, and ``-ptest`` complementary packages for each
+package specified in the :term:`PACKAGES` statement.
+
+.. note::
+
+ The ``inherit packagegroup`` line should be located near the top of the
+ recipe, certainly before the :term:`PACKAGES` statement.
+
+For each package you specify in :term:`PACKAGES`, you can use :term:`RDEPENDS`
+and :term:`RRECOMMENDS` entries to provide a list of packages the parent
+task package should contain. You can see examples of these further down
+in the ``packagegroup-base.bb`` recipe.
+
+Here is a short, fabricated example showing the same basic pieces for a
+hypothetical packagegroup defined in ``packagegroup-custom.bb``, where
+the variable :term:`PN` is the standard way to abbreviate the reference to
+the full packagegroup name ``packagegroup-custom``::
+
+ DESCRIPTION = "My Custom Package Groups"
+
+ inherit packagegroup
+
+ PACKAGES = "\
+ ${PN}-apps \
+ ${PN}-tools \
+ "
+
+ RDEPENDS:${PN}-apps = "\
+ dropbear \
+ portmap \
+ psplash"
+
+ RDEPENDS:${PN}-tools = "\
+ oprofile \
+ oprofileui-server \
+ lttng-tools"
+
+ RRECOMMENDS:${PN}-tools = "\
+ kernel-module-oprofile"
+
+In the previous example, two package group packages are created with
+their dependencies and their recommended package dependencies listed:
+``packagegroup-custom-apps``, and ``packagegroup-custom-tools``. To
+build an image using these package group packages, you need to add
+``packagegroup-custom-apps`` and/or ``packagegroup-custom-tools`` to
+:term:`IMAGE_INSTALL`. For other forms of image dependencies see the other
+areas of this section.
+
+Customizing an Image Hostname
+=============================
+
+By default, the configured hostname (i.e. ``/etc/hostname``) in an image
+is the same as the machine name. For example, if
+:term:`MACHINE` equals "qemux86", the
+configured hostname written to ``/etc/hostname`` is "qemux86".
+
+You can customize this name by altering the value of the "hostname"
+variable in the ``base-files`` recipe using either an append file or a
+configuration file. Use the following in an append file::
+
+ hostname = "myhostname"
+
+Use the following in a configuration file::
+
+ hostname:pn-base-files = "myhostname"
+
+Changing the default value of the variable "hostname" can be useful in
+certain situations. For example, suppose you need to do extensive
+testing on an image and you would like to easily identify the image
+under test from existing images with typical default hostnames. In this
+situation, you could change the default hostname to "testme", which
+results in all the images using the name "testme". Once testing is
+complete and you do not need to rebuild the image for test any longer,
+you can easily reset the default hostname.
+
+Another point of interest is that if you unset the variable, the image
+will have no default hostname in the filesystem. Here is an example that
+unsets the variable in a configuration file::
+
+ hostname:pn-base-files = ""
+
+Having no default hostname in the filesystem is suitable for
+environments that use dynamic hostnames such as virtual machines.
+
diff --git a/documentation/dev-manual/debugging.rst b/documentation/dev-manual/debugging.rst
new file mode 100644
index 0000000000..92458a0c37
--- /dev/null
+++ b/documentation/dev-manual/debugging.rst
@@ -0,0 +1,1271 @@
+.. SPDX-License-Identifier: CC-BY-SA-2.0-UK
+
+Debugging Tools and Techniques
+******************************
+
+The exact method for debugging build failures depends on the nature of
+the problem and on the system's area from which the bug originates.
+Standard debugging practices such as comparison against the last known
+working version with examination of the changes and the re-application
+of steps to identify the one causing the problem are valid for the Yocto
+Project just as they are for any other system. Even though it is
+impossible to detail every possible potential failure, this section
+provides some general tips to aid in debugging given a variety of
+situations.
+
+.. note::
+
+ A useful feature for debugging is the error reporting tool.
+ Configuring the Yocto Project to use this tool causes the
+ OpenEmbedded build system to produce error reporting commands as part
+ of the console output. You can enter the commands after the build
+ completes to log error information into a common database, that can
+ help you figure out what might be going wrong. For information on how
+ to enable and use this feature, see the
+ ":ref:`dev-manual/error-reporting-tool:using the error reporting tool`"
+ section.
+
+The following list shows the debugging topics in the remainder of this
+section:
+
+- ":ref:`dev-manual/debugging:viewing logs from failed tasks`" describes
+ how to find and view logs from tasks that failed during the build
+ process.
+
+- ":ref:`dev-manual/debugging:viewing variable values`" describes how to
+ use the BitBake ``-e`` option to examine variable values after a
+ recipe has been parsed.
+
+- ":ref:`dev-manual/debugging:viewing package information with \`\`oe-pkgdata-util\`\``"
+ describes how to use the ``oe-pkgdata-util`` utility to query
+ :term:`PKGDATA_DIR` and
+ display package-related information for built packages.
+
+- ":ref:`dev-manual/debugging:viewing dependencies between recipes and tasks`"
+ describes how to use the BitBake ``-g`` option to display recipe
+ dependency information used during the build.
+
+- ":ref:`dev-manual/debugging:viewing task variable dependencies`" describes
+ how to use the ``bitbake-dumpsig`` command in conjunction with key
+ subdirectories in the :term:`Build Directory` to determine variable
+ dependencies.
+
+- ":ref:`dev-manual/debugging:running specific tasks`" describes
+ how to use several BitBake options (e.g. ``-c``, ``-C``, and ``-f``)
+ to run specific tasks in the build chain. It can be useful to run
+ tasks "out-of-order" when trying isolate build issues.
+
+- ":ref:`dev-manual/debugging:general BitBake problems`" describes how
+ to use BitBake's ``-D`` debug output option to reveal more about what
+ BitBake is doing during the build.
+
+- ":ref:`dev-manual/debugging:building with no dependencies`"
+ describes how to use the BitBake ``-b`` option to build a recipe
+ while ignoring dependencies.
+
+- ":ref:`dev-manual/debugging:recipe logging mechanisms`"
+ describes how to use the many recipe logging functions to produce
+ debugging output and report errors and warnings.
+
+- ":ref:`dev-manual/debugging:debugging parallel make races`"
+ describes how to debug situations where the build consists of several
+ parts that are run simultaneously and when the output or result of
+ one part is not ready for use with a different part of the build that
+ depends on that output.
+
+- ":ref:`dev-manual/debugging:debugging with the gnu project debugger (gdb) remotely`"
+ describes how to use GDB to allow you to examine running programs, which can
+ help you fix problems.
+
+- ":ref:`dev-manual/debugging:debugging with the gnu project debugger (gdb) on the target`"
+ describes how to use GDB directly on target hardware for debugging.
+
+- ":ref:`dev-manual/debugging:other debugging tips`" describes
+ miscellaneous debugging tips that can be useful.
+
+Viewing Logs from Failed Tasks
+==============================
+
+You can find the log for a task in the file
+``${``\ :term:`WORKDIR`\ ``}/temp/log.do_``\ `taskname`.
+For example, the log for the
+:ref:`ref-tasks-compile` task of the
+QEMU minimal image for the x86 machine (``qemux86``) might be in
+``tmp/work/qemux86-poky-linux/core-image-minimal/1.0-r0/temp/log.do_compile``.
+To see the commands :term:`BitBake` ran
+to generate a log, look at the corresponding ``run.do_``\ `taskname` file
+in the same directory.
+
+``log.do_``\ `taskname` and ``run.do_``\ `taskname` are actually symbolic
+links to ``log.do_``\ `taskname`\ ``.``\ `pid` and
+``log.run_``\ `taskname`\ ``.``\ `pid`, where `pid` is the PID the task had
+when it ran. The symlinks always point to the files corresponding to the
+most recent run.
+
+Viewing Variable Values
+=======================
+
+Sometimes you need to know the value of a variable as a result of
+BitBake's parsing step. This could be because some unexpected behavior
+occurred in your project. Perhaps an attempt to :ref:`modify a variable
+<bitbake-user-manual/bitbake-user-manual-metadata:modifying existing
+variables>` did not work out as expected.
+
+BitBake's ``-e`` option is used to display variable values after
+parsing. The following command displays the variable values after the
+configuration files (i.e. ``local.conf``, ``bblayers.conf``,
+``bitbake.conf`` and so forth) have been parsed::
+
+ $ bitbake -e
+
+The following command displays variable values after a specific recipe has
+been parsed. The variables include those from the configuration as well::
+
+ $ bitbake -e recipename
+
+.. note::
+
+ Each recipe has its own private set of variables (datastore).
+ Internally, after parsing the configuration, a copy of the resulting
+ datastore is made prior to parsing each recipe. This copying implies
+ that variables set in one recipe will not be visible to other
+ recipes.
+
+ Likewise, each task within a recipe gets a private datastore based on
+ the recipe datastore, which means that variables set within one task
+ will not be visible to other tasks.
+
+In the output of ``bitbake -e``, each variable is preceded by a
+description of how the variable got its value, including temporary
+values that were later overridden. This description also includes
+variable flags (varflags) set on the variable. The output can be very
+helpful during debugging.
+
+Variables that are exported to the environment are preceded by
+``export`` in the output of ``bitbake -e``. See the following example::
+
+ export CC="i586-poky-linux-gcc -m32 -march=i586 --sysroot=/home/ulf/poky/build/tmp/sysroots/qemux86"
+
+In addition to variable values, the output of the ``bitbake -e`` and
+``bitbake -e`` recipe commands includes the following information:
+
+- The output starts with a tree listing all configuration files and
+ classes included globally, recursively listing the files they include
+ or inherit in turn. Much of the behavior of the OpenEmbedded build
+ system (including the behavior of the :ref:`ref-manual/tasks:normal recipe build tasks`) is
+ implemented in the :ref:`ref-classes-base` class and the
+ classes it inherits, rather than being built into BitBake itself.
+
+- After the variable values, all functions appear in the output. For
+ shell functions, variables referenced within the function body are
+ expanded. If a function has been modified using overrides or using
+ override-style operators like ``:append`` and ``:prepend``, then the
+ final assembled function body appears in the output.
+
+Viewing Package Information with ``oe-pkgdata-util``
+====================================================
+
+You can use the ``oe-pkgdata-util`` command-line utility to query
+:term:`PKGDATA_DIR` and display
+various package-related information. When you use the utility, you must
+use it to view information on packages that have already been built.
+
+Here are a few of the available ``oe-pkgdata-util`` subcommands.
+
+.. note::
+
+ You can use the standard \* and ? globbing wildcards as part of
+ package names and paths.
+
+- ``oe-pkgdata-util list-pkgs [pattern]``: Lists all packages
+ that have been built, optionally limiting the match to packages that
+ match pattern.
+
+- ``oe-pkgdata-util list-pkg-files package ...``: Lists the
+ files and directories contained in the given packages.
+
+ .. note::
+
+ A different way to view the contents of a package is to look at
+ the
+ ``${``\ :term:`WORKDIR`\ ``}/packages-split``
+ directory of the recipe that generates the package. This directory
+ is created by the
+ :ref:`ref-tasks-package` task
+ and has one subdirectory for each package the recipe generates,
+ which contains the files stored in that package.
+
+ If you want to inspect the ``${WORKDIR}/packages-split``
+ directory, make sure that :ref:`ref-classes-rm-work` is not
+ enabled when you build the recipe.
+
+- ``oe-pkgdata-util find-path path ...``: Lists the names of
+ the packages that contain the given paths. For example, the following
+ tells us that ``/usr/share/man/man1/make.1`` is contained in the
+ ``make-doc`` package::
+
+ $ oe-pkgdata-util find-path /usr/share/man/man1/make.1
+ make-doc: /usr/share/man/man1/make.1
+
+- ``oe-pkgdata-util lookup-recipe package ...``: Lists the name
+ of the recipes that produce the given packages.
+
+For more information on the ``oe-pkgdata-util`` command, use the help
+facility::
+
+ $ oe-pkgdata-util --help
+ $ oe-pkgdata-util subcommand --help
+
+Viewing Dependencies Between Recipes and Tasks
+==============================================
+
+Sometimes it can be hard to see why BitBake wants to build other recipes
+before the one you have specified. Dependency information can help you
+understand why a recipe is built.
+
+To generate dependency information for a recipe, run the following
+command::
+
+ $ bitbake -g recipename
+
+This command writes the following files in the current directory:
+
+- ``pn-buildlist``: A list of recipes/targets involved in building
+ `recipename`. "Involved" here means that at least one task from the
+ recipe needs to run when building `recipename` from scratch. Targets
+ that are in
+ :term:`ASSUME_PROVIDED`
+ are not listed.
+
+- ``task-depends.dot``: A graph showing dependencies between tasks.
+
+The graphs are in :wikipedia:`DOT <DOT_%28graph_description_language%29>`
+format and can be converted to images (e.g. using the ``dot`` tool from
+`Graphviz <https://www.graphviz.org/>`__).
+
+.. note::
+
+ - DOT files use a plain text format. The graphs generated using the
+ ``bitbake -g`` command are often so large as to be difficult to
+ read without special pruning (e.g. with BitBake's ``-I`` option)
+ and processing. Despite the form and size of the graphs, the
+ corresponding ``.dot`` files can still be possible to read and
+ provide useful information.
+
+ As an example, the ``task-depends.dot`` file contains lines such
+ as the following::
+
+ "libxslt.do_configure" -> "libxml2.do_populate_sysroot"
+
+ The above example line reveals that the
+ :ref:`ref-tasks-configure`
+ task in ``libxslt`` depends on the
+ :ref:`ref-tasks-populate_sysroot`
+ task in ``libxml2``, which is a normal
+ :term:`DEPENDS` dependency
+ between the two recipes.
+
+ - For an example of how ``.dot`` files can be processed, see the
+ ``scripts/contrib/graph-tool`` Python script, which finds and
+ displays paths between graph nodes.
+
+You can use a different method to view dependency information by using
+either::
+
+ $ bitbake -g -u taskexp recipename
+
+or::
+
+ $ bitbake -g -u taskexp_ncurses recipename
+
+The ``-u taskdep`` option GUI window from which you can view build-time and
+runtime dependencies for the recipes involved in building recipename. The
+``-u taskexp_ncurses`` option uses ncurses instead of GTK to render the UI.
+
+Viewing Task Variable Dependencies
+==================================
+
+As mentioned in the
+":ref:`bitbake-user-manual/bitbake-user-manual-execution:checksums (signatures)`"
+section of the BitBake User Manual, BitBake tries to automatically determine
+what variables a task depends on so that it can rerun the task if any values of
+the variables change. This determination is usually reliable. However, if you
+do things like construct variable names at runtime, then you might have to
+manually declare dependencies on those variables using ``vardeps`` as described
+in the ":ref:`bitbake-user-manual/bitbake-user-manual-metadata:variable flags`"
+section of the BitBake User Manual.
+
+If you are unsure whether a variable dependency is being picked up
+automatically for a given task, you can list the variable dependencies
+BitBake has determined by doing the following:
+
+#. Build the recipe containing the task::
+
+ $ bitbake recipename
+
+#. Inside the :term:`STAMPS_DIR`
+ directory, find the signature data (``sigdata``) file that
+ corresponds to the task. The ``sigdata`` files contain a pickled
+ Python database of all the metadata that went into creating the input
+ checksum for the task. As an example, for the
+ :ref:`ref-tasks-fetch` task of the
+ ``db`` recipe, the ``sigdata`` file might be found in the following
+ location::
+
+ ${BUILDDIR}/tmp/stamps/i586-poky-linux/db/6.0.30-r1.do_fetch.sigdata.7c048c18222b16ff0bcee2000ef648b1
+
+ For tasks that are accelerated through the shared state
+ (:ref:`sstate <overview-manual/concepts:shared state cache>`) cache, an
+ additional ``siginfo`` file is written into
+ :term:`SSTATE_DIR` along with
+ the cached task output. The ``siginfo`` files contain exactly the
+ same information as ``sigdata`` files.
+
+#. Run ``bitbake-dumpsig`` on the ``sigdata`` or ``siginfo`` file. Here
+ is an example::
+
+ $ bitbake-dumpsig ${BUILDDIR}/tmp/stamps/i586-poky-linux/db/6.0.30-r1.do_fetch.sigdata.7c048c18222b16ff0bcee2000ef648b1
+
+ In the output of the above command, you will find a line like the
+ following, which lists all the (inferred) variable dependencies for
+ the task. This list also includes indirect dependencies from
+ variables depending on other variables, recursively::
+
+ Task dependencies: ['PV', 'SRCREV', 'SRC_URI', 'SRC_URI[sha256sum]', 'base_do_fetch']
+
+ .. note::
+
+ Functions (e.g. ``base_do_fetch``) also count as variable dependencies.
+ These functions in turn depend on the variables they reference.
+
+ The output of ``bitbake-dumpsig`` also includes the value each
+ variable had, a list of dependencies for each variable, and
+ :term:`BB_BASEHASH_IGNORE_VARS`
+ information.
+
+Debugging signature construction and unexpected task executions
+===============================================================
+
+There is a ``bitbake-diffsigs`` command for comparing two
+``siginfo`` or ``sigdata`` files. This command can be helpful when
+trying to figure out what changed between two versions of a task. If you
+call ``bitbake-diffsigs`` with just one file, the command behaves like
+``bitbake-dumpsig``.
+
+You can also use BitBake to dump out the signature construction
+information without executing tasks by using either of the following
+BitBake command-line options::
+
+ ‐‐dump-signatures=SIGNATURE_HANDLER
+ -S SIGNATURE_HANDLER
+
+
+.. note::
+
+ Two common values for `SIGNATURE_HANDLER` are "none" and "printdiff", which
+ dump only the signature or compare the dumped signature with the most recent one,
+ respectively. "printdiff" will try to establish the most recent
+ signature match (e.g. in the sstate cache) and then
+ compare the matched signatures to determine the stamps and delta
+ where these two stamp trees diverge. This can be used to determine why
+ tasks need to be re-run in situations where that is not expected.
+
+Using BitBake with either of these options causes BitBake to dump out
+``sigdata`` files in the ``stamps`` directory for every task it would
+have executed instead of building the specified target package.
+
+Viewing Metadata Used to Create the Input Signature of a Shared State Task
+==========================================================================
+
+Seeing what metadata went into creating the input signature of a shared
+state (sstate) task can be a useful debugging aid. This information is
+available in signature information (``siginfo``) files in
+:term:`SSTATE_DIR`. For
+information on how to view and interpret information in ``siginfo``
+files, see the
+":ref:`dev-manual/debugging:viewing task variable dependencies`" section.
+
+For conceptual information on shared state, see the
+":ref:`overview-manual/concepts:shared state`"
+section in the Yocto Project Overview and Concepts Manual.
+
+Invalidating Shared State to Force a Task to Run
+================================================
+
+The OpenEmbedded build system uses
+:ref:`checksums <overview-manual/concepts:checksums (signatures)>` and
+:ref:`overview-manual/concepts:shared state` cache to avoid unnecessarily
+rebuilding tasks. Collectively, this scheme is known as "shared state
+code".
+
+As with all schemes, this one has some drawbacks. It is possible that
+you could make implicit changes to your code that the checksum
+calculations do not take into account. These implicit changes affect a
+task's output but do not trigger the shared state code into rebuilding a
+recipe. Consider an example during which a tool changes its output.
+Assume that the output of ``rpmdeps`` changes. The result of the change
+should be that all the ``package`` and ``package_write_rpm`` shared
+state cache items become invalid. However, because the change to the
+output is external to the code and therefore implicit, the associated
+shared state cache items do not become invalidated. In this case, the
+build process uses the cached items rather than running the task again.
+Obviously, these types of implicit changes can cause problems.
+
+To avoid these problems during the build, you need to understand the
+effects of any changes you make. Realize that changes you make directly
+to a function are automatically factored into the checksum calculation.
+Thus, these explicit changes invalidate the associated area of shared
+state cache. However, you need to be aware of any implicit changes that
+are not obvious changes to the code and could affect the output of a
+given task.
+
+When you identify an implicit change, you can easily take steps to
+invalidate the cache and force the tasks to run. The steps you can take
+are as simple as changing a function's comments in the source code. For
+example, to invalidate package shared state files, change the comment
+statements of
+:ref:`ref-tasks-package` or the
+comments of one of the functions it calls. Even though the change is
+purely cosmetic, it causes the checksum to be recalculated and forces
+the build system to run the task again.
+
+.. note::
+
+ For an example of a commit that makes a cosmetic change to invalidate
+ shared state, see this
+ :yocto_git:`commit </poky/commit/meta/classes/package.bbclass?id=737f8bbb4f27b4837047cb9b4fbfe01dfde36d54>`.
+
+Running Specific Tasks
+======================
+
+Any given recipe consists of a set of tasks. The standard BitBake
+behavior in most cases is: :ref:`ref-tasks-fetch`, :ref:`ref-tasks-unpack`, :ref:`ref-tasks-patch`,
+:ref:`ref-tasks-configure`, :ref:`ref-tasks-compile`, :ref:`ref-tasks-install`, :ref:`ref-tasks-package`,
+:ref:`do_package_write_* <ref-tasks-package_write_deb>`, and :ref:`ref-tasks-build`. The default task is
+:ref:`ref-tasks-build` and any tasks on which it depends build first. Some tasks,
+such as :ref:`ref-tasks-devshell`, are not part of the default build chain. If you
+wish to run a task that is not part of the default build chain, you can
+use the ``-c`` option in BitBake. Here is an example::
+
+ $ bitbake matchbox-desktop -c devshell
+
+The ``-c`` option respects task dependencies, which means that all other
+tasks (including tasks from other recipes) that the specified task
+depends on will be run before the task. Even when you manually specify a
+task to run with ``-c``, BitBake will only run the task if it considers
+it "out of date". See the
+":ref:`overview-manual/concepts:stamp files and the rerunning of tasks`"
+section in the Yocto Project Overview and Concepts Manual for how
+BitBake determines whether a task is "out of date".
+
+If you want to force an up-to-date task to be rerun (e.g. because you
+made manual modifications to the recipe's
+:term:`WORKDIR` that you want to try
+out), then you can use the ``-f`` option.
+
+.. note::
+
+ The reason ``-f`` is never required when running the
+ :ref:`ref-tasks-devshell` task is because the
+ [\ :ref:`nostamp <bitbake-user-manual/bitbake-user-manual-metadata:variable flags>`\ ]
+ variable flag is already set for the task.
+
+The following example shows one way you can use the ``-f`` option::
+
+ $ bitbake matchbox-desktop
+ .
+ .
+ make some changes to the source code in the work directory
+ .
+ .
+ $ bitbake matchbox-desktop -c compile -f
+ $ bitbake matchbox-desktop
+
+This sequence first builds and then recompiles ``matchbox-desktop``. The
+last command reruns all tasks (basically the packaging tasks) after the
+compile. BitBake recognizes that the :ref:`ref-tasks-compile` task was rerun and
+therefore understands that the other tasks also need to be run again.
+
+Another, shorter way to rerun a task and all
+:ref:`ref-manual/tasks:normal recipe build tasks`
+that depend on it is to use the ``-C`` option.
+
+.. note::
+
+ This option is upper-cased and is separate from the ``-c``
+ option, which is lower-cased.
+
+Using this option invalidates the given task and then runs the
+:ref:`ref-tasks-build` task, which is
+the default task if no task is given, and the tasks on which it depends.
+You could replace the final two commands in the previous example with
+the following single command::
+
+ $ bitbake matchbox-desktop -C compile
+
+Internally, the ``-f`` and ``-C`` options work by tainting (modifying)
+the input checksum of the specified task. This tainting indirectly
+causes the task and its dependent tasks to be rerun through the normal
+task dependency mechanisms.
+
+.. note::
+
+ BitBake explicitly keeps track of which tasks have been tainted in
+ this fashion, and will print warnings such as the following for
+ builds involving such tasks:
+
+ .. code-block:: none
+
+ WARNING: /home/ulf/poky/meta/recipes-sato/matchbox-desktop/matchbox-desktop_2.1.bb.do_compile is tainted from a forced run
+
+
+ The purpose of the warning is to let you know that the work directory
+ and build output might not be in the clean state they would be in for
+ a "normal" build, depending on what actions you took. To get rid of
+ such warnings, you can remove the work directory and rebuild the
+ recipe, as follows::
+
+ $ bitbake matchbox-desktop -c clean
+ $ bitbake matchbox-desktop
+
+
+You can view a list of tasks in a given package by running the
+:ref:`ref-tasks-listtasks` task as follows::
+
+ $ bitbake matchbox-desktop -c listtasks
+
+The results appear as output to the console and are also in
+the file ``${WORKDIR}/temp/log.do_listtasks``.
+
+General BitBake Problems
+========================
+
+You can see debug output from BitBake by using the ``-D`` option. The
+debug output gives more information about what BitBake is doing and the
+reason behind it. Each ``-D`` option you use increases the logging
+level. The most common usage is ``-DDD``.
+
+The output from ``bitbake -DDD -v targetname`` can reveal why BitBake
+chose a certain version of a package or why BitBake picked a certain
+provider. This command could also help you in a situation where you
+think BitBake did something unexpected.
+
+Building with No Dependencies
+=============================
+
+To build a specific recipe (``.bb`` file), you can use the following
+command form::
+
+ $ bitbake -b somepath/somerecipe.bb
+
+This command form does
+not check for dependencies. Consequently, you should use it only when
+you know existing dependencies have been met.
+
+.. note::
+
+ You can also specify fragments of the filename. In this case, BitBake
+ checks for a unique match.
+
+Recipe Logging Mechanisms
+=========================
+
+The Yocto Project provides several logging functions for producing
+debugging output and reporting errors and warnings. For Python
+functions, the following logging functions are available. All of these functions
+log to ``${T}/log.do_``\ `task`, and can also log to standard output
+(stdout) with the right settings:
+
+- ``bb.plain(msg)``: Writes msg as is to the log while also
+ logging to stdout.
+
+- ``bb.note(msg)``: Writes "NOTE: msg" to the log. Also logs to
+ stdout if BitBake is called with "-v".
+
+- ``bb.debug(level, msg)``: Writes "DEBUG: msg" to the log. Also logs to
+ stdout if the log level is greater than or equal to level. See the
+ ":ref:`bitbake-user-manual/bitbake-user-manual-intro:usage and syntax`"
+ option in the BitBake User Manual for more information.
+
+- ``bb.warn(msg)``: Writes "WARNING: msg" to the log while also
+ logging to stdout.
+
+- ``bb.error(msg)``: Writes "ERROR: msg" to the log while also
+ logging to standard out (stdout).
+
+ .. note::
+
+ Calling this function does not cause the task to fail.
+
+- ``bb.fatal(msg)``: This logging function is similar to
+ ``bb.error(msg)`` but also causes the calling task to fail.
+
+ .. note::
+
+ ``bb.fatal()`` raises an exception, which means you do not need to put a
+ "return" statement after the function.
+
+The same logging functions are also available in shell functions, under
+the names ``bbplain``, ``bbnote``, ``bbdebug``, ``bbwarn``, ``bberror``,
+and ``bbfatal``. The :ref:`ref-classes-logging` class
+implements these functions. See that class in the ``meta/classes``
+folder of the :term:`Source Directory` for information.
+
+Logging With Python
+-------------------
+
+When creating recipes using Python and inserting code that handles build
+logs, keep in mind the goal is to have informative logs while keeping
+the console as "silent" as possible. Also, if you want status messages
+in the log, use the "debug" loglevel.
+
+Here is an example written in Python. The code handles logging for
+a function that determines the number of tasks needed to be run. See the
+":ref:`ref-tasks-listtasks`"
+section for additional information::
+
+ python do_listtasks() {
+ bb.debug(2, "Starting to figure out the task list")
+ if noteworthy_condition:
+ bb.note("There are 47 tasks to run")
+ bb.debug(2, "Got to point xyz")
+ if warning_trigger:
+ bb.warn("Detected warning_trigger, this might be a problem later.")
+ if recoverable_error:
+ bb.error("Hit recoverable_error, you really need to fix this!")
+ if fatal_error:
+ bb.fatal("fatal_error detected, unable to print the task list")
+ bb.plain("The tasks present are abc")
+ bb.debug(2, "Finished figuring out the tasklist")
+ }
+
+Logging With Bash
+-----------------
+
+When creating recipes using Bash and inserting code that handles build
+logs, you have the same goals --- informative with minimal console output.
+The syntax you use for recipes written in Bash is similar to that of
+recipes written in Python described in the previous section.
+
+Here is an example written in Bash. The code logs the progress of
+the ``do_my_function`` function::
+
+ do_my_function() {
+ bbdebug 2 "Running do_my_function"
+ if [ exceptional_condition ]; then
+ bbnote "Hit exceptional_condition"
+ fi
+ bbdebug 2 "Got to point xyz"
+ if [ warning_trigger ]; then
+ bbwarn "Detected warning_trigger, this might cause a problem later."
+ fi
+ if [ recoverable_error ]; then
+ bberror "Hit recoverable_error, correcting"
+ fi
+ if [ fatal_error ]; then
+ bbfatal "fatal_error detected"
+ fi
+ bbdebug 2 "Completed do_my_function"
+ }
+
+
+Debugging Parallel Make Races
+=============================
+
+A parallel ``make`` race occurs when the build consists of several parts
+that are run simultaneously and a situation occurs when the output or
+result of one part is not ready for use with a different part of the
+build that depends on that output. Parallel make races are annoying and
+can sometimes be difficult to reproduce and fix. However, there are some simple
+tips and tricks that can help you debug and fix them. This section
+presents a real-world example of an error encountered on the Yocto
+Project autobuilder and the process used to fix it.
+
+.. note::
+
+ If you cannot properly fix a ``make`` race condition, you can work around it
+ by clearing either the :term:`PARALLEL_MAKE` or :term:`PARALLEL_MAKEINST`
+ variables.
+
+The Failure
+-----------
+
+For this example, assume that you are building an image that depends on
+the "neard" package. And, during the build, BitBake runs into problems
+and creates the following output.
+
+.. note::
+
+ This example log file has longer lines artificially broken to make
+ the listing easier to read.
+
+If you examine the output or the log file, you see the failure during
+``make``:
+
+.. code-block:: none
+
+ | DEBUG: SITE files ['endian-little', 'bit-32', 'ix86-common', 'common-linux', 'common-glibc', 'i586-linux', 'common']
+ | DEBUG: Executing shell function do_compile
+ | NOTE: make -j 16
+ | make --no-print-directory all-am
+ | /bin/mkdir -p include/near
+ | /bin/mkdir -p include/near
+ | /bin/mkdir -p include/near
+ | ln -s /home/pokybuild/yocto-autobuilder/nightly-x86/build/build/tmp/work/i586-poky-linux/neard/
+ 0.14-r0/neard-0.14/include/types.h include/near/types.h
+ | ln -s /home/pokybuild/yocto-autobuilder/nightly-x86/build/build/tmp/work/i586-poky-linux/neard/
+ 0.14-r0/neard-0.14/include/log.h include/near/log.h
+ | ln -s /home/pokybuild/yocto-autobuilder/nightly-x86/build/build/tmp/work/i586-poky-linux/neard/
+ 0.14-r0/neard-0.14/include/plugin.h include/near/plugin.h
+ | /bin/mkdir -p include/near
+ | /bin/mkdir -p include/near
+ | /bin/mkdir -p include/near
+ | ln -s /home/pokybuild/yocto-autobuilder/nightly-x86/build/build/tmp/work/i586-poky-linux/neard/
+ 0.14-r0/neard-0.14/include/tag.h include/near/tag.h
+ | /bin/mkdir -p include/near
+ | ln -s /home/pokybuild/yocto-autobuilder/nightly-x86/build/build/tmp/work/i586-poky-linux/neard/
+ 0.14-r0/neard-0.14/include/adapter.h include/near/adapter.h
+ | /bin/mkdir -p include/near
+ | ln -s /home/pokybuild/yocto-autobuilder/nightly-x86/build/build/tmp/work/i586-poky-linux/neard/
+ 0.14-r0/neard-0.14/include/ndef.h include/near/ndef.h
+ | ln -s /home/pokybuild/yocto-autobuilder/nightly-x86/build/build/tmp/work/i586-poky-linux/neard/
+ 0.14-r0/neard-0.14/include/tlv.h include/near/tlv.h
+ | /bin/mkdir -p include/near
+ | /bin/mkdir -p include/near
+ | ln -s /home/pokybuild/yocto-autobuilder/nightly-x86/build/build/tmp/work/i586-poky-linux/neard/
+ 0.14-r0/neard-0.14/include/setting.h include/near/setting.h
+ | /bin/mkdir -p include/near
+ | /bin/mkdir -p include/near
+ | /bin/mkdir -p include/near
+ | ln -s /home/pokybuild/yocto-autobuilder/nightly-x86/build/build/tmp/work/i586-poky-linux/neard/
+ 0.14-r0/neard-0.14/include/device.h include/near/device.h
+ | ln -s /home/pokybuild/yocto-autobuilder/nightly-x86/build/build/tmp/work/i586-poky-linux/neard/
+ 0.14-r0/neard-0.14/include/nfc_copy.h include/near/nfc_copy.h
+ | ln -s /home/pokybuild/yocto-autobuilder/nightly-x86/build/build/tmp/work/i586-poky-linux/neard/
+ 0.14-r0/neard-0.14/include/snep.h include/near/snep.h
+ | ln -s /home/pokybuild/yocto-autobuilder/nightly-x86/build/build/tmp/work/i586-poky-linux/neard/
+ 0.14-r0/neard-0.14/include/version.h include/near/version.h
+ | ln -s /home/pokybuild/yocto-autobuilder/nightly-x86/build/build/tmp/work/i586-poky-linux/neard/
+ 0.14-r0/neard-0.14/include/dbus.h include/near/dbus.h
+ | ./src/genbuiltin nfctype1 nfctype2 nfctype3 nfctype4 p2p > src/builtin.h
+ | i586-poky-linux-gcc -m32 -march=i586 --sysroot=/home/pokybuild/yocto-autobuilder/nightly-x86/
+ build/build/tmp/sysroots/qemux86 -DHAVE_CONFIG_H -I. -I./include -I./src -I./gdbus -I/home/pokybuild/
+ yocto-autobuilder/nightly-x86/build/build/tmp/sysroots/qemux86/usr/include/glib-2.0
+ -I/home/pokybuild/yocto-autobuilder/nightly-x86/build/build/tmp/sysroots/qemux86/usr/
+ lib/glib-2.0/include -I/home/pokybuild/yocto-autobuilder/nightly-x86/build/build/
+ tmp/sysroots/qemux86/usr/include/dbus-1.0 -I/home/pokybuild/yocto-autobuilder/
+ nightly-x86/build/build/tmp/sysroots/qemux86/usr/lib/dbus-1.0/include -I/home/pokybuild/yocto-autobuilder/
+ nightly-x86/build/build/tmp/sysroots/qemux86/usr/include/libnl3
+ -DNEAR_PLUGIN_BUILTIN -DPLUGINDIR=\""/usr/lib/near/plugins"\"
+ -DCONFIGDIR=\""/etc/neard\"" -O2 -pipe -g -feliminate-unused-debug-types -c
+ -o tools/snep-send.o tools/snep-send.c
+ | In file included from tools/snep-send.c:16:0:
+ | tools/../src/near.h:41:23: fatal error: near/dbus.h: No such file or directory
+ | #include <near/dbus.h>
+ | ^
+ | compilation terminated.
+ | make[1]: *** [tools/snep-send.o] Error 1
+ | make[1]: *** Waiting for unfinished jobs....
+ | make: *** [all] Error 2
+ | ERROR: oe_runmake failed
+
+Reproducing the Error
+---------------------
+
+Because race conditions are intermittent, they do not manifest
+themselves every time you do the build. In fact, most times the build
+will complete without problems even though the potential race condition
+exists. Thus, once the error surfaces, you need a way to reproduce it.
+
+In this example, compiling the "neard" package is causing the problem.
+So the first thing to do is build "neard" locally. Before you start the
+build, set the
+:term:`PARALLEL_MAKE` variable
+in your ``local.conf`` file to a high number (e.g. "-j 20"). Using a
+high value for :term:`PARALLEL_MAKE` increases the chances of the race
+condition showing up::
+
+ $ bitbake neard
+
+Once the local build for "neard" completes, start a ``devshell`` build::
+
+ $ bitbake neard -c devshell
+
+For information on how to use a ``devshell``, see the
+":ref:`dev-manual/development-shell:using a development shell`" section.
+
+In the ``devshell``, do the following::
+
+ $ make clean
+ $ make tools/snep-send.o
+
+The ``devshell`` commands cause the failure to clearly
+be visible. In this case, there is a missing dependency for the ``neard``
+Makefile target. Here is some abbreviated, sample output with the
+missing dependency clearly visible at the end::
+
+ i586-poky-linux-gcc -m32 -march=i586 --sysroot=/home/scott-lenovo/......
+ .
+ .
+ .
+ tools/snep-send.c
+ In file included from tools/snep-send.c:16:0:
+ tools/../src/near.h:41:23: fatal error: near/dbus.h: No such file or directory
+ #include <near/dbus.h>
+ ^
+ compilation terminated.
+ make: *** [tools/snep-send.o] Error 1
+ $
+
+
+Creating a Patch for the Fix
+----------------------------
+
+Because there is a missing dependency for the Makefile target, you need
+to patch the ``Makefile.am`` file, which is generated from
+``Makefile.in``. You can use Quilt to create the patch::
+
+ $ quilt new parallelmake.patch
+ Patch patches/parallelmake.patch is now on top
+ $ quilt add Makefile.am
+ File Makefile.am added to patch patches/parallelmake.patch
+
+For more information on using Quilt, see the
+":ref:`dev-manual/quilt:using quilt in your workflow`" section.
+
+At this point you need to make the edits to ``Makefile.am`` to add the
+missing dependency. For our example, you have to add the following line
+to the file::
+
+ tools/snep-send.$(OBJEXT): include/near/dbus.h
+
+Once you have edited the file, use the ``refresh`` command to create the
+patch::
+
+ $ quilt refresh
+ Refreshed patch patches/parallelmake.patch
+
+Once the patch file is created, you need to add it back to the originating
+recipe folder. Here is an example assuming a top-level
+:term:`Source Directory` named ``poky``::
+
+ $ cp patches/parallelmake.patch poky/meta/recipes-connectivity/neard/neard
+
+The final thing you need to do to implement the fix in the build is to
+update the "neard" recipe (i.e. ``neard-0.14.bb``) so that the
+:term:`SRC_URI` statement includes
+the patch file. The recipe file is in the folder above the patch. Here
+is what the edited :term:`SRC_URI` statement would look like::
+
+ SRC_URI = "${KERNELORG_MIRROR}/linux/network/nfc/${BPN}-${PV}.tar.xz \
+ file://neard.in \
+ file://neard.service.in \
+ file://parallelmake.patch \
+ "
+
+With the patch complete and moved to the correct folder and the
+:term:`SRC_URI` statement updated, you can exit the ``devshell``::
+
+ $ exit
+
+Testing the Build
+-----------------
+
+With everything in place, you can get back to trying the build again
+locally::
+
+ $ bitbake neard
+
+This build should succeed.
+
+Now you can open up a ``devshell`` again and repeat the clean and make
+operations as follows::
+
+ $ bitbake neard -c devshell
+ $ make clean
+ $ make tools/snep-send.o
+
+The build should work without issue.
+
+As with all solved problems, if they originated upstream, you need to
+submit the fix for the recipe in OE-Core and upstream so that the
+problem is taken care of at its source. See the
+":doc:`../contributor-guide/submit-changes`" section for more information.
+
+Debugging With the GNU Project Debugger (GDB) Remotely
+======================================================
+
+GDB allows you to examine running programs, which in turn helps you to
+understand and fix problems. It also allows you to perform post-mortem
+style analysis of program crashes. GDB is available as a package within
+the Yocto Project and is installed in SDK images by default. See the
+":ref:`ref-manual/images:Images`" chapter in the Yocto
+Project Reference Manual for a description of these images. You can find
+information on GDB at https://sourceware.org/gdb/.
+
+.. note::
+
+ For best results, install debug (``-dbg``) packages for the applications you
+ are going to debug. Doing so makes extra debug symbols available that give
+ you more meaningful output.
+
+Sometimes, due to memory or disk space constraints, it is not possible
+to use GDB directly on the remote target to debug applications. These
+constraints arise because GDB needs to load the debugging information
+and the binaries of the process being debugged. Additionally, GDB needs
+to perform many computations to locate information such as function
+names, variable names and values, stack traces and so forth --- even
+before starting the debugging process. These extra computations place
+more load on the target system and can alter the characteristics of the
+program being debugged.
+
+To help get past the previously mentioned constraints, there are two
+methods you can use: running a debuginfod server and using gdbserver.
+
+Using the debuginfod server method
+----------------------------------
+
+``debuginfod`` from ``elfutils`` is a way to distribute ``debuginfo`` files.
+Running a ``debuginfod`` server makes debug symbols readily available,
+which means you don't need to download debugging information
+and the binaries of the process being debugged. You can just fetch
+debug symbols from the server.
+
+To run a ``debuginfod`` server, you need to do the following:
+
+- Ensure that ``debuginfod`` is present in :term:`DISTRO_FEATURES`
+ (it already is in ``OpenEmbedded-core`` defaults and ``poky`` reference distribution).
+ If not, set in your distro config file or in ``local.conf``::
+
+ DISTRO_FEATURES:append = " debuginfod"
+
+ This distro feature enables the server and client library in ``elfutils``,
+ and enables ``debuginfod`` support in clients (at the moment, ``gdb`` and ``binutils``).
+
+- Run the following commands to launch the ``debuginfod`` server on the host::
+
+ $ oe-debuginfod
+
+- To use ``debuginfod`` on the target, you need to know the ip:port where
+ ``debuginfod`` is listening on the host (port defaults to 8002), and export
+ that into the shell environment, for example in ``qemu``::
+
+ root@qemux86-64:~# export DEBUGINFOD_URLS="http://192.168.7.1:8002/"
+
+- Then debug info fetching should simply work when running the target ``gdb``,
+ ``readelf`` or ``objdump``, for example::
+
+ root@qemux86-64:~# gdb /bin/cat
+ ...
+ Reading symbols from /bin/cat...
+ Downloading separate debug info for /bin/cat...
+ Reading symbols from /home/root/.cache/debuginfod_client/923dc4780cfbc545850c616bffa884b6b5eaf322/debuginfo...
+
+- It's also possible to use ``debuginfod-find`` to just query the server::
+
+ root@qemux86-64:~# debuginfod-find debuginfo /bin/ls
+ /home/root/.cache/debuginfod_client/356edc585f7f82d46f94fcb87a86a3fe2d2e60bd/debuginfo
+
+
+Using the gdbserver method
+--------------------------
+
+gdbserver, which runs on the remote target and does not load any
+debugging information from the debugged process. Instead, a GDB instance
+processes the debugging information that is run on a remote computer -
+the host GDB. The host GDB then sends control commands to gdbserver to
+make it stop or start the debugged program, as well as read or write
+memory regions of that debugged program. All the debugging information
+loaded and processed as well as all the heavy debugging is done by the
+host GDB. Offloading these processes gives the gdbserver running on the
+target a chance to remain small and fast.
+
+Because the host GDB is responsible for loading the debugging
+information and for doing the necessary processing to make actual
+debugging happen, you have to make sure the host can access the
+unstripped binaries complete with their debugging information and also
+be sure the target is compiled with no optimizations. The host GDB must
+also have local access to all the libraries used by the debugged
+program. Because gdbserver does not need any local debugging
+information, the binaries on the remote target can remain stripped.
+However, the binaries must also be compiled without optimization so they
+match the host's binaries.
+
+To remain consistent with GDB documentation and terminology, the binary
+being debugged on the remote target machine is referred to as the
+"inferior" binary. For documentation on GDB see the `GDB
+site <https://sourceware.org/gdb/documentation/>`__.
+
+The following steps show you how to debug using the GNU project
+debugger.
+
+#. *Configure your build system to construct the companion debug
+ filesystem:*
+
+ In your ``local.conf`` file, set the following::
+
+ IMAGE_GEN_DEBUGFS = "1"
+ IMAGE_FSTYPES_DEBUGFS = "tar.bz2"
+
+ These options cause the
+ OpenEmbedded build system to generate a special companion filesystem
+ fragment, which contains the matching source and debug symbols to
+ your deployable filesystem. The build system does this by looking at
+ what is in the deployed filesystem, and pulling the corresponding
+ ``-dbg`` packages.
+
+ The companion debug filesystem is not a complete filesystem, but only
+ contains the debug fragments. This filesystem must be combined with
+ the full filesystem for debugging. Subsequent steps in this procedure
+ show how to combine the partial filesystem with the full filesystem.
+
+#. *Configure the system to include gdbserver in the target filesystem:*
+
+ Make the following addition in your ``local.conf`` file::
+
+ EXTRA_IMAGE_FEATURES:append = " tools-debug"
+
+ The change makes
+ sure the ``gdbserver`` package is included.
+
+#. *Build the environment:*
+
+ Use the following command to construct the image and the companion
+ Debug Filesystem::
+
+ $ bitbake image
+
+ Build the cross GDB component and
+ make it available for debugging. Build the SDK that matches the
+ image. Building the SDK is best for a production build that can be
+ used later for debugging, especially during long term maintenance::
+
+ $ bitbake -c populate_sdk image
+
+ Alternatively, you can build the minimal toolchain components that
+ match the target. Doing so creates a smaller than typical SDK and
+ only contains a minimal set of components with which to build simple
+ test applications, as well as run the debugger::
+
+ $ bitbake meta-toolchain
+
+ A final method is to build Gdb itself within the build system::
+
+ $ bitbake gdb-cross-<architecture>
+
+ Doing so produces a temporary copy of
+ ``cross-gdb`` you can use for debugging during development. While
+ this is the quickest approach, the two previous methods in this step
+ are better when considering long-term maintenance strategies.
+
+ .. note::
+
+ If you run ``bitbake gdb-cross``, the OpenEmbedded build system suggests
+ the actual image (e.g. ``gdb-cross-i586``). The suggestion is usually the
+ actual name you want to use.
+
+#. *Set up the* ``debugfs``\ *:*
+
+ Run the following commands to set up the ``debugfs``::
+
+ $ mkdir debugfs
+ $ cd debugfs
+ $ tar xvfj build-dir/tmp/deploy/images/machine/image.rootfs.tar.bz2
+ $ tar xvfj build-dir/tmp/deploy/images/machine/image-dbg.rootfs.tar.bz2
+
+#. *Set up GDB:*
+
+ Install the SDK (if you built one) and then source the correct
+ environment file. Sourcing the environment file puts the SDK in your
+ ``PATH`` environment variable and sets ``$GDB`` to the SDK's debugger.
+
+ If you are using the build system, Gdb is located in
+ `build-dir`\ ``/tmp/sysroots/``\ `host`\ ``/usr/bin/``\ `architecture`\ ``/``\ `architecture`\ ``-gdb``
+
+#. *Boot the target:*
+
+ For information on how to run QEMU, see the `QEMU
+ Documentation <https://wiki.qemu.org/Documentation/GettingStartedDevelopers>`__.
+
+ .. note::
+
+ Be sure to verify that your host can access the target via TCP.
+
+#. *Debug a program:*
+
+ Debugging a program involves running gdbserver on the target and then
+ running Gdb on the host. The example in this step debugs ``gzip``:
+
+ .. code-block:: shell
+
+ root@qemux86:~# gdbserver localhost:1234 /bin/gzip —help
+
+ For
+ additional gdbserver options, see the `GDB Server
+ Documentation <https://www.gnu.org/software/gdb/documentation/>`__.
+
+ After running gdbserver on the target, you need to run Gdb on the
+ host and configure it and connect to the target. Use these commands::
+
+ $ cd directory-holding-the-debugfs-directory
+ $ arch-gdb
+ (gdb) set sysroot debugfs
+ (gdb) set substitute-path /usr/src/debug debugfs/usr/src/debug
+ (gdb) target remote IP-of-target:1234
+
+ At this
+ point, everything should automatically load (i.e. matching binaries,
+ symbols and headers).
+
+ .. note::
+
+ The Gdb ``set`` commands in the previous example can be placed into the
+ users ``~/.gdbinit`` file. Upon starting, Gdb automatically runs whatever
+ commands are in that file.
+
+#. *Deploying without a full image rebuild:*
+
+ In many cases, during development you want a quick method to deploy a
+ new binary to the target and debug it, without waiting for a full
+ image build.
+
+ One approach to solving this situation is to just build the component
+ you want to debug. Once you have built the component, copy the
+ executable directly to both the target and the host ``debugfs``.
+
+ If the binary is processed through the debug splitting in
+ OpenEmbedded, you should also copy the debug items (i.e. ``.debug``
+ contents and corresponding ``/usr/src/debug`` files) from the work
+ directory. Here is an example::
+
+ $ bitbake bash
+ $ bitbake -c devshell bash
+ $ cd ..
+ $ scp packages-split/bash/bin/bash target:/bin/bash
+ $ cp -a packages-split/bash-dbg/\* path/debugfs
+
+Debugging with the GNU Project Debugger (GDB) on the Target
+===========================================================
+
+The previous section addressed using GDB remotely for debugging
+purposes, which is the most usual case due to the inherent hardware
+limitations on many embedded devices. However, debugging in the target
+hardware itself is also possible with more powerful devices. This
+section describes what you need to do in order to support using GDB to
+debug on the target hardware.
+
+To support this kind of debugging, you need do the following:
+
+- Ensure that GDB is on the target. You can do this by making
+ the following addition to your ``local.conf`` file::
+
+ EXTRA_IMAGE_FEATURES:append = " tools-debug"
+
+- Ensure that debug symbols are present. You can do so by adding the
+ corresponding ``-dbg`` package to :term:`IMAGE_INSTALL`::
+
+ IMAGE_INSTALL:append = " packagename-dbg"
+
+ Alternatively, you can add the following to ``local.conf`` to include
+ all the debug symbols::
+
+ EXTRA_IMAGE_FEATURES:append = " dbg-pkgs"
+
+.. note::
+
+ To improve the debug information accuracy, you can reduce the level
+ of optimization used by the compiler. For example, when adding the
+ following line to your ``local.conf`` file, you will reduce optimization
+ from :term:`FULL_OPTIMIZATION` of "-O2" to :term:`DEBUG_OPTIMIZATION`
+ of "-O -fno-omit-frame-pointer"::
+
+ DEBUG_BUILD = "1"
+
+ Consider that this will reduce the application's performance and is
+ recommended only for debugging purposes.
+
+Enabling Minidebuginfo
+======================
+
+Enabling the :term:`DISTRO_FEATURES` minidebuginfo adds a compressed ELF section ``.gnu_debugdata``
+to all binary files, containing only function names, and thus increasing the size of the
+binaries only by 5 to 10%. For comparison, full debug symbols can be 10 times as big as
+a stripped binary, and it is thus not always possible to deploy full debug symbols.
+Minidebuginfo data allows, on the one side, to retrieve a call-stack using
+GDB (command backtrace) without deploying full debug symbols to the target. It also
+allows to retrieve a symbolicated call-stack when using ``systemd-coredump`` to manage
+coredumps (commands ``coredumpctl list`` and ``coredumpctl info``).
+
+This feature was created by Fedora, see https://fedoraproject.org/wiki/Features/MiniDebugInfo for
+more details.
+
+Other Debugging Tips
+====================
+
+Here are some other tips that you might find useful:
+
+- When adding new packages, it is worth watching for undesirable items
+ making their way into compiler command lines. For example, you do not
+ want references to local system files like ``/usr/lib/`` or
+ ``/usr/include/``.
+
+- If you want to remove the ``psplash`` boot splashscreen, add
+ ``psplash=false`` to the kernel command line. Doing so prevents
+ ``psplash`` from loading and thus allows you to see the console. It
+ is also possible to switch out of the splashscreen by switching the
+ virtual console (e.g. Fn+Left or Fn+Right on a Zaurus).
+
+- Removing :term:`TMPDIR` (usually ``tmp/``, within the
+ :term:`Build Directory`) can often fix temporary build issues. Removing
+ :term:`TMPDIR` is usually a relatively cheap operation, because task output
+ will be cached in :term:`SSTATE_DIR` (usually ``sstate-cache/``, which is
+ also in the :term:`Build Directory`).
+
+ .. note::
+
+ Removing :term:`TMPDIR` might be a workaround rather than a fix.
+ Consequently, trying to determine the underlying cause of an issue before
+ removing the directory is a good idea.
+
+- Understanding how a feature is used in practice within existing
+ recipes can be very helpful. It is recommended that you configure
+ some method that allows you to quickly search through files.
+
+ Using GNU Grep, you can use the following shell function to
+ recursively search through common recipe-related files, skipping
+ binary files, ``.git`` directories, and the :term:`Build Directory`
+ (assuming its name starts with "build")::
+
+ g() {
+ grep -Ir \
+ --exclude-dir=.git \
+ --exclude-dir='build*' \
+ --include='*.bb*' \
+ --include='*.inc*' \
+ --include='*.conf*' \
+ --include='*.py*' \
+ "$@"
+ }
+
+ Here are some usage examples::
+
+ $ g FOO # Search recursively for "FOO"
+ $ g -i foo # Search recursively for "foo", ignoring case
+ $ g -w FOO # Search recursively for "FOO" as a word, ignoring e.g. "FOOBAR"
+
+ If figuring
+ out how some feature works requires a lot of searching, it might
+ indicate that the documentation should be extended or improved. In
+ such cases, consider filing a documentation bug using the Yocto
+ Project implementation of
+ :yocto_bugs:`Bugzilla <>`. For information on
+ how to submit a bug against the Yocto Project, see the Yocto Project
+ Bugzilla :yocto_wiki:`wiki page </Bugzilla_Configuration_and_Bug_Tracking>`
+ and the ":doc:`../contributor-guide/report-defect`" section.
+
+ .. note::
+
+ The manuals might not be the right place to document variables
+ that are purely internal and have a limited scope (e.g. internal
+ variables used to implement a single ``.bbclass`` file).
+
diff --git a/documentation/dev-manual/development-shell.rst b/documentation/dev-manual/development-shell.rst
new file mode 100644
index 0000000000..be26bcffc7
--- /dev/null
+++ b/documentation/dev-manual/development-shell.rst
@@ -0,0 +1,82 @@
+.. SPDX-License-Identifier: CC-BY-SA-2.0-UK
+
+Using a Development Shell
+*************************
+
+When debugging certain commands or even when just editing packages,
+``devshell`` can be a useful tool. When you invoke ``devshell``, all
+tasks up to and including
+:ref:`ref-tasks-patch` are run for the
+specified target. Then, a new terminal is opened and you are placed in
+``${``\ :term:`S`\ ``}``, the source
+directory. In the new terminal, all the OpenEmbedded build-related
+environment variables are still defined so you can use commands such as
+``configure`` and ``make``. The commands execute just as if the
+OpenEmbedded build system were executing them. Consequently, working
+this way can be helpful when debugging a build or preparing software to
+be used with the OpenEmbedded build system.
+
+Here is an example that uses ``devshell`` on a target named
+``matchbox-desktop``::
+
+ $ bitbake matchbox-desktop -c devshell
+
+This command spawns a terminal with a shell prompt within the
+OpenEmbedded build environment. The
+:term:`OE_TERMINAL` variable
+controls what type of shell is opened.
+
+For spawned terminals, the following occurs:
+
+- The ``PATH`` variable includes the cross-toolchain.
+
+- The ``pkgconfig`` variables find the correct ``.pc`` files.
+
+- The ``configure`` command finds the Yocto Project site files as well
+ as any other necessary files.
+
+Within this environment, you can run configure or compile commands as if
+they were being run by the OpenEmbedded build system itself. As noted
+earlier, the working directory also automatically changes to the Source
+Directory (:term:`S`).
+
+To manually run a specific task using ``devshell``, run the
+corresponding ``run.*`` script in the
+``${``\ :term:`WORKDIR`\ ``}/temp``
+directory (e.g., ``run.do_configure.``\ `pid`). If a task's script does
+not exist, which would be the case if the task was skipped by way of the
+sstate cache, you can create the task by first running it outside of the
+``devshell``::
+
+ $ bitbake -c task
+
+.. note::
+
+ - Execution of a task's ``run.*`` script and BitBake's execution of
+ a task are identical. In other words, running the script re-runs
+ the task just as it would be run using the ``bitbake -c`` command.
+
+ - Any ``run.*`` file that does not have a ``.pid`` extension is a
+ symbolic link (symlink) to the most recent version of that file.
+
+Remember, that the ``devshell`` is a mechanism that allows you to get
+into the BitBake task execution environment. And as such, all commands
+must be called just as BitBake would call them. That means you need to
+provide the appropriate options for cross-compilation and so forth as
+applicable.
+
+When you are finished using ``devshell``, exit the shell or close the
+terminal window.
+
+.. note::
+
+ - It is worth remembering that when using ``devshell`` you need to
+ use the full compiler name such as ``arm-poky-linux-gnueabi-gcc``
+ instead of just using ``gcc``. The same applies to other
+ applications such as ``binutils``, ``libtool`` and so forth.
+ BitBake sets up environment variables such as :term:`CC` to assist
+ applications, such as ``make`` to find the correct tools.
+
+ - It is also worth noting that ``devshell`` still works over X11
+ forwarding and similar situations.
+
diff --git a/documentation/dev-manual/device-manager.rst b/documentation/dev-manual/device-manager.rst
new file mode 100644
index 0000000000..49fc785fec
--- /dev/null
+++ b/documentation/dev-manual/device-manager.rst
@@ -0,0 +1,74 @@
+.. SPDX-License-Identifier: CC-BY-SA-2.0-UK
+
+.. _device-manager:
+
+Selecting a Device Manager
+**************************
+
+The Yocto Project provides multiple ways to manage the device manager
+(``/dev``):
+
+- Persistent and Pre-Populated ``/dev``: For this case, the ``/dev``
+ directory is persistent and the required device nodes are created
+ during the build.
+
+- Use ``devtmpfs`` with a Device Manager: For this case, the ``/dev``
+ directory is provided by the kernel as an in-memory file system and
+ is automatically populated by the kernel at runtime. Additional
+ configuration of device nodes is done in user space by a device
+ manager like ``udev`` or ``busybox-mdev``.
+
+Using Persistent and Pre-Populated ``/dev``
+===========================================
+
+To use the static method for device population, you need to set the
+:term:`USE_DEVFS` variable to "0"
+as follows::
+
+ USE_DEVFS = "0"
+
+The content of the resulting ``/dev`` directory is defined in a Device
+Table file. The
+:term:`IMAGE_DEVICE_TABLES`
+variable defines the Device Table to use and should be set in the
+machine or distro configuration file. Alternatively, you can set this
+variable in your ``local.conf`` configuration file.
+
+If you do not define the :term:`IMAGE_DEVICE_TABLES` variable, the default
+``device_table-minimal.txt`` is used::
+
+ IMAGE_DEVICE_TABLES = "device_table-mymachine.txt"
+
+The population is handled by the ``makedevs`` utility during image
+creation:
+
+Using ``devtmpfs`` and a Device Manager
+=======================================
+
+To use the dynamic method for device population, you need to use (or be
+sure to set) the :term:`USE_DEVFS`
+variable to "1", which is the default::
+
+ USE_DEVFS = "1"
+
+With this
+setting, the resulting ``/dev`` directory is populated by the kernel
+using ``devtmpfs``. Make sure the corresponding kernel configuration
+variable ``CONFIG_DEVTMPFS`` is set when building you build a Linux
+kernel.
+
+All devices created by ``devtmpfs`` will be owned by ``root`` and have
+permissions ``0600``.
+
+To have more control over the device nodes, you can use a device manager like
+``udev`` or ``busybox-mdev``. You choose the device manager by defining the
+:term:`VIRTUAL-RUNTIME_dev_manager <VIRTUAL-RUNTIME>` variable in your machine
+or distro configuration file. Alternatively, you can set this variable in
+your ``local.conf`` configuration file::
+
+ VIRTUAL-RUNTIME_dev_manager = "udev"
+
+ # Some alternative values
+ # VIRTUAL-RUNTIME_dev_manager = "busybox-mdev"
+ # VIRTUAL-RUNTIME_dev_manager = "systemd"
+
diff --git a/documentation/dev-manual/disk-space.rst b/documentation/dev-manual/disk-space.rst
new file mode 100644
index 0000000000..efca82601d
--- /dev/null
+++ b/documentation/dev-manual/disk-space.rst
@@ -0,0 +1,61 @@
+.. SPDX-License-Identifier: CC-BY-SA-2.0-UK
+
+Conserving Disk Space
+*********************
+
+Conserving Disk Space During Builds
+===================================
+
+To help conserve disk space during builds, you can add the following
+statement to your project's ``local.conf`` configuration file found in
+the :term:`Build Directory`::
+
+ INHERIT += "rm_work"
+
+Adding this statement deletes the work directory used for
+building a recipe once the recipe is built. For more information on
+"rm_work", see the :ref:`ref-classes-rm-work` class in the
+Yocto Project Reference Manual.
+
+When you inherit this class and build a ``core-image-sato`` image for a
+``qemux86-64`` machine from an Ubuntu 22.04 x86-64 system, you end up with a
+final disk usage of 22 Gbytes instead of &MIN_DISK_SPACE; Gbytes. However,
+&MIN_DISK_SPACE_RM_WORK; Gbytes of initial free disk space are still needed to
+create temporary files before they can be deleted.
+
+Purging Obsolete Shared State Cache Files
+=========================================
+
+After multiple build iterations, the Shared State (sstate) cache can contain
+multiple cache files for a given package, consuming a substantial amount of
+disk space. However, only the most recent ones are likely to be reused.
+
+The following command is a quick way to purge all the cache files which
+haven't been used for a least a specified number of days::
+
+ find build/sstate-cache -type f -mtime +$DAYS -delete
+
+The above command relies on the fact that BitBake touches the sstate cache
+files as it accesses them, when it has write access to the cache.
+
+You could use ``-atime`` instead of ``-mtime`` if the partition isn't mounted
+with the ``noatime`` option for a read only cache.
+
+For more advanced needs, OpenEmbedded-Core also offers a more elaborate
+command. It has the ability to purge all but the newest cache files on each
+architecture, and also to remove files that it considers unreachable by
+exploring a set of build configurations. However, this command
+requires a full build environment to be available and doesn't work well
+covering multiple releases. It won't work either on limited environments
+such as BSD based NAS::
+
+ sstate-cache-management.py --remove-duplicated --cache-dir=sstate-cache
+
+This command will ask you to confirm the deletions it identifies.
+Run ``sstate-cache-management.sh`` for more details about this script.
+
+.. note::
+
+ As this command is much more cautious and selective, removing only cache files,
+ it will execute much slower than the simple ``find`` command described above.
+ Therefore, it may not be your best option to trim huge cache directories.
diff --git a/documentation/dev-manual/efficiently-fetching-sources.rst b/documentation/dev-manual/efficiently-fetching-sources.rst
new file mode 100644
index 0000000000..a15f0a92ce
--- /dev/null
+++ b/documentation/dev-manual/efficiently-fetching-sources.rst
@@ -0,0 +1,68 @@
+.. SPDX-License-Identifier: CC-BY-SA-2.0-UK
+
+Efficiently Fetching Source Files During a Build
+************************************************
+
+The OpenEmbedded build system works with source files located through
+the :term:`SRC_URI` variable. When
+you build something using BitBake, a big part of the operation is
+locating and downloading all the source tarballs. For images,
+downloading all the source for various packages can take a significant
+amount of time.
+
+This section shows you how you can use mirrors to speed up fetching
+source files and how you can pre-fetch files all of which leads to more
+efficient use of resources and time.
+
+Setting up Effective Mirrors
+============================
+
+A good deal that goes into a Yocto Project build is simply downloading
+all of the source tarballs. Maybe you have been working with another
+build system for which you have built up a
+sizable directory of source tarballs. Or, perhaps someone else has such
+a directory for which you have read access. If so, you can save time by
+adding statements to your configuration file so that the build process
+checks local directories first for existing tarballs before checking the
+Internet.
+
+Here is an efficient way to set it up in your ``local.conf`` file::
+
+ SOURCE_MIRROR_URL ?= "file:///home/you/your-download-dir/"
+ INHERIT += "own-mirrors"
+ BB_GENERATE_MIRROR_TARBALLS = "1"
+ # BB_NO_NETWORK = "1"
+
+In the previous example, the
+:term:`BB_GENERATE_MIRROR_TARBALLS`
+variable causes the OpenEmbedded build system to generate tarballs of
+the Git repositories and store them in the
+:term:`DL_DIR` directory. Due to
+performance reasons, generating and storing these tarballs is not the
+build system's default behavior.
+
+You can also use the
+:term:`PREMIRRORS` variable. For
+an example, see the variable's glossary entry in the Yocto Project
+Reference Manual.
+
+Getting Source Files and Suppressing the Build
+==============================================
+
+Another technique you can use to ready yourself for a successive string
+of build operations, is to pre-fetch all the source files without
+actually starting a build. This technique lets you work through any
+download issues and ultimately gathers all the source files into your
+download directory :ref:`structure-build-downloads`,
+which is located with :term:`DL_DIR`.
+
+Use the following BitBake command form to fetch all the necessary
+sources without starting the build::
+
+ $ bitbake target --runall=fetch
+
+This
+variation of the BitBake command guarantees that you have all the
+sources for that BitBake target should you disconnect from the Internet
+and want to do the build later offline.
+
diff --git a/documentation/dev-manual/error-reporting-tool.rst b/documentation/dev-manual/error-reporting-tool.rst
new file mode 100644
index 0000000000..84f3d9cd1e
--- /dev/null
+++ b/documentation/dev-manual/error-reporting-tool.rst
@@ -0,0 +1,84 @@
+.. SPDX-License-Identifier: CC-BY-SA-2.0-UK
+
+Using the Error Reporting Tool
+******************************
+
+The error reporting tool allows you to submit errors encountered during
+builds to a central database. Outside of the build environment, you can
+use a web interface to browse errors, view statistics, and query for
+errors. The tool works using a client-server system where the client
+portion is integrated with the installed Yocto Project
+:term:`Source Directory` (e.g. ``poky``).
+The server receives the information collected and saves it in a
+database.
+
+There is a live instance of the error reporting server at
+https://errors.yoctoproject.org.
+When you want to get help with build failures, you can submit all of the
+information on the failure easily and then point to the URL in your bug
+report or send an email to the mailing list.
+
+.. note::
+
+ If you send error reports to this server, the reports become publicly
+ visible.
+
+Enabling and Using the Tool
+===========================
+
+By default, the error reporting tool is disabled. You can enable it by
+inheriting the :ref:`ref-classes-report-error` class by adding the
+following statement to the end of your ``local.conf`` file in your
+:term:`Build Directory`::
+
+ INHERIT += "report-error"
+
+By default, the error reporting feature stores information in
+``${``\ :term:`LOG_DIR`\ ``}/error-report``.
+However, you can specify a directory to use by adding the following to
+your ``local.conf`` file::
+
+ ERR_REPORT_DIR = "path"
+
+Enabling error
+reporting causes the build process to collect the errors and store them
+in a file as previously described. When the build system encounters an
+error, it includes a command as part of the console output. You can run
+the command to send the error file to the server. For example, the
+following command sends the errors to an upstream server::
+
+ $ send-error-report /home/brandusa/project/poky/build/tmp/log/error-report/error_report_201403141617.txt
+
+In the previous example, the errors are sent to a public database
+available at https://errors.yoctoproject.org, which is used by the
+entire community. If you specify a particular server, you can send the
+errors to a different database. Use the following command for more
+information on available options::
+
+ $ send-error-report --help
+
+When sending the error file, you are prompted to review the data being
+sent as well as to provide a name and optional email address. Once you
+satisfy these prompts, the command returns a link from the server that
+corresponds to your entry in the database. For example, here is a
+typical link: https://errors.yoctoproject.org/Errors/Details/9522/
+
+Following the link takes you to a web interface where you can browse,
+query the errors, and view statistics.
+
+Disabling the Tool
+==================
+
+To disable the error reporting feature, simply remove or comment out the
+following statement from the end of your ``local.conf`` file in your
+:term:`Build Directory`::
+
+ INHERIT += "report-error"
+
+Setting Up Your Own Error Reporting Server
+==========================================
+
+If you want to set up your own error reporting server, you can obtain
+the code from the Git repository at :yocto_git:`/error-report-web/`.
+Instructions on how to set it up are in the README document.
+
diff --git a/documentation/dev-manual/external-scm.rst b/documentation/dev-manual/external-scm.rst
new file mode 100644
index 0000000000..97a7e63e36
--- /dev/null
+++ b/documentation/dev-manual/external-scm.rst
@@ -0,0 +1,67 @@
+.. SPDX-License-Identifier: CC-BY-SA-2.0-UK
+
+Using an External SCM
+*********************
+
+If you're working on a recipe that pulls from an external Source Code
+Manager (SCM), it is possible to have the OpenEmbedded build system
+notice new recipe changes added to the SCM and then build the resulting
+packages that depend on the new recipes by using the latest versions.
+This only works for SCMs from which it is possible to get a sensible
+revision number for changes. Currently, you can do this with Apache
+Subversion (SVN), Git, and Bazaar (BZR) repositories.
+
+To enable this behavior, the :term:`PV` of
+the recipe needs to reference
+:term:`SRCPV`. Here is an example::
+
+ PV = "1.2.3+git${SRCPV}"
+
+Then, you can add the following to your
+``local.conf``::
+
+ SRCREV:pn-PN = "${AUTOREV}"
+
+:term:`PN` is the name of the recipe for
+which you want to enable automatic source revision updating.
+
+If you do not want to update your local configuration file, you can add
+the following directly to the recipe to finish enabling the feature::
+
+ SRCREV = "${AUTOREV}"
+
+The Yocto Project provides a distribution named ``poky-bleeding``, whose
+configuration file contains the line::
+
+ require conf/distro/include/poky-floating-revisions.inc
+
+This line pulls in the
+listed include file that contains numerous lines of exactly that form::
+
+ #SRCREV:pn-opkg-native ?= "${AUTOREV}"
+ #SRCREV:pn-opkg-sdk ?= "${AUTOREV}"
+ #SRCREV:pn-opkg ?= "${AUTOREV}"
+ #SRCREV:pn-opkg-utils-native ?= "${AUTOREV}"
+ #SRCREV:pn-opkg-utils ?= "${AUTOREV}"
+ SRCREV:pn-gconf-dbus ?= "${AUTOREV}"
+ SRCREV:pn-matchbox-common ?= "${AUTOREV}"
+ SRCREV:pn-matchbox-config-gtk ?= "${AUTOREV}"
+ SRCREV:pn-matchbox-desktop ?= "${AUTOREV}"
+ SRCREV:pn-matchbox-keyboard ?= "${AUTOREV}"
+ SRCREV:pn-matchbox-panel-2 ?= "${AUTOREV}"
+ SRCREV:pn-matchbox-themes-extra ?= "${AUTOREV}"
+ SRCREV:pn-matchbox-terminal ?= "${AUTOREV}"
+ SRCREV:pn-matchbox-wm ?= "${AUTOREV}"
+ SRCREV:pn-settings-daemon ?= "${AUTOREV}"
+ SRCREV:pn-screenshot ?= "${AUTOREV}"
+ . . .
+
+These lines allow you to
+experiment with building a distribution that tracks the latest
+development source for numerous packages.
+
+.. note::
+
+ The ``poky-bleeding`` distribution is not tested on a regular basis. Keep
+ this in mind if you use it.
+
diff --git a/documentation/dev-manual/external-toolchain.rst b/documentation/dev-manual/external-toolchain.rst
new file mode 100644
index 0000000000..238f8cf467
--- /dev/null
+++ b/documentation/dev-manual/external-toolchain.rst
@@ -0,0 +1,40 @@
+.. SPDX-License-Identifier: CC-BY-SA-2.0-UK
+
+Optionally Using an External Toolchain
+**************************************
+
+You might want to use an external toolchain as part of your development.
+If this is the case, the fundamental steps you need to accomplish are as
+follows:
+
+- Understand where the installed toolchain resides. For cases where you
+ need to build the external toolchain, you would need to take separate
+ steps to build and install the toolchain.
+
+- Make sure you add the layer that contains the toolchain to your
+ ``bblayers.conf`` file through the
+ :term:`BBLAYERS` variable.
+
+- Set the :term:`EXTERNAL_TOOLCHAIN` variable in your ``local.conf`` file
+ to the location in which you installed the toolchain.
+
+The toolchain configuration is very flexible and customizable. It
+is primarily controlled with the :term:`TCMODE` variable. This variable
+controls which ``tcmode-*.inc`` file to include from the
+``meta/conf/distro/include`` directory within the :term:`Source Directory`.
+
+The default value of :term:`TCMODE` is "default", which tells the
+OpenEmbedded build system to use its internally built toolchain (i.e.
+``tcmode-default.inc``). However, other patterns are accepted. In
+particular, "external-\*" refers to external toolchains. One example is
+the Mentor Graphics Sourcery G++ Toolchain. Support for this toolchain resides
+in the separate ``meta-sourcery`` layer at
+https://github.com/MentorEmbedded/meta-sourcery/.
+See its ``README`` file for details about how to use this layer.
+
+Another example of external toolchain layer is
+:yocto_git:`meta-arm-toolchain </meta-arm/tree/meta-arm-toolchain/>`
+supporting GNU toolchains released by ARM.
+
+You can find further information by reading about the :term:`TCMODE` variable
+in the Yocto Project Reference Manual's variable glossary.
diff --git a/documentation/dev-manual/figures/cute-files-npm-example.png b/documentation/dev-manual/figures/cute-files-npm-example.png
index 1ebe74f535..a02cca097f 100644
--- a/documentation/dev-manual/figures/cute-files-npm-example.png
+++ b/documentation/dev-manual/figures/cute-files-npm-example.png
Binary files differ
diff --git a/documentation/dev-manual/gobject-introspection.rst b/documentation/dev-manual/gobject-introspection.rst
new file mode 100644
index 0000000000..f7206e6fae
--- /dev/null
+++ b/documentation/dev-manual/gobject-introspection.rst
@@ -0,0 +1,155 @@
+.. SPDX-License-Identifier: CC-BY-SA-2.0-UK
+
+Enabling GObject Introspection Support
+**************************************
+
+`GObject introspection <https://gi.readthedocs.io/en/latest/>`__
+is the standard mechanism for accessing GObject-based software from
+runtime environments. GObject is a feature of the GLib library that
+provides an object framework for the GNOME desktop and related software.
+GObject Introspection adds information to GObject that allows objects
+created within it to be represented across different programming
+languages. If you want to construct GStreamer pipelines using Python, or
+control UPnP infrastructure using Javascript and GUPnP, GObject
+introspection is the only way to do it.
+
+This section describes the Yocto Project support for generating and
+packaging GObject introspection data. GObject introspection data is a
+description of the API provided by libraries built on top of the GLib
+framework, and, in particular, that framework's GObject mechanism.
+GObject Introspection Repository (GIR) files go to ``-dev`` packages,
+``typelib`` files go to main packages as they are packaged together with
+libraries that are introspected.
+
+The data is generated when building such a library, by linking the
+library with a small executable binary that asks the library to describe
+itself, and then executing the binary and processing its output.
+
+Generating this data in a cross-compilation environment is difficult
+because the library is produced for the target architecture, but its
+code needs to be executed on the build host. This problem is solved with
+the OpenEmbedded build system by running the code through QEMU, which
+allows precisely that. Unfortunately, QEMU does not always work
+perfectly as mentioned in the ":ref:`dev-manual/gobject-introspection:known issues`"
+section.
+
+Enabling the Generation of Introspection Data
+=============================================
+
+Enabling the generation of introspection data (GIR files) in your
+library package involves the following:
+
+#. Inherit the :ref:`ref-classes-gobject-introspection` class.
+
+#. Make sure introspection is not disabled anywhere in the recipe or
+ from anything the recipe includes. Also, make sure that
+ "gobject-introspection-data" is not in
+ :term:`DISTRO_FEATURES_BACKFILL_CONSIDERED`
+ and that "qemu-usermode" is not in
+ :term:`MACHINE_FEATURES_BACKFILL_CONSIDERED`.
+ In either of these conditions, nothing will happen.
+
+#. Try to build the recipe. If you encounter build errors that look like
+ something is unable to find ``.so`` libraries, check where these
+ libraries are located in the source tree and add the following to the
+ recipe::
+
+ GIR_EXTRA_LIBS_PATH = "${B}/something/.libs"
+
+ .. note::
+
+ See recipes in the ``oe-core`` repository that use that
+ :term:`GIR_EXTRA_LIBS_PATH` variable as an example.
+
+#. Look for any other errors, which probably mean that introspection
+ support in a package is not entirely standard, and thus breaks down
+ in a cross-compilation environment. For such cases, custom-made fixes
+ are needed. A good place to ask and receive help in these cases is
+ the :ref:`Yocto Project mailing
+ lists <resources-mailinglist>`.
+
+.. note::
+
+ Using a library that no longer builds against the latest Yocto
+ Project release and prints introspection related errors is a good
+ candidate for the previous procedure.
+
+Disabling the Generation of Introspection Data
+==============================================
+
+You might find that you do not want to generate introspection data. Or,
+perhaps QEMU does not work on your build host and target architecture
+combination. If so, you can use either of the following methods to
+disable GIR file generations:
+
+- Add the following to your distro configuration::
+
+ DISTRO_FEATURES_BACKFILL_CONSIDERED = "gobject-introspection-data"
+
+ Adding this statement disables generating introspection data using
+ QEMU but will still enable building introspection tools and libraries
+ (i.e. building them does not require the use of QEMU).
+
+- Add the following to your machine configuration::
+
+ MACHINE_FEATURES_BACKFILL_CONSIDERED = "qemu-usermode"
+
+ Adding this statement disables the use of QEMU when building packages for your
+ machine. Currently, this feature is used only by introspection
+ recipes and has the same effect as the previously described option.
+
+ .. note::
+
+ Future releases of the Yocto Project might have other features
+ affected by this option.
+
+If you disable introspection data, you can still obtain it through other
+means such as copying the data from a suitable sysroot, or by generating
+it on the target hardware. The OpenEmbedded build system does not
+currently provide specific support for these techniques.
+
+Testing that Introspection Works in an Image
+============================================
+
+Use the following procedure to test if generating introspection data is
+working in an image:
+
+#. Make sure that "gobject-introspection-data" is not in
+ :term:`DISTRO_FEATURES_BACKFILL_CONSIDERED`
+ and that "qemu-usermode" is not in
+ :term:`MACHINE_FEATURES_BACKFILL_CONSIDERED`.
+
+#. Build ``core-image-sato``.
+
+#. Launch a Terminal and then start Python in the terminal.
+
+#. Enter the following in the terminal::
+
+ >>> from gi.repository import GLib
+ >>> GLib.get_host_name()
+
+#. For something a little more advanced, enter the following see:
+ https://python-gtk-3-tutorial.readthedocs.io/en/latest/introduction.html
+
+Known Issues
+============
+
+Here are know issues in GObject Introspection Support:
+
+- ``qemu-ppc64`` immediately crashes. Consequently, you cannot build
+ introspection data on that architecture.
+
+- x32 is not supported by QEMU. Consequently, introspection data is
+ disabled.
+
+- musl causes transient GLib binaries to crash on assertion failures.
+ Consequently, generating introspection data is disabled.
+
+- Because QEMU is not able to run the binaries correctly, introspection
+ is disabled for some specific packages under specific architectures
+ (e.g. ``gcr``, ``libsecret``, and ``webkit``).
+
+- QEMU usermode might not work properly when running 64-bit binaries
+ under 32-bit host machines. In particular, "qemumips64" is known to
+ not work under i686.
+
diff --git a/documentation/dev-manual/index.rst b/documentation/dev-manual/index.rst
index f16b135c4d..9ccf60f701 100644
--- a/documentation/dev-manual/index.rst
+++ b/documentation/dev-manual/index.rst
@@ -4,15 +4,49 @@
Yocto Project Development Tasks Manual
======================================
-|
-
.. toctree::
:caption: Table of Contents
:numbered:
intro
start
- common-tasks
+ layers
+ customizing-images
+ new-recipe
+ new-machine
+ upgrading-recipes
+ temporary-source-code
+ quilt.rst
+ development-shell
+ python-development-shell
+ building
+ speeding-up-build
+ libraries
+ prebuilt-libraries
+ x32-psabi
+ gobject-introspection
+ external-toolchain
+ wic
+ bmaptool
+ securing-images
+ custom-distribution
+ custom-template-configuration-directory
+ disk-space
+ packages
+ efficiently-fetching-sources
+ init-manager
+ device-manager
+ external-scm
+ read-only-rootfs
+ build-quality
+ runtime-testing
+ debugging
+ licenses
+ security-subjects
+ vulnerabilities
+ sbom
+ error-reporting-tool
+ wayland
qemu
.. include:: /boilerplate.rst
diff --git a/documentation/dev-manual/init-manager.rst b/documentation/dev-manual/init-manager.rst
new file mode 100644
index 0000000000..ddce82b81f
--- /dev/null
+++ b/documentation/dev-manual/init-manager.rst
@@ -0,0 +1,162 @@
+.. SPDX-License-Identifier: CC-BY-SA-2.0-UK
+
+.. _init-manager:
+
+Selecting an Initialization Manager
+***********************************
+
+By default, the Yocto Project uses :wikipedia:`SysVinit <Init#SysV-style>` as
+the initialization manager. There is also support for BusyBox init, a simpler
+implementation, as well as support for :wikipedia:`systemd <Systemd>`, which
+is a full replacement for init with parallel starting of services, reduced
+shell overhead, increased security and resource limits for services, and other
+features that are used by many distributions.
+
+Within the system, SysVinit and BusyBox init treat system components as
+services. These services are maintained as shell scripts stored in the
+``/etc/init.d/`` directory.
+
+SysVinit is more elaborate than BusyBox init and organizes services in
+different run levels. This organization is maintained by putting links
+to the services in the ``/etc/rcN.d/`` directories, where `N/` is one
+of the following options: "S", "0", "1", "2", "3", "4", "5", or "6".
+
+.. note::
+
+ Each runlevel has a dependency on the previous runlevel. This
+ dependency allows the services to work properly.
+
+Both SysVinit and BusyBox init are configured through the ``/etc/inittab``
+file, with a very similar syntax, though of course BusyBox init features
+are more limited.
+
+In comparison, systemd treats components as units. Using units is a
+broader concept as compared to using a service. A unit includes several
+different types of entities. ``Service`` is one of the types of entities.
+The runlevel concept in SysVinit corresponds to the concept of a target
+in systemd, where target is also a type of supported unit.
+
+In systems with SysVinit or BusyBox init, services load sequentially (i.e. one
+by one) during init and parallelization is not supported. With systemd, services
+start in parallel. This method can have an impact on the startup performance
+of a given service, though systemd will also provide more services by default,
+therefore increasing the total system boot time. systemd also substantially
+increases system size because of its multiple components and the extra
+dependencies it pulls.
+
+On the contrary, BusyBox init is the simplest and the lightest solution and
+also comes with BusyBox mdev as device manager, a lighter replacement to
+:wikipedia:`udev <Udev>`, which SysVinit and systemd both use.
+
+The ":ref:`device-manager`" chapter has more details about device managers.
+
+Using SysVinit with udev
+=========================
+
+SysVinit with the udev device manager corresponds to the
+default setting in Poky. This corresponds to setting::
+
+ INIT_MANAGER = "sysvinit"
+
+Using BusyBox init with BusyBox mdev
+====================================
+
+BusyBox init with BusyBox mdev is the simplest and lightest solution
+for small root filesystems. All you need is BusyBox, which most systems
+have anyway::
+
+ INIT_MANAGER = "mdev-busybox"
+
+Using systemd
+=============
+
+The last option is to use systemd together with the udev device
+manager. This is the most powerful and versatile solution, especially
+for more complex systems::
+
+ INIT_MANAGER = "systemd"
+
+This will enable systemd and remove sysvinit components from the image.
+See :yocto_git:`meta/conf/distro/include/init-manager-systemd.inc
+</poky/tree/meta/conf/distro/include/init-manager-systemd.inc>` for exact
+details on what this does.
+
+Controling systemd from the target command line
+-----------------------------------------------
+
+Here is a quick reference for controling systemd from the command line on the
+target. Instead of opening and sometimes modifying files, most interaction
+happens through the ``systemctl`` and ``journalctl`` commands:
+
+- ``systemctl status``: show the status of all services
+- ``systemctl status <service>``: show the status of one service
+- ``systemctl [start|stop] <service>``: start or stop a service
+- ``systemctl [enable|disable] <service>``: enable or disable a service at boot time
+- ``systemctl list-units``: list all available units
+- ``journalctl -a``: show all logs for all services
+- ``journalctl -f``: show only the last log entries, and keep printing updates as they arrive
+- ``journalctl -u``: show only logs from a particular service
+
+Using systemd-journald without a traditional syslog daemon
+----------------------------------------------------------
+
+Counter-intuitively, ``systemd-journald`` is not a syslog runtime or provider,
+and the proper way to use ``systemd-journald`` as your sole logging mechanism is to
+effectively disable syslog entirely by setting these variables in your distribution
+configuration file::
+
+ VIRTUAL-RUNTIME_syslog = ""
+ VIRTUAL-RUNTIME_base-utils-syslog = ""
+
+Doing so will prevent ``rsyslog`` / ``busybox-syslog`` from being pulled in by
+default, leaving only ``systemd-journald``.
+
+Summary
+-------
+
+The Yocto Project supports three different initialization managers, offering
+increasing levels of complexity and functionality:
+
+.. list-table::
+ :widths: 40 20 20 20
+ :header-rows: 1
+
+ * -
+ - BusyBox init
+ - SysVinit
+ - systemd
+ * - Size
+ - Small
+ - Small
+ - Big [#footnote-systemd-size]_
+ * - Complexity
+ - Small
+ - Medium
+ - High
+ * - Support for boot profiles
+ - No
+ - Yes ("runlevels")
+ - Yes ("targets")
+ * - Services defined as
+ - Shell scripts
+ - Shell scripts
+ - Description files
+ * - Starting services in parallel
+ - No
+ - No
+ - Yes
+ * - Setting service resource limits
+ - No
+ - No
+ - Yes
+ * - Support service isolation
+ - No
+ - No
+ - Yes
+ * - Integrated logging
+ - No
+ - No
+ - Yes
+
+.. [#footnote-systemd-size] Using systemd increases the ``core-image-minimal``
+ image size by 160\% for ``qemux86-64`` on Mickledore (4.2), compared to SysVinit.
diff --git a/documentation/dev-manual/layers.rst b/documentation/dev-manual/layers.rst
new file mode 100644
index 0000000000..91889bd0ae
--- /dev/null
+++ b/documentation/dev-manual/layers.rst
@@ -0,0 +1,919 @@
+.. SPDX-License-Identifier: CC-BY-SA-2.0-UK
+
+Understanding and Creating Layers
+*********************************
+
+The OpenEmbedded build system supports organizing
+:term:`Metadata` into multiple layers.
+Layers allow you to isolate different types of customizations from each
+other. For introductory information on the Yocto Project Layer Model,
+see the
+":ref:`overview-manual/yp-intro:the yocto project layer model`"
+section in the Yocto Project Overview and Concepts Manual.
+
+Creating Your Own Layer
+=======================
+
+.. note::
+
+ It is very easy to create your own layers to use with the OpenEmbedded
+ build system, as the Yocto Project ships with tools that speed up creating
+ layers. This section describes the steps you perform by hand to create
+ layers so that you can better understand them. For information about the
+ layer-creation tools, see the
+ ":ref:`bsp-guide/bsp:creating a new bsp layer using the \`\`bitbake-layers\`\` script`"
+ section in the Yocto Project Board Support Package (BSP) Developer's
+ Guide and the ":ref:`dev-manual/layers:creating a general layer using the \`\`bitbake-layers\`\` script`"
+ section further down in this manual.
+
+Follow these general steps to create your layer without using tools:
+
+#. *Check Existing Layers:* Before creating a new layer, you should be
+ sure someone has not already created a layer containing the Metadata
+ you need. You can see the :oe_layerindex:`OpenEmbedded Metadata Index <>`
+ for a list of layers from the OpenEmbedded community that can be used in
+ the Yocto Project. You could find a layer that is identical or close
+ to what you need.
+
+#. *Create a Directory:* Create the directory for your layer. When you
+ create the layer, be sure to create the directory in an area not
+ associated with the Yocto Project :term:`Source Directory`
+ (e.g. the cloned ``poky`` repository).
+
+ While not strictly required, prepend the name of the directory with
+ the string "meta-". For example::
+
+ meta-mylayer
+ meta-GUI_xyz
+ meta-mymachine
+
+ With rare exceptions, a layer's name follows this form::
+
+ meta-root_name
+
+ Following this layer naming convention can save
+ you trouble later when tools, components, or variables "assume" your
+ layer name begins with "meta-". A notable example is in configuration
+ files as shown in the following step where layer names without the
+ "meta-" string are appended to several variables used in the
+ configuration.
+
+#. *Create a Layer Configuration File:* Inside your new layer folder,
+ you need to create a ``conf/layer.conf`` file. It is easiest to take
+ an existing layer configuration file and copy that to your layer's
+ ``conf`` directory and then modify the file as needed.
+
+ The ``meta-yocto-bsp/conf/layer.conf`` file in the Yocto Project
+ :yocto_git:`Source Repositories </poky/tree/meta-yocto-bsp/conf>`
+ demonstrates the required syntax. For your layer, you need to replace
+ "yoctobsp" with a unique identifier for your layer (e.g. "machinexyz"
+ for a layer named "meta-machinexyz")::
+
+ # We have a conf and classes directory, add to BBPATH
+ BBPATH .= ":${LAYERDIR}"
+
+ # We have recipes-* directories, add to BBFILES
+ BBFILES += "${LAYERDIR}/recipes-*/*/*.bb \
+ ${LAYERDIR}/recipes-*/*/*.bbappend"
+
+ BBFILE_COLLECTIONS += "yoctobsp"
+ BBFILE_PATTERN_yoctobsp = "^${LAYERDIR}/"
+ BBFILE_PRIORITY_yoctobsp = "5"
+ LAYERVERSION_yoctobsp = "4"
+ LAYERSERIES_COMPAT_yoctobsp = "dunfell"
+
+ Here is an explanation of the layer configuration file:
+
+ - :term:`BBPATH`: Adds the layer's
+ root directory to BitBake's search path. Through the use of the
+ :term:`BBPATH` variable, BitBake locates class files (``.bbclass``),
+ configuration files, and files that are included with ``include``
+ and ``require`` statements. For these cases, BitBake uses the
+ first file that matches the name found in :term:`BBPATH`. This is
+ similar to the way the ``PATH`` variable is used for binaries. It
+ is recommended, therefore, that you use unique class and
+ configuration filenames in your custom layer.
+
+ - :term:`BBFILES`: Defines the
+ location for all recipes in the layer.
+
+ - :term:`BBFILE_COLLECTIONS`:
+ Establishes the current layer through a unique identifier that is
+ used throughout the OpenEmbedded build system to refer to the
+ layer. In this example, the identifier "yoctobsp" is the
+ representation for the container layer named "meta-yocto-bsp".
+
+ - :term:`BBFILE_PATTERN`:
+ Expands immediately during parsing to provide the directory of the
+ layer.
+
+ - :term:`BBFILE_PRIORITY`:
+ Establishes a priority to use for recipes in the layer when the
+ OpenEmbedded build finds recipes of the same name in different
+ layers.
+
+ - :term:`LAYERVERSION`:
+ Establishes a version number for the layer. You can use this
+ version number to specify this exact version of the layer as a
+ dependency when using the
+ :term:`LAYERDEPENDS`
+ variable.
+
+ - :term:`LAYERDEPENDS`:
+ Lists all layers on which this layer depends (if any).
+
+ - :term:`LAYERSERIES_COMPAT`:
+ Lists the :yocto_wiki:`Yocto Project </Releases>`
+ releases for which the current version is compatible. This
+ variable is a good way to indicate if your particular layer is
+ current.
+
+
+ .. note::
+
+ A layer does not have to contain only recipes ``.bb`` or append files
+ ``.bbappend``. Generally, developers create layers using
+ ``bitbake-layers create-layer``.
+ See ":ref:`dev-manual/layers:creating a general layer using the \`\`bitbake-layers\`\` script`",
+ explaining how the ``layer.conf`` file is created from a template located in
+ ``meta/lib/bblayers/templates/layer.conf``.
+ In fact, none of the variables set in ``layer.conf`` are mandatory,
+ except when :term:`BBFILE_COLLECTIONS` is present. In this case
+ :term:`LAYERSERIES_COMPAT` and :term:`BBFILE_PATTERN` have to be
+ defined too.
+
+#. *Add Content:* Depending on the type of layer, add the content. If
+ the layer adds support for a machine, add the machine configuration
+ in a ``conf/machine/`` file within the layer. If the layer adds
+ distro policy, add the distro configuration in a ``conf/distro/``
+ file within the layer. If the layer introduces new recipes, put the
+ recipes you need in ``recipes-*`` subdirectories within the layer.
+
+ .. note::
+
+ For an explanation of layer hierarchy that is compliant with the
+ Yocto Project, see the ":ref:`bsp-guide/bsp:example filesystem layout`"
+ section in the Yocto Project Board Support Package (BSP) Developer's Guide.
+
+#. *Optionally Test for Compatibility:* If you want permission to use
+ the Yocto Project Compatibility logo with your layer or application
+ that uses your layer, perform the steps to apply for compatibility.
+ See the
+ ":ref:`dev-manual/layers:making sure your layer is compatible with yocto project`"
+ section for more information.
+
+Following Best Practices When Creating Layers
+=============================================
+
+To create layers that are easier to maintain and that will not impact
+builds for other machines, you should consider the information in the
+following list:
+
+- *Avoid "Overlaying" Entire Recipes from Other Layers in Your
+ Configuration:* In other words, do not copy an entire recipe into
+ your layer and then modify it. Rather, use an append file
+ (``.bbappend``) to override only those parts of the original recipe
+ you need to modify.
+
+- *Avoid Duplicating Include Files:* Use append files (``.bbappend``)
+ for each recipe that uses an include file. Or, if you are introducing
+ a new recipe that requires the included file, use the path relative
+ to the original layer directory to refer to the file. For example,
+ use ``require recipes-core/``\ `package`\ ``/``\ `file`\ ``.inc`` instead
+ of ``require`` `file`\ ``.inc``. If you're finding you have to overlay
+ the include file, it could indicate a deficiency in the include file
+ in the layer to which it originally belongs. If this is the case, you
+ should try to address that deficiency instead of overlaying the
+ include file. For example, you could address this by getting the
+ maintainer of the include file to add a variable or variables to make
+ it easy to override the parts needing to be overridden.
+
+- *Structure Your Layers:* Proper use of overrides within append files
+ and placement of machine-specific files within your layer can ensure
+ that a build is not using the wrong Metadata and negatively impacting
+ a build for a different machine. Here are some examples:
+
+ - *Modify Variables to Support a Different Machine:* Suppose you
+ have a layer named ``meta-one`` that adds support for building
+ machine "one". To do so, you use an append file named
+ ``base-files.bbappend`` and create a dependency on "foo" by
+ altering the :term:`DEPENDS`
+ variable::
+
+ DEPENDS = "foo"
+
+ The dependency is created during any
+ build that includes the layer ``meta-one``. However, you might not
+ want this dependency for all machines. For example, suppose you
+ are building for machine "two" but your ``bblayers.conf`` file has
+ the ``meta-one`` layer included. During the build, the
+ ``base-files`` for machine "two" will also have the dependency on
+ ``foo``.
+
+ To make sure your changes apply only when building machine "one",
+ use a machine override with the :term:`DEPENDS` statement::
+
+ DEPENDS:one = "foo"
+
+ You should follow the same strategy when using ``:append``
+ and ``:prepend`` operations::
+
+ DEPENDS:append:one = " foo"
+ DEPENDS:prepend:one = "foo "
+
+ As an actual example, here's a
+ snippet from the generic kernel include file ``linux-yocto.inc``,
+ wherein the kernel compile and link options are adjusted in the
+ case of a subset of the supported architectures::
+
+ DEPENDS:append:aarch64 = " libgcc"
+ KERNEL_CC:append:aarch64 = " ${TOOLCHAIN_OPTIONS}"
+ KERNEL_LD:append:aarch64 = " ${TOOLCHAIN_OPTIONS}"
+
+ DEPENDS:append:nios2 = " libgcc"
+ KERNEL_CC:append:nios2 = " ${TOOLCHAIN_OPTIONS}"
+ KERNEL_LD:append:nios2 = " ${TOOLCHAIN_OPTIONS}"
+
+ DEPENDS:append:arc = " libgcc"
+ KERNEL_CC:append:arc = " ${TOOLCHAIN_OPTIONS}"
+ KERNEL_LD:append:arc = " ${TOOLCHAIN_OPTIONS}"
+
+ KERNEL_FEATURES:append:qemuall=" features/debug/printk.scc"
+
+ - *Place Machine-Specific Files in Machine-Specific Locations:* When
+ you have a base recipe, such as ``base-files.bb``, that contains a
+ :term:`SRC_URI` statement to a
+ file, you can use an append file to cause the build to use your
+ own version of the file. For example, an append file in your layer
+ at ``meta-one/recipes-core/base-files/base-files.bbappend`` could
+ extend :term:`FILESPATH` using :term:`FILESEXTRAPATHS` as follows::
+
+ FILESEXTRAPATHS:prepend := "${THISDIR}/${BPN}:"
+
+ The build for machine "one" will pick up your machine-specific file as
+ long as you have the file in
+ ``meta-one/recipes-core/base-files/base-files/``. However, if you
+ are building for a different machine and the ``bblayers.conf``
+ file includes the ``meta-one`` layer and the location of your
+ machine-specific file is the first location where that file is
+ found according to :term:`FILESPATH`, builds for all machines will
+ also use that machine-specific file.
+
+ You can make sure that a machine-specific file is used for a
+ particular machine by putting the file in a subdirectory specific
+ to the machine. For example, rather than placing the file in
+ ``meta-one/recipes-core/base-files/base-files/`` as shown above,
+ put it in ``meta-one/recipes-core/base-files/base-files/one/``.
+ Not only does this make sure the file is used only when building
+ for machine "one", but the build process locates the file more
+ quickly.
+
+ In summary, you need to place all files referenced from
+ :term:`SRC_URI` in a machine-specific subdirectory within the layer in
+ order to restrict those files to machine-specific builds.
+
+- *Perform Steps to Apply for Yocto Project Compatibility:* If you want
+ permission to use the Yocto Project Compatibility logo with your
+ layer or application that uses your layer, perform the steps to apply
+ for compatibility. See the
+ ":ref:`dev-manual/layers:making sure your layer is compatible with yocto project`"
+ section for more information.
+
+- *Follow the Layer Naming Convention:* Store custom layers in a Git
+ repository that use the ``meta-layer_name`` format.
+
+- *Group Your Layers Locally:* Clone your repository alongside other
+ cloned ``meta`` directories from the :term:`Source Directory`.
+
+Making Sure Your Layer is Compatible With Yocto Project
+=======================================================
+
+When you create a layer used with the Yocto Project, it is advantageous
+to make sure that the layer interacts well with existing Yocto Project
+layers (i.e. the layer is compatible with the Yocto Project). Ensuring
+compatibility makes the layer easy to be consumed by others in the Yocto
+Project community and could allow you permission to use the Yocto
+Project Compatible Logo.
+
+.. note::
+
+ Only Yocto Project member organizations are permitted to use the
+ Yocto Project Compatible Logo. The logo is not available for general
+ use. For information on how to become a Yocto Project member
+ organization, see the :yocto_home:`Yocto Project Website <>`.
+
+The Yocto Project Compatibility Program consists of a layer application
+process that requests permission to use the Yocto Project Compatibility
+Logo for your layer and application. The process consists of two parts:
+
+#. Successfully passing a script (``yocto-check-layer``) that when run
+ against your layer, tests it against constraints based on experiences
+ of how layers have worked in the real world and where pitfalls have
+ been found. Getting a "PASS" result from the script is required for
+ successful compatibility registration.
+
+#. Completion of an application acceptance form, which you can find at
+ :yocto_home:`/compatible-registration/`.
+
+To be granted permission to use the logo, you need to satisfy the
+following:
+
+- Be able to check the box indicating that you got a "PASS" when
+ running the script against your layer.
+
+- Answer "Yes" to the questions on the form or have an acceptable
+ explanation for any questions answered "No".
+
+- Be a Yocto Project Member Organization.
+
+The remainder of this section presents information on the registration
+form and on the ``yocto-check-layer`` script.
+
+Yocto Project Compatible Program Application
+--------------------------------------------
+
+Use the form to apply for your layer's approval. Upon successful
+application, you can use the Yocto Project Compatibility Logo with your
+layer and the application that uses your layer.
+
+To access the form, use this link:
+:yocto_home:`/compatible-registration`.
+Follow the instructions on the form to complete your application.
+
+The application consists of the following sections:
+
+- *Contact Information:* Provide your contact information as the fields
+ require. Along with your information, provide the released versions
+ of the Yocto Project for which your layer is compatible.
+
+- *Acceptance Criteria:* Provide "Yes" or "No" answers for each of the
+ items in the checklist. There is space at the bottom of the form for
+ any explanations for items for which you answered "No".
+
+- *Recommendations:* Provide answers for the questions regarding Linux
+ kernel use and build success.
+
+``yocto-check-layer`` Script
+----------------------------
+
+The ``yocto-check-layer`` script provides you a way to assess how
+compatible your layer is with the Yocto Project. You should run this
+script prior to using the form to apply for compatibility as described
+in the previous section. You need to achieve a "PASS" result in order to
+have your application form successfully processed.
+
+The script divides tests into three areas: COMMON, BSP, and DISTRO. For
+example, given a distribution layer (DISTRO), the layer must pass both
+the COMMON and DISTRO related tests. Furthermore, if your layer is a BSP
+layer, the layer must pass the COMMON and BSP set of tests.
+
+To execute the script, enter the following commands from your build
+directory::
+
+ $ source oe-init-build-env
+ $ yocto-check-layer your_layer_directory
+
+Be sure to provide the actual directory for your
+layer as part of the command.
+
+Entering the command causes the script to determine the type of layer
+and then to execute a set of specific tests against the layer. The
+following list overviews the test:
+
+- ``common.test_readme``: Tests if a ``README`` file exists in the
+ layer and the file is not empty.
+
+- ``common.test_parse``: Tests to make sure that BitBake can parse the
+ files without error (i.e. ``bitbake -p``).
+
+- ``common.test_show_environment``: Tests that the global or per-recipe
+ environment is in order without errors (i.e. ``bitbake -e``).
+
+- ``common.test_world``: Verifies that ``bitbake world`` works.
+
+- ``common.test_signatures``: Tests to be sure that BSP and DISTRO
+ layers do not come with recipes that change signatures.
+
+- ``common.test_layerseries_compat``: Verifies layer compatibility is
+ set properly.
+
+- ``bsp.test_bsp_defines_machines``: Tests if a BSP layer has machine
+ configurations.
+
+- ``bsp.test_bsp_no_set_machine``: Tests to ensure a BSP layer does not
+ set the machine when the layer is added.
+
+- ``bsp.test_machine_world``: Verifies that ``bitbake world`` works
+ regardless of which machine is selected.
+
+- ``bsp.test_machine_signatures``: Verifies that building for a
+ particular machine affects only the signature of tasks specific to
+ that machine.
+
+- ``distro.test_distro_defines_distros``: Tests if a DISTRO layer has
+ distro configurations.
+
+- ``distro.test_distro_no_set_distros``: Tests to ensure a DISTRO layer
+ does not set the distribution when the layer is added.
+
+Enabling Your Layer
+===================
+
+Before the OpenEmbedded build system can use your new layer, you need to
+enable it. To enable your layer, simply add your layer's path to the
+:term:`BBLAYERS` variable in your ``conf/bblayers.conf`` file, which is
+found in the :term:`Build Directory`. The following example shows how to
+enable your new ``meta-mylayer`` layer (note how your new layer exists
+outside of the official ``poky`` repository which you would have checked
+out earlier)::
+
+ # POKY_BBLAYERS_CONF_VERSION is increased each time build/conf/bblayers.conf
+ # changes incompatibly
+ POKY_BBLAYERS_CONF_VERSION = "2"
+ BBPATH = "${TOPDIR}"
+ BBFILES ?= ""
+ BBLAYERS ?= " \
+ /home/user/poky/meta \
+ /home/user/poky/meta-poky \
+ /home/user/poky/meta-yocto-bsp \
+ /home/user/mystuff/meta-mylayer \
+ "
+
+BitBake parses each ``conf/layer.conf`` file from the top down as
+specified in the :term:`BBLAYERS` variable within the ``conf/bblayers.conf``
+file. During the processing of each ``conf/layer.conf`` file, BitBake
+adds the recipes, classes and configurations contained within the
+particular layer to the source directory.
+
+Appending Other Layers Metadata With Your Layer
+===============================================
+
+A recipe that appends Metadata to another recipe is called a BitBake
+append file. A BitBake append file uses the ``.bbappend`` file type
+suffix, while the corresponding recipe to which Metadata is being
+appended uses the ``.bb`` file type suffix.
+
+You can use a ``.bbappend`` file in your layer to make additions or
+changes to the content of another layer's recipe without having to copy
+the other layer's recipe into your layer. Your ``.bbappend`` file
+resides in your layer, while the main ``.bb`` recipe file to which you
+are appending Metadata resides in a different layer.
+
+Being able to append information to an existing recipe not only avoids
+duplication, but also automatically applies recipe changes from a
+different layer into your layer. If you were copying recipes, you would
+have to manually merge changes as they occur.
+
+When you create an append file, you must use the same root name as the
+corresponding recipe file. For example, the append file
+``someapp_3.1.bbappend`` must apply to ``someapp_3.1.bb``. This
+means the original recipe and append filenames are version
+number-specific. If the corresponding recipe is renamed to update to a
+newer version, you must also rename and possibly update the
+corresponding ``.bbappend`` as well. During the build process, BitBake
+displays an error on starting if it detects a ``.bbappend`` file that
+does not have a corresponding recipe with a matching name. See the
+:term:`BB_DANGLINGAPPENDS_WARNONLY`
+variable for information on how to handle this error.
+
+Overlaying a File Using Your Layer
+----------------------------------
+
+As an example, consider the main formfactor recipe and a corresponding
+formfactor append file both from the :term:`Source Directory`.
+Here is the main
+formfactor recipe, which is named ``formfactor_0.0.bb`` and located in
+the "meta" layer at ``meta/recipes-bsp/formfactor``::
+
+ SUMMARY = "Device formfactor information"
+ DESCRIPTION = "A formfactor configuration file provides information about the \
+ target hardware for which the image is being built and information that the \
+ build system cannot obtain from other sources such as the kernel."
+ SECTION = "base"
+ LICENSE = "MIT"
+ LIC_FILES_CHKSUM = "file://${COREBASE}/meta/COPYING.MIT;md5=3da9cfbcb788c80a0384361b4de20420"
+ PR = "r45"
+
+ SRC_URI = "file://config file://machconfig"
+ S = "${WORKDIR}"
+
+ PACKAGE_ARCH = "${MACHINE_ARCH}"
+ INHIBIT_DEFAULT_DEPS = "1"
+
+ do_install() {
+ # Install file only if it has contents
+ install -d ${D}${sysconfdir}/formfactor/
+ install -m 0644 ${S}/config ${D}${sysconfdir}/formfactor/
+ if [ -s "${S}/machconfig" ]; then
+ install -m 0644 ${S}/machconfig ${D}${sysconfdir}/formfactor/
+ fi
+ }
+
+In the main recipe, note the :term:`SRC_URI`
+variable, which tells the OpenEmbedded build system where to find files
+during the build.
+
+Here is the append file, which is named ``formfactor_0.0.bbappend``
+and is from the Raspberry Pi BSP Layer named ``meta-raspberrypi``. The
+file is in the layer at ``recipes-bsp/formfactor``::
+
+ FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"
+
+By default, the build system uses the
+:term:`FILESPATH` variable to
+locate files. This append file extends the locations by setting the
+:term:`FILESEXTRAPATHS`
+variable. Setting this variable in the ``.bbappend`` file is the most
+reliable and recommended method for adding directories to the search
+path used by the build system to find files.
+
+The statement in this example extends the directories to include
+``${``\ :term:`THISDIR`\ ``}/${``\ :term:`PN`\ ``}``,
+which resolves to a directory named ``formfactor`` in the same directory
+in which the append file resides (i.e.
+``meta-raspberrypi/recipes-bsp/formfactor``. This implies that you must
+have the supporting directory structure set up that will contain any
+files or patches you will be including from the layer.
+
+Using the immediate expansion assignment operator ``:=`` is important
+because of the reference to :term:`THISDIR`. The trailing colon character is
+important as it ensures that items in the list remain colon-separated.
+
+.. note::
+
+ BitBake automatically defines the :term:`THISDIR` variable. You should
+ never set this variable yourself. Using ":prepend" as part of the
+ :term:`FILESEXTRAPATHS` ensures your path will be searched prior to other
+ paths in the final list.
+
+ Also, not all append files add extra files. Many append files simply
+ allow to add build options (e.g. ``systemd``). For these cases, your
+ append file would not even use the :term:`FILESEXTRAPATHS` statement.
+
+The end result of this ``.bbappend`` file is that on a Raspberry Pi, where
+``rpi`` will exist in the list of :term:`OVERRIDES`, the file
+``meta-raspberrypi/recipes-bsp/formfactor/formfactor/rpi/machconfig`` will be
+used during :ref:`ref-tasks-fetch` and the test for a non-zero file size in
+:ref:`ref-tasks-install` will return true, and the file will be installed.
+
+Installing Additional Files Using Your Layer
+--------------------------------------------
+
+As another example, consider the main ``xserver-xf86-config`` recipe and a
+corresponding ``xserver-xf86-config`` append file both from the :term:`Source
+Directory`. Here is the main ``xserver-xf86-config`` recipe, which is named
+``xserver-xf86-config_0.1.bb`` and located in the "meta" layer at
+``meta/recipes-graphics/xorg-xserver``::
+
+ SUMMARY = "X.Org X server configuration file"
+ HOMEPAGE = "http://www.x.org"
+ SECTION = "x11/base"
+ LICENSE = "MIT"
+ LIC_FILES_CHKSUM = "file://${COREBASE}/meta/COPYING.MIT;md5=3da9cfbcb788c80a0384361b4de20420"
+ PR = "r33"
+
+ SRC_URI = "file://xorg.conf"
+
+ S = "${WORKDIR}"
+
+ CONFFILES:${PN} = "${sysconfdir}/X11/xorg.conf"
+
+ PACKAGE_ARCH = "${MACHINE_ARCH}"
+ ALLOW_EMPTY:${PN} = "1"
+
+ do_install () {
+ if test -s ${WORKDIR}/xorg.conf; then
+ install -d ${D}/${sysconfdir}/X11
+ install -m 0644 ${WORKDIR}/xorg.conf ${D}/${sysconfdir}/X11/
+ fi
+ }
+
+Here is the append file, which is named ``xserver-xf86-config_%.bbappend``
+and is from the Raspberry Pi BSP Layer named ``meta-raspberrypi``. The
+file is in the layer at ``recipes-graphics/xorg-xserver``::
+
+ FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"
+
+ SRC_URI:append:rpi = " \
+ file://xorg.conf.d/98-pitft.conf \
+ file://xorg.conf.d/99-calibration.conf \
+ "
+ do_install:append:rpi () {
+ PITFT="${@bb.utils.contains("MACHINE_FEATURES", "pitft", "1", "0", d)}"
+ if [ "${PITFT}" = "1" ]; then
+ install -d ${D}/${sysconfdir}/X11/xorg.conf.d/
+ install -m 0644 ${WORKDIR}/xorg.conf.d/98-pitft.conf ${D}/${sysconfdir}/X11/xorg.conf.d/
+ install -m 0644 ${WORKDIR}/xorg.conf.d/99-calibration.conf ${D}/${sysconfdir}/X11/xorg.conf.d/
+ fi
+ }
+
+ FILES:${PN}:append:rpi = " ${sysconfdir}/X11/xorg.conf.d/*"
+
+Building off of the previous example, we once again are setting the
+:term:`FILESEXTRAPATHS` variable. In this case we are also using
+:term:`SRC_URI` to list additional source files to use when ``rpi`` is found in
+the list of :term:`OVERRIDES`. The :ref:`ref-tasks-install` task will then perform a
+check for an additional :term:`MACHINE_FEATURES` that if set will cause these
+additional files to be installed. These additional files are listed in
+:term:`FILES` so that they will be packaged.
+
+Prioritizing Your Layer
+=======================
+
+Each layer is assigned a priority value. Priority values control which
+layer takes precedence if there are recipe files with the same name in
+multiple layers. For these cases, the recipe file from the layer with a
+higher priority number takes precedence. Priority values also affect the
+order in which multiple ``.bbappend`` files for the same recipe are
+applied. You can either specify the priority manually, or allow the
+build system to calculate it based on the layer's dependencies.
+
+To specify the layer's priority manually, use the
+:term:`BBFILE_PRIORITY`
+variable and append the layer's root name::
+
+ BBFILE_PRIORITY_mylayer = "1"
+
+.. note::
+
+ It is possible for a recipe with a lower version number
+ :term:`PV` in a layer that has a higher
+ priority to take precedence.
+
+ Also, the layer priority does not currently affect the precedence
+ order of ``.conf`` or ``.bbclass`` files. Future versions of BitBake
+ might address this.
+
+Managing Layers
+===============
+
+You can use the BitBake layer management tool ``bitbake-layers`` to
+provide a view into the structure of recipes across a multi-layer
+project. Being able to generate output that reports on configured layers
+with their paths and priorities and on ``.bbappend`` files and their
+applicable recipes can help to reveal potential problems.
+
+For help on the BitBake layer management tool, use the following
+command::
+
+ $ bitbake-layers --help
+
+The following list describes the available commands:
+
+- ``help:`` Displays general help or help on a specified command.
+
+- ``show-layers:`` Shows the current configured layers.
+
+- ``show-overlayed:`` Lists overlayed recipes. A recipe is overlayed
+ when a recipe with the same name exists in another layer that has a
+ higher layer priority.
+
+- ``show-recipes:`` Lists available recipes and the layers that
+ provide them.
+
+- ``show-appends:`` Lists ``.bbappend`` files and the recipe files to
+ which they apply.
+
+- ``show-cross-depends:`` Lists dependency relationships between
+ recipes that cross layer boundaries.
+
+- ``add-layer:`` Adds a layer to ``bblayers.conf``.
+
+- ``remove-layer:`` Removes a layer from ``bblayers.conf``
+
+- ``flatten:`` Flattens the layer configuration into a separate
+ output directory. Flattening your layer configuration builds a
+ "flattened" directory that contains the contents of all layers, with
+ any overlayed recipes removed and any ``.bbappend`` files appended to
+ the corresponding recipes. You might have to perform some manual
+ cleanup of the flattened layer as follows:
+
+ - Non-recipe files (such as patches) are overwritten. The flatten
+ command shows a warning for these files.
+
+ - Anything beyond the normal layer setup has been added to the
+ ``layer.conf`` file. Only the lowest priority layer's
+ ``layer.conf`` is used.
+
+ - Overridden and appended items from ``.bbappend`` files need to be
+ cleaned up. The contents of each ``.bbappend`` end up in the
+ flattened recipe. However, if there are appended or changed
+ variable values, you need to tidy these up yourself. Consider the
+ following example. Here, the ``bitbake-layers`` command adds the
+ line ``#### bbappended ...`` so that you know where the following
+ lines originate::
+
+ ...
+ DESCRIPTION = "A useful utility"
+ ...
+ EXTRA_OECONF = "--enable-something"
+ ...
+
+ #### bbappended from meta-anotherlayer ####
+
+ DESCRIPTION = "Customized utility"
+ EXTRA_OECONF += "--enable-somethingelse"
+
+
+ Ideally, you would tidy up these utilities as follows::
+
+ ...
+ DESCRIPTION = "Customized utility"
+ ...
+ EXTRA_OECONF = "--enable-something --enable-somethingelse"
+ ...
+
+- ``layerindex-fetch``: Fetches a layer from a layer index, along
+ with its dependent layers, and adds the layers to the
+ ``conf/bblayers.conf`` file.
+
+- ``layerindex-show-depends``: Finds layer dependencies from the
+ layer index.
+
+- ``save-build-conf``: Saves the currently active build configuration
+ (``conf/local.conf``, ``conf/bblayers.conf``) as a template into a layer.
+ This template can later be used for setting up builds via :term:`TEMPLATECONF`.
+ For information about saving and using configuration templates, see
+ ":ref:`dev-manual/custom-template-configuration-directory:creating a custom template configuration directory`".
+
+- ``create-layer``: Creates a basic layer.
+
+- ``create-layers-setup``: Writes out a configuration file and/or a script that
+ can replicate the directory structure and revisions of the layers in a current build.
+ For more information, see ":ref:`dev-manual/layers:saving and restoring the layers setup`".
+
+Creating a General Layer Using the ``bitbake-layers`` Script
+============================================================
+
+The ``bitbake-layers`` script with the ``create-layer`` subcommand
+simplifies creating a new general layer.
+
+.. note::
+
+ - For information on BSP layers, see the ":ref:`bsp-guide/bsp:bsp layers`"
+ section in the Yocto
+ Project Board Specific (BSP) Developer's Guide.
+
+ - In order to use a layer with the OpenEmbedded build system, you
+ need to add the layer to your ``bblayers.conf`` configuration
+ file. See the ":ref:`dev-manual/layers:adding a layer using the \`\`bitbake-layers\`\` script`"
+ section for more information.
+
+The default mode of the script's operation with this subcommand is to
+create a layer with the following:
+
+- A layer priority of 6.
+
+- A ``conf`` subdirectory that contains a ``layer.conf`` file.
+
+- A ``recipes-example`` subdirectory that contains a further
+ subdirectory named ``example``, which contains an ``example.bb``
+ recipe file.
+
+- A ``COPYING.MIT``, which is the license statement for the layer. The
+ script assumes you want to use the MIT license, which is typical for
+ most layers, for the contents of the layer itself.
+
+- A ``README`` file, which is a file describing the contents of your
+ new layer.
+
+In its simplest form, you can use the following command form to create a
+layer. The command creates a layer whose name corresponds to
+"your_layer_name" in the current directory::
+
+ $ bitbake-layers create-layer your_layer_name
+
+As an example, the following command creates a layer named ``meta-scottrif``
+in your home directory::
+
+ $ cd /usr/home
+ $ bitbake-layers create-layer meta-scottrif
+ NOTE: Starting bitbake server...
+ Add your new layer with 'bitbake-layers add-layer meta-scottrif'
+
+If you want to set the priority of the layer to other than the default
+value of "6", you can either use the ``--priority`` option or you
+can edit the
+:term:`BBFILE_PRIORITY` value
+in the ``conf/layer.conf`` after the script creates it. Furthermore, if
+you want to give the example recipe file some name other than the
+default, you can use the ``--example-recipe-name`` option.
+
+The easiest way to see how the ``bitbake-layers create-layer`` command
+works is to experiment with the script. You can also read the usage
+information by entering the following::
+
+ $ bitbake-layers create-layer --help
+ NOTE: Starting bitbake server...
+ usage: bitbake-layers create-layer [-h] [--priority PRIORITY]
+ [--example-recipe-name EXAMPLERECIPE]
+ layerdir
+
+ Create a basic layer
+
+ positional arguments:
+ layerdir Layer directory to create
+
+ optional arguments:
+ -h, --help show this help message and exit
+ --priority PRIORITY, -p PRIORITY
+ Layer directory to create
+ --example-recipe-name EXAMPLERECIPE, -e EXAMPLERECIPE
+ Filename of the example recipe
+
+Adding a Layer Using the ``bitbake-layers`` Script
+==================================================
+
+Once you create your general layer, you must add it to your
+``bblayers.conf`` file. Adding the layer to this configuration file
+makes the OpenEmbedded build system aware of your layer so that it can
+search it for metadata.
+
+Add your layer by using the ``bitbake-layers add-layer`` command::
+
+ $ bitbake-layers add-layer your_layer_name
+
+Here is an example that adds a
+layer named ``meta-scottrif`` to the configuration file. Following the
+command that adds the layer is another ``bitbake-layers`` command that
+shows the layers that are in your ``bblayers.conf`` file::
+
+ $ bitbake-layers add-layer meta-scottrif
+ NOTE: Starting bitbake server...
+ Parsing recipes: 100% |##########################################################| Time: 0:00:49
+ Parsing of 1441 .bb files complete (0 cached, 1441 parsed). 2055 targets, 56 skipped, 0 masked, 0 errors.
+ $ bitbake-layers show-layers
+ NOTE: Starting bitbake server...
+ layer path priority
+ ==========================================================================
+ meta /home/scottrif/poky/meta 5
+ meta-poky /home/scottrif/poky/meta-poky 5
+ meta-yocto-bsp /home/scottrif/poky/meta-yocto-bsp 5
+ workspace /home/scottrif/poky/build/workspace 99
+ meta-scottrif /home/scottrif/poky/build/meta-scottrif 6
+
+
+Adding the layer to this file
+enables the build system to locate the layer during the build.
+
+.. note::
+
+ During a build, the OpenEmbedded build system looks in the layers
+ from the top of the list down to the bottom in that order.
+
+Saving and restoring the layers setup
+=====================================
+
+Once you have a working build with the correct set of layers, it is beneficial
+to capture the layer setup --- what they are, which repositories they come from
+and which SCM revisions they're at --- into a configuration file, so that this
+setup can be easily replicated later, perhaps on a different machine. Here's
+how to do this::
+
+ $ bitbake-layers create-layers-setup /srv/work/alex/meta-alex/
+ NOTE: Starting bitbake server...
+ NOTE: Created /srv/work/alex/meta-alex/setup-layers.json
+ NOTE: Created /srv/work/alex/meta-alex/setup-layers
+
+The tool needs a single argument which tells where to place the output, consisting
+of a json formatted layer configuration, and a ``setup-layers`` script that can use that configuration
+to restore the layers in a different location, or on a different host machine. The argument
+can point to a custom layer (which is then deemed a "bootstrap" layer that needs to be
+checked out first), or into a completely independent location.
+
+The replication of the layers is performed by running the ``setup-layers`` script provided
+above:
+
+#. Clone the bootstrap layer or some other repository to obtain
+ the json config and the setup script that can use it.
+
+#. Run the script directly with no options::
+
+ alex@Zen2:/srv/work/alex/my-build$ meta-alex/setup-layers
+ Note: not checking out source meta-alex, use --force-bootstraplayer-checkout to override.
+
+ Setting up source meta-intel, revision 15.0-hardknott-3.3-310-g0a96edae, branch master
+ Running 'git init -q /srv/work/alex/my-build/meta-intel'
+ Running 'git remote remove origin > /dev/null 2>&1; git remote add origin git://git.yoctoproject.org/meta-intel' in /srv/work/alex/my-build/meta-intel
+ Running 'git fetch -q origin || true' in /srv/work/alex/my-build/meta-intel
+ Running 'git checkout -q 0a96edae609a3f48befac36af82cf1eed6786b4a' in /srv/work/alex/my-build/meta-intel
+
+ Setting up source poky, revision 4.1_M1-372-g55483d28f2, branch akanavin/setup-layers
+ Running 'git init -q /srv/work/alex/my-build/poky'
+ Running 'git remote remove origin > /dev/null 2>&1; git remote add origin git://git.yoctoproject.org/poky' in /srv/work/alex/my-build/poky
+ Running 'git fetch -q origin || true' in /srv/work/alex/my-build/poky
+ Running 'git remote remove poky-contrib > /dev/null 2>&1; git remote add poky-contrib ssh://git@push.yoctoproject.org/poky-contrib' in /srv/work/alex/my-build/poky
+ Running 'git fetch -q poky-contrib || true' in /srv/work/alex/my-build/poky
+ Running 'git checkout -q 11db0390b02acac1324e0f827beb0e2e3d0d1d63' in /srv/work/alex/my-build/poky
+
+.. note::
+ This will work to update an existing checkout as well.
+
+.. note::
+ The script is self-sufficient and requires only python3
+ and git on the build machine.
+
+.. note::
+ Both the ``create-layers-setup`` and the ``setup-layers`` provided several additional options
+ that customize their behavior - you are welcome to study them via ``--help`` command line parameter.
+
diff --git a/documentation/dev-manual/libraries.rst b/documentation/dev-manual/libraries.rst
new file mode 100644
index 0000000000..521dbb9a7c
--- /dev/null
+++ b/documentation/dev-manual/libraries.rst
@@ -0,0 +1,267 @@
+.. SPDX-License-Identifier: CC-BY-SA-2.0-UK
+
+Working With Libraries
+**********************
+
+Libraries are an integral part of your system. This section describes
+some common practices you might find helpful when working with libraries
+to build your system:
+
+- :ref:`How to include static library files
+ <dev-manual/libraries:including static library files>`
+
+- :ref:`How to use the Multilib feature to combine multiple versions of
+ library files into a single image
+ <dev-manual/libraries:combining multiple versions of library files into one image>`
+
+- :ref:`How to install multiple versions of the same library in parallel on
+ the same system
+ <dev-manual/libraries:installing multiple versions of the same library>`
+
+Including Static Library Files
+==============================
+
+If you are building a library and the library offers static linking, you
+can control which static library files (``*.a`` files) get included in
+the built library.
+
+The :term:`PACKAGES` and
+:term:`FILES:* <FILES>` variables in the
+``meta/conf/bitbake.conf`` configuration file define how files installed
+by the :ref:`ref-tasks-install` task are packaged. By default, the :term:`PACKAGES`
+variable includes ``${PN}-staticdev``, which represents all static
+library files.
+
+.. note::
+
+ Some previously released versions of the Yocto Project defined the
+ static library files through ``${PN}-dev``.
+
+Here is the part of the BitBake configuration file, where you can see
+how the static library files are defined::
+
+ PACKAGE_BEFORE_PN ?= ""
+ PACKAGES = "${PN}-src ${PN}-dbg ${PN}-staticdev ${PN}-dev ${PN}-doc ${PN}-locale ${PACKAGE_BEFORE_PN} ${PN}"
+ PACKAGES_DYNAMIC = "^${PN}-locale-.*"
+ FILES = ""
+
+ FILES:${PN} = "${bindir}/* ${sbindir}/* ${libexecdir}/* ${libdir}/lib*${SOLIBS} \
+ ${sysconfdir} ${sharedstatedir} ${localstatedir} \
+ ${base_bindir}/* ${base_sbindir}/* \
+ ${base_libdir}/*${SOLIBS} \
+ ${base_prefix}/lib/udev ${prefix}/lib/udev \
+ ${base_libdir}/udev ${libdir}/udev \
+ ${datadir}/${BPN} ${libdir}/${BPN}/* \
+ ${datadir}/pixmaps ${datadir}/applications \
+ ${datadir}/idl ${datadir}/omf ${datadir}/sounds \
+ ${libdir}/bonobo/servers"
+
+ FILES:${PN}-bin = "${bindir}/* ${sbindir}/*"
+
+ FILES:${PN}-doc = "${docdir} ${mandir} ${infodir} ${datadir}/gtk-doc \
+ ${datadir}/gnome/help"
+ SECTION:${PN}-doc = "doc"
+
+ FILES_SOLIBSDEV ?= "${base_libdir}/lib*${SOLIBSDEV} ${libdir}/lib*${SOLIBSDEV}"
+ FILES:${PN}-dev = "${includedir} ${FILES_SOLIBSDEV} ${libdir}/*.la \
+ ${libdir}/*.o ${libdir}/pkgconfig ${datadir}/pkgconfig \
+ ${datadir}/aclocal ${base_libdir}/*.o \
+ ${libdir}/${BPN}/*.la ${base_libdir}/*.la \
+ ${libdir}/cmake ${datadir}/cmake"
+ SECTION:${PN}-dev = "devel"
+ ALLOW_EMPTY:${PN}-dev = "1"
+ RDEPENDS:${PN}-dev = "${PN} (= ${EXTENDPKGV})"
+
+ FILES:${PN}-staticdev = "${libdir}/*.a ${base_libdir}/*.a ${libdir}/${BPN}/*.a"
+ SECTION:${PN}-staticdev = "devel"
+ RDEPENDS:${PN}-staticdev = "${PN}-dev (= ${EXTENDPKGV})"
+
+Combining Multiple Versions of Library Files into One Image
+===========================================================
+
+The build system offers the ability to build libraries with different
+target optimizations or architecture formats and combine these together
+into one system image. You can link different binaries in the image
+against the different libraries as needed for specific use cases. This
+feature is called "Multilib".
+
+An example would be where you have most of a system compiled in 32-bit
+mode using 32-bit libraries, but you have something large, like a
+database engine, that needs to be a 64-bit application and uses 64-bit
+libraries. Multilib allows you to get the best of both 32-bit and 64-bit
+libraries.
+
+While the Multilib feature is most commonly used for 32 and 64-bit
+differences, the approach the build system uses facilitates different
+target optimizations. You could compile some binaries to use one set of
+libraries and other binaries to use a different set of libraries. The
+libraries could differ in architecture, compiler options, or other
+optimizations.
+
+There are several examples in the ``meta-skeleton`` layer found in the
+:term:`Source Directory`:
+
+- :oe_git:`conf/multilib-example.conf </openembedded-core/tree/meta-skeleton/conf/multilib-example.conf>`
+ configuration file.
+
+- :oe_git:`conf/multilib-example2.conf </openembedded-core/tree/meta-skeleton/conf/multilib-example2.conf>`
+ configuration file.
+
+- :oe_git:`recipes-multilib/images/core-image-multilib-example.bb </openembedded-core/tree/meta-skeleton/recipes-multilib/images/core-image-multilib-example.bb>`
+ recipe
+
+Preparing to Use Multilib
+-------------------------
+
+User-specific requirements drive the Multilib feature. Consequently,
+there is no one "out-of-the-box" configuration that would
+meet your needs.
+
+In order to enable Multilib, you first need to ensure your recipe is
+extended to support multiple libraries. Many standard recipes are
+already extended and support multiple libraries. You can check in the
+``meta/conf/multilib.conf`` configuration file in the
+:term:`Source Directory` to see how this is
+done using the
+:term:`BBCLASSEXTEND` variable.
+Eventually, all recipes will be covered and this list will not be
+needed.
+
+For the most part, the :ref:`Multilib <ref-classes-multilib*>`
+class extension works automatically to
+extend the package name from ``${PN}`` to ``${MLPREFIX}${PN}``, where
+:term:`MLPREFIX` is the particular multilib (e.g. "lib32-" or "lib64-").
+Standard variables such as
+:term:`DEPENDS`,
+:term:`RDEPENDS`,
+:term:`RPROVIDES`,
+:term:`RRECOMMENDS`,
+:term:`PACKAGES`, and
+:term:`PACKAGES_DYNAMIC` are
+automatically extended by the system. If you are extending any manual
+code in the recipe, you can use the ``${MLPREFIX}`` variable to ensure
+those names are extended correctly.
+
+Using Multilib
+--------------
+
+After you have set up the recipes, you need to define the actual
+combination of multiple libraries you want to build. You accomplish this
+through your ``local.conf`` configuration file in the
+:term:`Build Directory`. An example configuration would be as follows::
+
+ MACHINE = "qemux86-64"
+ require conf/multilib.conf
+ MULTILIBS = "multilib:lib32"
+ DEFAULTTUNE:virtclass-multilib-lib32 = "x86"
+ IMAGE_INSTALL:append = " lib32-glib-2.0"
+
+This example enables an additional library named
+``lib32`` alongside the normal target packages. When combining these
+"lib32" alternatives, the example uses "x86" for tuning. For information
+on this particular tuning, see
+``meta/conf/machine/include/ia32/arch-ia32.inc``.
+
+The example then includes ``lib32-glib-2.0`` in all the images, which
+illustrates one method of including a multiple library dependency. You
+can use a normal image build to include this dependency, for example::
+
+ $ bitbake core-image-sato
+
+You can also build Multilib packages
+specifically with a command like this::
+
+ $ bitbake lib32-glib-2.0
+
+Additional Implementation Details
+---------------------------------
+
+There are generic implementation details as well as details that are specific to
+package management systems. Here are implementation details
+that exist regardless of the package management system:
+
+- The typical convention used for the class extension code as used by
+ Multilib assumes that all package names specified in
+ :term:`PACKAGES` that contain
+ ``${PN}`` have ``${PN}`` at the start of the name. When that
+ convention is not followed and ``${PN}`` appears at the middle or the
+ end of a name, problems occur.
+
+- The :term:`TARGET_VENDOR`
+ value under Multilib will be extended to "-vendormlmultilib" (e.g.
+ "-pokymllib32" for a "lib32" Multilib with Poky). The reason for this
+ slightly unwieldy contraction is that any "-" characters in the
+ vendor string presently break Autoconf's ``config.sub``, and other
+ separators are problematic for different reasons.
+
+Here are the implementation details for the RPM Package Management System:
+
+- A unique architecture is defined for the Multilib packages, along
+ with creating a unique deploy folder under ``tmp/deploy/rpm`` in the
+ :term:`Build Directory`. For example, consider ``lib32`` in a
+ ``qemux86-64`` image. The possible architectures in the system are "all",
+ "qemux86_64", "lib32:qemux86_64", and "lib32:x86".
+
+- The ``${MLPREFIX}`` variable is stripped from ``${PN}`` during RPM
+ packaging. The naming for a normal RPM package and a Multilib RPM
+ package in a ``qemux86-64`` system resolves to something similar to
+ ``bash-4.1-r2.x86_64.rpm`` and ``bash-4.1.r2.lib32_x86.rpm``,
+ respectively.
+
+- When installing a Multilib image, the RPM backend first installs the
+ base image and then installs the Multilib libraries.
+
+- The build system relies on RPM to resolve the identical files in the
+ two (or more) Multilib packages.
+
+Here are the implementation details for the IPK Package Management System:
+
+- The ``${MLPREFIX}`` is not stripped from ``${PN}`` during IPK
+ packaging. The naming for a normal RPM package and a Multilib IPK
+ package in a ``qemux86-64`` system resolves to something like
+ ``bash_4.1-r2.x86_64.ipk`` and ``lib32-bash_4.1-rw:x86.ipk``,
+ respectively.
+
+- The IPK deploy folder is not modified with ``${MLPREFIX}`` because
+ packages with and without the Multilib feature can exist in the same
+ folder due to the ``${PN}`` differences.
+
+- IPK defines a sanity check for Multilib installation using certain
+ rules for file comparison, overridden, etc.
+
+Installing Multiple Versions of the Same Library
+================================================
+
+There are be situations where you need to install and use multiple versions
+of the same library on the same system at the same time. This
+almost always happens when a library API changes and you have
+multiple pieces of software that depend on the separate versions of the
+library. To accommodate these situations, you can install multiple
+versions of the same library in parallel on the same system.
+
+The process is straightforward as long as the libraries use proper
+versioning. With properly versioned libraries, all you need to do to
+individually specify the libraries is create separate, appropriately
+named recipes where the :term:`PN` part of
+the name includes a portion that differentiates each library version
+(e.g. the major part of the version number). Thus, instead of having a
+single recipe that loads one version of a library (e.g. ``clutter``),
+you provide multiple recipes that result in different versions of the
+libraries you want. As an example, the following two recipes would allow
+the two separate versions of the ``clutter`` library to co-exist on the
+same system:
+
+.. code-block:: none
+
+ clutter-1.6_1.6.20.bb
+ clutter-1.8_1.8.4.bb
+
+Additionally, if
+you have other recipes that depend on a given library, you need to use
+the :term:`DEPENDS` variable to
+create the dependency. Continuing with the same example, if you want to
+have a recipe depend on the 1.8 version of the ``clutter`` library, use
+the following in your recipe::
+
+ DEPENDS = "clutter-1.8"
+
diff --git a/documentation/dev-manual/licenses.rst b/documentation/dev-manual/licenses.rst
new file mode 100644
index 0000000000..bffff3675f
--- /dev/null
+++ b/documentation/dev-manual/licenses.rst
@@ -0,0 +1,544 @@
+.. SPDX-License-Identifier: CC-BY-SA-2.0-UK
+
+Working With Licenses
+*********************
+
+As mentioned in the ":ref:`overview-manual/development-environment:licensing`"
+section in the Yocto Project Overview and Concepts Manual, open source
+projects are open to the public and they consequently have different
+licensing structures in place. This section describes the mechanism by
+which the :term:`OpenEmbedded Build System`
+tracks changes to
+licensing text and covers how to maintain open source license compliance
+during your project's lifecycle. The section also describes how to
+enable commercially licensed recipes, which by default are disabled.
+
+Tracking License Changes
+========================
+
+The license of an upstream project might change in the future. In order
+to prevent these changes going unnoticed, the
+:term:`LIC_FILES_CHKSUM`
+variable tracks changes to the license text. The checksums are validated
+at the end of the configure step, and if the checksums do not match, the
+build will fail.
+
+Specifying the ``LIC_FILES_CHKSUM`` Variable
+--------------------------------------------
+
+The :term:`LIC_FILES_CHKSUM` variable contains checksums of the license text
+in the source code for the recipe. Here is an example of how to
+specify :term:`LIC_FILES_CHKSUM`::
+
+ LIC_FILES_CHKSUM = "file://COPYING;md5=xxxx \
+ file://licfile1.txt;beginline=5;endline=29;md5=yyyy \
+ file://licfile2.txt;endline=50;md5=zzzz \
+ ..."
+
+.. note::
+
+ - When using "beginline" and "endline", realize that line numbering
+ begins with one and not zero. Also, the included lines are
+ inclusive (i.e. lines five through and including 29 in the
+ previous example for ``licfile1.txt``).
+
+ - When a license check fails, the selected license text is included
+ as part of the QA message. Using this output, you can determine
+ the exact start and finish for the needed license text.
+
+The build system uses the :term:`S`
+variable as the default directory when searching files listed in
+:term:`LIC_FILES_CHKSUM`. The previous example employs the default
+directory.
+
+Consider this next example::
+
+ LIC_FILES_CHKSUM = "file://src/ls.c;beginline=5;endline=16;\
+ md5=bb14ed3c4cda583abc85401304b5cd4e"
+ LIC_FILES_CHKSUM = "file://${WORKDIR}/license.html;md5=5c94767cedb5d6987c902ac850ded2c6"
+
+The first line locates a file in ``${S}/src/ls.c`` and isolates lines
+five through 16 as license text. The second line refers to a file in
+:term:`WORKDIR`.
+
+Note that :term:`LIC_FILES_CHKSUM` variable is mandatory for all recipes,
+unless the :term:`LICENSE` variable is set to "CLOSED".
+
+Explanation of Syntax
+---------------------
+
+As mentioned in the previous section, the :term:`LIC_FILES_CHKSUM` variable
+lists all the important files that contain the license text for the
+source code. It is possible to specify a checksum for an entire file, or
+a specific section of a file (specified by beginning and ending line
+numbers with the "beginline" and "endline" parameters, respectively).
+The latter is useful for source files with a license notice header,
+README documents, and so forth. If you do not use the "beginline"
+parameter, then it is assumed that the text begins on the first line of
+the file. Similarly, if you do not use the "endline" parameter, it is
+assumed that the license text ends with the last line of the file.
+
+The "md5" parameter stores the md5 checksum of the license text. If the
+license text changes in any way as compared to this parameter then a
+mismatch occurs. This mismatch triggers a build failure and notifies the
+developer. Notification allows the developer to review and address the
+license text changes. Also note that if a mismatch occurs during the
+build, the correct md5 checksum is placed in the build log and can be
+easily copied to the recipe.
+
+There is no limit to how many files you can specify using the
+:term:`LIC_FILES_CHKSUM` variable. Generally, however, every project
+requires a few specifications for license tracking. Many projects have a
+"COPYING" file that stores the license information for all the source
+code files. This practice allows you to just track the "COPYING" file as
+long as it is kept up to date.
+
+.. note::
+
+ - If you specify an empty or invalid "md5" parameter,
+ :term:`BitBake` returns an md5
+ mis-match error and displays the correct "md5" parameter value
+ during the build. The correct parameter is also captured in the
+ build log.
+
+ - If the whole file contains only license text, you do not need to
+ use the "beginline" and "endline" parameters.
+
+Enabling Commercially Licensed Recipes
+======================================
+
+By default, the OpenEmbedded build system disables components that have
+commercial or other special licensing requirements. Such requirements
+are defined on a recipe-by-recipe basis through the
+:term:`LICENSE_FLAGS` variable
+definition in the affected recipe. For instance, the
+``poky/meta/recipes-multimedia/gstreamer/gst-plugins-ugly`` recipe
+contains the following statement::
+
+ LICENSE_FLAGS = "commercial"
+
+Here is a
+slightly more complicated example that contains both an explicit recipe
+name and version (after variable expansion)::
+
+ LICENSE_FLAGS = "license_${PN}_${PV}"
+
+It is possible to give more details about a specific license
+using flags on the :term:`LICENSE_FLAGS_DETAILS` variable::
+
+ LICENSE_FLAGS_DETAILS[my-eula-license] = "For further details, see https://example.com/eula."
+
+If set, this will be displayed to the user if the license hasn't been accepted.
+
+In order for a component restricted by a
+:term:`LICENSE_FLAGS` definition to be enabled and included in an image, it
+needs to have a matching entry in the global
+:term:`LICENSE_FLAGS_ACCEPTED`
+variable, which is a variable typically defined in your ``local.conf``
+file. For example, to enable the
+``poky/meta/recipes-multimedia/gstreamer/gst-plugins-ugly`` package, you
+could add either the string "commercial_gst-plugins-ugly" or the more
+general string "commercial" to :term:`LICENSE_FLAGS_ACCEPTED`. See the
+":ref:`dev-manual/licenses:license flag matching`" section for a full
+explanation of how :term:`LICENSE_FLAGS` matching works. Here is the
+example::
+
+ LICENSE_FLAGS_ACCEPTED = "commercial_gst-plugins-ugly"
+
+Likewise, to additionally enable the package built from the recipe
+containing ``LICENSE_FLAGS = "license_${PN}_${PV}"``, and assuming that
+the actual recipe name was ``emgd_1.10.bb``, the following string would
+enable that package as well as the original ``gst-plugins-ugly``
+package::
+
+ LICENSE_FLAGS_ACCEPTED = "commercial_gst-plugins-ugly license_emgd_1.10"
+
+As a convenience, you do not need to specify the
+complete license string for every package. You can use
+an abbreviated form, which consists of just the first portion or
+portions of the license string before the initial underscore character
+or characters. A partial string will match any license that contains the
+given string as the first portion of its license. For example, the
+following value will also match both of the packages
+previously mentioned as well as any other packages that have licenses
+starting with "commercial" or "license"::
+
+ LICENSE_FLAGS_ACCEPTED = "commercial license"
+
+License Flag Matching
+---------------------
+
+License flag matching allows you to control what recipes the
+OpenEmbedded build system includes in the build. Fundamentally, the
+build system attempts to match :term:`LICENSE_FLAGS` strings found in
+recipes against strings found in :term:`LICENSE_FLAGS_ACCEPTED`.
+A match causes the build system to include a recipe in the
+build, while failure to find a match causes the build system to exclude
+a recipe.
+
+In general, license flag matching is simple. However, understanding some
+concepts will help you correctly and effectively use matching.
+
+Before a flag defined by a particular recipe is tested against the
+entries of :term:`LICENSE_FLAGS_ACCEPTED`, the expanded
+string ``_${PN}`` is appended to the flag. This expansion makes each
+:term:`LICENSE_FLAGS` value recipe-specific. After expansion, the
+string is then matched against the entries. Thus, specifying
+``LICENSE_FLAGS = "commercial"`` in recipe "foo", for example, results
+in the string ``"commercial_foo"``. And, to create a match, that string
+must appear among the entries of :term:`LICENSE_FLAGS_ACCEPTED`.
+
+Judicious use of the :term:`LICENSE_FLAGS` strings and the contents of the
+:term:`LICENSE_FLAGS_ACCEPTED` variable allows you a lot of flexibility for
+including or excluding recipes based on licensing. For example, you can
+broaden the matching capabilities by using license flags string subsets
+in :term:`LICENSE_FLAGS_ACCEPTED`.
+
+.. note::
+
+ When using a string subset, be sure to use the part of the expanded
+ string that precedes the appended underscore character (e.g.
+ ``usethispart_1.3``, ``usethispart_1.4``, and so forth).
+
+For example, simply specifying the string "commercial" in the
+:term:`LICENSE_FLAGS_ACCEPTED` variable matches any expanded
+:term:`LICENSE_FLAGS` definition that starts with the string
+"commercial" such as "commercial_foo" and "commercial_bar", which
+are the strings the build system automatically generates for
+hypothetical recipes named "foo" and "bar" assuming those recipes simply
+specify the following::
+
+ LICENSE_FLAGS = "commercial"
+
+Thus, you can choose to exhaustively enumerate each license flag in the
+list and allow only specific recipes into the image, or you can use a
+string subset that causes a broader range of matches to allow a range of
+recipes into the image.
+
+This scheme works even if the :term:`LICENSE_FLAGS` string already has
+``_${PN}`` appended. For example, the build system turns the license
+flag "commercial_1.2_foo" into "commercial_1.2_foo_foo" and would match
+both the general "commercial" and the specific "commercial_1.2_foo"
+strings found in the :term:`LICENSE_FLAGS_ACCEPTED` variable, as expected.
+
+Here are some other scenarios:
+
+- You can specify a versioned string in the recipe such as
+ "commercial_foo_1.2" in a "foo" recipe. The build system expands this
+ string to "commercial_foo_1.2_foo". Combine this license flag with a
+ :term:`LICENSE_FLAGS_ACCEPTED` variable that has the string
+ "commercial" and you match the flag along with any other flag that
+ starts with the string "commercial".
+
+- Under the same circumstances, you can add "commercial_foo" in the
+ :term:`LICENSE_FLAGS_ACCEPTED` variable and the build system not only
+ matches "commercial_foo_1.2" but also matches any license flag with
+ the string "commercial_foo", regardless of the version.
+
+- You can be very specific and use both the package and version parts
+ in the :term:`LICENSE_FLAGS_ACCEPTED` list (e.g.
+ "commercial_foo_1.2") to specifically match a versioned recipe.
+
+Other Variables Related to Commercial Licenses
+----------------------------------------------
+
+There are other helpful variables related to commercial license handling,
+defined in the
+``poky/meta/conf/distro/include/default-distrovars.inc`` file::
+
+ COMMERCIAL_AUDIO_PLUGINS ?= ""
+ COMMERCIAL_VIDEO_PLUGINS ?= ""
+
+If you want to enable these components, you can do so by making sure you have
+statements similar to the following in your ``local.conf`` configuration file::
+
+ COMMERCIAL_AUDIO_PLUGINS = "gst-plugins-ugly-mad \
+ gst-plugins-ugly-mpegaudioparse"
+ COMMERCIAL_VIDEO_PLUGINS = "gst-plugins-ugly-mpeg2dec \
+ gst-plugins-ugly-mpegstream gst-plugins-bad-mpegvideoparse"
+ LICENSE_FLAGS_ACCEPTED = "commercial_gst-plugins-ugly commercial_gst-plugins-bad commercial_qmmp"
+
+Of course, you could also create a matching list for those components using the
+more general "commercial" string in the :term:`LICENSE_FLAGS_ACCEPTED` variable,
+but that would also enable all the other packages with :term:`LICENSE_FLAGS`
+containing "commercial", which you may or may not want::
+
+ LICENSE_FLAGS_ACCEPTED = "commercial"
+
+Specifying audio and video plugins as part of the
+:term:`COMMERCIAL_AUDIO_PLUGINS` and :term:`COMMERCIAL_VIDEO_PLUGINS` statements
+(along with :term:`LICENSE_FLAGS_ACCEPTED`) includes the plugins or
+components into built images, thus adding support for media formats or
+components.
+
+.. note::
+
+ GStreamer "ugly" and "bad" plugins are actually available through
+ open source licenses. However, the "ugly" ones can be subject to software
+ patents in some countries, making it necessary to pay licensing fees
+ to distribute them. The "bad" ones are just deemed unreliable by the
+ GStreamer community and should therefore be used with care.
+
+Maintaining Open Source License Compliance During Your Product's Lifecycle
+==========================================================================
+
+One of the concerns for a development organization using open source
+software is how to maintain compliance with various open source
+licensing during the lifecycle of the product. While this section does
+not provide legal advice or comprehensively cover all scenarios, it does
+present methods that you can use to assist you in meeting the compliance
+requirements during a software release.
+
+With hundreds of different open source licenses that the Yocto Project
+tracks, it is difficult to know the requirements of each and every
+license. However, the requirements of the major FLOSS licenses can begin
+to be covered by assuming that there are three main areas of concern:
+
+- Source code must be provided.
+
+- License text for the software must be provided.
+
+- Compilation scripts and modifications to the source code must be
+ provided.
+
+There are other requirements beyond the scope of these three and the
+methods described in this section (e.g. the mechanism through which
+source code is distributed).
+
+As different organizations have different ways of releasing software,
+there can be multiple ways of meeting license obligations. At
+least, we describe here two methods for achieving compliance:
+
+- The first method is to use OpenEmbedded's ability to provide
+ the source code, provide a list of licenses, as well as
+ compilation scripts and source code modifications.
+
+ The remainder of this section describes supported methods to meet
+ the previously mentioned three requirements.
+
+- The second method is to generate a *Software Bill of Materials*
+ (:term:`SBoM`), as described in the ":doc:`/dev-manual/sbom`" section.
+ Not only do you generate :term:`SPDX` output which can be used meet
+ license compliance requirements (except for sharing the build system
+ and layers sources for the time being), but this output also includes
+ component version and patch information which can be used
+ for vulnerability assessment.
+
+Whatever method you choose, prior to releasing images, sources,
+and the build system, you should audit all artifacts to ensure
+completeness.
+
+.. note::
+
+ The Yocto Project generates a license manifest during image creation
+ that is located in
+ ``${DEPLOY_DIR}/licenses/${SSTATE_PKGARCH}/<image-name>-<machine>.rootfs-<datestamp>/``
+ to assist with any audits.
+
+Providing the Source Code
+-------------------------
+
+Compliance activities should begin before you generate the final image.
+The first thing you should look at is the requirement that tops the list
+for most compliance groups --- providing the source. The Yocto Project has
+a few ways of meeting this requirement.
+
+One of the easiest ways to meet this requirement is to provide the
+entire :term:`DL_DIR` used by the
+build. This method, however, has a few issues. The most obvious is the
+size of the directory since it includes all sources used in the build
+and not just the source used in the released image. It will include
+toolchain source, and other artifacts, which you would not generally
+release. However, the more serious issue for most companies is
+accidental release of proprietary software. The Yocto Project provides
+an :ref:`ref-classes-archiver` class to help avoid some of these concerns.
+
+Before you employ :term:`DL_DIR` or the :ref:`ref-classes-archiver` class, you
+need to decide how you choose to provide source. The source
+:ref:`ref-classes-archiver` class can generate tarballs and SRPMs and can
+create them with various levels of compliance in mind.
+
+One way of doing this (but certainly not the only way) is to release
+just the source as a tarball. You can do this by adding the following to
+the ``local.conf`` file found in the :term:`Build Directory`::
+
+ INHERIT += "archiver"
+ ARCHIVER_MODE[src] = "original"
+
+During the creation of your
+image, the source from all recipes that deploy packages to the image is
+placed within subdirectories of ``DEPLOY_DIR/sources`` based on the
+:term:`LICENSE` for each recipe.
+Releasing the entire directory enables you to comply with requirements
+concerning providing the unmodified source. It is important to note that
+the size of the directory can get large.
+
+A way to help mitigate the size issue is to only release tarballs for
+licenses that require the release of source. Let us assume you are only
+concerned with GPL code as identified by running the following script:
+
+.. code-block:: shell
+
+ # Script to archive a subset of packages matching specific license(s)
+ # Source and license files are copied into sub folders of package folder
+ # Must be run from build folder
+ #!/bin/bash
+ src_release_dir="source-release"
+ mkdir -p $src_release_dir
+ for a in tmp/deploy/sources/*; do
+ for d in $a/*; do
+ # Get package name from path
+ p=`basename $d`
+ p=${p%-*}
+ p=${p%-*}
+ # Only archive GPL packages (update *GPL* regex for your license check)
+ numfiles=`ls tmp/deploy/licenses/$p/*GPL* 2> /dev/null | wc -l`
+ if [ $numfiles -ge 1 ]; then
+ echo Archiving $p
+ mkdir -p $src_release_dir/$p/source
+ cp $d/* $src_release_dir/$p/source 2> /dev/null
+ mkdir -p $src_release_dir/$p/license
+ cp tmp/deploy/licenses/$p/* $src_release_dir/$p/license 2> /dev/null
+ fi
+ done
+ done
+
+At this point, you
+could create a tarball from the ``gpl_source_release`` directory and
+provide that to the end user. This method would be a step toward
+achieving compliance with section 3a of GPLv2 and with section 6 of
+GPLv3.
+
+Providing License Text
+----------------------
+
+One requirement that is often overlooked is inclusion of license text.
+This requirement also needs to be dealt with prior to generating the
+final image. Some licenses require the license text to accompany the
+binary. You can achieve this by adding the following to your
+``local.conf`` file::
+
+ COPY_LIC_MANIFEST = "1"
+ COPY_LIC_DIRS = "1"
+ LICENSE_CREATE_PACKAGE = "1"
+
+Adding these statements to the
+configuration file ensures that the licenses collected during package
+generation are included on your image.
+
+.. note::
+
+ Setting all three variables to "1" results in the image having two
+ copies of the same license file. One copy resides in
+ ``/usr/share/common-licenses`` and the other resides in
+ ``/usr/share/license``.
+
+ The reason for this behavior is because
+ :term:`COPY_LIC_DIRS` and
+ :term:`COPY_LIC_MANIFEST`
+ add a copy of the license when the image is built but do not offer a
+ path for adding licenses for newly installed packages to an image.
+ :term:`LICENSE_CREATE_PACKAGE`
+ adds a separate package and an upgrade path for adding licenses to an
+ image.
+
+As the source :ref:`ref-classes-archiver` class has already archived the
+original unmodified source that contains the license files, you would have
+already met the requirements for inclusion of the license information
+with source as defined by the GPL and other open source licenses.
+
+Providing Compilation Scripts and Source Code Modifications
+-----------------------------------------------------------
+
+At this point, we have addressed all we need prior to generating the
+image. The next two requirements are addressed during the final
+packaging of the release.
+
+By releasing the version of the OpenEmbedded build system and the layers
+used during the build, you will be providing both compilation scripts
+and the source code modifications in one step.
+
+If the deployment team has a :ref:`overview-manual/concepts:bsp layer`
+and a distro layer, and those
+those layers are used to patch, compile, package, or modify (in any way)
+any open source software included in your released images, you might be
+required to release those layers under section 3 of GPLv2 or section 1
+of GPLv3. One way of doing that is with a clean checkout of the version
+of the Yocto Project and layers used during your build. Here is an
+example:
+
+.. code-block:: shell
+
+ # We built using the dunfell branch of the poky repo
+ $ git clone -b dunfell git://git.yoctoproject.org/poky
+ $ cd poky
+ # We built using the release_branch for our layers
+ $ git clone -b release_branch git://git.mycompany.com/meta-my-bsp-layer
+ $ git clone -b release_branch git://git.mycompany.com/meta-my-software-layer
+ # clean up the .git repos
+ $ find . -name ".git" -type d -exec rm -rf {} \;
+
+One thing a development organization might want to consider for end-user
+convenience is to modify
+``meta-poky/conf/templates/default/bblayers.conf.sample`` to ensure that when
+the end user utilizes the released build system to build an image, the
+development organization's layers are included in the ``bblayers.conf`` file
+automatically::
+
+ # POKY_BBLAYERS_CONF_VERSION is increased each time build/conf/bblayers.conf
+ # changes incompatibly
+ POKY_BBLAYERS_CONF_VERSION = "2"
+
+ BBPATH = "${TOPDIR}"
+ BBFILES ?= ""
+
+ BBLAYERS ?= " \
+ ##OEROOT##/meta \
+ ##OEROOT##/meta-poky \
+ ##OEROOT##/meta-yocto-bsp \
+ ##OEROOT##/meta-mylayer \
+ "
+
+Creating and
+providing an archive of the :term:`Metadata`
+layers (recipes, configuration files, and so forth) enables you to meet
+your requirements to include the scripts to control compilation as well
+as any modifications to the original source.
+
+Compliance Limitations with Executables Built from Static Libraries
+-------------------------------------------------------------------
+
+When package A is added to an image via the :term:`RDEPENDS` or :term:`RRECOMMENDS`
+mechanisms as well as explicitly included in the image recipe with
+:term:`IMAGE_INSTALL`, and depends on a static linked library recipe B
+(``DEPENDS += "B"``), package B will neither appear in the generated license
+manifest nor in the generated source tarballs. This occurs as the
+:ref:`ref-classes-license` and :ref:`ref-classes-archiver` classes assume that
+only packages included via :term:`RDEPENDS` or :term:`RRECOMMENDS`
+end up in the image.
+
+As a result, potential obligations regarding license compliance for package B
+may not be met.
+
+The Yocto Project doesn't enable static libraries by default, in part because
+of this issue. Before a solution to this limitation is found, you need to
+keep in mind that if your root filesystem is built from static libraries,
+you will need to manually ensure that your deliveries are compliant
+with the licenses of these libraries.
+
+Copying Non Standard Licenses
+=============================
+
+Some packages, such as the linux-firmware package, have many licenses
+that are not in any way common. You can avoid adding a lot of these
+types of common license files, which are only applicable to a specific
+package, by using the
+:term:`NO_GENERIC_LICENSE`
+variable. Using this variable also avoids QA errors when you use a
+non-common, non-CLOSED license in a recipe.
+
+Here is an example that uses the ``LICENSE.Abilis.txt`` file as
+the license from the fetched source::
+
+ NO_GENERIC_LICENSE[Firmware-Abilis] = "LICENSE.Abilis.txt"
+
diff --git a/documentation/dev-manual/new-machine.rst b/documentation/dev-manual/new-machine.rst
new file mode 100644
index 0000000000..469b2d395a
--- /dev/null
+++ b/documentation/dev-manual/new-machine.rst
@@ -0,0 +1,118 @@
+.. SPDX-License-Identifier: CC-BY-SA-2.0-UK
+
+Adding a New Machine
+********************
+
+Adding a new machine to the Yocto Project is a straightforward process.
+This section describes how to add machines that are similar to those
+that the Yocto Project already supports.
+
+.. note::
+
+ Although well within the capabilities of the Yocto Project, adding a
+ totally new architecture might require changes to ``gcc``/``glibc``
+ and to the site information, which is beyond the scope of this
+ manual.
+
+For a complete example that shows how to add a new machine, see the
+":ref:`bsp-guide/bsp:creating a new bsp layer using the \`\`bitbake-layers\`\` script`"
+section in the Yocto Project Board Support Package (BSP) Developer's
+Guide.
+
+Adding the Machine Configuration File
+=====================================
+
+To add a new machine, you need to add a new machine configuration file
+to the layer's ``conf/machine`` directory. This configuration file
+provides details about the device you are adding.
+
+The OpenEmbedded build system uses the root name of the machine
+configuration file to reference the new machine. For example, given a
+machine configuration file named ``crownbay.conf``, the build system
+recognizes the machine as "crownbay".
+
+The most important variables you must set in your machine configuration
+file or include from a lower-level configuration file are as follows:
+
+- :term:`TARGET_ARCH` (e.g. "arm")
+
+- ``PREFERRED_PROVIDER_virtual/kernel``
+
+- :term:`MACHINE_FEATURES` (e.g. "screen wifi")
+
+You might also need these variables:
+
+- :term:`SERIAL_CONSOLES` (e.g. "115200;ttyS0 115200;ttyS1")
+
+- :term:`KERNEL_IMAGETYPE` (e.g. "zImage")
+
+- :term:`IMAGE_FSTYPES` (e.g. "tar.gz jffs2")
+
+You can find full details on these variables in the reference section.
+You can leverage existing machine ``.conf`` files from
+``meta-yocto-bsp/conf/machine/``.
+
+Adding a Kernel for the Machine
+===============================
+
+The OpenEmbedded build system needs to be able to build a kernel for the
+machine. You need to either create a new kernel recipe for this machine,
+or extend an existing kernel recipe. You can find several kernel recipe
+examples in the Source Directory at ``meta/recipes-kernel/linux`` that
+you can use as references.
+
+If you are creating a new kernel recipe, normal recipe-writing rules
+apply for setting up a :term:`SRC_URI`. Thus, you need to specify any
+necessary patches and set :term:`S` to point at the source code. You need to
+create a :ref:`ref-tasks-configure` task that configures the unpacked kernel with
+a ``defconfig`` file. You can do this by using a ``make defconfig``
+command or, more commonly, by copying in a suitable ``defconfig`` file
+and then running ``make oldconfig``. By making use of ``inherit kernel``
+and potentially some of the ``linux-*.inc`` files, most other
+functionality is centralized and the defaults of the class normally work
+well.
+
+If you are extending an existing kernel recipe, it is usually a matter
+of adding a suitable ``defconfig`` file. The file needs to be added into
+a location similar to ``defconfig`` files used for other machines in a
+given kernel recipe. A possible way to do this is by listing the file in
+the :term:`SRC_URI` and adding the machine to the expression in
+:term:`COMPATIBLE_MACHINE`::
+
+ COMPATIBLE_MACHINE = '(qemux86|qemumips)'
+
+For more information on ``defconfig`` files, see the
+":ref:`kernel-dev/common:changing the configuration`"
+section in the Yocto Project Linux Kernel Development Manual.
+
+Adding a Formfactor Configuration File
+======================================
+
+A formfactor configuration file provides information about the target
+hardware for which the image is being built and information that the
+build system cannot obtain from other sources such as the kernel. Some
+examples of information contained in a formfactor configuration file
+include framebuffer orientation, whether or not the system has a
+keyboard, the positioning of the keyboard in relation to the screen, and
+the screen resolution.
+
+The build system uses reasonable defaults in most cases. However, if
+customization is necessary, you need to create a ``machconfig`` file in
+the ``meta/recipes-bsp/formfactor/files`` directory. This directory
+contains directories for specific machines such as ``qemuarm`` and
+``qemux86``. For information about the settings available and the
+defaults, see the ``meta/recipes-bsp/formfactor/files/config`` file
+found in the same area.
+
+Here is an example for "qemuarm" machine::
+
+ HAVE_TOUCHSCREEN=1
+ HAVE_KEYBOARD=1
+ DISPLAY_CAN_ROTATE=0
+ DISPLAY_ORIENTATION=0
+ #DISPLAY_WIDTH_PIXELS=640
+ #DISPLAY_HEIGHT_PIXELS=480
+ #DISPLAY_BPP=16
+ DISPLAY_DPI=150
+ DISPLAY_SUBPIXEL_ORDER=vrgb
+
diff --git a/documentation/dev-manual/new-recipe.rst b/documentation/dev-manual/new-recipe.rst
new file mode 100644
index 0000000000..61fc2eb122
--- /dev/null
+++ b/documentation/dev-manual/new-recipe.rst
@@ -0,0 +1,1639 @@
+.. SPDX-License-Identifier: CC-BY-SA-2.0-UK
+
+Writing a New Recipe
+********************
+
+Recipes (``.bb`` files) are fundamental components in the Yocto Project
+environment. Each software component built by the OpenEmbedded build
+system requires a recipe to define the component. This section describes
+how to create, write, and test a new recipe.
+
+.. note::
+
+ For information on variables that are useful for recipes and for
+ information about recipe naming issues, see the
+ ":ref:`ref-manual/varlocality:recipes`" section of the Yocto Project
+ Reference Manual.
+
+Overview
+========
+
+The following figure shows the basic process for creating a new recipe.
+The remainder of the section provides details for the steps.
+
+.. image:: figures/recipe-workflow.png
+ :align: center
+ :width: 50%
+
+Locate or Automatically Create a Base Recipe
+============================================
+
+You can always write a recipe from scratch. However, there are three choices
+that can help you quickly get started with a new recipe:
+
+- ``devtool add``: A command that assists in creating a recipe and an
+ environment conducive to development.
+
+- ``recipetool create``: A command provided by the Yocto Project that
+ automates creation of a base recipe based on the source files.
+
+- *Existing Recipes:* Location and modification of an existing recipe
+ that is similar in function to the recipe you need.
+
+.. note::
+
+ For information on recipe syntax, see the
+ ":ref:`dev-manual/new-recipe:recipe syntax`" section.
+
+Creating the Base Recipe Using ``devtool add``
+----------------------------------------------
+
+The ``devtool add`` command uses the same logic for auto-creating the
+recipe as ``recipetool create``, which is listed below. Additionally,
+however, ``devtool add`` sets up an environment that makes it easy for
+you to patch the source and to make changes to the recipe as is often
+necessary when adding a recipe to build a new piece of software to be
+included in a build.
+
+You can find a complete description of the ``devtool add`` command in
+the ":ref:`sdk-manual/extensible:a closer look at \`\`devtool add\`\``" section
+in the Yocto Project Application Development and the Extensible Software
+Development Kit (eSDK) manual.
+
+Creating the Base Recipe Using ``recipetool create``
+----------------------------------------------------
+
+``recipetool create`` automates creation of a base recipe given a set of
+source code files. As long as you can extract or point to the source
+files, the tool will construct a recipe and automatically configure all
+pre-build information into the recipe. For example, suppose you have an
+application that builds using Autotools. Creating the base recipe using
+``recipetool`` results in a recipe that has the pre-build dependencies,
+license requirements, and checksums configured.
+
+To run the tool, you just need to be in your :term:`Build Directory` and
+have sourced the build environment setup script (i.e.
+:ref:`structure-core-script`). To get help on the tool, use the following
+command::
+
+ $ recipetool -h
+ NOTE: Starting bitbake server...
+ usage: recipetool [-d] [-q] [--color COLOR] [-h] <subcommand> ...
+
+ OpenEmbedded recipe tool
+
+ options:
+ -d, --debug Enable debug output
+ -q, --quiet Print only errors
+ --color COLOR Colorize output (where COLOR is auto, always, never)
+ -h, --help show this help message and exit
+
+ subcommands:
+ create Create a new recipe
+ newappend Create a bbappend for the specified target in the specified
+ layer
+ setvar Set a variable within a recipe
+ appendfile Create/update a bbappend to replace a target file
+ appendsrcfiles Create/update a bbappend to add or replace source files
+ appendsrcfile Create/update a bbappend to add or replace a source file
+ Use recipetool <subcommand> --help to get help on a specific command
+
+Running ``recipetool create -o OUTFILE`` creates the base recipe and
+locates it properly in the layer that contains your source files.
+Here are some syntax examples:
+
+ - Use this syntax to generate a recipe based on source. Once generated,
+ the recipe resides in the existing source code layer::
+
+ recipetool create -o OUTFILE source
+
+ - Use this syntax to generate a recipe using code that
+ you extract from source. The extracted code is placed in its own layer
+ defined by :term:`EXTERNALSRC`::
+
+ recipetool create -o OUTFILE -x EXTERNALSRC source
+
+ - Use this syntax to generate a recipe based on source. The options
+ direct ``recipetool`` to generate debugging information. Once generated,
+ the recipe resides in the existing source code layer::
+
+ recipetool create -d -o OUTFILE source
+
+Locating and Using a Similar Recipe
+-----------------------------------
+
+Before writing a recipe from scratch, it is often useful to discover
+whether someone else has already written one that meets (or comes close
+to meeting) your needs. The Yocto Project and OpenEmbedded communities
+maintain many recipes that might be candidates for what you are doing.
+You can find a good central index of these recipes in the
+:oe_layerindex:`OpenEmbedded Layer Index <>`.
+
+Working from an existing recipe or a skeleton recipe is the best way to
+get started. Here are some points on both methods:
+
+- *Locate and modify a recipe that is close to what you want to do:*
+ This method works when you are familiar with the current recipe
+ space. The method does not work so well for those new to the Yocto
+ Project or writing recipes.
+
+ Some risks associated with this method are using a recipe that has
+ areas totally unrelated to what you are trying to accomplish with
+ your recipe, not recognizing areas of the recipe that you might have
+ to add from scratch, and so forth. All these risks stem from
+ unfamiliarity with the existing recipe space.
+
+- *Use and modify the following skeleton recipe:* If for some reason
+ you do not want to use ``recipetool`` and you cannot find an existing
+ recipe that is close to meeting your needs, you can use the following
+ structure to provide the fundamental areas of a new recipe::
+
+ DESCRIPTION = ""
+ HOMEPAGE = ""
+ LICENSE = ""
+ SECTION = ""
+ DEPENDS = ""
+ LIC_FILES_CHKSUM = ""
+
+ SRC_URI = ""
+
+Storing and Naming the Recipe
+=============================
+
+Once you have your base recipe, you should put it in your own layer and
+name it appropriately. Locating it correctly ensures that the
+OpenEmbedded build system can find it when you use BitBake to process
+the recipe.
+
+- *Storing Your Recipe:* The OpenEmbedded build system locates your
+ recipe through the layer's ``conf/layer.conf`` file and the
+ :term:`BBFILES` variable. This
+ variable sets up a path from which the build system can locate
+ recipes. Here is the typical use::
+
+ BBFILES += "${LAYERDIR}/recipes-*/*/*.bb \
+ ${LAYERDIR}/recipes-*/*/*.bbappend"
+
+ Consequently, you need to be sure you locate your new recipe inside
+ your layer such that it can be found.
+
+ You can find more information on how layers are structured in the
+ ":ref:`dev-manual/layers:understanding and creating layers`" section.
+
+- *Naming Your Recipe:* When you name your recipe, you need to follow
+ this naming convention::
+
+ basename_version.bb
+
+ Use lower-cased characters and do not include the reserved suffixes
+ ``-native``, ``-cross``, ``-initial``, or ``-dev`` casually (i.e. do not use
+ them as part of your recipe name unless the string applies). Here are some
+ examples:
+
+ .. code-block:: none
+
+ cups_1.7.0.bb
+ gawk_4.0.2.bb
+ irssi_0.8.16-rc1.bb
+
+Running a Build on the Recipe
+=============================
+
+Creating a new recipe is usually an iterative process that requires
+using BitBake to process the recipe multiple times in order to
+progressively discover and add information to the recipe file.
+
+Assuming you have sourced the build environment setup script (i.e.
+:ref:`structure-core-script`) and you are in the :term:`Build Directory`, use
+BitBake to process your recipe. All you need to provide is the
+``basename`` of the recipe as described in the previous section::
+
+ $ bitbake basename
+
+During the build, the OpenEmbedded build system creates a temporary work
+directory for each recipe
+(``${``\ :term:`WORKDIR`\ ``}``)
+where it keeps extracted source files, log files, intermediate
+compilation and packaging files, and so forth.
+
+The path to the per-recipe temporary work directory depends on the
+context in which it is being built. The quickest way to find this path
+is to have BitBake return it by running the following::
+
+ $ bitbake -e basename | grep ^WORKDIR=
+
+As an example, assume a Source Directory
+top-level folder named ``poky``, a default :term:`Build Directory` at
+``poky/build``, and a ``qemux86-poky-linux`` machine target system.
+Furthermore, suppose your recipe is named ``foo_1.3.0.bb``. In this
+case, the work directory the build system uses to build the package
+would be as follows::
+
+ poky/build/tmp/work/qemux86-poky-linux/foo/1.3.0-r0
+
+Inside this directory you can find sub-directories such as ``image``,
+``packages-split``, and ``temp``. After the build, you can examine these
+to determine how well the build went.
+
+.. note::
+
+ You can find log files for each task in the recipe's ``temp``
+ directory (e.g. ``poky/build/tmp/work/qemux86-poky-linux/foo/1.3.0-r0/temp``).
+ Log files are named ``log.taskname`` (e.g. ``log.do_configure``,
+ ``log.do_fetch``, and ``log.do_compile``).
+
+You can find more information about the build process in
+":doc:`/overview-manual/development-environment`"
+chapter of the Yocto Project Overview and Concepts Manual.
+
+Fetching Code
+=============
+
+The first thing your recipe must do is specify how to fetch the source
+files. Fetching is controlled mainly through the
+:term:`SRC_URI` variable. Your recipe
+must have a :term:`SRC_URI` variable that points to where the source is
+located. For a graphical representation of source locations, see the
+":ref:`overview-manual/concepts:sources`" section in
+the Yocto Project Overview and Concepts Manual.
+
+The :ref:`ref-tasks-fetch` task uses the prefix of each entry in the
+:term:`SRC_URI` variable value to determine which
+:ref:`fetcher <bitbake-user-manual/bitbake-user-manual-fetching:fetchers>`
+to use to get your source files. It is the :term:`SRC_URI` variable that triggers
+the fetcher. The :ref:`ref-tasks-patch` task uses the variable after source is
+fetched to apply patches. The OpenEmbedded build system uses
+:term:`FILESOVERRIDES` for scanning directory locations for local files in
+:term:`SRC_URI`.
+
+The :term:`SRC_URI` variable in your recipe must define each unique location
+for your source files. It is good practice to not hard-code version
+numbers in a URL used in :term:`SRC_URI`. Rather than hard-code these
+values, use ``${``\ :term:`PV`\ ``}``,
+which causes the fetch process to use the version specified in the
+recipe filename. Specifying the version in this manner means that
+upgrading the recipe to a future version is as simple as renaming the
+recipe to match the new version.
+
+Here is a simple example from the
+``meta/recipes-devtools/strace/strace_5.5.bb`` recipe where the source
+comes from a single tarball. Notice the use of the
+:term:`PV` variable::
+
+ SRC_URI = "https://strace.io/files/${PV}/strace-${PV}.tar.xz \
+
+Files mentioned in :term:`SRC_URI` whose names end in a typical archive
+extension (e.g. ``.tar``, ``.tar.gz``, ``.tar.bz2``, ``.zip``, and so
+forth), are automatically extracted during the
+:ref:`ref-tasks-unpack` task. For
+another example that specifies these types of files, see the
+":ref:`dev-manual/new-recipe:building an autotooled package`" section.
+
+Another way of specifying source is from an SCM. For Git repositories,
+you must specify :term:`SRCREV` and you should specify :term:`PV` to include
+the revision with :term:`SRCPV`. Here is an example from the recipe
+``meta/recipes-core/musl/gcompat_git.bb``::
+
+ SRC_URI = "git://git.adelielinux.org/adelie/gcompat.git;protocol=https;branch=current"
+
+ PV = "1.0.0+1.1+git${SRCPV}"
+ SRCREV = "af5a49e489fdc04b9cf02547650d7aeaccd43793"
+
+If your :term:`SRC_URI` statement includes URLs pointing to individual files
+fetched from a remote server other than a version control system,
+BitBake attempts to verify the files against checksums defined in your
+recipe to ensure they have not been tampered with or otherwise modified
+since the recipe was written. Multiple checksums are supported:
+``SRC_URI[md5sum]``, ``SRC_URI[sha1sum]``, ``SRC_URI[sha256sum]``.
+``SRC_URI[sha384sum]`` and ``SRC_URI[sha512sum]``, but only
+``SRC_URI[sha256sum]`` is commonly used.
+
+.. note::
+
+ ``SRC_URI[md5sum]`` used to also be commonly used, but it is deprecated
+ and should be replaced by ``SRC_URI[sha256sum]`` when updating existing
+ recipes.
+
+If your :term:`SRC_URI` variable points to more than a single URL (excluding
+SCM URLs), you need to provide the ``sha256`` checksum for each URL. For these
+cases, you provide a name for each URL as part of the :term:`SRC_URI` and then
+reference that name in the subsequent checksum statements. Here is an example
+combining lines from the files ``git.inc`` and ``git_2.24.1.bb``::
+
+ SRC_URI = "${KERNELORG_MIRROR}/software/scm/git/git-${PV}.tar.gz;name=tarball \
+ ${KERNELORG_MIRROR}/software/scm/git/git-manpages-${PV}.tar.gz;name=manpages"
+
+ SRC_URI[tarball.sha256sum] = "ad5334956301c86841eb1e5b1bb20884a6bad89a10a6762c958220c7cf64da02"
+ SRC_URI[manpages.sha256sum] = "9a7ae3a093bea39770eb96ca3e5b40bff7af0b9f6123f089d7821d0e5b8e1230"
+
+The proper value for the ``sha256`` checksum might be available together
+with other signatures on the download page for the upstream source (e.g.
+``md5``, ``sha1``, ``sha256``, ``GPG``, and so forth). Because the
+OpenEmbedded build system typically only deals with ``sha256sum``,
+you should verify all the signatures you find by hand.
+
+If no :term:`SRC_URI` checksums are specified when you attempt to build the
+recipe, or you provide an incorrect checksum, the build will produce an
+error for each missing or incorrect checksum. As part of the error
+message, the build system provides the checksum string corresponding to
+the fetched file. Once you have the correct checksums, you can copy and
+paste them into your recipe and then run the build again to continue.
+
+.. note::
+
+ As mentioned, if the upstream source provides signatures for
+ verifying the downloaded source code, you should verify those
+ manually before setting the checksum values in the recipe and
+ continuing with the build.
+
+This final example is a bit more complicated and is from the
+``meta/recipes-sato/rxvt-unicode/rxvt-unicode_9.20.bb`` recipe. The
+example's :term:`SRC_URI` statement identifies multiple files as the source
+files for the recipe: a tarball, a patch file, a desktop file, and an icon::
+
+ SRC_URI = "http://dist.schmorp.de/rxvt-unicode/Attic/rxvt-unicode-${PV}.tar.bz2 \
+ file://xwc.patch \
+ file://rxvt.desktop \
+ file://rxvt.png"
+
+When you specify local files using the ``file://`` URI protocol, the
+build system fetches files from the local machine. The path is relative
+to the :term:`FILESPATH` variable
+and searches specific directories in a certain order:
+``${``\ :term:`BP`\ ``}``,
+``${``\ :term:`BPN`\ ``}``, and
+``files``. The directories are assumed to be subdirectories of the
+directory in which the recipe or append file resides. For another
+example that specifies these types of files, see the
+"`building a single .c file package`_" section.
+
+The previous example also specifies a patch file. Patch files are files
+whose names usually end in ``.patch`` or ``.diff`` but can end with
+compressed suffixes such as ``diff.gz`` and ``patch.bz2``, for example.
+The build system automatically applies patches as described in the
+":ref:`dev-manual/new-recipe:patching code`" section.
+
+Fetching Code Through Firewalls
+-------------------------------
+
+Some users are behind firewalls and need to fetch code through a proxy.
+See the ":doc:`/ref-manual/faq`" chapter for advice.
+
+Limiting the Number of Parallel Connections
+-------------------------------------------
+
+Some users are behind firewalls or use servers where the number of parallel
+connections is limited. In such cases, you can limit the number of fetch
+tasks being run in parallel by adding the following to your ``local.conf``
+file::
+
+ do_fetch[number_threads] = "4"
+
+Unpacking Code
+==============
+
+During the build, the
+:ref:`ref-tasks-unpack` task unpacks
+the source with ``${``\ :term:`S`\ ``}``
+pointing to where it is unpacked.
+
+If you are fetching your source files from an upstream source archived
+tarball and the tarball's internal structure matches the common
+convention of a top-level subdirectory named
+``${``\ :term:`BPN`\ ``}-${``\ :term:`PV`\ ``}``,
+then you do not need to set :term:`S`. However, if :term:`SRC_URI` specifies to
+fetch source from an archive that does not use this convention, or from
+an SCM like Git or Subversion, your recipe needs to define :term:`S`.
+
+If processing your recipe using BitBake successfully unpacks the source
+files, you need to be sure that the directory pointed to by ``${S}``
+matches the structure of the source.
+
+Patching Code
+=============
+
+Sometimes it is necessary to patch code after it has been fetched. Any
+files mentioned in :term:`SRC_URI` whose names end in ``.patch`` or
+``.diff`` or compressed versions of these suffixes (e.g. ``diff.gz``,
+``patch.bz2``, etc.) are treated as patches. The
+:ref:`ref-tasks-patch` task
+automatically applies these patches.
+
+The build system should be able to apply patches with the "-p1" option
+(i.e. one directory level in the path will be stripped off). If your
+patch needs to have more directory levels stripped off, specify the
+number of levels using the "striplevel" option in the :term:`SRC_URI` entry
+for the patch. Alternatively, if your patch needs to be applied in a
+specific subdirectory that is not specified in the patch file, use the
+"patchdir" option in the entry.
+
+As with all local files referenced in
+:term:`SRC_URI` using ``file://``,
+you should place patch files in a directory next to the recipe either
+named the same as the base name of the recipe
+(:term:`BP` and
+:term:`BPN`) or "files".
+
+Licensing
+=========
+
+Your recipe needs to define variables related to the license
+under whith the software is distributed. See the
+:ref:`contributor-guide/recipe-style-guide:recipe license fields`
+section in the Contributor Guide for details.
+
+Dependencies
+============
+
+Most software packages have a short list of other packages that they
+require, which are called dependencies. These dependencies fall into two
+main categories: build-time dependencies, which are required when the
+software is built; and runtime dependencies, which are required to be
+installed on the target in order for the software to run.
+
+Within a recipe, you specify build-time dependencies using the
+:term:`DEPENDS` variable. Although there are nuances,
+items specified in :term:`DEPENDS` should be names of other
+recipes. It is important that you specify all build-time dependencies
+explicitly.
+
+Another consideration is that configure scripts might automatically
+check for optional dependencies and enable corresponding functionality
+if those dependencies are found. If you wish to make a recipe that is
+more generally useful (e.g. publish the recipe in a layer for others to
+use), instead of hard-disabling the functionality, you can use the
+:term:`PACKAGECONFIG` variable to allow functionality and the
+corresponding dependencies to be enabled and disabled easily by other
+users of the recipe.
+
+Similar to build-time dependencies, you specify runtime dependencies
+through a variable -
+:term:`RDEPENDS`, which is
+package-specific. All variables that are package-specific need to have
+the name of the package added to the end as an override. Since the main
+package for a recipe has the same name as the recipe, and the recipe's
+name can be found through the
+``${``\ :term:`PN`\ ``}`` variable, then
+you specify the dependencies for the main package by setting
+``RDEPENDS:${PN}``. If the package were named ``${PN}-tools``, then you
+would set ``RDEPENDS:${PN}-tools``, and so forth.
+
+Some runtime dependencies will be set automatically at packaging time.
+These dependencies include any shared library dependencies (i.e. if a
+package "example" contains "libexample" and another package "mypackage"
+contains a binary that links to "libexample" then the OpenEmbedded build
+system will automatically add a runtime dependency to "mypackage" on
+"example"). See the
+":ref:`overview-manual/concepts:automatically added runtime dependencies`"
+section in the Yocto Project Overview and Concepts Manual for further
+details.
+
+Configuring the Recipe
+======================
+
+Most software provides some means of setting build-time configuration
+options before compilation. Typically, setting these options is
+accomplished by running a configure script with options, or by modifying
+a build configuration file.
+
+.. note::
+
+ As of Yocto Project Release 1.7, some of the core recipes that
+ package binary configuration scripts now disable the scripts due to
+ the scripts previously requiring error-prone path substitution. The
+ OpenEmbedded build system uses ``pkg-config`` now, which is much more
+ robust. You can find a list of the ``*-config`` scripts that are disabled
+ in the ":ref:`migration-1.7-binary-configuration-scripts-disabled`" section
+ in the Yocto Project Reference Manual.
+
+A major part of build-time configuration is about checking for
+build-time dependencies and possibly enabling optional functionality as
+a result. You need to specify any build-time dependencies for the
+software you are building in your recipe's
+:term:`DEPENDS` value, in terms of
+other recipes that satisfy those dependencies. You can often find
+build-time or runtime dependencies described in the software's
+documentation.
+
+The following list provides configuration items of note based on how
+your software is built:
+
+- *Autotools:* If your source files have a ``configure.ac`` file, then
+ your software is built using Autotools. If this is the case, you just
+ need to modify the configuration.
+
+ When using Autotools, your recipe needs to inherit the
+ :ref:`ref-classes-autotools` class and it does not have to
+ contain a :ref:`ref-tasks-configure` task. However, you might still want to
+ make some adjustments. For example, you can set :term:`EXTRA_OECONF` or
+ :term:`PACKAGECONFIG_CONFARGS` to pass any needed configure options that
+ are specific to the recipe.
+
+- *CMake:* If your source files have a ``CMakeLists.txt`` file, then
+ your software is built using CMake. If this is the case, you just
+ need to modify the configuration.
+
+ When you use CMake, your recipe needs to inherit the
+ :ref:`ref-classes-cmake` class and it does not have to contain a
+ :ref:`ref-tasks-configure` task. You can make some adjustments by setting
+ :term:`EXTRA_OECMAKE` to pass any needed configure options that are
+ specific to the recipe.
+
+ .. note::
+
+ If you need to install one or more custom CMake toolchain files
+ that are supplied by the application you are building, install the
+ files to ``${D}${datadir}/cmake/Modules`` during :ref:`ref-tasks-install`.
+
+- *Other:* If your source files do not have a ``configure.ac`` or
+ ``CMakeLists.txt`` file, then your software is built using some
+ method other than Autotools or CMake. If this is the case, you
+ normally need to provide a
+ :ref:`ref-tasks-configure` task
+ in your recipe unless, of course, there is nothing to configure.
+
+ Even if your software is not being built by Autotools or CMake, you
+ still might not need to deal with any configuration issues. You need
+ to determine if configuration is even a required step. You might need
+ to modify a Makefile or some configuration file used for the build to
+ specify necessary build options. Or, perhaps you might need to run a
+ provided, custom configure script with the appropriate options.
+
+ For the case involving a custom configure script, you would run
+ ``./configure --help`` and look for the options you need to set.
+
+Once configuration succeeds, it is always good practice to look at the
+``log.do_configure`` file to ensure that the appropriate options have
+been enabled and no additional build-time dependencies need to be added
+to :term:`DEPENDS`. For example, if the configure script reports that it
+found something not mentioned in :term:`DEPENDS`, or that it did not find
+something that it needed for some desired optional functionality, then
+you would need to add those to :term:`DEPENDS`. Looking at the log might
+also reveal items being checked for, enabled, or both that you do not
+want, or items not being found that are in :term:`DEPENDS`, in which case
+you would need to look at passing extra options to the configure script
+as needed. For reference information on configure options specific to
+the software you are building, you can consult the output of the
+``./configure --help`` command within ``${S}`` or consult the software's
+upstream documentation.
+
+Using Headers to Interface with Devices
+=======================================
+
+If your recipe builds an application that needs to communicate with some
+device or needs an API into a custom kernel, you will need to provide
+appropriate header files. Under no circumstances should you ever modify
+the existing
+``meta/recipes-kernel/linux-libc-headers/linux-libc-headers.inc`` file.
+These headers are used to build ``libc`` and must not be compromised
+with custom or machine-specific header information. If you customize
+``libc`` through modified headers all other applications that use
+``libc`` thus become affected.
+
+.. note::
+
+ Never copy and customize the ``libc`` header file (i.e.
+ ``meta/recipes-kernel/linux-libc-headers/linux-libc-headers.inc``).
+
+The correct way to interface to a device or custom kernel is to use a
+separate package that provides the additional headers for the driver or
+other unique interfaces. When doing so, your application also becomes
+responsible for creating a dependency on that specific provider.
+
+Consider the following:
+
+- Never modify ``linux-libc-headers.inc``. Consider that file to be
+ part of the ``libc`` system, and not something you use to access the
+ kernel directly. You should access ``libc`` through specific ``libc``
+ calls.
+
+- Applications that must talk directly to devices should either provide
+ necessary headers themselves, or establish a dependency on a special
+ headers package that is specific to that driver.
+
+For example, suppose you want to modify an existing header that adds I/O
+control or network support. If the modifications are used by a small
+number programs, providing a unique version of a header is easy and has
+little impact. When doing so, bear in mind the guidelines in the
+previous list.
+
+.. note::
+
+ If for some reason your changes need to modify the behavior of the ``libc``,
+ and subsequently all other applications on the system, use a ``.bbappend``
+ to modify the ``linux-kernel-headers.inc`` file. However, take care to not
+ make the changes machine specific.
+
+Consider a case where your kernel is older and you need an older
+``libc`` ABI. The headers installed by your recipe should still be a
+standard mainline kernel, not your own custom one.
+
+When you use custom kernel headers you need to get them from
+:term:`STAGING_KERNEL_DIR`,
+which is the directory with kernel headers that are required to build
+out-of-tree modules. Your recipe will also need the following::
+
+ do_configure[depends] += "virtual/kernel:do_shared_workdir"
+
+Compilation
+===========
+
+During a build, the :ref:`ref-tasks-compile` task happens after source is fetched,
+unpacked, and configured. If the recipe passes through :ref:`ref-tasks-compile`
+successfully, nothing needs to be done.
+
+However, if the compile step fails, you need to diagnose the failure.
+Here are some common issues that cause failures.
+
+.. note::
+
+ For cases where improper paths are detected for configuration files
+ or for when libraries/headers cannot be found, be sure you are using
+ the more robust ``pkg-config``. See the note in section
+ ":ref:`dev-manual/new-recipe:Configuring the Recipe`" for additional information.
+
+- *Parallel build failures:* These failures manifest themselves as
+ intermittent errors, or errors reporting that a file or directory
+ that should be created by some other part of the build process could
+ not be found. This type of failure can occur even if, upon
+ inspection, the file or directory does exist after the build has
+ failed, because that part of the build process happened in the wrong
+ order.
+
+ To fix the problem, you need to either satisfy the missing dependency
+ in the Makefile or whatever script produced the Makefile, or (as a
+ workaround) set :term:`PARALLEL_MAKE` to an empty string::
+
+ PARALLEL_MAKE = ""
+
+ For information on parallel Makefile issues, see the
+ ":ref:`dev-manual/debugging:debugging parallel make races`" section.
+
+- *Improper host path usage:* This failure applies to recipes building
+ for the target or ":ref:`ref-classes-nativesdk`" only. The
+ failure occurs when the compilation process uses improper headers,
+ libraries, or other files from the host system when cross-compiling for
+ the target.
+
+ To fix the problem, examine the ``log.do_compile`` file to identify
+ the host paths being used (e.g. ``/usr/include``, ``/usr/lib``, and
+ so forth) and then either add configure options, apply a patch, or do
+ both.
+
+- *Failure to find required libraries/headers:* If a build-time
+ dependency is missing because it has not been declared in
+ :term:`DEPENDS`, or because the
+ dependency exists but the path used by the build process to find the
+ file is incorrect and the configure step did not detect it, the
+ compilation process could fail. For either of these failures, the
+ compilation process notes that files could not be found. In these
+ cases, you need to go back and add additional options to the
+ configure script as well as possibly add additional build-time
+ dependencies to :term:`DEPENDS`.
+
+ Occasionally, it is necessary to apply a patch to the source to
+ ensure the correct paths are used. If you need to specify paths to
+ find files staged into the sysroot from other recipes, use the
+ variables that the OpenEmbedded build system provides (e.g.
+ :term:`STAGING_BINDIR`, :term:`STAGING_INCDIR`, :term:`STAGING_DATADIR`, and so
+ forth).
+
+Installing
+==========
+
+During :ref:`ref-tasks-install`, the task copies the built files along with their
+hierarchy to locations that would mirror their locations on the target
+device. The installation process copies files from the
+``${``\ :term:`S`\ ``}``,
+``${``\ :term:`B`\ ``}``, and
+``${``\ :term:`WORKDIR`\ ``}``
+directories to the ``${``\ :term:`D`\ ``}``
+directory to create the structure as it should appear on the target
+system.
+
+How your software is built affects what you must do to be sure your
+software is installed correctly. The following list describes what you
+must do for installation depending on the type of build system used by
+the software being built:
+
+- *Autotools and CMake:* If the software your recipe is building uses
+ Autotools or CMake, the OpenEmbedded build system understands how to
+ install the software. Consequently, you do not have to have a
+ :ref:`ref-tasks-install` task as part of your recipe. You just need to make
+ sure the install portion of the build completes with no issues.
+ However, if you wish to install additional files not already being
+ installed by ``make install``, you should do this using a
+ ``do_install:append`` function using the install command as described
+ in the "Manual" bulleted item later in this list.
+
+- *Other (using* ``make install``\ *)*: You need to define a :ref:`ref-tasks-install`
+ function in your recipe. The function should call
+ ``oe_runmake install`` and will likely need to pass in the
+ destination directory as well. How you pass that path is dependent on
+ how the ``Makefile`` being run is written (e.g. ``DESTDIR=${D}``,
+ ``PREFIX=${D}``, ``INSTALLROOT=${D}``, and so forth).
+
+ For an example recipe using ``make install``, see the
+ ":ref:`dev-manual/new-recipe:building a makefile-based package`" section.
+
+- *Manual:* You need to define a :ref:`ref-tasks-install` function in your
+ recipe. The function must first use ``install -d`` to create the
+ directories under
+ ``${``\ :term:`D`\ ``}``. Once the
+ directories exist, your function can use ``install`` to manually
+ install the built software into the directories.
+
+ You can find more information on ``install`` at
+ https://www.gnu.org/software/coreutils/manual/html_node/install-invocation.html.
+
+For the scenarios that do not use Autotools or CMake, you need to track
+the installation and diagnose and fix any issues until everything
+installs correctly. You need to look in the default location of
+``${D}``, which is ``${WORKDIR}/image``, to be sure your files have been
+installed correctly.
+
+.. note::
+
+ - During the installation process, you might need to modify some of
+ the installed files to suit the target layout. For example, you
+ might need to replace hard-coded paths in an initscript with
+ values of variables provided by the build system, such as
+ replacing ``/usr/bin/`` with ``${bindir}``. If you do perform such
+ modifications during :ref:`ref-tasks-install`, be sure to modify the
+ destination file after copying rather than before copying.
+ Modifying after copying ensures that the build system can
+ re-execute :ref:`ref-tasks-install` if needed.
+
+ - ``oe_runmake install``, which can be run directly or can be run
+ indirectly by the :ref:`ref-classes-autotools` and
+ :ref:`ref-classes-cmake` classes, runs ``make install`` in parallel.
+ Sometimes, a Makefile can have missing dependencies between targets that
+ can result in race conditions. If you experience intermittent failures
+ during :ref:`ref-tasks-install`, you might be able to work around them by
+ disabling parallel Makefile installs by adding the following to the
+ recipe::
+
+ PARALLEL_MAKEINST = ""
+
+ See :term:`PARALLEL_MAKEINST` for additional information.
+
+ - If you need to install one or more custom CMake toolchain files
+ that are supplied by the application you are building, install the
+ files to ``${D}${datadir}/cmake/Modules`` during
+ :ref:`ref-tasks-install`.
+
+Enabling System Services
+========================
+
+If you want to install a service, which is a process that usually starts
+on boot and runs in the background, then you must include some
+additional definitions in your recipe.
+
+If you are adding services and the service initialization script or the
+service file itself is not installed, you must provide for that
+installation in your recipe using a ``do_install:append`` function. If
+your recipe already has a :ref:`ref-tasks-install` function, update the function
+near its end rather than adding an additional ``do_install:append``
+function.
+
+When you create the installation for your services, you need to
+accomplish what is normally done by ``make install``. In other words,
+make sure your installation arranges the output similar to how it is
+arranged on the target system.
+
+The OpenEmbedded build system provides support for starting services two
+different ways:
+
+- *SysVinit:* SysVinit is a system and service manager that manages the
+ init system used to control the very basic functions of your system.
+ The init program is the first program started by the Linux kernel
+ when the system boots. Init then controls the startup, running and
+ shutdown of all other programs.
+
+ To enable a service using SysVinit, your recipe needs to inherit the
+ :ref:`ref-classes-update-rc.d` class. The class helps
+ facilitate safely installing the package on the target.
+
+ You will need to set the
+ :term:`INITSCRIPT_PACKAGES`,
+ :term:`INITSCRIPT_NAME`,
+ and
+ :term:`INITSCRIPT_PARAMS`
+ variables within your recipe.
+
+- *systemd:* System Management Daemon (systemd) was designed to replace
+ SysVinit and to provide enhanced management of services. For more
+ information on systemd, see the systemd homepage at
+ https://freedesktop.org/wiki/Software/systemd/.
+
+ To enable a service using systemd, your recipe needs to inherit the
+ :ref:`ref-classes-systemd` class. See the ``systemd.bbclass`` file
+ located in your :term:`Source Directory` section for more information.
+
+Packaging
+=========
+
+Successful packaging is a combination of automated processes performed
+by the OpenEmbedded build system and some specific steps you need to
+take. The following list describes the process:
+
+- *Splitting Files*: The :ref:`ref-tasks-package` task splits the files produced
+ by the recipe into logical components. Even software that produces a
+ single binary might still have debug symbols, documentation, and
+ other logical components that should be split out. The :ref:`ref-tasks-package`
+ task ensures that files are split up and packaged correctly.
+
+- *Running QA Checks*: The :ref:`ref-classes-insane` class adds a
+ step to the package generation process so that output quality
+ assurance checks are generated by the OpenEmbedded build system. This
+ step performs a range of checks to be sure the build's output is free
+ of common problems that show up during runtime. For information on
+ these checks, see the :ref:`ref-classes-insane` class and
+ the ":ref:`ref-manual/qa-checks:qa error and warning messages`"
+ chapter in the Yocto Project Reference Manual.
+
+- *Hand-Checking Your Packages*: After you build your software, you
+ need to be sure your packages are correct. Examine the
+ ``${``\ :term:`WORKDIR`\ ``}/packages-split``
+ directory and make sure files are where you expect them to be. If you
+ discover problems, you can set
+ :term:`PACKAGES`,
+ :term:`FILES`,
+ ``do_install(:append)``, and so forth as needed.
+
+- *Splitting an Application into Multiple Packages*: If you need to
+ split an application into several packages, see the
+ ":ref:`dev-manual/new-recipe:splitting an application into multiple packages`"
+ section for an example.
+
+- *Installing a Post-Installation Script*: For an example showing how
+ to install a post-installation script, see the
+ ":ref:`dev-manual/new-recipe:post-installation scripts`" section.
+
+- *Marking Package Architecture*: Depending on what your recipe is
+ building and how it is configured, it might be important to mark the
+ packages produced as being specific to a particular machine, or to
+ mark them as not being specific to a particular machine or
+ architecture at all.
+
+ By default, packages apply to any machine with the same architecture
+ as the target machine. When a recipe produces packages that are
+ machine-specific (e.g. the
+ :term:`MACHINE` value is passed
+ into the configure script or a patch is applied only for a particular
+ machine), you should mark them as such by adding the following to the
+ recipe::
+
+ PACKAGE_ARCH = "${MACHINE_ARCH}"
+
+ On the other hand, if the recipe produces packages that do not
+ contain anything specific to the target machine or architecture at
+ all (e.g. recipes that simply package script files or configuration
+ files), you should use the :ref:`ref-classes-allarch` class to
+ do this for you by adding this to your recipe::
+
+ inherit allarch
+
+ Ensuring that the package architecture is correct is not critical
+ while you are doing the first few builds of your recipe. However, it
+ is important in order to ensure that your recipe rebuilds (or does
+ not rebuild) appropriately in response to changes in configuration,
+ and to ensure that you get the appropriate packages installed on the
+ target machine, particularly if you run separate builds for more than
+ one target machine.
+
+Sharing Files Between Recipes
+=============================
+
+Recipes often need to use files provided by other recipes on the build
+host. For example, an application linking to a common library needs
+access to the library itself and its associated headers. The way this
+access is accomplished is by populating a sysroot with files. Each
+recipe has two sysroots in its work directory, one for target files
+(``recipe-sysroot``) and one for files that are native to the build host
+(``recipe-sysroot-native``).
+
+.. note::
+
+ You could find the term "staging" used within the Yocto project
+ regarding files populating sysroots (e.g. the :term:`STAGING_DIR`
+ variable).
+
+Recipes should never populate the sysroot directly (i.e. write files
+into sysroot). Instead, files should be installed into standard
+locations during the
+:ref:`ref-tasks-install` task within
+the ``${``\ :term:`D`\ ``}`` directory. The
+reason for this limitation is that almost all files that populate the
+sysroot are cataloged in manifests in order to ensure the files can be
+removed later when a recipe is either modified or removed. Thus, the
+sysroot is able to remain free from stale files.
+
+A subset of the files installed by the :ref:`ref-tasks-install` task are
+used by the :ref:`ref-tasks-populate_sysroot` task as defined by the
+:term:`SYSROOT_DIRS` variable to automatically populate the sysroot. It
+is possible to modify the list of directories that populate the sysroot.
+The following example shows how you could add the ``/opt`` directory to
+the list of directories within a recipe::
+
+ SYSROOT_DIRS += "/opt"
+
+.. note::
+
+ The `/sysroot-only` is to be used by recipes that generate artifacts
+ that are not included in the target filesystem, allowing them to share
+ these artifacts without needing to use the :term:`DEPLOY_DIR`.
+
+For a more complete description of the :ref:`ref-tasks-populate_sysroot`
+task and its associated functions, see the
+:ref:`staging <ref-classes-staging>` class.
+
+Using Virtual Providers
+=======================
+
+Prior to a build, if you know that several different recipes provide the
+same functionality, you can use a virtual provider (i.e. ``virtual/*``)
+as a placeholder for the actual provider. The actual provider is
+determined at build-time.
+
+A common scenario where a virtual provider is used would be for the kernel
+recipe. Suppose you have three kernel recipes whose :term:`PN` values map to
+``kernel-big``, ``kernel-mid``, and ``kernel-small``. Furthermore, each of
+these recipes in some way uses a :term:`PROVIDES` statement that essentially
+identifies itself as being able to provide ``virtual/kernel``. Here is one way
+through the :ref:`ref-classes-kernel` class::
+
+ PROVIDES += "virtual/kernel"
+
+Any recipe that inherits the :ref:`ref-classes-kernel` class is
+going to utilize a :term:`PROVIDES` statement that identifies that recipe as
+being able to provide the ``virtual/kernel`` item.
+
+Now comes the time to actually build an image and you need a kernel
+recipe, but which one? You can configure your build to call out the
+kernel recipe you want by using the :term:`PREFERRED_PROVIDER` variable. As
+an example, consider the :yocto_git:`x86-base.inc
+</poky/tree/meta/conf/machine/include/x86/x86-base.inc>` include file, which is a
+machine (i.e. :term:`MACHINE`) configuration file. This include file is the
+reason all x86-based machines use the ``linux-yocto`` kernel. Here are the
+relevant lines from the include file::
+
+ PREFERRED_PROVIDER_virtual/kernel ??= "linux-yocto"
+ PREFERRED_VERSION_linux-yocto ??= "4.15%"
+
+When you use a virtual provider, you do not have to "hard code" a recipe
+name as a build dependency. You can use the
+:term:`DEPENDS` variable to state the
+build is dependent on ``virtual/kernel`` for example::
+
+ DEPENDS = "virtual/kernel"
+
+During the build, the OpenEmbedded build system picks
+the correct recipe needed for the ``virtual/kernel`` dependency based on
+the :term:`PREFERRED_PROVIDER` variable. If you want to use the small kernel
+mentioned at the beginning of this section, configure your build as
+follows::
+
+ PREFERRED_PROVIDER_virtual/kernel ??= "kernel-small"
+
+.. note::
+
+ Any recipe that :term:`PROVIDES` a ``virtual/*`` item that is ultimately not
+ selected through :term:`PREFERRED_PROVIDER` does not get built. Preventing these
+ recipes from building is usually the desired behavior since this mechanism's
+ purpose is to select between mutually exclusive alternative providers.
+
+The following lists specific examples of virtual providers:
+
+- ``virtual/kernel``: Provides the name of the kernel recipe to use
+ when building a kernel image.
+
+- ``virtual/bootloader``: Provides the name of the bootloader to use
+ when building an image.
+
+- ``virtual/libgbm``: Provides ``gbm.pc``.
+
+- ``virtual/egl``: Provides ``egl.pc`` and possibly ``wayland-egl.pc``.
+
+- ``virtual/libgl``: Provides ``gl.pc`` (i.e. libGL).
+
+- ``virtual/libgles1``: Provides ``glesv1_cm.pc`` (i.e. libGLESv1_CM).
+
+- ``virtual/libgles2``: Provides ``glesv2.pc`` (i.e. libGLESv2).
+
+.. note::
+
+ Virtual providers only apply to build time dependencies specified with
+ :term:`PROVIDES` and :term:`DEPENDS`. They do not apply to runtime
+ dependencies specified with :term:`RPROVIDES` and :term:`RDEPENDS`.
+
+Properly Versioning Pre-Release Recipes
+=======================================
+
+Sometimes the name of a recipe can lead to versioning problems when the
+recipe is upgraded to a final release. For example, consider the
+``irssi_0.8.16-rc1.bb`` recipe file in the list of example recipes in
+the ":ref:`dev-manual/new-recipe:storing and naming the recipe`" section.
+This recipe is at a release candidate stage (i.e. "rc1"). When the recipe is
+released, the recipe filename becomes ``irssi_0.8.16.bb``. The version
+change from ``0.8.16-rc1`` to ``0.8.16`` is seen as a decrease by the
+build system and package managers, so the resulting packages will not
+correctly trigger an upgrade.
+
+In order to ensure the versions compare properly, the recommended
+convention is to use a tilde (``~``) character as follows::
+
+ PV = 0.8.16~rc1
+
+This way ``0.8.16~rc1`` sorts before ``0.8.16``. See the
+":ref:`contributor-guide/recipe-style-guide:version policy`" section in the
+Yocto Project and OpenEmbedded Contributor Guide for more details about
+versioning code corresponding to a pre-release or to a specific Git commit.
+
+Post-Installation Scripts
+=========================
+
+Post-installation scripts run immediately after installing a package on
+the target or during image creation when a package is included in an
+image. To add a post-installation script to a package, add a
+``pkg_postinst:``\ `PACKAGENAME`\ ``()`` function to the recipe file
+(``.bb``) and replace `PACKAGENAME` with the name of the package you want
+to attach to the ``postinst`` script. To apply the post-installation
+script to the main package for the recipe, which is usually what is
+required, specify
+``${``\ :term:`PN`\ ``}`` in place of
+PACKAGENAME.
+
+A post-installation function has the following structure::
+
+ pkg_postinst:PACKAGENAME() {
+ # Commands to carry out
+ }
+
+The script defined in the post-installation function is called when the
+root filesystem is created. If the script succeeds, the package is
+marked as installed.
+
+.. note::
+
+ Any RPM post-installation script that runs on the target should
+ return a 0 exit code. RPM does not allow non-zero exit codes for
+ these scripts, and the RPM package manager will cause the package to
+ fail installation on the target.
+
+Sometimes it is necessary for the execution of a post-installation
+script to be delayed until the first boot. For example, the script might
+need to be executed on the device itself. To delay script execution
+until boot time, you must explicitly mark post installs to defer to the
+target. You can use ``pkg_postinst_ontarget()`` or call
+``postinst_intercept delay_to_first_boot`` from ``pkg_postinst()``. Any
+failure of a ``pkg_postinst()`` script (including exit 1) triggers an
+error during the
+:ref:`ref-tasks-rootfs` task.
+
+If you have recipes that use ``pkg_postinst`` function and they require
+the use of non-standard native tools that have dependencies during
+root filesystem construction, you need to use the
+:term:`PACKAGE_WRITE_DEPS`
+variable in your recipe to list these tools. If you do not use this
+variable, the tools might be missing and execution of the
+post-installation script is deferred until first boot. Deferring the
+script to the first boot is undesirable and impossible for read-only
+root filesystems.
+
+.. note::
+
+ There is equivalent support for pre-install, pre-uninstall, and post-uninstall
+ scripts by way of ``pkg_preinst``, ``pkg_prerm``, and ``pkg_postrm``,
+ respectively. These scrips work in exactly the same way as does
+ ``pkg_postinst`` with the exception that they run at different times. Also,
+ because of when they run, they are not applicable to being run at image
+ creation time like ``pkg_postinst``.
+
+Testing
+=======
+
+The final step for completing your recipe is to be sure that the
+software you built runs correctly. To accomplish runtime testing, add
+the build's output packages to your image and test them on the target.
+
+For information on how to customize your image by adding specific
+packages, see ":ref:`dev-manual/customizing-images:customizing images`" section.
+
+Examples
+========
+
+To help summarize how to write a recipe, this section provides some
+recipe examples given various scenarios:
+
+- `Building a single .c file package`_
+
+- `Building a Makefile-based package`_
+
+- `Building an Autotooled package`_
+
+- `Building a Meson package`_
+
+- `Splitting an application into multiple packages`_
+
+- `Packaging externally produced binaries`_
+
+Building a Single .c File Package
+---------------------------------
+
+Building an application from a single file that is stored locally (e.g. under
+``files``) requires a recipe that has the file listed in the :term:`SRC_URI`
+variable. Additionally, you need to manually write the :ref:`ref-tasks-compile`
+and :ref:`ref-tasks-install` tasks. The :term:`S` variable defines the
+directory containing the source code, which is set to :term:`WORKDIR` in this
+case --- the directory BitBake uses for the build::
+
+ SUMMARY = "Simple helloworld application"
+ SECTION = "examples"
+ LICENSE = "MIT"
+ LIC_FILES_CHKSUM = "file://${COMMON_LICENSE_DIR}/MIT;md5=0835ade698e0bcf8506ecda2f7b4f302"
+
+ SRC_URI = "file://helloworld.c"
+
+ S = "${WORKDIR}"
+
+ do_compile() {
+ ${CC} ${LDFLAGS} helloworld.c -o helloworld
+ }
+
+ do_install() {
+ install -d ${D}${bindir}
+ install -m 0755 helloworld ${D}${bindir}
+ }
+
+By default, the ``helloworld``, ``helloworld-dbg``, and ``helloworld-dev`` packages
+are built. For information on how to customize the packaging process, see the
+":ref:`dev-manual/new-recipe:splitting an application into multiple packages`"
+section.
+
+Building a Makefile-Based Package
+---------------------------------
+
+Applications built with GNU ``make`` require a recipe that has the source archive
+listed in :term:`SRC_URI`. You do not need to add a :ref:`ref-tasks-compile`
+step since by default BitBake starts the ``make`` command to compile the
+application. If you need additional ``make`` options, you should store them in
+the :term:`EXTRA_OEMAKE` or :term:`PACKAGECONFIG_CONFARGS` variables. BitBake
+passes these options into the GNU ``make`` invocation. Note that a
+:ref:`ref-tasks-install` task is still required. Otherwise, BitBake runs an
+empty :ref:`ref-tasks-install` task by default.
+
+Some applications might require extra parameters to be passed to the
+compiler. For example, the application might need an additional header
+path. You can accomplish this by adding to the :term:`CFLAGS` variable. The
+following example shows this::
+
+ CFLAGS:prepend = "-I ${S}/include "
+
+In the following example, ``lz4`` is a makefile-based package::
+
+ SUMMARY = "Extremely Fast Compression algorithm"
+ DESCRIPTION = "LZ4 is a very fast lossless compression algorithm, providing compression speed at 400 MB/s per core, scalable with multi-cores CPU. It also features an extremely fast decoder, with speed in multiple GB/s per core, typically reaching RAM speed limits on multi-core systems."
+ HOMEPAGE = "https://github.com/lz4/lz4"
+
+ LICENSE = "BSD-2-Clause | GPL-2.0-only"
+ LIC_FILES_CHKSUM = "file://lib/LICENSE;md5=ebc2ea4814a64de7708f1571904b32cc \
+ file://programs/COPYING;md5=b234ee4d69f5fce4486a80fdaf4a4263 \
+ file://LICENSE;md5=d57c0d21cb917fb4e0af2454aa48b956 \
+ "
+
+ PE = "1"
+
+ SRCREV = "d44371841a2f1728a3f36839fd4b7e872d0927d3"
+
+ SRC_URI = "git://github.com/lz4/lz4.git;branch=release;protocol=https \
+ file://CVE-2021-3520.patch \
+ "
+ UPSTREAM_CHECK_GITTAGREGEX = "v(?P<pver>.*)"
+
+ S = "${WORKDIR}/git"
+
+ CVE_STATUS[CVE-2014-4715] = "fixed-version: Fixed in r118, which is larger than the current version"
+
+ EXTRA_OEMAKE = "PREFIX=${prefix} CC='${CC}' CFLAGS='${CFLAGS}' DESTDIR=${D} LIBDIR=${libdir} INCLUDEDIR=${includedir} BUILD_STATIC=no"
+
+ do_install() {
+ oe_runmake install
+ }
+
+ BBCLASSEXTEND = "native nativesdk"
+
+Building an Autotooled Package
+------------------------------
+
+Applications built with the Autotools such as ``autoconf`` and ``automake``
+require a recipe that has a source archive listed in :term:`SRC_URI` and also
+inherit the :ref:`ref-classes-autotools` class, which contains the definitions
+of all the steps needed to build an Autotool-based application. The result of
+the build is automatically packaged. And, if the application uses NLS for
+localization, packages with local information are generated (one package per
+language). Here is one example: (``hello_2.3.bb``)::
+
+ SUMMARY = "GNU Helloworld application"
+ SECTION = "examples"
+ LICENSE = "GPL-2.0-or-later"
+ LIC_FILES_CHKSUM = "file://COPYING;md5=751419260aa954499f7abaabaa882bbe"
+
+ SRC_URI = "${GNU_MIRROR}/hello/hello-${PV}.tar.gz"
+
+ inherit autotools gettext
+
+The variable :term:`LIC_FILES_CHKSUM` is used to track source license changes
+as described in the ":ref:`dev-manual/licenses:tracking license changes`"
+section in the Yocto Project Overview and Concepts Manual. You can quickly
+create Autotool-based recipes in a manner similar to the previous example.
+
+.. _ref-building-meson-package:
+
+Building a Meson Package
+------------------------
+
+Applications built with the `Meson build system <https://mesonbuild.com/>`__
+just need a recipe that has sources described in :term:`SRC_URI` and inherits
+the :ref:`ref-classes-meson` class.
+
+The :oe_git:`ipcalc recipe </meta-openembedded/tree/meta-networking/recipes-support/ipcalc>`
+is a simple example of an application without dependencies::
+
+ SUMMARY = "Tool to assist in network address calculations for IPv4 and IPv6."
+ HOMEPAGE = "https://gitlab.com/ipcalc/ipcalc"
+
+ SECTION = "net"
+
+ LICENSE = "GPL-2.0-only"
+ LIC_FILES_CHKSUM = "file://COPYING;md5=b234ee4d69f5fce4486a80fdaf4a4263"
+
+ SRC_URI = "git://gitlab.com/ipcalc/ipcalc.git;protocol=https;branch=master"
+ SRCREV = "4c4261a47f355946ee74013d4f5d0494487cc2d6"
+
+ S = "${WORKDIR}/git"
+
+ inherit meson
+
+Applications with dependencies are likely to inherit the
+:ref:`ref-classes-pkgconfig` class, as ``pkg-config`` is the default method
+used by Meson to find dependencies and compile applications against them.
+
+Splitting an Application into Multiple Packages
+-----------------------------------------------
+
+You can use the variables :term:`PACKAGES` and :term:`FILES` to split an
+application into multiple packages.
+
+Here is an example that uses the ``libxpm`` recipe. By default,
+this recipe generates a single package that contains the library along
+with a few binaries. You can modify the recipe to split the binaries
+into separate packages::
+
+ require xorg-lib-common.inc
+
+ SUMMARY = "Xpm: X Pixmap extension library"
+ LICENSE = "MIT"
+ LIC_FILES_CHKSUM = "file://COPYING;md5=51f4270b012ecd4ab1a164f5f4ed6cf7"
+ DEPENDS += "libxext libsm libxt"
+ PE = "1"
+
+ XORG_PN = "libXpm"
+
+ PACKAGES =+ "sxpm cxpm"
+ FILES:cxpm = "${bindir}/cxpm"
+ FILES:sxpm = "${bindir}/sxpm"
+
+In the previous example, we want to ship the ``sxpm`` and ``cxpm``
+binaries in separate packages. Since ``bindir`` would be packaged into
+the main :term:`PN` package by default, we prepend the :term:`PACKAGES` variable
+so additional package names are added to the start of list. This results
+in the extra ``FILES:*`` variables then containing information that
+define which files and directories go into which packages. Files
+included by earlier packages are skipped by latter packages. Thus, the
+main :term:`PN` package does not include the above listed files.
+
+Packaging Externally Produced Binaries
+--------------------------------------
+
+Sometimes, you need to add pre-compiled binaries to an image. For
+example, suppose that there are binaries for proprietary code,
+created by a particular division of a company. Your part of the company
+needs to use those binaries as part of an image that you are building
+using the OpenEmbedded build system. Since you only have the binaries
+and not the source code, you cannot use a typical recipe that expects to
+fetch the source specified in
+:term:`SRC_URI` and then compile it.
+
+One method is to package the binaries and then install them as part of
+the image. Generally, it is not a good idea to package binaries since,
+among other things, it can hinder the ability to reproduce builds and
+could lead to compatibility problems with ABI in the future. However,
+sometimes you have no choice.
+
+The easiest solution is to create a recipe that uses the
+:ref:`ref-classes-bin-package` class and to be sure that you are using default
+locations for build artifacts. In most cases, the
+:ref:`ref-classes-bin-package` class handles "skipping" the configure and
+compile steps as well as sets things up to grab packages from the appropriate
+area. In particular, this class sets ``noexec`` on both the
+:ref:`ref-tasks-configure` and :ref:`ref-tasks-compile` tasks, sets
+``FILES:${PN}`` to "/" so that it picks up all files, and sets up a
+:ref:`ref-tasks-install` task, which effectively copies all files from ``${S}``
+to ``${D}``. The :ref:`ref-classes-bin-package` class works well when the files
+extracted into ``${S}`` are already laid out in the way they should be laid out
+on the target. For more information on these variables, see the :term:`FILES`,
+:term:`PN`, :term:`S`, and :term:`D` variables in the Yocto Project Reference
+Manual's variable glossary.
+
+.. note::
+
+ - Using :term:`DEPENDS` is a good
+ idea even for components distributed in binary form, and is often
+ necessary for shared libraries. For a shared library, listing the
+ library dependencies in :term:`DEPENDS` makes sure that the libraries
+ are available in the staging sysroot when other recipes link
+ against the library, which might be necessary for successful
+ linking.
+
+ - Using :term:`DEPENDS` also allows runtime dependencies between
+ packages to be added automatically. See the
+ ":ref:`overview-manual/concepts:automatically added runtime dependencies`"
+ section in the Yocto Project Overview and Concepts Manual for more
+ information.
+
+If you cannot use the :ref:`ref-classes-bin-package` class, you need to be sure you are
+doing the following:
+
+- Create a recipe where the
+ :ref:`ref-tasks-configure` and
+ :ref:`ref-tasks-compile` tasks do
+ nothing: It is usually sufficient to just not define these tasks in
+ the recipe, because the default implementations do nothing unless a
+ Makefile is found in
+ ``${``\ :term:`S`\ ``}``.
+
+ If ``${S}`` might contain a Makefile, or if you inherit some class
+ that replaces :ref:`ref-tasks-configure` and :ref:`ref-tasks-compile` with custom
+ versions, then you can use the
+ ``[``\ :ref:`noexec <bitbake-user-manual/bitbake-user-manual-metadata:variable flags>`\ ``]``
+ flag to turn the tasks into no-ops, as follows::
+
+ do_configure[noexec] = "1"
+ do_compile[noexec] = "1"
+
+ Unlike :ref:`bitbake-user-manual/bitbake-user-manual-metadata:deleting a task`,
+ using the flag preserves the dependency chain from the :ref:`ref-tasks-fetch`,
+ :ref:`ref-tasks-unpack`, and :ref:`ref-tasks-patch` tasks to the
+ :ref:`ref-tasks-install` task.
+
+- Make sure your :ref:`ref-tasks-install` task installs the binaries
+ appropriately.
+
+- Ensure that you set up :term:`FILES`
+ (usually
+ ``FILES:${``\ :term:`PN`\ ``}``) to
+ point to the files you have installed, which of course depends on
+ where you have installed them and whether those files are in
+ different locations than the defaults.
+
+Following Recipe Style Guidelines
+=================================
+
+When writing recipes, it is good to conform to existing style guidelines.
+See the ":doc:`../contributor-guide/recipe-style-guide`" in the Yocto Project
+and OpenEmbedded Contributor Guide for reference.
+
+It is common for existing recipes to deviate a bit from this style.
+However, aiming for at least a consistent style is a good idea. Some
+practices, such as omitting spaces around ``=`` operators in assignments
+or ordering recipe components in an erratic way, are widely seen as poor
+style.
+
+Recipe Syntax
+=============
+
+Understanding recipe file syntax is important for writing recipes. The
+following list overviews the basic items that make up a BitBake recipe
+file. For more complete BitBake syntax descriptions, see the
+":doc:`bitbake:bitbake-user-manual/bitbake-user-manual-metadata`"
+chapter of the BitBake User Manual.
+
+- *Variable Assignments and Manipulations:* Variable assignments allow
+ a value to be assigned to a variable. The assignment can be static
+ text or might include the contents of other variables. In addition to
+ the assignment, appending and prepending operations are also
+ supported.
+
+ The following example shows some of the ways you can use variables in
+ recipes::
+
+ S = "${WORKDIR}/postfix-${PV}"
+ CFLAGS += "-DNO_ASM"
+ CFLAGS:append = " --enable-important-feature"
+
+- *Functions:* Functions provide a series of actions to be performed.
+ You usually use functions to override the default implementation of a
+ task function or to complement a default function (i.e. append or
+ prepend to an existing function). Standard functions use ``sh`` shell
+ syntax, although access to OpenEmbedded variables and internal
+ methods are also available.
+
+ Here is an example function from the ``sed`` recipe::
+
+ do_install () {
+ autotools_do_install
+ install -d ${D}${base_bindir}
+ mv ${D}${bindir}/sed ${D}${base_bindir}/sed
+ rmdir ${D}${bindir}/
+ }
+
+ It is
+ also possible to implement new functions that are called between
+ existing tasks as long as the new functions are not replacing or
+ complementing the default functions. You can implement functions in
+ Python instead of shell. Both of these options are not seen in the
+ majority of recipes.
+
+- *Keywords:* BitBake recipes use only a few keywords. You use keywords
+ to include common functions (``inherit``), load parts of a recipe
+ from other files (``include`` and ``require``) and export variables
+ to the environment (``export``).
+
+ The following example shows the use of some of these keywords::
+
+ export POSTCONF = "${STAGING_BINDIR}/postconf"
+ inherit autoconf
+ require otherfile.inc
+
+- *Comments (#):* Any lines that begin with the hash character (``#``)
+ are treated as comment lines and are ignored::
+
+ # This is a comment
+
+This next list summarizes the most important and most commonly used
+parts of the recipe syntax. For more information on these parts of the
+syntax, you can reference the
+":doc:`bitbake:bitbake-user-manual/bitbake-user-manual-metadata`" chapter
+in the BitBake User Manual.
+
+- *Line Continuation (\\):* Use the backward slash (``\``) character to
+ split a statement over multiple lines. Place the slash character at
+ the end of the line that is to be continued on the next line::
+
+ VAR = "A really long \
+ line"
+
+ .. note::
+
+ You cannot have any characters including spaces or tabs after the
+ slash character.
+
+- *Using Variables (${VARNAME}):* Use the ``${VARNAME}`` syntax to
+ access the contents of a variable::
+
+ SRC_URI = "${SOURCEFORGE_MIRROR}/libpng/zlib-${PV}.tar.gz"
+
+ .. note::
+
+ It is important to understand that the value of a variable
+ expressed in this form does not get substituted automatically. The
+ expansion of these expressions happens on-demand later (e.g.
+ usually when a function that makes reference to the variable
+ executes). This behavior ensures that the values are most
+ appropriate for the context in which they are finally used. On the
+ rare occasion that you do need the variable expression to be
+ expanded immediately, you can use the
+ :=
+ operator instead of
+ =
+ when you make the assignment, but this is not generally needed.
+
+- *Quote All Assignments ("value"):* Use double quotes around values in
+ all variable assignments (e.g. ``"value"``). Here is an example::
+
+ VAR1 = "${OTHERVAR}"
+ VAR2 = "The version is ${PV}"
+
+- *Conditional Assignment (?=):* Conditional assignment is used to
+ assign a value to a variable, but only when the variable is currently
+ unset. Use the question mark followed by the equal sign (``?=``) to
+ make a "soft" assignment used for conditional assignment. Typically,
+ "soft" assignments are used in the ``local.conf`` file for variables
+ that are allowed to come through from the external environment.
+
+ Here is an example where ``VAR1`` is set to "New value" if it is
+ currently empty. However, if ``VAR1`` has already been set, it
+ remains unchanged::
+
+ VAR1 ?= "New value"
+
+ In this next example, ``VAR1`` is left with the value "Original value"::
+
+ VAR1 = "Original value"
+ VAR1 ?= "New value"
+
+- *Appending (+=):* Use the plus character followed by the equals sign
+ (``+=``) to append values to existing variables.
+
+ .. note::
+
+ This operator adds a space between the existing content of the
+ variable and the new content.
+
+ Here is an example::
+
+ SRC_URI += "file://fix-makefile.patch"
+
+- *Prepending (=+):* Use the equals sign followed by the plus character
+ (``=+``) to prepend values to existing variables.
+
+ .. note::
+
+ This operator adds a space between the new content and the
+ existing content of the variable.
+
+ Here is an example::
+
+ VAR =+ "Starts"
+
+- *Appending (:append):* Use the ``:append`` operator to append values
+ to existing variables. This operator does not add any additional
+ space. Also, the operator is applied after all the ``+=``, and ``=+``
+ operators have been applied and after all ``=`` assignments have
+ occurred. This means that if ``:append`` is used in a recipe, it can
+ only be overridden by another layer using the special ``:remove``
+ operator, which in turn will prevent further layers from adding it back.
+
+ The following example shows the space being explicitly added to the
+ start to ensure the appended value is not merged with the existing
+ value::
+
+ CFLAGS:append = " --enable-important-feature"
+
+ You can also use
+ the ``:append`` operator with overrides, which results in the actions
+ only being performed for the specified target or machine::
+
+ CFLAGS:append:sh4 = " --enable-important-sh4-specific-feature"
+
+- *Prepending (:prepend):* Use the ``:prepend`` operator to prepend
+ values to existing variables. This operator does not add any
+ additional space. Also, the operator is applied after all the ``+=``,
+ and ``=+`` operators have been applied and after all ``=``
+ assignments have occurred.
+
+ The following example shows the space being explicitly added to the
+ end to ensure the prepended value is not merged with the existing
+ value::
+
+ CFLAGS:prepend = "-I${S}/myincludes "
+
+ You can also use the
+ ``:prepend`` operator with overrides, which results in the actions
+ only being performed for the specified target or machine::
+
+ CFLAGS:prepend:sh4 = "-I${S}/myincludes "
+
+- *Overrides:* You can use overrides to set a value conditionally,
+ typically based on how the recipe is being built. For example, to set
+ the :term:`KBRANCH` variable's
+ value to "standard/base" for any target
+ :term:`MACHINE`, except for
+ qemuarm where it should be set to "standard/arm-versatile-926ejs",
+ you would do the following::
+
+ KBRANCH = "standard/base"
+ KBRANCH:qemuarm = "standard/arm-versatile-926ejs"
+
+ Overrides are also used to separate
+ alternate values of a variable in other situations. For example, when
+ setting variables such as
+ :term:`FILES` and
+ :term:`RDEPENDS` that are
+ specific to individual packages produced by a recipe, you should
+ always use an override that specifies the name of the package.
+
+- *Indentation:* Use spaces for indentation rather than tabs. For
+ shell functions, both currently work. However, it is a policy
+ decision of the Yocto Project to use tabs in shell functions. Realize
+ that some layers have a policy to use spaces for all indentation.
+
+- *Using Python for Complex Operations:* For more advanced processing,
+ it is possible to use Python code during variable assignments (e.g.
+ search and replacement on a variable).
+
+ You indicate Python code using the ``${@python_code}`` syntax for the
+ variable assignment::
+
+ SRC_URI = "ftp://ftp.info-zip.org/pub/infozip/src/zip${@d.getVar('PV',1).replace('.', '')}.tgz
+
+- *Shell Function Syntax:* Write shell functions as if you were writing
+ a shell script when you describe a list of actions to take. You
+ should ensure that your script works with a generic ``sh`` and that
+ it does not require any ``bash`` or other shell-specific
+ functionality. The same considerations apply to various system
+ utilities (e.g. ``sed``, ``grep``, ``awk``, and so forth) that you
+ might wish to use. If in doubt, you should check with multiple
+ implementations --- including those from BusyBox.
+
diff --git a/documentation/dev-manual/packages.rst b/documentation/dev-manual/packages.rst
new file mode 100644
index 0000000000..e5028fffdc
--- /dev/null
+++ b/documentation/dev-manual/packages.rst
@@ -0,0 +1,1250 @@
+.. SPDX-License-Identifier: CC-BY-SA-2.0-UK
+
+Working with Packages
+*********************
+
+This section describes a few tasks that involve packages:
+
+- :ref:`dev-manual/packages:excluding packages from an image`
+
+- :ref:`dev-manual/packages:incrementing a package version`
+
+- :ref:`dev-manual/packages:handling optional module packaging`
+
+- :ref:`dev-manual/packages:using runtime package management`
+
+- :ref:`dev-manual/packages:generating and using signed packages`
+
+- :ref:`Setting up and running package test
+ (ptest) <dev-manual/packages:testing packages with ptest>`
+
+- :ref:`dev-manual/packages:creating node package manager (npm) packages`
+
+- :ref:`dev-manual/packages:adding custom metadata to packages`
+
+Excluding Packages from an Image
+================================
+
+You might find it necessary to prevent specific packages from being
+installed into an image. If so, you can use several variables to direct
+the build system to essentially ignore installing recommended packages
+or to not install a package at all.
+
+The following list introduces variables you can use to prevent packages
+from being installed into your image. Each of these variables only works
+with IPK and RPM package types, not for Debian packages.
+Also, you can use these variables from your ``local.conf`` file
+or attach them to a specific image recipe by using a recipe name
+override. For more detail on the variables, see the descriptions in the
+Yocto Project Reference Manual's glossary chapter.
+
+- :term:`BAD_RECOMMENDATIONS`:
+ Use this variable to specify "recommended-only" packages that you do
+ not want installed.
+
+- :term:`NO_RECOMMENDATIONS`:
+ Use this variable to prevent all "recommended-only" packages from
+ being installed.
+
+- :term:`PACKAGE_EXCLUDE`:
+ Use this variable to prevent specific packages from being installed
+ regardless of whether they are "recommended-only" or not. You need to
+ realize that the build process could fail with an error when you
+ prevent the installation of a package whose presence is required by
+ an installed package.
+
+Incrementing a Package Version
+==============================
+
+This section provides some background on how binary package versioning
+is accomplished and presents some of the services, variables, and
+terminology involved.
+
+In order to understand binary package versioning, you need to consider
+the following:
+
+- Binary Package: The binary package that is eventually built and
+ installed into an image.
+
+- Binary Package Version: The binary package version is composed of two
+ components --- a version and a revision.
+
+ .. note::
+
+ Technically, a third component, the "epoch" (i.e. :term:`PE`) is involved
+ but this discussion for the most part ignores :term:`PE`.
+
+ The version and revision are taken from the
+ :term:`PV` and
+ :term:`PR` variables, respectively.
+
+- :term:`PV`: The recipe version. :term:`PV` represents the version of the
+ software being packaged. Do not confuse :term:`PV` with the binary
+ package version.
+
+- :term:`PR`: The recipe revision.
+
+- :term:`SRCPV`: The OpenEmbedded
+ build system uses this string to help define the value of :term:`PV` when
+ the source code revision needs to be included in it.
+
+- :yocto_wiki:`PR Service </PR_Service>`: A
+ network-based service that helps automate keeping package feeds
+ compatible with existing package manager applications such as RPM,
+ APT, and OPKG.
+
+Whenever the binary package content changes, the binary package version
+must change. Changing the binary package version is accomplished by
+changing or "bumping" the :term:`PR` and/or :term:`PV` values. Increasing these
+values occurs one of two ways:
+
+- Automatically using a Package Revision Service (PR Service).
+
+- Manually incrementing the :term:`PR` and/or :term:`PV` variables.
+
+Given a primary challenge of any build system and its users is how to
+maintain a package feed that is compatible with existing package manager
+applications such as RPM, APT, and OPKG, using an automated system is
+much preferred over a manual system. In either system, the main
+requirement is that binary package version numbering increases in a
+linear fashion and that there is a number of version components that
+support that linear progression. For information on how to ensure
+package revisioning remains linear, see the
+":ref:`dev-manual/packages:automatically incrementing a package version number`"
+section.
+
+The following three sections provide related information on the PR
+Service, the manual method for "bumping" :term:`PR` and/or :term:`PV`, and on
+how to ensure binary package revisioning remains linear.
+
+Working With a PR Service
+-------------------------
+
+As mentioned, attempting to maintain revision numbers in the
+:term:`Metadata` is error prone, inaccurate,
+and causes problems for people submitting recipes. Conversely, the PR
+Service automatically generates increasing numbers, particularly the
+revision field, which removes the human element.
+
+.. note::
+
+ For additional information on using a PR Service, you can see the
+ :yocto_wiki:`PR Service </PR_Service>` wiki page.
+
+The Yocto Project uses variables in order of decreasing priority to
+facilitate revision numbering (i.e.
+:term:`PE`,
+:term:`PV`, and
+:term:`PR` for epoch, version, and
+revision, respectively). The values are highly dependent on the policies
+and procedures of a given distribution and package feed.
+
+Because the OpenEmbedded build system uses
+":ref:`signatures <overview-manual/concepts:checksums (signatures)>`", which are
+unique to a given build, the build system knows when to rebuild
+packages. All the inputs into a given task are represented by a
+signature, which can trigger a rebuild when different. Thus, the build
+system itself does not rely on the :term:`PR`, :term:`PV`, and :term:`PE` numbers to
+trigger a rebuild. The signatures, however, can be used to generate
+these values.
+
+The PR Service works with both ``OEBasic`` and ``OEBasicHash``
+generators. The value of :term:`PR` bumps when the checksum changes and the
+different generator mechanisms change signatures under different
+circumstances.
+
+As implemented, the build system includes values from the PR Service
+into the :term:`PR` field as an addition using the form "``.x``" so ``r0``
+becomes ``r0.1``, ``r0.2`` and so forth. This scheme allows existing
+:term:`PR` values to be used for whatever reasons, which include manual
+:term:`PR` bumps, should it be necessary.
+
+By default, the PR Service is not enabled or running. Thus, the packages
+generated are just "self consistent". The build system adds and removes
+packages and there are no guarantees about upgrade paths but images will
+be consistent and correct with the latest changes.
+
+The simplest form for a PR Service is for a single host development system
+that builds the package feed (building system). For this scenario, you can
+enable a local PR Service by setting :term:`PRSERV_HOST` in your
+``local.conf`` file in the :term:`Build Directory`::
+
+ PRSERV_HOST = "localhost:0"
+
+Once the service is started, packages will automatically
+get increasing :term:`PR` values and BitBake takes care of starting and
+stopping the server.
+
+If you have a more complex setup where multiple host development systems
+work against a common, shared package feed, you have a single PR Service
+running and it is connected to each building system. For this scenario,
+you need to start the PR Service using the ``bitbake-prserv`` command::
+
+ bitbake-prserv --host ip --port port --start
+
+In addition to
+hand-starting the service, you need to update the ``local.conf`` file of
+each building system as described earlier so each system points to the
+server and port.
+
+It is also recommended you use build history, which adds some sanity
+checks to binary package versions, in conjunction with the server that
+is running the PR Service. To enable build history, add the following to
+each building system's ``local.conf`` file::
+
+ # It is recommended to activate "buildhistory" for testing the PR service
+ INHERIT += "buildhistory"
+ BUILDHISTORY_COMMIT = "1"
+
+For information on build
+history, see the
+":ref:`dev-manual/build-quality:maintaining build output quality`" section.
+
+.. note::
+
+ The OpenEmbedded build system does not maintain :term:`PR` information as
+ part of the shared state (sstate) packages. If you maintain an sstate
+ feed, it's expected that either all your building systems that
+ contribute to the sstate feed use a shared PR service, or you do not
+ run a PR service on any of your building systems.
+
+ That's because if you had multiple machines sharing a PR service but
+ not their sstate feed, you could end up with "diverging" hashes for
+ the same output artefacts. When presented to the share PR service,
+ each would be considered as new and would increase the revision
+ number, causing many unnecessary package upgrades.
+
+ For more information on shared state, see the
+ ":ref:`overview-manual/concepts:shared state cache`"
+ section in the Yocto Project Overview and Concepts Manual.
+
+Manually Bumping PR
+-------------------
+
+The alternative to setting up a PR Service is to manually "bump" the
+:term:`PR` variable.
+
+If a committed change results in changing the package output, then the
+value of the :term:`PR` variable needs to be increased (or "bumped") as part of
+that commit. For new recipes you should add the :term:`PR` variable and set
+its initial value equal to "r0", which is the default. Even though the
+default value is "r0", the practice of adding it to a new recipe makes
+it harder to forget to bump the variable when you make changes to the
+recipe in future.
+
+Usually, version increases occur only to binary packages. However, if
+for some reason :term:`PV` changes but does not increase, you can increase
+the :term:`PE` variable (Package Epoch). The :term:`PE` variable defaults to
+"0".
+
+Binary package version numbering strives to follow the `Debian Version
+Field Policy
+Guidelines <https://www.debian.org/doc/debian-policy/ch-controlfields.html>`__.
+These guidelines define how versions are compared and what "increasing"
+a version means.
+
+Automatically Incrementing a Package Version Number
+---------------------------------------------------
+
+When fetching a repository, BitBake uses the
+:term:`SRCREV` variable to determine
+the specific source code revision from which to build. You set the
+:term:`SRCREV` variable to
+:term:`AUTOREV` to cause the
+OpenEmbedded build system to automatically use the latest revision of
+the software::
+
+ SRCREV = "${AUTOREV}"
+
+Furthermore, you need to reference :term:`SRCPV` in :term:`PV` in order to
+automatically update the version whenever the revision of the source
+code changes. Here is an example::
+
+ PV = "1.0+git${SRCPV}"
+
+The OpenEmbedded build system substitutes :term:`SRCPV` with the following:
+
+.. code-block:: none
+
+ AUTOINC+source_code_revision
+
+The build system replaces the ``AUTOINC``
+with a number. The number used depends on the state of the PR Service:
+
+- If PR Service is enabled, the build system increments the number,
+ which is similar to the behavior of
+ :term:`PR`. This behavior results in
+ linearly increasing package versions, which is desirable. Here is an
+ example:
+
+ .. code-block:: none
+
+ hello-world-git_0.0+git0+b6558dd387-r0.0_armv7a-neon.ipk
+ hello-world-git_0.0+git1+dd2f5c3565-r0.0_armv7a-neon.ipk
+
+- If PR Service is not enabled, the build system replaces the
+ ``AUTOINC`` placeholder with zero (i.e. "0"). This results in
+ changing the package version since the source revision is included.
+ However, package versions are not increased linearly. Here is an
+ example:
+
+ .. code-block:: none
+
+ hello-world-git_0.0+git0+b6558dd387-r0.0_armv7a-neon.ipk
+ hello-world-git_0.0+git0+dd2f5c3565-r0.0_armv7a-neon.ipk
+
+In summary, the OpenEmbedded build system does not track the history of
+binary package versions for this purpose. ``AUTOINC``, in this case, is
+comparable to :term:`PR`. If PR server is not enabled, ``AUTOINC`` in the
+package version is simply replaced by "0". If PR server is enabled, the
+build system keeps track of the package versions and bumps the number
+when the package revision changes.
+
+Handling Optional Module Packaging
+==================================
+
+Many pieces of software split functionality into optional modules (or
+plugins) and the plugins that are built might depend on configuration
+options. To avoid having to duplicate the logic that determines what
+modules are available in your recipe or to avoid having to package each
+module by hand, the OpenEmbedded build system provides functionality to
+handle module packaging dynamically.
+
+To handle optional module packaging, you need to do two things:
+
+- Ensure the module packaging is actually done.
+
+- Ensure that any dependencies on optional modules from other recipes
+ are satisfied by your recipe.
+
+Making Sure the Packaging is Done
+---------------------------------
+
+To ensure the module packaging actually gets done, you use the
+``do_split_packages`` function within the ``populate_packages`` Python
+function in your recipe. The ``do_split_packages`` function searches for
+a pattern of files or directories under a specified path and creates a
+package for each one it finds by appending to the
+:term:`PACKAGES` variable and
+setting the appropriate values for ``FILES:packagename``,
+``RDEPENDS:packagename``, ``DESCRIPTION:packagename``, and so forth.
+Here is an example from the ``lighttpd`` recipe::
+
+ python populate_packages:prepend () {
+ lighttpd_libdir = d.expand('${libdir}')
+ do_split_packages(d, lighttpd_libdir, '^mod_(.*).so$',
+ 'lighttpd-module-%s', 'Lighttpd module for %s',
+ extra_depends='')
+ }
+
+The previous example specifies a number of things in the call to
+``do_split_packages``.
+
+- A directory within the files installed by your recipe through
+ :ref:`ref-tasks-install` in which to search.
+
+- A regular expression used to match module files in that directory. In
+ the example, note the parentheses () that mark the part of the
+ expression from which the module name should be derived.
+
+- A pattern to use for the package names.
+
+- A description for each package.
+
+- An empty string for ``extra_depends``, which disables the default
+ dependency on the main ``lighttpd`` package. Thus, if a file in
+ ``${libdir}`` called ``mod_alias.so`` is found, a package called
+ ``lighttpd-module-alias`` is created for it and the
+ :term:`DESCRIPTION` is set to
+ "Lighttpd module for alias".
+
+Often, packaging modules is as simple as the previous example. However,
+there are more advanced options that you can use within
+``do_split_packages`` to modify its behavior. And, if you need to, you
+can add more logic by specifying a hook function that is called for each
+package. It is also perfectly acceptable to call ``do_split_packages``
+multiple times if you have more than one set of modules to package.
+
+For more examples that show how to use ``do_split_packages``, see the
+``connman.inc`` file in the ``meta/recipes-connectivity/connman/``
+directory of the ``poky`` :ref:`source repository <overview-manual/development-environment:yocto project source repositories>`. You can
+also find examples in ``meta/classes-recipe/kernel.bbclass``.
+
+Here is a reference that shows ``do_split_packages`` mandatory and
+optional arguments::
+
+ Mandatory arguments
+
+ root
+ The path in which to search
+ file_regex
+ Regular expression to match searched files.
+ Use parentheses () to mark the part of this
+ expression that should be used to derive the
+ module name (to be substituted where %s is
+ used in other function arguments as noted below)
+ output_pattern
+ Pattern to use for the package names. Must
+ include %s.
+ description
+ Description to set for each package. Must
+ include %s.
+
+ Optional arguments
+
+ postinst
+ Postinstall script to use for all packages
+ (as a string)
+ recursive
+ True to perform a recursive search --- default
+ False
+ hook
+ A hook function to be called for every match.
+ The function will be called with the following
+ arguments (in the order listed):
+
+ f
+ Full path to the file/directory match
+ pkg
+ The package name
+ file_regex
+ As above
+ output_pattern
+ As above
+ modulename
+ The module name derived using file_regex
+ extra_depends
+ Extra runtime dependencies (RDEPENDS) to be
+ set for all packages. The default value of None
+ causes a dependency on the main package
+ (${PN}) --- if you do not want this, pass empty
+ string '' for this parameter.
+ aux_files_pattern
+ Extra item(s) to be added to FILES for each
+ package. Can be a single string item or a list
+ of strings for multiple items. Must include %s.
+ postrm
+ postrm script to use for all packages (as a
+ string)
+ allow_dirs
+ True to allow directories to be matched -
+ default False
+ prepend
+ If True, prepend created packages to PACKAGES
+ instead of the default False which appends them
+ match_path
+ match file_regex on the whole relative path to
+ the root rather than just the filename
+ aux_files_pattern_verbatim
+ Extra item(s) to be added to FILES for each
+ package, using the actual derived module name
+ rather than converting it to something legal
+ for a package name. Can be a single string item
+ or a list of strings for multiple items. Must
+ include %s.
+ allow_links
+ True to allow symlinks to be matched --- default
+ False
+ summary
+ Summary to set for each package. Must include %s;
+ defaults to description if not set.
+
+
+
+Satisfying Dependencies
+-----------------------
+
+The second part for handling optional module packaging is to ensure that
+any dependencies on optional modules from other recipes are satisfied by
+your recipe. You can be sure these dependencies are satisfied by using
+the :term:`PACKAGES_DYNAMIC`
+variable. Here is an example that continues with the ``lighttpd`` recipe
+shown earlier::
+
+ PACKAGES_DYNAMIC = "lighttpd-module-.*"
+
+The name
+specified in the regular expression can of course be anything. In this
+example, it is ``lighttpd-module-`` and is specified as the prefix to
+ensure that any :term:`RDEPENDS` and
+:term:`RRECOMMENDS` on a package
+name starting with the prefix are satisfied during build time. If you
+are using ``do_split_packages`` as described in the previous section,
+the value you put in :term:`PACKAGES_DYNAMIC` should correspond to the name
+pattern specified in the call to ``do_split_packages``.
+
+Using Runtime Package Management
+================================
+
+During a build, BitBake always transforms a recipe into one or more
+packages. For example, BitBake takes the ``bash`` recipe and produces a
+number of packages (e.g. ``bash``, ``bash-bashbug``,
+``bash-completion``, ``bash-completion-dbg``, ``bash-completion-dev``,
+``bash-completion-extra``, ``bash-dbg``, and so forth). Not all
+generated packages are included in an image.
+
+In several situations, you might need to update, add, remove, or query
+the packages on a target device at runtime (i.e. without having to
+generate a new image). Examples of such situations include:
+
+- You want to provide in-the-field updates to deployed devices (e.g.
+ security updates).
+
+- You want to have a fast turn-around development cycle for one or more
+ applications that run on your device.
+
+- You want to temporarily install the "debug" packages of various
+ applications on your device so that debugging can be greatly improved
+ by allowing access to symbols and source debugging.
+
+- You want to deploy a more minimal package selection of your device
+ but allow in-the-field updates to add a larger selection for
+ customization.
+
+In all these situations, you have something similar to a more
+traditional Linux distribution in that in-field devices are able to
+receive pre-compiled packages from a server for installation or update.
+Being able to install these packages on a running, in-field device is
+what is termed "runtime package management".
+
+In order to use runtime package management, you need a host or server
+machine that serves up the pre-compiled packages plus the required
+metadata. You also need package manipulation tools on the target. The
+build machine is a likely candidate to act as the server. However, that
+machine does not necessarily have to be the package server. The build
+machine could push its artifacts to another machine that acts as the
+server (e.g. Internet-facing). In fact, doing so is advantageous for a
+production environment as getting the packages away from the development
+system's :term:`Build Directory` prevents accidental overwrites.
+
+A simple build that targets just one device produces more than one
+package database. In other words, the packages produced by a build are
+separated out into a couple of different package groupings based on
+criteria such as the target's CPU architecture, the target board, or the
+C library used on the target. For example, a build targeting the
+``qemux86`` device produces the following three package databases:
+``noarch``, ``i586``, and ``qemux86``. If you wanted your ``qemux86``
+device to be aware of all the packages that were available to it, you
+would need to point it to each of these databases individually. In a
+similar way, a traditional Linux distribution usually is configured to
+be aware of a number of software repositories from which it retrieves
+packages.
+
+Using runtime package management is completely optional and not required
+for a successful build or deployment in any way. But if you want to make
+use of runtime package management, you need to do a couple things above
+and beyond the basics. The remainder of this section describes what you
+need to do.
+
+Build Considerations
+--------------------
+
+This section describes build considerations of which you need to be
+aware in order to provide support for runtime package management.
+
+When BitBake generates packages, it needs to know what format or formats
+to use. In your configuration, you use the
+:term:`PACKAGE_CLASSES`
+variable to specify the format:
+
+#. Open the ``local.conf`` file inside your :term:`Build Directory` (e.g.
+ ``poky/build/conf/local.conf``).
+
+#. Select the desired package format as follows::
+
+ PACKAGE_CLASSES ?= "package_packageformat"
+
+ where packageformat can be "ipk", "rpm",
+ "deb", or "tar" which are the supported package formats.
+
+ .. note::
+
+ Because the Yocto Project supports four different package formats,
+ you can set the variable with more than one argument. However, the
+ OpenEmbedded build system only uses the first argument when
+ creating an image or Software Development Kit (SDK).
+
+If you would like your image to start off with a basic package database
+containing the packages in your current build as well as to have the
+relevant tools available on the target for runtime package management,
+you can include "package-management" in the
+:term:`IMAGE_FEATURES`
+variable. Including "package-management" in this configuration variable
+ensures that when the image is assembled for your target, the image
+includes the currently-known package databases as well as the
+target-specific tools required for runtime package management to be
+performed on the target. However, this is not strictly necessary. You
+could start your image off without any databases but only include the
+required on-target package tool(s). As an example, you could include
+"opkg" in your
+:term:`IMAGE_INSTALL` variable
+if you are using the IPK package format. You can then initialize your
+target's package database(s) later once your image is up and running.
+
+Whenever you perform any sort of build step that can potentially
+generate a package or modify existing package, it is always a good idea
+to re-generate the package index after the build by using the following
+command::
+
+ $ bitbake package-index
+
+It might be tempting to build the
+package and the package index at the same time with a command such as
+the following::
+
+ $ bitbake some-package package-index
+
+Do not do this as
+BitBake does not schedule the package index for after the completion of
+the package you are building. Consequently, you cannot be sure of the
+package index including information for the package you just built.
+Thus, be sure to run the package update step separately after building
+any packages.
+
+You can use the
+:term:`PACKAGE_FEED_ARCHS`,
+:term:`PACKAGE_FEED_BASE_PATHS`,
+and
+:term:`PACKAGE_FEED_URIS`
+variables to pre-configure target images to use a package feed. If you
+do not define these variables, then manual steps as described in the
+subsequent sections are necessary to configure the target. You should
+set these variables before building the image in order to produce a
+correctly configured image.
+
+.. note::
+
+ Your image will need enough free storage space to run package upgrades,
+ especially if many of them need to be downloaded at the same time.
+ You should make sure images are created with enough free space
+ by setting the :term:`IMAGE_ROOTFS_EXTRA_SPACE` variable.
+
+When your build is complete, your packages reside in the
+``${TMPDIR}/deploy/packageformat`` directory. For example, if
+``${``\ :term:`TMPDIR`\ ``}`` is
+``tmp`` and your selected package type is RPM, then your RPM packages
+are available in ``tmp/deploy/rpm``.
+
+Host or Server Machine Setup
+----------------------------
+
+Although other protocols are possible, a server using HTTP typically
+serves packages. If you want to use HTTP, then set up and configure a
+web server such as Apache 2, lighttpd, or Python web server on the
+machine serving the packages.
+
+To keep things simple, this section describes how to set up a
+Python web server to share package feeds from the developer's
+machine. Although this server might not be the best for a production
+environment, the setup is simple and straight forward. Should you want
+to use a different server more suited for production (e.g. Apache 2,
+Lighttpd, or Nginx), take the appropriate steps to do so.
+
+From within the :term:`Build Directory` where you have built an image based on
+your packaging choice (i.e. the :term:`PACKAGE_CLASSES` setting), simply start
+the server. The following example assumes a :term:`Build Directory` of ``poky/build``
+and a :term:`PACKAGE_CLASSES` setting of ":ref:`ref-classes-package_rpm`"::
+
+ $ cd poky/build/tmp/deploy/rpm
+ $ python3 -m http.server
+
+Target Setup
+------------
+
+Setting up the target differs depending on the package management
+system. This section provides information for RPM, IPK, and DEB.
+
+Using RPM
+~~~~~~~~~
+
+The :wikipedia:`Dandified Packaging <DNF_(software)>` (DNF) performs
+runtime package management of RPM packages. In order to use DNF for
+runtime package management, you must perform an initial setup on the
+target machine for cases where the ``PACKAGE_FEED_*`` variables were not
+set as part of the image that is running on the target. This means if
+you built your image and did not use these variables as part of the
+build and your image is now running on the target, you need to perform
+the steps in this section if you want to use runtime package management.
+
+.. note::
+
+ For information on the ``PACKAGE_FEED_*`` variables, see
+ :term:`PACKAGE_FEED_ARCHS`, :term:`PACKAGE_FEED_BASE_PATHS`, and
+ :term:`PACKAGE_FEED_URIS` in the Yocto Project Reference Manual variables
+ glossary.
+
+On the target, you must inform DNF that package databases are available.
+You do this by creating a file named
+``/etc/yum.repos.d/oe-packages.repo`` and defining the ``oe-packages``.
+
+As an example, assume the target is able to use the following package
+databases: ``all``, ``i586``, and ``qemux86`` from a server named
+``my.server``. The specifics for setting up the web server are up to
+you. The critical requirement is that the URIs in the target repository
+configuration point to the correct remote location for the feeds.
+
+.. note::
+
+ For development purposes, you can point the web server to the build
+ system's ``deploy`` directory. However, for production use, it is better to
+ copy the package directories to a location outside of the build area and use
+ that location. Doing so avoids situations where the build system
+ overwrites or changes the ``deploy`` directory.
+
+When telling DNF where to look for the package databases, you must
+declare individual locations per architecture or a single location used
+for all architectures. You cannot do both:
+
+- *Create an Explicit List of Architectures:* Define individual base
+ URLs to identify where each package database is located:
+
+ .. code-block:: none
+
+ [oe-packages]
+ baseurl=http://my.server/rpm/i586 http://my.server/rpm/qemux86 http://my.server/rpm/all
+
+ This example
+ informs DNF about individual package databases for all three
+ architectures.
+
+- *Create a Single (Full) Package Index:* Define a single base URL that
+ identifies where a full package database is located::
+
+ [oe-packages]
+ baseurl=http://my.server/rpm
+
+ This example informs DNF about a single
+ package database that contains all the package index information for
+ all supported architectures.
+
+Once you have informed DNF where to find the package databases, you need
+to fetch them:
+
+.. code-block:: none
+
+ # dnf makecache
+
+DNF is now able to find, install, and
+upgrade packages from the specified repository or repositories.
+
+.. note::
+
+ See the `DNF documentation <https://dnf.readthedocs.io/en/latest/>`__ for
+ additional information.
+
+Using IPK
+~~~~~~~~~
+
+The ``opkg`` application performs runtime package management of IPK
+packages. You must perform an initial setup for ``opkg`` on the target
+machine if the
+:term:`PACKAGE_FEED_ARCHS`,
+:term:`PACKAGE_FEED_BASE_PATHS`,
+and
+:term:`PACKAGE_FEED_URIS`
+variables have not been set or the target image was built before the
+variables were set.
+
+The ``opkg`` application uses configuration files to find available
+package databases. Thus, you need to create a configuration file inside
+the ``/etc/opkg/`` directory, which informs ``opkg`` of any repository
+you want to use.
+
+As an example, suppose you are serving packages from a ``ipk/``
+directory containing the ``i586``, ``all``, and ``qemux86`` databases
+through an HTTP server named ``my.server``. On the target, create a
+configuration file (e.g. ``my_repo.conf``) inside the ``/etc/opkg/``
+directory containing the following:
+
+.. code-block:: none
+
+ src/gz all http://my.server/ipk/all
+ src/gz i586 http://my.server/ipk/i586
+ src/gz qemux86 http://my.server/ipk/qemux86
+
+Next, instruct ``opkg`` to fetch the
+repository information:
+
+.. code-block:: none
+
+ # opkg update
+
+The ``opkg`` application is now able to find, install, and upgrade packages
+from the specified repository.
+
+Using DEB
+~~~~~~~~~
+
+The ``apt`` application performs runtime package management of DEB
+packages. This application uses a source list file to find available
+package databases. You must perform an initial setup for ``apt`` on the
+target machine if the
+:term:`PACKAGE_FEED_ARCHS`,
+:term:`PACKAGE_FEED_BASE_PATHS`,
+and
+:term:`PACKAGE_FEED_URIS`
+variables have not been set or the target image was built before the
+variables were set.
+
+To inform ``apt`` of the repository you want to use, you might create a
+list file (e.g. ``my_repo.list``) inside the
+``/etc/apt/sources.list.d/`` directory. As an example, suppose you are
+serving packages from a ``deb/`` directory containing the ``i586``,
+``all``, and ``qemux86`` databases through an HTTP server named
+``my.server``. The list file should contain:
+
+.. code-block:: none
+
+ deb http://my.server/deb/all ./
+ deb http://my.server/deb/i586 ./
+ deb http://my.server/deb/qemux86 ./
+
+Next, instruct the ``apt`` application
+to fetch the repository information:
+
+.. code-block:: none
+
+ $ sudo apt update
+
+After this step,
+``apt`` is able to find, install, and upgrade packages from the
+specified repository.
+
+Generating and Using Signed Packages
+====================================
+
+In order to add security to RPM packages used during a build, you can
+take steps to securely sign them. Once a signature is verified, the
+OpenEmbedded build system can use the package in the build. If security
+fails for a signed package, the build system stops the build.
+
+This section describes how to sign RPM packages during a build and how
+to use signed package feeds (repositories) when doing a build.
+
+Signing RPM Packages
+--------------------
+
+To enable signing RPM packages, you must set up the following
+configurations in either your ``local.config`` or ``distro.config``
+file::
+
+ # Inherit sign_rpm.bbclass to enable signing functionality
+ INHERIT += " sign_rpm"
+ # Define the GPG key that will be used for signing.
+ RPM_GPG_NAME = "key_name"
+ # Provide passphrase for the key
+ RPM_GPG_PASSPHRASE = "passphrase"
+
+.. note::
+
+ Be sure to supply appropriate values for both `key_name` and
+ `passphrase`.
+
+Aside from the ``RPM_GPG_NAME`` and ``RPM_GPG_PASSPHRASE`` variables in
+the previous example, two optional variables related to signing are available:
+
+- *GPG_BIN:* Specifies a ``gpg`` binary/wrapper that is executed
+ when the package is signed.
+
+- *GPG_PATH:* Specifies the ``gpg`` home directory used when the
+ package is signed.
+
+Processing Package Feeds
+------------------------
+
+In addition to being able to sign RPM packages, you can also enable
+signed package feeds for IPK and RPM packages.
+
+The steps you need to take to enable signed package feed use are similar
+to the steps used to sign RPM packages. You must define the following in
+your ``local.config`` or ``distro.config`` file::
+
+ INHERIT += "sign_package_feed"
+ PACKAGE_FEED_GPG_NAME = "key_name"
+ PACKAGE_FEED_GPG_PASSPHRASE_FILE = "path_to_file_containing_passphrase"
+
+For signed package feeds, the passphrase must be specified in a separate file,
+which is pointed to by the ``PACKAGE_FEED_GPG_PASSPHRASE_FILE``
+variable. Regarding security, keeping a plain text passphrase out of the
+configuration is more secure.
+
+Aside from the ``PACKAGE_FEED_GPG_NAME`` and
+``PACKAGE_FEED_GPG_PASSPHRASE_FILE`` variables, three optional variables
+related to signed package feeds are available:
+
+- *GPG_BIN* Specifies a ``gpg`` binary/wrapper that is executed
+ when the package is signed.
+
+- *GPG_PATH:* Specifies the ``gpg`` home directory used when the
+ package is signed.
+
+- *PACKAGE_FEED_GPG_SIGNATURE_TYPE:* Specifies the type of ``gpg``
+ signature. This variable applies only to RPM and IPK package feeds.
+ Allowable values for the ``PACKAGE_FEED_GPG_SIGNATURE_TYPE`` are
+ "ASC", which is the default and specifies ascii armored, and "BIN",
+ which specifies binary.
+
+Testing Packages With ptest
+===========================
+
+A Package Test (ptest) runs tests against packages built by the
+OpenEmbedded build system on the target machine. A ptest contains at
+least two items: the actual test, and a shell script (``run-ptest``)
+that starts the test. The shell script that starts the test must not
+contain the actual test --- the script only starts the test. On the other
+hand, the test can be anything from a simple shell script that runs a
+binary and checks the output to an elaborate system of test binaries and
+data files.
+
+The test generates output in the format used by Automake::
+
+ result: testname
+
+where the result can be ``PASS``, ``FAIL``, or ``SKIP``, and
+the testname can be any identifying string.
+
+For a list of Yocto Project recipes that are already enabled with ptest,
+see the :yocto_wiki:`Ptest </Ptest>` wiki page.
+
+.. note::
+
+ A recipe is "ptest-enabled" if it inherits the :ref:`ref-classes-ptest`
+ class.
+
+Adding ptest to Your Build
+--------------------------
+
+To add package testing to your build, add the :term:`DISTRO_FEATURES` and
+:term:`EXTRA_IMAGE_FEATURES` variables to your ``local.conf`` file, which
+is found in the :term:`Build Directory`::
+
+ DISTRO_FEATURES:append = " ptest"
+ EXTRA_IMAGE_FEATURES += "ptest-pkgs"
+
+Once your build is complete, the ptest files are installed into the
+``/usr/lib/package/ptest`` directory within the image, where ``package``
+is the name of the package.
+
+Running ptest
+-------------
+
+The ``ptest-runner`` package installs a shell script that loops through
+all installed ptest test suites and runs them in sequence. Consequently,
+you might want to add this package to your image.
+
+Getting Your Package Ready
+--------------------------
+
+In order to enable a recipe to run installed ptests on target hardware,
+you need to prepare the recipes that build the packages you want to
+test. Here is what you have to do for each recipe:
+
+- *Be sure the recipe inherits the* :ref:`ref-classes-ptest` *class:*
+ Include the following line in each recipe::
+
+ inherit ptest
+
+- *Create run-ptest:* This script starts your test. Locate the
+ script where you will refer to it using
+ :term:`SRC_URI`. Here is an
+ example that starts a test for ``dbus``::
+
+ #!/bin/sh
+ cd test
+ make -k runtest-TESTS
+
+- *Ensure dependencies are met:* If the test adds build or runtime
+ dependencies that normally do not exist for the package (such as
+ requiring "make" to run the test suite), use the
+ :term:`DEPENDS` and
+ :term:`RDEPENDS` variables in
+ your recipe in order for the package to meet the dependencies. Here
+ is an example where the package has a runtime dependency on "make"::
+
+ RDEPENDS:${PN}-ptest += "make"
+
+- *Add a function to build the test suite:* Not many packages support
+ cross-compilation of their test suites. Consequently, you usually
+ need to add a cross-compilation function to the package.
+
+ Many packages based on Automake compile and run the test suite by
+ using a single command such as ``make check``. However, the host
+ ``make check`` builds and runs on the same computer, while
+ cross-compiling requires that the package is built on the host but
+ executed for the target architecture (though often, as in the case
+ for ptest, the execution occurs on the host). The built version of
+ Automake that ships with the Yocto Project includes a patch that
+ separates building and execution. Consequently, packages that use the
+ unaltered, patched version of ``make check`` automatically
+ cross-compiles.
+
+ Regardless, you still must add a ``do_compile_ptest`` function to
+ build the test suite. Add a function similar to the following to your
+ recipe::
+
+ do_compile_ptest() {
+ oe_runmake buildtest-TESTS
+ }
+
+- *Ensure special configurations are set:* If the package requires
+ special configurations prior to compiling the test code, you must
+ insert a ``do_configure_ptest`` function into the recipe.
+
+- *Install the test suite:* The :ref:`ref-classes-ptest` class
+ automatically copies the file ``run-ptest`` to the target and then runs make
+ ``install-ptest`` to run the tests. If this is not enough, you need
+ to create a ``do_install_ptest`` function and make sure it gets
+ called after the "make install-ptest" completes.
+
+Creating Node Package Manager (NPM) Packages
+============================================
+
+:wikipedia:`NPM <Npm_(software)>` is a package manager for the JavaScript
+programming language. The Yocto Project supports the NPM
+:ref:`fetcher <bitbake-user-manual/bitbake-user-manual-fetching:fetchers>`.
+You can use this fetcher in combination with
+:doc:`devtool </ref-manual/devtool-reference>` to create recipes that produce
+NPM packages.
+
+There are two workflows that allow you to create NPM packages using
+``devtool``: the NPM registry modules method and the NPM project code
+method.
+
+.. note::
+
+ While it is possible to create NPM recipes manually, using
+ ``devtool`` is far simpler.
+
+Additionally, some requirements and caveats exist.
+
+Requirements and Caveats
+------------------------
+
+You need to be aware of the following before using ``devtool`` to create
+NPM packages:
+
+- Of the two methods that you can use ``devtool`` to create NPM
+ packages, the registry approach is slightly simpler. However, you
+ might consider the project approach because you do not have to
+ publish your module in the `NPM registry <https://docs.npmjs.com/misc/registry>`__,
+ which is NPM's public registry.
+
+- Be familiar with
+ :doc:`devtool </ref-manual/devtool-reference>`.
+
+- The NPM host tools need the native ``nodejs-npm`` package, which is
+ part of the OpenEmbedded environment. You need to get the package by
+ cloning the :oe_git:`meta-openembedded </meta-openembedded>`
+ repository. Be sure to add the path to your local copy
+ to your ``bblayers.conf`` file.
+
+- ``devtool`` cannot detect native libraries in module dependencies.
+ Consequently, you must manually add packages to your recipe.
+
+- While deploying NPM packages, ``devtool`` cannot determine which
+ dependent packages are missing on the target (e.g. the node runtime
+ ``nodejs``). Consequently, you need to find out what files are
+ missing and be sure they are on the target.
+
+- Although you might not need NPM to run your node package, it is
+ useful to have NPM on your target. The NPM package name is
+ ``nodejs-npm``.
+
+Using the Registry Modules Method
+---------------------------------
+
+This section presents an example that uses the ``cute-files`` module,
+which is a file browser web application.
+
+.. note::
+
+ You must know the ``cute-files`` module version.
+
+The first thing you need to do is use ``devtool`` and the NPM fetcher to
+create the recipe::
+
+ $ devtool add "npm://registry.npmjs.org;package=cute-files;version=1.0.2"
+
+The
+``devtool add`` command runs ``recipetool create`` and uses the same
+fetch URI to download each dependency and capture license details where
+possible. The result is a generated recipe.
+
+After running for quite a long time, in particular building the
+``nodejs-native`` package, the command should end as follows::
+
+ INFO: Recipe /home/.../build/workspace/recipes/cute-files/cute-files_1.0.2.bb has been automatically created; further editing may be required to make it fully functional
+
+The recipe file is fairly simple and contains every license that
+``recipetool`` finds and includes the licenses in the recipe's
+:term:`LIC_FILES_CHKSUM`
+variables. You need to examine the variables and look for those with
+"unknown" in the :term:`LICENSE`
+field. You need to track down the license information for "unknown"
+modules and manually add the information to the recipe.
+
+``recipetool`` creates a "shrinkwrap" file for your recipe. Shrinkwrap
+files capture the version of all dependent modules. Many packages do not
+provide shrinkwrap files but ``recipetool`` will create a shrinkwrap file as it
+runs.
+
+.. note::
+
+ A package is created for each sub-module. This policy is the only
+ practical way to have the licenses for all of the dependencies
+ represented in the license manifest of the image.
+
+The ``devtool edit-recipe`` command lets you take a look at the recipe::
+
+ $ devtool edit-recipe cute-files
+ # Recipe created by recipetool
+ # This is the basis of a recipe and may need further editing in order to be fully functional.
+ # (Feel free to remove these comments when editing.)
+
+ SUMMARY = "Turn any folder on your computer into a cute file browser, available on the local network."
+ # WARNING: the following LICENSE and LIC_FILES_CHKSUM values are best guesses - it is
+ # your responsibility to verify that the values are complete and correct.
+ #
+ # NOTE: multiple licenses have been detected; they have been separated with &
+ # in the LICENSE value for now since it is a reasonable assumption that all
+ # of the licenses apply. If instead there is a choice between the multiple
+ # licenses then you should change the value to separate the licenses with |
+ # instead of &. If there is any doubt, check the accompanying documentation
+ # to determine which situation is applicable.
+
+ SUMMARY = "Turn any folder on your computer into a cute file browser, available on the local network."
+ LICENSE = "BSD-3-Clause & ISC & MIT"
+ LIC_FILES_CHKSUM = "file://LICENSE;md5=71d98c0a1db42956787b1909c74a86ca \
+ file://node_modules/accepts/LICENSE;md5=bf1f9ad1e2e1d507aef4883fff7103de \
+ file://node_modules/array-flatten/LICENSE;md5=44088ba57cb871a58add36ce51b8de08 \
+ ...
+ file://node_modules/cookie-signature/Readme.md;md5=57ae8b42de3dd0c1f22d5f4cf191e15a"
+
+ SRC_URI = " \
+ npm://registry.npmjs.org/;package=cute-files;version=${PV} \
+ npmsw://${THISDIR}/${BPN}/npm-shrinkwrap.json \
+ "
+
+ S = "${WORKDIR}/npm"
+
+ inherit npm
+
+ LICENSE:${PN} = "MIT"
+ LICENSE:${PN}-accepts = "MIT"
+ LICENSE:${PN}-array-flatten = "MIT"
+ ...
+ LICENSE:${PN}-vary = "MIT"
+
+Three key points in the previous example are:
+
+- :term:`SRC_URI` uses the NPM
+ scheme so that the NPM fetcher is used.
+
+- ``recipetool`` collects all the license information. If a
+ sub-module's license is unavailable, the sub-module's name appears in
+ the comments.
+
+- The ``inherit npm`` statement causes the :ref:`ref-classes-npm` class to
+ package up all the modules.
+
+You can run the following command to build the ``cute-files`` package::
+
+ $ devtool build cute-files
+
+Remember that ``nodejs`` must be installed on
+the target before your package.
+
+Assuming 192.168.7.2 for the target's IP address, use the following
+command to deploy your package::
+
+ $ devtool deploy-target -s cute-files root@192.168.7.2
+
+Once the package is installed on the target, you can
+test the application to show the contents of any directory::
+
+ $ cd /usr/lib/node_modules/cute-files
+ $ cute-files
+
+On a browser,
+go to ``http://192.168.7.2:3000`` and you see the following:
+
+.. image:: figures/cute-files-npm-example.png
+ :width: 100%
+
+You can find the recipe in ``workspace/recipes/cute-files``. You can use
+the recipe in any layer you choose.
+
+Using the NPM Projects Code Method
+----------------------------------
+
+Although it is useful to package modules already in the NPM registry,
+adding ``node.js`` projects under development is a more common developer
+use case.
+
+This section covers the NPM projects code method, which is very similar
+to the "registry" approach described in the previous section. In the NPM
+projects method, you provide ``devtool`` with an URL that points to the
+source files.
+
+Replicating the same example, (i.e. ``cute-files``) use the following
+command::
+
+ $ devtool add https://github.com/martinaglv/cute-files.git
+
+The recipe this command generates is very similar to the recipe created in
+the previous section. However, the :term:`SRC_URI` looks like the following::
+
+ SRC_URI = " \
+ git://github.com/martinaglv/cute-files.git;protocol=https;branch=master \
+ npmsw://${THISDIR}/${BPN}/npm-shrinkwrap.json \
+ "
+
+In this example,
+the main module is taken from the Git repository and dependencies are
+taken from the NPM registry. Other than those differences, the recipe is
+basically the same between the two methods. You can build and deploy the
+package exactly as described in the previous section that uses the
+registry modules method.
+
+Adding custom metadata to packages
+==================================
+
+The variable
+:term:`PACKAGE_ADD_METADATA`
+can be used to add additional metadata to packages. This is reflected in
+the package control/spec file. To take the ipk format for example, the
+CONTROL file stored inside would contain the additional metadata as
+additional lines.
+
+The variable can be used in multiple ways, including using suffixes to
+set it for a specific package type and/or package. Note that the order
+of precedence is the same as this list:
+
+- ``PACKAGE_ADD_METADATA_<PKGTYPE>:<PN>``
+
+- ``PACKAGE_ADD_METADATA_<PKGTYPE>``
+
+- ``PACKAGE_ADD_METADATA:<PN>``
+
+- :term:`PACKAGE_ADD_METADATA`
+
+`<PKGTYPE>` is a parameter and expected to be a distinct name of specific
+package type:
+
+- IPK for .ipk packages
+
+- DEB for .deb packages
+
+- RPM for .rpm packages
+
+`<PN>` is a parameter and expected to be a package name.
+
+The variable can contain multiple [one-line] metadata fields separated
+by the literal sequence '\\n'. The separator can be redefined using the
+variable flag ``separator``.
+
+Here is an example that adds two custom fields for ipk
+packages::
+
+ PACKAGE_ADD_METADATA_IPK = "Vendor: CustomIpk\nGroup:Applications/Spreadsheets"
+
diff --git a/documentation/dev-manual/prebuilt-libraries.rst b/documentation/dev-manual/prebuilt-libraries.rst
new file mode 100644
index 0000000000..a05f39ca1e
--- /dev/null
+++ b/documentation/dev-manual/prebuilt-libraries.rst
@@ -0,0 +1,209 @@
+.. SPDX-License-Identifier: CC-BY-SA-2.0-UK
+
+Working with Pre-Built Libraries
+********************************
+
+Introduction
+============
+
+Some library vendors do not release source code for their software but do
+release pre-built binaries. When shared libraries are built, they should
+be versioned (see `this article
+<https://tldp.org/HOWTO/Program-Library-HOWTO/shared-libraries.html>`__
+for some background), but sometimes this is not done.
+
+To summarize, a versioned library must meet two conditions:
+
+#. The filename must have the version appended, for example: ``libfoo.so.1.2.3``.
+#. The library must have the ELF tag ``SONAME`` set to the major version
+ of the library, for example: ``libfoo.so.1``. You can check this by
+ running ``readelf -d filename | grep SONAME``.
+
+This section shows how to deal with both versioned and unversioned
+pre-built libraries.
+
+Versioned Libraries
+===================
+
+In this example we work with pre-built libraries for the FT4222H USB I/O chip.
+Libraries are built for several target architecture variants and packaged in
+an archive as follows::
+
+ ├── build-arm-hisiv300
+ │   └── libft4222.so.1.4.4.44
+ ├── build-arm-v5-sf
+ │   └── libft4222.so.1.4.4.44
+ ├── build-arm-v6-hf
+ │   └── libft4222.so.1.4.4.44
+ ├── build-arm-v7-hf
+ │   └── libft4222.so.1.4.4.44
+ ├── build-arm-v8
+ │   └── libft4222.so.1.4.4.44
+ ├── build-i386
+ │   └── libft4222.so.1.4.4.44
+ ├── build-i486
+ │   └── libft4222.so.1.4.4.44
+ ├── build-mips-eglibc-hf
+ │   └── libft4222.so.1.4.4.44
+ ├── build-pentium
+ │   └── libft4222.so.1.4.4.44
+ ├── build-x86_64
+ │   └── libft4222.so.1.4.4.44
+ ├── examples
+ │   ├── get-version.c
+ │   ├── i2cm.c
+ │   ├── spim.c
+ │   └── spis.c
+ ├── ftd2xx.h
+ ├── install4222.sh
+ ├── libft4222.h
+ ├── ReadMe.txt
+ └── WinTypes.h
+
+To write a recipe to use such a library in your system:
+
+- The vendor will probably have a proprietary licence, so set
+ :term:`LICENSE_FLAGS` in your recipe.
+- The vendor provides a tarball containing libraries so set :term:`SRC_URI`
+ appropriately.
+- Set :term:`COMPATIBLE_HOST` so that the recipe cannot be used with an
+ unsupported architecture. In the following example, we only support the 32
+ and 64 bit variants of the ``x86`` architecture.
+- As the vendor provides versioned libraries, we can use ``oe_soinstall``
+ from :ref:`ref-classes-utils` to install the shared library and create
+ symbolic links. If the vendor does not do this, we need to follow the
+ non-versioned library guidelines in the next section.
+- As the vendor likely used :term:`LDFLAGS` different from those in your Yocto
+ Project build, disable the corresponding checks by adding ``ldflags``
+ to :term:`INSANE_SKIP`.
+- The vendor will typically ship release builds without debugging symbols.
+ Avoid errors by preventing the packaging task from stripping out the symbols
+ and adding them to a separate debug package. This is done by setting the
+ ``INHIBIT_`` flags shown below.
+
+The complete recipe would look like this::
+
+ SUMMARY = "FTDI FT4222H Library"
+ SECTION = "libs"
+ LICENSE_FLAGS = "ftdi"
+ LICENSE = "CLOSED"
+
+ COMPATIBLE_HOST = "(i.86|x86_64).*-linux"
+
+ # Sources available in a .tgz file in .zip archive
+ # at https://ftdichip.com/wp-content/uploads/2021/01/libft4222-linux-1.4.4.44.zip
+ # Found on https://ftdichip.com/software-examples/ft4222h-software-examples/
+ # Since dealing with this particular type of archive is out of topic here,
+ # we use a local link.
+ SRC_URI = "file://libft4222-linux-${PV}.tgz"
+
+ S = "${WORKDIR}"
+
+ ARCH_DIR:x86-64 = "build-x86_64"
+ ARCH_DIR:i586 = "build-i386"
+ ARCH_DIR:i686 = "build-i386"
+
+ INSANE_SKIP:${PN} = "ldflags"
+ INHIBIT_PACKAGE_STRIP = "1"
+ INHIBIT_SYSROOT_STRIP = "1"
+ INHIBIT_PACKAGE_DEBUG_SPLIT = "1"
+
+ do_install () {
+ install -m 0755 -d ${D}${libdir}
+ oe_soinstall ${S}/${ARCH_DIR}/libft4222.so.${PV} ${D}${libdir}
+ install -d ${D}${includedir}
+ install -m 0755 ${S}/*.h ${D}${includedir}
+ }
+
+If the precompiled binaries are not statically linked and have dependencies on
+other libraries, then by adding those libraries to :term:`DEPENDS`, the linking
+can be examined and the appropriate :term:`RDEPENDS` automatically added.
+
+Non-Versioned Libraries
+=======================
+
+Some Background
+---------------
+
+Libraries in Linux systems are generally versioned so that it is possible
+to have multiple versions of the same library installed, which eases upgrades
+and support for older software. For example, suppose that in a versioned
+library, an actual library is called ``libfoo.so.1.2``, a symbolic link named
+``libfoo.so.1`` points to ``libfoo.so.1.2``, and a symbolic link named
+``libfoo.so`` points to ``libfoo.so.1.2``. Given these conditions, when you
+link a binary against a library, you typically provide the unversioned file
+name (i.e. ``-lfoo`` to the linker). However, the linker follows the symbolic
+link and actually links against the versioned filename. The unversioned symbolic
+link is only used at development time. Consequently, the library is packaged
+along with the headers in the development package ``${PN}-dev`` along with the
+actual library and versioned symbolic links in ``${PN}``. Because versioned
+libraries are far more common than unversioned libraries, the default packaging
+rules assume versioned libraries.
+
+Yocto Library Packaging Overview
+--------------------------------
+
+It follows that packaging an unversioned library requires a bit of work in the
+recipe. By default, ``libfoo.so`` gets packaged into ``${PN}-dev``, which
+triggers a QA warning that a non-symlink library is in a ``-dev`` package,
+and binaries in the same recipe link to the library in ``${PN}-dev``,
+which triggers more QA warnings. To solve this problem, you need to package the
+unversioned library into ``${PN}`` where it belongs. The abridged
+default :term:`FILES` variables in ``bitbake.conf`` are::
+
+ SOLIBS = ".so.*"
+ SOLIBSDEV = ".so"
+ FILES:${PN} = "... ${libdir}/lib*${SOLIBS} ..."
+ FILES_SOLIBSDEV ?= "... ${libdir}/lib*${SOLIBSDEV} ..."
+ FILES:${PN}-dev = "... ${FILES_SOLIBSDEV} ..."
+
+:term:`SOLIBS` defines a pattern that matches real shared object libraries.
+:term:`SOLIBSDEV` matches the development form (unversioned symlink). These two
+variables are then used in ``FILES:${PN}`` and ``FILES:${PN}-dev``, which puts
+the real libraries into ``${PN}`` and the unversioned symbolic link into ``${PN}-dev``.
+To package unversioned libraries, you need to modify the variables in the recipe
+as follows::
+
+ SOLIBS = ".so"
+ FILES_SOLIBSDEV = ""
+
+The modifications cause the ``.so`` file to be the real library
+and unset :term:`FILES_SOLIBSDEV` so that no libraries get packaged into
+``${PN}-dev``. The changes are required because unless :term:`PACKAGES` is changed,
+``${PN}-dev`` collects files before `${PN}`. ``${PN}-dev`` must not collect any of
+the files you want in ``${PN}``.
+
+Finally, loadable modules, essentially unversioned libraries that are linked
+at runtime using ``dlopen()`` instead of at build time, should generally be
+installed in a private directory. However, if they are installed in ``${libdir}``,
+then the modules can be treated as unversioned libraries.
+
+Example
+-------
+
+The example below installs an unversioned x86-64 pre-built library named
+``libfoo.so``. The :term:`COMPATIBLE_HOST` variable limits recipes to the
+x86-64 architecture while the :term:`INSANE_SKIP`, :term:`INHIBIT_PACKAGE_STRIP`
+and :term:`INHIBIT_SYSROOT_STRIP` variables are all set as in the above
+versioned library example. The "magic" is setting the :term:`SOLIBS` and
+:term:`FILES_SOLIBSDEV` variables as explained above::
+
+ SUMMARY = "libfoo sample recipe"
+ SECTION = "libs"
+ LICENSE = "CLOSED"
+
+ SRC_URI = "file://libfoo.so"
+
+ COMPATIBLE_HOST = "x86_64.*-linux"
+
+ INSANE_SKIP:${PN} = "ldflags"
+ INHIBIT_PACKAGE_STRIP = "1"
+ INHIBIT_SYSROOT_STRIP = "1"
+ SOLIBS = ".so"
+ FILES_SOLIBSDEV = ""
+
+ do_install () {
+ install -d ${D}${libdir}
+ install -m 0755 ${WORKDIR}/libfoo.so ${D}${libdir}
+ }
+
diff --git a/documentation/dev-manual/python-development-shell.rst b/documentation/dev-manual/python-development-shell.rst
new file mode 100644
index 0000000000..81a5c43472
--- /dev/null
+++ b/documentation/dev-manual/python-development-shell.rst
@@ -0,0 +1,50 @@
+.. SPDX-License-Identifier: CC-BY-SA-2.0-UK
+
+Using a Python Development Shell
+********************************
+
+Similar to working within a development shell as described in the
+previous section, you can also spawn and work within an interactive
+Python development shell. When debugging certain commands or even when
+just editing packages, ``pydevshell`` can be a useful tool. When you
+invoke the ``pydevshell`` task, all tasks up to and including
+:ref:`ref-tasks-patch` are run for the
+specified target. Then a new terminal is opened. Additionally, key
+Python objects and code are available in the same way they are to
+BitBake tasks, in particular, the data store 'd'. So, commands such as
+the following are useful when exploring the data store and running
+functions::
+
+ pydevshell> d.getVar("STAGING_DIR")
+ '/media/build1/poky/build/tmp/sysroots'
+ pydevshell> d.getVar("STAGING_DIR", False)
+ '${TMPDIR}/sysroots'
+ pydevshell> d.setVar("FOO", "bar")
+ pydevshell> d.getVar("FOO")
+ 'bar'
+ pydevshell> d.delVar("FOO")
+ pydevshell> d.getVar("FOO")
+ pydevshell> bb.build.exec_func("do_unpack", d)
+ pydevshell>
+
+See the ":ref:`bitbake-user-manual/bitbake-user-manual-metadata:functions you can call from within python`"
+section in the BitBake User Manual for details about available functions.
+
+The commands execute just as if the OpenEmbedded build
+system were executing them. Consequently, working this way can be
+helpful when debugging a build or preparing software to be used with the
+OpenEmbedded build system.
+
+Here is an example that uses ``pydevshell`` on a target named
+``matchbox-desktop``::
+
+ $ bitbake matchbox-desktop -c pydevshell
+
+This command spawns a terminal and places you in an interactive Python
+interpreter within the OpenEmbedded build environment. The
+:term:`OE_TERMINAL` variable
+controls what type of shell is opened.
+
+When you are finished using ``pydevshell``, you can exit the shell
+either by using Ctrl+d or closing the terminal window.
+
diff --git a/documentation/dev-manual/qemu.rst b/documentation/dev-manual/qemu.rst
index 88a63c1808..19f3e40d63 100644
--- a/documentation/dev-manual/qemu.rst
+++ b/documentation/dev-manual/qemu.rst
@@ -44,13 +44,13 @@ To use QEMU, you need to have QEMU installed and initialized as well as
have the proper artifacts (i.e. image files and root filesystems)
available. Follow these general steps to run QEMU:
-1. *Install QEMU:* QEMU is made available with the Yocto Project a
+#. *Install QEMU:* QEMU is made available with the Yocto Project a
number of ways. One method is to install a Software Development Kit
(SDK). See ":ref:`sdk-manual/intro:the qemu emulator`" section in the
Yocto Project Application Development and the Extensible Software
Development Kit (eSDK) manual for information on how to install QEMU.
-2. *Setting Up the Environment:* How you set up the QEMU environment
+#. *Setting Up the Environment:* How you set up the QEMU environment
depends on how you installed QEMU:
- If you cloned the ``poky`` repository or you downloaded and
@@ -66,7 +66,7 @@ available. Follow these general steps to run QEMU:
. poky_sdk/environment-setup-core2-64-poky-linux
-3. *Ensure the Artifacts are in Place:* You need to be sure you have a
+#. *Ensure the Artifacts are in Place:* You need to be sure you have a
pre-built kernel that will boot in QEMU. You also need the target
root filesystem for your target machine's architecture:
@@ -84,7 +84,7 @@ available. Follow these general steps to run QEMU:
Extensible Software Development Kit (eSDK) manual for information on
how to extract a root filesystem.
-4. *Run QEMU:* The basic ``runqemu`` command syntax is as follows::
+#. *Run QEMU:* The basic ``runqemu`` command syntax is as follows::
$ runqemu [option ] [...]
@@ -99,12 +99,13 @@ available. Follow these general steps to run QEMU:
Here are some additional examples to help illustrate further QEMU:
- This example starts QEMU with MACHINE set to "qemux86-64".
- Assuming a standard
- :term:`Build Directory`, ``runqemu``
+ Assuming a standard :term:`Build Directory`, ``runqemu``
automatically finds the ``bzImage-qemux86-64.bin`` image file and
the ``core-image-minimal-qemux86-64-20200218002850.rootfs.ext4``
(assuming the current build created a ``core-image-minimal``
- image).
+ image)::
+
+ $ runqemu qemux86-64
.. note::
@@ -112,38 +113,31 @@ available. Follow these general steps to run QEMU:
and uses the most recently built image according to the
timestamp.
- ::
-
- $ runqemu qemux86-64
-
- This example produces the exact same results as the previous
example. This command, however, specifically provides the image
- and root filesystem type.
- ::
+ and root filesystem type::
$ runqemu qemux86-64 core-image-minimal ext4
- - This example specifies to boot an initial RAM disk image and to
- enable audio in QEMU. For this case, ``runqemu`` set the internal
- variable ``FSTYPE`` to "cpio.gz". Also, for audio to be enabled,
- an appropriate driver must be installed (see the previous
- description for the ``audio`` option for more information).
- ::
+ - This example specifies to boot an :term:`Initramfs` image and to
+ enable audio in QEMU. For this case, ``runqemu`` sets the internal
+ variable ``FSTYPE`` to ``cpio.gz``. Also, for audio to be enabled,
+ an appropriate driver must be installed (see the ``audio`` option
+ in :ref:`dev-manual/qemu:\`\`runqemu\`\` command-line options`
+ for more information)::
$ runqemu qemux86-64 ramfs audio
- This example does not provide enough information for QEMU to
launch. While the command does provide a root filesystem type, it
- must also minimally provide a `MACHINE`, `KERNEL`, or `VM` option.
- ::
+ must also minimally provide a `MACHINE`, `KERNEL`, or `VM` option::
$ runqemu ext4
- This example specifies to boot a virtual machine image
(``.wic.vmdk`` file). From the ``.wic.vmdk``, ``runqemu``
determines the QEMU architecture (`MACHINE`) to be "qemux86-64" and
- the root filesystem type to be "vmdk".
- ::
+ the root filesystem type to be "vmdk"::
$ runqemu /home/scott-lenovo/vm/core-image-minimal-qemux86-64.wic.vmdk
@@ -190,7 +184,7 @@ the system does not need root privileges to run. It uses a user space
NFS server to avoid that. Follow these steps to set up for running QEMU
using an NFS server.
-1. *Extract a Root Filesystem:* Once you are able to run QEMU in your
+#. *Extract a Root Filesystem:* Once you are able to run QEMU in your
environment, you can use the ``runqemu-extract-sdk`` script, which is
located in the ``scripts`` directory along with the ``runqemu``
script.
@@ -204,7 +198,7 @@ using an NFS server.
runqemu-extract-sdk ./tmp/deploy/images/qemux86-64/core-image-sato-qemux86-64.tar.bz2 test-nfs
-2. *Start QEMU:* Once you have extracted the file system, you can run
+#. *Start QEMU:* Once you have extracted the file system, you can run
``runqemu`` normally with the additional location of the file system.
You can then also make changes to the files within ``./test-nfs`` and
see those changes appear in the image in real time. Here is an
@@ -246,11 +240,10 @@ be a problem when QEMU is running with KVM enabled. Specifically,
software compiled with a certain CPU feature crashes when run on a CPU
under KVM that does not support that feature. To work around this
problem, you can override QEMU's runtime CPU setting by changing the
-``QB_CPU_KVM`` variable in ``qemuboot.conf`` in the
-:term:`Build Directory` ``deploy/image``
-directory. This setting specifies a ``-cpu`` option passed into QEMU in
-the ``runqemu`` script. Running ``qemu -cpu help`` returns a list of
-available supported CPU types.
+``QB_CPU_KVM`` variable in ``qemuboot.conf`` in the :term:`Build Directory`
+``deploy/image`` directory. This setting specifies a ``-cpu`` option passed
+into QEMU in the ``runqemu`` script. Running ``qemu -cpu help`` returns a
+list of available supported CPU types.
QEMU Performance
================
@@ -318,7 +311,7 @@ timestamp when it needs to look for an image. Minimally, through the use
of options, you must provide either a machine name, a virtual machine
image (``*wic.vmdk``), or a kernel image (``*.bin``).
-Following is the command-line help output for the ``runqemu`` command::
+Here is the command-line help output for the ``runqemu`` command::
$ runqemu --help
@@ -330,7 +323,7 @@ Following is the command-line help output for the ``runqemu`` command::
Simplified QEMU command-line options can be passed with:
nographic - disable video console
serial - enable a serial console on /dev/ttyS0
- slirp - enable user networking, no root privileges is required
+ slirp - enable user networking, no root privileges required
kvm - enable KVM when running x86/x86_64 (VT-capable CPU required)
kvm-vhost - enable KVM with vhost when running x86/x86_64 (VT-capable CPU required)
publicvnc - enable a VNC server open to all hosts
@@ -360,7 +353,7 @@ Following is the command-line help output for the ``runqemu`` command::
``runqemu`` Command-Line Options
================================
-Following is a description of ``runqemu`` options you can provide on the
+Here is a description of ``runqemu`` options you can provide on the
command line:
.. note::
@@ -394,7 +387,7 @@ command line:
options are basically identical. If you do not provide a MACHINE
option, ``runqemu`` tries to determine it based on other options.
-- ``ramfs``: Indicates you are booting an initial RAM disk (initramfs)
+- ``ramfs``: Indicates you are booting an :term:`Initramfs`
image, which means the ``FSTYPE`` is ``cpio.gz``.
- ``iso``: Indicates you are booting an ISO image, which means the
@@ -428,6 +421,29 @@ command line:
networking that does not need root access but also is not as easy to
use or comprehensive as the default.
+ Using ``slirp`` by default will forward the guest machine's
+ 22 and 23 TCP ports to host machine's 2222 and 2323 ports
+ (or the next free ports). Specific forwarding rules can be configured
+ by setting ``QB_SLIRP_OPT`` as environment variable or in ``qemuboot.conf``
+ in the :term:`Build Directory` ``deploy/image`` directory.
+ Examples::
+
+ QB_SLIRP_OPT="-netdev user,id=net0,hostfwd=tcp::8080-:80"
+
+ QB_SLIRP_OPT="-netdev user,id=net0,hostfwd=tcp::8080-:80,hostfwd=tcp::2222-:22"
+
+ The first example forwards TCP port 80 from the emulated system to
+ port 8080 (or the next free port) on the host system,
+ allowing access to an http server running in QEMU from
+ ``http://<host ip>:8080/``.
+
+ The second example does the same, but also forwards TCP port 22 on the
+ guest system to 2222 (or the next free port) on the host system,
+ allowing ssh access to the emulated system using
+ ``ssh -P 2222 <user>@<host ip>``.
+
+ Keep in mind that proper configuration of firewall software is required.
+
- ``kvm``: Enables KVM when running "qemux86" or "qemux86-64" QEMU
architectures. For KVM to work, all the following conditions must be
met:
diff --git a/documentation/dev-manual/quilt.rst b/documentation/dev-manual/quilt.rst
new file mode 100644
index 0000000000..59240705ad
--- /dev/null
+++ b/documentation/dev-manual/quilt.rst
@@ -0,0 +1,89 @@
+.. SPDX-License-Identifier: CC-BY-SA-2.0-UK
+
+Using Quilt in Your Workflow
+****************************
+
+`Quilt <https://savannah.nongnu.org/projects/quilt>`__ is a powerful tool
+that allows you to capture source code changes without having a clean
+source tree. This section outlines the typical workflow you can use to
+modify source code, test changes, and then preserve the changes in the
+form of a patch all using Quilt.
+
+.. note::
+
+ With regard to preserving changes to source files, if you clean a
+ recipe or have :ref:`ref-classes-rm-work` enabled, the
+ :ref:`devtool workflow <sdk-manual/extensible:using \`\`devtool\`\` in your sdk workflow>`
+ as described in the Yocto Project Application Development and the
+ Extensible Software Development Kit (eSDK) manual is a safer
+ development flow than the flow that uses Quilt.
+
+Follow these general steps:
+
+#. *Find the Source Code:* Temporary source code used by the
+ OpenEmbedded build system is kept in the :term:`Build Directory`. See the
+ ":ref:`dev-manual/temporary-source-code:finding temporary source code`" section to
+ learn how to locate the directory that has the temporary source code for a
+ particular package.
+
+#. *Change Your Working Directory:* You need to be in the directory that
+ has the temporary source code. That directory is defined by the
+ :term:`S` variable.
+
+#. *Create a New Patch:* Before modifying source code, you need to
+ create a new patch. To create a new patch file, use ``quilt new`` as
+ below::
+
+ $ quilt new my_changes.patch
+
+#. *Notify Quilt and Add Files:* After creating the patch, you need to
+ notify Quilt about the files you plan to edit. You notify Quilt by
+ adding the files to the patch you just created::
+
+ $ quilt add file1.c file2.c file3.c
+
+#. *Edit the Files:* Make your changes in the source code to the files
+ you added to the patch.
+
+#. *Test Your Changes:* Once you have modified the source code, the
+ easiest way to test your changes is by calling the :ref:`ref-tasks-compile`
+ task as shown in the following example::
+
+ $ bitbake -c compile -f package
+
+ The ``-f`` or ``--force`` option forces the specified task to
+ execute. If you find problems with your code, you can just keep
+ editing and re-testing iteratively until things work as expected.
+
+ .. note::
+
+ All the modifications you make to the temporary source code disappear
+ once you run the :ref:`ref-tasks-clean` or :ref:`ref-tasks-cleanall`
+ tasks using BitBake (i.e. ``bitbake -c clean package`` and
+ ``bitbake -c cleanall package``). Modifications will also disappear if
+ you use the :ref:`ref-classes-rm-work` feature as described in
+ the ":ref:`dev-manual/disk-space:conserving disk space during builds`"
+ section.
+
+#. *Generate the Patch:* Once your changes work as expected, you need to
+ use Quilt to generate the final patch that contains all your
+ modifications::
+
+ $ quilt refresh
+
+ At this point, the
+ ``my_changes.patch`` file has all your edits made to the ``file1.c``,
+ ``file2.c``, and ``file3.c`` files.
+
+ You can find the resulting patch file in the ``patches/``
+ subdirectory of the source (:term:`S`) directory.
+
+#. *Copy the Patch File:* For simplicity, copy the patch file into a
+ directory named ``files``, which you can create in the same directory
+ that holds the recipe (``.bb``) file or the append (``.bbappend``)
+ file. Placing the patch here guarantees that the OpenEmbedded build
+ system will find the patch. Next, add the patch into the :term:`SRC_URI`
+ of the recipe. Here is an example::
+
+ SRC_URI += "file://my_changes.patch"
+
diff --git a/documentation/dev-manual/read-only-rootfs.rst b/documentation/dev-manual/read-only-rootfs.rst
new file mode 100644
index 0000000000..251178ed54
--- /dev/null
+++ b/documentation/dev-manual/read-only-rootfs.rst
@@ -0,0 +1,89 @@
+.. SPDX-License-Identifier: CC-BY-SA-2.0-UK
+
+Creating a Read-Only Root Filesystem
+************************************
+
+Suppose, for security reasons, you need to disable your target device's
+root filesystem's write permissions (i.e. you need a read-only root
+filesystem). Or, perhaps you are running the device's operating system
+from a read-only storage device. For either case, you can customize your
+image for that behavior.
+
+.. note::
+
+ Supporting a read-only root filesystem requires that the system and
+ applications do not try to write to the root filesystem. You must
+ configure all parts of the target system to write elsewhere, or to
+ gracefully fail in the event of attempting to write to the root
+ filesystem.
+
+Creating the Root Filesystem
+============================
+
+To create the read-only root filesystem, simply add the
+"read-only-rootfs" feature to your image, normally in one of two ways.
+The first way is to add the "read-only-rootfs" image feature in the
+image's recipe file via the :term:`IMAGE_FEATURES` variable::
+
+ IMAGE_FEATURES += "read-only-rootfs"
+
+As an alternative, you can add the same feature
+from within your :term:`Build Directory`'s ``local.conf`` file with the
+associated :term:`EXTRA_IMAGE_FEATURES` variable, as in::
+
+ EXTRA_IMAGE_FEATURES = "read-only-rootfs"
+
+For more information on how to use these variables, see the
+":ref:`dev-manual/customizing-images:Customizing Images Using Custom \`\`IMAGE_FEATURES\`\` and \`\`EXTRA_IMAGE_FEATURES\`\``"
+section. For information on the variables, see
+:term:`IMAGE_FEATURES` and
+:term:`EXTRA_IMAGE_FEATURES`.
+
+Post-Installation Scripts and Read-Only Root Filesystem
+=======================================================
+
+It is very important that you make sure all post-Installation
+(``pkg_postinst``) scripts for packages that are installed into the
+image can be run at the time when the root filesystem is created during
+the build on the host system. These scripts cannot attempt to run during
+the first boot on the target device. With the "read-only-rootfs" feature
+enabled, the build system makes sure that all post-installation scripts
+succeed at file system creation time. If any of these scripts
+still need to be run after the root filesystem is created, the build
+immediately fails. These build-time checks ensure that the build fails
+rather than the target device fails later during its initial boot
+operation.
+
+Most of the common post-installation scripts generated by the build
+system for the out-of-the-box Yocto Project are engineered so that they
+can run during root filesystem creation (e.g. post-installation scripts
+for caching fonts). However, if you create and add custom scripts, you
+need to be sure they can be run during this file system creation.
+
+Here are some common problems that prevent post-installation scripts
+from running during root filesystem creation:
+
+- *Not using $D in front of absolute paths:* The build system defines
+ ``$``\ :term:`D` when the root
+ filesystem is created. Furthermore, ``$D`` is blank when the script
+ is run on the target device. This implies two purposes for ``$D``:
+ ensuring paths are valid in both the host and target environments,
+ and checking to determine which environment is being used as a method
+ for taking appropriate actions.
+
+- *Attempting to run processes that are specific to or dependent on the
+ target architecture:* You can work around these attempts by using
+ native tools, which run on the host system, to accomplish the same
+ tasks, or by alternatively running the processes under QEMU, which
+ has the ``qemu_run_binary`` function. For more information, see the
+ :ref:`ref-classes-qemu` class.
+
+Areas With Write Access
+=======================
+
+With the "read-only-rootfs" feature enabled, any attempt by the target
+to write to the root filesystem at runtime fails. Consequently, you must
+make sure that you configure processes and applications that attempt
+these types of writes do so to directories with write access (e.g.
+``/tmp`` or ``/var/run``).
+
diff --git a/documentation/dev-manual/runtime-testing.rst b/documentation/dev-manual/runtime-testing.rst
new file mode 100644
index 0000000000..7a2b42f25a
--- /dev/null
+++ b/documentation/dev-manual/runtime-testing.rst
@@ -0,0 +1,594 @@
+.. SPDX-License-Identifier: CC-BY-SA-2.0-UK
+
+Performing Automated Runtime Testing
+************************************
+
+The OpenEmbedded build system makes available a series of automated
+tests for images to verify runtime functionality. You can run these
+tests on either QEMU or actual target hardware. Tests are written in
+Python making use of the ``unittest`` module, and the majority of them
+run commands on the target system over SSH. This section describes how
+you set up the environment to use these tests, run available tests, and
+write and add your own tests.
+
+For information on the test and QA infrastructure available within the
+Yocto Project, see the ":ref:`ref-manual/release-process:testing and quality assurance`"
+section in the Yocto Project Reference Manual.
+
+Enabling Tests
+==============
+
+Depending on whether you are planning to run tests using QEMU or on the
+hardware, you have to take different steps to enable the tests. See the
+following subsections for information on how to enable both types of
+tests.
+
+Enabling Runtime Tests on QEMU
+------------------------------
+
+In order to run tests, you need to do the following:
+
+- *Set up to avoid interaction with sudo for networking:* To
+ accomplish this, you must do one of the following:
+
+ - Add ``NOPASSWD`` for your user in ``/etc/sudoers`` either for all
+ commands or just for ``runqemu-ifup``. You must provide the full
+ path as that can change if you are using multiple clones of the
+ source repository.
+
+ .. note::
+
+ On some distributions, you also need to comment out "Defaults
+ requiretty" in ``/etc/sudoers``.
+
+ - Manually configure a tap interface for your system.
+
+ - Run as root the script in ``scripts/runqemu-gen-tapdevs``, which
+ should generate a list of tap devices. This is the option
+ typically chosen for Autobuilder-type environments.
+
+ .. note::
+
+ - Be sure to use an absolute path when calling this script
+ with sudo.
+
+ - Ensure that your host has the package ``iptables`` installed.
+
+ - The package recipe ``qemu-helper-native`` is required to run
+ this script. Build the package using the following command::
+
+ $ bitbake qemu-helper-native
+
+- *Set the DISPLAY variable:* You need to set this variable so that
+ you have an X server available (e.g. start ``vncserver`` for a
+ headless machine).
+
+- *Be sure your host's firewall accepts incoming connections from
+ 192.168.7.0/24:* Some of the tests (in particular DNF tests) start an
+ HTTP server on a random high number port, which is used to serve
+ files to the target. The DNF module serves
+ ``${WORKDIR}/oe-rootfs-repo`` so it can run DNF channel commands.
+ That means your host's firewall must accept incoming connections from
+ 192.168.7.0/24, which is the default IP range used for tap devices by
+ ``runqemu``.
+
+- *Be sure your host has the correct packages installed:* Depending
+ your host's distribution, you need to have the following packages
+ installed:
+
+ - Ubuntu and Debian: ``sysstat`` and ``iproute2``
+
+ - openSUSE: ``sysstat`` and ``iproute2``
+
+ - Fedora: ``sysstat`` and ``iproute``
+
+ - CentOS: ``sysstat`` and ``iproute``
+
+Once you start running the tests, the following happens:
+
+#. A copy of the root filesystem is written to ``${WORKDIR}/testimage``.
+
+#. The image is booted under QEMU using the standard ``runqemu`` script.
+
+#. A default timeout of 500 seconds occurs to allow for the boot process
+ to reach the login prompt. You can change the timeout period by
+ setting
+ :term:`TEST_QEMUBOOT_TIMEOUT`
+ in the ``local.conf`` file.
+
+#. Once the boot process is reached and the login prompt appears, the
+ tests run. The full boot log is written to
+ ``${WORKDIR}/testimage/qemu_boot_log``.
+
+#. Each test module loads in the order found in :term:`TEST_SUITES`. You can
+ find the full output of the commands run over SSH in
+ ``${WORKDIR}/testimgage/ssh_target_log``.
+
+#. If no failures occur, the task running the tests ends successfully.
+ You can find the output from the ``unittest`` in the task log at
+ ``${WORKDIR}/temp/log.do_testimage``.
+
+Enabling Runtime Tests on Hardware
+----------------------------------
+
+The OpenEmbedded build system can run tests on real hardware, and for
+certain devices it can also deploy the image to be tested onto the
+device beforehand.
+
+For automated deployment, a "controller image" is installed onto the
+hardware once as part of setup. Then, each time tests are to be run, the
+following occurs:
+
+#. The controller image is booted into and used to write the image to be
+ tested to a second partition.
+
+#. The device is then rebooted using an external script that you need to
+ provide.
+
+#. The device boots into the image to be tested.
+
+When running tests (independent of whether the image has been deployed
+automatically or not), the device is expected to be connected to a
+network on a pre-determined IP address. You can either use static IP
+addresses written into the image, or set the image to use DHCP and have
+your DHCP server on the test network assign a known IP address based on
+the MAC address of the device.
+
+In order to run tests on hardware, you need to set :term:`TEST_TARGET` to an
+appropriate value. For QEMU, you do not have to change anything, the
+default value is "qemu". For running tests on hardware, the following
+options are available:
+
+- *"simpleremote":* Choose "simpleremote" if you are going to run tests
+ on a target system that is already running the image to be tested and
+ is available on the network. You can use "simpleremote" in
+ conjunction with either real hardware or an image running within a
+ separately started QEMU or any other virtual machine manager.
+
+- *"SystemdbootTarget":* Choose "SystemdbootTarget" if your hardware is
+ an EFI-based machine with ``systemd-boot`` as bootloader and
+ ``core-image-testmaster`` (or something similar) is installed. Also,
+ your hardware under test must be in a DHCP-enabled network that gives
+ it the same IP address for each reboot.
+
+ If you choose "SystemdbootTarget", there are additional requirements
+ and considerations. See the
+ ":ref:`dev-manual/runtime-testing:selecting systemdboottarget`" section, which
+ follows, for more information.
+
+- *"BeagleBoneTarget":* Choose "BeagleBoneTarget" if you are deploying
+ images and running tests on the BeagleBone "Black" or original
+ "White" hardware. For information on how to use these tests, see the
+ comments at the top of the BeagleBoneTarget
+ ``meta-yocto-bsp/lib/oeqa/controllers/beaglebonetarget.py`` file.
+
+- *"GrubTarget":* Choose "GrubTarget" if you are deploying images and running
+ tests on any generic PC that boots using GRUB. For information on how
+ to use these tests, see the comments at the top of the GrubTarget
+ ``meta-yocto-bsp/lib/oeqa/controllers/grubtarget.py`` file.
+
+- *"your-target":* Create your own custom target if you want to run
+ tests when you are deploying images and running tests on a custom
+ machine within your BSP layer. To do this, you need to add a Python
+ unit that defines the target class under ``lib/oeqa/controllers/``
+ within your layer. You must also provide an empty ``__init__.py``.
+ For examples, see files in ``meta-yocto-bsp/lib/oeqa/controllers/``.
+
+Selecting SystemdbootTarget
+---------------------------
+
+If you did not set :term:`TEST_TARGET` to "SystemdbootTarget", then you do
+not need any information in this section. You can skip down to the
+":ref:`dev-manual/runtime-testing:running tests`" section.
+
+If you did set :term:`TEST_TARGET` to "SystemdbootTarget", you also need to
+perform a one-time setup of your controller image by doing the following:
+
+#. *Set EFI_PROVIDER:* Be sure that :term:`EFI_PROVIDER` is as follows::
+
+ EFI_PROVIDER = "systemd-boot"
+
+#. *Build the controller image:* Build the ``core-image-testmaster`` image.
+ The ``core-image-testmaster`` recipe is provided as an example for a
+ "controller" image and you can customize the image recipe as you would
+ any other recipe.
+
+ Image recipe requirements are:
+
+ - Inherits ``core-image`` so that kernel modules are installed.
+
+ - Installs normal linux utilities not BusyBox ones (e.g. ``bash``,
+ ``coreutils``, ``tar``, ``gzip``, and ``kmod``).
+
+ - Uses a custom :term:`Initramfs` image with a custom
+ installer. A normal image that you can install usually creates a
+ single root filesystem partition. This image uses another installer that
+ creates a specific partition layout. Not all Board Support
+ Packages (BSPs) can use an installer. For such cases, you need to
+ manually create the following partition layout on the target:
+
+ - First partition mounted under ``/boot``, labeled "boot".
+
+ - The main root filesystem partition where this image gets installed,
+ which is mounted under ``/``.
+
+ - Another partition labeled "testrootfs" where test images get
+ deployed.
+
+#. *Install image:* Install the image that you just built on the target
+ system.
+
+The final thing you need to do when setting :term:`TEST_TARGET` to
+"SystemdbootTarget" is to set up the test image:
+
+#. *Set up your local.conf file:* Make sure you have the following
+ statements in your ``local.conf`` file::
+
+ IMAGE_FSTYPES += "tar.gz"
+ IMAGE_CLASSES += "testimage"
+ TEST_TARGET = "SystemdbootTarget"
+ TEST_TARGET_IP = "192.168.2.3"
+
+#. *Build your test image:* Use BitBake to build the image::
+
+ $ bitbake core-image-sato
+
+Power Control
+-------------
+
+For most hardware targets other than "simpleremote", you can control
+power:
+
+- You can use :term:`TEST_POWERCONTROL_CMD` together with
+ :term:`TEST_POWERCONTROL_EXTRA_ARGS` as a command that runs on the host
+ and does power cycling. The test code passes one argument to that
+ command: off, on or cycle (off then on). Here is an example that
+ could appear in your ``local.conf`` file::
+
+ TEST_POWERCONTROL_CMD = "powercontrol.exp test 10.11.12.1 nuc1"
+
+ In this example, the expect
+ script does the following:
+
+ .. code-block:: shell
+
+ ssh test@10.11.12.1 "pyctl nuc1 arg"
+
+ It then runs a Python script that controls power for a label called
+ ``nuc1``.
+
+ .. note::
+
+ You need to customize :term:`TEST_POWERCONTROL_CMD` and
+ :term:`TEST_POWERCONTROL_EXTRA_ARGS` for your own setup. The one requirement
+ is that it accepts "on", "off", and "cycle" as the last argument.
+
+- When no command is defined, it connects to the device over SSH and
+ uses the classic reboot command to reboot the device. Classic reboot
+ is fine as long as the machine actually reboots (i.e. the SSH test
+ has not failed). It is useful for scenarios where you have a simple
+ setup, typically with a single board, and where some manual
+ interaction is okay from time to time.
+
+If you have no hardware to automatically perform power control but still
+wish to experiment with automated hardware testing, you can use the
+``dialog-power-control`` script that shows a dialog prompting you to perform
+the required power action. This script requires either KDialog or Zenity
+to be installed. To use this script, set the
+:term:`TEST_POWERCONTROL_CMD`
+variable as follows::
+
+ TEST_POWERCONTROL_CMD = "${COREBASE}/scripts/contrib/dialog-power-control"
+
+Serial Console Connection
+-------------------------
+
+For test target classes requiring a serial console to interact with the
+bootloader (e.g. BeagleBoneTarget and GrubTarget),
+you need to specify a command to use to connect to the serial console of
+the target machine by using the
+:term:`TEST_SERIALCONTROL_CMD`
+variable and optionally the
+:term:`TEST_SERIALCONTROL_EXTRA_ARGS`
+variable.
+
+These cases could be a serial terminal program if the machine is
+connected to a local serial port, or a ``telnet`` or ``ssh`` command
+connecting to a remote console server. Regardless of the case, the
+command simply needs to connect to the serial console and forward that
+connection to standard input and output as any normal terminal program
+does. For example, to use the picocom terminal program on serial device
+``/dev/ttyUSB0`` at 115200bps, you would set the variable as follows::
+
+ TEST_SERIALCONTROL_CMD = "picocom /dev/ttyUSB0 -b 115200"
+
+For local
+devices where the serial port device disappears when the device reboots,
+an additional "serdevtry" wrapper script is provided. To use this
+wrapper, simply prefix the terminal command with
+``${COREBASE}/scripts/contrib/serdevtry``::
+
+ TEST_SERIALCONTROL_CMD = "${COREBASE}/scripts/contrib/serdevtry picocom -b 115200 /dev/ttyUSB0"
+
+Running Tests
+=============
+
+You can start the tests automatically or manually:
+
+- *Automatically running tests:* To run the tests automatically after the
+ OpenEmbedded build system successfully creates an image, first set the
+ :term:`TESTIMAGE_AUTO` variable to "1" in your ``local.conf`` file in the
+ :term:`Build Directory`::
+
+ TESTIMAGE_AUTO = "1"
+
+ Next, build your image. If the image successfully builds, the
+ tests run::
+
+ bitbake core-image-sato
+
+- *Manually running tests:* To manually run the tests, first globally
+ inherit the :ref:`ref-classes-testimage` class by editing your
+ ``local.conf`` file::
+
+ IMAGE_CLASSES += "testimage"
+
+ Next, use BitBake to run the tests::
+
+ bitbake -c testimage image
+
+All test files reside in ``meta/lib/oeqa/runtime/cases`` in the
+:term:`Source Directory`. A test name maps
+directly to a Python module. Each test module may contain a number of
+individual tests. Tests are usually grouped together by the area tested
+(e.g tests for systemd reside in ``meta/lib/oeqa/runtime/cases/systemd.py``).
+
+You can add tests to any layer provided you place them in the proper
+area and you extend :term:`BBPATH` in
+the ``local.conf`` file as normal. Be sure that tests reside in
+``layer/lib/oeqa/runtime/cases``.
+
+.. note::
+
+ Be sure that module names do not collide with module names used in
+ the default set of test modules in ``meta/lib/oeqa/runtime/cases``.
+
+You can change the set of tests run by appending or overriding
+:term:`TEST_SUITES` variable in
+``local.conf``. Each name in :term:`TEST_SUITES` represents a required test
+for the image. Test modules named within :term:`TEST_SUITES` cannot be
+skipped even if a test is not suitable for an image (e.g. running the
+RPM tests on an image without ``rpm``). Appending "auto" to
+:term:`TEST_SUITES` causes the build system to try to run all tests that are
+suitable for the image (i.e. each test module may elect to skip itself).
+
+The order you list tests in :term:`TEST_SUITES` is important and influences
+test dependencies. Consequently, tests that depend on other tests should
+be added after the test on which they depend. For example, since the
+``ssh`` test depends on the ``ping`` test, "ssh" needs to come after
+"ping" in the list. The test class provides no re-ordering or dependency
+handling.
+
+.. note::
+
+ Each module can have multiple classes with multiple test methods.
+ And, Python ``unittest`` rules apply.
+
+Here are some things to keep in mind when running tests:
+
+- The default tests for the image are defined as::
+
+ DEFAULT_TEST_SUITES:pn-image = "ping ssh df connman syslog xorg scp vnc date rpm dnf dmesg"
+
+- Add your own test to the list of the by using the following::
+
+ TEST_SUITES:append = " mytest"
+
+- Run a specific list of tests as follows::
+
+ TEST_SUITES = "test1 test2 test3"
+
+ Remember, order is important. Be sure to place a test that is
+ dependent on another test later in the order.
+
+Exporting Tests
+===============
+
+You can export tests so that they can run independently of the build
+system. Exporting tests is required if you want to be able to hand the
+test execution off to a scheduler. You can only export tests that are
+defined in :term:`TEST_SUITES`.
+
+If your image is already built, make sure the following are set in your
+``local.conf`` file::
+
+ INHERIT += "testexport"
+ TEST_TARGET_IP = "IP-address-for-the-test-target"
+ TEST_SERVER_IP = "IP-address-for-the-test-server"
+
+You can then export the tests with the
+following BitBake command form::
+
+ $ bitbake image -c testexport
+
+Exporting the tests places them in the :term:`Build Directory` in
+``tmp/testexport/``\ image, which is controlled by the :term:`TEST_EXPORT_DIR`
+variable.
+
+You can now run the tests outside of the build environment::
+
+ $ cd tmp/testexport/image
+ $ ./runexported.py testdata.json
+
+Here is a complete example that shows IP addresses and uses the
+``core-image-sato`` image::
+
+ INHERIT += "testexport"
+ TEST_TARGET_IP = "192.168.7.2"
+ TEST_SERVER_IP = "192.168.7.1"
+
+Use BitBake to export the tests::
+
+ $ bitbake core-image-sato -c testexport
+
+Run the tests outside of
+the build environment using the following::
+
+ $ cd tmp/testexport/core-image-sato
+ $ ./runexported.py testdata.json
+
+Writing New Tests
+=================
+
+As mentioned previously, all new test files need to be in the proper
+place for the build system to find them. New tests for additional
+functionality outside of the core should be added to the layer that adds
+the functionality, in ``layer/lib/oeqa/runtime/cases`` (as long as
+:term:`BBPATH` is extended in the
+layer's ``layer.conf`` file as normal). Just remember the following:
+
+- Filenames need to map directly to test (module) names.
+
+- Do not use module names that collide with existing core tests.
+
+- Minimally, an empty ``__init__.py`` file must be present in the runtime
+ directory.
+
+To create a new test, start by copying an existing module (e.g.
+``oe_syslog.py`` or ``gcc.py`` are good ones to use). Test modules can use
+code from ``meta/lib/oeqa/utils``, which are helper classes.
+
+.. note::
+
+ Structure shell commands such that you rely on them and they return a
+ single code for success. Be aware that sometimes you will need to
+ parse the output. See the ``df.py`` and ``date.py`` modules for examples.
+
+You will notice that all test classes inherit ``oeRuntimeTest``, which
+is found in ``meta/lib/oetest.py``. This base class offers some helper
+attributes, which are described in the following sections:
+
+Class Methods
+-------------
+
+Class methods are as follows:
+
+- *hasPackage(pkg):* Returns "True" if ``pkg`` is in the installed
+ package list of the image, which is based on the manifest file that
+ is generated during the :ref:`ref-tasks-rootfs` task.
+
+- *hasFeature(feature):* Returns "True" if the feature is in
+ :term:`IMAGE_FEATURES` or
+ :term:`DISTRO_FEATURES`.
+
+Class Attributes
+----------------
+
+Class attributes are as follows:
+
+- *pscmd:* Equals "ps -ef" if ``procps`` is installed in the image.
+ Otherwise, ``pscmd`` equals "ps" (busybox).
+
+- *tc:* The called test context, which gives access to the
+ following attributes:
+
+ - *d:* The BitBake datastore, which allows you to use stuff such
+ as ``oeRuntimeTest.tc.d.getVar("VIRTUAL-RUNTIME_init_manager")``.
+
+ - *testslist and testsrequired:* Used internally. The tests
+ do not need these.
+
+ - *filesdir:* The absolute path to
+ ``meta/lib/oeqa/runtime/files``, which contains helper files for
+ tests meant for copying on the target such as small files written
+ in C for compilation.
+
+ - *target:* The target controller object used to deploy and
+ start an image on a particular target (e.g. Qemu, SimpleRemote,
+ and SystemdbootTarget). Tests usually use the following:
+
+ - *ip:* The target's IP address.
+
+ - *server_ip:* The host's IP address, which is usually used
+ by the DNF test suite.
+
+ - *run(cmd, timeout=None):* The single, most used method.
+ This command is a wrapper for: ``ssh root@host "cmd"``. The
+ command returns a tuple: (status, output), which are what their
+ names imply - the return code of "cmd" and whatever output it
+ produces. The optional timeout argument represents the number
+ of seconds the test should wait for "cmd" to return. If the
+ argument is "None", the test uses the default instance's
+ timeout period, which is 300 seconds. If the argument is "0",
+ the test runs until the command returns.
+
+ - *copy_to(localpath, remotepath):*
+ ``scp localpath root@ip:remotepath``.
+
+ - *copy_from(remotepath, localpath):*
+ ``scp root@host:remotepath localpath``.
+
+Instance Attributes
+-------------------
+
+There is a single instance attribute, which is ``target``. The ``target``
+instance attribute is identical to the class attribute of the same name,
+which is described in the previous section. This attribute exists as
+both an instance and class attribute so tests can use
+``self.target.run(cmd)`` in instance methods instead of
+``oeRuntimeTest.tc.target.run(cmd)``.
+
+Installing Packages in the DUT Without the Package Manager
+==========================================================
+
+When a test requires a package built by BitBake, it is possible to
+install that package. Installing the package does not require a package
+manager be installed in the device under test (DUT). It does, however,
+require an SSH connection and the target must be using the
+``sshcontrol`` class.
+
+.. note::
+
+ This method uses ``scp`` to copy files from the host to the target, which
+ causes permissions and special attributes to be lost.
+
+A JSON file is used to define the packages needed by a test. This file
+must be in the same path as the file used to define the tests.
+Furthermore, the filename must map directly to the test module name with
+a ``.json`` extension.
+
+The JSON file must include an object with the test name as keys of an
+object or an array. This object (or array of objects) uses the following
+data:
+
+- "pkg" --- a mandatory string that is the name of the package to be
+ installed.
+
+- "rm" --- an optional boolean, which defaults to "false", that specifies
+ to remove the package after the test.
+
+- "extract" --- an optional boolean, which defaults to "false", that
+ specifies if the package must be extracted from the package format.
+ When set to "true", the package is not automatically installed into
+ the DUT.
+
+Here is an example JSON file that handles test "foo" installing
+package "bar" and test "foobar" installing packages "foo" and "bar".
+Once the test is complete, the packages are removed from the DUT::
+
+ {
+ "foo": {
+ "pkg": "bar"
+ },
+ "foobar": [
+ {
+ "pkg": "foo",
+ "rm": true
+ },
+ {
+ "pkg": "bar",
+ "rm": true
+ }
+ ]
+ }
+
diff --git a/documentation/dev-manual/sbom.rst b/documentation/dev-manual/sbom.rst
new file mode 100644
index 0000000000..b72bad1554
--- /dev/null
+++ b/documentation/dev-manual/sbom.rst
@@ -0,0 +1,83 @@
+.. SPDX-License-Identifier: CC-BY-SA-2.0-UK
+
+Creating a Software Bill of Materials
+*************************************
+
+Once you are able to build an image for your project, once the licenses for
+each software component are all identified (see
+":ref:`dev-manual/licenses:working with licenses`") and once vulnerability
+fixes are applied (see ":ref:`dev-manual/vulnerabilities:checking
+for vulnerabilities`"), the OpenEmbedded build system can generate
+a description of all the components you used, their licenses, their dependencies,
+their sources, the changes that were applied to them and the known
+vulnerabilities that were fixed.
+
+This description is generated in the form of a *Software Bill of Materials*
+(:term:`SBOM`), using the :term:`SPDX` standard.
+
+When you release software, this is the most standard way to provide information
+about the Software Supply Chain of your software image and SDK. The
+:term:`SBOM` tooling is often used to ensure open source license compliance by
+providing the license texts used in the product which legal departments and end
+users can read in standardized format.
+
+:term:`SBOM` information is also critical to performing vulnerability exposure
+assessments, as all the components used in the Software Supply Chain are listed.
+
+The OpenEmbedded build system doesn't generate such information by default.
+To make this happen, you must inherit the
+:ref:`ref-classes-create-spdx` class from a configuration file::
+
+ INHERIT += "create-spdx"
+
+Upon building an image, you will then get:
+
+- :term:`SPDX` output in JSON format as an ``IMAGE-MACHINE.spdx.json`` file in
+ ``tmp/deploy/images/MACHINE/`` inside the :term:`Build Directory`.
+
+- This toplevel file is accompanied by an ``IMAGE-MACHINE.spdx.index.json``
+ containing an index of JSON :term:`SPDX` files for individual recipes.
+
+- The compressed archive ``IMAGE-MACHINE.spdx.tar.zst`` contains the index
+ and the files for the single recipes.
+
+The :ref:`ref-classes-create-spdx` class offers options to include
+more information in the output :term:`SPDX` data:
+
+- Make the json files more human readable by setting (:term:`SPDX_PRETTY`).
+
+- Add compressed archives of the files in the generated target packages by
+ setting (:term:`SPDX_ARCHIVE_PACKAGED`).
+
+- Add a description of the source files used to generate host tools and target
+ packages (:term:`SPDX_INCLUDE_SOURCES`)
+
+- Add archives of these source files themselves (:term:`SPDX_ARCHIVE_SOURCES`).
+
+Though the toplevel :term:`SPDX` output is available in
+``tmp/deploy/images/MACHINE/`` inside the :term:`Build Directory`, ancillary
+generated files are available in ``tmp/deploy/spdx/MACHINE`` too, such as:
+
+- The individual :term:`SPDX` JSON files in the ``IMAGE-MACHINE.spdx.tar.zst``
+ archive.
+
+- Compressed archives of the files in the generated target packages,
+ in ``packages/packagename.tar.zst`` (when :term:`SPDX_ARCHIVE_PACKAGED`
+ is set).
+
+- Compressed archives of the source files used to build the host tools
+ and the target packages in ``recipes/recipe-packagename.tar.zst``
+ (when :term:`SPDX_ARCHIVE_SOURCES` is set). Those are needed to fulfill
+ "source code access" license requirements.
+
+See also the :term:`SPDX_CUSTOM_ANNOTATION_VARS` variable which allows
+to associate custom notes to a recipe.
+See the `tools page <https://spdx.dev/resources/tools/>`__ on the :term:`SPDX`
+project website for a list of tools to consume and transform the :term:`SPDX`
+data generated by the OpenEmbedded build system.
+
+See also Joshua Watt's presentations
+`Automated SBoM generation with OpenEmbedded and the Yocto Project <https://youtu.be/Q5UQUM6zxVU>`__
+at FOSDEM 2023 and
+`SPDX in the Yocto Project <https://fosdem.org/2024/schedule/event/fosdem-2024-3318-spdx-in-the-yocto-project/>`__
+at FOSDEM 2024.
diff --git a/documentation/dev-manual/securing-images.rst b/documentation/dev-manual/securing-images.rst
new file mode 100644
index 0000000000..e5791d3d6d
--- /dev/null
+++ b/documentation/dev-manual/securing-images.rst
@@ -0,0 +1,156 @@
+.. SPDX-License-Identifier: CC-BY-SA-2.0-UK
+
+Making Images More Secure
+*************************
+
+Security is of increasing concern for embedded devices. Consider the
+issues and problems discussed in just this sampling of work found across
+the Internet:
+
+- *"*\ `Security Risks of Embedded
+ Systems <https://www.schneier.com/blog/archives/2014/01/security_risks_9.html>`__\ *"*
+ by Bruce Schneier
+
+- *"*\ `Internet Census
+ 2012 <http://census2012.sourceforge.net/paper.html>`__\ *"* by Carna
+ Botnet
+
+- *"*\ `Security Issues for Embedded
+ Devices <https://elinux.org/images/6/6f/Security-issues.pdf>`__\ *"*
+ by Jake Edge
+
+When securing your image is of concern, there are steps, tools, and
+variables that you can consider to help you reach the security goals you
+need for your particular device. Not all situations are identical when
+it comes to making an image secure. Consequently, this section provides
+some guidance and suggestions for consideration when you want to make
+your image more secure.
+
+.. note::
+
+ Because the security requirements and risks are different for every
+ type of device, this section cannot provide a complete reference on
+ securing your custom OS. It is strongly recommended that you also
+ consult other sources of information on embedded Linux system
+ hardening and on security.
+
+General Considerations
+======================
+
+There are general considerations that help you create more secure images.
+You should consider the following suggestions to make your device
+more secure:
+
+- Scan additional code you are adding to the system (e.g. application
+ code) by using static analysis tools. Look for buffer overflows and
+ other potential security problems.
+
+- Pay particular attention to the security for any web-based
+ administration interface.
+
+ Web interfaces typically need to perform administrative functions and
+ tend to need to run with elevated privileges. Thus, the consequences
+ resulting from the interface's security becoming compromised can be
+ serious. Look for common web vulnerabilities such as
+ cross-site-scripting (XSS), unvalidated inputs, and so forth.
+
+ As with system passwords, the default credentials for accessing a
+ web-based interface should not be the same across all devices. This
+ is particularly true if the interface is enabled by default as it can
+ be assumed that many end-users will not change the credentials.
+
+- Ensure you can update the software on the device to mitigate
+ vulnerabilities discovered in the future. This consideration
+ especially applies when your device is network-enabled.
+
+- Regularly scan and apply fixes for CVE security issues affecting
+ all software components in the product, see ":ref:`dev-manual/vulnerabilities:checking for vulnerabilities`".
+
+- Regularly update your version of Poky and OE-Core from their upstream
+ developers, e.g. to apply updates and security fixes from stable
+ and :term:`LTS` branches.
+
+- Ensure you remove or disable debugging functionality before producing
+ the final image. For information on how to do this, see the
+ ":ref:`dev-manual/securing-images:considerations specific to the openembedded build system`"
+ section.
+
+- Ensure you have no network services listening that are not needed.
+
+- Remove any software from the image that is not needed.
+
+- Enable hardware support for secure boot functionality when your
+ device supports this functionality.
+
+Security Flags
+==============
+
+The Yocto Project has security flags that you can enable that help make
+your build output more secure. The security flags are in the
+``meta/conf/distro/include/security_flags.inc`` file in your
+:term:`Source Directory` (e.g. ``poky``).
+
+.. note::
+
+ Depending on the recipe, certain security flags are enabled and
+ disabled by default.
+
+Use the following line in your ``local.conf`` file or in your custom
+distribution configuration file to enable the security compiler and
+linker flags for your build::
+
+ require conf/distro/include/security_flags.inc
+
+Considerations Specific to the OpenEmbedded Build System
+========================================================
+
+You can take some steps that are specific to the OpenEmbedded build
+system to make your images more secure:
+
+- Ensure "debug-tweaks" is not one of your selected
+ :term:`IMAGE_FEATURES`.
+ When creating a new project, the default is to provide you with an
+ initial ``local.conf`` file that enables this feature using the
+ :term:`EXTRA_IMAGE_FEATURES`
+ variable with the line::
+
+ EXTRA_IMAGE_FEATURES = "debug-tweaks"
+
+ To disable that feature, simply comment out that line in your
+ ``local.conf`` file, or make sure :term:`IMAGE_FEATURES` does not contain
+ "debug-tweaks" before producing your final image. Among other things,
+ leaving this in place sets the root password as blank, which makes
+ logging in for debugging or inspection easy during development but
+ also means anyone can easily log in during production.
+
+- It is possible to set a root password for the image and also to set
+ passwords for any extra users you might add (e.g. administrative or
+ service type users). When you set up passwords for multiple images or
+ users, you should not duplicate passwords.
+
+ To set up passwords, use the :ref:`ref-classes-extrausers` class, which
+ is the preferred method. For an example on how to set up both root and
+ user passwords, see the ":ref:`ref-classes-extrausers`" section.
+
+ .. note::
+
+ When adding extra user accounts or setting a root password, be
+ cautious about setting the same password on every device. If you
+ do this, and the password you have set is exposed, then every
+ device is now potentially compromised. If you need this access but
+ want to ensure security, consider setting a different, random
+ password for each device. Typically, you do this as a separate
+ step after you deploy the image onto the device.
+
+- Consider enabling a Mandatory Access Control (MAC) framework such as
+ SMACK or SELinux and tuning it appropriately for your device's usage.
+ You can find more information in the
+ :yocto_git:`meta-selinux </meta-selinux/>` layer.
+
+Tools for Hardening Your Image
+==============================
+
+The Yocto Project provides tools for making your image more secure. You
+can find these tools in the ``meta-security`` layer of the
+:yocto_git:`Yocto Project Source Repositories <>`.
+
diff --git a/documentation/dev-manual/security-subjects.rst b/documentation/dev-manual/security-subjects.rst
new file mode 100644
index 0000000000..1b02b6a9e9
--- /dev/null
+++ b/documentation/dev-manual/security-subjects.rst
@@ -0,0 +1,189 @@
+.. SPDX-License-Identifier: CC-BY-SA-2.0-UK
+
+Dealing with Vulnerability Reports
+**********************************
+
+The Yocto Project and OpenEmbedded are open-source, community-based projects
+used in numerous products. They assemble multiple other open-source projects,
+and need to handle security issues and practices both internal (in the code
+maintained by both projects), and external (maintained by other projects and
+organizations).
+
+This manual assembles security-related information concerning the whole
+ecosystem. It includes information on reporting a potential security issue,
+the operation of the YP Security team and how to contribute in the
+related code. It is written to be useful for both security researchers and
+YP developers.
+
+How to report a potential security vulnerability?
+=================================================
+
+If you would like to report a public issue (for example, one with a released
+CVE number), please report it using the
+:yocto_bugs:`Security Bugzilla </enter_bug.cgi?product=Security>`.
+
+If you are dealing with a not-yet-released issue, or an urgent one, please send
+a message to security AT yoctoproject DOT org, including as many details as
+possible: the layer or software module affected, the recipe and its version,
+and any example code, if available. This mailing list is monitored by the
+Yocto Project Security team.
+
+For each layer, you might also look for specific instructions (if any) for
+reporting potential security issues in the specific ``SECURITY.md`` file at the
+root of the repository. Instructions on how and where submit a patch are
+usually available in ``README.md``. If this is your first patch to the
+Yocto Project/OpenEmbedded, you might want to have a look into the
+Contributor's Manual section
+":ref:`contributor-guide/submit-changes:preparing changes for submission`".
+
+Branches maintained with security fixes
+---------------------------------------
+
+See the
+:ref:`Release process <ref-manual/release-process:Stable Release Process>`
+documentation for details regarding the policies and maintenance of stable
+branches.
+
+The :yocto_wiki:`Releases page </Releases>` contains a list
+of all releases of the Yocto Project. Versions in gray are no longer actively
+maintained with security patches, but well-tested patches may still be accepted
+for them for significant issues.
+
+Security-related discussions at the Yocto Project
+-------------------------------------------------
+
+We have set up two security-related mailing lists:
+
+ - Public List: yocto [dash] security [at] yoctoproject[dot] org
+
+ This is a public mailing list for anyone to subscribe to. This list is an
+ open list to discuss public security issues/patches and security-related
+ initiatives. For more information, including subscription information,
+ please see the :yocto_lists:`yocto-security mailing list info page </g/yocto-security>`.
+
+ - Private List: security [at] yoctoproject [dot] org
+
+ This is a private mailing list for reporting non-published potential
+ vulnerabilities. The list is monitored by the Yocto Project Security team.
+
+
+What you should do if you find a security vulnerability
+-------------------------------------------------------
+
+If you find a security flaw: a crash, an information leakage, or anything that
+can have a security impact if exploited in any Open Source software built or
+used by the Yocto Project, please report this to the Yocto Project Security
+Team. If you prefer to contact the upstream project directly, please send a
+copy to the security team at the Yocto Project as well. If you believe this is
+highly sensitive information, please report the vulnerability in a secure way,
+i.e. encrypt the email and send it to the private list. This ensures that
+the exploit is not leaked and exploited before a response/fix has been generated.
+
+Security team
+=============
+
+The Yocto Project/OpenEmbedded security team coordinates the work on security
+subjects in the project. All general discussion takes place publicly. The
+Security Team only uses confidential communication tools to deal with private
+vulnerability reports before they are released.
+
+Security team appointment
+-------------------------
+
+The Yocto Project Security Team consists of at least three members. When new
+members are needed, the Yocto Project Technical Steering Committee (YP TSC)
+asks for nominations by public channels including a nomination deadline.
+Self-nominations are possible. When the limit time is
+reached, the YP TSC posts the list of candidates for the comments of project
+participants and developers. Comments may be sent publicly or privately to the
+YP and OE TSCs. The candidates are approved by both YP TSC and OpenEmbedded
+Technical Steering Committee (OE TSC) and the final list of the team members
+is announced publicly. The aim is to have people representing technical
+leadership, security knowledge and infrastructure present with enough people
+to provide backup/coverage but keep the notification list small enough to
+minimize information risk and maintain trust.
+
+YP Security Team members may resign at any time.
+
+Security Team Operations
+------------------------
+
+The work of the Security Team might require high confidentiality. Team members
+are individuals selected by merit and do not represent the companies they work
+for. They do not share information about confidential issues outside of the team
+and do not hint about ongoing embargoes.
+
+Team members can bring in domain experts as needed. Those people should be
+added to individual issues only and adhere to the same standards as the YP
+Security Team.
+
+The YP security team organizes its meetings and communication as needed.
+
+When the YP Security team receives a report about a potential security
+vulnerability, they quickly analyze and notify the reporter of the result.
+They might also request more information.
+
+If the issue is confirmed and affects the code maintained by the YP, they
+confidentially notify maintainers of that code and work with them to prepare
+a fix.
+
+If the issue is confirmed and affects an upstream project, the YP security team
+notifies the project. Usually, the upstream project analyzes the problem again.
+If they deem it a real security problem in their software, they develop and
+release a fix following their security policy. They may want to include the
+original reporter in the loop. There is also sometimes some coordination for
+handling patches, backporting patches etc, or just understanding the problem
+or what caused it.
+
+When the fix is publicly available, the YP security team member or the
+package maintainer sends patches against the YP code base, following usual
+procedures, including public code review.
+
+What Yocto Security Team does when it receives a security vulnerability
+-----------------------------------------------------------------------
+
+The YP Security Team team performs a quick analysis and would usually report
+the flaw to the upstream project. Normally the upstream project analyzes the
+problem. If they deem it a real security problem in their software, they
+develop and release a fix following their own security policy. They may want
+to include the original reporter in the loop. There is also sometimes some
+coordination for handling patches, backporting patches etc, or just
+understanding the problem or what caused it.
+
+The security policy of the upstream project might include a notification to
+Linux distributions or other important downstream projects in advance to
+discuss coordinated disclosure. These mailing lists are normally non-public.
+
+When the upstream project releases a version with the fix, they are responsible
+for contacting `Mitre <https://www.cve.org/>`__ to get a CVE number assigned and
+the CVE record published.
+
+If an upstream project does not respond quickly
+-----------------------------------------------
+
+If an upstream project does not fix the problem in a reasonable time,
+the Yocto's Security Team will contact other interested parties (usually
+other distributions) in the community and together try to solve the
+vulnerability as quickly as possible.
+
+The Yocto Project Security team adheres to the 90 days disclosure policy
+by default. An increase of the embargo time is possible when necessary.
+
+Current Security Team members
+-----------------------------
+
+For secure communications, please send your messages encrypted using the GPG
+keys. Remember, message headers are not encrypted so do not include sensitive
+information in the subject line.
+
+ - Ross Burton: <ross@burtonini.com> `Public key <https://keys.openpgp.org/search?q=ross%40burtonini.com>`__
+
+ - Michael Halstead: <mhalstead [at] linuxfoundation [dot] org>
+ `Public key <https://pgp.mit.edu/pks/lookup?op=vindex&search=0x3373170601861969>`__
+ or `Public key <https://keyserver.ubuntu.com/pks/lookup?op=get&search=0xd1f2407285e571ed12a407a73373170601861969>`__
+
+ - Richard Purdie: <richard.purdie@linuxfoundation.org> `Public key <https://keys.openpgp.org/search?q=richard.purdie%40linuxfoundation.org>`__
+
+ - Marta Rybczynska: <marta DOT rybczynska [at] syslinbit [dot] com> `Public key <https://keys.openpgp.org/search?q=marta.rybczynska@syslinbit.com>`__
+
+ - Steve Sakoman: <steve [at] sakoman [dot] com> `Public key <https://keys.openpgp.org/search?q=steve%40sakoman.com>`__
diff --git a/documentation/dev-manual/speeding-up-build.rst b/documentation/dev-manual/speeding-up-build.rst
new file mode 100644
index 0000000000..6e0d7873ac
--- /dev/null
+++ b/documentation/dev-manual/speeding-up-build.rst
@@ -0,0 +1,109 @@
+.. SPDX-License-Identifier: CC-BY-SA-2.0-UK
+
+Speeding Up a Build
+*******************
+
+Build time can be an issue. By default, the build system uses simple
+controls to try and maximize build efficiency. In general, the default
+settings for all the following variables result in the most efficient
+build times when dealing with single socket systems (i.e. a single CPU).
+If you have multiple CPUs, you might try increasing the default values
+to gain more speed. See the descriptions in the glossary for each
+variable for more information:
+
+- :term:`BB_NUMBER_THREADS`:
+ The maximum number of threads BitBake simultaneously executes.
+
+- :term:`BB_NUMBER_PARSE_THREADS`:
+ The number of threads BitBake uses during parsing.
+
+- :term:`PARALLEL_MAKE`: Extra
+ options passed to the ``make`` command during the
+ :ref:`ref-tasks-compile` task in
+ order to specify parallel compilation on the local build host.
+
+- :term:`PARALLEL_MAKEINST`:
+ Extra options passed to the ``make`` command during the
+ :ref:`ref-tasks-install` task in
+ order to specify parallel installation on the local build host.
+
+As mentioned, these variables all scale to the number of processor cores
+available on the build system. For single socket systems, this
+auto-scaling ensures that the build system fundamentally takes advantage
+of potential parallel operations during the build based on the build
+machine's capabilities.
+
+Additional factors that can affect build speed are:
+
+- File system type: The file system type that the build is being
+ performed on can also influence performance. Using ``ext4`` is
+ recommended as compared to ``ext2`` and ``ext3`` due to ``ext4``
+ improved features such as extents.
+
+- Disabling the updating of access time using ``noatime``: The
+ ``noatime`` mount option prevents the build system from updating file
+ and directory access times.
+
+- Setting a longer commit: Using the "commit=" mount option increases
+ the interval in seconds between disk cache writes. Changing this
+ interval from the five second default to something longer increases
+ the risk of data loss but decreases the need to write to the disk,
+ thus increasing the build performance.
+
+- Choosing the packaging backend: Of the available packaging backends,
+ IPK is the fastest. Additionally, selecting a singular packaging
+ backend also helps.
+
+- Using ``tmpfs`` for :term:`TMPDIR`
+ as a temporary file system: While this can help speed up the build,
+ the benefits are limited due to the compiler using ``-pipe``. The
+ build system goes to some lengths to avoid ``sync()`` calls into the
+ file system on the principle that if there was a significant failure,
+ the :term:`Build Directory` contents could easily be rebuilt.
+
+- Inheriting the :ref:`ref-classes-rm-work` class:
+ Inheriting this class has shown to speed up builds due to
+ significantly lower amounts of data stored in the data cache as well
+ as on disk. Inheriting this class also makes cleanup of
+ :term:`TMPDIR` faster, at the
+ expense of being easily able to dive into the source code. File
+ system maintainers have recommended that the fastest way to clean up
+ large numbers of files is to reformat partitions rather than delete
+ files due to the linear nature of partitions. This, of course,
+ assumes you structure the disk partitions and file systems in a way
+ that this is practical.
+
+Aside from the previous list, you should keep some trade offs in mind
+that can help you speed up the build:
+
+- Remove items from
+ :term:`DISTRO_FEATURES`
+ that you might not need.
+
+- Exclude debug symbols and other debug information: If you do not need
+ these symbols and other debug information, disabling the ``*-dbg``
+ package generation can speed up the build. You can disable this
+ generation by setting the
+ :term:`INHIBIT_PACKAGE_DEBUG_SPLIT`
+ variable to "1".
+
+- Disable static library generation for recipes derived from
+ ``autoconf`` or ``libtool``: Here is an example showing how to
+ disable static libraries and still provide an override to handle
+ exceptions::
+
+ STATICLIBCONF = "--disable-static"
+ STATICLIBCONF:sqlite3-native = ""
+ EXTRA_OECONF += "${STATICLIBCONF}"
+
+ .. note::
+
+ - Some recipes need static libraries in order to work correctly
+ (e.g. ``pseudo-native`` needs ``sqlite3-native``). Overrides,
+ as in the previous example, account for these kinds of
+ exceptions.
+
+ - Some packages have packaging code that assumes the presence of
+ the static libraries. If so, you might need to exclude them as
+ well.
+
diff --git a/documentation/dev-manual/start.rst b/documentation/dev-manual/start.rst
index 8cf3ebe316..386e5f5d29 100644
--- a/documentation/dev-manual/start.rst
+++ b/documentation/dev-manual/start.rst
@@ -29,14 +29,14 @@ however, keep in mind, the procedure here is simply a starting point.
You can build off these steps and customize the procedure to fit any
particular working environment and set of practices.
-1. *Determine Who is Going to be Developing:* You first need to
+#. *Determine Who is Going to be Developing:* You first need to
understand who is going to be doing anything related to the Yocto
Project and determine their roles. Making this determination is
essential to completing subsequent steps, which are to get your
equipment together and set up your development environment's
hardware topology.
- Here are possible roles:
+ Possible roles are:
- *Application Developer:* This type of developer does application
level work on top of an existing software stack.
@@ -52,7 +52,7 @@ particular working environment and set of practices.
automated tests that are used to ensure all application and core
system development meets desired quality standards.
-2. *Gather the Hardware:* Based on the size and make-up of the team,
+#. *Gather the Hardware:* Based on the size and make-up of the team,
get the hardware together. Ideally, any development, build, or test
engineer uses a system that runs a supported Linux distribution.
These systems, in general, should be high performance (e.g. dual,
@@ -66,13 +66,13 @@ particular working environment and set of practices.
building Yocto Project development containers to be run under
Docker, which is described later.
-3. *Understand the Hardware Topology of the Environment:* Once you
+#. *Understand the Hardware Topology of the Environment:* Once you
understand the hardware involved and the make-up of the team, you
can understand the hardware topology of the development environment.
You can get a visual idea of the machines and their roles across the
development environment.
-4. *Use Git as Your Source Control Manager (SCM):* Keeping your
+#. *Use Git as Your Source Control Manager (SCM):* Keeping your
:term:`Metadata` (i.e. recipes,
configuration files, classes, and so forth) and any software you are
developing under the control of an SCM system that is compatible
@@ -88,30 +88,18 @@ particular working environment and set of practices.
For information about BitBake, see the
:doc:`bitbake:index`.
- It is relatively easy to set up Git services and create
- infrastructure like :yocto_git:`/`, which is based on
- server software called ``gitolite`` with ``cgit`` being used to
- generate the web interface that lets you view the repositories. The
- ``gitolite`` software identifies users using SSH keys and allows
+ It is relatively easy to set up Git services and create infrastructure like
+ :yocto_git:`/`, which is based on server software called
+ `Gitolite <https://gitolite.com>`__
+ with `cgit <https://git.zx2c4.com/cgit/about/>`__ being used to
+ generate the web interface that lets you view the repositories.
+ ``gitolite`` identifies users using SSH keys and allows
branch-based access controls to repositories that you can control as
little or as much as necessary.
- .. note::
-
- The setup of these services is beyond the scope of this manual.
- However, here are sites describing how to perform setup:
-
- - `Gitolite <https://gitolite.com>`__: Information for
- ``gitolite``.
-
- - `Interfaces, frontends, and
- tools <https://git.wiki.kernel.org/index.php/Interfaces,_frontends,_and_tools>`__:
- Documentation on how to create interfaces and frontends for
- Git.
-
-5. *Set up the Application Development Machines:* As mentioned earlier,
+#. *Set up the Application Development Machines:* As mentioned earlier,
application developers are creating applications on top of existing
- software stacks. Following are some best practices for setting up
+ software stacks. Here are some best practices for setting up
machines used for application development:
- Use a pre-built toolchain that contains the software stack
@@ -128,9 +116,9 @@ particular working environment and set of practices.
- Use multiple toolchains installed locally into different
locations to allow development across versions.
-6. *Set up the Core Development Machines:* As mentioned earlier, core
+#. *Set up the Core Development Machines:* As mentioned earlier, core
developers work on the contents of the operating system itself.
- Following are some best practices for setting up machines used for
+ Here are some best practices for setting up machines used for
developing images:
- Have the :term:`OpenEmbedded Build System` available on
@@ -145,7 +133,7 @@ particular working environment and set of practices.
- Share layers amongst the developers of a particular project and
contain the policy configuration that defines the project.
-7. *Set up an Autobuilder:* Autobuilders are often the core of the
+#. *Set up an Autobuilder:* Autobuilders are often the core of the
development environment. It is here that changes from individual
developers are brought together and centrally tested. Based on this
automated build and test environment, subsequent decisions about
@@ -183,12 +171,12 @@ particular working environment and set of practices.
- Allows scheduling of builds so that resources can be used
efficiently.
-8. *Set up Test Machines:* Use a small number of shared, high
+#. *Set up Test Machines:* Use a small number of shared, high
performance systems for testing purposes. Developers can use these
systems for wider, more extensive testing while they continue to
develop locally using their primary development system.
-9. *Document Policies and Change Flow:* The Yocto Project uses a
+#. *Document Policies and Change Flow:* The Yocto Project uses a
hierarchical structure and a pull model. There are scripts to create and
send pull requests (i.e. ``create-pull-request`` and
``send-pull-request``). This model is in line with other open source
@@ -213,7 +201,7 @@ particular working environment and set of practices.
possible. Chances are if you have discovered the need for changes,
someone else in the community needs them also.
-10. *Development Environment Summary:* Aside from the previous steps,
+#. *Development Environment Summary:* Aside from the previous steps,
here are best practices within the Yocto Project development
environment:
@@ -223,7 +211,7 @@ particular working environment and set of practices.
- Maintain your Metadata in layers that make sense for your
situation. See the ":ref:`overview-manual/yp-intro:the yocto project layer model`"
section in the Yocto Project Overview and Concepts Manual and the
- ":ref:`dev-manual/common-tasks:understanding and creating layers`"
+ ":ref:`dev-manual/layers:understanding and creating layers`"
section for more information on layers.
- Separate the project's Metadata and code by using separate Git
@@ -246,14 +234,13 @@ particular working environment and set of practices.
- The Yocto Project community encourages you to send patches to the
project to fix bugs or add features. If you do submit patches,
follow the project commit guidelines for writing good commit
- messages. See the
- ":ref:`dev-manual/common-tasks:submitting a change to the yocto project`"
- section.
+ messages. See the ":doc:`../contributor-guide/submit-changes`"
+ section in the Yocto Project and OpenEmbedded Contributor Guide.
- Send changes to the core sooner than later as others are likely
to run into the same issues. For some guidance on mailing lists
- to use, see the list in the
- ":ref:`dev-manual/common-tasks:submitting a change to the yocto project`"
+ to use, see the lists in the
+ ":ref:`contributor-guide/submit-changes:finding a suitable mailing list`"
section. For a description
of the available mailing lists, see the ":ref:`resources-mailinglist`" section in
the Yocto Project Reference Manual.
@@ -267,16 +254,16 @@ development using the Yocto Project. Your build host can be a native
Linux machine (recommended), it can be a machine (Linux, Mac, or
Windows) that uses `CROPS <https://github.com/crops/poky-container>`__,
which leverages `Docker Containers <https://www.docker.com/>`__ or it
-can be a Windows machine capable of running Windows Subsystem For Linux
-v2 (WSL).
+can be a Windows machine capable of running version 2 of Windows Subsystem
+For Linux (WSL 2).
.. note::
- The Yocto Project is not compatible with
- `Windows Subsystem for Linux v1 <https://en.wikipedia.org/wiki/Windows_Subsystem_for_Linux>`__.
- It is compatible but not officially supported nor validated with
- WSLv2. If you still decide to use WSL please upgrade to
- `WSLv2 <https://docs.microsoft.com/en-us/windows/wsl/install-win10>`__.
+ The Yocto Project is not compatible with version 1 of
+ :wikipedia:`Windows Subsystem for Linux <Windows_Subsystem_for_Linux>`.
+ It is compatible but neither officially supported nor validated with
+ WSL 2. If you still decide to use WSL please upgrade to
+ `WSL 2 <https://learn.microsoft.com/en-us/windows/wsl/install>`__.
Once your build host is set up to use the Yocto Project, further steps
are necessary depending on what you want to accomplish. See the
@@ -296,22 +283,22 @@ Setting Up a Native Linux Host
Follow these steps to prepare a native Linux machine as your Yocto
Project Build Host:
-1. *Use a Supported Linux Distribution:* You should have a reasonably
+#. *Use a Supported Linux Distribution:* You should have a reasonably
current Linux-based host system. You will have the best results with
a recent release of Fedora, openSUSE, Debian, Ubuntu, RHEL or CentOS
as these releases are frequently tested against the Yocto Project and
officially supported. For a list of the distributions under
validation and their status, see the ":ref:`Supported Linux
- Distributions <detailed-supported-distros>`"
+ Distributions <system-requirements-supported-distros>`"
section in the Yocto Project Reference Manual and the wiki page at
:yocto_wiki:`Distribution Support </Distribution_Support>`.
-2. *Have Enough Free Memory:* Your system should have at least 50 Gbytes
+#. *Have Enough Free Memory:* Your system should have at least 50 Gbytes
of free disk space for building images.
-3. *Meet Minimal Version Requirements:* The OpenEmbedded build system
+#. *Meet Minimal Version Requirements:* The OpenEmbedded build system
should be able to run on any modern distribution that has the
- following versions for Git, tar, Python and gcc.
+ following versions for Git, tar, Python, gcc and make.
- Git &MIN_GIT_VERSION; or greater
@@ -321,13 +308,15 @@ Project Build Host:
- gcc &MIN_GCC_VERSION; or greater.
+ - GNU make &MIN_MAKE_VERSION; or greater
+
If your build host does not meet any of these listed version
requirements, you can take steps to prepare the system so that you
can still use the Yocto Project. See the
- ":ref:`ref-manual/system-requirements:required git, tar, python and gcc versions`"
+ ":ref:`ref-manual/system-requirements:required git, tar, python, make and gcc versions`"
section in the Yocto Project Reference Manual for information.
-4. *Install Development Host Packages:* Required development host
+#. *Install Development Host Packages:* Required development host
packages vary depending on your build host and what you want to do
with the Yocto Project. Collectively, the number of required packages
is large if you want to be able to cover all cases.
@@ -345,7 +334,10 @@ to use the Extensible SDK, see the ":doc:`/sdk-manual/extensible`" Chapter in th
Project Application Development and the Extensible Software Development
Kit (eSDK) manual. If you want to work on the kernel, see the :doc:`/kernel-dev/index`. If you are going to use
Toaster, see the ":doc:`/toaster-manual/setup-and-use`"
-section in the Toaster User Manual.
+section in the Toaster User Manual. If you are a VSCode user, you can configure
+the `Yocto Project BitBake
+<https://marketplace.visualstudio.com/items?itemName=yocto-project.yocto-bitbake>`__
+extension accordingly.
Setting Up to Use CROss PlatformS (CROPS)
-----------------------------------------
@@ -359,7 +351,7 @@ Yocto Project on a Windows, Mac, or Linux machine.
Follow these general steps to prepare a Windows, Mac, or Linux machine
as your Yocto Project build host:
-1. *Determine What Your Build Host Needs:*
+#. *Determine What Your Build Host Needs:*
`Docker <https://www.docker.com/what-docker>`__ is a software
container platform that you need to install on the build host.
Depending on your build host, you might have to install different
@@ -368,20 +360,20 @@ as your Yocto Project build host:
Platforms <https://docs.docker.com/engine/install/#supported-platforms>`__"
your build host needs to run containers.
-2. *Choose What To Install:* Depending on whether or not your build host
+#. *Choose What To Install:* Depending on whether or not your build host
meets system requirements, you need to install "Docker CE Stable" or
the "Docker Toolbox". Most situations call for Docker CE. However, if
you have a build host that does not meet requirements (e.g.
Pre-Windows 10 or Windows 10 "Home" version), you must install Docker
Toolbox instead.
-3. *Go to the Install Site for Your Platform:* Click the link for the
+#. *Go to the Install Site for Your Platform:* Click the link for the
Docker edition associated with your build host's native software. For
example, if your build host is running Microsoft Windows Version 10
and you want the Docker CE Stable edition, click that link under
"Supported Platforms".
-4. *Install the Software:* Once you have understood all the
+#. *Install the Software:* Once you have understood all the
pre-requisites, you can download and install the appropriate
software. Follow the instructions for your specific machine and the
type of the software you need to install:
@@ -410,15 +402,15 @@ as your Yocto Project build host:
Ubuntu <https://docs.docker.com/engine/install/ubuntu/>`__
for Linux build hosts running the Ubuntu distribution.
-5. *Optionally Orient Yourself With Docker:* If you are unfamiliar with
+#. *Optionally Orient Yourself With Docker:* If you are unfamiliar with
Docker and the container concept, you can learn more here -
https://docs.docker.com/get-started/.
-6. *Launch Docker or Docker Toolbox:* You should be able to launch
+#. *Launch Docker or Docker Toolbox:* You should be able to launch
Docker or the Docker Toolbox and have a terminal shell on your
development host.
-7. *Set Up the Containers to Use the Yocto Project:* Go to
+#. *Set Up the Containers to Use the Yocto Project:* Go to
https://github.com/crops/docker-win-mac-docs/wiki and follow
the directions for your particular build host (i.e. Linux, Mac, or
Windows).
@@ -437,37 +429,41 @@ section. If you are going to use the Extensible SDK container, see the
Project Application Development and the Extensible Software Development
Kit (eSDK) manual. If you are going to use the Toaster container, see
the ":doc:`/toaster-manual/setup-and-use`"
-section in the Toaster User Manual.
+section in the Toaster User Manual. If you are a VSCode user, you can configure
+the `Yocto Project BitBake
+<https://marketplace.visualstudio.com/items?itemName=yocto-project.yocto-bitbake>`__
+extension accordingly.
-Setting Up to Use Windows Subsystem For Linux (WSLv2)
+Setting Up to Use Windows Subsystem For Linux (WSL 2)
-----------------------------------------------------
-With `Windows Subsystem for Linux
-(WSLv2) <https://docs.microsoft.com/en-us/windows/wsl/wsl2-about>`__,
+With `Windows Subsystem for Linux (WSL 2)
+<https://learn.microsoft.com/en-us/windows/wsl/>`__,
you can create a Yocto Project development environment that allows you
to build on Windows. You can set up a Linux distribution inside Windows
in which you can develop using the Yocto Project.
-Follow these general steps to prepare a Windows machine using WSLv2 as
+Follow these general steps to prepare a Windows machine using WSL 2 as
your Yocto Project build host:
-1. *Make sure your Windows 10 machine is capable of running WSLv2:*
- WSLv2 is only available for Windows 10 builds > 18917. To check which
- build version you are running, you may open a command prompt on
- Windows and execute the command "ver".
- ::
+#. *Make sure your Windows machine is capable of running WSL 2:*
+
+ While all Windows 11 and Windows Server 2022 builds support WSL 2,
+ the first versions of Windows 10 and Windows Server 2019 didn't.
+ Check the minimum build numbers for `Windows 10
+ <https://learn.microsoft.com/en-us/windows/wsl/install-manual#step-2---check-requirements-for-running-wsl-2>`__
+ and for `Windows Server 2019
+ <https://learn.microsoft.com/en-us/windows/wsl/install-on-server>`__.
+
+ To check which build version you are running, you may open a command
+ prompt on Windows and execute the command "ver"::
C:\Users\myuser> ver
Microsoft Windows [Version 10.0.19041.153]
- If your build is capable of running
- WSLv2 you may continue, for more information on this subject or
- instructions on how to upgrade to WSLv2 visit `Windows 10
- WSLv2 <https://docs.microsoft.com/en-us/windows/wsl/wsl2-install>`__
-
-2. *Install the Linux distribution of your choice inside Windows 10:*
- Once you know your version of Windows 10 supports WSLv2, you can
+#. *Install the Linux distribution of your choice inside WSL 2:*
+ Once you know your version of Windows supports WSL 2, you can
install the distribution of your choice from the Microsoft Store.
Open the Microsoft Store and search for Linux. While there are
several Linux distributions available, the assumption is that your
@@ -476,31 +472,28 @@ your Yocto Project build host:
making your selection, simply click "Get" to download and install the
distribution.
-3. *Check your Linux distribution is using WSLv2:* Open a Windows
+#. *Check which Linux distribution WSL 2 is using:* Open a Windows
PowerShell and run::
C:\WINDOWS\system32> wsl -l -v
NAME STATE VERSION
*Ubuntu Running 2
- Note the version column which says the WSL version
- being used by your distribution, on compatible systems, this can be
- changed back at any point in time.
+ Note that WSL 2 supports running as many different Linux distributions
+ as you want to install.
-4. *Optionally Orient Yourself on WSL:* If you are unfamiliar with WSL,
- you can learn more here -
+#. *Optionally Get Familiar with WSL:* You can learn more on
https://docs.microsoft.com/en-us/windows/wsl/wsl2-about.
-5. *Launch your WSL Distibution:* From the Windows start menu simply
+#. *Launch your WSL Distibution:* From the Windows start menu simply
launch your WSL distribution just like any other application.
-6. *Optimize your WSLv2 storage often:* Due to the way storage is
- handled on WSLv2, the storage space used by the undelying Linux
- distribution is not reflected immedately, and since BitBake heavily
+#. *Optimize your WSL 2 storage often:* Due to the way storage is
+ handled on WSL 2, the storage space used by the underlying Linux
+ distribution is not reflected immediately, and since BitBake heavily
uses storage, after several builds, you may be unaware you are
- running out of space. WSLv2 uses a VHDX file for storage, this issue
- can be easily avoided by manually optimizing this file often, this
- can be done in the following way:
+ running out of space. As WSL 2 uses a VHDX file for storage, this issue
+ can be easily avoided by regularly optimizing this file in a manual way:
1. *Find the location of your VHDX file:*
@@ -554,20 +547,23 @@ your Yocto Project build host:
.. note::
- The current implementation of WSLv2 does not have out-of-the-box
+ The current implementation of WSL 2 does not have out-of-the-box
access to external devices such as those connected through a USB
port, but it automatically mounts your ``C:`` drive on ``/mnt/c/``
(and others), which you can use to share deploy artifacts to be later
- flashed on hardware through Windows, but your build directory should
- not reside inside this mountpoint.
+ flashed on hardware through Windows, but your :term:`Build Directory`
+ should not reside inside this mountpoint.
-Once you have WSLv2 set up, everything is in place to develop just as if
+Once you have WSL 2 set up, everything is in place to develop just as if
you were running on a native Linux machine. If you are going to use the
Extensible SDK container, see the ":doc:`/sdk-manual/extensible`" Chapter in the Yocto
Project Application Development and the Extensible Software Development
Kit (eSDK) manual. If you are going to use the Toaster container, see
the ":doc:`/toaster-manual/setup-and-use`"
-section in the Toaster User Manual.
+section in the Toaster User Manual. If you are a VSCode user, you can configure
+the `Yocto Project BitBake
+<https://marketplace.visualstudio.com/items?itemName=yocto-project.yocto-bitbake>`__
+extension accordingly.
Locating Yocto Project Source Files
===================================
@@ -597,14 +593,14 @@ repository at :yocto_git:`/poky`.
Use the following procedure to locate the latest upstream copy of the
``poky`` Git repository:
-1. *Access Repositories:* Open a browser and go to
+#. *Access Repositories:* Open a browser and go to
:yocto_git:`/` to access the GUI-based interface into the
Yocto Project source repositories.
-2. *Select the Repository:* Click on the repository in which you are
+#. *Select the Repository:* Click on the repository in which you are
interested (e.g. ``poky``).
-3. *Find the URL Used to Clone the Repository:* At the bottom of the
+#. *Find the URL Used to Clone the Repository:* At the bottom of the
page, note the URL used to clone that repository
(e.g. :yocto_git:`/poky`).
@@ -613,63 +609,43 @@ Use the following procedure to locate the latest upstream copy of the
For information on cloning a repository, see the
":ref:`dev-manual/start:cloning the \`\`poky\`\` repository`" section.
-Accessing Index of Releases
----------------------------
+Accessing Source Archives
+-------------------------
+
+The Yocto Project also provides source archives of its releases, which
+are available on :yocto_dl:`/releases/yocto/`. Then, choose the subdirectory
+containing the release you wish to use, for example
+:yocto_dl:`yocto-&DISTRO; </releases/yocto/yocto-&DISTRO;/>`.
-Yocto Project maintains an Index of Releases area that contains related
-files that contribute to the Yocto Project. Rather than Git
-repositories, these files are tarballs that represent snapshots in time
-of a given component.
+You will find there source archives of individual components (if you wish
+to use them individually), and of the corresponding Poky release bundling
+a selection of these components.
.. note::
The recommended method for accessing Yocto Project components is to
use Git to clone the upstream repository and work from within that
- locally cloned repository. However, this section documents how to
- use a tarball snapshot of any given component.
-
-Follow these steps to locate and download a particular tarball:
-
-1. *Access the Index of Releases:* Open a browser and go to
- :yocto_dl:`Index of Releases </releases>`. The
- list represents released components (e.g. ``bitbake``, ``sato``, and
- so on).
-
- .. note::
-
- The ``yocto`` directory contains the full array of released Poky
- tarballs. The ``poky`` directory in the Index of Releases was
- historically used for very early releases and exists now only for
- retroactive completeness.
-
-2. *Select a Component:* Click on any released component in which you
- are interested (e.g. ``yocto``).
-
-3. *Find the Tarball:* Drill down to find the associated tarball. For
- example, click on ``yocto-&DISTRO;`` to view files associated with the
- Yocto Project &DISTRO; release.
-
-4. *Download the Tarball:* Click the tarball to download and save a
- snapshot of the given component.
+ locally cloned repository.
Using the Downloads Page
------------------------
-The :yocto_home:`Yocto Project Website <>` uses a "DOWNLOADS" page
+The :yocto_home:`Yocto Project Website <>` uses a "RELEASES" page
from which you can locate and download tarballs of any Yocto Project
release. Rather than Git repositories, these files represent snapshot
tarballs similar to the tarballs located in the Index of Releases
-described in the ":ref:`dev-manual/start:accessing index of releases`" section.
+described in the ":ref:`dev-manual/start:accessing source archives`" section.
-1. *Go to the Yocto Project Website:* Open The
+#. *Go to the Yocto Project Website:* Open The
:yocto_home:`Yocto Project Website <>` in your browser.
-2. *Get to the Downloads Area:* Select the "DOWNLOADS" item from the
- pull-down "SOFTWARE" tab menu near the top of the page.
+#. *Get to the Downloads Area:* Select the "RELEASES" item from the
+ pull-down "DEVELOPMENT" tab menu near the top of the page.
-3. *Select a Yocto Project Release:* Use the menu next to "RELEASE" to
- display and choose a recent or past supported Yocto Project release
- (e.g. &DISTRO_NAME_NO_CAP;, &DISTRO_NAME_NO_CAP_MINUS_ONE;, and so forth).
+#. *Select a Yocto Project Release:* On the top of the "RELEASE" page currently
+ supported releases are displayed, further down past supported Yocto Project
+ releases are visible. The "Download" links in the rows of the table there
+ will lead to the download tarballs for the release.
.. note::
@@ -679,9 +655,9 @@ described in the ":ref:`dev-manual/start:accessing index of releases`" section.
You can use the "RELEASE ARCHIVE" link to reveal a menu of all Yocto
Project releases.
-4. *Download Tools or Board Support Packages (BSPs):* From the
- "DOWNLOADS" page, you can download tools or BSPs as well. Just scroll
- down the page and look for what you need.
+#. *Download Tools or Board Support Packages (BSPs):* Next to the tarballs you
+ will find download tools or BSPs as well. Just select a Yocto Project
+ release and look for what you need.
Cloning and Checking Out Branches
=================================
@@ -707,10 +683,10 @@ Cloning the ``poky`` Repository
Follow these steps to create a local version of the upstream
:term:`Poky` Git repository.
-1. *Set Your Directory:* Change your working directory to where you want
+#. *Set Your Directory:* Change your working directory to where you want
to create your local copy of ``poky``.
-2. *Clone the Repository:* The following example command clones the
+#. *Clone the Repository:* The following example command clones the
``poky`` repository and uses the default name "poky" for your local
repository::
@@ -766,13 +742,13 @@ and then specifically check out that development branch.
Further development on top of the branch that occurs after check it
out can occur.
-1. *Switch to the Poky Directory:* If you have a local poky Git
+#. *Switch to the Poky Directory:* If you have a local poky Git
repository, switch to that directory. If you do not have the local
copy of poky, see the
":ref:`dev-manual/start:cloning the \`\`poky\`\` repository`"
section.
-2. *Determine Existing Branch Names:*
+#. *Determine Existing Branch Names:*
::
$ git branch -a
@@ -793,7 +769,7 @@ and then specifically check out that development branch.
remotes/origin/zeus-next
... and so on ...
-3. *Check out the Branch:* Check out the development branch in which you
+#. *Check out the Branch:* Check out the development branch in which you
want to work. For example, to access the files for the Yocto Project
&DISTRO; Release (&DISTRO_NAME;), use the following command::
@@ -827,19 +803,19 @@ similar to checking out by branch name except you use tag names.
Checking out a branch based on a tag gives you a stable set of files
not affected by development on the branch above the tag.
-1. *Switch to the Poky Directory:* If you have a local poky Git
+#. *Switch to the Poky Directory:* If you have a local poky Git
repository, switch to that directory. If you do not have the local
copy of poky, see the
":ref:`dev-manual/start:cloning the \`\`poky\`\` repository`"
section.
-2. *Fetch the Tag Names:* To checkout the branch based on a tag name,
+#. *Fetch the Tag Names:* To checkout the branch based on a tag name,
you need to fetch the upstream tags into your local repository::
$ git fetch --tags
$
-3. *List the Tag Names:* You can list the tag names now::
+#. *List the Tag Names:* You can list the tag names now::
$ git tag
1.1_M1.final
@@ -861,7 +837,7 @@ similar to checking out by branch name except you use tag names.
yocto_1.5_M5.rc8
-4. *Check out the Branch:*
+#. *Check out the Branch:*
::
$ git checkout tags/yocto-&DISTRO; -b my_yocto_&DISTRO;
diff --git a/documentation/dev-manual/temporary-source-code.rst b/documentation/dev-manual/temporary-source-code.rst
new file mode 100644
index 0000000000..08bf68d982
--- /dev/null
+++ b/documentation/dev-manual/temporary-source-code.rst
@@ -0,0 +1,66 @@
+.. SPDX-License-Identifier: CC-BY-SA-2.0-UK
+
+Finding Temporary Source Code
+*****************************
+
+You might find it helpful during development to modify the temporary
+source code used by recipes to build packages. For example, suppose you
+are developing a patch and you need to experiment a bit to figure out
+your solution. After you have initially built the package, you can
+iteratively tweak the source code, which is located in the
+:term:`Build Directory`, and then you can force a re-compile and quickly
+test your altered code. Once you settle on a solution, you can then preserve
+your changes in the form of patches.
+
+During a build, the unpacked temporary source code used by recipes to
+build packages is available in the :term:`Build Directory` as defined by the
+:term:`S` variable. Below is the default value for the :term:`S` variable as
+defined in the ``meta/conf/bitbake.conf`` configuration file in the
+:term:`Source Directory`::
+
+ S = "${WORKDIR}/${BP}"
+
+You should be aware that many recipes override the
+:term:`S` variable. For example, recipes that fetch their source from Git
+usually set :term:`S` to ``${WORKDIR}/git``.
+
+.. note::
+
+ The :term:`BP` represents the base recipe name, which consists of the name
+ and version::
+
+ BP = "${BPN}-${PV}"
+
+
+The path to the work directory for the recipe
+(:term:`WORKDIR`) is defined as
+follows::
+
+ ${TMPDIR}/work/${MULTIMACH_TARGET_SYS}/${PN}/${EXTENDPE}${PV}-${PR}
+
+The actual directory depends on several things:
+
+- :term:`TMPDIR`: The top-level build
+ output directory.
+
+- :term:`MULTIMACH_TARGET_SYS`:
+ The target system identifier.
+
+- :term:`PN`: The recipe name.
+
+- :term:`EXTENDPE`: The epoch --- if
+ :term:`PE` is not specified, which is
+ usually the case for most recipes, then :term:`EXTENDPE` is blank.
+
+- :term:`PV`: The recipe version.
+
+- :term:`PR`: The recipe revision.
+
+As an example, assume a Source Directory top-level folder named
+``poky``, a default :term:`Build Directory` at ``poky/build``, and a
+``qemux86-poky-linux`` machine target system. Furthermore, suppose your
+recipe is named ``foo_1.3.0.bb``. In this case, the work directory the
+build system uses to build the package would be as follows::
+
+ poky/build/tmp/work/qemux86-poky-linux/foo/1.3.0-r0
+
diff --git a/documentation/dev-manual/upgrading-recipes.rst b/documentation/dev-manual/upgrading-recipes.rst
new file mode 100644
index 0000000000..4fac78bdfb
--- /dev/null
+++ b/documentation/dev-manual/upgrading-recipes.rst
@@ -0,0 +1,397 @@
+.. SPDX-License-Identifier: CC-BY-SA-2.0-UK
+
+Upgrading Recipes
+*****************
+
+Over time, upstream developers publish new versions for software built
+by layer recipes. It is recommended to keep recipes up-to-date with
+upstream version releases.
+
+While there are several methods to upgrade a recipe, you might
+consider checking on the upgrade status of a recipe first. You can do so
+using the ``devtool check-upgrade-status`` command. See the
+":ref:`devtool-checking-on-the-upgrade-status-of-a-recipe`"
+section in the Yocto Project Reference Manual for more information.
+
+The remainder of this section describes three ways you can upgrade a
+recipe. You can use the Automated Upgrade Helper (AUH) to set up
+automatic version upgrades. Alternatively, you can use
+``devtool upgrade`` to set up semi-automatic version upgrades. Finally,
+you can manually upgrade a recipe by editing the recipe itself.
+
+Using the Auto Upgrade Helper (AUH)
+===================================
+
+The AUH utility works in conjunction with the OpenEmbedded build system
+in order to automatically generate upgrades for recipes based on new
+versions being published upstream. Use AUH when you want to create a
+service that performs the upgrades automatically and optionally sends
+you an email with the results.
+
+AUH allows you to update several recipes with a single use. You can also
+optionally perform build and integration tests using images with the
+results saved to your hard drive and emails of results optionally sent
+to recipe maintainers. Finally, AUH creates Git commits with appropriate
+commit messages in the layer's tree for the changes made to recipes.
+
+.. note::
+
+ In some conditions, you should not use AUH to upgrade recipes
+ and should instead use either ``devtool upgrade`` or upgrade your
+ recipes manually:
+
+ - When AUH cannot complete the upgrade sequence. This situation
+ usually results because custom patches carried by the recipe
+ cannot be automatically rebased to the new version. In this case,
+ ``devtool upgrade`` allows you to manually resolve conflicts.
+
+ - When for any reason you want fuller control over the upgrade
+ process. For example, when you want special arrangements for
+ testing.
+
+The following steps describe how to set up the AUH utility:
+
+#. *Be Sure the Development Host is Set Up:* You need to be sure that
+ your development host is set up to use the Yocto Project. For
+ information on how to set up your host, see the
+ ":ref:`dev-manual/start:Preparing the Build Host`" section.
+
+#. *Make Sure Git is Configured:* The AUH utility requires Git to be
+ configured because AUH uses Git to save upgrades. Thus, you must have
+ Git user and email configured. The following command shows your
+ configurations::
+
+ $ git config --list
+
+ If you do not have the user and
+ email configured, you can use the following commands to do so::
+
+ $ git config --global user.name some_name
+ $ git config --global user.email username@domain.com
+
+#. *Clone the AUH Repository:* To use AUH, you must clone the repository
+ onto your development host. The following command uses Git to create
+ a local copy of the repository on your system::
+
+ $ git clone git://git.yoctoproject.org/auto-upgrade-helper
+ Cloning into 'auto-upgrade-helper'... remote: Counting objects: 768, done.
+ remote: Compressing objects: 100% (300/300), done.
+ remote: Total 768 (delta 499), reused 703 (delta 434)
+ Receiving objects: 100% (768/768), 191.47 KiB | 98.00 KiB/s, done.
+ Resolving deltas: 100% (499/499), done.
+ Checking connectivity... done.
+
+ AUH is not part of the :term:`OpenEmbedded-Core (OE-Core)` or
+ :term:`Poky` repositories.
+
+#. *Create a Dedicated Build Directory:* Run the :ref:`structure-core-script`
+ script to create a fresh :term:`Build Directory` that you use exclusively
+ for running the AUH utility::
+
+ $ cd poky
+ $ source oe-init-build-env your_AUH_build_directory
+
+ Re-using an existing :term:`Build Directory` and its configurations is not
+ recommended as existing settings could cause AUH to fail or behave
+ undesirably.
+
+#. *Make Configurations in Your Local Configuration File:* Several
+ settings are needed in the ``local.conf`` file in the build
+ directory you just created for AUH. Make these following
+ configurations:
+
+ - If you want to enable :ref:`Build
+ History <dev-manual/build-quality:maintaining build output quality>`,
+ which is optional, you need the following lines in the
+ ``conf/local.conf`` file::
+
+ INHERIT =+ "buildhistory"
+ BUILDHISTORY_COMMIT = "1"
+
+ With this configuration and a successful
+ upgrade, a build history "diff" file appears in the
+ ``upgrade-helper/work/recipe/buildhistory-diff.txt`` file found in
+ your :term:`Build Directory`.
+
+ - If you want to enable testing through the :ref:`ref-classes-testimage`
+ class, which is optional, you need to have the following set in
+ your ``conf/local.conf`` file::
+
+ IMAGE_CLASSES += "testimage"
+
+ .. note::
+
+ If your distro does not enable by default ptest, which Poky
+ does, you need the following in your ``local.conf`` file::
+
+ DISTRO_FEATURES:append = " ptest"
+
+
+#. *Optionally Start a vncserver:* If you are running in a server
+ without an X11 session, you need to start a vncserver::
+
+ $ vncserver :1
+ $ export DISPLAY=:1
+
+#. *Create and Edit an AUH Configuration File:* You need to have the
+ ``upgrade-helper/upgrade-helper.conf`` configuration file in your
+ :term:`Build Directory`. You can find a sample configuration file in the
+ :yocto_git:`AUH source repository </auto-upgrade-helper/tree/>`.
+
+ Read through the sample file and make configurations as needed. For
+ example, if you enabled build history in your ``local.conf`` as
+ described earlier, you must enable it in ``upgrade-helper.conf``.
+
+ Also, if you are using the default ``maintainers.inc`` file supplied
+ with Poky and located in ``meta-yocto`` and you do not set a
+ "maintainers_whitelist" or "global_maintainer_override" in the
+ ``upgrade-helper.conf`` configuration, and you specify "-e all" on
+ the AUH command-line, the utility automatically sends out emails to
+ all the default maintainers. Please avoid this.
+
+This next set of examples describes how to use the AUH:
+
+- *Upgrading a Specific Recipe:* To upgrade a specific recipe, use the
+ following form::
+
+ $ upgrade-helper.py recipe_name
+
+ For example, this command upgrades the ``xmodmap`` recipe::
+
+ $ upgrade-helper.py xmodmap
+
+- *Upgrading a Specific Recipe to a Particular Version:* To upgrade a
+ specific recipe to a particular version, use the following form::
+
+ $ upgrade-helper.py recipe_name -t version
+
+ For example, this command upgrades the ``xmodmap`` recipe to version 1.2.3::
+
+ $ upgrade-helper.py xmodmap -t 1.2.3
+
+- *Upgrading all Recipes to the Latest Versions and Suppressing Email
+ Notifications:* To upgrade all recipes to their most recent versions
+ and suppress the email notifications, use the following command::
+
+ $ upgrade-helper.py all
+
+- *Upgrading all Recipes to the Latest Versions and Send Email
+ Notifications:* To upgrade all recipes to their most recent versions
+ and send email messages to maintainers for each attempted recipe as
+ well as a status email, use the following command::
+
+ $ upgrade-helper.py -e all
+
+Once you have run the AUH utility, you can find the results in the AUH
+:term:`Build Directory`::
+
+ ${BUILDDIR}/upgrade-helper/timestamp
+
+The AUH utility
+also creates recipe update commits from successful upgrade attempts in
+the layer tree.
+
+You can easily set up to run the AUH utility on a regular basis by using
+a cron job. See the
+:yocto_git:`weeklyjob.sh </auto-upgrade-helper/tree/weeklyjob.sh>`
+file distributed with the utility for an example.
+
+Using ``devtool upgrade``
+=========================
+
+As mentioned earlier, an alternative method for upgrading recipes to
+newer versions is to use
+:doc:`devtool upgrade </ref-manual/devtool-reference>`.
+You can read about ``devtool upgrade`` in general in the
+":ref:`sdk-manual/extensible:use \`\`devtool upgrade\`\` to create a version of the recipe that supports a newer version of the software`"
+section in the Yocto Project Application Development and the Extensible
+Software Development Kit (eSDK) Manual.
+
+To see all the command-line options available with ``devtool upgrade``,
+use the following help command::
+
+ $ devtool upgrade -h
+
+If you want to find out what version a recipe is currently at upstream
+without any attempt to upgrade your local version of the recipe, you can
+use the following command::
+
+ $ devtool latest-version recipe_name
+
+As mentioned in the previous section describing AUH, ``devtool upgrade``
+works in a less-automated manner than AUH. Specifically,
+``devtool upgrade`` only works on a single recipe that you name on the
+command line, cannot perform build and integration testing using images,
+and does not automatically generate commits for changes in the source
+tree. Despite all these "limitations", ``devtool upgrade`` updates the
+recipe file to the new upstream version and attempts to rebase custom
+patches contained by the recipe as needed.
+
+.. note::
+
+ AUH uses much of ``devtool upgrade`` behind the scenes making AUH somewhat
+ of a "wrapper" application for ``devtool upgrade``.
+
+A typical scenario involves having used Git to clone an upstream
+repository that you use during build operations. Because you have built the
+recipe in the past, the layer is likely added to your
+configuration already. If for some reason, the layer is not added, you
+could add it easily using the
+":ref:`bitbake-layers <bsp-guide/bsp:creating a new bsp layer using the \`\`bitbake-layers\`\` script>`"
+script. For example, suppose you use the ``nano.bb`` recipe from the
+``meta-oe`` layer in the ``meta-openembedded`` repository. For this
+example, assume that the layer has been cloned into following area::
+
+ /home/scottrif/meta-openembedded
+
+The following command from your :term:`Build Directory` adds the layer to
+your build configuration (i.e. ``${BUILDDIR}/conf/bblayers.conf``)::
+
+ $ bitbake-layers add-layer /home/scottrif/meta-openembedded/meta-oe
+ NOTE: Starting bitbake server...
+ Parsing recipes: 100% |##########################################| Time: 0:00:55
+ Parsing of 1431 .bb files complete (0 cached, 1431 parsed). 2040 targets, 56 skipped, 0 masked, 0 errors.
+ Removing 12 recipes from the x86_64 sysroot: 100% |##############| Time: 0:00:00
+ Removing 1 recipes from the x86_64_i586 sysroot: 100% |##########| Time: 0:00:00
+ Removing 5 recipes from the i586 sysroot: 100% |#################| Time: 0:00:00
+ Removing 5 recipes from the qemux86 sysroot: 100% |##############| Time: 0:00:00
+
+For this example, assume that the ``nano.bb`` recipe that
+is upstream has a 2.9.3 version number. However, the version in the
+local repository is 2.7.4. The following command from your build
+directory automatically upgrades the recipe for you::
+
+ $ devtool upgrade nano -V 2.9.3
+ NOTE: Starting bitbake server...
+ NOTE: Creating workspace layer in /home/scottrif/poky/build/workspace
+ Parsing recipes: 100% |##########################################| Time: 0:00:46
+ Parsing of 1431 .bb files complete (0 cached, 1431 parsed). 2040 targets, 56 skipped, 0 masked, 0 errors.
+ NOTE: Extracting current version source...
+ NOTE: Resolving any missing task queue dependencies
+ .
+ .
+ .
+ NOTE: Executing SetScene Tasks
+ NOTE: Executing RunQueue Tasks
+ NOTE: Tasks Summary: Attempted 74 tasks of which 72 didn't need to be rerun and all succeeded.
+ Adding changed files: 100% |#####################################| Time: 0:00:00
+ NOTE: Upgraded source extracted to /home/scottrif/poky/build/workspace/sources/nano
+ NOTE: New recipe is /home/scottrif/poky/build/workspace/recipes/nano/nano_2.9.3.bb
+
+.. note::
+
+ Using the ``-V`` option is not necessary. Omitting the version number causes
+ ``devtool upgrade`` to upgrade the recipe to the most recent version.
+
+Continuing with this example, you can use ``devtool build`` to build the
+newly upgraded recipe::
+
+ $ devtool build nano
+ NOTE: Starting bitbake server...
+ Loading cache: 100% |################################################################################################| Time: 0:00:01
+ Loaded 2040 entries from dependency cache.
+ Parsing recipes: 100% |##############################################################################################| Time: 0:00:00
+ Parsing of 1432 .bb files complete (1431 cached, 1 parsed). 2041 targets, 56 skipped, 0 masked, 0 errors.
+ NOTE: Resolving any missing task queue dependencies
+ .
+ .
+ .
+ NOTE: Executing SetScene Tasks
+ NOTE: Executing RunQueue Tasks
+ NOTE: nano: compiling from external source tree /home/scottrif/poky/build/workspace/sources/nano
+ NOTE: Tasks Summary: Attempted 520 tasks of which 304 didn't need to be rerun and all succeeded.
+
+Within the ``devtool upgrade`` workflow, you can
+deploy and test your rebuilt software. For this example,
+however, running ``devtool finish`` cleans up the workspace once the
+source in your workspace is clean. This usually means using Git to stage
+and submit commits for the changes generated by the upgrade process.
+
+Once the tree is clean, you can clean things up in this example with the
+following command from the ``${BUILDDIR}/workspace/sources/nano``
+directory::
+
+ $ devtool finish nano meta-oe
+ NOTE: Starting bitbake server...
+ Loading cache: 100% |################################################################################################| Time: 0:00:00
+ Loaded 2040 entries from dependency cache.
+ Parsing recipes: 100% |##############################################################################################| Time: 0:00:01
+ Parsing of 1432 .bb files complete (1431 cached, 1 parsed). 2041 targets, 56 skipped, 0 masked, 0 errors.
+ NOTE: Adding new patch 0001-nano.bb-Stuff-I-changed-when-upgrading-nano.bb.patch
+ NOTE: Updating recipe nano_2.9.3.bb
+ NOTE: Removing file /home/scottrif/meta-openembedded/meta-oe/recipes-support/nano/nano_2.7.4.bb
+ NOTE: Moving recipe file to /home/scottrif/meta-openembedded/meta-oe/recipes-support/nano
+ NOTE: Leaving source tree /home/scottrif/poky/build/workspace/sources/nano as-is; if you no longer need it then please delete it manually
+
+
+Using the ``devtool finish`` command cleans up the workspace and creates a patch
+file based on your commits. The tool puts all patch files back into the
+source directory in a sub-directory named ``nano`` in this case.
+
+Manually Upgrading a Recipe
+===========================
+
+If for some reason you choose not to upgrade recipes using
+:ref:`dev-manual/upgrading-recipes:Using the Auto Upgrade Helper (AUH)` or
+by :ref:`dev-manual/upgrading-recipes:Using \`\`devtool upgrade\`\``,
+you can manually edit the recipe files to upgrade the versions.
+
+.. note::
+
+ Manually updating multiple recipes scales poorly and involves many
+ steps. The recommendation to upgrade recipe versions is through AUH
+ or ``devtool upgrade``, both of which automate some steps and provide
+ guidance for others needed for the manual process.
+
+To manually upgrade recipe versions, follow these general steps:
+
+#. *Change the Version:* Rename the recipe such that the version (i.e.
+ the :term:`PV` part of the recipe name)
+ changes appropriately. If the version is not part of the recipe name,
+ change the value as it is set for :term:`PV` within the recipe itself.
+
+#. *Update* :term:`SRCREV` *if Needed*: If the source code your recipe builds
+ is fetched from Git or some other version control system, update
+ :term:`SRCREV` to point to the
+ commit hash that matches the new version.
+
+#. *Build the Software:* Try to build the recipe using BitBake. Typical
+ build failures include the following:
+
+ - License statements were updated for the new version. For this
+ case, you need to review any changes to the license and update the
+ values of :term:`LICENSE` and
+ :term:`LIC_FILES_CHKSUM`
+ as needed.
+
+ .. note::
+
+ License changes are often inconsequential. For example, the
+ license text's copyright year might have changed.
+
+ - Custom patches carried by the older version of the recipe might
+ fail to apply to the new version. For these cases, you need to
+ review the failures. Patches might not be necessary for the new
+ version of the software if the upgraded version has fixed those
+ issues. If a patch is necessary and failing, you need to rebase it
+ into the new version.
+
+#. *Optionally Attempt to Build for Several Architectures:* Once you
+ successfully build the new software for a given architecture, you
+ could test the build for other architectures by changing the
+ :term:`MACHINE` variable and
+ rebuilding the software. This optional step is especially important
+ if the recipe is to be released publicly.
+
+#. *Check the Upstream Change Log or Release Notes:* Checking both these
+ reveals if there are new features that could break
+ backwards-compatibility. If so, you need to take steps to mitigate or
+ eliminate that situation.
+
+#. *Optionally Create a Bootable Image and Test:* If you want, you can
+ test the new software by booting it onto actual hardware.
+
+#. *Create a Commit with the Change in the Layer Repository:* After all
+ builds work and any testing is successful, you can create commits for
+ any changes in the layer holding your upgraded recipe.
+
diff --git a/documentation/dev-manual/vulnerabilities.rst b/documentation/dev-manual/vulnerabilities.rst
new file mode 100644
index 0000000000..1bc2a85929
--- /dev/null
+++ b/documentation/dev-manual/vulnerabilities.rst
@@ -0,0 +1,293 @@
+.. SPDX-License-Identifier: CC-BY-SA-2.0-UK
+
+Checking for Vulnerabilities
+****************************
+
+Vulnerabilities in Poky and OE-Core
+===================================
+
+The Yocto Project has an infrastructure to track and address unfixed
+known security vulnerabilities, as tracked by the public
+:wikipedia:`Common Vulnerabilities and Exposures (CVE) <Common_Vulnerabilities_and_Exposures>`
+database.
+
+The Yocto Project maintains a `list of known vulnerabilities
+<https://autobuilder.yocto.io/pub/non-release/patchmetrics/>`__
+for packages in Poky and OE-Core, tracking the evolution of the number of
+unpatched CVEs and the status of patches. Such information is available for
+the current development version and for each supported release.
+
+Security is a process, not a product, and thus at any time, a number of security
+issues may be impacting Poky and OE-Core. It is up to the maintainers, users,
+contributors and anyone interested in the issues to investigate and possibly fix them by
+updating software components to newer versions or by applying patches to address them.
+It is recommended to work with Poky and OE-Core upstream maintainers and submit
+patches to fix them, see ":doc:`../contributor-guide/submit-changes`" for details.
+
+Vulnerability check at build time
+=================================
+
+To enable a check for CVE security vulnerabilities using
+:ref:`ref-classes-cve-check` in the specific image or target you are building,
+add the following setting to your configuration::
+
+ INHERIT += "cve-check"
+
+The CVE database contains some old incomplete entries which have been
+deemed not to impact Poky or OE-Core. These CVE entries can be excluded from the
+check using build configuration::
+
+ include conf/distro/include/cve-extra-exclusions.inc
+
+With this CVE check enabled, BitBake build will try to map each compiled software component
+recipe name and version information to the CVE database and generate recipe and
+image specific reports. These reports will contain:
+
+- metadata about the software component like names and versions
+
+- metadata about the CVE issue such as description and NVD link
+
+- for each software component, a list of CVEs which are possibly impacting this version
+
+- status of each CVE: ``Patched``, ``Unpatched`` or ``Ignored``
+
+The status ``Patched`` means that a patch file to address the security issue has been
+applied. ``Unpatched`` status means that no patches to address the issue have been
+applied and that the issue needs to be investigated. ``Ignored`` means that after
+analysis, it has been deemed to ignore the issue as it for example affects
+the software component on a different operating system platform.
+
+After a build with CVE check enabled, reports for each compiled source recipe will be
+found in ``build/tmp/deploy/cve``.
+
+For example the CVE check report for the ``flex-native`` recipe looks like::
+
+ $ cat poky/build/tmp/deploy/cve/flex-native
+ LAYER: meta
+ PACKAGE NAME: flex-native
+ PACKAGE VERSION: 2.6.4
+ CVE: CVE-2016-6354
+ CVE STATUS: Patched
+ CVE SUMMARY: Heap-based buffer overflow in the yy_get_next_buffer function in Flex before 2.6.1 might allow context-dependent attackers to cause a denial of service or possibly execute arbitrary code via vectors involving num_to_read.
+ CVSS v2 BASE SCORE: 7.5
+ CVSS v3 BASE SCORE: 9.8
+ VECTOR: NETWORK
+ MORE INFORMATION: https://nvd.nist.gov/vuln/detail/CVE-2016-6354
+
+ LAYER: meta
+ PACKAGE NAME: flex-native
+ PACKAGE VERSION: 2.6.4
+ CVE: CVE-2019-6293
+ CVE STATUS: Ignored
+ CVE SUMMARY: An issue was discovered in the function mark_beginning_as_normal in nfa.c in flex 2.6.4. There is a stack exhaustion problem caused by the mark_beginning_as_normal function making recursive calls to itself in certain scenarios involving lots of '*' characters. Remote attackers could leverage this vulnerability to cause a denial-of-service.
+ CVSS v2 BASE SCORE: 4.3
+ CVSS v3 BASE SCORE: 5.5
+ VECTOR: NETWORK
+ MORE INFORMATION: https://nvd.nist.gov/vuln/detail/CVE-2019-6293
+
+For images, a summary of all recipes included in the image and their CVEs is also
+generated in textual and JSON formats. These ``.cve`` and ``.json`` reports can be found
+in the ``tmp/deploy/images`` directory for each compiled image.
+
+At build time CVE check will also throw warnings about ``Unpatched`` CVEs::
+
+ WARNING: flex-2.6.4-r0 do_cve_check: Found unpatched CVE (CVE-2019-6293), for more information check /poky/build/tmp/work/core2-64-poky-linux/flex/2.6.4-r0/temp/cve.log
+ WARNING: libarchive-3.5.1-r0 do_cve_check: Found unpatched CVE (CVE-2021-36976), for more information check /poky/build/tmp/work/core2-64-poky-linux/libarchive/3.5.1-r0/temp/cve.log
+
+It is also possible to check the CVE status of individual packages as follows::
+
+ bitbake -c cve_check flex libarchive
+
+Fixing CVE product name and version mappings
+============================================
+
+By default, :ref:`ref-classes-cve-check` uses the recipe name :term:`BPN` as CVE
+product name when querying the CVE database. If this mapping contains false positives, e.g.
+some reported CVEs are not for the software component in question, or false negatives like
+some CVEs are not found to impact the recipe when they should, then the problems can be
+in the recipe name to CVE product mapping. These mapping issues can be fixed by setting
+the :term:`CVE_PRODUCT` variable inside the recipe. This defines the name of the software component in the
+upstream `NIST CVE database <https://nvd.nist.gov/>`__.
+
+The variable supports using vendor and product names like this::
+
+ CVE_PRODUCT = "flex_project:flex"
+
+In this example the vendor name used in the CVE database is ``flex_project`` and the
+product is ``flex``. With this setting the ``flex`` recipe only maps to this specific
+product and not products from other vendors with same name ``flex``.
+
+Similarly, when the recipe version :term:`PV` is not compatible with software versions used by
+the upstream software component releases and the CVE database, these can be fixed using
+the :term:`CVE_VERSION` variable.
+
+Note that if the CVE entries in the NVD database contain bugs or have missing or incomplete
+information, it is recommended to fix the information there directly instead of working
+around the issues possibly for a long time in Poky and OE-Core side recipes. Feedback to
+NVD about CVE entries can be provided through the `NVD contact form <https://nvd.nist.gov/info/contact-form>`__.
+
+Fixing vulnerabilities in recipes
+=================================
+
+Suppose a CVE security issue impacts a software component. In that case, it can
+be fixed by updating to a newer version, by applying a patch, or by marking it
+as patched via :term:`CVE_STATUS` variable flag. For Poky and OE-Core master
+branches, updating to a more recent software component release with fixes is
+the best option, but patches can be applied if releases are not yet available.
+
+For stable branches, we want to avoid API (Application Programming Interface)
+or ABI (Application Binary Interface) breakages. When submitting an update,
+a minor version update of a component is preferred if the version is
+backward-compatible. Many software components have backward-compatible stable
+versions, with a notable example of the Linux kernel. However, if the new
+version does or likely might introduce incompatibilities, extracting and
+backporting patches is preferred.
+
+Here is an example of fixing CVE security issues with patch files,
+an example from the :oe_layerindex:`ffmpeg recipe for dunfell </layerindex/recipe/122174>`::
+
+ SRC_URI = "https://www.ffmpeg.org/releases/${BP}.tar.xz \
+ file://mips64_cpu_detection.patch \
+ file://CVE-2020-12284.patch \
+ file://0001-libavutil-include-assembly-with-full-path-from-sourc.patch \
+ file://CVE-2021-3566.patch \
+ file://CVE-2021-38291.patch \
+ file://CVE-2022-1475.patch \
+ file://CVE-2022-3109.patch \
+ file://CVE-2022-3341.patch \
+ file://CVE-2022-48434.patch \
+ "
+
+The recipe has both generic and security-related fixes. The CVE patch files are named
+according to the CVE they fix.
+
+When preparing the patch file, take the original patch from the upstream repository.
+Do not use patches from different distributions, except if it is the only available source.
+
+Modify the patch adding OE-related metadata. We will follow the example of the
+``CVE-2022-3341.patch``.
+
+The original `commit message <https://github.com/FFmpeg/FFmpeg/commit/9cf652cef49d74afe3d454f27d49eb1a1394951e.patch/>`__
+is::
+
+ From 9cf652cef49d74afe3d454f27d49eb1a1394951e Mon Sep 17 00:00:00 2001
+ From: Jiasheng Jiang <jiasheng@iscas.ac.cn>
+ Date: Wed, 23 Feb 2022 10:31:59 +0800
+ Subject: [PATCH] avformat/nutdec: Add check for avformat_new_stream
+
+ Check for failure of avformat_new_stream() and propagate
+ the error code.
+
+ Signed-off-by: Michael Niedermayer <michael@niedermayer.cc>
+ ---
+ libavformat/nutdec.c | 16 ++++++++++++----
+ 1 file changed, 12 insertions(+), 4 deletions(-)
+
+
+For the correct operations of the ``cve-check``, it requires the CVE
+identification in a ``CVE:`` tag of the patch file commit message using
+the format::
+
+ CVE: CVE-2022-3341
+
+It is also recommended to add the ``Upstream-Status:`` tag with a link
+to the original patch and sign-off by people working on the backport.
+If there are any modifications to the original patch, note them in
+the ``Comments:`` tag.
+
+With the additional information, the header of the patch file in OE-core becomes::
+
+ From 9cf652cef49d74afe3d454f27d49eb1a1394951e Mon Sep 17 00:00:00 2001
+ From: Jiasheng Jiang <jiasheng@iscas.ac.cn>
+ Date: Wed, 23 Feb 2022 10:31:59 +0800
+ Subject: [PATCH] avformat/nutdec: Add check for avformat_new_stream
+
+ Check for failure of avformat_new_stream() and propagate
+ the error code.
+
+ Signed-off-by: Michael Niedermayer <michael@niedermayer.cc>
+
+ CVE: CVE-2022-3341
+
+ Upstream-Status: Backport [https://github.com/FFmpeg/FFmpeg/commit/9cf652cef49d74afe3d454f27d49eb1a1394951e]
+
+ Comments: Refreshed Hunk
+ Signed-off-by: Narpat Mali <narpat.mali@windriver.com>
+ Signed-off-by: Bhabu Bindu <bhabu.bindu@kpit.com>
+ ---
+ libavformat/nutdec.c | 16 ++++++++++++----
+ 1 file changed, 12 insertions(+), 4 deletions(-)
+
+A good practice is to include the CVE identifier in the patch file name, the patch file
+commit message and optionally in the recipe commit message.
+
+CVE checker will then capture this information and change the CVE status to ``Patched``
+in the generated reports.
+
+If analysis shows that the CVE issue does not impact the recipe due to configuration, platform,
+version or other reasons, the CVE can be marked as ``Ignored`` by using
+the :term:`CVE_STATUS` variable flag with appropriate reason which is mapped to ``Ignored``.
+The entry should have the format like::
+
+ CVE_STATUS[CVE-2016-10642] = "cpe-incorrect: This is specific to the npm package that installs cmake, so isn't relevant to OpenEmbedded"
+
+As mentioned previously, if data in the CVE database is wrong, it is recommended
+to fix those issues in the CVE database (NVD in the case of OE-core and Poky)
+directly.
+
+Note that if there are many CVEs with the same status and reason, those can be
+shared by using the :term:`CVE_STATUS_GROUPS` variable.
+
+Recipes can be completely skipped by CVE check by including the recipe name in
+the :term:`CVE_CHECK_SKIP_RECIPE` variable.
+
+Implementation details
+======================
+
+Here's what the :ref:`ref-classes-cve-check` class does to find unpatched CVE IDs.
+
+First the code goes through each patch file provided by a recipe. If a valid CVE ID
+is found in the name of the file, the corresponding CVE is considered as patched.
+Don't forget that if multiple CVE IDs are found in the filename, only the last
+one is considered. Then, the code looks for ``CVE: CVE-ID`` lines in the patch
+file. The found CVE IDs are also considered as patched.
+Additionally ``CVE_STATUS`` variable flags are parsed for reasons mapped to ``Patched``
+and these are also considered as patched.
+
+Then, the code looks up all the CVE IDs in the NIST database for all the
+products defined in :term:`CVE_PRODUCT`. Then, for each found CVE:
+
+- If the package name (:term:`PN`) is part of
+ :term:`CVE_CHECK_SKIP_RECIPE`, it is considered as ``Patched``.
+
+- If the CVE ID has status ``CVE_STATUS[<CVE ID>] = "ignored"`` or if it's set to
+ any reason which is mapped to status ``Ignored`` via ``CVE_CHECK_STATUSMAP``,
+ it is set as ``Ignored``.
+
+- If the CVE ID is part of the patched CVE for the recipe, it is
+ already considered as ``Patched``.
+
+- Otherwise, the code checks whether the recipe version (:term:`PV`)
+ is within the range of versions impacted by the CVE. If so, the CVE
+ is considered as ``Unpatched``.
+
+The CVE database is stored in :term:`DL_DIR` and can be inspected using
+``sqlite3`` command as follows::
+
+ sqlite3 downloads/CVE_CHECK/nvdcve_1.1.db .dump | grep CVE-2021-37462
+
+When analyzing CVEs, it is recommended to:
+
+- study the latest information in `CVE database <https://nvd.nist.gov/vuln/search>`__.
+
+- check how upstream developers of the software component addressed the issue, e.g.
+ what patch was applied, which upstream release contains the fix.
+
+- check what other Linux distributions like `Debian <https://security-tracker.debian.org/tracker/>`__
+ did to analyze and address the issue.
+
+- follow security notices from other Linux distributions.
+
+- follow public `open source security mailing lists <https://oss-security.openwall.org/wiki/mailing-lists>`__ for
+ discussions and advance notifications of CVE bugs and software releases with fixes.
+
diff --git a/documentation/dev-manual/wayland.rst b/documentation/dev-manual/wayland.rst
new file mode 100644
index 0000000000..097be9cbde
--- /dev/null
+++ b/documentation/dev-manual/wayland.rst
@@ -0,0 +1,90 @@
+.. SPDX-License-Identifier: CC-BY-SA-2.0-UK
+
+Using Wayland and Weston
+************************
+
+:wikipedia:`Wayland <Wayland_(display_server_protocol)>`
+is a computer display server protocol that provides a method for
+compositing window managers to communicate directly with applications
+and video hardware and expects them to communicate with input hardware
+using other libraries. Using Wayland with supporting targets can result
+in better control over graphics frame rendering than an application
+might otherwise achieve.
+
+The Yocto Project provides the Wayland protocol libraries and the
+reference :wikipedia:`Weston <Wayland_(display_server_protocol)#Weston>`
+compositor as part of its release. You can find the integrated packages
+in the ``meta`` layer of the :term:`Source Directory`.
+Specifically, you
+can find the recipes that build both Wayland and Weston at
+``meta/recipes-graphics/wayland``.
+
+You can build both the Wayland and Weston packages for use only with targets
+that accept the :wikipedia:`Mesa 3D and Direct Rendering Infrastructure
+<Mesa_(computer_graphics)>`, which is also known as Mesa DRI. This implies that
+you cannot build and use the packages if your target uses, for example, the
+Intel Embedded Media and Graphics Driver (Intel EMGD) that overrides Mesa DRI.
+
+.. note::
+
+ Due to lack of EGL support, Weston 1.0.3 will not run directly on the
+ emulated QEMU hardware. However, this version of Weston will run
+ under X emulation without issues.
+
+This section describes what you need to do to implement Wayland and use
+the Weston compositor when building an image for a supporting target.
+
+Enabling Wayland in an Image
+============================
+
+To enable Wayland, you need to enable it to be built and enable it to be
+included (installed) in the image.
+
+Building Wayland
+----------------
+
+To cause Mesa to build the ``wayland-egl`` platform and Weston to build
+Wayland with Kernel Mode Setting
+(`KMS <https://wiki.archlinux.org/index.php/Kernel_Mode_Setting>`__)
+support, include the "wayland" flag in the
+:term:`DISTRO_FEATURES`
+statement in your ``local.conf`` file::
+
+ DISTRO_FEATURES:append = " wayland"
+
+.. note::
+
+ If X11 has been enabled elsewhere, Weston will build Wayland with X11
+ support
+
+Installing Wayland and Weston
+-----------------------------
+
+To install the Wayland feature into an image, you must include the
+following
+:term:`CORE_IMAGE_EXTRA_INSTALL`
+statement in your ``local.conf`` file::
+
+ CORE_IMAGE_EXTRA_INSTALL += "wayland weston"
+
+Running Weston
+==============
+
+To run Weston inside X11, enabling it as described earlier and building
+a Sato image is sufficient. If you are running your image under Sato, a
+Weston Launcher appears in the "Utility" category.
+
+Alternatively, you can run Weston through the command-line interpretor
+(CLI), which is better suited for development work. To run Weston under
+the CLI, you need to do the following after your image is built:
+
+#. Run these commands to export ``XDG_RUNTIME_DIR``::
+
+ mkdir -p /tmp/$USER-weston
+ chmod 0700 /tmp/$USER-weston
+ export XDG_RUNTIME_DIR=/tmp/$USER-weston
+
+#. Launch Weston in the shell::
+
+ weston
+
diff --git a/documentation/dev-manual/wic.rst b/documentation/dev-manual/wic.rst
new file mode 100644
index 0000000000..a3880f3a1c
--- /dev/null
+++ b/documentation/dev-manual/wic.rst
@@ -0,0 +1,731 @@
+.. SPDX-License-Identifier: CC-BY-SA-2.0-UK
+
+Creating Partitioned Images Using Wic
+*************************************
+
+Creating an image for a particular hardware target using the
+OpenEmbedded build system does not necessarily mean you can boot that
+image as is on your device. Physical devices accept and boot images in
+various ways depending on the specifics of the device. Usually,
+information about the hardware can tell you what image format the device
+requires. Should your device require multiple partitions on an SD card,
+flash, or an HDD, you can use the OpenEmbedded Image Creator, Wic, to
+create the properly partitioned image.
+
+The ``wic`` command generates partitioned images from existing
+OpenEmbedded build artifacts. Image generation is driven by partitioning
+commands contained in an OpenEmbedded kickstart file (``.wks``)
+specified either directly on the command line or as one of a selection
+of canned kickstart files as shown with the ``wic list images`` command
+in the
+":ref:`dev-manual/wic:generate an image using an existing kickstart file`"
+section. When you apply the command to a given set of build artifacts, the
+result is an image or set of images that can be directly written onto media and
+used on a particular system.
+
+.. note::
+
+ For a kickstart file reference, see the
+ ":ref:`ref-manual/kickstart:openembedded kickstart (\`\`.wks\`\`) reference`"
+ Chapter in the Yocto Project Reference Manual.
+
+The ``wic`` command and the infrastructure it is based on is by
+definition incomplete. The purpose of the command is to allow the
+generation of customized images, and as such, was designed to be
+completely extensible through a plugin interface. See the
+":ref:`dev-manual/wic:using the wic plugin interface`" section
+for information on these plugins.
+
+This section provides some background information on Wic, describes what
+you need to have in place to run the tool, provides instruction on how
+to use the Wic utility, provides information on using the Wic plugins
+interface, and provides several examples that show how to use Wic.
+
+Background
+==========
+
+This section provides some background on the Wic utility. While none of
+this information is required to use Wic, you might find it interesting.
+
+- The name "Wic" is derived from OpenEmbedded Image Creator (oeic). The
+ "oe" diphthong in "oeic" was promoted to the letter "w", because
+ "oeic" is both difficult to remember and to pronounce.
+
+- Wic is loosely based on the Meego Image Creator (``mic``) framework.
+ The Wic implementation has been heavily modified to make direct use
+ of OpenEmbedded build artifacts instead of package installation and
+ configuration, which are already incorporated within the OpenEmbedded
+ artifacts.
+
+- Wic is a completely independent standalone utility that initially
+ provides easier-to-use and more flexible replacements for an existing
+ functionality in OE-Core's :ref:`ref-classes-image-live`
+ class. The difference between Wic and those examples is that with Wic
+ the functionality of those scripts is implemented by a
+ general-purpose partitioning language, which is based on Redhat
+ kickstart syntax.
+
+Requirements
+============
+
+In order to use the Wic utility with the OpenEmbedded Build system, your
+system needs to meet the following requirements:
+
+- The Linux distribution on your development host must support the
+ Yocto Project. See the ":ref:`system-requirements-supported-distros`"
+ section in the Yocto Project Reference Manual for the list of
+ distributions that support the Yocto Project.
+
+- The standard system utilities, such as ``cp``, must be installed on
+ your development host system.
+
+- You must have sourced the build environment setup script (i.e.
+ :ref:`structure-core-script`) found in the :term:`Build Directory`.
+
+- You need to have the build artifacts already available, which
+ typically means that you must have already created an image using the
+ OpenEmbedded build system (e.g. ``core-image-minimal``). While it
+ might seem redundant to generate an image in order to create an image
+ using Wic, the current version of Wic requires the artifacts in the
+ form generated by the OpenEmbedded build system.
+
+- You must build several native tools, which are built to run on the
+ build system::
+
+ $ bitbake wic-tools
+
+- Include "wic" as part of the
+ :term:`IMAGE_FSTYPES`
+ variable.
+
+- Include the name of the :ref:`wic kickstart file <openembedded-kickstart-wks-reference>`
+ as part of the :term:`WKS_FILE` variable. If multiple candidate files can
+ be provided by different layers, specify all the possible names through the
+ :term:`WKS_FILES` variable instead.
+
+Getting Help
+============
+
+You can get general help for the ``wic`` command by entering the ``wic``
+command by itself or by entering the command with a help argument as
+follows::
+
+ $ wic -h
+ $ wic --help
+ $ wic help
+
+Currently, Wic supports seven commands: ``cp``, ``create``, ``help``,
+``list``, ``ls``, ``rm``, and ``write``. You can get help for all these
+commands except "help" by using the following form::
+
+ $ wic help command
+
+For example, the following command returns help for the ``write``
+command::
+
+ $ wic help write
+
+Wic supports help for three topics: ``overview``, ``plugins``, and
+``kickstart``. You can get help for any topic using the following form::
+
+ $ wic help topic
+
+For example, the following returns overview help for Wic::
+
+ $ wic help overview
+
+There is one additional level of help for Wic. You can get help on
+individual images through the ``list`` command. You can use the ``list``
+command to return the available Wic images as follows::
+
+ $ wic list images
+ genericx86 Create an EFI disk image for genericx86*
+ beaglebone-yocto Create SD card image for Beaglebone
+ qemuriscv Create qcow2 image for RISC-V QEMU machines
+ mkefidisk Create an EFI disk image
+ qemuloongarch Create qcow2 image for LoongArch QEMU machines
+ directdisk-multi-rootfs Create multi rootfs image using rootfs plugin
+ directdisk Create a 'pcbios' direct disk image
+ efi-bootdisk
+ mkhybridiso Create a hybrid ISO image
+ directdisk-gpt Create a 'pcbios' direct disk image
+ systemd-bootdisk Create an EFI disk image with systemd-boot
+ sdimage-bootpart Create SD card image with a boot partition
+ qemux86-directdisk Create a qemu machine 'pcbios' direct disk image
+ directdisk-bootloader-config Create a 'pcbios' direct disk image with custom bootloader config
+
+Once you know the list of available
+Wic images, you can use ``help`` with the command to get help on a
+particular image. For example, the following command returns help on the
+"beaglebone-yocto" image::
+
+ $ wic list beaglebone-yocto help
+
+ Creates a partitioned SD card image for Beaglebone.
+ Boot files are located in the first vfat partition.
+
+Operational Modes
+=================
+
+You can use Wic in two different modes, depending on how much control
+you need for specifying the OpenEmbedded build artifacts that are used
+for creating the image: Raw and Cooked:
+
+- *Raw Mode:* You explicitly specify build artifacts through Wic
+ command-line arguments.
+
+- *Cooked Mode:* The current
+ :term:`MACHINE` setting and image
+ name are used to automatically locate and provide the build
+ artifacts. You just supply a kickstart file and the name of the image
+ from which to use artifacts.
+
+Regardless of the mode you use, you need to have the build artifacts
+ready and available.
+
+Raw Mode
+--------
+
+Running Wic in raw mode allows you to specify all the partitions through
+the ``wic`` command line. The primary use for raw mode is if you have
+built your kernel outside of the Yocto Project :term:`Build Directory`.
+In other words, you can point to arbitrary kernel, root filesystem locations,
+and so forth. Contrast this behavior with cooked mode where Wic looks in the
+:term:`Build Directory` (e.g. ``tmp/deploy/images/``\ machine).
+
+The general form of the ``wic`` command in raw mode is::
+
+ $ wic create wks_file options ...
+
+ Where:
+
+ wks_file:
+ An OpenEmbedded kickstart file. You can provide
+ your own custom file or use a file from a set of
+ existing files as described by further options.
+
+ optional arguments:
+ -h, --help show this help message and exit
+ -o OUTDIR, --outdir OUTDIR
+ name of directory to create image in
+ -e IMAGE_NAME, --image-name IMAGE_NAME
+ name of the image to use the artifacts from e.g. core-
+ image-sato
+ -r ROOTFS_DIR, --rootfs-dir ROOTFS_DIR
+ path to the /rootfs dir to use as the .wks rootfs
+ source
+ -b BOOTIMG_DIR, --bootimg-dir BOOTIMG_DIR
+ path to the dir containing the boot artifacts (e.g.
+ /EFI or /syslinux dirs) to use as the .wks bootimg
+ source
+ -k KERNEL_DIR, --kernel-dir KERNEL_DIR
+ path to the dir containing the kernel to use in the
+ .wks bootimg
+ -n NATIVE_SYSROOT, --native-sysroot NATIVE_SYSROOT
+ path to the native sysroot containing the tools to use
+ to build the image
+ -s, --skip-build-check
+ skip the build check
+ -f, --build-rootfs build rootfs
+ -c {gzip,bzip2,xz}, --compress-with {gzip,bzip2,xz}
+ compress image with specified compressor
+ -m, --bmap generate .bmap
+ --no-fstab-update Do not change fstab file.
+ -v VARS_DIR, --vars VARS_DIR
+ directory with <image>.env files that store bitbake
+ variables
+ -D, --debug output debug information
+
+.. note::
+
+ You do not need root privileges to run Wic. In fact, you should not
+ run as root when using the utility.
+
+Cooked Mode
+-----------
+
+Running Wic in cooked mode leverages off artifacts in the
+:term:`Build Directory`. In other words, you do not have to specify kernel or
+root filesystem locations as part of the command. All you need to provide is
+a kickstart file and the name of the image from which to use artifacts
+by using the "-e" option. Wic looks in the :term:`Build Directory` (e.g.
+``tmp/deploy/images/``\ machine) for artifacts.
+
+The general form of the ``wic`` command using Cooked Mode is as follows::
+
+ $ wic create wks_file -e IMAGE_NAME
+
+ Where:
+
+ wks_file:
+ An OpenEmbedded kickstart file. You can provide
+ your own custom file or use a file from a set of
+ existing files provided with the Yocto Project
+ release.
+
+ required argument:
+ -e IMAGE_NAME, --image-name IMAGE_NAME
+ name of the image to use the artifacts from e.g. core-
+ image-sato
+
+Using an Existing Kickstart File
+================================
+
+If you do not want to create your own kickstart file, you can use an
+existing file provided by the Wic installation. As shipped, kickstart
+files can be found in the :ref:`overview-manual/development-environment:yocto project source repositories` in the
+following two locations::
+
+ poky/meta-yocto-bsp/wic
+ poky/scripts/lib/wic/canned-wks
+
+Use the following command to list the available kickstart files::
+
+ $ wic list images
+ genericx86 Create an EFI disk image for genericx86*
+ beaglebone-yocto Create SD card image for Beaglebone
+ qemuriscv Create qcow2 image for RISC-V QEMU machines
+ mkefidisk Create an EFI disk image
+ qemuloongarch Create qcow2 image for LoongArch QEMU machines
+ directdisk-multi-rootfs Create multi rootfs image using rootfs plugin
+ directdisk Create a 'pcbios' direct disk image
+ efi-bootdisk
+ mkhybridiso Create a hybrid ISO image
+ directdisk-gpt Create a 'pcbios' direct disk image
+ systemd-bootdisk Create an EFI disk image with systemd-boot
+ sdimage-bootpart Create SD card image with a boot partition
+ qemux86-directdisk Create a qemu machine 'pcbios' direct disk image
+ directdisk-bootloader-config Create a 'pcbios' direct disk image with custom bootloader config
+
+When you use an existing file, you
+do not have to use the ``.wks`` extension. Here is an example in Raw
+Mode that uses the ``directdisk`` file::
+
+ $ wic create directdisk -r rootfs_dir -b bootimg_dir \
+ -k kernel_dir -n native_sysroot
+
+Here are the actual partition language commands used in the
+``genericx86.wks`` file to generate an image::
+
+ # short-description: Create an EFI disk image for genericx86*
+ # long-description: Creates a partitioned EFI disk image for genericx86* machines
+ part /boot --source bootimg-efi --sourceparams="loader=grub-efi" --ondisk sda --label msdos --active --align 1024
+ part / --source rootfs --ondisk sda --fstype=ext4 --label platform --align 1024 --use-uuid
+ part swap --ondisk sda --size 44 --label swap1 --fstype=swap
+
+ bootloader --ptable gpt --timeout=5 --append="rootfstype=ext4 console=ttyS0,115200 console=tty0"
+
+Using the Wic Plugin Interface
+==============================
+
+You can extend and specialize Wic functionality by using Wic plugins.
+This section explains the Wic plugin interface.
+
+.. note::
+
+ Wic plugins consist of "source" and "imager" plugins. Imager plugins
+ are beyond the scope of this section.
+
+Source plugins provide a mechanism to customize partition content during
+the Wic image generation process. You can use source plugins to map
+values that you specify using ``--source`` commands in kickstart files
+(i.e. ``*.wks``) to a plugin implementation used to populate a given
+partition.
+
+.. note::
+
+ If you use plugins that have build-time dependencies (e.g. native
+ tools, bootloaders, and so forth) when building a Wic image, you need
+ to specify those dependencies using the :term:`WKS_FILE_DEPENDS`
+ variable.
+
+Source plugins are subclasses defined in plugin files. As shipped, the
+Yocto Project provides several plugin files. You can see the source
+plugin files that ship with the Yocto Project
+:yocto_git:`here </poky/tree/scripts/lib/wic/plugins/source>`.
+Each of these plugin files contains source plugins that are designed to
+populate a specific Wic image partition.
+
+Source plugins are subclasses of the ``SourcePlugin`` class, which is
+defined in the ``poky/scripts/lib/wic/pluginbase.py`` file. For example,
+the ``BootimgEFIPlugin`` source plugin found in the ``bootimg-efi.py``
+file is a subclass of the ``SourcePlugin`` class, which is found in the
+``pluginbase.py`` file.
+
+You can also implement source plugins in a layer outside of the Source
+Repositories (external layer). To do so, be sure that your plugin files
+are located in a directory whose path is
+``scripts/lib/wic/plugins/source/`` within your external layer. When the
+plugin files are located there, the source plugins they contain are made
+available to Wic.
+
+When the Wic implementation needs to invoke a partition-specific
+implementation, it looks for the plugin with the same name as the
+``--source`` parameter used in the kickstart file given to that
+partition. For example, if the partition is set up using the following
+command in a kickstart file::
+
+ part /boot --source bootimg-pcbios --ondisk sda --label boot --active --align 1024
+
+The methods defined as class
+members of the matching source plugin (i.e. ``bootimg-pcbios``) in the
+``bootimg-pcbios.py`` plugin file are used.
+
+To be more concrete, here is the corresponding plugin definition from
+the ``bootimg-pcbios.py`` file for the previous command along with an
+example method called by the Wic implementation when it needs to prepare
+a partition using an implementation-specific function::
+
+ .
+ .
+ .
+ class BootimgPcbiosPlugin(SourcePlugin):
+ """
+ Create MBR boot partition and install syslinux on it.
+ """
+
+ name = 'bootimg-pcbios'
+ .
+ .
+ .
+ @classmethod
+ def do_prepare_partition(cls, part, source_params, creator, cr_workdir,
+ oe_builddir, bootimg_dir, kernel_dir,
+ rootfs_dir, native_sysroot):
+ """
+ Called to do the actual content population for a partition i.e. it
+ 'prepares' the partition to be incorporated into the image.
+ In this case, prepare content for legacy bios boot partition.
+ """
+ .
+ .
+ .
+
+If a
+subclass (plugin) itself does not implement a particular function, Wic
+locates and uses the default version in the superclass. It is for this
+reason that all source plugins are derived from the ``SourcePlugin``
+class.
+
+The ``SourcePlugin`` class defined in the ``pluginbase.py`` file defines
+a set of methods that source plugins can implement or override. Any
+plugins (subclass of ``SourcePlugin``) that do not implement a
+particular method inherit the implementation of the method from the
+``SourcePlugin`` class. For more information, see the ``SourcePlugin``
+class in the ``pluginbase.py`` file for details:
+
+The following list describes the methods implemented in the
+``SourcePlugin`` class:
+
+- ``do_prepare_partition()``: Called to populate a partition with
+ actual content. In other words, the method prepares the final
+ partition image that is incorporated into the disk image.
+
+- ``do_configure_partition()``: Called before
+ ``do_prepare_partition()`` to create custom configuration files for a
+ partition (e.g. syslinux or grub configuration files).
+
+- ``do_install_disk()``: Called after all partitions have been
+ prepared and assembled into a disk image. This method provides a hook
+ to allow finalization of a disk image (e.g. writing an MBR).
+
+- ``do_stage_partition()``: Special content-staging hook called
+ before ``do_prepare_partition()``. This method is normally empty.
+
+ Typically, a partition just uses the passed-in parameters (e.g. the
+ unmodified value of ``bootimg_dir``). However, in some cases, things
+ might need to be more tailored. As an example, certain files might
+ additionally need to be taken from ``bootimg_dir + /boot``. This hook
+ allows those files to be staged in a customized fashion.
+
+ .. note::
+
+ ``get_bitbake_var()`` allows you to access non-standard variables that
+ you might want to use for this behavior.
+
+You can extend the source plugin mechanism. To add more hooks, create
+more source plugin methods within ``SourcePlugin`` and the corresponding
+derived subclasses. The code that calls the plugin methods uses the
+``plugin.get_source_plugin_methods()`` function to find the method or
+methods needed by the call. Retrieval of those methods is accomplished
+by filling up a dict with keys that contain the method names of
+interest. On success, these will be filled in with the actual methods.
+See the Wic implementation for examples and details.
+
+Wic Examples
+============
+
+This section provides several examples that show how to use the Wic
+utility. All the examples assume the list of requirements in the
+":ref:`dev-manual/wic:requirements`" section have been met. The
+examples assume the previously generated image is
+``core-image-minimal``.
+
+Generate an Image using an Existing Kickstart File
+--------------------------------------------------
+
+This example runs in Cooked Mode and uses the ``mkefidisk`` kickstart
+file::
+
+ $ wic create mkefidisk -e core-image-minimal
+ INFO: Building wic-tools...
+ .
+ .
+ .
+ INFO: The new image(s) can be found here:
+ ./mkefidisk-201804191017-sda.direct
+
+ The following build artifacts were used to create the image(s):
+ ROOTFS_DIR: /home/stephano/yocto/build/tmp-glibc/work/qemux86-oe-linux/core-image-minimal/1.0-r0/rootfs
+ BOOTIMG_DIR: /home/stephano/yocto/build/tmp-glibc/work/qemux86-oe-linux/core-image-minimal/1.0-r0/recipe-sysroot/usr/share
+ KERNEL_DIR: /home/stephano/yocto/build/tmp-glibc/deploy/images/qemux86
+ NATIVE_SYSROOT: /home/stephano/yocto/build/tmp-glibc/work/i586-oe-linux/wic-tools/1.0-r0/recipe-sysroot-native
+
+ INFO: The image(s) were created using OE kickstart file:
+ /home/stephano/yocto/openembedded-core/scripts/lib/wic/canned-wks/mkefidisk.wks
+
+The previous example shows the easiest way to create an image by running
+in cooked mode and supplying a kickstart file and the "-e" option to
+point to the existing build artifacts. Your ``local.conf`` file needs to
+have the :term:`MACHINE` variable set
+to the machine you are using, which is "qemux86" in this example.
+
+Once the image builds, the output provides image location, artifact use,
+and kickstart file information.
+
+.. note::
+
+ You should always verify the details provided in the output to make
+ sure that the image was indeed created exactly as expected.
+
+Continuing with the example, you can now write the image from the
+:term:`Build Directory` onto a USB stick, or whatever media for which you
+built your image, and boot from the media. You can write the image by using
+``bmaptool`` or ``dd``::
+
+ $ oe-run-native bmaptool-native bmaptool copy mkefidisk-201804191017-sda.direct /dev/sdX
+
+or ::
+
+ $ sudo dd if=mkefidisk-201804191017-sda.direct of=/dev/sdX
+
+.. note::
+
+ For more information on how to use the ``bmaptool``
+ to flash a device with an image, see the
+ ":ref:`dev-manual/bmaptool:flashing images using \`\`bmaptool\`\``"
+ section.
+
+Using a Modified Kickstart File
+-------------------------------
+
+Because partitioned image creation is driven by the kickstart file, it
+is easy to affect image creation by changing the parameters in the file.
+This next example demonstrates that through modification of the
+``directdisk-gpt`` kickstart file.
+
+As mentioned earlier, you can use the command ``wic list images`` to
+show the list of existing kickstart files. The directory in which the
+``directdisk-gpt.wks`` file resides is
+``scripts/lib/image/canned-wks/``, which is located in the
+:term:`Source Directory` (e.g. ``poky``).
+Because available files reside in this directory, you can create and add
+your own custom files to the directory. Subsequent use of the
+``wic list images`` command would then include your kickstart files.
+
+In this example, the existing ``directdisk-gpt`` file already does most
+of what is needed. However, for the hardware in this example, the image
+will need to boot from ``sdb`` instead of ``sda``, which is what the
+``directdisk-gpt`` kickstart file uses.
+
+The example begins by making a copy of the ``directdisk-gpt.wks`` file
+in the ``scripts/lib/image/canned-wks`` directory and then by changing
+the lines that specify the target disk from which to boot::
+
+ $ cp /home/stephano/yocto/poky/scripts/lib/wic/canned-wks/directdisk-gpt.wks \
+ /home/stephano/yocto/poky/scripts/lib/wic/canned-wks/directdisksdb-gpt.wks
+
+Next, the example modifies the ``directdisksdb-gpt.wks`` file and
+changes all instances of "``--ondisk sda``" to "``--ondisk sdb``". The
+example changes the following two lines and leaves the remaining lines
+untouched::
+
+ part /boot --source bootimg-pcbios --ondisk sdb --label boot --active --align 1024
+ part / --source rootfs --ondisk sdb --fstype=ext4 --label platform --align 1024 --use-uuid
+
+Once the lines are changed, the
+example generates the ``directdisksdb-gpt`` image. The command points
+the process at the ``core-image-minimal`` artifacts for the Next Unit of
+Computing (nuc) :term:`MACHINE` the
+``local.conf``::
+
+ $ wic create directdisksdb-gpt -e core-image-minimal
+ INFO: Building wic-tools...
+ .
+ .
+ .
+ Initialising tasks: 100% |#######################################| Time: 0:00:01
+ NOTE: Executing SetScene Tasks
+ NOTE: Executing RunQueue Tasks
+ NOTE: Tasks Summary: Attempted 1161 tasks of which 1157 didn't need to be rerun and all succeeded.
+ INFO: Creating image(s)...
+
+ INFO: The new image(s) can be found here:
+ ./directdisksdb-gpt-201710090938-sdb.direct
+
+ The following build artifacts were used to create the image(s):
+ ROOTFS_DIR: /home/stephano/yocto/build/tmp-glibc/work/qemux86-oe-linux/core-image-minimal/1.0-r0/rootfs
+ BOOTIMG_DIR: /home/stephano/yocto/build/tmp-glibc/work/qemux86-oe-linux/core-image-minimal/1.0-r0/recipe-sysroot/usr/share
+ KERNEL_DIR: /home/stephano/yocto/build/tmp-glibc/deploy/images/qemux86
+ NATIVE_SYSROOT: /home/stephano/yocto/build/tmp-glibc/work/i586-oe-linux/wic-tools/1.0-r0/recipe-sysroot-native
+
+ INFO: The image(s) were created using OE kickstart file:
+ /home/stephano/yocto/poky/scripts/lib/wic/canned-wks/directdisksdb-gpt.wks
+
+Continuing with the example, you can now directly ``dd`` the image to a
+USB stick, or whatever media for which you built your image, and boot
+the resulting media::
+
+ $ sudo dd if=directdisksdb-gpt-201710090938-sdb.direct of=/dev/sdb
+ 140966+0 records in
+ 140966+0 records out
+ 72174592 bytes (72 MB, 69 MiB) copied, 78.0282 s, 925 kB/s
+ $ sudo eject /dev/sdb
+
+Using a Modified Kickstart File and Running in Raw Mode
+-------------------------------------------------------
+
+This next example manually specifies each build artifact (runs in Raw
+Mode) and uses a modified kickstart file. The example also uses the
+``-o`` option to cause Wic to create the output somewhere other than the
+default output directory, which is the current directory::
+
+ $ wic create test.wks -o /home/stephano/testwic \
+ --rootfs-dir /home/stephano/yocto/build/tmp/work/qemux86-poky-linux/core-image-minimal/1.0-r0/rootfs \
+ --bootimg-dir /home/stephano/yocto/build/tmp/work/qemux86-poky-linux/core-image-minimal/1.0-r0/recipe-sysroot/usr/share \
+ --kernel-dir /home/stephano/yocto/build/tmp/deploy/images/qemux86 \
+ --native-sysroot /home/stephano/yocto/build/tmp/work/i586-poky-linux/wic-tools/1.0-r0/recipe-sysroot-native
+
+ INFO: Creating image(s)...
+
+ INFO: The new image(s) can be found here:
+ /home/stephano/testwic/test-201710091445-sdb.direct
+
+ The following build artifacts were used to create the image(s):
+ ROOTFS_DIR: /home/stephano/yocto/build/tmp-glibc/work/qemux86-oe-linux/core-image-minimal/1.0-r0/rootfs
+ BOOTIMG_DIR: /home/stephano/yocto/build/tmp-glibc/work/qemux86-oe-linux/core-image-minimal/1.0-r0/recipe-sysroot/usr/share
+ KERNEL_DIR: /home/stephano/yocto/build/tmp-glibc/deploy/images/qemux86
+ NATIVE_SYSROOT: /home/stephano/yocto/build/tmp-glibc/work/i586-oe-linux/wic-tools/1.0-r0/recipe-sysroot-native
+
+ INFO: The image(s) were created using OE kickstart file:
+ test.wks
+
+For this example,
+:term:`MACHINE` did not have to be
+specified in the ``local.conf`` file since the artifact is manually
+specified.
+
+Using Wic to Manipulate an Image
+--------------------------------
+
+Wic image manipulation allows you to shorten turnaround time during
+image development. For example, you can use Wic to delete the kernel
+partition of a Wic image and then insert a newly built kernel. This
+saves you time from having to rebuild the entire image each time you
+modify the kernel.
+
+.. note::
+
+ In order to use Wic to manipulate a Wic image as in this example,
+ your development machine must have the ``mtools`` package installed.
+
+The following example examines the contents of the Wic image, deletes
+the existing kernel, and then inserts a new kernel:
+
+#. *List the Partitions:* Use the ``wic ls`` command to list all the
+ partitions in the Wic image::
+
+ $ wic ls tmp/deploy/images/qemux86/core-image-minimal-qemux86.wic
+ Num Start End Size Fstype
+ 1 1048576 25041919 23993344 fat16
+ 2 25165824 72157183 46991360 ext4
+
+ The previous output shows two partitions in the
+ ``core-image-minimal-qemux86.wic`` image.
+
+#. *Examine a Particular Partition:* Use the ``wic ls`` command again
+ but in a different form to examine a particular partition.
+
+ .. note::
+
+ You can get command usage on any Wic command using the following
+ form::
+
+ $ wic help command
+
+
+ For example, the following command shows you the various ways to
+ use the
+ wic ls
+ command::
+
+ $ wic help ls
+
+
+ The following command shows what is in partition one::
+
+ $ wic ls tmp/deploy/images/qemux86/core-image-minimal-qemux86.wic:1
+ Volume in drive : is boot
+ Volume Serial Number is E894-1809
+ Directory for ::/
+
+ libcom32 c32 186500 2017-10-09 16:06
+ libutil c32 24148 2017-10-09 16:06
+ syslinux cfg 220 2017-10-09 16:06
+ vesamenu c32 27104 2017-10-09 16:06
+ vmlinuz 6904608 2017-10-09 16:06
+ 5 files 7 142 580 bytes
+ 16 582 656 bytes free
+
+ The previous output shows five files, with the
+ ``vmlinuz`` being the kernel.
+
+ .. note::
+
+ If you see the following error, you need to update or create a
+ ``~/.mtoolsrc`` file and be sure to have the line "mtools_skip_check=1"
+ in the file. Then, run the Wic command again::
+
+ ERROR: _exec_cmd: /usr/bin/mdir -i /tmp/wic-parttfokuwra ::/ returned '1' instead of 0
+ output: Total number of sectors (47824) not a multiple of sectors per track (32)!
+ Add mtools_skip_check=1 to your .mtoolsrc file to skip this test
+
+
+#. *Remove the Old Kernel:* Use the ``wic rm`` command to remove the
+ ``vmlinuz`` file (kernel)::
+
+ $ wic rm tmp/deploy/images/qemux86/core-image-minimal-qemux86.wic:1/vmlinuz
+
+#. *Add In the New Kernel:* Use the ``wic cp`` command to add the
+ updated kernel to the Wic image. Depending on how you built your
+ kernel, it could be in different places. If you used ``devtool`` and
+ an SDK to build your kernel, it resides in the ``tmp/work`` directory
+ of the extensible SDK. If you used ``make`` to build the kernel, the
+ kernel will be in the ``workspace/sources`` area.
+
+ The following example assumes ``devtool`` was used to build the
+ kernel::
+
+ $ wic cp poky_sdk/tmp/work/qemux86-poky-linux/linux-yocto/4.12.12+git999-r0/linux-yocto-4.12.12+git999/arch/x86/boot/bzImage \
+ poky/build/tmp/deploy/images/qemux86/core-image-minimal-qemux86.wic:1/vmlinuz
+
+ Once the new kernel is added back into the image, you can use the
+ ``dd`` command or :ref:`bmaptool
+ <dev-manual/bmaptool:flashing images using \`\`bmaptool\`\`>`
+ to flash your wic image onto an SD card or USB stick and test your
+ target.
+
+ .. note::
+
+ Using ``bmaptool`` is generally 10 to 20 times faster than using ``dd``.
+
diff --git a/documentation/dev-manual/x32-psabi.rst b/documentation/dev-manual/x32-psabi.rst
new file mode 100644
index 0000000000..92b1f96fa4
--- /dev/null
+++ b/documentation/dev-manual/x32-psabi.rst
@@ -0,0 +1,54 @@
+.. SPDX-License-Identifier: CC-BY-SA-2.0-UK
+
+Using x32 psABI
+***************
+
+x32 processor-specific Application Binary Interface (`x32
+psABI <https://software.intel.com/en-us/node/628948>`__) is a native
+32-bit processor-specific ABI for Intel 64 (x86-64) architectures. An
+ABI defines the calling conventions between functions in a processing
+environment. The interface determines what registers are used and what
+the sizes are for various C data types.
+
+Some processing environments prefer using 32-bit applications even when
+running on Intel 64-bit platforms. Consider the i386 psABI, which is a
+very old 32-bit ABI for Intel 64-bit platforms. The i386 psABI does not
+provide efficient use and access of the Intel 64-bit processor
+resources, leaving the system underutilized. Now consider the x86_64
+psABI. This ABI is newer and uses 64-bits for data sizes and program
+pointers. The extra bits increase the footprint size of the programs,
+libraries, and also increases the memory and file system size
+requirements. Executing under the x32 psABI enables user programs to
+utilize CPU and system resources more efficiently while keeping the
+memory footprint of the applications low. Extra bits are used for
+registers but not for addressing mechanisms.
+
+The Yocto Project supports the final specifications of x32 psABI as
+follows:
+
+- You can create packages and images in x32 psABI format on x86_64
+ architecture targets.
+
+- You can successfully build recipes with the x32 toolchain.
+
+- You can create and boot ``core-image-minimal`` and
+ ``core-image-sato`` images.
+
+- There is RPM Package Manager (RPM) support for x32 binaries.
+
+- There is support for large images.
+
+To use the x32 psABI, you need to edit your ``conf/local.conf``
+configuration file as follows::
+
+ MACHINE = "qemux86-64"
+ DEFAULTTUNE = "x86-64-x32"
+ baselib = "${@d.getVar('BASE_LIB:tune-' + (d.getVar('DEFAULTTUNE') \
+ or 'INVALID')) or 'lib'}"
+
+Once you have set
+up your configuration file, use BitBake to build an image that supports
+the x32 psABI. Here is an example::
+
+ $ bitbake core-image-sato
+
diff --git a/documentation/genindex.rst b/documentation/genindex.rst
index a4af06f656..149e2d36b5 100644
--- a/documentation/genindex.rst
+++ b/documentation/genindex.rst
@@ -1,3 +1,5 @@
+.. SPDX-License-Identifier: CC-BY-SA-2.0-UK
+
=====
Index
=====
diff --git a/documentation/index.rst b/documentation/index.rst
index 6335c707e0..3fef1704a4 100644
--- a/documentation/index.rst
+++ b/documentation/index.rst
@@ -26,6 +26,7 @@ Welcome to the Yocto Project Documentation
:caption: Manuals
Overview and Concepts Manual <overview-manual/index>
+ Contributor Guide <contributor-guide/index>
Reference Manual <ref-manual/index>
Board Support Package (BSP) Developer's guide <bsp-guide/index>
Development Tasks Manual <dev-manual/index>
diff --git a/documentation/kernel-dev/advanced.rst b/documentation/kernel-dev/advanced.rst
index b5290b61b3..4c463503f6 100644
--- a/documentation/kernel-dev/advanced.rst
+++ b/documentation/kernel-dev/advanced.rst
@@ -69,8 +69,7 @@ to indicate the branch.
You can use the :term:`KBRANCH` value to define an alternate branch typically
with a machine override as shown here from the ``meta-yocto-bsp`` layer::
- KBRANCH:edgerouter = "standard/edgerouter"
-
+ KBRANCH:beaglebone-yocto = "standard/beaglebone"
The linux-yocto style recipes can optionally define the following
variables:
@@ -183,7 +182,7 @@ the structure:
order to define a base kernel policy or major kernel type to be
reused across multiple BSPs, place the file in ``ktypes`` directory.
-These distinctions can easily become blurred - especially as out-of-tree
+These distinctions can easily become blurred --- especially as out-of-tree
features slowly merge upstream over time. Also, remember that how the
description files are placed is a purely logical organization and has no
impact on the functionality of the kernel Metadata. There is no impact
@@ -304,8 +303,8 @@ The following listings show the ``build.scc`` file and part of the
.
.
.
- char *dump_write = NULL, *files_source = NULL;
- int opt;
+ char *dump_write = NULL, *files_source = NULL;
+ int opt;
--
2.10.1
@@ -352,17 +351,15 @@ in the manual.
Kernel Types
------------
-A kernel type defines a high-level kernel policy by aggregating
-non-hardware configuration fragments with patches you want to use when
-building a Linux kernel of a specific type (e.g. a real-time kernel).
-Syntactically, kernel types are no different than features as described
-in the ":ref:`kernel-dev/advanced:features`" section. The
-:term:`LINUX_KERNEL_TYPE`
-variable in the kernel recipe selects the kernel type. For example, in
-the ``linux-yocto_4.12.bb`` kernel recipe found in
-``poky/meta/recipes-kernel/linux``, a
-:ref:`require <bitbake:bitbake-user-manual/bitbake-user-manual-metadata:\`\`require\`\` directive>` directive
-includes the ``poky/meta/recipes-kernel/linux/linux-yocto.inc`` file,
+A kernel type defines a high-level kernel policy by aggregating non-hardware
+configuration fragments with patches you want to use when building a Linux
+kernel of a specific type (e.g. a real-time kernel). Syntactically, kernel
+types are no different than features as described in the
+":ref:`kernel-dev/advanced:features`" section. The :term:`LINUX_KERNEL_TYPE`
+variable in the kernel recipe selects the kernel type. For example, in the
+``linux-yocto_4.12.bb`` kernel recipe found in ``poky/meta/recipes-kernel/linux``, a
+:ref:`require <bitbake-user-manual/bitbake-user-manual-metadata:\`\`require\`\` directive>`
+directive includes the ``poky/meta/recipes-kernel/linux/linux-yocto.inc`` file,
which has the following statement that defines the default kernel type::
LINUX_KERNEL_TYPE ??= "standard"
@@ -566,15 +563,7 @@ Example
Many real-world examples are more complex. Like any other ``.scc`` file,
BSP descriptions can aggregate features. Consider the Minnow BSP
definition given the ``linux-yocto-4.4`` branch of the
-``yocto-kernel-cache`` (i.e.
-``yocto-kernel-cache/bsp/minnow/minnow.scc``):
-
-.. note::
-
- Although the Minnow Board BSP is unused, the Metadata remains and is
- being used here just as an example.
-
-::
+``yocto-kernel-cache`` (i.e. ``yocto-kernel-cache/bsp/minnow/minnow.scc``)::
include cfg/x86.scc
include features/eg20t/eg20t.scc
@@ -597,6 +586,11 @@ definition given the ``linux-yocto-4.4`` branch of the
kconf hardware minnow.cfg
kconf hardware minnow-dev.cfg
+.. note::
+
+ Although the Minnow Board BSP is unused, the Metadata remains and is
+ being used here just as an example.
+
The ``minnow.scc`` description file includes a hardware configuration
fragment (``minnow.cfg``) specific to the Minnow BSP as well as several
more general configuration fragments and features enabling hardware
@@ -735,11 +729,10 @@ reside in a separate repository. The OpenEmbedded build system adds the
Metadata to the build as a "type=kmeta" repository through the
:term:`SRC_URI` variable. As an
example, consider the following :term:`SRC_URI` statement from the
-``linux-yocto_4.12.bb`` kernel recipe::
-
- SRC_URI = "git://git.yoctoproject.org/linux-yocto-4.12.git;name=machine;branch=${KBRANCH}; \
- git://git.yoctoproject.org/yocto-kernel-cache;type=kmeta;name=meta;branch=yocto-4.12;destsuffix=${KMETA}"
+``linux-yocto_5.15.bb`` kernel recipe::
+ SRC_URI = "git://git.yoctoproject.org/linux-yocto.git;name=machine;branch=${KBRANCH};protocol=https \
+ git://git.yoctoproject.org/yocto-kernel-cache;type=kmeta;name=meta;branch=yocto-5.15;destsuffix=${KMETA};protocol=https"
``${KMETA}``, in this context, is simply used to name the directory into
which the Git fetcher places the Metadata. This behavior is no different
diff --git a/documentation/kernel-dev/common.rst b/documentation/kernel-dev/common.rst
index 547da8a178..fda41694dc 100644
--- a/documentation/kernel-dev/common.rst
+++ b/documentation/kernel-dev/common.rst
@@ -52,8 +52,8 @@ image and ready to make modifications as described in the
":ref:`kernel-dev/common:using \`\`devtool\`\` to patch the kernel`"
section:
-1. *Initialize the BitBake Environment:* Before building an extensible
- SDK, you need to initialize the BitBake build environment by sourcing
+#. *Initialize the BitBake Environment:*
+ you need to initialize the BitBake build environment by sourcing
the build environment script (i.e. :ref:`structure-core-script`)::
$ cd poky
@@ -66,19 +66,15 @@ section:
(i.e. ``poky``) have been cloned using Git and the local repository is named
"poky".
-2. *Prepare Your local.conf File:* By default, the
- :term:`MACHINE` variable is set to
- "qemux86-64", which is fine if you are building for the QEMU emulator
- in 64-bit mode. However, if you are not, you need to set the
+#. *Prepare Your local.conf File:* By default, the :term:`MACHINE` variable
+ is set to "qemux86-64", which is fine if you are building for the QEMU
+ emulator in 64-bit mode. However, if you are not, you need to set the
:term:`MACHINE` variable appropriately in your ``conf/local.conf`` file
- found in the
- :term:`Build Directory` (i.e.
- ``poky/build`` in this example).
+ found in the :term:`Build Directory` (i.e. ``poky/build`` in this example).
Also, since you are preparing to work on the kernel image, you need
- to set the
- :term:`MACHINE_ESSENTIAL_EXTRA_RRECOMMENDS`
- variable to include kernel modules.
+ to set the :term:`MACHINE_ESSENTIAL_EXTRA_RRECOMMENDS` variable to include
+ kernel modules.
In this example we wish to build for qemux86 so we must set the
:term:`MACHINE` variable to "qemux86" and also add the "kernel-modules".
@@ -87,7 +83,7 @@ section:
MACHINE = "qemux86"
MACHINE_ESSENTIAL_EXTRA_RRECOMMENDS += "kernel-modules"
-3. *Create a Layer for Patches:* You need to create a layer to hold
+#. *Create a Layer for Patches:* You need to create a layer to hold
patches created for the kernel image. You can use the
``bitbake-layers create-layer`` command as follows::
@@ -101,16 +97,16 @@ section:
For background information on working with common and BSP layers,
see the
- ":ref:`dev-manual/common-tasks:understanding and creating layers`"
+ ":ref:`dev-manual/layers:understanding and creating layers`"
section in the Yocto Project Development Tasks Manual and the
":ref:`bsp-guide/bsp:bsp layers`" section in the Yocto Project Board
Support (BSP) Developer's Guide, respectively. For information on how to
use the ``bitbake-layers create-layer`` command to quickly set up a layer,
see the
- ":ref:`dev-manual/common-tasks:creating a general layer using the \`\`bitbake-layers\`\` script`"
+ ":ref:`dev-manual/layers:creating a general layer using the \`\`bitbake-layers\`\` script`"
section in the Yocto Project Development Tasks Manual.
-4. *Inform the BitBake Build Environment About Your Layer:* As directed
+#. *Inform the BitBake Build Environment About Your Layer:* As directed
when you created your layer, you need to add the layer to the
:term:`BBLAYERS` variable in the
``bblayers.conf`` file as follows::
@@ -120,67 +116,10 @@ section:
NOTE: Starting bitbake server...
$
-5. *Build the Extensible SDK:* Use BitBake to build the extensible SDK
- specifically for use with images to be run using QEMU::
-
- $ cd poky/build
- $ bitbake core-image-minimal -c populate_sdk_ext
-
- Once
- the build finishes, you can find the SDK installer file (i.e.
- ``*.sh`` file) in the following directory::
-
- poky/build/tmp/deploy/sdk
-
- For this example, the installer file is named
- ``poky-glibc-x86_64-core-image-minimal-i586-toolchain-ext-&DISTRO;.sh``.
-
-6. *Install the Extensible SDK:* Use the following command to install
- the SDK. For this example, install the SDK in the default
- ``poky_sdk`` directory::
-
- $ cd poky/build/tmp/deploy/sdk
- $ ./poky-glibc-x86_64-core-image-minimal-i586-toolchain-ext-&DISTRO;.sh
- Poky (Yocto Project Reference Distro) Extensible SDK installer version &DISTRO;
- ============================================================================
- Enter target directory for SDK (default: poky_sdk):
- You are about to install the SDK to "/home/scottrif/poky_sdk". Proceed [Y/n]? Y
- Extracting SDK......................................done
- Setting it up...
- Extracting buildtools...
- Preparing build system...
- Parsing recipes: 100% |#################################################################| Time: 0:00:52
- Initializing tasks: 100% |############## ###############################################| Time: 0:00:04
- Checking sstate mirror object availability: 100% |######################################| Time: 0:00:00
- Parsing recipes: 100% |#################################################################| Time: 0:00:33
- Initializing tasks: 100% |##############################################################| Time: 0:00:00
- done
- SDK has been successfully set up and is ready to be used.
- Each time you wish to use the SDK in a new shell session, you need to source the environment setup script e.g.
- $ . /home/scottrif/poky_sdk/environment-setup-i586-poky-linux
-
-
-7. *Set Up a New Terminal to Work With the Extensible SDK:* You must set
- up a new terminal to work with the SDK. You cannot use the same
- BitBake shell used to build the installer.
-
- After opening a new shell, run the SDK environment setup script as
- directed by the output from installing the SDK::
-
- $ source poky_sdk/environment-setup-i586-poky-linux
- "SDK environment now set up; additionally you may now run devtool to perform development tasks.
- Run devtool --help for further details.
-
- .. note::
-
- If you get a warning about attempting to use the extensible SDK in
- an environment set up to run BitBake, you did not use a new shell.
-
-8. *Build the Clean Image:* The final step in preparing to work on the
- kernel is to build an initial image using ``devtool`` in the new
- terminal you just set up and initialized for SDK work::
+#. *Build the Clean Image:* The final step in preparing to work on the
+ kernel is to build an initial image using ``bitbake``::
- $ devtool build-image
+ $ bitbake core-image-minimal
Parsing recipes: 100% |##########################################| Time: 0:00:05
Parsing of 830 .bb files complete (0 cached, 830 parsed). 1299 targets, 47 skipped, 0 masked, 0 errors.
WARNING: No packages to add, building image core-image-minimal unmodified
@@ -192,7 +131,6 @@ section:
NOTE: Executing SetScene Tasks
NOTE: Executing RunQueue Tasks
NOTE: Tasks Summary: Attempted 2866 tasks of which 2604 didn't need to be rerun and all succeeded.
- NOTE: Successfully built core-image-minimal. You can find output files in /home/scottrif/poky_sdk/tmp/deploy/images/qemux86
If you were
building for actual hardware and not for emulation, you could flash
@@ -202,7 +140,7 @@ section:
Wiki page.
At this point you have set up to start making modifications to the
-kernel by using the extensible SDK. For a continued example, see the
+kernel. For a continued example, see the
":ref:`kernel-dev/common:using \`\`devtool\`\` to patch the kernel`"
section.
@@ -220,7 +158,7 @@ this procedure leaves you ready to make modifications to the kernel
source as described in the ":ref:`kernel-dev/common:using traditional kernel development to patch the kernel`"
section:
-1. *Initialize the BitBake Environment:* Before you can do anything
+#. *Initialize the BitBake Environment:* Before you can do anything
using BitBake, you need to initialize the BitBake build environment
by sourcing the build environment script (i.e.
:ref:`structure-core-script`).
@@ -228,8 +166,7 @@ section:
checked out for ``poky`` is the Yocto Project &DISTRO_NAME; branch. If
you need to checkout out the &DISTRO_NAME; branch, see the
":ref:`dev-manual/start:checking out by branch in poky`"
- section in the Yocto Project Development Tasks Manual.
- ::
+ section in the Yocto Project Development Tasks Manual::
$ cd poky
$ git branch
@@ -244,14 +181,11 @@ section:
(i.e. ``poky``) have been cloned using Git and the local repository is named
"poky".
-2. *Prepare Your local.conf File:* By default, the
- :term:`MACHINE` variable is set to
- "qemux86-64", which is fine if you are building for the QEMU emulator
- in 64-bit mode. However, if you are not, you need to set the
- :term:`MACHINE` variable appropriately in your ``conf/local.conf`` file
- found in the
- :term:`Build Directory` (i.e.
- ``poky/build`` in this example).
+#. *Prepare Your local.conf File:* By default, the :term:`MACHINE` variable is
+ set to "qemux86-64", which is fine if you are building for the QEMU emulator
+ in 64-bit mode. However, if you are not, you need to set the :term:`MACHINE`
+ variable appropriately in your ``conf/local.conf`` file found in the
+ :term:`Build Directory` (i.e. ``poky/build`` in this example).
Also, since you are preparing to work on the kernel image, you need
to set the
@@ -265,7 +199,7 @@ section:
MACHINE = "qemux86"
MACHINE_ESSENTIAL_EXTRA_RRECOMMENDS += "kernel-modules"
-3. *Create a Layer for Patches:* You need to create a layer to hold
+#. *Create a Layer for Patches:* You need to create a layer to hold
patches created for the kernel image. You can use the
``bitbake-layers create-layer`` command as follows::
@@ -278,16 +212,16 @@ section:
For background information on working with common and BSP layers,
see the
- ":ref:`dev-manual/common-tasks:understanding and creating layers`"
+ ":ref:`dev-manual/layers:understanding and creating layers`"
section in the Yocto Project Development Tasks Manual and the
":ref:`bsp-guide/bsp:bsp layers`" section in the Yocto Project Board
Support (BSP) Developer's Guide, respectively. For information on how to
use the ``bitbake-layers create-layer`` command to quickly set up a layer,
see the
- ":ref:`dev-manual/common-tasks:creating a general layer using the \`\`bitbake-layers\`\` script`"
+ ":ref:`dev-manual/layers:creating a general layer using the \`\`bitbake-layers\`\` script`"
section in the Yocto Project Development Tasks Manual.
-4. *Inform the BitBake Build Environment About Your Layer:* As directed
+#. *Inform the BitBake Build Environment About Your Layer:* As directed
when you created your layer, you need to add the layer to the
:term:`BBLAYERS` variable in the
``bblayers.conf`` file as follows::
@@ -297,7 +231,7 @@ section:
NOTE: Starting bitbake server ...
$
-5. *Create a Local Copy of the Kernel Git Repository:* You can find Git
+#. *Create a Local Copy of the Kernel Git Repository:* You can find Git
repositories of supported Yocto Project kernels organized under
"Yocto Linux Kernel" in the Yocto Project Source Repositories at
:yocto_git:`/`.
@@ -309,16 +243,7 @@ section:
``standard/base`` branch.
The following commands show how to create a local copy of the
- ``linux-yocto-4.12`` kernel and be in the ``standard/base`` branch.
-
- .. note::
-
- The ``linux-yocto-4.12`` kernel can be used with the Yocto Project 2.4
- release and forward.
- You cannot use the ``linux-yocto-4.12`` kernel with releases prior to
- Yocto Project 2.4.
-
- ::
+ ``linux-yocto-4.12`` kernel and be in the ``standard/base`` branch::
$ cd ~
$ git clone git://git.yoctoproject.org/linux-yocto-4.12 --branch standard/base
@@ -330,7 +255,14 @@ section:
Resolving deltas: 100% (5152604/5152604), done. Checking connectivity... done.
Checking out files: 100% (59846/59846), done.
-6. *Create a Local Copy of the Kernel Cache Git Repository:* For
+ .. note::
+
+ The ``linux-yocto-4.12`` kernel can be used with the Yocto Project 2.4
+ release and forward.
+ You cannot use the ``linux-yocto-4.12`` kernel with releases prior to
+ Yocto Project 2.4.
+
+#. *Create a Local Copy of the Kernel Cache Git Repository:* For
simplicity, it is recommended that you create your copy of the kernel
cache Git repository outside of the
:term:`Source Directory`, which is
@@ -364,7 +296,7 @@ layer contains its own :term:`BitBake`
append files (``.bbappend``) and provides a convenient mechanism to
create your own recipe files (``.bb``) as well as store and use kernel
patch files. For background information on working with layers, see the
-":ref:`dev-manual/common-tasks:understanding and creating layers`"
+":ref:`dev-manual/layers:understanding and creating layers`"
section in the Yocto Project Development Tasks Manual.
.. note::
@@ -372,7 +304,7 @@ section in the Yocto Project Development Tasks Manual.
The Yocto Project comes with many tools that simplify tasks you need
to perform. One such tool is the ``bitbake-layers create-layer``
command, which simplifies creating a new layer. See the
- ":ref:`dev-manual/common-tasks:creating a general layer using the \`\`bitbake-layers\`\` script`"
+ ":ref:`dev-manual/layers:creating a general layer using the \`\`bitbake-layers\`\` script`"
section in the Yocto Project Development Tasks Manual for
information on how to use this script to quick set up a new layer.
@@ -381,19 +313,15 @@ following section describes how to create a layer without the aid of
tools. These steps assume creation of a layer named ``mylayer`` in your
home directory:
-1. *Create Structure*: Create the layer's structure::
+#. *Create Structure*: Create the layer's structure::
- $ mkdir meta-mylayer
- $ mkdir meta-mylayer/conf
- $ mkdir meta-mylayer/recipes-kernel
- $ mkdir meta-mylayer/recipes-kernel/linux
- $ mkdir meta-mylayer/recipes-kernel/linux/linux-yocto
+ $ mkdir -p meta-mylayer/conf meta-mylayer/recipes-kernel/linux/linux-yocto
The ``conf`` directory holds your configuration files, while the
``recipes-kernel`` directory holds your append file and eventual
patch files.
-2. *Create the Layer Configuration File*: Move to the
+#. *Create the Layer Configuration File*: Move to the
``meta-mylayer/conf`` directory and create the ``layer.conf`` file as
follows::
@@ -410,7 +338,7 @@ home directory:
Notice ``mylayer`` as part of the last three statements.
-3. *Create the Kernel Recipe Append File*: Move to the
+#. *Create the Kernel Recipe Append File*: Move to the
``meta-mylayer/recipes-kernel/linux`` directory and create the
kernel's append file. This example uses the ``linux-yocto-4.12``
kernel. Thus, the name of the append file is
@@ -418,14 +346,14 @@ home directory:
FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"
- SRC_URI:append = " file://patch-file-one.patch"
- SRC_URI:append = " file://patch-file-two.patch"
- SRC_URI:append = " file://patch-file-three.patch"
+ SRC_URI += "file://patch-file-one.patch"
+ SRC_URI += "file://patch-file-two.patch"
+ SRC_URI += "file://patch-file-three.patch"
The :term:`FILESEXTRAPATHS` and :term:`SRC_URI` statements
enable the OpenEmbedded build system to find patch files. For more
information on using append files, see the
- ":ref:`dev-manual/common-tasks:appending other layers metadata with your layer`"
+ ":ref:`dev-manual/layers:appending other layers metadata with your layer`"
section in the Yocto Project Development Tasks Manual.
Modifying an Existing Recipe
@@ -455,13 +383,13 @@ Creating the Append File
You create this file in your custom layer. You also name it accordingly
based on the linux-yocto recipe you are using. For example, if you are
-modifying the ``meta/recipes-kernel/linux/linux-yocto_4.12.bb`` recipe,
+modifying the ``meta/recipes-kernel/linux/linux-yocto_6.1.bb`` recipe,
the append file will typically be located as follows within your custom
layer:
.. code-block:: none
- your-layer/recipes-kernel/linux/linux-yocto_4.12.bbappend
+ your-layer/recipes-kernel/linux/linux-yocto_6.1.bbappend
The append file should initially extend the
:term:`FILESPATH` search path by
@@ -489,36 +417,31 @@ As an example, consider the following append file used by the BSPs in
.. code-block:: none
- meta-yocto-bsp/recipes-kernel/linux/linux-yocto_4.12.bbappend
+ meta-yocto-bsp/recipes-kernel/linux/linux-yocto_6.1.bbappend
Here are the contents of this file. Be aware that the actual commit ID
strings in this example listing might be different than the actual
-strings in the file from the ``meta-yocto-bsp`` layer upstream.
-::
+strings in the file from the ``meta-yocto-bsp`` layer upstream::
- KBRANCH:genericx86 = "standard/base"
- KBRANCH:genericx86-64 = "standard/base"
+ KBRANCH:genericx86 = "v6.1/standard/base"
+ KBRANCH:genericx86-64 = "v6.1/standard/base"
+ KBRANCH:beaglebone-yocto = "v6.1/standard/beaglebone"
- KMACHINE:genericx86 ?= "common-pc"
- KMACHINE:genericx86-64 ?= "common-pc-64"
- KBRANCH:edgerouter = "standard/edgerouter"
- KBRANCH:beaglebone = "standard/beaglebone"
+ KMACHINE:genericx86 ?= "common-pc"
+ KMACHINE:genericx86-64 ?= "common-pc-64"
+ KMACHINE:beaglebone-yocto ?= "beaglebone"
- SRCREV_machine:genericx86 ?= "d09f2ce584d60ecb7890550c22a80c48b83c2e19"
- SRCREV_machine:genericx86-64 ?= "d09f2ce584d60ecb7890550c22a80c48b83c2e19"
- SRCREV_machine:edgerouter ?= "b5c8cfda2dfe296410d51e131289fb09c69e1e7d"
- SRCREV_machine:beaglebone ?= "b5c8cfda2dfe296410d51e131289fb09c69e1e7d"
+ SRCREV_machine:genericx86 ?= "6ec439b4b456ce929c4c07fe457b5d6a4b468e86"
+ SRCREV_machine:genericx86-64 ?= "6ec439b4b456ce929c4c07fe457b5d6a4b468e86"
+ SRCREV_machine:beaglebone-yocto ?= "423e1996694b61fbfc8ec3bf062fc6461d64fde1"
+ COMPATIBLE_MACHINE:genericx86 = "genericx86"
+ COMPATIBLE_MACHINE:genericx86-64 = "genericx86-64"
+ COMPATIBLE_MACHINE:beaglebone-yocto = "beaglebone-yocto"
- COMPATIBLE_MACHINE:genericx86 = "genericx86"
- COMPATIBLE_MACHINE:genericx86-64 = "genericx86-64"
- COMPATIBLE_MACHINE:edgerouter = "edgerouter"
- COMPATIBLE_MACHINE:beaglebone = "beaglebone"
-
- LINUX_VERSION:genericx86 = "4.12.7"
- LINUX_VERSION:genericx86-64 = "4.12.7"
- LINUX_VERSION:edgerouter = "4.12.10"
- LINUX_VERSION:beaglebone = "4.12.10"
+ LINUX_VERSION:genericx86 = "6.1.30"
+ LINUX_VERSION:genericx86-64 = "6.1.30"
+ LINUX_VERSION:beaglebone-yocto = "6.1.20"
This append file
contains statements used to support several BSPs that ship with the
@@ -555,8 +478,7 @@ For example, suppose you had some configuration options in a file called
``network_configs.cfg``. You can place that file inside a directory
named ``linux-yocto`` and then add a :term:`SRC_URI` statement such as the
following to the append file. When the OpenEmbedded build system builds
-the kernel, the configuration options are picked up and applied.
-::
+the kernel, the configuration options are picked up and applied::
SRC_URI += "file://network_configs.cfg"
@@ -719,12 +641,12 @@ append files, you can direct the OpenEmbedded build system to use a
To specify an "in-tree" ``defconfig`` file, use the following statement
form::
- KBUILD_DEFCONFIG_KMACHINE ?= "defconfig_file"
+ KBUILD_DEFCONFIG:<machine> ?= "defconfig_file"
-Here is an example
-that assigns the :term:`KBUILD_DEFCONFIG` variable based on "raspberrypi2"
-and provides the path to the "in-tree" ``defconfig`` file to be used for
-a Raspberry Pi 2, which is based on the Broadcom 2708/2709 chipset::
+Here is an example that assigns the :term:`KBUILD_DEFCONFIG` variable utilizing
+an override for the "raspberrypi2" :term:`MACHINE` and provides the path to the
+"in-tree" ``defconfig`` file to be used for a Raspberry Pi 2, which is based on
+the Broadcom 2708/2709 chipset::
KBUILD_DEFCONFIG:raspberrypi2 ?= "bcm2709_defconfig"
@@ -744,7 +666,7 @@ Using ``devtool`` to Patch the Kernel
=====================================
The steps in this procedure show you how you can patch the kernel using
-the extensible SDK and ``devtool``.
+``devtool``.
.. note::
@@ -765,9 +687,8 @@ modified image causes the added messages to appear on the emulator's
console. The example is a continuation of the setup procedure found in
the ":ref:`kernel-dev/common:getting ready to develop using \`\`devtool\`\``" Section.
-1. *Check Out the Kernel Source Files:* First you must use ``devtool``
- to checkout the kernel source code in its workspace. Be sure you are
- in the terminal set up to do work with the extensible SDK.
+#. *Check Out the Kernel Source Files:* First you must use ``devtool``
+ to checkout the kernel source code in its workspace.
.. note::
@@ -794,10 +715,10 @@ the ":ref:`kernel-dev/common:getting ready to develop using \`\`devtool\`\``" Se
You can safely ignore these messages. The source code is correctly
checked out.
-2. *Edit the Source Files* Follow these steps to make some simple
+#. *Edit the Source Files* Follow these steps to make some simple
changes to the source files:
- 1. *Change the working directory*: In the previous step, the output
+ #. *Change the working directory*: In the previous step, the output
noted where you can find the source files (e.g.
``poky_sdk/workspace/sources/linux-yocto``). Change to where the
kernel source code is before making your edits to the
@@ -805,7 +726,7 @@ the ":ref:`kernel-dev/common:getting ready to develop using \`\`devtool\`\``" Se
$ cd poky_sdk/workspace/sources/linux-yocto
- 2. *Edit the source file*: Edit the ``init/calibrate.c`` file to have
+ #. *Edit the source file*: Edit the ``init/calibrate.c`` file to have
the following changes::
void calibrate_delay(void)
@@ -825,14 +746,17 @@ the ":ref:`kernel-dev/common:getting ready to develop using \`\`devtool\`\``" Se
.
.
-3. *Build the Updated Kernel Source:* To build the updated kernel
+#. *Build the Updated Kernel Source:* To build the updated kernel
source, use ``devtool``::
$ devtool build linux-yocto
-4. *Create the Image With the New Kernel:* Use the
+#. *Create the Image With the New Kernel:* Use the
``devtool build-image`` command to create a new image that has the
- new kernel.
+ new kernel::
+
+ $ cd ~
+ $ devtool build-image core-image-minimal
.. note::
@@ -842,20 +766,15 @@ the ":ref:`kernel-dev/common:getting ready to develop using \`\`devtool\`\``" Se
:yocto_wiki:`TipsAndTricks/KernelDevelopmentWithEsdk </TipsAndTricks/KernelDevelopmentWithEsdk>`
Wiki Page.
- ::
-
- $ cd ~
- $ devtool build-image core-image-minimal
-
-5. *Test the New Image:* For this example, you can run the new image
+#. *Test the New Image:* For this example, you can run the new image
using QEMU to verify your changes:
- 1. *Boot the image*: Boot the modified image in the QEMU emulator
+ #. *Boot the image*: Boot the modified image in the QEMU emulator
using this command::
$ runqemu qemux86
- 2. *Verify the changes*: Log into the machine using ``root`` with no
+ #. *Verify the changes*: Log into the machine using ``root`` with no
password and then use the following shell command to scroll
through the console's boot output.
@@ -867,7 +786,7 @@ the ":ref:`kernel-dev/common:getting ready to develop using \`\`devtool\`\``" Se
the results of your ``printk`` statements as part of the output
when you scroll down the console window.
-6. *Stage and commit your changes*: Within your eSDK terminal, change
+#. *Stage and commit your changes*: Change
your working directory to where you modified the ``calibrate.c`` file
and use these Git commands to stage and commit your changes::
@@ -876,11 +795,9 @@ the ":ref:`kernel-dev/common:getting ready to develop using \`\`devtool\`\``" Se
$ git add init/calibrate.c
$ git commit -m "calibrate: Add printk example"
-7. *Export the Patches and Create an Append File:* To export your
+#. *Export the Patches and Create an Append File:* To export your
commits as patches and create a ``.bbappend`` file, use the following
- command in the terminal used to work with the extensible SDK. This
- example uses the previously established layer named ``meta-mylayer``.
- ::
+ command. This example uses the previously established layer named ``meta-mylayer``::
$ devtool finish linux-yocto ~/meta-mylayer
@@ -894,10 +811,9 @@ the ":ref:`kernel-dev/common:getting ready to develop using \`\`devtool\`\``" Se
finishes, the patches and the ``.bbappend`` file are located in the
``~/meta-mylayer/recipes-kernel/linux`` directory.
-8. *Build the Image With Your Modified Kernel:* You can now build an
+#. *Build the Image With Your Modified Kernel:* You can now build an
image that includes your kernel patches. Execute the following
- command from your
- :term:`Build Directory` in the terminal
+ command from your :term:`Build Directory` in the terminal
set up to run BitBake::
$ cd poky/build
@@ -907,8 +823,8 @@ Using Traditional Kernel Development to Patch the Kernel
========================================================
The steps in this procedure show you how you can patch the kernel using
-traditional kernel development (i.e. not using ``devtool`` and the
-extensible SDK as described in the
+traditional kernel development (i.e. not using ``devtool``
+as described in the
":ref:`kernel-dev/common:using \`\`devtool\`\` to patch the kernel`"
section).
@@ -933,20 +849,20 @@ found in the
":ref:`kernel-dev/common:getting ready for traditional kernel development`"
Section.
-1. *Edit the Source Files* Prior to this step, you should have used Git
+#. *Edit the Source Files* Prior to this step, you should have used Git
to create a local copy of the repository for your kernel. Assuming
you created the repository as directed in the
":ref:`kernel-dev/common:getting ready for traditional kernel development`"
section, use the following commands to edit the ``calibrate.c`` file:
- 1. *Change the working directory*: You need to locate the source
+ #. *Change the working directory*: You need to locate the source
files in the local copy of the kernel Git repository. Change to
where the kernel source code is before making your edits to the
``calibrate.c`` file::
$ cd ~/linux-yocto-4.12/init
- 2. *Edit the source file*: Edit the ``calibrate.c`` file to have the
+ #. *Edit the source file*: Edit the ``calibrate.c`` file to have the
following changes::
void calibrate_delay(void)
@@ -966,7 +882,7 @@ Section.
.
.
-2. *Stage and Commit Your Changes:* Use standard Git commands to stage
+#. *Stage and Commit Your Changes:* Use standard Git commands to stage
and commit the changes you just made::
$ git add calibrate.c
@@ -976,7 +892,7 @@ Section.
stage and commit your changes, the OpenEmbedded Build System will not
pick up the changes.
-3. *Update Your local.conf File to Point to Your Source Files:* In
+#. *Update Your local.conf File to Point to Your Source Files:* In
addition to your ``local.conf`` file specifying to use
"kernel-modules" and the "qemux86" machine, it must also point to the
updated kernel source files. Add
@@ -1000,21 +916,21 @@ Section.
be sure to specify the correct branch and machine types. For this
example, the branch is ``standard/base`` and the machine is ``qemux86``.
-4. *Build the Image:* With the source modified, your changes staged and
+#. *Build the Image:* With the source modified, your changes staged and
committed, and the ``local.conf`` file pointing to the kernel files,
you can now use BitBake to build the image::
$ cd poky/build
$ bitbake core-image-minimal
-5. *Boot the image*: Boot the modified image in the QEMU emulator using
+#. *Boot the image*: Boot the modified image in the QEMU emulator using
this command. When prompted to login to the QEMU console, use "root"
with no password::
$ cd poky/build
$ runqemu qemux86
-6. *Look for Your Changes:* As QEMU booted, you might have seen your
+#. *Look for Your Changes:* As QEMU booted, you might have seen your
changes rapidly scroll by. If not, use these commands to see your
changes:
@@ -1026,7 +942,7 @@ Section.
``printk`` statements as part of the output when you scroll down the
console window.
-7. *Generate the Patch File:* Once you are sure that your patch works
+#. *Generate the Patch File:* Once you are sure that your patch works
correctly, you can generate a ``*.patch`` file in the kernel source
repository::
@@ -1034,7 +950,7 @@ Section.
$ git format-patch -1
0001-calibrate.c-Added-some-printk-statements.patch
-8. *Move the Patch File to Your Layer:* In order for subsequent builds
+#. *Move the Patch File to Your Layer:* In order for subsequent builds
to pick up patches, you need to move the patch file you created in
the previous step to your layer ``meta-mylayer``. For this example,
the layer created earlier is located in your home directory as
@@ -1044,9 +960,7 @@ Section.
additional structure to your layer using the following commands::
$ cd ~/meta-mylayer
- $ mkdir recipes-kernel
- $ mkdir recipes-kernel/linux
- $ mkdir recipes-kernel/linux/linux-yocto
+ $ mkdir -p recipes-kernel recipes-kernel/linux/linux-yocto
Once you have created this
hierarchy in your layer, you can move the patch file using the
@@ -1054,7 +968,7 @@ Section.
$ mv ~/linux-yocto-4.12/init/0001-calibrate.c-Added-some-printk-statements.patch ~/meta-mylayer/recipes-kernel/linux/linux-yocto
-9. *Create the Append File:* Finally, you need to create the
+#. *Create the Append File:* Finally, you need to create the
``linux-yocto_4.12.bbappend`` file and insert statements that allow
the OpenEmbedded build system to find the patch. The append file
needs to be in your layer's ``recipes-kernel/linux`` directory and it
@@ -1062,7 +976,7 @@ Section.
contents::
FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"
- SRC_URI:append = "file://0001-calibrate.c-Added-some-printk-statements.patch"
+ SRC_URI += "file://0001-calibrate.c-Added-some-printk-statements.patch"
The :term:`FILESEXTRAPATHS` and :term:`SRC_URI` statements
enable the OpenEmbedded build system to find the patch file.
@@ -1070,7 +984,7 @@ Section.
For more information on append files and patches, see the
":ref:`kernel-dev/common:creating the append file`" and
":ref:`kernel-dev/common:applying patches`" sections. You can also see the
- ":ref:`dev-manual/common-tasks:appending other layers metadata with your layer`"
+ ":ref:`dev-manual/layers:appending other layers metadata with your layer`"
section in the Yocto Project Development Tasks Manual.
.. note::
@@ -1081,7 +995,7 @@ Section.
the following sequence of commands::
$ cd poky/build
- $ bitbake -c cleanall yocto-linux
+ $ bitbake -c cleanall linux-yocto
$ bitbake core-image-minimal -c cleanall
$ bitbake core-image-minimal
$ runqemu qemux86
@@ -1111,15 +1025,14 @@ Using  ``menuconfig``
The easiest way to define kernel configurations is to set them through
the ``menuconfig`` tool. This tool provides an interactive method with
which to set kernel configurations. For general information on
-``menuconfig``, see https://en.wikipedia.org/wiki/Menuconfig.
+``menuconfig``, see :wikipedia:`Menuconfig`.
To use the ``menuconfig`` tool in the Yocto Project development
environment, you must do the following:
- Because you launch ``menuconfig`` using BitBake, you must be sure to
- set up your environment by running the
- :ref:`structure-core-script` script found in
- the :term:`Build Directory`.
+ set up your environment by running the :ref:`structure-core-script` script
+ found in the :term:`Build Directory`.
- You must be sure of the state of your build's configuration in the
:term:`Source Directory`.
@@ -1160,7 +1073,7 @@ Consider an example that configures the "CONFIG_SMP" setting for the
.. note::
The OpenEmbedded build system recognizes this kernel as ``linux-yocto``
- through Metadata (e.g. :term:`PREFERRED_VERSION`\ ``_linux-yocto ?= "12.4%"``).
+ through Metadata (e.g. :term:`PREFERRED_VERSION`\ ``_linux-yocto ?= "4.12%"``).
Once ``menuconfig`` launches, use the interface to navigate through the
selections to find the configuration settings in which you are
@@ -1171,10 +1084,10 @@ can find the option under "Processor Type and Features". To deselect
Multi-Processing Support" and enter "N" to clear the asterisk. When you
are finished, exit out and save the change.
-Saving the selections updates the ``.config`` configuration file. This
-is the file that the OpenEmbedded build system uses to configure the
-kernel during the build. You can find and examine this file in the Build
-Directory in ``tmp/work/``. The actual ``.config`` is located in the
+Saving the selections updates the ``.config`` configuration file. This is the
+file that the OpenEmbedded build system uses to configure the kernel during
+the build. You can find and examine this file in the :term:`Build Directory`
+in ``tmp/work/``. The actual ``.config`` is located in the
area where the specific kernel is built. For example, if you were
building a Linux Yocto kernel based on the ``linux-yocto-4.12`` kernel
and you were building a QEMU image targeted for ``x86`` architecture,
@@ -1300,7 +1213,7 @@ saved, and one freshly created using the ``menuconfig`` tool.
To create a configuration fragment using this method, follow these
steps:
-1. *Complete a Build Through Kernel Configuration:* Complete a build at
+#. *Complete a Build Through Kernel Configuration:* Complete a build at
least through the kernel configuration task as follows::
$ bitbake linux-yocto -c kernel_configme -f
@@ -1310,11 +1223,11 @@ steps:
your build state might become unknown, it is best to run this task
prior to starting ``menuconfig``.
-2. *Launch menuconfig:* Run the ``menuconfig`` command::
+#. *Launch menuconfig:* Run the ``menuconfig`` command::
$ bitbake linux-yocto -c menuconfig
-3. *Create the Configuration Fragment:* Run the ``diffconfig`` command
+#. *Create the Configuration Fragment:* Run the ``diffconfig`` command
to prepare a configuration fragment. The resulting file
``fragment.cfg`` is placed in the
``${``\ :term:`WORKDIR`\ ``}``
@@ -1376,7 +1289,7 @@ In order to run this task, you must have an existing ``.config`` file.
See the ":ref:`kernel-dev/common:using \`\`menuconfig\`\``" section for
information on how to create a configuration file.
-Following is sample output from the ``do_kernel_configcheck`` task:
+Here is sample output from the :ref:`ref-tasks-kernel_configcheck` task:
.. code-block:: none
@@ -1456,12 +1369,12 @@ possible by reading the output of the kernel configuration fragment
audit, noting any issues, making changes to correct the issues, and then
repeating.
-As part of the kernel build process, the ``do_kernel_configcheck`` task
+As part of the kernel build process, the :ref:`ref-tasks-kernel_configcheck` task
runs. This task validates the kernel configuration by checking the final
``.config`` file against the input files. During the check, the task
produces warning messages for the following issues:
-- Requested options that did not make the final ``.config`` file.
+- Requested options that did not make it into the final ``.config`` file.
- Configuration items that appear twice in the same configuration
fragment.
@@ -1485,18 +1398,18 @@ configuration.
To streamline the configuration, do the following:
-1. *Use a Working Configuration:* Start with a full configuration that
+#. *Use a Working Configuration:* Start with a full configuration that
you know works. Be sure the configuration builds and boots
successfully. Use this configuration file as your baseline.
-2. *Run Configure and Check Tasks:* Separately run the
- ``do_kernel_configme`` and ``do_kernel_configcheck`` tasks::
+#. *Run Configure and Check Tasks:* Separately run the
+ :ref:`ref-tasks-kernel_configme` and :ref:`ref-tasks-kernel_configcheck` tasks::
$ bitbake linux-yocto -c kernel_configme -f
$ bitbake linux-yocto -c kernel_configcheck -f
-3. *Process the Results:* Take the resulting list of files from the
- ``do_kernel_configcheck`` task warnings and do the following:
+#. *Process the Results:* Take the resulting list of files from the
+ :ref:`ref-tasks-kernel_configcheck` task warnings and do the following:
- Drop values that are redefined in the fragment but do not change
the final ``.config`` file.
@@ -1508,9 +1421,9 @@ To streamline the configuration, do the following:
- Remove repeated and invalid options.
-4. *Re-Run Configure and Check Tasks:* After you have worked through the
+#. *Re-Run Configure and Check Tasks:* After you have worked through the
output of the kernel configuration audit, you can re-run the
- ``do_kernel_configme`` and ``do_kernel_configcheck`` tasks to see the
+ :ref:`ref-tasks-kernel_configme` and :ref:`ref-tasks-kernel_configcheck` tasks to see the
results of your changes. If you have more issues, you can deal with
them as described in the previous step.
@@ -1539,20 +1452,20 @@ If you build a kernel image and the version string has a "+" or a
"-dirty" at the end, it means there are uncommitted modifications in the kernel's
source directory. Follow these steps to clean up the version string:
-1. *Discover the Uncommitted Changes:* Go to the kernel's locally cloned
+#. *Discover the Uncommitted Changes:* Go to the kernel's locally cloned
Git repository (source directory) and use the following Git command
to list the files that have been changed, added, or removed::
$ git status
-2. *Commit the Changes:* You should commit those changes to the kernel
+#. *Commit the Changes:* You should commit those changes to the kernel
source tree regardless of whether or not you will save, export, or
use the changes::
$ git add
$ git commit -s -a -m "getting rid of -dirty"
-3. *Rebuild the Kernel Image:* Once you commit the changes, rebuild the
+#. *Rebuild the Kernel Image:* Once you commit the changes, rebuild the
kernel.
Depending on your particular kernel development workflow, the
@@ -1586,18 +1499,18 @@ You can find this recipe in the ``poky`` Git repository:
Here are some basic steps you can use to work with your own sources:
-1. *Create a Copy of the Kernel Recipe:* Copy the
+#. *Create a Copy of the Kernel Recipe:* Copy the
``linux-yocto-custom.bb`` recipe to your layer and give it a
meaningful name. The name should include the version of the Yocto
Linux kernel you are using (e.g. ``linux-yocto-myproject_4.12.bb``,
where "4.12" is the base version of the Linux kernel with which you
would be working).
-2. *Create a Directory for Your Patches:* In the same directory inside
+#. *Create a Directory for Your Patches:* In the same directory inside
your layer, create a matching directory to store your patches and
configuration files (e.g. ``linux-yocto-myproject``).
-3. *Ensure You Have Configurations:* Make sure you have either a
+#. *Ensure You Have Configurations:* Make sure you have either a
``defconfig`` file or configuration fragment files in your layer.
When you use the ``linux-yocto-custom.bb`` recipe, you must specify a
configuration. If you do not have a ``defconfig`` file, you can run
@@ -1622,7 +1535,7 @@ Here are some basic steps you can use to work with your own sources:
``arch/arm/configs`` and use the one that is the best starting point
for your board).
-4. *Edit the Recipe:* Edit the following variables in your recipe as
+#. *Edit the Recipe:* Edit the following variables in your recipe as
appropriate for your project:
- :term:`SRC_URI`: The
@@ -1671,7 +1584,7 @@ Here are some basic steps you can use to work with your own sources:
COMPATIBLE_MACHINE = "qemux86|qemux86-64"
-5. *Customize Your Recipe as Needed:* Provide further customizations to
+#. *Customize Your Recipe as Needed:* Provide further customizations to
your recipe as needed just as you would customize an existing
linux-yocto recipe. See the
":ref:`ref-manual/devtool-reference:modifying an existing recipe`" section
@@ -1755,19 +1668,17 @@ looks much like the one provided with the ``hello-mod`` template::
SRC := $(shell pwd)
all:
- $(MAKE) -C $(KERNEL_SRC) M=$(SRC)
+ $(MAKE) -C $(KERNEL_SRC) M=$(SRC)
modules_install:
- $(MAKE) -C $(KERNEL_SRC) M=$(SRC) modules_install
+ $(MAKE) -C $(KERNEL_SRC) M=$(SRC) modules_install
...
The important point to note here is the :term:`KERNEL_SRC` variable. The
-:ref:`module <ref-classes-module>` class sets this variable and the
-:term:`KERNEL_PATH` variable to
-``${STAGING_KERNEL_DIR}`` with the necessary Linux kernel build
-information to build modules. If your module ``Makefile`` uses a
-different variable, you might want to override the
-:ref:`ref-tasks-compile` step, or
+:ref:`ref-classes-module` class sets this variable and the :term:`KERNEL_PATH`
+variable to ``${STAGING_KERNEL_DIR}`` with the necessary Linux kernel build
+information to build modules. If your module ``Makefile`` uses a different
+variable, you might want to override the :ref:`ref-tasks-compile` step, or
create a patch to the ``Makefile`` to work with the more typical
:term:`KERNEL_SRC` or :term:`KERNEL_PATH` variables.
@@ -1809,7 +1720,7 @@ tree. Using Git is an efficient way to see what has changed in the tree.
What Changed in a Kernel?
-------------------------
-Following are a few examples that show how to use Git commands to
+Here are a few examples that show how to use Git commands to
examine changes. These examples are by no means the only way to see
changes.
@@ -1829,8 +1740,7 @@ Here is an example that looks at what has changed in the ``emenlow``
branch of the ``linux-yocto-3.19`` kernel. The lower commit range is the
commit associated with the ``standard/base`` branch, while the upper
commit range is the commit associated with the ``standard/emenlow``
-branch.
-::
+branch::
$ git whatchanged origin/standard/base..origin/standard/emenlow
@@ -1904,7 +1814,7 @@ kernel features.
Consider the following example that adds the "test.scc" feature to the
build.
-1. *Create the Feature File:* Create a ``.scc`` file and locate it just
+#. *Create the Feature File:* Create a ``.scc`` file and locate it just
as you would any other patch file, ``.cfg`` file, or fetcher item you
specify in the :term:`SRC_URI` statement.
@@ -1932,19 +1842,19 @@ build.
``linux-yocto`` directory has both the feature ``test.scc`` file and
a similarly named configuration fragment file ``test.cfg``.
-2. *Add the Feature File to SRC_URI:* Add the ``.scc`` file to the
+#. *Add the Feature File to SRC_URI:* Add the ``.scc`` file to the
recipe's :term:`SRC_URI` statement::
- SRC_URI:append = " file://test.scc"
+ SRC_URI += "file://test.scc"
The leading space before the path is important as the path is
appended to the existing path.
-3. *Specify the Feature as a Kernel Feature:* Use the
+#. *Specify the Feature as a Kernel Feature:* Use the
:term:`KERNEL_FEATURES` statement to specify the feature as a kernel
feature::
- KERNEL_FEATURES:append = " test.scc"
+ KERNEL_FEATURES += "test.scc"
The OpenEmbedded build
system processes the kernel feature when it builds the kernel.
diff --git a/documentation/kernel-dev/concepts-appx.rst b/documentation/kernel-dev/concepts-appx.rst
index b3a2f3abbf..6a2fe4bb0b 100644
--- a/documentation/kernel-dev/concepts-appx.rst
+++ b/documentation/kernel-dev/concepts-appx.rst
@@ -117,7 +117,7 @@ upstream Linux kernel development and are managed by the Yocto Project
team's Yocto Linux kernel development strategy. It is the Yocto Project
team's policy to not back-port minor features to the released Yocto
Linux kernel. They only consider back-porting significant technological
-jumps - and, that is done after a complete gap analysis. The reason
+jumps --- and, that is done after a complete gap analysis. The reason
for this policy is that back-porting any small to medium sized change
from an evolving Linux kernel can easily create mismatches,
incompatibilities and very subtle errors.
@@ -319,8 +319,7 @@ image.
The following figure shows the temporary file structure created on your
host system when you build the kernel using BitBake. This
-:term:`Build Directory` contains all the
-source files used during the build.
+:term:`Build Directory` contains all the source files used during the build.
.. image:: figures/kernel-overview-2-generic.png
:align: center
diff --git a/documentation/kernel-dev/faq.rst b/documentation/kernel-dev/faq.rst
index e40e3ff372..4dffa90dbd 100644
--- a/documentation/kernel-dev/faq.rst
+++ b/documentation/kernel-dev/faq.rst
@@ -36,9 +36,9 @@ How do I install/not-install the kernel image on the root filesystem?
The kernel image (e.g. ``vmlinuz``) is provided by the
``kernel-image`` package. Image recipes depend on ``kernel-base``. To
specify whether or not the kernel image is installed in the generated
-root filesystem, override ``RDEPENDS:${KERNEL_PACKAGE_NAME}-base`` to include or not
+root filesystem, override ``RRECOMMENDS:${KERNEL_PACKAGE_NAME}-base`` to include or not
include "kernel-image". See the
-":ref:`dev-manual/common-tasks:appending other layers metadata with your layer`"
+":ref:`dev-manual/layers:appending other layers metadata with your layer`"
section in the
Yocto Project Development Tasks Manual for information on how to use an
append file to override metadata.
diff --git a/documentation/kernel-dev/intro.rst b/documentation/kernel-dev/intro.rst
index b9ce7f241c..a663733a1d 100644
--- a/documentation/kernel-dev/intro.rst
+++ b/documentation/kernel-dev/intro.rst
@@ -87,7 +87,7 @@ understand the following documentation:
as described in the Yocto Project Application Development and the
Extensible Software Development Kit (eSDK) manual.
-- The ":ref:`dev-manual/common-tasks:understanding and creating layers`"
+- The ":ref:`dev-manual/layers:understanding and creating layers`"
section in the Yocto Project Development Tasks Manual.
- The ":ref:`kernel-dev/intro:kernel modification workflow`" section.
@@ -108,19 +108,19 @@ general information and references for further information.
.. image:: figures/kernel-dev-flow.png
:width: 100%
-1. *Set up Your Host Development System to Support Development Using the
+#. *Set up Your Host Development System to Support Development Using the
Yocto Project*: See the ":doc:`/dev-manual/start`" section in
the Yocto Project Development Tasks Manual for options on how to get
a build host ready to use the Yocto Project.
-2. *Set Up Your Host Development System for Kernel Development:* It is
- recommended that you use ``devtool`` and an extensible SDK for kernel
+#. *Set Up Your Host Development System for Kernel Development:* It is
+ recommended that you use ``devtool`` for kernel
development. Alternatively, you can use traditional kernel
development methods with the Yocto Project. Either way, there are
steps you need to take to get the development environment ready.
- Using ``devtool`` and the eSDK requires that you have a clean build
- of the image and that you are set up with the appropriate eSDK. For
+ Using ``devtool`` requires that you have a clean build
+ of the image. For
more information, see the
":ref:`kernel-dev/common:getting ready to develop using \`\`devtool\`\``"
section.
@@ -131,10 +131,10 @@ general information and references for further information.
":ref:`kernel-dev/common:getting ready for traditional kernel development`"
section.
-3. *Make Changes to the Kernel Source Code if applicable:* Modifying the
+#. *Make Changes to the Kernel Source Code if applicable:* Modifying the
kernel does not always mean directly changing source files. However,
if you have to do this, you make the changes to the files in the
- eSDK's Build Directory if you are using ``devtool``. For more
+ Yocto's :term:`Build Directory` if you are using ``devtool``. For more
information, see the
":ref:`kernel-dev/common:using \`\`devtool\`\` to patch the kernel`"
section.
@@ -144,7 +144,7 @@ general information and references for further information.
":ref:`kernel-dev/common:using traditional kernel development to patch the kernel`"
section.
-4. *Make Kernel Configuration Changes if Applicable:* If your situation
+#. *Make Kernel Configuration Changes if Applicable:* If your situation
calls for changing the kernel's configuration, you can use
:ref:`menuconfig <kernel-dev/common:using \`\`menuconfig\`\`>`,
which allows you to
@@ -155,7 +155,7 @@ general information and references for further information.
.. note::
Try to resist the temptation to directly edit an existing ``.config``
- file, which is found in the Build Directory among the source code
+ file, which is found in the :term:`Build Directory` among the source code
used for the build. Doing so, can produce unexpected results when
the OpenEmbedded build system regenerates the configuration file.
@@ -169,7 +169,7 @@ general information and references for further information.
Additionally, if you are working in a BSP layer and need to modify
the BSP's kernel's configuration, you can use ``menuconfig``.
-5. *Rebuild the Kernel Image With Your Changes:* Rebuilding the kernel
+#. *Rebuild the Kernel Image With Your Changes:* Rebuilding the kernel
image applies your changes. Depending on your target hardware, you
can verify your changes on actual hardware or perhaps QEMU.
diff --git a/documentation/kernel-dev/maint-appx.rst b/documentation/kernel-dev/maint-appx.rst
index d968c856f6..53b7376089 100644
--- a/documentation/kernel-dev/maint-appx.rst
+++ b/documentation/kernel-dev/maint-appx.rst
@@ -92,11 +92,11 @@ top-level kernel feature or BSP. The following actions effectively
provide the Metadata and create the tree that includes the new feature,
patch, or BSP:
-1. *Pass Feature to the OpenEmbedded Build System:* A top-level kernel
+#. *Pass Feature to the OpenEmbedded Build System:* A top-level kernel
feature is passed to the kernel build subsystem. Normally, this
feature is a BSP for a particular kernel type.
-2. *Locate Feature:* The file that describes the top-level feature is
+#. *Locate Feature:* The file that describes the top-level feature is
located by searching these system directories:
- The in-tree kernel-cache directories, which are located in the
@@ -112,31 +112,31 @@ patch, or BSP:
bsp_root_name-kernel_type.scc
-3. *Expand Feature:* Once located, the feature description is either
+#. *Expand Feature:* Once located, the feature description is either
expanded into a simple script of actions, or into an existing
equivalent script that is already part of the shipped kernel.
-4. *Append Extra Features:* Extra features are appended to the top-level
+#. *Append Extra Features:* Extra features are appended to the top-level
feature description. These features can come from the
:term:`KERNEL_FEATURES`
variable in recipes.
-5. *Locate, Expand, and Append Each Feature:* Each extra feature is
+#. *Locate, Expand, and Append Each Feature:* Each extra feature is
located, expanded and appended to the script as described in step
three.
-6. *Execute the Script:* The script is executed to produce files
+#. *Execute the Script:* The script is executed to produce files
``.scc`` and ``.cfg`` files in appropriate directories of the
``yocto-kernel-cache`` repository. These files are descriptions of
all the branches, tags, patches and configurations that need to be
applied to the base Git repository to completely create the source
(build) branch for the new BSP or feature.
-7. *Clone Base Repository:* The base repository is cloned, and the
+#. *Clone Base Repository:* The base repository is cloned, and the
actions listed in the ``yocto-kernel-cache`` directories are applied
to the tree.
-8. *Perform Cleanup:* The Git repositories are left with the desired
+#. *Perform Cleanup:* The Git repositories are left with the desired
branches checked out and any required branching, patching and tagging
has been performed.
@@ -229,6 +229,5 @@ This behavior means that all the generated files for a particular
machine or BSP are now in the build tree directory. The files include
the final ``.config`` file, all the ``.o`` files, the ``.a`` files, and
so forth. Since each machine or BSP has its own separate
-:term:`Build Directory` in its own separate
-branch of the Git repository, you can easily switch between different
-builds.
+:term:`Build Directory` in its own separate branch of the Git repository,
+you can easily switch between different builds.
diff --git a/documentation/migration-guides/index.rst b/documentation/migration-guides/index.rst
index 4597506d05..d8edd05b89 100644
--- a/documentation/migration-guides/index.rst
+++ b/documentation/migration-guides/index.rst
@@ -12,6 +12,10 @@ to move to one release of the Yocto Project from the previous one.
.. toctree::
migration-general
+ release-5.0
+ release-4.3
+ release-4.2
+ release-4.1
release-4.0
release-3.4
migration-3.3
diff --git a/documentation/migration-guides/migration-1.3.rst b/documentation/migration-guides/migration-1.3.rst
index 6a1755d1dc..95f7e3572b 100644
--- a/documentation/migration-guides/migration-1.3.rst
+++ b/documentation/migration-guides/migration-1.3.rst
@@ -62,7 +62,7 @@ Previously, an inconsistent mix of spaces and tabs existed, which made
extending these functions using ``_append`` or ``_prepend`` complicated
given that Python treats whitespace as syntactically significant. If you
are defining or extending any Python functions (e.g.
-``populate_packages``, ``do_unpack``, ``do_patch`` and so forth) in
+``populate_packages``, :ref:`ref-tasks-unpack`, :ref:`ref-tasks-patch` and so forth) in
custom recipes or classes, you need to ensure you are using consistent
four-space indentation.
@@ -91,11 +91,11 @@ consistency.
nativesdk
~~~~~~~~~
-The suffix ``nativesdk`` is now implemented as a prefix, which
-simplifies a lot of the packaging code for ``nativesdk`` recipes. All
-custom ``nativesdk`` recipes, which are relocatable packages that are
-native to :term:`SDK_ARCH`, and any references need to
-be updated to use ``nativesdk-*`` instead of ``*-nativesdk``.
+The suffix ``nativesdk`` is now implemented as a prefix, which simplifies a lot
+of the packaging code for :ref:`ref-classes-nativesdk` recipes. All custom
+:ref:`ref-classes-nativesdk` recipes, which are relocatable packages that are
+native to :term:`SDK_ARCH`, and any references need to be updated to use
+``nativesdk-*`` instead of ``*-nativesdk``.
.. _migration-1.3-task-recipes:
@@ -109,12 +109,11 @@ automatic upgrade path for most packages. However, you should update
references in your own recipes and configurations as they could be
removed in future releases. You should also rename any custom ``task-*``
recipes to ``packagegroup-*``, and change them to inherit
-``packagegroup`` instead of ``task``, as well as taking the opportunity
-to remove anything now handled by :ref:`ref-classes-packagegroup`, such as
-providing ``-dev`` and ``-dbg`` packages, setting
-:term:`LIC_FILES_CHKSUM`, and so forth. See the
-:ref:`ref-classes-packagegroup` section for
-further details.
+:ref:`ref-classes-packagegroup` instead of ``task``, as well
+as taking the opportunity to remove anything now handled by
+:ref:`ref-classes-packagegroup`, such as providing ``-dev`` and ``-dbg``
+packages, setting :term:`LIC_FILES_CHKSUM`, and so forth. See the
+:ref:`ref-classes-packagegroup` section for further details.
.. _migration-1.3-image-features:
diff --git a/documentation/migration-guides/migration-1.4.rst b/documentation/migration-guides/migration-1.4.rst
index baf3c08379..6db2a035b8 100644
--- a/documentation/migration-guides/migration-1.4.rst
+++ b/documentation/migration-guides/migration-1.4.rst
@@ -1,3 +1,5 @@
+.. SPDX-License-Identifier: CC-BY-SA-2.0-UK
+
Release 1.4 (dylan)
===================
@@ -83,7 +85,7 @@ create an append file for the ``init-ifupdown`` recipe instead, which
you can find in the :term:`Source Directory` at
``meta/recipes-core/init-ifupdown``. For information on how to use
append files, see the
-":ref:`dev-manual/common-tasks:appending other layers metadata with your layer`"
+":ref:`dev-manual/layers:appending other layers metadata with your layer`"
section in the Yocto Project Development Tasks Manual.
.. _migration-1.4-remote-debugging:
diff --git a/documentation/migration-guides/migration-1.5.rst b/documentation/migration-guides/migration-1.5.rst
index 93db14c3ba..c8f3cbc165 100644
--- a/documentation/migration-guides/migration-1.5.rst
+++ b/documentation/migration-guides/migration-1.5.rst
@@ -1,3 +1,5 @@
+.. SPDX-License-Identifier: CC-BY-SA-2.0-UK
+
Release 1.5 (dora)
==================
@@ -26,8 +28,7 @@ provide packages for these, you can install and use the Buildtools
tarball, which provides an SDK-like environment containing them.
For more information on this requirement, see the
-":ref:`ref-manual/system-requirements:required git, tar, python and gcc versions`"
-section.
+":ref:`system-requirements-buildtools`" section.
.. _migration-1.5-atom-pc-bsp:
@@ -94,9 +95,8 @@ The following changes have been made to the package QA checks:
this file within :ref:`ref-tasks-install` if "make
install" is installing it.
-- If you are using the :ref:`buildhistory <ref-classes-buildhistory>` class,
- the check for the package
- version going backwards is now controlled using a standard QA check.
+- If you are using the :ref:`ref-classes-buildhistory` class, the check for the
+ package version going backwards is now controlled using a standard QA check.
Thus, if you have customized your :term:`ERROR_QA` or :term:`WARN_QA` values
and still wish to have this check performed, you should add
"version-going-backwards" to your value for one or the other
@@ -126,10 +126,14 @@ The following directory changes exist:
:term:`DEPLOY_DIR_IMAGE` variable in the external environment.
- When buildhistory is enabled, its output is now written under the
- :term:`Build Directory` rather than
- :term:`TMPDIR`. Doing so makes it easier to delete
- :term:`TMPDIR` and preserve the build history. Additionally, data for
- produced SDKs is now split by :term:`IMAGE_NAME`.
+ :term:`Build Directory` rather than :term:`TMPDIR`. Doing so makes
+ it easier to delete :term:`TMPDIR` and preserve the build history.
+ Additionally, data for produced SDKs is now split by :term:`IMAGE_NAME`.
+
+- When :ref:`ref-classes-buildhistory` is enabled, its output
+ is now written under the :term:`Build Directory` rather than :term:`TMPDIR`.
+ Doing so makes it easier to delete :term:`TMPDIR` and preserve the build
+ history. Additionally, data for produced SDKs is now split by :term:`IMAGE_NAME`.
- The ``pkgdata`` directory produced as part of the packaging process
has been collapsed into a single machine-specific directory. This
@@ -217,8 +221,8 @@ Task Recipes
The previously deprecated ``task.bbclass`` has now been dropped. For
recipes that previously inherited from this class, you should rename
-them from ``task-*`` to ``packagegroup-*`` and inherit packagegroup
-instead.
+them from ``task-*`` to ``packagegroup-*`` and inherit
+:ref:`ref-classes-packagegroup` instead.
For more information, see the ":ref:`ref-classes-packagegroup`" section.
@@ -240,11 +244,11 @@ Automated Image Testing
-----------------------
A new automated image testing framework has been added through the
-:ref:`ref-classes-testimage*` classes. This
+:ref:`ref-classes-testimage` classes. This
framework replaces the older ``imagetest-qemu`` framework.
You can learn more about performing automated image tests in the
-":ref:`dev-manual/common-tasks:performing automated runtime testing`"
+":ref:`dev-manual/runtime-testing:performing automated runtime testing`"
section in the Yocto Project Development Tasks Manual.
.. _migration-1.5-build-history:
@@ -252,7 +256,7 @@ section in the Yocto Project Development Tasks Manual.
Build History
-------------
-Following are changes to Build History:
+The changes to Build History are:
- Installed package sizes: ``installed-package-sizes.txt`` for an image
now records the size of the files installed by each package instead
@@ -267,7 +271,7 @@ Following are changes to Build History:
option for each utility for more information on the new syntax.
For more information on Build History, see the
-":ref:`dev-manual/common-tasks:maintaining build output quality`"
+":ref:`dev-manual/build-quality:maintaining build output quality`"
section in the Yocto Project Development Tasks Manual.
.. _migration-1.5-udev:
@@ -275,7 +279,7 @@ section in the Yocto Project Development Tasks Manual.
``udev``
--------
-Following are changes to ``udev``:
+The changes to ``udev`` are:
- ``udev`` no longer brings in ``udev-extraconf`` automatically through
:term:`RRECOMMENDS`, since this was originally
@@ -319,7 +323,7 @@ Removed and Renamed Recipes
Other Changes
-------------
-Following is a list of short entries describing other changes:
+Here is a list of short entries describing other changes:
- ``run-postinsts``: Make this generic.
diff --git a/documentation/migration-guides/migration-1.6.rst b/documentation/migration-guides/migration-1.6.rst
index 358086560b..916169e836 100644
--- a/documentation/migration-guides/migration-1.6.rst
+++ b/documentation/migration-guides/migration-1.6.rst
@@ -1,3 +1,5 @@
+.. SPDX-License-Identifier: CC-BY-SA-2.0-UK
+
Release 1.6 (daisy)
===================
@@ -9,10 +11,9 @@ Project 1.6 Release (codename "daisy") from the prior release.
``archiver`` Class
------------------
-The :ref:`archiver <ref-classes-archiver>` class has been rewritten
-and its configuration has been simplified. For more details on the
-source archiver, see the
-":ref:`dev-manual/common-tasks:maintaining open source license compliance during your product's lifecycle`"
+The :ref:`ref-classes-archiver` class has been rewritten and its configuration
+has been simplified. For more details on the source archiver, see the
+":ref:`dev-manual/licenses:maintaining open source license compliance during your product's lifecycle`"
section in the Yocto Project Development Tasks Manual.
.. _migration-1.6-packaging-changes:
@@ -147,7 +148,7 @@ NFS mount, an error occurs.
The ``PRINC`` variable has been deprecated and triggers a warning if
detected during a build. For :term:`PR` increments on changes,
use the PR service instead. You can find out more about this service in
-the ":ref:`dev-manual/common-tasks:working with a pr service`"
+the ":ref:`dev-manual/packages:working with a pr service`"
section in the Yocto Project Development Tasks Manual.
.. _migration-1.6-variable-changes-IMAGE_TYPES:
@@ -220,9 +221,9 @@ Package Test (ptest)
Package Tests (ptest) are built but not installed by default. For
information on using Package Tests, see the
-":ref:`dev-manual/common-tasks:testing packages with ptest`"
-section in the Yocto Project Development Tasks Manual. For information on the
-``ptest`` class, see the ":ref:`ref-classes-ptest`" section.
+":ref:`dev-manual/packages:testing packages with ptest`" section in the
+Yocto Project Development Tasks Manual. See also the ":ref:`ref-classes-ptest`"
+section.
.. _migration-1.6-build-changes:
@@ -231,13 +232,13 @@ Build Changes
Separate build and source directories have been enabled by default for
selected recipes where it is known to work and for all
-recipes that inherit the :ref:`cmake <ref-classes-cmake>` class. In
-future releases the :ref:`autotools <ref-classes-autotools>` class
-will enable a separate build directory by default as well. Recipes
+recipes that inherit the :ref:`ref-classes-cmake` class. In
+future releases the :ref:`ref-classes-autotools` class
+will enable a separate :term:`Build Directory` by default as well. Recipes
building Autotools-based software that fails to build with a separate
-build directory should be changed to inherit from the
+:term:`Build Directory` should be changed to inherit from the
:ref:`autotools-brokensep <ref-classes-autotools>` class instead of
-the ``autotools`` or ``autotools_stage``\ classes.
+the :ref:`ref-classes-autotools` or ``autotools_stage`` classes.
.. _migration-1.6-building-qemu-native:
@@ -341,39 +342,39 @@ Removed and Renamed Recipes
The following recipes have been removed:
-- ``packagegroup-toolset-native`` - This recipe is largely unused.
+- ``packagegroup-toolset-native`` --- this recipe is largely unused.
-- ``linux-yocto-3.8`` - Support for the Linux yocto 3.8 kernel has been
+- ``linux-yocto-3.8`` --- support for the Linux yocto 3.8 kernel has been
dropped. Support for the 3.10 and 3.14 kernels have been added with
the ``linux-yocto-3.10`` and ``linux-yocto-3.14`` recipes.
-- ``ocf-linux`` - This recipe has been functionally replaced using
+- ``ocf-linux`` --- this recipe has been functionally replaced using
``cryptodev-linux``.
-- ``genext2fs`` - ``genext2fs`` is no longer used by the build system
+- ``genext2fs`` --- ``genext2fs`` is no longer used by the build system
and is unmaintained upstream.
-- ``js`` - This provided an ancient version of Mozilla's javascript
+- ``js`` --- this provided an ancient version of Mozilla's javascript
engine that is no longer needed.
-- ``zaurusd`` - The recipe has been moved to the ``meta-handheld``
+- ``zaurusd`` --- the recipe has been moved to the ``meta-handheld``
layer.
-- ``eglibc 2.17`` - Replaced by the ``eglibc 2.19`` recipe.
+- ``eglibc 2.17`` --- replaced by the ``eglibc 2.19`` recipe.
-- ``gcc 4.7.2`` - Replaced by the now stable ``gcc 4.8.2``.
+- ``gcc 4.7.2`` --- replaced by the now stable ``gcc 4.8.2``.
-- ``external-sourcery-toolchain`` - this recipe is now maintained in
+- ``external-sourcery-toolchain`` --- this recipe is now maintained in
the ``meta-sourcery`` layer.
-- ``linux-libc-headers-yocto 3.4+git`` - Now using version 3.10 of the
+- ``linux-libc-headers-yocto 3.4+git`` --- now using version 3.10 of the
``linux-libc-headers`` by default.
-- ``meta-toolchain-gmae`` - This recipe is obsolete.
+- ``meta-toolchain-gmae`` --- this recipe is obsolete.
-- ``packagegroup-core-sdk-gmae`` - This recipe is obsolete.
+- ``packagegroup-core-sdk-gmae`` --- this recipe is obsolete.
-- ``packagegroup-core-standalone-gmae-sdk-target`` - This recipe is
+- ``packagegroup-core-standalone-gmae-sdk-target`` --- this recipe is
obsolete.
.. _migration-1.6-removed-classes:
diff --git a/documentation/migration-guides/migration-1.7.rst b/documentation/migration-guides/migration-1.7.rst
index 88a6855d50..ca8222098a 100644
--- a/documentation/migration-guides/migration-1.7.rst
+++ b/documentation/migration-guides/migration-1.7.rst
@@ -1,3 +1,5 @@
+.. SPDX-License-Identifier: CC-BY-SA-2.0-UK
+
Release 1.7 (dizzy)
===================
@@ -30,8 +32,8 @@ version required on the
build host is now 1.7.8 because the ``--list`` option is now required by
BitBake's Git fetcher. As always, if your host distribution does not
provide a version of Git that meets this requirement, you can use the
-``buildtools-tarball`` that does. See the
-":ref:`ref-manual/system-requirements:required git, tar, python and gcc versions`"
+:term:`buildtools` tarball that does. See the
+":ref:`ref-manual/system-requirements:required git, tar, python, make and gcc versions`"
section for more information.
.. _migration-1.7-autotools-class-changes:
@@ -39,13 +41,11 @@ section for more information.
Autotools Class Changes
-----------------------
-The following :ref:`autotools <ref-classes-autotools>` class changes
-occurred:
+The following :ref:`ref-classes-autotools` class changes occurred:
-- *A separate build directory is now used by default:* The
- :ref:`autotools <ref-classes-autotools>` class has been
- changed to use a directory for building
- (:term:`B`), which is separate from the source directory
+- *A separate :term:`Build Directory` is now used by default:* The
+ :ref:`ref-classes-autotools` class has been changed to use a directory for
+ building (:term:`B`), which is separate from the source directory
(:term:`S`). This is commonly referred to as ``B != S``, or
an out-of-tree build.
@@ -54,9 +54,8 @@ occurred:
However, if the software is not capable of being built in this
manner, you will need to either patch the software so that it can
build separately, or you will need to change the recipe to inherit
- the :ref:`autotools-brokensep <ref-classes-autotools>` class
- instead of the :ref:`autotools <ref-classes-autotools>`
- or ``autotools_stage`` classes.
+ the :ref:`autotools-brokensep <ref-classes-autotools>` class instead
+ of the :ref:`ref-classes-autotools` or ``autotools_stage`` classes.
- The ``--foreign`` option is no longer passed to ``automake`` when
running ``autoconf``: This option tells ``automake`` that a
@@ -165,7 +164,7 @@ The following changes have occurred to the QA check process:
more parallel execution. This change is unlikely to be an issue
except for highly customized recipes that disable packaging tasks
themselves by marking them as ``noexec``. For those packages, you
- will need to disable the ``do_package_qa`` task as well.
+ will need to disable the :ref:`ref-tasks-package_qa` task as well.
- Files being overwritten during the
:ref:`ref-tasks-populate_sysroot` task now
@@ -217,7 +216,7 @@ The following miscellaneous change occurred:
should manually remove old "build-id" files from your existing build
history repositories to avoid confusion. For information on the build
history feature, see the
- ":ref:`dev-manual/common-tasks:maintaining build output quality`"
+ ":ref:`dev-manual/build-quality:maintaining build output quality`"
section in the Yocto Project Development Tasks Manual.
diff --git a/documentation/migration-guides/migration-1.8.rst b/documentation/migration-guides/migration-1.8.rst
index 51a13873e2..5cc5f8a047 100644
--- a/documentation/migration-guides/migration-1.8.rst
+++ b/documentation/migration-guides/migration-1.8.rst
@@ -1,3 +1,5 @@
+.. SPDX-License-Identifier: CC-BY-SA-2.0-UK
+
Release 1.8 (fido)
==================
@@ -68,23 +70,22 @@ the following::
Kernel Build Changes
--------------------
-The kernel build process was changed to place the source in a common
-shared work area and to place build artifacts separately in the source
-code tree. In theory, migration paths have been provided for most common
-usages in kernel recipes but this might not work in all cases. In
-particular, users need to ensure that ``${S}`` (source files) and
-``${B}`` (build artifacts) are used correctly in functions such as
-:ref:`ref-tasks-configure` and
-:ref:`ref-tasks-install`. For kernel recipes that do not
-inherit from ``kernel-yocto`` or include ``linux-yocto.inc``, you might
-wish to refer to the ``linux.inc`` file in the ``meta-oe`` layer for the
-kinds of changes you need to make. For reference, here is the
+The kernel build process was changed to place the source in a common shared work
+area and to place build artifacts separately in the source code tree. In theory,
+migration paths have been provided for most common usages in kernel recipes but
+this might not work in all cases. In particular, users need to ensure that
+``${S}`` (source files) and ``${B}`` (build artifacts) are used correctly in
+functions such as :ref:`ref-tasks-configure` and :ref:`ref-tasks-install`. For
+kernel recipes that do not inherit from :ref:`ref-classes-kernel-yocto` or
+include ``linux-yocto.inc``, you might wish to refer to the ``linux.inc`` file
+in the ``meta-oe`` layer for the kinds of changes you need to make. For reference,
+here is the
:oe_git:`commit </meta-openembedded/commit/meta-oe/recipes-kernel/linux/linux.inc?id=fc7132ede27ac67669448d3d2845ce7d46c6a1ee>`
where the ``linux.inc`` file in ``meta-oe`` was updated.
Recipes that rely on the kernel source code and do not inherit the
-module classes might need to add explicit dependencies on the
-``do_shared_workdir`` kernel task, for example::
+:ref:`module <ref-classes-module>` classes might need to add explicit
+dependencies on the :ref:`ref-tasks-shared_workdir` kernel task, for example::
do_configure[depends] += "virtual/kernel:do_shared_workdir"
@@ -121,14 +122,13 @@ need to take corrective steps.
Rebuild Improvements
--------------------
-Changes have been made to the :ref:`base <ref-classes-base>`,
-:ref:`autotools <ref-classes-autotools>`, and
-:ref:`cmake <ref-classes-cmake>` classes to clean out generated files
-when the :ref:`ref-tasks-configure` task needs to be
+Changes have been made to the :ref:`ref-classes-base`,
+:ref:`ref-classes-autotools`, and :ref:`ref-classes-cmake` classes to clean out
+generated files when the :ref:`ref-tasks-configure` task needs to be
re-executed.
One of the improvements is to attempt to run "make clean" during the
-``do_configure`` task if a ``Makefile`` exists. Some software packages
+:ref:`ref-tasks-configure` task if a ``Makefile`` exists. Some software packages
do not provide a working clean target within their make files. If you
have such recipes, you need to set
:term:`CLEANBROKEN` to "1" within the recipe, for example::
diff --git a/documentation/migration-guides/migration-2.0.rst b/documentation/migration-guides/migration-2.0.rst
index 7217853779..13be9846df 100644
--- a/documentation/migration-guides/migration-2.0.rst
+++ b/documentation/migration-guides/migration-2.0.rst
@@ -1,3 +1,5 @@
+.. SPDX-License-Identifier: CC-BY-SA-2.0-UK
+
Release 2.0 (jethro)
====================
@@ -214,7 +216,7 @@ modifications synchronized, it is not always obvious to developers how
to manipulate the Metadata as compared to the source.
Metadata processing has now been removed from the
-:ref:`kernel-yocto <ref-classes-kernel-yocto>` class and the external
+:ref:`ref-classes-kernel-yocto` class and the external
Metadata repository ``yocto-kernel-cache``, which has always been used
to seed the ``linux-yocto`` "meta" branch. This separate ``linux-yocto``
cache repository is now the primary location for this data. Due to this
diff --git a/documentation/migration-guides/migration-2.1.rst b/documentation/migration-guides/migration-2.1.rst
index ae6268d509..18b05b52cc 100644
--- a/documentation/migration-guides/migration-2.1.rst
+++ b/documentation/migration-guides/migration-2.1.rst
@@ -1,3 +1,5 @@
+.. SPDX-License-Identifier: CC-BY-SA-2.0-UK
+
Release 2.1 (krogoth)
=====================
@@ -64,7 +66,7 @@ Makefile Environment Changes
:term:`EXTRA_OEMAKE` now defaults to "" instead of
"-e MAKEFLAGS=". Setting :term:`EXTRA_OEMAKE` to "-e MAKEFLAGS=" by default
was a historical accident that has required many classes (e.g.
-``autotools``, ``module``) and recipes to override this default in order
+:ref:`ref-classes-autotools`, ``module``) and recipes to override this default in order
to work with sensible build systems. When upgrading to the release, you
must edit any recipe that relies upon this old default by either setting
:term:`EXTRA_OEMAKE` back to "-e MAKEFLAGS=" or by explicitly setting any
@@ -98,7 +100,7 @@ breaking FHS.
``ac_cv_sizeof_off_t`` is No Longer Cached in Site Files
--------------------------------------------------------
-For recipes inheriting the :ref:`autotools <ref-classes-autotools>`
+For recipes inheriting the :ref:`ref-classes-autotools`
class, ``ac_cv_sizeof_off_t`` is no longer cached in the site files for
``autoconf``. The reason for this change is because the
``ac_cv_sizeof_off_t`` value is not necessarily static per architecture
@@ -106,14 +108,14 @@ as was previously assumed. Rather, the value changes based on whether
large file support is enabled. For most software that uses ``autoconf``,
this change should not be a problem. However, if you have a recipe that
bypasses the standard :ref:`ref-tasks-configure` task
-from the :ref:`autotools <ref-classes-autotools>` class and the software the recipe is building
+from the :ref:`ref-classes-autotools` class and the software the recipe is building
uses a very old version of ``autoconf``, the recipe might be incapable
-of determining the correct size of ``off_t`` during ``do_configure``.
+of determining the correct size of ``off_t`` during :ref:`ref-tasks-configure`.
The best course of action is to patch the software as necessary to allow
-the default implementation from the :ref:`autotools <ref-classes-autotools>` class to work such
+the default implementation from the :ref:`ref-classes-autotools` class to work such
that ``autoreconf`` succeeds and produces a working configure script,
-and to remove the overridden ``do_configure`` task such that the default
+and to remove the overridden :ref:`ref-tasks-configure` task such that the default
implementation does get used.
.. _migration-2.1-image-generation-split-out-from-filesystem-generation:
@@ -128,17 +130,16 @@ separate :ref:`ref-tasks-image` tasks for clarity both in
operation and in the code.
For most cases, this change does not present any problems. However, if
-you have made customizations that directly modify the ``do_rootfs`` task
-or that mention ``do_rootfs``, you might need to update those changes.
-In particular, if you had added any tasks after ``do_rootfs``, you
+you have made customizations that directly modify the :ref:`ref-tasks-rootfs` task
+or that mention :ref:`ref-tasks-rootfs`, you might need to update those changes.
+In particular, if you had added any tasks after :ref:`ref-tasks-rootfs`, you
should make edits so that those tasks are after the
:ref:`ref-tasks-image-complete` task rather than
-after ``do_rootfs`` so that your added tasks run at the correct
+after :ref:`ref-tasks-rootfs` so that your added tasks run at the correct
time.
-A minor part of this restructuring is that the post-processing
-definitions and functions have been moved from the
-:ref:`image <ref-classes-image>` class to the
+A minor part of this restructuring is that the post-processing definitions and
+functions have been moved from the :ref:`ref-classes-image` class to the
:ref:`rootfs-postcommands <ref-classes-rootfs*>` class. Functionally,
however, they remain unchanged.
@@ -189,18 +190,17 @@ Class Changes
The following classes have changed:
- ``autotools_stage``: Removed because the
- :ref:`autotools <ref-classes-autotools>` class now provides its
+ :ref:`ref-classes-autotools` class now provides its
functionality. Recipes that inherited from ``autotools_stage`` should
- now inherit from ``autotools`` instead.
+ now inherit from :ref:`ref-classes-autotools` instead.
- ``boot-directdisk``: Merged into the ``image-vm`` class. The
``boot-directdisk`` class was rarely directly used. Consequently,
this change should not cause any issues.
-- ``bootimg``: Merged into the
- :ref:`image-live <ref-classes-image-live>` class. The ``bootimg``
- class was rarely directly used. Consequently, this change should not
- cause any issues.
+- ``bootimg``: Merged into the :ref:`ref-classes-image-live` class. The
+ ``bootimg`` class was rarely directly used. Consequently, this change should
+ not cause any issues.
- ``packageinfo``: Removed due to its limited use by the Hob UI, which
has itself been removed.
@@ -255,14 +255,14 @@ The following changes have been made for the Poky distribution:
not need to change anything unless you are relying on this naming
elsewhere.
-- The :ref:`uninative <ref-classes-uninative>` class is now enabled
+- The :ref:`ref-classes-uninative` class is now enabled
by default in Poky. This class attempts to isolate the build system
from the host distribution's C library and makes re-use of native
shared state artifacts across different host distributions practical.
With this class enabled, a tarball containing a pre-built C library
is downloaded at the start of the build.
- The :ref:`uninative <ref-classes-uninative>` class is enabled through the
+ The :ref:`ref-classes-uninative` class is enabled through the
``meta/conf/distro/include/yocto-uninative.inc`` file, which for
those not using the Poky distribution, can include to easily enable
the same functionality.
@@ -343,7 +343,7 @@ This release supports generation of GLib Introspective Repository (GIR)
files through GObject introspection, which is the standard mechanism for
accessing GObject-based software from runtime environments. You can
enable, disable, and test the generation of this data. See the
-":ref:`dev-manual/common-tasks:enabling gobject introspection support`"
+":ref:`dev-manual/gobject-introspection:enabling gobject introspection support`"
section in the Yocto Project Development Tasks Manual for more
information.
@@ -356,9 +356,9 @@ These additional changes exist:
- The minimum Git version has been increased to 1.8.3.1. If your host
distribution does not provide a sufficiently recent version, you can
- install the buildtools, which will provide it. See the
- :ref:`ref-manual/system-requirements:required git, tar, python and gcc versions`
- section for more information on the buildtools tarball.
+ install the :term:`buildtools`, which will provide it. See the
+ :ref:`ref-manual/system-requirements:required git, tar, python, make and gcc versions`
+ section for more information on the :term:`buildtools` tarball.
- The buggy and incomplete support for the RPM version 4 package
manager has been removed. The well-tested and maintained support for
@@ -401,9 +401,9 @@ These additional changes exist:
as these directories are automatically found and added.
- Inaccurate disk and CPU percentage data has been dropped from
- ``buildstats`` output. This data has been replaced with
+ :ref:`ref-classes-buildstats` output. This data has been replaced with
``getrusage()`` data and corrected IO statistics. You will probably
- need to update any custom code that reads the ``buildstats`` data.
+ need to update any custom code that reads the :ref:`ref-classes-buildstats` data.
- The ``meta/conf/distro/include/package_regex.inc`` is now deprecated.
The contents of this file have been moved to individual recipes.
diff --git a/documentation/migration-guides/migration-2.2.rst b/documentation/migration-guides/migration-2.2.rst
index fe7bc2cf55..9d50dc6202 100644
--- a/documentation/migration-guides/migration-2.2.rst
+++ b/documentation/migration-guides/migration-2.2.rst
@@ -1,3 +1,5 @@
+.. SPDX-License-Identifier: CC-BY-SA-2.0-UK
+
Release 2.2 (morty)
===================
@@ -27,7 +29,7 @@ Staging Directories in Sysroot Has Been Simplified
The way directories are staged in sysroot has been simplified and
introduces the new :term:`SYSROOT_DIRS`,
:term:`SYSROOT_DIRS_NATIVE`, and ``SYSROOT_DIRS_BLACKLIST``
-(replaced by :term:`SYSROOT_DIRS_IGNORE` in version 3.5). See the
+(replaced by :term:`SYSROOT_DIRS_IGNORE` in version 4.0). See the
:oe_lists:`v2 patch series on the OE-Core Mailing List
</pipermail/openembedded-core/2016-May/121365.html>`
for additional information.
@@ -71,8 +73,8 @@ Metadata Must Now Use Python 3 Syntax
The metadata is now required to use Python 3 syntax. For help preparing
metadata, see any of the many Python 3 porting guides available.
Alternatively, you can reference the conversion commits for BitBake and
-you can use :term:`OpenEmbedded-Core (OE-Core)` as a guide for changes. Following are
-particular areas of interest:
+you can use :term:`OpenEmbedded-Core (OE-Core)` as a guide for changes.
+Particular areas of interest are:
- subprocess command-line pipes needing locale decoding
@@ -103,7 +105,7 @@ online package-manager support through SMART still require Python 2.
``buildtools-tarball`` Includes Python 3
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-``buildtools-tarball`` now includes Python 3.
+The :term:`buildtools` tarball now includes Python 3.
.. _migration-2.2-uclibc-replaced-by-musl:
@@ -119,11 +121,10 @@ compared to uClibc.
``${B}`` No Longer Default Working Directory for Tasks
------------------------------------------------------
-``${``\ :term:`B`\ ``}`` is no longer the default working
-directory for tasks. Consequently, any custom tasks you define now need
-to either have the
-``[``\ :ref:`dirs <bitbake:bitbake-user-manual/bitbake-user-manual-metadata:variable flags>`\ ``]`` flag
-set, or the task needs to change into the appropriate working directory
+``${``\ :term:`B`\ ``}`` is no longer the default working directory for tasks.
+Consequently, any custom tasks you define now need to either have the
+``[``\ :ref:`dirs <bitbake-user-manual/bitbake-user-manual-metadata:variable flags>`\ ``]``
+flag set, or the task needs to change into the appropriate working directory
manually (e.g using ``cd`` for a shell task).
.. note::
@@ -181,14 +182,8 @@ root filesystem, provides an image, and uses the ``nographic`` option::
$ runqemu qemux86-64 tmp/deploy/images/qemux86-64/core-image-minimal-qemux86-64.ext4 tmp/deploy/images/qemux86-64/bzImage nographic
-Following is a list of variables that can be set in configuration files
-such as ``bsp.conf`` to enable the BSP to be booted by ``runqemu``:
-
-.. note::
-
- "QB" means "QEMU Boot".
-
-::
+Here is a list of variables that can be set in configuration files
+such as ``bsp.conf`` to enable the BSP to be booted by ``runqemu``::
QB_SYSTEM_NAME: QEMU name (e.g. "qemu-system-i386")
QB_OPT_APPEND: Options to append to QEMU (e.g. "-show-cursor")
@@ -223,6 +218,10 @@ follows and run ``runqemu``:
.. note::
+ "QB" means "QEMU Boot".
+
+.. note::
+
For command-line syntax, use ``runqemu help``.
::
@@ -442,7 +441,7 @@ The following miscellaneous changes have occurred:
- :ref:`ref-classes-image`: Renamed COMPRESS(ION) to CONVERSION. This change
means that ``COMPRESSIONTYPES``, ``COMPRESS_DEPENDS`` and
``COMPRESS_CMD`` are deprecated in favor of ``CONVERSIONTYPES``,
- ``CONVERSION_DEPENDS`` and ``CONVERSION_CMD``. The ``COMPRESS*``
+ ``CONVERSION_DEPENDS`` and :term:`CONVERSION_CMD`. The ``COMPRESS*``
variable names will still work in the 2.2 release but metadata that
does not need to be backwards-compatible should be changed to use the
new names as the ``COMPRESS*`` ones will be removed in a future
diff --git a/documentation/migration-guides/migration-2.3.rst b/documentation/migration-guides/migration-2.3.rst
index d49ed474ca..60340b9592 100644
--- a/documentation/migration-guides/migration-2.3.rst
+++ b/documentation/migration-guides/migration-2.3.rst
@@ -1,3 +1,5 @@
+.. SPDX-License-Identifier: CC-BY-SA-2.0-UK
+
Release 2.3 (pyro)
==================
@@ -50,7 +52,7 @@ Consider the following:
post-installation script that is installed by a function added to
:term:`SYSROOT_PREPROCESS_FUNCS`.
- For an example, see the :ref:`pixbufcache <ref-classes-pixbufcache>` class in ``meta/classes/`` in
+ For an example, see the :ref:`ref-classes-pixbufcache` class in ``meta/classes/`` in
the :ref:`overview-manual/development-environment:yocto project source repositories`.
.. note::
@@ -196,9 +198,9 @@ The following changes took place for BitBake:
fetcher passes the new parameter through the ``SVN_SSH`` environment
variable during the :ref:`ref-tasks-fetch` task.
- See the ":ref:`bitbake:bitbake-user-manual/bitbake-user-manual-fetching:subversion (svn) fetcher (\`\`svn://\`\`)`"
- section in the BitBake
- User Manual for additional information.
+ See the
+ ":ref:`bitbake-user-manual/bitbake-user-manual-fetching:subversion (svn) fetcher (\`\`svn://\`\`)`"
+ section in the BitBake User Manual for additional information.
- ``BB_SETSCENE_VERIFY_FUNCTION`` and ``BB_SETSCENE_VERIFY_FUNCTION2``
Removed: Because the mechanism they were part of is no longer
@@ -363,7 +365,7 @@ The following changes have been made to Wic:
.. note::
For more information on Wic, see the
- ":ref:`dev-manual/common-tasks:creating partitioned images using wic`"
+ ":ref:`dev-manual/wic:creating partitioned images using wic`"
section in the Yocto Project Development Tasks Manual.
- *Default Output Directory Changed:* Wic's default output directory is
@@ -400,7 +402,7 @@ The following QA checks have changed:
warning, you need to address missing runtime dependencies.
For additional information, see the
- :ref:`insane <ref-classes-insane>` class and the
+ :ref:`ref-classes-insane` class and the
":ref:`ref-manual/qa-checks:errors and warnings`" section.
.. _migration-2.3-miscellaneous-changes:
@@ -444,7 +446,7 @@ The following miscellaneous changes have occurred:
RSA keys only, and with recent versions of OpenSSH, which deprecates
DSA host keys.
-- The :ref:`buildhistory <ref-classes-buildhistory>` class now
+- The :ref:`ref-classes-buildhistory` class now
correctly uses tabs as separators between all columns in
``installed-package-sizes.txt`` in order to aid import into other
tools.
@@ -481,29 +483,26 @@ The following miscellaneous changes have occurred:
is an unnecessary burden.
If you need to preserve these ``.la`` files (e.g. in a custom
- distribution), you must change
- :term:`INHERIT_DISTRO` such that
- "remove-libtool" is not included in the value.
+ distribution), you must change :term:`INHERIT_DISTRO` such that
+ ":ref:`ref-classes-remove-libtool`" is not included
+ in the value.
- Extensible SDKs built for GCC 5+ now refuse to install on a
distribution where the host GCC version is 4.8 or 4.9. This change
resulted from the fact that the installation is known to fail due to
the way the ``uninative`` shared state (sstate) package is built. See
- the :ref:`uninative <ref-classes-uninative>` class for additional
- information.
+ the :ref:`ref-classes-uninative` class for additional information.
-- All native and nativesdk recipes now use a separate
- :term:`DISTRO_FEATURES` value instead of sharing the value used by
- recipes for the target, in order to avoid unnecessary rebuilds.
+- All :ref:`ref-classes-native` and :ref:`ref-classes-nativesdk` recipes now
+ use a separate :term:`DISTRO_FEATURES` value instead of sharing the value
+ used by recipes for the target, in order to avoid unnecessary rebuilds.
- The :term:`DISTRO_FEATURES` for ``native`` recipes is
- :term:`DISTRO_FEATURES_NATIVE` added to
- an intersection of :term:`DISTRO_FEATURES` and
- :term:`DISTRO_FEATURES_FILTER_NATIVE`.
+ The :term:`DISTRO_FEATURES` for :ref:`ref-classes-native` recipes
+ is :term:`DISTRO_FEATURES_NATIVE` added to an intersection of
+ :term:`DISTRO_FEATURES` and :term:`DISTRO_FEATURES_FILTER_NATIVE`.
- For nativesdk recipes, the corresponding variables are
- :term:`DISTRO_FEATURES_NATIVESDK`
- and
+ For :ref:`ref-classes-nativesdk` recipes, the corresponding
+ variables are :term:`DISTRO_FEATURES_NATIVESDK` and
:term:`DISTRO_FEATURES_FILTER_NATIVESDK`.
- The ``FILESDIR`` variable, which was previously deprecated and rarely
diff --git a/documentation/migration-guides/migration-2.4.rst b/documentation/migration-guides/migration-2.4.rst
index ef5f32e6ef..5d5d601988 100644
--- a/documentation/migration-guides/migration-2.4.rst
+++ b/documentation/migration-guides/migration-2.4.rst
@@ -1,3 +1,5 @@
+.. SPDX-License-Identifier: CC-BY-SA-2.0-UK
+
Release 2.4 (rocko)
===================
@@ -54,7 +56,7 @@ occurred:
when "pam" is in :term:`DISTRO_FEATURES`.
- The ``switch_root`` program is now packaged in a separate
- "util-linux-switch-root" package for small initramfs images that
+ "util-linux-switch-root" package for small :term:`Initramfs` images that
do not need the whole ``util-linux`` package or the busybox
binary, which are both much larger than ``switch_root``. The main
``util-linux`` package has a recommended runtime dependency (i.e.
@@ -89,8 +91,6 @@ occurred:
Removed Recipes
---------------
-The following recipes have been removed:
-
- ``acpitests``: This recipe is not maintained.
- ``autogen-native``: No longer required by Grub, oe-core, or
@@ -195,12 +195,10 @@ Kernel Device Tree Move
-----------------------
Kernel Device Tree support is now easier to enable in a kernel recipe.
-The Device Tree code has moved to a
-:ref:`kernel-devicetree <ref-classes-kernel-devicetree>` class.
+The Device Tree code has moved to a :ref:`ref-classes-kernel-devicetree` class.
Functionality is automatically enabled for any recipe that inherits the
-:ref:`kernel <ref-classes-kernel>` class and sets the
-:term:`KERNEL_DEVICETREE` variable. The
-previous mechanism for doing this,
+:ref:`kernel <ref-classes-kernel>` class and sets the :term:`KERNEL_DEVICETREE`
+variable. The previous mechanism for doing this,
``meta/recipes-kernel/linux/linux-dtb.inc``, is still available to avoid
breakage, but triggers a deprecation warning. Future releases of the
Yocto Project will remove ``meta/recipes-kernel/linux/linux-dtb.inc``.
@@ -213,8 +211,6 @@ recipes you might have. This will avoid breakage in post 2.4 releases.
Package QA Changes
------------------
-The following package QA changes took place:
-
- The "unsafe-references-in-scripts" QA check has been removed.
- If you refer to ``${COREBASE}/LICENSE`` within
@@ -229,8 +225,6 @@ The following package QA changes took place:
``README`` File Changes
-----------------------
-The following are changes to ``README`` files:
-
- The main Poky ``README`` file has been moved to the ``meta-poky``
layer and has been renamed ``README.poky``. A symlink has been
created so that references to the old location work.
@@ -246,8 +240,6 @@ The following are changes to ``README`` files:
Miscellaneous Changes
---------------------
-The following are additional changes:
-
- The ``ROOTFS_PKGMANAGE_BOOTSTRAP`` variable and any references to it
have been removed. You should remove this variable from any custom
recipes.
@@ -269,11 +261,11 @@ The following are additional changes:
from ``meta-poky`` to OE-Core (i.e. from
``meta-poky/conf/distro/include`` to ``meta/conf/distro/include``).
-- The :ref:`buildhistory <ref-classes-buildhistory>` class now makes
+- The :ref:`ref-classes-buildhistory` class now makes
a single commit per build rather than one commit per subdirectory in
the repository. This behavior assumes the commits are enabled with
:term:`BUILDHISTORY_COMMIT` = "1", which
- is typical. Previously, the :ref:`buildhistory <ref-classes-buildhistory>` class made one commit
+ is typical. Previously, the :ref:`ref-classes-buildhistory` class made one commit
per subdirectory in the repository in order to make it easier to see
the changes for a particular subdirectory. To view a particular
change, specify that subdirectory as the last parameter on the
@@ -286,7 +278,7 @@ The following are additional changes:
- BitBake fires multiple "BuildStarted" events when multiconfig is
enabled (one per configuration). For more information, see the
- ":ref:`bitbake:bitbake-user-manual/bitbake-user-manual-metadata:events`"
+ ":ref:`bitbake-user-manual/bitbake-user-manual-metadata:events`"
section in the BitBake User Manual.
- By default, the ``security_flags.inc`` file sets a
@@ -301,7 +293,7 @@ The following are additional changes:
likely be removed in the next Yocto Project release.
- The ``vmdk``, ``vdi``, and ``qcow2`` image file types are now used in
- conjunction with the "wic" image type through ``CONVERSION_CMD``.
+ conjunction with the "wic" image type through :term:`CONVERSION_CMD`.
Consequently, the equivalent image types are now ``wic.vmdk``,
``wic.vdi``, and ``wic.qcow2``, respectively.
diff --git a/documentation/migration-guides/migration-2.5.rst b/documentation/migration-guides/migration-2.5.rst
index abd26809df..facf5110b7 100644
--- a/documentation/migration-guides/migration-2.5.rst
+++ b/documentation/migration-guides/migration-2.5.rst
@@ -1,3 +1,5 @@
+.. SPDX-License-Identifier: CC-BY-SA-2.0-UK
+
Release 2.5 (sumo)
==================
@@ -85,8 +87,6 @@ The following recipes have been removed:
Scripts and Tools Changes
-------------------------
-The following are changes to scripts and tools:
-
- ``yocto-bsp``, ``yocto-kernel``, and ``yocto-layer``: The
``yocto-bsp``, ``yocto-kernel``, and ``yocto-layer`` scripts
previously shipped with poky but not in OpenEmbedded-Core have been
@@ -117,8 +117,6 @@ The following are changes to scripts and tools:
BitBake Changes
---------------
-The following are BitBake changes:
-
- The ``--runall`` option has changed. There are two different
behaviors people might want:
@@ -137,7 +135,7 @@ The following are BitBake changes:
- Several explicit "run this task for all recipes in the dependency
tree" tasks have been removed (e.g. ``fetchall``, ``checkuriall``,
and the ``*all`` tasks provided by the ``distrodata`` and
- :ref:`archiver <ref-classes-archiver>` classes). There is a BitBake option to complete this for
+ :ref:`ref-classes-archiver` classes). There is a BitBake option to complete this for
any arbitrary task. For example::
bitbake <target> -c fetchall
@@ -151,7 +149,7 @@ The following are BitBake changes:
Python and Python 3 Changes
---------------------------
-The following are auto-packaging changes to Python and Python 3:
+Here are auto-packaging changes to Python and Python 3:
The script-managed ``python-*-manifest.inc`` files that were previously
used to generate Python and Python 3 packages have been replaced with a
@@ -185,9 +183,7 @@ change please see :yocto_git:`this commit
Miscellaneous Changes
---------------------
-The following are additional changes:
-
-- The :ref:`kernel <ref-classes-kernel>` class supports building packages for multiple kernels.
+- The :ref:`ref-classes-kernel` class supports building packages for multiple kernels.
If your kernel recipe or ``.bbappend`` file mentions packaging at
all, you should replace references to the kernel in package names
with ``${KERNEL_PACKAGE_NAME}``. For example, if you disable
@@ -195,7 +191,7 @@ The following are additional changes:
``RDEPENDS_kernel-base = ""`` you can avoid warnings using
``RDEPENDS_${KERNEL_PACKAGE_NAME}-base = ""`` instead.
-- The :ref:`buildhistory <ref-classes-buildhistory>` class commits changes to the repository by
+- The :ref:`ref-classes-buildhistory` class commits changes to the repository by
default so you no longer need to set ``BUILDHISTORY_COMMIT = "1"``.
If you want to disable commits you need to set
``BUILDHISTORY_COMMIT = "0"`` in your configuration.
@@ -207,12 +203,12 @@ The following are additional changes:
maintains a full-featured BSP in the ``meta-ti`` layer. This rename
avoids the previous name clash that existed between the two BSPs.
-- The :ref:`update-alternatives <ref-classes-update-alternatives>` class no longer works with SysV ``init``
+- The :ref:`ref-classes-update-alternatives` class no longer works with SysV ``init``
scripts because this usage has been problematic. Also, the
``sysklogd`` recipe no longer uses ``update-alternatives`` because it
is incompatible with other implementations.
-- By default, the :ref:`cmake <ref-classes-cmake>` class uses
+- By default, the :ref:`ref-classes-cmake` class uses
``ninja`` instead of ``make`` for building. This improves build
performance. If a recipe is broken with ``ninja``, then the recipe
can set ``OECMAKE_GENERATOR = "Unix Makefiles"`` to change back to
@@ -261,10 +257,10 @@ The following are additional changes:
``pkg_postinst_ontarget()`` or call
``postinst_intercept delay_to_first_boot`` from ``pkg_postinst()``.
Any failure of a ``pkg_postinst()`` script (including ``exit 1``)
- will trigger a warning during ``do_rootfs``.
+ will trigger a warning during :ref:`ref-tasks-rootfs`.
For more information, see the
- ":ref:`dev-manual/common-tasks:post-installation scripts`"
+ ":ref:`dev-manual/new-recipe:post-installation scripts`"
section in the Yocto Project Development Tasks Manual.
- The ``elf`` image type has been removed. This image type was removed
diff --git a/documentation/migration-guides/migration-2.6.rst b/documentation/migration-guides/migration-2.6.rst
index 11e659de7c..ecb559dd4c 100644
--- a/documentation/migration-guides/migration-2.6.rst
+++ b/documentation/migration-guides/migration-2.6.rst
@@ -1,3 +1,5 @@
+.. SPDX-License-Identifier: CC-BY-SA-2.0-UK
+
Release 2.6 (thud)
==================
@@ -126,16 +128,16 @@ missing from :term:`DEPENDS`).
.. note::
- This change affects classes beyond just the two mentioned (i.e.
- ``distutils`` and ``distutils3``). Any recipe that inherits ``distutils*``
- classes are affected. For example, the ``setuptools`` and ``setuptools3``
+ This change affects classes beyond just the two mentioned (i.e. ``distutils``
+ and ``distutils3``). Any recipe that inherits ``distutils*`` classes are
+ affected. For example, the ``setuptools`` and :ref:`ref-classes-setuptools3`
recipes are affected since they inherit the ``distutils*`` classes.
Fetching these types of dependencies that are not provided in the
sysroot negatively affects the ability to reproduce builds. This type of
fetching is now explicitly disabled. Consequently, any missing
dependencies in Python recipes that use these classes now result in an
-error during the ``do_configure`` task.
+error during the :ref:`ref-tasks-configure` task.
.. _migration-2.6-linux-yocto-configuration-audit-issues-now-correctly-reported:
@@ -208,22 +210,19 @@ The following changes have been made:
``SERIAL_CONSOLE`` Deprecated
-----------------------------
-The :term:`SERIAL_CONSOLE` variable has been
-functionally replaced by the
-:term:`SERIAL_CONSOLES` variable for some time.
-With the Yocto Project 2.6 release, :term:`SERIAL_CONSOLE` has been
-officially deprecated.
+The ``SERIAL_CONSOLE`` variable has been functionally replaced by the
+:term:`SERIAL_CONSOLES` variable for some time. With the Yocto Project 2.6
+release, ``SERIAL_CONSOLE`` has been officially deprecated.
-:term:`SERIAL_CONSOLE` will continue to work as before for the 2.6 release.
+``SERIAL_CONSOLE`` will continue to work as before for the 2.6 release.
However, for the sake of future compatibility, it is recommended that
-you replace all instances of :term:`SERIAL_CONSOLE` with
-:term:`SERIAL_CONSOLES`.
+you replace all instances of ``SERIAL_CONSOLE`` with :term:`SERIAL_CONSOLES`.
.. note::
The only difference in usage is that :term:`SERIAL_CONSOLES`
expects entries to be separated using semicolons as compared to
- :term:`SERIAL_CONSOLE`, which expects spaces.
+ ``SERIAL_CONSOLE``, which expects spaces.
.. _migration-2.6-poky-sets-unknown-configure-option-to-qa-error:
@@ -275,16 +274,16 @@ The following changes have occurred:
specifying list items to remove, be aware that leading and trailing
whitespace resulting from the removal is retained.
- See the ":ref:`bitbake:bitbake-user-manual/bitbake-user-manual-metadata:removal (override style syntax)`"
+ See the ":ref:`bitbake-user-manual/bitbake-user-manual-metadata:removal (override style syntax)`"
section in the BitBake User Manual for a detailed example.
-.. _migration-2.6-systemd-configuration-now-split-out-to-system-conf:
+.. _migration-2.6-systemd-configuration-now-split-out-to-systemd-conf:
``systemd`` Configuration is Now Split Into ``systemd-conf``
------------------------------------------------------------
The configuration for the ``systemd`` recipe has been moved into a
-``system-conf`` recipe. Moving this configuration to a separate recipe
+``systemd-conf`` recipe. Moving this configuration to a separate recipe
avoids the ``systemd`` recipe from becoming machine-specific for cases
where machine-specific configurations need to be applied (e.g. for
``qemu*`` machines).
@@ -315,13 +314,11 @@ This section provides information about automatic testing changes:
exists and has been replaced by the
:term:`TESTIMAGE_AUTO` variable.
-- Inheriting the ``testimage`` and ``testsdk`` Classes: Best
- practices now dictate that you use the
- :term:`IMAGE_CLASSES` variable rather than the
- :term:`INHERIT` variable when you inherit the
- :ref:`testimage <ref-classes-testimage*>` and
- :ref:`testsdk <ref-classes-testsdk>` classes used for automatic
- testing.
+- Inheriting the :ref:`ref-classes-testimage` and :ref:`ref-classes-testsdk`
+ classes: best practices now dictate that you use the :term:`IMAGE_CLASSES`
+ variable rather than the :term:`INHERIT` variable when you inherit the
+ :ref:`ref-classes-testimage` and :ref:`ref-classes-testsdk` classes used
+ for automatic testing.
.. _migration-2.6-openssl-changes:
@@ -343,8 +340,7 @@ BitBake Changes
---------------
The server logfile ``bitbake-cookerdaemon.log`` is now always placed in
-the :term:`Build Directory` instead of the current
-directory.
+the :term:`Build Directory` instead of the current directory.
.. _migration-2.6-security-changes:
@@ -368,7 +364,7 @@ Any failure of a ``pkg_postinst()`` script (including exit 1) triggers
an error during the :ref:`ref-tasks-rootfs` task.
For more information on post-installation behavior, see the
-":ref:`dev-manual/common-tasks:post-installation scripts`"
+":ref:`dev-manual/new-recipe:post-installation scripts`"
section in the Yocto Project Development Tasks Manual.
.. _migration-2.6-python-3-profile-guided-optimizations:
diff --git a/documentation/migration-guides/migration-2.7.rst b/documentation/migration-guides/migration-2.7.rst
index 1b8f1ce1bb..c49d2f05d2 100644
--- a/documentation/migration-guides/migration-2.7.rst
+++ b/documentation/migration-guides/migration-2.7.rst
@@ -1,3 +1,5 @@
+.. SPDX-License-Identifier: CC-BY-SA-2.0-UK
+
Release 2.7 (warrior)
=====================
@@ -172,8 +174,7 @@ The following miscellaneous changes occurred:
- ``base/pixbufcache``: Obsolete ``sstatecompletions`` code has been
removed.
-- :ref:`native <ref-classes-native>` class:
- :term:`RDEPENDS` handling has been enabled.
+- :ref:`ref-classes-native` class: :term:`RDEPENDS` handling has been enabled.
- ``inetutils``: This recipe has rsh disabled.
diff --git a/documentation/migration-guides/migration-3.0.rst b/documentation/migration-guides/migration-3.0.rst
index 1219edf921..67fcac41f7 100644
--- a/documentation/migration-guides/migration-3.0.rst
+++ b/documentation/migration-guides/migration-3.0.rst
@@ -1,3 +1,5 @@
+.. SPDX-License-Identifier: CC-BY-SA-2.0-UK
+
Release 3.0 (zeus)
==================
@@ -47,7 +49,7 @@ The following recipes have been removed.
- ``core-image-lsb-sdk``: Part of removed LSB support.
- ``cve-check-tool``: Functionally replaced by the ``cve-update-db``
- recipe and :ref:`cve-check <ref-classes-cve-check>` class.
+ recipe and :ref:`ref-classes-cve-check` class.
- ``eglinfo``: No longer maintained. ``eglinfo`` from ``mesa-demos`` is
an adequate and maintained alternative.
@@ -108,7 +110,7 @@ Packaging Changes
The following packaging changes have occurred.
-- The `Epiphany <https://en.wikipedia.org/wiki/GNOME_Web>`__ browser
+- The :wikipedia:`Epiphany <GNOME_Web>` browser
has been dropped from ``packagegroup-self-hosted`` as it has not been
needed inside ``build-appliance-image`` for quite some time and was
causing resource problems.
@@ -142,13 +144,13 @@ CVE Checking
------------
``cve-check-tool`` has been functionally replaced by a new
-``cve-update-db`` recipe and functionality built into the :ref:`cve-check <ref-classes-cve-check>`
+``cve-update-db`` recipe and functionality built into the :ref:`ref-classes-cve-check`
class. The result uses NVD JSON data feeds rather than the deprecated
XML feeds that ``cve-check-tool`` was using, supports CVSSv3 scoring,
and makes other improvements.
Additionally, the ``CVE_CHECK_CVE_WHITELIST`` variable has been replaced
-by ``CVE_CHECK_WHITELIST`` (replaced by :term:`CVE_CHECK_IGNORE` in version 3.5).
+by ``CVE_CHECK_WHITELIST`` (replaced by :term:`CVE_CHECK_IGNORE` in version 4.0).
.. _migration-3.0-bitbake-changes:
@@ -216,13 +218,13 @@ The following sanity check changes occurred.
- :term:`SRC_URI` is now checked for usage of two
problematic items:
- - "${PN}" prefix/suffix use - Warnings always appear if ${PN} is
+ - "${PN}" prefix/suffix use --- warnings always appear if ${PN} is
used. You must fix the issue regardless of whether multiconfig or
anything else that would cause prefixing/suffixing to happen.
- - Github archive tarballs - these are not guaranteed to be stable.
+ - Github archive tarballs --- these are not guaranteed to be stable.
Consequently, it is likely that the tarballs will be refreshed and
- thus the SRC_URI checksums will fail to apply. It is recommended
+ thus the :term:`SRC_URI` checksums will fail to apply. It is recommended
that you fetch either an official release tarball or a specific
revision from the actual Git repository instead.
@@ -259,7 +261,9 @@ The following miscellaneous changes have occurred.
- The ``gnome`` class has been removed because it now does very little.
You should update recipes that previously inherited this class to do
- the following: inherit gnomebase gtk-icon-cache gconf mime
+ the following::
+
+ inherit gnomebase gtk-icon-cache gconf mime
- The ``meta/recipes-kernel/linux/linux-dtb.inc`` file has been
removed. This file was previously deprecated in favor of setting
@@ -282,8 +286,8 @@ The following miscellaneous changes have occurred.
- You must change the host distro identifier used in
:term:`NATIVELSBSTRING` to use all lowercase
characters even if it does not contain a version number. This change
- is necessary only if you are not using ``uninative`` and
- :term:`SANITY_TESTED_DISTROS`.
+ is necessary only if you are not using
+ :ref:`ref-classes-uninative` and :term:`SANITY_TESTED_DISTROS`.
- In the ``base-files`` recipe, writing the hostname into
``/etc/hosts`` and ``/etc/hostname`` is now done within the main
diff --git a/documentation/migration-guides/migration-3.1.rst b/documentation/migration-guides/migration-3.1.rst
index e3fdbbe425..fdb959c4af 100644
--- a/documentation/migration-guides/migration-3.1.rst
+++ b/documentation/migration-guides/migration-3.1.rst
@@ -1,3 +1,5 @@
+.. SPDX-License-Identifier: CC-BY-SA-2.0-UK
+
Release 3.1 (dunfell)
=====================
@@ -125,7 +127,7 @@ renamed to ``features_check``; the ``distro_features_check`` class still
exists but generates a warning and redirects to the new class. In
preparation for a future removal of the old class it is recommended that
you update recipes currently inheriting ``distro_features_check`` to
-inherit :ref:`features_check <ref-classes-features_check>` instead.
+inherit :ref:`ref-classes-features_check` instead.
.. _migration-3.1-removed-classes:
@@ -200,7 +202,7 @@ Packaging changes
-----------------
- ``intltool`` has been removed from ``packagegroup-core-sdk`` as it is
- rarely needed to build modern software - gettext can do most of the
+ rarely needed to build modern software --- gettext can do most of the
things it used to be needed for. ``intltool`` has also been removed
from ``packagegroup-core-self-hosted`` as it is not needed to for
standard builds.
@@ -234,14 +236,14 @@ Packaging changes
Additional warnings
-------------------
-Warnings will now be shown at ``do_package_qa`` time in the following
+Warnings will now be shown at :ref:`ref-tasks-package_qa` time in the following
circumstances:
- A recipe installs ``.desktop`` files containing ``MimeType`` keys but
- does not inherit the new ``mime-xdg`` class
+ does not inherit the new :ref:`ref-classes-mime-xdg` class
- A recipe installs ``.xml`` files into ``${datadir}/mime/packages``
- but does not inherit the :ref:`mime <ref-classes-mime>` class
+ but does not inherit the :ref:`ref-classes-mime` class
.. _migration-3.1-x86-live-wic:
diff --git a/documentation/migration-guides/migration-3.2.rst b/documentation/migration-guides/migration-3.2.rst
index a376eced52..c538df04d2 100644
--- a/documentation/migration-guides/migration-3.2.rst
+++ b/documentation/migration-guides/migration-3.2.rst
@@ -1,3 +1,5 @@
+.. SPDX-License-Identifier: CC-BY-SA-2.0-UK
+
Release 3.2 (gatesgarth)
========================
@@ -11,7 +13,7 @@ Minimum system requirements
``gcc`` version 6.0 is now required at minimum on the build host. For older
host distributions where this is not available, you can use the
-``buildtools-extended-tarball`` (easily installable using
+:term:`buildtools-extended` tarball (easily installable using
``scripts/install-buildtools``).
@@ -23,7 +25,7 @@ Removed recipes
The following recipes have been removed:
- ``bjam-native``: replaced by ``boost-build-native``
-- ``avahi-ui``: folded into the main ``avahi`` recipe - the GTK UI can be disabled using :term:`PACKAGECONFIG` for ``avahi``.
+- ``avahi-ui``: folded into the main ``avahi`` recipe --- the GTK UI can be disabled using :term:`PACKAGECONFIG` for ``avahi``.
- ``build-compare``: no longer needed with the removal of the ``packagefeed-stability`` class
- ``dhcp``: obsolete, functionally replaced by ``dhcpcd`` and ``kea``
- ``libmodulemd-v1``: replaced by ``libmodulemd``
@@ -37,7 +39,7 @@ Removed classes
The following classes (.bbclass files) have been removed:
-- ``spdx``: obsolete - the Yocto Project is a strong supporter of SPDX, but this class was old code using a dated approach and had the potential to be misleading. The ``meta-sdpxscanner`` layer is a much more modern and active approach to handling this and is recommended as a replacement.
+- ``spdx``: obsolete --- the Yocto Project is a strong supporter of SPDX, but this class was old code using a dated approach and had the potential to be misleading. The ``meta-sdpxscanner`` layer is a much more modern and active approach to handling this and is recommended as a replacement.
- ``packagefeed-stability``: this class had become obsolete with the advent of hash equivalence and reproducible builds.
@@ -46,7 +48,7 @@ pseudo path filtering and mismatch behaviour
--------------------------------------------
pseudo now operates on a filtered subset of files. This is a significant change
-to the way pseudo operates within OpenEmbedded - by default, pseudo monitors and
+to the way pseudo operates within OpenEmbedded --- by default, pseudo monitors and
logs (adds to its database) any file created or modified whilst in a ``fakeroot``
environment. However, there are large numbers of files that we simply don't care
about the permissions of whilst in that ``fakeroot`` context, for example ${:term:`S`}, ${:term:`B`}, ${:term:`T`},
@@ -60,7 +62,7 @@ pseudo as the interprocess round trip to the server is avoided.
There is a possible complication where some existing recipe may break, for
example, a recipe was found to be writing to ``${B}/install`` for
-``make install`` in ``do_install`` and since ``${B}`` is listed as not to be tracked,
+``make install`` in :ref:`ref-tasks-install` and since ``${B}`` is listed as not to be tracked,
there were errors trying to ``chown root`` for files in this location. Another
example was the ``tcl`` recipe where the source directory :term:`S` is set to a
subdirectory of the source tree but files were written out to the directory
@@ -68,7 +70,7 @@ structure above that subdirectory. For these types of cases in your own recipes,
extend :term:`PSEUDO_IGNORE_PATHS` to cover additional paths that pseudo should not
be monitoring.
-In addition, pseudo's behaviour on mismatches has now been changed - rather
+In addition, pseudo's behaviour on mismatches has now been changed --- rather
than doing what turns out to be a rather dangerous "fixup" if it sees a file
with a different path but the same inode as another file it has previously seen,
pseudo will throw an ``abort()`` and direct you to a :yocto_wiki:`wiki page </Pseudo_Abort>`
@@ -137,10 +139,10 @@ DHCP server/client replaced
The ``dhcp`` software package has become unmaintained and thus has been
functionally replaced by ``dhcpcd`` (client) and ``kea`` (server). You will
-need to replace references to the recipe/package names as appropriate - most
+need to replace references to the recipe/package names as appropriate --- most
commonly, at the package level ``dhcp-client`` should be replaced by
``dhcpcd`` and ``dhcp-server`` should be replaced by ``kea``. If you have any
-custom configuration files for these they will need to be adapted - refer to
+custom configuration files for these they will need to be adapted --- refer to
the upstream documentation for ``dhcpcd`` and ``kea`` for further details.
@@ -175,13 +177,23 @@ errors:
In addition, the following new checks were added and default to triggering an error:
-- :ref:`shebang-size <qa-check-shebang-size>`: Check for shebang (#!) lines longer than 128 characters, which can give an error at runtime depending on the operating system.
+- :ref:`shebang-size <qa-check-shebang-size>`: Check for shebang (#!) lines
+ longer than 128 characters, which can give an error at runtime depending on
+ the operating system.
-- :ref:`unhandled-features-check <qa-check-unhandled-features-check>`: Check if any of the variables supported by the :ref:`features_check <ref-classes-features_check>` class is set while not inheriting the class itself.
+- :ref:`unhandled-features-check <qa-check-unhandled-features-check>`: Check
+ if any of the variables supported by the :ref:`ref-classes-features_check`
+ class is set while not inheriting the class itself.
-- :ref:`missing-update-alternatives <qa-check-missing-update-alternatives>`: Check if the recipe sets the :term:`ALTERNATIVE` variable for any of its packages, and does not inherit the :ref:`update-alternatives <ref-classes-update-alternatives>` class.
+- :ref:`missing-update-alternatives <qa-check-missing-update-alternatives>`:
+ Check if the recipe sets the :term:`ALTERNATIVE` variable for any of its
+ packages, and does not inherit the :ref:`ref-classes-update-alternatives`
+ class.
-- A trailing slash or duplicated slashes in the value of :term:`S` or :term:`B` will now trigger a warning so that they can be removed and path comparisons can be more reliable - remove any instances of these in your recipes if the warning is displayed.
+- A trailing slash or duplicated slashes in the value of :term:`S` or :term:`B`
+ will now trigger a warning so that they can be removed and path comparisons
+ can be more reliable --- remove any instances of these in your recipes if the
+ warning is displayed.
.. _migration-3.2-src-uri-file-globbing:
@@ -191,7 +203,7 @@ Globbing no longer supported in ``file://`` entries in ``SRC_URI``
Globbing (``*`` and ``?`` wildcards) in ``file://`` URLs within :term:`SRC_URI`
did not properly support file checksums, thus changes to the source files
-would not always change the do_fetch task checksum, and consequently would
+would not always change the :ref:`ref-tasks-fetch` task checksum, and consequently would
not ensure that the changed files would be incorporated in subsequent builds.
Unfortunately it is not practical to make globbing work generically here, so
@@ -207,9 +219,18 @@ files into a subdirectory and reference that instead.
deploy class now cleans ``DEPLOYDIR`` before ``do_deploy``
----------------------------------------------------------
-``do_deploy`` as implemented in the :ref:`deploy <ref-classes-deploy>` class now cleans up ${:term:`DEPLOYDIR`} before running, just as ``do_install`` cleans up ${:term:`D`} before running. This reduces the risk of :term:`DEPLOYDIR` being accidentally contaminated by files from previous runs, possibly even with different config, in case of incremental builds.
+:ref:`ref-tasks-deploy` as implemented in the :ref:`ref-classes-deploy` class
+now cleans up ${:term:`DEPLOYDIR`} before running, just as
+:ref:`ref-tasks-install` cleans up ${:term:`D`} before running. This reduces
+the risk of :term:`DEPLOYDIR` being accidentally contaminated by files from
+previous runs, possibly even with different config, in case of incremental
+builds.
-Most recipes and classes that inherit the :ref:`deploy <ref-classes-deploy>` class or interact with ``do_deploy`` are unlikely to be affected by this unless they add ``prefuncs`` to ``do_deploy`` *which also* put files into ``${DEPLOYDIR}`` - these should be refactored to use ``do_deploy_prepend`` instead.
+Most recipes and classes that inherit the :ref:`ref-classes-deploy` class or
+interact with :ref:`ref-tasks-deploy` are unlikely to be affected by this
+unless they add ``prefuncs`` to :ref:`ref-tasks-deploy` *which also* put files
+into ``${DEPLOYDIR}`` --- these should be refactored to use
+``do_deploy_prepend`` instead.
.. _migration-3.2-nativesdk-sdk-provides-dummy:
@@ -217,7 +238,13 @@ Most recipes and classes that inherit the :ref:`deploy <ref-classes-deploy>` cla
Custom SDK / SDK-style recipes need to include ``nativesdk-sdk-provides-dummy``
-------------------------------------------------------------------------------
-All ``nativesdk`` packages require ``/bin/sh`` due to their postinstall scriptlets, thus this package has to be dummy-provided within the SDK and ``nativesdk-sdk-provides-dummy`` now does this. If you have a custom SDK recipe (or your own SDK-style recipe similar to e.g. ``buildtools-tarball``), you will need to ensure ``nativesdk-sdk-provides-dummy`` or an equivalent is included in :term:`TOOLCHAIN_HOST_TASK`.
+All :ref:`ref-classes-nativesdk` packages require ``/bin/sh`` due
+to their postinstall scriptlets, thus this package has to be dummy-provided
+within the SDK and ``nativesdk-sdk-provides-dummy`` now does this. If you have
+a custom SDK recipe (or your own SDK-style recipe similar to e.g.
+``buildtools-tarball``), you will need to ensure
+``nativesdk-sdk-provides-dummy`` or an equivalent is included in
+:term:`TOOLCHAIN_HOST_TASK`.
``ld.so.conf`` now moved back to main ``glibc`` package
@@ -265,10 +292,10 @@ using the GL options.
.. _migration-3.2-initramfs-suffix:
-initramfs images now use a blank suffix
+Initramfs images now use a blank suffix
---------------------------------------
-The reference initramfs images (``core-image-minimal-initramfs``,
+The reference :term:`Initramfs` images (``core-image-minimal-initramfs``,
``core-image-tiny-initramfs`` and ``core-image-testmaster-initramfs``) now
set an empty string for :term:`IMAGE_NAME_SUFFIX`, which otherwise defaults
to ``".rootfs"``. These images aren't root filesystems and thus the rootfs
@@ -303,7 +330,7 @@ now need to be changed to ``inherit image-artifact-names``.
Miscellaneous changes
---------------------
-- Support for the long-deprecated ``PACKAGE_GROUP`` variable has now been removed - replace any remaining instances with :term:`FEATURE_PACKAGES`.
+- Support for the long-deprecated ``PACKAGE_GROUP`` variable has now been removed --- replace any remaining instances with :term:`FEATURE_PACKAGES`.
- The ``FILESPATHPKG`` variable, having been previously deprecated, has now been removed. Replace any remaining references with appropriate use of :term:`FILESEXTRAPATHS`.
- Erroneous use of ``inherit +=`` (instead of ``INHERIT +=``) in a configuration file now triggers an error instead of silently being ignored.
- ptest support has been removed from the ``kbd`` recipe, as upstream has moved to autotest which is difficult to work with in a cross-compilation environment.
diff --git a/documentation/migration-guides/migration-3.3.rst b/documentation/migration-guides/migration-3.3.rst
index 22562dacd4..d1e589d7b4 100644
--- a/documentation/migration-guides/migration-3.3.rst
+++ b/documentation/migration-guides/migration-3.3.rst
@@ -1,3 +1,5 @@
+.. SPDX-License-Identifier: CC-BY-SA-2.0-UK
+
Release 3.3 (hardknott)
=======================
@@ -12,12 +14,11 @@ Minimum system requirements
You will now need at least Python 3.6 installed on your build host. Most recent
distributions provide this, but should you be building on a distribution that
-does not have it, you can use the ``buildtools-tarball`` (easily installable
-using ``scripts/install-buildtools``) - see
-:ref:`ref-manual/system-requirements:required git, tar, python and gcc versions`
+does not have it, you can use the :term:`buildtools` tarball (easily installable
+using ``scripts/install-buildtools``) --- see
+:ref:`ref-manual/system-requirements:required git, tar, python, make and gcc versions`
for details.
-
.. _migration-3.3-removed-recipes:
Removed recipes
@@ -62,13 +63,15 @@ need to update those.
New ``python3targetconfig`` class
---------------------------------
-A new :ref:`python3targetconfig <ref-classes-python3targetconfig>` class has been
-created for situations where you would previously have inherited the
-:ref:`python3native <ref-classes-python3native>` class but need access to target configuration data (such as
-correct installation directories). Recipes where this situation applies should
-be changed to inherit ``python3targetconfig`` instead of ``python3native``. This
-also adds a dependency on target ``python3``, so it should only be used where
-appropriate in order to avoid unnecessarily lengthening builds.
+A new :ref:`ref-classes-python3targetconfig` class has
+been created for situations where you would previously have inherited the
+:ref:`ref-classes-python3native` class but need access to
+target configuration data (such as correct installation directories). Recipes
+where this situation applies should be changed to inherit
+:ref:`ref-classes-python3targetconfig` instead of
+:ref:`ref-classes-python3native`. This also adds a dependency
+on target ``python3``, so it should only be used where appropriate in order to
+avoid unnecessarily lengthening builds.
Some example recipes where this change has been made: ``gpgme``, ``libcap-ng``,
``python3-pycairo``.
@@ -96,11 +99,10 @@ variable so that recipes can specify it explicitly, for example::
S = "${WORKDIR}/git"
DISTUTILS_SETUP_PATH = "${S}/python/pythonmodule"
-Recipes that inherit from ``distutils3`` (or
-:ref:`setuptools3 <ref-classes-setuptools3>` which itself inherits
-``distutils3``) that also set :term:`S` to
-point to a Python module within a subdirectory in the aforementioned
-manner should be changed to set ``DISTUTILS_SETUP_PATH`` instead.
+Recipes that inherit from ``distutils3`` (or :ref:`ref-classes-setuptools3`
+which itself inherits ``distutils3``) that also set :term:`S` to point to a
+Python module within a subdirectory in the aforementioned manner should be
+changed to set ``DISTUTILS_SETUP_PATH`` instead.
.. _migration-3.3-bitbake:
diff --git a/documentation/migration-guides/migration-3.4.rst b/documentation/migration-guides/migration-3.4.rst
index 8db43a1454..a9b1057206 100644
--- a/documentation/migration-guides/migration-3.4.rst
+++ b/documentation/migration-guides/migration-3.4.rst
@@ -1,3 +1,5 @@
+.. SPDX-License-Identifier: CC-BY-SA-2.0-UK
+
Migration notes for 3.4 (honister)
----------------------------------
@@ -22,7 +24,7 @@ syntax, so the following::
SRC_URI_append = " file://somefile"
SRC_URI_append_qemux86 = " file://somefile2"
- SRC_URI_remove_qemux86-64 = " file://somefile3"
+ SRC_URI_remove_qemux86-64 = "file://somefile3"
SRC_URI_prepend_qemuarm = "file://somefile4 "
FILES_${PN}-ptest = "${bindir}/xyz"
IMAGE_CMD_tar = "tar"
@@ -34,7 +36,7 @@ would now become::
SRC_URI:append = " file://somefile"
SRC_URI:append:qemux86 = " file://somefile2"
- SRC_URI:remove:qemux86-64 = " file://somefile3"
+ SRC_URI:remove:qemux86-64 = "file://somefile3"
SRC_URI:prepend:qemuarm = "file://somefile4 "
FILES:${PN}-ptest = "${bindir}/xyz"
IMAGE_CMD:tar = "tar"
@@ -43,7 +45,7 @@ would now become::
BB_TASK_NICE_LEVEL:task-testimage = '0'
This also applies to
-:ref:`variable queries to the datastore <bitbake:bitbake-user-manual/bitbake-user-manual-metadata:functions for accessing datastore variables>`,
+:ref:`variable queries to the datastore <bitbake-user-manual/bitbake-user-manual-metadata:functions for accessing datastore variables>`,
for example using ``getVar`` and similar so ``d.getVar("RDEPENDS_${PN}")``
becomes ``d.getVar("RDEPENDS:${PN}")``.
@@ -93,7 +95,7 @@ The ``lz4c``, ``pzstd`` and ``zstd`` commands are now required to be
installed on the build host to support LZ4 and Zstandard compression
functionality. These are typically provided by ``lz4`` and ``zstd``
packages in most Linux distributions. Alternatively they are available
-as part of ``buildtools-tarball`` if your distribution does not provide
+as part of :term:`buildtools` tarball if your distribution does not provide
them. For more information see
:ref:`ref-manual/system-requirements:required packages for the build host`.
@@ -124,7 +126,7 @@ Removed classes
- ``image-mklibs``: not actively tested and upstream mklibs still
requires Python 2
- ``meta``: no longer useful. Recipes that need to skip installing
- packages should inherit ``nopackages`` instead.
+ packages should inherit :ref:`ref-classes-nopackages` instead.
Prelinking disabled by default
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -146,7 +148,7 @@ Virtual runtime provides
~~~~~~~~~~~~~~~~~~~~~~~~
Recipes shouldn't use the ``virtual/`` string in :term:`RPROVIDES` and
-:term:`RDEPENDS` - it is confusing because ``virtual/`` has no special
+:term:`RDEPENDS` --- it is confusing because ``virtual/`` has no special
meaning in :term:`RPROVIDES` and :term:`RDEPENDS` (unlike in the
corresponding build-time :term:`PROVIDES` and :term:`DEPENDS`).
@@ -171,7 +173,7 @@ Extensible SDK host extension
For a normal SDK, some layers append to :term:`TOOLCHAIN_HOST_TASK`
unconditionally which is fine, until the eSDK tries to override the
variable to its own values. Instead of installing packages specified
-in this variable it uses native recipes instead - a very different
+in this variable it uses native recipes instead --- a very different
approach. This has led to confusing errors when binaries are added
to the SDK but not relocated.
@@ -195,7 +197,7 @@ Package/recipe splitting
then you may now need to add it explicitly.
- The ``rpm`` package no longer has ``rpm-build`` in its :term:`RRECOMMENDS`;
- if by chance you still need rpm package building functionality in
+ if by chance you still need rpm package building functionality in
your image and you have not already done so then you should add
``rpm-build`` to your image explicitly.
@@ -206,7 +208,7 @@ Package/recipe splitting
Image / SDK generation changes
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-- Recursive dependencies on the ``do_build`` task are now disabled when
+- Recursive dependencies on the :ref:`ref-tasks-build` task are now disabled when
building SDKs. These are generally not needed; in the unlikely event
that you do encounter problems then it will probably be as a result of
missing explicit dependencies that need to be added.
@@ -252,12 +254,12 @@ Miscellaneous
- The previously deprecated ``COMPRESS_CMD`` and
``CVE_CHECK_CVE_WHITELIST`` variables have been removed. Use
- ``CONVERSION_CMD`` and ``CVE_CHECK_WHITELIST`` (replaced by
- :term:`CVE_CHECK_IGNORE` in version 3.5) respectively
+ :term:`CONVERSION_CMD` and ``CVE_CHECK_WHITELIST`` (replaced by
+ :term:`CVE_CHECK_IGNORE` in version 4.0) respectively
instead.
- The obsolete ``oe_machinstall`` function previously provided in the
- :ref:`utils <ref-classes-utils>` class has been removed. For
+ :ref:`ref-classes-utils` class has been removed. For
machine-specific installation it is recommended that you use the
built-in override support in the fetcher or overrides in general
instead.
diff --git a/documentation/migration-guides/migration-4.0.rst b/documentation/migration-guides/migration-4.0.rst
index a8e6b4c331..0e9e741458 100644
--- a/documentation/migration-guides/migration-4.0.rst
+++ b/documentation/migration-guides/migration-4.0.rst
@@ -1,3 +1,5 @@
+.. SPDX-License-Identifier: CC-BY-SA-2.0-UK
+
Release 4.0 (kirkstone)
=======================
@@ -45,7 +47,7 @@ The following variables have changed their names:
- ``SSTATE_DUPWHITELIST`` became ``SSTATE_ALLOW_OVERLAP_FILES``
- ``SYSROOT_DIRS_BLACKLIST`` became :term:`SYSROOT_DIRS_IGNORE`
- ``UNKNOWN_CONFIGURE_WHITELIST`` became :term:`UNKNOWN_CONFIGURE_OPT_IGNORE`
-- ``WHITELIST_<license>`` became ``INCOMPATIBLE_LICENSE_EXCEPTIONS``
+- ``WHITELIST_<license>`` became :term:`INCOMPATIBLE_LICENSE_EXCEPTIONS`
In addition, ``BB_STAMP_WHITELIST``, ``BB_STAMP_POLICY``, ``INHERIT_BLACKLIST``,
``TUNEABI``, ``TUNEABI_WHITELIST``, and ``TUNEABI_OVERRIDE`` have been removed.
@@ -66,7 +68,7 @@ changes and you need to review them before committing. An example warning
looks like::
poky/scripts/lib/devtool/upgrade.py needs further work at line 275 since it contains abort
-
+
Fetching changes
~~~~~~~~~~~~~~~~
@@ -93,8 +95,8 @@ Fetching changes
do_mytask[network] = "1"
- This is allowed by default from ``do_fetch`` but not from any of our other standard
- tasks. Recipes shouldn't be accessing the network outside of ``do_fetch`` as it
+ This is allowed by default from :ref:`ref-tasks-fetch` but not from any of our other standard
+ tasks. Recipes shouldn't be accessing the network outside of :ref:`ref-tasks-fetch` as it
usually undermines fetcher source mirroring, image and licence manifests, software
auditing and supply chain security.
@@ -109,7 +111,7 @@ License changes
If they do not, by default a warning will be shown. A
:oe_git:`convert-spdx-licenses.py </openembedded-core/tree/scripts/contrib/convert-spdx-licenses.py>`
script can be used to update your recipes.
-
+
- :term:`INCOMPATIBLE_LICENSE` should now use `SPDX identifiers <https://spdx.org/licenses/>`__.
Additionally, wildcarding is now limited to specifically supported values -
see the :term:`INCOMPATIBLE_LICENSE` documentation for further information.
@@ -117,9 +119,9 @@ License changes
- The ``AVAILABLE_LICENSES`` variable has been removed. This variable was a performance
liability and is highly dependent on which layers are added to the configuration,
which can cause signature issues for users. In addition the ``available_licenses()``
- function has been removed from the :ref:`license <ref-classes-license>` class as
+ function has been removed from the :ref:`ref-classes-license` class as
it is no longer needed.
-
+
Removed recipes
~~~~~~~~~~~~~~~
@@ -134,22 +136,21 @@ The following recipes have been removed in this release:
Python changes
~~~~~~~~~~~~~~
-
+
- ``distutils`` has been deprecated upstream in Python 3.10 and thus the ``distutils*``
classes have been moved to ``meta-python``. Recipes that inherit the ``distutils*``
classes should be updated to inherit ``setuptools*`` equivalents instead.
-
+
- The Python package build process is now based on `wheels <https://pythonwheels.com/>`__.
- Here are the new Python packaging classes that should be used:
- :ref:`python_flit_core <ref-classes-python_flit_core>`,
- :ref:`python_setuptools_build_meta <ref-classes-python_setuptools_build_meta>`
- and :ref:`python_poetry_core <ref-classes-python_poetry_core>`.
+ The new Python packaging classes that should be used are
+ :ref:`ref-classes-python_flit_core`, :ref:`ref-classes-python_setuptools_build_meta`
+ and :ref:`ref-classes-python_poetry_core`.
-- The :ref:`setuptools3 <ref-classes-setuptools3>` class ``do_install()`` task now
+- The :ref:`ref-classes-setuptools3` class :ref:`ref-tasks-install` task now
installs the ``wheel`` binary archive. In current versions of ``setuptools`` the
legacy ``setup.py install`` method is deprecated. If the ``setup.py`` cannot be used
with wheels, for example it creates files outside of the Python module or standard
- entry points, then :ref:`setuptools3_legacy <ref-classes-setuptools3_legacy>` should
+ entry points, then :ref:`ref-classes-setuptools3_legacy` should
be used instead.
Prelink removed
@@ -158,7 +159,7 @@ Prelink removed
Prelink has been dropped by ``glibc`` upstream in 2.36. It already caused issues with
binary corruption, has a number of open bugs and is of questionable benefit
without disabling load address randomization and PIE executables.
-
+
We disabled prelinking by default in the honister (3.4) release, but left it able
to be enabled if desired. However, without glibc support it cannot be maintained
any further, so all of the prelinking functionality has been removed in this release.
@@ -169,9 +170,9 @@ reference(s).
Reproducible as standard
~~~~~~~~~~~~~~~~~~~~~~~~
-Reproducibility is now considered as standard functionality, thus the
+Reproducibility is now considered as standard functionality, thus the
``reproducible`` class has been removed and its previous contents merged into the
-:ref:`base <ref-classes-base>` class. If you have references in your configuration to
+:ref:`ref-classes-base` class. If you have references in your configuration to
``reproducible`` in :term:`INHERIT`, :term:`USER_CLASSES` etc. then they should be
removed.
@@ -183,13 +184,13 @@ a new :term:`KERNEL_DEBUG_TIMESTAMPS` variable to "1".
Supported host distribution changes
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-- Support for `AlmaLinux <https://en.wikipedia.org/wiki/AlmaLinux>`__
- hosts replacing `CentOS <https://en.wikipedia.org/wiki/CentOS>`__.
+- Support for :wikipedia:`AlmaLinux <AlmaLinux>`
+ hosts replacing :wikipedia:`CentOS <CentOS>`.
The following distribution versions were dropped: CentOS 8, Ubuntu 16.04 and Fedora 30, 31 and 32.
- ``gcc`` version 7.5 is now required at minimum on the build host. For older
host distributions where this is not available, you can use the
- ``buildtools-extended-tarball`` (easily installable using
+ :term:`buildtools-extended` tarball (easily installable using
``scripts/install-buildtools``).
:append/:prepend in combination with other operators
@@ -206,22 +207,22 @@ For the ``append`` plus ``+=`` (and ``prepend`` plus ``=+``) combinations,
the content should be prefixed (respectively suffixed) by a space to maintain
the same behavior. You can learn more about override style syntax operators
(``append``, ``prepend`` and ``remove``) in the BitBake documentation:
-:ref:`bitbake:bitbake-user-manual/bitbake-user-manual-metadata:appending and prepending (override style syntax)`
-and :ref:`bitbake:bitbake-user-manual/bitbake-user-manual-metadata:removal (override style syntax)`.
+:ref:`bitbake-user-manual/bitbake-user-manual-metadata:appending and prepending (override style syntax)`
+and :ref:`bitbake-user-manual/bitbake-user-manual-metadata:removal (override style syntax)`.
Miscellaneous changes
~~~~~~~~~~~~~~~~~~~~~
-
+
- ``blacklist.bbclass`` is removed and the functionality moved to the
- :ref:`base <ref-classes-base>` class with a more descriptive
+ :ref:`ref-classes-base` class with a more descriptive
``varflag`` variable named :term:`SKIP_RECIPE` which will use the `bb.parse.SkipRecipe()`
function. The usage remains the same, for example::
SKIP_RECIPE[my-recipe] = "Reason for skipping recipe"
-- :ref:`allarch <ref-classes-allarch>` packagegroups can no longer depend on packages
+- :ref:`ref-classes-allarch` packagegroups can no longer depend on packages
which use :term:`PKG` renaming such as :ref:`ref-classes-debian`. Such packagegroups
- recipes should be changed to avoid inheriting :ref:`allarch <ref-classes-allarch>`.
+ recipes should be changed to avoid inheriting :ref:`ref-classes-allarch`.
- The ``lnr`` script has been removed. ``lnr`` implemented the same behaviour as `ln --relative --symbolic`,
since at the time of creation `--relative` was only available in coreutils 8.16
@@ -230,7 +231,7 @@ Miscellaneous changes
any calls to ``lnr`` in your recipes or classes, they should be replaced with
`ln --relative --symbolic` or `ln -rs` if you prefer the short version.
-- The ``package_qa_handle_error()`` function formerly in the :ref:`insane <ref-classes-insane>`
+- The ``package_qa_handle_error()`` function formerly in the :ref:`ref-classes-insane`
class has been moved and renamed - if you have any references in your own custom
classes they should be changed to ``oe.qa.handle_error()``.
@@ -251,7 +252,7 @@ Miscellaneous changes
- The ``cortexa72-crc`` and ``cortexa72-crc-crypto`` tunes have been removed since
the crc extension is now enabled by default for cortexa72. Replace any references to
these with ``cortexa72`` and ``cortexa72-crypto`` respectively.
-
+
- The Python development shell (previously known as ``devpyshell``) feature has been
renamed to ``pydevshell``. To start it you should now run::
@@ -260,8 +261,11 @@ Miscellaneous changes
- The ``packagegroups-core-full-cmdline-libs`` packagegroup is no longer produced, as
libraries should normally be brought in via dependencies. If you have any references
to this then remove them.
-
+
- The :term:`TOPDIR` variable and the current working directory are no longer modified
when parsing recipes. Any code depending on the previous behaviour will no longer
work - change any such code to explicitly use appropriate path variables instead.
+- In order to exclude the kernel image from the image rootfs,
+ :term:`RRECOMMENDS`\ ``:${KERNEL_PACKAGE_NAME}-base`` should be set instead of
+ :term:`RDEPENDS`\ ``:${KERNEL_PACKAGE_NAME}-base``.
diff --git a/documentation/migration-guides/migration-4.1.rst b/documentation/migration-guides/migration-4.1.rst
new file mode 100644
index 0000000000..86721b9873
--- /dev/null
+++ b/documentation/migration-guides/migration-4.1.rst
@@ -0,0 +1,216 @@
+.. SPDX-License-Identifier: CC-BY-SA-2.0-UK
+
+Release 4.1 (langdale)
+======================
+
+Migration notes for 4.1 (langdale)
+-----------------------------------
+
+This section provides migration information for moving to the Yocto
+Project 4.1 Release (codename "langdale") from the prior release.
+
+
+.. _migration-4.1-make-4.0:
+
+make 4.0 is now the minimum required make version
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+glibc now requires ``make`` 4.0 to build, thus it is now the version required to
+be installed on the build host. A new :term:`buildtools-make` tarball has been
+introduced to provide just make 4.0 for host distros without a current/working
+make 4.x version; if you also need other tools you can use the updated
+:term:`buildtools` tarball. For more information see
+:ref:`ref-manual/system-requirements:required packages for the build host`.
+
+
+.. _migration-4.1-complementary-deps:
+
+Complementary package installation ignores recommends
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+When installing complementary packages (e.g. ``-dev`` and ``-dbg`` packages when
+building an SDK, or if you have added ``dev-deps`` to :term:`IMAGE_FEATURES`),
+recommends (as defined by :term:`RRECOMMENDS`) are no longer installed.
+
+If you wish to double-check the contents of your images after this change, see
+:ref:`Checking Image / SDK Changes <migration-general-buildhistory>`. If needed
+you can explicitly install items by adding them to :term:`IMAGE_INSTALL` in
+image recipes or :term:`TOOLCHAIN_TARGET_TASK` for the SDK.
+
+
+.. _migration-4.1-dev-recommends:
+
+dev dependencies are now recommends
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+The default for ``${PN}-dev`` package is now to use :term:`RRECOMMENDS` instead
+of :term:`RDEPENDS` to pull in the main package. This takes advantage of a
+change to complimentary package installation to not follow :term:`RRECOMMENDS`
+(as mentioned above) and for example means an SDK for an image with both openssh
+and dropbear components will now build successfully.
+
+
+.. _migration-4.1-dropbear-sftp:
+
+dropbear now recommends openssh-sftp-server
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+openssh has switched the scp client to use the sftp protocol instead of scp to
+move files. This means scp from Fedora 36 and other current distributions will
+no longer be able to move files to/from a system running dropbear with no sftp
+server installed.
+
+The sftp server from openssh is small (200kb uncompressed) and standalone, so
+adding it to the packagegroup seems to be the best way to preserve the
+functionality for user sanity. However, if you wish to avoid this dependency,
+you can either:
+
+ A. Use ``dropbear`` in :term:`IMAGE_INSTALL` instead of
+ ``packagegroup-core-ssh-dropbear`` (or ``ssh-server-dropbear`` in
+ :term:`IMAGE_FEATURES`), or
+ B. Add ``openssh-sftp-server`` to :term:`BAD_RECOMMENDATIONS`.
+
+
+.. _migration-4.1-classes-split:
+
+Classes now split by usage context
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+A split directory structure has now been set up for ``.bbclass`` files - classes
+that are intended to be inherited only by recipes (e.g. ``inherit`` in a recipe
+file, :term:`IMAGE_CLASSES` or :term:`KERNEL_CLASSES`) should be in a
+``classes-recipe`` subdirectory and classes that are intended to be inherited
+globally (e.g. via ``INHERIT +=``, :term:`PACKAGE_CLASSES`, :term:`USER_CLASSES`
+or :term:`INHERIT_DISTRO`) should be in ``classes-global``. Classes in the
+existing ``classes`` subdirectory will continue to work in any context as before.
+
+Other than knowing where to look when manually browsing the class files, this is
+not likely to require any changes to your configuration. However, if in your
+configuration you were using some classes in the incorrect context, you will now
+receive an error during parsing. For example, the following in ``local.conf`` will
+now cause an error::
+
+ INHERIT += "testimage"
+
+Since :ref:`ref-classes-testimage` is a class intended solely to
+affect image recipes, this would be correctly specified as::
+
+ IMAGE_CLASSES += "testimage"
+
+
+.. _migration-4.1-local-file-error:
+
+Missing local files in SRC_URI now triggers an error
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+If a file referenced in :term:`SRC_URI` does not exist, in 4.1 this will trigger
+an error at parse time where previously this only triggered a warning. In the past
+you could ignore these warnings for example if you have multiple build
+configurations (e.g. for several different target machines) and there were recipes
+that you were not building in one of the configurations. If you have this scenario
+you will now need to conditionally add entries to :term:`SRC_URI` where they are
+valid, or use :term:`COMPATIBLE_MACHINE` / :term:`COMPATIBLE_HOST` to prevent the
+recipe from being available (and therefore avoid it being parsed) in configurations
+where the files aren't available.
+
+
+.. _migration-4.1-qa-checks:
+
+QA check changes
+~~~~~~~~~~~~~~~~
+
+- The :ref:`buildpaths <qa-check-buildpaths>` QA check is now enabled by default
+ in :term:`WARN_QA`, and thus any build system paths found in output files will
+ trigger a warning. If you see these warnings for your own recipes, for full
+ binary reproducibility you should make the necessary changes to the recipe build
+ to remove these paths. If you wish to disable the warning for a particular
+ recipe you can use :term:`INSANE_SKIP`, or for the entire build you can adjust
+ :term:`WARN_QA`. For more information, see the :ref:`buildpaths QA check
+ <qa-check-buildpaths>` section.
+
+- ``do_qa_staging`` now checks shebang length in all directories specified by
+ :term:`SYSROOT_DIRS`, since there is a maximum length defined in the kernel. For
+ native recipes which write scripts to the sysroot, if the shebang line in one of
+ these scripts is too long you will get an error. This can be skipped using
+ :term:`INSANE_SKIP` if necessary, but the best course of action is of course to
+ fix the script. There is now also a ``create_cmdline_shebang_wrapper`` function
+ that you can call e.g. from ``do_install`` (or ``do_install:append``) within a
+ recipe to create a wrapper to fix such scripts - see the ``libcheck`` recipe
+ for an example usage.
+
+
+
+Miscellaneous changes
+~~~~~~~~~~~~~~~~~~~~~
+
+- ``mount.blacklist`` has been renamed to ``mount.ignorelist`` in
+ ``udev-extraconf``. If you are customising this file via ``udev-extraconf`` then
+ you will need to update your ``udev-extraconf`` ``.bbappend`` as appropriate.
+- ``help2man-native`` has been removed from implicit sysroot dependencies. If a
+ recipe needs ``help2man-native`` it should now be explicitly added to
+ :term:`DEPENDS` within the recipe.
+- For images using systemd, the reboot watchdog timeout has been set to 60
+ seconds (from the upstream default of 10 minutes). If you wish to override this
+ you can set :term:`WATCHDOG_TIMEOUT` to the desired timeout in seconds. Note
+ that the same :term:`WATCHDOG_TIMEOUT` variable also specifies the timeout used
+ for the ``watchdog`` tool (if that is being built).
+- The :ref:`ref-classes-image-buildinfo` class now writes to
+ ``${sysconfdir}/buildinfo`` instead of ``${sysconfdir}/build`` by default (i.e.
+ the default value of :term:`IMAGE_BUILDINFO_FILE` has been changed). If you have
+ code that reads this from images at build or runtime you will need to update it
+ or specify your own value for :term:`IMAGE_BUILDINFO_FILE`.
+- In the :ref:`ref-classes-archiver` class, the default
+ ``ARCHIVER_OUTDIR`` value no longer includes the :term:`MACHINE` value in order
+ to avoid the archive task running multiple times in a multiconfig setup. If you
+ have custom code that does something with the files archived by the
+ :ref:`ref-classes-archiver` class then you may need to adjust it to
+ the new structure.
+- If you are not using `systemd` then udev is now configured to use labels
+ (``LABEL`` or ``PARTLABEL``) to set the mount point for the device. For example::
+
+ /run/media/rootfs-sda2
+
+ instead of::
+
+ /run/media/sda2
+
+- ``icu`` no longer provides the ``icu-config`` configuration tool - upstream
+ have indicated ``icu-config`` is deprecated and should no longer be used. Code
+ with references to it will need to be updated, for example to use ``pkg-config``
+ instead.
+- The ``rng-tools`` systemd service name has changed from ``rngd`` to ``rng-tools``
+- The ``largefile`` :term:`DISTRO_FEATURES` item has been removed, large file
+ support is now always enabled where it was previously optional.
+- The Python ``zoneinfo`` module is now split out to its own ``python3-zoneinfo``
+ package.
+- The :term:`PACKAGECONFIG` option to enable wpa_supplicant in the ``connman``
+ recipe has been renamed to "wpa-supplicant". If you have set :term:`PACKAGECONFIG` for
+ the ``connman`` recipe to include this option you will need to update
+ your configuration. Related to this, the :term:`WIRELESS_DAEMON` variable
+ now expects the new ``wpa-supplicant`` naming and affects ``packagegroup-base``
+ as well as ``connman``.
+- The ``wpa-supplicant`` recipe no longer uses a static (and stale) ``defconfig``
+ file, instead it uses the upstream version with appropriate edits for the
+ :term:`PACKAGECONFIG`. If you are customising this file you will need to
+ update your customisations.
+- With the introduction of picobuild in
+ :ref:`ref-classes-python_pep517`, The ``PEP517_BUILD_API``
+ variable is no longer supported. If you have any references to this variable
+ you should remove them.
+
+
+.. _migration-4.1-removed-recipes:
+
+Removed recipes
+~~~~~~~~~~~~~~~
+
+The following recipes have been removed in this release:
+
+- ``alsa-utils-scripts``: merged into alsa-utils
+- ``cargo-cross-canadian``: optimised out
+- ``lzop``: obsolete, unmaintained upstream
+- ``linux-yocto (5.10)``: 5.15 and 5.19 are currently provided
+- ``rust-cross``: optimised out
+- ``rust-crosssdk``: optimised out
+- ``rust-tools-cross-canadian``: optimised out
+- ``xf86-input-keyboard``: obsolete (replaced by libinput/evdev)
diff --git a/documentation/migration-guides/migration-4.2.rst b/documentation/migration-guides/migration-4.2.rst
new file mode 100644
index 0000000000..f5f12c8871
--- /dev/null
+++ b/documentation/migration-guides/migration-4.2.rst
@@ -0,0 +1,276 @@
+.. SPDX-License-Identifier: CC-BY-SA-2.0-UK
+
+Release 4.2 (mickledore)
+========================
+
+Migration notes for 4.2 (mickledore)
+------------------------------------
+
+This section provides migration information for moving to the Yocto
+Project 4.2 Release (codename "mickledore") from the prior release.
+
+.. _migration-4.2-supported-distributions:
+
+Supported distributions
+~~~~~~~~~~~~~~~~~~~~~~~
+
+This release supports running BitBake on new GNU/Linux distributions:
+
+- Fedora 36 and 37
+- AlmaLinux 8.7 and 9.1
+- OpenSuse 15.4
+
+On the other hand, some earlier distributions are no longer supported:
+
+- Debian 10.x
+- Fedora 34 and 35
+- AlmaLinux 8.5
+
+See :ref:`all supported distributions <system-requirements-supported-distros>`.
+
+.. _migration-4.2-python-3.8:
+
+Python 3.8 is now the minimum required Python version version
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+BitBake and OpenEmbedded-Core now require Python 3.8 or newer,
+making it a requirement to use a distribution providing at least this
+version, or to install a :term:`buildtools` tarball.
+
+.. _migration-4.2-gcc-8.0:
+
+gcc 8.0 is now the minimum required GNU C compiler version
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+This version, released in 2018, is a minimum requirement
+to build the ``mesa-native`` recipe and as the latter is in the
+default dependency chain when building QEMU this has now been
+made a requirement for all builds.
+
+In the event that your host distribution does not provide this
+or a newer version of gcc, you can install a
+:term:`buildtools-extended` tarball.
+
+.. _migration-4.2-new-nvd-api:
+
+Fetching the NVD vulnerability database through the 2.0 API
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+This new version adds a new fetcher for the NVD database using the 2.0 API,
+as the 1.0 API will be retired in 2023.
+
+The implementation changes as little as possible, keeping the current
+database format (but using a different database file for the transition
+period), with a notable exception of not using the META table.
+
+Here are minor changes that you may notice:
+
+- The database starts in 1999 instead of 2002
+- The complete fetch is longer (30 minutes typically)
+
+.. _migration-4.2-rust-crate-checksums:
+
+Rust: mandatory checksums for crates
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+This release now supports checksums for Rust crates and makes
+them mandatory for each crate in a recipe. See :yocto_git:`python3_bcrypt recipe changes
+</poky/commit/?h=mickledore&id=0dcb5ab3462fdaaf1646b05a00c7150eea711a9a>`
+for example.
+
+The ``cargo-update-recipe-crates`` utility
+:yocto_git:`has been extended </poky/commit/?h=mickledore&id=eef7fbea2c5bf59369390be4d5efa915591b7b22>`
+to include such checksums. So, in case you need to add the list of checksums
+to a recipe just inheriting the :ref:`ref-classes-cargo` class so far, you can
+follow these steps:
+
+#. Make the recipe inherit :ref:`ref-classes-cargo-update-recipe-crates`
+#. Remove all ``crate://`` lines from the recipe
+#. Create an empty ``${BPN}-crates.inc`` file and make your recipe require it
+#. Execute ``bitbake -c update_crates your_recipe``
+#. Copy and paste the output of BitBake about the missing checksums into the
+ ``${BPN}-crates.inc`` file.
+
+
+.. _migration-4.2-addpylib:
+
+Python library code extensions
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+BitBake in this release now supports a new ``addpylib`` directive to enable
+Python libraries within layers.
+
+This directive should be added to your layer configuration
+as in the below example from ``meta/conf/layer.conf``::
+
+ addpylib ${LAYERDIR}/lib oe
+
+Layers currently adding a lib directory to extend Python library code should now
+use this directive as :term:`BBPATH` is not going to be added automatically by
+OE-Core in future. Note that the directives are immediate operations, so it does
+make modules available for use sooner than the current BBPATH-based approach.
+
+For more information, see :ref:`bitbake-user-manual/bitbake-user-manual-metadata:extending python library code`.
+
+
+.. _migration-4.2-removed-variables:
+
+Removed variables
+~~~~~~~~~~~~~~~~~
+
+The following variables have been removed:
+
+- ``SERIAL_CONSOLE``, deprecated since version 2.6, replaced by :term:`SERIAL_CONSOLES`.
+- ``PACKAGEBUILDPKGD``, a mostly internal variable in the :ref:`ref-classes-package`
+ class was rarely used to customise packaging. If you were using this in your custom
+ recipes or bbappends, you will need to switch to using :term:`PACKAGE_PREPROCESS_FUNCS`
+ or :term:`PACKAGESPLITFUNCS` instead.
+
+.. _migration-4.2-removed-recipes:
+
+Removed recipes
+~~~~~~~~~~~~~~~
+
+The following recipes have been removed in this release:
+
+- ``python3-picobuild``: after switching to ``python3-build``
+- ``python3-strict-rfc3339``: unmaintained and not needed by anything in
+ :oe_git:`openembedded-core </openembedded-core>`
+ or :oe_git:`meta-openembedded </meta-openembedded>`.
+- ``linux-yocto``: removed version 5.19 recipes (6.1 and 5.15 still provided)
+
+
+.. _migration-4.2-removed-classes:
+
+Removed classes
+~~~~~~~~~~~~~~~
+
+The following classes have been removed in this release:
+
+- ``rust-bin``: no longer used
+- ``package_tar``: could not be used for actual packaging, and thus not particularly useful.
+
+
+LAYERSERIES_COMPAT for custom layers and devtool workspace
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Some layer maintainers have been setting :term:`LAYERSERIES_COMPAT` in their
+layer's ``conf/layer.conf`` to the value of ``LAYERSERIES_CORENAMES`` to
+effectively bypass the compatibility check - this is no longer permitted.
+Layer maintainers should set :term:`LAYERSERIES_COMPAT` appropriately to
+help users understand the compatibility status of the layer.
+
+Additionally, the :term:`LAYERSERIES_COMPAT` value for the devtool workspace
+layer is now set at the time of creation, thus if you upgrade with the
+workspace layer enabled and you wish to retain it, you will need to manually
+update the :term:`LAYERSERIES_COMPAT` value in ``workspace/conf/layer.conf``
+(or remove the path from :term:`BBLAYERS` in ``conf/bblayers.conf`` and
+delete/move the ``workspace`` directory out of the way if you no longer
+need it).
+
+
+.. _migration-4.2-runqemu-slirp:
+
+runqemu now limits slirp host port forwarding to localhost
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+With default slirp port forwarding configuration in runqemu, qemu
+previously listened on TCP ports 2222 and 2323 on all IP addresses
+available on the build host. Most use cases with runqemu only need
+it for localhost and it is not safe to run qemu images with root
+login without password enabled and listening on all available,
+possibly Internet reachable network interfaces. Thus, in this
+release we limit qemu port forwarding to localhost (127.0.0.1).
+
+However, if you need the qemu machine to be reachable from the
+network, then it can be enabled via ``conf/local.conf`` or machine
+config variable ``QB_SLIRP_OPT``::
+
+ QB_SLIRP_OPT = "-netdev user,id=net0,hostfwd=tcp::2222-:22"
+
+
+.. _migration-4.2-patch-qa:
+
+Patch QA checks
+~~~~~~~~~~~~~~~
+
+The QA checks for patch fuzz and Upstream-Status have been reworked
+slightly in this release. The Upstream-Status checking is now configurable
+from :term:`WARN_QA` / :term:`ERROR_QA` (``patch-status-core`` for the
+core layer, and ``patch-status-noncore`` for other layers).
+
+The ``patch-fuzz`` and ``patch-status-core`` checks are now in the default
+value of :term:`ERROR_QA` so that they will cause the build to fail
+if triggered. If you prefer to avoid this you will need to adjust the value
+of :term:`ERROR_QA` in your configuration as desired.
+
+
+.. _migration-4.2-mesa:
+
+Native/nativesdk mesa usage and graphics drivers
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+This release includes mesa 23.0, and with that mesa release it is not longer
+possible to use drivers from the host system, as mesa upstream has added strict
+checks for matching builds between drivers and libraries that load them.
+
+This is particularly relevant when running QEMU built within the build
+system. A check has been added to runqemu so that there is a helpful error
+when there is no native/nativesdk opengl/virgl support available.
+
+To support this, a number of drivers have been enabled when building ``mesa-native``.
+The one major dependency pulled in by this change is ``llvm-native`` which will
+add a few minutes to the build on a modern machine. If this is undesirable, you
+can set the value of :term:`DISTRO_FEATURES_NATIVE` in your configuration such
+that ``opengl`` is excluded.
+
+
+.. _migration-4.2-misc-changes:
+
+Miscellaneous changes
+~~~~~~~~~~~~~~~~~~~~~
+
+- The :term:`IMAGE_NAME` variable is now set based on :term:`IMAGE_LINK_NAME`. This
+ means that if you are setting :term:`IMAGE_LINK_NAME` to "" to disable unversioned
+ image symlink creation, you also now need to set :term:`IMAGE_NAME` to still have
+ a reasonable value e.g.::
+
+ IMAGE_LINK_NAME = ""
+ IMAGE_NAME = "${IMAGE_BASENAME}${IMAGE_MACHINE_SUFFIX}${IMAGE_VERSION_SUFFIX}"
+
+- In ``/etc/os-release``, the ``VERSION_CODENAME`` field is now used instead of
+ ``DISTRO_CODENAME`` (though its value is still set from the :term:`DISTRO_CODENAME`
+ variable) for better conformance to standard os-release usage. If you have runtime
+ code reading this from ``/etc/os-release`` it may need to be updated.
+
+- The kmod recipe now enables OpenSSL support by default in order to support module
+ signing. If you do not need this and wish to reclaim some space/avoid the dependency
+ you should set :term:`PACKAGECONFIG` in a kmod bbappend (or ``PACKAGECONFIG:pn-kmod``
+ at the configuration level) to exclude ``openssl``.
+
+- The ``OEBasic`` signature handler (see :term:`BB_SIGNATURE_HANDLER`) has been
+ removed. It is unlikely that you would have selected to use this, but if you have
+ you will need to remove this setting.
+
+- The :ref:`ref-classes-package` class now checks if package names conflict via
+ ``PKG:${PN}`` override during ``do_package``. If you receive the associated error
+ you will need to address the :term:`PKG` usage so that the conflict is resolved.
+
+- openssh no longer uses :term:`RRECOMMENDS` to pull in ``rng-tools``, since rngd
+ is no longer needed as of Linux kernel 5.6. If you still need ``rng-tools``
+ installed for other reasons, you should add ``rng-tools`` explicitly to your
+ image. If you additionally need rngd to be started as a service you will also
+ need to add the ``rng-tools-service`` package as that has been split out.
+
+- The cups recipe no longer builds with the web interface enabled, saving ~1.8M of
+ space in the final image. If you wish to enable it, you should set
+ :term:`PACKAGECONFIG` in a cups bbappend (or ``PACKAGECONFIG:pn-cups`` at the
+ configuration level) to include ``webif``.
+
+- The :ref:`ref-classes-scons` class now passes a ``MAXLINELENGTH`` argument to
+ scons in order to fix an issue with scons and command line lengths when ccache is
+ enabled. However, some recipes may be using older scons versions which don't support
+ this argument. If that is the case you can set the following in the recipe in order
+ to disable this::
+
+ SCONS_MAXLINELENGTH = ""
diff --git a/documentation/migration-guides/migration-4.3.rst b/documentation/migration-guides/migration-4.3.rst
new file mode 100644
index 0000000000..fc25397d70
--- /dev/null
+++ b/documentation/migration-guides/migration-4.3.rst
@@ -0,0 +1,252 @@
+.. SPDX-License-Identifier: CC-BY-SA-2.0-UK
+
+Release 4.3 (nanbield)
+========================
+
+Migration notes for 4.3 (nanbield)
+------------------------------------
+
+This section provides migration information for moving to the Yocto
+Project 4.3 Release (codename "nanbield") from the prior release.
+
+.. _migration-4.3-supported-kernel-versions:
+
+Supported kernel versions
+~~~~~~~~~~~~~~~~~~~~~~~~~
+
+The :term:`OLDEST_KERNEL` setting has been changed to "5.15" in this release, meaning that
+out the box, older kernels are not supported. There were two reasons for this.
+Firstly it allows glibc optimisations that improve the performance of the system
+by removing compatibility code and using modern kernel APIs exclusively. The second
+issue was this allows 64 bit time support even on 32 bit platforms and resolves Y2038
+issues.
+
+It is still possible to override this value and build for older kernels, this is just
+no longer the default supported configuration. This setting does not affect which
+kernel versions SDKs will run against and does not affect which versions of the kernel
+can be used to run builds.
+
+.. _migration-4.3-layername-override:
+
+Layername override implications
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Code can now know which layer a recipe is coming from through the newly added
+:term:`FILE_LAYERNAME` variable and the ``layer-<layername> override``. This is being used
+for enabling QA checks on a per layer basis. For existing code this has the
+side effect that the QA checks will apply to recipes being bbappended
+from other layers - for example, patches added through such bbappends will now
+need to have the "Upstream-Status" specified in the patch header.
+
+.. _migration-4.3-compiling-changes:
+
+Compiling changes
+~~~~~~~~~~~~~~~~~
+
+- Code on 32 bit platforms is now compiled with largefile support and 64
+ bit ``time_t``, to avoid the Y2038 time overflow issue. This breaks the ABI
+ and could break existing programs in untested layers.
+
+.. _migration-4.3-supported-distributions:
+
+Supported distributions
+~~~~~~~~~~~~~~~~~~~~~~~
+
+This release supports running BitBake on new GNU/Linux distributions:
+
+- Ubuntu 22.10
+- Fedora 38
+- Debian 12
+- CentOS Stream 8
+- AlmaLinux 8.8
+- AlmaLinux 9.2
+
+On the other hand, some earlier distributions are no longer supported:
+
+- Fedora 36
+- AlmaLinux 8.7
+- AlmaLinux 9.1
+
+See :ref:`all supported distributions <system-requirements-supported-distros>`.
+
+.. _migration-4.3-removed-machines:
+
+edgerouter machine removed
+~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+The ``edgerouter`` reference BSP for the MIPS architecture in ``meta-yocto-bsp``
+has been removed as the hardware has been unavailable for some time. There is no
+suitable reference MIPS hardware to replace it with, but the MIPS architecture
+will continue to get coverage via QEMU build/boot testing.
+
+.. _migration-4.3-go-changes:
+
+Go language changes
+~~~~~~~~~~~~~~~~~~~
+
+- Support for the Glide package manager has been removed, as ``go mod``
+ has become the standard.
+
+.. _migration-4.3-systemd-changes:
+
+systemd changes
+~~~~~~~~~~~~~~~
+
+Upstream systemd is now more strict on filesystem layout and the ``usrmerge``
+feature is therefore required alongside systemd. The Poky test configurations
+have been updated accordingly for systemd.
+
+.. _migration-4.3-recipe-changes:
+
+Recipe changes
+~~~~~~~~~~~~~~
+
+- Runtime testing of ptest now fails if no test results are returned by
+ any given ptest.
+
+.. _migration-4.3-deprecated-variables:
+
+Deprecated variables
+~~~~~~~~~~~~~~~~~~~~
+
+The following variables have been deprecated:
+
+- :term:`CVE_CHECK_IGNORE`: use :term:`CVE_STATUS` instead.
+
+.. _migration-4.3-removed-variables:
+
+Removed variables
+~~~~~~~~~~~~~~~~~
+
+The following variables have been removed:
+
+- ``AUTHOR``
+- ``PERLARCH``
+- ``PERLVERSION``
+- ``QEMU_USE_SLIRP`` - add ``slirp`` to ``TEST_RUNQEMUPARAMS`` instead.
+- ``SERIAL_CONSOLES_CHECK`` - no longer necessary because all
+ consoles listed in :term:`SERIAL_CONSOLES` are checked for their existence
+ before a ``getty`` is started.
+
+.. _migration-4.3-removed-recipes:
+
+Removed recipes
+~~~~~~~~~~~~~~~
+
+The following recipes have been removed in this release:
+
+- ``apmd``: obsolete (``apm`` in :term:`MACHINE_FEATURES` also removed).
+- ``cve-update-db-native``: functionally replaced by ``cve-update-nvd2-native``
+- ``gcr3``: no longer needed by core recipes, moved to meta-gnome (gcr, i.e. version 4.x, is still provided).
+- ``glide``: as explained in :ref:`migration-4.3-go-changes`.
+- ``libdmx``: obsolete
+- ``linux-yocto`` version 5.15 (versions 6.1 and 6.5 provided instead).
+- ``python3-async``: obsolete - no longer needed by ``python3-gitdb`` or any other core recipe
+- ``rust-hello-world``: there are sufficient other Rust recipes and test cases such that this is no longer needed.
+
+
+.. _migration-4.3-removed-classes:
+
+Removed classes
+~~~~~~~~~~~~~~~
+
+The following classes have been removed in this release:
+
+- ``glide``: as explained in :ref:`migration-4.3-go-changes`.
+
+
+Output file naming changes
+~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+In 4.3 there are some minor differences in image and SDK output file names.
+If you rely on the existing naming (e.g. in external scripts) you may need to
+either modify configuration or adapt to the new naming. Further details:
+
+- :term:`IMAGE_NAME` and :term:`IMAGE_LINK_NAME` now include the
+ :term:`IMAGE_NAME_SUFFIX` value directly. In practical terms, this means
+ that ``.rootfs`` will now appear in image output file names. If you do not
+ wish to have the ``.rootfs`` suffix used, you can just set
+ :term:`IMAGE_NAME_SUFFIX` to "" and this will now be consistently respected
+ in both the image file and image file symlink names. As part of this change,
+ support for the ``imgsuffix`` task varflag has been dropped (mostly
+ an internal implementation detail, but if you were implementing a custom
+ image construction with a task in a similar manner to ``do_bootimg``
+ you may have been using this).
+
+- :term:`SDK_NAME` now includes the values of :term:`IMAGE_BASENAME` and
+ :term:`MACHINE` so that they are unique when building SDKs for different
+ images and machines.
+
+
+
+.. _migration-4.3-pr-pe:
+
+Versioning changes
+~~~~~~~~~~~~~~~~~~
+
+- :term:`PR` values have been removed from all core recipes - distro maintainers
+ who make use of :term:`PR` values would need to curate these already so the
+ sparsely set base values would not be that useful anymore. If you have been
+ relying on these (i.e. you are maintaining a binary package feed where package
+ versions should only ever increase), double-check the output (perhaps with the
+ help of the :ref:`ref-classes-buildhistory` class) to ensure that package
+ versions are consistent.
+
+- The :term:`PR` value can no longer be set from the recipe file name - this
+ was rarely used, but in any case is no longer supported.
+
+- :term:`PE` and :term:`PR` are no longer included in the work directory path
+ (:term:`WORKDIR`). This may break some tool assumptions about directory paths,
+ but those should really be querying paths from the build system (or not poking
+ into :term:`WORKDIR` externally).
+
+- Source revision information has been moved from :term:`PV` to :term:`PKGV`.
+ The user visible effect of this change is that :term:`PV` will no longer have
+ revision information in it and this will now be appended to the :term:`PV`
+ value through :term:`PKGV` when the packages are written out (as long as "+"
+ is present in the :term:`PKGV` value). Since :term:`PV` is used in
+ :term:`STAMP` and :term:`WORKDIR`, you may notice small directory naming and
+ stamp naming changes.
+
+- The :term:`SRCPV` variable is no longer needed in :term:`PV`, but since
+ the default :term:`SRCPV` value is now "", using it is effectively now just a
+ null operation - you can remove it (leaving behind the "+") , but it is not
+ yet required to do so.
+
+
+.. _migration-4.3-qemu-changes:
+
+QEMU changes
+~~~~~~~~~~~~
+
+- The ``runqemu`` script no longer systematically adds two serial ports
+ (``--serial null`` and ``-serial mon:stdio``) to the QEMU emulated machine
+ if the user already adds such ports through the ``QB_OPT_APPEND`` setting.
+
+ If the user adds one port, only ``--serial null`` is added, and
+ ``-serial mon:stdio`` is no longer passed. If the user adds more than one
+ port, ``--serial null`` is no longer added either. This can break some
+ existing QEMU based configurations expecting such serial ports to be added
+ when ``runqemu`` is executed.
+
+ This change was made to avoid exceeding two serial ports, which interferes
+ with automated testing.
+
+- ``runqemu`` now uses the ``ip tuntap`` command instead of ``tunctl``, and
+ thus ``tunctl`` is no longer built by the ``qemu-helper-native`` recipe; if
+ for some reason you were calling ``tunctl`` directly from your own scripts
+ you should switch to calling ``ip tuntap`` instead.
+
+.. _migration-4.3-misc-changes:
+
+Miscellaneous changes
+~~~~~~~~~~~~~~~~~~~~~
+
+- The ``-crosssdk`` suffix and any :term:`MLPREFIX` were removed from
+ ``virtual/XXX`` provider/dependencies where a ``PREFIX`` was used as well,
+ as we don't need both and it made automated dependency rewriting
+ unnecessarily complex. In general this only affects internal toolchain
+ dependencies so isn't end user visible, but if for some reason you have
+ custom classes or recipes that rely upon the old providers then you will
+ need to update those.
+
diff --git a/documentation/migration-guides/migration-5.0.rst b/documentation/migration-guides/migration-5.0.rst
new file mode 100644
index 0000000000..cf413300c2
--- /dev/null
+++ b/documentation/migration-guides/migration-5.0.rst
@@ -0,0 +1,202 @@
+.. SPDX-License-Identifier: CC-BY-SA-2.0-UK
+
+Release 5.0 LTS (scarthgap)
+===========================
+
+Migration notes for 5.0 (scarthgap)
+------------------------------------
+
+This section provides migration information for moving to the Yocto
+Project 5.0 Release (codename "scarthgap") from the prior release.
+
+To migrate from an earlier LTS release, you **also** need to check all
+the previous migration notes from your release to this new one:
+
+- :doc:`/migration-guides/migration-4.3`
+- :doc:`/migration-guides/migration-4.2`
+- :doc:`/migration-guides/migration-4.1`
+- :doc:`/migration-guides/migration-4.0`
+- :doc:`/migration-guides/migration-3.4`
+- :doc:`/migration-guides/migration-3.3`
+- :doc:`/migration-guides/migration-3.2`
+
+.. _migration-5.0-supported-kernel-versions:
+
+Supported kernel versions
+~~~~~~~~~~~~~~~~~~~~~~~~~
+
+The :term:`OLDEST_KERNEL` setting is still "5.15" in this release, meaning that
+out the box, older kernels are not supported. See :ref:`4.3 migration notes
+<migration-4.3-supported-kernel-versions>` for details.
+
+.. _migration-5.0-supported-distributions:
+
+Supported distributions
+~~~~~~~~~~~~~~~~~~~~~~~
+
+Compared to the previous releases, running BitBake is supported on new
+GNU/Linux distributions:
+
+- Rocky 9
+
+On the other hand, some earlier distributions are no longer supported:
+
+- Fedora 37
+- Ubuntu 22.10
+- OpenSUSE Leap 15.3
+
+See :ref:`all supported distributions <system-requirements-supported-distros>`.
+
+.. _migration-5.0-go-changes:
+
+Go language changes
+~~~~~~~~~~~~~~~~~~~
+
+The ``linkmode`` flag was dropped from ``GO_LDFLAGS`` for ``nativesdk`` and
+``cross-canadian``. Also, dynamic linking was disabled for the whole set of
+(previously) supported architectures in the ``goarch`` class.
+
+.. _migration-5.0-systemd-changes:
+
+systemd changes
+~~~~~~~~~~~~~~~
+
+Systemd's nss-resolve plugin is now supported and can be added via the
+``nss-resolve`` :term:`PACKAGECONFIG` option , which is from now on required
+(along with ``resolved``) by the ``systemd-resolved`` feature. Related to that
+(i.e., Systemd's network name resolution), an option to use ``stub-resolv.conf``
+was added as well.
+
+.. _migration-5.0-recipe-changes:
+
+Recipe changes
+~~~~~~~~~~~~~~
+
+- Runtime testing of ptest now fails if no test results are returned by
+ any given ptest.
+
+.. _migration-5.0-deprecated-variables:
+
+Deprecated variables
+~~~~~~~~~~~~~~~~~~~~
+
+- ``CVE_CHECK_IGNORE`` should be replaced with :term:`CVE_STATUS`
+
+
+.. _migration-5.0-removed-variables:
+
+Removed variables
+~~~~~~~~~~~~~~~~~
+
+The following variables have been removed:
+
+- ``DEPLOY_DIR_TAR``: no longer needed since the package_tar class was removed in 4.2.
+- ``PYTHON_PN``: Python 2 has previously been removed, leaving Python 3 as the sole
+ major version. Therefore, this abstraction to differentiate both versions is
+ no longer needed.
+- ``oldincludedir``
+- ``USE_L10N``: previously deprecated, and now removed.
+- ``CVE_SOCKET_TIMEOUT``
+- ``SERIAL_CONSOLES_CHECK`` - use :term:`SERIAL_CONSOLES` instead as all consoles specified in the latter are checked for their existence before a ``getty`` is started.
+
+.. _migration-5.0-removed-recipes:
+
+Removed recipes
+~~~~~~~~~~~~~~~
+
+The following recipes have been removed in this release:
+
+- ``libcroco``: deprecated and archived by the Gnome Project.
+- ``liberror-perl``: unmaintained and no longer needed - moved to meta-perl.
+- ``linux-yocto``: version 6.1 (version 6.6 provided instead).
+- ``systemtap-uprobes``: obsolete.
+- ``zvariant``: fails to build with newer Rust.
+
+.. _migration-5.0-removed-classes:
+
+Removed classes
+~~~~~~~~~~~~~~~
+
+No classes have been removed in this release.
+
+.. _migration-5.0-qemu-changes:
+
+QEMU changes
+~~~~~~~~~~~~
+
+In ``tune-core2``, the cpu models ``n270`` and ``core2duo`` are no longer
+passed to QEMU, since its documentation recommends not using them with ``-cpu``
+option. Therefore, from now on, ``Nehalem`` model is used instead.
+
+
+ipk packaging changes
+~~~~~~~~~~~~~~~~~~~~~
+
+ipk packaging (using ``opkg``) now uses ``zstd`` compression instead of ``xz``
+for better compression and performance. This does mean that ``.ipk`` packages
+built using the 5.0 release requires Opkg built with zstd enabled --- naturally
+this is the case in 5.0, but at least by default these packages will not be
+usable on older systems where Opkg does not have zstd enabled at build time.
+
+Additionally, the internal dependency solver in Opkg is now deprecated --- it
+is still available in this release but will trigger a warning if selected.
+The default has been the external ``libsolv`` solver for some time, but if you
+have explicitly removed that from :term:`PACKAGECONFIG` for Opkg to
+select the internal solver, you should plan to switch to ``libsolv`` in the
+near future (by including ``libsolv`` your custom :term:`PACKAGECONFIG` value
+for Opkg, or reverting to the default value).
+
+
+motd message when using ``DISTRO = "poky"``
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+The default ``poky`` :term:`DISTRO` is explicitly a *reference* distribution
+for testing and development purposes. It enables most hardware and software
+features so that they can be tested, but this also means that
+from a security point of view the attack surface is very large.
+
+We encourage anyone using the Yocto Project for production use to create
+their own distribution and not use Poky. To encourage this behaviour
+further, in 5.0 a warning has been added to ``/etc/motd`` when Poky is used
+so that the developer will see it when they log in. If you are creating your
+own distribution this message will not show up.
+
+For information on how to create your own distribution, see
+":ref:`dev-manual/custom-distribution:creating your own distribution`".
+
+.. _migration-5.0-misc-changes:
+
+Miscellaneous changes
+~~~~~~~~~~~~~~~~~~~~~
+
+- ``bitbake-whatchanged`` script was removed as it was broken and unmaintained.
+
+- ``scripts/sstate-cache-management.sh`` has been replaced by
+ ``scripts/sstate-cache-management.py``, a more performant Python-based version.
+
+- The ``bmap-tools`` recipe has been renamed to ``bmaptool``.
+
+- ``gpgme`` has had Python binding support disabled since upstream does not
+ support Python 3.12 yet. This will be fixed in future once it is fixed upstream.)
+
+- A warning will now be shown if the ``virtual/`` prefix is used in runtime
+ contexts (:term:`RDEPENDS` / :term:`RPROVIDES`) ---
+ See :ref:`virtual-slash <qa-check-virtual-slash>` for details.
+
+- ``recipetool`` now prefixes the names of recipes created for Python modules
+ with ``python3-``.
+
+- The :ref:`ref-classes-cve-check` class no longer produces a warning for
+ remote patches --- it only logs a note and does not try to fetch the patch
+ in order to scan it for issues or CVE numbers. However, CVE number
+ references in remote patch file names will now be picked up.
+
+- The values of :term:`PE` and :term:`PR` have been dropped from
+ ``-f{file,macro,debug}-prefix-map``, in order to avoid unnecessary churn
+ in debugging symbol paths when the version is bumped. This is unlikely to
+ cause issues, but if you are paying attention to the debugging source path
+ (e.g. in recipes that need to manipulate these files during packaging) then
+ you will notice the difference. A new :term:`TARGET_DBGSRC_DIR` variable is
+ provided to make this easier.
+
+- ``ccache`` no longer supports FORTRAN.
diff --git a/documentation/migration-guides/migration-general.rst b/documentation/migration-guides/migration-general.rst
index 9eecf69af8..1820f5cfd8 100644
--- a/documentation/migration-guides/migration-general.rst
+++ b/documentation/migration-guides/migration-general.rst
@@ -1,3 +1,5 @@
+.. SPDX-License-Identifier: CC-BY-SA-2.0-UK
+
Introduction
============
@@ -70,3 +72,37 @@ any new Yocto Project release.
bitbake-layers show-appends
+.. _migration-general-buildhistory:
+
+- *Checking Image / SDK Changes*:
+
+ The :ref:`ref-classes-buildhistory` class can be used
+ if you wish to check the impact of changes to images / SDKs across
+ the migration (e.g. added/removed packages, added/removed files, size
+ changes etc.). To do this, follow these steps:
+
+ #. Enable :ref:`ref-classes-buildhistory` before the migration
+
+ #. Run a pre-migration build
+
+ #. Capture the :ref:`ref-classes-buildhistory` output (as
+ specified by :term:`BUILDHISTORY_DIR`) and ensure it is preserved for
+ subsequent builds. How you would do this depends on how you are running
+ your builds - if you are doing this all on one workstation in the same
+ :term:`Build Directory` you may not need to do anything other than not
+ deleting the :ref:`ref-classes-buildhistory` output
+ directory. For builds in a pipeline it may be more complicated.
+
+ #. Set a tag in the :ref:`ref-classes-buildhistory` output (which is a git repository) before
+ migration, to make the commit from the pre-migration build easy to find
+ as you may end up running multiple builds during the migration.
+
+ #. Perform the migration
+
+ #. Run a build
+
+ #. Check the output changes between the previously set tag and HEAD in the
+ :ref:`ref-classes-buildhistory` output using ``git diff`` or ``buildhistory-diff``.
+
+ For more information on using :ref:`ref-classes-buildhistory`, see
+ :ref:`dev-manual/build-quality:maintaining build output quality`.
diff --git a/documentation/migration-guides/release-3.4.rst b/documentation/migration-guides/release-3.4.rst
index 81476c4adb..043fdd1c9f 100644
--- a/documentation/migration-guides/release-3.4.rst
+++ b/documentation/migration-guides/release-3.4.rst
@@ -1,3 +1,5 @@
+.. SPDX-License-Identifier: CC-BY-SA-2.0-UK
+
Release 3.4 (honister)
======================
@@ -7,4 +9,6 @@ Release 3.4 (honister)
release-notes-3.4
release-notes-3.4.1
release-notes-3.4.2
+ release-notes-3.4.3
+ release-notes-3.4.4
diff --git a/documentation/migration-guides/release-4.0.rst b/documentation/migration-guides/release-4.0.rst
index 7062f9d241..685799e268 100644
--- a/documentation/migration-guides/release-4.0.rst
+++ b/documentation/migration-guides/release-4.0.rst
@@ -1,3 +1,5 @@
+.. SPDX-License-Identifier: CC-BY-SA-2.0-UK
+
Release 4.0 (kirkstone)
=======================
@@ -5,3 +7,20 @@ Release 4.0 (kirkstone)
migration-4.0
release-notes-4.0
+ release-notes-4.0.1
+ release-notes-4.0.2
+ release-notes-4.0.3
+ release-notes-4.0.4
+ release-notes-4.0.5
+ release-notes-4.0.6
+ release-notes-4.0.7
+ release-notes-4.0.8
+ release-notes-4.0.9
+ release-notes-4.0.10
+ release-notes-4.0.11
+ release-notes-4.0.12
+ release-notes-4.0.13
+ release-notes-4.0.14
+ release-notes-4.0.15
+ release-notes-4.0.16
+ release-notes-4.0.17
diff --git a/documentation/migration-guides/release-4.1.rst b/documentation/migration-guides/release-4.1.rst
new file mode 100644
index 0000000000..7d1ce95e25
--- /dev/null
+++ b/documentation/migration-guides/release-4.1.rst
@@ -0,0 +1,13 @@
+.. SPDX-License-Identifier: CC-BY-SA-2.0-UK
+
+Release 4.1 (langdale)
+======================
+
+.. toctree::
+
+ migration-4.1
+ release-notes-4.1
+ release-notes-4.1.1
+ release-notes-4.1.2
+ release-notes-4.1.3
+ release-notes-4.1.4
diff --git a/documentation/migration-guides/release-4.2.rst b/documentation/migration-guides/release-4.2.rst
new file mode 100644
index 0000000000..5ef2cc6657
--- /dev/null
+++ b/documentation/migration-guides/release-4.2.rst
@@ -0,0 +1,13 @@
+.. SPDX-License-Identifier: CC-BY-SA-2.0-UK
+
+Release 4.2 (mickledore)
+========================
+
+.. toctree::
+
+ migration-4.2
+ release-notes-4.2
+ release-notes-4.2.1
+ release-notes-4.2.2
+ release-notes-4.2.3
+ release-notes-4.2.4
diff --git a/documentation/migration-guides/release-4.3.rst b/documentation/migration-guides/release-4.3.rst
new file mode 100644
index 0000000000..1f07d229a7
--- /dev/null
+++ b/documentation/migration-guides/release-4.3.rst
@@ -0,0 +1,13 @@
+.. SPDX-License-Identifier: CC-BY-SA-2.0-UK
+
+Release 4.3 (nanbield)
+========================
+
+.. toctree::
+
+ migration-4.3
+ release-notes-4.3
+ release-notes-4.3.1
+ release-notes-4.3.2
+ release-notes-4.3.3
+ release-notes-4.3.4
diff --git a/documentation/migration-guides/release-5.0.rst b/documentation/migration-guides/release-5.0.rst
new file mode 100644
index 0000000000..bd19b707f6
--- /dev/null
+++ b/documentation/migration-guides/release-5.0.rst
@@ -0,0 +1,9 @@
+.. SPDX-License-Identifier: CC-BY-SA-2.0-UK
+
+Release 5.0 (scarthgap)
+=======================
+
+.. toctree::
+
+ migration-5.0
+ release-notes-5.0
diff --git a/documentation/migration-guides/release-notes-3.4.1.rst b/documentation/migration-guides/release-notes-3.4.1.rst
index 0503f29b2c..097c249a90 100644
--- a/documentation/migration-guides/release-notes-3.4.1.rst
+++ b/documentation/migration-guides/release-notes-3.4.1.rst
@@ -1,3 +1,5 @@
+.. SPDX-License-Identifier: CC-BY-SA-2.0-UK
+
Release notes for 3.4.1 (honister)
----------------------------------
@@ -53,9 +55,9 @@ Fixes in 3.4.1
- ca-certificates: update 20210119 -> 20211016
- classes/populate_sdk_base: Add setscene tasks
- conf: update for release 3.4
-- convert-srcuri.py: use regex to check space in SRC_URI
+- convert-srcuri.py: use regex to check space in :term:`SRC_URI`
- create-spdx: Fix key errors in do_create_runtime_spdx
-- create-spdx: Protect against None from LICENSE_PATH
+- create-spdx: Protect against None from :term:`LICENSE_PATH`
- create-spdx: Set the Organization field via a variable
- create-spdx: add create_annotation function
- create-spdx: cross recipes are native also
@@ -82,18 +84,18 @@ Fixes in 3.4.1
- insane.bbclass: Add a check for directories that are expected to be empty
- kernel-devsrc: Add vdso.lds and other build files for riscv64 as well
- libnewt: Use python3targetconfig to fix reproducibility issue
-- libpcre/libpcre2: correct SRC_URI
-- libx11-compose-data: Update LICENSE to better reflect reality
-- libx11: Update LICENSE to better reflect reality
+- libpcre/libpcre2: correct :term:`SRC_URI`
+- libx11-compose-data: Update :term:`LICENSE` to better reflect reality
+- libx11: Update :term:`LICENSE` to better reflect reality
- libxml2: Use python3targetconfig to fix reproducibility issue
- linunistring: Add missing gperf-native dependency
- linux-firmware: upgrade to 20211027
-- linux-yocto-dev: Ensure DEPENDS matches recent 5.14 kernel changes
+- linux-yocto-dev: Ensure :term:`DEPENDS` matches recent 5.14 kernel changes
- linux-yocto-rt/5.10: update to -rt54
- linux-yocto/5.10: update to v5.10.78
- linux-yocto/5.14: common-pc: enable CONFIG_ATA_PIIX as built-in
- linux-yocto/5.14: update to v5.14.17
-- linux-yocto: add libmpc-native to DEPENDS
+- linux-yocto: add libmpc-native to :term:`DEPENDS`
- lttng-tools: replace ad hoc ptest fixup with upstream fixes
- manuals: releases.rst: move gatesgarth to outdated releases section
- mesa: Enable svga for x86 only
@@ -126,13 +128,13 @@ Fixes in 3.4.1
- qemu.inc: Remove empty egg-info directories before running meson
- recipes: Update github.com urls to use https
- ref-manual: Update how to set a useradd password
-- ref-manual: document "reproducible_build" class and SOURCE_DATE_EPOCH
+- ref-manual: document "reproducible_build" class and :term:`SOURCE_DATE_EPOCH`
- ref-manual: document BUILD_REPRODUCIBLE_BINARIES
-- ref-manual: document TOOLCHAIN_HOST_TASK_ESDK
+- ref-manual: document :term:`TOOLCHAIN_HOST_TASK_ESDK`
- ref-manual: remove meta class
- ref-manual: update system requirements
- releases.rst: fix release number for 3.3.3
-- scripts/convert-srcuri: Update SRC_URI conversion script to handle github url changes
+- scripts/convert-srcuri: Update :term:`SRC_URI` conversion script to handle github url changes
- scripts/lib/wic/help.py: Update Fedora Kickstart URLs
- scripts/oe-package-browser: Fix after overrides change
- scripts/oe-package-browser: Handle no packages being built
@@ -153,7 +155,7 @@ Fixes in 3.4.1
- waffle: convert to git, website is down
- wayland: Fix wayland-tools packaging
- wireless-regdb: upgrade 2021.07.14 -> 2021.08.28
-- wpa-supplicant: Match package override to PACKAGES for pkg_postinst
+- wpa-supplicant: Match package override to :term:`PACKAGES` for pkg_postinst
Contributors to 3.4.1
~~~~~~~~~~~~~~~~~~~~~
@@ -198,7 +200,7 @@ Repositories / Downloads for 3.4.1
poky
-- Repository Location: https://git.yoctoproject.org/poky/
+- Repository Location: :yocto_git:`/poky`
- Branch: :yocto_git:`honister </poky/log/?h=honister>`
- Tag: :yocto_git:`yocto-3.4.1 </poky/tag/?h=yocto-3.4.1>`
- Git Revision: :yocto_git:`b53230c08d9f02ecaf35b4f0b70512abbf10ae11 </poky/commit/?id=b53230c08d9f02ecaf35b4f0b70512abbf10ae11>`
@@ -210,7 +212,7 @@ poky
meta-mingw
-- Repository Location: https://git.yoctoproject.org/meta-mingw
+- Repository Location: :yocto_git:`/meta-mingw`
- Branch: :yocto_git:`honister </meta-mingw/log/?h=honister>`
- Tag: :yocto_git:`yocto-3.4.1 </meta-mingw/tag/?h=yocto-3.4.1>`
- Git Revision: :yocto_git:`f5d761cbd5c957e4405c5d40b0c236d263c916a8 </meta-mingw/commit/?id=f5d761cbd5c957e4405c5d40b0c236d263c916a8>`
@@ -222,7 +224,7 @@ meta-mingw
meta-gplv2
-- Repository Location: https://git.yoctoproject.org/meta-gplv2
+- Repository Location: :yocto_git:`/meta-gplv2`
- Branch: :yocto_git:`honister </meta-gplv2/log/?h=honister>`
- Tag: :yocto_git:`yocto-3.4.1 </meta-gplv2/tag/?h=yocto-3.4.1>`
- Git Revision: :yocto_git:`f04e4369bf9dd3385165281b9fa2ed1043b0e400 </meta-gplv2/commit/?id=f04e4369bf9dd3385165281b9fa2ed1043b0e400>`
@@ -246,7 +248,7 @@ bitbake
yocto-docs
-- Repository Location: https://git.yoctoproject.org/yocto-docs
+- Repository Location: :yocto_git:`/yocto-docs`
- Branch: :yocto_git:`honister </yocto-docs/log/?h=honister>`
- Tag: :yocto_git:`yocto-3.4.1 </yocto-docs/tag/?h=yocto-3.4.1>`
- Git Revision: :yocto_git:`b250eda5a0beba8acc9641c55a5b0e30594b5178 </yocto-docs/commit/?b250eda5a0beba8acc9641c55a5b0e30594b5178>`
diff --git a/documentation/migration-guides/release-notes-3.4.2.rst b/documentation/migration-guides/release-notes-3.4.2.rst
index 23c409397e..5ff42d3900 100644
--- a/documentation/migration-guides/release-notes-3.4.2.rst
+++ b/documentation/migration-guides/release-notes-3.4.2.rst
@@ -1,3 +1,5 @@
+.. SPDX-License-Identifier: CC-BY-SA-2.0-UK
+
Release notes for 3.4.2 (honister)
----------------------------------
@@ -15,7 +17,7 @@ Security Fixes in 3.4.2
- libsndfile1: fix :cve:`2021-4156`
- xserver-xorg: whitelist two CVEs
- grub2: fix :cve:`2021-3981`
-- xserver-xorg: update CVE_PRODUCT
+- xserver-xorg: update :term:`CVE_PRODUCT`
- binutils: :cve:`2021-42574`
- gcc: Fix :cve:`2021-42574`
- gcc: Fix :cve:`2021-35465`
@@ -38,7 +40,7 @@ Fixes in 3.4.2
- vim: upgrade to patch 4269
- vim: update to include latest CVE fixes
- expat: upgrade to 2.4.4
-- libusb1: correct SRC_URI
+- libusb1: correct :term:`SRC_URI`
- yocto-check-layer: add debug output for the layers that were found
- linux-firmware: Add CLM blob to linux-firmware-bcm4373 package
- linux-yocto/5.10: update to v5.10.93
@@ -49,7 +51,7 @@ Fixes in 3.4.2
- kernel: introduce python3-dtschema-wrapper
- vim: upgrade to 8.2 patch 3752
- bootchart2: Add missing python3-math dependency
-- socat: update SRC_URI
+- socat: update :term:`SRC_URI`
- pigz: fix one failure of command "unpigz -l"
- linux-yocto/5.14: update genericx86* machines to v5.14.21
- linux-yocto/5.10: update genericx86* machines to v5.10.87
@@ -69,7 +71,7 @@ Fixes in 3.4.2
- rpm: remove tmp folder created during install
- package_manager: ipk: Fix host manifest generation
- bitbake: utils: Update to use exec_module() instead of load_module()
-- linux-yocto: add libmpc-native to DEPENDS
+- linux-yocto: add libmpc-native to :term:`DEPENDS`
- ref-manual: fix patch documentation
- bitbake: tests/fetch: Drop gnu urls from wget connectivity test
- bitbake: fetch: npm: Use temporary file for empty user config
@@ -112,7 +114,7 @@ Fixes in 3.4.2
- classes/crate-fetch: Ensure crate fetcher is available
- rootfs-postcommands: update systemd_create_users
- classes/meson: Add optional rust definitions
-- rust-cross: Replace TARGET_ARCH with TUNE_PKGARCH
+- rust-cross: Replace :term:`TARGET_ARCH` with :term:`TUNE_PKGARCH`
- maintainers.inc: fix up rust-cross entry
- rust-cross: Fix directory not deleted for race glibc vs. musl
- wic: use shutil.which
@@ -167,16 +169,16 @@ Contributors to 3.4.2
- Vyacheslav Yurkov
- Yongxin Liu
- pgowda
-- wangmy
+- Wang Mingyu
Repositories / Downloads for 3.4.2
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
poky
-- Repository Location: https://git.yoctoproject.org/poky/
+- Repository Location: :yocto_git:`/poky`
- Branch: :yocto_git:`honister </poky/log/?h=honister>`
-- Tag: `yocto-3.4.2 <https://git.yoctoproject.org/poky/tag/?h=yocto-3.4.2>`__
+- Tag: :yocto_git:`yocto-3.4.2 </poky/tag/?h=yocto-3.4.2>`
- Git Revision: :yocto_git:`e0ab08bb6a32916b457d221021e7f402ffa36b1a </poky/commit/?id=e0ab08bb6a32916b457d221021e7f402ffa36b1a>`
- Release Artefact: poky-e0ab08bb6a32916b457d221021e7f402ffa36b1a
- sha: 8580dc5067ee426fe347a0d0f7a74c29ba539120bbe8438332339a9c8bce00fd
@@ -198,7 +200,7 @@ openembedded-core
meta-mingw
-- Repository Location: https://git.yoctoproject.org/meta-mingw
+- Repository Location: :yocto_git:`meta-mingw`
- Branch: :yocto_git:`honister </meta-mingw/log/?h=honister>`
- Tag: :yocto_git:`yocto-3.4.2 </meta-mingw/tag/?h=yocto-3.4.2>`
- Git Revision: :yocto_git:`f5d761cbd5c957e4405c5d40b0c236d263c916a8 </meta-mingw/commit/?id=f5d761cbd5c957e4405c5d40b0c236d263c916a8>`
@@ -210,7 +212,7 @@ meta-mingw
meta-gplv2
-- Repository Location: https://git.yoctoproject.org/meta-gplv2
+- Repository Location: :yocto_git:`/meta-gplv2`
- Branch: :yocto_git:`honister </meta-gplv2/log/?h=honister>`
- Tag: :yocto_git:`yocto-3.4.2 </meta-gplv2/tag/?h=yocto-3.4.2>`
- Git Revision: :yocto_git:`f04e4369bf9dd3385165281b9fa2ed1043b0e400 </meta-gplv2/commit/?id=f04e4369bf9dd3385165281b9fa2ed1043b0e400>`
@@ -234,7 +236,7 @@ bitbake
yocto-docs
-- Repository Location: https://git.yoctoproject.org/yocto-docs
+- Repository Location: :yocto_git:`/yocto-docs`
- Branch: :yocto_git:`honister </yocto-docs/log/?h=honister>`
- Tag: :yocto_git:`yocto-3.4.2 </yocto-docs/tag/?h=yocto-3.4.2>`
- Git Revision: :yocto_git:`3061d3d62054a5c3b9e16bfce4bcd186fa7a23d2` </yocto-docs/commit/?3061d3d62054a5c3b9e16bfce4bcd186fa7a23d2>`
diff --git a/documentation/migration-guides/release-notes-3.4.3.rst b/documentation/migration-guides/release-notes-3.4.3.rst
new file mode 100644
index 0000000000..2af802307d
--- /dev/null
+++ b/documentation/migration-guides/release-notes-3.4.3.rst
@@ -0,0 +1,199 @@
+.. SPDX-License-Identifier: CC-BY-SA-2.0-UK
+
+Release notes for 3.4.3 (honister)
+----------------------------------
+
+Security Fixes in 3.4.3
+~~~~~~~~~~~~~~~~~~~~~~~
+
+- ghostscript: fix :cve:`2021-3781`
+- ghostscript: fix :cve:`2021-45949`
+- tiff: Add backports for two CVEs from upstream (:cve:`2022-0561` & :cve:`2022-0562`)
+- gcc : Fix :cve:`2021-46195`
+- virglrenderer: fix `CVE-2022-0135 <https://security-tracker.debian.org/tracker/CVE-2022-0135>`__ and `CVE-2022-0175 <https://security-tracker.debian.org/tracker/CVE-2022-0175>`__
+- binutils: Add fix for :cve:`2021-45078`
+
+
+Fixes in 3.4.3
+~~~~~~~~~~~~~~
+
+- Revert "cve-check: add lockfile to task"
+- asciidoc: update git repository
+- bitbake: build: Tweak exception handling for setscene tasks
+- bitbake: contrib: Fix hash server Dockerfile dependencies
+- bitbake: cooker: Improve parsing failure from handled exception usability
+- bitbake: data_smart: Fix overrides file/line message additions
+- bitbake: fetch2: ssh: username and password are optional
+- bitbake: tests/fetch: Handle upstream master -> main branch change
+- bitbake: utils: Ensure shell function failure in python logging is correct
+- build-appliance-image: Update to honister head revision
+- build-appliance-image: Update to honister head revision
+- coreutils: remove obsolete ignored CVE list
+- crate-fetch: fix setscene failures
+- cups: Add --with-dbusdir to :term:`EXTRA_OECONF` for deterministic build
+- cve-check: create directory of CVE_CHECK_MANIFEST before copy
+- cve-check: get_cve_info should open the database read-only
+- default-distrovars.inc: Switch connectivity check to a yoctoproject.org page
+- depmodwrapper-cross: add config directory option
+- devtool: deploy-target: Remove stripped binaries in pseudo context
+- devtool: explicitly set main or master branches in upgrades when available
+- docs: fix hardcoded link warning messages
+- documentation: conf.py: update for 3.4.2
+- documentation: prepare for 3.4.3 release
+- expat: Upgrade to 2.4.7
+- gcc-target: fix glob to remove gcc-<version> binary
+- gcsections: add nativesdk-cairo to exclude list
+- go: update to 1.16.15
+- gst-devtools: 1.18.5 -> 1.18.6
+- gst-examples: 1.18.5 -> 1.18.6
+- gstreamer1.0-libav: 1.18.5 -> 1.18.6
+- gstreamer1.0-omx: 1.18.5 -> 1.18.6
+- gstreamer1.0-plugins-bad: 1.18.5 -> 1.18.6
+- gstreamer1.0-plugins-base: 1.18.5 -> 1.18.6
+- gstreamer1.0-plugins-good: 1.18.5 -> 1.18.6
+- gstreamer1.0-plugins-ugly: 1.18.5 -> 1.18.6
+- gstreamer1.0-python: 1.18.5 -> 1.18.6
+- gstreamer1.0-rtsp-server: 1.18.5 -> 1.18.6
+- gstreamer1.0-vaapi: 1.18.5 -> 1.18.6
+- gstreamer1.0: 1.18.5 -> 1.18.6
+- harfbuzz: upgrade 2.9.0 -> 2.9.1
+- initramfs-framework: unmount automounts before switch_root
+- kernel-devsrc: do not copy Module.symvers file during install
+- libarchive : update to 3.5.3
+- libpcap: Disable DPDK explicitly
+- libxml-parser-perl: Add missing :term:`RDEPENDS`
+- linux-firmware: upgrade 20211216 -> 20220209
+- linux-yocto/5.10: Fix ramoops/ftrace
+- linux-yocto/5.10: features/zram: remove CONFIG_ZRAM_DEF_COMP
+- linux-yocto/5.10: fix dssall build error with binutils 2.3.8
+- linux-yocto/5.10: ppc/riscv: fix build with binutils 2.3.8
+- linux-yocto/5.10: update genericx86* machines to v5.10.99
+- linux-yocto/5.10: update to v5.10.103
+- mc: fix build if ncurses have been configured without wide characters
+- oeqa/buildtools: Switch to our webserver instead of example.com
+- patch.py: Prevent git repo reinitialization
+- perl: Improve and update module RPDEPENDS
+- poky.conf: bump version for 3.4.3 honister release
+- qemuboot: Fix build error if UNINATIVE_LOADER is unset
+- quilt: Disable external sendmail for deterministic build
+- recipetool: Fix circular reference in :term:`SRC_URI`
+- releases: update to include 3.3.5
+- releases: update to include 3.4.2
+- rootfs-postcommands: amend systemd_create_users add user to group check
+- ruby: update 3.0.2 -> 3.0.3
+- scripts/runqemu-ifdown: Don't treat the last iptables command as special
+- sdk: fix search for dynamic loader
+- selftest: recipetool: Correct the URI for socat
+- sstate: inside the threadedpool don't write to the shared localdata
+- uninative: Upgrade to 3.5
+- util-linux: upgrade to 2.37.4
+- vim: Update to 8.2.4524 for further CVE fixes
+- wic: Use custom kernel path if provided
+- wireless-regdb: upgrade 2021.08.28 -> 2022.02.18
+- zip: modify when match.S is built
+
+Contributors to 3.4.3
+~~~~~~~~~~~~~~~~~~~~~
+
+- Alexander Kanavin
+- Anuj Mittal
+- Bill Pittman
+- Bruce Ashfield
+- Chee Yang Lee
+- Christian Eggers
+- Daniel Gomez
+- Daniel Müller
+- Daniel Wagenknecht
+- Florian Amstutz
+- Joe Slater
+- Jose Quaresma
+- Justin Bronder
+- Lee Chee Yang
+- Michael Halstead
+- Michael Opdenacker
+- Oleksandr Ocheretnyi
+- Oleksandr Suvorov
+- Pavel Zhukov
+- Peter Kjellerstedt
+- Richard Purdie
+- Robert Yang
+- Ross Burton
+- Sakib Sajal
+- Saul Wold
+- Sean Anderson
+- Stefan Herbrechtsmeier
+- Tamizharasan Kumar
+- Tean Cunningham
+- Zoltán Böszörményi
+- pgowda
+- Wang Mingyu
+
+Repositories / Downloads for 3.4.3
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+poky
+
+- Repository Location: :yocto_git:`/poky`
+- Branch: :yocto_git:`honister </poky/log/?h=honister>`
+- Tag: :yocto_git:`yocto-3.4.3 </poky/tag/?h=yocto-3.4.3>`
+- Git Revision: :yocto_git:`ee68ae307fd951b9de6b31dc6713ea29186b7749 </poky/commit/?id=ee68ae307fd951b9de6b31dc6713ea29186b7749>`
+- Release Artefact: poky-ee68ae307fd951b9de6b31dc6713ea29186b7749
+- sha: 92c3d73c3e74f0e1d5c2ab2836ce3a3accbe47772cea70df3755845e0db1379b
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-3.4.3/poky-ee68ae307fd951b9de6b31dc6713ea29186b7749.tar.bz2,
+ http://mirrors.kernel.org/yocto/yocto/yocto-3.4.3/poky-ee68ae307fd951b9de6b31dc6713ea29186b7749.tar.bz2
+
+openembedded-core
+
+- Repository Location: :oe_git:`/openembedded-core`
+- Branch: :oe_git:`honister </openembedded-core/log/?h=honister>`
+- Tag: :oe_git:`yocto-3.4.3 </openembedded-core/tag/?h=yocto-3.4.3>`
+- Git Revision: :oe_git:`ebca8f3ac9372b7ebb3d39e8f7f930b63b481448 </openembedded-core/commit/?id=ebca8f3ac9372b7ebb3d39e8f7f930b63b481448>`
+- Release Artefact: oecore-ebca8f3ac9372b7ebb3d39e8f7f930b63b481448
+- sha: f28e503f6f6c0bcd9192dbd528f8e3c7bcea504c089117e0094d9a4f315f4b9f
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-3.4.3/oecore-ebca8f3ac9372b7ebb3d39e8f7f930b63b481448.tar.bz2,
+ http://mirrors.kernel.org/yocto/yocto/yocto-3.4.3/oecore-ebca8f3ac9372b7ebb3d39e8f7f930b63b481448.tar.bz2
+
+meta-mingw
+
+- Repository Location: :yocto_git:`/meta-mingw`
+- Branch: :yocto_git:`honister </meta-mingw/log/?h=honister>`
+- Tag: :yocto_git:`yocto-3.4.3 </meta-mingw/tag/?h=yocto-3.4.3>`
+- Git Revision: :yocto_git:`f5d761cbd5c957e4405c5d40b0c236d263c916a8 </meta-mingw/commit/?id=f5d761cbd5c957e4405c5d40b0c236d263c916a8>`
+- Release Artefact: meta-mingw-f5d761cbd5c957e4405c5d40b0c236d263c916a8
+- sha: d4305d638ef80948584526c8ca386a8cf77933dffb8a3b8da98d26a5c40fcc11
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-3.4.3/meta-mingw-f5d761cbd5c957e4405c5d40b0c236d263c916a8.tar.bz2,
+ http://mirrors.kernel.org/yocto/yocto/yocto-3.4.3/meta-mingw-f5d761cbd5c957e4405c5d40b0c236d263c916a8.tar.bz2
+
+meta-gplv2
+
+- Repository Location: :yocto_git:`/meta-gplv2`
+- Branch: :yocto_git:`honister </meta-gplv2/log/?h=honister>`
+- Tag: :yocto_git:`yocto-3.4.3 </meta-gplv2/tag/?h=yocto-3.4.3>`
+- Git Revision: :yocto_git:`f04e4369bf9dd3385165281b9fa2ed1043b0e400 </meta-gplv2/commit/?id=f04e4369bf9dd3385165281b9fa2ed1043b0e400>`
+- Release Artefact: meta-gplv2-f04e4369bf9dd3385165281b9fa2ed1043b0e400
+- sha: ef8e2b1ec1fb43dbee4ff6990ac736315c7bc2d8c8e79249e1d337558657d3fe
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-3.4.3/meta-gplv2-f04e4369bf9dd3385165281b9fa2ed1043b0e400.tar.bz2,
+ http://mirrors.kernel.org/yocto/yocto/yocto-3.4.3/meta-gplv2-f04e4369bf9dd3385165281b9fa2ed1043b0e400.tar.bz2
+
+bitbake
+
+- Repository Location: :oe_git:`/bitbake`
+- Branch: :oe_git:`1.52 </bitbake/log/?h=1.52>`
+- Tag: :oe_git:`yocto-3.4.3 </bitbake/tag/?h=yocto-3.4.3>`
+- Git Revision: :oe_git:`43dcb2b2a2b95a5c959be57bca94fb7190ea6257 </bitbake/commit/?id=43dcb2b2a2b95a5c959be57bca94fb7190ea6257>`
+- Release Artefact: bitbake-43dcb2b2a2b95a5c959be57bca94fb7190ea6257
+- sha: 92497ff97fed81dcc6d3e202969fb63ca983a8f5d9d91cafc6aee88312f79cf9
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-3.4.3/bitbake-43dcb2b2a2b95a5c959be57bca94fb7190ea6257.tar.bz2,
+ http://mirrors.kernel.org/yocto/yocto/yocto-3.4.3/bitbake-43dcb2b2a2b95a5c959be57bca94fb7190ea6257.tar.bz2
+
+yocto-docs
+
+- Repository Location: :yocto_git:`/yocto-docs`
+- Branch: :yocto_git:`honister </yocto-docs/log/?h=honister>`
+- Tag: :yocto_git:`yocto-3.4.3 </yocto-docs/tag/?h=yocto-3.4.3>`
+- Git Revision: :yocto_git:`15f46f97d9cad558c19fc1dc19cfbe3720271d04 </yocto-docs/commit/?15f46f97d9cad558c19fc1dc19cfbe3720271d04>`
diff --git a/documentation/migration-guides/release-notes-3.4.4.rst b/documentation/migration-guides/release-notes-3.4.4.rst
new file mode 100644
index 0000000000..0bf9a16209
--- /dev/null
+++ b/documentation/migration-guides/release-notes-3.4.4.rst
@@ -0,0 +1,157 @@
+.. SPDX-License-Identifier: CC-BY-SA-2.0-UK
+
+Release notes for 3.4.4 (honister)
+----------------------------------
+
+Security Fixes in 3.4.4
+~~~~~~~~~~~~~~~~~~~~~~~
+
+- tiff: fix :cve:`2022-0865`, :cve:`2022-0891`, :cve:`2022-0907`, :cve:`2022-0908`, :cve:`2022-0909` and :cve:`2022-0924`
+- xz: fix `CVE-2022-1271 <https://security-tracker.debian.org/tracker/CVE-2022-1271>`__
+- unzip: fix `CVE-2021-4217 <https://security-tracker.debian.org/tracker/CVE-2021-4217>`__
+- zlib: fix :cve:`2018-25032`
+- grub: ignore :cve:`2021-46705`
+
+Fixes in 3.4.4
+~~~~~~~~~~~~~~
+
+- alsa-tools: Ensure we install correctly
+- bitbake.conf: mark all directories as safe for git to read
+- bitbake: knotty: display active tasks when printing keepAlive() message
+- bitbake: knotty: reduce keep-alive timeout from 5000s (83 minutes) to 10 minutes
+- bitbake: server/process: Disable gc around critical section
+- bitbake: server/xmlrpcserver: Add missing xmlrpcclient import
+- bitbake: toaster: Fix :term:`IMAGE_INSTALL` issues with _append vs :append
+- bitbake: toaster: fixtures replace gatesgarth
+- build-appliance-image: Update to honister head revision
+- conf.py/poky.yaml: Move version information to poky.yaml and read in conf.py
+- conf/machine: fix QEMU x86 sound options
+- devupstream: fix handling of :term:`SRC_URI`
+- documentation: update for 3.4.4 release
+- externalsrc/devtool: Fix to work with fixed export funcition flags handling
+- gmp: add missing COPYINGv3
+- gnu-config: update :term:`SRC_URI`
+- libxml2: fix CVE-2022-23308 regression
+- libxml2: move to gitlab.gnome.org
+- libxml2: update to 2.9.13
+- libxshmfence: Correct :term:`LICENSE` to HPND
+- license_image.bbclass: close package.manifest file
+- linux-firmware: correct license for ar3k firmware
+- linux-firmware: upgrade 20220310 -> 20220411
+- linux-yocto-rt/5.10: update to -rt61
+- linux-yocto/5.10: cfg/debug: add configs for kcsan
+- linux-yocto/5.10: split vtpm for more granular inclusion
+- linux-yocto/5.10: update to v5.10.109
+- linux-yocto: nohz_full boot arg fix
+- oe-pkgdata-util: Adapt to the new variable override syntax
+- oeqa/selftest/devtool: ensure Git username is set before upgrade tests
+- poky.conf: bump version for 3.4.4 release
+- pseudo: Add patch to workaround paths with crazy lengths
+- pseudo: Fix handling of absolute links
+- sanity: Add warning for local hasheqiv server with remote sstate mirrors
+- scripts/runqemu: Fix memory limits for qemux86-64
+- shadow-native: Simplify and fix syslog disable patch
+- tiff: Add marker for CVE-2022-1056 being fixed
+- toaster: Fix broken overrides usage
+- u-boot: Inherit pkgconfig
+- uninative: Upgrade to 3.6 with gcc 12 support
+- vim: Upgrade 8.2.4524 -> 8.2.4681
+- virglrenderer: update :term:`SRC_URI`
+- webkitgtk: update to 2.32.4
+- wireless-regdb: upgrade 2022.02.18 -> 2022.04.08
+
+Known Issues
+~~~~~~~~~~~~
+
+There were a couple of known autobuilder intermittent bugs that occurred during release testing but these are not regressions in the release.
+
+Contributors to 3.4.4
+~~~~~~~~~~~~~~~~~~~~~
+
+- Alexandre Belloni
+- Anuj Mittal
+- Bruce Ashfield
+- Chee Yang Lee
+- Dmitry Baryshkov
+- Joe Slater
+- Konrad Weihmann
+- Martin Jansa
+- Michael Opdenacker
+- Minjae Kim
+- Peter Kjellerstedt
+- Ralph Siemsen
+- Richard Purdie
+- Ross Burton
+- Tim Orling
+- Wang Mingyu
+- Zheng Ruoqin
+
+Repositories / Downloads for 3.4.4
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+poky
+
+- Repository Location: :yocto_git:`/poky`
+- Branch: :yocto_git:`honister </poky/log/?h=honister>`
+- Tag: :yocto_git:`yocto-3.4.4 </poky/tag/?h=yocto-3.4.4>`
+- Git Revision: :yocto_git:`780eeec8851950ee6ac07a2a398ba937206bd2e4 </poky/commit/?id=780eeec8851950ee6ac07a2a398ba937206bd2e4>`
+- Release Artefact: poky-780eeec8851950ee6ac07a2a398ba937206bd2e4
+- sha: 09558927064454ec2492da376156b716d9fd14aae57196435d742db7bfdb4b95
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-3.4.4/poky-780eeec8851950ee6ac07a2a398ba937206bd2e4.tar.bz2,
+ http://mirrors.kernel.org/yocto/yocto/yocto-3.4.4/poky-780eeec8851950ee6ac07a2a398ba937206bd2e4.tar.bz2
+
+openembedded-core
+
+- Repository Location: :oe_git:`/openembedded-core`
+- Branch: :oe_git:`honister </openembedded-core/log/?h=honister>`
+- Tag: :oe_git:`yocto-3.4.4 </openembedded-core/tag/?h=yocto-3.4.4>`
+- Git Revision: :oe_git:`1a6f5e27249afb6fb4d47c523b62b5dd2482a69d </openembedded-core/commit/?id=1a6f5e27249afb6fb4d47c523b62b5dd2482a69d>`
+- Release Artefact: oecore-1a6f5e27249afb6fb4d47c523b62b5dd2482a69d
+- sha: b8354ca457756384139a579b9e51f1ba854013c99add90c0c4c6ef68421fede5
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-3.4.4/oecore-1a6f5e27249afb6fb4d47c523b62b5dd2482a69d.tar.bz2,
+ http://mirrors.kernel.org/yocto/yocto/yocto-3.4.4/oecore-1a6f5e27249afb6fb4d47c523b62b5dd2482a69d.tar.bz2
+
+meta-mingw
+
+- Repository Location: :yocto_git:`/meta-mingw`
+- Branch: :yocto_git:`honister </meta-mingw/log/?h=honister>`
+- Tag: :yocto_git:`yocto-3.4.4 </meta-mingw/tag/?h=yocto-3.4.4>`
+- Git Revision: :yocto_git:`f5d761cbd5c957e4405c5d40b0c236d263c916a8 </meta-mingw/commit/?id=f5d761cbd5c957e4405c5d40b0c236d263c916a8>`
+- Release Artefact: meta-mingw-f5d761cbd5c957e4405c5d40b0c236d263c916a8
+- sha: d4305d638ef80948584526c8ca386a8cf77933dffb8a3b8da98d26a5c40fcc11
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-3.4.4/meta-mingw-f5d761cbd5c957e4405c5d40b0c236d263c916a8.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-3.4.4/meta-mingw-f5d761cbd5c957e4405c5d40b0c236d263c916a8.tar.bz2
+
+meta-gplv2
+
+- Repository Location: :yocto_git:`/meta-gplv2`
+- Branch: :yocto_git:`honister </meta-gplv2/log/?h=honister>`
+- Tag: :yocto_git:`yocto-3.4.4 </meta-gplv2/tag/?h=yocto-3.4.4>`
+- Git Revision: :yocto_git:`f04e4369bf9dd3385165281b9fa2ed1043b0e400 </meta-gplv2/commit/?id=f04e4369bf9dd3385165281b9fa2ed1043b0e400>`
+- Release Artefact: meta-gplv2-f04e4369bf9dd3385165281b9fa2ed1043b0e400
+- sha: ef8e2b1ec1fb43dbee4ff6990ac736315c7bc2d8c8e79249e1d337558657d3fe
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-3.4.4/meta-gplv2-f04e4369bf9dd3385165281b9fa2ed1043b0e400.tar.bz2,
+ http://mirrors.kernel.org/yocto/yocto/yocto-3.4.4/meta-gplv2-f04e4369bf9dd3385165281b9fa2ed1043b0e400.tar.bz2
+
+bitbake
+
+- Repository Location: :oe_git:`/bitbake`
+- Branch: :oe_git:`1.52 </bitbake/log/?h=1.52>`
+- Tag: :oe_git:`yocto-3.4.4 </bitbake/tag/?h=yocto-3.4.3>`
+- Git Revision: :oe_git:`c2d8f9b2137bd4a98eb0f51519493131773e7517 </bitbake/commit/?id=c2d8f9b2137bd4a98eb0f51519493131773e7517>`
+- Release Artefact: bitbake-c2d8f9b2137bd4a98eb0f51519493131773e7517
+- sha: a8b6217f2d63975bbf49f430e11046608023ee2827faa893b15d9a0d702cf833
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-3.4.4/bitbake-c2d8f9b2137bd4a98eb0f51519493131773e7517.tar.bz2,
+ http://mirrors.kernel.org/yocto/yocto/yocto-3.4.4/bitbake-c2d8f9b2137bd4a98eb0f51519493131773e7517.tar.bz2
+
+yocto-docs
+
+- Repository Location: :yocto_git:`/yocto-docs`
+- Branch: :yocto_git:`honister </yocto-docs/log/?h=honister>`
+- Tag: :yocto_git:`yocto-3.4.4 </yocto-docs/tag/?h=yocto-3.4.4>`
+- Git Revision: :yocto_git:`5ead7d39aaf9044078dff27f462e29a8e31d89e4 </yocto-docs/commit/?5ead7d39aaf9044078dff27f462e29a8e31d89e4>`
diff --git a/documentation/migration-guides/release-notes-3.4.rst b/documentation/migration-guides/release-notes-3.4.rst
index 5a8fb4b5a9..d76bb004b1 100644
--- a/documentation/migration-guides/release-notes-3.4.rst
+++ b/documentation/migration-guides/release-notes-3.4.rst
@@ -1,3 +1,5 @@
+.. SPDX-License-Identifier: CC-BY-SA-2.0-UK
+
Release notes for 3.4 (honister)
--------------------------------
@@ -5,9 +7,9 @@ New Features / Enhancements in 3.4
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
- Linux kernel 5.14, glibc 2.34 and ~280 other recipe upgrades
-- Switched override character to ':' (replacing '_') for more robust parsing and improved performance - see the above migration guide for help
+- Switched override character to ':' (replacing '_') for more robust parsing and improved performance --- see the above migration guide for help
- Rust integrated into core, providing rust support for cross-compilation and SDK
-- New create-spdx class for creating SPDX SBoM documents
+- New :ref:`ref-classes-create-spdx` class for creating SPDX SBoM documents
- New recipes: cargo, core-image-ptest-all, core-image-ptest-fast, core-image-weston-sdk, erofs-utils, gcompat, gi-docgen, libmicrohttpd, libseccomp, libstd-rs, perlcross, python3-markdown, python3-pyyaml, python3-smartypants, python3-typogrify, rust, rust-cross, rust-cross-canadian, rust-hello-world, rust-llvm, rust-tools-cross-canadian, rustfmt, xwayland
- Several optimisations to reduce unnecessary task dependencies for faster builds
- seccomp integrated into core, with additional enabling for gnutls, systemd, qemu
@@ -36,9 +38,9 @@ New Features / Enhancements in 3.4
- Kernel-related enhancements:
- - Support zstd-compressed modules and initramfs images
+ - Support zstd-compressed modules and :term:`Initramfs` images
- Allow opt-out of split kernel modules
- - linux-yocto-dev: base AUTOREV on specified version
+ - linux-yocto-dev: base :term:`AUTOREV` on specified version
- kernel-yocto: provide debug / summary information for metadata
- kernel-uboot: Handle gzip and lzo compression options
- linux-yocto/5.14: added devupstream support
@@ -67,8 +69,10 @@ New Features / Enhancements in 3.4
- SDK-related enhancements:
- - Enable do_populate_sdk with multilibs
- - New ``SDKPATHINSTALL`` variable decouples default install path from built in path to avoid rebuilding nativesdk components on e.g. :term:`DISTRO_VERSION` changes
+ - Enable :ref:`ref-tasks-populate_sdk` with multilibs
+ - New ``SDKPATHINSTALL`` variable decouples default install path from
+ built in path to avoid rebuilding :ref:`ref-classes-nativesdk`
+ components on e.g. :term:`DISTRO_VERSION` changes
- eSDK: Error if trying to generate an eSDK from a multiconfig
- eSDK: introduce :term:`TOOLCHAIN_HOST_TASK_ESDK` to be used in place of :term:`TOOLCHAIN_HOST_TASK` to add components to the host part of the eSDK
@@ -170,48 +174,48 @@ Known Issues in 3.4
Recipe Licenses changes in 3.4
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-The following corrections have been made to the LICENSE values set by recipes:
-
-- acpica: correct LICENSE to "Intel | BSD-3-Clause | GPLv2"
-- dtc: correct LICENSE to "GPLv2 | BSD-2-Clause"
-- e2fsprogs: correct LICENSE to "GPLv2 & LGPLv2 & BSD-3-Clause & MIT"
-- ffmpeg: correct LICENSE to "GPLv2+ & LGPLv2.1+ & ISC & MIT & BSD-2-Clause & BSD-3-Clause & IJG"
-- flac: correct LICENSE to "GFDL-1.2 & GPLv2+ & LGPLv2.1+ & BSD-3-Clause"
-- flex: correct LICENSE to "BSD-3-Clause & LGPL-2.0+"
-- font-util: correct LICENSE to "MIT & MIT-style & BSD-4-Clause & BSD-2-Clause"
-- glib-2.0: correct LICENSE to "LGPLv2.1+ & BSD-3-Clause & PD"
-- gobject-introspection: correct LICENSE to "LGPLv2+ & GPLv2+ & MIT" (add MIT license)
-- hdparm: correct LICENSE to "BSD-2-Clause & GPLv2 & hdparm"
-- iputils: correct LICENSE to "BSD-3-Clause & GPLv2+"
-- libcap: correct LICENSE to "BSD-3-Clause | GPLv2"
-- libevent: correct LICENSE to "BSD-3-Clause & MIT"
-- libjitterentropy: correct LICENSE to "GPLv2+ | BSD-3-Clause"
-- libpam: correct LICENSE to "GPLv2+ | BSD-3-Clause"
-- libwpe: correct LICENSE to "BSD-2-Clause"
-- libx11-compose-data: correct LICENSE to "MIT & MIT-style & BSD-4-Clause & BSD-2-Clause"
-- libx11: correct LICENSE to "MIT & MIT-style & BSD-4-Clause & BSD-2-Clause"
-- libxfont2: correct LICENSE to "MIT & MIT-style & BSD-4-Clause & BSD-2-Clause"
-- libxfont: correct LICENSE to "MIT & MIT-style & BSD-3-Clause"
-- lsof: correct LICENSE to reflect that it uses a BSD-like (but not exactly BSD) license ("Spencer-94")
-- nfs-utils: correct LICENSE to "MIT & GPLv2+ & BSD-3-Clause"
+The following corrections have been made to the :term:`LICENSE` values set by recipes:
+
+- acpica: correct :term:`LICENSE` to "Intel | BSD-3-Clause | GPLv2"
+- dtc: correct :term:`LICENSE` to "GPLv2 | BSD-2-Clause"
+- e2fsprogs: correct :term:`LICENSE` to "GPLv2 & LGPLv2 & BSD-3-Clause & MIT"
+- ffmpeg: correct :term:`LICENSE` to "GPLv2+ & LGPLv2.1+ & ISC & MIT & BSD-2-Clause & BSD-3-Clause & IJG"
+- flac: correct :term:`LICENSE` to "GFDL-1.2 & GPLv2+ & LGPLv2.1+ & BSD-3-Clause"
+- flex: correct :term:`LICENSE` to "BSD-3-Clause & LGPL-2.0+"
+- font-util: correct :term:`LICENSE` to "MIT & MIT-style & BSD-4-Clause & BSD-2-Clause"
+- glib-2.0: correct :term:`LICENSE` to "LGPLv2.1+ & BSD-3-Clause & PD"
+- gobject-introspection: correct :term:`LICENSE` to "LGPLv2+ & GPLv2+ & MIT" (add MIT license)
+- hdparm: correct :term:`LICENSE` to "BSD-2-Clause & GPLv2 & hdparm"
+- iputils: correct :term:`LICENSE` to "BSD-3-Clause & GPLv2+"
+- libcap: correct :term:`LICENSE` to "BSD-3-Clause | GPLv2"
+- libevent: correct :term:`LICENSE` to "BSD-3-Clause & MIT"
+- libjitterentropy: correct :term:`LICENSE` to "GPLv2+ | BSD-3-Clause"
+- libpam: correct :term:`LICENSE` to "GPLv2+ | BSD-3-Clause"
+- libwpe: correct :term:`LICENSE` to "BSD-2-Clause"
+- libx11-compose-data: correct :term:`LICENSE` to "MIT & MIT-style & BSD-4-Clause & BSD-2-Clause"
+- libx11: correct :term:`LICENSE` to "MIT & MIT-style & BSD-4-Clause & BSD-2-Clause"
+- libxfont2: correct :term:`LICENSE` to "MIT & MIT-style & BSD-4-Clause & BSD-2-Clause"
+- libxfont: correct :term:`LICENSE` to "MIT & MIT-style & BSD-3-Clause"
+- lsof: correct :term:`LICENSE` to reflect that it uses a BSD-like (but not exactly BSD) license ("Spencer-94")
+- nfs-utils: correct :term:`LICENSE` to "MIT & GPLv2+ & BSD-3-Clause"
- ovmf: correct license to "BSD-2-Clause-Patent"
-- ppp: correct LICENSE to "BSD-3-Clause & BSD-3-Clause-Attribution & GPLv2+ & LGPLv2+ & PD"
-- python3-packaging: correct LICENSE to "Apache-2.0 | BSD-2-Clause"
-- python-async-test: correct LICENSE to "BSD-3-Clause"
+- ppp: correct :term:`LICENSE` to "BSD-3-Clause & BSD-3-Clause-Attribution & GPLv2+ & LGPLv2+ & PD"
+- python3-packaging: correct :term:`LICENSE` to "Apache-2.0 | BSD-2-Clause"
+- python-async-test: correct :term:`LICENSE` to "BSD-3-Clause"
- quota: remove BSD license (only BSD licensed part of the code was removed in 4.05)
-- shadow: correct LICENSE to "BSD-3-Clause | Artistic-1.0"
-- shadow-sysroot: set LICENSE the same as shadow
-- sudo: correct LICENSE to "ISC & BSD-3-Clause & BSD-2-Clause & Zlib"
-- swig: correct LICENSE to "BSD-3-Clause & GPLv3"
+- shadow: correct :term:`LICENSE` to "BSD-3-Clause | Artistic-1.0"
+- shadow-sysroot: set :term:`LICENSE` the same as shadow
+- sudo: correct :term:`LICENSE` to "ISC & BSD-3-Clause & BSD-2-Clause & Zlib"
+- swig: correct :term:`LICENSE` to "BSD-3-Clause & GPLv3"
- valgrind: correct license to "GPLv2 & GPLv2+ & BSD-3-Clause"
-- webkitgtk: correct LICENSE to "BSD-2-Clause & LGPLv2+"
-- wpebackend-fdo: correct LICENSE to "BSD-2-Clause"
-- xinetd: correct LICENSE to reflect that it uses a unique BSD-like (but not exactly BSD) license
+- webkitgtk: correct :term:`LICENSE` to "BSD-2-Clause & LGPLv2+"
+- wpebackend-fdo: correct :term:`LICENSE` to "BSD-2-Clause"
+- xinetd: correct :term:`LICENSE` to reflect that it uses a unique BSD-like (but not exactly BSD) license
Other license-related notes:
-- When creating recipes for Python software, recipetool will now treat "BSD" as "BSD-3-Clause" for the purposes of setting LICENSE, as that is the most common understanding.
-- Please be aware that an initramfs bundled with the kernel using :term:`INITRAMFS_IMAGE_BUNDLE` should only contain GPLv2-compatible software; this is now mentioned in the documentation.
+- When creating recipes for Python software, recipetool will now treat "BSD" as "BSD-3-Clause" for the purposes of setting :term:`LICENSE`, as that is the most common understanding.
+- Please be aware that an :term:`Initramfs` bundled with the kernel using :term:`INITRAMFS_IMAGE_BUNDLE` should only contain GPLv2-compatible software; this is now mentioned in the documentation.
Security Fixes in 3.4
~~~~~~~~~~~~~~~~~~~~~
@@ -721,7 +725,7 @@ Repositories / Downloads for 3.4
poky
-- Repository Location: https://git.yoctoproject.org/poky/
+- Repository Location: :yocto_git:`/poky`
- Branch: :yocto_git:`honister </poky/log/?h=honister>`
- Tag: :yocto_git:`yocto-3.4 </poky/tag/?h=yocto-3.4>`
- Git Revision: :yocto_git:`f6d1126fff213460dc6954a5d5fc168606d76b66 </poky/commit/?id=f6d1126fff213460dc6954a5d5fc168606d76b66>`
@@ -745,7 +749,7 @@ openembedded-core
meta-mingw
-- Repository Location: https://git.yoctoproject.org/meta-mingw
+- Repository Location: :yocto_git:`/meta-mingw`
- Branch: :yocto_git:`honister </meta-mingw/log/?h=honister>`
- Tag: :yocto_git:`yocto-3.4 </meta-mingw/tag/?h=yocto-3.4>`
- Git Revision: :yocto_git:`f5d761cbd5c957e4405c5d40b0c236d263c916a8 </meta-mingw/commit/?id=f5d761cbd5c957e4405c5d40b0c236d263c916a8>`
@@ -757,7 +761,7 @@ meta-mingw
meta-intel
-- Repository Location: https://git.yoctoproject.org/meta-intel
+- Repository Location: :yocto_git:`/meta-intel`
- Branch: :yocto_git:`honister </meta-intel/log/?h=honister>`
- Tag: :yocto_git:`yocto-3.4 </meta-intel/tag/?h=yocto-3.4>`
- Git Revision: :yocto_git:`90170cf85fe35b4e8dc00eee50053c0205276b63 </meta-intel/commit/?id=90170cf85fe35b4e8dc00eee50053c0205276b63>`
@@ -769,7 +773,7 @@ meta-intel
meta-gplv2
-- Repository Location: https://git.yoctoproject.org/meta-gplv2
+- Repository Location: :yocto_git:`/meta-gplv2`
- Branch: :yocto_git:`honister </meta-gplv2/log/?h=honister>`
- Tag: :yocto_git:`yocto-3.4 </meta-gplv2/tag/?h=yocto-3.4>`
- Git Revision: :yocto_git:`f04e4369bf9dd3385165281b9fa2ed1043b0e400 </meta-gplv2/commit/?id=f04e4369bf9dd3385165281b9fa2ed1043b0e400>`
@@ -793,7 +797,7 @@ bitbake
yocto-docs
-- Repository Location: https://git.yoctoproject.org/yocto-docs
+- Repository Location: :yocto_git:`/yocto-docs`
- Branch: :yocto_git:`honister </yocto-docs/log/?h=honister>`
- Tag: :yocto_git:`yocto-3.4 </yocto-docs/tag/?h=yocto-3.4>`
- Git Revision: :yocto_git:`d75c5450ecf56c8ac799a633ee9ac459e88f91fc </yocto-docs/commit/?id=d75c5450ecf56c8ac799a633ee9ac459e88f91fc>`
diff --git a/documentation/migration-guides/release-notes-4.0.1.rst b/documentation/migration-guides/release-notes-4.0.1.rst
new file mode 100644
index 0000000000..5529f71c6f
--- /dev/null
+++ b/documentation/migration-guides/release-notes-4.0.1.rst
@@ -0,0 +1,250 @@
+.. SPDX-License-Identifier: CC-BY-SA-2.0-UK
+
+Release notes for 4.0.1 (kirkstone)
+-----------------------------------
+
+Security Fixes in 4.0.1
+~~~~~~~~~~~~~~~~~~~~~~~
+
+- linux-yocto/5.15: fix :cve:`2022-28796`
+- python3: ignore :cve:`2015-20107`
+- e2fsprogs: fix :cve:`2022-1304`
+- lua: fix :cve:`2022-28805`
+- busybox: fix :cve:`2022-28391`
+
+Fixes in 4.0.1
+~~~~~~~~~~~~~~
+
+- abi_version/sstate: Bump hashequiv and sstate versions due to git changes
+- apt: add apt selftest to test signed package feeds
+- apt: upgrade 2.4.4 -> 2.4.5
+- arch-armv8-2a.inc: fix a typo in TUNEVALID variable
+- babeltrace: Disable warnings as errors
+- base: Avoid circular references to our own scripts
+- base: Drop git intercept
+- build-appliance-image: Update to kirkstone head revision
+- build-appliance: Switch to kirkstone branch
+- buildtools-tarball: Only add cert envvars if certs are included
+- busybox: Use base_bindir instead of hardcoding /bin path
+- cases/buildepoxy.py: fix typo
+- create-spdx: delete virtual/kernel dependency to fix FreeRTOS build
+- create-spdx: fix error when symlink cannot be created
+- cve-check: add JSON format to summary output
+- cve-check: fix symlinks where link and output path are equal
+- cve-check: no need to depend on the fetch task
+- cve-update-db-native: let the user to drive the update interval
+- cve-update-db-native: update the CVE database once a day only
+- cve_check: skip remote patches that haven't been fetched when searching for CVE tags
+- dev-manual: add command used to add the signed-off-by line.
+- devshell.bbclass: Allow devshell & pydevshell to use the network
+- docs: conf.py: fix cve extlinks caption for sphinx <4.0
+- docs: migration-guides: migration-3.4: mention that hardcoded password are supported if hashed
+- docs: migration-guides: release-notes-4.0: fix risc-v typo
+- docs: migration-guides: release-notes-4.0: replace kernel placeholder with correct recipe name
+- docs: ref-manual: variables: add hashed password example in :term:`EXTRA_USERS_PARAMS`
+- docs: set_versions.py: add information about obsolescence of a release
+- docs: set_versions.py: fix latest release of a branch being shown twice in switchers.js
+- docs: set_versions.py: fix latest version of an active release shown as obsolete
+- docs: set_versions.py: mark as obsolete only branches and old tags from obsolete releases
+- docs: sphinx-static: switchers.js.in: do not mark branches as outdated
+- docs: sphinx-static: switchers.js.in: fix broken switcher for branches
+- docs: sphinx-static: switchers.js.in: improve obsolete version detection
+- docs: sphinx-static: switchers.js.in: remove duplicate for outdated versions
+- docs: sphinx-static: switchers.js.in: rename all_versions to switcher_versions
+- docs: update Bitbake objects.inv location for master branch
+- documentation/brief-yoctoprojectqs: add directory for local.conf
+- gcompat: Fix build when usrmerge distro feature is enabled
+- git: correct license
+- git: upgrade 2.35.2 -> 2.35.3
+- glib: upgrade 2.72.0 -> 2.72.1
+- glibc: ptest: Fix glibc-tests package issue
+- gnupg: Disable FORTIFY_SOURCES on mips
+- go.bbclass: disable the use of the default configuration file
+- gstreamer1.0-plugins-bad: drop patch
+- gstreamer1.0-plugins-good: Fix libsoup dependency
+- gstreamer1.0: Minor documentation addition
+- install/devshell: Introduce git intercept script due to fakeroot issues
+- kernel-yocto.bbclass: Fixup do_kernel_configcheck usage of KMETA
+- libc-glibc: Use libxcrypt to provide virtual/crypt
+- libgit2: upgrade 1.4.2 -> 1.4.3
+- libsoup: upgrade 3.0.5 -> 3.0.6
+- libusb1: upgrade 1.0.25 -> 1.0.26
+- linux-firmware: correct license for ar3k firmware
+- linux-firmware: upgrade 20220310 -> 20220411
+- linux-yocto/5.10: base: enable kernel crypto userspace API
+- linux-yocto/5.10: update to v5.10.112
+- linux-yocto/5.15: arm: poky-tiny cleanup and fixes
+- linux-yocto/5.15: base: enable kernel crypto userspace API
+- linux-yocto/5.15: fix -standard kernel build issue
+- linux-yocto/5.15: fix ppc boot
+- linux-yocto/5.15: fix qemuarm graphical boot
+- linux-yocto/5.15: kasan: fix BUG: sleeping function called from invalid context
+- linux-yocto/5.15: netfilter: conntrack: avoid useless indirection during conntrack destruction
+- linux-yocto/5.15: update to v5.15.36
+- linux-yocto: enable powerpc-debug fragment
+- mdadm: Drop clang specific cflags
+- migration-3.4: add missing entry on :term:`EXTRA_USERS_PARAMS`
+- migration-guides: add release notes for 4.0
+- migration-guides: complete migration guide for 4.0
+- migration-guides: release-notes-4.0: mention :term:`LTS` release
+- migration-guides: release-notes-4.0: update 'Repositories / Downloads' section
+- migration-guides: stop including documents with ".. include"
+- musl: Fix build when usrmerge distro feature is enabled
+- ncurses: use COPYING file
+- neard: Switch :term:`SRC_URI` to git repo
+- oeqa/selftest: add test for git working correctly inside pseudo
+- openssl: minor security upgrade 3.0.2 -> 3.0.3
+- package.bbclass: Prevent perform_packagecopy from removing /sysroot-only
+- package: Ensure we track whether PRSERV was active or not
+- package_manager: fix missing dependency on gnupg when signing deb package feeds
+- poky-tiny: enable qemuarmv5/qemuarm64 and cleanups
+- poky.conf: bump version for 4.0.1 release
+- qemu.bbclass: Extend ppc/ppc64 extra options
+- qemuarm64: use virtio pci interfaces
+- qemuarmv5: use arm-versatile-926ejs :term:`KMACHINE`
+- ref-manual: Add :term:`XZ_THREADS` and :term:`XZ_MEMLIMIT`
+- ref-manual: add :term:`KERNEL_DEBUG_TIMESTAMPS`
+- ref-manual: add :term:`ZSTD_THREADS`
+- ref-manual: add a note about hard-coded passwords
+- ref-manual: add empty-dirs QA check and QA_EMPTY_DIRS*
+- ref-manual: add mention of vendor filtering to :term:`CVE_PRODUCT`
+- ref-manual: mention wildcarding support in :term:`INCOMPATIBLE_LICENSE`
+- releases: update for yocto 4.0
+- rootfs-postcommands: fix symlinks where link and output path are equal
+- ruby: upgrade 3.1.1 -> 3.1.2
+- sanity: skip make 4.2.1 warning for debian
+- scripts/git: Ensure we don't have circular references
+- scripts: Make git intercept global
+- seatd: Disable overflow warning as error on ppc64/musl
+- selftest/lic_checksum: Add test for filename containing space
+- set_versions: update for 4.0 release
+- staging: Ensure we filter out ourselves
+- strace: fix ptest failure in landlock
+- subversion: upgrade to 1.14.2
+- systemd-boot: remove outdated EFI_LD comment
+- systemtap: Fix build with gcc-12
+- terminal.py: Restore error output from Terminal
+- u-boot: Correct the :term:`SRC_URI`
+- u-boot: Inherit pkgconfig
+- update_udev_hwdb: fix multilib issue with systemd
+- util-linux: Create u-a symlink for findfs utility
+- virgl: skip headless test on alma 8.6
+- webkitgtk: adjust patch status
+- wic: do not use PARTLABEL for msdos partition tables
+- wireless-regdb: upgrade 2022.02.18 -> 2022.04.08
+- xserver-xorg: Fix build with gcc12
+- yocto-bsps: update to v5.15.36
+
+Contributors to 4.0.1
+~~~~~~~~~~~~~~~~~~~~~
+
+- Abongwa Amahnui Bonalais
+- Alexander Kanavin
+- Bruce Ashfield
+- Carlos Rafael Giani
+- Chen Qi
+- Davide Gardenal
+- Dmitry Baryshkov
+- Ferry Toth
+- Henning Schild
+- Jon Mason
+- Justin Bronder
+- Kai Kang
+- Khem Raj
+- Konrad Weihmann
+- Lee Chee Yang
+- Marta Rybczynska
+- Martin Jansa
+- Matt Madison
+- Michael Halstead
+- Michael Opdenacker
+- Naveen Saini
+- Nicolas Dechesne
+- Paul Eggleton
+- Paul Gortmaker
+- Paulo Neves
+- Peter Kjellerstedt
+- Peter Marko
+- Pgowda
+- Portia
+- Quentin Schulz
+- Rahul Kumar
+- Richard Purdie
+- Robert Joslyn
+- Robert Yang
+- Roland Hieber
+- Ross Burton
+- Russ Dill
+- Steve Sakoman
+- Wang Mingyu
+- Zheng Ruoqin
+
+Repositories / Downloads for 4.0.1
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+poky
+
+- Repository Location: :yocto_git:`/poky`
+- Branch: :yocto_git:`kirkstone </poky/log/?h=kirkstone>`
+- Tag: :yocto_git:`yocto-4.0.1 </poky/tag/?h=yocto-4.0.1>`
+- Git Revision: :yocto_git:`8c489602f218bcf21de0d3c9f8cf620ea5f06430 </poky/commit/?id=8c489602f218bcf21de0d3c9f8cf620ea5f06430>`
+- Release Artefact: poky-8c489602f218bcf21de0d3c9f8cf620ea5f06430
+- sha: 65c545a316bd8efb13ae1358eeccc8953543be908008103b51f7f90aed960d00
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.0.1/poky-8c489602f218bcf21de0d3c9f8cf620ea5f06430.tar.bz2,
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.0.1/poky-8c489602f218bcf21de0d3c9f8cf620ea5f06430.tar.bz2
+
+openembedded-core
+
+- Repository Location: :oe_git:`/openembedded-core`
+- Branch: :oe_git:`kirkstone </openembedded-core/log/?h=kirkstone>`
+- Tag: :oe_git:`yocto-4.0.1 </openembedded-core/tag/?h=yocto-4.0>`
+- Git Revision: :oe_git:`cb8647c08959abb1d6b7c2b3a34b4b415f66d7ee </openembedded-core/commit/?id=cb8647c08959abb1d6b7c2b3a34b4b415f66d7ee>`
+- Release Artefact: oecore-cb8647c08959abb1d6b7c2b3a34b4b415f66d7ee
+- sha: 43981b8fad82f601618a133dffbec839524f0d0a055efc3d8f808cbfd811ab17
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.0.1/oecore-cb8647c08959abb1d6b7c2b3a34b4b415f66d7ee.tar.bz2,
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.0.1/oecore-cb8647c08959abb1d6b7c2b3a34b4b415f66d7ee.tar.bz2
+
+meta-mingw
+
+- Repository Location: :yocto_git:`/meta-mingw`
+- Branch: :yocto_git:`kirkstone </meta-mingw/log/?h=kirkstone>`
+- Tag: :yocto_git:`yocto-4.0.1 </meta-mingw/tag/?h=yocto-4.0.1>`
+- Git Revision: :yocto_git:`a90614a6498c3345704e9611f2842eb933dc51c1 </meta-mingw/commit/?id=a90614a6498c3345704e9611f2842eb933dc51c1>`
+- Release Artefact: meta-mingw-a90614a6498c3345704e9611f2842eb933dc51c1
+- sha: 49f9900bfbbc1c68136f8115b314e95d0b7f6be75edf36a75d9bcd1cca7c6302
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.0.1/meta-mingw-a90614a6498c3345704e9611f2842eb933dc51c1.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.0.1/meta-mingw-a90614a6498c3345704e9611f2842eb933dc51c1.tar.bz2
+
+meta-gplv2
+
+- Repository Location: :yocto_git:`/meta-gplv2`
+- Branch: :yocto_git:`kirkstone </meta-gplv2/log/?h=kirkstone>`
+- Tag: :yocto_git:`yocto-4.0.1 </meta-gplv2/tag/?h=yocto-4.0.1>`
+- Git Revision: :yocto_git:`d2f8b5cdb285b72a4ed93450f6703ca27aa42e8a </meta-mingw/commit/?id=d2f8b5cdb285b72a4ed93450f6703ca27aa42e8a>`
+- Release Artefact: meta-gplv2-d2f8b5cdb285b72a4ed93450f6703ca27aa42e8a
+- sha: c386f59f8a672747dc3d0be1d4234b6039273d0e57933eb87caa20f56b9cca6d
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.0.1/meta-gplv2-d2f8b5cdb285b72a4ed93450f6703ca27aa42e8a.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.0.1/meta-gplv2-d2f8b5cdb285b72a4ed93450f6703ca27aa42e8a.tar.bz2
+
+bitbake
+
+- Repository Location: :oe_git:`/bitbake`
+- Branch: :oe_git:`2.0 </bitbake/log/?h=2.0>`
+- Tag: :oe_git:`yocto-4.0 </bitbake/tag/?h=yocto-4.0>`
+- Git Revision: :oe_git:`59c16ae6c55c607c56efd2287537a1b97ba2bf52 </bitbake/commit/?id=59c16ae6c55c607c56efd2287537a1b97ba2bf52>`
+- Release Artefact: bitbake-59c16ae6c55c607c56efd2287537a1b97ba2bf52
+- sha: 3ae466c31f738fc45c3d7c6f665952d59f01697f2667ea42f0544d4298dd6ef0
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.0.1/bitbake-59c16ae6c55c607c56efd2287537a1b97ba2bf52.tar.bz2,
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.0.1/bitbake-59c16ae6c55c607c56efd2287537a1b97ba2bf52.tar.bz2
+
+yocto-docs
+
+- Repository Location: :yocto_git:`/yocto-docs`
+- Branch: :yocto_git:`kirkstone </yocto-docs/log/?h=kirkstone>`
+- Tag: :yocto_git:`yocto-4.0.1 </yocto-docs/tag/?h=yocto-4.0>`
+- Git Revision: :yocto_git:`4ec9df3336a425719a9a35532504731ce56984ca </yocto-docs/commit/?id=4ec9df3336a425719a9a35532504731ce56984ca>`
diff --git a/documentation/migration-guides/release-notes-4.0.10.rst b/documentation/migration-guides/release-notes-4.0.10.rst
new file mode 100644
index 0000000000..f37c3471ea
--- /dev/null
+++ b/documentation/migration-guides/release-notes-4.0.10.rst
@@ -0,0 +1,180 @@
+.. SPDX-License-Identifier: CC-BY-SA-2.0-UK
+
+Release notes for Yocto-4.0.10 (Kirkstone)
+------------------------------------------
+
+Security Fixes in Yocto-4.0.10
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+- binutils: Fix :cve:`2023-1579`, :cve:`2023-1972`, :cve_mitre:`2023-25584`, :cve_mitre:`2023-25585` and :cve_mitre:`2023-25588`
+- cargo : Ignore :cve:`2022-46176`
+- connman: Fix :cve:`2023-28488`
+- curl: Fix :cve:`2023-27533`, :cve:`2023-27534`, :cve:`2023-27535`, :cve:`2023-27536` and :cve:`2023-27538`
+- ffmpeg: Fix :cve:`2022-48434`
+- freetype: Fix :cve:`2023-2004`
+- ghostscript: Fix :cve_mitre:`2023-29979`
+- git: Fix :cve:`2023-25652` and :cve:`2023-29007`
+- go: Fix :cve:`2022-41722`, :cve:`2022-41724`, :cve:`2022-41725`, :cve:`2023-24534`, :cve:`2023-24537` and :cve:`2023-24538`
+- go: Ignore :cve:`2022-41716`
+- libxml2: Fix :cve:`2023-28484` and :cve:`2023-29469`
+- libxpm: Fix :cve:`2022-44617`, :cve:`2022-46285` and :cve:`2022-4883`
+- linux-yocto: Ignore :cve:`2021-3759`, :cve:`2021-4135`, :cve:`2021-4155`, :cve:`2022-0168`, :cve:`2022-0171`, :cve:`2022-1016`, :cve:`2022-1184`, :cve:`2022-1198`, :cve:`2022-1199`, :cve:`2022-1462`, :cve:`2022-1734`, :cve:`2022-1852`, :cve:`2022-1882`, :cve:`2022-1998`, :cve:`2022-2078`, :cve:`2022-2196`, :cve:`2022-2318`, :cve:`2022-2380`, :cve:`2022-2503`, :cve:`2022-26365`, :cve:`2022-2663`, :cve:`2022-2873`, :cve:`2022-2905`, :cve:`2022-2959`, :cve:`2022-3028`, :cve:`2022-3078`, :cve:`2022-3104`, :cve:`2022-3105`, :cve:`2022-3106`, :cve:`2022-3107`, :cve:`2022-3111`, :cve:`2022-3112`, :cve:`2022-3113`, :cve:`2022-3115`, :cve:`2022-3202`, :cve:`2022-32250`, :cve:`2022-32296`, :cve:`2022-32981`, :cve:`2022-3303`, :cve:`2022-33740`, :cve:`2022-33741`, :cve:`2022-33742`, :cve:`2022-33743`, :cve:`2022-33744`, :cve:`2022-33981`, :cve:`2022-3424`, :cve:`2022-3435`, :cve:`2022-34918`, :cve:`2022-3521`, :cve:`2022-3545`, :cve:`2022-3564`, :cve:`2022-3586`, :cve:`2022-3594`, :cve:`2022-36123`, :cve:`2022-3621`, :cve:`2022-3623`, :cve:`2022-3629`, :cve:`2022-3633`, :cve:`2022-3635`, :cve:`2022-3646`, :cve:`2022-3649`, :cve:`2022-36879`, :cve:`2022-36946`, :cve:`2022-3707`, :cve:`2022-39188`, :cve:`2022-39190`, :cve:`2022-39842`, :cve:`2022-40307`, :cve:`2022-40768`, :cve:`2022-4095`, :cve:`2022-41218`, :cve:`2022-4139`, :cve:`2022-41849`, :cve:`2022-41850`, :cve:`2022-41858`, :cve:`2022-42328`, :cve:`2022-42329`, :cve:`2022-42703`, :cve:`2022-42721`, :cve:`2022-42722`, :cve:`2022-42895`, :cve:`2022-4382`, :cve:`2022-4662`, :cve:`2022-47518`, :cve:`2022-47519`, :cve:`2022-47520`, :cve:`2022-47929`, :cve:`2023-0179`, :cve:`2023-0394`, :cve:`2023-0461`, :cve:`2023-0590`, :cve:`2023-1073`, :cve:`2023-1074`, :cve:`2023-1077`, :cve:`2023-1078`, :cve:`2023-1079`, :cve:`2023-1095`, :cve:`2023-1118`, :cve:`2023-1249`, :cve:`2023-1252`, :cve:`2023-1281`, :cve:`2023-1382`, :cve:`2023-1513`, :cve:`2023-1829`, :cve:`2023-1838`, :cve:`2023-1998`, :cve:`2023-2006`, :cve:`2023-2008`, :cve:`2023-2162`, :cve:`2023-2166`, :cve:`2023-2177`, :cve:`2023-22999`, :cve:`2023-23002`, :cve:`2023-23004`, :cve:`2023-23454`, :cve:`2023-23455`, :cve:`2023-23559`, :cve:`2023-25012`, :cve:`2023-26545`, :cve:`2023-28327` and :cve:`2023-28328`
+- nasm: Fix :cve:`2022-44370`
+- python3-cryptography: Fix :cve:`2023-23931`
+- qemu: Ignore :cve:`2023-0664`
+- ruby: Fix :cve:`2023-28755` and :cve:`2023-28756`
+- screen: Fix :cve:`2023-24626`
+- shadow: Fix :cve:`2023-29383`
+- tiff: Fix :cve:`2022-4645`
+- webkitgtk: Fix :cve:`2022-32888` and :cve:`2022-32923`
+- xserver-xorg: Fix :cve:`2023-1393`
+
+
+Fixes in Yocto-4.0.10
+~~~~~~~~~~~~~~~~~~~~~
+
+- bitbake: bin/utils: Ensure locale en_US.UTF-8 is available on the system
+- build-appliance-image: Update to kirkstone head revision
+- cmake: add CMAKE_SYSROOT to generated toolchain file
+- glibc: stable 2.35 branch updates.
+- kernel-devsrc: depend on python3-core instead of python3
+- kernel: improve initramfs bundle processing time
+- libarchive: Enable acls, xattr for native as well as target
+- libbsd: Add correct license for all packages
+- libpam: Fix the xtests/tst-pam_motd[1|3] failures
+- libxpm: upgrade to 3.5.15
+- linux-firmware: upgrade to 20230404
+- linux-yocto/5.15: upgrade to v5.15.108
+- migration-guides: add release-notes for 4.0.9
+- oeqa/utils/metadata.py: Fix running oe-selftest running with no distro set
+- openssl: Move microblaze to linux-latomic config
+- package.bbclass: correct check for /build in copydebugsources()
+- poky.conf: bump version for 4.0.10
+- populate_sdk_base: add zip options
+- populate_sdk_ext.bbclass: set :term:`METADATA_REVISION` with an :term:`DISTRO` override
+- run-postinsts: Set dependency for ldconfig to avoid boot issues
+- update-alternatives.bbclass: fix old override syntax
+- wic/bootimg-efi: if fixed-size is set then use that for mkdosfs
+- wpebackend-fdo: upgrade to 1.14.2
+- xorg-lib-common: Add variable to set tarball type
+- xserver-xorg: upgrade to 21.1.8
+
+
+Known Issues in Yocto-4.0.10
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+- N/A
+
+
+Contributors to Yocto-4.0.10
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+- Archana Polampalli
+- Arturo Buzarra
+- Bruce Ashfield
+- Christoph Lauer
+- Deepthi Hemraj
+- Dmitry Baryshkov
+- Frank de Brabander
+- Hitendra Prajapati
+- Joe Slater
+- Kai Kang
+- Kyle Russell
+- Lee Chee Yang
+- Mark Hatle
+- Martin Jansa
+- Mingli Yu
+- Narpat Mali
+- Pascal Bach
+- Pawan Badganchi
+- Peter Bergin
+- Peter Marko
+- Piotr Łobacz
+- Randolph Sapp
+- Ranjitsinh Rathod
+- Ross Burton
+- Shubham Kulkarni
+- Siddharth Doshi
+- Steve Sakoman
+- Sundeep KOKKONDA
+- Thomas Roos
+- Virendra Thakur
+- Vivek Kumbhar
+- Wang Mingyu
+- Xiangyu Chen
+- Yash Shinde
+- Yoann Congal
+- Yogita Urade
+- Zhixiong Chi
+
+
+Repositories / Downloads for Yocto-4.0.10
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+poky
+
+- Repository Location: :yocto_git:`/poky`
+- Branch: :yocto_git:`kirkstone </poky/log/?h=kirkstone>`
+- Tag: :yocto_git:`yocto-4.0.10 </poky/log/?h=yocto-4.0.10>`
+- Git Revision: :yocto_git:`f53ab3a2ff206a130cdc843839dd0ea5ec4ad02f </poky/commit/?id=f53ab3a2ff206a130cdc843839dd0ea5ec4ad02f>`
+- Release Artefact: poky-f53ab3a2ff206a130cdc843839dd0ea5ec4ad02f
+- sha: 8820aeac857ce6bbd1c7ef26cadbb86eca02be93deded253b4a5f07ddd69255d
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.0.10/poky-f53ab3a2ff206a130cdc843839dd0ea5ec4ad02f.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.0.10/poky-f53ab3a2ff206a130cdc843839dd0ea5ec4ad02f.tar.bz2
+
+openembedded-core
+
+- Repository Location: :oe_git:`/openembedded-core`
+- Branch: :oe_git:`kirkstone </openembedded-core/log/?h=kirkstone>`
+- Tag: :oe_git:`yocto-4.0.10 </openembedded-core/log/?h=yocto-4.0.10>`
+- Git Revision: :oe_git:`d2713785f9cd2d58731df877bc8b7bcc71b6c8e6 </openembedded-core/commit/?id=d2713785f9cd2d58731df877bc8b7bcc71b6c8e6>`
+- Release Artefact: oecore-d2713785f9cd2d58731df877bc8b7bcc71b6c8e6
+- sha: 78e084a1aceaaa6ec022702f29f80eaffade3159e9c42b6b8985c1b7ddd2fbab
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.0.10/oecore-d2713785f9cd2d58731df877bc8b7bcc71b6c8e6.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.0.10/oecore-d2713785f9cd2d58731df877bc8b7bcc71b6c8e6.tar.bz2
+
+meta-mingw
+
+- Repository Location: :yocto_git:`/meta-mingw`
+- Branch: :yocto_git:`kirkstone </meta-mingw/log/?h=kirkstone>`
+- Tag: :yocto_git:`yocto-4.0.10 </meta-mingw/log/?h=yocto-4.0.10>`
+- Git Revision: :yocto_git:`a90614a6498c3345704e9611f2842eb933dc51c1 </meta-mingw/commit/?id=a90614a6498c3345704e9611f2842eb933dc51c1>`
+- Release Artefact: meta-mingw-a90614a6498c3345704e9611f2842eb933dc51c1
+- sha: 49f9900bfbbc1c68136f8115b314e95d0b7f6be75edf36a75d9bcd1cca7c6302
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.0.10/meta-mingw-a90614a6498c3345704e9611f2842eb933dc51c1.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.0.10/meta-mingw-a90614a6498c3345704e9611f2842eb933dc51c1.tar.bz2
+
+meta-gplv2
+
+- Repository Location: :yocto_git:`/meta-gplv2`
+- Branch: :yocto_git:`kirkstone </meta-gplv2/log/?h=kirkstone>`
+- Tag: :yocto_git:`yocto-4.0.10 </meta-gplv2/log/?h=yocto-4.0.10>`
+- Git Revision: :yocto_git:`d2f8b5cdb285b72a4ed93450f6703ca27aa42e8a </meta-gplv2/commit/?id=d2f8b5cdb285b72a4ed93450f6703ca27aa42e8a>`
+- Release Artefact: meta-gplv2-d2f8b5cdb285b72a4ed93450f6703ca27aa42e8a
+- sha: c386f59f8a672747dc3d0be1d4234b6039273d0e57933eb87caa20f56b9cca6d
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.0.10/meta-gplv2-d2f8b5cdb285b72a4ed93450f6703ca27aa42e8a.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.0.10/meta-gplv2-d2f8b5cdb285b72a4ed93450f6703ca27aa42e8a.tar.bz2
+
+bitbake
+
+- Repository Location: :oe_git:`/bitbake`
+- Branch: :oe_git:`2.0 </bitbake/log/?h=2.0>`
+- Tag: :oe_git:`yocto-4.0.10 </bitbake/log/?h=yocto-4.0.10>`
+- Git Revision: :oe_git:`0c6f86b60cfba67c20733516957c0a654eb2b44c </bitbake/commit/?id=0c6f86b60cfba67c20733516957c0a654eb2b44c>`
+- Release Artefact: bitbake-0c6f86b60cfba67c20733516957c0a654eb2b44c
+- sha: 4caa94ee4d644017b0cc51b702e330191677f7d179018cbcec8b1793949ebc74
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.0.10/bitbake-0c6f86b60cfba67c20733516957c0a654eb2b44c.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.0.10/bitbake-0c6f86b60cfba67c20733516957c0a654eb2b44c.tar.bz2
+
+yocto-docs
+
+- Repository Location: :yocto_git:`/yocto-docs`
+- Branch: :yocto_git:`kirkstone </yocto-docs/log/?h=kirkstone>`
+- Tag: :yocto_git:`yocto-4.0.10 </yocto-docs/log/?h=yocto-4.0.10>`
+- Git Revision: :yocto_git:`8388be749806bd0bf4fccf1005dae8f643aa4ef4 </yocto-docs/commit/?id=8388be749806bd0bf4fccf1005dae8f643aa4ef4>`
+
diff --git a/documentation/migration-guides/release-notes-4.0.11.rst b/documentation/migration-guides/release-notes-4.0.11.rst
new file mode 100644
index 0000000000..8a15884908
--- /dev/null
+++ b/documentation/migration-guides/release-notes-4.0.11.rst
@@ -0,0 +1,214 @@
+.. SPDX-License-Identifier: CC-BY-SA-2.0-UK
+
+Release notes for Yocto-4.0.11 (Kirkstone)
+------------------------------------------
+
+Security Fixes in Yocto-4.0.11
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+- cups: Fix :cve:`2023-32324`
+- curl: Fix :cve:`2023-28319`, :cve:`2023-28320`, :cve:`2023-28321` and :cve:`2023-28322`
+- git: Ignore :cve:`2023-25815`
+- go: Fix :cve:`2023-24539` and :cve:`2023-24540`
+- nasm: Fix :cve:`2022-46457`
+- openssh: Fix :cve:`2023-28531`
+- openssl: Fix :cve:`2023-1255` and :cve:`2023-2650`
+- perl: Fix :cve:`2023-31484`
+- python3-requests: Fix for :cve:`2023-32681`
+- sysstat: Fix :cve:`2023-33204`
+- vim: Fix :cve:`2023-2426`
+- webkitgtk: fix :cve:`2022-42867`, :cve:`2022-46691`, :cve:`2022-46699` and :cve:`2022-46700`
+
+
+Fixes in Yocto-4.0.11
+~~~~~~~~~~~~~~~~~~~~~
+
+- Revert "docs: conf.py: fix cve extlinks caption for sphinx <4.0"
+- Revert "ipk: Decode byte data to string in manifest handling"
+- avahi: fix D-Bus introspection
+- build-appliance-image: Update to kirkstone head revision
+- conf.py: add macro for Mitre CVE links
+- conf: add nice level to the hash config ignred variables
+- cpio: Fix wrong CRC with ASCII CRC for large files
+- cve-update-nvd2-native: added the missing http import
+- cve-update-nvd2-native: new CVE database fetcher
+- dhcpcd: use git instead of tarballs
+- e2fsprogs: fix ptest bug for second running
+- gcc-runtime: Use static dummy libstdc++
+- glibc: stable 2.35 branch updates (cbceb903c4d7)
+- go.bbclass: don't use test to check output from ls
+- gstreamer1.0: Upgrade to 1.20.6
+- iso-codes: Upgrade to 4.15.0
+- kernel-devicetree: allow specification of dtb directory
+- kernel-devicetree: make shell scripts posix compliant
+- kernel-devicetree: recursively search for dtbs
+- kernel: don't force PAHOLE=false
+- kmscube: Correct :term:`DEPENDS` to avoid overwrite
+- lib/terminal.py: Add urxvt terminal
+- license.bbclass: Include :term:`LICENSE` in the output when it fails to parse
+- linux-yocto/5.10: Upgrade to v5.10.180
+- linux-yocto/5.15: Upgrade to v5.15.113
+- llvm: backport a fix for build with gcc-13
+- maintainers.inc: Fix email address typo
+- maintainers.inc: Move repo to unassigned
+- migration-guides: add release notes for 4.0.10
+- migration-guides: use new cve_mitre macro
+- nghttp2: Deleted the entries for -client and -server, and removed a dependency on them from the main package.
+- oeqa/selftest/cases/devtool.py: skip all tests require folder a git repo
+- openssh: Remove BSD-4-clause contents completely from codebase
+- openssl: Upgrade to 3.0.9
+- overview-manual: concepts.rst: Fix a typo
+- p11-kit: add native to :term:`BBCLASSEXTEND`
+- package: enable recursion on file globs
+- package_manager/ipk: fix config path generation in _create_custom_config()
+- piglit: Add :term:`PACKAGECONFIG` for glx and opencl
+- piglit: Add missing glslang dependencies
+- piglit: Fix build time dependency
+- poky.conf: bump version for 4.0.11
+- profile-manual: fix blktrace remote usage instructions
+- quilt: Fix merge.test race condition
+- ref-manual: add clarification for :term:`SRCREV`
+- selftest/reproducible: Allow native/cross reuse in test
+- staging.bbclass: do not add extend_recipe_sysroot to prefuncs of prepare_recipe_sysroot
+- systemd-networkd: backport fix for rm unmanaged wifi
+- systemd-systemctl: fix instance template WantedBy symlink construction
+- systemd-systemctl: support instance expansion in WantedBy
+- uninative: Upgrade to 3.10 to support gcc 13
+- uninative: Upgrade to 4.0 to include latest gcc 13.1.1
+- vim: Upgrade to 9.0.1527
+- waffle: Upgrade to 1.7.2
+- weston: add xwayland to :term:`DEPENDS` for :term:`PACKAGECONFIG` xwayland
+
+
+Known Issues in Yocto-4.0.11
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+- N/A
+
+
+Contributors to Yocto-4.0.11
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+- Alexander Kanavin
+- Andrew Jeffery
+- Archana Polampalli
+- Bhabu Bindu
+- Bruce Ashfield
+- C. Andy Martin
+- Chen Qi
+- Daniel Ammann
+- Deepthi Hemraj
+- Ed Beroset
+- Eero Aaltonen
+- Enrico Jörns
+- Hannu Lounento
+- Hitendra Prajapati
+- Ian Ray
+- Jan Luebbe
+- Jan Vermaete
+- Khem Raj
+- Lee Chee Yang
+- Lei Maohui
+- Lorenzo Arena
+- Marek Vasut
+- Marta Rybczynska
+- Martin Jansa
+- Martin Siegumfeldt
+- Michael Halstead
+- Michael Opdenacker
+- Ming Liu
+- Narpat Mali
+- Omkar Patil
+- Pablo Saavedra
+- Pavel Zhukov
+- Peter Kjellerstedt
+- Peter Marko
+- Qiu Tingting
+- Quentin Schulz
+- Randolph Sapp
+- Randy MacLeod
+- Ranjitsinh Rathod
+- Richard Purdie
+- Riyaz Khan
+- Sakib Sajal
+- Sanjay Chitroda
+- Soumya Sambu
+- Steve Sakoman
+- Thomas Roos
+- Tom Hochstein
+- Vivek Kumbhar
+- Wang Mingyu
+- Yogita Urade
+- Zoltan Boszormenyi
+
+
+Repositories / Downloads for Yocto-4.0.11
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+poky
+
+- Repository Location: :yocto_git:`/poky`
+- Branch: :yocto_git:`kirkstone </poky/log/?h=kirkstone>`
+- Tag: :yocto_git:`yocto-4.0.11 </poky/log/?h=yocto-4.0.11>`
+- Git Revision: :yocto_git:`fc697fe87412b9b179ae3a68d266ace85bb1fcc6 </poky/commit/?id=fc697fe87412b9b179ae3a68d266ace85bb1fcc6>`
+- Release Artefact: poky-fc697fe87412b9b179ae3a68d266ace85bb1fcc6
+- sha: d42ab1b76b9d8ab164d86dc0882c908658f6b5be0742b13a71531068f6a5ee98
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.0.11/poky-fc697fe87412b9b179ae3a68d266ace85bb1fcc6.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.0.11/poky-fc697fe87412b9b179ae3a68d266ace85bb1fcc6.tar.bz2
+
+openembedded-core
+
+- Repository Location: :oe_git:`/openembedded-core`
+- Branch: :oe_git:`kirkstone </openembedded-core/log/?h=kirkstone>`
+- Tag: :oe_git:`yocto-4.0.11 </openembedded-core/log/?h=yocto-4.0.11>`
+- Git Revision: :oe_git:`7949e786cf8e50f716ff1f1c4797136637205e0c </openembedded-core/commit/?id=7949e786cf8e50f716ff1f1c4797136637205e0c>`
+- Release Artefact: oecore-7949e786cf8e50f716ff1f1c4797136637205e0c
+- sha: 3bda3f7d15961bad5490faf3194709528591a97564b5eae3da7345b63be20334
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.0.11/oecore-7949e786cf8e50f716ff1f1c4797136637205e0c.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.0.11/oecore-7949e786cf8e50f716ff1f1c4797136637205e0c.tar.bz2
+
+meta-mingw
+
+- Repository Location: :yocto_git:`/meta-mingw`
+- Branch: :yocto_git:`kirkstone </meta-mingw/log/?h=kirkstone>`
+- Tag: :yocto_git:`yocto-4.0.11 </meta-mingw/log/?h=yocto-4.0.11>`
+- Git Revision: :yocto_git:`a90614a6498c3345704e9611f2842eb933dc51c1 </meta-mingw/commit/?id=a90614a6498c3345704e9611f2842eb933dc51c1>`
+- Release Artefact: meta-mingw-a90614a6498c3345704e9611f2842eb933dc51c1
+- sha: 49f9900bfbbc1c68136f8115b314e95d0b7f6be75edf36a75d9bcd1cca7c6302
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.0.11/meta-mingw-a90614a6498c3345704e9611f2842eb933dc51c1.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.0.11/meta-mingw-a90614a6498c3345704e9611f2842eb933dc51c1.tar.bz2
+
+meta-gplv2
+
+- Repository Location: :yocto_git:`/meta-gplv2`
+- Branch: :yocto_git:`kirkstone </meta-gplv2/log/?h=kirkstone>`
+- Tag: :yocto_git:`yocto-4.0.11 </meta-gplv2/log/?h=yocto-4.0.11>`
+- Git Revision: :yocto_git:`d2f8b5cdb285b72a4ed93450f6703ca27aa42e8a </meta-gplv2/commit/?id=d2f8b5cdb285b72a4ed93450f6703ca27aa42e8a>`
+- Release Artefact: meta-gplv2-d2f8b5cdb285b72a4ed93450f6703ca27aa42e8a
+- sha: c386f59f8a672747dc3d0be1d4234b6039273d0e57933eb87caa20f56b9cca6d
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.0.11/meta-gplv2-d2f8b5cdb285b72a4ed93450f6703ca27aa42e8a.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.0.11/meta-gplv2-d2f8b5cdb285b72a4ed93450f6703ca27aa42e8a.tar.bz2
+
+bitbake
+
+- Repository Location: :oe_git:`/bitbake`
+- Branch: :oe_git:`2.0 </bitbake/log/?h=2.0>`
+- Tag: :oe_git:`yocto-4.0.11 </bitbake/log/?h=yocto-4.0.11>`
+- Git Revision: :oe_git:`0c6f86b60cfba67c20733516957c0a654eb2b44c </bitbake/commit/?id=0c6f86b60cfba67c20733516957c0a654eb2b44c>`
+- Release Artefact: bitbake-0c6f86b60cfba67c20733516957c0a654eb2b44c
+- sha: 4caa94ee4d644017b0cc51b702e330191677f7d179018cbcec8b1793949ebc74
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.0.11/bitbake-0c6f86b60cfba67c20733516957c0a654eb2b44c.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.0.11/bitbake-0c6f86b60cfba67c20733516957c0a654eb2b44c.tar.bz2
+
+yocto-docs
+
+- Repository Location: :yocto_git:`/yocto-docs`
+- Branch: :yocto_git:`kirkstone </yocto-docs/log/?h=kirkstone>`
+- Tag: :yocto_git:`yocto-4.0.11 </yocto-docs/log/?h=yocto-4.0.11>`
+- Git Revision: :yocto_git:`6d16d2bde0aa32276a035ee49703e6eea7c7b29a </yocto-docs/commit/?id=6d16d2bde0aa32276a035ee49703e6eea7c7b29a>`
+
diff --git a/documentation/migration-guides/release-notes-4.0.12.rst b/documentation/migration-guides/release-notes-4.0.12.rst
new file mode 100644
index 0000000000..0ea92a453d
--- /dev/null
+++ b/documentation/migration-guides/release-notes-4.0.12.rst
@@ -0,0 +1,277 @@
+.. SPDX-License-Identifier: CC-BY-SA-2.0-UK
+
+Release notes for Yocto-4.0.12 (Kirkstone)
+------------------------------------------
+
+Security Fixes in Yocto-4.0.12
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+- bind: Fix :cve:`2023-2828` and :cve:`2023-2911`
+- cups: Fix :cve:`2023-34241`
+- curl: Added :cve:`2023-28320` Follow-up patch
+- dbus: Fix :cve:`2023-34969`
+- dmidecode: fix :cve:`2023-30630`
+- ghostscript: fix :cve:`2023-36664`
+- go: fix :cve_mitre:`2023-24531`, :cve:`2023-24536`, :cve:`2023-29400`, :cve:`2023-29402`, :cve:`2023-29404`, :cve:`2023-29405` and :cve:`2023-29406`
+- libarchive: Ignore :cve:`2023-30571`
+- libcap: Fix :cve:`2023-2602` and :cve:`2023-2603`
+- libjpeg-turbo: Fix :cve:`2023-2804`
+- libpcre2: Fix :cve:`2022-41409`
+- libtiff: fix :cve:`2023-26965`
+- libwebp: Fix :cve:`2023-1999`
+- libx11: Fix :cve:`2023-3138`
+- libxpm: Fix :cve:`2022-44617`
+- ninja: Ignore :cve:`2021-4336`
+- openssh: Fix :cve:`2023-38408`
+- openssl: Fix :cve:`2023-2975`, :cve:`2023-3446` and :cve:`2023-3817`
+- perl: Fix :cve:`2023-31486`
+- python3: Ignore :cve:`2023-36632`
+- qemu: Fix :cve:`2023-0330`, :cve_mitre:`2023-2861`, :cve_mitre:`2023-3255` and :cve_mitre:`2023-3301`
+- sqlite3: Fix :cve:`2023-36191`
+- tiff: Fix :cve:`2023-0795`, :cve:`2023-0796`, :cve:`2023-0797`, :cve:`2023-0798`, :cve:`2023-0799`, :cve:`2023-25433`, :cve:`2023-25434` and :cve:`2023-25435`
+- vim: :cve:`2023-2609` and :cve:`2023-2610`
+
+
+Fixes in Yocto-4.0.12
+~~~~~~~~~~~~~~~~~~~~~
+
+- babeltrace2: Always use BFD linker when building tests with ld-is-lld distro feature
+- babeltrace2: upgrade to 2.0.5
+- bitbake.conf: add unzstd in :term:`HOSTTOOLS`
+- bitbake: bitbake-layers: initialize tinfoil before registering command line arguments
+- bitbake: runqueue: Fix deferred task/multiconfig race issue
+- blktrace: ask for python3 specifically
+- build-appliance-image: Update to kirkstone head revision
+- cmake: Fix CMAKE_SYSTEM_PROCESSOR setting for SDK
+- connman: fix warning by specifying runstatedir at configure time
+- cpio: Replace fix wrong CRC with ASCII CRC for large files with upstream backport
+- cve-update-nvd2-native: actually use API keys
+- cve-update-nvd2-native: always pass str for json.loads()
+- cve-update-nvd2-native: fix cvssV3 metrics
+- cve-update-nvd2-native: handle all configuration nodes, not just first
+- cve-update-nvd2-native: increase retry count
+- cve-update-nvd2-native: log a little more
+- cve-update-nvd2-native: retry all errors and sleep between retries
+- cve-update-nvd2-native: use exact times, don't truncate
+- dbus: upgrade to 1.14.8
+- devtool: Fix the wrong variable in srcuri_entry
+- diffutils: upgrade to 3.10
+- docs: ref-manual: terms: fix typos in :term:`SPDX` term
+- fribidi: upgrade to 1.0.13
+- gcc: upgrade to v11.4
+- gcc-testsuite: Fix ppc cpu specification
+- gcc: don't pass --enable-standard-branch-protection
+- gcc: fix runpath errors in cc1 binary
+- grub: submit determinism.patch upstream
+- image_types: Fix reproducible builds for initramfs and UKI img
+- kernel: add missing path to search for debug files
+- kmod: remove unused ptest.patch
+- layer.conf: Add missing dependency exclusion
+- libassuan: upgrade to 2.5.6
+- libksba: upgrade to 1.6.4
+- libpng: Add ptest for libpng
+- libxcrypt: fix build with perl-5.38 and use master branch
+- libxcrypt: fix hard-coded ".so" extension
+- libxpm: upgrade to 3.5.16
+- linux-firmware: upgrade to 20230515
+- linux-yocto/5.10: cfg: fix DECNET configuration warning
+- linux-yocto/5.10: update to v5.10.185
+- linux-yocto/5.15: cfg: fix DECNET configuration warning
+- linux-yocto/5.15: update to v5.15.120
+- logrotate: Do not create logrotate.status file
+- lttng-ust: upgrade to 2.13.6
+- machine/arch-arm64: add -mbranch-protection=standard
+- maintainers.inc: correct Carlos Rafael Giani's email address
+- maintainers.inc: correct unassigned entries
+- maintainers.inc: unassign Adrian Bunk from wireless-regdb
+- maintainers.inc: unassign Alistair Francis from opensbi
+- maintainers.inc: unassign Andreas Müller from itstool entry
+- maintainers.inc: unassign Pascal Bach from cmake entry
+- maintainers.inc: unassign Ricardo Neri from ovmf
+- maintainers.inc: unassign Richard Weinberger from erofs-utils entry
+- mdadm: fix 07revert-inplace ptest
+- mdadm: fix segfaults when running ptests
+- mdadm: fix util-linux ptest dependency
+- mdadm: skip running known broken ptests
+- meson.bbclass: Point to llvm-config from native sysroot
+- meta: lib: oe: npm_registry: Add more safe caracters
+- migration-guides: add release notes for 4.0.11
+- minicom: remove unused patch files
+- mobile-broadband-provider-info: upgrade to 20230416
+- oe-depends-dot: Handle new format for task-depends.dot
+- oeqa/runtime/cases/rpm: fix wait_for_no_process_for_user failure case
+- oeqa/selftest/bbtests: add non-existent prefile/postfile tests
+- oeqa/selftest/devtool: add unit test for "devtool add -b"
+- openssl: Upgrade to 3.0.10
+- openssl: add PERLEXTERNAL path to test its existence
+- openssl: use a glob on the PERLEXTERNAL to track updates on the path
+- package.bbclass: moving field data process before variable process in process_pkgconfig
+- pm-utils: fix multilib conflictions
+- poky.conf: bump version for 4.0.12
+- psmisc: Set :term:`ALTERNATIVE` for pstree to resolve conflict with busybox
+- pybootchartgui: show elapsed time for each task
+- python3: fix missing comma in get_module_deps3.py
+- python3: upgrade to 3.10.12
+- recipetool: Fix inherit in created -native* recipes
+- ref-manual: add LTS and Mixin terms
+- ref-manual: document image-specific variant of :term:`INCOMPATIBLE_LICENSE`
+- ref-manual: release-process: update for LTS releases
+- rust-llvm: backport a fix for build with gcc-13
+- scripts/runqemu: allocate unfsd ports in a way that doesn't race or clash with unrelated processes
+- scripts/runqemu: split lock dir creation into a reusable function
+- sdk.py: error out when moving file fails
+- sdk.py: fix moving dnf contents
+- selftest reproducible.py: support different build targets
+- selftest/license: Exclude from world
+- selftest/reproducible: Allow chose the package manager
+- serf: upgrade to 1.3.10
+- strace: Disable failing test
+- strace: Merge two similar patches
+- strace: Update patches/tests with upstream fixes
+- sysfsutils: fetch a supported fork from github
+- systemd-systemctl: fix errors in instance name expansion
+- systemd: Backport nspawn: make sure host root can write to the uidmapped mounts we prepare for the container payload
+- tzdata: upgrade to 2023c
+- uboot-extlinux-config.bbclass: fix old override syntax in comment
+- unzip: fix configure check for cross compilation
+- useradd-staticids.bbclass: improve error message
+- util-linux: add alternative links for ipcs,ipcrm
+- v86d: Improve kernel dependency
+- vim: upgrade to 9.0.1592
+- wget: upgrade to 1.21.4
+- wic: Add dependencies for erofs-utils
+- wireless-regdb: upgrade to 2023.05.03
+- xdpyinfo: upgrade to 1.3.4
+- zip: fix configure check by using _Static_assert
+
+
+Known Issues in Yocto-4.0.12
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+- N/A
+
+
+Contributors to Yocto-4.0.12
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+- Alberto Planas
+- Alexander Kanavin
+- Alexander Sverdlin
+- Andrej Valek
+- Archana Polampalli
+- BELOUARGA Mohamed
+- Benjamin Bouvier
+- Bruce Ashfield
+- Charlie Wu
+- Chen Qi
+- Etienne Cordonnier
+- Fabien Mahot
+- Frieder Paape
+- Frieder Schrempf
+- Heiko Thole
+- Hitendra Prajapati
+- Jermain Horsman
+- Jose Quaresma
+- Kai Kang
+- Khem Raj
+- Lee Chee Yang
+- Marc Ferland
+- Marek Vasut
+- Martin Jansa
+- Mauro Queiros
+- Michael Opdenacker
+- Mikko Rapeli
+- Nikhil R
+- Ovidiu Panait
+- Peter Marko
+- Poonam Jadhav
+- Quentin Schulz
+- Richard Purdie
+- Ross Burton
+- Rusty Howell
+- Sakib Sajal
+- Soumya Sambu
+- Steve Sakoman
+- Sundeep KOKKONDA
+- Tim Orling
+- Tom Hochstein
+- Trevor Gamblin
+- Vijay Anusuri
+- Vivek Kumbhar
+- Wang Mingyu
+- Xiangyu Chen
+- Yoann Congal
+- Yogita Urade
+- Yuta Hayama
+
+
+Repositories / Downloads for Yocto-4.0.12
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+poky
+
+- Repository Location: :yocto_git:`/poky`
+- Branch: :yocto_git:`kirkstone </poky/log/?h=kirkstone>`
+- Tag: :yocto_git:`yocto-4.0.12 </poky/log/?h=yocto-4.0.12>`
+- Git Revision: :yocto_git:`d6b8790370500b99ca11f0d8a05c39b661ab2ba6 </poky/commit/?id=d6b8790370500b99ca11f0d8a05c39b661ab2ba6>`
+- Release Artefact: poky-d6b8790370500b99ca11f0d8a05c39b661ab2ba6
+- sha: 35f0390e0c5a12f403ed471c0b1254c13cbb9d7c7b46e5a3538e63e36c1ac280
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.0.12/poky-d6b8790370500b99ca11f0d8a05c39b661ab2ba6.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.0.12/poky-d6b8790370500b99ca11f0d8a05c39b661ab2ba6.tar.bz2
+
+openembedded-core
+
+- Repository Location: :oe_git:`/openembedded-core`
+- Branch: :oe_git:`kirkstone </openembedded-core/log/?h=kirkstone>`
+- Tag: :oe_git:`yocto-4.0.12 </openembedded-core/log/?h=yocto-4.0.12>`
+- Git Revision: :oe_git:`e1a604db8d2cf8782038b4016cc2e2052467333b </openembedded-core/commit/?id=e1a604db8d2cf8782038b4016cc2e2052467333b>`
+- Release Artefact: oecore-e1a604db8d2cf8782038b4016cc2e2052467333b
+- sha: 8b302eb3f3ffe5643f88bc6e4ae8f9a5cda63544d67e04637ecc4197e9750a1d
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.0.12/oecore-e1a604db8d2cf8782038b4016cc2e2052467333b.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.0.12/oecore-e1a604db8d2cf8782038b4016cc2e2052467333b.tar.bz2
+
+meta-mingw
+
+- Repository Location: :yocto_git:`/meta-mingw`
+- Branch: :yocto_git:`kirkstone </meta-mingw/log/?h=kirkstone>`
+- Tag: :yocto_git:`yocto-4.0.12 </meta-mingw/log/?h=yocto-4.0.12>`
+- Git Revision: :yocto_git:`a90614a6498c3345704e9611f2842eb933dc51c1 </meta-mingw/commit/?id=a90614a6498c3345704e9611f2842eb933dc51c1>`
+- Release Artefact: meta-mingw-a90614a6498c3345704e9611f2842eb933dc51c1
+- sha: 49f9900bfbbc1c68136f8115b314e95d0b7f6be75edf36a75d9bcd1cca7c6302
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.0.12/meta-mingw-a90614a6498c3345704e9611f2842eb933dc51c1.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.0.12/meta-mingw-a90614a6498c3345704e9611f2842eb933dc51c1.tar.bz2
+
+meta-gplv2
+
+- Repository Location: :yocto_git:`/meta-gplv2`
+- Branch: :yocto_git:`kirkstone </meta-gplv2/log/?h=kirkstone>`
+- Tag: :yocto_git:`yocto-4.0.12 </meta-gplv2/log/?h=yocto-4.0.12>`
+- Git Revision: :yocto_git:`d2f8b5cdb285b72a4ed93450f6703ca27aa42e8a </meta-gplv2/commit/?id=d2f8b5cdb285b72a4ed93450f6703ca27aa42e8a>`
+- Release Artefact: meta-gplv2-d2f8b5cdb285b72a4ed93450f6703ca27aa42e8a
+- sha: c386f59f8a672747dc3d0be1d4234b6039273d0e57933eb87caa20f56b9cca6d
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.0.12/meta-gplv2-d2f8b5cdb285b72a4ed93450f6703ca27aa42e8a.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.0.12/meta-gplv2-d2f8b5cdb285b72a4ed93450f6703ca27aa42e8a.tar.bz2
+
+bitbake
+
+- Repository Location: :oe_git:`/bitbake`
+- Branch: :oe_git:`2.0 </bitbake/log/?h=2.0>`
+- Tag: :oe_git:`yocto-4.0.12 </bitbake/log/?h=yocto-4.0.12>`
+- Git Revision: :oe_git:`41b6684489d0261753344956042be2cc4adb0159 </bitbake/commit/?id=41b6684489d0261753344956042be2cc4adb0159>`
+- Release Artefact: bitbake-41b6684489d0261753344956042be2cc4adb0159
+- sha: efa2b1c4d0be115ed3960750d1e4ed958771b2db6d7baee2d13ad386589376e8
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.0.12/bitbake-41b6684489d0261753344956042be2cc4adb0159.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.0.12/bitbake-41b6684489d0261753344956042be2cc4adb0159.tar.bz2
+
+yocto-docs
+
+- Repository Location: :yocto_git:`/yocto-docs`
+- Branch: :yocto_git:`kirkstone </yocto-docs/log/?h=kirkstone>`
+- Tag: :yocto_git:`yocto-4.0.12 </yocto-docs/log/?h=yocto-4.0.12>`
+- Git Revision: :yocto_git:`4dfef81ac6164764c6541e39a9fef81d49227096 </yocto-docs/commit/?id=4dfef81ac6164764c6541e39a9fef81d49227096>`
+
diff --git a/documentation/migration-guides/release-notes-4.0.13.rst b/documentation/migration-guides/release-notes-4.0.13.rst
new file mode 100644
index 0000000000..3c096c356f
--- /dev/null
+++ b/documentation/migration-guides/release-notes-4.0.13.rst
@@ -0,0 +1,271 @@
+.. SPDX-License-Identifier: CC-BY-SA-2.0-UK
+
+Release notes for Yocto-4.0.13 (Kirkstone)
+------------------------------------------
+
+Security Fixes in Yocto-4.0.13
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+- bind: Fix :cve:`2023-2829`
+- binutils: Fix :cve:`2022-48065`
+- busybox: Fix :cve:`2022-48174`
+- cups: Fix :cve:`2023-32360`
+- curl: Fix :cve:`2023-32001`
+- dmidecode: Fix :cve:`2023-30630`
+- dropbear: Fix :cve:`2023-36328`
+- ffmpeg: Ignored :cve:`2023-39018`
+- file: Fix :cve:`2022-48554`
+- flac: Fix :cve:`2020-22219`
+- gcc: Fix :cve:`2023-4039`
+- gdb: Fix :cve:`2023-39128`
+- ghostscript: Fix :cve:`2023-38559`
+- glib-2.0: Fix :cve:`2023-29499`, :cve:`2023-32611`, :cve:`2023-32636`, :cve:`2023-32643` and :cve:`2023-32665`
+- go: Fix :cve:`2023-29409` and :cve:`2023-39319`
+- gstreamer1.0-plugins-bad: Fix :cve_mitre:`2023-37329`
+- gstreamer1.0-plugins-base: Fix :cve_mitre:`2023-37328`
+- gstreamer1.0-plugins-good: Fix :cve_mitre:`2023-37327`
+- inetutils: Fix :cve:`2023-40303`
+- json-c: Fix :cve:`2021-32292`
+- librsvg: Fix :cve:`2023-38633`
+- libssh2: Fix :cve:`2020-22218`
+- libtiff: Fix :cve:`2023-26966`
+- libxml2: Fix :cve:`2023-39615`
+- linux-yocto/5.15: Ignore :cve:`2003-1604`, :cve:`2004-0230`, :cve:`2006-3635`, :cve:`2006-5331`, :cve:`2006-6128`, :cve:`2007-4774`, :cve:`2007-6761`, :cve:`2007-6762`, :cve:`2008-7316`, :cve:`2009-2692`, :cve:`2010-0008`, :cve:`2010-3432`, :cve:`2010-4648`, :cve:`2010-5313`, :cve:`2010-5328`, :cve:`2010-5329`, :cve:`2010-5331`, :cve:`2010-5332`, :cve:`2011-4098`, :cve:`2011-4131`, :cve:`2011-4915`, :cve:`2011-5321`, :cve:`2011-5327`, :cve:`2012-0957`, :cve:`2012-2119`, :cve:`2012-2136`, :cve:`2012-2137`, :cve:`2012-2313`, :cve:`2012-2319`, :cve:`2012-2372`, :cve:`2012-2375`, :cve:`2012-2390`, :cve:`2012-2669`, :cve:`2012-2744`, :cve:`2012-2745`, :cve:`2012-3364`, :cve:`2012-3375`, :cve:`2012-3400`, :cve:`2012-3412`, :cve:`2012-3430`, :cve:`2012-3510`, :cve:`2012-3511`, :cve:`2012-3520`, :cve:`2012-3552`, :cve:`2012-4398`, :cve:`2012-4444`, :cve:`2012-4461`, :cve:`2012-4467`, :cve:`2012-4508`, :cve:`2012-4530`, :cve:`2012-4565`, :cve:`2012-5374`, :cve:`2012-5375`, :cve:`2012-5517`, :cve:`2012-6536`, :cve:`2012-6537`, :cve:`2012-6538`, :cve:`2012-6539`, :cve:`2012-6540`, :cve:`2012-6541`, :cve:`2012-6542`, :cve:`2012-6543`, :cve:`2012-6544`, :cve:`2012-6545`, :cve:`2012-6546`, :cve:`2012-6547`, :cve:`2012-6548`, :cve:`2012-6549`, :cve:`2012-6638`, :cve:`2012-6647`, :cve:`2012-6657`, :cve:`2012-6689`, :cve:`2012-6701`, :cve:`2012-6703`, :cve:`2012-6704`, :cve:`2012-6712`, :cve:`2013-0160`, :cve:`2013-0190`, :cve:`2013-0216`, :cve:`2013-0217`, :cve:`2013-0228`, :cve:`2013-0231`, :cve:`2013-0268`, :cve:`2013-0290`, :cve:`2013-0309`, :cve:`2013-0310`, :cve:`2013-0311`, :cve:`2013-0313`, :cve:`2013-0343`, :cve:`2013-0349`, :cve:`2013-0871`, :cve:`2013-0913`, :cve:`2013-0914`, :cve:`2013-1059`, :cve:`2013-1763`, :cve:`2013-1767`, :cve:`2013-1772`, :cve:`2013-1773`, :cve:`2013-1774`, :cve:`2013-1792`, :cve:`2013-1796`, :cve:`2013-1797`, :cve:`2013-1798`, :cve:`2013-1819`, :cve:`2013-1826`, :cve:`2013-1827`, :cve:`2013-1828`, :cve:`2013-1848`, :cve:`2013-1858`, :cve:`2013-1860`, :cve:`2013-1928`, :cve:`2013-1929`, :cve:`2013-1943`, :cve:`2013-1956`, :cve:`2013-1957`, :cve:`2013-1958`, :cve:`2013-1959`, :cve:`2013-1979`, :cve:`2013-2015`, :cve:`2013-2017`, :cve:`2013-2058`, :cve:`2013-2094`, :cve:`2013-2128`, :cve:`2013-2140`, :cve:`2013-2141`, :cve:`2013-2146`, :cve:`2013-2147`, :cve:`2013-2148`, :cve:`2013-2164`, :cve:`2013-2206`, :cve:`2013-2232`, :cve:`2013-2234`, :cve:`2013-2237`, :cve:`2013-2546`, :cve:`2013-2547`, :cve:`2013-2548`, :cve:`2013-2596`, :cve:`2013-2634`, :cve:`2013-2635`, :cve:`2013-2636`, :cve:`2013-2850`, :cve:`2013-2851`, :cve:`2013-2852`, :cve:`2013-2888`, :cve:`2013-2889`, :cve:`2013-2890`, :cve:`2013-2891`, :cve:`2013-2892`, :cve:`2013-2893`, :cve:`2013-2894`, :cve:`2013-2895`, :cve:`2013-2896`, :cve:`2013-2897`, :cve:`2013-2898`, :cve:`2013-2899`, :cve:`2013-2929`, :cve:`2013-2930`, :cve:`2013-3076`, :cve:`2013-3222`, :cve:`2013-3223`, :cve:`2013-3224`, :cve:`2013-3225`, :cve:`2013-3226`, :cve:`2013-3227`, :cve:`2013-3228`, :cve:`2013-3229`, :cve:`2013-3230`, :cve:`2013-3231`, :cve:`2013-3232`, :cve:`2013-3233`, :cve:`2013-3234`, :cve:`2013-3235`, :cve:`2013-3236`, :cve:`2013-3237`, :cve:`2013-3301`, :cve:`2013-3302`, :cve:`2013-4125`, :cve:`2013-4127`, :cve:`2013-4129`, :cve:`2013-4162`, :cve:`2013-4163`, :cve:`2013-4205`, :cve:`2013-4220`, :cve:`2013-4247`, :cve:`2013-4254`, :cve:`2013-4270`, :cve:`2013-4299`, :cve:`2013-4300`, :cve:`2013-4312`, :cve:`2013-4343`, :cve:`2013-4345`, :cve:`2013-4348`, :cve:`2013-4350`, :cve:`2013-4387`, :cve:`2013-4470`, :cve:`2013-4483`, :cve:`2013-4511`, :cve:`2013-4512`, :cve:`2013-4513`, :cve:`2013-4514`, :cve:`2013-4515`, :cve:`2013-4516`, :cve:`2013-4563`, :cve:`2013-4579`, :cve:`2013-4587`, :cve:`2013-4588`, :cve:`2013-4591`, :cve:`2013-4592`, :cve:`2013-5634`, :cve:`2013-6282`, :cve:`2013-6367`, :cve:`2013-6368`, :cve:`2013-6376`, :cve:`2013-6378`, :cve:`2013-6380`, :cve:`2013-6381`, :cve:`2013-6382`, :cve:`2013-6383`, :cve:`2013-6431`, :cve:`2013-6432`, :cve:`2013-6885`, :cve:`2013-7026`, :cve:`2013-7027`, :cve:`2013-7263`, :cve:`2013-7264`, :cve:`2013-7265`, :cve:`2013-7266`, :cve:`2013-7267`, :cve:`2013-7268`, :cve:`2013-7269`, :cve:`2013-7270`, :cve:`2013-7271`, :cve:`2013-7281`, :cve:`2013-7339`, :cve:`2013-7348`, :cve:`2013-7421`, :cve:`2013-7446`, :cve:`2013-7470`, :cve:`2014-0038`, :cve:`2014-0049`, :cve:`2014-0055`, :cve:`2014-0069`, :cve:`2014-0077`, :cve:`2014-0100`, :cve:`2014-0101`, :cve:`2014-0102`, :cve:`2014-0131`, :cve:`2014-0155`, :cve:`2014-0181`, :cve:`2014-0196`, :cve:`2014-0203`, :cve:`2014-0205`, :cve:`2014-0206`, :cve:`2014-1438`, :cve:`2014-1444`, :cve:`2014-1445`, :cve:`2014-1446`, :cve:`2014-1690`, :cve:`2014-1737`, :cve:`2014-1738`, :cve:`2014-1739`, :cve:`2014-1874`, :cve:`2014-2038`, :cve:`2014-2039`, :cve:`2014-2309`, :cve:`2014-2523`, :cve:`2014-2568`, :cve:`2014-2580`, :cve:`2014-2672`, :cve:`2014-2673`, :cve:`2014-2678`, :cve:`2014-2706`, :cve:`2014-2739`, :cve:`2014-2851`, :cve:`2014-2889`, :cve:`2014-3122`, :cve:`2014-3144`, :cve:`2014-3145`, :cve:`2014-3153`, :cve:`2014-3180`, :cve:`2014-3181`, :cve:`2014-3182`, :cve:`2014-3183`, :cve:`2014-3184`, :cve:`2014-3185`, :cve:`2014-3186`, :cve:`2014-3534`, :cve:`2014-3535`, :cve:`2014-3601`, :cve:`2014-3610`, :cve:`2014-3611`, :cve:`2014-3631`, :cve:`2014-3645`, :cve:`2014-3646`, :cve:`2014-3647`, :cve:`2014-3673`, :cve:`2014-3687`, :cve:`2014-3688`, :cve:`2014-3690`, :cve:`2014-3917`, :cve:`2014-3940`, :cve:`2014-4014`, :cve:`2014-4027`, :cve:`2014-4157`, :cve:`2014-4171`, :cve:`2014-4508`, :cve:`2014-4608`, :cve:`2014-4611`, :cve:`2014-4652`, :cve:`2014-4653`, :cve:`2014-4654`, :cve:`2014-4655`, :cve:`2014-4656`, :cve:`2014-4667`, :cve:`2014-4699`, :cve:`2014-4943`, :cve:`2014-5045`, :cve:`2014-5077`, :cve:`2014-5206`, :cve:`2014-5207`, :cve:`2014-5471`, :cve:`2014-5472`, :cve:`2014-6410`, :cve:`2014-6416`, :cve:`2014-6417`, :cve:`2014-6418`, :cve:`2014-7145`, :cve:`2014-7283`, :cve:`2014-7284`, :cve:`2014-7822`, :cve:`2014-7825`, :cve:`2014-7826`, :cve:`2014-7841`, :cve:`2014-7842`, :cve:`2014-7843`, :cve:`2014-7970`, :cve:`2014-7975`, :cve:`2014-8086`, :cve:`2014-8133`, :cve:`2014-8134`, :cve:`2014-8159`, :cve:`2014-8160`, :cve:`2014-8171`, :cve:`2014-8172`, :cve:`2014-8173`, :cve:`2014-8369`, :cve:`2014-8480`, :cve:`2014-8481`, :cve:`2014-8559`, :cve:`2014-8709`, :cve:`2014-8884`, :cve:`2014-8989`, :cve:`2014-9090`, :cve:`2014-9322`, :cve:`2014-9419`, :cve:`2014-9420`, :cve:`2014-9428`, :cve:`2014-9529`, :cve:`2014-9584`, :cve:`2014-9585`, :cve:`2014-9644`, :cve:`2014-9683`, :cve:`2014-9710`, :cve:`2014-9715`, :cve:`2014-9717`, :cve:`2014-9728`, :cve:`2014-9729`, :cve:`2014-9730`, :cve:`2014-9731`, :cve:`2014-9803`, :cve:`2014-9870`, :cve:`2014-9888`, :cve:`2014-9895`, :cve:`2014-9903`, :cve:`2014-9904`, :cve:`2014-9914`, :cve:`2014-9922`, :cve:`2014-9940`, :cve:`2015-0239`, :cve:`2015-0274`, :cve:`2015-0275`, :cve:`2015-1333`, :cve:`2015-1339`, :cve:`2015-1350`, :cve:`2015-1420`, :cve:`2015-1421`, :cve:`2015-1465`, :cve:`2015-1573`, :cve:`2015-1593`, :cve:`2015-1805`, :cve:`2015-2041`, :cve:`2015-2042`, :cve:`2015-2150`, :cve:`2015-2666`, :cve:`2015-2672`, :cve:`2015-2686`, :cve:`2015-2830`, :cve:`2015-2922`, :cve:`2015-2925`, :cve:`2015-3212`, :cve:`2015-3214`, :cve:`2015-3288`, :cve:`2015-3290`, :cve:`2015-3291`, :cve:`2015-3331`, :cve:`2015-3339`, :cve:`2015-3636`, :cve:`2015-4001`, :cve:`2015-4002`, :cve:`2015-4003`, :cve:`2015-4004`, :cve:`2015-4036`, :cve:`2015-4167`, :cve:`2015-4170`, :cve:`2015-4176`, :cve:`2015-4177`, :cve:`2015-4178`, :cve:`2015-4692`, :cve:`2015-4700`, :cve:`2015-5156`, :cve:`2015-5157`, :cve:`2015-5257`, :cve:`2015-5283`, :cve:`2015-5307`, :cve:`2015-5327`, :cve:`2015-5364`, :cve:`2015-5366`, :cve:`2015-5697`, :cve:`2015-5706`, :cve:`2015-5707`, :cve:`2015-6252`, :cve:`2015-6526`, :cve:`2015-6937`, :cve:`2015-7509`, :cve:`2015-7513`, :cve:`2015-7515`, :cve:`2015-7550`, :cve:`2015-7566`, :cve:`2015-7613`, :cve:`2015-7799`, :cve:`2015-7833`, :cve:`2015-7872`, :cve:`2015-7884`, :cve:`2015-7885`, :cve:`2015-7990`, :cve:`2015-8104`, :cve:`2015-8215`, :cve:`2015-8324`, :cve:`2015-8374`, :cve:`2015-8539`, :cve:`2015-8543`, :cve:`2015-8550`, :cve:`2015-8551`, :cve:`2015-8552`, :cve:`2015-8553`, :cve:`2015-8569`, :cve:`2015-8575`, :cve:`2015-8660`, :cve:`2015-8709`, :cve:`2015-8746`, :cve:`2015-8767`, :cve:`2015-8785`, :cve:`2015-8787`, :cve:`2015-8812`, :cve:`2015-8816`, :cve:`2015-8830`, :cve:`2015-8839`, :cve:`2015-8844`, :cve:`2015-8845`, :cve:`2015-8950`, :cve:`2015-8952`, :cve:`2015-8953`, :cve:`2015-8955`, :cve:`2015-8956`, :cve:`2015-8961`, :cve:`2015-8962`, :cve:`2015-8963`, :cve:`2015-8964`, :cve:`2015-8966`, :cve:`2015-8967`, :cve:`2015-8970`, :cve:`2015-9004`, :cve:`2015-9016`, :cve:`2015-9289`, :cve:`2016-0617`, :cve:`2016-0723`, :cve:`2016-0728`, :cve:`2016-0758`, :cve:`2016-0821`, :cve:`2016-0823`, :cve:`2016-10044`, :cve:`2016-10088`, :cve:`2016-10147`, :cve:`2016-10150`, :cve:`2016-10153`, :cve:`2016-10154`, :cve:`2016-10200`, :cve:`2016-10208`, :cve:`2016-10229`, :cve:`2016-10318`, :cve:`2016-10723`, :cve:`2016-10741`, :cve:`2016-10764`, :cve:`2016-10905`, :cve:`2016-10906`, :cve:`2016-10907`, :cve:`2016-1237`, :cve:`2016-1575`, :cve:`2016-1576`, :cve:`2016-1583`, :cve:`2016-2053`, :cve:`2016-2069`, :cve:`2016-2070`, :cve:`2016-2085`, :cve:`2016-2117`, :cve:`2016-2143`, :cve:`2016-2184`, :cve:`2016-2185`, :cve:`2016-2186`, :cve:`2016-2187`, :cve:`2016-2188`, :cve:`2016-2383`, :cve:`2016-2384`, :cve:`2016-2543`, :cve:`2016-2544`, :cve:`2016-2545`, :cve:`2016-2546`, :cve:`2016-2547`, :cve:`2016-2548`, :cve:`2016-2549`, :cve:`2016-2550`, :cve:`2016-2782`, :cve:`2016-2847`, :cve:`2016-3044`, :cve:`2016-3070`, :cve:`2016-3134`, :cve:`2016-3135`, :cve:`2016-3136`, :cve:`2016-3137`, :cve:`2016-3138`, :cve:`2016-3139`, :cve:`2016-3140`, :cve:`2016-3156`, :cve:`2016-3157`, :cve:`2016-3672`, :cve:`2016-3689`, :cve:`2016-3713`, :cve:`2016-3841`, :cve:`2016-3857`, :cve:`2016-3951`, :cve:`2016-3955`, :cve:`2016-3961`, :cve:`2016-4440`, :cve:`2016-4470`, :cve:`2016-4482`, :cve:`2016-4485`, :cve:`2016-4486`, :cve:`2016-4557`, :cve:`2016-4558`, :cve:`2016-4565`, :cve:`2016-4568`, :cve:`2016-4569`, :cve:`2016-4578`, :cve:`2016-4580`, :cve:`2016-4581`, :cve:`2016-4794`, :cve:`2016-4805`, :cve:`2016-4913`, :cve:`2016-4951`, :cve:`2016-4997`, :cve:`2016-4998`, :cve:`2016-5195`, :cve:`2016-5243`, :cve:`2016-5244`, :cve:`2016-5400`, :cve:`2016-5412`, :cve:`2016-5696`, :cve:`2016-5728`, :cve:`2016-5828`, :cve:`2016-5829`, :cve:`2016-6130`, :cve:`2016-6136`, :cve:`2016-6156`, :cve:`2016-6162`, :cve:`2016-6187`, :cve:`2016-6197`, :cve:`2016-6198`, :cve:`2016-6213`, :cve:`2016-6327`, :cve:`2016-6480`, :cve:`2016-6516`, :cve:`2016-6786`, :cve:`2016-6787`, :cve:`2016-6828`, :cve:`2016-7039`, :cve:`2016-7042`, :cve:`2016-7097`, :cve:`2016-7117`, :cve:`2016-7425`, :cve:`2016-7910`, :cve:`2016-7911`, :cve:`2016-7912`, :cve:`2016-7913`, :cve:`2016-7914`, :cve:`2016-7915`, :cve:`2016-7916`, :cve:`2016-7917`, :cve:`2016-8399`, :cve:`2016-8405`, :cve:`2016-8630`, :cve:`2016-8632`, :cve:`2016-8633`, :cve:`2016-8636`, :cve:`2016-8645`, :cve:`2016-8646`, :cve:`2016-8650`, :cve:`2016-8655`, :cve:`2016-8658`, :cve:`2016-8666`, :cve:`2016-9083`, :cve:`2016-9084`, :cve:`2016-9120`, :cve:`2016-9178`, :cve:`2016-9191`, :cve:`2016-9313`, :cve:`2016-9555`, :cve:`2016-9576`, :cve:`2016-9588`, :cve:`2016-9604`, :cve:`2016-9685`, :cve:`2016-9754`, :cve:`2016-9755`, :cve:`2016-9756`, :cve:`2016-9777`, :cve:`2016-9793`, :cve:`2016-9794`, :cve:`2016-9806`, :cve:`2016-9919`, :cve:`2017-0605`, :cve:`2017-0627`, :cve:`2017-0750`, :cve:`2017-0786`, :cve:`2017-0861`, :cve:`2017-1000`, :cve:`2017-1000111`, :cve:`2017-1000112`, :cve:`2017-1000251`, :cve:`2017-1000252`, :cve:`2017-1000253`, :cve:`2017-1000255`, :cve:`2017-1000363`, :cve:`2017-1000364`, :cve:`2017-1000365`, :cve:`2017-1000370`, :cve:`2017-1000371`, :cve:`2017-1000379`, :cve:`2017-1000380`, :cve:`2017-1000405`, :cve:`2017-1000407`, :cve:`2017-1000410`, :cve:`2017-10661`, :cve:`2017-10662`, :cve:`2017-10663`, :cve:`2017-10810`, :cve:`2017-10911`, :cve:`2017-11089`, :cve:`2017-11176`, :cve:`2017-11472`, :cve:`2017-11473`, :cve:`2017-11600`, :cve:`2017-12134`, :cve:`2017-12146`, :cve:`2017-12153`, :cve:`2017-12154`, :cve:`2017-12168`, :cve:`2017-12188`, :cve:`2017-12190`, :cve:`2017-12192`, :cve:`2017-12193`, :cve:`2017-12762`, :cve:`2017-13080`, :cve:`2017-13166`, :cve:`2017-13167`, :cve:`2017-13168`, :cve:`2017-13215`, :cve:`2017-13216`, :cve:`2017-13220`, :cve:`2017-13305`, :cve:`2017-13686`, :cve:`2017-13695`, :cve:`2017-13715`, :cve:`2017-14051`, :cve:`2017-14106`, :cve:`2017-14140`, :cve:`2017-14156`, :cve:`2017-14340`, :cve:`2017-14489`, :cve:`2017-14497`, :cve:`2017-14954`, :cve:`2017-14991`, :cve:`2017-15102`, :cve:`2017-15115`, :cve:`2017-15116`, :cve:`2017-15121`, :cve:`2017-15126`, :cve:`2017-15127`, :cve:`2017-15128`, :cve:`2017-15129`, :cve:`2017-15265`, :cve:`2017-15274`, :cve:`2017-15299`, :cve:`2017-15306`, :cve:`2017-15537`, :cve:`2017-15649`, :cve:`2017-15868`, :cve:`2017-15951`, :cve:`2017-16525`, :cve:`2017-16526`, :cve:`2017-16527`, :cve:`2017-16528`, :cve:`2017-16529`, :cve:`2017-16530`, :cve:`2017-16531`, :cve:`2017-16532`, :cve:`2017-16533`, :cve:`2017-16534`, :cve:`2017-16535`, :cve:`2017-16536`, :cve:`2017-16537`, :cve:`2017-16538`, :cve:`2017-16643`, :cve:`2017-16644`, :cve:`2017-16645`, :cve:`2017-16646`, :cve:`2017-16647`, :cve:`2017-16648`, :cve:`2017-16649`, :cve:`2017-16650`, :cve:`2017-16911`, :cve:`2017-16912`, :cve:`2017-16913`, :cve:`2017-16914`, :cve:`2017-16939`, :cve:`2017-16994`, :cve:`2017-16995`, :cve:`2017-16996`, :cve:`2017-17052`, :cve:`2017-17053`, :cve:`2017-17448`, :cve:`2017-17449`, :cve:`2017-17450`, :cve:`2017-17558`, :cve:`2017-17712`, :cve:`2017-17741`, :cve:`2017-17805`, :cve:`2017-17806`, :cve:`2017-17807`, :cve:`2017-17852`, :cve:`2017-17853`, :cve:`2017-17854`, :cve:`2017-17855`, :cve:`2017-17856`, :cve:`2017-17857`, :cve:`2017-17862`, :cve:`2017-17863`, :cve:`2017-17864`, :cve:`2017-17975`, :cve:`2017-18017`, :cve:`2017-18075`, :cve:`2017-18079`, :cve:`2017-18174`, :cve:`2017-18193`, :cve:`2017-18200`, :cve:`2017-18202`, :cve:`2017-18203`, :cve:`2017-18204`, :cve:`2017-18208`, :cve:`2017-18216`, :cve:`2017-18218`, :cve:`2017-18221`, :cve:`2017-18222`, :cve:`2017-18224`, :cve:`2017-18232`, :cve:`2017-18241`, :cve:`2017-18249`, :cve:`2017-18255`, :cve:`2017-18257`, :cve:`2017-18261`, :cve:`2017-18270`, :cve:`2017-18344`, :cve:`2017-18360`, :cve:`2017-18379`, :cve:`2017-18509`, :cve:`2017-18549`, :cve:`2017-18550`, :cve:`2017-18551`, :cve:`2017-18552`, :cve:`2017-18595`, :cve:`2017-2583`, :cve:`2017-2584`, :cve:`2017-2596`, :cve:`2017-2618`, :cve:`2017-2634`, :cve:`2017-2636`, :cve:`2017-2647`, :cve:`2017-2671`, :cve:`2017-5123`, :cve:`2017-5546`, :cve:`2017-5547`, :cve:`2017-5548`, :cve:`2017-5549`, :cve:`2017-5550`, :cve:`2017-5551`, :cve:`2017-5576`, :cve:`2017-5577`, :cve:`2017-5669`, :cve:`2017-5715`, :cve:`2017-5753`, :cve:`2017-5754`, :cve:`2017-5897`, :cve:`2017-5967`, :cve:`2017-5970`, :cve:`2017-5972`, :cve:`2017-5986`, :cve:`2017-6001`, :cve:`2017-6074`, :cve:`2017-6214`, :cve:`2017-6345`, :cve:`2017-6346`, :cve:`2017-6347`, :cve:`2017-6348`, :cve:`2017-6353`, :cve:`2017-6874`, :cve:`2017-6951`, :cve:`2017-7184`, :cve:`2017-7187`, :cve:`2017-7261`, :cve:`2017-7273`, :cve:`2017-7277`, :cve:`2017-7294`, :cve:`2017-7308`, :cve:`2017-7346`, :cve:`2017-7374`, :cve:`2017-7472`, :cve:`2017-7477`, :cve:`2017-7482`, :cve:`2017-7487`, :cve:`2017-7495`, :cve:`2017-7518`, :cve:`2017-7533`, :cve:`2017-7541`, :cve:`2017-7542`, :cve:`2017-7558`, :cve:`2017-7616`, :cve:`2017-7618`, :cve:`2017-7645`, :cve:`2017-7889`, :cve:`2017-7895`, :cve:`2017-7979`, :cve:`2017-8061`, :cve:`2017-8062`, :cve:`2017-8063`, :cve:`2017-8064`, :cve:`2017-8065`, :cve:`2017-8066`, :cve:`2017-8067`, :cve:`2017-8068`, :cve:`2017-8069`, :cve:`2017-8070`, :cve:`2017-8071`, :cve:`2017-8072`, :cve:`2017-8106`, :cve:`2017-8240`, :cve:`2017-8797`, :cve:`2017-8824`, :cve:`2017-8831`, :cve:`2017-8890`, :cve:`2017-8924`, :cve:`2017-8925`, :cve:`2017-9059`, :cve:`2017-9074`, :cve:`2017-9075`, :cve:`2017-9076`, :cve:`2017-9077`, :cve:`2017-9150`, :cve:`2017-9211`, :cve:`2017-9242`, :cve:`2017-9605`, :cve:`2017-9725`, :cve:`2017-9984`, :cve:`2017-9985`, :cve:`2017-9986`, :cve:`2018-1000004`, :cve:`2018-1000026`, :cve:`2018-1000028`, :cve:`2018-1000199`, :cve:`2018-1000200`, :cve:`2018-1000204`, :cve:`2018-10021`, :cve:`2018-10074`, :cve:`2018-10087`, :cve:`2018-10124`, :cve:`2018-10322`, :cve:`2018-10323`, :cve:`2018-1065`, :cve:`2018-1066`, :cve:`2018-10675`, :cve:`2018-1068`, :cve:`2018-10840`, :cve:`2018-10853`, :cve:`2018-1087`, :cve:`2018-10876`, :cve:`2018-10877`, :cve:`2018-10878`, :cve:`2018-10879`, :cve:`2018-10880`, :cve:`2018-10881`, :cve:`2018-10882`, :cve:`2018-10883`, :cve:`2018-10901`, :cve:`2018-10902`, :cve:`2018-1091`, :cve:`2018-1092`, :cve:`2018-1093`, :cve:`2018-10938`, :cve:`2018-1094`, :cve:`2018-10940`, :cve:`2018-1095`, :cve:`2018-1108`, :cve:`2018-1118`, :cve:`2018-1120`, :cve:`2018-11232`, :cve:`2018-1128`, :cve:`2018-1129`, :cve:`2018-1130`, :cve:`2018-11412`, :cve:`2018-11506`, :cve:`2018-11508`, :cve:`2018-12126`, :cve:`2018-12127`, :cve:`2018-12130`, :cve:`2018-12207`, :cve:`2018-12232`, :cve:`2018-12233`, :cve:`2018-12633`, :cve:`2018-12714`, :cve:`2018-12896`, :cve:`2018-12904`, :cve:`2018-13053`, :cve:`2018-13093`, :cve:`2018-13094`, :cve:`2018-13095`, :cve:`2018-13096`, :cve:`2018-13097`, :cve:`2018-13098`, :cve:`2018-13099`, :cve:`2018-13100`, :cve:`2018-13405`, :cve:`2018-13406`, :cve:`2018-14609`, :cve:`2018-14610`, :cve:`2018-14611`, :cve:`2018-14612`, :cve:`2018-14613`, :cve:`2018-14614`, :cve:`2018-14615`, :cve:`2018-14616`, :cve:`2018-14617`, :cve:`2018-14619`, :cve:`2018-14625`, :cve:`2018-14633`, :cve:`2018-14634`, :cve:`2018-14641`, :cve:`2018-14646`, :cve:`2018-14656`, :cve:`2018-14678`, :cve:`2018-14734`, :cve:`2018-15471`, :cve:`2018-15572`, :cve:`2018-15594`, :cve:`2018-16276`, :cve:`2018-16597`, :cve:`2018-16658`, :cve:`2018-16862`, :cve:`2018-16871`, :cve:`2018-16880`, :cve:`2018-16882`, :cve:`2018-16884`, :cve:`2018-17182`, :cve:`2018-17972`, :cve:`2018-18021`, :cve:`2018-18281`, :cve:`2018-18386`, :cve:`2018-18397`, :cve:`2018-18445`, :cve:`2018-18559`, :cve:`2018-18690`, :cve:`2018-18710`, :cve:`2018-18955`, :cve:`2018-19406`, :cve:`2018-19407`, :cve:`2018-19824`, :cve:`2018-19854`, :cve:`2018-19985`, :cve:`2018-20169`, :cve:`2018-20449`, :cve:`2018-20509`, :cve:`2018-20510`, :cve:`2018-20511`, :cve:`2018-20669`, :cve:`2018-20784`, :cve:`2018-20836`, :cve:`2018-20854`, :cve:`2018-20855`, :cve:`2018-20856`, :cve:`2018-20961`, :cve:`2018-20976`, :cve:`2018-21008`, :cve:`2018-25015`, :cve:`2018-25020`, :cve:`2018-3620`, :cve:`2018-3639`, :cve:`2018-3646`, :cve:`2018-3665`, :cve:`2018-3693`, :cve:`2018-5332`, :cve:`2018-5333`, :cve:`2018-5344`, :cve:`2018-5390`, :cve:`2018-5391`, :cve:`2018-5703`, :cve:`2018-5750`, :cve:`2018-5803`, :cve:`2018-5814`, :cve:`2018-5848`, :cve:`2018-5873`, :cve:`2018-5953`, :cve:`2018-5995`, :cve:`2018-6412`, :cve:`2018-6554`, :cve:`2018-6555`, :cve:`2018-6927`, :cve:`2018-7191`, :cve:`2018-7273`, :cve:`2018-7480`, :cve:`2018-7492`, :cve:`2018-7566`, :cve:`2018-7740`, :cve:`2018-7754`, :cve:`2018-7755`, :cve:`2018-7757`, :cve:`2018-7995`, :cve:`2018-8043`, :cve:`2018-8087`, :cve:`2018-8781`, :cve:`2018-8822`, :cve:`2018-8897`, :cve:`2018-9363`, :cve:`2018-9385`, :cve:`2018-9415`, :cve:`2018-9422`, :cve:`2018-9465`, :cve:`2018-9516`, :cve:`2018-9517`, :cve:`2018-9518`, :cve:`2018-9568`, :cve:`2019-0136`, :cve:`2019-0145`, :cve:`2019-0146`, :cve:`2019-0147`, :cve:`2019-0148`, :cve:`2019-0149`, :cve:`2019-0154`, :cve:`2019-0155`, :cve:`2019-10124`, :cve:`2019-10125`, :cve:`2019-10126`, :cve:`2019-10142`, :cve:`2019-10207`, :cve:`2019-10220`, :cve:`2019-10638`, :cve:`2019-10639`, :cve:`2019-11085`, :cve:`2019-11091`, :cve:`2019-11135`, :cve:`2019-11190`, :cve:`2019-11191`, :cve:`2019-1125`, :cve:`2019-11477`, :cve:`2019-11478`, :cve:`2019-11479`, :cve:`2019-11486`, :cve:`2019-11487`, :cve:`2019-11599`, :cve:`2019-11683`, :cve:`2019-11810`, :cve:`2019-11811`, :cve:`2019-11815`, :cve:`2019-11833`, :cve:`2019-11884`, :cve:`2019-12378`, :cve:`2019-12379`, :cve:`2019-12380`, :cve:`2019-12381`, :cve:`2019-12382`, :cve:`2019-12454`, :cve:`2019-12455`, :cve:`2019-12614`, :cve:`2019-12615`, :cve:`2019-12817`, :cve:`2019-12818`, :cve:`2019-12819`, :cve:`2019-12881`, :cve:`2019-12984`, :cve:`2019-13233`, :cve:`2019-13272`, :cve:`2019-13631`, :cve:`2019-13648`, :cve:`2019-14283`, :cve:`2019-14284`, :cve:`2019-14615`, :cve:`2019-14763`, :cve:`2019-14814`, :cve:`2019-14815`, :cve:`2019-14816`, :cve:`2019-14821`, :cve:`2019-14835`, :cve:`2019-14895`, :cve:`2019-14896`, :cve:`2019-14897`, :cve:`2019-14901`, :cve:`2019-15030`, :cve:`2019-15031`, :cve:`2019-15090`, :cve:`2019-15098`, :cve:`2019-15099`, :cve:`2019-15117`, :cve:`2019-15118`, :cve:`2019-15211`, :cve:`2019-15212`, :cve:`2019-15213`, :cve:`2019-15214`, :cve:`2019-15215`, :cve:`2019-15216`, :cve:`2019-15217`, :cve:`2019-15218`, :cve:`2019-15219`, :cve:`2019-15220`, :cve:`2019-15221`, :cve:`2019-15222`, :cve:`2019-15223`, :cve:`2019-15291`, :cve:`2019-15292`, :cve:`2019-15504`, :cve:`2019-15505`, :cve:`2019-15538`, :cve:`2019-15666`, :cve:`2019-15794`, :cve:`2019-15807`, :cve:`2019-15916`, :cve:`2019-15917`, :cve:`2019-15918`, :cve:`2019-15919`, :cve:`2019-15920`, :cve:`2019-15921`, :cve:`2019-15922`, :cve:`2019-15923`, :cve:`2019-15924`, :cve:`2019-15925`, :cve:`2019-15926`, :cve:`2019-15927`, :cve:`2019-16229`, :cve:`2019-16230`, :cve:`2019-16231`, :cve:`2019-16232`, :cve:`2019-16233`, :cve:`2019-16234`, :cve:`2019-16413`, :cve:`2019-16714`, :cve:`2019-16746`, :cve:`2019-16921`, :cve:`2019-16994`, :cve:`2019-16995`, :cve:`2019-17052`, :cve:`2019-17053`, :cve:`2019-17054`, :cve:`2019-17055`, :cve:`2019-17056`, :cve:`2019-17075`, :cve:`2019-17133`, :cve:`2019-17351`, :cve:`2019-17666`, :cve:`2019-18198`, :cve:`2019-18282`, :cve:`2019-18660`, :cve:`2019-18675`, :cve:`2019-18683`, :cve:`2019-18786`, :cve:`2019-18805`, :cve:`2019-18806`, :cve:`2019-18807`, :cve:`2019-18808`, :cve:`2019-18809`, :cve:`2019-18810`, :cve:`2019-18811`, :cve:`2019-18812`, :cve:`2019-18813`, :cve:`2019-18814`, :cve:`2019-18885`, :cve:`2019-19036`, :cve:`2019-19037`, :cve:`2019-19039`, :cve:`2019-19043`, :cve:`2019-19044`, :cve:`2019-19045`, :cve:`2019-19046`, :cve:`2019-19047`, :cve:`2019-19048`, :cve:`2019-19049`, :cve:`2019-19050`, :cve:`2019-19051`, :cve:`2019-19052`, :cve:`2019-19053`, :cve:`2019-19054`, :cve:`2019-19055`, :cve:`2019-19056`, :cve:`2019-19057`, :cve:`2019-19058`, :cve:`2019-19059`, :cve:`2019-19060`, :cve:`2019-19061`, :cve:`2019-19062`, :cve:`2019-19063`, :cve:`2019-19064`, :cve:`2019-19065`, :cve:`2019-19066`, :cve:`2019-19067`, :cve:`2019-19068`, :cve:`2019-19069`, :cve:`2019-19070`, :cve:`2019-19071`, :cve:`2019-19072`, :cve:`2019-19073`, :cve:`2019-19074`, :cve:`2019-19075`, :cve:`2019-19076`, :cve:`2019-19077`, :cve:`2019-19078`, :cve:`2019-19079`, :cve:`2019-19080`, :cve:`2019-19081`, :cve:`2019-19082`, :cve:`2019-19083`, :cve:`2019-19227`, :cve:`2019-19241`, :cve:`2019-19252`, :cve:`2019-19318`, :cve:`2019-19319`, :cve:`2019-19332`, :cve:`2019-19338`, :cve:`2019-19377`, :cve:`2019-19447`, :cve:`2019-19448`, :cve:`2019-19449`, :cve:`2019-19462`, :cve:`2019-19523`, :cve:`2019-19524`, :cve:`2019-19525`, :cve:`2019-19526`, :cve:`2019-19527`, :cve:`2019-19528`, :cve:`2019-19529`, :cve:`2019-19530`, :cve:`2019-19531`, :cve:`2019-19532`, :cve:`2019-19533`, :cve:`2019-19534`, :cve:`2019-19535`, :cve:`2019-19536`, :cve:`2019-19537`, :cve:`2019-19543`, :cve:`2019-19602`, :cve:`2019-19767`, :cve:`2019-19768`, :cve:`2019-19769`, :cve:`2019-19770`, :cve:`2019-19807`, :cve:`2019-19813`, :cve:`2019-19815`, :cve:`2019-19816`, :cve:`2019-19922`, :cve:`2019-19927`, :cve:`2019-19947`, :cve:`2019-19965` and :cve:`2019-1999`
+- nasm: Fix :cve:`2020-21528`
+- ncurses: Fix :cve:`2023-29491`
+- nghttp2: Fix :cve:`2023-35945`
+- procps: Fix :cve:`2023-4016`
+- python3-certifi: Fix :cve:`2023-37920`
+- python3-git: Fix :cve:`2022-24439` and :cve:`2023-40267`
+- python3-pygments: Fix :cve:`2022-40896`
+- python3: Fix :cve:`2023-40217`
+- qemu: Fix :cve:`2020-14394`, :cve:`2021-3638`, :cve_mitre:`2023-2861`, :cve:`2023-3180` and :cve:`2023-3354`
+- tiff: fix :cve:`2023-2908`, :cve:`2023-3316` and :cve:`2023-3618`
+- vim: Fix :cve:`2023-3896`, :cve:`2023-4733`, :cve:`2023-4734`, :cve:`2023-4735`, :cve:`2023-4736`, :cve:`2023-4738`, :cve:`2023-4750` and :cve:`2023-4752`
+- webkitgtk: fix :cve:`2022-48503` and :cve:`2023-23529`
+
+
+
+Fixes in Yocto-4.0.13
+~~~~~~~~~~~~~~~~~~~~~
+
+- acl/attr: ptest fixes and improvements
+- automake: fix buildtest patch
+- bind: Upgrade to 9.18.17
+- binutils: stable 2.38 branch updates
+- build-appliance-image: Update to kirkstone head revision
+- build-sysroots: Add :term:`SUMMARY` field
+- cargo.bbclass: set up cargo environment in common do_compile
+- contributor-guide: recipe-style-guide: add Upstream-Status
+- dbus: Specify runstatedir configure option
+- dev-manual: common-tasks: mention faster "find" command to trim sstate cache
+- dev-manual: disk-space: improve wording for obsolete sstate cache files
+- dev-manual: licenses: mention :term:`SPDX` for license compliance
+- dev-manual: licenses: update license manifest location
+- dev-manual: new-recipe.rst fix inconsistency with contributor guide
+- dev-manual: split common-tasks.rst
+- dev-manual: wic.rst: Update native tools build command
+- documentation/README: align with master
+- efivar: backport 5 patches to fix build with gold
+- externalsrc: fix dependency chain issues
+- glibc-locale: use stricter matching for metapackages' runtime dependencies
+- glibc/check-test-wrapper: don't emit warnings from ssh
+- glibc: stable 2.35 branch updates
+- gst-devtools: Upgrade to 1.20.7
+- gstreamer1.0-libav: Upgrade to 1.20.7
+- gstreamer1.0-omx: Upgrade to 1.20.7
+- gstreamer1.0-plugins-bad: Upgrade to 1.20.7
+- gstreamer1.0-plugins-base: Upgrade to 1.20.7
+- gstreamer1.0-plugins-good: Upgrade to 1.20.7
+- gstreamer1.0-plugins-ugly: Upgrade to 1.20.7
+- gstreamer1.0-python: Upgrade to 1.20.7
+- gstreamer1.0-rtsp-server: Upgrade to 1.20.7
+- gstreamer1.0-vaapi: Upgrade to 1.20.7
+- gstreamer1.0: Upgrade to 1.20.7
+- kernel: Fix path comparison in kernel staging dir symlinking
+- lib/package_manager: Improve repo artefact filtering
+- libdnf: resolve cstdint inclusion for newer gcc versions
+- libnss-nis: Upgrade to 3.2
+- librsvg: Upgrade to 2.52.10
+- libxcrypt: update :term:`PV` to match :term:`SRCREV`
+- linux-firmware : Add firmware of RTL8822 serie
+- linux-firmware: Fix mediatek mt7601u firmware path
+- linux-firmware: package firmware for Dragonboard 410c
+- linux-firmware: split platform-specific Adreno shaders to separate packages
+- linux-firmware: Upgrade to 20230625
+- linux-yocto/5.10: update to v5.10.188
+- linux-yocto/5.15: update to v5.15.124
+- linux-yocto: add script to generate kernel :term:`CVE_CHECK_IGNORE` entries
+- linux/cve-exclusion: add generated CVE_CHECK_IGNORES.
+- linux/cve-exclusion: remove obsolete manual entries
+- manuals: add new contributor guide
+- manuals: document "mime-xdg" class and :term:`MIME_XDG_PACKAGES`
+- manuals: update former references to dev-manual/common-tasks
+- mdadm: add util-linux-blockdev ptest dependency
+- migration-guides: add release notes for 4.0.12
+- npm.bbclass: avoid DeprecationWarning with new python
+- oeqa/runtime/ltp: Increase ltp test output timeout
+- oeqa/ssh: Further improve process exit handling
+- oeqa/target/ssh: Ensure EAGAIN doesn't truncate output
+- oeqa/utils/nfs: allow requesting non-udp ports
+- pixman: Remove duplication of license MIT
+- poky.conf: bump version for 4.0.13
+- poky.conf: update :term:`SANITY_TESTED_DISTROS` to match autobuilder
+- pseudo: Fix to work with glibc 2.38
+- python3-git: Upgrade to 3.1.32
+- python3: upgrade to 3.10.13
+- ref-manual: add Initramfs term
+- ref-manual: add meson class and variables
+- ref-manual: add new variables
+- ref-manual: qa-checks: align with master
+- ref-manual: system-requirements: update supported distros
+- resulttool/report: Avoid divide by zero
+- resulttool/resultutils: allow index generation despite corrupt json
+- rootfs: Add debugfs package db file copy and cleanup
+- rpm2cpio.sh: update to the last 4.x version
+- rpm: Pick debugfs package db files/dirs explicitly
+- scripts/create-pull-request: update URLs to git repositories
+- scripts/rpm2cpio.sh: Use bzip2 instead of bunzip2
+- sdk-manual: extensible.rst: align with master branch
+- selftest/cases/glibc.py: fix the override syntax
+- selftest/cases/glibc.py: increase the memory for testing
+- selftest/cases/glibc.py: switch to using NFS over TCP
+- shadow-sysroot: add license information
+- sysklogd: fix integration with systemd-journald
+- tar: Upgrade to 1.35
+- target/ssh: Ensure exit code set for commands
+- tcl: prevent installing another copy of tzdata
+- template: fix typo in section header
+- vim: Upgrade to 9.0.1894
+- vim: update obsolete comment
+- wic: fix wrong attempt to create file system in unpartitioned regions
+- yocto-uninative: Update to 4.2 for glibc 2.38
+- yocto-uninative: Update to 4.3
+
+
+Known Issues in Yocto-4.0.13
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+- N/A
+
+
+Contributors to Yocto-4.0.13
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+- Abe Kohandel
+- Adrian Freihofer
+- Alberto Planas
+- Alex Kiernan
+- Alexander Kanavin
+- Alexis Lothoré
+- Anuj Mittal
+- Archana Polampalli
+- Ashish Sharma
+- BELOUARGA Mohamed
+- Bruce Ashfield
+- Changqing Li
+- Dmitry Baryshkov
+- Enrico Scholz
+- Etienne Cordonnier
+- Hitendra Prajapati
+- Julien Stephan
+- Kai Kang
+- Khem Raj
+- Lee Chee Yang
+- Marek Vasut
+- Markus Niebel
+- Martin Jansa
+- Meenali Gupta
+- Michael Halstead
+- Michael Opdenacker
+- Narpat Mali
+- Ovidiu Panait
+- Pavel Zhukov
+- Peter Marko
+- Peter Suti
+- Poonam Jadhav
+- Richard Purdie
+- Roland Hieber
+- Ross Burton
+- Sanjana
+- Siddharth Doshi
+- Soumya Sambu
+- Staffan Rydén
+- Steve Sakoman
+- Trevor Gamblin
+- Vijay Anusuri
+- Vivek Kumbhar
+- Wang Mingyu
+- Yogita Urade
+
+
+Repositories / Downloads for Yocto-4.0.13
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+poky
+
+- Repository Location: :yocto_git:`/poky`
+- Branch: :yocto_git:`kirkstone </poky/log/?h=kirkstone>`
+- Tag: :yocto_git:`yocto-4.0.13 </poky/log/?h=yocto-4.0.13>`
+- Git Revision: :yocto_git:`e51bf557f596c4da38789a948a3228ba11455e3c </poky/commit/?id=e51bf557f596c4da38789a948a3228ba11455e3c>`
+- Release Artefact: poky-e51bf557f596c4da38789a948a3228ba11455e3c
+- sha: afddadb367a90154751f04993077bceffdc1413f9ba9b8c03acb487d0437286e
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.0.13/poky-e51bf557f596c4da38789a948a3228ba11455e3c.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.0.13/poky-e51bf557f596c4da38789a948a3228ba11455e3c.tar.bz2
+
+openembedded-core
+
+- Repository Location: :oe_git:`/openembedded-core`
+- Branch: :oe_git:`kirkstone </openembedded-core/log/?h=kirkstone>`
+- Tag: :oe_git:`yocto-4.0.13 </openembedded-core/log/?h=yocto-4.0.13>`
+- Git Revision: :oe_git:`d90e4d5e3cca9cffe8f60841afc63667a9ac39fa </openembedded-core/commit/?id=d90e4d5e3cca9cffe8f60841afc63667a9ac39fa>`
+- Release Artefact: oecore-d90e4d5e3cca9cffe8f60841afc63667a9ac39fa
+- sha: 56e3bdac81b3628e74dfef2132a54be4db7d87373139a00ed64f5c9a354d716a
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.0.13/oecore-d90e4d5e3cca9cffe8f60841afc63667a9ac39fa.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.0.13/oecore-d90e4d5e3cca9cffe8f60841afc63667a9ac39fa.tar.bz2
+
+meta-mingw
+
+- Repository Location: :yocto_git:`/meta-mingw`
+- Branch: :yocto_git:`kirkstone </meta-mingw/log/?h=kirkstone>`
+- Tag: :yocto_git:`yocto-4.0.13 </meta-mingw/log/?h=yocto-4.0.13>`
+- Git Revision: :yocto_git:`a90614a6498c3345704e9611f2842eb933dc51c1 </meta-mingw/commit/?id=a90614a6498c3345704e9611f2842eb933dc51c1>`
+- Release Artefact: meta-mingw-a90614a6498c3345704e9611f2842eb933dc51c1
+- sha: 49f9900bfbbc1c68136f8115b314e95d0b7f6be75edf36a75d9bcd1cca7c6302
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.0.13/meta-mingw-a90614a6498c3345704e9611f2842eb933dc51c1.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.0.13/meta-mingw-a90614a6498c3345704e9611f2842eb933dc51c1.tar.bz2
+
+meta-gplv2
+
+- Repository Location: :yocto_git:`/meta-gplv2`
+- Branch: :yocto_git:`kirkstone </meta-gplv2/log/?h=kirkstone>`
+- Tag: :yocto_git:`yocto-4.0.13 </meta-gplv2/log/?h=yocto-4.0.13>`
+- Git Revision: :yocto_git:`d2f8b5cdb285b72a4ed93450f6703ca27aa42e8a </meta-gplv2/commit/?id=d2f8b5cdb285b72a4ed93450f6703ca27aa42e8a>`
+- Release Artefact: meta-gplv2-d2f8b5cdb285b72a4ed93450f6703ca27aa42e8a
+- sha: c386f59f8a672747dc3d0be1d4234b6039273d0e57933eb87caa20f56b9cca6d
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.0.13/meta-gplv2-d2f8b5cdb285b72a4ed93450f6703ca27aa42e8a.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.0.13/meta-gplv2-d2f8b5cdb285b72a4ed93450f6703ca27aa42e8a.tar.bz2
+
+bitbake
+
+- Repository Location: :oe_git:`/bitbake`
+- Branch: :oe_git:`2.0 </bitbake/log/?h=2.0>`
+- Tag: :oe_git:`yocto-4.0.13 </bitbake/log/?h=yocto-4.0.13>`
+- Git Revision: :oe_git:`41b6684489d0261753344956042be2cc4adb0159 </bitbake/commit/?id=41b6684489d0261753344956042be2cc4adb0159>`
+- Release Artefact: bitbake-41b6684489d0261753344956042be2cc4adb0159
+- sha: efa2b1c4d0be115ed3960750d1e4ed958771b2db6d7baee2d13ad386589376e8
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.0.13/bitbake-41b6684489d0261753344956042be2cc4adb0159.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.0.13/bitbake-41b6684489d0261753344956042be2cc4adb0159.tar.bz2
+
+yocto-docs
+
+- Repository Location: :yocto_git:`/yocto-docs`
+- Branch: :yocto_git:`kirkstone </yocto-docs/log/?h=kirkstone>`
+- Tag: :yocto_git:`yocto-4.0.13 </yocto-docs/log/?h=yocto-4.0.13>`
+- Git Revision: :yocto_git:`8f02741de867125f11a37822b2d206be180d4ee3 </yocto-docs/commit/?id=8f02741de867125f11a37822b2d206be180d4ee3>`
+
diff --git a/documentation/migration-guides/release-notes-4.0.14.rst b/documentation/migration-guides/release-notes-4.0.14.rst
new file mode 100644
index 0000000000..02253f33f7
--- /dev/null
+++ b/documentation/migration-guides/release-notes-4.0.14.rst
@@ -0,0 +1,227 @@
+.. SPDX-License-Identifier: CC-BY-SA-2.0-UK
+
+Release notes for Yocto-4.0.14 (Kirkstone)
+------------------------------------------
+
+Security Fixes in Yocto-4.0.14
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+- bind: Fix :cve:`2023-3341` and :cve:`2023-4236`
+- binutils: Fix :cve:`2022-44840`, :cve:`2022-45703`, :cve:`2022-47008`, :cve:`2022-47011`, :cve:`2022-47673`, :cve:`2022-47695`, :cve:`2022-47696` and :cve:`2022-48063`
+- cups: Fix :cve:`2023-4504`
+- curl: Fix :cve:`2023-38545` and :cve:`2023-38546`
+- gawk: Fix :cve:`2023-4156`
+- ghostscript: Fix :cve:`2023-43115`
+- glibc: Fix :cve:`2023-4806`, :cve:`2023-4813`, :cve:`2023-4911` and :cve:`2023-5156`
+- glibc: Ignore :cve:`2023-4527`
+- go: Fix :cve:`2023-24538` and :cve:`2023-39318`
+- gstreamer1.0-plugins-bad: fix :cve_mitre:`2023-40474`, :cve_mitre:`2023-40475` and :cve_mitre:`2023-40476`
+- libtiff: Fix :cve:`2022-40090` and :cve:`2023-1916`
+- libwebp: Fix :cve:`2023-5129`
+- libx11: Fix :cve:`2023-43785`, :cve:`2023-43786` and :cve:`2023-43787`
+- libxml2: Fix :cve:`2023-45322`
+- libxpm: Fix :cve:`2023-43788` and :cve:`2023-43789`
+- linux-firmware: Fix :cve:`2022-40982`, :cve:`2023-20569` and :cve:`2023-20593`
+- linux-yocto: update CVE exclusions
+- linux-yocto/5.10: Ignore :cve:`2003-1604`, :cve:`2004-0230`, :cve:`2006-3635`, :cve:`2006-5331`, :cve:`2006-6128`, :cve:`2007-4774`, :cve:`2007-6761`, :cve:`2007-6762`, :cve:`2008-7316`, :cve:`2009-2692`, :cve:`2010-0008`, :cve:`2010-3432`, :cve:`2010-4648`, :cve:`2010-5313`, :cve:`2010-5328`, :cve:`2010-5329`, :cve:`2010-5331`, :cve:`2010-5332`, :cve:`2011-4098`, :cve:`2011-4131`, :cve:`2011-4915`, :cve:`2011-5321`, :cve:`2011-5327`, :cve:`2012-0957`, :cve:`2012-2119`, :cve:`2012-2136`, :cve:`2012-2137`, :cve:`2012-2313`, :cve:`2012-2319`, :cve:`2012-2372`, :cve:`2012-2375`, :cve:`2012-2390`, :cve:`2012-2669`, :cve:`2012-2744`, :cve:`2012-2745`, :cve:`2012-3364`, :cve:`2012-3375`, :cve:`2012-3400`, :cve:`2012-3412`, :cve:`2012-3430`, :cve:`2012-3510`, :cve:`2012-3511`, :cve:`2012-3520`, :cve:`2012-3552`, :cve:`2012-4398`, :cve:`2012-4444`, :cve:`2012-4461`, :cve:`2012-4467`, :cve:`2012-4508`, :cve:`2012-4530`, :cve:`2012-4565`, :cve:`2012-5374`, :cve:`2012-5375`, :cve:`2012-5517`, :cve:`2012-6536`, :cve:`2012-6537`, :cve:`2012-6538`, :cve:`2012-6539`, :cve:`2012-6540`, :cve:`2012-6541`, :cve:`2012-6542`, :cve:`2012-6543`, :cve:`2012-6544`, :cve:`2012-6545`, :cve:`2012-6546`, :cve:`2012-6547`, :cve:`2012-6548`, :cve:`2012-6549`, :cve:`2012-6638`, :cve:`2012-6647`, :cve:`2012-6657`, :cve:`2012-6689`, :cve:`2012-6701`, :cve:`2012-6703`, :cve:`2012-6704`, :cve:`2012-6712`, :cve:`2013-0160`, :cve:`2013-0190`, :cve:`2013-0216`, :cve:`2013-0217`, :cve:`2013-0228`, :cve:`2013-0231`, :cve:`2013-0268`, :cve:`2013-0290`, :cve:`2013-0309`, :cve:`2013-0310`, :cve:`2013-0311`, :cve:`2013-0313`, :cve:`2013-0343`, :cve:`2013-0349`, :cve:`2013-0871`, :cve:`2013-0913`, :cve:`2013-0914`, :cve:`2013-1059`, :cve:`2013-1763`, :cve:`2013-1767`, :cve:`2013-1772`, :cve:`2013-1773`, :cve:`2013-1774`, :cve:`2013-1792`, :cve:`2013-1796`, :cve:`2013-1797`, :cve:`2013-1798`, :cve:`2013-1819`, :cve:`2013-1826`, :cve:`2013-1827`, :cve:`2013-1828`, :cve:`2013-1848`, :cve:`2013-1858`, :cve:`2013-1860`, :cve:`2013-1928`, :cve:`2013-1929`, :cve:`2013-1943`, :cve:`2013-1956`, :cve:`2013-1957`, :cve:`2013-1958`, :cve:`2013-1959`, :cve:`2013-1979`, :cve:`2013-2015`, :cve:`2013-2017`, :cve:`2013-2058`, :cve:`2013-2094`, :cve:`2013-2128`, :cve:`2013-2140`, :cve:`2013-2141`, :cve:`2013-2146`, :cve:`2013-2147`, :cve:`2013-2148`, :cve:`2013-2164`, :cve:`2013-2206`, :cve:`2013-2232`, :cve:`2013-2234`, :cve:`2013-2237`, :cve:`2013-2546`, :cve:`2013-2547`, :cve:`2013-2548`, :cve:`2013-2596`, :cve:`2013-2634`, :cve:`2013-2635`, :cve:`2013-2636`, :cve:`2013-2850`, :cve:`2013-2851`, :cve:`2013-2852`, :cve:`2013-2888`, :cve:`2013-2889`, :cve:`2013-2890`, :cve:`2013-2891`, :cve:`2013-2892`, :cve:`2013-2893`, :cve:`2013-2894`, :cve:`2013-2895`, :cve:`2013-2896`, :cve:`2013-2897`, :cve:`2013-2898`, :cve:`2013-2899`, :cve:`2013-2929`, :cve:`2013-2930`, :cve:`2013-3076`, :cve:`2013-3222`, :cve:`2013-3223`, :cve:`2013-3224`, :cve:`2013-3225`, :cve:`2013-3226`, :cve:`2013-3227`, :cve:`2013-3228`, :cve:`2013-3229`, :cve:`2013-3230`, :cve:`2013-3231`, :cve:`2013-3232`, :cve:`2013-3233`, :cve:`2013-3234`, :cve:`2013-3235`, :cve:`2013-3236`, :cve:`2013-3237`, :cve:`2013-3301`, :cve:`2013-3302`, :cve:`2013-4125`, :cve:`2013-4127`, :cve:`2013-4129`, :cve:`2013-4162`, :cve:`2013-4163`, :cve:`2013-4205`, :cve:`2013-4220`, :cve:`2013-4247`, :cve:`2013-4254`, :cve:`2013-4270`, :cve:`2013-4299`, :cve:`2013-4300`, :cve:`2013-4312`, :cve:`2013-4343`, :cve:`2013-4345`, :cve:`2013-4348`, :cve:`2013-4350`, :cve:`2013-4387`, :cve:`2013-4470`, :cve:`2013-4483`, :cve:`2013-4511`, :cve:`2013-4512`, :cve:`2013-4513`, :cve:`2013-4514`, :cve:`2013-4515`, :cve:`2013-4516`, :cve:`2013-4563`, :cve:`2013-4579`, :cve:`2013-4587`, :cve:`2013-4588`, :cve:`2013-4591`, :cve:`2013-4592`, :cve:`2013-5634`, :cve:`2013-6282`, :cve:`2013-6367`, :cve:`2013-6368`, :cve:`2013-6376`, :cve:`2013-6378`, :cve:`2013-6380`, :cve:`2013-6381`, :cve:`2013-6382`, :cve:`2013-6383`, :cve:`2013-6431`, :cve:`2013-6432`, :cve:`2013-6885`, :cve:`2013-7026`, :cve:`2013-7027`, :cve:`2013-7263`, :cve:`2013-7264`, :cve:`2013-7265`, :cve:`2013-7266`, :cve:`2013-7267`, :cve:`2013-7268`, :cve:`2013-7269`, :cve:`2013-7270`, :cve:`2013-7271`, :cve:`2013-7281`, :cve:`2013-7339`, :cve:`2013-7348`, :cve:`2013-7421`, :cve:`2013-7446`, :cve:`2013-7470`, :cve:`2014-0038`, :cve:`2014-0049`, :cve:`2014-0055`, :cve:`2014-0069`, :cve:`2014-0077`, :cve:`2014-0100`, :cve:`2014-0101`, :cve:`2014-0102`, :cve:`2014-0131`, :cve:`2014-0155`, :cve:`2014-0181`, :cve:`2014-0196`, :cve:`2014-0203`, :cve:`2014-0205`, :cve:`2014-0206`, :cve:`2014-1438`, :cve:`2014-1444`, :cve:`2014-1445`, :cve:`2014-1446`, :cve:`2014-1690`, :cve:`2014-1737`, :cve:`2014-1738`, :cve:`2014-1739`, :cve:`2014-1874`, :cve:`2014-2038`, :cve:`2014-2039`, :cve:`2014-2309`, :cve:`2014-2523`, :cve:`2014-2568`, :cve:`2014-2580`, :cve:`2014-2672`, :cve:`2014-2673`, :cve:`2014-2678`, :cve:`2014-2706`, :cve:`2014-2739`, :cve:`2014-2851`, :cve:`2014-2889`, :cve:`2014-3122`, :cve:`2014-3144`, :cve:`2014-3145`, :cve:`2014-3153`, :cve:`2014-3180`, :cve:`2014-3181`, :cve:`2014-3182`, :cve:`2014-3183`, :cve:`2014-3184`, :cve:`2014-3185`, :cve:`2014-3186`, :cve:`2014-3534`, :cve:`2014-3535`, :cve:`2014-3601`, :cve:`2014-3610`, :cve:`2014-3611`, :cve:`2014-3631`, :cve:`2014-3645`, :cve:`2014-3646`, :cve:`2014-3647`, :cve:`2014-3673`, :cve:`2014-3687`, :cve:`2014-3688`, :cve:`2014-3690`, :cve:`2014-3917`, :cve:`2014-3940`, :cve:`2014-4014`, :cve:`2014-4027`, :cve:`2014-4157`, :cve:`2014-4171`, :cve:`2014-4508`, :cve:`2014-4608`, :cve:`2014-4611`, :cve:`2014-4652`, :cve:`2014-4653`, :cve:`2014-4654`, :cve:`2014-4655`, :cve:`2014-4656`, :cve:`2014-4667`, :cve:`2014-4699`, :cve:`2014-4943`, :cve:`2014-5045`, :cve:`2014-5077`, :cve:`2014-5206`, :cve:`2014-5207`, :cve:`2014-5471`, :cve:`2014-5472`, :cve:`2014-6410`, :cve:`2014-6416`, :cve:`2014-6417`, :cve:`2014-6418`, :cve:`2014-7145`, :cve:`2014-7283`, :cve:`2014-7284`, :cve:`2014-7822`, :cve:`2014-7825`, :cve:`2014-7826`, :cve:`2014-7841`, :cve:`2014-7842`, :cve:`2014-7843`, :cve:`2014-7970`, :cve:`2014-7975`, :cve:`2014-8086`, :cve:`2014-8133`, :cve:`2014-8134`, :cve:`2014-8159`, :cve:`2014-8160`, :cve:`2014-8171`, :cve:`2014-8172`, :cve:`2014-8173`, :cve:`2014-8369`, :cve:`2014-8480`, :cve:`2014-8481`, :cve:`2014-8559`, :cve:`2014-8709`, :cve:`2014-8884`, :cve:`2014-8989`, :cve:`2014-9090`, :cve:`2014-9322`, :cve:`2014-9419`, :cve:`2014-9420`, :cve:`2014-9428`, :cve:`2014-9529`, :cve:`2014-9584`, :cve:`2014-9585`, :cve:`2014-9644`, :cve:`2014-9683`, :cve:`2014-9710`, :cve:`2014-9715`, :cve:`2014-9717`, :cve:`2014-9728`, :cve:`2014-9729`, :cve:`2014-9730`, :cve:`2014-9731`, :cve:`2014-9803`, :cve:`2014-9870`, :cve:`2014-9888`, :cve:`2014-9895`, :cve:`2014-9903`, :cve:`2014-9904`, :cve:`2014-9914`, :cve:`2014-9922`, :cve:`2014-9940`, :cve:`2015-0239`, :cve:`2015-0274`, :cve:`2015-0275`, :cve:`2015-1333`, :cve:`2015-1339`, :cve:`2015-1350`, :cve:`2015-1420`, :cve:`2015-1421`, :cve:`2015-1465`, :cve:`2015-1573`, :cve:`2015-1593`, :cve:`2015-1805`, :cve:`2015-2041`, :cve:`2015-2042`, :cve:`2015-2150`, :cve:`2015-2666`, :cve:`2015-2672`, :cve:`2015-2686`, :cve:`2015-2830`, :cve:`2015-2922`, :cve:`2015-2925`, :cve:`2015-3212`, :cve:`2015-3214`, :cve:`2015-3288`, :cve:`2015-3290`, :cve:`2015-3291`, :cve:`2015-3331`, :cve:`2015-3339`, :cve:`2015-3636`, :cve:`2015-4001`, :cve:`2015-4002`, :cve:`2015-4003`, :cve:`2015-4004`, :cve:`2015-4036`, :cve:`2015-4167`, :cve:`2015-4170`, :cve:`2015-4176`, :cve:`2015-4177`, :cve:`2015-4178`, :cve:`2015-4692`, :cve:`2015-4700`, :cve:`2015-5156`, :cve:`2015-5157`, :cve:`2015-5257`, :cve:`2015-5283`, :cve:`2015-5307`, :cve:`2015-5327`, :cve:`2015-5364`, :cve:`2015-5366`, :cve:`2015-5697`, :cve:`2015-5706`, :cve:`2015-5707`, :cve:`2015-6252`, :cve:`2015-6526`, :cve:`2015-6937`, :cve:`2015-7509`, :cve:`2015-7513`, :cve:`2015-7515`, :cve:`2015-7550`, :cve:`2015-7566`, :cve:`2015-7613`, :cve:`2015-7799`, :cve:`2015-7833`, :cve:`2015-7872`, :cve:`2015-7884`, :cve:`2015-7885`, :cve:`2015-7990`, :cve:`2015-8104`, :cve:`2015-8215`, :cve:`2015-8324`, :cve:`2015-8374`, :cve:`2015-8539`, :cve:`2015-8543`, :cve:`2015-8550`, :cve:`2015-8551`, :cve:`2015-8552`, :cve:`2015-8553`, :cve:`2015-8569`, :cve:`2015-8575`, :cve:`2015-8660`, :cve:`2015-8709`, :cve:`2015-8746`, :cve:`2015-8767`, :cve:`2015-8785`, :cve:`2015-8787`, :cve:`2015-8812`, :cve:`2015-8816`, :cve:`2015-8830`, :cve:`2015-8839`, :cve:`2015-8844`, :cve:`2015-8845`, :cve:`2015-8950`, :cve:`2015-8952`, :cve:`2015-8953`, :cve:`2015-8955`, :cve:`2015-8956`, :cve:`2015-8961`, :cve:`2015-8962`, :cve:`2015-8963`, :cve:`2015-8964`, :cve:`2015-8966`, :cve:`2015-8967`, :cve:`2015-8970`, :cve:`2015-9004`, :cve:`2015-9016`, :cve:`2015-9289`, :cve:`2016-0617`, :cve:`2016-0723`, :cve:`2016-0728`, :cve:`2016-0758`, :cve:`2016-0821`, :cve:`2016-0823`, :cve:`2016-10044`, :cve:`2016-10088`, :cve:`2016-10147`, :cve:`2016-10150`, :cve:`2016-10153`, :cve:`2016-10154`, :cve:`2016-10200`, :cve:`2016-10208`, :cve:`2016-10229`, :cve:`2016-10318`, :cve:`2016-10723`, :cve:`2016-10741`, :cve:`2016-10764`, :cve:`2016-10905`, :cve:`2016-10906`, :cve:`2016-10907`, :cve:`2016-1237`, :cve:`2016-1575`, :cve:`2016-1576`, :cve:`2016-1583`, :cve:`2016-2053`, :cve:`2016-2069`, :cve:`2016-2070`, :cve:`2016-2085`, :cve:`2016-2117`, :cve:`2016-2143`, :cve:`2016-2184`, :cve:`2016-2185`, :cve:`2016-2186`, :cve:`2016-2187`, :cve:`2016-2188`, :cve:`2016-2383`, :cve:`2016-2384`, :cve:`2016-2543`, :cve:`2016-2544`, :cve:`2016-2545`, :cve:`2016-2546`, :cve:`2016-2547`, :cve:`2016-2548`, :cve:`2016-2549`, :cve:`2016-2550`, :cve:`2016-2782`, :cve:`2016-2847`, :cve:`2016-3044`, :cve:`2016-3070`, :cve:`2016-3134`, :cve:`2016-3135`, :cve:`2016-3136`, :cve:`2016-3137`, :cve:`2016-3138`, :cve:`2016-3139`, :cve:`2016-3140`, :cve:`2016-3156`, :cve:`2016-3157`, :cve:`2016-3672`, :cve:`2016-3689`, :cve:`2016-3713`, :cve:`2016-3841`, :cve:`2016-3857`, :cve:`2016-3951`, :cve:`2016-3955`, :cve:`2016-3961`, :cve:`2016-4440`, :cve:`2016-4470`, :cve:`2016-4482`, :cve:`2016-4485`, :cve:`2016-4486`, :cve:`2016-4557`, :cve:`2016-4558`, :cve:`2016-4565`, :cve:`2016-4568`, :cve:`2016-4569`, :cve:`2016-4578`, :cve:`2016-4580`, :cve:`2016-4581`, :cve:`2016-4794`, :cve:`2016-4805`, :cve:`2016-4913`, :cve:`2016-4951`, :cve:`2016-4997`, :cve:`2016-4998`, :cve:`2016-5195`, :cve:`2016-5243`, :cve:`2016-5244`, :cve:`2016-5400`, :cve:`2016-5412`, :cve:`2016-5696`, :cve:`2016-5728`, :cve:`2016-5828`, :cve:`2016-5829`, :cve:`2016-6130`, :cve:`2016-6136`, :cve:`2016-6156`, :cve:`2016-6162`, :cve:`2016-6187`, :cve:`2016-6197`, :cve:`2016-6198`, :cve:`2016-6213`, :cve:`2016-6327`, :cve:`2016-6480`, :cve:`2016-6516`, :cve:`2016-6786`, :cve:`2016-6787`, :cve:`2016-6828`, :cve:`2016-7039`, :cve:`2016-7042`, :cve:`2016-7097`, :cve:`2016-7117`, :cve:`2016-7425`, :cve:`2016-7910`, :cve:`2016-7911`, :cve:`2016-7912`, :cve:`2016-7913`, :cve:`2016-7914`, :cve:`2016-7915`, :cve:`2016-7916`, :cve:`2016-7917`, :cve:`2016-8399`, :cve:`2016-8405`, :cve:`2016-8630`, :cve:`2016-8632`, :cve:`2016-8633`, :cve:`2016-8636`, :cve:`2016-8645`, :cve:`2016-8646`, :cve:`2016-8650`, :cve:`2016-8655`, :cve:`2016-8658`, :cve:`2016-8666`, :cve:`2016-9083`, :cve:`2016-9084`, :cve:`2016-9120`, :cve:`2016-9178`, :cve:`2016-9191`, :cve:`2016-9313`, :cve:`2016-9555`, :cve:`2016-9576`, :cve:`2016-9588`, :cve:`2016-9604`, :cve:`2016-9685`, :cve:`2016-9754`, :cve:`2016-9755`, :cve:`2016-9756`, :cve:`2016-9777`, :cve:`2016-9793`, :cve:`2016-9794`, :cve:`2016-9806`, :cve:`2016-9919`, :cve:`2017-0605`, :cve:`2017-0627`, :cve:`2017-0750`, :cve:`2017-0786`, :cve:`2017-0861`, :cve:`2017-1000`, :cve:`2017-1000111`, :cve:`2017-1000112`, :cve:`2017-1000251`, :cve:`2017-1000252`, :cve:`2017-1000253`, :cve:`2017-1000255`, :cve:`2017-1000363`, :cve:`2017-1000364`, :cve:`2017-1000365`, :cve:`2017-1000370`, :cve:`2017-1000371`, :cve:`2017-1000379`, :cve:`2017-1000380`, :cve:`2017-1000405`, :cve:`2017-1000407`, :cve:`2017-1000410`, :cve:`2017-10661`, :cve:`2017-10662`, :cve:`2017-10663`, :cve:`2017-10810`, :cve:`2017-10911`, :cve:`2017-11089`, :cve:`2017-11176`, :cve:`2017-11472`, :cve:`2017-11473`, :cve:`2017-11600`, :cve:`2017-12134`, :cve:`2017-12146`, :cve:`2017-12153`, :cve:`2017-12154`, :cve:`2017-12168`, :cve:`2017-12188`, :cve:`2017-12190`, :cve:`2017-12192`, :cve:`2017-12193`, :cve:`2017-12762`, :cve:`2017-13080`, :cve:`2017-13166`, :cve:`2017-13167`, :cve:`2017-13168`, :cve:`2017-13215`, :cve:`2017-13216`, :cve:`2017-13220`, :cve:`2017-13305`, :cve:`2017-13686`, :cve:`2017-13695`, :cve:`2017-13715`, :cve:`2017-14051`, :cve:`2017-14106`, :cve:`2017-14140`, :cve:`2017-14156`, :cve:`2017-14340`, :cve:`2017-14489`, :cve:`2017-14497`, :cve:`2017-14954`, :cve:`2017-14991`, :cve:`2017-15102`, :cve:`2017-15115`, :cve:`2017-15116`, :cve:`2017-15121`, :cve:`2017-15126`, :cve:`2017-15127`, :cve:`2017-15128`, :cve:`2017-15129`, :cve:`2017-15265`, :cve:`2017-15274`, :cve:`2017-15299`, :cve:`2017-15306`, :cve:`2017-15537`, :cve:`2017-15649`, :cve:`2017-15868`, :cve:`2017-15951`, :cve:`2017-16525`, :cve:`2017-16526`, :cve:`2017-16527`, :cve:`2017-16528`, :cve:`2017-16529`, :cve:`2017-16530`, :cve:`2017-16531`, :cve:`2017-16532`, :cve:`2017-16533`, :cve:`2017-16534`, :cve:`2017-16535`, :cve:`2017-16536`, :cve:`2017-16537`, :cve:`2017-16538`, :cve:`2017-16643`, :cve:`2017-16644`, :cve:`2017-16645`, :cve:`2017-16646`, :cve:`2017-16647`, :cve:`2017-16648`, :cve:`2017-16649`, :cve:`2017-16650`, :cve:`2017-16911`, :cve:`2017-16912`, :cve:`2017-16913`, :cve:`2017-16914`, :cve:`2017-16939`, :cve:`2017-16994`, :cve:`2017-16995`, :cve:`2017-16996`, :cve:`2017-17052`, :cve:`2017-17053`, :cve:`2017-17448`, :cve:`2017-17449`, :cve:`2017-17450`, :cve:`2017-17558`, :cve:`2017-17712`, :cve:`2017-17741`, :cve:`2017-17805`, :cve:`2017-17806`, :cve:`2017-17807`, :cve:`2017-17852`, :cve:`2017-17853`, :cve:`2017-17854`, :cve:`2017-17855`, :cve:`2017-17856`, :cve:`2017-17857`, :cve:`2017-17862`, :cve:`2017-17863`, :cve:`2017-17864`, :cve:`2017-17975`, :cve:`2017-18017`, :cve:`2017-18075`, :cve:`2017-18079`, :cve:`2017-18174`, :cve:`2017-18193`, :cve:`2017-18200`, :cve:`2017-18202`, :cve:`2017-18203`, :cve:`2017-18204`, :cve:`2017-18208`, :cve:`2017-18216`, :cve:`2017-18218`, :cve:`2017-18221`, :cve:`2017-18222`, :cve:`2017-18224`, :cve:`2017-18232`, :cve:`2017-18241`, :cve:`2017-18249`, :cve:`2017-18255`, :cve:`2017-18257`, :cve:`2017-18261`, :cve:`2017-18270`, :cve:`2017-18344`, :cve:`2017-18360`, :cve:`2017-18379`, :cve:`2017-18509`, :cve:`2017-18549`, :cve:`2017-18550`, :cve:`2017-18551`, :cve:`2017-18552`, :cve:`2017-18595`, :cve:`2017-2583`, :cve:`2017-2584`, :cve:`2017-2596`, :cve:`2017-2618`, :cve:`2017-2634`, :cve:`2017-2636`, :cve:`2017-2647`, :cve:`2017-2671`, :cve:`2017-5123`, :cve:`2017-5546`, :cve:`2017-5547`, :cve:`2017-5548`, :cve:`2017-5549`, :cve:`2017-5550`, :cve:`2017-5551`, :cve:`2017-5576`, :cve:`2017-5577`, :cve:`2017-5669`, :cve:`2017-5715`, :cve:`2017-5753`, :cve:`2017-5754`, :cve:`2017-5897`, :cve:`2017-5967`, :cve:`2017-5970`, :cve:`2017-5972`, :cve:`2017-5986`, :cve:`2017-6001`, :cve:`2017-6074`, :cve:`2017-6214`, :cve:`2017-6345`, :cve:`2017-6346`, :cve:`2017-6347`, :cve:`2017-6348`, :cve:`2017-6353`, :cve:`2017-6874`, :cve:`2017-6951`, :cve:`2017-7184`, :cve:`2017-7187`, :cve:`2017-7261`, :cve:`2017-7273`, :cve:`2017-7277`, :cve:`2017-7294`, :cve:`2017-7308`, :cve:`2017-7346`, :cve:`2017-7374`, :cve:`2017-7472`, :cve:`2017-7477`, :cve:`2017-7482`, :cve:`2017-7487`, :cve:`2017-7495`, :cve:`2017-7518`, :cve:`2017-7533`, :cve:`2017-7541`, :cve:`2017-7542`, :cve:`2017-7558`, :cve:`2017-7616`, :cve:`2017-7618`, :cve:`2017-7645`, :cve:`2017-7889`, :cve:`2017-7895`, :cve:`2017-7979`, :cve:`2017-8061`, :cve:`2017-8062`, :cve:`2017-8063`, :cve:`2017-8064`, :cve:`2017-8065`, :cve:`2017-8066`, :cve:`2017-8067`, :cve:`2017-8068`, :cve:`2017-8069`, :cve:`2017-8070`, :cve:`2017-8071`, :cve:`2017-8072`, :cve:`2017-8106`, :cve:`2017-8240`, :cve:`2017-8797`, :cve:`2017-8824`, :cve:`2017-8831`, :cve:`2017-8890`, :cve:`2017-8924`, :cve:`2017-8925`, :cve:`2017-9059`, :cve:`2017-9074`, :cve:`2017-9075`, :cve:`2017-9076`, :cve:`2017-9077`, :cve:`2017-9150`, :cve:`2017-9211`, :cve:`2017-9242`, :cve:`2017-9605`, :cve:`2017-9725`, :cve:`2017-9984`, :cve:`2017-9985`, :cve:`2017-9986`, :cve:`2018-1000004`, :cve:`2018-1000026`, :cve:`2018-1000028`, :cve:`2018-1000199`, :cve:`2018-1000200`, :cve:`2018-1000204`, :cve:`2018-10021`, :cve:`2018-10074`, :cve:`2018-10087`, :cve:`2018-10124`, :cve:`2018-10322`, :cve:`2018-10323`, :cve:`2018-1065`, :cve:`2018-1066`, :cve:`2018-10675`, :cve:`2018-1068`, :cve:`2018-10840`, :cve:`2018-10853`, :cve:`2018-1087`, :cve:`2018-10876`, :cve:`2018-10877`, :cve:`2018-10878`, :cve:`2018-10879`, :cve:`2018-10880`, :cve:`2018-10881`, :cve:`2018-10882`, :cve:`2018-10883`, :cve:`2018-10901`, :cve:`2018-10902`, :cve:`2018-1091`, :cve:`2018-1092`, :cve:`2018-1093`, :cve:`2018-10938`, :cve:`2018-1094`, :cve:`2018-10940`, :cve:`2018-1095`, :cve:`2018-1108`, :cve:`2018-1118`, :cve:`2018-1120`, :cve:`2018-11232`, :cve:`2018-1128`, :cve:`2018-1129`, :cve:`2018-1130`, :cve:`2018-11412`, :cve:`2018-11506`, :cve:`2018-11508`, :cve:`2018-12126`, :cve:`2018-12127`, :cve:`2018-12130`, :cve:`2018-12207`, :cve:`2018-12232`, :cve:`2018-12233`, :cve:`2018-12633`, :cve:`2018-12714`, :cve:`2018-12896`, :cve:`2018-12904`, :cve:`2018-13053`, :cve:`2018-13093`, :cve:`2018-13094`, :cve:`2018-13095`, :cve:`2018-13096`, :cve:`2018-13097`, :cve:`2018-13098`, :cve:`2018-13099`, :cve:`2018-13100`, :cve:`2018-13405`, :cve:`2018-13406`, :cve:`2018-14609`, :cve:`2018-14610`, :cve:`2018-14611`, :cve:`2018-14612`, :cve:`2018-14613`, :cve:`2018-14614`, :cve:`2018-14615`, :cve:`2018-14616`, :cve:`2018-14617`, :cve:`2018-14619`, :cve:`2018-14625`, :cve:`2018-14633`, :cve:`2018-14634`, :cve:`2018-14641`, :cve:`2018-14646`, :cve:`2018-14656`, :cve:`2018-14678`, :cve:`2018-14734`, :cve:`2018-15471`, :cve:`2018-15572`, :cve:`2018-15594`, :cve:`2018-16276`, :cve:`2018-16597`, :cve:`2018-16658`, :cve:`2018-16862`, :cve:`2018-16871`, :cve:`2018-16880`, :cve:`2018-16882`, :cve:`2018-16884`, :cve:`2018-17182`, :cve:`2018-17972`, :cve:`2018-18021`, :cve:`2018-18281`, :cve:`2018-18386`, :cve:`2018-18397`, :cve:`2018-18445`, :cve:`2018-18559`, :cve:`2018-18690`, :cve:`2018-18710`, :cve:`2018-18955`, :cve:`2018-19406`, :cve:`2018-19407`, :cve:`2018-19824`, :cve:`2018-19854`, :cve:`2018-19985`, :cve:`2018-20169`, :cve:`2018-20449`, :cve:`2018-20509`, :cve:`2018-20510`, :cve:`2018-20511`, :cve:`2018-20669`, :cve:`2018-20784`, :cve:`2018-20836`, :cve:`2018-20854`, :cve:`2018-20855`, :cve:`2018-20856`, :cve:`2018-20961`, :cve:`2018-20976`, :cve:`2018-21008`, :cve:`2018-25015`, :cve:`2018-25020`, :cve:`2018-3620`, :cve:`2018-3639`, :cve:`2018-3646`, :cve:`2018-3665`, :cve:`2018-3693`, :cve:`2018-5332`, :cve:`2018-5333`, :cve:`2018-5344`, :cve:`2018-5390`, :cve:`2018-5391`, :cve:`2018-5703`, :cve:`2018-5750`, :cve:`2018-5803`, :cve:`2018-5814`, :cve:`2018-5848`, :cve:`2018-5873`, :cve:`2018-5953`, :cve:`2018-5995`, :cve:`2018-6412`, :cve:`2018-6554`, :cve:`2018-6555`, :cve:`2018-6927`, :cve:`2018-7191`, :cve:`2018-7273`, :cve:`2018-7480`, :cve:`2018-7492`, :cve:`2018-7566`, :cve:`2018-7740`, :cve:`2018-7754`, :cve:`2018-7755`, :cve:`2018-7757`, :cve:`2018-7995`, :cve:`2018-8043`, :cve:`2018-8087`, :cve:`2018-8781`, :cve:`2018-8822`, :cve:`2018-8897`, :cve:`2018-9363`, :cve:`2018-9385`, :cve:`2018-9415`, :cve:`2018-9422`, :cve:`2018-9465`, :cve:`2018-9516`, :cve:`2018-9517`, :cve:`2018-9518`, :cve:`2018-9568`, :cve:`2019-0136`, :cve:`2019-0145`, :cve:`2019-0146`, :cve:`2019-0147`, :cve:`2019-0148`, :cve:`2019-0149`, :cve:`2019-0154`, :cve:`2019-0155`, :cve:`2019-10124`, :cve:`2019-10125`, :cve:`2019-10126`, :cve:`2019-10142`, :cve:`2019-10207`, :cve:`2019-10220`, :cve:`2019-10638`, :cve:`2019-10639`, :cve:`2019-11085`, :cve:`2019-11091`, :cve:`2019-11135`, :cve:`2019-11190`, :cve:`2019-11191`, :cve:`2019-1125`, :cve:`2019-11477`, :cve:`2019-11478`, :cve:`2019-11479`, :cve:`2019-11486`, :cve:`2019-11487`, :cve:`2019-11599`, :cve:`2019-11683`, :cve:`2019-11810`, :cve:`2019-11811`, :cve:`2019-11815`, :cve:`2019-11833`, :cve:`2019-11884`, :cve:`2019-12378`, :cve:`2019-12379`, :cve:`2019-12380`, :cve:`2019-12381`, :cve:`2019-12382`, :cve:`2019-12454`, :cve:`2019-12455`, :cve:`2019-12614`, :cve:`2019-12615`, :cve:`2019-12817`, :cve:`2019-12818`, :cve:`2019-12819`, :cve:`2019-12881`, :cve:`2019-12984`, :cve:`2019-13233`, :cve:`2019-13272`, :cve:`2019-13631`, :cve:`2019-13648`, :cve:`2019-14283`, :cve:`2019-14284`, :cve:`2019-14615`, :cve:`2019-14763`, :cve:`2019-14814`, :cve:`2019-14815`, :cve:`2019-14816`, :cve:`2019-14821`, :cve:`2019-14835`, :cve:`2019-14895`, :cve:`2019-14896`, :cve:`2019-14897`, :cve:`2019-14901`, :cve:`2019-15030`, :cve:`2019-15031`, :cve:`2019-15090`, :cve:`2019-15098`, :cve:`2019-15099`, :cve:`2019-15117`, :cve:`2019-15118`, :cve:`2019-15211`, :cve:`2019-15212`, :cve:`2019-15213`, :cve:`2019-15214`, :cve:`2019-15215`, :cve:`2019-15216`, :cve:`2019-15217`, :cve:`2019-15218`, :cve:`2019-15219`, :cve:`2019-15220`, :cve:`2019-15221`, :cve:`2019-15222`, :cve:`2019-15223`, :cve:`2019-15291`, :cve:`2019-15292`, :cve:`2019-15504`, :cve:`2019-15505`, :cve:`2019-15538`, :cve:`2019-15666`, :cve:`2019-15807`, :cve:`2019-15916`, :cve:`2019-15917`, :cve:`2019-15918`, :cve:`2019-15919`, :cve:`2019-15920`, :cve:`2019-15921`, :cve:`2019-15922`, :cve:`2019-15923`, :cve:`2019-15924`, :cve:`2019-15925`, :cve:`2019-15926`, :cve:`2019-15927`, :cve:`2019-16229`, :cve:`2019-16230`, :cve:`2019-16231`, :cve:`2019-16232`, :cve:`2019-16233`, :cve:`2019-16234`, :cve:`2019-16413`, :cve:`2019-16714`, :cve:`2019-16746`, :cve:`2019-16921`, :cve:`2019-16994`, :cve:`2019-16995`, :cve:`2019-17052`, :cve:`2019-17053`, :cve:`2019-17054`, :cve:`2019-17055`, :cve:`2019-17056`, :cve:`2019-17075`, :cve:`2019-17133`, :cve:`2019-17351`, :cve:`2019-17666`, :cve:`2019-18198`, :cve:`2019-18282`, :cve:`2019-18660`, :cve:`2019-18675`, :cve:`2019-18683`, :cve:`2019-18786`, :cve:`2019-18805`, :cve:`2019-18806`, :cve:`2019-18807`, :cve:`2019-18808`, :cve:`2019-18809`, :cve:`2019-18810`, :cve:`2019-18811`, :cve:`2019-18812`, :cve:`2019-18813`, :cve:`2019-18814`, :cve:`2019-18885`, :cve:`2019-19036`, :cve:`2019-19037`, :cve:`2019-19039`, :cve:`2019-19043`, :cve:`2019-19044`, :cve:`2019-19045`, :cve:`2019-19046`, :cve:`2019-19047`, :cve:`2019-19048`, :cve:`2019-19049`, :cve:`2019-19050`, :cve:`2019-19051`, :cve:`2019-19052`, :cve:`2019-19053`, :cve:`2019-19054`, :cve:`2019-19055`, :cve:`2019-19056`, :cve:`2019-19057`, :cve:`2019-19058`, :cve:`2019-19059`, :cve:`2019-19060`, :cve:`2019-19061`, :cve:`2019-19062`, :cve:`2019-19063`, :cve:`2019-19064`, :cve:`2019-19065`, :cve:`2019-19066`, :cve:`2019-19067`, :cve:`2019-19068`, :cve:`2019-19069`, :cve:`2019-19070`, :cve:`2019-19071`, :cve:`2019-19072`, :cve:`2019-19073`, :cve:`2019-19074`, :cve:`2019-19075`, :cve:`2019-19076`, :cve:`2019-19077`, :cve:`2019-19078`, :cve:`2019-19079`, :cve:`2019-19080`, :cve:`2019-19081`, :cve:`2019-19082`, :cve:`2019-19083`, :cve:`2019-19227`, :cve:`2019-19241`, :cve:`2019-19252`, :cve:`2019-19318`, :cve:`2019-19319`, :cve:`2019-19332`, :cve:`2019-19338`, :cve:`2019-19377`, :cve:`2019-19447`, :cve:`2019-19448`, :cve:`2019-19449`, :cve:`2019-19462`, :cve:`2019-19523`, :cve:`2019-19524`, :cve:`2019-19525`, :cve:`2019-19526`, :cve:`2019-19527`, :cve:`2019-19528`, :cve:`2019-19529`, :cve:`2019-19530`, :cve:`2019-19531`, :cve:`2019-19532`, :cve:`2019-19533`, :cve:`2019-19534`, :cve:`2019-19535`, :cve:`2019-19536`, :cve:`2019-19537`, :cve:`2019-19543`, :cve:`2019-19602`, :cve:`2019-19767`, :cve:`2019-19768`, :cve:`2019-19769`, :cve:`2019-19770`, :cve:`2019-19807`, :cve:`2019-19813`, :cve:`2019-19815`, :cve:`2019-19816`, :cve:`2019-19922`, :cve:`2019-19927`, :cve:`2019-19947`, :cve:`2019-19965`, :cve:`2019-19966`, :cve:`2019-1999`, :cve:`2019-20054`, :cve:`2019-20095`, :cve:`2019-20096`, :cve:`2019-2024`, :cve:`2019-2025`, :cve:`2019-20422`, :cve:`2019-2054`, :cve:`2019-20636`, :cve:`2019-20806`, :cve:`2019-20810`, :cve:`2019-20811`, :cve:`2019-20812`, :cve:`2019-20908`, :cve:`2019-20934`, :cve:`2019-2101`, :cve:`2019-2181`, :cve:`2019-2182`, :cve:`2019-2213`, :cve:`2019-2214`, :cve:`2019-2215`, :cve:`2019-25044`, :cve:`2019-25045`, :cve:`2019-3016`, :cve:`2019-3459`, :cve:`2019-3460`, :cve:`2019-3701`, :cve:`2019-3819`, :cve:`2019-3837`, :cve:`2019-3846`, :cve:`2019-3874`, :cve:`2019-3882`, :cve:`2019-3887`, :cve:`2019-3892`, :cve:`2019-3896`, :cve:`2019-3900`, :cve:`2019-3901`, :cve:`2019-5108`, :cve:`2019-6133`, :cve:`2019-6974`, :cve:`2019-7221`, :cve:`2019-7222`, :cve:`2019-7308`, :cve:`2019-8912`, :cve:`2019-8956`, :cve:`2019-8980`, :cve:`2019-9003`, :cve:`2019-9162`, :cve:`2019-9213`, :cve:`2019-9245`, :cve:`2019-9444`, :cve:`2019-9445`, :cve:`2019-9453`, :cve:`2019-9454`, :cve:`2019-9455`, :cve:`2019-9456`, :cve:`2019-9457`, :cve:`2019-9458`, :cve:`2019-9466`, :cve:`2019-9500`, :cve:`2019-9503`, :cve:`2019-9506`, :cve:`2019-9857`, :cve:`2020-0009`, :cve:`2020-0030`, :cve:`2020-0041`, :cve:`2020-0066`, :cve:`2020-0067`, :cve:`2020-0110`, :cve:`2020-0255`, :cve:`2020-0305`, :cve:`2020-0404`, :cve:`2020-0423`, :cve:`2020-0427`, :cve:`2020-0429`, :cve:`2020-0430`, :cve:`2020-0431`, :cve:`2020-0432`, :cve:`2020-0433`, :cve:`2020-0435`, :cve:`2020-0444`, :cve:`2020-0465`, :cve:`2020-0466`, :cve:`2020-0543`, :cve:`2020-10135`, :cve:`2020-10690`, :cve:`2020-10711`, :cve:`2020-10720`, :cve:`2020-10732`, :cve:`2020-10742`, :cve:`2020-10751`, :cve:`2020-10757`, :cve:`2020-10766`, :cve:`2020-10767`, :cve:`2020-10768`, :cve:`2020-10769`, :cve:`2020-10773`, :cve:`2020-10781`, :cve:`2020-10942`, :cve:`2020-11494`, :cve:`2020-11565`, :cve:`2020-11608`, :cve:`2020-11609`, :cve:`2020-11668`, :cve:`2020-11669`, :cve:`2020-11884`, :cve:`2020-12114`, :cve:`2020-12351`, :cve:`2020-12352`, :cve:`2020-12464`, :cve:`2020-12465`, :cve:`2020-12652`, :cve:`2020-12653`, :cve:`2020-12654`, :cve:`2020-12655`, :cve:`2020-12656`, :cve:`2020-12657`, :cve:`2020-12659`, :cve:`2020-12768`, :cve:`2020-12769`, :cve:`2020-12770`, :cve:`2020-12771`, :cve:`2020-12826`, :cve:`2020-12888`, :cve:`2020-12912`, :cve:`2020-13143`, :cve:`2020-13974`, :cve:`2020-14305`, :cve:`2020-14314`, :cve:`2020-14331`, :cve:`2020-14351`, :cve:`2020-14353`, :cve:`2020-14356`, :cve:`2020-14381`, :cve:`2020-14385`, :cve:`2020-14386`, :cve:`2020-14390`, :cve:`2020-14416`, :cve:`2020-15393`, :cve:`2020-15436`, :cve:`2020-15437`, :cve:`2020-15780`, :cve:`2020-15852`, :cve:`2020-16119`, :cve:`2020-16120`, :cve:`2020-16166`, :cve:`2020-1749`, :cve:`2020-24394`, :cve:`2020-24490`, :cve:`2020-24586`, :cve:`2020-24587`, :cve:`2020-24588`, :cve:`2020-25211`, :cve:`2020-25212`, :cve:`2020-25221`, :cve:`2020-25284`, :cve:`2020-25285`, :cve:`2020-25639`, :cve:`2020-25641`, :cve:`2020-25643`, :cve:`2020-25645`, :cve:`2020-25656`, :cve:`2020-25668`, :cve:`2020-25669`, :cve:`2020-25670`, :cve:`2020-25671`, :cve:`2020-25672`, :cve:`2020-25673`, :cve:`2020-25704`, :cve:`2020-25705`, :cve:`2020-26088`, :cve:`2020-26139`, :cve:`2020-26141`, :cve:`2020-26145`, :cve:`2020-26147`, :cve:`2020-26541`, :cve:`2020-26555`, :cve:`2020-26558`, :cve:`2020-27066`, :cve:`2020-27067`, :cve:`2020-27068`, :cve:`2020-27152`, :cve:`2020-27170`, :cve:`2020-27171`, :cve:`2020-27194`, :cve:`2020-2732`, :cve:`2020-27418`, :cve:`2020-27673`, :cve:`2020-27675`, :cve:`2020-27777`, :cve:`2020-27784`, :cve:`2020-27786`, :cve:`2020-27815`, :cve:`2020-27820`, :cve:`2020-27825`, :cve:`2020-27830`, :cve:`2020-27835`, :cve:`2020-28097`, :cve:`2020-28374`, :cve:`2020-28588`, :cve:`2020-28915`, :cve:`2020-28941`, :cve:`2020-28974`, :cve:`2020-29368`, :cve:`2020-29369`, :cve:`2020-29370`, :cve:`2020-29371`, :cve:`2020-29372`, :cve:`2020-29373`, :cve:`2020-29374`, :cve:`2020-29534`, :cve:`2020-29568`, :cve:`2020-29569`, :cve:`2020-29660`, :cve:`2020-29661`, :cve:`2020-35499`, :cve:`2020-35508`, :cve:`2020-35513`, :cve:`2020-35519`, :cve:`2020-36158`, :cve:`2020-36310`, :cve:`2020-36311`, :cve:`2020-36312`, :cve:`2020-36313`, :cve:`2020-36322`, :cve:`2020-36385`, :cve:`2020-36386`, :cve:`2020-36387`, :cve:`2020-36516`, :cve:`2020-36557`, :cve:`2020-36558`, :cve:`2020-36691`, :cve:`2020-36694`, :cve:`2020-36766`, :cve:`2020-3702`, :cve:`2020-4788`, :cve:`2020-7053`, :cve:`2020-8428`, :cve:`2020-8647`, :cve:`2020-8648`, :cve:`2020-8649`, :cve:`2020-8694`, :cve:`2020-8834`, :cve:`2020-8835`, :cve:`2020-8992`, :cve:`2020-9383`, :cve:`2020-9391`, :cve:`2021-0129`, :cve:`2021-0342`, :cve_mitre:`2021-0447`, :cve_mitre:`2021-0448`, :cve:`2021-0512`, :cve:`2021-0605`, :cve:`2021-0707`, :cve:`2021-0920`, :cve:`2021-0929`, :cve:`2021-0935`, :cve_mitre:`2021-0937`, :cve:`2021-0938`, :cve:`2021-0941`, :cve:`2021-1048`, :cve:`2021-20177`, :cve:`2021-20194`, :cve:`2021-20226`, :cve:`2021-20239`, :cve:`2021-20261`, :cve:`2021-20265`, :cve:`2021-20268`, :cve:`2021-20292`, :cve:`2021-20317`, :cve:`2021-20320`, :cve:`2021-20321`, :cve:`2021-20322`, :cve:`2021-21781`, :cve:`2021-22543`, :cve:`2021-22555`, :cve:`2021-22600`, :cve:`2021-23133`, :cve:`2021-23134`, :cve:`2021-26401`, :cve:`2021-26708`, :cve:`2021-26930`, :cve:`2021-26931`, :cve:`2021-26932`, :cve:`2021-27363`, :cve:`2021-27364`, :cve:`2021-27365`, :cve:`2021-28038`, :cve:`2021-28039`, :cve:`2021-28375`, :cve:`2021-28660`, :cve:`2021-28688`, :cve:`2021-28691`, :cve:`2021-28711`, :cve:`2021-28712`, :cve:`2021-28713`, :cve:`2021-28714`, :cve:`2021-28715`, :cve:`2021-28950`, :cve:`2021-28951`, :cve:`2021-28952`, :cve:`2021-28964`, :cve:`2021-28971`, :cve:`2021-28972`, :cve:`2021-29154`, :cve:`2021-29155`, :cve:`2021-29264`, :cve:`2021-29265`, :cve:`2021-29266`, :cve:`2021-29646`, :cve:`2021-29647`, :cve:`2021-29648`, :cve:`2021-29649`, :cve:`2021-29650`, :cve:`2021-29657`, :cve:`2021-30002`, :cve:`2021-30178`, :cve:`2021-31440`, :cve:`2021-3178`, :cve:`2021-31829`, :cve:`2021-31916`, :cve:`2021-32399`, :cve:`2021-32606`, :cve:`2021-33033`, :cve:`2021-33034`, :cve:`2021-33098`, :cve:`2021-33135`, :cve:`2021-33200`, :cve:`2021-3347`, :cve:`2021-3348`, :cve:`2021-33624`, :cve:`2021-33655`, :cve:`2021-33656`, :cve:`2021-33909`, :cve:`2021-3411`, :cve:`2021-3428`, :cve:`2021-3444`, :cve:`2021-34556`, :cve:`2021-34693`, :cve:`2021-3483`, :cve:`2021-34866`, :cve:`2021-3489`, :cve:`2021-3490`, :cve:`2021-3491`, :cve_mitre:`2021-34981`, :cve:`2021-3501`, :cve:`2021-35039`, :cve:`2021-3506`, :cve:`2021-3543`, :cve:`2021-35477`, :cve:`2021-3564`, :cve:`2021-3573`, :cve:`2021-3587`, :cve_mitre:`2021-3600`, :cve:`2021-3609`, :cve:`2021-3612`, :cve:`2021-3635`, :cve:`2021-3640`, :cve:`2021-3653`, :cve:`2021-3655`, :cve:`2021-3656`, :cve:`2021-3659`, :cve:`2021-3679`, :cve:`2021-3715`, :cve:`2021-37159`, :cve:`2021-3732`, :cve:`2021-3736`, :cve:`2021-3739`, :cve:`2021-3743`, :cve:`2021-3744`, :cve:`2021-3752`, :cve:`2021-3753`, :cve:`2021-37576`, :cve:`2021-3759`, :cve:`2021-3760`, :cve:`2021-3764`, :cve:`2021-3772`, :cve:`2021-38160`, :cve:`2021-38166`, :cve:`2021-38198`, :cve:`2021-38199`, :cve:`2021-38200`, :cve:`2021-38201`, :cve:`2021-38202`, :cve:`2021-38203`, :cve:`2021-38204`, :cve:`2021-38205`, :cve:`2021-38206`, :cve:`2021-38207`, :cve:`2021-38208`, :cve:`2021-38209`, :cve:`2021-38300`, :cve:`2021-3894`, :cve:`2021-3896`, :cve:`2021-3923`, :cve:`2021-39633`, :cve:`2021-39634`, :cve:`2021-39636`, :cve:`2021-39648`, :cve:`2021-39656`, :cve:`2021-39657`, :cve:`2021-39685`, :cve:`2021-39686`, :cve:`2021-39698`, :cve:`2021-39711`, :cve:`2021-39713`, :cve:`2021-39714`, :cve:`2021-4001`, :cve:`2021-4002`, :cve:`2021-4028`, :cve:`2021-4032`, :cve:`2021-4037`, :cve:`2021-40490`, :cve:`2021-4083`, :cve:`2021-4090`, :cve:`2021-4093`, :cve:`2021-4095`, :cve:`2021-41073`, :cve:`2021-4135`, :cve:`2021-4148`, :cve:`2021-4149`, :cve:`2021-4154`, :cve:`2021-4155`, :cve:`2021-4157`, :cve:`2021-4159`, :cve:`2021-41864`, :cve:`2021-4197`, :cve:`2021-42008`, :cve:`2021-4202`, :cve:`2021-4203`, :cve:`2021-4218`, :cve:`2021-42252`, :cve:`2021-42327`, :cve:`2021-42739`, :cve:`2021-43056`, :cve:`2021-43057`, :cve:`2021-43267`, :cve:`2021-43389`, :cve:`2021-43975`, :cve:`2021-43976`, :cve:`2021-44733`, :cve:`2021-45095`, :cve:`2021-45100`, :cve:`2021-45402`, :cve:`2021-45469`, :cve:`2021-45480`, :cve:`2021-45485`, :cve:`2021-45486`, :cve:`2021-45868`, :cve:`2021-46283`, :cve:`2022-0001`, :cve:`2022-0002`, :cve:`2022-0168`, :cve:`2022-0171`, :cve:`2022-0185`, :cve:`2022-0264`, :cve:`2022-0286`, :cve:`2022-0322`, :cve:`2022-0330`, :cve:`2022-0433`, :cve:`2022-0435`, :cve:`2022-0487`, :cve:`2022-0492`, :cve:`2022-0494`, :cve:`2022-0516`, :cve:`2022-0617`, :cve:`2022-0644`, :cve:`2022-0646`, :cve:`2022-0742`, :cve:`2022-0812`, :cve:`2022-0847`, :cve:`2022-0850`, :cve:`2022-0854`, :cve:`2022-0995`, :cve:`2022-1011`, :cve:`2022-1012`, :cve:`2022-1015`, :cve:`2022-1016`, :cve:`2022-1043`, :cve:`2022-1048`, :cve:`2022-1055`, :cve:`2022-1158`, :cve:`2022-1184`, :cve:`2022-1195`, :cve:`2022-1198`, :cve:`2022-1199`, :cve:`2022-1204`, :cve:`2022-1205`, :cve:`2022-1353`, :cve:`2022-1419`, :cve:`2022-1462`, :cve:`2022-1516`, :cve:`2022-1651`, :cve:`2022-1652`, :cve:`2022-1671`, :cve:`2022-1678`, :cve:`2022-1679`, :cve:`2022-1729`, :cve:`2022-1734`, :cve:`2022-1786`, :cve:`2022-1789`, :cve:`2022-1836`, :cve:`2022-1852`, :cve:`2022-1882`, :cve:`2022-1943`, :cve:`2022-1966`, :cve:`2022-1972`, :cve:`2022-1973`, :cve:`2022-1974`, :cve:`2022-1975`, :cve:`2022-1976`, :cve:`2022-1998`, :cve:`2022-20008`, :cve:`2022-20132`, :cve:`2022-20141`, :cve:`2022-20153`, :cve:`2022-20154`, :cve:`2022-20158`, :cve:`2022-20166`, :cve:`2022-20368`, :cve:`2022-20369`, :cve:`2022-20421`, :cve:`2022-20422`, :cve:`2022-20423`, :cve_mitre:`2022-20565`, :cve:`2022-20566`, :cve:`2022-20567`, :cve:`2022-20572`, :cve:`2022-2078`, :cve:`2022-21123`, :cve:`2022-21125`, :cve:`2022-21166`, :cve:`2022-21385`, :cve:`2022-21499`, :cve_mitre:`2022-21505`, :cve:`2022-2153`, :cve:`2022-2196`, :cve_mitre:`2022-22942`, :cve:`2022-23036`, :cve:`2022-23037`, :cve:`2022-23038`, :cve:`2022-23039`, :cve:`2022-23040`, :cve:`2022-23041`, :cve:`2022-23042`, :cve:`2022-2308`, :cve:`2022-2318`, :cve:`2022-2380`, :cve:`2022-23816`, :cve:`2022-23960`, :cve:`2022-24122`, :cve:`2022-24448`, :cve:`2022-24958`, :cve:`2022-24959`, :cve:`2022-2503`, :cve:`2022-25258`, :cve:`2022-25375`, :cve:`2022-25636`, :cve_mitre:`2022-2585`, :cve_mitre:`2022-2586`, :cve_mitre:`2022-2588`, :cve:`2022-2590`, :cve_mitre:`2022-2602`, :cve:`2022-26365`, :cve:`2022-26373`, :cve:`2022-2639`, :cve:`2022-26490`, :cve:`2022-2663`, :cve:`2022-26966`, :cve:`2022-27223`, :cve:`2022-27666`, :cve:`2022-2785`, :cve:`2022-27950`, :cve:`2022-28356`, :cve:`2022-28388`, :cve:`2022-28389`, :cve:`2022-28390`, :cve:`2022-2873`, :cve:`2022-28796`, :cve:`2022-28893`, :cve:`2022-2905`, :cve:`2022-29156`, :cve:`2022-2938`, :cve:`2022-29581`, :cve:`2022-29582`, :cve:`2022-2959`, :cve:`2022-2964`, :cve:`2022-2977`, :cve:`2022-2978`, :cve:`2022-29900`, :cve:`2022-29901`, :cve:`2022-29968`, :cve:`2022-3028`, :cve:`2022-30594`, :cve:`2022-3061`, :cve:`2022-3077`, :cve:`2022-3078`, :cve:`2022-3103`, :cve:`2022-3104`, :cve:`2022-3105`, :cve:`2022-3106`, :cve:`2022-3107`, :cve:`2022-3110`, :cve:`2022-3111`, :cve:`2022-3112`, :cve:`2022-3113`, :cve:`2022-3114`, :cve:`2022-3115`, :cve:`2022-3169`, :cve:`2022-3170`, :cve:`2022-3202`, :cve:`2022-32250`, :cve:`2022-32296`, :cve:`2022-3239`, :cve:`2022-32981`, :cve:`2022-3303`, :cve:`2022-33740`, :cve:`2022-33741`, :cve:`2022-33742`, :cve:`2022-33743`, :cve:`2022-33744`, :cve:`2022-33981`, :cve:`2022-3424`, :cve:`2022-3435`, :cve:`2022-34494`, :cve:`2022-34495`, :cve:`2022-34918`, :cve:`2022-3521`, :cve:`2022-3524`, :cve:`2022-3526`, :cve:`2022-3531`, :cve:`2022-3532`, :cve:`2022-3534`, :cve:`2022-3535`, :cve:`2022-3541`, :cve:`2022-3542`, :cve:`2022-3543`, :cve:`2022-3545`, :cve:`2022-3564`, :cve:`2022-3565`, :cve:`2022-3577`, :cve:`2022-3586`, :cve:`2022-3594`, :cve:`2022-36123`, :cve:`2022-3619`, :cve:`2022-3621`, :cve:`2022-3623`, :cve:`2022-3625`, :cve:`2022-3628`, :cve:`2022-36280`, :cve:`2022-3629`, :cve:`2022-3630`, :cve:`2022-3633`, :cve:`2022-3635`, :cve:`2022-3640`, :cve:`2022-3643`, :cve:`2022-3646`, :cve:`2022-3649`, :cve:`2022-36879`, :cve:`2022-36946`, :cve:`2022-3707`, :cve:`2022-3910`, :cve:`2022-39189`, :cve:`2022-39190`, :cve:`2022-3977`, :cve:`2022-39842`, :cve:`2022-40307`, :cve:`2022-40476`, :cve:`2022-40768`, :cve:`2022-4095`, :cve:`2022-40982`, :cve:`2022-41218`, :cve:`2022-41222`, :cve:`2022-4127`, :cve:`2022-4128`, :cve:`2022-4129`, :cve:`2022-4139`, :cve:`2022-41674`, :cve:`2022-41849`, :cve:`2022-41850`, :cve:`2022-41858`, :cve:`2022-42328`, :cve:`2022-42329`, :cve:`2022-42432`, :cve:`2022-4269`, :cve:`2022-42703`, :cve:`2022-42719`, :cve:`2022-42720`, :cve:`2022-42721`, :cve:`2022-42722`, :cve:`2022-42895`, :cve:`2022-42896`, :cve:`2022-43750`, :cve:`2022-4378`, :cve:`2022-4379`, :cve:`2022-4382`, :cve:`2022-43945`, :cve:`2022-45869`, :cve:`2022-45886`, :cve:`2022-45887`, :cve:`2022-45888`, :cve:`2022-45919`, :cve:`2022-45934`, :cve:`2022-4662`, :cve:`2022-4744`, :cve:`2022-47518`, :cve:`2022-47519`, :cve:`2022-47520`, :cve:`2022-47521`, :cve:`2022-47929`, :cve:`2022-47938`, :cve:`2022-47939`, :cve:`2022-47940`, :cve:`2022-47941`, :cve:`2022-47942`, :cve:`2022-47943`, :cve:`2022-4842`, :cve:`2022-48423`, :cve:`2022-48424`, :cve:`2022-48425`, :cve:`2022-48502`, :cve:`2023-0030`, :cve:`2023-0045`, :cve:`2023-0047`, :cve:`2023-0122`, :cve:`2023-0160`, :cve:`2023-0179`, :cve:`2023-0210`, :cve:`2023-0240`, :cve:`2023-0266`, :cve:`2023-0394`, :cve:`2023-0458`, :cve:`2023-0459`, :cve:`2023-0461`, :cve:`2023-0468`, :cve:`2023-0469`, :cve:`2023-0590`, :cve:`2023-0615`, :cve_mitre:`2023-1032`, :cve:`2023-1073`, :cve:`2023-1074`, :cve:`2023-1076`, :cve:`2023-1077`, :cve:`2023-1078`, :cve:`2023-1079`, :cve:`2023-1095`, :cve:`2023-1118`, :cve:`2023-1192`, :cve:`2023-1194`, :cve:`2023-1195`, :cve:`2023-1206`, :cve:`2023-1249`, :cve:`2023-1252`, :cve:`2023-1281`, :cve:`2023-1380`, :cve:`2023-1382`, :cve:`2023-1390`, :cve:`2023-1513`, :cve:`2023-1582`, :cve:`2023-1583`, :cve:`2023-1611`, :cve:`2023-1637`, :cve:`2023-1652`, :cve:`2023-1670`, :cve:`2023-1829`, :cve:`2023-1838`, :cve:`2023-1855`, :cve:`2023-1859`, :cve:`2023-1989`, :cve:`2023-1990`, :cve:`2023-1998`, :cve:`2023-2002`, :cve:`2023-2006`, :cve:`2023-2008`, :cve:`2023-2019`, :cve:`2023-20569`, :cve:`2023-20588`, :cve:`2023-20593`, :cve:`2023-20938`, :cve:`2023-21102`, :cve:`2023-21106`, :cve:`2023-2124`, :cve:`2023-21255`, :cve:`2023-21264`, :cve:`2023-2156`, :cve:`2023-2162`, :cve:`2023-2163`, :cve:`2023-2166`, :cve:`2023-2177`, :cve:`2023-2194`, :cve:`2023-2235`, :cve:`2023-2236`, :cve:`2023-2248`, :cve:`2023-2269`, :cve:`2023-22996`, :cve:`2023-22997`, :cve:`2023-22998`, :cve:`2023-22999`, :cve:`2023-23001`, :cve:`2023-23002`, :cve:`2023-23003`, :cve:`2023-23004`, :cve:`2023-23005`, :cve:`2023-23006`, :cve:`2023-23454`, :cve:`2023-23455`, :cve:`2023-23559`, :cve:`2023-2483`, :cve:`2023-25012`, :cve:`2023-2513`, :cve:`2023-25775`, :cve:`2023-2598`, :cve:`2023-26544`, :cve:`2023-26545`, :cve:`2023-26605`, :cve:`2023-26606`, :cve:`2023-26607`, :cve:`2023-28327`, :cve:`2023-28328`, :cve:`2023-28410`, :cve:`2023-28464`, :cve:`2023-28466`, :cve:`2023-2860`, :cve:`2023-28772`, :cve:`2023-28866`, :cve:`2023-2898`, :cve:`2023-2985`, :cve:`2023-3006`, :cve:`2023-30456`, :cve:`2023-30772`, :cve:`2023-3090`, :cve:`2023-3106`, :cve:`2023-3111`, :cve:`2023-3117`, :cve:`2023-31248`, :cve:`2023-3141`, :cve:`2023-31436`, :cve:`2023-3159`, :cve:`2023-3161`, :cve:`2023-3212`, :cve:`2023-3220`, :cve:`2023-32233`, :cve:`2023-32247`, :cve:`2023-32248`, :cve:`2023-32250`, :cve:`2023-32252`, :cve:`2023-32254`, :cve:`2023-32257`, :cve:`2023-32258`, :cve:`2023-32269`, :cve:`2023-3268`, :cve:`2023-3269`, :cve:`2023-3312`, :cve:`2023-3317`, :cve:`2023-33203`, :cve:`2023-33250`, :cve:`2023-33288`, :cve:`2023-3338`, :cve:`2023-3355`, :cve:`2023-3357`, :cve:`2023-3358`, :cve:`2023-3359`, :cve:`2023-3390`, :cve:`2023-33951`, :cve:`2023-33952`, :cve:`2023-34255`, :cve:`2023-34256`, :cve:`2023-34319`, :cve:`2023-3439`, :cve:`2023-35001`, :cve:`2023-3567`, :cve:`2023-35788`, :cve:`2023-35823`, :cve:`2023-35824`, :cve:`2023-35826`, :cve:`2023-35828`, :cve:`2023-35829`, :cve:`2023-3609`, :cve:`2023-3610`, :cve:`2023-3611`, :cve:`2023-37453`, :cve:`2023-3772`, :cve:`2023-3773`, :cve:`2023-3776`, :cve:`2023-3777`, :cve:`2023-3812`, :cve:`2023-38409`, :cve:`2023-38426`, :cve:`2023-38427`, :cve:`2023-38428`, :cve:`2023-38429`, :cve:`2023-38430`, :cve:`2023-38431`, :cve:`2023-38432`, :cve:`2023-3863`, :cve_mitre:`2023-3865`, :cve_mitre:`2023-3866`, :cve_mitre:`2023-3867`, :cve:`2023-39189`, :cve:`2023-39192`, :cve:`2023-39193`, :cve:`2023-39194`, :cve:`2023-4004`, :cve:`2023-4015`, :cve:`2023-40283`, :cve:`2023-4128`, :cve:`2023-4132`, :cve:`2023-4147`, :cve:`2023-4155`, :cve:`2023-4194`, :cve:`2023-4206`, :cve:`2023-4207`, :cve:`2023-4208`, :cve:`2023-4273`, :cve:`2023-42752`, :cve:`2023-42753`, :cve:`2023-42755`, :cve:`2023-42756`, :cve:`2023-4385`, :cve:`2023-4387`, :cve:`2023-4389`, :cve:`2023-4394`, :cve:`2023-44466`, :cve:`2023-4459`, :cve:`2023-4569`, :cve:`2023-45862`, :cve:`2023-45871`, :cve:`2023-4611`, :cve:`2023-4623`, :cve:`2023-4732`, :cve:`2023-4921` and :cve:`2023-5345`
+- linux-yocto/5.15: Ignore :cve:`2022-45886`, :cve:`2022-45887`, :cve:`2022-45919`, :cve:`2022-48502`, :cve:`2023-0160`, :cve:`2023-1206`, :cve:`2023-20593`, :cve:`2023-21264`, :cve:`2023-2898`, :cve:`2023-31248`, :cve:`2023-33250`, :cve:`2023-34319`, :cve:`2023-35001`, :cve:`2023-3611`, :cve:`2023-37453`, :cve:`2023-3773`, :cve:`2023-3776`, :cve:`2023-3777`, :cve:`2023-38432`, :cve:`2023-3863`, :cve_mitre:`2023-3865`, :cve_mitre:`2023-3866`, :cve:`2023-4004`, :cve:`2023-4015`, :cve:`2023-4132`, :cve:`2023-4147`, :cve:`2023-4194`, :cve:`2023-4385`, :cve:`2023-4387`, :cve:`2023-4389`, :cve:`2023-4394`, :cve:`2023-4459` and :cve:`2023-4611`
+- openssl: Fix :cve:`2023-4807` and :cve:`2023-5363`
+- python3-git: Fix :cve:`2023-40590` and :cve:`2023-41040`
+- python3-urllib3: Fix :cve:`2023-43804`
+- qemu: Ignore :cve:`2023-2680`
+- ruby: Fix :cve:`2023-36617`
+- shadow: Fix :cve_mitre:`2023-4641`
+- tiff: Fix :cve:`2023-3576` and :cve:`2023-40745`
+- vim: Fix :cve:`2023-5441` and :cve:`2023-5535`
+- webkitgtk: Fix :cve:`2023-32439`
+- xdg-utils: Fix :cve:`2022-4055`
+- xserver-xorg: ignore :cve:`2022-3553` (XQuartz-specific)
+- zlib: Fix :cve:`2023-45853`
+
+
+
+Fixes in Yocto-4.0.14
+~~~~~~~~~~~~~~~~~~~~~
+
+- SECURITY.md: Add file
+- apt: add missing <cstdint> for uint16_t
+- bind: update to 9.18.19
+- bitbake: SECURITY.md: add file
+- bitbake: bitbake-getvar: Add a quiet command line argument
+- bitbake: bitbake-worker/runqueue: Avoid unnecessary bytes object copies
+- brief-yoctoprojectqs: use new CDN mirror for sstate
+- bsp-guide: bsp.rst: replace reference to wiki
+- bsp-guide: bsp: skip Intel machines no longer supported in Poky
+- build-appliance-image: Update to kirkstone head revision
+- ccache: fix build with gcc-13
+- cml1: Fix KCONFIG_CONFIG_COMMAND not conveyed fully in do_menuconfig
+- contributor-guide/style-guide: Add a note about task idempotence
+- contributor-guide/style-guide: Refer to recipes, not packages
+- contributor-guide: deprecate "Accepted" patch status
+- contributor-guide: discourage marking patches as Inappropriate
+- contributor-guide: recipe-style-guide: add more patch tagging examples
+- contributor-guide: recipe-style-guide: add section about CVE patches
+- contributor-guide: style-guide: discourage using Pending patch status
+- dev-manual: add security team processes
+- dev-manual: fix testimage usage instructions
+- dev-manual: layers: Add notes about layer.conf
+- dev-manual: new-recipe.rst: add missing parenthesis to "Patching Code" section
+- dev-manual: new-recipe.rst: replace reference to wiki
+- dev-manual: start.rst: remove obsolete reference
+- dev-manual: wic: update "wic list images" output
+- dev/ref-manual: Document :term:`INIT_MANAGER`
+- fontcache.bbclass: avoid native recipes depending on target fontconfig
+- glibc: Update to latest on stable 2.35 branch (c84018a05aec..)
+- json-c: define :term:`CVE_VERSION`
+- kernel.bbclass: Add force flag to rm calls
+- libxpm: upgrade to 3.5.17
+- linux-firmware: create separate packages
+- linux-firmware: upgrade to 20230804
+- linux-yocto/5.10: update to v5.10.197
+- linux-yocto: update CVE exclusions
+- manuals: correct "yocto-linux" by "linux-yocto"
+- manuals: update linux-yocto append examples
+- migration-guides: add release notes for 4.0.13
+- openssl: Upgrade to 3.0.12
+- overview: Add note about non-reproducibility side effects
+- package_rpm: Allow compression mode override
+- poky.conf: bump version for 4.0.14
+- profile-manual: aesthetic cleanups
+- python3-git: upgrade to 3.1.37
+- python3-jinja2: fix for the ptest result format
+- python3-urllib3: upgrade to 1.26.17
+- ref-manual: Fix :term:`PACKAGECONFIG` term and add an example
+- ref-manual: Warn about :term:`COMPATIBLE_MACHINE` skipping native recipes
+- ref-manual: releases.svg: Scarthgap is now version 5.0
+- ref-manual: variables: add :term:`RECIPE_SYSROOT` and :term:`RECIPE_SYSROOT_NATIVE`
+- ref-manual: variables: add :term:`TOOLCHAIN_OPTIONS` variable
+- ref-manual: variables: add example for :term:`SYSROOT_DIRS` variable
+- ref-manual: variables: provide no-match example for :term:`COMPATIBLE_MACHINE`
+- sdk-manual: appendix-obtain: improve and update descriptions
+- test-manual: reproducible-builds: stop mentioning LTO bug
+- uboot-extlinux-config.bbclass: fix missed override syntax migration
+- vim: Upgrade to 9.0.2048
+
+
+Known Issues in Yocto-4.0.14
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+- N/A
+
+
+Contributors to Yocto-4.0.14
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+- Alexander Kanavin
+- Archana Polampalli
+- Armin Kuster
+- Arne Schwerdt
+- BELHADJ SALEM Talel
+- Bruce Ashfield
+- Chaitanya Vadrevu
+- Colin McAllister
+- Deepthi Hemraj
+- Etienne Cordonnier
+- Fahad Arslan
+- Hitendra Prajapati
+- Jaeyoon Jung
+- Joshua Watt
+- Khem Raj
+- Lee Chee Yang
+- Marta Rybczynska
+- Martin Jansa
+- Meenali Gupta
+- Michael Opdenacker
+- Narpat Mali
+- Niko Mauno
+- Paul Eggleton
+- Paulo Neves
+- Peter Marko
+- Quentin Schulz
+- Richard Purdie
+- Robert P. J. Day
+- Roland Hieber
+- Ross Burton
+- Ryan Eatmon
+- Shubham Kulkarni
+- Siddharth Doshi
+- Soumya Sambu
+- Steve Sakoman
+- Tim Orling
+- Trevor Gamblin
+- Vijay Anusuri
+- Wang Mingyu
+- Yash Shinde
+- Yogita Urade
+
+
+Repositories / Downloads for Yocto-4.0.14
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+poky
+
+- Repository Location: :yocto_git:`/poky`
+- Branch: :yocto_git:`kirkstone </poky/log/?h=kirkstone>`
+- Tag: :yocto_git:`yocto-4.0.14 </poky/log/?h=yocto-4.0.14>`
+- Git Revision: :yocto_git:`d8d6d921fad14b82167d9f031d4fca06b5e01883 </poky/commit/?id=d8d6d921fad14b82167d9f031d4fca06b5e01883>`
+- Release Artefact: poky-d8d6d921fad14b82167d9f031d4fca06b5e01883
+- sha: 46a6301e3921ee67cfe6be7ea544d6257f0c0f02ef15c5091287e024ff02d5f5
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.0.14/poky-d8d6d921fad14b82167d9f031d4fca06b5e01883.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.0.14/poky-d8d6d921fad14b82167d9f031d4fca06b5e01883.tar.bz2
+
+openembedded-core
+
+- Repository Location: :oe_git:`/openembedded-core`
+- Branch: :oe_git:`kirkstone </openembedded-core/log/?h=kirkstone>`
+- Tag: :oe_git:`yocto-4.0.14 </openembedded-core/log/?h=yocto-4.0.14>`
+- Git Revision: :oe_git:`0eb8e67aa6833df0cde29833568a70e65c21d7e5 </openembedded-core/commit/?id=0eb8e67aa6833df0cde29833568a70e65c21d7e5>`
+- Release Artefact: oecore-0eb8e67aa6833df0cde29833568a70e65c21d7e5
+- sha: d510a7067b87ba935b8a7c9f9608d0e06b057009ea753ed190ddfacc7195ecc5
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.0.14/oecore-0eb8e67aa6833df0cde29833568a70e65c21d7e5.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.0.14/oecore-0eb8e67aa6833df0cde29833568a70e65c21d7e5.tar.bz2
+
+meta-mingw
+
+- Repository Location: :yocto_git:`/meta-mingw`
+- Branch: :yocto_git:`kirkstone </meta-mingw/log/?h=kirkstone>`
+- Tag: :yocto_git:`yocto-4.0.14 </meta-mingw/log/?h=yocto-4.0.14>`
+- Git Revision: :yocto_git:`f6b38ce3c90e1600d41c2ebb41e152936a0357d7 </meta-mingw/commit/?id=f6b38ce3c90e1600d41c2ebb41e152936a0357d7>`
+- Release Artefact: meta-mingw-f6b38ce3c90e1600d41c2ebb41e152936a0357d7
+- sha: 7d57167c19077f4ab95623d55a24c2267a3a3fb5ed83688659b4c03586373b25
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.0.14/meta-mingw-f6b38ce3c90e1600d41c2ebb41e152936a0357d7.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.0.14/meta-mingw-f6b38ce3c90e1600d41c2ebb41e152936a0357d7.tar.bz2
+
+meta-gplv2
+
+- Repository Location: :yocto_git:`/meta-gplv2`
+- Branch: :yocto_git:`kirkstone </meta-gplv2/log/?h=kirkstone>`
+- Tag: :yocto_git:`yocto-4.0.14 </meta-gplv2/log/?h=yocto-4.0.14>`
+- Git Revision: :yocto_git:`d2f8b5cdb285b72a4ed93450f6703ca27aa42e8a </meta-gplv2/commit/?id=d2f8b5cdb285b72a4ed93450f6703ca27aa42e8a>`
+- Release Artefact: meta-gplv2-d2f8b5cdb285b72a4ed93450f6703ca27aa42e8a
+- sha: c386f59f8a672747dc3d0be1d4234b6039273d0e57933eb87caa20f56b9cca6d
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.0.14/meta-gplv2-d2f8b5cdb285b72a4ed93450f6703ca27aa42e8a.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.0.14/meta-gplv2-d2f8b5cdb285b72a4ed93450f6703ca27aa42e8a.tar.bz2
+
+bitbake
+
+- Repository Location: :oe_git:`/bitbake`
+- Branch: :oe_git:`2.0 </bitbake/log/?h=2.0>`
+- Tag: :oe_git:`yocto-4.0.14 </bitbake/log/?h=yocto-4.0.14>`
+- Git Revision: :oe_git:`6c1ffa9091d0c53a100e8c8c15122d28642034bd </bitbake/commit/?id=6c1ffa9091d0c53a100e8c8c15122d28642034bd>`
+- Release Artefact: bitbake-6c1ffa9091d0c53a100e8c8c15122d28642034bd
+- sha: 1ceffc3b3359063341530c989a3606c897d862b61111538e683f101b02a360a2
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.0.14/bitbake-6c1ffa9091d0c53a100e8c8c15122d28642034bd.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.0.14/bitbake-6c1ffa9091d0c53a100e8c8c15122d28642034bd.tar.bz2
+
+yocto-docs
+
+- Repository Location: :yocto_git:`/yocto-docs`
+- Branch: :yocto_git:`kirkstone </yocto-docs/log/?h=kirkstone>`
+- Tag: :yocto_git:`yocto-4.0.14 </yocto-docs/log/?h=yocto-4.0.14>`
+- Git Revision: :yocto_git:`260b446a1a75d99399a3421cd8d6ba276f508f37 </yocto-docs/commit/?id=260b446a1a75d99399a3421cd8d6ba276f508f37>`
+
diff --git a/documentation/migration-guides/release-notes-4.0.15.rst b/documentation/migration-guides/release-notes-4.0.15.rst
new file mode 100644
index 0000000000..b2731530e8
--- /dev/null
+++ b/documentation/migration-guides/release-notes-4.0.15.rst
@@ -0,0 +1,189 @@
+.. SPDX-License-Identifier: CC-BY-SA-2.0-UK
+
+Release notes for Yocto-4.0.15 (Kirkstone)
+------------------------------------------
+
+Security Fixes in Yocto-4.0.15
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+- avahi: Fix :cve:`2023-1981`, :cve:`2023-38469`, :cve:`2023-38470`, :cve:`2023-38471`, :cve:`2023-38472` and :cve:`2023-38473`
+- binutils: Fix :cve:`2022-47007`, :cve:`2022-47010` and :cve:`2022-48064`
+- bluez5: Fix :cve:`2023-45866`
+- ghostscript: Ignore GhostPCL :cve:`2023-38560`
+- gnutls: Fix :cve:`2023-5981`
+- go: Ignore :cve:`2023-45283` and :cve:`2023-45284`
+- grub: Fix :cve:`2023-4692` and :cve:`2023-4693`
+- gstreamer1.0-plugins-bad: Fix :cve_mitre:`2023-44429`
+- libsndfile: Fix :cve:`2022-33065`
+- libwebp: Fix :cve:`2023-4863`
+- openssl: Fix :cve:`2023-5678`
+- python3-cryptography: Fix :cve:`2023-49083`
+- qemu: Fix :cve:`2023-1544`
+- sudo: :cve:`2023-42456` and :cve_mitre:`2023-42465`
+- tiff: Fix :cve:`2023-41175`
+- vim: Fix :cve:`2023-46246`, :cve:`2023-48231`, :cve:`2023-48232`, :cve:`2023-48233`, :cve:`2023-48234`, :cve:`2023-48235`, :cve:`2023-48236`, :cve:`2023-48237` and :cve:`2023-48706`
+- xserver-xorg: Fix :cve:`2023-5367` and :cve:`2023-5380`
+- xwayland: Fix :cve:`2023-5367`
+
+
+Fixes in Yocto-4.0.15
+~~~~~~~~~~~~~~~~~~~~~
+
+- bash: changes to SIGINT handler while waiting for a child
+- bitbake: Fix disk space monitoring on cephfs
+- bitbake: bitbake-getvar: Make --quiet work with --recipe
+- bitbake: runqueue.py: fix PSI check logic
+- bitbake: runqueue: Add pressure change logging
+- bitbake: runqueue: convert deferral messages from bb.note to bb.debug
+- bitbake: runqueue: fix PSI check calculation
+- bitbake: runqueue: show more pressure data
+- bitbake: runqueue: show number of currently running bitbake threads when pressure changes
+- bitbake: tinfoil: Do not fail when logging is disabled and full config is used
+- build-appliance-image: Update to kirkstone head revision
+- cve-check: don't warn if a patch is remote
+- cve-check: slightly more verbose warning when adding the same package twice
+- cve-check: sort the package list in the JSON report
+- cve-exclusion_5.10.inc: update for 5.10.202
+- go: Fix issue in DNS resolver
+- goarch: Move Go architecture mapping to a library
+- gstreamer1.0-plugins-base: enable glx/opengl support
+- linux-yocto/5.10: update to v5.10.202
+- manuals: update class references
+- migration-guide: add release notes for 4.0.14
+- native: Clear TUNE_FEATURES/ABIEXTENSION
+- openssh: drop sudo from ptest dependencies
+- overview-manual: concepts: Add Bitbake Tasks Map
+- poky.conf: bump version for 4.0.15
+- python3-jinja2: Fixed ptest result output as per the standard
+- ref-manual: classes: explain cml1 class name
+- ref-manual: update :term:`SDK_NAME` variable documentation
+- ref-manual: variables: add :term:`RECIPE_MAINTAINER`
+- ref-manual: variables: document OEQA_REPRODUCIBLE_* variables
+- ref-manual: variables: mention new CDN for :term:`SSTATE_MIRRORS`
+- rust-common: Set llvm-target correctly for cross SDK targets
+- rust-cross-canadian: Fix ordering of target json config generation
+- rust-cross/rust-common: Merge arm target handling code to fix cross-canadian
+- rust-cross: Simplfy the rust_gen_target calls
+- rust-llvm: Allow overriding LLVM target archs
+- sdk-manual: extensible.rst: remove instructions for using SDK functionality directly in a yocto build
+- sudo: upgrade to 1.9.15p2
+- systemtap_git: fix used uninitialized error
+- vim: Improve locale handling
+- vim: Upgrade to 9.0.2130
+- vim: use upstream generated .po files
+
+
+Known Issues in Yocto-4.0.15
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+- N/A
+
+
+Contributors to Yocto-4.0.15
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+- Alexander Kanavin
+- Archana Polampalli
+- BELHADJ SALEM Talel
+- Bruce Ashfield
+- Chaitanya Vadrevu
+- Chen Qi
+- Deepthi Hemraj
+- Denys Dmytriyenko
+- Hitendra Prajapati
+- Lee Chee Yang
+- Li Wang
+- Martin Jansa
+- Meenali Gupta
+- Michael Opdenacker
+- Mikko Rapeli
+- Narpat Mali
+- Niko Mauno
+- Ninad Palsule
+- Niranjan Pradhan
+- Paul Eggleton
+- Peter Kjellerstedt
+- Peter Marko
+- Richard Purdie
+- Ross Burton
+- Samantha Jalabert
+- Sanjana
+- Soumya Sambu
+- Steve Sakoman
+- Tim Orling
+- Vijay Anusuri
+- Vivek Kumbhar
+- Wenlin Kang
+- Yogita Urade
+
+
+Repositories / Downloads for Yocto-4.0.15
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+poky
+
+- Repository Location: :yocto_git:`/poky`
+- Branch: :yocto_git:`kirkstone </poky/log/?h=kirkstone>`
+- Tag: :yocto_git:`yocto-4.0.15 </poky/log/?h=yocto-4.0.15>`
+- Git Revision: :yocto_git:`755632c2fcab43aa05cdcfa529727064b045073c </poky/commit/?id=755632c2fcab43aa05cdcfa529727064b045073c>`
+- Release Artefact: poky-755632c2fcab43aa05cdcfa529727064b045073c
+- sha: b40b43bd270d21a420c399981f9cfe0eb999f15e051fc2c89d124f249cdc0bd5
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.0.15/poky-755632c2fcab43aa05cdcfa529727064b045073c.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.0.15/poky-755632c2fcab43aa05cdcfa529727064b045073c.tar.bz2
+
+openembedded-core
+
+- Repository Location: :oe_git:`/openembedded-core`
+- Branch: :oe_git:`kirkstone </openembedded-core/log/?h=kirkstone>`
+- Tag: :oe_git:`yocto-4.0.15 </openembedded-core/log/?h=yocto-4.0.15>`
+- Git Revision: :oe_git:`eea685e1caafd8e8121006d3f8b5d0b8a4f2a933 </openembedded-core/commit/?id=eea685e1caafd8e8121006d3f8b5d0b8a4f2a933>`
+- Release Artefact: oecore-eea685e1caafd8e8121006d3f8b5d0b8a4f2a933
+- sha: ddc3d4a2c8a097f2aa7132ae716affacc44b119c616a1eeffb7db56caa7fc79e
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.0.15/oecore-eea685e1caafd8e8121006d3f8b5d0b8a4f2a933.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.0.15/oecore-eea685e1caafd8e8121006d3f8b5d0b8a4f2a933.tar.bz2
+
+meta-mingw
+
+- Repository Location: :yocto_git:`/meta-mingw`
+- Branch: :yocto_git:`kirkstone </meta-mingw/log/?h=kirkstone>`
+- Tag: :yocto_git:`yocto-4.0.15 </meta-mingw/log/?h=yocto-4.0.15>`
+- Git Revision: :yocto_git:`f6b38ce3c90e1600d41c2ebb41e152936a0357d7 </meta-mingw/commit/?id=f6b38ce3c90e1600d41c2ebb41e152936a0357d7>`
+- Release Artefact: meta-mingw-f6b38ce3c90e1600d41c2ebb41e152936a0357d7
+- sha: 7d57167c19077f4ab95623d55a24c2267a3a3fb5ed83688659b4c03586373b25
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.0.15/meta-mingw-f6b38ce3c90e1600d41c2ebb41e152936a0357d7.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.0.15/meta-mingw-f6b38ce3c90e1600d41c2ebb41e152936a0357d7.tar.bz2
+
+meta-gplv2
+
+- Repository Location: :yocto_git:`/meta-gplv2`
+- Branch: :yocto_git:`kirkstone </meta-gplv2/log/?h=kirkstone>`
+- Tag: :yocto_git:`yocto-4.0.15 </meta-gplv2/log/?h=yocto-4.0.15>`
+- Git Revision: :yocto_git:`d2f8b5cdb285b72a4ed93450f6703ca27aa42e8a </meta-gplv2/commit/?id=d2f8b5cdb285b72a4ed93450f6703ca27aa42e8a>`
+- Release Artefact: meta-gplv2-d2f8b5cdb285b72a4ed93450f6703ca27aa42e8a
+- sha: c386f59f8a672747dc3d0be1d4234b6039273d0e57933eb87caa20f56b9cca6d
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.0.15/meta-gplv2-d2f8b5cdb285b72a4ed93450f6703ca27aa42e8a.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.0.15/meta-gplv2-d2f8b5cdb285b72a4ed93450f6703ca27aa42e8a.tar.bz2
+
+bitbake
+
+- Repository Location: :oe_git:`/bitbake`
+- Branch: :oe_git:`2.0 </bitbake/log/?h=2.0>`
+- Tag: :oe_git:`yocto-4.0.15 </bitbake/log/?h=yocto-4.0.15>`
+- Git Revision: :oe_git:`42a1c9fe698a03feb34c5bba223c6e6e0350925b </bitbake/commit/?id=42a1c9fe698a03feb34c5bba223c6e6e0350925b>`
+- Release Artefact: bitbake-42a1c9fe698a03feb34c5bba223c6e6e0350925b
+- sha: 64c684ccd661fa13e25c859dfc68d66bec79281da0f4f81b0d6a9995acb659b5
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.0.15/bitbake-42a1c9fe698a03feb34c5bba223c6e6e0350925b.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.0.15/bitbake-42a1c9fe698a03feb34c5bba223c6e6e0350925b.tar.bz2
+
+yocto-docs
+
+- Repository Location: :yocto_git:`/yocto-docs`
+- Branch: :yocto_git:`kirkstone </yocto-docs/log/?h=kirkstone>`
+- Tag: :yocto_git:`yocto-4.0.15 </yocto-docs/log/?h=yocto-4.0.15>`
+- Git Revision: :yocto_git:`08fda7a5601393617b1ecfe89229459e14a90b1d </yocto-docs/commit/?id=08fda7a5601393617b1ecfe89229459e14a90b1d>`
+
diff --git a/documentation/migration-guides/release-notes-4.0.16.rst b/documentation/migration-guides/release-notes-4.0.16.rst
new file mode 100644
index 0000000000..0eb31832ab
--- /dev/null
+++ b/documentation/migration-guides/release-notes-4.0.16.rst
@@ -0,0 +1,191 @@
+.. SPDX-License-Identifier: CC-BY-SA-2.0-UK
+
+Release notes for Yocto-4.0.16 (Kirkstone)
+------------------------------------------
+
+Security Fixes in Yocto-4.0.16
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+- cpio: Fix :cve_mitre:`2023-7207`
+- curl: Revert "curl: Backport fix CVE-2023-32001"
+- curl: Fix :cve:`2023-46218`
+- dropbear:Fix :cve:`2023-48795`
+- ffmpeg: Fix :cve:`2022-3964` and :cve:`2022-3965`
+- ghostscript: Fix :cve:`2023-46751`
+- gnutls: Fix :cve:`2024-0553` and :cve:`2024-0567`
+- go: Fix :cve:`2023-39326`
+- openssh: Fix :cve:`2023-48795`, :cve:`2023-51384` and :cve:`2023-51385`
+- openssl: Fix :cve:`2023-6129` and :cve_mitre:`2023-6237`
+- pam: Fix :cve_mitre:`2024-22365`
+- perl: Fix :cve:`2023-47038`
+- qemu: Fix :cve:`2023-5088`
+- sqlite3: Fix :cve:`2023-7104`
+- systemd: Fix :cve:`2023-7008`
+- tiff: Fix :cve:`2023-6228`
+- xserver-xorg: Fix :cve:`2023-6377`, :cve:`2023-6478`, :cve:`2023-6816`, :cve_mitre:`2024-0229`, :cve:`2024-0408`, :cve:`2024-0409`, :cve_mitre:`2024-21885` and :cve_mitre:`2024-21886`
+- zlib: Ignore :cve:`2023-6992`
+
+
+Fixes in Yocto-4.0.16
+~~~~~~~~~~~~~~~~~~~~~
+
+- bitbake: asyncrpc: Add context manager API
+- bitbake: data: Add missing dependency handling of remove operator
+- bitbake: lib/bb: Add workaround for libgcc issues with python 3.8 and 3.9
+- bitbake: toastergui: verify that an existing layer path is given
+- build-appliance-image: Update to kirkstone head revision
+- contributor-guide: add License-Update tag
+- contributor-guide: fix command option
+- contributor-guide: use "apt" instead of "aptitude"
+- cpio: upgrade to 2.14
+- cve-update-nvd2-native: faster requests with API keys
+- cve-update-nvd2-native: increase the delay between subsequent request failures
+- cve-update-nvd2-native: make number of fetch attemtps configurable
+- cve-update-nvd2-native: remove unused variable CVE_SOCKET_TIMEOUT
+- dev-manual: Discourage the use of SRC_URI[md5sum]
+- dev-manual: layers: update link to YP Compatible form
+- dev-manual: runtime-testing: fix test module name
+- dev-manual: start.rst: update use of Download page
+- docs:what-i-wish-id-known.rst: fix URL
+- docs: document VSCode extension
+- docs:brief-yoctoprojectqs:index.rst: align variable order with default local.conf
+- docs:migration-guides: add release notes for 4.0.15
+- docs:migration-guides: release 3.5 is actually 4.0
+- elfutils: Disable stringop-overflow warning for build host
+- externalsrc: Ensure :term:`SRCREV` is processed before accessing :term:`SRC_URI`
+- linux-firmware: upgrade to 20231030
+- manuals: Add :term:`CONVERSION_CMD` definition
+- manuals: Add :term:`UBOOT_BINARY`, extend :term:`UBOOT_CONFIG`
+- perl: upgrade to 5.34.3
+- poky.conf: bump version for 4.0.16
+- pybootchartgui: fix 2 SyntaxWarnings
+- python3-ptest: skip test_storlines
+- ref-manual: Fix reference to MIRRORS/PREMIRRORS defaults
+- ref-manual: classes: remove insserv bbclass
+- ref-manual: releases.svg: update nanbield release status
+- ref-manual: resources: sync with master branch
+- ref-manual: update tested and supported distros
+- test-manual: add links to python unittest
+- test-manual: add or improve hyperlinks
+- test-manual: explicit or fix file paths
+- test-manual: resource updates
+- test-manual: text and formatting fixes
+- test-manual: use working example
+- testimage: Exclude wtmp from target-dumper commands
+- testimage: drop target_dumper, host_dumper, and monitor_dumper
+- tzdata: Upgrade to 2023d
+
+
+Known Issues in Yocto-4.0.16
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+- N/A
+
+
+Contributors to Yocto-4.0.16
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+- Aatir Manzur
+- Archana Polampalli
+- Dhairya Nagodra
+- Dmitry Baryshkov
+- Enguerrand de Ribaucourt
+- Hitendra Prajapati
+- Insu Park
+- Joshua Watt
+- Justin Bronder
+- Jörg Sommer
+- Khem Raj
+- Lee Chee Yang
+- mark.yang
+- Marta Rybczynska
+- Martin Jansa
+- Maxin B. John
+- Michael Opdenacker
+- Paul Barker
+- Peter Kjellerstedt
+- Peter Marko
+- Poonam Jadhav
+- Richard Purdie
+- Shubham Kulkarni
+- Simone Weiß
+- Soumya Sambu
+- Sourav Pramanik
+- Steve Sakoman
+- Trevor Gamblin
+- Vijay Anusuri
+- Vivek Kumbhar
+- Yoann Congal
+- Yogita Urade
+
+
+Repositories / Downloads for Yocto-4.0.16
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+poky
+
+- Repository Location: :yocto_git:`/poky`
+- Branch: :yocto_git:`kirkstone </poky/log/?h=kirkstone>`
+- Tag: :yocto_git:`yocto-4.0.16 </poky/log/?h=yocto-4.0.16>`
+- Git Revision: :yocto_git:`54af8c5e80ebf63707ef4e51cc9d374f716da603 </poky/commit/?id=54af8c5e80ebf63707ef4e51cc9d374f716da603>`
+- Release Artefact: poky-54af8c5e80ebf63707ef4e51cc9d374f716da603
+- sha: a53ec3a661cf56ca40c0fbf1500288c2c20abe94896d66a572bc5ccf5d92e9d6
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.0.16/poky-54af8c5e80ebf63707ef4e51cc9d374f716da603.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.0.16/poky-54af8c5e80ebf63707ef4e51cc9d374f716da603.tar.bz2
+
+openembedded-core
+
+- Repository Location: :oe_git:`/openembedded-core`
+- Branch: :oe_git:`kirkstone </openembedded-core/log/?h=kirkstone>`
+- Tag: :oe_git:`yocto-4.0.16 </openembedded-core/log/?h=yocto-4.0.16>`
+- Git Revision: :oe_git:`a744a897f0ea7d34c31c024c13031221f9a85f24 </openembedded-core/commit/?id=a744a897f0ea7d34c31c024c13031221f9a85f24>`
+- Release Artefact: oecore-a744a897f0ea7d34c31c024c13031221f9a85f24
+- sha: 8c2bc9487597b0caa9f5a1d72b18cfcd1ddc7e6d91f0f051313563d6af95aeec
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.0.16/oecore-a744a897f0ea7d34c31c024c13031221f9a85f24.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.0.16/oecore-a744a897f0ea7d34c31c024c13031221f9a85f24.tar.bz2
+
+meta-mingw
+
+- Repository Location: :yocto_git:`/meta-mingw`
+- Branch: :yocto_git:`kirkstone </meta-mingw/log/?h=kirkstone>`
+- Tag: :yocto_git:`yocto-4.0.16 </meta-mingw/log/?h=yocto-4.0.16>`
+- Git Revision: :yocto_git:`f6b38ce3c90e1600d41c2ebb41e152936a0357d7 </meta-mingw/commit/?id=f6b38ce3c90e1600d41c2ebb41e152936a0357d7>`
+- Release Artefact: meta-mingw-f6b38ce3c90e1600d41c2ebb41e152936a0357d7
+- sha: 7d57167c19077f4ab95623d55a24c2267a3a3fb5ed83688659b4c03586373b25
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.0.16/meta-mingw-f6b38ce3c90e1600d41c2ebb41e152936a0357d7.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.0.16/meta-mingw-f6b38ce3c90e1600d41c2ebb41e152936a0357d7.tar.bz2
+
+meta-gplv2
+
+- Repository Location: :yocto_git:`/meta-gplv2`
+- Branch: :yocto_git:`kirkstone </meta-gplv2/log/?h=kirkstone>`
+- Tag: :yocto_git:`yocto-4.0.16 </meta-gplv2/log/?h=yocto-4.0.16>`
+- Git Revision: :yocto_git:`d2f8b5cdb285b72a4ed93450f6703ca27aa42e8a </meta-gplv2/commit/?id=d2f8b5cdb285b72a4ed93450f6703ca27aa42e8a>`
+- Release Artefact: meta-gplv2-d2f8b5cdb285b72a4ed93450f6703ca27aa42e8a
+- sha: c386f59f8a672747dc3d0be1d4234b6039273d0e57933eb87caa20f56b9cca6d
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.0.16/meta-gplv2-d2f8b5cdb285b72a4ed93450f6703ca27aa42e8a.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.0.16/meta-gplv2-d2f8b5cdb285b72a4ed93450f6703ca27aa42e8a.tar.bz2
+
+bitbake
+
+- Repository Location: :oe_git:`/bitbake`
+- Branch: :oe_git:`2.0 </bitbake/log/?h=2.0>`
+- Tag: :oe_git:`yocto-4.0.16 </bitbake/log/?h=yocto-4.0.16>`
+- Git Revision: :oe_git:`ee090484cc25d760b8c20f18add17b5eff485b40 </bitbake/commit/?id=ee090484cc25d760b8c20f18add17b5eff485b40>`
+- Release Artefact: bitbake-ee090484cc25d760b8c20f18add17b5eff485b40
+- sha: 479e3a57ae9fbc2aa95292a7554caeef113bbfb28c226ed19547b8dde1c95314
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.0.16/bitbake-ee090484cc25d760b8c20f18add17b5eff485b40.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.0.16/bitbake-ee090484cc25d760b8c20f18add17b5eff485b40.tar.bz2
+
+yocto-docs
+
+- Repository Location: :yocto_git:`/yocto-docs`
+- Branch: :yocto_git:`kirkstone </yocto-docs/log/?h=kirkstone>`
+- Tag: :yocto_git:`yocto-4.0.16 </yocto-docs/log/?h=yocto-4.0.16>`
+- Git Revision: :yocto_git:`aba67b58711019a6ba439b2b77337f813ed799ac </yocto-docs/commit/?id=aba67b58711019a6ba439b2b77337f813ed799ac>`
+
diff --git a/documentation/migration-guides/release-notes-4.0.17.rst b/documentation/migration-guides/release-notes-4.0.17.rst
new file mode 100644
index 0000000000..07242584b8
--- /dev/null
+++ b/documentation/migration-guides/release-notes-4.0.17.rst
@@ -0,0 +1,238 @@
+.. SPDX-License-Identifier: CC-BY-SA-2.0-UK
+
+Release notes for Yocto-4.0.17 (Kirkstone)
+------------------------------------------
+
+Security Fixes in Yocto-4.0.17
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+- bind: Fix :cve:`2023-4408`, :cve:`2023-5517`, :cve:`2023-5679`, :cve:`2023-50868` and :cve:`2023-50387`
+- binutils: Fix :cve:`2023-39129` and :cve:`2023-39130`
+- curl: Fix :cve:`2023-46219`
+- curl: Ignore :cve:`2023-42915`
+- gcc: Ignore :cve:`2023-4039`
+- gdb: Fix :cve:`2023-39129` and :cve:`2023-39130`
+- glibc: Ignore :cve:`2023-0687`
+- go: Fix :cve:`2023-29406`, :cve:`2023-45285`, :cve:`2023-45287`, :cve:`2023-45289`, :cve:`2023-45290`, :cve:`2024-24784` and :cve:`2024-24785`
+- less: Fix :cve:`2022-48624`
+- libgit2: Fix :cve:`2024-24575` and :cve:`2024-24577`
+- libuv: fix :cve:`2024-24806`
+- libxml2: Fix for :cve:`2024-25062`
+- linux-yocto/5.15: Fix :cve:`2022-36402`, :cve:`2022-40982`, :cve:`2022-47940`, :cve:`2023-1193`, :cve:`2023-1194`, :cve:`2023-3772`, :cve_mitre:`2023-3867`, :cve:`2023-4128`, :cve:`2023-4206`, :cve:`2023-4207`, :cve:`2023-4208`, :cve:`2023-4244`, :cve:`2023-4273`, :cve:`2023-4563`, :cve:`2023-4569`, :cve:`2023-4623`, :cve:`2023-4881`, :cve:`2023-4921`, :cve:`2023-5158`, :cve:`2023-5717`, :cve:`2023-6040`, :cve:`2023-6121`, :cve:`2023-6176`, :cve:`2023-6546`, :cve:`2023-6606`, :cve:`2023-6622`, :cve:`2023-6817`, :cve:`2023-6915`, :cve:`2023-6931`, :cve:`2023-6932`, :cve:`2023-20569`, :cve:`2023-20588`, :cve:`2023-25775`, :cve:`2023-31085`, :cve:`2023-32247`, :cve:`2023-32250`, :cve:`2023-32252`, :cve:`2023-32254`, :cve:`2023-32257`, :cve:`2023-32258`, :cve:`2023-34324`, :cve:`2023-35827`, :cve:`2023-38427`, :cve:`2023-38430`, :cve:`2023-38431`, :cve:`2023-39189`, :cve:`2023-39192`, :cve:`2023-39193`, :cve:`2023-39194`, :cve:`2023-39198`, :cve:`2023-40283`, :cve:`2023-42752`, :cve:`2023-42753`, :cve:`2023-42754`, :cve:`2023-42755`, :cve:`2023-45871`, :cve:`2023-46343`, :cve:`2023-46813`, :cve:`2023-46838`, :cve:`2023-46862`, :cve:`2023-51042`, :cve:`2023-51779`, :cve_mitre:`2023-52340`, :cve:`2023-52429`, :cve:`2023-52435`, :cve:`2023-52436`, :cve:`2023-52438`, :cve:`2023-52439`, :cve:`2023-52441`, :cve:`2023-52442`, :cve:`2023-52443`, :cve:`2023-52444`, :cve:`2023-52445`, :cve:`2023-52448`, :cve:`2023-52449`, :cve:`2023-52451`, :cve:`2023-52454`, :cve:`2023-52456`, :cve:`2023-52457`, :cve:`2023-52458`, :cve:`2023-52463`, :cve:`2023-52464`, :cve:`2024-0340`, :cve:`2024-0584`, :cve:`2024-0607`, :cve:`2024-0641`, :cve:`2024-0646`, :cve:`2024-1085`, :cve:`2024-1086`, :cve:`2024-1151`, :cve:`2024-22705`, :cve:`2024-23849`, :cve:`2024-23850`, :cve:`2024-23851`, :cve:`2024-24860`, :cve:`2024-26586`, :cve:`2024-26589`, :cve:`2024-26591`, :cve:`2024-26592`, :cve:`2024-26593`, :cve:`2024-26594`, :cve:`2024-26597` and :cve:`2024-26598`
+- linux-yocto/5.15: Ignore :cve:`2020-27418`, :cve:`2020-36766`, :cve:`2021-33630`, :cve:`2021-33631`, :cve:`2022-48619`, :cve:`2023-2430`, :cve:`2023-4610`, :cve:`2023-4732`, :cve:`2023-5090`, :cve:`2023-5178`, :cve:`2023-5197`, :cve:`2023-5345`, :cve:`2023-5633`, :cve:`2023-5972`, :cve:`2023-6111`, :cve:`2023-6200`, :cve:`2023-6531`, :cve:`2023-6679`, :cve:`2023-7192`, :cve:`2023-40791`, :cve:`2023-42756`, :cve:`2023-44466`, :cve:`2023-45862`, :cve:`2023-45863`, :cve:`2023-45898`, :cve:`2023-51043`, :cve:`2023-51780`, :cve:`2023-51781`, :cve:`2023-51782`, :cve:`2023-52433`, :cve:`2023-52440`, :cve:`2023-52446`, :cve:`2023-52450`, :cve:`2023-52453`, :cve:`2023-52455`, :cve:`2023-52459`, :cve:`2023-52460`, :cve:`2023-52461`, :cve:`2023-52462`, :cve:`2024-0193`, :cve:`2024-0443`, :cve:`2024-0562`, :cve:`2024-0582`, :cve:`2024-0639`, :cve:`2024-0775`, :cve:`2024-26581`, :cve:`2024-26582`, :cve:`2024-26590`, :cve:`2024-26596` and :cve:`2024-26599`
+- linux-yocto/5.10: Fix :cve:`2023-6040`, :cve:`2023-6121`, :cve:`2023-6606`, :cve:`2023-6817`, :cve:`2023-6915`, :cve:`2023-6931`, :cve:`2023-6932`, :cve:`2023-39198`, :cve:`2023-46838`, :cve:`2023-51779`, :cve:`2023-51780`, :cve:`2023-51781`, :cve:`2023-51782`, :cve_mitre:`2023-52340`, :cve:`2024-0584` and :cve:`2024-0646`
+- linux-yocto/5.10: Ignore :cve:`2021-33630`, :cve:`2021-33631`, :cve:`2022-1508`, :cve:`2022-36402`, :cve:`2022-48619`, :cve:`2023-2430`, :cve:`2023-4610`, :cve:`2023-5972`, :cve:`2023-6039`, :cve:`2023-6200`, :cve:`2023-6531`, :cve:`2023-6546`, :cve:`2023-6622`, :cve:`2023-6679`, :cve:`2023-7192`, :cve:`2023-46343`, :cve:`2023-51042`, :cve:`2023-51043`, :cve:`2024-0193`, :cve:`2024-0443`, :cve:`2024-0562`, :cve:`2024-0582`, :cve:`2024-0639`, :cve:`2024-0641`, :cve:`2024-0775`, :cve:`2024-1085` and :cve:`2024-22705`
+- openssl: Fix :cve:`2024-0727`
+- python3-pycryptodome: Fix :cve:`2023-52323`
+- qemu: Fix :cve:`2023-6693`, :cve:`2023-42467` and :cve:`2024-24474`
+- vim: Fix :cve:`2024-22667`
+- xwayland: Fix :cve:`2023-6377` and :cve:`2023-6478`
+
+
+Fixes in Yocto-4.0.17
+~~~~~~~~~~~~~~~~~~~~~
+
+- bind: Upgrade to 9.18.24
+- bitbake: bitbake/codeparser.py: address ast module deprecations in py 3.12
+- bitbake: bitbake/lib/bs4/tests/test_tree.py: python 3.12 regex
+- bitbake: codeparser: replace deprecated ast.Str and 's'
+- bitbake: fetch2: Ensure that git LFS objects are available
+- bitbake: tests/fetch: Add real git lfs tests and decorator
+- bitbake: tests/fetch: git-lfs restore _find_git_lfs
+- bitbake: toaster/toastergui: Bug-fix verify given layer path only if import/add local layer
+- build-appliance-image: Update to kirkstone head revision
+- cmake: Unset CMAKE_CXX_IMPLICIT_INCLUDE_DIRECTORIES
+- contributor-guide: fix lore URL
+- curl: don't enable debug builds
+- cve_check: cleanup logging
+- dbus: Add missing :term:`CVE_PRODUCT`
+- dev-manual: sbom: Rephrase spdx creation
+- dev-manual: runtime-testing: gen-tapdevs need iptables installed
+- dev-manual: packages: clarify shared :term:`PR` service constraint
+- dev-manual: packages: need enough free space
+- dev-manual: start: remove idle line
+- feature-microblaze-versions.inc: python 3.12 regex
+- ghostscript: correct :term:`LICENSE` with AGPLv3
+- image-live.bbclass: LIVE_ROOTFS_TYPE support compression
+- kernel.bbclass: Set pkg-config variables for building modules
+- kernel.bbclass: introduce KERNEL_LOCALVERSION
+- kernel: fix localversion in v6.3+
+- kernel: make LOCALVERSION consistent between recipes
+- ldconfig-native: Fix to point correctly on the DT_NEEDED entries in an ELF file
+- librsvg: Fix do_package_qa error for librsvg
+- linux-firmware: upgrade to 20231211
+- linux-yocto/5.10: update to v5.10.210
+- linux-yocto/5.15: update to v5.15.150
+- manuals: add minimum RAM requirements
+- manuals: suppress excess use of "following" word
+- manuals: update disk space requirements
+- manuals: update references to buildtools
+- manuals: updates for building on Windows (WSL 2)
+- meta/lib/oeqa: python 3.12 regex
+- meta/recipes: python 3.12 regex
+- migration-guide: add release notes for 4.0.16
+- oeqa/selftest/oelib/buildhistory: git default branch
+- oeqa/selftest/recipetool: downgrade meson version to not use pyproject.toml
+- oeqa/selftest/recipetool: expect meson.bb
+- oeqa/selftest/recipetool: fix for python 3.12
+- oeqa/selftest/runtime_test: only run the virgl tests on qemux86-64
+- oeqa: replace deprecated assertEquals
+- openssl: Upgrade to 3.0.13
+- poky.conf: bump version for 4.0.17
+- populate_sdk_ext: use ConfigParser instead of SafeConfigParser
+- python3-jinja2: upgrade to 3.1.3
+- recipetool/create_buildsys_python: use importlib instead of imp
+- ref-manual: system-requirements: recommend buildtools for not supported distros
+- ref-manual: system-requirements: add info on buildtools-make-tarball
+- ref-manual: release-process: grammar fix
+- ref-manual: system-requirements: fix AlmaLinux variable name
+- ref-manual: system-requirements: modify anchor
+- ref-manual: system-requirements: remove outdated note
+- ref-manual: system-requirements: simplify supported distro requirements
+- ref-manual: system-requirements: update packages to build docs
+- scripts/runqemu: add qmp socket support
+- scripts/runqemu: direct mesa to use its own drivers, rather than ones provided by host distro
+- scripts/runqemu: fix regex escape sequences
+- scripts: python 3.12 regex
+- selftest: skip virgl gtk/sdl test on ubuntu 18.04
+- systemd: Only add myhostname to nsswitch.conf if in :term:`PACKAGECONFIG`
+- tzdata : Upgrade to 2024a
+- u-boot: Move UBOOT_INITIAL_ENV back to u-boot.inc
+- useradd-example: do not use unsupported clear text password
+- vim: upgrade to v9.0.2190
+- yocto-bsp: update to v5.15.150
+
+
+Known Issues in Yocto-4.0.17
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+- N/A
+
+
+Contributors to Yocto-4.0.17
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+- Adrian Freihofer
+- Alassane Yattara
+- Alexander Kanavin
+- Alexander Sverdlin
+- Archana Polampalli
+- Baruch Siach
+- Bruce Ashfield
+- Chen Qi
+- Chris Laplante
+- Deepthi Hemraj
+- Dhairya Nagodra
+- Fabien Mahot
+- Fabio Estevam
+- Hitendra Prajapati
+- Hugo SIMELIERE
+- Jermain Horsman
+- Kai Kang
+- Lee Chee Yang
+- Ludovic Jozeau
+- Michael Opdenacker
+- Ming Liu
+- Munehisa Kamata
+- Narpat Mali
+- Nikhil R
+- Paul Eggleton
+- Paulo Neves
+- Peter Marko
+- Philip Lorenz
+- Poonam Jadhav
+- Priyal Doshi
+- Ross Burton
+- Simone Weiß
+- Soumya Sambu
+- Steve Sakoman
+- Tim Orling
+- Trevor Gamblin
+- Vijay Anusuri
+- Vivek Kumbhar
+- Wang Mingyu
+- Zahir Hussain
+
+
+Repositories / Downloads for Yocto-4.0.17
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+poky
+
+- Repository Location: :yocto_git:`/poky`
+- Branch: :yocto_git:`kirkstone </poky/log/?h=kirkstone>`
+- Tag: :yocto_git:`yocto-4.0.17 </poky/log/?h=yocto-4.0.17>`
+- Git Revision: :yocto_git:`6d1a878bbf24c66f7186b270f823fcdf82e35383 </poky/commit/?id=6d1a878bbf24c66f7186b270f823fcdf82e35383>`
+- Release Artefact: poky-6d1a878bbf24c66f7186b270f823fcdf82e35383
+- sha: 3bc3010340b674f7b0dd0a7997f0167b2240b794fbd4aa28c0c4217bddd15e30
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.0.17/poky-6d1a878bbf24c66f7186b270f823fcdf82e35383.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.0.17/poky-6d1a878bbf24c66f7186b270f823fcdf82e35383.tar.bz2
+
+openembedded-core
+
+- Repository Location: :oe_git:`/openembedded-core`
+- Branch: :oe_git:`kirkstone </openembedded-core/log/?h=kirkstone>`
+- Tag: :oe_git:`yocto-4.0.17 </openembedded-core/log/?h=yocto-4.0.17>`
+- Git Revision: :oe_git:`2501534c9581c6c3439f525d630be11554a57d24 </openembedded-core/commit/?id=2501534c9581c6c3439f525d630be11554a57d24>`
+- Release Artefact: oecore-2501534c9581c6c3439f525d630be11554a57d24
+- sha: 52cc6cce9e920bdce078584b89136e81cc01e0c55616fab5fca6c3e04264c88e
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.0.17/oecore-2501534c9581c6c3439f525d630be11554a57d24.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.0.17/oecore-2501534c9581c6c3439f525d630be11554a57d24.tar.bz2
+
+meta-mingw
+
+- Repository Location: :yocto_git:`/meta-mingw`
+- Branch: :yocto_git:`kirkstone </meta-mingw/log/?h=kirkstone>`
+- Tag: :yocto_git:`yocto-4.0.17 </meta-mingw/log/?h=yocto-4.0.17>`
+- Git Revision: :yocto_git:`f6b38ce3c90e1600d41c2ebb41e152936a0357d7 </meta-mingw/commit/?id=f6b38ce3c90e1600d41c2ebb41e152936a0357d7>`
+- Release Artefact: meta-mingw-f6b38ce3c90e1600d41c2ebb41e152936a0357d7
+- sha: 7d57167c19077f4ab95623d55a24c2267a3a3fb5ed83688659b4c03586373b25
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.0.17/meta-mingw-f6b38ce3c90e1600d41c2ebb41e152936a0357d7.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.0.17/meta-mingw-f6b38ce3c90e1600d41c2ebb41e152936a0357d7.tar.bz2
+
+meta-gplv2
+
+- Repository Location: :yocto_git:`/meta-gplv2`
+- Branch: :yocto_git:`kirkstone </meta-gplv2/log/?h=kirkstone>`
+- Tag: :yocto_git:`yocto-4.0.17 </meta-gplv2/log/?h=yocto-4.0.17>`
+- Git Revision: :yocto_git:`d2f8b5cdb285b72a4ed93450f6703ca27aa42e8a </meta-gplv2/commit/?id=d2f8b5cdb285b72a4ed93450f6703ca27aa42e8a>`
+- Release Artefact: meta-gplv2-d2f8b5cdb285b72a4ed93450f6703ca27aa42e8a
+- sha: c386f59f8a672747dc3d0be1d4234b6039273d0e57933eb87caa20f56b9cca6d
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.0.17/meta-gplv2-d2f8b5cdb285b72a4ed93450f6703ca27aa42e8a.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.0.17/meta-gplv2-d2f8b5cdb285b72a4ed93450f6703ca27aa42e8a.tar.bz2
+
+meta-clang
+
+- Repository Location: :yocto_git:`/meta-clang`
+- Branch: :yocto_git:`kirkstone </meta-clang/log/?h=kirkstone>`
+- Tag: :yocto_git:`yocto-4.0.17 </meta-clang/log/?h=yocto-4.0.17>`
+- Git Revision: :yocto_git:`eebe4ff2e539f3ffb01c5060cc4ca8b226ea8b52 </meta-clang/commit/?id=eebe4ff2e539f3ffb01c5060cc4ca8b226ea8b52>`
+- Release Artefact: meta-clang-eebe4ff2e539f3ffb01c5060cc4ca8b226ea8b52
+- sha: 3299e96e069a22c0971e903fbc191f2427efffc83d910ac51bf0237caad01d17
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.0.17/meta-clang-eebe4ff2e539f3ffb01c5060cc4ca8b226ea8b52.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.0.17/meta-clang-eebe4ff2e539f3ffb01c5060cc4ca8b226ea8b52.tar.bz2
+
+bitbake
+
+- Repository Location: :oe_git:`/bitbake`
+- Branch: :oe_git:`2.0 </bitbake/log/?h=2.0>`
+- Tag: :oe_git:`yocto-4.0.17 </bitbake/log/?h=yocto-4.0.17>`
+- Git Revision: :oe_git:`40fd5f4eef7460ca67f32cfce8e229e67e1ff607 </bitbake/commit/?id=40fd5f4eef7460ca67f32cfce8e229e67e1ff607>`
+- Release Artefact: bitbake-40fd5f4eef7460ca67f32cfce8e229e67e1ff607
+- sha: 5d20a0e4c5d0fce44bd84778168714a261a30a4b83f67c88df3b8a7e7115e444
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.0.17/bitbake-40fd5f4eef7460ca67f32cfce8e229e67e1ff607.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.0.17/bitbake-40fd5f4eef7460ca67f32cfce8e229e67e1ff607.tar.bz2
+
+yocto-docs
+
+- Repository Location: :yocto_git:`/yocto-docs`
+- Branch: :yocto_git:`kirkstone </yocto-docs/log/?h=kirkstone>`
+- Tag: :yocto_git:`yocto-4.0.17 </yocto-docs/log/?h=yocto-4.0.17>`
+- Git Revision: :yocto_git:`08ce7db2aa3a38deb8f5aa59bafc78542986babb </yocto-docs/commit/?id=08ce7db2aa3a38deb8f5aa59bafc78542986babb>`
+
diff --git a/documentation/migration-guides/release-notes-4.0.2.rst b/documentation/migration-guides/release-notes-4.0.2.rst
new file mode 100644
index 0000000000..2f724e33c4
--- /dev/null
+++ b/documentation/migration-guides/release-notes-4.0.2.rst
@@ -0,0 +1,298 @@
+.. SPDX-License-Identifier: CC-BY-SA-2.0-UK
+
+Release notes for Yocto-4.0.2 (Kirkstone)
+-----------------------------------------
+
+Security Fixes in Yocto-4.0.2
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+- libxslt: Mark :cve:`2022-29824` as not applying
+- tiff: Add jbig :term:`PACKAGECONFIG` and clarify IGNORE :cve:`2022-1210`
+- tiff: mark :cve:`2022-1622` and :cve:`2022-1623` as invalid
+- pcre2:fix :cve:`2022-1586` Out-of-bounds read
+- curl: fix :cve:`2022-22576`, :cve:`2022-27775`, :cve:`2022-27776`, :cve:`2022-27774`, :cve:`2022-30115`, :cve:`2022-27780`, :cve:`2022-27781`, :cve:`2022-27779` and :cve:`2022-27782`
+- qemu: fix :cve:`2021-4206` and :cve:`2021-4207`
+- freetype: fix :cve:`2022-27404`, :cve:`2022-27405` and :cve:`2022-27406`
+
+Fixes in Yocto-4.0.2
+~~~~~~~~~~~~~~~~~~~~
+
+- alsa-plugins: fix libavtp vs. avtp packageconfig
+- archiver: don't use machine variables in shared recipes
+- archiver: use bb.note instead of echo
+- baremetal-image: fix broken symlink in do_rootfs
+- base-passwd: Disable shell for default users
+- bash: submit patch upstream
+- bind: upgrade 9.18.1 -> 9.18.2
+- binutils: Bump to latest 2.38 release branch
+- bitbake.conf: Make :term:`TCLIBC` and :term:`TCMODE` lazy assigned
+- bitbake: build: Add clean_stamp API function to allow removal of task stamps
+- bitbake: data: Do not depend on vardepvalueexclude flag
+- bitbake: fetch2/osc: Small fixes for osc fetcher
+- bitbake: server/process: Fix logging issues where only the first message was displayed
+- build-appliance-image: Update to kirkstone head revision
+- buildhistory.bbclass: fix shell syntax when using dash
+- cairo: Add missing GPLv3 license checksum entry
+- classes: rootfs-postcommands: add skip option to overlayfs_qa_check
+- cronie: upgrade 1.6.0 -> 1.6.1
+- cups: upgrade 2.4.1 -> 2.4.2
+- cve-check.bbclass: Added do_populate_sdk[recrdeptask].
+- cve-check: Add helper for symlink handling
+- cve-check: Allow warnings to be disabled
+- cve-check: Fix report generation
+- cve-check: Only include installed packages for rootfs manifest
+- cve-check: add support for Ignored CVEs
+- cve-check: fix return type in check_cves
+- cve-check: move update_symlinks to a library
+- cve-check: write empty fragment files in the text mode
+- cve-extra-exclusions: Add kernel CVEs
+- cve-update-db-native: make it possible to disable database updates
+- devtool: Fix _copy_file() TypeError
+- e2fsprogs: add alternatives handling of lsattr as well
+- e2fsprogs: update upstream status
+- efivar: add musl libc compatibility
+- epiphany: upgrade 42.0 -> 42.2
+- ffmpeg: upgrade 5.0 -> 5.0.1
+- fribidi: upgrade 1.0.11 -> 1.0.12
+- gcc-cross-canadian: Add nativesdk-zstd dependency
+- gcc-source: Fix incorrect task dependencies from ${B}
+- gcc: Upgrade to 11.3 release
+- gcc: depend on zstd-native
+- git: fix override syntax in :term:`RDEPENDS`
+- glib-2.0: upgrade 2.72.1 -> 2.72.2
+- glibc: Drop make-native dependency
+- go: upgrade 1.17.8 -> 1.17.10
+- gst-devtools: upgrade 1.20.1 -> 1.20.2
+- gstreamer1.0-libav: upgrade 1.20.1 -> 1.20.2
+- gstreamer1.0-omx: upgrade 1.20.1 -> 1.20.2
+- gstreamer1.0-plugins-bad: upgrade 1.20.1 -> 1.20.2
+- gstreamer1.0-plugins-base: upgrade 1.20.1 -> 1.20.2
+- gstreamer1.0-plugins-good: upgrade 1.20.1 -> 1.20.2
+- gstreamer1.0-plugins-ugly: upgrade 1.20.1 -> 1.20.2
+- gstreamer1.0-python: upgrade 1.20.1 -> 1.20.2
+- gstreamer1.0-rtsp-server: upgrade 1.20.1 -> 1.20.2
+- gstreamer1.0-vaapi: upgrade 1.20.1 -> 1.20.2
+- gstreamer1.0: upgrade 1.20.1 -> 1.20.2
+- gtk+3: upgrade 3.24.33 -> 3.24.34
+- gtk-doc: Fix potential shebang overflow on gtkdoc-mkhtml2
+- image.bbclass: allow overriding dependency on virtual/kernel:do_deploy
+- insane.bbclass: make sure to close .patch files
+- iso-codes: upgrade 4.9.0 -> 4.10.0
+- kernel-yocto.bbclass: Reset to exiting on non-zero return code at end of task
+- libcgroup: upgrade 2.0.1 -> 2.0.2
+- liberror-perl: Update sstate/equiv versions to clean cache
+- libinput: upgrade 1.19.3 -> 1.19.4
+- libpcre2: upgrade 10.39 -> 10.40
+- librepo: upgrade 1.14.2 -> 1.14.3
+- libseccomp: Add missing files for ptests
+- libseccomp: Correct :term:`LIC_FILES_CHKSUM`
+- libxkbcommon: upgrade 1.4.0 -> 1.4.1
+- libxml2: Upgrade 2.9.13 -> 2.9.14
+- license.bbclass: Bound beginline and endline in copy_license_files()
+- license_image.bbclass: Make QA errors fail the build
+- linux-firmware: add support for building snapshots
+- linux-firmware: package new Qualcomm firmware
+- linux-firmware: replace mkdir by install
+- linux-firmware: split ath3k firmware
+- linux-firmware: upgrade to 20220610
+- linux-yocto/5.10: update to v5.10.119
+- linux-yocto/5.15: Enable MDIO bus config
+- linux-yocto/5.15: bpf: explicitly disable unpriv eBPF by default
+- linux-yocto/5.15: cfg/xen: Move x86 configs to separate file
+- linux-yocto/5.15: update to v5.15.44
+- local.conf.sample: Update sstate url to new 'all' path
+- logrotate: upgrade 3.19.0 -> 3.20.1
+- lttng-modules: Fix build failure for 5.10.119+ and 5.15.44+ kernel
+- lttng-modules: fix build against 5.18-rc7+
+- lttng-modules: fix shell syntax
+- lttng-ust: upgrade 2.13.2 -> 2.13.3
+- lzo: Add further info to a patch and mark as Inactive-Upstream
+- makedevs: Don't use COPYING.patch just to add license file into ${S}
+- manuals: switch to the sstate mirror shared between all versions
+- mesa.inc: package 00-radv-defaults.conf
+- mesa: backport a patch to support compositors without zwp_linux_dmabuf_v1 again
+- mesa: upgrade to 22.0.3
+- meson.bbclass: add cython binary to cross/native toolchain config
+- mmc-utils: upgrade to latest revision
+- mobile-broadband-provider-info: upgrade 20220315 -> 20220511
+- ncurses: update to patchlevel 20220423
+- oeqa/selftest/cve_check: add tests for Ignored and partial reports
+- oeqa/selftest/cve_check: add tests for recipe and image reports
+- oescripts: change compare logic in OEListPackageconfigTests
+- openssl: Backport fix for ptest cert expiry
+- overlayfs: add docs about skipping QA check & service dependencies
+- ovmf: Fix native build with gcc-12
+- patch.py: make sure that patches/series file exists before quilt pop
+- pciutils: avoid lspci conflict with busybox
+- perl: Add dependency on make-native to avoid race issues
+- perl: Fix build with gcc-12
+- poky.conf: bump version for 4.0.2
+- popt: fix override syntax in :term:`RDEPENDS`
+- pypi.bbclass: Set :term:`CVE_PRODUCT` to :term:`PYPI_PACKAGE`
+- python3: Ensure stale empty python module directories don't break the build
+- python3: Remove problematic paths from sysroot files
+- python3: fix reproducibility issue with python3-core
+- python3: use built-in distutils for ptest, rather than setuptools' 'fork'
+- python: Avoid shebang overflow on python-config.py
+- rootfs-postcommands.bbclass: correct comments
+- rootfs.py: close kernel_abi_ver_file
+- rootfs.py: find .ko.zst kernel modules
+- rust-common: Drop LLVM_TARGET and simplify
+- rust-common: Ensure sstate signatures have correct dependencues for do_rust_gen_targets
+- rust-common: Fix for target definitions returning 'NoneType' for arm
+- rust-common: Fix native signature dependency issues
+- rust-common: Fix sstate signatures between arm hf and non-hf
+- sanity: Don't warn about make 4.2.1 for mint
+- sanity: Switch to make 4.0 as a minimum version
+- sed: Specify shell for "nobody" user in run-ptest
+- selftest/imagefeatures/overlayfs: Always append to :term:`DISTRO_FEATURES`
+- selftest/multiconfig: Test that multiconfigs in separate layers works
+- sqlite3: upgrade to 3.38.5
+- staging.bbclass: process direct dependencies in deterministic order
+- staging: Fix rare sysroot corruption issue
+- strace: Don't run ptest as "nobody"
+- systemd: Correct 0001-pass-correct-parameters-to-getdents64.patch
+- systemd: Correct path returned in sd_path_lookup()
+- systemd: Document future actions needed for set of musl patches
+- systemd: Drop 0001-test-parse-argument-Include-signal.h.patch
+- systemd: Drop 0002-don-t-use-glibc-specific-qsort_r.patch
+- systemd: Drop 0016-Hide-__start_BUS_ERROR_MAP-and-__stop_BUS_ERROR_MAP.patch
+- systemd: Drop redundant musl patches
+- systemd: Fix build regression with latest update
+- systemd: Remove __compare_fn_t type in musl-specific patch
+- systemd: Update patch status
+- systemd: systemd-systemctl: Support instance conf files during enable
+- systemd: update ``0008-add-missing-FTW_-macros-for-musl.patch``
+- systemd: upgrade 250.4 -> 250.5
+- uboot-sign: Fix potential index error issues
+- valgrind: submit arm patches upstream
+- vim: Upgrade to 8.2.5083
+- webkitgtk: upgrade to 2.36.3
+- wic/plugins/rootfs: Fix permissions when splitting rootfs folders across partitions
+- xwayland: upgrade 22.1.0 -> 22.1.1
+- xxhash: fix build with gcc 12
+- zip/unzip: mark all submittable patches as Inactive-Upstream
+
+Known Issues in Yocto-4.0.2
+~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+- There were build failures at the autobuilder due to a known scp issue on Fedora-36 hosts.
+
+Contributors to Yocto-4.0.2
+~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+- Alex Kiernan
+- Alexander Kanavin
+- Aryaman Gupta
+- Bruce Ashfield
+- Claudius Heine
+- Davide Gardenal
+- Dmitry Baryshkov
+- Ernst Sjöstrand
+- Felix Moessbauer
+- Gunjan Gupta
+- He Zhe
+- Hitendra Prajapati
+- Jack Mitchell
+- Jeremy Puhlman
+- Jiaqing Zhao
+- Joerg Vehlow
+- Jose Quaresma
+- Kai Kang
+- Khem Raj
+- Konrad Weihmann
+- Marcel Ziswiler
+- Markus Volk
+- Marta Rybczynska
+- Martin Jansa
+- Michael Opdenacker
+- Mingli Yu
+- Naveen Saini
+- Nick Potenski
+- Paulo Neves
+- Pavel Zhukov
+- Peter Kjellerstedt
+- Rasmus Villemoes
+- Richard Purdie
+- Robert Joslyn
+- Ross Burton
+- Samuli Piippo
+- Sean Anderson
+- Stefan Wiehler
+- Steve Sakoman
+- Sundeep Kokkonda
+- Tomasz Dziendzielski
+- Xiaobing Luo
+- Yi Zhao
+- leimaohui
+- Wang Mingyu
+
+Repositories / Downloads for Yocto-4.0.2
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+poky
+
+- Repository Location: :yocto_git:`/poky`
+- Branch: :yocto_git:`kirkstone </poky/log/?h=kirkstone>`
+- Tag: :yocto_git:`yocto-4.0.2 </poky/log/?h=yocto-4.0.2>`
+- Git Revision: :yocto_git:`a5ea426b1da472fc8549459fff3c1b8c6e02f4b5 </poky/commit/?id=a5ea426b1da472fc8549459fff3c1b8c6e02f4b5>`
+- Release Artefact: poky-a5ea426b1da472fc8549459fff3c1b8c6e02f4b5
+- sha: 474ddfacfed6661be054c161597a1a5273188dfe021b31d6156955d93c6b7359
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.0.2/poky-a5ea426b1da472fc8549459fff3c1b8c6e02f4b5.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.0.2/poky-a5ea426b1da472fc8549459fff3c1b8c6e02f4b5.tar.bz2
+
+openembedded-core
+
+- Repository Location: :oe_git:`/openembedded-core`
+- Branch: :oe_git:`kirkstone </openembedded-core/log/?h=kirkstone>`
+- Tag: :oe_git:`yocto-4.0.2 </openembedded-core/log/?h=yocto-4.0.2>`
+- Git Revision: :oe_git:`eea52e0c3d24c79464f4afdbc3c397e1cb982231 </openembedded-core/commit/?id=eea52e0c3d24c79464f4afdbc3c397e1cb982231>`
+- Release Artefact: oecore-eea52e0c3d24c79464f4afdbc3c397e1cb982231
+- sha: 252d5c2c2db7e14e7365fcc69d32075720b37d629894bae36305eba047a39907
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.0.2/oecore-eea52e0c3d24c79464f4afdbc3c397e1cb982231.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.0.2/oecore-eea52e0c3d24c79464f4afdbc3c397e1cb982231.tar.bz2
+
+meta-mingw
+
+- Repository Location: :yocto_git:`/meta-mingw`
+- Branch: :yocto_git:`kirkstone </meta-mingw/log/?h=kirkstone>`
+- Tag: :yocto_git:`yocto-4.0.2 </meta-mingw/log/?h=yocto-4.0.2>`
+- Git Revision: :yocto_git:`a90614a6498c3345704e9611f2842eb933dc51c1 </meta-mingw/commit/?id=a90614a6498c3345704e9611f2842eb933dc51c1>`
+- Release Artefact: meta-mingw-a90614a6498c3345704e9611f2842eb933dc51c1
+- sha: 49f9900bfbbc1c68136f8115b314e95d0b7f6be75edf36a75d9bcd1cca7c6302
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.0.2/meta-mingw-a90614a6498c3345704e9611f2842eb933dc51c1.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.0.2/meta-mingw-a90614a6498c3345704e9611f2842eb933dc51c1.tar.bz2
+
+meta-gplv2
+
+- Repository Location: :yocto_git:`/meta-gplv2`
+- Branch: :yocto_git:`kirkstone </meta-gplv2/log/?h=kirkstone>`
+- Tag: :yocto_git:`yocto-4.0.2 </meta-gplv2/log/?h=yocto-4.0.2>`
+- Git Revision: :yocto_git:`d2f8b5cdb285b72a4ed93450f6703ca27aa42e8a </meta-gplv2/commit/?id=d2f8b5cdb285b72a4ed93450f6703ca27aa42e8a>`
+- Release Artefact: meta-gplv2-d2f8b5cdb285b72a4ed93450f6703ca27aa42e8a
+- sha: c386f59f8a672747dc3d0be1d4234b6039273d0e57933eb87caa20f56b9cca6d
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.0.2/meta-gplv2-d2f8b5cdb285b72a4ed93450f6703ca27aa42e8a.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.0.2/meta-gplv2-d2f8b5cdb285b72a4ed93450f6703ca27aa42e8a.tar.bz2
+
+bitbake
+
+- Repository Location: :oe_git:`/bitbake`
+- Branch: :oe_git:`2.0 </bitbake/log/?h=2.0>`
+- Tag: :oe_git:`yocto-4.0.2 </bitbake/log/?h=yocto-4.0.2>`
+- Git Revision: :oe_git:`b8fd6f5d9959d27176ea016c249cf6d35ac8ba03 </bitbake/commit/?id=b8fd6f5d9959d27176ea016c249cf6d35ac8ba03>`
+- Release Artefact: bitbake-b8fd6f5d9959d27176ea016c249cf6d35ac8ba03
+- sha: 373818b1dee2c502264edf654d6d8f857b558865437f080e02d5ba6bb9e72cc3
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.0.2/bitbake-b8fd6f5d9959d27176ea016c249cf6d35ac8ba03.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.0.2/bitbake-b8fd6f5d9959d27176ea016c249cf6d35ac8ba03.tar.bz2
+
+yocto-docs
+
+- Repository Location: :yocto_git:`/yocto-docs`
+- Branch: :yocto_git:`kirkstone </yocto-docs/log/?h=kirkstone>`
+- Tag: :yocto_git:`yocto-4.0.2 </yocto-docs/log/?h=yocto-4.0.2>`
+- Git Revision: :yocto_git:`662294dccd028828d5c7e9fd8f5c8e14df53df4b </yocto-docs/commit/?id=662294dccd028828d5c7e9fd8f5c8e14df53df4b>`
diff --git a/documentation/migration-guides/release-notes-4.0.3.rst b/documentation/migration-guides/release-notes-4.0.3.rst
new file mode 100644
index 0000000000..46fe858cb7
--- /dev/null
+++ b/documentation/migration-guides/release-notes-4.0.3.rst
@@ -0,0 +1,316 @@
+.. SPDX-License-Identifier: CC-BY-SA-2.0-UK
+
+Release notes for Yocto-4.0.3 (Kirkstone)
+-----------------------------------------
+
+Security Fixes in Yocto-4.0.3
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+- binutils: fix :cve:`2019-1010204`
+- busybox: fix :cve:`2022-30065`
+- cups: ignore :cve:`2022-26691`
+- curl: Fix :cve:`2022-32205`, :cve:`2022-32206`, :cve:`2022-32207` and :cve:`2022-32208`
+- dpkg: fix :cve:`2022-1664`
+- ghostscript: fix :cve:`2022-2085`
+- harfbuzz: fix :cve:`2022-33068`
+- libtirpc: fix :cve:`2021-46828`
+- lua: fix :cve:`2022-33099`
+- nasm: ignore :cve:`2020-18974`
+- qemu: fix :cve:`2022-35414`
+- qemu: ignore :cve:`2021-20255` and :cve:`2019-12067`
+- tiff: fix :cve:`2022-1354`, :cve:`2022-1355`, :cve:`2022-2056`, :cve:`2022-2057` and :cve:`2022-2058`
+- u-boot: fix :cve:`2022-34835`
+- unzip: fix :cve:`2022-0529` and :cve:`2022-0530`
+
+
+Fixes in Yocto-4.0.3
+~~~~~~~~~~~~~~~~~~~~
+
+- alsa-state: correct license
+- at: take tarballs from debian
+- base.bbclass: Correct the test for obsolete license exceptions
+- base/reproducible: Change Source Date Epoch generation methods
+- bin_package: install into base_prefix
+- bind: Remove legacy python3 :term:`PACKAGECONFIG` code
+- bind: upgrade to 9.18.4
+- binutils: stable 2.38 branch updates
+- build-appliance-image: Update to kirkstone head revision
+- cargo_common.bbclass: enable bitbake vendoring for externalsrc
+- coreutils: Tweak packaging variable names for coreutils-dev
+- curl: backport openssl fix CN check error code
+- cve-check: hook cleanup to the BuildCompleted event, not CookerExit
+- cve-extra-exclusions: Clean up and ignore three CVEs (2xqemu and nasm)
+- devtool: finish: handle patching when :term:`S` points to subdir of a git repo
+- devtool: ignore pn- overrides when determining :term:`SRC_URI` overrides
+- docs: BB_HASHSERVE_UPSTREAM: update to new host
+- dropbear: break dependency on base package for -dev package
+- efivar: fix import functionality
+- encodings: update to 1.0.6
+- epiphany: upgrade to 42.3
+- externalsrc.bbclass: support crate fetcher on externalsrc
+- font-util: update 1.3.2 -> 1.3.3
+- gcc-runtime: Fix build when using gold
+- gcc-runtime: Fix missing :term:`MLPREFIX` in debug mappings
+- gcc-runtime: Pass -nostartfiles when building dummy libstdc++.so
+- gcc: Backport a fix for gcc bug 105039
+- git: upgrade to v2.35.4
+- glib-2.0: upgrade to 2.72.3
+- glib-networking: upgrade to 2.72.1
+- glibc : stable 2.35 branch updates
+- glibc-tests: Avoid reproducibility issues
+- glibc-tests: not clear :term:`BBCLASSEXTEND`
+- glibc: revert one upstream change to work around broken :term:`DEBUG_BUILD` build
+- glibc: stable 2.35 branch updates
+- gnupg: upgrade to 2.3.7
+- go: upgrade to v1.17.12
+- gobject-introspection-data: Disable cache for g-ir-scanner
+- gperf: Add a patch to work around reproducibility issues
+- gperf: Switch to upstream patch
+- gst-devtools: upgrade to 1.20.3
+- gstreamer1.0-libav: upgrade to 1.20.3
+- gstreamer1.0-omx: upgrade to 1.20.3
+- gstreamer1.0-plugins-bad: upgrade to 1.20.3
+- gstreamer1.0-plugins-base: upgrade to 1.20.3
+- gstreamer1.0-plugins-good: upgrade to 1.20.3
+- gstreamer1.0-plugins-ugly: upgrade to 1.20.3
+- gstreamer1.0-python: upgrade to 1.20.3
+- gstreamer1.0-rtsp-server: upgrade to 1.20.3
+- gstreamer1.0-vaapi: upgrade to 1.20.3
+- gstreamer1.0: upgrade to 1.20.3
+- gtk-doc: Remove hardcoded buildpath
+- harfbuzz: Fix compilation with clang
+- initramfs-framework: move storage mounts to actual rootfs
+- initscripts: run umountnfs as a KILL script
+- insane.bbclass: host-user-contaminated: Correct per package home path
+- insane: Fix buildpaths test to work with special devices
+- kernel-arch: Fix buildpaths leaking into external module compiles
+- kernel-devsrc: fix reproducibility and buildpaths QA warning
+- kernel-devsrc: ppc32: fix reproducibility
+- kernel-uboot.bbclass: Use vmlinux.initramfs when :term:`INITRAMFS_IMAGE_BUNDLE` set
+- kernel.bbclass: pass :term:`LD` also in savedefconfig
+- libffi: fix native build being not portable
+- libgcc: Fix standalone target builds with usrmerge distro feature
+- libmodule-build-perl: Use env utility to find perl interpreter
+- libsoup: upgrade to 3.0.7
+- libuv: upgrade to 1.44.2
+- linux-firmware: upgrade to 20220708
+- linux-firwmare: restore WHENCE_CHKSUM variable
+- linux-yocto-rt/5.15: update to -rt48 (and fix -stable merge)
+- linux-yocto/5.10: fix build_OID_registry/conmakehash buildpaths warning
+- linux-yocto/5.10: fix buildpaths issue with gen-mach-types
+- linux-yocto/5.10: fix buildpaths issue with pnmtologo
+- linux-yocto/5.10: update to v5.10.135
+- linux-yocto/5.15: drop obselete GPIO sysfs ABI
+- linux-yocto/5.15: fix build_OID_registry buildpaths warning
+- linux-yocto/5.15: fix buildpaths issue with gen-mach-types
+- linux-yocto/5.15: fix buildpaths issue with pnmtologo
+- linux-yocto/5.15: fix qemuppc buildpaths warning
+- linux-yocto/5.15: fix reproducibility issues
+- linux-yocto/5.15: update to v5.15.59
+- log4cplus: upgrade to 2.0.8
+- lttng-modules: Fix build failure for kernel v5.15.58
+- lttng-modules: upgrade to 2.13.4
+- lua: Fix multilib buildpath reproducibility issues
+- mkfontscale: upgrade to 1.2.2
+- oe-selftest-image: Ensure the image has sftp as well as dropbear
+- oe-selftest: devtool: test modify git recipe building from a subdir
+- oeqa/runtime/scp: Disable scp test for dropbear
+- oeqa/runtime: add test that the kernel has CONFIG_PREEMPT_RT enabled
+- oeqa/sdk: drop the nativesdk-python 2.x test
+- openssh: Add openssh-sftp-server to openssh :term:`RDEPENDS`
+- openssh: break dependency on base package for -dev package
+- openssl: update to 3.0.5
+- package.bbclass: Avoid stripping signed kernel modules in splitdebuginfo
+- package.bbclass: Fix base directory for debugsource files when using externalsrc
+- package.bbclass: Fix kernel source handling when not using externalsrc
+- package_manager/ipk: do not pipe stderr to stdout
+- packagegroup-core-ssh-dropbear: Add openssh-sftp-server recommendation
+- patch: handle if :term:`S` points to a subdirectory of a git repo
+- perf: fix reproducibility in 5.19+
+- perf: fix reproduciblity in older releases of Linux
+- perf: sort-pmuevents: really keep array terminators
+- perl: don't install Makefile.old into perl-ptest
+- poky.conf: bump version for 4.0.3
+- pulseaudio: add m4-native to :term:`DEPENDS`
+- python3: Backport patch to fix an issue in subinterpreters
+- qemu: Add :term:`PACKAGECONFIG` for brlapi
+- qemu: Avoid accidental librdmacm linkage
+- qemu: Avoid accidental libvdeplug linkage
+- qemu: Fix slirp determinism issue
+- qemu: add :term:`PACKAGECONFIG` for capstone
+- recipetool/devtool: Fix python egg whitespace issues in :term:`PACKAGECONFIG`
+- ref-manual: variables: remove sphinx directive from literal block
+- rootfs-postcommands.bbclass: move host-user-contaminated.txt to ${S}
+- ruby: add :term:`PACKAGECONFIG` for capstone
+- rust: fix issue building cross-canadian tools for aarch64 on x86_64
+- sanity.bbclass: Add ftps to accepted URI protocols for mirrors sanity
+- selftest/runtime_test/virgl: Disable for all almalinux
+- sstatesig: Include all dependencies in SPDX task signatures
+- strace: set :term:`COMPATIBLE_HOST` for riscv32
+- systemd: Added base_bindir into pkg_postinst:udev-hwdb.
+- udev-extraconf/initrdscripts/parted: Rename mount.blacklist -> mount.ignorelist
+- udev-extraconf/mount.sh: add LABELs to mountpoints
+- udev-extraconf/mount.sh: ignore lvm in automount
+- udev-extraconf/mount.sh: only mount devices on hotplug
+- udev-extraconf/mount.sh: save mount name in our tmp filecache
+- udev-extraconf: fix some systemd automount issues
+- udev-extraconf: force systemd-udevd to use shared MountFlags
+- udev-extraconf: let automount base directory configurable
+- udev-extraconf:mount.sh: fix a umount issue
+- udev-extraconf:mount.sh: fix path mismatching issues
+- vala: Fix on target wrapper buildpaths issue
+- vala: upgrade to 0.56.2
+- vim: upgrade to 9.0.0063
+- waffle: correctly request wayland-scanner executable
+- webkitgtk: upgrade to 2.36.4
+- weston: upgrade to 10.0.1
+- wic/plugins/rootfs: Fix NameError for 'orig_path'
+- wic: fix WicError message
+- wireless-regdb: upgrade to 2022.06.06
+- xdpyinfo: upgrade to 1.3.3
+- xev: upgrade to 1.2.5
+- xf86-input-synaptics: upgrade to 1.9.2
+- xmodmap: upgrade to 1.0.11
+- xorg-app: Tweak handling of compression changes in :term:`SRC_URI`
+- xserver-xorg: upgrade to 21.1.4
+- xwayland: upgrade to 22.1.3
+- yocto-bsps/5.10: fix buildpaths issue with gen-mach-types
+- yocto-bsps/5.10: fix buildpaths issue with pnmtologo
+- yocto-bsps/5.15: fix buildpaths issue with gen-mach-types
+- yocto-bsps/5.15: fix buildpaths issue with pnmtologo
+- yocto-bsps: buildpaths fixes
+- yocto-bsps: update to v5.10.130
+- yocto-bsps: buildpaths fixes
+- yocto-bsps: update to v5.15.54
+
+
+Known Issues in Yocto-4.0.3
+~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+- N/A
+
+
+Contributors to Yocto-4.0.3
+~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+- Ahmed Hossam
+- Alejandro Hernandez Samaniego
+- Alex Kiernan
+- Alexander Kanavin
+- Bruce Ashfield
+- Chanho Park
+- Christoph Lauer
+- David Bagonyi
+- Dmitry Baryshkov
+- He Zhe
+- Hitendra Prajapati
+- Jose Quaresma
+- Joshua Watt
+- Kai Kang
+- Khem Raj
+- Lee Chee Yang
+- Lucas Stach
+- Markus Volk
+- Martin Jansa
+- Maxime Roussin-Bélanger
+- Michael Opdenacker
+- Mihai Lindner
+- Ming Liu
+- Mingli Yu
+- Muhammad Hamza
+- Naveen
+- Pascal Bach
+- Paul Eggleton
+- Pavel Zhukov
+- Peter Bergin
+- Peter Kjellerstedt
+- Peter Marko
+- Pgowda
+- Raju Kumar Pothuraju
+- Richard Purdie
+- Robert Joslyn
+- Ross Burton
+- Sakib Sajal
+- Shruthi Ravichandran
+- Steve Sakoman
+- Sundeep Kokkonda
+- Thomas Roos
+- Tom Hochstein
+- Wentao Zhang
+- Yi Zhao
+- Yue Tao
+- gr embeter
+- leimaohui
+- Wang Mingyu
+
+
+Repositories / Downloads for Yocto-4.0.3
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+poky
+
+- Repository Location: :yocto_git:`/poky`
+- Branch: :yocto_git:`kirkstone </poky/log/?h=kirkstone>`
+- Tag: :yocto_git:`yocto-4.0.3 </poky/log/?h=yocto-4.0.3>`
+- Git Revision: :yocto_git:`387ab5f18b17c3af3e9e30dc58584641a70f359f </poky/commit/?id=387ab5f18b17c3af3e9e30dc58584641a70f359f>`
+- Release Artefact: poky-387ab5f18b17c3af3e9e30dc58584641a70f359f
+- sha: fe674186bdb0684313746caa9472134fc19e6f1443c274fe02c06cb1e675b404
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.0.3/poky-387ab5f18b17c3af3e9e30dc58584641a70f359f.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.0.3/poky-387ab5f18b17c3af3e9e30dc58584641a70f359f.tar.bz2
+
+openembedded-core
+
+- Repository Location: :oe_git:`/openembedded-core`
+- Branch: :oe_git:`kirkstone </openembedded-core/log/?h=kirkstone>`
+- Tag: :oe_git:`yocto-4.0.3 </openembedded-core/log/?h=yocto-4.0.3>`
+- Git Revision: :oe_git:`2cafa6ed5f0aa9df5a120b6353755d56c7c7800d </openembedded-core/commit/?id=2cafa6ed5f0aa9df5a120b6353755d56c7c7800d>`
+- Release Artefact: oecore-2cafa6ed5f0aa9df5a120b6353755d56c7c7800d
+- sha: 5181d3e8118c6112936637f01a07308b715e0e3d12c7eba338556747dfcabe92
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.0.3/oecore-2cafa6ed5f0aa9df5a120b6353755d56c7c7800d.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.0.3/oecore-2cafa6ed5f0aa9df5a120b6353755d56c7c7800d.tar.bz2
+
+meta-mingw
+
+- Repository Location: :yocto_git:`/meta-mingw`
+- Branch: :yocto_git:`kirkstone </meta-mingw/log/?h=kirkstone>`
+- Tag: :yocto_git:`yocto-4.0.3 </meta-mingw/log/?h=yocto-4.0.3>`
+- Git Revision: :yocto_git:`a90614a6498c3345704e9611f2842eb933dc51c1 </meta-mingw/commit/?id=a90614a6498c3345704e9611f2842eb933dc51c1>`
+- Release Artefact: meta-mingw-a90614a6498c3345704e9611f2842eb933dc51c1
+- sha: 49f9900bfbbc1c68136f8115b314e95d0b7f6be75edf36a75d9bcd1cca7c6302
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.0.3/meta-mingw-a90614a6498c3345704e9611f2842eb933dc51c1.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.0.3/meta-mingw-a90614a6498c3345704e9611f2842eb933dc51c1.tar.bz2
+
+meta-gplv2
+
+- Repository Location: :yocto_git:`/meta-gplv2`
+- Branch: :yocto_git:`kirkstone </meta-gplv2/log/?h=kirkstone>`
+- Tag: :yocto_git:`yocto-4.0.3 </meta-gplv2/log/?h=yocto-4.0.3>`
+- Git Revision: :yocto_git:`d2f8b5cdb285b72a4ed93450f6703ca27aa42e8a </meta-gplv2/commit/?id=d2f8b5cdb285b72a4ed93450f6703ca27aa42e8a>`
+- Release Artefact: meta-gplv2-d2f8b5cdb285b72a4ed93450f6703ca27aa42e8a
+- sha: c386f59f8a672747dc3d0be1d4234b6039273d0e57933eb87caa20f56b9cca6d
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.0.3/meta-gplv2-d2f8b5cdb285b72a4ed93450f6703ca27aa42e8a.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.0.3/meta-gplv2-d2f8b5cdb285b72a4ed93450f6703ca27aa42e8a.tar.bz2
+
+bitbake
+
+- Repository Location: :oe_git:`/bitbake`
+- Branch: :oe_git:`2.0 </bitbake/log/?h=2.0>`
+- Tag: :oe_git:`yocto-4.0.3 </bitbake/log/?h=yocto-4.0.3>`
+- Git Revision: :oe_git:`b8fd6f5d9959d27176ea016c249cf6d35ac8ba03 </bitbake/commit/?id=b8fd6f5d9959d27176ea016c249cf6d35ac8ba03>`
+- Release Artefact: bitbake-b8fd6f5d9959d27176ea016c249cf6d35ac8ba03
+- sha: 373818b1dee2c502264edf654d6d8f857b558865437f080e02d5ba6bb9e72cc3
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.0.3/bitbake-b8fd6f5d9959d27176ea016c249cf6d35ac8ba03.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.0.3/bitbake-b8fd6f5d9959d27176ea016c249cf6d35ac8ba03.tar.bz2
+
+yocto-docs
+
+- Repository Location: :yocto_git:`/yocto-docs`
+- Branch: :yocto_git:`kirkstone </yocto-docs/log/?h=kirkstone>`
+- Tag: :yocto_git:`yocto-4.0.3 </yocto-docs/log/?h=yocto-4.0.3>`
+- Git Revision: :yocto_git:`d9b3dcf65ef25c06f552482aba460dd16862bf96 </yocto-docs/commit/?id=d9b3dcf65ef25c06f552482aba460dd16862bf96>`
+
diff --git a/documentation/migration-guides/release-notes-4.0.4.rst b/documentation/migration-guides/release-notes-4.0.4.rst
new file mode 100644
index 0000000000..1d6e525bbc
--- /dev/null
+++ b/documentation/migration-guides/release-notes-4.0.4.rst
@@ -0,0 +1,301 @@
+.. SPDX-License-Identifier: CC-BY-SA-2.0-UK
+
+Release notes for Yocto-4.0.4 (Kirkstone)
+-----------------------------------------
+
+Security Fixes in Yocto-4.0.4
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+- binutils : fix :cve:`2022-38533`
+- curl: fix :cve:`2022-35252`
+- sqlite: fix :cve:`2022-35737`
+- grub2: fix :cve:`2021-3695`, :cve:`2021-3696`, :cve:`2021-3697`, :cve:`2022-28733`, :cve:`2022-28734` and :cve:`2022-28735`
+- u-boot: fix :cve:`2022-30552` and :cve:`2022-33967`
+- libxml2: Ignore :cve:`2016-3709`
+- libtiff: fix :cve:`2022-34526`
+- zlib: fix :cve:`2022-37434`
+- gnutls: fix :cve:`2022-2509`
+- u-boot: fix :cve:`2022-33103`
+- qemu: fix :cve:`2021-3507`, :cve:`2021-3929`, :cve:`2021-4158`, :cve:`2022-0216` and :cve:`2022-0358`
+
+
+Fixes in Yocto-4.0.4
+~~~~~~~~~~~~~~~~~~~~
+
+- apr: Cache configure tests which use AC_TRY_RUN
+- apr: Use correct strerror_r implementation based on libc type
+- apt: fix nativesdk-apt build failure during the second time build
+- archiver.bbclass: remove unsed do_deploy_archives[dirs]
+- archiver.bbclass: some recipes that uses the kernelsrc bbclass uses the shared source
+- autoconf: Fix strict prototype errors in generated tests
+- autoconf: Update K & R stype functions
+- bind: upgrade to 9.18.5
+- bitbake.conf: set :term:`BB_DEFAULT_UMASK` using ??=
+- bitbake: ConfHandler/BBHandler: Improve comment error messages and add tests
+- bitbake: ConfHandler: Remove lingering close
+- bitbake: bb/utils: movefile: use the logger for printing
+- bitbake: bb/utils: remove: check the path again the expand python glob
+- bitbake: bitbake-user-manual: Correct description of the ??= operator
+- bitbake: bitbake-user-manual: npm fetcher: improve description of :term:`SRC_URI` format
+- bitbake: bitbake: bitbake-user-manual: hashserv can be accessed on a dedicated domain
+- bitbake: bitbake: runqueue: add cpu/io pressure regulation
+- bitbake: bitbake: runqueue: add memory pressure regulation
+- bitbake: cooker: Drop sre_constants usage
+- bitbake: doc: bitbake-user-manual: add explicit target for crates fetcher
+- bitbake: doc: bitbake-user-manual: document npm and npmsw fetchers
+- bitbake: event.py: ignore exceptions from stdout and sterr operations in atexit
+- bitbake: fetch2: Ensure directory exists before creating symlink
+- bitbake: fetch2: gitsm: fix incorrect handling of git submodule relative urls
+- bitbake: runqueue: Change pressure file warning to a note
+- bitbake: runqueue: Fix unihash cache mismatch issues
+- bitbake: toaster: fix kirkstone version
+- bitbake: utils: Pass lock argument in fileslocked
+- bluez5: upgrade to 5.65
+- boost: fix install of fiber shared libraries
+- cairo: Adapt the license information based on what is being built
+- classes: cve-check: Get shared database lock
+- cmake: remove CMAKE_ASM_FLAGS variable in toolchain file
+- connman: Backports for security fixes
+- core-image.bbclass: Exclude openssh complementary packages
+- cracklib: Drop using register keyword
+- cracklib: upgrade to 2.9.8
+- create-spdx: Fix supplier field
+- create-spdx: handle links to inaccessible locations
+- create-spdx: ignore packing control files from ipk and deb
+- cve-check: Don't use f-strings
+- cve-check: close cursors as soon as possible
+- devtool/upgrade: catch bb.fetch2.decodeurl errors
+- devtool/upgrade: correctly clean up when recipe filename isn't yet known
+- devtool: error out when workspace is using old override syntax
+- ell: upgrade to 0.50
+- epiphany: upgrade to 42.4
+- externalsrc: Don't wipe out src dir when EXPORT_FUNCTIONS is used.
+- gcc-multilib-config: Fix i686 toolchain relocation issues
+- gcr: Define _GNU_SOURCE
+- gdk-pixbuf: upgrade to 2.42.9
+- glib-networking: upgrade to 2.72.2
+- go: upgrade to v1.17.13
+- insane.bbclass: Skip patches not in oe-core by full path
+- iso-codes: upgrade to 4.11.0
+- kernel-fitimage.bbclass: add padding algorithm property in config nodes
+- kernel-fitimage.bbclass: only package unique DTBs
+- kernel: Always set :term:`CC` and :term:`LD` for the kernel build
+- kernel: Use consistent make flags for menuconfig
+- lib:npm_registry: initial checkin
+- libatomic-ops: upgrade to 7.6.14
+- libcap: upgrade to 2.65
+- libjpeg-turbo: upgrade to 2.1.4
+- libpam: use /run instead of /var/run in systemd tmpfiles
+- libtasn1: upgrade to 4.19.0
+- liburcu: upgrade to 0.13.2
+- libwebp: upgrade to 1.2.4
+- libwpe: upgrade to 1.12.3
+- libxml2: Port gentest.py to Python-3
+- lighttpd: upgrade to 1.4.66
+- linux-yocto/5.10: update genericx86* machines to v5.10.135
+- linux-yocto/5.10: update to v5.10.137
+- linux-yocto/5.15: update genericx86* machines to v5.15.59
+- linux-yocto/5.15: update to v5.15.62
+- linux-yocto: Fix :term:`COMPATIBLE_MACHINE` regex match
+- linux-yocto: prepend the value with a space when append to :term:`KERNEL_EXTRA_ARGS`
+- lttng-modules: fix 5.19+ build
+- lttng-modules: fix build against mips and v5.19 kernel
+- lttng-modules: fix build for kernel 5.10.137
+- lttng-modules: replace mips compaction fix with upstream change
+- lz4: upgrade to 1.9.4
+- maintainers: update opkg maintainer
+- meta: introduce :term:`UBOOT_MKIMAGE_KERNEL_TYPE`
+- migration guides: add missing release notes
+- mobile-broadband-provider-info: upgrade to 20220725
+- nativesdk: Clear :term:`TUNE_FEATURES`
+- npm: replace 'npm pack' call by 'tar czf'
+- npm: return content of 'package.json' in 'npm_pack'
+- npm: take 'version' directly from 'package.json'
+- npm: use npm_registry to cache package
+- oeqa/gotoolchain: put writable files in the Go module cache
+- oeqa/gotoolchain: set CGO_ENABLED=1
+- oeqa/parselogs: add qemuarmv5 arm-charlcd masking
+- oeqa/qemurunner: add run_serial() comment
+- oeqa/selftest: rename git.py to intercept.py
+- oeqa: qemurunner: Report UNIX Epoch timestamp on login
+- package_rpm: Do not replace square brackets in %files
+- packagegroup-self-hosted: update for strace
+- parselogs: Ignore xf86OpenConsole error
+- perf: Fix reproducibility issues with 5.19 onwards
+- pinentry: enable _XOPEN_SOURCE on musl for wchar usage in curses
+- poky.conf: add ubuntu-22.04 to tested distros
+- poky.conf: bump version for 4.0.4
+- pseudo: Update to include recent upstream minor fixes
+- python3-pip: Fix :term:`RDEPENDS` after the update
+- ref-manual: add numa to machine features
+- relocate_sdk.py: ensure interpreter size error causes relocation to fail
+- rootfs-postcommands.bbclass: avoid moving ssh host keys if etc is writable
+- rootfs.py: dont try to list installed packages for baremetal images
+- rootfspostcommands.py: Cleanup subid backup files generated by shadow-utils
+- ruby: drop capstone support
+- runqemu: Add missing space on default display option
+- runqemu: display host uptime when starting
+- sanity: add a comment to ensure CONNECTIVITY_CHECK_URIS is correct
+- scripts/oe-setup-builddir: make it known where configurations come from
+- scripts/runqemu.README: fix typos and trailing whitespaces
+- selftest/wic: Tweak test case to not depend on kernel size
+- shadow: Avoid nss warning/error with musl
+- shadow: Enable subid support
+- system-requirements.rst: Add Ubuntu 22.04 to list of supported distros
+- systemd: Add 'no-dns-fallback' :term:`PACKAGECONFIG` option
+- systemd: Fix unwritable /var/lock when no sysvinit handling
+- sysvinit-inittab/start_getty: Fix respawn too fast
+- tcp-wrappers: Fix implicit-function-declaration warnings
+- tzdata: upgrade to 2022b
+- util-linux: Remove --enable-raw from :term:`EXTRA_OECONF`
+- vala: upgrade to 0.56.3
+- vim: Upgrade to 9.0.0453
+- watchdog: Include needed system header for function decls
+- webkitgtk: upgrade to 2.36.5
+- weston: upgrade to 10.0.2
+- wic/bootimg-efi: use cross objcopy when building unified kernel image
+- wic: add target tools to PATH when executing native commands
+- wic: depend on cross-binutils
+- wireless-regdb: upgrade to 2022.08.12
+- wpebackend-fdo: upgrade to 1.12.1
+- xinetd: Pass missing -D_GNU_SOURCE
+- xz: update to 5.2.6
+
+
+Known Issues in Yocto-4.0.4
+~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+- N/A
+
+
+Contributors to Yocto-4.0.4
+~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+- Alejandro Hernandez Samaniego
+- Alex Stewart
+- Alexander Kanavin
+- Alexandre Belloni
+- Andrei Gherzan
+- Anuj Mittal
+- Aryaman Gupta
+- Awais Belal
+- Beniamin Sandu
+- Bertrand Marquis
+- Bruce Ashfield
+- Changqing Li
+- Chee Yang Lee
+- Daiane Angolini
+- Enrico Scholz
+- Ernst Sjöstrand
+- Gennaro Iorio
+- Hitendra Prajapati
+- Jacob Kroon
+- Jon Mason
+- Jose Quaresma
+- Joshua Watt
+- Kai Kang
+- Khem Raj
+- Kristian Amlie
+- LUIS ENRIQUEZ
+- Mark Hatle
+- Martin Beeger
+- Martin Jansa
+- Mateusz Marciniec
+- Michael Opdenacker
+- Mihai Lindner
+- Mikko Rapeli
+- Ming Liu
+- Niko Mauno
+- Ola x Nilsson
+- Otavio Salvador
+- Paul Eggleton
+- Pavel Zhukov
+- Peter Bergin
+- Peter Kjellerstedt
+- Peter Marko
+- Rajesh Dangi
+- Randy MacLeod
+- Rasmus Villemoes
+- Richard Purdie
+- Robert Joslyn
+- Roland Hieber
+- Ross Burton
+- Sakib Sajal
+- Shubham Kulkarni
+- Steve Sakoman
+- Ulrich Ölmann
+- Yang Xu
+- Yongxin Liu
+- ghassaneben
+- pgowda
+- Wang Mingyu
+
+Repositories / Downloads for Yocto-4.0.4
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+poky
+
+- Repository Location: :yocto_git:`/poky`
+- Branch: :yocto_git:`kirkstone </poky/log/?h=kirkstone>`
+- Tag: :yocto_git:`yocto-4.0.4 </poky/log/?h=yocto-4.0.4>`
+- Git Revision: :yocto_git:`d64bef1c7d713b92a51228e5ade945835e5a94a4 </poky/commit/?id=d64bef1c7d713b92a51228e5ade945835e5a94a4>`
+- Release Artefact: poky-d64bef1c7d713b92a51228e5ade945835e5a94a4
+- sha: b5e92506b31f88445755bad2f45978b747ad1a5bea66ca897370542df5f1e7db
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.0.4/poky-d64bef1c7d713b92a51228e5ade945835e5a94a4.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.0.4/poky-d64bef1c7d713b92a51228e5ade945835e5a94a4.tar.bz2
+
+openembedded-core
+
+- Repository Location: :oe_git:`/openembedded-core`
+- Branch: :oe_git:`kirkstone </openembedded-core/log/?h=kirkstone>`
+- Tag: :oe_git:`yocto-4.0.4 </openembedded-core/log/?h=yocto-4.0.4>`
+- Git Revision: :oe_git:`f7766da462905ec67bf549d46b8017be36cd5b2a </openembedded-core/commit/?id=f7766da462905ec67bf549d46b8017be36cd5b2a>`
+- Release Artefact: oecore-f7766da462905ec67bf549d46b8017be36cd5b2a
+- sha: ce0ac011474db5e5f0bb1be3fb97f890a02e46252a719dbcac5813268e48ff16
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.0.4/oecore-f7766da462905ec67bf549d46b8017be36cd5b2a.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.0.4/oecore-f7766da462905ec67bf549d46b8017be36cd5b2a.tar.bz2
+
+meta-mingw
+
+- Repository Location: :yocto_git:`/meta-mingw`
+- Branch: :yocto_git:`kirkstone </meta-mingw/log/?h=kirkstone>`
+- Tag: :yocto_git:`yocto-4.0.4 </meta-mingw/log/?h=yocto-4.0.4>`
+- Git Revision: :yocto_git:`a90614a6498c3345704e9611f2842eb933dc51c1 </meta-mingw/commit/?id=a90614a6498c3345704e9611f2842eb933dc51c1>`
+- Release Artefact: meta-mingw-a90614a6498c3345704e9611f2842eb933dc51c1
+- sha: 49f9900bfbbc1c68136f8115b314e95d0b7f6be75edf36a75d9bcd1cca7c6302
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.0.4/meta-mingw-a90614a6498c3345704e9611f2842eb933dc51c1.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.0.4/meta-mingw-a90614a6498c3345704e9611f2842eb933dc51c1.tar.bz2
+
+meta-gplv2
+
+- Repository Location: :yocto_git:`/meta-gplv2`
+- Branch: :yocto_git:`kirkstone </meta-gplv2/log/?h=kirkstone>`
+- Tag: :yocto_git:`yocto-4.0.4 </meta-gplv2/log/?h=yocto-4.0.4>`
+- Git Revision: :yocto_git:`d2f8b5cdb285b72a4ed93450f6703ca27aa42e8a </meta-gplv2/commit/?id=d2f8b5cdb285b72a4ed93450f6703ca27aa42e8a>`
+- Release Artefact: meta-gplv2-d2f8b5cdb285b72a4ed93450f6703ca27aa42e8a
+- sha: c386f59f8a672747dc3d0be1d4234b6039273d0e57933eb87caa20f56b9cca6d
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.0.4/meta-gplv2-d2f8b5cdb285b72a4ed93450f6703ca27aa42e8a.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.0.4/meta-gplv2-d2f8b5cdb285b72a4ed93450f6703ca27aa42e8a.tar.bz2
+
+bitbake
+
+- Repository Location: :oe_git:`/bitbake`
+- Branch: :oe_git:`2.0 </bitbake/log/?h=2.0>`
+- Tag: :oe_git:`yocto-4.0.4 </bitbake/log/?h=yocto-4.0.4>`
+- Git Revision: :oe_git:`ac576d6fad6bba0cfea931883f25264ea83747ca </bitbake/commit/?id=ac576d6fad6bba0cfea931883f25264ea83747ca>`
+- Release Artefact: bitbake-ac576d6fad6bba0cfea931883f25264ea83747ca
+- sha: 526c2768874eeda61ade8c9ddb3113c90d36ef44a026d6690f02de6f3dd0ea12
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.0.4/bitbake-ac576d6fad6bba0cfea931883f25264ea83747ca.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.0.4/bitbake-ac576d6fad6bba0cfea931883f25264ea83747ca.tar.bz2
+
+yocto-docs
+
+- Repository Location: :yocto_git:`/yocto-docs`
+- Branch: :yocto_git:`kirkstone </yocto-docs/log/?h=kirkstone>`
+- Tag: :yocto_git:`yocto-4.0.4 </yocto-docs/log/?h=yocto-4.0.4>`
+- Git Revision: :yocto_git:`f632dad24c39778f948014029e74db3c871d9d21 </yocto-docs/commit/?id=f632dad24c39778f948014029e74db3c871d9d21>`
diff --git a/documentation/migration-guides/release-notes-4.0.5.rst b/documentation/migration-guides/release-notes-4.0.5.rst
new file mode 100644
index 0000000000..cdfe85b750
--- /dev/null
+++ b/documentation/migration-guides/release-notes-4.0.5.rst
@@ -0,0 +1,198 @@
+.. SPDX-License-Identifier: CC-BY-SA-2.0-UK
+
+Release notes for Yocto-4.0.5 (Kirkstone)
+-----------------------------------------
+
+Security Fixes in Yocto-4.0.5
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+- qemu: fix :cve:`2021-3750`, :cve:`2021-3611` and :cve:`2022-2962`
+- binutils : fix :cve:`2022-38126`, :cve:`2022-38127` and :cve:`2022-38128`
+- tff: fix :cve:`2022-2867`, :cve:`2022-2868` and :cve:`2022-2869`
+- inetutils: fix :cve:`2022-39028`
+- go: fix :cve:`2022-27664`
+
+Fixes in Yocto-4.0.5
+~~~~~~~~~~~~~~~~~~~~
+
+- Revert "gcc-cross-canadian: Add symlink to real-ld alongside other symlinks"
+- bind: upgrade to 9.18.7
+- binutils: stable 2.38 branch updates (dc2474e7)
+- bitbake: Fix npm to use https rather than http
+- bitbake: asyncrpc/client: Fix unix domain socket chdir race issues
+- bitbake: bitbake: Add copyright headers where missing
+- bitbake: gitsm: Error out if submodule refers to parent repo
+- bitbake: runqueue: Drop deadlock breaking force fail
+- bitbake: runqueue: Ensure deferred tasks are sorted by multiconfig
+- bitbake: runqueue: Improve deadlock warning messages
+- bitbake: siggen: Fix insufficent entropy in sigtask file names
+- bitbake: tests/fetch: Allow handling of a file:// url within a submodule
+- build-appliance-image: Update to kirkstone head revision (4a88ada)
+- busybox: add devmem 128-bit support
+- classes: files: Extend overlayfs-etc class
+- coreutils: add openssl :term:`PACKAGECONFIG`
+- create-pull-request: don't switch the git remote protocol to git://
+- dev-manual: fix reference to BitBake user manual
+- expat: upgrade 2.4.8 -> 2.4.9
+- files: overlayfs-etc: refactor preinit template
+- gcc-cross-canadian: add default plugin linker
+- gcc: add arm-v9 support
+- git: upgrade 2.35.4 -> 2.35.5
+- glibc-locale: explicitly remove empty dirs in ${libdir}
+- glibc-tests: use += instead of :append
+- glibc: stable 2.35 branch updates.(8d125a1f)
+- go-native: switch from SRC_URI:append to :term:`SRC_URI` +=
+- image_types_wic.bbclass: fix cross binutils dependency
+- kern-tools: allow 'y' or 'm' to avoid config audit warnings
+- kern-tools: fix queue processing in relative :term:`TOPDIR` configurations
+- kernel-yocto: allow patch author date to be commit date
+- libpng: upgrade to 1.6.38
+- linux-firmware: package new Qualcomm firmware
+- linux-firmware: upgrade 20220708 -> 20220913
+- linux-libc-headers: switch from SRC_URI:append to :term:`SRC_URI` +=
+- linux-yocto-dev: add qemuarm64
+- linux-yocto/5.10: update to v5.10.149
+- linux-yocto/5.15: cfg: fix ACPI warnings for -tiny
+- linux-yocto/5.15: update to v5.15.68
+- local.conf.sample: correct the location of public hashserv
+- ltp: Fix pread02 case trigger the glibc overflow detection
+- lttng-modules: Fix crash on powerpc64
+- lttng-tools: Disable on qemuriscv32
+- lttng-tools: Disable on riscv32
+- migration-guides: add 4.0.4 release notes
+- oeqa/runtime/dnf: fix typo
+- own-mirrors: add crate
+- perf: Fix for recent kernel upgrades
+- poky.conf: bump version for 4.0.5
+- poky.yaml.in: update version requirements
+- python3-rfc3986-validator: switch from SRC_URI:append to :term:`SRC_URI` +=
+- python3: upgrade 3.10.4 -> 3.10.7
+- qemu: Backport patches from upstream to support float128 on qemu-ppc64
+- rpm: Remove -Wimplicit-function-declaration warnings
+- rpm: update to 4.17.1
+- rsync: update to 3.2.5
+- stress-cpu: disable float128 math on powerpc64 to avoid SIGILL
+- tune-neoversen2: support tune-neoversen2 base on armv9a
+- tzdata: update to 2022d
+- u-boot: switch from append to += in :term:`SRC_URI`
+- uninative: Upgrade to 3.7 to work with glibc 2.36
+- vim: Upgrade to 9.0.0598
+- webkitgtk: Update to 2.36.7
+
+
+Known Issues in Yocto-4.0.5
+~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+- There are recent CVEs in key components such as openssl. They are not included in this release as it was built before the issues were known and fixes were available but these are now available on the kirkstone branch.
+
+
+Contributors to Yocto-4.0.5
+~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+- Adrian Freihofer
+- Alexander Kanavin
+- Alexandre Belloni
+- Bhabu Bindu
+- Bruce Ashfield
+- Chen Qi
+- Daniel McGregor
+- Denys Dmytriyenko
+- Dmitry Baryshkov
+- Florin Diaconescu
+- He Zhe
+- Joshua Watt
+- Khem Raj
+- Martin Jansa
+- Michael Halstead
+- Michael Opdenacker
+- Mikko Rapeli
+- Mingli Yu
+- Neil Horman
+- Pavel Zhukov
+- Richard Purdie
+- Robert Joslyn
+- Ross Burton
+- Ruiqiang Hao
+- Samuli Piippo
+- Steve Sakoman
+- Sundeep KOKKONDA
+- Teoh Jay Shen
+- Tim Orling
+- Virendra Thakur
+- Vyacheslav Yurkov
+- Xiangyu Chen
+- Yash Shinde
+- pgowda
+- Wang Mingyu
+
+
+Repositories / Downloads for Yocto-4.0.5
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+poky
+
+- Repository Location: :yocto_git:`/poky`
+- Branch: :yocto_git:`kirkstone </poky/log/?h=kirkstone>`
+- Tag: :yocto_git:`yocto-4.0.5 </poky/log/?h=yocto-4.0.5>`
+- Git Revision: :yocto_git:`2e79b199114b25d81bfaa029ccfb17676946d20d </poky/commit/?id=2e79b199114b25d81bfaa029ccfb17676946d20d>`
+- Release Artefact: poky-2e79b199114b25d81bfaa029ccfb17676946d20d
+- sha: 7bcf3f901d4c5677fc95944ab096e9e306f4c758a658dde5befd16861ad2b8ea
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.0.5/poky-2e79b199114b25d81bfaa029ccfb17676946d20d.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.0.5/poky-2e79b199114b25d81bfaa029ccfb17676946d20d.tar.bz2
+
+openembedded-core
+
+- Repository Location: :oe_git:`/openembedded-core`
+- Branch: :oe_git:`kirkstone </openembedded-core/log/?h=kirkstone>`
+- Tag: :oe_git:`yocto-4.0.5 </openembedded-core/log/?h=yocto-4.0.5>`
+- Git Revision: :oe_git:`fbdf93f43ff4b876487e1f26752598ec8abcb46e </openembedded-core/commit/?id=fbdf93f43ff4b876487e1f26752598ec8abcb46e>`
+- Release Artefact: oecore-fbdf93f43ff4b876487e1f26752598ec8abcb46e
+- sha: 2d9b5a8e9355b633bb57633cc8c2d319ba13fe4721f79204e61116b3faa6cbf1
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.0.5/oecore-fbdf93f43ff4b876487e1f26752598ec8abcb46e.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.0.5/oecore-fbdf93f43ff4b876487e1f26752598ec8abcb46e.tar.bz2
+
+meta-mingw
+
+- Repository Location: :yocto_git:`/meta-mingw`
+- Branch: :yocto_git:`kirkstone </meta-mingw/log/?h=kirkstone>`
+- Tag: :yocto_git:`yocto-4.0.5 </meta-mingw/log/?h=yocto-4.0.5>`
+- Git Revision: :yocto_git:`a90614a6498c3345704e9611f2842eb933dc51c1 </meta-mingw/commit/?id=a90614a6498c3345704e9611f2842eb933dc51c1>`
+- Release Artefact: meta-mingw-a90614a6498c3345704e9611f2842eb933dc51c1
+- sha: 49f9900bfbbc1c68136f8115b314e95d0b7f6be75edf36a75d9bcd1cca7c6302
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.0.5/meta-mingw-a90614a6498c3345704e9611f2842eb933dc51c1.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.0.5/meta-mingw-a90614a6498c3345704e9611f2842eb933dc51c1.tar.bz2
+
+meta-gplv2
+
+- Repository Location: :yocto_git:`/meta-gplv2`
+- Branch: :yocto_git:`kirkstone </meta-gplv2/log/?h=kirkstone>`
+- Tag: :yocto_git:`yocto-4.0.5 </meta-gplv2/log/?h=yocto-4.0.5>`
+- Git Revision: :yocto_git:`d2f8b5cdb285b72a4ed93450f6703ca27aa42e8a </meta-gplv2/commit/?id=d2f8b5cdb285b72a4ed93450f6703ca27aa42e8a>`
+- Release Artefact: meta-gplv2-d2f8b5cdb285b72a4ed93450f6703ca27aa42e8a
+- sha: c386f59f8a672747dc3d0be1d4234b6039273d0e57933eb87caa20f56b9cca6d
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.0.5/meta-gplv2-d2f8b5cdb285b72a4ed93450f6703ca27aa42e8a.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.0.5/meta-gplv2-d2f8b5cdb285b72a4ed93450f6703ca27aa42e8a.tar.bz2
+
+bitbake
+
+- Repository Location: :oe_git:`/bitbake`
+- Branch: :oe_git:`2.0 </bitbake/log/?h=2.0>`
+- Tag: :oe_git:`yocto-4.0.5 </bitbake/log/?h=yocto-4.0.5>`
+- Git Revision: :oe_git:`c90d57497b9bcd237c3ae810ee8edb5b0d2d575a </bitbake/commit/?id=c90d57497b9bcd237c3ae810ee8edb5b0d2d575a>`
+- Release Artefact: bitbake-c90d57497b9bcd237c3ae810ee8edb5b0d2d575a
+- sha: 5698d548ce179036e46a24f80b213124c8825a4f443fa1d6be7ab0f70b01a9ff
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.0.5/bitbake-c90d57497b9bcd237c3ae810ee8edb5b0d2d575a.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.0.5/bitbake-c90d57497b9bcd237c3ae810ee8edb5b0d2d575a.tar.bz2
+
+yocto-docs
+
+- Repository Location: :yocto_git:`/yocto-docs`
+- Branch: :yocto_git:`kirkstone </yocto-docs/log/?h=kirkstone>`
+- Tag: :yocto_git:`yocto-4.0.5 </yocto-docs/log/?h=yocto-4.0.5>`
+- Git Revision: :yocto_git:`8c2f9f54e29781f4ee72e81eeaa12ceaa82dc2d3 </yocto-docs/commit/?id=8c2f9f54e29781f4ee72e81eeaa12ceaa82dc2d3>`
+
diff --git a/documentation/migration-guides/release-notes-4.0.6.rst b/documentation/migration-guides/release-notes-4.0.6.rst
new file mode 100644
index 0000000000..76d23fcf0c
--- /dev/null
+++ b/documentation/migration-guides/release-notes-4.0.6.rst
@@ -0,0 +1,313 @@
+.. SPDX-License-Identifier: CC-BY-SA-2.0-UK
+
+Release notes for Yocto-4.0.6 (Kirkstone)
+-----------------------------------------
+
+Security Fixes in Yocto-4.0.6
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+- bash: Fix :cve:`2022-3715`
+- curl: Fix :cve:`2022-32221`, :cve:`2022-42915` and :cve:`2022-42916`
+- dbus: Fix :cve:`2022-42010`, :cve:`2022-42011` and :cve:`2022-42012`
+- dropbear: Fix :cve:`2021-36369`
+- ffmpeg: Fix :cve:`2022-3964`, :cve:`2022-3965`
+- go: Fix :cve:`2022-2880`
+- grub2: Fix :cve:`2022-2601`, :cve:`2022-3775` and :cve:`2022-28736`
+- libarchive: Fix :cve:`2022-36227`
+- libpam: Fix :cve:`2022-28321`
+- libsndfile1: Fix :cve:`2021-4156`
+- lighttpd: Fix :cve:`2022-41556`
+- openssl: Fix :cve:`2022-3358`
+- pixman: Fix :cve:`2022-44638`
+- python3-mako: Fix :cve:`2022-40023`
+- python3: Fix :cve:`2022-42919`
+- qemu: Fix :cve:`2022-3165`
+- sysstat: Fix :cve:`2022-39377`
+- systemd: Fix :cve:`2022-3821`
+- tiff: Fix :cve:`2022-2953`, :cve:`2022-3599`, :cve:`2022-3597`, :cve:`2022-3626`, :cve:`2022-3627`, :cve:`2022-3570`, :cve:`2022-3598` and :cve:`2022-3970`
+- vim: Fix :cve:`2022-3352`, :cve:`2022-3705` and :cve:`2022-4141`
+- wayland: Fix :cve:`2021-3782`
+- xserver-xorg: Fix :cve:`2022-3550` and :cve:`2022-3551`
+
+
+Fixes in Yocto-4.0.6
+~~~~~~~~~~~~~~~~~~~~
+
+- archiver: avoid using machine variable as it breaks multiconfig
+- babeltrace: upgrade to 1.5.11
+- bind: upgrade to 9.18.8
+- bitbake.conf: Drop export of SOURCE_DATE_EPOCH_FALLBACK
+- bitbake: gitsm: Fix regression in gitsm submodule path parsing
+- bitbake: runqueue: Fix race issues around hash equivalence and sstate reuse
+- bluez5: Point hciattach bcm43xx firmware search path to /lib/firmware
+- bluez5: add dbus to RDEPENDS
+- build-appliance-image: Update to kirkstone head revision
+- buildtools-tarball: export certificates to python and curl
+- cargo_common.bbclass: Fix typos
+- classes: make TOOLCHAIN more permissive for kernel
+- cmake-native: Fix host tool contamination (Bug: 14951)
+- common-tasks.rst: fix oeqa runtime test path
+- create-spdx.bbclass: remove unused SPDX_INCLUDE_PACKAGED
+- create-spdx: Remove ";name=..." for downloadLocation
+- create-spdx: default share_src for shared sources
+- cve-update-db-native: add timeout to urlopen() calls
+- dbus: upgrade to 1.14.4
+- dhcpcd: fix to work with systemd
+- expat: upgrade to 2.5.0
+- externalsrc.bbclass: Remove a trailing slash from ${B}
+- externalsrc.bbclass: fix git repo detection
+- externalsrc: git submodule--helper list unsupported
+- gcc-shared-source: Fix source date epoch handling
+- gcc-source: Drop gengtype manipulation
+- gcc-source: Ensure deploy_source_date_epoch sstate hash doesn't change
+- gcc-source: Fix gengtypes race
+- gdk-pixbuf: upgrade to 2.42.10
+- get_module_deps3.py: Check attribute '__file__'
+- glib-2.0: fix rare GFileInfo test case failure
+- glibc-locale: Do not INHIBIT_DEFAULT_DEPS
+- gnomebase.bbclass: return the whole version for tarball directory if it is a number
+- gnutls: Unified package names to lower-case
+- groff: submit patches upstream
+- gstreamer1.0-libav: fix errors with ffmpeg 5.x
+- gstreamer1.0: upgrade to 1.20.4
+- ifupdown: upgrade to 0.8.39
+- insane.bbclass: Allow hashlib version that only accepts on parameter
+- iso-codes: upgrade to 4.12.0
+- kea: submit patch upstream (fix-multilib-conflict.patch)
+- kern-tools: fix relative path processing
+- kern-tools: integrate ZFS speedup patch
+- kernel-yocto: improve fatal error messages of symbol_why.py
+- kernel.bbclass: Include randstruct seed assets in STAGING_KERNEL_BUILDDIR
+- kernel.bbclass: make KERNEL_DEBUG_TIMESTAMPS work at rebuild
+- kernel: Clear SYSROOT_DIRS instead of replacing sysroot_stage_all
+- libcap: upgrade to 2.66
+- libepoxy: convert to git
+- libepoxy: update to 1.5.10
+- libffi: submit patch upstream (0001-arm-sysv-reverted-clang-VFP-mitigation.patch )
+- libffi: upgrade to 3.4.4
+- libical: upgrade to 3.0.16
+- libksba: upgrade to 1.6.2
+- libuv: fixup SRC_URI
+- libxcrypt: upgrade to 4.4.30
+- lighttpd: upgrade to 1.4.67
+- linux-firmware: add new fw file to ${PN}-qcom-adreno-a530
+- linux-firmware: don't put the firmware into the sysroot
+- linux-firmware: package amdgpu firmware
+- linux-firmware: split rtl8761 firmware
+- linux-firmware: upgrade to 20221109
+- linux-yocto/5.10: update genericx86* machines to v5.10.149
+- linux-yocto/5.15: fix CONFIG_CRYPTO_CCM mismatch warnings
+- linux-yocto/5.15: update genericx86* machines to v5.15.72
+- linux-yocto/5.15: update to v5.15.78
+- ltp: backport clock_gettime04 fix from upstream
+- lttng-modules: upgrade to 2.13.7
+- lttng-tools: Upgrade to 2.13.8
+- lttng-tools: submit determinism.patch upstream
+- lttng-ust: upgrade to 2.13.5
+- meson: make wrapper options sub-command specific
+- meta-selftest/staticids: add render group for systemd
+- mirrors.bbclass: update CPAN_MIRROR
+- mirrors.bbclass: use shallow tarball for binutils-native
+- mobile-broadband-provider-info: upgrade 20220725 -> 20221107
+- mtd-utils: upgrade 2.1.4 -> 2.1.5
+- numactl: upgrade to 2.0.16
+- oe/packagemanager/rpm: don't leak file objects
+- oeqa/selftest/lic_checksum: Cleanup changes to emptytest include
+- oeqa/selftest/minidebuginfo: Create selftest for minidebuginfo
+- oeqa/selftest/tinfoil: Add test for separate config_data with recipe_parse_file()
+- openssl: Fix SSL_CERT_FILE to match ca-certs location
+- openssl: upgrade to 3.0.7
+- openssl: export necessary env vars in SDK
+- opkg-utils: use a git clone, not a dynamic snapshot
+- opkg: Set correct info_dir and status_file in opkg.conf
+- overlayfs: Allow not used mount points
+- ovmf: correct patches status
+- package: Fix handling of minidebuginfo with newer binutils
+- perf: Depend on native setuptools3
+- poky.conf: bump version for 4.0.6
+- psplash: add psplash-default in rdepends
+- psplash: consider the situation of psplash not exist for systemd
+- python3: advance to version 3.10.8
+- qemu-helper-native: Correctly pass program name as argv[0]
+- qemu-helper-native: Re-write bridge helper as C program
+- qemu-native: Add PACKAGECONFIG option for jack
+- qemu: add io_uring PACKAGECONFIG
+- quilt: backport a patch to address grep 3.8 failures
+- resolvconf: make it work
+- rm_work: exclude the SSTATETASKS from the rm_work tasks sinature
+- runqemu: Do not perturb script environment
+- runqemu: Fix gl-es argument from causing other arguments to be ignored
+- sanity: Drop data finalize call
+- sanity: check for GNU tar specifically
+- scripts/oe-check-sstate: cleanup
+- scripts/oe-check-sstate: force build to run for all targets, specifically populate_sysroot
+- scripts: convert-overrides: Allow command-line customizations
+- socat: upgrade to 1.7.4.4
+- SPDX and CVE documentation updates
+- sstate: Allow optimisation of do_deploy_archives task dependencies
+- sstatesig: emit more helpful error message when not finding sstate manifest
+- sstatesig: skip the rm_work task signature
+- sudo: upgrade to 1.9.12p1
+- systemd: Consider PACKAGECONFIG in RRECOMMENDS
+- systemd: add group render to udev package
+- tcl: correct patch status
+- tiff: refresh with devtool
+- tiff: add CVE tag to b258ed69a485a9cfb299d9f060eb2a46c54e5903.patch
+- u-boot: Remove duplicate inherit of cml1
+- uboot-sign: Fix using wrong KEY_REQ_ARGS
+- vala: install vapigen-wrapper into /usr/bin/crosscripts and stage only that
+- valgrind: remove most hidden tests for arm64
+- vim: Upgrade to 9.0.0947
+- vulkan-samples: add lfs=0 to SRC_URI to avoid git smudge errors in do_unpack
+- wic: honor the SOURCE_DATE_EPOCH in case of updated fstab
+- wic: make ext2/3/4 images reproducible
+- wic: swap partitions are not added to fstab
+- wpebackend-fdo: upgrade to 1.14.0
+- xserver-xorg: move some recommended dependencies in required
+- xwayland: upgrade to 22.1.5
+
+
+Known Issues in Yocto-4.0.6
+~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+- N/A
+
+
+Contributors to Yocto-4.0.6
+~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+- Alex Kiernan
+- Alexander Kanavin
+- Alexey Smirnov
+- Bartosz Golaszewski
+- Bernhard Rosenkränzer
+- Bhabu Bindu
+- Bruce Ashfield
+- Chee Yang Lee
+- Chen Qi
+- Christian Eggers
+- Claus Stovgaard
+- Diego Sueiro
+- Dmitry Baryshkov
+- Ed Tanous
+- Enrico Jörns
+- Etienne Cordonnier
+- Frank de Brabander
+- Harald Seiler
+- Hitendra Prajapati
+- Jan-Simon Moeller
+- Jeremy Puhlman
+- Joe Slater
+- John Edward Broadbent
+- Jose Quaresma
+- Joshua Watt
+- Kai Kang
+- Keiya Nobuta
+- Khem Raj
+- Konrad Weihmann
+- Leon Anavi
+- Liam Beguin
+- Marek Vasut
+- Mark Hatle
+- Martin Jansa
+- Michael Opdenacker
+- Mikko Rapeli
+- Narpat Mali
+- Nathan Rossi
+- Niko Mauno
+- Pavel Zhukov
+- Peter Kjellerstedt
+- Peter Marko
+- Polampalli, Archana
+- Qiu, Zheng
+- Ravula Adhitya Siddartha
+- Richard Purdie
+- Ross Burton
+- Sakib Sajal
+- Sean Anderson
+- Sergei Zhmylev
+- Steve Sakoman
+- Teoh Jay Shen
+- Thomas Perrot
+- Tim Orling
+- Vincent Davis Jr
+- Vivek Kumbhar
+- Vyacheslav Yurkov
+- Wang Mingyu
+- Xiangyu Chen
+- Zheng Qiu
+- Ciaran Courtney
+- Wang Mingyu
+
+
+Repositories / Downloads for Yocto-4.0.6
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+poky
+
+- Repository Location: :yocto_git:`/poky`
+- Branch: :yocto_git:`kirkstone </poky/log/?h=kirkstone>`
+- Tag: :yocto_git:`yocto-4.0.6 </poky/log/?h=yocto-4.0.6>`
+- Git Revision: :yocto_git:`c4e08719a782fd4119eaf643907b80cebf57f88f </poky/commit/?id=c4e08719a782fd4119eaf643907b80cebf57f88f>`
+- Release Artefact: poky-c4e08719a782fd4119eaf643907b80cebf57f88f
+- sha: 2eb3b323dd2ccd25f9442bfbcbde82bc081fad5afd146a8e6dde439db24a99d4
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.0.6/poky-c4e08719a782fd4119eaf643907b80cebf57f88f.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.0.6/poky-c4e08719a782fd4119eaf643907b80cebf57f88f.tar.bz2
+
+openembedded-core
+
+- Repository Location: :oe_git:`/openembedded-core`
+- Branch: :oe_git:`kirkstone </openembedded-core/log/?h=kirkstone>`
+- Tag: :oe_git:`yocto-4.0.6 </openembedded-core/log/?h=yocto-4.0.6>`
+- Git Revision: :oe_git:`45a8b4101b14453aa3020d3f2b8a76b4dc0ae3f2 </openembedded-core/commit/?id=45a8b4101b14453aa3020d3f2b8a76b4dc0ae3f2>`
+- Release Artefact: oecore-45a8b4101b14453aa3020d3f2b8a76b4dc0ae3f2
+- sha: de8b443365927befe67cc443b60db57563ff0726377223f836a3f3971cf405ec
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.0.6/oecore-45a8b4101b14453aa3020d3f2b8a76b4dc0ae3f2.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.0.6/oecore-45a8b4101b14453aa3020d3f2b8a76b4dc0ae3f2.tar.bz2
+
+meta-mingw
+
+- Repository Location: :yocto_git:`/meta-mingw`
+- Branch: :yocto_git:`kirkstone </meta-mingw/log/?h=kirkstone>`
+- Tag: :yocto_git:`yocto-4.0.6 </meta-mingw/log/?h=yocto-4.0.6>`
+- Git Revision: :yocto_git:`a90614a6498c3345704e9611f2842eb933dc51c1 </meta-mingw/commit/?id=a90614a6498c3345704e9611f2842eb933dc51c1>`
+- Release Artefact: meta-mingw-a90614a6498c3345704e9611f2842eb933dc51c1
+- sha: 49f9900bfbbc1c68136f8115b314e95d0b7f6be75edf36a75d9bcd1cca7c6302
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.0.6/meta-mingw-a90614a6498c3345704e9611f2842eb933dc51c1.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.0.6/meta-mingw-a90614a6498c3345704e9611f2842eb933dc51c1.tar.bz2
+
+meta-gplv2
+
+- Repository Location: :yocto_git:`/meta-gplv2`
+- Branch: :yocto_git:`kirkstone </meta-gplv2/log/?h=kirkstone>`
+- Tag: :yocto_git:`yocto-4.0.6 </meta-gplv2/log/?h=yocto-4.0.6>`
+- Git Revision: :yocto_git:`d2f8b5cdb285b72a4ed93450f6703ca27aa42e8a </meta-gplv2/commit/?id=d2f8b5cdb285b72a4ed93450f6703ca27aa42e8a>`
+- Release Artefact: meta-gplv2-d2f8b5cdb285b72a4ed93450f6703ca27aa42e8a
+- sha: c386f59f8a672747dc3d0be1d4234b6039273d0e57933eb87caa20f56b9cca6d
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.0.6/meta-gplv2-d2f8b5cdb285b72a4ed93450f6703ca27aa42e8a.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.0.6/meta-gplv2-d2f8b5cdb285b72a4ed93450f6703ca27aa42e8a.tar.bz2
+
+bitbake
+
+- Repository Location: :oe_git:`/bitbake`
+- Branch: :oe_git:`2.0 </bitbake/log/?h=2.0>`
+- Tag: :oe_git:`yocto-4.0.6 </bitbake/log/?h=yocto-4.0.6>`
+- Git Revision: :oe_git:`7e268c107bb0240d583d2c34e24a71e373382509 </bitbake/commit/?id=7e268c107bb0240d583d2c34e24a71e373382509>`
+- Release Artefact: bitbake-7e268c107bb0240d583d2c34e24a71e373382509
+- sha: c3e2899012358c95962c7a5c85cf98dc30c58eae0861c374124e96d9556bb901
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.0.6/bitbake-7e268c107bb0240d583d2c34e24a71e373382509.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.0.6/bitbake-7e268c107bb0240d583d2c34e24a71e373382509.tar.bz2
+
+yocto-docs
+
+- Repository Location: :yocto_git:`/yocto-docs`
+- Branch: :yocto_git:`kirkstone </yocto-docs/log/?h=kirkstone>`
+- Tag: :yocto_git:`yocto-4.0.6 </yocto-docs/log/?h=yocto-4.0.6>`
+- Git Revision: :yocto_git:`c10d65ef3bbdf4fe3abc03e3aef3d4ca8c2ad87f </yocto-docs/commit/?id=c10d65ef3bbdf4fe3abc03e3aef3d4ca8c2ad87f>`
+
+
diff --git a/documentation/migration-guides/release-notes-4.0.7.rst b/documentation/migration-guides/release-notes-4.0.7.rst
new file mode 100644
index 0000000000..c3885d9e0e
--- /dev/null
+++ b/documentation/migration-guides/release-notes-4.0.7.rst
@@ -0,0 +1,242 @@
+.. SPDX-License-Identifier: CC-BY-SA-2.0-UK
+
+Release notes for Yocto-4.0.7 (Kirkstone)
+-----------------------------------------
+
+Security Fixes in Yocto-4.0.7
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+- binutils: Fix :cve:`2022-4285`
+- curl: Fix :cve:`2022-43551` and :cve_mitre:`2022-43552`
+- ffmpeg: Fix :cve:`2022-3109` and :cve:`2022-3341`
+- go: Fix :cve:`2022-41715` and :cve:`2022-41717`
+- libX11: Fix :cve:`2022-3554` and :cve:`2022-3555`
+- libarchive: Fix :cve:`2022-36227`
+- libksba: Fix :cve:`2022-47629`
+- libpng: Fix :cve:`2019-6129`
+- libxml2: Fix :cve:`2022-40303` and :cve:`2022-40304`
+- openssl: Fix :cve:`2022-3996`
+- python3: Fix :cve:`2022-45061`
+- python3-git: Fix :cve:`2022-24439`
+- python3-setuptools: Fix :cve:`2022-40897`
+- python3-wheel: Fix :cve:`2022-40898`
+- qemu: Fix :cve:`2022-4144`
+- sqlite: Fix :cve:`2022-46908`
+- systemd: Fix :cve:`2022-45873`
+- vim: Fix :cve:`2023-0049`, :cve:`2023-0051`, :cve:`2023-0054` and :cve:`2023-0088`
+- webkitgtk: Fix :cve:`2022-32886`, :cve_mitre:`2022-32891` and :cve:`2022-32912`
+
+
+Fixes in Yocto-4.0.7
+~~~~~~~~~~~~~~~~~~~~
+
+- Revert "gstreamer1.0: disable flaky gstbin:test_watch_for_state_change test"
+- at: Change when files are copied
+- baremetal-image: Avoid overriding qemu variables from IMAGE_CLASSES
+- base.bbclass: Fix way to check ccache path
+- bc: extend to nativesdk
+- bind: upgrade to 9.18.10
+- busybox: always start do_compile with orig config files
+- busybox: rm temporary files if do_compile was interrupted
+- cairo: fix CVE patches assigned wrong CVE number
+- cairo: update patch for :cve:`2019-6461` with upstream solution
+- classes/create-spdx: Add SPDX_PRETTY option
+- classes: image: Set empty weak default IMAGE_LINGUAS
+- combo-layer: add sync-revs command
+- combo-layer: dont use bb.utils.rename
+- combo-layer: remove unused import
+- curl: Correct LICENSE from MIT-open-group to curl
+- cve-check: write the cve manifest to IMGDEPLOYDIR
+- cve-update-db-native: avoid incomplete updates
+- cve-update-db-native: show IP on failure
+- dbus: Add missing CVE product name
+- devtool/upgrade: correctly handle recipes where S is a subdir of upstream tree
+- devtool: process local files only for the main branch
+- dhcpcd: backport two patches to fix runtime error
+- docs: kernel-dev: faq: update tip on how to not include kernel in image
+- docs: migration-4.0: specify variable name change for kernel inclusion in image recipe
+- efibootmgr: update compilation with musl
+- externalsrc: fix lookup for .gitmodules
+- ffmpeg: refresh patches to apply cleanly
+- freetype:update mirror site.
+- gcc: Refactor linker patches and fix linker on arm with usrmerge
+- glibc: stable 2.35 branch updates.
+- go-crosssdk: avoid host contamination by GOCACHE
+- gstreamer1.0: Fix race conditions in gstbin tests
+- gstreamer1.0: upgrade to 1.20.5
+- gtk-icon-cache: Fix GTKIC_CMD if-else condition
+- harfbuzz: remove bindir only if it exists
+- kernel-fitimage: Adjust order of dtb/dtbo files
+- kernel-fitimage: Allow user to select dtb when multiple dtb exists
+- kernel.bbclass: remove empty module directories to prevent QA issues
+- lib/buildstats: fix parsing of trees with reduced_proc_pressure directories
+- lib/oe/reproducible: Use git log without gpg signature
+- libepoxy: remove upstreamed patch
+- libnewt: update 0.52.21 -> 0.52.23
+- libseccomp: fix typo in DESCRIPTION
+- libxcrypt-compat: upgrade 4.4.30 -> 4.4.33
+- libxml2: fix test data checksums
+- linux-firmware: upgrade 20221109 -> 20221214
+- linux-yocto/5.10: update to v5.10.152
+- linux-yocto/5.10: update to v5.10.154
+- linux-yocto/5.10: update to v5.10.160
+- linux-yocto/5.15: fix perf build with clang
+- linux-yocto/5.15: libbpf: Fix build warning on ref_ctr_off
+- linux-yocto/5.15: ltp and squashfs fixes
+- linux-yocto/5.15: powerpc: Fix reschedule bug in KUAP-unlocked user copy
+- linux-yocto/5.15: update to v5.15.84
+- lsof: add update-alternatives logic
+- lttng-modules: update 2.13.7 -> 2.13.8
+- manuals: add 4.0.5 and 4.0.6 release notes
+- manuals: document SPDX_PRETTY variable
+- mpfr: upgrade 4.1.0 -> 4.1.1
+- oeqa/concurrencytest: Add number of failures to summary output
+- oeqa/rpm.py: Increase timeout and add debug output
+- oeqa/selftest/externalsrc: add test for srctree_hash_files
+- openssh: remove RRECOMMENDS to rng-tools for sshd package
+- poky.conf: bump version for 4.0.7
+- qemuboot.bbclass: make sure runqemu boots bundled initramfs kernel image
+- rm_work.bbclass: use HOSTTOOLS 'rm' binary exclusively
+- rm_work: adjust dependency to make do_rm_work_all depend on do_rm_work
+- ruby: merge .inc into .bb
+- ruby: update 3.1.2 -> 3.1.3
+- selftest/virgl: use pkg-config from the host
+- tiff: Add packageconfig knob for webp
+- toolchain-scripts: compatibility with unbound variable protection
+- tzdata: update 2022d -> 2022g
+- valgrind: skip the boost_thread test on arm
+- xserver-xorg: upgrade 21.1.4 -> 21.1.6
+- xwayland: libxshmfence is needed when dri3 is enabled
+- xwayland: upgrade 22.1.5 -> 22.1.7
+- yocto-check-layer: Allow OE-Core to be tested
+
+
+Known Issues in Yocto-4.0.7
+~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+- N/A
+
+
+Contributors to Yocto-4.0.7
+~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+- Alejandro Hernandez Samaniego
+- Alex Kiernan
+- Alex Stewart
+- Alexander Kanavin
+- Antonin Godard
+- Benoît Mauduit
+- Bhabu Bindu
+- Bruce Ashfield
+- Carlos Alberto Lopez Perez
+- Changqing Li
+- Chen Qi
+- Daniel Gomez
+- Florin Diaconescu
+- He Zhe
+- Hitendra Prajapati
+- Jagadeesh Krishnanjanappa
+- Jan Kircher
+- Jermain Horsman
+- Jose Quaresma
+- Joshua Watt
+- KARN JYE LAU
+- Kai Kang
+- Khem Raj
+- Luis
+- Marta Rybczynska
+- Martin Jansa
+- Mathieu Dubois-Briand
+- Michael Opdenacker
+- Narpat Mali
+- Ovidiu Panait
+- Pavel Zhukov
+- Peter Marko
+- Petr Kubizňák
+- Quentin Schulz
+- Randy MacLeod
+- Ranjitsinh Rathod
+- Richard Purdie
+- Robert Andersson
+- Ross Burton
+- Sandeep Gundlupet Raju
+- Saul Wold
+- Steve Sakoman
+- Vivek Kumbhar
+- Wang Mingyu
+- Xiangyu Chen
+- Yash Shinde
+- Yogita Urade
+
+
+Repositories / Downloads for Yocto-4.0.7
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+poky
+
+- Repository Location: :yocto_git:`/poky`
+- Branch: :yocto_git:`kirkstone </poky/log/?h=kirkstone>`
+- Tag: :yocto_git:`yocto-4.0.7 </poky/log/?h=yocto-4.0.7>`
+- Git Revision: :yocto_git:`65dafea22018052fe7b2e17e6e4d7eb754224d38 </poky/commit/?id=65dafea22018052fe7b2e17e6e4d7eb754224d38>`
+- Release Artefact: poky-65dafea22018052fe7b2e17e6e4d7eb754224d38
+- sha: 6b1b67600b84503e2d5d29bcd6038547339f4f9413b830cd2408df825eda642d
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.0.7/poky-65dafea22018052fe7b2e17e6e4d7eb754224d38.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.0.7/poky-65dafea22018052fe7b2e17e6e4d7eb754224d38.tar.bz2
+
+openembedded-core
+
+- Repository Location: :oe_git:`/openembedded-core`
+- Branch: :oe_git:`kirkstone </openembedded-core/log/?h=kirkstone>`
+- Tag: :oe_git:`yocto-4.0.7 </openembedded-core/log/?h=yocto-4.0.7>`
+- Git Revision: :oe_git:`a8c82902384f7430519a31732a4bb631f21693ac </openembedded-core/commit/?id=a8c82902384f7430519a31732a4bb631f21693ac>`
+- Release Artefact: oecore-a8c82902384f7430519a31732a4bb631f21693ac
+- sha: 6f2dbc4ea1e388620ef77ac3a7bbb2b5956bb8bf9349b0c16cd7610e9996f5ea
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.0.7/oecore-a8c82902384f7430519a31732a4bb631f21693ac.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.0.7/oecore-a8c82902384f7430519a31732a4bb631f21693ac.tar.bz2
+
+meta-mingw
+
+- Repository Location: :yocto_git:`/meta-mingw`
+- Branch: :yocto_git:`kirkstone </meta-mingw/log/?h=kirkstone>`
+- Tag: :yocto_git:`yocto-4.0.7 </meta-mingw/log/?h=yocto-4.0.7>`
+- Git Revision: :yocto_git:`a90614a6498c3345704e9611f2842eb933dc51c1 </meta-mingw/commit/?id=a90614a6498c3345704e9611f2842eb933dc51c1>`
+- Release Artefact: meta-mingw-a90614a6498c3345704e9611f2842eb933dc51c1
+- sha: 49f9900bfbbc1c68136f8115b314e95d0b7f6be75edf36a75d9bcd1cca7c6302
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.0.7/meta-mingw-a90614a6498c3345704e9611f2842eb933dc51c1.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.0.7/meta-mingw-a90614a6498c3345704e9611f2842eb933dc51c1.tar.bz2
+
+meta-gplv2
+
+- Repository Location: :yocto_git:`/meta-gplv2`
+- Branch: :yocto_git:`kirkstone </meta-gplv2/log/?h=kirkstone>`
+- Tag: :yocto_git:`yocto-4.0.7 </meta-gplv2/log/?h=yocto-4.0.7>`
+- Git Revision: :yocto_git:`d2f8b5cdb285b72a4ed93450f6703ca27aa42e8a </meta-gplv2/commit/?id=d2f8b5cdb285b72a4ed93450f6703ca27aa42e8a>`
+- Release Artefact: meta-gplv2-d2f8b5cdb285b72a4ed93450f6703ca27aa42e8a
+- sha: c386f59f8a672747dc3d0be1d4234b6039273d0e57933eb87caa20f56b9cca6d
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.0.7/meta-gplv2-d2f8b5cdb285b72a4ed93450f6703ca27aa42e8a.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.0.7/meta-gplv2-d2f8b5cdb285b72a4ed93450f6703ca27aa42e8a.tar.bz2
+
+bitbake
+
+- Repository Location: :oe_git:`/bitbake`
+- Branch: :oe_git:`2.0 </bitbake/log/?h=2.0>`
+- Tag: :oe_git:`yocto-4.0.7 </bitbake/log/?h=yocto-4.0.7>`
+- Git Revision: :oe_git:`7e268c107bb0240d583d2c34e24a71e373382509 </bitbake/commit/?id=7e268c107bb0240d583d2c34e24a71e373382509>`
+- Release Artefact: bitbake-7e268c107bb0240d583d2c34e24a71e373382509
+- sha: c3e2899012358c95962c7a5c85cf98dc30c58eae0861c374124e96d9556bb901
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.0.7/bitbake-7e268c107bb0240d583d2c34e24a71e373382509.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.0.7/bitbake-7e268c107bb0240d583d2c34e24a71e373382509.tar.bz2
+
+yocto-docs
+
+- Repository Location: :yocto_git:`/yocto-docs`
+- Branch: :yocto_git:`kirkstone </yocto-docs/log/?h=kirkstone>`
+- Tag: :yocto_git:`yocto-4.0.7 </yocto-docs/log/?h=yocto-4.0.7>`
+- Git Revision: :yocto_git:`5883e897c34f25401b358a597fb6e18d80f7f90b </yocto-docs/commit/?id=5883e897c34f25401b358a597fb6e18d80f7f90b>`
+
+
diff --git a/documentation/migration-guides/release-notes-4.0.8.rst b/documentation/migration-guides/release-notes-4.0.8.rst
new file mode 100644
index 0000000000..223b74fbaf
--- /dev/null
+++ b/documentation/migration-guides/release-notes-4.0.8.rst
@@ -0,0 +1,217 @@
+.. SPDX-License-Identifier: CC-BY-SA-2.0-UK
+
+Release notes for Yocto-4.0.8 (Kirkstone)
+-----------------------------------------
+
+Security Fixes in Yocto-4.0.8
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+- apr-util: Fix :cve:`2022-25147`
+- apr: Fix :cve:`2022-24963`, :cve:`2022-28331` and :cve:`2021-35940`
+- bind: Fix :cve:`2022-3094`, :cve:`2022-3736` and :cve:`2022-3924`
+- git: Ignore :cve:`2022-41953`
+- git: Fix :cve:`2022-23521` and :cve:`2022-41903`
+- libgit2: Fix :cve:`2023-22742`
+- ppp: Fix :cve:`2022-4603`
+- python3-certifi: Fix :cve:`2022-23491`
+- sudo: Fix :cve:`2023-22809`
+- tar: Fix :cve:`2022-48303`
+
+
+Fixes in Yocto-4.0.8
+~~~~~~~~~~~~~~~~~~~~
+
+- core-image.bbclass: Fix missing leading whitespace with ':append'
+- populate_sdk_ext.bbclass: Fix missing leading whitespace with ':append'
+- ptest-packagelists.inc: Fix missing leading whitespace with ':append'
+- apr-util: upgrade to 1.6.3
+- apr: upgrade to 1.7.2
+- apt: fix do_package_qa failure
+- bind: upgrade to 9.18.11
+- bitbake: bb/utils: include SSL certificate paths in export_proxies
+- bitbake: bitbake-diffsigs: Make PEP8 compliant
+- bitbake: bitbake-diffsigs: break on first dependent task difference
+- bitbake: fetch2/git: Clarify the meaning of namespace
+- bitbake: fetch2/git: Prevent git fetcher from fetching gitlab repository metadata
+- bitbake: fetch2/git: show SRCREV and git repo in error message about fixed SRCREV
+- bitbake: siggen: Fix inefficient string concatenation
+- bitbake: utils/ply: Update md5 to better report errors with hashlib
+- bootchart2: Fix usrmerge support
+- bsp-guide: fix broken git URLs and missing word
+- build-appliance-image: Update to kirkstone head revision
+- buildtools-tarball: set pkg-config search path
+- classes/fs-uuid: Fix command output decoding issue
+- dev-manual: common-tasks.rst: add link to FOSDEM 2023 video
+- dev-manual: fix old override syntax
+- devshell: Do not add scripts/git-intercept to PATH
+- devtool: fix devtool finish when gitmodules file is empty
+- diffutils: upgrade to 3.9
+- gdk-pixbuf: do not use tools from gdk-pixbuf-native when building tests
+- git: upgrade to 2.35.7
+- glslang: branch rename master -> main
+- httpserver: add error handler that write to the logger
+- image.bbclass: print all QA functions exceptions
+- kernel/linux-kernel-base: Fix kernel build artefact determinism issues
+- libc-locale: Fix on target locale generation
+- libgit2: upgrade to 1.4.5
+- libjpeg-turbo: upgrade to 2.1.5
+- libtirpc: Check if file exists before operating on it
+- libusb1: Link with latomic only if compiler has no atomic builtins
+- libusb1: Strip trailing whitespaces
+- linux-firmware: upgrade to 20230117
+- linux-yocto/5.15: update to v5.15.91
+- lsof: fix old override syntax
+- lttng-modules: Fix for 5.10.163 kernel version
+- lttng-tools: upgrade to 2.13.9
+- make-mod-scripts: Ensure kernel build output is deterministic
+- manuals: update patchwork instance URL
+- meta: remove True option to getVar and getVarFlag calls (again)
+- migration-guides: add release-notes for 4.0.7
+- native: Drop special variable handling
+- numactl: skip test case when target platform doesn't have 2 CPU node
+- oeqa context.py: fix --target-ip comment to include ssh port number
+- oeqa dump.py: add error counter and stop after 5 failures
+- oeqa qemurunner.py: add timeout to QMP calls
+- oeqa qemurunner.py: try to avoid reading one character at a time
+- oeqa qemurunner: read more data at a time from serial
+- oeqa ssh.py: add connection keep alive options to ssh client
+- oeqa ssh.py: move output prints to new line
+- oeqa/qemurunner: do not use Popen.poll() when terminating runqemu with a signal
+- oeqa/selftest/bbtests: Update message lookup for test_git_unpack_nonetwork_fail
+- oeqa/selftest/locales: Add selftest for locale generation/presence
+- poky.conf: Update SANITY_TESTED_DISTROS to match autobuilder
+- poky.conf: bump version for 4.0.8
+- profile-manual: update WireShark hyperlinks
+- python3-pytest: depend on python3-tomli instead of python3-toml
+- qemu: fix compile error
+- quilt: fix intermittent failure in faildiff.test
+- quilt: use upstreamed faildiff.test fix
+- recipe_sanity: fix old override syntax
+- ref-manual: document SSTATE_EXCLUDEDEPS_SYSROOT
+- scons.bbclass: Make MAXLINELENGTH overridable
+- scons: Pass MAXLINELENGTH to scons invocation
+- sdkext/cases/devtool: pass a logger to HTTPService
+- spirv-headers: set correct branch name
+- sudo: upgrade to 1.9.12p2
+- system-requirements.rst: add Fedora 36 and AlmaLinux 8.7 to list of supported distros
+- testimage: Fix error message to reflect new syntax
+- update-alternatives: fix typos
+- vulkan-samples: branch rename master -> main
+
+
+Known Issues in Yocto-4.0.8
+~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+- N/A
+
+
+Contributors to Yocto-4.0.8
+~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+- Alejandro Hernandez Samaniego
+- Alexander Kanavin
+- Alexandre Belloni
+- Armin Kuster
+- Arnout Vandecappelle
+- Bruce Ashfield
+- Changqing Li
+- Chee Yang Lee
+- Etienne Cordonnier
+- Harald Seiler
+- Kai Kang
+- Khem Raj
+- Lee Chee Yang
+- Louis Rannou
+- Marek Vasut
+- Marius Kriegerowski
+- Mark Hatle
+- Martin Jansa
+- Mauro Queiros
+- Michael Opdenacker
+- Mikko Rapeli
+- Mingli Yu
+- Narpat Mali
+- Niko Mauno
+- Pawel Zalewski
+- Peter Kjellerstedt
+- Richard Purdie
+- Rodolfo Quesada Zumbado
+- Ross Burton
+- Sakib Sajal
+- Schmidt, Adriaan
+- Steve Sakoman
+- Thomas Roos
+- Ulrich Ölmann
+- Xiangyu Chen
+
+
+Repositories / Downloads for Yocto-4.0.8
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+poky
+
+- Repository Location: :yocto_git:`/poky`
+- Branch: :yocto_git:`kirkstone </poky/log/?h=kirkstone>`
+- Tag: :yocto_git:`yocto-4.0.8 </poky/log/?h=yocto-4.0.8>`
+- Git Revision: :yocto_git:`a361fb3df9c87cf12963a9d785a9f99faa839222 </poky/commit/?id=a361fb3df9c87cf12963a9d785a9f99faa839222>`
+- Release Artefact: poky-a361fb3df9c87cf12963a9d785a9f99faa839222
+- sha: af4e8d64be27d3a408357c49b7952ce04c6d8bb0b9d7b50c48848d9355de7fc2
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.0.8/poky-a361fb3df9c87cf12963a9d785a9f99faa839222.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.0.8/poky-a361fb3df9c87cf12963a9d785a9f99faa839222.tar.bz2
+
+openembedded-core
+
+- Repository Location: :oe_git:`/openembedded-core`
+- Branch: :oe_git:`kirkstone </openembedded-core/log/?h=kirkstone>`
+- Tag: :oe_git:`yocto-4.0.8 </openembedded-core/log/?h=yocto-4.0.8>`
+- Git Revision: :oe_git:`b20e2134daec33fbb8ce358d984751d887752bd5 </openembedded-core/commit/?id=b20e2134daec33fbb8ce358d984751d887752bd5>`
+- Release Artefact: oecore-b20e2134daec33fbb8ce358d984751d887752bd5
+- sha: 63cce6f1caf8428eefc1471351ab024affc8a41d8d7777f525e3aa9ea454d2cd
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.0.8/oecore-b20e2134daec33fbb8ce358d984751d887752bd5.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.0.8/oecore-b20e2134daec33fbb8ce358d984751d887752bd5.tar.bz2
+
+meta-mingw
+
+- Repository Location: :yocto_git:`/meta-mingw`
+- Branch: :yocto_git:`kirkstone </meta-mingw/log/?h=kirkstone>`
+- Tag: :yocto_git:`yocto-4.0.8 </meta-mingw/log/?h=yocto-4.0.8>`
+- Git Revision: :yocto_git:`a90614a6498c3345704e9611f2842eb933dc51c1 </meta-mingw/commit/?id=a90614a6498c3345704e9611f2842eb933dc51c1>`
+- Release Artefact: meta-mingw-a90614a6498c3345704e9611f2842eb933dc51c1
+- sha: 49f9900bfbbc1c68136f8115b314e95d0b7f6be75edf36a75d9bcd1cca7c6302
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.0.8/meta-mingw-a90614a6498c3345704e9611f2842eb933dc51c1.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.0.8/meta-mingw-a90614a6498c3345704e9611f2842eb933dc51c1.tar.bz2
+
+meta-gplv2
+
+- Repository Location: :yocto_git:`/meta-gplv2`
+- Branch: :yocto_git:`kirkstone </meta-gplv2/log/?h=kirkstone>`
+- Tag: :yocto_git:`yocto-4.0.8 </meta-gplv2/log/?h=yocto-4.0.8>`
+- Git Revision: :yocto_git:`d2f8b5cdb285b72a4ed93450f6703ca27aa42e8a </meta-gplv2/commit/?id=d2f8b5cdb285b72a4ed93450f6703ca27aa42e8a>`
+- Release Artefact: meta-gplv2-d2f8b5cdb285b72a4ed93450f6703ca27aa42e8a
+- sha: c386f59f8a672747dc3d0be1d4234b6039273d0e57933eb87caa20f56b9cca6d
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.0.8/meta-gplv2-d2f8b5cdb285b72a4ed93450f6703ca27aa42e8a.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.0.8/meta-gplv2-d2f8b5cdb285b72a4ed93450f6703ca27aa42e8a.tar.bz2
+
+bitbake
+
+- Repository Location: :oe_git:`/bitbake`
+- Branch: :oe_git:`2.0 </bitbake/log/?h=2.0>`
+- Tag: :oe_git:`yocto-4.0.8 </bitbake/log/?h=yocto-4.0.8>`
+- Git Revision: :oe_git:`9bbdedc0ba7ca819b898e2a29a151d6a2014ca11 </bitbake/commit/?id=9bbdedc0ba7ca819b898e2a29a151d6a2014ca11>`
+- Release Artefact: bitbake-9bbdedc0ba7ca819b898e2a29a151d6a2014ca11
+- sha: 8e724411f4df00737e81b33eb568f1f97d2a00d5364342c0a212c46abb7b005b
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.0.8/bitbake-9bbdedc0ba7ca819b898e2a29a151d6a2014ca11.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.0.8/bitbake-9bbdedc0ba7ca819b898e2a29a151d6a2014ca11.tar.bz2
+
+yocto-docs
+
+- Repository Location: :yocto_git:`/yocto-docs`
+- Branch: :yocto_git:`kirkstone </yocto-docs/log/?h=kirkstone>`
+- Tag: :yocto_git:`yocto-4.0.8 </yocto-docs/log/?h=yocto-4.0.8>`
+- Git Revision: :yocto_git:`16ecbe028f2b9cc021267817a5413054e070b563 </yocto-docs/commit/?id=16ecbe028f2b9cc021267817a5413054e070b563>`
+
diff --git a/documentation/migration-guides/release-notes-4.0.9.rst b/documentation/migration-guides/release-notes-4.0.9.rst
new file mode 100644
index 0000000000..236477443a
--- /dev/null
+++ b/documentation/migration-guides/release-notes-4.0.9.rst
@@ -0,0 +1,249 @@
+.. SPDX-License-Identifier: CC-BY-SA-2.0-UK
+
+Release notes for Yocto-4.0.9 (Kirkstone)
+-----------------------------------------
+
+Security Fixes in Yocto-4.0.9
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+- binutils: Fix :cve:`2023-22608`
+- curl: Fix :cve:`2023-23914`, :cve:`2023-23915` and :cve:`2023-23916`
+- epiphany: Fix :cve:`2023-26081`
+- git: Ignore :cve:`2023-22743`
+- glibc: Fix :cve:`2023-0687`
+- gnutls: Fix :cve:`2023-0361`
+- go: Fix :cve:`2022-2879`, :cve:`2022-41720` and :cve:`2022-41723`
+- harfbuzz: Fix :cve:`2023-25193`
+- less: Fix :cve:`2022-46663`
+- libmicrohttpd: Fix :cve:`2023-27371`
+- libsdl2: Fix :cve:`2022-4743`
+- openssl: Fix :cve:`2022-3996`, :cve:`2023-0464`, :cve:`2023-0465` and :cve:`2023-0466`
+- pkgconf: Fix :cve:`2023-24056`
+- python3: Fix :cve:`2023-24329`
+- shadow: Ignore :cve:`2016-15024`
+- systemd: Fix :cve:`2022-4415`
+- tiff: Fix :cve:`2023-0800`, :cve:`2023-0801`, :cve:`2023-0802`, :cve:`2023-0803` and :cve:`2023-0804`
+- vim: Fix :cve:`2023-0433`, :cve:`2023-0512`, :cve:`2023-1127`, :cve:`2023-1170`, :cve:`2023-1175`, :cve:`2023-1264` and :cve:`2023-1355`
+- xserver-xorg: Fix :cve:`2023-0494`
+- xwayland: Fix :cve:`2023-0494`
+
+
+Fixes in Yocto-4.0.9
+~~~~~~~~~~~~~~~~~~~~
+
+- base-files: Drop localhost.localdomain from hosts file
+- binutils: Fix nativesdk ld.so search
+- bitbake: cookerdata: Drop dubious exception handling code
+- bitbake: cookerdata: Improve early exception handling
+- bitbake: cookerdata: Remove incorrect SystemExit usage
+- bitbake: fetch/git: Fix local clone url to make it work with repo
+- bitbake: utils: Allow to_boolean to support int values
+- bmap-tools: switch to main branch
+- buildtools-tarball: Handle spaces within user $PATH
+- busybox: Fix depmod patch
+- cracklib: update github branch to 'main'
+- cups: add/fix web interface packaging
+- cups: check PACKAGECONFIG for pam feature
+- cups: use BUILDROOT instead of DESTDIR
+- curl: fix dependencies when building with ldap/ldaps
+- cve-check: Fix false negative version issue
+- dbus: upgrade to 1.14.6
+- devtool/upgrade: do not delete the workspace/recipes directory
+- dhcpcd: Fix install conflict when enable multilib.
+- dhcpcd: fix dhcpcd start failure on qemuppc64
+- gcc-shared-source: do not use ${S}/.. in deploy_source_date_epoch
+- glibc: Add missing binutils dependency
+- image_types: fix multiubi var init
+- iso-codes: upgrade to 4.13.0
+- json-c: Add ptest for json-c
+- kernel-yocto: fix kernel-meta data detection
+- lib/buildstats: handle tasks that never finished
+- lib/resulttool: fix typo breaking resulttool log --ptest
+- libjpeg-turbo: upgrade to 2.1.5.1
+- libmicrohttpd: upgrade to 0.9.76
+- libseccomp: fix for the ptest result format
+- libssh2: Clean up ptest patch/coverage
+- linux-firmware: add yamato fw files to qcom-adreno-a2xx package
+- linux-firmware: properly set license for all Qualcomm firmware
+- linux-firmware: upgrade to 20230210
+- linux-yocto-rt/5.15: update to -rt59
+- linux-yocto/5.10: upgrade to v5.10.175
+- linux-yocto/5.15: upgrade to v5.15.103
+- linux: inherit pkgconfig in kernel.bbclass
+- lttng-modules: fix for kernel 6.2+
+- lttng-modules: upgrade to v2.13.9
+- lua: Fix install conflict when enable multilib.
+- mdadm: Fix raid0, 06wrmostly and 02lineargrow tests
+- meson: Fix wrapper handling of implicit setup command
+- migration-guides: add 4.0.8 release notes
+- nghttp2: never build python bindings
+- oeqa rtc.py: skip if read-only-rootfs
+- oeqa ssh.py: fix hangs in run()
+- oeqa/sdk: Improve Meson test
+- oeqa/selftest/prservice: Improve debug output for failure
+- oeqa/selftest/resulttooltests: fix minor typo
+- openssl: upgrade to 3.0.8
+- package.bbclase: Add check for /build in copydebugsources()
+- patchelf: replace a rejected patch with an equivalent uninative.bbclass tweak
+- poky.conf: bump version for 4.0.9
+- populate_sdk_ext: Handle spaces within user $PATH
+- pybootchartui: Fix python syntax issue
+- python3-git: fix indent error
+- python3-setuptools-rust-native: Add direct dependency of native python3 modules
+- qemu: Revert "fix :cve:`2021-3507`" as not applicable for qemu 6.2
+- rsync: Add missing prototypes to function declarations
+- rsync: Turn on -pedantic-errors at the end of 'configure'
+- runqemu: kill qemu if it hangs
+- scripts/lib/buildstats: handle top-level build_stats not being complete
+- selftest/recipetool: Stop test corrupting tinfoil class
+- selftest/runtime_test/virgl: Disable for all Rocky Linux
+- selftest: devtool: set BB_HASHSERVE_UPSTREAM when setting SSTATE_MIRROR
+- sstatesig: Improve output hash calculation
+- staging/multilib: Fix manifest corruption
+- staging: Separate out different multiconfig manifests
+- sudo: update 1.9.12p2 -> 1.9.13p3
+- systemd.bbclass: Add /usr/lib/systemd to searchpaths as well
+- systemd: add group sgx to udev package
+- systemd: fix wrong nobody-group assignment
+- timezone: use 'tz' subdir instead of ${WORKDIR} directly
+- toolchain-scripts: Handle spaces within user $PATH
+- tzcode-native: fix build with gcc-13 on host
+- tzdata: use separate B instead of WORKDIR for zic output
+- uninative: upgrade to 3.9 to include libgcc and glibc 2.37
+- vala: Fix install conflict when enable multilib.
+- vim: add missing pkgconfig inherit
+- vim: set modified-by to the recipe MAINTAINER
+- vim: upgrade to 9.0.1429
+- wic: Fix usage of fstype=none in wic
+- wireless-regdb: upgrade to 2023.02.13
+- xserver-xorg: upgrade to 21.1.7
+- xwayland: upgrade to 22.1.8
+
+
+Known Issues in Yocto-4.0.9
+~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+- N/A
+
+
+Contributors to Yocto-4.0.9
+~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+- Alexander Kanavin
+- Alexis Lothoré
+- Bruce Ashfield
+- Changqing Li
+- Chee Yang Lee
+- Dmitry Baryshkov
+- Federico Pellegrin
+- Geoffrey GIRY
+- Hitendra Prajapati
+- Hongxu Jia
+- Joe Slater
+- Kai Kang
+- Kenfe-Mickael Laventure
+- Khem Raj
+- Martin Jansa
+- Mateusz Marciniec
+- Michael Halstead
+- Michael Opdenacker
+- Mikko Rapeli
+- Ming Liu
+- Mingli Yu
+- Narpat Mali
+- Pavel Zhukov
+- Pawan Badganchi
+- Peter Marko
+- Piotr Łobacz
+- Poonam Jadhav
+- Randy MacLeod
+- Richard Purdie
+- Robert Yang
+- Romuald Jeanne
+- Ross Burton
+- Sakib Sajal
+- Saul Wold
+- Shubham Kulkarni
+- Siddharth Doshi
+- Simone Weiss
+- Steve Sakoman
+- Tim Orling
+- Tom Hochstein
+- Trevor Woerner
+- Ulrich Ölmann
+- Vivek Kumbhar
+- Wang Mingyu
+- Xiangyu Chen
+- Yash Shinde
+
+
+Repositories / Downloads for Yocto-4.0.9
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+poky
+
+- Repository Location: :yocto_git:`/poky`
+- Branch: :yocto_git:`kirkstone </poky/log/?h=kirkstone>`
+- Tag: :yocto_git:`yocto-4.0.9 </poky/log/?h=yocto-4.0.9>`
+- Git Revision: :yocto_git:`09def309f91929f47c6cce386016ccb777bd2cfc </poky/commit/?id=09def309f91929f47c6cce386016ccb777bd2cfc>`
+- Release Artefact: poky-09def309f91929f47c6cce386016ccb777bd2cfc
+- sha: 5c7ce209c8a6b37ec2898e5ca21858234d91999c11fa862880ba98e8bde62f63
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.0.9/poky-09def309f91929f47c6cce386016ccb777bd2cfc.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.0.9/poky-09def309f91929f47c6cce386016ccb777bd2cfc.tar.bz2
+
+openembedded-core
+
+- Repository Location: :oe_git:`/openembedded-core`
+- Branch: :oe_git:`kirkstone </openembedded-core/log/?h=kirkstone>`
+- Tag: :oe_git:`yocto-4.0.9 </openembedded-core/log/?h=yocto-4.0.9>`
+- Git Revision: :oe_git:`ff4b57ffff903a93b710284c7c7f916ddd74712f </openembedded-core/commit/?id=ff4b57ffff903a93b710284c7c7f916ddd74712f>`
+- Release Artefact: oecore-ff4b57ffff903a93b710284c7c7f916ddd74712f
+- sha: 726778ffc291136db1704316b196de979f68df9f96476b785e1791957fbb66b3
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.0.9/oecore-ff4b57ffff903a93b710284c7c7f916ddd74712f.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.0.9/oecore-ff4b57ffff903a93b710284c7c7f916ddd74712f.tar.bz2
+
+meta-mingw
+
+- Repository Location: :yocto_git:`/meta-mingw`
+- Branch: :yocto_git:`kirkstone </meta-mingw/log/?h=kirkstone>`
+- Tag: :yocto_git:`yocto-4.0.9 </meta-mingw/log/?h=yocto-4.0.9>`
+- Git Revision: :yocto_git:`a90614a6498c3345704e9611f2842eb933dc51c1 </meta-mingw/commit/?id=a90614a6498c3345704e9611f2842eb933dc51c1>`
+- Release Artefact: meta-mingw-a90614a6498c3345704e9611f2842eb933dc51c1
+- sha: 49f9900bfbbc1c68136f8115b314e95d0b7f6be75edf36a75d9bcd1cca7c6302
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.0.9/meta-mingw-a90614a6498c3345704e9611f2842eb933dc51c1.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.0.9/meta-mingw-a90614a6498c3345704e9611f2842eb933dc51c1.tar.bz2
+
+meta-gplv2
+
+- Repository Location: :yocto_git:`/meta-gplv2`
+- Branch: :yocto_git:`kirkstone </meta-gplv2/log/?h=kirkstone>`
+- Tag: :yocto_git:`yocto-4.0.9 </meta-gplv2/log/?h=yocto-4.0.9>`
+- Git Revision: :yocto_git:`d2f8b5cdb285b72a4ed93450f6703ca27aa42e8a </meta-gplv2/commit/?id=d2f8b5cdb285b72a4ed93450f6703ca27aa42e8a>`
+- Release Artefact: meta-gplv2-d2f8b5cdb285b72a4ed93450f6703ca27aa42e8a
+- sha: c386f59f8a672747dc3d0be1d4234b6039273d0e57933eb87caa20f56b9cca6d
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.0.9/meta-gplv2-d2f8b5cdb285b72a4ed93450f6703ca27aa42e8a.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.0.9/meta-gplv2-d2f8b5cdb285b72a4ed93450f6703ca27aa42e8a.tar.bz2
+
+bitbake
+
+- Repository Location: :oe_git:`/bitbake`
+- Branch: :oe_git:`2.0 </bitbake/log/?h=2.0>`
+- Tag: :oe_git:`yocto-4.0.9 </bitbake/log/?h=yocto-4.0.9>`
+- Git Revision: :oe_git:`2802adb572eb73a3eb2725a74a9bbdaafc543fa7 </bitbake/commit/?id=2802adb572eb73a3eb2725a74a9bbdaafc543fa7>`
+- Release Artefact: bitbake-2802adb572eb73a3eb2725a74a9bbdaafc543fa7
+- sha: 5c6e713b5e26b3835c0773095c7a1bc1f8affa28316b33597220ed86f1f1b643
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.0.9/bitbake-2802adb572eb73a3eb2725a74a9bbdaafc543fa7.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.0.9/bitbake-2802adb572eb73a3eb2725a74a9bbdaafc543fa7.tar.bz2
+
+yocto-docs
+
+- Repository Location: :yocto_git:`/yocto-docs`
+- Branch: :yocto_git:`kirkstone </yocto-docs/log/?h=kirkstone>`
+- Tag: :yocto_git:`yocto-4.0.9 </yocto-docs/log/?h=yocto-4.0.9>`
+- Git Revision: :yocto_git:`86d0b38a97941ad52b1af220c7b801a399d50e93 </yocto-docs/commit/?id=86d0b38a97941ad52b1af220c7b801a399d50e93>`
+
diff --git a/documentation/migration-guides/release-notes-4.0.rst b/documentation/migration-guides/release-notes-4.0.rst
index eaa40f9317..a5d66c0410 100644
--- a/documentation/migration-guides/release-notes-4.0.rst
+++ b/documentation/migration-guides/release-notes-4.0.rst
@@ -1,3 +1,5 @@
+.. SPDX-License-Identifier: CC-BY-SA-2.0-UK
+
Release notes for 4.0 (kirkstone)
---------------------------------
@@ -8,9 +10,10 @@ New Features / Enhancements in 4.0
- Linux kernel 5.15, glibc 2.35 and ~300 other recipe upgrades
-- Reproducibility: this release fixes the reproducibility issues with ``rust-llvm`` and
- ``golang``. Recipes in OpenEmbedded-Core are now fully reproducible. Functionality
- previously in the optional "reproducible" class has been merged into the base class.
+- Reproducibility: this release fixes the reproducibility issues with
+ ``rust-llvm`` and ``golang``. Recipes in OpenEmbedded-Core are now fully
+ reproducible. Functionality previously in the optional "reproducible"
+ class has been merged into the :ref:`ref-classes-base` class.
- Network access is now disabled by default for tasks other than where it is expected to ensure build integrity (where host kernel supports it)
@@ -22,21 +25,20 @@ New Features / Enhancements in 4.0
BB_SIGNATURE_HANDLER = "OEEquivHash"
BB_HASHSERVE = "auto"
- BB_HASHSERVE_UPSTREAM = "typhoon.yocto.io:8687"
- SSTATE_MIRRORS ?= "file://.* https://sstate.yoctoproject.org/&YOCTO_DOC_VERSION;/PATH;downloadfilename=PATH"
+ BB_HASHSERVE_UPSTREAM = "hashserv.yocto.io:8687"
+ SSTATE_MIRRORS ?= "file://.* https://sstate.yoctoproject.org/all/PATH;downloadfilename=PATH"
- The Python package build process is now based on `wheels <https://pythonwheels.com/>`__
in line with the upstream direction.
-- New :ref:`overlayfs <ref-classes-overlayfs>` and
- :ref:`overlayfs-etc <ref-classes-overlayfs-etc>` classes and
- ``overlayroot`` support in the initramfs framework to make it easier to
+- New :ref:`ref-classes-overlayfs` and :ref:`ref-classes-overlayfs-etc` classes and
+ ``overlayroot`` support in the :term:`Initramfs` framework to make it easier to
overlay read-only filesystems (for example) with
- `OverlayFS <https://en.wikipedia.org/wiki/OverlayFS>`__.
+ :wikipedia:`OverlayFS <OverlayFS>`.
- Inclusive language adjustments to some variable names - see the
:ref:`4.0 migration guide <migration-4.0-inclusive-language>` for details.
-
+
- New recipes:
- ``buildtools-docs-tarball``
@@ -104,7 +106,7 @@ New Features / Enhancements in 4.0
- Shared state (sstate) improvements:
- - Switched to `ZStandard (zstd) <https://en.wikipedia.org/wiki/Zstd>`__ instead
+ - Switched to :wikipedia:`ZStandard (zstd) <Zstd>` instead
of Gzip, for better performance.
- Allow validation of sstate signatures against a list of keys
- Improved error messages and exception handling
@@ -112,8 +114,8 @@ New Features / Enhancements in 4.0
- BitBake enhancements:
- Fetcher enhancements:
-
- - New :ref:`bitbake:bitbake-user-manual/bitbake-user-manual-fetching:crate fetcher (\`\`crate://\`\`)` for Rust packages
+
+ - New :ref:`bitbake-user-manual/bitbake-user-manual-fetching:crate fetcher (\`\`crate://\`\`)` for Rust packages
- Added striplevel support to unpack
- git: Add a warning asking users to set a branch in git urls
- git: Allow git fetcher to support subdir param
@@ -125,7 +127,7 @@ New Features / Enhancements in 4.0
- ssh: now supports checkstatus, allows : in URLs (both required for use with sstate) and no longer requires username
- wget: add redirectauth parameter
- wget: add 30s timeout for checkstatus calls
-
+
- Show warnings for append/prepend/remove operators combined with +=/.=
- Add bb.warnonce() and bb.erroronce() log methods
- Improved setscene task display
@@ -138,12 +140,12 @@ New Features / Enhancements in 4.0
- Architecture-specific enhancements:
- ARM:
-
+
- tune-cortexa72: Enable the crc extension by default for cortexa72
- qemuarm64: Add tiny ktype to qemuarm64 bsp
- armv9a/tune: Add the support for the Neoverse N2 core
- arch-armv8-5a.inc: Add tune include for armv8.5a
- - grub-efi: Add xen_boot support when 'xen' is in DISTRO_FEATURES for aarch64
+ - grub-efi: Add xen_boot support when 'xen' is in :term:`DISTRO_FEATURES` for aarch64
- tune-cortexa73: Introduce cortexa73-crypto tune
- libacpi: Build libacpi also for 'aarch64' machines
- core-image-tiny-initramfs: Mark recipe as 32 bit ARM compatible
@@ -161,28 +163,28 @@ New Features / Enhancements in 4.0
- linux-yocto-dev: add qemuriscv32
- packagegroup-core-tools-profile: Enable systemtap for riscv64
- qemuriscv: Use virtio-tablet-pci for mouse
-
+
- x86:
- kernel-yocto: conditionally enable stack protection checking on x86-64
- Kernel-related enhancements:
- - Allow initramfs to be built from a separate multiconfig
+ - Allow :term:`Initramfs` to be built from a separate multiconfig
- Make kernel-base recommend kernel-image, not depend (allowing images containing kernel modules without kernel image)
- linux-yocto: split vtpm for more granular inclusion
- linux-yocto: cfg/debug: add configs for kcsan
- linux-yocto: cfg: add kcov feature fragment
- linux-yocto: export pkgconfig variables to devshell
- linux-yocto-dev: use versioned branch as default
- - New ``KERNEL_DEBUG_TIMESTAMPS`` variable (to replace removed ``BUILD_REPRODUCIBLE_BINARIES`` for the kernel)
+ - New :term:`KERNEL_DEBUG_TIMESTAMPS` variable (to replace removed ``BUILD_REPRODUCIBLE_BINARIES`` for the kernel)
- Introduce python3-dtschema-wrapper in preparation for mandatory schema checking on dtb files in 5.16
- Allow disabling kernel artifact symlink creation
- Allow changing default .bin kernel artifact extension
- FIT image related enhancements:
- - New ``FIT_SUPPORTED_INITRAMFS_FSTYPES`` variable to allow extending initramfs image types to look for
+ - New ``FIT_SUPPORTED_INITRAMFS_FSTYPES`` variable to allow extending :term:`Initramfs` image types to look for
- New ``FIT_CONF_PREFIX`` variable to allow overriding FIT configuration prefix
- Use 'bbnote' for better logging
@@ -197,7 +199,7 @@ New Features / Enhancements in 4.0
- yocto-check-layer: improved README checks
- cve-check: add json output format
- cve-check: add coverage statistics on recipes with/without CVEs
-- Added mirrors for kernel sources and uninative binaries on kernel.org
+- Added mirrors for kernel sources and uninative binaries on kernel.org
- glibc and binutils recipes now use shallow mirror tarballs for faster fetching
- When patching fails, show more information on the fatal error
@@ -215,7 +217,7 @@ New Features / Enhancements in 4.0
- Ensure addition of patch-fuzz retriggers do_qa_patch
- Added a sanity check for allarch packagegroups
-- create-spdx class improvements:
+- :ref:`ref-classes-create-spdx` class improvements:
- Get SPDX-License-Identifier from source files
- Generate manifest also for SDKs
@@ -232,15 +234,17 @@ New Features / Enhancements in 4.0
- Detect more known licenses in Python code
- Move license md5sums data into CSV files
- npm: Use README as license fallback
-
+
- SDK-related enhancements:
- - Extended recipes to ``nativesdk``: ``cargo``, ``librsvg``, ``libstd-rs``, ``libva``, ``python3-docutil``, ``python3-packaging``
- - Enabled nativesdk recipes to find a correct version of the rust cross compiler
+ - Extended recipes to :ref:`ref-classes-nativesdk`: ``cargo``,
+ ``librsvg``, ``libstd-rs``, ``libva``, ``python3-docutil``, ``python3-packaging``
+ - Enabled :ref:`ref-classes-nativesdk` recipes to find a correct version
+ of the rust cross compiler
- Support creating per-toolchain cmake file in SDK
- Rust enhancements:
-
+
- New python_setuptools3_rust class to enable building python extensions in Rust
- classes/meson: Add optional rust definitions
@@ -276,7 +280,7 @@ New Features / Enhancements in 4.0
- volatile-binds: SELinux and overlayfs extensions in mount-copybind
- gtk-icon-cache: Allow using gtk4
- kmod: Add an exclude directive to depmod
-- os-release: add os-release-initrd package for use in systemd-based initramfs images
+- os-release: add os-release-initrd package for use in systemd-based :term:`Initramfs` images
- gstreamer1.0-plugins-base: add support for graphene
- gpg-sign: Add parameters to gpg signature function
- package_manager: sign DEB package feeds
@@ -289,7 +293,7 @@ New Features / Enhancements in 4.0
- libxkbcommon: allow building of API documentation
- libxkbcommon: split libraries and xkbcli into separate packages
- systemd: move systemd shared library into its own package
-- systemd: Minimize udev package size if DISTRO_FEATURES doen't contain sysvinit
+- systemd: Minimize udev package size if :term:`DISTRO_FEATURES` doen't contain sysvinit
Known Issues in 4.0
~~~~~~~~~~~~~~~~~~~
@@ -301,22 +305,22 @@ Known Issues in 4.0
Recipe License changes in 4.0
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-The following corrections have been made to the LICENSE values set by recipes:
+The following corrections have been made to the :term:`LICENSE` values set by recipes:
-* cmake: add BSD-1-Clause & MIT & BSD-2-Clause to LICENSE due to additional vendored libraries in native/target context
-* gettext: extend LICENSE conditional upon PACKAGECONFIG (due to vendored libraries)
+* cmake: add BSD-1-Clause & MIT & BSD-2-Clause to :term:`LICENSE` due to additional vendored libraries in native/target context
+* gettext: extend :term:`LICENSE` conditional upon :term:`PACKAGECONFIG` (due to vendored libraries)
* gstreamer1.0: update licenses of all modules to LGPL-2.1-or-later (with some exceptions that are GPL-2.0-or-later)
* gstreamer1.0-plugins-bad/ugly: use the GPL-2.0-or-later only when it is in use
* kern-tools-native: add missing MIT license due to Kconfiglib
-* libcap: add pam_cap license to LIC_FILES_CHKSUM if pam is enabled
+* libcap: add pam_cap license to :term:`LIC_FILES_CHKSUM` if pam is enabled
* libidn2: add Unicode-DFS-2016 license
-* libsdl2: add BSD-2-Clause to LICENSE due to default yuv2rgb and hidapi inclusion
-* libx11-compose-data: update LICENSE to "MIT & MIT-style & BSD-1-Clause & HPND & HPND-sell-variant" to better reflect reality
-* libx11: update LICENSE to "MIT & MIT-style & BSD-1-Clause & HPND & HPND-sell-variant" to better reflect reality
-* libxshmfence: correct LICENSE - MIT -> HPND
-* newlib: add BSD-3-Clause to LICENSE
-* python3-idna: correct LICENSE - Unicode -> Unicode-TOU
-* python3-pip: add "Apache-2.0 & MPL-2.0 & LGPL-2.1-only & BSD-3-Clause & PSF-2.0 & BSD-2-Clause" to LICENSE due to vendored libraries
+* libsdl2: add BSD-2-Clause to :term:`LICENSE` due to default yuv2rgb and hidapi inclusion
+* libx11-compose-data: update :term:`LICENSE` to "MIT & MIT-style & BSD-1-Clause & HPND & HPND-sell-variant" to better reflect reality
+* libx11: update :term:`LICENSE` to "MIT & MIT-style & BSD-1-Clause & HPND & HPND-sell-variant" to better reflect reality
+* libxshmfence: correct :term:`LICENSE` - MIT -> HPND
+* newlib: add BSD-3-Clause to :term:`LICENSE`
+* python3-idna: correct :term:`LICENSE` - Unicode -> Unicode-TOU
+* python3-pip: add "Apache-2.0 & MPL-2.0 & LGPL-2.1-only & BSD-3-Clause & PSF-2.0 & BSD-2-Clause" to :term:`LICENSE` due to vendored libraries
Other license-related notes:
@@ -866,7 +870,7 @@ Repositories / Downloads for 4.0
poky
-- Repository Location: https://git.yoctoproject.org/git/poky
+- Repository Location: :yocto_git:`/poky`
- Branch: :yocto_git:`kirkstone </poky/log/?h=kirkstone>`
- Tag: :yocto_git:`yocto-4.0 </poky/tag/?h=yocto-4.0>`
- Git Revision: :yocto_git:`00cfdde791a0176c134f31e5a09eff725e75b905 </poky/commit/?id=00cfdde791a0176c134f31e5a09eff725e75b905>`
@@ -890,7 +894,7 @@ openembedded-core
meta-mingw
-- Repository Location: https://git.yoctoproject.org/git/meta-mingw
+- Repository Location: :yocto_git:`/meta-mingw`
- Branch: :yocto_git:`kirkstone </meta-mingw/log/?h=kirkstone>`
- Tag: :yocto_git:`yocto-4.0 </meta-mingw/tag/?h=yocto-4.0>`
- Git Revision: :yocto_git:`a90614a6498c3345704e9611f2842eb933dc51c1 </meta-mingw/commit/?id=a90614a6498c3345704e9611f2842eb933dc51c1>`
@@ -902,7 +906,7 @@ meta-mingw
meta-gplv2
-- Repository Location: https://git.yoctoproject.org/git/meta-gplv2
+- Repository Location: :yocto_git:`/meta-gplv2`
- Branch: :yocto_git:`kirkstone </meta-gplv2/log/?h=kirkstone>`
- Tag: :yocto_git:`yocto-4.0 </meta-gplv2/tag/?h=yocto-4.0>`
- Git Revision: :yocto_git:`d2f8b5cdb285b72a4ed93450f6703ca27aa42e8a </meta-mingw/commit/?id=d2f8b5cdb285b72a4ed93450f6703ca27aa42e8a>`
@@ -926,7 +930,7 @@ bitbake
yocto-docs
-- Repository Location: https://git.yoctoproject.org/git/yocto-docs
+- Repository Location: :yocto_git:`/yocto-docs`
- Branch: :yocto_git:`kirkstone </yocto-docs/log/?h=kirkstone>`
- Tag: :yocto_git:`yocto-4.0 </yocto-docs/tag/?h=yocto-4.0>`
- Git Revision: :yocto_git:`a6f571ad5b087385cad8765ed455c4b4eaeebca6 </yocto-docs/commit/?id=a6f571ad5b087385cad8765ed455c4b4eaeebca6>`
diff --git a/documentation/migration-guides/release-notes-4.1.1.rst b/documentation/migration-guides/release-notes-4.1.1.rst
new file mode 100644
index 0000000000..4f31fbf1c7
--- /dev/null
+++ b/documentation/migration-guides/release-notes-4.1.1.rst
@@ -0,0 +1,319 @@
+.. SPDX-License-Identifier: CC-BY-SA-2.0-UK
+
+Release notes for Yocto-4.1.1 (Langdale)
+----------------------------------------
+
+Security Fixes in Yocto-4.1.1
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+- curl: Fix :cve:`2022-32221`, :cve:`2022-35260`, :cve:`2022-42915` and :cve:`2022-42916`
+- libx11: Fix :cve:`2022-3554`
+- lighttpd: Fix :cve:`2022-41556`
+- openssl: Fix :cve:`2022-3358`, :cve:`2022-3602` and :cve:`2022-3786`
+- pixman: Fix :cve:`2022-44638`
+- qemu: Fix :cve:`2022-3165`
+- sudo: Fix :cve:`2022-43995`
+- tiff: Fix :cve:`2022-3599`, :cve:`2022-3597`, :cve:`2022-3626`, :cve:`2022-3627`, :cve:`2022-3570` and :cve:`2022-3598`
+- xserver-xorg: Fix :cve:`2022-3550` and :cve:`2022-3551`
+- xserver-xorg: Ignore :cve:`2022-3553`
+
+
+Fixes in Yocto-4.1.1
+~~~~~~~~~~~~~~~~~~~~
+
+- Add 4.1 migration guide & release notes
+- bitbake: asyncrpc: serv: correct closed client socket detection
+- bitbake: bitbake-user-manual: details about variable flags starting with underscore
+- bitbake: bitbake: bitbake-layers: checkout layer(s) branch when clone exists
+- bitbake: bitbake: user-manual: inform about spaces in :remove
+- bitbake: doc: bitbake-user-manual: expand description of BB_PRESSURE_MAX variables
+- bitbake: fetch2/git: don't set core.fsyncobjectfiles=0
+- bitbake: tests/fetch: Allow handling of a file:// url within a submodule
+- bitbake: tests: bb.tests.fetch.URLHandle: add 2 new tests
+- bitbake: utils/ply: Update md5 to better report errors with hashlib
+- bluez5: add dbus to :term:`RDEPENDS`
+- build-appliance-image: Update to langdale head revision
+- buildconf: compare abspath
+- buildtools-tarball: export certificates to python and curl
+- cmake-native: Fix host tool contamination
+- create-spdx.bbclass: remove unused SPDX_INCLUDE_PACKAGED
+- create-spdx: Remove ";name=..." for downloadLocation
+- cve-update-db-native: add timeout to urlopen() calls
+- dev-manual: common-tasks.rst: add reference to "do_clean" task
+- dev-manual: common-tasks.rst: add reference to "do_listtasks" task
+- docs: add support for langdale (4.1) release
+- dropbear: add pam to :term:`PACKAGECONFIG`
+- externalsrc.bbclass: fix git repo detection
+- externalsrc.bbclass: Remove a trailing slash from ${B}
+- externalsrc: move back to classes
+- gcc: Allow -Wno-error=poison-system-directories to take effect
+- glib-2.0: fix rare GFileInfo test case failure
+- gnutls: Unified package names to lower-case
+- gnutls: upgrade 3.7.7 -> 3.7.8
+- grub: disable build on armv7ve/a with hardfp
+- gstreamer1.0-libav: fix errors with ffmpeg 5.x
+- ifupdown: upgrade 0.8.37 -> 0.8.39
+- insane.bbclass: Allow hashlib version that only accepts on parameter
+- install-buildtools: support buildtools-make-tarball and update to 4.1
+- kern-tools: fix relative path processing
+- kernel-fitimage: Use KERNEL_OUTPUT_DIR where appropriate
+- kernel-yocto: improve fatal error messages of symbol_why.py
+- kernel: Clear :term:`SYSROOT_DIRS` instead of replacing sysroot_stage_all
+- libcap: upgrade 2.65 -> 2.66
+- libical: upgrade 3.0.14 -> 3.0.15
+- libksba: upgrade 1.6.0 -> 1.6.2
+- libsdl2: upgrade 2.24.0 -> 2.24.1
+- lighttpd: upgrade 1.4.66 -> 1.4.67
+- linux-firmware: package amdgpu firmware
+- linux-firmware: split rtl8761 firmware
+- linux-yocto/5.15: update to v5.15.72
+- linux-yocto/5.19: update to v5.19.14
+- linux-yocto: add efi entry for machine features
+- lttng-modules: upgrade 2.13.4 -> 2.13.5
+- lttng-ust: upgrade 2.13.4 -> 2.13.5
+- manuals: add reference to "do_configure" task
+- manuals: add reference to the "do_compile" task
+- manuals: add reference to the "do_install" task
+- manuals: add reference to the "do_kernel_configcheck" task
+- manuals: add reference to the "do_populate_sdk" task
+- manuals: add references to "do_package_write_*" tasks
+- manuals: add references to "do_populate_sysroot" task
+- manuals: add references to the "do_build" task
+- manuals: add references to the "do_bundle_initramfs" task
+- manuals: add references to the "do_cleanall" task
+- manuals: add references to the "do_deploy" task
+- manuals: add references to the "do_devshell" task
+- manuals: add references to the "do_fetch" task
+- manuals: add references to the "do_image" task
+- manuals: add references to the "do_kernel_configme" task
+- manuals: add references to the "do_package" task
+- manuals: add references to the "do_package_qa" task
+- manuals: add references to the "do_patch" task
+- manuals: add references to the "do_rootfs" task
+- manuals: add references to the "do_unpack" task
+- manuals: fix misc typos
+- manuals: improve initramfs details
+- manuals: updates for building on Windows (WSL 2)
+- mesa: only apply patch to fix ALWAYS_INLINE for native
+- mesa: update 22.2.0 -> 22.2.2
+- meson: make wrapper options sub-command specific
+- meson: upgrade 0.63.2 -> 0.63.3
+- migration guides: 3.4: remove spurious space in example
+- migration guides: add release notes for 4.0.4
+- migration-general: add section on using buildhistory
+- migration-guides/release-notes-4.1.rst: add more known issues
+- migration-guides/release-notes-4.1.rst: update Repositories / Downloads
+- migration-guides: add known issues for 4.1
+- migration-guides: add reference to the "do_shared_workdir" task
+- migration-guides: use contributor real name
+- migration-guides: use contributor real name
+- mirrors.bbclass: use shallow tarball for binutils-native
+- mtools: upgrade 4.0.40 -> 4.0.41
+- numactl: upgrade 2.0.15 -> 2.0.16
+- oe/packagemanager/rpm: don't leak file objects
+- openssl: export necessary env vars in SDK
+- openssl: Fix SSL_CERT_FILE to match ca-certs location
+- openssl: Upgrade 3.0.5 -> 3.0.7
+- opkg-utils: use a git clone, not a dynamic snapshot
+- overlayfs: Allow not used mount points
+- overview-manual: concepts.rst: add reference to "do_packagedata" task
+- overview-manual: concepts.rst: add reference to "do_populate_sdk_ext" task
+- overview-manual: concepts.rst: fix formating and add references
+- own-mirrors: add crate
+- pango: upgrade 1.50.9 -> 1.50.10
+- perf: Depend on native setuptools3
+- poky.conf: bump version for 4.1.1
+- poky.conf: remove Ubuntu 21.10
+- populate_sdk_base: ensure ptest-pkgs pulls in ptest-runner
+- psplash: add psplash-default in rdepends
+- qemu-native: Add :term:`PACKAGECONFIG` option for jack
+- quilt: backport a patch to address grep 3.8 failures
+- ref-manual/faq.rst: update references to products built with OE / Yocto Project
+- ref-manual/variables.rst: clarify sentence
+- ref-manual: add a note to ssh-server-dropbear feature
+- ref-manual: add :term:`CVE_CHECK_SHOW_WARNINGS`
+- ref-manual: add :term:`CVE_DB_UPDATE_INTERVAL`
+- ref-manual: add :term:`DEV_PKG_DEPENDENCY`
+- ref-manual: add :term:`DISABLE_STATIC`
+- ref-manual: add :term:`FIT_PAD_ALG`
+- ref-manual: add :term:`KERNEL_DEPLOY_DEPEND`
+- ref-manual: add missing features
+- ref-manual: add :term:`MOUNT_BASE` variable
+- ref-manual: add overlayfs class variables
+- ref-manual: add :term:`OVERLAYFS_ETC_EXPOSE_LOWER`
+- ref-manual: add :term:`OVERLAYFS_QA_SKIP`
+- ref-manual: add previous overlayfs-etc variables
+- ref-manual: add pypi class
+- ref-manual: add :term:`SDK_TOOLCHAIN_LANGS`
+- ref-manual: add section for create-spdx class
+- ref-manual: add serial-autologin-root to :term:`IMAGE_FEATURES` documentation
+- ref-manual: add :term:`UBOOT_MKIMAGE_KERNEL_TYPE`
+- ref-manual: add :term:`WATCHDOG_TIMEOUT` to variable glossary
+- ref-manual: add :term:`WIRELESS_DAEMON`
+- ref-manual: classes.rst: add links to all references to a class
+- ref-manual: complementary package installation recommends
+- ref-manual: correct default for :term:`BUILDHISTORY_COMMIT`
+- ref-manual: document new github-releases class
+- ref-manual: expand documentation on image-buildinfo class
+- ref-manual: faq.rst: reorganize into subsections, contents at top
+- ref-manual: remove reference to largefile in :term:`DISTRO_FEATURES`
+- ref-manual: remove reference to testimage-auto class
+- ref-manual: system-requirements: Ubuntu 22.04 now supported
+- ref-manual: tasks.rst: add reference to the "do_image_complete" task
+- ref-manual: tasks.rst: add reference to the "do_kernel_checkout" task
+- ref-manual: tasks.rst: add reference to the "do_kernel_metadata" task
+- ref-manual: tasks.rst: add reference to the "do_validate_branches" task
+- ref-manual: tasks.rst: add references to the "do_cleansstate" task
+- ref-manual: update buildpaths QA check documentation
+- ref-manual: update pypi documentation for :term:`CVE_PRODUCT` default in 4.1
+- ref-manual: variables.rst: add reference to "do_populate_lic" task
+- release-notes-4.1.rst remove bitbake-layers subcommand argument
+- runqemu: Do not perturb script environment
+- runqemu: Fix gl-es argument from causing other arguments to be ignored
+- rust-target-config: match riscv target names with what rust expects
+- rust: install rustfmt for riscv32 as well
+- sanity: check for GNU tar specifically
+- scripts/oe-check-sstate: cleanup
+- scripts/oe-check-sstate: force build to run for all targets, specifically populate_sysroot
+- sdk-manual: correct the bitbake target for a unified sysroot build
+- shadow: update 4.12.1 -> 4.12.3
+- systemd: add systemd-creds and systemd-cryptenroll to systemd-extra-utils
+- test-manual: fix typo in machine name
+- tiff: fix a typo for :cve:`2022-2953`.patch
+- u-boot: Add savedefconfig task
+- u-boot: Remove duplicate inherit of cml1
+- uboot-sign: Fix using wrong KEY_REQ_ARGS
+- Update documentation for classes split
+- vim: upgrade to 9.0.0820
+- vulkan-samples: add lfs=0 to :term:`SRC_URI` to avoid git smudge errors in do_unpack
+- wic: honor the :term:`SOURCE_DATE_EPOCH` in case of updated fstab
+- wic: swap partitions are not added to fstab
+- wpebackend-fdo: upgrade 1.12.1 -> 1.14.0
+- xserver-xorg: move some recommended dependencies in required
+- zlib: do out-of-tree builds
+- zlib: upgrade 1.2.12 -> 1.2.13
+- zlib: use .gz archive and set a PREMIRROR
+
+
+Known Issues in Yocto-4.1.1
+~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+- N/A
+
+
+
+Contributors to Yocto-4.1.1
+~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+- Adrian Freihofer
+- Alex Kiernan
+- Alexander Kanavin
+- Bartosz Golaszewski
+- Bernhard Rosenkränzer
+- Bruce Ashfield
+- Chen Qi
+- Christian Eggers
+- Claus Stovgaard
+- Ed Tanous
+- Etienne Cordonnier
+- Frank de Brabander
+- Hitendra Prajapati
+- Jan-Simon Moeller
+- Jeremy Puhlman
+- Johan Korsnes
+- Jon Mason
+- Jose Quaresma
+- Joshua Watt
+- Justin Bronder
+- Kai Kang
+- Keiya Nobuta
+- Khem Raj
+- Lee Chee Yang
+- Liam Beguin
+- Luca Boccassi
+- Mark Asselstine
+- Mark Hatle
+- Markus Volk
+- Martin Jansa
+- Michael Opdenacker
+- Ming Liu
+- Mingli Yu
+- Paul Eggleton
+- Peter Kjellerstedt
+- Qiu, Zheng
+- Quentin Schulz
+- Richard Purdie
+- Robert Joslyn
+- Ross Burton
+- Sean Anderson
+- Sergei Zhmylev
+- Steve Sakoman
+- Takayasu Ito
+- Teoh Jay Shen
+- Thomas Perrot
+- Tim Orling
+- Vincent Davis Jr
+- Vyacheslav Yurkov
+- Ciaran Courtney
+- Wang Mingyu
+
+
+Repositories / Downloads for Yocto-4.1.1
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+poky
+
+- Repository Location: :yocto_git:`/poky`
+- Branch: :yocto_git:`langdale </poky/log/?h=langdale>`
+- Tag: :yocto_git:`yocto-4.1.1 </poky/log/?h=yocto-4.1.1>`
+- Git Revision: :yocto_git:`d3cda9a3e0837eb2ac5482f5f2bd8e55e874feff </poky/commit/?id=d3cda9a3e0837eb2ac5482f5f2bd8e55e874feff>`
+- Release Artefact: poky-d3cda9a3e0837eb2ac5482f5f2bd8e55e874feff
+- sha: e92b694fbb74a26c7a875936dfeef4a13902f24b06127ee52f4d1c1e4b03ec24
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.1.1/poky-d3cda9a3e0837eb2ac5482f5f2bd8e55e874feff.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.1.1/poky-d3cda9a3e0837eb2ac5482f5f2bd8e55e874feff.tar.bz2
+
+openembedded-core
+
+- Repository Location: :oe_git:`/openembedded-core`
+- Branch: :oe_git:`langdale </openembedded-core/log/?h=langdale>`
+- Tag: :oe_git:`yocto-4.1.1 </openembedded-core/log/?h=yocto-4.1.1>`
+- Git Revision: :oe_git:`9237ffc4feee2dd6ff5bdd672072509ef9e82f6d </openembedded-core/commit/?id=9237ffc4feee2dd6ff5bdd672072509ef9e82f6d>`
+- Release Artefact: oecore-9237ffc4feee2dd6ff5bdd672072509ef9e82f6d
+- sha: d73198aef576f0fca0d746f9d805b1762c19c31786bc3f7d7326dfb2ed6fc1be
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.1.1/oecore-9237ffc4feee2dd6ff5bdd672072509ef9e82f6d.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.1.1/oecore-9237ffc4feee2dd6ff5bdd672072509ef9e82f6d.tar.bz2
+
+meta-mingw
+
+- Repository Location: :yocto_git:`/meta-mingw`
+- Branch: :yocto_git:`langdale </meta-mingw/log/?h=langdale>`
+- Tag: :yocto_git:`yocto-4.1.1 </meta-mingw/log/?h=yocto-4.1.1>`
+- Git Revision: :yocto_git:`b0067202db8573df3d23d199f82987cebe1bee2c </meta-mingw/commit/?id=b0067202db8573df3d23d199f82987cebe1bee2c>`
+- Release Artefact: meta-mingw-b0067202db8573df3d23d199f82987cebe1bee2c
+- sha: 704f2940322b81ce774e9cbd27c3cfa843111d497dc7b1eeaa39cd694d9a2366
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.1.1/meta-mingw-b0067202db8573df3d23d199f82987cebe1bee2c.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.1.1/meta-mingw-b0067202db8573df3d23d199f82987cebe1bee2c.tar.bz2
+
+bitbake
+
+- Repository Location: :oe_git:`/bitbake`
+- Branch: :oe_git:`2.2 </bitbake/log/?h=2.2>`
+- Tag: :oe_git:`yocto-4.1.1 </bitbake/log/?h=yocto-4.1.1>`
+- Git Revision: :oe_git:`138dd7883ee2c521900b29985b6d24a23d96563c </bitbake/commit/?id=138dd7883ee2c521900b29985b6d24a23d96563c>`
+- Release Artefact: bitbake-138dd7883ee2c521900b29985b6d24a23d96563c
+- sha: 5dc5aff4b4a801253c627cdaab6b1a0ceee2c531f1a6b166d85d1265a35d4be5
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.1.1/bitbake-138dd7883ee2c521900b29985b6d24a23d96563c.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.1.1/bitbake-138dd7883ee2c521900b29985b6d24a23d96563c.tar.bz2
+
+yocto-docs
+
+- Repository Location: :yocto_git:`/yocto-docs`
+- Branch: :yocto_git:`langdale </yocto-docs/log/?h=langdale>`
+- Tag: :yocto_git:`yocto-4.1.1 </yocto-docs/log/?h=yocto-4.1.1>`
+- Git Revision: :yocto_git:`8e0841c3418caa227c66a60327db09dfbe72054a </yocto-docs/commit/?id=8e0841c3418caa227c66a60327db09dfbe72054a>`
+
+
diff --git a/documentation/migration-guides/release-notes-4.1.2.rst b/documentation/migration-guides/release-notes-4.1.2.rst
new file mode 100644
index 0000000000..ee5d4ccc51
--- /dev/null
+++ b/documentation/migration-guides/release-notes-4.1.2.rst
@@ -0,0 +1,286 @@
+.. SPDX-License-Identifier: CC-BY-SA-2.0-UK
+
+Release notes for Yocto-4.1.2 (Langdale)
+----------------------------------------
+
+Security Fixes in Yocto-4.1.2
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+- sudo: Fix :cve:`2022-43995`
+- binutils: Fix :cve:`2022-4285`
+- cairo: update patch for :cve:`2019-6461` with upstream solution
+- expat: Fix :cve:`2022-43680`
+- ffmpeg: Fix :cve:`2022-3964` and :cve:`2022-3965`
+- grub: Fix :cve:`2022-28736`
+- libarchive: Fix :cve:`2022-36227`
+- libpam: Fix :cve:`2022-28321`
+- libpng: Fix :cve:`2019-6129`
+- ruby: Fix :cve:`2022-28738` and :cve:`2022-28739`
+- tiff: Fix :cve:`2022-3970`
+- vim: Fix :cve:`2022-4141`
+
+
+Fixes in Yocto-4.1.2
+~~~~~~~~~~~~~~~~~~~~
+
+- Expand create-spdx class documentation
+- Expand cve-check class documentation
+- archiver: avoid using machine variable as it breaks multiconfig
+- babeltrace: Upgrade to 1.5.11
+- backport SPDX documentation and vulnerability improvements
+- baremetal-image: Avoid overriding qemu variables from IMAGE_CLASSES
+- bc: extend to nativesdk
+- bind: Upgrade to 9.18.9
+- bitbake.conf: Drop export of SOURCE_DATE_EPOCH_FALLBACK
+- bitbake: gitsm: Fix regression in gitsm submodule path parsing
+- bitbake: runqueue: Fix race issues around hash equivalence and sstate reuse
+- bluez5: Point hciattach bcm43xx firmware search path to /lib/firmware
+- build-appliance-image: Update to langdale head revision
+- cargo_common.bbclass: Fix typos
+- classes: make TOOLCHAIN more permissive for kernel
+- cmake: Upgrade to 3.24.2
+- combo-layer: add sync-revs command
+- combo-layer: dont use bb.utils.rename
+- combo-layer: remove unused import
+- common-tasks.rst: fix oeqa runtime test path
+- create-spdx: default share_src for shared sources
+- curl: Correct LICENSE from MIT-open-group to curl
+- dbus: Add missing CVE product name
+- devtool/upgrade: correctly handle recipes where S is a subdir of upstream tree
+- dhcpcd: fix to work with systemd
+- docs: kernel-dev: faq: update tip on how to not include kernel in image
+- docs: migration-4.0: specify variable name change for kernel inclusion in image recipe
+- expat: upgrade to 2.5.0
+- externalsrc: fix lookup for .gitmodules
+- ffmpeg: Upgrade to 5.1.2
+- gcc-shared-source: Fix source date epoch handling
+- gcc-source: Drop gengtype manipulation
+- gcc-source: Ensure deploy_source_date_epoch sstate hash doesn't change
+- gcc-source: Fix gengtypes race
+- gdk-pixbuf: Upgrade to 2.42.10
+- get_module_deps3.py: Check attribute '__file__'
+- glibc-tests: correctly pull in the actual tests when installing -ptest package
+- gnomebase.bbclass: return the whole version for tarball directory if it is a number
+- go-crosssdk: avoid host contamination by GOCACHE
+- go: Update reproducibility patch to fix panic errors
+- go: submit patch upstream
+- go: Upgrade to 1.19.3
+- gptfdisk: remove warning message from target system
+- groff: submit patches upstream
+- gstreamer1.0: Upgrade to 1.20.5
+- help2man: Upgrade to 1.49.3
+- insane: add codeload.github.com to src-uri-bad checkz
+- inetutils: Upgrade to 2.4
+- iso-codes: Upgrade to 4.12.0
+- kbd: Don't build tests
+- kea: submit patch upstream
+- kern-tools: integrate ZFS speedup patch
+- kernel.bbclass: Include randstruct seed assets in STAGING_KERNEL_BUILDDIR
+- kernel.bbclass: make KERNEL_DEBUG_TIMESTAMPS work at rebuild
+- kernel.bbclass: remove empty module directories to prevent QA issues
+- lib/buildstats: fix parsing of trees with reduced_proc_pressure directories
+- libdrm: Remove libdrm-kms package
+- libepoxy: convert to git
+- libepoxy: remove upstreamed patch
+- libepoxy: Upgrade to 1.5.10
+- libffi: submit patch upstream
+- libffi: Upgrade to 3.4.4
+- libical: Upgrade to 3.0.16
+- libnewt: Upgrade to 0.52.23
+- libsdl2: Upgrade to 2.24.2
+- libpng: Upgrade to 1.6.39
+- libuv: fixup SRC_URI
+- libxcrypt-compat: Upgrade to 4.4.33
+- libxcrypt: Upgrade to 4.4.30
+- libxml2: fix test data checksums
+- linux-firmware: add new fw file to ${PN}-qcom-adreno-a530
+- linux-firmware: don't put the firmware into the sysroot
+- linux-firmware: Upgrade to 20221109
+- linux-yocto/5.15: fix CONFIG_CRYPTO_CCM mismatch warnings
+- linux-yocto/5.15: update genericx86* machines to v5.15.72
+- linux-yocto/5.15: Upgrade to v5.15.78
+- linux-yocto/5.19: cfg: intel and vesa updates
+- linux-yocto/5.19: fix CONFIG_CRYPTO_CCM mismatch warnings
+- linux-yocto/5.19: fix elfutils run-backtrace-native-core ptest failure
+- linux-yocto/5.19: security.cfg: remove configs which have been dropped
+- linux-yocto/5.19: update genericx86* machines to v5.19.14
+- linux-yocto/5.19: Upgrade to v5.19.17
+- lsof: add update-alternatives logic
+- lttng-modules: Upgrade to 2.13.7
+- lttng-tools: submit determinism.patch upstream
+- manuals: add 4.0.5 and 4.0.6 release notes
+- mesa: do not rely on native llvm-config in target sysroot
+- mesa: Upgrade to 22.2.3
+- meta-selftest/staticids: add render group for systemd
+- mirrors.bbclass: update CPAN_MIRROR
+- mobile-broadband-provider-info: Upgrade to 20221107
+- mpfr: Upgrade to 4.1.1
+- mtd-utils: Upgrade to 2.1.5
+- oeqa/concurrencytest: Add number of failures to summary output
+- oeqa/runtime/dnf: rewrite test_dnf_installroot_usrmerge
+- oeqa/selftest/externalsrc: add test for srctree_hash_files
+- oeqa/selftest/lic_checksum: Cleanup changes to emptytest include
+- openssh: remove RRECOMMENDS to rng-tools for sshd package
+- opkg: Set correct info_dir and status_file in opkg.conf
+- opkg: Upgrade to 0.6.1
+- ovmf: correct patches status
+- package: Fix handling of minidebuginfo with newer binutils
+- pango: Make it build with ptest disabled
+- pango: replace a recipe fix with an upstream submitted patch
+- pango: Upgrade to 1.50.11
+- poky.conf: bump version for 4.1.2
+- psplash: consider the situation of psplash not exist for systemd
+- python3-mako: Upgrade to 1.2.3
+- qemu-helper-native: Correctly pass program name as argv[0]
+- qemu-helper-native: Re-write bridge helper as C program
+- qemu: Ensure libpng dependency is deterministic
+- qemuboot.bbclass: make sure runqemu boots bundled initramfs kernel image
+- resolvconf: make it work
+- rm_work: adjust dependency to make do_rm_work_all depend on do_rm_work
+- rm_work: exclude the SSTATETASKS from the rm_work tasks sinature
+- ruby: merge .inc into .bb
+- ruby: Upgrade to 3.1.3
+- rust: submit a rewritten version of crossbeam_atomic.patch upstream
+- sanity: Drop data finalize call
+- scripts: convert-overrides: Allow command-line customizations
+- selftest: add a copy of previous mtd-utils version to meta-selftest
+- socat: Upgrade to 1.7.4.4
+- sstate: Allow optimisation of do_deploy_archives task dependencies
+- sstatesig: emit more helpful error message when not finding sstate manifest
+- sstatesig: skip the rm_work task signature
+- sudo: Upgrade to 1.9.12p1
+- sysstat: Upgrade to 12.6.1
+- systemd: Consider PACKAGECONFIG in RRECOMMENDS
+- systemd: Make importd depend on glib-2.0 again
+- systemd: add group render to udev package
+- systemd: Upgrade to 251.8
+- tcl: correct patch status
+- tzdata: Upgrade to 2022g
+- vala: install vapigen-wrapper into /usr/bin/crosscripts and stage only that
+- valgrind: skip the boost_thread test on arm
+- vim: Upgrade to 9.0.0947
+- wic: make ext2/3/4 images reproducible
+- xwayland: libxshmfence is needed when dri3 is enabled
+- xwayland: Upgrade to 22.1.5
+- yocto-check-layer: Allow OE-Core to be tested
+
+
+Known Issues in Yocto-4.1.2
+~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+- N/A
+
+
+Contributors to Yocto-4.1.2
+~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+- Alejandro Hernandez Samaniego
+- Alex Kiernan
+- Alex Stewart
+- Alexander Kanavin
+- Alexey Smirnov
+- Bruce Ashfield
+- Carlos Alberto Lopez Perez
+- Chen Qi
+- Diego Sueiro
+- Dmitry Baryshkov
+- Enrico Jörns
+- Harald Seiler
+- Hitendra Prajapati
+- Jagadeesh Krishnanjanappa
+- Jose Quaresma
+- Joshua Watt
+- Kai Kang
+- Konrad Weihmann
+- Leon Anavi
+- Marek Vasut
+- Martin Jansa
+- Mathieu Dubois-Briand
+- Michael Opdenacker
+- Mikko Rapeli
+- Narpat Mali
+- Nathan Rossi
+- Niko Mauno
+- Ola x Nilsson
+- Ovidiu Panait
+- Pavel Zhukov
+- Peter Bergin
+- Peter Kjellerstedt
+- Peter Marko
+- Polampalli, Archana
+- Qiu, Zheng
+- Quentin Schulz
+- Randy MacLeod
+- Ranjitsinh Rathod
+- Ravula Adhitya Siddartha
+- Richard Purdie
+- Robert Andersson
+- Ross Burton
+- Ryan Eatmon
+- Sakib Sajal
+- Sandeep Gundlupet Raju
+- Sergei Zhmylev
+- Steve Sakoman
+- Tim Orling
+- Wang Mingyu
+- Xiangyu Chen
+- pgowda
+
+Repositories / Downloads for Yocto-4.1.2
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+poky
+
+- Repository Location: :yocto_git:`/poky`
+- Branch: :yocto_git:`langdale </poky/log/?h=langdale>`
+- Tag: :yocto_git:`yocto-4.1.2 </poky/log/?h=yocto-4.1.2>`
+- Git Revision: :yocto_git:`74c92e38c701e268406bb656b45ccd68471c217e </poky/commit/?id=74c92e38c701e268406bb656b45ccd68471c217e>`
+- Release Artefact: poky-74c92e38c701e268406bb656b45ccd68471c217e
+- sha: 06a2b304d0e928b62d81087797ae86115efe925c506bcb40c7d4747e14790bb0
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.1.2/poky-74c92e38c701e268406bb656b45ccd68471c217e.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.1.2/poky-74c92e38c701e268406bb656b45ccd68471c217e.tar.bz2
+
+openembedded-core
+
+- Repository Location: :oe_git:`/openembedded-core`
+- Branch: :oe_git:`langdale </openembedded-core/log/?h=langdale>`
+- Tag: :oe_git:`yocto-4.1.2 </openembedded-core/log/?h=yocto-4.1.2>`
+- Git Revision: :oe_git:`670f4f103b25897524d115c1f290ecae441fe4bd </openembedded-core/commit/?id=670f4f103b25897524d115c1f290ecae441fe4bd>`
+- Release Artefact: oecore-670f4f103b25897524d115c1f290ecae441fe4bd
+- sha: 09d77700e84efc738aef5713c5e86f19fa092f876d44b870789155cc1625ef04
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.1.2/oecore-670f4f103b25897524d115c1f290ecae441fe4bd.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.1.2/oecore-670f4f103b25897524d115c1f290ecae441fe4bd.tar.bz2
+
+meta-mingw
+
+- Repository Location: :yocto_git:`/meta-mingw`
+- Branch: :yocto_git:`langdale </meta-mingw/log/?h=langdale>`
+- Tag: :yocto_git:`yocto-4.1.2 </meta-mingw/log/?h=yocto-4.1.2>`
+- Git Revision: :yocto_git:`b0067202db8573df3d23d199f82987cebe1bee2c </meta-mingw/commit/?id=b0067202db8573df3d23d199f82987cebe1bee2c>`
+- Release Artefact: meta-mingw-b0067202db8573df3d23d199f82987cebe1bee2c
+- sha: 704f2940322b81ce774e9cbd27c3cfa843111d497dc7b1eeaa39cd694d9a2366
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.1.2/meta-mingw-b0067202db8573df3d23d199f82987cebe1bee2c.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.1.2/meta-mingw-b0067202db8573df3d23d199f82987cebe1bee2c.tar.bz2
+
+bitbake
+
+- Repository Location: :oe_git:`/bitbake`
+- Branch: :oe_git:`2.2 </bitbake/log/?h=2.2>`
+- Tag: :oe_git:`yocto-4.1.2 </bitbake/log/?h=yocto-4.1.2>`
+- Git Revision: :oe_git:`f0f166aee766b4bb1f8cf8b35dfc7d406c75e6a4 </bitbake/commit/?id=f0f166aee766b4bb1f8cf8b35dfc7d406c75e6a4>`
+- Release Artefact: bitbake-f0f166aee766b4bb1f8cf8b35dfc7d406c75e6a4
+- sha: 7faf97eca78afd3994e4e126e5f5908617408c340c6eff8cd7047e0b961e2d10
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.1.2/bitbake-f0f166aee766b4bb1f8cf8b35dfc7d406c75e6a4.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.1.2/bitbake-f0f166aee766b4bb1f8cf8b35dfc7d406c75e6a4.tar.bz2
+
+yocto-docs
+
+- Repository Location: :yocto_git:`/yocto-docs`
+- Branch: :yocto_git:`langdale </yocto-docs/log/?h=langdale>`
+- Tag: :yocto_git:`yocto-4.1.2 </yocto-docs/log/?h=yocto-4.1.2>`
+- Git Revision: :yocto_git:`30f5f9ece260fd600f0c0fa32fc2f1fc61cf7d1b </yocto-docs/commit/?id=30f5f9ece260fd600f0c0fa32fc2f1fc61cf7d1b>`
+
diff --git a/documentation/migration-guides/release-notes-4.1.3.rst b/documentation/migration-guides/release-notes-4.1.3.rst
new file mode 100644
index 0000000000..d8474cda68
--- /dev/null
+++ b/documentation/migration-guides/release-notes-4.1.3.rst
@@ -0,0 +1,317 @@
+.. SPDX-License-Identifier: CC-BY-SA-2.0-UK
+
+Release notes for Yocto-4.1.3 (Langdale)
+----------------------------------------
+
+Security Fixes in Yocto-4.1.3
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+- apr-util: Fix :cve:`2022-25147`
+- apr: Fix :cve:`2022-24963` and :cve:`2022-28331`
+- bind: Fix :cve:`2022-3094`, :cve:`2022-3736` and :cve:`2022-3924`
+- curl: Fix :cve:`2022-43551` and :cve:`2022-43552`
+- dbus: Fix :cve:`2022-42010`, :cve:`2022-42011` and :cve:`2022-42012`
+- git: Fix :cve:`2022-23521`, :cve:`2022-39253`, :cve:`2022-39260` and :cve:`2022-41903`
+- git: Ignore :cve:`2022-41953`
+- go: Fix :cve:`2022-41717` and :cve:`2022-41720`
+- grub2: Fix :cve:`2022-2601` and :cve:`2022-3775`
+- less: Fix :cve:`2022-46663`
+- libarchive: Fix :cve:`2022-36227`
+- libksba: Fix :cve:`2022-47629`
+- openssl: Fix :cve:`2022-3996`
+- pkgconf: Fix :cve:`2023-24056`
+- ppp: Fix :cve:`2022-4603`
+- sudo: Fix :cve:`2023-22809`
+- tar: Fix :cve:`2022-48303`
+- vim: Fix :cve:`2023-0049`, :cve:`2023-0051`, :cve:`2023-0054`, :cve:`2023-0288`, :cve:`2023-0433` and :cve:`2023-0512`
+- xserver-xorg: Fix :cve_mitre:`2023-0494`
+- xwayland: Fix :cve_mitre:`2023-0494`
+
+
+Fixes in Yocto-4.1.3
+~~~~~~~~~~~~~~~~~~~~
+
+- apr-util: Upgrade to 1.6.3
+- apr: Upgrade to 1.7.2
+- apt: fix do_package_qa failure
+- at: Change when files are copied
+- base.bbclass: Fix way to check ccache path
+- bblayers/makesetup: skip git repos that are submodules
+- bblayers/setupwriters/oe-setup-layers: create dir if not exists
+- bind: Upgrade to 9.18.11
+- bitbake-layers: fix a typo
+- bitbake: bb/utils: include SSL certificate paths in export_proxies
+- bitbake: fetch2/git: Clarify the meaning of namespace
+- bitbake: fetch2/git: Prevent git fetcher from fetching gitlab repository metadata
+- bitbake: process: log odd unlink events with bitbake.sock
+- bitbake: server/process: Add bitbake.sock race handling
+- bitbake: siggen: Fix inefficient string concatenation
+- bootchart2: Fix usrmerge support
+- bsp-guide: fix broken git URLs and missing word
+- build-appliance-image: Update to langdale head revision
+- buildtools-tarball: set pkg-config search path
+- busybox: Fix depmod patch
+- busybox: always start do_compile with orig config files
+- busybox: rm temporary files if do_compile was interrupted
+- cairo: fix CVE patches assigned wrong CVE number
+- classes/fs-uuid: Fix command output decoding issue
+- classes/populate_sdk_base: Append cleandirs
+- classes: image: Set empty weak default IMAGE_LINGUAS
+- cml1: remove redundant addtask
+- core-image.bbclass: Fix missing leading whitespace with ':append'
+- createrepo-c: Include missing rpm/rpmstring.h
+- curl: don't enable debug builds
+- curl: fix dependencies when building with ldap/ldaps
+- cve-check: write the cve manifest to IMGDEPLOYDIR
+- cve-update-db-native: avoid incomplete updates
+- cve-update-db-native: show IP on failure
+- dbus: Upgrade to 1.14.6
+- dev-manual: common-tasks.rst: add link to FOSDEM 2023 video
+- dev-manual: fix old override syntax
+- devshell: Do not add scripts/git-intercept to PATH
+- devtool: fix devtool finish when gitmodules file is empty
+- devtool: process local files only for the main branch
+- dhcpcd: backport two patches to fix runtime error
+- dhcpcd: fix dhcpcd start failure on qemuppc64
+- diffutils: Upgrade to 3.9
+- ffmpeg: fix configure failure on noexec /tmp host
+- gdk-pixbuf: do not use tools from gdk-pixbuf-native when building tests
+- git: Upgrade to 2.37.6
+- glslang: branch rename master -> main
+- go: Upgrade to 1.19.4
+- gstreamer1.0 : Revert "disable flaky gstbin:test_watch_for_state_change test" and Fix race conditions in gstbin tests with upstream solution
+- harfbuzz: remove bindir only if it exists
+- httpserver: add error handler that write to the logger
+- image.bbclass: print all QA functions exceptions
+- kernel-fitimage: Adjust order of dtb/dtbo files
+- kernel-fitimage: Allow user to select dtb when multiple dtb exists
+- kernel-yocto: fix kernel-meta data detection
+- kernel/linux-kernel-base: Fix kernel build artefact determinism issues
+- lib/buildstats: handle tasks that never finished
+- lib/oe/reproducible: Use git log without gpg signature
+- libarchive: Upgrade to 3.6.2
+- libc-locale: Fix on target locale generation
+- libgit2: Upgrade to 1.5.1
+- libjpeg-turbo: Upgrade to 2.1.5.1
+- libksba: Upgrade to 1.6.3
+- libpng: Enable NEON for aarch64 to enensure consistency with arm32.
+- librsvg: Only enable the Vala bindings if GObject Introspection is enabled
+- librsvg: enable vapi build
+- libseccomp: fix for the ptest result format
+- libseccomp: fix typo in DESCRIPTION
+- libssh2: Clean up ptest patch/coverage
+- libtirpc: Check if file exists before operating on it
+- libusb1: Link with latomic only if compiler has no atomic builtins
+- libusb1: Strip trailing whitespaces
+- linux-firmware: add yamato fw files to qcom-adreno-a2xx package
+- linux-firmware: properly set license for all Qualcomm firmware
+- linux-firmware: Upgrade to 20230210
+- linux-yocto/5.15: fix perf build with clang
+- linux-yocto/5.15: libbpf: Fix build warning on ref_ctr_off
+- linux-yocto/5.15: ltp and squashfs fixes
+- linux-yocto/5.15: powerpc: Fix reschedule bug in KUAP-unlocked user copy
+- linux-yocto/5.15: Upgrade to v5.15.91
+- linux-yocto/5.19: fix perf build with clang
+- linux-yocto/5.19: powerpc: Fix reschedule bug in KUAP-unlocked user copy
+- lsof: fix old override syntax
+- lttng-modules: Fix for 5.10.163 kernel version
+- lttng-modules: fix for kernel 6.2+
+- lttng-modules: Upgrade to 2.13.8
+- lttng-tools: Upgrade to 2.13.9
+- make-mod-scripts: Ensure kernel build output is deterministic
+- manuals: update patchwork instance URL
+- mesa-gl: gallium is required when enabling x11
+- meta: remove True option to getVar and getVarFlag calls (again)
+- migration-guides: add release-notes for 4.0.7
+- native: Drop special variable handling
+- numactl: skip test case when target platform doesn't have 2 CPU node
+- oeqa context.py: fix --target-ip comment to include ssh port number
+- oeqa dump.py: add error counter and stop after 5 failures
+- oeqa qemurunner.py: add timeout to QMP calls
+- oeqa qemurunner.py: try to avoid reading one character at a time
+- oeqa qemurunner: read more data at a time from serial
+- oeqa ssh.py: add connection keep alive options to ssh client
+- oeqa ssh.py: fix hangs in run()
+- oeqa ssh.py: move output prints to new line
+- oeqa/qemurunner: do not use Popen.poll() when terminating runqemu with a signal
+- oeqa/rpm.py: Increase timeout and add debug output
+- oeqa/selftest/debuginfod: improve testcase
+- oeqa/selftest/locales: Add selftest for locale generation/presence
+- oeqa/selftest/resulttooltests: fix minor typo
+- openssl: Upgrade to 3.0.8
+- opkg: ensure opkg uses private gpg.conf when applying keys.
+- pango: Upgrade to 1.50.12
+- perf: Enable debug/source packaging
+- pkgconf: Upgrade to 1.9.4
+- poky.conf: Update SANITY_TESTED_DISTROS to match autobuilder
+- poky.conf: bump version for 4.1.3
+- populate_sdk_ext.bbclass: Fix missing leading whitespace with ':append'
+- profile-manual: update WireShark hyperlinks
+- ptest-packagelists.inc: Fix missing leading whitespace with ':append'
+- python3-pytest: depend on python3-tomli instead of python3-toml
+- quilt: fix intermittent failure in faildiff.test
+- quilt: use upstreamed faildiff.test fix
+- recipe_sanity: fix old override syntax
+- ref-manual: Fix invalid feature name
+- ref-manual: update DEV_PKG_DEPENDENCY in variables
+- ref-manual: variables.rst: fix broken hyperlink
+- rm_work.bbclass: use HOSTTOOLS 'rm' binary exclusively
+- runqemu: kill qemu if it hangs
+- rust: Do not use default compiler flags defined in CC crate
+- scons.bbclass: Make MAXLINELENGTH overridable
+- scons: Pass MAXLINELENGTH to scons invocation
+- sdkext/cases/devtool: pass a logger to HTTPService
+- selftest/virgl: use pkg-config from the host
+- spirv-headers/spirv-tools: set correct branch name
+- sstate.bbclass: Fetch non-existing local .sig files if needed
+- sstatesig: Improve output hash calculation
+- sudo: Upgrade to 1.9.12p2
+- system-requirements.rst: Add Fedora 36, AlmaLinux 8.7 & 9.1, and OpenSUSE 15.4 to list of supported distros
+- testimage: Fix error message to reflect new syntax
+- tiff: Add packageconfig knob for webp
+- toolchain-scripts: compatibility with unbound variable protection
+- uninative: Upgrade to 3.8.1 to include libgcc
+- update-alternatives: fix typos
+- vim: Upgrade to 9.0.1293
+- vulkan-samples: branch rename master -> main
+- wic: Fix usage of fstype=none in wic
+- wireless-regdb: Upgrade to 2023.02.13
+- xserver-xorg: Upgrade to 21.1.7
+- xwayland: Upgrade to 22.1.8
+
+
+Known Issues in Yocto-4.1.3
+~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+- N/A
+
+
+Contributors to Yocto-4.1.3
+~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+- Adrian Freihofer
+- Alejandro Hernandez Samaniego
+- Alex Kiernan
+- Alexander Kanavin
+- Alexis Lothoré
+- Anton Antonov
+- Antonin Godard
+- Armin Kuster
+- Arnout Vandecappelle
+- Benoît Mauduit
+- Bruce Ashfield
+- Carlos Alberto Lopez Perez
+- Changqing Li
+- Charlie Johnston
+- Chee Yang Lee
+- Chen Qi
+- Dmitry Baryshkov
+- Enguerrand de Ribaucourt
+- Etienne Cordonnier
+- Fawzi KHABER
+- Federico Pellegrin
+- Frank de Brabander
+- Harald Seiler
+- He Zhe
+- Jan Kircher
+- Jermain Horsman
+- Jose Quaresma
+- Joshua Watt
+- Kai Kang
+- Khem Raj
+- Lei Maohui
+- Louis Rannou
+- Luis
+- Marek Vasut
+- Markus Volk
+- Marta Rybczynska
+- Martin Jansa
+- Mateusz Marciniec
+- Mauro Queiros
+- Michael Halstead
+- Michael Opdenacker
+- Mikko Rapeli
+- Mingli Yu
+- Narpat Mali
+- Niko Mauno
+- Pavel Zhukov
+- Pawel Zalewski
+- Peter Kjellerstedt
+- Petr Kubizňák
+- Quentin Schulz
+- Randy MacLeod
+- Richard Purdie
+- Robert Joslyn
+- Rodolfo Quesada Zumbado
+- Ross Burton
+- Sakib Sajal
+- Sandeep Gundlupet Raju
+- Saul Wold
+- Siddharth Doshi
+- Steve Sakoman
+- Thomas Roos
+- Tobias Hagelborn
+- Ulrich Ölmann
+- Vivek Kumbhar
+- Wang Mingyu
+- Xiangyu Chen
+
+
+Repositories / Downloads for Yocto-4.1.3
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+poky
+
+- Repository Location: :yocto_git:`/poky`
+- Branch: :yocto_git:`langdale </poky/log/?h=langdale>`
+- Tag: :yocto_git:`yocto-4.1.3 </poky/log/?h=yocto-4.1.3>`
+- Git Revision: :yocto_git:`91d0157d6daf4ea61d6b4e090c0b682d3f3ca60f </poky/commit/?id=91d0157d6daf4ea61d6b4e090c0b682d3f3ca60f>`
+- Release Artefact: poky-91d0157d6daf4ea61d6b4e090c0b682d3f3ca60f
+- sha: 94e4615eba651fe705436b29b854458be050cc39db936295f9d5eb7e85d3eff1
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.1.3/poky-91d0157d6daf4ea61d6b4e090c0b682d3f3ca60f.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.1.3/poky-91d0157d6daf4ea61d6b4e090c0b682d3f3ca60f.tar.bz2
+
+openembedded-core
+
+- Repository Location: :oe_git:`/openembedded-core`
+- Branch: :oe_git:`langdale </openembedded-core/log/?h=langdale>`
+- Tag: :oe_git:`yocto-4.1.3 </openembedded-core/log/?h=yocto-4.1.3>`
+- Git Revision: :oe_git:`b995ea45773211bd7bdd60eabcc9bbffda6beb5c </openembedded-core/commit/?id=b995ea45773211bd7bdd60eabcc9bbffda6beb5c>`
+- Release Artefact: oecore-b995ea45773211bd7bdd60eabcc9bbffda6beb5c
+- sha: 952e19361f205ee91b74e5caaa835d58fa6dd0d92ddaed50d4cd3f3fa56fab63
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.1.3/oecore-b995ea45773211bd7bdd60eabcc9bbffda6beb5c.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.1.3/oecore-b995ea45773211bd7bdd60eabcc9bbffda6beb5c.tar.bz2
+
+meta-mingw
+
+- Repository Location: :yocto_git:`/meta-mingw`
+- Branch: :yocto_git:`langdale </meta-mingw/log/?h=langdale>`
+- Tag: :yocto_git:`yocto-4.1.3 </meta-mingw/log/?h=yocto-4.1.3>`
+- Git Revision: :yocto_git:`b0067202db8573df3d23d199f82987cebe1bee2c </meta-mingw/commit/?id=b0067202db8573df3d23d199f82987cebe1bee2c>`
+- Release Artefact: meta-mingw-b0067202db8573df3d23d199f82987cebe1bee2c
+- sha: 704f2940322b81ce774e9cbd27c3cfa843111d497dc7b1eeaa39cd694d9a2366
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.1.3/meta-mingw-b0067202db8573df3d23d199f82987cebe1bee2c.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.1.3/meta-mingw-b0067202db8573df3d23d199f82987cebe1bee2c.tar.bz2
+
+bitbake
+
+- Repository Location: :oe_git:`/bitbake`
+- Branch: :oe_git:`2.2 </bitbake/log/?h=2.2>`
+- Tag: :oe_git:`yocto-4.1.3 </bitbake/log/?h=yocto-4.1.3>`
+- Git Revision: :oe_git:`592ee222a1c6da42925fb56801f226884b6724ec </bitbake/commit/?id=592ee222a1c6da42925fb56801f226884b6724ec>`
+- Release Artefact: bitbake-592ee222a1c6da42925fb56801f226884b6724ec
+- sha: 79c32f2ca66596132e32a45654ce0e9dd42b6b39186eff3540a9d6b499fe952c
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.1.3/bitbake-592ee222a1c6da42925fb56801f226884b6724ec.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.1.3/bitbake-592ee222a1c6da42925fb56801f226884b6724ec.tar.bz2
+
+yocto-docs
+
+- Repository Location: :yocto_git:`/yocto-docs`
+- Branch: :yocto_git:`langdale </yocto-docs/log/?h=langdale>`
+- Tag: :yocto_git:`yocto-4.1.3 </yocto-docs/log/?h=yocto-4.1.3>`
+- Git Revision: :yocto_git:`3de2ad1f8ff87aeec30088779267880306a0f31a </yocto-docs/commit/?id=3de2ad1f8ff87aeec30088779267880306a0f31a>`
+
diff --git a/documentation/migration-guides/release-notes-4.1.4.rst b/documentation/migration-guides/release-notes-4.1.4.rst
new file mode 100644
index 0000000000..de469f4bee
--- /dev/null
+++ b/documentation/migration-guides/release-notes-4.1.4.rst
@@ -0,0 +1,254 @@
+.. SPDX-License-Identifier: CC-BY-SA-2.0-UK
+
+Release notes for Yocto-4.1.4 (Langdale)
+----------------------------------------
+
+Security Fixes in Yocto-4.1.4
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+- cve-extra-exclusions/linux-yocto: Ignore :cve:`2020-27784`, :cve:`2021-3669`, :cve:`2021-3759`, :cve:`2021-4218`, :cve:`2022-0480`, :cve:`2022-1184`, :cve:`2022-1462`, :cve:`2022-2308`, :cve:`2022-2327`, :cve:`2022-26365`, :cve:`2022-2663`, :cve:`2022-2785`, :cve:`2022-3176`, :cve:`2022-33740`, :cve:`2022-33741`, :cve:`2022-33742`, :cve:`2022-3526`, :cve:`2022-3563`, :cve:`2022-3621`, :cve:`2022-3623`, :cve:`2022-3624`, :cve:`2022-3625`, :cve:`2022-3629`, :cve:`2022-3630`, :cve:`2022-3633`, :cve:`2022-3635`, :cve:`2022-3636`, :cve:`2022-3637`, :cve:`2022-3646` and :cve:`2022-3649`
+- cve-extra-exclusions/linux-yocto 5.15: Ignore :cve:`2022-3435`, :cve:`2022-3534`, :cve:`2022-3564`, :cve:`2022-3564`, :cve:`2022-3619`, :cve:`2022-3640`, :cve:`2022-42895`, :cve:`2022-42896`, :cve:`2022-4382`, :cve:`2023-0266` and :cve:`2023-0394`
+- epiphany: Fix :cve:`2023-26081`
+- git: Ignore :cve:`2023-22743`
+- go: Fix :cve:`2022-41722`, :cve:`2022-41723`, :cve:`2022-41724`, :cve:`2022-41725` and :cve:`2023-24532`
+- harfbuzz: Fix :cve:`2023-25193`
+- libmicrohttpd: Fix :cve:`2023-27371`
+- libxml2: Fix :cve:`2022-40303` and :cve:`2022-40304`
+- openssl: Fix :cve:`2023-0464`, :cve:`2023-0465` and :cve:`2023-0466`
+- python3-setuptools: Fix :cve:`2022-40897`
+- qemu: Fix :cve:`2022-4144`
+- screen: Fix :cve:`2023-24626`
+- shadow: Ignore :cve:`2016-15024`
+- tiff: Fix :cve:`2022-48281`, :cve:`2023-0795`, :cve:`2023-0796`, :cve:`2023-0797`, :cve:`2023-0798`, :cve:`2023-0799`, :cve:`2023-0800`, :cve:`2023-0801`, :cve:`2023-0802`, :cve:`2023-0803` and :cve:`2023-0804`
+- vim: Fix :cve:`2023-1127`, :cve:`2023-1170`, :cve:`2023-1175`, :cve:`2023-1264` and :cve:`2023-1355`
+- xdg-utils: Fix :cve:`2022-4055`
+- xserver-xorg: Fix for :cve:`2023-1393`
+
+
+Fixes in Yocto-4.1.4
+~~~~~~~~~~~~~~~~~~~~
+
+- apt: re-enable version check
+- base-files: Drop localhost.localdomain from hosts file
+- binutils: Fix nativesdk ld.so search
+- bitbake: bin/utils: Ensure locale en_US.UTF-8 is available on the system
+- bitbake: cookerdata: Drop dubious exception handling code
+- bitbake: cookerdata: Improve early exception handling
+- bitbake: cookerdata: Remove incorrect SystemExit usage
+- bitbake: fetch/git: Fix local clone url to make it work with repo
+- bitbake: toaster: Add refreshed oe-core and poky fixtures
+- bitbake: toaster: fixtures/README: django 1.8 -> 3.2
+- bitbake: toaster: fixtures/gen_fixtures.py: update branches
+- bitbake: utils: Allow to_boolean to support int values
+- bmap-tools: switch to main branch
+- build-appliance-image: Update to langdale head revision
+- buildtools-tarball: Handle spaces within user $PATH
+- busybox: move hwclock init earlier in startup
+- cargo.bbclass: use offline mode for building
+- cpio: Fix wrong CRC with ASCII CRC for large files
+- cracklib: update github branch to 'main'
+- cups: add/fix web interface packaging
+- cups: check :term:`PACKAGECONFIG` for pam feature
+- cups: use BUILDROOT instead of DESTDIR
+- cve-check: Fix false negative version issue
+- devtool/upgrade: do not delete the workspace/recipes directory
+- dhcpcd: Fix install conflict when enable multilib.
+- ffmpeg: fix build failure when vulkan is enabled
+- filemap.py: enforce maximum of 4kb block size
+- gcc-shared-source: do not use ${S}/.. in deploy_source_date_epoch
+- glibc: Add missing binutils dependency
+- go: upgrade to 1.19.7
+- image_types: fix multiubi var init
+- image_types: fix vname var init in multiubi_mkfs() function
+- iso-codes: upgrade to 4.13.0
+- kernel-devsrc: fix mismatched compiler warning
+- lib/oe/gpg_sign.py: Avoid race when creating .sig files in detach_sign
+- lib/resulttool: fix typo breaking resulttool log --ptest
+- libcomps: Fix callback function prototype for PyCOMPS_hash
+- libdnf: upgrade to 0.70.0
+- libgit2: update license information
+- libmicrohttpd: upgrade to 0.9.76
+- linux-yocto-rt/5.15: upgrade to -rt59
+- linux-yocto/5.15: upgrade to v5.15.108
+- linux: inherit pkgconfig in kernel.bbclass
+- lttng-modules: upgrade to v2.13.9
+- lua: Fix install conflict when enable multilib.
+- mdadm: Fix raid0, 06wrmostly and 02lineargrow tests
+- mesa-demos: packageconfig weston should have a dependency on wayland-protocols
+- meson: Fix wrapper handling of implicit setup command
+- meson: remove obsolete RPATH stripping patch
+- migration-guides: update release notes
+- oeqa ping.py: avoid busylooping failing ping command
+- oeqa ping.py: fail test if target IP address has not been set
+- oeqa rtc.py: skip if read-only-rootfs
+- oeqa/runtime: clean up deprecated backslash expansion
+- oeqa/sdk: Improve Meson test
+- oeqa/selftest/cases/package.py: adding unittest for package rename conflicts
+- oeqa/selftest/cases/runqemu: update imports
+- oeqa/selftest/prservice: Improve debug output for failure
+- oeqa/selftest/reproducible: Split different packages from missing packages output
+- oeqa/selftest: OESelftestTestContext: convert relative to full path when newbuilddir is provided
+- oeqa/targetcontrol: do not set dump_host_cmds redundantly
+- oeqa/targetcontrol: fix misspelled RuntimeError
+- oeqa/targetcontrol: remove unused imports
+- oeqa/utils/commands: fix usage of undefined EPIPE
+- oeqa/utils/commands: remove unused imports
+- oeqa/utils/qemurunner: replace hard-coded user 'root' in debug output
+- oeqs/selftest: OESelftestTestContext: replace the os.environ after subprocess.check_output
+- package.bbclass: check packages name conflict in do_package
+- pango: upgrade to 1.50.13
+- piglit: Fix build time dependency
+- poky.conf: bump version for 4.1.4
+- populate_sdk_base: add zip options
+- populate_sdk_ext: Handle spaces within user $PATH
+- pybootchart: Fix extents handling to account for cpu/io/mem pressure changes
+- pybootchartui: Fix python syntax issue
+- report-error: catch Nothing :term:`PROVIDES` error
+- rpm: Fix hdr_hash function prototype
+- run-postinsts: Set dependency for ldconfig to avoid boot issues
+- runqemu: respect :term:`IMAGE_LINK_NAME`
+- runqemu: Revert "workaround for APIC hang on pre 4.15 kernels on qemux86q"
+- scripts/lib/buildstats: handle top-level build_stats not being complete
+- selftest/recipetool: Stop test corrupting tinfoil class
+- selftest/runtime_test/virgl: Disable for all Rocky Linux
+- selftest: devtool: set :term:`BB_HASHSERVE_UPSTREAM` when setting :term:`SSTATE_MIRRORS`
+- selftest: runqemu: better check for ROOTFS: in the log
+- selftest: runqemu: use better error message when asserts fail
+- shadow: Fix can not print full login timeout message
+- staging/multilib: Fix manifest corruption
+- staging: Separate out different multiconfig manifests
+- sudo: upgrade to 1.9.13p3
+- systemd.bbclass: Add /usr/lib/systemd to searchpaths as well
+- systemd: add group sgx to udev package
+- systemd: fix wrong nobody-group assignment
+- timezone: use 'tz' subdir instead of ${WORKDIR} directly
+- toolchain-scripts: Handle spaces within user $PATH
+- tzcode-native: fix build with gcc-13 on host
+- tzdata: upgrade to 2023c
+- tzdata: use separate :term:`B` instead of :term:`WORKDIR` for zic output
+- u-boot: Map arm64 into map for u-boot dts installation
+- uninative: Upgrade to 3.9 to include glibc 2.37
+- vala: Fix install conflict when enable multilib.
+- vim: add missing pkgconfig inherit
+- vim: set modified-by to the recipe :term:`MAINTAINER`
+- vim: upgrade to 9.0.1429
+- xcb-proto: Fix install conflict when enable multilib.
+
+
+Known Issues in Yocto-4.1.4
+~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+- N/A
+
+
+Contributors to Yocto-4.1.4
+~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+- Alexander Kanavin
+- Andrew Geissler
+- Arturo Buzarra
+- Bhabu Bindu
+- Bruce Ashfield
+- Carlos Alberto Lopez Perez
+- Chee Yang Lee
+- Chris Elledge
+- Christoph Lauer
+- Dmitry Baryshkov
+- Enrico Jörns
+- Fawzi KHABER
+- Frank de Brabander
+- Frederic Martinsons
+- Geoffrey GIRY
+- Hitendra Prajapati
+- Jose Quaresma
+- Kenfe-Mickael Laventure
+- Khem Raj
+- Marek Vasut
+- Martin Jansa
+- Michael Halstead
+- Michael Opdenacker
+- Mikko Rapeli
+- Ming Liu
+- Mingli Yu
+- Narpat Mali
+- Pavel Zhukov
+- Peter Marko
+- Piotr Łobacz
+- Randy MacLeod
+- Richard Purdie
+- Robert Yang
+- Romuald JEANNE
+- Romuald Jeanne
+- Ross Burton
+- Siddharth
+- Siddharth Doshi
+- Soumya
+- Steve Sakoman
+- Sudip Mukherjee
+- Tim Orling
+- Tobias Hagelborn
+- Tom Hochstein
+- Trevor Woerner
+- Wang Mingyu
+- Xiangyu Chen
+- Zoltan Boszormenyi
+
+
+Repositories / Downloads for Yocto-4.1.4
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+poky
+
+- Repository Location: :yocto_git:`/poky`
+- Branch: :yocto_git:`langdale </poky/log/?h=langdale>`
+- Tag: :yocto_git:`yocto-4.1.4 </poky/log/?h=yocto-4.1.4>`
+- Git Revision: :yocto_git:`3e95f268ce04b49ba6731fd4bbc53b1693c21963 </poky/commit/?id=3e95f268ce04b49ba6731fd4bbc53b1693c21963>`
+- Release Artefact: poky-3e95f268ce04b49ba6731fd4bbc53b1693c21963
+- sha: 54798c4b519f5e11f409e1fd074bea1bc0a1b80672aa60dddbac772c8e4d838b
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.1.4/poky-3e95f268ce04b49ba6731fd4bbc53b1693c21963.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.1.4/poky-3e95f268ce04b49ba6731fd4bbc53b1693c21963.tar.bz2
+
+openembedded-core
+
+- Repository Location: :oe_git:`/openembedded-core`
+- Branch: :oe_git:`langdale </openembedded-core/log/?h=langdale>`
+- Tag: :oe_git:`yocto-4.1.4 </openembedded-core/log/?h=yocto-4.1.4>`
+- Git Revision: :oe_git:`78211cda40eb018a3aa535c75b61e87337236628 </openembedded-core/commit/?id=78211cda40eb018a3aa535c75b61e87337236628>`
+- Release Artefact: oecore-78211cda40eb018a3aa535c75b61e87337236628
+- sha: 1303d836bae54c438c64d6b9f068eb91c32be4cc1779e89d0f2d915a55d59b15
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.1.4/oecore-78211cda40eb018a3aa535c75b61e87337236628.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.1.4/oecore-78211cda40eb018a3aa535c75b61e87337236628.tar.bz2
+
+meta-mingw
+
+- Repository Location: :yocto_git:`/meta-mingw`
+- Branch: :yocto_git:`langdale </meta-mingw/log/?h=langdale>`
+- Tag: :yocto_git:`yocto-4.1.4 </meta-mingw/log/?h=yocto-4.1.4>`
+- Git Revision: :yocto_git:`b0067202db8573df3d23d199f82987cebe1bee2c </meta-mingw/commit/?id=b0067202db8573df3d23d199f82987cebe1bee2c>`
+- Release Artefact: meta-mingw-b0067202db8573df3d23d199f82987cebe1bee2c
+- sha: 704f2940322b81ce774e9cbd27c3cfa843111d497dc7b1eeaa39cd694d9a2366
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.1.4/meta-mingw-b0067202db8573df3d23d199f82987cebe1bee2c.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.1.4/meta-mingw-b0067202db8573df3d23d199f82987cebe1bee2c.tar.bz2
+
+bitbake
+
+- Repository Location: :oe_git:`/bitbake`
+- Branch: :oe_git:`2.2 </bitbake/log/?h=2.2>`
+- Tag: :oe_git:`yocto-4.1.4 </bitbake/log/?h=yocto-4.1.4>`
+- Git Revision: :oe_git:`5b105e76dd7de3b9a25b17b397f2c12c80048894 </bitbake/commit/?id=5b105e76dd7de3b9a25b17b397f2c12c80048894>`
+- Release Artefact: bitbake-5b105e76dd7de3b9a25b17b397f2c12c80048894
+- sha: 2cd6448138816f5a906f9927c6b6fdc5cf24981ef32b6402312f52ca490edb4f
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.1.4/bitbake-5b105e76dd7de3b9a25b17b397f2c12c80048894.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.1.4/bitbake-5b105e76dd7de3b9a25b17b397f2c12c80048894.tar.bz2
+
+yocto-docs
+
+- Repository Location: :yocto_git:`/yocto-docs`
+- Branch: :yocto_git:`langdale </yocto-docs/log/?h=langdale>`
+- Tag: :yocto_git:`yocto-4.1.4 </yocto-docs/log/?h=yocto-4.1.4>`
+- Git Revision: :yocto_git:`da685fc5e69d49728e3ffd6c4d623e7e1745059d </yocto-docs/commit/?id=da685fc5e69d49728e3ffd6c4d623e7e1745059d>`
+
diff --git a/documentation/migration-guides/release-notes-4.1.rst b/documentation/migration-guides/release-notes-4.1.rst
new file mode 100644
index 0000000000..a0d5196128
--- /dev/null
+++ b/documentation/migration-guides/release-notes-4.1.rst
@@ -0,0 +1,761 @@
+.. SPDX-License-Identifier: CC-BY-SA-2.0-UK
+
+Release notes for 4.1 (langdale)
+---------------------------------
+
+
+New Features / Enhancements in 4.1
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+- Linux kernel 5.19, glibc 2.36 and ~260 other recipe upgrades
+
+- ``make`` 4.0 is now the minimum make version required on the build host.
+ For host distros that do not provide it, this is included as part of the
+ :term:`buildtools` tarball, and additionally a new :term:`buildtools-make` tarball
+ has been introduced to provide this in particular for host distros with
+ a broken make 4.x version. For more details see
+ :ref:`ref-manual/system-requirements:required git, tar, python, make and gcc versions`.
+
+- New layer setup tooling:
+
+ - New ``scripts/oe-setup-layers`` standalone script to restore the layer
+ configuration from a json file
+ - New ``bitbake-layers create-layers-setup`` command to save the
+ layer configuration to a json file
+ - New ``bitbake-layers save-build-conf`` command to save the active build
+ configuration as a template into a layer
+
+- Rust-related enhancements:
+
+ - Support for building rust for the target
+ - Significant SDK toolchain build optimisation
+ - Support for building native components in the SDK
+ - Support ``crate://`` fetcher with :ref:`ref-classes-externalsrc`
+
+- New core recipes:
+
+ - ``buildtools-make-tarball``
+ - ``icon-naming-utils`` (previously removed)
+ - ``musl-locales``
+ - ``python3-editables`` (originally in meta-python)
+ - ``python3-hatch-vcs``
+ - ``python3-hatchling`` (originally in meta-oe)
+ - ``python3-lxml`` (originally in meta-python)
+ - ``python3-pathspec`` (originally in meta-python)
+ - ``python3-picobuild``
+ - ``sato-icon-theme`` (previously removed)
+
+- CVE checking enhancements:
+
+ - New :term:`CVE_DB_UPDATE_INTERVAL` variable to allow specifying the CVE database minimum update interval (and default to once per day)
+ - Added JSON format to summary output
+ - Added support for Ignored CVEs
+ - Enable recursive CVE checking also for ``do_populate_sdk``
+ - New :term:`CVE_CHECK_SHOW_WARNINGS` variable to disable unpatched CVE warning messages
+ - The :ref:`ref-classes-pypi` class now defaults :term:`CVE_PRODUCT` from :term:`PYPI_PACKAGE`
+ - Added current kernel CVEs to ignore list since we stay as close to the kernel stable releases as we can
+ - Optimisations to avoid dependencies on fetching
+
+- Complementary package installation (as used in SDKs and images) no longer installs recommended packages, in order to avoid conflicts
+- Dependency of -dev package on main package is now an :term:`RRECOMMENDS` and can be easily set via new :term:`DEV_PKG_DEPENDENCY` variable
+
+- Support for CPU, I/O and memory pressure regulation in BitBake
+- Pressure data gathering in :ref:`ref-classes-buildstats` and rendering in ``pybootchartgui``
+
+- New Picobuild system for lightweight Python PEP-517 build support in the :ref:`ref-classes-python_pep517` class
+
+- Many classes are now split into global and recipe contexts for better
+ validation. For more information, see
+ :ref:`Classes now split by usage context <migration-4.1-classes-split>`.
+
+- Architecture-specific enhancements:
+
+ - arch-armv8-4a.inc: add tune include for armv8.4a
+ - tune-neoversen2: support tune-neoversen2 base on armv9a
+ - riscv: Add tunes for rv64 without compressed instructions
+ - gnu-efi: enable for riscv64
+ - shadow-securetty: allow ttyS4 for amd-snowyowl-64
+
+- Kernel-related enhancements:
+
+ - linux-yocto/5.15: cfg/xen: Move x86 configs to separate file
+ - linux-yocto/5.15: Enabled MDIO bus config
+ - linux-yocto: Enable mdio for qemu
+ - linux-yocto/5.15: base: enable kernel crypto userspace API
+ - kern-tools: allow 'y' or 'm' to avoid config audit warnings
+ - kernel-yocto.bbclass: say what :term:`SRC_URI` entry is being dropped
+ - kernel.bbclass: Do not overwrite recipe's custom postinst
+ - kmod: Enable xz support by default
+ - Run depmod(wrapper) against each compiled kernel when multiple kernels are enabled
+ - linux-yocto-tiny: enable qemuarmv5/qemuarm64
+
+- wic Image Creator enhancements:
+
+ - Added dependencies to support erofs
+ - Added ``fspassno`` parameter to partition to allow specifying the value of the last column (``fs_passno``) in ``/etc/fstab``.
+ - bootimg-efi: added support for loading devicetree files
+ - Added ``none`` fstype for custom image (for use in conjunction with ``rawcopy``)
+
+- SDK-related enhancements:
+
+ - :ref:`Support for using the regular build system as an SDK <sdk-manual/extensible:Setting up the Extensible SDK environment directly in a Yocto build>`
+ - :ref:`ref-classes-image-buildinfo` class now also writes build information to SDKs
+ - New :term:`SDK_TOOLCHAIN_LANGS` variable to control support of rust / go in SDK
+ - rust-llvm: enabled :ref:`ref-classes-nativesdk` variant
+ - python3-pluggy: enabled for :ref:`ref-classes-native` / :ref:`ref-classes-nativesdk`
+
+- QEMU/runqemu enhancements:
+
+ - qemux86-64: Allow higher tunes
+ - runqemu: display host uptime when starting
+ - runqemu: add ``QB_KERNEL_CMDLINE`` that can be set to "none" to avoid overriding kernel command line specified in dtb
+
+- Image-related enhancements:
+
+ - New variable :term:`UBOOT_MKIMAGE_KERNEL_TYPE`
+ - New variable :term:`FIT_PAD_ALG` to control FIT image padding algorithm
+ - New :term:`KERNEL_DEPLOY_DEPEND` variable to allow disabling image dependency on deploying the kernel
+ - :ref:`ref-classes-image_types`: isolate the write of UBI
+ configuration to a ``write_ubi_config`` function that can be easily overridden
+
+- openssh: add support for config snippet includes to ssh and sshd
+- :ref:`ref-classes-create-spdx`: Add :term:`SPDX_PRETTY` option
+- wpa-supplicant: build static library if not disabled via :term:`DISABLE_STATIC`
+- wpa-supplicant: package dynamic modules
+- openssl: extract legacy provider module to a separate package
+- linux-firmware: split out ath3k firmware
+- linux-firmware: add support for building snapshots
+- eudev: create static-nodes in init script
+- udev-extraconf: new :term:`MOUNT_BASE` variable allows configuring automount base directory
+- udev-extraconf/mount.sh: use partition labels in mountpoint paths
+- systemd: Set RebootWatchdogSec to 60s by default
+- systemd: systemd-systemctl: Support instance conf files during enable
+- weston.init: enable ``xwayland`` in weston.ini if ``x11`` is in :term:`DISTRO_FEATURES`
+- New ``npm_registry`` Python module to enable caching with nodejs 16+
+- :ref:`ref-classes-npm`: replaced ``npm pack`` call with ``tar czf`` for nodejs 16+ compatibility and improved ``do_configure`` performance
+- Enabled :ref:`ref-classes-bin-package` class to work properly in the native case
+- Enabled :ref:`buildpaths <qa-check-buildpaths>` QA check as a warning by default
+- New :term:`OVERLAYFS_ETC_EXPOSE_LOWER` to provide read-only access to the original ``/etc`` content with :ref:`ref-classes-overlayfs-etc`
+- New :term:`OVERLAYFS_QA_SKIP` variable to allow skipping check on :ref:`ref-classes-overlayfs` mounts
+- New :term:`PACKAGECONFIG` options for individual recipes:
+
+ - apr: xsi-strerror
+ - btrfs-tools: lzo
+ - connman: iwd
+ - coreutils: openssl
+ - dropbear: enable-x11-forwarding
+ - eudev: blkid, kmod, rule-generator
+ - eudev: manpages, selinux
+ - flac: avx, ogg
+ - gnutls: fips
+ - gstreamer1.0-plugins-bad: avtp
+ - libsdl2: libusb
+ - llvm: optviewer
+ - mesa: vulkan, vulkan-beta, zink
+ - perf: bfd
+ - piglit: glx, opencl
+ - python3: editline
+ - qemu: bpf, brlapi, capstone, rdma, slirp, uring, vde
+ - rpm: readline
+ - ruby: capstone
+ - systemd: no-dns-fallback, sysext
+ - tiff: jbig
+
+- ptest enhancements in ``curl``, ``json-c``, ``libgcrypt``, ``libgpg-error``, ``libxml2``
+- ptest compile/install functions now use :term:`PARALLEL_MAKE` and :term:`PARALLEL_MAKEINST` in ptest for significant speedup
+- New :term:`TC_CXX_RUNTIME` variable to enable other layers to more easily control C++ runtime
+- Set :term:`BB_DEFAULT_UMASK` using ??= to make it easier to override
+- Set :term:`TCLIBC` and :term:`TCMODE` using ??= to make them easier to override
+- squashfs-tools: build with lzo support by default
+- insane.bbclass: make ``do_qa_staging`` check shebang length for native scripts in all :term:`SYSROOT_DIRS`
+- utils: Add ``create_cmdline_shebang_wrapper`` function to allow recipes to easily create a wrapper to fix long shebang lines
+- meson: provide relocation script and native/cross wrappers also for meson-native
+- meson.bbclass: add cython binary to cross/native toolchain config
+- New ``musl-locales`` recipe to provide a limited set of locale data for musl based systems
+- gobject-introspection: use :term:`OBJDUMP` environment variable so that objdump tool can be picked up from the environment
+- The Python ``zoneinfo`` module is now split out to its own ``python3-zoneinfo`` package.
+- busybox: added devmem 128-bit support
+- vim: split xxd out into its own package
+- New :ref:`ref-classes-github-releases` class to consolidate version checks for github-based packages
+- ``devtool reset`` now preserves ``workspace/sources`` source trees in ``workspace/attic/sources/`` instead of leaving them in-place
+- scripts/patchreview: Add commit to stored json data
+- scripts/patchreview: Make json output human parsable
+- ``wpa-supplicant`` recipe now uses the upstream ``defconfig`` modified based upon :term:`PACKAGECONFIG` instead of a stale ``defconfig`` file
+- bitbake: build: prefix the tasks with a timestamp in the log.task_order
+- bitbake: fetch2/osc: Add support to query latest revision
+- bitbake: utils: Pass lock argument in fileslocked
+- bitbake: utils: Add enable_loopback_networking()
+
+
+Known Issues in 4.1
+~~~~~~~~~~~~~~~~~~~
+
+- The change to :ref:`migration-4.1-complementary-deps` means that images
+ built with the ``ptest-pkgs`` :term:`IMAGE_FEATURES` don’t automatically
+ install ``ptest-runner``, as that package is a recommendation of the
+ individual ``-ptest`` packages. This will be resolved in the next point
+ release, and can be worked around by explicitly installing ``ptest-runner``
+ into the image. Filed as :yocto_bugs:`bug 14928 </show_bug.cgi?id=14928>`.
+
+- There is a known issue with eSDKs where sstate objects may be missing,
+ resulting in packages being unavailable to install in the sysroot. This is due
+ to image generation optimisations having unintended consequences in eSDK
+ generation. This will be resolved in the next point release. Filed as
+ :yocto_bugs:`bug 14626 </show_bug.cgi?id=14626>`, which also details the fix.
+
+- The change to :ref:`migration-4.1-classes-split` inadvertently moved the
+ :ref:`ref-classes-externalsrc` class to ``meta/classes-recipe``,
+ when it is not recipe-specific and can also be used in a global context. The
+ class will be moved back to ``meta/classes`` in the next point release. Filed
+ as :yocto_bugs:`bug 14940 </show_bug.cgi?id=14940>`.
+
+
+Recipe License changes in 4.1
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+The following corrections have been made to the :term:`LICENSE` values set by recipes:
+
+- alsa-state: add GPL-2.0-or-later because of alsa-state-init file
+- git: add GPL-2.0-or-later & BSD-3-Clause & MIT & BSL-1.0 & LGPL-2.1-or-later due to embedded code
+- libgcrypt: dropped GPLv3 license after upstream changes
+- linux-firmware: correct license for ar3k firmware (specific "ar3k" license)
+
+
+
+Security Fixes in 4.1
+~~~~~~~~~~~~~~~~~~~~~
+
+- bind: :cve:`2022-1183`, :cve:`2022-2795`, :cve:`2022-2881`, :cve:`2022-2906`, :cve:`2022-3080`, :cve:`2022-38178`
+- binutils: :cve:`2019-1010204`, :cve:`2022-38126`, :cve:`2022-38127`, :cve:`2022-38128`, :cve:`2022-38533`
+- busybox: :cve:`2022-30065`
+- connman: :cve:`2022-32292`, :cve:`2022-32293`
+- cups: :cve:`2022-26691`
+- e2fsprogs: :cve:`2022-1304`
+- expat: :cve:`2022-40674`
+- freetype: :cve:`2022-27404`
+- glibc: :cve:`2022-39046`
+- gnupg: :cve:`2022-34903`
+- grub2: :cve:`2021-3695`, :cve:`2021-3696`, :cve:`2021-3697`, :cve:`2022-28733`, :cve:`2022-28734`, :cve:`2022-28735`
+- inetutils: :cve:`2022-39028`
+- libtirpc: :cve:`2021-46828`
+- libxml2: :cve:`2016-3709` (ignored)
+- libxslt: :cve:`2022-29824` (not applicable)
+- linux-yocto/5.15: :cve:`2022-28796`
+- logrotate: :cve:`2022-1348`
+- lua: :cve:`2022-33099`
+- nasm: :cve:`2020-18974` (ignored)
+- ncurses: :cve:`2022-29458`
+- openssl: :cve:`2022-1292`, :cve:`2022-1343`, :cve:`2022-1434`, :cve:`2022-1473`, :cve:`2022-2068`, :cve:`2022-2274`, :cve:`2022-2097`
+- python3: :cve:`2015-20107` (ignored)
+- qemu: :cve:`2021-20255` (ignored), :cve:`2019-12067` (ignored), :cve:`2021-3507`, :cve:`2022-0216`, :cve:`2022-2962`, :cve:`2022-35414`
+- rpm: :cve:`2021-35937`, :cve:`2021-35938`, :cve:`2021-35939`
+- rsync: :cve:`2022-29154`
+- subversion: :cve:`2021-28544`, :cve:`2022-24070`
+- tiff: :cve:`2022-1210` (not applicable), :cve:`2022-1622`, :cve:`2022-1623` (invalid), :cve:`2022-2056`, :cve:`2022-2057`, :cve:`2022-2058`, :cve:`2022-2953`, :cve:`2022-34526`
+- unzip: :cve:`2022-0529`, :cve:`2022-0530`
+- vim: :cve:`2022-1381`, :cve:`2022-1420`, :cve:`2022-1621`, :cve:`2022-1629`, :cve:`2022-1674`, :cve:`2022-1733`, :cve:`2022-1735`, :cve:`2022-1769`, :cve:`2022-1771`, :cve:`2022-1785`, :cve:`2022-1796`, :cve:`2022-1927`, :cve:`2022-1942`, :cve:`2022-2257`, :cve:`2022-2264`, :cve:`2022-2284`, :cve:`2022-2285`, :cve:`2022-2286`, :cve:`2022-2287`, :cve:`2022-2816`, :cve:`2022-2817`, :cve:`2022-2819`, :cve:`2022-2845`, :cve:`2022-2849`, :cve:`2022-2862`, :cve:`2022-2874`, :cve:`2022-2889`, :cve:`2022-2980`, :cve:`2022-2946`, :cve:`2022-2982`, :cve:`2022-3099`, :cve:`2022-3134`, :cve:`2022-3234`, :cve:`2022-3278`
+- zlib: :cve:`2022-37434`
+
+
+
+
+
+Recipe Upgrades in 4.1
+~~~~~~~~~~~~~~~~~~~~~~
+
+- acpica 20211217 -> 20220331
+- adwaita-icon-theme 41.0 -> 42.0
+- alsa-lib 1.2.6.1 -> 1.2.7.2
+- alsa-plugins 1.2.6 -> 1.2.7.1
+- alsa-ucm-conf 1.2.6.3 -> 1.2.7.2
+- alsa-utils 1.2.6 -> 1.2.7
+- asciidoc 10.1.4 -> 10.2.0
+- at-spi2-core 2.42.0 -> 2.44.1
+- autoconf-archive 2022.02.11 -> 2022.09.03
+- base-passwd 3.5.29 -> 3.5.52
+- bind 9.18.5 -> 9.18.7
+- binutils 2.38 -> 2.39
+- boost 1.78.0 -> 1.80.0
+- boost-build-native 4.4.1 -> 1.80.0
+- btrfs-tools 5.16.2 -> 5.19.1
+- cargo 1.59.0 -> 1.63.0
+- ccache 4.6 -> 4.6.3
+- cmake 3.22.3 -> 3.24.0
+- cmake-native 3.22.3 -> 3.24.0
+- coreutils 9.0 -> 9.1
+- createrepo-c 0.19.0 -> 0.20.1
+- cross-localedef-native 2.35 -> 2.36
+- curl 7.82.0 -> 7.85.0
+- diffoscope 208 -> 221
+- dmidecode 3.3 -> 3.4
+- dnf 4.11.1 -> 4.14.0
+- dos2unix 7.4.2 -> 7.4.3
+- dpkg 1.21.4 -> 1.21.9
+- dropbear 2020.81 -> 2022.82
+- efibootmgr 17 -> 18
+- elfutils 0.186 -> 0.187
+- ell 0.50 -> 0.53
+- enchant2 2.3.2 -> 2.3.3
+- erofs-utils 1.4 -> 1.5
+- ethtool 5.16 -> 5.19
+- eudev 3.2.10 -> 3.2.11
+- ffmpeg 5.0.1 -> 5.1.1
+- file 5.41 -> 5.43
+- flac 1.3.4 -> 1.4.0
+- fontconfig 2.13.1 -> 2.14.0
+- freetype 2.11.1 -> 2.12.1
+- gcc 11.3.0 -> 12.2.0
+- gcompat 1.0.0+1.1+gitX (4d6a5156a6eb…) -> 1.0.0+1.1+gitX (c6921a1aa454…)
+- gdb 11.2 -> 12.1
+- ghostscript 9.55.0 -> 9.56.1
+- git 2.35.4 -> 2.37.3
+- glibc 2.35 -> 2.36
+- glslang 1.3.204.1 -> 1.3.216.0
+- gnu-config 20211108+gitX -> 20220525+gitX
+- gnu-efi 3.0.14 -> 3.0.15
+- gnutls 3.7.4 -> 3.7.7
+- go 1.17.13 -> 1.19
+- go-helloworld 0.1 (787a929d5a0d…) -> 0.1 (2e68773dfca0…)
+- gpgme 1.17.1 -> 1.18.0
+- gptfdisk 1.0.8 -> 1.0.9
+- harfbuzz 4.0.1 -> 5.1.0
+- hdparm 9.63 -> 9.64
+- help2man 1.49.1 -> 1.49.2
+- hwlatdetect 2.3 -> 2.4
+- icu 70.1 -> 71.1
+- inetutils 2.2 -> 2.3
+- init-system-helpers 1.62 -> 1.64
+- iproute2 5.17.0 -> 5.19.0
+- iptables 1.8.7 -> 1.8.8
+- iw 5.16 -> 5.19
+- json-c 0.15 -> 0.16
+- kbd 2.4.0 -> 2.5.1
+- kea 2.0.2 -> 2.2.0
+- kexec-tools 2.0.23 -> 2.0.25
+- kmod 29 -> 30
+- kmscube git (9f63f359fab1…) -> git (3bf6ee1a0233…)
+- less 600 -> 608
+- libaio 0.3.112 -> 0.3.113
+- libbsd 0.11.5 -> 0.11.6
+- libcap-ng 0.8.2 -> 0.8.3
+- libcap-ng-python 0.8.2 -> 0.8.3
+- libcgroup 2.0.2 -> 3.0.0
+- libcomps 0.1.18 -> 0.1.19
+- libdnf 0.66.0 -> 0.69.0
+- libdrm 2.4.110 -> 2.4.113
+- libevdev 1.12.1 -> 1.13.0
+- libfontenc 1.1.4 -> 1.1.6
+- libgcc 11.3.0 -> 12.2.0
+- libgcc-initial 11.3.0 -> 12.2.0
+- libgcrypt 1.9.4 -> 1.10.1
+- libgfortran 11.3.0 -> 12.2.0
+- libgit2 1.4.3 -> 1.5.0
+- libgpg-error 1.44 -> 1.45
+- libhandy 1.5.0 -> 1.6.3
+- libidn2 2.3.2 -> 2.3.3
+- libjitterentropy 3.4.0 -> 3.4.1
+- libmnl 1.0.4 -> 1.0.5
+- libnl 3.5.0 -> 3.7.0
+- libnotify 0.7.9 -> 0.8.1
+- libpipeline 1.5.5 -> 1.5.6
+- libproxy 0.4.17 -> 0.4.18
+- librepo 1.14.3 -> 1.14.5
+- librsvg 2.52.7 -> 2.54.5
+- libsdl2 2.0.20 -> 2.24.0
+- libseccomp 2.5.3 -> 2.5.4
+- libsndfile1 1.0.31 -> 1.1.0
+- libstd-rs 1.59.0 -> 1.63.0
+- libtirpc 1.3.2 -> 1.3.3
+- libubootenv 0.3.2 -> 0.3.3
+- libva 2.14.0 -> 2.15.0
+- libva-utils 2.14.0 -> 2.15.0
+- libx11 1.7.3.1 -> 1.8.1
+- libxau 1.0.9 -> 1.0.10
+- libxcb 1.14 -> 1.15
+- libxcursor 1.2.0 -> 1.2.1
+- libxcvt 0.1.1 -> 0.1.2
+- libxfont2 2.0.5 -> 2.0.6
+- libxvmc 1.0.12 -> 1.0.13
+- linux-libc-headers 5.16 -> 5.19
+- linux-yocto 5.10.143+gitX, 5.15.68+gitX -> 5.15.68+gitX, 5.19.9+gitX
+- linux-yocto-dev 5.18++gitX -> 5.19++gitX
+- linux-yocto-rt 5.10.143+gitX, 5.15.68+gitX -> 5.15.68+gitX, 5.19.9+gitX
+- linux-yocto-tiny 5.10.143+gitX, 5.15.68+gitX -> 5.15.68+gitX, 5.19.9+gitX
+- llvm 13.0.1 -> 14.0.6
+- lsof 4.94.0 -> 4.95.0
+- ltp 20220121 -> 20220527
+- lttng-tools 2.13.4 -> 2.13.8
+- lttng-ust 2.13.3 -> 2.13.4
+- mc 4.8.27 -> 4.8.28
+- mesa 22.0.3 -> 22.2.0
+- mesa-demos 8.4.0 -> 8.5.0
+- mesa-gl 22.0.3 -> 22.2.0
+- meson 0.61.3 -> 0.63.2
+- mmc-utils 0.1+gitX (b7e4d5a6ae99…) -> 0.1+gitX (d7b343fd2628…)
+- mpg123 1.29.3 -> 1.30.2
+- msmtp 1.8.20 -> 1.8.22
+- mtools 4.0.38 -> 4.0.40
+- musl 1.2.3+gitX (7a43f6fea908…) -> 1.2.3+gitX (37e18b7bf307…)
+- musl-obstack 1.1 -> 1.2
+- ncurses 6.3+20220423 (a0bc708bc695…) -> 6.3+20220423 (20db1fb41ec9…)
+- neard 0.16 -> 0.18
+- nettle 3.7.3 -> 3.8.1
+- nfs-utils 2.6.1 -> 2.6.2
+- nghttp2 1.47.0 -> 1.49.0
+- ninja 1.10.2 -> 1.11.1
+- numactl 2.0.14 -> 2.0.15
+- ofono 1.34 -> 2.0
+- opensbi 1.0 -> 1.1
+- openssh 8.9p1 -> 9.0p1
+- opkg 0.5.0 -> 0.6.0
+- ovmf edk2-stable202202 -> edk2-stable202205
+- pango 1.50.4 -> 1.50.9
+- parted 3.4 -> 3.5
+- patchelf 0.14.5 -> 0.15.0
+- pciutils 3.7.0 -> 3.8.0
+- perl 5.34.1 -> 5.36.0
+- perlcross 1.3.7 -> 1.4
+- piglit 1.0+gitrX (2f80c7cc9c02…) -> 1.0+gitrX (265896c86f90…)
+- pkgconf 1.8.0 -> 1.9.3
+- psmisc 23.4 -> 23.5
+- pulseaudio 15.0 -> 16.1
+- puzzles 0.0+gitX (c43a34fbfe43…) -> 0.0+gitX (8399cff6a3b9…)
+- python3 3.10.4 -> 3.10.6
+- python3-atomicwrites 1.4.0 -> 1.4.1
+- python3-attrs 21.4.0 -> 22.1.0
+- python3-babel 2.9.1 -> 2.10.3
+- python3-bcrypt 3.2.0 -> 3.2.2
+- python3-certifi 2021.10.8 -> 2022.9.14
+- python3-cffi 1.15.0 -> 1.15.1
+- python3-chardet 4.0.0 -> 5.0.0
+- python3-cryptography 36.0.2 -> 37.0.4
+- python3-cryptography-vectors 36.0.2 -> 37.0.4
+- python3-cython 0.29.28 -> 0.29.32
+- python3-dbusmock 0.27.3 -> 0.28.4
+- python3-docutils 0.18.1 -> 0.19
+- python3-dtschema 2022.1 -> 2022.8.3
+- python3-hypothesis 6.39.5 -> 6.54.5
+- python3-idna 3.3 -> 3.4
+- python3-imagesize 1.3.0 -> 1.4.1
+- python3-importlib-metadata 4.11.3 -> 4.12.0
+- python3-jinja2 3.1.1 -> 3.1.2
+- python3-jsonpointer 2.2 -> 2.3
+- python3-jsonschema 4.4.0 -> 4.9.1
+- python3-magic 0.4.25 -> 0.4.27
+- python3-mako 1.1.6 -> 1.2.2
+- python3-markdown 3.3.6 -> 3.4.1
+- python3-more-itertools 8.12.0 -> 8.14.0
+- python3-numpy 1.22.3 -> 1.23.3
+- python3-pbr 5.8.1 -> 5.10.0
+- python3-pip 22.0.3 -> 22.2.2
+- python3-psutil 5.9.0 -> 5.9.2
+- python3-pycryptodome 3.14.1 -> 3.15.0
+- python3-pycryptodomex 3.14.1 -> 3.15.0
+- python3-pyelftools 0.28 -> 0.29
+- python3-pygments 2.11.2 -> 2.13.0
+- python3-pygobject 3.42.0 -> 3.42.2
+- python3-pyparsing 3.0.7 -> 3.0.9
+- python3-pytest 7.1.1 -> 7.1.3
+- python3-pytest-subtests 0.7.0 -> 0.8.0
+- python3-pytz 2022.1 -> 2022.2.1
+- python3-requests 2.27.1 -> 2.28.1
+- python3-scons 4.3.0 -> 4.4.0
+- python3-semantic-version 2.9.0 -> 2.10.0
+- python3-setuptools 59.5.0 -> 65.0.2
+- python3-setuptools-scm 6.4.2 -> 7.0.5
+- python3-sphinx 4.4.0 -> 5.1.1
+- python3-sphinx-rtd-theme 0.5.0 -> 1.0.0
+- python3-typing-extensions 3.10.0.0 -> 4.3.0
+- python3-urllib3 1.26.9 -> 1.26.12
+- python3-webcolors 1.11.1 -> 1.12
+- python3-zipp 3.7.0 -> 3.8.1
+- qemu 6.2.0 -> 7.1.0
+- repo 2.22 -> 2.29.2
+- rpm 4.17.0 -> 4.18.0
+- rsync 3.2.3 -> 3.2.5
+- rt-tests 2.3 -> 2.4
+- rust 1.59.0 -> 1.63.0
+- rust-llvm 1.59.0 -> 1.63.0
+- sbc 1.5 -> 2.0
+- seatd 0.6.4 -> 0.7.0
+- shaderc 2022.1 -> 2022.2
+- shadow 4.11.1 -> 4.12.1
+- shared-mime-info 2.1 -> 2.2
+- slang 2.3.2 -> 2.3.3
+- speex 1.2.0 -> 1.2.1
+- speexdsp 1.2.0 -> 1.2.1
+- spirv-headers 1.3.204.1 -> 1.3.216.0
+- spirv-tools 1.3.204.1 -> 1.3.216.0
+- sqlite3 3.38.5 -> 3.39.3
+- squashfs-tools 4.5 -> 4.5.1
+- strace 5.16 -> 5.19
+- stress-ng 0.13.12 -> 0.14.03
+- sudo 1.9.10 -> 1.9.11p3
+- sysklogd 2.3.0 -> 2.4.4
+- sysstat 12.4.5 -> 12.6.0
+- systemd 250.5 -> 251.4
+- systemd-boot 250.5 -> 251.4
+- systemtap 4.6 -> 4.7
+- systemtap-native 4.6 -> 4.7
+- systemtap-uprobes 4.6 -> 4.7
+- sysvinit 3.01 -> 3.04
+- tiff 4.3.0 -> 4.4.0
+- tzcode-native 2022c -> 2022d
+- tzdata 2022c -> 2022d
+- u-boot 2022.01 -> 2022.07
+- u-boot-tools 2022.01 -> 2022.07
+- util-linux 2.37.4 -> 2.38.1
+- util-linux-libuuid 2.37.4 -> 2.38.1
+- valgrind 3.18.1 -> 3.19.0
+- vim 9.0.0541 -> 9.0.0598
+- vim-tiny 9.0.0541 -> 9.0.0598
+- virglrenderer 0.9.1 -> 0.10.3
+- vte 0.66.2 -> 0.68.0
+- vulkan-headers 1.3.204.1 -> 1.3.216.0
+- vulkan-loader 1.3.204.1 -> 1.3.216.0
+- vulkan-samples git (28ca2dad83ce…) -> git (74d45aace02d…)
+- vulkan-tools 1.3.204.1 -> 1.3.216.0
+- wayland 1.20.0 -> 1.21.0
+- wayland-protocols 1.25 -> 1.26
+- webkitgtk 2.36.5 -> 2.36.7
+- x264 r3039+gitX (5db6aa6cab1b…) -> r3039+gitX (baee400fa9ce…)
+- xauth 1.1.1 -> 1.1.2
+- xcb-proto 1.14.1 -> 1.15.2
+- xf86-video-cirrus 1.5.3 -> 1.6.0
+- xkeyboard-config 2.35.1 -> 2.36
+- xmlto 0.0.28 -> 0.0.28+0.0.29+gitX
+- xorgproto 2021.5 -> 2022.2
+- zlib 1.2.11 -> 1.2.12
+
+
+
+Contributors to 4.1
+~~~~~~~~~~~~~~~~~~~
+
+Thanks to the following people who contributed to this release:
+
+- Aatir Manzur
+- Ahmed Hossam
+- Alejandro Hernandez Samaniego
+- Alexander Kanavin
+- Alexandre Belloni
+- Alex Kiernan
+- Alex Stewart
+- Andrei Gherzan
+- Andrej Valek
+- Andrey Konovalov
+- Aníbal Limón
+- Anuj Mittal
+- Arkadiusz Drabczyk
+- Armin Kuster
+- Aryaman Gupta
+- Awais Belal
+- Beniamin Sandu
+- Bertrand Marquis
+- Bob Henz
+- Bruce Ashfield
+- Carlos Rafael Giani
+- Changhyeok Bae
+- Changqing Li
+- Chanho Park
+- Chen Qi
+- Christoph Lauer
+- Claudius Heine
+- Daiane Angolini
+- Daniel Gomez
+- Daniel McGregor
+- David Bagonyi
+- Davide Gardenal
+- Denys Dmytriyenko
+- Dmitry Baryshkov
+- Drew Moseley
+- Enrico Scholz
+- Ernst Sjöstrand
+- Etienne Cordonnier
+- Fabio Estevam
+- Federico Pellegrin
+- Felix Moessbauer
+- Ferry Toth
+- Florin Diaconescu
+- Gennaro Iorio
+- Grygorii Tertychnyi
+- Gunjan Gupta
+- Henning Schild
+- He Zhe
+- Hitendra Prajapati
+- Jack Mitchell
+- Jacob Kroon
+- Jan Kiszka
+- Jan Luebbe
+- Jan Vermaete
+- Jasper Orschulko
+- JeongBong Seo
+- Jeremy Puhlman
+- Jiaqing Zhao
+- Joerg Vehlow
+- Johan Korsnes
+- Johannes Schneider
+- John Edward Broadbent
+- Jon Mason
+- Jose Quaresma
+- Joshua Watt
+- Justin Bronder
+- Kai Kang
+- Kevin Hao
+- Khem Raj
+- Konrad Weihmann
+- Kory Maincent
+- Kristian Amlie
+- Lee Chee Yang
+- Lei Maohui
+- Leon Anavi
+- Luca Ceresoli
+- Lucas Stach
+- LUIS ENRIQUEZ
+- Marcel Ziswiler
+- Marius Kriegerowski
+- Mark Hatle
+- Markus Volk
+- Marta Rybczynska
+- Martin Beeger
+- Martin Jansa
+- Mateusz Marciniec
+- Mattias Jernberg
+- Matt Madison
+- Maxime Roussin-Bélanger
+- Michael Halstead
+- Michael Opdenacker
+- Mihai Lindner
+- Mikko Rapeli
+- Ming Liu
+- Mingli Yu
+- Muhammad Hamza
+- Naveen Saini
+- Neil Horman
+- Nick Potenski
+- Nicolas Dechesne
+- Niko Mauno
+- Ola x Nilsson
+- Otavio Salvador
+- Pascal Bach
+- Paul Eggleton
+- Paul Gortmaker
+- Paulo Neves
+- Pavel Zhukov
+- Peter Bergin
+- Peter Kjellerstedt
+- Peter Marko
+- Petr Vorel
+- Pgowda
+- Portia Stephens
+- Quentin Schulz
+- Rahul Kumar
+- Raju Kumar Pothuraju
+- Randy MacLeod
+- Raphael Teller
+- Rasmus Villemoes
+- Ricardo Salveti
+- Richard Purdie
+- Robert Joslyn
+- Robert Yang
+- Roland Hieber
+- Ross Burton
+- Rouven Czerwinski
+- Ruiqiang Hao
+- Russ Dill
+- Rusty Howell
+- Sakib Sajal
+- Samuli Piippo
+- Schmidt, Adriaan
+- Sean Anderson
+- Shruthi Ravichandran
+- Shubham Kulkarni
+- Simone Weiss
+- Sebastian Suesens
+- Stefan Herbrechtsmeier
+- Stefano Babic
+- Stefan Wiehler
+- Steve Sakoman
+- Sundeep KOKKONDA
+- Teoh Jay Shen
+- Thomas Epperson
+- Thomas Perrot
+- Thomas Roos
+- Tobias Schmidl
+- Tomasz Dziendzielski
+- Tom Hochstein
+- Tom Rini
+- Trevor Woerner
+- Ulrich Ölmann
+- Vyacheslav Yurkov
+- Wang Mingyu
+- William A. Kennington III
+- Xiaobing Luo
+- Xu Huan
+- Yang Xu
+- Yi Zhao
+- Yogesh Tyagi
+- Yongxin Liu
+- Yue Tao
+- Yulong (Kevin) Liu
+- Zach Welch
+- Zheng Ruoqin
+- Zoltán Böszörményi
+
+Repositories / Downloads for 4.1
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+poky
+
+- Repository Location: :yocto_git:`/poky`
+- Branch: :yocto_git:`langdale </poky/log/?h=langdale>`
+- Tag: :yocto_git:`yocto-4.1 </poky/log/?h=yocto-4.1>`
+- Git Revision: :yocto_git:`5200799866b92259e855051112520006e1aaaac0 </poky/commit/?id=5200799866b92259e855051112520006e1aaaac0>`
+- Release Artefact: poky-5200799866b92259e855051112520006e1aaaac0
+- sha: 9d9a2f7ecf2502f89f43bf45d63e6b61cdcb95ed1d75c8281372f550d809c823
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.1/poky-5200799866b92259e855051112520006e1aaaac0.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.1/poky-5200799866b92259e855051112520006e1aaaac0.tar.bz2
+
+openembedded-core
+
+- Repository Location: :oe_git:`/openembedded-core`
+- Branch: :oe_git:`langdale </openembedded-core/log/?h=langdale>`
+- Tag: :oe_git:`yocto-4.1 </openembedded-core/log/?h=yocto-4.1>`
+- Git Revision: :oe_git:`744a2277844ec9a384a9ca7dae2a634d5a0d3590 </openembedded-core/commit/?id=744a2277844ec9a384a9ca7dae2a634d5a0d3590>`
+- Release Artefact: oecore-744a2277844ec9a384a9ca7dae2a634d5a0d3590
+- sha: 34f1fd5bb83514bf0ec8ad7f8cce088a8e28677e1338db94c188283da704c663
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.1/oecore-744a2277844ec9a384a9ca7dae2a634d5a0d3590.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.1/oecore-744a2277844ec9a384a9ca7dae2a634d5a0d3590.tar.bz2
+
+meta-mingw
+
+- Repository Location: :yocto_git:`/meta-mingw`
+- Branch: :yocto_git:`langdale </meta-mingw/log/?h=langdale>`
+- Tag: :yocto_git:`yocto-4.1 </meta-mingw/log/?h=yocto-4.1>`
+- Git Revision: :yocto_git:`b0067202db8573df3d23d199f82987cebe1bee2c </meta-mingw/commit/?id=b0067202db8573df3d23d199f82987cebe1bee2c>`
+- Release Artefact: meta-mingw-b0067202db8573df3d23d199f82987cebe1bee2c
+- sha: 704f2940322b81ce774e9cbd27c3cfa843111d497dc7b1eeaa39cd694d9a2366
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.1/meta-mingw-b0067202db8573df3d23d199f82987cebe1bee2c.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.1/meta-mingw-b0067202db8573df3d23d199f82987cebe1bee2c.tar.bz2
+
+bitbake
+
+- Repository Location: :oe_git:`/bitbake`
+- Branch: :oe_git:`2.2 </bitbake/log/?h=2.2>`
+- Tag: :oe_git:`yocto-4.1 </bitbake/log/?h=yocto-4.1>`
+- Git Revision: :oe_git:`074da4c469d1f4177a1c5be72b9f3ccdfd379d67 </bitbake/commit/?id=074da4c469d1f4177a1c5be72b9f3ccdfd379d67>`
+- Release Artefact: bitbake-074da4c469d1f4177a1c5be72b9f3ccdfd379d67
+- sha: e32c300e0c8522d8d49ef10aae473bd5f293202672eb9d38e90ed92594ed1fe8
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.1/bitbake-074da4c469d1f4177a1c5be72b9f3ccdfd379d67.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.1/bitbake-074da4c469d1f4177a1c5be72b9f3ccdfd379d67.tar.bz2
+
+yocto-docs
+
+- Repository Location: :yocto_git:`/yocto-docs`
+- Branch: :yocto_git:`langdale </yocto-docs/log/?h=langdale>`
+- Tag: :yocto_git:`yocto-4.1 </yocto-docs/log/?h=yocto-4.1>`
+- Git Revision: :yocto_git:`42d3e26a0d04bc5951e640b471686f347dc9b74a </yocto-docs/commit/?id=42d3e26a0d04bc5951e640b471686f347dc9b74a>`
diff --git a/documentation/migration-guides/release-notes-4.2.1.rst b/documentation/migration-guides/release-notes-4.2.1.rst
new file mode 100644
index 0000000000..948c35fd67
--- /dev/null
+++ b/documentation/migration-guides/release-notes-4.2.1.rst
@@ -0,0 +1,206 @@
+.. SPDX-License-Identifier: CC-BY-SA-2.0-UK
+
+Release notes for Yocto-4.2.1 (Mickledore)
+------------------------------------------
+
+Security Fixes in Yocto-4.2.1
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+- connman: Fix :cve:`2023-28488`
+- linux-yocto: Ignore :cve:`2023-1652` and :cve:`2023-1829`
+- ghostscript: Fix :cve:`2023-28879`
+- qemu: Ignore :cve:`2023-0664`
+- ruby: Fix :cve:`2022-28738` and :cve:`2022-28739`
+- tiff: Fix :cve:`2022-4645`
+- xwayland: Fix :cve:`2023-1393`
+
+
+Fixes in Yocto-4.2.1
+~~~~~~~~~~~~~~~~~~~~
+
+- apr: upgrade to 1.7.3
+- bind: upgrade to 9.18.13
+- build-appliance-image: Update to mickledore head revision
+- cargo: Fix build on musl/riscv
+- cpio: fix appending to archives larger than 2GB
+- cracklib: upgrade to 2.9.11
+- cve-update-nvd2-native: added the missing http import
+- dev-manual: init-manager.rst: add summary
+- dhcpcd: use git instead of tarballs
+- docs: add support for mickledore (4.2) release
+- gawk: Add skipped.txt to emit test to ignore
+- gawk: Disable known ptest fails on musl
+- gawk: Remove redundant patch
+- glib-networking: Add test retry to avoid failures
+- glib-networking: Correct glib error handling in test patch
+- gtk4: upgrade to 4.10.3
+- kernel-devsrc: depend on python3-core instead of python3
+- kernel-fitimage: Fix the default dtb config check
+- kernel: improve initramfs bundle processing time
+- libarchive: Enable acls, xattr for native as well as target
+- libhandy: upgrade to 1.8.2
+- libnotify: remove dependency dbus
+- libpam: Fix the xtests/tst-pam_motd[1|3] failures
+- libpcap: upgrade to 1.10.4
+- libsdl2: upgrade to 2.26.5
+- libxml2: Disable icu tests on musl
+- license.bbclass: Include :term:`LICENSE` in the output when it fails to parse
+- linux-firmware: upgrade to 20230404
+- machine/qemuarm*: don't explicitly set vmalloc
+- maintainers.inc: Fix email address typo
+- maintainers.inc: Move repo to unassigned
+- man-pages: upgrade to 6.04
+- manuals: document :term:`SPDX_CUSTOM_ANNOTATION_VARS`
+- manuals: expand init manager documentation
+- mesa: upgrade to 23.0.3
+- migration-guides: add release-notes for 4.1.4
+- migration-guides: fixes and improvements to 4.2 release notes
+- migration-guides: release-notes-4.0.9.rst: add missing :term:`SPDX` info
+- migration-guides: release-notes-4.2: add doc improvement highlights
+- mpg123: upgrade to 1.31.3
+- mtools: upgrade to 4.0.43
+- oeqa/utils/metadata.py: Fix running oe-selftest running with no distro set
+- overview-manual: development-environment: update text and screenshots
+- overview-manual: update section about source archives
+- package_manager/ipk: fix config path generation in _create_custom_config()
+- pango: upgrade to 1.50.14
+- perl: patch out build paths from native binaries
+- poky.conf: bump version for 4.2.1 release
+- populate_sdk_ext.bbclass: redirect stderr to stdout so that both end in LOGFILE
+- populate_sdk_ext.bbclass: set :term:`METADATA_REVISION` with an :term:`DISTRO` override
+- python3targetconfig.bbclass: Extend PYTHONPATH instead of overwriting
+- qemu: Add fix for powerpc instruction fallback issue
+- qemu: Update ppc instruction fix to match revised upstream version
+- quilt: Fix merge.test race condition
+- recipes: Default to https git protocol where possible
+- ref-manual: add "Mixin" term
+- ref-manual: classes.rst: document devicetree.bbclass
+- ref-manual: classes: kernel: document automatic defconfig usage
+- ref-manual: classes: kernel: remove incorrect sentence opening
+- ref-manual: remove unused and obsolete file
+- ref-manual: system-requirements.rst: fix AlmaLinux variable name
+- ref-manual: variables.rst: add wikipedia shortcut for "getty"
+- ref-manual: variables.rst: document :term:`KERNEL_DANGLING_FEATURES_WARN_ONLY`
+- ref-manual: variables.rst: don't mention the :term:`INIT_MANAGER` "none" option
+- release-notes-4.2: remove/merge duplicates entries
+- release-notes-4.2: update RC3 changes
+- release-notes-4.2: update known issues and Repositories/Downloads
+- releases.svg: fix and explain duration of Hardknott 3.3
+- ruby: upgrade to 3.2.2
+- rust: upgrade to 1.68.2
+- selftest/distrodata: clean up exception lists in recipe maintainers test
+- systemd-systemctl: fix instance template WantedBy symlink construction
+- texinfo: upgrade to 7.0.3
+- unfs3: fix symlink time setting issue
+- update-alternatives.bbclass: fix old override syntax
+- vala: upgrade to 0.56.6
+- waffle: upgrade to 1.7.2
+- weston: add xwayland to :term:`DEPENDS` for :term:`PACKAGECONFIG` xwayland
+- wpebackend-fdo: upgrade to 1.14.2
+- xserver-xorg: upgrade to 21.1.8
+- xwayland: upgrade to 23.1.1
+
+
+Known Issues in Yocto-4.2.1
+~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+- N/A
+
+
+Contributors to Yocto-4.2.1
+~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+- Alex Kiernan
+- Alexander Kanavin
+- Arslan Ahmad
+- Bruce Ashfield
+- Chen Qi
+- Dmitry Baryshkov
+- Enrico Jörns
+- Jan Vermaete
+- Joe Slater
+- Johannes Schrimpf
+- Kai Kang
+- Khem Raj
+- Kyle Russell
+- Lee Chee Yang
+- Luca Ceresoli
+- Markus Volk
+- Martin Jansa
+- Martin Siegumfeldt
+- Michael Halstead
+- Michael Opdenacker
+- Ming Liu
+- Otavio Salvador
+- Pawan Badganchi
+- Peter Bergin
+- Peter Kjellerstedt
+- Piotr Łobacz
+- Richard Purdie
+- Ross Burton
+- Steve Sakoman
+- Thomas Roos
+- Virendra Thakur
+- Wang Mingyu
+- Yoann Congal
+- Zhixiong Chi
+
+
+Repositories / Downloads for Yocto-4.2.1
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+poky
+
+- Repository Location: :yocto_git:`/poky`
+- Branch: :yocto_git:`mickledore </poky/log/?h=mickledore>`
+- Tag: :yocto_git:`yocto-4.2.1 </poky/log/?h=yocto-4.2.1>`
+- Git Revision: :yocto_git:`c5c69f78fc7ce4ba361363c14352e4264ce7813f </poky/commit/?id=c5c69f78fc7ce4ba361363c14352e4264ce7813f>`
+- Release Artefact: poky-c5c69f78fc7ce4ba361363c14352e4264ce7813f
+- sha: 057d7771dceebb949a79359d7d028a733a29ae7ecd98b60fefcff83fecb22eb7
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.2.1/poky-c5c69f78fc7ce4ba361363c14352e4264ce7813f.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.2.1/poky-c5c69f78fc7ce4ba361363c14352e4264ce7813f.tar.bz2
+
+openembedded-core
+
+- Repository Location: :oe_git:`/openembedded-core`
+- Branch: :oe_git:`mickledore </openembedded-core/log/?h=mickledore>`
+- Tag: :oe_git:`yocto-4.2.1 </openembedded-core/log/?h=yocto-4.2.1>`
+- Git Revision: :oe_git:`20cd64812d286c920bd766145ab1cd968e72667e </openembedded-core/commit/?id=20cd64812d286c920bd766145ab1cd968e72667e>`
+- Release Artefact: oecore-20cd64812d286c920bd766145ab1cd968e72667e
+- sha: 877fb909af7aa51e1c962d33cfe91ba3e075c384716006aa1345b4bcb15a48ef
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.2.1/oecore-20cd64812d286c920bd766145ab1cd968e72667e.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.2.1/oecore-20cd64812d286c920bd766145ab1cd968e72667e.tar.bz2
+
+meta-mingw
+
+- Repository Location: :yocto_git:`/meta-mingw`
+- Branch: :yocto_git:`mickledore </meta-mingw/log/?h=mickledore>`
+- Tag: :yocto_git:`yocto-4.2.1 </meta-mingw/log/?h=yocto-4.2.1>`
+- Git Revision: :yocto_git:`cc9fd0a988dc1041035a6a6cafb2d1237ef38d8e </meta-mingw/commit/?id=cc9fd0a988dc1041035a6a6cafb2d1237ef38d8e>`
+- Release Artefact: meta-mingw-cc9fd0a988dc1041035a6a6cafb2d1237ef38d8e
+- sha: 69ccc3ee503b5c35602889e85d28df64a5422ad0f1e55c96c94135b837bb4a1c
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.2.1/meta-mingw-cc9fd0a988dc1041035a6a6cafb2d1237ef38d8e.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.2.1/meta-mingw-cc9fd0a988dc1041035a6a6cafb2d1237ef38d8e.tar.bz2
+
+bitbake
+
+- Repository Location: :oe_git:`/bitbake`
+- Branch: :oe_git:`2.4 </bitbake/log/?h=2.4>`
+- Tag: :oe_git:`yocto-4.2.1 </bitbake/log/?h=yocto-4.2.1>`
+- Git Revision: :oe_git:`d97d62e2cbe4bae17f0886f3b4759e8f9ba6d38c </bitbake/commit/?id=d97d62e2cbe4bae17f0886f3b4759e8f9ba6d38c>`
+- Release Artefact: bitbake-d97d62e2cbe4bae17f0886f3b4759e8f9ba6d38c
+- sha: 5edcb97cb545011226b778355bb840ebcc790552d4a885a0d83178153697ba7a
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.2.1/bitbake-d97d62e2cbe4bae17f0886f3b4759e8f9ba6d38c.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.2.1/bitbake-d97d62e2cbe4bae17f0886f3b4759e8f9ba6d38c.tar.bz2
+
+yocto-docs
+
+- Repository Location: :yocto_git:`/yocto-docs`
+- Branch: :yocto_git:`mickledore </yocto-docs/log/?h=mickledore>`
+- Tag: :yocto_git:`yocto-4.2.1 </yocto-docs/log/?h=yocto-4.2.1>`
+- Git Revision: :yocto_git:`6b04269bba72311e83139cc88b7a3539a5d832e8 </yocto-docs/commit/?id=6b04269bba72311e83139cc88b7a3539a5d832e8>`
+
diff --git a/documentation/migration-guides/release-notes-4.2.2.rst b/documentation/migration-guides/release-notes-4.2.2.rst
new file mode 100644
index 0000000000..74f2d0e82a
--- /dev/null
+++ b/documentation/migration-guides/release-notes-4.2.2.rst
@@ -0,0 +1,330 @@
+.. SPDX-License-Identifier: CC-BY-SA-2.0-UK
+
+Release notes for Yocto-4.2.2 (Mickledore)
+------------------------------------------
+
+Security Fixes in Yocto-4.2.2
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+- binutils: Fix :cve:`2023-1972`
+- cups: Fix :cve:`2023-32324`
+- curl: Fix :cve:`2023-28319`, :cve:`2023-28320`, :cve:`2023-28321` and :cve:`2023-28322`
+- dbus: Fix :cve:`2023-34969`
+- git: Fix :cve:`2023-25652` and :cve:`2023-29007`
+- git: Ignore :cve:`2023-25815`
+- libwebp: Fix :cve:`2023-1999`
+- libxml2: Fix :cve:`2023-28484` and :cve:`2023-29469`
+- libxpm: Fix :cve:`2022-44617`
+- ninja: Ignore :cve:`2021-4336`
+- openssl: Fix :cve:`2023-0464`, :cve:`2023-0465`, :cve:`2023-0466`, :cve:`2023-1255` and :cve:`2023-2650`
+- perl: Fix :cve:`2023-31484` and :cve:`2023-31486`
+- sysstat: Fix :cve:`2023-33204`
+- tiff: Fix :cve_mitre:`2023-25434`, :cve:`2023-26965` and :cve:`2023-2731`
+- vim: Fix :cve:`2023-2426`
+
+
+Fixes in Yocto-4.2.2
+~~~~~~~~~~~~~~~~~~~~
+
+- apr: Upgrade to 1.7.4
+- avahi: fix D-Bus introspection
+- babeltrace2: Always use BFD linker when building tests with ld-is-lld distro feature
+- babeltrace2: Upgrade to 2.0.5
+- baremetal-helloworld: Update :term:`SRCREV` to fix entry addresses for ARM architectures
+- bind: Upgrade to 9.18.15
+- binutils: move packaging of gprofng static lib into common .inc
+- binutils: package static libs from gprofng
+- binutils: stable 2.40 branch updates (7343182dd1)
+- bitbake.conf: add unzstd in :term:`HOSTTOOLS`
+- bitbake: runqueue: Fix deferred task/multiconfig race issue
+- bno_plot.py, btt_plot.py: Ask for python3 specifically
+- build-appliance-image: Update to mickledore head revision
+- busybox: Upgrade to 1.36.1
+- cmake.bbclass: do not search host paths for find_program()
+- conf: add nice level to the hash config ignred variables
+- connman: fix warning by specifying runstatedir at configure time
+- cpio: Run ptests under ptest user
+- dbus: Upgrade to 1.14.8
+- devtool: Fix the wrong variable in srcuri_entry
+- dnf: only write the log lock to root for native dnf
+- docs: bsp-guide: bsp: fix typo
+- dpkg: Upgrade to v1.21.22
+- e2fsprogs: Fix error SRCDIR when using usrmerge :term:`DISTRO_FEATURES`
+- e2fsprogs: fix ptest bug for second running
+- ell: Upgrade to 0.57
+- expect: Add ptest support
+- fribidi: Upgrade to 1.0.13
+- gawk: Upgrade to 5.2.2
+- gcc : upgrade to v12.3
+- gdb: fix crashes when debugging threads with Arm Pointer Authentication enabled
+- gdb: Upgrade to 13.2
+- git: Upgrade to 2.39.3
+- glib-networking: use correct error code in ptest
+- glibc: Pass linker choice via compiler flags
+- glibc: stable 2.37 branch updates.
+- gnupg: Upgrade to 2.4.2
+- go.bbclass: don't use test to check output from ls
+- go: Upgrade to 1.20.5
+- go: Use -no-pie to build target cgo
+- gobject-introspection: remove obsolete :term:`DEPENDS`
+- grub: submit determinism.patch upstream
+- gstreamer1.0: Upgrade to 1.22.3
+- gtk4: Upgrade to 4.10.4
+- image-live.bbclass: respect :term:`IMAGE_MACHINE_SUFFIX`
+- image_types: Fix reproducible builds for initramfs and UKI img
+- inetutils: remove unused patch files
+- ipk: Revert Decode byte data to string in manifest handling
+- iso-codes: Upgrade to 4.15.0
+- kernel: don't force PAHOLE=false
+- kmod: remove unused ptest.patch
+- kmscube: Correct :term:`DEPENDS` to avoid overwrite
+- layer.conf: Add missing dependency exclusion
+- lib/terminal.py: Add urxvt terminal
+- libbsd: Add correct license for all packages
+- libdnf: Upgrade to 0.70.1
+- libgcrypt: Upgrade to 1.10.2
+- libgloss: remove unused patch file
+- libmicrohttpd: Upgrade to 0.9.77
+- libmodule-build-perl: Upgrade to 0.4234
+- libx11: remove unused patch and :term:`FILESEXTRAPATHS`
+- libx11: Upgrade to 1.8.5
+- libxfixes: Upgrade to v6.0.1
+- libxft: Upgrade to 2.3.8
+- libxi: Upgrade to v1.8.1
+- libxml2: Do not use lld linker when building with tests on rv64
+- libxml2: Upgrade to 2.10.4
+- libxpm: Upgrade to 3.5.16
+- linux-firmware: Upgrade to 20230515
+- linux-yocto/5.15: cfg: fix DECNET configuration warning
+- linux-yocto/5.15: Upgrade to v5.15.118
+- linux-yocto/6.1: fix intermittent x86 boot hangs
+- linux-yocto/6.1: Upgrade to v6.1.35
+- linux-yocto: move build / debug dependencies to .inc
+- logrotate: Do not create logrotate.status file
+- maintainers.inc: correct Carlos Rafael Giani's email address
+- maintainers.inc: correct unassigned entries
+- maintainers.inc: unassign Adrian Bunk from wireless-regdb
+- maintainers.inc: unassign Alistair Francis from opensbi
+- maintainers.inc: unassign Andreas Müller from itstool entry
+- maintainers.inc: unassign Chase Qi from libc-test
+- maintainers.inc: unassign Oleksandr Kravchuk from python3 and all other items
+- maintainers.inc: unassign Pascal Bach from cmake entry
+- maintainers.inc: unassign Ricardo Neri from ovmf
+- maintainers.inc: update version for gcc-source
+- maintainers.inc: unassign Richard Weinberger from erofs-utils entry
+- meta: depend on autoconf-archive-native, not autoconf-archive
+- meta: lib: oe: npm_registry: Add more safe caracters
+- migration-guides: add release notes for 4.2.1
+- minicom: remove unused patch files
+- mobile-broadband-provider-info: Upgrade to 20230416
+- musl: Correct :term:`SRC_URI`
+- oeqa/selftest/bbtests: add non-existent prefile/postfile tests
+- oeqa/selftest/cases/devtool.py: skip all tests require folder a git repo
+- oeqa: adding selftest-hello and use it to speed up tests
+- openssh: Remove BSD-4-clause contents completely from codebase
+- openssl: fix building on riscv32
+- openssl: Upgrade to 3.1.1
+- overview-manual: concepts.rst: Fix a typo
+- parted: Add missing libuuid to linker cmdline for libparted-fs-resize.so
+- perf: Make built-in libtraceevent plugins cohabit with external libtraceevent
+- piglit: Add missing glslang dependencies
+- piglit: Fix c++11-narrowing warnings in tests
+- pkgconf: Upgrade to 1.9.5
+- pm-utils: fix multilib conflictions
+- poky.conf: bump version for 4.2.2 release
+- populate_sdk_base.bbclass: respect :term:`MLPREFIX` for ptest-pkgs's ptest-runner
+- profile-manual: fix blktrace remote usage instructions
+- psmisc: Set :term:`ALTERNATIVE` for pstree to resolve conflict with busybox
+- ptest-runner: Ensure data writes don't race
+- ptest-runner: Pull in "runner: Remove threads and mutexes" fix
+- ptest-runner: Pull in sync fix to improve log warnings
+- python3-bcrypt: Use BFD linker when building tests
+- python3-numpy: remove NPY_INLINE, use inline instead
+- qemu: a pending patch was submitted and accepted upstream
+- qemu: remove unused qemu-7.0.0-glibc-2.36.patch
+- qemurunner.py: fix error message about qmp
+- qemurunner: avoid leaking server_socket
+- ref-manual: add clarification for :term:`SRCREV`
+- ref-manual: classes.rst: fix typo
+- rootfs-postcommands.bbclass: add post func remove_unused_dnf_log_lock
+- rpcsvc-proto: Upgrade to 1.4.4
+- rpm: drop unused 0001-Rip-out-partial-support-for-unused-MD2-and-RIPEMD160.patch
+- rpm: Upgrade to 4.18.1
+- rpm: write macros under libdir
+- runqemu-gen-tapdevs: Refactoring
+- runqemu-ifupdown/get-tapdevs: Add support for ip tuntap
+- scripts/runqemu: allocate unfsd ports in a way that doesn't race or clash with unrelated processes
+- scripts/runqemu: split lock dir creation into a reusable function
+- scripts: fix buildstats diff/summary hard bound to host python3
+- sdk.py: error out when moving file fails
+- sdk.py: fix moving dnf contents
+- selftest/license: Exclude from world
+- selftest/reproducible: Allow native/cross reuse in test
+- serf: Upgrade to 1.3.10
+- staging.bbclass: do not add extend_recipe_sysroot to prefuncs of prepare_recipe_sysroot
+- strace: Disable failing test
+- strace: Merge two similar patches
+- strace: Update patches/tests with upstream fixes
+- sysfsutils: fetch a supported fork from github
+- systemd-systemctl: support instance expansion in WantedBy
+- systemd: Drop a backport
+- tiff: Remove unused patch from tiff
+- uninative: Upgrade to 3.10 to support gcc 13
+- uninative: Upgrade to 4.0 to include latest gcc 13.1.1
+- unzip: fix configure check for cross compilation
+- unzip: remove hardcoded LARGE_FILE_SUPPORT
+- useradd-example: package typo correction
+- useradd-staticids.bbclass: improve error message
+- v86d: Improve kernel dependency
+- vim: Upgrade to 9.0.1527
+- weston-init: add profile to point users to global socket
+- weston-init: add the weston user to the wayland group
+- weston-init: add weston user to the render group
+- weston-init: fix the mixed indentation
+- weston-init: guard against systemd configs
+- weston-init: make sure the render group exists
+- wget: Upgrade to 1.21.4
+- wireless-regdb: Upgrade to 2023.05.03
+- xdpyinfo: Upgrade to 1.3.4
+- xf86-video-intel: Use the HTTPS protocol to fetch the Git repositories
+- xinput: upgrade to v1.6.4
+- xwininfo: upgrade to v1.1.6
+- xz: Upgrade to 5.4.3
+- yocto-bsps: update to v5.15.106
+- zip: fix configure check by using _Static_assert
+- zip: remove unnecessary LARGE_FILE_SUPPORT CLFAGS
+
+
+Known Issues in Yocto-4.2.2
+~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+- N/A
+
+
+Contributors to Yocto-4.2.2
+~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+- Alberto Planas
+- Alejandro Hernandez Samaniego
+- Alexander Kanavin
+- Andrej Valek
+- Andrew Jeffery
+- Anuj Mittal
+- Archana Polampalli
+- BELOUARGA Mohamed
+- Bruce Ashfield
+- Changqing Li
+- Charlie Wu
+- Chen Qi
+- Chi Xu
+- Daniel Ammann
+- Deepthi Hemraj
+- Denys Dmytriyenko
+- Dmitry Baryshkov
+- Ed Beroset
+- Eero Aaltonen
+- Fabien Mahot
+- Frieder Paape
+- Frieder Schrempf
+- Hannu Lounento
+- Ian Ray
+- Jermain Horsman
+- Jörg Sommer
+- Kai Kang
+- Khem Raj
+- Lee Chee Yang
+- Lorenzo Arena
+- Marc Ferland
+- Markus Volk
+- Martin Jansa
+- Michael Halstead
+- Mikko Rapeli
+- Mingli Yu
+- Natasha Bailey
+- Nikhil R
+- Pablo Saavedra
+- Paul Gortmaker
+- Pavel Zhukov
+- Peter Kjellerstedt
+- Qiu Tingting
+- Quentin Schulz
+- Randolph Sapp
+- Randy MacLeod
+- Ranjitsinh Rathod
+- Richard Purdie
+- Riyaz Khan
+- Ross Burton
+- Sakib Sajal
+- Sanjay Chitroda
+- Siddharth Doshi
+- Soumya Sambu
+- Steve Sakoman
+- Sudip Mukherjee
+- Sundeep KOKKONDA
+- Thomas Roos
+- Tim Orling
+- Tom Hochstein
+- Trevor Gamblin
+- Ulrich Ölmann
+- Wang Mingyu
+- Xiangyu Chen
+
+
+Repositories / Downloads for Yocto-4.2.2
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+poky
+
+- Repository Location: :yocto_git:`/poky`
+- Branch: :yocto_git:`mickledore </poky/log/?h=mickledore>`
+- Tag: :yocto_git:`yocto-4.2.2 </poky/log/?h=yocto-4.2.2>`
+- Git Revision: :yocto_git:`6e17b3e644ca15b8b4afd071ccaa6f172a0e681a </poky/commit/?id=6e17b3e644ca15b8b4afd071ccaa6f172a0e681a>`
+- Release Artefact: poky-6e17b3e644ca15b8b4afd071ccaa6f172a0e681a
+- sha: c0b4dadcf00b97d866dd4cc2f162474da2c3e3289badaa42a978bff1d479af99
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.2.2/poky-6e17b3e644ca15b8b4afd071ccaa6f172a0e681a.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.2.2/poky-6e17b3e644ca15b8b4afd071ccaa6f172a0e681a.tar.bz2
+
+openembedded-core
+
+- Repository Location: :oe_git:`/openembedded-core`
+- Branch: :oe_git:`mickledore </openembedded-core/log/?h=mickledore>`
+- Tag: :oe_git:`yocto-4.2.2 </openembedded-core/log/?h=yocto-4.2.2>`
+- Git Revision: :oe_git:`3ef283e02b0b91daf64c3a589e1f6bb68d4f5aa1 </openembedded-core/commit/?id=3ef283e02b0b91daf64c3a589e1f6bb68d4f5aa1>`
+- Release Artefact: oecore-3ef283e02b0b91daf64c3a589e1f6bb68d4f5aa1
+- sha: d2fd127f46e626fa4456c193af3dbd25d4b2565db59bc23be69a3b2dd4febed5
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.2.2/oecore-3ef283e02b0b91daf64c3a589e1f6bb68d4f5aa1.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.2.2/oecore-3ef283e02b0b91daf64c3a589e1f6bb68d4f5aa1.tar.bz2
+
+meta-mingw
+
+- Repository Location: :yocto_git:`/meta-mingw`
+- Branch: :yocto_git:`mickledore </meta-mingw/log/?h=mickledore>`
+- Tag: :yocto_git:`yocto-4.2.2 </meta-mingw/log/?h=yocto-4.2.2>`
+- Git Revision: :yocto_git:`4608d0bb7e47c52b8f6e9be259bfb1716fda9fd6 </meta-mingw/commit/?id=4608d0bb7e47c52b8f6e9be259bfb1716fda9fd6>`
+- Release Artefact: meta-mingw-4608d0bb7e47c52b8f6e9be259bfb1716fda9fd6
+- sha: fcbae0dedb363477492b86b8f997e06f995793285535b24dc66038845483eeef
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.2.2/meta-mingw-4608d0bb7e47c52b8f6e9be259bfb1716fda9fd6.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.2.2/meta-mingw-4608d0bb7e47c52b8f6e9be259bfb1716fda9fd6.tar.bz2
+
+bitbake
+
+- Repository Location: :oe_git:`/bitbake`
+- Branch: :oe_git:`2.4 </bitbake/log/?h=2.4>`
+- Tag: :oe_git:`yocto-4.2.2 </bitbake/log/?h=yocto-4.2.2>`
+- Git Revision: :oe_git:`08033b63ae442c774bd3fce62844eac23e6882d7 </bitbake/commit/?id=08033b63ae442c774bd3fce62844eac23e6882d7>`
+- Release Artefact: bitbake-08033b63ae442c774bd3fce62844eac23e6882d7
+- sha: 1d070c133bfb6502ac04befbf082cbfda7582c8b1c48296a788384352e5061fd
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.2.2/bitbake-08033b63ae442c774bd3fce62844eac23e6882d7.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.2.2/bitbake-08033b63ae442c774bd3fce62844eac23e6882d7.tar.bz2
+
+yocto-docs
+
+- Repository Location: :yocto_git:`/yocto-docs`
+- Branch: :yocto_git:`mickledore </yocto-docs/log/?h=mickledore>`
+- Tag: :yocto_git:`yocto-4.2.2 </yocto-docs/log/?h=yocto-4.2.2>`
+- Git Revision: :yocto_git:`54d849d259a332389beea159d789f8fa92871475 </yocto-docs/commit/?id=54d849d259a332389beea159d789f8fa92871475>`
+
diff --git a/documentation/migration-guides/release-notes-4.2.3.rst b/documentation/migration-guides/release-notes-4.2.3.rst
new file mode 100644
index 0000000000..3b568a1c29
--- /dev/null
+++ b/documentation/migration-guides/release-notes-4.2.3.rst
@@ -0,0 +1,263 @@
+.. SPDX-License-Identifier: CC-BY-SA-2.0-UK
+
+Release notes for Yocto-4.2.3 (Mickledore)
+------------------------------------------
+
+Security Fixes in Yocto-4.2.3
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+- bind: Fix :cve:`2023-2828` and :cve:`2023-2911`
+- cups: Fix :cve:`2023-34241`
+- dmidecode: Fix :cve:`2023-30630`
+- erofs-utils: Fix :cve:`2023-33551` and :cve:`2023-33552`
+- ghostscript: Fix :cve:`2023-36664`
+- go: Fix :cve_mitre:`2023-24531`
+- libarchive: ignore :cve:`2023-30571`
+- libjpeg-turbo: Fix :cve:`2023-2804`
+- libx11: Fix :cve:`2023-3138`
+- ncurses: Fix :cve:`2023-29491`
+- openssh: Fix :cve:`2023-38408`
+- python3-certifi: Fix :cve:`2023-37920`
+- python3-requests: Fix :cve:`2023-32681`
+- python3: Ignore :cve:`2023-36632`
+- qemu: fix :cve:`2023-0330`, :cve_mitre:`2023-2861`, :cve_mitre:`2023-3255` and :cve_mitre:`2023-3301`
+- ruby: Fix :cve:`2023-36617`
+- vim: Fix :cve:`2023-2609` and :cve:`2023-2610`
+- webkitgtk: Fix :cve:`2023-27932` and :cve:`2023-27954`
+
+
+Fixes in Yocto-4.2.3
+~~~~~~~~~~~~~~~~~~~~
+
+- acpica: Update :term:`SRC_URI`
+- automake: fix buildtest patch
+- baremetal-helloworld: Fix race condition
+- bind: upgrade to v9.18.17
+- binutils: stable 2.40 branch updates
+- build-appliance-image: Update to mickledore head revision
+- cargo.bbclass: set up cargo environment in common do_compile
+- conf.py: add macro for Mitre CVE links
+- curl: ensure all ptest failures are caught
+- cve-update-nvd2-native: actually use API keys
+- cve-update-nvd2-native: fix cvssV3 metrics
+- cve-update-nvd2-native: handle all configuration nodes, not just first
+- cve-update-nvd2-native: increase retry count
+- cve-update-nvd2-native: log a little more
+- cve-update-nvd2-native: retry all errors and sleep between retries
+- cve-update-nvd2-native: use exact times, don't truncate
+- dev-manual: wic.rst: Update native tools build command
+- devtool/upgrade: raise an error if extracting source produces more than one directory
+- diffutils: upgrade to 3.10
+- docs: ref-manual: terms: fix typos in :term:`SPDX` term
+- file: fix the way path is written to environment-setup.d
+- file: return wrapper to fix builds when file is in buildtools-tarball
+- freetype: upgrade to 2.13.1
+- gcc-testsuite: Fix ppc cpu specification
+- gcc: don't pass --enable-standard-branch-protection
+- glibc-locale: use stricter matching for metapackages' runtime dependencies
+- glibc-testsuite: Fix network restrictions causing test failures
+- glibc/check-test-wrapper: don't emit warnings from ssh
+- go: upgrade to 1.20.6
+- gstreamer1.0: upgrade to 1.22.4
+- ifupdown: install missing directories
+- kernel-module-split add systemd modulesloaddir and modprobedir config
+- kernel-module-split: install config modules directories only when they are needed
+- kernel-module-split: make autoload and probeconf distribution specific
+- kernel-module-split: use context manager to open files
+- kernel: Fix path comparison in kernel staging dir symlinking
+- kernel: config modules directories are handled by kernel-module-split
+- kernel: don't fail if Modules.symvers doesn't exist
+- libassuan: upgrade to 2.5.6
+- libksba: upgrade to 1.6.4
+- libnss-nis: upgrade to 3.2
+- libproxy: fetch from git
+- libwebp: upgrade to 1.3.1
+- libx11: upgrade to 1.8.6
+- libxcrypt: fix hard-coded ".so" extension
+- linux-firmware : Add firmware of RTL8822 serie
+- linux-firmware: Fix mediatek mt7601u firmware path
+- linux-firmware: package firmare for Dragonboard 410c
+- linux-firmware: split platform-specific Adreno shaders to separate packages
+- linux-firmware: upgrade to 20230625
+- linux-yocto/5.15: update to v5.15.124
+- linux-yocto/6.1: cfg: update ima.cfg to match current meta-integrity
+- linux-yocto/6.1: upgrade to v6.1.38
+- ltp: Add kernel loopback module dependency
+- ltp: add :term:`RDEPENDS` on findutils
+- lttng-ust: upgrade to 2.13.6
+- machine/arch-arm64: add -mbranch-protection=standard
+- maintainers.inc: Modify email address
+- mdadm: add util-linux-blockdev ptest dependency
+- mdadm: fix 07revert-inplace ptest
+- mdadm: fix segfaults when running ptests
+- mdadm: fix util-linux ptest dependency
+- mdadm: re-add mdadm-ptest to PTESTS_SLOW
+- mdadm: skip running known broken ptests
+- meson.bbclass: Point to llvm-config from native sysroot
+- migration-guides: add release notes for 4.0.10
+- migration-guides: add release notes for 4.0.11
+- migration-guides: add release notes for 4.2.2
+- oeqa/runtime/cases/rpm: fix wait_for_no_process_for_user failure case
+- oeqa/runtime/ltp: Increase ltp test output timeout
+- oeqa/selftest/devtool: add unit test for "devtool add -b"
+- oeqa/ssh: Further improve process exit handling
+- oeqa/target/ssh: Ensure EAGAIN doesn't truncate output
+- oeqa/utils/nfs: allow requesting non-udp ports
+- openssh: upgrade to 9.3p2
+- openssl: add PERLEXTERNAL path to test its existence
+- openssl: use a glob on the PERLEXTERNAL to track updates on the path
+- opkg-utils: upgrade to 0.6.2
+- opkg: upgrade to 0.6.2
+- pkgconf: update :term:`SRC_URI`
+- poky.conf: bump version for 4.2.3 release
+- poky.conf: update :term:`SANITY_TESTED_DISTROS` to match autobuilder
+- ptest-runner: Pull in parallel test fixes and output handling
+- python3-certifi: upgrade to 2023.7.22
+- python3: fix missing comma in get_module_deps3.py
+- recipetool: Fix inherit in created -native* recipes
+- ref-manual: LTS releases now supported for 4 years
+- ref-manual: document image-specific variant of :term:`INCOMPATIBLE_LICENSE`
+- ref-manual: releases.svg: updates
+- resulttool/resultutils: allow index generation despite corrupt json
+- rootfs-postcommands.bbclass: Revert "add post func remove_unused_dnf_log_lock"
+- rootfs: Add debugfs package db file copy and cleanup
+- rootfs_rpm: don't depend on opkg-native for update-alternatives
+- rpm: Pick debugfs package db files/dirs explicitly
+- rust-common.bbclass: move musl-specific linking fix from rust-source.inc
+- scripts/oe-setup-builddir: copy conf-notes.txt to build dir
+- scripts/resulttool: add mention about new detected tests
+- selftest/cases/glibc.py: fix the override syntax
+- selftest/cases/glibc.py: increase the memory for testing
+- selftest/cases/glibc.py: switch to using NFS over TCP
+- shadow-sysroot: add license information
+- systemd-systemctl: fix errors in instance name expansion
+- taglib: upgrade to 1.13.1
+- target/ssh: Ensure exit code set for commands
+- tcf-agent: upgrade to 1.8.0
+- testimage/oeqa: Drop testimage_dump_host functionality
+- tiff: upgrade to 4.5.1
+- uboot-extlinux-config.bbclass: fix old override syntax in comment
+- util-linux: add alternative links for ipcs,ipcrm
+- vim: upgrade to 9.0.1592
+- webkitgtk: upgrade to 2.38.6
+- weston: Cleanup and fix x11 and xwayland dependencies
+
+
+Known Issues in Yocto-4.2.3
+~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+- N/A
+
+
+Contributors to Yocto-4.2.3
+~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+- Alejandro Hernandez Samaniego
+- Alex Kiernan
+- Alexander Kanavin
+- Alexis Lothoré
+- Andrej Valek
+- Anuj Mittal
+- Archana Polampalli
+- BELOUARGA Mohamed
+- Benjamin Bouvier
+- Bruce Ashfield
+- Changqing Li
+- Chen Qi
+- Daniel Semkowicz
+- Dmitry Baryshkov
+- Enrico Scholz
+- Etienne Cordonnier
+- Joe Slater
+- Joel Stanley
+- Jose Quaresma
+- Julien Stephan
+- Kai Kang
+- Khem Raj
+- Lee Chee Yang
+- Marek Vasut
+- Mark Hatle
+- Michael Halstead
+- Michael Opdenacker
+- Mingli Yu
+- Narpat Mali
+- Oleksandr Hnatiuk
+- Ovidiu Panait
+- Peter Marko
+- Quentin Schulz
+- Richard Purdie
+- Ross Burton
+- Sanjana
+- Sakib Sajal
+- Staffan Rydén
+- Steve Sakoman
+- Stéphane Veyret
+- Sudip Mukherjee
+- Thomas Roos
+- Tom Hochstein
+- Trevor Gamblin
+- Wang Mingyu
+- Yi Zhao
+- Yoann Congal
+- Yogita Urade
+- Yuta Hayama
+
+
+Repositories / Downloads for Yocto-4.2.3
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+poky
+
+- Repository Location: :yocto_git:`/poky`
+- Branch: :yocto_git:`mickledore </poky/log/?h=mickledore>`
+- Tag: :yocto_git:`yocto-4.2.3 </poky/log/?h=yocto-4.2.3>`
+- Git Revision: :yocto_git:`aa63b25cbe25d89ab07ca11ee72c17cab68df8de </poky/commit/?id=aa63b25cbe25d89ab07ca11ee72c17cab68df8de>`
+- Release Artefact: poky-aa63b25cbe25d89ab07ca11ee72c17cab68df8de
+- sha: 9e2b40fc25f7984b3227126ec9b8aa68d3747c8821fb7bf8cb635fc143f894c3
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.2.3/poky-aa63b25cbe25d89ab07ca11ee72c17cab68df8de.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.2.3/poky-aa63b25cbe25d89ab07ca11ee72c17cab68df8de.tar.bz2
+
+openembedded-core
+
+- Repository Location: :oe_git:`/openembedded-core`
+- Branch: :oe_git:`mickledore </openembedded-core/log/?h=mickledore>`
+- Tag: :oe_git:`yocto-4.2.3 </openembedded-core/log/?h=yocto-4.2.3>`
+- Git Revision: :oe_git:`7e3489c0c5970389c8a239dc7b367bcadf554eb5 </openembedded-core/commit/?id=7e3489c0c5970389c8a239dc7b367bcadf554eb5>`
+- Release Artefact: oecore-7e3489c0c5970389c8a239dc7b367bcadf554eb5
+- sha: 68620aca7c9db6b9a65d9853cacff4e60578f0df39e3e37114e062e1667ba724
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.2.3/oecore-7e3489c0c5970389c8a239dc7b367bcadf554eb5.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.2.3/oecore-7e3489c0c5970389c8a239dc7b367bcadf554eb5.tar.bz2
+
+meta-mingw
+
+- Repository Location: :yocto_git:`/meta-mingw`
+- Branch: :yocto_git:`mickledore </meta-mingw/log/?h=mickledore>`
+- Tag: :yocto_git:`yocto-4.2.3 </meta-mingw/log/?h=yocto-4.2.3>`
+- Git Revision: :yocto_git:`92258028e1b5664a9f832541d5c4f6de0bd05e07 </meta-mingw/commit/?id=92258028e1b5664a9f832541d5c4f6de0bd05e07>`
+- Release Artefact: meta-mingw-92258028e1b5664a9f832541d5c4f6de0bd05e07
+- sha: ee081460b5dff4fb8dd4869ce5631718dbaaffbede9532b879b854c18f1b3f5d
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.2.3/meta-mingw-92258028e1b5664a9f832541d5c4f6de0bd05e07.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.2.3/meta-mingw-92258028e1b5664a9f832541d5c4f6de0bd05e07.tar.bz2
+
+bitbake
+
+- Repository Location: :oe_git:`/bitbake`
+- Branch: :oe_git:`2.4 </bitbake/log/?h=2.4>`
+- Tag: :oe_git:`yocto-4.2.3 </bitbake/log/?h=yocto-4.2.3>`
+- Git Revision: :oe_git:`08033b63ae442c774bd3fce62844eac23e6882d7 </bitbake/commit/?id=08033b63ae442c774bd3fce62844eac23e6882d7>`
+- Release Artefact: bitbake-08033b63ae442c774bd3fce62844eac23e6882d7
+- sha: 1d070c133bfb6502ac04befbf082cbfda7582c8b1c48296a788384352e5061fd
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.2.3/bitbake-08033b63ae442c774bd3fce62844eac23e6882d7.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.2.3/bitbake-08033b63ae442c774bd3fce62844eac23e6882d7.tar.bz2
+
+yocto-docs
+
+- Repository Location: :yocto_git:`/yocto-docs`
+- Branch: :yocto_git:`mickledore </yocto-docs/log/?h=mickledore>`
+- Tag: :yocto_git:`yocto-4.2.3 </yocto-docs/log/?h=yocto-4.2.3>`
+- Git Revision: :yocto_git:`8e6752a9e55d16f3713e248b37f9d4d2745a2375 </yocto-docs/commit/?id=8e6752a9e55d16f3713e248b37f9d4d2745a2375>`
+
diff --git a/documentation/migration-guides/release-notes-4.2.4.rst b/documentation/migration-guides/release-notes-4.2.4.rst
new file mode 100644
index 0000000000..3c20140e29
--- /dev/null
+++ b/documentation/migration-guides/release-notes-4.2.4.rst
@@ -0,0 +1,364 @@
+.. SPDX-License-Identifier: CC-BY-SA-2.0-UK
+
+Release notes for Yocto-4.2.4 (Mickledore)
+------------------------------------------
+
+Security Fixes in Yocto-4.2.4
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+- bind: Fix :cve:`2023-3341` and :cve:`2023-4236`
+- binutils: Fix :cve:`2023-39128`
+- cups: fix :cve:`2023-4504`
+- curl: Fix :cve:`2023-28320`, :cve:`2023-32001`, :cve:`2023-38039`, :cve:`2023-38545` and :cve:`2023-38546`
+- dmidecode: fix for :cve:`2023-30630`
+- dropbear: fix :cve:`2023-36328`
+- ffmpeg: Ignore :cve:`2023-39018`
+- gcc: Fix :cve:`2023-4039`
+- gdb: Fix :cve:`2023-39128`
+- ghostscript: Fix :cve:`2023-38559` and :cve:`2023-43115`
+- glibc: Fix :cve:`2023-4527` and :cve:`2023-4806`
+- go: Fix :cve:`2023-29409` and :cve:`2023-39533`
+- grub: Fix :cve:`2023-4692` and :cve:`2023-4693`
+- gstreamer: Fix :cve_mitre:`2023-40474`, :cve_mitre:`2023-40475` and :cve_mitre:`2023-40476`
+- inetutils: fix :cve:`2023-40303`
+- librsvg: Fix :cve:`2023-38633`
+- libssh2: Fix :cve:`2020-22218`
+- libwebp: Fix :cve:`2023-4863` and :cve:`2023-5129`
+- libx11: Fix :cve:`2023-43785`, :cve:`2023-43786` and :cve:`2023-43787`
+- libxpm: Fix :cve:`2023-43788` and :cve:`2023-43789`
+- linux-yocto/6.1: Ignore :cve:`2003-1604`, :cve:`2004-0230`, :cve:`2006-3635`, :cve:`2006-5331`, :cve:`2006-6128`, :cve:`2007-4774`, :cve:`2007-6761`, :cve:`2007-6762`, :cve:`2008-7316`, :cve:`2009-2692`, :cve:`2010-0008`, :cve:`2010-3432`, :cve:`2010-4648`, :cve:`2010-5313`, :cve:`2010-5328`, :cve:`2010-5329`, :cve:`2010-5331`, :cve:`2010-5332`, :cve:`2011-4098`, :cve:`2011-4131`, :cve:`2011-4915`, :cve:`2011-5321`, :cve:`2011-5327`, :cve:`2012-0957`, :cve:`2012-2119`, :cve:`2012-2136`, :cve:`2012-2137`, :cve:`2012-2313`, :cve:`2012-2319`, :cve:`2012-2372`, :cve:`2012-2375`, :cve:`2012-2390`, :cve:`2012-2669`, :cve:`2012-2744`, :cve:`2012-2745`, :cve:`2012-3364`, :cve:`2012-3375`, :cve:`2012-3400`, :cve:`2012-3412`, :cve:`2012-3430`, :cve:`2012-3510`, :cve:`2012-3511`, :cve:`2012-3520`, :cve:`2012-3552`, :cve:`2012-4398`, :cve:`2012-4444`, :cve:`2012-4461`, :cve:`2012-4467`, :cve:`2012-4508`, :cve:`2012-4530`, :cve:`2012-4565`, :cve:`2012-5374`, :cve:`2012-5375`, :cve:`2012-5517`, :cve:`2012-6536`, :cve:`2012-6537`, :cve:`2012-6538`, :cve:`2012-6539`, :cve:`2012-6540`, :cve:`2012-6541`, :cve:`2012-6542`, :cve:`2012-6543`, :cve:`2012-6544`, :cve:`2012-6545`, :cve:`2012-6546`, :cve:`2012-6547`, :cve:`2012-6548`, :cve:`2012-6549`, :cve:`2012-6638`, :cve:`2012-6647`, :cve:`2012-6657`, :cve:`2012-6689`, :cve:`2012-6701`, :cve:`2012-6703`, :cve:`2012-6704`, :cve:`2012-6712`, :cve:`2013-0160`, :cve:`2013-0190`, :cve:`2013-0216`, :cve:`2013-0217`, :cve:`2013-0228`, :cve:`2013-0231`, :cve:`2013-0268`, :cve:`2013-0290`, :cve:`2013-0309`, :cve:`2013-0310`, :cve:`2013-0311`, :cve:`2013-0313`, :cve:`2013-0343`, :cve:`2013-0349`, :cve:`2013-0871`, :cve:`2013-0913`, :cve:`2013-0914`, :cve:`2013-1059`, :cve:`2013-1763`, :cve:`2013-1767`, :cve:`2013-1772`, :cve:`2013-1773`, :cve:`2013-1774`, :cve:`2013-1792`, :cve:`2013-1796`, :cve:`2013-1797`, :cve:`2013-1798`, :cve:`2013-1819`, :cve:`2013-1826`, :cve:`2013-1827`, :cve:`2013-1828`, :cve:`2013-1848`, :cve:`2013-1858`, :cve:`2013-1860`, :cve:`2013-1928`, :cve:`2013-1929`, :cve:`2013-1943`, :cve:`2013-1956`, :cve:`2013-1957`, :cve:`2013-1958`, :cve:`2013-1959`, :cve:`2013-1979`, :cve:`2013-2015`, :cve:`2013-2017`, :cve:`2013-2058`, :cve:`2013-2094`, :cve:`2013-2128`, :cve:`2013-2140`, :cve:`2013-2141`, :cve:`2013-2146`, :cve:`2013-2147`, :cve:`2013-2148`, :cve:`2013-2164`, :cve:`2013-2206`, :cve:`2013-2232`, :cve:`2013-2234`, :cve:`2013-2237`, :cve:`2013-2546`, :cve:`2013-2547`, :cve:`2013-2548`, :cve:`2013-2596`, :cve:`2013-2634`, :cve:`2013-2635`, :cve:`2013-2636`, :cve:`2013-2850`, :cve:`2013-2851`, :cve:`2013-2852`, :cve:`2013-2888`, :cve:`2013-2889`, :cve:`2013-2890`, :cve:`2013-2891`, :cve:`2013-2892`, :cve:`2013-2893`, :cve:`2013-2894`, :cve:`2013-2895`, :cve:`2013-2896`, :cve:`2013-2897`, :cve:`2013-2898`, :cve:`2013-2899`, :cve:`2013-2929`, :cve:`2013-2930`, :cve:`2013-3076`, :cve:`2013-3222`, :cve:`2013-3223`, :cve:`2013-3224`, :cve:`2013-3225`, :cve:`2013-3226`, :cve:`2013-3227`, :cve:`2013-3228`, :cve:`2013-3229`, :cve:`2013-3230`, :cve:`2013-3231`, :cve:`2013-3232`, :cve:`2013-3233`, :cve:`2013-3234`, :cve:`2013-3235`, :cve:`2013-3236`, :cve:`2013-3237`, :cve:`2013-3301`, :cve:`2013-3302`, :cve:`2013-4125`, :cve:`2013-4127`, :cve:`2013-4129`, :cve:`2013-4162`, :cve:`2013-4163`, :cve:`2013-4205`, :cve:`2013-4220`, :cve:`2013-4247`, :cve:`2013-4254`, :cve:`2013-4270`, :cve:`2013-4299`, :cve:`2013-4300`, :cve:`2013-4312`, :cve:`2013-4343`, :cve:`2013-4345`, :cve:`2013-4348`, :cve:`2013-4350`, :cve:`2013-4387`, :cve:`2013-4470`, :cve:`2013-4483`, :cve:`2013-4511`, :cve:`2013-4512`, :cve:`2013-4513`, :cve:`2013-4514`, :cve:`2013-4515`, :cve:`2013-4516`, :cve:`2013-4563`, :cve:`2013-4579`, :cve:`2013-4587`, :cve:`2013-4588`, :cve:`2013-4591`, :cve:`2013-4592`, :cve:`2013-5634`, :cve:`2013-6282`, :cve:`2013-6367`, :cve:`2013-6368`, :cve:`2013-6376`, :cve:`2013-6378`, :cve:`2013-6380`, :cve:`2013-6381`, :cve:`2013-6382`, :cve:`2013-6383`, :cve:`2013-6431`, :cve:`2013-6432`, :cve:`2013-6885`, :cve:`2013-7026`, :cve:`2013-7027`, :cve:`2013-7263`, :cve:`2013-7264`, :cve:`2013-7265`, :cve:`2013-7266`, :cve:`2013-7267`, :cve:`2013-7268`, :cve:`2013-7269`, :cve:`2013-7270`, :cve:`2013-7271`, :cve:`2013-7281`, :cve:`2013-7339`, :cve:`2013-7348`, :cve:`2013-7421`, :cve:`2013-7446`, :cve:`2013-7470`, :cve:`2014-0038`, :cve:`2014-0049`, :cve:`2014-0055`, :cve:`2014-0069`, :cve:`2014-0077`, :cve:`2014-0100`, :cve:`2014-0101`, :cve:`2014-0102`, :cve:`2014-0131`, :cve:`2014-0155`, :cve:`2014-0181`, :cve:`2014-0196`, :cve:`2014-0203`, :cve:`2014-0205`, :cve:`2014-0206`, :cve:`2014-1438`, :cve:`2014-1444`, :cve:`2014-1445`, :cve:`2014-1446`, :cve:`2014-1690`, :cve:`2014-1737`, :cve:`2014-1738`, :cve:`2014-1739`, :cve:`2014-1874`, :cve:`2014-2038`, :cve:`2014-2039`, :cve:`2014-2309`, :cve:`2014-2523`, :cve:`2014-2568`, :cve:`2014-2580`, :cve:`2014-2672`, :cve:`2014-2673`, :cve:`2014-2678`, :cve:`2014-2706`, :cve:`2014-2739`, :cve:`2014-2851`, :cve:`2014-2889`, :cve:`2014-3122`, :cve:`2014-3144`, :cve:`2014-3145`, :cve:`2014-3153`, :cve:`2014-3180`, :cve:`2014-3181`, :cve:`2014-3182`, :cve:`2014-3183`, :cve:`2014-3184`, :cve:`2014-3185`, :cve:`2014-3186`, :cve:`2014-3534`, :cve:`2014-3535`, :cve:`2014-3601`, :cve:`2014-3610`, :cve:`2014-3611`, :cve:`2014-3631`, :cve:`2014-3645`, :cve:`2014-3646`, :cve:`2014-3647`, :cve:`2014-3673`, :cve:`2014-3687`, :cve:`2014-3688`, :cve:`2014-3690`, :cve:`2014-3917`, :cve:`2014-3940`, :cve:`2014-4014`, :cve:`2014-4027`, :cve:`2014-4157`, :cve:`2014-4171`, :cve:`2014-4508`, :cve:`2014-4608`, :cve:`2014-4611`, :cve:`2014-4652`, :cve:`2014-4653`, :cve:`2014-4654`, :cve:`2014-4655`, :cve:`2014-4656`, :cve:`2014-4667`, :cve:`2014-4699`, :cve:`2014-4943`, :cve:`2014-5045`, :cve:`2014-5077`, :cve:`2014-5206`, :cve:`2014-5207`, :cve:`2014-5471`, :cve:`2014-5472`, :cve:`2014-6410`, :cve:`2014-6416`, :cve:`2014-6417`, :cve:`2014-6418`, :cve:`2014-7145`, :cve:`2014-7283`, :cve:`2014-7284`, :cve:`2014-7822`, :cve:`2014-7825`, :cve:`2014-7826`, :cve:`2014-7841`, :cve:`2014-7842`, :cve:`2014-7843`, :cve:`2014-7970`, :cve:`2014-7975`, :cve:`2014-8086`, :cve:`2014-8133`, :cve:`2014-8134`, :cve:`2014-8159`, :cve:`2014-8160`, :cve:`2014-8171`, :cve:`2014-8172`, :cve:`2014-8173`, :cve:`2014-8369`, :cve:`2014-8480`, :cve:`2014-8481`, :cve:`2014-8559`, :cve:`2014-8709`, :cve:`2014-8884`, :cve:`2014-8989`, :cve:`2014-9090`, :cve:`2014-9322`, :cve:`2014-9419`, :cve:`2014-9420`, :cve:`2014-9428`, :cve:`2014-9529`, :cve:`2014-9584`, :cve:`2014-9585`, :cve:`2014-9644`, :cve:`2014-9683`, :cve:`2014-9710`, :cve:`2014-9715`, :cve:`2014-9717`, :cve:`2014-9728`, :cve:`2014-9729`, :cve:`2014-9730`, :cve:`2014-9731`, :cve:`2014-9803`, :cve:`2014-9870`, :cve:`2014-9888`, :cve:`2014-9895`, :cve:`2014-9903`, :cve:`2014-9904`, :cve:`2014-9914`, :cve:`2014-9922`, :cve:`2014-9940`, :cve:`2015-0239`, :cve:`2015-0274`, :cve:`2015-0275`, :cve:`2015-1333`, :cve:`2015-1339`, :cve:`2015-1350`, :cve:`2015-1420`, :cve:`2015-1421`, :cve:`2015-1465`, :cve:`2015-1573`, :cve:`2015-1593`, :cve:`2015-1805`, :cve:`2015-2041`, :cve:`2015-2042`, :cve:`2015-2150`, :cve:`2015-2666`, :cve:`2015-2672`, :cve:`2015-2686`, :cve:`2015-2830`, :cve:`2015-2922`, :cve:`2015-2925`, :cve:`2015-3212`, :cve:`2015-3214`, :cve:`2015-3288`, :cve:`2015-3290`, :cve:`2015-3291`, :cve:`2015-3331`, :cve:`2015-3339`, :cve:`2015-3636`, :cve:`2015-4001`, :cve:`2015-4002`, :cve:`2015-4003`, :cve:`2015-4004`, :cve:`2015-4036`, :cve:`2015-4167`, :cve:`2015-4170`, :cve:`2015-4176`, :cve:`2015-4177`, :cve:`2015-4178`, :cve:`2015-4692`, :cve:`2015-4700`, :cve:`2015-5156`, :cve:`2015-5157`, :cve:`2015-5257`, :cve:`2015-5283`, :cve:`2015-5307`, :cve:`2015-5327`, :cve:`2015-5364`, :cve:`2015-5366`, :cve:`2015-5697`, :cve:`2015-5706`, :cve:`2015-5707`, :cve:`2015-6252`, :cve:`2015-6526`, :cve:`2015-6937`, :cve:`2015-7509`, :cve:`2015-7513`, :cve:`2015-7515`, :cve:`2015-7550`, :cve:`2015-7566`, :cve:`2015-7613`, :cve:`2015-7799`, :cve:`2015-7833`, :cve:`2015-7872`, :cve:`2015-7884`, :cve:`2015-7885`, :cve:`2015-7990`, :cve:`2015-8104`, :cve:`2015-8215`, :cve:`2015-8324`, :cve:`2015-8374`, :cve:`2015-8539`, :cve:`2015-8543`, :cve:`2015-8550`, :cve:`2015-8551`, :cve:`2015-8552`, :cve:`2015-8553`, :cve:`2015-8569`, :cve:`2015-8575`, :cve:`2015-8660`, :cve:`2015-8709`, :cve:`2015-8746`, :cve:`2015-8767`, :cve:`2015-8785`, :cve:`2015-8787`, :cve:`2015-8812`, :cve:`2015-8816`, :cve:`2015-8830`, :cve:`2015-8839`, :cve:`2015-8844`, :cve:`2015-8845`, :cve:`2015-8950`, :cve:`2015-8952`, :cve:`2015-8953`, :cve:`2015-8955`, :cve:`2015-8956`, :cve:`2015-8961`, :cve:`2015-8962`, :cve:`2015-8963`, :cve:`2015-8964`, :cve:`2015-8966`, :cve:`2015-8967`, :cve:`2015-8970`, :cve:`2015-9004`, :cve:`2015-9016`, :cve:`2015-9289`, :cve:`2016-0617`, :cve:`2016-0723`, :cve:`2016-0728`, :cve:`2016-0758`, :cve:`2016-0821`, :cve:`2016-0823`, :cve:`2016-10044`, :cve:`2016-10088`, :cve:`2016-10147`, :cve:`2016-10150`, :cve:`2016-10153`, :cve:`2016-10154`, :cve:`2016-10200`, :cve:`2016-10208`, :cve:`2016-10229`, :cve:`2016-10318`, :cve:`2016-10723`, :cve:`2016-10741`, :cve:`2016-10764`, :cve:`2016-10905`, :cve:`2016-10906`, :cve:`2016-10907`, :cve:`2016-1237`, :cve:`2016-1575`, :cve:`2016-1576`, :cve:`2016-1583`, :cve:`2016-2053`, :cve:`2016-2069`, :cve:`2016-2070`, :cve:`2016-2085`, :cve:`2016-2117`, :cve:`2016-2143`, :cve:`2016-2184`, :cve:`2016-2185`, :cve:`2016-2186`, :cve:`2016-2187`, :cve:`2016-2188`, :cve:`2016-2383`, :cve:`2016-2384`, :cve:`2016-2543`, :cve:`2016-2544`, :cve:`2016-2545`, :cve:`2016-2546`, :cve:`2016-2547`, :cve:`2016-2548`, :cve:`2016-2549`, :cve:`2016-2550`, :cve:`2016-2782`, :cve:`2016-2847`, :cve:`2016-3044`, :cve:`2016-3070`, :cve:`2016-3134`, :cve:`2016-3135`, :cve:`2016-3136`, :cve:`2016-3137`, :cve:`2016-3138`, :cve:`2016-3139`, :cve:`2016-3140`, :cve:`2016-3156`, :cve:`2016-3157`, :cve:`2016-3672`, :cve:`2016-3689`, :cve:`2016-3713`, :cve:`2016-3841`, :cve:`2016-3857`, :cve:`2016-3951`, :cve:`2016-3955`, :cve:`2016-3961`, :cve:`2016-4440`, :cve:`2016-4470`, :cve:`2016-4482`, :cve:`2016-4485`, :cve:`2016-4486`, :cve:`2016-4557`, :cve:`2016-4558`, :cve:`2016-4565`, :cve:`2016-4568`, :cve:`2016-4569`, :cve:`2016-4578`, :cve:`2016-4580`, :cve:`2016-4581`, :cve:`2016-4794`, :cve:`2016-4805`, :cve:`2016-4913`, :cve:`2016-4951`, :cve:`2016-4997`, :cve:`2016-4998`, :cve:`2016-5195`, :cve:`2016-5243`, :cve:`2016-5244`, :cve:`2016-5400`, :cve:`2016-5412`, :cve:`2016-5696`, :cve:`2016-5728`, :cve:`2016-5828`, :cve:`2016-5829`, :cve:`2016-6130`, :cve:`2016-6136`, :cve:`2016-6156`, :cve:`2016-6162`, :cve:`2016-6187`, :cve:`2016-6197`, :cve:`2016-6198`, :cve:`2016-6213`, :cve:`2016-6327`, :cve:`2016-6480`, :cve:`2016-6516`, :cve:`2016-6786`, :cve:`2016-6787`, :cve:`2016-6828`, :cve:`2016-7039`, :cve:`2016-7042`, :cve:`2016-7097`, :cve:`2016-7117`, :cve:`2016-7425`, :cve:`2016-7910`, :cve:`2016-7911`, :cve:`2016-7912`, :cve:`2016-7913`, :cve:`2016-7914`, :cve:`2016-7915`, :cve:`2016-7916`, :cve:`2016-7917`, :cve:`2016-8399`, :cve:`2016-8405`, :cve:`2016-8630`, :cve:`2016-8632`, :cve:`2016-8633`, :cve:`2016-8636`, :cve:`2016-8645`, :cve:`2016-8646`, :cve:`2016-8650`, :cve:`2016-8655`, :cve:`2016-8658`, :cve:`2016-8666`, :cve:`2016-9083`, :cve:`2016-9084`, :cve:`2016-9120`, :cve:`2016-9178`, :cve:`2016-9191`, :cve:`2016-9313`, :cve:`2016-9555`, :cve:`2016-9576`, :cve:`2016-9588`, :cve:`2016-9604`, :cve:`2016-9685`, :cve:`2016-9754`, :cve:`2016-9755`, :cve:`2016-9756`, :cve:`2016-9777`, :cve:`2016-9793`, :cve:`2016-9794`, :cve:`2016-9806`, :cve:`2016-9919`, :cve:`2017-0605`, :cve:`2017-0627`, :cve:`2017-0750`, :cve:`2017-0786`, :cve:`2017-0861`, :cve:`2017-1000`, :cve:`2017-1000111`, :cve:`2017-1000112`, :cve:`2017-1000251`, :cve:`2017-1000252`, :cve:`2017-1000253`, :cve:`2017-1000255`, :cve:`2017-1000363`, :cve:`2017-1000364`, :cve:`2017-1000365`, :cve:`2017-1000370`, :cve:`2017-1000371`, :cve:`2017-1000379`, :cve:`2017-1000380`, :cve:`2017-1000405`, :cve:`2017-1000407`, :cve:`2017-1000410`, :cve:`2017-10661`, :cve:`2017-10662`, :cve:`2017-10663`, :cve:`2017-10810`, :cve:`2017-10911`, :cve:`2017-11089`, :cve:`2017-11176`, :cve:`2017-11472`, :cve:`2017-11473`, :cve:`2017-11600`, :cve:`2017-12134`, :cve:`2017-12146`, :cve:`2017-12153`, :cve:`2017-12154`, :cve:`2017-12168`, :cve:`2017-12188`, :cve:`2017-12190`, :cve:`2017-12192`, :cve:`2017-12193`, :cve:`2017-12762`, :cve:`2017-13080`, :cve:`2017-13166`, :cve:`2017-13167`, :cve:`2017-13168`, :cve:`2017-13215`, :cve:`2017-13216`, :cve:`2017-13220`, :cve:`2017-13305`, :cve:`2017-13686`, :cve:`2017-13695`, :cve:`2017-13715`, :cve:`2017-14051`, :cve:`2017-14106`, :cve:`2017-14140`, :cve:`2017-14156`, :cve:`2017-14340`, :cve:`2017-14489`, :cve:`2017-14497`, :cve:`2017-14954`, :cve:`2017-14991`, :cve:`2017-15102`, :cve:`2017-15115`, :cve:`2017-15116`, :cve:`2017-15121`, :cve:`2017-15126`, :cve:`2017-15127`, :cve:`2017-15128`, :cve:`2017-15129`, :cve:`2017-15265`, :cve:`2017-15274`, :cve:`2017-15299`, :cve:`2017-15306`, :cve:`2017-15537`, :cve:`2017-15649`, :cve:`2017-15868`, :cve:`2017-15951`, :cve:`2017-16525`, :cve:`2017-16526`, :cve:`2017-16527`, :cve:`2017-16528`, :cve:`2017-16529`, :cve:`2017-16530`, :cve:`2017-16531`, :cve:`2017-16532`, :cve:`2017-16533`, :cve:`2017-16534`, :cve:`2017-16535`, :cve:`2017-16536`, :cve:`2017-16537`, :cve:`2017-16538`, :cve:`2017-16643`, :cve:`2017-16644`, :cve:`2017-16645`, :cve:`2017-16646`, :cve:`2017-16647`, :cve:`2017-16648`, :cve:`2017-16649`, :cve:`2017-16650`, :cve:`2017-16911`, :cve:`2017-16912`, :cve:`2017-16913`, :cve:`2017-16914`, :cve:`2017-16939`, :cve:`2017-16994`, :cve:`2017-16995`, :cve:`2017-16996`, :cve:`2017-17052`, :cve:`2017-17053`, :cve:`2017-17448`, :cve:`2017-17449`, :cve:`2017-17450`, :cve:`2017-17558`, :cve:`2017-17712`, :cve:`2017-17741`, :cve:`2017-17805`, :cve:`2017-17806`, :cve:`2017-17807`, :cve:`2017-17852`, :cve:`2017-17853`, :cve:`2017-17854`, :cve:`2017-17855`, :cve:`2017-17856`, :cve:`2017-17857`, :cve:`2017-17862`, :cve:`2017-17863`, :cve:`2017-17864`, :cve:`2017-17975`, :cve:`2017-18017`, :cve:`2017-18075`, :cve:`2017-18079`, :cve:`2017-18174`, :cve:`2017-18193`, :cve:`2017-18200`, :cve:`2017-18202`, :cve:`2017-18203`, :cve:`2017-18204`, :cve:`2017-18208`, :cve:`2017-18216`, :cve:`2017-18218`, :cve:`2017-18221`, :cve:`2017-18222`, :cve:`2017-18224`, :cve:`2017-18232`, :cve:`2017-18241`, :cve:`2017-18249`, :cve:`2017-18255`, :cve:`2017-18257`, :cve:`2017-18261`, :cve:`2017-18270`, :cve:`2017-18344`, :cve:`2017-18360`, :cve:`2017-18379`, :cve:`2017-18509`, :cve:`2017-18549`, :cve:`2017-18550`, :cve:`2017-18551`, :cve:`2017-18552`, :cve:`2017-18595`, :cve:`2017-2583`, :cve:`2017-2584`, :cve:`2017-2596`, :cve:`2017-2618`, :cve:`2017-2634`, :cve:`2017-2636`, :cve:`2017-2647`, :cve:`2017-2671`, :cve:`2017-5123`, :cve:`2017-5546`, :cve:`2017-5547`, :cve:`2017-5548`, :cve:`2017-5549`, :cve:`2017-5550`, :cve:`2017-5551`, :cve:`2017-5576`, :cve:`2017-5577`, :cve:`2017-5669`, :cve:`2017-5715`, :cve:`2017-5753`, :cve:`2017-5754`, :cve:`2017-5897`, :cve:`2017-5967`, :cve:`2017-5970`, :cve:`2017-5972`, :cve:`2017-5986`, :cve:`2017-6001`, :cve:`2017-6074`, :cve:`2017-6214`, :cve:`2017-6345`, :cve:`2017-6346`, :cve:`2017-6347`, :cve:`2017-6348`, :cve:`2017-6353`, :cve:`2017-6874`, :cve:`2017-6951`, :cve:`2017-7184`, :cve:`2017-7187`, :cve:`2017-7261`, :cve:`2017-7273`, :cve:`2017-7277`, :cve:`2017-7294`, :cve:`2017-7308`, :cve:`2017-7346`, :cve:`2017-7374`, :cve:`2017-7472`, :cve:`2017-7477`, :cve:`2017-7482`, :cve:`2017-7487`, :cve:`2017-7495`, :cve:`2017-7518`, :cve:`2017-7533`, :cve:`2017-7541`, :cve:`2017-7542`, :cve:`2017-7558`, :cve:`2017-7616`, :cve:`2017-7618`, :cve:`2017-7645`, :cve:`2017-7889`, :cve:`2017-7895`, :cve:`2017-7979`, :cve:`2017-8061`, :cve:`2017-8062`, :cve:`2017-8063`, :cve:`2017-8064`, :cve:`2017-8065`, :cve:`2017-8066`, :cve:`2017-8067`, :cve:`2017-8068`, :cve:`2017-8069`, :cve:`2017-8070`, :cve:`2017-8071`, :cve:`2017-8072`, :cve:`2017-8106`, :cve:`2017-8240`, :cve:`2017-8797`, :cve:`2017-8824`, :cve:`2017-8831`, :cve:`2017-8890`, :cve:`2017-8924`, :cve:`2017-8925`, :cve:`2017-9059`, :cve:`2017-9074`, :cve:`2017-9075`, :cve:`2017-9076`, :cve:`2017-9077`, :cve:`2017-9150`, :cve:`2017-9211`, :cve:`2017-9242`, :cve:`2017-9605`, :cve:`2017-9725`, :cve:`2017-9984`, :cve:`2017-9985`, :cve:`2017-9986`, :cve:`2018-1000004`, :cve:`2018-1000026`, :cve:`2018-1000028`, :cve:`2018-1000199`, :cve:`2018-1000200`, :cve:`2018-1000204`, :cve:`2018-10021`, :cve:`2018-10074`, :cve:`2018-10087`, :cve:`2018-10124`, :cve:`2018-10322`, :cve:`2018-10323`, :cve:`2018-1065`, :cve:`2018-1066`, :cve:`2018-10675`, :cve:`2018-1068`, :cve:`2018-10840`, :cve:`2018-10853`, :cve:`2018-1087`, :cve:`2018-10876`, :cve:`2018-10877`, :cve:`2018-10878`, :cve:`2018-10879`, :cve:`2018-10880`, :cve:`2018-10881`, :cve:`2018-10882`, :cve:`2018-10883`, :cve:`2018-10901`, :cve:`2018-10902`, :cve:`2018-1091`, :cve:`2018-1092`, :cve:`2018-1093`, :cve:`2018-10938`, :cve:`2018-1094`, :cve:`2018-10940`, :cve:`2018-1095`, :cve:`2018-1108`, :cve:`2018-1118`, :cve:`2018-1120`, :cve:`2018-11232`, :cve:`2018-1128`, :cve:`2018-1129`, :cve:`2018-1130`, :cve:`2018-11412`, :cve:`2018-11506`, :cve:`2018-11508`, :cve:`2018-12126`, :cve:`2018-12127`, :cve:`2018-12130`, :cve:`2018-12207`, :cve:`2018-12232`, :cve:`2018-12233`, :cve:`2018-12633`, :cve:`2018-12714`, :cve:`2018-12896`, :cve:`2018-12904`, :cve:`2018-13053`, :cve:`2018-13093`, :cve:`2018-13094`, :cve:`2018-13095`, :cve:`2018-13096`, :cve:`2018-13097`, :cve:`2018-13098`, :cve:`2018-13099`, :cve:`2018-13100`, :cve:`2018-13405`, :cve:`2018-13406`, :cve:`2018-14609`, :cve:`2018-14610`, :cve:`2018-14611`, :cve:`2018-14612`, :cve:`2018-14613`, :cve:`2018-14614`, :cve:`2018-14615`, :cve:`2018-14616`, :cve:`2018-14617`, :cve:`2018-14619`, :cve:`2018-14625`, :cve:`2018-14633`, :cve:`2018-14634`, :cve:`2018-14641`, :cve:`2018-14646`, :cve:`2018-14656`, :cve:`2018-14678`, :cve:`2018-14734`, :cve:`2018-15471`, :cve:`2018-15572`, :cve:`2018-15594`, :cve:`2018-16276`, :cve:`2018-16597`, :cve:`2018-16658`, :cve:`2018-16862`, :cve:`2018-16871`, :cve:`2018-16880`, :cve:`2018-16882`, :cve:`2018-16884`, :cve:`2018-17182`, :cve:`2018-17972`, :cve:`2018-18021`, :cve:`2018-18281`, :cve:`2018-18386`, :cve:`2018-18397`, :cve:`2018-18445`, :cve:`2018-18559`, :cve:`2018-18690`, :cve:`2018-18710`, :cve:`2018-18955`, :cve:`2018-19406`, :cve:`2018-19407`, :cve:`2018-19824`, :cve:`2018-19854`, :cve:`2018-19985`, :cve:`2018-20169`, :cve:`2018-20449`, :cve:`2018-20509`, :cve:`2018-20510`, :cve:`2018-20511`, :cve:`2018-20669`, :cve:`2018-20784`, :cve:`2018-20836`, :cve:`2018-20854`, :cve:`2018-20855`, :cve:`2018-20856`, :cve:`2018-20961`, :cve:`2018-20976`, :cve:`2018-21008`, :cve:`2018-25015`, :cve:`2018-25020`, :cve:`2018-3620`, :cve:`2018-3639`, :cve:`2018-3646`, :cve:`2018-3665`, :cve:`2018-3693`, :cve:`2018-5332`, :cve:`2018-5333`, :cve:`2018-5344`, :cve:`2018-5390`, :cve:`2018-5391`, :cve:`2018-5703`, :cve:`2018-5750`, :cve:`2018-5803`, :cve:`2018-5814`, :cve:`2018-5848`, :cve:`2018-5873`, :cve:`2018-5953`, :cve:`2018-5995`, :cve:`2018-6412`, :cve:`2018-6554`, :cve:`2018-6555`, :cve:`2018-6927`, :cve:`2018-7191`, :cve:`2018-7273`, :cve:`2018-7480`, :cve:`2018-7492`, :cve:`2018-7566`, :cve:`2018-7740`, :cve:`2018-7754`, :cve:`2018-7755`, :cve:`2018-7757`, :cve:`2018-7995`, :cve:`2018-8043`, :cve_mitre:`2018-8087`, :cve_mitre:`2018-8781`, :cve_mitre:`2018-8822`, :cve_mitre:`2018-8897`, :cve_mitre:`2018-9363`, :cve_mitre:`2018-9385`, :cve_mitre:`2018-9415`, :cve_mitre:`2018-9422`, :cve_mitre:`2018-9465`, :cve_mitre:`2018-9516`, :cve_mitre:`2018-9517`, :cve_mitre:`2018-9518` and :cve_mitre:`2018-9568`
+- linux-yocto/6.1 (Continued): Ignore :cve:`2019-0136`, :cve:`2019-0145`, :cve:`2019-0146`, :cve:`2019-0147`, :cve:`2019-0148`, :cve:`2019-0149`, :cve:`2019-0154`, :cve:`2019-0155`, :cve:`2019-10124`, :cve:`2019-10125`, :cve:`2019-10126`, :cve:`2019-10142`, :cve:`2019-10207`, :cve:`2019-10220`, :cve:`2019-10638`, :cve:`2019-10639`, :cve:`2019-11085`, :cve:`2019-11091`, :cve:`2019-11135`, :cve:`2019-11190`, :cve:`2019-11191`, :cve:`2019-1125`, :cve:`2019-11477`, :cve:`2019-11478`, :cve:`2019-11479`, :cve:`2019-11486`, :cve:`2019-11487`, :cve:`2019-11599`, :cve:`2019-11683`, :cve:`2019-11810`, :cve:`2019-11811`, :cve:`2019-11815`, :cve:`2019-11833`, :cve:`2019-11884`, :cve:`2019-12378`, :cve:`2019-12379`, :cve:`2019-12380`, :cve:`2019-12381`, :cve:`2019-12382`, :cve:`2019-12454`, :cve:`2019-12455`, :cve:`2019-12614`, :cve:`2019-12615`, :cve:`2019-12817`, :cve:`2019-12818`, :cve:`2019-12819`, :cve:`2019-12881`, :cve:`2019-12984`, :cve:`2019-13233`, :cve:`2019-13272`, :cve:`2019-13631`, :cve:`2019-13648`, :cve:`2019-14283`, :cve:`2019-14284`, :cve:`2019-14615`, :cve:`2019-14763`, :cve:`2019-14814`, :cve:`2019-14815`, :cve:`2019-14816`, :cve:`2019-14821`, :cve:`2019-14835`, :cve:`2019-14895`, :cve:`2019-14896`, :cve:`2019-14897`, :cve:`2019-14901`, :cve:`2019-15030`, :cve:`2019-15031`, :cve:`2019-15090`, :cve:`2019-15098`, :cve:`2019-15099`, :cve:`2019-15117`, :cve:`2019-15118`, :cve:`2019-15211`, :cve:`2019-15212`, :cve:`2019-15213`, :cve:`2019-15214`, :cve:`2019-15215`, :cve:`2019-15216`, :cve:`2019-15217`, :cve:`2019-15218`, :cve:`2019-15219`, :cve:`2019-15220`, :cve:`2019-15221`, :cve:`2019-15222`, :cve:`2019-15223`, :cve:`2019-15291`, :cve:`2019-15292`, :cve:`2019-15504`, :cve:`2019-15505`, :cve:`2019-15538`, :cve:`2019-15666`, :cve:`2019-15794`, :cve:`2019-15807`, :cve:`2019-15916`, :cve:`2019-15917`, :cve:`2019-15918`, :cve:`2019-15919`, :cve:`2019-15920`, :cve:`2019-15921`, :cve:`2019-15922`, :cve:`2019-15923`, :cve:`2019-15924`, :cve:`2019-15925`, :cve:`2019-15926`, :cve:`2019-15927`, :cve:`2019-16229`, :cve:`2019-16230`, :cve:`2019-16231`, :cve:`2019-16232`, :cve:`2019-16233`, :cve:`2019-16234`, :cve:`2019-16413`, :cve:`2019-16714`, :cve:`2019-16746`, :cve:`2019-16921`, :cve:`2019-16994`, :cve:`2019-16995`, :cve:`2019-17052`, :cve:`2019-17053`, :cve:`2019-17054`, :cve:`2019-17055`, :cve:`2019-17056`, :cve:`2019-17075`, :cve:`2019-17133`, :cve:`2019-17351`, :cve:`2019-17666`, :cve:`2019-18198`, :cve:`2019-18282`, :cve:`2019-18660`, :cve:`2019-18675`, :cve:`2019-18683`, :cve:`2019-18786`, :cve:`2019-18805`, :cve:`2019-18806`, :cve:`2019-18807`, :cve:`2019-18808`, :cve:`2019-18809`, :cve:`2019-18810`, :cve:`2019-18811`, :cve:`2019-18812`, :cve:`2019-18813`, :cve:`2019-18814`, :cve:`2019-18885`, :cve:`2019-19036`, :cve:`2019-19037`, :cve:`2019-19039`, :cve:`2019-19043`, :cve:`2019-19044`, :cve:`2019-19045`, :cve:`2019-19046`, :cve:`2019-19047`, :cve:`2019-19048`, :cve:`2019-19049`, :cve:`2019-19050`, :cve:`2019-19051`, :cve:`2019-19052`, :cve:`2019-19053`, :cve:`2019-19054`, :cve:`2019-19055`, :cve:`2019-19056`, :cve:`2019-19057`, :cve:`2019-19058`, :cve:`2019-19059`, :cve:`2019-19060`, :cve:`2019-19061`, :cve:`2019-19062`, :cve:`2019-19063`, :cve:`2019-19064`, :cve:`2019-19065`, :cve:`2019-19066`, :cve:`2019-19067`, :cve:`2019-19068`, :cve:`2019-19069`, :cve:`2019-19070`, :cve:`2019-19071`, :cve:`2019-19072`, :cve:`2019-19073`, :cve:`2019-19074`, :cve:`2019-19075`, :cve:`2019-19076`, :cve:`2019-19077`, :cve:`2019-19078`, :cve:`2019-19079`, :cve:`2019-19080`, :cve:`2019-19081`, :cve:`2019-19082`, :cve:`2019-19083`, :cve:`2019-19227`, :cve:`2019-19241`, :cve:`2019-19252`, :cve:`2019-19318`, :cve:`2019-19319`, :cve:`2019-19332`, :cve:`2019-19338`, :cve:`2019-19377`, :cve:`2019-19447`, :cve:`2019-19448`, :cve:`2019-19449`, :cve:`2019-19462`, :cve:`2019-19523`, :cve:`2019-19524`, :cve:`2019-19525`, :cve:`2019-19526`, :cve:`2019-19527`, :cve:`2019-19528`, :cve:`2019-19529`, :cve:`2019-19530`, :cve:`2019-19531`, :cve:`2019-19532`, :cve:`2019-19533`, :cve:`2019-19534`, :cve:`2019-19535`, :cve:`2019-19536`, :cve:`2019-19537`, :cve:`2019-19543`, :cve:`2019-19602`, :cve:`2019-19767`, :cve:`2019-19768`, :cve:`2019-19769`, :cve:`2019-19770`, :cve:`2019-19807`, :cve:`2019-19813`, :cve:`2019-19815`, :cve:`2019-19816`, :cve:`2019-19922`, :cve:`2019-19927`, :cve:`2019-19947`, :cve:`2019-19965`, :cve:`2019-19966`, :cve:`2019-1999`, :cve:`2019-20054`, :cve:`2019-20095`, :cve:`2019-20096`, :cve:`2019-2024`, :cve:`2019-2025`, :cve:`2019-20422`, :cve:`2019-2054`, :cve:`2019-20636`, :cve:`2019-20806`, :cve:`2019-20810`, :cve:`2019-20811`, :cve:`2019-20812`, :cve:`2019-20908`, :cve:`2019-20934`, :cve:`2019-2101`, :cve:`2019-2181`, :cve:`2019-2182`, :cve:`2019-2213`, :cve:`2019-2214`, :cve:`2019-2215`, :cve:`2019-25044`, :cve:`2019-25045`, :cve:`2019-3016`, :cve:`2019-3459`, :cve:`2019-3460`, :cve:`2019-3701`, :cve:`2019-3819`, :cve:`2019-3837`, :cve:`2019-3846`, :cve:`2019-3874`, :cve:`2019-3882`, :cve:`2019-3887`, :cve:`2019-3892`, :cve:`2019-3896`, :cve:`2019-3900`, :cve:`2019-3901`, :cve:`2019-5108`, :cve:`2019-6133`, :cve:`2019-6974`, :cve:`2019-7221`, :cve:`2019-7222`, :cve:`2019-7308`, :cve:`2019-8912`, :cve:`2019-8956`, :cve:`2019-8980`, :cve:`2019-9003`, :cve:`2019-9162`, :cve:`2019-9213`, :cve:`2019-9245`, :cve:`2019-9444`, :cve:`2019-9445`, :cve:`2019-9453`, :cve:`2019-9454`, :cve:`2019-9455`, :cve:`2019-9456`, :cve:`2019-9457`, :cve:`2019-9458`, :cve:`2019-9466`, :cve:`2019-9500`, :cve:`2019-9503`, :cve:`2019-9506`, :cve:`2019-9857`, :cve:`2020-0009`, :cve:`2020-0030`, :cve:`2020-0041`, :cve:`2020-0066`, :cve:`2020-0067`, :cve:`2020-0110`, :cve:`2020-0255`, :cve:`2020-0305`, :cve:`2020-0404`, :cve:`2020-0423`, :cve:`2020-0427`, :cve:`2020-0429`, :cve:`2020-0430`, :cve:`2020-0431`, :cve:`2020-0432`, :cve:`2020-0433`, :cve:`2020-0435`, :cve:`2020-0444`, :cve:`2020-0465`, :cve:`2020-0466`, :cve:`2020-0543`, :cve:`2020-10135`, :cve:`2020-10690`, :cve:`2020-10711`, :cve:`2020-10720`, :cve:`2020-10732`, :cve:`2020-10742`, :cve:`2020-10751`, :cve:`2020-10757`, :cve:`2020-10766`, :cve:`2020-10767`, :cve:`2020-10768`, :cve:`2020-10769`, :cve:`2020-10773`, :cve:`2020-10781`, :cve:`2020-10942`, :cve:`2020-11494`, :cve:`2020-11565`, :cve:`2020-11608`, :cve:`2020-11609`, :cve:`2020-11668`, :cve:`2020-11669`, :cve:`2020-11884`, :cve:`2020-12114`, :cve:`2020-12351`, :cve:`2020-12352`, :cve:`2020-12362`, :cve:`2020-12363`, :cve:`2020-12364`, :cve:`2020-12464`, :cve:`2020-12465`, :cve:`2020-12652`, :cve:`2020-12653`, :cve:`2020-12654`, :cve:`2020-12655`, :cve:`2020-12656`, :cve:`2020-12657`, :cve:`2020-12659`, :cve:`2020-12768`, :cve:`2020-12769`, :cve:`2020-12770`, :cve:`2020-12771`, :cve:`2020-12826`, :cve:`2020-12888`, :cve:`2020-12912`, :cve:`2020-13143`, :cve:`2020-13974`, :cve:`2020-14305`, :cve:`2020-14314`, :cve:`2020-14331`, :cve:`2020-14351`, :cve:`2020-14353`, :cve:`2020-14356`, :cve:`2020-14381`, :cve:`2020-14385`, :cve:`2020-14386`, :cve:`2020-14390`, :cve:`2020-14416`, :cve:`2020-15393`, :cve:`2020-15436`, :cve:`2020-15437`, :cve:`2020-15780`, :cve:`2020-15852`, :cve:`2020-16119`, :cve:`2020-16120`, :cve:`2020-16166`, :cve:`2020-1749`, :cve:`2020-24394`, :cve:`2020-24490`, :cve:`2020-24504`, :cve:`2020-24586`, :cve:`2020-24587`, :cve:`2020-24588`, :cve:`2020-25211`, :cve:`2020-25212`, :cve:`2020-25221`, :cve:`2020-25284`, :cve:`2020-25285`, :cve:`2020-25639`, :cve:`2020-25641`, :cve:`2020-25643`, :cve:`2020-25645`, :cve:`2020-25656`, :cve:`2020-25668`, :cve:`2020-25669`, :cve:`2020-25670`, :cve:`2020-25671`, :cve:`2020-25672`, :cve:`2020-25673`, :cve:`2020-25704`, :cve:`2020-25705`, :cve:`2020-26088`, :cve:`2020-26139`, :cve:`2020-26141`, :cve:`2020-26145`, :cve:`2020-26147`, :cve:`2020-26541`, :cve:`2020-26555`, :cve:`2020-26558`, :cve:`2020-27066`, :cve:`2020-27067`, :cve:`2020-27068`, :cve:`2020-27152`, :cve:`2020-27170`, :cve:`2020-27171`, :cve:`2020-27194`, :cve:`2020-2732`, :cve:`2020-27673`, :cve:`2020-27675`, :cve:`2020-27777`, :cve:`2020-27784`, :cve:`2020-27786`, :cve:`2020-27815`, :cve:`2020-27820`, :cve:`2020-27825`, :cve:`2020-27830`, :cve:`2020-27835`, :cve:`2020-28097`, :cve:`2020-28374`, :cve:`2020-28588`, :cve:`2020-28915`, :cve:`2020-28941`, :cve:`2020-28974`, :cve:`2020-29368`, :cve:`2020-29369`, :cve:`2020-29370`, :cve:`2020-29371`, :cve:`2020-29372`, :cve:`2020-29373`, :cve:`2020-29374`, :cve:`2020-29534`, :cve:`2020-29568`, :cve:`2020-29569`, :cve:`2020-29660`, :cve:`2020-29661`, :cve:`2020-35499`, :cve:`2020-35508`, :cve:`2020-35513`, :cve:`2020-35519`, :cve:`2020-36158`, :cve:`2020-36310`, :cve:`2020-36311`, :cve:`2020-36312`, :cve:`2020-36313`, :cve:`2020-36322`, :cve:`2020-36385`, :cve:`2020-36386`, :cve:`2020-36387`, :cve:`2020-36516`, :cve:`2020-36557`, :cve:`2020-36558`, :cve:`2020-36691`, :cve:`2020-36694`, :cve:`2020-36766`, :cve:`2020-3702`, :cve:`2020-4788`, :cve:`2020-7053`, :cve:`2020-8428`, :cve:`2020-8647`, :cve:`2020-8648`, :cve:`2020-8649`, :cve:`2020-8694`, :cve:`2020-8834`, :cve:`2020-8835`, :cve:`2020-8992`, :cve:`2020-9383`, :cve:`2020-9391`, :cve:`2021-0129`, :cve:`2021-0342`, :cve_mitre:`2021-0447`, :cve_mitre:`2021-0448`, :cve:`2021-0512`, :cve:`2021-0605`, :cve:`2021-0707`, :cve:`2021-0920`, :cve:`2021-0929`, :cve:`2021-0935`, :cve_mitre:`2021-0937`, :cve:`2021-0938`, :cve:`2021-0941`, :cve:`2021-1048`, :cve:`2021-20177`, :cve:`2021-20194`, :cve:`2021-20226`, :cve:`2021-20239`, :cve:`2021-20261`, :cve:`2021-20265`, :cve:`2021-20268`, :cve:`2021-20292`, :cve:`2021-20317`, :cve:`2021-20320`, :cve:`2021-20321`, :cve:`2021-20322`, :cve:`2021-21781`, :cve:`2021-22543`, :cve:`2021-22555`, :cve:`2021-22600`, :cve:`2021-23133`, :cve:`2021-23134`, :cve:`2021-26401`, :cve:`2021-26708`, :cve:`2021-26930`, :cve:`2021-26931`, :cve:`2021-26932`, :cve:`2021-27363`, :cve:`2021-27364`, :cve:`2021-27365`, :cve:`2021-28038`, :cve:`2021-28039`, :cve:`2021-28375`, :cve:`2021-28660`, :cve:`2021-28688`, :cve:`2021-28691`, :cve:`2021-28711`, :cve:`2021-28712`, :cve:`2021-28713`, :cve:`2021-28714`, :cve:`2021-28715`, :cve:`2021-28950`, :cve:`2021-28951`, :cve:`2021-28952`, :cve:`2021-28964`, :cve:`2021-28971`, :cve:`2021-28972`, :cve:`2021-29154`, :cve:`2021-29155`, :cve:`2021-29264`, :cve:`2021-29265`, :cve:`2021-29266`, :cve:`2021-29646`, :cve:`2021-29647`, :cve:`2021-29648`, :cve:`2021-29649`, :cve:`2021-29650`, :cve:`2021-29657`, :cve:`2021-30002`, :cve:`2021-30178`, :cve:`2021-31440`, :cve:`2021-3178`, :cve:`2021-31829`, :cve:`2021-31916`, :cve:`2021-32078`, :cve:`2021-32399`, :cve:`2021-32606`, :cve:`2021-33033`, :cve:`2021-33034`, :cve:`2021-33061`, :cve:`2021-33098`, :cve:`2021-33135`, :cve:`2021-33200`, :cve:`2021-3347`, :cve:`2021-3348`, :cve:`2021-33624`, :cve:`2021-33655`, :cve:`2021-33656`, :cve:`2021-33909`, :cve:`2021-3411`, :cve:`2021-3428`, :cve:`2021-3444`, :cve:`2021-34556`, :cve:`2021-34693`, :cve:`2021-3483`, :cve:`2021-34866`, :cve:`2021-3489`, :cve:`2021-3490`, :cve:`2021-3491`, :cve:`2021-3493`, :cve_mitre:`2021-34981`, :cve:`2021-3501`, :cve:`2021-35039`, :cve:`2021-3506`, :cve:`2021-3543`, :cve:`2021-35477`, :cve:`2021-3564`, :cve:`2021-3573`, :cve:`2021-3587`, :cve_mitre:`2021-3600`, :cve:`2021-3609`, :cve:`2021-3612`, :cve:`2021-3635`, :cve:`2021-3640`, :cve:`2021-3653`, :cve:`2021-3655`, :cve:`2021-3656`, :cve:`2021-3659`, :cve:`2021-3669`, :cve:`2021-3679`, :cve:`2021-3715`, :cve:`2021-37159`, :cve:`2021-3732`, :cve:`2021-3736`, :cve:`2021-3739`, :cve:`2021-3743`, :cve:`2021-3744`, :cve:`2021-3752`, :cve:`2021-3753`, :cve:`2021-37576`, :cve:`2021-3759`, :cve:`2021-3760`, :cve:`2021-3764`, :cve:`2021-3772`, :cve:`2021-38160`, :cve:`2021-38166`, :cve:`2021-38198`, :cve:`2021-38199`, :cve:`2021-38200`, :cve:`2021-38201`, :cve:`2021-38202`, :cve:`2021-38203`, :cve:`2021-38204`, :cve:`2021-38205`, :cve:`2021-38206`, :cve:`2021-38207`, :cve:`2021-38208`, :cve:`2021-38209`, :cve:`2021-38300`, :cve:`2021-3894`, :cve:`2021-3896`, :cve:`2021-3923`, :cve:`2021-39633`, :cve:`2021-39634`, :cve:`2021-39636`, :cve:`2021-39648`, :cve:`2021-39656`, :cve:`2021-39657`, :cve:`2021-39685`, :cve:`2021-39686`, :cve:`2021-39698`, :cve:`2021-39711`, :cve:`2021-39713`, :cve:`2021-39714`, :cve:`2021-4001`, :cve:`2021-4002`, :cve:`2021-4023`, :cve:`2021-4028`, :cve:`2021-4032`, :cve:`2021-4037`, :cve:`2021-40490`, :cve:`2021-4083`, :cve:`2021-4090`, :cve:`2021-4093`, :cve:`2021-4095`, :cve:`2021-41073`, :cve:`2021-4135`, :cve:`2021-4148`, :cve:`2021-4149`, :cve:`2021-4150`, :cve:`2021-4154`, :cve:`2021-4155`, :cve:`2021-4157`, :cve:`2021-4159`, :cve:`2021-41864`, :cve:`2021-4197`, :cve:`2021-42008`, :cve:`2021-4202`, :cve:`2021-4203`, :cve:`2021-4204`, :cve:`2021-4218`, :cve:`2021-42252`, :cve:`2021-42327`, :cve:`2021-42739`, :cve:`2021-43056`, :cve:`2021-43057`, :cve:`2021-43267`, :cve:`2021-43389`, :cve:`2021-43975`, :cve:`2021-43976`, :cve:`2021-44733`, :cve:`2021-44879`, :cve:`2021-45095`, :cve:`2021-45100`, :cve:`2021-45402`, :cve:`2021-45469`, :cve:`2021-45480`, :cve:`2021-45485`, :cve:`2021-45486`, :cve:`2021-45868`, :cve:`2021-46283`, :cve:`2022-0001`, :cve:`2022-0002`, :cve:`2022-0168`, :cve:`2022-0171`, :cve:`2022-0185`, :cve:`2022-0264`, :cve:`2022-0286`, :cve:`2022-0322`, :cve:`2022-0330`, :cve:`2022-0382`, :cve:`2022-0433`, :cve:`2022-0435`, :cve:`2022-0480`, :cve:`2022-0487`, :cve:`2022-0492`, :cve:`2022-0494`, :cve:`2022-0500`, :cve:`2022-0516`, :cve:`2022-0617`, :cve:`2022-0644`, :cve:`2022-0646`, :cve:`2022-0742`, :cve:`2022-0812`, :cve:`2022-0847`, :cve:`2022-0850`, :cve:`2022-0854`, :cve:`2022-0995`, :cve:`2022-0998`, :cve:`2022-1011`, :cve:`2022-1012`, :cve:`2022-1015`, :cve:`2022-1016`, :cve:`2022-1043`, :cve:`2022-1048`, :cve:`2022-1055`, :cve:`2022-1158`, :cve:`2022-1184`, :cve:`2022-1195`, :cve:`2022-1198`, :cve:`2022-1199`, :cve:`2022-1204`, :cve:`2022-1205`, :cve:`2022-1263`, :cve:`2022-1280`, :cve:`2022-1353`, :cve:`2022-1419`, :cve:`2022-1462`, :cve:`2022-1508`, :cve:`2022-1516`, :cve:`2022-1651`, :cve:`2022-1652`, :cve:`2022-1671`, :cve:`2022-1678`, :cve:`2022-1679`, :cve:`2022-1729`, :cve:`2022-1734`, :cve:`2022-1786`, :cve:`2022-1789`, :cve:`2022-1836`, :cve:`2022-1852`, :cve:`2022-1882`, :cve:`2022-1943`, :cve:`2022-1966`, :cve:`2022-1972`, :cve:`2022-1973`, :cve:`2022-1974`, :cve:`2022-1975`, :cve:`2022-1976`, :cve:`2022-1998`, :cve:`2022-20008`, :cve:`2022-20132`, :cve:`2022-20141`, :cve:`2022-20148`, :cve:`2022-20153`, :cve:`2022-20154`, :cve:`2022-20158`, :cve:`2022-20166`, :cve:`2022-20368`, :cve:`2022-20369`, :cve:`2022-20409`, :cve:`2022-20421`, :cve:`2022-20422`, :cve:`2022-20423`, :cve:`2022-20424`, :cve_mitre:`2022-20565`, :cve:`2022-20566`, :cve:`2022-20567`, :cve:`2022-20568`, :cve:`2022-20572`, :cve:`2022-2078`, :cve:`2022-21123`, :cve:`2022-21125`, :cve:`2022-21166`, :cve:`2022-21385`, :cve:`2022-21499`, :cve_mitre:`2022-21505`, :cve:`2022-2153`, :cve:`2022-2196`, :cve_mitre:`2022-22942`, :cve:`2022-23036`, :cve:`2022-23037`, :cve:`2022-23038`, :cve:`2022-23039`, :cve:`2022-23040`, :cve:`2022-23041`, :cve:`2022-23042`, :cve:`2022-2308`, :cve:`2022-2318`, :cve:`2022-23222`, :cve:`2022-2327`, :cve:`2022-2380`, :cve:`2022-23816`, :cve:`2022-23960`, :cve:`2022-24122`, :cve:`2022-24448`, :cve:`2022-24958`, :cve:`2022-24959`, :cve:`2022-2503`, :cve:`2022-25258`, :cve:`2022-25375`, :cve:`2022-25636`, :cve_mitre:`2022-2585`, :cve_mitre:`2022-2586`, :cve_mitre:`2022-2588`, :cve:`2022-2590`, :cve_mitre:`2022-2602`, :cve:`2022-26365`, :cve:`2022-26373`, :cve:`2022-2639`, :cve:`2022-26490`, :cve:`2022-2663`, :cve:`2022-26966`, :cve:`2022-27223`, :cve:`2022-27666`, :cve:`2022-27672`, :cve:`2022-2785`, :cve:`2022-27950`, :cve:`2022-28356`, :cve:`2022-28388`, :cve:`2022-28389`, :cve:`2022-28390`, :cve:`2022-2873`, :cve:`2022-28796`, :cve:`2022-28893`, :cve:`2022-2905`, :cve:`2022-29156`, :cve:`2022-2938`, :cve:`2022-29581`, :cve:`2022-29582`, :cve:`2022-2959`, :cve:`2022-2964`, :cve:`2022-2977`, :cve:`2022-2978`, :cve:`2022-29900`, :cve:`2022-29901`, :cve:`2022-2991`, :cve:`2022-29968`, :cve:`2022-3028`, :cve:`2022-30594`, :cve:`2022-3061`, :cve:`2022-3077`, :cve:`2022-3078`, :cve:`2022-3103`, :cve:`2022-3104`, :cve:`2022-3105`, :cve:`2022-3106`, :cve:`2022-3107`, :cve:`2022-3108`, :cve:`2022-3110`, :cve:`2022-3111`, :cve:`2022-3112`, :cve:`2022-3113`, :cve:`2022-3114`, :cve:`2022-3115`, :cve:`2022-3169`, :cve:`2022-3170`, :cve:`2022-3176`, :cve:`2022-3202`, :cve:`2022-32250`, :cve:`2022-32296`, :cve:`2022-3239`, :cve:`2022-32981`, :cve:`2022-3303`, :cve:`2022-3344`, :cve:`2022-33740`, :cve:`2022-33741`, :cve:`2022-33742`, :cve:`2022-33743`, :cve:`2022-33744`, :cve:`2022-33981`, :cve:`2022-3424`, :cve:`2022-3435`, :cve:`2022-34494`, :cve:`2022-34495`, :cve:`2022-34918`, :cve:`2022-3521`, :cve:`2022-3522`, :cve:`2022-3524`, :cve:`2022-3526`, :cve:`2022-3531`, :cve:`2022-3532`, :cve:`2022-3534`, :cve:`2022-3535`, :cve:`2022-3541`, :cve:`2022-3542`, :cve:`2022-3543`, :cve:`2022-3545`, :cve:`2022-3564`, :cve:`2022-3565`, :cve:`2022-3577`, :cve:`2022-3586`, :cve:`2022-3594`, :cve:`2022-3595`, :cve:`2022-36123`, :cve:`2022-3619`, :cve:`2022-3621`, :cve:`2022-3623`, :cve:`2022-3624`, :cve:`2022-3625`, :cve:`2022-3628`, :cve:`2022-36280`, :cve:`2022-3629`, :cve:`2022-3630`, :cve:`2022-3633`, :cve:`2022-3635`, :cve:`2022-3636`, :cve:`2022-3640`, :cve:`2022-3643`, :cve:`2022-3646`, :cve:`2022-3649`, :cve:`2022-36879`, :cve:`2022-36946`, :cve:`2022-3707`, :cve:`2022-38457`, :cve:`2022-3903`, :cve:`2022-3910`, :cve:`2022-39188`, :cve:`2022-39189`, :cve:`2022-39190`, :cve:`2022-3977`, :cve:`2022-39842`, :cve:`2022-40133`, :cve:`2022-40307`, :cve:`2022-40476`, :cve:`2022-40768`, :cve:`2022-4095`, :cve:`2022-40982`, :cve:`2022-41218`, :cve:`2022-41222`, :cve:`2022-4127`, :cve:`2022-4128`, :cve:`2022-4129`, :cve:`2022-4139`, :cve:`2022-41674`, :cve:`2022-41849`, :cve:`2022-41850`, :cve:`2022-41858`, :cve:`2022-42328`, :cve:`2022-42329`, :cve:`2022-42432`, :cve:`2022-4269`, :cve:`2022-42703`, :cve:`2022-42719`, :cve:`2022-42720`, :cve:`2022-42721`, :cve:`2022-42722`, :cve:`2022-42895`, :cve:`2022-42896`, :cve:`2022-43750`, :cve:`2022-4378`, :cve:`2022-4379`, :cve:`2022-4382`, :cve:`2022-43945`, :cve:`2022-45869`, :cve:`2022-45886`, :cve:`2022-45887`, :cve:`2022-45919`, :cve:`2022-45934`, :cve:`2022-4662`, :cve:`2022-4696`, :cve:`2022-4744`, :cve:`2022-47518`, :cve:`2022-47519`, :cve:`2022-47520`, :cve:`2022-47521`, :cve:`2022-47929`, :cve:`2022-47938`, :cve:`2022-47939`, :cve:`2022-47940`, :cve:`2022-47941`, :cve:`2022-47942`, :cve:`2022-47943`, :cve:`2022-47946`, :cve:`2022-4842`, :cve:`2022-48423`, :cve:`2022-48424`, :cve:`2022-48425`, :cve:`2022-48502`, :cve:`2023-0030`, :cve:`2023-0045`, :cve:`2023-0047`, :cve:`2023-0122`, :cve:`2023-0160`, :cve:`2023-0179`, :cve:`2023-0210`, :cve:`2023-0240`, :cve:`2023-0266`, :cve:`2023-0386`, :cve:`2023-0394`, :cve:`2023-0458`, :cve:`2023-0459`, :cve:`2023-0461`, :cve:`2023-0468`, :cve:`2023-0469`, :cve:`2023-0590`, :cve:`2023-0615`, :cve_mitre:`2023-1032`, :cve:`2023-1073`, :cve:`2023-1074`, :cve:`2023-1076`, :cve:`2023-1077`, :cve:`2023-1078`, :cve:`2023-1079`, :cve:`2023-1095`, :cve:`2023-1118`, :cve:`2023-1192`, :cve:`2023-1194`, :cve:`2023-1195`, :cve:`2023-1206`, :cve:`2023-1249`, :cve:`2023-1252`, :cve:`2023-1281`, :cve:`2023-1295`, :cve:`2023-1380`, :cve:`2023-1382`, :cve:`2023-1390`, :cve:`2023-1513`, :cve:`2023-1582`, :cve:`2023-1583`, :cve:`2023-1611`, :cve:`2023-1637`, :cve:`2023-1652`, :cve:`2023-1670`, :cve:`2023-1829`, :cve:`2023-1838`, :cve:`2023-1855`, :cve:`2023-1859`, :cve:`2023-1872`, :cve:`2023-1989`, :cve:`2023-1990`, :cve:`2023-1998`, :cve:`2023-2002`, :cve:`2023-2006`, :cve:`2023-2007`, :cve:`2023-2008`, :cve:`2023-2019`, :cve:`2023-20569`, :cve:`2023-20588`, :cve:`2023-20593`, :cve:`2023-20928`, :cve:`2023-20938`, :cve:`2023-21102`, :cve:`2023-21106`, :cve:`2023-2124`, :cve:`2023-21255`, :cve:`2023-2156`, :cve:`2023-2162`, :cve:`2023-2163`, :cve:`2023-2166`, :cve:`2023-2177`, :cve:`2023-2194`, :cve:`2023-2235`, :cve:`2023-2236`, :cve:`2023-2248`, :cve:`2023-2269`, :cve:`2023-22995`, :cve:`2023-22996`, :cve:`2023-22997`, :cve:`2023-22998`, :cve:`2023-22999`, :cve:`2023-23000`, :cve:`2023-23001`, :cve:`2023-23002`, :cve:`2023-23003`, :cve:`2023-23004`, :cve:`2023-23006`, :cve:`2023-23454`, :cve:`2023-23455`, :cve:`2023-23559`, :cve:`2023-23586`, :cve:`2023-2430`, :cve:`2023-2483`, :cve:`2023-25012`, :cve:`2023-2513`, :cve:`2023-25775`, :cve:`2023-2598`, :cve:`2023-26544`, :cve:`2023-26545`, :cve:`2023-26605`, :cve:`2023-26606`, :cve:`2023-26607`, :cve:`2023-28327`, :cve:`2023-28328`, :cve:`2023-28410`, :cve:`2023-28464`, :cve:`2023-28466`, :cve:`2023-2860`, :cve:`2023-28772`, :cve:`2023-28866`, :cve:`2023-2898`, :cve:`2023-2985`, :cve:`2023-3006`, :cve:`2023-30456`, :cve:`2023-30772`, :cve:`2023-3090`, :cve:`2023-3106`, :cve:`2023-3111`, :cve:`2023-3117`, :cve:`2023-31248`, :cve:`2023-3141`, :cve:`2023-31436`, :cve:`2023-3159`, :cve:`2023-3161`, :cve:`2023-3212`, :cve:`2023-3220`, :cve:`2023-32233`, :cve:`2023-32247`, :cve:`2023-32248`, :cve:`2023-32250`, :cve:`2023-32252`, :cve:`2023-32254`, :cve:`2023-32257`, :cve:`2023-32258`, :cve:`2023-32269`, :cve:`2023-3268`, :cve:`2023-3269`, :cve:`2023-3312`, :cve:`2023-3317`, :cve:`2023-33203`, :cve:`2023-33250`, :cve:`2023-33288`, :cve:`2023-3338`, :cve:`2023-3355`, :cve:`2023-3357`, :cve:`2023-3358`, :cve:`2023-3359`, :cve:`2023-3389`, :cve:`2023-3390`, :cve:`2023-33951`, :cve:`2023-33952`, :cve:`2023-34255`, :cve:`2023-34256`, :cve:`2023-34319`, :cve:`2023-3439`, :cve:`2023-35001`, :cve:`2023-3567`, :cve:`2023-35788`, :cve:`2023-35823`, :cve:`2023-35824`, :cve:`2023-35826`, :cve:`2023-35828`, :cve:`2023-35829`, :cve:`2023-3609`, :cve:`2023-3610`, :cve:`2023-3611`, :cve:`2023-37453`, :cve:`2023-3772`, :cve:`2023-3773`, :cve:`2023-3776`, :cve:`2023-3777`, :cve:`2023-3812`, :cve:`2023-38409`, :cve:`2023-38426`, :cve:`2023-38427`, :cve:`2023-38428`, :cve:`2023-38429`, :cve:`2023-38430`, :cve:`2023-38431`, :cve:`2023-38432`, :cve:`2023-3863`, :cve_mitre:`2023-3865`, :cve_mitre:`2023-3866`, :cve_mitre:`2023-3867`, :cve:`2023-4004`, :cve:`2023-4015`, :cve:`2023-40283`, :cve:`2023-4128`, :cve:`2023-4132`, :cve:`2023-4147`, :cve:`2023-4155`, :cve:`2023-4194`, :cve:`2023-4206`, :cve:`2023-4207`, :cve:`2023-4208`, :cve:`2023-4273`, :cve:`2023-42752`, :cve:`2023-42753`, :cve:`2023-4385`, :cve:`2023-4387`, :cve:`2023-4389`, :cve:`2023-4394`, :cve:`2023-4459`, :cve:`2023-4569`, :cve:`2023-4611` and :cve:`2023-4623`
+- nghttp2: Fix :cve:`2023-35945`
+- openssl: Fix :cve:`2023-2975`, :cve:`2023-3446`, :cve:`2023-3817`, :cve:`2023-4807` and :cve:`2023-5363`
+- pixman: Ignore :cve:`2023-37769`
+- procps: Fix :cve:`2023-4016`
+- python3-git: Fix :cve:`2023-40267`, :cve:`2023-40590` and :cve:`2023-41040`
+- python3-pygments: Fix :cve:`2022-40896`
+- python3-urllib3: Fix :cve:`2023-43804` and :cve:`2023-45803`
+- python3: Fix :cve:`2023-24329` and :cve:`2023-40217`
+- qemu: Fix :cve:`2023-3180`, :cve:`2023-3354` and :cve:`2023-42467`
+- qemu: Ignore :cve:`2023-2680`
+- screen: Fix :cve:`2023-24626`
+- shadow: Fix :cve_mitre:`2023-4641`
+- tiff: Fix :cve:`2023-40745` and :cve:`2023-41175`
+- vim: Fix :cve:`2023-3896`, :cve:`2023-4733`, :cve:`2023-4734`, :cve:`2023-4735`, :cve:`2023-4736`, :cve:`2023-4738`, :cve:`2023-4750`, :cve:`2023-4752`, :cve:`2023-4781`, :cve:`2023-5441` and :cve:`2023-5535`
+- webkitgtk: Fix :cve:`2023-32435` and :cve:`2023-32439`
+- xserver-xorg: Fix :cve:`2023-5367` and :cve:`2023-5380`
+
+
+Fixes in Yocto-4.2.4
+~~~~~~~~~~~~~~~~~~~~
+
+- README: Update to point to new contributor guide
+- README: fix mail address in git example command
+- SECURITY.md: Add file
+- avahi: handle invalid service types gracefully
+- bind: upgrade to 9.18.19
+- bitbake.conf: add bunzip2 in :term:`HOSTTOOLS`
+- bitbake: Fix disk space monitoring on cephfs
+- bitbake: SECURITY.md: add file
+- brief-yoctoprojectqs: use new CDN mirror for sstate
+- bsp-guide: bsp.rst: replace reference to wiki
+- bsp-guide: bsp: skip Intel machines no longer supported in Poky
+- build-appliance-image: Update to mickledore head revision
+- build-sysroots: Add :term:`SUMMARY` field
+- build-sysroots: Ensure dependency chains are minimal
+- build-sysroots: target or native sysroot population need to be selected explicitly
+- buildtools-tarball: Add libacl
+- busybox: Set PATH in syslog initscript
+- busybox: remove coreutils dependency in busybox-ptest
+- cmake.bbclass: fix allarch override syntax
+- cml1: Fix KCONFIG_CONFIG_COMMAND not conveyed fully in do_menuconfig
+- contributor-guide/style-guide: Add a note about task idempotence
+- contributor-guide/style-guide: Refer to recipes, not packages
+- contributor-guide: deprecate "Accepted" patch status
+- contributor-guide: discourage marking patches as Inappropriate
+- contributor-guide: recipe-style-guide: add Upstream-Status
+- contributor-guide: recipe-style-guide: add more patch tagging examples
+- contributor-guide: recipe-style-guide: add section about CVE patches
+- contributor-guide: style-guide: discourage using Pending patch status
+- core-image-ptest: Define a fallback for :term:`SUMMARY` field
+- cve-check: add CVSS vector string to CVE database and reports
+- cve-check: don't warn if a patch is remote
+- cve-check: slightly more verbose warning when adding the same package twice
+- cve-check: sort the package list in the JSON report
+- cve-exclusion_6.1.inc: update for 6.1.57
+- dbus: add additional entries to :term:`CVE_PRODUCT`
+- dbus: upgrade to 1.14.10
+- dev-manual: add security team processes
+- dev-manual: disk-space: improve wording for obsolete sstate cache files
+- dev-manual: disk-space: mention faster "find" command to trim sstate cache
+- dev-manual: fix testimage usage instructions
+- dev-manual: layers: Add notes about layer.conf
+- dev-manual: licenses: mention :term:`SPDX` for license compliance
+- dev-manual: new-recipe.rst fix inconsistency with contributor guide
+- dev-manual: new-recipe.rst: add missing parenthesis to "Patching Code" section
+- dev-manual: new-recipe.rst: replace reference to wiki
+- dev-manual: remove unsupported :term: markup inside markup
+- dev-manual: start.rst: remove obsolete reference
+- ell: upgrade to 0.58
+- externalsrc: fix dependency chain issues
+- ffmpeg: upgrade to 5.1.3
+- ffmpeg: avoid neon on unsupported machines
+- file: fix call to localtime_r()
+- file: upgrade to 5.45
+- fontcache.bbclass: avoid native recipes depending on target fontconfig
+- gcc-crosssdk: ignore MULTILIB_VARIANTS in signature computation
+- gcc-runtime: remove bashism
+- gcc: backport a fix for ICE caused by CVE-2023-4039.patch
+- gcc: depend on zstd
+- gdb: fix :term:`RDEPENDS` for PACKAGECONFIG[tui]
+- glib-2.0: libelf has a configure option now, specify it
+- glibc: stable 2.37 branch updates
+- gnupg: Fix reproducibility failure
+- gnupg: upgrade to 2.4.3
+- go: upgrade to 1.20.7
+- graphene: fix runtime detection of IEEE754 behaviour
+- gstreamer: upgrade to 1.22.6
+- gtk4: upgrade to 4.10.5
+- gzip: upgrade to 1.13
+- igt-gpu-tools: do not write shortened git commit hash into binaries
+- inetutils: don't guess target paths
+- inetutils: remove obsolete cruft from do_configure
+- insane.bbclass: Count raw bytes in shebang-size
+- kernel.bbclass: Add force flag to rm calls
+- lib/package_manager: Improve repo artefact filtering
+- libc-test: Run as non-root user
+- libconvert-asn1-perl: upgrade to 0.34
+- libevent: fix patch Upstream-Status
+- libgudev: explicitly disable tests and vapi
+- librepo: upgrade to 1.15.2
+- librsvg: upgrade to 2.54.6
+- libsndfile1: upgrade to 1.2.2
+- libsoup-2.4: Only specify --cross-file when building for target
+- libsoup-2.4: update :term:`PACKAGECONFIG`
+- libx11: upgrade to 1.8.7
+- libxkbcommon: add :term:`CVE_PRODUCT`
+- libxpm: upgrade to 3.5.17
+- linux-firmware: add firmware files for NXP BT chipsets
+- linux-firmware: package Dragonboard 845c sensors DSP firmware
+- linux-firmware: package audio topology for Lenovo X13s
+- linux-firmware: upgrade to 20230804
+- linux-yocto/5.15: update to v5.15.133
+- linux-yocto/6.1: fix CONFIG_F2FS_IO_TRACE configuration warning
+- linux-yocto/6.1: fix IRQ-80 warnings
+- linux-yocto/6.1: fix uninitialized read in nohz_full/isolcpus setup
+- linux-yocto/6.1: tiny: fix arm 32 boot
+- linux-yocto/6.1: update to v6.1.57
+- linux-yocto: add script to generate kernel :term:`CVE_CHECK_IGNORE` entries
+- linux-yocto: make sure the pahole-native available before do_kernel_configme
+- linux/cve-exclusion: add generated CVE_CHECK_IGNOREs
+- linux/generate-cve-exclusions: fix mishandling of boundary values
+- linux/generate-cve-exclusions: print the generated time in UTC
+- manuals: add new contributor guide
+- manuals: correct "yocto-linux" by "linux-yocto"
+- mdadm: Disable further tests due to intermittent failures
+- mdadm: skip running 04update-uuid and 07revert-inplace testcases
+- migration-guides: add release notes for 4.0.12
+- migration-guides: add release notes for 4.0.13
+- migration-guides: add release notes for 4.2.3
+- mpfr: upgrade to 4.2.1
+- multilib.conf: explicitly make MULTILIB_VARIANTS vardeps on MULTILIBS
+- nativesdk-intercept: Fix bad intercept chgrp/chown logic
+- nettle: avoid neon on unsupported machines
+- oe-depends-dot: improve '-w' behavior
+- oeqa dnf_runtime.py: fix HTTP server IP address and port
+- oeqa selftest context.py: remove warning from missing meta-selftest
+- oeqa selftest context.py: whitespace fix
+- oeqa/concurrencytest: Remove invalid buffering option
+- oeqa/selftest/context.py: check git command return values
+- oeqa/selftest/wic: Improve assertTrue calls
+- oeqa/selftest: Fix broken symlink removal handling
+- oeqa/utils/gitarchive: Handle broken commit counts in results repo
+- openssl: upgrade to 3.1.4
+- openssl: build and install manpages only if they are enabled
+- openssl: ensure all ptest fails are caught
+- openssl: parallelize tests
+- overview: Add note about non-reproducibility side effects
+- packages.bbclass: Correct the check for conflicts with renamed packages
+- pango: explictly enable/disable libthai
+- patch.py: use --absolute-git-dir instead of --show-toplevel to retrieve gitdir
+- pixman: Remove duplication of license MIT
+- pixman: avoid neon on unsupported machines
+- poky.conf: bump version for 4.2.4 release
+- profile-manual: aesthetic cleanups
+- pseudo: Fix to work with glibc 2.38
+- ptest: report tests that were killed on timeout
+- python3-git: upgrade to 3.1.37
+- python3-urllib3: update to v1.26.18
+- python3: upgrade to 3.11.5
+- qemu: fix "Bad FPU state detected" fault on qemu-system-i386
+- ref-manual: Fix :term:`PACKAGECONFIG` term and add an example
+- ref-manual: Warn about :term:`COMPATIBLE_MACHINE` skipping native recipes
+- ref-manual: point outdated link to the new location
+- ref-manual: releases.svg: Scarthgap is now version 5.0
+- ref-manual: system-requirements: update supported distros
+- ref-manual: variables: add :term:`RECIPE_SYSROOT` and :term:`RECIPE_SYSROOT_NATIVE`
+- ref-manual: variables: add :term:`TOOLCHAIN_OPTIONS` variable
+- ref-manual: variables: add example for :term:`SYSROOT_DIRS` variable
+- ref-manual: variables: provide no-match example for :term:`COMPATIBLE_MACHINE`
+- resulttool/report: Avoid divide by zero
+- runqemu: check permissions of available render nodes as well as their presence
+- screen: upgrade to 4.9.1
+- scripts/create-pull-request: update URLs to git repositories
+- sdk-manual: appendix-obtain: improve and update descriptions
+- sdk-manual: extensible.rst: fix multiple formatting issues
+- shadow: fix patch Upstream-Status
+- strace: parallelize ptest
+- sudo: upgrade to 1.9.15p2
+- systemd-bootchart: musl fixes have been rejected upstream
+- systemd: backport patch to fix warning in systemd-vconsole-setup
+- tar: upgrade to 1.35
+- tcl: Add a way to skip ptests
+- tcl: prevent installing another copy of tzdata
+- template: fix typo in section header
+- test-manual: reproducible-builds: stop mentioning LTO bug
+- uboot-extlinux-config.bbclass: fix missed override syntax migration
+- vim: upgrade to 9.0.2048
+- vim: update obsolete comment
+- wayland-utils: add libdrm :term:`PACKAGECONFIG`
+- weston-init: fix init code indentation
+- weston-init: remove misleading comment about udev rule
+- wic: bootimg-partition: Fix file name in debug message
+- wic: fix wrong attempt to create file system in upartitioned regions
+- wireless-regdb: upgrade to 2023.09.01
+- xz: upgrade to 5.4.4
+- yocto-uninative: Update to 4.2 for glibc 2.38
+- yocto-uninative: Update to 4.3
+
+
+Known Issues in Yocto-4.2.4
+~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+- N/A
+
+
+Contributors to Yocto-4.2.4
+~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+- Alberto Planas
+- Alexander Kanavin
+- Alexis Lothoré
+- Antoine Lubineau
+- Anuj Mittal
+- Archana Polampalli
+- Arne Schwerdt
+- BELHADJ SALEM Talel
+- Benjamin Bara
+- Bruce Ashfield
+- Chen Qi
+- Colin McAllister
+- Daniel Semkowicz
+- Dmitry Baryshkov
+- Eilís 'pidge' Ní Fhlannagáin
+- Emil Kronborg Andersen
+- Etienne Cordonnier
+- Jaeyoon Jung
+- Jan Garcia
+- Joe Slater
+- Joshua Watt
+- Julien Stephan
+- Kai Kang
+- Khem Raj
+- Lee Chee Yang
+- Markus Niebel
+- Markus Volk
+- Marta Rybczynska
+- Martijn de Gouw
+- Martin Jansa
+- Michael Halstead
+- Michael Opdenacker
+- Mikko Rapeli
+- Mingli Yu
+- Narpat Mali
+- Otavio Salvador
+- Ovidiu Panait
+- Peter Kjellerstedt
+- Peter Marko
+- Peter Suti
+- Poonam Jadhav
+- Quentin Schulz
+- Richard Purdie
+- Robert P. J. Day
+- Roland Hieber
+- Ross Burton
+- Ryan Eatmon
+- Sakib Sajal
+- Samantha Jalabert
+- Sanjana
+- Sanjay Chitroda
+- Sean Nyekjaer
+- Siddharth Doshi
+- Soumya Sambu
+- Stefan Tauner
+- Steve Sakoman
+- Tan Wen Yan
+- Tom Hochstein
+- Trevor Gamblin
+- Vijay Anusuri
+- Wang Mingyu
+- Xiangyu Chen
+- Yash Shinde
+- Yoann Congal
+- Yogita Urade
+- Yuta Hayama
+
+
+Repositories / Downloads for Yocto-4.2.4
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+poky
+
+- Repository Location: :yocto_git:`/poky`
+- Branch: :yocto_git:`mickledore </poky/log/?h=mickledore>`
+- Tag: :yocto_git:`yocto-4.2.4 </poky/log/?h=yocto-4.2.4>`
+- Git Revision: :yocto_git:`7235399a86b134e57d5eb783d7f1f57ca0439ae5 </poky/commit/?id=7235399a86b134e57d5eb783d7f1f57ca0439ae5>`
+- Release Artefact: poky-7235399a86b134e57d5eb783d7f1f57ca0439ae5
+- sha: 3d56bb4232ab29ae18249529856f0e638c50c764fc495d6beb1ecd295fa5e5e3
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.2.4/poky-7235399a86b134e57d5eb783d7f1f57ca0439ae5.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.2.4/poky-7235399a86b134e57d5eb783d7f1f57ca0439ae5.tar.bz2
+
+openembedded-core
+
+- Repository Location: :oe_git:`/openembedded-core`
+- Branch: :oe_git:`mickledore </openembedded-core/log/?h=mickledore>`
+- Tag: :oe_git:`yocto-4.2.4 </openembedded-core/log/?h=yocto-4.2.4>`
+- Git Revision: :oe_git:`23b5141400b2c676c806df3308f023f7c04e34e0 </openembedded-core/commit/?id=23b5141400b2c676c806df3308f023f7c04e34e0>`
+- Release Artefact: oecore-23b5141400b2c676c806df3308f023f7c04e34e0
+- sha: 152f4ee3cdd2e159f6bd34b01d517de44dfe670d35a5e3c84cc32ee7842d9741
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.2.4/oecore-23b5141400b2c676c806df3308f023f7c04e34e0.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.2.4/oecore-23b5141400b2c676c806df3308f023f7c04e34e0.tar.bz2
+
+meta-mingw
+
+- Repository Location: :yocto_git:`/meta-mingw`
+- Branch: :yocto_git:`mickledore </meta-mingw/log/?h=mickledore>`
+- Tag: :yocto_git:`yocto-4.2.4 </meta-mingw/log/?h=yocto-4.2.4>`
+- Git Revision: :yocto_git:`d87d4f00b9c6068fff03929a4b0f231a942d3873 </meta-mingw/commit/?id=d87d4f00b9c6068fff03929a4b0f231a942d3873>`
+- Release Artefact: meta-mingw-d87d4f00b9c6068fff03929a4b0f231a942d3873
+- sha: 8036847cf5bf3da9db4bad13aac9080d559848679f0ae03694d55a576bcaf75f
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.2.4/meta-mingw-d87d4f00b9c6068fff03929a4b0f231a942d3873.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.2.4/meta-mingw-d87d4f00b9c6068fff03929a4b0f231a942d3873.tar.bz2
+
+bitbake
+
+- Repository Location: :oe_git:`/bitbake`
+- Branch: :oe_git:`2.4 </bitbake/log/?h=2.4>`
+- Tag: :oe_git:`yocto-4.2.4 </bitbake/log/?h=yocto-4.2.4>`
+- Git Revision: :oe_git:`c7e094ec3beccef0bbbf67c100147c449d9c6836 </bitbake/commit/?id=c7e094ec3beccef0bbbf67c100147c449d9c6836>`
+- Release Artefact: bitbake-c7e094ec3beccef0bbbf67c100147c449d9c6836
+- sha: 6a35a62bee3446cd0f9e0ec1de9b8f60fc396109075b37d7c4a1f2e6d63271c6
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.2.4/bitbake-c7e094ec3beccef0bbbf67c100147c449d9c6836.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.2.4/bitbake-c7e094ec3beccef0bbbf67c100147c449d9c6836.tar.bz2
+
+yocto-docs
+
+- Repository Location: :yocto_git:`/yocto-docs`
+- Branch: :yocto_git:`mickledore </yocto-docs/log/?h=mickledore>`
+- Tag: :yocto_git:`yocto-4.2.4 </yocto-docs/log/?h=yocto-4.2.4>`
+- Git Revision: :yocto_git:`91a29ca94314c87fd3dc68601cd4932bdfffde35 </yocto-docs/commit/?id=91a29ca94314c87fd3dc68601cd4932bdfffde35>`
+
diff --git a/documentation/migration-guides/release-notes-4.2.rst b/documentation/migration-guides/release-notes-4.2.rst
new file mode 100644
index 0000000000..30049b89f6
--- /dev/null
+++ b/documentation/migration-guides/release-notes-4.2.rst
@@ -0,0 +1,984 @@
+.. SPDX-License-Identifier: CC-BY-SA-2.0-UK
+
+Release notes for 4.2 (mickledore)
+----------------------------------
+
+New Features / Enhancements in 4.2
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+- Linux kernel 6.1, glibc 2.37 and ~350 other recipe upgrades
+
+- Python 3.8+ and GCC 8.0+ are now the minimum required versions on the build host.
+ For host distributions that do not provide it, this is included as part of the
+ :term:`buildtools` tarball.
+
+- BitBake in this release now supports a new ``addpylib`` directive to enable
+ Python libraries within layers. For more information,
+ see :ref:`bitbake-user-manual/bitbake-user-manual-metadata:extending python library code`.
+
+ This directive should be added to your layer configuration
+ as in the below example from ``meta/conf/layer.conf``::
+
+ addpylib ${LAYERDIR}/lib oe
+
+- BitBake has seen multiple internal changes that may improve
+ memory and disk usage as well as parsing time, in particular:
+
+ - BitBake's Cooker server is now multithreaded.
+
+ - Ctrl+C can now be used to interrupt some long-running operations
+ that previously ignored it.
+
+ - BitBake's cache has been extended to include more hash
+ debugging data, but has also been optimized to :yocto_git:`compress
+ cache data <https://git.yoctoproject.org/poky/commit/?h=mickledore&id=7d010055e2af3294e17db862f42664ca689a9356>`.
+
+ - BitBake's UI will now ping the server regularly to ensure
+ it is still alive.
+
+- New variables:
+
+ - :term:`VOLATILE_TMP_DIR` allows to specify
+ whether ``/tmp`` should be on persistent storage
+ or in RAM.
+
+ - :term:`SPDX_CUSTOM_ANNOTATION_VARS` allows to add
+ specific comments to the :term:`SPDX` description of a recipe.
+
+- Rust improvements:
+
+ - This release adds Cargo support on the target, and includes
+ automated QA tests for this functionality.
+
+ - It also supports checksums for Rust crates and makes
+ them mandatory for each crate in a recipe.
+
+ - New :ref:`ref-classes-cargo-update-recipe-crates` class to
+ enable updating :term:`SRC_URI` crate lists from ``Cargo.lock``
+
+ - Enabled building Rust for baremetal targets
+
+ - You can now also easily select to build beta or nightly
+ versions of Rust with a new :term:`RUST_CHANNEL` variable
+ (use at own risk)
+
+ - Support for local GitHub repos in :term:`SRC_URI` as
+ replacements for Cargo dependencies
+
+ - Use built-in Rust targets for ``-native`` builds to save several
+ minutes building the Rust toolchain
+
+- Architecture-specific enhancements:
+
+ - This release adds initial support for the
+ :wikipedia:`LoongArch <Loongson#LoongArch>`
+ (``loongarch64``) architecture, though there is no testing for it yet.
+
+ - New ``x86-64-v3`` tunes (AVX, AVX2, BMI1, BMI2, F16C, FMA, LZCNT, MOVBE, XSAVE)
+
+ - go: add support to build on ppc64le
+ - rust: rustfmt now working and installed for riscv32
+ - libpng: enable NEON for aarch64 to ensure consistency with arm32.
+ - baremetal-helloworld: Enable x86 and x86-64 ports
+
+- Kernel-related enhancements:
+
+ - Added some support for building 6.2/6.3-rc kernels
+ - linux-yocto-dev: mark as compatible with qemuarm64 and qemuarmv5
+ - Add kernel specific OBJCOPY to help switching toolchains cleanly for kernel build between gcc and clang
+
+- New core recipes:
+
+ - ``debugedit``
+ - ``gtk4`` (import from meta-gnome)
+ - ``gcr``: add recipe for gcr-4
+ - ``graphene`` (import from meta-oe)
+ - ``libc-test``
+ - ``libportal`` (import from meta-gnome)
+ - ``libslirp``
+ - ``libtest-fatal-perl``
+ - ``libtest-warnings-perl`` (import from meta-perl)
+ - ``libtry-tiny-perl``
+ - ``python3-build``
+ - ``python3-pyproject-hooks``
+ - ``python3-hatch-fancy-pypi-readme``
+ - ``python3-unittest-automake``
+
+- QEMU/runqemu enhancements:
+
+ - Set ``QB_SMP`` with ?= to make it easier to modify
+ - Set ``QB_CPU`` with ?= to make it easier to modify (x86 configuration only)
+ - New ``QB_NFSROOTFS_EXTRA_OPT`` to allow extra options to be appended to the NFS rootfs options in kernel boot args, e.g. ``"wsize=4096,rsize=4096"``
+ - New ``QB_SETUP_CMD`` and ``QB_CLEANUP_CMD`` to enable running custom shell setup and cleanup commands before and after QEMU.
+ - ``QB_DEFAULT_KERNEL`` now defaults to pick the bundled initramfs kernel image if the Linux kernel image is generated with :term:`INITRAMFS_IMAGE_BUNDLE` set to "1"
+ - Split out the QEMU guest agent to its own ``qemu-guest-agent`` package
+ - runqemu: new ``guestagent`` option to enable communication with the guest agent
+ - runqemu: respect :term:`IMAGE_LINK_NAME` when searching for image
+
+- Image-related enhancements:
+
+ - Add 7-Zip support in image conversion types (``7zip``)
+ - New :term:`IMAGE_MACHINE_SUFFIX` variable to allow easily removing machine name suffix from image file names
+
+- wic Image Creator enhancements:
+
+ - ``bootimg-efi.py``: add support for directly loading Linux kernel UEFI stub
+ - ``bootimg-efi.py``: implement ``--include-path``
+ - Allow usage of ``fstype=none`` to specify an unformatted partition
+ - Implement repeatable disk identifiers based on :term:`SOURCE_DATE_EPOCH`
+
+- FIT image related improvements:
+
+ - FIT image signing support has been reworked to remove interdependencies and make it more easily extensible
+ - Skip FDT section creation for applicable symlinks to avoid the same dtb being duplicated
+ - New :term:`FIT_CONF_DEFAULT_DTB` variable to enable selecting default dtb when multiple dtbs exist
+
+- SDK-related improvements:
+
+ - Extended the following recipes to nativesdk:
+
+ - ``bc``
+ - ``gi-docgen``
+ - ``gperf``
+ - ``python3-iniconfig``
+ - ``python3-atomicwrites``
+ - ``python3-markdown``
+ - ``python3-smartypants``
+ - ``python3-typogrify``
+ - ``ruby``
+ - ``unifdef``
+
+ - New :term:`SDK_ZIP_OPTIONS` variable to enable passing additional options to the zip command when preparing the SDK zip archive
+ - New Rust SDK target packagegroup (``packagegroup-rust-sdk-target``)
+
+- Testing:
+
+ - The ptest images have changed structure in this release. The
+ underlying ``core-image-ptest`` recipe now uses :term:`BBCLASSEXTEND` to
+ create a variant for each ptest enabled recipe in OE-Core.
+
+ For example, this means that ``core-image-ptest-bzip2``,
+ ``core-image-ptest-lttng-tools`` and many more image targets now exist
+ and can be built/tested individually.
+
+ The ``core-image-ptest-all`` and ``core-image-ptest-fast`` targets are now
+ wrappers that target groups of individual images and means that the tests
+ can be executed in parallel during our automated testing. This also means
+ the dependencies are more accurately tested.
+
+ - It is now possible to track regression changes between releases using
+ :oe_git:`yocto_testresults_query.py </openembedded-core/tree/scripts/yocto_testresults_query.py>`,
+ which is a thin wrapper over :oe_git:`resulttool
+ </openembedded-core/tree/scripts/resulttool>`. Here is an example
+ command, which allowed to spot and fix a regression in the
+ ``quilt`` ptest::
+
+ yocto_testresults_query.py regression-report 4.2_M1 4.2_M2
+
+ See this `blog post about regression detection
+ <https://bootlin.com/blog/continuous-integration-in-yocto-improving-the-regressions-detection/>`__.
+
+ - This release adds support for parallel ptest execution with a ptest per image.
+ This takes ptest execution time from 3.5 hours to around 45 minutes on the autobuilder.
+
+ - Basic Rust compile/run and cargo tests
+
+ - New ``python3-unittest-automake`` recipe which provides modules for pytest
+ and unittest to adjust their output to automake-style for easier integration
+ with the ptest system.
+
+ - ptest support added to ``bc``, ``cpio`` and ``gnutls``, and fixes made to
+ ptests in numerous other recipes.
+
+ - ``ptest-runner`` now adds a non-root "ptest" user to run tests.
+
+ - ``resulttool``: add a ``--list-ptest`` option to the log subcommand to list ptest names
+ in a results file
+
+ - ``resulttool``: regression: add metadata filtering for oeselftest
+
+- New :term:`PACKAGECONFIG` options in the following recipes:
+
+ - ``at-spi2-core``
+ - ``base-passwd``
+ - ``cronie``
+ - ``cups``
+ - ``curl``
+ - ``file``
+ - ``gstreamer1.0-plugins-good``
+ - ``gtk+3``
+ - ``iproute2``
+ - ``libsdl2``
+ - ``libtiff``
+ - ``llvm``
+ - ``mesa``
+ - ``psmisc``
+ - ``qemu``
+ - ``sudo``
+ - ``systemd``
+ - ``tiff``
+ - ``util-linux``
+
+- Extended the following recipes to native:
+
+ - ``iso-codes``
+ - ``libxkbcommon``
+ - ``p11-kit``
+ - ``python3-atomicwrites``
+ - ``python3-dbusmock``
+ - ``python3-iniconfig``
+ - ``xkeyboard-config``
+
+- Utility script changes:
+
+ - ``devtool``: ignore patch-fuzz errors when extracting source in order to enable fixing fuzz issues
+ - ``oe-setup-layers``: make efficiently idempotent
+ - ``oe-setup-layers``: print a note about submodules if present
+ - New ``buildstats-summary`` script to show a summary of the buildstats data
+ - :ref:`ref-classes-report-error` class: catch ``Nothing PROVIDES`` error
+ - ``combo-layer``: add ``sync-revs`` command
+ - ``convert-overrides``: allow command-line customizations
+
+- bitbake-layers improvements:
+
+ - ``layerindex-fetch``: checkout layer(s) branch when clone exists
+ - ``create``: add ``-a``/``--add-layer option`` to add layer to ``bblayers.conf`` after creating layer
+ - ``show-layers``: improve output layout
+
+- Other BitBake improvements:
+
+ - Inline Python snippets can now include dictionary expressions
+ - Evaluate the value of export/unexport/network flags so that they can be reset to "0"
+ - Make :term:`EXCLUDE_FROM_WORLD` boolean so that it can be reset to "0"
+ - Support int values in ``bb.utils.to_boolean()`` in addition to strings
+ - ``bitbake-getvar``: Add a ``quiet`` command line argument
+ - Allow the ``@`` character in variable flag names
+ - Python library code will now be included when calculating task hashes
+ - ``fetch2/npmsw``: add more short forms for git operations
+ - Display a warning when ``SRCREV = "${AUTOREV}"`` is set too late to be effective
+ - Display all missing :term:`SRC_URI` checksums at once
+ - Improve error message for a missing multiconfig
+ - Switch to a new :term:`BB_CACHEDIR` variable for codeparser cache location
+ - Mechanism introduced to reduce the codeparser cache unnecessarily growing in size
+
+- Packaging changes:
+
+ - ``rng-tools`` is no longer recommended by ``openssh``, and the ``rng-tools``
+ service files have been split out to their own package
+ - ``linux-firmware``: split ``rtl8761`` and ``amdgpu`` firmware
+ - ``linux-firmware``: add new firmware file to ``${PN}-qcom-adreno-a530``
+ - ``iproute2``: separate ``routel`` and add Python dependency
+ - ``xinetd``: move ``xconv.pl`` script to separate package
+ - ``perf``: enable debug/source packaging
+
+- Prominent documentation updates:
+
+ - Substantially expanded the ":doc:`/dev-manual/vulnerabilities`" section.
+ - Added a new ":doc:`/dev-manual/sbom`" section about SPDX SBoM generation.
+ - Expanded ":ref:`init-manager`" documentation.
+ - New section about :ref:`ref-long-term-support-releases`.
+ - System Requirements: details about :ref:`system-requirements-minimum-ram`.
+ - Details about :ref:`ref-building-meson-package` and the
+ :ref:`ref-classes-meson` class.
+ - Documentation about how to write recipes for Rust programs. See the
+ :ref:`ref-classes-cargo` class.
+ - Documentation about how to write recipes for Go programs. See the
+ :ref:`ref-classes-go` class.
+ - Variable index: added references to variables only documented in the
+ BitBake manual. All variables should be easy to access through the Yocto
+ Manual variable index.
+ - Expanded the description of the :term:`BB_NUMBER_THREADS` variable.
+
+- Miscellaneous changes:
+
+ - Supporting 64 bit dates on 32 bit platforms: several packages have been
+ updated to pass year 2038 tests, and a QA check for 32 bit time and file
+ offset functions has been added (default off)
+
+ - Patch fuzz/Upstream-Status checking has been reworked:
+
+ - Upstream-Status checking is now configurable from :term:`WARN_QA`/:term:`ERROR_QA` (``patch-status-core``)
+ - Can now be enabled for non-core layers (``patch-status-noncore``)
+ - ``patch-fuzz`` is now in :term:`ERROR_QA` by default, and actually stops the build
+
+ - Many packages were updated to add large file support.
+
+ - ``vulkan-loader``: allow headless targets to build the loader
+ - ``dhcpcd``: fix to work with systemd
+ - ``u-boot``: add /boot to :term:`SYSROOT_DIRS` to allow boot files to be used by other recipes
+ - ``linux-firmware``: don't put the firmware into the sysroot
+ - ``cups``: add :term:`PACKAGECONFIG` to control web interface and default to off
+ - ``buildtools-tarball``: export certificates to python and curl
+ - ``yocto-check-layer``: allow OE-Core to be tested
+ - ``yocto-check-layer``: check for patch file upstream status
+ - ``boost``: enable building ``Boost.URL`` library
+ - ``native``: drop special variable handling
+ - Poky: make it easier to set :term:`INIT_MANAGER` from local.conf
+ - :ref:`ref-classes-create-spdx`: add support for custom annotations (:term:`SPDX_CUSTOM_ANNOTATION_VARS`)
+ - :ref:`ref-classes-create-spdx`: report downloads as separate packages
+ - :ref:`ref-classes-create-spdx`: remove the top-level image SPDX file and the JSON index file from :term:`DEPLOYDIR` to avoid confusion
+ - ``os-release``: replace ``DISTRO_CODENAME`` with ``VERSION_CODENAME`` (still set from :term:`DISTRO_CODENAME`)
+ - ``weston``: add kiosk shell
+ - :ref:`ref-classes-overlayfs`: Allow unused mount points
+ - ``sstatesig``: emit more helpful error message when not finding sstate manifest
+ - :ref:`ref-classes-pypi`.bbclass: Set :term:`SRC_URI` downloadfilename with an optional prefix
+ - ``poky-bleeding`` distro: update and rework
+ - :ref:`package.bbclass <ref-classes-package>`: check if package names conflict via ``PKG:${PN}`` override in :ref:`do_package <ref-tasks-package>`
+ - ``cve-update-nvd2-native``: new NVD CVE database fetcher using the 2.0 API
+ - :ref:`ref-classes-mirrors` class: use shallow tarball for ``binutils-native``/``nativesdk-binutils``
+ - ``meta/conf``: move default configuration templates into ``meta/conf/templates/default``
+ - ``binutils``: enable ``--enable-new-dtags`` as per many Linux distributions
+ - ``base-files``: drop ``localhost.localdomain`` from hosts file as per many Linux distributions
+ - ``packagegroup-core-boot``: make ``init-ifupdown`` package a recommendation
+
+Known Issues in 4.2
+~~~~~~~~~~~~~~~~~~~
+
+- N/A
+
+Recipe License changes in 4.2
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+The following corrections have been made to the :term:`LICENSE` values set by recipes:
+
+- ``curl``: set :term:`LICENSE` appropriately to ``curl`` as it is a special derivative of the MIT/X license, not exactly that license.
+- ``libgit2``: added ``Zlib``, ``ISC``, ``LGPL-2.1-or-later`` and ``CC0-1.0`` to :term:`LICENSE` covering portions of the included code.
+- ``linux-firmware``: set package :term:`LICENSE` appropriately for all qcom packages
+
+Security Fixes in 4.2
+~~~~~~~~~~~~~~~~~~~~~
+
+- binutils: :cve:`2022-4285`, :cve_mitre:`2023-25586`
+- curl: :cve:`2022-32221`, :cve:`2022-35260`, :cve:`2022-42915`, :cve:`2022-42916`
+- epiphany: :cve:`2023-26081`
+- expat: :cve:`2022-43680`
+- ffmpeg: :cve:`2022-3964`, :cve:`2022-3965`
+- git: :cve:`2022-39260`, :cve:`2022-41903`, :cve:`2022-23521`, :cve:`2022-41953` (ignored)
+- glibc: :cve:`2023-25139` (ignored)
+- go: :cve:`2023-24532`, :cve:`2023-24537`
+- grub2: :cve:`2022-2601`, :cve:`2022-3775`, :cve_mitre:`2022-28736`
+- inetutils: :cve:`2019-0053`
+- less: :cve:`2022-46663`
+- libarchive: :cve:`2022-36227`
+- libinput: :cve:`2022-1215`
+- libpam: :cve:`2022-28321`
+- libpng: :cve:`2019-6129`
+- libx11: :cve:`2022-3554`
+- openssh: :cve:`2023-28531`
+- openssl: :cve:`2022-3358`, :cve:`2022-3786`, :cve:`2022-3602`, :cve:`2022-3996`, :cve:`2023-0286`, :cve:`2022-4304`, :cve:`2022-4203`, :cve:`2023-0215`, :cve:`2022-4450`, :cve:`2023-0216`, :cve:`2023-0217`, :cve:`2023-0401`, :cve:`2023-0464`
+- ppp: :cve:`2022-4603`
+- python3-cryptography{-vectors}: :cve:`2022-3602`, :cve:`2022-3786`, :cve:`2023-23931`
+- python3: :cve_mitre:`2022-37460`
+- qemu: :cve:`2022-3165`
+- rust: :cve:`2022-46176`
+- rxvt-unicode: :cve:`2022-4170`
+- screen: :cve:`2023-24626`
+- shadow: :cve:`2023-29383`, :cve:`2016-15024` (ignored)
+- sudo: :cve:`2022-43995`
+- systemd: :cve:`2022-4415` (ignored)
+- tar: :cve:`2022-48303`
+- tiff: :cve:`2022-3599`, :cve:`2022-3597`, :cve:`2022-3626`, :cve:`2022-3627`, :cve:`2022-3570`, :cve:`2022-3598`, :cve:`2022-3970`, :cve:`2022-48281`
+- vim: :cve:`2022-3352`, :cve:`2022-4141`, :cve:`2023-0049`, :cve:`2023-0051`, :cve:`2023-0054`, :cve:`2023-0288`, :cve:`2023-1127`, :cve:`2023-1170`, :cve:`2023-1175`, :cve:`2023-1127`, :cve:`2023-1170`, :cve:`2023-1175`, :cve:`2023-1264`, :cve:`2023-1355`, :cve:`2023-0433`, :cve:`2022-47024`, :cve:`2022-3705`
+- xdg-utils: :cve:`2022-4055`
+- xserver-xorg: :cve:`2022-3550`, :cve:`2022-3551`, :cve:`2023-1393`, :cve:`2023-0494`, :cve:`2022-3553` (ignored)
+
+Recipe Upgrades in 4.2
+~~~~~~~~~~~~~~~~~~~~~~
+
+- acpid: upgrade 2.0.33 -> 2.0.34
+- adwaita-icon-theme: update 42.0 -> 43
+- alsa-lib: upgrade 1.2.7.2 -> 1.2.8
+- alsa-ucm-conf: upgrade 1.2.7.2 -> 1.2.8
+- alsa-utils: upgrade 1.2.7 -> 1.2.8
+- apr: update 1.7.0 -> 1.7.2
+- apr-util: update 1.6.1 -> 1.6.3
+- argp-standalone: replace with a maintained fork
+- at-spi2-core: upgrade 2.44.1 -> 2.46.0
+- autoconf-archive: upgrade 2022.09.03 -> 2023.02.20
+- babeltrace: upgrade 1.5.8 -> 1.5.11
+- base-passwd: Update to 3.6.1
+- bash: update 5.1.16 -> 5.2.15
+- bind: upgrade 9.18.7 -> 9.18.12
+- binutils: Upgrade to 2.40 release
+- bluez: update 5.65 -> 5.66
+- boost-build-native: update 1.80.0 -> 1.81.0
+- boost: upgrade 1.80.0 -> 1.81.0
+- btrfs-tools: upgrade 5.19.1 -> 6.1.3
+- busybox: 1.35.0 -> 1.36.0
+- ccache: upgrade 4.6.3 -> 4.7.4
+- cmake: update 3.24.0 -> 3.25.2
+- cracklib: upgrade to v2.9.10
+- curl: upgrade 7.86.0 -> 8.0.1
+- dbus: upgrade 1.14.0 -> 1.14.6
+- diffoscope: upgrade 221 -> 236
+- diffstat: upgrade 1.64 -> 1.65
+- diffutils: update 3.8 -> 3.9
+- dos2unix: upgrade 7.4.3 -> 7.4.4
+- dpkg: update 1.21.9 -> 1.21.21
+- dropbear: upgrade 2022.82 -> 2022.83
+- dtc: upgrade 1.6.1 -> 1.7.0
+- e2fsprogs: upgrade 1.46.5 -> 1.47.0
+- ed: upgrade 1.18 -> 1.19
+- elfutils: update 0.187 -> 0.188
+- ell: upgrade 0.53 -> 0.56
+- enchant2: upgrade 2.3.3 -> 2.3.4
+- encodings: update 1.0.6 -> 1.0.7
+- epiphany: update 42.4 -> 43.1
+- ethtool: upgrade 5.19 -> 6.2
+- expat: upgrade to 2.5.0
+- ffmpeg: upgrade 5.1.1 -> 5.1.2
+- file: upgrade 5.43 -> 5.44
+- flac: update 1.4.0 -> 1.4.2
+- font-alias: update 1.0.4 -> 1.0.5
+- fontconfig: upgrade 2.14.0 -> 2.14.2
+- font-util: upgrade 1.3.3 -> 1.4.0
+- freetype: update 2.12.1 -> 2.13.0
+- gawk: update 5.1.1 -> 5.2.1
+- gcr3: update 3.40.0 -> 3.41.1
+- gcr: rename gcr -> gcr3
+- gdb: Upgrade to 13.1
+- gdk-pixbuf: upgrade 2.42.9 -> 2.42.10
+- gettext: update 0.21 -> 0.21.1
+- ghostscript: update 9.56.1 -> 10.0.0
+- gi-docgen: upgrade 2022.1 -> 2023.1
+- git: upgrade 2.37.3 -> 2.39.2
+- glib-2.0: update 2.72.3 -> 2.74.6
+- glibc: upgrade to 2.37 release + stable updates
+- glib-networking: update 2.72.2 -> 2.74.0
+- glslang: upgrade 1.3.236.0 -> 1.3.239.0
+- gnu-config: upgrade to latest revision
+- gnupg: upgrade 2.3.7 -> 2.4.0
+- gnutls: upgrade 3.7.7 -> 3.8.0
+- gobject-introspection: upgrade 1.72.0 -> 1.74.0
+- go: update 1.19 -> 1.20.1
+- grep: update 3.7 -> 3.10
+- gsettings-desktop-schemas: upgrade 42.0 -> 43.0
+- gstreamer1.0: upgrade 1.20.3 -> 1.22.0
+- gtk+3: upgrade 3.24.34 -> 3.24.36
+- gtk4: update 4.8.2 -> 4.10.0
+- harfbuzz: upgrade 5.1.0 -> 7.1.0
+- hdparm: update 9.64 -> 9.65
+- help2man: upgrade 1.49.2 -> 1.49.3
+- icu: update 71.1 -> 72-1
+- ifupdown: upgrade 0.8.37 -> 0.8.41
+- igt-gpu-tools: upgrade 1.26 -> 1.27.1
+- inetutils: upgrade 2.3 -> 2.4
+- init-system-helpers: upgrade 1.64 -> 1.65.2
+- iproute2: upgrade 5.19.0 -> 6.2.0
+- iptables: update 1.8.8 -> 1.8.9
+- iputils: update to 20221126
+- iso-codes: upgrade 4.11.0 -> 4.13.0
+- jquery: upgrade 3.6.0 -> 3.6.3
+- kexec-tools: upgrade 2.0.25 -> 2.0.26
+- kmscube: upgrade to latest revision
+- libarchive: upgrade 3.6.1 -> 3.6.2
+- libbsd: upgrade 0.11.6 -> 0.11.7
+- libcap: upgrade 2.65 -> 2.67
+- libdnf: update 0.69.0 -> 0.70.0
+- libdrm: upgrade 2.4.113 -> 2.4.115
+- libedit: upgrade 20210910-3.1 -> 20221030-3.1
+- libepoxy: update 1.5.9 -> 1.5.10
+- libffi: upgrade 3.4.2 -> 3.4.4
+- libfontenc: upgrade 1.1.6 -> 1.1.7
+- libgit2: upgrade 1.5.0 -> 1.6.3
+- libgpg-error: update 1.45 -> 1.46
+- libhandy: update 1.6.3 -> 1.8.1
+- libical: upgrade 3.0.14 -> 3.0.16
+- libice: update 1.0.10 -> 1.1.1
+- libidn2: upgrade 2.3.3 -> 2.3.4
+- libinput: upgrade 1.19.4 -> 1.22.1
+- libjpeg-turbo: upgrade 2.1.4 -> 2.1.5.1
+- libksba: upgrade 1.6.0 -> 1.6.3
+- libmicrohttpd: upgrade 0.9.75 -> 0.9.76
+- libmodule-build-perl: update 0.4231 -> 0.4232
+- libmpc: upgrade 1.2.1 -> 1.3.1
+- libnewt: update 0.52.21 -> 0.52.23
+- libnotify: upgrade 0.8.1 -> 0.8.2
+- libpcap: upgrade 1.10.1 -> 1.10.3
+- libpciaccess: update 0.16 -> 0.17
+- libpcre2: upgrade 10.40 -> 10.42
+- libpipeline: upgrade 1.5.6 -> 1.5.7
+- libpng: upgrade 1.6.38 -> 1.6.39
+- libpsl: upgrade 0.21.1 -> 0.21.2
+- librepo: upgrade 1.14.5 -> 1.15.1
+- libsdl2: upgrade 2.24.1 -> 2.26.3
+- libsm: 1.2.3 > 1.2.4
+- libsndfile1: upgrade 1.1.0 -> 1.2.0
+- libsolv: upgrade 0.7.22 -> 0.7.23
+- libsoup-2.4: upgrade 2.74.2 -> 2.74.3
+- libsoup: upgrade 3.0.7 -> 3.2.2
+- libtest-fatal-perl: upgrade 0.016 -> 0.017
+- libtest-needs-perl: upgrade 0.002009 -> 0.002010
+- libunistring: upgrade 1.0 -> 1.1
+- liburcu: upgrade 0.13.2 -> 0.14.0
+- liburi-perl: upgrade 5.08 -> 5.17
+- libva: upgrade 2.15.0 -> 2.16.0
+- libva-utils: upgrade 2.15.0 -> 2.17.1
+- libwebp: upgrade 1.2.4 -> 1.3.0
+- libwpe: upgrade 1.12.3 -> 1.14.1
+- libx11: 1.8.1 -> 1.8.4
+- libx11-compose-data: 1.6.8 -> 1.8.4
+- libxau: upgrade 1.0.10 -> 1.0.11
+- libxcomposite: update 0.4.5 -> 0.4.6
+- libxcrypt-compat: upgrade 4.4.30 -> 4.4.33
+- libxcrypt: upgrade 4.4.28 -> 4.4.30
+- libxdamage: update 1.1.5 -> 1.1.6
+- libxdmcp: update 1.1.3 -> 1.1.4
+- libxext: update 1.3.4 -> 1.3.5
+- libxft: update 2.3.4 -> 2.3.6
+- libxft: upgrade 2.3.6 -> 2.3.7
+- libxinerama: update 1.1.4 -> 1.1.5
+- libxkbcommon: upgrade 1.4.1 -> 1.5.0
+- libxkbfile: update 1.1.0 -> 1.1.1
+- libxkbfile: upgrade 1.1.1 -> 1.1.2
+- libxml2: upgrade 2.9.14 -> 2.10.3
+- libxmu: update 1.1.3 -> 1.1.4
+- libxpm: update 3.5.13 -> 3.5.15
+- libxrandr: update 1.5.2 -> 1.5.3
+- libxrender: update 0.9.10 -> 0.9.11
+- libxres: update 1.2.1 -> 1.2.2
+- libxscrnsaver: update 1.2.3 -> 1.2.4
+- libxshmfence: update 1.3 -> 1.3.2
+- libxslt: upgrade 1.1.35 -> 1.1.37
+- libxtst: update 1.2.3 -> 1.2.4
+- libxv: update 1.0.11 -> 1.0.12
+- libxxf86vm: update 1.1.4 -> 1.1.5
+- lighttpd: upgrade 1.4.66 -> 1.4.69
+- linux-firmware: upgrade 20220913 -> 20230210
+- linux-libc-headers: bump to 6.1
+- linux-yocto/5.15: update genericx86* machines to v5.15.103
+- linux-yocto/5.15: update to v5.15.108
+- linux-yocto/6.1: update to v6.1.25
+- linux-yocto-dev: bump to v6.3
+- linux-yocto-rt/5.15: update to -rt59
+- linux-yocto-rt/6.1: update to -rt7
+- llvm: update 14.0.6 -> 15.0.7
+- log4cplus: upgrade 2.0.8 -> 2.1.0
+- logrotate: upgrade 3.20.1 -> 3.21.0
+- lsof: upgrade 4.95.0 -> 4.98.0
+- ltp: upgrade 20220527 -> 20230127
+- lttng-modules: upgrade 2.13.4 -> 2.13.9
+- lttng-tools: update 2.13.8 -> 2.13.9
+- lttng-ust: upgrade 2.13.4 -> 2.13.5
+- makedepend: upgrade 1.0.6 -> 1.0.8
+- make: update 4.3 -> 4.4.1
+- man-db: update 2.10.2 -> 2.11.2
+- man-pages: upgrade 5.13 -> 6.03
+- matchbox-config-gtk: Update to latest SRCREV
+- matchbox-desktop-2: Update 2.2 -> 2.3
+- matchbox-panel-2: Update 2.11 -> 2.12
+- matchbox-terminal: Update to latest SRCREV
+- matchbox-wm: Update 1.2.2 -> 1.2.3
+- mc: update 4.8.28 -> 4.8.29
+- mesa: update 22.2.0 -> 23.0.0
+- meson: upgrade 0.63.2 -> 1.0.1
+- mmc-utils: upgrade to latest revision
+- mobile-broadband-provider-info: upgrade 20220725 -> 20221107
+- mpfr: upgrade 4.1.0 -> 4.2.0
+- mpg123: upgrade 1.30.2 -> 1.31.2
+- msmtp: upgrade 1.8.22 -> 1.8.23
+- mtd-utils: upgrade 2.1.4 -> 2.1.5
+- mtools: upgrade 4.0.40 -> 4.0.42
+- musl-obstack: Update to 1.2.3
+- musl: Upgrade to latest master
+- nasm: update 2.15.05 -> 2.16.01
+- ncurses: upgrade 6.3+20220423 -> 6.4
+- netbase: upgrade 6.3 -> 6.4
+- newlib: Upgrade 4.2.0 -> 4.3.0
+- nghttp2: upgrade 1.49.0 -> 1.52.0
+- numactl: upgrade 2.0.15 -> 2.0.16
+- opensbi: Upgrade to 1.2 release
+- openssh: upgrade 9.0p1 -> 9.3p1
+- openssl: Upgrade 3.0.5 -> 3.1.0
+- opkg: upgrade to version 0.6.1
+- orc: upgrade 0.4.32 -> 0.4.33
+- ovmf: upgrade edk2-stable202205 -> edk2-stable202211
+- pango: upgrade 1.50.9 -> 1.50.13
+- patchelf: upgrade 0.15.0 -> 0.17.2
+- pciutils: upgrade 3.8.0 -> 3.9.0
+- piglit: upgrade to latest revision
+- pinentry: update 1.2.0 -> 1.2.1
+- pixman: upgrade 0.40.0 -> 0.42.2
+- pkgconf: upgrade 1.9.3 -> 1.9.4
+- popt: update 1.18 -> 1.19
+- powertop: upgrade 2.14 -> 2.15
+- procps: update 3.3.17 -> 4.0.3
+- psmisc: upgrade 23.5 -> 23.6
+- puzzles: upgrade to latest revision
+- python3-alabaster: upgrade 0.7.12 -> 0.7.13
+- python3-attrs: upgrade 22.1.0 -> 22.2.0
+- python3-babel: upgrade 2.10.3 -> 2.12.1
+- python3-bcrypt: upgrade 3.2.2 -> 4.0.1
+- python3-certifi: upgrade 2022.9.14 -> 2022.12.7
+- python3-chardet: upgrade 5.0.0 -> 5.1.0
+- python3-cryptography: upgrade 38.0.3 -> 39.0.4
+- python3-cryptography-vectors: upgrade 37.0.4 -> 39.0.2
+- python3-cython: upgrade 0.29.32 -> 0.29.33
+- python3-dbusmock: update 0.28.4 -> 0.28.7
+- python3-dbus: upgrade 1.2.18 -> 1.3.2
+- python3-dtschema: upgrade 2022.8.3 -> 2023.1
+- python3-flit-core: upgrade 3.7.1 -> 3.8.0
+- python3-gitdb: upgrade 4.0.9 -> 4.0.10
+- python3-git: upgrade 3.1.27 -> 3.1.31
+- python3-hatch-fancy-pypi-readme: upgrade 22.7.0 -> 22.8.0
+- python3-hatchling: upgrade 1.9.0 -> 1.13.0
+- python3-hatch-vcs: upgrade 0.2.0 -> 0.3.0
+- python3-hypothesis: upgrade 6.54.5 -> 6.68.2
+- python3-importlib-metadata: upgrade 4.12.0 -> 6.0.0
+- python3-iniconfig: upgrade 1.1.1 -> 2.0.0
+- python3-installer: update 0.5.1 -> 0.6.0
+- python3-iso8601: upgrade 1.0.2 -> 1.1.0
+- python3-jsonschema: upgrade 4.9.1 -> 4.17.3
+- python3-lxml: upgrade 4.9.1 -> 4.9.2
+- python3-mako: upgrade 1.2.2 -> 1.2.4
+- python3-markupsafe: upgrade 2.1.1 -> 2.1.2
+- python3-more-itertools: upgrade 8.14.0 -> 9.1.0
+- python3-numpy: upgrade 1.23.3 -> 1.24.2
+- python3-packaging: upgrade to 23.0
+- python3-pathspec: upgrade 0.10.1 -> 0.11.0
+- python3-pbr: upgrade 5.10.0 -> 5.11.1
+- python3-pip: upgrade 22.2.2 -> 23.0.1
+- python3-poetry-core: upgrade 1.0.8 -> 1.5.2
+- python3-psutil: upgrade 5.9.2 -> 5.9.4
+- python3-pycairo: upgrade 1.21.0 -> 1.23.0
+- python3-pycryptodome: upgrade 3.15.0 -> 3.17
+- python3-pycryptodomex: upgrade 3.15.0 -> 3.17
+- python3-pygments: upgrade 2.13.0 -> 2.14.0
+- python3-pyopenssl: upgrade 22.0.0 -> 23.0.0
+- python3-pyrsistent: upgrade 0.18.1 -> 0.19.3
+- python3-pytest-subtests: upgrade 0.8.0 -> 0.10.0
+- python3-pytest: upgrade 7.1.3 -> 7.2.2
+- python3-pytz: upgrade 2022.2.1 -> 2022.7.1
+- python3-requests: upgrade 2.28.1 -> 2.28.2
+- python3-scons: upgrade 4.4.0 -> 4.5.2
+- python3-setuptools-rust: upgrade 1.5.1 -> 1.5.2
+- python3-setuptools-scm: upgrade 7.0.5 -> 7.1.0
+- python3-setuptools: upgrade 65.0.2 -> 67.6.0
+- python3-sphinxcontrib-applehelp: update 1.0.2 -> 1.0.4
+- python3-sphinxcontrib-htmlhelp: 2.0.0 -> 2.0.1
+- python3-sphinx-rtd-theme: upgrade 1.0.0 -> 1.2.0
+- python3-sphinx: upgrade 5.1.1 -> 6.1.3
+- python3-subunit: upgrade 1.4.0 -> 1.4.2
+- python3-testtools: upgrade 2.5.0 -> 2.6.0
+- python3-typing-extensions: upgrade 4.3.0 -> 4.5.0
+- python3: update 3.10.6 -> 3.11.2
+- python3-urllib3: upgrade 1.26.12 -> 1.26.15
+- python3-wcwidth: upgrade 0.2.5 -> 0.2.6
+- python3-wheel: upgrade 0.37.1 -> 0.40.0
+- python3-zipp: upgrade 3.8.1 -> 3.15.0
+- qemu: update 7.1.0 -> 7.2.0
+- quota: update 4.06 -> 4.09
+- readline: update 8.1.2 -> 8.2
+- repo: upgrade 2.29.2 -> 2.32
+- rgb: update 1.0.6 -> 1.1.0
+- rng-tools: upgrade 6.15 -> 6.16
+- rsync: update 3.2.5 -> 3.2.7
+- rt-tests: update 2.4 -> 2.5
+- ruby: update 3.1.2 -> 3.2.1
+- rust: update 1.63.0 -> 1.68.1
+- rxvt-unicode: upgrade 9.30 -> 9.31
+- sed: update 4.8 -> 4.9
+- shaderc: upgrade 2022.2 -> 2023.2
+- shadow: update 4.12.1 -> 4.13
+- socat: upgrade 1.7.4.3 -> 1.7.4.4
+- spirv-headers: upgrade 1.3.236.0 -> 1.3.239.0
+- spirv-tools: upgrade 1.3.236.0 -> 1.3.239.0
+- sqlite3: upgrade 3.39.3 -> 3.41.0
+- strace: upgrade 5.19 -> 6.2
+- stress-ng: update 0.14.03 -> 0.15.06
+- sudo: upgrade 1.9.11p3 -> 1.9.13p3
+- swig: update 4.0.2 -> 4.1.1
+- sysstat: upgrade 12.6.0 -> 12.6.2
+- systemd: update 251.4 -> 253.1
+- systemtap: upgrade 4.7 -> 4.8
+- taglib: upgrade 1.12 -> 1.13
+- tcf-agent: Update to current version
+- tcl: update 8.6.11 -> 8.6.13
+- texinfo: update 6.8 -> 7.0.2
+- tiff: update 4.4.0 -> 4.5.0
+- tzdata: update 2022d -> 2023c
+- u-boot: upgrade 2022.07 -> 2023.01
+- unfs: update 0.9.22 -> 0.10.0
+- usbutils: upgrade 014 -> 015
+- util-macros: upgrade 1.19.3 -> 1.20.0
+- vala: upgrade 0.56.3 -> 0.56.4
+- valgrind: update to 3.20.0
+- vim: Upgrade 9.0.0598 -> 9.0.1429
+- virglrenderer: upgrade 0.10.3 -> 0.10.4
+- vte: update 0.68.0 -> 0.72.0
+- vulkan-headers: upgrade 1.3.236.0 -> 1.3.239.0
+- vulkan-loader: upgrade 1.3.236.0 -> 1.3.239.0
+- vulkan-samples: update to latest revision
+- vulkan-tools: upgrade 1.3.236.0 -> 1.3.239.0
+- vulkan: update 1.3.216.0 -> 1.3.236.0
+- wayland-protocols: upgrade 1.26 -> 1.31
+- wayland-utils: update 1.0.0 -> 1.1.0
+- webkitgtk: update 2.36.7 -> 2.38.5
+- weston: update 10.0.2 -> 11.0.1
+- wireless-regdb: upgrade 2022.08.12 -> 2023.02.13
+- wpebackend-fdo: upgrade 1.12.1 -> 1.14.0
+- xcb-util: update 0.4.0 -> 0.4.1
+- xcb-util-keysyms: 0.4.0 -> 0.4.1
+- xcb-util-renderutil: 0.3.9 -> 0.3.10
+- xcb-util-wm: 0.4.1 -> 0.4.2
+- xcb-util-image: 0.4.0 -> 0.4.1
+- xf86-input-mouse: update 1.9.3 -> 1.9.4
+- xf86-input-vmmouse: update 13.1.0 -> 13.2.0
+- xf86-video-vesa: update 2.5.0 -> 2.6.0
+- xf86-video-vmware: update 13.3.0 -> 13.4.0
+- xhost: update 1.0.8 -> 1.0.9
+- xinit: update 1.4.1 -> 1.4.2
+- xkbcomp: update 1.4.5 -> 1.4.6
+- xkeyboard-config: upgrade 2.36 -> 2.38
+- xprop: update 1.2.5 -> 1.2.6
+- xrandr: upgrade 1.5.1 -> 1.5.2
+- xserver-xorg: upgrade 21.1.4 -> 21.1.7
+- xset: update 1.2.4 -> 1.2.5
+- xvinfo: update 1.1.4 -> 1.1.5
+- xwayland: upgrade 22.1.3 -> 22.1.8
+- xz: upgrade 5.2.6 -> 5.4.2
+- zlib: upgrade 1.2.12 -> 1.2.13
+- zstd: upgrade 1.5.2 -> 1.5.4
+
+Contributors to 4.2
+~~~~~~~~~~~~~~~~~~~
+
+Thanks to the following people who contributed to this release:
+
+- Adrian Freihofer
+- Ahmad Fatoum
+- Alejandro Hernandez Samaniego
+- Alexander Kanavin
+- Alexandre Belloni
+- Alexey Smirnov
+- Alexis Lothoré
+- Alex Kiernan
+- Alex Stewart
+- Andrej Valek
+- Andrew Geissler
+- Anton Antonov
+- Antonin Godard
+- Archana Polampalli
+- Armin Kuster
+- Arnout Vandecappelle
+- Arturo Buzarra
+- Atanas Bunchev
+- Benjamin Szőke
+- Benoît Mauduit
+- Bernhard Rosenkränzer
+- Bruce Ashfield
+- Caner Altinbasak
+- Carlos Alberto Lopez Perez
+- Changhyeok Bae
+- Changqing Li
+- Charlie Johnston
+- Chase Qi
+- Chee Yang Lee
+- Chen Qi
+- Chris Elledge
+- Christian Eggers
+- Christoph Lauer
+- Chuck Wolber
+- Ciaran Courtney
+- Claus Stovgaard
+- Clément Péron
+- Daniel Ammann
+- David Bagonyi
+- Denys Dmytriyenko
+- Denys Zagorui
+- Diego Sueiro
+- Dmitry Baryshkov
+- Ed Tanous
+- Enguerrand de Ribaucourt
+- Enrico Jörns
+- Enrico Scholz
+- Etienne Cordonnier
+- Fabio Estevam
+- Fabre Sébastien
+- Fawzi KHABER
+- Federico Pellegrin
+- Frank de Brabander
+- Frederic Martinsons
+- Geoffrey GIRY
+- George Kelly
+- Harald Seiler
+- He Zhe
+- Hitendra Prajapati
+- Jagadeesh Krishnanjanappa
+- James Raphael Tiovalen
+- Jan Kircher
+- Jan Luebbe
+- Jan-Simon Moeller
+- Javier Tia
+- Jeremy Puhlman
+- Jermain Horsman
+- Jialing Zhang
+- Joel Stanley
+- Joe Slater
+- Johan Korsnes
+- Jon Mason
+- Jordan Crouse
+- Jose Quaresma
+- Joshua Watt
+- Justin Bronder
+- Kai Kang
+- Kasper Revsbech
+- Keiya Nobuta
+- Kenfe-Mickael Laventure
+- Kevin Hao
+- Khem Raj
+- Konrad Weihmann
+- Lei Maohui
+- Leon Anavi
+- Liam Beguin
+- Louis Rannou
+- Luca Boccassi
+- Luca Ceresoli
+- Luis Martins
+- Maanya Goenka
+- Marek Vasut
+- Mark Asselstine
+- Mark Hatle
+- Markus Volk
+- Marta Rybczynska
+- Martin Jansa
+- Martin Larsson
+- Mateusz Marciniec
+- Mathieu Dubois-Briand
+- Mauro Queiros
+- Maxim Uvarov
+- Michael Halstead
+- Michael Opdenacker
+- Mike Crowe
+- Mikko Rapeli
+- Ming Liu
+- Mingli Yu
+- Narpat Mali
+- Nathan Rossi
+- Niko Mauno
+- Ola x Nilsson
+- Oliver Lang
+- Ovidiu Panait
+- Pablo Saavedra
+- Patrick Williams
+- Paul Eggleton
+- Paulo Neves
+- Pavel Zhukov
+- Pawel Zalewski
+- Pedro Baptista
+- Peter Bergin
+- Peter Kjellerstedt
+- Peter Marko
+- Petr Kubizňák
+- Petr Vorel
+- pgowda
+- Piotr Łobacz
+- Quentin Schulz
+- Randy MacLeod
+- Ranjitsinh Rathod
+- Ravineet Singh
+- Ravula Adhitya Siddartha
+- Richard Elberger
+- Richard Leitner
+- Richard Purdie
+- Robert Andersson
+- Robert Joslyn
+- Robert Yang
+- Romuald JEANNE
+- Ross Burton
+- Ryan Eatmon
+- Sakib Sajal
+- Sandeep Gundlupet Raju
+- Saul Wold
+- Sean Anderson
+- Sergei Zhmylev
+- Siddharth Doshi
+- Soumya
+- Sudip Mukherjee
+- Sundeep KOKKONDA
+- Teoh Jay Shen
+- Thomas De Schampheleire
+- Thomas Perrot
+- Thomas Roos
+- Tim Orling
+- Tobias Hagelborn
+- Tom Hochstein
+- Trevor Woerner
+- Ulrich Ölmann
+- Vincent Davis Jr
+- Vivek Kumbhar
+- Vyacheslav Yurkov
+- Wang Mingyu
+- Wentao Zhang
+- Xiangyu Chen
+- Xiaotian Wu
+- Yan Xinkuan
+- Yash Shinde
+- Yi Zhao
+- Yoann Congal
+- Yureka Lilian
+- Zang Ruochen
+- Zheng Qiu
+- Zheng Ruoqin
+- Zoltan Boszormenyi
+- 张忠山
+
+
+Repositories / Downloads for Yocto-4.2
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+poky
+
+- Repository Location: :yocto_git:`/poky`
+- Branch: :yocto_git:`mickledore </poky/log/?h=mickledore>`
+- Tag: :yocto_git:`yocto-4.2 </poky/log/?h=yocto-4.2>`
+- Git Revision: :yocto_git:`21790e71d55f417f27cd51fae9dd47549758d4a0 </poky/commit/?id=21790e71d55f417f27cd51fae9dd47549758d4a0>`
+- Release Artefact: poky-21790e71d55f417f27cd51fae9dd47549758d4a0
+- sha: 38606076765d912deec84e523403709ef1249122197e61454ae08818e60f83c2
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.2/poky-21790e71d55f417f27cd51fae9dd47549758d4a0.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.2/poky-21790e71d55f417f27cd51fae9dd47549758d4a0.tar.bz2
+
+openembedded-core
+
+- Repository Location: :oe_git:`/openembedded-core`
+- Branch: :oe_git:`mickledore </openembedded-core/log/?h=mickledore>`
+- Tag: :oe_git:`yocto-4.2 </openembedded-core/log/?h=yocto-4.2>`
+- Git Revision: :oe_git:`c57d1a561db563ed2f521bbac5fc12d4ac8e11a7 </openembedded-core/commit/?id=c57d1a561db563ed2f521bbac5fc12d4ac8e11a7>`
+- Release Artefact: oecore-c57d1a561db563ed2f521bbac5fc12d4ac8e11a7
+- sha: e8cdd870492017be7e7b74b8c2fb73ae6771b2d2125b2aa1f0e65d0689f96af8
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.2/oecore-c57d1a561db563ed2f521bbac5fc12d4ac8e11a7.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.2/oecore-c57d1a561db563ed2f521bbac5fc12d4ac8e11a7.tar.bz2
+
+meta-mingw
+
+- Repository Location: :yocto_git:`/meta-mingw`
+- Branch: :yocto_git:`mickledore </meta-mingw/log/?h=mickledore>`
+- Tag: :yocto_git:`yocto-4.2 </meta-mingw/log/?h=yocto-4.2>`
+- Git Revision: :yocto_git:`250617ffa524c082b848487359b9d045703d59c2 </meta-mingw/commit/?id=250617ffa524c082b848487359b9d045703d59c2>`
+- Release Artefact: meta-mingw-250617ffa524c082b848487359b9d045703d59c2
+- sha: 873a97dfd5ed6fb26e1f6a2ddc2c0c9d7a7b3c7f5018588e912294618775c323
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.2/meta-mingw-250617ffa524c082b848487359b9d045703d59c2.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.2/meta-mingw-250617ffa524c082b848487359b9d045703d59c2.tar.bz2
+
+bitbake
+
+- Repository Location: :oe_git:`/bitbake`
+- Branch: :oe_git:`2.4 </bitbake/log/?h=2.4>`
+- Tag: :oe_git:`yocto-4.2 </bitbake/log/?h=yocto-4.2>`
+- Git Revision: :oe_git:`d97d62e2cbe4bae17f0886f3b4759e8f9ba6d38c </bitbake/commit/?id=d97d62e2cbe4bae17f0886f3b4759e8f9ba6d38c>`
+- Release Artefact: bitbake-d97d62e2cbe4bae17f0886f3b4759e8f9ba6d38c
+- sha: 5edcb97cb545011226b778355bb840ebcc790552d4a885a0d83178153697ba7a
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.2/bitbake-d97d62e2cbe4bae17f0886f3b4759e8f9ba6d38c.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.2/bitbake-d97d62e2cbe4bae17f0886f3b4759e8f9ba6d38c.tar.bz2
+
+yocto-docs
+
+- Repository Location: :yocto_git:`/yocto-docs`
+- Branch: :yocto_git:`mickledore </yocto-docs/log/?h=mickledore>`
+- Tag: :yocto_git:`yocto-4.2 </yocto-docs/log/?h=yocto-4.2>`
+- Git Revision: :yocto_git:`4d6807e34adf5d92d9b6e5852736443a867c78fa </yocto-docs/commit/?id=4d6807e34adf5d92d9b6e5852736443a867c78fa>`
+
diff --git a/documentation/migration-guides/release-notes-4.3.1.rst b/documentation/migration-guides/release-notes-4.3.1.rst
new file mode 100644
index 0000000000..cea9c538a2
--- /dev/null
+++ b/documentation/migration-guides/release-notes-4.3.1.rst
@@ -0,0 +1,237 @@
+.. SPDX-License-Identifier: CC-BY-SA-2.0-UK
+
+Release notes for Yocto-4.3.1 (Nanbield)
+----------------------------------------
+
+Security Fixes in Yocto-4.3.1
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+- libsndfile1: Fix :cve:`2022-33065`
+- libxml2: Ignore :cve:`2023-45322`
+- linux-yocto/6.1: Ignore :cve:`2020-27418`, :cve:`2023-31085`, :cve_mitre:`2023-34324`, :cve:`2023-39189`, :cve:`2023-39192`, :cve:`2023-39193`, :cve:`2023-39194`, :cve:`2023-4244`, :cve:`2023-42754`, :cve:`2023-42756`, :cve:`2023-44466`, :cve:`2023-4563`, :cve:`2023-45862`, :cve:`2023-45863`, :cve:`2023-45871`, :cve:`2023-45898`, :cve:`2023-4732`, :cve:`2023-5158`, :cve:`2023-5197` and :cve:`2023-5345`
+- linux-yocto/6.5: Ignore :cve:`2020-27418`, :cve:`2023-1193`, :cve:`2023-39191`, :cve:`2023-39194`, :cve:`2023-40791`, :cve:`2023-44466`, :cve:`2023-45862`, :cve:`2023-45863`, :cve:`2023-4610` and :cve:`2023-4732`
+- openssl: Fix :cve:`2023-5363`
+- pixman: Ignore :cve:`2023-37769`
+- vim: Fix :cve:`2023-46246`
+- zlib: Ignore :cve:`2023-45853`
+
+
+Fixes in Yocto-4.3.1
+~~~~~~~~~~~~~~~~~~~~
+
+- baremetal-helloworld: Pull in fix for race condition on x86-64
+- base: Ensure recipes using mercurial-native have certificates
+- bb-matrix-plot.sh: Show underscores correctly in labels
+- bin_package.bbclass: revert "Inhibit the default dependencies"
+- bitbake: SECURITY.md: add file
+- brief-yoctoprojectqs: use new CDN mirror for sstate
+- bsp-guide: bsp.rst: update beaglebone example
+- bsp-guide: bsp: skip Intel machines no longer supported in Poky
+- build-appliance-image: Update to nanbield head revision
+- contributor-guide: add patchtest section
+- contributor-guide: clarify patchtest usage
+- cve-check: don't warn if a patch is remote
+- cve-check: slightly more verbose warning when adding the same package twice
+- cve-check: sort the package list in the JSON report
+- dev-manual: add security team processes
+- dev-manual: extend the description of CVE patch preparation
+- dev-manual: layers: Add notes about layer.conf
+- dev-manual: new-recipe.rst: add missing parenthesis to "Patching Code" section
+- dev-manual: start.rst: remove obsolete reference
+- dev-manual: wic: update "wic list images" output
+- docs: add support for nanbield (4.3) release
+- documentation.conf: drop SERIAL_CONSOLES_CHECK
+- ell: Upgrade to 0.59
+- glib-2.0: Remove unnecessary assignement
+- goarch: Move Go architecture mapping to a library
+- kernel-arch: drop CCACHE from :term:`KERNEL_STRIP` definition
+- kernel.bbclass: Use strip utility used for kernel build in do_package
+- layer.conf: Switch layer to nanbield series only
+- libsdl2: upgrade to 2.28.4
+- linux-yocto: make sure the pahole-native available before do_kernel_configme
+- llvm: Upgrade to 17.0.3
+- machine: drop obsolete SERIAL_CONSOLES_CHECK
+- manuals: correct "yocto-linux" by "linux-yocto"
+- manuals: improve description of :term:`CVE_STATUS` and :term:`CVE_STATUS_GROUPS`
+- manuals: Remove references to apm in :term:`MACHINE_FEATURES`
+- manuals: update linux-yocto append examples
+- manuals: update list of supported machines
+- migration-4.3: additional migration items
+- migration-4.3: adjustments to existing text
+- migration-4.3: remove some unnecessary items
+- migration-guides: QEMU_USE_SLIRP variable removed
+- migration-guides: add BitBake changes
+- migration-guides: add debian 12 to newly supported distros
+- migration-guides: add kernel notes
+- migration-guides: add testing notes
+- migration-guides: add utility notes
+- migration-guides: edgerouter machine removed
+- migration-guides: enabling :term:`SPDX` only for Poky, not a global default
+- migration-guides: fix empty sections
+- migration-guides: further updates for 4.3
+- migration-guides: further updates for release 4.3
+- migration-guides: git recipes reword
+- migration-guides: mention CDN
+- migration-guides: mention LLVM 17
+- migration-guides: mention runqemu change in serial port management
+- migration-guides: packaging changes
+- migration-guides: remove SERIAL_CONSOLES_CHECK
+- migration-guides: remove non-notable change
+- migration-guides: updates for 4.3
+- oeqa/selftest/debuginfod: improve selftest
+- oeqa/selftest/devtool: abort if a local workspace already exist
+- oeqa/ssh: Handle SSHCall timeout error code
+- openssl: Upgrade to 3.1.4
+- overview-manual: concepts: Add Bitbake Tasks Map
+- patchtest-send-results: add In-Reply-To
+- patchtest-send-results: check max line length, simplify responses
+- patchtest-send-results: fix sender parsing
+- patchtest-send-results: improve subject line
+- patchtest-send-results: send results to submitter
+- patchtest/selftest: add XSKIP, update test files
+- patchtest: disable merge test
+- patchtest: fix lic_files_chksum test regex
+- patchtest: make pylint tests compatible with 3.x
+- patchtest: reduce checksum test output length
+- patchtest: remove test for CVE tag in mbox
+- patchtest: remove unused imports
+- patchtest: rework license checksum tests
+- patchtest: shorten test result outputs
+- patchtest: simplify test directory structure
+- patchtest: skip merge test if not targeting master
+- patchtest: test regardless of mergeability
+- perl: fix intermittent test failure
+- poky.conf: bump version for 4.3.1 release
+- profile-manual: aesthetic cleanups
+- ref-manual: Add documentation for the unimplemented-ptest QA warning
+- ref-manual: Fix :term:`PACKAGECONFIG` term and add an example
+- ref-manual: Warn about :term:`COMPATIBLE_MACHINE` skipping native recipes
+- ref-manual: add systemd-resolved to distro features
+- ref-manual: classes: explain cml1 class name
+- ref-manual: document :term:`KERNEL_LOCALVERSION`
+- ref-manual: document :term:`KERNEL_STRIP`
+- ref-manual: document :term:`MESON_TARGET`
+- ref-manual: document cargo_c class
+- ref-manual: remove semicolons from ``*PROCESS_COMMAND`` variables
+- ref-manual: update :term:`SDK_NAME` variable documentation
+- ref-manual: variables: add :term:`RECIPE_MAINTAINER`
+- ref-manual: variables: add :term:`RECIPE_SYSROOT` and :term:`RECIPE_SYSROOT_NATIVE`
+- ref-manual: variables: add :term:`TOOLCHAIN_OPTIONS` variable
+- ref-manual: variables: add example for :term:`SYSROOT_DIRS` variable
+- ref-manual: variables: document :term:`OEQA_REPRODUCIBLE_TEST_PACKAGE`
+- ref-manual: variables: mention new CDN for :term:`SSTATE_MIRRORS`
+- ref-manual: variables: provide no-match example for :term:`COMPATIBLE_MACHINE`
+- ref-manual: variables: remove SERIAL_CONSOLES_CHECK
+- release-notes-4.3: add CVEs, recipe upgrades, license changes, contributors
+- release-notes-4.3: add Repositories / Downloads section
+- release-notes-4.3: feature additions
+- release-notes-4.3: fix some typos
+- release-notes-4.3: move new classes to Rust section
+- release-notes-4.3: remove the Distribution section
+- release-notes-4.3: tweaks to existing text
+- sdk-manual: appendix-obtain: improve and update descriptions
+- test-manual: reproducible-builds: stop mentioning LTO bug
+- vim: Improve locale handling
+- vim: Upgrade to 9.0.2068
+- vim: use upstream generated .po files
+
+
+Known Issues in Yocto-4.3.1
+~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+- N/A
+
+
+Contributors to Yocto-4.3.1
+~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+- Alejandro Hernandez Samaniego
+- Alex Stewart
+- Archana Polampalli
+- Arne Schwerdt
+- BELHADJ SALEM Talel
+- Dmitry Baryshkov
+- Eero Aaltonen
+- Joshua Watt
+- Julien Stephan
+- Jérémy Rosen
+- Khem Raj
+- Lee Chee Yang
+- Marta Rybczynska
+- Max Krummenacher
+- Michael Halstead
+- Michael Opdenacker
+- Paul Eggleton
+- Peter Kjellerstedt
+- Peter Marko
+- Quentin Schulz
+- Richard Purdie
+- Robert P. J. Day
+- Ross Burton
+- Rouven Czerwinski
+- Steve Sakoman
+- Trevor Gamblin
+- Wang Mingyu
+- William Lyu
+- Xiangyu Chen
+- luca fancellu
+
+
+Repositories / Downloads for Yocto-4.3.1
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+poky
+
+- Repository Location: :yocto_git:`/poky`
+- Branch: :yocto_git:`nanbield </poky/log/?h=nanbield>`
+- Tag: :yocto_git:`yocto-4.3.1 </poky/log/?h=yocto-4.3.1>`
+- Git Revision: :yocto_git:`bf9f2f6f60387b3a7cd570919cef6c4570edcb82 </poky/commit/?id=bf9f2f6f60387b3a7cd570919cef6c4570edcb82>`
+- Release Artefact: poky-bf9f2f6f60387b3a7cd570919cef6c4570edcb82
+- sha: 9b4351159d728fec2b63a50f1ac15edc412e2d726e9180a40afc06051fadb922
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.3.1/poky-bf9f2f6f60387b3a7cd570919cef6c4570edcb82.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.3.1/poky-bf9f2f6f60387b3a7cd570919cef6c4570edcb82.tar.bz2
+
+openembedded-core
+
+- Repository Location: :oe_git:`/openembedded-core`
+- Branch: :oe_git:`nanbield </openembedded-core/log/?h=nanbield>`
+- Tag: :oe_git:`yocto-4.3.1 </openembedded-core/log/?h=yocto-4.3.1>`
+- Git Revision: :oe_git:`cce77e8e79c860f4ef0ac4a86b9375bf87507360 </openembedded-core/commit/?id=cce77e8e79c860f4ef0ac4a86b9375bf87507360>`
+- Release Artefact: oecore-cce77e8e79c860f4ef0ac4a86b9375bf87507360
+- sha: e6cde08e7c549f57a67d833a36cdb942648fba81558dc8b0e65332d2a2c023cc
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.3.1/oecore-cce77e8e79c860f4ef0ac4a86b9375bf87507360.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.3.1/oecore-cce77e8e79c860f4ef0ac4a86b9375bf87507360.tar.bz2
+
+meta-mingw
+
+- Repository Location: :yocto_git:`/meta-mingw`
+- Branch: :yocto_git:`nanbield </meta-mingw/log/?h=nanbield>`
+- Tag: :yocto_git:`yocto-4.3.1 </meta-mingw/log/?h=yocto-4.3.1>`
+- Git Revision: :yocto_git:`49617a253e09baabbf0355bc736122e9549c8ab2 </meta-mingw/commit/?id=49617a253e09baabbf0355bc736122e9549c8ab2>`
+- Release Artefact: meta-mingw-49617a253e09baabbf0355bc736122e9549c8ab2
+- sha: 2225115b73589cdbf1e491115221035c6a61679a92a93b2a3cf761ff87bf4ecc
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.3.1/meta-mingw-49617a253e09baabbf0355bc736122e9549c8ab2.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.3.1/meta-mingw-49617a253e09baabbf0355bc736122e9549c8ab2.tar.bz2
+
+bitbake
+
+- Repository Location: :oe_git:`/bitbake`
+- Branch: :oe_git:`2.6 </bitbake/log/?h=2.6>`
+- Tag: :oe_git:`yocto-4.3.1 </bitbake/log/?h=yocto-4.3.1>`
+- Git Revision: :oe_git:`936fcec41efacc4ce988c81882a9ae6403702bea </bitbake/commit/?id=936fcec41efacc4ce988c81882a9ae6403702bea>`
+- Release Artefact: bitbake-936fcec41efacc4ce988c81882a9ae6403702bea
+- sha: efbdd5fe7f29227a3fd26d6a08a368bf8215083a588b4d23f3adf35044897520
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.3.1/bitbake-936fcec41efacc4ce988c81882a9ae6403702bea.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.3.1/bitbake-936fcec41efacc4ce988c81882a9ae6403702bea.tar.bz2
+
+yocto-docs
+
+- Repository Location: :yocto_git:`/yocto-docs`
+- Branch: :yocto_git:`nanbield </yocto-docs/log/?h=nanbield>`
+- Tag: :yocto_git:`yocto-4.3.1 </yocto-docs/log/?h=yocto-4.3.1>`
+- Git Revision: :yocto_git:`6b98a6164263298648e89b5a5ae1260a58f1bb35 </yocto-docs/commit/?id=6b98a6164263298648e89b5a5ae1260a58f1bb35>`
+
diff --git a/documentation/migration-guides/release-notes-4.3.2.rst b/documentation/migration-guides/release-notes-4.3.2.rst
new file mode 100644
index 0000000000..3a40d83bc2
--- /dev/null
+++ b/documentation/migration-guides/release-notes-4.3.2.rst
@@ -0,0 +1,247 @@
+.. SPDX-License-Identifier: CC-BY-SA-2.0-UK
+
+Release notes for Yocto-4.3.2 (Nanbield)
+----------------------------------------
+
+Security Fixes in Yocto-4.3.2
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+- avahi: Fix :cve:`2023-1981`, :cve:`2023-38469`, :cve:`2023-38470`, :cve:`2023-38471`, :cve:`2023-38472` and :cve:`2023-38473`
+- curl: Fix :cve:`2023-46218`
+- ghostscript: Fix :cve:`2023-46751`
+- grub: fix :cve:`2023-4692` and :cve:`2023-4693`
+- gstreamer1.0: Fix :cve_mitre:`2023-44446`
+- linux-yocto/6.1: Ignore :cve_mitre:`2023-39197`, :cve:`2023-39198`, :cve:`2023-5090`, :cve:`2023-5633`, :cve:`2023-6111`, :cve:`2023-6121` and :cve:`2023-6176`
+- linux-yocto/6.5: Ignore :cve:`2022-44034`, :cve_mitre:`2023-39197`, :cve:`2023-39198`, :cve:`2023-5972`, :cve:`2023-6039`, :cve:`2023-6111` and :cve:`2023-6176`
+- perl: fix :cve:`2023-47100`
+- python3-urllib3: Fix :cve:`2023-45803`
+- rust: Fix :cve:`2023-40030`
+- vim: Fix :cve:`2023-48231`, :cve:`2023-48232`, :cve:`2023-48233`, :cve:`2023-48234`, :cve:`2023-48235`, :cve:`2023-48236` and :cve:`2023-48237`
+- xserver-xorg: Fix :cve:`2023-5367` and :cve:`2023-5380`
+- xwayland: Fix :cve:`2023-5367`
+
+
+Fixes in Yocto-4.3.2
+~~~~~~~~~~~~~~~~~~~~
+
+- base-passwd: Upgrade to 3.6.2
+- bind: Upgrade to 9.18.20
+- binutils: stable 2.41 branch updates
+- bitbake: command: Make parseRecipeFile() handle virtual recipes correctly
+- bitbake: lib/bb: Add workaround for libgcc issues with python 3.8 and 3.9
+- bitbake: toastergui: verify that an existing layer path is given
+- bluez5: fix connection for ps5/dualshock controllers
+- build-appliance-image: Update to nanbield head revision
+- cmake: Upgrade to 3.27.7
+- contributor-guide: add License-Update tag
+- contributor-guide: fix command option
+- cups: Add root,sys,wheel to system groups
+- cve-update-nvd2-native: faster requests with API keys
+- cve-update-nvd2-native: increase the delay between subsequent request failures
+- cve-update-nvd2-native: make number of fetch attemtps configurable
+- cve-update-nvd2-native: remove unused variable CVE_SOCKET_TIMEOUT
+- dev-manual: Discourage the use of SRC_URI[md5sum]
+- dev-manual: layers: update link to YP Compatible form
+- dev-manual: runtime-testing: fix test module name
+- devtool: finish/update-recipe: restrict mode srcrev to recipes fetched from SCM
+- devtool: fix update-recipe dry-run mode
+- ell: Upgrade to 0.60
+- enchant2: Upgrade to 2.6.2
+- ghostscript: Upgrade to 10.02.1
+- glib-2.0: Upgrade to 2.78.1
+- glibc: stable 2.38 branch updates
+- gstreamer1.0: Upgrade to 1.22.7
+- gtk: Add rdepend on printbackend for cups
+- harfbuzz: Upgrade to 8.2.2
+- json-c: fix icecc compilation
+- kern-tools: bump :term:`SRCREV` for queue processing changes
+- kern-tools: make lower context patches reproducible
+- kern-tools: update :term:`SRCREV` to include SECURITY.md file
+- kernel-arch: use ccache only for compiler
+- kernel-yocto: improve metadata patching
+- lib/oe/buildcfg.py: Include missing import
+- lib/oe/buildcfg.py: Remove unused parameter
+- lib/oe/patch: ensure os.chdir restoring always happens
+- lib/oe/path: Deploy files can start only with a dot
+- libgcrypt: Upgrade to 1.10.3
+- libjpeg-turbo: Upgrade to 3.0.1
+- libnewt: Upgrade to 0.52.24
+- libnsl2: Upgrade to 2.0.1
+- libsolv: Upgrade to 0.7.26
+- libxslt: Upgrade to 1.1.39
+- linux-firmware: add audio topology symlink to the X13's audio package
+- linux-firmware: add missing depenencies on license packages
+- linux-firmware: add new fw file to ${PN}-rtl8821
+- linux-firmware: add notice file to sdm845 modem firmware
+- linux-firmware: create separate packages
+- linux-firmware: package Qualcomm Venus 6.0 firmware
+- linux-firmware: package Robotics RB5 sensors DSP firmware
+- linux-firmware: package firmware for Qualcomm Adreno a702
+- linux-firmware: package firmware for Qualcomm QCM2290 / QRB4210
+- linux-firmware: Upgrade to 20231030
+- linux-yocto-rt/6.1: update to -rt18
+- linux-yocto/6.1: cfg: restore CONFIG_DEVMEM
+- linux-yocto/6.1: drop removed IMA option
+- linux-yocto/6.1: Upgrade to v6.1.68
+- linux-yocto/6.5: cfg: restore CONFIG_DEVMEM
+- linux-yocto/6.5: cfg: split runtime and symbol debug
+- linux-yocto/6.5: drop removed IMA option
+- linux-yocto/6.5: fix AB-INT: QEMU kernel panic: No irq handler for vector
+- linux-yocto/6.5: Upgrade to v6.5.13
+- linux/cve-exclusion6.1: Update to latest kernel point release
+- log4cplus: Upgrade to 2.1.1
+- lsb-release: use https for :term:`UPSTREAM_CHECK_URI`
+- manuals: brief-yoctoprojectqs: align variable order with default local.conf
+- manuals: fix URL
+- meson: use correct targets for rust binaries
+- migration-guide: add release notes for 4.0.14, 4.0.15, 4.2.4, 4.3.1
+- migration-guides: release 3.5 is actually 4.0
+- migration-guides: reword fix in release-notes-4.3.1
+- msmtp: Upgrade to 1.8.25
+- oeqa/selftest/tinfoil: Add tests that parse virtual recipes
+- openssl: improve handshake test error reporting
+- package_ipk: Fix Source: field variable dependency
+- patchtest: shorten patch signed-off-by test output
+- perf: lift :term:`TARGET_CC_ARCH` modification out of security_flags.inc
+- perl: Upgrade to 5.38.2
+- perlcross: Upgrade to 1.5.2
+- poky.conf: bump version for 4.3.2 release
+- python3-ptest: skip test_storlines
+- python3-urllib3: Upgrade to 2.0.7
+- qemu: Upgrade to 8.1.2
+- ref-manual: Fix reference to MIRRORS/PREMIRRORS defaults
+- ref-manual: releases.svg: update nanbield release status
+- useradd_base: sed -i destroys symlinks
+- rootfs-postcommands: sed -i destroys symlinks
+- sstate: Ensure sstate searches update file mtime
+- strace: backport fix for so_peerpidfd-test
+- systemd-boot: Fix build issues on armv7a-linux
+- systemd-compat-units.bb: fix postinstall script
+- systemd: fix DynamicUser issue
+- systemd: update :term:`LICENSE` statement
+- tcl: skip async and event tests in run-ptest
+- tcl: skip timing-dependent tests in run-ptest
+- test-manual: add links to python unittest
+- test-manual: add or improve hyperlinks
+- test-manual: explicit or fix file paths
+- test-manual: resource updates
+- test-manual: text and formatting fixes
+- test-manual: use working example
+- testimage: Drop target_dumper and most of monitor_dumper
+- testimage: Exclude wtmp from target-dumper commands
+- tzdata: Upgrade to 2023d
+- update_gtk_icon_cache: Fix for GTK4-only builds
+- useradd_base: Fix sed command line for passwd-expire
+- vim: Upgrade to 9.0.2130
+- xserver-xorg: Upgrade to 21.1.9
+- xwayland: Upgrade to 23.2.2
+
+
+Known Issues in Yocto-4.3.2
+~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+- N/A
+
+Contributors to Yocto-4.3.2
+~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+- Adam Johnston
+- Alexander Kanavin
+- Anuj Mittal
+- Bastian Krause
+- Bruce Ashfield
+- Chen Qi
+- Deepthi Hemraj
+- Dhairya Nagodra
+- Dmitry Baryshkov
+- Fahad Arslan
+- Javier Tia
+- Jermain Horsman
+- Joakim Tjernlund
+- Julien Stephan
+- Justin Bronder
+- Khem Raj
+- Lee Chee Yang
+- Marco Felsch
+- Markus Volk
+- Marta Rybczynska
+- Massimiliano Minella
+- Michael Opdenacker
+- Paul Barker
+- Peter Kjellerstedt
+- Peter Marko
+- Randy MacLeod
+- Rasmus Villemoes
+- Richard Purdie
+- Ross Burton
+- Shubham Kulkarni
+- Simone Weiß
+- Steve Sakoman
+- Sundeep KOKKONDA
+- Tim Orling
+- Trevor Gamblin
+- Vijay Anusuri
+- Viswanath Kraleti
+- Vyacheslav Yurkov
+- Wang Mingyu
+- William Lyu
+- Zoltán Böszörményi
+
+Repositories / Downloads for Yocto-4.3.2
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+poky
+
+- Repository Location: :yocto_git:`/poky`
+- Branch: :yocto_git:`nanbield </poky/log/?h=nanbield>`
+- Tag: :yocto_git:`yocto-4.3.2 </poky/log/?h=yocto-4.3.2>`
+- Git Revision: :yocto_git:`f768ffb8916feb6542fcbe3e946cbf30e247b151 </poky/commit/?id=f768ffb8916feb6542fcbe3e946cbf30e247b151>`
+- Release Artefact: poky-f768ffb8916feb6542fcbe3e946cbf30e247b151
+- sha: 21ca1695d70aba9b4bd8626d160111feab76206883cd14fe41eb024692bdfd7b
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.3.2/poky-f768ffb8916feb6542fcbe3e946cbf30e247b151.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.3.2/poky-f768ffb8916feb6542fcbe3e946cbf30e247b151.tar.bz2
+
+openembedded-core
+
+- Repository Location: :oe_git:`/openembedded-core`
+- Branch: :oe_git:`nanbield </openembedded-core/log/?h=nanbield>`
+- Tag: :oe_git:`yocto-4.3.2 </openembedded-core/log/?h=yocto-4.3.2>`
+- Git Revision: :oe_git:`ff595b937d37d2315386aebf315cea719e2362ea </openembedded-core/commit/?id=ff595b937d37d2315386aebf315cea719e2362ea>`
+- Release Artefact: oecore-ff595b937d37d2315386aebf315cea719e2362ea
+- sha: a7c6332dc0e09ecc08221e78b11151e8e2a3fd9fa3eaad96a4c03b67012bfb97
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.3.2/oecore-ff595b937d37d2315386aebf315cea719e2362ea.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.3.2/oecore-ff595b937d37d2315386aebf315cea719e2362ea.tar.bz2
+
+meta-mingw
+
+- Repository Location: :yocto_git:`/meta-mingw`
+- Branch: :yocto_git:`nanbield </meta-mingw/log/?h=nanbield>`
+- Tag: :yocto_git:`yocto-4.3.2 </meta-mingw/log/?h=yocto-4.3.2>`
+- Git Revision: :yocto_git:`49617a253e09baabbf0355bc736122e9549c8ab2 </meta-mingw/commit/?id=49617a253e09baabbf0355bc736122e9549c8ab2>`
+- Release Artefact: meta-mingw-49617a253e09baabbf0355bc736122e9549c8ab2
+- sha: 2225115b73589cdbf1e491115221035c6a61679a92a93b2a3cf761ff87bf4ecc
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.3.2/meta-mingw-49617a253e09baabbf0355bc736122e9549c8ab2.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.3.2/meta-mingw-49617a253e09baabbf0355bc736122e9549c8ab2.tar.bz2
+
+bitbake
+
+- Repository Location: :oe_git:`/bitbake`
+- Branch: :oe_git:`2.6 </bitbake/log/?h=2.6>`
+- Tag: :oe_git:`yocto-4.3.2 </bitbake/log/?h=yocto-4.3.2>`
+- Git Revision: :oe_git:`72bf75f0b2e7f36930185e18a1de8277ce7045d8 </bitbake/commit/?id=72bf75f0b2e7f36930185e18a1de8277ce7045d8>`
+- Release Artefact: bitbake-72bf75f0b2e7f36930185e18a1de8277ce7045d8
+- sha: 0b6ccd4796ccd211605090348a3d4378358c839ae1bb4c35964d0f36f2663187
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.3.2/bitbake-72bf75f0b2e7f36930185e18a1de8277ce7045d8.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.3.2/bitbake-72bf75f0b2e7f36930185e18a1de8277ce7045d8.tar.bz2
+
+yocto-docs
+
+- Repository Location: :yocto_git:`/yocto-docs`
+- Branch: :yocto_git:`nanbield </yocto-docs/log/?h=nanbield>`
+- Tag: :yocto_git:`yocto-4.3.2 </yocto-docs/log/?h=yocto-4.3.2>`
+- Git Revision: :yocto_git:`fac88b9e80646a68b31975c915a718a9b6b2b439 </yocto-docs/commit/?id=fac88b9e80646a68b31975c915a718a9b6b2b439>`
+
diff --git a/documentation/migration-guides/release-notes-4.3.3.rst b/documentation/migration-guides/release-notes-4.3.3.rst
new file mode 100644
index 0000000000..2a0658a9c9
--- /dev/null
+++ b/documentation/migration-guides/release-notes-4.3.3.rst
@@ -0,0 +1,200 @@
+.. SPDX-License-Identifier: CC-BY-SA-2.0-UK
+
+Release notes for Yocto-4.3.3 (Nanbield)
+----------------------------------------
+
+Security Fixes in Yocto-4.3.3
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+- curl: Fix :cve:`2023-46219`
+- glibc: Ignore fixed :cve:`2023-0687` and :cve:`2023-5156`
+- linux-yocto/6.1: Ignore :cve:`2022-48619`, :cve:`2023-4610`, :cve:`2023-5178`, :cve:`2023-5972`, :cve:`2023-6040`, :cve:`2023-6531`, :cve:`2023-6546`, :cve:`2023-6622`, :cve:`2023-6679`, :cve:`2023-6817`, :cve:`2023-6931`, :cve:`2023-6932`, :cve:`2023-7192`, :cve:`2024-0193` and :cve:`2024-0443`
+- linux-yocto/6.1: Fix :cve:`2023-1193`, :cve_mitre:`2023-51779`, :cve:`2023-51780`, :cve:`2023-51781`, :cve:`2023-51782` and :cve:`2023-6606`
+- qemu: Fix :cve:`2023-3019`
+- shadow: Fix :cve:`2023-4641`
+- sqlite3: Fix :cve:`2024-0232`
+- sqlite3: drop obsolete CVE ignore :cve:`2023-36191`
+- sudo: Fix :cve:`2023-42456` and :cve:`2023-42465`
+- tiff: Fix :cve:`2023-6277`
+- xwayland: Fix :cve:`2023-6377` and :cve:`2023-6478`
+
+
+Fixes in Yocto-4.3.3
+~~~~~~~~~~~~~~~~~~~~
+
+- aspell: upgrade to 0.60.8.1
+- avahi: update URL for new project location
+- base-passwd: upgrade to 3.6.3
+- bitbake: asyncrpc: Add context manager API
+- bitbake: toaster/toastergui: Bug-fix verify given layer path only if import/add local layer
+- build-appliance-image: Update to nanbield head revision
+- classes-global/sstate: Fix variable typo
+- cmake: Unset CMAKE_CXX_IMPLICIT_INCLUDE_DIRECTORIES
+- contributor-guide: fix lore URL
+- contributor-guide: use "apt" instead of "aptitude"
+- create-spdx-2.2: combine spdx can try to write before dir creation
+- curl: Disable test 1091 due to intermittent failures
+- curl: Disable two intermittently failing tests
+- dev-manual: gen-tapdevs need iptables installed
+- dev-manual: start.rst: Update use of Download page
+- dev-manual: update license manifest path
+- devtool: deploy: provide max_process to strip_execs
+- devtool: modify: Handle recipes with a menuconfig task correctly
+- docs: document VSCode extension
+- dtc: preserve version also from shallow git clones
+- elfutils: Update license information
+- glib-2.0: upgrade to 2.78.3
+- glibc-y2038-tests: do not run tests using 32 bit time APIs
+- go: upgrade to 1.20.12
+- grub: fs/fat: Don't error when mtime is 0
+- gstreamer1.0: upgrade to 1.22.8
+- icon-naming-utils: take tarball from debian
+- kea: upgrade to 2.4.1
+- lib/prservice: Improve lock handling robustness
+- libadwaita: upgrade to 1.4.2
+- libatomic-ops: upgrade to 7.8.2
+- libva-utils: upgrade to 2.20.1
+- linux-firmware: Change bnx2 packaging
+- linux-firmware: Create bnx2x subpackage
+- linux-firmware: Fix the linux-firmware-bcm4373 :term:`FILES` variable
+- linux-firmware: Package iwlwifi .pnvm files
+- linux-yocto/6.1: security/cfg: add configs to harden protection
+- linux-yocto/6.1: update to v6.1.73
+- meta/documentation.conf: fix do_menuconfig description
+- migration-guide: add release notes for 4.0.16
+- migration-guide: add release notes for 4.3.2
+- ncurses: Fix - tty is hung after reset
+- nfs-utils: Update Upstream-Status
+- nfs-utils: upgrade to 2.6.4
+- oeqa/selftest/prservice: Improve test robustness
+- package.py: OEHasPackage: Add :term:`MLPREFIX` to packagename
+- poky.conf: bump version for 4.3.3 release
+- pseudo: Update to pull in syncfs probe fix
+- python3-license-expression: Fix the ptest failure
+- qemu.bbclass: fix a python TypeError
+- qemu: upgrade to 8.1.4
+- ref-manual: Add UBOOT_BINARY, extend :term:`UBOOT_CONFIG`
+- ref-manual: classes: remove insserv bbclass
+- ref-manual: update tested and supported distros
+- release-notes-4.3: fix spacing
+- rootfs.py: check depmodwrapper execution result
+- rpcbind: Specify state directory under /run
+- scripts/runqemu: fix regex escape sequences
+- sqlite3: upgrade to 3.43.2
+- sstate: Fix dir ownership issues in :term:`SSTATE_DIR`
+- sudo: upgrade to 1.9.15p5
+- tcl: Fix prepending to run-ptest script
+- uninative-tarball.xz - reproducibility fix
+- xwayland: upgrade to 23.2.3
+- zstd: fix :term:`LICENSE` statement
+
+
+Known Issues in Yocto-4.3.3
+~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+- N/A
+
+
+Contributors to Yocto-4.3.3
+~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+- Alassane Yattara
+- Alexander Kanavin
+- Anuj Mittal
+- Baruch Siach
+- Bruce Ashfield
+- Chen Qi
+- Clay Chang
+- Enguerrand de Ribaucourt
+- Ilya A. Kriveshko
+- Jason Andryuk
+- Jeremy A. Puhlman
+- Joao Marcos Costa
+- Jose Quaresma
+- Joshua Watt
+- Jörg Sommer
+- Khem Raj
+- Lee Chee Yang
+- Markus Volk
+- Massimiliano Minella
+- Maxin B. John
+- Michael Opdenacker
+- Ming Liu
+- Mingli Yu
+- Peter Kjellerstedt
+- Peter Marko
+- Richard Purdie
+- Robert Berger
+- Robert Yang
+- Rodrigo M. Duarte
+- Ross Burton
+- Saul Wold
+- Simone Weiß
+- Soumya Sambu
+- Steve Sakoman
+- Trevor Gamblin
+- Wang Mingyu
+- William Lyu
+- Xiangyu Chen
+- Yang Xu
+- Zahir Hussain
+
+
+Repositories / Downloads for Yocto-4.3.3
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+poky
+
+- Repository Location: :yocto_git:`/poky`
+- Branch: :yocto_git:`nanbield </poky/log/?h=nanbield>`
+- Tag: :yocto_git:`yocto-4.3.3 </poky/log/?h=yocto-4.3.3>`
+- Git Revision: :yocto_git:`d3b27346c3a4a7ef7ec517e9d339d22bda74349d </poky/commit/?id=d3b27346c3a4a7ef7ec517e9d339d22bda74349d>`
+- Release Artefact: poky-d3b27346c3a4a7ef7ec517e9d339d22bda74349d
+- sha: 2db39f1bf7bbcee039e9970eed1f6f9233bcc95d675159647c9a2a334fc81eb0
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.3.3/poky-d3b27346c3a4a7ef7ec517e9d339d22bda74349d.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.3.3/poky-d3b27346c3a4a7ef7ec517e9d339d22bda74349d.tar.bz2
+
+openembedded-core
+
+- Repository Location: :oe_git:`/openembedded-core`
+- Branch: :oe_git:`nanbield </openembedded-core/log/?h=nanbield>`
+- Tag: :oe_git:`yocto-4.3.3 </openembedded-core/log/?h=yocto-4.3.3>`
+- Git Revision: :oe_git:`0584d01f623e1f9b0fef4dfa95dd66de6cbfb7b3 </openembedded-core/commit/?id=0584d01f623e1f9b0fef4dfa95dd66de6cbfb7b3>`
+- Release Artefact: oecore-0584d01f623e1f9b0fef4dfa95dd66de6cbfb7b3
+- sha: 730de0d5744f139322402ff9a6b2483c6ab929f704cec06258ae51de1daebe3d
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.3.3/oecore-0584d01f623e1f9b0fef4dfa95dd66de6cbfb7b3.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.3.3/oecore-0584d01f623e1f9b0fef4dfa95dd66de6cbfb7b3.tar.bz2
+
+meta-mingw
+
+- Repository Location: :yocto_git:`/meta-mingw`
+- Branch: :yocto_git:`nanbield </meta-mingw/log/?h=nanbield>`
+- Tag: :yocto_git:`yocto-4.3.3 </meta-mingw/log/?h=yocto-4.3.3>`
+- Git Revision: :yocto_git:`49617a253e09baabbf0355bc736122e9549c8ab2 </meta-mingw/commit/?id=49617a253e09baabbf0355bc736122e9549c8ab2>`
+- Release Artefact: meta-mingw-49617a253e09baabbf0355bc736122e9549c8ab2
+- sha: 2225115b73589cdbf1e491115221035c6a61679a92a93b2a3cf761ff87bf4ecc
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.3.3/meta-mingw-49617a253e09baabbf0355bc736122e9549c8ab2.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.3.3/meta-mingw-49617a253e09baabbf0355bc736122e9549c8ab2.tar.bz2
+
+bitbake
+
+- Repository Location: :oe_git:`/bitbake`
+- Branch: :oe_git:`2.6 </bitbake/log/?h=2.6>`
+- Tag: :oe_git:`yocto-4.3.3 </bitbake/log/?h=yocto-4.3.3>`
+- Git Revision: :oe_git:`380a9ac97de5774378ded5e37d40b79b96761a0c </bitbake/commit/?id=380a9ac97de5774378ded5e37d40b79b96761a0c>`
+- Release Artefact: bitbake-380a9ac97de5774378ded5e37d40b79b96761a0c
+- sha: 78f579b9d29e72d09b6fb10ac62aa925104335e92d2afb3155bc9ab1994e36c1
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.3.3/bitbake-380a9ac97de5774378ded5e37d40b79b96761a0c.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.3.3/bitbake-380a9ac97de5774378ded5e37d40b79b96761a0c.tar.bz2
+
+yocto-docs
+
+- Repository Location: :yocto_git:`/yocto-docs`
+- Branch: :yocto_git:`nanbield </yocto-docs/log/?h=nanbield>`
+- Tag: :yocto_git:`yocto-4.3.3 </yocto-docs/log/?h=yocto-4.3.3>`
+- Git Revision: :yocto_git:`dde4b815db82196af086847f68ee27d7902b4ffa </yocto-docs/commit/?id=dde4b815db82196af086847f68ee27d7902b4ffa>`
+
diff --git a/documentation/migration-guides/release-notes-4.3.4.rst b/documentation/migration-guides/release-notes-4.3.4.rst
new file mode 100644
index 0000000000..4c9e67f2cb
--- /dev/null
+++ b/documentation/migration-guides/release-notes-4.3.4.rst
@@ -0,0 +1,206 @@
+.. SPDX-License-Identifier: CC-BY-SA-2.0-UK
+
+Release notes for Yocto-4.3.4 (Nanbield)
+----------------------------------------
+
+Security Fixes in Yocto-4.3.4
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+- bind: Fix :cve:`2023-4408`, :cve:`2023-5517`, :cve:`2023-5679` and :cve:`2023-50387`
+- gcc: Update :term:`CVE_STATUS` for :cve:`2023-4039` as fixed
+- glibc: Fix :cve:`2023-6246`, :cve:`2023-6779` and :cve:`2023-6780`
+- gnutls: Fix :cve:`2024-0553` and :cve:`2024-0567`
+- gstreamer: Fix :cve_mitre:`2024-0444`
+- libssh2: fix :cve:`2023-48795`
+- libxml2: Fix :cve:`2024-25062`
+- linux-yocto/6.1: Fix :cve:`2023-6610`, :cve:`2023-6915`, :cve:`2023-46838`, :cve:`2023-50431`, :cve:`2024-1085`, :cve:`2024-1086` and :cve:`2024-23849`
+- linux-yocto/6.1: Ignore :cve:`2021-33630`, :cve:`2021-33631`, :cve:`2022-36402`, :cve:`2023-5717`, :cve:`2023-6200`, :cve:`2023-35827`, :cve:`2023-40791`, :cve:`2023-46343`, :cve:`2023-46813`, :cve:`2023-46862`, :cve:`2023-51042`, :cve:`2023-51043`, :cve_mitre:`2023-52340`, :cve:`2024-0562`, :cve:`2024-0565`, :cve:`2024-0582`, :cve:`2024-0584`, :cve:`2024-0607`, :cve:`2024-0639`, :cve:`2024-0641`, :cve:`2024-0646`, :cve:`2024-0775` and :cve:`2024-22705`
+- openssl: fix :cve:`2024-0727`
+- python3-jinja2: Fix :cve:`2024-22195`
+- tiff: Fix :cve:`2023-6228`, :cve:`2023-52355` and :cve:`2023-52356`
+- vim: Fix :cve:`2024-22667`
+- wpa-supplicant: Fix :cve:`2023-52160`
+- xserver-xorg: Fix :cve:`2023-6377`, :cve:`2023-6478`, :cve:`2023-6816`, :cve:`2024-0229`, :cve:`2024-0408`, :cve:`2024-0409`, :cve:`2024-21885` and :cve:`2024-21886`
+- xwayland: Fix :cve:`2023-6816`, :cve:`2024-0408` and :cve:`2024-0409`
+- zlib: Ignore :cve:`2023-6992`
+
+
+Fixes in Yocto-4.3.4
+~~~~~~~~~~~~~~~~~~~~
+
+- allarch: Fix allarch corner case
+- at-spi2-core: Upgrade to 2.50.1
+- bind: Upgrade to 9.18.24
+- build-appliance-image: Update to nanbield head revision
+- contributor-guide: add notes for tests
+- contributor-guide: be more specific about meta-* trees
+- core-image-ptest: Increase disk size to 1.5G for strace ptest image
+- cpio: Upgrade to 2.15
+- curl: improve run-ptest
+- curl: increase test timeouts
+- cve-check: Log if :term:`CVE_STATUS` set but not reported for component
+- cve-update-nvd2-native: Add an age threshold for incremental update
+- cve-update-nvd2-native: Fix CVE configuration update
+- cve-update-nvd2-native: Fix typo in comment
+- cve-update-nvd2-native: Remove duplicated CVE_CHECK_DB_FILE definition
+- cve-update-nvd2-native: Remove rejected CVE from database
+- cve-update-nvd2-native: nvd_request_next: Improve comment
+- cve_check: cleanup logging
+- cve_check: handle :term:`CVE_STATUS` being set to the empty string
+- dev-manual: Rephrase spdx creation
+- dev-manual: improve descriptions of 'bitbake -S printdiff'
+- dev-manual: packages: clarify shared :term:`PR` service constraint
+- dev-manual: packages: fix capitalization
+- dev-manual: packages: need enough free space
+- docs: add initial stylechecks with Vale
+- docs: correct sdk installation default path
+- docs: document VIRTUAL-RUNTIME variables
+- docs: suppress excess use of "following" word
+- docs: use "manual page(s)"
+- docs: Makefile: remove releases.rst in "make clean"
+- externalsrc: fix task dependency for do_populate_lic
+- glibc: Remove duplicate :term:`CVE_STATUS` for :cve:`2023-4527`
+- glibc: stable 2.38 branch updates (2.38+gitd37c2b20a4)
+- gnutls: Upgrade to 3.8.3
+- gstreamer1.0: skip a test that is known to be flaky
+- gstreamer: Upgrade to 1.22.9
+- gtk: Set :term:`CVE_PRODUCT`
+- kernel.bbclass: Set pkg-config variables for building modules
+- libxml2: Upgrade to 2.11.7
+- linux-firmware: Upgrade to 20240220
+- linux-yocto/6.1: update to v6.1.78
+- mdadm: Disable ptests
+- migration-guides: add release notes for 4.3.3
+- migration-guides: add release notes for 4.0.17
+- migration-guides: fix release notes for 4.3.3 linux-yocto/6.1 CVE entries
+- multilib_global.bbclass: fix parsing error with no kernel module split
+- openssl: fix crash on aarch64 if BTI is enabled but no Crypto instructions
+- openssl: Upgrade to 3.1.5
+- overlayfs: add missing closing parenthesis in selftest
+- poky.conf: bump version for 4.3.4 release
+- profile-manual: usage.rst: fix reference to bug report
+- profile-manual: usage.rst: formatting fixes
+- profile-manual: usage.rst: further style improvements
+- pseudo: Update to pull in gcc14 fix and missing statvfs64 intercept
+- python3-jinja2: Upgrade to 3.1.3
+- ref-manual: release-process: grammar fix
+- ref-manual: system-requirements: update packages to build docs
+- ref-manual: tasks: do_cleanall: recommend using '-f' instead
+- ref-manual: tasks: do_cleansstate: recommend using '-f' instead for a shared sstate
+- ref-manual: variables: adding multiple groups in :term:`GROUPADD_PARAM`
+- ref-manual: variables: add documentation of the variable :term:`SPDX_NAMESPACE_PREFIX`
+- reproducible: Fix race with externalsrc/devtool over lockfile
+- sdk-manual: extensible: correctly describe separate build-sysroots tasks in direct sdk workflows
+- tzdata : Upgrade to 2024a
+- udev-extraconf: fix unmount directories containing octal-escaped chars
+- vim: Upgrade to v9.0.2190
+- wireless-regdb: Upgrade to 2024.01.23
+- xserver-xorg: Upgrade to 21.1.11
+- xwayland: Upgrade to 23.2.4
+- yocto-uninative: Update to 4.4 for glibc 2.39
+
+
+Known Issues in Yocto-4.3.4
+~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+- N/A
+
+
+Contributors to Yocto-4.3.4
+~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+- Alex Kiernan
+- Alexander Kanavin
+- Alexander Sverdlin
+- Baruch Siach
+- BELOUARGA Mohamed
+- Benjamin Bara
+- Bruce Ashfield
+- Chen Qi
+- Claus Stovgaard
+- Dhairya Nagodra
+- Geoff Parker
+- Johan Bezem
+- Jonathan GUILLOT
+- Julien Stephan
+- Kai Kang
+- Khem Raj
+- Lee Chee Yang
+- Luca Ceresoli
+- Martin Jansa
+- Michael Halstead
+- Michael Opdenacker
+- Munehisa Kamata
+- Pavel Zhukov
+- Peter Marko
+- Priyal Doshi
+- Richard Purdie
+- Robert Joslyn
+- Ross Burton
+- Simone Weiß
+- Soumya Sambu
+- Steve Sakoman
+- Tim Orling
+- Wang Mingyu
+- Yoann Congal
+- Yogita Urade
+
+
+Repositories / Downloads for Yocto-4.3.4
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+poky
+
+- Repository Location: :yocto_git:`/poky`
+- Branch: :yocto_git:`nanbield </poky/log/?h=nanbield>`
+- Tag: :yocto_git:`yocto-4.3.4 </poky/log/?h=yocto-4.3.4>`
+- Git Revision: :yocto_git:`7b8aa378d069ee31373f22caba3bd7fc7863f447 </poky/commit/?id=7b8aa378d069ee31373f22caba3bd7fc7863f447>`
+- Release Artefact: poky-7b8aa378d069ee31373f22caba3bd7fc7863f447
+- sha: 0cb14125f215cc9691cff43982e2c540a5b6018df4ed25c10933135b5bf21d0f
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.3.4/poky-7b8aa378d069ee31373f22caba3bd7fc7863f447.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.3.4/poky-7b8aa378d069ee31373f22caba3bd7fc7863f447.tar.bz2
+
+openembedded-core
+
+- Repository Location: :oe_git:`/openembedded-core`
+- Branch: :oe_git:`nanbield </openembedded-core/log/?h=nanbield>`
+- Tag: :oe_git:`yocto-4.3.4 </openembedded-core/log/?h=yocto-4.3.4>`
+- Git Revision: :oe_git:`d0e68072d138ccc1fb5957fdc46a91871eb6a3e1 </openembedded-core/commit/?id=d0e68072d138ccc1fb5957fdc46a91871eb6a3e1>`
+- Release Artefact: oecore-d0e68072d138ccc1fb5957fdc46a91871eb6a3e1
+- sha: d311fe22ff296c466f9bea1cd26343baee5630bc37f3dda42f2d9d8cc99e3add
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.3.4/oecore-d0e68072d138ccc1fb5957fdc46a91871eb6a3e1.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.3.4/oecore-d0e68072d138ccc1fb5957fdc46a91871eb6a3e1.tar.bz2
+
+meta-mingw
+
+- Repository Location: :yocto_git:`/meta-mingw`
+- Branch: :yocto_git:`nanbield </meta-mingw/log/?h=nanbield>`
+- Tag: :yocto_git:`yocto-4.3.4 </meta-mingw/log/?h=yocto-4.3.4>`
+- Git Revision: :yocto_git:`49617a253e09baabbf0355bc736122e9549c8ab2 </meta-mingw/commit/?id=49617a253e09baabbf0355bc736122e9549c8ab2>`
+- Release Artefact: meta-mingw-49617a253e09baabbf0355bc736122e9549c8ab2
+- sha: 2225115b73589cdbf1e491115221035c6a61679a92a93b2a3cf761ff87bf4ecc
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.3.4/meta-mingw-49617a253e09baabbf0355bc736122e9549c8ab2.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.3.4/meta-mingw-49617a253e09baabbf0355bc736122e9549c8ab2.tar.bz2
+
+bitbake
+
+- Repository Location: :oe_git:`/bitbake`
+- Branch: :oe_git:`2.6 </bitbake/log/?h=2.6>`
+- Tag: :oe_git:`yocto-4.3.4 </bitbake/log/?h=yocto-4.3.4>`
+- Git Revision: :oe_git:`380a9ac97de5774378ded5e37d40b79b96761a0c </bitbake/commit/?id=380a9ac97de5774378ded5e37d40b79b96761a0c>`
+- Release Artefact: bitbake-380a9ac97de5774378ded5e37d40b79b96761a0c
+- sha: 78f579b9d29e72d09b6fb10ac62aa925104335e92d2afb3155bc9ab1994e36c1
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.3.4/bitbake-380a9ac97de5774378ded5e37d40b79b96761a0c.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.3.4/bitbake-380a9ac97de5774378ded5e37d40b79b96761a0c.tar.bz2
+
+yocto-docs
+
+- Repository Location: :yocto_git:`/yocto-docs`
+- Branch: :yocto_git:`nanbield </yocto-docs/log/?h=nanbield>`
+- Tag: :yocto_git:`yocto-4.3.4 </yocto-docs/log/?h=yocto-4.3.4>`
+- Git Revision: :yocto_git:`05d08b0bbaef760157c8d35a78d7405bc5ffce55 </yocto-docs/commit/?id=05d08b0bbaef760157c8d35a78d7405bc5ffce55>`
+
diff --git a/documentation/migration-guides/release-notes-4.3.rst b/documentation/migration-guides/release-notes-4.3.rst
new file mode 100644
index 0000000000..0e175067da
--- /dev/null
+++ b/documentation/migration-guides/release-notes-4.3.rst
@@ -0,0 +1,965 @@
+.. SPDX-License-Identifier: CC-BY-SA-2.0-UK
+
+Release notes for 4.3 (nanbield)
+--------------------------------
+
+New Features / Enhancements in 4.3
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+- Linux kernel 6.5 and 6.1, gcc 13, glibc 2.38, LLVM 17, and over 300 other recipe upgrades
+
+- The autobuilder's shared-state artefacts are now available over the `jsDelivr
+ <https://jsdelivr.com>`__ Content Delivery Network (CDN).
+ See :term:`SSTATE_MIRRORS`.
+
+- New variables:
+
+ - :term:`CVE_CHECK_STATUSMAP`, :term:`CVE_STATUS`, :term:`CVE_STATUS_GROUPS`,
+ replacing the deprecated :term:`CVE_CHECK_IGNORE`.
+
+ - :term:`FILE_LAYERNAME`: bitbake now sets this to the name of the layer
+ containing the recipe
+
+ - :term:`FIT_ADDRESS_CELLS` and :term:`UBOOT_FIT_ADDRESS_CELLS`.
+ See details below.
+
+ - :term:`KERNEL_DTBDEST`: directory where to install DTB files.
+
+ - :term:`KERNEL_DTBVENDORED`: whether to keep vendor subdirectories.
+
+ - :term:`KERNEL_LOCALVERSION`: to add a string to the kernel version
+ information.
+
+ - :term:`KERNEL_STRIP`: to specify the command to strip the kernel binary.
+
+ - :term:`LICENSE_FLAGS_DETAILS`: add extra details about a recipe license
+ in case it is not allowed by :term:`LICENSE_FLAGS_ACCEPTED`.
+
+ - :term:`MESON_TARGET`: to compile a specific Meson target instead of the
+ default ones.
+
+ - :term:`OEQA_REPRODUCIBLE_TEST_PACKAGE`: to restrict package managers used
+ in reproducibility testing.
+
+- Layername functionality available through overrides
+
+ Code can now know which layer a recipe is coming from through the newly added :term:`FILE_LAYERNAME`
+ variable. This has been added as an override of the form ``layer-<layername>``. In particular,
+ this means QA checks can now be layer specific, for example::
+
+ ERROR_QA:layer-core:append = " patch-status"
+
+ This will enable the ``patch-status`` QA check for the core layer.
+
+- Architecture-specific enhancements:
+
+ - RISCV support is now enabled in LLVM 17.
+
+ - Loongarch support in the :ref:`ref-classes-linuxloader` class and
+ ``core-image-minimal-initramfs`` image.
+
+ - The ``arch-armv8`` and ``arch-armv9`` architectures are now given
+ `Scalable Vector Extension (SVE)
+ <https://developer.arm.com/documentation/100891/0612/sve-overview/introducing-sve>`__
+ based tune options. Commits:
+ :yocto_git:`1 </poky/commit/?id=e4be03be5be62e367a40437a389121ef97d6cff3>`,
+ :yocto_git:`2 </poky/commit/?id=8cd5d264af4c346730531cb98ae945ab862dbd69>`.
+
+ - Many changes to support 64-bit ``time_t`` on 32-bit architectures
+
+- Kernel-related enhancements:
+
+ - The default kernel is the current stable (6.5), and there is also support
+ for the latest long-term release (6.1).
+
+ - The list of fixed kernel CVEs is updated regularly using data from
+ `linuxkernelcves.com <https://linuxkernelcves.com>`__.
+
+ - A ``showconfig`` task was added to the :ref:`ref-classes-cml1` class, to
+ easily examine the final generated ``.config`` file.
+
+- New core recipes:
+
+ - `appstream <https://github.com/ximion/appstream>`__: a collaborative effort
+ for making machine-readable software metadata easily available
+ (from meta-oe)
+
+ - `cargo-c-native <https://crates.io/crates/cargo-c>`__: cargo applet to build
+ and install C-ABI compatible dynamic and static libraries
+
+ - `libadwaita <https://gitlab.gnome.org/GNOME/libadwaita>`__: Building blocks
+ for modern GNOME applications (from meta-gnome)
+
+ - `libtraceevent <https://git.kernel.org/pub/scm/libs/libtrace/libtracefs.git/>`__:
+ API to access the kernel tracefs directory (from meta-openembedded)
+
+ - `libxmlb <https://github.com/hughsie/libxmlb>`__: A library to help create
+ and query binary XML blobs (from meta-oe)
+
+ - ``musl-legacy-error``: glibc ``error()`` API implementation still needed
+ by a few packages.
+
+ - `python3-beartype <https://beartype.readthedocs.io>`__, unbearably fast
+ runtime type checking in pure Python.
+
+ - `python3-booleanpy <https://github.com/bastikr/boolean.py>`__: Define boolean
+ algebras, create and parse boolean expressions and create custom boolean DSL
+ (from meta-python)
+
+ - `python3-calver <https://github.com/di/calver>`__: Setuptools extension for
+ CalVer package versions
+
+ - `python3-click <http://click.pocoo.org/>`__: A simple wrapper around optparse
+ for powerful command line utilities (from meta-python)
+
+ - ``python3-dtc``: Python Library for the Device Tree Compiler (from
+ meta-virtualization)
+
+ - `python3-isodate <https://github.com/gweis/isodate/>`__: ISO 8601 date/time
+ parser (from meta-python)
+
+ - `python3-license-expression <https://github.com/nexB/license-expression>`__:
+ Utility library to parse, compare, simplify and normalize license expressions
+ (from meta-python)
+
+ - `python3-rdflib <https://github.com/RDFLib/rdflib>`__: a pure Python package
+ for working with RDF (from meta-python)
+
+ - `python3-spdx-tools <https://github.com/spdx/tools-python>`__,
+ tools for SPDX validation and conversion.
+
+ - `python3-trove-classifiers <https://github.com/pypa/trove-classifiers>`__:
+ Canonical source for classifiers on PyPI (pypi.org)
+
+ - `python3-uritools <https://github.com/tkem/uritools/>`__, replacement for
+ the ``urllib.parse`` module.
+
+ - `python3-xmltodict <https://github.com/martinblech/xmltodict>`__: Makes
+ working with XML feel like you are working with JSON (from meta-python)
+
+ - `ttyrun <https://github.com/ibm-s390-linux/s390-tools>`__, starts
+ ``getty`` programs only when a terminal exists, preventing respawns
+ through the ``init`` program. This enabled removing the
+ ``SERIAL_CONSOLES_CHECK`` variable.
+
+ - ``vulkan-validation-layers``: Khronos official validation layers to assist in
+ verifying that applications correctly use the
+ `Vulkan API <https://www.khronos.org/vulkan>`__.
+
+ - `xcb-util-cursor <http://xcb.freedesktop.org/XcbUtil/>`__: XCB port of
+ libXcursor (from meta-oe)
+
+- QEMU / ``runqemu`` enhancements:
+
+ - QEMU has been upgraded to version 8.1
+
+ - Many updates to the ``runqemu`` command.
+
+ - The ``qemu-system-native`` recipe is now built with PNG support, which could be
+ useful to grab screenshots for error reporting purposes.
+
+- Rust improvements:
+
+ - Rust has been upgraded to version 1.70
+
+ - New ``ptest-cargo`` class was added to allow Cargo based recipes to easily add ptests
+
+ - New :ref:`ref-classes-cargo_c` class was added to allow recipes to make Rust code
+ available to C and C++ programs. See
+ ``meta-selftest/recipes-devtools/rust/rust-c-lib-example_git.bb`` for an example.
+
+- wic Image Creator enhancements:
+
+ - ``bootimg-efi``: if ``fixed-size`` is set then use that for mkdosfs
+
+ - ``bootimg-efi``: stop hardcoding VMA offsets, as required by systemd-boot v254
+ (and dracut/ukify)
+
+ - ``bootimg-pcbios``: use kernel name from :term:`KERNEL_IMAGETYPE` instead of
+ hardcoding ``vmlinuz``
+
+ - Added new ``gpt-hybrid`` option to ``ptable_format`` (formatting a disk with a hybrid
+ MBR and GPT partition scheme)
+
+ - Use ``part_name`` in default imager when defined
+
+ - Added ``--hidden`` argument to default imager to avoid MS Windows prompting to
+ format partition after flashing to a USB stick/SD card
+
+- FIT image related improvements:
+
+ - New :term:`FIT_ADDRESS_CELLS` and :term:`UBOOT_FIT_ADDRESS_CELLS` variables allowing
+ to specify 64 bit addresses, typically for loading U-Boot.
+
+ - Added ``compatible`` line to config section (with value from dtb) to allow bootloaders
+ to select the best matching configuration.
+
+
+- SDK-related improvements:
+
+ - Extended the following recipes to ``nativesdk``: ``libwebp``, ``python3-ply``
+
+- Testing:
+
+ - The :ref:`ref-classes-insane` class now adds an :ref:`unimplemented-ptest
+ <qa-check-unimplemented-ptest>` infrastructure to detect package sources
+ with unit tests but no implemented ptests in the recipe.
+
+ - A new task to perform recipe-wide QA checks was added: ``do_recipe_qa``.
+
+ - New build-time checks for set :term:`SUMMARY`, :term:`HOMEPAGE`, and
+ :term:`RECIPE_MAINTAINER` fields was added, and enabled for the core
+ recipes.
+
+ - The ``parselogs`` runtime test was rewritten. Notably it no longer uses
+ regular expressions, which may mean custom patterns need updating.
+
+ - A self-test to validate that the :term:`SPDX` manifests generated by
+ image builds are valid was added.
+
+ - The ``QEMU_USE_SLIRP`` variable has been replaced by adding ``slirp`` to
+ ``TEST_RUNQEMUPARAMS``.
+
+- Utility script changes:
+
+ - New ``scripts/patchtest`` utility to check patches to the
+ OpenEmbedded-Core project. See
+ :ref:`contributor-guide/submit-changes:validating patches with patchtest`
+ for details.
+
+ - ``scripts/bblock`` was added, allowing the user to lock/unlock specific
+ recipes from being built. This makes it possibly to work on the
+ ``python3`` recipe without causing ``python3-native`` to rebuild.
+
+- BitBake improvements:
+
+ - A fetcher for the Google Cloud Platform (``gs://``) was added.
+
+ - The BitBake Cooker log now contains notes when the caches are
+ invalidated which is useful for memory resident BitBake debugging.
+
+ - BitBake no longer watches files with :wikipedia:`inotify <inotify>` for
+ changes, as under load this can lead to races causing build instability.
+
+ - Toaster's dependencies were upgraded to current releases, specifically
+ to Django 4.2.
+
+- Packaging changes:
+
+ - :term:`FILES` now accepts a ``**`` wildcard, which matches zero or more
+ subdirectories.
+
+ - The X server packagegroup now defaults to using the ``modesetting`` X
+ driver, which obsoletes the ``fbdev`` driver.
+
+ - If a recipe uses :term:`LICENSE_FLAGS` and the licenses are not accepted,
+ it can set a custom message with :term:`LICENSE_FLAGS_DETAILS` to be
+ displayed to the users.
+
+ - Recipes that fetch specific revisions no longer need to explicitly add
+ :term:`SRCPV` to :term:`PV` as BitBake will now automatically add the
+ revision information to :term:`PKGV` if needed (as long as "+" is still
+ present in the :term:`PKGV` value, which is set from :term:`PV` by
+ default).
+
+ - The default :term:`PR` values in many recipes have been removed.
+
+- Security improvements:
+
+ - Most repositories now include a :yocto_git:`SECURITY.md
+ </poky/tree/SECURITY.md>` file with hints for security researchers
+ and other parties who might report potential security vulnerabilities.
+
+- Prominent documentation updates:
+
+ - New :doc:`../contributor-guide/index` document.
+
+ - New :doc:`../dev-manual/security-subjects` chapter in the Development
+ Tasks Manual.
+
+ - Long overdue documentation for the :ref:`ref-classes-devicetree` class.
+
+ - New :ref:`summary about available init systems
+ <dev-manual/init-manager:summary>`.
+
+ - New documentation for the :ref:`ref-classes-uboot-sign` class and
+ its variables and for the :ref:`ref-classes-kernel-devicetree` class
+ variables.
+
+- Miscellaneous changes:
+
+ - Selecting systemd via :term:`INIT_MANAGER` now adds ``usrmerge`` to
+ :term:`DISTRO_FEATURES` as current versions of systemd now require
+ merged ``/usr``.
+
+ - Generation of :term:`SPDX` manifests is now enabled by default.
+
+ - Git based recipes in OE-Core which used the ``git`` protocol have been
+ changed to use `https`` where possible, as it is typically faster and
+ more reliable.
+
+ - The ``os-release`` recipe added a ``CPE_NAME`` to the fields provided, with the
+ default being populated from :term:`DISTRO`.
+
+ - The ``psplash`` recipe now accepts a PNG format image through
+ :term:`SPLASH_IMAGES`, instead of a harder to generate and modify
+ ``.h`` file.
+
+ - The ; character is no longer needed to separate functions specified in
+ :term:`IMAGE_POSTPROCESS_COMMAND`, :term:`IMAGE_PREPROCESS_COMMAND`,
+ :term:`POPULATE_SDK_POST_HOST_COMMAND`, :term:`ROOTFS_POSTINSTALL_COMMAND`
+ etc. (If any are present they will be replaced with spaces, so existing
+ metadata does not yet need to be changed.)
+
+ - In the ``Upstream-Status`` field in a patch header, "Accepted" is no longer
+ a valid value since it is logically the same as "Backport". Change any
+ values you have (particularly in patches applied through bbappends for core
+ recipes, since they will be validated as indicated above).
+
+
+Known Issues in 4.3
+~~~~~~~~~~~~~~~~~~~
+
+- N/A
+
+
+Recipe License changes in 4.3
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+The following corrections have been made to the :term:`LICENSE` values set by recipes:
+
+- ``glib-networking``: make :term:`LICENSE` more accurate (``LGPL-2.1`` -> ``LGPL-2.1-or-later``) and add an exception for linking to OpenSSL if it is enabled (``openssl`` is in :term:`PACKAGECONFIG`)
+- ``libbsd``: set per-package licensing to clarify that BSD-4-Clause code is only in the ``-doc`` package
+- ``openssh``: BSD-4-Clause code has been removed completely from the codebase as part of 9.4p1 update - previously in the kirkstone release, ``BSD-4-Clause`` was removed from the :term:`LICENSE` value in our recipe, however some BSD-4-Clause code actually still remained upstream until 9.4p1.
+- ``python3-sphinx``: remove ``BSD-3-Clause`` from :term:`LICENSE` - BSD-3-Clause code was removed as part of the python3-sphinx 7.0.1 release (see `this upstream commit <https://github.com/sphinx-doc/sphinx/commit/a7f5d91c29d6f377b9fe7e926965c6f9d3e7b802>`__)
+
+
+Security Fixes in 4.3
+~~~~~~~~~~~~~~~~~~~~~
+
+- bind: :cve:`2023-2911`, :cve:`2023-2828`, :cve:`2023-3341`, :cve:`2023-4236`
+- binutils: :cve:`2023-1972`
+- connman: :cve:`2023-28488`
+- cups: :cve:`2023-32324`, :cve:`2023-34241`, :cve:`2023-4504`
+- dbus: :cve:`2023-34969`
+- dmidecode: :cve:`2023-30630`
+- dropbear: :cve:`2023-36328`
+- erofs-utils: :cve:`2023-33551`, :cve:`2023-33552`
+- gcc: :cve:`2023-4039`
+- ghostscript: :cve:`2023-28879`, :cve:`2023-36664`, :cve:`2023-38559;` ignore :cve:`2023-38560`
+- git: :cve:`2023-25652`, :cve:`2023-29007`
+- glibc: :cve:`2023-4527`, :cve:`2023-4806`
+- go: :cve:`2023-24537`, :cve:`2023-39325`
+- gstreamer: :cve:`2023-40475`, :cve:`2023-40476`
+- inetutils: :cve:`2023-40303`
+- libarchive: ignore :cve:`2023-30571`
+- librsvg: :cve:`2023-38633`
+- libwebp: :cve:`2023-1999`, :cve:`2023-4863`
+- libx11: :cve:`2023-3138`, :cve:`2023-43785`, :cve:`2023-43786`, :cve:`2023-43787`
+- libxml2: :cve:`2023-28484`, :cve:`2023-29469;` ignore disputed :cve:`2023-45322`
+- libxpm: :cve:`2023-43788`, :cve:`2023-43789`, :cve:`2022-44617`
+- linux: update CVE exclusions
+- ncurses: :cve:`2023-29491`
+- nghttp2: :cve:`2023-44487`
+- ninja: ignore :cve:`2021-4336`, wrong ninja
+- openssh: :cve:`2023-38408`
+- openssl: :cve:`2023-2650`, :cve:`2023-1255`, :cve:`2023-0466`, :cve:`2023-0465`, :cve:`2023-0464`, :cve:`2023-3817`, :cve:`2023-3446`, :cve:`2023-2975`, :cve:`2023-4807`
+- perl: :cve:`2023-31484`, :cve:`2023-31486`
+- pixman: ignore :cve:`2023-37769`
+- procps: :cve:`2023-4016`
+- python3-git: :cve:`2023-41040`
+- python3: ignore :cve:`2023-36632`
+- python3-urllib3: :cve:`2023-43804`
+- qemu: :cve:`2023-40360`, :cve:`2023-42467;` ignore :cve:`2023-0664` (Windows-specific), ignore :cve:`2023-2680` (RHEL specific)
+- screen: :cve:`2023-24626`
+- shadow: :cve:`2023-29383`
+- sqlite3: ignore :cve:`2023-36191`
+- sysstat: :cve:`2023-33204`
+- tiff: :cve:`2022-4645`, :cve:`2023-2731`, :cve:`2023-26965`, :cve:`2023-40745`, :cve:`2023-41175`
+- vim: :cve:`2023-2426`, :cve:`2023-2609`, :cve:`2023-2610`, :cve:`2023-3896`, :cve:`2023-5441`, :cve:`2023-5535`
+- zlib: ignore :cve:`2023-45853`
+
+
+Recipe Upgrades in 4.3
+~~~~~~~~~~~~~~~~~~~~~~
+
+- acpica: upgrade 20220331 -> 20230628
+- adwaita-icon-theme: 43 -> 45.0
+- alsa-lib: upgrade 1.2.8 -> 1.2.10
+- alsa-ucm-conf: upgrade 1.2.8 -> 1.2.10
+- alsa-utils: upgrade 1.2.8 -> 1.2.10
+- apr: upgrade 1.7.2 -> 1.7.4
+- apt: Upgrade to v2.6.0
+- at-spi2-core: update 2.46.0 -> 2.50.0
+- autoconf: Upgrade to 2.72c
+- babeltrace2: upgrade 2.0.4 -> 2.0.5
+- bind: upgrade 9.18.12 -> 9.18.19
+- binutils: Upgrade to 2.41 release
+- bluez5: upgrade 5.66 -> 5.69
+- boost: upgrade 1.81.0 -> 1.83.0
+- btrfs-tools: upgrade 6.1.3 -> 6.5.1
+- busybox: 1.36.0 -> 1.36.1
+- ccache: upgrade 4.7.4 -> 4.8.3
+- cmake: upgrade to 3.27.5
+- connman: update 1.41 -> 1.42
+- coreutils: upgrade 9.1 -> 9.4
+- cpio: upgrade to 2.14
+- cracklib: upgrade 2.9.10 -> 2.9.11
+- createrepo-c: update 0.20.1 -> 1.0.0
+- cryptodev: update to 1.13 + latest git
+- cups: upgrade to 2.4.6
+- curl: upgrade 8.0.1 -> 8.4.0
+- dbus: upgrade 1.14.6 -> 1.14.10
+- debianutils: upgrade 5.8 -> 5.13
+- dhcpcd: upgrade to 10.0.2
+- diffoscope: upgrade 236 -> 249
+- diffutils: update 3.9 -> 3.10
+- dmidecode: upgrade to 3.5
+- dnf: upgrade 4.14.0 -> 4.17.0
+- dos2unix: upgrade 7.4.4 -> 7.5.1
+- dpkg: upgrade to v1.22.0
+- efivar: Upgrade to tip of trunk
+- elfutils: upgrade 0.188 -> 0.189
+- ell: upgrade 0.56 -> 0.58
+- enchant2: upgrade 2.3.4 -> 2.6.1
+- epiphany: upgrade 43.1 -> 44.6
+- erofs-utils: update 1.5 -> 1.6
+- ethtool: upgrade 6.2 -> 6.5
+- eudev: Upgrade 3.2.11 -> 3.2.12
+- ffmpeg: update 5.1.2 -> 6.0
+- file: upgrade 5.44 -> 5.45
+- flac: Upgrade 1.4.2 -> 1.4.3
+- font-util: upgrade 1.4.0 -> 1.4.1
+- freetype: upgrade 2.13.0 -> 2.13.2
+- fribidi: upgrade 1.0.12 -> 1.0.13
+- gawk: upgrade 5.2.1 -> 5.2.2
+- gcc: upgrade to 13.2
+- gcompat: Upgrade to 1.1.0
+- gcr: update 4.0.0 -> 4.1.0
+- gdb: upgrade 13.1 -> 13.2
+- gettext: upgrade 0.21.1 -> 0.22
+- ghostscript: upgrade to 10.02.0
+- git: upgrade to 2.42.0
+- glib-2.0: upgrade 2.74.6 -> 2.78.0
+- glibc: upgrade to 2.38 + stable updates
+- glib-networking: upgrade 2.74.0 -> 2.76.1
+- glslang: upgrade to 1.3.243
+- gmp: upgrade 6.2.1 -> 6.3.0
+- gnu-efi: upgrade 3.0.15 -> 3.0.17
+- gnupg: upgrade 2.4.0 -> 2.4.3
+- gnutls: update 3.8.0 -> 3.8.1
+- gobject-introspection: upgrade 1.74.0 -> 1.78.1
+- go-helloworld: Upgrade to tip of trunk
+- go: update 1.20.1 -> 1.20.10
+- gpgme: update 1.18.0 -> 1.22.0
+- grep: upgrade 3.10 -> 3.11
+- groff: update 1.22.4 -> 1.23.0
+- gsettings-desktop-schemas: upgrade 43.0 -> 44.0
+- gstreamer1.0: upgrade 1.22.0 -> 1.22.5
+- gstreamer: upgrade 1.22.5 -> 1.22.6
+- gtk+3: upgrade 3.24.36 -> 3.24.38
+- gtk4: update 4.10.0 -> 4.12.3
+- gzip: update 1.12 -> 1.13
+- harfbuzz: upgrade 7.1.0 -> 8.2.1
+- icu: upgrade 72-1 -> 73-2
+- igt-gpu-tools: update 1.27.1 -> 1.28
+- iproute2: upgrade 6.2.0 -> 6.5.0
+- iso-codes: upgrade 4.13.0 -> 4.15.0
+- jquery: upgrade 3.6.3 -> 3.7.1
+- json-c: upgrade 0.16 -> 0.17
+- kbd: upgrade 2.5.1 -> 2.6.3
+- kea: upgrade to v2.4.0
+- kexec-tools: upgrade 2.0.26 -> 2.0.27
+- kmscube: upgrade to latest revision
+- less: update 608 -> 643
+- libadwaita: upgrade 1.3.3 -> 1.4.0
+- libarchive: upgrade 3.6.2 -> 3.7.2
+- libassuan: upgrade 2.5.5 -> 2.5.6
+- libatomic-ops: update 7.6.14 -> 7.8.0
+- libcap: upgrade 2.67 -> 2.69
+- libcgroup: update 3.0.0 -> 3.1.0
+- libconvert-asn1-perl: upgrade 0.33 -> 0.34
+- libdnf: update 0.70.1 -> 0.70.1
+- libdrm: upgrade 2.4.115 -> 2.4.116
+- libedit: upgrade 20221030-3.1 -> 20230828-3.1
+- libevdev: upgrade 1.13.0 -> 1.13.1
+- libgcrypt: update 1.10.1 -> 1.10.2
+- libgit2: upgrade 1.6.3 -> 1.7.1
+- libglu: update 9.0.2 -> 9.0.3
+- libgpg-error: update 1.46 -> 1.47
+- libgudev: upgrade 237 -> 238
+- libhandy: upgrade 1.8.1 -> 1.8.2
+- libinput: upgrade to 1.24.0
+- libjpeg-turbo: upgrade to 3.0.0
+- libksba: upgrade 1.6.3 -> 1.6.4
+- libmd: upgrade 1.0.4 -> 1.1.0
+- libmicrohttpd: upgrade 0.9.76 -> 0.9.77
+- libmodule-build-perl: upgrade 0.4232 -> 0.4234
+- libmodulemd: upgrade 2.14.0 -> 2.15.0
+- libnl: upgrade 3.7.0 -> 3.8.0
+- libnss-nis: upgrade 3.1 -> 3.2
+- libpam: update 1.5.2 -> 1.5.3
+- libpcap: upgrade 1.10.3 -> 1.10.4
+- libpng: upgrade 1.6.39 -> 1.6.40
+- libportal: upgrade 0.6 -> 0.7.1
+- libproxy: update 0.4.18 -> 0.5.3
+- libpthread-stubs: update 0.4 -> 0.5
+- librepo: upgrade 1.15.1 -> 1.16.0
+- librsvf: update 2.54.5 -> 2.56.0
+- librsvg: update 2.56.0 -> 2.56.3
+- libsdl2: upgrade 2.26.3 -> 2.28.3
+- libsecret: upgrade 0.20.5 -> 0.21.1
+- libsndfile1: upgrade 1.2.0 -> 1.2.2
+- libsolv: upgrade 0.7.23 -> 0.7.25
+- libsoup: upgrade 3.2.2 -> 3.4.2
+- libssh2: update 1.10.0 -> 1.11.0
+- libtraceevent: upgrade 1.7.2 -> 1.7.3
+- libubootenv: upgrade 0.3.3 -> 0.3.4
+- liburi-perl: update 5.17 -> 5.21
+- libuv: upgrade 1.44.2 -> 1.46.0
+- libva: update 2.16 -> 2.19.0
+- libva-utils: update 2.19.0 -> 2.20.0
+- libwebp: upgrade 1.3.0 -> 1.3.2
+- libx11: upgrade 1.8.4 -> 1.8.7
+- libxcb: upgrade 1.15 -> 1.16
+- libxcrypt: upgrade 4.4.33 -> 4.4.36
+- libxfixes: Upgrade to v6.0.1
+- libxft: upgrade 2.3.7 -> 2.3.8
+- libxi: upgrade to v1.8.1
+- libxml2: upgrade 2.10.3 -> 2.11.5
+- libxpm: upgrade 3.5.15 -> 3.5.17
+- libxslt: upgrade 1.1.37 -> 1.1.38
+- libxt: Upgrade to v1.3.0
+- lighttpd: upgrade 1.4.69 -> 1.4.71
+- linux-firmware: upgrade 20230210 -> 20230804
+- linux-libc-headers: uprev to v6.5
+- linux-yocto/6.1: update to v6.1.57
+- linux-yocto-dev: update to v6.6-rcX
+- linux-yocto: introduce 6.5 reference kernel recipes
+- llvm: Upgrade to 17.0.2
+- ltp: upgrade 20230127 -> 20230516
+- lttng-modules: Upgrade 2.13.9 -> 2.13.10
+- lttng-tools: Upgrade 2.13.9 -> 2.13.11
+- lttng-ust: upgrade 2.13.5 -> 2.13.6
+- lua: update 5.4.4 -> 5.4.6
+- man-pages: upgrade 6.03 -> 6.05.01
+- mc: upgrade 4.8.29 -> 4.8.30
+- mesa: upgrade 23.0.0 -> 23.2.1
+- meson: upgrade 1.0.1 -> 1.2.2
+- mmc-utils: upgrade to latest revision
+- mobile-broadband-provider-info: upgrade 20221107 -> 20230416
+- mpfr: upgrade 4.2.0 -> 4.2.1
+- mpg123: upgrade 1.31.2 -> 1.31.3
+- msmtp: upgrade 1.8.23 -> 1.8.24
+- mtd-utils: upgrade 2.1.5 -> 2.1.6
+- mtools: upgrade 4.0.42 -> 4.0.43
+- musl: update to latest master
+- neard: upgrade 0.18 -> 0.19
+- nettle: upgrade 3.8.1 -> 3.9.1
+- nfs-utils: upgrade 2.6.2 -> 2.6.3
+- nghttp2: upgrade 1.52.0 -> 1.57.0
+- ofono: upgrade 2.0 -> 2.1
+- openssh: upgrade to 9.5p1
+- openssl: upgrade 3.1.0 -> 3.1.3
+- opkg: upgrade 0.6.1 -> 0.6.2
+- opkg-utils: upgrade 0.5.0 -> 0.6.2
+- orc: upgrade 0.4.33 -> 0.4.34
+- ovmf: update 202211 -> 202305
+- ovmf: update edk2-stable202305 -> edk2-stable202308
+- p11-kit: upgrade 0.24.1 -> 0.25.0
+- pango: upgrade 1.50.13 -> 1.51.0
+- parted: upgrade 3.5 -> 3.6
+- patchelf: Upgrade 0.17.2 -> 0.18.0
+- pciutils: upgrade 3.9.0 -> 3.10.0
+- perlcross: update 1.4 -> 1.5
+- perl: update 5.36.0 -> 5.38.0
+- piglit: upgrade to latest revision
+- pigz: upgrade 2.7 -> 2.8
+- pkgconf: upgrade 1.9.4 -> 2.0.3
+- ppp: upgrade 2.4.9 -> 2.5.0
+- procps: update 4.0.3 -> 4.0.4
+- puzzles: upgrade to latest revision
+- python3-attrs: upgrade 22.2.0 -> 23.1.0
+- python3-build: upgrade to 1.0.3
+- python3-certifi: upgrade 2022.12.7 -> 2023.7.22
+- python3-chardet: upgrade 5.1.0 -> 5.2.0
+- python3-cryptography{-vectors}: upgrade 39.0.2 -> 41.0.4
+- python3-cython: upgrade 0.29.33 -> 0.29.36
+- python3-dbusmock: upgrade 0.28.7 -> 0.29.1
+- python3-docutils: upgrade 0.19 -> 0.20.1
+- python3-dtc: upgrade 1.6.1 -> 1.7.0
+- python3-dtschema: upgrade 2023.1 -> 2023.7
+- python3-editables: upgrade 0.3 -> 0.5
+- python3-flit-core: upgrade 3.8.0 -> 3.9.0
+- python3-git: upgrade 3.1.31 -> 3.1.36
+- python3-hatch-fancy-pypi-readme: upgrade 22.8.0 -> 23.1.0
+- python3-hatchling: upgrade 1.13.0 -> 1.18.0
+- python3-hypothesis: upgrade 6.68.2 -> 6.86.2
+- python3-importlib-metadata: upgrade 6.0.0 -> 6.8.0
+- python3-installer: upgrade 0.6.0 -> 0.7.0
+- python3-iso8601: upgrade 1.1.0 -> 2.0.0
+- python3-jsonpointer: upgrade to 2.4
+- python3-libarchive-c: upgrade 4.0 -> 5.0
+- python3-lxml: upgrade 4.9.2 -> 4.9.3
+- python3-markdown: upgrade 3.4.1 -> 3.4.4
+- python3-markupsafe: upgrade 2.1.2 -> 2.1.3
+- python3-more-itertools: upgrade 9.1.0 -> 10.1.0
+- python3-numpy: upgrade 1.24.2 -> 1.26.0
+- python3-packaging: upgrade 23.0 -> 23.1
+- python3-pathspec: upgrade 0.11.0 -> 0.11.2
+- python3-pip: upgrade 23.0.1 -> 23.2.1
+- python3-pluggy: upgrade 1.0.0 -> 1.3.0
+- python3-poetry-core: upgrade 1.5.2 -> 1.7.0
+- python3-psutil: upgrade 5.9.4 -> 5.9.5
+- python3-pyasn1: upgrade 0.4.8 -> 0.5.0
+- python3-pycairo: upgrade 1.23.0 -> 1.24.0
+- python3-pycryptodome: upgrade 3.17 -> 3.19.0
+- python3-pycryptodomex: upgrade 3.17 -> 3.19.0
+- python3-pyelftools: upgrade 0.29 -> 0.30
+- python3-pygments: upgrade 2.14.0 -> 2.16.1
+- python3-pygobject: upgrade 3.42.2 -> 3.46.0
+- python3-pyopenssl: upgrade 23.0.0 -> 23.2.0
+- python3-pyparsing: upgrade 3.0.9 -> 3.1.1
+- python3-pytest-subtests: upgrade 0.10.0 -> 0.11.0
+- python3-pytest: upgrade 7.2.2 -> 7.4.2
+- python3-pytz: upgrade 2022.7.1 -> 2023.3
+- python3-pyyaml: upgrade 6.0 -> 6.0.1
+- python3-requests: Upgrade to 2.31.0
+- python3-ruamel-yaml: upgrade 0.17.21 -> 0.17.32
+- python3-setuptools-rust: upgrade 1.5.2 -> 1.7.0
+- python3-setuptools: upgrade 67.6.0 -> 68.2.2
+- python3-smmap: upgrade 5.0.0 -> 6.0.0
+- python3-sphinx-rtd-theme: upgrade 1.2.0 -> 1.3.0
+- python3-sphinx: upgrade 6.1.3 -> 7.2.6
+- python3-trove-classifiers: upgrade 2023.4.29 -> 2023.9.19
+- python3-typing-extensions: upgrade 4.5.0 -> 4.8.0
+- python3: upgrade 3.11.2 -> 3.11.5
+- python3-urllib3: upgrade 1.26.15 -> 2.0.6
+- python3-webcolors: upgrade 1.12 -> 1.13
+- python3-wheel: upgrade 0.40.0 -> 0.41.2
+- python3-zipp: upgrade 3.15.0 -> 3.17.0
+- qemu: Upgrade 7.2.0 -> 8.1.0
+- re2c: upgrade 3.0 -> 3.1
+- repo: upgrade 2.32 -> 2.36.1
+- rpcsvc-proto: Upgrade to 1.4.4
+- rpm2cpio.sh: update to the last 4.x version
+- rpm: update 4.18.0 -> 4.18.1
+- ruby: upgrade 3.2.1 -> 3.2.2
+- rust: Upgrade 1.68.1 -> 1.70.0
+- screen: update 4.9.0 -> 4.9.1
+- seatd: upgrade 0.7.0 -> 0.8.0
+- serf: upgrade 1.3.9 -> 1.3.10
+- shaderc: upgrade 2023.2 -> 2023.6
+- spirv-headers: upgrade 1.3.239.0 -> 1.3.243.0
+- spirv-tools: upgrade 1.3.239.0 -> 1.3.243.0
+- sqlite3: upgrade 3.41.0 -> 3.43.1
+- squashfs-tools: upgrade 4.5.1 -> 4.6.1
+- sstatesig: Update to match bitbake changes to runtaskdeps
+- strace: upgrade 6.2 -> 6.5
+- stress-ng: upgrade 0.15.06 -> 0.16.05
+- sudo: update 1.9.13p3 -> 1.9.14p3
+- sysfsutils: update 2.1.0 -> 2.1.1
+- sysklogd: upgrade 2.4.4 -> 2.5.2
+- sysstat: update 12.6.2 -> 12.7.4
+- systemd: upgrade 253.1 -> 254.4
+- systemtap: upgrade 4.8 -> 4.9
+- taglib: upgrade 1.13 -> 1.13.1
+- tar: upgrade 1.34 -> 1.35
+- tcf-agent: Update to 1.8.0 release
+- texinfo: upgrade 7.0.2 -> 7.0.3
+- tiff: upgrade to 4.6.0
+- u-boot: Upgrade to 2023.10
+- util-linux: upgrade 2.38.1 -> 2.39.2
+- vala: upgrade 0.56.4 -> 0.56.13
+- valgrind: update 3.20.0 -> 3.21.0
+- vim: upgrade 9.0.1429 -> 9.0.2048
+- vte: upgrade 0.72.0 -> 0.72.2
+- vulkan-headers: upgrade to 1.3.243
+- vulkan-loader: upgrade to 1.3.243
+- vulkan-samples: update to latest SHA
+- vulkan-tools: upgrade to 1.3.243
+- vulkan: upgrade 1.3.243.0 -> 1.3.261.1
+- waffle: upgrade 1.7.0 -> 1.7.2
+- wayland-protocols: upgrade 1.31 -> 1.32
+- wayland: upgrade 1.21.0 -> 1.22.0
+- wayland-utils: upgrade 1.1.0 -> 1.2.0
+- webkitgtk: update 2.38.5 -> 2.40.5
+- weston: update 11.0.1 -> 12.0.2
+- wget: upgrade 1.21.3 -> 1.21.4
+- wireless-regdb: upgrade 2023.02.13 -> 2023.09.01
+- wpebackend-fdo: upgrade 1.14.0 -> 1.14.2
+- xcb-proto: upgrade 1.15.2 -> 1.16.0
+- xdpyinfo: upgrade 1.3.3 -> 1.3.4
+- xeyes: upgrade 1.2.0 -> 1.3.0
+- xf86-input-libinput: upgrade 1.2.1 -> 1.4.0
+- xf86-input-mouse: upgrade 1.9.4 -> 1.9.5
+- xinput: upgrade to v1.6.4
+- xkeyboard-config: upgrade 2.38 -> 2.39
+- xorgproto: upgrade 2022.2 -> 2023.2
+- xserver-xorg: upgrade 21.1.7 -> 21.1.8
+- xtrans: update 1.4.0 -> 1.5.0
+- xwayland: upgrade 22.1.8 -> 23.2.1
+- xwininfo: upgrade to v1.1.6
+- xxhash: upgrade 0.8.1 -> 0.8.2
+- xz: upgrade 5.4.2 -> 5.4.4
+- zlib: upgrade 1.2.13 -> 1.3
+- zstd: upgrade 1.5.4 -> 1.5.5
+
+
+
+
+Contributors to 4.3
+~~~~~~~~~~~~~~~~~~~
+
+Thanks to the following people who contributed to this release:
+
+- Adrian Freihofer
+- Alassane Yattara
+- Alberto Pianon
+- Alberto Planas
+- Alejandro Hernandez Samaniego
+- Alexander Kanavin
+- Alexandre Belloni
+- Alexis Lothoré
+- Alex Kiernan
+- Andreas Cord-Landwehr
+- André Draszik
+- Andrej Valek
+- Andrew Jeffery
+- Andrey Zhizhikin
+- Angelo Ribeiro
+- Antoine Lubineau
+- Antonin Godard
+- Anuj Mittal
+- Archana Polampalli
+- Armin Kuster
+- Arne Schwerdt
+- Arno Baumfalk
+- Arslan Ahmad
+- Bartosz Golaszewski
+- BELHADJ SALEM Talel
+- BELOUARGA Mohamed
+- Benjamin Bara
+- Benjamin Bouvier
+- Bergin, Peter
+- Bruce Ashfield
+- Changhyeok Bae
+- Changqing Li
+- Charles-Antoine Couret
+- Charlie Wu
+- Chen Qi
+- Chi Xu
+- Chris Laplante
+- Christopher Larson
+- Daniel Ammann
+- Daniel McGregor
+- Daniel Semkowicz
+- David Reyna
+- Deepthi Hemraj
+- Denis OSTERLAND-HEIM
+- Denys Dmytriyenko
+- Derek Straka
+- Dit Kozmaj
+- Dmitry Baryshkov
+- Ed Beroset
+- Eero Aaltonen
+- Eilís 'pidge' Ní Fhlannagáin
+- Emil Ekmečić
+- Emil Kronborg Andersen
+- Enrico Jörns
+- Enrico Scholz
+- Etienne Cordonnier
+- Fabien Mahot
+- Fabio Estevam
+- Fahad Arslan
+- Frank WOLFF
+- Frederic Martinsons
+- Frieder Paape
+- Frieder Schrempf
+- Geoff Parker
+- Hannu Lounento
+- Ian Ray
+- Insu Park
+- Jaeyoon Jung
+- Jamin Lin
+- Jan Garcia
+- Jan Vermaete
+- Jasper Orschulko
+- Jean-Marie Lemetayer
+- Jérémy Rosen
+- Jermain Horsman
+- Jialing Zhang
+- Joel Stanley
+- Joe Slater
+- Johannes Schrimpf
+- Jon Mason
+- Jörg Sommer
+- Jose Quaresma
+- Joshua Watt
+- Julien Stephan
+- Kai Kang
+- Khem Raj
+- Kyle Russell
+- Lee Chee Yang
+- Lei Maohui
+- Leon Anavi
+- Lorenzo Arena
+- Louis Rannou
+- Luan Rafael Carneiro
+- Luca Boccassi
+- Luca Ceresoli
+- Marc Ferland
+- Marcus Flyckt
+- Marek Vasut
+- Mark Asselstine
+- Mark Hatle
+- Markus Niebel
+- Markus Volk
+- Marlon Rodriguez Garcia
+- Marta Rybczynska
+- Martijn de Gouw
+- Martin Jansa
+- Martin Siegumfeldt
+- Matthias Schnelte
+- Mauro Queiros
+- Max Krummenacher
+- Michael Halstead
+- Michael Opdenacker
+- Mickael RAMILISON
+- Mikko Rapeli
+- Ming Liu
+- Mingli Yu
+- Narpat Mali
+- Natasha Bailey
+- Nikhil R
+- Ninad Palsule
+- Ola x Nilsson
+- Oleksandr Hnatiuk
+- Otavio Salvador
+- Ovidiu Panait
+- Pascal Bach
+- Patrick Williams
+- Paul Eggleton
+- Paul Gortmaker
+- Paulo Neves
+- Pavel Zhukov
+- Pawan Badganchi
+- Peter Bergin
+- Peter Hoyes
+- Peter Kjellerstedt
+- Peter Marko
+- Peter Suti
+- Petr Gotthard
+- Petr Kubizňák
+- Piotr Łobacz
+- Poonam Jadhav
+- Qiu Tingting
+- Quentin Schulz
+- Randolph Sapp
+- Randy MacLeod
+- Ranjitsinh Rathod
+- Rasmus Villemoes
+- Remi Peuvergne
+- Richard Purdie
+- Riyaz Khan
+- Robert Joslyn
+- Robert P. J. Day
+- Robert Yang
+- Roland Hieber
+- Ross Burton
+- Ryan Eatmon
+- Sakib Sajal
+- Samantha Jalabert
+- Sanjay Chitroda
+- Sean Nyekjaer
+- Sergei Zhmylev
+- Siddharth Doshi
+- Soumya Sambu
+- Staffan Rydén
+- Stefano Babic
+- Stefan Tauner
+- Stéphane Veyret
+- Stephan Wurm
+- Sudip Mukherjee
+- Sundeep KOKKONDA
+- Svend Meyland Nicolaisen
+- Tan Wen Yan
+- Thomas Roos
+- Tim Orling
+- Tom Hochstein
+- Tom Isaacson
+- Trevor Gamblin
+- Ulrich Ölmann
+- Victor Kamensky
+- Vincent Davis Jr
+- Virendra Thakur
+- Wang Mingyu
+- Xiangyu Chen
+- Yang Xu
+- Yash Shinde
+- Yi Zhao
+- Yoann Congal
+- Yogita Urade
+- Yuta Hayama
+- Zang Ruochen
+- Zhixiong Chi
+
+
+Repositories / Downloads for Yocto-4.3
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+poky
+
+- Repository Location: :yocto_git:`/poky`
+- Branch: :yocto_git:`nanbield </poky/log/?h=nanbield>`
+- Tag: :yocto_git:`yocto-4.3 </poky/log/?h=yocto-4.3>`
+- Git Revision: :yocto_git:`15b576c4101231d248fda7ae0824e1780e1a8901 </poky/commit/?id=15b576c4101231d248fda7ae0824e1780e1a8901>`
+- Release Artefact: poky-15b576c4101231d248fda7ae0824e1780e1a8901
+- sha: 6b0ef7914d15db057f3efdf091b169a7361c74aac0abcfa717ef55d1a0adf74c
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.3/poky-15b576c4101231d248fda7ae0824e1780e1a8901.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.3/poky-15b576c4101231d248fda7ae0824e1780e1a8901.tar.bz2
+
+openembedded-core
+
+- Repository Location: :oe_git:`/openembedded-core`
+- Branch: :oe_git:`nanbield </openembedded-core/log/?h=nanbield>`
+- Tag: :oe_git:`yocto-4.3 </openembedded-core/log/?h=yocto-4.3>`
+- Git Revision: :oe_git:`4c261f8cbdf0c7196a74daad041d04eb093015f3 </openembedded-core/commit/?id=4c261f8cbdf0c7196a74daad041d04eb093015f3>`
+- Release Artefact: oecore-4c261f8cbdf0c7196a74daad041d04eb093015f3
+- sha: c9e6ac75d7848ce8844cb29c98659dd8f83b3de13b916124dff76abe034e6a5c
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.3/oecore-4c261f8cbdf0c7196a74daad041d04eb093015f3.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.3/oecore-4c261f8cbdf0c7196a74daad041d04eb093015f3.tar.bz2
+
+meta-mingw
+
+- Repository Location: :yocto_git:`/meta-mingw`
+- Branch: :yocto_git:`nanbield </meta-mingw/log/?h=nanbield>`
+- Tag: :yocto_git:`yocto-4.3 </meta-mingw/log/?h=yocto-4.3>`
+- Git Revision: :yocto_git:`65ef95a74f6ae815f63f636ed53e140a26a014ce </meta-mingw/commit/?id=65ef95a74f6ae815f63f636ed53e140a26a014ce>`
+- Release Artefact: meta-mingw-65ef95a74f6ae815f63f636ed53e140a26a014ce
+- sha: fb2bf806941a00a1be6349c074379b63a76490bcf0f3b740d96d1aeeefa12286
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.3/meta-mingw-65ef95a74f6ae815f63f636ed53e140a26a014ce.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.3/meta-mingw-65ef95a74f6ae815f63f636ed53e140a26a014ce.tar.bz2
+
+bitbake
+
+- Repository Location: :oe_git:`/bitbake`
+- Branch: :oe_git:`2.6 </bitbake/log/?h=2.6>`
+- Tag: :oe_git:`yocto-4.3 </bitbake/log/?h=yocto-4.3>`
+- Git Revision: :oe_git:`5419a8473d6d4cd1d01537de68ad8d72cf5be0b2 </bitbake/commit/?id=5419a8473d6d4cd1d01537de68ad8d72cf5be0b2>`
+- Release Artefact: bitbake-5419a8473d6d4cd1d01537de68ad8d72cf5be0b2
+- sha: e5dab4b3345d91307860803e2ad73b2fcffa9d17dd3fde0e013ca0ebea0d05ca
+- Download Locations:
+ http://downloads.yoctoproject.org/releases/yocto/yocto-4.3/bitbake-5419a8473d6d4cd1d01537de68ad8d72cf5be0b2.tar.bz2
+ http://mirrors.kernel.org/yocto/yocto/yocto-4.3/bitbake-5419a8473d6d4cd1d01537de68ad8d72cf5be0b2.tar.bz2
+
+yocto-docs
+
+- Repository Location: :yocto_git:`/yocto-docs`
+- Branch: :yocto_git:`nanbield </yocto-docs/log/?h=nanbield>`
+- Tag: :yocto_git:`yocto-4.3 </yocto-docs/log/?h=yocto-4.3>`
+- Git Revision: :yocto_git:`ceb1812e63b9fac062f886c2a1dde23137c0e1ed </yocto-docs/commit/?id=ceb1812e63b9fac062f886c2a1dde23137c0e1ed>`
+
diff --git a/documentation/migration-guides/release-notes-5.0.rst b/documentation/migration-guides/release-notes-5.0.rst
new file mode 100644
index 0000000000..4bd9125d17
--- /dev/null
+++ b/documentation/migration-guides/release-notes-5.0.rst
@@ -0,0 +1,906 @@
+.. SPDX-License-Identifier: CC-BY-SA-2.0-UK
+
+Release notes for 5.0 (scarthgap)
+---------------------------------
+
+New Features / Enhancements in 5.0
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+- Linux kernel 6.6, gcc 13.2, glibc 2.39, LLVM 18.1, and over 300 other recipe upgrades
+
+- New variables:
+
+ - :term:`CVE_DB_INCR_UPDATE_AGE_THRES`: Configure the maximum age of the
+ internal CVE database for incremental update (instead of a full
+ redownload).
+
+ - :term:`RPMBUILD_EXTRA_PARAMS`: support extra user-defined fields without
+ crashing the RPM package creation.
+
+ - :term:`OPKG_MAKE_INDEX_EXTRA_PARAMS`: support extra parameters for
+ ``opkg-make-index``.
+
+ - :term:`EFI_UKI_PATH`, :term:`EFI_UKI_DIR`: define the location of UKI
+ image in the EFI System partition.
+
+ - :term:`TARGET_DBGSRC_DIR`: specifies the target path to debug source files
+
+- Architecture-specific enhancements:
+
+ - ``genericarm64``: a new :term:`MACHINE` to represent a 64-bit General Arm
+ SystemReady platform.
+
+ - Add Power8 tune to PowerPC architecture.
+
+ - ``arch-armv9``: remove CRC and SVE tunes, since FEAT_CRC32 is now mandatory
+ and SVE/SVE2 are enabled by default in GCC's ``-march=armv9-a``.
+
+ - ``arm/armv*``: add all of the additional Arm tunes in GCC 13.2.0
+
+- Kernel-related enhancements:
+
+ - The default kernel is the current LTS (6.6).
+
+ - Add support for ``genericarm64``.
+
+- New core recipes:
+
+ - `bmaptool <https://github.com/yoctoproject/bmaptool>`__: a tool for
+ creating block maps for files and flashing images, being now under the
+ Yocto Project umbrella.
+
+ - ``core-image-initramfs-boot``: a minimal initramfs image, containing just
+ ``udev`` and ``init``, designed to find the main root filesystem and
+ pivot to it.
+
+ - `lzlib <https://www.nongnu.org/lzip/lzlib.html>`__: a data compression
+ library that provides LZMA compression and decompression functions.
+
+ - `lzop <https://www.lzop.org/>`__: a compression utility based on the LZO
+ library, that was brought back after a (now reverted) removal.
+
+ - `python3-jsonschema-specifications <https://pypi.org/project/jsonschema-specifications/>`__:
+ support files for JSON Schema Specifications (meta-schemas and
+ vocabularies), added as a new dependency of ``python3-jsonschema``.
+
+ - `python3-maturin <https://github.com/pyo3/maturin>`__: a project that
+ allows building and publishing Rust crates as Python packages.
+
+ - `python3-meson-python <https://github.com/mesonbuild/meson-python>`__: a
+ Python build backend that enables the Meson build-system for Python packages.
+
+ - `python3-pyproject-metadata <https://pypi.org/project/pyproject-metadata/>`__:
+ a class to handle PEP 621 metadata, and a dependency for
+ ``python3-meson-python``.
+
+ - `python3-referencing <https://github.com/python-jsonschema/referencing>`__:
+ another dependency of ``python3-jsonschema``, it provides an
+ implementation of JSON reference resolution.
+
+ - `python3-rpds-py <https://pypi.org/project/rpds-py/>`__: Python bindings
+ to the Rust rpds crate, and a runtime dependency for ``python3-referencing``.
+
+ - `python3-sphinxcontrib-jquery <https://pypi.org/project/sphinxcontrib-jquery/>`__:
+ a Sphinx extension to include jQuery on newer Sphinx releases. Recent
+ versions of ``python3-sphinx-rtd-theme`` depend on it.
+
+ - `python3-yamllint <https://github.com/adrienverge/yamllint>`__: a linter
+ for YAML files. In U-Boot, the ``binman`` tool uses this linter to verify the
+ configurations at compile time.
+
+ - ``systemd-boot-native``: a UEFI boot manager, this time built as native to
+ provide the ``ukify`` tool.
+
+ - `utfcpp <https://github.com/nemtrif/utfcpp>`__: a C++ library to handle
+ UTF-8 encoded strings. It was added as a dependency for ``taglib`` after
+ its upgrade to v2.0.
+
+ - `vulkan-utility-libraries <https://github.com/KhronosGroup/Vulkan-Utility-Libraries>`__:
+ a set of libraries to share code across various Vulkan repositories.
+
+ - `vulkan-volk <https://github.com/zeux/volk>`__: a meta-loader for Vulkan,
+ needed to support building the latest ``vulkan-tools``.
+
+- QEMU / ``runqemu`` enhancements:
+
+ - QEMU has been upgraded to version 8.2.1
+
+ - ``qemuboot``: support predictable network interface names.
+
+ - ``runqemu``: match ".rootfs." in addition to "-image-" for the root
+ filesystem.
+
+ - :ref:`ref-classes-cmake-qemu`: a new class allowing to execute cross-compiled
+ binaries using QEMU user-mode emulation.
+
+- Rust improvements:
+
+ - Rust has been upgraded to version 1.75
+
+ - The Rust profiler (i.e., PGO - Profile-Guided Optimization) options were
+ enabled back.
+
+ - The Rust ``oe-selftest`` were enabled, except for ``mips32`` whose tests
+ are skipped.
+
+ - ``rust-cross-canadian``: added ``riscv64`` to cross-canadian hosts.
+
+- wic Image Creator enhancements:
+
+ - Allow the imager's output file extension to match the imager's name,
+ instead of hardcoding it to ``direct`` (i.e., the default imager)
+
+ - For GPT-based disks, add reproducible Disk GUID generation
+
+ - Allow generating reproducible ext4 images
+
+ - Add feature to fill a specific range of a partition with zeros
+
+ - ``bootimg-efi``: add ``install-kernel-into-boot-dir`` parameter to
+ configure kernel installation point(s) (i.e., rootfs and/or boot partition)
+
+ - ``rawcopy``: add support for zstd decompression
+
+- SDK-related improvements:
+
+ - ``nativesdk``: let :term:`MACHINE_FEATURES` be set by ``machine-sdk``
+ configuration files.
+
+ - ``nativesdk``: prevent :term:`MACHINE_FEATURES` and :term:`DISTRO_FEATURES`
+ from being backfilled.
+
+ - Support for ``riscv64`` as an SDK host architecture
+
+ - Extend recipes to ``nativesdk``: ``acpica``, ``libpcap``, ``python3-setuptools-rust``
+
+- Testing:
+
+ - Add an optional ``unimplemented-ptest`` QA warning to detect upstream
+ packages with tests, that do not use ptest.
+
+ - ``testimage``: retrieve the ptests directory, especially for the logs,
+ upon ptest failure.
+
+ - ``oeqa``, ``oe-selftest``: add test cases for Maturin (SDK and runtime).
+
+ - Enable ptests for ``python3-attrs``, ``python3-pyyaml``, ``xz``
+
+- Utility script changes:
+
+ - ``oe-init-build-env`` can generate a initial configuration (``.vscode``)
+ for VSCode and its "Yocto Project BitBake" extension.
+
+ - The ``sstate-cache-management`` script has been rewritten in python for better performance and maintainability
+
+ - ``bitbake-layers``: added an option to update the reference of repositories in layer setup
+
+- BitBake improvements:
+
+ - New ``inherit_defer`` statement which works as
+ :ref:`inherit <bitbake:bitbake-user-manual/bitbake-user-manual-metadata:\`\`inherit\`\` directive>`
+ does, except that it is only evaluated at the end of parsing
+ --- recommended where a conditional expression is used, e.g.::
+
+ inherit_defer ${@bb.utils.contains('PACKAGECONFIG', 'python', 'python3targetconfig', '', d)}
+
+ This allows conditional expressions to be evaluated 'late' meaning changes
+ to the variable after the line is parsed will take effect - with inherit this
+ is not the case.
+
+ - Add support for :term:`BB_LOADFACTOR_MAX`, so Bitbake can stop running
+ extra tasks if the system load is too high, especially in distributions
+ where ``/proc/pressure`` is disabled.
+
+ - Add garbage collection to remove unused unihashes from the database.
+
+ - ``taskexp_ncurses``: add ncurses version of ``taskexp``, the dependency
+ explorer originally implemented with GTK.
+
+ - Improve ``runqueue`` performance by adding a cache mechanism in
+ ``build_taskdepdata``.
+
+ - ``bitbake.conf``: add ``runtimedir`` to represent the path to the runtime
+ state directory (i.e., ``/run``).
+
+ - Allow to disable colored text output through the
+ `NO_OUTPUT <https://no-color.org/>`__ environment variable.
+
+ - ``git-make-shallow`` script: add support for Git's ``safe.bareRepository=explicit``
+ configuration setting.
+
+- devtool improvements:
+
+ - Introduce a new ``ide-sdk`` plugin to generate a configuration to use
+ the eSDK through an IDE.
+
+ - Add ``--no-pypi`` option for Python projects that are not hosted on PyPI.
+
+ - Add support for Git submodules.
+
+ - ``ide``: ``vscode``: generate files from recipe sysroots and debug the
+ root filesystem in read-only mode to avoid confusion.
+
+ - ``modify``: add support for multiple sources in :term:`SRC_URI`.
+
+ - Support plugins within plugins.
+
+- recipetool improvements:
+
+ - ``appendsrcfile(s)``: add a mode to update the recipe itself.
+
+ - ``appendsrcfile(s)``: add ``--dry-run`` mode.
+
+ - ``create``: add handler to create Go recipes.
+
+ - ``create``: improve identification of licenses.
+
+ - ``create``: add support for modern Python PEP-517 build systems including
+ hatchling, maturin, meson-python.
+
+ - ``create``: add PyPi support.
+
+ - ``create``: prefix created Python recipes with ``python3-``.
+
+- Packaging changes:
+
+ - ``package_rpm``: the RPM package compressor's mode can now be overriden.
+
+ - ipk packaging (using ``opkg``) now uses ``zstd`` compression instead of
+ ``xz`` for better compression and performance.
+
+- Security improvements:
+
+ - Improve incremental CVE database download from NVD. Rejected CVEs are
+ removed, configuration is kept up-to-date. The age threshold for
+ incremental update can be configured with :term:`CVE_DB_INCR_UPDATE_AGE_THRES`
+ variable.
+
+- Prominent documentation updates:
+
+ - Documentation for using the new ``devtool ide-sdk`` command and features.
+ See :ref:`using_devtool` for details.
+
+ - New ":doc:`bitbake:bitbake-user-manual/bitbake-user-manual-ref-variables-context`"
+ section in the BitBake User Manual.
+
+ - New ``make stylecheck`` command to run `Vale <https://vale.sh>`__,
+ to perform text style checks and comply with text writing standards in
+ the industry.
+
+ - New ``make sphinx-lint`` command to run `sphinx-lint
+ <https://github.com/sphinx-contrib/sphinx-lint>`__. After customization,
+ this will allow us to enforce Sphinx syntax style choices.
+
+- Miscellaneous changes:
+
+ - Systemd's following :term:`PACKAGECONFIG` options were added:
+ ``cryptsetup-plugins``, ``no-ntp-fallback``, and ``p11kit``.
+
+ - New PACKAGECONFIG options added to ``libarchive``, ``libinput``,
+ ``libunwind``, ``mesa``, ``mesa-gl``, ``openssh``, ``perf``,
+ ``python3-pyyaml``, ``qemu``, ``rpm``, ``shadow``, ``strace``,
+ ``syslinux``, ``systemd``, ``vte``, ``webkitgtk``, ``xserver-xorg``.
+
+ - ``systemd-boot`` can, from now on, be compiled as ``native``, thus
+ providing ``ukify`` tool to build UKI images.
+
+ - systemd: split bash completion for ``udevadm`` in a new
+ ``udev-bash-completion`` package.
+
+ - The :ref:`ref-classes-go-vendor` class was added to support offline builds
+ (i.e., vendoring). It can also handle modules from the same repository,
+ taking into account their versions.
+
+ - Disable strace support of bluetooth by default.
+
+ - ``openssh`` now has a systemd service: ``sshd.service``.
+
+ - The :ref:`ref-classes-python_mesonpy` class was added (moved in from
+ ``meta-python``) to support Python package builds using the meson-python
+ PEP-517 build backend.
+
+ - Support for unpacking ``.7z`` archives in :term:`SRC_URI` using ``p7zip``.
+
+ - Add minimal VS Code configuration to avoid VS Code's indexer from choking
+ on build directories.
+
+
+Known Issues in 5.0
+~~~~~~~~~~~~~~~~~~~
+
+- ``gpgme`` has had Python binding support disabled since upstream does not yet support Python 3.12.
+
+
+Recipe License changes in 5.0
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+The following corrections have been made to the :term:`LICENSE` values set by recipes:
+
+- ``elfutils``: split license for libraries & backend and utilities.
+- ``ghostscript``: correct :term:`LICENSE` to ``AGPL-3.0-or-later``.
+- ``libsystemd``: set its own :term:`LICENSE` value (``LGPL-2.1-or-later``) to add more granularity.
+- ``libtest-warnings-perl``: update :term:`LICENSE` ``Artistic-1.0`` to ``Artistic-1.0-Perl``.
+- ``linux-firmware``: set package :term:`LICENSE` appropriately for ``carl9170``, ``rockchip`` and ``powerpr``.
+- ``newlib``: add license ``Apache-2.0-with-LLVM-exception``.
+- ``python3-poetry-core``: add license ``BSD-3-Clause`` for ``fastjsonschema``.
+- ``systemd``: make the scope of ``LGPL`` more accurate (``LGPL-2.1`` -> ``LGPL-2.1-or-later``).
+- ``util-linux``: add ``GPL-1.0-or-later`` license for fdisk and ``MIT`` license for ``flock``.
+- ``zstd``: set to dual-licensed ``BSD-3-Clause`` or ``GPL-2.0-only``.
+
+Security Fixes in 5.0
+~~~~~~~~~~~~~~~~~~~~~
+
+- avahi: :cve:`2023-1981`, :cve:`2023-38469`, :cve:`2023-38470`, :cve:`2023-38471`, :cve:`2023-38469`, :cve:`2023-38470`, :cve:`2023-38471`, :cve:`2023-38472`, :cve:`2023-38473`
+- bind: :cve:`2023-4408`, :cve:`2023-5517`, :cve:`2023-5679`, :cve:`2023-50387`
+- bluez5: :cve:`2023-45866`
+- coreutils: :cve:`2024-0684`
+- cups: :cve:`2023-4504`
+- curl: :cve:`2023-46218`
+- expat: :cve:`2024-28757`
+- gcc: :cve:`2023-4039`
+- glibc: :cve:`2023-5156`, :cve:`2023-0687`
+- gnutls: :cve:`2024-0553`, :cve:`2024-0567`, :cve:`2024-28834`, :cve:`2024-28835`
+- go: :cve:`2023-45288`
+- grub: :cve:`2023-4692`, :cve:`2023-4693`
+- grub2: :cve:`2023-4001` (ignored), :cve:`2024-1048` (ignored)
+- libgit2: :cve:`2024-24575`, :cve:`2024-24577`
+- libsndfile1: :cve:`2022-33065`
+- libssh2: :cve:`2023-48795`
+- libuv: :cve:`2024-24806`
+- libxml2: :cve:`2023-45322` (ignored)
+- linux-yocto/6.6: :cve:`2020-16119`
+- openssh: :cve:`2023-48795`, :cve:`2023-51384`, :cve:`2023-51385`
+- openssl: :cve:`2023-5363`, :cve:`2023-5678`, :cve:`2023-6129`, :cve_mitre:`2023-6237`, :cve:`2024-0727`
+- perl: :cve:`2023-47100`
+- pixman: :cve:`2023-37769` (ignored)
+- python3-cryptography{-vectors}: :cve:`2023-49083`, :cve:`2024-26130`
+- python3-urllib3: :cve:`2023-45803`
+- shadow: :cve:`2023-4641`
+- sudo: :cve:`2023-42456`
+- tiff: :cve:`2023-6228`, :cve:`2023-6277`, :cve:`2023-52355`, :cve:`2023-52356`
+- vim: :cve:`2023-46246`, :cve:`2023-48231`, :cve:`2023-48232`, :cve:`2023-48233`, :cve:`2023-48234`, :cve:`2023-48235`, :cve:`2023-48236`, :cve:`2023-48237`, :cve:`2024-22667`
+- wpa-supplicant: :cve:`2023-52160`
+- xserver-xorg: :cve:`2023-5574`, :cve:`2023-6816`, :cve:`2024-0229`, :cve:`2024-0408`, :cve:`2024-0409`, :cve:`2024-21885`, :cve:`2024-21886`
+- xwayland: :cve:`2023-5367`, :cve:`2024-0408`, :cve:`2024-0409`, :cve:`2023-6816`, :cve:`2024-0229`, :cve:`2024-21885`, :cve:`2024-21886`
+- zlib: :cve:`2023-45853` (ignored), :cve:`2023-6992` (ignored)
+
+
+Recipe Upgrades in 5.0
+~~~~~~~~~~~~~~~~~~~~~~
+
+- acl 2.3.1 -> 2.3.2
+- acpica 20230628 -> 20240322
+- alsa-lib 1.2.10 -> 1.2.11
+- alsa-tools 1.2.5 -> 1.2.11
+- alsa-ucm-conf 1.2.10 -> 1.2.11
+- alsa-utils 1.2.10 -> 1.2.11
+- appstream 0.16.3 -> 1.0.2
+- autoconf 2.72c -> 2.72e
+- bash 5.2.15 -> 5.2.21
+- bash-completion 2.11 -> 2.12.0
+- binutils 2.41 -> 2.42
+- bluez5 5.69 -> 5.72
+- boost 1.83.0 -> 1.84.0
+- boost-build-native 1.83.0 -> 1.84.0
+- btrfs-tools 6.5.1 -> 6.7.1
+- cairo 1.16.0 -> 1.18.0
+- cargo 1.70.0 -> 1.75.0
+- cargo-c-native 0.9.18 -> 0.9.30+cargo-0.77.0
+- ccache 4.8.3 -> 4.9.1
+- cmake 3.27.7 -> 3.28.3
+- cmake-native 3.27.7 -> 3.28.3
+- createrepo-c 1.0.0 -> 1.0.4
+- cronie 1.6.1 -> 1.7.1
+- cross-localedef-native 2.38+git -> 2.39+git
+- cups 2.4.6 -> 2.4.7
+- curl 8.4.0 -> 8.7.1
+- dbus-wait 0.1+git (6cc6077a36fe…) -> 0.1+git (64bc7c8fae61…)
+- debianutils 5.13 -> 5.16
+- desktop-file-utils 0.26 -> 0.27
+- dhcpcd 10.0.2 -> 10.0.6
+- diffoscope 249 -> 259
+- diffstat 1.65 -> 1.66
+- dnf 4.17.0 -> 4.19.0
+- dos2unix 7.5.1 -> 7.5.2
+- ed 1.19 -> 1.20.1
+- efivar 38+39+git -> 39+39+git
+- elfutils 0.189 -> 0.191
+- ell 0.60 -> 0.63
+- enchant2 2.6.2 -> 2.6.7
+- epiphany 44.6 -> 46.0
+- erofs-utils 1.6 -> 1.7.1
+- ethtool 6.5 -> 6.7
+- eudev 3.2.12 -> 3.2.14
+- expat 2.5.0 -> 2.6.2
+- ffmpeg 6.0 -> 6.1.1
+- fontconfig 2.14.2 -> 2.15.0
+- gawk 5.2.2 -> 5.3.0
+- gcr 4.1.0 -> 4.2.0
+- gdb 13.2 -> 14.2
+- gettext 0.22 -> 0.22.5
+- gettext-minimal-native 0.22 -> 0.22.5
+- gi-docgen 2023.1 -> 2023.3
+- git 2.42.0 -> 2.44.0
+- glib-2.0 2.78.3 -> 2.78.4
+- glib-networking 2.76.1 -> 2.78.1
+- glibc 2.38+git -> 2.39+git
+- glibc-locale 2.38 -> 2.39+git
+- glibc-mtrace 2.38 -> 2.39+git
+- glibc-scripts 2.38 -> 2.39+git
+- glibc-testsuite 2.38+git -> 2.39+git
+- glibc-y2038-tests 2.38+git -> 2.39+git
+- glslang 1.3.261.1 -> 1.3.275.0
+- gnu-config 20230216+git -> 20240101+git
+- gnupg 2.4.3 -> 2.4.4
+- gnutls 3.8.3 -> 3.8.4
+- go 1.20.12 -> 1.22.2
+- go-binary-native 1.20.12 -> 1.22.2
+- go-native 1.20.12 -> 1.22.2
+- go-runtime 1.20.12 -> 1.22.2
+- gpgme 1.22.0 -> 1.23.2
+- grub 2.06 -> 2.12
+- grub-efi 2.06 -> 2.12
+- gsettings-desktop-schemas 44.0 -> 46.0
+- gst-devtools 1.22.9 -> 1.22.11
+- gstreamer1.0 1.22.9 -> 1.22.11
+- gstreamer1.0-libav 1.22.9 -> 1.22.11
+- gstreamer1.0-omx 1.22.9 -> 1.22.11
+- gstreamer1.0-plugins-bad 1.22.9 -> 1.22.11
+- gstreamer1.0-plugins-base 1.22.9 -> 1.22.11
+- gstreamer1.0-plugins-good 1.22.9 -> 1.22.11
+- gstreamer1.0-plugins-ugly 1.22.9 -> 1.22.11
+- gstreamer1.0-python 1.22.9 -> 1.22.11
+- gstreamer1.0-rtsp-server 1.22.9 -> 1.22.11
+- gstreamer1.0-vaapi 1.22.9 -> 1.22.11
+- gtk+3 3.24.38 -> 3.24.41
+- gtk4 4.12.3 -> 4.14.1
+- harfbuzz 8.2.2 -> 8.3.0
+- hwlatdetect 2.5 -> 2.6
+- icu 73-2 -> 74-1
+- inetutils 2.4 -> 2.5
+- init-system-helpers 1.65.2 -> 1.66
+- iproute2 6.5.0 -> 6.7.0
+- iptables 1.8.9 -> 1.8.10
+- iputils 20221126 -> 20240117
+- iso-codes 4.15.0 -> 4.16.0
+- iw 5.19 -> 6.7
+- json-glib 1.6.6 -> 1.8.0
+- kbd 2.6.3 -> 2.6.4
+- kexec-tools 2.0.27 -> 2.0.28
+- kmod 30 -> 31
+- kmscube git -> 0.0.1+git
+- libadwaita 1.4.2 -> 1.5.0
+- libbsd 0.11.7 -> 0.12.1
+- libcap-ng 0.8.3 -> 0.8.4
+- libcap-ng-python 0.8.3 -> 0.8.4
+- libcomps 0.1.19 -> 0.1.20
+- libdnf 0.71.0 -> 0.73.0
+- libdrm 2.4.116 -> 2.4.120
+- libffi 3.4.4 -> 3.4.6
+- libgit2 1.7.1 -> 1.7.2
+- libgloss 4.3.0+git -> 4.4.0+git
+- libgpg-error 1.47 -> 1.48
+- libhandy 1.8.2 -> 1.8.3
+- libical 3.0.16 -> 3.0.17
+- libidn2 2.3.4 -> 2.3.7
+- libinput 1.24.0 -> 1.25.0
+- libksba 1.6.4 -> 1.6.6
+- libmicrohttpd 0.9.77 -> 1.0.1
+- libnl 3.8.0 -> 3.9.0
+- libnotify 0.8.2 -> 0.8.3
+- libpciaccess 0.17 -> 0.18
+- libpcre2 10.42 -> 10.43
+- libpng 1.6.40 -> 1.6.42
+- libproxy 0.5.3 -> 0.5.4
+- libpsl 0.21.2 -> 0.21.5
+- librepo 1.16.0 -> 1.17.0
+- librsvg 2.56.3 -> 2.57.1
+- libsdl2 2.28.4 -> 2.30.0
+- libseccomp 2.5.4 -> 2.5.5
+- libsecret 0.21.1 -> 0.21.4
+- libsolv 0.7.26 -> 0.7.28
+- libsoup 3.4.2 -> 3.4.4
+- libstd-rs 1.70.0 -> 1.75.0
+- libtest-warnings-perl 0.031 -> 0.033
+- libtirpc 1.3.3 -> 1.3.4
+- libubootenv 0.3.4 -> 0.3.5
+- libunistring 1.1 -> 1.2
+- liburi-perl 5.21 -> 5.27
+- libusb1 1.0.26 -> 1.0.27
+- libuv 1.46.0 -> 1.48.0
+- libva 2.19.0 -> 2.20.0
+- libva-initial 2.19.0 -> 2.20.0
+- libwpe 1.14.1 -> 1.14.2
+- libxext 1.3.5 -> 1.3.6
+- libxkbcommon 1.5.0 -> 1.6.0
+- libxkbfile 1.1.2 -> 1.1.3
+- libxml-parser-perl 2.46 -> 2.47
+- libxml2 2.11.7 -> 2.12.5
+- libxmlb 0.3.14 -> 0.3.15
+- libxrandr 1.5.3 -> 1.5.4
+- libxvmc 1.0.13 -> 1.0.14
+- lighttpd 1.4.71 -> 1.4.74
+- linux-firmware 20240220 -> 20240312
+- linux-libc-headers 6.5 -> 6.6
+- linux-yocto 6.1.78+git, 6.5.13+git -> 6.6.23+git
+- linux-yocto-dev 6.6+git -> 6.9+git
+- linux-yocto-rt 6.1.78+git, 6.5.13+git -> 6.6.23+git
+- linux-yocto-tiny 6.1.78+git, 6.5.13+git -> 6.6.23+git
+- llvm 17.0.3 -> 18.1.2
+- lsof 4.98.0 -> 4.99.3
+- ltp 20230516 -> 20240129
+- lttng-modules 2.13.10 -> 2.13.12
+- lttng-ust 2.13.6 -> 2.13.7
+- lzip 1.23 -> 1.24
+- makedepend 1.0.8 -> 1.0.9
+- man-db 2.11.2 -> 2.12.0
+- man-pages 6.05.01 -> 6.06
+- mc 4.8.30 -> 4.8.31
+- mesa 23.2.1 -> 24.0.2
+- mesa-gl 23.2.1 -> 24.0.2
+- meson 1.2.2 -> 1.3.1
+- minicom 2.8 -> 2.9
+- mmc-utils 0.1+git (613495ecaca9…) -> 0.1+git (b5ca140312d2…)
+- mpg123 1.31.3 -> 1.32.5
+- newlib 4.3.0+git -> 4.4.0+git
+- nghttp2 1.57.0 -> 1.61.0
+- numactl 2.0.16 -> 2.0.18
+- ofono 2.1 -> 2.4
+- opensbi 1.2 -> 1.4
+- openssh 9.5p1 -> 9.6p1
+- openssl 3.1.5 -> 3.2.1
+- opkg 0.6.2 -> 0.6.3
+- opkg-utils 0.6.2 -> 0.6.3
+- orc 0.4.34 -> 0.4.38
+- ovmf edk2-stable202308 -> edk2-stable202402
+- p11-kit 0.25.0 -> 0.25.3
+- pango 1.51.0 -> 1.52.0
+- pciutils 3.10.0 -> 3.11.1
+- piglit 1.0+gitr (71c21b1157c4…) -> 1.0+gitr (22eaf6a91cfd…)
+- pkgconf 2.0.3 -> 2.1.1
+- psplash 0.1+git (44afb7506d43…) -> 0.1+git (ecc191375669…)
+- ptest-runner 2.4.2+git -> 2.4.3+git
+- pulseaudio 16.1 -> 17.0
+- puzzles 0.0+git (2d9e414ee316…) -> 0.0+git (80aac3104096…)
+- python3 3.11.5 -> 3.12.2
+- python3-alabaster 0.7.13 -> 0.7.16
+- python3-attrs 23.1.0 -> 23.2.0
+- python3-babel 2.12.1 -> 2.14.0
+- python3-bcrypt 4.0.1 -> 4.1.2
+- python3-beartype 0.15.0 -> 0.17.2
+- python3-build 1.0.3 -> 1.1.1
+- python3-certifi 2023.7.22 -> 2024.2.2
+- python3-cffi 1.15.1 -> 1.16.0
+- python3-cryptography 41.0.4 -> 42.0.5
+- python3-cryptography-vectors 41.0.4 -> 42.0.5
+- python3-cython 0.29.36 -> 3.0.8
+- python3-dbusmock 0.29.1 -> 0.31.1
+- python3-dtschema 2023.7 -> 2024.2
+- python3-git 3.1.36 -> 3.1.42
+- python3-gitdb 4.0.10 -> 4.0.11
+- python3-hatch-fancy-pypi-readme 23.1.0 -> 24.1.0
+- python3-hatch-vcs 0.3.0 -> 0.4.0
+- python3-hatchling 1.18.0 -> 1.21.1
+- python3-hypothesis 6.86.2 -> 6.98.15
+- python3-idna 3.4 -> 3.6
+- python3-importlib-metadata 6.8.0 -> 7.0.1
+- python3-iso8601 2.0.0 -> 2.1.0
+- python3-jsonschema 4.17.3 -> 4.21.1
+- python3-license-expression 30.1.1 -> 30.2.0
+- python3-lxml 4.9.3 -> 5.0.0
+- python3-mako 1.2.4 -> 1.3.2
+- python3-markdown 3.4.4 -> 3.5.2
+- python3-markupsafe 2.1.3 -> 2.1.5
+- python3-more-itertools 10.1.0 -> 10.2.0
+- python3-numpy 1.26.0 -> 1.26.4
+- python3-packaging 23.1 -> 23.2
+- python3-pathspec 0.11.2 -> 0.12.1
+- python3-pbr 5.11.1 -> 6.0.0
+- python3-pip 23.2.1 -> 24.0
+- python3-pluggy 1.3.0 -> 1.4.0
+- python3-poetry-core 1.7.0 -> 1.9.0
+- python3-psutil 5.9.5 -> 5.9.8
+- python3-pyasn1 0.5.0 -> 0.5.1
+- python3-pycairo 1.24.0 -> 1.26.0
+- python3-pycryptodome 3.19.0 -> 3.20.0
+- python3-pycryptodomex 3.19.0 -> 3.20.0
+- python3-pygments 2.16.1 -> 2.17.2
+- python3-pyopenssl 23.2.0 -> 24.0.0
+- python3-pyrsistent 0.19.3 -> 0.20.0
+- python3-pytest 7.4.2 -> 8.0.2
+- python3-pytest-runner 6.0.0 -> 6.0.1
+- python3-pytz 2023.3 -> 2024.1
+- python3-ruamel-yaml 0.17.32 -> 0.18.6
+- python3-scons 4.5.2 -> 4.6.0
+- python3-setuptools 68.2.2 -> 69.1.1
+- python3-setuptools-rust 1.7.0 -> 1.9.0
+- python3-setuptools-scm 7.1.0 -> 8.0.4
+- python3-spdx-tools 0.8.1 -> 0.8.2
+- python3-sphinx-rtd-theme 1.3.0 -> 2.0.0
+- python3-sphinxcontrib-applehelp 1.0.4 -> 1.0.8
+- python3-sphinxcontrib-devhelp 1.0.2 -> 1.0.6
+- python3-sphinxcontrib-htmlhelp 2.0.1 -> 2.0.5
+- python3-sphinxcontrib-qthelp 1.0.3 -> 1.0.7
+- python3-sphinxcontrib-serializinghtml 1.1.5 -> 1.1.10
+- python3-subunit 1.4.2 -> 1.4.4
+- python3-testtools 2.6.0 -> 2.7.1
+- python3-trove-classifiers 2023.9.19 -> 2024.2.23
+- python3-typing-extensions 4.8.0 -> 4.10.0
+- python3-unittest-automake-output 0.1 -> 0.2
+- python3-urllib3 2.0.7 -> 2.2.1
+- python3-wcwidth 0.2.6 -> 0.2.13
+- python3-wheel 0.41.2 -> 0.42.0
+- qemu 8.1.4 -> 8.2.1
+- qemu-native 8.1.4 -> 8.2.1
+- qemu-system-native 8.1.4 -> 8.2.1
+- repo 2.36.1 -> 2.42
+- resolvconf 1.91 -> 1.92
+- rpm 4.18.1 -> 4.19.1
+- rt-tests 2.5 -> 2.6
+- rust 1.70.0 -> 1.75.0
+- rust-cross-canadian 1.70.0 -> 1.75.0
+- rust-llvm 1.70.0 -> 1.75.0
+- shaderc 2023.6 -> 2023.8
+- shadow 4.13 -> 4.14.2
+- shared-mime-info 2.2 -> 2.4
+- socat 1.7.4.4 -> 1.8.0.0
+- spirv-headers 1.3.261.1 -> 1.3.275.0
+- spirv-tools 1.3.261.1 -> 1.3.275.0
+- sqlite3 3.43.2 -> 3.45.1
+- strace 6.5 -> 6.7
+- stress-ng 0.16.05 -> 0.17.05
+- subversion 1.14.2 -> 1.14.3
+- swig 4.1.1 -> 4.2.1
+- sysstat 12.7.4 -> 12.7.5
+- systemd 254.4 -> 255.4
+- systemd-boot 254.4 -> 255.4
+- systemd-bootchart 234 -> 235
+- systemtap 4.9 -> 5.0
+- systemtap-native 4.9 -> 5.0
+- taglib 1.13.1 -> 2.0
+- ttyrun 2.29.0 -> 2.31.0
+- u-boot 2023.10 -> 2024.01
+- u-boot-tools 2023.10 -> 2024.01
+- update-rc.d 0.8 (8636cf478d42…) -> 0.8 (b8f950105010…)
+- usbutils 015 -> 017
+- util-linux 2.39.2 -> 2.39.3
+- util-linux-libuuid 2.39.2 -> 2.39.3
+- vala 0.56.13 -> 0.56.15
+- valgrind 3.21.0 -> 3.22.0
+- vim 9.0.2190 -> 9.1.0114
+- vim-tiny 9.0.2190 -> 9.1.0114
+- virglrenderer 0.10.4 -> 1.0.1
+- vte 0.72.2 -> 0.74.2
+- vulkan-headers 1.3.261.1 -> 1.3.275.0
+- vulkan-loader 1.3.261.1 -> 1.3.275.0
+- vulkan-tools 1.3.261.1 -> 1.3.275.0
+- vulkan-validation-layers 1.3.261.1 -> 1.3.275.0
+- wayland-protocols 1.32 -> 1.33
+- webkitgtk 2.40.5 -> 2.44.0
+- weston 12.0.2 -> 13.0.0
+- xkbcomp 1.4.6 -> 1.4.7
+- xkeyboard-config 2.39 -> 2.41
+- xprop 1.2.6 -> 1.2.7
+- xwayland 23.2.4 -> 23.2.5
+- xz 5.4.4 -> 5.4.6
+- zlib 1.3 -> 1.3.1
+
+
+Contributors to 5.0
+~~~~~~~~~~~~~~~~~~~
+
+Thanks to the following people who contributed to this release:
+
+- Adam Johnston
+- Adithya Balakumar
+- Adrian Freihofer
+- Alassane Yattara
+- Alejandro Hernandez Samaniego
+- Aleksey Smirnov
+- Alexander Kanavin
+- Alexander Lussier-Cullen
+- Alexander Sverdlin
+- Alexandre Belloni
+- Alexandre Truong
+- Alex Bennée
+- Alexis Lothoré
+- Alex Kiernan
+- Alex Stewart
+- André Draszik
+- Anibal Limon
+- Anuj Mittal
+- Archana Polampalli
+- Arne Schwerdt
+- Bartosz Golaszewski
+- Baruch Siach
+- Bastian Krause
+- BELHADJ SALEM Talel
+- BELOUARGA Mohamed
+- Bruce Ashfield
+- Changhyeok Bae
+- Changqing Li
+- Charlie Johnston
+- Chen Qi
+- Chi Xu
+- Chris Laplante
+- Christian Taedcke
+- Christoph Vogtländer
+- Claus Stovgaard
+- Clay Chang
+- Clément Péron
+- Colin McAllister
+- Corentin Guillevic
+- Daniel Ammann
+- david d zuhn
+- David Reyna
+- Deepthi Hemraj
+- Denys Dmytriyenko
+- Derek Erdmann
+- Desone Burns
+- Dhairya Nagodra
+- Dmitry Baryshkov
+- Eero Aaltonen
+- Eilís 'pidge' Ní Fhlannagáin
+- Emil Kronborg
+- Enguerrand de Ribaucourt
+- Enrico Jörns
+- Enrico Scholz
+- Etienne Cordonnier
+- Fabien Mahot
+- Fabio Estevam
+- Fahad Arslan
+- Felix Moessbauer
+- Florian Wickert
+- Geoff Parker
+- Glenn Strauss
+- Harish Sadineni
+- Hongxu Jia
+- Ilya A. Kriveshko
+- Jamin Lin
+- Jan Vermaete
+- Jason Andryuk
+- Javier Tia
+- Jeremy A. Puhlman
+- Jérémy Rosen
+- Jermain Horsman
+- Jiang Kai
+- Joakim Tjernlund
+- Joao Marcos Costa
+- Joe Slater
+- Johan Bezem
+- Johannes Schneider
+- Jonathan GUILLOT
+- Jon Mason
+- Jörg Sommer
+- Jose Quaresma
+- Joshua Watt
+- Julien Stephan
+- Justin Bronder
+- Kai Kang
+- Kareem Zarka
+- Kevin Hao
+- Khem Raj
+- Konrad Weihmann
+- Lee Chee Yang
+- Lei Maohui
+- lixiaoyong
+- Logan Gunthorpe
+- Luca Ceresoli
+- luca fancellu
+- Lucas Stach
+- Ludovic Jozeau
+- Lukas Funke
+- Maanya Goenka
+- Malte Schmidt
+- Marcel Ziswiler
+- Marco Felsch
+- Marcus Folkesson
+- Marek Vasut
+- Mark Asselstine
+- Mark Hatle
+- Markus Fuchs
+- Markus Volk
+- Marlon Rodriguez Garcia
+- Marta Rybczynska
+- Martin Hundebøll
+- Martin Jansa
+- Massimiliano Minella
+- Maxin B. John
+- Max Krummenacher
+- Meenali Gupta
+- Michael Halstead
+- Michael Opdenacker
+- Michal Sieron
+- Mikko Rapeli
+- Ming Liu
+- Mingli Yu
+- Munehisa Kamata
+- Nick Owens
+- Niko Mauno
+- Ola x Nilsson
+- Oleh Matiusha
+- Patrick Williams
+- Paul Barker
+- Paul Eggleton
+- Paul Gortmaker
+- Pavel Zhukov
+- Peter A. Bigot
+- Peter Kjellerstedt
+- Peter Marko
+- Petr Vorel
+- Philip Balister
+- Philip Lorenz
+- Philippe Rivest
+- Piotr Łobacz
+- Priyal Doshi
+- Quentin Schulz
+- Ragesh Nair
+- Randolph Sapp
+- Randy MacLeod
+- Rasmus Villemoes
+- Renat Khalikov
+- Richard Haar
+- Richard Purdie
+- Robert Berger
+- Robert Joslyn
+- Robert P. J. Day
+- Robert Yang
+- Rodrigo M. Duarte
+- Ross Burton
+- Rouven Czerwinski
+- Ryan Eatmon
+- Sam Van Den Berge
+- Saul Wold
+- Sava Jakovljev
+- Sean Nyekjaer
+- Sergei Zhmylev
+- Shinji Matsunaga
+- Shubham Kulkarni
+- Simone Weiß
+- Siong W.LIM
+- Soumya Sambu
+- Sourav Kumar Pramanik
+- Stefan Herbrechtsmeier
+- Stéphane Veyret
+- Steve Sakoman
+- Sundeep KOKKONDA
+- Thomas Perrot
+- Thomas Wolber
+- Timon Bergelt
+- Tim Orling
+- Timotheus Giuliani
+- Tobias Hagelborn
+- Tom Hochstein
+- Tom Rini
+- Toni Lammi
+- Trevor Gamblin
+- Trevor Woerner
+- Ulrich Ölmann
+- Valek Andrej
+- venkata pyla
+- Victor Kamensky
+- Vijay Anusuri
+- Vikas Katariya
+- Vincent Davis Jr
+- Viswanath Kraleti
+- Vyacheslav Yurkov
+- Wang Mingyu
+- William A. Kennington III
+- William Hauser
+- William Lyu
+- Xiangyu Chen
+- Xiaotian Wu
+- Yang Xu
+- Yannick Rodriguez
+- Yash Shinde
+- Yi Zhao
+- Yoann Congal
+- Yogesh Tyagi
+- Yogita Urade
+- Zahir Hussain
+- Zang Ruochen
+- Zoltan Boszormenyi
+
+Repositories / Downloads for Yocto-5.0
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
diff --git a/documentation/overview-manual/concepts.rst b/documentation/overview-manual/concepts.rst
index 016577e07e..62f2327a7e 100644
--- a/documentation/overview-manual/concepts.rst
+++ b/documentation/overview-manual/concepts.rst
@@ -34,10 +34,10 @@ itself is of various types:
BitBake knows how to combine multiple data sources together and refers
to each data source as a layer. For information on layers, see the
-":ref:`dev-manual/common-tasks:understanding and creating layers`"
+":ref:`dev-manual/layers:understanding and creating layers`"
section of the Yocto Project Development Tasks Manual.
-Following are some brief details on these core components. For
+Here are some brief details on these core components. For
additional information on how these components interact during a build,
see the
":ref:`overview-manual/concepts:openembedded build system concepts`"
@@ -69,12 +69,10 @@ type the following::
$ bitbake matchbox-desktop
-Several different
-versions of ``matchbox-desktop`` might exist. BitBake chooses the one
-selected by the distribution configuration. You can get more details
-about how BitBake chooses between different target versions and
-providers in the
-":ref:`bitbake:bitbake-user-manual/bitbake-user-manual-execution:preferences`" section
+Several different versions of ``matchbox-desktop`` might exist. BitBake chooses
+the one selected by the distribution configuration. You can get more details
+about how BitBake chooses between different target versions and providers in the
+":ref:`bitbake-user-manual/bitbake-user-manual-execution:preferences`" section
of the BitBake User Manual.
BitBake also tries to execute any dependent tasks first. So for example,
@@ -107,10 +105,9 @@ Classes
-------
Class files (``.bbclass``) contain information that is useful to share
-between recipes files. An example is the
-:ref:`autotools <ref-classes-autotools>` class,
+between recipes files. An example is the :ref:`ref-classes-autotools` class,
which contains common settings for any application that is built with
-the `GNU Autotools <https://en.wikipedia.org/wiki/GNU_Autotools>`__.
+the :wikipedia:`GNU Autotools <GNU_Autotools>`.
The ":ref:`ref-manual/classes:Classes`" chapter in the Yocto Project
Reference Manual provides details about classes and how to use them.
@@ -149,7 +146,7 @@ Conforming to a known structure allows BitBake to make assumptions
during builds on where to find types of metadata. You can find
procedures and learn about tools (i.e. ``bitbake-layers``) for creating
layers suitable for the Yocto Project in the
-":ref:`dev-manual/common-tasks:understanding and creating layers`"
+":ref:`dev-manual/layers:understanding and creating layers`"
section of the Yocto Project Development Tasks Manual.
OpenEmbedded Build System Concepts
@@ -233,13 +230,12 @@ for creating actual configuration files when you source
:ref:`structure-core-script`, which is the
build environment script.
-Sourcing the build environment script creates a
-:term:`Build Directory` if one does not
-already exist. BitBake uses the Build Directory for all its work during
-builds. The Build Directory has a ``conf`` directory that contains
-default versions of your ``local.conf`` and ``bblayers.conf``
+Sourcing the build environment script creates a :term:`Build Directory`
+if one does not already exist. BitBake uses the :term:`Build Directory`
+for all its work during builds. The Build Directory has a ``conf`` directory
+that contains default versions of your ``local.conf`` and ``bblayers.conf``
configuration files. These default configuration files are created only
-if versions do not already exist in the Build Directory at the time you
+if versions do not already exist in the :term:`Build Directory` at the time you
source the build environment setup script.
Because the Poky repository is fundamentally an aggregation of existing
@@ -251,9 +247,9 @@ assumes the script is executed from within a cloned or unpacked version
of Poky.
Depending on where the script is sourced, different sub-scripts are
-called to set up the Build Directory (Yocto or OpenEmbedded).
+called to set up the :term:`Build Directory` (Yocto or OpenEmbedded).
Specifically, the script ``scripts/oe-setup-builddir`` inside the poky
-directory sets up the Build Directory and seeds the directory (if
+directory sets up the :term:`Build Directory` and seeds the directory (if
necessary) with configuration files appropriate for the Yocto Project
development environment.
@@ -269,7 +265,7 @@ The ``local.conf`` file provides many basic variables that define a
build environment. Here is a list of a few. To see the default
configurations in a ``local.conf`` file created by the build environment
script, see the
-:yocto_git:`local.conf.sample </poky/tree/meta-poky/conf/local.conf.sample>`
+:yocto_git:`local.conf.sample </poky/tree/meta-poky/conf/templates/default/local.conf.sample>`
in the ``meta-poky`` layer:
- *Target Machine Selection:* Controlled by the
@@ -308,7 +304,7 @@ during the build. By default, the layers listed in this file include
layers minimally needed by the build system. However, you must manually
add any custom layers you have created. You can find more information on
working with the ``bblayers.conf`` file in the
-":ref:`dev-manual/common-tasks:enabling your layer`"
+":ref:`dev-manual/layers:enabling your layer`"
section in the Yocto Project Development Tasks Manual.
The files ``site.conf`` and ``auto.conf`` are not created by the
@@ -399,7 +395,7 @@ a ``README`` file as good practice and especially if the layer is to be
distributed, a configuration directory, and recipe directories. You can
learn about the general structure for layers used with the Yocto Project
in the
-":ref:`dev-manual/common-tasks:creating your own layer`"
+":ref:`dev-manual/layers:creating your own layer`"
section in the
Yocto Project Development Tasks Manual. For a general discussion on
layers and the many layers from which you can draw, see the
@@ -428,7 +424,7 @@ The distribution layer provides policy configurations for your
distribution. Best practices dictate that you isolate these types of
configurations into their own layer. Settings you provide in
``conf/distro/distro.conf`` override similar settings that BitBake finds
-in your ``conf/local.conf`` file in the Build Directory.
+in your ``conf/local.conf`` file in the :term:`Build Directory`.
The following list provides some explanation and references for what you
typically find in the distribution layer:
@@ -531,10 +527,11 @@ repositories, which is not the default behavior, and store them in the
variable.
Judicious use of a :term:`DL_DIR` directory can save the build system a trip
-across the Internet when looking for files. A good method for using a
-download directory is to have :term:`DL_DIR` point to an area outside of
-your Build Directory. Doing so allows you to safely delete the Build
-Directory if needed without fear of removing any downloaded source file.
+across the Internet when looking for files. A good method for using a download
+directory is to have :term:`DL_DIR` point to an area outside of your
+:term:`Build Directory`. Doing so allows you to safely delete the
+:term:`Build Directory` if needed without fear of removing any downloaded
+source file.
The remainder of this section provides a deeper look into the source
files and the mirrors. Here is a more detailed look at the source file
@@ -557,33 +554,30 @@ Local Projects
~~~~~~~~~~~~~~
Local projects are custom bits of software the user provides. These bits
-reside somewhere local to a project - perhaps a directory into which the
+reside somewhere local to a project --- perhaps a directory into which the
user checks in items (e.g. a local directory containing a development
source tree used by the group).
-The canonical method through which to include a local project is to use
-the :ref:`externalsrc <ref-classes-externalsrc>`
-class to include that local project. You use either the ``local.conf``
-or a recipe's append file to override or set the recipe to point to the
-local directory on your disk to pull in the whole source tree.
+The canonical method through which to include a local project is to use the
+:ref:`ref-classes-externalsrc` class to include that local project. You use
+either the ``local.conf`` or a recipe's append file to override or set the
+recipe to point to the local directory on your disk to pull in the whole
+source tree.
Source Control Managers (Optional)
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Another place from which the build system can get source files is with
-:ref:`bitbake:bitbake-user-manual/bitbake-user-manual-fetching:fetchers` employing various Source
-Control Managers (SCMs) such as Git or Subversion. In such cases, a
-repository is cloned or checked out. The
-:ref:`ref-tasks-fetch` task inside
-BitBake uses the :term:`SRC_URI`
-variable and the argument's prefix to determine the correct fetcher
-module.
+:ref:`bitbake-user-manual/bitbake-user-manual-fetching:fetchers` employing
+various Source Control Managers (SCMs) such as Git or Subversion. In such
+cases, a repository is cloned or checked out. The :ref:`ref-tasks-fetch` task
+inside BitBake uses the :term:`SRC_URI` variable and the argument's prefix to
+determine the correct fetcher module.
.. note::
For information on how to have the OpenEmbedded build system generate
- tarballs for Git repositories and place them in the
- DL_DIR
+ tarballs for Git repositories and place them in the :term:`DL_DIR`
directory, see the :term:`BB_GENERATE_MIRROR_TARBALLS`
variable in the Yocto Project Reference Manual.
@@ -629,26 +623,23 @@ types, and you specify which classes to enable through the
:term:`PACKAGE_CLASSES`
variable. Before placing the packages into package feeds, the build
process validates them with generated output quality assurance checks
-through the :ref:`insane <ref-classes-insane>`
-class.
+through the :ref:`ref-classes-insane` class.
-The package feed area resides in the Build Directory. The directory the
+The package feed area resides in the :term:`Build Directory`. The directory the
build system uses to temporarily store packages is determined by a
combination of variables and the particular package manager in use. See
the "Package Feeds" box in the illustration and note the information to
the right of that area. In particular, the following defines where
package files are kept:
-- :term:`DEPLOY_DIR`: Defined as
- ``tmp/deploy`` in the Build Directory.
+- :term:`DEPLOY_DIR`: Defined as ``tmp/deploy`` in the :term:`Build Directory`.
- ``DEPLOY_DIR_*``: Depending on the package manager used, the package
type sub-folder. Given RPM, IPK, or DEB packaging and tarball
creation, the
:term:`DEPLOY_DIR_RPM`,
- :term:`DEPLOY_DIR_IPK`,
- :term:`DEPLOY_DIR_DEB`, or
- :term:`DEPLOY_DIR_TAR`,
+ :term:`DEPLOY_DIR_IPK`, or
+ :term:`DEPLOY_DIR_DEB`
variables are used, respectively.
- :term:`PACKAGE_ARCH`: Defines
@@ -661,9 +652,8 @@ tasks to generate packages and place them into the package holding area
(e.g. ``do_package_write_ipk`` for IPK packages). See the
":ref:`ref-tasks-package_write_deb`",
":ref:`ref-tasks-package_write_ipk`",
-":ref:`ref-tasks-package_write_rpm`",
and
-":ref:`ref-tasks-package_write_tar`"
+":ref:`ref-tasks-package_write_rpm`"
sections in the Yocto Project Reference Manual for additional
information. As an example, consider a scenario where an IPK packaging
manager is being used and there is package architecture support for both
@@ -684,7 +674,7 @@ closer look at each of those areas.
.. note::
Documentation for the BitBake tool is available separately. See the
- BitBake User Manual
+ :doc:`BitBake User Manual <bitbake:index>`
for reference material on BitBake.
Source Fetching
@@ -696,35 +686,28 @@ code:
.. image:: figures/source-fetching.png
:width: 100%
-The :ref:`ref-tasks-fetch` and
-:ref:`ref-tasks-unpack` tasks fetch
-the source files and unpack them into the
-:term:`Build Directory`.
+The :ref:`ref-tasks-fetch` and :ref:`ref-tasks-unpack` tasks fetch
+the source files and unpack them into the :term:`Build Directory`.
.. note::
- For every local file (e.g.
- file://
- ) that is part of a recipe's
- SRC_URI
- statement, the OpenEmbedded build system takes a checksum of the file
- for the recipe and inserts the checksum into the signature for the
- do_fetch
- task. If any local file has been modified, the
- do_fetch
- task and all tasks that depend on it are re-executed.
-
-By default, everything is accomplished in the Build Directory, which has
-a defined structure. For additional general information on the Build
-Directory, see the ":ref:`structure-core-build`" section in
+ For every local file (e.g. ``file://``) that is part of a recipe's
+ :term:`SRC_URI` statement, the OpenEmbedded build system takes a
+ checksum of the file for the recipe and inserts the checksum into
+ the signature for the :ref:`ref-tasks-fetch` task. If any local
+ file has been modified, the :ref:`ref-tasks-fetch` task and all
+ tasks that depend on it are re-executed.
+
+By default, everything is accomplished in the :term:`Build Directory`, which has
+a defined structure. For additional general information on the
+:term:`Build Directory`, see the ":ref:`structure-core-build`" section in
the Yocto Project Reference Manual.
-Each recipe has an area in the Build Directory where the unpacked source
-code resides. The :term:`S` variable points
-to this area for a recipe's unpacked source code. The name of that
-directory for any given recipe is defined from several different
-variables. The preceding figure and the following list describe the
-Build Directory's hierarchy:
+Each recipe has an area in the :term:`Build Directory` where the unpacked
+source code resides. The :term:`S` variable points to this area for a recipe's
+unpacked source code. The name of that directory for any given recipe is
+defined from several different variables. The preceding figure and the
+following list describe the :term:`Build Directory`'s hierarchy:
- :term:`TMPDIR`: The base directory
where the OpenEmbedded build system performs all its work during the
@@ -806,7 +789,7 @@ For more information on how the source directories are created, see the
":ref:`overview-manual/concepts:source fetching`" section. For
more information on how to create patches and how the build system
processes patches, see the
-":ref:`dev-manual/common-tasks:patching code`"
+":ref:`dev-manual/new-recipe:patching code`"
section in the
Yocto Project Development Tasks Manual. You can also see the
":ref:`sdk-manual/extensible:use \`\`devtool modify\`\` to modify the source of an existing component`"
@@ -849,15 +832,13 @@ This step in the build process consists of the following tasks:
are specific to configurations for the source code being built by the
recipe.
- If you are using the
- :ref:`autotools <ref-classes-autotools>` class,
+ If you are using the :ref:`ref-classes-autotools` class,
you can add additional configuration options by using the
:term:`EXTRA_OECONF` or
:term:`PACKAGECONFIG_CONFARGS`
variables. For information on how this variable works within that
- class, see the
- :ref:`autotools <ref-classes-autotools>` class
- :yocto_git:`here </poky/tree/meta/classes/autotools.bbclass>`.
+ class, see the :ref:`ref-classes-autotools` class
+ :yocto_git:`here </poky/tree/meta/classes-recipe/autotools.bbclass>`.
- *do_compile*: Once a configuration task has been satisfied,
BitBake compiles the source using the
@@ -892,7 +873,7 @@ following as well as other items: splitting out debugging symbols,
looking at shared library dependencies between packages, and looking at
package relationships.
-The ``do_packagedata`` task creates package metadata based on the
+The :ref:`ref-tasks-packagedata` task creates package metadata based on the
analysis such that the build system can generate the final packages. The
:ref:`ref-tasks-populate_sysroot`
task stages (copies) a subset of the files installed by the
@@ -905,7 +886,7 @@ the analysis and package splitting process use several areas:
individual packages.
- :term:`PKGDESTWORK`: A
- temporary work area (i.e. ``pkgdata``) used by the ``do_package``
+ temporary work area (i.e. ``pkgdata``) used by the :ref:`ref-tasks-package`
task to save package metadata.
- :term:`PKGDEST`: The parent
@@ -929,13 +910,13 @@ the analysis and package splitting process use several areas:
- :term:`STAGING_DIR_TARGET`:
The path for the sysroot used when a component that is built to
execute on a system and it generates code for yet another machine
- (e.g. cross-canadian recipes).
+ (e.g. :ref:`ref-classes-cross-canadian` recipes).
The :term:`FILES` variable defines the
files that go into each package in
:term:`PACKAGES`. If you want
details on how this is accomplished, you can look at
-:yocto_git:`package.bbclass </poky/tree/meta/classes/package.bbclass>`.
+:yocto_git:`package.bbclass </poky/tree/meta/classes-global/package.bbclass>`.
Depending on the type of packages being created (RPM, DEB, or IPK), the
:ref:`do_package_write_* <ref-tasks-package_write_deb>`
@@ -1006,22 +987,20 @@ data files are deleted from the root filesystem. As part of the final
stage of package installation, post installation scripts that are part
of the packages are run. Any scripts that fail to run on the build host
are run on the target when the target system is first booted. If you are
-using a
-:ref:`read-only root filesystem <dev-manual/common-tasks:creating a read-only root filesystem>`,
+using a
+:ref:`read-only root filesystem <dev-manual/read-only-rootfs:creating a read-only root filesystem>`,
all the post installation scripts must succeed on the build host during
the package installation phase since the root filesystem on the target
is read-only.
-The final stages of the ``do_rootfs`` task handle post processing. Post
+The final stages of the :ref:`ref-tasks-rootfs` task handle post processing. Post
processing includes creation of a manifest file and optimizations.
-The manifest file (``.manifest``) resides in the same directory as the
-root filesystem image. This file lists out, line-by-line, the installed
-packages. The manifest file is useful for the
-:ref:`testimage <ref-classes-testimage*>` class,
+The manifest file (``.manifest``) resides in the same directory as the root
+filesystem image. This file lists out, line-by-line, the installed packages.
+The manifest file is useful for the :ref:`ref-classes-testimage` class,
for example, to determine whether or not to run specific tests. See the
-:term:`IMAGE_MANIFEST`
-variable for additional information.
+:term:`IMAGE_MANIFEST` variable for additional information.
Optimizing processes that are run across the image include ``mklibs``
and any other post-processing commands as defined by the
@@ -1036,7 +1015,7 @@ the
variable. This variable specifies a list of functions to call before the
build system creates the final image output files.
-The build system dynamically creates ``do_image_*`` tasks as needed,
+The build system dynamically creates :ref:`do_image_* <ref-tasks-image>` tasks as needed,
based on the image types specified in the
:term:`IMAGE_FSTYPES` variable.
The process turns everything into an image file or a set of image files
@@ -1085,7 +1064,7 @@ the extensible SDK (eSDK):
For more information on the cross-development toolchain generation,
see the ":ref:`overview-manual/concepts:cross-development toolchain generation`"
section. For information on advantages gained when building a
- cross-development toolchain using the do_populate_sdk task, see the
+ cross-development toolchain using the :ref:`ref-tasks-populate_sdk` task, see the
":ref:`sdk-manual/appendix-obtain:building an sdk installer`" section in
the Yocto Project Application Development and the Extensible Software
Development Kit (eSDK) manual.
@@ -1100,13 +1079,13 @@ actually install. For information on the variables listed in the figure,
see the ":ref:`overview-manual/concepts:application development sdk`"
section.
-The ``do_populate_sdk`` task helps create the standard SDK and handles
+The :ref:`ref-tasks-populate_sdk` task helps create the standard SDK and handles
two parts: a target part and a host part. The target part is the part
built for the target hardware and includes libraries and headers. The
host part is the part of the SDK that runs on the
:term:`SDKMACHINE`.
-The ``do_populate_sdk_ext`` task helps create the extensible SDK and
+The :ref:`ref-tasks-populate_sdk_ext` task helps create the extensible SDK and
handles host and target parts differently than its counter part does for
the standard SDK. For the extensible SDK, the task encapsulates the
build system, which includes everything needed (host and target) for the
@@ -1130,8 +1109,7 @@ checksum <overview-manual/concepts:checksums (signatures)>`.
.. note::
- This naming scheme assumes that
- BB_SIGNATURE_HANDLER
+ This naming scheme assumes that :term:`BB_SIGNATURE_HANDLER`
is "OEBasicHash", which is almost always the case in current
OpenEmbedded.
@@ -1161,12 +1139,12 @@ Since :term:`STAMPS_DIR` is usually a subdirectory of :term:`TMPDIR`, removing
properly be rerun to repopulate :term:`TMPDIR`.
If you want some task to always be considered "out of date", you can
-mark it with the :ref:`nostamp <bitbake:bitbake-user-manual/bitbake-user-manual-metadata:variable flags>`
+mark it with the :ref:`nostamp <bitbake-user-manual/bitbake-user-manual-metadata:variable flags>`
varflag. If some other task depends on such a task, then that task will
also always be considered out of date, which might not be what you want.
For details on how to view information about a task's signature, see the
-":ref:`dev-manual/common-tasks:viewing task variable dependencies`"
+":ref:`dev-manual/debugging:viewing task variable dependencies`"
section in the Yocto Project Development Tasks Manual.
Setscene Tasks and Shared State
@@ -1198,7 +1176,7 @@ the work involved would be equal to or greater than the underlying task.
In the build system, the common tasks that have setscene variants are
:ref:`ref-tasks-package`,
-``do_package_write_*``,
+:ref:`do_package_write_* <ref-tasks-package_write_deb>`,
:ref:`ref-tasks-deploy`,
:ref:`ref-tasks-packagedata`, and
:ref:`ref-tasks-populate_sysroot`.
@@ -1208,15 +1186,15 @@ end result.
The build system has knowledge of the relationship between these tasks
and other preceding tasks. For example, if BitBake runs
``do_populate_sysroot_setscene`` for something, it does not make sense
-to run any of the ``do_fetch``, ``do_unpack``, ``do_patch``,
-``do_configure``, ``do_compile``, and ``do_install`` tasks. However, if
-``do_package`` needs to be run, BitBake needs to run those other tasks.
+to run any of the :ref:`ref-tasks-fetch`, :ref:`ref-tasks-unpack`, :ref:`ref-tasks-patch`,
+:ref:`ref-tasks-configure`, :ref:`ref-tasks-compile`, and :ref:`ref-tasks-install` tasks. However, if
+:ref:`ref-tasks-package` needs to be run, BitBake needs to run those other tasks.
It becomes more complicated if everything can come from an sstate cache
because some objects are simply not required at all. For example, you do
not need a compiler or native tools, such as quilt, if there isn't anything
-to compile or patch. If the ``do_package_write_*`` packages are available
-from sstate, BitBake does not need the ``do_package`` task data.
+to compile or patch. If the :ref:`do_package_write_* <ref-tasks-package_write_deb>` packages are available
+from sstate, BitBake does not need the :ref:`ref-tasks-package` task data.
To handle all these complexities, BitBake runs in two phases. The first
is the "setscene" stage. During this stage, BitBake first checks the
@@ -1262,15 +1240,12 @@ this output:
":doc:`/ref-manual/images`" chapter in the Yocto Project Reference
Manual.
-The build process writes images out to the :term:`Build Directory`
-inside the
-``tmp/deploy/images/machine/`` folder as shown in the figure. This
+The build process writes images out to the :term:`Build Directory` inside
+the ``tmp/deploy/images/machine/`` folder as shown in the figure. This
folder contains any files expected to be loaded on the target device.
-The :term:`DEPLOY_DIR` variable
-points to the ``deploy`` directory, while the
-:term:`DEPLOY_DIR_IMAGE`
-variable points to the appropriate directory containing images for the
-current configuration.
+The :term:`DEPLOY_DIR` variable points to the ``deploy`` directory, while the
+:term:`DEPLOY_DIR_IMAGE` variable points to the appropriate directory
+containing images for the current configuration.
- kernel-image: A kernel binary file. The
:term:`KERNEL_IMAGETYPE`
@@ -1344,10 +1319,9 @@ can initialize the environment before using the tools.
the :doc:`/sdk-manual/index` manual.
All the output files for an SDK are written to the ``deploy/sdk`` folder
-inside the :term:`Build Directory` as
-shown in the previous figure. Depending on the type of SDK, there are
-several variables to configure these files. Here are the variables
-associated with an extensible SDK:
+inside the :term:`Build Directory` as shown in the previous figure. Depending
+on the type of SDK, there are several variables to configure these files.
+The variables associated with an extensible SDK are:
- :term:`DEPLOY_DIR`: Points to
the ``deploy`` directory.
@@ -1401,7 +1375,7 @@ This next list, shows the variables associated with a standard SDK:
Lists packages that make up the target part of the SDK (i.e. the part
built for the target hardware).
-- :term:`SDKPATH`: Defines the
+- :term:`SDKPATHINSTALL`: Defines the
default SDK installation path offered by the installation script.
- :term:`SDK_HOST_MANIFEST`:
@@ -1435,7 +1409,7 @@ toolchain construction and use.
:width: 100%
Most of the work occurs on the Build Host. This is the machine used to
-build images and generally work within the the Yocto Project
+build images and generally work within the Yocto Project
environment. When you run
:term:`BitBake` to create an image, the
OpenEmbedded build system uses the host ``gcc`` compiler to bootstrap a
@@ -1486,12 +1460,11 @@ relocatable SDK used to develop applications. When you run the
installer, it installs the toolchain, which contains the development
tools (e.g., ``gcc-cross-canadian``, ``binutils-cross-canadian``, and
other ``nativesdk-*`` tools), which are tools native to the SDK (i.e.
-native to :term:`SDK_ARCH`), you
-need to cross-compile and test your software. The figure shows the
-commands you use to easily build out this toolchain. This
-cross-development toolchain is built to execute on the
-:term:`SDKMACHINE`, which might or
-might not be the same machine as the Build Host.
+native to :term:`SDK_ARCH`), you need to cross-compile and test your
+software. The figure shows the commands you use to easily build out
+this toolchain. This cross-development toolchain is built to execute on the
+:term:`SDKMACHINE`, which might or might not be the same machine as
+the Build Host.
.. note::
@@ -1596,15 +1569,15 @@ them if they are deemed to be valid.
the shared state packages. Consequently, there are considerations that
affect maintaining shared state feeds. For information on how the
build system works with packages and can track incrementing :term:`PR`
- information, see the ":ref:`dev-manual/common-tasks:automatically incrementing a package version number`"
+ information, see the ":ref:`dev-manual/packages:automatically incrementing a package version number`"
section in the Yocto Project Development Tasks Manual.
- The code in the build system that supports incremental builds is
complex. For techniques that help you work around issues
related to shared state code, see the
- ":ref:`dev-manual/common-tasks:viewing metadata used to create the input signature of a shared state task`"
+ ":ref:`dev-manual/debugging:viewing metadata used to create the input signature of a shared state task`"
and
- ":ref:`dev-manual/common-tasks:invalidating shared state to force a task to run`"
+ ":ref:`dev-manual/debugging:invalidating shared state to force a task to run`"
sections both in the Yocto Project Development Tasks Manual.
The rest of this section goes into detail about the overall incremental
@@ -1641,7 +1614,7 @@ you a good idea of when the task's data changes.
To complicate the problem, there are things that should not be included
in the checksum. First, there is the actual specific build path of a
-given task - the :term:`WORKDIR`. It
+given task --- the :term:`WORKDIR`. It
does not matter if the work directory changes because it should not
affect the output for target packages. Also, the build process has the
objective of making native or cross packages relocatable.
@@ -1700,7 +1673,7 @@ need to fix this situation.
Thus far, this section has limited discussion to the direct inputs into
a task. Information based on direct inputs is referred to as the
"basehash" in the code. However, the question of a task's indirect
-inputs still exits - items already built and present in the
+inputs still exits --- items already built and present in the
:term:`Build Directory`. The checksum (or
signature) for a particular task needs to add the hashes of all the
tasks on which the particular task depends. Choosing which dependencies
@@ -1766,12 +1739,11 @@ half the problem of supporting a shared state. The other half of the
problem is being able to use checksum information during the build and
being able to reuse or rebuild specific components.
-The :ref:`sstate <ref-classes-sstate>` class is a
-relatively generic implementation of how to "capture" a snapshot of a
-given task. The idea is that the build process does not care about the
-source of a task's output. Output could be freshly built or it could be
-downloaded and unpacked from somewhere. In other words, the build
-process does not need to worry about its origin.
+The :ref:`ref-classes-sstate` class is a relatively generic implementation of
+how to "capture" a snapshot of a given task. The idea is that the build process
+does not care about the source of a task's output. Output could be freshly
+built or it could be downloaded and unpacked from somewhere. In other words,
+the build process does not need to worry about its origin.
Two types of output exist. One type is just about creating a directory
in :term:`WORKDIR`. A good example is
@@ -1782,10 +1754,9 @@ type of output occurs when a set of data is merged into a shared
directory tree such as the sysroot.
The Yocto Project team has tried to keep the details of the
-implementation hidden in the :ref:`sstate <ref-classes-sstate>` class. From a user's perspective,
+implementation hidden in the :ref:`ref-classes-sstate` class. From a user's perspective,
adding shared state wrapping to a task is as simple as this
-:ref:`ref-tasks-deploy` example taken
-from the :ref:`deploy <ref-classes-deploy>` class::
+:ref:`ref-tasks-deploy` example taken from the :ref:`ref-classes-deploy` class::
DEPLOYDIR = "${WORKDIR}/deploy-${PN}"
SSTATETASKS += "do_deploy"
@@ -1801,14 +1772,12 @@ from the :ref:`deploy <ref-classes-deploy>` class::
The following list explains the previous example:
-- Adding "do_deploy" to ``SSTATETASKS`` adds some required
- sstate-related processing, which is implemented in the
- :ref:`sstate <ref-classes-sstate>` class, to
- before and after the
- :ref:`ref-tasks-deploy` task.
+- Adding ``do_deploy`` to ``SSTATETASKS`` adds some required sstate-related
+ processing, which is implemented in the :ref:`ref-classes-sstate` class, to
+ before and after the :ref:`ref-tasks-deploy` task.
- The ``do_deploy[sstate-inputdirs] = "${DEPLOYDIR}"`` declares that
- ``do_deploy`` places its output in ``${DEPLOYDIR}`` when run normally
+ :ref:`ref-tasks-deploy` places its output in ``${DEPLOYDIR}`` when run normally
(i.e. when not using the sstate cache). This output becomes the input
to the shared state cache.
@@ -1818,15 +1787,15 @@ The following list explains the previous example:
.. note::
- If ``do_deploy`` is not already in the shared state cache or if its input
+ If :ref:`ref-tasks-deploy` is not already in the shared state cache or if its input
checksum (signature) has changed from when the output was cached, the task
runs to populate the shared state cache, after which the contents of the
shared state cache is copied to ${:term:`DEPLOY_DIR_IMAGE`}. If
- ``do_deploy`` is in the shared state cache and its signature indicates
+ :ref:`ref-tasks-deploy` is in the shared state cache and its signature indicates
that the cached output is still valid (i.e. if no relevant task inputs
have changed), then the contents of the shared state cache copies
directly to ${:term:`DEPLOY_DIR_IMAGE`} by the ``do_deploy_setscene`` task
- instead, skipping the ``do_deploy`` task.
+ instead, skipping the :ref:`ref-tasks-deploy` task.
- The following task definition is glue logic needed to make the
previous settings effective::
@@ -1836,36 +1805,33 @@ The following list explains the previous example:
}
addtask do_deploy_setscene
- ``sstate_setscene()`` takes the flags above as input and accelerates the ``do_deploy`` task
- through the shared state cache if possible. If the task was
- accelerated, ``sstate_setscene()`` returns True. Otherwise, it
- returns False, and the normal ``do_deploy`` task runs. For more
- information, see the ":ref:`bitbake:bitbake-user-manual/bitbake-user-manual-execution:setscene`"
- section in the BitBake User Manual.
+ ``sstate_setscene()`` takes the flags above as input and accelerates the
+ :ref:`ref-tasks-deploy` task through the shared state cache if possible. If
+ the task was accelerated, ``sstate_setscene()`` returns True. Otherwise, it
+ returns False, and the normal :ref:`ref-tasks-deploy` task runs. For more
+ information, see the ":ref:`bitbake-user-manual/bitbake-user-manual-execution:setscene`"
+ section in the BitBake User Manual.
-- The ``do_deploy[dirs] = "${DEPLOYDIR} ${B}"`` line creates
- ``${DEPLOYDIR}`` and ``${B}`` before the ``do_deploy`` task runs, and
- also sets the current working directory of ``do_deploy`` to ``${B}``.
- For more information, see the ":ref:`bitbake:bitbake-user-manual/bitbake-user-manual-metadata:variable flags`"
- section in the BitBake
- User Manual.
+- The ``do_deploy[dirs] = "${DEPLOYDIR} ${B}"`` line creates ``${DEPLOYDIR}``
+ and ``${B}`` before the :ref:`ref-tasks-deploy` task runs, and also sets the
+ current working directory of :ref:`ref-tasks-deploy` to ``${B}``. For more
+ information, see the ":ref:`bitbake-user-manual/bitbake-user-manual-metadata:variable flags`"
+ section in the BitBake User Manual.
.. note::
In cases where ``sstate-inputdirs`` and ``sstate-outputdirs`` would be
the same, you can use ``sstate-plaindirs``. For example, to preserve the
- ${:term:`PKGD`} and ${:term:`PKGDEST`} output from the ``do_package``
+ ${:term:`PKGD`} and ${:term:`PKGDEST`} output from the :ref:`ref-tasks-package`
task, use the following::
do_package[sstate-plaindirs] = "${PKGD} ${PKGDEST}"
-- The ``do_deploy[stamp-extra-info] = "${MACHINE_ARCH}"`` line appends
- extra metadata to the :ref:`stamp
- file <overview-manual/concepts:stamp files and the rerunning of tasks>`. In
- this case, the metadata makes the task specific to a machine's architecture.
- See
- ":ref:`bitbake:bitbake-user-manual/bitbake-user-manual-execution:the task list`"
+- The ``do_deploy[stamp-extra-info] = "${MACHINE_ARCH}"`` line appends extra
+ metadata to the :ref:`stamp file <overview-manual/concepts:stamp files and the rerunning of tasks>`.
+ In this case, the metadata makes the task specific to a machine's architecture.
+ See the ":ref:`bitbake-user-manual/bitbake-user-manual-execution:the task list`"
section in the BitBake User Manual for more information on the
``stamp-extra-info`` flag.
@@ -1975,7 +1941,7 @@ Thanks to this, the depending tasks will keep a previously recorded
task hash, and BitBake will be able to retrieve their output from
the Shared State cache, instead of re-executing them. Similarly, the
output of further downstream tasks can also be retrieved from Shared
-Shate.
+State.
If the output hash is unknown, a new entry will be created on the Hash
Equivalence server, matching the task hash to that output.
@@ -1997,6 +1963,15 @@ task output from the Shared State cache.
the stability of the task's output hash. Therefore, the effectiveness
of Hash Equivalence strongly depends on it.
+ Recipes that are not reproducible may have undesired behavior if hash
+ equivalence is enabled, since the non-reproducible diverging output maybe be
+ remapped to an older sstate object in the cache by the server. If a recipe
+ is non-reproducible in trivial ways, such as different timestamps, this is
+ likely not a problem. However recipes that have more dramatic changes (such
+ as completely different file names) will likely outright fail since the
+ downstream sstate objects are not actually equivalent to what was just
+ built.
+
This applies to multiple scenarios:
- A "trivial" change to a recipe that doesn't impact its generated output,
@@ -2023,7 +1998,7 @@ variables:
- :term:`BB_HASHSERVE_UPSTREAM`, when ``BB_HASHSERVE = "auto"``,
allowing to connect the local server to an upstream one.
-- :term:`bitbake:BB_SIGNATURE_HANDLER`, which must be set to ``OEEquivHash``.
+- :term:`bitbake:BB_SIGNATURE_HANDLER`, which must be set to ``OEEquivHash``.
Therefore, the default configuration in Poky corresponds to the
below settings::
@@ -2074,7 +2049,7 @@ dependencies, you must manually declare the dependencies.
located. For each shared library, the package that contains the
shared library is registered as providing the shared library. More
specifically, the package is registered as providing the
- `soname <https://en.wikipedia.org/wiki/Soname>`__ of the library. The
+ :wikipedia:`soname <Soname>` of the library. The
resulting shared-library-to-package mapping is saved globally in
:term:`PKGDATA_DIR` by the
:ref:`ref-tasks-packagedata`
@@ -2101,12 +2076,12 @@ dependencies, you must manually declare the dependencies.
:term:`PRIVATE_LIBS` inside
the package's recipe.
-- ``pcdeps``: During the ``do_package`` task of each recipe, all
+- ``pcdeps``: During the :ref:`ref-tasks-package` task of each recipe, all
pkg-config modules (``*.pc`` files) installed by the recipe are
located. For each module, the package that contains the module is
registered as providing the module. The resulting module-to-package
mapping is saved globally in :term:`PKGDATA_DIR` by the
- ``do_packagedata`` task.
+ :ref:`ref-tasks-packagedata` task.
Simultaneously, all pkg-config modules installed by the recipe are
inspected to see what other pkg-config modules they depend on. A
@@ -2147,12 +2122,12 @@ dependencies, you must manually declare the dependencies.
:term:`ALLOW_EMPTY` variable
for more information.
-The ``do_package`` task depends on the ``do_packagedata`` task of each
-recipe in :term:`DEPENDS` through use
-of a ``[``\ :ref:`deptask <bitbake:bitbake-user-manual/bitbake-user-manual-metadata:variable flags>`\ ``]``
-declaration, which guarantees that the required
-shared-library/module-to-package mapping information will be available
-when needed as long as :term:`DEPENDS` has been correctly set.
+The :ref:`ref-tasks-package` task depends on the :ref:`ref-tasks-packagedata`
+task of each recipe in :term:`DEPENDS` through use of a
+``[``\ :ref:`deptask <bitbake-user-manual/bitbake-user-manual-metadata:variable flags>`\ ``]``
+declaration, which guarantees that the required shared-library /
+module-to-package mapping information will be available when needed as long as
+:term:`DEPENDS` has been correctly set.
Fakeroot and Pseudo
===================
@@ -2162,8 +2137,8 @@ operations that are normally reserved for the root user (e.g.
:ref:`ref-tasks-install`,
:ref:`do_package_write* <ref-tasks-package_write_deb>`,
:ref:`ref-tasks-rootfs`, and
-:ref:`do_image* <ref-tasks-image>`). For example,
-the ``do_install`` task benefits from being able to set the UID and GID
+:ref:`do_image_* <ref-tasks-image>`). For example,
+the :ref:`ref-tasks-install` task benefits from being able to set the UID and GID
of installed files to arbitrary values.
One approach to allowing tasks to perform root-only operations would be
@@ -2214,3 +2189,173 @@ For more information, see the
BitBake User Manual. You can also reference the "`Why Not
Fakeroot? <https://github.com/wrpseudo/pseudo/wiki/WhyNotFakeroot>`__"
article for background information on Fakeroot and Pseudo.
+
+BitBake Tasks Map
+=================
+
+To understand how BitBake operates in the build directory and environment
+we can consider the following recipes and diagram, to have full picture
+about the tasks that BitBake runs to generate the final package file
+for the recipe.
+
+We will have two recipes as an example:
+
+- ``libhello``: A recipe that provides a shared library
+- ``sayhello``: A recipe that uses ``libhello`` library to do its job
+
+.. note::
+
+ ``sayhello`` depends on ``libhello`` at compile time as it needs the shared
+ library to do the dynamic linking process. It also depends on it at runtime
+ as the shared library loader needs to find the library.
+ For more details about dependencies check :ref:`ref-varlocality-recipe-dependencies`.
+
+``libhello`` sources are as follows:
+
+- ``LICENSE``: This is the license associated with this library
+- ``Makefile``: The file used by ``make`` to build the library
+- ``hellolib.c``: The implementation of the library
+- ``hellolib.h``: The C header of the library
+
+``sayhello`` sources are as follows:
+
+- ``LICENSE``: This is the license associated with this project
+- ``Makefile``: The file used by ``make`` to build the project
+- ``sayhello.c``: The source file of the project
+
+Before presenting the contents of each file, here are the steps
+that we need to follow to accomplish what we want in the first place,
+which is integrating ``sayhello`` in our root file system:
+
+#. Create a Git repository for each project with the corresponding files
+
+#. Create a recipe for each project
+
+#. Make sure that ``sayhello`` recipe :term:`DEPENDS` on ``libhello``
+
+#. Make sure that ``sayhello`` recipe :term:`RDEPENDS` on ``libhello``
+
+#. Add ``sayhello`` to :term:`IMAGE_INSTALL` to integrate it into
+ the root file system
+
+The contents of ``libhello/Makefile`` are::
+
+ LIB=libhello.so
+
+ all: $(LIB)
+
+ $(LIB): hellolib.o
+ $(CC) $< -Wl,-soname,$(LIB).1 -fPIC $(LDFLAGS) -shared -o $(LIB).1.0
+
+ %.o: %.c
+ $(CC) -c $<
+
+ clean:
+ rm -rf *.o *.so*
+
+.. note::
+
+ When creating shared libraries, it is strongly recommended to follow the Linux
+ conventions and guidelines (see `this article
+ <https://tldp.org/HOWTO/Program-Library-HOWTO/shared-libraries.html>`__
+ for some background).
+
+.. note::
+
+ When creating ``Makefile`` files, it is strongly recommended to use ``CC``, ``LDFLAGS``
+ and ``CFLAGS`` as BitBake will set them as environment variables according
+ to your build configuration.
+
+The contents of ``libhello/hellolib.h`` are::
+
+ #ifndef HELLOLIB_H
+ #define HELLOLIB_H
+
+ void Hello();
+
+ #endif
+
+The contents of ``libhello/hellolib.c`` are::
+
+ #include <stdio.h>
+
+ void Hello(){
+ puts("Hello from a Yocto demo \n");
+ }
+
+The contents of ``sayhello/Makefile`` are::
+
+ EXEC=sayhello
+ LDFLAGS += -lhello
+
+ all: $(EXEC)
+
+ $(EXEC): sayhello.c
+ $(CC) $< $(LDFLAGS) $(CFLAGS) -o $(EXEC)
+
+ clean:
+ rm -rf $(EXEC) *.o
+
+The contents of ``sayhello/sayhello.c`` are::
+
+ #include <hellolib.h>
+
+ int main(){
+ Hello();
+ return 0;
+ }
+
+The contents of ``libhello_0.1.bb`` are::
+
+ SUMMARY = "Hello demo library"
+ DESCRIPTION = "Hello shared library used in Yocto demo"
+
+ # NOTE: Set the License according to the LICENSE file of your project
+ # and then add LIC_FILES_CHKSUM accordingly
+ LICENSE = "CLOSED"
+
+ # Assuming the branch is main
+ # Change <username> accordingly
+ SRC_URI = "git://github.com/<username>/libhello;branch=main;protocol=https"
+
+ S = "${WORKDIR}/git"
+
+ do_install(){
+ install -d ${D}${includedir}
+ install -d ${D}${libdir}
+
+ install hellolib.h ${D}${includedir}
+ oe_soinstall ${PN}.so.${PV} ${D}${libdir}
+ }
+
+The contents of ``sayhello_0.1.bb`` are::
+
+ SUMMARY = "SayHello demo"
+ DESCRIPTION = "SayHello project used in Yocto demo"
+
+ # NOTE: Set the License according to the LICENSE file of your project
+ # and then add LIC_FILES_CHKSUM accordingly
+ LICENSE = "CLOSED"
+
+ # Assuming the branch is main
+ # Change <username> accordingly
+ SRC_URI = "git://github.com/<username>/sayhello;branch=main;protocol=https"
+
+ DEPENDS += "libhello"
+ RDEPENDS:${PN} += "libhello"
+
+ S = "${WORKDIR}/git"
+
+ do_install(){
+ install -d ${D}/usr/bin
+ install -m 0700 sayhello ${D}/usr/bin
+ }
+
+After placing the recipes in a custom layer we can run ``bitbake sayhello``
+to build the recipe.
+
+The following diagram shows the sequences of tasks that BitBake
+executes to accomplish that.
+
+.. image:: svg/bitbake_tasks_map.*
+ :width: 100%
diff --git a/documentation/overview-manual/development-environment.rst b/documentation/overview-manual/development-environment.rst
index f1001e0bd3..d79173ff55 100644
--- a/documentation/overview-manual/development-environment.rst
+++ b/documentation/overview-manual/development-environment.rst
@@ -39,10 +39,9 @@ Linus Torvalds in 1991. Conversely, a good example of a non-open source
project is the Windows family of operating systems developed by
Microsoft Corporation.
-Wikipedia has a good historical description of the Open Source
-Philosophy `here <https://en.wikipedia.org/wiki/Open_source>`__. You can
-also find helpful information on how to participate in the Linux
-Community
+Wikipedia has a good :wikipedia:`historical description of the Open Source
+Philosophy <Open_source>`. You can also find helpful information on how
+to participate in the Linux Community
`here <https://www.kernel.org/doc/html/latest/process/index.html>`__.
The Development Host
@@ -52,7 +51,7 @@ A development host or :term:`Build Host` is key to
using the Yocto Project. Because the goal of the Yocto Project is to
develop images or applications that run on embedded hardware,
development of those images and applications generally takes place on a
-system not intended to run the software - the development host.
+system not intended to run the software --- the development host.
You need to set up a development host in order to use it with the Yocto
Project. Most find that it is best to have a native Linux machine
@@ -94,7 +93,7 @@ are several ways of working in the Yocto Project environment:
through your Linux distribution and the Yocto Project.
For a general flow of the build procedures, see the
- ":ref:`dev-manual/common-tasks:building a simple image`"
+ ":ref:`dev-manual/building:building a simple image`"
section in the Yocto Project Development Tasks Manual.
- *Board Support Package (BSP) Development:* Development of BSPs
@@ -132,6 +131,14 @@ are several ways of working in the Yocto Project environment:
Toaster and on how to use Toaster in general, see the
:doc:`/toaster-manual/index`.
+- *Using the VSCode Extension:* You can use the `Yocto Project BitBake
+ <https://marketplace.visualstudio.com/items?itemName=yocto-project.yocto-bitbake>`__
+ extension for Visual Studio Code to start your BitBake builds through a
+ graphical user interface.
+
+ Learn more about the VSCode Extension on the `extension's marketplace page
+ <https://marketplace.visualstudio.com/items?itemName=yocto-project.yocto-bitbake>`__.
+
Yocto Project Source Repositories
=================================
@@ -170,10 +177,9 @@ these tarballs gives you a snapshot of the released files.
In summary, here is where you can get the project files needed for
development:
-- :yocto_git:`Source Repositories: <>` This area contains IDE
- Plugins, Matchbox, Poky, Poky Support, Tools, Yocto Linux Kernel, and
- Yocto Metadata Layers. You can create local copies of Git
- repositories for each of these areas.
+- :yocto_git:`Source Repositories: <>` This area contains Poky, Yocto
+ documentation, metadata layers, and Linux kernel. You can create local
+ copies of Git repositories for each of these areas.
.. image:: figures/source-repos.png
:width: 100%
@@ -182,28 +188,17 @@ development:
see the ":ref:`dev-manual/start:accessing source repositories`"
Section in the Yocto Project Development Tasks Manual.
-- :yocto_dl:`Index of /releases: </releases>` This is an index
- of releases such as Poky, Pseudo, installers for cross-development
- toolchains, miscellaneous support and all released versions of Yocto
- Project in the form of images or tarballs. Downloading and extracting
- these files does not produce a local copy of the Git repository but
- rather a snapshot of a particular release or image.
-
- .. image:: figures/index-downloads.png
- :align: center
- :width: 50%
-
- For steps on how to view and access these files, see the
- ":ref:`dev-manual/start:accessing index of releases`"
- section in the Yocto Project Development Tasks Manual.
+- :yocto_dl:`Yocto release archives: </releases/yocto>` This is where you can
+ download tarballs corresponding to each Yocto Project release. Downloading
+ and extracting these files does not produce a local copy of a Git repository
+ but rather a snapshot corresponding to a particular release.
-- *"DOWNLOADS" page for the* :yocto_home:`Yocto Project Website <>` *:*
-
- The Yocto Project website includes a "DOWNLOADS" page accessible
+- :yocto_home:`DOWNLOADS page </software-overview/downloads/>`:
+ The :yocto_home:`Yocto Project website <>` includes a "DOWNLOADS" page accessible
through the "SOFTWARE" menu that allows you to download any Yocto
Project release, tool, and Board Support Package (BSP) in tarball
- form. The tarballs are similar to those found in the
- :yocto_dl:`Index of /releases: </releases>` area.
+ form. The hyperlinks point to the tarballs under
+ :yocto_dl:`/releases/yocto/`.
.. image:: figures/yp-download.png
:width: 100%
@@ -245,8 +240,8 @@ and so forth.
For information on finding out who is responsible for (maintains) a
particular area of code in the Yocto Project, see the
- ":ref:`dev-manual/common-tasks:submitting a change to the yocto project`"
- section of the Yocto Project Development Tasks Manual.
+ ":doc:`../contributor-guide/identify-component`"
+ section of the Yocto Project and OpenEmbedded Contributor Guide.
The Yocto Project ``poky`` Git repository also has an upstream
contribution Git repository named ``poky-contrib``. You can see all the
@@ -277,8 +272,8 @@ push them into the "contrib" area and subsequently request that the
maintainer include them into an upstream branch. This process is called
"submitting a patch" or "submitting a change." For information on
submitting patches and changes, see the
-":ref:`dev-manual/common-tasks:submitting a change to the yocto project`"
-section in the Yocto Project Development Tasks Manual.
+":doc:`../contributor-guide/submit-changes`" section in the Yocto Project
+and OpenEmbedded Contributor Guide.
In summary, there is a single point of entry for changes into the
development branch of the Git repository, which is controlled by the
@@ -341,11 +336,10 @@ Book <https://book.git-scm.com>`__.
software on which to develop. The Yocto Project has two scripts named
``create-pull-request`` and ``send-pull-request`` that ship with the
release to facilitate this workflow. You can find these scripts in
- the ``scripts`` folder of the
- :term:`Source Directory`. For information
+ the ``scripts`` folder of the :term:`Source Directory`. For information
on how to use these scripts, see the
- ":ref:`dev-manual/common-tasks:using scripts to push a change upstream and request a pull`"
- section in the Yocto Project Development Tasks Manual.
+ ":ref:`contributor-guide/submit-changes:using scripts to push a change upstream and request a pull`"
+ section in the Yocto Project and OpenEmbedded Contributor Guide.
- *Patch Workflow:* This workflow allows you to notify the maintainer
through an email that you have a change (or patch) you would like
@@ -353,8 +347,8 @@ Book <https://book.git-scm.com>`__.
this type of change, you format the patch and then send the email
using the Git commands ``git format-patch`` and ``git send-email``.
For information on how to use these scripts, see the
- ":ref:`dev-manual/common-tasks:submitting a change to the yocto project`"
- section in the Yocto Project Development Tasks Manual.
+ ":doc:`../contributor-guide/submit-changes`" section in the Yocto Project
+ and OpenEmbedded Contributor Guide.
Git
===
@@ -608,26 +602,22 @@ licensing structures in place. License evolution for both Open Source
and Free Software has an interesting history. If you are interested in
this history, you can find basic information here:
-- `Open source license
- history <https://en.wikipedia.org/wiki/Open-source_license>`__
+- :wikipedia:`Open source license history <Open-source_license>`
-- `Free software license
- history <https://en.wikipedia.org/wiki/Free_software_license>`__
+- :wikipedia:`Free software license history <Free_software_license>`
In general, the Yocto Project is broadly licensed under the
Massachusetts Institute of Technology (MIT) License. MIT licensing
permits the reuse of software within proprietary software as long as the
license is distributed with that software. Patches to the Yocto Project
follow the upstream licensing scheme. You can find information on the
-MIT license `here <https://en.wikipedia.org/wiki/MIT_License>`__.
+MIT license :wikipedia:`here <MIT_License>`.
When you build an image using the Yocto Project, the build process uses
a known list of licenses to ensure compliance. You can find this list in
-the :term:`Source Directory` at
-``meta/files/common-licenses``. Once the build completes, the list of
-all licenses found and used during that build are kept in the
-:term:`Build Directory` at
-``tmp/deploy/licenses``.
+the :term:`Source Directory` at ``meta/files/common-licenses``. Once the
+build completes, the list of all licenses found and used during that build
+are kept in the :term:`Build Directory` at ``tmp/deploy/licenses``.
If a module requires a license that is not in the base list, the build
process generates a warning during the build. These tools make it easier
@@ -652,5 +642,5 @@ Project uses in the ``meta/files/common-licenses`` directory in your
For information that can help you maintain compliance with various open
source licensing during the lifecycle of a product created using the
Yocto Project, see the
-":ref:`dev-manual/common-tasks:maintaining open source license compliance during your product's lifecycle`"
+":ref:`dev-manual/licenses:maintaining open source license compliance during your product's lifecycle`"
section in the Yocto Project Development Tasks Manual.
diff --git a/documentation/overview-manual/figures/index-downloads.png b/documentation/overview-manual/figures/index-downloads.png
deleted file mode 100755
index d8d4475cee..0000000000
--- a/documentation/overview-manual/figures/index-downloads.png
+++ /dev/null
Binary files differ
diff --git a/documentation/overview-manual/figures/source-repos.png b/documentation/overview-manual/figures/source-repos.png
index 603300b6d2..56023d585d 100644
--- a/documentation/overview-manual/figures/source-repos.png
+++ b/documentation/overview-manual/figures/source-repos.png
Binary files differ
diff --git a/documentation/overview-manual/figures/yp-download.png b/documentation/overview-manual/figures/yp-download.png
index bfd12b678a..c8b50ec8a7 100644
--- a/documentation/overview-manual/figures/yp-download.png
+++ b/documentation/overview-manual/figures/yp-download.png
Binary files differ
diff --git a/documentation/overview-manual/intro.rst b/documentation/overview-manual/intro.rst
index a8091771f4..80446b3810 100644
--- a/documentation/overview-manual/intro.rst
+++ b/documentation/overview-manual/intro.rst
@@ -38,7 +38,7 @@ This manual does not give you the following:
procedures reside in other manuals within the Yocto Project
documentation set. For example, the :doc:`/dev-manual/index`
provides examples on how to perform
- various development tasks. As another example, the
+ various development tasks. As another example, the
:doc:`/sdk-manual/index` manual contains detailed
instructions on how to install an SDK, which is used to develop
applications for target hardware.
diff --git a/documentation/overview-manual/svg/bitbake_tasks_map.svg b/documentation/overview-manual/svg/bitbake_tasks_map.svg
new file mode 100644
index 0000000000..09ef36faae
--- /dev/null
+++ b/documentation/overview-manual/svg/bitbake_tasks_map.svg
@@ -0,0 +1,4 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!-- Do not edit this file with editors other than draw.io -->
+<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
+<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" version="1.1" width="12270px" height="3804px" viewBox="-0.5 -0.5 12270 3804" content="&lt;mxfile host=&quot;app.diagrams.net&quot; modified=&quot;2023-11-01T08:31:56.536Z&quot; agent=&quot;Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/118.0.0.0 Safari/537.36&quot; etag=&quot;p-CQVME_iMteI52En1Eq&quot; version=&quot;22.0.8&quot; type=&quot;device&quot;&gt;&lt;diagram name=&quot;Page-1&quot; id=&quot;c7558073-3199-34d8-9f00-42111426c3f3&quot;&gt;7V1bc6M6tv41rprzYAohro9JnO7O2X1JJZk9Z89LCtvEZtox3oA7nfn1R1yEQcjmLmRHPVV7YowxRp++dV9rAm9ef3/27d36m7d0NhNFXv6ewNlEUaCl6uj/oiPvyRFFM2FyZOW7y+QYOBx4dP/rpAfl9OjeXTpB4cTQ8zahuyseXHjbrbMIC8ds3/feiqe9eJvit+7slVM68LiwN+Wj/3KX4To5air64fgXx12t8TcD3UremduLnyvf22/T79t6Wyd559XGl0l/Y7C2l95b7hC8ncAb3/PC5K/X3zfOJnqu+Ikln/t05N3sln1nG9b5gPOn9ja72cPZi/UlfHp//N+7+WoKLTO5zi97s08fRnq74Tt+OvHvc6LLgAm8flu7ofO4sxfRu28IEOjYOnzdpG+/uJvNjbfxfPQ6fhrwemkH6+zjL942/GS/upsIK1/duePboett0Tc+2tsgPeHR2/vx9ddhiCCgaPAK/Qf9tOg/0QmBtPK81caxd24gLbzX+I1FEJ/66SW5PPqz8AWacp1+RfnRpU/zl+OHzu/cofRRfna8Vyf00TVl/K6lpuuaYt7Er98OAFLM9Ng6Dx4tPWinoF1lFz+sHvojXcAmi4l3XW7tnCUCevrS88O1t/K29ub2cPT6cPSr5+3SVfqPE4bv6T6196FXXGP02Pz3/0MvZMnQ8Ou/8m/OosckZ6/es1fLq2i7opeLjR0E7uJp7W6TNz65G3z9AmAWe//XeaAnwN97fIHSpQ9tf+WEJ89Mt2G0fifx6DsbdIe/ikRGg1b8UfT07ffcCTvPRY8jd+X76EAO5oZqFGBu6QTLVH1AT6nyAOzkHg4wz35Me+SrssEI+b/dMA989PKv3FsH2Ecv3vN7IPlUo91SQH4Q+t7PTDpBgmknClzKC8dRsjNz78jxv4+0e7BUG3v3mFZRSABACmniE4psnv7EMPsne7RCDWioBphytRqAl7CgBijqUGoAwoxYzDaLCWuspWIwXUugKsOv5SpSxVKZc4nrip6ijiRvfmmhVXNpARhsaXUGS3uJy5nR6SnKVdluUxmUlq5a/ywubidttLsyelA/K023FFsHm+38dUusMFaqlpbVt2bZzeSHJiPDpyVsihj/ICABUObC/jBBUephAjxmfUyx5kg/fyDbQ1UFhLmDMFThWUIYyOrJDwzlfyqrbgLDY2NYxVrZuWFYHZiH9+EP7fr+38Gnuf63/Cd0vb9n0ykQEOYQwup5ahLoE+xZmBrL1Ddhigb0ztIO7Wn09zTwF/EZ+t/7KAB7nSAkwgdCxxFsxMjIcAFnV/4iMsgX0YO7RkhHhqbjHy6J/lpF/x++7vBtpJfG75Q2XGNzH+/OdJvlPTlZvJl766y+6W+Q3nGzbPrTvDgDGv5tWLO74X/ZxrhV186Se2fHTmjAN84ZAb15/s9hGIgDmDVhwQEYKKOb8RhIZ6S3tXI2nuapS9LpMjKqQVsaV7QFNG0UISa81yNAr3e3Tl3o0Y1Ozn1/GUZzCD3glY7RQtpOIQBXM/3to4BRhSpfPIjvnDP9Dd/D/HBAnRjX3/759enu29XNl+enq4fPt0/PPx4nxiyn6M3JCwjlr6PyB4vKn0qLPLPV/nB6ghDd58qWsLbo7j2psSNbQi7ZcuPO1+j2PUF6/ZAeVHQJOzZS3tOUsY1eHCBh7XYTFHbCnK1BYXxlz2R3zhmFBfa7oLAeKcwyinrb+PyF6wfPjr8u2W6tnd6l633TWKugLMQZxLjUyjwdlDVPnj5MTDYzhwTSzxHp2nkiHRB1V4ygfl6k3sKG/zC4NziL0Fht6lvPys3zcaDFR1JiU0rV9REoVTHalDcI7YEPqCt67/EdJlCfamNoDzKf2YuyBKa+LDwRfTlTie4Y8sieCCifldLKA2S6UGftzEZD4YI6gSwXE7+ruNPST54/UAnZeYUDPgqIFU6KFxqDGEB1BA0AtqrAESgeHMW9B60YoVjDgGKKYv2sHL4fBcVQ46QlVmMU68YoKD4rx8OHQTEnkbfmKDbHcJ9BU6CYRxSbxnmiuNSdkJFnjM+yOuEZGzRHx0yxOVqODq5OONWfTdT6dmVNnFJfo6kSZ6VLbRynPUrTyl7bafAWh2tF6VI7MNIXv3cJ3pGr+KxcQhJiu/HsZSCkZD9SclrqZDp+ATruFHYOuSmXLUprlxLhrsLc0BefpUQrN1QEcw3FXBCOzFzoFsZR6FOFLnuRU81OKHR9TIBooxTyANYurKjXZUVOkp0VIv1INSq8MsT5BoteawCWS6cQXa7382hxJUST9mtEejF37gPH39qvTnY0plDpUOZ0CVya225O/I+2EaEOLbjsiVGBQlKqSkknUiBLToVwnEzlzEgWnMqCU+tWfA6VAkIJExLuQ5lAePKj0k8dQN7YZ15iZ6Ui8jMSPeud6flQSC/ouSd61samZ4BTshvRc8EtKKpK2PGsVlt3xSnuvFj0Wpl/OLHohUHfD78BItPXoBTVa0y5TSvHib/ZPx30XJzLWN2cpHrRov+lX52XYPE/mmzT4389rT1WL9K1z7Jb8o2wcG+/wgimoRZfhWfly2kzCJcHFHaQZXrdQK8KOZNlejm49vXu5vb74+3F0Qob1bhEH5RpfGzpQwFtWuAydFR0bkRak3HEmGGgg9pKNx8zkohmFxasKNEiTtdZ+CPwQ81x6Iv7W9rZ4WJ9cSzKl3JmURRz1uzKPFeqJbtKsqwXGNa0YAOORe/cO76LnprjT/rzMPMA8S6EWjcvQcGoHHvaEcmQ5klCJYfSyBCwYNRyzkTsrt24c2khKHVYSqU4ctlSataDjjMHW+gMNe2KE0wy9LBpxV4RozvYlJGajYuuu6f6SFVLVd667hplW4AH6greA9/zwunSCcKl6wsW64fFshY3J2gMKCx5DMoj56iIrmLHG95U05k1Wh3Q6TvnjM58Z+HunGnKaoLN+mEzC9RgM8A07InbOgs244jNao9EMDirJDP4HIngvtorR5BYPyQGoSzhrkYneAx7mBjR2LgBfEFjJ8ipmsawD5QXGjNZDZYcZxLaKcwVQhIfB4C9t9XuKEcNLuXoPhCOjb6kqCpLSjEHkpYHp0KcBc5Ijo7T84k/HuQBkh2oz6ybLwf6byfVEYF8NqNwt4vNfimMiN6MCE0iGoxalHm4GpQMpvxnnZU7hAfgdCGp2rlyFmf6mclnEGrjzgVB9UNQqmZl1HOCoJjrZ2XDIMvbEamQ/ebtQKUooIAMygAwKd56rNcPsPzlIiVcaCsFnlQeZCAQ0IkEFDI5DxhSubMkNQA9XPKWVdaQCyCQyok2Agb9wkBVasNgMC6waDpIp3XPPfM056lQQmb7i1SD5V73bLC2AFaTPMB9SfIrq2cY6D9XztRbmCEM24P2kZdeSn5P7sP4KANFs8y6MZJWjuSiQ4Losi5rFY1QymVChG8TAPJKya/u3FJFJwqSAGbhujOmyA8Mk3CfLXaOrW8RS72Ha3e7ij4dwRYbb/Njgev5wZ6TX7zNMjIC5XBtR7/PDSIg+k7gxD+XfsHHvx4ffvx4ep7dPTweue6bG/+cOfpeeeHtXLRHI+LzHak/8ZIvFuWuTHnQ6lUyodGkGJV0RWK4zOxW1e+d3PwNSL6tiCqIF1FHpYC6zrYBMr5bMbsmE5pZFbOTQ4YZMXurwWsddTUVFtzEkmVZFdshfkWWF7aJuxUTBBrvIspOPOt9lfVOqlGf2LsTu9W+Ak01JlU1RtlXfLZ1XnrP7jYI7egX1HO1I80gLG5le+OutujvBYJbtBmvI/3BXdibq/SNV3e5TEJLTuD+155vsNWdLi66rnY90WbRtRBRBAlnANrOy5SXY/oX1/uvvmKlGCSsKe1MLYpiRRomPVa8lWfAXLqjjlULGLO41kf8stSiWmM4tw3gM20LUdbC2764q72fGZNz/8BXgsY4oDEcZDRoOGZLW2YJxSLGyKo3gElxB7Btt6LQ4gpckFja70cQGJcEhqPm9H5SVTHz4ehMoU3i4wLP+y0iyp8C0HwDWpHJTty06ZJsLQulss4uXbkD7kAegvIiW6/DQfgS/yucN8rWyEUtcnDHZyV3E/889OUy0Ha/k88QV9Hp8Y16WwstgrsLnGqNxg526PegFy/u70gLIpJBib1BuMFy72i6BowL2jXAqLNraO3rB9w1lV2PxK4xxK4Z0/qjaEuM90ilC0PsEVPskREly5S0MCgeE8Z7ppzbwYmFgT6+iwcYCBODZ0iX8ktlmjubsYlR9gIKQUBcxRKCYExBAGptG7ayAFZ6T8W2AbLYN2NKG7VGpJzxruGzb1QUc/B2+40dOs8NW+EJxYkFlDVAJnNToJyN4GGEZeFlqpYAQEiAMYuezBrbhqkEgBrzEUrN0sk7p8qKFHQFZ23VGPzce1OadqmysKQqVeTKAo3oasIoWRbSrHViP/VaDMo1FBuEqci0UWo9jkqhwkyrGECDsFpwoRizPBKrYXBUsxrepJx0scnunDOrS4xZ7m/MMgSczVlW1LKDTMxZHia3VMFdkU80mGScXKoqpdW/1Gm4vC3+6GNys2YC3A9yzKk8jcbkinJIRVVr23gaHzbesYzloyZe6QOGysDCU8tpzIH9nhSTiYmNA5MnB1Nw1XKIBK//xa0+oyrC0irX1o+GqyGEeG6GaMZxwZ5Qte5kAohHn48tJbXGnlAdT2pk6wlVaQmswhNazYaqBiTLKtoMHDQnUseZSCCmC5xak9qd6/oP5HREE5+pumJAWZ9OT9Xkb0JZ5mUVPMYPj2lKXR7TVL54DN85ZzwmBkT1yGIWjxOi0PMZhcfEhKi+ua+2oxbPxBm6+3A5dybrxV3RNLg341ErO1l5oNV51MxX0GpPymE0eco6/CtAztIo8XHGQ6gUnU8U4t51U1kCAo79wHEKiH7Q1JHwMtMMDX3k4QNV8rnofSu6juf2wlxS40OZ65iuSmTaA1WVKLqslfQm7u0QbZRtfCeKHB0tzjT4EHqEUVuPMBjpEWhPEUlPMhGtqTvFAOgVF+ppiEH5jtOCqGM3VvqArlV54+Gp8wfyxevVzSQqS0jUUgnJONLvEX1x/F1y/j11Ylz/68fDH7O7h4mB3vyUJC8ePluvoAXtiK/23NlMBqsY20SXRyy9dHxMikvnxd7H9xcgkexuV08xcSuHA1+dl5BrljotwEpiOKqt2m7RUqePapLG64+LZ2SCG2Qf+uRVR5LSiU94Ly+BM4xRgzPcOVMn8ba5ZE2yurosjSg2UTuJlJi0MjD9Tek1QA+6KTQhOQKT1sOL5kYfbOqZgoPKnIF5JmDMLYxBCcY0I0vRaX7UAYHMpwf/UQCZVyADrdpVwBzFfA7pqK15CxQzR7ECJQ7ZmE+Xq2BjbnE8zRLheIIxn310ngSMeYWxCrhUjit76IwVAVu6vgAzt2AmRttZBiWay3JIu2LwORHK3S42e4RNAWZ+wQx1naRmGp5pw1QGxDOf+dMPtzd397fP6XRrgWleMW2ZUDKL00QNSsYNALoEWLqVoQZK6OCq/Vgh56FmYkGxNJ0D+B4FUHWWgZmeWV0wh+HES6a2yWfAQngWuCVJAEuzcwy1HHdjbZOZfAYshKuXYyCTEQseYMxnxGImYMwtjDVYjiDzgGQ+YxYir4d3PMtEAyONMkGJbVaPyaebLOr/7zvRw372nYW7E1MAuGxdYahEhiatnblCK6IYbgoALpoT7Zp6a9ek6kXPEeKxekOyBmzXpFg0dbLT+n6MdjMmIYNorWayzGVGrWYstbR0XBU+fUC/n5UGgmtUKXPWXNtiNdCCv8r4DwlAKHPmeLb4zKEQLUJ6Kx42DS47hFhGC+YTHUL4o76643qg3HuTyo4IrJxdO2bWjaC/nuhPjzp5FOjPolgRzNt34JYiotMbN1yWKWc1uIyvTm/ZnXPGZRt3LnisHx6zTKuU6EfhMcZqXCbUc8CL3Zdo5aX1ZSwyNwMHTK0oxw6dz8bK9IRy2YGNG0BJgSeV8+UEAjqRAJEqgRBAG2ZP7749HAeUvdsFEEjlprYCBv3CQFVqw2A4LqBlGgwZ5LD9RaroypygpI+1JcOUNJIH0CivrK4MN29ErrSV24z5HqnOcO0G0UPaBN4k7vUT2u42OuKF6+h8OZoyGL1+8aNvwYd1+zXC53Ye7Ca5Ced+lgbg7JAx5GwXrhNI6GOztHES+pW5d6Iv9pMnQl5gbgdRiD8CmJy9S32y5d+Ez9/jA6vFIqd578nzJoWB6jd9fCNakh16cD6i/qD+V8v2dtnt22udj26qwfM427SNlCWLLFukyUsME2takTN12qB6WszfGEwnAnwa5Gnt0/Ps9vFJpN9xnH6nk6mktAKoQ8oKGw2PotAxjNngv2N/ZRTSqhe0meTdnJXTPcXYMYhn2VV6QQHOrOLFC6qMOnWgiE9D4HN8fOp84RNU5uIvsuU6SG74soj+Vxbmn91wvb9sB3u1DEfvKFBVtZ7yR6cqIMvqoFV2reAyt7zczfKJBwAOn1k6S+/5xQkX67Ite8Y21AWmvkf1bGQOkCpTUM0y+R2C6r7fydodgAcK3iM6WSYa+eheptMejWrfWHKV/AiNxg4KtAjuLnCq2d4Oduj3oBcv7u9IQhA6UW0i1nQNGJe0b6KusDX2DWS6bxQhDIQw6FkYaGMLA0UIg0phoAlhwJswoOwbtsIA8tk4BgmDhbd9cVd73xECgW9gA6AW0wkshZJLANjCWhHioNI2UIQ8GHXbgOptw1gY8Nm1JhYGcXKAEAW8Y1onMK3R8gtpofQBUV3pNReyIOqDKWTBiPsG1tk3bKWBymcKCpIG+y1C2E8hDPgGtULoNzTPJ1tJoFYau0ISCKNgVEFgVu8ZxlKgTZ0pw8rSC2oDAnHqfnX9qMpXZooKTYESDlHSe35d/NGmg8eBrBIZMcrpOeIWME6dPxlkjrgKBdPxh2G1/xw8NhgGwDj5gWFAnD1Yzoy2KJhz0YmGDCv5DxV9KbIUysAeWlPDPqr49+EP7fr+38Gnuf63/Cd0vb9n0ynOKGHHnHnerJpqkvVsmuTzq0+nVxe4ts1YFB5w2YWftTI/05eeDxXDJPzO+AccY2fyfLxfjmokOKBIP38oMi8P2tx4C3sjJXR6CbzJvMEvpZ0dpHX4Vcv82UcHBPom4rwlbIENaxLg+dSJ1mBDSntO+kIqfbNhR+dNOdVzPt/Y744fCA7ptZcSjUOoOS/DtVHRKuM17b3COd/ykfL9/Kivqpr9ZdLaIGpmsHK2aFWTpgVh1E+h+psmiC6NbACIMYvNlejQ9f139HKavLj/M36rWcn+IZR0t6Xdy8oNj/ykZHJD9IsWa3uLdj9arbDTbyp+Vy8dBxrtbu4IvNiHoA9DStFK+1ijzaWGB5WhULClDbeXFS7N+GhUiX0yR9/FB/7x3YvXB53tBP+TQ6h7fNOJ+C1XsSgyr9OkZCtTZdxw7QK1SveWiOCKVB6u0jppzdLZhnA1kQBXuWlUsWnGnK2F53DjpnW02VqMNw23VZLuNgjt6BcI9YlnTAMIVNK6ADIlVMM2B04ThZLV2dBCGoy6c5ClUb1t2IoDXSmvsxi3MK5bXqs9Okbna3RMduecKRdRVr29EqNj+vIv6oQDxaA4F2kjkAf0n1hcAu/+j8+zi0bdWfcSBboFJZzjg6FMGU0Pdcmghc0H6ydqsoqbZ1PfjO5j36rSlQrBdtGuMZPd1VIe106MnSqq48lwmTejKleUTEciPzFQfhF+tDlpcBVbeLvo93qRPUdvWl8MW85ykVaqNyI73/kdzaZzt6vjp+eu/Sl4D3zPC6feNgIXzSLrJBuO+B+4C3wOO/BcB4akF91vtDHZyAajTrgbUFnRK/3W4wVBC4qycMFx6kjQDKS4qAS1UlzLbF1wejmtU7jgSBecaFY27s5RoYRLP/kJypjlgXDCCze2fl57dn3/+nlHHuYzxJcqF8E02G0KOYCX5xZh6Ywz5WpnHHXi3YAKbmUsbjRv3O3j00Uj77wdchaRY0TzxukUKA/nijOMUSRz5phr4pf7EFLZqiuVDciZVOYzRBHY7/FE2otmRabyWDGJQSkmZUYZ2/CYIaL83DEZ5qcaTMbZODF855wx2T7wBYn1RWLQJKuJLcq8ZRXSBi0OyGNtAqMjaWOC+6oZrbLwGAwT+qwRqZShQY4NT+42/SQB5T7Clkal/3wUXp0j/Ale7YtXoQSUIq8atJa4GqRGJYejVjy5jwK+YGdvOwU9PnmbpePHRcS+Y4cIINGv8qNiYjv+7853AidGuEutT8YHsmrh+6ubP64+3zYpzv5l+24cXYzHul+74bX9M/run1vvLbq1t7Ud3UAyd/0tvb24zHm3jyueEQ+vohP3QRzoRwecWvf66e5rhxt1ftuvu036hOt+WXRyXCz+vVGxuIy2Q1zr/ebGiFl5k4aV3g2/sJiG8RLDJH76blSLK7vBkIjIjiX4FrXltRhMkzUJFtsY0mrLoQKoBAZUMzs+AInxKUFFZsXZxIcNtSSgOcisMCsjeiKzIpoFLDIrRtw5AJKCgYPMCrxPheeTH+vfNMrW/5Ezrb7N/448zGdge+nsNt779KiSnLWm2f1c5Qix1LGmDXEKk79iWoiscJefYbXpaytIcVhSrB3YtjgLB+E754wUKWwYeygSP8Hz1cPNl6KvQlDf0NRHbbTFmPqqG221tkAQLcU+zthbOq/nHPR3r0F9l9VNrWsunXmDa8qeX+uq7u5n7atKZT/H09oJ8g5f75e7jB3B8/fyydV3k/pTntHz6/vx4Uujx9jgKSZ+4/pXR4+zwdVjwRe7Zn0nNkARrUdb/Sb2R9eDGma+m69Xj4+NPNIvMUTC1EGeoeCwXgu0QxL/fVRcN+i9pK7peHvZu50TM3Dphmo6re++f7l9uHuq//VtvXfCi0zEwYgOAjQXskprwTio/1gF2iiKcZpfgP+uM8ihbRW2KLzO9OfqGT24e9vIhdeHqSc4ZJw2cTtWd136gCZXFGqrhM+b+MBkmDptqxyuwemoU1kCU1+WqEp7pl5UOzYuQWVn3gceWKpO5LBalNJpBdd5FFy5Cpk206PyXt3jravyHtrBz+jeN0GkQSydSMGIjnuNotDN9ZqbNkpwC/9eo4B8U0tmAENmEDumseo7nD3S3vLp8HD2x3fKW7qVo70y9zZL6l6ptmEKMMqbLHTLcIIHJiS5QVmwvDBGYWu/xu8m+TfRHbbZMS2yVOqBNb30w3CXPuq5qmWbVrY8GZ5cWBhPeYGZlrpxZ08dF969W1qKQkxepvVAgbSpesAYygmnyqOkcfOw8u3NFlWu2y9KlUerfKMPj2rT3KC3KYo5+/ikSYx3YzrDZ/LhZyVSAUdfYL76H2Q3zllAKonSDyP0OEAa61R6aJA2Kst5sUccRuPUJ7UMrPOAmi78VLeAcgA/XkeCOh6EnBd1/tnt/dcffz3P7h6ea+vjx4LagmAaEoyqS7g9Op6GSKnkBorFtlRHlfnssn3Icn9+89H59RErkuJHINgm7mCtKGipeb3Z1EAmib2qXJl1JFLigXHKESNS4gffN0T+E9N8eHo9NXNbvFl4uo2ZXbDTeYBOm3h+LT347kH7z+rd/e/1p6vdf7b6dbiWnWntNiI4esaLFgxoWjDV0S0014aaqwYI01jFzTxGy7xUQXUNW4cCc/8QNgqSsErYIhvz7v6PvmM3D/ff2obpjl90dtsg7CL/o6SIL9bO4mf8xJ1d9FzzekJ2Un64sUhua1IiTXTOgaopYbIr5LfRrEYg4SKjAbYgn+VLByfHRTtHz7s1J5IgZHknpGizJkWmDNadUwV8ekKevt0LMHMMZhMW7TKFAmSmbWZVwGcN1eyrYGUugXyCgXVF0k2W2FWUFj6FHoNek0bR/nbJ9rUcEdyptrWcCp2Da3rfboVjOeqmLKmWnP0jQjOmIammZiBFJfkvoUUnP3mw5n0qHsfDGYOLoMyFBmWQ0lK0Mg2D4uJhG5PBMkbEZE7EZEwRkxlz2wCiNIq2bZiGZMwW2lO3iIzx8UIyHTQhpW63IlXpvVtRu3JBiJU3DPF0LPvRckHyA3qqWw1a/Zc92IK0ECZlQ5OyF04kWxTRhmAz9YcorFiRPgE7swCraq+bUWkbO/I8qZQaqVZrEmnWpIWXSLVS6Z1j2zTm/o/P/3q4e7qd3T00Ci/Wi1jmrt4szlgzeJm7fqN46+C1mEP1hiEN6YY/o1nBarE/DJOSO+78Xv2HdC2rKCGhCSRKSoUKdMmkOF+N0iyJ/sgJKkfJqXPj/q/uwtnG7YXijIGk7/06brvToDV75IPydntE1c7zxl00ALP9ElviNb/DDqOu8Scgf/LrkgZOcwexTNz5P+12hH97sH/Ff5a/I3kkWaOApqW2X+9unuPG/c83X/54/GeThBH3ZZJOIiDvaZOtXSxgtl5Y60HefP3xeDtrT265sz5MX31KzXEPpDNViJk0hkWbFAKVw6imQv2uPFwnDhUe93Z3Jp1UIzqMwUgAvA/i6v+4s1fgoCeL6CSWdwt351B6ks3t5AMxMtAOcf1M95TQX+m3rO0g+xlJ97LWTQXIrMXo4NPVw+fbp+c/b7/PfkSdAaaFwz8em03LEFttoK2m6hrZ9Z/akQyqdPE+lG+QmRnc2qJtEU09bt5+jLpqSBksT199zhK2YTmDd+FtX6ZItXACKfwdXpz7jlG7LdL/BiEllEfrhqgMRTygTYfwEbI4moxk/Oj9HHBqf/XQxdEajNNvh70QbCUD5Ul9GSiwWBeLnMlAfOOcpfkEQYjMoenCXhxcAReZrsmyjKpWgxEgQwk3nGFTSKUed/iNCcHHx6erp1uRL8xlvjCO9KpQskz5aA4lraCDDu8+Qr8nJz5TegJfHIAYmRcmUCXLKqw0kCltAGkzDZVeIhf0lYbM9TqgWnnNbipLsgyr1Lvo1b3ju+hXR8mrTSL5x9U8HtDYXn+DuAVNZSxfGS+Wf9K2GdBxnY0xjqZi2O42Ce/Hilkaws581vJ8725iJ/U+jKdM5wqX807vaEbKYWR1dNIh7pNebVKIUy3d6GNJzGextrdxM103eoXHXafxreQS+OsT7/r+EDVK3g7sV4fSuFf4n/vyP2d9pU4IYuo4DGswB9DxUuHO+2RQbTW/iMkSUvZpMopo6aKtE3q+6xAZOEg38+N9Zy8WvhdkGzXAOySwfyUXiJvFBzGY8QggvKVDJDCOasJiKw0WNdUP4VCs7Jg1d5M6lKqjVHoNOksdDOb3ouDxEIP7eYYvgj7YRykT8UynOq3zX5P2Gv3mVAXLJvlNdedspekPDe5W5Er1Y26Q7S8UnZa2cCxVarg9WOk26a75uXG6Qm772fF9ldC6sd8dPziC8FQzy9Q9JI68fSxS0IUDN9Et39ZOhJON56VZWYlmiR6VHcnT09rah4HzMCIGyERijkqL11F76g9WRVSj9K4ruuP/RFk1CRJf3NU+W+Ai8hNEOkGAnrFrbxKTws8+1ljufLu6+XL3/XYAKTG7e3x6+CFkxPibysJ5rJnSRpEZKk1pG2xLDTptyonZOgiSOTeJVbH23iYnkl5TtwxS5pwp2mThNDY1ps72lwArYwkAcFA603Eo49FUkLX96huw1Poa2tiDo0srVy8t5dkVx0dGJ+VjGkceI+VhH49QQLIWjWK8UWvRhnqqNLXx7J4qWfUcDfij1C/QOkIO9mBp7T/P7sHKxIMFQKY8Vkp69mCPlZaZfX6Plai3MSCFX1mClaZen9tTBWpprCfUqXVMPT1a9NL3Ir0ne+8zehDrb97Sic74fw==&lt;/diagram&gt;&lt;/mxfile&gt;" style="background-color: rgb(255, 255, 255);"><defs><style type="text/css">@import url(https://fonts.googleapis.com/css?family=Liberation+Sans);&#xa;</style></defs><g><rect x="9428" y="2640" width="1120" height="600" rx="90" ry="90" fill="none" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-dasharray="12 12" pointer-events="all"/><path d="M 8868 3120.57 Q 8656.57 3120.57 8656.57 1980.33" fill="none" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-miterlimit="10" stroke-dasharray="12 12" pointer-events="stroke"/><path d="M 8656.57 1959.33 L 8665.9 1987.33 L 8656.57 1980.33 L 8647.24 1987.33 Z" fill="rgb(0, 0, 0)" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-miterlimit="10" pointer-events="all"/><path d="M 9228 3600 Q 9228 3760.57 9608 3760.57 Q 9988 3760.57 9988 3280.42" fill="none" stroke="#000000" stroke-width="12" stroke-miterlimit="10" pointer-events="stroke"/><path d="M 9988 3253.42 L 10006 3289.42 L 9988 3280.42 L 9970 3289.42 Z" fill="#000000" stroke="#000000" stroke-width="12" stroke-miterlimit="10" pointer-events="all"/><rect x="8868" y="2640" width="480" height="960" rx="72" ry="72" fill="none" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-dasharray="12 12" pointer-events="all"/><rect x="2868" y="2640" width="1080" height="960" rx="144" ry="144" fill="none" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-dasharray="12 12" pointer-events="all"/><rect x="1082" y="840" width="1080" height="440" rx="66" ry="66" fill="none" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-dasharray="12 12" pointer-events="all"/><rect x="2148" y="2640" width="560" height="960" rx="84" ry="84" fill="none" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-dasharray="12 12" pointer-events="all"/><path d="M 4928 160 L 4928 360" fill="none" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-miterlimit="10" pointer-events="stroke"/><path d="M 4928.57 160 L 4928.57 200.57 Q 4928.57 240.57 4888.57 240.57 L 1662.29 240.57 Q 1622.29 240.57 1622.19 280.57 L 1622.06 334.53" fill="none" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-miterlimit="10" pointer-events="stroke"/><path d="M 1622.01 355.53 L 1608.08 327.49 L 1622.06 334.53 L 1636.08 327.56 Z" fill="rgb(0, 0, 0)" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-miterlimit="10" pointer-events="all"/><path d="M 4928.57 160 L 4928.57 200.57 Q 4928.57 240.57 4968.57 240.57 L 5808.57 240.57 Q 5848.57 240.57 5848.38 280.57 L 5848.12 334.53" fill="none" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-miterlimit="10" pointer-events="stroke"/><path d="M 5848.02 355.53 L 5834.16 327.46 L 5848.12 334.53 L 5862.16 327.6 Z" fill="rgb(0, 0, 0)" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-miterlimit="10" pointer-events="all"/><path d="M 4928.57 160 L 4928.57 200.57 Q 4928.57 240.57 4968.57 240.57 L 7273.71 240.57 Q 7313.71 240.57 7313.81 280.57 L 7313.94 334.53" fill="none" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-miterlimit="10" pointer-events="stroke"/><path d="M 7313.99 355.53 L 7299.92 327.56 L 7313.94 334.53 L 7327.92 327.49 Z" fill="rgb(0, 0, 0)" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-miterlimit="10" pointer-events="all"/><path d="M 4928.57 160 L 4928.57 200.57 Q 4928.57 240.57 4888.57 240.57 L 2620 240.57 Q 2580 240.57 2580.01 280.57 L 2580.02 334.53" fill="none" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-miterlimit="10" pointer-events="stroke"/><path d="M 2580.02 355.53 L 2566.01 327.53 L 2580.02 334.53 L 2594.01 327.53 Z" fill="rgb(0, 0, 0)" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-miterlimit="10" pointer-events="all"/><rect x="4788" y="0" width="280" height="160" rx="24" ry="24" fill="rgb(255, 255, 255)" stroke="rgb(0, 0, 0)" stroke-width="4" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 68px; height: 1px; padding-top: 20px; margin-left: 1198px;"><div data-drawio-colors="color: rgb(0, 0, 0); " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(0, 0, 0); line-height: 1.2; pointer-events: all; white-space: normal; overflow-wrap: normal;"><font data-font-src="https://fonts.googleapis.com/css?family=Architects+Daughter">tmp</font></div></div></div></foreignObject><text x="1232" y="24" fill="rgb(0, 0, 0)" font-family="Liberation Sans" font-size="12px" text-anchor="middle">tmp</text></switch></g><path d="M 4928 520 L 4928 1040" fill="none" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-miterlimit="10" pointer-events="stroke"/><rect x="4788" y="360" width="280" height="160" rx="24" ry="24" fill="rgb(255, 255, 255)" stroke="rgb(0, 0, 0)" stroke-width="4" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 68px; height: 1px; padding-top: 110px; margin-left: 1198px;"><div data-drawio-colors="color: rgb(0, 0, 0); " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(0, 0, 0); line-height: 1.2; pointer-events: all; white-space: normal; overflow-wrap: normal;"><font data-font-src="https://fonts.googleapis.com/css?family=Architects+Daughter">work</font></div></div></div></foreignObject><text x="1232" y="114" fill="rgb(0, 0, 0)" font-family="Liberation Sans" font-size="12px" text-anchor="middle">work</text></switch></g><path d="M 4928 1200 L 4928.29 1240.57 Q 4928.57 1280.57 4968.57 1280.57 L 5648.57 1280.57 Q 5688.57 1280.57 5688.57 1320.29 L 5688.57 1360" fill="none" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-miterlimit="10" pointer-events="stroke"/><path d="M 4928 1200 L 4928.29 1240.57 Q 4928.57 1280.57 4888.57 1280.57 L 3156 1280.57 Q 3116 1280.57 3115.98 1320.29 L 3115.96 1360" fill="none" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-miterlimit="10" pointer-events="stroke"/><path d="M 4588 1120 Q 4398.29 1120.57 4398.24 1334.53" fill="none" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-miterlimit="10" stroke-dasharray="12 12" pointer-events="stroke"/><path d="M 4398.24 1355.53 L 4388.91 1327.53 L 4398.24 1334.53 L 4407.58 1327.53 Z" fill="rgb(0, 0, 0)" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-miterlimit="10" pointer-events="all"/><rect x="4588" y="1040" width="680" height="160" rx="24" ry="24" fill="rgb(255, 255, 255)" stroke="rgb(0, 0, 0)" stroke-width="4" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 168px; height: 1px; padding-top: 280px; margin-left: 1148px;"><div data-drawio-colors="color: rgb(0, 0, 0); " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(0, 0, 0); line-height: 1.2; pointer-events: all; white-space: normal; overflow-wrap: normal;"><font data-font-src="https://fonts.googleapis.com/css?family=Architects+Daughter"><b>${MULTIMACH_TARGET_OS}</b></font></div></div></div></foreignObject><text x="1232" y="284" fill="rgb(0, 0, 0)" font-family="Liberation Sans" font-size="12px" text-anchor="middle">${MULTIMACH_TARGET_OS}</text></switch></g><path d="M 3115.96 1520 L 3116 1680" fill="none" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-miterlimit="10" pointer-events="stroke"/><rect x="2975.96" y="1360" width="280" height="160" rx="24" ry="24" fill="rgb(255, 255, 255)" stroke="rgb(0, 0, 0)" stroke-width="4" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 68px; height: 1px; padding-top: 360px; margin-left: 745px;"><div data-drawio-colors="color: rgb(0, 0, 0); " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(0, 0, 0); line-height: 1.2; pointer-events: all; white-space: normal; overflow-wrap: normal;"><font data-font-src="https://fonts.googleapis.com/css?family=Architects+Daughter">libhello</font></div></div></div></foreignObject><text x="779" y="364" fill="rgb(0, 0, 0)" font-family="Liberation Sans" font-size="12px" text-anchor="middle">libhello</text></switch></g><path d="M 5688.57 1520 L 5688.57 1560.57 Q 5688.57 1600.57 5688.57 1570.29 L 5688.57 1555.14 Q 5688.57 1540 5688.29 1580 L 5688 1620" fill="none" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-miterlimit="10" pointer-events="stroke"/><rect x="5548" y="1360" width="280" height="160" rx="24" ry="24" fill="rgb(255, 255, 255)" stroke="rgb(0, 0, 0)" stroke-width="4" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 68px; height: 1px; padding-top: 360px; margin-left: 1388px;"><div data-drawio-colors="color: rgb(0, 0, 0); " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(0, 0, 0); line-height: 1.2; pointer-events: all; white-space: normal; overflow-wrap: normal;"><font data-font-src="https://fonts.googleapis.com/css?family=Architects+Daughter">sayhello</font></div></div></div></foreignObject><text x="1422" y="364" fill="rgb(0, 0, 0)" font-family="Liberation Sans" font-size="12px" text-anchor="middle">sayhello</text></switch></g><path d="M 3116 1840 L 3116 1960.57 Q 3116 2000.57 3076 2000.57 L 2036 2000.57 Q 1996 2000.57 1996 2040.57 L 1996 2134.53" fill="none" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-miterlimit="10" pointer-events="stroke"/><path d="M 1996 2155.53 L 1986.67 2127.53 L 1996 2134.53 L 2005.33 2127.53 Z" fill="rgb(0, 0, 0)" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-miterlimit="10" pointer-events="all"/><path d="M 3116 1840 L 3116 1960.57 Q 3116 2000.57 3076 2000.57 L 2468 2000.57 Q 2428 2000.57 2428 2040.57 L 2428 2134.53" fill="none" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-miterlimit="10" pointer-events="stroke"/><path d="M 2428 2155.53 L 2418.67 2127.53 L 2428 2134.53 L 2437.33 2127.53 Z" fill="rgb(0, 0, 0)" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-miterlimit="10" pointer-events="all"/><path d="M 3116 1840 L 3115.96 2134.53" fill="none" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-miterlimit="10" pointer-events="stroke"/><path d="M 3115.96 2155.53 L 3101.96 2127.53 L 3115.96 2134.53 L 3129.96 2127.53 Z" fill="rgb(0, 0, 0)" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-miterlimit="10" pointer-events="all"/><path d="M 3116 1840 L 3116 1960.57 Q 3116 2000.57 3156 2000.57 L 4268 2000.57 Q 4308 2000.57 4308 2040.57 L 4308 2134.53" fill="none" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-miterlimit="10" pointer-events="stroke"/><path d="M 4308 2155.53 L 4294 2127.53 L 4308 2134.53 L 4322 2127.53 Z" fill="rgb(0, 0, 0)" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-miterlimit="10" pointer-events="all"/><path d="M 3116 1840 L 3116 1960.57 Q 3116 2000.57 3076 2000.57 L 1508 2000.57 Q 1468 2000.57 1468 2040.57 L 1468 2134.53" fill="none" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-miterlimit="10" pointer-events="stroke"/><path d="M 1468 2155.53 L 1458.67 2127.53 L 1468 2134.53 L 1477.33 2127.53 Z" fill="rgb(0, 0, 0)" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-miterlimit="10" pointer-events="all"/><rect x="2976" y="1680" width="280" height="160" rx="24" ry="24" fill="rgb(255, 255, 255)" stroke="rgb(0, 0, 0)" stroke-width="4" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 68px; height: 1px; padding-top: 440px; margin-left: 745px;"><div data-drawio-colors="color: rgb(0, 0, 0); " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(0, 0, 0); line-height: 1.2; pointer-events: all; white-space: normal; overflow-wrap: normal;"><font data-font-src="https://fonts.googleapis.com/css?family=Architects+Daughter">0.1-r0</font></div></div></div></foreignObject><text x="779" y="444" fill="rgb(0, 0, 0)" font-family="Liberation Sans" font-size="12px" text-anchor="middle">0.1-r0</text></switch></g><path d="M 5688.57 1780 L 5688.57 1960.57 Q 5688.57 2000.57 5648.57 2000.57 L 5568.57 2000.57 Q 5528.57 2000.57 5528.43 2040.57 L 5528.09 2134.53" fill="none" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-miterlimit="10" pointer-events="stroke"/><path d="M 5528.02 2155.53 L 5514.12 2127.48 L 5528.09 2134.53 L 5542.12 2127.58 Z" fill="rgb(0, 0, 0)" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-miterlimit="10" pointer-events="all"/><path d="M 5688.57 1780 L 5688.57 1960.57 Q 5688.57 2000.57 5728.57 2000.57 L 6988 2000.57 Q 7028 2000.57 7028 2040.57 L 7028 2134.53" fill="none" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-miterlimit="10" pointer-events="stroke"/><path d="M 7028 2155.53 L 7014 2127.53 L 7028 2134.53 L 7042 2127.53 Z" fill="rgb(0, 0, 0)" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-miterlimit="10" pointer-events="all"/><path d="M 5688.57 1780 L 5688.57 1960.57 Q 5688.57 2000.57 5728.57 2000.57 L 7676 2000.57 Q 7716 2000.57 7715.99 2040.57 L 7715.97 2134.53" fill="none" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-miterlimit="10" pointer-events="stroke"/><path d="M 7715.96 2155.53 L 7701.97 2127.52 L 7715.97 2134.53 L 7729.97 2127.53 Z" fill="rgb(0, 0, 0)" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-miterlimit="10" pointer-events="all"/><path d="M 5688.57 1780 L 5688.57 1960.57 Q 5688.57 2000.57 5728.57 2000.57 L 8313.71 2000.57 Q 8353.71 2000.57 8353.79 2040.57 L 8353.95 2134.53" fill="none" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-miterlimit="10" pointer-events="stroke"/><path d="M 8353.99 2155.53 L 8339.94 2127.55 L 8353.95 2134.53 L 8367.94 2127.5 Z" fill="rgb(0, 0, 0)" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-miterlimit="10" pointer-events="all"/><path d="M 5688.57 1780 L 5688.57 1960.57 Q 5688.57 2000.57 5728.57 2000.57 L 9068 2000.57 Q 9108 2000.57 9108 2040.57 L 9108 2134.53" fill="none" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-miterlimit="10" pointer-events="stroke"/><path d="M 9108 2155.53 L 9094 2127.53 L 9108 2134.53 L 9122 2127.53 Z" fill="rgb(0, 0, 0)" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-miterlimit="10" pointer-events="all"/><path d="M 5688.57 1780 L 5688.57 1960.57 Q 5688.57 2000.57 5728.57 2000.57 L 9948 2000.57 Q 9988 2000.57 9988 2040.57 L 9988 2134.53" fill="none" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-miterlimit="10" pointer-events="stroke"/><path d="M 9988 2155.53 L 9974 2127.53 L 9988 2134.53 L 10002 2127.53 Z" fill="rgb(0, 0, 0)" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-miterlimit="10" pointer-events="all"/><rect x="5548" y="1620" width="280" height="160" rx="24" ry="24" fill="rgb(255, 255, 255)" stroke="rgb(0, 0, 0)" stroke-width="4" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 68px; height: 1px; padding-top: 425px; margin-left: 1388px;"><div data-drawio-colors="color: rgb(0, 0, 0); " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(0, 0, 0); line-height: 1.2; pointer-events: all; white-space: normal; overflow-wrap: normal;"><font data-font-src="https://fonts.googleapis.com/css?family=Architects+Daughter">0.1-r0</font></div></div></div></foreignObject><text x="1422" y="429" fill="rgb(0, 0, 0)" font-family="Liberation Sans" font-size="12px" text-anchor="middle">0.1-r0</text></switch></g><path d="M 1622.29 520 L 1622.29 560.57 Q 1622.29 600.57 1622.29 560.57 L 1622.29 540.57 Q 1622.29 520.57 1622.29 560.29 L 1622.29 600" fill="none" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-miterlimit="10" pointer-events="stroke"/><path d="M 1762 440 Q 2215.43 440.57 2215.04 205.47" fill="none" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-miterlimit="10" stroke-dasharray="12 12" pointer-events="stroke"/><path d="M 2215.01 184.47 L 2224.39 212.46 L 2215.04 205.47 L 2205.72 212.49 Z" fill="rgb(0, 0, 0)" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-miterlimit="10" pointer-events="all"/><rect x="1482" y="360" width="280" height="160" rx="24" ry="24" fill="rgb(255, 255, 255)" stroke="rgb(0, 0, 0)" stroke-width="4" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 68px; height: 1px; padding-top: 110px; margin-left: 371px;"><div data-drawio-colors="color: rgb(0, 0, 0); " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(0, 0, 0); line-height: 1.2; pointer-events: all; white-space: normal; overflow-wrap: normal;"><font data-font-src="https://fonts.googleapis.com/css?family=Architects+Daughter">downloads</font></div></div></div></foreignObject><text x="405" y="114" fill="rgb(0, 0, 0)" font-family="Liberation Sans" font-size="12px" text-anchor="middle">downloads</text></switch></g><path d="M 1622 760 L 1622.14 800.57 Q 1622.29 840.57 1622.29 800.57 L 1622.29 780.57 Q 1622.29 760.57 1622.29 800.29 L 1622.29 840" fill="none" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-miterlimit="10" pointer-events="stroke"/><rect x="1482" y="600" width="280" height="160" rx="24" ry="24" fill="rgb(255, 255, 255)" stroke="rgb(0, 0, 0)" stroke-width="4" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 68px; height: 1px; padding-top: 170px; margin-left: 371px;"><div data-drawio-colors="color: rgb(0, 0, 0); " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(0, 0, 0); line-height: 1.2; pointer-events: all; white-space: normal; overflow-wrap: normal;"><font data-font-src="https://fonts.googleapis.com/css?family=Architects+Daughter">git2</font></div></div></div></foreignObject><text x="405" y="174" fill="rgb(0, 0, 0)" font-family="Liberation Sans" font-size="12px" text-anchor="middle">git2</text></switch></g><path d="M 2082 1160 L 2628 1160.53 Q 2668 1160.57 2668 1200.57 L 2668 2200.57 Q 2668 2240.57 2628 2240.41 L 2568.42 2240.16" fill="none" stroke="#000000" stroke-width="12" stroke-miterlimit="10" pointer-events="stroke"/><path d="M 2541.42 2240.05 L 2577.49 2222.2 L 2568.42 2240.16 L 2577.34 2258.2 Z" fill="#000000" stroke="#000000" stroke-width="12" stroke-miterlimit="10" pointer-events="all"/><rect x="1162" y="880" width="920" height="160" rx="24" ry="24" fill="#eeeeee" stroke="#36393d" stroke-width="4" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 228px; height: 1px; padding-top: 240px; margin-left: 291px;"><div data-drawio-colors="color: rgb(0, 0, 0); " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(0, 0, 0); line-height: 1.2; pointer-events: all; white-space: normal; overflow-wrap: normal;">github.com.&lt;username&gt;.sayhello</div></div></div></foreignObject><text x="405" y="244" fill="rgb(0, 0, 0)" font-family="Liberation Sans" font-size="12px" text-anchor="middle">github.com.&lt;username&gt;.sayhello</text></switch></g><path d="M 2082 960 L 6628 960.57 Q 6668 960.57 6668 1000.57 L 6668 2200.57 Q 6668 2240.57 6708 2240.48 L 6887.58 2240.09" fill="none" stroke="#000000" stroke-width="12" stroke-miterlimit="10" pointer-events="stroke"/><path d="M 6914.58 2240.03 L 6878.62 2258.11 L 6887.58 2240.09 L 6878.54 2222.11 Z" fill="#000000" stroke="#000000" stroke-width="12" stroke-miterlimit="10" pointer-events="all"/><rect x="1162" y="1080" width="920" height="160" rx="24" ry="24" fill="#eeeeee" stroke="#36393d" stroke-width="4" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 228px; height: 1px; padding-top: 290px; margin-left: 291px;"><div data-drawio-colors="color: rgb(0, 0, 0); " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(0, 0, 0); line-height: 1.2; pointer-events: all; white-space: normal; overflow-wrap: normal;">github.com.&lt;username&gt;.libhello</div></div></div></foreignObject><text x="405" y="294" fill="rgb(0, 0, 0)" font-family="Liberation Sans" font-size="12px" text-anchor="middle">github.com.&lt;username&gt;.libhello</text></switch></g><path d="M 2428 2320 Q 2428 2320 2428 2614.53" fill="none" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-miterlimit="10" pointer-events="stroke"/><path d="M 2428 2635.53 L 2414 2607.53 L 2428 2614.53 L 2442 2607.53 Z" fill="rgb(0, 0, 0)" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-miterlimit="10" pointer-events="all"/><rect x="2328" y="2160" width="200" height="160" rx="24" ry="24" fill="rgb(255, 255, 255)" stroke="rgb(0, 0, 0)" stroke-width="4" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 48px; height: 1px; padding-top: 560px; margin-left: 583px;"><div data-drawio-colors="color: rgb(0, 0, 0); " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(0, 0, 0); line-height: 1.2; pointer-events: all; white-space: normal; overflow-wrap: normal;"><font data-font-src="https://fonts.googleapis.com/css?family=Architects+Daughter">git</font></div></div></div></foreignObject><text x="607" y="564" fill="rgb(0, 0, 0)" font-family="Liberation Sans" font-size="12px" text-anchor="middle">git</text></switch></g><rect x="2212" y="2720" width="456" height="80" rx="12" ry="12" fill="#f5f5f5" stroke="#666666" stroke-width="4" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 112px; height: 1px; padding-top: 690px; margin-left: 554px;"><div data-drawio-colors="color: #333333; " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(51, 51, 51); line-height: 1.2; pointer-events: all; white-space: normal; overflow-wrap: normal;">Makefile</div></div></div></foreignObject><text x="610" y="694" fill="#333333" font-family="Liberation Sans" font-size="12px" text-anchor="middle">Makefile</text></switch></g><path d="M 2212 2880.57 L 2117.14 2880.57 Q 2077.14 2880.57 2077.14 2840.57 L 2077.14 2714.86 Q 2077.14 2674.86 2037.14 2674.9 L 1966.47 2674.97" fill="none" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-miterlimit="10" stroke-dasharray="12 12" pointer-events="stroke"/><path d="M 1945.47 2675 L 1973.46 2665.63 L 1966.47 2674.97 L 1973.48 2684.3 Z" fill="rgb(0, 0, 0)" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-miterlimit="10" pointer-events="all"/><rect x="2212" y="2840" width="456" height="80" rx="12" ry="12" fill="#eeeeee" stroke="#36393d" stroke-width="4" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 112px; height: 1px; padding-top: 720px; margin-left: 554px;"><div data-drawio-colors="color: rgb(0, 0, 0); " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(0, 0, 0); line-height: 1.2; pointer-events: all; white-space: normal; overflow-wrap: normal;">LICENSE</div></div></div></foreignObject><text x="610" y="724" fill="rgb(0, 0, 0)" font-family="Liberation Sans" font-size="12px" text-anchor="middle">LICENSE</text></switch></g><path d="M 2212 3000 Q 2028 3000.57 2028 3060.57 Q 2028 3120.57 2171.58 3120.13" fill="none" stroke="#000000" stroke-width="12" stroke-miterlimit="10" pointer-events="stroke"/><path d="M 2198.58 3120.04 L 2162.62 3132.15 L 2171.58 3120.13 L 2162.55 3108.15 Z" fill="#000000" stroke="#000000" stroke-width="12" stroke-miterlimit="10" pointer-events="all"/><rect x="2212" y="2960" width="456" height="80" rx="12" ry="12" fill="#f5f5f5" stroke="#666666" stroke-width="4" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 112px; height: 1px; padding-top: 750px; margin-left: 554px;"><div data-drawio-colors="color: #333333; " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(51, 51, 51); line-height: 1.2; pointer-events: all; white-space: normal; overflow-wrap: normal;">fix.patch</div></div></div></foreignObject><text x="610" y="754" fill="#333333" font-family="Liberation Sans" font-size="12px" text-anchor="middle">fix.patch</text></switch></g><path d="M 2214.74 3151.44 Q 1988 3151.43 1988 3278.29 Q 1988 3405.14 2179.58 3405.02" fill="none" stroke="#000000" stroke-width="12" stroke-miterlimit="10" pointer-events="stroke"/><path d="M 2206.58 3405.01 L 2170.6 3423.03 L 2179.58 3405.02 L 2170.57 3387.03 Z" fill="#000000" stroke="#000000" stroke-width="12" stroke-miterlimit="10" pointer-events="all"/><rect x="2212" y="3080" width="456" height="80" rx="12" ry="12" fill="#f5f5f5" stroke="#666666" stroke-width="4" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 112px; height: 1px; padding-top: 780px; margin-left: 554px;"><div data-drawio-colors="color: #333333; " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(51, 51, 51); line-height: 1.2; pointer-events: all; white-space: normal; overflow-wrap: normal;">hellolib.c</div></div></div></foreignObject><text x="610" y="784" fill="#333333" font-family="Liberation Sans" font-size="12px" text-anchor="middle">hellolib.c</text></switch></g><rect x="1896" y="2160" width="200" height="160" rx="24" ry="24" fill="rgb(255, 255, 255)" stroke="rgb(0, 0, 0)" stroke-width="4" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 48px; height: 1px; padding-top: 560px; margin-left: 475px;"><div data-drawio-colors="color: rgb(0, 0, 0); " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(0, 0, 0); line-height: 1.2; pointer-events: all; white-space: normal; overflow-wrap: normal;"><font data-font-src="https://fonts.googleapis.com/css?family=Architects+Daughter">temp</font></div></div></div></foreignObject><text x="499" y="564" fill="rgb(0, 0, 0)" font-family="Liberation Sans" font-size="12px" text-anchor="middle">temp</text></switch></g><path d="M 4308 2320 L 4308 2640" fill="none" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-miterlimit="10" pointer-events="stroke"/><rect x="4068" y="2160" width="480" height="160" rx="24" ry="24" fill="rgb(255, 255, 255)" stroke="rgb(0, 0, 0)" stroke-width="4" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 118px; height: 1px; padding-top: 560px; margin-left: 1018px;"><div data-drawio-colors="color: rgb(0, 0, 0); " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(0, 0, 0); line-height: 1.2; pointer-events: all; white-space: normal; overflow-wrap: normal;"><font data-font-src="https://fonts.googleapis.com/css?family=Architects+Daughter">sysroot-destdir</font></div></div></div></foreignObject><text x="1077" y="564" fill="rgb(0, 0, 0)" font-family="Liberation Sans" font-size="12px" text-anchor="middle">sysroot-destdir</text></switch></g><path d="M 5528 2320 L 5528 2614.53" fill="none" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-miterlimit="10" pointer-events="stroke"/><path d="M 5528 2635.53 L 5518.67 2607.53 L 5528 2614.53 L 5537.33 2607.53 Z" fill="rgb(0, 0, 0)" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-miterlimit="10" pointer-events="all"/><rect x="5308" y="2160" width="440" height="160" rx="24" ry="24" fill="rgb(255, 255, 255)" stroke="rgb(0, 0, 0)" stroke-width="4" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 108px; height: 1px; padding-top: 560px; margin-left: 1328px;"><div data-drawio-colors="color: rgb(0, 0, 0); " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(0, 0, 0); line-height: 1.2; pointer-events: all; white-space: normal; overflow-wrap: normal;"><font data-font-src="https://fonts.googleapis.com/css?family=Architects+Daughter">recipe-sysroot</font></div></div></div></foreignObject><text x="1382" y="564" fill="rgb(0, 0, 0)" font-family="Liberation Sans" font-size="12px" text-anchor="middle">recipe-sysroot</text></switch></g><path d="M 3116 2320 L 3116 2694.53" fill="none" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-miterlimit="10" pointer-events="stroke"/><path d="M 3116 2715.53 L 3106.67 2687.53 L 3116 2694.53 L 3125.33 2687.53 Z" fill="rgb(0, 0, 0)" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-miterlimit="10" pointer-events="all"/><rect x="2989.96" y="2160" width="252" height="160" rx="24" ry="24" fill="rgb(255, 255, 255)" stroke="rgb(0, 0, 0)" stroke-width="4" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 61px; height: 1px; padding-top: 560px; margin-left: 748px;"><div data-drawio-colors="color: rgb(0, 0, 0); " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(0, 0, 0); line-height: 1.2; pointer-events: all; white-space: normal; overflow-wrap: normal;"><font data-font-src="https://fonts.googleapis.com/css?family=Architects+Daughter">image</font></div></div></div></foreignObject><text x="779" y="564" fill="rgb(0, 0, 0)" font-family="Liberation Sans" font-size="12px" text-anchor="middle">image</text></switch></g><path d="M 3116 2880 L 3116 2920.57 Q 3116 2960.57 3115.99 2987.55 L 3115.99 3014.53" fill="none" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-miterlimit="10" pointer-events="stroke"/><path d="M 3115.98 3035.53 L 3106.65 3007.53 L 3115.99 3014.53 L 3125.32 3007.53 Z" fill="rgb(0, 0, 0)" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-miterlimit="10" pointer-events="all"/><path d="M 3116 2880 Q 3116 2960.57 3354.86 2960.57 Q 3593.71 2960.57 3593.91 3014.53" fill="none" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-miterlimit="10" pointer-events="stroke"/><path d="M 3593.98 3035.53 L 3584.55 3007.56 L 3593.91 3014.53 L 3603.22 3007.49 Z" fill="rgb(0, 0, 0)" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-miterlimit="10" pointer-events="all"/><rect x="3029" y="2720" width="174" height="160" rx="24" ry="24" fill="rgb(255, 255, 255)" stroke="rgb(0, 0, 0)" stroke-width="4" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 42px; height: 1px; padding-top: 700px; margin-left: 758px;"><div data-drawio-colors="color: rgb(0, 0, 0); " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(0, 0, 0); line-height: 1.2; pointer-events: all; white-space: normal; overflow-wrap: normal;"><font data-font-src="https://fonts.googleapis.com/css?family=Architects+Daughter">usr</font></div></div></div></foreignObject><text x="779" y="704" fill="rgb(0, 0, 0)" font-family="Liberation Sans" font-size="12px" text-anchor="middle">usr</text></switch></g><path d="M 3115.98 3200 L 3116 3294.53" fill="none" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-miterlimit="10" pointer-events="stroke"/><path d="M 3116 3315.53 L 3106.66 3287.53 L 3116 3294.53 L 3125.33 3287.53 Z" fill="rgb(0, 0, 0)" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-miterlimit="10" pointer-events="all"/><rect x="3008.48" y="3040" width="215" height="160" rx="24" ry="24" fill="rgb(255, 255, 255)" stroke="rgb(0, 0, 0)" stroke-width="4" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 52px; height: 1px; padding-top: 780px; margin-left: 753px;"><div data-drawio-colors="color: rgb(0, 0, 0); " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(0, 0, 0); line-height: 1.2; pointer-events: all; white-space: normal; overflow-wrap: normal;"><font data-font-src="https://fonts.googleapis.com/css?family=Architects+Daughter">include</font></div></div></div></foreignObject><text x="779" y="784" fill="rgb(0, 0, 0)" font-family="Liberation Sans" font-size="12px" text-anchor="middle">include</text></switch></g><path d="M 3593.71 3200 L 3593.71 3240.57 Q 3593.71 3280.57 3593.76 3287.55 L 3593.82 3294.53" fill="none" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-miterlimit="10" pointer-events="stroke"/><path d="M 3593.97 3315.53 L 3579.77 3287.63 L 3593.82 3294.53 L 3607.76 3287.43 Z" fill="rgb(0, 0, 0)" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-miterlimit="10" pointer-events="all"/><rect x="3507" y="3040" width="174" height="160" rx="24" ry="24" fill="rgb(255, 255, 255)" stroke="rgb(0, 0, 0)" stroke-width="4" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 42px; height: 1px; padding-top: 780px; margin-left: 878px;"><div data-drawio-colors="color: rgb(0, 0, 0); " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(0, 0, 0); line-height: 1.2; pointer-events: all; white-space: normal; overflow-wrap: normal;"><font data-font-src="https://fonts.googleapis.com/css?family=Architects+Daughter">lib</font></div></div></div></foreignObject><text x="899" y="784" fill="rgb(0, 0, 0)" font-family="Liberation Sans" font-size="12px" text-anchor="middle">lib</text></switch></g><rect x="2956" y="3320" width="320" height="100" rx="15" ry="15" fill="#f5f5f5" stroke="#666666" stroke-width="4" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 78px; height: 1px; padding-top: 843px; margin-left: 740px;"><div data-drawio-colors="color: #333333; " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(51, 51, 51); line-height: 1.2; pointer-events: all; white-space: normal; overflow-wrap: normal;">hellolib.h</div></div></div></foreignObject><text x="779" y="846" fill="#333333" font-family="Liberation Sans" font-size="12px" text-anchor="middle">hellolib.h</text></switch></g><rect x="3348" y="3350" width="480" height="80" rx="12" ry="12" fill="#f5f5f5" stroke="#666666" stroke-width="4" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 118px; height: 1px; padding-top: 848px; margin-left: 838px;"><div data-drawio-colors="color: #333333; " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(51, 51, 51); line-height: 1.2; pointer-events: all; white-space: normal; overflow-wrap: normal;">libhello.so.1</div></div></div></foreignObject><text x="897" y="851" fill="#333333" font-family="Liberation Sans" font-size="12px" text-anchor="middle">libhello.so.1</text></switch></g><rect x="3348" y="3450" width="480" height="100" rx="15" ry="15" fill="#f5f5f5" stroke="#666666" stroke-width="4" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 118px; height: 1px; padding-top: 875px; margin-left: 838px;"><div data-drawio-colors="color: #333333; " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(51, 51, 51); line-height: 1.2; pointer-events: all; white-space: normal; overflow-wrap: normal;">libhello.so.1.0</div></div></div></foreignObject><text x="897" y="879" fill="#333333" font-family="Liberation Sans" font-size="12px" text-anchor="middle">libhello.so.1.0</text></switch></g><rect x="3320" y="3320" width="548" height="250" fill="none" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-dasharray="12 12" pointer-events="all"/><path d="M 4428 3600 Q 4428 3760.57 4978.29 3760.57 Q 5528.57 3760.57 5528.14 3640.42" fill="none" stroke="#000000" stroke-width="12" stroke-miterlimit="10" pointer-events="stroke"/><path d="M 5528.05 3613.42 L 5546.18 3649.35 L 5528.14 3640.42 L 5510.18 3649.48 Z" fill="#000000" stroke="#000000" stroke-width="12" stroke-miterlimit="10" pointer-events="all"/><rect x="4068" y="2640" width="480" height="960" rx="72" ry="72" fill="#eeeeee" stroke="#36393d" stroke-width="4" stroke-dasharray="12 12" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 118px; height: 1px; padding-top: 780px; margin-left: 1018px;"><div data-drawio-colors="color: rgb(0, 0, 0); " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(0, 0, 0); line-height: 1.2; pointer-events: all; white-space: normal; overflow-wrap: normal;">Everything in <b>image</b> folder that is present in <b>SYSROOT_DIRS</b> will be copied here.</div></div></div></foreignObject><text x="1077" y="784" fill="rgb(0, 0, 0)" font-family="Liberation Sans" font-size="12px" text-anchor="middle">Everything in image...</text></switch></g><path d="M 3678 3600 Q 3678.29 3760.57 3993.14 3760.57 Q 4308 3760.57 4308 3640.42" fill="none" stroke="#000000" stroke-width="12" stroke-miterlimit="10" pointer-events="stroke"/><path d="M 4308 3613.42 L 4326 3649.42 L 4308 3640.42 L 4290 3649.42 Z" fill="#000000" stroke="#000000" stroke-width="12" stroke-miterlimit="10" pointer-events="all"/><path d="M 2428 3600 Q 2428 3760.57 2941.14 3760.57 Q 3454.29 3760.57 3454.4 3639.46" fill="none" stroke="#000000" stroke-width="12" stroke-miterlimit="10" pointer-events="stroke"/><path d="M 3454.43 3612.46 L 3472.39 3648.47 L 3454.4 3639.46 L 3436.39 3648.44 Z" fill="#000000" stroke="#000000" stroke-width="12" stroke-miterlimit="10" pointer-events="all"/><rect x="2748" y="3680" width="360" height="120" fill="#eeeeee" stroke="#36393d" stroke-width="4" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 1px; height: 1px; padding-top: 935px; margin-left: 732px;"><div data-drawio-colors="color: rgb(0, 0, 0); " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(0, 0, 0); line-height: 1.2; pointer-events: all; white-space: nowrap;"><font data-font-src="https://fonts.googleapis.com/css?family=Architects+Daughter">do_install</font></div></div></div></foreignObject><text x="732" y="939" fill="rgb(0, 0, 0)" font-family="Liberation Sans" font-size="12px" text-anchor="middle">do_install</text></switch></g><rect x="2220" y="3350" width="456" height="110" rx="16.5" ry="16.5" fill="#eeeeee" stroke="#36393d" stroke-width="4" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 112px; height: 1px; padding-top: 851px; margin-left: 556px;"><div data-drawio-colors="color: rgb(0, 0, 0); " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(0, 0, 0); line-height: 1.2; pointer-events: all; white-space: normal; overflow-wrap: normal;">libhello.so.1.0</div></div></div></foreignObject><text x="612" y="855" fill="rgb(0, 0, 0)" font-family="Liberation Sans" font-size="12px" text-anchor="middle">libhello.so.1.0</text></switch></g><rect x="1668" y="3180" width="440" height="120" fill="#eeeeee" stroke="#36393d" stroke-width="4" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 1px; height: 1px; padding-top: 810px; margin-left: 472px;"><div data-drawio-colors="color: rgb(0, 0, 0); " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(0, 0, 0); line-height: 1.2; pointer-events: all; white-space: nowrap;"><font data-font-src="https://fonts.googleapis.com/css?family=Architects+Daughter">do_configure<br /></font></div></div></div></foreignObject><text x="472" y="814" fill="rgb(0, 0, 0)" font-family="Liberation Sans" font-size="12px" text-anchor="middle">do_configure&#xa;</text></switch></g><rect x="2212" y="3200" width="456" height="80" rx="12" ry="12" fill="#f5f5f5" stroke="#666666" stroke-width="4" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 112px; height: 1px; padding-top: 810px; margin-left: 554px;"><div data-drawio-colors="color: #333333; " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(51, 51, 51); line-height: 1.2; pointer-events: all; white-space: normal; overflow-wrap: normal;">hellolib.h</div></div></div></foreignObject><text x="610" y="814" fill="#333333" font-family="Liberation Sans" font-size="12px" text-anchor="middle">hellolib.h</text></switch></g><rect x="1788" y="3000" width="320" height="120" fill="#eeeeee" stroke="#36393d" stroke-width="4" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 1px; height: 1px; padding-top: 765px; margin-left: 487px;"><div data-drawio-colors="color: rgb(0, 0, 0); " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(0, 0, 0); line-height: 1.2; pointer-events: all; white-space: nowrap;"><font data-font-src="https://fonts.googleapis.com/css?family=Architects+Daughter">do_patch<br /></font></div></div></div></foreignObject><text x="487" y="769" fill="rgb(0, 0, 0)" font-family="Liberation Sans" font-size="12px" text-anchor="middle">do_patch&#xa;</text></switch></g><rect x="2494" y="1620" width="360" height="120" fill="#eeeeee" stroke="#36393d" stroke-width="4" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 1px; height: 1px; padding-top: 420px; margin-left: 669px;"><div data-drawio-colors="color: rgb(0, 0, 0); " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(0, 0, 0); line-height: 1.2; pointer-events: all; white-space: nowrap;"><font data-font-src="https://fonts.googleapis.com/css?family=Architects+Daughter">do_unpack<br /></font></div></div></div></foreignObject><text x="669" y="424" fill="rgb(0, 0, 0)" font-family="Liberation Sans" font-size="12px" text-anchor="middle">do_unpack&#xa;</text></switch></g><ellipse cx="2434" cy="1680" rx="60.00000000000001" ry="60.00000000000001" fill="#000000" stroke="#56517e" stroke-width="4" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 28px; height: 1px; padding-top: 420px; margin-left: 595px;"><div data-drawio-colors="color: rgb(0, 0, 0); " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(0, 0, 0); line-height: 1.2; pointer-events: all; white-space: normal; overflow-wrap: normal;"><font data-font-src="https://fonts.googleapis.com/css?family=Architects+Daughter" color="#ffffff" size="1"><b style="font-size: 15px;">6</b></font></div></div></div></foreignObject><text x="609" y="424" fill="rgb(0, 0, 0)" font-family="Liberation Sans" font-size="12px" text-anchor="middle">6</text></switch></g><ellipse cx="1728" cy="3060" rx="60.00000000000001" ry="60.00000000000001" fill="#000000" stroke="#56517e" stroke-width="4" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 28px; height: 1px; padding-top: 765px; margin-left: 418px;"><div data-drawio-colors="color: rgb(0, 0, 0); " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(0, 0, 0); line-height: 1.2; pointer-events: all; white-space: normal; overflow-wrap: normal;"><font data-font-src="https://fonts.googleapis.com/css?family=Architects+Daughter" color="#ffffff" size="1"><b style="font-size: 15px;">7</b></font></div></div></div></foreignObject><text x="432" y="769" fill="rgb(0, 0, 0)" font-family="Liberation Sans" font-size="12px" text-anchor="middle">7</text></switch></g><ellipse cx="1608" cy="3240" rx="60.00000000000001" ry="60.00000000000001" fill="#000000" stroke="#56517e" stroke-width="4" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 28px; height: 1px; padding-top: 810px; margin-left: 388px;"><div data-drawio-colors="color: rgb(0, 0, 0); " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(0, 0, 0); line-height: 1.2; pointer-events: all; white-space: normal; overflow-wrap: normal;"><font data-font-src="https://fonts.googleapis.com/css?family=Architects+Daughter" color="#ffffff" size="1"><b style="font-size: 15px;">8</b></font></div></div></div></foreignObject><text x="402" y="814" fill="rgb(0, 0, 0)" font-family="Liberation Sans" font-size="12px" text-anchor="middle">8</text></switch></g><rect x="1748" y="3310" width="360" height="120" fill="#eeeeee" stroke="#36393d" stroke-width="4" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 1px; height: 1px; padding-top: 843px; margin-left: 482px;"><div data-drawio-colors="color: rgb(0, 0, 0); " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(0, 0, 0); line-height: 1.2; pointer-events: all; white-space: nowrap;"><font data-font-src="https://fonts.googleapis.com/css?family=Architects+Daughter">do_compile<br /></font></div></div></div></foreignObject><text x="482" y="846" fill="rgb(0, 0, 0)" font-family="Liberation Sans" font-size="12px" text-anchor="middle">do_compile&#xa;</text></switch></g><ellipse cx="1688" cy="3370" rx="60.00000000000001" ry="60.00000000000001" fill="#000000" stroke="#56517e" stroke-width="4" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 28px; height: 1px; padding-top: 843px; margin-left: 408px;"><div data-drawio-colors="color: rgb(0, 0, 0); " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(0, 0, 0); line-height: 1.2; pointer-events: all; white-space: normal; overflow-wrap: normal;"><font data-font-src="https://fonts.googleapis.com/css?family=Architects+Daughter" color="#ffffff" size="1"><b style="font-size: 15px;">9</b></font></div></div></div></foreignObject><text x="422" y="846" fill="rgb(0, 0, 0)" font-family="Liberation Sans" font-size="12px" text-anchor="middle">9</text></switch></g><ellipse cx="2688" cy="3740" rx="60.00000000000001" ry="60.00000000000001" fill="#000000" stroke="#56517e" stroke-width="4" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 28px; height: 1px; padding-top: 935px; margin-left: 658px;"><div data-drawio-colors="color: rgb(0, 0, 0); " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(0, 0, 0); line-height: 1.2; pointer-events: all; white-space: normal; overflow-wrap: normal;"><font data-font-src="https://fonts.googleapis.com/css?family=Architects+Daughter" color="#ffffff" size="1"><b style="font-size: 15px;">10</b></font></div></div></div></foreignObject><text x="672" y="939" fill="rgb(0, 0, 0)" font-family="Liberation Sans" font-size="12px" text-anchor="middle">10</text></switch></g><rect x="3728" y="3680" width="640" height="120" fill="#eeeeee" stroke="#36393d" stroke-width="4" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 1px; height: 1px; padding-top: 935px; margin-left: 1012px;"><div data-drawio-colors="color: rgb(0, 0, 0); " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(0, 0, 0); line-height: 1.2; pointer-events: all; white-space: nowrap;"><font data-font-src="https://fonts.googleapis.com/css?family=Architects+Daughter">do_populate_sysroot</font></div></div></div></foreignObject><text x="1012" y="939" fill="rgb(0, 0, 0)" font-family="Liberation Sans" font-size="12px" text-anchor="middle">do_populate_sysroot</text></switch></g><ellipse cx="3668" cy="3740" rx="60.00000000000001" ry="60.00000000000001" fill="#000000" stroke="#56517e" stroke-width="4" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 28px; height: 1px; padding-top: 935px; margin-left: 903px;"><div data-drawio-colors="color: rgb(0, 0, 0); " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(0, 0, 0); line-height: 1.2; pointer-events: all; white-space: normal; overflow-wrap: normal;"><font data-font-src="https://fonts.googleapis.com/css?family=Architects+Daughter" color="#ffffff" size="1"><b style="font-size: 15px;">11</b></font></div></div></div></foreignObject><text x="917" y="939" fill="rgb(0, 0, 0)" font-family="Liberation Sans" font-size="12px" text-anchor="middle">11</text></switch></g><path d="M 7028 3280 Q 7028 3760.57 7372 3760.57 Q 7716 3760.57 7715.97 3640.42" fill="none" stroke="#000000" stroke-width="12" stroke-miterlimit="10" pointer-events="stroke"/><path d="M 7715.96 3613.42 L 7733.97 3649.41 L 7715.97 3640.42 L 7697.97 3649.42 Z" fill="#000000" stroke="#000000" stroke-width="12" stroke-miterlimit="10" pointer-events="all"/><rect x="6748" y="2640" width="560" height="640" rx="84" ry="84" fill="none" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-dasharray="12 12" pointer-events="all"/><path d="M 7028 2320 Q 7028 2320 7028 2614.53" fill="none" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-miterlimit="10" pointer-events="stroke"/><path d="M 7028 2635.53 L 7014 2607.53 L 7028 2614.53 L 7042 2607.53 Z" fill="rgb(0, 0, 0)" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-miterlimit="10" pointer-events="all"/><rect x="6928" y="2160" width="200" height="160" rx="24" ry="24" fill="rgb(255, 255, 255)" stroke="rgb(0, 0, 0)" stroke-width="4" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 48px; height: 1px; padding-top: 560px; margin-left: 1733px;"><div data-drawio-colors="color: rgb(0, 0, 0); " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(0, 0, 0); line-height: 1.2; pointer-events: all; white-space: normal; overflow-wrap: normal;"><font data-font-src="https://fonts.googleapis.com/css?family=Architects+Daughter">git</font></div></div></div></foreignObject><text x="1757" y="564" fill="rgb(0, 0, 0)" font-family="Liberation Sans" font-size="12px" text-anchor="middle">git</text></switch></g><rect x="6800" y="2720" width="456" height="80" rx="12" ry="12" fill="#f5f5f5" stroke="#666666" stroke-width="4" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 112px; height: 1px; padding-top: 690px; margin-left: 1701px;"><div data-drawio-colors="color: #333333; " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(51, 51, 51); line-height: 1.2; pointer-events: all; white-space: normal; overflow-wrap: normal;">Makefile</div></div></div></foreignObject><text x="1757" y="694" fill="#333333" font-family="Liberation Sans" font-size="12px" text-anchor="middle">Makefile</text></switch></g><rect x="6800" y="2840" width="456" height="80" rx="12" ry="12" fill="#f5f5f5" stroke="#666666" stroke-width="4" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 112px; height: 1px; padding-top: 720px; margin-left: 1701px;"><div data-drawio-colors="color: #333333; " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(51, 51, 51); line-height: 1.2; pointer-events: all; white-space: normal; overflow-wrap: normal;">LICENSE</div></div></div></foreignObject><text x="1757" y="724" fill="#333333" font-family="Liberation Sans" font-size="12px" text-anchor="middle">LICENSE</text></switch></g><path d="M 6800 3000 Q 6588 3000.57 6588 3088 Q 6588 3175.43 6759.58 3175.08" fill="none" stroke="#000000" stroke-width="12" stroke-miterlimit="10" pointer-events="stroke"/><path d="M 6786.58 3175.03 L 6750.62 3193.1 L 6759.58 3175.08 L 6750.55 3157.1 Z" fill="#000000" stroke="#000000" stroke-width="12" stroke-miterlimit="10" pointer-events="all"/><rect x="6800" y="2960" width="456" height="80" rx="12" ry="12" fill="#f5f5f5" stroke="#666666" stroke-width="4" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 112px; height: 1px; padding-top: 750px; margin-left: 1701px;"><div data-drawio-colors="color: #333333; " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(51, 51, 51); line-height: 1.2; pointer-events: all; white-space: normal; overflow-wrap: normal;">sayhello.c</div></div></div></foreignObject><text x="1757" y="754" fill="#333333" font-family="Liberation Sans" font-size="12px" text-anchor="middle">sayhello.c</text></switch></g><rect x="6800" y="3120" width="456" height="110" rx="16.5" ry="16.5" fill="#eeeeee" stroke="#36393d" stroke-width="4" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 112px; height: 1px; padding-top: 794px; margin-left: 1701px;"><div data-drawio-colors="color: rgb(0, 0, 0); " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(0, 0, 0); line-height: 1.2; pointer-events: all; white-space: normal; overflow-wrap: normal;">sayhello</div></div></div></foreignObject><text x="1757" y="797" fill="rgb(0, 0, 0)" font-family="Liberation Sans" font-size="12px" text-anchor="middle">sayhello</text></switch></g><path d="M 7835.96 3600 Q 7828 3760.57 8090.86 3760.57 Q 8353.71 3760.57 8353.94 3640.42" fill="none" stroke="#000000" stroke-width="12" stroke-miterlimit="10" pointer-events="stroke"/><path d="M 8353.99 3613.42 L 8371.93 3649.45 L 8353.94 3640.42 L 8335.93 3649.38 Z" fill="#000000" stroke="#000000" stroke-width="12" stroke-miterlimit="10" pointer-events="all"/><rect x="7475.96" y="2640" width="480" height="960" rx="72" ry="72" fill="none" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-dasharray="12 12" pointer-events="all"/><path d="M 7715.96 2320 L 7715.96 2614.53" fill="none" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-miterlimit="10" pointer-events="stroke"/><path d="M 7715.96 2635.53 L 7706.63 2607.53 L 7715.96 2614.53 L 7725.29 2607.53 Z" fill="rgb(0, 0, 0)" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-miterlimit="10" pointer-events="all"/><rect x="7589.96" y="2160" width="252" height="160" rx="24" ry="24" fill="rgb(255, 255, 255)" stroke="rgb(0, 0, 0)" stroke-width="4" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 61px; height: 1px; padding-top: 560px; margin-left: 1898px;"><div data-drawio-colors="color: rgb(0, 0, 0); " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(0, 0, 0); line-height: 1.2; pointer-events: all; white-space: normal; overflow-wrap: normal;"><font data-font-src="https://fonts.googleapis.com/css?family=Architects+Daughter">image</font></div></div></div></foreignObject><text x="1929" y="564" fill="rgb(0, 0, 0)" font-family="Liberation Sans" font-size="12px" text-anchor="middle">image</text></switch></g><path d="M 7716 2880 L 7716 2940 Q 7716 2980 7716.01 3017.26 L 7716.01 3054.53" fill="none" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-miterlimit="10" pointer-events="stroke"/><path d="M 7716.02 3075.53 L 7706.68 3047.53 L 7716.01 3054.53 L 7725.35 3047.53 Z" fill="rgb(0, 0, 0)" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-miterlimit="10" pointer-events="all"/><rect x="7629" y="2720" width="174" height="160" rx="24" ry="24" fill="rgb(255, 255, 255)" stroke="rgb(0, 0, 0)" stroke-width="4" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 42px; height: 1px; padding-top: 700px; margin-left: 1908px;"><div data-drawio-colors="color: rgb(0, 0, 0); " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(0, 0, 0); line-height: 1.2; pointer-events: all; white-space: normal; overflow-wrap: normal;"><font data-font-src="https://fonts.googleapis.com/css?family=Architects+Daughter">usr</font></div></div></div></foreignObject><text x="1929" y="704" fill="rgb(0, 0, 0)" font-family="Liberation Sans" font-size="12px" text-anchor="middle">usr</text></switch></g><path d="M 7716.02 3240 L 7716 3394.53" fill="none" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-miterlimit="10" pointer-events="stroke"/><path d="M 7716 3415.53 L 7706.67 3387.53 L 7716 3394.53 L 7725.34 3387.53 Z" fill="rgb(0, 0, 0)" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-miterlimit="10" pointer-events="all"/><rect x="7608.52" y="3080" width="215" height="160" rx="24" ry="24" fill="rgb(255, 255, 255)" stroke="rgb(0, 0, 0)" stroke-width="4" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 52px; height: 1px; padding-top: 790px; margin-left: 1903px;"><div data-drawio-colors="color: rgb(0, 0, 0); " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(0, 0, 0); line-height: 1.2; pointer-events: all; white-space: normal; overflow-wrap: normal;"><font data-font-src="https://fonts.googleapis.com/css?family=Architects+Daughter">bin</font></div></div></div></foreignObject><text x="1929" y="794" fill="rgb(0, 0, 0)" font-family="Liberation Sans" font-size="12px" text-anchor="middle">bin</text></switch></g><rect x="1268" y="2160" width="400" height="160" rx="24" ry="24" fill="rgb(255, 255, 255)" stroke="rgb(0, 0, 0)" stroke-width="4" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 98px; height: 1px; padding-top: 560px; margin-left: 318px;"><div data-drawio-colors="color: rgb(0, 0, 0); " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(0, 0, 0); line-height: 1.2; pointer-events: all; white-space: normal; overflow-wrap: normal;"><font data-font-src="https://fonts.googleapis.com/css?family=Architects+Daughter">libhello-0.1</font></div></div></div></foreignObject><text x="367" y="564" fill="rgb(0, 0, 0)" font-family="Liberation Sans" font-size="12px" text-anchor="middle">libhello-0.1</text></switch></g><path d="M 1241 2160 Q 1241.71 2220 1245.14 2050.29 Q 1248.57 1880.57 1785.14 1880.57 Q 2321.71 1880.57 2321.08 2127.06" fill="none" stroke="#000000" stroke-width="8" stroke-miterlimit="10" stroke-dasharray="8 16" pointer-events="stroke"/><path d="M 2321.02 2151.06 L 2310.44 2119.03 L 2321.08 2127.06 L 2331.77 2119.08 Z" fill="#000000" stroke="#000000" stroke-width="8" stroke-miterlimit="10" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 1px; height: 1px; padding-top: 472px; margin-left: 435px;"><div data-drawio-colors="color: rgb(0, 0, 0); background-color: rgb(255, 255, 255); border-color: rgb(0, 0, 0); " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 11px; font-family: &quot;Liberation Sans&quot;; color: rgb(0, 0, 0); line-height: 1.2; pointer-events: all; background-color: rgb(255, 255, 255); border: 1px solid rgb(0, 0, 0); white-space: nowrap;"><font data-font-src="https://fonts.googleapis.com/css?family=Architects+Daughter" style="font-size: 14px;">S = "${WORKDIR}/git"</font></div></div></div></foreignObject><text x="435" y="475" fill="rgb(0, 0, 0)" font-family="Liberation Sans" font-size="11px" text-anchor="middle">S = "${WORKDIR}/git"</text></switch></g><rect x="3203" y="1620" width="252" height="100" rx="15" ry="15" fill="#000000" stroke="none" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 61px; height: 1px; padding-top: 418px; margin-left: 802px;"><div data-drawio-colors="color: #ffffff; " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(255, 255, 255); line-height: 1.2; pointer-events: all; font-weight: bold; white-space: normal; overflow-wrap: normal;"><font data-font-src="https://fonts.googleapis.com/css?family=Architects+Daughter">WORKDIR</font></div></div></div></foreignObject><text x="832" y="421" fill="#ffffff" font-family="Liberation Sans" font-size="12px" text-anchor="middle" font-weight="bold">WORKDIR</text></switch></g><rect x="2923" y="2160" width="106" height="100" rx="15" ry="15" fill="#000000" stroke="none" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 24px; height: 1px; padding-top: 553px; margin-left: 732px;"><div data-drawio-colors="color: #ffffff; " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(255, 255, 255); line-height: 1.2; pointer-events: all; font-weight: bold; white-space: normal; overflow-wrap: normal;"><font data-font-src="https://fonts.googleapis.com/css?family=Architects+Daughter">D</font></div></div></div></foreignObject><text x="744" y="556" fill="#ffffff" font-family="Liberation Sans" font-size="12px" text-anchor="middle" font-weight="bold">D</text></switch></g><rect x="2268" y="2160" width="106" height="100" rx="15" ry="15" fill="#000000" stroke="none" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 24px; height: 1px; padding-top: 553px; margin-left: 568px;"><div data-drawio-colors="color: #ffffff; " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(255, 255, 255); line-height: 1.2; pointer-events: all; font-weight: bold; white-space: normal; overflow-wrap: normal;"><font data-font-src="https://fonts.googleapis.com/css?family=Architects+Daughter">S</font></div></div></div></foreignObject><text x="580" y="556" fill="#ffffff" font-family="Liberation Sans" font-size="12px" text-anchor="middle" font-weight="bold">S</text></switch></g><rect x="2162" y="2160" width="106" height="100" rx="15" ry="15" fill="#000000" stroke="none" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 24px; height: 1px; padding-top: 553px; margin-left: 542px;"><div data-drawio-colors="color: #ffffff; " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(255, 255, 255); line-height: 1.2; pointer-events: all; font-weight: bold; white-space: normal; overflow-wrap: normal;"><font data-font-src="https://fonts.googleapis.com/css?family=Architects+Daughter">B</font></div></div></div></foreignObject><text x="554" y="556" fill="#ffffff" font-family="Liberation Sans" font-size="12px" text-anchor="middle" font-weight="bold">B</text></switch></g><rect x="1188" y="2160" width="106" height="100" rx="15" ry="15" fill="#000000" stroke="none" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 24px; height: 1px; padding-top: 553px; margin-left: 298px;"><div data-drawio-colors="color: #ffffff; " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(255, 255, 255); line-height: 1.2; pointer-events: all; font-weight: bold; white-space: normal; overflow-wrap: normal;"><font data-font-src="https://fonts.googleapis.com/css?family=Architects+Daughter">S</font></div></div></div></foreignObject><text x="310" y="556" fill="#ffffff" font-family="Liberation Sans" font-size="12px" text-anchor="middle" font-weight="bold">S</text></switch></g><rect x="1835" y="2160" width="106" height="100" rx="15" ry="15" fill="#000000" stroke="none" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 24px; height: 1px; padding-top: 553px; margin-left: 460px;"><div data-drawio-colors="color: #ffffff; " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(255, 255, 255); line-height: 1.2; pointer-events: all; font-weight: bold; white-space: normal; overflow-wrap: normal;"><font data-font-src="https://fonts.googleapis.com/css?family=Architects+Daughter">T</font></div></div></div></foreignObject><text x="472" y="556" fill="#ffffff" font-family="Liberation Sans" font-size="12px" text-anchor="middle" font-weight="bold">T</text></switch></g><rect x="3628" y="3160" width="200" height="100" rx="15" ry="15" fill="#000000" stroke="none" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 48px; height: 1px; padding-top: 803px; margin-left: 908px;"><div data-drawio-colors="color: #ffffff; " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(255, 255, 255); line-height: 1.2; pointer-events: all; font-weight: bold; white-space: normal; overflow-wrap: normal;"><font data-font-src="https://fonts.googleapis.com/css?family=Architects+Daughter">libdir</font></div></div></div></foreignObject><text x="932" y="806" fill="#ffffff" font-family="Liberation Sans" font-size="12px" text-anchor="middle" font-weight="bold">libdir</text></switch></g><rect x="3135" y="3160" width="320" height="100" rx="15" ry="15" fill="#000000" stroke="none" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 78px; height: 1px; padding-top: 803px; margin-left: 785px;"><div data-drawio-colors="color: #ffffff; " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(255, 255, 255); line-height: 1.2; pointer-events: all; font-weight: bold; white-space: normal; overflow-wrap: normal;"><font data-font-src="https://fonts.googleapis.com/css?family=Architects+Daughter">includedir</font></div></div></div></foreignObject><text x="824" y="806" fill="#ffffff" font-family="Liberation Sans" font-size="12px" text-anchor="middle" font-weight="bold">includedir</text></switch></g><rect x="5603.48" y="2280" width="464.52" height="100" rx="15" ry="15" fill="#000000" stroke="none" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 114px; height: 1px; padding-top: 583px; margin-left: 1402px;"><div data-drawio-colors="color: #ffffff; " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(255, 255, 255); line-height: 1.2; pointer-events: all; font-weight: bold; white-space: normal; overflow-wrap: normal;"><font data-font-src="https://fonts.googleapis.com/css?family=Architects+Daughter">RECIPE_SYSROOT</font></div></div></div></foreignObject><text x="1459" y="586" fill="#ffffff" font-family="Liberation Sans" font-size="12px" text-anchor="middle" font-weight="bold">RECIPE_SYSROOT</text></switch></g><path d="M 7226.86 2260 Q 7226.86 2110.29 7197.14 2110.29 Q 7167.43 2110.29 7167.49 1985.47" fill="none" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-miterlimit="10" stroke-dasharray="12 12" pointer-events="stroke"/><path d="M 7167.5 1964.47 L 7176.82 1992.48 L 7167.49 1985.47 L 7158.15 1992.47 Z" fill="rgb(0, 0, 0)" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-miterlimit="10" pointer-events="all"/><rect x="7174" y="2260" width="106" height="100" rx="15" ry="15" fill="#000000" stroke="none" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 24px; height: 1px; padding-top: 578px; margin-left: 1795px;"><div data-drawio-colors="color: #ffffff; " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(255, 255, 255); line-height: 1.2; pointer-events: all; font-weight: bold; white-space: normal; overflow-wrap: normal;"><font data-font-src="https://fonts.googleapis.com/css?family=Architects+Daughter">S</font></div></div></div></foreignObject><text x="1807" y="581" fill="#ffffff" font-family="Liberation Sans" font-size="12px" text-anchor="middle" font-weight="bold">S</text></switch></g><rect x="7068" y="2260" width="106" height="100" rx="15" ry="15" fill="#000000" stroke="none" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 24px; height: 1px; padding-top: 578px; margin-left: 1768px;"><div data-drawio-colors="color: #ffffff; " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(255, 255, 255); line-height: 1.2; pointer-events: all; font-weight: bold; white-space: normal; overflow-wrap: normal;"><font data-font-src="https://fonts.googleapis.com/css?family=Architects+Daughter">B</font></div></div></div></foreignObject><text x="1780" y="581" fill="#ffffff" font-family="Liberation Sans" font-size="12px" text-anchor="middle" font-weight="bold">B</text></switch></g><rect x="7803" y="2260" width="106" height="100" rx="15" ry="15" fill="#000000" stroke="none" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 24px; height: 1px; padding-top: 578px; margin-left: 1952px;"><div data-drawio-colors="color: #ffffff; " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(255, 255, 255); line-height: 1.2; pointer-events: all; font-weight: bold; white-space: normal; overflow-wrap: normal;"><font data-font-src="https://fonts.googleapis.com/css?family=Architects+Daughter">D</font></div></div></div></foreignObject><text x="1964" y="581" fill="#ffffff" font-family="Liberation Sans" font-size="12px" text-anchor="middle" font-weight="bold">D</text></switch></g><rect x="5788" y="1580" width="252" height="100" rx="15" ry="15" fill="#000000" stroke="none" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 61px; height: 1px; padding-top: 408px; margin-left: 1448px;"><div data-drawio-colors="color: #ffffff; " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(255, 255, 255); line-height: 1.2; pointer-events: all; font-weight: bold; white-space: normal; overflow-wrap: normal;"><font data-font-src="https://fonts.googleapis.com/css?family=Architects+Daughter">WORKDIR</font></div></div></div></foreignObject><text x="1479" y="411" fill="#ffffff" font-family="Liberation Sans" font-size="12px" text-anchor="middle" font-weight="bold">WORKDIR</text></switch></g><rect x="4640" y="3680" width="800" height="120" fill="#eeeeee" stroke="#36393d" stroke-width="4" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 1px; height: 1px; padding-top: 935px; margin-left: 1260px;"><div data-drawio-colors="color: rgb(0, 0, 0); " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(0, 0, 0); line-height: 1.2; pointer-events: all; white-space: nowrap;"><font data-font-src="https://fonts.googleapis.com/css?family=Architects+Daughter">do_prepare_recipe_sysroot</font></div></div></div></foreignObject><text x="1260" y="939" fill="rgb(0, 0, 0)" font-family="Liberation Sans" font-size="12px" text-anchor="middle">do_prepare_recipe_sysroot</text></switch></g><rect x="7536" y="3420" width="360" height="110" rx="16.5" ry="16.5" fill="#eeeeee" stroke="#36393d" stroke-width="4" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 88px; height: 1px; padding-top: 869px; margin-left: 1885px;"><div data-drawio-colors="color: rgb(0, 0, 0); " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(0, 0, 0); line-height: 1.2; pointer-events: all; white-space: normal; overflow-wrap: normal;">sayhello</div></div></div></foreignObject><text x="1929" y="872" fill="rgb(0, 0, 0)" font-family="Liberation Sans" font-size="12px" text-anchor="middle">sayhello</text></switch></g><rect x="4988" y="2640" width="1080" height="960" rx="144" ry="144" fill="none" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-dasharray="12 12" pointer-events="all"/><path d="M 5236 2880 Q 5236 2960.57 5235.99 3014.53" fill="none" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-miterlimit="10" pointer-events="stroke"/><path d="M 5235.98 3035.53 L 5226.65 3007.53 L 5235.99 3014.53 L 5245.32 3007.53 Z" fill="rgb(0, 0, 0)" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-miterlimit="10" pointer-events="all"/><path d="M 5236 2880 Q 5236 2960.57 5474.86 2960.57 Q 5713.71 2960.57 5713.91 3014.53" fill="none" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-miterlimit="10" pointer-events="stroke"/><path d="M 5713.98 3035.53 L 5704.55 3007.56 L 5713.91 3014.53 L 5723.22 3007.49 Z" fill="rgb(0, 0, 0)" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-miterlimit="10" pointer-events="all"/><rect x="5149" y="2720" width="174" height="160" rx="24" ry="24" fill="rgb(255, 255, 255)" stroke="rgb(0, 0, 0)" stroke-width="4" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 42px; height: 1px; padding-top: 700px; margin-left: 1288px;"><div data-drawio-colors="color: rgb(0, 0, 0); " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(0, 0, 0); line-height: 1.2; pointer-events: all; white-space: normal; overflow-wrap: normal;"><font data-font-src="https://fonts.googleapis.com/css?family=Architects+Daughter">usr</font></div></div></div></foreignObject><text x="1309" y="704" fill="rgb(0, 0, 0)" font-family="Liberation Sans" font-size="12px" text-anchor="middle">usr</text></switch></g><path d="M 5235.98 3200 L 5236 3294.53" fill="none" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-miterlimit="10" pointer-events="stroke"/><path d="M 5236 3315.53 L 5226.66 3287.53 L 5236 3294.53 L 5245.33 3287.53 Z" fill="rgb(0, 0, 0)" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-miterlimit="10" pointer-events="all"/><rect x="5128.48" y="3040" width="215" height="160" rx="24" ry="24" fill="rgb(255, 255, 255)" stroke="rgb(0, 0, 0)" stroke-width="4" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 52px; height: 1px; padding-top: 780px; margin-left: 1283px;"><div data-drawio-colors="color: rgb(0, 0, 0); " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(0, 0, 0); line-height: 1.2; pointer-events: all; white-space: normal; overflow-wrap: normal;"><font data-font-src="https://fonts.googleapis.com/css?family=Architects+Daughter">include</font></div></div></div></foreignObject><text x="1309" y="784" fill="rgb(0, 0, 0)" font-family="Liberation Sans" font-size="12px" text-anchor="middle">include</text></switch></g><path d="M 5713.71 3200 L 5713.71 3240.57 Q 5713.71 3280.57 5713.76 3287.55 L 5713.82 3294.53" fill="none" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-miterlimit="10" pointer-events="stroke"/><path d="M 5713.97 3315.53 L 5704.43 3287.6 L 5713.82 3294.53 L 5723.1 3287.46 Z" fill="rgb(0, 0, 0)" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-miterlimit="10" pointer-events="all"/><rect x="5627" y="3040" width="174" height="160" rx="24" ry="24" fill="rgb(255, 255, 255)" stroke="rgb(0, 0, 0)" stroke-width="4" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 42px; height: 1px; padding-top: 780px; margin-left: 1408px;"><div data-drawio-colors="color: rgb(0, 0, 0); " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(0, 0, 0); line-height: 1.2; pointer-events: all; white-space: normal; overflow-wrap: normal;"><font data-font-src="https://fonts.googleapis.com/css?family=Architects+Daughter">lib</font></div></div></div></foreignObject><text x="1429" y="784" fill="rgb(0, 0, 0)" font-family="Liberation Sans" font-size="12px" text-anchor="middle">lib</text></switch></g><rect x="5076" y="3320" width="320" height="100" rx="15" ry="15" fill="#f5f5f5" stroke="#666666" stroke-width="4" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 78px; height: 1px; padding-top: 843px; margin-left: 1270px;"><div data-drawio-colors="color: #333333; " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(51, 51, 51); line-height: 1.2; pointer-events: all; white-space: normal; overflow-wrap: normal;">hellolib.h</div></div></div></foreignObject><text x="1309" y="846" fill="#333333" font-family="Liberation Sans" font-size="12px" text-anchor="middle">hellolib.h</text></switch></g><rect x="5468" y="3350" width="480" height="80" rx="12" ry="12" fill="#f5f5f5" stroke="#666666" stroke-width="4" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 118px; height: 1px; padding-top: 848px; margin-left: 1368px;"><div data-drawio-colors="color: #333333; " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(51, 51, 51); line-height: 1.2; pointer-events: all; white-space: normal; overflow-wrap: normal;">libhello.so.1</div></div></div></foreignObject><text x="1427" y="851" fill="#333333" font-family="Liberation Sans" font-size="12px" text-anchor="middle">libhello.so.1</text></switch></g><rect x="5468" y="3450" width="480" height="100" rx="15" ry="15" fill="#f5f5f5" stroke="#666666" stroke-width="4" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 118px; height: 1px; padding-top: 875px; margin-left: 1368px;"><div data-drawio-colors="color: #333333; " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(51, 51, 51); line-height: 1.2; pointer-events: all; white-space: normal; overflow-wrap: normal;">libhello.so.1.0</div></div></div></foreignObject><text x="1427" y="879" fill="#333333" font-family="Liberation Sans" font-size="12px" text-anchor="middle">libhello.so.1.0</text></switch></g><rect x="5440" y="3320" width="548" height="250" fill="none" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-dasharray="12 12" pointer-events="all"/><rect x="3880" y="1680" width="1560" height="280" fill="none" stroke="#36393d" stroke-width="4" stroke-dasharray="12 12" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 1px; height: 1px; padding-top: 455px; margin-left: 1165px;"><div data-drawio-colors="color: rgb(0, 0, 0); " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(0, 0, 0); line-height: 1.2; pointer-events: all; white-space: nowrap;"><font data-font-src="https://fonts.googleapis.com/css?family=Architects+Daughter" style="font-size: 15px;">This also contains other files from other <br />dependencies. Default dependencies are:<br />basically <b style=""><u>gcc</u></b>, <b style=""><u>compilerlibs</u></b> and <b style=""><u style="">libc</u></b></font></div></div></div></foreignObject><text x="1165" y="459" fill="rgb(0, 0, 0)" font-family="Liberation Sans" font-size="12px" text-anchor="middle">This also contains other files from other...</text></switch></g><rect x="4368" y="2280" width="510" height="100" rx="15" ry="15" fill="#000000" stroke="none" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 126px; height: 1px; padding-top: 583px; margin-left: 1093px;"><div data-drawio-colors="color: #ffffff; " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(255, 255, 255); line-height: 1.2; pointer-events: all; font-weight: bold; white-space: normal; overflow-wrap: normal;"><font data-font-src="https://fonts.googleapis.com/css?family=Architects+Daughter">SYSROOT_DESTDIR</font></div></div></div></foreignObject><text x="1156" y="586" fill="#ffffff" font-family="Liberation Sans" font-size="12px" text-anchor="middle" font-weight="bold">SYSROOT_DESTDIR</text></switch></g><path d="M 330 960 L 1121.58 960" fill="none" stroke="#000000" stroke-width="12" stroke-miterlimit="10" pointer-events="stroke"/><path d="M 1148.58 960 L 1112.58 978 L 1121.58 960 L 1112.58 942 Z" fill="#000000" stroke="#000000" stroke-width="12" stroke-miterlimit="10" pointer-events="all"/><path d="M 330 1160 L 1121.58 1160" fill="none" stroke="#000000" stroke-width="12" stroke-miterlimit="10" pointer-events="stroke"/><path d="M 1148.58 1160 L 1112.58 1178 L 1121.58 1160 L 1112.58 1142 Z" fill="#000000" stroke="#000000" stroke-width="12" stroke-miterlimit="10" pointer-events="all"/><rect x="2" y="860" width="328" height="400" rx="49.2" ry="49.2" fill="#000000" stroke="#23445d" stroke-width="4" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 80px; height: 1px; padding-top: 265px; margin-left: 1px;"><div data-drawio-colors="color: rgb(0, 0, 0); " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(0, 0, 0); line-height: 1.2; pointer-events: all; white-space: normal; overflow-wrap: normal;"><font color="#fcfcfc">Github</font></div></div></div></foreignObject><text x="41" y="269" fill="rgb(0, 0, 0)" font-family="Liberation Sans" font-size="12px" text-anchor="middle">Github</text></switch></g><rect x="535" y="900" width="320" height="120" fill="#eeeeee" stroke="#36393d" stroke-width="4" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 1px; height: 1px; padding-top: 240px; margin-left: 174px;"><div data-drawio-colors="color: rgb(0, 0, 0); " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(0, 0, 0); line-height: 1.2; pointer-events: all; white-space: nowrap;"><font data-font-src="https://fonts.googleapis.com/css?family=Architects+Daughter">do_fetch<br /></font></div></div></div></foreignObject><text x="174" y="244" fill="rgb(0, 0, 0)" font-family="Liberation Sans" font-size="12px" text-anchor="middle">do_fetch&#xa;</text></switch></g><ellipse cx="475" cy="960" rx="60.00000000000001" ry="60.00000000000001" fill="#000000" stroke="#56517e" stroke-width="4" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 28px; height: 1px; padding-top: 240px; margin-left: 105px;"><div data-drawio-colors="color: rgb(0, 0, 0); " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(0, 0, 0); line-height: 1.2; pointer-events: all; white-space: normal; overflow-wrap: normal;"><font data-font-src="https://fonts.googleapis.com/css?family=Architects+Daughter" color="#ffffff" size="1"><b style="font-size: 15px;">1</b></font></div></div></div></foreignObject><text x="119" y="244" fill="rgb(0, 0, 0)" font-family="Liberation Sans" font-size="12px" text-anchor="middle">1</text></switch></g><rect x="535" y="1100" width="320" height="120" fill="#eeeeee" stroke="#36393d" stroke-width="4" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 1px; height: 1px; padding-top: 290px; margin-left: 174px;"><div data-drawio-colors="color: rgb(0, 0, 0); " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(0, 0, 0); line-height: 1.2; pointer-events: all; white-space: nowrap;"><font data-font-src="https://fonts.googleapis.com/css?family=Architects+Daughter">do_fetch<br /></font></div></div></div></foreignObject><text x="174" y="294" fill="rgb(0, 0, 0)" font-family="Liberation Sans" font-size="12px" text-anchor="middle">do_fetch&#xa;</text></switch></g><ellipse cx="475" cy="1160" rx="60.00000000000001" ry="60.00000000000001" fill="#000000" stroke="#56517e" stroke-width="4" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 28px; height: 1px; padding-top: 290px; margin-left: 105px;"><div data-drawio-colors="color: rgb(0, 0, 0); " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(0, 0, 0); line-height: 1.2; pointer-events: all; white-space: normal; overflow-wrap: normal;"><font data-font-src="https://fonts.googleapis.com/css?family=Architects+Daughter" color="#ffffff" size="1"><b style="font-size: 15px;">5</b></font></div></div></div></foreignObject><text x="119" y="294" fill="rgb(0, 0, 0)" font-family="Liberation Sans" font-size="12px" text-anchor="middle">5</text></switch></g><rect x="6228" y="2980" width="440" height="120" fill="#eeeeee" stroke="#36393d" stroke-width="4" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 1px; height: 1px; padding-top: 760px; margin-left: 1612px;"><div data-drawio-colors="color: rgb(0, 0, 0); " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(0, 0, 0); line-height: 1.2; pointer-events: all; white-space: nowrap;"><font data-font-src="https://fonts.googleapis.com/css?family=Architects+Daughter">do_configure<br /></font></div></div></div></foreignObject><text x="1612" y="764" fill="rgb(0, 0, 0)" font-family="Liberation Sans" font-size="12px" text-anchor="middle">do_configure&#xa;</text></switch></g><ellipse cx="6168" cy="3040" rx="60.00000000000001" ry="60.00000000000001" fill="#000000" stroke="#56517e" stroke-width="4" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 28px; height: 1px; padding-top: 760px; margin-left: 1528px;"><div data-drawio-colors="color: rgb(0, 0, 0); " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(0, 0, 0); line-height: 1.2; pointer-events: all; white-space: normal; overflow-wrap: normal;"><font data-font-src="https://fonts.googleapis.com/css?family=Architects+Daughter" color="#ffffff" size="1"><b style="font-size: 15px;">12</b></font></div></div></div></foreignObject><text x="1542" y="764" fill="rgb(0, 0, 0)" font-family="Liberation Sans" font-size="12px" text-anchor="middle">12</text></switch></g><rect x="6308" y="3110" width="360" height="120" fill="#eeeeee" stroke="#36393d" stroke-width="4" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 1px; height: 1px; padding-top: 793px; margin-left: 1622px;"><div data-drawio-colors="color: rgb(0, 0, 0); " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(0, 0, 0); line-height: 1.2; pointer-events: all; white-space: nowrap;"><font data-font-src="https://fonts.googleapis.com/css?family=Architects+Daughter">do_compile<br /></font></div></div></div></foreignObject><text x="1622" y="796" fill="rgb(0, 0, 0)" font-family="Liberation Sans" font-size="12px" text-anchor="middle">do_compile&#xa;</text></switch></g><ellipse cx="6248" cy="3170" rx="60.00000000000001" ry="60.00000000000001" fill="#000000" stroke="#56517e" stroke-width="4" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 28px; height: 1px; padding-top: 793px; margin-left: 1548px;"><div data-drawio-colors="color: rgb(0, 0, 0); " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(0, 0, 0); line-height: 1.2; pointer-events: all; white-space: normal; overflow-wrap: normal;"><font data-font-src="https://fonts.googleapis.com/css?family=Architects+Daughter" color="#ffffff" size="1"><b style="font-size: 15px;">13</b></font></div></div></div></foreignObject><text x="1562" y="796" fill="rgb(0, 0, 0)" font-family="Liberation Sans" font-size="12px" text-anchor="middle">13</text></switch></g><rect x="2508" y="900" width="360" height="120" fill="#eeeeee" stroke="#36393d" stroke-width="4" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 1px; height: 1px; padding-top: 240px; margin-left: 672px;"><div data-drawio-colors="color: rgb(0, 0, 0); " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(0, 0, 0); line-height: 1.2; pointer-events: all; white-space: nowrap;"><font data-font-src="https://fonts.googleapis.com/css?family=Architects+Daughter">do_unpack<br /></font></div></div></div></foreignObject><text x="672" y="244" fill="rgb(0, 0, 0)" font-family="Liberation Sans" font-size="12px" text-anchor="middle">do_unpack&#xa;</text></switch></g><ellipse cx="2448" cy="960" rx="60.00000000000001" ry="60.00000000000001" fill="#000000" stroke="#56517e" stroke-width="4" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 28px; height: 1px; padding-top: 240px; margin-left: 598px;"><div data-drawio-colors="color: rgb(0, 0, 0); " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(0, 0, 0); line-height: 1.2; pointer-events: all; white-space: normal; overflow-wrap: normal;"><font data-font-src="https://fonts.googleapis.com/css?family=Architects+Daughter" color="#ffffff" size="1"><b style="font-size: 15px;">2</b></font></div></div></div></foreignObject><text x="612" y="244" fill="rgb(0, 0, 0)" font-family="Liberation Sans" font-size="12px" text-anchor="middle">2</text></switch></g><path d="M 5848.57 520 Q 5848.57 600.57 5849.05 594.61" fill="none" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-miterlimit="10" pointer-events="stroke"/><path d="M 5847.36 615.54 L 5835.66 586.51 L 5849.05 594.61 L 5863.57 588.76 Z" fill="rgb(0, 0, 0)" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-miterlimit="10" pointer-events="all"/><path d="M 5848.57 520 Q 5848.57 560.57 5593.14 560.57 Q 5337.71 560.57 5337.31 594.53" fill="none" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-miterlimit="10" pointer-events="stroke"/><path d="M 5337.05 615.53 L 5323.39 587.36 L 5337.31 594.53 L 5351.39 587.7 Z" fill="rgb(0, 0, 0)" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-miterlimit="10" pointer-events="all"/><path d="M 5848.57 520 Q 5848.57 560.57 6108.57 560.57 Q 6368.57 560.57 6368.24 594.53" fill="none" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-miterlimit="10" pointer-events="stroke"/><path d="M 6368.04 615.53 L 6354.31 587.39 L 6368.24 594.53 L 6382.31 587.66 Z" fill="rgb(0, 0, 0)" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-miterlimit="10" pointer-events="all"/><rect x="5708" y="360" width="280" height="160" rx="24" ry="24" fill="rgb(255, 255, 255)" stroke="rgb(0, 0, 0)" stroke-width="4" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 68px; height: 1px; padding-top: 110px; margin-left: 1428px;"><div data-drawio-colors="color: rgb(0, 0, 0); " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(0, 0, 0); line-height: 1.2; pointer-events: all; white-space: normal; overflow-wrap: normal;"><font data-font-src="https://fonts.googleapis.com/css?family=Architects+Daughter">conf</font></div></div></div></foreignObject><text x="1462" y="114" fill="rgb(0, 0, 0)" font-family="Liberation Sans" font-size="12px" text-anchor="middle">conf</text></switch></g><path d="M 5149 660 Q 5108 660 5108 510.29 Q 5108 360.57 5252 360.57 Q 5396 360.57 5396 205.47" fill="none" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-miterlimit="10" stroke-dasharray="12 12" pointer-events="stroke"/><path d="M 5396 184.47 L 5405.33 212.47 L 5396 205.47 L 5386.67 212.47 Z" fill="rgb(0, 0, 0)" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-miterlimit="10" pointer-events="all"/><rect x="5149" y="620" width="376" height="80" rx="12" ry="12" fill="#eeeeee" stroke="#36393d" stroke-width="4" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 92px; height: 1px; padding-top: 165px; margin-left: 1288px;"><div data-drawio-colors="color: rgb(0, 0, 0); " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(0, 0, 0); line-height: 1.2; pointer-events: all; white-space: normal; overflow-wrap: normal;">local.conf</div></div></div></foreignObject><text x="1334" y="169" fill="rgb(0, 0, 0)" font-family="Liberation Sans" font-size="12px" text-anchor="middle">local.conf</text></switch></g><path d="M 5847.43 700 Q 5847.43 860 6048 860 Q 6248.57 860 6248.09 994.53" fill="none" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-miterlimit="10" stroke-dasharray="12 12" pointer-events="stroke"/><path d="M 6248.02 1015.53 L 6238.78 987.49 L 6248.09 994.53 L 6257.45 987.56 Z" fill="rgb(0, 0, 0)" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-miterlimit="10" pointer-events="all"/><rect x="5627" y="620" width="440" height="80" rx="12" ry="12" fill="#eeeeee" stroke="#36393d" stroke-width="4" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 108px; height: 1px; padding-top: 165px; margin-left: 1408px;"><div data-drawio-colors="color: rgb(0, 0, 0); " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(0, 0, 0); line-height: 1.2; pointer-events: all; white-space: normal; overflow-wrap: normal;">bblayers.conf</div></div></div></foreignObject><text x="1462" y="169" fill="rgb(0, 0, 0)" font-family="Liberation Sans" font-size="12px" text-anchor="middle">bblayers.conf</text></switch></g><rect x="6707" y="1360" width="921" height="600" fill="rgb(255, 255, 255)" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-dasharray="12 12" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 228px; height: 1px; padding-top: 415px; margin-left: 1678px;"><div data-drawio-colors="color: rgb(0, 0, 0); " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(0, 0, 0); line-height: 1.2; pointer-events: all; white-space: normal; overflow-wrap: normal;"><font style="font-size: 15px;"><b><u>S</u></b> defaults generally to <b><u>${WORKDIR}/${BPN}-${PV}</u></b><br />In <b>git</b> recipes change it to <b><u>${WORKDIR}/git</u></b></font></div></div></div></foreignObject><text x="1792" y="419" fill="rgb(0, 0, 0)" font-family="Liberation Sans" font-size="12px" text-anchor="middle">S defaults generally to ${WORKDIR}/${B...</text></switch></g><rect x="6228" y="2700" width="440" height="160" fill="#eeeeee" stroke="#36393d" stroke-width="4" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 1px; height: 1px; padding-top: 695px; margin-left: 1612px;"><div data-drawio-colors="color: rgb(0, 0, 0); " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(0, 0, 0); line-height: 1.2; pointer-events: all; white-space: nowrap;"><font data-font-src="https://fonts.googleapis.com/css?family=Architects+Daughter">do_patch<br /><i>(No patches)</i><br /></font></div></div></div></foreignObject><text x="1612" y="699" fill="rgb(0, 0, 0)" font-family="Liberation Sans" font-size="12px" text-anchor="middle">do_patch...</text></switch></g><ellipse cx="6168" cy="2780" rx="60.00000000000001" ry="60.00000000000001" fill="#000000" stroke="#56517e" stroke-width="4" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 28px; height: 1px; padding-top: 695px; margin-left: 1528px;"><div data-drawio-colors="color: rgb(0, 0, 0); " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(0, 0, 0); line-height: 1.2; pointer-events: all; white-space: normal; overflow-wrap: normal;"><font data-font-src="https://fonts.googleapis.com/css?family=Architects+Daughter" color="#ffffff" size="1"><b style="font-size: 15px;">3</b></font></div></div></div></foreignObject><text x="1542" y="699" fill="rgb(0, 0, 0)" font-family="Liberation Sans" font-size="12px" text-anchor="middle">3</text></switch></g><ellipse cx="4580" cy="3740" rx="60.00000000000001" ry="60.00000000000001" fill="#000000" stroke="#56517e" stroke-width="4" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 28px; height: 1px; padding-top: 935px; margin-left: 1131px;"><div data-drawio-colors="color: rgb(0, 0, 0); " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(0, 0, 0); line-height: 1.2; pointer-events: all; white-space: normal; overflow-wrap: normal;"><font data-font-src="https://fonts.googleapis.com/css?family=Architects+Daughter" color="#ffffff" size="1"><b style="font-size: 15px;">4</b></font></div></div></div></foreignObject><text x="1145" y="939" fill="rgb(0, 0, 0)" font-family="Liberation Sans" font-size="12px" text-anchor="middle">4</text></switch></g><rect x="6927" y="3560" width="360" height="120" fill="#eeeeee" stroke="#36393d" stroke-width="4" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 1px; height: 1px; padding-top: 905px; margin-left: 1777px;"><div data-drawio-colors="color: rgb(0, 0, 0); " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(0, 0, 0); line-height: 1.2; pointer-events: all; white-space: nowrap;"><font data-font-src="https://fonts.googleapis.com/css?family=Architects+Daughter">do_install<br /></font></div></div></div></foreignObject><text x="1777" y="909" fill="rgb(0, 0, 0)" font-family="Liberation Sans" font-size="12px" text-anchor="middle">do_install&#xa;</text></switch></g><ellipse cx="6868" cy="3620" rx="60.00000000000001" ry="60.00000000000001" fill="#000000" stroke="#56517e" stroke-width="4" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 28px; height: 1px; padding-top: 905px; margin-left: 1703px;"><div data-drawio-colors="color: rgb(0, 0, 0); " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(0, 0, 0); line-height: 1.2; pointer-events: all; white-space: normal; overflow-wrap: normal;"><font data-font-src="https://fonts.googleapis.com/css?family=Architects+Daughter" color="#ffffff" size="1"><b style="font-size: 15px;">14</b></font></div></div></div></foreignObject><text x="1717" y="909" fill="rgb(0, 0, 0)" font-family="Liberation Sans" font-size="12px" text-anchor="middle">14</text></switch></g><path d="M 8354.02 2320 L 8354.02 2614.53" fill="none" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-miterlimit="10" pointer-events="stroke"/><path d="M 8354.02 2635.53 L 8344.69 2607.53 L 8354.02 2614.53 L 8363.35 2607.53 Z" fill="rgb(0, 0, 0)" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-miterlimit="10" pointer-events="all"/><rect x="8228" y="2160" width="252" height="160" rx="24" ry="24" fill="rgb(255, 255, 255)" stroke="rgb(0, 0, 0)" stroke-width="4" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 61px; height: 1px; padding-top: 560px; margin-left: 2058px;"><div data-drawio-colors="color: rgb(0, 0, 0); " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(0, 0, 0); line-height: 1.2; pointer-events: all; white-space: normal; overflow-wrap: normal;"><font data-font-src="https://fonts.googleapis.com/css?family=Architects+Daughter">package</font></div></div></div></foreignObject><text x="2089" y="564" fill="rgb(0, 0, 0)" font-family="Liberation Sans" font-size="12px" text-anchor="middle">package</text></switch></g><rect x="8441.04" y="2260" width="146.96" height="100" rx="15" ry="15" fill="#000000" stroke="none" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 35px; height: 1px; padding-top: 578px; margin-left: 2111px;"><div data-drawio-colors="color: #ffffff; " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(255, 255, 255); line-height: 1.2; pointer-events: all; font-weight: bold; white-space: normal; overflow-wrap: normal;"><font data-font-src="https://fonts.googleapis.com/css?family=Architects+Daughter">PKGD</font></div></div></div></foreignObject><text x="2129" y="581" fill="#ffffff" font-family="Liberation Sans" font-size="12px" text-anchor="middle" font-weight="bold">PKGD</text></switch></g><path d="M 8461.77 3600 Q 8461.14 3760.57 8784.57 3760.57 Q 9108 3760.57 9108 3640.42" fill="none" stroke="#000000" stroke-width="12" stroke-miterlimit="10" pointer-events="stroke"/><path d="M 9108 3613.42 L 9126 3649.42 L 9108 3640.42 L 9090 3649.42 Z" fill="#000000" stroke="#000000" stroke-width="12" stroke-miterlimit="10" pointer-events="all"/><rect x="8138.52" y="2640" width="431" height="960" rx="64.65" ry="64.65" fill="#eeeeee" stroke="#36393d" stroke-width="4" stroke-dasharray="12 12" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 106px; height: 1px; padding-top: 780px; margin-left: 2036px;"><div data-drawio-colors="color: rgb(0, 0, 0); " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(0, 0, 0); line-height: 1.2; pointer-events: all; white-space: normal; overflow-wrap: normal;">A copy of <b>${D}<br /></b>excluding<br /><b>/sysroot-only</b></div></div></div></foreignObject><text x="2089" y="784" fill="rgb(0, 0, 0)" font-family="Liberation Sans" font-size="12px" text-anchor="middle">A copy of ${D}...</text></switch></g><rect x="7960.96" y="3680" width="360" height="120" fill="#eeeeee" stroke="#36393d" stroke-width="4" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 1px; height: 1px; padding-top: 935px; margin-left: 2035px;"><div data-drawio-colors="color: rgb(0, 0, 0); " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(0, 0, 0); line-height: 1.2; pointer-events: all; white-space: nowrap;"><font data-font-src="https://fonts.googleapis.com/css?family=Architects+Daughter">do_package<br /></font></div></div></div></foreignObject><text x="2035" y="939" fill="rgb(0, 0, 0)" font-family="Liberation Sans" font-size="12px" text-anchor="middle">do_package&#xa;</text></switch></g><ellipse cx="7901.96" cy="3740" rx="60.00000000000001" ry="60.00000000000001" fill="#000000" stroke="#56517e" stroke-width="4" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 28px; height: 1px; padding-top: 935px; margin-left: 1961px;"><div data-drawio-colors="color: rgb(0, 0, 0); " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(0, 0, 0); line-height: 1.2; pointer-events: all; white-space: normal; overflow-wrap: normal;"><font data-font-src="https://fonts.googleapis.com/css?family=Architects+Daughter" color="#ffffff" size="1"><b style="font-size: 15px;">15</b></font></div></div></div></foreignObject><text x="1975" y="939" fill="rgb(0, 0, 0)" font-family="Liberation Sans" font-size="12px" text-anchor="middle">15</text></switch></g><path d="M 9108 2320 L 9108 2614.53" fill="none" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-miterlimit="10" pointer-events="stroke"/><path d="M 9108 2635.53 L 9098.67 2607.53 L 9108 2614.53 L 9117.33 2607.53 Z" fill="rgb(0, 0, 0)" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-miterlimit="10" pointer-events="all"/><rect x="8868" y="2160" width="480" height="160" rx="24" ry="24" fill="rgb(255, 255, 255)" stroke="rgb(0, 0, 0)" stroke-width="4" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 118px; height: 1px; padding-top: 560px; margin-left: 2218px;"><div data-drawio-colors="color: rgb(0, 0, 0); " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(0, 0, 0); line-height: 1.2; pointer-events: all; white-space: normal; overflow-wrap: normal;"><font data-font-src="https://fonts.googleapis.com/css?family=Architects+Daughter">packages-split</font></div></div></div></foreignObject><text x="2277" y="564" fill="rgb(0, 0, 0)" font-family="Liberation Sans" font-size="12px" text-anchor="middle">packages-split</text></switch></g><rect x="9308" y="2260" width="240" height="100" rx="15" ry="15" fill="#000000" stroke="none" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 58px; height: 1px; padding-top: 578px; margin-left: 2328px;"><div data-drawio-colors="color: #ffffff; " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(255, 255, 255); line-height: 1.2; pointer-events: all; font-weight: bold; white-space: normal; overflow-wrap: normal;"><font data-font-src="https://fonts.googleapis.com/css?family=Architects+Daughter">PKGDEST</font></div></div></div></foreignObject><text x="2357" y="581" fill="#ffffff" font-family="Liberation Sans" font-size="12px" text-anchor="middle" font-weight="bold">PKGDEST</text></switch></g><path d="M 9108 2840 L 9108 2895.1" fill="none" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-miterlimit="10" pointer-events="stroke"/><path d="M 9108 2916.1 L 9098.67 2888.1 L 9108 2895.1 L 9117.33 2888.1 Z" fill="rgb(0, 0, 0)" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-miterlimit="10" pointer-events="all"/><rect x="8982" y="2680" width="252" height="160" rx="24" ry="24" fill="rgb(255, 255, 255)" stroke="rgb(0, 0, 0)" stroke-width="4" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 61px; height: 1px; padding-top: 690px; margin-left: 2247px;"><div data-drawio-colors="color: rgb(0, 0, 0); " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(0, 0, 0); line-height: 1.2; pointer-events: all; white-space: normal; overflow-wrap: normal;"><font data-font-src="https://fonts.googleapis.com/css?family=Architects+Daughter">sayhello</font></div></div></div></foreignObject><text x="2277" y="694" fill="rgb(0, 0, 0)" font-family="Liberation Sans" font-size="12px" text-anchor="middle">sayhello</text></switch></g><path d="M 9108 3080 L 9108 3120.57 Q 9108 3160.57 9108 3162.55 L 9108 3164.53" fill="none" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-miterlimit="10" pointer-events="stroke"/><path d="M 9107.98 3185.53 L 9098.67 3157.52 L 9108 3164.53 L 9117.34 3157.53 Z" fill="rgb(0, 0, 0)" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-miterlimit="10" pointer-events="all"/><rect x="9021" y="2920" width="174" height="160" rx="24" ry="24" fill="rgb(255, 255, 255)" stroke="rgb(0, 0, 0)" stroke-width="4" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 42px; height: 1px; padding-top: 750px; margin-left: 2256px;"><div data-drawio-colors="color: rgb(0, 0, 0); " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(0, 0, 0); line-height: 1.2; pointer-events: all; white-space: normal; overflow-wrap: normal;"><font data-font-src="https://fonts.googleapis.com/css?family=Architects+Daughter">usr</font></div></div></div></foreignObject><text x="2277" y="754" fill="rgb(0, 0, 0)" font-family="Liberation Sans" font-size="12px" text-anchor="middle">usr</text></switch></g><path d="M 9107.98 3350 L 9107.97 3414.53" fill="none" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-miterlimit="10" pointer-events="stroke"/><path d="M 9107.96 3435.53 L 9098.63 3407.53 L 9107.97 3414.53 L 9117.3 3407.53 Z" fill="rgb(0, 0, 0)" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-miterlimit="10" pointer-events="all"/><rect x="9000.48" y="3190" width="215" height="160" rx="24" ry="24" fill="rgb(255, 255, 255)" stroke="rgb(0, 0, 0)" stroke-width="4" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 52px; height: 1px; padding-top: 818px; margin-left: 2251px;"><div data-drawio-colors="color: rgb(0, 0, 0); " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(0, 0, 0); line-height: 1.2; pointer-events: all; white-space: normal; overflow-wrap: normal;"><font data-font-src="https://fonts.googleapis.com/css?family=Architects+Daughter">bin</font></div></div></div></foreignObject><text x="2277" y="821" fill="rgb(0, 0, 0)" font-family="Liberation Sans" font-size="12px" text-anchor="middle">bin</text></switch></g><rect x="7689.48" y="1360" width="1287" height="595" fill="rgb(255, 255, 255)" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-dasharray="12 12" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 320px; height: 1px; padding-top: 414px; margin-left: 1923px;"><div data-drawio-colors="color: rgb(0, 0, 0); " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(0, 0, 0); line-height: 1.2; pointer-events: all; white-space: normal; overflow-wrap: normal;"><span style="font-size: 15px;">Folders created here are present in <b><u>PACKAGES</u></b> variable, BitBake knows what and where to put things using the <b><u>FILES</u></b> variable, example: <b><u>FILES:${PN}</u></b> files will go to <b><u>${PN}</u></b> folder which is in <b><u>PACKAGES</u></b></span></div></div></div></foreignObject><text x="2083" y="418" fill="rgb(0, 0, 0)" font-family="Liberation Sans" font-size="12px" text-anchor="middle">Folders created here are present in PACKAGES variable...</text></switch></g><rect x="8640.48" y="3680" width="360" height="120" fill="#eeeeee" stroke="#36393d" stroke-width="4" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 1px; height: 1px; padding-top: 935px; margin-left: 2205px;"><div data-drawio-colors="color: rgb(0, 0, 0); " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(0, 0, 0); line-height: 1.2; pointer-events: all; white-space: nowrap;"><font data-font-src="https://fonts.googleapis.com/css?family=Architects+Daughter">do_package<br /></font></div></div></div></foreignObject><text x="2205" y="939" fill="rgb(0, 0, 0)" font-family="Liberation Sans" font-size="12px" text-anchor="middle">do_package&#xa;</text></switch></g><ellipse cx="8581.48" cy="3740" rx="60.00000000000001" ry="60.00000000000001" fill="#000000" stroke="#56517e" stroke-width="4" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 28px; height: 1px; padding-top: 935px; margin-left: 2131px;"><div data-drawio-colors="color: rgb(0, 0, 0); " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(0, 0, 0); line-height: 1.2; pointer-events: all; white-space: normal; overflow-wrap: normal;"><font data-font-src="https://fonts.googleapis.com/css?family=Architects+Daughter" color="#ffffff" size="1"><b style="font-size: 15px;">16</b></font></div></div></div></foreignObject><text x="2145" y="939" fill="rgb(0, 0, 0)" font-family="Liberation Sans" font-size="12px" text-anchor="middle">16</text></switch></g><path d="M 9988 2320 L 9988 2674.53" fill="none" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-miterlimit="10" pointer-events="stroke"/><path d="M 9988 2695.53 L 9978.67 2667.53 L 9988 2674.53 L 9997.33 2667.53 Z" fill="rgb(0, 0, 0)" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-miterlimit="10" pointer-events="all"/><rect x="9748" y="2160" width="480" height="160" rx="24" ry="24" fill="rgb(255, 255, 255)" stroke="rgb(0, 0, 0)" stroke-width="4" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 118px; height: 1px; padding-top: 560px; margin-left: 2438px;"><div data-drawio-colors="color: rgb(0, 0, 0); " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(0, 0, 0); line-height: 1.2; pointer-events: all; white-space: normal; overflow-wrap: normal;"><font data-font-src="https://fonts.googleapis.com/css?family=Architects+Daughter">deploy-<b><i>pkg</i></b></font></div></div></div></foreignObject><text x="2497" y="564" fill="rgb(0, 0, 0)" font-family="Liberation Sans" font-size="12px" text-anchor="middle">deploy-pkg</text></switch></g><path d="M 9988 2860 L 9988 3014.53" fill="none" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-miterlimit="10" pointer-events="stroke"/><path d="M 9988 3035.53 L 9978.67 3007.53 L 9988 3014.53 L 9997.33 3007.53 Z" fill="rgb(0, 0, 0)" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-miterlimit="10" pointer-events="all"/><rect x="9748" y="2700" width="480" height="160" rx="24" ry="24" fill="rgb(255, 255, 255)" stroke="rgb(0, 0, 0)" stroke-width="4" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 118px; height: 1px; padding-top: 695px; margin-left: 2438px;"><div data-drawio-colors="color: rgb(0, 0, 0); " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(0, 0, 0); line-height: 1.2; pointer-events: all; white-space: normal; overflow-wrap: normal;"><font data-font-src="https://fonts.googleapis.com/css?family=Architects+Daughter"><b>${PACKAGE_ARCH}</b></font></div></div></div></foreignObject><text x="2497" y="699" fill="rgb(0, 0, 0)" font-family="Liberation Sans" font-size="12px" text-anchor="middle">${PACKAGE_ARCH}</text></switch></g><rect x="9028" y="1360" width="1640" height="595" fill="rgb(255, 255, 255)" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-dasharray="12 12" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 408px; height: 1px; padding-top: 414px; margin-left: 2258px;"><div data-drawio-colors="color: rgb(0, 0, 0); " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(0, 0, 0); line-height: 1.2; pointer-events: all; white-space: normal; overflow-wrap: normal;"><font style="font-size: 15px;">This can be <b><u>rpms</u></b>, <b><u>debs</u></b> or <b><u>ipks</u></b>.<br />These are provided by<br /><b><u>package_rpm</u></b>, <b><u>package_deb</u></b> and <b><u>package_ipk</u></b> classes respectively, use <b><u>PACKAGE_CLASSES</u></b> for that as<br />content of <b><u>PACKAGE_CLASSES</u></b> will be appended<br />to <b><u>INHERIT</u></b><br /></font></div></div></div></foreignObject><text x="2462" y="418" fill="rgb(0, 0, 0)" font-family="Liberation Sans" font-size="12px" text-anchor="middle">This can be rpms, debs or ipks....</text></switch></g><path d="M 10522 3105 L 10708 3105.59 Q 10748 3105.71 10748 3065.71 L 10748 1320.57 Q 10748 1280.57 10708 1280.57 L 7353.71 1280.57 Q 7313.71 1280.57 7313.8 1240.57 L 7313.92 1180.42" fill="none" stroke="#000000" stroke-width="12" stroke-miterlimit="10" pointer-events="stroke"/><path d="M 7313.97 1153.42 L 7331.9 1189.45 L 7313.92 1180.42 L 7295.9 1189.38 Z" fill="#000000" stroke="#000000" stroke-width="12" stroke-miterlimit="10" pointer-events="all"/><rect x="9454" y="3040" width="1068" height="130" rx="19.5" ry="19.5" fill="#eeeeee" stroke="#36393d" stroke-width="4" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 265px; height: 1px; padding-top: 776px; margin-left: 2365px;"><div data-drawio-colors="color: rgb(0, 0, 0); " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(0, 0, 0); line-height: 1.2; pointer-events: all; white-space: normal; overflow-wrap: normal;">sayhello-0.1-r0.${PACKAGE_ARCH}.<i>pkg</i></div></div></div></foreignObject><text x="2497" y="780" fill="rgb(0, 0, 0)" font-family="Liberation Sans" font-size="12px" text-anchor="middle">sayhello-0.1-r0.${PACKAGE_ARCH}.pkg</text></switch></g><rect x="10788" y="2640" width="1480" height="680" fill="none" stroke="#36393d" stroke-width="4" stroke-dasharray="12 12" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 368px; height: 1px; padding-top: 745px; margin-left: 2698px;"><div data-drawio-colors="color: rgb(0, 0, 0); " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(0, 0, 0); line-height: 1.2; pointer-events: all; white-space: normal; overflow-wrap: normal;"><font style="font-size: 15px;">This task also depends on <b><u>PACKAGE_CLASSES</u></b>,<br /><b><u><i>pkg</i></u></b> can be <b><u>rpm</u></b>, <b><u>deb</u></b> or <b><u>ipk</u></b> for <b><u>package_rpm</u></b>,<br /><b><u>package_deb</u></b> or <u style="font-weight: bold;">package_ipk</u> respectively.<br />The generated package generally named using:<br /><b><u>${PN}</u></b>, <b><u>${PR}</u></b>, <b><u>${PACKAGE_ARCH}</u></b> and <b><u><i>pkg</i></u></b><br /></font></div></div></div></foreignObject><text x="2882" y="749" fill="rgb(0, 0, 0)" font-family="Liberation Sans" font-size="12px" text-anchor="middle">This task also depends on PACKAGE_CLASSES,...</text></switch></g><path d="M 7314 520 L 7314 654.53" fill="none" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-miterlimit="10" pointer-events="stroke"/><path d="M 7314 675.53 L 7300 647.53 L 7314 654.53 L 7328 647.53 Z" fill="rgb(0, 0, 0)" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-miterlimit="10" pointer-events="all"/><path d="M 7454 440 Q 7454 440 7742.53 440" fill="none" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-miterlimit="10" stroke-dasharray="12 12" pointer-events="stroke"/><path d="M 7763.53 440 L 7735.53 449.33 L 7742.53 440 L 7735.53 430.67 Z" fill="rgb(0, 0, 0)" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-miterlimit="10" pointer-events="all"/><rect x="7174" y="360" width="280" height="160" rx="24" ry="24" fill="rgb(255, 255, 255)" stroke="rgb(0, 0, 0)" stroke-width="4" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 68px; height: 1px; padding-top: 110px; margin-left: 1795px;"><div data-drawio-colors="color: rgb(0, 0, 0); " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(0, 0, 0); line-height: 1.2; pointer-events: all; white-space: normal; overflow-wrap: normal;"><font data-font-src="https://fonts.googleapis.com/css?family=Architects+Daughter">deploy</font></div></div></div></foreignObject><text x="1829" y="114" fill="rgb(0, 0, 0)" font-family="Liberation Sans" font-size="12px" text-anchor="middle">deploy</text></switch></g><path d="M 7313.71 840 L 7313.71 880.57 Q 7313.71 920.57 7313.71 910.29 L 7313.71 905.14 Q 7313.71 900 7313.81 927.26 L 7313.91 954.53" fill="none" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-miterlimit="10" pointer-events="stroke"/><path d="M 7313.98 975.53 L 7299.88 947.58 L 7313.91 954.53 L 7327.88 947.48 Z" fill="rgb(0, 0, 0)" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-miterlimit="10" pointer-events="all"/><rect x="7054.48" y="680" width="519" height="160" rx="24" ry="24" fill="rgb(255, 255, 255)" stroke="rgb(0, 0, 0)" stroke-width="4" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 128px; height: 1px; padding-top: 190px; margin-left: 1765px;"><div data-drawio-colors="color: rgb(0, 0, 0); " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(0, 0, 0); line-height: 1.2; pointer-events: all; white-space: normal; overflow-wrap: normal;"><b>${DEPLOY_DIR_<i>pkg</i>}</b></div></div></div></foreignObject><text x="1828" y="194" fill="rgb(0, 0, 0)" font-family="Liberation Sans" font-size="12px" text-anchor="middle">${DEPLOY_DIR_pkg}</text></switch></g><rect x="9488" y="3680" width="600" height="120" fill="#eeeeee" stroke="#36393d" stroke-width="4" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 1px; height: 1px; padding-top: 935px; margin-left: 2447px;"><div data-drawio-colors="color: rgb(0, 0, 0); " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(0, 0, 0); line-height: 1.2; pointer-events: all; white-space: nowrap;"><font data-font-src="https://fonts.googleapis.com/css?family=Architects+Daughter">do_package_write_<i>pkg</i><br /></font></div></div></div></foreignObject><text x="2447" y="939" fill="rgb(0, 0, 0)" font-family="Liberation Sans" font-size="12px" text-anchor="middle">do_package_write_pkg&#xa;</text></switch></g><ellipse cx="9408" cy="3740" rx="60.00000000000001" ry="60.00000000000001" fill="#000000" stroke="#56517e" stroke-width="4" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 28px; height: 1px; padding-top: 935px; margin-left: 2338px;"><div data-drawio-colors="color: rgb(0, 0, 0); " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(0, 0, 0); line-height: 1.2; pointer-events: all; white-space: normal; overflow-wrap: normal;"><font data-font-src="https://fonts.googleapis.com/css?family=Architects+Daughter" color="#ffffff" size="1"><b style="font-size: 15px;">17</b></font></div></div></div></foreignObject><text x="2352" y="939" fill="rgb(0, 0, 0)" font-family="Liberation Sans" font-size="12px" text-anchor="middle">17</text></switch></g><path d="M 10048 3740 Q 11528.57 3740 11528.03 3345.47" fill="none" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-miterlimit="10" stroke-dasharray="12 12" pointer-events="stroke"/><path d="M 11528.01 3324.47 L 11537.38 3352.46 L 11528.03 3345.47 L 11518.71 3352.48 Z" fill="rgb(0, 0, 0)" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-miterlimit="10" pointer-events="all"/><rect x="7074" y="980" width="480" height="160" rx="24" ry="24" fill="rgb(255, 255, 255)" stroke="rgb(0, 0, 0)" stroke-width="4" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 118px; height: 1px; padding-top: 265px; margin-left: 1770px;"><div data-drawio-colors="color: rgb(0, 0, 0); " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(0, 0, 0); line-height: 1.2; pointer-events: all; white-space: normal; overflow-wrap: normal;">${PACKAGE_ARCH}</div></div></div></foreignObject><text x="1829" y="269" fill="rgb(0, 0, 0)" font-family="Liberation Sans" font-size="12px" text-anchor="middle">${PACKAGE_ARCH}</text></switch></g><rect x="7768" y="672.52" width="1660" height="167.48" fill="rgb(255, 255, 255)" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-dasharray="12 12" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 413px; height: 1px; padding-top: 189px; margin-left: 1943px;"><div data-drawio-colors="color: rgb(0, 0, 0); " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(0, 0, 0); line-height: 1.2; pointer-events: all; white-space: normal; overflow-wrap: normal;"><font style="font-size: 15px;">For packages, this can be <b><u>IPK</u></b>, <b><u>RPM</u></b> or <b><u>DEB</u></b> (<i>check step 17</i>)</font></div></div></div></foreignObject><text x="2150" y="193" fill="rgb(0, 0, 0)" font-family="Liberation Sans" font-size="12px" text-anchor="middle">For packages, this can be IPK, RPM or DEB (check step 17)</text></switch></g><rect x="7369.48" y="480" width="320" height="100" rx="15" ry="15" fill="#000000" stroke="none" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 78px; height: 1px; padding-top: 133px; margin-left: 1843px;"><div data-drawio-colors="color: #ffffff; " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(255, 255, 255); line-height: 1.2; pointer-events: all; font-weight: bold; white-space: normal; overflow-wrap: normal;"><font data-font-src="https://fonts.googleapis.com/css?family=Architects+Daughter">DEPLOY_DIR</font></div></div></div></foreignObject><text x="1882" y="136" fill="#ffffff" font-family="Liberation Sans" font-size="12px" text-anchor="middle" font-weight="bold">DEPLOY_DIR</text></switch></g><rect x="4988" y="80" width="240" height="100" rx="15" ry="15" fill="#000000" stroke="none" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 58px; height: 1px; padding-top: 33px; margin-left: 1248px;"><div data-drawio-colors="color: #ffffff; " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(255, 255, 255); line-height: 1.2; pointer-events: all; font-weight: bold; white-space: normal; overflow-wrap: normal;"><font data-font-src="https://fonts.googleapis.com/css?family=Architects+Daughter">TMPDIR</font></div></div></div></foreignObject><text x="1277" y="36" fill="#ffffff" font-family="Liberation Sans" font-size="12px" text-anchor="middle" font-weight="bold">TMPDIR</text></switch></g><rect x="1668" y="480" width="250.72" height="100" rx="15" ry="15" fill="#000000" stroke="none" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 61px; height: 1px; padding-top: 133px; margin-left: 418px;"><div data-drawio-colors="color: #ffffff; " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(255, 255, 255); line-height: 1.2; pointer-events: all; font-weight: bold; white-space: normal; overflow-wrap: normal;"><font data-font-src="https://fonts.googleapis.com/css?family=Architects+Daughter">DL_DIR</font></div></div></div></foreignObject><text x="448" y="136" fill="#ffffff" font-family="Liberation Sans" font-size="12px" text-anchor="middle" font-weight="bold">DL_DIR</text></switch></g><path d="M 7573.48 760 L 7630.29 760.34 Q 7670.29 760.57 7706.42 758.98 L 7742.55 757.38" fill="none" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-miterlimit="10" stroke-dasharray="12 12" pointer-events="stroke"/><path d="M 7763.53 756.46 L 7735.97 767.02 L 7742.55 757.38 L 7735.15 748.37 Z" fill="rgb(0, 0, 0)" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-miterlimit="10" pointer-events="all"/><rect x="10488" y="2380" width="600" height="120" fill="#eeeeee" stroke="#36393d" stroke-width="4" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 1px; height: 1px; padding-top: 610px; margin-left: 2697px;"><div data-drawio-colors="color: rgb(0, 0, 0); " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(0, 0, 0); line-height: 1.2; pointer-events: all; white-space: nowrap;"><font data-font-src="https://fonts.googleapis.com/css?family=Architects+Daughter">do_package_write_<i>pkg</i><br /></font></div></div></div></foreignObject><text x="2697" y="614" fill="rgb(0, 0, 0)" font-family="Liberation Sans" font-size="12px" text-anchor="middle">do_package_write_pkg&#xa;</text></switch></g><ellipse cx="10408" cy="2440" rx="60.00000000000001" ry="60.00000000000001" fill="#000000" stroke="#56517e" stroke-width="4" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 28px; height: 1px; padding-top: 610px; margin-left: 2588px;"><div data-drawio-colors="color: rgb(0, 0, 0); " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(0, 0, 0); line-height: 1.2; pointer-events: all; white-space: normal; overflow-wrap: normal;"><font data-font-src="https://fonts.googleapis.com/css?family=Architects+Daughter" color="#ffffff" size="1"><b style="font-size: 15px;">18</b></font></div></div></div></foreignObject><text x="2602" y="614" fill="rgb(0, 0, 0)" font-family="Liberation Sans" font-size="12px" text-anchor="middle">18</text></switch></g><path d="M 10268 2310.29 Q 10879.43 2310.29 10879.43 1999.43 Q 10879.43 1688.57 10878.68 1140.47" fill="none" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-miterlimit="10" stroke-dasharray="12 12" pointer-events="stroke"/><path d="M 10878.65 1119.47 L 10888.02 1147.46 L 10878.68 1140.47 L 10869.35 1147.48 Z" fill="rgb(0, 0, 0)" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-miterlimit="10" pointer-events="all"/><rect x="10148" y="2260" width="120" height="100" rx="15" ry="15" fill="#000000" stroke="none" pointer-events="all"/><path d="M 10088 2200 Q 10088.57 2077.14 10173.14 2077.14 Q 10257.71 2077.14 10257.94 1980.47" fill="none" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-miterlimit="10" stroke-dasharray="12 12" pointer-events="stroke"/><path d="M 10257.99 1959.47 L 10267.26 1987.49 L 10257.94 1980.47 L 10248.59 1987.45 Z" fill="rgb(0, 0, 0)" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-miterlimit="10" pointer-events="all"/><rect x="9628" y="805" width="1667.52" height="310" fill="rgb(255, 255, 255)" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-dasharray="12 12" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 415px; height: 1px; padding-top: 240px; margin-left: 2408px;"><div data-drawio-colors="color: rgb(0, 0, 0); " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(0, 0, 0); line-height: 1.2; pointer-events: all; white-space: normal; overflow-wrap: normal;"><font style="font-size: 15px;">This can be <b><u>PKGWRITEDIRRPM</u></b>, <b><u>PKGWRITEDIRDEB</u></b> or <b><u>PKGWRITEDIRIPK</u></b> for <b><u>package_rpm</u></b>, <b><u>package_deb</u></b><br />or <b><u>package_ipk</u></b> respectively<br /></font></div></div></div></foreignObject><text x="2615" y="244" fill="rgb(0, 0, 0)" font-family="Liberation Sans" font-size="12px" text-anchor="middle">This can be PKGWRITEDIRRPM, PKGWRITEDIRDEB or PKGWRITEDIRIPK for pack...</text></switch></g><rect x="628" y="2470" width="1313" height="410" fill="none" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-dasharray="12 12" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 326px; height: 1px; padding-top: 669px; margin-left: 158px;"><div data-drawio-colors="color: rgb(0, 0, 0); " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(0, 0, 0); line-height: 1.2; pointer-events: all; white-space: normal; overflow-wrap: normal;"><span style="font-size: 15px;">License checking happens in <b><u>do_populate_lic</u></b> after <b><u>do_patch<br /></u></b>and before that a checksum check<br />happends on <b><u>LIC_FILES_CHKSUM</u></b> if the<br />license is not <b><u>CLOSED</u></b><br /></span></div></div></div></foreignObject><text x="321" y="672" fill="rgb(0, 0, 0)" font-family="Liberation Sans" font-size="12px" text-anchor="middle">License checking happens in do_populate_lic after do_pa...</text></switch></g><rect x="3528.48" y="1360" width="1739.52" height="280" fill="none" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-dasharray="12 12" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 433px; height: 1px; padding-top: 375px; margin-left: 883px;"><div data-drawio-colors="color: rgb(0, 0, 0); " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(0, 0, 0); line-height: 1.2; pointer-events: all; white-space: normal; overflow-wrap: normal;"><span style="font-size: 15px;">This variable is used to separate recipes<br />based on their target. This has value of<br /><b><u>${PACKAGE_ARCH}${TARGET_VENDOR}-${TARGET_OS}</u></b><br /></span></div></div></div></foreignObject><text x="1100" y="379" fill="rgb(0, 0, 0)" font-family="Liberation Sans" font-size="12px" text-anchor="middle">This variable is used to separate recipes...</text></switch></g><path d="M 6588 660 Q 6863.43 660 6863.4 505.47" fill="none" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-miterlimit="10" stroke-dasharray="12 12" pointer-events="stroke"/><path d="M 6863.39 484.47 L 6872.73 512.47 L 6863.4 505.47 L 6854.06 512.47 Z" fill="rgb(0, 0, 0)" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-miterlimit="10" pointer-events="all"/><rect x="6148" y="620" width="440" height="80" rx="12" ry="12" fill="#eeeeee" stroke="#36393d" stroke-width="4" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 108px; height: 1px; padding-top: 165px; margin-left: 1538px;"><div data-drawio-colors="color: rgb(0, 0, 0); " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(0, 0, 0); line-height: 1.2; pointer-events: all; white-space: normal; overflow-wrap: normal;">conf-notes.txt</div></div></div></foreignObject><text x="1592" y="169" fill="rgb(0, 0, 0)" font-family="Liberation Sans" font-size="12px" text-anchor="middle">conf-notes.txt</text></switch></g><path d="M 2580.02 520 Q 2580 660 3042.53 660" fill="none" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-miterlimit="10" stroke-dasharray="12 12" pointer-events="stroke"/><path d="M 3063.53 660 L 3035.53 669.33 L 3042.53 660 L 3035.53 650.67 Z" fill="rgb(0, 0, 0)" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-miterlimit="10" pointer-events="all"/><path d="M 2374 440 Q 2215.43 440.57 2215.04 205.47" fill="none" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-miterlimit="10" stroke-dasharray="12 12" pointer-events="stroke"/><path d="M 2215.01 184.47 L 2224.39 212.46 L 2215.04 205.47 L 2205.72 212.49 Z" fill="rgb(0, 0, 0)" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-miterlimit="10" pointer-events="all"/><rect x="2374" y="360" width="412.04" height="160" rx="24" ry="24" fill="rgb(255, 255, 255)" stroke="rgb(0, 0, 0)" stroke-width="4" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 101px; height: 1px; padding-top: 110px; margin-left: 595px;"><div data-drawio-colors="color: rgb(0, 0, 0); " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(0, 0, 0); line-height: 1.2; pointer-events: all; white-space: normal; overflow-wrap: normal;"><font data-font-src="https://fonts.googleapis.com/css?family=Architects+Daughter">sstate-cache</font></div></div></div></foreignObject><text x="645" y="114" fill="rgb(0, 0, 0)" font-family="Liberation Sans" font-size="12px" text-anchor="middle">sstate-cache</text></switch></g><rect x="2643.92" y="480" width="332.04" height="100" rx="15" ry="15" fill="#000000" stroke="none" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 81px; height: 1px; padding-top: 133px; margin-left: 662px;"><div data-drawio-colors="color: #ffffff; " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(255, 255, 255); line-height: 1.2; pointer-events: all; font-weight: bold; white-space: normal; overflow-wrap: normal;"><font data-font-src="https://fonts.googleapis.com/css?family=Architects+Daughter">SSTATE_DIR</font></div></div></div></foreignObject><text x="702" y="136" fill="#ffffff" font-family="Liberation Sans" font-size="12px" text-anchor="middle" font-weight="bold">SSTATE_DIR</text></switch></g><rect x="8927.96" y="3440" width="360" height="110" rx="16.5" ry="16.5" fill="#eeeeee" stroke="#36393d" stroke-width="4" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 88px; height: 1px; padding-top: 874px; margin-left: 2233px;"><div data-drawio-colors="color: rgb(0, 0, 0); " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(0, 0, 0); line-height: 1.2; pointer-events: all; white-space: normal; overflow-wrap: normal;">sayhello</div></div></div></foreignObject><text x="2277" y="877" fill="rgb(0, 0, 0)" font-family="Liberation Sans" font-size="12px" text-anchor="middle">sayhello</text></switch></g><path d="M 4660 1960 Q 4660 2300 4904.57 2300 Q 5149.14 2300 5148.94 2611.65" fill="none" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-miterlimit="10" stroke-dasharray="12 12" pointer-events="stroke"/><path d="M 5148.92 2632.65 L 5139.61 2604.64 L 5148.94 2611.65 L 5158.27 2604.65 Z" fill="rgb(0, 0, 0)" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-miterlimit="10" pointer-events="all"/><rect x="3068" y="480" width="1640" height="360" fill="none" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-dasharray="12 12" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 408px; height: 1px; padding-top: 165px; margin-left: 768px;"><div data-drawio-colors="color: rgb(0, 0, 0); " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(0, 0, 0); line-height: 1.2; pointer-events: all; white-space: normal; overflow-wrap: normal;"><span style="font-size: 15px;">This folder contains cache for recipes build output, this is used by BitBake, if the recipe checksum did not change it knows that the output to use is the same.<br /></span></div></div></div></foreignObject><text x="972" y="169" fill="rgb(0, 0, 0)" font-family="Liberation Sans" font-size="12px" text-anchor="middle">This folder contains cache for recipes build output, this is used by...</text></switch></g><rect x="1395" y="0" width="1640" height="180" fill="none" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-dasharray="12 12" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 408px; height: 1px; padding-top: 23px; margin-left: 350px;"><div data-drawio-colors="color: rgb(0, 0, 0); " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(0, 0, 0); line-height: 1.2; pointer-events: all; white-space: normal; overflow-wrap: normal;"><span style="font-size: 15px;"><font data-font-src="https://fonts.googleapis.com/css?family=Liberation+Sans">These directories can be shared accross builds to save disk space and build time</font><br /></span></div></div></div></foreignObject><text x="554" y="26" fill="rgb(0, 0, 0)" font-family="Liberation Sans" font-size="12px" text-anchor="middle">These directories can be shared accross builds to save disk space an...</text></switch></g><rect x="7768" y="350" width="1667.52" height="180" fill="rgb(255, 255, 255)" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-dasharray="12 12" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 415px; height: 1px; padding-top: 110px; margin-left: 1943px;"><div data-drawio-colors="color: rgb(0, 0, 0); " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(0, 0, 0); line-height: 1.2; pointer-events: all; white-space: normal; overflow-wrap: normal;"><font style="font-size: 15px;">This directory contains other output directories such as <b><u>images</u></b>, <b><u>sdk</u></b> and <b><u>licenses</u></b><br /></font></div></div></div></foreignObject><text x="2150" y="114" fill="rgb(0, 0, 0)" font-family="Liberation Sans" font-size="12px" text-anchor="middle">This directory contains other output directories such as images, sdk...</text></switch></g><rect x="5908" y="1020" width="680" height="520" fill="none" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-dasharray="12 12" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 168px; height: 1px; padding-top: 320px; margin-left: 1478px;"><div data-drawio-colors="color: rgb(0, 0, 0); " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(0, 0, 0); line-height: 1.2; pointer-events: all; white-space: normal; overflow-wrap: normal;"><font style="font-size: 15px;">This file contains all <b>layers</b> that BitBake should consider when looking for metadata.<br /></font></div></div></div></foreignObject><text x="1562" y="324" fill="rgb(0, 0, 0)" font-family="Liberation Sans" font-size="12px" text-anchor="middle">This file contains all layer...</text></switch></g><rect x="5396" y="20" width="1760" height="160" fill="none" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-dasharray="12 12" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 438px; height: 1px; padding-top: 25px; margin-left: 1350px;"><div data-drawio-colors="color: rgb(0, 0, 0); " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(0, 0, 0); line-height: 1.2; pointer-events: all; white-space: normal; overflow-wrap: normal;"><font style="font-size: 15px;">This is base configuration file containing essential user config such as <b><u>MACHINE</u></b> and <b><u>DISTRO</u></b><br /></font></div></div></div></foreignObject><text x="1569" y="29" fill="rgb(0, 0, 0)" font-family="Liberation Sans" font-size="12px" text-anchor="middle">This is base configuration file containing essential user config such as...</text></switch></g><rect x="6140" y="320" width="964.52" height="160" fill="none" stroke="rgb(0, 0, 0)" stroke-width="4" stroke-dasharray="12 12" pointer-events="all"/><g transform="translate(-0.5 -0.5)scale(4)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 239px; height: 1px; padding-top: 100px; margin-left: 1536px;"><div data-drawio-colors="color: rgb(0, 0, 0); " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Liberation Sans&quot;; color: rgb(0, 0, 0); line-height: 1.2; pointer-events: all; white-space: normal; overflow-wrap: normal;"><font style="font-size: 15px;">The message to show after <b>source oe-init-build-env</b><br /></font></div></div></div></foreignObject><text x="1656" y="104" fill="rgb(0, 0, 0)" font-family="Liberation Sans" font-size="12px" text-anchor="middle">The message to show after source oe-init...</text></switch></g><rect x="10948" y="2400" width="120" height="80" fill="none" stroke="rgb(0, 0, 0)" stroke-width="4" pointer-events="all"/><rect x="10348" y="3065" width="80" height="80" fill="none" stroke="rgb(0, 0, 0)" stroke-width="4" pointer-events="all"/><rect x="9948" y="3700" width="100" height="80" fill="none" stroke="rgb(0, 0, 0)" stroke-width="4" pointer-events="all"/><rect x="10028" y="2200" width="80" height="80" fill="none" stroke="rgb(0, 0, 0)" stroke-width="4" pointer-events="all"/><rect x="7454" y="725" width="80" height="80" fill="none" stroke="rgb(0, 0, 0)" stroke-width="4" pointer-events="all"/></g><switch><g requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility"/><a transform="translate(0,-5)" xlink:href="https://www.drawio.com/doc/faq/svg-export-text-problems" target="_blank"><text text-anchor="middle" font-size="10px" x="50%" y="100%">Text is not SVG - cannot display</text></a></switch></svg> \ No newline at end of file
diff --git a/documentation/overview-manual/yp-intro.rst b/documentation/overview-manual/yp-intro.rst
index a2e0862459..4a27e12e01 100644
--- a/documentation/overview-manual/yp-intro.rst
+++ b/documentation/overview-manual/yp-intro.rst
@@ -129,7 +129,7 @@ Here are features and advantages of the Yocto Project:
arbitrarily include packages.
- *License Manifest:* The Yocto Project provides a :ref:`license
- manifest <dev-manual/common-tasks:maintaining open source license compliance during your product's lifecycle>`
+ manifest <dev-manual/licenses:maintaining open source license compliance during your product's lifecycle>`
for review by people who need to track the use of open source
licenses (e.g. legal teams).
@@ -225,7 +225,7 @@ your Metadata, the easier it is to cope with future changes.
- Layers support the inclusion of technologies, hardware components,
and software components. The :ref:`Yocto Project
- Compatible <dev-manual/common-tasks:making sure your layer is compatible with yocto project>`
+ Compatible <dev-manual/layers:making sure your layer is compatible with yocto project>`
designation provides a minimum level of standardization that
contributes to a strong ecosystem. "YP Compatible" is applied to
appropriate products and software components such as BSPs, other
@@ -269,7 +269,7 @@ of the ``poky`` repository, you will see several layers: ``meta``,
layer.
For procedures on how to create layers, see the
-":ref:`dev-manual/common-tasks:understanding and creating layers`"
+":ref:`dev-manual/layers:understanding and creating layers`"
section in the Yocto Project Development Tasks Manual.
Components and Tools
@@ -340,6 +340,18 @@ the Yocto Project:
view information about builds. For information on Toaster, see the
:doc:`/toaster-manual/index`.
+- *VSCode IDE Extension:* The `Yocto Project BitBake
+ <https://marketplace.visualstudio.com/items?itemName=yocto-project.yocto-bitbake>`__
+ extension for Visual Studio Code provides a rich set of features for working
+ with BitBake recipes. The extension provides syntax highlighting,
+ hover tips, and completion for BitBake files as well as embedded Python and
+ Bash languages. Additional views and commands allow you to efficiently
+ browse, build and edit recipes. It also provides SDK integration for
+ cross-compiling and debugging through ``devtool``.
+
+ Learn more about the VSCode Extension on the `extension's frontpage
+ <https://marketplace.visualstudio.com/items?itemName=yocto-project.yocto-bitbake>`__.
+
Production Tools
----------------
@@ -351,7 +363,7 @@ Yocto Project:
(BitBake and
OE-Core) automatically generates upgrades for recipes that are based
on new versions of the recipes published upstream. See
- :ref:`dev-manual/common-tasks:using the auto upgrade helper (auh)`
+ :ref:`dev-manual/upgrading-recipes:using the auto upgrade helper (auh)`
for how to set it up.
- *Recipe Reporting System:* The Recipe Reporting System tracks recipe
@@ -361,7 +373,7 @@ Yocto Project:
of the :oe_layerindex:`OpenEmbedded Layer Index <>`, which
is a website that indexes OpenEmbedded-Core layers.
-- *Patchwork:* `Patchwork <http://jk.ozlabs.org/projects/patchwork/>`__
+- *Patchwork:* `Patchwork <https://patchwork.yoctoproject.org/>`__
is a fork of a project originally started by
`OzLabs <https://ozlabs.org/>`__. The project is a web-based tracking
system designed to streamline the process of bringing contributions
@@ -517,18 +529,18 @@ Historically, the Build Appliance was the second of three methods by
which you could use the Yocto Project on a system that was not native to
Linux.
-1. *Hob:* Hob, which is now deprecated and is no longer available since
+#. *Hob:* Hob, which is now deprecated and is no longer available since
the 2.1 release of the Yocto Project provided a rudimentary,
GUI-based interface to the Yocto Project. Toaster has fully replaced
Hob.
-2. *Build Appliance:* Post Hob, the Build Appliance became available. It
+#. *Build Appliance:* Post Hob, the Build Appliance became available. It
was never recommended that you use the Build Appliance as a
day-to-day production development environment with the Yocto Project.
Build Appliance was useful as a way to try out development in the
Yocto Project environment.
-3. *CROPS:* The final and best solution available now for developing
+#. *CROPS:* The final and best solution available now for developing
using the Yocto Project on a system not native to Linux is with
:ref:`CROPS <overview-manual/yp-intro:development tools>`.
@@ -584,20 +596,15 @@ Build Host runs, you have several choices.
":ref:`dev-manual/start:setting up to use cross platforms (crops)`"
section in the Yocto Project Development Tasks Manual.
-- *Windows Subsystem For Linux (WSLv2):* You may use Windows Subsystem
- For Linux v2 to set up a Build Host using Windows 10.
-
- .. note::
+- *Windows Subsystem For Linux (WSL 2):* You may use Windows Subsystem
+ For Linux version 2 to set up a Build Host using Windows 10 or later,
+ or Windows Server 2019 or later.
- The Yocto Project is not compatible with WSLv1, it is compatible
- but not officially supported nor validated with WSLv2, if you
- still decide to use WSL please upgrade to WSLv2.
-
- The Windows Subsystem For Linux allows Windows 10 to run a real Linux
+ The Windows Subsystem For Linux allows Windows to run a real Linux
kernel inside of a lightweight virtual machine (VM).
- For information on how to set up a Build Host with WSLv2, see the
- ":ref:`dev-manual/start:setting up to use windows subsystem for linux (wslv2)`"
+ For information on how to set up a Build Host with WSL 2, see the
+ ":ref:`dev-manual/start:setting up to use windows subsystem for linux (wsl 2)`"
section in the Yocto Project Development Tasks Manual.
- *Toaster:* Regardless of what your Build Host is running, you can use
@@ -610,6 +617,14 @@ Build Host runs, you have several choices.
For information about and how to use Toaster, see the
:doc:`/toaster-manual/index`.
+- *Using the VSCode Extension:* You can use the `Yocto Project BitBake
+ <https://marketplace.visualstudio.com/items?itemName=yocto-project.yocto-bitbake>`__
+ extension for Visual Studio Code to start your BitBake builds through a
+ graphical user interface.
+
+ Learn more about the VSCode Extension on the `extension's marketplace page
+ <https://marketplace.visualstudio.com/items?itemName=yocto-project.yocto-bitbake>`__
+
Reference Embedded Distribution (Poky)
======================================
@@ -709,7 +724,7 @@ BitBake also supports both ``:prepend`` and ``:append`` operators as a
method of extending task functionality. These operators inject code into
the beginning or end of a task. For information on these BitBake
operators, see the
-":ref:`bitbake:bitbake-user-manual/bitbake-user-manual-metadata:appending and prepending (override style syntax)`"
+":ref:`bitbake-user-manual/bitbake-user-manual-metadata:appending and prepending (override style syntax)`"
section in the BitBake User's Manual.
The OpenEmbedded Build System Workflow
@@ -722,29 +737,29 @@ workflow:
.. image:: figures/YP-flow-diagram.png
:width: 100%
-Following is a brief summary of the "workflow":
+Here is a brief summary of the "workflow":
-1. Developers specify architecture, policies, patches and configuration
+#. Developers specify architecture, policies, patches and configuration
details.
-2. The build system fetches and downloads the source code from the
+#. The build system fetches and downloads the source code from the
specified location. The build system supports standard methods such
as tarballs or source code repositories systems such as Git.
-3. Once source code is downloaded, the build system extracts the sources
+#. Once source code is downloaded, the build system extracts the sources
into a local work area where patches are applied and common steps for
configuring and compiling the software are run.
-4. The build system then installs the software into a temporary staging
+#. The build system then installs the software into a temporary staging
area where the binary package format you select (DEB, RPM, or IPK) is
used to roll up the software.
-5. Different QA and sanity checks run throughout entire build process.
+#. Different QA and sanity checks run throughout entire build process.
-6. After the binaries are created, the build system generates a binary
+#. After the binaries are created, the build system generates a binary
package feed that is used to create the final root file image.
-7. The build system generates the file system image and a customized
+#. The build system generates the file system image and a customized
Extensible SDK (eSDK) for application development in parallel.
For a very detailed look at this workflow, see the
@@ -781,7 +796,7 @@ helpful for getting started:
Yocto Project.
For more detailed information on layers, see the
- ":ref:`dev-manual/common-tasks:understanding and creating layers`"
+ ":ref:`dev-manual/layers:understanding and creating layers`"
section in the Yocto Project Development Tasks Manual. For a
discussion specifically on BSP Layers, see the
":ref:`bsp-guide/bsp:bsp layers`" section in the Yocto
@@ -842,7 +857,7 @@ helpful for getting started:
distribution.
Another point worth noting is that historically within the Yocto
- Project, recipes were referred to as packages - thus, the existence
+ Project, recipes were referred to as packages --- thus, the existence
of several BitBake variables that are seemingly mis-named, (e.g.
:term:`PR`,
:term:`PV`, and
diff --git a/documentation/poky.yaml.in b/documentation/poky.yaml.in
index 1e1d6c83ed..7a686ac4e1 100644
--- a/documentation/poky.yaml.in
+++ b/documentation/poky.yaml.in
@@ -1,10 +1,10 @@
-DISTRO : "4.0"
-DISTRO_NAME_NO_CAP : "kirkstone"
-DISTRO_NAME : "Kirkstone"
-DISTRO_NAME_NO_CAP_MINUS_ONE : "honister"
-DISTRO_NAME_NO_CAP_LTS : "dunfell"
-YOCTO_DOC_VERSION : "4.0"
-DISTRO_REL_TAG : "yocto-4.0"
+DISTRO : "4.3"
+DISTRO_NAME_NO_CAP : "nanbield"
+DISTRO_NAME : "Nanbield"
+DISTRO_NAME_NO_CAP_MINUS_ONE : "mickledore"
+DISTRO_NAME_NO_CAP_LTS : "kirkstone"
+YOCTO_DOC_VERSION : "4.3"
+DISTRO_REL_TAG : "yocto-4.3"
DOCCONF_VERSION : "dev"
BITBAKE_SERIES : ""
YOCTO_DL_URL : "https://downloads.yoctoproject.org"
@@ -13,35 +13,37 @@ YOCTO_RELEASE_DL_URL : "&YOCTO_DL_URL;/releases/yocto/yocto-&DISTRO;"
UBUNTU_HOST_PACKAGES_ESSENTIAL : "gawk wget git diffstat unzip texinfo gcc \
build-essential chrpath socat cpio python3 python3-pip python3-pexpect \
xz-utils debianutils iputils-ping python3-git python3-jinja2 libegl1-mesa libsdl1.2-dev \
- pylint3 xterm python3-subunit mesa-common-dev zstd liblz4-tool"
+ python3-subunit mesa-common-dev zstd liblz4-tool file locales libacl1
+ \n\ $ sudo locale-gen en_US.UTF-8"
FEDORA_HOST_PACKAGES_ESSENTIAL : "gawk make wget tar bzip2 gzip python3 unzip perl patch \
diffutils diffstat git cpp gcc gcc-c++ glibc-devel texinfo chrpath \
ccache perl-Data-Dumper perl-Text-ParseWords perl-Thread-Queue perl-bignum socat \
python3-pexpect findutils which file cpio python python3-pip xz python3-GitPython \
- python3-jinja2 SDL-devel xterm rpcgen mesa-libGL-devel perl-FindBin perl-File-Compare \
- perl-File-Copy perl-locale zstd lz4"
+ python3-jinja2 SDL-devel rpcgen mesa-libGL-devel perl-FindBin perl-File-Compare \
+ perl-File-Copy perl-locale zstd lz4 hostname glibc-langpack-en libacl"
OPENSUSE_HOST_PACKAGES_ESSENTIAL : "python gcc gcc-c++ git chrpath make wget python-xml \
diffstat makeinfo python-curses patch socat python3 python3-curses tar python3-pip \
- python3-pexpect xz which python3-Jinja2 Mesa-libEGL1 libSDL-devel xterm rpcgen Mesa-dri-devel \
- zstd lz4
- \n\ $ sudo pip3 install GitPython"
-CENTOS7_HOST_PACKAGES_ESSENTIAL : "-y epel-release
- \n\ $ sudo yum makecache
- \n\ $ sudo yum install gawk make wget tar bzip2 gzip python3 unzip perl patch \
- diffutils diffstat git cpp gcc gcc-c++ glibc-devel texinfo chrpath socat \
- perl-Data-Dumper perl-Text-ParseWords perl-Thread-Queue python3-pip xz \
- which SDL-devel xterm mesa-libGL-devel zstd lz4
- \n\ $ sudo pip3 install GitPython jinja2"
-CENTOS8_HOST_PACKAGES_ESSENTIAL : "-y epel-release
- \n\ $ sudo dnf config-manager --set-enabled PowerTools
- \n\ $ sudo dnf makecache
- \n\ $ sudo dnf install gawk make wget tar bzip2 gzip python3 unzip perl patch \
+ python3-pexpect xz which python3-Jinja2 Mesa-libEGL1 libSDL-devel rpcgen Mesa-dri-devel \
+ zstd lz4 bzip2 gzip hostname libacl1
+ \n\ $ sudo pip3 install GitPython"
+ALMALINUX_HOST_PACKAGES_ESSENTIAL : "-y epel-release
+ \n\ $ sudo yum install dnf-plugins-core
+ \n\ $ sudo dnf config-manager --set-enabled crb
+ \n\ $ sudo dnf makecache
+ \n\ $ sudo dnf install gawk make wget tar bzip2 gzip python3 unzip perl patch \
diffutils diffstat git cpp gcc gcc-c++ glibc-devel texinfo chrpath ccache \
socat perl-Data-Dumper perl-Text-ParseWords perl-Thread-Queue python3-pip \
- python3-GitPython python3-jinja2 python3-pexpect xz which SDL-devel xterm \
- rpcgen mesa-libGL-devel zstd lz4"
+ python3-GitPython python3-jinja2 python3-pexpect xz which SDL-devel \
+ rpcgen mesa-libGL-devel zstd lz4 cpio glibc-langpack-en libacl"
PIP3_HOST_PACKAGES_DOC : "$ sudo pip3 install sphinx sphinx_rtd_theme pyyaml"
-MIN_PYTHON_VERSION : "3.6.0"
+MIN_PYTHON_VERSION : "3.8.0"
MIN_TAR_VERSION : "1.28"
MIN_GIT_VERSION : "1.8.3.1"
-MIN_GCC_VERSION : "5.0"
+MIN_GCC_VERSION : "8.0"
+MIN_MAKE_VERSION : "4.0"
+# Disk space (Gbytes) needed to generate qemux86-64 core-image-sato on Ubuntu 22.04 (x86-64), rounded up from 87
+MIN_DISK_SPACE : "90"
+# Disk space (Gbytes) needed to generate qemux86-64 core-image-sato on Ubuntu 22.04 (x86-64) with "rm_work", rounded up from 38
+MIN_DISK_SPACE_RM_WORK : "40"
+# RAM (Gbytes) needed to generate qemux86-64 core-image-sato on Ubuntu 22.04 (x86-64) on a 4 core system
+MIN_RAM : "8"
diff --git a/documentation/profile-manual/intro.rst b/documentation/profile-manual/intro.rst
index 9c8fa3dbfa..86310cf318 100644
--- a/documentation/profile-manual/intro.rst
+++ b/documentation/profile-manual/intro.rst
@@ -7,43 +7,45 @@ Yocto Project Profiling and Tracing Manual
Introduction
============
-Yocto bundles a number of tracing and profiling tools - this 'HOWTO'
+Yocto Project bundles a number of tracing and profiling tools --- this manual
describes their basic usage and shows by example how to make use of them
-to examine application and system behavior.
+to analyze application and system behavior.
-The tools presented are for the most part completely open-ended and have
+The tools presented are, for the most part, completely open-ended and have
quite good and/or extensive documentation of their own which can be used
to solve just about any problem you might come across in Linux. Each
section that describes a particular tool has links to that tool's
documentation and website.
-The purpose of this 'HOWTO' is to present a set of common and generally
+The purpose of this manual is to present a set of common and generally
useful tracing and profiling idioms along with their application (as
appropriate) to each tool, in the context of a general-purpose
'drill-down' methodology that can be applied to solving a large number
-(90%?) of problems. For help with more advanced usages and problems,
-please see the documentation and/or websites listed for each tool.
+of problems. For help with more advanced usages and problems,
+refer to the documentation and/or websites provided for each tool.
-The final section of this 'HOWTO' is a collection of real-world examples
-which we'll be continually adding to as we solve more problems using the
-tools - feel free to add your own examples to the list!
+The final section of this manual is a collection of real-world examples
+which we'll be continually updating as we solve more problems using the
+tools --- feel free to suggest additions to what you read here.
General Setup
=============
-Most of the tools are available only in 'sdk' images or in images built
-after adding 'tools-profile' to your local.conf. So, in order to be able
-to access all of the tools described here, please first build and boot
-an 'sdk' image e.g. ::
+Most of the tools are available only in ``sdk`` images or in images built
+after adding ``tools-profile`` to your ``local.conf`` file. So, in order to be able
+to access all of the tools described here, you can build and boot
+an ``sdk`` image, perhaps one of::
$ bitbake core-image-sato-sdk
+ $ bitbake core-image-weston-sdk
+ $ bitbake core-image-rt-sdk
-or alternatively by adding 'tools-profile' to the EXTRA_IMAGE_FEATURES line in
-your local.conf::
+Alternatively, you can add ``tools-profile`` to the :term:`EXTRA_IMAGE_FEATURES` line in
+your ``local.conf`` file::
EXTRA_IMAGE_FEATURES = "debug-tweaks tools-profile"
-If you use the 'tools-profile' method, you don't need to build an sdk image -
+If you use the ``tools-profile`` method, you don't need to build an sdk image ---
the tracing and profiling tools will be included in non-sdk images as well e.g.::
$ bitbake core-image-sato
@@ -64,12 +66,12 @@ the tracing and profiling tools will be included in non-sdk images as well e.g.:
If you've already built a stripped image, you can generate debug
packages (xxx-dbg) which you can manually install as needed.
-To generate debug info for packages, you can add dbg-pkgs to
-EXTRA_IMAGE_FEATURES in local.conf. For example::
+To generate debug info for packages, you can add ``dbg-pkgs`` to
+:term:`EXTRA_IMAGE_FEATURES` in ``local.conf``. For example::
EXTRA_IMAGE_FEATURES = "debug-tweaks tools-profile dbg-pkgs"
-Additionally, in order to generate the right type of debuginfo, we also need to
+Additionally, in order to generate the right type of debug info, we also need to
set :term:`PACKAGE_DEBUG_SPLIT_STYLE` in the ``local.conf`` file::
PACKAGE_DEBUG_SPLIT_STYLE = 'debug-file-directory'
diff --git a/documentation/profile-manual/usage.rst b/documentation/profile-manual/usage.rst
index 0ff9d921fd..17be149580 100644
--- a/documentation/profile-manual/usage.rst
+++ b/documentation/profile-manual/usage.rst
@@ -13,11 +13,11 @@ tools.
perf
====
-The 'perf' tool is the profiling and tracing tool that comes bundled
+The perf tool is the profiling and tracing tool that comes bundled
with the Linux kernel.
Don't let the fact that it's part of the kernel fool you into thinking
-that it's only for tracing and profiling the kernel - you can indeed use
+that it's only for tracing and profiling the kernel --- you can indeed use
it to trace and profile just the kernel, but you can also use it to
profile specific applications separately (with or without kernel
context), and you can also use it to trace and profile the kernel and
@@ -26,22 +26,22 @@ of what's going on.
In many ways, perf aims to be a superset of all the tracing and
profiling tools available in Linux today, including all the other tools
-covered in this HOWTO. The past couple of years have seen perf subsume a
+covered in this How-to. The past couple of years have seen perf subsume a
lot of the functionality of those other tools and, at the same time,
those other tools have removed large portions of their previous
functionality and replaced it with calls to the equivalent functionality
now implemented by the perf subsystem. Extrapolation suggests that at
-some point those other tools will simply become completely redundant and
+some point those other tools will become completely redundant and
go away; until then, we'll cover those other tools in these pages and in
many cases show how the same things can be accomplished in perf and the
other tools when it seems useful to do so.
The coverage below details some of the most common ways you'll likely
want to apply the tool; full documentation can be found either within
-the tool itself or in the man pages at
+the tool itself or in the manual pages at
`perf(1) <https://linux.die.net/man/1/perf>`__.
-Perf Setup
+perf Setup
----------
For this section, we'll assume you've already performed the basic setup
@@ -54,14 +54,14 @@ image built with the following in your ``local.conf`` file::
perf runs on the target system for the most part. You can archive
profile data and copy it to the host for analysis, but for the rest of
-this document we assume you've ssh'ed to the host and will be running
-the perf commands on the target.
+this document we assume you're connected to the host through SSH and will be
+running the perf commands on the target.
-Basic Perf Usage
+Basic perf Usage
----------------
The perf tool is pretty much self-documenting. To remind yourself of the
-available commands, simply type 'perf', which will show you basic usage
+available commands, just type ``perf``, which will show you basic usage
along with the available perf subcommands::
root@crownbay:~# perf
@@ -97,19 +97,19 @@ along with the available perf subcommands::
Using perf to do Basic Profiling
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-As a simple test case, we'll profile the 'wget' of a fairly large file,
+As a simple test case, we'll profile the ``wget`` of a fairly large file,
which is a minimally interesting case because it has both file and
network I/O aspects, and at least in the case of standard Yocto images,
it's implemented as part of BusyBox, so the methods we use to analyze it
-can be used in a very similar way to the whole host of supported BusyBox
-applets in Yocto. ::
+can be used in a similar way to the whole host of supported BusyBox
+applets in Yocto::
root@crownbay:~# rm linux-2.6.19.2.tar.bz2; \
wget &YOCTO_DL_URL;/mirror/sources/linux-2.6.19.2.tar.bz2
The quickest and easiest way to get some basic overall data about what's
-going on for a particular workload is to profile it using 'perf stat'.
-'perf stat' basically profiles using a few default counters and displays
+going on for a particular workload is to profile it using ``perf stat``.
+This command basically profiles using a few default counters and displays
the summed counts at the end of the run::
root@crownbay:~# perf stat wget &YOCTO_DL_URL;/mirror/sources/linux-2.6.19.2.tar.bz2
@@ -131,13 +131,13 @@ the summed counts at the end of the run::
59.836627620 seconds time elapsed
-Many times such a simple-minded test doesn't yield much of
-interest, but sometimes it does (see Real-world Yocto bug (slow
-loop-mounted write speed)).
+Such a simple-minded test doesn't always yield much of interest, but sometimes
+it does (see the :yocto_bugs:`Slow write speed on live images with denzil
+</show_bug.cgi?id=3049>` bug report).
-Also, note that 'perf stat' isn't restricted to a fixed set of counters
-- basically any event listed in the output of 'perf list' can be tallied
-by 'perf stat'. For example, suppose we wanted to see a summary of all
+Also, note that ``perf stat`` isn't restricted to a fixed set of counters
+--- basically any event listed in the output of ``perf list`` can be tallied
+by ``perf stat``. For example, suppose we wanted to see a summary of all
the events related to kernel memory allocation/freeing along with cache
hits and misses::
@@ -164,22 +164,22 @@ hits and misses::
44.831023415 seconds time elapsed
-So 'perf stat' gives us a nice easy
+As you can see, ``perf stat`` gives us a nice easy
way to get a quick overview of what might be happening for a set of
events, but normally we'd need a little more detail in order to
understand what's going on in a way that we can act on in a useful way.
-To dive down into a next level of detail, we can use 'perf record'/'perf
-report' which will collect profiling data and present it to use using an
-interactive text-based UI (or simply as text if we specify ``--stdio`` to
-'perf report').
+To dive down into a next level of detail, we can use ``perf record`` /
+``perf report`` which will collect profiling data and present it to use using an
+interactive text-based UI (or just as text if we specify ``--stdio`` to
+``perf report``).
-As our first attempt at profiling this workload, we'll simply run 'perf
-record', handing it the workload we want to profile (everything after
-'perf record' and any perf options we hand it - here none - will be
+As our first attempt at profiling this workload, we'll just run ``perf
+record``, handing it the workload we want to profile (everything after
+``perf record`` and any perf options we hand it --- here none, will be
executed in a new shell). perf collects samples until the process exits
-and records them in a file named 'perf.data' in the current working
-directory. ::
+and records them in a file named ``perf.data`` in the current working
+directory::
root@crownbay:~# perf record wget &YOCTO_DL_URL;/mirror/sources/linux-2.6.19.2.tar.bz2
@@ -189,7 +189,7 @@ directory. ::
[ perf record: Captured and wrote 0.176 MB perf.data (~7700 samples) ]
To see the results in a
-'text-based UI' (tui), simply run 'perf report', which will read the
+"text-based UI" (tui), just run ``perf report``, which will read the
perf.data file in the current working directory and display the results
in an interactive UI::
@@ -199,26 +199,26 @@ in an interactive UI::
:align: center
:width: 70%
-The above screenshot displays a 'flat' profile, one entry for each
-'bucket' corresponding to the functions that were profiled during the
+The above screenshot displays a "flat" profile, one entry for each
+"bucket" corresponding to the functions that were profiled during the
profiling run, ordered from the most popular to the least (perf has
options to sort in various orders and keys as well as display entries
-only above a certain threshold and so on - see the perf documentation
-for details). Note that this includes both userspace functions (entries
-containing a [.]) and kernel functions accounted to the process (entries
-containing a [k]). (perf has command-line modifiers that can be used to
-restrict the profiling to kernel or userspace, among others).
-
-Notice also that the above report shows an entry for 'busybox', which is
-the executable that implements 'wget' in Yocto, but that instead of a
+only above a certain threshold and so on --- see the perf documentation
+for details). Note that this includes both user space functions (entries
+containing a ``[.]``) and kernel functions accounted to the process (entries
+containing a ``[k]``). perf has command-line modifiers that can be used to
+restrict the profiling to kernel or user space, among others.
+
+Notice also that the above report shows an entry for ``busybox``, which is
+the executable that implements ``wget`` in Yocto, but that instead of a
useful function name in that entry, it displays a not-so-friendly hex
value instead. The steps below will show how to fix that problem.
Before we do that, however, let's try running a different profile, one
which shows something a little more interesting. The only difference
-between the new profile and the previous one is that we'll add the -g
+between the new profile and the previous one is that we'll add the ``-g``
option, which will record not just the address of a sampled function,
-but the entire callchain to the sampled function as well::
+but the entire call chain to the sampled function as well::
root@crownbay:~# perf record -g wget &YOCTO_DL_URL;/mirror/sources/linux-2.6.19.2.tar.bz2
Connecting to downloads.yoctoproject.org (140.211.169.59:80)
@@ -233,45 +233,45 @@ but the entire callchain to the sampled function as well::
:align: center
:width: 70%
-Using the callgraph view, we can actually see not only which functions
+Using the call graph view, we can actually see not only which functions
took the most time, but we can also see a summary of how those functions
were called and learn something about how the program interacts with the
kernel in the process.
-Notice that each entry in the above screenshot now contains a '+' on the
-left-hand side. This means that we can expand the entry and drill down
-into the callchains that feed into that entry. Pressing 'enter' on any
-one of them will expand the callchain (you can also press 'E' to expand
-them all at the same time or 'C' to collapse them all).
+Notice that each entry in the above screenshot now contains a ``+`` on the
+left side. This means that we can expand the entry and drill down
+into the call chains that feed into that entry. Pressing ``Enter`` on any
+one of them will expand the call chain (you can also press ``E`` to expand
+them all at the same time or ``C`` to collapse them all).
In the screenshot above, we've toggled the ``__copy_to_user_ll()`` entry
-and several subnodes all the way down. This lets us see which callchains
+and several subnodes all the way down. This lets us see which call chains
contributed to the profiled ``__copy_to_user_ll()`` function which
contributed 1.77% to the total profile.
-As a bit of background explanation for these callchains, think about
-what happens at a high level when you run wget to get a file out on the
+As a bit of background explanation for these call chains, think about
+what happens at a high level when you run ``wget`` to get a file out on the
network. Basically what happens is that the data comes into the kernel
-via the network connection (socket) and is passed to the userspace
-program 'wget' (which is actually a part of BusyBox, but that's not
+via the network connection (socket) and is passed to the user space
+program ``wget`` (which is actually a part of BusyBox, but that's not
important for now), which takes the buffers the kernel passes to it and
writes it to a disk file to save it.
The part of this process that we're looking at in the above call stacks
is the part where the kernel passes the data it has read from the socket
-down to wget i.e. a copy-to-user.
+down to wget i.e. a ``copy-to-user``.
Notice also that here there's also a case where the hex value is
-displayed in the callstack, here in the expanded ``sys_clock_gettime()``
-function. Later we'll see it resolve to a userspace function call in
-busybox.
+displayed in the call stack, here in the expanded ``sys_clock_gettime()``
+function. Later we'll see it resolve to a user space function call in
+BusyBox.
.. image:: figures/perf-wget-g-copy-from-user-expanded-stripped.png
:align: center
:width: 70%
-The above screenshot shows the other half of the journey for the data -
-from the wget program's userspace buffers to disk. To get the buffers to
+The above screenshot shows the other half of the journey for the data ---
+from the ``wget`` program's user space buffers to disk. To get the buffers to
disk, the wget program issues a ``write(2)``, which does a ``copy-from-user`` to
the kernel, which then takes care via some circuitous path (probably
also present somewhere in the profile data), to get it safely to disk.
@@ -281,8 +281,8 @@ of how to extract useful information out of it, let's get back to the
task at hand and see if we can get some basic idea about where the time
is spent in the program we're profiling, wget. Remember that wget is
actually implemented as an applet in BusyBox, so while the process name
-is 'wget', the executable we're actually interested in is BusyBox. So
-let's expand the first entry containing BusyBox:
+is ``wget``, the executable we're actually interested in is ``busybox``.
+Therefore, let's expand the first entry containing BusyBox:
.. image:: figures/perf-wget-busybox-expanded-stripped.png
:align: center
@@ -293,7 +293,7 @@ hex value instead of a symbol as with most of the kernel entries.
Expanding the BusyBox entry doesn't make it any better.
The problem is that perf can't find the symbol information for the
-busybox binary, which is actually stripped out by the Yocto build
+``busybox`` binary, which is actually stripped out by the Yocto build
system.
One way around that is to put the following in your ``local.conf`` file
@@ -303,40 +303,39 @@ when you build the image::
However, we already have an image with the binaries stripped, so
what can we do to get perf to resolve the symbols? Basically we need to
-install the debuginfo for the BusyBox package.
+install the debugging information for the BusyBox package.
To generate the debug info for the packages in the image, we can add
``dbg-pkgs`` to :term:`EXTRA_IMAGE_FEATURES` in ``local.conf``. For example::
EXTRA_IMAGE_FEATURES = "debug-tweaks tools-profile dbg-pkgs"
-Additionally, in order to generate the type of debuginfo that perf
-understands, we also need to set
-:term:`PACKAGE_DEBUG_SPLIT_STYLE`
+Additionally, in order to generate the type of debugging information that perf
+understands, we also need to set :term:`PACKAGE_DEBUG_SPLIT_STYLE`
in the ``local.conf`` file::
PACKAGE_DEBUG_SPLIT_STYLE = 'debug-file-directory'
-Once we've done that, we can install the
-debuginfo for BusyBox. The debug packages once built can be found in
-``build/tmp/deploy/rpm/*`` on the host system. Find the busybox-dbg-...rpm
-file and copy it to the target. For example::
+Once we've done that, we can install the debugging information for BusyBox. The
+debug packages once built can be found in ``build/tmp/deploy/rpm/*``
+on the host system. Find the ``busybox-dbg-...rpm`` file and copy it
+to the target. For example::
[trz@empanada core2]$ scp /home/trz/yocto/crownbay-tracing-dbg/build/tmp/deploy/rpm/core2_32/busybox-dbg-1.20.2-r2.core2_32.rpm root@192.168.1.31:
busybox-dbg-1.20.2-r2.core2_32.rpm 100% 1826KB 1.8MB/s 00:01
-Now install the debug rpm on the target::
+Now install the debug RPM on the target::
root@crownbay:~# rpm -i busybox-dbg-1.20.2-r2.core2_32.rpm
-Now that the debuginfo is installed, we see that the BusyBox entries now display
+Now that the debugging information is installed, we see that the BusyBox entries now display
their functions symbolically:
.. image:: figures/perf-wget-busybox-debuginfo.png
:align: center
:width: 70%
-If we expand one of the entries and press 'enter' on a leaf node, we're
+If we expand one of the entries and press ``Enter`` on a leaf node, we're
presented with a menu of actions we can take to get more information
related to that entry:
@@ -346,17 +345,17 @@ related to that entry:
One of these actions allows us to show a view that displays a
busybox-centric view of the profiled functions (in this case we've also
-expanded all the nodes using the 'E' key):
+expanded all the nodes using the ``E`` key):
.. image:: figures/perf-wget-busybox-dso-zoom.png
:align: center
:width: 70%
-Finally, we can see that now that the BusyBox debuginfo is installed,
+Finally, we can see that now that the BusyBox debugging information is installed,
the previously unresolved symbol in the ``sys_clock_gettime()`` entry
mentioned previously is now resolved, and shows that the
-sys_clock_gettime system call that was the source of 6.75% of the
-copy-to-user overhead was initiated by the ``handle_input()`` BusyBox
+``sys_clock_gettime`` system call that was the source of 6.75% of the
+``copy-to-user`` overhead was initiated by the ``handle_input()`` BusyBox
function:
.. image:: figures/perf-wget-g-copy-to-user-expanded-debuginfo.png
@@ -365,15 +364,15 @@ function:
At the lowest level of detail, we can dive down to the assembly level
and see which instructions caused the most overhead in a function.
-Pressing 'enter' on the 'udhcpc_main' function, we're again presented
+Pressing ``Enter`` on the ``udhcpc_main`` function, we're again presented
with a menu:
.. image:: figures/perf-wget-busybox-annotate-menu.png
:align: center
:width: 70%
-Selecting 'Annotate udhcpc_main', we get a detailed listing of
-percentages by instruction for the udhcpc_main function. From the
+Selecting ``Annotate udhcpc_main``, we get a detailed listing of
+percentages by instruction for the ``udhcpc_main`` function. From the
display, we can see that over 50% of the time spent in this function is
taken up by a couple tests and the move of a constant (1) to a register:
@@ -382,17 +381,17 @@ taken up by a couple tests and the move of a constant (1) to a register:
:width: 70%
As a segue into tracing, let's try another profile using a different
-counter, something other than the default 'cycles'.
+counter, something other than the default ``cycles``.
The tracing and profiling infrastructure in Linux has become unified in
a way that allows us to use the same tool with a completely different
set of counters, not just the standard hardware counters that
-traditional tools have had to restrict themselves to (of course the
-traditional tools can also make use of the expanded possibilities now
+traditional tools have had to restrict themselves to (the
+traditional tools can now actually make use of the expanded possibilities now
available to them, and in some cases have, as mentioned previously).
We can get a list of the available events that can be used to profile a
-workload via 'perf list'::
+workload via ``perf list``::
root@crownbay:~# perf list
@@ -528,14 +527,14 @@ workload via 'perf list'::
.. admonition:: Tying it Together
These are exactly the same set of events defined by the trace event
- subsystem and exposed by ftrace/tracecmd/kernelshark as files in
- /sys/kernel/debug/tracing/events, by SystemTap as
+ subsystem and exposed by ftrace / trace-cmd / KernelShark as files in
+ ``/sys/kernel/debug/tracing/events``, by SystemTap as
kernel.trace("tracepoint_name") and (partially) accessed by LTTng.
Only a subset of these would be of interest to us when looking at this
workload, so let's choose the most likely subsystems (identified by the
-string before the colon in the Tracepoint events) and do a 'perf stat'
-run using only those wildcarded subsystems::
+string before the colon in the ``Tracepoint`` events) and do a ``perf stat``
+run using only those subsystem wildcards::
root@crownbay:~# perf stat -e skb:* -e net:* -e napi:* -e sched:* -e workqueue:* -e irq:* -e syscalls:* wget &YOCTO_DL_URL;/mirror/sources/linux-2.6.19.2.tar.bz2
Performance counter stats for 'wget &YOCTO_DL_URL;/mirror/sources/linux-2.6.19.2.tar.bz2':
@@ -607,8 +606,8 @@ and tell perf to do a profile using it as the sampling event::
The screenshot above shows the results of running a profile using
sched:sched_switch tracepoint, which shows the relative costs of various
-paths to sched_wakeup (note that sched_wakeup is the name of the
-tracepoint - it's actually defined just inside ttwu_do_wakeup(), which
+paths to ``sched_wakeup`` (note that ``sched_wakeup`` is the name of the
+tracepoint --- it's actually defined just inside ``ttwu_do_wakeup()``, which
accounts for the function name actually displayed in the profile:
.. code-block:: c
@@ -626,15 +625,15 @@ accounts for the function name actually displayed in the profile:
}
A couple of the more interesting
-callchains are expanded and displayed above, basically some network
-receive paths that presumably end up waking up wget (busybox) when
+call chains are expanded and displayed above, basically some network
+receive paths that presumably end up waking up wget (BusyBox) when
network data is ready.
Note that because tracepoints are normally used for tracing, the default
-sampling period for tracepoints is 1 i.e. for tracepoints perf will
-sample on every event occurrence (this can be changed using the -c
+sampling period for tracepoints is ``1`` i.e. for tracepoints perf will
+sample on every event occurrence (this can be changed using the ``-c``
option). This is in contrast to hardware counters such as for example
-the default 'cycles' hardware counter used for normal profiling, where
+the default ``cycles`` hardware counter used for normal profiling, where
sampling periods are much higher (in the thousands) because profiling
should have as low an overhead as possible and sampling on every cycle
would be prohibitively expensive.
@@ -645,10 +644,10 @@ Using perf to do Basic Tracing
Profiling is a great tool for solving many problems or for getting a
high-level view of what's going on with a workload or across the system.
It is however by definition an approximation, as suggested by the most
-prominent word associated with it, 'sampling'. On the one hand, it
+prominent word associated with it, ``sampling``. On the one hand, it
allows a representative picture of what's going on in the system to be
-cheaply taken, but on the other hand, that cheapness limits its utility
-when that data suggests a need to 'dive down' more deeply to discover
+cheaply taken, but alternatively, that cheapness limits its utility
+when that data suggests a need to "dive down" more deeply to discover
what's really going on. In such cases, the only way to see what's really
going on is to be able to look at (or summarize more intelligently) the
individual steps that go into the higher-level behavior exposed by the
@@ -661,7 +660,7 @@ applicable to our workload::
-e syscalls:sys_enter_read -e syscalls:sys_exit_read -e syscalls:sys_enter_write -e syscalls:sys_exit_write
wget &YOCTO_DL_URL;/mirror/sources/linux-2.6.19.2.tar.bz2
-We can look at the raw trace output using 'perf script' with no
+We can look at the raw trace output using ``perf script`` with no
arguments::
root@crownbay:~# perf script
@@ -692,7 +691,7 @@ arguments::
This gives us a detailed timestamped sequence of events that occurred within the
workload with respect to those events.
-In many ways, profiling can be viewed as a subset of tracing -
+In many ways, profiling can be viewed as a subset of tracing ---
theoretically, if you have a set of trace events that's sufficient to
capture all the important aspects of a workload, you can derive any of
the results or views that a profiling run can.
@@ -712,23 +711,23 @@ an infinite variety of ways.
Another way to look at it is that there are only so many ways that the
'primitive' counters can be used on their own to generate interesting
output; to get anything more complicated than simple counts requires
-some amount of additional logic, which is typically very specific to the
+some amount of additional logic, which is typically specific to the
problem at hand. For example, if we wanted to make use of a 'counter'
that maps to the value of the time difference between when a process was
scheduled to run on a processor and the time it actually ran, we
wouldn't expect such a counter to exist on its own, but we could derive
-one called say 'wakeup_latency' and use it to extract a useful view of
+one called say ``wakeup_latency`` and use it to extract a useful view of
that metric from trace data. Likewise, we really can't figure out from
standard profiling tools how much data every process on the system reads
and writes, along with how many of those reads and writes fail
completely. If we have sufficient trace data, however, we could with the
right tools easily extract and present that information, but we'd need
-something other than pre-canned profiling tools to do that.
+something other than ready-made profiling tools to do that.
Luckily, there is a general-purpose way to handle such needs, called
-'programming languages'. Making programming languages easily available
+"programming languages". Making programming languages easily available
to apply to such problems given the specific format of data is called a
-'programming language binding' for that data and language. Perf supports
+'programming language binding' for that data and language. perf supports
two programming language bindings, one for Python and one for Perl.
.. admonition:: Tying it Together
@@ -738,21 +737,21 @@ two programming language bindings, one for Python and one for Perl.
DProbes dpcc compiler, an ANSI C compiler which targeted a low-level
assembly language running on an in-kernel interpreter on the target
system. This is exactly analogous to what Sun's DTrace did, except
- that DTrace invented its own language for the purpose. Systemtap,
+ that DTrace invented its own language for the purpose. SystemTap,
heavily inspired by DTrace, also created its own one-off language,
but rather than running the product on an in-kernel interpreter,
created an elaborate compiler-based machinery to translate its
language into kernel modules written in C.
-Now that we have the trace data in perf.data, we can use 'perf script
--g' to generate a skeleton script with handlers for the read/write
-entry/exit events we recorded::
+Now that we have the trace data in ``perf.data``, we can use ``perf script
+-g`` to generate a skeleton script with handlers for the read / write
+entry / exit events we recorded::
root@crownbay:~# perf script -g python
generated Python script: perf-script.py
-The skeleton script simply creates a Python function for each event type in the
-perf.data file. The body of each function simply prints the event name along
+The skeleton script just creates a Python function for each event type in the
+``perf.data`` file. The body of each function just prints the event name along
with its parameters. For example:
.. code-block:: python
@@ -766,7 +765,7 @@ with its parameters. For example:
print "skbaddr=%u, len=%u, name=%s\n" % (skbaddr, len, name),
We can run that script directly to print all of the events contained in the
-perf.data file::
+``perf.data`` file::
root@crownbay:~# perf script -s perf-script.py
@@ -795,8 +794,8 @@ perf.data file::
syscalls__sys_exit_read 1 11624.859944032 1262 wget nr=3, ret=1024
That in itself isn't very useful; after all, we can accomplish pretty much the
-same thing by simply running 'perf script' without arguments in the same
-directory as the perf.data file.
+same thing by just running ``perf script`` without arguments in the same
+directory as the ``perf.data`` file.
We can however replace the print statements in the generated function
bodies with whatever we want, and thereby make it infinitely more
@@ -817,8 +816,8 @@ event. For example:
Each event handler function in the generated code
is modified to do this. For convenience, we define a common function
-called inc_counts() that each handler calls; inc_counts() simply tallies
-a count for each event using the 'counts' hash, which is a specialized
+called ``inc_counts()`` that each handler calls; ``inc_counts()`` just tallies
+a count for each event using the ``counts`` hash, which is a specialized
hash function that does Perl-like autovivification, a capability that's
extremely useful for kinds of multi-level aggregation commonly used in
processing traces (see perf's documentation on the Python language
@@ -836,7 +835,7 @@ binding for details):
Finally, at the end of the trace processing run, we want to print the
result of all the per-event tallies. For that, we use the special
-'trace_end()' function:
+``trace_end()`` function:
.. code-block:: python
@@ -865,7 +864,7 @@ The end result is a summary of all the events recorded in the trace::
syscalls__sys_exit_write 8990
Note that this is
-pretty much exactly the same information we get from 'perf stat', which
+pretty much exactly the same information we get from ``perf stat``, which
goes a little way to support the idea mentioned previously that given
the right kind of trace data, higher-level profiling-type summaries can
be derived from it.
@@ -877,44 +876,44 @@ System-Wide Tracing and Profiling
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The examples so far have focused on tracing a particular program or
-workload - in other words, every profiling run has specified the program
-to profile in the command-line e.g. 'perf record wget ...'.
+workload --- that is, every profiling run has specified the program
+to profile in the command-line e.g. ``perf record wget ...``.
It's also possible, and more interesting in many cases, to run a
system-wide profile or trace while running the workload in a separate
shell.
-To do system-wide profiling or tracing, you typically use the -a flag to
-'perf record'.
+To do system-wide profiling or tracing, you typically use the ``-a`` flag to
+``perf record``.
To demonstrate this, open up one window and start the profile using the
--a flag (press Ctrl-C to stop tracing)::
+``-a`` flag (press ``Ctrl-C`` to stop tracing)::
root@crownbay:~# perf record -g -a
^C[ perf record: Woken up 6 times to write data ]
[ perf record: Captured and wrote 1.400 MB perf.data (~61172 samples) ]
-In another window, run the wget test::
+In another window, run the ``wget`` test::
root@crownbay:~# wget &YOCTO_DL_URL;/mirror/sources/linux-2.6.19.2.tar.bz2
Connecting to downloads.yoctoproject.org (140.211.169.59:80)
linux-2.6.19.2.tar.b 100% \|*******************************\| 41727k 0:00:00 ETA
-Here we see entries not only for our wget load, but for
+Here we see entries not only for our ``wget`` load, but for
other processes running on the system as well:
.. image:: figures/perf-systemwide.png
:align: center
:width: 70%
-In the snapshot above, we can see callchains that originate in libc, and
-a callchain from Xorg that demonstrates that we're using a proprietary X
-driver in userspace (notice the presence of 'PVR' and some other
-unresolvable symbols in the expanded Xorg callchain).
+In the snapshot above, we can see call chains that originate in ``libc``, and
+a call chain from ``Xorg`` that demonstrates that we're using a proprietary X
+driver in user space (notice the presence of ``PVR`` and some other
+unresolvable symbols in the expanded ``Xorg`` call chain).
-Note also that we have both kernel and userspace entries in the above
-snapshot. We can also tell perf to focus on userspace but providing a
-modifier, in this case 'u', to the 'cycles' hardware counter when we
+Note also that we have both kernel and user space entries in the above
+snapshot. We can also tell perf to focus on user space but providing a
+modifier, in this case ``u``, to the ``cycles`` hardware counter when we
record a profile::
root@crownbay:~# perf record -g -a -e cycles:u
@@ -925,25 +924,25 @@ record a profile::
:align: center
:width: 70%
-Notice in the screenshot above, we see only userspace entries ([.])
+Notice in the screenshot above, we see only user space entries (``[.]``)
-Finally, we can press 'enter' on a leaf node and select the 'Zoom into
-DSO' menu item to show only entries associated with a specific DSO. In
-the screenshot below, we've zoomed into the 'libc' DSO which shows all
-the entries associated with the libc-xxx.so DSO.
+Finally, we can press ``Enter`` on a leaf node and select the ``Zoom into
+DSO`` menu item to show only entries associated with a specific DSO. In
+the screenshot below, we've zoomed into the ``libc`` DSO which shows all
+the entries associated with the ``libc-xxx.so`` DSO.
.. image:: figures/perf-systemwide-libc.png
:align: center
:width: 70%
-We can also use the system-wide -a switch to do system-wide tracing.
+We can also use the system-wide ``-a`` switch to do system-wide tracing.
Here we'll trace a couple of scheduler events::
root@crownbay:~# perf record -a -e sched:sched_switch -e sched:sched_wakeup
^C[ perf record: Woken up 38 times to write data ]
[ perf record: Captured and wrote 9.780 MB perf.data (~427299 samples) ]
-We can look at the raw output using 'perf script' with no arguments::
+We can look at the raw output using ``perf script`` with no arguments::
root@crownbay:~# perf script
@@ -961,11 +960,11 @@ We can look at the raw output using 'perf script' with no arguments::
Filtering
^^^^^^^^^
-Notice that there are a lot of events that don't really have anything to
-do with what we're interested in, namely events that schedule 'perf'
+Notice that there are many events that don't really have anything to
+do with what we're interested in, namely events that schedule ``perf``
itself in and out or that wake perf up. We can get rid of those by using
-the '--filter' option - for each event we specify using -e, we can add a
---filter after that to filter out trace events that contain fields with
+the ``--filter`` option --- for each event we specify using ``-e``, we can add a
+``--filter`` after that to filter out trace events that contain fields with
specific values::
root@crownbay:~# perf record -a -e sched:sched_switch --filter 'next_comm != perf && prev_comm != perf' -e sched:sched_wakeup --filter 'comm != perf'
@@ -991,16 +990,16 @@ specific values::
kworker/0:3 1209 [000] 7932.326214: sched_switch: prev_comm=kworker/0:3 prev_pid=1209 prev_prio=120 prev_state=S ==> next_comm=swapper/0 next_pid=0 next_prio=120
In this case, we've filtered out all events that have
-'perf' in their 'comm' or 'comm_prev' or 'comm_next' fields. Notice that
+``perf`` in their ``comm``, ``comm_prev`` or ``comm_next`` fields. Notice that
there are still events recorded for perf, but notice that those events
-don't have values of 'perf' for the filtered fields. To completely
+don't have values of ``perf`` for the filtered fields. To completely
filter out anything from perf will require a bit more work, but for the
purpose of demonstrating how to use filters, it's close enough.
.. admonition:: Tying it Together
These are exactly the same set of event filters defined by the trace
- event subsystem. See the ftrace/tracecmd/kernelshark section for more
+ event subsystem. See the ftrace / trace-cmd / KernelShark section for more
discussion about these event filters.
.. admonition:: Tying it Together
@@ -1010,14 +1009,14 @@ purpose of demonstrating how to use filters, it's close enough.
indispensable part of the perf design as it relates to tracing.
kernel-based event filters provide a mechanism to precisely throttle
the event stream that appears in user space, where it makes sense to
- provide bindings to real programming languages for postprocessing the
+ provide bindings to real programming languages for post-processing the
event stream. This architecture allows for the intelligent and
flexible partitioning of processing between the kernel and user
space. Contrast this with other tools such as SystemTap, which does
all of its processing in the kernel and as such requires a special
project-defined language in order to accommodate that design, or
- LTTng, where everything is sent to userspace and as such requires a
- super-efficient kernel-to-userspace transport mechanism in order to
+ LTTng, where everything is sent to user space and as such requires a
+ super-efficient kernel-to-user space transport mechanism in order to
function properly. While perf certainly can benefit from for instance
advances in the design of the transport, it doesn't fundamentally
depend on them. Basically, if you find that your perf tracing
@@ -1028,9 +1027,9 @@ Using Dynamic Tracepoints
~~~~~~~~~~~~~~~~~~~~~~~~~
perf isn't restricted to the fixed set of static tracepoints listed by
-'perf list'. Users can also add their own 'dynamic' tracepoints anywhere
-in the kernel. For instance, suppose we want to define our own
-tracepoint on do_fork(). We can do that using the 'perf probe' perf
+``perf list``. Users can also add their own "dynamic" tracepoints anywhere
+in the kernel. For example, suppose we want to define our own
+tracepoint on ``do_fork()``. We can do that using the ``perf probe`` perf
subcommand::
root@crownbay:~# perf probe do_fork
@@ -1042,8 +1041,8 @@ subcommand::
perf record -e probe:do_fork -aR sleep 1
Adding a new tracepoint via
-'perf probe' results in an event with all the expected files and format
-in /sys/kernel/debug/tracing/events, just the same as for static
+``perf probe`` results in an event with all the expected files and format
+in ``/sys/kernel/debug/tracing/events``, just the same as for static
tracepoints (as discussed in more detail in the trace events subsystem
section::
@@ -1059,13 +1058,13 @@ section::
name: do_fork
ID: 944
format:
- field:unsigned short common_type; offset:0; size:2; signed:0;
- field:unsigned char common_flags; offset:2; size:1; signed:0;
- field:unsigned char common_preempt_count; offset:3; size:1; signed:0;
- field:int common_pid; offset:4; size:4; signed:1;
- field:int common_padding; offset:8; size:4; signed:1;
+ field:unsigned short common_type; offset:0; size:2; signed:0;
+ field:unsigned char common_flags; offset:2; size:1; signed:0;
+ field:unsigned char common_preempt_count; offset:3; size:1; signed:0;
+ field:int common_pid; offset:4; size:4; signed:1;
+ field:int common_padding; offset:8; size:4; signed:1;
- field:unsigned long __probe_ip; offset:12; size:4; signed:0;
+ field:unsigned long __probe_ip; offset:12; size:4; signed:0;
print fmt: "(%lx)", REC->__probe_ip
@@ -1076,7 +1075,7 @@ existence::
probe:do_fork (on do_fork)
probe:schedule (on schedule)
-Let's record system-wide ('sleep 30' is a
+Let's record system-wide (``sleep 30`` is a
trick for recording system-wide but basically do nothing and then wake
up after 30 seconds)::
@@ -1084,7 +1083,7 @@ up after 30 seconds)::
[ perf record: Woken up 1 times to write data ]
[ perf record: Captured and wrote 0.087 MB perf.data (~3812 samples) ]
-Using 'perf script' we can see each do_fork event that fired::
+Using ``perf script`` we can see each ``do_fork`` event that fired::
root@crownbay:~# perf script
@@ -1125,8 +1124,8 @@ Using 'perf script' we can see each do_fork event that fired::
matchbox-deskto 1311 [001] 34237.114106: do_fork: (c1028460)
gaku 1312 [000] 34237.202388: do_fork: (c1028460)
-And using 'perf report' on the same file, we can see the
-callgraphs from starting a few programs during those 30 seconds:
+And using ``perf report`` on the same file, we can see the
+call graphs from starting a few programs during those 30 seconds:
.. image:: figures/perf-probe-do_fork-profile.png
:align: center
@@ -1135,63 +1134,63 @@ callgraphs from starting a few programs during those 30 seconds:
.. admonition:: Tying it Together
The trace events subsystem accommodate static and dynamic tracepoints
- in exactly the same way - there's no difference as far as the
+ in exactly the same way --- there's no difference as far as the
infrastructure is concerned. See the ftrace section for more details
on the trace event subsystem.
.. admonition:: Tying it Together
- Dynamic tracepoints are implemented under the covers by kprobes and
- uprobes. kprobes and uprobes are also used by and in fact are the
+ Dynamic tracepoints are implemented under the covers by Kprobes and
+ Uprobes. Kprobes and Uprobes are also used by and in fact are the
main focus of SystemTap.
-Perf Documentation
+perf Documentation
------------------
-Online versions of the man pages for the commands discussed in this
+Online versions of the manual pages for the commands discussed in this
section can be found here:
-- The `'perf stat' manpage <https://linux.die.net/man/1/perf-stat>`__.
+- The `'perf stat' manual page <https://linux.die.net/man/1/perf-stat>`__.
- The `'perf record'
- manpage <https://linux.die.net/man/1/perf-record>`__.
+ manual page <https://linux.die.net/man/1/perf-record>`__.
- The `'perf report'
- manpage <https://linux.die.net/man/1/perf-report>`__.
+ manual page <https://linux.die.net/man/1/perf-report>`__.
-- The `'perf probe' manpage <https://linux.die.net/man/1/perf-probe>`__.
+- The `'perf probe' manual page <https://linux.die.net/man/1/perf-probe>`__.
- The `'perf script'
- manpage <https://linux.die.net/man/1/perf-script>`__.
+ manual page <https://linux.die.net/man/1/perf-script>`__.
- Documentation on using the `'perf script' Python
binding <https://linux.die.net/man/1/perf-script-python>`__.
-- The top-level `perf(1) manpage <https://linux.die.net/man/1/perf>`__.
+- The top-level `perf(1) manual page <https://linux.die.net/man/1/perf>`__.
-Normally, you should be able to invoke the man pages via perf itself
-e.g. 'perf help' or 'perf help record'.
+Normally, you should be able to open the manual pages via perf itself
+e.g. ``perf help`` or ``perf help record``.
-To have the perf manpages installed on your target, modify your
+To have the perf manual pages installed on your target, modify your
configuration as follows::
IMAGE_INSTALL:append = " perf perf-doc"
DISTRO_FEATURES:append = " api-documentation"
-The man pages in text form, along with some other files, such as a set
-of examples, can also be found in the 'perf' directory of the kernel tree::
+The manual pages in text form, along with some other files, such as a set
+of examples, can also be found in the ``perf`` directory of the kernel tree::
tools/perf/Documentation
There's also a nice perf tutorial on the perf
-wiki that goes into more detail than we do here in certain areas: `Perf
+wiki that goes into more detail than we do here in certain areas: `perf
Tutorial <https://perf.wiki.kernel.org/index.php/Tutorial>`__
ftrace
======
-'ftrace' literally refers to the 'ftrace function tracer' but in reality
-this encompasses a number of related tracers along with the
+"ftrace" literally refers to the "ftrace function tracer" but in reality
+this encompasses several related tracers along with the
infrastructure that they all make use of.
ftrace Setup
@@ -1200,20 +1199,20 @@ ftrace Setup
For this section, we'll assume you've already performed the basic setup
outlined in the ":ref:`profile-manual/intro:General Setup`" section.
-ftrace, trace-cmd, and kernelshark run on the target system, and are
-ready to go out-of-the-box - no additional setup is necessary. For the
-rest of this section we assume you've ssh'ed to the host and will be
-running ftrace on the target. kernelshark is a GUI application and if
-you use the '-X' option to ssh you can have the kernelshark GUI run on
+ftrace, trace-cmd, and KernelShark run on the target system, and are
+ready to go out-of-the-box --- no additional setup is necessary. For the
+rest of this section we assume you're connected to the host through SSH and
+will be running ftrace on the target. KernelShark is a GUI application and if
+you use the ``-X`` option to ``ssh`` you can have the KernelShark GUI run on
the target but display remotely on the host if you want.
Basic ftrace usage
------------------
-'ftrace' essentially refers to everything included in the /tracing
+"ftrace" essentially refers to everything included in the ``/tracing``
directory of the mounted debugfs filesystem (Yocto follows the standard
-convention and mounts it at /sys/kernel/debug). Here's a listing of all
-the files found in /sys/kernel/debug/tracing on a Yocto system::
+convention and mounts it at ``/sys/kernel/debug``). All the files found in
+``/sys/kernel/debug/tracing`` on a Yocto system are::
root@sugarbay:/sys/kernel/debug/tracing# ls
README kprobe_events trace
@@ -1229,7 +1228,7 @@ the files found in /sys/kernel/debug/tracing on a Yocto system::
free_buffer set_graph_function
The files listed above are used for various purposes
-- some relate directly to the tracers themselves, others are used to set
+--- some relate directly to the tracers themselves, others are used to set
tracing options, and yet others actually contain the tracing output when
a tracer is in effect. Some of the functions can be guessed from their
names, others need explanation; in any case, we'll cover some of the
@@ -1238,30 +1237,30 @@ the ftrace documentation.
We'll start by looking at some of the available built-in tracers.
-cat'ing the 'available_tracers' file lists the set of available tracers::
+The ``available_tracers`` file lists the set of available tracers::
root@sugarbay:/sys/kernel/debug/tracing# cat available_tracers
blk function_graph function nop
-The 'current_tracer' file contains the tracer currently in effect::
+The ``current_tracer`` file contains the tracer currently in effect::
root@sugarbay:/sys/kernel/debug/tracing# cat current_tracer
nop
-The above listing of current_tracer shows that the
-'nop' tracer is in effect, which is just another way of saying that
+The above listing of ``current_tracer`` shows that the
+``nop`` tracer is in effect, which is just another way of saying that
there's actually no tracer currently in effect.
-echo'ing one of the available_tracers into current_tracer makes the
+Writing one of the available tracers into ``current_tracer`` makes the
specified tracer the current tracer::
root@sugarbay:/sys/kernel/debug/tracing# echo function > current_tracer
root@sugarbay:/sys/kernel/debug/tracing# cat current_tracer
function
-The above sets the current tracer to be the 'function tracer'. This tracer
+The above sets the current tracer to be the ``function`` tracer. This tracer
traces every function call in the kernel and makes it available as the
-contents of the 'trace' file. Reading the 'trace' file lists the
+contents of the ``trace`` file. Reading the ``trace`` file lists the
currently buffered function calls that have been traced by the function
tracer::
@@ -1308,7 +1307,7 @@ tracer::
.
Each line in the trace above shows what was happening in the kernel on a given
-cpu, to the level of detail of function calls. Each entry shows the function
+CPU, to the level of detail of function calls. Each entry shows the function
called, followed by its caller (after the arrow).
The function tracer gives you an extremely detailed idea of what the
@@ -1318,11 +1317,11 @@ great way to learn about how the kernel code works in a dynamic sense.
.. admonition:: Tying it Together
The ftrace function tracer is also available from within perf, as the
- ftrace:function tracepoint.
+ ``ftrace:function`` tracepoint.
It is a little more difficult to follow the call chains than it needs to
-be - luckily there's a variant of the function tracer that displays the
-callchains explicitly, called the 'function_graph' tracer::
+be --- luckily there's a variant of the function tracer that displays the
+call chains explicitly, called the ``function_graph`` tracer::
root@sugarbay:/sys/kernel/debug/tracing# echo function_graph > current_tracer
root@sugarbay:/sys/kernel/debug/tracing# cat trace | less
@@ -1437,11 +1436,11 @@ callchains explicitly, called the 'function_graph' tracer::
3) + 13.784 us | }
3) | sys_ioctl() {
-As you can see, the function_graph display is much easier
+As you can see, the ``function_graph`` display is much easier
to follow. Also note that in addition to the function calls and
associated braces, other events such as scheduler events are displayed
in context. In fact, you can freely include any tracepoint available in
-the trace events subsystem described in the next section by simply
+the trace events subsystem described in the next section by just
enabling those events, and they'll appear in context in the function
graph display. Quite a powerful tool for understanding kernel dynamics.
@@ -1455,9 +1454,9 @@ The 'trace events' Subsystem
----------------------------
One especially important directory contained within the
-/sys/kernel/debug/tracing directory is the 'events' subdirectory, which
+``/sys/kernel/debug/tracing`` directory is the ``events`` subdirectory, which
contains representations of every tracepoint in the system. Listing out
-the contents of the 'events' subdirectory, we see mainly another set of
+the contents of the ``events`` subdirectory, we see mainly another set of
subdirectories::
root@sugarbay:/sys/kernel/debug/tracing# cd events
@@ -1505,9 +1504,9 @@ subdirectories::
drwxr-xr-x 26 root root 0 Nov 14 23:19 writeback
Each one of these subdirectories
-corresponds to a 'subsystem' and contains yet again more subdirectories,
+corresponds to a "subsystem" and contains yet again more subdirectories,
each one of those finally corresponding to a tracepoint. For example,
-here are the contents of the 'kmem' subsystem::
+here are the contents of the ``kmem`` subsystem::
root@sugarbay:/sys/kernel/debug/tracing/events# cd kmem
root@sugarbay:/sys/kernel/debug/tracing/events/kmem# ls -al
@@ -1529,7 +1528,7 @@ here are the contents of the 'kmem' subsystem::
drwxr-xr-x 2 root root 0 Nov 14 23:19 mm_page_pcpu_drain
Let's see what's inside the subdirectory for a
-specific tracepoint, in this case the one for kmalloc::
+specific tracepoint, in this case the one for ``kmalloc``::
root@sugarbay:/sys/kernel/debug/tracing/events/kmem# cd kmalloc
root@sugarbay:/sys/kernel/debug/tracing/events/kmem/kmalloc# ls -al
@@ -1540,28 +1539,28 @@ specific tracepoint, in this case the one for kmalloc::
-r--r--r-- 1 root root 0 Nov 14 23:19 format
-r--r--r-- 1 root root 0 Nov 14 23:19 id
-The 'format' file for the
+The ``format`` file for the
tracepoint describes the event in memory, which is used by the various
tracing tools that now make use of these tracepoint to parse the event
-and make sense of it, along with a 'print fmt' field that allows tools
-like ftrace to display the event as text. Here's what the format of the
-kmalloc event looks like::
+and make sense of it, along with a ``print fmt`` field that allows tools
+like ftrace to display the event as text. The format of the
+``kmalloc`` event looks like::
root@sugarbay:/sys/kernel/debug/tracing/events/kmem/kmalloc# cat format
name: kmalloc
ID: 313
format:
- field:unsigned short common_type; offset:0; size:2; signed:0;
- field:unsigned char common_flags; offset:2; size:1; signed:0;
- field:unsigned char common_preempt_count; offset:3; size:1; signed:0;
- field:int common_pid; offset:4; size:4; signed:1;
- field:int common_padding; offset:8; size:4; signed:1;
-
- field:unsigned long call_site; offset:16; size:8; signed:0;
- field:const void * ptr; offset:24; size:8; signed:0;
- field:size_t bytes_req; offset:32; size:8; signed:0;
- field:size_t bytes_alloc; offset:40; size:8; signed:0;
- field:gfp_t gfp_flags; offset:48; size:4; signed:0;
+ field:unsigned short common_type; offset:0; size:2; signed:0;
+ field:unsigned char common_flags; offset:2; size:1; signed:0;
+ field:unsigned char common_preempt_count; offset:3; size:1; signed:0;
+ field:int common_pid; offset:4; size:4; signed:1;
+ field:int common_padding; offset:8; size:4; signed:1;
+
+ field:unsigned long call_site; offset:16; size:8; signed:0;
+ field:const void * ptr; offset:24; size:8; signed:0;
+ field:size_t bytes_req; offset:32; size:8; signed:0;
+ field:size_t bytes_alloc; offset:40; size:8; signed:0;
+ field:gfp_t gfp_flags; offset:48; size:4; signed:0;
print fmt: "call_site=%lx ptr=%p bytes_req=%zu bytes_alloc=%zu gfp_flags=%s", REC->call_site, REC->ptr, REC->bytes_req, REC->bytes_alloc,
(REC->gfp_flags) ? __print_flags(REC->gfp_flags, "|", {(unsigned long)(((( gfp_t)0x10u) | (( gfp_t)0x40u) | (( gfp_t)0x80u) | ((
@@ -1580,11 +1579,11 @@ kmalloc event looks like::
long)(( gfp_t)0x08u), "GFP_MOVABLE"}, {(unsigned long)(( gfp_t)0), "GFP_NOTRACK"}, {(unsigned long)(( gfp_t)0x400000u), "GFP_NO_KSWAPD"},
{(unsigned long)(( gfp_t)0x800000u), "GFP_OTHER_NODE"} ) : "GFP_NOWAIT"
-The 'enable' file
+The ``enable`` file
in the tracepoint directory is what allows the user (or tools such as
-trace-cmd) to actually turn the tracepoint on and off. When enabled, the
-corresponding tracepoint will start appearing in the ftrace 'trace' file
-described previously. For example, this turns on the kmalloc tracepoint::
+``trace-cmd``) to actually turn the tracepoint on and off. When enabled, the
+corresponding tracepoint will start appearing in the ftrace ``trace`` file
+described previously. For example, this turns on the ``kmalloc`` tracepoint::
root@sugarbay:/sys/kernel/debug/tracing/events/kmem/kmalloc# echo 1 > enable
@@ -1596,8 +1595,8 @@ events in the output buffer::
root@sugarbay:/sys/kernel/debug/tracing# echo nop > current_tracer
root@sugarbay:/sys/kernel/debug/tracing# echo 1 > tracing_on
-Now, if we look at the 'trace' file, we see nothing
-but the kmalloc events we just turned on::
+Now, if we look at the ``trace`` file, we see nothing
+but the ``kmalloc`` events we just turned on::
root@sugarbay:/sys/kernel/debug/tracing# cat trace | less
# tracer: nop
@@ -1643,17 +1642,17 @@ but the kmalloc events we just turned on::
<idle>-0 [000] ..s3 18156.400660: kmalloc: call_site=ffffffff81619b36 ptr=ffff88006d554800 bytes_req=512 bytes_alloc=512 gfp_flags=GFP_ATOMIC
matchbox-termin-1361 [001] ...1 18156.552800: kmalloc: call_site=ffffffff81614050 ptr=ffff88006db34800 bytes_req=576 bytes_alloc=1024 gfp_flags=GFP_KERNEL|GFP_REPEAT
-To again disable the kmalloc event, we need to send 0 to the enable file::
+To again disable the ``kmalloc`` event, we need to send ``0`` to the ``enable`` file::
root@sugarbay:/sys/kernel/debug/tracing/events/kmem/kmalloc# echo 0 > enable
You can enable any number of events or complete subsystems (by
-using the 'enable' file in the subsystem directory) and get an
+using the ``enable`` file in the subsystem directory) and get an
arbitrarily fine-grained idea of what's going on in the system by
enabling as many of the appropriate tracepoints as applicable.
-A number of the tools described in this HOWTO do just that, including
-trace-cmd and kernelshark in the next section.
+Several tools described in this How-to do just that, including
+``trace-cmd`` and KernelShark in the next section.
.. admonition:: Tying it Together
@@ -1661,41 +1660,40 @@ trace-cmd and kernelshark in the next section.
ftrace, but by many of the other tools covered in this document and
they form a central point of integration for the various tracers
available in Linux. They form a central part of the instrumentation
- for the following tools: perf, lttng, ftrace, blktrace and SystemTap
+ for the following tools: perf, LTTng, ftrace, blktrace and SystemTap
.. admonition:: Tying it Together
Eventually all the special-purpose tracers currently available in
- /sys/kernel/debug/tracing will be removed and replaced with
- equivalent tracers based on the 'trace events' subsystem.
+ ``/sys/kernel/debug/tracing`` will be removed and replaced with
+ equivalent tracers based on the "trace events" subsystem.
-trace-cmd/kernelshark
----------------------
+trace-cmd / KernelShark
+-----------------------
-trace-cmd is essentially an extensive command-line 'wrapper' interface
+trace-cmd is essentially an extensive command-line "wrapper" interface
that hides the details of all the individual files in
-/sys/kernel/debug/tracing, allowing users to specify specific particular
-events within the /sys/kernel/debug/tracing/events/ subdirectory and to
+``/sys/kernel/debug/tracing``, allowing users to specify specific particular
+events within the ``/sys/kernel/debug/tracing/events/`` subdirectory and to
collect traces and avoid having to deal with those details directly.
-As yet another layer on top of that, kernelshark provides a GUI that
+As yet another layer on top of that, KernelShark provides a GUI that
allows users to start and stop traces and specify sets of events using
an intuitive interface, and view the output as both trace events and as
-a per-CPU graphical display. It directly uses 'trace-cmd' as the
+a per-CPU graphical display. It directly uses trace-cmd as the
plumbing that accomplishes all that underneath the covers (and actually
displays the trace-cmd command it uses, as we'll see).
-To start a trace using kernelshark, first start kernelshark::
+To start a trace using KernelShark, first start this tool::
root@sugarbay:~# kernelshark
-Then bring up the 'Capture' dialog by
-choosing from the kernelshark menu::
+Then open up the ``Capture`` dialog by choosing from the KernelShark menu::
Capture | Record
That will display the following dialog, which allows you to choose one or more
-events (or even one or more complete subsystems) to trace:
+events (or even entire subsystems) to trace:
.. image:: figures/kernelshark-choose-events.png
:align: center
@@ -1703,42 +1701,42 @@ events (or even one or more complete subsystems) to trace:
Note that these are exactly the same sets of events described in the
previous trace events subsystem section, and in fact is where trace-cmd
-gets them for kernelshark.
+gets them for KernelShark.
In the above screenshot, we've decided to explore the graphics subsystem
a bit and so have chosen to trace all the tracepoints contained within
-the 'i915' and 'drm' subsystems.
+the ``i915`` and ``drm`` subsystems.
-After doing that, we can start and stop the trace using the 'Run' and
-'Stop' button on the lower right corner of the dialog (the same button
+After doing that, we can start and stop the trace using the ``Run`` and
+``Stop`` button on the lower right corner of the dialog (the same button
will turn into the 'Stop' button after the trace has started):
.. image:: figures/kernelshark-output-display.png
:align: center
:width: 70%
-Notice that the right-hand pane shows the exact trace-cmd command-line
+Notice that the right pane shows the exact trace-cmd command-line
that's used to run the trace, along with the results of the trace-cmd
run.
-Once the 'Stop' button is pressed, the graphical view magically fills up
-with a colorful per-cpu display of the trace data, along with the
+Once the ``Stop`` button is pressed, the graphical view magically fills up
+with a colorful per-CPU display of the trace data, along with the
detailed event listing below that:
.. image:: figures/kernelshark-i915-display.png
:align: center
:width: 70%
-Here's another example, this time a display resulting from tracing 'all
-events':
+Here's another example, this time a display resulting from tracing ``all
+events``:
.. image:: figures/kernelshark-all.png
:align: center
:width: 70%
The tool is pretty self-explanatory, but for more detailed information
-on navigating through the data, see the `kernelshark
-website <https://rostedt.homelinux.com/kernelshark/>`__.
+on navigating through the data, see the `KernelShark
+website <https://kernelshark.org/Documentation.html>`__.
ftrace Documentation
--------------------
@@ -1753,41 +1751,41 @@ Documentation directory::
Documentation/trace/events.txt
-There is a nice series of articles on using ftrace and trace-cmd at LWN:
+A nice series of articles on using ftrace and trace-cmd are available at LWN:
-- `Debugging the kernel using Ftrace - part
+- `Debugging the kernel using ftrace - part
1 <https://lwn.net/Articles/365835/>`__
-- `Debugging the kernel using Ftrace - part
+- `Debugging the kernel using ftrace - part
2 <https://lwn.net/Articles/366796/>`__
-- `Secrets of the Ftrace function
+- `Secrets of the ftrace function
tracer <https://lwn.net/Articles/370423/>`__
- `trace-cmd: A front-end for
- Ftrace <https://lwn.net/Articles/410200/>`__
+ ftrace <https://lwn.net/Articles/410200/>`__
-There's more detailed documentation kernelshark usage here:
-`KernelShark <https://rostedt.homelinux.com/kernelshark/>`__
+See also `KernelShark's documentation <https://kernelshark.org/Documentation.html>`__
+for further usage details.
-An amusing yet useful README (a tracing mini-HOWTO) can be found in
+An amusing yet useful README (a tracing mini-How-to) can be found in
``/sys/kernel/debug/tracing/README``.
-systemtap
+SystemTap
=========
SystemTap is a system-wide script-based tracing and profiling tool.
SystemTap scripts are C-like programs that are executed in the kernel to
-gather/print/aggregate data extracted from the context they end up being
-invoked under.
+gather / print / aggregate data extracted from the context they end up being
+called under.
For example, this probe from the `SystemTap
-tutorial <https://sourceware.org/systemtap/tutorial/>`__ simply prints a
-line every time any process on the system open()s a file. For each line,
+tutorial <https://sourceware.org/systemtap/tutorial/>`__ just prints a
+line every time any process on the system runs ``open()`` on a file. For each line,
it prints the executable name of the program that opened the file, along
-with its PID, and the name of the file it opened (or tried to open),
-which it extracts from the open syscall's argstr.
+with its PID, and the name of the file it opened (or tried to open), which it
+extracts from the argument string (``argstr``) of the ``open`` system call.
.. code-block:: none
@@ -1802,48 +1800,48 @@ which it extracts from the open syscall's argstr.
}
Normally, to execute this
-probe, you'd simply install systemtap on the system you want to probe,
+probe, you'd just install SystemTap on the system you want to probe,
and directly run the probe on that system e.g. assuming the name of the
-file containing the above text is trace_open.stp::
+file containing the above text is ``trace_open.stp``::
# stap trace_open.stp
-What systemtap does under the covers to run this probe is 1) parse and
-convert the probe to an equivalent 'C' form, 2) compile the 'C' form
+What SystemTap does under the covers to run this probe is 1) parse and
+convert the probe to an equivalent "C" form, 2) compile the "C" form
into a kernel module, 3) insert the module into the kernel, which arms
it, and 4) collect the data generated by the probe and display it to the
user.
-In order to accomplish steps 1 and 2, the 'stap' program needs access to
+In order to accomplish steps 1 and 2, the ``stap`` program needs access to
the kernel build system that produced the kernel that the probed system
-is running. In the case of a typical embedded system (the 'target'), the
+is running. In the case of a typical embedded system (the "target"), the
kernel build system unfortunately isn't typically part of the image
-running on the target. It is normally available on the 'host' system
+running on the target. It is normally available on the "host" system
that produced the target image however; in such cases, steps 1 and 2 are
executed on the host system, and steps 3 and 4 are executed on the
-target system, using only the systemtap 'runtime'.
+target system, using only the SystemTap "runtime".
-The systemtap support in Yocto assumes that only steps 3 and 4 are run
+The SystemTap support in Yocto assumes that only steps 3 and 4 are run
on the target; it is possible to do everything on the target, but this
section assumes only the typical embedded use-case.
-So basically what you need to do in order to run a systemtap script on
+Therefore, what you need to do in order to run a SystemTap script on
the target is to 1) on the host system, compile the probe into a kernel
module that makes sense to the target, 2) copy the module onto the
target system and 3) insert the module into the target kernel, which
arms it, and 4) collect the data generated by the probe and display it
to the user.
-systemtap Setup
+SystemTap Setup
---------------
-Those are a lot of steps and a lot of details, but fortunately Yocto
-includes a script called 'crosstap' that will take care of those
-details, allowing you to simply execute a systemtap script on the remote
+Those are many steps and details, but fortunately Yocto
+includes a script called ``crosstap`` that will take care of those
+details, allowing you to just execute a SystemTap script on the remote
target, with arguments if necessary.
In order to do this from a remote host, however, you need to have access
-to the build for the image you booted. The 'crosstap' script provides
+to the build for the image you booted. The ``crosstap`` script provides
details on how to do this if you run the script on the host without
having done a build::
@@ -1852,29 +1850,35 @@ having done a build::
Error: No target kernel build found.
Did you forget to create a local build of your image?
- 'crosstap' requires a local sdk build of the target system
- (or a build that includes 'tools-profile') in order to build
- kernel modules that can probe the target system.
-
- Practically speaking, that means you need to do the following:
- - If you're running a pre-built image, download the release
- and/or BSP tarballs used to build the image.
- - If you're working from git sources, just clone the metadata
- and BSP layers needed to build the image you'll be booting.
- - Make sure you're properly set up to build a new image (see
- the BSP README and/or the widely available basic documentation
- that discusses how to build images).
- - Build an -sdk version of the image e.g.:
- $ bitbake core-image-sato-sdk
- OR
- - Build a non-sdk image but include the profiling tools:
- [ edit local.conf and add 'tools-profile' to the end of
- the EXTRA_IMAGE_FEATURES variable ]
- $ bitbake core-image-sato
+'crosstap' requires a local SDK build of the target system
+(or a build that includes 'tools-profile') in order to build
+kernel modules that can probe the target system.
+
+Practically speaking, that means you need to do the following:
+
+- If you're running a pre-built image, download the release
+ and/or BSP tarballs used to build the image.
+
+- If you're working from git sources, just clone the metadata
+ and BSP layers needed to build the image you'll be booting.
+
+- Make sure you're properly set up to build a new image (see
+ the BSP README and/or the widely available basic documentation
+ that discusses how to build images).
+
+- Build an ``-sdk`` version of the image e.g.::
+
+ $ bitbake core-image-sato-sdk
+
+- Or build a non-SDK image but include the profiling tools
+ (edit ``local.conf`` and add ``tools-profile`` to the end of
+ :term:`EXTRA_IMAGE_FEATURES` variable)::
+
+ $ bitbake core-image-sato
Once you've build the image on the host system, you're ready to
- boot it (or the equivalent pre-built image) and use 'crosstap'
- to probe it (you need to source the environment as usual first):
+ boot it (or the equivalent pre-built image) and use ``crosstap``
+ to probe it (you need to source the environment as usual first)::
$ source oe-init-build-env
$ cd ~/my/systemtap/scripts
@@ -1882,29 +1886,27 @@ having done a build::
.. note::
- SystemTap, which uses 'crosstap', assumes you can establish an ssh
+ SystemTap, which uses ``crosstap``, assumes you can establish an SSH
connection to the remote target. Please refer to the crosstap wiki
- page for details on verifying ssh connections at
- . Also, the ability to ssh into the target system is not enabled by
- default in \*-minimal images.
+ page for details on verifying SSH connections. Also, the ability to SSH
+ into the target system is not enabled by default in ``*-minimal`` images.
-So essentially what you need to
-do is build an SDK image or image with 'tools-profile' as detailed in
-the ":ref:`profile-manual/intro:General Setup`" section of this
-manual, and boot the resulting target image.
+Therefore, what you need to do is build an SDK image or image with
+``tools-profile`` as detailed in the ":ref:`profile-manual/intro:General Setup`"
+section of this manual, and boot the resulting target image.
.. note::
- If you have a build directory containing multiple machines, you need
- to have the MACHINE you're connecting to selected in local.conf, and
- the kernel in that machine's build directory must match the kernel on
- the booted system exactly, or you'll get the above 'crosstap' message
- when you try to invoke a script.
+ If you have a :term:`Build Directory` containing multiple machines, you need
+ to have the :term:`MACHINE` you're connecting to selected in ``local.conf``, and
+ the kernel in that machine's :term:`Build Directory` must match the kernel on
+ the booted system exactly, or you'll get the above ``crosstap`` message
+ when you try to call a script.
Running a Script on a Target
----------------------------
-Once you've done that, you should be able to run a systemtap script on
+Once you've done that, you should be able to run a SystemTap script on
the target::
$ cd /path/to/yocto
@@ -1922,8 +1924,8 @@ the target::
You can also run generated QEMU images with a command like 'runqemu qemux86-64'
-Once you've done that, you can cd to whatever
-directory contains your scripts and use 'crosstap' to run the script::
+Once you've done that, you can ``cd`` to whatever
+directory contains your scripts and use ``crosstap`` to run the script::
$ cd /path/to/my/systemap/script
$ crosstap root@192.168.7.2 trace_open.stp
@@ -1933,13 +1935,12 @@ If you get an error connecting to the target e.g.::
$ crosstap root@192.168.7.2 trace_open.stp
error establishing ssh connection on remote 'root@192.168.7.2'
-Try ssh'ing to the target and see what happens::
+Try connecting to the target through SSH and see what happens::
$ ssh root@192.168.7.2
-A lot of the time, connection
-problems are due specifying a wrong IP address or having a 'host key
-verification error'.
+Connection problems are often due specifying a wrong IP address or having a ``host key
+verification error``.
If everything worked as planned, you should see something like this
(enter the password when prompted, or press enter if it's set up to use
@@ -1952,7 +1953,7 @@ no password):
matchbox-termin(1036) open ("/tmp/vte3FS2LW", O_RDWR|O_CREAT|O_EXCL|O_LARGEFILE, 0600)
matchbox-termin(1036) open ("/tmp/vteJMC7LW", O_RDWR|O_CREAT|O_EXCL|O_LARGEFILE, 0600)
-systemtap Documentation
+SystemTap Documentation
-----------------------
The SystemTap language reference can be found here: `SystemTap Language
@@ -1965,7 +1966,7 @@ page <https://sourceware.org/systemtap/documentation.html>`__
Sysprof
=======
-Sysprof is a very easy to use system-wide profiler that consists of a
+Sysprof is an easy to use system-wide profiler that consists of a
single window with three panes and a few buttons which allow you to
start, stop, and view the profile from one place.
@@ -1975,18 +1976,18 @@ Sysprof Setup
For this section, we'll assume you've already performed the basic setup
outlined in the ":ref:`profile-manual/intro:General Setup`" section.
-Sysprof is a GUI-based application that runs on the target system. For
-the rest of this document we assume you've ssh'ed to the host and will
-be running Sysprof on the target (you can use the '-X' option to ssh and
+Sysprof is a GUI-based application that runs on the target system. For the rest
+of this document we assume you're connected to the host through SSH and will be
+running Sysprof on the target (you can use the ``-X`` option to ``ssh`` and
have the Sysprof GUI run on the target but display remotely on the host
if you want).
Basic Sysprof Usage
-------------------
-To start profiling the system, you simply press the 'Start' button. To
+To start profiling the system, you just press the ``Start`` button. To
stop profiling and to start viewing the profile data in one easy step,
-press the 'Profile' button.
+press the ``Profile`` button.
Once you've pressed the profile button, the three panes will fill up
with profiling data:
@@ -1998,11 +1999,11 @@ with profiling data:
The left pane shows a list of functions and processes. Selecting one of
those expands that function in the right pane, showing all its callees.
Note that this caller-oriented display is essentially the inverse of
-perf's default callee-oriented callchain display.
+perf's default callee-oriented call chain display.
In the screenshot above, we're focusing on ``__copy_to_user_ll()`` and
-looking up the callchain we can see that one of the callers of
-``__copy_to_user_ll`` is sys_read() and the complete callpath between them.
+looking up the call chain we can see that one of the callers of
+``__copy_to_user_ll`` is ``sys_read()`` and the complete call path between them.
Notice that this is essentially a portion of the same information we saw
in the perf display shown in the perf section of this page.
@@ -2011,7 +2012,7 @@ in the perf display shown in the perf section of this page.
:width: 70%
Similarly, the above is a snapshot of the Sysprof display of a
-copy-from-user callchain.
+``copy-from-user`` call chain.
Finally, looking at the third Sysprof pane in the lower left, we can see
a list of all the callers of a particular function selected in the top
@@ -2027,18 +2028,17 @@ to the selected function, and so on.
.. admonition:: Tying it Together
- If you like sysprof's 'caller-oriented' display, you may be able to
- approximate it in other tools as well. For example, 'perf report' has
- the -g (--call-graph) option that you can experiment with; one of the
- options is 'caller' for an inverted caller-based callgraph display.
+ If you like Sysprof's ``caller-oriented`` display, you may be able to
+ approximate it in other tools as well. For example, ``perf report`` has
+ the ``-g`` (``--call-graph``) option that you can experiment with; one of the
+ options is ``caller`` for an inverted caller-based call graph display.
Sysprof Documentation
---------------------
There doesn't seem to be any documentation for Sysprof, but maybe that's
-because it's pretty self-explanatory. The Sysprof website, however, is
-here: `Sysprof, System-wide Performance Profiler for
-Linux <http://sysprof.com/>`__
+because it's pretty self-explanatory. The Sysprof website, however, is here:
+`Sysprof, System-wide Performance Profiler for Linux <http://sysprof.com/>`__
LTTng (Linux Trace Toolkit, next generation)
============================================
@@ -2048,20 +2048,20 @@ LTTng Setup
For this section, we'll assume you've already performed the basic setup
outlined in the ":ref:`profile-manual/intro:General Setup`" section.
-LTTng is run on the target system by ssh'ing to it.
+LTTng is run on the target system by connecting to it through SSH.
Collecting and Viewing Traces
-----------------------------
Once you've applied the above commits and built and booted your image
-(you need to build the core-image-sato-sdk image or use one of the other
+(you need to build the ``core-image-sato-sdk`` image or use one of the other
methods described in the ":ref:`profile-manual/intro:General Setup`" section), you're ready to start
tracing.
Collecting and viewing a trace on the target (inside a shell)
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-First, from the host, ssh to the target::
+First, from the host, connect to the target through SSH::
$ ssh -l root 192.168.1.47
The authenticity of host '192.168.1.47 (192.168.1.47)' can't be established.
@@ -2138,31 +2138,31 @@ You can now view the trace in text form on the target::
.
You can now safely destroy the trace
-session (note that this doesn't delete the trace - it's still there in
-~/lttng-traces)::
+session (note that this doesn't delete the trace --- it's still there in
+``~/lttng-traces``)::
root@crownbay:~# lttng destroy
Session auto-20121015-232120 destroyed at /home/root
Note that the trace is saved in a directory of the same name as returned by
-'lttng create', under the ~/lttng-traces directory (note that you can change this by
-supplying your own name to 'lttng create')::
+``lttng create``, under the ``~/lttng-traces`` directory (note that you can change this by
+supplying your own name to ``lttng create``)::
root@crownbay:~# ls -al ~/lttng-traces
drwxrwx--- 3 root root 1024 Oct 15 23:21 .
drwxr-xr-x 5 root root 1024 Oct 15 23:57 ..
drwxrwx--- 3 root root 1024 Oct 15 23:21 auto-20121015-232120
-Collecting and viewing a userspace trace on the target (inside a shell)
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+Collecting and viewing a user space trace on the target (inside a shell)
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-For LTTng userspace tracing, you need to have a properly instrumented
-userspace program. For this example, we'll use the 'hello' test program
-generated by the lttng-ust build.
+For LTTng user space tracing, you need to have a properly instrumented
+user space program. For this example, we'll use the ``hello`` test program
+generated by the ``lttng-ust`` build.
-The 'hello' test program isn't installed on the root filesystem by the lttng-ust
-build, so we need to copy it over manually. First cd into the build
-directory that contains the hello executable::
+The ``hello`` test program isn't installed on the root filesystem by the ``lttng-ust``
+build, so we need to copy it over manually. First ``cd`` into the build
+directory that contains the ``hello`` executable::
$ cd build/tmp/work/core2_32-poky-linux/lttng-ust/2.0.5-r0/git/tests/hello/.libs
@@ -2170,10 +2170,10 @@ Copy that over to the target machine::
$ scp hello root@192.168.1.20:
-You now have the instrumented lttng 'hello world' test program on the
+You now have the instrumented LTTng "hello world" test program on the
target, ready to test.
-First, from the host, ssh to the target::
+First, from the host, connect to the target through SSH::
$ ssh -l root 192.168.1.47
The authenticity of host '192.168.1.47 (192.168.1.47)' can't be established.
@@ -2188,7 +2188,7 @@ Once on the target, use these steps to create a trace::
Session auto-20190303-021943 created.
Traces will be written in /home/root/lttng-traces/auto-20190303-021943
-Enable the events you want to trace (in this case all userspace events)::
+Enable the events you want to trace (in this case all user space events)::
root@crownbay:~# lttng enable-event --userspace --all
All UST events are enabled in channel channel0
@@ -2198,7 +2198,7 @@ Start the trace::
root@crownbay:~# lttng start
Tracing started for session auto-20190303-021943
-Run the instrumented hello world program::
+Run the instrumented "hello world" program::
root@crownbay:~# ./hello
Hello, World!
@@ -2222,7 +2222,7 @@ You can now view the trace in text form on the target::
.
You can now safely destroy the trace session (note that this doesn't delete the
-trace - it's still there in ~/lttng-traces)::
+trace --- it's still there in ``~/lttng-traces``)::
root@crownbay:~# lttng destroy
Session auto-20190303-021943 destroyed at /home/root
@@ -2260,27 +2260,27 @@ the entire blktrace and blkparse pipeline on the target, or you can run
blktrace in 'listen' mode on the target and have blktrace and blkparse
collect and analyze the data on the host (see the
":ref:`profile-manual/usage:Using blktrace Remotely`" section
-below). For the rest of this section we assume you've ssh'ed to the host and
-will be running blkrace on the target.
+below). For the rest of this section we assume you've to the host through SSH
+and will be running blktrace on the target.
Basic blktrace Usage
--------------------
-To record a trace, simply run the 'blktrace' command, giving it the name
+To record a trace, just run the ``blktrace`` command, giving it the name
of the block device you want to trace activity on::
root@crownbay:~# blktrace /dev/sdc
-In another shell, execute a workload you want to trace. ::
+In another shell, execute a workload you want to trace::
root@crownbay:/media/sdc# rm linux-2.6.19.2.tar.bz2; wget &YOCTO_DL_URL;/mirror/sources/linux-2.6.19.2.tar.bz2; sync
Connecting to downloads.yoctoproject.org (140.211.169.59:80)
linux-2.6.19.2.tar.b 100% \|*******************************\| 41727k 0:00:00 ETA
-Press Ctrl-C in the blktrace shell to stop the trace. It
+Press ``Ctrl-C`` in the blktrace shell to stop the trace. It
will display how many events were logged, along with the per-cpu file
-sizes (blktrace records traces in per-cpu kernel buffers and simply
-dumps them to userspace for blkparse to merge and sort later). ::
+sizes (blktrace records traces in per-cpu kernel buffers and just
+dumps them to user space for blkparse to merge and sort later)::
^C=== sdc ===
CPU 0: 7082 events, 332 KiB data
@@ -2296,7 +2296,7 @@ with the device name as the first part of the filename::
-rw-r--r-- 1 root root 339938 Oct 27 22:40 sdc.blktrace.0
-rw-r--r-- 1 root root 75753 Oct 27 22:40 sdc.blktrace.1
-To view the trace events, simply invoke 'blkparse' in the directory
+To view the trace events, just call ``blkparse`` in the directory
containing the trace files, giving it the device name that forms the
first part of the filenames::
@@ -2355,29 +2355,29 @@ first part of the filenames::
8,32 1 0 58.516990819 0 m N cfq3551 put_queue
CPU0 (sdc):
- Reads Queued: 0, 0KiB Writes Queued: 331, 26,284KiB
- Read Dispatches: 0, 0KiB Write Dispatches: 485, 40,484KiB
- Reads Requeued: 0 Writes Requeued: 0
- Reads Completed: 0, 0KiB Writes Completed: 511, 41,000KiB
- Read Merges: 0, 0KiB Write Merges: 13, 160KiB
- Read depth: 0 Write depth: 2
- IO unplugs: 23 Timer unplugs: 0
+ Reads Queued: 0, 0KiB Writes Queued: 331, 26,284KiB
+ Read Dispatches: 0, 0KiB Write Dispatches: 485, 40,484KiB
+ Reads Requeued: 0 Writes Requeued: 0
+ Reads Completed: 0, 0KiB Writes Completed: 511, 41,000KiB
+ Read Merges: 0, 0KiB Write Merges: 13, 160KiB
+ Read depth: 0 Write depth: 2
+ IO unplugs: 23 Timer unplugs: 0
CPU1 (sdc):
- Reads Queued: 0, 0KiB Writes Queued: 249, 15,800KiB
- Read Dispatches: 0, 0KiB Write Dispatches: 42, 1,600KiB
- Reads Requeued: 0 Writes Requeued: 0
- Reads Completed: 0, 0KiB Writes Completed: 16, 1,084KiB
- Read Merges: 0, 0KiB Write Merges: 40, 276KiB
- Read depth: 0 Write depth: 2
- IO unplugs: 30 Timer unplugs: 1
+ Reads Queued: 0, 0KiB Writes Queued: 249, 15,800KiB
+ Read Dispatches: 0, 0KiB Write Dispatches: 42, 1,600KiB
+ Reads Requeued: 0 Writes Requeued: 0
+ Reads Completed: 0, 0KiB Writes Completed: 16, 1,084KiB
+ Read Merges: 0, 0KiB Write Merges: 40, 276KiB
+ Read depth: 0 Write depth: 2
+ IO unplugs: 30 Timer unplugs: 1
Total (sdc):
- Reads Queued: 0, 0KiB Writes Queued: 580, 42,084KiB
- Read Dispatches: 0, 0KiB Write Dispatches: 527, 42,084KiB
- Reads Requeued: 0 Writes Requeued: 0
- Reads Completed: 0, 0KiB Writes Completed: 527, 42,084KiB
- Read Merges: 0, 0KiB Write Merges: 53, 436KiB
- IO unplugs: 53 Timer unplugs: 1
+ Reads Queued: 0, 0KiB Writes Queued: 580, 42,084KiB
+ Read Dispatches: 0, 0KiB Write Dispatches: 527, 42,084KiB
+ Reads Requeued: 0 Writes Requeued: 0
+ Reads Completed: 0, 0KiB Writes Completed: 527, 42,084KiB
+ Read Merges: 0, 0KiB Write Merges: 53, 436KiB
+ IO unplugs: 53 Timer unplugs: 1
Throughput (R/W): 0KiB/s / 719KiB/s
Events (sdc): 6,592 entries
@@ -2388,15 +2388,15 @@ first part of the filenames::
The report shows each event that was
found in the blktrace data, along with a summary of the overall block
I/O traffic during the run. You can look at the
-`blkparse <https://linux.die.net/man/1/blkparse>`__ manpage to learn the
+`blkparse <https://linux.die.net/man/1/blkparse>`__ manual page to learn the
meaning of each field displayed in the trace listing.
Live Mode
~~~~~~~~~
blktrace and blkparse are designed from the ground up to be able to
-operate together in a 'pipe mode' where the stdout of blktrace can be
-fed directly into the stdin of blkparse::
+operate together in a "pipe mode" where the standard output of blktrace can be
+fed directly into the standard input of blkparse::
root@crownbay:~# blktrace /dev/sdc -o - | blkparse -i -
@@ -2423,30 +2423,31 @@ tracer writes to, blktrace provides a way to trace without perturbing
the traced device at all by providing native support for sending all
trace data over the network.
-To have blktrace operate in this mode, start blktrace on the target
-system being traced with the -l option, along with the device to trace::
+To have blktrace operate in this mode, start blktrace in server mode on the
+host system, which is going to store the captured data::
- root@crownbay:~# blktrace -l /dev/sdc
+ $ blktrace -l
server: waiting for connections...
-On the host system, use the -h option to connect to the target system,
-also passing it the device to trace::
+On the target system that is going to be traced, start blktrace in client
+mode with the -h option to connect to the host system, also passing it the
+device to trace::
- $ blktrace -d /dev/sdc -h 192.168.1.43
+ root@crownbay:~# blktrace -d /dev/sdc -h 192.168.1.43
blktrace: connecting to 192.168.1.43
blktrace: connected!
-On the target system, you should see this::
+On the host system, you should see this::
server: connection from 192.168.1.43
-In another shell, execute a workload you want to trace. ::
+In another shell, execute a workload you want to trace::
root@crownbay:/media/sdc# rm linux-2.6.19.2.tar.bz2; wget &YOCTO_DL_URL;/mirror/sources/linux-2.6.19.2.tar.bz2; sync
Connecting to downloads.yoctoproject.org (140.211.169.59:80)
linux-2.6.19.2.tar.b 100% \|*******************************\| 41727k 0:00:00 ETA
-When it's done, do a Ctrl-C on the host system to stop the
+When it's done, do a ``Ctrl-C`` on the target system to stop the
trace::
^C=== sdc ===
@@ -2454,7 +2455,7 @@ trace::
CPU 1: 4109 events, 193 KiB data
Total: 11800 events (dropped 0), 554 KiB data
-On the target system, you should also see a trace summary for the trace
+On the host system, you should also see a trace summary for the trace
just ended::
server: end of run for 192.168.1.43:sdc
@@ -2464,14 +2465,14 @@ just ended::
Total: 11800 events (dropped 0), 554 KiB data
The blktrace instance on the host will
-save the target output inside a hostname-timestamp directory::
+save the target output inside a ``<hostname>-<timestamp>`` directory::
$ ls -al
drwxr-xr-x 10 root root 1024 Oct 28 02:40 .
drwxr-sr-x 4 root root 1024 Oct 26 18:24 ..
drwxr-xr-x 2 root root 1024 Oct 28 02:40 192.168.1.43-2012-10-28-02:40:56
-cd into that directory to see the output files::
+``cd`` into that directory to see the output files::
$ ls -l
-rw-r--r-- 1 root root 369193 Oct 28 02:44 sdc.blktrace.0
@@ -2499,29 +2500,29 @@ And run blkparse on the host system using the device name::
8,32 1 0 177.266696560 0 m N cfq1267 put_queue
CPU0 (sdc):
- Reads Queued: 0, 0KiB Writes Queued: 270, 21,708KiB
- Read Dispatches: 59, 2,628KiB Write Dispatches: 495, 39,964KiB
- Reads Requeued: 0 Writes Requeued: 0
- Reads Completed: 90, 2,752KiB Writes Completed: 543, 41,596KiB
- Read Merges: 0, 0KiB Write Merges: 9, 344KiB
- Read depth: 2 Write depth: 2
- IO unplugs: 20 Timer unplugs: 1
+ Reads Queued: 0, 0KiB Writes Queued: 270, 21,708KiB
+ Read Dispatches: 59, 2,628KiB Write Dispatches: 495, 39,964KiB
+ Reads Requeued: 0 Writes Requeued: 0
+ Reads Completed: 90, 2,752KiB Writes Completed: 543, 41,596KiB
+ Read Merges: 0, 0KiB Write Merges: 9, 344KiB
+ Read depth: 2 Write depth: 2
+ IO unplugs: 20 Timer unplugs: 1
CPU1 (sdc):
- Reads Queued: 688, 2,752KiB Writes Queued: 381, 20,652KiB
- Read Dispatches: 31, 124KiB Write Dispatches: 59, 2,396KiB
- Reads Requeued: 0 Writes Requeued: 0
- Reads Completed: 0, 0KiB Writes Completed: 11, 764KiB
- Read Merges: 598, 2,392KiB Write Merges: 88, 448KiB
- Read depth: 2 Write depth: 2
- IO unplugs: 52 Timer unplugs: 0
+ Reads Queued: 688, 2,752KiB Writes Queued: 381, 20,652KiB
+ Read Dispatches: 31, 124KiB Write Dispatches: 59, 2,396KiB
+ Reads Requeued: 0 Writes Requeued: 0
+ Reads Completed: 0, 0KiB Writes Completed: 11, 764KiB
+ Read Merges: 598, 2,392KiB Write Merges: 88, 448KiB
+ Read depth: 2 Write depth: 2
+ IO unplugs: 52 Timer unplugs: 0
Total (sdc):
- Reads Queued: 688, 2,752KiB Writes Queued: 651, 42,360KiB
- Read Dispatches: 90, 2,752KiB Write Dispatches: 554, 42,360KiB
- Reads Requeued: 0 Writes Requeued: 0
- Reads Completed: 90, 2,752KiB Writes Completed: 554, 42,360KiB
- Read Merges: 598, 2,392KiB Write Merges: 97, 792KiB
- IO unplugs: 72 Timer unplugs: 1
+ Reads Queued: 688, 2,752KiB Writes Queued: 651, 42,360KiB
+ Read Dispatches: 90, 2,752KiB Write Dispatches: 554, 42,360KiB
+ Reads Requeued: 0 Writes Requeued: 0
+ Reads Completed: 90, 2,752KiB Writes Completed: 554, 42,360KiB
+ Read Merges: 598, 2,392KiB Write Merges: 97, 792KiB
+ IO unplugs: 72 Timer unplugs: 1
Throughput (R/W): 15KiB/s / 238KiB/s
Events (sdc): 9,301 entries
@@ -2536,16 +2537,16 @@ Tracing Block I/O via 'ftrace'
It's also possible to trace block I/O using only
:ref:`profile-manual/usage:The 'trace events' Subsystem`, which
can be useful for casual tracing if you don't want to bother dealing with the
-userspace tools.
+user space tools.
-To enable tracing for a given device, use /sys/block/xxx/trace/enable,
-where xxx is the device name. This for example enables tracing for
-/dev/sdc::
+To enable tracing for a given device, use ``/sys/block/xxx/trace/enable``,
+where ``xxx`` is the device name. This for example enables tracing for
+``/dev/sdc``::
root@crownbay:/sys/kernel/debug/tracing# echo 1 > /sys/block/sdc/trace/enable
Once you've selected the device(s) you want
-to trace, selecting the 'blk' tracer will turn the blk tracer on::
+to trace, selecting the ``blk`` tracer will turn the blk tracer on::
root@crownbay:/sys/kernel/debug/tracing# cat available_tracers
blk function_graph function nop
@@ -2556,8 +2557,8 @@ Execute the workload you're interested in::
root@crownbay:/sys/kernel/debug/tracing# cat /media/sdc/testfile.txt
-And look at the output (note here that we're using 'trace_pipe' instead of
-trace to capture this trace - this allows us to wait around on the pipe
+And look at the output (note here that we're using ``trace_pipe`` instead of
+trace to capture this trace --- this allows us to wait around on the pipe
for data to appear)::
root@crownbay:/sys/kernel/debug/tracing# cat trace_pipe
@@ -2584,7 +2585,7 @@ And this turns off tracing for the specified device::
blktrace Documentation
----------------------
-Online versions of the man pages for the commands discussed in this
+Online versions of the manual pages for the commands discussed in this
section can be found here:
- https://linux.die.net/man/8/blktrace
@@ -2593,8 +2594,8 @@ section can be found here:
- https://linux.die.net/man/8/btrace
-The above manpages, along with manpages for the other blktrace utilities
-(btt, blkiomon, etc) can be found in the /doc directory of the blktrace
-tools git repo::
+The above manual pages, along with manuals for the other blktrace utilities
+(``btt``, ``blkiomon``, etc) can be found in the ``/doc`` directory of the blktrace
+tools git repository::
$ git clone git://git.kernel.dk/blktrace.git
diff --git a/documentation/ref-manual/TODO b/documentation/ref-manual/TODO
deleted file mode 100644
index 0510f54710..0000000000
--- a/documentation/ref-manual/TODO
+++ /dev/null
@@ -1,11 +0,0 @@
-Handbook Todo List:
-
- * Document adding a new IMAGE_FEATURE to the customising images section
- * Add instructions about using zaurus/openmoko emulation
- * Add component overview/block diagrams
- * Software Development intro should mention its software development for
- intended target and could be a different arch etc and thus special case.
- * Expand insane.bbclass documentation to cover tests
- * Document remaining classes (see list in ref-classes)
- * Document formfactor
-
diff --git a/documentation/ref-manual/classes.rst b/documentation/ref-manual/classes.rst
index 729aa259e0..9520d0bf7c 100644
--- a/documentation/ref-manual/classes.rst
+++ b/documentation/ref-manual/classes.rst
@@ -13,8 +13,14 @@ some default behavior.
Any :term:`Metadata` usually found in a recipe can also be
placed in a class file. Class files are identified by the extension
-``.bbclass`` and are usually placed in a ``classes/`` directory beneath
-the ``meta*/`` directory found in the :term:`Source Directory`.
+``.bbclass`` and are usually placed in one of a set of subdirectories
+beneath the ``meta*/`` directory found in the :term:`Source Directory`:
+
+ - ``classes-recipe/`` - classes intended to be inherited by recipes
+ individually
+ - ``classes-global/`` - classes intended to be inherited globally
+ - ``classes/`` - classes whose usage context is not clearly defined
+
Class files can also be pointed to by
:term:`BUILDDIR` (e.g. ``build/``) in the same way as
``.conf`` files in the ``conf`` directory. Class files are searched for
@@ -22,16 +28,16 @@ in :term:`BBPATH` using the same method by which ``.conf``
files are searched.
This chapter discusses only the most useful and important classes. Other
-classes do exist within the ``meta/classes`` directory in the Source
+classes do exist within the ``meta/classes*`` directories in the Source
Directory. You can reference the ``.bbclass`` files directly for more
information.
.. _ref-classes-allarch:
-``allarch.bbclass``
-===================
+``allarch``
+===========
-The ``allarch`` class is inherited by recipes that do not produce
+The :ref:`ref-classes-allarch` class is inherited by recipes that do not produce
architecture-specific output. The class disables functionality that is
normally needed for recipes that produce executable binaries (such as
building the cross-compiler and a C library as pre-requisites, and
@@ -43,43 +49,43 @@ splitting out of debug symbols during packaging).
produce packages that depend on tunings through use of the
:term:`RDEPENDS` and
:term:`TUNE_PKGARCH` variables, should never be
- configured for all architectures using ``allarch``. This is the case
+ configured for all architectures using :ref:`ref-classes-allarch`. This is the case
even if the recipes do not produce architecture-specific output.
Configuring such recipes for all architectures causes the
- ``do_package_write_*`` tasks to
+ :ref:`do_package_write_* <ref-tasks-package_write_deb>` tasks to
have different signatures for the machines with different tunings.
Additionally, unnecessary rebuilds occur every time an image for a
different :term:`MACHINE` is built even when the recipe never changes.
-By default, all recipes inherit the :ref:`base <ref-classes-base>` and
-:ref:`package <ref-classes-package>` classes, which enable
+By default, all recipes inherit the :ref:`ref-classes-base` and
+:ref:`ref-classes-package` classes, which enable
functionality needed for recipes that produce executable output. If your
recipe, for example, only produces packages that contain configuration
files, media files, or scripts (e.g. Python and Perl), then it should
-inherit the ``allarch`` class.
+inherit the :ref:`ref-classes-allarch` class.
.. _ref-classes-archiver:
-``archiver.bbclass``
-====================
+``archiver``
+============
-The ``archiver`` class supports releasing source code and other
+The :ref:`ref-classes-archiver` class supports releasing source code and other
materials with the binaries.
-For more details on the source archiver, see the
-":ref:`dev-manual/common-tasks:maintaining open source license compliance during your product's lifecycle`"
+For more details on the source :ref:`ref-classes-archiver`, see the
+":ref:`dev-manual/licenses:maintaining open source license compliance during your product's lifecycle`"
section in the Yocto Project Development Tasks Manual. You can also see
the :term:`ARCHIVER_MODE` variable for information
about the variable flags (varflags) that help control archive creation.
.. _ref-classes-autotools:
-``autotools*.bbclass``
-======================
+``autotools*``
+==============
-The ``autotools*`` classes support packages built with the
-`GNU Autotools <https://en.wikipedia.org/wiki/GNU_Autotools>`__.
+The :ref:`autotools* <ref-classes-autotools>` classes support packages built with the
+:wikipedia:`GNU Autotools <GNU_Autotools>`.
The ``autoconf``, ``automake``, and ``libtool`` packages bring
standardization. This class defines a set of tasks (e.g. ``configure``,
@@ -87,16 +93,16 @@ standardization. This class defines a set of tasks (e.g. ``configure``,
should usually be enough to define a few standard variables and then
simply ``inherit autotools``. These classes can also work with software
that emulates Autotools. For more information, see the
-":ref:`dev-manual/common-tasks:autotooled package`" section
+":ref:`dev-manual/new-recipe:building an autotooled package`" section
in the Yocto Project Development Tasks Manual.
-By default, the ``autotools*`` classes use out-of-tree builds (i.e.
+By default, the :ref:`autotools* <ref-classes-autotools>` classes use out-of-tree builds (i.e.
``autotools.bbclass`` building with ``B != S``).
If the software being built by a recipe does not support using
out-of-tree builds, you should have the recipe inherit the
-``autotools-brokensep`` class. The ``autotools-brokensep`` class behaves
-the same as the ``autotools`` class but builds with :term:`B`
+:ref:`autotools-brokensep <ref-classes-autotools>` class. The :ref:`autotools-brokensep <ref-classes-autotools>` class behaves
+the same as the :ref:`ref-classes-autotools` class but builds with :term:`B`
== :term:`S`. This method is useful when out-of-tree build
support is either not present or is broken.
@@ -106,35 +112,34 @@ support is either not present or is broken.
all possible.
It's useful to have some idea of how the tasks defined by the
-``autotools*`` classes work and what they do behind the scenes.
+:ref:`autotools* <ref-classes-autotools>` classes work and what they do behind the scenes.
-- :ref:`ref-tasks-configure` - Regenerates the
+- :ref:`ref-tasks-configure` --- regenerates the
configure script (using ``autoreconf``) and then launches it with a
standard set of arguments used during cross-compilation. You can pass
additional parameters to ``configure`` through the :term:`EXTRA_OECONF`
or :term:`PACKAGECONFIG_CONFARGS`
variables.
-- :ref:`ref-tasks-compile` - Runs ``make`` with
+- :ref:`ref-tasks-compile` --- runs ``make`` with
arguments that specify the compiler and linker. You can pass
additional arguments through the :term:`EXTRA_OEMAKE` variable.
-- :ref:`ref-tasks-install` - Runs ``make install`` and
+- :ref:`ref-tasks-install` --- runs ``make install`` and
passes in ``${``\ :term:`D`\ ``}`` as ``DESTDIR``.
.. _ref-classes-base:
-``base.bbclass``
-================
+``base``
+========
-The ``base`` class is special in that every ``.bb`` file implicitly
+The :ref:`ref-classes-base` class is special in that every ``.bb`` file implicitly
inherits the class. This class contains definitions for standard basic
tasks such as fetching, unpacking, configuring (empty by default),
compiling (runs any ``Makefile`` present), installing (empty by default)
-and packaging (empty by default). These classes are often overridden or
-extended by other classes such as the
-:ref:`autotools <ref-classes-autotools>` class or the
-:ref:`package <ref-classes-package>` class.
+and packaging (empty by default). These tasks are often overridden or
+extended by other classes such as the :ref:`ref-classes-autotools` class or the
+:ref:`ref-classes-package` class.
The class also contains some commonly used functions such as
``oe_runmake``, which runs ``make`` with the arguments specified in
@@ -143,18 +148,18 @@ arguments passed directly to ``oe_runmake``.
.. _ref-classes-bash-completion:
-``bash-completion.bbclass``
-===========================
+``bash-completion``
+===================
Sets up packaging and dependencies appropriate for recipes that build
software that includes bash-completion data.
.. _ref-classes-bin-package:
-``bin_package.bbclass``
-=======================
+``bin_package``
+===============
-The ``bin_package`` class is a helper class for recipes that extract the
+The :ref:`ref-classes-bin-package` class is a helper class for recipes that extract the
contents of a binary package (e.g. an RPM) and install those contents
rather than building the binary from source. The binary package is
extracted and new packages in the configured output package format are
@@ -178,10 +183,10 @@ example use for this class.
.. _ref-classes-binconfig:
-``binconfig.bbclass``
-=====================
+``binconfig``
+=============
-The ``binconfig`` class helps to correct paths in shell scripts.
+The :ref:`ref-classes-binconfig` class helps to correct paths in shell scripts.
Before ``pkg-config`` had become widespread, libraries shipped shell
scripts to give information about the libraries and include paths needed
@@ -198,34 +203,33 @@ information.
.. _ref-classes-binconfig-disabled:
-``binconfig-disabled.bbclass``
-==============================
+``binconfig-disabled``
+======================
-An alternative version of the :ref:`binconfig <ref-classes-binconfig>`
+An alternative version of the :ref:`ref-classes-binconfig`
class, which disables binary configuration scripts by making them return
an error in favor of using ``pkg-config`` to query the information. The
-scripts to be disabled should be specified using the
-:term:`BINCONFIG` variable within the recipe inheriting
-the class.
+scripts to be disabled should be specified using the :term:`BINCONFIG`
+variable within the recipe inheriting the class.
.. _ref-classes-buildhistory:
-``buildhistory.bbclass``
-========================
+``buildhistory``
+================
-The ``buildhistory`` class records a history of build output metadata,
+The :ref:`ref-classes-buildhistory` class records a history of build output metadata,
which can be used to detect possible regressions as well as used for
analysis of the build output. For more information on using Build
History, see the
-":ref:`dev-manual/common-tasks:maintaining build output quality`"
+":ref:`dev-manual/build-quality:maintaining build output quality`"
section in the Yocto Project Development Tasks Manual.
.. _ref-classes-buildstats:
-``buildstats.bbclass``
-======================
+``buildstats``
+==============
-The ``buildstats`` class records performance statistics about each task
+The :ref:`ref-classes-buildstats` class records performance statistics about each task
executed during the build (e.g. elapsed time, CPU usage, and I/O usage).
When you use this class, the output goes into the
@@ -239,23 +243,99 @@ Collecting build statistics is enabled by default through the
:term:`USER_CLASSES` variable from your
``local.conf`` file. Consequently, you do not have to do anything to
enable the class. However, if you want to disable the class, simply
-remove "buildstats" from the :term:`USER_CLASSES` list.
+remove ":ref:`ref-classes-buildstats`" from the :term:`USER_CLASSES` list.
.. _ref-classes-buildstats-summary:
-``buildstats-summary.bbclass``
-==============================
+``buildstats-summary``
+======================
When inherited globally, prints statistics at the end of the build on
sstate re-use. In order to function, this class requires the
-:ref:`buildstats <ref-classes-buildstats>` class be enabled.
+:ref:`ref-classes-buildstats` class be enabled.
+
+.. _ref-classes-cargo:
+
+``cargo``
+=========
+
+The :ref:`ref-classes-cargo` class allows to compile Rust language programs
+using `Cargo <https://doc.rust-lang.org/cargo/>`__. Cargo is Rust's package
+manager, allowing to fetch package dependencies and build your program.
+
+Using this class makes it very easy to build Rust programs. All you need
+is to use the :term:`SRC_URI` variable to point to a source repository
+which can be built by Cargo, typically one that was created by the
+``cargo new`` command, containing a ``Cargo.toml`` file, a ``Cargo.lock`` file and a ``src``
+subdirectory.
+
+If you want to build and package tests of the program, inherit the
+:ref:`ref-classes-ptest-cargo` class instead of :ref:`ref-classes-cargo`.
+
+You will find an example (that show also how to handle possible git source dependencies) in the
+:oe_git:`zvariant_3.12.0.bb </openembedded-core/tree/meta-selftest/recipes-extended/zvariant/zvariant_3.12.0.bb>`
+recipe. Another example, with only crate dependencies, is the
+:oe_git:`uutils-coreutils </meta-openembedded/tree/meta-oe/recipes-core/uutils-coreutils>`
+recipe, which was generated by the `cargo-bitbake <https://crates.io/crates/cargo-bitbake>`__
+tool.
+
+This class inherits the :ref:`ref-classes-cargo_common` class.
+
+.. _ref-classes-cargo_c:
+
+``cargo_c``
+===========
+
+The :ref:`ref-classes-cargo_c` class can be inherited by a recipe to generate
+a Rust library that can be called by C/C++ code. The recipe which inherits this
+class has to only replace ``inherit cargo`` by ``inherit cargo_c``.
+
+See the :yocto_git:`rust-c-lib-example_git.bb
+</poky/tree/meta-selftest/recipes-devtools/rust/rust-c-lib-example_git.bb>`
+example recipe.
+
+.. _ref-classes-cargo_common:
+
+``cargo_common``
+================
+
+The :ref:`ref-classes-cargo_common` class is an internal class
+that is not intended to be used directly.
+
+An exception is the "rust" recipe, to build the Rust compiler and runtime
+library, which is built by Cargo but cannot use the :ref:`ref-classes-cargo`
+class. This is why this class was introduced.
+
+.. _ref-classes-cargo-update-recipe-crates:
+
+``cargo-update-recipe-crates``
+===============================
+
+The :ref:`ref-classes-cargo-update-recipe-crates` class allows
+recipe developers to update the list of Cargo crates in :term:`SRC_URI`
+by reading the ``Cargo.lock`` file in the source tree.
+
+To do so, create a recipe for your program, for example using
+:doc:`devtool </ref-manual/devtool-reference>`,
+make it inherit the :ref:`ref-classes-cargo` and
+:ref:`ref-classes-cargo-update-recipe-crates` and run::
+
+ bitbake -c update_crates recipe
+
+This creates a ``recipe-crates.inc`` file that you can include in your
+recipe::
+
+ require ${BPN}-crates.inc
+
+That's also something you can achieve by using the
+`cargo-bitbake <https://crates.io/crates/cargo-bitbake>`__ tool.
.. _ref-classes-ccache:
-``ccache.bbclass``
-==================
+``ccache``
+==========
-The ``ccache`` class enables the C/C++ Compiler Cache for the build.
+The :ref:`ref-classes-ccache` class enables the C/C++ Compiler Cache for the build.
This class is used to give a minor performance boost during the build.
See https://ccache.samba.org/ for information on the C/C++ Compiler
@@ -269,84 +349,118 @@ this class is not recommended.
.. _ref-classes-chrpath:
-``chrpath.bbclass``
-===================
+``chrpath``
+===========
-The ``chrpath`` class is a wrapper around the "chrpath" utility, which
-is used during the build process for ``nativesdk``, ``cross``, and
-``cross-canadian`` recipes to change ``RPATH`` records within binaries
+The :ref:`ref-classes-chrpath` class is a wrapper around the "chrpath" utility, which
+is used during the build process for :ref:`ref-classes-nativesdk`, :ref:`ref-classes-cross`, and
+:ref:`ref-classes-cross-canadian` recipes to change ``RPATH`` records within binaries
in order to make them relocatable.
.. _ref-classes-cmake:
-``cmake.bbclass``
-=================
+``cmake``
+=========
-The ``cmake`` class allows for recipes that need to build software using
-the `CMake <https://cmake.org/overview/>`__ build system. You can use
-the :term:`EXTRA_OECMAKE` variable to specify
-additional configuration options to be passed using the ``cmake``
-command line.
+The :ref:`ref-classes-cmake` class allows recipes to build software using the
+`CMake <https://cmake.org/overview/>`__ build system. You can use the
+:term:`EXTRA_OECMAKE` variable to specify additional configuration options to
+pass to the ``cmake`` command line.
+
+By default, the :ref:`ref-classes-cmake` class uses
+`Ninja <https://ninja-build.org/>`__ instead of GNU make for building, which
+offers better build performance. If a recipe is broken with Ninja, then the
+recipe can set the :term:`OECMAKE_GENERATOR` variable to ``Unix Makefiles`` to
+use GNU make instead.
+
+If you need to install custom CMake toolchain files supplied by the application
+being built, you should install them (during :ref:`ref-tasks-install`) to the
+preferred CMake Module directory: ``${D}${datadir}/cmake/modules/``.
-On the occasion that you would be installing custom CMake toolchain
-files supplied by the application being built, you should install them
-to the preferred CMake Module directory: ``${D}${datadir}/cmake/``
-Modules during
-:ref:`ref-tasks-install`.
+.. _ref-classes-cmake-qemu:
+
+``cmake-qemu``
+==============
+
+The :ref:`ref-classes-cmake-qemu` class might be used instead of the
+:ref:`ref-classes-cmake` class. In addition to the features provided by the
+:ref:`ref-classes-cmake` class, the :ref:`ref-classes-cmake-qemu` class passes
+the ``CMAKE_CROSSCOMPILING_EMULATOR`` setting to ``cmake``. This allows to use
+QEMU user-mode emulation for the execution of cross-compiled binaries on the
+host machine. For more information about ``CMAKE_CROSSCOMPILING_EMULATOR``
+please refer to the `related section of the CMake documentation
+<https://cmake.org/cmake/help/latest/variable/CMAKE_CROSSCOMPILING_EMULATOR.html>`__.
+
+Not all platforms are supported by QEMU. This class only works for machines with
+``qemu-usermode`` in the :ref:`ref-features-machine`. Using QEMU user-mode therefore
+involves a certain risk, which is also the reason why this feature is not part of
+the main :ref:`ref-classes-cmake` class by default.
+
+One use case is the execution of cross-compiled unit tests with CTest on the build
+machine. If ``CMAKE_CROSSCOMPILING_EMULATOR`` is configured::
+
+ cmake --build --target test
+
+works transparently with QEMU user-mode.
+
+If the CMake project is developed with this use case in mind this works very nicely.
+This also applies to an IDE configured to use ``cmake-native`` for cross-compiling.
.. _ref-classes-cml1:
-``cml1.bbclass``
-================
+``cml1``
+========
-The ``cml1`` class provides basic support for the Linux kernel style
-build configuration system.
+The :ref:`ref-classes-cml1` class provides basic support for the Linux kernel style
+build configuration system. "cml" stands for "Configuration Menu Language", which
+originates from the Linux kernel but is also used in other projects such as U-Boot
+and BusyBox. It could have been called "kconfig" too.
.. _ref-classes-compress_doc:
-``compress_doc.bbclass``
-========================
+``compress_doc``
+================
-Enables compression for man pages and info pages. This class is intended
+Enables compression for manual and info pages. This class is intended
to be inherited globally. The default compression mechanism is gz (gzip)
but you can select an alternative mechanism by setting the
:term:`DOC_COMPRESS` variable.
.. _ref-classes-copyleft_compliance:
-``copyleft_compliance.bbclass``
-===============================
+``copyleft_compliance``
+=======================
-The ``copyleft_compliance`` class preserves source code for the purposes
-of license compliance. This class is an alternative to the ``archiver``
+The :ref:`ref-classes-copyleft_compliance` class preserves source code for the purposes
+of license compliance. This class is an alternative to the :ref:`ref-classes-archiver`
class and is still used by some users even though it has been deprecated
-in favor of the :ref:`archiver <ref-classes-archiver>` class.
+in favor of the :ref:`ref-classes-archiver` class.
.. _ref-classes-copyleft_filter:
-``copyleft_filter.bbclass``
-===========================
+``copyleft_filter``
+===================
-A class used by the :ref:`archiver <ref-classes-archiver>` and
-:ref:`copyleft_compliance <ref-classes-copyleft_compliance>` classes
+A class used by the :ref:`ref-classes-archiver` and
+:ref:`ref-classes-copyleft_compliance` classes
for filtering licenses. The ``copyleft_filter`` class is an internal
class and is not intended to be used directly.
.. _ref-classes-core-image:
-``core-image.bbclass``
-======================
+``core-image``
+==============
-The ``core-image`` class provides common definitions for the
+The :ref:`ref-classes-core-image` class provides common definitions for the
``core-image-*`` image recipes, such as support for additional
:term:`IMAGE_FEATURES`.
.. _ref-classes-cpan:
-``cpan*.bbclass``
-=================
+``cpan*``
+=========
-The ``cpan*`` classes support Perl modules.
+The :ref:`cpan* <ref-classes-cpan>` classes support Perl modules.
Recipes for Perl modules are simple. These recipes usually only need to
point to the source's archive and then inherit the proper class file.
@@ -359,23 +473,49 @@ authors used.
- Modules that use ``Build.PL``-based build system require using
``cpan_build.bbclass`` in their recipes.
-Both build methods inherit the ``cpan-base`` class for basic Perl
+Both build methods inherit the :ref:`cpan-base <ref-classes-cpan>` class for basic Perl
support.
+.. _ref-classes-create-spdx:
+
+``create-spdx``
+===============
+
+The :ref:`ref-classes-create-spdx` class provides support for
+automatically creating :term:`SPDX` :term:`SBOM` documents based upon image
+and SDK contents.
+
+This class is meant to be inherited globally from a configuration file::
+
+ INHERIT += "create-spdx"
+
+The toplevel :term:`SPDX` output file is generated in JSON format as a
+``IMAGE-MACHINE.spdx.json`` file in ``tmp/deploy/images/MACHINE/`` inside the
+:term:`Build Directory`. There are other related files in the same directory,
+as well as in ``tmp/deploy/spdx``.
+
+The exact behaviour of this class, and the amount of output can be controlled
+by the :term:`SPDX_PRETTY`, :term:`SPDX_ARCHIVE_PACKAGED`,
+:term:`SPDX_ARCHIVE_SOURCES` and :term:`SPDX_INCLUDE_SOURCES` variables.
+
+See the description of these variables and the
+":ref:`dev-manual/sbom:creating a software bill of materials`"
+section in the Yocto Project Development Manual for more details.
+
.. _ref-classes-cross:
-``cross.bbclass``
-=================
+``cross``
+=========
-The ``cross`` class provides support for the recipes that build the
+The :ref:`ref-classes-cross` class provides support for the recipes that build the
cross-compilation tools.
.. _ref-classes-cross-canadian:
-``cross-canadian.bbclass``
-==========================
+``cross-canadian``
+==================
-The ``cross-canadian`` class provides support for the recipes that build
+The :ref:`ref-classes-cross-canadian` class provides support for the recipes that build
the Canadian Cross-compilation tools for SDKs. See the
":ref:`overview-manual/concepts:cross-development toolchain generation`"
section in the Yocto Project Overview and Concepts Manual for more
@@ -383,10 +523,10 @@ discussion on these cross-compilation tools.
.. _ref-classes-crosssdk:
-``crosssdk.bbclass``
-====================
+``crosssdk``
+============
-The ``crosssdk`` class provides support for the recipes that build the
+The :ref:`ref-classes-crosssdk` class provides support for the recipes that build the
cross-compilation tools used for building SDKs. See the
":ref:`overview-manual/concepts:cross-development toolchain generation`"
section in the Yocto Project Overview and Concepts Manual for more
@@ -394,26 +534,81 @@ discussion on these cross-compilation tools.
.. _ref-classes-cve-check:
-``cve-check.bbclass``
-=====================
+``cve-check``
+=============
-The ``cve-check`` class looks for known CVEs (Common Vulnerabilities
-and Exposures) while building an image. This class is meant to be
+The :ref:`ref-classes-cve-check` class looks for known CVEs (Common Vulnerabilities
+and Exposures) while building with BitBake. This class is meant to be
inherited globally from a configuration file::
INHERIT += "cve-check"
+To filter out obsolete CVE database entries which are known not to impact software from Poky and OE-Core,
+add following line to the build configuration file::
+
+ include cve-extra-exclusions.inc
+
You can also look for vulnerabilities in specific packages by passing
-``-c cve_check`` to BitBake. You will find details in the
-":ref:`dev-manual/common-tasks:checking for vulnerabilities`"
+``-c cve_check`` to BitBake.
+
+After building the software with Bitbake, CVE check output reports are available in ``tmp/deploy/cve``
+and image specific summaries in ``tmp/deploy/images/*.cve`` or ``tmp/deploy/images/*.json`` files.
+
+When building, the CVE checker will emit build time warnings for any detected
+issues which are in the state ``Unpatched``, meaning that CVE issue seems to affect the software component
+and version being compiled and no patches to address the issue are applied. Other states
+for detected CVE issues are: ``Patched`` meaning that a patch to address the issue is already
+applied, and ``Ignored`` meaning that the issue can be ignored.
+
+The ``Patched`` state of a CVE issue is detected from patch files with the format
+``CVE-ID.patch``, e.g. ``CVE-2019-20633.patch``, in the :term:`SRC_URI` and using
+CVE metadata of format ``CVE: CVE-ID`` in the commit message of the patch file.
+
+.. note::
+
+ Commit message metadata (``CVE: CVE-ID`` in a patch header) will not be scanned
+ in any patches that are remote, i.e. that are anything other than local files
+ referenced via ``file://`` in SRC_URI. However, a ``CVE-ID`` in a remote patch
+ file name itself will be registered.
+
+If the recipe adds ``CVE-ID`` as flag of the :term:`CVE_STATUS` variable with status
+mapped to ``Ignored``, then the CVE state is reported as ``Ignored``::
+
+ CVE_STATUS[CVE-2020-15523] = "not-applicable-platform: Issue only applies on Windows"
+
+If CVE check reports that a recipe contains false positives or false negatives, these may be
+fixed in recipes by adjusting the CVE product name using :term:`CVE_PRODUCT` and :term:`CVE_VERSION` variables.
+:term:`CVE_PRODUCT` defaults to the plain recipe name :term:`BPN` which can be adjusted to one or more CVE
+database vendor and product pairs using the syntax::
+
+ CVE_PRODUCT = "flex_project:flex"
+
+where ``flex_project`` is the CVE database vendor name and ``flex`` is the product name. Similarly
+if the default recipe version :term:`PV` does not match the version numbers of the software component
+in upstream releases or the CVE database, then the :term:`CVE_VERSION` variable can be used to set the
+CVE database compatible version number, for example::
+
+ CVE_VERSION = "2.39"
+
+Any bugs or missing or incomplete information in the CVE database entries should be fixed in the CVE database
+via the `NVD feedback form <https://nvd.nist.gov/info/contact-form>`__.
+
+Users should note that security is a process, not a product, and thus also CVE checking, analyzing results,
+patching and updating the software should be done as a regular process. The data and assumptions
+required for CVE checker to reliably detect issues are frequently broken in various ways.
+These can only be detected by reviewing the details of the issues and iterating over the generated reports,
+and following what happens in other Linux distributions and in the greater open source community.
+
+You will find some more details in the
+":ref:`dev-manual/vulnerabilities:checking for vulnerabilities`"
section in the Development Tasks Manual.
.. _ref-classes-debian:
-``debian.bbclass``
-==================
+``debian``
+==========
-The ``debian`` class renames output packages so that they follow the
+The :ref:`ref-classes-debian` class renames output packages so that they follow the
Debian naming policy (i.e. ``glibc`` becomes ``libc6`` and
``glibc-devel`` becomes ``libc6-dev``.) Renaming includes the library
name and version as part of the package name.
@@ -425,10 +620,10 @@ naming scheme.
.. _ref-classes-deploy:
-``deploy.bbclass``
-==================
+``deploy``
+==========
-The ``deploy`` class handles deploying files to the
+The :ref:`ref-classes-deploy` class handles deploying files to the
:term:`DEPLOY_DIR_IMAGE` directory. The main
function of this class is to allow the deploy step to be accelerated by
shared state. Recipes that inherit this class should define their own
@@ -439,25 +634,73 @@ add the task at the appropriate place, which is usually after
:ref:`ref-tasks-install`. The class then takes care of
staging the files from :term:`DEPLOYDIR` to :term:`DEPLOY_DIR_IMAGE`.
+.. _ref-classes-devicetree:
+
+``devicetree``
+==============
+
+The :ref:`ref-classes-devicetree` class allows to build a recipe that compiles
+device tree source files that are not in the kernel tree.
+
+The compilation of out-of-tree device tree sources is the same as the kernel
+in-tree device tree compilation process. This includes the ability to include
+sources from the kernel such as SoC ``dtsi`` files as well as C header files,
+such as ``gpio.h``.
+
+The :ref:`ref-tasks-compile` task will compile two kinds of files:
+
+- Regular device tree sources with a ``.dts`` extension.
+
+- Device tree overlays, detected from the presence of the ``/plugin/;``
+ string in the file contents.
+
+This class deploys the generated device tree binaries into
+``${``\ :term:`DEPLOY_DIR_IMAGE`\ ``}/devicetree/``. This is similar to
+what the :ref:`ref-classes-kernel-devicetree` class does, with the added
+``devicetree`` subdirectory to avoid name clashes. Additionally, the device
+trees are populated into the sysroot for access via the sysroot from within
+other recipes.
+
+By default, all device tree sources located in :term:`DT_FILES_PATH` directory
+are compiled. To select only particular sources, set :term:`DT_FILES` to
+a space-separated list of files (relative to :term:`DT_FILES_PATH`). For
+convenience, both ``.dts`` and ``.dtb`` extensions can be used.
+
+An extra padding is appended to non-overlay device trees binaries. This
+can typically be used as extra space for adding extra properties at boot time.
+The padding size can be modified by setting :term:`DT_PADDING_SIZE`
+to the desired size, in bytes.
+
+See :oe_git:`devicetree.bbclass sources
+</openembedded-core/tree/meta/classes-recipe/devicetree.bbclass>`
+for further variables controlling this class.
+
+Here is an excerpt of an example ``recipes-kernel/linux/devicetree-acme.bb``
+recipe inheriting this class::
+
+ inherit devicetree
+ COMPATIBLE_MACHINE = "^mymachine$"
+ SRC_URI:mymachine = "file://mymachine.dts"
+
.. _ref-classes-devshell:
-``devshell.bbclass``
-====================
+``devshell``
+============
-The ``devshell`` class adds the ``do_devshell`` task. Distribution
-policy dictates whether to include this class. See the ":ref:`dev-manual/common-tasks:using a development shell`"
+The :ref:`ref-classes-devshell` class adds the :ref:`ref-tasks-devshell` task. Distribution
+policy dictates whether to include this class. See the ":ref:`dev-manual/development-shell:using a development shell`"
section in the Yocto Project Development Tasks Manual for more
-information about using ``devshell``.
+information about using :ref:`ref-classes-devshell`.
.. _ref-classes-devupstream:
-``devupstream.bbclass``
-=======================
+``devupstream``
+===============
-The ``devupstream`` class uses
+The :ref:`ref-classes-devupstream` class uses
:term:`BBCLASSEXTEND` to add a variant of the
recipe that fetches from an alternative URI (e.g. Git) instead of a
-tarball. Following is an example::
+tarball. Here is an example::
BBCLASSEXTEND = "devupstream:target"
SRC_URI:class-devupstream = "git://git.example.com/example;branch=main"
@@ -476,10 +719,10 @@ Any development-specific adjustments can be done by using the
The class
currently only supports creating a development variant of the target
-recipe, not ``native`` or ``nativesdk`` variants.
+recipe, not :ref:`ref-classes-native` or :ref:`ref-classes-nativesdk` variants.
The :term:`BBCLASSEXTEND` syntax (i.e. ``devupstream:target``) provides
-support for ``native`` and ``nativesdk`` variants. Consequently, this
+support for :ref:`ref-classes-native` and :ref:`ref-classes-nativesdk` variants. Consequently, this
functionality can be added in a future release.
Support for other version control systems such as Subversion is limited
@@ -488,10 +731,10 @@ due to BitBake's automatic fetch dependencies (e.g.
.. _ref-classes-externalsrc:
-``externalsrc.bbclass``
-=======================
+``externalsrc``
+===============
-The ``externalsrc`` class supports building software from source code
+The :ref:`ref-classes-externalsrc` class supports building software from source code
that is external to the OpenEmbedded build system. Building software
from an external source tree means that the build system's normal fetch,
unpack, and patch process is not used.
@@ -499,9 +742,8 @@ unpack, and patch process is not used.
By default, the OpenEmbedded build system uses the :term:`S`
and :term:`B` variables to locate unpacked recipe source code
and to build it, respectively. When your recipe inherits the
-``externalsrc`` class, you use the
-:term:`EXTERNALSRC` and
-:term:`EXTERNALSRC_BUILD` variables to
+:ref:`ref-classes-externalsrc` class, you use the
+:term:`EXTERNALSRC` and :term:`EXTERNALSRC_BUILD` variables to
ultimately define :term:`S` and :term:`B`.
By default, this class expects the source code to support recipe builds
@@ -516,19 +758,18 @@ See these variables for more information:
:term:`WORKDIR`, :term:`BPN`, and
:term:`PV`,
-For more information on the ``externalsrc`` class, see the comments in
+For more information on the :ref:`ref-classes-externalsrc` class, see the comments in
``meta/classes/externalsrc.bbclass`` in the :term:`Source Directory`.
-For information on how to use the
-``externalsrc`` class, see the
-":ref:`dev-manual/common-tasks:building software from an external source`"
+For information on how to use the :ref:`ref-classes-externalsrc` class, see the
+":ref:`dev-manual/building:building software from an external source`"
section in the Yocto Project Development Tasks Manual.
.. _ref-classes-extrausers:
-``extrausers.bbclass``
-======================
+``extrausers``
+==============
-The ``extrausers`` class allows additional user and group configuration
+The :ref:`ref-classes-extrausers` class allows additional user and group configuration
to be applied at the image level. Inheriting this class either globally
or from an image recipe allows additional user and group operations to
be performed using the
@@ -536,13 +777,11 @@ be performed using the
.. note::
- The user and group operations added using the
- :ref:`extrausers <ref-classes-extrausers>`
+ The user and group operations added using the :ref:`ref-classes-extrausers`
class are not tied to a specific recipe outside of the recipe for the
image. Thus, the operations can be performed across the image as a
- whole. Use the
- :ref:`useradd <ref-classes-useradd>`
- class to add user and group configuration to a specific recipe.
+ whole. Use the :ref:`ref-classes-useradd` class to add user and group
+ configuration to a specific recipe.
Here is an example that uses this class in an image recipe::
@@ -580,19 +819,19 @@ Finally, here is an example that sets the root password::
.. note::
From a security perspective, hardcoding a default password is not
- generally a good idea or even legal in some jurisdictions. It is
- recommended that you do not do this if you are building a production
+ generally a good idea or even legal in some jurisdictions. It is
+ recommended that you do not do this if you are building a production
image.
.. _ref-classes-features_check:
-``features_check.bbclass``
-=================================
+``features_check``
+==================
-The ``features_check`` class allows individual recipes to check
-for required and conflicting
-:term:`DISTRO_FEATURES`, :term:`MACHINE_FEATURES` or :term:`COMBINED_FEATURES`.
+The :ref:`ref-classes-features_check` class allows individual recipes to check
+for required and conflicting :term:`DISTRO_FEATURES`, :term:`MACHINE_FEATURES`
+or :term:`COMBINED_FEATURES`.
This class provides support for the following variables:
@@ -613,10 +852,10 @@ triggered.
.. _ref-classes-fontcache:
-``fontcache.bbclass``
-=====================
+``fontcache``
+=============
-The ``fontcache`` class generates the proper post-install and
+The :ref:`ref-classes-fontcache` class generates the proper post-install and
post-remove (postinst and postrm) scriptlets for font packages. These
scriptlets call ``fc-cache`` (part of ``Fontconfig``) to add the fonts
to the font information cache. Since the cache files are
@@ -629,20 +868,20 @@ packages containing the fonts.
.. _ref-classes-fs-uuid:
-``fs-uuid.bbclass``
-===================
+``fs-uuid``
+===========
-The ``fs-uuid`` class extracts UUID from
+The :ref:`ref-classes-fs-uuid` class extracts UUID from
``${``\ :term:`ROOTFS`\ ``}``, which must have been built
-by the time that this function gets called. The ``fs-uuid`` class only
+by the time that this function gets called. The :ref:`ref-classes-fs-uuid` class only
works on ``ext`` file systems and depends on ``tune2fs``.
.. _ref-classes-gconf:
-``gconf.bbclass``
-=================
+``gconf``
+=========
-The ``gconf`` class provides common functionality for recipes that need
+The :ref:`ref-classes-gconf` class provides common functionality for recipes that need
to install GConf schemas. The schemas will be put into a separate
package (``${``\ :term:`PN`\ ``}-gconf``) that is created
automatically when this class is inherited. This package uses the
@@ -651,29 +890,80 @@ register and unregister the schemas in the target image.
.. _ref-classes-gettext:
-``gettext.bbclass``
-===================
+``gettext``
+===========
-The ``gettext`` class provides support for building software that uses
-the GNU ``gettext`` internationalization and localization system. All
-recipes building software that use ``gettext`` should inherit this
+The :ref:`ref-classes-gettext` class provides support for building
+software that uses the GNU ``gettext`` internationalization and localization
+system. All recipes building software that use ``gettext`` should inherit this
class.
+.. _ref-classes-github-releases:
+
+``github-releases``
+===================
+
+For recipes that fetch release tarballs from github, the :ref:`ref-classes-github-releases`
+class sets up a standard way for checking available upstream versions
+(to support ``devtool upgrade`` and the Automated Upgrade Helper (AUH)).
+
+To use it, add ":ref:`ref-classes-github-releases`" to the inherit line in the recipe,
+and if the default value of :term:`GITHUB_BASE_URI` is not suitable,
+then set your own value in the recipe. You should then use ``${GITHUB_BASE_URI}``
+in the value you set for :term:`SRC_URI` within the recipe.
+
.. _ref-classes-gnomebase:
-``gnomebase.bbclass``
-=====================
+``gnomebase``
+=============
-The ``gnomebase`` class is the base class for recipes that build
+The :ref:`ref-classes-gnomebase` class is the base class for recipes that build
software from the GNOME stack. This class sets
:term:`SRC_URI` to download the source from the GNOME
mirrors as well as extending :term:`FILES` with the typical
GNOME installation paths.
+.. _ref-classes-go:
+
+``go``
+======
+
+The :ref:`ref-classes-go` class supports building Go programs. The behavior of
+this class is controlled by the mandatory :term:`GO_IMPORT` variable, and
+by the optional :term:`GO_INSTALL` and :term:`GO_INSTALL_FILTEROUT` ones.
+
+To build a Go program with the Yocto Project, you can use the
+:yocto_git:`go-helloworld_0.1.bb </poky/tree/meta/recipes-extended/go-examples/go-helloworld_0.1.bb>`
+recipe as an example.
+
+.. _ref-classes-go-mod:
+
+``go-mod``
+==========
+
+The :ref:`ref-classes-go-mod` class allows to use Go modules, and inherits the
+:ref:`ref-classes-go` class.
+
+See the associated :term:`GO_WORKDIR` variable.
+
+.. _ref-classes-go-vendor:
+
+``go-vendor``
+=============
+
+The :ref:`ref-classes-go-vendor` class implements support for offline builds,
+also known as Go vendoring. In such a scenario, the module dependencias are
+downloaded during the :ref:`ref-tasks-fetch` task rather than when modules are
+imported, thus being coherent with Yocto's concept of fetching every source
+beforehand.
+
+The dependencies are unpacked into the modules' ``vendor`` directory, where a
+manifest file is generated.
+
.. _ref-classes-gobject-introspection:
-``gobject-introspection.bbclass``
-=================================
+``gobject-introspection``
+=========================
Provides support for recipes building software that supports GObject
introspection. This functionality is only enabled if the
@@ -684,16 +974,17 @@ introspection. This functionality is only enabled if the
.. note::
- This functionality is backfilled by default and, if not applicable,
- should be disabled through :term:`DISTRO_FEATURES_BACKFILL_CONSIDERED` or
+ This functionality is :ref:`backfilled <ref-features-backfill>` by default
+ and, if not applicable, should be disabled through
+ :term:`DISTRO_FEATURES_BACKFILL_CONSIDERED` or
:term:`MACHINE_FEATURES_BACKFILL_CONSIDERED`, respectively.
.. _ref-classes-grub-efi:
-``grub-efi.bbclass``
-====================
+``grub-efi``
+============
-The ``grub-efi`` class provides ``grub-efi``-specific functions for
+The :ref:`ref-classes-grub-efi` class provides ``grub-efi``-specific functions for
building bootable images.
This class supports several variables:
@@ -722,10 +1013,10 @@ This class supports several variables:
.. _ref-classes-gsettings:
-``gsettings.bbclass``
-=====================
+``gsettings``
+=============
-The ``gsettings`` class provides common functionality for recipes that
+The :ref:`ref-classes-gsettings` class provides common functionality for recipes that
need to install GSettings (glib) schemas. The schemas are assumed to be
part of the main package. Appropriate post-install and post-remove
(postinst/postrm) scriptlets are added to register and unregister the
@@ -733,18 +1024,18 @@ schemas in the target image.
.. _ref-classes-gtk-doc:
-``gtk-doc.bbclass``
-===================
+``gtk-doc``
+===========
-The ``gtk-doc`` class is a helper class to pull in the appropriate
+The :ref:`ref-classes-gtk-doc` class is a helper class to pull in the appropriate
``gtk-doc`` dependencies and disable ``gtk-doc``.
.. _ref-classes-gtk-icon-cache:
-``gtk-icon-cache.bbclass``
-==========================
+``gtk-icon-cache``
+==================
-The ``gtk-icon-cache`` class generates the proper post-install and
+The :ref:`ref-classes-gtk-icon-cache` class generates the proper post-install and
post-remove (postinst/postrm) scriptlets for packages that use GTK+ and
install icons. These scriptlets call ``gtk-update-icon-cache`` to add
the fonts to GTK+'s icon cache. Since the cache files are
@@ -754,10 +1045,10 @@ creation.
.. _ref-classes-gtk-immodules-cache:
-``gtk-immodules-cache.bbclass``
-===============================
+``gtk-immodules-cache``
+=======================
-The ``gtk-immodules-cache`` class generates the proper post-install and
+The :ref:`ref-classes-gtk-immodules-cache` class generates the proper post-install and
post-remove (postinst/postrm) scriptlets for packages that install GTK+
input method modules for virtual keyboards. These scriptlets call
``gtk-update-icon-cache`` to add the input method modules to the cache.
@@ -772,19 +1063,19 @@ the packages containing the modules.
.. _ref-classes-gzipnative:
-``gzipnative.bbclass``
-======================
+``gzipnative``
+==============
-The ``gzipnative`` class enables the use of different native versions of
+The :ref:`ref-classes-gzipnative` class enables the use of different native versions of
``gzip`` and ``pigz`` rather than the versions of these tools from the
build host.
.. _ref-classes-icecc:
-``icecc.bbclass``
-=================
+``icecc``
+=========
-The ``icecc`` class supports
+The :ref:`ref-classes-icecc` class supports
`Icecream <https://github.com/icecc/icecream>`__, which facilitates
taking compile jobs and distributing them among remote machines.
@@ -792,7 +1083,7 @@ The class stages directories with symlinks from ``gcc`` and ``g++`` to
``icecc``, for both native and cross compilers. Depending on each
configure or compile, the OpenEmbedded build system adds the directories
at the head of the ``PATH`` list and then sets the ``ICECC_CXX`` and
-``ICEC_CC`` variables, which are the paths to the ``g++`` and ``gcc``
+``ICECC_CC`` variables, which are the paths to the ``g++`` and ``gcc``
compilers, respectively.
For the cross compiler, the class creates a ``tar.gz`` file that
@@ -800,8 +1091,8 @@ contains the Yocto Project toolchain and sets ``ICECC_VERSION``, which
is the version of the cross-compiler used in the cross-development
toolchain, accordingly.
-The class handles all three different compile stages (i.e native
-,cross-kernel and target) and creates the necessary environment
+The class handles all three different compile stages (i.e native,
+cross-kernel and target) and creates the necessary environment
``tar.gz`` file to be used by the remote machines. The class also
supports SDK generation.
@@ -811,12 +1102,13 @@ using ``which``. If :term:`ICECC_ENV_EXEC` is set
in your ``local.conf`` file, the variable should point to the
``icecc-create-env`` script provided by the user. If you do not point to
a user-provided script, the build system uses the default script
-provided by the recipe ``icecc-create-env-native.bb``.
+provided by the recipe :oe_git:`icecc-create-env_0.1.bb
+</openembedded-core/tree/meta/recipes-devtools/icecc-create-env/icecc-create-env_0.1.bb>`.
.. note::
This script is a modified version and not the one that comes with
- icecc.
+ ``icecream``.
If you do not want the Icecream distributed compile support to apply to
specific recipes or classes, you can ask them to be ignored by Icecream
@@ -831,13 +1123,13 @@ Additionally, you can list recipes using the
your ``local.conf`` file to force ``icecc`` to be enabled for recipes
using an empty :term:`PARALLEL_MAKE` variable.
-Inheriting the ``icecc`` class changes all sstate signatures.
+Inheriting the :ref:`ref-classes-icecc` class changes all sstate signatures.
Consequently, if a development team has a dedicated build system that
populates :term:`SSTATE_MIRRORS` and they want to
reuse sstate from :term:`SSTATE_MIRRORS`, then all developers and the build
-system need to either inherit the ``icecc`` class or nobody should.
+system need to either inherit the :ref:`ref-classes-icecc` class or nobody should.
-At the distribution level, you can inherit the ``icecc`` class to be
+At the distribution level, you can inherit the :ref:`ref-classes-icecc` class to be
sure that all builders start with the same sstate signatures. After
inheriting the class, you can then disable the feature by setting the
:term:`ICECC_DISABLED` variable to "1" as follows::
@@ -854,10 +1146,10 @@ individually as follows in your ``local.conf`` file::
.. _ref-classes-image:
-``image.bbclass``
-=================
+``image``
+=========
-The ``image`` class helps support creating images in different formats.
+The :ref:`ref-classes-image` class helps support creating images in different formats.
First, the root filesystem is created from packages using one of the
``rootfs*.bbclass`` files (depending on the package format used) and
then one or more image files are created.
@@ -869,7 +1161,7 @@ then one or more image files are created.
install into the image.
For information on customizing images, see the
-":ref:`dev-manual/common-tasks:customizing images`" section
+":ref:`dev-manual/customizing-images:customizing images`" section
in the Yocto Project Development Tasks Manual. For information on how
images are created, see the
":ref:`overview-manual/concepts:images`" section in the
@@ -877,37 +1169,55 @@ Yocto Project Overview and Concepts Manual.
.. _ref-classes-image-buildinfo:
-``image-buildinfo.bbclass``
-===========================
+``image-buildinfo``
+===================
+
+The :ref:`ref-classes-image-buildinfo` class writes a plain text file containing
+build information to the target filesystem at ``${sysconfdir}/buildinfo``
+by default (as specified by :term:`IMAGE_BUILDINFO_FILE`).
+This can be useful for manually determining the origin of any given
+image. It writes out two sections:
-The ``image-buildinfo`` class writes information to the target
-filesystem on ``/etc/build``.
+#. `Build Configuration`: a list of variables and their values (specified
+ by :term:`IMAGE_BUILDINFO_VARS`, which defaults to :term:`DISTRO` and
+ :term:`DISTRO_VERSION`)
+
+#. `Layer Revisions`: the revisions of all of the layers used in the
+ build.
+
+Additionally, when building an SDK it will write the same contents
+to ``/buildinfo`` by default (as specified by
+:term:`SDK_BUILDINFO_FILE`).
.. _ref-classes-image_types:
-``image_types.bbclass``
-=======================
+``image_types``
+===============
-The ``image_types`` class defines all of the standard image output types
+The :ref:`ref-classes-image_types` class defines all of the standard image output types
that you can enable through the
:term:`IMAGE_FSTYPES` variable. You can use this
class as a reference on how to add support for custom image output
types.
-By default, the :ref:`image <ref-classes-image>` class automatically
-enables the ``image_types`` class. The ``image`` class uses the
+By default, the :ref:`ref-classes-image` class automatically
+enables the :ref:`ref-classes-image_types` class. The :ref:`ref-classes-image` class uses the
``IMGCLASSES`` variable as follows::
IMGCLASSES = "rootfs_${IMAGE_PKGTYPE} image_types ${IMAGE_CLASSES}"
- IMGCLASSES += "${@['populate_sdk_base', 'populate_sdk_ext']['linux' in d.getVar("SDK_OS")]}"
+ # Only Linux SDKs support populate_sdk_ext, fall back to populate_sdk_base
+ # in the non-Linux SDK_OS case, such as mingw32
+ inherit populate_sdk_base
+ IMGCLASSES += "${@['', 'populate_sdk_ext']['linux' in d.getVar("SDK_OS")]}"
IMGCLASSES += "${@bb.utils.contains_any('IMAGE_FSTYPES', 'live iso hddimg', 'image-live', '', d)}"
IMGCLASSES += "${@bb.utils.contains('IMAGE_FSTYPES', 'container', 'image-container', '', d)}"
IMGCLASSES += "image_types_wic"
IMGCLASSES += "rootfs-postcommands"
IMGCLASSES += "image-postinst-intercepts"
- inherit ${IMGCLASSES}
+ IMGCLASSES += "overlayfs-etc"
+ inherit_defer ${IMGCLASSES}
-The ``image_types`` class also handles conversion and compression of images.
+The :ref:`ref-classes-image_types` class also handles conversion and compression of images.
.. note::
@@ -917,8 +1227,8 @@ The ``image_types`` class also handles conversion and compression of images.
.. _ref-classes-image-live:
-``image-live.bbclass``
-======================
+``image-live``
+==============
This class controls building "live" (i.e. HDDIMG and ISO) images. Live
images contain syslinux for legacy booting, as well as the bootloader
@@ -930,10 +1240,10 @@ Normally, you do not use this class directly. Instead, you add "live" to
.. _ref-classes-insane:
-``insane.bbclass``
-==================
+``insane``
+==========
-The ``insane`` class adds a step to the package generation process so
+The :ref:`ref-classes-insane` class adds a step to the package generation process so
that output quality assurance checks are generated by the OpenEmbedded
build system. A range of checks are performed that check the build's
output for common problems that show up during runtime. Distribution
@@ -961,8 +1271,8 @@ Please keep in mind that the QA checks
are meant to detect real or potential problems in the packaged
output. So exercise caution when disabling these checks.
-Here are the tests you can list with the :term:`WARN_QA` and
-:term:`ERROR_QA` variables:
+The tests you can list with the :term:`WARN_QA` and
+:term:`ERROR_QA` variables are:
- ``already-stripped:`` Checks that produced binaries have not
already been stripped prior to the build system extracting debug
@@ -979,8 +1289,8 @@ Here are the tests you can list with the :term:`WARN_QA` and
software, like bootloaders, might need to bypass this check.
- ``buildpaths:`` Checks for paths to locations on the build host
- inside the output files. Currently, this test triggers too many false
- positives and thus is not normally enabled.
+ inside the output files. Not only can these leak information about
+ the build environment, they also hinder binary reproducibility.
- ``build-deps:`` Determines if a build-time dependency that is
specified through :term:`DEPENDS`, explicit
@@ -993,7 +1303,7 @@ Here are the tests you can list with the :term:`WARN_QA` and
the package is installed into the image during the
:ref:`ref-tasks-rootfs` task because the auto-detected
dependency was not satisfied. An example of this would be where the
- :ref:`update-rc.d <ref-classes-update-rc.d>` class automatically
+ :ref:`ref-classes-update-rc.d` class automatically
adds a dependency on the ``initscripts-functions`` package to
packages that install an initscript that refers to
``/etc/init.d/functions``. The recipe should really have an explicit
@@ -1002,11 +1312,21 @@ Here are the tests you can list with the :term:`WARN_QA` and
``initscripts`` recipe is actually built and thus the
``initscripts-functions`` package is made available.
+- ``configure-gettext:`` Checks that if a recipe is building something
+ that uses automake and the automake files contain an ``AM_GNU_GETTEXT``
+ directive, that the recipe also inherits the :ref:`ref-classes-gettext`
+ class to ensure that gettext is available during the build.
+
- ``compile-host-path:`` Checks the
:ref:`ref-tasks-compile` log for indications that
paths to locations on the build host were used. Using such paths
might result in host contamination of the build output.
+- ``cve_status_not_in_db:`` Checks for each component if CVEs that are ignored
+ via :term:`CVE_STATUS`, that those are (still) reported for this component
+ in the NIST database. If not, a warning is printed. This check is disabled
+ by default.
+
- ``debug-deps:`` Checks that all packages except ``-dbg`` packages
do not depend on ``-dbg`` packages, which would cause a packaging
bug.
@@ -1080,12 +1400,12 @@ Here are the tests you can list with the :term:`WARN_QA` and
might result in host contamination of the build output.
- ``installed-vs-shipped:`` Reports when files have been installed
- within ``do_install`` but have not been included in any package by
+ within :ref:`ref-tasks-install` but have not been included in any package by
way of the :term:`FILES` variable. Files that do not
appear in any package cannot be present in an image later on in the
build process. Ideally, all installed files should be packaged or not
installed at all. These files can be deleted at the end of
- ``do_install`` if the files are not needed in any package.
+ :ref:`ref-tasks-install` if the files are not needed in any package.
- ``invalid-chars:`` Checks that the recipe metadata variables
:term:`DESCRIPTION`,
@@ -1118,11 +1438,39 @@ Here are the tests you can list with the :term:`WARN_QA` and
``/usr/libexec``. This check is not performed if the ``libexecdir``
variable has been set explicitly to ``/usr/libexec``.
+- ``mime:`` Check that if a package contains mime type files (``.xml``
+ files in ``${datadir}/mime/packages``) that the recipe also inherits
+ the :ref:`ref-classes-mime` class in order to ensure that these get
+ properly installed.
+
+- ``mime-xdg:`` Checks that if a package contains a .desktop file with a
+ 'MimeType' key present, that the recipe inherits the
+ :ref:`ref-classes-mime-xdg` class that is required in order for that
+ to be activated.
+
+- ``missing-update-alternatives:`` Check that if a recipe sets the
+ :term:`ALTERNATIVE` variable that the recipe also inherits
+ :ref:`ref-classes-update-alternatives` such that the alternative will
+ be correctly set up.
+
- ``packages-list:`` Checks for the same package being listed
multiple times through the :term:`PACKAGES` variable
value. Installing the package in this manner can cause errors during
packaging.
+- ``patch-fuzz:`` Checks for fuzz in patch files that may allow
+ them to apply incorrectly if the underlying code changes.
+
+- ``patch-status-core:`` Checks that the Upstream-Status is specified
+ and valid in the headers of patches for recipes in the OE-Core layer.
+
+- ``patch-status-noncore:`` Checks that the Upstream-Status is specified
+ and valid in the headers of patches for recipes in layers other than
+ OE-Core.
+
+- ``perllocalpod:`` Checks for ``perllocal.pod`` being erroneously
+ installed and packaged by a recipe.
+
- ``perm-config:`` Reports lines in ``fs-perms.txt`` that have an
invalid format.
@@ -1176,12 +1524,20 @@ Here are the tests you can list with the :term:`WARN_QA` and
options are being passed to the linker commands and your binaries
have potential security issues.
+- ``shebang-size:`` Check that the shebang line (``#!`` in the first line)
+ in a packaged script is not longer than 128 characters, which can cause
+ an error at runtime depending on the operating system.
+
- ``split-strip:`` Reports that splitting or stripping debug symbols
from binaries has failed.
- ``staticdev:`` Checks for static library files (``*.a``) in
non-``staticdev`` packages.
+- ``src-uri-bad:`` Checks that the :term:`SRC_URI` value set by a recipe
+ does not contain a reference to ``${PN}`` (instead of the correct
+ ``${BPN}``) nor refers to unstable Github archive tarballs.
+
- ``symlink-to-sysroot:`` Checks for symlinks in packages that point
into :term:`TMPDIR` on the host. Such symlinks will
work on the host, but are clearly invalid when running on the target.
@@ -1192,6 +1548,15 @@ Here are the tests you can list with the :term:`WARN_QA` and
":doc:`/ref-manual/qa-checks`" for more information regarding runtime performance
issues.
+- ``unhandled-features-check:`` check that if one of the variables that
+ the :ref:`ref-classes-features_check` class supports (e.g.
+ :term:`REQUIRED_DISTRO_FEATURES`) is set by a recipe, then the recipe
+ also inherits :ref:`ref-classes-features_check` in order for the
+ requirement to actually work.
+
+- ``unimplemented-ptest:`` Checks that ptests are implemented for upstream
+ tests.
+
- ``unlisted-pkg-lics:`` Checks that all declared licenses applying
for a package are also declared on the recipe level (i.e. any license
in ``LICENSE:*`` should appear in :term:`LICENSE`).
@@ -1201,25 +1566,35 @@ Here are the tests you can list with the :term:`WARN_QA` and
the linker (e.g. ``/lib`` and ``/usr/lib``). While these paths will
not cause any breakage, they do waste space and are unnecessary.
+- ``usrmerge:`` If ``usrmerge`` is in :term:`DISTRO_FEATURES`, this
+ check will ensure that no package installs files to root (``/bin``,
+ ``/sbin``, ``/lib``, ``/lib64``) directories.
+
- ``var-undefined:`` Reports when variables fundamental to packaging
(i.e. :term:`WORKDIR`,
:term:`DEPLOY_DIR`, :term:`D`,
:term:`PN`, and :term:`PKGD`) are undefined
during :ref:`ref-tasks-package`.
-- ``version-going-backwards:`` If Build History is enabled, reports
- when a package being written out has a lower version than the
- previously written package under the same name. If you are placing
- output packages into a feed and upgrading packages on a target system
- using that feed, the version of a package going backwards can result
- in the target system not correctly upgrading to the "new" version of
- the package.
+- ``version-going-backwards:`` If the :ref:`ref-classes-buildhistory`
+ class is enabled, reports when a package being written out has a lower
+ version than the previously written package under the same name. If
+ you are placing output packages into a feed and upgrading packages on
+ a target system using that feed, the version of a package going
+ backwards can result in the target system not correctly upgrading to
+ the "new" version of the package.
.. note::
This is only relevant when you are using runtime package management
on your target system.
+- ``virtual-slash:`` Checks to see if ``virtual/`` is being used in
+ :term:`RDEPENDS` or :term:`RPROVIDES`, which is not good practice ---
+ ``virtual/`` is a convention intended for use in the build context
+ (i.e. :term:`PROVIDES` and :term:`DEPENDS`) rather than the runtime
+ context.
+
- ``xorg-driver-abi:`` Checks that all packages containing Xorg
drivers have ABI dependencies. The ``xserver-xorg`` recipe provides
driver ABI names. All drivers should depend on the ABI versions that
@@ -1228,184 +1603,188 @@ Here are the tests you can list with the :term:`WARN_QA` and
automatically get these versions. Consequently, you should only need
to explicitly add dependencies to binary driver recipes.
-.. _ref-classes-insserv:
-
-``insserv.bbclass``
-===================
-
-The ``insserv`` class uses the ``insserv`` utility to update the order
-of symbolic links in ``/etc/rc?.d/`` within an image based on
-dependencies specified by LSB headers in the ``init.d`` scripts
-themselves.
-
.. _ref-classes-kernel:
-``kernel.bbclass``
-==================
+``kernel``
+==========
-The ``kernel`` class handles building Linux kernels. The class contains
+The :ref:`ref-classes-kernel` class handles building Linux kernels. The class contains
code to build all kernel trees. All needed headers are staged into the
:term:`STAGING_KERNEL_DIR` directory to allow out-of-tree module builds
-using the :ref:`module <ref-classes-module>` class.
-
-This means that each built kernel module is packaged separately and
-inter-module dependencies are created by parsing the ``modinfo`` output.
-If all modules are required, then installing the ``kernel-modules``
-package installs all packages with modules and various other kernel
-packages such as ``kernel-vmlinux``.
-
-The ``kernel`` class contains logic that allows you to embed an initial
-RAM filesystem (initramfs) image when you build the kernel image. For
-information on how to build an initramfs, see the
-":ref:`dev-manual/common-tasks:building an initial ram filesystem (initramfs) image`" section in
+using the :ref:`ref-classes-module` class.
+
+If a file named ``defconfig`` is listed in :term:`SRC_URI`, then by default
+:ref:`ref-tasks-configure` copies it as ``.config`` in the build directory,
+so it is automatically used as the kernel configuration for the build. This
+copy is not performed in case ``.config`` already exists there: this allows
+recipes to produce a configuration by other means in
+``do_configure:prepend``.
+
+Each built kernel module is packaged separately and inter-module
+dependencies are created by parsing the ``modinfo`` output. If all modules
+are required, then installing the ``kernel-modules`` package installs all
+packages with modules and various other kernel packages such as
+``kernel-vmlinux``.
+
+The :ref:`ref-classes-kernel` class contains logic that allows you to embed an initial
+RAM filesystem (:term:`Initramfs`) image when you build the kernel image. For
+information on how to build an :term:`Initramfs`, see the
+":ref:`dev-manual/building:building an initial ram filesystem (Initramfs) image`" section in
the Yocto Project Development Tasks Manual.
-Various other classes are used by the ``kernel`` and ``module`` classes
-internally including the :ref:`kernel-arch <ref-classes-kernel-arch>`,
-:ref:`module-base <ref-classes-module-base>`, and
-:ref:`linux-kernel-base <ref-classes-linux-kernel-base>` classes.
+Various other classes are used by the :ref:`ref-classes-kernel` and :ref:`ref-classes-module` classes
+internally including the :ref:`ref-classes-kernel-arch`, :ref:`ref-classes-module-base`, and
+:ref:`ref-classes-linux-kernel-base` classes.
.. _ref-classes-kernel-arch:
-``kernel-arch.bbclass``
-=======================
+``kernel-arch``
+===============
-The ``kernel-arch`` class sets the ``ARCH`` environment variable for
+The :ref:`ref-classes-kernel-arch` class sets the ``ARCH`` environment variable for
Linux kernel compilation (including modules).
.. _ref-classes-kernel-devicetree:
-``kernel-devicetree.bbclass``
-=============================
+``kernel-devicetree``
+=====================
+
+The :ref:`ref-classes-kernel-devicetree` class, which is inherited by the
+:ref:`ref-classes-kernel` class, supports device tree generation.
+
+Its behavior is mainly controlled by the following variables:
-The ``kernel-devicetree`` class, which is inherited by the
-:ref:`kernel <ref-classes-kernel>` class, supports device tree
-generation.
+- :term:`KERNEL_DEVICETREE_BUNDLE`: whether to bundle the kernel and device tree
+- :term:`KERNEL_DTBDEST`: directory where to install DTB files
+- :term:`KERNEL_DTBVENDORED`: whether to keep vendor subdirectories
+- :term:`KERNEL_DTC_FLAGS`: flags for ``dtc``, the Device Tree Compiler
+- :term:`KERNEL_PACKAGE_NAME`: base name of the kernel packages
.. _ref-classes-kernel-fitimage:
-``kernel-fitimage.bbclass``
-===========================
+``kernel-fitimage``
+===================
-The ``kernel-fitimage`` class provides support to pack a kernel image,
-device trees, a U-boot script, a Initramfs bundle and a RAM disk
+The :ref:`ref-classes-kernel-fitimage` class provides support to pack a kernel image,
+device trees, a U-boot script, an :term:`Initramfs` bundle and a RAM disk
into a single FIT image. In theory, a FIT image can support any number
-of kernels, U-boot scripts, Initramfs bundles, RAM disks and device-trees.
-However, ``kernel-fitimage`` currently only supports
-limited usescases: just one kernel image, an optional U-boot script,
-an optional Initramfs bundle, an optional RAM disk, and any number of
-device tree.
+of kernels, U-boot scripts, :term:`Initramfs` bundles, RAM disks and device-trees.
+However, :ref:`ref-classes-kernel-fitimage` currently only supports
+limited usecases: just one kernel image, an optional U-boot script,
+an optional :term:`Initramfs` bundle, an optional RAM disk, and any number of
+device trees.
To create a FIT image, it is required that :term:`KERNEL_CLASSES`
-is set to include "kernel-fitimage" and :term:`KERNEL_IMAGETYPE`
-is set to "fitImage".
+is set to include ":ref:`ref-classes-kernel-fitimage`" and one of :term:`KERNEL_IMAGETYPE`,
+:term:`KERNEL_ALT_IMAGETYPE` or :term:`KERNEL_IMAGETYPES` to include "fitImage".
The options for the device tree compiler passed to ``mkimage -D``
when creating the FIT image are specified using the
:term:`UBOOT_MKIMAGE_DTCOPTS` variable.
Only a single kernel can be added to the FIT image created by
-``kernel-fitimage`` and the kernel image in FIT is mandatory. The
+:ref:`ref-classes-kernel-fitimage` and the kernel image in FIT is mandatory. The
address where the kernel image is to be loaded by U-Boot is
specified by :term:`UBOOT_LOADADDRESS` and the entrypoint by
-:term:`UBOOT_ENTRYPOINT`.
+:term:`UBOOT_ENTRYPOINT`. Setting :term:`FIT_ADDRESS_CELLS` to "2"
+is necessary if such addresses are 64 bit ones.
Multiple device trees can be added to the FIT image created by
-``kernel-fitimage`` and the device tree is optional.
+:ref:`ref-classes-kernel-fitimage` and the device tree is optional.
The address where the device tree is to be loaded by U-Boot is
specified by :term:`UBOOT_DTBO_LOADADDRESS` for device tree overlays
and by :term:`UBOOT_DTB_LOADADDRESS` for device tree binaries.
Only a single RAM disk can be added to the FIT image created by
-``kernel-fitimage`` and the RAM disk in FIT is optional.
+:ref:`ref-classes-kernel-fitimage` and the RAM disk in FIT is optional.
The address where the RAM disk image is to be loaded by U-Boot
is specified by :term:`UBOOT_RD_LOADADDRESS` and the entrypoint by
-:term:`UBOOT_RD_ENTRYPOINT`. The ramdisk is added to FIT image when
-:term:`INITRAMFS_IMAGE` is specified and that :term:`INITRAMFS_IMAGE_BUNDLE`
-is set to 0.
+:term:`UBOOT_RD_ENTRYPOINT`. The ramdisk is added to the FIT image when
+:term:`INITRAMFS_IMAGE` is specified and requires that :term:`INITRAMFS_IMAGE_BUNDLE`
+is not set to 1.
-Only a single Initramfs bundle can be added to the FIT image created by
-``kernel-fitimage`` and the Initramfs bundle in FIT is optional.
-In case of Initramfs, the kernel is configured to be bundled with the root filesystem
+Only a single :term:`Initramfs` bundle can be added to the FIT image created by
+:ref:`ref-classes-kernel-fitimage` and the :term:`Initramfs` bundle in FIT is optional.
+In case of :term:`Initramfs`, the kernel is configured to be bundled with the root filesystem
in the same binary (example: zImage-initramfs-:term:`MACHINE`.bin).
-When the kernel is copied to RAM and executed, it unpacks the Initramfs root filesystem.
-The Initramfs bundle can be enabled when :term:`INITRAMFS_IMAGE`
-is specified and that :term:`INITRAMFS_IMAGE_BUNDLE` is set to 1.
-The address where the Initramfs bundle is to be loaded by U-boot is specified
+When the kernel is copied to RAM and executed, it unpacks the :term:`Initramfs` root filesystem.
+The :term:`Initramfs` bundle can be enabled when :term:`INITRAMFS_IMAGE`
+is specified and requires that :term:`INITRAMFS_IMAGE_BUNDLE` is set to 1.
+The address where the :term:`Initramfs` bundle is to be loaded by U-boot is specified
by :term:`UBOOT_LOADADDRESS` and the entrypoint by :term:`UBOOT_ENTRYPOINT`.
Only a single U-boot boot script can be added to the FIT image created by
-``kernel-fitimage`` and the boot script is optional.
+:ref:`ref-classes-kernel-fitimage` and the boot script is optional.
The boot script is specified in the ITS file as a text file containing
U-boot commands. When using a boot script the user should configure the
-U-boot ``do_install`` task to copy the script to sysroot.
-So the script can be included in the FIT image by the ``kernel-fitimage``
+U-boot :ref:`ref-tasks-install` task to copy the script to sysroot.
+So the script can be included in the FIT image by the :ref:`ref-classes-kernel-fitimage`
class. At run-time, U-boot CONFIG_BOOTCOMMAND define can be configured to
-load the boot script from the FIT image and executes it.
+load the boot script from the FIT image and execute it.
-The FIT image generated by ``kernel-fitimage`` class is signed when the
+The FIT image generated by the :ref:`ref-classes-kernel-fitimage` class is signed when the
variables :term:`UBOOT_SIGN_ENABLE`, :term:`UBOOT_MKIMAGE_DTCOPTS`,
:term:`UBOOT_SIGN_KEYDIR` and :term:`UBOOT_SIGN_KEYNAME` are set
appropriately. The default values used for :term:`FIT_HASH_ALG` and
-:term:`FIT_SIGN_ALG` in ``kernel-fitimage`` are "sha256" and
-"rsa2048" respectively. The keys for signing fitImage can be generated using
-the ``kernel-fitimage`` class when both :term:`FIT_GENERATE_KEYS` and
+:term:`FIT_SIGN_ALG` in :ref:`ref-classes-kernel-fitimage` are "sha256" and
+"rsa2048" respectively. The keys for signing the FIT image can be generated using
+the :ref:`ref-classes-kernel-fitimage` class when both :term:`FIT_GENERATE_KEYS` and
:term:`UBOOT_SIGN_ENABLE` are set to "1".
.. _ref-classes-kernel-grub:
-``kernel-grub.bbclass``
-=======================
+``kernel-grub``
+===============
-The ``kernel-grub`` class updates the boot area and the boot menu with
+The :ref:`ref-classes-kernel-grub` class updates the boot area and the boot menu with
the kernel as the priority boot mechanism while installing a RPM to
update the kernel on a deployed target.
.. _ref-classes-kernel-module-split:
-``kernel-module-split.bbclass``
-===============================
+``kernel-module-split``
+=======================
-The ``kernel-module-split`` class provides common functionality for
+The :ref:`ref-classes-kernel-module-split` class provides common functionality for
splitting Linux kernel modules into separate packages.
.. _ref-classes-kernel-uboot:
-``kernel-uboot.bbclass``
-========================
+``kernel-uboot``
+================
-The ``kernel-uboot`` class provides support for building from
+The :ref:`ref-classes-kernel-uboot` class provides support for building from
vmlinux-style kernel sources.
.. _ref-classes-kernel-uimage:
-``kernel-uimage.bbclass``
-=========================
+``kernel-uimage``
+=================
-The ``kernel-uimage`` class provides support to pack uImage.
+The :ref:`ref-classes-kernel-uimage` class provides support to pack uImage.
.. _ref-classes-kernel-yocto:
-``kernel-yocto.bbclass``
-========================
+``kernel-yocto``
+================
-The ``kernel-yocto`` class provides common functionality for building
+The :ref:`ref-classes-kernel-yocto` class provides common functionality for building
from linux-yocto style kernel source repositories.
.. _ref-classes-kernelsrc:
-``kernelsrc.bbclass``
-=====================
+``kernelsrc``
+=============
-The ``kernelsrc`` class sets the Linux kernel source and version.
+The :ref:`ref-classes-kernelsrc` class sets the Linux kernel source and version.
.. _ref-classes-lib_package:
-``lib_package.bbclass``
-=======================
+``lib_package``
+===============
-The ``lib_package`` class supports recipes that build libraries and
+The :ref:`ref-classes-lib_package` class supports recipes that build libraries and
produce executable binaries, where those binaries should not be
installed by default along with the library. Instead, the binaries are
added to a separate ``${``\ :term:`PN`\ ``}-bin`` package to
@@ -1413,40 +1792,40 @@ make their installation optional.
.. _ref-classes-libc*:
-``libc*.bbclass``
-=================
+``libc*``
+=========
-The ``libc*`` classes support recipes that build packages with ``libc``:
+The :ref:`ref-classes-libc*` classes support recipes that build packages with ``libc``:
-- The ``libc-common`` class provides common support for building with
+- The :ref:`libc-common <ref-classes-libc*>` class provides common support for building with
``libc``.
-- The ``libc-package`` class supports packaging up ``glibc`` and
+- The :ref:`libc-package <ref-classes-libc*>` class supports packaging up ``glibc`` and
``eglibc``.
.. _ref-classes-license:
-``license.bbclass``
-===================
+``license``
+===========
-The ``license`` class provides license manifest creation and license
+The :ref:`ref-classes-license` class provides license manifest creation and license
exclusion. This class is enabled by default using the default value for
the :term:`INHERIT_DISTRO` variable.
.. _ref-classes-linux-kernel-base:
-``linux-kernel-base.bbclass``
-=============================
+``linux-kernel-base``
+=====================
-The ``linux-kernel-base`` class provides common functionality for
+The :ref:`ref-classes-linux-kernel-base` class provides common functionality for
recipes that build out of the Linux kernel source tree. These builds
goes beyond the kernel itself. For example, the Perf recipe also
inherits this class.
.. _ref-classes-linuxloader:
-``linuxloader.bbclass``
-=======================
+``linuxloader``
+===============
Provides the function ``linuxloader()``, which gives the value of the
dynamic loader/linker provided on the platform. This value is used by a
@@ -1454,71 +1833,101 @@ number of other classes.
.. _ref-classes-logging:
-``logging.bbclass``
-===================
+``logging``
+===========
-The ``logging`` class provides the standard shell functions used to log
+The :ref:`ref-classes-logging` class provides the standard shell functions used to log
messages for various BitBake severity levels (i.e. ``bbplain``,
``bbnote``, ``bbwarn``, ``bberror``, ``bbfatal``, and ``bbdebug``).
-This class is enabled by default since it is inherited by the ``base``
+This class is enabled by default since it is inherited by the :ref:`ref-classes-base`
class.
+.. _ref-classes-meson:
+
+``meson``
+=========
+
+The :ref:`ref-classes-meson` class allows to create recipes that build software
+using the `Meson <https://mesonbuild.com/>`__ build system. You can use the
+:term:`MESON_BUILDTYPE`, :term:`MESON_TARGET` and :term:`EXTRA_OEMESON`
+variables to specify additional configuration options to be passed using the
+``meson`` command line.
+
.. _ref-classes-metadata_scm:
-``metadata_scm.bbclass``
-========================
+``metadata_scm``
+================
-The ``metadata_scm`` class provides functionality for querying the
+The :ref:`ref-classes-metadata_scm` class provides functionality for querying the
branch and revision of a Source Code Manager (SCM) repository.
-The :ref:`base <ref-classes-base>` class uses this class to print the
-revisions of each layer before starting every build. The
-``metadata_scm`` class is enabled by default because it is inherited by
-the ``base`` class.
+The :ref:`ref-classes-base` class uses this class to print the revisions of
+each layer before starting every build. The :ref:`ref-classes-metadata_scm`
+class is enabled by default because it is inherited by the
+:ref:`ref-classes-base` class.
.. _ref-classes-migrate_localcount:
-``migrate_localcount.bbclass``
-==============================
+``migrate_localcount``
+======================
-The ``migrate_localcount`` class verifies a recipe's localcount data and
+The :ref:`ref-classes-migrate_localcount` class verifies a recipe's localcount data and
increments it appropriately.
.. _ref-classes-mime:
-``mime.bbclass``
-================
+``mime``
+========
-The ``mime`` class generates the proper post-install and post-remove
+The :ref:`ref-classes-mime` class generates the proper post-install and post-remove
(postinst/postrm) scriptlets for packages that install MIME type files.
These scriptlets call ``update-mime-database`` to add the MIME types to
the shared database.
+.. _ref-classes-mime-xdg:
+
+``mime-xdg``
+============
+
+The :ref:`ref-classes-mime-xdg` class generates the proper
+post-install and post-remove (postinst/postrm) scriptlets for packages
+that install ``.desktop`` files containing ``MimeType`` entries.
+These scriptlets call ``update-desktop-database`` to add the MIME types
+to the database of MIME types handled by desktop files.
+
+Thanks to this class, when users open a file through a file browser
+on recently created images, they don't have to choose the application
+to open the file from the pool of all known applications, even the ones
+that cannot open the selected file.
+
+If you have recipes installing their ``.desktop`` files as absolute
+symbolic links, the detection of such files cannot be done by the current
+implementation of this class. In this case, you have to add the corresponding
+package names to the :term:`MIME_XDG_PACKAGES` variable.
+
.. _ref-classes-mirrors:
-``mirrors.bbclass``
-===================
+``mirrors``
+===========
-The ``mirrors`` class sets up some standard
+The :ref:`ref-classes-mirrors` class sets up some standard
:term:`MIRRORS` entries for source code mirrors. These
mirrors provide a fall-back path in case the upstream source specified
in :term:`SRC_URI` within recipes is unavailable.
This class is enabled by default since it is inherited by the
-:ref:`base <ref-classes-base>` class.
+:ref:`ref-classes-base` class.
.. _ref-classes-module:
-``module.bbclass``
-==================
+``module``
+==========
-The ``module`` class provides support for building out-of-tree Linux
-kernel modules. The class inherits the
-:ref:`module-base <ref-classes-module-base>` and
-:ref:`kernel-module-split <ref-classes-kernel-module-split>` classes,
-and implements the :ref:`ref-tasks-compile` and
-:ref:`ref-tasks-install` tasks. The class provides
+The :ref:`ref-classes-module` class provides support for building out-of-tree Linux
+kernel modules. The class inherits the :ref:`ref-classes-module-base` and
+:ref:`ref-classes-kernel-module-split` classes, and implements the
+:ref:`ref-tasks-compile` and :ref:`ref-tasks-install` tasks. The class provides
everything needed to build and package a kernel module.
For general information on out-of-tree Linux kernel modules, see the
@@ -1527,44 +1936,44 @@ section in the Yocto Project Linux Kernel Development Manual.
.. _ref-classes-module-base:
-``module-base.bbclass``
-=======================
+``module-base``
+===============
-The ``module-base`` class provides the base functionality for building
-Linux kernel modules. Typically, a recipe that builds software that
-includes one or more kernel modules and has its own means of building
-the module inherits this class as opposed to inheriting the
-:ref:`module <ref-classes-module>` class.
+The :ref:`ref-classes-module-base` class provides the base functionality for
+building Linux kernel modules. Typically, a recipe that builds software that
+includes one or more kernel modules and has its own means of building the module
+inherits this class as opposed to inheriting the :ref:`ref-classes-module`
+class.
.. _ref-classes-multilib*:
-``multilib*.bbclass``
-=====================
+``multilib*``
+=============
-The ``multilib*`` classes provide support for building libraries with
+The :ref:`ref-classes-multilib*` classes provide support for building libraries with
different target optimizations or target architectures and installing
them side-by-side in the same image.
For more information on using the Multilib feature, see the
-":ref:`dev-manual/common-tasks:combining multiple versions of library files into one image`"
+":ref:`dev-manual/libraries:combining multiple versions of library files into one image`"
section in the Yocto Project Development Tasks Manual.
.. _ref-classes-native:
-``native.bbclass``
-==================
+``native``
+==========
-The ``native`` class provides common functionality for recipes that
+The :ref:`ref-classes-native` class provides common functionality for recipes that
build tools to run on the :term:`Build Host` (i.e. tools that use the compiler
or other tools from the build host).
You can create a recipe that builds tools that run natively on the host
a couple different ways:
-- Create a ``myrecipe-native.bb`` recipe that inherits the ``native``
+- Create a ``myrecipe-native.bb`` recipe that inherits the :ref:`ref-classes-native`
class. If you use this method, you must order the inherit statement
in the recipe after all other inherit statements so that the
- ``native`` class is inherited last.
+ :ref:`ref-classes-native` class is inherited last.
.. note::
@@ -1586,17 +1995,17 @@ a couple different ways:
specify any functionality specific to the respective native or target
case.
-Although applied differently, the ``native`` class is used with both
+Although applied differently, the :ref:`ref-classes-native` class is used with both
methods. The advantage of the second method is that you do not need to
have two separate recipes (assuming you need both) for native and
target. All common parts of the recipe are automatically shared.
.. _ref-classes-nativesdk:
-``nativesdk.bbclass``
-=====================
+``nativesdk``
+=============
-The ``nativesdk`` class provides common functionality for recipes that
+The :ref:`ref-classes-nativesdk` class provides common functionality for recipes that
wish to build tools to run as part of an SDK (i.e. tools that run on
:term:`SDKMACHINE`).
@@ -1604,11 +2013,11 @@ You can create a recipe that builds tools that run on the SDK machine a
couple different ways:
- Create a ``nativesdk-myrecipe.bb`` recipe that inherits the
- ``nativesdk`` class. If you use this method, you must order the
+ :ref:`ref-classes-nativesdk` class. If you use this method, you must order the
inherit statement in the recipe after all other inherit statements so
- that the ``nativesdk`` class is inherited last.
+ that the :ref:`ref-classes-nativesdk` class is inherited last.
-- Create a ``nativesdk`` variant of any recipe by adding the following::
+- Create a :ref:`ref-classes-nativesdk` variant of any recipe by adding the following::
BBCLASSEXTEND = "nativesdk"
@@ -1627,26 +2036,26 @@ couple different ways:
Not doing so can lead to subtle problems because there is code that
depends on the naming convention.
-Although applied differently, the ``nativesdk`` class is used with both
+Although applied differently, the :ref:`ref-classes-nativesdk` class is used with both
methods. The advantage of the second method is that you do not need to
have two separate recipes (assuming you need both) for the SDK machine
and the target. All common parts of the recipe are automatically shared.
.. _ref-classes-nopackages:
-``nopackages.bbclass``
-======================
+``nopackages``
+==============
Disables packaging tasks for those recipes and classes where packaging
is not needed.
.. _ref-classes-npm:
-``npm.bbclass``
-===============
+``npm``
+=======
-Provides support for building Node.js software fetched using the `node
-package manager (NPM) <https://en.wikipedia.org/wiki/Npm_(software)>`__.
+Provides support for building Node.js software fetched using the
+:wikipedia:`node package manager (NPM) <Npm_(software)>`.
.. note::
@@ -1654,33 +2063,33 @@ package manager (NPM) <https://en.wikipedia.org/wiki/Npm_(software)>`__.
fetcher to have dependencies fetched and packaged automatically.
For information on how to create NPM packages, see the
-":ref:`dev-manual/common-tasks:creating node package manager (npm) packages`"
+":ref:`dev-manual/packages:creating node package manager (npm) packages`"
section in the Yocto Project Development Tasks Manual.
.. _ref-classes-oelint:
-``oelint.bbclass``
-==================
+``oelint``
+==========
-The ``oelint`` class is an obsolete lint checking tool available in
+The :ref:`ref-classes-oelint` class is an obsolete lint checking tool available in
``meta/classes`` in the :term:`Source Directory`.
There are some classes that could be generally useful in OE-Core but
-are never actually used within OE-Core itself. The ``oelint`` class is
+are never actually used within OE-Core itself. The :ref:`ref-classes-oelint` class is
one such example. However, being aware of this class can reduce the
proliferation of different versions of similar classes across multiple
layers.
.. _ref-classes-overlayfs:
-``overlayfs.bbclass``
-=======================
+``overlayfs``
+=============
It's often desired in Embedded System design to have a read-only root filesystem.
But a lot of different applications might want to have read-write access to
some parts of a filesystem. It can be especially useful when your update mechanism
overwrites the whole root filesystem, but you may want your application data to be preserved
-between updates. The :ref:`overlayfs <ref-classes-overlayfs>` class provides a way
+between updates. The :ref:`ref-classes-overlayfs` class provides a way
to achieve that by means of ``overlayfs`` and at the same time keeping the base
root filesystem read-only.
@@ -1695,7 +2104,7 @@ is supported by ``overlayfs``. This has to be done in your machine configuration
* QA checks fail to catch file existence if you redefine this variable in your recipe!
* Only the existence of the systemd mount unit file is checked, not its contents.
* To get more details on ``overlayfs``, its internals and supported operations, please refer
- to the official documentation of the `Linux kernel <https://www.kernel.org/doc/html/latest/filesystems/overlayfs.html>`_.
+ to the official documentation of the `Linux kernel <https://www.kernel.org/doc/html/latest/filesystems/overlayfs.html>`__.
The class assumes you have a ``data.mount`` systemd unit defined elsewhere in your BSP
(e.g. in ``systemd-machine-units`` recipe) and it's installed into the image.
@@ -1720,7 +2129,7 @@ and then in your recipe::
On a practical note, your application recipe might require multiple
overlays to be mounted before running to avoid writing to the underlying
file system (which can be forbidden in case of read-only file system)
-To achieve that :ref:`overlayfs <ref-classes-overlayfs>` provides a ``systemd``
+To achieve that :ref:`ref-classes-overlayfs` provides a ``systemd``
helper service for mounting overlays. This helper service is named
``${PN}-overlays.service`` and can be depended on in your application recipe
(named ``application`` in the following example) ``systemd`` unit by adding
@@ -1733,12 +2142,12 @@ to the unit the following::
.. note::
The class does not support the ``/etc`` directory itself, because ``systemd`` depends on it.
- In order to get ``/etc`` in overlayfs, see :ref:`overlayfs-etc <ref-classes-overlayfs-etc>`.
+ In order to get ``/etc`` in overlayfs, see :ref:`ref-classes-overlayfs-etc`.
.. _ref-classes-overlayfs-etc:
-``overlayfs-etc.bbclass``
-=========================
+``overlayfs-etc``
+=================
In order to have the ``/etc`` directory in overlayfs a special handling at early
boot stage is required. The idea is to supply a custom init script that mounts
@@ -1782,10 +2191,10 @@ The class provides two options for ``/sbin/init`` generation:
.. _ref-classes-own-mirrors:
-``own-mirrors.bbclass``
-=======================
+``own-mirrors``
+===============
-The ``own-mirrors`` class makes it easier to set up your own
+The :ref:`ref-classes-own-mirrors` class makes it easier to set up your own
:term:`PREMIRRORS` from which to first fetch source
before attempting to fetch it from the upstream specified in
:term:`SRC_URI` within each recipe.
@@ -1801,38 +2210,28 @@ in :term:`SOURCE_MIRROR_URL`.
.. _ref-classes-package:
-``package.bbclass``
-===================
+``package``
+===========
-The ``package`` class supports generating packages from a build's
+The :ref:`ref-classes-package` class supports generating packages from a build's
output. The core generic functionality is in ``package.bbclass``. The
code specific to particular package types resides in these
-package-specific classes:
-:ref:`package_deb <ref-classes-package_deb>`,
-:ref:`package_rpm <ref-classes-package_rpm>`,
-:ref:`package_ipk <ref-classes-package_ipk>`, and
-:ref:`package_tar <ref-classes-package_tar>`.
-
-.. note::
-
- The
- package_tar
- class is broken and not supported. It is recommended that you do not
- use this class.
+package-specific classes: :ref:`ref-classes-package_deb`,
+:ref:`ref-classes-package_rpm`, :ref:`ref-classes-package_ipk`.
You can control the list of resulting package formats by using the
:term:`PACKAGE_CLASSES` variable defined in your ``conf/local.conf``
configuration file, which is located in the :term:`Build Directory`.
-When defining the variable, you can
-specify one or more package types. Since images are generated from
-packages, a packaging class is needed to enable image generation. The
-first class listed in this variable is used for image generation.
+When defining the variable, you can specify one or more package types.
+Since images are generated from packages, a packaging class is needed
+to enable image generation. The first class listed in this variable is
+used for image generation.
If you take the optional step to set up a repository (package feed) on
the development host that can be used by DNF, you can install packages
from the feed while you are running the image on the target (i.e.
runtime installation of packages). For more information, see the
-":ref:`dev-manual/common-tasks:using runtime package management`"
+":ref:`dev-manual/packages:using runtime package management`"
section in the Yocto Project Development Tasks Manual.
The package-specific class you choose can affect build-time performance
@@ -1843,7 +2242,7 @@ complete build of the package with all dependencies previously built.
The reason for this discrepancy is because the RPM package manager
creates and processes more :term:`Metadata` than the IPK package
manager. Consequently, you might consider setting :term:`PACKAGE_CLASSES` to
-"package_ipk" if you are building smaller systems.
+":ref:`ref-classes-package_ipk`" if you are building smaller systems.
Before making your package manager decision, however, you should
consider some further things about using RPM:
@@ -1868,120 +2267,117 @@ at these two Yocto Project mailing list links:
.. _ref-classes-package_deb:
-``package_deb.bbclass``
-=======================
+``package_deb``
+===============
-The ``package_deb`` class provides support for creating packages that
+The :ref:`ref-classes-package_deb` class provides support for creating packages that
use the Debian (i.e. ``.deb``) file format. The class ensures the
packages are written out in a ``.deb`` file format to the
``${``\ :term:`DEPLOY_DIR_DEB`\ ``}`` directory.
-This class inherits the :ref:`package <ref-classes-package>` class and
+This class inherits the :ref:`ref-classes-package` class and
is enabled through the :term:`PACKAGE_CLASSES`
variable in the ``local.conf`` file.
.. _ref-classes-package_ipk:
-``package_ipk.bbclass``
-=======================
+``package_ipk``
+===============
-The ``package_ipk`` class provides support for creating packages that
+The :ref:`ref-classes-package_ipk` class provides support for creating packages that
use the IPK (i.e. ``.ipk``) file format. The class ensures the packages
are written out in a ``.ipk`` file format to the
``${``\ :term:`DEPLOY_DIR_IPK`\ ``}`` directory.
-This class inherits the :ref:`package <ref-classes-package>` class and
+This class inherits the :ref:`ref-classes-package` class and
is enabled through the :term:`PACKAGE_CLASSES`
variable in the ``local.conf`` file.
.. _ref-classes-package_rpm:
-``package_rpm.bbclass``
-=======================
+``package_rpm``
+===============
-The ``package_rpm`` class provides support for creating packages that
+The :ref:`ref-classes-package_rpm` class provides support for creating packages that
use the RPM (i.e. ``.rpm``) file format. The class ensures the packages
are written out in a ``.rpm`` file format to the
``${``\ :term:`DEPLOY_DIR_RPM`\ ``}`` directory.
-This class inherits the :ref:`package <ref-classes-package>` class and
+This class inherits the :ref:`ref-classes-package` class and
is enabled through the :term:`PACKAGE_CLASSES`
variable in the ``local.conf`` file.
-.. _ref-classes-package_tar:
-
-``package_tar.bbclass``
-=======================
-
-The ``package_tar`` class provides support for creating tarballs. The
-class ensures the packages are written out in a tarball format to the
-``${``\ :term:`DEPLOY_DIR_TAR`\ ``}`` directory.
-
-This class inherits the :ref:`package <ref-classes-package>` class and
-is enabled through the :term:`PACKAGE_CLASSES`
-variable in the ``local.conf`` file.
-
-.. note::
-
- You cannot specify the ``package_tar`` class first using the
- :term:`PACKAGE_CLASSES` variable. You must use ``.deb``, ``.ipk``, or ``.rpm``
- file formats for your image or SDK.
-
.. _ref-classes-packagedata:
-``packagedata.bbclass``
-=======================
+``packagedata``
+===============
-The ``packagedata`` class provides common functionality for reading
+The :ref:`ref-classes-packagedata` class provides common functionality for reading
``pkgdata`` files found in :term:`PKGDATA_DIR`. These
files contain information about each output package produced by the
OpenEmbedded build system.
This class is enabled by default because it is inherited by the
-:ref:`package <ref-classes-package>` class.
+:ref:`ref-classes-package` class.
.. _ref-classes-packagegroup:
-``packagegroup.bbclass``
-========================
+``packagegroup``
+================
-The ``packagegroup`` class sets default values appropriate for package
+The :ref:`ref-classes-packagegroup` class sets default values appropriate for package
group recipes (e.g. :term:`PACKAGES`, :term:`PACKAGE_ARCH`, :term:`ALLOW_EMPTY`, and
so forth). It is highly recommended that all package group recipes
inherit this class.
For information on how to use this class, see the
-":ref:`dev-manual/common-tasks:customizing images using custom package groups`"
+":ref:`dev-manual/customizing-images:customizing images using custom package groups`"
section in the Yocto Project Development Tasks Manual.
Previously, this class was called the ``task`` class.
.. _ref-classes-patch:
-``patch.bbclass``
-=================
+``patch``
+=========
-The ``patch`` class provides all functionality for applying patches
+The :ref:`ref-classes-patch` class provides all functionality for applying patches
during the :ref:`ref-tasks-patch` task.
This class is enabled by default because it is inherited by the
-:ref:`base <ref-classes-base>` class.
+:ref:`ref-classes-base` class.
.. _ref-classes-perlnative:
-``perlnative.bbclass``
-======================
+``perlnative``
+==============
-When inherited by a recipe, the ``perlnative`` class supports using the
+When inherited by a recipe, the :ref:`ref-classes-perlnative` class supports using the
native version of Perl built by the build system rather than using the
version provided by the build host.
+.. _ref-classes-pypi:
+
+``pypi``
+========
+
+The :ref:`ref-classes-pypi` class sets variables appropriately for recipes that build
+Python modules from `PyPI <https://pypi.org/>`__, the Python Package Index.
+By default it determines the PyPI package name based upon :term:`BPN`
+(stripping the "python-" or "python3-" prefix off if present), however in
+some cases you may need to set it manually in the recipe by setting
+:term:`PYPI_PACKAGE`.
+
+Variables set by the :ref:`ref-classes-pypi` class include :term:`SRC_URI`, :term:`SECTION`,
+:term:`HOMEPAGE`, :term:`UPSTREAM_CHECK_URI`, :term:`UPSTREAM_CHECK_REGEX`
+and :term:`CVE_PRODUCT`.
+
.. _ref-classes-python_flit_core:
-``python_flit_core.bbclass``
-============================
+``python_flit_core``
+====================
-The ``python_flit_core`` class enables building Python modules which declare
+The :ref:`ref-classes-python_flit_core` class enables building Python modules which declare
the `PEP-517 <https://www.python.org/dev/peps/pep-0517/>`__ compliant
``flit_core.buildapi`` ``build-backend`` in the ``[build-system]``
section of ``pyproject.toml`` (See `PEP-518 <https://www.python.org/dev/peps/pep-0518/>`__).
@@ -1989,42 +2385,82 @@ section of ``pyproject.toml`` (See `PEP-518 <https://www.python.org/dev/peps/pep
Python modules built with ``flit_core.buildapi`` are pure Python (no
``C`` or ``Rust`` extensions).
-Internally this uses the :ref:`python_pep517 <ref-classes-python_pep517>` class.
+Internally this uses the :ref:`ref-classes-python_pep517` class.
+
+.. _ref-classes-python_maturin:
+
+``python_maturin``
+==================
+
+The :ref:`ref-classes-python_maturin` class provides support for python-maturin, a replacement
+for setuptools_rust and another "backend" for building Python Wheels.
+
+.. _ref-classes-python_mesonpy:
+
+``python_mesonpy``
+==================
+
+The :ref:`ref-classes-python_mesonpy` class enables building Python modules which use the
+meson-python build system.
+
+Internally this uses the :ref:`ref-classes-python_pep517` class.
.. _ref-classes-python_pep517:
-``python_pep517.bbclass``
-=========================
+``python_pep517``
+=================
-The ``python_pep517`` class builds and installs a Python ``wheel`` binary
+The :ref:`ref-classes-python_pep517` class builds and installs a Python ``wheel`` binary
archive (see `PEP-517 <https://peps.python.org/pep-0517/>`__).
Recipes wouldn't inherit this directly, instead typically another class will
-inherit this, add the relevant native dependencies, and set
-:term:`PEP517_BUILD_API` to the Python class which implements the PEP-517 build
-API.
+inherit this and add the relevant native dependencies.
-Examples of classes which do this are :ref:`python_flit_core
-<ref-classes-python_flit_core>`, :ref:`python_setuptools_build_meta
-<ref-classes-python_setuptools_build_meta>`, and :ref:`python_poetry_core
-<ref-classes-python_poetry_core>`.
+Examples of classes which do this are :ref:`ref-classes-python_flit_core`,
+:ref:`ref-classes-python_setuptools_build_meta`, and
+:ref:`ref-classes-python_poetry_core`.
.. _ref-classes-python_poetry_core:
-``python_poetry_core.bbclass``
-==============================
+``python_poetry_core``
+======================
-The ``python_poetry_core`` class enables building Python modules which use the
+The :ref:`ref-classes-python_poetry_core` class enables building Python modules which use the
`Poetry Core <https://python-poetry.org>`__ build system.
-Internally this uses the :ref:`python_pep517 <ref-classes-python_pep517>` class.
+Internally this uses the :ref:`ref-classes-python_pep517` class.
+
+.. _ref-classes-python_pyo3:
+
+``python_pyo3``
+===============
+
+The :ref:`ref-classes-python_pyo3` class helps make sure that Python extensions
+written in Rust and built with `PyO3 <https://pyo3.rs/>`__, properly set up the
+environment for cross compilation.
+
+This class is internal to the :ref:`ref-classes-python-setuptools3_rust` class
+and is not meant to be used directly in recipes.
+
+.. _ref-classes-python-setuptools3_rust:
+
+``python-setuptools3_rust``
+===========================
+
+The :ref:`ref-classes-python-setuptools3_rust` class enables building Python
+extensions implemented in Rust with `PyO3 <https://pyo3.rs/>`__, which allows
+to compile and distribute Python extensions written in Rust as easily
+as if they were written in C.
+
+This class inherits the :ref:`ref-classes-setuptools3` and
+:ref:`ref-classes-python_pyo3` classes.
.. _ref-classes-pixbufcache:
-``pixbufcache.bbclass``
-=======================
+``pixbufcache``
+===============
-The ``pixbufcache`` class generates the proper post-install and
+The :ref:`ref-classes-pixbufcache` class generates the proper post-install and
post-remove (postinst/postrm) scriptlets for packages that install
pixbuf loaders, which are used with ``gdk-pixbuf``. These scriptlets
call ``update_pixbuf_cache`` to add the pixbuf loaders to the cache.
@@ -2039,24 +2475,24 @@ containing the loaders.
.. _ref-classes-pkgconfig:
-``pkgconfig.bbclass``
-=====================
+``pkgconfig``
+=============
-The ``pkgconfig`` class provides a standard way to get header and
+The :ref:`ref-classes-pkgconfig` class provides a standard way to get header and
library information by using ``pkg-config``. This class aims to smooth
integration of ``pkg-config`` into libraries that use it.
During staging, BitBake installs ``pkg-config`` data into the
``sysroots/`` directory. By making use of sysroot functionality within
-``pkg-config``, the ``pkgconfig`` class no longer has to manipulate the
+``pkg-config``, the :ref:`ref-classes-pkgconfig` class no longer has to manipulate the
files.
.. _ref-classes-populate-sdk:
-``populate_sdk.bbclass``
-========================
+``populate_sdk``
+================
-The ``populate_sdk`` class provides support for SDK-only recipes. For
+The :ref:`ref-classes-populate-sdk` class provides support for SDK-only recipes. For
information on advantages gained when building a cross-development
toolchain using the :ref:`ref-tasks-populate_sdk`
task, see the ":ref:`sdk-manual/appendix-obtain:building an sdk installer`"
@@ -2065,34 +2501,34 @@ Software Development Kit (eSDK) manual.
.. _ref-classes-populate-sdk-*:
-``populate_sdk_*.bbclass``
-==========================
+``populate_sdk_*``
+==================
-The ``populate_sdk_*`` classes support SDK creation and consist of the
+The :ref:`ref-classes-populate-sdk-*` classes support SDK creation and consist of the
following classes:
-- ``populate_sdk_base``: The base class supporting SDK creation under
+- :ref:`populate_sdk_base <ref-classes-populate-sdk-*>`: The base class supporting SDK creation under
all package managers (i.e. DEB, RPM, and opkg).
-- ``populate_sdk_deb``: Supports creation of the SDK given the Debian
+- :ref:`populate_sdk_deb <ref-classes-populate-sdk-*>`: Supports creation of the SDK given the Debian
package manager.
-- ``populate_sdk_rpm``: Supports creation of the SDK given the RPM
+- :ref:`populate_sdk_rpm <ref-classes-populate-sdk-*>`: Supports creation of the SDK given the RPM
package manager.
-- ``populate_sdk_ipk``: Supports creation of the SDK given the opkg
+- :ref:`populate_sdk_ipk <ref-classes-populate-sdk-*>`: Supports creation of the SDK given the opkg
(IPK format) package manager.
-- ``populate_sdk_ext``: Supports extensible SDK creation under all
+- :ref:`populate_sdk_ext <ref-classes-populate-sdk-*>`: Supports extensible SDK creation under all
package managers.
-The ``populate_sdk_base`` class inherits the appropriate
+The :ref:`populate_sdk_base <ref-classes-populate-sdk-*>` class inherits the appropriate
``populate_sdk_*`` (i.e. ``deb``, ``rpm``, and ``ipk``) based on
:term:`IMAGE_PKGTYPE`.
The base class ensures all source and destination directories are
established and then populates the SDK. After populating the SDK, the
-``populate_sdk_base`` class constructs two sysroots:
+:ref:`populate_sdk_base <ref-classes-populate-sdk-*>` class constructs two sysroots:
``${``\ :term:`SDK_ARCH`\ ``}-nativesdk``, which
contains the cross-compiler and associated tooling, and the target,
which contains a target root filesystem that is configured for the SDK
@@ -2105,9 +2541,9 @@ which consists of the following::
Finally, the base populate SDK class creates the toolchain environment
setup script, the tarball of the SDK, and the installer.
-The respective ``populate_sdk_deb``, ``populate_sdk_rpm``, and
-``populate_sdk_ipk`` classes each support the specific type of SDK.
-These classes are inherited by and used with the ``populate_sdk_base``
+The respective :ref:`populate_sdk_deb <ref-classes-populate-sdk-*>`, :ref:`populate_sdk_rpm <ref-classes-populate-sdk-*>`, and
+:ref:`populate_sdk_ipk <ref-classes-populate-sdk-*>` classes each support the specific type of SDK.
+These classes are inherited by and used with the :ref:`populate_sdk_base <ref-classes-populate-sdk-*>`
class.
For more information on the cross-development toolchain generation, see
@@ -2122,10 +2558,10 @@ Software Development Kit (eSDK) manual.
.. _ref-classes-prexport:
-``prexport.bbclass``
-====================
+``prexport``
+============
-The ``prexport`` class provides functionality for exporting
+The :ref:`ref-classes-prexport` class provides functionality for exporting
:term:`PR` values.
.. note::
@@ -2135,10 +2571,10 @@ The ``prexport`` class provides functionality for exporting
.. _ref-classes-primport:
-``primport.bbclass``
-====================
+``primport``
+============
-The ``primport`` class provides functionality for importing
+The :ref:`ref-classes-primport` class provides functionality for importing
:term:`PR` values.
.. note::
@@ -2148,69 +2584,80 @@ The ``primport`` class provides functionality for importing
.. _ref-classes-prserv:
-``prserv.bbclass``
-==================
+``prserv``
+==========
-The ``prserv`` class provides functionality for using a :ref:`PR
-service <dev-manual/common-tasks:working with a pr service>` in order to
+The :ref:`ref-classes-prserv` class provides functionality for using a :ref:`PR
+service <dev-manual/packages:working with a pr service>` in order to
automatically manage the incrementing of the :term:`PR`
variable for each recipe.
This class is enabled by default because it is inherited by the
-:ref:`package <ref-classes-package>` class. However, the OpenEmbedded
+:ref:`ref-classes-package` class. However, the OpenEmbedded
build system will not enable the functionality of this class unless
:term:`PRSERV_HOST` has been set.
.. _ref-classes-ptest:
-``ptest.bbclass``
-=================
+``ptest``
+=========
-The ``ptest`` class provides functionality for packaging and installing
+The :ref:`ref-classes-ptest` class provides functionality for packaging and installing
runtime tests for recipes that build software that provides these tests.
This class is intended to be inherited by individual recipes. However,
the class' functionality is largely disabled unless "ptest" appears in
:term:`DISTRO_FEATURES`. See the
-":ref:`dev-manual/common-tasks:testing packages with ptest`"
+":ref:`dev-manual/packages:testing packages with ptest`"
section in the Yocto Project Development Tasks Manual for more information
on ptest.
+.. _ref-classes-ptest-cargo:
+
+``ptest-cargo``
+===============
+
+The :ref:`ref-classes-ptest-cargo` class is a class which extends the
+:ref:`ref-classes-cargo` class and adds ``compile_ptest_cargo`` and
+``install_ptest_cargo`` steps to respectively build and install
+test suites defined in the ``Cargo.toml`` file, into a dedicated
+``-ptest`` package.
+
.. _ref-classes-ptest-gnome:
-``ptest-gnome.bbclass``
-=======================
+``ptest-gnome``
+===============
Enables package tests (ptests) specifically for GNOME packages, which
have tests intended to be executed with ``gnome-desktop-testing``.
For information on setting up and running ptests, see the
-":ref:`dev-manual/common-tasks:testing packages with ptest`"
+":ref:`dev-manual/packages:testing packages with ptest`"
section in the Yocto Project Development Tasks Manual.
.. _ref-classes-python3-dir:
-``python3-dir.bbclass``
-=======================
+``python3-dir``
+===============
-The ``python3-dir`` class provides the base version, location, and site
+The :ref:`ref-classes-python3-dir` class provides the base version, location, and site
package location for Python 3.
.. _ref-classes-python3native:
-``python3native.bbclass``
-=========================
+``python3native``
+=================
-The ``python3native`` class supports using the native version of Python
+The :ref:`ref-classes-python3native` class supports using the native version of Python
3 built by the build system rather than support of the version provided
by the build host.
.. _ref-classes-python3targetconfig:
-``python3targetconfig.bbclass``
-===============================
+``python3targetconfig``
+=======================
-The ``python3targetconfig`` class supports using the native version of Python
+The :ref:`ref-classes-python3targetconfig` class supports using the native version of Python
3 built by the build system rather than support of the version provided
by the build host, except that the configuration for the target machine
is accessible (such as correct installation directories). This also adds a
@@ -2219,41 +2666,40 @@ in order to avoid unnecessarily lengthening builds.
.. _ref-classes-qemu:
-``qemu.bbclass``
-================
+``qemu``
+========
-The ``qemu`` class provides functionality for recipes that either need
+The :ref:`ref-classes-qemu` class provides functionality for recipes that either need
QEMU or test for the existence of QEMU. Typically, this class is used to
run programs for a target system on the build host using QEMU's
application emulation mode.
.. _ref-classes-recipe_sanity:
-``recipe_sanity.bbclass``
-=========================
+``recipe_sanity``
+=================
-The ``recipe_sanity`` class checks for the presence of any host system
+The :ref:`ref-classes-recipe_sanity` class checks for the presence of any host system
recipe prerequisites that might affect the build (e.g. variables that
are set or software that is present).
.. _ref-classes-relocatable:
-``relocatable.bbclass``
-=======================
+``relocatable``
+===============
-The ``relocatable`` class enables relocation of binaries when they are
+The :ref:`ref-classes-relocatable` class enables relocation of binaries when they are
installed into the sysroot.
-This class makes use of the :ref:`chrpath <ref-classes-chrpath>` class
-and is used by both the :ref:`cross <ref-classes-cross>` and
-:ref:`native <ref-classes-native>` classes.
+This class makes use of the :ref:`ref-classes-chrpath` class and is used by
+both the :ref:`ref-classes-cross` and :ref:`ref-classes-native` classes.
.. _ref-classes-remove-libtool:
-``remove-libtool.bbclass``
-==========================
+``remove-libtool``
+==================
-The ``remove-libtool`` class adds a post function to the
+The :ref:`ref-classes-remove-libtool` class adds a post function to the
:ref:`ref-tasks-install` task to remove all ``.la`` files
installed by ``libtool``. Removing these files results in them being
absent from both the sysroot and target packages.
@@ -2265,15 +2711,15 @@ override the removal by setting ``REMOVE_LIBTOOL_LA`` to "0" as follows::
.. note::
- The ``remove-libtool`` class is not enabled by default.
+ The :ref:`ref-classes-remove-libtool` class is not enabled by default.
.. _ref-classes-report-error:
-``report-error.bbclass``
-========================
+``report-error``
+================
-The ``report-error`` class supports enabling the :ref:`error reporting
-tool <dev-manual/common-tasks:using the error reporting tool>`",
+The :ref:`ref-classes-report-error` class supports enabling the :ref:`error reporting
+tool <dev-manual/error-reporting-tool:using the error reporting tool>`",
which allows you to submit build error information to a central database.
The class collects debug information for recipe, recipe version, task,
@@ -2284,10 +2730,10 @@ are created and stored in
.. _ref-classes-rm-work:
-``rm_work.bbclass``
-===================
+``rm_work``
+===========
-The ``rm_work`` class supports deletion of temporary workspace, which
+The :ref:`ref-classes-rm-work` class supports deletion of temporary workspace, which
can ease your hard drive demands during builds.
The OpenEmbedded build system can use a substantial amount of disk space
@@ -2296,60 +2742,76 @@ under the ``${TMPDIR}/work`` directory for each recipe. Once the build
system generates the packages for a recipe, the work files for that
recipe are no longer needed. However, by default, the build system
preserves these files for inspection and possible debugging purposes. If
-you would rather have these files deleted to save disk space as the
-build progresses, you can enable ``rm_work`` by adding the following to
-your ``local.conf`` file, which is found in the :term:`Build Directory`.
-::
+you would rather have these files deleted to save disk space as the build
+progresses, you can enable :ref:`ref-classes-rm-work` by adding the following to
+your ``local.conf`` file, which is found in the :term:`Build Directory`::
INHERIT += "rm_work"
-If you are
-modifying and building source code out of the work directory for a
-recipe, enabling ``rm_work`` will potentially result in your changes to
-the source being lost. To exclude some recipes from having their work
-directories deleted by ``rm_work``, you can add the names of the recipe
-or recipes you are working on to the :term:`RM_WORK_EXCLUDE` variable, which
-can also be set in your ``local.conf`` file. Here is an example::
+If you are modifying and building source code out of the work directory for a
+recipe, enabling :ref:`ref-classes-rm-work` will potentially result in your
+changes to the source being lost. To exclude some recipes from having their work
+directories deleted by :ref:`ref-classes-rm-work`, you can add the names of the
+recipe or recipes you are working on to the :term:`RM_WORK_EXCLUDE` variable,
+which can also be set in your ``local.conf`` file. Here is an example::
RM_WORK_EXCLUDE += "busybox glibc"
.. _ref-classes-rootfs*:
-``rootfs*.bbclass``
-===================
+``rootfs*``
+===========
-The ``rootfs*`` classes support creating the root filesystem for an
+The :ref:`ref-classes-rootfs*` classes support creating the root filesystem for an
image and consist of the following classes:
-- The ``rootfs-postcommands`` class, which defines filesystem
+- The :ref:`rootfs-postcommands <ref-classes-rootfs*>` class, which defines filesystem
post-processing functions for image recipes.
-- The ``rootfs_deb`` class, which supports creation of root filesystems
+- The :ref:`rootfs_deb <ref-classes-rootfs*>` class, which supports creation of root filesystems
for images built using ``.deb`` packages.
-- The ``rootfs_rpm`` class, which supports creation of root filesystems
+- The :ref:`rootfs_rpm <ref-classes-rootfs*>` class, which supports creation of root filesystems
for images built using ``.rpm`` packages.
-- The ``rootfs_ipk`` class, which supports creation of root filesystems
+- The :ref:`rootfs_ipk <ref-classes-rootfs*>` class, which supports creation of root filesystems
for images built using ``.ipk`` packages.
-- The ``rootfsdebugfiles`` class, which installs additional files found
+- The :ref:`rootfsdebugfiles <ref-classes-rootfs*>` class, which installs additional files found
on the build host directly into the root filesystem.
The root filesystem is created from packages using one of the
-``rootfs*.bbclass`` files as determined by the
-:term:`PACKAGE_CLASSES` variable.
+:ref:`ref-classes-rootfs*` files as determined by the :term:`PACKAGE_CLASSES`
+variable.
For information on how root filesystem images are created, see the
":ref:`overview-manual/concepts:image generation`"
section in the Yocto Project Overview and Concepts Manual.
+.. _ref-classes-rust:
+
+``rust``
+========
+
+The :ref:`ref-classes-rust` class is an internal class which is just used
+in the "rust" recipe, to build the Rust compiler and runtime
+library. Except for this recipe, it is not intended to be used directly.
+
+.. _ref-classes-rust-common:
+
+``rust-common``
+===============
+
+The :ref:`ref-classes-rust-common` class is an internal class to the
+:ref:`ref-classes-cargo_common` and :ref:`ref-classes-rust` classes and is not
+intended to be used directly.
+
.. _ref-classes-sanity:
-``sanity.bbclass``
-==================
+``sanity``
+==========
-The ``sanity`` class checks to see if prerequisite software is present
+The :ref:`ref-classes-sanity` class checks to see if prerequisite software is present
on the host system so that users can be notified of potential problems
that might affect their build. The class also performs basic user
configuration checks from the ``local.conf`` configuration file to
@@ -2358,29 +2820,29 @@ usually determines whether to include this class.
.. _ref-classes-scons:
-``scons.bbclass``
-=================
+``scons``
+=========
-The ``scons`` class supports recipes that need to build software that
-uses the SCons build system. You can use the
-:term:`EXTRA_OESCONS` variable to specify
-additional configuration options you want to pass SCons command line.
+The :ref:`ref-classes-scons` class supports recipes that need to build software
+that uses the SCons build system. You can use the :term:`EXTRA_OESCONS`
+variable to specify additional configuration options you want to pass SCons
+command line.
.. _ref-classes-sdl:
-``sdl.bbclass``
-===============
+``sdl``
+=======
-The ``sdl`` class supports recipes that need to build software that uses
+The :ref:`ref-classes-sdl` class supports recipes that need to build software that uses
the Simple DirectMedia Layer (SDL) library.
.. _ref-classes-python_setuptools_build_meta:
-``python_setuptools_build_meta.bbclass``
-========================================
+``python_setuptools_build_meta``
+================================
-The ``python_setuptools_build_meta`` class enables building Python modules which
-declare the
+The :ref:`ref-classes-python_setuptools_build_meta` class enables building
+Python modules which declare the
`PEP-517 <https://www.python.org/dev/peps/pep-0517/>`__ compliant
``setuptools.build_meta`` ``build-backend`` in the ``[build-system]``
section of ``pyproject.toml`` (See `PEP-518 <https://www.python.org/dev/peps/pep-0518/>`__).
@@ -2388,21 +2850,22 @@ section of ``pyproject.toml`` (See `PEP-518 <https://www.python.org/dev/peps/pep
Python modules built with ``setuptools.build_meta`` can be pure Python or
include ``C`` or ``Rust`` extensions).
-Internally this uses the :ref:`python_pep517 <ref-classes-python_pep517>` class.
+Internally this uses the :ref:`ref-classes-python_pep517` class.
.. _ref-classes-setuptools3:
-``setuptools3.bbclass``
-=======================
+``setuptools3``
+===============
-The ``setuptools3`` class supports Python version 3.x extensions that
-use build systems based on ``setuptools`` (e.g. only have a ``setup.py`` and
-have not migrated to the official ``pyproject.toml`` format). If your recipe
-uses these build systems, the recipe needs to inherit the ``setuptools3`` class.
+The :ref:`ref-classes-setuptools3` class supports Python version 3.x extensions
+that use build systems based on ``setuptools`` (e.g. only have a ``setup.py``
+and have not migrated to the official ``pyproject.toml`` format). If your recipe
+uses these build systems, the recipe needs to inherit the
+:ref:`ref-classes-setuptools3` class.
.. note::
- The ``setuptools3`` class ``do_compile()`` task now calls
+ The :ref:`ref-classes-setuptools3` class :ref:`ref-tasks-compile` task now calls
``setup.py bdist_wheel`` to build the ``wheel`` binary archive format
(See `PEP-427 <https://www.python.org/dev/peps/pep-0427/>`__).
@@ -2413,69 +2876,61 @@ uses these build systems, the recipe needs to inherit the ``setuptools3`` class.
.. note::
- The ``setuptools3`` class ``do_install()`` task now installs the ``wheel``
- binary archive. In current versions of ``setuptools`` the legacy ``setup.py
- install`` method is deprecated. If the ``setup.py`` cannot be used with
- wheels, for example it creates files outside of the Python module or
- standard entry points, then :ref:`setuptools3_legacy
- <ref-classes-setuptools3_legacy>` should be used.
+ The :ref:`ref-classes-setuptools3` class :ref:`ref-tasks-install` task now
+ installs the ``wheel`` binary archive. In current versions of
+ ``setuptools`` the legacy ``setup.py install`` method is deprecated. If
+ the ``setup.py`` cannot be used with wheels, for example it creates files
+ outside of the Python module or standard entry points, then
+ :ref:`ref-classes-setuptools3_legacy` should be used.
.. _ref-classes-setuptools3_legacy:
-``setuptools3_legacy.bbclass``
-==============================
+``setuptools3_legacy``
+======================
-The ``setuptools3_legacy`` class supports Python version 3.x extensions that use
-build systems based on ``setuptools`` (e.g. only have a ``setup.py`` and have
-not migrated to the official ``pyproject.toml`` format). Unlike
-``setuptools3.bbclass``, this uses the traditional ``setup.py`` ``build`` and
-``install`` commands and not wheels. This use of ``setuptools`` like this is
-`deprecated <https://github.com/pypa/setuptools/blob/main/CHANGES.rst#v5830>`_
+The :ref:`ref-classes-setuptools3_legacy` class supports
+Python version 3.x extensions that use build systems based on ``setuptools``
+(e.g. only have a ``setup.py`` and have not migrated to the official
+``pyproject.toml`` format). Unlike :ref:`ref-classes-setuptools3`,
+this uses the traditional ``setup.py`` ``build`` and ``install`` commands and
+not wheels. This use of ``setuptools`` like this is
+`deprecated <https://github.com/pypa/setuptools/blob/main/CHANGES.rst#v5830>`__
but still relatively common.
.. _ref-classes-setuptools3-base:
-``setuptools3-base.bbclass``
-============================
-
-The ``setuptools3-base`` class provides a reusable base for other classes
-that support building Python version 3.x extensions. If you need
-functionality that is not provided by the :ref:`setuptools3 <ref-classes-setuptools3>` class, you may
-want to ``inherit setuptools3-base``. Some recipes do not need the tasks
-in the :ref:`setuptools3 <ref-classes-setuptools3>` class and inherit this class instead.
-
-.. _ref-classes-sign_rpm:
-
-``sign_rpm.bbclass``
+``setuptools3-base``
====================
-The ``sign_rpm`` class supports generating signed RPM packages.
+The :ref:`ref-classes-setuptools3-base` class provides a reusable base for
+other classes that support building Python version 3.x extensions. If you need
+functionality that is not provided by the :ref:`ref-classes-setuptools3` class,
+you may want to ``inherit setuptools3-base``. Some recipes do not need the tasks
+in the :ref:`ref-classes-setuptools3` class and inherit this class instead.
-.. _ref-classes-sip:
+.. _ref-classes-sign_rpm:
-``sip.bbclass``
-===============
+``sign_rpm``
+============
-The ``sip`` class supports recipes that build or package SIP-based
-Python bindings.
+The :ref:`ref-classes-sign_rpm` class supports generating signed RPM packages.
.. _ref-classes-siteconfig:
-``siteconfig.bbclass``
-======================
+``siteconfig``
+==============
-The ``siteconfig`` class provides functionality for handling site
-configuration. The class is used by the
-:ref:`autotools <ref-classes-autotools>` class to accelerate the
-:ref:`ref-tasks-configure` task.
+The :ref:`ref-classes-siteconfig` class provides functionality for handling site
+configuration. The class is used by the :ref:`ref-classes-autotools` class to
+accelerate the :ref:`ref-tasks-configure` task.
.. _ref-classes-siteinfo:
-``siteinfo.bbclass``
-====================
+``siteinfo``
+============
-The ``siteinfo`` class provides information about the targets that might
-be needed by other classes or recipes.
+The :ref:`ref-classes-siteinfo` class provides information about the targets
+that might be needed by other classes or recipes.
As an example, consider Autotools, which can require tests that must
execute on the target hardware. Since this is not possible in general
@@ -2492,12 +2947,12 @@ The class also provides variables like :term:`SITEINFO_ENDIANNESS` and
.. _ref-classes-sstate:
-``sstate.bbclass``
-==================
+``sstate``
+==========
-The ``sstate`` class provides support for Shared State (sstate). By
-default, the class is enabled through the
-:term:`INHERIT_DISTRO` variable's default value.
+The :ref:`ref-classes-sstate` class provides support for Shared State (sstate).
+By default, the class is enabled through the :term:`INHERIT_DISTRO` variable's
+default value.
For more information on sstate, see the
":ref:`overview-manual/concepts:shared state cache`"
@@ -2505,10 +2960,10 @@ section in the Yocto Project Overview and Concepts Manual.
.. _ref-classes-staging:
-``staging.bbclass``
-===================
+``staging``
+===========
-The ``staging`` class installs files into individual recipe work
+The :ref:`ref-classes-staging` class installs files into individual recipe work
directories for sysroots. The class contains the following key tasks:
- The :ref:`ref-tasks-populate_sysroot` task,
@@ -2521,14 +2976,14 @@ directories for sysroots. The class contains the following key tasks:
installs the files into the individual recipe work directories (i.e.
:term:`WORKDIR`).
-The code in the ``staging`` class is complex and basically works in two
-stages:
+The code in the :ref:`ref-classes-staging` class is complex and basically works
+in two stages:
- *Stage One:* The first stage addresses recipes that have files they
want to share with other recipes that have dependencies on the
originating recipe. Normally these dependencies are installed through
the :ref:`ref-tasks-install` task into
- ``${``\ :term:`D`\ ``}``. The ``do_populate_sysroot`` task
+ ``${``\ :term:`D`\ ``}``. The :ref:`ref-tasks-populate_sysroot` task
copies a subset of these files into ``${SYSROOT_DESTDIR}``. This
subset of files is controlled by the
:term:`SYSROOT_DIRS`,
@@ -2595,8 +3050,7 @@ stages:
dependencies traversed or installed. The same sstate dependency code
is used so that builds should be identical regardless of whether
sstate was used or not. For a closer look, see the
- ``setscene_depvalid()`` function in the
- :ref:`sstate <ref-classes-sstate>` class.
+ ``setscene_depvalid()`` function in the :ref:`ref-classes-sstate` class.
The build system is careful to maintain manifests of the files it
installs so that any given dependency can be installed as needed. The
@@ -2605,11 +3059,11 @@ stages:
.. _ref-classes-syslinux:
-``syslinux.bbclass``
-====================
+``syslinux``
+============
-The ``syslinux`` class provides syslinux-specific functions for building
-bootable images.
+The :ref:`ref-classes-syslinux` class provides syslinux-specific functions for
+building bootable images.
The class supports the following variables:
@@ -2648,11 +3102,11 @@ The class supports the following variables:
.. _ref-classes-systemd:
-``systemd.bbclass``
-===================
+``systemd``
+===========
-The ``systemd`` class provides support for recipes that install systemd
-unit files.
+The :ref:`ref-classes-systemd` class provides support for recipes that install
+systemd unit files.
The functionality for this class is disabled unless you have "systemd"
in :term:`DISTRO_FEATURES`.
@@ -2677,27 +3131,27 @@ Services are set up to start on boot automatically
unless you have set
:term:`SYSTEMD_AUTO_ENABLE` to "disable".
-For more information on ``systemd``, see the
-":ref:`dev-manual/common-tasks:selecting an initialization manager`"
+For more information on :ref:`ref-classes-systemd`, see the
+":ref:`dev-manual/init-manager:selecting an initialization manager`"
section in the Yocto Project Development Tasks Manual.
.. _ref-classes-systemd-boot:
-``systemd-boot.bbclass``
-========================
+``systemd-boot``
+================
-The ``systemd-boot`` class provides functions specific to the
+The :ref:`ref-classes-systemd-boot` class provides functions specific to the
systemd-boot bootloader for building bootable images. This is an
internal class and is not intended to be used directly.
.. note::
- The ``systemd-boot`` class is a result from merging the ``gummiboot`` class
+ The :ref:`ref-classes-systemd-boot` class is a result from merging the ``gummiboot`` class
used in previous Yocto Project releases with the ``systemd`` project.
-Set the :term:`EFI_PROVIDER` variable to
-"systemd-boot" to use this class. Doing so creates a standalone EFI
-bootloader that is not dependent on systemd.
+Set the :term:`EFI_PROVIDER` variable to ":ref:`ref-classes-systemd-boot`" to
+use this class. Doing so creates a standalone EFI bootloader that is not
+dependent on systemd.
For information on more variables used and supported in this class, see
the :term:`SYSTEMD_BOOT_CFG`,
@@ -2710,60 +3164,57 @@ for more information.
.. _ref-classes-terminal:
-``terminal.bbclass``
-====================
+``terminal``
+============
-The ``terminal`` class provides support for starting a terminal session.
-The :term:`OE_TERMINAL` variable controls which
-terminal emulator is used for the session.
+The :ref:`ref-classes-terminal` class provides support for starting a terminal
+session. The :term:`OE_TERMINAL` variable controls which terminal emulator is
+used for the session.
-Other classes use the ``terminal`` class anywhere a separate terminal
-session needs to be started. For example, the
-:ref:`patch <ref-classes-patch>` class assuming
-:term:`PATCHRESOLVE` is set to "user", the
-:ref:`cml1 <ref-classes-cml1>` class, and the
-:ref:`devshell <ref-classes-devshell>` class all use the ``terminal``
-class.
+Other classes use the :ref:`ref-classes-terminal` class anywhere a separate
+terminal session needs to be started. For example, the :ref:`ref-classes-patch`
+class assuming :term:`PATCHRESOLVE` is set to "user", the
+:ref:`ref-classes-cml1` class, and the :ref:`ref-classes-devshell` class all
+use the :ref:`ref-classes-terminal` class.
-.. _ref-classes-testimage*:
+.. _ref-classes-testimage:
-``testimage*.bbclass``
-======================
+``testimage``
+=============
-The ``testimage*`` classes support running automated tests against
+The :ref:`ref-classes-testimage` class supports running automated tests against
images using QEMU and on actual hardware. The classes handle loading the
tests and starting the image. To use the classes, you need to perform
steps to set up the environment.
-.. note::
+To enable this class, add the following to your configuration::
- Best practices include using :term:`IMAGE_CLASSES` rather than
- :term:`INHERIT` to inherit the ``testimage`` class for automated image
- testing.
+ IMAGE_CLASSES += "testimage"
The tests are commands that run on the target system over ``ssh``. Each
test is written in Python and makes use of the ``unittest`` module.
-The ``testimage.bbclass`` runs tests on an image when called using the
+The :ref:`ref-classes-testimage` class runs tests on an image when called using the
following::
$ bitbake -c testimage image
-The ``testimage-auto`` class
-runs tests on an image after the image is constructed (i.e.
-:term:`TESTIMAGE_AUTO` must be set to "1").
+Alternatively, if you wish to have tests automatically run for each image
+after it is built, you can set :term:`TESTIMAGE_AUTO`::
+
+ TESTIMAGE_AUTO = "1"
For information on how to enable, run, and create new tests, see the
-":ref:`dev-manual/common-tasks:performing automated runtime testing`"
+":ref:`dev-manual/runtime-testing:performing automated runtime testing`"
section in the Yocto Project Development Tasks Manual.
.. _ref-classes-testsdk:
-``testsdk.bbclass``
-===================
+``testsdk``
+===========
This class supports running automated tests against software development
-kits (SDKs). The ``testsdk`` class runs tests on an SDK when called
+kits (SDKs). The :ref:`ref-classes-testsdk` class runs tests on an SDK when called
using the following::
$ bitbake -c testsdk image
@@ -2771,13 +3222,13 @@ using the following::
.. note::
Best practices include using :term:`IMAGE_CLASSES` rather than
- :term:`INHERIT` to inherit the ``testsdk`` class for automated SDK
+ :term:`INHERIT` to inherit the :ref:`ref-classes-testsdk` class for automated SDK
testing.
.. _ref-classes-texinfo:
-``texinfo.bbclass``
-===================
+``texinfo``
+===========
This class should be inherited by recipes whose upstream packages invoke
the ``texinfo`` utilities at build-time. Native and cross recipes are
@@ -2794,10 +3245,10 @@ host system.
.. _ref-classes-toaster:
-``toaster.bbclass``
-===================
+``toaster``
+===========
-The ``toaster`` class collects information about packages and images and
+The :ref:`ref-classes-toaster` class collects information about packages and images and
sends them as events that the BitBake user interface can receive. The
class is enabled when the Toaster user interface is running.
@@ -2805,18 +3256,18 @@ This class is not intended to be used directly.
.. _ref-classes-toolchain-scripts:
-``toolchain-scripts.bbclass``
-=============================
+``toolchain-scripts``
+=====================
-The ``toolchain-scripts`` class provides the scripts used for setting up
+The :ref:`ref-classes-toolchain-scripts` class provides the scripts used for setting up
the environment for installed SDKs.
.. _ref-classes-typecheck:
-``typecheck.bbclass``
-=====================
+``typecheck``
+=============
-The ``typecheck`` class provides support for validating the values of
+The :ref:`ref-classes-typecheck` class provides support for validating the values of
variables set at the configuration level against their defined types.
The OpenEmbedded build system allows you to define the type of a
variable using the "type" varflag. Here is an example::
@@ -2825,14 +3276,14 @@ variable using the "type" varflag. Here is an example::
.. _ref-classes-uboot-config:
-``uboot-config.bbclass``
-========================
+``uboot-config``
+================
-The ``uboot-config`` class provides support for U-Boot configuration for
+The :ref:`ref-classes-uboot-config` class provides support for U-Boot configuration for
a machine. Specify the machine in your recipe as follows::
UBOOT_CONFIG ??= <default>
- UBOOT_CONFIG[foo] = "config,images"
+ UBOOT_CONFIG[foo] = "config,images,binary"
You can also specify the machine using this method::
@@ -2841,10 +3292,48 @@ You can also specify the machine using this method::
See the :term:`UBOOT_CONFIG` and :term:`UBOOT_MACHINE` variables for additional
information.
+.. _ref-classes-uboot-sign:
+
+``uboot-sign``
+==============
+
+The :ref:`ref-classes-uboot-sign` class provides support for U-Boot verified boot.
+It is intended to be inherited from U-Boot recipes.
+
+The variables used by this class are:
+
+- :term:`SPL_MKIMAGE_DTCOPTS`: DTC options for U-Boot ``mkimage`` when
+ building the FIT image.
+- :term:`SPL_SIGN_ENABLE`: enable signing the FIT image.
+- :term:`SPL_SIGN_KEYDIR`: directory containing the signing keys.
+- :term:`SPL_SIGN_KEYNAME`: base filename of the signing keys.
+- :term:`UBOOT_FIT_ADDRESS_CELLS`: ``#address-cells`` value for the FIT image.
+- :term:`UBOOT_FIT_DESC`: description string encoded into the FIT image.
+- :term:`UBOOT_FIT_GENERATE_KEYS`: generate the keys if they don't exist yet.
+- :term:`UBOOT_FIT_HASH_ALG`: hash algorithm for the FIT image.
+- :term:`UBOOT_FIT_KEY_GENRSA_ARGS`: ``openssl genrsa`` arguments.
+- :term:`UBOOT_FIT_KEY_REQ_ARGS`: ``openssl req`` arguments.
+- :term:`UBOOT_FIT_SIGN_ALG`: signature algorithm for the FIT image.
+- :term:`UBOOT_FIT_SIGN_NUMBITS`: size of the private key for FIT image
+ signing.
+- :term:`UBOOT_FIT_KEY_SIGN_PKCS`: algorithm for the public key certificate
+ for FIT image signing.
+- :term:`UBOOT_FITIMAGE_ENABLE`: enable the generation of a U-Boot FIT image.
+- :term:`UBOOT_MKIMAGE_DTCOPTS`: DTC options for U-Boot ``mkimage`` when
+ rebuilding the FIT image containing the kernel.
+
+See U-Boot's documentation for details about `verified boot
+<https://source.denx.de/u-boot/u-boot/-/blob/master/doc/uImage.FIT/verified-boot.txt>`__
+and the `signature process
+<https://source.denx.de/u-boot/u-boot/-/blob/master/doc/uImage.FIT/signature.txt>`__.
+
+See also the description of :ref:`ref-classes-kernel-fitimage` class, which this class
+imitates.
+
.. _ref-classes-uninative:
-``uninative.bbclass``
-=====================
+``uninative``
+=============
Attempts to isolate the build system from the host distribution's C
library in order to make re-use of native shared state artifacts across
@@ -2859,21 +3348,21 @@ yourself, publish the resulting tarball (e.g. via HTTP) and set
``UNINATIVE_URL`` and ``UNINATIVE_CHECKSUM`` appropriately. For an
example, see the ``meta/conf/distro/include/yocto-uninative.inc``.
-The ``uninative`` class is also used unconditionally by the extensible
+The :ref:`ref-classes-uninative` class is also used unconditionally by the extensible
SDK. When building the extensible SDK, ``uninative-tarball`` is built
and the resulting tarball is included within the SDK.
.. _ref-classes-update-alternatives:
-``update-alternatives.bbclass``
-===============================
+``update-alternatives``
+=======================
-The ``update-alternatives`` class helps the alternatives system when
+The :ref:`ref-classes-update-alternatives` class helps the alternatives system when
multiple sources provide the same command. This situation occurs when
several programs that have the same or similar function are installed
with the same name. For example, the ``ar`` command is available from
the ``busybox``, ``binutils`` and ``elfutils`` packages. The
-``update-alternatives`` class handles renaming the binaries so that
+:ref:`ref-classes-update-alternatives` class handles renaming the binaries so that
multiple packages can be installed without conflicts. The ``ar`` command
still works regardless of which packages are installed or subsequently
removed. The class renames the conflicting binary in each package and
@@ -2893,7 +3382,7 @@ To use this class, you need to define a number of variables:
These variables list alternative commands needed by a package, provide
pathnames for links, default links for targets, and so forth. For
details on how to use this class, see the comments in the
-:yocto_git:`update-alternatives.bbclass </poky/tree/meta/classes/update-alternatives.bbclass>`
+:yocto_git:`update-alternatives.bbclass </poky/tree/meta/classes-recipe/update-alternatives.bbclass>`
file.
.. note::
@@ -2903,10 +3392,10 @@ file.
.. _ref-classes-update-rc.d:
-``update-rc.d.bbclass``
-=======================
+``update-rc.d``
+===============
-The ``update-rc.d`` class uses ``update-rc.d`` to safely install an
+The :ref:`ref-classes-update-rc.d` class uses ``update-rc.d`` to safely install an
initialization script on behalf of the package. The OpenEmbedded build
system takes care of details such as making sure the script is stopped
before a package is removed and started when the package is installed.
@@ -2917,10 +3406,10 @@ for details.
.. _ref-classes-useradd:
-``useradd*.bbclass``
-====================
+``useradd*``
+============
-The ``useradd*`` classes support the addition of users or groups for
+The :ref:`useradd* <ref-classes-useradd>` classes support the addition of users or groups for
usage by the package on the target. For example, if you have packages
that contain system services that should be run under their own user or
group, you can use these classes to enable creation of the user or
@@ -2929,16 +3418,16 @@ group. The :oe_git:`meta-skeleton/recipes-skeleton/useradd/useradd-example.bb
recipe in the :term:`Source Directory` provides a simple
example that shows how to add three users and groups to two packages.
-The ``useradd_base`` class provides basic functionality for user or
+The :ref:`useradd_base <ref-classes-useradd>` class provides basic functionality for user or
groups settings.
-The ``useradd*`` classes support the
+The :ref:`useradd* <ref-classes-useradd>` classes support the
:term:`USERADD_PACKAGES`,
:term:`USERADD_PARAM`,
:term:`GROUPADD_PARAM`, and
:term:`GROUPMEMS_PARAM` variables.
-The ``useradd-staticids`` class supports the addition of users or groups
+The :ref:`useradd-staticids <ref-classes-useradd>` class supports the addition of users or groups
that have static user identification (``uid``) and group identification
(``gid``) values.
@@ -2954,17 +3443,15 @@ set static values, the OpenEmbedded build system looks in
:term:`BBPATH` for ``files/passwd`` and ``files/group``
files for the values.
-To use static ``uid`` and ``gid`` values, you need to set some
-variables. See the :term:`USERADDEXTENSION`,
-:term:`USERADD_UID_TABLES`,
-:term:`USERADD_GID_TABLES`, and
-:term:`USERADD_ERROR_DYNAMIC` variables.
-You can also see the :ref:`useradd <ref-classes-useradd>` class for
-additional information.
+To use static ``uid`` and ``gid`` values, you need to set some variables. See
+the :term:`USERADDEXTENSION`, :term:`USERADD_UID_TABLES`,
+:term:`USERADD_GID_TABLES`, and :term:`USERADD_ERROR_DYNAMIC` variables.
+You can also see the :ref:`ref-classes-useradd` class for additional
+information.
.. note::
- You do not use the ``useradd-staticids`` class directly. You either enable
+ You do not use the :ref:`useradd-staticids <ref-classes-useradd>` class directly. You either enable
or disable the class by setting the :term:`USERADDEXTENSION` variable. If you
enable or disable the class in a configured system, :term:`TMPDIR` might
contain incorrect ``uid`` and ``gid`` values. Deleting the :term:`TMPDIR`
@@ -2972,43 +3459,42 @@ additional information.
.. _ref-classes-utility-tasks:
-``utility-tasks.bbclass``
-=========================
+``utility-tasks``
+=================
-The ``utility-tasks`` class provides support for various "utility" type
-tasks that are applicable to all recipes, such as
-:ref:`ref-tasks-clean` and
-:ref:`ref-tasks-listtasks`.
+The :ref:`ref-classes-utility-tasks` class provides support for various
+"utility" type tasks that are applicable to all recipes, such as
+:ref:`ref-tasks-clean` and :ref:`ref-tasks-listtasks`.
This class is enabled by default because it is inherited by the
-:ref:`base <ref-classes-base>` class.
+:ref:`ref-classes-base` class.
.. _ref-classes-utils:
-``utils.bbclass``
-=================
+``utils``
+=========
-The ``utils`` class provides some useful Python functions that are
+The :ref:`ref-classes-utils` class provides some useful Python functions that are
typically used in inline Python expressions (e.g. ``${@...}``). One
example use is for ``bb.utils.contains()``.
This class is enabled by default because it is inherited by the
-:ref:`base <ref-classes-base>` class.
+:ref:`ref-classes-base` class.
.. _ref-classes-vala:
-``vala.bbclass``
-================
+``vala``
+========
-The ``vala`` class supports recipes that need to build software written
+The :ref:`ref-classes-vala` class supports recipes that need to build software written
using the Vala programming language.
.. _ref-classes-waf:
-``waf.bbclass``
-===============
+``waf``
+=======
-The ``waf`` class supports recipes that need to build software that uses
+The :ref:`ref-classes-waf` class supports recipes that need to build software that uses
the Waf build system. You can use the
:term:`EXTRA_OECONF` or
:term:`PACKAGECONFIG_CONFARGS` variables
diff --git a/documentation/ref-manual/devtool-reference.rst b/documentation/ref-manual/devtool-reference.rst
index 10ca70a2b3..9319addc3c 100644
--- a/documentation/ref-manual/devtool-reference.rst
+++ b/documentation/ref-manual/devtool-reference.rst
@@ -164,7 +164,7 @@ Adding a New Recipe to the Workspace Layer
==========================================
Use the ``devtool add`` command to add a new recipe to the workspace
-layer. The recipe you add should not exist - ``devtool`` creates it for
+layer. The recipe you add should not exist --- ``devtool`` creates it for
you. The source files the recipe uses should exist in an external area.
The following example creates and adds a new recipe named ``jackson`` to
@@ -353,7 +353,7 @@ variables in package recipes.
:yocto_git:`maintainers.inc </poky/tree/meta/conf/distro/include/maintainers.inc>`
file.
- - If the recipe is using the :ref:`bitbake:bitbake-user-manual/bitbake-user-manual-fetching:git fetcher (\`\`git://\`\`)`
+ - If the recipe is using the :ref:`bitbake-user-manual/bitbake-user-manual-fetching:git fetcher (\`\`git://\`\`)`
rather than a tarball, the commit hash points to the commit that matches
the recipe's latest version tag, or in the absence of suitable tags,
to the latest commit (when :term:`UPSTREAM_CHECK_COMMITS` set to ``1``
@@ -378,16 +378,7 @@ command::
Unless you provide a specific recipe name on the command line, the
command checks all recipes in all configured layers.
-Following is a partial example table that reports on all the recipes.
-Notice the reported reason for not upgrading the ``base-passwd`` recipe.
-In this example, while a new version is available upstream, you do not
-want to use it because the dependency on ``cdebconf`` is not easily
-satisfied. Maintainers can explicit the reason that is shown by adding
-the :term:`RECIPE_NO_UPDATE_REASON` variable to the corresponding recipe.
-See :yocto_git:`base-passwd.bb </poky/tree/meta/recipes-core/base-passwd/base-passwd_3.5.29.bb>`
-for an example.
-
-::
+Here is a partial example table that reports on all the recipes::
$ devtool check-upgrade-status
...
@@ -398,6 +389,16 @@ for an example.
INFO: base-passwd 3.5.29 3.5.51 Anuj Mittal <anuj.mittal@intel.com> cannot be updated due to: Version 3.5.38 requires cdebconf for update-passwd utility
...
+Notice the reported reason for not upgrading the ``base-passwd`` recipe.
+In this example, while a new version is available upstream, you do not
+want to use it because the dependency on ``cdebconf`` is not easily
+satisfied. Maintainers can explicit the reason that is shown by adding
+the :term:`RECIPE_NO_UPDATE_REASON` variable to the corresponding recipe.
+See :yocto_git:`base-passwd.bb </poky/tree/meta/recipes-core/base-passwd/base-passwd_3.5.29.bb?h=kirkstone>`
+for an example::
+
+ RECIPE_NO_UPDATE_REASON = "Version 3.5.38 requires cdebconf for update-passwd utility"
+
Last but not least, you may set :term:`UPSTREAM_VERSION_UNKNOWN` to ``1``
in a recipe when there's currently no way to determine its latest upstream
version.
@@ -410,7 +411,7 @@ Upgrading a Recipe
As software matures, upstream recipes are upgraded to newer versions. As
a developer, you need to keep your local recipes up-to-date with the
upstream version releases. There are several ways of upgrading recipes.
-You can read about them in the ":ref:`dev-manual/common-tasks:upgrading recipes`"
+You can read about them in the ":ref:`dev-manual/upgrading-recipes:upgrading recipes`"
section of the Yocto Project Development Tasks Manual. This section
overviews the ``devtool upgrade`` command.
@@ -438,7 +439,7 @@ You can read more on the ``devtool upgrade`` workflow in the
":ref:`sdk-manual/extensible:use \`\`devtool upgrade\`\` to create a version of the recipe that supports a newer version of the software`"
section in the Yocto Project Application Development and the Extensible
Software Development Kit (eSDK) manual. You can also see an example of
-how to use ``devtool upgrade`` in the ":ref:`dev-manual/common-tasks:using \`\`devtool upgrade\`\``"
+how to use ``devtool upgrade`` in the ":ref:`dev-manual/upgrading-recipes:using \`\`devtool upgrade\`\``"
section in the Yocto Project Development Tasks Manual.
.. _devtool-resetting-a-recipe:
@@ -555,8 +556,7 @@ Use the ``devtool undeploy-target`` command to remove deployed build
output from the target machine. For the ``devtool undeploy-target``
command to work, you must have previously used the
":ref:`devtool deploy-target <ref-manual/devtool-reference:deploying your software on the target machine>`"
-command.
-::
+command::
$ devtool undeploy-target recipe target
@@ -598,7 +598,7 @@ The ``devtool status`` command has no command-line options::
$ devtool status
-Following is sample output after using
+Here is sample output after using
:ref:`devtool add <ref-manual/devtool-reference:adding a new recipe to the workspace layer>`
to create and add the ``mtr_0.86.bb`` recipe to the ``workspace`` directory::
diff --git a/documentation/ref-manual/faq.rst b/documentation/ref-manual/faq.rst
index e06b5e6caa..bab284bbfd 100644
--- a/documentation/ref-manual/faq.rst
+++ b/documentation/ref-manual/faq.rst
@@ -4,9 +4,15 @@
FAQ
***
-**Q:** How does Poky differ from :oe_home:`OpenEmbedded <>`?
+.. contents::
-**A:** The term ``Poky`` refers to the specific reference build
+General questions
+=================
+
+How does Poky differ from OpenEmbedded?
+---------------------------------------
+
+The term ``Poky`` refers to the specific reference build
system that the Yocto Project provides. Poky is based on
:term:`OpenEmbedded-Core (OE-Core)` and :term:`BitBake`. Thus, the
generic term used here for the build system is the "OpenEmbedded build
@@ -15,19 +21,10 @@ OpenEmbedded, with changes always being merged to OE-Core or BitBake
first before being pulled back into Poky. This practice benefits both
projects immediately.
-**Q:** My development system does not meet the required Git, tar, and
-Python versions. In particular, I do not have Python &MIN_PYTHON_VERSION; or greater.
-Can I still use the Yocto Project?
-
-**A:** You can get the required tools on your host development system a
-couple different ways (i.e. building a tarball or downloading a
-tarball). See the
-":ref:`ref-manual/system-requirements:required git, tar, python and gcc versions`"
-section for steps on how to update your build tools.
-
-**Q:** How can you claim Poky / OpenEmbedded-Core is stable?
+How can you claim Poky / OpenEmbedded-Core is stable?
+-----------------------------------------------------
-**A:** There are three areas that help with stability;
+There are three areas that help with stability;
- The Yocto Project team keeps :term:`OpenEmbedded-Core (OE-Core)` small and
focused, containing around 830 recipes as opposed to the thousands
@@ -37,93 +34,108 @@ section for steps on how to update your build tools.
- The Yocto Project team runs manual and automated tests using a small,
fixed set of reference hardware as well as emulated targets.
-- The Yocto Project uses an autobuilder, which provides continuous
- build and integration tests.
+- The Yocto Project uses an :yocto_ab:`autobuilder <>`, which provides
+ continuous build and integration tests.
-**Q:** How do I get support for my board added to the Yocto Project?
+Are there any products built using the OpenEmbedded build system?
+-----------------------------------------------------------------
-**A:** Support for an additional board is added by creating a Board
-Support Package (BSP) layer for it. For more information on how to
-create a BSP layer, see the
-":ref:`dev-manual/common-tasks:understanding and creating layers`"
-section in the Yocto Project Development Tasks Manual and the
-:doc:`/bsp-guide/index`.
+See :yocto_wiki:`Products that use the Yocto Project
+</Project_Users#Products_that_use_the_Yocto_Project>` in the Yocto Project
+Wiki. Don't hesitate to contribute to this page if you know other such
+products.
-Usually, if the board is not completely exotic, adding support in the
-Yocto Project is fairly straightforward.
+Building environment
+====================
-**Q:** Are there any products built using the OpenEmbedded build system?
+Missing dependencies on the development system?
+-----------------------------------------------
-**A:** The software running on the `Vernier
-LabQuest <https://vernier.com/labquest/>`__ is built using the
-OpenEmbedded build system. See the `Vernier
-LabQuest <https://www.vernier.com/products/interfaces/labq/>`__ website
-for more information. There are a number of pre-production devices using
-the OpenEmbedded build system and the Yocto Project team announces them
-as soon as they are released.
+If your development system does not meet the required Git, tar, and
+Python versions, you can get the required tools on your host development
+system in different ways (i.e. building a tarball or downloading a
+tarball). See the ":ref:`ref-manual/system-requirements:required git, tar, python, make and gcc versions`"
+section for steps on how to update your build tools.
-**Q:** What does the OpenEmbedded build system produce as output?
+How does OpenEmbedded fetch source code? Will it work through a firewall or proxy server?
+-----------------------------------------------------------------------------------------
-**A:** Because you can use the same set of recipes to create output of
-various formats, the output of an OpenEmbedded build depends on how you
-start it. Usually, the output is a flashable image ready for the target
-device.
+The way the build system obtains source code is highly
+configurable. You can setup the build system to get source code in most
+environments if HTTP transport is available.
-**Q:** How do I add my package to the Yocto Project?
+When the build system searches for source code, it first tries the local
+download directory. If that location fails, Poky tries
+:term:`PREMIRRORS`, the upstream source, and then
+:term:`MIRRORS` in that order.
-**A:** To add a package, you need to create a BitBake recipe. For
-information on how to create a BitBake recipe, see the
-":ref:`dev-manual/common-tasks:writing a new recipe`"
-section in the Yocto Project Development Tasks Manual.
+Assuming your distribution is "poky", the OpenEmbedded build system uses
+the Yocto Project source :term:`PREMIRRORS` by default for SCM-based
+sources, upstreams for normal tarballs, and then falls back to a number
+of other mirrors including the Yocto Project source mirror if those
+fail.
-**Q:** Do I have to reflash my entire board with a new Yocto Project
-image when recompiling a package?
+As an example, you could add a specific server for the build system to
+attempt before any others by adding something like the following to the
+``local.conf`` configuration file::
-**A:** The OpenEmbedded build system can build packages in various
-formats such as IPK for OPKG, Debian package (``.deb``), or RPM. You can
-then upgrade the packages using the package tools on the device, much
-like on a desktop distribution such as Ubuntu or Fedora. However,
-package management on the target is entirely optional.
+ PREMIRRORS:prepend = "\
+ git://.*/.* &YOCTO_DL_URL;/mirror/sources/ \
+ ftp://.*/.* &YOCTO_DL_URL;/mirror/sources/ \
+ http://.*/.* &YOCTO_DL_URL;/mirror/sources/ \
+ https://.*/.* &YOCTO_DL_URL;/mirror/sources/"
-**Q:** I see the error
-'``chmod: XXXXX new permissions are r-xrwxrwx, not r-xr-xr-x``'. What is
-wrong?
+These changes cause the build system to intercept Git, FTP, HTTP, and
+HTTPS requests and direct them to the ``http://`` sources mirror. You
+can use ``file://`` URLs to point to local directories or network shares
+as well.
-**A:** You are probably running the build on an NTFS filesystem. Use
-``ext2``, ``ext3``, or ``ext4`` instead.
+Another option is to set::
-**Q:** I see lots of 404 responses for files when the OpenEmbedded build
-system is trying to download sources. Is something wrong?
+ BB_NO_NETWORK = "1"
-**A:** Nothing is wrong. The OpenEmbedded build system checks any
-configured source mirrors before downloading from the upstream sources.
-The build system does this searching for both source archives and
-pre-checked out versions of SCM-managed software. These checks help in
-large installations because it can reduce load on the SCM servers
-themselves. The address above is one of the default mirrors configured
-into the build system. Consequently, if an upstream source disappears,
-the team can place sources there so builds continue to work.
+This statement tells BitBake to issue an error
+instead of trying to access the Internet. This technique is useful if
+you want to ensure code builds only from local sources.
-**Q:** I have machine-specific data in a package for one machine only
-but the package is being marked as machine-specific in all cases, how do
-I prevent this?
+Here is another technique::
+
+ BB_FETCH_PREMIRRORONLY = "1"
-**A:** Set :term:`SRC_URI_OVERRIDES_PACKAGE_ARCH` = "0" in the ``.bb`` file
-but make sure the package is manually marked as machine-specific for the
-case that needs it. The code that handles
-:term:`SRC_URI_OVERRIDES_PACKAGE_ARCH` is in the
-``meta/classes/base.bbclass`` file.
+This statement limits the build system to pulling source from the
+:term:`PREMIRRORS` only. Again, this technique is useful for reproducing
+builds.
-**Q:** I'm behind a firewall and need to use a proxy server. How do I do
-that?
+Here is yet another technique::
-**A:** Most source fetching by the OpenEmbedded build system is done by
+ BB_GENERATE_MIRROR_TARBALLS = "1"
+
+This statement tells the build system to generate mirror tarballs. This
+technique is useful if you want to create a mirror server. If not,
+however, the technique can simply waste time during the build.
+
+Finally, consider an example where you are behind an HTTP-only firewall.
+You could make the following changes to the ``local.conf`` configuration
+file as long as the :term:`PREMIRRORS` server is current::
+
+ PREMIRRORS:prepend = "\
+ git://.*/.* &YOCTO_DL_URL;/mirror/sources/ \
+ ftp://.*/.* &YOCTO_DL_URL;/mirror/sources/ \
+ http://.*/.* &YOCTO_DL_URL;/mirror/sources/ \
+ https://.*/.* &YOCTO_DL_URL;/mirror/sources/"
+ BB_FETCH_PREMIRRORONLY = "1"
+
+These changes would cause the build system to successfully fetch source
+over HTTP and any network accesses to anything other than the
+:term:`PREMIRRORS` would fail.
+
+Most source fetching by the OpenEmbedded build system is done by
``wget`` and you therefore need to specify the proxy settings in a
``.wgetrc`` file, which can be in your home directory if you are a
single user or can be in ``/usr/local/etc/wgetrc`` as a global user
file.
-Following is the applicable code for setting various proxy types in the
+Here is the applicable code for setting various proxy types in the
``.wgetrc`` file. By default, these settings are disabled with comments.
To use them, remove the comments::
@@ -136,24 +148,53 @@ To use them, remove the comments::
# If you do not want to use proxy at all, set this to off.
#use_proxy = on
+The build system also accepts ``http_proxy``, ``ftp_proxy``, ``https_proxy``,
+and ``all_proxy`` set as to standard shell environment variables to redirect
+requests through proxy servers.
+
The Yocto Project also includes a
-``meta-poky/conf/site.conf.sample`` file that shows how to configure CVS
-and Git proxy servers if needed. For more information on setting up
-various proxy types and configuring proxy servers, see the
-":yocto_wiki:`Working Behind a Network Proxy </Working_Behind_a_Network_Proxy>`"
-Wiki page.
+``meta-poky/conf/templates/default/site.conf.sample`` file that shows
+how to configure CVS and Git proxy servers if needed.
+
+.. note::
-**Q:** What's the difference between ``target`` and ``target-native``?
+ You can find more information on the
+ ":yocto_wiki:`Working Behind a Network Proxy </Working_Behind_a_Network_Proxy>`"
+ Wiki page.
-**A:** The ``*-native`` targets are designed to run on the system being
-used for the build. These are usually tools that are needed to assist
-the build in some way such as ``quilt-native``, which is used to apply
-patches. The non-native version is the one that runs on the target
-device.
+Using the OpenEmbedded Build system
+===================================
+
+How do I use an external toolchain?
+-----------------------------------
+
+See the ":ref:`dev-manual/external-toolchain:optionally using an external toolchain`"
+section in the Development Task manual.
-**Q:** I'm seeing random build failures. Help?!
+Why do I get chmod permission issues?
+-------------------------------------
-**A:** If the same build is failing in totally different and random
+If you see the error
+``chmod: XXXXX new permissions are r-xrwxrwx, not r-xr-xr-x``,
+you are probably running the build on an NTFS filesystem. Instead,
+run the build system on a partition with a modern Linux filesystem such as
+``ext4``, ``btrfs`` or ``xfs``.
+
+I see many 404 errors trying to download sources. Is anything wrong?
+--------------------------------------------------------------------
+
+Nothing is wrong. The OpenEmbedded build system checks any
+configured source mirrors before downloading from the upstream sources.
+The build system does this searching for both source archives and
+pre-checked out versions of SCM-managed software. These checks help in
+large installations because it can reduce load on the SCM servers
+themselves. This can also allow builds to continue to work if an
+upstream source disappears.
+
+Why do I get random build failures?
+-----------------------------------
+
+If the same build is failing in totally different and random
ways, the most likely explanation is:
- The hardware you are running the build on has some problem.
@@ -166,219 +207,139 @@ causes lots of network, disk and CPU activity and is sensitive to even
single-bit failures in any of these areas. True random failures have
always been traced back to hardware or virtualization issues.
-**Q:** When I try to build a native recipe, the build fails with
-``iconv.h`` problems.
+Why does the build fail with ``iconv.h`` problems?
+--------------------------------------------------
-**A:** If you get an error message that indicates GNU ``libiconv`` is
-not in use but ``iconv.h`` has been included from ``libiconv``, you need
-to check to see if you have a previously installed version of the header
-file in ``/usr/local/include``.
-::
+When you try to build a native recipe, you may get an error message that
+indicates that GNU ``libiconv`` is not in use but ``iconv.h`` has been
+included from ``libiconv``::
#error GNU libiconv not in use but included iconv.h is from libiconv
-If you find a previously installed
-file, you should either uninstall it or temporarily rename it and try
-the build again.
+When this happens, you need to check whether you have a previously
+installed version of the header file in ``/usr/local/include/``.
+If that's the case, you should either uninstall it or temporarily rename
+it and try the build again.
This issue is just a single manifestation of "system leakage" issues
caused when the OpenEmbedded build system finds and uses previously
installed files during a native build. This type of issue might not be
-limited to ``iconv.h``. Be sure that leakage cannot occur from
+limited to ``iconv.h``. Make sure that leakage cannot occur from
``/usr/local/include`` and ``/opt`` locations.
-**Q:** What do we need to ship for license compliance?
-
-**A:** This is a difficult question and you need to consult your lawyer
-for the answer for your specific case. It is worth bearing in mind that
-for GPL compliance, there needs to be enough information shipped to
-allow someone else to rebuild and produce the same end result you are
-shipping. This means sharing the source code, any patches applied to it,
-and also any configuration information about how that package was
-configured and built.
-
-You can find more information on licensing in the
-":ref:`overview-manual/development-environment:licensing`"
-section in the Yocto
-Project Overview and Concepts Manual and also in the
-":ref:`dev-manual/common-tasks:maintaining open source license compliance during your product's lifecycle`"
-section in the Yocto Project Development Tasks Manual.
-
-**Q:** How do I disable the cursor on my touchscreen device?
-
-**A:** You need to create a form factor file as described in the
-":ref:`bsp-guide/bsp:miscellaneous bsp-specific recipe files`" section in
-the Yocto Project Board Support Packages (BSP) Developer's Guide. Set
-the ``HAVE_TOUCHSCREEN`` variable equal to one as follows::
-
- HAVE_TOUCHSCREEN=1
-
-**Q:** How do I make sure connected network interfaces are brought up by
-default?
-
-**A:** The default interfaces file provided by the netbase recipe does
-not automatically bring up network interfaces. Therefore, you will need
-to add a BSP-specific netbase that includes an interfaces file. See the
-":ref:`bsp-guide/bsp:miscellaneous bsp-specific recipe files`" section in
-the Yocto Project Board Support Packages (BSP) Developer's Guide for
-information on creating these types of miscellaneous recipe files.
-
-For example, add the following files to your layer::
-
- meta-MACHINE/recipes-bsp/netbase/netbase/MACHINE/interfaces
- meta-MACHINE/recipes-bsp/netbase/netbase_5.0.bbappend
-
-**Q:** How do I create images with more free space?
-
-**A:** By default, the OpenEmbedded build system creates images that are
-1.3 times the size of the populated root filesystem. To affect the image
-size, you need to set various configurations:
-
-- *Image Size:* The OpenEmbedded build system uses the
- :term:`IMAGE_ROOTFS_SIZE` variable to define
- the size of the image in Kbytes. The build system determines the size
- by taking into account the initial root filesystem size before any
- modifications such as requested size for the image and any requested
- additional free disk space to be added to the image.
-
-- *Overhead:* Use the
- :term:`IMAGE_OVERHEAD_FACTOR` variable
- to define the multiplier that the build system applies to the initial
- image size, which is 1.3 by default.
-
-- *Additional Free Space:* Use the
- :term:`IMAGE_ROOTFS_EXTRA_SPACE`
- variable to add additional free space to the image. The build system
- adds this space to the image after it determines its
- :term:`IMAGE_ROOTFS_SIZE`.
-
-**Q:** Why don't you support directories with spaces in the pathnames?
-
-**A:** The Yocto Project team has tried to do this before but too many
-of the tools the OpenEmbedded build system depends on, such as
-``autoconf``, break when they find spaces in pathnames. Until that
-situation changes, the team will not support spaces in pathnames.
+Why don't other recipes find the files provided by my ``*-native`` recipe?
+--------------------------------------------------------------------------
-**Q:** How do I use an external toolchain?
+Files provided by your native recipe could be missing from the native
+sysroot, your recipe could also be installing to the wrong place, or you
+could be getting permission errors during the :ref:`ref-tasks-install`
+task in your recipe.
-**A:** The toolchain configuration is very flexible and customizable. It
-is primarily controlled with the :term:`TCMODE` variable. This variable
-controls which ``tcmode-*.inc`` file to include from the
-``meta/conf/distro/include`` directory within the :term:`Source Directory`.
-
-The default value of :term:`TCMODE` is "default", which tells the
-OpenEmbedded build system to use its internally built toolchain (i.e.
-``tcmode-default.inc``). However, other patterns are accepted. In
-particular, "external-\*" refers to external toolchains. One example is
-the Sourcery G++ Toolchain. The support for this toolchain resides in
-the separate ``meta-sourcery`` layer at
-https://github.com/MentorEmbedded/meta-sourcery/.
-
-In addition to the toolchain configuration, you also need a
-corresponding toolchain recipe file. This recipe file needs to package
-up any pre-built objects in the toolchain such as ``libgcc``,
-``libstdcc++``, any locales, and ``libc``.
-
-**Q:** How does the OpenEmbedded build system obtain source code and
-will it work behind my firewall or proxy server?
-
-**A:** The way the build system obtains source code is highly
-configurable. You can setup the build system to get source code in most
-environments if HTTP transport is available.
-
-When the build system searches for source code, it first tries the local
-download directory. If that location fails, Poky tries
-:term:`PREMIRRORS`, the upstream source, and then
-:term:`MIRRORS` in that order.
-
-Assuming your distribution is "poky", the OpenEmbedded build system uses
-the Yocto Project source :term:`PREMIRRORS` by default for SCM-based
-sources, upstreams for normal tarballs, and then falls back to a number
-of other mirrors including the Yocto Project source mirror if those
-fail.
-
-As an example, you could add a specific server for the build system to
-attempt before any others by adding something like the following to the
-``local.conf`` configuration file::
+This situation happens when the build system used by a package does not
+recognize the environment variables supplied to it by :term:`BitBake`. The
+incident that prompted this FAQ entry involved a Makefile that used an
+environment variable named ``BINDIR`` instead of the more standard
+variable ``bindir``. The makefile's hardcoded default value of
+"/usr/bin" worked most of the time, but not for the recipe's ``-native``
+variant. For another example, permission errors might be caused by a
+Makefile that ignores ``DESTDIR`` or uses a different name for that
+environment variable. Check the build system of the package to see if
+these kinds of issues exist.
- PREMIRRORS:prepend = "\
- git://.*/.* &YOCTO_DL_URL;/mirror/sources/ \
- ftp://.*/.* &YOCTO_DL_URL;/mirror/sources/ \
- http://.*/.* &YOCTO_DL_URL;/mirror/sources/ \
- https://.*/.* &YOCTO_DL_URL;/mirror/sources/"
+Can I get rid of build output so I can start over?
+--------------------------------------------------
-These changes cause the build system to intercept Git, FTP, HTTP, and
-HTTPS requests and direct them to the ``http://`` sources mirror. You
-can use ``file://`` URLs to point to local directories or network shares
-as well.
+Yes --- you can easily do this. When you use BitBake to build an
+image, all the build output goes into the directory created when you run
+the build environment setup script (i.e. :ref:`structure-core-script`).
+By default, this :term:`Build Directory` is named ``build`` but can be named
+anything you want.
-Here are other options::
+Within the :term:`Build Directory`, is the ``tmp`` directory. To remove all the
+build output yet preserve any source code or downloaded files from
+previous builds, simply remove the ``tmp`` directory.
- BB_NO_NETWORK = "1"
+Customizing generated images
+============================
-This statement tells BitBake to issue an error
-instead of trying to access the Internet. This technique is useful if
-you want to ensure code builds only from local sources.
+What does the OpenEmbedded build system produce as output?
+----------------------------------------------------------
-Here is another technique::
+Because you can use the same set of recipes to create output of
+various formats, the output of an OpenEmbedded build depends on how you
+start it. Usually, the output is a flashable image ready for the target
+device.
- BB_FETCH_PREMIRRORONLY = "1"
+How do I make the Yocto Project support my board?
+-------------------------------------------------
-This statement
-limits the build system to pulling source from the :term:`PREMIRRORS` only.
-Again, this technique is useful for reproducing builds.
+Support for an additional board is added by creating a Board
+Support Package (BSP) layer for it. For more information on how to
+create a BSP layer, see the
+":ref:`dev-manual/layers:understanding and creating layers`"
+section in the Yocto Project Development Tasks Manual and the
+:doc:`/bsp-guide/index`.
-Here is another technique::
+Usually, if the board is not completely exotic, adding support in the
+Yocto Project is fairly straightforward.
- BB_GENERATE_MIRROR_TARBALLS = "1"
+How do I make the Yocto Project support my package?
+---------------------------------------------------
-This
-statement tells the build system to generate mirror tarballs. This
-technique is useful if you want to create a mirror server. If not,
-however, the technique can simply waste time during the build.
+To add a package, you need to create a BitBake recipe. For
+information on how to create a BitBake recipe, see the
+":ref:`dev-manual/new-recipe:writing a new recipe`"
+section in the Yocto Project Development Tasks Manual.
-Finally, consider an example where you are behind an HTTP-only firewall.
-You could make the following changes to the ``local.conf`` configuration
-file as long as the :term:`PREMIRRORS` server is current::
+What do I need to ship for license compliance?
+----------------------------------------------
- PREMIRRORS:prepend = "\
- git://.*/.* &YOCTO_DL_URL;/mirror/sources/ \
- ftp://.*/.* &YOCTO_DL_URL;/mirror/sources/ \
- http://.*/.* &YOCTO_DL_URL;/mirror/sources/ \
- https://.*/.* &YOCTO_DL_URL;/mirror/sources/"
- BB_FETCH_PREMIRRORONLY = "1"
+This is a difficult question and you need to consult your lawyer
+for the answer for your specific case. It is worth bearing in mind that
+for GPL compliance, there needs to be enough information shipped to
+allow someone else to rebuild and produce the same end result you are
+shipping. This means sharing the source code, any patches applied to it,
+and also any configuration information about how that package was
+configured and built.
-These changes would cause the build system to successfully fetch source
-over HTTP and any network accesses to anything other than the
-:term:`PREMIRRORS` would fail.
+You can find more information on licensing in the
+":ref:`overview-manual/development-environment:licensing`"
+section in the Yocto Project Overview and Concepts Manual and also in the
+":ref:`dev-manual/licenses:maintaining open source license compliance during your product's lifecycle`"
+section in the Yocto Project Development Tasks Manual.
-The build system also honors the standard shell environment variables
-``http_proxy``, ``ftp_proxy``, ``https_proxy``, and ``all_proxy`` to
-redirect requests through proxy servers.
+Do I have to make a full reflash after recompiling one package?
+---------------------------------------------------------------
-.. note::
+The OpenEmbedded build system can build packages in various
+formats such as IPK for OPKG, Debian package (``.deb``), or RPM. You can
+then upgrade only the modified packages using the package tools on the device,
+much like on a desktop distribution such as Ubuntu or Fedora. However,
+package management on the target is entirely optional.
- You can find more information on the
- ":yocto_wiki:`Working Behind a Network Proxy </Working_Behind_a_Network_Proxy>`"
- Wiki page.
+How to prevent my package from being marked as machine specific?
+----------------------------------------------------------------
-**Q:** Can I get rid of build output so I can start over?
+If you have machine-specific data in a package for one machine only
+but the package is being marked as machine-specific in all cases,
+you can set :term:`SRC_URI_OVERRIDES_PACKAGE_ARCH` = "0" in the ``.bb`` file.
+However, but make sure the package is manually marked as machine-specific for the
+case that needs it. The code that handles :term:`SRC_URI_OVERRIDES_PACKAGE_ARCH`
+is in the ``meta/classes-global/base.bbclass`` file.
-**A:** Yes - you can easily do this. When you use BitBake to build an
-image, all the build output goes into the directory created when you run
-the build environment setup script (i.e.
-:ref:`structure-core-script`). By default, this :term:`Build Directory`
-is named ``build`` but can be named
-anything you want.
+What's the difference between ``target`` and ``target-native``?
+---------------------------------------------------------------
-Within the Build Directory, is the ``tmp`` directory. To remove all the
-build output yet preserve any source code or downloaded files from
-previous builds, simply remove the ``tmp`` directory.
+The ``*-native`` targets are designed to run on the system being
+used for the build. These are usually tools that are needed to assist
+the build in some way such as ``quilt-native``, which is used to apply
+patches. The non-native version is the one that runs on the target
+device.
-**Q:** Why do ``${bindir}`` and ``${libdir}`` have strange values for
-``-native`` recipes?
+Why do ``${bindir}`` and ``${libdir}`` have strange values for ``-native`` recipes?
+-----------------------------------------------------------------------------------
-**A:** Executables and libraries might need to be used from a directory
+Executables and libraries might need to be used from a directory
other than the directory into which they were initially installed.
Complicating this situation is the fact that sometimes these executables
and libraries are compiled with the expectation of being run from that
@@ -405,20 +366,13 @@ system of that image. Thus, the build system provides a value of
forth.
Meanwhile, ``DESTDIR`` is a path within the :term:`Build Directory`.
-However, when the recipe builds a
-native program (i.e. one that is intended to run on the build machine),
-that program is never installed directly to the build machine's root
-file system. Consequently, the build system uses paths within the Build
-Directory for ``DESTDIR``, ``bindir`` and related variables. To better
-understand this, consider the following two paths where the first is
-relatively normal and the second is not:
-
-.. note::
-
- Due to these lengthy examples, the paths are artificially broken
- across lines for readability.
-
-::
+However, when the recipe builds a native program (i.e. one that is
+intended to run on the build machine), that program is never installed
+directly to the build machine's root file system. Consequently, the build
+system uses paths within the Build Directory for ``DESTDIR``, ``bindir``
+and related variables. To better understand this, consider the following
+two paths (artificially broken across lines for readability) where the
+first is relatively normal and the second is not::
/home/maxtothemax/poky-bootchart2/build/tmp/work/i586-poky-linux/zlib/
1.2.8-r0/sysroot-destdir/usr/bin
@@ -427,32 +381,76 @@ relatively normal and the second is not:
zlib-native/1.2.8-r0/sysroot-destdir/home/maxtothemax/poky-bootchart2/
build/tmp/sysroots/x86_64-linux/usr/bin
-Even if the paths look unusual,
-they both are correct - the first for a target and the second for a
-native recipe. These paths are a consequence of the ``DESTDIR``
-mechanism and while they appear strange, they are correct and in
-practice very effective.
+Even if the paths look unusual, they both are correct --- the first for
+a target and the second for a native recipe. These paths are a consequence
+of the ``DESTDIR`` mechanism and while they appear strange, they are correct
+and in practice very effective.
-**Q:** The files provided by my ``*-native`` recipe do not appear to be
-available to other recipes. Files are missing from the native sysroot,
-my recipe is installing to the wrong place, or I am getting permissions
-errors during the do_install task in my recipe! What is wrong?
+How do I create images with more free space?
+--------------------------------------------
-**A:** This situation results when a build system does not recognize the
-environment variables supplied to it by :term:`BitBake`. The
-incident that prompted this FAQ entry involved a Makefile that used an
-environment variable named ``BINDIR`` instead of the more standard
-variable ``bindir``. The makefile's hardcoded default value of
-"/usr/bin" worked most of the time, but not for the recipe's ``-native``
-variant. For another example, permissions errors might be caused by a
-Makefile that ignores ``DESTDIR`` or uses a different name for that
-environment variable. Check the build system to see if these kinds
-of issues exist.
+By default, the OpenEmbedded build system creates images that are
+1.3 times the size of the populated root filesystem. To affect the image
+size, you need to set various configurations:
+
+- *Image Size:* The OpenEmbedded build system uses the
+ :term:`IMAGE_ROOTFS_SIZE` variable to define
+ the size of the image in Kbytes. The build system determines the size
+ by taking into account the initial root filesystem size before any
+ modifications such as requested size for the image and any requested
+ additional free disk space to be added to the image.
+
+- *Overhead:* Use the
+ :term:`IMAGE_OVERHEAD_FACTOR` variable
+ to define the multiplier that the build system applies to the initial
+ image size, which is 1.3 by default.
+
+- *Additional Free Space:* Use the
+ :term:`IMAGE_ROOTFS_EXTRA_SPACE`
+ variable to add additional free space to the image. The build system
+ adds this space to the image after it determines its
+ :term:`IMAGE_ROOTFS_SIZE`.
+
+Why aren't spaces in path names supported?
+------------------------------------------
+
+The Yocto Project team has tried to do this before but too many
+of the tools the OpenEmbedded build system depends on, such as
+``autoconf``, break when they find spaces in pathnames. Until that
+situation changes, the team will not support spaces in pathnames.
+
+I'm adding a binary in a recipe. Why is it different in the image?
+------------------------------------------------------------------
+
+The first most obvious change is the system stripping debug symbols from
+it. Setting :term:`INHIBIT_PACKAGE_STRIP` to stop debug symbols being
+stripped and/or :term:`INHIBIT_PACKAGE_DEBUG_SPLIT` to stop debug symbols
+being split into a separate file will ensure the binary is unchanged.
-**Q:** I'm adding a binary in a recipe but it's different in the image, what is
-changing it?
+Issues on the running system
+============================
-**A:** The first most obvious change is the system stripping debug symbols from
-it. Setting :term:`INHIBIT_PACKAGE_STRIP` to stop debug symbols being stripped and/or
-:term:`INHIBIT_PACKAGE_DEBUG_SPLIT` to stop debug symbols being split into a separate
-file will ensure the binary is unchanged.
+How do I disable the cursor on my touchscreen device?
+-----------------------------------------------------
+
+You need to create a form factor file as described in the
+":ref:`bsp-guide/bsp:miscellaneous bsp-specific recipe files`" section in
+the Yocto Project Board Support Packages (BSP) Developer's Guide. Set
+the ``HAVE_TOUCHSCREEN`` variable equal to one as follows::
+
+ HAVE_TOUCHSCREEN=1
+
+How to always bring up connected network interfaces?
+----------------------------------------------------
+
+The default interfaces file provided by the netbase recipe does
+not automatically bring up network interfaces. Therefore, you will need
+to add a BSP-specific netbase that includes an interfaces file. See the
+":ref:`bsp-guide/bsp:miscellaneous bsp-specific recipe files`" section in
+the Yocto Project Board Support Packages (BSP) Developer's Guide for
+information on creating these types of miscellaneous recipe files.
+
+For example, add the following files to your layer::
+
+ meta-MACHINE/recipes-bsp/netbase/netbase/MACHINE/interfaces
+ meta-MACHINE/recipes-bsp/netbase/netbase_5.0.bbappend
diff --git a/documentation/ref-manual/features.rst b/documentation/ref-manual/features.rst
index f7abb417ba..2ea946b31d 100644
--- a/documentation/ref-manual/features.rst
+++ b/documentation/ref-manual/features.rst
@@ -6,7 +6,7 @@ Features
This chapter provides a reference of shipped machine and distro features
you can include as part of your image, a reference on image features you
-can select, and a reference on feature backfilling.
+can select, and a reference on :ref:`ref-features-backfill`.
Features provide a mechanism for working out which packages should be
included in the generated images. Distributions can select which
@@ -52,8 +52,6 @@ Project metadata:
- *alsa:* Hardware has ALSA audio drivers
-- *apm:* Hardware uses APM (or APM emulation)
-
- *bluetooth:* Hardware has integrated BT
- *efi:* Support for booting through EFI
@@ -62,6 +60,8 @@ Project metadata:
- *keyboard:* Hardware has a keyboard
+- *numa:* Hardware has non-uniform memory access
+
- *pcbios:* Support for booting through BIOS
- *pci:* Hardware has a PCI bus
@@ -70,6 +70,8 @@ Project metadata:
- *phone:* Mobile phone (voice) support
+- *qemu-usermode:* QEMU can support user-mode emulation for this machine
+
- *qvga:* Machine has a QVGA (320x240) display
- *rtc:* Machine has a Real-Time Clock
@@ -100,7 +102,9 @@ packages, and they can go beyond simply controlling the installation of
a package or packages. In most cases, the presence or absence of a
feature translates to the appropriate option supplied to the configure
script during the :ref:`ref-tasks-configure` task for
-the recipes that optionally support the feature.
+the recipes that optionally support the feature. Appropriate options
+must be supplied, and enabling/disabling :term:`PACKAGECONFIG` for the
+concerned packages is one way of supplying such options.
Some distro features are also machine features. These select features
make sense to be controlled both at the machine and distribution
@@ -108,11 +112,22 @@ configuration level. See the
:term:`COMBINED_FEATURES` variable for more
information.
+.. note::
+
+ :term:`DISTRO_FEATURES` is normally independent of kernel configuration,
+ so if a feature specified in :term:`DISTRO_FEATURES` also relies on
+ support in the kernel, you will also need to ensure that support is
+ enabled in the kernel configuration.
+
This list only represents features as shipped with the Yocto Project
-metadata:
+metadata, as extra layers can define their own:
+
+- *3g:* Include support for cellular data.
-- *alsa:* Include ALSA support (OSS compatibility kernel modules
- installed if available).
+- *acl:* Include :wikipedia:`Access Control List <Access-control_list>` support.
+
+- *alsa:* Include :wikipedia:`Advanced Linux Sound Architecture <Advanced_Linux_Sound_Architecture>`
+ support (OSS compatibility kernel modules installed if available).
- *api-documentation:* Enables generation of API documentation during
recipe builds. The resulting documentation is added to SDK tarballs
@@ -125,39 +140,84 @@ metadata:
- *cramfs:* Include CramFS support.
+- *debuginfod:* Include support for getting ELF debugging information through
+ a :ref:`debuginfod <dev-manual/debugging:using the debuginfod server method>`
+ server.
+
- *directfb:* Include DirectFB support.
- *ext2:* Include tools for supporting for devices with internal
HDD/Microdrive for storing files (instead of Flash only devices).
+- *gobject-introspection-data:* Include data to support
+ `GObject Introspection <https://gi.readthedocs.io/en/latest/>`__.
+
- *ipsec:* Include IPSec support.
+- *ipv4:* Include IPv4 support.
+
- *ipv6:* Include IPv6 support.
- *keyboard:* Include keyboard support (e.g. keymaps will be loaded
during boot).
+- *minidebuginfo:* Add minimal debug symbols :ref:`(minidebuginfo)<dev-manual/debugging:enabling minidebuginfo>`
+ to binary files containing, allowing ``coredumpctl`` and ``gdb`` to show symbolicated stack traces.
+
+- *multiarch:* Enable building applications with multiple architecture
+ support.
+
+- *ld-is-gold:* Use the :wikipedia:`gold <Gold_(linker)>`
+ linker instead of the standard GCC linker (bfd).
+
- *ldconfig:* Include support for ldconfig and ``ld.so.conf`` on the
target.
+- *lto:* Enable `Link-Time Optimisation <https://gcc.gnu.org/wiki/LinkTimeOptimization>`__.
+
+- *nfc:* Include support for
+ `Near Field Communication <https://en.wikipedia.org/wiki/Near-field_communication>`__.
+
- *nfs:* Include NFS client support (for mounting NFS exports on
device).
+- *nls:* Include National Language Support (NLS).
+
- *opengl:* Include the Open Graphics Library, which is a
cross-language, multi-platform application programming interface used
for rendering two and three-dimensional graphics.
+- *overlayfs:* Include `OverlayFS <https://docs.kernel.org/filesystems/overlayfs.html>`__
+ support.
+
+- *pam:* Include :wikipedia:`Pluggable Authentication Module (PAM) <Pluggable_authentication_module>`
+ support.
+
- *pci:* Include PCI bus support.
- *pcmcia:* Include PCMCIA/CompactFlash support.
+- *polkit:* Include :wikipedia:`Polkit <Polkit>` support.
+
- *ppp:* Include PPP dialup support.
- *ptest:* Enables building the package tests where supported by
individual recipes. For more information on package tests, see the
- ":ref:`dev-manual/common-tasks:testing packages with ptest`" section
+ ":ref:`dev-manual/packages:testing packages with ptest`" section
in the Yocto Project Development Tasks Manual.
+- *pulseaudio:* Include support for
+ `PulseAudio <https://www.freedesktop.org/wiki/Software/PulseAudio/>`__.
+
+- *selinux:* Include support for
+ :wikipedia:`Security-Enhanced Linux (SELinux) <Security-Enhanced_Linux>`
+ (requires `meta-selinux <https://layers.openembedded.org/layerindex/layer/meta-selinux/>`__).
+
+- *seccomp:* Enables building applications with
+ :wikipedia:`seccomp <Seccomp>` support, to
+ allow them to strictly restrict the system calls that they are allowed
+ to invoke.
+
- *smbfs:* Include SMB networks client support (for mounting
Samba/Microsoft Windows shares on device).
@@ -166,6 +226,10 @@ metadata:
reduced shell overhead, and other features. This ``init`` manager is
used by many distributions.
+- *systemd-resolved:* Include support and use ``systemd-resolved`` as the
+ main DNS name resolver in ``glibc`` Name Service Switch. This is a DNS
+ resolver daemon from ``systemd``.
+
- *usbgadget:* Include USB Gadget Device support (for USB
networking/serial/storage).
@@ -176,6 +240,11 @@ metadata:
directories into their respective counterparts in the ``/usr``
directory to provide better package and application compatibility.
+- *vfat:* Include :wikipedia:`FAT filesystem <File_Allocation_Table>`
+ support.
+
+- *vulkan:* Include support for the :wikipedia:`Vulkan API <Vulkan>`.
+
- *wayland:* Include the Wayland display server protocol and the
library that supports it.
@@ -183,6 +252,12 @@ metadata:
- *x11:* Include the X server and libraries.
+- *xattr:* Include support for
+ :wikipedia:`extended file attributes <Extended_file_attributes>`.
+
+- *zeroconf:* Include support for
+ `zero configuration networking <https://en.wikipedia.org/wiki/Zero-configuration_networking>`__.
+
.. _ref-features-image:
Image Features
@@ -196,19 +271,21 @@ you can add several different predefined packages such as development
utilities or packages with debug information needed to investigate
application problems or profile applications.
-Here are the image features available for all images:
+The image features available for all images are:
-- *allow-empty-password:* Allows Dropbear and OpenSSH to accept root
- logins and logins from accounts having an empty password string.
+- *allow-empty-password:* Allows Dropbear and OpenSSH to accept
+ logins from accounts having an empty password string.
+
+- *allow-root-login:* Allows Dropbear and OpenSSH to accept root logins.
- *dbg-pkgs:* Installs debug symbol packages for all packages installed
in a given image.
- *debug-tweaks:* Makes an image suitable for development (e.g. allows
- root logins without passwords and enables post-installation logging).
- See the 'allow-empty-password', 'empty-root-password', and
- 'post-install-logging' features in this list for additional
- information.
+ root logins, logins without passwords ---including root ones, and enables
+ post-installation logging). See the ``allow-empty-password``,
+ ``allow-root-login``, ``empty-root-password``, and ``post-install-logging``
+ features in this list for additional information.
- *dev-pkgs:* Installs development packages (headers and extra library
links) for all packages installed in a given image.
@@ -216,8 +293,22 @@ Here are the image features available for all images:
- *doc-pkgs:* Installs documentation packages for all packages
installed in a given image.
-- *empty-root-password:* Sets the root password to an empty string,
- which allows logins with a blank password.
+- *empty-root-password:* This feature or ``debug-tweaks`` is required if
+ you want to allow root login with an empty password. If these features
+ are not present in :term:`IMAGE_FEATURES`, a non-empty password is
+ forced in ``/etc/passwd`` and ``/etc/shadow`` if such files exist.
+
+ .. note::
+ ``empty-root-password`` doesn't set an empty root password by itself.
+ You get an initial empty root password thanks to the
+ :oe_git:`base-passwd </openembedded-core/tree/meta/recipes-core/base-passwd/>`
+ and :oe_git:`shadow </openembedded-core/tree/meta/recipes-extended/shadow/>`
+ recipes, and the presence of ``empty-root-password`` or ``debug-tweaks``
+ just disables the mechanism which forces an non-empty password for the
+ root user.
+
+- *lic-pkgs:* Installs license packages for all packages installed in a
+ given image.
- *overlayfs-etc:* Configures the ``/etc`` directory to be in ``overlayfs``.
This allows to store device specific information elsewhere, especially
@@ -239,23 +330,40 @@ Here are the image features available for all images:
- *read-only-rootfs:* Creates an image whose root filesystem is
read-only. See the
- ":ref:`dev-manual/common-tasks:creating a read-only root filesystem`"
+ ":ref:`dev-manual/read-only-rootfs:creating a read-only root filesystem`"
section in the Yocto Project Development Tasks Manual for more
information.
+- *read-only-rootfs-delayed-postinsts:* when specified in conjunction
+ with ``read-only-rootfs``, specifies that post-install scripts are
+ still permitted (this assumes that the root filesystem will be made
+ writeable for the first boot; this feature does not do anything to
+ ensure that - it just disables the check for post-install scripts.)
+
+- *serial-autologin-root:* when specified in conjunction with
+ ``empty-root-password`` will automatically login as root on the
+ serial console. This of course opens up a security hole if the
+ serial console is potentially accessible to an attacker, so use
+ with caution.
+
- *splash:* Enables showing a splash screen during boot. By default,
this screen is provided by ``psplash``, which does allow
customization. If you prefer to use an alternative splash screen
- package, you can do so by setting the ``SPLASH`` variable to a
+ package, you can do so by setting the :term:`SPLASH` variable to a
different package name (or names) within the image recipe or at the
distro configuration level.
+- *stateless-rootfs:*: specifies that the image should be created as
+ stateless - when using ``systemd``, ``systemctl-native`` will not
+ be run on the image, leaving the image for population at runtime by
+ systemd.
+
- *staticdev-pkgs:* Installs static development packages, which are
static libraries (i.e. ``*.a`` files), for all packages installed in
a given image.
Some image features are available only when you inherit the
-:ref:`core-image <ref-classes-core-image>` class. The current list of
+:ref:`ref-classes-core-image` class. The current list of
these valid features is as follows:
- *hwcodecs:* Installs hardware acceleration codecs.
@@ -268,6 +376,21 @@ these valid features is as follows:
- *ssh-server-dropbear:* Installs the Dropbear minimal SSH server.
+ .. note::
+
+ As of the 4.1 release, the ``ssh-server-dropbear`` feature also
+ recommends the ``openssh-sftp-server`` package, which by default
+ will be pulled into the image. This is because recent versions of
+ the OpenSSH ``scp`` client now use the SFTP protocol, and thus
+ require an SFTP server to be present to connect to. However, if
+ you wish to use the Dropbear ssh server `without` the SFTP server
+ installed, you can either remove ``ssh-server-dropbear`` from
+ ``IMAGE_FEATURES`` and add ``dropbear`` to :term:`IMAGE_INSTALL`
+ instead, or alternatively still use the feature but set
+ :term:`BAD_RECOMMENDATIONS` as follows::
+
+ BAD_RECOMMENDATIONS += "openssh-sftp-server"
+
- *ssh-server-openssh:* Installs the OpenSSH SSH server, which is more
full-featured than Dropbear. Note that if both the OpenSSH SSH server
and the Dropbear minimal SSH server are present in
@@ -276,7 +399,7 @@ these valid features is as follows:
- *tools-debug:* Installs debugging tools such as ``strace`` and
``gdb``. For information on GDB, see the
- ":ref:`dev-manual/common-tasks:debugging with the gnu project debugger (gdb) remotely`" section
+ ":ref:`dev-manual/debugging:debugging with the gnu project debugger (gdb) remotely`" section
in the Yocto Project Development Tasks Manual. For information on
tracing and profiling, see the :doc:`/profile-manual/index`.
@@ -285,6 +408,8 @@ these valid features is as follows:
- *tools-testapps:* Installs device testing tools (e.g. touchscreen
debugging).
+- *weston:* Installs Weston (reference Wayland environment).
+
- *x11:* Installs the X server.
- *x11-base:* Installs the X server with a minimal environment.
@@ -296,58 +421,50 @@ these valid features is as follows:
Feature Backfilling
===================
-Sometimes it is necessary in the OpenEmbedded build system to extend
-:term:`MACHINE_FEATURES` or
-:term:`DISTRO_FEATURES` to control functionality
-that was previously enabled and not able to be disabled. For these
-cases, we need to add an additional feature item to appear in one of
-these variables, but we do not want to force developers who have
-existing values of the variables in their configuration to add the new
-feature in order to retain the same overall level of functionality.
-Thus, the OpenEmbedded build system has a mechanism to automatically
-"backfill" these added features into existing distro or machine
+Sometimes it is necessary in the OpenEmbedded build system to
+add new functionality to :term:`MACHINE_FEATURES` or
+:term:`DISTRO_FEATURES`, but at the same time, allow existing
+distributions or machine definitions to opt out of such new
+features, to retain the same overall level of functionality.
+
+To make this possible, the OpenEmbedded build system has a mechanism to
+automatically "backfill" features into existing distro or machine
configurations. You can see the list of features for which this is done
-by finding the
-:term:`DISTRO_FEATURES_BACKFILL` and
-:term:`MACHINE_FEATURES_BACKFILL`
-variables in the ``meta/conf/bitbake.conf`` file.
-
-Because such features are backfilled by default into all configurations
-as described in the previous paragraph, developers who wish to disable
-the new features need to be able to selectively prevent the backfilling
-from occurring. They can do this by adding the undesired feature or
-features to the
+by checking the :term:`DISTRO_FEATURES_BACKFILL` and
+:term:`MACHINE_FEATURES_BACKFILL` variables in the
+``meta/conf/bitbake.conf`` file.
+
+These two variables are paired with the
:term:`DISTRO_FEATURES_BACKFILL_CONSIDERED`
-or
-:term:`MACHINE_FEATURES_BACKFILL_CONSIDERED`
-variables for distro features and machine features respectively.
-
-Here are two examples to help illustrate feature backfilling:
-
-- *The "pulseaudio" distro feature option*: Previously, PulseAudio
- support was enabled within the Qt and GStreamer frameworks. Because
- of this, the feature is backfilled and thus enabled for all distros
- through the :term:`DISTRO_FEATURES_BACKFILL` variable in the
- ``meta/conf/bitbake.conf`` file. However, your distro needs to
- disable the feature. You can disable the feature without affecting
- other existing distro configurations that need PulseAudio support by
- adding "pulseaudio" to :term:`DISTRO_FEATURES_BACKFILL_CONSIDERED` in
- your distro's ``.conf`` file. Adding the feature to this variable
- when it also exists in the :term:`DISTRO_FEATURES_BACKFILL` variable
- prevents the build system from adding the feature to your
- configuration's :term:`DISTRO_FEATURES`, effectively disabling the
- feature for that particular distro.
+and :term:`MACHINE_FEATURES_BACKFILL_CONSIDERED` variables
+which allow distro or machine configuration maintainers to `consider` any
+added feature, and decide when they wish to keep or exclude such feature,
+thus preventing the backfilling from happening.
+
+Here are two examples to illustrate feature backfilling:
+
+- *The "pulseaudio" distro feature option*: Previously, PulseAudio support was
+ enabled within the Qt and GStreamer frameworks. Because of this, the feature
+ is now backfilled and thus enabled for all distros through the
+ :term:`DISTRO_FEATURES_BACKFILL` variable in the ``meta/conf/bitbake.conf``
+ file. However, if your distro needs to disable the feature, you can do so
+ without affecting other existing distro configurations that need PulseAudio
+ support. You do this by adding "pulseaudio" to
+ :term:`DISTRO_FEATURES_BACKFILL_CONSIDERED` in your distro's ``.conf``
+ file. So, adding the feature to this variable when it also exists in the
+ :term:`DISTRO_FEATURES_BACKFILL` variable prevents the build system from
+ adding the feature to your configuration's :term:`DISTRO_FEATURES`,
+ effectively disabling the feature for that particular distro.
- *The "rtc" machine feature option*: Previously, real time clock (RTC)
support was enabled for all target devices. Because of this, the
feature is backfilled and thus enabled for all machines through the
- :term:`MACHINE_FEATURES_BACKFILL` variable in the
- ``meta/conf/bitbake.conf`` file. However, your target device does not
- have this capability. You can disable RTC support for your device
- without affecting other machines that need RTC support by adding the
- feature to your machine's :term:`MACHINE_FEATURES_BACKFILL_CONSIDERED`
- list in the machine's ``.conf`` file. Adding the feature to this
- variable when it also exists in the :term:`MACHINE_FEATURES_BACKFILL`
- variable prevents the build system from adding the feature to your
- configuration's :term:`MACHINE_FEATURES`, effectively disabling RTC
- support for that particular machine.
+ :term:`MACHINE_FEATURES_BACKFILL` variable in the ``meta/conf/bitbake.conf``
+ file. However, if your target device does not have this capability, you can
+ disable RTC support for your device without affecting other machines
+ that need RTC support. You do this by adding the "rtc" feature to the
+ :term:`MACHINE_FEATURES_BACKFILL_CONSIDERED` list in your machine's ``.conf``
+ file. So, adding the feature to this variable when it also exists in the
+ :term:`MACHINE_FEATURES_BACKFILL` variable prevents the build system from
+ adding the feature to your configuration's :term:`MACHINE_FEATURES`,
+ effectively disabling RTC support for that particular machine.
diff --git a/documentation/ref-manual/images.rst b/documentation/ref-manual/images.rst
index 31fb567687..c45f9104a9 100644
--- a/documentation/ref-manual/images.rst
+++ b/documentation/ref-manual/images.rst
@@ -14,15 +14,17 @@ image you want.
Building an image without GNU General Public License Version 3
(GPLv3), GNU Lesser General Public License Version 3 (LGPLv3), and
the GNU Affero General Public License Version 3 (AGPL-3.0) components
- is only supported for minimal and base images. Furthermore, if you
- are going to build an image using non-GPLv3 and similarly licensed
- components, you must make the following changes in the ``local.conf``
- file before using the BitBake command to build the minimal or base
- image::
+ is only tested for core-image-minimal image. Furthermore, if you would like to
+ build an image and verify that it does not include GPLv3 and similarly licensed
+ components, you must make the following changes in the image recipe
+ file before using the BitBake command to build the image:
- 1. Comment out the EXTRA_IMAGE_FEATURES line
- 2. Set INCOMPATIBLE_LICENSE = "GPL-3.0* LGPL-3.0* AGPL-3.0*"
+ INCOMPATIBLE_LICENSE = "GPL-3.0* LGPL-3.0*"
+ Alternatively, you can adjust ``local.conf`` file, repeating and adjusting the line
+ for all images where the license restriction must apply:
+
+ INCOMPATIBLE_LICENSE:pn-your-image-name = "GPL-3.0* LGPL-3.0*"
From within the ``poky`` Git repository, you can use the following
command to display the list of directories within the :term:`Source Directory`
@@ -30,7 +32,7 @@ that contain image recipe files::
$ ls meta*/recipes*/images/*.bb
-Following is a list of supported recipes:
+Here is a list of supported recipes:
- ``build-appliance-image``: An example virtual machine that contains
all the pieces required to run builds using the build system as well
@@ -78,11 +80,11 @@ Following is a list of supported recipes:
libraries you can use in a host development environment.
- ``core-image-minimal-initramfs``: A ``core-image-minimal`` image that
- has the Minimal RAM-based Initial Root Filesystem (initramfs) as part
+ has the Minimal RAM-based Initial Root Filesystem (:term:`Initramfs`) as part
of the kernel, which allows the system to find the first "init"
program more efficiently. See the
:term:`PACKAGE_INSTALL` variable for
- additional information helpful when working with initramfs images.
+ additional information helpful when working with :term:`Initramfs` images.
- ``core-image-minimal-mtdutils``: A ``core-image-minimal`` image that
has support for the Minimal MTD Utilities, which let the user
@@ -117,17 +119,17 @@ Following is a list of supported recipes:
deployed to a separate partition so that you can boot into it and use
it to deploy a second image to be tested. You can find more
information about runtime testing in the
- ":ref:`dev-manual/common-tasks:performing automated runtime testing`"
+ ":ref:`dev-manual/runtime-testing:performing automated runtime testing`"
section in the Yocto Project Development Tasks Manual.
- ``core-image-testmaster-initramfs``: A RAM-based Initial Root
- Filesystem (initramfs) image tailored for use with the
+ Filesystem (:term:`Initramfs`) image tailored for use with the
``core-image-testmaster`` image.
- ``core-image-weston``: A very basic Wayland image with a terminal.
This image provides the Wayland protocol libraries and the reference
Weston compositor. For more information, see the
- ":ref:`dev-manual/common-tasks:using wayland and weston`"
+ ":ref:`dev-manual/wayland:using wayland and weston`"
section in the Yocto Project Development Tasks Manual.
- ``core-image-x11``: A very basic X11 image with a terminal.
diff --git a/documentation/ref-manual/kickstart.rst b/documentation/ref-manual/kickstart.rst
index d82da0ee75..297887805c 100644
--- a/documentation/ref-manual/kickstart.rst
+++ b/documentation/ref-manual/kickstart.rst
@@ -82,7 +82,7 @@ the ``part`` and ``partition`` commands:
source of the data that populates the partition. The most common
value for this option is "rootfs", but you can use any value that
maps to a valid source plugin. For information on the source plugins,
- see the ":ref:`dev-manual/common-tasks:using the wic plugin interface`"
+ see the ":ref:`dev-manual/wic:using the wic plugin interface`"
section in the Yocto Project Development Tasks Manual.
If you use ``--source rootfs``, Wic creates a partition as large as
@@ -177,7 +177,7 @@ the ``part`` and ``partition`` commands:
- ``--part-type``: This option is a Wic-specific option that
specifies the partition type globally unique identifier (GUID) for
GPT partitions. You can find the list of partition type GUIDs at
- https://en.wikipedia.org/wiki/GUID_Partition_Table#Partition_type_GUIDs.
+ :wikipedia:`GUID_Partition_Table#Partition_type_GUIDs`.
- ``--use-uuid``: This option is a Wic-specific option that causes
Wic to generate a random GUID for the partition. The generated
@@ -211,15 +211,10 @@ supports the following options:
.. note::
Bootloader functionality and boot partitions are implemented by the
- various
- --source
- plugins that implement bootloader functionality. The bootloader
+ various source plugins that implement bootloader functionality. The bootloader
command essentially provides a means of modifying bootloader
configuration.
-- ``--timeout``: Specifies the number of seconds before the
- bootloader times out and boots the default option.
-
- ``--append``: Specifies kernel parameters. These parameters will be
added to the syslinux :term:`APPEND` or ``grub`` kernel command line.
@@ -227,3 +222,13 @@ supports the following options:
the bootloader. You can provide a full pathname for the file or a
file located in the ``canned-wks`` folder. This option overrides
all other bootloader options.
+
+- ``--ptable``: Specifies the partition table format. Valid values are:
+
+ - ``msdos``
+
+ - ``gpt``
+
+- ``--timeout``: Specifies the number of seconds before the
+ bootloader times out and boots the default option.
+
diff --git a/documentation/ref-manual/qa-checks.rst b/documentation/ref-manual/qa-checks.rst
index 8c475d0f72..53b1836e74 100644
--- a/documentation/ref-manual/qa-checks.rst
+++ b/documentation/ref-manual/qa-checks.rst
@@ -162,7 +162,7 @@ Errors and Warnings
normally expected to be empty (such as ``/tmp``). These files may
be more appropriately installed to a different location, or
perhaps alternatively not installed at all, usually by updating the
- ``do_install`` task/function.
+ :ref:`ref-tasks-install` task/function.
.. _qa-check-arch:
@@ -536,7 +536,7 @@ Errors and Warnings
in (e.g. ``FILES:${``\ :term:`PN`\ ``}`` for the main
package).
- - Delete the files at the end of the ``do_install`` task if the
+ - Delete the files at the end of the :ref:`ref-tasks-install` task if the
files are not needed in any package.
 
@@ -579,10 +579,10 @@ Errors and Warnings
- ``package contains mime types but does not inherit mime: <packagename> path '<file>' [mime]``
The specified package contains mime type files (``.xml`` files in
- ``${datadir}/mime/packages``) and yet does not inherit the mime
- class which will ensure that these get properly installed. Either
- add ``inherit mime`` to the recipe or remove the files at the
- ``do_install`` step if they are not needed.
+ ``${datadir}/mime/packages``) and yet does not inherit the
+ :ref:`ref-classes-mime` class which will ensure that these get
+ properly installed. Either add ``inherit mime`` to the recipe or remove the
+ files at the :ref:`ref-tasks-install` step if they are not needed.
.. _qa-check-mime-xdg:
@@ -590,10 +590,10 @@ Errors and Warnings
- ``package contains desktop file with key 'MimeType' but does not inhert mime-xdg: <packagename> path '<file>' [mime-xdg]``
The specified package contains a .desktop file with a 'MimeType' key
- present, but does not inherit the mime-xdg class that is required in
- order for that to be activated. Either add ``inherit mime`` to the
- recipe or remove the files at the ``do_install`` step if they are not
- needed.
+ present, but does not inherit the :ref:`ref-classes-mime-xdg`
+ class that is required in order for that to be activated. Either add
+ ``inherit mime`` to the recipe or remove the files at the
+ :ref:`ref-tasks-install` step if they are not needed.
.. _qa-check-src-uri-bad:
@@ -602,7 +602,7 @@ Errors and Warnings
GitHub provides "archive" tarballs, however these can be re-generated
on the fly and thus the file's signature will not necessarily match that
- in the SRC_URI checksums in future leading to build failures. It is
+ in the :term:`SRC_URI` checksums in future leading to build failures. It is
recommended that you use an official release tarball or switch to
pulling the corresponding revision in the actual git repository instead.
@@ -613,18 +613,20 @@ Errors and Warnings
so using ${:term:`BPN`} rather than ${:term:`PN`} as the latter will change
for different variants of the same recipe e.g. when :term:`BBCLASSEXTEND`
or multilib are being used. This check will fail if a reference to ``${PN}``
- is found within the :term:`SRC_URI` value - change it to ``${BPN}`` instead.
+ is found within the :term:`SRC_URI` value --- change it to ``${BPN}`` instead.
.. _qa-check-unhandled-features-check:
- ``<recipename>: recipe doesn't inherit features_check [unhandled-features-check]``
- This check ensures that if one of the variables that the :ref:`features_check <ref-classes-features_check>`
- class supports (e.g. :term:`REQUIRED_DISTRO_FEATURES`) is used, then the recipe
- inherits ``features_check`` in order for the requirement to actually work. If
- you are seeing this message, either add ``inherit features_check`` to your recipe
- or remove the reference to the variable if it is not needed.
+ This check ensures that if one of the variables that the
+ :ref:`ref-classes-features_check` class supports (e.g.
+ :term:`REQUIRED_DISTRO_FEATURES`) is used, then the recipe
+ inherits :ref:`ref-classes-features_check` in order for
+ the requirement to actually work. If you are seeing this message, either
+ add ``inherit features_check`` to your recipe or remove the reference to
+ the variable if it is not needed.
.. _qa-check-missing-update-alternatives:
@@ -632,7 +634,7 @@ Errors and Warnings
- ``<recipename>: recipe defines ALTERNATIVE:<packagename> but doesn't inherit update-alternatives. This might fail during do_rootfs later! [missing-update-alternatives]``
This check ensures that if a recipe sets the :term:`ALTERNATIVE` variable that the
- recipe also inherits :ref:`update-alternatives <ref-classes-update-alternatives>` such
+ recipe also inherits :ref:`ref-classes-update-alternatives` such
that the alternative will be correctly set up. If you are seeing this message, either
add ``inherit update-alternatives`` to your recipe or remove the reference to the variable
if it is not needed.
@@ -653,7 +655,7 @@ Errors and Warnings
- ``<packagename> contains perllocal.pod (<files>), should not be installed [perllocalpod]``
``perllocal.pod`` is an index file of locally installed modules and so shouldn't be
- installed by any distribution packages. The :ref:`cpan <ref-classes-cpan>` class
+ installed by any distribution packages. The :ref:`ref-classes-cpan` class
already sets ``NO_PERLLOCAL`` to stop this file being generated by most Perl recipes,
but if a recipe is using ``MakeMaker`` directly then they might not be doing this
correctly. This check ensures that perllocal.pod is not in any package in order to
@@ -667,8 +669,8 @@ Errors and Warnings
If ``usrmerge`` is in :term:`DISTRO_FEATURES`, this check will ensure that no package
installs files to root (``/bin``, ``/sbin``, ``/lib``, ``/lib64``) directories. If you are seeing this
- message, it indicates that the ``do_install`` step (or perhaps the build process that
- ``do_install`` is calling into, e.g. ``make install`` is using hardcoded paths instead
+ message, it indicates that the :ref:`ref-tasks-install` step (or perhaps the build process that
+ :ref:`ref-tasks-install` is calling into, e.g. ``make install`` is using hardcoded paths instead
of the variables set up for this (``bindir``, ``sbindir``, etc.), and should be
changed so that it does.
@@ -677,7 +679,7 @@ Errors and Warnings
- ``Fuzz detected: <patch output> [patch-fuzz]``
- This check looks for evidence of "fuzz" when applying patches within the ``do_patch``
+ This check looks for evidence of "fuzz" when applying patches within the :ref:`ref-tasks-patch`
task. Patch fuzz is a situation when the ``patch`` tool ignores some of the context
lines in order to apply the patch. Consider this example:
@@ -727,7 +729,7 @@ Errors and Warnings
devtool modify <recipe>
This will apply all of the patches, and create new commits out of them in
- the workspace - with the patch context updated.
+ the workspace --- with the patch context updated.
Then, replace the patches in the recipe layer::
@@ -748,6 +750,64 @@ Errors and Warnings
other things in the patches, those can be discarded.
+.. _qa-check-patch-status:
+
+- ``Missing Upstream-Status in patch <patchfile> Please add according to <url> [patch-status-core/patch-status-noncore]``
+
+ The ``Upstream-Status`` value is missing in the specified patch file's header.
+ This value is intended to track whether or not the patch has been sent
+ upstream, whether or not it has been merged, etc.
+
+ There are two options for this same check - ``patch-status-core`` (for
+ recipes in OE-Core) and ``patch-status-noncore`` (for recipes in any other
+ layer).
+
+ For more information, see the
+ ":ref:`contributor-guide/recipe-style-guide:patch upstream status`"
+ section in the Yocto Project and OpenEmbedded Contributor Guide.
+
+- ``Malformed Upstream-Status in patch <patchfile> Please correct according to <url> [patch-status-core/patch-status-noncore]``
+
+ The ``Upstream-Status`` value in the specified patch file's header is invalid -
+ it must be a specific format. See the "Missing Upstream-Status" entry above
+ for more information.
+
+
+.. _qa-check-buildpaths:
+
+- ``File <filename> in package <packagename> contains reference to TMPDIR [buildpaths]``
+
+ This check ensures that build system paths (including :term:`TMPDIR`) do not
+ appear in output files, which not only leaks build system configuration into
+ the target, but also hinders binary reproducibility as the output will change
+ if the build system configuration changes.
+
+ Typically these paths will enter the output through some mechanism in the
+ configuration or compilation of the software being built by the recipe. To
+ resolve this issue you will need to determine how the detected path is
+ entering the output. Sometimes it may require adjusting scripts or code to
+ use a relative path rather than an absolute one, or to pick up the path from
+ runtime configuration or environment variables.
+
+.. _qa-check-unimplemented-ptest:
+
+- ``<tool> tests detected [unimplemented-ptest]``
+
+ This check will detect if the source of the package contains some
+ upstream-provided tests and, if so, that ptests are implemented for this
+ recipe. See the ":ref:`dev-manual/packages:testing packages with ptest`"
+ section in the Yocto Project Development Tasks Manual. See also the
+ ":ref:`ref-classes-ptest`" section.
+
+.. _qa-check-virtual-slash:
+
+- ``<variable> is set to <value> but the substring 'virtual/' holds no meaning in this context. It only works for build time dependencies, not runtime ones. It is suggested to use 'VIRTUAL-RUNTIME_' variables instead.``
+
+ ``virtual/`` is a convention intended for use in the build context
+ (i.e. :term:`PROVIDES` and :term:`DEPENDS`) rather than the runtime
+ context (i.e. :term:`RPROVIDES` and :term:`RDEPENDS`). Use
+ :term:`VIRTUAL-RUNTIME` variables instead for the latter.
+
Configuring and Disabling QA Checks
===================================
diff --git a/documentation/ref-manual/release-process.rst b/documentation/ref-manual/release-process.rst
index 8acb4b8e09..920794679d 100644
--- a/documentation/ref-manual/release-process.rst
+++ b/documentation/ref-manual/release-process.rst
@@ -14,13 +14,13 @@ Major and Minor Release Cadence
The Yocto Project delivers major releases (e.g. &DISTRO;) using a six
month cadence roughly timed each April and October of the year.
-Following are examples of some major YP releases with their codenames
+Here are examples of some major YP releases with their codenames
also shown. See the ":ref:`ref-manual/release-process:major release codenames`"
section for information on codenames used with major releases.
- - 2.2 (Morty)
- - 2.1 (Krogoth)
- - 2.0 (Jethro)
+ - 4.1 ("Langdale")
+ - 4.0 ("Kirkstone")
+ - 3.4 ("Honister")
While the cadence is never perfect, this timescale facilitates
regular releases that have strong QA cycles while not overwhelming users
@@ -29,12 +29,12 @@ major holidays in various geographies.
The Yocto project delivers minor (point) releases on an unscheduled
basis and are usually driven by the accumulation of enough significant
-fixes or enhancements to the associated major release. Following are
-some example past point releases:
+fixes or enhancements to the associated major release.
+Some example past point releases are:
- - 2.1.1
- - 2.1.2
- - 2.2.1
+ - 4.1.3
+ - 4.0.8
+ - 3.4.4
The point release
indicates a point in the major release branch where a full QA cycle and
@@ -87,15 +87,51 @@ stable release.
exception to this policy occurs when there is a strong reason such as
the fix happens to also be the preferred upstream approach.
-Stable release branches have strong maintenance for about a year after
-their initial release. Should significant issues be found for any
-release regardless of its age, fixes could be backported to older
-releases. For issues that are not backported given an older release,
-Community LTS trees and branches allow community members to share
-patches for older releases. However, these types of patches do not go
-through the same release process as do point releases. You can find more
-information about stable branch maintenance at
-:yocto_wiki:`/Stable_branch_maintenance`.
+.. _ref-long-term-support-releases:
+
+Long Term Support Releases
+==========================
+
+While stable releases are supported for a duration of seven months,
+some specific ones are now supported for a longer period by the Yocto
+Project, and are called Long Term Support (:term:`LTS`) releases.
+
+When significant issues are found, :term:`LTS` releases allow to publish
+fixes not only for the current stable release, but also to the
+:term:`LTS` releases that are still supported. Older stable releases which
+have reached their End of Life (EOL) won't receive such updates.
+
+This started with version 3.1 ("Dunfell"), released in April 2020, which
+the project initially committed to supporting for two years, but this duration
+was later extended to four years. Similarly, the following :term:`LTS` release,
+version 4.0 ("Kirkstone"), was released two years later in May 2022 and the
+project committed to supporting it for four years too.
+
+Therefore, a new :term:`LTS` release is made every two years and is supported
+for four years. This offers more stability to project users and leaves more
+time to upgrade to the following :term:`LTS` release.
+
+See :yocto_wiki:`/Stable_Release_and_LTS` for details about the management
+of stable and :term:`LTS` releases.
+
+.. image:: svg/releases.*
+ :width: 100%
+
+.. note::
+
+ In some circumstances, a layer can be created by the community in order to
+ add a specific feature or support a new version of some package for an :term:`LTS`
+ release. This is called a :term:`Mixin` layer. These are thin and specific
+ purpose layers which can be stacked with an :term:`LTS` release to "mix" a specific
+ feature into that build. These are created on an as-needed basis and
+ maintained by the people who need them.
+
+ Policies on testing these layers depend on how widespread their usage is and
+ determined on a case-by-case basis. You can find some :term:`Mixin` layers in the
+ :yocto_git:`meta-lts-mixins </meta-lts-mixins>` repository. While the Yocto
+ Project provides hosting for those repositories, it does not provides
+ testing on them. Other :term:`Mixin` layers may be released elsewhere by the wider
+ community.
Testing and Quality Assurance
=============================
@@ -107,7 +143,7 @@ Additionally, because the test strategies are visible to you as a
developer, you can validate your projects. This section overviews the
available test infrastructure used in the Yocto Project. For information
on how to run available tests on your projects, see the
-":ref:`dev-manual/common-tasks:performing automated runtime testing`"
+":ref:`dev-manual/runtime-testing:performing automated runtime testing`"
section in the Yocto Project Development Tasks Manual.
The QA/testing infrastructure is woven into the project to the point
@@ -127,42 +163,34 @@ consists of the following pieces:
an ARM target, did the build produce ARM binaries. If, for example,
the build produced PPC binaries then there is a problem.
-- :ref:`ref-classes-testimage*`: This class
+- :ref:`ref-classes-testimage`: This class
performs runtime testing of images after they are built. The tests
are usually used with :doc:`QEMU </dev-manual/qemu>`
to boot the images and check the combined runtime result boot
operation and functions. However, the test can also use the IP
address of a machine to test.
-- :ref:`ptest <dev-manual/common-tasks:testing packages with ptest>`:
+- :ref:`ptest <dev-manual/packages:testing packages with ptest>`:
Runs tests against packages produced during the build for a given
piece of software. The test allows the packages to be run within a
target image.
-- ``oe-selftest``: Tests combination BitBake invocations. These tests
+- ``oe-selftest``: Tests combinations of BitBake invocations. These tests
operate outside the OpenEmbedded build system itself. The
``oe-selftest`` can run all tests by default or can run selected
tests or test suites.
- .. note::
-
- Running ``oe-selftest`` requires host packages beyond the "Essential"
- grouping. See the :ref:`ref-manual/system-requirements:required packages for the build host`
- section for more information.
-
Originally, much of this testing was done manually. However, significant
effort has been made to automate the tests so that more people can use
them and the Yocto Project development team can run them faster and more
efficiently.
-The Yocto Project's main Autobuilder (&YOCTO_AB_URL;)
-publicly tests each Yocto Project release's code in the
-:term:`OpenEmbedded-Core (OE-Core)`, Poky, and BitBake repositories. The testing
-occurs for both the current state of the "master" branch and also for
+The Yocto Project's main Autobuilder (&YOCTO_AB_URL;) publicly tests each Yocto
+Project release's code in the :oe_git:`openembedded-core </openembedded-core>`,
+:yocto_git:`poky </poky>` and :oe_git:`bitbake </bitbake>` repositories. The
+testing occurs for both the current state of the "master" branch and also for
submitted patches. Testing for submitted patches usually occurs in the
-"ross/mut" branch in the ``poky-contrib`` repository (i.e. the
-master-under-test branch) or in the "master-next" branch in the ``poky``
-repository.
+in the "master-next" branch in the :yocto_git:`poky </poky>` repository.
.. note::
diff --git a/documentation/ref-manual/resources.rst b/documentation/ref-manual/resources.rst
index c942384662..4eaaca942e 100644
--- a/documentation/ref-manual/resources.rst
+++ b/documentation/ref-manual/resources.rst
@@ -23,8 +23,7 @@ The Yocto Project gladly accepts contributions. You can submit changes
to the project either by creating and sending pull requests, or by
submitting patches through email. For information on how to do both as
well as information on how to identify the maintainer for each area of
-code, see the ":ref:`dev-manual/common-tasks:submitting a change to the yocto project`" section in the
-Yocto Project Development Tasks Manual.
+code, see the :doc:`../contributor-guide/index`.
.. _resources-bugtracker:
@@ -46,8 +45,8 @@ your expectations).
For a general procedure and guidelines on how to use Bugzilla to submit a bug
against the Yocto Project, see the following:
-- The ":ref:`dev-manual/common-tasks:submitting a defect against the yocto project`"
- section in the Yocto Project Development Tasks Manual.
+- The ":doc:`../contributor-guide/report-defect`"
+ section in the Yocto Project and OpenEmbedded Contributor Guide.
- The Yocto Project :yocto_wiki:`Bugzilla wiki page </Bugzilla_Configuration_and_Bug_Tracking>`
@@ -64,26 +63,31 @@ and announcements. To subscribe to one of the following mailing lists,
click on the appropriate URL in the following list and follow the
instructions:
-- :yocto_lists:`/g/yocto` - General Yocto Project
+- :yocto_lists:`/g/yocto` --- general Yocto Project
discussion mailing list.
-- :oe_lists:`/g/openembedded-core` - Discussion mailing
+- :yocto_lists:`/g/yocto-patches` --- patch contribution mailing list for Yocto
+ Project-related layers which do not have their own mailing list.
+
+- :oe_lists:`/g/openembedded-core` --- discussion mailing
list about OpenEmbedded-Core (the core metadata).
-- :oe_lists:`/g/openembedded-devel` - Discussion
+- :oe_lists:`/g/openembedded-devel` --- discussion
mailing list about OpenEmbedded.
-- :oe_lists:`/g/bitbake-devel` - Discussion mailing
+- :oe_lists:`/g/bitbake-devel` --- discussion mailing
list about the :term:`BitBake` build tool.
-- :yocto_lists:`/g/poky` - Discussion mailing list
+- :yocto_lists:`/g/poky` --- discussion mailing list
about :term:`Poky`.
-- :yocto_lists:`/g/yocto-announce` - Mailing list to
+- :yocto_lists:`/g/yocto-announce` --- mailing list to
receive official Yocto Project release and milestone announcements.
-For more Yocto Project-related mailing lists, see the
-:yocto_home:`Yocto Project Website <>`.
+- :yocto_lists:`/g/docs` --- discussion mailing list about the Yocto Project
+ documentation.
+
+See also :yocto_home:`the description of all mailing lists </community/mailing-lists/>`.
.. _resources-irc:
@@ -104,93 +108,96 @@ Links and Related Documentation
Here is a list of resources you might find helpful:
-- :yocto_home:`The Yocto Project Website <>`\ *:* The home site
+- :yocto_home:`The Yocto Project Website <>`: The home site
for the Yocto Project.
-- :yocto_wiki:`The Yocto Project Main Wiki Page <>`\ *:* The main wiki page for
+- :yocto_wiki:`The Yocto Project Main Wiki Page <>`: The main wiki page for
the Yocto Project. This page contains information about project
planning, release engineering, QA & automation, a reference site map,
and other resources related to the Yocto Project.
-- :oe_home:`OpenEmbedded <>`\ *:* The build system used by the
+- :oe_home:`OpenEmbedded <>`: The build system used by the
Yocto Project. This project is the upstream, generic, embedded
distribution from which the Yocto Project derives its build system
(Poky) and to which it contributes.
-- :oe_wiki:`BitBake </BitBake>`\ *:* The tool used to process metadata.
+- :oe_wiki:`BitBake </BitBake>`: The tool used to process metadata.
-- :doc:`BitBake User Manual <bitbake:index>`\ *:* A comprehensive
+- :doc:`BitBake User Manual <bitbake:index>`: A comprehensive
guide to the BitBake tool. If you want information on BitBake, see
this manual.
-- :doc:`/brief-yoctoprojectqs/index` *:* This
+- :doc:`/brief-yoctoprojectqs/index`: This
short document lets you experience building an image using the Yocto
Project without having to understand any concepts or details.
-- :doc:`/overview-manual/index` *:* This manual provides overview
+- :doc:`/overview-manual/index`: This manual provides overview
and conceptual information about the Yocto Project.
-- :doc:`/dev-manual/index` *:* This manual is a "how-to" guide
+- :doc:`/dev-manual/index`: This manual is a "how-to" guide
that presents procedures useful to both application and system
developers who use the Yocto Project.
-- :doc:`/sdk-manual/index` *manual :* This
+- :doc:`/sdk-manual/index` manual: This
guide provides information that lets you get going with the standard
or extensible SDK. An SDK, with its cross-development toolchains,
allows you to develop projects inside or outside of the Yocto Project
environment.
-- :doc:`/bsp-guide/bsp` *:* This guide defines the structure
+- :doc:`/bsp-guide/bsp`: This guide defines the structure
for BSP components. Having a commonly understood structure encourages
standardization.
-- :doc:`/kernel-dev/index` *:* This manual describes
+- :doc:`/kernel-dev/index`: This manual describes
how to work with Linux Yocto kernels as well as provides a bit of
conceptual information on the construction of the Yocto Linux kernel
tree.
-- :doc:`/ref-manual/index` *:* This
+- :doc:`/ref-manual/index`: This
manual provides reference material such as variable, task, and class
descriptions.
-- :yocto_docs:`Yocto Project Mega-Manual </singleindex.html>`\ *:* This manual
+- :yocto_docs:`Yocto Project Mega-Manual </singleindex.html>`: This manual
is simply a single HTML file comprised of the bulk of the Yocto
Project manuals. It makes it easy to search for phrases and terms used
in the Yocto Project documentation set.
-- :doc:`/profile-manual/index` *:* This manual presents a set of
+- :doc:`/profile-manual/index`: This manual presents a set of
common and generally useful tracing and profiling schemes along with
their applications (as appropriate) to each tool.
-- :doc:`/toaster-manual/index` *:* This manual
+- :doc:`/toaster-manual/index`: This manual
introduces and describes how to set up and use Toaster. Toaster is an
Application Programming Interface (API) and web-based interface to
the :term:`OpenEmbedded Build System`, which uses
BitBake, that reports build information.
-- :yocto_wiki:`FAQ </FAQ>`\ *:* A list of commonly asked
+- `Yocto Project BitBake extension for VSCode
+ <https://marketplace.visualstudio.com/items?itemName=yocto-project.yocto-bitbake>`__:
+ This extension provides a rich feature set when working with BitBake recipes
+ within the Visual Studio Code IDE.
+
+- :yocto_wiki:`FAQ </FAQ>`: A list of commonly asked
questions and their answers.
-- *Release Notes:* Features, updates and known issues for the current
- release of the Yocto Project. To access the Release Notes, go to the
- :yocto_home:`Downloads </software-overview/downloads>` page on
- the Yocto Project website and click on the "RELEASE INFORMATION" link
- for the appropriate release.
+- :doc:`Release Information </migration-guides/index>`:
+ Migration guides, release notes, new features, updates and known issues
+ for the current and past releases of the Yocto Project.
-- :yocto_bugs:`Bugzilla <>`\ *:* The bug tracking application
+- :yocto_bugs:`Bugzilla <>`: The bug tracking application
the Yocto Project uses. If you find problems with the Yocto Project,
you should report them using this application.
- :yocto_wiki:`Bugzilla Configuration and Bug Tracking Wiki Page
- </Bugzilla_Configuration_and_Bug_Tracking>`\ *:*
+ </Bugzilla_Configuration_and_Bug_Tracking>`:
Information on how to get set up and use the Yocto Project
implementation of Bugzilla for logging and tracking Yocto Project
defects.
-- *Internet Relay Chat (IRC):* Two IRC channels on
+- Internet Relay Chat (IRC): Two IRC channels on
`Libera Chat <https://libera.chat/>`__ are
available for Yocto Project and OpenEmbeddded discussions: ``#yocto`` and
``#oe``, respectively.
-- `Quick EMUlator (QEMU) <https://wiki.qemu.org/Index.html>`__\ *:* An
+- `Quick EMUlator (QEMU) <https://wiki.qemu.org/Index.html>`__: An
open-source machine emulator and virtualizer.
diff --git a/documentation/ref-manual/structure.rst b/documentation/ref-manual/structure.rst
index 12a085552f..e4d8b54bb9 100644
--- a/documentation/ref-manual/structure.rst
+++ b/documentation/ref-manual/structure.rst
@@ -57,9 +57,8 @@ For more information on BitBake, see the :doc:`BitBake User Manual
This directory contains user configuration files and the output
generated by the OpenEmbedded build system in its standard configuration
where the source tree is combined with the output. The :term:`Build Directory`
-is created initially when you ``source``
-the OpenEmbedded build environment setup script (i.e.
-:ref:`structure-core-script`).
+is created initially when you ``source`` the OpenEmbedded build environment
+setup script (i.e. :ref:`structure-core-script`).
It is also possible to place output and configuration files in a
directory separate from the :term:`Source Directory` by
@@ -68,6 +67,9 @@ information on separating output from your local Source Directory files
(commonly described as an "out of tree" build), see the
":ref:`structure-core-script`" section.
+See the ":ref:`The Build Directory --- build/ <structure-build>`" section for details
+about the contents of the :term:`Build Directory`.
+
.. _handbook:
``documentation/``
@@ -150,10 +152,10 @@ BitBake commands. The script uses other scripts within the ``scripts``
directory to do the bulk of the work.
When you run this script, your Yocto Project environment is set up, a
-:term:`Build Directory` is created, your working
-directory becomes the Build Directory, and you are presented with some
-simple suggestions as to what to do next, including a list of some
-possible targets to build. Here is an example::
+:term:`Build Directory` is created, your working directory becomes the
+:term:`Build Directory`, and you are presented with some simple
+suggestions as to what to do next, including a list of some possible
+targets to build. Here is an example::
$ source oe-init-build-env
@@ -170,28 +172,30 @@ possible targets to build. Here is an example::
You can also run generated QEMU images with a command like 'runqemu qemux86-64'
The default output of the ``oe-init-build-env`` script is from the
-``conf-notes.txt`` file, which is found in the ``meta-poky`` directory
+``conf-summary.txt`` and ``conf-notes.txt`` files, which are found in the ``meta-poky`` directory
within the :term:`Source Directory`. If you design a
-custom distribution, you can include your own version of this
-configuration file to mention the targets defined by your distribution.
+custom distribution, you can include your own versions of these
+configuration files where you can provide a brief summary and detailed usage
+notes, such as a list of the targets defined by your distribution.
See the
-":ref:`dev-manual/common-tasks:creating a custom template configuration directory`"
+":ref:`dev-manual/custom-template-configuration-directory:creating a custom template configuration directory`"
section in the Yocto Project Development Tasks Manual for more
information.
-By default, running this script without a Build Directory argument
+By default, running this script without a :term:`Build Directory` argument
creates the ``build/`` directory in your current working directory. If
-you provide a Build Directory argument when you ``source`` the script,
-you direct the OpenEmbedded build system to create a Build Directory of
-your choice. For example, the following command creates a Build
-Directory named ``mybuilds/`` that is outside of the :term:`Source Directory`::
+you provide a :term:`Build Directory` argument when you ``source`` the script,
+you direct the OpenEmbedded build system to create a :term:`Build Directory` of
+your choice. For example, the following command creates a
+:term:`Build Directory` named ``mybuilds/`` that is outside of the
+:term:`Source Directory`::
$ source oe-init-build-env ~/mybuilds
The OpenEmbedded build system uses the template configuration files, which
-are found by default in the ``meta-poky/conf/`` directory in the Source
+are found by default in the ``meta-poky/conf/templates/default`` directory in the Source
Directory. See the
-":ref:`dev-manual/common-tasks:creating a custom template configuration directory`"
+":ref:`dev-manual/custom-template-configuration-directory:creating a custom template configuration directory`"
section in the Yocto Project Development Tasks Manual for more
information.
@@ -213,14 +217,13 @@ These files are standard top-level files.
.. _structure-build:
-The Build Directory - ``build/``
-================================
+The Build Directory --- ``build/``
+==================================
-The OpenEmbedded build system creates the :term:`Build Directory`
-when you run the build environment setup
-script :ref:`structure-core-script`. If you do not give the Build
-Directory a specific name when you run the setup script, the name
-defaults to ``build/``.
+The OpenEmbedded build system creates the :term:`Build Directory` when you run
+the build environment setup script :ref:`structure-core-script`. If you do not
+give the :term:`Build Directory` a specific name when you run the setup script,
+the name defaults to ``build/``.
For subsequent parsing and processing, the name of the Build directory
is available via the :term:`TOPDIR` variable.
@@ -231,12 +234,24 @@ is available via the :term:`TOPDIR` variable.
-----------------------
The OpenEmbedded build system creates this directory when you enable
-build history via the :ref:`buildhistory <ref-classes-buildhistory>` class file. The directory
+build history via the :ref:`ref-classes-buildhistory` class file. The directory
organizes build information into image, packages, and SDK
subdirectories. For information on the build history feature, see the
-":ref:`dev-manual/common-tasks:maintaining build output quality`"
+":ref:`dev-manual/build-quality:maintaining build output quality`"
section in the Yocto Project Development Tasks Manual.
+.. _structure-build-cache:
+
+``build/cache/``
+----------------
+
+This directory contains several internal files used by the OpenEmbedded
+build system.
+
+It also contains ``sanity_info``, a text file keeping track of important
+build information such as the values of :term:`TMPDIR`, :term:`SSTATE_DIR`,
+as well as the name and version of the host distribution.
+
.. _structure-build-conf-local.conf:
``build/conf/local.conf``
@@ -261,15 +276,15 @@ OpenEmbedded build system creates it from ``local.conf.sample`` when you
:ref:`structure-core-script`.
The source ``local.conf.sample`` file used depends on the
-:term:`TEMPLATECONF` script variable, which defaults to ``meta-poky/conf/``
+:term:`TEMPLATECONF` script variable, which defaults to ``meta-poky/conf/templates/default``
when you are building from the Yocto Project development environment,
-and to ``meta/conf/`` when you are building from the OpenEmbedded-Core
+and to ``meta/conf/templates/default`` when you are building from the OpenEmbedded-Core
environment. Because the script variable points to the source of the
``local.conf.sample`` file, this implies that you can configure your
build environment from any layer by setting the variable in the
top-level build environment setup script as follows::
- TEMPLATECONF=your_layer/conf
+ TEMPLATECONF=your_layer/conf/templates/your_template_name
Once the build process gets the sample
file, it uses ``sed`` to substitute final
@@ -281,7 +296,7 @@ file, it uses ``sed`` to substitute final
You can see how the :term:`TEMPLATECONF` variable is used by looking at the
``scripts/oe-setup-builddir`` script in the :term:`Source Directory`.
You can find the Yocto Project version of the ``local.conf.sample`` file in
- the ``meta-poky/conf`` directory.
+ the ``meta-poky/conf/templates/default`` directory.
.. _structure-build-conf-bblayers.conf:
@@ -289,7 +304,7 @@ file, it uses ``sed`` to substitute final
----------------------------
This configuration file defines
-:ref:`layers <dev-manual/common-tasks:understanding and creating layers>`,
+:ref:`layers <dev-manual/layers:understanding and creating layers>`,
which are directory trees, traversed (or walked) by BitBake. The
``bblayers.conf`` file uses the :term:`BBLAYERS`
variable to list the layers BitBake tries to find.
@@ -301,14 +316,14 @@ you ``source`` the top-level build environment setup script (i.e.
As with the ``local.conf`` file, the source ``bblayers.conf.sample``
file used depends on the :term:`TEMPLATECONF` script variable, which
-defaults to ``meta-poky/conf/`` when you are building from the Yocto
-Project development environment, and to ``meta/conf/`` when you are
+defaults to ``meta-poky/conf/templates/default`` when you are building from the Yocto
+Project development environment, and to ``meta/conf/templates/default`` when you are
building from the OpenEmbedded-Core environment. Because the script
variable points to the source of the ``bblayers.conf.sample`` file, this
implies that you can base your build from any layer by setting the
variable in the top-level build environment setup script as follows::
- TEMPLATECONF=your_layer/conf
+ TEMPLATECONF=your_layer/conf/templates/your_template_name
Once the build process gets the sample file, it uses ``sed`` to substitute final
``${``\ :term:`OEROOT`\ ``}`` values for all ``##OEROOT##`` values.
@@ -317,17 +332,9 @@ Once the build process gets the sample file, it uses ``sed`` to substitute final
You can see how the :term:`TEMPLATECONF` variable is defined by the ``scripts/oe-setup-builddir``
script in the :term:`Source Directory`. You can find the Yocto Project
- version of the ``bblayers.conf.sample`` file in the ``meta-poky/conf/``
+ version of the ``bblayers.conf.sample`` file in the ``meta-poky/conf/templates/default``
directory.
-.. _structure-build-conf-sanity_info:
-
-``build/cache/sanity_info``
----------------------------
-
-This file indicates the state of the sanity checks and is created during
-the build.
-
.. _structure-build-downloads:
``build/downloads/``
@@ -366,14 +373,15 @@ remove the ``build/sstate-cache`` directory.
.. _structure-build-tmp-buildstats:
``build/tmp/buildstats/``
--------------------------
+~~~~~~~~~~~~~~~~~~~~~~~~~
-This directory stores the build statistics.
+This directory stores the build statistics as generated by the
+:ref:`ref-classes-buildstats` class.
.. _structure-build-tmp-cache:
``build/tmp/cache/``
---------------------
+~~~~~~~~~~~~~~~~~~~~
When BitBake parses the metadata (recipes and configuration files), it
caches the results in ``build/tmp/cache/`` to speed up future builds.
@@ -389,7 +397,7 @@ cache is reused. If the file has changed, it is reparsed.
.. _structure-build-tmp-deploy:
``build/tmp/deploy/``
----------------------
+~~~~~~~~~~~~~~~~~~~~~
This directory contains any "end result" output from the OpenEmbedded
build process. The :term:`DEPLOY_DIR` variable points
@@ -402,7 +410,7 @@ Project Overview and Concepts Manual.
.. _structure-build-tmp-deploy-deb:
``build/tmp/deploy/deb/``
--------------------------
+^^^^^^^^^^^^^^^^^^^^^^^^^
This directory receives any ``.deb`` packages produced by the build
process. The packages are sorted into feeds for different architecture
@@ -411,7 +419,7 @@ types.
.. _structure-build-tmp-deploy-rpm:
``build/tmp/deploy/rpm/``
--------------------------
+^^^^^^^^^^^^^^^^^^^^^^^^^
This directory receives any ``.rpm`` packages produced by the build
process. The packages are sorted into feeds for different architecture
@@ -420,27 +428,27 @@ types.
.. _structure-build-tmp-deploy-ipk:
``build/tmp/deploy/ipk/``
--------------------------
+^^^^^^^^^^^^^^^^^^^^^^^^^
This directory receives ``.ipk`` packages produced by the build process.
.. _structure-build-tmp-deploy-licenses:
``build/tmp/deploy/licenses/``
-------------------------------
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
This directory receives package licensing information. For example, the
directory contains sub-directories for ``bash``, ``busybox``, and
``glibc`` (among others) that in turn contain appropriate ``COPYING``
license files with other licensing information. For information on
licensing, see the
-":ref:`dev-manual/common-tasks:maintaining open source license compliance during your product's lifecycle`"
+":ref:`dev-manual/licenses:maintaining open source license compliance during your product's lifecycle`"
section in the Yocto Project Development Tasks Manual.
.. _structure-build-tmp-deploy-images:
``build/tmp/deploy/images/``
-----------------------------
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^
This directory is populated with the basic output objects of the build
(think of them as the "generated artifacts" of the build process),
@@ -467,7 +475,7 @@ the kernel files::
.. _structure-build-tmp-deploy-sdk:
``build/tmp/deploy/sdk/``
--------------------------
+^^^^^^^^^^^^^^^^^^^^^^^^^
The OpenEmbedded build system creates this directory to hold toolchain
installer scripts which, when executed, install the sysroot that matches
@@ -479,7 +487,7 @@ Software Development Kit (eSDK) manual.
.. _structure-build-tmp-sstate-control:
``build/tmp/sstate-control/``
------------------------------
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The OpenEmbedded build system uses this directory for the shared state
manifest files. The shared state code uses these files to record the
@@ -492,7 +500,7 @@ another.
.. _structure-build-tmp-sysroots-components:
``build/tmp/sysroots-components/``
-----------------------------------
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
This directory is the location of the sysroot contents that the task
:ref:`ref-tasks-prepare_recipe_sysroot`
@@ -507,7 +515,7 @@ should be automatic, and recipes should not directly reference
.. _structure-build-tmp-sysroots:
``build/tmp/sysroots/``
------------------------
+~~~~~~~~~~~~~~~~~~~~~~~
Previous versions of the OpenEmbedded build system used to create a
global shared sysroot per machine along with a native sysroot. Since
@@ -525,12 +533,12 @@ recipe-specific :term:`WORKDIR` directories. Thus, the
.. _structure-build-tmp-stamps:
``build/tmp/stamps/``
----------------------
+~~~~~~~~~~~~~~~~~~~~~
This directory holds information that BitBake uses for accounting
purposes to track what tasks have run and when they have run. The
directory is sub-divided by architecture, package name, and version.
-Following is an example::
+Here is an example::
stamps/all-poky-linux/distcc-config/1.0-r0.do_build-2fdd....2do
@@ -545,7 +553,7 @@ section in the Yocto Project Overview and Concepts Manual.
.. _structure-build-tmp-log:
``build/tmp/log/``
-------------------
+~~~~~~~~~~~~~~~~~~
This directory contains general logs that are not otherwise placed using
the package's :term:`WORKDIR`. Examples of logs are the output from the
@@ -555,7 +563,7 @@ necessarily mean this directory is created.
.. _structure-build-tmp-work:
``build/tmp/work/``
--------------------
+~~~~~~~~~~~~~~~~~~~
This directory contains architecture-specific work sub-directories for
packages built by BitBake. All tasks execute from the appropriate work
@@ -571,7 +579,7 @@ built within the Yocto Project. For this package, a work directory of
``tmp/work/qemux86-poky-linux/linux-yocto/3.0+git1+<.....>``, referred
to as the :term:`WORKDIR`, is created. Within this directory, the source is
unpacked to ``linux-qemux86-standard-build`` and then patched by Quilt.
-(See the ":ref:`dev-manual/common-tasks:using quilt in your workflow`" section in
+(See the ":ref:`dev-manual/quilt:using quilt in your workflow`" section in
the Yocto Project Development Tasks Manual for more information.) Within
the ``linux-qemux86-standard-build`` directory, standard Quilt
directories ``linux-3.0/patches`` and ``linux-3.0/.pc`` are created, and
@@ -587,9 +595,9 @@ install" places its output that is then split into sub-packages within
.. _structure-build-tmp-work-tunearch-recipename-version:
``build/tmp/work/tunearch/recipename/version/``
------------------------------------------------
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-The recipe work directory - ``${WORKDIR}``.
+The recipe work directory --- ``${WORKDIR}``.
As described earlier in the
":ref:`structure-build-tmp-sysroots`" section,
@@ -623,7 +631,7 @@ Here are key subdirectories within each recipe work directory:
split into individual packages.
- ``${WORKDIR}/packages-split``: Contains the output of the
- ``do_package`` task after the output has been split into individual
+ :ref:`ref-tasks-package` task after the output has been split into individual
packages. There are subdirectories for each individual package created by
the recipe.
@@ -645,7 +653,7 @@ Here are key subdirectories within each recipe work directory:
.. _structure-build-work-shared:
``build/tmp/work-shared/``
---------------------------
+~~~~~~~~~~~~~~~~~~~~~~~~~~
For efficiency, the OpenEmbedded build system creates and uses this
directory to hold recipes that share a work directory with other
@@ -654,18 +662,18 @@ recipes. In practice, this is only used for ``gcc`` and its variants
.. _structure-meta:
-The Metadata - ``meta/``
-========================
+The Metadata --- ``meta/``
+==========================
As mentioned previously, :term:`Metadata` is the core of the
Yocto Project. Metadata has several important subdivisions:
.. _structure-meta-classes:
-``meta/classes/``
------------------
+``meta/classes*/``
+------------------
-This directory contains the ``*.bbclass`` files. Class files are used to
+These directories contain the ``*.bbclass`` files. Class files are used to
abstract common code so it can be reused by multiple packages. Every
package inherits the :ref:`ref-classes-base` file. Examples of other important
classes are :ref:`ref-classes-autotools`, which in theory allows any
@@ -695,7 +703,7 @@ distribution configuration file.
.. _structure-meta-conf-machine:
``meta/conf/machine/``
-----------------------
+~~~~~~~~~~~~~~~~~~~~~~
This directory contains all the machine configuration files. If you set
``MACHINE = "qemux86"``, the OpenEmbedded build system looks for a
@@ -706,7 +714,7 @@ support for a new machine to the Yocto Project, look in this directory.
.. _structure-meta-conf-distro:
``meta/conf/distro/``
----------------------
+~~~~~~~~~~~~~~~~~~~~~
The contents of this directory controls any distribution-specific
configurations. For the Yocto Project, the ``defaultsetup.conf`` is the
@@ -718,7 +726,7 @@ file mainly inherits its configuration from Poky.
.. _structure-meta-conf-machine-sdk:
``meta/conf/machine-sdk/``
---------------------------
+~~~~~~~~~~~~~~~~~~~~~~~~~~
The OpenEmbedded build system searches this directory for configuration
files that correspond to the value of
@@ -743,7 +751,9 @@ and lists of files and directories with known permissions.
-------------
This directory contains OpenEmbedded Python library code used during the
-build process.
+build process. It is enabled via the ``addpylib`` directive in
+``meta/conf/local.conf``. For more information, see
+:ref:`bitbake-user-manual/bitbake-user-manual-metadata:extending python library code`.
.. _structure-meta-recipes-bsp:
diff --git a/documentation/ref-manual/svg/releases.svg b/documentation/ref-manual/svg/releases.svg
new file mode 100644
index 0000000000..198d4632b1
--- /dev/null
+++ b/documentation/ref-manual/svg/releases.svg
@@ -0,0 +1,1744 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<svg
+ version="1.1"
+ id="svg2"
+ width="2040.0006"
+ height="624.30518"
+ viewBox="0 0 2040.0006 624.30515"
+ sodipodi:docname="releases.svg"
+ inkscape:version="1.1.2 (0a00cf5339, 2022-02-04)"
+ xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
+ xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
+ xmlns="http://www.w3.org/2000/svg"
+ xmlns:svg="http://www.w3.org/2000/svg"
+ xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
+ xmlns:cc="http://creativecommons.org/ns#"
+ xmlns:dc="http://purl.org/dc/elements/1.1/">
+ <title
+ id="title8568">Yocto Project Release Timeline</title>
+ <metadata
+ id="metadata8">
+ <rdf:RDF>
+ <cc:Work
+ rdf:about="">
+ <dc:format>image/svg+xml</dc:format>
+ <dc:type
+ rdf:resource="http://purl.org/dc/dcmitype/StillImage" />
+ <cc:license
+ rdf:resource="http://artlibre.org/licence/lal" />
+ <dc:title>Yocto Project Release Timeline</dc:title>
+ <dc:creator>
+ <cc:Agent>
+ <dc:title>The Yocto Project</dc:title>
+ </cc:Agent>
+ </dc:creator>
+ </cc:Work>
+ <cc:License
+ rdf:about="http://creativecommons.org/licenses/by-sa/4.0/">
+ <cc:permits
+ rdf:resource="http://creativecommons.org/ns#Reproduction" />
+ <cc:permits
+ rdf:resource="http://creativecommons.org/ns#Distribution" />
+ <cc:requires
+ rdf:resource="http://creativecommons.org/ns#Notice" />
+ <cc:requires
+ rdf:resource="http://creativecommons.org/ns#Attribution" />
+ <cc:permits
+ rdf:resource="http://creativecommons.org/ns#DerivativeWorks" />
+ <cc:requires
+ rdf:resource="http://creativecommons.org/ns#ShareAlike" />
+ </cc:License>
+ </rdf:RDF>
+ </metadata>
+ <defs
+ id="defs6">
+ <inkscape:path-effect
+ effect="powerstroke"
+ id="path-effect6121"
+ is_visible="true"
+ lpeversion="1"
+ offset_points="0,0.5"
+ sort_points="true"
+ interpolator_type="CubicBezierJohan"
+ interpolator_beta="0.2"
+ start_linecap_type="zerowidth"
+ linejoin_type="extrp_arc"
+ miter_limit="4"
+ scale_width="1"
+ end_linecap_type="zerowidth"
+ not_jump="false" />
+ <marker
+ style="overflow:visible"
+ id="marker5783"
+ refX="0"
+ refY="0"
+ orient="auto"
+ inkscape:stockid="Arrow2Mend"
+ inkscape:isstock="true">
+ <path
+ transform="scale(-0.6)"
+ d="M 8.7185878,4.0337352 -2.2072895,0.01601326 8.7185884,-4.0017078 c -1.7454984,2.3720609 -1.7354408,5.6174519 -6e-7,8.035443 z"
+ style="fill:#000000;fill-opacity:1;fill-rule:evenodd;stroke:#000000;stroke-width:0.625;stroke-linejoin:round;stroke-opacity:1"
+ id="path5781" />
+ </marker>
+ <marker
+ style="overflow:visible"
+ id="marker5623"
+ refX="0"
+ refY="0"
+ orient="auto"
+ inkscape:stockid="Arrow2Mend"
+ inkscape:isstock="true">
+ <path
+ transform="scale(-0.6)"
+ d="M 8.7185878,4.0337352 -2.2072895,0.01601326 8.7185884,-4.0017078 c -1.7454984,2.3720609 -1.7354408,5.6174519 -6e-7,8.035443 z"
+ style="fill:#000000;fill-opacity:1;fill-rule:evenodd;stroke:#000000;stroke-width:0.625;stroke-linejoin:round;stroke-opacity:1"
+ id="path5621" />
+ </marker>
+ <marker
+ style="overflow:visible"
+ id="marker5487"
+ refX="0"
+ refY="0"
+ orient="auto"
+ inkscape:stockid="Arrow2Mend"
+ inkscape:isstock="true">
+ <path
+ transform="scale(-0.6)"
+ d="M 8.7185878,4.0337352 -2.2072895,0.01601326 8.7185884,-4.0017078 c -1.7454984,2.3720609 -1.7354408,5.6174519 -6e-7,8.035443 z"
+ style="fill:#000000;fill-opacity:1;fill-rule:evenodd;stroke:#000000;stroke-width:0.625;stroke-linejoin:round;stroke-opacity:1"
+ id="path5485" />
+ </marker>
+ <marker
+ style="overflow:visible"
+ id="marker5285"
+ refX="0"
+ refY="0"
+ orient="auto"
+ inkscape:stockid="Arrow2Mstart"
+ inkscape:isstock="true">
+ <path
+ transform="scale(0.6)"
+ d="M 8.7185878,4.0337352 -2.2072895,0.01601326 8.7185884,-4.0017078 c -1.7454984,2.3720609 -1.7354408,5.6174519 -6e-7,8.035443 z"
+ style="fill:#000000;fill-opacity:1;fill-rule:evenodd;stroke:#000000;stroke-width:0.625;stroke-linejoin:round;stroke-opacity:1"
+ id="path5283" />
+ </marker>
+ <marker
+ style="overflow:visible"
+ id="marker5161"
+ refX="0"
+ refY="0"
+ orient="auto"
+ inkscape:stockid="Arrow2Mend"
+ inkscape:isstock="true">
+ <path
+ transform="scale(-0.6)"
+ d="M 8.7185878,4.0337352 -2.2072895,0.01601326 8.7185884,-4.0017078 c -1.7454984,2.3720609 -1.7354408,5.6174519 -6e-7,8.035443 z"
+ style="fill:#000000;fill-opacity:1;fill-rule:evenodd;stroke:#000000;stroke-width:0.625;stroke-linejoin:round;stroke-opacity:1"
+ id="path5159" />
+ </marker>
+ <marker
+ style="overflow:visible"
+ id="marker4860"
+ refX="0"
+ refY="0"
+ orient="auto"
+ inkscape:stockid="Arrow2Mend"
+ inkscape:isstock="true">
+ <path
+ transform="scale(-0.6)"
+ d="M 8.7185878,4.0337352 -2.2072895,0.01601326 8.7185884,-4.0017078 c -1.7454984,2.3720609 -1.7354408,5.6174519 -6e-7,8.035443 z"
+ style="fill:#000000;fill-opacity:1;fill-rule:evenodd;stroke:#000000;stroke-width:0.625;stroke-linejoin:round;stroke-opacity:1"
+ id="path4858" />
+ </marker>
+ <marker
+ style="overflow:visible"
+ id="marker4504"
+ refX="0"
+ refY="0"
+ orient="auto"
+ inkscape:stockid="Arrow2Mend"
+ inkscape:isstock="true">
+ <path
+ transform="scale(-0.6)"
+ d="M 8.7185878,4.0337352 -2.2072895,0.01601326 8.7185884,-4.0017078 c -1.7454984,2.3720609 -1.7354408,5.6174519 -6e-7,8.035443 z"
+ style="fill:#ffa348;fill-opacity:1;fill-rule:evenodd;stroke:#ffa348;stroke-width:0.625;stroke-linejoin:round;stroke-opacity:1"
+ id="path4502" />
+ </marker>
+ <marker
+ style="overflow:visible"
+ id="Arrow1Mend"
+ refX="0"
+ refY="0"
+ orient="auto"
+ inkscape:stockid="Arrow1Mend"
+ inkscape:isstock="true">
+ <path
+ transform="matrix(-0.4,0,0,-0.4,-4,0)"
+ style="fill:#62a0ea;fill-opacity:1;fill-rule:evenodd;stroke:#62a0ea;stroke-width:1pt;stroke-opacity:1"
+ d="M 0,0 5,-5 -12.5,0 5,5 Z"
+ id="path3318" />
+ </marker>
+ <marker
+ style="overflow:visible"
+ id="marker4174"
+ refX="0"
+ refY="0"
+ orient="auto"
+ inkscape:stockid="Arrow2Mstart"
+ inkscape:isstock="true">
+ <path
+ transform="scale(0.6)"
+ d="M 8.7185878,4.0337352 -2.2072895,0.01601326 8.7185884,-4.0017078 c -1.7454984,2.3720609 -1.7354408,5.6174519 -6e-7,8.035443 z"
+ style="fill:#62a0ea;fill-opacity:1;fill-rule:evenodd;stroke:#62a0ea;stroke-width:0.625;stroke-linejoin:round;stroke-opacity:1"
+ id="path4172" />
+ </marker>
+ <marker
+ style="overflow:visible"
+ id="Arrow2Mend"
+ refX="0"
+ refY="0"
+ orient="auto"
+ inkscape:stockid="Arrow2Mend"
+ inkscape:isstock="true">
+ <path
+ transform="scale(-0.6)"
+ d="M 8.7185878,4.0337352 -2.2072895,0.01601326 8.7185884,-4.0017078 c -1.7454984,2.3720609 -1.7354408,5.6174519 -6e-7,8.035443 z"
+ style="fill:#ffa348;fill-opacity:1;fill-rule:evenodd;stroke:#ffa348;stroke-width:0.625;stroke-linejoin:round;stroke-opacity:1"
+ id="path3336" />
+ </marker>
+ <marker
+ style="overflow:visible"
+ id="Arrow1Mstart"
+ refX="0"
+ refY="0"
+ orient="auto"
+ inkscape:stockid="Arrow1Mstart"
+ inkscape:isstock="true">
+ <path
+ transform="matrix(0.4,0,0,0.4,4,0)"
+ style="fill:#ff7800;fill-opacity:1;fill-rule:evenodd;stroke:#ffa348;stroke-width:1pt;stroke-opacity:1"
+ d="M 0,0 5,-5 -12.5,0 5,5 Z"
+ id="path3315" />
+ </marker>
+ <marker
+ style="overflow:visible"
+ id="Arrow2Lstart"
+ refX="0"
+ refY="0"
+ orient="auto"
+ inkscape:stockid="Arrow2Lstart"
+ inkscape:isstock="true">
+ <path
+ transform="matrix(1.1,0,0,1.1,1.1,0)"
+ d="M 8.7185878,4.0337352 -2.2072895,0.01601326 8.7185884,-4.0017078 c -1.7454984,2.3720609 -1.7354408,5.6174519 -6e-7,8.035443 z"
+ style="fill:#ff7800;fill-opacity:1;fill-rule:evenodd;stroke:#ffa348;stroke-width:0.625;stroke-linejoin:round;stroke-opacity:1"
+ id="path3327" />
+ </marker>
+ <marker
+ style="overflow:visible"
+ id="Arrow1Lstart"
+ refX="0"
+ refY="0"
+ orient="auto"
+ inkscape:stockid="Arrow1Lstart"
+ inkscape:isstock="true">
+ <path
+ transform="matrix(0.8,0,0,0.8,10,0)"
+ style="fill:#ff7800;fill-opacity:1;fill-rule:evenodd;stroke:#ffa348;stroke-width:1pt;stroke-opacity:1"
+ d="M 0,0 5,-5 -12.5,0 5,5 Z"
+ id="path3309" />
+ </marker>
+ <linearGradient
+ id="linearGradient921"
+ inkscape:swatch="solid">
+ <stop
+ style="stop-color:#deddda;stop-opacity:1;"
+ offset="0"
+ id="stop919" />
+ </linearGradient>
+ <linearGradient
+ id="linearGradient6035-4">
+ <stop
+ id="stop6037-2"
+ style="stop-color:#ffffff"
+ offset="0" />
+ <stop
+ id="stop6039-9"
+ style="stop-color:#ffffff;stop-opacity:0"
+ offset="1" />
+ </linearGradient>
+ <marker
+ style="overflow:visible"
+ id="Arrow2Mstart-4"
+ refX="0"
+ refY="0"
+ orient="auto"
+ inkscape:stockid="Arrow2Mstart"
+ inkscape:isstock="true">
+ <path
+ transform="scale(0.6)"
+ d="M 8.7185878,4.0337352 -2.2072895,0.01601326 8.7185884,-4.0017078 c -1.7454984,2.3720609 -1.7354408,5.6174519 -6e-7,8.035443 z"
+ style="fill:#ffa348;fill-opacity:1;fill-rule:evenodd;stroke:#ffa348;stroke-width:0.625;stroke-linejoin:round;stroke-opacity:1"
+ id="path3333-2" />
+ </marker>
+ <marker
+ style="overflow:visible"
+ id="Arrow2Mend-2"
+ refX="0"
+ refY="0"
+ orient="auto"
+ inkscape:stockid="Arrow2Mend"
+ inkscape:isstock="true">
+ <path
+ transform="scale(-0.6)"
+ d="M 8.7185878,4.0337352 -2.2072895,0.01601326 8.7185884,-4.0017078 c -1.7454984,2.3720609 -1.7354408,5.6174519 -6e-7,8.035443 z"
+ style="fill:#ffa348;fill-opacity:1;fill-rule:evenodd;stroke:#ffa348;stroke-width:0.625;stroke-linejoin:round;stroke-opacity:1"
+ id="path3336-7" />
+ </marker>
+ <marker
+ style="overflow:visible"
+ id="marker5623-2"
+ refX="0"
+ refY="0"
+ orient="auto"
+ inkscape:stockid="Arrow2Mend"
+ inkscape:isstock="true">
+ <path
+ transform="scale(-0.6)"
+ d="M 8.7185878,4.0337352 -2.2072895,0.01601326 8.7185884,-4.0017078 c -1.7454984,2.3720609 -1.7354408,5.6174519 -6e-7,8.035443 z"
+ style="fill:#000000;fill-opacity:1;fill-rule:evenodd;stroke:#000000;stroke-width:0.625;stroke-linejoin:round;stroke-opacity:1"
+ id="path5621-3" />
+ </marker>
+ <marker
+ style="overflow:visible"
+ id="Arrow2Mend-4"
+ refX="0"
+ refY="0"
+ orient="auto"
+ inkscape:stockid="Arrow2Mend"
+ inkscape:isstock="true">
+ <path
+ transform="scale(-0.6)"
+ d="M 8.7185878,4.0337352 -2.2072895,0.01601326 8.7185884,-4.0017078 c -1.7454984,2.3720609 -1.7354408,5.6174519 -6e-7,8.035443 z"
+ style="fill:#ffa348;fill-opacity:1;fill-rule:evenodd;stroke:#ffa348;stroke-width:0.625;stroke-linejoin:round;stroke-opacity:1"
+ id="path3336-3" />
+ </marker>
+ <marker
+ style="overflow:visible"
+ id="marker5285-1"
+ refX="0"
+ refY="0"
+ orient="auto"
+ inkscape:stockid="Arrow2Mstart"
+ inkscape:isstock="true">
+ <path
+ transform="scale(0.6)"
+ d="M 8.7185878,4.0337352 -2.2072895,0.01601326 8.7185884,-4.0017078 c -1.7454984,2.3720609 -1.7354408,5.6174519 -6e-7,8.035443 z"
+ style="fill:#000000;fill-opacity:1;fill-rule:evenodd;stroke:#000000;stroke-width:0.625;stroke-linejoin:round;stroke-opacity:1"
+ id="path5283-7" />
+ </marker>
+ <marker
+ style="overflow:visible"
+ id="marker5161-4"
+ refX="0"
+ refY="0"
+ orient="auto"
+ inkscape:stockid="Arrow2Mend"
+ inkscape:isstock="true">
+ <path
+ transform="scale(-0.6)"
+ d="M 8.7185878,4.0337352 -2.2072895,0.01601326 8.7185884,-4.0017078 c -1.7454984,2.3720609 -1.7354408,5.6174519 -6e-7,8.035443 z"
+ style="fill:#000000;fill-opacity:1;fill-rule:evenodd;stroke:#000000;stroke-width:0.625;stroke-linejoin:round;stroke-opacity:1"
+ id="path5159-3" />
+ </marker>
+ <marker
+ style="overflow:visible"
+ id="marker5285-1-4"
+ refX="0"
+ refY="0"
+ orient="auto"
+ inkscape:stockid="Arrow2Mstart"
+ inkscape:isstock="true">
+ <path
+ transform="scale(0.6)"
+ d="M 8.7185878,4.0337352 -2.2072895,0.01601326 8.7185884,-4.0017078 c -1.7454984,2.3720609 -1.7354408,5.6174519 -6e-7,8.035443 z"
+ style="fill:#000000;fill-opacity:1;fill-rule:evenodd;stroke:#000000;stroke-width:0.625;stroke-linejoin:round;stroke-opacity:1"
+ id="path5283-7-6" />
+ </marker>
+ <marker
+ style="overflow:visible"
+ id="marker5161-4-9"
+ refX="0"
+ refY="0"
+ orient="auto"
+ inkscape:stockid="Arrow2Mend"
+ inkscape:isstock="true">
+ <path
+ transform="scale(-0.6)"
+ d="M 8.7185878,4.0337352 -2.2072895,0.01601326 8.7185884,-4.0017078 c -1.7454984,2.3720609 -1.7354408,5.6174519 -6e-7,8.035443 z"
+ style="fill:#000000;fill-opacity:1;fill-rule:evenodd;stroke:#000000;stroke-width:0.625;stroke-linejoin:round;stroke-opacity:1"
+ id="path5159-3-4" />
+ </marker>
+ <marker
+ style="overflow:visible"
+ id="marker4174-8"
+ refX="0"
+ refY="0"
+ orient="auto"
+ inkscape:stockid="Arrow2Mstart"
+ inkscape:isstock="true">
+ <path
+ transform="scale(0.6)"
+ d="M 8.7185878,4.0337352 -2.2072895,0.01601326 8.7185884,-4.0017078 c -1.7454984,2.3720609 -1.7354408,5.6174519 -6e-7,8.035443 z"
+ style="fill:#62a0ea;fill-opacity:1;fill-rule:evenodd;stroke:#62a0ea;stroke-width:0.625;stroke-linejoin:round;stroke-opacity:1"
+ id="path4172-8" />
+ </marker>
+ </defs>
+ <sodipodi:namedview
+ pagecolor="#ffffff"
+ bordercolor="#666666"
+ borderopacity="1"
+ objecttolerance="10"
+ gridtolerance="10"
+ guidetolerance="10"
+ inkscape:pageopacity="0"
+ inkscape:pageshadow="2"
+ inkscape:window-width="1920"
+ inkscape:window-height="1043"
+ id="namedview4"
+ showgrid="true"
+ inkscape:zoom="1.4472045"
+ inkscape:cx="736.24703"
+ inkscape:cy="312.32629"
+ inkscape:window-x="1728"
+ inkscape:window-y="0"
+ inkscape:window-maximized="1"
+ inkscape:current-layer="g10"
+ inkscape:document-rotation="0"
+ inkscape:snap-perpendicular="true"
+ fit-margin-top="30"
+ lock-margins="true"
+ fit-margin-left="30"
+ fit-margin-right="30"
+ fit-margin-bottom="30"
+ inkscape:pagecheckerboard="0">
+ <inkscape:grid
+ type="xygrid"
+ id="grid1257"
+ originx="-289.99936"
+ originy="325" />
+ </sodipodi:namedview>
+ <g
+ inkscape:groupmode="layer"
+ inkscape:label="Image"
+ id="g10"
+ transform="translate(-289.99936,325.00004)">
+ <path
+ style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:0.5;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ d="m 1080,220.00003 v -515.00007 0 0"
+ id="path207708" />
+ <path
+ style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:0.5;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ d="m 1200,220.00003 v -515.00007 0 0"
+ id="path207708-4" />
+ <path
+ style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:0.5;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ d="m 1320,220.00003 v -515.00007 0 0"
+ id="path207708-4-3" />
+ <path
+ style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:0.5;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ d="m 1440,219.99998 v -515.00002 0 0"
+ id="path207708-4-3-6" />
+ <path
+ style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:0.5;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ d="m 1560,219.99998 v -515.00001 0 0"
+ id="path207708-4-3-6-2" />
+ <path
+ style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:0.5;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ d="m 1680,219.99998 v -515.00002 0 0"
+ id="path207708-4-3-6-2-8" />
+ <path
+ style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:0.5;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ d="m 1800,219.99998 v -515.00002 0 0"
+ id="path207708-4-3-6-2-8-4" />
+ <path
+ style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:0.5;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ d="m 1920,219.99998 v -515.00002 0 0"
+ id="path207708-4-3-6-2-8-4-3" />
+ <path
+ style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:0.5;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ d="m 2040,219.99997 v -460.00002 0 0"
+ id="path207708-4-3-6-2-8-4-3-8" />
+ <path
+ style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:0.5;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ d="m 2040,219.99998 v -515.00002 0 0"
+ id="path207708-4-3-6-2-8-4-3-8-0" />
+ <path
+ style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:0.5;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ d="m 2159.954,219.99997 v -514.99999 0 0"
+ id="path207708-4-3-6-2-8-4-3-8-4" />
+ <path
+ style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:0.5;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ d="m 2280,219.99997 v -514.99999 0 0"
+ id="path207708-4-3-6-2-8-4-3-8-4-0" />
+ <path
+ style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:0.5;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ d="m 960,220.00003 v -515.00007 0 0"
+ id="path207708-9" />
+ <path
+ style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:0.5;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ d="m 840,220.00001 v -375 0 0"
+ id="path207708-9-6" />
+ <path
+ style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:0.5;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ d="m 840,220.00002 v -515.00004 0 0"
+ id="path207708-9-6-2" />
+ <path
+ style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:0.5;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ d="m 720,220.00003 v -515.00007 0 0"
+ id="path207708-9-6-2-5" />
+ <path
+ style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:0.5;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ d="m 600,220.00003 v -515.00007 0 0"
+ id="path207708-9-6-2-5-9" />
+ <path
+ style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:0.5;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ d="m 480,220.00003 v -515.00007 0 0"
+ id="path207708-9-6-2-5-9-0" />
+ <path
+ style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:0.5;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ d="m 360,220.00003 v -515.00007 0 0"
+ id="path207708-9-6-2-5-9-0-5" />
+ <text
+ xml:space="preserve"
+ style="font-weight:bold;font-size:42.5884px;line-height:125%;font-family:'Nimbus Roman';-inkscape-font-specification:'Nimbus Roman, Bold';letter-spacing:0px;word-spacing:0px;writing-mode:lr-tb;fill:none;stroke:#000000;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1"
+ x="142.22464"
+ y="565.10297"
+ id="text907"><tspan
+ sodipodi:role="line"
+ id="tspan905"
+ x="142.22464"
+ y="565.10297" /></text>
+ <text
+ xml:space="preserve"
+ style="font-weight:bold;font-size:42.5884px;line-height:125%;font-family:'Nimbus Roman';-inkscape-font-specification:'Nimbus Roman, Bold';letter-spacing:0px;word-spacing:0px;writing-mode:lr-tb;fill:none;stroke:#000000;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1"
+ x="98.363503"
+ y="637.8432"
+ id="text911"><tspan
+ sodipodi:role="line"
+ id="tspan909"
+ x="98.363503"
+ y="637.8432" /></text>
+ <text
+ xml:space="preserve"
+ style="font-weight:bold;font-size:42.5884px;line-height:125%;font-family:'Nimbus Roman';-inkscape-font-specification:'Nimbus Roman, Bold';letter-spacing:0px;word-spacing:0px;writing-mode:lr-tb;fill:none;stroke:#000000;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1"
+ x="-59.575905"
+ y="580.05695"
+ id="text915"><tspan
+ sodipodi:role="line"
+ id="tspan913"
+ x="-59.575905"
+ y="580.05695" /></text>
+ <rect
+ style="fill:#333333;fill-opacity:1;fill-rule:evenodd;stroke:none;stroke-width:2;stroke-opacity:1"
+ id="rect917-0-0"
+ width="980"
+ height="45.000004"
+ x="360"
+ y="154.99997"
+ ry="2.2558987" />
+ <text
+ xml:space="preserve"
+ style="font-weight:bold;font-size:13.3333px;line-height:125%;font-family:'Nimbus Roman';-inkscape-font-specification:'Nimbus Roman, Bold';letter-spacing:0px;word-spacing:0px;writing-mode:lr-tb;fill:#fffefe;fill-opacity:1;stroke:#000000;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1"
+ x="420.52835"
+ y="174.12433"
+ id="text1185-3-55-4"><tspan
+ sodipodi:role="line"
+ x="420.52835"
+ y="174.12433"
+ style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:13.3333px;font-family:'Liberation Sans';-inkscape-font-specification:'Liberation Sans Bold';text-align:center;text-anchor:middle;fill:#fffefe;fill-opacity:1;stroke:none"
+ id="tspan957-2-8">Dunfell (LTS)</tspan><tspan
+ sodipodi:role="line"
+ x="420.52835"
+ y="192.121"
+ style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:13.3333px;font-family:'Liberation Sans';-inkscape-font-specification:'Liberation Sans Bold';text-align:center;text-anchor:middle;fill:#fffefe;fill-opacity:1;stroke:none"
+ id="tspan10317">3.1</tspan></text>
+ <rect
+ style="fill:#333333;fill-opacity:1;fill-rule:evenodd;stroke:none;stroke-width:2;stroke-opacity:1"
+ id="rect917-0-0-4"
+ width="140.00002"
+ height="45.000004"
+ x="480"
+ y="99.999969"
+ ry="2.2558987" />
+ <text
+ xml:space="preserve"
+ style="font-weight:bold;font-size:13.3333px;line-height:125%;font-family:'Nimbus Roman';-inkscape-font-specification:'Nimbus Roman, Bold';letter-spacing:0px;word-spacing:0px;writing-mode:lr-tb;fill:#fffefe;fill-opacity:1;stroke:#000000;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1"
+ x="534.10651"
+ y="118.94971"
+ id="text1185-3-55-4-0"><tspan
+ sodipodi:role="line"
+ x="534.10651"
+ y="118.94971"
+ style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:13.3333px;font-family:'Liberation Sans';-inkscape-font-specification:'Liberation Sans Bold';text-align:center;text-anchor:middle;fill:#fffefe;fill-opacity:1;stroke:none"
+ id="tspan957-2-8-6">Gatesgarth</tspan><tspan
+ sodipodi:role="line"
+ x="534.10651"
+ y="136.94638"
+ style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:13.3333px;font-family:'Liberation Sans';-inkscape-font-specification:'Liberation Sans Bold';text-align:center;text-anchor:middle;fill:#fffefe;fill-opacity:1;stroke:none"
+ id="tspan10317-2">3.2</tspan></text>
+ <rect
+ style="fill:#333333;fill-opacity:1;fill-rule:evenodd;stroke:none;stroke-width:2;stroke-opacity:1"
+ id="rect917-0-0-4-4"
+ width="260"
+ height="45.000004"
+ x="599.99994"
+ y="45.000011"
+ ry="2.2558987" />
+ <rect
+ style="fill:#333333;fill-opacity:1;fill-rule:evenodd;stroke:none;stroke-width:2;stroke-opacity:1"
+ id="rect917-0-0-4-4-9"
+ width="160.00002"
+ height="45.000004"
+ x="720"
+ y="-9.9999905"
+ ry="2.2558987" />
+ <text
+ xml:space="preserve"
+ style="font-weight:bold;font-size:13.3333px;line-height:125%;font-family:'Nimbus Roman';-inkscape-font-specification:'Nimbus Roman, Bold';letter-spacing:0px;word-spacing:0px;writing-mode:lr-tb;fill:#fffefe;fill-opacity:1;stroke:#000000;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1"
+ x="766.10297"
+ y="9.57586"
+ id="text1185-3-55-4-0-0"><tspan
+ sodipodi:role="line"
+ x="766.10297"
+ y="9.57586"
+ style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:13.3333px;font-family:'Liberation Sans';-inkscape-font-specification:'Liberation Sans Bold';text-align:center;text-anchor:middle;fill:#fffefe;fill-opacity:1;stroke:none"
+ id="tspan957-2-8-6-3">Honister</tspan><tspan
+ sodipodi:role="line"
+ x="766.10297"
+ y="27.57254"
+ style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:13.3333px;font-family:'Liberation Sans';-inkscape-font-specification:'Liberation Sans Bold';text-align:center;text-anchor:middle;fill:#fffefe;fill-opacity:1;stroke:none"
+ id="tspan10317-2-9">3.4</tspan></text>
+ <rect
+ style="fill:#333333;fill-opacity:1;fill-rule:evenodd;stroke:none;stroke-width:2;stroke-opacity:1"
+ id="rect917-0-0-4-4-9-4"
+ width="160.00002"
+ height="45.000004"
+ x="959.99994"
+ y="-120"
+ ry="2.2558987" />
+ <text
+ xml:space="preserve"
+ style="font-weight:bold;font-size:13.3333px;line-height:125%;font-family:'Nimbus Roman';-inkscape-font-specification:'Nimbus Roman, Bold';letter-spacing:0px;word-spacing:0px;writing-mode:lr-tb;fill:#fffefe;fill-opacity:1;stroke:#000000;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1"
+ x="1008.4941"
+ y="-100.605"
+ id="text1185-3-55-4-0-0-0"><tspan
+ sodipodi:role="line"
+ x="1008.4941"
+ y="-100.605"
+ style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:13.3333px;font-family:'Liberation Sans';-inkscape-font-specification:'Liberation Sans Bold';text-align:center;text-anchor:middle;fill:#fffefe;fill-opacity:1;stroke:none"
+ id="tspan957-2-8-6-3-9">Langdale</tspan><tspan
+ sodipodi:role="line"
+ x="1008.4941"
+ y="-82.608322"
+ style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:13.3333px;font-family:'Liberation Sans';-inkscape-font-specification:'Liberation Sans Bold';text-align:center;text-anchor:middle;fill:#fffefe;fill-opacity:1;stroke:none"
+ id="tspan10317-2-9-1">4.1</tspan></text>
+ <rect
+ style="opacity:1;fill:#333333;fill-opacity:1;fill-rule:evenodd;stroke:none;stroke-width:2;stroke-opacity:1"
+ id="rect917-0-0-4-4-9-4-5"
+ width="140.00003"
+ height="45.000004"
+ x="1100"
+ y="-175.00003"
+ ry="2.2558987" />
+ <text
+ xml:space="preserve"
+ style="font-weight:bold;font-size:13.3333px;line-height:125%;font-family:'Nimbus Roman';-inkscape-font-specification:'Nimbus Roman, Bold';letter-spacing:0px;word-spacing:0px;writing-mode:lr-tb;fill:#fffefe;fill-opacity:1;stroke:#000000;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1"
+ x="1156.057"
+ y="-155.49881"
+ id="text1185-3-55-4-0-0-0-1"><tspan
+ sodipodi:role="line"
+ x="1156.057"
+ y="-155.49881"
+ style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:13.3333px;font-family:'Liberation Sans';-inkscape-font-specification:'Liberation Sans Bold';text-align:center;text-anchor:middle;fill:#fffefe;fill-opacity:1;stroke:none"
+ id="tspan957-2-8-6-3-9-7">Mickledore</tspan><tspan
+ sodipodi:role="line"
+ x="1156.057"
+ y="-137.50214"
+ style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:13.3333px;font-family:'Liberation Sans';-inkscape-font-specification:'Liberation Sans Bold';text-align:center;text-anchor:middle;fill:#fffefe;fill-opacity:1;stroke:none"
+ id="tspan10317-2-9-1-4">4.2</tspan></text>
+ <g
+ id="g1379">
+ <rect
+ style="fill:#333333;fill-opacity:1;fill-rule:evenodd;stroke:none;stroke-width:2;stroke-opacity:1"
+ id="rect917-0-0-4-4-9-4-5-38"
+ width="140.00003"
+ height="45.000004"
+ x="1220"
+ y="-230.00005"
+ ry="2.2558987" />
+ <text
+ xml:space="preserve"
+ style="font-weight:bold;font-size:13.3333px;line-height:125%;font-family:'Nimbus Roman';-inkscape-font-specification:'Nimbus Roman, Bold';letter-spacing:0px;word-spacing:0px;writing-mode:lr-tb;fill:#fffefe;fill-opacity:1;stroke:#000000;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1"
+ x="1269.2329"
+ y="-210.32925"
+ id="text1185-3-55-4-0-0-0-1-1"><tspan
+ sodipodi:role="line"
+ x="1269.2329"
+ y="-210.32925"
+ style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:13.3333px;font-family:'Liberation Sans';-inkscape-font-specification:'Liberation Sans Bold';text-align:center;text-anchor:middle;fill:#fffefe;fill-opacity:1;stroke:none"
+ id="tspan957-2-8-6-3-9-7-4">Nanbield</tspan><tspan
+ sodipodi:role="line"
+ x="1269.2329"
+ y="-192.33258"
+ style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:13.3333px;font-family:'Liberation Sans';-inkscape-font-specification:'Liberation Sans Bold';text-align:center;text-anchor:middle;fill:#fffefe;fill-opacity:1;stroke:none"
+ id="tspan10317-2-9-1-4-6">4.3</tspan></text>
+ </g>
+ <rect
+ style="opacity:0.75;fill:#333333;fill-opacity:1;fill-rule:evenodd;stroke:none;stroke-width:2;stroke-opacity:1"
+ id="rect917-0-0-4-4-9-4-5-3-9"
+ width="979.99994"
+ height="45.000004"
+ x="1320"
+ y="-285.00003"
+ ry="2.2558987" />
+ <text
+ xml:space="preserve"
+ style="font-weight:bold;font-size:13.3333px;line-height:125%;font-family:'Nimbus Roman';-inkscape-font-specification:'Nimbus Roman, Bold';letter-spacing:0px;word-spacing:0px;writing-mode:lr-tb;fill:#fffefe;fill-opacity:1;stroke:#000000;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1"
+ x="1373.233"
+ y="-265.32928"
+ id="text1185-3-55-4-0-0-0-1-1-6"><tspan
+ sodipodi:role="line"
+ x="1373.233"
+ y="-265.32928"
+ style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:13.3333px;font-family:'Liberation Sans';-inkscape-font-specification:'Liberation Sans Bold';text-align:center;text-anchor:middle;fill:#fffefe;fill-opacity:1;stroke:none"
+ id="tspan957-2-8-6-3-9-7-4-2">Scarthgap</tspan><tspan
+ sodipodi:role="line"
+ x="1373.233"
+ y="-247.33261"
+ style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:13.3333px;font-family:'Liberation Sans';-inkscape-font-specification:'Liberation Sans Bold';text-align:center;text-anchor:middle;fill:#fffefe;fill-opacity:1;stroke:none"
+ id="tspan10317-2-9-1-4-6-5">5.0</tspan></text>
+ <rect
+ style="fill:#333333;fill-opacity:1;fill-rule:evenodd;stroke:none;stroke-width:2;stroke-opacity:1"
+ id="rect917-0-0-4-4-9-9"
+ width="960.00012"
+ height="45.000004"
+ x="859.99994"
+ y="-64.999992"
+ ry="2.2558987" />
+ <text
+ xml:space="preserve"
+ style="font-weight:bold;font-size:13.3333px;line-height:125%;font-family:'Nimbus Roman';-inkscape-font-specification:'Nimbus Roman, Bold';letter-spacing:0px;word-spacing:0px;writing-mode:lr-tb;fill:#fffefe;fill-opacity:1;stroke:#000000;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1"
+ x="928.49872"
+ y="-45.648258"
+ id="text1185-3-55-4-0-0-9"><tspan
+ sodipodi:role="line"
+ x="928.49872"
+ y="-45.648258"
+ style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:13.3333px;font-family:'Liberation Sans';-inkscape-font-specification:'Liberation Sans Bold';text-align:center;text-anchor:middle;fill:#fffefe;fill-opacity:1;stroke:none"
+ id="tspan957-2-8-6-3-6">Kirkstone (LTS)</tspan><tspan
+ sodipodi:role="line"
+ x="928.49872"
+ y="-27.651579"
+ style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:13.3333px;font-family:'Liberation Sans';-inkscape-font-specification:'Liberation Sans Bold';text-align:center;text-anchor:middle;fill:#fffefe;fill-opacity:1;stroke:none"
+ id="tspan10317-2-9-0">4.0</tspan></text>
+ <text
+ xml:space="preserve"
+ style="font-weight:bold;font-size:13.3333px;line-height:125%;font-family:'Nimbus Roman';-inkscape-font-specification:'Nimbus Roman, Bold';letter-spacing:0px;word-spacing:0px;writing-mode:lr-tb;fill:#fffefe;fill-opacity:1;stroke:#000000;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1"
+ x="653.72168"
+ y="64.866302"
+ id="text1185-3-55-4-0-0-7"><tspan
+ sodipodi:role="line"
+ x="653.72168"
+ y="64.866302"
+ style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:13.3333px;font-family:'Liberation Sans';-inkscape-font-specification:'Liberation Sans Bold';text-align:center;text-anchor:middle;fill:#fffefe;fill-opacity:1;stroke:none"
+ id="tspan957-2-8-6-3-2">Hardknott </tspan><tspan
+ sodipodi:role="line"
+ x="653.72168"
+ y="82.862984"
+ style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:13.3333px;font-family:'Liberation Sans';-inkscape-font-specification:'Liberation Sans Bold';text-align:center;text-anchor:middle;fill:#fffefe;fill-opacity:1;stroke:none"
+ id="tspan10317-2-9-8">3.3</tspan></text>
+ <g
+ id="g1125-0"
+ transform="matrix(0.42240595,0,0,0.41654472,354.53445,-399.96314)"
+ style="stroke:none;stroke-width:2.38399">
+ <rect
+ style="opacity:1;fill:#333333;fill-opacity:1;fill-rule:evenodd;stroke:none;stroke-width:4.76797;stroke-opacity:1"
+ id="rect1061-9"
+ width="452.00439"
+ height="192.0562"
+ x="11.609296"
+ y="276.44562"
+ ry="4.0176301" />
+ <g
+ id="g1109-1"
+ transform="translate(-2.7615661,-1.7576335)"
+ style="stroke:none;stroke-width:2.38399">
+ <path
+ id="path14-9"
+ class="st0"
+ d="m 439.74452,358.11274 c 0,4.22 -3.41,7.64 -7.64,7.64 -4.22,0 -7.63,-3.42 -7.63,-7.64 0,-4.22 3.41,-7.64 7.63,-7.64 4.23,0 7.64,3.42 7.64,7.64 v 0"
+ style="fill:#4a97d2;fill-opacity:1;stroke:none;stroke-width:0.238399;stroke-opacity:1" />
+ <path
+ id="path16-6"
+ class="st1"
+ d="m 114.56452,324.94274 -11.13,-6.3 -22.409996,45.41 -23.9,-45.41 -11.27,6.3 28.41,53.38 c -0.21,0.51 -0.86,1.9 -1.95,4.22 -1.11,2.21 -2.25,4.41 -3.46,6.62 -2.11,3.81 -4.26,6.91 -6.46,9.32 -2.21,2.51 -4.46,4.51 -6.78,6.02 -2.3,1.51 -4.7,2.65 -7.21,3.46 -2.41,0.8 -4.87,1.45 -7.38,1.95 l 5.12,10.68 c 1.6,-0.21 3.75,-0.71 6.46,-1.51 2.81,-0.7 5.86,-2.06 9.17,-4.06 3.3,-2 6.67,-4.86 10.07,-8.57 3.52,-3.71 6.78,-8.62 9.78,-14.73 l 32.939996,-66.78"
+ style="fill:#ffffff;stroke:none;stroke-width:0.238399;stroke-opacity:1" />
+ <path
+ id="path18-2"
+ class="st1"
+ d="m 175.39452,358.18274 c 0,3.51 -0.6,6.76 -1.81,9.78 -1.21,3 -2.86,5.62 -4.95,7.81 -2.01,2.11 -4.41,3.81 -7.22,5.12 -2.71,1.19 -5.67,1.8 -8.87,1.8 -3.21,0 -6.22,-0.6 -9.02,-1.8 -2.81,-1.31 -5.27,-3.01 -7.38,-5.12 -2,-2.19 -3.6,-4.81 -4.81,-7.81 -1.21,-3.01 -1.81,-6.27 -1.81,-9.78 0,-3.51 0.6,-6.76 1.81,-9.77 1.21,-3 2.81,-5.61 4.81,-7.82 2.11,-2.21 4.57,-3.92 7.38,-5.11 2.8,-1.32 5.81,-1.97 9.02,-1.97 3.21,0 6.16,0.65 8.87,1.97 2.81,1.19 5.21,2.9 7.22,5.11 2.1,2.21 3.75,4.81 4.95,7.82 1.2,3.01 1.81,6.26 1.81,9.77 m 13.98,0 c 0,-5.21 -0.95,-10.08 -2.86,-14.59 -1.81,-4.51 -4.36,-8.42 -7.67,-11.73 -3.32,-3.3 -7.22,-5.86 -11.73,-7.67 -4.51,-1.9 -9.38,-2.86 -14.59,-2.86 -5.21,0 -10.08,0.95 -14.59,2.86 -4.51,1.81 -8.43,4.36 -11.73,7.67 -3.3,3.31 -5.92,7.22 -7.82,11.73 -1.9,4.51 -2.86,9.38 -2.86,14.59 0,5.21 0.95,10.08 2.86,14.59 1.9,4.41 4.52,8.27 7.82,11.57 3.3,3.32 7.22,5.92 11.73,7.82 4.51,1.81 9.38,2.71 14.59,2.71 5.21,0 10.08,-0.9 14.59,-2.71 4.51,-1.91 8.41,-4.51 11.73,-7.82 3.3,-3.3 5.86,-7.16 7.67,-11.57 1.91,-4.51 2.86,-9.38 2.86,-14.59"
+ style="fill:#ffffff;stroke:none;stroke-width:0.238399;stroke-opacity:1" />
+ <path
+ id="path20-5"
+ class="st1"
+ d="m 373.22452,358.18274 c 0,3.51 -0.6,6.76 -1.81,9.78 -1.21,3 -2.86,5.62 -4.97,7.81 -2,2.11 -4.4,3.81 -7.21,5.12 -2.71,1.19 -5.67,1.8 -8.87,1.8 -3.21,0 -6.22,-0.6 -9.03,-1.8 -2.8,-1.31 -5.26,-3.01 -7.37,-5.12 -2,-2.19 -3.61,-4.81 -4.81,-7.81 -1.21,-3.01 -1.81,-6.27 -1.81,-9.78 0,-3.51 0.6,-6.76 1.81,-9.77 1.21,-3 2.81,-5.61 4.81,-7.82 2.11,-2.21 4.57,-3.92 7.37,-5.11 2.81,-1.32 5.82,-1.97 9.03,-1.97 3.21,0 6.16,0.65 8.87,1.97 2.81,1.19 5.21,2.9 7.21,5.11 2.11,2.21 3.76,4.81 4.97,7.82 1.21,3.01 1.81,6.26 1.81,9.77 m 13.98,0 c 0,-5.21 -0.95,-10.08 -2.86,-14.59 -1.81,-4.51 -4.36,-8.42 -7.67,-11.73 -3.32,-3.3 -7.22,-5.86 -11.73,-7.67 -4.51,-1.9 -9.38,-2.86 -14.59,-2.86 -5.22,0 -10.08,0.95 -14.59,2.86 -4.51,1.81 -8.43,4.36 -11.73,7.67 -3.3,3.31 -5.92,7.22 -7.82,11.73 -1.9,4.51 -2.86,9.38 -2.86,14.59 0,5.21 0.95,10.08 2.86,14.59 1.9,4.41 4.52,8.27 7.82,11.57 3.3,3.32 7.22,5.92 11.73,7.82 4.51,1.81 9.37,2.71 14.59,2.71 5.21,0 10.08,-0.9 14.59,-2.71 4.51,-1.91 8.41,-4.51 11.73,-7.82 3.3,-3.3 5.86,-7.16 7.67,-11.57 1.91,-4.51 2.86,-9.38 2.86,-14.59"
+ style="fill:#ffffff;stroke:none;stroke-width:0.238399;stroke-opacity:1" />
+ <path
+ id="path22-4"
+ class="st1"
+ d="m 288.05452,298.59274 -13.39,7.52 v 16.45 h -36.21 c -26.15,0 -41.9,12.74 -41.9,35.95 0,36.04 37.55,42.84 64.25,29.96 l -5.63,-10.92 c -21.13,9.23 -44.53,5.3 -44.53,-19.28 0,-15.86 8.26,-24.54 27.49,-24.54 h 36.54 v 43.82 c 0,19.37 22.19,19.81 35.95,11.86 l -5.29,-10.45 c -8.85,4.48 -17.26,5.06 -17.26,-3.53 v -41.7 h 18.32 v -11.17 h -18.32 l -0.02,-23.97 v 0"
+ style="fill:#ffffff;stroke:none;stroke-width:0.238399;stroke-opacity:1" />
+ <path
+ id="path24-4"
+ class="st1"
+ d="m 136.79452,428.30274 h 3.35 c 1.69,-0.01 3.34,1.19 3.34,2.9 0,2.87 -3.23,3.3 -3.23,3.3 l -3.46,0.02 z m -4.84,-4.1 v 25.3 h 4.83 l 0.06,-10.67 c 8.62,0.54 11.84,-2.46 11.84,-7.75 0,-4.75 -4.26,-6.88 -8.34,-6.88 h -8.39 v 0"
+ style="fill:#ffffff;stroke:none;stroke-width:0.238399;stroke-opacity:1" />
+ <path
+ id="path26-9"
+ class="st1"
+ d="m 224.38452,436.87274 c 0,1.23 -0.23,2.39 -0.69,3.46 -0.42,1.04 -1.02,1.95 -1.81,2.74 -0.78,0.78 -1.7,1.4 -2.75,1.86 -1.04,0.42 -2.17,0.64 -3.38,0.64 -1.22,0 -2.35,-0.22 -3.39,-0.64 -1.05,-0.46 -1.96,-1.07 -2.75,-1.86 -0.76,-0.78 -1.36,-1.7 -1.81,-2.74 -0.46,-1.07 -0.69,-2.23 -0.69,-3.46 0,-1.23 0.23,-2.37 0.69,-3.42 0.45,-1.06 1.05,-1.99 1.81,-2.77 0.78,-0.78 1.7,-1.39 2.75,-1.82 1.04,-0.45 2.17,-0.67 3.39,-0.67 1.21,0 2.34,0.23 3.38,0.67 1.05,0.43 1.96,1.04 2.75,1.82 0.78,0.78 1.39,1.71 1.81,2.77 0.46,1.05 0.69,2.19 0.69,3.42 m 4.9,0 c 0,-1.81 -0.35,-3.5 -1.06,-5.06 -0.69,-1.59 -1.65,-2.97 -2.89,-4.12 -1.21,-1.17 -2.64,-2.09 -4.3,-2.75 -1.64,-0.69 -3.41,-1.04 -5.3,-1.04 -1.9,0 -3.69,0.35 -5.35,1.04 -1.64,0.66 -3.06,1.58 -4.27,2.75 -1.22,1.16 -2.17,2.53 -2.89,4.12 -0.69,1.57 -1.03,3.25 -1.03,5.06 0,1.83 0.34,3.53 1.03,5.1 0.72,1.57 1.68,2.94 2.89,4.12 1.21,1.17 2.63,2.09 4.27,2.75 1.66,0.66 3.45,1 5.35,1 1.89,0 3.67,-0.34 5.3,-1 1.66,-0.66 3.1,-1.58 4.3,-2.75 1.24,-1.18 2.21,-2.55 2.89,-4.12 0.71,-1.56 1.06,-3.26 1.06,-5.1"
+ style="fill:#ffffff;stroke:none;stroke-width:0.238399;stroke-opacity:1" />
+ <path
+ id="path28-9"
+ class="st1"
+ d="m 249.02452,424.25274 v 19.18 c 0,0.77 -0.57,1.63 -1.51,1.65 l -1.51,0.06 0.08,4.7 1.41,0.02 c 4.73,0.07 6.36,-4.37 6.36,-6.45 v -19.13"
+ style="fill:#ffffff;stroke:none;stroke-width:0.238399;stroke-opacity:1" />
+ <path
+ id="path30-3"
+ class="st1"
+ d="m 290.20452,424.19274 h -16.13 v 25.22 h 16.2 l -0.07,-4.06 h -11.28 v -6.58 h 9.44 v -4.06 h -9.44 v -6.38 h 11.21 l 0.07,-4.14"
+ style="fill:#ffffff;stroke:none;stroke-width:0.238399;stroke-opacity:1" />
+ <path
+ id="path32-6"
+ class="st1"
+ d="m 327.39452,430.24274 c -8.26,-4.53 -16.39,-1.78 -16.39,6.52 0,6.69 6.43,11.84 17,6.31 l 1.53,4.13 c -10.15,5.58 -23.51,1.6 -23.51,-10.44 0,-10.91 11.85,-16.59 23.36,-10.61 l -1.99,4.09"
+ style="fill:#ffffff;stroke:none;stroke-width:0.238399;stroke-opacity:1" />
+ <path
+ id="path34-0"
+ class="st1"
+ d="m 366.07452,424.19274 0.01,4.13 h -8.05 v 21.16 h -4.85 v -21.15 h -7.73 l 0.11,-4.13 h 20.51"
+ style="fill:#ffffff;stroke:none;stroke-width:0.238399;stroke-opacity:1" />
+ <path
+ id="path36-5"
+ class="st1"
+ d="m 172.67452,428.30274 h 3.35 c 1.69,-0.01 3.34,1.19 3.34,2.9 0,2.87 -3.23,3.3 -3.23,3.3 l -3.46,0.02 z m -4.82,-4.1 v 25.3 h 4.82 v -10.89 h 3.2 l 5.59,10.89 h 5.5 l -6.67,-12.2 c 2.64,-1.18 4.01,-3.26 4.01,-6.22 0,-4.94 -4.33,-6.88 -9.09,-6.88 h -7.36 v 0"
+ style="fill:#ffffff;stroke:none;stroke-width:0.238399;stroke-opacity:1" />
+ </g>
+ </g>
+ <text
+ xml:space="preserve"
+ style="font-weight:bold;font-size:42.5884px;line-height:125%;font-family:'Nimbus Roman';-inkscape-font-specification:'Nimbus Roman, Bold';letter-spacing:0px;word-spacing:0px;writing-mode:lr-tb;fill:none;stroke:#000000;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1"
+ x="313.46567"
+ y="412.9321"
+ id="text3781"><tspan
+ sodipodi:role="line"
+ id="tspan3779"
+ x="313.46567"
+ y="412.9321" /></text>
+ <text
+ xml:space="preserve"
+ style="font-weight:bold;font-size:13.3333px;line-height:125%;font-family:'Nimbus Roman';-inkscape-font-specification:'Nimbus Roman, Bold';letter-spacing:0px;word-spacing:0px;writing-mode:lr-tb;fill:#000000;fill-opacity:1;stroke:#000000;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1"
+ x="1199.6055"
+ y="250.21216"
+ id="text1185-9-7-1-1"><tspan
+ sodipodi:role="line"
+ x="1199.6055"
+ y="250.21216"
+ style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:13.3333px;font-family:'Liberation Sans';-inkscape-font-specification:'Liberation Sans, Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-variant-east-asian:normal;text-align:center;text-anchor:middle;stroke:none"
+ id="tspan31345">Oct.</tspan><tspan
+ sodipodi:role="line"
+ x="1199.6055"
+ y="268.20883"
+ style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:13.3333px;font-family:'Liberation Sans';-inkscape-font-specification:'Liberation Sans, Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-variant-east-asian:normal;text-align:center;text-anchor:middle;stroke:none"
+ id="tspan49906">2023</tspan></text>
+ <text
+ xml:space="preserve"
+ style="font-weight:bold;font-size:13.3333px;line-height:125%;font-family:'Nimbus Roman';-inkscape-font-specification:'Nimbus Roman, Bold';letter-spacing:0px;word-spacing:0px;writing-mode:lr-tb;fill:#000000;fill-opacity:1;stroke:#000000;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1"
+ x="1439.3904"
+ y="249.86044"
+ id="text1185-9-7-1-1-89"><tspan
+ sodipodi:role="line"
+ x="1439.3904"
+ y="249.86044"
+ style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:13.3333px;font-family:'Liberation Sans';-inkscape-font-specification:'Liberation Sans, Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-variant-east-asian:normal;text-align:center;text-anchor:middle;stroke:none"
+ id="tspan31345-7">Oct.</tspan><tspan
+ sodipodi:role="line"
+ x="1439.3904"
+ y="267.85712"
+ style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:13.3333px;font-family:'Liberation Sans';-inkscape-font-specification:'Liberation Sans, Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-variant-east-asian:normal;text-align:center;text-anchor:middle;stroke:none"
+ id="tspan49906-76">2024</tspan></text>
+ <text
+ xml:space="preserve"
+ style="font-weight:bold;font-size:13.3333px;line-height:125%;font-family:'Nimbus Roman';-inkscape-font-specification:'Nimbus Roman, Bold';letter-spacing:0px;word-spacing:0px;writing-mode:lr-tb;fill:#000000;fill-opacity:1;stroke:#000000;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1"
+ x="1679.3094"
+ y="250.58356"
+ id="text1185-9-7-1-1-89-6"><tspan
+ sodipodi:role="line"
+ x="1679.3094"
+ y="250.58356"
+ style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:13.3333px;font-family:'Liberation Sans';-inkscape-font-specification:'Liberation Sans, Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-variant-east-asian:normal;text-align:center;text-anchor:middle;stroke:none"
+ id="tspan31345-7-8">Oct.</tspan><tspan
+ sodipodi:role="line"
+ x="1679.3094"
+ y="268.58023"
+ style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:13.3333px;font-family:'Liberation Sans';-inkscape-font-specification:'Liberation Sans, Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-variant-east-asian:normal;text-align:center;text-anchor:middle;stroke:none"
+ id="tspan49906-76-0">2025</tspan></text>
+ <text
+ xml:space="preserve"
+ style="font-weight:bold;font-size:6.66667px;line-height:125%;font-family:'Nimbus Roman';-inkscape-font-specification:'Nimbus Roman, Bold';letter-spacing:0px;word-spacing:0px;writing-mode:lr-tb;fill:#000000;fill-opacity:1;stroke:#000000;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1"
+ x="849.49744"
+ y="61.106953"
+ id="text1185-9-7-1-1-0"><tspan
+ sodipodi:role="line"
+ x="849.49744"
+ y="61.106953"
+ style="font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;font-size:6.66667px;font-family:'Liberation Sans';-inkscape-font-specification:'Liberation Sans';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-variant-east-asian:normal;text-align:end;text-anchor:end;fill:#fffefe;fill-opacity:1;stroke:none"
+ id="tspan46212">Support for this version was extended to leave</tspan><tspan
+ sodipodi:role="line"
+ x="849.49744"
+ y="70.105324"
+ style="font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;font-size:6.66667px;font-family:'Liberation Sans';-inkscape-font-specification:'Liberation Sans';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-variant-east-asian:normal;text-align:end;text-anchor:end;fill:#fffefe;fill-opacity:1;stroke:none"
+ id="tspan120364">users more time to adapt to override syntax</tspan><tspan
+ sodipodi:role="line"
+ x="849.49744"
+ y="79.103691"
+ style="font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;font-size:6.66667px;font-family:'Liberation Sans';-inkscape-font-specification:'Liberation Sans';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-variant-east-asian:normal;text-align:end;text-anchor:end;fill:#fffefe;fill-opacity:1;stroke:none"
+ id="tspan123280">changes in the 3.4 release.</tspan></text>
+ <text
+ xml:space="preserve"
+ style="font-weight:bold;font-size:13.3333px;line-height:125%;font-family:'Nimbus Roman';-inkscape-font-specification:'Nimbus Roman, Bold';letter-spacing:0px;word-spacing:0px;writing-mode:lr-tb;fill:#000000;fill-opacity:1;stroke:#000000;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1"
+ x="959.52008"
+ y="250.67822"
+ id="text1185-9-7-1-1-0-7"><tspan
+ sodipodi:role="line"
+ x="959.52008"
+ y="250.67822"
+ style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:13.3333px;font-family:'Liberation Sans';-inkscape-font-specification:'Liberation Sans, Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-variant-east-asian:normal;text-align:center;text-anchor:middle;stroke:none"
+ id="tspan31345-42-7">Oct.</tspan><tspan
+ sodipodi:role="line"
+ x="959.52008"
+ y="268.6749"
+ style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:13.3333px;font-family:'Liberation Sans';-inkscape-font-specification:'Liberation Sans, Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-variant-east-asian:normal;text-align:center;text-anchor:middle;stroke:none"
+ id="tspan49906-9-6">2022</tspan></text>
+ <text
+ xml:space="preserve"
+ style="font-weight:bold;font-size:13.3333px;line-height:125%;font-family:'Nimbus Roman';-inkscape-font-specification:'Nimbus Roman, Bold';letter-spacing:0px;word-spacing:0px;writing-mode:lr-tb;fill:#000000;fill-opacity:1;stroke:#000000;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1"
+ x="719.13617"
+ y="250.21216"
+ id="text1185-9-7-1-1-2"><tspan
+ sodipodi:role="line"
+ x="719.13617"
+ y="250.21216"
+ style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:13.3333px;font-family:'Liberation Sans';-inkscape-font-specification:'Liberation Sans, Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-variant-east-asian:normal;text-align:center;text-anchor:middle;stroke:none"
+ id="tspan31345-1">Oct.</tspan><tspan
+ sodipodi:role="line"
+ x="719.13617"
+ y="268.20883"
+ style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:13.3333px;font-family:'Liberation Sans';-inkscape-font-specification:'Liberation Sans, Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-variant-east-asian:normal;text-align:center;text-anchor:middle;stroke:none"
+ id="tspan49906-5">2021</tspan></text>
+ <text
+ xml:space="preserve"
+ style="font-weight:bold;font-size:13.3333px;line-height:125%;font-family:'Nimbus Roman';-inkscape-font-specification:'Nimbus Roman, Bold';letter-spacing:0px;word-spacing:0px;writing-mode:lr-tb;fill:#000000;fill-opacity:1;stroke:#000000;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1"
+ x="478.82367"
+ y="250.21216"
+ id="text1185-9-7-1-1-80"><tspan
+ sodipodi:role="line"
+ x="478.82367"
+ y="250.21216"
+ style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:13.3333px;font-family:'Liberation Sans';-inkscape-font-specification:'Liberation Sans, Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-variant-east-asian:normal;text-align:center;text-anchor:middle;stroke:none"
+ id="tspan31345-5">Oct.</tspan><tspan
+ sodipodi:role="line"
+ x="478.82367"
+ y="268.20883"
+ style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:13.3333px;font-family:'Liberation Sans';-inkscape-font-specification:'Liberation Sans, Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-variant-east-asian:normal;text-align:center;text-anchor:middle;stroke:none"
+ id="tspan49906-6">2020</tspan></text>
+ <text
+ xml:space="preserve"
+ style="font-weight:bold;font-size:13.3333px;line-height:125%;font-family:'Nimbus Roman';-inkscape-font-specification:'Nimbus Roman, Bold';letter-spacing:0px;word-spacing:0px;writing-mode:lr-tb;fill:#000000;fill-opacity:1;stroke:#000000;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1"
+ x="361.81961"
+ y="250.07544"
+ id="text1185-9-7-1-1-8"><tspan
+ sodipodi:role="line"
+ x="361.81961"
+ y="250.07544"
+ style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:13.3333px;font-family:'Liberation Sans';-inkscape-font-specification:'Liberation Sans, Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-variant-east-asian:normal;text-align:center;text-anchor:middle;stroke:none"
+ id="tspan31345-4">Apr.</tspan><tspan
+ sodipodi:role="line"
+ x="361.81961"
+ y="268.07211"
+ style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:13.3333px;font-family:'Liberation Sans';-inkscape-font-specification:'Liberation Sans, Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-variant-east-asian:normal;text-align:center;text-anchor:middle;stroke:none"
+ id="tspan49906-7">2020</tspan></text>
+ <text
+ xml:space="preserve"
+ style="font-weight:bold;font-size:13.3333px;line-height:125%;font-family:'Nimbus Roman';-inkscape-font-specification:'Nimbus Roman, Bold';letter-spacing:0px;word-spacing:0px;writing-mode:lr-tb;fill:#000000;fill-opacity:1;stroke:#000000;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1"
+ x="840.9248"
+ y="250.07544"
+ id="text1185-9-7-1-1-8-1"><tspan
+ sodipodi:role="line"
+ x="840.9248"
+ y="250.07544"
+ style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:13.3333px;font-family:'Liberation Sans';-inkscape-font-specification:'Liberation Sans, Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-variant-east-asian:normal;text-align:center;text-anchor:middle;stroke:none"
+ id="tspan31345-4-0">Apr</tspan><tspan
+ sodipodi:role="line"
+ x="840.9248"
+ y="268.07211"
+ style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:13.3333px;font-family:'Liberation Sans';-inkscape-font-specification:'Liberation Sans, Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-variant-east-asian:normal;text-align:center;text-anchor:middle;stroke:none"
+ id="tspan49906-7-3">2022</tspan></text>
+ <text
+ xml:space="preserve"
+ style="font-weight:bold;font-size:13.3333px;line-height:125%;font-family:'Nimbus Roman';-inkscape-font-specification:'Nimbus Roman, Bold';letter-spacing:0px;word-spacing:0px;writing-mode:lr-tb;fill:#000000;fill-opacity:1;stroke:#000000;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1"
+ x="1321.8608"
+ y="250.07544"
+ id="text1185-9-7-1-1-8-1-0"><tspan
+ sodipodi:role="line"
+ x="1321.8608"
+ y="250.07544"
+ style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:13.3333px;font-family:'Liberation Sans';-inkscape-font-specification:'Liberation Sans, Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-variant-east-asian:normal;text-align:center;text-anchor:middle;stroke:none"
+ id="tspan31345-4-0-4">Apr.</tspan><tspan
+ sodipodi:role="line"
+ x="1321.8608"
+ y="268.07211"
+ style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:13.3333px;font-family:'Liberation Sans';-inkscape-font-specification:'Liberation Sans, Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-variant-east-asian:normal;text-align:center;text-anchor:middle;stroke:none"
+ id="tspan49906-7-3-8">2024</tspan></text>
+ <text
+ xml:space="preserve"
+ style="font-weight:bold;font-size:13.3333px;line-height:125%;font-family:'Nimbus Roman';-inkscape-font-specification:'Nimbus Roman, Bold';letter-spacing:0px;word-spacing:0px;writing-mode:lr-tb;fill:#000000;fill-opacity:1;stroke:#000000;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1"
+ x="1561.8163"
+ y="249.66977"
+ id="text1185-9-7-1-1-8-1-0-4"><tspan
+ sodipodi:role="line"
+ x="1561.8163"
+ y="249.66977"
+ style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:13.3333px;font-family:'Liberation Sans';-inkscape-font-specification:'Liberation Sans, Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-variant-east-asian:normal;text-align:center;text-anchor:middle;stroke:none"
+ id="tspan31345-4-0-4-81">Apr.</tspan><tspan
+ sodipodi:role="line"
+ x="1561.8163"
+ y="267.66644"
+ style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:13.3333px;font-family:'Liberation Sans';-inkscape-font-specification:'Liberation Sans, Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-variant-east-asian:normal;text-align:center;text-anchor:middle;stroke:none"
+ id="tspan49906-7-3-8-2">2025</tspan></text>
+ <text
+ xml:space="preserve"
+ style="font-weight:bold;font-size:13.3333px;line-height:125%;font-family:'Nimbus Roman';-inkscape-font-specification:'Nimbus Roman, Bold';letter-spacing:0px;word-spacing:0px;writing-mode:lr-tb;fill:#000000;fill-opacity:1;stroke:#000000;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1"
+ x="1802.1477"
+ y="250.26334"
+ id="text1185-9-7-1-1-8-1-0-4-2"><tspan
+ sodipodi:role="line"
+ x="1802.1477"
+ y="250.26334"
+ style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:13.3333px;font-family:'Liberation Sans';-inkscape-font-specification:'Liberation Sans, Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-variant-east-asian:normal;text-align:center;text-anchor:middle;stroke:none"
+ id="tspan31345-4-0-4-81-5">Apr.</tspan><tspan
+ sodipodi:role="line"
+ x="1802.1477"
+ y="268.26001"
+ style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:13.3333px;font-family:'Liberation Sans';-inkscape-font-specification:'Liberation Sans, Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-variant-east-asian:normal;text-align:center;text-anchor:middle;stroke:none"
+ id="tspan49906-7-3-8-2-8">2026</tspan></text>
+ <text
+ xml:space="preserve"
+ style="font-weight:bold;font-size:13.3333px;line-height:125%;font-family:'Nimbus Roman';-inkscape-font-specification:'Nimbus Roman, Bold';letter-spacing:0px;word-spacing:0px;writing-mode:lr-tb;fill:#000000;fill-opacity:1;stroke:#000000;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1"
+ x="1081.4458"
+ y="250.07544"
+ id="text1185-9-7-1-1-8-1-0-2"><tspan
+ sodipodi:role="line"
+ x="1081.4458"
+ y="250.07544"
+ style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:13.3333px;font-family:'Liberation Sans';-inkscape-font-specification:'Liberation Sans, Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-variant-east-asian:normal;text-align:center;text-anchor:middle;stroke:none"
+ id="tspan31345-4-0-4-8">Apr.</tspan><tspan
+ sodipodi:role="line"
+ x="1081.4458"
+ y="268.07211"
+ style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:13.3333px;font-family:'Liberation Sans';-inkscape-font-specification:'Liberation Sans, Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-variant-east-asian:normal;text-align:center;text-anchor:middle;stroke:none"
+ id="tspan49906-7-3-8-3">2023</tspan></text>
+ <text
+ xml:space="preserve"
+ style="font-weight:bold;font-size:13.3333px;line-height:125%;font-family:'Nimbus Roman';-inkscape-font-specification:'Nimbus Roman, Bold';letter-spacing:0px;word-spacing:0px;writing-mode:lr-tb;fill:#000000;fill-opacity:1;stroke:#000000;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1"
+ x="602.51526"
+ y="250.07544"
+ id="text1185-9-7-1-1-8-1-7"><tspan
+ sodipodi:role="line"
+ x="602.51526"
+ y="250.07544"
+ style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:13.3333px;font-family:'Liberation Sans';-inkscape-font-specification:'Liberation Sans, Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-variant-east-asian:normal;text-align:center;text-anchor:middle;stroke:none"
+ id="tspan31345-4-0-5">Apr.</tspan><tspan
+ sodipodi:role="line"
+ x="602.51526"
+ y="268.07211"
+ style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:13.3333px;font-family:'Liberation Sans';-inkscape-font-specification:'Liberation Sans, Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-variant-east-asian:normal;text-align:center;text-anchor:middle;stroke:none"
+ id="tspan49906-7-3-6">2021</tspan></text>
+ <text
+ xml:space="preserve"
+ style="font-weight:bold;font-size:42.5884px;line-height:125%;font-family:'Nimbus Roman';-inkscape-font-specification:'Nimbus Roman, Bold';letter-spacing:0px;word-spacing:0px;writing-mode:lr-tb;fill:none;stroke:#000000;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1"
+ x="-16.290483"
+ y="345.7359"
+ id="text3116"><tspan
+ sodipodi:role="line"
+ id="tspan3114"
+ x="-16.290483"
+ y="345.7359" /></text>
+ <path
+ id="path29430"
+ style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:2;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ d="M 319.99936,219.99912 H 2300 Z" />
+ <path
+ style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:2;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ d="m 360,219.99997 v 10.00004 0"
+ id="path29548" />
+ <path
+ style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:2;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ d="m 480,219.99996 v 10 0"
+ id="path29548-5" />
+ <path
+ style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:2;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ d="m 600,219.99992 v 10.00005 0"
+ id="path29548-5-1" />
+ <path
+ style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:2;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ d="m 720,220.00002 v 9.99999 0"
+ id="path29548-5-1-3" />
+ <path
+ style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:2;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ d="m 840,220.00002 v 9.99995 0"
+ id="path29548-5-1-3-6" />
+ <path
+ style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:2;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ d="m 960,220.00002 v 9.99999 0"
+ id="path29548-5-1-3-6-3" />
+ <path
+ style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:2;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ d="m 1080,220.00002 v 9.99999 0"
+ id="path29548-5-1-3-6-3-1" />
+ <path
+ style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ d="m 380,219.99997 v 5.00004 0"
+ id="path29548-8"
+ inkscape:transform-center-x="14.782001"
+ inkscape:transform-center-y="-0.085282837" />
+ <path
+ style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ d="m 400,219.99997 v 5.00004 0"
+ id="path29548-8-5"
+ inkscape:transform-center-x="14.782001"
+ inkscape:transform-center-y="-0.085282837" />
+ <path
+ style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:0.999997;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ d="m 420,219.99997 v 5 0"
+ id="path29548-8-5-0"
+ inkscape:transform-center-x="14.782001"
+ inkscape:transform-center-y="-0.085282155" />
+ <path
+ style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:0.999997;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ d="m 440,219.99997 v 5 0"
+ id="path29548-8-5-0-6"
+ inkscape:transform-center-x="14.782001"
+ inkscape:transform-center-y="-0.085282155" />
+ <path
+ style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ d="m 460,219.99997 v 5.00004 0"
+ id="path29548-8-5-0-6-4-6"
+ inkscape:transform-center-x="14.782001"
+ inkscape:transform-center-y="-0.085282837" />
+ <path
+ style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ d="m 500,219.99997 v 5.00004 0"
+ id="path29548-8-5-0-6-4-6-2"
+ inkscape:transform-center-x="14.782001"
+ inkscape:transform-center-y="-0.085282837" />
+ <path
+ style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:0.999997;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ d="m 520,219.99997 v 5 0"
+ id="path29548-8-5-0-6-4-6-2-9"
+ inkscape:transform-center-x="14.782001"
+ inkscape:transform-center-y="-0.085282155" />
+ <path
+ style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ d="m 540,219.99997 v 5.00004 0"
+ id="path29548-8-5-0-6-4-6-2-9-0"
+ inkscape:transform-center-x="14.782001"
+ inkscape:transform-center-y="-0.085282837" />
+ <path
+ style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ d="m 560,219.99997 v 5.00004 0"
+ id="path29548-8-5-0-6-4-6-2-9-0-8"
+ inkscape:transform-center-x="14.782001"
+ inkscape:transform-center-y="-0.085282837" />
+ <path
+ style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ d="m 580,219.99997 v 5.00004 0"
+ id="path29548-8-5-0-6-4-6-2-9-0-8-1"
+ inkscape:transform-center-x="14.782001"
+ inkscape:transform-center-y="-0.085282837" />
+ <path
+ style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ d="m 620.266,219.99997 v 5.00004 0"
+ id="path29548-8-5-0-6-4-6-2-9-0-8-1-3"
+ inkscape:transform-center-x="14.782001"
+ inkscape:transform-center-y="-0.085282837" />
+ <path
+ style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ d="m 640,219.99997 v 5.00004 0"
+ id="path29548-8-5-0-6-4-6-2-9-0-8-1-3-1"
+ inkscape:transform-center-x="14.782001"
+ inkscape:transform-center-y="-0.085282837" />
+ <path
+ style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ d="m 660,219.99997 v 5.00004 0"
+ id="path29548-8-5-0-6-4-6-2-9-0-8-1-3-1-9"
+ inkscape:transform-center-x="14.782001"
+ inkscape:transform-center-y="-0.085282837" />
+ <path
+ style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ d="m 679.61073,219.99997 v 5.00004 0"
+ id="path29548-8-5-0-6-4-6-2-9-0-8-1-3-1-9-6"
+ inkscape:transform-center-x="14.782001"
+ inkscape:transform-center-y="-0.085282837" />
+ <path
+ style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ d="m 700,219.99997 v 5.00004 0"
+ id="path29548-8-5-0-6-4-6-2-9-0-8-1-3-1-9-6-9"
+ inkscape:transform-center-x="14.782001"
+ inkscape:transform-center-y="-0.085282837" />
+ <path
+ style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ d="m 740,219.99997 v 5.00004 0"
+ id="path29548-8-5-0-6-4-6-2-9-0-8-1-3-1-9-6-9-3"
+ inkscape:transform-center-x="14.782001"
+ inkscape:transform-center-y="-0.085282837" />
+ <path
+ style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ d="m 760,219.99997 v 5.00004 0"
+ id="path29548-8-5-0-6-4-6-2-9-0-8-1-3-1-9-6-9-3-4"
+ inkscape:transform-center-x="14.782001"
+ inkscape:transform-center-y="-0.085282837" />
+ <path
+ style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ d="m 780.36587,219.99997 v 5.00004 0"
+ id="path29548-8-5-0-6-4-6-2-9-0-8-1-3-1-9-6-9-3-4-0"
+ inkscape:transform-center-x="14.782001"
+ inkscape:transform-center-y="-0.085282837" />
+ <path
+ style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ d="m 800,219.99997 v 5.00004 0"
+ id="path29548-8-5-0-6-4-6-2-9-0-8-1-3-1-9-6-9-3-4-0-4"
+ inkscape:transform-center-x="14.782001"
+ inkscape:transform-center-y="-0.085282837" />
+ <path
+ style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ d="m 820,219.99997 v 5.00004 0"
+ id="path29548-8-5-0-6-4-6-2-9-0-8-1-3-1-9-6-9-3-4-0-4-6"
+ inkscape:transform-center-x="14.782001"
+ inkscape:transform-center-y="-0.085282837" />
+ <path
+ style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ d="m 860,219.99997 v 5.00004 0"
+ id="path29548-8-5-0-6-4-6-2-9-0-8-1-3-1-9-6-9-3-4-0-4-6-2"
+ inkscape:transform-center-x="14.782001"
+ inkscape:transform-center-y="-0.085282837" />
+ <path
+ style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ d="m 880,219.99997 v 5.00004 0"
+ id="path29548-8-5-0-6-4-6-2-9-0-8-1-3-1-9-6-9-3-4-0-4-6-2-2"
+ inkscape:transform-center-x="14.782001"
+ inkscape:transform-center-y="-0.085282837" />
+ <path
+ style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ d="m 899.72384,219.99997 v 5.00004 0"
+ id="path29548-8-5-0-6-4-6-2-9-0-8-1-3-1-9-6-9-3-4-0-4-6-2-2-7"
+ inkscape:transform-center-x="14.782001"
+ inkscape:transform-center-y="-0.085282837" />
+ <path
+ style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ d="m 920,219.99997 v 5.00004 0"
+ id="path29548-8-5-0-6-4-6-2-9-0-8-1-3-1-9-6-9-3-4-0-4-6-2-2-7-6"
+ inkscape:transform-center-x="14.782001"
+ inkscape:transform-center-y="-0.085282837" />
+ <path
+ style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ d="m 940,219.99997 v 5.00004 0"
+ id="path29548-8-5-0-6-4-6-2-9-0-8-1-3-1-9-6-9-3-4-0-4-6-2-2-7-6-1"
+ inkscape:transform-center-x="14.782001"
+ inkscape:transform-center-y="-0.085282837" />
+ <path
+ style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ d="m 980,219.99997 v 5.00004 0"
+ id="path29548-8-5-0-6-4-6-2-9-0-8-1-3-1-9-6-9-3-4-0-4-6-2-2-7-6-1-9"
+ inkscape:transform-center-x="14.782001"
+ inkscape:transform-center-y="-0.085282837" />
+ <path
+ style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ d="m 1000,219.99997 v 5.00004 0"
+ id="path29548-8-5-0-6-4-6-2-9-0-8-1-3-1-9-6-9-3-4-0-4-6-2-2-7-6-1-9-9"
+ inkscape:transform-center-x="14.782001"
+ inkscape:transform-center-y="-0.085282837" />
+ <path
+ style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ d="m 1020,219.99997 v 5.00004 0"
+ id="path29548-8-5-0-6-4-6-2-9-0-8-1-3-1-9-6-9-3-4-0-4-6-2-2-7-6-1-9-9-1"
+ inkscape:transform-center-x="14.782001"
+ inkscape:transform-center-y="-0.085282837" />
+ <path
+ style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ d="m 1040,219.99997 v 5.00004 0"
+ id="path29548-8-5-0-6-4-6-2-9-0-8-1-3-1-9-6-9-3-4-0-4-6-2-2-7-6-1-9-9-1-4"
+ inkscape:transform-center-x="14.782001"
+ inkscape:transform-center-y="-0.085282837" />
+ <path
+ style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ d="m 1059.7216,219.99997 v 5.00004 0"
+ id="path29548-8-5-0-6-4-6-2-9-0-8-1-3-1-9-6-9-3-4-0-4-6-2-2-7-6-1-9-9-1-4-9"
+ inkscape:transform-center-x="-14.78205"
+ inkscape:transform-center-y="-0.085282837" />
+ <path
+ style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ d="m 1100,219.99997 v 5.00004 0"
+ id="path29548-8-5-0-6-4-6-2-9-0-8-1-3-1-9-6-9-3-4-0-4-6-2-2-7-6-1-9-0"
+ inkscape:transform-center-x="14.782001"
+ inkscape:transform-center-y="-0.085282837" />
+ <path
+ style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ d="m 1120,219.99997 v 5.00004 0"
+ id="path29548-8-5-0-6-4-6-2-9-0-8-1-3-1-9-6-9-3-4-0-4-6-2-2-7-6-1-9-9-7"
+ inkscape:transform-center-x="14.782001"
+ inkscape:transform-center-y="-0.085282837" />
+ <path
+ style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ d="m 1140,219.99997 v 5.00004 0"
+ id="path29548-8-5-0-6-4-6-2-9-0-8-1-3-1-9-6-9-3-4-0-4-6-2-2-7-6-1-9-9-1-5"
+ inkscape:transform-center-x="14.782001"
+ inkscape:transform-center-y="-0.085282837" />
+ <path
+ style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ d="m 1160,219.99997 v 5.00004 0"
+ id="path29548-8-5-0-6-4-6-2-9-0-8-1-3-1-9-6-9-3-4-0-4-6-2-2-7-6-1-9-9-1-4-8"
+ inkscape:transform-center-x="14.782001"
+ inkscape:transform-center-y="-0.085282837" />
+ <path
+ style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ d="m 1179.7216,219.99997 v 5.00004 0"
+ id="path29548-8-5-0-6-4-6-2-9-0-8-1-3-1-9-6-9-3-4-0-4-6-2-2-7-6-1-9-9-1-4-9-7"
+ inkscape:transform-center-x="-14.78205"
+ inkscape:transform-center-y="-0.085282837" />
+ <path
+ style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:2;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ d="m 1320,219.99996 v 10 0"
+ id="path29548-5-1-3-6-3-1-0-8" />
+ <g
+ id="g1267">
+ <path
+ style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:2;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ d="m 1200,220.00002 v 9.99999 0"
+ id="path29548-5-1-3-6-3-1-0" />
+ <path
+ style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ d="m 1220,219.99997 v 5.00004 0"
+ id="path29548-8-5-0-6-4-6-2-9-0-8-1-3-1-9-6-9-3-4-0-4-6-2-2-7-6-1-9-0-5"
+ inkscape:transform-center-x="14.782001"
+ inkscape:transform-center-y="-0.085282837" />
+ <path
+ style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ d="m 1240,219.99997 v 5.00004 0"
+ id="path29548-8-5-0-6-4-6-2-9-0-8-1-3-1-9-6-9-3-4-0-4-6-2-2-7-6-1-9-9-7-5"
+ inkscape:transform-center-x="14.782001"
+ inkscape:transform-center-y="-0.085282837" />
+ <path
+ style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ d="m 1260,219.99997 v 5.00004 0"
+ id="path29548-8-5-0-6-4-6-2-9-0-8-1-3-1-9-6-9-3-4-0-4-6-2-2-7-6-1-9-9-1-5-2"
+ inkscape:transform-center-x="14.782001"
+ inkscape:transform-center-y="-0.085282837" />
+ <path
+ style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ d="m 1280,219.99997 v 5.00004 0"
+ id="path29548-8-5-0-6-4-6-2-9-0-8-1-3-1-9-6-9-3-4-0-4-6-2-2-7-6-1-9-9-1-4-8-9"
+ inkscape:transform-center-x="14.782001"
+ inkscape:transform-center-y="-0.085282837" />
+ <path
+ style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ d="m 1299.7216,219.99997 v 5.00004 0"
+ id="path29548-8-5-0-6-4-6-2-9-0-8-1-3-1-9-6-9-3-4-0-4-6-2-2-7-6-1-9-9-1-4-9-7-0"
+ inkscape:transform-center-x="-14.78205"
+ inkscape:transform-center-y="-0.085282837" />
+ </g>
+ <g
+ id="g1267-4"
+ transform="translate(240,-4e-5)">
+ <path
+ style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:2;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ d="m 1200,220.00002 v 9.99999 0"
+ id="path29548-5-1-3-6-3-1-0-3" />
+ <path
+ style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ d="m 1220,219.99997 v 5.00004 0"
+ id="path29548-8-5-0-6-4-6-2-9-0-8-1-3-1-9-6-9-3-4-0-4-6-2-2-7-6-1-9-0-5-0"
+ inkscape:transform-center-x="14.782001"
+ inkscape:transform-center-y="-0.085282837" />
+ <path
+ style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ d="m 1240,219.99997 v 5.00004 0"
+ id="path29548-8-5-0-6-4-6-2-9-0-8-1-3-1-9-6-9-3-4-0-4-6-2-2-7-6-1-9-9-7-5-3"
+ inkscape:transform-center-x="14.782001"
+ inkscape:transform-center-y="-0.085282837" />
+ <path
+ style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ d="m 1260,219.99997 v 5.00004 0"
+ id="path29548-8-5-0-6-4-6-2-9-0-8-1-3-1-9-6-9-3-4-0-4-6-2-2-7-6-1-9-9-1-5-2-0"
+ inkscape:transform-center-x="14.782001"
+ inkscape:transform-center-y="-0.085282837" />
+ <path
+ style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ d="m 1280,219.99997 v 5.00004 0"
+ id="path29548-8-5-0-6-4-6-2-9-0-8-1-3-1-9-6-9-3-4-0-4-6-2-2-7-6-1-9-9-1-4-8-9-9"
+ inkscape:transform-center-x="14.782001"
+ inkscape:transform-center-y="-0.085282837" />
+ <path
+ style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ d="m 1299.7216,219.99997 v 5.00004 0"
+ id="path29548-8-5-0-6-4-6-2-9-0-8-1-3-1-9-6-9-3-4-0-4-6-2-2-7-6-1-9-9-1-4-9-7-0-2"
+ inkscape:transform-center-x="-14.78205"
+ inkscape:transform-center-y="-0.085282837" />
+ </g>
+ <g
+ id="g1267-4-5"
+ transform="translate(480,-5e-5)">
+ <path
+ style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:2;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ d="m 1200,220.00002 v 9.99999 0"
+ id="path29548-5-1-3-6-3-1-0-3-4" />
+ <path
+ style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ d="m 1220,219.99997 v 5.00004 0"
+ id="path29548-8-5-0-6-4-6-2-9-0-8-1-3-1-9-6-9-3-4-0-4-6-2-2-7-6-1-9-0-5-0-0"
+ inkscape:transform-center-x="14.782001"
+ inkscape:transform-center-y="-0.085282837" />
+ <path
+ style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ d="m 1240,219.99997 v 5.00004 0"
+ id="path29548-8-5-0-6-4-6-2-9-0-8-1-3-1-9-6-9-3-4-0-4-6-2-2-7-6-1-9-9-7-5-3-5"
+ inkscape:transform-center-x="14.782001"
+ inkscape:transform-center-y="-0.085282837" />
+ <path
+ style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ d="m 1260,219.99997 v 5.00004 0"
+ id="path29548-8-5-0-6-4-6-2-9-0-8-1-3-1-9-6-9-3-4-0-4-6-2-2-7-6-1-9-9-1-5-2-0-9"
+ inkscape:transform-center-x="14.782001"
+ inkscape:transform-center-y="-0.085282837" />
+ <path
+ style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ d="m 1280,219.99997 v 5.00004 0"
+ id="path29548-8-5-0-6-4-6-2-9-0-8-1-3-1-9-6-9-3-4-0-4-6-2-2-7-6-1-9-9-1-4-8-9-9-4"
+ inkscape:transform-center-x="14.782001"
+ inkscape:transform-center-y="-0.085282837" />
+ <path
+ style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ d="m 1299.7216,219.99997 v 5.00004 0"
+ id="path29548-8-5-0-6-4-6-2-9-0-8-1-3-1-9-6-9-3-4-0-4-6-2-2-7-6-1-9-9-1-4-9-7-0-2-6"
+ inkscape:transform-center-x="-14.78205"
+ inkscape:transform-center-y="-0.085282837" />
+ </g>
+ <g
+ id="g1267-4-5-22"
+ transform="translate(600,-4e-5)">
+ <path
+ style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:2;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ d="m 1200,220.00002 v 9.99999 0"
+ id="path29548-5-1-3-6-3-1-0-3-4-0" />
+ <path
+ style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ d="m 1220,219.99997 v 5.00004 0"
+ id="path29548-8-5-0-6-4-6-2-9-0-8-1-3-1-9-6-9-3-4-0-4-6-2-2-7-6-1-9-0-5-0-0-55"
+ inkscape:transform-center-x="14.782001"
+ inkscape:transform-center-y="-0.085282837" />
+ <path
+ style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ d="m 1240,219.99997 v 5.00004 0"
+ id="path29548-8-5-0-6-4-6-2-9-0-8-1-3-1-9-6-9-3-4-0-4-6-2-2-7-6-1-9-9-7-5-3-5-2"
+ inkscape:transform-center-x="14.782001"
+ inkscape:transform-center-y="-0.085282837" />
+ <path
+ style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ d="m 1260,219.99997 v 5.00004 0"
+ id="path29548-8-5-0-6-4-6-2-9-0-8-1-3-1-9-6-9-3-4-0-4-6-2-2-7-6-1-9-9-1-5-2-0-9-90"
+ inkscape:transform-center-x="14.782001"
+ inkscape:transform-center-y="-0.085282837" />
+ <path
+ style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ d="m 1280,219.99997 v 5.00004 0"
+ id="path29548-8-5-0-6-4-6-2-9-0-8-1-3-1-9-6-9-3-4-0-4-6-2-2-7-6-1-9-9-1-4-8-9-9-4-2"
+ inkscape:transform-center-x="14.782001"
+ inkscape:transform-center-y="-0.085282837" />
+ <path
+ style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ d="m 1299.7216,219.99997 v 5.00004 0"
+ id="path29548-8-5-0-6-4-6-2-9-0-8-1-3-1-9-6-9-3-4-0-4-6-2-2-7-6-1-9-9-1-4-9-7-0-2-6-8"
+ inkscape:transform-center-x="-14.78205"
+ inkscape:transform-center-y="-0.085282837" />
+ </g>
+ <g
+ id="g1267-4-5-9"
+ transform="translate(360,-4e-5)">
+ <path
+ style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:2;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ d="m 1200,220.00002 v 9.99999 0"
+ id="path29548-5-1-3-6-3-1-0-3-4-2" />
+ <path
+ style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ d="m 1220,219.99997 v 5.00004 0"
+ id="path29548-8-5-0-6-4-6-2-9-0-8-1-3-1-9-6-9-3-4-0-4-6-2-2-7-6-1-9-0-5-0-0-2"
+ inkscape:transform-center-x="14.782001"
+ inkscape:transform-center-y="-0.085282837" />
+ <path
+ style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ d="m 1240,219.99997 v 5.00004 0"
+ id="path29548-8-5-0-6-4-6-2-9-0-8-1-3-1-9-6-9-3-4-0-4-6-2-2-7-6-1-9-9-7-5-3-5-4"
+ inkscape:transform-center-x="14.782001"
+ inkscape:transform-center-y="-0.085282837" />
+ <path
+ style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ d="m 1260,219.99997 v 5.00004 0"
+ id="path29548-8-5-0-6-4-6-2-9-0-8-1-3-1-9-6-9-3-4-0-4-6-2-2-7-6-1-9-9-1-5-2-0-9-7"
+ inkscape:transform-center-x="14.782001"
+ inkscape:transform-center-y="-0.085282837" />
+ <path
+ style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ d="m 1280,219.99997 v 5.00004 0"
+ id="path29548-8-5-0-6-4-6-2-9-0-8-1-3-1-9-6-9-3-4-0-4-6-2-2-7-6-1-9-9-1-4-8-9-9-4-7"
+ inkscape:transform-center-x="14.782001"
+ inkscape:transform-center-y="-0.085282837" />
+ <path
+ style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ d="m 1299.7216,219.99997 v 5.00004 0"
+ id="path29548-8-5-0-6-4-6-2-9-0-8-1-3-1-9-6-9-3-4-0-4-6-2-2-7-6-1-9-9-1-4-9-7-0-2-6-5"
+ inkscape:transform-center-x="-14.78205"
+ inkscape:transform-center-y="-0.085282837" />
+ </g>
+ <path
+ style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:2;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ d="m 1800,219.99997 v 9.99999 0"
+ id="path29548-5-1-3-6-3-1-0-3-4-2-0" />
+ <path
+ style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ d="m 1340,219.99997 v 5.00004 0"
+ id="path29548-8-5-0-6-4-6-2-9-0-8-1-3-1-9-6-9-3-4-0-4-6-2-2-7-6-1-9-0-5-3"
+ inkscape:transform-center-x="14.782001"
+ inkscape:transform-center-y="-0.085282837" />
+ <path
+ style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ d="m 1360,219.99997 v 5.00004 0"
+ id="path29548-8-5-0-6-4-6-2-9-0-8-1-3-1-9-6-9-3-4-0-4-6-2-2-7-6-1-9-9-7-5-0"
+ inkscape:transform-center-x="14.782001"
+ inkscape:transform-center-y="-0.085282837" />
+ <path
+ style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ d="m 1380,219.99997 v 5.00004 0"
+ id="path29548-8-5-0-6-4-6-2-9-0-8-1-3-1-9-6-9-3-4-0-4-6-2-2-7-6-1-9-9-1-5-2-3"
+ inkscape:transform-center-x="14.782001"
+ inkscape:transform-center-y="-0.085282837" />
+ <path
+ style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ d="m 1400,219.99997 v 5.00004 0"
+ id="path29548-8-5-0-6-4-6-2-9-0-8-1-3-1-9-6-9-3-4-0-4-6-2-2-7-6-1-9-9-1-4-8-9-0"
+ inkscape:transform-center-x="14.782001"
+ inkscape:transform-center-y="-0.085282837" />
+ <path
+ style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ d="m 1419.7216,219.99997 v 5.00004 0"
+ id="path29548-8-5-0-6-4-6-2-9-0-8-1-3-1-9-6-9-3-4-0-4-6-2-2-7-6-1-9-9-1-4-9-7-0-9"
+ inkscape:transform-center-x="-14.78205"
+ inkscape:transform-center-y="-0.085282837" />
+ <text
+ xml:space="preserve"
+ style="font-weight:bold;font-size:13.3333px;line-height:125%;font-family:'Nimbus Roman';-inkscape-font-specification:'Nimbus Roman, Bold';letter-spacing:0px;word-spacing:0px;writing-mode:lr-tb;fill:#000000;fill-opacity:1;stroke:#000000;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1"
+ x="1919.3904"
+ y="249.86044"
+ id="text1185-9-7-1-1-89-62"><tspan
+ sodipodi:role="line"
+ x="1919.3904"
+ y="249.86044"
+ style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:13.3333px;font-family:'Liberation Sans';-inkscape-font-specification:'Liberation Sans, Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-variant-east-asian:normal;text-align:center;text-anchor:middle;stroke:none"
+ id="tspan31345-7-6">Oct.</tspan><tspan
+ sodipodi:role="line"
+ x="1919.3904"
+ y="267.85712"
+ style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:13.3333px;font-family:'Liberation Sans';-inkscape-font-specification:'Liberation Sans, Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-variant-east-asian:normal;text-align:center;text-anchor:middle;stroke:none"
+ id="tspan49906-76-7">2026</tspan></text>
+ <text
+ xml:space="preserve"
+ style="font-weight:bold;font-size:13.3333px;line-height:125%;font-family:'Nimbus Roman';-inkscape-font-specification:'Nimbus Roman, Bold';letter-spacing:0px;word-spacing:0px;writing-mode:lr-tb;fill:#000000;fill-opacity:1;stroke:#000000;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1"
+ x="2159.3093"
+ y="250.58356"
+ id="text1185-9-7-1-1-89-6-5"><tspan
+ sodipodi:role="line"
+ x="2159.3093"
+ y="250.58356"
+ style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:13.3333px;font-family:'Liberation Sans';-inkscape-font-specification:'Liberation Sans, Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-variant-east-asian:normal;text-align:center;text-anchor:middle;stroke:none"
+ id="tspan31345-7-8-6">Oct.</tspan><tspan
+ sodipodi:role="line"
+ x="2159.3093"
+ y="268.58023"
+ style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:13.3333px;font-family:'Liberation Sans';-inkscape-font-specification:'Liberation Sans, Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-variant-east-asian:normal;text-align:center;text-anchor:middle;stroke:none"
+ id="tspan49906-76-0-9">2027</tspan></text>
+ <text
+ xml:space="preserve"
+ style="font-weight:bold;font-size:13.3333px;line-height:125%;font-family:'Nimbus Roman';-inkscape-font-specification:'Nimbus Roman, Bold';letter-spacing:0px;word-spacing:0px;writing-mode:lr-tb;fill:#000000;fill-opacity:1;stroke:#000000;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1"
+ x="2041.8163"
+ y="249.66977"
+ id="text1185-9-7-1-1-8-1-0-4-8"><tspan
+ sodipodi:role="line"
+ x="2041.8163"
+ y="249.66977"
+ style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:13.3333px;font-family:'Liberation Sans';-inkscape-font-specification:'Liberation Sans, Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-variant-east-asian:normal;text-align:center;text-anchor:middle;stroke:none"
+ id="tspan31345-4-0-4-81-7">Apr.</tspan><tspan
+ sodipodi:role="line"
+ x="2041.8163"
+ y="267.66644"
+ style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:13.3333px;font-family:'Liberation Sans';-inkscape-font-specification:'Liberation Sans, Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-variant-east-asian:normal;text-align:center;text-anchor:middle;stroke:none"
+ id="tspan49906-7-3-8-2-2">2027</tspan></text>
+ <text
+ xml:space="preserve"
+ style="font-weight:bold;font-size:13.3333px;line-height:125%;font-family:'Nimbus Roman';-inkscape-font-specification:'Nimbus Roman, Bold';letter-spacing:0px;word-spacing:0px;writing-mode:lr-tb;fill:#000000;fill-opacity:1;stroke:#000000;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1"
+ x="2282.1477"
+ y="250.26334"
+ id="text1185-9-7-1-1-8-1-0-4-2-8"><tspan
+ sodipodi:role="line"
+ x="2282.1477"
+ y="250.26334"
+ style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:13.3333px;font-family:'Liberation Sans';-inkscape-font-specification:'Liberation Sans, Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-variant-east-asian:normal;text-align:center;text-anchor:middle;stroke:none"
+ id="tspan31345-4-0-4-81-5-2">Apr.</tspan><tspan
+ sodipodi:role="line"
+ x="2282.1477"
+ y="268.26001"
+ style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:13.3333px;font-family:'Liberation Sans';-inkscape-font-specification:'Liberation Sans, Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-variant-east-asian:normal;text-align:center;text-anchor:middle;stroke:none"
+ id="tspan49906-7-3-8-2-8-9">2028</tspan></text>
+ <g
+ id="g1267-4-9"
+ transform="translate(720,-3e-5)">
+ <path
+ style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:2;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ d="m 1200,220.00002 v 9.99999 0"
+ id="path29548-5-1-3-6-3-1-0-3-6" />
+ <path
+ style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ d="m 1220,219.99997 v 5.00004 0"
+ id="path29548-8-5-0-6-4-6-2-9-0-8-1-3-1-9-6-9-3-4-0-4-6-2-2-7-6-1-9-0-5-0-02"
+ inkscape:transform-center-x="14.782001"
+ inkscape:transform-center-y="-0.085282837" />
+ <path
+ style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ d="m 1240,219.99997 v 5.00004 0"
+ id="path29548-8-5-0-6-4-6-2-9-0-8-1-3-1-9-6-9-3-4-0-4-6-2-2-7-6-1-9-9-7-5-3-7"
+ inkscape:transform-center-x="14.782001"
+ inkscape:transform-center-y="-0.085282837" />
+ <path
+ style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ d="m 1260,219.99997 v 5.00004 0"
+ id="path29548-8-5-0-6-4-6-2-9-0-8-1-3-1-9-6-9-3-4-0-4-6-2-2-7-6-1-9-9-1-5-2-0-6"
+ inkscape:transform-center-x="14.782001"
+ inkscape:transform-center-y="-0.085282837" />
+ <path
+ style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ d="m 1280,219.99997 v 5.00004 0"
+ id="path29548-8-5-0-6-4-6-2-9-0-8-1-3-1-9-6-9-3-4-0-4-6-2-2-7-6-1-9-9-1-4-8-9-9-1"
+ inkscape:transform-center-x="14.782001"
+ inkscape:transform-center-y="-0.085282837" />
+ <path
+ style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ d="m 1299.7216,219.99997 v 5.00004 0"
+ id="path29548-8-5-0-6-4-6-2-9-0-8-1-3-1-9-6-9-3-4-0-4-6-2-2-7-6-1-9-9-1-4-9-7-0-2-3"
+ inkscape:transform-center-x="-14.78205"
+ inkscape:transform-center-y="-0.085282837" />
+ </g>
+ <g
+ id="g1267-4-5-2"
+ transform="translate(960,-4e-5)">
+ <path
+ style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:2;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ d="m 1200,220.00002 v 9.99999 0"
+ id="path29548-5-1-3-6-3-1-0-3-4-1" />
+ <path
+ style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ d="m 1220,219.99997 v 5.00004 0"
+ id="path29548-8-5-0-6-4-6-2-9-0-8-1-3-1-9-6-9-3-4-0-4-6-2-2-7-6-1-9-0-5-0-0-5"
+ inkscape:transform-center-x="14.782001"
+ inkscape:transform-center-y="-0.085282837" />
+ <path
+ style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ d="m 1240,219.99997 v 5.00004 0"
+ id="path29548-8-5-0-6-4-6-2-9-0-8-1-3-1-9-6-9-3-4-0-4-6-2-2-7-6-1-9-9-7-5-3-5-9"
+ inkscape:transform-center-x="14.782001"
+ inkscape:transform-center-y="-0.085282837" />
+ <path
+ style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ d="m 1260,219.99997 v 5.00004 0"
+ id="path29548-8-5-0-6-4-6-2-9-0-8-1-3-1-9-6-9-3-4-0-4-6-2-2-7-6-1-9-9-1-5-2-0-9-9"
+ inkscape:transform-center-x="14.782001"
+ inkscape:transform-center-y="-0.085282837" />
+ <path
+ style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ d="m 1280,219.99997 v 5.00004 0"
+ id="path29548-8-5-0-6-4-6-2-9-0-8-1-3-1-9-6-9-3-4-0-4-6-2-2-7-6-1-9-9-1-4-8-9-9-4-1"
+ inkscape:transform-center-x="14.782001"
+ inkscape:transform-center-y="-0.085282837" />
+ <path
+ style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ d="m 1299.7216,219.99997 v 5.00004 0"
+ id="path29548-8-5-0-6-4-6-2-9-0-8-1-3-1-9-6-9-3-4-0-4-6-2-2-7-6-1-9-9-1-4-9-7-0-2-6-4"
+ inkscape:transform-center-x="-14.78205"
+ inkscape:transform-center-y="-0.085282837" />
+ </g>
+ <g
+ id="g1267-4-5-9-9"
+ transform="translate(840,-3e-5)">
+ <path
+ style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:2;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ d="m 1200,220.00002 v 9.99999 0"
+ id="path29548-5-1-3-6-3-1-0-3-4-2-1" />
+ <path
+ style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ d="m 1220,219.99997 v 5.00004 0"
+ id="path29548-8-5-0-6-4-6-2-9-0-8-1-3-1-9-6-9-3-4-0-4-6-2-2-7-6-1-9-0-5-0-0-2-0"
+ inkscape:transform-center-x="14.782001"
+ inkscape:transform-center-y="-0.085282837" />
+ <path
+ style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ d="m 1240,219.99997 v 5.00004 0"
+ id="path29548-8-5-0-6-4-6-2-9-0-8-1-3-1-9-6-9-3-4-0-4-6-2-2-7-6-1-9-9-7-5-3-5-4-7"
+ inkscape:transform-center-x="14.782001"
+ inkscape:transform-center-y="-0.085282837" />
+ <path
+ style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ d="m 1260,219.99997 v 5.00004 0"
+ id="path29548-8-5-0-6-4-6-2-9-0-8-1-3-1-9-6-9-3-4-0-4-6-2-2-7-6-1-9-9-1-5-2-0-9-7-5"
+ inkscape:transform-center-x="14.782001"
+ inkscape:transform-center-y="-0.085282837" />
+ <path
+ style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ d="m 1280,219.99997 v 5.00004 0"
+ id="path29548-8-5-0-6-4-6-2-9-0-8-1-3-1-9-6-9-3-4-0-4-6-2-2-7-6-1-9-9-1-4-8-9-9-4-7-8"
+ inkscape:transform-center-x="14.782001"
+ inkscape:transform-center-y="-0.085282837" />
+ <path
+ style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ d="m 1299.7216,219.99997 v 5.00004 0"
+ id="path29548-8-5-0-6-4-6-2-9-0-8-1-3-1-9-6-9-3-4-0-4-6-2-2-7-6-1-9-9-1-4-9-7-0-2-6-5-7"
+ inkscape:transform-center-x="-14.78205"
+ inkscape:transform-center-y="-0.085282837" />
+ </g>
+ <path
+ style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:2;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ d="m 2280,219.99998 v 9.99999 0"
+ id="path29548-5-1-3-6-3-1-0-3-4-2-0-0" />
+ </g>
+ <style
+ type="text/css"
+ id="style1021"> .st0{fill:#4A97D2;} .st1{fill:#333333;} </style>
+</svg>
diff --git a/documentation/ref-manual/system-requirements.rst b/documentation/ref-manual/system-requirements.rst
index 04f9efaa23..0fc92550a5 100644
--- a/documentation/ref-manual/system-requirements.rst
+++ b/documentation/ref-manual/system-requirements.rst
@@ -29,35 +29,70 @@ and conceptual information in the :doc:`/overview-manual/index`.
For more information about the Yocto Project Documentation set, see
the :ref:`ref-manual/resources:links and related documentation` section.
-.. _detailed-supported-distros:
+Minimum Free Disk Space
+=======================
+
+To build an image such as ``core-image-sato`` for the ``qemux86-64`` machine,
+you need a system with at least &MIN_DISK_SPACE; Gbytes of free disk space.
+However, much more disk space will be necessary to build more complex images,
+to run multiple builds and to cache build artifacts, improving build efficiency.
+
+If you have a shortage of disk space, see the ":doc:`/dev-manual/disk-space`"
+section of the Development Tasks Manual.
+
+.. _system-requirements-minimum-ram:
+
+Minimum System RAM
+==================
+
+You will manage to build an image such as ``core-image-sato`` for the
+``qemux86-64`` machine with as little as &MIN_RAM; Gbytes of RAM on an old
+system with 4 CPU cores, but your builds will be much faster on a system with
+as much RAM and as many CPU cores as possible.
+
+.. _system-requirements-supported-distros:
Supported Linux Distributions
=============================
-Currently, the Yocto Project is supported on the following
-distributions:
-
-- Ubuntu 18.04 (LTS)
+Currently, the &DISTRO; release ("&DISTRO_NAME;") of the Yocto Project is
+supported on the following distributions:
- Ubuntu 20.04 (LTS)
-- Fedora 34
+- Ubuntu 22.04 (LTS)
-- Fedora 35
+- Fedora 38
-- CentOS 7.x
+- CentOS Stream 8
-- CentOS 8.x
+- Debian GNU/Linux 11 (Bullseye)
-- AlmaLinux 8.5
+- Debian GNU/Linux 12 (Bookworm)
+
+- OpenSUSE Leap 15.4
+
+- AlmaLinux 8
+
+- AlmaLinux 9
+
+- Rocky 9
+
+The following distribution versions are still tested, even though the
+organizations publishing them no longer make updates publicly available:
+
+- Ubuntu 18.04 (LTS)
-- Debian GNU/Linux 9.x (Stretch)
+- Ubuntu 23.04
-- Debian GNU/Linux 10.x (Buster)
+Note that the Yocto Project doesn't have access to private updates
+that some of these versions may have. Therefore, our testing has
+limited value if you have access to such updates.
-- Debian GNU/Linux 11.x (Bullseye)
+Finally, here are the distribution versions which were previously
+tested on former revisions of "&DISTRO_NAME;", but no longer are:
-- OpenSUSE Leap 15.3
+*This list is currently empty*
.. note::
@@ -75,24 +110,29 @@ distributions:
has no plans to support rolling-releases or development
distributions due to their constantly changing nature. We welcome
patches and bug reports, but keep in mind that our priority is on
- the supported platforms listed below.
+ the supported platforms listed above.
+
+ - If your Linux distribution is not in the above list, we recommend to
+ get the :term:`buildtools` or :term:`buildtools-extended` tarballs
+ containing the host tools required by your Yocto Project release,
+ typically by running ``scripts/install-buildtools`` as explained in
+ the ":ref:`system-requirements-buildtools`" section.
- You may use Windows Subsystem For Linux v2 to set up a build host
- using Windows 10, but validation is not performed against build
- hosts using WSLv2.
+ using Windows 10 or later, or Windows Server 2019 or later, but validation
+ is not performed against build hosts using WSL 2.
- - The Yocto Project is not compatible with WSLv1, it is
- compatible but not officially supported nor validated with
- WSLv2, if you still decide to use WSL please upgrade to WSLv2.
+ See the
+ :ref:`dev-manual/start:setting up to use windows subsystem for linux (wsl 2)`
+ section in the Yocto Project Development Tasks Manual for more information.
- If you encounter problems, please go to :yocto_bugs:`Yocto Project
Bugzilla <>` and submit a bug. We are
interested in hearing about your experience. For information on
how to submit a bug, see the Yocto Project
:yocto_wiki:`Bugzilla wiki page </Bugzilla_Configuration_and_Bug_Tracking>`
- and the ":ref:`dev-manual/common-tasks:submitting a defect against the yocto project`"
- section in the Yocto Project Development Tasks Manual.
-
+ and the ":doc:`../contributor-guide/report-defect`"
+ section in the Yocto Project and OpenEmbedded Contributor Guide.
Required Packages for the Build Host
====================================
@@ -107,8 +147,10 @@ function.
Ubuntu and Debian
-----------------
-Here are the required packages by function given a
-supported Ubuntu or Debian Linux distribution:
+Here are the packages needed to build an image on a headless system
+with a supported Ubuntu or Debian Linux distribution::
+
+ $ sudo apt install &UBUNTU_HOST_PACKAGES_ESSENTIAL;
.. note::
@@ -120,122 +162,68 @@ supported Ubuntu or Debian Linux distribution:
$ sudo apt build-dep qemu
$ sudo apt remove oss4-dev
- - For Debian-8, ``python3-git`` and ``pylint3`` are no longer
- available via ``apt``.
- ::
-
- $ sudo pip3 install GitPython pylint==1.9.5
-
-- *Essentials:* Packages needed to build an image on a headless system::
-
- $ sudo apt install &UBUNTU_HOST_PACKAGES_ESSENTIAL;
-
-- *Documentation:* Packages needed if you are going to build out the
- Yocto Project documentation manuals::
+Here are the packages needed to build Project documentation manuals::
- $ sudo apt install make python3-pip
- &PIP3_HOST_PACKAGES_DOC;
-
- .. note::
-
- It is currently not possible to build out documentation from Debian 8
- (Jessie) because of outdated ``pip3`` and ``python3``. ``python3-sphinx``
- is too outdated.
+ $ sudo apt install git make inkscape texlive-latex-extra
+ $ sudo apt install sphinx python3-saneyaml python3-sphinx-rtd-theme
Fedora Packages
---------------
-Here are the required packages by function given a
-supported Fedora Linux distribution:
-
-- *Essentials:* Packages needed to build an image for a headless
- system::
+Here are the packages needed to build an image on a headless system
+with a supported Fedora Linux distribution::
- $ sudo dnf install &FEDORA_HOST_PACKAGES_ESSENTIAL;
+ $ sudo dnf install &FEDORA_HOST_PACKAGES_ESSENTIAL;
-- *Documentation:* Packages needed if you are going to build out the
- Yocto Project documentation manuals::
+Here are the packages needed to build Project documentation manuals::
- $ sudo dnf install make python3-pip which
- &PIP3_HOST_PACKAGES_DOC;
+ $ sudo dnf install git make python3-pip which inkscape texlive-fncychap
+ &PIP3_HOST_PACKAGES_DOC;
openSUSE Packages
-----------------
-Here are the required packages by function given a
-supported openSUSE Linux distribution:
+Here are the packages needed to build an image on a headless system
+with a supported openSUSE distribution::
-- *Essentials:* Packages needed to build an image for a headless
- system::
+ $ sudo zypper install &OPENSUSE_HOST_PACKAGES_ESSENTIAL;
- $ sudo zypper install &OPENSUSE_HOST_PACKAGES_ESSENTIAL;
+Here are the packages needed to build Project documentation manuals::
-- *Documentation:* Packages needed if you are going to build out the
- Yocto Project documentation manuals::
+ $ sudo zypper install git make python3-pip which inkscape texlive-fncychap
+ &PIP3_HOST_PACKAGES_DOC;
- $ sudo zypper install make python3-pip which
- &PIP3_HOST_PACKAGES_DOC;
-
-
-CentOS-7 Packages
------------------
-Here are the required packages by function given a
-supported CentOS-7 Linux distribution:
+AlmaLinux Packages
+------------------
-- *Essentials:* Packages needed to build an image for a headless
- system::
+Here are the packages needed to build an image on a headless system
+with a supported AlmaLinux distribution::
- $ sudo yum install &CENTOS7_HOST_PACKAGES_ESSENTIAL;
+ $ sudo dnf install &ALMALINUX_HOST_PACKAGES_ESSENTIAL;
- .. note::
-
- - Extra Packages for Enterprise Linux (i.e. ``epel-release``) is
- a collection of packages from Fedora built on RHEL/CentOS for
- easy installation of packages not included in enterprise Linux
- by default. You need to install these packages separately.
-
- - The ``makecache`` command consumes additional Metadata from
- ``epel-release``.
-
-- *Documentation:* Packages needed if you are going to build out the
- Yocto Project documentation manuals::
-
- $ sudo yum install make python3-pip which
- &PIP3_HOST_PACKAGES_DOC;
-
-CentOS-8 Packages
------------------
-
-Here are the required packages by function given a
-supported CentOS-8 Linux distribution:
-
-- *Essentials:* Packages needed to build an image for a headless
- system::
+.. note::
- $ sudo dnf install &CENTOS8_HOST_PACKAGES_ESSENTIAL;
+ - Extra Packages for Enterprise Linux (i.e. ``epel-release``) is
+ a collection of packages from Fedora built on RHEL/CentOS for
+ easy installation of packages not included in enterprise Linux
+ by default. You need to install these packages separately.
- .. note::
+ - The ``PowerTools/CRB`` repo provides additional packages such as
+ ``rpcgen`` and ``texinfo``.
- - Extra Packages for Enterprise Linux (i.e. ``epel-release``) is
- a collection of packages from Fedora built on RHEL/CentOS for
- easy installation of packages not included in enterprise Linux
- by default. You need to install these packages separately.
+ - The ``makecache`` command consumes additional Metadata from
+ ``epel-release``.
- - The ``PowerTools`` repo provides additional packages such as
- ``rpcgen`` and ``texinfo``.
+Here are the packages needed to build Project documentation manuals::
- - The ``makecache`` command consumes additional Metadata from
- ``epel-release``.
+ $ sudo dnf install git make python3-pip which inkscape texlive-fncychap
+ &PIP3_HOST_PACKAGES_DOC;
-- *Documentation:* Packages needed if you are going to build out the
- Yocto Project documentation manuals::
+.. _system-requirements-buildtools:
- $ sudo dnf install make python3-pip which
- &PIP3_HOST_PACKAGES_DOC;
-
-Required Git, tar, Python and gcc Versions
-==========================================
+Required Git, tar, Python, make and gcc Versions
+================================================
In order to use the build system, your host development system must meet
the following version requirements for Git, tar, and Python:
@@ -246,10 +234,12 @@ the following version requirements for Git, tar, and Python:
- Python &MIN_PYTHON_VERSION; or greater
+- GNU make &MIN_MAKE_VERSION; or greater
+
If your host development system does not meet all these requirements,
-you can resolve this by installing a ``buildtools`` tarball that
-contains these tools. You can get the tarball one of two ways: download
-a pre-built tarball or use BitBake to build the tarball.
+you can resolve this by installing a :term:`buildtools` tarball that
+contains these tools. You can either download a pre-built tarball or
+use BitBake to build one.
In addition, your host development system must meet the following
version requirement for gcc:
@@ -257,21 +247,26 @@ version requirement for gcc:
- gcc &MIN_GCC_VERSION; or greater
If your host development system does not meet this requirement, you can
-resolve this by installing a ``buildtools-extended`` tarball that
+resolve this by installing a :term:`buildtools-extended` tarball that
contains additional tools, the equivalent of the Debian/Ubuntu ``build-essential``
package.
+For systems with a broken make version (e.g. make 4.2.1 without patches) but
+where the rest of the host tools are usable, you can use the :term:`buildtools-make`
+tarball instead.
+
In the sections that follow, three different methods will be described for
-installing the ``buildtools`` or ``buildtools-extended`` toolset.
+installing the :term:`buildtools`, :term:`buildtools-extended` or :term:`buildtools-make`
+toolset.
Installing a Pre-Built ``buildtools`` Tarball with ``install-buildtools`` script
--------------------------------------------------------------------------------
The ``install-buildtools`` script is the easiest of the three methods by
-which you can get these tools. It downloads a pre-built buildtools
+which you can get these tools. It downloads a pre-built :term:`buildtools`
installer and automatically installs the tools for you:
-1. Execute the ``install-buildtools`` script. Here is an example::
+#. Execute the ``install-buildtools`` script. Here is an example::
$ cd poky
$ scripts/install-buildtools \
@@ -280,7 +275,7 @@ installer and automatically installs the tools for you:
--release yocto-&DISTRO; \
--installer-version &DISTRO;
- During execution, the buildtools tarball will be downloaded, the
+ During execution, the :term:`buildtools` tarball will be downloaded, the
checksum of the download will be verified, the installer will be run
for you, and some basic checks will be run to make sure the
installation is functional.
@@ -291,25 +286,29 @@ installer and automatically installs the tools for you:
/path/to/poky/buildtools
If your host development system needs the additional tools provided
- in the ``buildtools-extended`` tarball, you can instead execute the
+ in the :term:`buildtools-extended` tarball, you can instead execute the
``install-buildtools`` script with the default parameters::
$ cd poky
$ scripts/install-buildtools
-2. Source the tools environment setup script by using a command like the
+ Alternatively if your host development system has a broken ``make``
+ version such that you only need a known good version of ``make``,
+ you can use the ``--make-only`` option::
+
+ $ cd poky
+ $ scripts/install-buildtools --make-only
+
+#. Source the tools environment setup script by using a command like the
following::
$ source /path/to/poky/buildtools/environment-setup-x86_64-pokysdk-linux
- Of course, you need to supply your installation directory and be sure to
- use the right file (i.e. i586 or x86_64).
-
After you have sourced the setup script, the tools are added to
``PATH`` and any other environment variables required to run the
tools are initialized. The results are working versions versions of
Git, tar, Python and ``chrpath``. And in the case of the
- ``buildtools-extended`` tarball, additional working versions of tools
+ :term:`buildtools-extended` tarball, additional working versions of tools
including ``gcc``, ``make`` and the other tools included in
``packagegroup-core-buildessential``.
@@ -317,12 +316,14 @@ Downloading a Pre-Built ``buildtools`` Tarball
----------------------------------------------
If you would prefer not to use the ``install-buildtools`` script, you can instead
-download and run a pre-built buildtools installer yourself with the following
+download and run a pre-built :term:`buildtools` installer yourself with the following
steps:
-1. Locate and download the ``*.sh`` at :yocto_dl:`/releases/yocto/yocto-&DISTRO;/buildtools/`
+#. Go to :yocto_dl:`/releases/yocto/yocto-&DISTRO;/buildtools/`, locate and
+ download the ``.sh`` file corresponding to your host architecture
+ and to :term:`buildtools`, :term:`buildtools-extended` or :term:`buildtools-make`.
-2. Execute the installation script. Here is an example for the
+#. Execute the installation script. Here is an example for the
traditional installer::
$ sh ~/Downloads/x86_64-buildtools-nativesdk-standalone-&DISTRO;.sh
@@ -331,51 +332,55 @@ steps:
$ sh ~/Downloads/x86_64-buildtools-extended-nativesdk-standalone-&DISTRO;.sh
+ An example for the make-only installer::
+
+ $ sh ~/Downloads/x86_64-buildtools-make-nativesdk-standalone-&DISTRO;.sh
+
During execution, a prompt appears that allows you to choose the
installation directory. For example, you could choose the following:
``/home/your-username/buildtools``
-3. Source the tools environment setup script by using a command like the
- following::
-
- $ source /home/your_username/buildtools/environment-setup-i586-poky-linux
+#. As instructed by the installer script, you will have to source the tools
+ environment setup script::
- Of
- course, you need to supply your installation directory and be sure to
- use the right file (i.e. i585 or x86-64).
+ $ source /home/your_username/buildtools/environment-setup-x86_64-pokysdk-linux
After you have sourced the setup script, the tools are added to
``PATH`` and any other environment variables required to run the
tools are initialized. The results are working versions versions of
Git, tar, Python and ``chrpath``. And in the case of the
- ``buildtools-extended`` tarball, additional working versions of tools
+ :term:`buildtools-extended` tarball, additional working versions of tools
including ``gcc``, ``make`` and the other tools included in
``packagegroup-core-buildessential``.
Building Your Own ``buildtools`` Tarball
----------------------------------------
-Building and running your own buildtools installer applies only when you
+Building and running your own :term:`buildtools` installer applies only when you
have a build host that can already run BitBake. In this case, you use
that machine to build the ``.sh`` file and then take steps to transfer
and run it on a machine that does not meet the minimal Git, tar, and
Python (or gcc) requirements.
-Here are the steps to take to build and run your own buildtools
+Here are the steps to take to build and run your own :term:`buildtools`
installer:
-1. On the machine that is able to run BitBake, be sure you have set up
+#. On the machine that is able to run BitBake, be sure you have set up
your build environment with the setup script
(:ref:`structure-core-script`).
-2. Run the BitBake command to build the tarball::
+#. Run the BitBake command to build the tarball::
$ bitbake buildtools-tarball
- or run the BitBake command to build the extended tarball::
+ or to build the extended tarball::
$ bitbake buildtools-extended-tarball
+ or to build the make-only tarball::
+
+ $ bitbake buildtools-make-tarball
+
.. note::
The :term:`SDKMACHINE` variable in your ``local.conf`` file determines
@@ -384,37 +389,37 @@ installer:
Once the build completes, you can find the ``.sh`` file that installs
the tools in the ``tmp/deploy/sdk`` subdirectory of the
:term:`Build Directory`. The installer file has the string
- "buildtools" (or "buildtools-extended") in the name.
+ "buildtools" or "buildtools-extended" in the name.
-3. Transfer the ``.sh`` file from the build host to the machine that
+#. Transfer the ``.sh`` file from the build host to the machine that
does not meet the Git, tar, or Python (or gcc) requirements.
-4. On the machine that does not meet the requirements, run the ``.sh``
- file to install the tools. Here is an example for the traditional
- installer::
+#. On this machine, run the ``.sh`` file to install the tools. Here is an
+ example for the traditional installer::
$ sh ~/Downloads/x86_64-buildtools-nativesdk-standalone-&DISTRO;.sh
- Here is an example for the extended installer::
+ For the extended installer::
$ sh ~/Downloads/x86_64-buildtools-extended-nativesdk-standalone-&DISTRO;.sh
+ And for the make-only installer::
+
+ $ sh ~/Downloads/x86_64-buildtools-make-nativesdk-standalone-&DISTRO;.sh
+
During execution, a prompt appears that allows you to choose the
installation directory. For example, you could choose the following:
``/home/your_username/buildtools``
-5. Source the tools environment setup script by using a command like the
+#. Source the tools environment setup script by using a command like the
following::
$ source /home/your_username/buildtools/environment-setup-x86_64-poky-linux
- Of course, you need to supply your installation directory and be sure to
- use the right file (i.e. i586 or x86_64).
-
After you have sourced the setup script, the tools are added to
``PATH`` and any other environment variables required to run the
tools are initialized. The results are working versions versions of
Git, tar, Python and ``chrpath``. And in the case of the
- ``buildtools-extended`` tarball, additional working versions of tools
+ :term:`buildtools-extended` tarball, additional working versions of tools
including ``gcc``, ``make`` and the other tools included in
``packagegroup-core-buildessential``.
diff --git a/documentation/ref-manual/tasks.rst b/documentation/ref-manual/tasks.rst
index cb08a75c90..2e4b23408d 100644
--- a/documentation/ref-manual/tasks.rst
+++ b/documentation/ref-manual/tasks.rst
@@ -14,8 +14,8 @@ Normal Recipe Build Tasks
The following sections describe normal tasks associated with building a
recipe. For more information on tasks and dependencies, see the
-":ref:`bitbake:bitbake-user-manual/bitbake-user-manual-metadata:tasks`" and
-":ref:`bitbake:bitbake-user-manual/bitbake-user-manual-execution:dependencies`" sections in the
+":ref:`bitbake-user-manual/bitbake-user-manual-metadata:tasks`" and
+":ref:`bitbake-user-manual/bitbake-user-manual-execution:dependencies`" sections in the
BitBake User Manual.
.. _ref-tasks-build:
@@ -36,7 +36,7 @@ directory set to ``${``\ :term:`B`\ ``}``.
The default behavior of this task is to run the ``oe_runmake`` function
if a makefile (``Makefile``, ``makefile``, or ``GNUmakefile``) is found.
-If no such file is found, the ``do_compile`` task does nothing.
+If no such file is found, the :ref:`ref-tasks-compile` task does nothing.
.. _ref-tasks-compile_ptest_base:
@@ -58,7 +58,7 @@ The default behavior of this task is to run ``oe_runmake clean`` if a
makefile (``Makefile``, ``makefile``, or ``GNUmakefile``) is found and
:term:`CLEANBROKEN` is not set to "1". If no such
file is found or the :term:`CLEANBROKEN` variable is set to "1", the
-``do_configure`` task does nothing.
+:ref:`ref-tasks-configure` task does nothing.
.. _ref-tasks-configure_ptest_base:
@@ -78,10 +78,10 @@ task runs with the current working directory set to
``${``\ :term:`B`\ ``}``.
Recipes implementing this task should inherit the
-:ref:`deploy <ref-classes-deploy>` class and should write the output
+:ref:`ref-classes-deploy` class and should write the output
to ``${``\ :term:`DEPLOYDIR`\ ``}``, which is not to be
-confused with ``${DEPLOY_DIR}``. The :ref:`deploy <ref-classes-deploy>` class sets up
-``do_deploy`` as a shared state (sstate) task that can be accelerated
+confused with ``${DEPLOY_DIR}``. The :ref:`ref-classes-deploy` class sets up
+:ref:`ref-tasks-deploy` as a shared state (sstate) task that can be accelerated
through sstate use. The sstate mechanism takes care of copying the
output from ``${DEPLOYDIR}`` to ``${DEPLOY_DIR_IMAGE}``.
@@ -90,19 +90,19 @@ output from ``${DEPLOYDIR}`` to ``${DEPLOY_DIR_IMAGE}``.
Do not write the output directly to ``${DEPLOY_DIR_IMAGE}``, as this causes
the sstate mechanism to malfunction.
-The ``do_deploy`` task is not added as a task by default and
+The :ref:`ref-tasks-deploy` task is not added as a task by default and
consequently needs to be added manually. If you want the task to run
after :ref:`ref-tasks-compile`, you can add it by doing
the following::
addtask deploy after do_compile
-Adding ``do_deploy`` after other tasks works the same way.
+Adding :ref:`ref-tasks-deploy` after other tasks works the same way.
.. note::
You do not need to add ``before do_build`` to the ``addtask`` command
- (though it is harmless), because the :ref:`base <ref-classes-base>` class contains the following::
+ (though it is harmless), because the :ref:`ref-classes-base` class contains the following::
do_build[recrdeptask] += "do_deploy"
@@ -110,7 +110,7 @@ Adding ``do_deploy`` after other tasks works the same way.
See the ":ref:`bitbake-user-manual/bitbake-user-manual-execution:dependencies`"
section in the BitBake User Manual for more information.
-If the ``do_deploy`` task re-executes, any previous output is removed
+If the :ref:`ref-tasks-deploy` task re-executes, any previous output is removed
(i.e. "cleaned").
.. _ref-tasks-fetch:
@@ -118,9 +118,9 @@ If the ``do_deploy`` task re-executes, any previous output is removed
``do_fetch``
------------
-Fetches the source code. This task uses the
-:term:`SRC_URI` variable and the argument's prefix to
-determine the correct :ref:`fetcher <bitbake:bitbake-user-manual/bitbake-user-manual-fetching:fetchers>`
+Fetches the source code. This task uses the :term:`SRC_URI` variable and the
+argument's prefix to determine the correct
+:ref:`fetcher <bitbake-user-manual/bitbake-user-manual-fetching:fetchers>`
module.
.. _ref-tasks-image:
@@ -128,15 +128,15 @@ module.
``do_image``
------------
-Starts the image generation process. The ``do_image`` task runs after
+Starts the image generation process. The :ref:`ref-tasks-image` task runs after
the OpenEmbedded build system has run the
:ref:`ref-tasks-rootfs` task during which packages are
identified for installation into the image and the root filesystem is
created, complete with post-processing.
-The ``do_image`` task performs pre-processing on the image through the
+The :ref:`ref-tasks-image` task performs pre-processing on the image through the
:term:`IMAGE_PREPROCESS_COMMAND` and
-dynamically generates supporting ``do_image_*`` tasks as needed.
+dynamically generates supporting :ref:`do_image_* <ref-tasks-image>` tasks as needed.
For more information on image creation, see the ":ref:`overview-manual/concepts:image generation`"
section in the Yocto Project Overview and Concepts Manual.
@@ -146,13 +146,13 @@ section in the Yocto Project Overview and Concepts Manual.
``do_image_complete``
---------------------
-Completes the image generation process. The ``do_image_complete`` task
+Completes the image generation process. The :ref:`do_image_complete <ref-tasks-image-complete>` task
runs after the OpenEmbedded build system has run the
:ref:`ref-tasks-image` task during which image
-pre-processing occurs and through dynamically generated ``do_image_*``
+pre-processing occurs and through dynamically generated :ref:`do_image_* <ref-tasks-image>`
tasks the image is constructed.
-The ``do_image_complete`` task performs post-processing on the image
+The :ref:`do_image_complete <ref-tasks-image-complete>` task performs post-processing on the image
through the
:term:`IMAGE_POSTPROCESS_COMMAND`.
@@ -168,9 +168,9 @@ section in the Yocto Project Overview and Concepts Manual.
Copies files that are to be packaged into the holding area
``${``\ :term:`D`\ ``}``. This task runs with the current
working directory set to ``${``\ :term:`B`\ ``}``, which is the
-compilation directory. The ``do_install`` task, as well as other tasks
+compilation directory. The :ref:`ref-tasks-install` task, as well as other tasks
that either directly or indirectly depend on the installed files (e.g.
-:ref:`ref-tasks-package`, ``do_package_write_*``, and
+:ref:`ref-tasks-package`, :ref:`do_package_write_* <ref-tasks-package_write_deb>`, and
:ref:`ref-tasks-rootfs`), run under
:ref:`fakeroot <overview-manual/concepts:fakeroot and pseudo>`.
@@ -190,8 +190,8 @@ that either directly or indirectly depend on the installed files (e.g.
- The ``cp`` command with the ``--no-preserve=ownership`` option.
- The ``tar`` command with the ``--no-same-owner`` option. See the
- ``bin_package.bbclass`` file in the ``meta/classes`` directory of
- the :term:`Source Directory` for an example.
+ ``bin_package.bbclass`` file in the ``meta/classes-recipe``
+ subdirectory of the :term:`Source Directory` for an example.
.. _ref-tasks-install_ptest_base:
@@ -212,7 +212,7 @@ based on available packages and files. This task makes use of the
:term:`PACKAGES` and :term:`FILES`
variables.
-The ``do_package`` task, in conjunction with the
+The :ref:`ref-tasks-package` task, in conjunction with the
:ref:`ref-tasks-packagedata` task, also saves some
important package metadata. For additional information, see the
:term:`PKGDESTWORK` variable and the
@@ -225,7 +225,7 @@ section in the Yocto Project Overview and Concepts Manual.
-----------------
Runs QA checks on packaged files. For more information on these checks,
-see the :ref:`insane <ref-classes-insane>` class.
+see the :ref:`ref-classes-insane` class.
.. _ref-tasks-package_write_deb:
@@ -260,17 +260,6 @@ the package feeds area. For more information, see the
":ref:`overview-manual/concepts:package feeds`" section in
the Yocto Project Overview and Concepts Manual.
-.. _ref-tasks-package_write_tar:
-
-``do_package_write_tar``
-------------------------
-
-Creates tarballs and places them in the
-``${``\ :term:`DEPLOY_DIR_TAR`\ ``}`` directory in
-the package feeds area. For more information, see the
-":ref:`overview-manual/concepts:package feeds`" section in
-the Yocto Project Overview and Concepts Manual.
-
.. _ref-tasks-packagedata:
``do_packagedata``
@@ -327,7 +316,7 @@ file as a patch file::
"
Conversely, if you have a file whose file type is ``.patch`` or ``.diff``
-and you want to exclude it so that the ``do_patch`` task does not apply
+and you want to exclude it so that the :ref:`ref-tasks-patch` task does not apply
it during the patch phase, you can use the "apply=no" parameter with the
:term:`SRC_URI` statement::
@@ -343,7 +332,7 @@ while ``file2.patch`` would not be applied.
You can find out more about the patching process in the
":ref:`overview-manual/concepts:patching`" section in
the Yocto Project Overview and Concepts Manual and the
-":ref:`dev-manual/common-tasks:patching code`" section in the
+":ref:`dev-manual/new-recipe:patching code`" section in the
Yocto Project Development Tasks Manual.
.. _ref-tasks-populate_lic:
@@ -369,7 +358,7 @@ information.
``do_populate_sdk_ext``
-----------------------
-Creates the file and directory structure for an installable extensible
+Creates the file and directory structure for an installable extensible
SDK (eSDK). See the ":ref:`overview-manual/concepts:sdk generation`"
section in the Yocto Project Overview and Concepts Manual for more
information.
@@ -392,7 +381,7 @@ For information on what directories are copied by default, see the
these variables inside your recipe if you need to make additional (or
fewer) directories available to other recipes at build time.
-The ``do_populate_sysroot`` task is a shared state (sstate) task, which
+The :ref:`ref-tasks-populate_sysroot` task is a shared state (sstate) task, which
means that the task can be accelerated through sstate use. Realize also
that if the task is re-executed, any previous output is removed (i.e.
"cleaned").
@@ -406,7 +395,7 @@ Installs the files into the individual recipe specific sysroots (i.e.
``recipe-sysroot`` and ``recipe-sysroot-native`` under
``${``\ :term:`WORKDIR`\ ``}`` based upon the
dependencies specified by :term:`DEPENDS`). See the
-":ref:`staging <ref-classes-staging>`" class for more information.
+":ref:`ref-classes-staging`" class for more information.
.. _ref-tasks-rm_work:
@@ -447,7 +436,7 @@ Validates the :term:`SRC_URI` value.
------------
Removes all output files for a target from the
-:ref:`ref-tasks-unpack` task forward (i.e. ``do_unpack``,
+:ref:`ref-tasks-unpack` task forward (i.e. :ref:`ref-tasks-unpack`,
:ref:`ref-tasks-configure`,
:ref:`ref-tasks-compile`,
:ref:`ref-tasks-install`, and
@@ -473,7 +462,7 @@ use the :ref:`ref-tasks-cleansstate` task instead
Removes all output files, shared state
(:ref:`sstate <overview-manual/concepts:shared state cache>`) cache, and
downloaded source files for a target (i.e. the contents of
-:term:`DL_DIR`). Essentially, the ``do_cleanall`` task is
+:term:`DL_DIR`). Essentially, the :ref:`ref-tasks-cleanall` task is
identical to the :ref:`ref-tasks-cleansstate` task
with the added removal of downloaded source files.
@@ -481,9 +470,29 @@ You can run this task using BitBake as follows::
$ bitbake -c cleanall recipe
-Typically, you would not normally use the ``cleanall`` task. Do so only
-if you want to start fresh with the :ref:`ref-tasks-fetch`
-task.
+You should never use the :ref:`ref-tasks-cleanall` task in a normal
+scenario. If you want to start fresh with the :ref:`ref-tasks-fetch` task,
+use instead::
+
+ $ bitbake -f -c fetch recipe
+
+.. note::
+
+ The reason to prefer ``bitbake -f -c fetch`` is that the
+ :ref:`ref-tasks-cleanall` task would break in some cases, such as::
+
+ $ bitbake -c fetch recipe
+ $ bitbake -c cleanall recipe-native
+ $ bitbake -c unpack recipe
+
+ because after step 1 there is a stamp file for the
+ :ref:`ref-tasks-fetch` task of ``recipe``, and it won't be removed at
+ step 2 because step 2 uses a different work directory. So the unpack task
+ at step 3 will try to extract the downloaded archive and fail as it has
+ been deleted in step 2.
+
+ Note that this also applies to BitBake from concurrent processes when a
+ shared download directory (:term:`DL_DIR`) is setup.
.. _ref-tasks-cleansstate:
@@ -492,7 +501,7 @@ task.
Removes all output files and shared state
(:ref:`sstate <overview-manual/concepts:shared state cache>`) cache for a
-target. Essentially, the ``do_cleansstate`` task is identical to the
+target. Essentially, the :ref:`ref-tasks-cleansstate` task is identical to the
:ref:`ref-tasks-clean` task with the added removal of
shared state (:ref:`sstate <overview-manual/concepts:shared state cache>`)
cache.
@@ -501,13 +510,25 @@ You can run this task using BitBake as follows::
$ bitbake -c cleansstate recipe
-When you run the ``do_cleansstate`` task, the OpenEmbedded build system
+When you run the :ref:`ref-tasks-cleansstate` task, the OpenEmbedded build system
no longer uses any sstate. Consequently, building the recipe from
scratch is guaranteed.
.. note::
- The ``do_cleansstate`` task cannot remove sstate from a remote sstate
+ Using :ref:`ref-tasks-cleansstate` with a shared :term:`SSTATE_DIR` is
+ not recommended because it could trigger an error during the build of a
+ separate BitBake instance. This is because the builds check sstate "up
+ front" but download the files later, so it if is deleted in the
+ meantime, it will cause an error but not a total failure as it will
+ rebuild it.
+
+ The reliable and preferred way to force a new build is to use ``bitbake
+ -f`` instead.
+
+.. note::
+
+ The :ref:`ref-tasks-cleansstate` task cannot remove sstate from a remote sstate
mirror. If you need to build a target from scratch using remote mirrors, use
the "-f" option as follows::
@@ -522,7 +543,7 @@ scratch is guaranteed.
Starts a shell in which an interactive Python interpreter allows you to
interact with the BitBake build environment. From within this shell, you
can directly examine and set bits from the data store and execute
-functions as if within the BitBake environment. See the ":ref:`dev-manual/common-tasks:using a Python development shell`" section in
+functions as if within the BitBake environment. See the ":ref:`dev-manual/python-development-shell:using a Python development shell`" section in
the Yocto Project Development Tasks Manual for more information about
using ``pydevshell``.
@@ -532,7 +553,7 @@ using ``pydevshell``.
---------------
Starts a shell whose environment is set up for development, debugging,
-or both. See the ":ref:`dev-manual/common-tasks:using a development shell`" section in the
+or both. See the ":ref:`dev-manual/development-shell:using a development shell`" section in the
Yocto Project Development Tasks Manual for more information about using
``devshell``.
@@ -575,10 +596,8 @@ information on live image types.
``do_bundle_initramfs``
-----------------------
-Combines an initial RAM disk (initramfs) image and kernel together to
-form a single image. The
-:term:`CONFIG_INITRAMFS_SOURCE` variable
-has some more information about these types of images.
+Combines an :term:`Initramfs` image and kernel together to
+form a single image.
.. _ref-tasks-rootfs:
@@ -597,7 +616,7 @@ information on how the root filesystem is created.
Boots an image and performs runtime tests within the image. For
information on automatically testing images, see the
-":ref:`dev-manual/common-tasks:performing automated runtime testing`"
+":ref:`dev-manual/runtime-testing:performing automated runtime testing`"
section in the Yocto Project Development Tasks Manual.
.. _ref-tasks-testimage_auto:
@@ -610,7 +629,7 @@ after it has been built. This task is enabled when you set
:term:`TESTIMAGE_AUTO` equal to "1".
For information on automatically testing images, see the
-":ref:`dev-manual/common-tasks:performing automated runtime testing`"
+":ref:`dev-manual/runtime-testing:performing automated runtime testing`"
section in the Yocto Project Development Tasks Manual.
Kernel-Related Tasks
@@ -657,7 +676,7 @@ section in the Yocto Project Linux Kernel Development Manual.
Converts the newly unpacked kernel source into a form with which the
OpenEmbedded build system can work. Because the kernel source can be
-fetched in several different ways, the ``do_kernel_checkout`` task makes
+fetched in several different ways, the :ref:`ref-tasks-kernel_checkout` task makes
sure that subsequent tasks are given a clean working tree copy of the
kernel with the correct branches checked out.
@@ -668,7 +687,7 @@ kernel with the correct branches checked out.
Validates the configuration produced by the
:ref:`ref-tasks-kernel_menuconfig` task. The
-``do_kernel_configcheck`` task produces warnings when a requested
+:ref:`ref-tasks-kernel_configcheck` task produces warnings when a requested
configuration does not appear in the final ``.config`` file or when you
override a policy configuration in a hardware configuration fragment.
You can run this task explicitly and view the output by using the
@@ -686,7 +705,7 @@ section in the Yocto Project Linux Kernel Development Manual.
----------------------
After the kernel is patched by the :ref:`ref-tasks-patch`
-task, the ``do_kernel_configme`` task assembles and merges all the
+task, the :ref:`ref-tasks-kernel_configme` task assembles and merges all the
kernel config fragments into a merged configuration that can then be
passed to the kernel configuration phase proper. This is also the time
during which user-specified defconfigs are applied if present, and where
@@ -719,7 +738,7 @@ information on this configuration tool.
Collects all the features required for a given kernel build, whether the
features come from :term:`SRC_URI` or from Git
-repositories. After collection, the ``do_kernel_metadata`` task
+repositories. After collection, the :ref:`ref-tasks-kernel_metadata` task
processes the features into a series of config fragments and patches,
which can then be applied by subsequent tasks such as
:ref:`ref-tasks-patch` and
@@ -791,4 +810,4 @@ After the kernel is unpacked but before it is patched, this task makes
sure that the machine and metadata branches as specified by the
:term:`SRCREV` variables actually exist on the specified
branches. Otherwise, if :term:`AUTOREV` is not being used, the
-``do_validate_branches`` task fails during the build.
+:ref:`ref-tasks-validate_branches` task fails during the build.
diff --git a/documentation/ref-manual/terms.rst b/documentation/ref-manual/terms.rst
index cba514c345..b18c4183b6 100644
--- a/documentation/ref-manual/terms.rst
+++ b/documentation/ref-manual/terms.rst
@@ -4,7 +4,7 @@
Yocto Project Terms
*******************
-Following is a list of terms and definitions users new to the Yocto Project
+Here is a list of terms and definitions users new to the Yocto Project
development environment might find helpful. While some of these terms are
universal, the list includes them just in case:
@@ -21,7 +21,7 @@ universal, the list includes them just in case:
Information in append files extends or overrides the information in the
similarly-named recipe file. For an example of an append file in use, see
- the ":ref:`dev-manual/common-tasks:appending other layers metadata with your layer`"
+ the ":ref:`dev-manual/layers:appending other layers metadata with your layer`"
section in the Yocto Project Development Tasks Manual.
When you name an append file, you can use the "``%``" wildcard character
@@ -64,31 +64,31 @@ universal, the list includes them just in case:
builds. The area is created when you ``source`` the setup environment
script that is found in the Source Directory
(i.e. :ref:`ref-manual/structure:\`\`oe-init-build-env\`\``). The
- :term:`TOPDIR` variable points to the Build Directory.
+ :term:`TOPDIR` variable points to the :term:`Build Directory`.
- You have a lot of flexibility when creating the Build Directory.
- Following are some examples that show how to create the directory. The
+ You have a lot of flexibility when creating the :term:`Build Directory`.
+ Here are some examples that show how to create the directory. The
examples assume your :term:`Source Directory` is named ``poky``:
- - Create the Build Directory inside your Source Directory and let
- the name of the Build Directory default to ``build``:
+ - Create the :term:`Build Directory` inside your Source Directory and let
+ the name of the :term:`Build Directory` default to ``build``:
.. code-block:: shell
$ cd poky
$ source oe-init-build-env
- - Create the Build Directory inside your home directory and
+ - Create the :term:`Build Directory` inside your home directory and
specifically name it ``test-builds``:
.. code-block:: shell
$ source poky/oe-init-build-env test-builds
- - Provide a directory path and specifically name the Build
- Directory. Any intermediate folders in the pathname must exist.
- This next example creates a Build Directory named
- ``YP-&DISTRO;`` within the existing directory ``mybuilds``:
+ - Provide a directory path and specifically name the
+ :term:`Build Directory`. Any intermediate folders in the pathname
+ must exist. This next example creates a :term:`Build Directory`
+ named ``YP-&DISTRO;`` within the existing directory ``mybuilds``:
.. code-block:: shell
@@ -96,19 +96,41 @@ universal, the list includes them just in case:
.. note::
- By default, the Build Directory contains :term:`TMPDIR`, which is a
+ By default, the :term:`Build Directory` contains :term:`TMPDIR`, which is a
temporary directory the build system uses for its work. :term:`TMPDIR` cannot
- be under NFS. Thus, by default, the Build Directory cannot be under
- NFS. However, if you need the Build Directory to be under NFS, you can
+ be under NFS. Thus, by default, the :term:`Build Directory` cannot be under
+ NFS. However, if you need the :term:`Build Directory` to be under NFS, you can
set this up by setting :term:`TMPDIR` in your ``local.conf`` file to use a local
drive. Doing so effectively separates :term:`TMPDIR` from :term:`TOPDIR`, which is the
- Build Directory.
+ :term:`Build Directory`.
:term:`Build Host`
The system used to build images in a Yocto Project Development
environment. The build system is sometimes referred to as the development
host.
+ :term:`buildtools`
+ Build tools in binary form, providing required versions of development
+ tools (such as Git, GCC, Python and make), to run the OpenEmbedded build
+ system on a development host without such minimum versions.
+
+ See the ":ref:`system-requirements-buildtools`" paragraph in the
+ Reference Manual for details about downloading or building an archive
+ of such tools.
+
+ :term:`buildtools-extended`
+ A set of :term:`buildtools` binaries extended with additional development
+ tools, such as a required version of the GCC compiler to run the
+ OpenEmbedded build system.
+
+ See the ":ref:`system-requirements-buildtools`" paragraph in the
+ Reference Manual for details about downloading or building an archive
+ of such tools.
+
+ :term:`buildtools-make`
+ A variant of :term:`buildtools`, just providing the required
+ version of ``make`` to run the OpenEmbedded build system.
+
:term:`Classes`
Files that provide for logic encapsulation and inheritance so that
commonly used patterns can be defined once and then easily used in
@@ -138,14 +160,12 @@ universal, the list includes them just in case:
which contains multiple (and typically related) sub-layers which can
be included independently in your project's ``bblayers.conf`` file.
- In some cases, such as with OpenEmbedded's
- `meta-openembedded <https://github.com/openembedded/meta-openembedded>`_
+ In some cases, such as with OpenEmbedded's :oe_git:`meta-openembedded </meta-openembedded>`
layer, the top level ``meta-openembedded/`` directory is not itself an actual layer,
so you would never explicitly include it in a ``bblayers.conf`` file;
rather, you would include any number of its layer subdirectories, such as
- `meta-openembedded/meta-oe <https://github.com/openembedded/meta-openembedded/tree/master/meta-oe>`_,
- `meta-openembedded/meta-python <https://github.com/openembedded/meta-openembedded/tree/master/meta-python>`_
- and so on.
+ :oe_git:`meta-oe </meta-openembedded/tree/meta-oe>`, :oe_git:`meta-python
+ </meta-openembedded/tree/meta-python>` and so on.
On the other hand, some container layers (such as
:yocto_git:`meta-security </meta-security>`)
@@ -192,6 +212,48 @@ universal, the list includes them just in case:
of the supported image types that the Yocto Project provides, see the
":ref:`ref-manual/images:Images`" chapter.
+ :term:`Initramfs`
+ An Initial RAM Filesystem (:term:`Initramfs`) is an optionally compressed
+ :wikipedia:`cpio <Cpio>` archive which is extracted
+ by the Linux kernel into RAM in a special :wikipedia:`tmpfs <Tmpfs>`
+ instance, used as the initial root filesystem.
+
+ This is a replacement for the legacy init RAM disk ("initrd")
+ technique, booting on an emulated block device in RAM, but being less
+ efficient because of the overhead of going through a filesystem and
+ having to duplicate accessed file contents in the file cache in RAM,
+ as for any block device.
+
+ .. note::
+
+ As far as bootloaders are concerned, :term:`Initramfs` and "initrd"
+ images are still copied to RAM in the same way. That's why most
+ most bootloaders refer to :term:`Initramfs` images as "initrd"
+ or "init RAM disk".
+
+ This kind of mechanism is typically used for two reasons:
+
+ - For booting the same kernel binary on multiple systems requiring
+ different device drivers. The :term:`Initramfs` image is then customized
+ for each type of system, to include the specific kernel modules
+ necessary to access the final root filesystem. This technique
+ is used on all GNU / Linux distributions for desktops and servers.
+
+ - For booting faster. As the root filesystem is extracted into RAM,
+ accessing the first user-space applications is very fast, compared
+ to having to initialize a block device, to access multiple blocks
+ from it, and to go through a filesystem having its own overhead.
+ For example, this allows to display a splashscreen very early,
+ and to later take care of mounting the final root filesystem and
+ loading less time-critical kernel drivers.
+
+ This cpio archive can either be loaded to RAM by the bootloader,
+ or be included in the kernel binary.
+
+ For information on creating and using an :term:`Initramfs`, see the
+ ":ref:`dev-manual/building:building an initial ram filesystem (Initramfs) image`"
+ section in the Yocto Project Development Tasks Manual.
+
:term:`Layer`
A collection of related recipes. Layers allow you to consolidate related
metadata to customize your build. Layers also isolate information used
@@ -205,12 +267,18 @@ universal, the list includes them just in case:
":ref:`overview-manual/yp-intro:The Yocto Project Layer
Model`" section in the Yocto Project Overview and Concepts Manual. For
more detailed information on layers, see the
- ":ref:`dev-manual/common-tasks:Understanding and Creating
+ ":ref:`dev-manual/layers:Understanding and Creating
Layers`" section in the Yocto Project Development Tasks Manual. For a
discussion specifically on BSP Layers, see the ":ref:`bsp-guide/bsp:BSP
Layers`" section in the Yocto Project Board Support Packages (BSP)
Developer's Guide.
+ :term:`LTS`
+ This term means "Long Term Support", and in the context of the Yocto
+ Project, it corresponds to selected stable releases for which bug and
+ security fixes are provided for at least four years. See
+ the :ref:`ref-long-term-support-releases` section for details.
+
:term:`Metadata`
A key element of the Yocto Project is the Metadata that
is used to construct a Linux distribution and is contained in the
@@ -230,6 +298,12 @@ universal, the list includes them just in case:
:yocto_git:`yocto-kernel-cache </yocto-kernel-cache>`
Git repository.
+ :term:`Mixin`
+ A :term:`Mixin` layer is a layer which can be created by the community to
+ add a specific feature or support a new version of some package for an
+ :term:`LTS` release. See the :ref:`ref-long-term-support-releases`
+ section for details.
+
:term:`OpenEmbedded-Core (OE-Core)`
OE-Core is metadata comprised of
foundational recipes, classes, and associated files that are meant to
@@ -270,7 +344,7 @@ universal, the list includes them just in case:
your Linux distribution.
Another point worth noting is that historically within the Yocto
- Project, recipes were referred to as packages - thus, the existence
+ Project, recipes were referred to as packages --- thus, the existence
of several BitBake variables that are seemingly mis-named, (e.g.
:term:`PR`, :term:`PV`, and
:term:`PE`).
@@ -323,6 +397,23 @@ universal, the list includes them just in case:
:term:`build host<Build Host>` and other components, that can
work on specific hardware.
+ :term:`SBOM`
+ This term means *Software Bill of Materials*. When you distribute
+ software, it offers a description of all the components you used,
+ their corresponding licenses, their dependencies, the changes that were
+ applied and the known vulnerabilities that were fixed.
+
+ This can be used by the recipients of the software to assess
+ their exposure to license compliance and security vulnerability issues.
+
+ See the :wikipedia:`Software Supply Chain <Software_supply_chain>`
+ article on Wikipedia for more details.
+
+ The OpenEmbedded Build System can generate such documentation for your
+ project, in :term:`SPDX` format, based on all the metadata it used to
+ build the software images. See the ":ref:`dev-manual/sbom:creating
+ a software bill of materials`" section of the Development Tasks manual.
+
:term:`Source Directory`
This term refers to the directory structure
created as a result of creating a local copy of the ``poky`` Git
@@ -369,7 +460,7 @@ universal, the list includes them just in case:
Directory created by unpacking a released tarball as compared to
cloning ``git://git.yoctoproject.org/poky``. When you unpack a
tarball, you have an exact copy of the files based on the time of
- release - a fixed release point. Any changes you make to your local
+ release --- a fixed release point. Any changes you make to your local
files in the Source Directory are on top of the release and will
remain local only. On the other hand, when you clone the ``poky`` Git
repository, you have an active development repository with access to
@@ -383,6 +474,42 @@ universal, the list includes them just in case:
":ref:`overview-manual/development-environment:repositories, tags, and branches`"
section in the Yocto Project Overview and Concepts Manual.
+ :term:`SPDX`
+ This term means *Software Package Data Exchange*, and is used as an open
+ standard for providing a *Software Bill of Materials* (:term:`SBOM`).
+ This standard is developed through a `Linux Foundation project
+ <https://spdx.dev/>`__ and is used by the OpenEmbedded Build System to
+ provide an :term:`SBOM` associated to each software image.
+
+ For details, see Wikipedia's :wikipedia:`SPDX page <Software_Package_Data_Exchange>`
+ and the ":ref:`dev-manual/sbom:creating a software bill of materials`"
+ section of the Development Tasks manual.
+
+ :term:`Sysroot`
+ When cross-compiling, the target file system may be differently laid
+ out and contain different things compared to the host system. The concept
+ of a *sysroot* is directory which looks like the target filesystem and
+ can be used to cross-compile against.
+
+ In the context of cross-compiling toolchains, a *sysroot*
+ typically contains C library and kernel headers, plus the
+ compiled binaries for the C library. A *multilib toolchain*
+ can contain multiple variants of the C library binaries,
+ each compiled for a target instruction set (such as ``armv5``,
+ ``armv7`` and ``armv8``), and possibly optimized for a specific CPU core.
+
+ In the more specific context of the OpenEmbedded build System and
+ of the Yocto Project, each recipe has two sysroots:
+
+ - A *target sysroot* contains all the **target** libraries and headers
+ needed to build the recipe.
+
+ - A *native sysroot* contains all the **host** files and executables
+ needed to build the recipe.
+
+ See the :term:`SYSROOT_* <SYSROOT_DESTDIR>` variables controlling
+ how sysroots are created and stored.
+
:term:`Task`
A per-recipe unit of execution for BitBake (e.g.
:ref:`ref-tasks-compile`,
diff --git a/documentation/ref-manual/variables.rst b/documentation/ref-manual/variables.rst
index a947caddf6..9cdcc1b61b 100644
--- a/documentation/ref-manual/variables.rst
+++ b/documentation/ref-manual/variables.rst
@@ -126,8 +126,7 @@ system and gives an overview of their function and contents.
":ref:`ref-classes-update-alternatives`" section.
:term:`ANY_OF_DISTRO_FEATURES`
- When inheriting the
- :ref:`features_check <ref-classes-features_check>`
+ When inheriting the :ref:`ref-classes-features_check`
class, this variable identifies a list of distribution features where
at least one must be enabled in the current configuration in order
for the OpenEmbedded build system to build the recipe. In other words,
@@ -135,20 +134,19 @@ system and gives an overview of their function and contents.
appear in :term:`DISTRO_FEATURES` within the current configuration, then
the recipe will be skipped, and if the build system attempts to build
the recipe then an error will be triggered.
-
:term:`APPEND`
An override list of append strings for each target specified with
:term:`LABELS`.
- See the :ref:`grub-efi <ref-classes-grub-efi>` class for more
+ See the :ref:`ref-classes-grub-efi` class for more
information on how this variable is used.
:term:`AR`
The minimal command and arguments used to run ``ar``.
:term:`ARCHIVER_MODE`
- When used with the :ref:`archiver <ref-classes-archiver>` class,
+ When used with the :ref:`ref-classes-archiver` class,
determines the type of information used to create a released archive.
You can use this variable to create archives of patched source,
original source, configured source, and so forth by employing the
@@ -194,18 +192,15 @@ system and gives an overview of their function and contents.
ASSUME_SHLIBS = "libEGL.so.1:libegl-implementation"
- :term:`AUTHOR`
- The email address used to contact the original author or authors in
- order to send patches and forward bugs.
-
:term:`AUTO_LIBNAME_PKGS`
- When the :ref:`debian <ref-classes-debian>` class is inherited,
+ When the :ref:`ref-classes-debian` class is inherited,
which is the default behavior, :term:`AUTO_LIBNAME_PKGS` specifies which
packages should be checked for libraries and renamed according to
Debian library package naming.
- The default value is "${PACKAGES}", which causes the debian class to
- act on all packages that are explicitly generated by the recipe.
+ The default value is "${PACKAGES}", which causes the
+ :ref:`ref-classes-debian` class to act on all packages that are
+ explicitly generated by the recipe.
:term:`AUTOREV`
When :term:`SRCREV` is set to the value of this variable, it specifies to
@@ -215,21 +210,20 @@ system and gives an overview of their function and contents.
If you use the previous statement to retrieve the latest version of
software, you need to be sure :term:`PV` contains
- ``${``\ :term:`SRCPV`\ ``}``. For example, suppose you
- have a kernel recipe that inherits the
- :ref:`kernel <ref-classes-kernel>` class and you use the previous
- statement. In this example, ``${SRCPV}`` does not automatically get
- into :term:`PV`. Consequently, you need to change :term:`PV` in your recipe
- so that it does contain ``${SRCPV}``.
+ ``${``\ :term:`SRCPV`\ ``}``. For example, suppose you have a kernel
+ recipe that inherits the :ref:`ref-classes-kernel` class and you
+ use the previous statement. In this example, ``${SRCPV}`` does not
+ automatically get into :term:`PV`. Consequently, you need to change
+ :term:`PV` in your recipe so that it does contain ``${SRCPV}``.
For more information see the
- ":ref:`dev-manual/common-tasks:automatically incrementing a package version number`"
+ ":ref:`dev-manual/packages:automatically incrementing a package version number`"
section in the Yocto Project Development Tasks Manual.
:term:`AUTO_SYSLINUXMENU`
Enables creating an automatic menu for the syslinux bootloader. You
must set this variable in your recipe. The
- :ref:`syslinux <ref-classes-syslinux>` class checks this variable.
+ :ref:`ref-classes-syslinux` class checks this variable.
:term:`AVAILTUNES`
The list of defined CPU and Application Binary Interface (ABI)
@@ -239,21 +233,20 @@ system and gives an overview of their function and contents.
The list simply presents the tunes that are available. Not all tunes
may be compatible with a particular machine configuration, or with
each other in a
- :ref:`Multilib <dev-manual/common-tasks:combining multiple versions of library files into one image>`
+ :ref:`Multilib <dev-manual/libraries:combining multiple versions of library files into one image>`
configuration.
To add a tune to the list, be sure to append it with spaces using the
"+=" BitBake operator. Do not simply replace the list by using the
"=" operator. See the
- ":ref:`bitbake:bitbake-user-manual/bitbake-user-manual-metadata:basic syntax`" section in the BitBake
+ ":ref:`bitbake-user-manual/bitbake-user-manual-metadata:basic syntax`" section in the BitBake
User Manual for more information.
:term:`AZ_SAS`
Azure Storage Shared Access Signature, when using the
- :ref:`Azure Storage fetcher (az://) <bitbake:bitbake-user-manual/bitbake-user-manual-fetching:fetchers>`
+ :ref:`Azure Storage fetcher (az://) <bitbake-user-manual/bitbake-user-manual-fetching:fetchers>`
This variable can be defined to be used by the fetcher to authenticate
- and gain access to non-public artifacts.
- ::
+ and gain access to non-public artifacts::
AZ_SAS = ""se=2021-01-01&sp=r&sv=2018-11-09&sr=c&skoid=<skoid>&sig=<signature>""
@@ -261,9 +254,9 @@ system and gives an overview of their function and contents.
https://docs.microsoft.com/en-us/azure/storage/common/storage-sas-overview
:term:`B`
- The directory within the :term:`Build Directory` in
- which the OpenEmbedded build system places generated objects during a
- recipe's build process. By default, this directory is the same as the
+ The directory within the :term:`Build Directory` in which the
+ OpenEmbedded build system places generated objects during a recipe's
+ build process. By default, this directory is the same as the
:term:`S` directory, which is defined as::
S = "${WORKDIR}/${BP}"
@@ -304,7 +297,7 @@ system and gives an overview of their function and contents.
:term:`BASE_LIB`
The library directory name for the CPU or Application Binary
Interface (ABI) tune. The :term:`BASE_LIB` applies only in the Multilib
- context. See the ":ref:`dev-manual/common-tasks:combining multiple versions of library files into one image`"
+ context. See the ":ref:`dev-manual/libraries:combining multiple versions of library files into one image`"
section in the Yocto Project Development Tasks Manual for information
on Multilib.
@@ -318,7 +311,7 @@ system and gives an overview of their function and contents.
:term:`BB_ALLOWED_NETWORKS`
Specifies a space-delimited list of hosts that the fetcher is allowed
- to use to obtain the required source code. Following are
+ to use to obtain the required source code. Here are
considerations surrounding this variable:
- This host list is only used if :term:`BB_NO_NETWORK` is either not set
@@ -326,8 +319,7 @@ system and gives an overview of their function and contents.
- There is limited support for wildcard matching against the beginning of
host names. For example, the following setting matches
- ``git.gnu.org``, ``ftp.gnu.org``, and ``foo.git.gnu.org``.
- ::
+ ``git.gnu.org``, ``ftp.gnu.org``, and ``foo.git.gnu.org``::
BB_ALLOWED_NETWORKS = "*.gnu.org"
@@ -355,6 +347,21 @@ system and gives an overview of their function and contents.
host listed in :term:`SRC_URI` after a successful fetch from the
:term:`PREMIRRORS` occurs.
+ :term:`BB_BASEHASH_IGNORE_VARS`
+ See :term:`bitbake:BB_BASEHASH_IGNORE_VARS` in the BitBake manual.
+
+ :term:`BB_CACHEDIR`
+ See :term:`bitbake:BB_CACHEDIR` in the BitBake manual.
+
+ :term:`BB_CHECK_SSL_CERTS`
+ See :term:`bitbake:BB_CHECK_SSL_CERTS` in the BitBake manual.
+
+ :term:`BB_CONSOLELOG`
+ See :term:`bitbake:BB_CONSOLELOG` in the BitBake manual.
+
+ :term:`BB_CURRENTTASK`
+ See :term:`bitbake:BB_CURRENTTASK` in the BitBake manual.
+
:term:`BB_DANGLINGAPPENDS_WARNONLY`
Defines how BitBake handles situations where an append file
(``.bbappend``) has no corresponding recipe file (``.bb``). This
@@ -373,6 +380,12 @@ system and gives an overview of their function and contents.
BB_DANGLINGAPPENDS_WARNONLY = "1"
+ :term:`BB_DEFAULT_TASK`
+ See :term:`bitbake:BB_DEFAULT_TASK` in the BitBake manual.
+
+ :term:`BB_DEFAULT_UMASK`
+ See :term:`bitbake:BB_DEFAULT_UMASK` in the BitBake manual.
+
:term:`BB_DISKMON_DIRS`
Monitors disk space and available inodes during the build and allows
you to control the build based on these parameters.
@@ -494,6 +507,18 @@ system and gives an overview of their function and contents.
a respective interval is reached beyond the initial warning (i.e. 1
Gbytes and 100 Kbytes).
+ :term:`BB_ENV_PASSTHROUGH`
+ See :term:`bitbake:BB_ENV_PASSTHROUGH` in the BitBake manual.
+
+ :term:`BB_ENV_PASSTHROUGH_ADDITIONS`
+ See :term:`bitbake:BB_ENV_PASSTHROUGH_ADDITIONS` in the BitBake manual.
+
+ :term:`BB_FETCH_PREMIRRORONLY`
+ See :term:`bitbake:BB_FETCH_PREMIRRORONLY` in the BitBake manual.
+
+ :term:`BB_FILENAME`
+ See :term:`bitbake:BB_FILENAME` in the BitBake manual.
+
:term:`BB_GENERATE_MIRROR_TARBALLS`
Causes tarballs of the source control repositories (e.g. Git
repositories), including metadata, to be placed in the
@@ -501,8 +526,7 @@ system and gives an overview of their function and contents.
For performance reasons, creating and placing tarballs of these
repositories is not the default action by the OpenEmbedded build
- system.
- ::
+ system::
BB_GENERATE_MIRROR_TARBALLS = "1"
@@ -513,6 +537,52 @@ system and gives an overview of their function and contents.
clean up your :term:`DL_DIR` directory by deleting any Git or other
source control work directories.
+ :term:`BB_GENERATE_SHALLOW_TARBALLS`
+ See :term:`bitbake:BB_GENERATE_SHALLOW_TARBALLS` in the BitBake manual.
+
+ :term:`BB_GIT_SHALLOW`
+ See :term:`bitbake:BB_GIT_SHALLOW` in the BitBake manual.
+
+ :term:`BB_GIT_SHALLOW_DEPTH`
+ See :term:`bitbake:BB_GIT_SHALLOW_DEPTH` in the BitBake manual.
+
+ :term:`BB_HASHCHECK_FUNCTION`
+ See :term:`bitbake:BB_HASHCHECK_FUNCTION` in the BitBake manual.
+
+ :term:`BB_HASHCONFIG_IGNORE_VARS`
+ See :term:`bitbake:BB_HASHCONFIG_IGNORE_VARS` in the BitBake manual.
+
+ :term:`BB_HASHSERVE`
+ See :term:`bitbake:BB_HASHSERVE` in the BitBake manual.
+
+ :term:`BB_HASHSERVE_UPSTREAM`
+ See :term:`bitbake:BB_HASHSERVE_UPSTREAM` in the BitBake manual.
+
+ :term:`BB_INVALIDCONF`
+ See :term:`bitbake:BB_INVALIDCONF` in the BitBake manual.
+
+ :term:`BB_LOADFACTOR_MAX`
+ The system load threshold above which BitBake will stop runnig extra
+ tasks.
+
+ :term:`BB_LOGCONFIG`
+ See :term:`bitbake:BB_LOGCONFIG` in the BitBake manual.
+
+ :term:`BB_LOGFMT`
+ See :term:`bitbake:BB_LOGFMT` in the BitBake manual.
+
+ :term:`BB_MULTI_PROVIDER_ALLOWED`
+ See :term:`bitbake:BB_MULTI_PROVIDER_ALLOWED` in the BitBake manual.
+
+ :term:`BB_NICE_LEVEL`
+ See :term:`bitbake:BB_NICE_LEVEL` in the BitBake manual.
+
+ :term:`BB_NO_NETWORK`
+ See :term:`bitbake:BB_NO_NETWORK` in the BitBake manual.
+
+ :term:`BB_NUMBER_PARSE_THREADS`
+ See :term:`bitbake:BB_NUMBER_PARSE_THREADS` in the BitBake manual.
+
:term:`BB_NUMBER_THREADS`
The maximum number of tasks BitBake should run in parallel at any one
time. The OpenEmbedded build system automatically configures this
@@ -528,9 +598,63 @@ system and gives an overview of their function and contents.
is not set higher than "20".
For more information on speeding up builds, see the
- ":ref:`dev-manual/common-tasks:speeding up a build`"
+ ":ref:`dev-manual/speeding-up-build:speeding up a build`"
section in the Yocto Project Development Tasks Manual.
+ On the other hand, if your goal is to limit the amount of system
+ resources consumed by BitBake tasks, setting :term:`BB_NUMBER_THREADS`
+ to a number lower than the number of CPU threads in your machine
+ won't be sufficient. That's because each package will still be built
+ and installed through a number of parallel jobs specified by the
+ :term:`PARALLEL_MAKE` variable, which is by default the number of CPU
+ threads in your system, and is not impacted by the
+ :term:`BB_NUMBER_THREADS` value.
+
+ So, if you set :term:`BB_NUMBER_THREADS` to "1" but don't set
+ :term:`PARALLEL_MAKE`, most of your system resources will be consumed
+ anyway.
+
+ Therefore, if you intend to reduce the load of your build system by
+ setting :term:`BB_NUMBER_THREADS` to a relatively low value compared
+ to the number of CPU threads on your system, you should also set
+ :term:`PARALLEL_MAKE` to a similarly low value.
+
+ An alternative to using :term:`BB_NUMBER_THREADS` to keep the usage
+ of build system resources under control is to use the smarter
+ :term:`BB_PRESSURE_MAX_CPU`, :term:`BB_PRESSURE_MAX_IO` or
+ :term:`BB_PRESSURE_MAX_MEMORY` controls. They will prevent BitBake
+ from starting new tasks as long as thresholds are exceeded. Anyway,
+ as with :term:`BB_NUMBER_THREADS`, such controls won't prevent the
+ tasks already being run from using all CPU threads on the system
+ if :term:`PARALLEL_MAKE` is not set to a low value.
+
+ :term:`BB_ORIGENV`
+ See :term:`bitbake:BB_ORIGENV` in the BitBake manual.
+
+ :term:`BB_PRESERVE_ENV`
+ See :term:`bitbake:BB_PRESERVE_ENV` in the BitBake manual.
+
+ :term:`BB_PRESSURE_MAX_CPU`
+ See :term:`bitbake:BB_PRESSURE_MAX_CPU` in the BitBake manual.
+
+ :term:`BB_PRESSURE_MAX_IO`
+ See :term:`bitbake:BB_PRESSURE_MAX_IO` in the BitBake manual.
+
+ :term:`BB_PRESSURE_MAX_MEMORY`
+ See :term:`bitbake:BB_PRESSURE_MAX_MEMORY` in the BitBake manual.
+
+ :term:`BB_RUNFMT`
+ See :term:`bitbake:BB_RUNFMT` in the BitBake manual.
+
+ :term:`BB_RUNTASK`
+ See :term:`bitbake:BB_RUNTASK` in the BitBake manual.
+
+ :term:`BB_SCHEDULER`
+ See :term:`bitbake:BB_SCHEDULER` in the BitBake manual.
+
+ :term:`BB_SCHEDULERS`
+ See :term:`bitbake:BB_SCHEDULERS` in the BitBake manual.
+
:term:`BB_SERVER_TIMEOUT`
Specifies the time (in seconds) after which to unload the BitBake
server due to inactivity. Set :term:`BB_SERVER_TIMEOUT` to determine how
@@ -544,15 +668,45 @@ system and gives an overview of their function and contents.
If you want the server to never be unloaded,
set :term:`BB_SERVER_TIMEOUT` to "-1".
+ :term:`BB_SETSCENE_DEPVALID`
+ See :term:`bitbake:BB_SETSCENE_DEPVALID` in the BitBake manual.
+
+ :term:`BB_SIGNATURE_EXCLUDE_FLAGS`
+ See :term:`bitbake:BB_SIGNATURE_EXCLUDE_FLAGS` in the BitBake manual.
+
+ :term:`BB_SIGNATURE_HANDLER`
+ See :term:`bitbake:BB_SIGNATURE_HANDLER` in the BitBake manual.
+
+ :term:`BB_SRCREV_POLICY`
+ See :term:`bitbake:BB_SRCREV_POLICY` in the BitBake manual.
+
+ :term:`BB_STRICT_CHECKSUM`
+ See :term:`bitbake:BB_STRICT_CHECKSUM` in the BitBake manual.
+
+ :term:`BB_TASK_IONICE_LEVEL`
+ See :term:`bitbake:BB_TASK_IONICE_LEVEL` in the BitBake manual.
+
+ :term:`BB_TASK_NICE_LEVEL`
+ See :term:`bitbake:BB_TASK_NICE_LEVEL` in the BitBake manual.
+
+ :term:`BB_TASKHASH`
+ See :term:`bitbake:BB_TASKHASH` in the BitBake manual.
+
+ :term:`BB_VERBOSE_LOGS`
+ See :term:`bitbake:BB_VERBOSE_LOGS` in the BitBake manual.
+
+ :term:`BB_WORKERCONTEXT`
+ See :term:`bitbake:BB_WORKERCONTEXT` in the BitBake manual.
+
:term:`BBCLASSEXTEND`
Allows you to extend a recipe so that it builds variants of the
software. There are common variants for recipes as "natives" like
``quilt-native``, which is a copy of Quilt built to run on the build
system; "crosses" such as ``gcc-cross``, which is a compiler built to
run on the build machine but produces binaries that run on the target
- :term:`MACHINE`; "nativesdk", which targets the SDK
- machine instead of :term:`MACHINE`; and "mulitlibs" in the form
- "``multilib:``\ multilib_name".
+ :term:`MACHINE`; ":ref:`ref-classes-nativesdk`", which
+ targets the SDK machine instead of :term:`MACHINE`; and "mulitlibs" in
+ the form "``multilib:``\ multilib_name".
To build a different variant of the recipe with a minimal amount of
code, it usually is as simple as adding the following to your recipe::
@@ -574,6 +728,9 @@ system and gives an overview of their function and contents.
since ``include`` statements are processed when the recipe is
parsed.
+ :term:`BBDEBUG`
+ See :term:`bitbake:BBDEBUG` in the BitBake manual.
+
:term:`BBFILE_COLLECTIONS`
Lists the names of configured layers. These names are used to find
the other ``BBFILE_*`` variables. Typically, each layer will append
@@ -591,7 +748,7 @@ system and gives an overview of their function and contents.
This variable is useful in situations where the same recipe appears
in more than one layer. Setting this variable allows you to
prioritize a layer against other layers that contain the same recipe
- - effectively letting you control the precedence for the multiple
+ --- effectively letting you control the precedence for the multiple
layers. The precedence established through this variable stands
regardless of a recipe's version (:term:`PV` variable). For
example, a layer that has a recipe with a higher :term:`PV` value but for
@@ -616,7 +773,7 @@ system and gives an overview of their function and contents.
software.
When specifying recipe files, you can pattern match using Python's
- `glob <https://docs.python.org/3/library/glob.html>`_ syntax.
+ `glob <https://docs.python.org/3/library/glob.html>`__ syntax.
For details on the syntax, see the documentation by following the
previous link.
@@ -649,6 +806,9 @@ system and gives an overview of their function and contents.
/work/my-layer/bbappends/meta-security-isafw/*/*/*.bbappend
/work/my-layer/bbappends/openembedded-core/meta/*/*/*.bbappend
+ :term:`BBINCLUDED`
+ See :term:`bitbake:BBINCLUDED` in the BitBake manual.
+
:term:`BBINCLUDELOGS`
Variable that controls how BitBake displays logs on build failure.
@@ -673,6 +833,9 @@ system and gives an overview of their function and contents.
This example enables four layers, one of which is a custom,
user-defined layer named ``meta-mykernel``.
+ :term:`BBLAYERS_FETCH_DIR`
+ See :term:`bitbake:BBLAYERS_FETCH_DIR` in the BitBake manual.
+
:term:`BBMASK`
Prevents BitBake from processing recipes and recipe append files.
@@ -718,31 +881,19 @@ system and gives an overview of their function and contents.
BBMULTICONFIG = "configA configB configC"
- Each configuration file you
- use must reside in the :term:`Build Directory`
- ``conf/multiconfig`` directory (e.g.
- ``build_directory/conf/multiconfig/configA.conf``).
+ Each configuration file you use must reside in a ``multiconfig``
+ subdirectory of a configuration directory within a layer, or
+ within the :term:`Build Directory` (e.g.
+ ``build_directory/conf/multiconfig/configA.conf`` or
+ ``mylayer/conf/multiconfig/configB.conf``).
For information on how to use :term:`BBMULTICONFIG` in an environment
that supports building targets with multiple configurations, see the
- ":ref:`dev-manual/common-tasks:building images for multiple targets using multiple configurations`"
+ ":ref:`dev-manual/building:building images for multiple targets using multiple configurations`"
section in the Yocto Project Development Tasks Manual.
:term:`BBPATH`
- Used by BitBake to locate ``.bbclass`` and configuration files. This
- variable is analogous to the ``PATH`` variable.
-
- .. note::
-
- If you run BitBake from a directory outside of the
- :term:`Build Directory`, you must be sure to set :term:`BBPATH`
- to point to the Build Directory. Set the variable as you would any
- environment variable and then run BitBake::
-
- $ BBPATH = "build_directory"
- $ export BBPATH
- $ bitbake target
-
+ See :term:`bitbake:BBPATH` in the BitBake manual.
:term:`BBSERVER`
If defined in the BitBake environment, :term:`BBSERVER` points to the
@@ -757,14 +908,16 @@ system and gives an overview of their function and contents.
Consequently, :term:`BBSERVER` is excluded from checksum and dependency
data.
+ :term:`BBTARGETS`
+ See :term:`bitbake:BBTARGETS` in the BitBake manual.
+
:term:`BINCONFIG`
- When inheriting the
- :ref:`binconfig-disabled <ref-classes-binconfig-disabled>` class,
- this variable specifies binary configuration scripts to disable in
- favor of using ``pkg-config`` to query the information. The
- :ref:`binconfig-disabled <ref-classes-binconfig-disabled>` class will modify the specified scripts to
- return an error so that calls to them can be easily found and
- replaced.
+ When inheriting the :ref:`ref-classes-binconfig-disabled` class, this
+ variable specifies binary configuration scripts to disable in favor of
+ using ``pkg-config`` to query the information. The
+ :ref:`ref-classes-binconfig-disabled` class will modify the specified
+ scripts to return an error so that calls to them can be easily found
+ and replaced.
To add multiple scripts, separate them by spaces. Here is an example
from the ``libpng`` recipe::
@@ -772,7 +925,7 @@ system and gives an overview of their function and contents.
BINCONFIG = "${bindir}/libpng-config ${bindir}/libpng16-config"
:term:`BINCONFIG_GLOB`
- When inheriting the :ref:`binconfig <ref-classes-binconfig>` class,
+ When inheriting the :ref:`ref-classes-binconfig` class,
this variable specifies a wildcard for configuration scripts that
need editing. The scripts are edited to correct any paths that have
been set up during compilation so that they are correct for use when
@@ -789,11 +942,14 @@ system and gives an overview of their function and contents.
and `glob <https://docs.python.org/3/library/glob.html>`__.
For more information on how this variable works, see
- ``meta/classes/binconfig.bbclass`` in the :term:`Source Directory`.
+ ``meta/classes-recipe/binconfig.bbclass`` in the :term:`Source Directory`.
You can also find general
information on the class in the
":ref:`ref-classes-binconfig`" section.
+ :term:`BITBAKE_UI`
+ See :term:`bitbake:BITBAKE_UI` in the BitBake manual.
+
:term:`BP`
The base recipe name and version but without any special recipe name
suffix (i.e. ``-native``, ``lib64-``, and so forth). :term:`BP` is
@@ -888,14 +1044,13 @@ system and gives an overview of their function and contents.
:term:`BUILD_OS`
Specifies the operating system in use on the build host (e.g.
"linux"). The OpenEmbedded build system sets the value of
- :term:`BUILD_OS` from the OS reported by the ``uname`` command - the
+ :term:`BUILD_OS` from the OS reported by the ``uname`` command --- the
first word, converted to lower-case characters.
:term:`BUILD_PREFIX`
The toolchain binary prefix used for native recipes. The OpenEmbedded
build system uses the :term:`BUILD_PREFIX` value to set the
- :term:`TARGET_PREFIX` when building for
- ``native`` recipes.
+ :term:`TARGET_PREFIX` when building for :ref:`ref-classes-native` recipes.
:term:`BUILD_STRIP`
Specifies the command to be used to strip debugging symbols from
@@ -906,7 +1061,7 @@ system and gives an overview of their function and contents.
:term:`BUILD_SYS`
Specifies the system, including the architecture and the operating
system, to use when building for the build host (i.e. when building
- ``native`` recipes).
+ :ref:`ref-classes-native` recipes).
The OpenEmbedded build system automatically sets this variable based
on :term:`BUILD_ARCH`,
@@ -919,30 +1074,29 @@ system and gives an overview of their function and contents.
The default value is an empty string ("").
:term:`BUILDDIR`
- Points to the location of the :term:`Build Directory`.
- You can define this directory indirectly through the
- :ref:`structure-core-script` script by passing in a Build
- Directory path when you run the script. If you run the script and do
- not provide a Build Directory path, the :term:`BUILDDIR` defaults to
- ``build`` in the current directory.
+ Points to the location of the :term:`Build Directory`. You can define
+ this directory indirectly through the :ref:`structure-core-script` script
+ by passing in a :term:`Build Directory` path when you run the script. If
+ you run the script and do not provide a :term:`Build Directory` path, the
+ :term:`BUILDDIR` defaults to ``build`` in the current directory.
:term:`BUILDHISTORY_COMMIT`
- When inheriting the :ref:`buildhistory <ref-classes-buildhistory>`
- class, this variable specifies whether or not to commit the build
- history output in a local Git repository. If set to "1", this local
- repository will be maintained automatically by the :ref:`buildhistory <ref-classes-buildhistory>`
- class and a commit will be created on every build for changes to each
- top-level subdirectory of the build history output (images, packages,
- and sdk). If you want to track changes to build history over time,
- you should set this value to "1".
+ When inheriting the :ref:`ref-classes-buildhistory` class, this variable
+ specifies whether or not to commit the build history output in a local
+ Git repository. If set to "1", this local repository will be maintained
+ automatically by the :ref:`ref-classes-buildhistory` class and a commit
+ will be created on every build for changes to each top-level subdirectory
+ of the build history output (images, packages, and sdk). If you want to
+ track changes to build history over time, you should set this value to
+ "1".
- By default, the :ref:`buildhistory <ref-classes-buildhistory>` class does not commit the build
- history output in a local Git repository::
+ By default, the :ref:`ref-classes-buildhistory` class
+ enables committing the buildhistory output in a local Git repository::
- BUILDHISTORY_COMMIT ?= "0"
+ BUILDHISTORY_COMMIT ?= "1"
:term:`BUILDHISTORY_COMMIT_AUTHOR`
- When inheriting the :ref:`buildhistory <ref-classes-buildhistory>`
+ When inheriting the :ref:`ref-classes-buildhistory`
class, this variable specifies the author to use for each Git commit.
In order for the :term:`BUILDHISTORY_COMMIT_AUTHOR` variable to work, the
:term:`BUILDHISTORY_COMMIT` variable must
@@ -953,25 +1107,27 @@ system and gives an overview of their function and contents.
email@host". Providing an email address or host that is not valid
does not produce an error.
- By default, the :ref:`buildhistory <ref-classes-buildhistory>` class sets the variable as follows::
+ By default, the :ref:`ref-classes-buildhistory` class sets the variable
+ as follows::
BUILDHISTORY_COMMIT_AUTHOR ?= "buildhistory <buildhistory@${DISTRO}>"
:term:`BUILDHISTORY_DIR`
- When inheriting the :ref:`buildhistory <ref-classes-buildhistory>`
+ When inheriting the :ref:`ref-classes-buildhistory`
class, this variable specifies the directory in which build history
information is kept. For more information on how the variable works,
see the :ref:`ref-classes-buildhistory` class.
- By default, the :ref:`buildhistory <ref-classes-buildhistory>` class sets the directory as follows::
+ By default, the :ref:`ref-classes-buildhistory` class sets the directory
+ as follows::
BUILDHISTORY_DIR ?= "${TOPDIR}/buildhistory"
:term:`BUILDHISTORY_FEATURES`
- When inheriting the :ref:`buildhistory <ref-classes-buildhistory>`
+ When inheriting the :ref:`ref-classes-buildhistory`
class, this variable specifies the build history features to be
enabled. For more information on how build history works, see the
- ":ref:`dev-manual/common-tasks:maintaining build output quality`"
+ ":ref:`dev-manual/build-quality:maintaining build output quality`"
section in the Yocto Project Development Tasks Manual.
You can specify these features in the form of a space-separated list:
@@ -990,13 +1146,13 @@ system and gives an overview of their function and contents.
This saves one file per task and lists the SHA-256 checksums for
each file staged (i.e. the output of the task).
- By default, the :ref:`buildhistory <ref-classes-buildhistory>` class enables the following
- features::
+ By default, the :ref:`ref-classes-buildhistory` class enables the
+ following features::
BUILDHISTORY_FEATURES ?= "image package sdk"
:term:`BUILDHISTORY_IMAGE_FILES`
- When inheriting the :ref:`buildhistory <ref-classes-buildhistory>`
+ When inheriting the :ref:`ref-classes-buildhistory`
class, this variable specifies a list of paths to files copied from
the image contents into the build history directory under an
"image-files" directory in the directory for the image, so that you
@@ -1006,42 +1162,45 @@ system and gives an overview of their function and contents.
any file. Specifying an invalid path does not produce an error.
Consequently, you can include files that might not always be present.
- By default, the :ref:`buildhistory <ref-classes-buildhistory>` class provides paths to the
- following files::
+ By default, the :ref:`ref-classes-buildhistory` class provides paths to
+ the following files::
BUILDHISTORY_IMAGE_FILES ?= "/etc/passwd /etc/group"
:term:`BUILDHISTORY_PATH_PREFIX_STRIP`
- When inheriting the :ref:`buildhistory <ref-classes-buildhistory>`
+ When inheriting the :ref:`ref-classes-buildhistory`
class, this variable specifies a common path prefix that should be
stripped off the beginning of paths in the task signature list when the
``task`` feature is active in :term:`BUILDHISTORY_FEATURES`. This can be
useful when build history is populated from multiple sources that may not
all use the same top level directory.
- By default, the :ref:`buildhistory <ref-classes-buildhistory>` class sets the variable as follows::
+ By default, the :ref:`ref-classes-buildhistory` class sets the variable
+ as follows::
BUILDHISTORY_PATH_PREFIX_STRIP ?= ""
In this case, no prefixes will be stripped.
:term:`BUILDHISTORY_PUSH_REPO`
- When inheriting the :ref:`buildhistory <ref-classes-buildhistory>`
- class, this variable optionally specifies a remote repository to
- which build history pushes Git changes. In order for
- :term:`BUILDHISTORY_PUSH_REPO` to work,
- :term:`BUILDHISTORY_COMMIT` must be set to
- "1".
+ When inheriting the :ref:`ref-classes-buildhistory` class, this variable
+ optionally specifies a remote repository to which build history pushes
+ Git changes. In order for :term:`BUILDHISTORY_PUSH_REPO` to work,
+ :term:`BUILDHISTORY_COMMIT` must be set to "1".
The repository should correspond to a remote address that specifies a
repository as understood by Git, or alternatively to a remote name
that you have set up manually using ``git remote`` within the local
repository.
- By default, the :ref:`buildhistory <ref-classes-buildhistory>` class sets the variable as follows::
+ By default, the :ref:`ref-classes-buildhistory` class sets the variable
+ as follows::
BUILDHISTORY_PUSH_REPO ?= ""
+ :term:`BUILDNAME`
+ See :term:`bitbake:BUILDNAME` in the BitBake manual.
+
:term:`BUILDSDK_CFLAGS`
Specifies the flags to pass to the C compiler when building for the
SDK. When building in the ``nativesdk-`` context,
@@ -1068,8 +1227,7 @@ system and gives an overview of their function and contents.
:term:`BUILDSTATS_BASE`
Points to the location of the directory that holds build statistics
- when you use and enable the
- :ref:`buildstats <ref-classes-buildstats>` class. The
+ when you use and enable the :ref:`ref-classes-buildstats` class. The
:term:`BUILDSTATS_BASE` directory defaults to
``${``\ :term:`TMPDIR`\ ``}/buildstats/``.
@@ -1083,6 +1241,9 @@ system and gives an overview of their function and contents.
splitting the output executable file. Set the variable to "0" to get
a single output executable file.
+ :term:`BZRDIR`
+ See :term:`bitbake:BZRDIR` in the BitBake manual.
+
:term:`CACHE`
Specifies the directory BitBake uses to store a cache of the
:term:`Metadata` so it does not need to be parsed every time
@@ -1112,9 +1273,8 @@ system and gives an overview of their function and contents.
An internal variable specifying the special class override that
should currently apply (e.g. "class-target", "class-native", and so
forth). The classes that use this variable (e.g.
- :ref:`native <ref-classes-native>`,
- :ref:`nativesdk <ref-classes-nativesdk>`, and so forth) set the
- variable to appropriate values.
+ :ref:`ref-classes-native`, :ref:`ref-classes-nativesdk`, and so forth)
+ set the variable to appropriate values.
.. note::
@@ -1156,6 +1316,26 @@ system and gives an overview of their function and contents.
optional at the distribution level, in case the hardware supports
Bluetooth but you do not ever intend to use it.
+ :term:`COMMERCIAL_AUDIO_PLUGINS`
+ This variable is specific to the :yocto_git:`GStreamer recipes
+ </poky/tree/meta/recipes-multimedia/gstreamer/gstreamer1.0-meta-base.bb>`.
+ It allows to build the GStreamer `"ugly"
+ <https://github.com/GStreamer/gst-plugins-ugly>`__ and
+ `"bad" <https://github.com/GStreamer/gst-plugins-bad>`__ audio plugins.
+
+ See the :ref:`dev-manual/licenses:other variables related to commercial licenses`
+ section for usage details.
+
+ :term:`COMMERCIAL_VIDEO_PLUGINS`
+ This variable is specific to the :yocto_git:`GStreamer recipes
+ </poky/tree/meta/recipes-multimedia/gstreamer/gstreamer1.0-meta-base.bb>`.
+ It allows to build the GStreamer `"ugly"
+ <https://github.com/GStreamer/gst-plugins-ugly>`__ and
+ `"bad" <https://github.com/GStreamer/gst-plugins-bad>`__ video plugins.
+
+ See the :ref:`dev-manual/licenses:other variables related to commercial licenses`
+ section for usage details.
+
:term:`COMMON_LICENSE_DIR`
Points to ``meta/files/common-licenses`` in the
:term:`Source Directory`, which is where generic license
@@ -1182,17 +1362,41 @@ system and gives an overview of their function and contents.
speed since the build system skips parsing recipes not compatible
with the current machine.
+ If one wants to have a recipe only available for some architectures
+ (here ``aarch64`` and ``mips64``), the following can be used::
+
+ COMPATIBLE_MACHINE = "^$"
+ COMPATIBLE_MACHINE:arch64 = "^(aarch64)$"
+ COMPATIBLE_MACHINE:mips64 = "^(mips64)$"
+
+ The first line means "match all machines whose :term:`MACHINEOVERRIDES`
+ contains the empty string", which will always be none.
+
+ The second is for matching all machines whose :term:`MACHINEOVERRIDES`
+ contains one override which is exactly ``aarch64``.
+
+ The third is for matching all machines whose :term:`MACHINEOVERRIDES`
+ contains one override which is exactly ``mips64``.
+
+ The same could be achieved with::
+
+ COMPATIBLE_MACHINE = "^(aarch64|mips64)$"
+
+ .. note::
+
+ When :term:`COMPATIBLE_MACHINE` is set in a recipe inherits from
+ native, the recipe is always skipped. All native recipes must be
+ entirely target independent and should not rely on :term:`MACHINE`.
+
:term:`COMPLEMENTARY_GLOB`
Defines wildcards to match when installing a list of complementary
packages for all the packages explicitly (or implicitly) installed in
an image.
- .. note::
-
- The :term:`COMPLEMENTARY_GLOB` variable uses Unix filename pattern matching
- (`fnmatch <https://docs.python.org/3/library/fnmatch.html#module-fnmatch>`__),
- which is similar to the Unix style pathname pattern expansion
- (`glob <https://docs.python.org/3/library/glob.html>`__).
+ The :term:`COMPLEMENTARY_GLOB` variable uses Unix filename pattern matching
+ (`fnmatch <https://docs.python.org/3/library/fnmatch.html#module-fnmatch>`__),
+ which is similar to the Unix style pathname pattern expansion
+ (`glob <https://docs.python.org/3/library/glob.html>`__).
The resulting list of complementary packages is associated with an
item that can be added to
@@ -1207,6 +1411,11 @@ system and gives an overview of their function and contents.
COMPLEMENTARY_GLOB[dev-pkgs] = '*-dev'
+ .. note::
+
+ When installing complementary packages, recommends relationships
+ (set via :term:`RRECOMMENDS`) are always ignored.
+
:term:`COMPONENTS_DIR`
Stores sysroot components for each recipe. The OpenEmbedded build
system uses :term:`COMPONENTS_DIR` when constructing recipe-specific
@@ -1258,24 +1467,24 @@ system and gives an overview of their function and contents.
:term:`Source Directory`.
:term:`CONFIG_INITRAMFS_SOURCE`
- Identifies the initial RAM filesystem (initramfs) source files. The
+ Identifies the initial RAM filesystem (:term:`Initramfs`) source files. The
OpenEmbedded build system receives and uses this kernel Kconfig
variable as an environment variable. By default, the variable is set
to null ("").
The :term:`CONFIG_INITRAMFS_SOURCE` can be either a single cpio archive
with a ``.cpio`` suffix or a space-separated list of directories and
- files for building the initramfs image. A cpio archive should contain
- a filesystem archive to be used as an initramfs image. Directories
- should contain a filesystem layout to be included in the initramfs
+ files for building the :term:`Initramfs` image. A cpio archive should contain
+ a filesystem archive to be used as an :term:`Initramfs` image. Directories
+ should contain a filesystem layout to be included in the :term:`Initramfs`
image. Files should contain entries according to the format described
by the ``usr/gen_init_cpio`` program in the kernel tree.
- If you specify multiple directories and files, the initramfs image
+ If you specify multiple directories and files, the :term:`Initramfs` image
will be the aggregate of all of them.
- For information on creating an initramfs, see the
- ":ref:`dev-manual/common-tasks:building an initial ram filesystem (initramfs) image`" section
+ For information on creating an :term:`Initramfs`, see the
+ ":ref:`dev-manual/building:building an initial ram filesystem (Initramfs) image`" section
in the Yocto Project Development Tasks Manual.
:term:`CONFIG_SITE`
@@ -1287,8 +1496,7 @@ system and gives an overview of their function and contents.
The minimal arguments for GNU configure.
:term:`CONFLICT_DISTRO_FEATURES`
- When inheriting the
- :ref:`features_check <ref-classes-features_check>`
+ When inheriting the :ref:`ref-classes-features_check`
class, this variable identifies distribution features that would be
in conflict should the recipe be built. In other words, if the
:term:`CONFLICT_DISTRO_FEATURES` variable lists a feature that also
@@ -1296,6 +1504,19 @@ system and gives an overview of their function and contents.
the recipe will be skipped, and if the build system attempts to build
the recipe then an error will be triggered.
+ :term:`CONVERSION_CMD`
+ This variable is used for storing image conversion commands.
+ Image conversion can convert an image into different objects like:
+
+ - Compressed version of the image
+
+ - Checksums for the image
+
+ An example of :term:`CONVERSION_CMD` from :ref:`ref-classes-image_types`
+ class is::
+
+ CONVERSION_CMD:lzo = "lzop -9 ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type}"
+
:term:`COPY_LIC_DIRS`
If set to "1" along with the
:term:`COPY_LIC_MANIFEST` variable, the
@@ -1310,7 +1531,7 @@ system and gives an overview of their function and contents.
newly installed packages to an image, which might be most suitable for
read-only filesystems that cannot be upgraded. See the
:term:`LICENSE_CREATE_PACKAGE` variable for additional information.
- You can also reference the ":ref:`dev-manual/common-tasks:providing license text`"
+ You can also reference the ":ref:`dev-manual/licenses:providing license text`"
section in the Yocto Project Development Tasks Manual for
information on providing license text.
@@ -1326,15 +1547,14 @@ system and gives an overview of their function and contents.
newly installed packages to an image, which might be most suitable for
read-only filesystems that cannot be upgraded. See the
:term:`LICENSE_CREATE_PACKAGE` variable for additional information.
- You can also reference the ":ref:`dev-manual/common-tasks:providing license text`"
+ You can also reference the ":ref:`dev-manual/licenses:providing license text`"
section in the Yocto Project Development Tasks Manual for
information on providing license text.
:term:`COPYLEFT_LICENSE_EXCLUDE`
- A space-separated list of licenses to exclude from the source
- archived by the :ref:`archiver <ref-classes-archiver>` class. In
- other words, if a license in a recipe's
- :term:`LICENSE` value is in the value of
+ A space-separated list of licenses to exclude from the source archived by
+ the :ref:`ref-classes-archiver` class. In other words, if a license in a
+ recipe's :term:`LICENSE` value is in the value of
:term:`COPYLEFT_LICENSE_EXCLUDE`, then its source is not archived by the
class.
@@ -1345,58 +1565,54 @@ system and gives an overview of their function and contents.
The default value, which is "CLOSED Proprietary", for
:term:`COPYLEFT_LICENSE_EXCLUDE` is set by the
- :ref:`copyleft_filter <ref-classes-copyleft_filter>` class, which
- is inherited by the :ref:`archiver <ref-classes-archiver>` class.
+ :ref:`ref-classes-copyleft_filter` class, which
+ is inherited by the :ref:`ref-classes-archiver` class.
:term:`COPYLEFT_LICENSE_INCLUDE`
A space-separated list of licenses to include in the source archived
- by the :ref:`archiver <ref-classes-archiver>` class. In other
+ by the :ref:`ref-classes-archiver` class. In other
words, if a license in a recipe's :term:`LICENSE`
value is in the value of :term:`COPYLEFT_LICENSE_INCLUDE`, then its
source is archived by the class.
- The default value is set by the
- :ref:`copyleft_filter <ref-classes-copyleft_filter>` class, which
- is inherited by the :ref:`archiver <ref-classes-archiver>` class. The default value includes
- "GPL*", "LGPL*", and "AGPL*".
+ The default value is set by the :ref:`ref-classes-copyleft_filter` class,
+ which is inherited by the :ref:`ref-classes-archiver` class. The default
+ value includes "GPL*", "LGPL*", and "AGPL*".
:term:`COPYLEFT_PN_EXCLUDE`
A list of recipes to exclude in the source archived by the
- :ref:`archiver <ref-classes-archiver>` class. The
- :term:`COPYLEFT_PN_EXCLUDE` variable overrides the license inclusion and
- exclusion caused through the
- :term:`COPYLEFT_LICENSE_INCLUDE` and
- :term:`COPYLEFT_LICENSE_EXCLUDE`
+ :ref:`ref-classes-archiver` class. The :term:`COPYLEFT_PN_EXCLUDE`
+ variable overrides the license inclusion and exclusion caused through the
+ :term:`COPYLEFT_LICENSE_INCLUDE` and :term:`COPYLEFT_LICENSE_EXCLUDE`
variables, respectively.
The default value, which is "" indicating to not explicitly exclude
any recipes by name, for :term:`COPYLEFT_PN_EXCLUDE` is set by the
- :ref:`copyleft_filter <ref-classes-copyleft_filter>` class, which
- is inherited by the :ref:`archiver <ref-classes-archiver>` class.
+ :ref:`ref-classes-copyleft_filter` class, which is inherited by the
+ :ref:`ref-classes-archiver` class.
:term:`COPYLEFT_PN_INCLUDE`
A list of recipes to include in the source archived by the
- :ref:`archiver <ref-classes-archiver>` class. The
- :term:`COPYLEFT_PN_INCLUDE` variable overrides the license inclusion and
- exclusion caused through the
- :term:`COPYLEFT_LICENSE_INCLUDE` and
- :term:`COPYLEFT_LICENSE_EXCLUDE`
+ :ref:`ref-classes-archiver` class. The :term:`COPYLEFT_PN_INCLUDE`
+ variable overrides the license inclusion and exclusion caused through the
+ :term:`COPYLEFT_LICENSE_INCLUDE` and :term:`COPYLEFT_LICENSE_EXCLUDE`
variables, respectively.
The default value, which is "" indicating to not explicitly include
any recipes by name, for :term:`COPYLEFT_PN_INCLUDE` is set by the
- :ref:`copyleft_filter <ref-classes-copyleft_filter>` class, which
- is inherited by the :ref:`archiver <ref-classes-archiver>` class.
+ :ref:`ref-classes-copyleft_filter` class, which is inherited by the
+ :ref:`ref-classes-archiver` class.
:term:`COPYLEFT_RECIPE_TYPES`
A space-separated list of recipe types to include in the source
archived by the :ref:`archiver <ref-classes-archiver>` class.
- Recipe types are ``target``, ``native``, ``nativesdk``, ``cross``,
- ``crosssdk``, and ``cross-canadian``.
+ Recipe types are ``target``, :ref:`ref-classes-native`,
+ :ref:`ref-classes-nativesdk`, :ref:`ref-classes-cross`,
+ :ref:`ref-classes-crosssdk`, and :ref:`ref-classes-cross-canadian`.
The default value, which is "target*", for :term:`COPYLEFT_RECIPE_TYPES`
- is set by the :ref:`copyleft_filter <ref-classes-copyleft_filter>`
- class, which is inherited by the :ref:`archiver <ref-classes-archiver>` class.
+ is set by the :ref:`ref-classes-copyleft_filter` class, which is
+ inherited by the :ref:`ref-classes-archiver` class.
:term:`CORE_IMAGE_EXTRA_INSTALL`
Specifies the list of packages to be added to the image. You should
@@ -1463,21 +1679,38 @@ system and gives an overview of their function and contents.
and kernel module recipes).
:term:`CVE_CHECK_IGNORE`
- The list of CVE IDs which are ignored. Here is
- an example from the :oe_layerindex:`Python3 recipe</layerindex/recipe/23823>`::
+ This variable is deprecated and should be replaced by :term:`CVE_STATUS`.
- # This is windows only issue.
- CVE_CHECK_IGNORE += "CVE-2020-15523"
+ :term:`CVE_CHECK_SHOW_WARNINGS`
+ Specifies whether or not the :ref:`ref-classes-cve-check`
+ class should generate warning messages on the console when unpatched
+ CVEs are found. The default is "1", but you may wish to set it to "0" if
+ you are already examining/processing the logs after the build has
+ completed and thus do not need the warning messages.
:term:`CVE_CHECK_SKIP_RECIPE`
The list of package names (:term:`PN`) for which
CVEs (Common Vulnerabilities and Exposures) are ignored.
+ :term:`CVE_DB_INCR_UPDATE_AGE_THRES`
+ Specifies the maximum age of the CVE database in seconds for an
+ incremental update (instead of a full-download). Use "0" to force a
+ full-download.
+
+ :term:`CVE_DB_UPDATE_INTERVAL`
+ Specifies the CVE database update interval in seconds, as used by
+ ``cve-update-db-native``. The default value is "86400" i.e. once a day
+ (24*60*60). If the value is set to "0" then the update will be forced
+ every time. Alternatively, a negative value e.g. "-1" will disable
+ updates entirely.
+
:term:`CVE_PRODUCT`
In a recipe, defines the name used to match the recipe name
against the name in the upstream `NIST CVE database <https://nvd.nist.gov/>`__.
- The default is ${:term:`BPN`}. If it does not match the name in the NIST CVE
+ The default is ${:term:`BPN`} (except for recipes that inherit the
+ :ref:`ref-classes-pypi` class where it is set based upon
+ :term:`PYPI_PACKAGE`). If it does not match the name in the NIST CVE
database or matches with multiple entries in the database, the default
value needs to be changed.
@@ -1492,6 +1725,47 @@ system and gives an overview of their function and contents.
CVE_PRODUCT = "vendor:package"
+ :term:`CVE_STATUS`
+ The CVE ID which is patched or should be ignored. Here is
+ an example from the :oe_layerindex:`Python3 recipe</layerindex/recipe/23823>`::
+
+ CVE_STATUS[CVE-2020-15523] = "not-applicable-platform: Issue only applies on Windows"
+
+ It has the format "reason: description" and the description is optional.
+ The Reason is mapped to the final CVE state by mapping via
+ :term:`CVE_CHECK_STATUSMAP`. See :ref:`dev-manual/vulnerabilities:fixing vulnerabilities in recipes`
+ for details.
+
+ :term:`CVE_STATUS_GROUPS`
+ If there are many CVEs with the same status and reason, they can by simplified by using this
+ variable instead of many similar lines with :term:`CVE_STATUS`::
+
+ CVE_STATUS_GROUPS = "CVE_STATUS_WIN CVE_STATUS_PATCHED"
+
+ CVE_STATUS_WIN = "CVE-1234-0001 CVE-1234-0002"
+ CVE_STATUS_WIN[status] = "not-applicable-platform: Issue only applies on Windows"
+ CVE_STATUS_PATCHED = "CVE-1234-0003 CVE-1234-0004"
+ CVE_STATUS_PATCHED[status] = "fixed-version: Fixed externally"
+
+ :term:`CVE_CHECK_STATUSMAP`
+ Mapping variable for all possible reasons of :term:`CVE_STATUS`:
+ ``Patched``, ``Unpatched`` and ``Ignored``.
+ See :ref:`ref-classes-cve-check` or ``meta/conf/cve-check-map.conf`` for more details::
+
+ CVE_CHECK_STATUSMAP[cpe-incorrect] = "Ignored"
+
+ :term:`CVE_VERSION`
+ In a recipe, defines the version used to match the recipe version
+ against the version in the `NIST CVE database <https://nvd.nist.gov/>`__
+ when usign :ref:`ref-classes-cve-check`.
+
+ The default is ${:term:`PV`} but if recipes use custom version numbers
+ which do not map to upstream software component release versions and the versions
+ used in the CVE database, then this variable can be used to set the
+ version number for :ref:`ref-classes-cve-check`. Example::
+
+ CVE_VERSION = "2.39"
+
:term:`CVSDIR`
The directory in which files checked out under the CVS system are
stored.
@@ -1538,7 +1812,7 @@ system and gives an overview of their function and contents.
suitable for timestamps.
:term:`DEBIAN_NOAUTONAME`
- When the :ref:`debian <ref-classes-debian>` class is inherited,
+ When the :ref:`ref-classes-debian` class is inherited,
which is the default behavior, :term:`DEBIAN_NOAUTONAME` specifies a
particular package should not be renamed according to Debian library
package naming. You must use the package name as an override when you
@@ -1547,7 +1821,7 @@ system and gives an overview of their function and contents.
DEBIAN_NOAUTONAME:fontconfig-utils = "1"
:term:`DEBIANNAME`
- When the :ref:`debian <ref-classes-debian>` class is inherited,
+ When the :ref:`ref-classes-debian` class is inherited,
which is the default behavior, :term:`DEBIANNAME` allows you to override
the library name for an individual package. Overriding the library
name in these cases is rare. You must use the package name as an
@@ -1618,16 +1892,15 @@ system and gives an overview of their function and contents.
DEPENDS = "bar"
- The practical effect of the previous
- assignment is that all files installed by bar will be available in
- the appropriate staging sysroot, given by the
- :term:`STAGING_DIR* <STAGING_DIR>` variables, by the time the
- :ref:`ref-tasks-configure` task for ``foo`` runs.
- This mechanism is implemented by having ``do_configure`` depend on
- the :ref:`ref-tasks-populate_sysroot` task of
- each recipe listed in :term:`DEPENDS`, through a
- ``[``\ :ref:`deptask <bitbake:bitbake-user-manual/bitbake-user-manual-metadata:variable flags>`\ ``]``
- declaration in the :ref:`base <ref-classes-base>` class.
+ The practical effect of the previous assignment is that all files
+ installed by bar will be available in the appropriate staging sysroot,
+ given by the :term:`STAGING_DIR* <STAGING_DIR>` variables, by the time
+ the :ref:`ref-tasks-configure` task for ``foo`` runs. This mechanism is
+ implemented by having :ref:`ref-tasks-configure` depend on the
+ :ref:`ref-tasks-populate_sysroot` task of each recipe listed in
+ :term:`DEPENDS`, through a
+ ``[``\ :ref:`deptask <bitbake-user-manual/bitbake-user-manual-metadata:variable flags>`\ ``]``
+ declaration in the :ref:`ref-classes-base` class.
.. note::
@@ -1643,7 +1916,7 @@ system and gives an overview of their function and contents.
DEPENDS = "codegen-native"
For more
- information, see the :ref:`native <ref-classes-native>` class and
+ information, see the :ref:`ref-classes-native` class and
the :term:`EXTRANATIVEPATH` variable.
.. note::
@@ -1673,22 +1946,21 @@ system and gives an overview of their function and contents.
to the recipe that installs ``libbar``, other recipes might
fail to link against ``libfoo``.
- For information on runtime dependencies, see the
- :term:`RDEPENDS` variable. You can also see the
- ":ref:`bitbake:bitbake-user-manual/bitbake-user-manual-metadata:tasks`" and
- ":ref:`bitbake:bitbake-user-manual/bitbake-user-manual-execution:dependencies`" sections in the
- BitBake User Manual for additional information on tasks and
- dependencies.
+ For information on runtime dependencies, see the :term:`RDEPENDS`
+ variable. You can also see the
+ ":ref:`bitbake-user-manual/bitbake-user-manual-metadata:tasks`" and
+ ":ref:`bitbake-user-manual/bitbake-user-manual-execution:dependencies`"
+ sections in the BitBake User Manual for additional information on tasks
+ and dependencies.
:term:`DEPLOY_DIR`
Points to the general area that the OpenEmbedded build system uses to
place images, packages, SDKs, and other output files that are ready
to be used outside of the build system. By default, this directory
- resides within the :term:`Build Directory` as
- ``${TMPDIR}/deploy``.
+ resides within the :term:`Build Directory` as ``${TMPDIR}/deploy``.
For more information on the structure of the Build Directory, see
- ":ref:`ref-manual/structure:the build directory - \`\`build/\`\``" section.
+ ":ref:`ref-manual/structure:the build directory --- \`\`build/\`\``" section.
For more detail on the contents of the ``deploy`` directory, see the
":ref:`overview-manual/concepts:images`",
":ref:`overview-manual/concepts:package feeds`", and
@@ -1698,9 +1970,8 @@ system and gives an overview of their function and contents.
:term:`DEPLOY_DIR_DEB`
Points to the area that the OpenEmbedded build system uses to place
Debian packages that are ready to be used outside of the build
- system. This variable applies only when
- :term:`PACKAGE_CLASSES` contains
- "package_deb".
+ system. This variable applies only when :term:`PACKAGE_CLASSES` contains
+ ":ref:`ref-classes-package_deb`".
The BitBake configuration file initially defines the
:term:`DEPLOY_DIR_DEB` variable as a sub-folder of
@@ -1708,7 +1979,7 @@ system and gives an overview of their function and contents.
DEPLOY_DIR_DEB = "${DEPLOY_DIR}/deb"
- The :ref:`package_deb <ref-classes-package_deb>` class uses the
+ The :ref:`ref-classes-package_deb` class uses the
:term:`DEPLOY_DIR_DEB` variable to make sure the
:ref:`ref-tasks-package_write_deb` task
writes Debian packages into the appropriate folder. For more
@@ -1727,12 +1998,11 @@ system and gives an overview of their function and contents.
It must not be used directly in recipes when deploying files. Instead,
it's only useful when a recipe needs to "read" a file already deployed
by a dependency. So, it should be filled with the contents of
- :term:`DEPLOYDIR` by the :ref:`deploy <ref-classes-deploy>` class or
- with the contents of :term:`IMGDEPLOYDIR` by the :ref:`image
- <ref-classes-image>` class.
+ :term:`DEPLOYDIR` by the :ref:`ref-classes-deploy` class or with the
+ contents of :term:`IMGDEPLOYDIR` by the :ref:`ref-classes-image` class.
- For more information on the structure of the Build Directory, see
- ":ref:`ref-manual/structure:the build directory - \`\`build/\`\``" section.
+ For more information on the structure of the :term:`Build Directory`, see
+ ":ref:`ref-manual/structure:the build directory --- \`\`build/\`\``" section.
For more detail on the contents of the ``deploy`` directory, see the
":ref:`overview-manual/concepts:images`" and
":ref:`overview-manual/concepts:application development sdk`" sections both in
@@ -1741,18 +2011,16 @@ system and gives an overview of their function and contents.
:term:`DEPLOY_DIR_IPK`
Points to the area that the OpenEmbedded build system uses to place
IPK packages that are ready to be used outside of the build system.
- This variable applies only when
- :term:`PACKAGE_CLASSES` contains
- "package_ipk".
+ This variable applies only when :term:`PACKAGE_CLASSES` contains
+ ":ref:`ref-classes-package_ipk`".
The BitBake configuration file initially defines this variable as a
sub-folder of :term:`DEPLOY_DIR`::
DEPLOY_DIR_IPK = "${DEPLOY_DIR}/ipk"
- The :ref:`package_ipk <ref-classes-package_ipk>` class uses the
- :term:`DEPLOY_DIR_IPK` variable to make sure the
- :ref:`ref-tasks-package_write_ipk` task
+ The :ref:`ref-classes-package_ipk` class uses the :term:`DEPLOY_DIR_IPK`
+ variable to make sure the :ref:`ref-tasks-package_write_ipk` task
writes IPK packages into the appropriate folder. For more information
on how packaging works, see the
":ref:`overview-manual/concepts:package feeds`" section
@@ -1761,16 +2029,15 @@ system and gives an overview of their function and contents.
:term:`DEPLOY_DIR_RPM`
Points to the area that the OpenEmbedded build system uses to place
RPM packages that are ready to be used outside of the build system.
- This variable applies only when
- :term:`PACKAGE_CLASSES` contains
- "package_rpm".
+ This variable applies only when :term:`PACKAGE_CLASSES` contains
+ ":ref:`ref-classes-package_rpm`".
The BitBake configuration file initially defines this variable as a
sub-folder of :term:`DEPLOY_DIR`::
DEPLOY_DIR_RPM = "${DEPLOY_DIR}/rpm"
- The :ref:`package_rpm <ref-classes-package_rpm>` class uses the
+ The :ref:`ref-classes-package_rpm` class uses the
:term:`DEPLOY_DIR_RPM` variable to make sure the
:ref:`ref-tasks-package_write_rpm` task
writes RPM packages into the appropriate folder. For more information
@@ -1778,34 +2045,14 @@ system and gives an overview of their function and contents.
":ref:`overview-manual/concepts:package feeds`" section
in the Yocto Project Overview and Concepts Manual.
- :term:`DEPLOY_DIR_TAR`
- Points to the area that the OpenEmbedded build system uses to place
- tarballs that are ready to be used outside of the build system. This
- variable applies only when
- :term:`PACKAGE_CLASSES` contains
- "package_tar".
-
- The BitBake configuration file initially defines this variable as a
- sub-folder of :term:`DEPLOY_DIR`::
-
- DEPLOY_DIR_TAR = "${DEPLOY_DIR}/tar"
-
- The :ref:`package_tar <ref-classes-package_tar>` class uses the
- :term:`DEPLOY_DIR_TAR` variable to make sure the
- :ref:`ref-tasks-package_write_tar` task
- writes TAR packages into the appropriate folder. For more information
- on how packaging works, see the
- ":ref:`overview-manual/concepts:package feeds`" section
- in the Yocto Project Overview and Concepts Manual.
-
:term:`DEPLOYDIR`
- When inheriting the :ref:`deploy <ref-classes-deploy>` class, the
+ When inheriting the :ref:`ref-classes-deploy` class, the
:term:`DEPLOYDIR` points to a temporary work area for deployed files that
- is set in the :ref:`deploy <ref-classes-deploy>` class as follows::
+ is set in the :ref:`ref-classes-deploy` class as follows::
DEPLOYDIR = "${WORKDIR}/deploy-${PN}"
- Recipes inheriting the :ref:`deploy <ref-classes-deploy>` class should copy files to be
+ Recipes inheriting the :ref:`ref-classes-deploy` class should copy files to be
deployed into :term:`DEPLOYDIR`, and the class will take care of copying
them into :term:`DEPLOY_DIR_IMAGE`
afterwards.
@@ -1815,6 +2062,23 @@ system and gives an overview of their function and contents.
:term:`DESCRIPTION` takes the value of the :term:`SUMMARY`
variable.
+ :term:`DEV_PKG_DEPENDENCY`
+ Provides an easy way for recipes to disable or adjust the runtime recommendation
+ (:term:`RRECOMMENDS`) of the ``${PN}-dev`` package on the main
+ (``${PN}``) package.
+
+ :term:`DISABLE_STATIC`
+ Used in order to disable static linking by default (in order to save
+ space, since static libraries are often unused in embedded systems.)
+ The default value is " --disable-static", however it can be set to ""
+ in order to enable static linking if desired. Certain recipes do this
+ individually, and also there is a
+ ``meta/conf/distro/include/no-static-libs.inc`` include file that
+ disables static linking for a number of recipes. Some software
+ packages or build tools (such as CMake) have explicit support for
+ enabling / disabling static linking, and in those cases
+ :term:`DISABLE_STATIC` is not used.
+
:term:`DISTRO`
The short name of the distribution. For information on the long name
of the distribution, see the :term:`DISTRO_NAME`
@@ -1874,24 +2138,39 @@ system and gives an overview of their function and contents.
target that can optionally support X11 to have its X11 support
enabled.
+ .. note::
+
+ Just enabling :term:`DISTRO_FEATURES` alone doesn't
+ enable feature support for packages. Mechanisms such as making
+ :term:`PACKAGECONFIG` track :term:`DISTRO_FEATURES` are used
+ to enable/disable package features.
+
Two more examples are Bluetooth and NFS support. For a more complete
list of features that ships with the Yocto Project and that you can
provide with this variable, see the ":ref:`ref-features-distro`" section.
:term:`DISTRO_FEATURES_BACKFILL`
- Features to be added to :term:`DISTRO_FEATURES` if not also present in
- :term:`DISTRO_FEATURES_BACKFILL_CONSIDERED`.
+ A space-separated list of features to be added to :term:`DISTRO_FEATURES`
+ if not also present in :term:`DISTRO_FEATURES_BACKFILL_CONSIDERED`.
This variable is set in the ``meta/conf/bitbake.conf`` file. It is
not intended to be user-configurable. It is best to just reference
- the variable to see which distro features are being backfilled for
- all distro configurations. See the ":ref:`ref-features-backfill`" section
- for more information.
+ the variable to see which distro features are being
+ :ref:`backfilled <ref-features-backfill>` for all distro configurations.
:term:`DISTRO_FEATURES_BACKFILL_CONSIDERED`
- Features from :term:`DISTRO_FEATURES_BACKFILL` that should not be
- backfilled (i.e. added to :term:`DISTRO_FEATURES`) during the build. See
- the ":ref:`ref-features-backfill`" section for more information.
+ A space-separated list of features from :term:`DISTRO_FEATURES_BACKFILL`
+ that should not be :ref:`backfilled <ref-features-backfill>` (i.e. added
+ to :term:`DISTRO_FEATURES`) during the build.
+
+ This corresponds to an opt-out mechanism. When new default distro
+ features are introduced, distribution maintainers can review (`consider`)
+ them and decide to exclude them from the
+ :ref:`backfilled <ref-features-backfill>` features. Therefore, the
+ combination of :term:`DISTRO_FEATURES_BACKFILL` and
+ :term:`DISTRO_FEATURES_BACKFILL_CONSIDERED` makes it possible to
+ add new default features without breaking existing distributions.
+
:term:`DISTRO_FEATURES_DEFAULT`
A convenience variable that gives you the default list of distro
@@ -1916,11 +2195,10 @@ system and gives an overview of their function and contents.
:term:`DISTRO_FEATURES_FILTER_NATIVESDK`
Specifies a list of features that if present in the target
- :term:`DISTRO_FEATURES` value should be
- included in :term:`DISTRO_FEATURES` when building nativesdk recipes. This
- variable is used in addition to the features filtered using the
- :term:`DISTRO_FEATURES_NATIVESDK`
- variable.
+ :term:`DISTRO_FEATURES` value should be included in
+ :term:`DISTRO_FEATURES` when building :ref:`ref-classes-nativesdk`
+ recipes. This variable is used in addition to the features filtered using
+ the :term:`DISTRO_FEATURES_NATIVESDK` variable.
:term:`DISTRO_FEATURES_NATIVE`
Specifies a list of features that should be included in
@@ -1933,10 +2211,9 @@ system and gives an overview of their function and contents.
:term:`DISTRO_FEATURES_NATIVESDK`
Specifies a list of features that should be included in
:term:`DISTRO_FEATURES` when building
- nativesdk recipes. This variable is used in addition to the features
- filtered using the
- :term:`DISTRO_FEATURES_FILTER_NATIVESDK`
- variable.
+ :ref:`ref-classes-nativesdk` recipes. This variable is used
+ in addition to the features filtered using the
+ :term:`DISTRO_FEATURES_FILTER_NATIVESDK` variable.
:term:`DISTRO_NAME`
The long name of the distribution. For information on the short name
@@ -1979,6 +2256,11 @@ system and gives an overview of their function and contents.
is included in the default value of
:term:`OVERRIDES`.
+ Here is an example from :yocto_git:`meta-poky/conf/distro/poky-tiny.conf
+ </poky/tree/meta-poky/conf/distro/poky-tiny.conf>`::
+
+ DISTROOVERRIDES = "poky:poky-tiny"
+
:term:`DL_DIR`
The central download directory used by the build process to store
downloads. By default, :term:`DL_DIR` gets files suitable for mirroring
@@ -1990,8 +2272,7 @@ system and gives an overview of their function and contents.
You can set this directory by defining the :term:`DL_DIR` variable in the
``conf/local.conf`` file. This directory is self-maintaining and you
should not have to touch it. By default, the directory is
- ``downloads`` in the :term:`Build Directory`.
- ::
+ ``downloads`` in the :term:`Build Directory`::
#DL_DIR ?= "${TOPDIR}/downloads"
@@ -2018,14 +2299,36 @@ system and gives an overview of their function and contents.
Wiki page.
:term:`DOC_COMPRESS`
- When inheriting the :ref:`compress_doc <ref-classes-compress_doc>`
+ When inheriting the :ref:`ref-classes-compress_doc`
class, this variable sets the compression policy used when the
- OpenEmbedded build system compresses man pages and info pages. By
+ OpenEmbedded build system compresses manual and info pages. By
default, the compression method used is gz (gzip). Other policies
available are xz and bz2.
For information on policies and on how to use this variable, see the
- comments in the ``meta/classes/compress_doc.bbclass`` file.
+ comments in the ``meta/classes-recipe/compress_doc.bbclass`` file.
+
+ :term:`DT_FILES`
+ Space-separated list of device tree source files to compile using
+ a recipe that inherits the :ref:`ref-classes-devicetree` class. These
+ are relative to the :term:`DT_FILES_PATH`.
+
+ For convenience, both ``.dts`` and ``.dtb`` extensions can be used.
+
+ Use an empty string (default) to build all device tree sources within
+ the :term:`DT_FILES_PATH` directory.
+
+ :term:`DT_FILES_PATH`
+ When compiling out-of-tree device tree sources using a recipe that
+ inherits the :ref:`ref-classes-devicetree` class, this variable specifies
+ the path to the directory containing dts files to build.
+
+ Defaults to the :term:`S` directory.
+
+ :term:`DT_PADDING_SIZE`
+ When inheriting the :ref:`ref-classes-devicetree` class, this variable
+ specifies the size of padding appended to the device tree blob, used as
+ extra space typically for additional properties during boot.
:term:`EFI_PROVIDER`
When building bootable images (i.e. where ``hddimg``, ``iso``, or
@@ -2033,9 +2336,14 @@ system and gives an overview of their function and contents.
:term:`EFI_PROVIDER` variable specifies the EFI bootloader to use. The
default is "grub-efi", but "systemd-boot" can be used instead.
- See the :ref:`systemd-boot <ref-classes-systemd-boot>` and
- :ref:`image-live <ref-classes-image-live>` classes for more
- information.
+ See the :ref:`ref-classes-systemd-boot` and :ref:`ref-classes-image-live`
+ classes for more information.
+
+ :term:`EFI_UKI_DIR`
+ The primary place for the UKI image inside the EFI System Partition.
+
+ :term:`EFI_UKI_PATH`
+ The path for the UKI image inside the root filesystem.
:term:`ENABLE_BINARY_LOCALE_GENERATION`
Variable that controls which locales for ``glibc`` are generated
@@ -2043,11 +2351,10 @@ system and gives an overview of their function and contents.
less).
:term:`ERR_REPORT_DIR`
- When used with the :ref:`report-error <ref-classes-report-error>`
- class, specifies the path used for storing the debug files created by
- the :ref:`error reporting
- tool <dev-manual/common-tasks:using the error reporting tool>`, which
- allows you to submit build errors you encounter to a central
+ When used with the :ref:`ref-classes-report-error` class, specifies the
+ path used for storing the debug files created by the :ref:`error reporting
+ tool <dev-manual/error-reporting-tool:using the error reporting tool>`,
+ which allows you to submit build errors you encounter to a central
database. By default, the value of this variable is
``${``\ :term:`LOG_DIR`\ ``}/error-report``.
@@ -2191,12 +2498,27 @@ system and gives an overview of their function and contents.
When kernel tools are available in the tree, they are preferred over
any externally installed tools. Setting the :term:`EXTERNAL_KERNEL_TOOLS`
variable tells the OpenEmbedded build system to prefer the installed
- external tools. See the
- :ref:`kernel-yocto <ref-classes-kernel-yocto>` class in
- ``meta/classes`` to see how the variable is used.
+ external tools. See the :ref:`ref-classes-kernel-yocto` class in
+ ``meta/classes-recipe`` to see how the variable is used.
+
+ :term:`KERNEL_LOCALVERSION`
+ This variable allows to append a string to the version
+ of the kernel image. This corresponds to the ``CONFIG_LOCALVERSION``
+ kernel configuration parameter.
+
+ Using this variable is only useful when you are using a kernel recipe
+ inheriting the :ref:`ref-classes-kernel` class, and which doesn't
+ already set a local version. Therefore, setting this variable has no
+ impact on ``linux-yocto`` kernels.
+
+ :term:`EXTERNAL_TOOLCHAIN`
+ When you intend to use an
+ :ref:`external toolchain <dev-manual/external-toolchain:optionally using an external toolchain>`,
+ this variable allows to specify the directory where this toolchain was
+ installed.
:term:`EXTERNALSRC`
- When inheriting the :ref:`externalsrc <ref-classes-externalsrc>`
+ When inheriting the :ref:`ref-classes-externalsrc`
class, this variable points to the source tree, which is outside of
the OpenEmbedded build system. When set, this variable sets the
:term:`S` variable, which is what the OpenEmbedded build
@@ -2204,24 +2526,24 @@ system and gives an overview of their function and contents.
See the ":ref:`ref-classes-externalsrc`" section for details. You
can also find information on how to use this variable in the
- ":ref:`dev-manual/common-tasks:building software from an external source`"
+ ":ref:`dev-manual/building:building software from an external source`"
section in the Yocto Project Development Tasks Manual.
:term:`EXTERNALSRC_BUILD`
- When inheriting the :ref:`externalsrc <ref-classes-externalsrc>`
+ When inheriting the :ref:`ref-classes-externalsrc`
class, this variable points to the directory in which the recipe's
source code is built, which is outside of the OpenEmbedded build
system. When set, this variable sets the :term:`B` variable,
- which is what the OpenEmbedded build system uses to locate the Build
- Directory.
+ which is what the OpenEmbedded build system uses to locate the
+ :term:`Build Directory`.
See the ":ref:`ref-classes-externalsrc`" section for details. You
can also find information on how to use this variable in the
- ":ref:`dev-manual/common-tasks:building software from an external source`"
+ ":ref:`dev-manual/building:building software from an external source`"
section in the Yocto Project Development Tasks Manual.
:term:`EXTRA_AUTORECONF`
- For recipes inheriting the :ref:`autotools <ref-classes-autotools>`
+ For recipes inheriting the :ref:`ref-classes-autotools`
class, you can use :term:`EXTRA_AUTORECONF` to specify extra options to
pass to the ``autoreconf`` command that is executed during the
:ref:`ref-tasks-configure` task.
@@ -2233,9 +2555,8 @@ system and gives an overview of their function and contents.
more than one feature, separate them with a space.
Typically, you configure this variable in your ``local.conf`` file,
- which is found in the :term:`Build Directory`.
- Although you can use this variable from within a recipe, best
- practices dictate that you do not.
+ which is found in the :term:`Build Directory`. Although you can use this
+ variable from within a recipe, best practices dictate that you do not.
.. note::
@@ -2244,31 +2565,31 @@ system and gives an overview of their function and contents.
Here are some examples of features you can add:
- - "dbg-pkgs" - Adds -dbg packages for all installed packages including
+ - "dbg-pkgs" --- adds -dbg packages for all installed packages including
symbol information for debugging and profiling.
- - "debug-tweaks" - Makes an image suitable for debugging. For example, allows root logins without passwords and
+ - "debug-tweaks" --- makes an image suitable for debugging. For example, allows root logins without passwords and
enables post-installation logging. See the 'allow-empty-password' and
'post-install-logging' features in the ":ref:`ref-features-image`"
section for more information.
- - "dev-pkgs" - Adds -dev packages for all installed packages. This is
+ - "dev-pkgs" --- adds -dev packages for all installed packages. This is
useful if you want to develop against the libraries in the image.
- - "read-only-rootfs" - Creates an image whose root filesystem is
+ - "read-only-rootfs" --- creates an image whose root filesystem is
read-only. See the
- ":ref:`dev-manual/common-tasks:creating a read-only root filesystem`"
+ ":ref:`dev-manual/read-only-rootfs:creating a read-only root filesystem`"
section in the Yocto Project Development Tasks Manual for more
information
- - "tools-debug" - Adds debugging tools such as gdb and strace.
- - "tools-sdk" - Adds development tools such as gcc, make,
+ - "tools-debug" --- adds debugging tools such as gdb and strace.
+ - "tools-sdk" --- adds development tools such as gcc, make,
pkgconfig and so forth.
- - "tools-testapps" - Adds useful testing tools
+ - "tools-testapps" --- adds useful testing tools
such as ts_print, aplay, arecord and so forth.
For a complete list of image features that ships with the Yocto
Project, see the ":ref:`ref-features-image`" section.
For an example that shows how to customize your image by using this
- variable, see the ":ref:`dev-manual/common-tasks:customizing images using custom \`\`image_features\`\` and \`\`extra_image_features\`\``"
+ variable, see the ":ref:`dev-manual/customizing-images:customizing images using custom \`\`image_features\`\` and \`\`extra_image_features\`\``"
section in the Yocto Project Development Tasks Manual.
:term:`EXTRA_IMAGECMD`
@@ -2295,7 +2616,7 @@ system and gives an overview of their function and contents.
:term:`EXTRA_OECMAKE`
Additional `CMake <https://cmake.org/overview/>`__ options. See the
- :ref:`cmake <ref-classes-cmake>` class for additional information.
+ :ref:`ref-classes-cmake` class for additional information.
:term:`EXTRA_OECONF`
Additional ``configure`` script options. See
@@ -2313,21 +2634,36 @@ system and gives an overview of their function and contents.
:term:`EXTRA_OEMAKE` to pass the required flags.
:term:`EXTRA_OESCONS`
- When inheriting the :ref:`scons <ref-classes-scons>` class, this
+ When inheriting the :ref:`ref-classes-scons` class, this
variable specifies additional configuration options you want to pass
to the ``scons`` command line.
+ :term:`EXTRA_OEMESON`
+ Additional `Meson <https://mesonbuild.com/>`__ options. See the
+ :ref:`ref-classes-meson` class for additional information.
+
+ In addition to standard Meson options, such options correspond to
+ `Meson build options <https://mesonbuild.com/Build-options.html>`__
+ defined in the ``meson_options.txt`` file in the sources to build.
+ Here is an example::
+
+ EXTRA_OEMESON = "-Dpython=disabled -Dvalgrind=disabled"
+
+ Note that any custom value for the Meson ``--buildtype`` option
+ should be set through the :term:`MESON_BUILDTYPE` variable.
+
:term:`EXTRA_USERS_PARAMS`
- When inheriting the :ref:`extrausers <ref-classes-extrausers>`
+ When inheriting the :ref:`ref-classes-extrausers`
class, this variable provides image level user and group operations.
This is a more global method of providing user and group
configuration as compared to using the
- :ref:`useradd <ref-classes-useradd>` class, which ties user and
+ :ref:`ref-classes-useradd` class, which ties user and
group configurations to a specific recipe.
The set list of commands you can configure using the
- :term:`EXTRA_USERS_PARAMS` is shown in the :ref:`extrausers <ref-classes-extrausers>` class. These
- commands map to the normal Unix commands of the same names::
+ :term:`EXTRA_USERS_PARAMS` is shown in the
+ :ref:`ref-classes-extrausers` class. These commands map to the normal
+ Unix commands of the same names::
# EXTRA_USERS_PARAMS = "\
# useradd -p '' tester; \
@@ -2365,8 +2701,8 @@ system and gives an overview of their function and contents.
.. note::
From a security perspective, hardcoding a default password is not
- generally a good idea or even legal in some jurisdictions. It is
- recommended that you do not do this if you are building a production
+ generally a good idea or even legal in some jurisdictions. It is
+ recommended that you do not do this if you are building a production
image.
Additionally there is a special ``passwd-expire`` command that will
@@ -2390,6 +2726,24 @@ system and gives an overview of their function and contents.
EXTRANATIVEPATH = "foo bar"
+ :term:`FAKEROOT`
+ See :term:`bitbake:FAKEROOT` in the BitBake manual.
+
+ :term:`FAKEROOTBASEENV`
+ See :term:`bitbake:FAKEROOTBASEENV` in the BitBake manual.
+
+ :term:`FAKEROOTCMD`
+ See :term:`bitbake:FAKEROOTCMD` in the BitBake manual.
+
+ :term:`FAKEROOTDIRS`
+ See :term:`bitbake:FAKEROOTDIRS` in the BitBake manual.
+
+ :term:`FAKEROOTENV`
+ See :term:`bitbake:FAKEROOTENV` in the BitBake manual.
+
+ :term:`FAKEROOTNOENV`
+ See :term:`bitbake:FAKEROOTNOENV` in the BitBake manual.
+
:term:`FEATURE_PACKAGES`
Defines one or more packages to include in an image when a specific
item is included in :term:`IMAGE_FEATURES`.
@@ -2425,6 +2779,12 @@ system and gives an overview of their function and contents.
a set of configuration files for you in your target that work with
the feed.
+ :term:`FETCHCMD`
+ See :term:`bitbake:FETCHCMD` in the BitBake manual.
+
+ :term:`FILE`
+ See :term:`bitbake:FILE` in the BitBake manual.
+
:term:`FILES`
The list of files and directories that are placed in a package. The
:term:`PACKAGES` variable lists the packages
@@ -2441,7 +2801,7 @@ system and gives an overview of their function and contents.
- When specifying files or paths, you can pattern match using
Python's
- `glob <https://docs.python.org/3/library/glob.html>`_
+ `glob <https://docs.python.org/3/library/glob.html>`__
syntax. For details on the syntax, see the documentation by
following the previous link.
@@ -2473,12 +2833,11 @@ system and gives an overview of their function and contents.
FILES_SOLIBSDEV ?= "${base_libdir}/lib*${SOLIBSDEV} ${libdir}/lib*${SOLIBSDEV}"
:term:`FILESEXTRAPATHS`
- Extends the search path the OpenEmbedded build system uses when
- looking for files and patches as it processes recipes and append
- files. The default directories BitBake uses when it processes recipes
- are initially defined by the :term:`FILESPATH`
- variable. You can extend :term:`FILESPATH` variable by using
- :term:`FILESEXTRAPATHS`.
+ A colon-separated list to extend the search path the OpenEmbedded build
+ system uses when looking for files and patches as it processes recipes
+ and append files. The default directories BitBake uses when it processes
+ recipes are initially defined by the :term:`FILESPATH` variable. You can
+ extend :term:`FILESPATH` variable by using :term:`FILESEXTRAPATHS`.
Best practices dictate that you accomplish this by using
:term:`FILESEXTRAPATHS` from within a ``.bbappend`` file and that you
@@ -2539,15 +2898,13 @@ system and gives an overview of their function and contents.
recipe to correctly extend the path.
:term:`FILESOVERRIDES`
- A subset of :term:`OVERRIDES` used by the
- OpenEmbedded build system for creating
- :term:`FILESPATH`. The :term:`FILESOVERRIDES` variable
- uses overrides to automatically extend the
- :term:`FILESPATH` variable. For an example of how
- that works, see the :term:`FILESPATH` variable
- description. Additionally, you find more information on how overrides
- are handled in the
- ":ref:`bitbake:bitbake-user-manual/bitbake-user-manual-metadata:conditional syntax (overrides)`"
+ A colon-separated list to specify a subset of :term:`OVERRIDES` used by
+ the OpenEmbedded build system for creating :term:`FILESPATH`. The
+ :term:`FILESOVERRIDES` variable uses overrides to automatically extend
+ the :term:`FILESPATH` variable. For an example of how that works, see the
+ :term:`FILESPATH` variable description. Additionally, you find more
+ information on how overrides are handled in the
+ ":ref:`bitbake-user-manual/bitbake-user-manual-metadata:conditional syntax (overrides)`"
section of the BitBake User Manual.
By default, the :term:`FILESOVERRIDES` variable is defined as::
@@ -2561,8 +2918,8 @@ system and gives an overview of their function and contents.
build system.
:term:`FILESPATH`
- The default set of directories the OpenEmbedded build system uses
- when searching for patches and files.
+ A colon-separated list specifying the default set of directories the
+ OpenEmbedded build system uses when searching for patches and files.
During the build process, BitBake searches each directory in
:term:`FILESPATH` in the specified order when looking for files and
@@ -2570,7 +2927,7 @@ system and gives an overview of their function and contents.
:term:`SRC_URI` statements.
The default value for the :term:`FILESPATH` variable is defined in the
- :ref:`ref-classes-base` class found in ``meta/classes`` in the
+ :ref:`ref-classes-base` class found in ``meta/classes-global`` in the
:term:`Source Directory`::
FILESPATH = "${@base_set_filespath(["${FILE_DIRNAME}/${BP}", \
@@ -2613,7 +2970,7 @@ system and gives an overview of their function and contents.
You can find out more about the patching process in the
":ref:`overview-manual/concepts:patching`" section
in the Yocto Project Overview and Concepts Manual and the
- ":ref:`dev-manual/common-tasks:patching code`" section in
+ ":ref:`dev-manual/new-recipe:patching code`" section in
the Yocto Project Development Tasks Manual. See the
:ref:`ref-tasks-patch` task as well.
@@ -2632,77 +2989,119 @@ system and gives an overview of their function and contents.
You define the :term:`FILESYSTEM_PERMS_TABLES` variable in the
``conf/local.conf`` file, which is found in the :term:`Build Directory`,
- to point to your custom
- ``fs-perms.txt``. You can specify more than a single file permissions
- setting table. The paths you specify to these files must be defined
- within the :term:`BBPATH` variable.
+ to point to your custom ``fs-perms.txt``. You can specify more than a
+ single file permissions setting table. The paths you specify to these
+ files must be defined within the :term:`BBPATH` variable.
For guidance on how to create your own file permissions settings
table file, examine the existing ``fs-perms.txt``.
+ :term:`FIT_ADDRESS_CELLS`
+ Specifies the value of the ``#address-cells`` value for the
+ description of the FIT image.
+
+ The default value is set to "1" by the :ref:`ref-classes-kernel-fitimage`
+ class, which corresponds to 32 bit addresses.
+
+ For platforms that need to set 64 bit addresses, for example in
+ :term:`UBOOT_LOADADDRESS` and :term:`UBOOT_ENTRYPOINT`, you need to
+ set this value to "2", as two 32 bit values (cells) will be needed
+ to represent such addresses.
+
+ Here is an example setting "0x400000000" as a load address::
+
+ FIT_ADDRESS_CELLS = "2"
+ UBOOT_LOADADDRESS= "0x04 0x00000000"
+
+ See `more details about #address-cells <https://elinux.org/Device_Tree_Usage#How_Addressing_Works>`__.
+
+ :term:`FIT_CONF_DEFAULT_DTB`
+ Specifies the default device tree binary (dtb) file for a FIT image
+ when multiple ones are provided.
+
+ This variable is used in the :ref:`ref-classes-kernel-fitimage` class.
+
:term:`FIT_DESC`
- Specifies the description string encoded into a fitImage. The default
- value is set by the :ref:`kernel-fitimage <ref-classes-kernel-fitimage>`
- class as follows::
+ Specifies the description string encoded into a FIT image. The
+ default value is set by the :ref:`ref-classes-kernel-fitimage` class as
+ follows::
FIT_DESC ?= "U-Boot fitImage for ${DISTRO_NAME}/${PV}/${MACHINE}"
:term:`FIT_GENERATE_KEYS`
- Decides whether to generate the keys for signing fitImage if they
- don't already exist. The keys are created in :term:`UBOOT_SIGN_KEYDIR`.
- The default value is 0.
+ Decides whether to generate the keys for signing the FIT image if
+ they don't already exist. The keys are created in
+ :term:`UBOOT_SIGN_KEYDIR`. The default value is set to "0"
+ by the :ref:`ref-classes-kernel-fitimage` class.
:term:`FIT_HASH_ALG`
- Specifies the hash algorithm used in creating the FIT Image. For e.g. sha256.
+ Specifies the hash algorithm used in creating the FIT Image.
+ This variable is set by default to "sha256" by the
+ :ref:`ref-classes-kernel-fitimage` class.
:term:`FIT_KERNEL_COMP_ALG`
- Compression algorithm to use for the kernel image inside the FIT Image.
- At present, the only supported values are "gzip" (default) or "none"
+ The compression algorithm to use for the kernel image inside the FIT Image.
+ At present, the only supported values are "gzip" (default), "lzo" or "none".
If you set this variable to anything other than "none" you may also need
to set :term:`FIT_KERNEL_COMP_ALG_EXTENSION`.
+ This variable is used in the :ref:`ref-classes-kernel-uboot` class.
+
:term:`FIT_KERNEL_COMP_ALG_EXTENSION`
File extension corresponding to :term:`FIT_KERNEL_COMP_ALG`. The default
- value is ".gz".
+ value is set ".gz" by the :ref:`ref-classes-kernel-uboot` class. If you
+ set :term:`FIT_KERNEL_COMP_ALG` to "lzo", you may want to set this
+ variable to ".lzo".
:term:`FIT_KEY_GENRSA_ARGS`
- Arguments to openssl genrsa for generating RSA private key for signing
- fitImage. The default value is "-F4". i.e. the public exponent 65537 to
- use.
+ Arguments to ``openssl genrsa`` for generating a RSA private key for
+ signing the FIT image. The default value is set to "-F4" by the
+ :ref:`ref-classes-kernel-fitimage` class.
:term:`FIT_KEY_REQ_ARGS`
- Arguments to openssl req for generating certificate for signing fitImage.
- The default value is "-batch -new". batch for non interactive mode
- and new for generating new keys.
+ Arguments to ``openssl req`` for generating a certificate for signing
+ the FIT image. The default value is "-batch -new" by the
+ :ref:`ref-classes-kernel-fitimage` class, "batch" for
+ non interactive mode and "new" for generating new keys.
:term:`FIT_KEY_SIGN_PKCS`
- Format for public key certificate used in signing fitImage.
- The default value is "x509".
+ Format for the public key certificate used for signing the FIT image.
+ The default value is set to "x509" by the
+ :ref:`ref-classes-kernel-fitimage` class.
:term:`FIT_SIGN_ALG`
Specifies the signature algorithm used in creating the FIT Image.
- For e.g. rsa2048.
+ This variable is set by default to "rsa2048" by the
+ :ref:`ref-classes-kernel-fitimage` class.
+
+ :term:`FIT_PAD_ALG`
+ Specifies the padding algorithm used in creating the FIT Image.
+ The default value is set to "pkcs-1.5" by the
+ :ref:`ref-classes-kernel-fitimage` class.
:term:`FIT_SIGN_INDIVIDUAL`
- If set to "1", then the :ref:`kernel-fitimage <ref-classes-kernel-fitimage>`
+ If set to "1", then the :ref:`ref-classes-kernel-fitimage`
class will sign the kernel, dtb and ramdisk images individually in addition
- to signing the fitImage itself. This could be useful if you are
+ to signing the FIT image itself. This could be useful if you are
intending to verify signatures in another context than booting via
U-Boot.
+ This variable is set to "0" by default.
+
:term:`FIT_SIGN_NUMBITS`
- Size of private key in number of bits used in fitImage. The default
- value is "2048".
+ Size of the private key used in the FIT image, in number of bits.
+ The default value for this variable is set to "2048"
+ by the :ref:`ref-classes-kernel-fitimage` class.
:term:`FONT_EXTRA_RDEPENDS`
- When inheriting the :ref:`fontcache <ref-classes-fontcache>` class,
+ When inheriting the :ref:`ref-classes-fontcache` class,
this variable specifies the runtime dependencies for font packages.
By default, the :term:`FONT_EXTRA_RDEPENDS` is set to "fontconfig-utils".
:term:`FONT_PACKAGES`
- When inheriting the :ref:`fontcache <ref-classes-fontcache>` class,
- this variable identifies packages containing font files that need to
- be cached by Fontconfig. By default, the :ref:`fontcache <ref-classes-fontcache>` class assumes
+ When inheriting the :ref:`ref-classes-fontcache` class, this variable
+ identifies packages containing font files that need to be cached by
+ Fontconfig. By default, the :ref:`ref-classes-fontcache` class assumes
that fonts are in the recipe's main package (i.e.
``${``\ :term:`PN`\ ``}``). Use this variable if fonts you
need are in a package other than that main package.
@@ -2745,13 +3144,20 @@ system and gives an overview of their function and contents.
Allows to specify an extra search path for ``.so`` files
in GLib related recipes using GObject introspection,
and which do not compile without this setting.
- See the ":ref:`dev-manual/common-tasks:enabling gobject introspection support`"
+ See the ":ref:`dev-manual/gobject-introspection:enabling gobject introspection support`"
section for details.
:term:`GITDIR`
The directory in which a local copy of a Git repository is stored
when it is cloned.
+ :term:`GITHUB_BASE_URI`
+ When inheriting the :ref:`ref-classes-github-releases`
+ class, specifies the base URL for fetching releases for the github
+ project you wish to fetch sources from. The default value is as follows::
+
+ GITHUB_BASE_URI ?= "https://github.com/${BPN}/${BPN}/releases/"
+
:term:`GLIBC_GENERATE_LOCALES`
Specifies the list of GLIBC locales to generate should you not wish
to generate all LIBC locals, which can be time consuming.
@@ -2762,13 +3168,79 @@ system and gives an overview of their function and contents.
:term:`IMAGE_LINGUAS` appropriately.
You can set :term:`GLIBC_GENERATE_LOCALES` in your ``local.conf`` file.
- By default, all locales are generated.
- ::
+ By default, all locales are generated::
GLIBC_GENERATE_LOCALES = "en_GB.UTF-8 en_US.UTF-8"
+ :term:`GO_IMPORT`
+ When inheriting the :ref:`ref-classes-go` class, this mandatory variable
+ sets the import path for the Go package that will be created for the code
+ to build. If you have a ``go.mod`` file in the source directory, this
+ typically matches the path in the ``module`` line in this file.
+
+ Other Go programs importing this package will use this path.
+
+ Here is an example setting from the
+ :yocto_git:`go-helloworld_0.1.bb </poky/tree/meta/recipes-extended/go-examples/go-helloworld_0.1.bb>`
+ recipe::
+
+ GO_IMPORT = "golang.org/x/example"
+
+ :term:`GO_INSTALL`
+ When inheriting the :ref:`ref-classes-go` class, this optional variable
+ specifies which packages in the sources should be compiled and
+ installed in the Go build space by the
+ `go install <https://go.dev/ref/mod#go-install>`__ command.
+
+ Here is an example setting from the
+ :oe_git:`crucible </meta-openembedded/tree/meta-oe/recipes-support/crucible/>`
+ recipe::
+
+ GO_INSTALL = "\
+ ${GO_IMPORT}/cmd/crucible \
+ ${GO_IMPORT}/cmd/habtool \
+ "
+
+ By default, :term:`GO_INSTALL` is defined as::
+
+ GO_INSTALL ?= "${GO_IMPORT}/..."
+
+ The ``...`` wildcard means that it will catch all
+ packages found in the sources.
+
+ See the :term:`GO_INSTALL_FILTEROUT` variable for
+ filtering out unwanted packages from the ones
+ found from the :term:`GO_INSTALL` value.
+
+ :term:`GO_INSTALL_FILTEROUT`
+ When using the Go "vendor" mechanism to bring in dependencies for a Go
+ package, the default :term:`GO_INSTALL` setting, which uses the ``...``
+ wildcard, will include the vendored packages in the build, which produces
+ incorrect results.
+
+ There are also some Go packages that are structured poorly, so that the
+ ``...`` wildcard results in building example or test code that should not
+ be included in the build, or could fail to build.
+
+ This optional variable allows for filtering out a subset of the sources.
+ It defaults to excluding everything under the ``vendor`` subdirectory
+ under package's main directory. This is the normal location for vendored
+ packages, but it can be overridden by a recipe to filter out other
+ subdirectories if needed.
+
+ :term:`GO_WORKDIR`
+ When using Go Modules, the current working directory must be the directory
+ containing the ``go.mod`` file, or one of its subdirectories. When the
+ ``go`` tool is used, it will automatically look for the ``go.mod`` file
+ in the Go working directory or in any parent directory, but not in
+ subdirectories.
+
+ When using the :ref:`ref-classes-go-mod` class to use Go modules,
+ the optional :term:`GO_WORKDIR` variable, defaulting to the value
+ of :term:`GO_IMPORT`, allows to specify a different Go working directory.
+
:term:`GROUPADD_PARAM`
- When inheriting the :ref:`useradd <ref-classes-useradd>` class,
+ When inheriting the :ref:`ref-classes-useradd` class,
this variable specifies for a package what parameters should be
passed to the ``groupadd`` command if you wish to add a group to the
system when the package is installed.
@@ -2777,11 +3249,19 @@ system and gives an overview of their function and contents.
GROUPADD_PARAM:${PN} = "-r netdev"
+ More than one group can be added by separating each set of different
+ groups' parameters with a semicolon.
+
+ Here is an example adding multiple groups from the ``useradd-example.bb``
+ file in the ``meta-skeleton`` layer::
+
+ GROUPADD_PARAM:${PN} = "-g 880 group1; -g 890 group2"
+
For information on the standard Linux shell command
``groupadd``, see https://linux.die.net/man/8/groupadd.
:term:`GROUPMEMS_PARAM`
- When inheriting the :ref:`useradd <ref-classes-useradd>` class,
+ When inheriting the :ref:`ref-classes-useradd` class,
this variable specifies for a package what parameters should be
passed to the ``groupmems`` command if you wish to modify the members
of a group when the package is installed.
@@ -2795,7 +3275,7 @@ system and gives an overview of their function and contents.
``local.conf`` or distribution configuration file to enable graphics
and serial in the menu.
- See the :ref:`grub-efi <ref-classes-grub-efi>` class for more
+ See the :ref:`ref-classes-grub-efi` class for more
information on how this variable is used.
:term:`GRUB_OPTS`
@@ -2804,7 +3284,7 @@ system and gives an overview of their function and contents.
multiple options.
The :term:`GRUB_OPTS` variable is optional. See the
- :ref:`grub-efi <ref-classes-grub-efi>` class for more information
+ :ref:`ref-classes-grub-efi` class for more information
on how this variable is used.
:term:`GRUB_TIMEOUT`
@@ -2812,16 +3292,18 @@ system and gives an overview of their function and contents.
GNU GRand Unified Bootloader (GRUB).
The :term:`GRUB_TIMEOUT` variable is optional. See the
- :ref:`grub-efi <ref-classes-grub-efi>` class for more information
+ :ref:`ref-classes-grub-efi` class for more information
on how this variable is used.
:term:`GTKIMMODULES_PACKAGES`
- When inheriting the
- :ref:`gtk-immodules-cache <ref-classes-gtk-immodules-cache>` class,
+ When inheriting the :ref:`ref-classes-gtk-immodules-cache` class,
this variable specifies the packages that contain the GTK+ input
method modules being installed when the modules are in packages other
than the main package.
+ :term:`HGDIR`
+ See :term:`bitbake:HGDIR` in the BitBake manual.
+
:term:`HOMEPAGE`
Website where more information about the software the recipe is
building can be found.
@@ -2917,7 +3399,7 @@ system and gives an overview of their function and contents.
:term:`ICECC_CLASS_DISABLE`
Identifies user classes that you do not want the Icecream distributed
compile support to consider. This variable is used by the
- :ref:`icecc <ref-classes-icecc>` class. You set this variable in
+ :ref:`ref-classes-icecc` class. You set this variable in
your ``local.conf`` file.
When you list classes using this variable, the recipes inheriting
@@ -2941,13 +3423,15 @@ system and gives an overview of their function and contents.
:term:`ICECC_ENV_EXEC`
Points to the ``icecc-create-env`` script that you provide. This
- variable is used by the :ref:`icecc <ref-classes-icecc>` class. You
+ variable is used by the :ref:`ref-classes-icecc` class. You
set this variable in your ``local.conf`` file.
If you do not point to a script that you provide, the OpenEmbedded
build system uses the default script provided by the
- ``icecc-create-env.bb`` recipe, which is a modified version and not
- the one that comes with ``icecc``.
+ :oe_git:`icecc-create-env_0.1.bb
+ </openembedded-core/tree/meta/recipes-devtools/icecc-create-env/icecc-create-env_0.1.bb>`
+ recipe, which is a modified version and not the one that comes with
+ ``icecream``.
:term:`ICECC_PARALLEL_MAKE`
Extra options passed to the ``make`` command during the
@@ -2976,13 +3460,13 @@ system and gives an overview of their function and contents.
:term:`ICECC_PATH`
The location of the ``icecc`` binary. You can set this variable in
your ``local.conf`` file. If your ``local.conf`` file does not define
- this variable, the :ref:`icecc <ref-classes-icecc>` class attempts
+ this variable, the :ref:`ref-classes-icecc` class attempts
to define it by locating ``icecc`` using ``which``.
:term:`ICECC_RECIPE_DISABLE`
Identifies user recipes that you do not want the Icecream distributed
compile support to consider. This variable is used by the
- :ref:`icecc <ref-classes-icecc>` class. You set this variable in
+ :ref:`ref-classes-icecc` class. You set this variable in
your ``local.conf`` file.
When you list recipes using this variable, you are excluding them
@@ -2994,7 +3478,7 @@ system and gives an overview of their function and contents.
:term:`PARALLEL_MAKE` variable that you want to
force remote distributed compilation on using the Icecream
distributed compile support. This variable is used by the
- :ref:`icecc <ref-classes-icecc>` class. You set this variable in
+ :ref:`ref-classes-icecc` class. You set this variable in
your ``local.conf`` file.
:term:`IMAGE_BASENAME`
@@ -3030,22 +3514,28 @@ system and gives an overview of their function and contents.
the same files into a ``boot`` directory within the target partition.
You can find information on how to use the Wic tool in the
- ":ref:`dev-manual/common-tasks:creating partitioned images using wic`"
+ ":ref:`dev-manual/wic:creating partitioned images using wic`"
section of the Yocto Project Development Tasks Manual. Reference
material for Wic is located in the
":doc:`/ref-manual/kickstart`" chapter.
- :term:`IMAGE_CLASSES`
- A list of classes that all images should inherit. You typically use
- this variable to specify the list of classes that register the
- different types of images the OpenEmbedded build system creates.
+ :term:`IMAGE_BUILDINFO_FILE`
+ When using the :ref:`ref-classes-image-buildinfo` class,
+ specifies the file in the image to write the build information into. The
+ default value is "``${sysconfdir}/buildinfo``".
- The default value for :term:`IMAGE_CLASSES` is ``image_types``. You can
- set this variable in your ``local.conf`` or in a distribution
- configuration file.
+ :term:`IMAGE_BUILDINFO_VARS`
+ When using the :ref:`ref-classes-image-buildinfo` class,
+ specifies the list of variables to include in the `Build Configuration`
+ section of the output file (as a space-separated list). Defaults to
+ ":term:`DISTRO` :term:`DISTRO_VERSION`".
- For more information, see ``meta/classes/image_types.bbclass`` in the
- :term:`Source Directory`.
+ :term:`IMAGE_CLASSES`
+ A list of classes that all images should inherit. This is typically used
+ to enable functionality across all image recipes.
+
+ Classes specified in :term:`IMAGE_CLASSES` must be located in the
+ ``classes-recipe/`` or ``classes/`` subdirectories.
:term:`IMAGE_CMD`
Specifies the command to create the image file for a specific image
@@ -3060,8 +3550,8 @@ system and gives an overview of their function and contents.
You typically do not need to set this variable unless you are adding
support for a new image type. For more examples on how to set this
- variable, see the :ref:`image_types <ref-classes-image_types>`
- class file, which is ``meta/classes/image_types.bbclass``.
+ variable, see the :ref:`ref-classes-image_types`
+ class file, which is ``meta/classes-recipe/image_types.bbclass``.
:term:`IMAGE_DEVICE_TABLES`
Specifies one or more files that contain custom device tables that
@@ -3102,7 +3592,7 @@ system and gives an overview of their function and contents.
the same files into a ``boot`` directory within the target partition.
You can find information on how to use the Wic tool in the
- ":ref:`dev-manual/common-tasks:creating partitioned images using wic`"
+ ":ref:`dev-manual/wic:creating partitioned images using wic`"
section of the Yocto Project Development Tasks Manual. Reference
material for Wic is located in the
":doc:`/ref-manual/kickstart`" chapter.
@@ -3123,7 +3613,7 @@ system and gives an overview of their function and contents.
the ":ref:`ref-features-image`" section.
For an example that shows how to customize your image by using this
- variable, see the ":ref:`dev-manual/common-tasks:customizing images using custom \`\`image_features\`\` and \`\`extra_image_features\`\``"
+ variable, see the ":ref:`dev-manual/customizing-images:customizing images using custom \`\`image_features\`\` and \`\`extra_image_features\`\``"
section in the Yocto Project Development Tasks Manual.
:term:`IMAGE_FSTYPES`
@@ -3150,16 +3640,15 @@ system and gives an overview of their function and contents.
:term:`IMAGE_INSTALL`
Used by recipes to specify the packages to install into an image
- through the :ref:`image <ref-classes-image>` class. Use the
+ through the :ref:`ref-classes-image` class. Use the
:term:`IMAGE_INSTALL` variable with care to avoid ordering issues.
Image recipes set :term:`IMAGE_INSTALL` to specify the packages to
install into an image through :ref:`ref-classes-image`. Additionally,
- there are "helper" classes such as the
- :ref:`core-image <ref-classes-core-image>` class which can
- take lists used with :term:`IMAGE_FEATURES` and turn them into
- auto-generated entries in :term:`IMAGE_INSTALL` in addition to its
- default contents.
+ there are "helper" classes such as the :ref:`ref-classes-core-image`
+ class which can take lists used with :term:`IMAGE_FEATURES` and turn
+ them into auto-generated entries in :term:`IMAGE_INSTALL` in addition
+ to its default contents.
When you use this variable, it is best to use it as follows::
@@ -3176,26 +3665,26 @@ system and gives an overview of their function and contents.
image, do not use the :term:`IMAGE_INSTALL` variable to specify
packages for installation. Instead, use the
:term:`PACKAGE_INSTALL` variable, which
- allows the initial RAM filesystem (initramfs) recipe to use a
+ allows the initial RAM filesystem (:term:`Initramfs`) recipe to use a
fixed set of packages and not be affected by :term:`IMAGE_INSTALL`.
- For information on creating an initramfs, see the
- ":ref:`dev-manual/common-tasks:building an initial ram filesystem (initramfs) image`"
+ For information on creating an :term:`Initramfs`, see the
+ ":ref:`dev-manual/building:building an initial ram filesystem (Initramfs) image`"
section in the Yocto Project Development Tasks Manual.
- Using :term:`IMAGE_INSTALL` with the
- :ref:`+= <bitbake:bitbake-user-manual/bitbake-user-manual-metadata:appending (+=) and prepending (=+) with spaces>`
+ :ref:`+= <bitbake-user-manual/bitbake-user-manual-metadata:appending (+=) and prepending (=+) with spaces>`
BitBake operator within the ``/conf/local.conf`` file or from
- within an image recipe is not recommended. Use of this operator
- in these ways can cause ordering issues. Since
- :ref:`ref-classes-core-image` sets :term:`IMAGE_INSTALL` to a default
- value using the
- :ref:`?= <bitbake:bitbake-user-manual/bitbake-user-manual-metadata:setting a default value (?=)>`
+ within an image recipe is not recommended. Use of this operator in
+ these ways can cause ordering issues. Since
+ :ref:`ref-classes-core-image` sets :term:`IMAGE_INSTALL` to a
+ default value using the
+ :ref:`?= <bitbake-user-manual/bitbake-user-manual-metadata:setting a default value (?=)>`
operator, using a ``+=`` operation against :term:`IMAGE_INSTALL`
results in unexpected behavior when used within
- ``conf/local.conf``. Furthermore, the same operation from
- within an image recipe may or may not succeed depending on the
- specific situation. In both these cases, the behavior is
- contrary to how most users expect the ``+=`` operator to work.
+ ``conf/local.conf``. Furthermore, the same operation from within an
+ image recipe may or may not succeed depending on the specific
+ situation. In both these cases, the behavior is contrary to how
+ most users expect the ``+=`` operator to work.
:term:`IMAGE_LINGUAS`
Specifies the list of locales to install into the image during the
@@ -3222,11 +3711,34 @@ system and gives an overview of their function and contents.
:term:`IMAGE_LINK_NAME`
The name of the output image symlink (which does not include
the version part as :term:`IMAGE_NAME` does). The default value
- is derived using the :term:`IMAGE_BASENAME` and :term:`MACHINE`
- variables::
+ is derived using the :term:`IMAGE_BASENAME` and
+ :term:`IMAGE_MACHINE_SUFFIX` variables::
+
+ IMAGE_LINK_NAME ?= "${IMAGE_BASENAME}${IMAGE_MACHINE_SUFFIX}"
+
+ .. note::
- IMAGE_LINK_NAME ?= "${IMAGE_BASENAME}-${MACHINE}"
+ It is possible to set this to "" to disable symlink creation,
+ however, you also need to set :term:`IMAGE_NAME` to still have
+ a reasonable value e.g.::
+ IMAGE_LINK_NAME = ""
+ IMAGE_NAME = "${IMAGE_BASENAME}${IMAGE_MACHINE_SUFFIX}${IMAGE_VERSION_SUFFIX}"
+
+ :term:`IMAGE_MACHINE_SUFFIX`
+ Specifies the by default machine-specific suffix for image file names
+ (before the extension). The default value is set as follows::
+
+ IMAGE_MACHINE_SUFFIX ??= "-${MACHINE}"
+
+ The default :term:`DEPLOY_DIR_IMAGE` already has a :term:`MACHINE`
+ subdirectory, so you may find it unnecessary to also include this suffix
+ in the name of every image file. If you prefer to remove the suffix you
+ can set this variable to an empty string::
+
+ IMAGE_MACHINE_SUFFIX = ""
+
+ (Not to be confused with :term:`IMAGE_NAME_SUFFIX`.)
:term:`IMAGE_MANIFEST`
The manifest file for the image. This file lists all the installed
@@ -3247,19 +3759,18 @@ system and gives an overview of their function and contents.
section in the Yocto Project Overview and Concepts Manual.
:term:`IMAGE_NAME`
- The name of the output image files minus the extension. This variable
- is derived using the :term:`IMAGE_BASENAME`,
- :term:`MACHINE`, and :term:`IMAGE_VERSION_SUFFIX`
- variables::
+ The name of the output image files minus the extension. By default
+ this variable is set using the :term:`IMAGE_LINK_NAME`, and
+ :term:`IMAGE_VERSION_SUFFIX` variables::
- IMAGE_NAME ?= "${IMAGE_BASENAME}-${MACHINE}${IMAGE_VERSION_SUFFIX}"
+ IMAGE_NAME ?= "${IMAGE_LINK_NAME}${IMAGE_VERSION_SUFFIX}"
:term:`IMAGE_NAME_SUFFIX`
- Suffix used for the image output filename - defaults to ``".rootfs"``
+ Suffix used for the image output filename --- defaults to ``".rootfs"``
to distinguish the image file from other files created during image
building; however if this suffix is redundant or not desired you can
clear the value of this variable (set the value to ""). For example,
- this is typically cleared in initramfs image recipes.
+ this is typically cleared in :term:`Initramfs` image recipes.
:term:`IMAGE_OVERHEAD_FACTOR`
Defines a multiplier that the build system applies to the initial
@@ -3292,19 +3803,11 @@ system and gives an overview of their function and contents.
:term:`IMAGE_PKGTYPE`
Defines the package type (i.e. DEB, RPM, IPK, or TAR) used by the
OpenEmbedded build system. The variable is defined appropriately by
- the :ref:`package_deb <ref-classes-package_deb>`,
- :ref:`package_rpm <ref-classes-package_rpm>`,
- :ref:`package_ipk <ref-classes-package_ipk>`, or
- :ref:`package_tar <ref-classes-package_tar>` class.
-
- .. note::
+ the :ref:`ref-classes-package_deb`, :ref:`ref-classes-package_rpm`,
+ or :ref:`ref-classes-package_ipk` class.
- The ``package_tar`` class is broken and is not supported. It is
- recommended that you do not use it.
-
- The :ref:`populate_sdk_* <ref-classes-populate-sdk-*>` and
- :ref:`image <ref-classes-image>` classes use the :term:`IMAGE_PKGTYPE`
- for packaging up images and SDKs.
+ The :ref:`ref-classes-populate-sdk-*` and :ref:`ref-classes-image`
+ classes use the :term:`IMAGE_PKGTYPE` for packaging up images and SDKs.
You should not set the :term:`IMAGE_PKGTYPE` manually. Rather, the
variable is set indirectly through the appropriate
@@ -3322,9 +3825,9 @@ system and gives an overview of their function and contents.
:term:`IMAGE_POSTPROCESS_COMMAND`
Specifies a list of functions to call once the OpenEmbedded build
system creates the final image output files. You can specify
- functions separated by semicolons::
+ functions separated by spaces::
- IMAGE_POSTPROCESS_COMMAND += "function; ... "
+ IMAGE_POSTPROCESS_COMMAND += "function"
If you need to pass the root filesystem path to a command within the
function, you can use ``${IMAGE_ROOTFS}``, which points to the
@@ -3335,9 +3838,9 @@ system and gives an overview of their function and contents.
:term:`IMAGE_PREPROCESS_COMMAND`
Specifies a list of functions to call before the OpenEmbedded build
system creates the final image output files. You can specify
- functions separated by semicolons::
+ functions separated by spaces::
- IMAGE_PREPROCESS_COMMAND += "function; ... "
+ IMAGE_PREPROCESS_COMMAND += "function"
If you need to pass the root filesystem path to a command within the
function, you can use ``${IMAGE_ROOTFS}``, which points to the
@@ -3401,7 +3904,7 @@ system and gives an overview of their function and contents.
:term:`IMAGE_TYPEDEP`
Specifies a dependency from one image type on another. Here is an
- example from the :ref:`image-live <ref-classes-image-live>` class::
+ example from the :ref:`ref-classes-image-live` class::
IMAGE_TYPEDEP:live = "ext3"
@@ -3457,7 +3960,7 @@ system and gives an overview of their function and contents.
- wic.lzma
For more information about these types of images, see
- ``meta/classes/image_types*.bbclass`` in the :term:`Source Directory`.
+ ``meta/classes-recipe/image_types*.bbclass`` in the :term:`Source Directory`.
:term:`IMAGE_VERSION_SUFFIX`
Version suffix that is part of the default :term:`IMAGE_NAME` and
@@ -3468,60 +3971,32 @@ system and gives an overview of their function and contents.
the build artifacts.
:term:`IMGDEPLOYDIR`
- When inheriting the :ref:`image <ref-classes-image>` class directly or
- through the :ref:`core-image <ref-classes-core-image>` class, the
+ When inheriting the :ref:`ref-classes-image` class directly or
+ through the :ref:`ref-classes-core-image` class, the
:term:`IMGDEPLOYDIR` points to a temporary work area for deployed files
that is set in the ``image`` class as follows::
IMGDEPLOYDIR = "${WORKDIR}/deploy-${PN}-image-complete"
- Recipes inheriting the ``image`` class should copy files to be
- deployed into :term:`IMGDEPLOYDIR`, and the class will take care of
- copying them into :term:`DEPLOY_DIR_IMAGE` afterwards.
-
- :term:`INC_PR`
- Helps define the recipe revision for recipes that share a common
- ``include`` file. You can think of this variable as part of the
- recipe revision as set from within an include file.
-
- Suppose, for example, you have a set of recipes that are used across
- several projects. And, within each of those recipes the revision (its
- :term:`PR` value) is set accordingly. In this case, when
- the revision of those recipes changes, the burden is on you to find
- all those recipes and be sure that they get changed to reflect the
- updated version of the recipe. In this scenario, it can get
- complicated when recipes that are used in many places and provide
- common functionality are upgraded to a new revision.
-
- A more efficient way of dealing with this situation is to set the
- :term:`INC_PR` variable inside the ``include`` files that the recipes
- share and then expand the :term:`INC_PR` variable within the recipes to
- help define the recipe revision.
-
- The following provides an example that shows how to use the
- :term:`INC_PR` variable given a common ``include`` file that defines the
- variable. Once the variable is defined in the ``include`` file, you
- can use the variable to set the :term:`PR` values in each recipe. You
- will notice that when you set a recipe's :term:`PR` you can provide more
- granular revisioning by appending values to the :term:`INC_PR` variable::
-
- recipes-graphics/xorg-font/xorg-font-common.inc:INC_PR = "r2"
- recipes-graphics/xorg-font/encodings_1.0.4.bb:PR = "${INC_PR}.1"
- recipes-graphics/xorg-font/font-util_1.3.0.bb:PR = "${INC_PR}.0"
- recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
-
- The
- first line of the example establishes the baseline revision to be
- used for all recipes that use the ``include`` file. The remaining
- lines in the example are from individual recipes and show how the
- :term:`PR` value is set.
+ Recipes inheriting the :ref:`ref-classes-image` class should copy
+ files to be deployed into :term:`IMGDEPLOYDIR`, and the class will take
+ care of copying them into :term:`DEPLOY_DIR_IMAGE` afterwards.
:term:`INCOMPATIBLE_LICENSE`
Specifies a space-separated list of license names (as they would
appear in :term:`LICENSE`) that should be excluded
- from the build. Recipes that provide no alternatives to listed
+ from the build (if set globally), or from an image (if set locally
+ in an image recipe).
+
+ When the variable is set globally, recipes that provide no alternatives to listed
incompatible licenses are not built. Packages that are individually
licensed with the specified incompatible licenses will be deleted.
+ Most of the time this does not allow a feasible build (because it becomes impossible
+ to satisfy build time dependencies), so the recommended way to
+ implement license restrictions is to set the variable in specific
+ image recipes where the restrictions must apply. That way there
+ are no build time restrictions, but the license check is still
+ performed when the image's filesystem is assembled from packages.
There is some support for wildcards in this variable's value,
however it is restricted to specific licenses. Currently only
@@ -3540,27 +4015,40 @@ system and gives an overview of their function and contents.
Although you can use other settings, you might be required to
- remove dependencies on or provide alternatives to components that
+ remove dependencies on (or provide alternatives to) components that
are required to produce a functional system image.
+ :term:`INCOMPATIBLE_LICENSE_EXCEPTIONS`
+ Specifies a space-separated list of package and license pairs that
+ are allowed to be used even if the license is specified in
+ :term:`INCOMPATIBLE_LICENSE`. The package and license pairs are
+ separated using a colon. Example::
+
+ INCOMPATIBLE_LICENSE_EXCEPTIONS = "gdbserver:GPL-3.0-only gdbserver:LGPL-3.0-only"
+
:term:`INHERIT`
Causes the named class or classes to be inherited globally. Anonymous
functions in the class or classes are not executed for the base
configuration and in each individual recipe. The OpenEmbedded build
system ignores changes to :term:`INHERIT` in individual recipes.
+ Classes inherited using :term:`INHERIT` must be located in the
+ ``classes-global/`` or ``classes/`` subdirectories.
For more information on :term:`INHERIT`, see the
- :ref:`bitbake:bitbake-user-manual/bitbake-user-manual-metadata:\`\`inherit\`\` configuration directive`"
+ :ref:`bitbake-user-manual/bitbake-user-manual-metadata:\`\`inherit\`\` configuration directive`"
section in the BitBake User Manual.
:term:`INHERIT_DISTRO`
Lists classes that will be inherited at the distribution level. It is
unlikely that you want to edit this variable.
+ Classes specified in :term:`INHERIT_DISTRO` must be located in the
+ ``classes-global/`` or ``classes/`` subdirectories.
+
The default value of the variable is set as follows in the
``meta/conf/distro/defaultsetup.conf`` file::
- INHERIT_DISTRO ?= "debian devshell sstate license"
+ INHERIT_DISTRO ?= "debian devshell sstate license remove-libtool create-spdx"
:term:`INHIBIT_DEFAULT_DEPS`
Prevents the default dependencies, namely the C compiler and standard
@@ -3605,10 +4093,9 @@ system and gives an overview of their function and contents.
:term:`INHIBIT_SYSROOT_STRIP` variable to "1" in your recipe, you inhibit
this stripping.
- If you want to use this variable, include the
- :ref:`staging <ref-classes-staging>` class. This class uses a
- ``sys_strip()`` function to test for the variable and acts
- accordingly.
+ If you want to use this variable, include the :ref:`ref-classes-staging`
+ class. This class uses a ``sys_strip()`` function to test for the variable
+ and acts accordingly.
.. note::
@@ -3618,37 +4105,65 @@ system and gives an overview of their function and contents.
even if the toolchain's binaries are strippable, there are other files
needed for the build that are not strippable.
+ :term:`INIT_MANAGER`
+ Specifies the system init manager to use. Available options are:
+
+ - ``sysvinit``
+ - ``systemd``
+ - ``mdev-busybox``
+
+ With ``sysvinit``, the init manager is set to
+ :wikipedia:`SysVinit <Init#SysV-style>`, the traditional UNIX init
+ system. This is the default choice in the Poky distribution, together with
+ the Udev device manager (see the ":ref:`device-manager`" section).
+
+ With ``systemd``, the init manager becomes :wikipedia:`systemd <Systemd>`,
+ which comes with the :wikipedia:`udev <Udev>` device manager.
+
+ With ``mdev-busybox``, the init manager becomes the much simpler BusyBox
+ init, together with the BusyBox mdev device manager. This is the simplest
+ and lightest solution, and probably the best choice for low-end systems
+ with a rather slow CPU and a limited amount of RAM.
+
+ More concretely, this is used to include
+ ``conf/distro/include/init-manager-${INIT_MANAGER}.inc`` into the global
+ configuration. You can have a look at the
+ :yocto_git:`meta/conf/distro/include/init-manager-*.inc </poky/tree/meta/conf/distro/include>`
+ files for more information, and also the ":ref:`init-manager`"
+ section in the Yocto Project Development Tasks Manual.
+
:term:`INITRAMFS_DEPLOY_DIR_IMAGE`
- Indicates the deploy directory used by ``do_bundle_initramfs`` where the
- :term:`INITRAMFS_IMAGE` will be fetched from.
- This variable is set by default to ``${DEPLOY_DIR_IMAGE}`` in the
- :ref:`kernel <ref-classes-kernel>` class and it's only meant to be changed
- when building an initramfs image from a separate multiconfig via :term:`INITRAMFS_MULTICONFIG`.
+ Indicates the deploy directory used by :ref:`ref-tasks-bundle_initramfs`
+ where the :term:`INITRAMFS_IMAGE` will be fetched from. This variable is
+ set by default to ``${DEPLOY_DIR_IMAGE}`` in the
+ :ref:`ref-classes-kernel` class and it's only meant to be changed when
+ building an :term:`Initramfs` image from a separate multiconfig via
+ :term:`INITRAMFS_MULTICONFIG`.
:term:`INITRAMFS_FSTYPES`
Defines the format for the output image of an initial RAM filesystem
- (initramfs), which is used during boot. Supported formats are the
+ (:term:`Initramfs`), which is used during boot. Supported formats are the
same as those supported by the
:term:`IMAGE_FSTYPES` variable.
The default value of this variable, which is set in the
``meta/conf/bitbake.conf`` configuration file in the
:term:`Source Directory`, is "cpio.gz". The Linux kernel's
- initramfs mechanism, as opposed to the initial RAM filesystem
- `initrd <https://en.wikipedia.org/wiki/Initrd>`__ mechanism, expects
+ :term:`Initramfs` mechanism, as opposed to the initial RAM filesystem
+ :wikipedia:`initrd <Initrd>` mechanism, expects
an optionally compressed cpio archive.
:term:`INITRAMFS_IMAGE`
Specifies the :term:`PROVIDES` name of an image
- recipe that is used to build an initial RAM filesystem (initramfs)
+ recipe that is used to build an initial RAM filesystem (:term:`Initramfs`)
image. In other words, the :term:`INITRAMFS_IMAGE` variable causes an
additional recipe to be built as a dependency to whatever root
filesystem recipe you might be using (e.g. ``core-image-sato``). The
- initramfs image recipe you provide should set
+ :term:`Initramfs` image recipe you provide should set
:term:`IMAGE_FSTYPES` to
:term:`INITRAMFS_FSTYPES`.
- An initramfs image provides a temporary root filesystem used for
+ An :term:`Initramfs` image provides a temporary root filesystem used for
early system initialization (e.g. loading of modules needed to locate
and mount the "real" root filesystem).
@@ -3656,24 +4171,24 @@ system and gives an overview of their function and contents.
See the ``meta/recipes-core/images/core-image-minimal-initramfs.bb``
recipe in the :term:`Source Directory`
- for an example initramfs recipe. To select this sample recipe as
- the one built to provide the initramfs image, set :term:`INITRAMFS_IMAGE`
+ for an example :term:`Initramfs` recipe. To select this sample recipe as
+ the one built to provide the :term:`Initramfs` image, set :term:`INITRAMFS_IMAGE`
to "core-image-minimal-initramfs".
You can also find more information by referencing the
- ``meta-poky/conf/local.conf.sample.extended`` configuration file in
- the Source Directory, the :ref:`image <ref-classes-image>` class,
- and the :ref:`kernel <ref-classes-kernel>` class to see how to use
- the :term:`INITRAMFS_IMAGE` variable.
+ ``meta-poky/conf/templates/default/local.conf.sample.extended``
+ configuration file in the Source Directory, the :ref:`ref-classes-image`
+ class, and the :ref:`ref-classes-kernel` class to see how to use the
+ :term:`INITRAMFS_IMAGE` variable.
If :term:`INITRAMFS_IMAGE` is empty, which is the default, then no
- initramfs image is built.
+ :term:`Initramfs` image is built.
For more information, you can also see the
:term:`INITRAMFS_IMAGE_BUNDLE`
variable, which allows the generated image to be bundled inside the
- kernel image. Additionally, for information on creating an initramfs
- image, see the ":ref:`dev-manual/common-tasks:building an initial ram filesystem (initramfs) image`" section
+ kernel image. Additionally, for information on creating an :term:`Initramfs`
+ image, see the ":ref:`dev-manual/building:building an initial ram filesystem (Initramfs) image`" section
in the Yocto Project Development Tasks Manual.
:term:`INITRAMFS_IMAGE_BUNDLE`
@@ -3682,37 +4197,36 @@ system and gives an overview of their function and contents.
extra pass
(:ref:`ref-tasks-bundle_initramfs`) during
kernel compilation in order to build a single binary that contains
- both the kernel image and the initial RAM filesystem (initramfs)
+ both the kernel image and the initial RAM filesystem (:term:`Initramfs`)
image. This makes use of the
:term:`CONFIG_INITRAMFS_SOURCE` kernel
feature.
.. note::
- Bundling the initramfs with the kernel conflates the code in the
- initramfs with the GPLv2 licensed Linux kernel binary. Thus only GPLv2
- compatible software may be part of a bundled initramfs.
+ Bundling the :term:`Initramfs` with the kernel conflates the code in the
+ :term:`Initramfs` with the GPLv2 licensed Linux kernel binary. Thus only GPLv2
+ compatible software may be part of a bundled :term:`Initramfs`.
.. note::
- Using an extra compilation pass to bundle the initramfs avoids a
- circular dependency between the kernel recipe and the initramfs
- recipe should the initramfs include kernel modules. Should that be
- the case, the initramfs recipe depends on the kernel for the
- kernel modules, and the kernel depends on the initramfs recipe
- since the initramfs is bundled inside the kernel image.
+ Using an extra compilation pass to bundle the :term:`Initramfs` avoids a
+ circular dependency between the kernel recipe and the :term:`Initramfs`
+ recipe should the :term:`Initramfs` include kernel modules. Should that be
+ the case, the :term:`Initramfs` recipe depends on the kernel for the
+ kernel modules, and the kernel depends on the :term:`Initramfs` recipe
+ since the :term:`Initramfs` is bundled inside the kernel image.
The combined binary is deposited into the ``tmp/deploy`` directory,
which is part of the :term:`Build Directory`.
Setting the variable to "1" in a configuration file causes the
OpenEmbedded build system to generate a kernel image with the
- initramfs specified in :term:`INITRAMFS_IMAGE` bundled within::
+ :term:`Initramfs` specified in :term:`INITRAMFS_IMAGE` bundled within::
INITRAMFS_IMAGE_BUNDLE = "1"
- By default, the
- :ref:`kernel <ref-classes-kernel>` class sets this variable to a
+ By default, the :ref:`ref-classes-kernel` class sets this variable to a
null string as follows::
INITRAMFS_IMAGE_BUNDLE ?= ""
@@ -3723,14 +4237,27 @@ system and gives an overview of their function and contents.
configuration file. You cannot set the variable in a recipe file.
See the
- :yocto_git:`local.conf.sample.extended </poky/tree/meta-poky/conf/local.conf.sample.extended>`
+ :yocto_git:`local.conf.sample.extended </poky/tree/meta-poky/conf/templates/default/local.conf.sample.extended>`
file for additional information. Also, for information on creating an
- initramfs, see the ":ref:`dev-manual/common-tasks:building an initial ram filesystem (initramfs) image`" section
+ :term:`Initramfs`, see the ":ref:`dev-manual/building:building an initial ram filesystem (Initramfs) image`" section
in the Yocto Project Development Tasks Manual.
+ :term:`INITRAMFS_IMAGE_NAME`
+
+ This value needs to stay in sync with :term:`IMAGE_LINK_NAME`, but with
+ :term:`INITRAMFS_IMAGE` instead of :term:`IMAGE_BASENAME`. The default value
+ is set as follows:
+
+ INITRAMFS_IMAGE_NAME ?= "${@['${INITRAMFS_IMAGE}${IMAGE_MACHINE_SUFFIX}', ''][d.getVar('INITRAMFS_IMAGE') == '']}"
+
+ That is, if :term:`INITRAMFS_IMAGE` is set, the value of
+ :term:`INITRAMFS_IMAGE_NAME` will be set based upon
+ :term:`INITRAMFS_IMAGE` and :term:`IMAGE_MACHINE_SUFFIX`.
+
+
:term:`INITRAMFS_LINK_NAME`
The link name of the initial RAM filesystem image. This variable is
- set in the ``meta/classes/kernel-artifact-names.bbclass`` file as
+ set in the ``meta/classes-recipe/kernel-artifact-names.bbclass`` file as
follows::
INITRAMFS_LINK_NAME ?= "initramfs-${KERNEL_ARTIFACT_LINK_NAME}"
@@ -3745,33 +4272,31 @@ system and gives an overview of their function and contents.
information.
:term:`INITRAMFS_MULTICONFIG`
- Defines the multiconfig to create a multiconfig dependency to be used by the :ref:`kernel <ref-classes-kernel>` class.
+ Defines the multiconfig to create a multiconfig dependency to be used by
+ the :ref:`ref-classes-kernel` class.
This allows the kernel to bundle an :term:`INITRAMFS_IMAGE` coming from
a separate multiconfig, this is meant to be used in addition to :term:`INITRAMFS_DEPLOY_DIR_IMAGE`.
- For more information on how to bundle an initramfs image from a separate
- multiconfig see the ":ref:`dev-manual/common-tasks:Bundling an Initramfs Image From a Separate Multiconfig`"
+ For more information on how to bundle an :term:`Initramfs` image from a separate
+ multiconfig see the ":ref:`dev-manual/building:Bundling an Initramfs Image From a Separate Multiconfig`"
section in the Yocto Project Development Tasks Manual.
:term:`INITRAMFS_NAME`
The base name of the initial RAM filesystem image. This variable is
- set in the ``meta/classes/kernel-artifact-names.bbclass`` file as
+ set in the ``meta/classes-recipe/kernel-artifact-names.bbclass`` file as
follows::
INITRAMFS_NAME ?= "initramfs-${KERNEL_ARTIFACT_NAME}"
- The value of the :term:`KERNEL_ARTIFACT_NAME`
- variable, which is set in the same file, has the following value::
-
- KERNEL_ARTIFACT_NAME ?= "${PKGE}-${PKGV}-${PKGR}-${MACHINE}${IMAGE_VERSION_SUFFIX}"
+ See :term:`KERNEL_ARTIFACT_NAME` for additional information.
:term:`INITRD`
Indicates list of filesystem images to concatenate and use as an
initial RAM disk (``initrd``).
The :term:`INITRD` variable is an optional variable used with the
- :ref:`image-live <ref-classes-image-live>` class.
+ :ref:`ref-classes-image-live` class.
:term:`INITRD_IMAGE`
When building a "live" bootable image (i.e. when
@@ -3780,8 +4305,7 @@ system and gives an overview of their function and contents.
provide the initial RAM disk image. The default value is
"core-image-minimal-initramfs".
- See the :ref:`image-live <ref-classes-image-live>` class for more
- information.
+ See the :ref:`ref-classes-image-live` class for more information.
:term:`INITSCRIPT_NAME`
The filename of the initialization script as installed to
@@ -3808,7 +4332,7 @@ system and gives an overview of their function and contents.
in initlevels 2 and 5, and stops the script in levels 0, 1 and 6.
The variable's default value is "defaults", which is set in the
- :ref:`update-rc.d <ref-classes-update-rc.d>` class.
+ :ref:`ref-classes-update-rc.d` class.
The value in :term:`INITSCRIPT_PARAMS` is passed through to the
``update-rc.d`` command. For more information on valid parameters,
@@ -3869,24 +4393,23 @@ system and gives an overview of their function and contents.
This variable is also used from the kernel's append file to identify
the kernel branch specific to a particular machine or target
hardware. Continuing with the previous kernel example, the kernel's
- append file (i.e. ``linux-yocto_4.12.bbappend``) is located in the
+ append file is located in the
BSP layer for a given machine. For example, the append file for the
- Beaglebone, EdgeRouter, and generic versions of both 32 and 64-bit IA
+ Beaglebone and generic versions of both 32 and 64-bit IA
machines (``meta-yocto-bsp``) is named
- ``meta-yocto-bsp/recipes-kernel/linux/linux-yocto_4.12.bbappend``.
+ ``meta-yocto-bsp/recipes-kernel/linux/linux-yocto_6.1.bbappend``.
Here are the related statements from that append file::
- KBRANCH:genericx86 = "standard/base"
- KBRANCH:genericx86-64 = "standard/base"
- KBRANCH:edgerouter = "standard/edgerouter"
- KBRANCH:beaglebone = "standard/beaglebone"
+ KBRANCH:genericx86 = "v6.1/standard/base"
+ KBRANCH:genericx86-64 = "v6.1/standard/base"
+ KBRANCH:beaglebone-yocto = "v6.1/standard/beaglebone"
The :term:`KBRANCH` statements
identify the kernel branch to use when building for each supported
BSP.
:term:`KBUILD_DEFCONFIG`
- When used with the :ref:`kernel-yocto <ref-classes-kernel-yocto>`
+ When used with the :ref:`ref-classes-kernel-yocto`
class, specifies an "in-tree" kernel configuration file for use
during a kernel build.
@@ -3902,9 +4425,9 @@ system and gives an overview of their function and contents.
To use the variable, set it in the append file for your kernel recipe
using the following form::
- KBUILD_DEFCONFIG_KMACHINE ?= defconfig_file
+ KBUILD_DEFCONFIG:<machine> ?= "defconfig_file"
- Here is an example from a "raspberrypi2" :term:`KMACHINE` build that uses
+ Here is an example from a "raspberrypi2" :term:`MACHINE` build that uses
a ``defconfig`` file named "bcm2709_defconfig"::
KBUILD_DEFCONFIG:raspberrypi2 = "bcm2709_defconfig"
@@ -3919,7 +4442,7 @@ system and gives an overview of their function and contents.
section in the Yocto Project Linux Kernel Development Manual.
:term:`KCONFIG_MODE`
- When used with the :ref:`kernel-yocto <ref-classes-kernel-yocto>`
+ When used with the :ref:`ref-classes-kernel-yocto`
class, specifies the kernel configuration values to use for options
not specified in the provided ``defconfig`` file. Valid options are::
@@ -3955,11 +4478,10 @@ system and gives an overview of their function and contents.
KCONFIG_MODE = "alldefconfig"
-
:term:`KERNEL_ALT_IMAGETYPE`
Specifies an alternate kernel image type for creation in addition to
- the kernel image type specified using the
- :term:`KERNEL_IMAGETYPE` variable.
+ the kernel image type specified using the :term:`KERNEL_IMAGETYPE` and
+ :term:`KERNEL_IMAGETYPES` variables.
:term:`KERNEL_ARTIFACT_NAME`
Specifies the name of all of the build artifacts. You can change the
@@ -3967,28 +4489,48 @@ system and gives an overview of their function and contents.
variable.
The value of :term:`KERNEL_ARTIFACT_NAME`, which is set in the
- ``meta/classes/kernel-artifact-names.bbclass`` file, has the
+ ``meta/classes-recipe/kernel-artifact-names.bbclass`` file, has the
following default value::
- KERNEL_ARTIFACT_NAME ?= "${PKGE}-${PKGV}-${PKGR}-${MACHINE}${IMAGE_VERSION_SUFFIX}"
+ KERNEL_ARTIFACT_NAME ?= "${PKGE}-${PKGV}-${PKGR}${IMAGE_MACHINE_SUFFIX}${IMAGE_VERSION_SUFFIX}"
- See the :term:`PKGE`, :term:`PKGV`, :term:`PKGR`, :term:`MACHINE`
+ See the :term:`PKGE`, :term:`PKGV`, :term:`PKGR`, :term:`IMAGE_MACHINE_SUFFIX`
and :term:`IMAGE_VERSION_SUFFIX` variables for additional information.
:term:`KERNEL_CLASSES`
A list of classes defining kernel image types that the
- :ref:`kernel <ref-classes-kernel>` class should inherit. You
- typically append this variable to enable extended image types. An
- example is the "kernel-fitimage", which enables fitImage support and
- resides in ``meta/classes/kernel-fitimage.bbclass``. You can register
- custom kernel image types with the :ref:`kernel <ref-classes-kernel>` class using this
- variable.
+ :ref:`ref-classes-kernel` class should inherit. You typically
+ append this variable to enable extended image types. An example is
+ ":ref:`ref-classes-kernel-fitimage`", which enables
+ FIT image support and resides in ``meta/classes-recipe/kernel-fitimage.bbclass``.
+ You can register custom kernel image types with the
+ :ref:`ref-classes-kernel` class using this variable.
+
+ :term:`KERNEL_DANGLING_FEATURES_WARN_ONLY`
+ When kernel configuration fragments are missing for some
+ :term:`KERNEL_FEATURES` specified by layers or BSPs,
+ building and configuring the kernel stops with an error.
+
+ You can turn these errors into warnings by setting the
+ following in ``conf/local.conf``::
+
+ KERNEL_DANGLING_FEATURES_WARN_ONLY = "1"
+
+ You will still be warned that runtime issues may occur,
+ but at least the kernel configuration and build process will
+ be allowed to continue.
:term:`KERNEL_DEBUG_TIMESTAMPS`
If set to "1", enables timestamping functionality during building
the kernel. The default is "0" to disable this for reproducibility
reasons.
+ :term:`KERNEL_DEPLOY_DEPEND`
+ Provides a means of controlling the dependency of an image recipe
+ on the kernel. The default value is "virtual/kernel:do_deploy",
+ however for a small initramfs image or other images that do not
+ need the kernel, this can be set to "" in the image recipe.
+
:term:`KERNEL_DEVICETREE`
Specifies the name of the generated Linux kernel device tree (i.e.
the ``.dtb``) file.
@@ -3998,13 +4540,22 @@ system and gives an overview of their function and contents.
There is legacy support for specifying the full path to the device
tree. However, providing just the ``.dtb`` file is preferred.
- In order to use this variable, the
- :ref:`kernel-devicetree <ref-classes-kernel-devicetree>` class must
- be inherited.
+ In order to use this variable, the :ref:`ref-classes-kernel-devicetree`
+ class must be inherited.
+
+ :term:`KERNEL_DEVICETREE_BUNDLE`
+ When set to "1", this variable allows to bundle the Linux kernel
+ and the Device Tree Binary together in a single file.
+
+ This feature is currently only supported on the "arm" (32 bit)
+ architecture.
+
+ This variable is set to "0" by default by the
+ :ref:`ref-classes-kernel-devicetree` class.
:term:`KERNEL_DTB_LINK_NAME`
The link name of the kernel device tree binary (DTB). This variable
- is set in the ``meta/classes/kernel-artifact-names.bbclass`` file as
+ is set in the ``meta/classes-recipe/kernel-artifact-names.bbclass`` file as
follows::
KERNEL_DTB_LINK_NAME ?= "${KERNEL_ARTIFACT_LINK_NAME}"
@@ -4020,24 +4571,37 @@ system and gives an overview of their function and contents.
:term:`KERNEL_DTB_NAME`
The base name of the kernel device tree binary (DTB). This variable
- is set in the ``meta/classes/kernel-artifact-names.bbclass`` file as
+ is set in the ``meta/classes-recipe/kernel-artifact-names.bbclass`` file as
follows::
KERNEL_DTB_NAME ?= "${KERNEL_ARTIFACT_NAME}"
- The value of the :term:`KERNEL_ARTIFACT_NAME`
- variable, which is set in the same file, has the following value::
+ See :term:`KERNEL_ARTIFACT_NAME` for additional information.
+
+ :term:`KERNEL_DTBDEST`
+ This variable, used by the :ref:`ref-classes-kernel-devicetree`
+ class, allows to change the installation directory of the DTB
+ (Device Tree Binary) files.
+
+ It is set by default to "${KERNEL_IMAGEDEST}" by the
+ :ref:`ref-classes-kernel` class.
+
+ :term:`KERNEL_DTBVENDORED`
+ This variable, used by the :ref:`ref-classes-kernel-devicetree`,
+ allows to ignore vendor subdirectories when installing DTB
+ (Device Tree Binary) files, when it is set to "false".
- KERNEL_ARTIFACT_NAME ?= "${PKGE}-${PKGV}-${PKGR}-${MACHINE}${IMAGE_VERSION_SUFFIX}"
+ To keep vendor subdirectories, set this variable to "true".
+
+ It is set by default to "false" by the :ref:`ref-classes-kernel` class.
:term:`KERNEL_DTC_FLAGS`
Specifies the ``dtc`` flags that are passed to the Linux kernel build
system when generating the device trees (via ``DTC_FLAGS`` environment
variable).
- In order to use this variable, the
- :ref:`kernel-devicetree <ref-classes-kernel-devicetree>` class must
- be inherited.
+ In order to use this variable, the :ref:`ref-classes-kernel-devicetree`
+ class must be inherited.
:term:`KERNEL_EXTRA_ARGS`
Specifies additional ``make`` command-line arguments the OpenEmbedded
@@ -4064,14 +4628,14 @@ system and gives an overview of their function and contents.
statements add specific configurations to targeted machine types::
KERNEL_EXTRA_FEATURES ?= "features/netfilter/netfilter.scc features/taskstats/taskstats.scc"
- KERNEL_FEATURES:append = "${KERNEL_EXTRA_FEATURES}"
- KERNEL_FEATURES:append:qemuall = "cfg/virtio.scc"
- KERNEL_FEATURES:append:qemux86 = " cfg/sound.scc cfg/paravirt_kvm.scc"
- KERNEL_FEATURES:append:qemux86-64 = "cfg/sound.scc"
+ KERNEL_FEATURES:append = " ${KERNEL_EXTRA_FEATURES}"
+ KERNEL_FEATURES:append:qemuall = " cfg/virtio.scc"
+ KERNEL_FEATURES:append:qemux86 = " cfg/sound.scc cfg/paravirt_kvm.scc"
+ KERNEL_FEATURES:append:qemux86-64 = " cfg/sound.scc"
:term:`KERNEL_FIT_LINK_NAME`
The link name of the kernel flattened image tree (FIT) image. This
- variable is set in the ``meta/classes/kernel-artifact-names.bbclass``
+ variable is set in the ``meta/classes-recipe/kernel-artifact-names.bbclass``
file as follows::
KERNEL_FIT_LINK_NAME ?= "${KERNEL_ARTIFACT_LINK_NAME}"
@@ -4087,19 +4651,16 @@ system and gives an overview of their function and contents.
:term:`KERNEL_FIT_NAME`
The base name of the kernel flattened image tree (FIT) image. This
- variable is set in the ``meta/classes/kernel-artifact-names.bbclass``
+ variable is set in the ``meta/classes-recipe/kernel-artifact-names.bbclass``
file as follows::
KERNEL_FIT_NAME ?= "${KERNEL_ARTIFACT_NAME}"
- The value of the :term:`KERNEL_ARTIFACT_NAME`
- variable, which is set in the same file, has the following value::
-
- KERNEL_ARTIFACT_NAME ?= "${PKGE}-${PKGV}-${PKGR}-${MACHINE}${IMAGE_VERSION_SUFFIX}"
+ See :term:`KERNEL_ARTIFACT_NAME` for additional information.
:term:`KERNEL_IMAGE_LINK_NAME`
The link name for the kernel image. This variable is set in the
- ``meta/classes/kernel-artifact-names.bbclass`` file as follows::
+ ``meta/classes-recipe/kernel-artifact-names.bbclass`` file as follows::
KERNEL_IMAGE_LINK_NAME ?= "${KERNEL_ARTIFACT_LINK_NAME}"
@@ -4127,15 +4688,11 @@ system and gives an overview of their function and contents.
:term:`KERNEL_IMAGE_NAME`
The base name of the kernel image. This variable is set in the
- ``meta/classes/kernel-artifact-names.bbclass`` file as follows::
+ ``meta/classes-recipe/kernel-artifact-names.bbclass`` file as follows::
KERNEL_IMAGE_NAME ?= "${KERNEL_ARTIFACT_NAME}"
- The value of the
- :term:`KERNEL_ARTIFACT_NAME` variable,
- which is set in the same file, has the following value::
-
- KERNEL_ARTIFACT_NAME ?= "${PKGE}-${PKGV}-${PKGR}-${MACHINE}${IMAGE_VERSION_SUFFIX}"
+ See :term:`KERNEL_ARTIFACT_NAME` for additional information.
:term:`KERNEL_IMAGETYPE`
The type of kernel to build for a device, usually set by the machine
@@ -4143,9 +4700,12 @@ system and gives an overview of their function and contents.
when building the kernel and is passed to ``make`` as the target to
build.
- If you want to build an alternate kernel image type in addition to that
- specified by :term:`KERNEL_IMAGETYPE`, use the :term:`KERNEL_ALT_IMAGETYPE`
- variable.
+ To build additional kernel image types, use :term:`KERNEL_IMAGETYPES`.
+
+ :term:`KERNEL_IMAGETYPES`
+ Lists additional types of kernel images to build for a device in addition
+ to image type specified in :term:`KERNEL_IMAGETYPE`. Usually set by the
+ machine configuration files.
:term:`KERNEL_MODULE_AUTOLOAD`
Lists kernel modules that need to be auto-loaded during boot.
@@ -4183,11 +4743,18 @@ system and gives an overview of their function and contents.
provide those module configurations, see the
:term:`module_conf_* <module_conf>` variable.
+ :term:`KERNEL_PACKAGE_NAME`
+ Specifies the base name of the kernel packages, such as "kernel"
+ in the kernel packages such as "kernel-modules", "kernel-image" and
+ "kernel-dbg".
+
+ The default value for this variable is set to "kernel" by the
+ :ref:`ref-classes-kernel` class.
+
:term:`KERNEL_PATH`
The location of the kernel sources. This variable is set to the value
- of the :term:`STAGING_KERNEL_DIR` within
- the :ref:`module <ref-classes-module>` class. For information on
- how this variable is used, see the
+ of the :term:`STAGING_KERNEL_DIR` within the :ref:`ref-classes-module`
+ class. For information on how this variable is used, see the
":ref:`kernel-dev/common:incorporating out-of-tree modules`"
section in the Yocto Project Linux Kernel Development Manual.
@@ -4199,9 +4766,8 @@ system and gives an overview of their function and contents.
:term:`KERNEL_SRC`
The location of the kernel sources. This variable is set to the value
- of the :term:`STAGING_KERNEL_DIR` within
- the :ref:`module <ref-classes-module>` class. For information on
- how this variable is used, see the
+ of the :term:`STAGING_KERNEL_DIR` within the :ref:`ref-classes-module`
+ class. For information on how this variable is used, see the
":ref:`kernel-dev/common:incorporating out-of-tree modules`"
section in the Yocto Project Linux Kernel Development Manual.
@@ -4211,6 +4777,10 @@ system and gives an overview of their function and contents.
to the :term:`KERNEL_SRC` variable. Both variables are common variables
used by external Makefiles to point to the kernel source directory.
+ :term:`KERNEL_STRIP`
+ Allows to specific which ``strip`` command to use to strip the kernel
+ binary, typically either GNU binutils ``strip`` or ``llvm-strip``.
+
:term:`KERNEL_VERSION`
Specifies the version of the kernel as extracted from ``version.h``
or ``utsrelease.h`` within the kernel sources. Effects of setting
@@ -4224,7 +4794,7 @@ system and gives an overview of their function and contents.
:term:`KERNELDEPMODDEPEND` does not control whether or not that data
exists, but simply whether or not it is used. If you do not need to
use the data, set the :term:`KERNELDEPMODDEPEND` variable in your
- ``initramfs`` recipe. Setting the variable there when the data is not
+ :term:`Initramfs` recipe. Setting the variable there when the data is not
needed avoids a potential dependency loop.
:term:`KFEATURE_DESCRIPTION`
@@ -4255,7 +4825,7 @@ system and gives an overview of their function and contents.
SRCREV_machine:core2-32-intel-common = "43b9eced9ba8a57add36af07736344dcc383f711"
KMACHINE:core2-32-intel-common = "intel-core2-32"
KBRANCH:core2-32-intel-common = "standard/base"
- KERNEL_FEATURES:append:core2-32-intel-common = "${KERNEL_FEATURES_INTEL_COMMON}"
+ KERNEL_FEATURES:append:core2-32-intel-common = " ${KERNEL_FEATURES_INTEL_COMMON}"
The :term:`KMACHINE` statement says
that the kernel understands the machine name as "intel-core2-32".
@@ -4279,7 +4849,7 @@ system and gives an overview of their function and contents.
:term:`LABELS`
Provides a list of targets for automatic configuration.
- See the :ref:`grub-efi <ref-classes-grub-efi>` class for more
+ See the :ref:`ref-classes-grub-efi` class for more
information on how this variable is used.
:term:`LAYERDEPENDS`
@@ -4304,6 +4874,9 @@ system and gives an overview of their function and contents.
available outside of ``layer.conf`` and references are expanded
immediately when parsing of the file completes.
+ :term:`LAYERDIR_RE`
+ See :term:`bitbake:LAYERDIR_RE` in the BitBake manual.
+
:term:`LAYERRECOMMENDS`
Lists the layers, separated by spaces, recommended for use with this
layer.
@@ -4322,31 +4895,7 @@ system and gives an overview of their function and contents.
``LAYERRECOMMENDS_mylayer``).
:term:`LAYERSERIES_COMPAT`
- Lists the versions of the :term:`OpenEmbedded-Core (OE-Core)` for which
- a layer is compatible. Using the :term:`LAYERSERIES_COMPAT` variable
- allows the layer maintainer to indicate which combinations of the
- layer and OE-Core can be expected to work. The variable gives the
- system a way to detect when a layer has not been tested with new
- releases of OE-Core (e.g. the layer is not maintained).
-
- To specify the OE-Core versions for which a layer is compatible, use
- this variable in your layer's ``conf/layer.conf`` configuration file.
- For the list, use the Yocto Project
- :yocto_wiki:`Release Name </Releases>` (e.g.
- &DISTRO_NAME_NO_CAP;). To specify multiple OE-Core versions for the
- layer, use a space-separated list::
-
- LAYERSERIES_COMPAT_layer_root_name = "&DISTRO_NAME_NO_CAP; &DISTRO_NAME_NO_CAP_MINUS_ONE;"
-
- .. note::
-
- Setting :term:`LAYERSERIES_COMPAT` is required by the Yocto Project
- Compatible version 2 standard.
- The OpenEmbedded build system produces a warning if the variable
- is not set for any given layer.
-
- See the ":ref:`dev-manual/common-tasks:creating your own layer`"
- section in the Yocto Project Development Tasks Manual.
+ See :term:`bitbake:LAYERSERIES_COMPAT` in the BitBake manual.
:term:`LAYERVERSION`
Optionally specifies the version of a layer as a single number. You
@@ -4378,10 +4927,11 @@ system and gives an overview of their function and contents.
:term:`LEAD_SONAME`
Specifies the lead (or primary) compiled library file (i.e. ``.so``)
- that the :ref:`debian <ref-classes-debian>` class applies its
+ that the :ref:`ref-classes-debian` class applies its
naming policy to given a recipe that packages multiple libraries.
- This variable works in conjunction with the :ref:`debian <ref-classes-debian>` class.
+ This variable works in conjunction with the :ref:`ref-classes-debian`
+ class.
:term:`LIC_FILES_CHKSUM`
Checksums of the license text in the recipe source code.
@@ -4394,7 +4944,7 @@ system and gives an overview of their function and contents.
This variable must be defined for all recipes (unless
:term:`LICENSE` is set to "CLOSED").
- For more information, see the ":ref:`dev-manual/common-tasks:tracking license changes`"
+ For more information, see the ":ref:`dev-manual/licenses:tracking license changes`"
section in the Yocto Project Development Tasks Manual.
:term:`LICENSE`
@@ -4458,7 +5008,7 @@ system and gives an overview of their function and contents.
For related information on providing license text, see the
:term:`COPY_LIC_DIRS` variable, the
:term:`COPY_LIC_MANIFEST` variable, and the
- ":ref:`dev-manual/common-tasks:providing license text`"
+ ":ref:`dev-manual/licenses:providing license text`"
section in the Yocto Project Development Tasks Manual.
:term:`LICENSE_FLAGS`
@@ -4471,16 +5021,33 @@ system and gives an overview of their function and contents.
typically used to mark recipes that might require additional licenses
in order to be used in a commercial product. For more information,
see the
- ":ref:`dev-manual/common-tasks:enabling commercially licensed recipes`"
+ ":ref:`dev-manual/licenses:enabling commercially licensed recipes`"
section in the Yocto Project Development Tasks Manual.
:term:`LICENSE_FLAGS_ACCEPTED`
Lists license flags that when specified in
:term:`LICENSE_FLAGS` within a recipe should not
prevent that recipe from being built. For more information, see the
- ":ref:`dev-manual/common-tasks:enabling commercially licensed recipes`"
+ ":ref:`dev-manual/licenses:enabling commercially licensed recipes`"
section in the Yocto Project Development Tasks Manual.
+ :term:`LICENSE_FLAGS_DETAILS`
+ Adds details about a flag in :term:`LICENSE_FLAGS`. This way,
+ if such a flag is not accepted through :term:`LICENSE_FLAGS_ACCEPTED`,
+ the error message will be more informative, containing the specified
+ extra details.
+
+ For example, a recipe with an EULA may set::
+
+ LICENSE_FLAGS = "FooBar-EULA"
+ LICENSE_FLAGS_DETAILS[FooBar-EULA] = "For further details, see https://example.com/eula."
+
+ If ``Foobar-EULA`` isn't in :term:`LICENSE_FLAGS_ACCEPTED`, the
+ error message is more useful::
+
+ Has a restricted license 'FooBar-EULA' which is not listed in your LICENSE_FLAGS_ACCEPTED.
+ For further details, see https://example.com/eula.
+
:term:`LICENSE_PATH`
Path to additional licenses used during the build. By default, the
OpenEmbedded build system uses :term:`COMMON_LICENSE_DIR` to define the
@@ -4571,7 +5138,6 @@ system and gives an overview of their function and contents.
MACHINE ?= "genericx86"
MACHINE ?= "genericx86-64"
MACHINE ?= "beaglebone"
- MACHINE ?= "edgerouter"
The last five are Yocto Project reference hardware
boards, which are provided in the ``meta-yocto-bsp`` layer.
@@ -4714,19 +5280,27 @@ system and gives an overview of their function and contents.
shipped, see the ":ref:`ref-features-machine`" section.
:term:`MACHINE_FEATURES_BACKFILL`
- Features to be added to :term:`MACHINE_FEATURES` if not also present in
+ A list of space-separated features to be added to
+ :term:`MACHINE_FEATURES` if not also present in
:term:`MACHINE_FEATURES_BACKFILL_CONSIDERED`.
- This variable is set in the ``meta/conf/bitbake.conf`` file. It is
- not intended to be user-configurable. It is best to just reference
- the variable to see which machine features are being backfilled for
- all machine configurations. See the ":ref:`ref-features-backfill`"
- section for more information.
+ This variable is set in the ``meta/conf/bitbake.conf`` file. It is not
+ intended to be user-configurable. It is best to just reference the
+ variable to see which machine features are being
+ :ref:`backfilled <ref-features-backfill>` for all machine configurations.
:term:`MACHINE_FEATURES_BACKFILL_CONSIDERED`
- Features from :term:`MACHINE_FEATURES_BACKFILL` that should not be
- backfilled (i.e. added to :term:`MACHINE_FEATURES`) during the build. See
- the ":ref:`ref-features-backfill`" section for more information.
+ A list of space-separated features from :term:`MACHINE_FEATURES_BACKFILL`
+ that should not be :ref:`backfilled <ref-features-backfill>` (i.e. added
+ to :term:`MACHINE_FEATURES`) during the build.
+
+ This corresponds to an opt-out mechanism. When new default machine
+ features are introduced, machine definition maintainers can review
+ (`consider`) them and decide to exclude them from the
+ :ref:`backfilled <ref-features-backfill>` features. Therefore, the
+ combination of :term:`MACHINE_FEATURES_BACKFILL` and
+ :term:`MACHINE_FEATURES_BACKFILL_CONSIDERED` makes it possible to
+ add new default features without breaking existing machine definitions.
:term:`MACHINEOVERRIDES`
A colon-separated list of overrides that apply to the current
@@ -4757,6 +5331,22 @@ system and gives an overview of their function and contents.
:term:`MAINTAINER`
The email address of the distribution maintainer.
+ :term:`MESON_BUILDTYPE`
+ Value of the Meson ``--buildtype`` argument used by the
+ :ref:`ref-classes-meson` class. It defaults to ``debug`` if
+ :term:`DEBUG_BUILD` is set to "1", and ``plain`` otherwise.
+
+ See `Meson build options <https://mesonbuild.com/Builtin-options.html>`__
+ for the values you could set in a recipe. Values such as ``plain``,
+ ``debug``, ``debugoptimized``, ``release`` and ``minsize`` allow
+ you to specify the inclusion of debugging symbols and the compiler
+ optimizations (none, performance or size).
+
+ :term:`MESON_TARGET`
+ A variable for the :ref:`ref-classes-meson` class, allowing to choose
+ a Meson target to build in :ref:`ref-tasks-compile`. Otherwise, the
+ default targets are built.
+
:term:`METADATA_BRANCH`
The branch currently checked out for the OpenEmbedded-Core layer (path
determined by :term:`COREBASE`).
@@ -4765,6 +5355,13 @@ system and gives an overview of their function and contents.
The revision currently checked out for the OpenEmbedded-Core layer (path
determined by :term:`COREBASE`).
+ :term:`MIME_XDG_PACKAGES`
+ The current implementation of the :ref:`ref-classes-mime-xdg`
+ class cannot detect ``.desktop`` files installed through absolute
+ symbolic links. Use this setting to make the class create post-install
+ and post-remove scripts for these packages anyway, to invoke the
+ ``update-destop-database`` command.
+
:term:`MIRRORS`
Specifies additional paths from which the OpenEmbedded build system
gets source code. When the build system searches for source code, it
@@ -4773,32 +5370,31 @@ system and gives an overview of their function and contents.
:term:`PREMIRRORS`, the upstream source, and then
locations specified by :term:`MIRRORS` in that order.
- Assuming your distribution (:term:`DISTRO`) is "poky",
- the default value for :term:`MIRRORS` is defined in the
- ``conf/distro/poky.conf`` file in the ``meta-poky`` Git repository.
+ The default value for :term:`MIRRORS` is defined in the
+ ``meta/classes-global/mirrors.bbclass`` file in the core metadata layer.
:term:`MLPREFIX`
Specifies a prefix has been added to :term:`PN` to create a
special version of a recipe or package (i.e. a Multilib version). The
variable is used in places where the prefix needs to be added to or
- removed from a the name (e.g. the :term:`BPN` variable).
+ removed from a name (e.g. the :term:`BPN` variable).
:term:`MLPREFIX` gets set when a prefix has been added to :term:`PN`.
.. note::
- The "ML" in :term:`MLPREFIX` stands for "MultiLib". This representation is
- historical and comes from a time when ``nativesdk`` was a suffix
- rather than a prefix on the recipe name. When ``nativesdk`` was turned
- into a prefix, it made sense to set :term:`MLPREFIX` for it as well.
+ The "ML" in :term:`MLPREFIX` stands for "MultiLib". This representation
+ is historical and comes from a time when ":ref:`ref-classes-nativesdk`"
+ was a suffix rather than a prefix on the recipe name. When
+ ":ref:`ref-classes-nativesdk`" was turned into a prefix, it made sense
+ to set :term:`MLPREFIX` for it as well.
To help understand when :term:`MLPREFIX` might be needed, consider when
- :term:`BBCLASSEXTEND` is used to provide a
- ``nativesdk`` version of a recipe in addition to the target version.
- If that recipe declares build-time dependencies on tasks in other
- recipes by using :term:`DEPENDS`, then a dependency on
- "foo" will automatically get rewritten to a dependency on
- "nativesdk-foo". However, dependencies like the following will not
- get rewritten automatically::
+ :term:`BBCLASSEXTEND` is used to provide a :ref:`ref-classes-nativesdk`
+ version of a recipe in addition to the target version. If that recipe
+ declares build-time dependencies on tasks in other recipes by using
+ :term:`DEPENDS`, then a dependency on "foo" will automatically get
+ rewritten to a dependency on "nativesdk-foo". However, dependencies like
+ the following will not get rewritten automatically::
do_foo[depends] += "recipe:do_foo"
@@ -4821,7 +5417,7 @@ system and gives an overview of their function and contents.
See the :term:`KERNEL_MODULE_AUTOLOAD` variable for more information.
:term:`module_conf`
- Specifies `modprobe.d <https://linux.die.net/man/5/modprobe.d>`_
+ Specifies `modprobe.d <https://linux.die.net/man/5/modprobe.d>`__
syntax lines for inclusion in the ``/etc/modprobe.d/modname.conf``
file.
@@ -4859,7 +5455,7 @@ system and gives an overview of their function and contents.
:term:`MODULE_TARBALL_LINK_NAME`
The link name of the kernel module tarball. This variable is set in
- the ``meta/classes/kernel-artifact-names.bbclass`` file as follows::
+ the ``meta/classes-recipe/kernel-artifact-names.bbclass`` file as follows::
MODULE_TARBALL_LINK_NAME ?= "${KERNEL_ARTIFACT_LINK_NAME}"
@@ -4873,14 +5469,16 @@ system and gives an overview of their function and contents.
:term:`MODULE_TARBALL_NAME`
The base name of the kernel module tarball. This variable is set in
- the ``meta/classes/kernel-artifact-names.bbclass`` file as follows::
+ the ``meta/classes-recipe/kernel-artifact-names.bbclass`` file as follows::
MODULE_TARBALL_NAME ?= "${KERNEL_ARTIFACT_NAME}"
- The value of the :term:`KERNEL_ARTIFACT_NAME` variable,
- which is set in the same file, has the following value::
+ See :term:`KERNEL_ARTIFACT_NAME` for additional information.
- KERNEL_ARTIFACT_NAME ?= "${PKGE}-${PKGV}-${PKGR}-${MACHINE}${IMAGE_VERSION_SUFFIX}"
+ :term:`MOUNT_BASE`
+ On non-systemd systems (where ``udev-extraconf`` is being used),
+ specifies the base directory for auto-mounting filesystems. The
+ default value is "/run/media".
:term:`MULTIMACH_TARGET_SYS`
Uniquely identifies the type of the target system for which packages
@@ -4892,8 +5490,7 @@ system and gives an overview of their function and contents.
${PACKAGE_ARCH}${TARGET_VENDOR}-${TARGET_OS}
- Some classes (e.g.
- :ref:`cross-canadian <ref-classes-cross-canadian>`) modify the
+ Some classes (e.g. :ref:`ref-classes-cross-canadian`) modify the
:term:`MULTIMACH_TARGET_SYS` value.
See the :term:`STAMP` variable for an example. See the
@@ -4995,7 +5592,7 @@ system and gives an overview of their function and contents.
The minimal command and arguments to run ``objdump``.
:term:`OE_BINCONFIG_EXTRA_MANGLE`
- When inheriting the :ref:`binconfig <ref-classes-binconfig>` class,
+ When inheriting the :ref:`ref-classes-binconfig` class,
this variable specifies additional arguments passed to the "sed"
command. The sed command alters any paths in configuration scripts
that have been set up during compilation. Inheriting this class
@@ -5003,10 +5600,20 @@ system and gives an overview of their function and contents.
``sysroots/`` directory so that all builds that use the script will
use the correct directories for the cross compiling layout.
- See the ``meta/classes/binconfig.bbclass`` in the
+ See the ``meta/classes-recipe/binconfig.bbclass`` in the
:term:`Source Directory` for details on how this class
applies these additional sed command arguments.
+ :term:`OECMAKE_GENERATOR`
+ A variable for the :ref:`ref-classes-cmake` class, allowing to choose
+ which back-end will be generated by CMake to build an application.
+
+ By default, this variable is set to ``Ninja``, which is faster than GNU
+ make, but if building is broken with Ninja, a recipe can use this
+ variable to use GNU make instead::
+
+ OECMAKE_GENERATOR = "Unix Makefiles"
+
:term:`OE_IMPORTS`
An internal variable used to tell the OpenEmbedded build system what
Python modules to import for every Python function run by the system.
@@ -5027,7 +5634,7 @@ system and gives an overview of their function and contents.
Controls how the OpenEmbedded build system spawns interactive
terminals on the host development system (e.g. using the BitBake
command with the ``-c devshell`` command-line option). For more
- information, see the ":ref:`dev-manual/common-tasks:using a development shell`" section in
+ information, see the ":ref:`dev-manual/development-shell:using a development shell`" section in
the Yocto Project Development Tasks Manual.
You can use the following values for the :term:`OE_TERMINAL` variable:
@@ -5050,6 +5657,20 @@ system and gives an overview of their function and contents.
For additional information on how this variable is used, see the
initialization script.
+ :term:`OEQA_REPRODUCIBLE_TEST_PACKAGE`
+ Set the package manager(s) for build reproducibility testing.
+ See :yocto_git:`reproducible.py </poky/tree/meta/lib/oeqa/selftest/cases/reproducible.py>`
+ and :doc:`/test-manual/reproducible-builds`.
+
+ :term:`OEQA_REPRODUCIBLE_TEST_TARGET`
+ Set build target for build reproducibility testing. By default
+ all available recipes are compiled with "bitbake world", see also :term:`EXCLUDE_FROM_WORLD`
+ and :doc:`/test-manual/reproducible-builds`.
+
+ :term:`OEQA_REPRODUCIBLE_TEST_SSTATE_TARGETS`
+ Set build targets which can be rebuilt using :ref:`shared state <overview-manual/concepts:shared state cache>`
+ when running build reproducibility tests. See :doc:`/test-manual/reproducible-builds`.
+
:term:`OLDEST_KERNEL`
Declares the oldest version of the Linux kernel that the produced
binaries must support. This variable is passed into the build of the
@@ -5060,6 +5681,89 @@ system and gives an overview of their function and contents.
default by setting the variable in a custom distribution
configuration file.
+ :term:`OPKG_MAKE_INDEX_EXTRA_PARAMS`
+ Specifies extra parameters for the ``opkg-make-index`` command.
+
+ :term:`OVERLAYFS_ETC_DEVICE`
+ When the :ref:`ref-classes-overlayfs-etc` class is
+ inherited, specifies the device to be mounted for the read/write
+ layer of ``/etc``. There is no default, so you must set this if you
+ wish to enable :ref:`ref-classes-overlayfs-etc`, for
+ example, assuming ``/dev/mmcblk0p2`` was the desired device::
+
+ OVERLAYFS_ETC_DEVICE = "/dev/mmcblk0p2"
+
+ :term:`OVERLAYFS_ETC_EXPOSE_LOWER`
+ When the :ref:`ref-classes-overlayfs-etc` class is
+ inherited, if set to "1" then a read-only access to the original
+ ``/etc`` content will be provided as a ``lower/`` subdirectory of
+ :term:`OVERLAYFS_ETC_MOUNT_POINT`. The default value is "0".
+
+ :term:`OVERLAYFS_ETC_FSTYPE`
+ When the :ref:`ref-classes-overlayfs-etc` class is
+ inherited, specifies the file system type for the read/write
+ layer of ``/etc``. There is no default, so you must set this if you
+ wish to enable :ref:`ref-classes-overlayfs-etc`,
+ for example, assuming the file system is ext4::
+
+ OVERLAYFS_ETC_FSTYPE = "ext4"
+
+ :term:`OVERLAYFS_ETC_MOUNT_OPTIONS`
+ When the :ref:`ref-classes-overlayfs-etc` class is
+ inherited, specifies the mount options for the read-write layer.
+ The default value is "defaults".
+
+ :term:`OVERLAYFS_ETC_MOUNT_POINT`
+ When the :ref:`ref-classes-overlayfs-etc` class is
+ inherited, specifies the parent mount path for the filesystem layers.
+ There is no default, so you must set this if you wish to enable
+ :ref:`ref-classes-overlayfs-etc`, for example if the desired path is
+ "/data"::
+
+ OVERLAYFS_ETC_MOUNT_POINT = "/data"
+
+ :term:`OVERLAYFS_ETC_USE_ORIG_INIT_NAME`
+ When the :ref:`ref-classes-overlayfs-etc` class is inherited, controls
+ how the generated init will be named. For more information, see the
+ :ref:`ref-classes-overlayfs-etc` class documentation. The default value
+ is "1".
+
+ :term:`OVERLAYFS_MOUNT_POINT`
+ When inheriting the :ref:`ref-classes-overlayfs` class,
+ specifies mount point(s) to be used. For example::
+
+ OVERLAYFS_MOUNT_POINT[data] = "/data"
+
+ The assumes you have a ``data.mount`` systemd unit defined elsewhere in
+ your BSP (e.g. in ``systemd-machine-units`` recipe) and it is installed
+ into the image. For more information see :ref:`ref-classes-overlayfs`.
+
+ .. note::
+
+ Although the :ref:`ref-classes-overlayfs` class is
+ inherited by individual recipes, :term:`OVERLAYFS_MOUNT_POINT`
+ should be set in your machine configuration.
+
+ :term:`OVERLAYFS_QA_SKIP`
+ When inheriting the :ref:`ref-classes-overlayfs` class,
+ provides the ability to disable QA checks for particular overlayfs
+ mounts. For example::
+
+ OVERLAYFS_QA_SKIP[data] = "mount-configured"
+
+ .. note::
+
+ Although the :ref:`ref-classes-overlayfs` class is
+ inherited by individual recipes, :term:`OVERLAYFS_QA_SKIP`
+ should be set in your machine configuration.
+
+ :term:`OVERLAYFS_WRITABLE_PATHS`
+ When inheriting the :ref:`ref-classes-overlayfs` class,
+ specifies writable paths used at runtime for the recipe. For
+ example::
+
+ OVERLAYFS_WRITABLE_PATHS[data] = "/usr/share/my-custom-application"
+
:term:`OVERRIDES`
A colon-separated list of overrides that currently apply. Overrides
are a BitBake mechanism that allows variables to be selectively
@@ -5076,7 +5780,7 @@ system and gives an overview of their function and contents.
FOO:an-override = "overridden"
See the
- ":ref:`bitbake:bitbake-user-manual/bitbake-user-manual-metadata:conditional syntax (overrides)`"
+ ":ref:`bitbake-user-manual/bitbake-user-manual-metadata:conditional syntax (overrides)`"
section in the BitBake User Manual for more information on the
overrides mechanism.
@@ -5094,7 +5798,7 @@ system and gives an overview of their function and contents.
An easy way to see what overrides apply is to search for :term:`OVERRIDES`
in the output of the ``bitbake -e`` command. See the
- ":ref:`dev-manual/common-tasks:viewing variable values`" section in the Yocto
+ ":ref:`dev-manual/debugging:viewing variable values`" section in the Yocto
Project Development Tasks Manual for more information.
:term:`P`
@@ -5102,6 +5806,9 @@ system and gives an overview of their function and contents.
${PN}-${PV}
+ :term:`P4DIR`
+ See :term:`bitbake:P4DIR` in the BitBake manual.
+
:term:`PACKAGE_ADD_METADATA`
This variable defines additional metadata to add to packages.
@@ -5115,7 +5822,7 @@ system and gives an overview of their function and contents.
specific by using the package name as a suffix.
You can find out more about applying this variable in the
- ":ref:`dev-manual/common-tasks:adding custom metadata to packages`"
+ ":ref:`dev-manual/packages:adding custom metadata to packages`"
section in the Yocto Project Development Tasks Manual.
:term:`PACKAGE_ARCH`
@@ -5157,15 +5864,9 @@ system and gives an overview of their function and contents.
OpenEmbedded build system uses when packaging data.
You can provide one or more of the following arguments for the
- variable: PACKAGE_CLASSES ?= "package_rpm package_deb package_ipk
- package_tar"
-
- .. note::
+ variable::
- While it is a legal option, the ``package_tar``
- class has limited functionality due to no support for package
- dependencies by that backend. Therefore, it is recommended that
- you do not use it.
+ PACKAGE_CLASSES ?= "package_rpm package_deb package_ipk"
The build system uses only the first argument in the list as the
package manager when creating your image or SDK. However, packages
@@ -5222,7 +5923,7 @@ system and gives an overview of their function and contents.
use of the :term:`INHIBIT_PACKAGE_DEBUG_SPLIT` variable.
You can find out more about debugging using GDB by reading the
- ":ref:`dev-manual/common-tasks:debugging with the gnu project debugger (gdb) remotely`" section
+ ":ref:`dev-manual/debugging:debugging with the gnu project debugger (gdb) remotely`" section
in the Yocto Project Development Tasks Manual.
:term:`PACKAGE_EXCLUDE`
@@ -5379,9 +6080,9 @@ system and gives an overview of their function and contents.
:term:`IMAGE_INSTALL` variable to specify
packages for installation. The exception to this is when working with
the :ref:`core-image-minimal-initramfs <ref-manual/images:images>`
- image. When working with an initial RAM filesystem (initramfs) image,
+ image. When working with an initial RAM filesystem (:term:`Initramfs`) image,
use the :term:`PACKAGE_INSTALL` variable. For information on creating an
- initramfs, see the ":ref:`dev-manual/common-tasks:building an initial ram filesystem (initramfs) image`" section
+ :term:`Initramfs`, see the ":ref:`dev-manual/building:building an initial ram filesystem (Initramfs) image`" section
in the Yocto Project Development Tasks Manual.
:term:`PACKAGE_INSTALL_ATTEMPTONLY`
@@ -5404,7 +6105,7 @@ system and gives an overview of their function and contents.
:term:`PACKAGE_WRITE_DEPS`.
For information on running post-installation scripts, see the
- ":ref:`dev-manual/common-tasks:post-installation scripts`"
+ ":ref:`dev-manual/new-recipe:post-installation scripts`"
section in the Yocto Project Development Tasks Manual.
:term:`PACKAGECONFIG`
@@ -5432,31 +6133,28 @@ system and gives an overview of their function and contents.
omit any argument you like but must retain the separating commas. The
order is important and specifies the following:
- 1. Extra arguments that should be added to the configure script
- argument list (:term:`EXTRA_OECONF` or
- :term:`PACKAGECONFIG_CONFARGS`) if
- the feature is enabled.
+ #. Extra arguments that should be added to :term:`PACKAGECONFIG_CONFARGS`
+ if the feature is enabled.
- 2. Extra arguments that should be added to :term:`EXTRA_OECONF` or
- :term:`PACKAGECONFIG_CONFARGS` if the feature is disabled.
+ #. Extra arguments that should be added to :term:`PACKAGECONFIG_CONFARGS`
+ if the feature is disabled.
- 3. Additional build dependencies (:term:`DEPENDS`)
+ #. Additional build dependencies (:term:`DEPENDS`)
that should be added if the feature is enabled.
- 4. Additional runtime dependencies (:term:`RDEPENDS`)
+ #. Additional runtime dependencies (:term:`RDEPENDS`)
that should be added if the feature is enabled.
- 5. Additional runtime recommendations
+ #. Additional runtime recommendations
(:term:`RRECOMMENDS`) that should be added if
the feature is enabled.
- 6. Any conflicting (that is, mutually exclusive) :term:`PACKAGECONFIG`
+ #. Any conflicting (that is, mutually exclusive) :term:`PACKAGECONFIG`
settings for this feature.
Consider the following :term:`PACKAGECONFIG` block taken from the
``librsvg`` recipe. In this example the feature is ``gtk``, which has
- three arguments that determine the feature's behavior.
- ::
+ three arguments that determine the feature's behavior::
PACKAGECONFIG[gtk] = "--with-gtk3,--without-gtk3,gtk+3"
@@ -5497,20 +6195,51 @@ system and gives an overview of their function and contents.
PACKAGECONFIG:append:pn-recipename = " f4"
+ Consider the following example of a :ref:`ref-classes-cmake` recipe with a systemd service
+ in which :term:`PACKAGECONFIG` is used to transform the systemd service
+ into a feature that can be easily enabled or disabled via :term:`PACKAGECONFIG`::
+
+ example.c
+ example.service
+ CMakeLists.txt
+
+ The ``CMakeLists.txt`` file contains::
+
+ if(WITH_SYSTEMD)
+ install(FILES ${PROJECT_SOURCE_DIR}/example.service DESTINATION /etc/systemd/systemd)
+ endif(WITH_SYSTEMD)
+
+ In order to enable the installation of ``example.service`` we need to
+ ensure that ``-DWITH_SYSTEMD=ON`` is passed to the ``cmake`` command
+ execution. Recipes that have ``CMakeLists.txt`` generally inherit the
+ :ref:`ref-classes-cmake` class, that runs ``cmake`` with
+ :term:`EXTRA_OECMAKE`, which :term:`PACKAGECONFIG_CONFARGS` will be
+ appended to. Now, knowing that :term:`PACKAGECONFIG_CONFARGS` is
+ automatically filled with either the first or second element of
+ :term:`PACKAGECONFIG` flag value, the recipe would be like::
+
+ inherit cmake
+ PACKAGECONFIG = "systemd"
+ PACKAGECONFIG[systemd] = "-DWITH_SYSTEMD=ON,-DWITH_SYSTEMD=OFF"
+
+ A side note to this recipe is to check if ``systemd`` is in fact the used :term:`INIT_MANAGER`
+ or not::
+
+ PACKAGECONFIG = "${@'systemd' if d.getVar('INIT_MANAGER') == 'systemd' else ''}"
+
:term:`PACKAGECONFIG_CONFARGS`
A space-separated list of configuration options generated from the
:term:`PACKAGECONFIG` setting.
- Classes such as :ref:`autotools <ref-classes-autotools>` and
- :ref:`cmake <ref-classes-cmake>` use :term:`PACKAGECONFIG_CONFARGS` to
- pass :term:`PACKAGECONFIG` options to ``configure`` and ``cmake``,
- respectively. If you are using :term:`PACKAGECONFIG` but not a class that
- handles the ``do_configure`` task, then you need to use
+ Classes such as :ref:`ref-classes-autotools` and :ref:`ref-classes-cmake`
+ use :term:`PACKAGECONFIG_CONFARGS` to pass :term:`PACKAGECONFIG` options
+ to ``configure`` and ``cmake``, respectively. If you are using
+ :term:`PACKAGECONFIG` but not a class that handles the
+ :ref:`ref-tasks-configure` task, then you need to use
:term:`PACKAGECONFIG_CONFARGS` appropriately.
:term:`PACKAGEGROUP_DISABLE_COMPLEMENTARY`
- For recipes inheriting the
- :ref:`packagegroup <ref-classes-packagegroup>` class, setting
+ For recipes inheriting the :ref:`ref-classes-packagegroup` class, setting
:term:`PACKAGEGROUP_DISABLE_COMPLEMENTARY` to "1" specifies that the
normal complementary packages (i.e. ``-dev``, ``-dbg``, and so forth)
should not be automatically created by the ``packagegroup`` recipe,
@@ -5555,7 +6284,7 @@ system and gives an overview of their function and contents.
For an example of how to use the :term:`PACKAGES_DYNAMIC` variable when
you are splitting packages, see the
- ":ref:`dev-manual/common-tasks:handling optional module packaging`"
+ ":ref:`dev-manual/packages:handling optional module packaging`"
section in the Yocto Project Development Tasks Manual.
:term:`PACKAGESPLITFUNCS`
@@ -5569,17 +6298,20 @@ system and gives an overview of their function and contents.
desired splitting.
:term:`PARALLEL_MAKE`
- Extra options passed to the ``make`` command during the
- :ref:`ref-tasks-compile` task in order to specify
- parallel compilation on the local build host. This variable is
- usually in the form "-j x", where x represents the maximum number of
- parallel threads ``make`` can run.
+
+ Extra options passed to the build tool command (``make``,
+ ``ninja`` or more specific build engines, like the Go language one)
+ during the :ref:`ref-tasks-compile` task, to specify parallel compilation
+ on the local build host. This variable is usually in the form "-j x",
+ where x represents the maximum number of parallel threads such engines
+ can run.
.. note::
- In order for :term:`PARALLEL_MAKE` to be effective, ``make`` must be
- called with ``${``\ :term:`EXTRA_OEMAKE`\ ``}``. An easy way to ensure
- this is to use the ``oe_runmake`` function.
+ For software compiled by ``make``, in order for :term:`PARALLEL_MAKE`
+ to be effective, ``make`` must be called with
+ ``${``\ :term:`EXTRA_OEMAKE`\ ``}``. An easy
+ way to ensure this is to use the ``oe_runmake`` function.
By default, the OpenEmbedded build system automatically sets this
variable to be equal to the number of cores the build system uses.
@@ -5587,10 +6319,10 @@ system and gives an overview of their function and contents.
.. note::
If the software being built experiences dependency issues during
- the ``do_compile`` task that result in race conditions, you can clear
+ the :ref:`ref-tasks-compile` task that result in race conditions, you can clear
the :term:`PARALLEL_MAKE` variable within the recipe as a workaround. For
information on addressing race conditions, see the
- ":ref:`dev-manual/common-tasks:debugging parallel make races`"
+ ":ref:`dev-manual/debugging:debugging parallel make races`"
section in the Yocto Project Development Tasks Manual.
For single socket systems (i.e. one CPU), you should not have to
@@ -5600,27 +6332,28 @@ system and gives an overview of their function and contents.
not set higher than "-j 20".
For more information on speeding up builds, see the
- ":ref:`dev-manual/common-tasks:speeding up a build`"
+ ":ref:`dev-manual/speeding-up-build:speeding up a build`"
section in the Yocto Project Development Tasks Manual.
:term:`PARALLEL_MAKEINST`
- Extra options passed to the ``make install`` command during the
- :ref:`ref-tasks-install` task in order to specify
+ Extra options passed to the build tool install command
+ (``make install``, ``ninja install`` or more specific ones)
+ during the :ref:`ref-tasks-install` task in order to specify
parallel installation. This variable defaults to the value of
:term:`PARALLEL_MAKE`.
.. note::
- In order for :term:`PARALLEL_MAKEINST` to be effective, ``make`` must
- be called with
+ For software compiled by ``make``, in order for :term:`PARALLEL_MAKEINST`
+ to be effective, ``make`` must be called with
``${``\ :term:`EXTRA_OEMAKE`\ ``}``. An easy
way to ensure this is to use the ``oe_runmake`` function.
If the software being built experiences dependency issues during
- the ``do_install`` task that result in race conditions, you can
+ the :ref:`ref-tasks-install` task that result in race conditions, you can
clear the :term:`PARALLEL_MAKEINST` variable within the recipe as a
workaround. For information on addressing race conditions, see the
- ":ref:`dev-manual/common-tasks:debugging parallel make races`"
+ ":ref:`dev-manual/debugging:debugging parallel make races`"
section in the Yocto Project Development Tasks Manual.
:term:`PATCHRESOLVE`
@@ -5657,17 +6390,14 @@ system and gives an overview of their function and contents.
:term:`PE` is the default value of the :term:`PKGE` variable.
- :term:`PEP517_BUILD_API`
- When used by recipes that inherit the :ref:`python_pep517
- <ref-classes-python_pep517>` class, specifies the entry point to the
- PEP-517 compliant build API (such as ``flit_core.buildapi``).
-
:term:`PEP517_WHEEL_PATH`
- When used by recipes that inherit the
- :ref:`python_pep517 <ref-classes-python_pep517>` class,
- denotes the path to ``dist/`` (short for distribution) where the
+ When used by recipes that inherit the :ref:`ref-classes-python_pep517`
+ class, denotes the path to ``dist/`` (short for distribution) where the
binary archive ``wheel`` is built.
+ :term:`PERSISTENT_DIR`
+ See :term:`bitbake:PERSISTENT_DIR` in the BitBake manual.
+
:term:`PF`
Specifies the recipe or package name and includes all version and
revision numbers (i.e. ``glibc-2.13-r20+svnr15508/`` and
@@ -5675,10 +6405,11 @@ system and gives an overview of their function and contents.
${:term:`PN`}-${:term:`EXTENDPE`}${:term:`PV`}-${:term:`PR`}
:term:`PIXBUF_PACKAGES`
- When inheriting the :ref:`pixbufcache <ref-classes-pixbufcache>`
+ When inheriting the :ref:`ref-classes-pixbufcache`
class, this variable identifies packages that contain the pixbuf
- loaders used with ``gdk-pixbuf``. By default, the ``pixbufcache``
- class assumes that the loaders are in the recipe's main package (i.e.
+ loaders used with ``gdk-pixbuf``. By default, the
+ :ref:`ref-classes-pixbufcache` class assumes that
+ the loaders are in the recipe's main package (i.e.
``${``\ :term:`PN`\ ``}``). Use this variable if the
loaders you need are in a package other than that main package.
@@ -5690,9 +6421,8 @@ system and gives an overview of their function and contents.
When using the :term:`PKG` variable, you must use a package name override.
- For example, when the :ref:`debian <ref-classes-debian>` class
- renames the output package, it does so by setting
- ``PKG:packagename``.
+ For example, when the :ref:`ref-classes-debian` class renames the output
+ package, it does so by setting ``PKG:packagename``.
:term:`PKG_CONFIG_PATH`
The path to ``pkg-config`` files for the current build context.
@@ -5720,7 +6450,7 @@ system and gives an overview of their function and contents.
For examples of how this data is used, see the
":ref:`overview-manual/concepts:automatically added runtime dependencies`"
section in the Yocto Project Overview and Concepts Manual and the
- ":ref:`dev-manual/common-tasks:viewing package information with \`\`oe-pkgdata-util\`\``"
+ ":ref:`dev-manual/debugging:viewing package information with \`\`oe-pkgdata-util\`\``"
section in the Yocto Project Development Tasks Manual. For more
information on the shared, global-state directory, see
:term:`STAGING_DIR_HOST`.
@@ -5783,9 +6513,9 @@ system and gives an overview of their function and contents.
:term:`POPULATE_SDK_POST_HOST_COMMAND`
Specifies a list of functions to call once the OpenEmbedded build
system has created the host part of the SDK. You can specify
- functions separated by semicolons::
+ functions separated by spaces::
- POPULATE_SDK_POST_HOST_COMMAND += "function; ... "
+ POPULATE_SDK_POST_HOST_COMMAND += "function"
If you need to pass the SDK path to a command within a function, you
can use ``${SDK_DIR}``, which points to the parent directory used by
@@ -5795,9 +6525,9 @@ system and gives an overview of their function and contents.
:term:`POPULATE_SDK_POST_TARGET_COMMAND`
Specifies a list of functions to call once the OpenEmbedded build
system has created the target part of the SDK. You can specify
- functions separated by semicolons::
+ functions separated by spaces::
- POPULATE_SDK_POST_TARGET_COMMAND += "function; ... "
+ POPULATE_SDK_POST_TARGET_COMMAND += "function"
If you need to pass the SDK path to a command within a function, you
can use ``${SDK_DIR}``, which points to the parent directory used by
@@ -5836,7 +6566,7 @@ system and gives an overview of their function and contents.
Because manually managing :term:`PR` can be cumbersome and error-prone,
an automated solution exists. See the
- ":ref:`dev-manual/common-tasks:working with a pr service`" section
+ ":ref:`dev-manual/packages:working with a pr service`" section
in the Yocto Project Development Tasks Manual for more information.
:term:`PREFERRED_PROVIDER`
@@ -5853,13 +6583,13 @@ system and gives an overview of their function and contents.
The :term:`PREFERRED_PROVIDER` variable is set with the name (:term:`PN`) of
the recipe you prefer to provide "virtual/kernel".
- Following are more examples::
+ Here are more examples::
PREFERRED_PROVIDER_virtual/xserver = "xserver-xf86"
PREFERRED_PROVIDER_virtual/libgl ?= "mesa"
For more
- information, see the ":ref:`dev-manual/common-tasks:using virtual providers`"
+ information, see the ":ref:`dev-manual/new-recipe:using virtual providers`"
section in the Yocto Project Development Tasks Manual.
.. note::
@@ -5870,6 +6600,9 @@ system and gives an overview of their function and contents.
desirable since this mechanism is designed to select between mutually
exclusive alternative providers.
+ :term:`PREFERRED_PROVIDERS`
+ See :term:`bitbake:PREFERRED_PROVIDERS` in the BitBake manual.
+
:term:`PREFERRED_VERSION`
If there are multiple versions of a recipe available, this variable
determines which version should be given preference. You must always
@@ -5939,9 +6672,8 @@ system and gives an overview of their function and contents.
source, and then locations specified by
:term:`MIRRORS` in that order.
- Assuming your distribution (:term:`DISTRO`) is "poky",
- the default value for :term:`PREMIRRORS` is defined in the
- ``conf/distro/poky.conf`` file in the ``meta-poky`` Git repository.
+ The default value for :term:`PREMIRRORS` is defined in the
+ ``meta/classes-global/mirrors.bbclass`` file in the core metadata layer.
Typically, you could add a specific server for the build system to
attempt before any others by adding something like the following to
@@ -6016,7 +6748,7 @@ system and gives an overview of their function and contents.
.. note::
A recipe's own recipe name (:term:`PN`) is always implicitly prepended
- to `PROVIDES`, so while using "+=" in the above example may not be
+ to :term:`PROVIDES`, so while using "+=" in the above example may not be
strictly necessary it is recommended to avoid confusion.
In addition to providing recipes under alternate names, the
@@ -6036,11 +6768,11 @@ system and gives an overview of their function and contents.
.. note::
- A corresponding mechanism for virtual runtime dependencies
- (packages) exists. However, the mechanism does not depend on any
- special functionality beyond ordinary variable assignments. For
- example, ``VIRTUAL-RUNTIME_dev_manager`` refers to the package of
- the component that manages the ``/dev`` directory.
+ A corresponding mechanism for virtual runtime dependencies (packages)
+ exists. However, the mechanism does not depend on any special
+ functionality beyond ordinary variable assignments. For example,
+ :term:`VIRTUAL-RUNTIME_dev_manager <VIRTUAL-RUNTIME>` refers to the
+ package of the component that manages the ``/dev`` directory.
Setting the "preferred provider" for runtime dependencies is as
simple as using the following assignment in a configuration file::
@@ -6051,15 +6783,15 @@ system and gives an overview of their function and contents.
:term:`PRSERV_HOST`
The network based :term:`PR` service host and port.
- The ``conf/local.conf.sample.extended`` configuration file in the
- :term:`Source Directory` shows how the
- :term:`PRSERV_HOST` variable is set::
+ The ``conf/templates/default/local.conf.sample.extended`` configuration
+ file in the :term:`Source Directory` shows how the :term:`PRSERV_HOST`
+ variable is set::
PRSERV_HOST = "localhost:0"
You must
set the variable if you want to automatically start a local :ref:`PR
- service <dev-manual/common-tasks:working with a pr service>`. You can
+ service <dev-manual/packages:working with a pr service>`. You can
set :term:`PRSERV_HOST` to other values to use a remote PR service.
@@ -6073,7 +6805,7 @@ system and gives an overview of their function and contents.
:term:`PTEST_ENABLED`
Specifies whether or not :ref:`Package
- Test <dev-manual/common-tasks:testing packages with ptest>` (ptest)
+ Test <dev-manual/packages:testing packages with ptest>` (ptest)
functionality is enabled when building a recipe. You should not set
this variable directly. Enabling and disabling building Package Tests
at build time should be done by adding "ptest" to (or removing it
@@ -6089,32 +6821,24 @@ system and gives an overview of their function and contents.
:term:`PV` is the default value of the :term:`PKGV` variable.
+ :term:`PYPI_PACKAGE`
+ When inheriting the :ref:`ref-classes-pypi` class, specifies the
+ `PyPI <https://pypi.org/>`__ package name to be built. The default value
+ is set based upon :term:`BPN` (stripping any "python-" or "python3-"
+ prefix off if present), however for some packages it will need to be set
+ explicitly if that will not match the package name (e.g. where the
+ package name has a prefix, underscores, uppercase letters etc.)
+
:term:`PYTHON_ABI`
- When used by recipes that inherit the
- :ref:`setuptools3 <ref-classes-setuptools3>` class, denotes the
- Application Binary Interface (ABI) currently in use for Python. By
- default, the ABI is "m". You do not have to set this variable as the
- OpenEmbedded build system sets it for you.
+ When used by recipes that inherit the :ref:`ref-classes-setuptools3`
+ class, denotes the Application Binary Interface (ABI) currently in use
+ for Python. By default, the ABI is "m". You do not have to set this
+ variable as the OpenEmbedded build system sets it for you.
The OpenEmbedded build system uses the ABI to construct directory
names used when installing the Python headers and libraries in
sysroot (e.g. ``.../python3.3m/...``).
- :term:`PYTHON_PN`
- When used by recipes that inherit the
- :ref:`setuptools3 <ref-classes-setuptools3>` classe, specifies the
- major Python version being built. For Python 3.x, :term:`PYTHON_PN` would
- be "python3". You do not have to set this variable as the
- OpenEmbedded build system automatically sets it for you.
-
- The variable allows recipes to use common infrastructure such as the
- following::
-
- DEPENDS += "${PYTHON_PN}-native"
-
- In the previous example,
- the version of the dependency is :term:`PYTHON_PN`.
-
:term:`QA_EMPTY_DIRS`
Specifies a list of directories that are expected to be empty when
packaging; if ``empty-dirs`` appears in :term:`ERROR_QA` or
@@ -6189,7 +6913,7 @@ system and gives an overview of their function and contents.
The practical effect of the above :term:`RDEPENDS` assignment is that
``bar`` and ``baz`` will be declared as dependencies inside the
package ``foo`` when it is written out by one of the
- :ref:`do_package_write_\* <ref-tasks-package_write_deb>` tasks.
+ :ref:`do_package_write_* <ref-tasks-package_write_deb>` tasks.
Exactly how this is done depends on which package format is used,
which is determined by
:term:`PACKAGE_CLASSES`. When the
@@ -6201,11 +6925,11 @@ system and gives an overview of their function and contents.
added. This dependency is from the recipe's
:ref:`ref-tasks-build` (not to be confused with
:ref:`ref-tasks-compile`) task to the
- ``do_package_write_*`` task of the recipes that build ``bar`` and
+ :ref:`do_package_write_* <ref-tasks-package_write_deb>` task of the recipes that build ``bar`` and
``baz``.
The names of the packages you list within :term:`RDEPENDS` must be the
- names of other packages - they cannot be recipe names. Although
+ names of other packages --- they cannot be recipe names. Although
package names and recipe names usually match, the important point
here is that you are providing package names within the :term:`RDEPENDS`
variable. For an example of the default list of packages created from
@@ -6236,7 +6960,7 @@ system and gives an overview of their function and contents.
The package names you use with :term:`RDEPENDS` must appear as they would
in the :term:`PACKAGES` variable. The :term:`PKG` variable
allows a different name to be used for the final package (e.g. the
- :ref:`debian <ref-classes-debian>` class uses this to rename
+ :ref:`ref-classes-debian` class uses this to rename
packages), but this final package name cannot be used with
:term:`RDEPENDS`, which makes sense as :term:`RDEPENDS` is meant to be
independent of the package format used.
@@ -6269,12 +6993,27 @@ system and gives an overview of their function and contents.
RDEPENDS:${PN} = "foo (>= 1.2)"
- For information on build-time dependencies, see the
- :term:`DEPENDS` variable. You can also see the
- ":ref:`bitbake:bitbake-user-manual/bitbake-user-manual-metadata:tasks`" and
- ":ref:`bitbake:bitbake-user-manual/bitbake-user-manual-execution:dependencies`" sections in the
- BitBake User Manual for additional information on tasks and
- dependencies.
+ For information on build-time dependencies, see the :term:`DEPENDS`
+ variable. You can also see the
+ ":ref:`bitbake-user-manual/bitbake-user-manual-metadata:tasks`" and
+ ":ref:`bitbake-user-manual/bitbake-user-manual-execution:dependencies`" sections in the
+ BitBake User Manual for additional information on tasks and dependencies.
+
+ :term:`RECIPE_MAINTAINER`
+ This variable defines the name and e-mail address of the maintainer of a
+ recipe. Such information can be used by human users submitted changes,
+ and by automated tools to send notifications, for example about
+ vulnerabilities or source updates.
+
+ The variable can be defined in a global distribution :oe_git:`maintainers.inc
+ </openembedded-core/tree/meta/conf/distro/include/maintainers.inc>` file::
+
+ meta/conf/distro/include/maintainers.inc:RECIPE_MAINTAINER:pn-sysvinit = "Ross Burton <ross.burton@arm.com>"
+
+ It can also be directly defined in a recipe,
+ for example in the ``libgpiod`` one::
+
+ RECIPE_MAINTAINER = "Bartosz Golaszewski <brgl@bgdev.pl>"
:term:`RECIPE_NO_UPDATE_REASON`
If a recipe should not be replaced by a more recent upstream version,
@@ -6283,9 +7022,41 @@ system and gives an overview of their function and contents.
in the ":ref:`ref-manual/devtool-reference:checking on the upgrade status of a recipe`"
section.
+ :term:`RECIPE_SYSROOT`
+ This variable points to the directory that holds all files populated from
+ recipes specified in :term:`DEPENDS`. As the name indicates,
+ think of this variable as a custom root (``/``) for the recipe that will be
+ used by the compiler in order to find headers and other files needed to complete
+ its job.
+
+ This variable is related to :term:`STAGING_DIR_HOST` or :term:`STAGING_DIR_TARGET`
+ according to the type of the recipe and the build target.
+
+ To better understand this variable, consider the following examples:
+
+ - For ``#include <header.h>``, ``header.h`` should be in ``"${RECIPE_SYSROOT}/usr/include"``
+
+ - For ``-lexample``, ``libexample.so`` should be in ``"${RECIPE_SYSROOT}/lib"``
+ or other library sysroot directories.
+
+ The default value is ``"${WORKDIR}/recipe-sysroot"``.
+ Do not modify it.
+
+ :term:`RECIPE_SYSROOT_NATIVE`
+ This is similar to :term:`RECIPE_SYSROOT` but the populated files are from
+ ``-native`` recipes. This allows a recipe built for the target machine to
+ use ``native`` tools.
+
+ This variable is related to :term:`STAGING_DIR_NATIVE`.
+
+ The default value is ``"${WORKDIR}/recipe-sysroot-native"``.
+ Do not modify it.
+
+ :term:`REPODIR`
+ See :term:`bitbake:REPODIR` in the BitBake manual.
+
:term:`REQUIRED_DISTRO_FEATURES`
- When inheriting the
- :ref:`features_check <ref-classes-features_check>`
+ When inheriting the :ref:`ref-classes-features_check`
class, this variable identifies distribution features that must exist
in the current configuration in order for the OpenEmbedded build
system to build the recipe. In other words, if the
@@ -6306,10 +7077,9 @@ system and gives an overview of their function and contents.
for the same recipe, the :term:`REQUIRED_VERSION` value applies.
:term:`RM_WORK_EXCLUDE`
- With ``rm_work`` enabled, this variable specifies a list of recipes
- whose work directories should not be removed. See the
- ":ref:`ref-classes-rm-work`" section for more
- details.
+ With :ref:`ref-classes-rm-work` enabled, this variable
+ specifies a list of recipes whose work directories should not be removed.
+ See the ":ref:`ref-classes-rm-work`" section for more details.
:term:`ROOT_HOME`
Defines the root home directory. By default, this directory is set as
@@ -6339,14 +7109,14 @@ system and gives an overview of their function and contents.
Indicates a filesystem image to include as the root filesystem.
The :term:`ROOTFS` variable is an optional variable used with the
- :ref:`image-live <ref-classes-image-live>` class.
+ :ref:`ref-classes-image-live` class.
:term:`ROOTFS_POSTINSTALL_COMMAND`
Specifies a list of functions to call after the OpenEmbedded build
system has installed packages. You can specify functions separated by
- semicolons::
+ spaces::
- ROOTFS_POSTINSTALL_COMMAND += "function; ... "
+ ROOTFS_POSTINSTALL_COMMAND += "function"
If you need to pass the root filesystem path to a command within a
function, you can use ``${IMAGE_ROOTFS}``, which points to the
@@ -6357,9 +7127,9 @@ system and gives an overview of their function and contents.
:term:`ROOTFS_POSTPROCESS_COMMAND`
Specifies a list of functions to call once the OpenEmbedded build
system has created the root filesystem. You can specify functions
- separated by semicolons::
+ separated by spaces::
- ROOTFS_POSTPROCESS_COMMAND += "function; ... "
+ ROOTFS_POSTPROCESS_COMMAND += "function"
If you need to pass the root filesystem path to a command within a
function, you can use ``${IMAGE_ROOTFS}``, which points to the
@@ -6372,9 +7142,9 @@ system and gives an overview of their function and contents.
system has removed unnecessary packages. When runtime package
management is disabled in the image, several packages are removed
including ``base-passwd``, ``shadow``, and ``update-alternatives``.
- You can specify functions separated by semicolons::
+ You can specify functions separated by spaces::
- ROOTFS_POSTUNINSTALL_COMMAND += "function; ... "
+ ROOTFS_POSTUNINSTALL_COMMAND += "function"
If you need to pass the root filesystem path to a command within a
function, you can use ``${IMAGE_ROOTFS}``, which points to the
@@ -6385,9 +7155,9 @@ system and gives an overview of their function and contents.
:term:`ROOTFS_PREPROCESS_COMMAND`
Specifies a list of functions to call before the OpenEmbedded build
system has created the root filesystem. You can specify functions
- separated by semicolons::
+ separated by spaces::
- ROOTFS_PREPROCESS_COMMAND += "function; ... "
+ ROOTFS_PREPROCESS_COMMAND += "function"
If you need to pass the root filesystem path to a command within a
function, you can use ``${IMAGE_ROOTFS}``, which points to the
@@ -6395,6 +7165,9 @@ system and gives an overview of their function and contents.
:term:`IMAGE_ROOTFS` variable for more
information.
+ :term:`RPMBUILD_EXTRA_PARAMS`
+ Specifies extra user-defined parameters for the ``rpmbuild`` command.
+
:term:`RPROVIDES`
A list of package name aliases that a package also provides. These
aliases are useful for satisfying runtime dependencies of other
@@ -6512,6 +7285,11 @@ system and gives an overview of their function and contents.
RSUGGESTS:${PN} = "useful_package another_package"
+ :term:`RUST_CHANNEL`
+ Specifies which version of Rust to build - "stable", "beta" or "nightly".
+ The default value is "stable". Set this at your own risk, as values other
+ than "stable" are not guaranteed to work at a given time.
+
:term:`S`
The location in the :term:`Build Directory` where
unpacked recipe source code resides. By default, this directory is
@@ -6524,7 +7302,7 @@ system and gives an overview of their function and contents.
to find the unpacked source.
As an example, assume a :term:`Source Directory`
- top-level folder named ``poky`` and a default Build Directory at
+ top-level folder named ``poky`` and a default :term:`Build Directory` at
``poky/build``. In this case, the work directory the build system
uses to keep the unpacked recipe for ``db`` is the following::
@@ -6562,16 +7340,29 @@ system and gives an overview of their function and contents.
The target architecture for the SDK. Typically, you do not directly
set this variable. Instead, use :term:`SDKMACHINE`.
+ :term:`SDK_ARCHIVE_TYPE`
+ Specifies the type of archive to create for the SDK. Valid values:
+
+ - ``tar.xz`` (default)
+ - ``zip``
+
+ Only one archive type can be specified.
+
+ :term:`SDK_BUILDINFO_FILE`
+ When using the :ref:`ref-classes-image-buildinfo` class,
+ specifies the file in the SDK to write the build information into. The
+ default value is "``/buildinfo``".
+
:term:`SDK_CUSTOM_TEMPLATECONF`
When building the extensible SDK, if :term:`SDK_CUSTOM_TEMPLATECONF` is set to
- "1" and a ``conf/templateconf.conf`` file exists in the build directory
+ "1" and a ``conf/templateconf.cfg`` file exists in the :term:`Build Directory`
(:term:`TOPDIR`) then this will be copied into the SDK.
:term:`SDK_DEPLOY`
The directory set up and used by the
- :ref:`populate_sdk_base <ref-classes-populate-sdk>` class to which
- the SDK is deployed. The ``populate_sdk_base`` class defines
- :term:`SDK_DEPLOY` as follows::
+ :ref:`populate_sdk_base <ref-classes-populate-sdk>` class to which the
+ SDK is deployed. The :ref:`populate_sdk_base <ref-classes-populate-sdk>`
+ class defines :term:`SDK_DEPLOY` as follows::
SDK_DEPLOY = "${TMPDIR}/deploy/sdk"
@@ -6644,13 +7435,16 @@ system and gives an overview of their function and contents.
:term:`SDK_EXT_TYPE` is set to "full".
:term:`SDK_NAME`
- The base name for SDK output files. The name is derived from the
- :term:`DISTRO`, :term:`TCLIBC`,
- :term:`SDK_ARCH`,
- :term:`IMAGE_BASENAME`, and
- :term:`TUNE_PKGARCH` variables::
+ The base name for SDK output files. The default value (as set in
+ ``meta-poky/conf/distro/poky.conf``) is derived from the
+ :term:`DISTRO`,
+ :term:`TCLIBC`,
+ :term:`SDKMACHINE`,
+ :term:`IMAGE_BASENAME`,
+ :term:`TUNE_PKGARCH`, and
+ :term:`MACHINE` variables::
- SDK_NAME = "${DISTRO}-${TCLIBC}-${SDK_ARCH}-${IMAGE_BASENAME}-${TUNE_PKGARCH}"
+ SDK_NAME = "${DISTRO}-${TCLIBC}-${SDKMACHINE}-${IMAGE_BASENAME}-${TUNE_PKGARCH}-${MACHINE}"
:term:`SDK_OS`
Specifies the operating system for which the SDK will be built. The
@@ -6681,7 +7475,9 @@ system and gives an overview of their function and contents.
:term:`SDK_POSTPROCESS_COMMAND`
Specifies a list of functions to call once the OpenEmbedded build
system creates the SDK. You can specify functions separated by
- semicolons: SDK_POSTPROCESS_COMMAND += "function; ... "
+ spaces:
+
+ SDK_POSTPROCESS_COMMAND += "function"
If you need to pass an SDK path to a command within a function, you
can use ``${SDK_DIR}``, which points to the parent directory used by
@@ -6689,7 +7485,8 @@ system and gives an overview of their function and contents.
:term:`SDK_DIR` variable for more information.
:term:`SDK_PREFIX`
- The toolchain binary prefix used for ``nativesdk`` recipes. The
+ The toolchain binary prefix used for
+ :ref:`ref-classes-nativesdk` recipes. The
OpenEmbedded build system uses the :term:`SDK_PREFIX` value to set the
:term:`TARGET_PREFIX` when building
``nativesdk`` recipes. The default value is "${SDK_SYS}-".
@@ -6698,10 +7495,10 @@ system and gives an overview of their function and contents.
A list of shared state tasks added to the extensible SDK. By default,
the following tasks are added:
- - do_populate_lic
- - do_package_qa
- - do_populate_sysroot
- - do_deploy
+ - :ref:`ref-tasks-populate_lic`
+ - :ref:`ref-tasks-package_qa`
+ - :ref:`ref-tasks-populate_sysroot`
+ - :ref:`ref-tasks-deploy`
Despite the default value of "" for the
:term:`SDK_RECRDEP_TASKS` variable, the above four tasks are always added
@@ -6761,6 +7558,10 @@ system and gives an overview of their function and contents.
section in the Yocto Project Application Development and the
Extensible Software Development Kit (eSDK) manual.
+ :term:`SDK_TOOLCHAIN_LANGS`
+ Specifies programming languages to support in the SDK, as a
+ space-separated list. Currently supported items are ``rust`` and ``go``.
+
:term:`SDK_UPDATE_URL`
An optional URL for an update server for the extensible SDK. If set,
the value is used as the default update server when running
@@ -6780,6 +7581,11 @@ system and gives an overview of their function and contents.
:term:`DISTRO_VERSION` and
:term:`METADATA_REVISION` variables.
+ :term:`SDK_ZIP_OPTIONS`
+ Specifies extra options to pass to the ``zip`` command when zipping the SDK
+ (i.e. when :term:`SDK_ARCHIVE_TYPE` is set to "zip"). The default value is
+ "-y".
+
:term:`SDKEXTPATH`
The default installation directory for the Extensible SDK. By
default, this directory is based on the :term:`DISTRO`
@@ -6821,6 +7627,10 @@ system and gives an overview of their function and contents.
configuration will not take effect.
:term:`SDKPATH`
+ Defines the path used to collect the SDK components and build the
+ installer.
+
+ :term:`SDKPATHINSTALL`
Defines the path offered to the user for installation of the SDK that
is generated by the OpenEmbedded build system. The path appears as
the default location for installing the SDK when you run the SDK's
@@ -6830,7 +7640,7 @@ system and gives an overview of their function and contents.
:term:`SDKTARGETSYSROOT`
The full path to the sysroot used for cross-compilation within an SDK
as it will be when installed into the default
- :term:`SDKPATH`.
+ :term:`SDKPATHINSTALL`.
:term:`SECTION`
The section in which packages should be categorized. Package
@@ -6845,64 +7655,31 @@ system and gives an overview of their function and contents.
:term:`FULL_OPTIMIZATION` unless :term:`DEBUG_BUILD` = "1", in which
case the value of :term:`DEBUG_OPTIMIZATION` is used.
- :term:`SERIAL_CONSOLE`
- Defines a serial console (TTY) to enable using
- `getty <https://en.wikipedia.org/wiki/Getty_(Unix)>`__. Provide a
- value that specifies the baud rate followed by the TTY device name
- separated by a space. You cannot specify more than one TTY device::
-
- SERIAL_CONSOLE = "115200 ttyS0"
-
- .. note::
-
- The :term:`SERIAL_CONSOLE` variable is deprecated. Please use the
- :term:`SERIAL_CONSOLES` variable.
-
:term:`SERIAL_CONSOLES`
Defines a serial console (TTY) to enable using
- `getty <https://en.wikipedia.org/wiki/Getty_(Unix)>`__. Provide a
- value that specifies the baud rate followed by the TTY device name
- separated by a semicolon. Use spaces to separate multiple devices::
+ :wikipedia:`getty <Getty_(Unix)>`. Provide a value that specifies the
+ baud rate followed by the TTY device name separated by a semicolon.
+ Use spaces to separate multiple devices::
SERIAL_CONSOLES = "115200;ttyS0 115200;ttyS1"
- :term:`SERIAL_CONSOLES_CHECK`
- Specifies serial consoles, which must be listed in
- :term:`SERIAL_CONSOLES`, to check against
- ``/proc/console`` before enabling them using getty. This variable
- allows aliasing in the format: <device>:<alias>. If a device was
- listed as "sclp_line0" in ``/dev/`` and "ttyS0" was listed in
- ``/proc/console``, you would do the following::
-
- SERIAL_CONSOLES_CHECK = "slcp_line0:ttyS0"
-
- This variable is currently only supported with SysVinit (i.e. not
- with systemd). Note that :term:`SERIAL_CONSOLES_CHECK` also requires
- ``/etc/inittab`` to be writable when used with SysVinit. This makes it
- incompatible with customizations such as the following::
-
- EXTRA_IMAGE_FEATURES += "read-only-rootfs"
-
:term:`SETUPTOOLS_BUILD_ARGS`
- When used by recipes that inherit the
- :ref:`setuptools3 <ref-classes-setuptools3>` class, this variable can
- be used to specify additional arguments to be passed to ``setup.py build``
- in the ``setuptools3_do_compile()`` task.
+ When used by recipes that inherit the :ref:`ref-classes-setuptools3`
+ class, this variable can be used to specify additional arguments to be
+ passed to ``setup.py build`` in the ``setuptools3_do_compile()`` task.
:term:`SETUPTOOLS_INSTALL_ARGS`
- When used by recipes that inherit the
- :ref:`setuptools3 <ref-classes-setuptools3>` class, this variable can
- be used to specify additional arguments to be passed to ``setup.py install``
- in the ``setuptools3_do_install()`` task.
+ When used by recipes that inherit the :ref:`ref-classes-setuptools3`
+ class, this variable can be used to specify additional arguments to be
+ passed to ``setup.py install`` in the ``setuptools3_do_install()`` task.
:term:`SETUPTOOLS_SETUP_PATH`
- When used by recipes that inherit the
- :ref:`setuptools3 <ref-classes-setuptools3>` class, this variable should
- be used to specify the directory in which the ``setup.py`` file is
- located if it is not at the root of the source tree (as specified by
- :term:`S`). For example, in a recipe where the sources are fetched from
- a Git repository and ``setup.py`` is in a ``python/pythonmodule``
- subdirectory, you would have this::
+ When used by recipes that inherit the :ref:`ref-classes-setuptools3`
+ class, this variable should be used to specify the directory in which
+ the ``setup.py`` file is located if it is not at the root of the source
+ tree (as specified by :term:`S`). For example, in a recipe where the
+ sources are fetched from a Git repository and ``setup.py`` is in a
+ ``python/pythonmodule`` subdirectory, you would have this::
S = "${WORKDIR}/git"
SETUPTOOLS_SETUP_PATH = "${S}/python/pythonmodule"
@@ -6963,8 +7740,7 @@ system and gives an overview of their function and contents.
To enable file removal, set the variable to "1" in your
``conf/local.conf`` configuration file in your:
- :term:`Build Directory`.
- ::
+ :term:`Build Directory`::
SKIP_FILEDEPS = "1"
@@ -6981,9 +7757,10 @@ system and gives an overview of their function and contents.
SKIP_RECIPE[myrecipe] = "Not supported by our organization."
:term:`SOC_FAMILY`
- Groups together machines based upon the same family of SOC (System On
- Chip). You typically set this variable in a common ``.inc`` file that
- you include in the configuration files of all the machines.
+ A colon-separated list grouping together machines based upon the same
+ family of SoC (System On Chip). You typically set this variable in a
+ common ``.inc`` file that you include in the configuration files of all
+ the machines.
.. note::
@@ -7048,7 +7825,7 @@ system and gives an overview of their function and contents.
specified in :term:`SRC_URI`.
To use this variable, you must globally inherit the
- :ref:`own-mirrors <ref-classes-own-mirrors>` class and then provide
+ :ref:`ref-classes-own-mirrors` class and then provide
the URL to your mirrors. Here is the general syntax::
INHERIT += "own-mirrors"
@@ -7058,6 +7835,119 @@ system and gives an overview of their function and contents.
You can specify only a single URL in :term:`SOURCE_MIRROR_URL`.
+ :term:`SPDX_ARCHIVE_PACKAGED`
+ This option allows to add to :term:`SPDX` output compressed archives
+ of the files in the generated target packages.
+
+ Such archives are available in
+ ``tmp/deploy/spdx/MACHINE/packages/packagename.tar.zst``
+ under the :term:`Build Directory`.
+
+ Enable this option as follows::
+
+ SPDX_ARCHIVE_PACKAGED = "1"
+
+ According to our tests on release 4.1 "langdale", building
+ ``core-image-minimal`` for the ``qemux86-64`` machine, enabling this
+ option multiplied the size of the ``tmp/deploy/spdx`` directory by a
+ factor of 13 (+1.6 GiB for this image), compared to just using the
+ :ref:`ref-classes-create-spdx` class with no option.
+
+ Note that this option doesn't increase the size of :term:`SPDX`
+ files in ``tmp/deploy/images/MACHINE``.
+
+ :term:`SPDX_ARCHIVE_SOURCES`
+ This option allows to add to :term:`SPDX` output compressed archives
+ of the sources for packages installed on the target. It currently
+ only works when :term:`SPDX_INCLUDE_SOURCES` is set.
+
+ This is one way of fulfilling "source code access" license
+ requirements.
+
+ Such source archives are available in
+ ``tmp/deploy/spdx/MACHINE/recipes/recipe-packagename.tar.zst``
+ under the :term:`Build Directory`.
+
+ Enable this option as follows::
+
+ SPDX_INCLUDE_SOURCES = "1"
+ SPDX_ARCHIVE_SOURCES = "1"
+
+ According to our tests on release 4.1 "langdale", building
+ ``core-image-minimal`` for the ``qemux86-64`` machine, enabling
+ these options multiplied the size of the ``tmp/deploy/spdx``
+ directory by a factor of 11 (+1.4 GiB for this image),
+ compared to just using the :ref:`ref-classes-create-spdx`
+ class with no option.
+
+ Note that using this option only marginally increases the size
+ of the :term:`SPDX` output in ``tmp/deploy/images/MACHINE/``
+ (+ 0.07\% with the tested image), compared to just enabling
+ :term:`SPDX_INCLUDE_SOURCES`.
+
+ :term:`SPDX_CUSTOM_ANNOTATION_VARS`
+ This option allows to associate `SPDX annotations
+ <https://spdx.github.io/spdx-spec/v2.3/annotations/>`__ to a recipe,
+ using the values of variables in the recipe::
+
+ ANNOTATION1 = "First annotation for recipe"
+ ANNOTATION2 = "Second annotation for recipe"
+ SPDX_CUSTOM_ANNOTATION_VARS = "ANNOTATION1 ANNOTATION2"
+
+ This will add a new block to the recipe ``.sdpx.json`` output::
+
+ "annotations": [
+ {
+ "annotationDate": "2023-04-18T08:32:12Z",
+ "annotationType": "OTHER",
+ "annotator": "Tool: oe-spdx-creator - 1.0",
+ "comment": "ANNOTATION1=First annotation for recipe"
+ },
+ {
+ "annotationDate": "2023-04-18T08:32:12Z",
+ "annotationType": "OTHER",
+ "annotator": "Tool: oe-spdx-creator - 1.0",
+ "comment": "ANNOTATION2=Second annotation for recipe"
+ }
+ ],
+
+ :term:`SPDX_INCLUDE_SOURCES`
+ This option allows to add a description of the source files used to build
+ the host tools and the target packages, to the ``spdx.json`` files in
+ ``tmp/deploy/spdx/MACHINE/recipes/`` under the :term:`Build Directory`.
+ As a consequence, the ``spdx.json`` files under the ``by-namespace`` and
+ ``packages`` subdirectories in ``tmp/deploy/spdx/MACHINE`` are also
+ modified to include references to such source file descriptions.
+
+ Enable this option as follows::
+
+ SPDX_INCLUDE_SOURCES = "1"
+
+ According to our tests on release 4.1 "langdale", building
+ ``core-image-minimal`` for the ``qemux86-64`` machine, enabling
+ this option multiplied the total size of the ``tmp/deploy/spdx``
+ directory by a factor of 3 (+291 MiB for this image),
+ and the size of the ``IMAGE-MACHINE.spdx.tar.zst`` in
+ ``tmp/deploy/images/MACHINE`` by a factor of 130 (+15 MiB for this
+ image), compared to just using the :ref:`ref-classes-create-spdx` class
+ with no option.
+
+ :term:`SPDX_NAMESPACE_PREFIX`
+ This option could be used in order to change the prefix of ``spdxDocument``
+ and the prefix of ``documentNamespace``. It is set by default to
+ ``http://spdx.org/spdxdoc``.
+
+ :term:`SPDX_PRETTY`
+ This option makes the SPDX output more human-readable, using
+ identation and newlines, instead of the default output in a
+ single line::
+
+ SPDX_PRETTY = "1"
+
+ The generated SPDX files are approximately 20% bigger, but
+ this option is recommended if you want to inspect the SPDX
+ output files with a text editor.
+
:term:`SPDXLICENSEMAP`
Maps commonly used license names to their SPDX counterparts found in
``meta/files/common-licenses/``. For the default :term:`SPDXLICENSEMAP`
@@ -7095,10 +7985,77 @@ system and gives an overview of their function and contents.
various ``SPL_*`` variables used by the OpenEmbedded build system.
See the BeagleBone machine configuration example in the
- ":ref:`dev-manual/common-tasks:adding a layer using the \`\`bitbake-layers\`\` script`"
+ ":ref:`dev-manual/layers:adding a layer using the \`\`bitbake-layers\`\` script`"
section in the Yocto Project Board Support Package Developer's Guide
for additional information.
+ :term:`SPL_MKIMAGE_DTCOPTS`
+ Options for the device tree compiler passed to ``mkimage -D`` feature
+ while creating a FIT image with the :ref:`ref-classes-uboot-sign`
+ class. If :term:`SPL_MKIMAGE_DTCOPTS` is not set then the
+ :ref:`ref-classes-uboot-sign` class will not pass the ``-D`` option
+ to ``mkimage``.
+
+ The default value is set to "" by the :ref:`ref-classes-uboot-config`
+ class.
+
+ :term:`SPL_SIGN_ENABLE`
+ Enable signing of the U-Boot FIT image. The default value is "0".
+ This variable is used by the :ref:`ref-classes-uboot-sign` class.
+
+ :term:`SPL_SIGN_KEYDIR`
+ Location of the directory containing the RSA key and certificate used for
+ signing the U-Boot FIT image, used by the :ref:`ref-classes-uboot-sign`
+ class.
+
+ :term:`SPL_SIGN_KEYNAME`
+ The name of keys used by the :ref:`ref-classes-kernel-fitimage` class
+ for signing U-Boot FIT image stored in the :term:`SPL_SIGN_KEYDIR`
+ directory. If we have for example a ``dev.key`` key and a ``dev.crt``
+ certificate stored in the :term:`SPL_SIGN_KEYDIR` directory, you will
+ have to set :term:`SPL_SIGN_KEYNAME` to ``dev``.
+
+ :term:`SPLASH`
+ This variable, used by the :ref:`ref-classes-image` class, allows
+ to choose splashscreen applications. Set it to the names of packages
+ for such applications to use. This variable is set by default to
+ ``psplash``.
+
+ :term:`SPLASH_IMAGES`
+ This variable, used by the ``psplash`` recipe, allows to customize
+ the default splashscreen image.
+
+ Specified images in PNG format are converted to ``.h`` files by the recipe,
+ and are included in the ``psplash`` binary, so you won't find them in
+ the root filesystem.
+
+ To make such a change, it is recommended to customize the
+ ``psplash`` recipe in a custom layer. Here is an example structure for
+ an ``ACME`` board::
+
+ meta-acme/recipes-core/psplash
+ ├── files
+ │   └── logo-acme.png
+ └── psplash_%.bbappend
+
+ And here are the contents of the ``psplash_%.bbappend`` file in
+ this example::
+
+ SPLASH_IMAGES = "file://logo-acme.png;outsuffix=default"
+ FILESEXTRAPATHS:prepend := "${THISDIR}/files:"
+
+ You could even add specific configuration options for ``psplash``,
+ for example::
+
+ EXTRA_OECONF += "--disable-startup-msg --enable-img-fullscreen"
+
+ For information on append files, see the
+ ":ref:`dev-manual/layers:appending other layers metadata with your layer`"
+ section.
+
+ :term:`SRCREV_FORMAT`
+ See :term:`bitbake:SRCREV_FORMAT` in the BitBake manual.
+
:term:`SRC_URI`
See the BitBake manual for the initial description for this variable:
@@ -7108,35 +8065,35 @@ system and gives an overview of their function and contents.
There are standard and recipe-specific options. Here are standard ones:
- - ``apply`` - Whether to apply the patch or not. The default
+ - ``apply`` --- whether to apply the patch or not. The default
action is to apply the patch.
- - ``striplevel`` - Which striplevel to use when applying the
+ - ``striplevel`` --- which striplevel to use when applying the
patch. The default level is 1.
- - ``patchdir`` - Specifies the directory in which the patch should
+ - ``patchdir`` --- specifies the directory in which the patch should
be applied. The default is ``${``\ :term:`S`\ ``}``.
Here are options specific to recipes building code from a revision
control system:
- - ``mindate`` - Apply the patch only if
+ - ``mindate`` --- apply the patch only if
:term:`SRCDATE` is equal to or greater than
``mindate``.
- - ``maxdate`` - Apply the patch only if :term:`SRCDATE` is not later
+ - ``maxdate`` --- apply the patch only if :term:`SRCDATE` is not later
than ``maxdate``.
- - ``minrev`` - Apply the patch only if :term:`SRCREV` is equal to or
+ - ``minrev`` --- apply the patch only if :term:`SRCREV` is equal to or
greater than ``minrev``.
- - ``maxrev`` - Apply the patch only if :term:`SRCREV` is not later
+ - ``maxrev`` --- apply the patch only if :term:`SRCREV` is not later
than ``maxrev``.
- - ``rev`` - Apply the patch only if :term:`SRCREV` is equal to
+ - ``rev`` --- apply the patch only if :term:`SRCREV` is equal to
``rev``.
- - ``notrev`` - Apply the patch only if :term:`SRCREV` is not equal to
+ - ``notrev`` --- apply the patch only if :term:`SRCREV` is not equal to
``rev``.
.. note::
@@ -7180,27 +8137,27 @@ system and gives an overview of their function and contents.
that if you want to build a fixed revision and you want to avoid
performing a query on the remote repository every time BitBake parses
your recipe, you should specify a :term:`SRCREV` that is a full revision
- identifier and not just a tag.
+ identifier (e.g. the full SHA hash in git) and not just a tag.
.. note::
For information on limitations when inheriting the latest revision
of software using :term:`SRCREV`, see the :term:`AUTOREV` variable
description and the
- ":ref:`dev-manual/common-tasks:automatically incrementing a package version number`"
+ ":ref:`dev-manual/packages:automatically incrementing a package version number`"
section, which is in the Yocto Project Development Tasks Manual.
:term:`SRCTREECOVEREDTASKS`
A list of tasks that are typically not relevant (and therefore skipped)
- when building using the :ref:`externalsrc <ref-classes-externalsrc>`
+ when building using the :ref:`ref-classes-externalsrc`
class. The default value as set in that class file is the set of tasks
that are rarely needed when using external source::
SRCTREECOVEREDTASKS ?= "do_patch do_unpack do_fetch"
The notable exception is when processing external kernel source as
- defined in the :ref:`kernel-yocto <ref-classes-kernel-yocto>`
- class file (formatted for aesthetics)::
+ defined in the :ref:`ref-classes-kernel-yocto` class file (formatted for
+ aesthetics)::
SRCTREECOVEREDTASKS += "\
do_validate_branches \
@@ -7217,6 +8174,32 @@ system and gives an overview of their function and contents.
:term:`SSTATE_DIR`
The directory for the shared state cache.
+ :term:`SSTATE_EXCLUDEDEPS_SYSROOT`
+ This variable allows to specify indirect dependencies to exclude
+ from sysroots, for example to avoid the situations when a dependency on
+ any ``-native`` recipe will pull in all dependencies of that recipe
+ in the recipe sysroot. This behaviour might not always be wanted,
+ for example when that ``-native`` recipe depends on build tools
+ that are not relevant for the current recipe.
+
+ This way, irrelevant dependencies are ignored, which could have
+ prevented the reuse of prebuilt artifacts stored in the Shared
+ State Cache.
+
+ :term:`SSTATE_EXCLUDEDEPS_SYSROOT` is evaluated as two regular
+ expressions of recipe and dependency to ignore. An example
+ is the rule in :oe_git:`meta/conf/layer.conf </openembedded-core/tree/meta/conf/layer.conf>`::
+
+ # Nothing needs to depend on libc-initial
+ # base-passwd/shadow-sysroot don't need their dependencies
+ SSTATE_EXCLUDEDEPS_SYSROOT += "\
+ .*->.*-initial.* \
+ .*(base-passwd|shadow-sysroot)->.* \
+ "
+
+ The ``->`` substring represents the dependency between
+ the two regular expressions.
+
:term:`SSTATE_MIRROR_ALLOW_NETWORK`
If set to "1", allows fetches from mirrors that are specified in
:term:`SSTATE_MIRRORS` to work even when
@@ -7242,23 +8225,31 @@ system and gives an overview of their function and contents.
a different GCC version for native builds, you must configure
:term:`SSTATE_MIRRORS` with a regular expression that maps local search
paths to server paths. The paths need to take into account
- :term:`NATIVELSBSTRING` set by the
- :ref:`uninative <ref-classes-uninative>` class. For example, the
- following maps the local search path ``universal-4.9`` to the
- server-provided path server_url_sstate_path::
+ :term:`NATIVELSBSTRING` set by the :ref:`ref-classes-uninative` class.
+ For example, the following maps the local search path ``universal-4.9``
+ to the server-provided path server_url_sstate_path::
SSTATE_MIRRORS ?= "file://universal-4.9/(.*) https://server_url_sstate_path/universal-4.8/\1"
If a mirror uses the same structure as
:term:`SSTATE_DIR`, you need to add "PATH" at the
end as shown in the examples below. The build system substitutes the
- correct path within the directory structure.
- ::
+ correct path within the directory structure::
SSTATE_MIRRORS ?= "\
file://.* https://someserver.tld/share/sstate/PATH;downloadfilename=PATH \
file://.* file:///some-local-dir/sstate/PATH"
+ The Yocto Project actually shares the cache data objects built by its
+ autobuilder::
+
+ SSTATE_MIRRORS ?= "file://.* http://cdn.jsdelivr.net/yocto/sstate/all/PATH;downloadfilename=PATH"
+
+ As such binary artifacts are built for the generic QEMU machines
+ supported by the various Poky releases, they are less likely to be
+ reusable in real projects building binaries optimized for a specific
+ CPU family.
+
:term:`SSTATE_SCAN_FILES`
Controls the list of files the OpenEmbedded build system scans for
hardcoded installation paths. The variable uses a space-separated
@@ -7272,11 +8263,9 @@ system and gives an overview of their function and contents.
by the :term:`SSTATE_SCAN_FILES` variable. Typically, recipes add files
they want to be scanned to the value of :term:`SSTATE_SCAN_FILES` rather
than the variable being comprehensively set. The
- :ref:`sstate <ref-classes-sstate>` class specifies the default list
- of files.
+ :ref:`ref-classes-sstate` class specifies the default list of files.
- For details on the process, see the
- :ref:`staging <ref-classes-staging>` class.
+ For details on the process, see the :ref:`ref-classes-staging` class.
:term:`STAGING_BASE_LIBDIR_NATIVE`
Specifies the path to the ``/lib`` subdirectory of the sysroot
@@ -7345,7 +8334,7 @@ system and gives an overview of their function and contents.
For most recipes, this sysroot is the one in which that recipe's
:ref:`ref-tasks-populate_sysroot` task copies
files. Exceptions include ``-native`` recipes, where the
- ``do_populate_sysroot`` task instead uses
+ :ref:`ref-tasks-populate_sysroot` task instead uses
:term:`STAGING_DIR_NATIVE`. Depending on
the type of recipe and the build target, :term:`STAGING_DIR_HOST` can
have the following values:
@@ -7377,20 +8366,25 @@ system and gives an overview of their function and contents.
for ``-native`` recipes, as they make use of host headers and
libraries.
+ Check :term:`RECIPE_SYSROOT` and :term:`RECIPE_SYSROOT_NATIVE`.
+
:term:`STAGING_DIR_NATIVE`
Specifies the path to the sysroot directory used when building
components that run on the build host itself.
+ The default value is ``"${RECIPE_SYSROOT_NATIVE}"``,
+ check :term:`RECIPE_SYSROOT_NATIVE`.
+
:term:`STAGING_DIR_TARGET`
Specifies the path to the sysroot used for the system for which the
component generates code. For components that do not generate code,
which is the majority, :term:`STAGING_DIR_TARGET` is set to match
:term:`STAGING_DIR_HOST`.
- Some recipes build binaries that can run on the target system but
- those binaries in turn generate code for another different system
- (e.g. cross-canadian recipes). Using terminology from GNU, the
- primary system is referred to as the "HOST" and the secondary, or
+ Some recipes build binaries that can run on the target system but those
+ binaries in turn generate code for another different system (e.g.
+ :ref:`ref-classes-cross-canadian` recipes). Using terminology from GNU,
+ the primary system is referred to as the "HOST" and the secondary, or
different, system is referred to as the "TARGET". Thus, the binaries
run on the "HOST" system and generate binaries for the "TARGET"
system. The :term:`STAGING_DIR_HOST` variable points to the sysroot used
@@ -7455,6 +8449,9 @@ system and gives an overview of their function and contents.
:term:`PV`, and :term:`PR` for related variable
information.
+ :term:`STAMPCLEAN`
+ See :term:`bitbake:STAMPCLEAN` in the BitBake manual.
+
:term:`STAMPS_DIR`
Specifies the base directory in which the OpenEmbedded build system
places stamps. The default directory is ``${TMPDIR}/stamps``.
@@ -7481,7 +8478,7 @@ system and gives an overview of their function and contents.
SYSLINUX_DEFAULT_CONSOLE = "console=ttyX"
- The :ref:`syslinux <ref-classes-syslinux>` class initially sets
+ The :ref:`ref-classes-syslinux` class initially sets
this variable to null but then checks for a value later.
:term:`SYSLINUX_OPTS`
@@ -7489,14 +8486,14 @@ system and gives an overview of their function and contents.
this variable in your recipe. If you want to list multiple options,
separate the options with a semicolon character (``;``).
- The :ref:`syslinux <ref-classes-syslinux>` class uses this variable
+ The :ref:`ref-classes-syslinux` class uses this variable
to create a set of options.
:term:`SYSLINUX_SERIAL`
Specifies the alternate serial port or turns it off. To turn off
serial, set this variable to an empty string in your recipe. The
variable's default value is set in the
- :ref:`syslinux <ref-classes-syslinux>` class as follows::
+ :ref:`ref-classes-syslinux` class as follows::
SYSLINUX_SERIAL ?= "0 115200"
@@ -7504,8 +8501,8 @@ system and gives an overview of their function and contents.
:term:`SYSLINUX_SERIAL_TTY`
Specifies the alternate console=tty... kernel boot argument. The
- variable's default value is set in the
- :ref:`syslinux <ref-classes-syslinux>` class as follows::
+ variable's default value is set in the :ref:`ref-classes-syslinux`
+ class as follows::
SYSLINUX_SERIAL_TTY ?= "console=ttyS0,115200"
@@ -7515,7 +8512,7 @@ system and gives an overview of their function and contents.
An ``.LSS`` file used as the background for the VGA boot menu when
you use the boot menu. You need to set this variable in your recipe.
- The :ref:`syslinux <ref-classes-syslinux>` class checks for this
+ The :ref:`ref-classes-syslinux` class checks for this
variable and if found, the OpenEmbedded build system installs the
splash screen.
@@ -7562,6 +8559,35 @@ system and gives an overview of their function and contents.
${libdir}/${BPN}/ptest \
"
+ Consider the following example in which you need to manipulate this variable.
+ Assume you have a recipe ``A`` that provides a shared library ``.so.*`` that is
+ installed into a custom folder other than "``${libdir}``"
+ or "``${base_libdir}``", let's say "``/opt/lib``".
+
+ .. note::
+
+ This is not a recommended way to deal with shared libraries, but this
+ is just to show the usefulness of setting :term:`SYSROOT_DIRS`.
+
+ When a recipe ``B`` :term:`DEPENDS` on ``A``, it means what is in
+ :term:`SYSROOT_DIRS` will be copied from :term:`D` of the recipe ``B``
+ into ``B``'s :term:`SYSROOT_DESTDIR` that is "``${WORKDIR}/sysroot-destdir``".
+
+ Now, since ``/opt/lib`` is not in :term:`SYSROOT_DIRS`, it will never be copied to
+ ``A``'s :term:`RECIPE_SYSROOT`, which is "``${WORKDIR}/recipe-sysroot``". So,
+ the linking process will fail.
+
+ To fix this, you need to add ``/opt/lib`` to :term:`SYSROOT_DIRS`::
+
+ SYSROOT_DIRS:append = " /opt/lib"
+
+ .. note::
+ Even after setting ``/opt/lib`` to :term:`SYSROOT_DIRS`, the linking process will still fail
+ because the linker does not know that location, since :term:`TARGET_LDFLAGS`
+ doesn't contain it (if your recipe is for the target). Therefore, so you should add::
+
+ TARGET_LDFLAGS:append = " -L${RECIPE_SYSROOT}/opt/lib"
+
:term:`SYSROOT_DIRS_NATIVE`
Extra directories staged into the sysroot by the
:ref:`ref-tasks-populate_sysroot` task for
@@ -7591,12 +8617,12 @@ system and gives an overview of their function and contents.
processing on the staged files, or to stage additional files.
:term:`SYSTEMD_AUTO_ENABLE`
- When inheriting the :ref:`systemd <ref-classes-systemd>` class,
+ When inheriting the :ref:`ref-classes-systemd` class,
this variable specifies whether the specified service in
:term:`SYSTEMD_SERVICE` should start
automatically or not. By default, the service is enabled to
automatically start at boot time. The default setting is in the
- :ref:`systemd <ref-classes-systemd>` class as follows::
+ :ref:`ref-classes-systemd` class as follows::
SYSTEMD_AUTO_ENABLE ??= "enable"
@@ -7606,10 +8632,10 @@ system and gives an overview of their function and contents.
When :term:`EFI_PROVIDER` is set to
"systemd-boot", the :term:`SYSTEMD_BOOT_CFG` variable specifies the
configuration file that should be used. By default, the
- :ref:`systemd-boot <ref-classes-systemd-boot>` class sets the
+ :ref:`ref-classes-systemd-boot` class sets the
:term:`SYSTEMD_BOOT_CFG` as follows::
- SYSTEMD_BOOT_CFG ?= "${:term:`S`}/loader.conf"
+ SYSTEMD_BOOT_CFG ?= "${S}/loader.conf"
For information on Systemd-boot, see the `Systemd-boot
documentation <https://www.freedesktop.org/wiki/Software/systemd/systemd-boot/>`__.
@@ -7618,9 +8644,8 @@ system and gives an overview of their function and contents.
When :term:`EFI_PROVIDER` is set to
"systemd-boot", the :term:`SYSTEMD_BOOT_ENTRIES` variable specifies a
list of entry files (``*.conf``) to install that contain one boot
- entry per file. By default, the
- :ref:`systemd-boot <ref-classes-systemd-boot>` class sets the
- :term:`SYSTEMD_BOOT_ENTRIES` as follows::
+ entry per file. By default, the :ref:`ref-classes-systemd-boot` class
+ sets the :term:`SYSTEMD_BOOT_ENTRIES` as follows::
SYSTEMD_BOOT_ENTRIES ?= ""
@@ -7631,7 +8656,7 @@ system and gives an overview of their function and contents.
When :term:`EFI_PROVIDER` is set to
"systemd-boot", the :term:`SYSTEMD_BOOT_TIMEOUT` variable specifies the
boot menu timeout in seconds. By default, the
- :ref:`systemd-boot <ref-classes-systemd-boot>` class sets the
+ :ref:`ref-classes-systemd-boot` class sets the
:term:`SYSTEMD_BOOT_TIMEOUT` as follows::
SYSTEMD_BOOT_TIMEOUT ?= "10"
@@ -7639,8 +8664,25 @@ system and gives an overview of their function and contents.
For information on Systemd-boot, see the `Systemd-boot
documentation <https://www.freedesktop.org/wiki/Software/systemd/systemd-boot/>`__.
+ :term:`SYSTEMD_DEFAULT_TARGET`
+
+ This variable allows to set the default unit that systemd starts at bootup.
+ Usually, this is either ``multi-user.target`` or ``graphical.target``.
+ This works by creating a ``default.target`` symbolic link to the chosen systemd
+ target file.
+
+ See `systemd's documentation
+ <https://www.freedesktop.org/software/systemd/man/systemd.special.html>`__
+ for details.
+
+ For example, this variable is used in the :oe_git:`core-image-minimal-xfce.bb
+ </meta-openembedded/tree/meta-xfce/recipes-core/images/core-image-minimal-xfce.bb>`
+ recipe::
+
+ SYSTEMD_DEFAULT_TARGET = "graphical.target"
+
:term:`SYSTEMD_PACKAGES`
- When inheriting the :ref:`systemd <ref-classes-systemd>` class,
+ When inheriting the :ref:`ref-classes-systemd` class,
this variable locates the systemd unit files when they are not found
in the main recipe's package. By default, the :term:`SYSTEMD_PACKAGES`
variable is set such that the systemd unit files are assumed to
@@ -7653,22 +8695,26 @@ system and gives an overview of their function and contents.
the build system can find the systemd unit files.
:term:`SYSTEMD_SERVICE`
- When inheriting the :ref:`systemd <ref-classes-systemd>` class,
+ When inheriting the :ref:`ref-classes-systemd` class,
this variable specifies the systemd service name for a package.
+ Multiple services can be specified, each one separated by a space.
+
When you specify this file in your recipe, use a package name
override to indicate the package to which the value applies. Here is
an example from the connman recipe::
SYSTEMD_SERVICE:${PN} = "connman.service"
+ The package overrides that can be specified are directly related to the value of
+ :term:`SYSTEMD_PACKAGES`. Overrides not included in :term:`SYSTEMD_PACKAGES`
+ will be silently ignored.
+
:term:`SYSVINIT_ENABLED_GETTYS`
- When using
- :ref:`SysVinit <dev-manual/common-tasks:enabling system services>`,
+ When using :ref:`SysVinit <dev-manual/new-recipe:enabling system services>`,
specifies a space-separated list of the virtual terminals that should
- run a `getty <https://en.wikipedia.org/wiki/Getty_%28Unix%29>`__
- (allowing login), assuming :term:`USE_VT` is not set to
- "0".
+ run a :wikipedia:`getty <Getty_(Unix)>` (allowing login), assuming
+ :term:`USE_VT` is not set to "0".
The default value for :term:`SYSVINIT_ENABLED_GETTYS` is "1" (i.e. only
run a getty on the first virtual terminal).
@@ -7767,6 +8813,10 @@ system and gives an overview of their function and contents.
value so that executables built using the SDK also have the flags
applied.
+ :term:`TARGET_DBGSRC_DIR`
+ Specifies the target path to debug source files. The default is
+ ``/usr/src/debug/${PN}/${PV}``.
+
:term:`TARGET_FPU`
Specifies the method for handling FPU code. For FPU-less targets,
which include most ARM CPUs, the variable must be set to "soft". If
@@ -7810,8 +8860,8 @@ system and gives an overview of their function and contents.
- For native recipes, the build system sets the variable to the
value of :term:`BUILD_PREFIX`.
- - For native SDK recipes (``nativesdk``), the build system sets the
- variable to the value of :term:`SDK_PREFIX`.
+ - For native SDK recipes (:ref:`ref-classes-nativesdk`),
+ the build system sets the variable to the value of :term:`SDK_PREFIX`.
:term:`TARGET_SYS`
Specifies the system, including the architecture and the operating
@@ -7845,12 +8895,11 @@ system and gives an overview of their function and contents.
You can select "glibc", "musl", "newlib", or "baremetal".
:term:`TCLIBCAPPEND`
- Specifies a suffix to be appended onto the
- :term:`TMPDIR` value. The suffix identifies the
- ``libc`` variant for building. When you are building for multiple
- variants with the same :term:`Build Directory`, this
- mechanism ensures that output for different ``libc`` variants is kept
- separate to avoid potential conflicts.
+ Specifies a suffix to be appended onto the :term:`TMPDIR` value. The
+ suffix identifies the ``libc`` variant for building. When you are
+ building for multiple variants with the same :term:`Build Directory`,
+ this mechanism ensures that output for different ``libc`` variants is
+ kept separate to avoid potential conflicts.
In the ``defaultsetup.conf`` file, the default value of
:term:`TCLIBCAPPEND` is "-${TCLIBC}". However, distros such as poky,
@@ -7871,12 +8920,10 @@ system and gives an overview of their function and contents.
If :term:`TCMODE` is set to a value other than "default", then it is your
responsibility to ensure that the toolchain is compatible with the
default toolchain. Using older or newer versions of these
- components might cause build problems. See the Release Notes for
- the Yocto Project release for the specific components with which
- the toolchain must be compatible. To access the Release Notes, go
- to the :yocto_home:`Downloads </software-overview/downloads>`
- page on the Yocto Project website and click on the "RELEASE
- INFORMATION" link for the appropriate release.
+ components might cause build problems. See
+ :doc:`Release Information </migration-guides/index>` for your
+ version of the Yocto Project, to find the specific components with
+ which the toolchain must be compatible.
The :term:`TCMODE` variable is similar to :term:`TCLIBC`,
which controls the variant of the GNU standard C library (``libc``)
@@ -7889,16 +8936,26 @@ system and gives an overview of their function and contents.
https://github.com/MentorEmbedded/meta-sourcery/.
The layer's ``README`` file contains information on how to use the
- Sourcery G++ Toolchain as an external toolchain. In summary, you must
- be sure to add the layer to your ``bblayers.conf`` file in front of
- the ``meta`` layer and then set the ``EXTERNAL_TOOLCHAIN`` variable
- in your ``local.conf`` file to the location in which you installed
- the toolchain.
+ Sourcery G++ Toolchain as an external toolchain. You will have to
+ add the layer to your ``bblayers.conf`` file and then set the
+ :term:`EXTERNAL_TOOLCHAIN` variable in your ``local.conf`` file to
+ the location of the toolchain.
The fundamentals used for this example apply to any external
toolchain. You can use ``meta-sourcery`` as a template for adding
support for other external toolchains.
+ In addition to toolchain configuration, you will also need a
+ corresponding toolchain recipe file. This recipe file needs to package
+ up any pre-built objects in the toolchain such as ``libgcc``,
+ ``libstdcc++``, any locales, and ``libc``.
+
+ :term:`TC_CXX_RUNTIME`
+ Specifies the C/C++ STL and runtime variant to use during
+ the build process. Default value is 'gnu'
+
+ You can select "gnu", "llvm", or "android".
+
:term:`TEMPLATECONF`
Specifies the directory used by the build system to find templates
from which to build the ``bblayers.conf`` and ``local.conf`` files.
@@ -7906,7 +8963,7 @@ system and gives an overview of their function and contents.
BitBake targets shown when sourcing the ``oe-init-build-env`` script.
For details, see the
- :ref:`dev-manual/common-tasks:creating a custom template configuration directory`
+ :ref:`dev-manual/custom-template-configuration-directory:creating a custom template configuration directory`
section in the Yocto Project Development Tasks manual.
.. note::
@@ -7961,7 +9018,7 @@ system and gives an overview of their function and contents.
file.
For more information on testing images, see the
- ":ref:`dev-manual/common-tasks:performing automated runtime testing`"
+ ":ref:`dev-manual/runtime-testing:performing automated runtime testing`"
section in the Yocto Project Development Tasks Manual.
:term:`TEST_SERIALCONTROL_CMD`
@@ -8016,8 +9073,7 @@ system and gives an overview of their function and contents.
Alternatively, you can
provide the "auto" option to have all applicable tests run against
- the image.
- ::
+ the image::
TEST_SUITES:append = " auto"
@@ -8034,7 +9090,7 @@ system and gives an overview of their function and contents.
TEST_SUITES = "test_A test_B"
For more information on testing images, see the
- ":ref:`dev-manual/common-tasks:performing automated runtime testing`"
+ ":ref:`dev-manual/runtime-testing:performing automated runtime testing`"
section in the Yocto Project Development Tasks Manual.
:term:`TEST_TARGET`
@@ -8053,7 +9109,7 @@ system and gives an overview of their function and contents.
You can provide the following arguments with :term:`TEST_TARGET`:
- *"qemu":* Boots a QEMU image and runs the tests. See the
- ":ref:`dev-manual/common-tasks:enabling runtime tests on qemu`" section
+ ":ref:`dev-manual/runtime-testing:enabling runtime tests on qemu`" section
in the Yocto Project Development Tasks Manual for more
information.
@@ -8069,7 +9125,7 @@ system and gives an overview of their function and contents.
``meta/lib/oeqa/controllers/simpleremote.py``.
For information on running tests on hardware, see the
- ":ref:`dev-manual/common-tasks:enabling runtime tests on hardware`"
+ ":ref:`dev-manual/runtime-testing:enabling runtime tests on hardware`"
section in the Yocto Project Development Tasks Manual.
:term:`TEST_TARGET_IP`
@@ -8106,9 +9162,9 @@ system and gives an overview of their function and contents.
For more information
on enabling, running, and writing these tests, see the
- ":ref:`dev-manual/common-tasks:performing automated runtime testing`"
+ ":ref:`dev-manual/runtime-testing:performing automated runtime testing`"
section in the Yocto Project Development Tasks Manual and the
- ":ref:`ref-classes-testimage*`" section.
+ ":ref:`ref-classes-testimage`" section.
:term:`THISDIR`
The directory in which the file BitBake is currently parsing is
@@ -8132,7 +9188,7 @@ system and gives an overview of their function and contents.
#TMPDIR = "${TOPDIR}/tmp"
An example use for this scenario is to set :term:`TMPDIR` to a local disk,
- which does not use NFS, while having the Build Directory use NFS.
+ which does not use NFS, while having the :term:`Build Directory` use NFS.
The filesystem used by :term:`TMPDIR` must have standard filesystem
semantics (i.e. mixed-case files are unique, POSIX file locking, and
@@ -8165,7 +9221,7 @@ system and gives an overview of their function and contents.
:doc:`/sdk-manual/index` manual.
Note that this variable applies to building an SDK, not an eSDK,
- in which case the term:`TOOLCHAIN_HOST_TASK_ESDK` setting should be
+ in which case the :term:`TOOLCHAIN_HOST_TASK_ESDK` setting should be
used instead.
:term:`TOOLCHAIN_HOST_TASK_ESDK`
@@ -8173,6 +9229,16 @@ system and gives an overview of their function and contents.
portion of an eSDK. This is similar to :term:`TOOLCHAIN_HOST_TASK`
applying to SDKs.
+ :term:`TOOLCHAIN_OPTIONS`
+ This variable holds extra options passed to the compiler and the linker
+ for non ``-native`` recipes as they have to point to their custom
+ ``sysroot`` folder pointed to by :term:`RECIPE_SYSROOT`::
+
+ TOOLCHAIN_OPTIONS = " --sysroot=${RECIPE_SYSROOT}"
+
+ Native recipes don't need this variable to be set, as they are
+ built for the host machine with the native compiler.
+
:term:`TOOLCHAIN_OUTPUTNAME`
This variable defines the name used for the toolchain output. The
:ref:`populate_sdk_base <ref-classes-populate-sdk-*>` class sets
@@ -8202,6 +9268,9 @@ system and gives an overview of their function and contents.
information on setting up a cross-development environment, see the
:doc:`/sdk-manual/index` manual.
+ :term:`TOPDIR`
+ See :term:`bitbake:TOPDIR` in the BitBake manual.
+
:term:`TRANSLATED_TARGET_ARCH`
A sanitized version of :term:`TARGET_ARCH`. This
variable is used where the architecture is needed in a value where
@@ -8342,51 +9411,152 @@ system and gives an overview of their function and contents.
See the machine include files in the :term:`Source Directory`
for these features.
- :term:`UBOOT_CONFIG`
- Configures the :term:`UBOOT_MACHINE` and can
- also define :term:`IMAGE_FSTYPES` for individual
- cases.
-
- Following is an example from the ``meta-fsl-arm`` layer. ::
+ :term:`UBOOT_BINARY`
+ Specifies the name of the binary build by U-Boot.
- UBOOT_CONFIG ??= "sd"
- UBOOT_CONFIG[sd] = "mx6qsabreauto_config,sdcard"
- UBOOT_CONFIG[eimnor] = "mx6qsabreauto_eimnor_config"
- UBOOT_CONFIG[nand] = "mx6qsabreauto_nand_config,ubifs"
- UBOOT_CONFIG[spinor] = "mx6qsabreauto_spinor_config"
-
- In this example, "sd" is selected as the configuration of the possible four for the
- :term:`UBOOT_MACHINE`. The "sd" configuration defines
- "mx6qsabreauto_config" as the value for :term:`UBOOT_MACHINE`, while the
- "sdcard" specifies the :term:`IMAGE_FSTYPES` to use for the U-Boot image.
+ :term:`UBOOT_CONFIG`
+ Configures one or more U-Boot configurations to build. Each
+ configuration can define the :term:`UBOOT_MACHINE` and optionally the
+ :term:`IMAGE_FSTYPES` and the :term:`UBOOT_BINARY`.
+
+ Here is an example from the ``meta-freescale`` layer. ::
+
+ UBOOT_CONFIG ??= "sdcard-ifc-secure-boot sdcard-ifc sdcard-qspi lpuart qspi secure-boot nor"
+ UBOOT_CONFIG[nor] = "ls1021atwr_nor_defconfig"
+ UBOOT_CONFIG[sdcard-ifc] = "ls1021atwr_sdcard_ifc_defconfig,,u-boot-with-spl-pbl.bin"
+ UBOOT_CONFIG[sdcard-qspi] = "ls1021atwr_sdcard_qspi_defconfig,,u-boot-with-spl-pbl.bin"
+ UBOOT_CONFIG[lpuart] = "ls1021atwr_nor_lpuart_defconfig"
+ UBOOT_CONFIG[qspi] = "ls1021atwr_qspi_defconfig"
+ UBOOT_CONFIG[secure-boot] = "ls1021atwr_nor_SECURE_BOOT_defconfig"
+ UBOOT_CONFIG[sdcard-ifc-secure-boot] = "ls1021atwr_sdcard_ifc_SECURE_BOOT_defconfig,,u-boot-with-spl-pbl.bin"
+
+ In this example, all possible seven configurations are selected. Each
+ configuration specifies "..._defconfig" as :term:`UBOOT_MACHINE`, and
+ the "sd..." configurations define an individual name for
+ :term:`UBOOT_BINARY`. No configuration defines a second parameter for
+ :term:`IMAGE_FSTYPES` to use for the U-Boot image.
For more information on how the :term:`UBOOT_CONFIG` is handled, see the
- :ref:`uboot-config <ref-classes-uboot-config>`
- class.
+ :ref:`ref-classes-uboot-config` class.
:term:`UBOOT_DTB_LOADADDRESS`
Specifies the load address for the dtb image used by U-Boot. During FIT
image creation, the :term:`UBOOT_DTB_LOADADDRESS` variable is used in
- :ref:`kernel-fitimage <ref-classes-kernel-fitimage>` class to specify
- the load address to be used in
- creating the dtb sections of Image Tree Source for the FIT image.
+ :ref:`ref-classes-kernel-fitimage` class to specify the load address to be
+ used in creating the dtb sections of Image Tree Source for the FIT image.
:term:`UBOOT_DTBO_LOADADDRESS`
Specifies the load address for the dtbo image used by U-Boot. During FIT
image creation, the :term:`UBOOT_DTBO_LOADADDRESS` variable is used in
- :ref:`kernel-fitimage <ref-classes-kernel-fitimage>` class to specify the load address to be used in
- creating the dtbo sections of Image Tree Source for the FIT image.
+ :ref:`ref-classes-kernel-fitimage` class to specify the load address to be
+ used in creating the dtbo sections of Image Tree Source for the FIT image.
:term:`UBOOT_ENTRYPOINT`
Specifies the entry point for the U-Boot image. During U-Boot image
creation, the :term:`UBOOT_ENTRYPOINT` variable is passed as a
command-line parameter to the ``uboot-mkimage`` utility.
+ To pass a 64 bit address for FIT image creation, you will need to set:
+ - The :term:`FIT_ADDRESS_CELLS` variable for FIT image creation.
+ - The :term:`UBOOT_FIT_ADDRESS_CELLS` variable for U-Boot FIT image creation.
+
+ This variable is used by the :ref:`ref-classes-kernel-fitimage`,
+ :ref:`ref-classes-kernel-uimage`, :ref:`ref-classes-kernel`,
+ :ref:`ref-classes-uboot-config` and :ref:`ref-classes-uboot-sign`
+ classes.
+
+ :term:`UBOOT_FIT_ADDRESS_CELLS`
+ Specifies the value of the ``#address-cells`` value for the
+ description of the U-Boot FIT image.
+
+ The default value is set to "1" by the :ref:`ref-classes-uboot-sign`
+ class, which corresponds to 32 bit addresses.
+
+ For platforms that need to set 64 bit addresses in
+ :term:`UBOOT_LOADADDRESS` and :term:`UBOOT_ENTRYPOINT`, you need to
+ set this value to "2", as two 32 bit values (cells) will be needed
+ to represent such addresses.
+
+ Here is an example setting "0x400000000" as a load address::
+
+ UBOOT_FIT_ADDRESS_CELLS = "2"
+ UBOOT_LOADADDRESS= "0x04 0x00000000"
+
+ See `more details about #address-cells <https://elinux.org/Device_Tree_Usage#How_Addressing_Works>`__.
+
+ :term:`UBOOT_FIT_DESC`
+ Specifies the description string encoded into a U-Boot fitImage. The default
+ value is set by the :ref:`ref-classes-uboot-sign` class as follows::
+
+ UBOOT_FIT_DESC ?= "U-Boot fitImage for ${DISTRO_NAME}/${PV}/${MACHINE}"
+
+ :term:`UBOOT_FIT_GENERATE_KEYS`
+ Decides whether to generate the keys for signing the U-Boot fitImage if
+ they don't already exist. The keys are created in :term:`SPL_SIGN_KEYDIR`.
+ The default value is "0".
+
+ Enable this as follows::
+
+ UBOOT_FIT_GENERATE_KEYS = "1"
+
+ This variable is used in the :ref:`ref-classes-uboot-sign` class.
+
+ :term:`UBOOT_FIT_HASH_ALG`
+ Specifies the hash algorithm used in creating the U-Boot FIT Image.
+ It is set by default to ``sha256`` by the :ref:`ref-classes-uboot-sign`
+ class.
+
+ :term:`UBOOT_FIT_KEY_GENRSA_ARGS`
+ Arguments to ``openssl genrsa`` for generating a RSA private key for
+ signing the U-Boot FIT image. The default value of this variable
+ is set to "-F4" by the :ref:`ref-classes-uboot-sign` class.
+
+ :term:`UBOOT_FIT_KEY_REQ_ARGS`
+ Arguments to ``openssl req`` for generating a certificate for signing
+ the U-Boot FIT image. The default value is "-batch -new" by the
+ :ref:`ref-classes-uboot-sign` class, "batch" for
+ non interactive mode and "new" for generating new keys.
+
+ :term:`UBOOT_FIT_KEY_SIGN_PKCS`
+ Format for the public key certificate used for signing the U-Boot FIT
+ image. The default value is set to "x509" by the
+ :ref:`ref-classes-uboot-sign` class.
+
+ :term:`UBOOT_FIT_SIGN_ALG`
+ Specifies the signature algorithm used in creating the U-Boot FIT Image.
+ This variable is set by default to "rsa2048" by the
+ :ref:`ref-classes-uboot-sign` class.
+
+ :term:`UBOOT_FIT_SIGN_NUMBITS`
+ Size of the private key used in signing the U-Boot FIT image, in number
+ of bits. The default value for this variable is set to "2048"
+ by the :ref:`ref-classes-uboot-sign` class.
+
+ :term:`UBOOT_FITIMAGE_ENABLE`
+ This variable allows to generate a FIT image for U-Boot, which is one
+ of the ways to implement a verified boot process.
+
+ Its default value is "0", so set it to "1" to enable this functionality::
+
+ UBOOT_FITIMAGE_ENABLE = "1"
+
+ See the :ref:`ref-classes-uboot-sign` class for details.
+
:term:`UBOOT_LOADADDRESS`
Specifies the load address for the U-Boot image. During U-Boot image
creation, the :term:`UBOOT_LOADADDRESS` variable is passed as a
command-line parameter to the ``uboot-mkimage`` utility.
+ To pass a 64 bit address, you will also need to set:
+
+ - The :term:`FIT_ADDRESS_CELLS` variable for FIT image creation.
+ - The :term:`UBOOT_FIT_ADDRESS_CELLS` variable for U-Boot FIT image creation.
+
+ This variable is used by the :ref:`ref-classes-kernel-fitimage`,
+ :ref:`ref-classes-kernel-uimage`, :ref:`ref-classes-kernel`,
+ :ref:`ref-classes-uboot-config` and :ref:`ref-classes-uboot-sign`
+ classes.
+
:term:`UBOOT_LOCALVERSION`
Appends a string to the name of the local version of the U-Boot
image. For example, assuming the version of the U-Boot image built
@@ -8410,56 +9580,65 @@ system and gives an overview of their function and contents.
:term:`UBOOT_MKIMAGE`
Specifies the name of the mkimage command as used by the
- :ref:`kernel-fitimage <ref-classes-kernel-fitimage>` class to assemble
+ :ref:`ref-classes-kernel-fitimage` class to assemble
the FIT image. This can be used to substitute an alternative command, wrapper
script or function if desired. The default is "uboot-mkimage".
:term:`UBOOT_MKIMAGE_DTCOPTS`
- Options for the device tree compiler passed to mkimage '-D'
- feature while creating FIT image in :ref:`kernel-fitimage <ref-classes-kernel-fitimage>` class.
- If :term:`UBOOT_MKIMAGE_DTCOPTS` is not set then kernel-fitimage will not
- pass the ``-D`` option to mkimage.
+ Options for the device tree compiler passed to ``mkimage -D`` feature
+ while creating a FIT image with the :ref:`ref-classes-kernel-fitimage`
+ class. If :term:`UBOOT_MKIMAGE_DTCOPTS` is not set then the
+ :ref:`ref-classes-kernel-fitimage` class will not pass the ``-D`` option
+ to ``mkimage``.
+
+ This variable is also used by the :ref:`ref-classes-uboot-sign` class.
+
+ :term:`UBOOT_MKIMAGE_KERNEL_TYPE`
+ Specifies the type argument for the kernel as passed to ``uboot-mkimage``.
+ The default value is "kernel".
:term:`UBOOT_MKIMAGE_SIGN`
Specifies the name of the mkimage command as used by the
- :ref:`kernel-fitimage <ref-classes-kernel-fitimage>` class to sign
+ :ref:`ref-classes-kernel-fitimage` class to sign
the FIT image after it has been assembled (if enabled). This can be used
to substitute an alternative command, wrapper script or function if
desired. The default is "${:term:`UBOOT_MKIMAGE`}".
:term:`UBOOT_MKIMAGE_SIGN_ARGS`
Optionally specifies additional arguments for the
- :ref:`kernel-fitimage <ref-classes-kernel-fitimage>` class to pass to the
+ :ref:`ref-classes-kernel-fitimage` class to pass to the
mkimage command when signing the FIT image.
:term:`UBOOT_RD_ENTRYPOINT`
- Specifies the entrypoint for the RAM disk image.
- During FIT image creation, the
- :term:`UBOOT_RD_ENTRYPOINT` variable is used
- in :ref:`kernel-fitimage <ref-classes-kernel-fitimage>` class to specify the
- entrypoint to be used in creating the Image Tree Source for
- the FIT image.
+ Specifies the entrypoint for the RAM disk image. During FIT image
+ creation, the :term:`UBOOT_RD_ENTRYPOINT` variable is used in
+ :ref:`ref-classes-kernel-fitimage` class to specify the entrypoint to be
+ used in creating the Image Tree Source for the FIT image.
:term:`UBOOT_RD_LOADADDRESS`
- Specifies the load address for the RAM disk image.
- During FIT image creation, the
- :term:`UBOOT_RD_LOADADDRESS` variable is used
- in :ref:`kernel-fitimage <ref-classes-kernel-fitimage>` class to specify the
- load address to be used in creating the Image Tree Source for
- the FIT image.
+ Specifies the load address for the RAM disk image. During FIT image
+ creation, the :term:`UBOOT_RD_LOADADDRESS` variable is used in
+ :ref:`ref-classes-kernel-fitimage` class to specify the load address to
+ be used in creating the Image Tree Source for the FIT image.
:term:`UBOOT_SIGN_ENABLE`
Enable signing of FIT image. The default value is "0".
+ This variable is used by the :ref:`ref-classes-kernel-fitimage`,
+ :ref:`ref-classes-uboot-config` and :ref:`ref-classes-uboot-sign`
+ classes.
+
:term:`UBOOT_SIGN_KEYDIR`
- Location of the directory containing the RSA key and
- certificate used for signing FIT image.
+ Location of the directory containing the RSA key and certificate used for
+ signing FIT image, used by the :ref:`ref-classes-kernel-fitimage` and
+ :ref:`ref-classes-uboot-sign` classes.
:term:`UBOOT_SIGN_KEYNAME`
- The name of keys used for signing U-Boot FIT image stored in
- :term:`UBOOT_SIGN_KEYDIR` directory. For e.g. dev.key key and dev.crt
- certificate stored in :term:`UBOOT_SIGN_KEYDIR` directory will have
- :term:`UBOOT_SIGN_KEYNAME` set to "dev".
+ The name of keys used by the :ref:`ref-classes-kernel-fitimage` class
+ for signing U-Boot FIT image stored in the :term:`UBOOT_SIGN_KEYDIR`
+ directory. If we have for example a ``dev.key`` key and a ``dev.crt``
+ certificate stored in the :term:`UBOOT_SIGN_KEYDIR` directory, you will
+ have to set :term:`UBOOT_SIGN_KEYNAME` to ``dev``.
:term:`UBOOT_SUFFIX`
Points to the generated U-Boot extension. For example, ``u-boot.sb``
@@ -8488,12 +9667,12 @@ system and gives an overview of their function and contents.
The configure arguments check that uses
:term:`UNKNOWN_CONFIGURE_OPT_IGNORE` is part of the
- :ref:`insane <ref-classes-insane>` class and is only enabled if the
- recipe inherits the :ref:`autotools <ref-classes-autotools>` class.
+ :ref:`ref-classes-insane` class and is only enabled if the
+ recipe inherits the :ref:`ref-classes-autotools` class.
:term:`UPDATERCPN`
For recipes inheriting the
- :ref:`update-rc.d <ref-classes-update-rc.d>` class, :term:`UPDATERCPN`
+ :ref:`ref-classes-update-rc.d` class, :term:`UPDATERCPN`
specifies the package that contains the initscript that is enabled.
The default value is "${PN}". Given that almost all recipes that
@@ -8507,8 +9686,7 @@ system and gives an overview of their function and contents.
releases are not identified by Git tags, set :term:`UPSTREAM_CHECK_COMMITS`
to ``1`` in the recipe, and the OpenEmbedded build system
will compare the latest commit with the one currently specified
- by the recipe (:term:`SRCREV`).
- ::
+ by the recipe (:term:`SRCREV`)::
UPSTREAM_CHECK_COMMITS = "1"
@@ -8521,8 +9699,7 @@ system and gives an overview of their function and contents.
You can use the :term:`UPSTREAM_CHECK_GITTAGREGEX` variable to provide a
regular expression to filter only the relevant tags should the
- default filter not work correctly.
- ::
+ default filter not work correctly::
UPSTREAM_CHECK_GITTAGREGEX = "git_tag_regex"
@@ -8530,8 +9707,7 @@ system and gives an overview of their function and contents.
Use the :term:`UPSTREAM_CHECK_REGEX` variable to specify a different
regular expression instead of the default one when the package
checking system is parsing the page found using
- :term:`UPSTREAM_CHECK_URI`.
- ::
+ :term:`UPSTREAM_CHECK_URI`::
UPSTREAM_CHECK_REGEX = "package_regex"
@@ -8542,8 +9718,7 @@ system and gives an overview of their function and contents.
determined by fetching the directory listing where the tarball is and
attempting to find a later tarball. When this approach does not work,
you can use :term:`UPSTREAM_CHECK_URI` to provide a different URI that
- contains the link to the latest tarball.
- ::
+ contains the link to the latest tarball::
UPSTREAM_CHECK_URI = "recipe_url"
@@ -8554,8 +9729,7 @@ system and gives an overview of their function and contents.
:term:`UPSTREAM_CHECK_GITTAGREGEX` and :term:`UPSTREAM_CHECK_COMMITS` variables in
the recipe allows to determine what the latest upstream version is,
you can set :term:`UPSTREAM_VERSION_UNKNOWN` to ``1`` in the recipe
- to acknowledge that the check cannot be performed.
- ::
+ to acknowledge that the check cannot be performed::
UPSTREAM_VERSION_UNKNOWN = "1"
@@ -8565,16 +9739,15 @@ system and gives an overview of their function and contents.
specifically set. Typically, you would set :term:`USE_DEVFS` to "0" for a
statically populated ``/dev`` directory.
- See the ":ref:`dev-manual/common-tasks:selecting a device manager`" section in
+ See the ":ref:`dev-manual/device-manager:selecting a device manager`" section in
the Yocto Project Development Tasks Manual for information on how to
use this variable.
:term:`USE_VT`
When using
- :ref:`SysVinit <dev-manual/common-tasks:enabling system services>`,
- determines whether or not to run a
- `getty <https://en.wikipedia.org/wiki/Getty_%28Unix%29>`__ on any
- virtual terminals in order to enable logging in through those
+ :ref:`SysVinit <dev-manual/new-recipe:enabling system services>`,
+ determines whether or not to run a :wikipedia:`getty <Getty_(Unix)>`
+ on any virtual terminals in order to enable logging in through those
terminals.
The default value used for :term:`USE_VT` is "1" when no default value is
@@ -8587,12 +9760,16 @@ system and gives an overview of their function and contents.
A list of classes to globally inherit. These classes are used by the
OpenEmbedded build system to enable extra features.
+ Classes inherited using :term:`USER_CLASSES` must be located in the
+ ``classes-global/`` or ``classes/`` subdirectories.
+
The default list is set in your ``local.conf`` file::
USER_CLASSES ?= "buildstats"
For more information, see
- ``meta-poky/conf/local.conf.sample`` in the :term:`Source Directory`.
+ ``meta-poky/conf/templates/default/local.conf.sample`` in the
+ :term:`Source Directory`.
:term:`USERADD_ERROR_DYNAMIC`
If set to ``error``, forces the OpenEmbedded build system to produce
@@ -8648,7 +9825,7 @@ system and gives an overview of their function and contents.
causes the build system to use static ``gid`` values.
:term:`USERADD_PACKAGES`
- When inheriting the :ref:`useradd <ref-classes-useradd>` class,
+ When inheriting the :ref:`ref-classes-useradd` class,
this variable specifies the individual packages within the recipe
that require users and/or groups to be added.
@@ -8665,7 +9842,7 @@ system and gives an overview of their function and contents.
:term:`GROUPADD_PARAM`, or :term:`GROUPMEMS_PARAM` variables.
:term:`USERADD_PARAM`
- When inheriting the :ref:`useradd <ref-classes-useradd>` class,
+ When inheriting the :ref:`ref-classes-useradd` class,
this variable specifies for a package what parameters should pass to
the ``useradd`` command if you add a user to the system when the
package is installed.
@@ -8719,6 +9896,33 @@ system and gives an overview of their function and contents.
Additionally, you should also set the
:term:`USERADD_ERROR_DYNAMIC` variable.
+ :term:`VIRTUAL-RUNTIME`
+ :term:`VIRTUAL-RUNTIME` is a commonly used prefix for defining virtual
+ packages for runtime usage, typically for use in :term:`RDEPENDS`
+ or in image definitions.
+
+ An example is ``VIRTUAL-RUNTIME_base-utils`` that makes it possible
+ to either use BusyBox based utilities::
+
+ VIRTUAL-RUNTIME_base-utils = "busybox"
+
+ or their full featured implementations from GNU Coreutils
+ and other projects::
+
+ VIRTUAL-RUNTIME_base-utils = "packagegroup-core-base-utils"
+
+ Here are two examples using this virtual runtime package. The
+ first one is in :yocto_git:`initramfs-framework_1.0.bb
+ </poky/tree/meta/recipes-core/initrdscripts/initramfs-framework_1.0.bb?h=scarthgap>`::
+
+ RDEPENDS:${PN} += "${VIRTUAL-RUNTIME_base-utils}"
+
+ The second example is in the :yocto_git:`core-image-initramfs-boot
+ </poky/tree/meta/recipes-core/images/core-image-initramfs-boot.bb?h=scarthgap>`
+ image definition::
+
+ PACKAGE_INSTALL = "${INITRAMFS_SCRIPTS} ${VIRTUAL-RUNTIME_base-utils} base-passwd"
+
:term:`VOLATILE_LOG_DIR`
Specifies the persistence of the target's ``/var/log`` directory,
which is used to house postinstall target log files.
@@ -8727,6 +9931,18 @@ system and gives an overview of their function and contents.
file is not persistent. You can override this setting by setting the
variable to "no" to make the log directory persistent.
+ :term:`VOLATILE_TMP_DIR`
+ Specifies the persistence of the target's ``/tmp`` directory.
+
+ By default, :term:`VOLATILE_TMP_DIR` is set to "yes", in which case
+ ``/tmp`` links to a directory which resides in RAM in a ``tmpfs``
+ filesystem.
+
+ If instead, you want the ``/tmp`` directory to be persistent, set the
+ variable to "no" to make it a regular directory in the root filesystem.
+
+ This supports both sysvinit and systemd based systems.
+
:term:`WARN_QA`
Specifies the quality assurance checks whose failures are reported as
warnings by the OpenEmbedded build system. You set this variable in
@@ -8734,12 +9950,21 @@ system and gives an overview of their function and contents.
can control with this variable, see the
":ref:`ref-classes-insane`" section.
+ :term:`WATCHDOG_TIMEOUT`
+ Specifies the timeout in seconds used by the ``watchdog`` recipe and
+ also by ``systemd`` during reboot. The default is 60 seconds.
+
+ :term:`WIRELESS_DAEMON`
+ For ``connman`` and ``packagegroup-base``, specifies the wireless
+ daemon to use. The default is "wpa-supplicant" (note that the value
+ uses a dash and not an underscore).
+
:term:`WKS_FILE`
Specifies the location of the Wic kickstart file that is used by the
OpenEmbedded build system to create a partitioned image
(``image.wic``). For information on how to create a partitioned
image, see the
- ":ref:`dev-manual/common-tasks:creating partitioned images using wic`"
+ ":ref:`dev-manual/wic:creating partitioned images using wic`"
section in the Yocto Project Development Tasks Manual. For details on
the kickstart file format, see the ":doc:`/ref-manual/kickstart`" Chapter.
@@ -8759,7 +9984,7 @@ system and gives an overview of their function and contents.
With the :term:`WKS_FILE_DEPENDS` variable, you have the possibility to
specify a list of additional dependencies (e.g. native tools,
bootloaders, and so forth), that are required to build Wic images.
- Following is an example::
+ Here is an example::
WKS_FILE_DEPENDS = "some-native-tool"
@@ -8767,6 +9992,19 @@ system and gives an overview of their function and contents.
previous example, some-native-tool would be replaced with an actual
native tool on which the build would depend.
+ :term:`WKS_FILES`
+ Specifies a list of candidate Wic kickstart files to be used by the
+ OpenEmbedded build system to create a partitioned image. Only the
+ first one that is found, from left to right, will be used.
+
+ This is only useful when there are multiple ``.wks`` files that can be
+ used to produce an image. A typical case is when multiple layers are
+ used for different hardware platforms, each supplying a different
+ ``.wks`` file. In this case, you specify all possible ones through
+ :term:`WKS_FILES`.
+
+ If only one ``.wks`` file is used, set :term:`WKS_FILE` instead.
+
:term:`WORKDIR`
The pathname of the work directory in which the OpenEmbedded build
system builds a recipe. This directory is located within the
@@ -8782,13 +10020,13 @@ system and gives an overview of their function and contents.
- :term:`TMPDIR`: The top-level build output directory
- :term:`MULTIMACH_TARGET_SYS`: The target system identifier
- :term:`PN`: The recipe name
- - :term:`EXTENDPE`: The epoch - (if :term:`PE` is not specified, which
- is usually the case for most recipes, then `EXTENDPE` is blank)
+ - :term:`EXTENDPE`: The epoch --- if :term:`PE` is not specified, which
+ is usually the case for most recipes, then :term:`EXTENDPE` is blank.
- :term:`PV`: The recipe version
- :term:`PR`: The recipe revision
As an example, assume a Source Directory top-level folder name
- ``poky``, a default Build Directory at ``poky/build``, and a
+ ``poky``, a default :term:`Build Directory` at ``poky/build``, and a
``qemux86-poky-linux`` machine target system. Furthermore, suppose
your recipe is named ``foo_1.3.0-r0.bb``. In this case, the work
directory the build system uses to build the package would be as
@@ -8819,7 +10057,7 @@ system and gives an overview of their function and contents.
On systems where many tasks run in parallel, setting a limit to this
can be helpful in controlling system resource usage.
- :term:`XZ_MEMLIMIT`
+ :term:`XZ_MEMLIMIT`
Specifies the maximum memory the xz compression should use as a percentage
of system memory. If unconstrained the xz compressor can use large amounts of
memory and become problematic with parallelism elsewhere in the build.
@@ -8837,4 +10075,3 @@ system and gives an overview of their function and contents.
On systems where many tasks run in parallel, setting a limit to this
can be helpful in controlling system resource usage.
-
diff --git a/documentation/ref-manual/varlocality.rst b/documentation/ref-manual/varlocality.rst
index 5f7dba8775..e2c086ffa0 100644
--- a/documentation/ref-manual/varlocality.rst
+++ b/documentation/ref-manual/varlocality.rst
@@ -113,7 +113,7 @@ This section lists variables that are required for recipes.
- :term:`LIC_FILES_CHKSUM`
-- :term:`SRC_URI` - used in recipes that fetch local or remote files.
+- :term:`SRC_URI` --- used in recipes that fetch local or remote files.
.. _ref-varlocality-recipe-dependencies:
diff --git a/documentation/releases.rst b/documentation/releases.rst
deleted file mode 100644
index 0abe4fb215..0000000000
--- a/documentation/releases.rst
+++ /dev/null
@@ -1,248 +0,0 @@
-.. SPDX-License-Identifier: CC-BY-SA-2.0-UK
-
-..
- NOTE FOR RELEASE MAINTAINERS:
- This file only needs updating in the development release ("master" branch)
- When documentation for stable releases is built,
- the latest version from "master" is used
- by https://git.yoctoproject.org/yocto-autobuilder-helper/tree/scripts/run-docs-build
-
-===========================
- Supported Release Manuals
-===========================
-
-******************************
-Release Series 4.0 (kirkstone)
-******************************
-
-- :yocto_docs:`4.0 Documentation </4.0>`
-
-******************************
-Release Series 3.4 (honister)
-******************************
-
-- :yocto_docs:`3.4 Documentation </3.4>`
-- :yocto_docs:`3.4.1 Documentation </3.4.1>`
-- :yocto_docs:`3.4.2 Documentation </3.4.2>`
-- :yocto_docs:`3.4.3 Documentation </3.4.3>`
-- :yocto_docs:`3.4.4 Documentation </3.4.4>`
-
-******************************
-Release Series 3.3 (hardknott)
-******************************
-
-- :yocto_docs:`3.3 Documentation </3.3>`
-- :yocto_docs:`3.3.1 Documentation </3.3.1>`
-- :yocto_docs:`3.3.2 Documentation </3.3.2>`
-- :yocto_docs:`3.3.3 Documentation </3.3.3>`
-- :yocto_docs:`3.3.4 Documentation </3.3.4>`
-- :yocto_docs:`3.3.5 Documentation </3.3.5>`
-- :yocto_docs:`3.3.6 Documentation </3.3.6>`
-
-****************************
-Release Series 3.1 (dunfell)
-****************************
-
-- :yocto_docs:`3.1 Documentation </3.1>`
-- :yocto_docs:`3.1.1 Documentation </3.1.1>`
-- :yocto_docs:`3.1.2 Documentation </3.1.2>`
-- :yocto_docs:`3.1.3 Documentation </3.1.3>`
-- :yocto_docs:`3.1.4 Documentation </3.1.4>`
-- :yocto_docs:`3.1.5 Documentation </3.1.5>`
-- :yocto_docs:`3.1.6 Documentation </3.1.6>`
-- :yocto_docs:`3.1.7 Documentation </3.1.7>`
-- :yocto_docs:`3.1.8 Documentation </3.1.8>`
-- :yocto_docs:`3.1.9 Documentation </3.1.9>`
-- :yocto_docs:`3.1.10 Documentation </3.1.10>`
-- :yocto_docs:`3.1.11 Documentation </3.1.11>`
-- :yocto_docs:`3.1.12 Documentation </3.1.12>`
-- :yocto_docs:`3.1.13 Documentation </3.1.13>`
-- :yocto_docs:`3.1.14 Documentation </3.1.14>`
-- :yocto_docs:`3.1.15 Documentation </3.1.15>`
-- :yocto_docs:`3.1.16 Documentation </3.1.16>`
-
-==========================
- Outdated Release Manuals
-==========================
-
-*******************************
-Release Series 3.2 (gatesgarth)
-*******************************
-
-- :yocto_docs:`3.2 Documentation </3.2>`
-- :yocto_docs:`3.2.1 Documentation </3.2.1>`
-- :yocto_docs:`3.2.2 Documentation </3.2.2>`
-- :yocto_docs:`3.2.3 Documentation </3.2.3>`
-- :yocto_docs:`3.2.4 Documentation </3.2.4>`
-
-*************************
-Release Series 3.0 (zeus)
-*************************
-
-- :yocto_docs:`3.0 Documentation </3.0>`
-- :yocto_docs:`3.0.1 Documentation </3.0.1>`
-- :yocto_docs:`3.0.2 Documentation </3.0.2>`
-- :yocto_docs:`3.0.3 Documentation </3.0.3>`
-- :yocto_docs:`3.0.4 Documentation </3.0.4>`
-
-****************************
-Release Series 2.7 (warrior)
-****************************
-
-- :yocto_docs:`2.7 Documentation </2.7>`
-- :yocto_docs:`2.7.1 Documentation </2.7.1>`
-- :yocto_docs:`2.7.2 Documentation </2.7.2>`
-- :yocto_docs:`2.7.3 Documentation </2.7.3>`
-- :yocto_docs:`2.7.4 Documentation </2.7.4>`
-
-*************************
-Release Series 2.6 (thud)
-*************************
-
-- :yocto_docs:`2.6 Documentation </2.6>`
-- :yocto_docs:`2.6.1 Documentation </2.6.1>`
-- :yocto_docs:`2.6.2 Documentation </2.6.2>`
-- :yocto_docs:`2.6.3 Documentation </2.6.3>`
-- :yocto_docs:`2.6.4 Documentation </2.6.4>`
-
-*************************
-Release Series 2.5 (sumo)
-*************************
-
-- :yocto_docs:`2.5 Documentation </2.5>`
-- :yocto_docs:`2.5.1 Documentation </2.5.1>`
-- :yocto_docs:`2.5.2 Documentation </2.5.2>`
-- :yocto_docs:`2.5.3 Documentation </2.5.3>`
-
-**************************
-Release Series 2.4 (rocko)
-**************************
-
-- :yocto_docs:`2.4 Documentation </2.4>`
-- :yocto_docs:`2.4.1 Documentation </2.4.1>`
-- :yocto_docs:`2.4.2 Documentation </2.4.2>`
-- :yocto_docs:`2.4.3 Documentation </2.4.3>`
-- :yocto_docs:`2.4.4 Documentation </2.4.4>`
-
-*************************
-Release Series 2.3 (pyro)
-*************************
-
-- :yocto_docs:`2.3 Documentation </2.3>`
-- :yocto_docs:`2.3.1 Documentation </2.3.1>`
-- :yocto_docs:`2.3.2 Documentation </2.3.2>`
-- :yocto_docs:`2.3.3 Documentation </2.3.3>`
-- :yocto_docs:`2.3.4 Documentation </2.3.4>`
-
-**************************
-Release Series 2.2 (morty)
-**************************
-
-- :yocto_docs:`2.2 Documentation </2.2>`
-- :yocto_docs:`2.2.1 Documentation </2.2.1>`
-- :yocto_docs:`2.2.2 Documentation </2.2.2>`
-- :yocto_docs:`2.2.3 Documentation </2.2.3>`
-
-****************************
-Release Series 2.1 (krogoth)
-****************************
-
-- :yocto_docs:`2.1 Documentation </2.1>`
-- :yocto_docs:`2.1.1 Documentation </2.1.1>`
-- :yocto_docs:`2.1.2 Documentation </2.1.2>`
-- :yocto_docs:`2.1.3 Documentation </2.1.3>`
-
-***************************
-Release Series 2.0 (jethro)
-***************************
-
-- :yocto_docs:`1.9 Documentation </1.9>`
-- :yocto_docs:`2.0 Documentation </2.0>`
-- :yocto_docs:`2.0.1 Documentation </2.0.1>`
-- :yocto_docs:`2.0.2 Documentation </2.0.2>`
-- :yocto_docs:`2.0.3 Documentation </2.0.3>`
-
-*************************
-Release Series 1.8 (fido)
-*************************
-
-- :yocto_docs:`1.8 Documentation </1.8>`
-- :yocto_docs:`1.8.1 Documentation </1.8.1>`
-- :yocto_docs:`1.8.2 Documentation </1.8.2>`
-
-**************************
-Release Series 1.7 (dizzy)
-**************************
-
-- :yocto_docs:`1.7 Documentation </1.7>`
-- :yocto_docs:`1.7.1 Documentation </1.7.1>`
-- :yocto_docs:`1.7.2 Documentation </1.7.2>`
-- :yocto_docs:`1.7.3 Documentation </1.7.3>`
-
-**************************
-Release Series 1.6 (daisy)
-**************************
-
-- :yocto_docs:`1.6 Documentation </1.6>`
-- :yocto_docs:`1.6.1 Documentation </1.6.1>`
-- :yocto_docs:`1.6.2 Documentation </1.6.2>`
-- :yocto_docs:`1.6.3 Documentation </1.6.3>`
-
-*************************
-Release Series 1.5 (dora)
-*************************
-
-- :yocto_docs:`1.5 Documentation </1.5>`
-- :yocto_docs:`1.5.1 Documentation </1.5.1>`
-- :yocto_docs:`1.5.2 Documentation </1.5.2>`
-- :yocto_docs:`1.5.3 Documentation </1.5.3>`
-- :yocto_docs:`1.5.4 Documentation </1.5.4>`
-
-**************************
-Release Series 1.4 (dylan)
-**************************
-
-- :yocto_docs:`1.4 Documentation </1.4>`
-- :yocto_docs:`1.4.1 Documentation </1.4.1>`
-- :yocto_docs:`1.4.2 Documentation </1.4.2>`
-- :yocto_docs:`1.4.3 Documentation </1.4.3>`
-- :yocto_docs:`1.4.4 Documentation </1.4.4>`
-- :yocto_docs:`1.4.5 Documentation </1.4.5>`
-
-**************************
-Release Series 1.3 (danny)
-**************************
-
-- :yocto_docs:`1.3 Documentation </1.3>`
-- :yocto_docs:`1.3.1 Documentation </1.3.1>`
-- :yocto_docs:`1.3.2 Documentation </1.3.2>`
-
-***************************
-Release Series 1.2 (denzil)
-***************************
-
-- :yocto_docs:`1.2 Documentation </1.2>`
-- :yocto_docs:`1.2.1 Documentation </1.2.1>`
-- :yocto_docs:`1.2.2 Documentation </1.2.2>`
-
-***************************
-Release Series 1.1 (edison)
-***************************
-
-- :yocto_docs:`1.1 Documentation </1.1>`
-- :yocto_docs:`1.1.1 Documentation </1.1.1>`
-- :yocto_docs:`1.1.2 Documentation </1.1.2>`
-
-****************************
-Release Series 1.0 (bernard)
-****************************
-
-- :yocto_docs:`1.0 Documentation </1.0>`
-- :yocto_docs:`1.0.1 Documentation </1.0.1>`
-- :yocto_docs:`1.0.2 Documentation </1.0.2>`
-
-****************************
-Release Series 0.9 (laverne)
-****************************
-
-- :yocto_docs:`0.9 Documentation </0.9>`
diff --git a/documentation/sdk-manual/appendix-customizing.rst b/documentation/sdk-manual/appendix-customizing.rst
index 9a76cc59d6..61091d83ba 100644
--- a/documentation/sdk-manual/appendix-customizing.rst
+++ b/documentation/sdk-manual/appendix-customizing.rst
@@ -1,11 +1,17 @@
.. SPDX-License-Identifier: CC-BY-SA-2.0-UK
-******************************
-Customizing the Extensible SDK
-******************************
+***************************************************
+Customizing the Extensible SDK standalone installer
+***************************************************
This appendix describes customizations you can apply to the extensible
-SDK.
+SDK when using in the standalone installer version.
+
+.. note::
+
+ It is also possible to use the Extensible SDK functionality directly in a
+ Yocto build, avoiding separate installer artefacts. Please refer to
+ ":ref:`sdk-manual/extensible:Installing the Extensible SDK`"
Configuring the Extensible SDK
==============================
@@ -38,14 +44,12 @@ build system applies them against ``local.conf`` and ``auto.conf``:
:term:`ESDK_LOCALCONF_ALLOW` overrides either of the previous two
filters. The default value is blank.
-- Classes inherited globally with
- :term:`INHERIT` that are listed in
- :term:`ESDK_CLASS_INHERIT_DISABLE`
- are disabled. Using :term:`ESDK_CLASS_INHERIT_DISABLE` to disable these
- classes is the typical method to disable classes that are problematic
- or unnecessary in the SDK context. The default value disables the
- :ref:`buildhistory <ref-classes-buildhistory>`
- and :ref:`icecc <ref-classes-icecc>` classes.
+- Classes inherited globally with :term:`INHERIT` that are listed in
+ :term:`ESDK_CLASS_INHERIT_DISABLE` are disabled. Using
+ :term:`ESDK_CLASS_INHERIT_DISABLE` to disable these classes is the typical
+ method to disable classes that are problematic or unnecessary in the SDK
+ context. The default value disables the
+ :ref:`ref-classes-buildhistory` and :ref:`ref-classes-icecc` classes.
Additionally, the contents of ``conf/sdk-extra.conf``, when present, are
appended to the end of ``conf/local.conf`` within the produced SDK,
@@ -68,13 +72,12 @@ adjustments:
.. note::
- The default value of
- ESDK_CLASS_INHERIT_DISABLE
+ The default value of :term:`ESDK_CLASS_INHERIT_DISABLE`
is set using the "?=" operator. Consequently, you will need to
either define the entire list by using the "=" operator, or you
will need to append a value using either ":append" or the "+="
operator. You can learn more about these operators in the
- ":ref:`bitbake:bitbake-user-manual/bitbake-user-manual-metadata:basic syntax`"
+ ":ref:`bitbake-user-manual/bitbake-user-manual-metadata:basic syntax`"
section of the BitBake User Manual.
- If you have classes or recipes that add additional tasks to the
@@ -169,13 +172,12 @@ perform additional steps. These steps make it possible for anyone using
the installed SDKs to update the installed SDKs by using the
``devtool sdk-update`` command:
-1. Create a directory that can be shared over HTTP or HTTPS. You can do
- this by setting up a web server such as an `Apache HTTP
- Server <https://en.wikipedia.org/wiki/Apache_HTTP_Server>`__ or
- `Nginx <https://en.wikipedia.org/wiki/Nginx>`__ server in the cloud
+#. Create a directory that can be shared over HTTP or HTTPS. You can do
+ this by setting up a web server such as an :wikipedia:`Apache HTTP Server
+ <Apache_HTTP_Server>` or :wikipedia:`Nginx <Nginx>` server in the cloud
to host the directory. This directory must contain the published SDK.
-2. Set the
+#. Set the
:term:`SDK_UPDATE_URL`
variable to point to the corresponding HTTP or HTTPS URL. Setting
this variable causes any SDK built to default to that URL and thus,
@@ -184,10 +186,10 @@ the installed SDKs to update the installed SDKs by using the
":ref:`sdk-manual/extensible:applying updates to an installed extensible sdk`"
section.
-3. Build the extensible SDK normally (i.e., use the
+#. Build the extensible SDK normally (i.e., use the
``bitbake -c populate_sdk_ext`` imagename command).
-4. Publish the SDK using the following command::
+#. Publish the SDK using the following command::
$ oe-publish-sdk some_path/sdk-installer.sh path_to_shared_http_directory
@@ -242,7 +244,7 @@ If you want the users of an extensible SDK you build to be able to add
items to the SDK without requiring the users to build the items from
source, you need to do a number of things:
-1. Ensure the additional items you want the user to be able to install
+#. Ensure the additional items you want the user to be able to install
are already built:
- Build the items explicitly. You could use one or more "meta"
@@ -254,13 +256,12 @@ source, you need to do a number of things:
:term:`EXCLUDE_FROM_WORLD`
variable for additional information.
-2. Expose the ``sstate-cache`` directory produced by the build.
+#. Expose the ``sstate-cache`` directory produced by the build.
Typically, you expose this directory by making it available through
- an `Apache HTTP
- Server <https://en.wikipedia.org/wiki/Apache_HTTP_Server>`__ or
- `Nginx <https://en.wikipedia.org/wiki/Nginx>`__ server.
+ an :wikipedia:`Apache HTTP Server <Apache_HTTP_Server>` or
+ :wikipedia:`Nginx <Nginx>` server.
-3. Set the appropriate configuration so that the produced SDK knows how
+#. Set the appropriate configuration so that the produced SDK knows how
to find the configuration. The variable you need to set is
:term:`SSTATE_MIRRORS`::
@@ -280,9 +281,8 @@ source, you need to do a number of things:
ESDK_LOCALCONF_ALLOW = "SSTATE_MIRRORS"
- Alternatively, if you just want to set the :term:`SSTATE_MIRRORS`
- variable's value for the SDK alone, create a
- ``conf/sdk-extra.conf`` file either in your
- :term:`Build Directory` or within any
+ variable's value for the SDK alone, create a ``conf/sdk-extra.conf``
+ file either in your :term:`Build Directory` or within any
layer and put your :term:`SSTATE_MIRRORS` setting within that file.
.. note::
diff --git a/documentation/sdk-manual/appendix-obtain.rst b/documentation/sdk-manual/appendix-obtain.rst
index ece378c75e..d06d6ec6b5 100644
--- a/documentation/sdk-manual/appendix-obtain.rst
+++ b/documentation/sdk-manual/appendix-obtain.rst
@@ -4,8 +4,22 @@
Obtaining the SDK
*****************
+Working with the SDK components directly in a Yocto build
+=========================================================
+
+Please refer to section
+":ref:`sdk-manual/extensible:Setting up the Extensible SDK environment directly in a Yocto build`"
+
+Note that to use this feature effectively either a powerful build
+machine, or a well-functioning sstate cache infrastructure is required:
+otherwise significant time could be spent waiting for components to be built
+by BitBake from source code.
+
+Working with standalone SDK Installers
+======================================
+
Locating Pre-Built SDK Installers
-=================================
+---------------------------------
You can use existing, pre-built toolchains by locating and running an
SDK installer script that ships with the Yocto Project. Using this
@@ -14,38 +28,31 @@ and then run the script to hand-install the toolchain.
Follow these steps to locate and hand-install the toolchain:
-1. *Go to the Installers Directory:* Go to
+#. *Go to the Installers Directory:* Go to
:yocto_dl:`/releases/yocto/yocto-&DISTRO;/toolchain/`
-2. *Open the Folder for Your Build Host:* Open the folder that matches
+#. *Open the Folder for Your Build Host:* Open the folder that matches
your :term:`Build Host` (i.e.
``i686`` for 32-bit machines or ``x86_64`` for 64-bit machines).
-3. *Locate and Download the SDK Installer:* You need to find and
+#. *Locate and Download the SDK Installer:* You need to find and
download the installer appropriate for your build host, target
hardware, and image type.
- The installer files (``*.sh``) follow this naming convention::
-
- poky-glibc-host_system-core-image-type-arch-toolchain[-ext]-release.sh
+ The installer files (``*.sh``) follow this naming convention:
+ ``poky-glibc-host_system-core-image-type-arch-toolchain[-ext]-release.sh``:
- Where:
- host_system is a string representing your development system:
- "i686" or "x86_64"
+ - ``host_system``: string representing your development system: ``i686`` or ``x86_64``
- type is a string representing the image:
- "sato" or "minimal"
+ - ``type``: string representing the image: ``sato`` or ``minimal``
- arch is a string representing the target architecture:
- "aarch64", "armv5e", "core2-64", "cortexa8hf-neon", "i586", "mips32r2",
- "mips64", or "ppc7400"
+ - ``arch``: string representing the target architecture such as ``cortexa57-qemuarm64``
- release is the version of Yocto Project.
-
- NOTE:
- The standard SDK installer does not have the "-ext" string as
- part of the filename.
+ - ``release``: version of the Yocto Project.
+ .. note::
+ The standard SDK installer does not have the ``-ext`` string as
+ part of the filename.
The toolchains provided by the Yocto
Project are based off of the ``core-image-sato`` and
@@ -53,16 +60,16 @@ Follow these steps to locate and hand-install the toolchain:
developing against those images.
For example, if your build host is a 64-bit x86 system and you need
- an extended SDK for a 64-bit core2 target, go into the ``x86_64``
+ an extended SDK for a 64-bit core2 QEMU target, go into the ``x86_64``
folder and download the following installer::
- poky-glibc-x86_64-core-image-sato-core2-64-toolchain-ext-&DISTRO;.sh
+ poky-glibc-x86_64-core-image-sato-core2-64-qemux86-64-toolchain-&DISTRO;.sh
-4. *Run the Installer:* Be sure you have execution privileges and run
- the installer. Following is an example from the ``Downloads``
+#. *Run the Installer:* Be sure you have execution privileges and run
+ the installer. Here is an example from the ``Downloads``
directory::
- $ ~/Downloads/poky-glibc-x86_64-core-image-sato-core2-64-toolchain-ext-&DISTRO;.sh
+ $ ~/Downloads/poky-glibc-x86_64-core-image-sato-core2-64-qemux86-64-toolchain-&DISTRO;.sh
During execution of the script, you choose the root location for the
toolchain. See the
@@ -72,18 +79,18 @@ Follow these steps to locate and hand-install the toolchain:
section for more information.
Building an SDK Installer
-=========================
+-------------------------
As an alternative to locating and downloading an SDK installer, you can
build the SDK installer. Follow these steps:
-1. *Set Up the Build Environment:* Be sure you are set up to use BitBake
+#. *Set Up the Build Environment:* Be sure you are set up to use BitBake
in a shell. See the ":ref:`dev-manual/start:preparing the build host`" section
in the Yocto Project Development Tasks Manual for information on how
to get a build host ready that is either a native Linux machine or a
machine that uses CROPS.
-2. *Clone the ``poky`` Repository:* You need to have a local copy of the
+#. *Clone the ``poky`` Repository:* You need to have a local copy of the
Yocto Project :term:`Source Directory`
(i.e. a local
``poky`` repository). See the ":ref:`dev-manual/start:cloning the \`\`poky\`\` repository`" and
@@ -93,50 +100,46 @@ build the SDK installer. Follow these steps:
how to clone the ``poky`` repository and check out the appropriate
branch for your work.
-3. *Initialize the Build Environment:* While in the root directory of
+#. *Initialize the Build Environment:* While in the root directory of
the Source Directory (i.e. ``poky``), run the
:ref:`structure-core-script` environment
setup script to define the OpenEmbedded build environment on your
- build host.
- ::
+ build host::
$ source oe-init-build-env
- Among other things, the script
- creates the :term:`Build Directory`,
- which is
- ``build`` in this case and is located in the Source Directory. After
- the script runs, your current working directory is set to the
- ``build`` directory.
-
-4. *Make Sure You Are Building an Installer for the Correct Machine:*
- Check to be sure that your
- :term:`MACHINE` variable in the
- ``local.conf`` file in your Build Directory matches the architecture
+ Among other things, the script creates the :term:`Build Directory`, which
+ is ``build`` in this case and is located in the Source Directory. After
+ the script runs, your current working directory is set to the ``build``
+ directory.
+
+#. *Make Sure You Are Building an Installer for the Correct Machine:*
+ Check to be sure that your :term:`MACHINE` variable in the ``local.conf``
+ file in your :term:`Build Directory` matches the architecture
for which you are building.
-5. *Make Sure Your SDK Machine is Correctly Set:* If you are building a
+#. *Make Sure Your SDK Machine is Correctly Set:* If you are building a
toolchain designed to run on an architecture that differs from your
current development host machine (i.e. the build host), be sure that
- the :term:`SDKMACHINE` variable
- in the ``local.conf`` file in your Build Directory is correctly set.
+ the :term:`SDKMACHINE` variable in the ``local.conf`` file in your
+ :term:`Build Directory` is correctly set.
.. note::
If you are building an SDK installer for the Extensible SDK, the
- SDKMACHINE
- value must be set for the architecture of the machine you are
- using to build the installer. If
- SDKMACHINE
+ :term:`SDKMACHINE` value must be set for the architecture of the
+ machine you are using to build the installer. If :term:`SDKMACHINE`
is not set appropriately, the build fails and provides an error
message similar to the following::
- The extensible SDK can currently only be built for the same architecture as the machine being built on - SDK_ARCH is
- set to i686 (likely via setting SDKMACHINE) which is different from the architecture of the build machine (x86_64).
- Unable to continue.
+ The extensible SDK can currently only be built for the same
+ architecture as the machine being built on - SDK_ARCH
+ is set to i686 (likely via setting SDKMACHINE) which is
+ different from the architecture of the build machine (x86_64).
+ Unable to continue.
-6. *Build the SDK Installer:* To build the SDK installer for a standard
+#. *Build the SDK Installer:* To build the SDK installer for a standard
SDK and populate the SDK image, use the following command form. Be
sure to replace ``image`` with an image (e.g. "core-image-sato")::
@@ -150,7 +153,7 @@ build the SDK installer. Follow these steps:
that matches your target root filesystem.
When the ``bitbake`` command completes, the SDK installer will be in
- ``tmp/deploy/sdk`` in the Build Directory.
+ ``tmp/deploy/sdk`` in the :term:`Build Directory`.
.. note::
@@ -162,12 +165,12 @@ build the SDK installer. Follow these steps:
variable inside your ``local.conf`` file before building the
SDK installer. Doing so ensures that the eventual SDK
installation process installs the appropriate library packages
- as part of the SDK. Following is an example using ``libc``
+ as part of the SDK. Here is an example using ``libc``
static development libraries: TOOLCHAIN_TARGET_TASK:append = "
libc-staticdev"
-7. *Run the Installer:* You can now run the SDK installer from
- ``tmp/deploy/sdk`` in the Build Directory. Following is an example::
+#. *Run the Installer:* You can now run the SDK installer from
+ ``tmp/deploy/sdk`` in the :term:`Build Directory`. Here is an example::
$ cd poky/build/tmp/deploy/sdk
$ ./poky-glibc-x86_64-core-image-sato-core2-64-toolchain-ext-&DISTRO;.sh
@@ -194,7 +197,7 @@ separately extract a root filesystem:
Follow these steps to extract the root filesystem:
-1. *Locate and Download the Tarball for the Pre-Built Root Filesystem
+#. *Locate and Download the Tarball for the Pre-Built Root Filesystem
Image File:* You need to find and download the root filesystem image
file that is appropriate for your target system. These files are kept
in machine-specific folders in the
@@ -206,21 +209,14 @@ Follow these steps to extract the root filesystem:
also contain flattened root filesystem image files (``*.ext4``),
which you can use with QEMU directly.
- The pre-built root filesystem image files follow these naming
- conventions::
-
- core-image-profile-arch.tar.bz2
+ The pre-built root filesystem image files follow the
+ ``core-image-profile-machine.tar.bz2`` naming convention:
- Where:
- profile is the filesystem image's profile:
- lsb, lsb-dev, lsb-sdk, minimal, minimal-dev, minimal-initramfs,
- sato, sato-dev, sato-sdk, sato-sdk-ptest. For information on
- these types of image profiles, see the "Images" chapter in
- the Yocto Project Reference Manual.
+ - ``profile``: filesystem image's profile, such as ``minimal``,
+ ``minimal-dev`` or ``sato``. For information on these types of image
+ profiles, see the "Images" chapter in the Yocto Project Reference Manual.
- arch is a string representing the target architecture:
- beaglebone-yocto, beaglebone-yocto-lsb, edgerouter, edgerouter-lsb,
- genericx86, genericx86-64, genericx86-64-lsb, genericx86-lsb and qemu*.
+ - ``machine``: same string as the name of the parent download directory.
The root filesystems
provided by the Yocto Project are based off of the
@@ -232,22 +228,22 @@ Follow these steps to extract the root filesystem:
core-image-sato-sdk-beaglebone-yocto.tar.bz2
-2. *Initialize the Cross-Development Environment:* You must ``source``
+#. *Initialize the Cross-Development Environment:* You must ``source``
the cross-development environment setup script to establish necessary
environment variables.
This script is located in the top-level directory in which you
installed the toolchain (e.g. ``poky_sdk``).
- Following is an example based on the toolchain installed in the
+ Here is an example based on the toolchain installed in the
":ref:`sdk-manual/appendix-obtain:locating pre-built sdk installers`" section::
$ source poky_sdk/environment-setup-core2-64-poky-linux
-3. *Extract the Root Filesystem:* Use the ``runqemu-extract-sdk``
+#. *Extract the Root Filesystem:* Use the ``runqemu-extract-sdk``
command and provide the root filesystem image.
- Following is an example command that extracts the root filesystem
+ Here is an example command that extracts the root filesystem
from a previously built root filesystem image that was downloaded
from the :yocto_dl:`Index of Releases </releases/yocto/yocto-&DISTRO;/machines/>`.
This command extracts the root filesystem into the ``core2-64-sato``
diff --git a/documentation/sdk-manual/extensible.rst b/documentation/sdk-manual/extensible.rst
index 6bb262273d..3f6a754d88 100644
--- a/documentation/sdk-manual/extensible.rst
+++ b/documentation/sdk-manual/extensible.rst
@@ -41,6 +41,46 @@ functionality.
Installing the Extensible SDK
=============================
+Two ways to install the Extensible SDK
+--------------------------------------
+
+Extensible SDK can be installed in two different ways, and both have
+their own pros and cons:
+
+#. *Setting up the Extensible SDK environment directly in a Yocto build*. This
+ avoids having to produce, test, distribute and maintain separate SDK
+ installer archives, which can get very large. There is only one environment
+ for the regular Yocto build and the SDK and less code paths where things can
+ go not according to plan. It's easier to update the SDK: it simply means
+ updating the Yocto layers with git fetch or layer management tooling. The
+ SDK extensibility is better than in the second option: just run ``bitbake``
+ again to add more things to the sysroot, or add layers if even more things
+ are required.
+
+#. *Setting up the Extensible SDK from a standalone installer*. This has the
+ benefit of having a single, self-contained archive that includes all the
+ needed binary artifacts. So nothing needs to be rebuilt, and there is no
+ need to provide a well-functioning binary artefact cache over the network
+ for developers with underpowered laptops.
+
+.. _setting_up_ext_sdk_in_build:
+
+Setting up the Extensible SDK environment directly in a Yocto build
+-------------------------------------------------------------------
+
+#. Set up all the needed layers and a Yocto :term:`Build Directory`, e.g. a regular Yocto
+ build where ``bitbake`` can be executed.
+
+#. Run::
+
+ $ bitbake meta-ide-support
+ $ bitbake -c populate_sysroot gtk+3
+ # or any other target or native item that the application developer would need
+ $ bitbake build-sysroots -c build_native_sysroot && bitbake build-sysroots -c build_target_sysroot
+
+Setting up the Extensible SDK from a standalone installer
+---------------------------------------------------------
+
The first thing you need to do is install the SDK on your :term:`Build
Host` by running the ``*.sh`` installation script.
@@ -102,16 +142,7 @@ must be writable for whichever users need to use the SDK.
The following command shows how to run the installer given a toolchain
tarball for a 64-bit x86 development host system and a 64-bit x86 target
architecture. The example assumes the SDK installer is located in
-``~/Downloads/`` and has execution rights.
-
-.. note::
-
- If you do not have write permissions for the directory into which you
- are installing the SDK, the installer notifies you and exits. For
- that case, set up the proper permissions in the directory and run the
- installer again.
-
-::
+``~/Downloads/`` and has execution rights::
$ ./Downloads/poky-glibc-x86_64-core-image-minimal-core2-64-toolchain-ext-2.5.sh
Poky (Yocto Project Reference Distro) Extensible SDK installer version 2.5
@@ -132,11 +163,25 @@ architecture. The example assumes the SDK installer is located in
Each time you wish to use the SDK in a new shell session, you need to source the environment setup script e.g.
$ . /home/scottrif/poky_sdk/environment-setup-core2-64-poky-linux
+.. note::
+
+ If you do not have write permissions for the directory into which you
+ are installing the SDK, the installer notifies you and exits. For
+ that case, set up the proper permissions in the directory and run the
+ installer again.
+
+.. _running_the_ext_sdk_env:
+
Running the Extensible SDK Environment Setup Script
===================================================
Once you have the SDK installed, you must run the SDK environment setup
-script before you can actually use the SDK. This setup script resides in
+script before you can actually use the SDK.
+
+When using a SDK directly in a Yocto build, you will find the script in
+``tmp/deploy/images/qemux86-64/`` in your :term:`Build Directory`.
+
+When using a standalone SDK installer, this setup script resides in
the directory you chose when you installed the SDK, which is either the
default ``poky_sdk`` directory or the directory you chose during
installation.
@@ -154,13 +199,18 @@ script is for an IA-based target machine using i586 tuning::
SDK environment now set up; additionally you may now run devtool to perform development tasks.
Run devtool --help for further details.
-Running the setup script defines many environment variables needed in
-order to use the SDK (e.g. ``PATH``,
-:term:`CC`,
-:term:`LD`, and so forth). If you want to
-see all the environment variables the script exports, examine the
+When using the environment script directly in a Yocto build, it can
+be run similarly::
+
+ $ source tmp/deploy/images/qemux86-64/environment-setup-core2-64-poky-linux
+
+Running the setup script defines many environment variables needed in order to
+use the SDK (e.g. ``PATH``, :term:`CC`, :term:`LD`, and so forth). If you want
+to see all the environment variables the script exports, examine the
installation file itself.
+.. _using_devtool:
+
Using ``devtool`` in Your SDK Workflow
======================================
@@ -172,11 +222,8 @@ system.
.. note::
- The use of
- devtool
- is not limited to the extensible SDK. You can use
- devtool
- to help you easily develop any project whose build output must be
+ The use of ``devtool`` is not limited to the extensible SDK. You can use
+ ``devtool`` to help you easily develop any project whose build output must be
part of an image built using the build system.
The ``devtool`` command line is organized similarly to
@@ -186,21 +233,18 @@ all the commands.
.. note::
- See the "
- devtool
-  Quick Reference
- " in the Yocto Project Reference Manual for a
- devtool
- quick reference.
+ See the ":doc:`/ref-manual/devtool-reference`"
+ section in the Yocto Project Reference Manual.
-Three ``devtool`` subcommands provide entry-points into
-development:
+``devtool`` subcommands provide entry-points into development:
- *devtool add*: Assists in adding new software to be built.
- *devtool modify*: Sets up an environment to enable you to modify
the source of an existing component.
+- *devtool ide-sdk*: Generates a configuration for an IDE.
+
- *devtool upgrade*: Updates an existing recipe so that you can
build it for an updated set of source files.
@@ -235,7 +279,7 @@ command:
.. image:: figures/sdk-devtool-add-flow.png
:width: 100%
-1. *Generating the New Recipe*: The top part of the flow shows three
+#. *Generating the New Recipe*: The top part of the flow shows three
scenarios by which you could use ``devtool add`` to generate a recipe
based on existing source code.
@@ -252,7 +296,7 @@ command:
- *Left*: The left scenario in the figure represents a common
situation where the source code does not exist locally and needs
to be extracted. In this situation, the source code is extracted
- to the default workspace - you do not want the files in some
+ to the default workspace --- you do not want the files in some
specific location outside of the workspace. Thus, everything you
need will be located in the workspace::
@@ -267,13 +311,12 @@ command:
- *Middle*: The middle scenario in the figure also represents a
situation where the source code does not exist locally. In this
case, the code is again upstream and needs to be extracted to some
- local area - this time outside of the default workspace.
+ local area --- this time outside of the default workspace.
.. note::
- If required,
- devtool
- always creates a Git repository locally during the extraction.
+ If required, ``devtool`` always creates a Git repository locally
+ during the extraction.
Furthermore, the first positional argument ``srctree`` in this case
identifies where the ``devtool add`` command will locate the
@@ -282,8 +325,7 @@ command:
$ devtool add recipe srctree fetchuri
- In summary,
- the source code is pulled from fetchuri and extracted into the
+ In summary, the source code is pulled from fetchuri and extracted into the
location defined by ``srctree`` as a local Git repository.
Within workspace, ``devtool`` creates a recipe named recipe along
@@ -302,28 +344,26 @@ command:
recipe for the code and places the recipe into the workspace.
Because the extracted source code already exists, ``devtool`` does
- not try to relocate the source code into the workspace - only the
+ not try to relocate the source code into the workspace --- only the
new recipe is placed in the workspace.
Aside from a recipe folder, the command also creates an associated
append folder and places an initial ``*.bbappend`` file within.
-2. *Edit the Recipe*: You can use ``devtool edit-recipe`` to open up the
+#. *Edit the Recipe*: You can use ``devtool edit-recipe`` to open up the
editor as defined by the ``$EDITOR`` environment variable and modify
the file::
$ devtool edit-recipe recipe
- From within the editor, you
- can make modifications to the recipe that take effect when you build
- it later.
+ From within the editor, you can make modifications to the recipe that
+ take effect when you build it later.
-3. *Build the Recipe or Rebuild the Image*: The next step you take
+#. *Build the Recipe or Rebuild the Image*: The next step you take
depends on what you are going to do with the new code.
If you need to eventually move the build output to the target
- hardware, use the following ``devtool`` command:
- :;
+ hardware, use the following ``devtool`` command::
$ devtool build recipe
@@ -334,7 +374,7 @@ command:
$ devtool build-image image
-4. *Deploy the Build Output*: When you use the ``devtool build`` command
+#. *Deploy the Build Output*: When you use the ``devtool build`` command
to build out your recipe, you probably want to see if the resulting
build output works as expected on the target hardware.
@@ -348,20 +388,22 @@ command:
development machine.
You can deploy your build output to that target hardware by using the
- ``devtool deploy-target`` command: $ devtool deploy-target recipe
- target The target is a live target machine running as an SSH server.
+ ``devtool deploy-target`` command::
+
+ $ devtool deploy-target recipe target
+
+ The target is a live target machine running as an SSH server.
You can, of course, also deploy the image you build to actual
hardware by using the ``devtool build-image`` command. However,
``devtool`` does not provide a specific command that allows you to
deploy the image to actual hardware.
-5. *Finish Your Work With the Recipe*: The ``devtool finish`` command
+#. *Finish Your Work With the Recipe*: The ``devtool finish`` command
creates any patches corresponding to commits in the local Git
repository, moves the new recipe to a more permanent layer, and then
resets the recipe so that the recipe is built normally rather than
- from the workspace.
- ::
+ from the workspace::
$ devtool finish recipe layer
@@ -379,11 +421,9 @@ command:
.. note::
- You can use the
- devtool reset
- command to put things back should you decide you do not want to
- proceed with your work. If you do use this command, realize that
- the source tree is preserved.
+ You can use the ``devtool reset`` command to put things back should you
+ decide you do not want to proceed with your work. If you do use this
+ command, realize that the source tree is preserved.
Use ``devtool modify`` to Modify the Source of an Existing Component
--------------------------------------------------------------------
@@ -403,7 +443,7 @@ command:
.. image:: figures/sdk-devtool-modify-flow.png
:width: 100%
-1. *Preparing to Modify the Code*: The top part of the flow shows three
+#. *Preparing to Modify the Code*: The top part of the flow shows three
scenarios by which you could use ``devtool modify`` to prepare to
work on source files. Each scenario assumes the following:
@@ -430,11 +470,9 @@ command:
$ devtool modify recipe
- Once
- ``devtool``\ locates the recipe, ``devtool`` uses the recipe's
- :term:`SRC_URI` statements to
- locate the source code and any local patch files from other
- developers.
+ Once ``devtool`` locates the recipe, ``devtool`` uses the recipe's
+ :term:`SRC_URI` statements to locate the source code and any local
+ patch files from other developers.
With this scenario, there is no ``srctree`` argument. Consequently, the
default behavior of the ``devtool modify`` command is to extract
@@ -470,11 +508,7 @@ command:
.. note::
- You cannot provide a URL for
- srctree
- using the
- devtool
- command.
+ You cannot provide a URL for ``srctree`` using the ``devtool`` command.
As with all extractions, the command uses the recipe's :term:`SRC_URI`
statements to locate the source files and any associated patch
@@ -512,11 +546,11 @@ command:
append file for the recipe in the ``devtool`` workspace. The
recipe and the source code remain in their original locations.
-2. *Edit the Source*: Once you have used the ``devtool modify`` command,
+#. *Edit the Source*: Once you have used the ``devtool modify`` command,
you are free to make changes to the source files. You can use any
editor you like to make and save your source code modifications.
-3. *Build the Recipe or Rebuild the Image*: The next step you take
+#. *Build the Recipe or Rebuild the Image*: The next step you take
depends on what you are going to do with the new code.
If you need to eventually move the build output to the target
@@ -527,9 +561,11 @@ command:
On the other hand, if you want an image to contain the recipe's
packages from the workspace for immediate deployment onto a device
(e.g. for testing purposes), you can use the ``devtool build-image``
- command: $ devtool build-image image
+ command::
+
+ $ devtool build-image image
-4. *Deploy the Build Output*: When you use the ``devtool build`` command
+#. *Deploy the Build Output*: When you use the ``devtool build`` command
to build out your recipe, you probably want to see if the resulting
build output works as expected on target hardware.
@@ -554,13 +590,12 @@ command:
``devtool`` does not provide a specific command to deploy the image
to actual hardware.
-5. *Finish Your Work With the Recipe*: The ``devtool finish`` command
+#. *Finish Your Work With the Recipe*: The ``devtool finish`` command
creates any patches corresponding to commits in the local Git
repository, updates the recipe to point to them (or creates a
``.bbappend`` file to do so, depending on the specified destination
layer), and then resets the recipe so that the recipe is built
- normally rather than from the workspace.
- ::
+ normally rather than from the workspace::
$ devtool finish recipe layer
@@ -568,8 +603,7 @@ command:
Any changes you want to turn into patches must be staged and
committed within the local Git repository before you use the
- devtool finish
- command.
+ ``devtool finish`` command.
Because there is no need to move the recipe, ``devtool finish``
either updates the original recipe in the original layer or the
@@ -584,11 +618,282 @@ command:
.. note::
- You can use the
- devtool reset
- command to put things back should you decide you do not want to
- proceed with your work. If you do use this command, realize that
- the source tree is preserved.
+ You can use the ``devtool reset`` command to put things back should you
+ decide you do not want to proceed with your work. If you do use this
+ command, realize that the source tree is preserved.
+
+``devtool ide-sdk`` configures IDEs for the extensible SDK
+----------------------------------------------------------
+
+``devtool ide-sdk`` automatically configures IDEs to use the extensible SDK.
+To make sure that all parts of the extensible SDK required by the generated
+IDE configuration are available, ``devtool ide-sdk`` uses BitBake in the
+background to bootstrap the extensible SDK.
+
+The extensible SDK supports two different development modes.
+``devtool ide-sdk`` supports both of them:
+
+#. *Modified mode*:
+
+ By default ``devtool ide-sdk`` generates IDE configurations for recipes in
+ workspaces created by ``devtool modify`` or ``devtool add`` as described in
+ :ref:`using_devtool`. This mode creates IDE configurations with support for
+ advanced features, such as deploying the binaries to the remote target
+ device and performing remote debugging sessions. The generated IDE
+ configurations use the per recipe sysroots as Bitbake does internally.
+
+ In order to use the tool, a few settings are needed. As a starting example,
+ the following lines of code can be added to the ``local.conf`` file::
+
+ # Build the companion debug file system
+ IMAGE_GEN_DEBUGFS = "1"
+ # Optimize build time: with devtool ide-sdk the dbg tar is not needed
+ IMAGE_FSTYPES_DEBUGFS = ""
+ # Without copying the binaries into roofs-dbg, GDB does not find all source files.
+ IMAGE_CLASSES += "image-combined-dbg"
+
+ # SSH is mandatory, no password simplifies the usage
+ EXTRA_IMAGE_FEATURES += "\
+ ssh-server-openssh \
+ debug-tweaks \
+ "
+
+ # Remote debugging needs gdbserver on the target device
+ IMAGE_INSTALL:append = " gdbserver"
+
+ # Add the recipes which should be modified to the image
+ # Otherwise some dependencies might be missing.
+ IMAGE_INSTALL:append = " my-recipe"
+
+ Assuming the BitBake environment is set up correctly and a workspace has
+ been created for the recipe using ``devtool modify my-recipe``, the
+ following command can create the SDK and the configuration for VSCode in
+ the recipe workspace::
+
+ $ devtool ide-sdk my-recipe core-image-minimal --target root@192.168.7.2
+
+ The command requires an image recipe (``core-image-minimal`` for this example)
+ that is used to create the SDK. This firmware image should also be installed
+ on the target device. It is possible to pass multiple package recipes.
+ ``devtool ide-sdk`` tries to create an IDE configuration for all package
+ recipes.
+
+ What this command does exactly depends on the recipe, more precisely on the
+ build tool used by the recipe. The basic idea is to configure the IDE so
+ that it calls the build tool exactly as ``bitbake`` does.
+
+ For example, a CMake preset is created for a recipe that inherits
+ :ref:`ref-classes-cmake`. In the case of VSCode, CMake presets are supported
+ by the CMake Tools plugin. This is an example of how the build
+ configuration used by ``bitbake`` is exported to an IDE configuration that
+ gives exactly the same build results.
+
+ Support for remote debugging with seamless integration into the IDE is
+ important for a cross-SDK. ``devtool ide-sdk`` automatically generates the
+ necessary helper scripts for deploying the compiled artifacts to the target
+ device as well as the necessary configuration for the debugger and the IDE.
+
+ .. note::
+
+ To ensure that the debug symbols on the build machine match the binaries
+ running on the target device, it is essential that the image built by
+ ``devtool ide-sdk`` is running on the target device.
+
+ ``devtool ide-sdk`` aims to support multiple programming languages and
+ multiple IDEs natively. "Natively" means that the IDE is configured to call
+ the build tool (e.g. CMake or Meson) directly. This has several advantages.
+ First of all, it is much faster than ``devtool build``, but it also allows
+ to use the very good integration of tools like CMake or GDB in VSCode and
+ other IDEs. However, supporting many programming languages and multiple
+ IDEs is quite an elaborate and constantly evolving thing. Support for IDEs
+ is therefore implemented as plugins. Plugins can also be provided by
+ optional layers.
+
+ The default IDE is VSCode. Some hints about using VSCode:
+
+ - To work on the source code of a recipe an instance of VSCode is started in
+ the recipe's workspace. Example::
+
+ code build/workspace/sources/my-recipe
+
+ - To work with CMake press ``Ctrl + Shift + p``, type ``cmake``. This will
+ show some possible commands like selecting a CMake preset, compiling or
+ running CTest.
+
+ For recipes inheriting :ref:`ref-classes-cmake-qemu` rather than
+ :ref:`ref-classes-cmake`, executing cross-compiled unit tests on the host
+ can be supported transparently with QEMU user-mode.
+
+ - To work with Meson press ``Ctrl + Shift + p``, type ``meson``. This will
+ show some possible commands like compiling or executing the unit tests.
+
+ A note on running cross-compiled unit tests on the host: Meson enables
+ support for QEMU user-mode by default. It is expected that the execution
+ of the unit tests from the IDE will work easily without any additional
+ steps, provided that the code is suitable for execution on the host
+ machine.
+
+ - For the deployment to the target device, just press ``Ctrl + Shift + p``,
+ type ``task``. Select ``install && deploy-target``.
+
+ - For remote debugging, switch to the debugging view by pressing the "play"
+ button with the ``bug icon`` on the left side. This will provide a green
+ play button with a drop-down list where a debug configuration can be
+ selected. After selecting one of the generated configurations, press the
+ "play" button.
+
+ Starting a remote debugging session automatically initiates the deployment
+ to the target device. If this is not desired, the
+ ``"dependsOn": ["install && deploy-target...]`` parameter of the tasks
+ with ``"label": "gdbserver start...`` can be removed from the
+ ``tasks.json`` file.
+
+ VSCode supports GDB with many different setups and configurations for many
+ different use cases. However, most of these setups have some limitations
+ when it comes to cross-development, support only a few target
+ architectures or require a high performance target device. Therefore
+ ``devtool ide-sdk`` supports the classic, generic setup with GDB on the
+ development host and gdbserver on the target device.
+
+ Roughly summarized, this means:
+
+ - The binaries are copied via SSH to the remote target device by a script
+ referred by ``tasks.json``.
+
+ - gdbserver is started on the remote target device via SSH by a script
+ referred by ``tasks.json``.
+
+ Changing the parameters that are passed to the debugging executable
+ requires modifying the generated script. The script is located at
+ ``oe-scripts/gdbserver_*``. Defining the parameters in the ``args``
+ field in the ``launch.json`` file does not work.
+
+ - VSCode connects to gdbserver as documented in
+ `Remote debugging or debugging with a local debugger server
+ <https://code.visualstudio.com/docs/cpp/launch-json-reference#_remote-debugging-or-debugging-with-a-local-debugger-server>`__.
+
+ Additionally ``--ide=none`` is supported. With the ``none`` IDE parameter,
+ some generic configuration files like ``gdbinit`` files and some helper
+ scripts starting gdbserver remotely on the target device as well as the GDB
+ client on the host are generated.
+
+ Here is a usage example for the ``cmake-example`` recipe from the
+ ``meta-selftest`` layer which inherits :ref:`ref-classes-cmake-qemu`:
+
+ .. code-block:: sh
+
+ # Create the SDK
+ devtool modify cmake-example
+ devtool ide-sdk cmake-example core-image-minimal -c --debug-build-config --ide=none
+
+ # Install the firmware on a target device or start QEMU
+ runqemu
+
+ # From exploring the workspace of cmake-example
+ cd build/workspace/sources/cmake-example
+
+ # Find cmake-native and save the path into a variable
+ # Note: using just cmake instead of $CMAKE_NATIVE would work in many cases
+ CMAKE_NATIVE="$(jq -r '.configurePresets[0] | "\(.cmakeExecutable)"' CMakeUserPresets.json)"
+
+ # List available CMake presets
+ "$CMAKE_NATIVE" --list-presets
+ Available configure presets:
+
+ "cmake-example-cortexa57" - cmake-example: cortexa57
+
+ # Re-compile the already compiled sources
+ "$CMAKE_NATIVE" --build --preset cmake-example-cortexa57
+ ninja: no work to do.
+ # Do a clean re-build
+ "$CMAKE_NATIVE" --build --preset cmake-example-cortexa57 --target clean
+ [1/1] Cleaning all built files...
+ Cleaning... 8 files.
+ "$CMAKE_NATIVE" --build --preset cmake-example-cortexa57 --target all
+ [7/7] Linking CXX executable cmake-example
+
+ # Run the cross-compiled unit tests with QEMU user-mode
+ "$CMAKE_NATIVE" --build --preset cmake-example-cortexa57 --target test
+ [0/1] Running tests...
+ Test project .../build/tmp/work/cortexa57-poky-linux/cmake-example/1.0/cmake-example-1.0
+ Start 1: test-cmake-example
+ 1/1 Test #1: test-cmake-example ............... Passed 0.03 sec
+
+ 100% tests passed, 0 tests failed out of 1
+
+ Total Test time (real) = 0.03 sec
+
+ # Using CTest directly is possible as well
+ CTEST_NATIVE="$(dirname "$CMAKE_NATIVE")/ctest"
+
+ # List available CMake presets
+ "$CTEST_NATIVE" --list-presets
+ Available test presets:
+
+ "cmake-example-cortexa57" - cmake-example: cortexa57
+
+ # Run the cross-compiled unit tests with QEMU user-mode
+ "$CTEST_NATIVE" --preset "cmake-example-cortexa57"
+ Test project ...build/tmp/work/cortexa57-poky-linux/cmake-example/1.0/cmake-example-1.0
+ Start 1: test-cmake-example
+ 1/1 Test #1: test-cmake-example ............... Passed 0.03 sec
+
+ 100% tests passed, 0 tests failed out of 1
+
+ Total Test time (real) = 0.03 sec
+
+ # Deploying the new build to the target device (default is QEUM at 192.168.7.2)
+ oe-scripts/install_and_deploy_cmake-example-cortexa57
+
+ # Start a remote debugging session with gdbserver on the target and GDB on the host
+ oe-scripts/gdbserver_1234_usr-bin-cmake-example_m
+ oe-scripts/gdb_1234_usr-bin-cmake-example
+ break main
+ run
+ step
+ stepi
+ continue
+ quit
+
+ # Stop gdbserver on the target device
+ oe-scripts/gdbserver_1234_usr-bin-cmake-example_m stop
+
+#. *Shared sysroots mode*
+
+ For some recipes and use cases a per-recipe sysroot based SDK is not
+ suitable. Optionally ``devtool ide-sdk`` configures the IDE to use the
+ toolchain provided by the extensible SDK as described in
+ :ref:`running_the_ext_sdk_env`. ``devtool ide-sdk --mode=shared`` is
+ basically a wrapper for the setup of the extensible SDK as described in
+ :ref:`setting_up_ext_sdk_in_build`. The IDE gets a configuration to use the
+ shared sysroots.
+
+ Creating a SDK with shared sysroots that contains all the dependencies needed
+ to work with ``my-recipe`` is possible with the following example command::
+
+ $ devtool ide-sdk --mode=shared my-recipe
+
+ For VSCode the cross-toolchain is exposed as a CMake kit. CMake kits are
+ defined in ``~/.local/share/CMakeTools/cmake-tools-kits.json``.
+ The following example shows how the cross-toolchain can be selected in
+ VSCode. First of all we need a folder containing a CMake project.
+ For this example, let's create a CMake project and start VSCode::
+
+ mkdir kit-test
+ echo "project(foo VERSION 1.0)" > kit-test/CMakeLists.txt
+ code kit-test
+
+ If there is a CMake project in the workspace, cross-compilation is supported:
+
+ - Press ``Ctrl + Shift + P``, type ``CMake: Scan for Kits``
+ - Press ``Ctrl + Shift + P``, type ``CMake: Select a Kit``
+
+ Finally most of the features provided by CMake and the IDE should be available.
+
+ Other IDEs than VSCode are supported as well. However,
+ ``devtool ide-sdk --mode=shared --ide=none my-recipe`` is currently
+ just a simple wrapper for the setup of the extensible SDK, as described in
+ :ref:`setting_up_ext_sdk_in_build`.
Use ``devtool upgrade`` to Create a Version of the Recipe that Supports a Newer Version of the Software
-------------------------------------------------------------------------------------------------------
@@ -602,19 +907,17 @@ counterparts.
.. note::
- Several methods exist by which you can upgrade recipes -
- ``devtool upgrade``
- happens to be one. You can read about all the methods by which you
- can upgrade recipes in the
- :ref:`dev-manual/common-tasks:upgrading recipes` section
- of the Yocto Project Development Tasks Manual.
+ Several methods exist by which you can upgrade recipes ---
+ ``devtool upgrade`` happens to be one. You can read about all the methods by
+ which you can upgrade recipes in the
+ :ref:`dev-manual/upgrading-recipes:upgrading recipes` section of the Yocto
+ Project Development Tasks Manual.
-The ``devtool upgrade`` command is flexible enough to allow you to
-specify source code revision and versioning schemes, extract code into
-or out of the ``devtool``
-:ref:`devtool-the-workspace-layer-structure`,
-and work with any source file forms that the
-:ref:`bitbake:bitbake-user-manual/bitbake-user-manual-fetching:fetchers` support.
+The ``devtool upgrade`` command is flexible enough to allow you to specify
+source code revision and versioning schemes, extract code into or out of the
+``devtool`` :ref:`devtool-the-workspace-layer-structure`, and work with any
+source file forms that the
+:ref:`bitbake-user-manual/bitbake-user-manual-fetching:fetchers` support.
The following diagram shows the common development flow used with the
``devtool upgrade`` command:
@@ -622,7 +925,7 @@ The following diagram shows the common development flow used with the
.. image:: figures/sdk-devtool-upgrade-flow.png
:width: 100%
-1. *Initiate the Upgrade*: The top part of the flow shows the typical
+#. *Initiate the Upgrade*: The top part of the flow shows the typical
scenario by which you use the ``devtool upgrade`` command. The
following conditions exist:
@@ -674,7 +977,7 @@ The following diagram shows the common development flow used with the
are incorporated into the build the next time you build the software
just as are other changes you might have made to the source.
-2. *Resolve any Conflicts created by the Upgrade*: Conflicts could happen
+#. *Resolve any Conflicts created by the Upgrade*: Conflicts could happen
after upgrading the software to a new version. Conflicts occur
if your recipe specifies some patch files in :term:`SRC_URI` that
conflict with changes made in the new version of the software. For
@@ -685,7 +988,7 @@ The following diagram shows the common development flow used with the
conflicts created through use of a newer or different version of the
software.
-3. *Build the Recipe or Rebuild the Image*: The next step you take
+#. *Build the Recipe or Rebuild the Image*: The next step you take
depends on what you are going to do with the new code.
If you need to eventually move the build output to the target
@@ -700,7 +1003,7 @@ The following diagram shows the common development flow used with the
$ devtool build-image image
-4. *Deploy the Build Output*: When you use the ``devtool build`` command
+#. *Deploy the Build Output*: When you use the ``devtool build`` command
or ``bitbake`` to build your recipe, you probably want to see if the
resulting build output works as expected on target hardware.
@@ -714,15 +1017,18 @@ The following diagram shows the common development flow used with the
development machine.
You can deploy your build output to that target hardware by using the
- ``devtool deploy-target`` command: $ devtool deploy-target recipe
- target The target is a live target machine running as an SSH server.
+ ``devtool deploy-target`` command::
+
+ $ devtool deploy-target recipe target
+
+ The target is a live target machine running as an SSH server.
You can, of course, also deploy the image you build using the
``devtool build-image`` command to actual hardware. However,
``devtool`` does not provide a specific command that allows you to do
this.
-5. *Finish Your Work With the Recipe*: The ``devtool finish`` command
+#. *Finish Your Work With the Recipe*: The ``devtool finish`` command
creates any patches corresponding to commits in the local Git
repository, moves the new recipe to a more permanent layer, and then
resets the recipe so that the recipe is built normally rather than
@@ -734,8 +1040,7 @@ The following diagram shows the common development flow used with the
If you specify a destination layer that is the same as the original
source, then the old version of the recipe and associated files are
- removed prior to adding the new version.
- ::
+ removed prior to adding the new version::
$ devtool finish recipe layer
@@ -750,11 +1055,9 @@ The following diagram shows the common development flow used with the
.. note::
- You can use the
- devtool reset
- command to put things back should you decide you do not want to
- proceed with your work. If you do use this command, realize that
- the source tree is preserved.
+ You can use the ``devtool reset`` command to put things back should you
+ decide you do not want to proceed with your work. If you do use this
+ command, realize that the source tree is preserved.
A Closer Look at ``devtool add``
================================
@@ -822,10 +1125,9 @@ run ``devtool add`` again and provide the name or the version.
Dependency Detection and Mapping
--------------------------------
-The ``devtool add`` command attempts to detect build-time dependencies
-and map them to other recipes in the system. During this mapping, the
-command fills in the names of those recipes as part of the
-:term:`DEPENDS` variable within the
+The ``devtool add`` command attempts to detect build-time dependencies and map
+them to other recipes in the system. During this mapping, the command fills in
+the names of those recipes as part of the :term:`DEPENDS` variable within the
recipe. If a dependency cannot be mapped, ``devtool`` places a comment
in the recipe indicating such. The inability to map a dependency can
result from naming not being recognized or because the dependency simply
@@ -842,10 +1144,8 @@ following to your recipe::
.. note::
- The
- devtool add
- command often cannot distinguish between mandatory and optional
- dependencies. Consequently, some of the detected dependencies might
+ The ``devtool add`` command often cannot distinguish between mandatory and
+ optional dependencies. Consequently, some of the detected dependencies might
in fact be optional. When in doubt, consult the documentation or the
configure script for the software the recipe is building for further
details. In some cases, you might find you can substitute the
@@ -855,16 +1155,14 @@ following to your recipe::
License Detection
-----------------
-The ``devtool add`` command attempts to determine if the software you
-are adding is able to be distributed under a common, open-source
-license. If so, the command sets the
-:term:`LICENSE` value accordingly.
+The ``devtool add`` command attempts to determine if the software you are
+adding is able to be distributed under a common, open-source license. If
+so, the command sets the :term:`LICENSE` value accordingly.
You should double-check the value added by the command against the
documentation or source files for the software you are building and, if
necessary, update that :term:`LICENSE` value.
-The ``devtool add`` command also sets the
-:term:`LIC_FILES_CHKSUM`
+The ``devtool add`` command also sets the :term:`LIC_FILES_CHKSUM`
value to point to all files that appear to be license-related. Realize
that license statements often appear in comments at the top of source
files or within the documentation. In such cases, the command does not
@@ -944,10 +1242,9 @@ mind:
Adding Native Tools
-------------------
-Often, you need to build additional tools that run on the :term:`Build
-Host` as opposed to
-the target. You should indicate this requirement by using one of the
-following methods when you run ``devtool add``:
+Often, you need to build additional tools that run on the :term:`Build Host`
+as opposed to the target. You should indicate this requirement by using one of
+the following methods when you run ``devtool add``:
- Specify the name of the recipe such that it ends with "-native".
Specifying the name like this produces a recipe that only builds for
@@ -971,8 +1268,7 @@ Adding Node.js Modules
----------------------
You can use the ``devtool add`` command two different ways to add
-Node.js modules: 1) Through ``npm`` and, 2) from a repository or local
-source.
+Node.js modules: through ``npm`` or from a repository or local source.
Use the following form to add Node.js modules through ``npm``::
@@ -987,7 +1283,7 @@ these behaviors ensure the reproducibility and integrity of the build.
.. note::
- - You must use quotes around the URL. The ``devtool add`` does not
+ - You must use quotes around the URL. ``devtool add`` does not
require the quotes, but the shell considers ";" as a splitter
between multiple commands. Thus, without the quotes,
``devtool add`` does not receive the other parts, which results in
@@ -1002,9 +1298,8 @@ repository or local source tree. To add modules this way, use
$ devtool add https://github.com/diversario/node-ssdp
-In this example, ``devtool``
-fetches the specified Git repository, detects the code as Node.js code,
-fetches dependencies using ``npm``, and sets
+In this example, ``devtool`` fetches the specified Git repository, detects the
+code as Node.js code, fetches dependencies using ``npm``, and sets
:term:`SRC_URI` accordingly.
Working With Recipes
@@ -1013,17 +1308,17 @@ Working With Recipes
When building a recipe using the ``devtool build`` command, the typical
build progresses as follows:
-1. Fetch the source
+#. Fetch the source
-2. Unpack the source
+#. Unpack the source
-3. Configure the source
+#. Configure the source
-4. Compile the source
+#. Compile the source
-5. Install the build output
+#. Install the build output
-6. Package the installed output
+#. Package the installed output
For recipes in the workspace, fetching and unpacking is disabled as the
source tree has already been prepared and is persistent. Each of these
@@ -1038,9 +1333,8 @@ does not include complete instructions for building the software.
Instead, common functionality is encapsulated in classes inherited with
the ``inherit`` directive. This technique leaves the recipe to describe
just the things that are specific to the software being built. There is
-a :ref:`base <ref-classes-base>` class that
-is implicitly inherited by all recipes and provides the functionality
-that most recipes typically need.
+a :ref:`ref-classes-base` class that is implicitly inherited by all recipes
+and provides the functionality that most recipes typically need.
The remainder of this section presents information useful when working
with recipes.
@@ -1066,9 +1360,9 @@ links created within the source tree:
``${``\ :term:`D`\ ``}``.
- ``sysroot-destdir/``: Contains a subset of files installed within
- ``do_install`` that have been put into the shared sysroot. For
+ :ref:`ref-tasks-install` that have been put into the shared sysroot. For
more information, see the
- ":ref:`dev-manual/common-tasks:sharing files between recipes`" section.
+ ":ref:`dev-manual/new-recipe:sharing files between recipes`" section.
- ``packages-split/``: Contains subdirectories for each package
produced by the recipe. For more information, see the
@@ -1082,18 +1376,13 @@ Setting Configure Arguments
If the software your recipe is building uses GNU autoconf, then a fixed
set of arguments is passed to it to enable cross-compilation plus any
-extras specified by
-:term:`EXTRA_OECONF` or
-:term:`PACKAGECONFIG_CONFARGS`
+extras specified by :term:`EXTRA_OECONF` or :term:`PACKAGECONFIG_CONFARGS`
set within the recipe. If you wish to pass additional options, add them
to :term:`EXTRA_OECONF` or :term:`PACKAGECONFIG_CONFARGS`. Other supported build
-tools have similar variables (e.g.
-:term:`EXTRA_OECMAKE` for
-CMake, :term:`EXTRA_OESCONS`
-for Scons, and so forth). If you need to pass anything on the ``make``
-command line, you can use :term:`EXTRA_OEMAKE` or the
-:term:`PACKAGECONFIG_CONFARGS`
-variables to do so.
+tools have similar variables (e.g. :term:`EXTRA_OECMAKE` for CMake,
+:term:`EXTRA_OESCONS` for Scons, and so forth). If you need to pass anything on
+the ``make`` command line, you can use :term:`EXTRA_OEMAKE` or the
+:term:`PACKAGECONFIG_CONFARGS` variables to do so.
You can use the ``devtool configure-help`` command to help you set the
arguments listed in the previous paragraph. The command determines the
@@ -1117,8 +1406,7 @@ the build host.
Recipes should never write files directly into the sysroot. Instead,
files should be installed into standard locations during the
-:ref:`ref-tasks-install` task within
-the ``${``\ :term:`D`\ ``}`` directory. A
+:ref:`ref-tasks-install` task within the ``${``\ :term:`D`\ ``}`` directory. A
subset of these files automatically goes into the sysroot. The reason
for this limitation is that almost all files that go into the sysroot
are cataloged in manifests in order to ensure they can be removed later
@@ -1134,14 +1422,12 @@ the target device, it is important to understand packaging because the
contents of the image are expressed in terms of packages and not
recipes.
-During the :ref:`ref-tasks-package`
-task, files installed during the
-:ref:`ref-tasks-install` task are
-split into one main package, which is almost always named the same as
-the recipe, and into several other packages. This separation exists
-because not all of those installed files are useful in every image. For
-example, you probably do not need any of the documentation installed in
-a production image. Consequently, for each recipe the documentation
+During the :ref:`ref-tasks-package` task, files installed during the
+:ref:`ref-tasks-install` task are split into one main package, which is almost
+always named the same as the recipe, and into several other packages. This
+separation exists because not all of those installed files are useful in every
+image. For example, you probably do not need any of the documentation installed
+in a production image. Consequently, for each recipe the documentation
files are separated into a ``-doc`` package. Recipes that package
software containing optional modules or plugins might undergo additional
package splitting as well.
@@ -1149,8 +1435,7 @@ package splitting as well.
After building a recipe, you can see where files have gone by looking in
the ``oe-workdir/packages-split`` directory, which contains a
subdirectory for each package. Apart from some advanced cases, the
-:term:`PACKAGES` and
-:term:`FILES` variables controls
+:term:`PACKAGES` and :term:`FILES` variables controls
splitting. The :term:`PACKAGES` variable lists all of the packages to be
produced, while the :term:`FILES` variable specifies which files to include
in each package by using an override to specify the package. For
@@ -1192,16 +1477,11 @@ target machine.
.. note::
- The
- devtool deploy-target
- and
- devtool undeploy-target
- commands do not currently interact with any package management system
- on the target device (e.g. RPM or OPKG). Consequently, you should not
- intermingle
- devtool deploy-target
- and package manager operations on the target device. Doing so could
- result in a conflicting set of files.
+ The ``devtool deploy-target`` and ``devtool undeploy-target`` commands do
+ not currently interact with any package management system on the target
+ device (e.g. RPM or OPKG). Consequently, you should not intermingle
+ ``devtool deploy-target`` and package manager operations on the target
+ device. Doing so could result in a conflicting set of files.
Installing Additional Items Into the Extensible SDK
===================================================
@@ -1215,9 +1495,29 @@ need to link to libGL but you are not sure which recipe provides libGL.
You can use the following command to find out::
$ devtool search libGL mesa
+ A free implementation of the OpenGL API
+
+Once you know the recipe
+(i.e. ``mesa`` in this example), you can install it.
-A free implementation of the OpenGL API Once you know the recipe
-(i.e. ``mesa`` in this example), you can install it::
+When using the extensible SDK directly in a Yocto build
+-------------------------------------------------------
+
+In this scenario, the Yocto build tooling, e.g. ``bitbake``
+is directly accessible to build additional items, and it
+can simply be executed directly::
+
+ $ bitbake curl-native
+ # Add newly built native items to native sysroot
+ $ bitbake build-sysroots -c build_native_sysroot
+ $ bitbake mesa
+ # Add newly built target items to target sysroot
+ $ bitbake build-sysroots -c build_target_sysroot
+
+When using a standalone installer for the Extensible SDK
+--------------------------------------------------------
+
+::
$ devtool sdk-install mesa
@@ -1244,13 +1544,13 @@ To update your installed SDK, use ``devtool`` as follows::
$ devtool sdk-update
-The previous command assumes your SDK provider has set the
-default update URL for you through the :term:`SDK_UPDATE_URL`
-variable as described in the
+The previous command assumes your SDK provider has set the default update URL
+for you through the :term:`SDK_UPDATE_URL` variable as described in the
":ref:`sdk-manual/appendix-customizing:Providing Updates to the Extensible SDK After Installation`"
section. If the SDK provider has not set that default URL, you need to
-specify it yourself in the command as follows: $ devtool sdk-update
-path_to_update_directory
+specify it yourself in the command as follows::
+
+ $ devtool sdk-update path_to_update_directory
.. note::
@@ -1267,15 +1567,15 @@ those customers need an SDK that has custom libraries. In such a case,
you can produce a derivative SDK based on the currently installed SDK
fairly easily by following these steps:
-1. If necessary, install an extensible SDK that you want to use as a
+#. If necessary, install an extensible SDK that you want to use as a
base for your derivative SDK.
-2. Source the environment script for the SDK.
+#. Source the environment script for the SDK.
-3. Add the extra libraries or other components you want by using the
+#. Add the extra libraries or other components you want by using the
``devtool add`` command.
-4. Run the ``devtool build-sdk`` command.
+#. Run the ``devtool build-sdk`` command.
The previous steps take the recipes added to the workspace and construct
a new SDK installer that contains those recipes and the resulting binary
diff --git a/documentation/sdk-manual/intro.rst b/documentation/sdk-manual/intro.rst
index ce00538b2a..e8fd191dbc 100644
--- a/documentation/sdk-manual/intro.rst
+++ b/documentation/sdk-manual/intro.rst
@@ -66,7 +66,7 @@ The SDK development environment consists of the following:
In summary, the extensible and standard SDK share many features.
However, the extensible SDK has powerful development tools to help you
-more quickly develop applications. Following is a table that summarizes
+more quickly develop applications. Here is a table that summarizes
the primary differences between the standard and extensible SDK types
when considering which to build:
@@ -164,11 +164,11 @@ image.
You just need to follow these general steps:
-1. *Install the SDK for your target hardware:* For information on how to
+#. *Install the SDK for your target hardware:* For information on how to
install the SDK, see the ":ref:`sdk-manual/using:installing the sdk`"
section.
-2. *Download or Build the Target Image:* The Yocto Project supports
+#. *Download or Build the Target Image:* The Yocto Project supports
several target architectures and has many pre-built kernel images and
root filesystem images.
@@ -195,7 +195,7 @@ You just need to follow these general steps:
":ref:`sdk-manual/appendix-obtain:extracting the root filesystem`"
section for information on how to do this extraction.
-3. *Develop and Test your Application:* At this point, you have the
+#. *Develop and Test your Application:* At this point, you have the
tools to develop your application. If you need to separately install
and use the QEMU emulator, you can go to `QEMU Home
Page <https://wiki.qemu.org/Main_Page>`__ to download and learn about
diff --git a/documentation/sdk-manual/using.rst b/documentation/sdk-manual/using.rst
index 301627812c..f1ff0c76ca 100644
--- a/documentation/sdk-manual/using.rst
+++ b/documentation/sdk-manual/using.rst
@@ -52,8 +52,7 @@ libraries appropriate for developing against the corresponding image.
The names of the tarball installer scripts are such that a string
representing the host system appears first in the filename and then is
-immediately followed by a string representing the target architecture.
-::
+immediately followed by a string representing the target architecture::
poky-glibc-host_system-image_type-arch-toolchain-release_version.sh
@@ -97,16 +96,7 @@ must be writable for whichever users need to use the SDK.
The following command shows how to run the installer given a toolchain
tarball for a 64-bit x86 development host system and a 64-bit x86 target
architecture. The example assumes the SDK installer is located in
-``~/Downloads/`` and has execution rights.
-
-.. note::
-
- If you do not have write permissions for the directory into which you
- are installing the SDK, the installer notifies you and exits. For
- that case, set up the proper permissions in the directory and run the
- installer again.
-
-::
+``~/Downloads/`` and has execution rights::
$ ./Downloads/poky-glibc-x86_64-core-image-sato-i586-toolchain-&DISTRO;.sh
Poky (Yocto Project Reference Distro) SDK installer version &DISTRO;
@@ -119,6 +109,13 @@ architecture. The example assumes the SDK installer is located in
Each time you wish to use the SDK in a new shell session, you need to source the environment setup script e.g.
$ . /opt/poky/&DISTRO;/environment-setup-i586-poky-linux
+.. note::
+
+ If you do not have write permissions for the directory into which you
+ are installing the SDK, the installer notifies you and exits. For
+ that case, set up the proper permissions in the directory and run the
+ installer again.
+
Again, reference the
":ref:`sdk-manual/appendix-obtain:installed standard sdk directory structure`"
section for more details on the resulting directory structure of the installed
diff --git a/documentation/sdk-manual/working-projects.rst b/documentation/sdk-manual/working-projects.rst
index efef5c8bd2..4236bcec24 100644
--- a/documentation/sdk-manual/working-projects.rst
+++ b/documentation/sdk-manual/working-projects.rst
@@ -11,9 +11,9 @@ Autotools-Based Projects
========================
Once you have a suitable :ref:`sdk-manual/intro:the cross-development toolchain`
-installed, it is very easy to develop a project using the `GNU
-Autotools-based <https://en.wikipedia.org/wiki/GNU_Build_System>`__
-workflow, which is outside of the :term:`OpenEmbedded Build System`.
+installed, it is very easy to develop a project using the :wikipedia:`GNU
+Autotools-based <GNU_Build_System>` workflow, which is outside of the
+:term:`OpenEmbedded Build System`.
The following figure presents a simple Autotools workflow.
@@ -31,10 +31,9 @@ project:
GNOME Developer
site.
-1. *Create a Working Directory and Populate It:* Create a clean
+#. *Create a Working Directory and Populate It:* Create a clean
directory for your project and then make that directory your working
- location.
- ::
+ location::
$ mkdir $HOME/helloworld
$ cd $HOME/helloworld
@@ -75,7 +74,7 @@ project:
bin_PROGRAMS = hello
hello_SOURCES = hello.c
-2. *Source the Cross-Toolchain Environment Setup File:* As described
+#. *Source the Cross-Toolchain Environment Setup File:* As described
earlier in the manual, installing the cross-toolchain creates a
cross-toolchain environment setup script in the directory that the
SDK was installed. Before you can use the tools to develop your
@@ -88,9 +87,13 @@ project:
$ source /opt/poky/&DISTRO;/environment-setup-i586-poky-linux
-3. *Create the configure Script:* Use the ``autoreconf`` command to
- generate the ``configure`` script.
- ::
+ Another example is sourcing the environment setup directly in a Yocto
+ build::
+
+ $ source tmp/deploy/images/qemux86-64/environment-setup-core2-64-poky-linux
+
+#. *Create the configure Script:* Use the ``autoreconf`` command to
+ generate the ``configure`` script::
$ autoreconf
@@ -105,7 +108,7 @@ project:
which ensures missing auxiliary files are copied to the build
host.
-4. *Cross-Compile the Project:* This command compiles the project using
+#. *Cross-Compile the Project:* This command compiles the project using
the cross-compiler. The
:term:`CONFIGURE_FLAGS`
environment variable provides the minimal arguments for GNU
@@ -126,7 +129,7 @@ project:
$ ./configure --host=armv5te-poky-linux-gnueabi --with-libtool-sysroot=sysroot_dir
-5. *Make and Install the Project:* These two commands generate and
+#. *Make and Install the Project:* These two commands generate and
install the project into the destination directory::
$ make
@@ -142,12 +145,11 @@ project:
This next command is a simple way to verify the installation of your
project. Running the command prints the architecture on which the
binary file can run. This architecture should be the same
- architecture that the installed cross-toolchain supports.
- ::
+ architecture that the installed cross-toolchain supports::
$ file ./tmp/usr/local/bin/hello
-6. *Execute Your Project:* To execute the project, you would need to run
+#. *Execute Your Project:* To execute the project, you would need to run
it on your target hardware. If your target hardware happens to be
your build host, you could run the project as follows::
@@ -174,19 +176,19 @@ variables and Makefile variables during development.
The main point of this section is to explain the following three cases
regarding variable behavior:
-- *Case 1 - No Variables Set in the Makefile Map to Equivalent
+- *Case 1 --- No Variables Set in the Makefile Map to Equivalent
Environment Variables Set in the SDK Setup Script:* Because matching
variables are not specifically set in the ``Makefile``, the variables
retain their values based on the environment setup script.
-- *Case 2 - Variables Are Set in the Makefile that Map to Equivalent
+- *Case 2 --- Variables Are Set in the Makefile that Map to Equivalent
Environment Variables from the SDK Setup Script:* Specifically
setting matching variables in the ``Makefile`` during the build
results in the environment settings of the variables being
overwritten. In this case, the variables you set in the ``Makefile``
are used.
-- *Case 3 - Variables Are Set Using the Command Line that Map to
+- *Case 3 --- Variables Are Set Using the Command Line that Map to
Equivalent Environment Variables from the SDK Setup Script:*
Executing the ``Makefile`` from the command line results in the
environment variables being overwritten. In this case, the
@@ -206,8 +208,7 @@ demonstrates these variable behaviors.
In a new shell environment variables are not established for the SDK
until you run the setup script. For example, the following commands show
a null value for the compiler variable (i.e.
-:term:`CC`).
-::
+:term:`CC`)::
$ echo ${CC}
@@ -226,10 +227,9 @@ established through the script::
To illustrate variable use, work through this simple "Hello World!"
example:
-1. *Create a Working Directory and Populate It:* Create a clean
+#. *Create a Working Directory and Populate It:* Create a clean
directory for your project and then make that directory your working
- location.
- ::
+ location::
$ mkdir $HOME/helloworld
$ cd $HOME/helloworld
@@ -266,7 +266,7 @@ example:
printf("\n");
}
-2. *Source the Cross-Toolchain Environment Setup File:* As described
+#. *Source the Cross-Toolchain Environment Setup File:* As described
earlier in the manual, installing the cross-toolchain creates a
cross-toolchain environment setup script in the directory that the
SDK was installed. Before you can use the tools to develop your
@@ -279,7 +279,12 @@ example:
$ source /opt/poky/&DISTRO;/environment-setup-i586-poky-linux
-3. *Create the Makefile:* For this example, the Makefile contains
+ Another example is sourcing the environment setup directly in a Yocto
+ build::
+
+ $ source tmp/deploy/images/qemux86-64/environment-setup-core2-64-poky-linux
+
+#. *Create the Makefile:* For this example, the Makefile contains
two lines that can be used to set the :term:`CC` variable. One line is
identical to the value that is set when you run the SDK environment
setup script, and the other line sets :term:`CC` to "gcc", the default
@@ -288,16 +293,16 @@ example:
# CC=i586-poky-linux-gcc -m32 -march=i586 --sysroot=/opt/poky/2.5/sysroots/i586-poky-linux
# CC="gcc"
all: main.o module.o
- ${CC} main.o module.o -o target_bin
+ ${CC} main.o module.o -o target_bin
main.o: main.c module.h
- ${CC} -I . -c main.c
- module.o: module.c
- module.h ${CC} -I . -c module.c
+ ${CC} -I . -c main.c
+ module.o: module.c module.h
+ ${CC} -I . -c module.c
clean:
- rm -rf *.o
- rm target_bin
+ rm -rf *.o
+ rm target_bin
-4. *Make the Project:* Use the ``make`` command to create the binary
+#. *Make the Project:* Use the ``make`` command to create the binary
output file. Because variables are commented out in the Makefile, the
value used for :term:`CC` is the value set when the SDK environment setup
file was run::
@@ -313,8 +318,7 @@ example:
You can override the :term:`CC` environment variable with the same
variable as set from the Makefile by uncommenting the line in the
- Makefile and running ``make`` again.
- ::
+ Makefile and running ``make`` again::
$ make clean
rm -rf *.o
@@ -383,7 +387,7 @@ example:
use the SDK environment variables regardless of the values in the
Makefile.
-5. *Execute Your Project:* To execute the project (i.e. ``target_bin``),
+#. *Execute Your Project:* To execute the project (i.e. ``target_bin``),
use the following command::
$ ./target_bin
diff --git a/documentation/set_versions.py b/documentation/set_versions.py
index c409d5ea86..90e08fc5e0 100755
--- a/documentation/set_versions.py
+++ b/documentation/set_versions.py
@@ -1,9 +1,11 @@
#!/usr/bin/env python3
#
# Add version information to poky.yaml based upon current git branch/tags
+# Also generate the list of available manuals (releases.rst file)
#
# Copyright Linux Foundation
# Author: Richard Purdie <richard.purdie@linuxfoundation.org>
+# Author: Quentin Schulz <foss@0leil.net>
#
# SPDX-License-Identifier: MIT
#
@@ -14,6 +16,7 @@ import collections
import sys
import os
import itertools
+import re
ourversion = None
if len(sys.argv) == 2:
@@ -23,8 +26,8 @@ ourversion = None
if len(sys.argv) == 2:
ourversion = sys.argv[1]
-activereleases = ["kirkstone", "honister", "dunfell"]
-devbranch = "langdale"
+activereleases = ["nanbield", "kirkstone", "dunfell"]
+devbranch = "scarthgap"
ltsseries = ["kirkstone", "dunfell"]
# used by run-docs-builds to get the default page
@@ -33,6 +36,9 @@ if ourversion == "getlatest":
sys.exit(0)
release_series = collections.OrderedDict()
+release_series["scarthgap"] = "5.0"
+release_series["nanbield"] = "4.3"
+release_series["mickledore"] = "4.2"
release_series["langdale"] = "4.1"
release_series["kirkstone"] = "4.0"
release_series["honister"] = "3.4"
@@ -62,6 +68,9 @@ release_series["laverne"] = "0.9"
bitbake_mapping = {
+ "scarthgap" : "2.8",
+ "nanbield" : "2.6",
+ "mickledore" : "2.4",
"langdale" : "2.2",
"kirkstone" : "2.0",
"honister" : "1.52",
@@ -86,7 +95,7 @@ docconfver = None
# Test tags exist and inform the user to fetch if not
try:
- subprocess.run(["git", "show", "yocto-3.4.2"], capture_output=True, check=True)
+ subprocess.run(["git", "show", "yocto-%s" % release_series[activereleases[0]]], capture_output=True, check=True)
except subprocess.CalledProcessError:
sys.exit("Please run 'git fetch --tags' before building the documentation")
@@ -231,3 +240,77 @@ with open("sphinx-static/switchers.js.in", "r") as r, open("sphinx-static/switch
print("switchers.js generated from switchers.js.in")
+# generate releases.rst
+
+# list missing tags in yocto-docs
+missing_tags = [
+ 'yocto-0.9',
+ 'yocto-1.0', 'yocto-1.0.1',
+ 'yocto-1.1', 'yocto-1.1.1',
+ 'yocto-1.2',
+ 'yocto-1.4.4', 'yocto-1.4.5',
+ 'yocto-1.5', 'yocto-1.5.2', 'yocto-1.5.3', 'yocto-1.5.4',
+ 'yocto-1.6', 'yocto-1.6.1', 'yocto-1.6.2',
+ 'yocto-1.7', 'yocto-1.7.1',
+ 'yocto-1.9',
+ 'yocto-2.5.3',
+ 'yocto-3.1', 'yocto-3.1.1', 'yocto-3.1.2', 'yocto-3.1.3',
+ ]
+
+semver = re.compile(r'yocto-(\d+)\.(\d+)(?:\.)?(\d*)')
+
+# git is able to properly order semver versions but not python
+# instead of adding a dependency on semver module, let's convert the version
+# into a decimal number, e.g. 11.23.1 will be 112301 and 1.5 will be 010500 so
+# it can be used as a key for the sorting algorithm.
+# This can be removed once all the old tags are re-created.
+def tag_to_semver_like(v):
+ v_semver = semver.search(v)
+ v_maj, v_min, v_patch = v_semver.groups('0')
+ return int("{:0>2}{:0>2}{:0>2}".format(v_maj, v_min, v_patch), 10)
+
+yocto_tags = subprocess.run(["git", "tag", "--list", "--sort=version:refname", "yocto-*"], capture_output=True, text=True).stdout
+yocto_tags = sorted(yocto_tags.split() + missing_tags, key=tag_to_semver_like)
+tags = [tag[6:] for tag in yocto_tags]
+
+with open('releases.rst', 'w') as f:
+ f.write('===========================\n')
+ f.write(' Supported Release Manuals\n')
+ f.write('===========================\n')
+ f.write('\n')
+
+ for activerelease in activereleases:
+ title = "Release Series %s (%s)" % (release_series[activerelease], activerelease)
+ f.write('*' * len(title) + '\n')
+ f.write(title + '\n')
+ f.write('*' * len(title) + '\n')
+ f.write('\n')
+
+ for tag in tags:
+ if tag == release_series[activerelease] or tag.startswith('%s.' % release_series[activerelease]):
+ f.write('- :yocto_docs:`%s Documentation </%s>`\n' % (tag, tag))
+ f.write('\n')
+
+ f.write('==========================\n')
+ f.write(' Outdated Release Manuals\n')
+ f.write('==========================\n')
+ f.write('\n')
+
+ for series in release_series:
+ if series == devbranch or series in activereleases:
+ continue
+
+ if series == "jethro-pre":
+ continue
+
+ title = "Release Series %s (%s)" % (release_series[series], series)
+ f.write('*' * len(title) + '\n')
+ f.write(title + '\n')
+ f.write('*' * len(title) + '\n')
+ f.write('\n')
+ if series == "jethro":
+ f.write('- :yocto_docs:`1.9 Documentation </1.9>`\n')
+ for tag in tags:
+ if tag == release_series[series] or tag.startswith('%s.' % release_series[series]):
+ f.write('- :yocto_docs:`%s Documentation </%s>`\n' % (tag, tag))
+ f.write('\n')
diff --git a/documentation/standards.md b/documentation/standards.md
index 81aff5f193..e0c0cba83c 100644
--- a/documentation/standards.md
+++ b/documentation/standards.md
@@ -5,8 +5,71 @@ documentation is created.
It is currently a work in progress.
+## Automatic style validation
+
+There is an ongoing effort to automate style validation
+through the [Vale](https://vale.sh/). To try it, run:
+
+ $ make stylecheck
+
+Note that this just applies to text. Therefore, the syntax
+conventions described below still apply.
+
+If you wish to add a new word to an "accept.txt" file
+(./styles/config/vocabularies/<Vocab>/accept.txt),
+make sure the spelling and capitalization matches
+what Wikipedia or the project defining this word uses.
+
## Text standards
+### Bulleted lists
+
+Though Sphinx supports both the ``*`` and ``-`` characters
+for introducing bulleted lists, we have chosen to use
+only ``-`` for this purpose.
+
+Though not strictly required by Sphinx, we have also chosen
+to use two space characters after ``-`` to introduce each
+list item:
+
+ - Paragraph 1
+
+ - Paragraph 2
+
+As shown in the above example, there should also be an empty
+line between each list item.
+
+An exception to this rule is when the list items are just made
+of a few words, instead of entire paragraphs:
+
+ - Item 1
+ - Item 2
+
+This is again a matter of style, not syntax.
+
+### Line wrapping
+
+Source code for the documentation shouldn't have lines
+wider than 80 characters. This makes patch lines more
+readable and code easier to quote in e-mail clients.
+
+If you have to include long commands or lines in configuration
+files, provided the syntax makes this possible, split them
+into multiple lines, using the ``\`` character.
+
+Here is an example:
+
+ $ scripts/install-buildtools \
+ --without-extended-buildtools \
+ --base-url https://downloads.yoctoproject.org/releases/yocto \
+ --release yocto-4.0.1 \
+ --installer-version 4.0.1
+
+Exceptions are granted for file contents whose lines
+cannot be split without infringing syntactic rules
+or reducing readability, as well as for command output
+which should be kept unmodified.
+
### Project names
Project names should be capitalized in the same
@@ -23,13 +86,25 @@ in lower case:
* When used in a cross-reference title. Such
titles are usually in lower case.
-### File names
+### File, tool and command names
+
+File, tool and command names should be double tick-quoted.
+For example, ``` ``conf/local.conf`` ``` is preferred over
+`"conf/local.conf"`.
+
+### Variables
+
+Every variable should be mentioned with:
-File names should be quoted as in the below example:
+ :term:`VARIABLE`
- ``conf/local.conf``
+This assumes that `VARIABLE` is described either
+in the Yocto Project documentation variable index (`ref-manual/variables.rst`)
+or in the BitBake User Manual
+(`doc/bitbake-user-manual/bitbake-user-manual-ref-variables.rst`)
-Using "conf/local/conf" would be wrong.
+If it is not described yet, the variable should be added to the
+glossary before or in the same patch it is used, so that `:term:` can be used.
## ReStructured Text Syntax standards
diff --git a/documentation/styles/config/vocabularies/OpenSource/accept.txt b/documentation/styles/config/vocabularies/OpenSource/accept.txt
new file mode 100644
index 0000000000..e378fbf79b
--- /dev/null
+++ b/documentation/styles/config/vocabularies/OpenSource/accept.txt
@@ -0,0 +1,20 @@
+autovivification
+blkparse
+blktrace
+callee
+debugfs
+ftrace
+KernelShark
+Kprobe
+LTTng
+perf
+profiler
+subcommand
+subnode
+superset
+Sysprof
+systemd
+toolchain
+tracepoint
+Uprobe
+wget
diff --git a/documentation/styles/config/vocabularies/Yocto/accept.txt b/documentation/styles/config/vocabularies/Yocto/accept.txt
new file mode 100644
index 0000000000..ca622ba412
--- /dev/null
+++ b/documentation/styles/config/vocabularies/Yocto/accept.txt
@@ -0,0 +1,5 @@
+BitBake
+BSP
+crosstap
+OpenEmbedded
+Yocto
diff --git a/documentation/template/template.svg b/documentation/template/template.svg
index 43043e3afb..50715c08b0 100644
--- a/documentation/template/template.svg
+++ b/documentation/template/template.svg
@@ -1019,7 +1019,7 @@
id="tspan1183-1-8"
x="-52.348656"
y="518.42615"
- style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:37.3333px;font-family:'Liberation Sans';-inkscape-font-specification:'Liberation Sans, Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-variant-east-asian:normal;stroke:none">Objets</tspan></text>
+ style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:37.3333px;font-family:'Liberation Sans';-inkscape-font-specification:'Liberation Sans, Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-variant-east-asian:normal;stroke:none">Objects</tspan></text>
<text
xml:space="preserve"
style="font-weight:bold;font-size:13.3333px;line-height:125%;font-family:'Nimbus Roman';-inkscape-font-specification:'Nimbus Roman, Bold';letter-spacing:0px;word-spacing:0px;writing-mode:lr-tb;fill:#000000;fill-opacity:1;stroke:#000000;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1"
diff --git a/documentation/test-manual/intro.rst b/documentation/test-manual/intro.rst
index eb9ebe2d5f..c31fd11c7a 100644
--- a/documentation/test-manual/intro.rst
+++ b/documentation/test-manual/intro.rst
@@ -14,15 +14,13 @@ release works as intended. All the project's testing infrastructure and
processes are publicly visible and available so that the community can
see what testing is being performed, how it's being done and the current
status of the tests and the project at any given time. It is intended
-that Other organizations can leverage off the process and testing
+that other organizations can leverage off the process and testing
environment used by the Yocto Project to create their own automated,
production test environment, building upon the foundations from the
project core.
-Currently, the Yocto Project Test Environment Manual has no projected
-release date. This manual is a work-in-progress and is being initially
-loaded with information from the README files and notes from key
-engineers:
+This manual is a work-in-progress and is being initially loaded with
+information from the README files and notes from key engineers:
- *yocto-autobuilder2:* This
:yocto_git:`README.md </yocto-autobuilder2/tree/README.md>`
@@ -39,7 +37,7 @@ engineers:
As a result, it can be used by any Continuous Improvement (CI) system
to run builds, support getting the correct code revisions, configure
builds and layers, run builds, and collect results. The code is
- independent of any CI system, which means the code can work `Buildbot <https://docs.buildbot.net/0.9.15.post1/>`__,
+ independent of any CI system, which means the code can work `Buildbot <https://docs.buildbot.net/current/>`__,
Jenkins, or others. This repository has a branch per release of the
project defining the tests to run on a per release basis.
@@ -54,8 +52,8 @@ the Autobuilder tests if things work. The Autobuilder builds all test
targets and runs all the tests.
The Yocto Project uses now uses standard upstream
-`Buildbot <https://docs.buildbot.net/0.9.15.post1/>`__ (version 9) to
-drive its integration and testing. Buildbot Nine has a plug-in interface
+Buildbot (`version 3.8 <https://docs.buildbot.net/3.8.0/>`__) to
+drive its integration and testing. Buildbot has a plug-in interface
that the Yocto Project customizes using code from the
``yocto-autobuilder2`` repository, adding its own console UI plugin. The
resulting UI plug-in allows you to visualize builds in a way suited to
@@ -74,8 +72,7 @@ simple JSON files.
The project uses Buildbot for historical reasons but also because
many of the project developers have knowledge of Python. It is
possible to use the outer layers from another Continuous Integration
- (CI) system such as
- `Jenkins <https://en.wikipedia.org/wiki/Jenkins_(software)>`__
+ (CI) system such as :wikipedia:`Jenkins <Jenkins_(software)>`
instead of Buildbot.
The following figure shows the Yocto Project Autobuilder stack with a
@@ -85,8 +82,8 @@ topology that includes a controller and a cluster of workers:
:align: center
:width: 70%
-Yocto Project Tests - Types of Testing Overview
-===============================================
+Yocto Project Tests --- Types of Testing Overview
+=================================================
The Autobuilder tests different elements of the project by using
the following types of tests:
@@ -94,19 +91,18 @@ the following types of tests:
- *Build Testing:* Tests whether specific configurations build by
varying :term:`MACHINE`,
:term:`DISTRO`, other configuration
- options, and the specific target images being built (or world). Used
- to trigger builds of all the different test configurations on the
+ options, and the specific target images being built (or ``world``). This is
+ used to trigger builds of all the different test configurations on the
Autobuilder. Builds usually cover many different targets for
different architectures, machines, and distributions, as well as
different configurations, such as different init systems. The
Autobuilder tests literally hundreds of configurations and targets.
- - *Sanity Checks During the Build Process:* Tests initiated through
- the :ref:`insane <ref-classes-insane>`
- class. These checks ensure the output of the builds are correct.
- For example, does the ELF architecture in the generated binaries
- match the target system? ARM binaries would not work in a MIPS
- system!
+ - *Sanity Checks During the Build Process:* Tests initiated through the
+ :ref:`ref-classes-insane` class. These checks ensure the output of the
+ builds are correct. For example, does the ELF architecture in the
+ generated binaries match the target system? ARM binaries would not work
+ in a MIPS system!
- *Build Performance Testing:* Tests whether or not commonly used steps
during builds work efficiently and avoid regressions. Tests to time
@@ -122,7 +118,8 @@ the following types of tests:
$ bitbake image -c testsdkext
- The tests utilize the :ref:`testsdkext <ref-classes-testsdk>` class and the ``do_testsdkext`` task.
+ The tests use the :ref:`ref-classes-testsdk` class and the
+ ``do_testsdkext`` task.
- *Feature Testing:* Various scenario-based tests are run through the
:ref:`OpenEmbedded Self test (oe-selftest) <ref-manual/release-process:Testing and Quality Assurance>`. We test oe-selftest on each of the main distributions
@@ -132,8 +129,8 @@ the following types of tests:
$ bitbake image -c testimage
- The tests utilize the :ref:`testimage* <ref-classes-testimage*>`
- classes and the :ref:`ref-tasks-testimage` task.
+ The tests use the :ref:`ref-classes-testimage`
+ class and the :ref:`ref-tasks-testimage` task.
- *Layer Testing:* The Autobuilder has the possibility to test whether
specific layers work with the test of the system. The layers tested
@@ -143,7 +140,7 @@ the following types of tests:
- *Package Testing:* A Package Test (ptest) runs tests against packages
built by the OpenEmbedded build system on the target machine. See the
:ref:`Testing Packages With
- ptest <dev-manual/common-tasks:Testing Packages With ptest>` section
+ ptest <dev-manual/packages:Testing Packages With ptest>` section
in the Yocto Project Development Tasks Manual and the
":yocto_wiki:`Ptest </Ptest>`" Wiki page for more
information on Ptest.
@@ -152,7 +149,7 @@ the following types of tests:
$ bitbake image -c testsdk
- The tests utilize the :ref:`testsdk <ref-classes-testsdk>` class and
+ The tests use the :ref:`ref-classes-testsdk` class and
the ``do_testsdk`` task.
- *Unit Testing:* Unit tests on various components of the system run
@@ -175,48 +172,55 @@ Tests map into the codebase as follows:
which include the fetchers. The tests are located in
``bitbake/lib/*/tests``.
+ Some of these tests run the ``bitbake`` command, so ``bitbake/bin``
+ must be added to the ``PATH`` before running ``bitbake-selftest``.
From within the BitBake repository, run the following::
- $ bitbake-selftest
+ $ export PATH=$PWD/bin:$PATH
- To skip tests that access the Internet, use the ``BB_SKIP_NETTESTS``
- variable when running "bitbake-selftest" as follows::
+ After that, you can run the selftest script::
- $ BB_SKIP_NETTESTS=yes bitbake-selftest
+ $ bitbake-selftest
The default output is quiet and just prints a summary of what was
run. To see more information, there is a verbose option::
$ bitbake-selftest -v
+ To skip tests that access the Internet, use the ``BB_SKIP_NETTESTS``
+ variable when running ``bitbake-selftest`` as follows::
+
+ $ BB_SKIP_NETTESTS=yes bitbake-selftest
+
Use this option when you wish to skip tests that access the network,
which are mostly necessary to test the fetcher modules. To specify
individual test modules to run, append the test module name to the
- "bitbake-selftest" command. For example, to specify the tests for the
- bb.data.module, run::
+ ``bitbake-selftest`` command. For example, to specify the tests for
+ ``bb.tests.data.DataExpansions``, run::
- $ bitbake-selftest bb.test.data.module
+ $ bitbake-selftest bb.tests.data.DataExpansions
You can also specify individual tests by defining the full name and module
plus the class path of the test, for example::
- $ bitbake-selftest bb.tests.data.TestOverrides.test_one_override
+ $ bitbake-selftest bb.tests.data.DataExpansions.test_one_var
- The tests are based on `Python
- unittest <https://docs.python.org/3/library/unittest.html>`__.
+ The tests are based on
+ `Python unittest <https://docs.python.org/3/library/unittest.html>`__.
- *oe-selftest:*
- These tests use OE to test the workflows, which include testing
specific features, behaviors of tasks, and API unit tests.
- - The tests can take advantage of parallelism through the "-j"
+ - The tests can take advantage of parallelism through the ``-j``
option, which can specify a number of threads to spread the tests
across. Note that all tests from a given class of tests will run
in the same thread. To parallelize large numbers of tests you can
split the class into multiple units.
- - The tests are based on Python unittest.
+ - The tests are based on
+ `Python unittest <https://docs.python.org/3/library/unittest.html>`__.
- The code for the tests resides in
``meta/lib/oeqa/selftest/cases/``.
@@ -226,18 +230,18 @@ Tests map into the codebase as follows:
$ oe-selftest -a
- To run a specific test, use the following command form where
- testname is the name of the specific test::
+ ``testname`` is the name of the specific test::
$ oe-selftest -r <testname>
- For example, the following command would run the tinfoil
- getVar API test::
+ For example, the following command would run the ``tinfoil``
+ ``getVar`` API test::
$ oe-selftest -r tinfoil.TinfoilTests.test_getvar
It is also possible to run a set
of tests. For example the following command will run all of the
- tinfoil tests::
+ ``tinfoil`` tests::
$ oe-selftest -r tinfoil
@@ -272,7 +276,7 @@ Tests map into the codebase as follows:
- These tests build an extended SDK (eSDK), install that eSDK, and
run tests against the eSDK.
- - The code for these tests resides in ``meta/lib/oeqa/esdk``.
+ - The code for these tests resides in ``meta/lib/oeqa/sdkext/cases/``.
- To run the tests, use the following command form::
@@ -299,13 +303,13 @@ Tests map into the codebase as follows:
Git repository.
Use the ``oe-build-perf-report`` command to generate text reports
- and HTML reports with graphs of the performance data. For
- examples, see
- :yocto_dl:`/releases/yocto/yocto-2.7/testresults/buildperf-centos7/perf-centos7.yoctoproject.org_warrior_20190414204758_0e39202.html`
+ and HTML reports with graphs of the performance data. See
+ :yocto_dl:`html </releases/yocto/yocto-4.3/testresults/buildperf-debian11/perf-debian11_nanbield_20231019191258_15b576c410.html>`
and
- :yocto_dl:`/releases/yocto/yocto-2.7/testresults/buildperf-centos7/perf-centos7.yoctoproject.org_warrior_20190414204758_0e39202.txt`.
+ :yocto_dl:`txt </releases/yocto/yocto-4.3/testresults/buildperf-debian11/perf-debian11_nanbield_20231019191258_15b576c410.txt>`
+ examples.
- - The tests are contained in ``lib/oeqa/buildperf/test_basic.py``.
+ - The tests are contained in ``meta/lib/oeqa/buildperf/test_basic.py``.
Test Examples
=============
@@ -313,16 +317,14 @@ Test Examples
This section provides example tests for each of the tests listed in the
:ref:`test-manual/intro:How Tests Map to Areas of Code` section.
-For oeqa tests, testcases for each area reside in the main test
-directory at ``meta/lib/oeqa/selftest/cases`` directory.
+- ``oe-selftest`` testcases reside in the ``meta/lib/oeqa/selftest/cases`` directory.
-For oe-selftest. bitbake testcases reside in the ``lib/bb/tests/``
-directory.
+- ``bitbake-selftest`` testcases reside in the ``bitbake/lib/bb/tests/`` directory.
``bitbake-selftest``
--------------------
-A simple test example from ``lib/bb/tests/data.py`` is::
+A simple test example from ``bitbake/lib/bb/tests/data.py`` is::
class DataExpansions(unittest.TestCase):
def setUp(self):
@@ -335,21 +337,24 @@ A simple test example from ``lib/bb/tests/data.py`` is::
val = self.d.expand("${foo}")
self.assertEqual(str(val), "value_of_foo")
-In this example, a ``DataExpansions`` class of tests is created,
-derived from standard Python unittest. The class has a common ``setUp``
-function which is shared by all the tests in the class. A simple test is
-then added to test that when a variable is expanded, the correct value
-is found.
+In this example, a ``DataExpansions`` class of tests is created, derived from
+standard `Python unittest <https://docs.python.org/3/library/unittest.html>`__.
+The class has a common ``setUp`` function which is shared by all the tests in
+the class. A simple test is then added to test that when a variable is
+expanded, the correct value is found.
-BitBake selftests are straightforward Python unittest. Refer to the
-Python unittest documentation for additional information on writing
-these tests at: https://docs.python.org/3/library/unittest.html.
+BitBake selftests are straightforward
+`Python unittest <https://docs.python.org/3/library/unittest.html>`__.
+Refer to the `Python unittest documentation
+<https://docs.python.org/3/library/unittest.html>`__ for additional information
+on writing such tests.
``oe-selftest``
---------------
These tests are more complex due to the setup required behind the scenes
-for full builds. Rather than directly using Python's unittest, the code
+for full builds. Rather than directly using `Python unittest
+<https://docs.python.org/3/library/unittest.html>`__, the code
wraps most of the standard objects. The tests can be simple, such as
testing a command from within the OE build environment using the
following example::
@@ -386,14 +391,14 @@ so tests within a given test class should always run in the same build,
while tests in different classes or modules may be split into different
builds. There is no data store available for these tests since the tests
launch the ``bitbake`` command and exist outside of its context. As a
-result, common bitbake library functions (bb.\*) are also unavailable.
+result, common BitBake library functions (``bb.\*``) are also unavailable.
``testimage``
-------------
These tests are run once an image is up and running, either on target
hardware or under QEMU. As a result, they are assumed to be running in a
-target image environment, as opposed to a host build environment. A
+target image environment, as opposed to in a host build environment. A
simple example from ``meta/lib/oeqa/runtime/cases/python.py`` contains
the following::
@@ -408,19 +413,19 @@ the following::
In this example, the ``OERuntimeTestCase`` class wraps
``unittest.TestCase``. Within the test, ``self.target`` represents the
-target system, where commands can be run on it using the ``run()``
+target system, where commands can be run using the ``run()``
method.
-To ensure certain test or package dependencies are met, you can use the
+To ensure certain tests or package dependencies are met, you can use the
``OETestDepends`` and ``OEHasPackage`` decorators. For example, the test
-in this example would only make sense if python3-core is installed in
+in this example would only make sense if ``python3-core`` is installed in
the image.
``testsdk_ext``
---------------
These tests are run against built extensible SDKs (eSDKs). The tests can
-assume that the eSDK environment has already been setup. An example from
+assume that the eSDK environment has already been set up. An example from
``meta/lib/oeqa/sdk/cases/devtool.py`` contains the following::
class DevtoolTest(OESDKExtTestCase):
@@ -467,9 +472,9 @@ following::
output = self._run(cmd)
self.assertEqual(output, "Hello, world\n")
-In this example, if nativesdk-python3-core has been installed into the SDK, the code runs
-the python3 interpreter with a basic command to check it is working
-correctly. The test would only run if Python3 is installed in the SDK.
+In this example, if ``nativesdk-python3-core`` has been installed into the SDK,
+the code runs the ``python3`` interpreter with a basic command to check it is
+working correctly. The test would only run if Python3 is installed in the SDK.
``oe-build-perf-test``
----------------------
@@ -507,15 +512,15 @@ workers, consider the following:
**Running "cleanall" is not permitted.**
-This can delete files from DL_DIR which would potentially break other
-builds running in parallel. If this is required, DL_DIR must be set to
+This can delete files from :term:`DL_DIR` which would potentially break other
+builds running in parallel. If this is required, :term:`DL_DIR` must be set to
an isolated directory.
**Running "cleansstate" is not permitted.**
-This can delete files from SSTATE_DIR which would potentially break
-other builds running in parallel. If this is required, SSTATE_DIR must
-be set to an isolated directory. Alternatively, you can use the "-f"
+This can delete files from :term:`SSTATE_DIR` which would potentially break
+other builds running in parallel. If this is required, :term:`SSTATE_DIR` must
+be set to an isolated directory. Alternatively, you can use the ``-f``
option with the ``bitbake`` command to "taint" tasks by changing the
sstate checksums to ensure sstate cache items will not be reused.
diff --git a/documentation/test-manual/reproducible-builds.rst b/documentation/test-manual/reproducible-builds.rst
index 5977366c9e..91f94a5c74 100644
--- a/documentation/test-manual/reproducible-builds.rst
+++ b/documentation/test-manual/reproducible-builds.rst
@@ -19,13 +19,13 @@ Why it matters
==============
The project aligns with the `Reproducible Builds project
-<https://reproducible-builds.org/>`_, which shares information about why
+<https://reproducible-builds.org/>`__, which shares information about why
reproducibility matters. The primary focus of the project is the ability to
detect security issues being introduced. However, from a Yocto Project
perspective, it is also hugely important that our builds are deterministic. When
you build a given input set of metadata, we expect you to get consistent output.
-This has always been a key focus but, :yocto_docs:`since release 3.1 ("dunfell")
-</ref-manual/migration-3.1.html#reproducible-builds-now-enabled-by-default>`,
+This has always been a key focus but, :ref:`since release 3.1 ("dunfell")
+<migration-guides/migration-3.1:reproducible builds now enabled by default>`,
it is now true down to the binary level including timestamps.
For example, at some point in the future life of a product, you find that you
@@ -68,17 +68,6 @@ things we do within the build system to ensure reproducibility include:
- Filtering the tools available from the host's ``PATH`` to only a specific set
of tools, set using the :term:`HOSTTOOLS` variable.
-.. note::
-
- Because of an open bug in GCC, using ``DISTRO_FEATURES:append = " lto"`` or
- adding ``-flto`` (Link Time Optimization) to ``CFLAGS`` makes the resulting
- binary non-reproducible, in that it depends on the full absolute build path
- to ``recipe-sysroot-native``, so installing the Yocto Project in a different
- directory results in a different binary.
-
- This issue is addressed by
- :yocto_bugs:`bug 14481 - Programs built with -flto are not reproducible</show_bug.cgi?id=14481>`.
-
=========================================
Can we prove the project is reproducible?
=========================================
@@ -103,10 +92,12 @@ run::
oe-selftest -r reproducible.ReproducibleTests.test_reproducible_builds
This defaults to including a ``world`` build so, if other layers are added, it would
-also run the tests for recipes in the additional layers. The first build will be
-run using :ref:`Shared State <overview-manual/concepts:Shared State>` if
+also run the tests for recipes in the additional layers. Different build targets
+can be defined using the :term:`OEQA_REPRODUCIBLE_TEST_TARGET` variable in ``local.conf``.
+The first build will be run using :ref:`Shared State <overview-manual/concepts:Shared State>` if
available, the second build explicitly disables
-:ref:`Shared State <overview-manual/concepts:Shared State>` and builds on the
+:ref:`Shared State <overview-manual/concepts:Shared State>` except for recipes defined in
+the :term:`OEQA_REPRODUCIBLE_TEST_SSTATE_TARGETS` variable, and builds on the
specific host the build is running on. This means we can test reproducibility
builds between different host distributions over time on the Autobuilder.
diff --git a/documentation/test-manual/test-process.rst b/documentation/test-manual/test-process.rst
index 4c3b32bfea..7bec5ba828 100644
--- a/documentation/test-manual/test-process.rst
+++ b/documentation/test-manual/test-process.rst
@@ -20,8 +20,8 @@ helps review and test patches and this is his testing tree).
We have two broad categories of test builds, including "full" and
"quick". On the Autobuilder, these can be seen as "a-quick" and
"a-full", simply for ease of sorting in the UI. Use our Autobuilder
-console view to see where me manage most test-related items, available
-at: :yocto_ab:`/typhoon/#/console`.
+:yocto_ab:`console view </typhoon/#/console>` to see where we manage most
+test-related items.
Builds are triggered manually when the test branches are ready. The
builds are monitored by the SWAT team. For additional information, see
@@ -34,24 +34,21 @@ which the result was required.
The Autobuilder does build the ``master`` branch once daily for several
reasons, in particular, to ensure the current ``master`` branch does
-build, but also to keep ``yocto-testresults``
-(:yocto_git:`/yocto-testresults/`),
-buildhistory
-(:yocto_git:`/poky-buildhistory/`), and
-our sstate up to date. On the weekend, there is a master-next build
+build, but also to keep (:yocto_git:`yocto-testresults </yocto-testresults/>`),
+(:yocto_git:`buildhistory </poky-buildhistory/>`), and
+our sstate up to date. On the weekend, there is a ``master-next`` build
instead to ensure the test results are updated for the less frequently
run targets.
-Performance builds (buildperf-\* targets in the console) are triggered
+Performance builds (``buildperf-\*`` targets in the console) are triggered
separately every six hours and automatically push their results to the
-buildstats repository at:
-:yocto_git:`/yocto-buildstats/`.
+:yocto_git:`buildstats </yocto-buildstats/>` repository.
-The 'quick' targets have been selected to be the ones which catch the
-most failures or give the most valuable data. We run 'fast' ptests in
+The "quick" targets have been selected to be the ones which catch the
+most failures or give the most valuable data. We run "fast" ptests in
this case for example but not the ones which take a long time. The quick
-target doesn't include \*-lsb builds for all architectures, some world
-builds and doesn't trigger performance tests or ltp testing. The full
+target doesn't include ``\*-lsb`` builds for all architectures, some ``world``
+builds and doesn't trigger performance tests or ``ltp`` testing. The full
build includes all these things and is slower but more comprehensive.
Release Builds
@@ -67,12 +64,12 @@ that in :ref:`test-manual/test-process:day to day development`, in that the
a-full target of the Autobuilder is used but in addition the form is
configured to generate and publish artifacts and the milestone number,
version, release candidate number and other information is entered. The
-box to "generate an email to QA"is also checked.
+box to "generate an email to QA" is also checked.
-When the build completes, an email is sent out using the send-qa-email
-script in the ``yocto-autobuilder-helper`` repository to the list of
-people configured for that release. Release builds are placed into a
-directory in https://autobuilder.yocto.io/pub/releases on the
+When the build completes, an email is sent out using the ``send-qa-email``
+script in the :yocto_git:`yocto-autobuilder-helper </yocto-autobuilder-helper>`
+repository to the list of people configured for that release. Release builds
+are placed into a directory in https://autobuilder.yocto.io/pub/releases on the
Autobuilder which is included in the email. The process from here is
more manual and control is effectively passed to release engineering.
The next steps include:
@@ -80,14 +77,15 @@ The next steps include:
- QA teams respond to the email saying which tests they plan to run and
when the results will be available.
-- QA teams run their tests and share their results in the yocto-
- testresults-contrib repository, along with a summary of their
- findings.
+- QA teams run their tests and share their results in the
+ :yocto_git:`yocto-testresults-contrib </yocto-testresults-contrib>`
+ repository, along with a summary of their findings.
- Release engineering prepare the release as per their process.
- Test results from the QA teams are included into the release in
- separate directories and also uploaded to the yocto-testresults
+ separate directories and also uploaded to the
+ :yocto_git:`yocto-testresults </yocto-testresults>`
repository alongside the other test results for the given revision.
- The QA report in the final release is regenerated using resulttool to
diff --git a/documentation/test-manual/understand-autobuilder.rst b/documentation/test-manual/understand-autobuilder.rst
index b6809ce7bd..6b4fab4f0b 100644
--- a/documentation/test-manual/understand-autobuilder.rst
+++ b/documentation/test-manual/understand-autobuilder.rst
@@ -9,8 +9,8 @@ Execution Flow within the Autobuilder
The "a-full" and "a-quick" targets are the usual entry points into the
Autobuilder and it makes sense to follow the process through the system
-starting there. This is best visualized from the Autobuilder Console
-view (:yocto_ab:`/typhoon/#/console`).
+starting there. This is best visualized from the :yocto_ab:`Autobuilder
+Console view </typhoon/#/console>`.
Each item along the top of that view represents some "target build" and
these targets are all run in parallel. The 'full' build will trigger the
@@ -18,9 +18,9 @@ majority of them, the "quick" build will trigger some subset of them.
The Autobuilder effectively runs whichever configuration is defined for
each of those targets on a separate buildbot worker. To understand the
configuration, you need to look at the entry on ``config.json`` file
-within the ``yocto-autobuilder-helper`` repository. The targets are
-defined in the ‘overrides' section, a quick example could be qemux86-64
-which looks like::
+within the :yocto_git:`yocto-autobuilder-helper </yocto-autobuilder-helper>`
+repository. The targets are defined in the ``overrides`` section, a quick
+example could be ``qemux86-64`` which looks like::
"qemux86-64" : {
"MACHINE" : "qemux86-64",
@@ -32,8 +32,8 @@ which looks like::
}
},
-And to expand that, you need the "arch-qemu" entry from
-the "templates" section, which looks like::
+And to expand that, you need the ``arch-qemu`` entry from
+the ``templates`` section, which looks like::
"arch-qemu" : {
"BUILDINFO" : true,
@@ -54,11 +54,11 @@ the "templates" section, which looks like::
}
},
-Combining these two entries you can see that "qemux86-64" is a three step build where the
-``bitbake BBTARGETS`` would be run, then ``bitbake SANITYTARGETS`` for each step; all for
-``MACHINE="qemx86-64"`` but with differing SDKMACHINE settings. In step
-1 an extra variable is added to the ``auto.conf`` file to enable wic
-image generation.
+Combining these two entries you can see that ``qemux86-64`` is a three step
+build where ``bitbake BBTARGETS`` would be run, then ``bitbake SANITYTARGETS``
+for each step; all for ``MACHINE="qemux86-64"`` but with differing
+:term:`SDKMACHINE` settings. In step 1, an extra variable is added to the
+``auto.conf`` file to enable wic image generation.
While not every detail of this is covered here, you can see how the
template mechanism allows quite complex configurations to be built up
@@ -88,9 +88,9 @@ roughly consist of:
#. *Obtain yocto-autobuilder-helper*
- This step clones the ``yocto-autobuilder-helper`` git repository.
- This is necessary to prevent the requirement to maintain all the
- release or project-specific code within Buildbot. The branch chosen
+ This step clones the :yocto_git:`yocto-autobuilder-helper </yocto-autobuilder-helper>`
+ git repository. This is necessary to avoid the requirement to maintain all
+ the release or project-specific code within Buildbot. The branch chosen
matches the release being built so we can support older releases and
still make changes in newer ones.
@@ -163,16 +163,17 @@ Autobuilder Worker Janitor
--------------------------
This is a process running on each Worker that performs two basic
-operations, including background file deletion at IO idle (see :ref:`test-manual/understand-autobuilder:Autobuilder Target Execution Overview`: Run clobberdir) and
-maintenance of a cache of cloned repositories to improve the speed
+operations, including background file deletion at IO idle (see
+"Run clobberdir" in :ref:`test-manual/understand-autobuilder:Autobuilder Target Execution Overview`)
+and maintenance of a cache of cloned repositories to improve the speed
the system can checkout repositories.
Shared DL_DIR
-------------
-The Workers are all connected over NFS which allows DL_DIR to be shared
+The Workers are all connected over NFS which allows :term:`DL_DIR` to be shared
between them. This reduces network accesses from the system and allows
-the build to be sped up. Usage of the directory within the build system
+the build to be sped up. The usage of the directory within the build system
is designed to be able to be shared over NFS.
Shared SSTATE_DIR
@@ -180,8 +181,8 @@ Shared SSTATE_DIR
The Workers are all connected over NFS which allows the ``sstate``
directory to be shared between them. This means once a Worker has built
-an artifact, all the others can benefit from it. Usage of the directory
-within the directory is designed for sharing over NFS.
+an artifact, all the others can benefit from it. The usage of the directory
+within the build system is designed for sharing over NFS.
Resulttool
----------
@@ -192,7 +193,7 @@ in a given build and their status. Additional information, such as
failure logs or the time taken to run the tests, may also be included.
Resulttool is part of OpenEmbedded-Core and is used to manipulate these
-json results files. It has the ability to merge files together, display
+JSON results files. It has the ability to merge files together, display
reports of the test results and compare different result files.
For details, see :yocto_wiki:`/Resulttool`.
@@ -204,9 +205,9 @@ The ``scripts/run-config`` execution is where most of the work within
the Autobuilder happens. It runs through a number of steps; the first
are general setup steps that are run once and include:
-#. Set up any ``buildtools-tarball`` if configured.
+#. Set up any :term:`buildtools` tarball if configured.
-#. Call "buildhistory-init" if buildhistory is configured.
+#. Call ``buildhistory-init`` if :ref:`ref-classes-buildhistory` is configured.
For each step that is configured in ``config.json``, it will perform the
following:
@@ -242,7 +243,7 @@ of post-build steps, including:
#. Call ``scripts/upload-error-reports`` to send any error reports
generated to the remote server.
-#. Cleanup the build directory using
+#. Cleanup the :term:`Build Directory` using
:ref:`test-manual/understand-autobuilder:clobberdir` if the build was successful,
else rename it to "build-renamed" for potential future debugging.
@@ -250,15 +251,16 @@ Deploying Yocto Autobuilder
===========================
The most up to date information about how to setup and deploy your own
-Autobuilder can be found in README.md in the ``yocto-autobuilder2``
-repository.
+Autobuilder can be found in :yocto_git:`README.md </yocto-autobuilder2/tree/README.md>`
+in the :yocto_git:`yocto-autobuilder2 </yocto-autobuilder2>` repository.
-We hope that people can use the ``yocto-autobuilder2`` code directly but
-it is inevitable that users will end up needing to heavily customise the
-``yocto-autobuilder-helper`` repository, particularly the
-``config.json`` file as they will want to define their own test matrix.
+We hope that people can use the :yocto_git:`yocto-autobuilder2 </yocto-autobuilder2>`
+code directly but it is inevitable that users will end up needing to heavily
+customize the :yocto_git:`yocto-autobuilder-helper </yocto-autobuilder-helper>`
+repository, particularly the ``config.json`` file as they will want to define
+their own test matrix.
-The Autobuilder supports wo customization options:
+The Autobuilder supports two customization options:
- variable substitution
@@ -278,7 +280,7 @@ environment::
$ ABHELPER_JSON="config.json /some/location/local.json"
One issue users often run into is validation of the ``config.json`` files. A
-tip for minimizing issues from invalid json files is to use a Git
+tip for minimizing issues from invalid JSON files is to use a Git
``pre-commit-hook.sh`` script to verify the JSON file before committing
it. Create a symbolic link as follows::
diff --git a/documentation/test-manual/yocto-project-compatible.rst b/documentation/test-manual/yocto-project-compatible.rst
index 96c12ac083..65d924fad9 100644
--- a/documentation/test-manual/yocto-project-compatible.rst
+++ b/documentation/test-manual/yocto-project-compatible.rst
@@ -27,7 +27,7 @@ In the second version of the program, a script was added to make validation
easier and clearer, the script is called ``yocto-check-layer`` and is
available in :term:`OpenEmbedded-Core (OE-Core)`.
-See :ref:`dev-manual/common-tasks:making sure your layer is compatible with yocto project`
+See :ref:`dev-manual/layers:making sure your layer is compatible with yocto project`
for details.
========
diff --git a/documentation/toaster-manual/intro.rst b/documentation/toaster-manual/intro.rst
index a324744b7d..046ff88ce8 100644
--- a/documentation/toaster-manual/intro.rst
+++ b/documentation/toaster-manual/intro.rst
@@ -76,9 +76,8 @@ extensive information about the build process.
- See performance information such as build time, task time, CPU
usage, and disk I/O.
-For an overview of Toaster shipped with the Yocto Project &DISTRO;
-Release, see the "`Toaster - Yocto Project
-2.2 <https://youtu.be/BlXdOYLgPxA>`__" video.
+For an overview of Toaster, see this
+`introduction video <https://youtu.be/BlXdOYLgPxA>`__.
Installation Options
====================
diff --git a/documentation/toaster-manual/reference.rst b/documentation/toaster-manual/reference.rst
index 1bb9f98cca..755b895cee 100644
--- a/documentation/toaster-manual/reference.rst
+++ b/documentation/toaster-manual/reference.rst
@@ -28,8 +28,7 @@ at :oe_layerindex:`/`. You can find the code for this
layer index's web application at :yocto_git:`/layerindex-web/`.
When you tie a layer source into Toaster, it can query the layer source
-through a
-`REST <https://en.wikipedia.org/wiki/Representational_state_transfer>`__
+through a :wikipedia:`REST <Representational_state_transfer>`
API, store the information about the layers in the Toaster database, and
then show the information to users. Users are then able to view that
information and build layers from Toaster itself without having to
@@ -66,7 +65,7 @@ layers.
For general information on layers, see the
":ref:`overview-manual/yp-intro:the yocto project layer model`"
section in the Yocto Project Overview and Concepts Manual. For information on how
-to create layers, see the ":ref:`dev-manual/common-tasks:understanding and creating layers`"
+to create layers, see the ":ref:`dev-manual/layers:understanding and creating layers`"
section in the Yocto Project Development Tasks Manual.
Configuring Toaster to Hook Into Your Layer Index
@@ -189,17 +188,17 @@ The ``bldcontrol/management/commands/checksettings.py`` file controls
workflow configuration. Here is the process to
initially populate this database.
-1. The default project settings are set from
+#. The default project settings are set from
``orm/fixtures/settings.xml``.
-2. The default project distro and layers are added from
+#. The default project distro and layers are added from
``orm/fixtures/poky.xml`` if poky is installed. If poky is not
installed, they are added from ``orm/fixtures/oe-core.xml``.
-3. If the ``orm/fixtures/custom.xml`` file exists, then its values are
+#. If the ``orm/fixtures/custom.xml`` file exists, then its values are
added.
-4. The layer index is then scanned and added to the database.
+#. The layer index is then scanned and added to the database.
Once these steps complete, Toaster is set up and ready to use.
@@ -369,8 +368,8 @@ Remote Toaster Monitoring
Toaster has an API that allows remote management applications to
directly query the state of the Toaster server and its builds in a
machine-to-machine manner. This API uses the
-`REST <https://en.wikipedia.org/wiki/Representational_state_transfer>`__
-interface and the transfer of JSON files. For example, you might monitor
+:wikipedia:`REST <Representational_state_transfer>` interface and the
+transfer of JSON files. For example, you might monitor
a build inside a container through well supported known HTTP ports in
order to easily access a Toaster server inside the container. In this
example, when you use this direct JSON API, you avoid having web page
@@ -522,14 +521,13 @@ tasks. You can locate these commands in the
- When using ``manage.py`` commands given a default configuration,
you must be sure that your working directory is set to the
- :term:`Build Directory`. Using
- ``manage.py`` commands from the Build Directory allows Toaster to
- find the ``toaster.sqlite`` file, which is located in the Build
- Directory.
+ :term:`Build Directory`. Using ``manage.py`` commands from the
+ :term:`Build Directory` allows Toaster to find the ``toaster.sqlite``
+ file, which is located in the :term:`Build Directory`.
- For non-default database configurations, it is possible that you
can use ``manage.py`` commands from a directory other than the
- Build Directory. To do so, the ``toastermain/settings.py`` file
+ :term:`Build Directory`. To do so, the ``toastermain/settings.py`` file
must be configured to point to the correct database backend.
``buildslist``
@@ -549,7 +547,7 @@ database.
You need to run the ``buildslist`` command first to identify existing
builds in the database before using the
:ref:`toaster-manual/reference:\`\`builddelete\`\`` command. Here is an
-example that assumes default repository and build directory names:
+example that assumes default repository and :term:`Build Directory` names:
.. code-block:: shell
diff --git a/documentation/toaster-manual/setup-and-use.rst b/documentation/toaster-manual/setup-and-use.rst
index 72a15b5f2d..a0c27499ba 100644
--- a/documentation/toaster-manual/setup-and-use.rst
+++ b/documentation/toaster-manual/setup-and-use.rst
@@ -23,8 +23,8 @@ Once in that directory, source the build environment script::
$ source oe-init-build-env
-Next, from the build directory (e.g.
-``poky/build``), start Toaster using this command::
+Next, from the :term:`Build Directory` (e.g. ``poky/build``), start Toaster
+using this command::
$ source toaster start
@@ -124,14 +124,14 @@ causes Toaster to create and use ``$TOASTER_DIR./_toaster_clones``.
The Build Directory
===================
-Toaster creates a build directory within your Source Directory (e.g.
+Toaster creates a :term:`Build Directory` within your Source Directory (e.g.
``poky``) to execute the builds.
Alternatively, if you would like all of your Toaster related files and
directories to be in a particular location, you can set the
``TOASTER_DIR`` environment variable, which takes precedence over your
current working directory. Setting this environment variable causes
-Toaster to use ``$TOASTER_DIR/build`` as the build directory.
+Toaster to use ``$TOASTER_DIR/build`` as the :term:`Build Directory`.
Creating a Django Superuser
===========================
@@ -152,8 +152,8 @@ superuser by following these steps:
$ export PATH=$PATH:$HOME/.local/bin
#. From the directory containing the Toaster database, which by default
- is the :term:`Build Directory`,
- invoke the ``createsuperuser`` command from ``manage.py``::
+ is the :term:`Build Directory`, invoke the ``createsuperuser`` command from
+ ``manage.py``::
$ cd poky/build
$ ../bitbake/lib/toaster/manage.py createsuperuser
@@ -179,7 +179,7 @@ example, if you are running Toaster locally, use the following URL::
http://127.0.0.1:8000/admin
You can use the Django administration interface to set Toaster configuration
-parameters such as the build directory, layer sources, default variable
+parameters such as the :term:`Build Directory`, layer sources, default variable
values, and BitBake versions.
Setting Up a Production Instance of Toaster
@@ -317,8 +317,7 @@ Perform the following steps to install Toaster:
the ":ref:`toaster-manual/reference:Configuring Toaster`" section.
This line also runs the ``checksettings`` command, which configures
- the location of the Toaster :term:`Build Directory`.
- The Toaster
+ the location of the Toaster :term:`Build Directory`. The Toaster
root directory ``TOASTER_DIR`` determines where the Toaster build
directory is created on the file system. In the example above,
``TOASTER_DIR`` is set as follows::
@@ -326,7 +325,7 @@ Perform the following steps to install Toaster:
/var/www/toaster/poky
- This setting causes the Toaster build directory to be::
+ This setting causes the Toaster :term:`Build Directory` to be::
/var/www/toaster/poky/build
@@ -366,7 +365,7 @@ Perform the following steps to install Toaster:
/etc/apache2/conf.d/toaster.conf
- Following is a sample Apache configuration for Toaster you can follow:
+ Here is a sample Apache configuration for Toaster you can follow:
.. code-block:: apache
@@ -496,7 +495,7 @@ The Toaster web interface allows you to do the following:
Toaster Web Interface Videos
----------------------------
-Following are several videos that show how to use the Toaster GUI:
+Here are several videos that show how to use the Toaster GUI:
- *Build Configuration:* This
`video <https://www.youtube.com/watch?v=qYgDZ8YzV6w>`__ overviews and
diff --git a/documentation/transitioning-to-a-custom-environment.rst b/documentation/transitioning-to-a-custom-environment.rst
index 6b34fedc26..6ff55e5619 100644
--- a/documentation/transitioning-to-a-custom-environment.rst
+++ b/documentation/transitioning-to-a-custom-environment.rst
@@ -42,7 +42,7 @@ Transitioning to a custom environment for systems development
You might want to start with the build specification that Poky provides
(which is reference embedded distribution) and then add your newly chosen
layers to that. Here is the information :ref:`about adding layers
- <dev-manual/common-tasks:Understanding and Creating Layers>`.
+ <dev-manual/layers:Understanding and Creating Layers>`.
#. **Based on the layers you've chosen, make needed changes in your
configuration**.
@@ -58,7 +58,7 @@ Transitioning to a custom environment for systems development
releases. If you are using a Yocto Project release earlier than 2.4, use the
``yocto-layer create`` tool. The ``bitbake-layers`` tool also provides a number
of other useful layer-related commands. See
- :ref:`dev-manual/common-tasks:creating a general layer using the
+ :ref:`dev-manual/layers:creating a general layer using the
\`\`bitbake-layers\`\` script` section.
#. **Create your own layer for the BSP you're going to use**.
@@ -79,12 +79,12 @@ Transitioning to a custom environment for systems development
process of refinement. Start by getting each step of the build process
working beginning with fetching all the way through packaging. Next, run the
software on your target and refine further as needed. See :ref:`Writing a New
- Recipe <dev-manual/common-tasks:writing a new recipe>` in the
+ Recipe <dev-manual/new-recipe:writing a new recipe>` in the
Yocto Project Development Tasks Manual for more information.
#. **Now you're ready to create an image recipe**.
There are a number of ways to do this. However, it is strongly recommended
- that you have your own image recipe - don't try appending to existing image
+ that you have your own image recipe --- don't try appending to existing image
recipes. Recipes for images are trivial to create and you usually want to
fully customize their contents.
@@ -103,7 +103,7 @@ Transitioning to a custom environment for systems development
needs to change for your distribution. If you find yourself adding a lot of
configuration to your local.conf file aside from paths and other typical
local settings, it's time to :ref:`consider creating your own distribution
- <dev-manual/common-tasks:creating your own distribution>`.
+ <dev-manual/custom-distribution:creating your own distribution>`.
You can add product specifications that can customize the distribution if
needed in other layers. You can also add other functionality specific to the
diff --git a/documentation/what-i-wish-id-known.rst b/documentation/what-i-wish-id-known.rst
index 46c5cf19f9..5bc55804f6 100644
--- a/documentation/what-i-wish-id-known.rst
+++ b/documentation/what-i-wish-id-known.rst
@@ -29,8 +29,9 @@ contact us with other suggestions.
#. **Get to know the layer index:**
All layers can be found in the :oe_layerindex:`layer index <>`. Layers which
have applied for Yocto Project Compatible status (structure continuity
- assurance and testing) can be found in the :yocto_home:`Yocto Project Compatible index
- </software-over/layer/>`. Generally check the Compatible layer index first,
+ assurance and testing) can be found in the :yocto_home:`Yocto Project
+ Compatible Layers </development/yocto-project-compatible-layers/>` page.
+ Generally check the Compatible layer index first,
and if you don't find the necessary layer check the general layer index. The
layer index is an original artifact from the Open Embedded Project. As such,
that index doesn't have the curating and testing that the Yocto Project
@@ -132,7 +133,7 @@ contact us with other suggestions.
say "bitbake foo" where "foo" is the name for a specific recipe. As you
become more advanced using the Yocto Project, and if builds are failing, it
can be useful to make sure the fetch itself works as desired. Here are some
- valuable links: :ref:`dev-manual/common-tasks:Using a Development
+ valuable links: :ref:`dev-manual/development-shell:Using a Development
Shell` for information on how to build and run a specific task using
devshell. Also, the :ref:`SDK manual shows how to build out a specific recipe
<sdk-manual/extensible:use \`\`devtool modify\`\` to modify the source of an existing component>`.
@@ -213,6 +214,13 @@ contact us with other suggestions.
OpenEmbedded build system. If you are interested in using this type of
interface to create images, see the :doc:`/toaster-manual/index`.
+ * **Discover the VSCode extension**: The `Yocto Project BitBake
+ <https://marketplace.visualstudio.com/items?itemName=yocto-project.yocto-bitbake>`__
+ extension for the Visual Studio Code IDE provides language features and
+ commands for working with the Yocto Project. If you are interested in using
+ this extension, visit its `marketplace page
+ <https://marketplace.visualstudio.com/items?itemName=yocto-project.yocto-bitbake>`__.
+
* **Have Available the Yocto Project Reference Manual**: Unlike the rest of
the Yocto Project manual set, this manual is comprised of material suited
for reference rather than procedures. You can get build details, a closer
diff --git a/meta-poky/README.poky.md b/meta-poky/README.poky.md
index a99c75ed5c..7dd5e6d831 100644
--- a/meta-poky/README.poky.md
+++ b/meta-poky/README.poky.md
@@ -34,19 +34,8 @@ For information about OpenEmbedded, see the
Contribution Guidelines
-----------------------
-The project works using a mailing list patch submission process. Patches
-should be sent to the mailing list for the repository the components
-originate from (see below). Throughout the Yocto Project, the README
-files in the component in question should detail where to send patches,
-who the maintainers are and where bugs should be reported.
-
-A guide to submitting patches to OpenEmbedded is available at:
-
-<https://www.openembedded.org/wiki/How_to_submit_a_patch_to_OpenEmbedded>
-
-There is good documentation on how to write/format patches at:
-
-<https://www.openembedded.org/wiki/Commit_Patch_Message_Guidelines>
+Please refer to our contributor guide here: https://docs.yoctoproject.org/dev/contributor-guide/
+for full details on how to submit changes.
Where to Send Patches
---------------------
diff --git a/meta-poky/classes/poky-bleeding.bbclass b/meta-poky/classes/poky-bleeding.bbclass
new file mode 100644
index 0000000000..3bfdcf1454
--- /dev/null
+++ b/meta-poky/classes/poky-bleeding.bbclass
@@ -0,0 +1,20 @@
+#
+# AUTOREV and PV containing '+git' needs to be set early, before any anonymous python
+# expands anything containing PV, else the parse process won't trigger the fetcher to
+# cache the needed version data
+#
+python pokybleeding_version_handler () {
+ bpn = d.getVar("BPN")
+ # We're running before the class extension code at PreFinalise so manually fix BPN
+ bpn = bpn.replace("-nativesdk", "").replace("nativesdk-", "")
+
+ if bpn in d.getVar("POKY_AUTOREV_RECIPES").split():
+ if "pseudo" in bpn:
+ bb.warn("Here 5 %s %s" % (d.getVar("PN"), bpn))
+ d.setVar("SRCREV", "${AUTOREV}")
+ if "+git" not in d.getVar("PV"):
+ d.appendVar("PV", "+git")
+}
+
+addhandler pokybleeding_version_handler
+pokybleeding_version_handler[eventmask] = "bb.event.RecipePreFinalise"
diff --git a/meta-poky/conf/distro/include/gcsections.inc b/meta-poky/conf/distro/include/gcsections.inc
index a1f8651ae9..0e7bd2efff 100644
--- a/meta-poky/conf/distro/include/gcsections.inc
+++ b/meta-poky/conf/distro/include/gcsections.inc
@@ -27,8 +27,10 @@ LDFLAGS_SECTION_REMOVAL:pn-nativesdk-mingw-w64-winpthreads = ""
# set default for target
CFLAGS:append:class-target = " ${CFLAGS_SECTION_REMOVAL}"
+CXXFLAGS:append:class-target = " ${CFLAGS_SECTION_REMOVAL}"
LDFLAGS:append:class-target = " ${LDFLAGS_SECTION_REMOVAL}"
# set default for nativesdk
CFLAGS:append:class-nativesdk = " ${CFLAGS_SECTION_REMOVAL}"
+CXXFLAGS:append:class-nativesdk = " ${CFLAGS_SECTION_REMOVAL}"
LDFLAGS:append:class-nativesdk = " ${LDFLAGS_SECTION_REMOVAL}"
diff --git a/meta-poky/conf/distro/include/poky-distro-alt-test-config.inc b/meta-poky/conf/distro/include/poky-distro-alt-test-config.inc
index 0de2013826..3e10251e8b 100644
--- a/meta-poky/conf/distro/include/poky-distro-alt-test-config.inc
+++ b/meta-poky/conf/distro/include/poky-distro-alt-test-config.inc
@@ -1,8 +1,8 @@
-# Add an extra DISTRO_FEATURE
-DISTRO_FEATURES:append = " pam"
+# Add extra DISTRO_FEATUREs
+DISTRO_FEATURES:append = " pam usrmerge"
-# Use the LTSI Kernel
-PREFERRED_VERSION_linux-yocto = "5.10%"
+# Use our alternate kernel version
+PREFERRED_VERSION_linux-yocto = "6.6%"
# Ensure the kernel nfs server is enabled
KERNEL_FEATURES:append:pn-linux-yocto = " features/nfsd/nfsd-enable.scc"
diff --git a/meta-poky/conf/distro/include/poky-floating-revisions.inc b/meta-poky/conf/distro/include/poky-floating-revisions.inc
index 6f473de88c..0721a718bd 100644
--- a/meta-poky/conf/distro/include/poky-floating-revisions.inc
+++ b/meta-poky/conf/distro/include/poky-floating-revisions.inc
@@ -1,54 +1,22 @@
#
-# Package Versions for cutting edge testing:
+# Set recipe versions to auto-rev for cutting edge testing
#
-#SRCREV:pn-opkg-native ?= "${AUTOREV}"
-#SRCREV:pn-opkg-sdk ?= "${AUTOREV}"
-#SRCREV:pn-opkg ?= "${AUTOREV}"
-#SRCREV:pn-opkg-utils-native ?= "${AUTOREV}"
-#SRCREV:pn-opkg-utils ?= "${AUTOREV}"
-SRCREV:pn-gconf-dbus ?= "${AUTOREV}"
-SRCREV:pn-matchbox-common ?= "${AUTOREV}"
-SRCREV:pn-matchbox-config-gtk ?= "${AUTOREV}"
-SRCREV:pn-matchbox-desktop ?= "${AUTOREV}"
-SRCREV:pn-matchbox-keyboard ?= "${AUTOREV}"
-SRCREV:pn-matchbox-panel-2 ?= "${AUTOREV}"
-SRCREV:pn-matchbox-themes-extra ?= "${AUTOREV}"
-SRCREV:pn-matchbox-terminal ?= "${AUTOREV}"
-SRCREV:pn-matchbox-wm ?= "${AUTOREV}"
-SRCREV:pn-settings-daemon ?= "${AUTOREV}"
-SRCREV:pn-screenshot ?= "${AUTOREV}"
-SRCREV:pn-libfakekey ?= "${AUTOREV}"
-SRCREV:pn-psplash ?= "${AUTOREV}"
-SRCREV:pn-gtk-sato-engine ?= "${AUTOREV}"
-SRCREV:pn-matchbox-theme-sato ?= "${AUTOREV}"
-SRCREV:pn-sato-icon-theme ?= "${AUTOREV}"
-SRCREV:pn-matchbox-desktop-sato ?= "${AUTOREV}"
-SRCREV:pn-oh-puzzles ?= "${AUTOREV}"
-SRCREV:pn-libowl ?= "${AUTOREV}"
-SRCREV:pn-libmatchbox ?= "${AUTOREV}"
-SRCREV:pn-ofono ?= "${AUTOREV}"
-
-SRCREV:pn-dri2proto = "${AUTOREV}"
-#PREFERRED_VERSION_dri2proto ?= "1.99.1+git%"
-SRCREV:pn-libdrm = "${AUTOREV}"
-#PREFERRED_VERSION_libdrm ?= "2.4.0+git%"
-SRCREV:pn-libxcb = "${AUTOREV}"
-#PREFERRED_VERSION_libxcb ?= "1.1.90.1+gitr%"
-SRCREV:pn-lib-proto = "${AUTOREV}"
-#PREFERRED_VERSION_xcb-proto ?= "1.2+gitr%"
-SRCREV:pn-libxcb-sdk = "${AUTOREV}"
-#PREFERRED_VERSION_libxcb-sdk ?= "1.1.90.1+gitr%"
-SRCREV:pn-xf86-input-evdev = "${AUTOREV}"
-#PREFERRED_VERSION_xf86-input-evdev ?= "2.0.4"
-SRCREV:pn-xf86-input-mouse = "${AUTOREV}"
-#PREFERRED_VERSION_xf86-input-mouse ?= "1.3.0+git%"
-SRCREV:pn-xf86-input-keyboard = "${AUTOREV}"
-#PREFERRED_VERSION_xf86-input-keyboard ?= "1.3.1+git%"
-SRCREV:pn-xf86-input-synaptics = "${AUTOREV}"
-#PREFERRED_VERSION_xf86-input-synaptics ?= "0.15.2+git%"
-
-#SRCDATE_oprofile ?= "${DATE}"
-
-PREFERRED_VERSION_oprofile ?= "0.9.4+cvs${SRCDATE_oprofile}"
+INHERIT += "poky-bleeding"
+POKY_AUTOREV_RECIPES = "\
+ libmatchbox \
+ opkg-utils \
+ matchbox-config-gtk \
+ matchbox-desktop \
+ matchbox-keyboard \
+ matchbox-panel-2 \
+ matchbox-terminal \
+ matchbox-theme-sato \
+ matchbox-wm \
+ pseudo \
+ puzzles \
+ sato-icon-theme \
+ sato-screenshot \
+ settings-daemon \
+"
diff --git a/meta-poky/conf/distro/poky-altcfg.conf b/meta-poky/conf/distro/poky-altcfg.conf
index f03306e798..3d64bdd1fe 100644
--- a/meta-poky/conf/distro/poky-altcfg.conf
+++ b/meta-poky/conf/distro/poky-altcfg.conf
@@ -2,14 +2,16 @@
# An example of subclassing a distro, primarily used for testing alternate configuration
# combinations on the Yocto Project autobuilder
#
+
+PACKAGE_CLASSES ?= "package_ipk"
require conf/distro/poky.conf
DISTRO = "poky-altcfg"
DISTROOVERRIDES = "poky:poky-altcfg"
#DISTROOVERRIDES = "poky:linuxstdbase"
-INIT_MANAGER:poky-altcfg = "systemd"
+POKY_INIT_MANAGER:poky-altcfg = "systemd"
# systemd isn't suitable with musl
-INIT_MANAGER:poky-altcfg:libc-musl = "sysvinit"
+POKY_INIT_MANAGER:poky-altcfg:libc-musl = "sysvinit"
require conf/distro/include/poky-distro-alt-test-config.inc
diff --git a/meta-poky/conf/distro/poky-bleeding.conf b/meta-poky/conf/distro/poky-bleeding.conf
index 2d3e046a45..8cbf2284d0 100644
--- a/meta-poky/conf/distro/poky-bleeding.conf
+++ b/meta-poky/conf/distro/poky-bleeding.conf
@@ -1,8 +1,4 @@
-PREFERRED_VERSION_glib-2.0 ?= "2.17.4"
-PREFERRED_VERSION_glib-2.0-native ?= "2.17.4"
-PREFERRED_VERSION_atk ?= "1.22.0"
-PREFERRED_VERSION_pango ?= "1.21.2"
-PREFERRED_VERSION_gtk+ ?= "2.13.3"
+PREFERRED_PROVIDER_virtual/kernel = "linux-yocto-dev"
require conf/distro/include/poky-floating-revisions.inc
require conf/distro/poky.conf
diff --git a/meta-poky/conf/distro/poky-tiny.conf b/meta-poky/conf/distro/poky-tiny.conf
index 2fe0d478ff..f3dfa8107a 100644
--- a/meta-poky/conf/distro/poky-tiny.conf
+++ b/meta-poky/conf/distro/poky-tiny.conf
@@ -44,7 +44,7 @@ FULL_OPTIMIZATION="-Os -pipe ${DEBUG_FLAGS}"
# Distro config is evaluated after the machine config, so we have to explicitly
# set the kernel provider to override a machine config.
PREFERRED_PROVIDER_virtual/kernel = "linux-yocto-tiny"
-PREFERRED_VERSION_linux-yocto-tiny ?= "5.15%"
+PREFERRED_VERSION_linux-yocto-tiny ?= "6.6%"
# We can use packagegroup-core-boot, but in the future we may need a new packagegroup-core-tiny
#POKY_DEFAULT_EXTRA_RDEPENDS += "packagegroup-core-boot"
@@ -82,14 +82,11 @@ DISTRO_FEATURES = "${DISTRO_FEATURES_TINY} \
${DISTRO_FEATURES_WIFI} \
"
-# Enable LFS - see bug YOCTO #5865
-DISTRO_FEATURES:append:libc-musl = " largefile"
-
DISTRO_FEATURES:class-native = "${DISTRO_FEATURES_DEFAULT} ${POKY_DEFAULT_DISTRO_FEATURES}"
DISTRO_FEATURES:class-nativesdk = "${DISTRO_FEATURES_DEFAULT} ${POKY_DEFAULT_DISTRO_FEATURES}"
# enable mdev/busybox for init
-INIT_MANAGER:poky-tiny = "mdev-busybox"
+POKY_INIT_MANAGER:poky-tiny = "mdev-busybox"
# FIXME: Consider adding "modules" to MACHINE_FEATURES and using that in
# packagegroup-core-base to select modutils-initscripts or not. Similar with "net" and
@@ -125,3 +122,7 @@ SKIP_RECIPE[core-image-weston] = "not buildable with poky-tiny"
# Disable python usage in opkg-utils since it won't build with tiny config
PACKAGECONFIG:remove:pn-opkg-utils = "python"
+
+# If shadow-base is brought into the image, logins will fail because it
+# doesn't have the heuristics to work when CONFIG_MULTIUSER is unset.
+PACKAGE_EXCLUDE += "shadow-base"
diff --git a/meta-poky/conf/distro/poky.conf b/meta-poky/conf/distro/poky.conf
index b4c64c495f..5285753c31 100644
--- a/meta-poky/conf/distro/poky.conf
+++ b/meta-poky/conf/distro/poky.conf
@@ -1,26 +1,26 @@
DISTRO = "poky"
DISTRO_NAME = "Poky (Yocto Project Reference Distro)"
-DISTRO_VERSION = "4.1+snapshot-${METADATA_REVISION}"
-DISTRO_CODENAME = "langdale"
+DISTRO_VERSION = "5.0+snapshot-${METADATA_REVISION}"
+DISTRO_CODENAME = "styhead"
SDK_VENDOR = "-pokysdk"
SDK_VERSION = "${@d.getVar('DISTRO_VERSION').replace('snapshot-${METADATA_REVISION}', 'snapshot')}"
SDK_VERSION[vardepvalue] = "${SDK_VERSION}"
-MAINTAINER = "Poky <poky@lists.yoctoproject.org>"
+MAINTAINER = "Poky Maintainers <poky@lists.yoctoproject.org>"
TARGET_VENDOR = "-poky"
LOCALCONF_VERSION = "2"
# Override these in poky based distros
-POKY_DEFAULT_DISTRO_FEATURES = "largefile opengl ptest multiarch wayland vulkan"
+POKY_DEFAULT_DISTRO_FEATURES = "opengl ptest multiarch wayland vulkan"
POKY_DEFAULT_EXTRA_RDEPENDS = "packagegroup-core-boot"
POKY_DEFAULT_EXTRA_RRECOMMENDS = "kernel-module-af-packet"
DISTRO_FEATURES ?= "${DISTRO_FEATURES_DEFAULT} ${POKY_DEFAULT_DISTRO_FEATURES}"
-PREFERRED_VERSION_linux-yocto ?= "5.15%"
-PREFERRED_VERSION_linux-yocto-rt ?= "5.15%"
+PREFERRED_VERSION_linux-yocto ?= "6.6%"
+PREFERRED_VERSION_linux-yocto-rt ?= "6.6%"
SDK_NAME = "${DISTRO}-${TCLIBC}-${SDKMACHINE}-${IMAGE_BASENAME}-${TUNE_PKGARCH}-${MACHINE}"
SDKPATHINSTALL = "/opt/${DISTRO}/${SDK_VERSION}"
@@ -33,21 +33,20 @@ TCLIBCAPPEND = ""
PACKAGE_CLASSES ?= "package_rpm"
SANITY_TESTED_DISTROS ?= " \
- poky-3.3 \n \
- poky-3.4 \n \
- ubuntu-16.04 \n \
- ubuntu-18.04 \n \
+ poky-4.3 \n \
+ poky-5.0 \n \
ubuntu-20.04 \n \
- ubuntu-21.10 \n \
- fedora-34 \n \
- fedora-35 \n \
- centos-7 \n \
- centos-8 \n \
- debian-9 \n \
- debian-10 \n \
+ ubuntu-22.04 \n \
+ ubuntu-23.04 \n \
+ fedora-38 \n \
+ fedora-39 \n \
+ centosstream-8 \n \
debian-11 \n \
- opensuseleap-15.3 \n \
- almalinux-8.5 \n \
+ debian-12 \n \
+ opensuseleap-15.4 \n \
+ almalinux-8.8 \n \
+ almalinux-9.2 \n \
+ rocky-9 \n \
"
# add poky sanity bbclass
INHERIT += "poky-sanity"
@@ -67,4 +66,11 @@ INHERIT += "uninative"
BB_SIGNATURE_HANDLER ?= "OEEquivHash"
BB_HASHSERVE ??= "auto"
-INIT_MANAGER = "sysvinit"
+POKY_INIT_MANAGER = "sysvinit"
+INIT_MANAGER ?= "${POKY_INIT_MANAGER}"
+
+# We need debug symbols so that SPDX license manifests for the kernel work
+KERNEL_EXTRA_FEATURES:append = " features/debug/debug-kernel.scc"
+
+# Enable creation of SPDX manifests by default
+INHERIT += "create-spdx"
diff --git a/meta-poky/conf/layer.conf b/meta-poky/conf/layer.conf
index 357cc803da..13bba953a0 100644
--- a/meta-poky/conf/layer.conf
+++ b/meta-poky/conf/layer.conf
@@ -9,7 +9,7 @@ BBFILE_COLLECTIONS += "yocto"
BBFILE_PATTERN_yocto = "^${LAYERDIR}/"
BBFILE_PRIORITY_yocto = "5"
-LAYERSERIES_COMPAT_yocto = "kirkstone langdale"
+LAYERSERIES_COMPAT_yocto = "scarthgap"
# This should only be incremented on significant changes that will
# cause compatibility issues with other layers
diff --git a/meta-poky/conf/bblayers.conf.sample b/meta-poky/conf/templates/default/bblayers.conf.sample
index 8b1cbdfc5c..8b1cbdfc5c 100644
--- a/meta-poky/conf/bblayers.conf.sample
+++ b/meta-poky/conf/templates/default/bblayers.conf.sample
diff --git a/meta-poky/conf/conf-notes.txt b/meta-poky/conf/templates/default/conf-notes.txt
index cfd1f1977b..cfd1f1977b 100644
--- a/meta-poky/conf/conf-notes.txt
+++ b/meta-poky/conf/templates/default/conf-notes.txt
diff --git a/meta-poky/conf/templates/default/conf-summary.txt b/meta-poky/conf/templates/default/conf-summary.txt
new file mode 100644
index 0000000000..8fc03030c7
--- /dev/null
+++ b/meta-poky/conf/templates/default/conf-summary.txt
@@ -0,0 +1 @@
+This is the default build configuration for the Poky reference distribution.
diff --git a/meta-poky/conf/local.conf.sample b/meta-poky/conf/templates/default/local.conf.sample
index 4d2fa516fe..1a93c9bdcf 100644
--- a/meta-poky/conf/local.conf.sample
+++ b/meta-poky/conf/templates/default/local.conf.sample
@@ -5,7 +5,8 @@
# be set in this file. More adventurous users can look at
# local.conf.sample.extended which contains other examples of configuration which
# can be placed in this file but new users likely won't need any of them
-# initially.
+# initially. There's also site.conf.sample which contains examples of site specific
+# information such as proxy server addresses.
#
# Lines starting with the '#' character are commented out and in some cases the
# default values are provided as comments to show people example syntax. Enabling
@@ -30,13 +31,17 @@
# demonstration purposes:
#
#MACHINE ?= "beaglebone-yocto"
+#MACHINE ?= "genericarm64"
#MACHINE ?= "genericx86"
#MACHINE ?= "genericx86-64"
-#MACHINE ?= "edgerouter"
#
# This sets the default machine to be qemux86-64 if no other machine is selected:
MACHINE ??= "qemux86-64"
+# These are some of the more commonly used values. Looking at the files in the
+# meta/conf/machine directory, or the conf/machine directory of any additional layers
+# you add in will show all the available machines.
+
#
# Where to place downloads
#
@@ -138,7 +143,8 @@ DISTRO ?= "poky"
# "debug-tweaks" - make an image suitable for development
# e.g. ssh root access has a blank password
# There are other application targets that can be used here too, see
-# meta/classes/image.bbclass and meta/classes/core-image.bbclass for more details.
+# meta/classes-recipe/image.bbclass and
+# meta/classes-recipe/core-image.bbclass for more details.
# We default to enabling the debugging tweaks.
EXTRA_IMAGE_FEATURES ?= "debug-tweaks"
@@ -157,7 +163,7 @@ USER_CLASSES ?= "buildstats"
# The build system can test booting virtual machine images under qemu (an emulator)
# after any root filesystems are created and run tests against those images. It can also
# run tests against any SDK that are built. To enable this uncomment these lines.
-# See classes/test{image,sdk}.bbclass for further details.
+# See meta/classes-recipe/test{image,sdk}.bbclass for further details.
#IMAGE_CLASSES += "testimage testsdk"
#TESTIMAGE_AUTO:qemuall = "1"
@@ -222,15 +228,21 @@ BB_DISKMON_DIRS ??= "\
# Yocto Project SState Mirror
#
# The Yocto Project has prebuilt artefacts available for its releases, you can enable
-# use of these by uncommenting the following lines. This will mean the build uses
+# use of these by uncommenting some of the following lines. This will mean the build uses
# the network to check for artefacts at the start of builds, which does slow it down
-# equally, it will also speed up the builds by not having to build things if they are
+# initially but it will then speed up the builds by not having to build things if they are
# present in the cache. It assumes you can download something faster than you can build it
# which will depend on your network.
# Note: For this to work you also need hash-equivalence passthrough to the matching server
+# There is a choice between our sstate server directly and a faster content delivery network
+# (CDN) kindly provided by JSDelivr, uncomment one of the SSTATE_MIRRORS lines, not both.
+# Using the CDN rather than the yoctoproject.org address is suggested/preferred.
+#
+#BB_HASHSERVE_UPSTREAM = "hashserv.yocto.io:8687"
+#SSTATE_MIRRORS ?= "file://.* http://cdn.jsdelivr.net/yocto/sstate/all/PATH;downloadfilename=PATH"
#
-#BB_HASHSERVE_UPSTREAM = "typhoon.yocto.io:8687"
-#SSTATE_MIRRORS ?= "file://.* http://sstate.yoctoproject.org/4.0/PATH;downloadfilename=PATH"
+###SSTATE_MIRRORS ?= "file://.* http://sstate.yoctoproject.org/all/PATH;downloadfilename=PATH"
+
#
# Qemu configuration
diff --git a/meta-poky/conf/local.conf.sample.extended b/meta-poky/conf/templates/default/local.conf.sample.extended
index bc2dec9f52..bc2dec9f52 100644
--- a/meta-poky/conf/local.conf.sample.extended
+++ b/meta-poky/conf/templates/default/local.conf.sample.extended
diff --git a/meta-poky/conf/site.conf.sample b/meta-poky/conf/templates/default/site.conf.sample
index 5164fedf63..5164fedf63 100644
--- a/meta-poky/conf/site.conf.sample
+++ b/meta-poky/conf/templates/default/site.conf.sample
diff --git a/meta-poky/recipes-core/base-files/base-files_%.bbappend b/meta-poky/recipes-core/base-files/base-files_%.bbappend
new file mode 100644
index 0000000000..8802adbb46
--- /dev/null
+++ b/meta-poky/recipes-core/base-files/base-files_%.bbappend
@@ -0,0 +1 @@
+FILESEXTRAPATHS:prepend := "${THISDIR}/files:"
diff --git a/meta-poky/recipes-core/base-files/files/poky/motd b/meta-poky/recipes-core/base-files/files/poky/motd
new file mode 100644
index 0000000000..0fb6e633f0
--- /dev/null
+++ b/meta-poky/recipes-core/base-files/files/poky/motd
@@ -0,0 +1,5 @@
+
+WARNING: Poky is a reference Yocto Project distribution that should be used for
+testing and development purposes only. It is recommended that you create your
+own distribution for production use.
+
diff --git a/meta-selftest/classes/base-do-configure-modified.bbclass b/meta-selftest/classes/base-do-configure-modified.bbclass
new file mode 100644
index 0000000000..9175bb8ad9
--- /dev/null
+++ b/meta-selftest/classes/base-do-configure-modified.bbclass
@@ -0,0 +1,3 @@
+base_do_configure:append () {
+ echo "this changes base_do_configure() definiton "
+}
diff --git a/meta-selftest/conf/layer.conf b/meta-selftest/conf/layer.conf
index 0899806029..763ea011d4 100644
--- a/meta-selftest/conf/layer.conf
+++ b/meta-selftest/conf/layer.conf
@@ -9,4 +9,6 @@ BBFILE_COLLECTIONS += "selftest"
BBFILE_PATTERN_selftest = "^${LAYERDIR}/"
BBFILE_PRIORITY_selftest = "5"
-LAYERSERIES_COMPAT_selftest = "kirkstone langdale"
+addpylib ${LAYERDIR}/lib oeqa
+
+LAYERSERIES_COMPAT_selftest = "styhead"
diff --git a/meta-selftest/conf/multiconfig/muslmc.conf b/meta-selftest/conf/multiconfig/muslmc.conf
new file mode 100644
index 0000000000..043cd1ccc3
--- /dev/null
+++ b/meta-selftest/conf/multiconfig/muslmc.conf
@@ -0,0 +1,2 @@
+TCLIBC = "musl"
+TMPDIR = "${TOPDIR}/tmp-mc-musl"
diff --git a/meta-selftest/files/static-group b/meta-selftest/files/static-group
index b2e0e2f870..f7a66de24d 100644
--- a/meta-selftest/files/static-group
+++ b/meta-selftest/files/static-group
@@ -23,3 +23,7 @@ _apt:x:523:
weston-launch:x:524:
weston:x:525:
wayland:x:526:
+render:x:527:
+sgx:x:528:
+ptest:x:529:
+xuser:x:530:
diff --git a/meta-selftest/files/static-passwd b/meta-selftest/files/static-passwd
index 2cfd0c9b42..cc6c5acd5c 100644
--- a/meta-selftest/files/static-passwd
+++ b/meta-selftest/files/static-passwd
@@ -17,3 +17,5 @@ bind:x:521:521::/:/bin/nologin
builder:x:522:522::/:/bin/nologin
_apt:x:523:523::/:/bin/nologin
weston:x:525:525::/:/bin/nologin
+ptest:x:529:529::/:/bin/nologin
+xuser:x:530:530::/:/bin/nologin
diff --git a/meta-selftest/lib/oeqa/runtime/cases/dnf_runtime.py b/meta-selftest/lib/oeqa/runtime/cases/dnf_runtime.py
index 64a3502370..d2f0f88f7d 100644
--- a/meta-selftest/lib/oeqa/runtime/cases/dnf_runtime.py
+++ b/meta-selftest/lib/oeqa/runtime/cases/dnf_runtime.py
@@ -10,7 +10,8 @@ class DnfSelftest(DnfTest):
import tempfile
cls.temp_dir = tempfile.TemporaryDirectory(prefix="oeqa-remotefeeds-")
cls.repo_server = HTTPService(os.path.join(cls.tc.td['WORKDIR'], 'oe-rootfs-repo'),
- cls.tc.target.server_ip)
+ '0.0.0.0', port=cls.tc.target.server_port,
+ logger=cls.tc.logger)
cls.repo_server.start()
@classmethod
diff --git a/meta-selftest/lib/oeqa/runtime/cases/virgl.py b/meta-selftest/lib/oeqa/runtime/cases/virgl.py
index 144decdd6b..f19cdee9f0 100644
--- a/meta-selftest/lib/oeqa/runtime/cases/virgl.py
+++ b/meta-selftest/lib/oeqa/runtime/cases/virgl.py
@@ -15,4 +15,4 @@ class VirglTest(OERuntimeTestCase):
def test_kmscube(self):
status, output = self.target.run('kmscube')
self.assertEqual(status, 0, "kmscube exited with non-zero status %d and output:\n%s" %(status, output))
- self.assertIn('renderer: "virgl"', output, "kmscube does not seem to use virgl:\n%s" %(output))
+ self.assertIn('renderer: "virgl', output, "kmscube does not seem to use virgl:\n%s" %(output))
diff --git a/meta-selftest/recipes-devtools/mtd/mtd-utils-selftest/0001-tests-Remove-unused-linux-fs.h-header-from-includes.patch b/meta-selftest/recipes-devtools/mtd/mtd-utils-selftest/0001-tests-Remove-unused-linux-fs.h-header-from-includes.patch
new file mode 100644
index 0000000000..73d4a8475f
--- /dev/null
+++ b/meta-selftest/recipes-devtools/mtd/mtd-utils-selftest/0001-tests-Remove-unused-linux-fs.h-header-from-includes.patch
@@ -0,0 +1,31 @@
+From 6fb10bd18488ed84776675bc1b2982800a51d839 Mon Sep 17 00:00:00 2001
+From: Khem Raj <raj.khem@gmail.com>
+Date: Sat, 6 Aug 2022 20:14:38 -0700
+Subject: [mtd-utils][PATCH] tests: Remove unused linux/fs.h header from includes
+
+This header is not needed, moreover it includes linux/mount.h which is
+now in conflict[1] with glibc provided sys/mount.h from glibc 2.36 onwards
+
+[1] https://sourceware.org/glibc/wiki/Release/2.36
+
+Upstream-Status: Submitted [https://lists.infradead.org/pipermail/linux-mtd/2022-August/094667.html]
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+---
+ tests/fs-tests/lib/tests.c | 1 -
+ 1 file changed, 1 deletion(-)
+
+diff --git a/tests/fs-tests/lib/tests.c b/tests/fs-tests/lib/tests.c
+index d1a2e0c..3db0426 100644
+--- a/tests/fs-tests/lib/tests.c
++++ b/tests/fs-tests/lib/tests.c
+@@ -35,7 +35,6 @@
+ #include <sys/vfs.h>
+ #include <sys/mount.h>
+ #include <sys/statvfs.h>
+-#include <linux/fs.h>
+ #include <linux/jffs2.h>
+
+ #include "tests.h"
+--
+2.37.1
+
diff --git a/meta-selftest/recipes-devtools/mtd/mtd-utils-selftest_git.bb b/meta-selftest/recipes-devtools/mtd/mtd-utils-selftest_git.bb
new file mode 100644
index 0000000000..ca2141c972
--- /dev/null
+++ b/meta-selftest/recipes-devtools/mtd/mtd-utils-selftest_git.bb
@@ -0,0 +1,77 @@
+SUMMARY = "Tools for managing memory technology devices"
+HOMEPAGE = "http://www.linux-mtd.infradead.org/"
+DESCRIPTION = "mtd-utils tool is a generic Linux subsystem for memory devices, especially Flash devices."
+SECTION = "base"
+LICENSE = "GPL-2.0-or-later"
+LIC_FILES_CHKSUM = "file://COPYING;md5=0636e73ff0215e8d672dc4c32c317bb3 \
+ file://include/common.h;beginline=1;endline=17;md5=ba05b07912a44ea2bf81ce409380049c"
+
+inherit autotools pkgconfig update-alternatives
+
+DEPENDS = "zlib e2fsprogs util-linux"
+RDEPENDS:mtd-utils-tests += "bash"
+
+PV = "2.1.4"
+
+SRCREV = "c7f1bfa44a84d02061787e2f6093df5cc40b9f5c"
+SRC_URI = "git://git.infradead.org/mtd-utils.git;branch=master \
+ file://0001-tests-Remove-unused-linux-fs.h-header-from-includes.patch \
+ "
+
+S = "${WORKDIR}/git"
+
+# xattr support creates an additional compile-time dependency on acl because
+# the sys/acl.h header is needed. libacl is not needed and thus enabling xattr
+# regardless whether acl is enabled or disabled in the distro should be okay.
+PACKAGECONFIG ??= "${@bb.utils.filter('DISTRO_FEATURES', 'xattr', d)} lzo jffs ubifs"
+PACKAGECONFIG[lzo] = "--with-lzo,--without-lzo,lzo"
+PACKAGECONFIG[xattr] = "--with-xattr,--without-xattr,acl"
+PACKAGECONFIG[crypto] = "--with-crypto,--without-crypto,openssl"
+PACKAGECONFIG[jffs] = "--with-jffs,--without-jffs"
+PACKAGECONFIG[ubifs] = "--with-ubifs,--without-ubifs"
+PACKAGECONFIG[zstd] = "--with-zstd,--without-zstd,zstd"
+
+CPPFLAGS:append:riscv64 = " -pthread -D_REENTRANT"
+
+EXTRA_OEMAKE = "'CC=${CC}' 'RANLIB=${RANLIB}' 'AR=${AR}' 'CFLAGS=${CFLAGS} ${@bb.utils.contains('PACKAGECONFIG', 'xattr', '', '-DWITHOUT_XATTR', d)} -I${S}/include' 'BUILDDIR=${S}'"
+
+# Use higher priority than corresponding BusyBox-provided applets
+ALTERNATIVE_PRIORITY = "100"
+
+ALTERNATIVE:${PN} = "flashcp flash_eraseall flash_lock flash_unlock nanddump nandwrite"
+ALTERNATIVE:${PN}-ubifs = "ubiattach ubidetach ubimkvol ubirename ubirmvol ubirsvol ubiupdatevol"
+
+ALTERNATIVE_LINK_NAME[nandwrite] = "${sbindir}/nandwrite"
+ALTERNATIVE_LINK_NAME[nanddump] = "${sbindir}/nanddump"
+ALTERNATIVE_LINK_NAME[ubiattach] = "${sbindir}/ubiattach"
+ALTERNATIVE_LINK_NAME[ubidetach] = "${sbindir}/ubidetach"
+ALTERNATIVE_LINK_NAME[ubimkvol] = "${sbindir}/ubimkvol"
+ALTERNATIVE_LINK_NAME[ubirename] = "${sbindir}/ubirename"
+ALTERNATIVE_LINK_NAME[ubirmvol] = "${sbindir}/ubirmvol"
+ALTERNATIVE_LINK_NAME[ubirsvol] = "${sbindir}/ubirsvol"
+ALTERNATIVE_LINK_NAME[ubiupdatevol] = "${sbindir}/ubiupdatevol"
+ALTERNATIVE_LINK_NAME[flash_eraseall] = "${sbindir}/flash_eraseall"
+ALTERNATIVE_LINK_NAME[flash_lock] = "${sbindir}/flash_lock"
+ALTERNATIVE_LINK_NAME[flash_unlock] = "${sbindir}/flash_unlock"
+ALTERNATIVE_LINK_NAME[flashcp] = "${sbindir}/flashcp"
+
+do_install () {
+ oe_runmake install DESTDIR=${D} SBINDIR=${sbindir} MANDIR=${mandir} INCLUDEDIR=${includedir}
+}
+
+PACKAGES =+ "mtd-utils-misc mtd-utils-tests"
+PACKAGES =+ "${@bb.utils.contains("PACKAGECONFIG", "jffs", "mtd-utils-jffs2", "", d)}"
+PACKAGES =+ "${@bb.utils.contains("PACKAGECONFIG", "ubifs", "mtd-utils-ubifs", "", d)}"
+
+FILES:mtd-utils-jffs2 = "${sbindir}/mkfs.jffs2 ${sbindir}/jffs2dump ${sbindir}/jffs2reader ${sbindir}/sumtool"
+FILES:mtd-utils-ubifs = "${sbindir}/mkfs.ubifs ${sbindir}/ubi*"
+FILES:mtd-utils-misc = "${sbindir}/nftl* ${sbindir}/ftl* ${sbindir}/rfd* ${sbindir}/doc* ${sbindir}/serve_image ${sbindir}/recv_image"
+FILES:mtd-utils-tests = "${libexecdir}/mtd-utils/*"
+
+BBCLASSEXTEND = "native nativesdk"
+
+# git/.compr.c.dep:46: warning: NUL character seen; rest of line ignored
+# git/.compr.c.dep:47: *** missing separator. Stop.
+PARALLEL_MAKE = ""
+
+EXCLUDE_FROM_WORLD = "1"
diff --git a/meta-selftest/recipes-devtools/python/python-async-test.inc b/meta-selftest/recipes-devtools/python/python-async-test.inc
index 6d7c7458b0..a7dd1744f2 100644
--- a/meta-selftest/recipes-devtools/python/python-async-test.inc
+++ b/meta-selftest/recipes-devtools/python/python-async-test.inc
@@ -11,6 +11,6 @@ PYPI_PACKAGE = "async"
SRC_URI[md5sum] = "9b06b5997de2154f3bc0273f80bcef6b"
SRC_URI[sha256sum] = "ac6894d876e45878faae493b0cf61d0e28ec417334448ac0a6ea2229d8343051"
-RDEPENDS:${PN} += "${PYTHON_PN}-threading"
+RDEPENDS:${PN} += "python3-threading"
BBCLASSEXTEND = "nativesdk"
diff --git a/meta-selftest/recipes-devtools/rust/rust-c-lib-example-bin_git.bb b/meta-selftest/recipes-devtools/rust/rust-c-lib-example-bin_git.bb
new file mode 100644
index 0000000000..47d878597a
--- /dev/null
+++ b/meta-selftest/recipes-devtools/rust/rust-c-lib-example-bin_git.bb
@@ -0,0 +1,16 @@
+DESCRIPTION = "A simple example for C wrapper around a rust library"
+HOMEPAGE = "https://gitlab.com/fmartinsonsHome/rust-c-lib-example"
+LICENSE = "MIT"
+LIC_FILES_CHKSUM = "file://LICENSE;md5=cb9c441273ed8a029701a086befbfc63"
+
+SRC_URI = " \
+ git://gitlab.com/fmartinsonsHome/rust-c-lib-example.git;branch=main;protocol=https \
+"
+
+SRCREV = "fc53c457f69aa5221ec1f8619a007e8150db5e60"
+S = "${WORKDIR}/git"
+
+DEPENDS = "rust-c-lib-example"
+
+inherit meson pkgconfig
+
diff --git a/meta-selftest/recipes-devtools/rust/rust-c-lib-example-crates.inc b/meta-selftest/recipes-devtools/rust/rust-c-lib-example-crates.inc
new file mode 100644
index 0000000000..05f5949203
--- /dev/null
+++ b/meta-selftest/recipes-devtools/rust/rust-c-lib-example-crates.inc
@@ -0,0 +1,79 @@
+SRC_URI += " \
+ crate://crates.io/android-tzdata/0.1.1 \
+ crate://crates.io/android_system_properties/0.1.5 \
+ crate://crates.io/autocfg/1.1.0 \
+ crate://crates.io/bumpalo/3.13.0 \
+ crate://crates.io/cc/1.0.82 \
+ crate://crates.io/cfg-if/1.0.0 \
+ crate://crates.io/chrono/0.4.26 \
+ crate://crates.io/core-foundation-sys/0.8.4 \
+ crate://crates.io/iana-time-zone/0.1.57 \
+ crate://crates.io/iana-time-zone-haiku/0.1.2 \
+ crate://crates.io/js-sys/0.3.64 \
+ crate://crates.io/libc/0.2.147 \
+ crate://crates.io/log/0.4.19 \
+ crate://crates.io/num-traits/0.2.16 \
+ crate://crates.io/once_cell/1.18.0 \
+ crate://crates.io/proc-macro2/1.0.66 \
+ crate://crates.io/quote/1.0.32 \
+ crate://crates.io/syn/2.0.28 \
+ crate://crates.io/time/0.1.45 \
+ crate://crates.io/unicode-ident/1.0.11 \
+ crate://crates.io/wasi/0.10.0+wasi-snapshot-preview1 \
+ crate://crates.io/wasm-bindgen/0.2.87 \
+ crate://crates.io/wasm-bindgen-backend/0.2.87 \
+ crate://crates.io/wasm-bindgen-macro/0.2.87 \
+ crate://crates.io/wasm-bindgen-macro-support/0.2.87 \
+ crate://crates.io/wasm-bindgen-shared/0.2.87 \
+ crate://crates.io/winapi/0.3.9 \
+ crate://crates.io/winapi-i686-pc-windows-gnu/0.4.0 \
+ crate://crates.io/winapi-x86_64-pc-windows-gnu/0.4.0 \
+ crate://crates.io/windows/0.48.0 \
+ crate://crates.io/windows-targets/0.48.1 \
+ crate://crates.io/windows_aarch64_gnullvm/0.48.0 \
+ crate://crates.io/windows_aarch64_msvc/0.48.0 \
+ crate://crates.io/windows_i686_gnu/0.48.0 \
+ crate://crates.io/windows_i686_msvc/0.48.0 \
+ crate://crates.io/windows_x86_64_gnu/0.48.0 \
+ crate://crates.io/windows_x86_64_gnullvm/0.48.0 \
+ crate://crates.io/windows_x86_64_msvc/0.48.0 \
+"
+
+SRC_URI[android-tzdata-0.1.1.sha256sum] = "e999941b234f3131b00bc13c22d06e8c5ff726d1b6318ac7eb276997bbb4fef0"
+SRC_URI[android_system_properties-0.1.5.sha256sum] = "819e7219dbd41043ac279b19830f2efc897156490d7fd6ea916720117ee66311"
+SRC_URI[autocfg-1.1.0.sha256sum] = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa"
+SRC_URI[bumpalo-3.13.0.sha256sum] = "a3e2c3daef883ecc1b5d58c15adae93470a91d425f3532ba1695849656af3fc1"
+SRC_URI[cc-1.0.82.sha256sum] = "305fe645edc1442a0fa8b6726ba61d422798d37a52e12eaecf4b022ebbb88f01"
+SRC_URI[cfg-if-1.0.0.sha256sum] = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
+SRC_URI[chrono-0.4.26.sha256sum] = "ec837a71355b28f6556dbd569b37b3f363091c0bd4b2e735674521b4c5fd9bc5"
+SRC_URI[core-foundation-sys-0.8.4.sha256sum] = "e496a50fda8aacccc86d7529e2c1e0892dbd0f898a6b5645b5561b89c3210efa"
+SRC_URI[iana-time-zone-0.1.57.sha256sum] = "2fad5b825842d2b38bd206f3e81d6957625fd7f0a361e345c30e01a0ae2dd613"
+SRC_URI[iana-time-zone-haiku-0.1.2.sha256sum] = "f31827a206f56af32e590ba56d5d2d085f558508192593743f16b2306495269f"
+SRC_URI[js-sys-0.3.64.sha256sum] = "c5f195fe497f702db0f318b07fdd68edb16955aed830df8363d837542f8f935a"
+SRC_URI[libc-0.2.147.sha256sum] = "b4668fb0ea861c1df094127ac5f1da3409a82116a4ba74fca2e58ef927159bb3"
+SRC_URI[log-0.4.19.sha256sum] = "b06a4cde4c0f271a446782e3eff8de789548ce57dbc8eca9292c27f4a42004b4"
+SRC_URI[num-traits-0.2.16.sha256sum] = "f30b0abd723be7e2ffca1272140fac1a2f084c77ec3e123c192b66af1ee9e6c2"
+SRC_URI[once_cell-1.18.0.sha256sum] = "dd8b5dd2ae5ed71462c540258bedcb51965123ad7e7ccf4b9a8cafaa4a63576d"
+SRC_URI[proc-macro2-1.0.66.sha256sum] = "18fb31db3f9bddb2ea821cde30a9f70117e3f119938b5ee630b7403aa6e2ead9"
+SRC_URI[quote-1.0.32.sha256sum] = "50f3b39ccfb720540debaa0164757101c08ecb8d326b15358ce76a62c7e85965"
+SRC_URI[syn-2.0.28.sha256sum] = "04361975b3f5e348b2189d8dc55bc942f278b2d482a6a0365de5bdd62d351567"
+SRC_URI[time-0.1.45.sha256sum] = "1b797afad3f312d1c66a56d11d0316f916356d11bd158fbc6ca6389ff6bf805a"
+SRC_URI[unicode-ident-1.0.11.sha256sum] = "301abaae475aa91687eb82514b328ab47a211a533026cb25fc3e519b86adfc3c"
+SRC_URI[wasi-0.10.0+wasi-snapshot-preview1.sha256sum] = "1a143597ca7c7793eff794def352d41792a93c481eb1042423ff7ff72ba2c31f"
+SRC_URI[wasm-bindgen-0.2.87.sha256sum] = "7706a72ab36d8cb1f80ffbf0e071533974a60d0a308d01a5d0375bf60499a342"
+SRC_URI[wasm-bindgen-backend-0.2.87.sha256sum] = "5ef2b6d3c510e9625e5fe6f509ab07d66a760f0885d858736483c32ed7809abd"
+SRC_URI[wasm-bindgen-macro-0.2.87.sha256sum] = "dee495e55982a3bd48105a7b947fd2a9b4a8ae3010041b9e0faab3f9cd028f1d"
+SRC_URI[wasm-bindgen-macro-support-0.2.87.sha256sum] = "54681b18a46765f095758388f2d0cf16eb8d4169b639ab575a8f5693af210c7b"
+SRC_URI[wasm-bindgen-shared-0.2.87.sha256sum] = "ca6ad05a4870b2bf5fe995117d3728437bd27d7cd5f06f13c17443ef369775a1"
+SRC_URI[winapi-0.3.9.sha256sum] = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419"
+SRC_URI[winapi-i686-pc-windows-gnu-0.4.0.sha256sum] = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"
+SRC_URI[winapi-x86_64-pc-windows-gnu-0.4.0.sha256sum] = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
+SRC_URI[windows-0.48.0.sha256sum] = "e686886bc078bc1b0b600cac0147aadb815089b6e4da64016cbd754b6342700f"
+SRC_URI[windows-targets-0.48.1.sha256sum] = "05d4b17490f70499f20b9e791dcf6a299785ce8af4d709018206dc5b4953e95f"
+SRC_URI[windows_aarch64_gnullvm-0.48.0.sha256sum] = "91ae572e1b79dba883e0d315474df7305d12f569b400fcf90581b06062f7e1bc"
+SRC_URI[windows_aarch64_msvc-0.48.0.sha256sum] = "b2ef27e0d7bdfcfc7b868b317c1d32c641a6fe4629c171b8928c7b08d98d7cf3"
+SRC_URI[windows_i686_gnu-0.48.0.sha256sum] = "622a1962a7db830d6fd0a69683c80a18fda201879f0f447f065a3b7467daa241"
+SRC_URI[windows_i686_msvc-0.48.0.sha256sum] = "4542c6e364ce21bf45d69fdd2a8e455fa38d316158cfd43b3ac1c5b1b19f8e00"
+SRC_URI[windows_x86_64_gnu-0.48.0.sha256sum] = "ca2b8a661f7628cbd23440e50b05d705db3686f894fc9580820623656af974b1"
+SRC_URI[windows_x86_64_gnullvm-0.48.0.sha256sum] = "7896dbc1f41e08872e9d5e8f8baa8fdd2677f29468c4e156210174edc7f7b953"
+SRC_URI[windows_x86_64_msvc-0.48.0.sha256sum] = "1a515f5799fe4961cb532f983ce2b23082366b898e52ffbce459c86f67c8378a"
diff --git a/meta-selftest/recipes-devtools/rust/rust-c-lib-example_git.bb b/meta-selftest/recipes-devtools/rust/rust-c-lib-example_git.bb
new file mode 100644
index 0000000000..6e7c250b92
--- /dev/null
+++ b/meta-selftest/recipes-devtools/rust/rust-c-lib-example_git.bb
@@ -0,0 +1,15 @@
+DESCRIPTION = "A simple example for rust C library"
+HOMEPAGE = "https://gitlab.com/fmartinsonsHome/rust-c-lib-example"
+LICENSE = "MIT"
+LIC_FILES_CHKSUM = "file://LICENSE;md5=cb9c441273ed8a029701a086befbfc63"
+
+SRC_URI = " \
+ git://gitlab.com/fmartinsonsHome/rust-c-lib-example.git;branch=main;protocol=https \
+"
+
+SRCREV = "fc53c457f69aa5221ec1f8619a007e8150db5e60"
+S = "${WORKDIR}/git"
+
+inherit cargo_c
+
+require ${BPN}-crates.inc
diff --git a/meta-selftest/recipes-extended/hello-rs/hello-rs-crates.inc b/meta-selftest/recipes-extended/hello-rs/hello-rs-crates.inc
new file mode 100644
index 0000000000..9646482b28
--- /dev/null
+++ b/meta-selftest/recipes-extended/hello-rs/hello-rs-crates.inc
@@ -0,0 +1,8 @@
+# Autogenerated with 'bitbake -c update_crates hello-rs'
+
+# from Cargo.lock
+SRC_URI += " \
+ crate://crates.io/figlet-rs/0.1.5 \
+"
+
+SRC_URI[figlet-rs-0.1.5.sha256sum] = "4742a071cd9694fc86f9fa1a08fa3e53d40cc899d7ee532295da2d085639fbc5"
diff --git a/meta-selftest/recipes-extended/hello-rs/hello-rs/0001-Greet-OE-Core.patch b/meta-selftest/recipes-extended/hello-rs/hello-rs/0001-Greet-OE-Core.patch
new file mode 100644
index 0000000000..7569ccef7b
--- /dev/null
+++ b/meta-selftest/recipes-extended/hello-rs/hello-rs/0001-Greet-OE-Core.patch
@@ -0,0 +1,24 @@
+From 3aea1ece0ca6ac0bf0ffe42736827af22966f767 Mon Sep 17 00:00:00 2001
+From: Alex Kiernan <alexk@zuma.ai>
+Date: Wed, 27 Dec 2023 09:55:48 +0000
+Subject: [PATCH] Greet "OE-Core"
+
+Signed-off-by: Alex Kiernan <alex.kiernan@gmail.com>
+Upstream-Status: Inappropriate
+---
+ src/main.rs | 2 +-
+ 1 file changed, 1 insertion(+), 1 deletion(-)
+
+diff --git a/src/main.rs b/src/main.rs
+index f4a310758b19..224eca65c38d 100644
+--- a/src/main.rs
++++ b/src/main.rs
+@@ -3,7 +3,7 @@ use hello_lib::greet;
+
+ fn main() {
+ let standard_font = FIGfont::standard().unwrap();
+- let greeting = greet(None);
++ let greeting = greet(Some("OE-Core"));
+ let figure = standard_font.convert(&greeting);
+ println!("{}", figure.unwrap());
+ }
diff --git a/meta-selftest/recipes-extended/hello-rs/hello-rs_0.1.0.bb b/meta-selftest/recipes-extended/hello-rs/hello-rs_0.1.0.bb
new file mode 100644
index 0000000000..9a136f2978
--- /dev/null
+++ b/meta-selftest/recipes-extended/hello-rs/hello-rs_0.1.0.bb
@@ -0,0 +1,22 @@
+SUMMARY = "Simple hello world example"
+HOMEPAGE = "https://github.com/akiernan/hello-bin"
+LICENSE = "Unlicense"
+LIC_FILES_CHKSUM = "file://LICENSE;md5=7246f848faa4e9c9fc0ea91122d6e680"
+
+SRC_URI = "git://github.com/akiernan/hello-bin.git;protocol=https;branch=main;subpath=rust \
+ file://0001-Greet-OE-Core.patch \
+ git://github.com/akiernan/hello-lib.git;protocol=https;branch=main;name=hello-lib;destsuffix=hello-lib;type=git-dependency \
+ "
+SRCREV = "d3d096eda182644868f8e7458dcfa538ff637db3"
+
+SRCREV_FORMAT .= "_hello-lib"
+SRCREV_hello-lib = "59c84574e844617043cf337bc8fa537cf87ad8ae"
+
+S = "${WORKDIR}/rust"
+
+inherit cargo cargo-update-recipe-crates ptest-cargo
+
+# Remove this when the recipe is reproducible
+EXCLUDE_FROM_WORLD = "1"
+
+require ${BPN}-crates.inc
diff --git a/meta-selftest/recipes-extended/sysdig/sysdig-selftest/0055-Add-cstdint-for-uintXX_t-types.patch b/meta-selftest/recipes-extended/sysdig/sysdig-selftest/0055-Add-cstdint-for-uintXX_t-types.patch
new file mode 100644
index 0000000000..e564958dad
--- /dev/null
+++ b/meta-selftest/recipes-extended/sysdig/sysdig-selftest/0055-Add-cstdint-for-uintXX_t-types.patch
@@ -0,0 +1,38 @@
+From 3d076ea588eb3c7f334133b4c31172a14beadf5b Mon Sep 17 00:00:00 2001
+From: Khem Raj <raj.khem@gmail.com>
+Date: Thu, 2 Feb 2023 20:18:27 -0800
+Subject: [PATCH] Add <cstdint> for uintXX_t types
+
+gcc 13 moved some includes around and as a result <cstdint> is no
+longer transitively included [1]. Explicitly include it
+for uintXX_t.
+
+[1] https://gcc.gnu.org/gcc-13/porting_to.html#header-dep-changes
+
+Upstream-Status: Submitted [https://github.com/falcosecurity/libs/pull/862]
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+---
+ userspace/libsinsp/filter/parser.h | 1 +
+ userspace/libsinsp/filter_value.h | 1 +
+ 2 files changed, 2 insertions(+)
+
+--- a/userspace/libsinsp/filter/parser.h
++++ b/userspace/libsinsp/filter/parser.h
+@@ -18,6 +18,7 @@ limitations under the License.
+ #pragma once
+
+ #include "ast.h"
++#include <cstdint>
+
+ //
+ // Context-free Grammar for Sinsp Filters
+--- a/userspace/libsinsp/filter_value.h
++++ b/userspace/libsinsp/filter_value.h
+@@ -18,6 +18,7 @@ limitations under the License.
+ #pragma once
+
+ #include <string.h>
++#include <cstdint>
+ #include <utility>
+
+ // Used for CO_IN/CO_PMATCH filterchecks using PT_CHARBUFs to allow
diff --git a/meta-selftest/recipes-extended/sysdig/sysdig-selftest/0099-cmake-Pass-PROBE_NAME-via-CFLAGS.patch b/meta-selftest/recipes-extended/sysdig/sysdig-selftest/0099-cmake-Pass-PROBE_NAME-via-CFLAGS.patch
new file mode 100644
index 0000000000..903ccdf36a
--- /dev/null
+++ b/meta-selftest/recipes-extended/sysdig/sysdig-selftest/0099-cmake-Pass-PROBE_NAME-via-CFLAGS.patch
@@ -0,0 +1,29 @@
+From ed8969a233adb6bf701de96d0fd0570e5ddcc787 Mon Sep 17 00:00:00 2001
+From: Khem Raj <raj.khem@gmail.com>
+Date: Mon, 21 Mar 2022 19:35:48 -0700
+Subject: [PATCH] cmake: Pass PROBE_NAME via CFLAGS
+
+This helps compliation of driver code where its calling modprobe on the
+given kernel module via system() API
+
+Upstream-Status: Pending
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+---
+ CMakeLists.txt | 1 +
+ 1 file changed, 1 insertion(+)
+
+diff --git a/CMakeLists.txt b/CMakeLists.txt
+index 7dceb7ae..e156c36f 100644
+--- a/CMakeLists.txt
++++ b/CMakeLists.txt
+@@ -149,6 +149,7 @@ if(CMAKE_SYSTEM_NAME MATCHES "Linux")
+ if(NOT DEFINED PROBE_NAME)
+ set(PROBE_NAME "scap")
+ endif()
++ add_definitions(-DPROBE_NAME="${PROBE_NAME}")
+
+ set(DRIVERS_REPO "https://download.sysdig.com/scap-drivers")
+
+--
+2.35.1
+
diff --git a/meta-selftest/recipes-extended/sysdig/sysdig-selftest_0.28.0.bb b/meta-selftest/recipes-extended/sysdig/sysdig-selftest_0.28.0.bb
new file mode 100644
index 0000000000..2ce85fe451
--- /dev/null
+++ b/meta-selftest/recipes-extended/sysdig/sysdig-selftest_0.28.0.bb
@@ -0,0 +1,66 @@
+SUMMARY = "A New System Troubleshooting Tool Built for the Way You Work"
+DESCRIPTION = "Sysdig is open source, system-level exploration: capture \
+system state and activity from a running Linux instance, then save, \
+filter and analyze."
+HOMEPAGE = "http://www.sysdig.org/"
+LICENSE = "Apache-2.0 & (MIT | GPL-2.0-only)"
+LIC_FILES_CHKSUM = "file://COPYING;md5=f8fee3d59797546cffab04f3b88b2d44"
+
+inherit cmake pkgconfig
+
+#OECMAKE_GENERATOR = "Unix Makefiles"
+JIT ?= "jit"
+JIT:mipsarchn32 = ""
+JIT:mipsarchn64 = ""
+JIT:riscv64 = ""
+JIT:riscv32 = ""
+JIT:powerpc = ""
+JIT:powerpc64le = ""
+JIT:powerpc64 = ""
+
+#DEPENDS += "libb64 lua${JIT} zlib c-ares grpc-native grpc curl ncurses jsoncpp \
+# tbb jq openssl elfutils protobuf protobuf-native jq-native valijson"
+RDEPENDS:${PN} = "bash"
+
+SRC_URI = "git://github.com/draios/sysdig.git;branch=dev;protocol=https;name=sysdig \
+ git://github.com/falcosecurity/libs;protocol=https;branch=master;name=falco;subdir=git/falcosecurity-libs \
+ file://0055-Add-cstdint-for-uintXX_t-types.patch;patchdir=./falcosecurity-libs \
+ file://0099-cmake-Pass-PROBE_NAME-via-CFLAGS.patch \
+ "
+SRCREV_sysdig = "4fb6288275f567f63515df0ff0a6518043ecfa9b"
+SRCREV_falco= "caa0e4d0044fdaaebab086592a97f0c7f32aeaa9"
+
+SRCREV_FORMAT = "sysdig_falco"
+
+S = "${WORKDIR}/git"
+
+EXTRA_OECMAKE = "\
+ -DBUILD_DRIVER=OFF \
+ -DMINIMAL_BUILD=ON \
+ -DUSE_BUNDLED_DEPS=OFF \
+ -DCREATE_TEST_TARGETS=OFF \
+ -DDIR_ETC=${sysconfdir} \
+ -DLUA_INCLUDE_DIR=${STAGING_INCDIR}/luajit-2.1 \
+ -DFALCOSECURITY_LIBS_SOURCE_DIR=${S}/falcosecurity-libs \
+ -DVALIJSON_INCLUDE=${STAGING_INCDIR}/valijson \
+"
+
+#CMAKE_VERBOSE = "VERBOSE=1"
+
+FILES:${PN} += " \
+ ${DIR_ETC}/* \
+ ${datadir}/zsh/* \
+ ${prefix}/src/* \
+"
+# Use getaddrinfo_a is a GNU extension in libsinsp
+# It should be fixed in sysdig, until then disable
+# on musl
+# Something like this https://code.videolan.org/ePirat/vlc/-/commit/01fd9fe4c7f6c5558f7345f38abf0152e17853ab is needed to fix it
+COMPATIBLE_HOST:libc-musl = "null"
+COMPATIBLE_HOST:mips = "null"
+COMPATIBLE_HOST:riscv64 = "null"
+COMPATIBLE_HOST:riscv32 = "null"
+COMPATIBLE_HOST:powerpc = "null"
+COMPATIBLE_HOST:powerpc64le = "null"
+
+EXCLUDE_FROM_WORLD = "1"
diff --git a/meta-selftest/recipes-test/aspell/aspell_0.60.8.bbappend b/meta-selftest/recipes-test/aspell/aspell_%.bbappend
index 205720982c..205720982c 100644
--- a/meta-selftest/recipes-test/aspell/aspell_0.60.8.bbappend
+++ b/meta-selftest/recipes-test/aspell/aspell_%.bbappend
diff --git a/meta-selftest/recipes-test/binutils/binutils_%.bbappend b/meta-selftest/recipes-test/binutils/binutils_%.bbappend
new file mode 100644
index 0000000000..205720982c
--- /dev/null
+++ b/meta-selftest/recipes-test/binutils/binutils_%.bbappend
@@ -0,0 +1,2 @@
+# This bbappend is used to alter the recipe using the test_recipe.inc file created by tests.
+include test_recipe.inc
diff --git a/meta-selftest/recipes-test/cpp/.gitignore b/meta-selftest/recipes-test/cpp/.gitignore
new file mode 100644
index 0000000000..30d388a12b
--- /dev/null
+++ b/meta-selftest/recipes-test/cpp/.gitignore
@@ -0,0 +1 @@
+build* \ No newline at end of file
diff --git a/meta-selftest/recipes-test/cpp/cmake-example.bb b/meta-selftest/recipes-test/cpp/cmake-example.bb
new file mode 100644
index 0000000000..aecfcf780a
--- /dev/null
+++ b/meta-selftest/recipes-test/cpp/cmake-example.bb
@@ -0,0 +1,25 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+SUMMARY = "A C++ example compiled with cmake."
+
+require cpp-example.inc
+
+SRC_URI += "file://CMakeLists.txt"
+
+inherit cmake-qemu
+
+PACKAGECONFIG[failing_test] = "-DFAILING_TEST=ON"
+
+FILES:${PN}-ptest += "${bindir}/test-cmake-example"
+
+do_run_tests () {
+ bbnote ${DESTDIR:+DESTDIR=${DESTDIR} }${CMAKE_VERBOSE} cmake --build '${B}' --target test -- ${EXTRA_OECMAKE_BUILD}
+ eval ${DESTDIR:+DESTDIR=${DESTDIR} }${CMAKE_VERBOSE} cmake --build '${B}' --target test -- ${EXTRA_OECMAKE_BUILD}
+}
+do_run_tests[doc] = "Run cmake --target=test using qemu-user"
+
+addtask do_run_tests after do_compile
diff --git a/meta-selftest/recipes-test/cpp/cmake-example/run-ptest b/meta-selftest/recipes-test/cpp/cmake-example/run-ptest
new file mode 100644
index 0000000000..94b620a198
--- /dev/null
+++ b/meta-selftest/recipes-test/cpp/cmake-example/run-ptest
@@ -0,0 +1,10 @@
+#!/bin/sh
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+test-cmake-example
+
+# Note: run-ptests exits with exit value from test-cmake-example
diff --git a/meta-selftest/recipes-test/cpp/cpp-example.inc b/meta-selftest/recipes-test/cpp/cpp-example.inc
new file mode 100644
index 0000000000..ad374be9d0
--- /dev/null
+++ b/meta-selftest/recipes-test/cpp/cpp-example.inc
@@ -0,0 +1,24 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+LICENSE = "MIT"
+LIC_FILES_CHKSUM = "file://${COMMON_LICENSE_DIR}/MIT;md5=0835ade698e0bcf8506ecda2f7b4f302"
+
+DEPENDS += "json-c"
+
+PV = "1.0"
+
+SRC_URI = "\
+ file://cpp-example.cpp \
+ file://cpp-example-lib.hpp \
+ file://cpp-example-lib.cpp \
+ file://test-cpp-example.cpp \
+ file://run-ptest \
+"
+
+S = "${WORKDIR}"
+
+inherit ptest
diff --git a/meta-selftest/recipes-test/cpp/files/CMakeLists.txt b/meta-selftest/recipes-test/cpp/files/CMakeLists.txt
new file mode 100644
index 0000000000..6fa6917d89
--- /dev/null
+++ b/meta-selftest/recipes-test/cpp/files/CMakeLists.txt
@@ -0,0 +1,61 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+cmake_minimum_required(VERSION 3.22)
+
+project(cmake-example
+ VERSION 1.0.0
+ LANGUAGES CXX
+)
+
+option(BUILD_SHARED_LIBS "Build using shared libraries" ON)
+option(FAILING_TEST "Compile a failing unit test to test the test infrastructure" OFF)
+
+set(CMAKE_CXX_STANDARD 17)
+set(CMAKE_CXX_STANDARD_REQUIRED On)
+set(CMAKE_CXX_EXTENSIONS Off)
+
+include(GNUInstallDirs)
+
+# Linking a small library makes the example more useful for testing.
+find_package(json-c)
+
+# A simple library linking json-c library found by pkgconfig
+add_library(cmake-example-lib cpp-example-lib.cpp cpp-example-lib.hpp)
+set_target_properties(cmake-example-lib PROPERTIES
+ VERSION ${PROJECT_VERSION}
+ SOVERSION ${PROJECT_VERSION_MAJOR}
+)
+target_link_libraries(cmake-example-lib PRIVATE json-c::json-c)
+
+install(TARGETS cmake-example-lib
+ INCLUDES DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}
+ ARCHIVE DESTINATION ${CMAKE_INSTALL_LIBDIR}
+ LIBRARY DESTINATION ${CMAKE_INSTALL_LIBDIR}
+)
+
+# A simple executable linking the library
+add_executable(cmake-example cpp-example.cpp)
+target_link_libraries(cmake-example PRIVATE cmake-example-lib)
+
+install(TARGETS cmake-example
+ RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR}
+)
+
+# A simple test executable for testing the library
+add_executable(test-cmake-example test-cpp-example.cpp)
+target_link_libraries(test-cmake-example PRIVATE cmake-example-lib)
+
+if (FAILING_TEST)
+ target_compile_definitions(test-cmake-example PRIVATE FAIL_COMPARISON_STR="foo")
+endif(FAILING_TEST)
+
+install(TARGETS test-cmake-example
+ RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR}
+)
+
+include(CTest)
+add_test(NAME test-cmake-example COMMAND test-cmake-example)
diff --git a/meta-selftest/recipes-test/cpp/files/cpp-example-lib.cpp b/meta-selftest/recipes-test/cpp/files/cpp-example-lib.cpp
new file mode 100644
index 0000000000..d3dc976864
--- /dev/null
+++ b/meta-selftest/recipes-test/cpp/files/cpp-example-lib.cpp
@@ -0,0 +1,33 @@
+/*
+ * Copyright OpenEmbedded Contributors
+ *
+ * SPDX-License-Identifier: MIT
+ */
+
+#include <iostream>
+#include <string>
+#include <json-c/json.h>
+#include "cpp-example-lib.hpp"
+
+const std::string &CppExample::get_string()
+{
+ return test_string;
+}
+
+const char *CppExample::get_json_c_version()
+{
+ return json_c_version();
+}
+
+void CppExample::print_json()
+{
+ struct json_object *jobj;
+ const int flag = JSON_C_TO_STRING_SPACED | JSON_C_TO_STRING_PRETTY;
+
+ jobj = json_object_new_object();
+ json_object_object_add(jobj, "test_string", json_object_new_string(test_string.c_str()));
+
+ std::cout << json_object_to_json_string_ext(jobj, flag) << std::endl;
+
+ json_object_put(jobj); // Delete the json object
+}
diff --git a/meta-selftest/recipes-test/cpp/files/cpp-example-lib.hpp b/meta-selftest/recipes-test/cpp/files/cpp-example-lib.hpp
new file mode 100644
index 0000000000..0ad9e7b7b2
--- /dev/null
+++ b/meta-selftest/recipes-test/cpp/files/cpp-example-lib.hpp
@@ -0,0 +1,21 @@
+/*
+ * Copyright OpenEmbedded Contributors
+ *
+ * SPDX-License-Identifier: MIT
+ */
+
+#pragma once
+
+#include <string>
+
+struct CppExample
+{
+ inline static const std::string test_string = "cpp-example-lib Magic: 123456789";
+
+ /* Retrieve a constant string */
+ const std::string &get_string();
+ /* Retrieve a constant string from a library */
+ const char *get_json_c_version();
+ /* Call a more advanced function from a library */
+ void print_json();
+};
diff --git a/meta-selftest/recipes-test/cpp/files/cpp-example.cpp b/meta-selftest/recipes-test/cpp/files/cpp-example.cpp
new file mode 100644
index 0000000000..9889554e0c
--- /dev/null
+++ b/meta-selftest/recipes-test/cpp/files/cpp-example.cpp
@@ -0,0 +1,18 @@
+/*
+ * Copyright OpenEmbedded Contributors
+ *
+ * SPDX-License-Identifier: MIT
+ */
+
+#include "cpp-example-lib.hpp"
+
+#include <iostream>
+
+int main()
+{
+ auto cpp_example = CppExample();
+ std::cout << "C++ example linking " << cpp_example.get_string() << std::endl;
+ std::cout << "Linking json-c version " << cpp_example.get_json_c_version() << std::endl;
+ cpp_example.print_json();
+ return 0;
+}
diff --git a/meta-selftest/recipes-test/cpp/files/meson.build b/meta-selftest/recipes-test/cpp/files/meson.build
new file mode 100644
index 0000000000..0e2b55f3a2
--- /dev/null
+++ b/meta-selftest/recipes-test/cpp/files/meson.build
@@ -0,0 +1,38 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+project('meson-example', 'cpp',
+ version: '1.0.0',
+ default_options: ['cpp_std=c++17']
+ )
+
+jsoncdep = dependency('json-c')
+
+if get_option('FAILING_TEST').enabled()
+ add_project_arguments('-DFAIL_COMPARISON_STR=foo', language: 'cpp')
+endif
+
+mesonexlib = shared_library('mesonexlib',
+ 'cpp-example-lib.cpp', 'cpp-example-lib.hpp',
+ version: meson.project_version(),
+ soversion: meson.project_version().split('.')[0],
+ dependencies : jsoncdep,
+ install : true
+ )
+
+executable('mesonex',
+ 'cpp-example.cpp',
+ link_with : mesonexlib,
+ install : true
+ )
+
+test_mesonex = executable('test-mesonex',
+ 'test-cpp-example.cpp',
+ link_with : mesonexlib,
+ install : true
+)
+
+test('meson example test', test_mesonex)
diff --git a/meta-selftest/recipes-test/cpp/files/meson.options b/meta-selftest/recipes-test/cpp/files/meson.options
new file mode 100644
index 0000000000..58a0bf9e61
--- /dev/null
+++ b/meta-selftest/recipes-test/cpp/files/meson.options
@@ -0,0 +1,3 @@
+
+option('FAILING_TEST', type : 'feature', value : 'disabled',
+ description : 'Compile a failing unit test to test the test infrastructure')
diff --git a/meta-selftest/recipes-test/cpp/files/test-cpp-example.cpp b/meta-selftest/recipes-test/cpp/files/test-cpp-example.cpp
new file mode 100644
index 0000000000..83c9bfa844
--- /dev/null
+++ b/meta-selftest/recipes-test/cpp/files/test-cpp-example.cpp
@@ -0,0 +1,25 @@
+/*
+* Copyright OpenEmbedded Contributors
+*
+* SPDX-License-Identifier: MIT
+*/
+
+#include "cpp-example-lib.hpp"
+
+#include <iostream>
+
+/* This is for creating a failing test for testing the test infrastructure */
+#ifndef FAIL_COMPARISON_STR
+#define FAIL_COMPARISON_STR ""
+#endif
+
+int main() {
+ auto cpp_example = CppExample();
+ auto ret_string = cpp_example.get_string();
+ if(0 == ret_string.compare(CppExample::test_string + FAIL_COMPARISON_STR)) {
+ std::cout << "PASS: " << ret_string << " = " << CppExample::test_string << std::endl;
+ } else {
+ std::cout << "FAIL: " << ret_string << " != " << CppExample::test_string << std::endl;
+ return 1;
+ }
+}
diff --git a/meta-selftest/recipes-test/cpp/meson-example.bb b/meta-selftest/recipes-test/cpp/meson-example.bb
new file mode 100644
index 0000000000..14a7ca8dc9
--- /dev/null
+++ b/meta-selftest/recipes-test/cpp/meson-example.bb
@@ -0,0 +1,27 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+SUMMARY = "A C++ example compiled with meson."
+
+require cpp-example.inc
+
+SRC_URI += "\
+ file://meson.build \
+ file://meson.options \
+"
+
+inherit pkgconfig meson
+
+PACKAGECONFIG[failing_test] = "-DFAILING_TEST=enabled"
+
+FILES:${PN}-ptest += "${bindir}/test-mesonex"
+
+do_run_tests () {
+ meson test -C "${B}" --no-rebuild
+}
+do_run_tests[doc] = "Run meson test using qemu-user"
+
+addtask do_run_tests after do_compile
diff --git a/meta-selftest/recipes-test/cpp/meson-example/run-ptest b/meta-selftest/recipes-test/cpp/meson-example/run-ptest
new file mode 100644
index 0000000000..b1804f0096
--- /dev/null
+++ b/meta-selftest/recipes-test/cpp/meson-example/run-ptest
@@ -0,0 +1,10 @@
+#!/bin/sh
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+test-mesonex
+
+# Note: run-ptests exits with exit value from test-mesonex
diff --git a/meta-selftest/recipes-test/devtool/devtool-test-local/file3 b/meta-selftest/recipes-test/devtool/devtool-test-local/file3
new file mode 100644
index 0000000000..0f30e9eec4
--- /dev/null
+++ b/meta-selftest/recipes-test/devtool/devtool-test-local/file3
@@ -0,0 +1 @@
+The third file.
diff --git a/meta-selftest/recipes-test/devtool/devtool-test-local_6.03.bb b/meta-selftest/recipes-test/devtool/devtool-test-local_6.03.bb
index 463cfe0a7a..d0fd697978 100644
--- a/meta-selftest/recipes-test/devtool/devtool-test-local_6.03.bb
+++ b/meta-selftest/recipes-test/devtool/devtool-test-local_6.03.bb
@@ -7,9 +7,12 @@ SRC_URI = "http://downloads.yoctoproject.org/mirror/sources/syslinux-${PV}.tar.x
file://file1 \
file://file2"
+SRC_URI:append:class-native = " file://file3"
+
SRC_URI[md5sum] = "92a253df9211e9c20172796ecf388f13"
SRC_URI[sha256sum] = "26d3986d2bea109d5dc0e4f8c4822a459276cf021125e8c9f23c3cca5d8c850e"
S = "${WORKDIR}/syslinux-${PV}"
EXCLUDE_FROM_WORLD = "1"
+BBCLASSEXTEND = "native"
diff --git a/meta-selftest/recipes-test/devtool/devtool-test-localonly.bb b/meta-selftest/recipes-test/devtool/devtool-test-localonly.bb
index 3f7123cda0..e767619879 100644
--- a/meta-selftest/recipes-test/devtool/devtool-test-localonly.bb
+++ b/meta-selftest/recipes-test/devtool/devtool-test-localonly.bb
@@ -4,4 +4,7 @@ INHIBIT_DEFAULT_DEPS = "1"
SRC_URI = "file://file1 \
file://file2"
+SRC_URI:append:class-native = " file://file3"
+
EXCLUDE_FROM_WORLD = "1"
+BBCLASSEXTEND = "native"
diff --git a/meta-selftest/recipes-test/devtool/devtool-test-localonly/file3 b/meta-selftest/recipes-test/devtool/devtool-test-localonly/file3
new file mode 100644
index 0000000000..0f30e9eec4
--- /dev/null
+++ b/meta-selftest/recipes-test/devtool/devtool-test-localonly/file3
@@ -0,0 +1 @@
+The third file.
diff --git a/meta-selftest/recipes-test/devtool/devtool-upgrade-test2_git.bb b/meta-selftest/recipes-test/devtool/devtool-upgrade-test2_git.bb
index 203f4b61c2..2558a22ce5 100644
--- a/meta-selftest/recipes-test/devtool/devtool-upgrade-test2_git.bb
+++ b/meta-selftest/recipes-test/devtool/devtool-upgrade-test2_git.bb
@@ -8,7 +8,7 @@ DEPENDS = "dbus"
# Note: this is intentionally not the latest version in the original .bb
SRCREV = "1a3e1343761b30750bed70e0fd688f6d3c7b3717"
-PV = "0.1+git${SRCPV}"
+PV = "0.1+git"
PR = "r2"
SRC_URI = "git://git.yoctoproject.org/dbus-wait;branch=master"
diff --git a/meta-selftest/recipes-test/devtool/devtool-upgrade-test2_git.bb.upgraded b/meta-selftest/recipes-test/devtool/devtool-upgrade-test2_git.bb.upgraded
index 3d45fc4857..eaa8bd898d 100644
--- a/meta-selftest/recipes-test/devtool/devtool-upgrade-test2_git.bb.upgraded
+++ b/meta-selftest/recipes-test/devtool/devtool-upgrade-test2_git.bb.upgraded
@@ -8,7 +8,7 @@ DEPENDS = "dbus"
# Note: this is intentionally not the latest version in the original .bb
SRCREV = "6cc6077a36fe2648a5f993fe7c16c9632f946517"
-PV = "0.1+git${SRCPV}"
+PV = "0.1+git"
SRC_URI = "git://git.yoctoproject.org/dbus-wait;branch=master"
UPSTREAM_CHECK_COMMITS = "1"
diff --git a/meta-selftest/recipes-test/devtool/devtool-upgrade-test3_1.5.3.bb b/meta-selftest/recipes-test/devtool/devtool-upgrade-test3_1.5.3.bb
new file mode 100644
index 0000000000..69c0d351ec
--- /dev/null
+++ b/meta-selftest/recipes-test/devtool/devtool-upgrade-test3_1.5.3.bb
@@ -0,0 +1,16 @@
+SUMMARY = "Pipe viewer test recipe for devtool upgrade test"
+LICENSE = "Artistic-2.0"
+LIC_FILES_CHKSUM = "file://doc/COPYING;md5=9c50db2589ee3ef10a9b7b2e50ce1d02"
+
+SRC_URI = "http://www.ivarch.com/programs/sources/pv-${PV}.tar.gz"
+UPSTREAM_CHECK_URI = "http://www.ivarch.com/programs/pv.shtml"
+RECIPE_NO_UPDATE_REASON = "This recipe is used to test devtool upgrade feature"
+
+SRC_URI[md5sum] = "9365d86bd884222b4bf1039b5a9ed1bd"
+
+S = "${WORKDIR}/pv-${PV}"
+
+EXCLUDE_FROM_WORLD = "1"
+
+inherit autotools
+
diff --git a/meta-selftest/recipes-test/devtool/devtool-upgrade-test3_1.5.3.bb.upgraded b/meta-selftest/recipes-test/devtool/devtool-upgrade-test3_1.5.3.bb.upgraded
new file mode 100644
index 0000000000..3ce7e85e10
--- /dev/null
+++ b/meta-selftest/recipes-test/devtool/devtool-upgrade-test3_1.5.3.bb.upgraded
@@ -0,0 +1,15 @@
+SUMMARY = "Pipe viewer test recipe for devtool upgrade test"
+LICENSE = "Artistic-2.0"
+LIC_FILES_CHKSUM = "file://doc/COPYING;md5=9c50db2589ee3ef10a9b7b2e50ce1d02"
+
+SRC_URI[sha256sum] = "9dd45391806b0ed215abee4c5ac1597d018c386fe9c1f5afd2f6bc3b07fd82c3"
+SRC_URI = "http://www.ivarch.com/programs/sources/pv-${PV}.tar.gz"
+UPSTREAM_CHECK_URI = "http://www.ivarch.com/programs/pv.shtml"
+RECIPE_NO_UPDATE_REASON = "This recipe is used to test devtool upgrade feature"
+
+S = "${WORKDIR}/pv-${PV}"
+
+EXCLUDE_FROM_WORLD = "1"
+
+inherit autotools
+
diff --git a/meta-selftest/recipes-test/devtool/devtool-upgrade-test4_1.5.3.bb b/meta-selftest/recipes-test/devtool/devtool-upgrade-test4_1.5.3.bb
new file mode 100644
index 0000000000..9abf80e6ed
--- /dev/null
+++ b/meta-selftest/recipes-test/devtool/devtool-upgrade-test4_1.5.3.bb
@@ -0,0 +1,22 @@
+SUMMARY = "Pipe viewer test recipe for devtool upgrade test"
+LICENSE = "Artistic-2.0"
+LIC_FILES_CHKSUM = "file://doc/COPYING;md5=9c50db2589ee3ef10a9b7b2e50ce1d02"
+
+SRC_URI = "http://www.ivarch.com/programs/sources/pv-${PV}.tar.gz"
+UPSTREAM_CHECK_URI = "http://www.ivarch.com/programs/pv.shtml"
+RECIPE_NO_UPDATE_REASON = "This recipe is used to test devtool upgrade feature"
+
+SRC_URI[md5sum] = "9365d86bd884222b4bf1039b5a9ed1bd"
+SRC_URI[sha1sum] = "63a0801350e812541c7f8e9ad74e0d6b629d0b39"
+SRC_URI[sha256sum] = "681bcca9784bf3cb2207e68236d1f68e2aa7b80f999b5750dc77dcd756e81fbc"
+SRC_URI[sha384sum] = "5fff6390465ff23dbf573fcf39dfad3aed2f92074a35e6c02abe58b7678858d90fa6572ff4cb56df8b3e217c739cdbe3"
+SRC_URI[sha512sum] = "32efe7071a363f547afc74e96774f711795edda1d2702823a347d0f9953e859b7d8c45b3e63e18ffb9e0d5ed5910be652d7d727c8676e81b6cb3aed0b13aec00"
+
+PR = "r5"
+
+S = "${WORKDIR}/pv-${PV}"
+
+EXCLUDE_FROM_WORLD = "1"
+
+inherit autotools
+
diff --git a/meta-selftest/recipes-test/devtool/devtool-upgrade-test4_1.5.3.bb.upgraded b/meta-selftest/recipes-test/devtool/devtool-upgrade-test4_1.5.3.bb.upgraded
new file mode 100644
index 0000000000..cd2a0842f4
--- /dev/null
+++ b/meta-selftest/recipes-test/devtool/devtool-upgrade-test4_1.5.3.bb.upgraded
@@ -0,0 +1,19 @@
+SUMMARY = "Pipe viewer test recipe for devtool upgrade test"
+LICENSE = "Artistic-2.0"
+LIC_FILES_CHKSUM = "file://doc/COPYING;md5=9c50db2589ee3ef10a9b7b2e50ce1d02"
+
+SRC_URI = "http://www.ivarch.com/programs/sources/pv-${PV}.tar.gz"
+UPSTREAM_CHECK_URI = "http://www.ivarch.com/programs/pv.shtml"
+RECIPE_NO_UPDATE_REASON = "This recipe is used to test devtool upgrade feature"
+
+SRC_URI[sha1sum] = "395ce62f4f3e035b86c77038f04b96c5aa233595"
+SRC_URI[sha256sum] = "9dd45391806b0ed215abee4c5ac1597d018c386fe9c1f5afd2f6bc3b07fd82c3"
+SRC_URI[sha384sum] = "218c8d2d097aeba5310be759bc20573f18ffa0b11701eac6dd2e7e14ddf13c6e0e094ca7ca026eaa05ef92a056402e36"
+SRC_URI[sha512sum] = "1cf9d7376fceefcd594d0a8b591afc8e11ce89f7210d10ad74438974ecebe9cc5d9ec4db9cc79e0566bfd2b0278c0cc263c07547803e7536432cd1ffd32d8a45"
+
+S = "${WORKDIR}/pv-${PV}"
+
+EXCLUDE_FROM_WORLD = "1"
+
+inherit autotools
+
diff --git a/meta-selftest/recipes-test/gcc-source/gcc-source_%.bbappend b/meta-selftest/recipes-test/gcc-source/gcc-source_%.bbappend
new file mode 100644
index 0000000000..205720982c
--- /dev/null
+++ b/meta-selftest/recipes-test/gcc-source/gcc-source_%.bbappend
@@ -0,0 +1,2 @@
+# This bbappend is used to alter the recipe using the test_recipe.inc file created by tests.
+include test_recipe.inc
diff --git a/meta-selftest/recipes-test/git-submodule-test/git-submodule-test.bb b/meta-selftest/recipes-test/git-submodule-test/git-submodule-test.bb
index fa3041b7d8..90d9b66b2c 100644
--- a/meta-selftest/recipes-test/git-submodule-test/git-submodule-test.bb
+++ b/meta-selftest/recipes-test/git-submodule-test/git-submodule-test.bb
@@ -5,6 +5,8 @@ LIC_FILES_CHKSUM = "file://${COMMON_LICENSE_DIR}/MIT;md5=0835ade698e0bcf8506ecda
INHIBIT_DEFAULT_DEPS = "1"
+UPSTREAM_VERSION_UNKNOWN = "1"
+
SRC_URI = "gitsm://git.yoctoproject.org/git-submodule-test;branch=master"
SRCREV = "a2885dd7d25380d23627e7544b7bbb55014b16ee"
diff --git a/meta-selftest/recipes-test/gitunpackoffline/gitunpackoffline-fail.bb b/meta-selftest/recipes-test/gitunpackoffline/gitunpackoffline-fail.bb
index 602e895199..5146129666 100644
--- a/meta-selftest/recipes-test/gitunpackoffline/gitunpackoffline-fail.bb
+++ b/meta-selftest/recipes-test/gitunpackoffline/gitunpackoffline-fail.bb
@@ -1,16 +1,4 @@
-SUMMARY = "Test recipe for fetching git submodules"
-HOMEPAGE = "https://git.yoctoproject.org/git/matchbox-panel-2"
-LICENSE = "GPL-2.0-or-later"
-LIC_FILES_CHKSUM = "file://COPYING;md5=94d55d512a9ba36caa9b7df079bae19f"
+require gitunpackoffline.inc
-INHIBIT_DEFAULT_DEPS = "1"
-
-TAGVALUE = "2.10"
-
-# Deliberately have a tag which has to be resolved but ensure do_unpack doesn't access the network again.
-SRC_URI = "git://git.yoctoproject.org/git/matchbox-panel-2;branch=master;protocol=https"
-SRC_URI:append:gitunpack-enable-recipe = ";tag=${TAGVALUE}"
-SRCREV = "f82ca3f42510fb3ef10f598b393eb373a2c34ca7"
-SRCREV:gitunpack-enable-recipe = ""
-
-S = "${WORKDIR}/git"
+# Clear the base.bbclass magic srcrev call
+fetcher_hashes_dummyfunc[vardepvalue] = ""
diff --git a/meta-selftest/recipes-test/gitunpackoffline/gitunpackoffline.bb b/meta-selftest/recipes-test/gitunpackoffline/gitunpackoffline.bb
index 597c89b199..d9a54690b2 100644
--- a/meta-selftest/recipes-test/gitunpackoffline/gitunpackoffline.bb
+++ b/meta-selftest/recipes-test/gitunpackoffline/gitunpackoffline.bb
@@ -1,5 +1,5 @@
-require gitunpackoffline-fail.bb
+require gitunpackoffline.inc
TAGVALUE = "2.11"
-PV = "0.0+git${SRCPV}"
+PV = "0.0+git"
diff --git a/meta-selftest/recipes-test/gitunpackoffline/gitunpackoffline.inc b/meta-selftest/recipes-test/gitunpackoffline/gitunpackoffline.inc
new file mode 100644
index 0000000000..602e895199
--- /dev/null
+++ b/meta-selftest/recipes-test/gitunpackoffline/gitunpackoffline.inc
@@ -0,0 +1,16 @@
+SUMMARY = "Test recipe for fetching git submodules"
+HOMEPAGE = "https://git.yoctoproject.org/git/matchbox-panel-2"
+LICENSE = "GPL-2.0-or-later"
+LIC_FILES_CHKSUM = "file://COPYING;md5=94d55d512a9ba36caa9b7df079bae19f"
+
+INHIBIT_DEFAULT_DEPS = "1"
+
+TAGVALUE = "2.10"
+
+# Deliberately have a tag which has to be resolved but ensure do_unpack doesn't access the network again.
+SRC_URI = "git://git.yoctoproject.org/git/matchbox-panel-2;branch=master;protocol=https"
+SRC_URI:append:gitunpack-enable-recipe = ";tag=${TAGVALUE}"
+SRCREV = "f82ca3f42510fb3ef10f598b393eb373a2c34ca7"
+SRCREV:gitunpack-enable-recipe = ""
+
+S = "${WORKDIR}/git"
diff --git a/meta-selftest/recipes-test/images/oe-selftest-image.bb b/meta-selftest/recipes-test/images/oe-selftest-image.bb
index e295943ae5..317a0712aa 100644
--- a/meta-selftest/recipes-test/images/oe-selftest-image.bb
+++ b/meta-selftest/recipes-test/images/oe-selftest-image.bb
@@ -1,7 +1,7 @@
SUMMARY = "An image used during oe-selftest tests"
# libudev is needed for deploy mdadm via devtool
-IMAGE_INSTALL = "packagegroup-core-boot dropbear libudev"
+IMAGE_INSTALL = "packagegroup-core-boot packagegroup-core-ssh-dropbear libudev"
IMAGE_FEATURES = "debug-tweaks"
IMAGE_LINGUAS = " "
diff --git a/meta-selftest/recipes-test/license/incompatible-license-alias.bb b/meta-selftest/recipes-test/license/incompatible-license-alias.bb
index e0b4e13c26..1af99e7809 100644
--- a/meta-selftest/recipes-test/license/incompatible-license-alias.bb
+++ b/meta-selftest/recipes-test/license/incompatible-license-alias.bb
@@ -1,3 +1,5 @@
SUMMARY = "Recipe with an alias of an SPDX license"
DESCRIPTION = "Is licensed with an alias of an SPDX license to be used for testing"
LICENSE = "GPLv3"
+
+EXCLUDE_FROM_WORLD = "1"
diff --git a/meta-selftest/recipes-test/license/incompatible-license.bb b/meta-selftest/recipes-test/license/incompatible-license.bb
index 282f5c2875..6fdc58fd30 100644
--- a/meta-selftest/recipes-test/license/incompatible-license.bb
+++ b/meta-selftest/recipes-test/license/incompatible-license.bb
@@ -1,3 +1,5 @@
SUMMARY = "Recipe with an SPDX license"
DESCRIPTION = "Is licensed with an SPDX license to be used for testing"
LICENSE = "GPL-3.0-only"
+
+EXCLUDE_FROM_WORLD = "1"
diff --git a/meta-selftest/recipes-test/license/incompatible-licenses.bb b/meta-selftest/recipes-test/license/incompatible-licenses.bb
index 9709892644..47bd8d7c00 100644
--- a/meta-selftest/recipes-test/license/incompatible-licenses.bb
+++ b/meta-selftest/recipes-test/license/incompatible-licenses.bb
@@ -1,3 +1,5 @@
SUMMARY = "Recipe with multiple SPDX licenses"
DESCRIPTION = "Is licensed with multiple SPDX licenses to be used for testing"
LICENSE = "GPL-2.0-only & GPL-3.0-only & LGPL-3.0-only"
+
+EXCLUDE_FROM_WORLD = "1"
diff --git a/meta-selftest/recipes-test/license/incompatible-nonspdx-license.bb b/meta-selftest/recipes-test/license/incompatible-nonspdx-license.bb
index 35af0966ef..142d73158e 100644
--- a/meta-selftest/recipes-test/license/incompatible-nonspdx-license.bb
+++ b/meta-selftest/recipes-test/license/incompatible-nonspdx-license.bb
@@ -1,3 +1,5 @@
SUMMARY = "Recipe with a non-SPDX license"
DESCRIPTION = "Is licensed with a non-SPDX license to be used for testing"
LICENSE = "FooLicense"
+
+EXCLUDE_FROM_WORLD = "1"
diff --git a/meta-selftest/recipes-test/multiconfig/multiconfig-image-packager_0.1.bb b/meta-selftest/recipes-test/multiconfig/multiconfig-image-packager_0.1.bb
index daf2834958..d7785cee2e 100644
--- a/meta-selftest/recipes-test/multiconfig/multiconfig-image-packager_0.1.bb
+++ b/meta-selftest/recipes-test/multiconfig/multiconfig-image-packager_0.1.bb
@@ -7,15 +7,19 @@ MCIMGTYPE:virtclass-mcextend-musl = "ext4"
MCIMGTYPE:virtclass-mcextend-tiny = "cpio.gz"
MC_DEPLOY_DIR_IMAGE = "${TOPDIR}/tmp-mc-${MCNAME}/deploy/images/${MCMACHINE}"
+MC_DEPLOY_IMAGE_BASENAME = "core-image-minimal"
do_install[mcdepends] += "mc::${MCNAME}:core-image-minimal:do_image_complete mc::${MCNAME}:virtual/kernel:do_deploy"
do_install () {
install -d ${D}/var/lib/machines/${MCNAME}
- install ${MC_DEPLOY_DIR_IMAGE}/core-image-minimal-${MCMACHINE}.${MCIMGTYPE} ${D}/var/lib/machines/${MCNAME}/core-image-minimal.${MCIMGTYPE}
+ install ${MC_DEPLOY_DIR_IMAGE}/${IMAGE_LINK_NAME_CORE_IMAGE_MINIMAL}.${MCIMGTYPE} ${D}/var/lib/machines/${MCNAME}/${MC_DEPLOY_IMAGE_BASENAME}.${MCIMGTYPE}
install ${MC_DEPLOY_DIR_IMAGE}/bzImage ${D}/var/lib/machines/${MCNAME}
}
+# for IMAGE_LINK_NAME, IMAGE_BASENAME
+inherit image-artifact-names
+
python () {
mcname = d.getVar('MCNAME')
if not mcname:
@@ -23,6 +27,18 @@ python () {
multiconfigs = d.getVar('BBMULTICONFIG') or ""
if mcname not in multiconfigs:
raise bb.parse.SkipRecipe("multiconfig target %s not enabled" % mcname)
+
+ # these will most likely start with my BPN multiconfig-image-packager, but I want them from core-image-minimal
+ # as there is no good way to query core-image-minimal's context lets assume that there are no overrides
+ # and that we can just replace IMAGE_BASENAME
+ image_link_name = d.getVar('IMAGE_LINK_NAME')
+ image_basename = d.getVar('IMAGE_BASENAME')
+ machine = d.getVar('MACHINE')
+ mcmachine = d.getVar('MCMACHINE')
+ image_to_deploy = d.getVar('MC_DEPLOY_IMAGE_BASENAME')
+ image_link_name_to_deploy = image_link_name.replace(image_basename, image_to_deploy).replace(machine, mcmachine)
+ bb.warn('%s: assuming that "%s" built for "%s" has IMAGE_LINK_NAME "%s"' % (d.getVar('PN'), mcmachine, image_to_deploy, image_link_name_to_deploy))
+ d.setVar('IMAGE_LINK_NAME_CORE_IMAGE_MINIMAL', image_link_name_to_deploy)
}
BBCLASSEXTEND = "mcextend:tiny mcextend:musl"
diff --git a/meta-selftest/recipes-test/overlayfs-user/overlayfs-user.bb b/meta-selftest/recipes-test/overlayfs-user/overlayfs-user.bb
index 913a4d1fdb..3b59e37619 100644
--- a/meta-selftest/recipes-test/overlayfs-user/overlayfs-user.bb
+++ b/meta-selftest/recipes-test/overlayfs-user/overlayfs-user.bb
@@ -5,13 +5,18 @@ LICENSE = "MIT"
INHIBIT_DEFAULT_DEPS = "1"
EXCLUDE_FROM_WORLD = "1"
-inherit ${@bb.utils.contains("DISTRO_FEATURES", "overlayfs", "overlayfs", "", d)}
+inherit_defer ${@bb.utils.contains("DISTRO_FEATURES", "overlayfs", "overlayfs", "", d)}
include test_recipe.inc
OVERLAYFS_WRITABLE_PATHS[mnt-overlay] += "/usr/share/my-application"
do_install() {
install -d ${D}/usr/share/my-application
+ install -d ${D}${sysconfdir}
+ echo "Original file in /etc" >> ${D}${sysconfdir}/lower-layer-test.txt
}
-FILES:${PN} += "/usr"
+FILES:${PN} += "\
+ ${exec_prefix} \
+ ${sysconfdir} \
+"
diff --git a/meta-selftest/recipes-test/packagenameconflict/packagenameconflict.bb b/meta-selftest/recipes-test/packagenameconflict/packagenameconflict.bb
new file mode 100644
index 0000000000..5d19a4dd25
--- /dev/null
+++ b/meta-selftest/recipes-test/packagenameconflict/packagenameconflict.bb
@@ -0,0 +1,10 @@
+SUMMARY = "Test case that tries to rename a package to an existing one and fails"
+DESCRIPTION = "This generates a packaging error when a package is renamed to a pre-existing name"
+LICENSE = "MIT"
+
+# Add a new package ${PN}-renametest
+PACKAGES += "${PN}-renametest"
+# ... and try to rename the ${PN}-dev to the new ${PN}-renametest (conflict)
+PKG:${PN}-dev = "${PN}-renametest"
+
+EXCLUDE_FROM_WORLD = "1"
diff --git a/meta-selftest/recipes-test/perlcross/perlcross_%.bbappend b/meta-selftest/recipes-test/perlcross/perlcross_%.bbappend
new file mode 100644
index 0000000000..205720982c
--- /dev/null
+++ b/meta-selftest/recipes-test/perlcross/perlcross_%.bbappend
@@ -0,0 +1,2 @@
+# This bbappend is used to alter the recipe using the test_recipe.inc file created by tests.
+include test_recipe.inc
diff --git a/meta-selftest/recipes-test/poison/poison.bb b/meta-selftest/recipes-test/poison/poison.bb
index 7ace901cc3..e9eee0cdba 100644
--- a/meta-selftest/recipes-test/poison/poison.bb
+++ b/meta-selftest/recipes-test/poison/poison.bb
@@ -9,8 +9,12 @@ inherit nopackages
# will result in compiler errors. This recipe should will fail to build and
# oe-selftest has a test that verifies that.
do_compile() {
- touch empty.c
- ${CPP} ${CFLAGS} -I/usr/include empty.c
+ bbnote Testing preprocessor
+ echo "int main(int argc, char** argv) {}" | ${CPP} -I/usr/include -
+ bbnote Testing C compiler
+ echo "int main(int argc, char** argv) {}" | ${CC} -x c -I/usr/include -
+ bbnote Testing C++ compiler
+ echo "int main(int argc, char** argv) {}" | ${CC} -x c++ -I/usr/include -
}
EXCLUDE_FROM_WORLD = "1"
diff --git a/meta-selftest/recipes-test/selftest-ed/selftest-ed_0.5.bb b/meta-selftest/recipes-test/selftest-ed/selftest-ed_0.5.bb
index c5037a4912..1a140a532f 100644
--- a/meta-selftest/recipes-test/selftest-ed/selftest-ed_0.5.bb
+++ b/meta-selftest/recipes-test/selftest-ed/selftest-ed_0.5.bb
@@ -5,7 +5,6 @@ LICENSE = "GPL-2.0-or-later"
LIC_FILES_CHKSUM = "file://COPYING;md5=6ddd5335ef96fb858a138230af773710 \
file://main.c;beginline=1;endline=17;md5=36d4b85e5ae9028e918d1cc775c2475e"
-PR = "r2"
SRC_URI = "${SAVANNAH_GNU_MIRROR}/ed/ed-${PV}.tar.bz2"
SRC_URI[md5sum] = "4ee21e9dcc9b5b6012c23038734e1632"
diff --git a/meta-selftest/recipes-test/selftest-hello/files/helloworld.c b/meta-selftest/recipes-test/selftest-hello/files/helloworld.c
new file mode 100644
index 0000000000..fc7169b7b8
--- /dev/null
+++ b/meta-selftest/recipes-test/selftest-hello/files/helloworld.c
@@ -0,0 +1,8 @@
+#include <stdio.h>
+
+int main(void)
+{
+ printf("Hello world!\n");
+
+ return 0;
+}
diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
new file mode 100644
index 0000000000..547587bef4
--- /dev/null
+++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
@@ -0,0 +1,19 @@
+DESCRIPTION = "Simple helloworld application -- selftest variant"
+SECTION = "examples"
+LICENSE = "MIT"
+LIC_FILES_CHKSUM = "file://${COMMON_LICENSE_DIR}/MIT;md5=0835ade698e0bcf8506ecda2f7b4f302"
+
+SRC_URI = "file://helloworld.c"
+
+S = "${WORKDIR}"
+
+do_compile() {
+ ${CC} ${CFLAGS} ${LDFLAGS} helloworld.c -o helloworld
+}
+
+do_install() {
+ install -d ${D}${bindir}
+ install -m 0755 helloworld ${D}${bindir}
+}
+
+BBCLASSEXTEND = "native nativesdk" \ No newline at end of file
diff --git a/meta-selftest/recipes-test/selftest-users/acreategroup.bb b/meta-selftest/recipes-test/selftest-users/acreategroup.bb
new file mode 100644
index 0000000000..66ed5695a2
--- /dev/null
+++ b/meta-selftest/recipes-test/selftest-users/acreategroup.bb
@@ -0,0 +1,32 @@
+SUMMARY = "creategroup_a"
+LIC_FILES_CHKSUM = "file://${COMMON_LICENSE_DIR}/MIT;md5=0835ade698e0bcf8506ecda2f7b4f302"
+
+LICENSE = "MIT"
+
+S = "${WORKDIR}"
+
+EXCLUDE_FROM_WORLD="1"
+
+inherit useradd allarch
+
+USERADD_PACKAGES = "${PN}"
+USERADD_PARAM:${PN} = "-u 5560 --gid a_group a_user"
+GROUPADD_PARAM:${PN} = "-r a_group"
+
+TESTDIR = "${D}${sysconfdir}/creategroup"
+
+do_install() {
+ install -d ${TESTDIR}
+ install -d ${TESTDIR}/dir
+ touch ${TESTDIR}/file
+ ln -s ./file ${TESTDIR}/symlink
+ install -d ${TESTDIR}/fifotest
+ mkfifo ${TESTDIR}/fifotest/fifo
+
+ chown a_user:a_group ${TESTDIR}/file
+ chown -R a_user:a_group ${TESTDIR}/dir
+ chown -h a_user:a_group ${TESTDIR}/symlink
+ chown -R a_user:a_group ${TESTDIR}/fifotest
+}
+
+FILES:${PN} = "${sysconfdir}/creategroup/*"
diff --git a/meta-selftest/recipes-test/selftest-users/bcreategroup.bb b/meta-selftest/recipes-test/selftest-users/bcreategroup.bb
new file mode 100644
index 0000000000..c4844dd0da
--- /dev/null
+++ b/meta-selftest/recipes-test/selftest-users/bcreategroup.bb
@@ -0,0 +1,37 @@
+SUMMARY = "creategroup_b"
+LIC_FILES_CHKSUM = "file://${COMMON_LICENSE_DIR}/MIT;md5=0835ade698e0bcf8506ecda2f7b4f302"
+
+LICENSE = "MIT"
+
+# This recipe requires a and c. C requires A. Reverse alpha.
+
+USERADD_DEPENDS = "acreategroup ccreategroup"
+
+S = "${WORKDIR}"
+
+EXCLUDE_FROM_WORLD="1"
+
+inherit useradd allarch
+
+USERADD_PACKAGES = "${PN}"
+USERADD_PARAM:${PN} = "-u 5561 -g b_group -G a_group,c_group b_user "
+GROUPADD_PARAM:${PN} = "-r b_group"
+
+TESTDIR = "${D}${sysconfdir}/creategroup"
+
+do_install() {
+ install -d ${TESTDIR}
+ install -d ${TESTDIR}/dir
+ touch ${TESTDIR}/file
+ ln -s ./file ${TESTDIR}/symlink
+ install -d ${TESTDIR}/fifotest
+ mkfifo ${TESTDIR}/fifotest/fifo
+
+ chown a_user:a_group ${TESTDIR}/file
+ chown -R c_user:c_group ${TESTDIR}/dir
+ chown -h a_user:a_group ${TESTDIR}/symlink
+ chown -R b_user:b_group ${TESTDIR}/fifotest
+}
+
+FILES:${PN} = "${sysconfdir}/creategroup/*"
+
diff --git a/meta-selftest/recipes-test/selftest-users/ccreategroup.bb b/meta-selftest/recipes-test/selftest-users/ccreategroup.bb
new file mode 100644
index 0000000000..021b1ebbf7
--- /dev/null
+++ b/meta-selftest/recipes-test/selftest-users/ccreategroup.bb
@@ -0,0 +1,34 @@
+SUMMARY = "creategroup_c"
+LIC_FILES_CHKSUM = "file://${COMMON_LICENSE_DIR}/MIT;md5=0835ade698e0bcf8506ecda2f7b4f302"
+
+LICENSE = "MIT"
+
+USERADD_DEPENDS = "acreategroup"
+
+S = "${WORKDIR}"
+
+EXCLUDE_FROM_WORLD="1"
+
+inherit useradd allarch
+
+USERADD_PACKAGES = "${PN}"
+USERADD_PARAM:${PN} = "-u 5563 --gid c_group -G a_group c_user"
+GROUPADD_PARAM:${PN} = "-r c_group"
+
+TESTDIR = "${D}${sysconfdir}/creategroup"
+
+do_install() {
+ install -d ${TESTDIR}
+ install -d ${TESTDIR}/dir
+ touch ${TESTDIR}/file
+ ln -s ./file ${TESTDIR}/symlink
+ install -d ${TESTDIR}/fifotest
+ mkfifo ${TESTDIR}/fifotest/fifo
+
+ chown c_user:c_group ${TESTDIR}/file
+ chown -R c_user:c_group ${TESTDIR}/dir
+ chown -h c_user:c_group ${TESTDIR}/symlink
+ chown -R c_user:c_group ${TESTDIR}/fifotest
+}
+
+FILES:${PN} = "${sysconfdir}/creategroup/*"
diff --git a/meta-selftest/recipes-test/selftest-users/creategroup1.bb b/meta-selftest/recipes-test/selftest-users/creategroup1.bb
new file mode 100644
index 0000000000..afd23ed1ee
--- /dev/null
+++ b/meta-selftest/recipes-test/selftest-users/creategroup1.bb
@@ -0,0 +1,30 @@
+SUMMARY = "creategroup pt 1"
+LIC_FILES_CHKSUM = "file://${COMMON_LICENSE_DIR}/MIT;md5=0835ade698e0bcf8506ecda2f7b4f302"
+
+LICENSE = "MIT"
+
+S = "${WORKDIR}"
+
+inherit useradd allarch
+
+USERADD_PACKAGES = "${PN}"
+USERADD_PARAM:${PN} = "-u 5555 --gid grouptest gt1"
+GROUPADD_PARAM:${PN} = "-r grouptest"
+
+TESTDIR = "${D}${sysconfdir}/creategroup"
+
+do_install() {
+ install -d ${TESTDIR}
+ install -d ${TESTDIR}/dir
+ touch ${TESTDIR}/file
+ ln -s ./file ${TESTDIR}/symlink
+ install -d ${TESTDIR}/fifotest
+ mkfifo ${TESTDIR}/fifotest/fifo
+
+ chown gt1:grouptest ${TESTDIR}/file
+ chown -R gt1:grouptest ${TESTDIR}/dir
+ chown -h gt1:grouptest ${TESTDIR}/symlink
+ chown -R gt1:grouptest ${TESTDIR}/fifotest
+}
+
+FILES:${PN} = "${sysconfdir}/creategroup/*"
diff --git a/meta-selftest/recipes-test/selftest-users/creategroup2.bb b/meta-selftest/recipes-test/selftest-users/creategroup2.bb
new file mode 100644
index 0000000000..f776f43aed
--- /dev/null
+++ b/meta-selftest/recipes-test/selftest-users/creategroup2.bb
@@ -0,0 +1,32 @@
+SUMMARY = "creategroup pt 2"
+LIC_FILES_CHKSUM = "file://${COMMON_LICENSE_DIR}/MIT;md5=0835ade698e0bcf8506ecda2f7b4f302"
+
+LICENSE = "MIT"
+
+USERADD_DEPENDS = "creategroup1"
+
+S = "${WORKDIR}"
+
+inherit useradd allarch
+
+USERADD_PACKAGES = "${PN}"
+USERADD_PARAM:${PN} = "-u 5556 --gid grouptest gt2"
+
+TESTDIR = "${D}${sysconfdir}/creategroup"
+
+do_install() {
+ install -d ${TESTDIR}
+ install -d ${TESTDIR}/dir
+ touch ${TESTDIR}/file
+ ln -s ./file ${TESTDIR}/symlink
+ install -d ${TESTDIR}/fifotest
+ mkfifo ${TESTDIR}/fifotest/fifo
+
+ chown gt2:grouptest ${TESTDIR}/file
+ chown -R gt2:grouptest ${TESTDIR}/dir
+ chown -h gt2:grouptest ${TESTDIR}/symlink
+ chown -R gt2:grouptest ${TESTDIR}/fifotest
+}
+
+FILES:${PN} = "${sysconfdir}/creategroup/*"
+
diff --git a/meta-selftest/recipes-test/selftest-users/dcreategroup.bb b/meta-selftest/recipes-test/selftest-users/dcreategroup.bb
new file mode 100644
index 0000000000..b96ca92a16
--- /dev/null
+++ b/meta-selftest/recipes-test/selftest-users/dcreategroup.bb
@@ -0,0 +1,33 @@
+SUMMARY = "creategroup_d"
+LIC_FILES_CHKSUM = "file://${COMMON_LICENSE_DIR}/MIT;md5=0835ade698e0bcf8506ecda2f7b4f302"
+
+LICENSE = "MIT"
+
+USERADD_DEPENDS = "bcreategroup"
+
+S = "${WORKDIR}"
+
+EXCLUDE_FROM_WORLD="1"
+
+inherit useradd allarch
+
+USERADD_PACKAGES = "${PN}"
+USERADD_PARAM:${PN} = "-u 5564 -g d_group -G b_group d_user "
+GROUPADD_PARAM:${PN} = "-r d_group"
+
+TESTDIR = "${D}${sysconfdir}/creategroup"
+
+do_install() {
+ install -d ${TESTDIR}
+ install -d ${TESTDIR}/dir
+ touch ${TESTDIR}/file
+ ln -s ./file ${TESTDIR}/symlink
+ install -d ${TESTDIR}/fifotest
+ mkfifo ${TESTDIR}/fifotest/fifo
+
+ chown d_user:d_group ${TESTDIR}/file
+ chown -R d_user:b_group ${TESTDIR}/dir
+ chown -h d_user:d_group ${TESTDIR}/symlink
+ chown -R d_user:b_group ${TESTDIR}/fifotest
+}
+
diff --git a/meta-selftest/recipes-test/selftest-users/useraddbadtask.bb b/meta-selftest/recipes-test/selftest-users/useraddbadtask.bb
new file mode 100644
index 0000000000..99e04a80b3
--- /dev/null
+++ b/meta-selftest/recipes-test/selftest-users/useraddbadtask.bb
@@ -0,0 +1,20 @@
+SUMMARY = "UserAddBadTask"
+LIC_FILES_CHKSUM = "file://${COMMON_LICENSE_DIR}/MIT;md5=0835ade698e0bcf8506ecda2f7b4f302"
+
+LICENSE = "MIT"
+
+DEPENDS:append = "coreutils-native"
+
+S = "${WORKDIR}"
+
+inherit useradd allarch
+
+USERADD_PACKAGES = "${PN}"
+USERADD_PARAM:${PN} = "-u 5555 --gid groupaddtask useraddtask"
+GROUPADD_PARAM:${PN} = "-r groupaddtask"
+
+do_badthingshappen() {
+ echo "foo"
+}
+
+addtask badthingshappen after do_populate_sysroot before do_package
diff --git a/meta-selftest/recipes-test/sysroot-test/sysroot-la-test_1.0.bb b/meta-selftest/recipes-test/sysroot-test/sysroot-la-test_1.0.bb
new file mode 100644
index 0000000000..21f06782fb
--- /dev/null
+++ b/meta-selftest/recipes-test/sysroot-test/sysroot-la-test_1.0.bb
@@ -0,0 +1,16 @@
+SUMMARY = "Produce a broken la file"
+LICENSE = "CLOSED"
+INHIBIT_DEFAULT_DEPS = "1"
+
+EXCLUDE_FROM_WORLD = "1"
+
+# remove-libtool.bbclass is inherited by default and removes all
+# .la files which for this test we specifically do not want.
+REMOVE_LIBTOOL_LA = "0"
+
+do_install() {
+ install -d ${D}${libdir}/test/
+ echo '${WORKDIR}' > ${D}${libdir}/test/la-test.la
+}
+
+BBCLASSEXTEND += "native"
diff --git a/meta-selftest/recipes-test/sysroot-test/sysroot-pc-test_1.0.bb b/meta-selftest/recipes-test/sysroot-test/sysroot-pc-test_1.0.bb
new file mode 100644
index 0000000000..e748310fc4
--- /dev/null
+++ b/meta-selftest/recipes-test/sysroot-test/sysroot-pc-test_1.0.bb
@@ -0,0 +1,12 @@
+SUMMARY = "Produce a broken pc file"
+LICENSE = "CLOSED"
+INHIBIT_DEFAULT_DEPS = "1"
+
+EXCLUDE_FROM_WORLD = "1"
+
+do_install() {
+ install -d ${D}${libdir}/test/
+ echo '${WORKDIR}' > ${D}${libdir}/test/test.pc
+}
+
+BBCLASSEXTEND += "native"
diff --git a/meta-selftest/recipes-test/sysroot-test/sysroot-shebang-test_1.0.bb b/meta-selftest/recipes-test/sysroot-test/sysroot-shebang-test_1.0.bb
new file mode 100644
index 0000000000..6c834be897
--- /dev/null
+++ b/meta-selftest/recipes-test/sysroot-test/sysroot-shebang-test_1.0.bb
@@ -0,0 +1,12 @@
+SUMMARY = "Check that shebang does not exceed 128 characters"
+LICENSE = "CLOSED"
+INHIBIT_DEFAULT_DEPS = "1"
+
+EXCLUDE_FROM_WORLD = "1"
+do_install() {
+ install -d ${D}${bindir}
+ echo '#!BiM3cnVd1Amtv6PG+FynrQiVMbZnX5ELgF21q3EkuB+44JEGWtq8TvBJ7EGidfVs3eR3wVOUbLnjYDlKUWcm7YC/ute7f+KDHbwxziRUSUBZAUqgjiQdfQ0HnxajI0ozbM863E9JV9k13yZKYfh9/zR77Y6Dl4Dd3zOWS75LSpkAXV' > ${D}${bindir}/max-shebang
+ chmod 755 ${D}${bindir}/max-shebang
+}
+
+BBCLASSEXTEND = "native"
diff --git a/meta-selftest/recipes-test/testrpm/files/testfile.txt b/meta-selftest/recipes-test/testrpm/files/testfile.txt
new file mode 100644
index 0000000000..c4d7630c1e
--- /dev/null
+++ b/meta-selftest/recipes-test/testrpm/files/testfile.txt
@@ -0,0 +1 @@
+== This file serves the purposes of SRC_URI only
diff --git a/meta-selftest/recipes-test/testrpm/testrpm_0.0.1.bb b/meta-selftest/recipes-test/testrpm/testrpm_0.0.1.bb
new file mode 100644
index 0000000000..5e8761ab55
--- /dev/null
+++ b/meta-selftest/recipes-test/testrpm/testrpm_0.0.1.bb
@@ -0,0 +1,18 @@
+SUMMARY = "Test recipe for testing rpm generated by oe-core"
+LIC_FILES_CHKSUM = "file://${COMMON_LICENSE_DIR}/MIT;md5=0835ade698e0bcf8506ecda2f7b4f302"
+
+LICENSE = "MIT"
+
+SRC_URI = "file://testfile.txt"
+INHIBIT_DEFAULT_DEPS = "1"
+
+do_compile(){
+ echo "testdata" > ${B}/"file with [brackets].txt"
+ echo "testdata" > ${B}/"file with (parentheses).txt"
+}
+
+do_install(){
+ install ${B}/* ${D}/
+}
+
+FILES:${PN} = "*"
diff --git a/meta-selftest/recipes-test/wrapper/cmdline-shebang-wrapper-test.bb b/meta-selftest/recipes-test/wrapper/cmdline-shebang-wrapper-test.bb
new file mode 100644
index 0000000000..c3d3548d4a
--- /dev/null
+++ b/meta-selftest/recipes-test/wrapper/cmdline-shebang-wrapper-test.bb
@@ -0,0 +1,30 @@
+SUMMARY = "Check that create_cmdline_shebang works"
+LICENSE = "MIT"
+LIC_FILES_CHKSUM = "file://${COREBASE}/meta/COPYING.MIT;md5=3da9cfbcb788c80a0384361b4de20420"
+INHIBIT_DEFAULT_DEPS = "1"
+
+SRC_URI += "file://test.awk"
+
+EXCLUDE_FROM_WORLD = "1"
+do_install() {
+ install -d ${D}${bindir}
+ # was not able to make ownership preservation check
+ install -m 0400 ${WORKDIR}/test.awk ${D}${bindir}/test
+
+ perm_old="$(stat --format='%a' ${D}${bindir}/test)"
+ sed -i -e 's|@AWK_BIN@|${bindir}/awk|g' ${D}${bindir}/test
+ create_cmdline_shebang_wrapper ${D}${bindir}/test
+ if [ $(${D}${bindir}/test) != "Don't Panic!" ]; then
+ bbfatal "Wrapper is broken"
+ else
+ bbnote "Wrapper is good"
+ fi
+
+ perm_new="$(stat --format='%a' ${D}${bindir}/test.real)"
+
+ if [ "$perm_new" != "$perm_old" ]; then
+ bbfatal "Wrapper permissions for ${D}${bindir}/test.real not preserved. Found $perm_new but expected $perm_old"
+ fi
+}
+
+BBCLASSEXTEND = "native"
diff --git a/meta-selftest/recipes-test/wrapper/files/test.awk b/meta-selftest/recipes-test/wrapper/files/test.awk
new file mode 100644
index 0000000000..91429197b1
--- /dev/null
+++ b/meta-selftest/recipes-test/wrapper/files/test.awk
@@ -0,0 +1,2 @@
+#! @AWK_BIN@ -f
+BEGIN { print "Don't Panic!" }
diff --git a/meta-selftest/wic/overlayfs_etc.wks.in b/meta-selftest/wic/overlayfs_etc.wks.in
index 1e1e5836e7..066cd35b15 100644
--- a/meta-selftest/wic/overlayfs_etc.wks.in
+++ b/meta-selftest/wic/overlayfs_etc.wks.in
@@ -1,4 +1,4 @@
part /boot --active --source bootimg-biosplusefi --ondisk sda --sourceparams="loader=grub-efi" --align 1024
-part / --source rootfs --ondisk sda --fstype=ext4 --use-uuid --align 1024
+part / --source rootfs --ondisk sda --fstype=${OVERLAYFS_ROOTFS_TYPE} --use-uuid --align 1024
part --ondisk sda --fstype=ext4 --size=5 --align 1024
-bootloader --ptable gpt --timeout=1 --append="rootfstype=ext4 console=ttyS0,115200 console=tty0 ${OVERLAYFS_INIT_OPTION}"
+bootloader --ptable gpt --timeout=1 --append="rootfstype=${OVERLAYFS_ROOTFS_TYPE} console=ttyS0,115200 console=tty0 ${OVERLAYFS_INIT_OPTION}"
diff --git a/meta-selftest/wic/test_gpt_partition_name.wks b/meta-selftest/wic/test_gpt_partition_name.wks
new file mode 100644
index 0000000000..7db6da9aee
--- /dev/null
+++ b/meta-selftest/wic/test_gpt_partition_name.wks
@@ -0,0 +1,7 @@
+# short-description: image to test part-name in GPT partitions
+
+part --fstype=ext4 --part-name boot-A --label boot --size 1M --align 1024
+part / --source rootfs --fstype=ext4 --part-name root-A --align 1024
+part --fstype=ext4 --label ext-space --size 1M --align 1024
+
+bootloader --ptable gpt
diff --git a/meta-selftest/wic/test_rawcopy_plugin.wks.in b/meta-selftest/wic/test_rawcopy_plugin.wks.in
index 83be4be914..a865dd1d32 100644
--- a/meta-selftest/wic/test_rawcopy_plugin.wks.in
+++ b/meta-selftest/wic/test_rawcopy_plugin.wks.in
@@ -1,6 +1,6 @@
# short-description: This file is used in oe-selftest wic module to test rawcopy plugin
part /boot --active --source bootimg-pcbios
-part / --source rawcopy --sourceparams="file=core-image-minimal-${MACHINE}.ext4" --use-uuid
+part / --source rawcopy --sourceparams="file=${IMAGE_LINK_NAME_CORE_IMAGE_MINIMAL}.ext4" --use-uuid
bootloader --timeout=0 --append="console=ttyS0,115200n8"
diff --git a/meta-selftest/wic/test_uefikernel.wks b/meta-selftest/wic/test_uefikernel.wks
new file mode 100644
index 0000000000..bede2288f6
--- /dev/null
+++ b/meta-selftest/wic/test_uefikernel.wks
@@ -0,0 +1,5 @@
+# short-description: This file is used in oe-selftest wic module to test uefi-kernel loader
+
+part /boot --source bootimg-efi --sourceparams="loader=uefi-kernel"
+part / --source rootfs --fstype=ext4 --align 1024 --use-uuid
+
diff --git a/meta-skeleton/conf/layer.conf b/meta-skeleton/conf/layer.conf
index 1704a753a0..963d2d5ce9 100644
--- a/meta-skeleton/conf/layer.conf
+++ b/meta-skeleton/conf/layer.conf
@@ -14,4 +14,4 @@ LAYERVERSION_skeleton = "1"
LAYERDEPENDS_skeleton = "core"
-LAYERSERIES_COMPAT_skeleton = "kirkstone langdale"
+LAYERSERIES_COMPAT_skeleton = "styhead"
diff --git a/meta-skeleton/recipes-baremetal/baremetal-examples/baremetal-helloworld_git.bb b/meta-skeleton/recipes-baremetal/baremetal-examples/baremetal-helloworld_git.bb
deleted file mode 100644
index d11e2e530e..0000000000
--- a/meta-skeleton/recipes-baremetal/baremetal-examples/baremetal-helloworld_git.bb
+++ /dev/null
@@ -1,53 +0,0 @@
-SUMMARY = "Baremetal examples to work with the several QEMU architectures supported on OpenEmbedded"
-HOMEPAGE = "https://github.com/aehs29/baremetal-helloqemu"
-DESCRIPTION = "These are introductory examples to showcase the use of QEMU to run baremetal applications."
-LICENSE = "MIT"
-LIC_FILES_CHKSUM = "file://LICENSE;md5=39346640a23c701e4f459e05f56f4449"
-
-SRCREV = "31b4e5a337018b4a00a7426b0e5ed83b81df30c7"
-PV = "0.1+git${SRCPV}"
-
-SRC_URI = "git://github.com/aehs29/baremetal-helloqemu.git;protocol=https;branch=master"
-
-S = "${WORKDIR}/git"
-
-# The following variables should be set to accomodate each application
-BAREMETAL_BINNAME ?= "hello_baremetal_${MACHINE}"
-IMAGE_LINK_NAME ?= "baremetal-helloworld-image-${MACHINE}"
-IMAGE_NAME_SUFFIX ?= ""
-
-# Baremetal-Image creates the proper wiring, assumes the output is provided in
-# binary and ELF format, installed on ${base_libdir}/firmware/ , we want a
-# package to be created since we might have some way of updating the baremetal
-# firmware from Linux
-inherit baremetal-image
-
-
-# These parameters are app specific for this example
-# This will be translated automatically to the architecture and
-# machine that QEMU uses on OE, e.g. -machine virt -cpu cortex-a57
-# but the examples can also be run on other architectures/machines
-# such as vexpress-a15 by overriding the setting on the machine.conf
-COMPATIBLE_MACHINE = "qemuarmv5|qemuarm|qemuarm64|qemuriscv64|qemuriscv32"
-
-BAREMETAL_QEMUARCH ?= ""
-BAREMETAL_QEMUARCH:qemuarmv5 = "versatile"
-BAREMETAL_QEMUARCH:qemuarm = "arm"
-BAREMETAL_QEMUARCH:qemuarm64 = "aarch64"
-BAREMETAL_QEMUARCH:qemuriscv64 = "riscv64"
-BAREMETAL_QEMUARCH:qemuriscv32 = "riscv32"
-
-EXTRA_OEMAKE:append = " QEMUARCH=${BAREMETAL_QEMUARCH} V=1"
-
-
-# Install binaries on the proper location for baremetal-image to fetch and deploy
-do_install(){
- install -d ${D}/${base_libdir}/firmware
- install -m 755 ${B}/build/hello_baremetal_${BAREMETAL_QEMUARCH}.bin ${D}/${base_libdir}/firmware/${BAREMETAL_BINNAME}.bin
- install -m 755 ${B}/build/hello_baremetal_${BAREMETAL_QEMUARCH}.elf ${D}/${base_libdir}/firmware/${BAREMETAL_BINNAME}.elf
-}
-
-FILES:${PN} += " \
- ${base_libdir}/firmware/${BAREMETAL_BINNAME}.bin \
- ${base_libdir}/firmware/${BAREMETAL_BINNAME}.elf \
-"
diff --git a/meta-skeleton/recipes-kernel/hello-mod/files/hello.c b/meta-skeleton/recipes-kernel/hello-mod/files/hello.c
index 6b73a79524..4f73455d20 100644
--- a/meta-skeleton/recipes-kernel/hello-mod/files/hello.c
+++ b/meta-skeleton/recipes-kernel/hello-mod/files/hello.c
@@ -2,18 +2,7 @@
*
* Copyright (C) 2011 Intel Corporation. All rights reserved.
*
- * This program is free software; you can redistribute it and/or modify
- * it under the terms of the GNU General Public License as published by
- * the Free Software Foundation; version 2 of the License.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See
- * the GNU General Public License for more details.
- *
- * You should have received a copy of the GNU General Public License
- * along with this program; if not, write to the Free Software
- * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+ * SPDX-License-Identifier: GPL-2.0-only
*
*****************************************************************************/
diff --git a/meta-skeleton/recipes-kernel/linux/linux-yocto-custom.bb b/meta-skeleton/recipes-kernel/linux/linux-yocto-custom.bb
index 6062f8422c..9437240fcf 100644
--- a/meta-skeleton/recipes-kernel/linux/linux-yocto-custom.bb
+++ b/meta-skeleton/recipes-kernel/linux/linux-yocto-custom.bb
@@ -64,7 +64,7 @@ LINUX_VERSION_EXTENSION:append = "-custom"
# tag: v4.2 64291f7db5bd8150a74ad2036f1037e6a0428df2
SRCREV_machine="64291f7db5bd8150a74ad2036f1037e6a0428df2"
-PV = "${LINUX_VERSION}+git${SRCPV}"
+PV = "${LINUX_VERSION}+git"
# Override COMPATIBLE_MACHINE to include your machine in a copy of this recipe
# file. Leaving it empty here ensures an early explicit build failure.
diff --git a/meta-skeleton/recipes-skeleton/libxpm/libxpm_3.5.6.bb b/meta-skeleton/recipes-skeleton/libxpm/libxpm_3.5.6.bb
index a1ad2e87d4..1fcbbdd65e 100644
--- a/meta-skeleton/recipes-skeleton/libxpm/libxpm_3.5.6.bb
+++ b/meta-skeleton/recipes-skeleton/libxpm/libxpm_3.5.6.bb
@@ -4,7 +4,6 @@ DESCRIPTION = "X11 Pixmap library"
LICENSE = "X-BSD"
LIC_FILES_CHKSUM = "file://COPYING;md5=3e07763d16963c3af12db271a31abaa5"
DEPENDS += "libxext"
-PR = "r2"
PE = "1"
XORG_PN = "libXpm"
diff --git a/meta-skeleton/recipes-skeleton/service/service_0.1.bb b/meta-skeleton/recipes-skeleton/service/service_0.1.bb
index d1d8c5f365..912f6b0f61 100644
--- a/meta-skeleton/recipes-skeleton/service/service_0.1.bb
+++ b/meta-skeleton/recipes-skeleton/service/service_0.1.bb
@@ -9,6 +9,8 @@ SRC_URI = "file://skeleton \
file://COPYRIGHT \
"
+S = "${WORKDIR}"
+
do_compile () {
${CC} ${CFLAGS} ${LDFLAGS} ${WORKDIR}/skeleton_test.c -o ${WORKDIR}/skeleton-test
}
diff --git a/meta-skeleton/recipes-skeleton/useradd/useradd-example.bb b/meta-skeleton/recipes-skeleton/useradd/useradd-example.bb
index 3f4c42d714..8437a5a774 100644
--- a/meta-skeleton/recipes-skeleton/useradd/useradd-example.bb
+++ b/meta-skeleton/recipes-skeleton/useradd/useradd-example.bb
@@ -1,7 +1,6 @@
SUMMARY = "Example recipe for using inherit useradd"
DESCRIPTION = "This recipe serves as an example for using features from useradd.bbclass"
SECTION = "examples"
-PR = "r1"
LICENSE = "MIT"
LIC_FILES_CHKSUM = "file://${COREBASE}/meta/COPYING.MIT;md5=3da9cfbcb788c80a0384361b4de20420"
@@ -32,9 +31,9 @@ USERADD_PACKAGES = "${PN} ${PN}-user3"
# user1 and user2:
USERADD_PARAM:${PN} = "-u 1200 -d /home/user1 -r -s /bin/bash user1; -u 1201 -d /home/user2 -r -s /bin/bash user2"
-# user3 will be managed in the useradd-example-user3 pacakge:
-# As an example, we use the -P option to set clear text password for user3
-USERADD_PARAM:${PN}-user3 = "-u 1202 -d /home/user3 -r -s /bin/bash -P 'user3' user3"
+# user3 will be managed in the useradd-example-user3 package:
+# As an example, we use the -p option to set password ('user3') for user3
+USERADD_PARAM:${PN}-user3 = "-u 1202 -d /home/user3 -r -s /bin/bash -p '\$6\$XAWr.8nc\$bUE4pYYaVb8n6BbnBitU0zeJMtfhTpFpiOBLL9zRl4e4YQo88UU4r/1kjRzmTimCy.BvDh4xoFwVqcO.pihLa1' user3"
# GROUPADD_PARAM works the same way, which you set to the options
# you'd normally pass to the groupadd command. This will create
diff --git a/meta-yocto-bsp/README.hardware.md b/meta-yocto-bsp/README.hardware.md
index 9151d641d5..6045c3d867 100644
--- a/meta-yocto-bsp/README.hardware.md
+++ b/meta-yocto-bsp/README.hardware.md
@@ -1,5 +1,5 @@
- Yocto Project Hardware Reference BSPs README
- ============================================
+Yocto Project Hardware Reference BSPs README
+============================================
This file gives details about using the Yocto Project hardware reference BSPs.
The machines supported can be seen in the conf/machine/ directory and are listed
@@ -13,9 +13,7 @@ consult the documentation for your board/device.
Support for additional devices is normally added by adding BSP layers to your
configuration. For more information please see the Yocto Board Support Package
(BSP) Developer's Guide - documentation source is in documentation/bspguide or
-download the PDF from:
-
- https://docs.yoctoproject.org/
+download the PDF from https://docs.yoctoproject.org/
Note that these reference BSPs use the linux-yocto kernel and in general don't
pull in binary module support for the platforms. This means some device functionality
@@ -27,40 +25,49 @@ Hardware Reference Boards
The following boards are supported by the meta-yocto-bsp layer:
- * Texas Instruments Beaglebone (beaglebone-yocto)
- * Ubiquiti Networks EdgeRouter Lite (edgerouter)
- * General IA platforms (genericx86 and genericx86-64)
+ * Texas Instruments Beaglebone (`beaglebone-yocto`)
+ * General 64-bit Arm SystemReady platforms (`genericarm64`)
+ * General IA platforms (`genericx86` and `genericx86-64`)
For more information see the board's section below. The appropriate MACHINE
variable value corresponding to the board is given in brackets.
-Reference Board Maintenance
-===========================
+Reference Board Maintenance and Contributions
+=============================================
+
+Please refer to our contributor guide here: https://docs.yoctoproject.org/dev/contributor-guide/
+for full details on how to submit changes.
+
+As a quick guide, patches should be sent to <poky@lists.yoctoproject.org>
+The git command to do that would be:
+
+ git send-email -M -1 --to poky@lists.yoctoproject.org
-Send pull requests, patches, comments or questions about meta-yocto-bsps to poky@lists.yoctoproject.org
+Send pull requests, patches, comments or questions about meta-yocto-bsp to
+<poky@lists.yoctoproject.org>.
-Maintainers: Kevin Hao <kexin.hao@windriver.com>
- Bruce Ashfield <bruce.ashfield@gmail.com>
+Maintainers:
+* Kevin Hao <kexin.hao@windriver.com>
+* Bruce Ashfield <bruce.ashfield@gmail.com>
Consumer Devices
================
The following consumer devices are supported by the meta-yocto-bsp layer:
- * Intel x86 based PCs and devices (genericx86)
- * Ubiquiti Networks EdgeRouter Lite (edgerouter)
+ * Arm-based SystemReady devices (`genericarm64`)
+ * Intel x86 based PCs and devices (`genericx86` and `genericx86-64`)
For more information see the device's section below. The appropriate MACHINE
variable value corresponding to the device is given in brackets.
-
- Specific Hardware Documentation
- ===============================
+Specific Hardware Documentation
+===============================
Intel x86 based PCs and devices (genericx86*)
-=============================================
+---------------------------------------------
The genericx86 and genericx86-64 MACHINE are tested on the following platforms:
@@ -87,17 +94,18 @@ target boot device is /dev/sdb, be sure to verify this and use the correct
device as the following commands are run as root and are not reversable.
USB Device:
+
1. Build a live image. This image type consists of a simple filesystem
without a partition table, which is suitable for USB keys, and with the
default setup for the genericx86 machine, this image type is built
automatically for any image you build. For example:
- $ bitbake core-image-minimal
+ $ bitbake core-image-minimal
- 2. Use the "dd" utility to write the image to the raw block device. For
+ 2. Use the `dd` utility to write the image to the raw block device. For
example:
- # dd if=core-image-minimal-genericx86.hddimg of=/dev/sdb
+ # dd if=core-image-minimal-genericx86.hddimg of=/dev/sdb
If the device fails to boot with "Boot error" displayed, or apparently
stops just after the SYSLINUX version banner, it is likely the BIOS cannot
@@ -111,24 +119,43 @@ USB Device:
2. Use a ".wic" image with an EFI partition
- a) With a default grub-efi bootloader:
- # dd if=core-image-minimal-genericx86-64.wic of=/dev/sdb
+ 1. With a default grub-efi bootloader:
+
+ # dd if=core-image-minimal-genericx86-64.wic of=/dev/sdb
+
+ 2. Use systemd-boot instead. Build an image with `EFI_PROVIDER="systemd-boot"` then use the above
+ `dd` command to write the image to a USB stick.
+
+
+SystemReady Arm Platforms (genericarm64)
+----------------------------------------
+
+The genericarm64 MACHINE is designed to work on standard SystemReady IR
+compliant boards with preinstalled firmware.
+
+The genericarm64 MACHINE is currently tested on the following platforms:
- b) Use systemd-boot instead
- - Build an image with EFI_PROVIDER="systemd-boot" then use the above
- dd command to write the image to a USB stick.
+ * Texas Instruments BeaglePlay
+
+The images built are EFI bootable disk images and can be written directly to a
+SD card for booting, for example.
+
+There is also limited support for booting a genericarm64 image inside QEMU. When
+building the image also build the `u-boot` recipe to build the required
+firmware (note that this firmware will _not_ boot on real hardware), then use
+`runqemu` as usual.
Texas Instruments Beaglebone (beaglebone-yocto)
-===============================================
+-----------------------------------------------
The Beaglebone is an ARM Cortex-A8 development board with USB, Ethernet, 2D/3D
accelerated graphics, audio, serial, JTAG, and SD/MMC. The Black adds a faster
CPU, more RAM, eMMC flash and a micro HDMI port. The beaglebone MACHINE is
tested on the following platforms:
- o Beaglebone Black A6
- o Beaglebone A6 (the original "White" model)
+ * Beaglebone Black A6
+ * Beaglebone A6 (the original "White" model)
The Beaglebone Black has eMMC, while the White does not. Pressing the USER/BOOT
button when powering on will temporarily change the boot order. But for the sake
@@ -146,120 +173,10 @@ From a Linux system with access to the image files perform the following steps:
1. Build an image. For example:
- $ bitbake core-image-minimal
+ $ bitbake core-image-minimal
2. Use the "dd" utility to write the image to the SD card. For example:
- # dd if=core-image-minimal-beaglebone-yocto.wic of=/dev/sdb
+ # dd if=core-image-minimal-beaglebone-yocto.wic of=/dev/sdb
3. Insert the SD card into the Beaglebone and boot the board.
-
-Ubiquiti Networks EdgeRouter Lite (edgerouter)
-==============================================
-
-The EdgeRouter Lite is part of the EdgeMax series. It is a MIPS64 router
-(based on the Cavium Octeon processor) with 512MB of RAM, which uses an
-internal USB pendrive for storage.
-
-Setup instructions
-------------------
-
-You will need the following:
-* RJ45 -> serial ("rollover") cable connected from your PC to the CONSOLE
- port on the device
-* Ethernet connected to the first ethernet port on the board
-
-If using NFS as part of the setup process, you will also need:
-* NFS root setup on your workstation
-* TFTP server installed on your workstation (if fetching the kernel from
- TFTP, see below).
-
---- Preparation ---
-
-Build an image (e.g. core-image-minimal) using "edgerouter" as the MACHINE.
-In the following instruction it is based on core-image-minimal. Another target
-may be similiar with it.
-
---- Booting from NFS root / kernel via TFTP ---
-
-Load the kernel, and boot the system as follows:
-
- 1. Get the kernel (vmlinux) file from the tmp/deploy/images/edgerouter
- directory, and make them available on your TFTP server.
-
- 2. Connect the board's first serial port to your workstation and then start up
- your favourite serial terminal so that you will be able to interact with
- the serial console. If you don't have a favourite, picocom is suggested:
-
- $ picocom /dev/ttyS0 -b 115200
-
- 3. Power up or reset the board and press a key on the terminal when prompted
- to get to the U-Boot command line
-
- 4. Set up the environment in U-Boot:
-
- => setenv ipaddr <board ip>
- => setenv serverip <tftp server ip>
-
- 5. Download the kernel and boot:
-
- => tftp tftp $loadaddr vmlinux
- => bootoctlinux $loadaddr coremask=0x3 root=/dev/nfs rw nfsroot=<nfsroot ip>:<rootfs path> ip=<board ip>:<server ip>:<gateway ip>:<netmask>:edgerouter:eth0:off mtdparts=phys_mapped_flash:512k(boot0),512k(boot1),64k@3072k(eeprom)
-
---- Booting from USB disk ---
-
-To boot from the USB disk, you either need to remove it from the edgerouter
-box and populate it from another computer, or use a previously booted NFS
-image and populate from the edgerouter itself.
-
-Type 1: Use partitioned image
------------------------------
-
-Steps:
-
- 1. Remove the USB disk from the edgerouter and insert it into a computer
- that has access to your build artifacts.
-
- 2. Flash the image.
-
- # dd if=core-image-minimal-edgerouter.wic of=/dev/sdb
-
- 3. Insert USB disk into the edgerouter and boot it.
-
-Type 2: NFS
------------
-
-Note: If you place the kernel on the ext3 partition, you must re-create the
- ext3 filesystem, since the factory u-boot can only handle 128 byte inodes and
- cannot read the partition otherwise.
-
- These boot instructions assume that you have recreated the ext3 filesystem with
- 128 byte inodes, you have an updated uboot or you are running and image capable
- of making the filesystem on the board itself.
-
-
- 1. Boot from NFS root
-
- 2. Mount the USB disk partition 2 and then extract the contents of
- tmp/deploy/core-image-XXXX.tar.bz2 into it.
-
- Before starting, copy core-image-minimal-xxx.tar.bz2 and vmlinux into
- rootfs path on your workstation.
-
- and then,
-
- # mount /dev/sda2 /media/sda2
- # tar -xvjpf core-image-minimal-XXX.tar.bz2 -C /media/sda2
- # cp vmlinux /media/sda2/boot/vmlinux
- # umount /media/sda2
- # reboot
-
- 3. Reboot the board and press a key on the terminal when prompted to get to the U-Boot
- command line:
-
- # reboot
-
- 4. Load the kernel and boot:
-
- => ext2load usb 0:2 $loadaddr boot/vmlinux
- => bootoctlinux $loadaddr coremask=0x3 root=/dev/sda2 rw rootwait mtdparts=phys_mapped_flash:512k(boot0),512k(boot1),64k@3072k(eeprom)
diff --git a/meta-yocto-bsp/conf/layer.conf b/meta-yocto-bsp/conf/layer.conf
index 29f6037cce..60902b4de9 100644
--- a/meta-yocto-bsp/conf/layer.conf
+++ b/meta-yocto-bsp/conf/layer.conf
@@ -9,4 +9,4 @@ BBFILE_COLLECTIONS += "yoctobsp"
BBFILE_PATTERN_yoctobsp = "^${LAYERDIR}/"
BBFILE_PRIORITY_yoctobsp = "5"
LAYERVERSION_yoctobsp = "4"
-LAYERSERIES_COMPAT_yoctobsp = "kirkstone langdale"
+LAYERSERIES_COMPAT_yoctobsp = "scarthgap"
diff --git a/meta-yocto-bsp/conf/machine/beaglebone-yocto.conf b/meta-yocto-bsp/conf/machine/beaglebone-yocto.conf
index 284559c50c..459c83f5c2 100644
--- a/meta-yocto-bsp/conf/machine/beaglebone-yocto.conf
+++ b/meta-yocto-bsp/conf/machine/beaglebone-yocto.conf
@@ -3,11 +3,8 @@
#@DESCRIPTION: Reference machine configuration for http://beagleboard.org/bone and http://beagleboard.org/black boards
PREFERRED_PROVIDER_virtual/xserver ?= "xserver-xorg"
-XSERVER ?= "xserver-xorg \
- xf86-video-modesetting \
- "
-MACHINE_EXTRA_RRECOMMENDS = "kernel-modules kernel-devicetree"
+MACHINE_EXTRA_RRECOMMENDS = "kernel-modules"
EXTRA_IMAGEDEPENDS += "virtual/bootloader"
@@ -21,26 +18,23 @@ MACHINE_ESSENTIAL_EXTRA_RDEPENDS += "kernel-image kernel-devicetree"
do_image_wic[depends] += "mtools-native:do_populate_sysroot dosfstools-native:do_populate_sysroot virtual/bootloader:do_deploy"
SERIAL_CONSOLES ?= "115200;ttyS0 115200;ttyO0 115200;ttyAMA0"
-SERIAL_CONSOLES_CHECK = "${SERIAL_CONSOLES}"
PREFERRED_PROVIDER_virtual/kernel ?= "linux-yocto"
-PREFERRED_VERSION_linux-yocto ?= "5.15%"
+PREFERRED_VERSION_linux-yocto ?= "6.6%"
KERNEL_IMAGETYPE = "zImage"
-KERNEL_DEVICETREE = "am335x-bone.dtb am335x-boneblack.dtb am335x-bonegreen.dtb"
-KERNEL_EXTRA_ARGS += "LOADADDR=${UBOOT_ENTRYPOINT}"
+DTB_FILES = "am335x-bone.dtb am335x-boneblack.dtb am335x-bonegreen.dtb"
+KERNEL_DEVICETREE = '${@' '.join('ti/omap/%s' % d for d in '${DTB_FILES}'.split())}'
PREFERRED_PROVIDER_virtual/bootloader ?= "u-boot"
SPL_BINARY = "MLO"
UBOOT_SUFFIX = "img"
UBOOT_MACHINE = "am335x_evm_defconfig"
-UBOOT_ENTRYPOINT = "0x80008000"
-UBOOT_LOADADDRESS = "0x80008000"
MACHINE_FEATURES = "usbgadget usbhost vfat alsa"
-IMAGE_BOOT_FILES ?= "u-boot.${UBOOT_SUFFIX} ${SPL_BINARY} ${KERNEL_IMAGETYPE} ${KERNEL_DEVICETREE}"
+IMAGE_BOOT_FILES ?= "u-boot.${UBOOT_SUFFIX} ${SPL_BINARY} ${KERNEL_IMAGETYPE} ${DTB_FILES}"
# support runqemu
EXTRA_IMAGEDEPENDS += "qemu-native qemu-helper-native"
diff --git a/meta-yocto-bsp/conf/machine/edgerouter.conf b/meta-yocto-bsp/conf/machine/edgerouter.conf
deleted file mode 100644
index 249864e9ce..0000000000
--- a/meta-yocto-bsp/conf/machine/edgerouter.conf
+++ /dev/null
@@ -1,26 +0,0 @@
-#@TYPE: Machine
-#@NAME: Edgerouter
-#@DESCRIPTION: Machine configuration for a generic edgerouter
-
-require conf/machine/include/mips/tune-mips64.inc
-
-MACHINE_FEATURES = "pci ext2 ext3 serial"
-
-KERNEL_IMAGETYPE = "vmlinux"
-KERNEL_ALT_IMAGETYPE = "vmlinux.bin"
-KERNEL_IMAGE_STRIP_EXTRA_SECTIONS = ".comment"
-
-PREFERRED_PROVIDER_virtual/kernel ?= "linux-yocto"
-PREFERRED_VERSION_linux-yocto ?= "5.15%"
-
-SERIAL_CONSOLES = "115200;ttyS0"
-USE_VT ?= "0"
-
-MACHINE_EXTRA_RRECOMMENDS = "kernel-modules"
-
-IMAGE_FSTYPES ?= "jffs2 tar.bz2 wic wic.bmap"
-JFFS2_ERASEBLOCK = "0x10000"
-
-WKS_FILE ?= "edgerouter.wks"
-IMAGE_BOOT_FILES ?= "vmlinux;vmlinux.64"
-do_image_wic[depends] += "mtools-native:do_populate_sysroot dosfstools-native:do_populate_sysroot"
diff --git a/meta-yocto-bsp/conf/machine/genericarm64.conf b/meta-yocto-bsp/conf/machine/genericarm64.conf
new file mode 100644
index 0000000000..4fa9395b31
--- /dev/null
+++ b/meta-yocto-bsp/conf/machine/genericarm64.conf
@@ -0,0 +1,60 @@
+#@TYPE: Machine
+#@NAME: genericarm64
+#@DESCRIPTION: Generic Arm64 machine for typical SystemReady IR/ES platforms, which
+#have working firmware and boot via EFI.
+
+require conf/machine/include/arm/arch-armv8a.inc
+
+# Arm Base System Architecture says v8.0+ is allowed, but FEAT_CRC32 is required
+DEFAULTTUNE = "armv8a-crc"
+
+MACHINE_FEATURES = "acpi alsa bluetooth efi keyboard pci qemu-usermode rtc screen usbhost vfat wifi"
+
+KERNEL_IMAGETYPE = "Image"
+PREFERRED_PROVIDER_virtual/kernel ?= "linux-yocto"
+
+# Install all the kernel modules into the rootfs
+MACHINE_EXTRA_RRECOMMENDS += "kernel-modules"
+# Install selected pieces of firmware
+MACHINE_EXTRA_RRECOMMENDS += "linux-firmware-wl12xx linux-firmware-wl18xx linux-firmware-rtl-nic"
+
+# Use an initramfs and populate it with the kernel modules and key firmware
+INITRAMFS_IMAGE ?= "core-image-initramfs-boot"
+PACKAGE_INSTALL:append:pn-core-image-initramfs-boot = " ${MACHINE_EXTRA_RRECOMMENDS}"
+
+IMAGE_FSTYPES ?= "wic"
+WKS_FILE ?= "genericarm64.wks.in"
+
+EFI_PROVIDER ?= "${@bb.utils.contains("DISTRO_FEATURES", "systemd", "systemd-boot", "grub-efi", d)}"
+
+# Try to bring up one physical serial console, or a virtualized serial console
+SERIAL_CONSOLES ?= "115200;ttyAMA0 115200;hvc0"
+
+# Allow u-boot to be built for use with qemu-system-aarch64.
+# This u-boot is _not_ suitable for use with real hardware, and the expectation
+# of this machine is that real hardware comes with the firmware pre-loaded.
+UBOOT_MACHINE = "qemu_arm64_defconfig"
+
+# runqemu configuration to run a genericarm64 image inside a qemu-system-aarch64. You will need
+# to build u-boot explicitly.
+IMAGE_CLASSES += "qemuboot"
+QB_SYSTEM_NAME = "qemu-system-aarch64"
+# Boot the virtual machine with either an emulated Cortex-A76, or the host if using KVM
+QB_MACHINE = "-machine virt"
+QB_CPU = "-cpu cortex-a76"
+QB_CPU_KVM = "-cpu host -machine gic-version=3"
+QB_SMP = "-smp 4"
+# Boot into U-Boot and let that scan the disk for the next step, don't pass any kernel or filesystem hints
+QB_DEFAULT_BIOS = "u-boot.bin"
+QB_DEFAULT_KERNEL = "none"
+QB_DEFAULT_FSTYPE = "wic"
+QB_FSINFO = "wic:no-kernel-in-fs"
+# Mount the wic rootfs as a virtio block device
+QB_ROOTFS_OPT = "-drive id=root,file=@ROOTFS@,if=none,format=raw -device virtio-blk-pci,drive=root"
+# Virtio graphics
+QB_GRAPHICS = "-device virtio-gpu-pci"
+# Virtio serial consoles
+QB_SERIAL_OPT = "-device virtio-serial-pci -chardev null,id=virtcon -device virtconsole,chardev=virtcon"
+QB_TCPSERIAL_OPT = "-device virtio-serial-pci -chardev socket,id=virtcon,port=@PORT@,host=127.0.0.1,nodelay=on -device virtconsole,chardev=virtcon"
+# Virtio networking
+QB_TAP_OPT = "-netdev tap,id=net0,ifname=@TAP@,script=no,downscript=no"
diff --git a/meta-yocto-bsp/conf/machine/genericx86-64.conf b/meta-yocto-bsp/conf/machine/genericx86-64.conf
index 14913ea1f1..f19a1c1527 100644
--- a/meta-yocto-bsp/conf/machine/genericx86-64.conf
+++ b/meta-yocto-bsp/conf/machine/genericx86-64.conf
@@ -6,6 +6,5 @@ DEFAULTTUNE ?= "core2-64"
require conf/machine/include/x86/tune-core2.inc
require conf/machine/include/genericx86-common.inc
-SERIAL_CONSOLES_CHECK = "ttyS0"
#For runqemu
QB_SYSTEM_NAME = "qemu-system-x86_64"
diff --git a/meta-yocto-bsp/conf/machine/genericx86.conf b/meta-yocto-bsp/conf/machine/genericx86.conf
index ed4fc8a3de..34e1448a8c 100644
--- a/meta-yocto-bsp/conf/machine/genericx86.conf
+++ b/meta-yocto-bsp/conf/machine/genericx86.conf
@@ -6,6 +6,5 @@ DEFAULTTUNE ?= "core2-32"
require conf/machine/include/x86/tune-core2.inc
require conf/machine/include/genericx86-common.inc
-MACHINE_ESSENTIAL_EXTRA_RRECOMMENDS += "gma500-gfx-check"
#For runqemu
QB_SYSTEM_NAME = "qemu-system-i386"
diff --git a/meta-yocto-bsp/conf/machine/include/genericx86-common.inc b/meta-yocto-bsp/conf/machine/include/genericx86-common.inc
index 8c5e5522bc..50a233dd8f 100644
--- a/meta-yocto-bsp/conf/machine/include/genericx86-common.inc
+++ b/meta-yocto-bsp/conf/machine/include/genericx86-common.inc
@@ -2,7 +2,7 @@ include conf/machine/include/x86/x86-base.inc
require conf/machine/include/x86/qemuboot-x86.inc
MACHINE_FEATURES += "wifi efi pcbios"
-PREFERRED_VERSION_linux-yocto ?= "5.15%"
+PREFERRED_VERSION_linux-yocto ?= "6.6%"
PREFERRED_PROVIDER_virtual/kernel ?= "linux-yocto"
PREFERRED_PROVIDER_virtual/xserver ?= "xserver-xorg"
XSERVER ?= "${XSERVER_X86_BASE} \
@@ -16,10 +16,6 @@ XSERVER ?= "${XSERVER_X86_BASE} \
MACHINE_EXTRA_RRECOMMENDS += "kernel-modules linux-firmware"
-GLIBC_ADDONS = "nptl"
-
-EXTRA_OECONF:append:pn-matchbox-panel-2 = " --with-battery=acpi"
-
IMAGE_FSTYPES += "wic wic.bmap"
WKS_FILE ?= "genericx86.wks.in"
EFI_PROVIDER ??= "grub-efi"
diff --git a/meta-yocto-bsp/lib/oeqa/controllers/edgeroutertarget.py b/meta-yocto-bsp/lib/oeqa/controllers/edgeroutertarget.py
deleted file mode 100644
index e31670db48..0000000000
--- a/meta-yocto-bsp/lib/oeqa/controllers/edgeroutertarget.py
+++ /dev/null
@@ -1,89 +0,0 @@
-# Copyright (C) 2014 Intel Corporation
-#
-# Released under the MIT license (see COPYING.MIT)
-
-# This module adds support to testimage.bbclass to deploy images and run
-# tests on a Ubiquiti Networks EdgeRouter Lite. The device must be set up
-# to boot into the master image already - the easiest way to do that is as
-# follows:
-#
-# 1. Take out the internal USB drive and plug it into your PC
-# 2. Repartition the USB drive so that you have three partitions in this
-# order:
-# 1: vfat, labelled "boot" (it will need to be formatted with mkfs.vfat
-# for this to be possible, since FAT partitions formatted under
-# DOS/Windows will only support uppercase labels)
-# 2: ext3 (for master image) labelled "testmaster"
-# 3: ext3 (for image under test) labelled "testrootfs"
-# 3. Copy the kernel to be used by the master image to the FAT partition
-# (it should be named "vmlinux.64" with the factory u-boot configuration)
-# 4. Install the master image onto the "testmaster" ext3 partition. If
-# you do this by just extracting the contents of an image onto the
-# partition, you will also likely need to create the master image marker
-# file /etc/masterimage within this partition so that we can tell when
-# we're booted into it that it is the master image.
-# 5. Put the USB drive back into the device, and ensure the console port
-# and first ethernet port are connected before powering on
-#
-# TEST_SERIALCONTROL_CMD will need to be set in local.conf so that we can
-# interact with u-boot over the serial console port.
-
-import os
-import bb
-import time
-import subprocess
-import sys
-import pexpect
-
-from oeqa.controllers.controllerimage import ControllerImageHardwareTarget
-
-
-class EdgeRouterTarget(ControllerImageHardwareTarget):
-
- def __init__(self, d):
- super(EdgeRouterTarget, self).__init__(d)
-
- self.image_fstype = self.get_image_fstype(d)
- self.deploy_cmds = [
- 'mount -L boot /boot',
- 'mkdir -p /mnt/testrootfs',
- 'mount -L testrootfs /mnt/testrootfs',
- 'cp ~/test-kernel /boot',
- 'rm -rf /mnt/testrootfs/*',
- 'tar xvf ~/test-rootfs.%s -C /mnt/testrootfs' % self.image_fstype
- ]
- if not self.serialcontrol_cmd:
- bb.fatal("This TEST_TARGET needs a TEST_SERIALCONTROL_CMD defined in local.conf.")
-
-
- def _deploy(self):
- self.controller.run("umount /mnt/testrootfs;")
- self.controller.ignore_status = False
- self.controller.copy_to(self.kernel, "~/test-kernel")
- self.controller.copy_to(self.rootfs, "~/test-rootfs.%s" % self.image_fstype)
- for cmd in self.deploy_cmds:
- self.controller.run(cmd)
-
- def _start(self, params=None):
- self.power_cycle(self.controller)
- try:
- serialconn = pexpect.spawn(self.serialcontrol_cmd, env=self.origenv, logfile=sys.stdout)
- serialconn.expect("U-Boot")
- serialconn.sendline("a")
- serialconn.expect("Octeon ubnt_e100#")
- serialconn.sendline("fatload usb 0:1 $loadaddr test-kernel")
- serialconn.expect(" bytes read")
- serialconn.expect("Octeon ubnt_e100#")
- serialconn.sendline("bootoctlinux $loadaddr coremask=0x3 root=/dev/sda3 rw rootwait mtdparts=phys_mapped_flash:512k(boot0),512k(boot1),64k@3072k(eeprom)")
- serialconn.expect("login:", timeout=120)
- serialconn.close()
- except pexpect.ExceptionPexpect as e:
- bb.fatal('Serial interaction failed: %s' % str(e))
-
- def _wait_until_booted(self):
- try:
- serialconn = pexpect.spawn(self.serialcontrol_cmd, env=self.origenv, logfile=sys.stdout)
- serialconn.expect("login:", timeout=120)
- serialconn.close()
- except pexpect.ExceptionPexpect as e:
- bb.fatal('Serial interaction failed: %s' % str(e))
diff --git a/meta-yocto-bsp/lib/oeqa/runtime/cases/parselogs-ignores-beaglebone-yocto.txt b/meta-yocto-bsp/lib/oeqa/runtime/cases/parselogs-ignores-beaglebone-yocto.txt
new file mode 100644
index 0000000000..b0d98418d1
--- /dev/null
+++ b/meta-yocto-bsp/lib/oeqa/runtime/cases/parselogs-ignores-beaglebone-yocto.txt
@@ -0,0 +1,4 @@
+# These should be reviewed to see if they are still needed
+l4_wkup_cm
+Failed to make EGL context current
+glamor initialization failed \ No newline at end of file
diff --git a/meta-yocto-bsp/lib/oeqa/runtime/cases/parselogs-ignores-genericx86-64.txt b/meta-yocto-bsp/lib/oeqa/runtime/cases/parselogs-ignores-genericx86-64.txt
new file mode 100644
index 0000000000..9a655564cd
--- /dev/null
+++ b/meta-yocto-bsp/lib/oeqa/runtime/cases/parselogs-ignores-genericx86-64.txt
@@ -0,0 +1,7 @@
+# These should be reviewed to see if they are still needed
+Direct firmware load for i915
+Failed to load firmware i915
+Failed to fetch GuC
+Failed to initialize GuC
+Failed to load DMC firmware
+The driver is built-in, so to load the firmware you need to \ No newline at end of file
diff --git a/meta-yocto-bsp/lib/oeqa/selftest/cases/systemd_boot.py b/meta-yocto-bsp/lib/oeqa/selftest/cases/systemd_boot.py
index 57599e19aa..6ce9a3b3f8 100644
--- a/meta-yocto-bsp/lib/oeqa/selftest/cases/systemd_boot.py
+++ b/meta-yocto-bsp/lib/oeqa/selftest/cases/systemd_boot.py
@@ -2,7 +2,7 @@ import os
from oeqa.selftest.case import OESelftestTestCase
from oeqa.core.decorator.depends import OETestDepends
-from oeqa.utils.commands import runCmd, bitbake, get_bb_var, runqemu
+from oeqa.utils.commands import runCmd, bitbake, get_bb_var, get_bb_vars, runqemu
class Systemdboot(OESelftestTestCase):
@@ -21,8 +21,9 @@ class Systemdboot(OESelftestTestCase):
features += 'MACHINE = "genericx86-64"'
self.append_config(features)
- deploydir = get_bb_var('DEPLOY_DIR_IMAGE', "core-image-minimal")
- systemdbootfile = os.path.join(deploydir, 'systemd-bootx64.efi')
+ image = 'core-image-minimal'
+ bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'IMAGE_LINK_NAME'], image)
+ systemdbootfile = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], 'systemd-bootx64.efi')
# Ensure we're actually testing that this gets built and not that
# it was around from an earlier build
@@ -50,8 +51,8 @@ class Systemdboot(OESelftestTestCase):
AutomatedBy: Jose Perez Carranza <jose.perez.carranza at linux-intel.com>
"""
- systemdbootimage = os.path.join(deploydir, 'core-image-minimal-genericx86-64.wic')
- imagebootfile = os.path.join(deploydir, 'bootx64.efi')
+ systemdbootimage = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], '%s.wic' % bb_vars['IMAGE_LINK_NAME'])
+ imagebootfile = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], 'bootx64.efi')
# Clean environment before start the test
if os.path.isfile(imagebootfile):
diff --git a/meta-yocto-bsp/recipes-bsp/gma500-gfx-check/gma500-gfx-check/gma500-gfx-check.conf b/meta-yocto-bsp/recipes-bsp/gma500-gfx-check/gma500-gfx-check/gma500-gfx-check.conf
deleted file mode 100644
index 74d33c871f..0000000000
--- a/meta-yocto-bsp/recipes-bsp/gma500-gfx-check/gma500-gfx-check/gma500-gfx-check.conf
+++ /dev/null
@@ -1,2 +0,0 @@
-# Mimic modprobe's install funcitonality with busybox's modprobe
-install gma500_gfx dmesg | grep gma500_gfx_checked || { /etc/modprobe.d/gma500-gfx-check.sh || modprobe gma500_gfx; }
diff --git a/meta-yocto-bsp/recipes-bsp/gma500-gfx-check/gma500-gfx-check/gma500-gfx-check.sh b/meta-yocto-bsp/recipes-bsp/gma500-gfx-check/gma500-gfx-check/gma500-gfx-check.sh
deleted file mode 100644
index 75cda99f54..0000000000
--- a/meta-yocto-bsp/recipes-bsp/gma500-gfx-check/gma500-gfx-check/gma500-gfx-check.sh
+++ /dev/null
@@ -1,15 +0,0 @@
-#!/bin/sh
-
-# Check for devices we wish to avoid gma500_gfx for
-DEVICES="0x8119 0x4108"
-
-# Checked flag to avoid infinite modprobe
-echo "gma500_gfx_checked" >> /dev/kmsg;
-
-for DEVICE in $DEVICES; do
- if udevadm trigger --subsystem-match=pci --verbose --attr-match=device=$DEVICE | grep "pci" >> /dev/null ; then
- echo "Found $DEVICE, avoiding gma500_gfx module" >> /dev/kmsg;
- exit 0
- fi
-done
-exit 1
diff --git a/meta-yocto-bsp/recipes-bsp/gma500-gfx-check/gma500-gfx-check_1.0.bb b/meta-yocto-bsp/recipes-bsp/gma500-gfx-check/gma500-gfx-check_1.0.bb
deleted file mode 100644
index 6726f68319..0000000000
--- a/meta-yocto-bsp/recipes-bsp/gma500-gfx-check/gma500-gfx-check_1.0.bb
+++ /dev/null
@@ -1,18 +0,0 @@
-SUMMARY = "Intel gma500_gfx fix for certain hardware"
-DESCRIPTION = "Avoid inserting gma500_gfx module for certain hardware devices."
-LICENSE = "GPL-2.0-only"
-LIC_FILES_CHKSUM = "file://${COREBASE}/meta/files/common-licenses/GPL-2.0-only;md5=801f80980d171dd6425610833a22dbe6"
-
-SRC_URI = "file://gma500-gfx-check.conf \
- file://gma500-gfx-check.sh "
-
-do_install(){
- install -d ${D}${sysconfdir}/modprobe.d/
- install -m 755 ${WORKDIR}/gma500-gfx-check.sh ${D}${sysconfdir}/modprobe.d/gma500-gfx-check.sh
- install -m 644 ${WORKDIR}/gma500-gfx-check.conf ${D}${sysconfdir}/modprobe.d/gma500-gfx-check.conf
-}
-
-FILES:${PN}="${sysconfdir}/modprobe.d/gma500-gfx-check.conf \
- ${sysconfdir}/modprobe.d/gma500-gfx-check.sh"
-
-COMPATIBLE_MACHINE = "genericx86"
diff --git a/meta-yocto-bsp/recipes-kernel/linux/linux-yocto-dev.bbappend b/meta-yocto-bsp/recipes-kernel/linux/linux-yocto-dev.bbappend
index a90958f546..5b1b736b1c 100644
--- a/meta-yocto-bsp/recipes-kernel/linux/linux-yocto-dev.bbappend
+++ b/meta-yocto-bsp/recipes-kernel/linux/linux-yocto-dev.bbappend
@@ -1,13 +1,13 @@
KBRANCH:genericx86 = "standard/base"
KBRANCH:genericx86-64 = "standard/base"
-KBRANCH:edgerouter = "standard/edgerouter"
KBRANCH:beaglebone-yocto = "standard/beaglebone"
+KMACHINE:genericarm64 ?= "genericarm64"
KMACHINE:genericx86 ?= "common-pc"
KMACHINE:genericx86-64 ?= "common-pc-64"
KMACHINE:beaglebone-yocto ?= "beaglebone"
+COMPATIBLE_MACHINE:genericarm64 = "genericarm64"
COMPATIBLE_MACHINE:genericx86 = "genericx86"
COMPATIBLE_MACHINE:genericx86-64 = "genericx86-64"
-COMPATIBLE_MACHINE:edgerouter = "edgerouter"
COMPATIBLE_MACHINE:beaglebone-yocto = "beaglebone-yocto"
diff --git a/meta-yocto-bsp/recipes-kernel/linux/linux-yocto_5.10.bbappend b/meta-yocto-bsp/recipes-kernel/linux/linux-yocto_5.10.bbappend
deleted file mode 100644
index 94192afffe..0000000000
--- a/meta-yocto-bsp/recipes-kernel/linux/linux-yocto_5.10.bbappend
+++ /dev/null
@@ -1,23 +0,0 @@
-KBRANCH:genericx86 = "v5.10/standard/base"
-KBRANCH:genericx86-64 = "v5.10/standard/base"
-KBRANCH:edgerouter = "v5.10/standard/edgerouter"
-KBRANCH:beaglebone-yocto = "v5.10/standard/beaglebone"
-
-KMACHINE:genericx86 ?= "common-pc"
-KMACHINE:genericx86-64 ?= "common-pc-64"
-KMACHINE:beaglebone-yocto ?= "beaglebone"
-
-SRCREV_machine:genericx86 ?= "a8b4c628f382412e5e7df5750f2be711df95fa06"
-SRCREV_machine:genericx86-64 ?= "a8b4c628f382412e5e7df5750f2be711df95fa06"
-SRCREV_machine:edgerouter ?= "43577894d2295a92fce760dc403b97527fb55835"
-SRCREV_machine:beaglebone-yocto ?= "8038166b729c192d06f1eb37ab6868a5769f8bc5"
-
-COMPATIBLE_MACHINE:genericx86 = "genericx86"
-COMPATIBLE_MACHINE:genericx86-64 = "genericx86-64"
-COMPATIBLE_MACHINE:edgerouter = "edgerouter"
-COMPATIBLE_MACHINE:beaglebone-yocto = "beaglebone-yocto"
-
-LINUX_VERSION:genericx86 = "5.10.113"
-LINUX_VERSION:genericx86-64 = "5.10.113"
-LINUX_VERSION:edgerouter = "5.10.113"
-LINUX_VERSION:beaglebone-yocto = "5.10.113"
diff --git a/meta-yocto-bsp/recipes-kernel/linux/linux-yocto_5.15.bbappend b/meta-yocto-bsp/recipes-kernel/linux/linux-yocto_5.15.bbappend
deleted file mode 100644
index 85d02a46e4..0000000000
--- a/meta-yocto-bsp/recipes-kernel/linux/linux-yocto_5.15.bbappend
+++ /dev/null
@@ -1,23 +0,0 @@
-KBRANCH:genericx86 = "v5.15/standard/base"
-KBRANCH:genericx86-64 = "v5.15/standard/base"
-KBRANCH:edgerouter = "v5.15/standard/edgerouter"
-KBRANCH:beaglebone-yocto = "v5.15/standard/beaglebone"
-
-KMACHINE:genericx86 ?= "common-pc"
-KMACHINE:genericx86-64 ?= "common-pc-64"
-KMACHINE:beaglebone-yocto ?= "beaglebone"
-
-SRCREV_machine:genericx86 ?= "ebfb1822e9f9726d8c587fc0f60cfed43fa0873e"
-SRCREV_machine:genericx86-64 ?= "ebfb1822e9f9726d8c587fc0f60cfed43fa0873e"
-SRCREV_machine:edgerouter ?= "b978686694c3e41968821d6cc2a2a371fd9c2fb0"
-SRCREV_machine:beaglebone-yocto ?= "4c875cf1376178dfab4913aa1350cab50bb093d3"
-
-COMPATIBLE_MACHINE:genericx86 = "genericx86"
-COMPATIBLE_MACHINE:genericx86-64 = "genericx86-64"
-COMPATIBLE_MACHINE:edgerouter = "edgerouter"
-COMPATIBLE_MACHINE:beaglebone-yocto = "beaglebone-yocto"
-
-LINUX_VERSION:genericx86 = "5.15.36"
-LINUX_VERSION:genericx86-64 = "5.15.36"
-LINUX_VERSION:edgerouter = "5.15.36"
-LINUX_VERSION:beaglebone-yocto = "5.15.36"
diff --git a/meta-yocto-bsp/recipes-kernel/linux/linux-yocto_6.6.bbappend b/meta-yocto-bsp/recipes-kernel/linux/linux-yocto_6.6.bbappend
new file mode 100644
index 0000000000..3f33ec991d
--- /dev/null
+++ b/meta-yocto-bsp/recipes-kernel/linux/linux-yocto_6.6.bbappend
@@ -0,0 +1,21 @@
+COMPATIBLE_MACHINE:genericarm64 = "genericarm64"
+COMPATIBLE_MACHINE:genericx86 = "genericx86"
+COMPATIBLE_MACHINE:genericx86-64 = "genericx86-64"
+COMPATIBLE_MACHINE:beaglebone-yocto = "beaglebone-yocto"
+
+KBRANCH:genericx86 = "v6.6/standard/base"
+KBRANCH:genericx86-64 = "v6.6/standard/base"
+KBRANCH:beaglebone-yocto = "v6.6/standard/beaglebone"
+
+KMACHINE:genericarm64 ?= "genericarm64"
+KMACHINE:genericx86 ?= "common-pc"
+KMACHINE:genericx86-64 ?= "common-pc-64"
+KMACHINE:beaglebone-yocto ?= "beaglebone"
+
+SRCREV_machine:genericx86 ?= "06644f0d7193d7ec39d7fe41939a21953e7a0c65"
+SRCREV_machine:genericx86-64 ?= "06644f0d7193d7ec39d7fe41939a21953e7a0c65"
+SRCREV_machine:beaglebone-yocto ?= "06644f0d7193d7ec39d7fe41939a21953e7a0c65"
+
+LINUX_VERSION:genericx86 = "6.6.21"
+LINUX_VERSION:genericx86-64 = "6.6.21"
+LINUX_VERSION:beaglebone-yocto = "6.6.21"
diff --git a/meta-yocto-bsp/wic/edgerouter.wks b/meta-yocto-bsp/wic/edgerouter.wks
deleted file mode 100644
index 7176fe436b..0000000000
--- a/meta-yocto-bsp/wic/edgerouter.wks
+++ /dev/null
@@ -1,4 +0,0 @@
-# short-description: Create SD card image for Edgerouter
-# long-description: Create a partitioned SD card image for MIPS64 Edgerouter reference hardware.
-part /boot --source bootimg-partition --ondisk sda --fstype=vfat --label boot --active --align 4 --size 16
-part / --source rootfs --ondisk sda --fstype=ext4 --label root --align 4
diff --git a/meta-yocto-bsp/wic/genericarm64.wks.in b/meta-yocto-bsp/wic/genericarm64.wks.in
new file mode 100644
index 0000000000..ee7da87ded
--- /dev/null
+++ b/meta-yocto-bsp/wic/genericarm64.wks.in
@@ -0,0 +1,11 @@
+# short-description: Create an EFI disk image
+# long-description: Creates a partitioned EFI disk image that the user
+# can directly dd to boot media.
+
+part /boot --source bootimg-efi --sourceparams="loader=${EFI_PROVIDER},initrd=${INITRAMFS_IMAGE}-${MACHINE}.${INITRAMFS_FSTYPES}" --label boot --active --align 1024 --use-uuid
+
+part swap --size 44 --label swap --fstype=swap --use-uuid
+
+part / --source rootfs --fstype=ext4 --label root --align 1024 --use-uuid
+
+bootloader --ptable gpt --timeout=5 --append="rootwait rootfstype=ext4"
diff --git a/meta/classes/base.bbclass b/meta/classes-global/base.bbclass
index bdb3ac33c6..0999b42daa 100644
--- a/meta/classes/base.bbclass
+++ b/meta/classes-global/base.bbclass
@@ -1,3 +1,9 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
BB_DEFAULT_TASK ?= "build"
CLASSOVERRIDE ?= "class-target"
@@ -7,33 +13,11 @@ inherit staging
inherit mirrors
inherit utils
inherit utility-tasks
-inherit metadata_scm
inherit logging
-OE_EXTRA_IMPORTS ?= ""
-
-OE_IMPORTS += "os sys time oe.path oe.utils oe.types oe.package oe.packagegroup oe.sstatesig oe.lsb oe.cachedpath oe.license oe.qa oe.reproducible oe.rust ${OE_EXTRA_IMPORTS}"
-OE_IMPORTS[type] = "list"
-
PACKAGECONFIG_CONFARGS ??= ""
-def oe_import(d):
- import sys
-
- bbpath = [os.path.join(dir, "lib") for dir in d.getVar("BBPATH").split(":")]
- sys.path[0:0] = [dir for dir in bbpath if dir not in sys.path]
-
- import oe.data
- for toimport in oe.data.typed_value("OE_IMPORTS", d):
- try:
- # Make a python object accessible from the metadata
- bb.utils._context[toimport.split(".", 1)[0]] = __import__(toimport)
- except AttributeError as e:
- bb.error("Error importing OE modules: %s" % str(e))
- return ""
-
-# We need the oe module name space early (before INHERITs get added)
-OE_IMPORTED := "${@oe_import(d)}"
+inherit metadata_scm
def lsb_distro_identifier(d):
adjust = d.getVar('LSB_DISTRO_ADJUST')
@@ -132,7 +116,7 @@ def setup_hosttools_dir(dest, toolsvar, d, fatal=True):
# /usr/local/bin/ccache/gcc -> /usr/bin/ccache, then which(gcc)
# would return /usr/local/bin/ccache/gcc, but what we need is
# /usr/bin/gcc, this code can check and fix that.
- if "ccache" in srctool:
+ if os.path.islink(srctool) and os.path.basename(os.readlink(srctool)) == 'ccache':
srctool = bb.utils.which(path, tool, executable=True, direction=1)
if srctool:
os.symlink(srctool, desttool)
@@ -142,11 +126,18 @@ def setup_hosttools_dir(dest, toolsvar, d, fatal=True):
if notfound and fatal:
bb.fatal("The following required tools (as specified by HOSTTOOLS) appear to be unavailable in PATH, please install them in order to proceed:\n %s" % " ".join(notfound))
+# We can't use vardepvalue against do_fetch directly since that would overwrite
+# the other task dependencies so we use an indirect function.
+python fetcher_hashes_dummyfunc() {
+ return
+}
+fetcher_hashes_dummyfunc[vardepvalue] = "${@bb.fetch.get_hashvalue(d)}"
+
addtask fetch
do_fetch[dirs] = "${DL_DIR}"
do_fetch[file-checksums] = "${@bb.fetch.get_checksum_file_list(d)}"
do_fetch[file-checksums] += " ${@get_lic_checksum_file_list(d)}"
-do_fetch[vardeps] += "SRCREV"
+do_fetch[prefuncs] += "fetcher_hashes_dummyfunc"
do_fetch[network] = "1"
python base_do_fetch() {
@@ -208,6 +199,7 @@ addtask do_deploy_source_date_epoch_setscene
addtask do_deploy_source_date_epoch before do_configure after do_patch
python create_source_date_epoch_stamp() {
+ # Version: 1
source_date_epoch = oe.reproducible.get_source_date_epoch(d, d.getVar('S'))
oe.reproducible.epochfile_write(source_date_epoch, d.getVar('SDE_FILE'), d)
}
@@ -217,11 +209,8 @@ def get_source_date_epoch_value(d):
return oe.reproducible.epochfile_read(d.getVar('SDE_FILE'), d)
def get_layers_branch_rev(d):
- layers = (d.getVar("BBLAYERS") or "").split()
- layers_branch_rev = ["%-20s = \"%s:%s\"" % (os.path.basename(i), \
- base_get_metadata_git_branch(i, None).strip(), \
- base_get_metadata_git_revision(i, None)) \
- for i in layers]
+ revisions = oe.buildcfg.get_layer_revisions(d)
+ layers_branch_rev = ["%-20s = \"%s:%s\"" % (r[1], r[2], r[3]) for r in revisions]
i = len(layers_branch_rev)-1
p1 = layers_branch_rev[i].find("=")
s1 = layers_branch_rev[i][p1:]
@@ -306,7 +295,7 @@ python base_eventhandler() {
bb.plain('\n%s\n%s\n' % (statusheader, '\n'.join(statuslines)))
# This code is to silence warnings where the SDK variables overwrite the
- # target ones and we'd see dulpicate key names overwriting each other
+ # target ones and we'd see duplicate key names overwriting each other
# for various PREFERRED_PROVIDERS
if isinstance(e, bb.event.RecipePreFinalise):
if d.getVar("TARGET_PREFIX") == d.getVar("SDK_PREFIX"):
@@ -369,17 +358,13 @@ base_do_compile() {
addtask install after do_compile
do_install[dirs] = "${B}"
-# Remove and re-create ${D} so that is it guaranteed to be empty
+# Remove and re-create ${D} so that it is guaranteed to be empty
do_install[cleandirs] = "${D}"
base_do_install() {
:
}
-base_do_package() {
- :
-}
-
addtask build after do_populate_sysroot
do_build[noexec] = "1"
do_build[recrdeptask] += "do_deploy"
@@ -538,12 +523,12 @@ python () {
check_license_format(d)
unmatched_license_flags = check_license_flags(d)
if unmatched_license_flags:
- if len(unmatched_license_flags) == 1:
- message = "because it has a restricted license '{0}'. Which is not listed in LICENSE_FLAGS_ACCEPTED".format(unmatched_license_flags[0])
- else:
- message = "because it has restricted licenses {0}. Which are not listed in LICENSE_FLAGS_ACCEPTED".format(
- ", ".join("'{0}'".format(f) for f in unmatched_license_flags))
- bb.debug(1, "Skipping %s %s" % (pn, message))
+ for unmatched in unmatched_license_flags:
+ message = "Has a restricted license '%s' which is not listed in your LICENSE_FLAGS_ACCEPTED." % unmatched
+ details = d.getVarFlag("LICENSE_FLAGS_DETAILS", unmatched)
+ if details:
+ message += "\n" + details
+ bb.debug(1, "Skipping %s: %s" % (pn, message))
raise bb.parse.SkipRecipe(message)
# If we're building a target package we need to use fakeroot (pseudo)
@@ -596,9 +581,9 @@ python () {
for lic_exception in exceptions:
if ":" in lic_exception:
- lic_exception.split(":")[0]
+ lic_exception = lic_exception.split(":")[1]
if lic_exception in oe.license.obsolete_license_list():
- bb.fatal("Invalid license %s used in INCOMPATIBLE_LICENSE_EXCEPTIONS" % lic_exception)
+ bb.fatal("Obsolete license %s used in INCOMPATIBLE_LICENSE_EXCEPTIONS" % lic_exception)
pkgs = d.getVar('PACKAGES').split()
skipped_pkgs = {}
@@ -628,7 +613,6 @@ python () {
bb.debug(1, "Skipping recipe %s because of incompatible license(s): %s" % (pn, ' '.join(incompatible_lic)))
raise bb.parse.SkipRecipe("it has incompatible license(s): %s" % ' '.join(incompatible_lic))
- needsrcrev = False
srcuri = d.getVar('SRC_URI')
for uri_string in srcuri.split():
uri = bb.fetch.URI(uri_string)
@@ -641,23 +625,16 @@ python () {
# Svn packages should DEPEND on subversion-native
if uri.scheme == "svn":
- needsrcrev = True
d.appendVarFlag('do_fetch', 'depends', ' subversion-native:do_populate_sysroot')
# Git packages should DEPEND on git-native
elif uri.scheme in ("git", "gitsm"):
- needsrcrev = True
d.appendVarFlag('do_fetch', 'depends', ' git-native:do_populate_sysroot')
# Mercurial packages should DEPEND on mercurial-native
elif uri.scheme == "hg":
- needsrcrev = True
d.appendVar("EXTRANATIVEPATH", ' python3-native ')
- d.appendVarFlag('do_fetch', 'depends', ' mercurial-native:do_populate_sysroot')
-
- # Perforce packages support SRCREV = "${AUTOREV}"
- elif uri.scheme == "p4":
- needsrcrev = True
+ d.appendVarFlag('do_fetch', 'depends', ' mercurial-native:do_populate_sysroot ca-certificates-native:do_populate_sysroot')
# OSC packages should DEPEND on osc-native
elif uri.scheme == "osc":
@@ -667,7 +644,6 @@ python () {
d.appendVarFlag('do_fetch', 'depends', ' nodejs-native:do_populate_sysroot')
elif uri.scheme == "repo":
- needsrcrev = True
d.appendVarFlag('do_fetch', 'depends', ' repo-native:do_populate_sysroot')
# *.lz4 should DEPEND on lz4-native for unpacking
@@ -698,20 +674,9 @@ python () {
elif path.endswith('.deb'):
d.appendVarFlag('do_unpack', 'depends', ' xz-native:do_populate_sysroot')
- if needsrcrev:
- d.setVar("SRCPV", "${@bb.fetch2.get_srcrev(d)}")
-
- # Gather all named SRCREVs to add to the sstate hash calculation
- # This anonymous python snippet is called multiple times so we
- # need to be careful to not double up the appends here and cause
- # the base hash to mismatch the task hash
- for uri in srcuri.split():
- parm = bb.fetch.decodeurl(uri)[5]
- uri_names = parm.get("name", "").split(",")
- for uri_name in filter(None, uri_names):
- srcrev_name = "SRCREV_{}".format(uri_name)
- if srcrev_name not in (d.getVarFlag("do_fetch", "vardeps") or "").split():
- d.appendVarFlag("do_fetch", "vardeps", " {}".format(srcrev_name))
+ # *.7z should DEPEND on p7zip-native for unpacking
+ elif path.endswith('.7z'):
+ d.appendVarFlag('do_unpack', 'depends', ' p7zip-native:do_populate_sysroot')
set_packagetriplet(d)
@@ -781,4 +746,4 @@ python do_cleanall() {
do_cleanall[nostamp] = "1"
-EXPORT_FUNCTIONS do_fetch do_unpack do_configure do_compile do_install do_package
+EXPORT_FUNCTIONS do_fetch do_unpack do_configure do_compile do_install
diff --git a/meta/classes/buildstats.bbclass b/meta/classes-global/buildstats.bbclass
index 0de605200a..f49a67aa4f 100644
--- a/meta/classes/buildstats.bbclass
+++ b/meta/classes-global/buildstats.bbclass
@@ -1,3 +1,9 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
BUILDSTATS_BASE = "${TMPDIR}/buildstats/"
################################################################################
@@ -285,7 +291,8 @@ python runqueue_stats () {
if system_stats:
# Ensure that we sample at important events.
done = isinstance(e, bb.event.BuildCompleted)
- system_stats.sample(e, force=done)
+ if system_stats.sample(e, force=done):
+ d.setVar('_buildstats_system_stats', system_stats)
if done:
system_stats.close()
d.delVar('_buildstats_system_stats')
diff --git a/meta/classes/debian.bbclass b/meta/classes-global/debian.bbclass
index 8367be9f37..e2a129d028 100644
--- a/meta/classes/debian.bbclass
+++ b/meta/classes-global/debian.bbclass
@@ -1,3 +1,9 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
# Debian package renaming only occurs when a package is built
# We therefore have to make sure we build all runtime packages
# before building the current package to make the packages runtime
@@ -13,21 +19,6 @@ AUTO_LIBNAME_PKGS = "${PACKAGES}"
inherit package
-DEBIANRDEP = "do_packagedata"
-do_package_write_ipk[deptask] = "${DEBIANRDEP}"
-do_package_write_deb[deptask] = "${DEBIANRDEP}"
-do_package_write_tar[deptask] = "${DEBIANRDEP}"
-do_package_write_rpm[deptask] = "${DEBIANRDEP}"
-do_package_write_ipk[rdeptask] = "${DEBIANRDEP}"
-do_package_write_deb[rdeptask] = "${DEBIANRDEP}"
-do_package_write_tar[rdeptask] = "${DEBIANRDEP}"
-do_package_write_rpm[rdeptask] = "${DEBIANRDEP}"
-
-python () {
- if not d.getVar("PACKAGES"):
- d.setVar("DEBIANRDEP", "")
-}
-
python debian_package_name_hook () {
import glob, copy, stat, errno, re, pathlib, subprocess
diff --git a/meta/classes/devshell.bbclass b/meta/classes-global/devshell.bbclass
index 247d04478c..4c23049cf0 100644
--- a/meta/classes/devshell.bbclass
+++ b/meta/classes-global/devshell.bbclass
@@ -1,9 +1,13 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
inherit terminal
DEVSHELL = "${SHELL}"
-PATH:prepend:task-devshell = "${COREBASE}/scripts/git-intercept:"
-
python do_devshell () {
if d.getVarFlag("do_devshell", "manualfakeroot"):
d.prependVar("DEVSHELL", "pseudo ")
diff --git a/meta/classes/insane.bbclass b/meta/classes-global/insane.bbclass
index 0bc6492c83..e963001d09 100644
--- a/meta/classes/insane.bbclass
+++ b/meta/classes-global/insane.bbclass
@@ -1,3 +1,9 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
# BB Class inspired by ebuild.sh
#
# This class will test files after installation for certain
@@ -20,14 +26,15 @@
# Elect whether a given type of error is a warning or error, they may
# have been set by other files.
-WARN_QA ?= " libdir xorg-driver-abi \
+WARN_QA ?= " libdir xorg-driver-abi buildpaths \
textrel incompatible-license files-invalid \
infodir build-deps src-uri-bad symlink-to-sysroot multilib \
- invalid-packageconfig host-user-contaminated uppercase-pn patch-fuzz \
+ invalid-packageconfig host-user-contaminated uppercase-pn \
mime mime-xdg unlisted-pkg-lics unhandled-features-check \
missing-update-alternatives native-last missing-ptest \
license-exists license-no-generic license-syntax license-format \
license-incompatible license-file-missing obsolete-license \
+ 32bit-time virtual-slash \
"
ERROR_QA ?= "dev-so debug-deps dev-deps debug-files arch pkgconfig la \
perms dep-cmp pkgvarcheck perm-config perm-line perm-link \
@@ -38,9 +45,12 @@ ERROR_QA ?= "dev-so debug-deps dev-deps debug-files arch pkgconfig la \
already-stripped installed-vs-shipped ldflags compile-host-path \
install-host-path pn-overrides unknown-configure-option \
useless-rpaths rpaths staticdev empty-dirs \
+ patch-fuzz \
"
# Add usrmerge QA check based on distro feature
ERROR_QA:append = "${@bb.utils.contains('DISTRO_FEATURES', 'usrmerge', ' usrmerge', '', d)}"
+ERROR_QA:append:layer-core = " patch-status"
+WARN_QA:append:layer-core = " missing-metadata missing-maintainer"
FAKEROOT_QA = "host-user-contaminated"
FAKEROOT_QA[doc] = "QA tests which need to run under fakeroot. If any \
@@ -87,15 +97,14 @@ def package_qa_check_shebang_size(path, name, d, elf, messages):
return
if stanza.startswith(b'#!'):
- #Shebang not found
try:
- stanza = stanza.decode("utf-8")
+ stanza.decode("utf-8")
except UnicodeDecodeError:
#If it is not a text file, it is not a script
return
if len(stanza) > 129:
- oe.qa.add_message(messages, "shebang-size", "%s: %s maximum shebang size exceeded, the maximum size is 128." % (name, package_qa_clean_path(path, d)))
+ oe.qa.add_message(messages, "shebang-size", "%s: %s maximum shebang size exceeded, the maximum size is 128." % (name, package_qa_clean_path(path, d, name)))
return
QAPATHTEST[libexec] = "package_qa_check_libexec"
@@ -107,7 +116,7 @@ def package_qa_check_libexec(path,name, d, elf, messages):
return True
if 'libexec' in path.split(os.path.sep):
- oe.qa.add_message(messages, "libexec", "%s: %s is using libexec please relocate to %s" % (name, package_qa_clean_path(path, d), libexec))
+ oe.qa.add_message(messages, "libexec", "%s: %s is using libexec please relocate to %s" % (name, package_qa_clean_path(path, d, name), libexec))
return False
return True
@@ -199,7 +208,7 @@ def package_qa_check_staticdev(path, name, d, elf, messages):
if not name.endswith("-pic") and not name.endswith("-staticdev") and not name.endswith("-ptest") and path.endswith(".a") and not path.endswith("_nonshared.a") and not '/usr/lib/debug-static/' in path and not '/.debug-static/' in path:
oe.qa.add_message(messages, "staticdev", "non -staticdev package contains static .a library: %s path '%s'" % \
- (name, package_qa_clean_path(path,d, name)))
+ (name, package_qa_clean_path(path, d, name)))
QAPATHTEST[mime] = "package_qa_check_mime"
def package_qa_check_mime(path, name, d, elf, messages):
@@ -210,7 +219,7 @@ def package_qa_check_mime(path, name, d, elf, messages):
if d.getVar("datadir") + "/mime/packages" in path and path.endswith('.xml') and not bb.data.inherits_class("mime", d):
oe.qa.add_message(messages, "mime", "package contains mime types but does not inherit mime: %s path '%s'" % \
- (name, package_qa_clean_path(path,d)))
+ (name, package_qa_clean_path(path, d, name)))
QAPATHTEST[mime-xdg] = "package_qa_check_mime_xdg"
def package_qa_check_mime_xdg(path, name, d, elf, messages):
@@ -230,7 +239,7 @@ def package_qa_check_mime_xdg(path, name, d, elf, messages):
except:
# At least libreoffice installs symlinks with absolute paths that are dangling here.
# We could implement some magic but for few (one) recipes it is not worth the effort so just warn:
- wstr = "%s cannot open %s - is it a symlink with absolute path?\n" % (name, package_qa_clean_path(path,d))
+ wstr = "%s cannot open %s - is it a symlink with absolute path?\n" % (name, package_qa_clean_path(path, d, name))
wstr += "Please check if (linked) file contains key 'MimeType'.\n"
pkgname = name
if name == d.getVar('PN'):
@@ -238,8 +247,8 @@ def package_qa_check_mime_xdg(path, name, d, elf, messages):
wstr += "If yes: add \'inhert mime-xdg\' and \'MIME_XDG_PACKAGES += \"%s\"\' / if no add \'INSANE_SKIP:%s += \"mime-xdg\"\' to recipe." % (pkgname, pkgname)
oe.qa.add_message(messages, "mime-xdg", wstr)
if mime_type_found:
- oe.qa.add_message(messages, "mime-xdg", "package contains desktop file with key 'MimeType' but does not inhert mime-xdg: %s path '%s'" % \
- (name, package_qa_clean_path(path,d)))
+ oe.qa.add_message(messages, "mime-xdg", "%s: contains desktop file with key 'MimeType' but does not inhert mime-xdg: %s" % \
+ (name, package_qa_clean_path(path, d, name)))
def package_qa_check_libdir(d):
"""
@@ -312,8 +321,8 @@ def package_qa_check_dbg(path, name, d, elf, messages):
if not "-dbg" in name and not "-ptest" in name:
if '.debug' in path.split(os.path.sep):
- oe.qa.add_message(messages, "debug-files", "non debug package contains .debug directory: %s path %s" % \
- (name, package_qa_clean_path(path,d)))
+ oe.qa.add_message(messages, "debug-files", "%s: non debug package contains .debug directory %s" % \
+ (name, package_qa_clean_path(path, d, name)))
QAPATHTEST[arch] = "package_qa_check_arch"
def package_qa_check_arch(path,name,d, elf, messages):
@@ -362,7 +371,7 @@ def package_qa_check_arch(path,name,d, elf, messages):
(elf.abiSize(), bits, package_qa_clean_path(path, d, name)))
elif not ((littleendian == elf.isLittleEndian()) or is_bpf):
oe.qa.add_message(messages, "arch", "Endiannes did not match (%d, expected %d) in %s" % \
- (elf.isLittleEndian(), littleendian, package_qa_clean_path(path,d, name)))
+ (elf.isLittleEndian(), littleendian, package_qa_clean_path(path, d, name)))
QAPATHTEST[desktop] = "package_qa_check_desktop"
def package_qa_check_desktop(path, name, d, elf, messages):
@@ -444,12 +453,11 @@ def package_qa_check_buildpaths(path, name, d, elf, messages):
Check for build paths inside target files and error if paths are not
explicitly ignored.
"""
- # Ignore .debug files, not interesting
- if path.find(".debug") != -1:
- return
+ import stat
- # Ignore symlinks
- if os.path.islink(path):
+ # Ignore symlinks/devs/fifos
+ mode = os.lstat(path).st_mode
+ if stat.S_ISLNK(mode) or stat.S_ISBLK(mode) or stat.S_ISFIFO(mode) or stat.S_ISCHR(mode) or stat.S_ISSOCK(mode):
return
tmpdir = bytes(d.getVar('TMPDIR'), encoding="utf-8")
@@ -501,6 +509,132 @@ def package_qa_check_symlink_to_sysroot(path, name, d, elf, messages):
trimmed = path.replace(os.path.join (d.getVar("PKGDEST"), name), "")
oe.qa.add_message(messages, "symlink-to-sysroot", "Symlink %s in %s points to TMPDIR" % (trimmed, name))
+QAPATHTEST[32bit-time] = "check_32bit_symbols"
+def check_32bit_symbols(path, packagename, d, elf, messages):
+ """
+ Check that ELF files do not use any 32 bit time APIs from glibc.
+ """
+ thirtytwo_bit_time_archs = {'arm','armeb','mipsarcho32','powerpc','x86'}
+ overrides = set(d.getVar('OVERRIDES').split(':'))
+ if not (thirtytwo_bit_time_archs & overrides):
+ return
+
+ import re
+ # This list is manually constructed by searching the image folder of the
+ # glibc recipe for __USE_TIME_BITS64. There is no good way to do this
+ # automatically.
+ api32 = {
+ # /usr/include/time.h
+ "clock_getres", "clock_gettime", "clock_nanosleep", "clock_settime",
+ "ctime", "ctime_r", "difftime", "gmtime", "gmtime_r", "localtime",
+ "localtime_r", "mktime", "nanosleep", "time", "timegm", "timelocal",
+ "timer_gettime", "timer_settime", "timespec_get", "timespec_getres",
+ # /usr/include/bits/time.h
+ "clock_adjtime",
+ # /usr/include/signal.h
+ "sigtimedwait",
+ # /usr/include/sys/time.h
+ "adjtime",
+ "futimes", "futimesat", "getitimer", "gettimeofday", "lutimes",
+ "setitimer", "settimeofday", "utimes",
+ # /usr/include/sys/timex.h
+ "adjtimex", "ntp_adjtime", "ntp_gettime", "ntp_gettimex",
+ # /usr/include/sys/wait.h
+ "wait3", "wait4",
+ # /usr/include/sys/stat.h
+ "fstat", "fstat64", "fstatat", "fstatat64", "futimens", "lstat",
+ "lstat64", "stat", "stat64", "utimensat",
+ # /usr/include/sys/poll.h
+ "ppoll",
+ # /usr/include/sys/resource.h
+ "getrusage",
+ # /usr/include/sys/ioctl.h
+ "ioctl",
+ # /usr/include/sys/select.h
+ "select", "pselect",
+ # /usr/include/sys/prctl.h
+ "prctl",
+ # /usr/include/sys/epoll.h
+ "epoll_pwait2",
+ # /usr/include/sys/timerfd.h
+ "timerfd_gettime", "timerfd_settime",
+ # /usr/include/sys/socket.h
+ "getsockopt", "recvmmsg", "recvmsg", "sendmmsg", "sendmsg",
+ "setsockopt",
+ # /usr/include/sys/msg.h
+ "msgctl",
+ # /usr/include/sys/sem.h
+ "semctl", "semtimedop",
+ # /usr/include/sys/shm.h
+ "shmctl",
+ # /usr/include/pthread.h
+ "pthread_clockjoin_np", "pthread_cond_clockwait",
+ "pthread_cond_timedwait", "pthread_mutex_clocklock",
+ "pthread_mutex_timedlock", "pthread_rwlock_clockrdlock",
+ "pthread_rwlock_clockwrlock", "pthread_rwlock_timedrdlock",
+ "pthread_rwlock_timedwrlock", "pthread_timedjoin_np",
+ # /usr/include/semaphore.h
+ "sem_clockwait", "sem_timedwait",
+ # /usr/include/threads.h
+ "cnd_timedwait", "mtx_timedlock", "thrd_sleep",
+ # /usr/include/aio.h
+ "aio_cancel", "aio_error", "aio_read", "aio_return", "aio_suspend",
+ "aio_write", "lio_listio",
+ # /usr/include/mqueue.h
+ "mq_timedreceive", "mq_timedsend",
+ # /usr/include/glob.h
+ "glob", "glob64", "globfree", "globfree64",
+ # /usr/include/sched.h
+ "sched_rr_get_interval",
+ # /usr/include/fcntl.h
+ "fcntl", "fcntl64",
+ # /usr/include/utime.h
+ "utime",
+ # /usr/include/ftw.h
+ "ftw", "ftw64", "nftw", "nftw64",
+ # /usr/include/fts.h
+ "fts64_children", "fts64_close", "fts64_open", "fts64_read",
+ "fts64_set", "fts_children", "fts_close", "fts_open", "fts_read",
+ "fts_set",
+ # /usr/include/netdb.h
+ "gai_suspend",
+ }
+
+ ptrn = re.compile(
+ r'''
+ (?P<value>[\da-fA-F]+) \s+
+ (?P<flags>[lgu! ][w ][C ][W ][Ii ][dD ]F) \s+
+ (?P<section>\*UND\*) \s+
+ (?P<alignment>(?P<size>[\da-fA-F]+)) \s+
+ (?P<symbol>
+ ''' +
+ r'(?P<notag>' + r'|'.join(sorted(api32)) + r')' +
+ r'''
+ (@+(?P<tag>GLIBC_\d+\.\d+\S*)))
+ ''', re.VERBOSE
+ )
+
+ # elf is a oe.qa.ELFFile object
+ if elf is not None:
+ phdrs = elf.run_objdump("-tw", d)
+ syms = re.finditer(ptrn, phdrs)
+ usedapis = {sym.group('notag') for sym in syms}
+ if usedapis:
+ elfpath = package_qa_clean_path(path, d, packagename)
+ # Remove any .debug dir, heuristic that probably works
+ # At this point, any symbol information is stripped into the debug
+ # package, so that is the only place we will find them.
+ elfpath = elfpath.replace('.debug/', '')
+ allowed = "32bit-time" in (d.getVar('INSANE_SKIP') or '').split()
+ if not allowed:
+ msgformat = elfpath + " uses 32-bit api '%s'"
+ for sym in usedapis:
+ oe.qa.add_message(messages, '32bit-time', msgformat % sym)
+ oe.qa.add_message(
+ messages, '32bit-time',
+ 'Suppress with INSANE_SKIP = "32bit-time"'
+ )
+
# Check license variables
do_populate_lic[postfuncs] += "populate_lic_qa_checksum"
python populate_lic_qa_checksum() {
@@ -550,7 +684,10 @@ python populate_lic_qa_checksum() {
import hashlib
lineno = 0
license = []
- m = hashlib.new('MD5', usedforsecurity=False)
+ try:
+ m = hashlib.new('MD5', usedforsecurity=False)
+ except TypeError:
+ m = hashlib.new('MD5')
for line in f:
lineno += 1
if (lineno >= beginline):
@@ -630,6 +767,11 @@ def qa_check_staged(path,d):
bb.note("Recipe %s skipping qa checking: pkgconfig" % d.getVar('PN'))
skip_pkgconfig = True
+ skip_shebang_size = False
+ if 'shebang-size' in skip:
+ bb.note("Recipe %s skipping qa checkking: shebang-size" % d.getVar('PN'))
+ skip_shebang_size = True
+
# find all .la and .pc files
# read the content
# and check for stuff that looks wrong
@@ -651,6 +793,13 @@ def qa_check_staged(path,d):
error_msg = "%s failed sanity test (tmpdir) in path %s" % (file,root)
oe.qa.handle_error("pkgconfig", error_msg, d)
+ if not skip_shebang_size:
+ errors = {}
+ package_qa_check_shebang_size(path, "", d, None, errors)
+ for e in errors:
+ oe.qa.handle_error(e, errors[e], d)
+
+
# Run all package-wide warnfuncs and errorfuncs
def package_qa_package(warnfuncs, errorfuncs, package, d):
warnings = {}
@@ -756,13 +905,7 @@ def package_qa_check_rdepends(pkg, pkgdest, skip, taskdeps, packages, d):
if rdep_data and 'PN' in rdep_data and rdep_data['PN'] in taskdeps:
continue
if not rdep_data or not 'PN' in rdep_data:
- pkgdata_dir = d.getVar("PKGDATA_DIR")
- try:
- possibles = os.listdir("%s/runtime-rprovides/%s/" % (pkgdata_dir, rdepend))
- except OSError:
- possibles = []
- for p in possibles:
- rdep_data = oe.packagedata.read_subpkgdata(p, d)
+ for _, rdep_data in oe.packagedata.foreach_runtime_provider_pkgdata(d, rdepend):
if rdep_data and 'PN' in rdep_data and rdep_data['PN'] in taskdeps:
break
if rdep_data and 'PN' in rdep_data and rdep_data['PN'] in taskdeps:
@@ -775,8 +918,12 @@ def package_qa_check_rdepends(pkg, pkgdest, skip, taskdeps, packages, d):
if "file-rdeps" not in skip:
ignored_file_rdeps = set(['/bin/sh', '/usr/bin/env', 'rtld(GNU_HASH)'])
+ if bb.utils.contains('DISTRO_FEATURES', 'usrmerge', True, False, d):
+ ignored_file_rdeps |= set(['/usr/bin/sh'])
if bb.data.inherits_class('nativesdk', d):
ignored_file_rdeps |= set(['/bin/bash', '/usr/bin/perl', 'perl'])
+ if bb.utils.contains('DISTRO_FEATURES', 'usrmerge', True, False, d):
+ ignored_file_rdeps |= set(['/usr/bin/bash'])
# For Saving the FILERDEPENDS
filerdepends = {}
rdep_data = oe.packagedata.read_subpkgdata(pkg, d)
@@ -806,17 +953,17 @@ def package_qa_check_rdepends(pkg, pkgdest, skip, taskdeps, packages, d):
# perl
filerdepends.pop(rdep,None)
- # For Saving the FILERPROVIDES, RPROVIDES and FILES_INFO
- rdep_data = oe.packagedata.read_subpkgdata(rdep, d)
- for key in rdep_data:
- if key.startswith("FILERPROVIDES:") or key.startswith("RPROVIDES:"):
- for subkey in bb.utils.explode_deps(rdep_data[key]):
- filerdepends.pop(subkey,None)
- # Add the files list to the rprovides
- if key.startswith("FILES_INFO:"):
- # Use eval() to make it as a dict
- for subkey in eval(rdep_data[key]):
- filerdepends.pop(subkey,None)
+ for _, rdep_data in oe.packagedata.foreach_runtime_provider_pkgdata(d, rdep, True):
+ for key in rdep_data:
+ if key.startswith("FILERPROVIDES:") or key.startswith("RPROVIDES:"):
+ for subkey in bb.utils.explode_deps(rdep_data[key]):
+ filerdepends.pop(subkey,None)
+ # Add the files list to the rprovides
+ if key.startswith("FILES_INFO:"):
+ # Use eval() to make it as a dict
+ for subkey in eval(rdep_data[key]):
+ filerdepends.pop(subkey,None)
+
if not filerdepends:
# Break if all the file rdepends are met
break
@@ -970,7 +1117,7 @@ def package_qa_check_host_user(path, name, d, elf, messages):
dest = d.getVar('PKGDEST')
pn = d.getVar('PN')
- home = os.path.join(dest, 'home')
+ home = os.path.join(dest, name, 'home')
if path == home or path.startswith(home + os.sep):
return
@@ -1137,11 +1284,14 @@ python do_package_qa_setscene () {
}
addtask do_package_qa_setscene
-python do_qa_staging() {
- bb.note("QA checking staging")
- qa_check_staged(d.expand('${SYSROOT_DESTDIR}${libdir}'), d)
- oe.qa.exit_with_message_if_errors("QA staging was broken by the package built above", d)
+python do_qa_sysroot() {
+ bb.note("QA checking do_populate_sysroot")
+ sysroot_destdir = d.expand('${SYSROOT_DESTDIR}')
+ for sysroot_dir in d.expand('${SYSROOT_DIRS}').split():
+ qa_check_staged(sysroot_destdir + sysroot_dir, d)
+ oe.qa.exit_with_message_if_errors("do_populate_sysroot for this recipe installed files with QA issues", d)
}
+do_populate_sysroot[postfuncs] += "do_qa_sysroot"
python do_qa_patch() {
import subprocess
@@ -1183,11 +1333,7 @@ python do_qa_patch() {
msg += " devtool modify %s\n" % d.getVar('PN')
msg += " devtool finish --force-patch-refresh %s <layer_path>\n\n" % d.getVar('PN')
msg += "Don't forget to review changes done by devtool!\n"
- if bb.utils.filter('ERROR_QA', 'patch-fuzz', d):
- bb.error(msg)
- elif bb.utils.filter('WARN_QA', 'patch-fuzz', d):
- bb.warn(msg)
- msg = "Patch log indicates that patches do not apply cleanly."
+ msg += "\nPatch log indicates that patches do not apply cleanly."
oe.qa.handle_error("patch-fuzz", msg, d)
# Check if the patch contains a correctly formatted and spelled Upstream-Status
@@ -1195,24 +1341,68 @@ python do_qa_patch() {
from oe import patch
for url in patch.src_patches(d):
- (_, _, fullpath, _, _, _) = bb.fetch.decodeurl(url)
-
- # skip patches not in oe-core
- if '/meta/' not in fullpath:
- continue
-
- content = open(fullpath, encoding='utf-8', errors='ignore').read()
- kinda_status_re = re.compile(r"^.*upstream.*status.*$", re.IGNORECASE | re.MULTILINE)
- strict_status_re = re.compile(r"^Upstream-Status: (Pending|Submitted|Denied|Accepted|Inappropriate|Backport|Inactive-Upstream)( .+)?$", re.MULTILINE)
- match_kinda = kinda_status_re.search(content)
- match_strict = strict_status_re.search(content)
- guidelines = "https://www.openembedded.org/wiki/Commit_Patch_Message_Guidelines#Patch_Header_Recommendations:_Upstream-Status"
-
- if not match_strict:
- if match_kinda:
- bb.error("Malformed Upstream-Status in patch\n%s\nPlease correct according to %s :\n%s" % (fullpath, guidelines, match_kinda.group(0)))
- else:
- bb.error("Missing Upstream-Status in patch\n%s\nPlease add according to %s ." % (fullpath, guidelines))
+ (_, _, fullpath, _, _, _) = bb.fetch.decodeurl(url)
+
+ msg = oe.qa.check_upstream_status(fullpath)
+ if msg:
+ oe.qa.handle_error("patch-status", msg, d)
+
+ ###########################################################################
+ # Check for missing ptests
+ ###########################################################################
+ def match_line_in_files(toplevel, filename_glob, line_regex):
+ import pathlib
+ try:
+ toppath = pathlib.Path(toplevel)
+ for entry in toppath.glob(filename_glob):
+ try:
+ with open(entry, 'r', encoding='utf-8', errors='ignore') as f:
+ for line in f.readlines():
+ if re.match(line_regex, line):
+ return True
+ except FileNotFoundError:
+ # Broken symlink in source
+ pass
+ except FileNotFoundError:
+ # pathlib.Path.glob() might throw this when file/directory
+ # disappear while scanning.
+ bb.note("unimplemented-ptest: FileNotFoundError exception while scanning (disappearing file while scanning?). Check was ignored." % d.getVar('PN'))
+ pass
+ return False
+
+ srcdir = d.getVar('S')
+ if not bb.utils.contains('DISTRO_FEATURES', 'ptest', True, False, d):
+ pass
+ elif bb.data.inherits_class('ptest', d):
+ bb.note("Package %s QA: skipping unimplemented-ptest: ptest implementation detected" % d.getVar('PN'))
+ elif srcdir == d.getVar('WORKDIR'):
+ bb.note("Package %s QA: skipping unimplemented-ptest: This check is not supported for recipe with \"S = \"${WORKDIR}\"" % d.getVar('PN'))
+
+ # Detect perl Test:: based tests
+ elif os.path.exists(os.path.join(srcdir, "t")) and any(filename.endswith('.t') for filename in os.listdir(os.path.join(srcdir, 't'))):
+ oe.qa.handle_error("unimplemented-ptest", "%s: perl Test:: based tests detected" % d.getVar('PN'), d)
+
+ # Detect pytest-based tests
+ elif match_line_in_files(srcdir, "**/*.py", r'\s*(?:import\s*pytest|from\s*pytest)'):
+ oe.qa.handle_error("unimplemented-ptest", "%s: pytest-based tests detected" % d.getVar('PN'), d)
+
+ # Detect meson-based tests
+ elif os.path.exists(os.path.join(srcdir, "meson.build")) and match_line_in_files(srcdir, "**/meson.build", r'\s*test\s*\('):
+ oe.qa.handle_error("unimplemented-ptest", "%s: meson-based tests detected" % d.getVar('PN'), d)
+
+ # Detect cmake-based tests
+ elif os.path.exists(os.path.join(srcdir, "CMakeLists.txt")) and match_line_in_files(srcdir, "**/CMakeLists.txt", r'\s*(?:add_test|enable_testing)\s*\('):
+ oe.qa.handle_error("unimplemented-ptest", "%s: cmake-based tests detected" % d.getVar('PN'), d)
+
+ # Detect autotools-based·tests
+ elif os.path.exists(os.path.join(srcdir, "Makefile.in")) and (match_line_in_files(srcdir, "**/Makefile.in", r'\s*TESTS\s*\+?=') or match_line_in_files(srcdir,"**/*.at",r'.*AT_INIT')):
+ oe.qa.handle_error("unimplemented-ptest", "%s: autotools-based tests detected" % d.getVar('PN'), d)
+
+ # Last resort, detect a test directory in sources
+ elif any(filename.lower() in ["test", "tests"] for filename in os.listdir(srcdir)):
+ oe.qa.handle_error("unimplemented-ptest", "%s: test subdirectory detected" % d.getVar('PN'), d)
+
+ oe.qa.exit_if_errors(d)
}
python do_qa_configure() {
@@ -1307,33 +1497,66 @@ Rerun configure task after fixing this."""
oe.qa.exit_if_errors(d)
}
-def unpack_check_src_uri(pn, d):
- import re
-
- skip = (d.getVar('INSANE_SKIP') or "").split()
- if 'src-uri-bad' in skip:
- bb.note("Recipe %s skipping qa checking: src-uri-bad" % d.getVar('PN'))
- return
-
- if "${PN}" in d.getVar("SRC_URI", False):
- oe.qa.handle_error("src-uri-bad", "%s: SRC_URI uses PN not BPN" % pn, d)
-
- for url in d.getVar("SRC_URI").split():
- if re.search(r"git(hu|la)b\.com/.+/.+/archive/.+", url):
- oe.qa.handle_error("src-uri-bad", "%s: SRC_URI uses unstable GitHub/GitLab archives, convert recipe to use git protocol" % pn, d)
-
python do_qa_unpack() {
src_uri = d.getVar('SRC_URI')
s_dir = d.getVar('S')
if src_uri and not os.path.exists(s_dir):
bb.warn('%s: the directory %s (%s) pointed to by the S variable doesn\'t exist - please set S within the recipe to point to where the source has been unpacked to' % (d.getVar('PN'), d.getVar('S', False), s_dir))
+}
+
+python do_recipe_qa() {
+ import re
+
+ def test_missing_metadata(pn, d):
+ fn = d.getVar("FILE")
+ srcfile = d.getVar('SRC_URI').split()
+ # Check that SUMMARY is not the same as the default from bitbake.conf
+ if d.getVar('SUMMARY') == d.expand("${PN} version ${PV}-${PR}"):
+ oe.qa.handle_error("missing-metadata", "Recipe {} in {} does not contain a SUMMARY. Please add an entry.".format(pn, fn), d)
+ if not d.getVar('HOMEPAGE'):
+ if srcfile and srcfile[0].startswith('file') or not d.getVar('SRC_URI'):
+ # We are only interested in recipes SRC_URI fetched from external sources
+ pass
+ else:
+ oe.qa.handle_error("missing-metadata", "Recipe {} in {} does not contain a HOMEPAGE. Please add an entry.".format(pn, fn), d)
+
+ def test_missing_maintainer(pn, d):
+ fn = d.getVar("FILE")
+ if pn.endswith("-native") or pn.startswith("nativesdk-") or "packagegroup-" in pn or "core-image-ptest-" in pn:
+ return
+ if not d.getVar('RECIPE_MAINTAINER'):
+ oe.qa.handle_error("missing-maintainer", "Recipe {} in {} does not have an assigned maintainer. Please add an entry into meta/conf/distro/include/maintainers.inc.".format(pn, fn), d)
+
+ def test_srcuri(pn, d):
+ skip = (d.getVar('INSANE_SKIP') or "").split()
+ if 'src-uri-bad' in skip:
+ bb.note("Recipe %s skipping qa checking: src-uri-bad" % pn)
+ return
+
+ if "${PN}" in d.getVar("SRC_URI", False):
+ oe.qa.handle_error("src-uri-bad", "%s: SRC_URI uses PN not BPN" % pn, d)
- unpack_check_src_uri(d.getVar('PN'), d)
+ for url in d.getVar("SRC_URI").split():
+ # Search for github and gitlab URLs that pull unstable archives (comment for future greppers)
+ if re.search(r"git(hu|la)b\.com/.+/.+/archive/.+", url) or "//codeload.github.com/" in url:
+ oe.qa.handle_error("src-uri-bad", "%s: SRC_URI uses unstable GitHub/GitLab archives, convert recipe to use git protocol" % pn, d)
+
+ pn = d.getVar('PN')
+ test_missing_metadata(pn, d)
+ test_missing_maintainer(pn, d)
+ test_srcuri(pn, d)
+ oe.qa.exit_if_errors(d)
}
-# The Staging Func, to check all staging
-#addtask qa_staging after do_populate_sysroot before do_build
-do_populate_sysroot[postfuncs] += "do_qa_staging "
+addtask do_recipe_qa before do_fetch do_package_qa do_build
+
+SSTATETASKS += "do_recipe_qa"
+do_recipe_qa[sstate-inputdirs] = ""
+do_recipe_qa[sstate-outputdirs] = ""
+python do_recipe_qa_setscene () {
+ sstate_setscene(d)
+}
+addtask do_recipe_qa_setscene
# Check for patch fuzz
do_patch[postfuncs] += "do_qa_patch "
@@ -1348,7 +1571,7 @@ do_unpack[postfuncs] += "do_qa_unpack"
python () {
import re
-
+
tests = d.getVar('ALL_QA').split()
if "desktop" in tests:
d.appendVar("PACKAGE_DEPENDS", " desktop-file-utils-native")
@@ -1384,6 +1607,13 @@ python () {
if (d.getVar(d.expand('DEPENDS:${PN}'))):
oe.qa.handle_error("pkgvarcheck", "recipe uses DEPENDS:${PN}, should use DEPENDS", d)
+ # virtual/ is meaningless for these variables
+ if "virtual-slash" in (d.getVar("ALL_QA") or "").split():
+ for k in ['RDEPENDS', 'RPROVIDES']:
+ for var in bb.utils.explode_deps(d.getVar(k + ':' + pn) or ""):
+ if var.startswith("virtual/"):
+ oe.qa.handle_error("virtual-slash", "%s is set to %s but the substring 'virtual/' holds no meaning in this context. It only works for build time dependencies, not runtime ones. It is suggested to use 'VIRTUAL-RUNTIME_' variables instead." % (k, var), d)
+
issues = []
if (d.getVar('PACKAGES') or "").split():
for dep in (d.getVar('QADEPENDS') or "").split():
@@ -1406,7 +1636,7 @@ python () {
if bb.data.inherits_class(native_class, d):
inherited_classes = d.getVar('__inherit_cache', False) or []
- needle = os.path.join('classes', native_class)
+ needle = "/" + native_class
bbclassextend = (d.getVar('BBCLASSEXTEND') or '').split()
# BBCLASSEXTEND items are always added in the end
@@ -1419,7 +1649,7 @@ python () {
for class_item in reversed(inherited_classes):
if needle not in class_item:
for extend_item in skip_classes:
- if os.path.join('classes', '%s.bbclass' % extend_item) in class_item:
+ if '/%s.bbclass' % extend_item in class_item:
break
else:
pn = d.getVar('PN')
diff --git a/meta/classes/license.bbclass b/meta/classes-global/license.bbclass
index 0c637e966e..b2e0d3faba 100644
--- a/meta/classes/license.bbclass
+++ b/meta/classes-global/license.bbclass
@@ -1,3 +1,9 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
# Populates LICENSE_DIRECTORY as set in distro config with the license files as set by
# LIC_FILES_CHKSUM.
# TODO:
@@ -23,7 +29,7 @@ python do_populate_lic() {
lic_files_paths = find_license_files(d)
# The base directory we wrangle licenses to
- destdir = os.path.join(d.getVar('LICSSTATEDIR'), d.getVar('PN'))
+ destdir = os.path.join(d.getVar('LICSSTATEDIR'), d.getVar('SSTATE_PKGARCH'), d.getVar('PN'))
copy_license_files(lic_files_paths, destdir)
info = get_recipe_info(d)
with open(os.path.join(destdir, "recipeinfo"), "w") as f:
@@ -84,17 +90,17 @@ def copy_license_files(lic_files_paths, destdir):
os.link(src, dst)
except OSError as err:
if err.errno == errno.EXDEV:
- # Copy license files if hard-link is not possible even if st_dev is the
+ # Copy license files if hardlink is not possible even if st_dev is the
# same on source and destination (docker container with device-mapper?)
canlink = False
else:
raise
- # Only chown if we did hardling, and, we're running under pseudo
+ # Only chown if we did hardlink and we're running under pseudo
if canlink and os.environ.get('PSEUDO_DISABLED') == '0':
os.chown(dst,0,0)
if not canlink:
- begin_idx = int(beginline)-1 if beginline is not None else None
- end_idx = int(endline) if endline is not None else None
+ begin_idx = max(0, int(beginline) - 1) if beginline is not None else None
+ end_idx = max(0, int(endline)) if endline is not None else None
if begin_idx is None and end_idx is None:
shutil.copyfile(src, dst)
else:
@@ -223,7 +229,7 @@ def find_license_files(d):
bb.fatal('%s: %s' % (d.getVar('PF'), exc))
except SyntaxError:
oe.qa.handle_error("license-syntax",
- "%s: Failed to parse it's LICENSE field." % (d.getVar('PF')), d)
+ "%s: Failed to parse LICENSE: %s" % (d.getVar('PF'), d.getVar('LICENSE')), d)
# Add files from LIC_FILES_CHKSUM to list of license files
lic_chksum_paths = defaultdict(OrderedDict)
for path, data in sorted(lic_chksums.items()):
diff --git a/meta/classes/logging.bbclass b/meta/classes-global/logging.bbclass
index a0c94e98c7..ce03abfe42 100644
--- a/meta/classes/logging.bbclass
+++ b/meta/classes-global/logging.bbclass
@@ -1,3 +1,9 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
# The following logging mechanisms are to be used in bash functions of recipes.
# They are intended to map one to one in intention and output format with the
# python recipe logging functions of a similar naming convention: bb.plain(),
diff --git a/meta/classes/mirrors.bbclass b/meta/classes-global/mirrors.bbclass
index ffdccff5fb..862648eec5 100644
--- a/meta/classes/mirrors.bbclass
+++ b/meta/classes-global/mirrors.bbclass
@@ -1,3 +1,9 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
MIRRORS += "\
${DEBIAN_MIRROR} http://snapshot.debian.org/archive/debian/20180310T215105Z/pool \
${DEBIAN_MIRROR} http://snapshot.debian.org/archive/debian-archive/20120328T092752Z/debian/pool \
@@ -61,8 +67,7 @@ osc://.*/.* http://sources.openembedded.org/ \
https?://.*/.* http://sources.openembedded.org/ \
ftp://.*/.* http://sources.openembedded.org/ \
npm://.*/?.* http://sources.openembedded.org/ \
-${CPAN_MIRROR} http://cpan.metacpan.org/ \
-${CPAN_MIRROR} http://search.cpan.org/CPAN/ \
+${CPAN_MIRROR} https://cpan.metacpan.org/ \
https?://downloads.yoctoproject.org/releases/uninative/ https://mirrors.kernel.org/yocto/uninative/ \
https?://downloads.yoctoproject.org/mirror/sources/ https://mirrors.kernel.org/yocto-sources/ \
"
@@ -73,17 +78,27 @@ https?://downloads.yoctoproject.org/mirror/sources/ https://mirrors.kernel.org/y
MIRRORS += "\
git://salsa.debian.org/.* git://salsa.debian.org/PATH;protocol=https \
git://git.gnome.org/.* git://gitlab.gnome.org/GNOME/PATH;protocol=https \
+git://git.infradead.org/.* git://git.infraroot.at/PATH;protocol=https \
git://.*/.* git://HOST/PATH;protocol=https \
git://.*/.* git://HOST/git/PATH;protocol=https \
"
-# Switch glibc and binutils recipes to use shallow clones as they're large and this
+# Switch llvm, glibc and binutils recipes to use shallow clones as they're large and this
# improves user experience whilst allowing the flexibility of git urls in the recipes
BB_GIT_SHALLOW:pn-binutils = "1"
BB_GIT_SHALLOW:pn-binutils-cross-${TARGET_ARCH} = "1"
BB_GIT_SHALLOW:pn-binutils-cross-canadian-${TRANSLATED_TARGET_ARCH} = "1"
BB_GIT_SHALLOW:pn-binutils-cross-testsuite = "1"
BB_GIT_SHALLOW:pn-binutils-crosssdk-${SDK_SYS} = "1"
+BB_GIT_SHALLOW:pn-binutils-native = "1"
+BB_GIT_SHALLOW:pn-nativesdk-binutils = "1"
+
+BB_GIT_SHALLOW:pn-cross-localedef-native = "1"
BB_GIT_SHALLOW:pn-glibc = "1"
+BB_GIT_SHALLOW:pn-glibc-tests = "1"
PREMIRRORS += "git://sourceware.org/git/glibc.git https://downloads.yoctoproject.org/mirror/sources/ \
git://sourceware.org/git/binutils-gdb.git https://downloads.yoctoproject.org/mirror/sources/"
+
+BB_GIT_SHALLOW:pn-llvm = "1"
+BB_GIT_SHALLOW:pn-llvm-native = "1"
+BB_GIT_SHALLOW:pn-nativesdk-llvm = "1"
diff --git a/meta/classes-global/package.bbclass b/meta/classes-global/package.bbclass
new file mode 100644
index 0000000000..aa1eb5e901
--- /dev/null
+++ b/meta/classes-global/package.bbclass
@@ -0,0 +1,616 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+#
+# Packaging process
+#
+# Executive summary: This class iterates over the functions listed in PACKAGEFUNCS
+# Taking D and splitting it up into the packages listed in PACKAGES, placing the
+# resulting output in PKGDEST.
+#
+# There are the following default steps but PACKAGEFUNCS can be extended:
+#
+# a) package_convert_pr_autoinc - convert AUTOINC in PKGV to ${PRSERV_PV_AUTOINC}
+#
+# b) perform_packagecopy - Copy D into PKGD
+#
+# c) package_do_split_locales - Split out the locale files, updates FILES and PACKAGES
+#
+# d) split_and_strip_files - split the files into runtime and debug and strip them.
+# Debug files include debug info split, and associated sources that end up in -dbg packages
+#
+# e) fixup_perms - Fix up permissions in the package before we split it.
+#
+# f) populate_packages - Split the files in PKGD into separate packages in PKGDEST/<pkgname>
+# Also triggers the binary stripping code to put files in -dbg packages.
+#
+# g) package_do_filedeps - Collect perfile run-time dependency metadata
+# The data is stores in FILER{PROVIDES,DEPENDS}_file_pkg variables with
+# a list of affected files in FILER{PROVIDES,DEPENDS}FLIST_pkg
+#
+# h) package_do_shlibs - Look at the shared libraries generated and autotmatically add any
+# dependencies found. Also stores the package name so anyone else using this library
+# knows which package to depend on.
+#
+# i) package_do_pkgconfig - Keep track of which packages need and provide which .pc files
+#
+# j) read_shlibdeps - Reads the stored shlibs information into the metadata
+#
+# k) package_depchains - Adds automatic dependencies to -dbg and -dev packages
+#
+# l) emit_pkgdata - saves the packaging data into PKGDATA_DIR for use in later
+# packaging steps
+
+inherit packagedata
+inherit chrpath
+inherit package_pkgdata
+inherit insane
+
+PKGD = "${WORKDIR}/package"
+PKGDEST = "${WORKDIR}/packages-split"
+
+LOCALE_SECTION ?= ''
+
+ALL_MULTILIB_PACKAGE_ARCHS = "${@all_multilib_tune_values(d, 'PACKAGE_ARCHS')}"
+
+# rpm is used for the per-file dependency identification
+# dwarfsrcfiles is used to determine the list of debug source files
+PACKAGE_DEPENDS += "rpm-native dwarfsrcfiles-native"
+
+# If your postinstall can execute at rootfs creation time rather than on
+# target but depends on a native/cross tool in order to execute, you need to
+# list that tool in PACKAGE_WRITE_DEPS. Target package dependencies belong
+# in the package dependencies as normal, this is just for native/cross support
+# tools at rootfs build time.
+PACKAGE_WRITE_DEPS ??= ""
+
+def legitimize_package_name(s):
+ return oe.package.legitimize_package_name(s)
+
+def do_split_packages(d, root, file_regex, output_pattern, description, postinst=None, recursive=False, hook=None, extra_depends=None, aux_files_pattern=None, postrm=None, allow_dirs=False, prepend=False, match_path=False, aux_files_pattern_verbatim=None, allow_links=False, summary=None):
+ """
+ Used in .bb files to split up dynamically generated subpackages of a
+ given package, usually plugins or modules.
+
+ Arguments:
+ root -- the path in which to search
+ file_regex -- regular expression to match searched files. Use
+ parentheses () to mark the part of this expression
+ that should be used to derive the module name (to be
+ substituted where %s is used in other function
+ arguments as noted below)
+ output_pattern -- pattern to use for the package names. Must include %s.
+ description -- description to set for each package. Must include %s.
+ postinst -- postinstall script to use for all packages (as a
+ string)
+ recursive -- True to perform a recursive search - default False
+ hook -- a hook function to be called for every match. The
+ function will be called with the following arguments
+ (in the order listed):
+ f: full path to the file/directory match
+ pkg: the package name
+ file_regex: as above
+ output_pattern: as above
+ modulename: the module name derived using file_regex
+ extra_depends -- extra runtime dependencies (RDEPENDS) to be set for
+ all packages. The default value of None causes a
+ dependency on the main package (${PN}) - if you do
+ not want this, pass '' for this parameter.
+ aux_files_pattern -- extra item(s) to be added to FILES for each
+ package. Can be a single string item or a list of
+ strings for multiple items. Must include %s.
+ postrm -- postrm script to use for all packages (as a string)
+ allow_dirs -- True allow directories to be matched - default False
+ prepend -- if True, prepend created packages to PACKAGES instead
+ of the default False which appends them
+ match_path -- match file_regex on the whole relative path to the
+ root rather than just the file name
+ aux_files_pattern_verbatim -- extra item(s) to be added to FILES for
+ each package, using the actual derived module name
+ rather than converting it to something legal for a
+ package name. Can be a single string item or a list
+ of strings for multiple items. Must include %s.
+ allow_links -- True to allow symlinks to be matched - default False
+ summary -- Summary to set for each package. Must include %s;
+ defaults to description if not set.
+
+ """
+
+ dvar = d.getVar('PKGD')
+ root = d.expand(root)
+ output_pattern = d.expand(output_pattern)
+ extra_depends = d.expand(extra_depends)
+
+ # If the root directory doesn't exist, don't error out later but silently do
+ # no splitting.
+ if not os.path.exists(dvar + root):
+ return []
+
+ ml = d.getVar("MLPREFIX")
+ if ml:
+ if not output_pattern.startswith(ml):
+ output_pattern = ml + output_pattern
+
+ newdeps = []
+ for dep in (extra_depends or "").split():
+ if dep.startswith(ml):
+ newdeps.append(dep)
+ else:
+ newdeps.append(ml + dep)
+ if newdeps:
+ extra_depends = " ".join(newdeps)
+
+
+ packages = d.getVar('PACKAGES').split()
+ split_packages = set()
+
+ if postinst:
+ postinst = '#!/bin/sh\n' + postinst + '\n'
+ if postrm:
+ postrm = '#!/bin/sh\n' + postrm + '\n'
+ if not recursive:
+ objs = os.listdir(dvar + root)
+ else:
+ objs = []
+ for walkroot, dirs, files in os.walk(dvar + root):
+ for file in files:
+ relpath = os.path.join(walkroot, file).replace(dvar + root + '/', '', 1)
+ if relpath:
+ objs.append(relpath)
+
+ if extra_depends == None:
+ extra_depends = d.getVar("PN")
+
+ if not summary:
+ summary = description
+
+ for o in sorted(objs):
+ import re, stat
+ if match_path:
+ m = re.match(file_regex, o)
+ else:
+ m = re.match(file_regex, os.path.basename(o))
+
+ if not m:
+ continue
+ f = os.path.join(dvar + root, o)
+ mode = os.lstat(f).st_mode
+ if not (stat.S_ISREG(mode) or (allow_links and stat.S_ISLNK(mode)) or (allow_dirs and stat.S_ISDIR(mode))):
+ continue
+ on = oe.package.legitimize_package_name(m.group(1))
+ pkg = output_pattern % on
+ split_packages.add(pkg)
+ if not pkg in packages:
+ if prepend:
+ packages = [pkg] + packages
+ else:
+ packages.append(pkg)
+ oldfiles = d.getVar('FILES:' + pkg)
+ newfile = os.path.join(root, o)
+ # These names will be passed through glob() so if the filename actually
+ # contains * or ? (rare, but possible) we need to handle that specially
+ newfile = newfile.replace('*', '[*]')
+ newfile = newfile.replace('?', '[?]')
+ if not oldfiles:
+ the_files = [newfile]
+ if aux_files_pattern:
+ if type(aux_files_pattern) is list:
+ for fp in aux_files_pattern:
+ the_files.append(fp % on)
+ else:
+ the_files.append(aux_files_pattern % on)
+ if aux_files_pattern_verbatim:
+ if type(aux_files_pattern_verbatim) is list:
+ for fp in aux_files_pattern_verbatim:
+ the_files.append(fp % m.group(1))
+ else:
+ the_files.append(aux_files_pattern_verbatim % m.group(1))
+ d.setVar('FILES:' + pkg, " ".join(the_files))
+ else:
+ d.setVar('FILES:' + pkg, oldfiles + " " + newfile)
+ if extra_depends != '':
+ d.appendVar('RDEPENDS:' + pkg, ' ' + extra_depends)
+ if not d.getVar('DESCRIPTION:' + pkg):
+ d.setVar('DESCRIPTION:' + pkg, description % on)
+ if not d.getVar('SUMMARY:' + pkg):
+ d.setVar('SUMMARY:' + pkg, summary % on)
+ if postinst:
+ d.setVar('pkg_postinst:' + pkg, postinst)
+ if postrm:
+ d.setVar('pkg_postrm:' + pkg, postrm)
+ if callable(hook):
+ hook(f, pkg, file_regex, output_pattern, m.group(1))
+
+ d.setVar('PACKAGES', ' '.join(packages))
+ return list(split_packages)
+
+PACKAGE_DEPENDS += "file-native"
+
+python () {
+ if d.getVar('PACKAGES') != '':
+ deps = ""
+ for dep in (d.getVar('PACKAGE_DEPENDS') or "").split():
+ deps += " %s:do_populate_sysroot" % dep
+ if bb.utils.contains('DISTRO_FEATURES', 'minidebuginfo', True, False, d):
+ deps += ' xz-native:do_populate_sysroot'
+ d.appendVarFlag('do_package', 'depends', deps)
+
+ # shlibs requires any DEPENDS to have already packaged for the *.list files
+ d.appendVarFlag('do_package', 'deptask', " do_packagedata")
+}
+
+
+PRSERV_ACTIVE = "${@bool(d.getVar("PRSERV_HOST"))}"
+PRSERV_ACTIVE[vardepvalue] = "${PRSERV_ACTIVE}"
+package_get_auto_pr[vardepsexclude] = "BB_TASKDEPDATA"
+package_get_auto_pr[vardeps] += "PRSERV_ACTIVE"
+python package_get_auto_pr() {
+ import oe.prservice
+
+ def get_do_package_hash(pn):
+ if d.getVar("BB_RUNTASK") != "do_package":
+ taskdepdata = d.getVar("BB_TASKDEPDATA", False)
+ for dep in taskdepdata:
+ if taskdepdata[dep][1] == "do_package" and taskdepdata[dep][0] == pn:
+ return taskdepdata[dep][6]
+ return None
+
+ # Support per recipe PRSERV_HOST
+ pn = d.getVar('PN')
+ host = d.getVar("PRSERV_HOST_" + pn)
+ if not (host is None):
+ d.setVar("PRSERV_HOST", host)
+
+ pkgv = d.getVar("PKGV")
+
+ # PR Server not active, handle AUTOINC
+ if not d.getVar('PRSERV_HOST'):
+ d.setVar("PRSERV_PV_AUTOINC", "0")
+ return
+
+ auto_pr = None
+ pv = d.getVar("PV")
+ version = d.getVar("PRAUTOINX")
+ pkgarch = d.getVar("PACKAGE_ARCH")
+ checksum = get_do_package_hash(pn)
+
+ # If do_package isn't in the dependencies, we can't get the checksum...
+ if not checksum:
+ bb.warn('Task %s requested do_package unihash, but it was not available.' % d.getVar('BB_RUNTASK'))
+ #taskdepdata = d.getVar("BB_TASKDEPDATA", False)
+ #for dep in taskdepdata:
+ # bb.warn('%s:%s = %s' % (taskdepdata[dep][0], taskdepdata[dep][1], taskdepdata[dep][6]))
+ return
+
+ if d.getVar('PRSERV_LOCKDOWN'):
+ auto_pr = d.getVar('PRAUTO_' + version + '_' + pkgarch) or d.getVar('PRAUTO_' + version) or None
+ if auto_pr is None:
+ bb.fatal("Can NOT get PRAUTO from lockdown exported file")
+ d.setVar('PRAUTO',str(auto_pr))
+ return
+
+ try:
+ conn = oe.prservice.prserv_make_conn(d)
+ if conn is not None:
+ if "AUTOINC" in pkgv:
+ srcpv = bb.fetch2.get_srcrev(d)
+ base_ver = "AUTOINC-%s" % version[:version.find(srcpv)]
+ value = conn.getPR(base_ver, pkgarch, srcpv)
+ d.setVar("PRSERV_PV_AUTOINC", str(value))
+
+ auto_pr = conn.getPR(version, pkgarch, checksum)
+ conn.close()
+ except Exception as e:
+ bb.fatal("Can NOT get PRAUTO, exception %s" % str(e))
+ if auto_pr is None:
+ bb.fatal("Can NOT get PRAUTO from remote PR service")
+ d.setVar('PRAUTO',str(auto_pr))
+}
+
+#
+# Package functions suitable for inclusion in PACKAGEFUNCS
+#
+
+python package_setup_pkgv() {
+ pkgv = d.getVar("PKGV")
+ # Expand SRCPV into PKGV if not present
+ srcpv = bb.fetch.get_pkgv_string(d)
+ if srcpv and "+" in pkgv:
+ d.appendVar("PKGV", srcpv)
+ pkgv = d.getVar("PKGV")
+
+ # Adjust pkgv as necessary...
+ if 'AUTOINC' in pkgv:
+ d.setVar("PKGV", pkgv.replace("AUTOINC", "${PRSERV_PV_AUTOINC}"))
+}
+
+
+python package_convert_pr_autoinc() {
+ # Change PRSERV_PV_AUTOINC and EXTENDPRAUTO usage to special values
+ d.setVar('PRSERV_PV_AUTOINC', '@PRSERV_PV_AUTOINC@')
+ d.setVar('EXTENDPRAUTO', '@EXTENDPRAUTO@')
+}
+
+LOCALEBASEPN ??= "${PN}"
+LOCALE_PATHS ?= "${datadir}/locale"
+
+python package_do_split_locales() {
+ oe.package.split_locales(d)
+}
+
+python perform_packagecopy () {
+ import subprocess
+ import shutil
+
+ dest = d.getVar('D')
+ dvar = d.getVar('PKGD')
+
+ # Start by package population by taking a copy of the installed
+ # files to operate on
+ # Preserve sparse files and hard links
+ cmd = 'tar --exclude=./sysroot-only -cf - -C %s -p -S . | tar -xf - -C %s' % (dest, dvar)
+ subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)
+
+ # replace RPATHs for the nativesdk binaries, to make them relocatable
+ if bb.data.inherits_class('nativesdk', d) or bb.data.inherits_class('cross-canadian', d):
+ rpath_replace (dvar, d)
+}
+perform_packagecopy[cleandirs] = "${PKGD}"
+perform_packagecopy[dirs] = "${PKGD}"
+
+python populate_packages () {
+ oe.package.populate_packages(d)
+}
+populate_packages[dirs] = "${D}"
+
+python package_fixsymlinks () {
+ oe.package.process_fixsymlinks(pkgfiles, d)
+}
+
+python package_package_name_hook() {
+ """
+ A package_name_hook function can be used to rewrite the package names by
+ changing PKG. For an example, see debian.bbclass.
+ """
+ pass
+}
+
+EXPORT_FUNCTIONS package_name_hook
+
+
+PKGDESTWORK = "${WORKDIR}/pkgdata"
+
+PKGDATA_VARS = "PN PE PV PR PKGE PKGV PKGR LICENSE DESCRIPTION SUMMARY RDEPENDS RPROVIDES RRECOMMENDS RSUGGESTS RREPLACES RCONFLICTS SECTION PKG ALLOW_EMPTY FILES CONFFILES FILES_INFO PACKAGE_ADD_METADATA pkg_postinst pkg_postrm pkg_preinst pkg_prerm"
+
+python emit_pkgdata() {
+ import oe.packagedata
+ oe.packagedata.emit_pkgdata(pkgfiles, d)
+}
+emit_pkgdata[dirs] = "${PKGDESTWORK}/runtime ${PKGDESTWORK}/runtime-reverse ${PKGDESTWORK}/runtime-rprovides ${PKGDESTWORK}/extended"
+
+ldconfig_postinst_fragment() {
+if [ x"$D" = "x" ]; then
+ if [ -x /sbin/ldconfig ]; then /sbin/ldconfig ; fi
+fi
+}
+
+RPMDEPS = "${STAGING_LIBDIR_NATIVE}/rpm/rpmdeps --alldeps --define '__font_provides %{nil}'"
+
+python package_do_filedeps() {
+ oe.package.process_filedeps(pkgfiles, d)
+}
+
+SHLIBSDIRS = "${WORKDIR_PKGDATA}/${MLPREFIX}shlibs2"
+SHLIBSWORKDIR = "${PKGDESTWORK}/${MLPREFIX}shlibs2"
+
+python package_do_shlibs() {
+ oe.package.process_shlibs(pkgfiles, d)
+}
+
+python package_do_pkgconfig () {
+ oe.package.process_pkgconfig(pkgfiles, d)
+}
+
+python read_shlibdeps () {
+ pkglibdeps = oe.package.read_libdep_files(d)
+
+ packages = d.getVar('PACKAGES').split()
+ for pkg in packages:
+ rdepends = bb.utils.explode_dep_versions2(d.getVar('RDEPENDS:' + pkg) or "")
+ for dep in sorted(pkglibdeps[pkg]):
+ # Add the dep if it's not already there, or if no comparison is set
+ if dep not in rdepends:
+ rdepends[dep] = []
+ for v in pkglibdeps[pkg][dep]:
+ if v not in rdepends[dep]:
+ rdepends[dep].append(v)
+ d.setVar('RDEPENDS:' + pkg, bb.utils.join_deps(rdepends, commasep=False))
+}
+
+python package_depchains() {
+ oe.package.process_depchains(pkgfiles, d)
+}
+
+# Since bitbake can't determine which variables are accessed during package
+# iteration, we need to list them here:
+PACKAGEVARS = "FILES RDEPENDS RRECOMMENDS SUMMARY DESCRIPTION RSUGGESTS RPROVIDES RCONFLICTS PKG ALLOW_EMPTY pkg_postinst pkg_postrm pkg_postinst_ontarget INITSCRIPT_NAME INITSCRIPT_PARAMS DEBIAN_NOAUTONAME ALTERNATIVE PKGE PKGV PKGR USERADD_PARAM GROUPADD_PARAM CONFFILES SYSTEMD_SERVICE LICENSE SECTION pkg_preinst pkg_prerm RREPLACES GROUPMEMS_PARAM SYSTEMD_AUTO_ENABLE SKIP_FILEDEPS PRIVATE_LIBS PACKAGE_ADD_METADATA"
+
+def gen_packagevar(d, pkgvars="PACKAGEVARS"):
+ ret = []
+ pkgs = (d.getVar("PACKAGES") or "").split()
+ vars = (d.getVar(pkgvars) or "").split()
+ for v in vars:
+ ret.append(v)
+ for p in pkgs:
+ for v in vars:
+ ret.append(v + ":" + p)
+
+ # Ensure that changes to INCOMPATIBLE_LICENSE re-run do_package for
+ # affected recipes.
+ ret.append('_exclude_incompatible-%s' % p)
+ return " ".join(ret)
+
+
+# Functions for setting up PKGD
+PACKAGE_PREPROCESS_FUNCS ?= ""
+# Functions which split PKGD up into separate packages
+PACKAGESPLITFUNCS ?= " \
+ package_do_split_locales \
+ populate_packages"
+# Functions which process metadata based on split packages
+PACKAGEFUNCS += " \
+ package_fixsymlinks \
+ package_name_hook \
+ package_do_filedeps \
+ package_do_shlibs \
+ package_do_pkgconfig \
+ read_shlibdeps \
+ package_depchains \
+ emit_pkgdata"
+
+python do_package () {
+ # Change the following version to cause sstate to invalidate the package
+ # cache. This is useful if an item this class depends on changes in a
+ # way that the output of this class changes. rpmdeps is a good example
+ # as any change to rpmdeps requires this to be rerun.
+ # PACKAGE_BBCLASS_VERSION = "5"
+
+ # Init cachedpath
+ global cpath
+ cpath = oe.cachedpath.CachedPath()
+
+ ###########################################################################
+ # Sanity test the setup
+ ###########################################################################
+
+ packages = (d.getVar('PACKAGES') or "").split()
+ if len(packages) < 1:
+ bb.debug(1, "No packages to build, skipping do_package")
+ return
+
+ workdir = d.getVar('WORKDIR')
+ outdir = d.getVar('DEPLOY_DIR')
+ dest = d.getVar('D')
+ dvar = d.getVar('PKGD')
+ pn = d.getVar('PN')
+
+ if not workdir or not outdir or not dest or not dvar or not pn:
+ msg = "WORKDIR, DEPLOY_DIR, D, PN and PKGD all must be defined, unable to package"
+ oe.qa.handle_error("var-undefined", msg, d)
+ return
+
+ bb.build.exec_func("package_setup_pkgv", d)
+ bb.build.exec_func("package_convert_pr_autoinc", d)
+
+ # Check for conflict between renamed packages and existing ones
+ # for each package in PACKAGES, check if it will be renamed to an existing one
+ for p in packages:
+ rename = d.getVar('PKG:%s' % p)
+ if rename and rename in packages:
+ bb.fatal('package "%s" is renamed to "%s" using PKG:%s, but package name already exists' % (p, rename, p))
+
+ ###########################################################################
+ # Optimisations
+ ###########################################################################
+
+ # Continually expanding complex expressions is inefficient, particularly
+ # when we write to the datastore and invalidate the expansion cache. This
+ # code pre-expands some frequently used variables
+
+ def expandVar(x, d):
+ d.setVar(x, d.getVar(x))
+
+ for x in 'PN', 'PV', 'BPN', 'TARGET_SYS', 'EXTENDPRAUTO':
+ expandVar(x, d)
+
+ ###########################################################################
+ # Setup PKGD (from D)
+ ###########################################################################
+
+ bb.build.exec_func("package_prepare_pkgdata", d)
+ bb.build.exec_func("perform_packagecopy", d)
+ for f in (d.getVar('PACKAGE_PREPROCESS_FUNCS') or '').split():
+ bb.build.exec_func(f, d)
+ oe.package.process_split_and_strip_files(d)
+ oe.package.fixup_perms(d)
+
+ ###########################################################################
+ # Split up PKGD into PKGDEST
+ ###########################################################################
+
+ cpath = oe.cachedpath.CachedPath()
+
+ for f in (d.getVar('PACKAGESPLITFUNCS') or '').split():
+ bb.build.exec_func(f, d)
+
+ ###########################################################################
+ # Process PKGDEST
+ ###########################################################################
+
+ # Build global list of files in each split package
+ global pkgfiles
+ pkgfiles = {}
+ packages = d.getVar('PACKAGES').split()
+ pkgdest = d.getVar('PKGDEST')
+ for pkg in packages:
+ pkgfiles[pkg] = []
+ for walkroot, dirs, files in cpath.walk(pkgdest + "/" + pkg):
+ for file in files:
+ pkgfiles[pkg].append(walkroot + os.sep + file)
+
+ for f in (d.getVar('PACKAGEFUNCS') or '').split():
+ bb.build.exec_func(f, d)
+
+ oe.qa.exit_if_errors(d)
+}
+
+do_package[dirs] = "${SHLIBSWORKDIR} ${D}"
+do_package[vardeps] += "${PACKAGE_PREPROCESS_FUNCS} ${PACKAGESPLITFUNCS} ${PACKAGEFUNCS} ${@gen_packagevar(d)}"
+addtask package after do_install
+
+SSTATETASKS += "do_package"
+do_package[cleandirs] = "${PKGDEST} ${PKGDESTWORK}"
+do_package[sstate-plaindirs] = "${PKGD} ${PKGDEST} ${PKGDESTWORK}"
+do_package_setscene[dirs] = "${STAGING_DIR}"
+
+python do_package_setscene () {
+ sstate_setscene(d)
+}
+addtask do_package_setscene
+
+# Copy from PKGDESTWORK to tempdirectory as tempdirectory can be cleaned at both
+# do_package_setscene and do_packagedata_setscene leading to races
+python do_packagedata () {
+ bb.build.exec_func("package_setup_pkgv", d)
+ bb.build.exec_func("package_get_auto_pr", d)
+
+ src = d.expand("${PKGDESTWORK}")
+ dest = d.expand("${WORKDIR}/pkgdata-pdata-input")
+ oe.path.copyhardlinktree(src, dest)
+
+ bb.build.exec_func("packagedata_translate_pr_autoinc", d)
+}
+do_packagedata[cleandirs] += "${WORKDIR}/pkgdata-pdata-input"
+
+# Translate the EXTENDPRAUTO and AUTOINC to the final values
+packagedata_translate_pr_autoinc() {
+ find ${WORKDIR}/pkgdata-pdata-input -type f | xargs --no-run-if-empty \
+ sed -e 's,@PRSERV_PV_AUTOINC@,${PRSERV_PV_AUTOINC},g' \
+ -e 's,@EXTENDPRAUTO@,${EXTENDPRAUTO},g' -i
+}
+
+addtask packagedata before do_build after do_package
+
+SSTATETASKS += "do_packagedata"
+do_packagedata[sstate-inputdirs] = "${WORKDIR}/pkgdata-pdata-input"
+do_packagedata[sstate-outputdirs] = "${PKGDATA_DIR}"
+do_packagedata[stamp-extra-info] = "${MACHINE_ARCH}"
+
+python do_packagedata_setscene () {
+ sstate_setscene(d)
+}
+addtask do_packagedata_setscene
+
diff --git a/meta/classes/package_deb.bbclass b/meta/classes-global/package_deb.bbclass
index a9b8ba0118..1f10b15a00 100644
--- a/meta/classes/package_deb.bbclass
+++ b/meta/classes-global/package_deb.bbclass
@@ -1,6 +1,8 @@
#
# Copyright 2006-2008 OpenedHand Ltd.
#
+# SPDX-License-Identifier: MIT
+#
inherit package
@@ -167,12 +169,12 @@ def deb_write_pkg(pkg, d):
# more fields
- custom_fields_chunk = get_package_additional_metadata("deb", localdata)
+ custom_fields_chunk = oe.packagedata.get_package_additional_metadata("deb", localdata)
if custom_fields_chunk:
ctrlfile.write(custom_fields_chunk)
ctrlfile.write("\n")
- mapping_rename_hook(localdata)
+ oe.packagedata.mapping_rename_hook(localdata)
def debian_cmp_remap(var):
# dpkg does not allow for '(', ')' or ':' in a dependency name
@@ -267,7 +269,7 @@ def deb_write_pkg(pkg, d):
scriptfile.close()
os.chmod(os.path.join(controldir, script), 0o755)
- conffiles_str = ' '.join(get_conffiles(pkg, d))
+ conffiles_str = ' '.join(oe.package.get_conffiles(pkg, d))
if conffiles_str:
conffiles = open(os.path.join(controldir, 'conffiles'), 'w')
for f in conffiles_str.split():
@@ -311,6 +313,10 @@ python () {
deps = ' dpkg-native:do_populate_sysroot virtual/fakeroot-native:do_populate_sysroot'
d.appendVarFlag('do_package_write_deb', 'depends', deps)
d.setVarFlag('do_package_write_deb', 'fakeroot', "1")
+
+ # Needed to ensure PKG_xxx renaming of dependency packages works
+ d.setVarFlag('do_package_write_deb', 'deptask', "do_packagedata")
+ d.setVarFlag('do_package_write_deb', 'rdeptask', "do_packagedata")
}
python do_package_write_deb () {
diff --git a/meta/classes/package_ipk.bbclass b/meta/classes-global/package_ipk.bbclass
index 9fe3c52fae..209568ae5f 100644
--- a/meta/classes/package_ipk.bbclass
+++ b/meta/classes-global/package_ipk.bbclass
@@ -1,3 +1,9 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
inherit package
IMAGE_PKGTYPE ?= "ipk"
@@ -9,7 +15,7 @@ IPKGCONF_SDK_TARGET = "${WORKDIR}/opkg-sdk-target.conf"
PKGWRITEDIRIPK = "${WORKDIR}/deploy-ipks"
# Program to be used to build opkg packages
-OPKGBUILDCMD ??= 'opkg-build -Z xz -a "${XZ_DEFAULTS}"'
+OPKGBUILDCMD ??= 'opkg-build -Z zstd -a "${ZSTD_DEFAULTS}"'
OPKG_ARGS += "--force_postinstall --prefer-arch-to-version"
OPKG_ARGS += "${@['', '--no-install-recommends'][d.getVar("NO_RECOMMENDATIONS") == "1"]}"
@@ -41,6 +47,10 @@ python do_package_ipk () {
do_package_ipk[vardeps] += "ipk_write_pkg"
do_package_ipk[vardepsexclude] = "BB_NUMBER_THREADS"
+# FILE isn't included by default but we want the recipe to change if basename() changes
+IPK_RECIPE_FILE = "${@os.path.basename(d.getVar('FILE'))}"
+IPK_RECIPE_FILE[vardepvalue] = "${IPK_RECIPE_FILE}"
+
def ipk_write_pkg(pkg, d):
import re, copy
import subprocess
@@ -56,7 +66,7 @@ def ipk_write_pkg(pkg, d):
outdir = d.getVar('PKGWRITEDIRIPK')
pkgdest = d.getVar('PKGDEST')
- recipesource = os.path.basename(d.getVar('FILE'))
+ recipesource = d.getVar('IPK_RECIPE_FILE')
localdata = bb.data.createCopy(d)
root = "%s/%s" % (pkgdest, pkg)
@@ -156,12 +166,12 @@ def ipk_write_pkg(pkg, d):
else:
ctrlfile.write(c % tuple(pullData(fs, localdata)))
- custom_fields_chunk = get_package_additional_metadata("ipk", localdata)
+ custom_fields_chunk = oe.packagedata.get_package_additional_metadata("ipk", localdata)
if custom_fields_chunk is not None:
ctrlfile.write(custom_fields_chunk)
ctrlfile.write("\n")
- mapping_rename_hook(localdata)
+ oe.packagedata.mapping_rename_hook(localdata)
def debian_cmp_remap(var):
# In debian '>' and '<' do not mean what it appears they mean
@@ -220,7 +230,7 @@ def ipk_write_pkg(pkg, d):
scriptfile.close()
os.chmod(os.path.join(controldir, script), 0o755)
- conffiles_str = ' '.join(get_conffiles(pkg, d))
+ conffiles_str = ' '.join(oe.package.get_conffiles(pkg, d))
if conffiles_str:
conffiles = open(os.path.join(controldir, 'conffiles'), 'w')
for f in conffiles_str.split():
@@ -267,9 +277,13 @@ addtask do_package_write_ipk_setscene
python () {
if d.getVar('PACKAGES') != '':
- deps = ' opkg-utils-native:do_populate_sysroot virtual/fakeroot-native:do_populate_sysroot xz-native:do_populate_sysroot'
+ deps = ' opkg-utils-native:do_populate_sysroot virtual/fakeroot-native:do_populate_sysroot zstd-native:do_populate_sysroot'
d.appendVarFlag('do_package_write_ipk', 'depends', deps)
d.setVarFlag('do_package_write_ipk', 'fakeroot', "1")
+
+ # Needed to ensure PKG_xxx renaming of dependency packages works
+ d.setVarFlag('do_package_write_ipk', 'deptask', "do_packagedata")
+ d.setVarFlag('do_package_write_ipk', 'rdeptask', "do_packagedata")
}
python do_package_write_ipk () {
diff --git a/meta/classes/package_pkgdata.bbclass b/meta/classes-global/package_pkgdata.bbclass
index a1ea8fc041..f653bd9240 100644
--- a/meta/classes/package_pkgdata.bbclass
+++ b/meta/classes-global/package_pkgdata.bbclass
@@ -1,3 +1,9 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
WORKDIR_PKGDATA = "${WORKDIR}/pkgdata-sysroot"
def package_populate_pkgdata_dir(pkgdatadir, d):
diff --git a/meta/classes/package_rpm.bbclass b/meta/classes-global/package_rpm.bbclass
index e9ff1f7e65..2e3e4e8c79 100644
--- a/meta/classes/package_rpm.bbclass
+++ b/meta/classes-global/package_rpm.bbclass
@@ -1,13 +1,20 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
inherit package
IMAGE_PKGTYPE ?= "rpm"
-RPM="rpm"
-RPMBUILD="rpmbuild"
+RPM = "rpm"
+RPMBUILD = "rpmbuild"
+RPMBUILD_COMPMODE ?= "${@'w19T%d.zstdio' % int(d.getVar('ZSTD_THREADS'))}"
PKGWRITEDIRRPM = "${WORKDIR}/deploy-rpms"
-# Maintaining the perfile dependencies has singificant overhead when writing the
+# Maintaining the perfile dependencies has significant overhead when writing the
# packages. When set, this value merges them for efficiency.
MERGEPERFILEDEPS = "1"
@@ -56,8 +63,8 @@ def write_rpm_perfiledata(srcname, d):
for dep in depends_dict:
ver = depends_dict[dep]
if dep and ver:
- ver = ver.replace("(","")
- ver = ver.replace(")","")
+ ver = ver.replace("(", "")
+ ver = ver.replace(")", "")
outfile.write(dep + " " + ver + " ")
else:
outfile.write(dep + " ")
@@ -96,11 +103,12 @@ def write_rpm_perfiledata(srcname, d):
python write_specfile () {
import oe.packagedata
+ import os,pwd,grp,stat
# append information for logs and patches to %prep
- def add_prep(d,spec_files_bottom):
+ def add_prep(d, spec_files_bottom):
if d.getVarFlag('ARCHIVER_MODE', 'srpm') == '1' and bb.data.inherits_class('archiver', d):
- spec_files_bottom.append('%%prep -n %s' % d.getVar('PN') )
+ spec_files_bottom.append('%%prep -n %s' % d.getVar('PN'))
spec_files_bottom.append('%s' % "echo \"include logs and patches, Please check them in SOURCES\"")
spec_files_bottom.append('')
@@ -113,7 +121,7 @@ python write_specfile () {
source_list = os.listdir(ar_outdir)
source_number = 0
for source in source_list:
- # do_deploy_archives may have already run (from sstate) meaning a .src.rpm may already
+ # do_deploy_archives may have already run (from sstate) meaning a .src.rpm may already
# exist in ARCHIVER_OUTDIR so skip if present.
if source.endswith(".src.rpm"):
continue
@@ -153,7 +161,9 @@ python write_specfile () {
pv = subd['PV']
pkgv = subd['PKGV']
reppv = pkgv.replace('-', '+')
- ver = ver.replace(pv, reppv).replace(pkgv, reppv)
+ if ver.startswith(pv):
+ ver = ver.replace(pv, reppv)
+ ver = ver.replace(pkgv, reppv)
if 'PKGR' in subd:
# Make sure PKGR rather than PR in ver
pr = '-' + subd['PR']
@@ -183,18 +193,35 @@ python write_specfile () {
def walk_files(walkpath, target, conffiles, dirfiles):
# We can race against the ipk/deb backends which create CONTROL or DEBIAN directories
- # when packaging. We just ignore these files which are created in
+ # when packaging. We just ignore these files which are created in
# packages-split/ and not package/
# We have the odd situation where the CONTROL/DEBIAN directory can be removed in the middle of
# of the walk, the isdir() test would then fail and the walk code would assume its a file
# hence we check for the names in files too.
for rootpath, dirs, files in os.walk(walkpath):
+ def get_attr(path):
+ stat_f = os.stat(rootpath + "/" + path, follow_symlinks=False)
+ mode = stat.S_IMODE(stat_f.st_mode)
+ try:
+ owner = pwd.getpwuid(stat_f.st_uid).pw_name
+ except Exception as e:
+ bb.error("Content of /etc/passwd in sysroot:\n{}".format(
+ open(d.getVar("RECIPE_SYSROOT") +"/etc/passwd").read()))
+ raise e
+ try:
+ group = grp.getgrgid(stat_f.st_gid).gr_name
+ except Exception as e:
+ bb.error("Content of /etc/group in sysroot:\n{}".format(
+ open(d.getVar("RECIPE_SYSROOT") +"/etc/group").read()))
+ raise e
+ return "%attr({:o},{},{}) ".format(mode, owner, group)
+
+ def escape_chars(p):
+ return p.replace("%", "%%").replace("\\", "\\\\").replace('"', '\\"')
+
path = rootpath.replace(walkpath, "")
if path.endswith("DEBIAN") or path.endswith("CONTROL"):
continue
- path = path.replace("%", "%%%%%%%%")
- path = path.replace("[", "?")
- path = path.replace("]", "?")
# Treat all symlinks to directories as normal files.
# os.walk() lists them as directories.
@@ -213,29 +240,25 @@ python write_specfile () {
for dir in dirs:
if dir == "CONTROL" or dir == "DEBIAN":
continue
- dir = dir.replace("%", "%%%%%%%%")
- dir = dir.replace("[", "?")
- dir = dir.replace("]", "?")
+ p = path + '/' + dir
# All packages own the directories their files are in...
- target.append('%dir "' + path + '/' + dir + '"')
- else:
+ target.append(get_attr(dir) + '%dir "' + escape_chars(p) + '"')
+ elif path:
# packages own only empty directories or explict directory.
# This will prevent the overlapping of security permission.
- if path and not files and not dirs:
- target.append('%dir "' + path + '"')
- elif path and path in dirfiles:
- target.append('%dir "' + path + '"')
+ attr = get_attr(path)
+ if (not files and not dirs) or path in dirfiles:
+ target.append(attr + '%dir "' + escape_chars(path) + '"')
for file in files:
if file == "CONTROL" or file == "DEBIAN":
continue
- file = file.replace("%", "%%%%%%%%")
- file = file.replace("[", "?")
- file = file.replace("]", "?")
- if conffiles.count(path + '/' + file):
- target.append('%config "' + path + '/' + file + '"')
+ attr = get_attr(file)
+ p = path + '/' + file
+ if conffiles.count(p):
+ target.append(attr + '%config "' + escape_chars(p) + '"')
else:
- target.append('"' + path + '/' + file + '"')
+ target.append(attr + '"' + escape_chars(p) + '"')
# Prevent the prerm/postrm scripts from being run during an upgrade
def wrap_uninstall(scriptvar):
@@ -297,7 +320,7 @@ python write_specfile () {
srcmaintainer = localdata.getVar('MAINTAINER')
srchomepage = localdata.getVar('HOMEPAGE')
srcdescription = localdata.getVar('DESCRIPTION') or "."
- srccustomtagschunk = get_package_additional_metadata("rpm", localdata)
+ srccustomtagschunk = oe.packagedata.get_package_additional_metadata("rpm", localdata)
srcdepends = d.getVar('DEPENDS')
srcrdepends = ""
@@ -339,7 +362,7 @@ python write_specfile () {
localdata.setVar('OVERRIDES', d.getVar("OVERRIDES", False) + ":" + pkg)
- conffiles = get_conffiles(pkg, d)
+ conffiles = oe.package.get_conffiles(pkg, d)
dirfiles = localdata.getVar('DIRFILES')
if dirfiles is not None:
dirfiles = dirfiles.split()
@@ -353,7 +376,7 @@ python write_specfile () {
splitlicense = (localdata.getVar('LICENSE') or "")
splitsection = (localdata.getVar('SECTION') or "")
splitdescription = (localdata.getVar('DESCRIPTION') or ".")
- splitcustomtagschunk = get_package_additional_metadata("rpm", localdata)
+ splitcustomtagschunk = oe.packagedata.get_package_additional_metadata("rpm", localdata)
translate_vers('RDEPENDS', localdata)
translate_vers('RRECOMMENDS', localdata)
@@ -363,7 +386,7 @@ python write_specfile () {
translate_vers('RCONFLICTS', localdata)
# Map the dependencies into their final form
- mapping_rename_hook(localdata)
+ oe.packagedata.mapping_rename_hook(localdata)
splitrdepends = localdata.getVar('RDEPENDS') or ""
splitrrecommends = localdata.getVar('RRECOMMENDS') or ""
@@ -443,9 +466,9 @@ python write_specfile () {
rprovides = bb.utils.explode_dep_versions2(splitrprovides)
rreplaces = bb.utils.explode_dep_versions2(splitrreplaces)
for dep in rreplaces:
- if not dep in robsoletes:
+ if dep not in robsoletes:
robsoletes[dep] = rreplaces[dep]
- if not dep in rprovides:
+ if dep not in rprovides:
rprovides[dep] = rreplaces[dep]
splitrobsoletes = bb.utils.join_deps(robsoletes, commasep=False)
splitrprovides = bb.utils.join_deps(rprovides, commasep=False)
@@ -515,8 +538,8 @@ python write_specfile () {
spec_files_bottom.append('')
del localdata
-
- add_prep(d,spec_files_bottom)
+
+ add_prep(d, spec_files_bottom)
spec_preamble_top.append('Summary: %s' % srcsummary)
spec_preamble_top.append('Name: %s' % srcname)
spec_preamble_top.append('Version: %s' % srcversion)
@@ -537,9 +560,9 @@ python write_specfile () {
rprovides = bb.utils.explode_dep_versions2(srcrprovides)
rreplaces = bb.utils.explode_dep_versions2(srcrreplaces)
for dep in rreplaces:
- if not dep in robsoletes:
+ if dep not in robsoletes:
robsoletes[dep] = rreplaces[dep]
- if not dep in rprovides:
+ if dep not in rprovides:
rprovides[dep] = rreplaces[dep]
srcrobsoletes = bb.utils.join_deps(robsoletes, commasep=False)
srcrprovides = bb.utils.join_deps(rprovides, commasep=False)
@@ -631,7 +654,6 @@ python do_package_rpm () {
workdir = d.getVar('WORKDIR')
tmpdir = d.getVar('TMPDIR')
pkgd = d.getVar('PKGD')
- pkgdest = d.getVar('PKGDEST')
if not workdir or not pkgd or not tmpdir:
bb.error("Variables incorrectly set, unable to package")
return
@@ -642,7 +664,7 @@ python do_package_rpm () {
return
# Construct the spec file...
- # If the spec file already exist, and has not been stored into
+ # If the spec file already exist, and has not been stored into
# pseudo's files.db, it maybe cause rpmbuild src.rpm fail,
# so remove it before doing rpmbuild src.rpm.
srcname = d.getVar('PN')
@@ -658,8 +680,8 @@ python do_package_rpm () {
# Setup the rpmbuild arguments...
rpmbuild = d.getVar('RPMBUILD')
- targetsys = d.getVar('TARGET_SYS')
- targetvendor = d.getVar('HOST_VENDOR')
+ rpmbuild_compmode = d.getVar('RPMBUILD_COMPMODE')
+ rpmbuild_extra_params = d.getVar('RPMBUILD_EXTRA_PARAMS') or ""
# Too many places in dnf stack assume that arch-independent packages are "noarch".
# Let's not fight against this.
@@ -667,7 +689,6 @@ python do_package_rpm () {
if package_arch == "all":
package_arch = "noarch"
- sdkpkgsuffix = (d.getVar('SDKPKGSUFFIX') or "nativesdk").replace("-", "_")
d.setVar('PACKAGE_ARCH_EXTEND', package_arch)
pkgwritedir = d.expand('${PKGWRITEDIRRPM}/${PACKAGE_ARCH_EXTEND}')
d.setVar('RPM_PKGWRITEDIR', pkgwritedir)
@@ -684,8 +705,8 @@ python do_package_rpm () {
cmd = cmd + " --define '_use_internal_dependency_generator 0'"
cmd = cmd + " --define '_binaries_in_noarch_packages_terminate_build 0'"
cmd = cmd + " --define '_build_id_links none'"
- cmd = cmd + " --define '_binary_payload w19T%d.zstdio'" % int(d.getVar("ZSTD_THREADS"))
- cmd = cmd + " --define '_source_payload w19T%d.zstdio'" % int(d.getVar("ZSTD_THREADS"))
+ cmd = cmd + " --define '_source_payload %s'" % rpmbuild_compmode
+ cmd = cmd + " --define '_binary_payload %s'" % rpmbuild_compmode
cmd = cmd + " --define 'clamp_mtime_to_source_date_epoch 1'"
cmd = cmd + " --define 'use_source_date_epoch_as_buildtime 1'"
cmd = cmd + " --define '_buildhost reproducible'"
@@ -699,6 +720,10 @@ python do_package_rpm () {
cmd = cmd + " --define '_unpackaged_files_terminate_build 0'"
cmd = cmd + " --define 'debug_package %{nil}'"
cmd = cmd + " --define '_tmppath " + workdir + "'"
+ cmd = cmd + " --define '_use_weak_usergroup_deps 1'"
+ cmd = cmd + " --define '_passwd_path " + "/completely/bogus/path" + "'"
+ cmd = cmd + " --define '_group_path " + "/completely/bogus/path" + "'"
+ cmd = cmd + rpmbuild_extra_params
if d.getVarFlag('ARCHIVER_MODE', 'srpm') == '1' and bb.data.inherits_class('archiver', d):
cmd = cmd + " --define '_sourcedir " + d.getVar('ARCHIVER_OUTDIR') + "'"
cmdsrpm = cmd + " --define '_srcrpmdir " + d.getVar('ARCHIVER_RPMOUTDIR') + "'"
@@ -726,6 +751,10 @@ python () {
deps = ' rpm-native:do_populate_sysroot virtual/fakeroot-native:do_populate_sysroot'
d.appendVarFlag('do_package_write_rpm', 'depends', deps)
d.setVarFlag('do_package_write_rpm', 'fakeroot', '1')
+
+ # Needed to ensure PKG_xxx renaming of dependency packages works
+ d.setVarFlag('do_package_write_rpm', 'deptask', "do_packagedata")
+ d.setVarFlag('do_package_write_rpm', 'rdeptask', "do_packagedata")
}
SSTATETASKS += "do_package_write_rpm"
diff --git a/meta/classes/packagedata.bbclass b/meta/classes-global/packagedata.bbclass
index c2760e2bf0..9f72c01d77 100644
--- a/meta/classes/packagedata.bbclass
+++ b/meta/classes-global/packagedata.bbclass
@@ -1,3 +1,9 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
python read_subpackage_metadata () {
import oe.packagedata
diff --git a/meta/classes/patch.bbclass b/meta/classes-global/patch.bbclass
index 8de7025491..e5786b1c9a 100644
--- a/meta/classes/patch.bbclass
+++ b/meta/classes-global/patch.bbclass
@@ -1,4 +1,6 @@
# Copyright (C) 2006 OpenedHand LTD
+#
+# SPDX-License-Identifier: MIT
# Point to an empty file so any user's custom settings don't break things
QUILTRCFILE ?= "${STAGING_ETCDIR_NATIVE}/quiltrc"
@@ -77,9 +79,7 @@ python patch_task_postfunc() {
bb.process.run('git checkout patches', cwd=srcsubdir)
stdout, _ = bb.process.run('git status --porcelain .', cwd=srcsubdir)
if stdout:
- useroptions = []
- oe.patch.GitApplyTree.gitCommandUserOptions(useroptions, d=d)
- bb.process.run('git add .; git %s commit -a -m "Committing changes from %s\n\n%s"' % (' '.join(useroptions), func, oe.patch.GitApplyTree.ignore_commit_prefix + ' - from %s' % func), cwd=srcsubdir)
+ oe.patch.GitApplyTree.commitIgnored("Add changes from %s" % func, dir=srcsubdir, files=['.'], d=d)
}
def src_patches(d, all=False, expand=True):
diff --git a/meta/classes/sanity.bbclass b/meta/classes-global/sanity.bbclass
index b416918013..180c6b77d8 100644
--- a/meta/classes/sanity.bbclass
+++ b/meta/classes-global/sanity.bbclass
@@ -1,4 +1,10 @@
#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+#
# Sanity check the users setup for common misconfigurations
#
@@ -34,9 +40,9 @@ BBLAYERS_CONF_UPDATE_FUNCS += " \
conf/site.conf:SCONF_VERSION:SITE_CONF_VERSION:oecore_update_siteconf \
"
-SANITY_DIFF_TOOL ?= "meld"
+SANITY_DIFF_TOOL ?= "diff -u"
-SANITY_LOCALCONF_SAMPLE ?= "${COREBASE}/meta*/conf/local.conf.sample"
+SANITY_LOCALCONF_SAMPLE ?= "${COREBASE}/meta*/conf/templates/default/local.conf.sample"
python oecore_update_localconf() {
# Check we are using a valid local.conf
current_conf = d.getVar('CONF_VERSION')
@@ -56,7 +62,7 @@ is a good way to visualise the changes."""
raise NotImplementedError(failmsg)
}
-SANITY_SITECONF_SAMPLE ?= "${COREBASE}/meta*/conf/site.conf.sample"
+SANITY_SITECONF_SAMPLE ?= "${COREBASE}/meta*/conf/templates/default/site.conf.sample"
python oecore_update_siteconf() {
# If we have a site.conf, check it's valid
current_sconf = d.getVar('SCONF_VERSION')
@@ -76,7 +82,7 @@ is a good way to visualise the changes."""
raise NotImplementedError(failmsg)
}
-SANITY_BBLAYERCONF_SAMPLE ?= "${COREBASE}/meta*/conf/bblayers.conf.sample"
+SANITY_BBLAYERCONF_SAMPLE ?= "${COREBASE}/meta*/conf/templates/default/bblayers.conf.sample"
python oecore_update_bblayers() {
# bblayers.conf is out of date, so see if we can resolve that
@@ -351,6 +357,7 @@ def check_connectivity(d):
if len(msg) == 0:
msg = "%s.\n" % err
msg += " Please ensure your host's network is configured correctly.\n"
+ msg += " Please ensure CONNECTIVITY_CHECK_URIS is correct and specified URIs are available.\n"
msg += " If your ISP or network is blocking the above URL,\n"
msg += " try with another domain name, for example by setting:\n"
msg += " CONNECTIVITY_CHECK_URIS = \"https://www.example.com/\""
@@ -432,8 +439,7 @@ def check_patch_version(sanity_data):
except subprocess.CalledProcessError as e:
return "Unable to execute patch --version, exit code %d:\n%s\n" % (e.returncode, e.output)
-# Unpatched versions of make 3.82 are known to be broken. See GNU Savannah Bug 30612.
-# Use a modified reproducer from http://savannah.gnu.org/bugs/?30612 to validate.
+# Glibc needs make 4.0 or later, we may as well match at this point
def check_make_version(sanity_data):
import subprocess
@@ -442,31 +448,8 @@ def check_make_version(sanity_data):
except subprocess.CalledProcessError as e:
return "Unable to execute make --version, exit code %d\n%s\n" % (e.returncode, e.output)
version = result.split()[2]
- if bb.utils.vercmp_string_op(version, "3.82", "=="):
- # Construct a test file
- f = open("makefile_test", "w")
- f.write("makefile_test.a: makefile_test_a.c makefile_test_b.c makefile_test.a( makefile_test_a.c makefile_test_b.c)\n")
- f.write("\n")
- f.write("makefile_test_a.c:\n")
- f.write(" touch $@\n")
- f.write("\n")
- f.write("makefile_test_b.c:\n")
- f.write(" touch $@\n")
- f.close()
-
- # Check if make 3.82 has been patched
- try:
- subprocess.check_call(['make', '-f', 'makefile_test'])
- except subprocess.CalledProcessError as e:
- return "Your version of make 3.82 is broken. Please revert to 3.81 or install a patched version.\n"
- finally:
- os.remove("makefile_test")
- if os.path.exists("makefile_test_a.c"):
- os.remove("makefile_test_a.c")
- if os.path.exists("makefile_test_b.c"):
- os.remove("makefile_test_b.c")
- if os.path.exists("makefile_test.a"):
- os.remove("makefile_test.a")
+ if bb.utils.vercmp_string_op(version, "4.0", "<"):
+ return "Please install a make version of 4.0 or later.\n"
if bb.utils.vercmp_string_op(version, "4.2.1", "=="):
distro = oe.lsb.distro_identifier()
@@ -492,7 +475,7 @@ def check_wsl(d):
bb.warn("You are running bitbake under WSLv2, this works properly but you should optimize your VHDX file eventually to avoid running out of storage space")
return None
-# Require at least gcc version 7.5.
+# Require at least gcc version 8.0
#
# This can be fixed on CentOS-7 with devtoolset-6+
# https://www.softwarecollections.org/en/scls/rhscl/devtoolset-6/
@@ -505,13 +488,13 @@ def check_gcc_version(sanity_data):
build_cc, version = oe.utils.get_host_compiler_version(sanity_data)
if build_cc.strip() == "gcc":
- if bb.utils.vercmp_string_op(version, "7.5", "<"):
- return "Your version of gcc is older than 7.5 and will break builds. Please install a newer version of gcc (you could use the project's buildtools-extended-tarball or use scripts/install-buildtools).\n"
+ if bb.utils.vercmp_string_op(version, "8.0", "<"):
+ return "Your version of gcc is older than 8.0 and will break builds. Please install a newer version of gcc (you could use the project's buildtools-extended-tarball or use scripts/install-buildtools).\n"
return None
# Tar version 1.24 and onwards handle overwriting symlinks correctly
# but earlier versions do not; this needs to work properly for sstate
-# Version 1.28 is needed so opkg-build works correctly when reproducibile builds are enabled
+# Version 1.28 is needed so opkg-build works correctly when reproducible builds are enabled
def check_tar_version(sanity_data):
import subprocess
try:
@@ -521,6 +504,14 @@ def check_tar_version(sanity_data):
version = result.split()[3]
if bb.utils.vercmp_string_op(version, "1.28", "<"):
return "Your version of tar is older than 1.28 and does not have the support needed to enable reproducible builds. Please install a newer version of tar (you could use the project's buildtools-tarball from our last release or use scripts/install-buildtools).\n"
+
+ try:
+ result = subprocess.check_output(["tar", "--help"], stderr=subprocess.STDOUT).decode('utf-8')
+ if "--xattrs" not in result:
+ return "Your tar doesn't support --xattrs, please use GNU tar.\n"
+ except subprocess.CalledProcessError as e:
+ return "Unable to execute tar --help, exit code %d\n%s\n" % (e.returncode, e.output)
+
return None
# We use git parameters and functionality only found in 1.7.8 or later
@@ -541,7 +532,7 @@ def check_git_version(sanity_data):
def check_perl_modules(sanity_data):
import subprocess
ret = ""
- modules = ( "Text::ParseWords", "Thread::Queue", "Data::Dumper" )
+ modules = ( "Text::ParseWords", "Thread::Queue", "Data::Dumper", "File::Compare", "File::Copy", "open ':std'", "FindBin" )
errresult = ''
for m in modules:
try:
@@ -631,14 +622,12 @@ def check_sanity_sstate_dir_change(sstate_dir, data):
def check_sanity_version_change(status, d):
# Sanity checks to be done when SANITY_VERSION or NATIVELSBSTRING changes
# In other words, these tests run once in a given build directory and then
- # never again until the sanity version or host distrubution id/version changes.
+ # never again until the sanity version or host distribution id/version changes.
# Check the python install is complete. Examples that are often removed in
- # minimal installations: glib-2.0-natives requries # xml.parsers.expat and icu
- # requires distutils.sysconfig.
+ # minimal installations: glib-2.0-natives requires xml.parsers.expat
try:
import xml.parsers.expat
- import distutils.sysconfig
except ImportError as e:
status.addresult('Your Python 3 is not a full install. Please install the module %s (see the Getting Started guide for further information).\n' % e.name)
@@ -693,7 +682,7 @@ def check_sanity_version_change(status, d):
if i and workdir.startswith(i):
status.addresult("You are building in a path included in PSEUDO_IGNORE_PATHS " + str(i) + " please locate the build outside this path.\n")
- # Check if PSEUDO_IGNORE_PATHS and and paths under pseudo control overlap
+ # Check if PSEUDO_IGNORE_PATHS and paths under pseudo control overlap
pseudoignorepaths = d.getVar('PSEUDO_IGNORE_PATHS', expand=True).split(",")
pseudo_control_dir = "${D},${PKGD},${PKGDEST},${IMAGEROOTFS},${SDK_OUTPUT}"
pseudocontroldir = d.expand(pseudo_control_dir).split(",")
@@ -770,10 +759,10 @@ def check_sanity_everybuild(status, d):
if 0 == os.getuid():
raise_sanity_error("Do not use Bitbake as root.", d)
- # Check the Python version, we now have a minimum of Python 3.6
+ # Check the Python version, we now have a minimum of Python 3.8
import sys
- if sys.hexversion < 0x030600F0:
- status.addresult('The system requires at least Python 3.6 to run. Please update your Python interpreter.\n')
+ if sys.hexversion < 0x030800F0:
+ status.addresult('The system requires at least Python 3.8 to run. Please update your Python interpreter.\n')
# Check the bitbake version meets minimum requirements
minversion = d.getVar('BB_MIN_VERSION')
@@ -851,6 +840,10 @@ def check_sanity_everybuild(status, d):
status.addresult("Please use a umask which allows a+rx and u+rwx\n")
os.umask(omask)
+ # Ensure /tmp is NOT mounted with noexec
+ if os.statvfs("/tmp").f_flag & os.ST_NOEXEC:
+ raise_sanity_error("/tmp shouldn't be mounted with noexec.", d)
+
if d.getVar('TARGET_ARCH') == "arm":
# This path is no longer user-readable in modern (very recent) Linux
try:
@@ -882,7 +875,8 @@ def check_sanity_everybuild(status, d):
mirror_vars = ['MIRRORS', 'PREMIRRORS', 'SSTATE_MIRRORS']
protocols = ['http', 'ftp', 'file', 'https', \
'git', 'gitsm', 'hg', 'osc', 'p4', 'svn', \
- 'bzr', 'cvs', 'npm', 'sftp', 'ssh', 's3', 'az' ]
+ 'bzr', 'cvs', 'npm', 'sftp', 'ssh', 's3', \
+ 'az', 'ftps', 'crate', 'gs']
for mirror_var in mirror_vars:
mirrors = (d.getVar(mirror_var) or '').replace('\\n', ' ').split()
@@ -1014,13 +1008,6 @@ def check_sanity(sanity_data):
if status.messages != "":
raise_sanity_error(sanity_data.expand(status.messages), sanity_data, status.network_error)
-# Create a copy of the datastore and finalise it to ensure appends and
-# overrides are set - the datastore has yet to be finalised at ConfigParsed
-def copy_data(e):
- sanity_data = bb.data.createCopy(e.data)
- sanity_data.finalize()
- return sanity_data
-
addhandler config_reparse_eventhandler
config_reparse_eventhandler[eventmask] = "bb.event.ConfigParsed"
python config_reparse_eventhandler() {
@@ -1031,13 +1018,13 @@ addhandler check_sanity_eventhandler
check_sanity_eventhandler[eventmask] = "bb.event.SanityCheck bb.event.NetworkTest"
python check_sanity_eventhandler() {
if bb.event.getName(e) == "SanityCheck":
- sanity_data = copy_data(e)
+ sanity_data = bb.data.createCopy(e.data)
check_sanity(sanity_data)
if e.generateevents:
sanity_data.setVar("SANITY_USE_EVENTS", "1")
bb.event.fire(bb.event.SanityCheckPassed(), e.data)
elif bb.event.getName(e) == "NetworkTest":
- sanity_data = copy_data(e)
+ sanity_data = bb.data.createCopy(e.data)
if e.generateevents:
sanity_data.setVar("SANITY_USE_EVENTS", "1")
bb.event.fire(bb.event.NetworkTestFailed() if check_connectivity(sanity_data) else bb.event.NetworkTestPassed(), e.data)
diff --git a/meta/classes/sstate.bbclass b/meta/classes-global/sstate.bbclass
index 3513269bca..04539bbb99 100644
--- a/meta/classes/sstate.bbclass
+++ b/meta/classes-global/sstate.bbclass
@@ -1,4 +1,10 @@
-SSTATE_VERSION = "10"
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+SSTATE_VERSION = "12"
SSTATE_ZSTD_CLEVEL ??= "8"
@@ -49,8 +55,6 @@ PV[vardepvalue] = "${PV}"
SSTATE_EXTRAPATH[vardepvalue] = ""
SSTATE_EXTRAPATHWILDCARD[vardepvalue] = ""
-# For multilib rpm the allarch packagegroup files can overwrite (in theory they're identical)
-SSTATE_ALLOW_OVERLAP_FILES = "${DEPLOY_DIR}/licenses/"
# Avoid docbook/sgml catalog warnings for now
SSTATE_ALLOW_OVERLAP_FILES += "${STAGING_ETCDIR_NATIVE}/sgml ${STAGING_DATADIR_NATIVE}/sgml"
# sdk-provides-dummy-nativesdk and nativesdk-buildtools-perl-dummy overlap for different SDKMACHINE
@@ -79,14 +83,15 @@ SSTATE_HASHEQUIV_FILEMAP ?= " \
BB_HASHFILENAME = "False ${SSTATE_PKGSPEC} ${SSTATE_SWSPEC}"
+SSTATE_ARCHS_TUNEPKG ??= "${TUNE_PKGARCH}"
SSTATE_ARCHS = " \
${BUILD_ARCH} \
${BUILD_ARCH}_${ORIGNATIVELSBSTRING} \
${BUILD_ARCH}_${SDK_ARCH}_${SDK_OS} \
${SDK_ARCH}_${SDK_OS} \
- ${SDK_ARCH}_${PACKAGE_ARCH} \
+ ${SDK_ARCH}_${SDK_ARCH}-${SDKPKGSUFFIX} \
allarch \
- ${PACKAGE_ARCH} \
+ ${SSTATE_ARCHS_TUNEPKG} \
${PACKAGE_EXTRA_ARCHS} \
${MACHINE_ARCH}"
SSTATE_ARCHS[vardepsexclude] = "ORIGNATIVELSBSTRING"
@@ -262,7 +267,7 @@ def sstate_install(ss, d):
overlap_allowed = (d.getVar("SSTATE_ALLOW_OVERLAP_FILES") or "").split()
match = []
for f in sharedfiles:
- if os.path.exists(f) and not os.path.islink(f):
+ if os.path.exists(f):
f = os.path.normpath(f)
realmatch = True
for w in overlap_allowed:
@@ -272,36 +277,18 @@ def sstate_install(ss, d):
break
if realmatch:
match.append(f)
- sstate_search_cmd = "grep -rlF '%s' %s --exclude=master.list | sed -e 's:^.*/::'" % (f, d.expand("${SSTATE_MANIFESTS}"))
+ sstate_search_cmd = "grep -rlF '%s' %s --exclude=index-* | sed -e 's:^.*/::'" % (f, d.expand("${SSTATE_MANIFESTS}"))
search_output = subprocess.Popen(sstate_search_cmd, shell=True, stdout=subprocess.PIPE).communicate()[0]
if search_output:
match.append(" (matched in %s)" % search_output.decode('utf-8').rstrip())
else:
match.append(" (not matched to any task)")
if match:
- bb.error("The recipe %s is trying to install files into a shared " \
- "area when those files already exist. Those files and their manifest " \
- "location are:\n %s\nPlease verify which recipe should provide the " \
- "above files.\n\nThe build has stopped, as continuing in this scenario WILL " \
- "break things - if not now, possibly in the future (we've seen builds fail " \
- "several months later). If the system knew how to recover from this " \
- "automatically it would, however there are several different scenarios " \
- "which can result in this and we don't know which one this is. It may be " \
- "you have switched providers of something like virtual/kernel (e.g. from " \
- "linux-yocto to linux-yocto-dev), in that case you need to execute the " \
- "clean task for both recipes and it will resolve this error. It may be " \
- "you changed DISTRO_FEATURES from systemd to udev or vice versa. Cleaning " \
- "those recipes should again resolve this error, however switching " \
- "DISTRO_FEATURES on an existing build directory is not supported - you " \
- "should really clean out tmp and rebuild (reusing sstate should be safe). " \
- "It could be the overlapping files detected are harmless in which case " \
- "adding them to SSTATE_ALLOW_OVERLAP_FILES may be the correct solution. It could " \
- "also be your build is including two different conflicting versions of " \
- "things (e.g. bluez 4 and bluez 5 and the correct solution for that would " \
- "be to resolve the conflict. If in doubt, please ask on the mailing list, " \
- "sharing the error and filelist above." % \
+ bb.fatal("Recipe %s is trying to install files into a shared " \
+ "area when those files already exist. The files and the manifests listing " \
+ "them are:\n %s\n"
+ "Please adjust the recipes so only one recipe provides a given file. " % \
(d.getVar('PN'), "\n ".join(match)))
- bb.fatal("If the above message is too much, the simpler version is you're advised to wipe out tmp and rebuild (reusing sstate is fine). That will likely fix things in most (but not all) cases.")
if ss['fixmedir'] and os.path.exists(ss['fixmedir'] + "/fixmepath.cmd"):
sharedfiles.append(ss['fixmedir'] + "/fixmepath.cmd")
@@ -349,7 +336,7 @@ def sstate_install(ss, d):
for lock in locks:
bb.utils.unlockfile(lock)
-sstate_install[vardepsexclude] += "SSTATE_ALLOW_OVERLAP_FILES STATE_MANMACH SSTATE_MANFILEPREFIX"
+sstate_install[vardepsexclude] += "SSTATE_ALLOW_OVERLAP_FILES SSTATE_MANMACH SSTATE_MANFILEPREFIX"
sstate_install[vardeps] += "${SSTATEPOSTINSTFUNCS}"
def sstate_installpkg(ss, d):
@@ -359,8 +346,9 @@ def sstate_installpkg(ss, d):
d.setVar("SSTATE_CURRTASK", ss['task'])
sstatefetch = d.getVar('SSTATE_PKGNAME')
sstatepkg = d.getVar('SSTATE_PKG')
+ verify_sig = bb.utils.to_boolean(d.getVar("SSTATE_VERIFY_SIG"), False)
- if not os.path.exists(sstatepkg):
+ if not os.path.exists(sstatepkg) or (verify_sig and not os.path.exists(sstatepkg + '.sig')):
pstaging_fetch(sstatefetch, d)
if not os.path.isfile(sstatepkg):
@@ -371,7 +359,7 @@ def sstate_installpkg(ss, d):
d.setVar('SSTATE_INSTDIR', sstateinst)
- if bb.utils.to_boolean(d.getVar("SSTATE_VERIFY_SIG"), False):
+ if verify_sig:
if not os.path.isfile(sstatepkg + '.sig'):
bb.warn("No signature file for sstate package %s, skipping acceleration..." % sstatepkg)
return False
@@ -715,9 +703,7 @@ def sstate_package(ss, d):
if d.getVar('SSTATE_SKIP_CREATION') == '1':
return
- sstate_create_package = ['sstate_report_unihash', 'sstate_create_package']
- if d.getVar('SSTATE_SIG_KEY'):
- sstate_create_package.append('sstate_sign_package')
+ sstate_create_package = ['sstate_report_unihash', 'sstate_create_and_sign_package']
for f in (d.getVar('SSTATECREATEFUNCS') or '').split() + \
sstate_create_package + \
@@ -762,7 +748,6 @@ def pstaging_fetch(sstatefetch, d):
localdata.setVar('FILESPATH', dldir)
localdata.setVar('DL_DIR', dldir)
localdata.setVar('PREMIRRORS', mirrors)
- localdata.setVar('SRCPV', d.getVar('SRCPV'))
# if BB_NO_NETWORK is set but we also have SSTATE_MIRROR_ALLOW_NETWORK,
# we'll want to allow network access for the current set of fetches.
@@ -778,6 +763,7 @@ def pstaging_fetch(sstatefetch, d):
uris += ['file://{0}.sig;downloadfilename={0}.sig'.format(sstatefetch)]
for srcuri in uris:
+ localdata.delVar('SRC_URI')
localdata.setVar('SRC_URI', srcuri)
try:
fetcher = bb.fetch2.Fetch([srcuri], localdata, cache=False)
@@ -787,9 +773,6 @@ def pstaging_fetch(sstatefetch, d):
except bb.fetch2.BBFetchException:
pass
-pstaging_fetch[vardepsexclude] += "SRCPV"
-
-
def sstate_setscene(d):
shared_state = sstate_state_fromvars(d)
accelerate = sstate_installpkg(shared_state, d)
@@ -825,21 +808,100 @@ python sstate_task_postfunc () {
}
sstate_task_postfunc[dirs] = "${WORKDIR}"
+# Create a sstate package
+# If enabled, sign the package.
+# Package and signature are created in a sub-directory
+# and renamed in place once created.
+python sstate_create_and_sign_package () {
+ from pathlib import Path
-#
-# Shell function to generate a sstate package from a directory
-# set as SSTATE_BUILDDIR. Will be run from within SSTATE_BUILDDIR.
-#
-sstate_create_package () {
- # Exit early if it already exists
- if [ -e ${SSTATE_PKG} ]; then
- touch ${SSTATE_PKG} 2>/dev/null || true
- return
- fi
+ # Best effort touch
+ def touch(file):
+ try:
+ file.touch()
+ except:
+ pass
+
+ def update_file(src, dst, force=False):
+ if dst.is_symlink() and not dst.exists():
+ force=True
+ try:
+ # This relies on that src is a temporary file that can be renamed
+ # or left as is.
+ if force:
+ src.rename(dst)
+ else:
+ os.link(src, dst)
+ return True
+ except:
+ pass
+
+ if dst.exists():
+ touch(dst)
+
+ return False
- mkdir --mode=0775 -p `dirname ${SSTATE_PKG}`
- TFILE=`mktemp ${SSTATE_PKG}.XXXXXXXX`
+ sign_pkg = (
+ bb.utils.to_boolean(d.getVar("SSTATE_VERIFY_SIG")) and
+ bool(d.getVar("SSTATE_SIG_KEY"))
+ )
+
+ sstate_pkg = Path(d.getVar("SSTATE_PKG"))
+ sstate_pkg_sig = Path(str(sstate_pkg) + ".sig")
+ if sign_pkg:
+ if sstate_pkg.exists() and sstate_pkg_sig.exists():
+ touch(sstate_pkg)
+ touch(sstate_pkg_sig)
+ return
+ else:
+ if sstate_pkg.exists():
+ touch(sstate_pkg)
+ return
+ # Create the required sstate directory if it is not present.
+ if not sstate_pkg.parent.is_dir():
+ with bb.utils.umask(0o002):
+ bb.utils.mkdirhier(str(sstate_pkg.parent))
+
+ if sign_pkg:
+ from tempfile import TemporaryDirectory
+ with TemporaryDirectory(dir=sstate_pkg.parent) as tmp_dir:
+ tmp_pkg = Path(tmp_dir) / sstate_pkg.name
+ d.setVar("TMP_SSTATE_PKG", str(tmp_pkg))
+ bb.build.exec_func('sstate_archive_package', d)
+
+ from oe.gpg_sign import get_signer
+ signer = get_signer(d, 'local')
+ signer.detach_sign(str(tmp_pkg), d.getVar('SSTATE_SIG_KEY'), None,
+ d.getVar('SSTATE_SIG_PASSPHRASE'), armor=False)
+
+ tmp_pkg_sig = Path(tmp_dir) / sstate_pkg_sig.name
+ if not update_file(tmp_pkg_sig, sstate_pkg_sig):
+ # If the created signature file could not be copied into place,
+ # then we should not use the sstate package either.
+ return
+
+ # If the .sig file was updated, then the sstate package must also
+ # be updated.
+ update_file(tmp_pkg, sstate_pkg, force=True)
+ else:
+ from tempfile import NamedTemporaryFile
+ with NamedTemporaryFile(prefix=sstate_pkg.name, dir=sstate_pkg.parent) as tmp_pkg_fd:
+ tmp_pkg = tmp_pkg_fd.name
+ d.setVar("TMP_SSTATE_PKG", str(tmp_pkg))
+ bb.build.exec_func('sstate_archive_package',d)
+ update_file(tmp_pkg, sstate_pkg)
+ # update_file() may have renamed tmp_pkg, which must exist when the
+ # NamedTemporaryFile() context handler ends.
+ touch(Path(tmp_pkg))
+
+}
+
+# Shell function to generate a sstate package from a directory
+# set as SSTATE_BUILDDIR. Will be run from within SSTATE_BUILDDIR.
+# The calling function handles moving the sstate package into the final
+# destination.
+sstate_archive_package () {
OPT="-cS"
ZSTD="zstd -${SSTATE_ZSTD_CLEVEL} -T${ZSTD_THREADS}"
# Use pzstd if available
@@ -850,42 +912,18 @@ sstate_create_package () {
# Need to handle empty directories
if [ "$(ls -A)" ]; then
set +e
- tar -I "$ZSTD" $OPT -f $TFILE *
+ tar -I "$ZSTD" $OPT -f ${TMP_SSTATE_PKG} *
ret=$?
if [ $ret -ne 0 ] && [ $ret -ne 1 ]; then
exit 1
fi
set -e
else
- tar -I "$ZSTD" $OPT --file=$TFILE --files-from=/dev/null
- fi
- chmod 0664 $TFILE
- # Skip if it was already created by some other process
- if [ -h ${SSTATE_PKG} ] && [ ! -e ${SSTATE_PKG} ]; then
- # There is a symbolic link, but it links to nothing.
- # Forcefully replace it with the new file.
- ln -f $TFILE ${SSTATE_PKG} || true
- elif [ ! -e ${SSTATE_PKG} ]; then
- # Move into place using ln to attempt an atomic op.
- # Abort if it already exists
- ln $TFILE ${SSTATE_PKG} || true
- else
- touch ${SSTATE_PKG} 2>/dev/null || true
+ tar -I "$ZSTD" $OPT --file=${TMP_SSTATE_PKG} --files-from=/dev/null
fi
- rm $TFILE
+ chmod 0664 ${TMP_SSTATE_PKG}
}
-python sstate_sign_package () {
- from oe.gpg_sign import get_signer
-
-
- signer = get_signer(d, 'local')
- sstate_pkg = d.getVar('SSTATE_PKG')
- if os.path.exists(sstate_pkg + '.sig'):
- os.unlink(sstate_pkg + '.sig')
- signer.detach_sign(sstate_pkg, d.getVar('SSTATE_SIG_KEY', False), None,
- d.getVar('SSTATE_SIG_PASSPHRASE'), armor=False)
-}
python sstate_report_unihash() {
report_unihash = getattr(bb.parse.siggen, 'report_unihash', None)
@@ -918,6 +956,8 @@ sstate_unpack_package () {
BB_HASHCHECK_FUNCTION = "sstate_checkhashes"
def sstate_checkhashes(sq_data, d, siginfo=False, currentcount=0, summary=True, **kwargs):
+ import itertools
+
found = set()
missed = set()
@@ -950,6 +990,7 @@ def sstate_checkhashes(sq_data, d, siginfo=False, currentcount=0, summary=True,
sstatefile = d.expand("${SSTATE_DIR}/" + getsstatefile(tid, siginfo, d))
if os.path.exists(sstatefile):
+ oe.utils.touch(sstatefile)
found.add(tid)
bb.debug(2, "SState: Found valid sstate file %s" % sstatefile)
else:
@@ -977,15 +1018,19 @@ def sstate_checkhashes(sq_data, d, siginfo=False, currentcount=0, summary=True,
localdata.delVar('BB_NO_NETWORK')
from bb.fetch2 import FetchConnectionCache
- def checkstatus_init(thread_worker):
- thread_worker.connection_cache = FetchConnectionCache()
+ def checkstatus_init():
+ while not connection_cache_pool.full():
+ connection_cache_pool.put(FetchConnectionCache())
- def checkstatus_end(thread_worker):
- thread_worker.connection_cache.close_connections()
+ def checkstatus_end():
+ while not connection_cache_pool.empty():
+ connection_cache = connection_cache_pool.get()
+ connection_cache.close_connections()
- def checkstatus(thread_worker, arg):
+ def checkstatus(arg):
(tid, sstatefile) = arg
+ connection_cache = connection_cache_pool.get()
localdata2 = bb.data.createCopy(localdata)
srcuri = "file://" + sstatefile
localdata2.setVar('SRC_URI', srcuri)
@@ -995,7 +1040,7 @@ def sstate_checkhashes(sq_data, d, siginfo=False, currentcount=0, summary=True,
try:
fetcher = bb.fetch2.Fetch(srcuri.split(), localdata2,
- connection_cache=thread_worker.connection_cache)
+ connection_cache=connection_cache)
fetcher.checkstatus()
bb.debug(2, "SState: Successful fetch test for %s" % srcuri)
found.add(tid)
@@ -1005,8 +1050,11 @@ def sstate_checkhashes(sq_data, d, siginfo=False, currentcount=0, summary=True,
except Exception as e:
bb.error("SState: cannot test %s: %s\n%s" % (srcuri, repr(e), traceback.format_exc()))
+ connection_cache_pool.put(connection_cache)
+
if progress:
- bb.event.fire(bb.event.ProcessProgress(msg, len(tasklist) - thread_worker.tasks.qsize()), d)
+ bb.event.fire(bb.event.ProcessProgress(msg, next(cnt_tasks_done)), d)
+ bb.event.check_for_interrupts(d)
tasklist = []
for tid in missed:
@@ -1016,6 +1064,8 @@ def sstate_checkhashes(sq_data, d, siginfo=False, currentcount=0, summary=True,
if tasklist:
nproc = min(int(d.getVar("BB_NUMBER_THREADS")), len(tasklist))
+ ## thread-safe counter
+ cnt_tasks_done = itertools.count(start = 1)
progress = len(tasklist) >= 100
if progress:
msg = "Checking sstate mirror object availability"
@@ -1025,13 +1075,13 @@ def sstate_checkhashes(sq_data, d, siginfo=False, currentcount=0, summary=True,
fetcherenv = bb.fetch2.get_fetcher_environment(d)
with bb.utils.environment(**fetcherenv):
bb.event.enable_threadlock()
- pool = oe.utils.ThreadedPool(nproc, len(tasklist),
- worker_init=checkstatus_init, worker_end=checkstatus_end,
- name="sstate_checkhashes-")
- for t in tasklist:
- pool.add_task(checkstatus, t)
- pool.start()
- pool.wait_completion()
+ import concurrent.futures
+ from queue import Queue
+ connection_cache_pool = Queue(nproc)
+ checkstatus_init()
+ with concurrent.futures.ThreadPoolExecutor(max_workers=nproc) as executor:
+ executor.map(checkstatus, tasklist.copy())
+ checkstatus_end()
bb.event.disable_threadlock()
if progress:
@@ -1084,7 +1134,7 @@ def setscene_depvalid(task, taskdependees, notneeded, d, log=None):
logit("Considering setscene task: %s" % (str(taskdependees[task])), log)
- directtasks = ["do_populate_lic", "do_deploy_source_date_epoch", "do_shared_workdir", "do_stash_locale", "do_gcc_stash_builddir", "do_create_spdx"]
+ directtasks = ["do_populate_lic", "do_deploy_source_date_epoch", "do_shared_workdir", "do_stash_locale", "do_gcc_stash_builddir", "do_create_spdx", "do_deploy_archives"]
def isNativeCross(x):
return x.endswith("-native") or "-cross-" in x or "-crosssdk" in x or x.endswith("-cross")
@@ -1120,11 +1170,6 @@ def setscene_depvalid(task, taskdependees, notneeded, d, log=None):
if isNativeCross(taskdependees[dep][0]) and taskdependees[dep][1] in ['do_package_write_deb', 'do_package_write_ipk', 'do_package_write_rpm', 'do_packagedata', 'do_package', 'do_package_qa']:
continue
- # This is due to the [depends] in useradd.bbclass complicating matters
- # The logic *is* reversed here due to the way hard setscene dependencies are injected
- if (taskdependees[task][1] == 'do_package' or taskdependees[task][1] == 'do_populate_sysroot') and taskdependees[dep][0].endswith(('shadow-native', 'shadow-sysroot', 'base-passwd', 'pseudo-native')) and taskdependees[dep][1] == 'do_populate_sysroot':
- continue
-
# Consider sysroot depending on sysroot tasks
if taskdependees[task][1] == 'do_populate_sysroot' and taskdependees[dep][1] == 'do_populate_sysroot':
# Allow excluding certain recursive dependencies. If a recipe needs it should add a
@@ -1187,16 +1232,7 @@ python sstate_eventhandler() {
if not os.path.exists(siginfo):
bb.siggen.dump_this_task(siginfo, d)
else:
- try:
- os.utime(siginfo, None)
- except PermissionError:
- pass
- except OSError as e:
- # Handle read-only file systems gracefully
- import errno
- if e.errno != errno.EROFS:
- raise e
-
+ oe.utils.touch(siginfo)
}
SSTATE_PRUNE_OBSOLETEWORKDIR ?= "1"
@@ -1278,6 +1314,7 @@ python sstate_eventhandler_reachablestamps() {
lines.remove(r)
removed = removed + 1
bb.event.fire(bb.event.ProcessProgress(msg, removed), d)
+ bb.event.check_for_interrupts(d)
bb.event.fire(bb.event.ProcessFinished(msg), d)
@@ -1347,6 +1384,7 @@ python sstate_eventhandler_stalesstate() {
bb.utils.remove(stamp)
removed = removed + 1
bb.event.fire(bb.event.ProcessProgress(msg, removed), d)
+ bb.event.check_for_interrupts(d)
bb.event.fire(bb.event.ProcessFinished(msg), d)
}
diff --git a/meta/classes/staging.bbclass b/meta/classes-global/staging.bbclass
index 8372a4574a..3678a1b441 100644
--- a/meta/classes/staging.bbclass
+++ b/meta/classes-global/staging.bbclass
@@ -1,3 +1,9 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
# These directories will be staged in the sysroot
SYSROOT_DIRS = " \
${includedir} \
@@ -86,7 +92,8 @@ python sysroot_strip () {
qa_already_stripped = 'already-stripped' in (d.getVar('INSANE_SKIP:' + pn) or "").split()
strip_cmd = d.getVar("STRIP")
- oe.package.strip_execs(pn, dstdir, strip_cmd, libdir, base_libdir, d,
+ max_process = oe.utils.get_bb_number_threads(d)
+ oe.package.strip_execs(pn, dstdir, strip_cmd, libdir, base_libdir, max_process,
qa_already_stripped=qa_already_stripped)
}
@@ -119,8 +126,8 @@ do_populate_sysroot[vardeps] += "${SYSROOT_PREPROCESS_FUNCS}"
do_populate_sysroot[vardepsexclude] += "BB_MULTI_PROVIDER_ALLOWED"
POPULATESYSROOTDEPS = ""
-POPULATESYSROOTDEPS:class-target = "virtual/${MLPREFIX}${HOST_PREFIX}binutils:do_populate_sysroot"
-POPULATESYSROOTDEPS:class-nativesdk = "virtual/${HOST_PREFIX}binutils-crosssdk:do_populate_sysroot"
+POPULATESYSROOTDEPS:class-target = "virtual/${HOST_PREFIX}binutils:do_populate_sysroot"
+POPULATESYSROOTDEPS:class-nativesdk = "virtual/${HOST_PREFIX}binutils:do_populate_sysroot"
do_populate_sysroot[depends] += "${POPULATESYSROOTDEPS}"
SSTATETASKS += "do_populate_sysroot"
@@ -238,8 +245,8 @@ def staging_populate_sysroot_dir(targetsysroot, nativesysroot, native, d):
continue
staging_processfixme(fixme, targetdir, targetsysroot, nativesysroot, d)
- for p in postinsts:
- subprocess.check_output(p, shell=True, stderr=subprocess.STDOUT)
+ for p in sorted(postinsts):
+ bb.note("Running postinst {}, output:\n{}".format(p, subprocess.check_output(p, shell=True, stderr=subprocess.STDOUT)))
#
# Manifests here are complicated. The main sysroot area has the unpacked sstate
@@ -269,6 +276,10 @@ python extend_recipe_sysroot() {
pn = d.getVar("PN")
stagingdir = d.getVar("STAGING_DIR")
sharedmanifests = d.getVar("COMPONENTS_DIR") + "/manifests"
+ # only needed by multilib cross-canadian since it redefines RECIPE_SYSROOT
+ manifestprefix = d.getVar("RECIPE_SYSROOT_MANIFEST_SUBDIR")
+ if manifestprefix:
+ sharedmanifests = sharedmanifests + "/" + manifestprefix
recipesysroot = d.getVar("RECIPE_SYSROOT")
recipesysrootnative = d.getVar("RECIPE_SYSROOT_NATIVE")
@@ -352,7 +363,7 @@ python extend_recipe_sysroot() {
#bb.note(" start is %s" % str(start))
# Direct dependencies should be present and can be depended upon
- for dep in set(start):
+ for dep in sorted(set(start)):
if setscenedeps[dep][1] == "do_populate_sysroot":
if dep not in configuredeps:
configuredeps.append(dep)
@@ -510,7 +521,7 @@ python extend_recipe_sysroot() {
binfiles = {}
# Now handle installs
- for dep in configuredeps:
+ for dep in sorted(configuredeps):
c = setscenedeps[dep][0]
if c not in installed:
continue
@@ -618,8 +629,8 @@ python extend_recipe_sysroot() {
for f in fixme:
staging_processfixme(fixme[f], f, recipesysroot, recipesysrootnative, d)
- for p in postinsts:
- subprocess.check_output(p, shell=True, stderr=subprocess.STDOUT)
+ for p in sorted(postinsts):
+ bb.note("Running postinst {}, output:\n{}".format(p, subprocess.check_output(p, shell=True, stderr=subprocess.STDOUT)))
for dep in manifests:
c = setscenedeps[dep][0]
@@ -644,7 +655,7 @@ python staging_taskhandler() {
bbtasks = e.tasklist
for task in bbtasks:
deps = d.getVarFlag(task, "depends")
- if task == "do_configure" or (deps and "populate_sysroot" in deps):
+ if task != 'do_prepare_recipe_sysroot' and (task == "do_configure" or (deps and "populate_sysroot" in deps)):
d.prependVarFlag(task, "prefuncs", "extend_recipe_sysroot ")
}
staging_taskhandler[eventmask] = "bb.event.RecipeTaskPreProcess"
diff --git a/meta/classes/uninative.bbclass b/meta/classes-global/uninative.bbclass
index 6a9e862bcd..d2297b53f5 100644
--- a/meta/classes/uninative.bbclass
+++ b/meta/classes-global/uninative.bbclass
@@ -1,4 +1,10 @@
-UNINATIVE_LOADER ?= "${UNINATIVE_STAGING_DIR}-uninative/${BUILD_ARCH}-linux/lib/${@bb.utils.contains('BUILD_ARCH', 'x86_64', 'ld-linux-x86-64.so.2', '', d)}${@bb.utils.contains('BUILD_ARCH', 'i686', 'ld-linux.so.2', '', d)}${@bb.utils.contains('BUILD_ARCH', 'aarch64', 'ld-linux-aarch64.so.1', '', d)}${@bb.utils.contains('BUILD_ARCH', 'ppc64le', 'ld64.so.2', '', d)}"
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+UNINATIVE_LOADER ?= "${UNINATIVE_STAGING_DIR}-uninative/${BUILD_ARCH}-linux/lib/${@bb.utils.contains('BUILD_ARCH', 'x86_64', 'ld-linux-x86-64.so.2', '', d)}${@bb.utils.contains('BUILD_ARCH', 'i686', 'ld-linux.so.2', '', d)}${@bb.utils.contains('BUILD_ARCH', 'aarch64', 'ld-linux-aarch64.so.1', '', d)}${@bb.utils.contains('BUILD_ARCH', 'ppc64le', 'ld64.so.2', '', d)}${@bb.utils.contains('BUILD_ARCH', 'riscv64', 'ld-linux-riscv64-lp64d.so.1', '', d)}"
UNINATIVE_STAGING_DIR ?= "${STAGING_DIR}"
UNINATIVE_URL ?= "unset"
@@ -34,6 +40,8 @@ python uninative_event_fetchloader() {
with open(loaderchksum, "r") as f:
readchksum = f.read().strip()
if readchksum == chksum:
+ if "uninative" not in d.getVar("SSTATEPOSTUNPACKFUNCS"):
+ enable_uninative(d)
return
import subprocess
@@ -135,10 +143,10 @@ def enable_uninative(d):
d.setVar("NATIVELSBSTRING", "universal%s" % oe.utils.host_gcc_version(d))
d.appendVar("SSTATEPOSTUNPACKFUNCS", " uninative_changeinterp")
d.appendVarFlag("SSTATEPOSTUNPACKFUNCS", "vardepvalueexclude", "| uninative_changeinterp")
- d.appendVar("BUILD_LDFLAGS", " -Wl,--allow-shlib-undefined -Wl,--dynamic-linker=${UNINATIVE_LOADER}")
- d.appendVarFlag("BUILD_LDFLAGS", "vardepvalueexclude", "| -Wl,--allow-shlib-undefined -Wl,--dynamic-linker=${UNINATIVE_LOADER}")
+ d.appendVar("BUILD_LDFLAGS", " -Wl,--allow-shlib-undefined -Wl,--dynamic-linker=${UNINATIVE_LOADER} -pthread")
+ d.appendVarFlag("BUILD_LDFLAGS", "vardepvalueexclude", "| -Wl,--allow-shlib-undefined -Wl,--dynamic-linker=${UNINATIVE_LOADER} -pthread")
d.appendVarFlag("BUILD_LDFLAGS", "vardepsexclude", "UNINATIVE_LOADER")
- d.prependVar("PATH", "${STAGING_DIR}-uninative/${BUILD_ARCH}-linux${bindir_native}:")
+ d.prependVar("PATH", "${UNINATIVE_STAGING_DIR}-uninative/${BUILD_ARCH}-linux${bindir_native}:")
python uninative_changeinterp () {
import subprocess
@@ -167,5 +175,7 @@ python uninative_changeinterp () {
if not elf.isDynamic():
continue
+ os.chmod(f, s[stat.ST_MODE] | stat.S_IWUSR)
subprocess.check_output(("patchelf-uninative", "--set-interpreter", d.getVar("UNINATIVE_LOADER"), f), stderr=subprocess.STDOUT)
+ os.chmod(f, s[stat.ST_MODE])
}
diff --git a/meta/classes/utility-tasks.bbclass b/meta/classes-global/utility-tasks.bbclass
index 0466325c13..ae2da330b8 100644
--- a/meta/classes/utility-tasks.bbclass
+++ b/meta/classes-global/utility-tasks.bbclass
@@ -1,3 +1,9 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
addtask listtasks
do_listtasks[nostamp] = "1"
python do_listtasks() {
diff --git a/meta/classes/utils.bbclass b/meta/classes-global/utils.bbclass
index b4eb3d38ab..957389928f 100644
--- a/meta/classes/utils.bbclass
+++ b/meta/classes-global/utils.bbclass
@@ -1,3 +1,8 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
oe_soinstall() {
# Purpose: Install shared library file and
@@ -10,7 +15,7 @@ oe_soinstall() {
;;
esac
install -m 755 $1 $2/$libname
- sonamelink=`${HOST_PREFIX}readelf -d $1 |grep 'Library soname:' |sed -e 's/.*\[\(.*\)\].*/\1/'`
+ sonamelink=`${READELF} -d $1 |grep 'Library soname:' |sed -e 's/.*\[\(.*\)\].*/\1/'`
if [ -z $sonamelink ]; then
bbfatal "oe_soinstall: $libname is missing ELF tag 'SONAME'."
fi
@@ -142,7 +147,7 @@ oe_libinstall() {
# special case hack for non-libtool .so.#.#.# links
baselibfile=`basename "$libfile"`
if (echo $baselibfile | grep -qE '^lib.*\.so\.[0-9.]*$'); then
- sonamelink=`${HOST_PREFIX}readelf -d $libfile |grep 'Library soname:' |sed -e 's/.*\[\(.*\)\].*/\1/'`
+ sonamelink=`${READELF} -d $libfile |grep 'Library soname:' |sed -e 's/.*\[\(.*\)\].*/\1/'`
solink=`echo $baselibfile | sed -e 's/\.so\..*/.so/'`
if [ -n "$sonamelink" -a x"$baselibfile" != x"$sonamelink" ]; then
__runcmd ln -sf $baselibfile $destpath/$sonamelink
@@ -184,6 +189,43 @@ END
chmod +x $cmd
}
+create_cmdline_shebang_wrapper () {
+ # Create a wrapper script where commandline options are needed
+ #
+ # These are useful to work around shebang relocation issues, where shebangs are too
+ # long or have arguments in them, thus preventing them from using the /usr/bin/env
+ # shebang
+ #
+ # Usage: create_cmdline_wrapper FILENAME <extra-options>
+
+ cmd=$1
+ shift
+
+ echo "Generating wrapper script for $cmd"
+
+ # Strip #! and get remaining interpreter + arg
+ argument="$(sed -ne 's/^#! *//p;q' $cmd)"
+ # strip the shebang from the real script as we do not want it to be usable anyway
+ tail -n +2 $cmd > $cmd.real
+ chown --reference=$cmd $cmd.real
+ chmod --reference=$cmd $cmd.real
+ rm -f $cmd
+ cmdname=$(basename $cmd)
+ dirname=$(dirname $cmd)
+ cmdoptions=$@
+ if [ "${base_prefix}" != "" ]; then
+ relpath=`python3 -c "import os; print(os.path.relpath('${D}${base_prefix}', '$dirname'))"`
+ cmdoptions=`echo $@ | sed -e "s:${base_prefix}:\\$realdir/$relpath:g"`
+ fi
+ cat <<END >$cmd
+#!/usr/bin/env bash
+realpath=\`readlink -fn \$0\`
+realdir=\`dirname \$realpath\`
+exec -a \$realdir/$cmdname $argument \$realdir/$cmdname.real $cmdoptions "\$@"
+END
+ chmod +x $cmd
+}
+
create_wrapper () {
# Create a wrapper script where extra environment variables are needed
#
diff --git a/meta/classes/allarch.bbclass b/meta/classes-recipe/allarch.bbclass
index a766a654a9..e429b92437 100644
--- a/meta/classes/allarch.bbclass
+++ b/meta/classes-recipe/allarch.bbclass
@@ -1,4 +1,10 @@
#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+#
# This class is used for architecture independent recipes/data files (usually scripts)
#
@@ -57,9 +63,9 @@ python () {
d.appendVarFlag("emit_pkgdata", "vardepsexclude", " MULTILIB_VARIANTS")
d.appendVarFlag("write_specfile", "vardepsexclude", " MULTILIBS")
d.appendVarFlag("do_package", "vardepsexclude", " package_do_shlibs")
+
+ d.setVar("qemu_wrapper_cmdline", "def qemu_wrapper_cmdline(data, rootfs_path, library_paths):\n return 'false'")
elif bb.data.inherits_class('packagegroup', d) and not bb.data.inherits_class('nativesdk', d):
bb.error("Please ensure recipe %s sets PACKAGE_ARCH before inherit packagegroup" % d.getVar("FILE"))
}
-def qemu_wrapper_cmdline(data, rootfs_path, library_paths):
- return 'false'
diff --git a/meta/classes-recipe/autotools-brokensep.bbclass b/meta/classes-recipe/autotools-brokensep.bbclass
new file mode 100644
index 0000000000..a0fb4b7b50
--- /dev/null
+++ b/meta/classes-recipe/autotools-brokensep.bbclass
@@ -0,0 +1,11 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+# Autotools class for recipes where separate build dir doesn't work
+# Ideally we should fix software so it does work. Standard autotools supports
+# this.
+inherit autotools
+B = "${S}"
diff --git a/meta/classes/autotools.bbclass b/meta/classes-recipe/autotools.bbclass
index 4ab2460990..9359c9b4e1 100644
--- a/meta/classes/autotools.bbclass
+++ b/meta/classes-recipe/autotools.bbclass
@@ -1,3 +1,9 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
def get_autotools_dep(d):
if d.getVar('INHIBIT_AUTOTOOLS_DEPS'):
return ''
@@ -31,13 +37,14 @@ inherit siteinfo
export CONFIG_SITE
acpaths ?= "default"
-EXTRA_AUTORECONF = "--exclude=autopoint --exclude=gtkdocize"
+EXTRA_AUTORECONF += "--exclude=autopoint"
export lt_cv_sys_lib_dlsearch_path_spec = "${libdir} ${base_libdir}"
# When building tools for use at build-time it's recommended for the build
# system to use these variables when cross-compiling.
-# (http://sources.redhat.com/autobook/autobook/autobook_270.html)
+# https://www.gnu.org/software/autoconf-archive/ax_prog_cc_for_build.html
+# https://stackoverflow.com/questions/24201260/autotools-cross-compilation-and-generated-sources/24208587#24208587
export CPP_FOR_BUILD = "${BUILD_CPP}"
export CPPFLAGS_FOR_BUILD = "${BUILD_CPPFLAGS}"
@@ -70,7 +77,7 @@ CONFIGUREOPTS = " --build=${BUILD_SYS} \
--localstatedir=${localstatedir} \
--libdir=${libdir} \
--includedir=${includedir} \
- --oldincludedir=${oldincludedir} \
+ --oldincludedir=${includedir} \
--infodir=${infodir} \
--mandir=${mandir} \
--disable-silent-rules \
@@ -151,7 +158,7 @@ python autotools_aclocals () {
do_configure[file-checksums] += "${@' '.join(siteinfo_get_files(d, sysrootcache=False)[1])}"
-CONFIGURE_FILES = "${S}/configure.in ${S}/configure.ac ${S}/config.h.in ${S}/acinclude.m4 Makefile.am"
+CONFIGURE_FILES = "${S}/configure.in ${S}/configure.ac ${S}/config.h.in *.m4 Makefile.am"
autotools_do_configure() {
# WARNING: gross hack follows:
diff --git a/meta/classes/baremetal-image.bbclass b/meta/classes-recipe/baremetal-image.bbclass
index 81f5e5e93d..b9a584351a 100644
--- a/meta/classes/baremetal-image.bbclass
+++ b/meta/classes-recipe/baremetal-image.bbclass
@@ -1,3 +1,9 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
# Baremetal image class
#
# This class is meant to be inherited by recipes for baremetal/RTOS applications
@@ -47,9 +53,10 @@ python do_rootfs(){
Path(manifest_name).touch()
if os.path.exists(manifest_name) and link_name:
manifest_link = deploy_dir + "/" + link_name + ".manifest"
- if os.path.lexists(manifest_link):
- os.remove(manifest_link)
- os.symlink(os.path.basename(manifest_name), manifest_link)
+ if manifest_link != manifest_name:
+ if os.path.lexists(manifest_link):
+ os.remove(manifest_link)
+ os.symlink(os.path.basename(manifest_name), manifest_link)
# A lot of postprocess commands assume the existence of rootfs/etc
sysconfdir = d.getVar("IMAGE_ROOTFS") + d.getVar('sysconfdir')
bb.utils.mkdirhier(sysconfdir)
@@ -79,6 +86,11 @@ QB_DEFAULT_FSTYPE ?= "bin"
QB_DTB ?= ""
QB_OPT_APPEND:append = " -nographic"
+# QEMU x86 requires an .elf kernel to boot rather than a .bin
+QB_DEFAULT_KERNEL:qemux86 ?= "${IMAGE_LINK_NAME}.elf"
+# QEMU x86-64 refuses to boot from -kernel, needs a multiboot compatible image
+QB_DEFAULT_FSTYPE:qemux86-64 ?= "iso"
+
# RISC-V tunes set the BIOS, unset, and instruct QEMU to
# ignore the BIOS and boot from -kernel
QB_DEFAULT_BIOS:qemuriscv64 = ""
@@ -94,6 +106,17 @@ QB_OPT_APPEND:append:qemuriscv32 = " -bios none"
CFLAGS:append:qemuriscv64 = " -mcmodel=medany"
+## Emulate image.bbclass
+# Handle inherits of any of the image classes we need
+IMAGE_CLASSES ??= ""
+IMGCLASSES = " ${IMAGE_CLASSES}"
+inherit_defer ${IMGCLASSES}
+# Set defaults to satisfy IMAGE_FEATURES check
+IMAGE_FEATURES ?= ""
+IMAGE_FEATURES[type] = "list"
+IMAGE_FEATURES[validitems] += ""
+
+
# This next part is necessary to trick the build system into thinking
# its building an image recipe so it generates the qemuboot.conf
addtask do_rootfs before do_image after do_install
diff --git a/meta/classes/bash-completion.bbclass b/meta/classes-recipe/bash-completion.bbclass
index 803b2cae4d..b656e76c09 100644
--- a/meta/classes/bash-completion.bbclass
+++ b/meta/classes-recipe/bash-completion.bbclass
@@ -1,3 +1,9 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
DEPENDS:append:class-target = " bash-completion"
PACKAGES += "${PN}-bash-completion"
diff --git a/meta/classes/bin_package.bbclass b/meta/classes-recipe/bin_package.bbclass
index c3aca20443..3a1befc29c 100644
--- a/meta/classes/bin_package.bbclass
+++ b/meta/classes-recipe/bin_package.bbclass
@@ -1,7 +1,9 @@
#
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+# Copyright OpenEmbedded Contributors
#
+# SPDX-License-Identifier: MIT
+#
+
# Common variable and task for the binary package recipe.
# Basic principle:
# * The files have been unpacked to ${S} by base.bbclass
@@ -30,8 +32,9 @@ bin_package_do_install () {
bbfatal bin_package has nothing to install. Be sure the SRC_URI unpacks into S.
fi
cd ${S}
+ install -d ${D}${base_prefix}
tar --no-same-owner --exclude='./patches' --exclude='./.pc' -cpf - . \
- | tar --no-same-owner -xpf - -C ${D}
+ | tar --no-same-owner -xpf - -C ${D}${base_prefix}
}
FILES:${PN} = "/"
diff --git a/meta/classes/binconfig-disabled.bbclass b/meta/classes-recipe/binconfig-disabled.bbclass
index e8ac41b2d4..cbe2078e0f 100644
--- a/meta/classes/binconfig-disabled.bbclass
+++ b/meta/classes-recipe/binconfig-disabled.bbclass
@@ -1,4 +1,10 @@
#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+#
# Class to disable binconfig files instead of installing them
#
diff --git a/meta/classes/binconfig.bbclass b/meta/classes-recipe/binconfig.bbclass
index 6e0c88269a..427dba7f1f 100644
--- a/meta/classes/binconfig.bbclass
+++ b/meta/classes-recipe/binconfig.bbclass
@@ -1,3 +1,9 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
FILES:${PN}-dev += "${bindir}/*-config"
# The namespaces can clash here hence the two step replace
diff --git a/meta/classes-recipe/cargo-update-recipe-crates.bbclass b/meta/classes-recipe/cargo-update-recipe-crates.bbclass
new file mode 100644
index 0000000000..8980137d02
--- /dev/null
+++ b/meta/classes-recipe/cargo-update-recipe-crates.bbclass
@@ -0,0 +1,79 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+##
+## Purpose:
+## This class is used to update the list of crates in SRC_URI
+## by reading Cargo.lock in the source tree.
+##
+## See meta/recipes-devtools/python/python3-bcrypt_*.bb for an example
+##
+## To perform the update: bitbake -c update_crates recipe-name
+
+addtask do_update_crates after do_patch
+do_update_crates[depends] = "python3-native:do_populate_sysroot"
+do_update_crates[nostamp] = "1"
+do_update_crates[doc] = "Update the recipe by reading Cargo.lock and write in ${THISDIR}/${BPN}-crates.inc"
+
+# The directory where to search for Cargo.lock files
+CARGO_LOCK_SRC_DIR ??= "${S}"
+
+do_update_crates() {
+ TARGET_FILE="${THISDIR}/${BPN}-crates.inc"
+
+ nativepython3 - <<EOF
+
+def get_crates(f):
+ import tomllib
+ c_list = '# from %s' % os.path.relpath(f, '${CARGO_LOCK_SRC_DIR}')
+ c_list += '\nSRC_URI += " \\\'
+ crates = tomllib.load(open(f, 'rb'))
+
+ # Build a list with crates info that have crates.io in the source
+ crates_candidates = list(filter(lambda c: 'crates.io' in c.get('source', ''), crates['package']))
+
+ if not crates_candidates:
+ raise ValueError("Unable to find any candidate crates that use crates.io")
+
+ # Update crates uri and their checksum, to avoid name clashing on the checksum
+ # we need to rename crates with name and version to have a unique key
+ cksum_list = ''
+ for c in crates_candidates:
+ rename = "%s-%s" % (c['name'], c['version'])
+ c_list += '\n crate://crates.io/%s/%s \\\' % (c['name'], c['version'])
+ if 'checksum' in c:
+ cksum_list += '\nSRC_URI[%s.sha256sum] = "%s"' % (rename, c['checksum'])
+
+ c_list += '\n"\n'
+ c_list += cksum_list
+ c_list += '\n'
+ return c_list
+
+import os
+crates = "# Autogenerated with 'bitbake -c update_crates ${PN}'\n\n"
+found = False
+for root, dirs, files in os.walk('${CARGO_LOCK_SRC_DIR}'):
+ # ignore git and patches directories
+ if root.startswith(os.path.join('${CARGO_LOCK_SRC_DIR}', '.pc')):
+ continue
+ if root.startswith(os.path.join('${CARGO_LOCK_SRC_DIR}', '.git')):
+ continue
+ for file in files:
+ if file == 'Cargo.lock':
+ try:
+ cargo_lock_path = os.path.join(root, file)
+ crates += get_crates(os.path.join(root, file))
+ except Exception as e:
+ raise ValueError("Cannot parse '%s'" % cargo_lock_path) from e
+ else:
+ found = True
+if not found:
+ raise ValueError("Unable to find any Cargo.lock in ${CARGO_LOCK_SRC_DIR}")
+open("${TARGET_FILE}", 'w').write(crates)
+EOF
+
+ bbnote "Successfully update crates inside '${TARGET_FILE}'"
+}
diff --git a/meta/classes-recipe/cargo.bbclass b/meta/classes-recipe/cargo.bbclass
new file mode 100644
index 0000000000..0829a58dd9
--- /dev/null
+++ b/meta/classes-recipe/cargo.bbclass
@@ -0,0 +1,93 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+##
+## Purpose:
+## This class is used by any recipes that are built using
+## Cargo.
+
+inherit cargo_common
+inherit rust-target-config
+
+# the binary we will use
+CARGO = "cargo"
+
+# We need cargo to compile for the target
+BASEDEPENDS:append = " cargo-native"
+
+# Ensure we get the right rust variant
+DEPENDS:append:class-target = " rust-native ${RUSTLIB_DEP}"
+DEPENDS:append:class-nativesdk = " rust-native ${RUSTLIB_DEP}"
+DEPENDS:append:class-native = " rust-native"
+
+# Enable build separation
+B = "${WORKDIR}/build"
+
+# In case something fails in the build process, give a bit more feedback on
+# where the issue occured
+export RUST_BACKTRACE = "1"
+
+RUSTFLAGS ??= ""
+BUILD_MODE = "${@['--release', ''][d.getVar('DEBUG_BUILD') == '1']}"
+# --frozen flag will prevent network access (which is required since only
+# the do_fetch step is authorized to access network)
+# and will require an up to date Cargo.lock file.
+# This force the package being built to already ship a Cargo.lock, in the end
+# this is what we want, at least, for reproducibility of the build.
+CARGO_BUILD_FLAGS = "-v --frozen --target ${RUST_HOST_SYS} ${BUILD_MODE} --manifest-path=${CARGO_MANIFEST_PATH}"
+
+# This is based on the content of CARGO_BUILD_FLAGS and generally will need to
+# change if CARGO_BUILD_FLAGS changes.
+BUILD_DIR = "${@['release', 'debug'][d.getVar('DEBUG_BUILD') == '1']}"
+CARGO_TARGET_SUBDIR="${RUST_HOST_SYS}/${BUILD_DIR}"
+oe_cargo_build () {
+ export RUSTFLAGS="${RUSTFLAGS}"
+ bbnote "Using rust targets from ${RUST_TARGET_PATH}"
+ bbnote "cargo = $(which ${CARGO})"
+ bbnote "${CARGO} build ${CARGO_BUILD_FLAGS} $@"
+ "${CARGO}" build ${CARGO_BUILD_FLAGS} "$@"
+}
+
+do_compile[progress] = "outof:\s+(\d+)/(\d+)"
+cargo_do_compile () {
+ oe_cargo_build
+}
+
+cargo_do_install () {
+ local have_installed=false
+ for tgt in "${B}/target/${CARGO_TARGET_SUBDIR}/"*; do
+ case $tgt in
+ *.so|*.rlib)
+ install -d "${D}${rustlibdir}"
+ install -m755 "$tgt" "${D}${rustlibdir}"
+ have_installed=true
+ ;;
+ *examples)
+ if [ -d "$tgt" ]; then
+ for example in "$tgt/"*; do
+ if [ -f "$example" ] && [ -x "$example" ]; then
+ install -d "${D}${bindir}"
+ install -m755 "$example" "${D}${bindir}"
+ have_installed=true
+ fi
+ done
+ fi
+ ;;
+ *)
+ if [ -f "$tgt" ] && [ -x "$tgt" ]; then
+ install -d "${D}${bindir}"
+ install -m755 "$tgt" "${D}${bindir}"
+ have_installed=true
+ fi
+ ;;
+ esac
+ done
+ if ! $have_installed; then
+ die "Did not find anything to install"
+ fi
+}
+
+EXPORT_FUNCTIONS do_compile do_install
diff --git a/meta/classes-recipe/cargo_c.bbclass b/meta/classes-recipe/cargo_c.bbclass
new file mode 100644
index 0000000000..ef431634a2
--- /dev/null
+++ b/meta/classes-recipe/cargo_c.bbclass
@@ -0,0 +1,41 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+##
+## Purpose:
+## This class is used by any recipes that want to compile a C ABI compatible
+## library with header and pkg config file
+
+inherit cargo pkgconfig
+
+# the binaries we will use
+CARGO_C_BUILD = "cargo-cbuild"
+CARGO_C_INSTALL = "cargo-cinstall"
+
+# We need cargo-c to compile for the target
+BASEDEPENDS:append = " cargo-c-native"
+
+do_compile[progress] = "outof:\s+(\d+)/(\d+)"
+cargo_c_do_compile() {
+ oe_cargo_fix_env
+ export RUSTFLAGS="${RUSTFLAGS}"
+ bbnote "Using rust targets from ${RUST_TARGET_PATH}"
+ bbnote "cargo-cbuild = $(which ${CARGO_C_BUILD})"
+ bbnote "${CARGO_C_BUILD} cbuild ${CARGO_BUILD_FLAGS}"
+ "${CARGO_C_BUILD}" cbuild ${CARGO_BUILD_FLAGS}
+}
+
+cargo_c_do_install() {
+ oe_cargo_fix_env
+ export RUSTFLAGS="${RUSTFLAGS}"
+ bbnote "cargo-cinstall = $(which ${CARGO_C_INSTALL})"
+ "${CARGO_C_INSTALL}" cinstall ${CARGO_BUILD_FLAGS} \
+ --destdir ${D} \
+ --prefix ${prefix} \
+ --library-type cdylib
+}
+
+EXPORT_FUNCTIONS do_compile do_install
diff --git a/meta/classes-recipe/cargo_common.bbclass b/meta/classes-recipe/cargo_common.bbclass
new file mode 100644
index 0000000000..0fb443edbd
--- /dev/null
+++ b/meta/classes-recipe/cargo_common.bbclass
@@ -0,0 +1,238 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+##
+## Purpose:
+## This class is to support building with cargo. It
+## must be different than cargo.bbclass because Rust
+## now builds with Cargo but cannot use cargo.bbclass
+## due to dependencies and assumptions in cargo.bbclass
+## that Rust & Cargo are already installed. So this
+## is used by cargo.bbclass and Rust
+##
+
+# add crate fetch support
+inherit rust-common
+
+# Where we download our registry and dependencies to
+export CARGO_HOME = "${WORKDIR}/cargo_home"
+
+# The pkg-config-rs library used by cargo build scripts disables itself when
+# cross compiling unless this is defined. We set up pkg-config appropriately
+# for cross compilation, so tell it we know better than it.
+export PKG_CONFIG_ALLOW_CROSS = "1"
+
+# Don't instruct cargo to use crates downloaded by bitbake. Some rust packages,
+# for example the rust compiler itself, come with their own vendored sources.
+# Specifying two [source.crates-io] will not work.
+CARGO_DISABLE_BITBAKE_VENDORING ??= "0"
+
+# Used by libstd-rs to point to the vendor dir included in rustc src
+CARGO_VENDORING_DIRECTORY ??= "${CARGO_HOME}/bitbake"
+
+# The directory of the Cargo.toml relative to the root directory, per default
+# assume there's a Cargo.toml directly in the root directory
+CARGO_SRC_DIR ??= ""
+
+# The actual path to the Cargo.toml
+CARGO_MANIFEST_PATH ??= "${S}/${CARGO_SRC_DIR}/Cargo.toml"
+
+# Path to Cargo.lock
+CARGO_LOCK_PATH ??= "${@ os.path.join(os.path.dirname(d.getVar('CARGO_MANIFEST_PATH', True)), 'Cargo.lock')}"
+
+CARGO_RUST_TARGET_CCLD ??= "${RUST_TARGET_CCLD}"
+cargo_common_do_configure () {
+ mkdir -p ${CARGO_HOME}/bitbake
+
+ cat <<- EOF > ${CARGO_HOME}/config
+ # EXTRA_OECARGO_PATHS
+ paths = [
+ $(for p in ${EXTRA_OECARGO_PATHS}; do echo \"$p\",; done)
+ ]
+ EOF
+
+ cat <<- EOF >> ${CARGO_HOME}/config
+
+ # Local mirror vendored by bitbake
+ [source.bitbake]
+ directory = "${CARGO_VENDORING_DIRECTORY}"
+ EOF
+
+ if [ ${CARGO_DISABLE_BITBAKE_VENDORING} = "0" ]; then
+ cat <<- EOF >> ${CARGO_HOME}/config
+
+ [source.crates-io]
+ replace-with = "bitbake"
+ local-registry = "/nonexistent"
+ EOF
+ fi
+
+ cat <<- EOF >> ${CARGO_HOME}/config
+
+ [http]
+ # Multiplexing can't be enabled because http2 can't be enabled
+ # in curl-native without dependency loops
+ multiplexing = false
+
+ # Ignore the hard coded and incorrect path to certificates
+ cainfo = "${STAGING_ETCDIR_NATIVE}/ssl/certs/ca-certificates.crt"
+
+ EOF
+
+ cat <<- EOF >> ${CARGO_HOME}/config
+
+ # HOST_SYS
+ [target.${RUST_HOST_SYS}]
+ linker = "${CARGO_RUST_TARGET_CCLD}"
+ EOF
+
+ if [ "${RUST_HOST_SYS}" != "${RUST_BUILD_SYS}" ]; then
+ cat <<- EOF >> ${CARGO_HOME}/config
+
+ # BUILD_SYS
+ [target.${RUST_BUILD_SYS}]
+ linker = "${RUST_BUILD_CCLD}"
+ EOF
+ fi
+
+ if [ "${RUST_TARGET_SYS}" != "${RUST_BUILD_SYS}" -a "${RUST_TARGET_SYS}" != "${RUST_HOST_SYS}" ]; then
+ cat <<- EOF >> ${CARGO_HOME}/config
+
+ # TARGET_SYS
+ [target.${RUST_TARGET_SYS}]
+ linker = "${RUST_TARGET_CCLD}"
+ EOF
+ fi
+
+ # Put build output in build directory preferred by bitbake instead of
+ # inside source directory unless they are the same
+ if [ "${B}" != "${S}" ]; then
+ cat <<- EOF >> ${CARGO_HOME}/config
+
+ [build]
+ # Use out of tree build destination to avoid polluting the source tree
+ target-dir = "${B}/target"
+ EOF
+ fi
+
+ cat <<- EOF >> ${CARGO_HOME}/config
+
+ [term]
+ progress.when = 'always'
+ progress.width = 80
+ EOF
+}
+
+python cargo_common_do_patch_paths() {
+ import shutil
+
+ cargo_config = os.path.join(d.getVar("CARGO_HOME"), "config")
+ if not os.path.exists(cargo_config):
+ return
+
+ src_uri = (d.getVar('SRC_URI') or "").split()
+ if len(src_uri) == 0:
+ return
+
+ patches = dict()
+ workdir = d.getVar('WORKDIR')
+ fetcher = bb.fetch2.Fetch(src_uri, d)
+ for url in fetcher.urls:
+ ud = fetcher.ud[url]
+ if ud.type == 'git':
+ name = ud.parm.get('name')
+ destsuffix = ud.parm.get('destsuffix')
+ if name is not None and destsuffix is not None:
+ if ud.user:
+ repo = '%s://%s@%s%s' % (ud.proto, ud.user, ud.host, ud.path)
+ else:
+ repo = '%s://%s%s' % (ud.proto, ud.host, ud.path)
+ path = '%s = { path = "%s" }' % (name, os.path.join(workdir, destsuffix))
+ patches.setdefault(repo, []).append(path)
+
+ with open(cargo_config, "a+") as config:
+ for k, v in patches.items():
+ print('\n[patch."%s"]' % k, file=config)
+ for name in v:
+ print(name, file=config)
+
+ if not patches:
+ return
+
+ # Cargo.lock file is needed for to be sure that artifacts
+ # downloaded by the fetch steps are those expected by the
+ # project and that the possible patches are correctly applied.
+ # Moreover since we do not want any modification
+ # of this file (for reproducibility purpose), we prevent it by
+ # using --frozen flag (in CARGO_BUILD_FLAGS) and raise a clear error
+ # here is better than letting cargo tell (in case the file is missing)
+ # "Cargo.lock should be modified but --frozen was given"
+
+ lockfile = d.getVar("CARGO_LOCK_PATH", True)
+ if not os.path.exists(lockfile):
+ bb.fatal(f"{lockfile} file doesn't exist")
+
+ # There are patched files and so Cargo.lock should be modified but we use
+ # --frozen so let's handle that modifications here.
+ #
+ # Note that a "better" (more elegant ?) would have been to use cargo update for
+ # patched packages:
+ # cargo update --offline -p package_1 -p package_2
+ # But this is not possible since it requires that cargo local git db
+ # to be populated and this is not the case as we fetch git repo ourself.
+
+ lockfile_orig = lockfile + ".orig"
+ if not os.path.exists(lockfile_orig):
+ shutil.copy(lockfile, lockfile_orig)
+
+ newlines = []
+ with open(lockfile_orig, "r") as f:
+ for line in f.readlines():
+ if not line.startswith("source = \"git"):
+ newlines.append(line)
+
+ with open(lockfile, "w") as f:
+ f.writelines(newlines)
+}
+do_configure[postfuncs] += "cargo_common_do_patch_paths"
+
+do_compile:prepend () {
+ oe_cargo_fix_env
+}
+
+oe_cargo_fix_env () {
+ export CC="${RUST_TARGET_CC}"
+ export CXX="${RUST_TARGET_CXX}"
+ export CFLAGS="${CFLAGS}"
+ export CXXFLAGS="${CXXFLAGS}"
+ export AR="${AR}"
+ export TARGET_CC="${RUST_TARGET_CC}"
+ export TARGET_CXX="${RUST_TARGET_CXX}"
+ export TARGET_CFLAGS="${CFLAGS}"
+ export TARGET_CXXFLAGS="${CXXFLAGS}"
+ export TARGET_AR="${AR}"
+ export HOST_CC="${RUST_BUILD_CC}"
+ export HOST_CXX="${RUST_BUILD_CXX}"
+ export HOST_CFLAGS="${BUILD_CFLAGS}"
+ export HOST_CXXFLAGS="${BUILD_CXXFLAGS}"
+ export HOST_AR="${BUILD_AR}"
+}
+
+EXTRA_OECARGO_PATHS ??= ""
+
+EXPORT_FUNCTIONS do_configure
+
+# The culprit for this setting is the libc crate,
+# which as of Jun 2023 calls directly into 32 bit time functions in glibc,
+# bypassing all of glibc provisions to choose the right Y2038-safe functions. As
+# rust components statically link with that crate, pretty much everything
+# is affected, and so there's no point trying to have recipe-specific
+# INSANE_SKIP entries.
+#
+# Upstream ticket and PR:
+# https://github.com/rust-lang/libc/issues/3223
+# https://github.com/rust-lang/libc/pull/3175
+INSANE_SKIP:append = " 32bit-time"
diff --git a/meta/classes-recipe/cmake-qemu.bbclass b/meta/classes-recipe/cmake-qemu.bbclass
new file mode 100644
index 0000000000..46a89e2827
--- /dev/null
+++ b/meta/classes-recipe/cmake-qemu.bbclass
@@ -0,0 +1,32 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+# Not all platforms are supported by Qemu. Using qemu-user therefore
+# involves a certain risk, which is also the reason why this feature
+# is not part of the main cmake class by default.
+#
+# One use case is the execution of cross-compiled unit tests with CTest
+# on the build machine. If CMAKE_EXEWRAPPER_ENABLED is configured,
+# cmake --build --target test
+# works transparently with qemu-user. If the cmake project is developed
+# with this use case in mind this works very nicely also out of an IDE
+# configured to use cmake-native for cross compiling.
+
+inherit qemu cmake
+
+DEPENDS:append:class-target = "${@' qemu-native' if bb.utils.contains('MACHINE_FEATURES', 'qemu-usermode', True, False, d) else ''}"
+
+cmake_do_generate_toolchain_file:append:class-target() {
+ if [ "${@bb.utils.contains('MACHINE_FEATURES', 'qemu-usermode', 'True', 'False', d)}" ]; then
+ # Write out a qemu wrapper that will be used as exe_wrapper so that cmake
+ # can run target helper binaries through that. This also allows to execute ctest.
+ qemu_binary="${@qemu_wrapper_cmdline(d, '${STAGING_DIR_HOST}', ['${STAGING_DIR_HOST}/${libdir}','${STAGING_DIR_HOST}/${base_libdir}'])}"
+ echo "#!/bin/sh" > "${WORKDIR}/cmake-qemuwrapper"
+ echo "$qemu_binary \"\$@\"" >> "${WORKDIR}/cmake-qemuwrapper"
+ chmod +x "${WORKDIR}/cmake-qemuwrapper"
+ echo "set( CMAKE_CROSSCOMPILING_EMULATOR ${WORKDIR}/cmake-qemuwrapper)" \
+ >> ${WORKDIR}/toolchain.cmake
+ fi
+}
diff --git a/meta/classes-recipe/cmake.bbclass b/meta/classes-recipe/cmake.bbclass
new file mode 100644
index 0000000000..3d3781ef33
--- /dev/null
+++ b/meta/classes-recipe/cmake.bbclass
@@ -0,0 +1,247 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+# Path to the CMake file to process.
+OECMAKE_SOURCEPATH ??= "${S}"
+
+DEPENDS:prepend = "cmake-native "
+B = "${WORKDIR}/build"
+
+# What CMake generator to use.
+# The supported options are "Unix Makefiles" or "Ninja".
+OECMAKE_GENERATOR ?= "Ninja"
+
+python() {
+ generator = d.getVar("OECMAKE_GENERATOR")
+ if "Unix Makefiles" in generator:
+ args = "-G '" + generator + "' -DCMAKE_MAKE_PROGRAM=" + d.getVar("MAKE")
+ d.setVar("OECMAKE_GENERATOR_ARGS", args)
+ d.setVarFlag("do_compile", "progress", "percent")
+ elif "Ninja" in generator:
+ args = "-G '" + generator + "' -DCMAKE_MAKE_PROGRAM=ninja"
+ d.appendVar("DEPENDS", " ninja-native")
+ d.setVar("OECMAKE_GENERATOR_ARGS", args)
+ d.setVarFlag("do_compile", "progress", r"outof:^\[(\d+)/(\d+)\]\s+")
+ else:
+ bb.fatal("Unknown CMake Generator %s" % generator)
+}
+OECMAKE_AR ?= "${AR}"
+
+# Compiler flags
+OECMAKE_C_FLAGS ?= "${HOST_CC_ARCH} ${TOOLCHAIN_OPTIONS} ${CFLAGS}"
+OECMAKE_CXX_FLAGS ?= "${HOST_CC_ARCH} ${TOOLCHAIN_OPTIONS} ${CXXFLAGS}"
+OECMAKE_C_FLAGS_RELEASE ?= "-DNDEBUG"
+OECMAKE_CXX_FLAGS_RELEASE ?= "-DNDEBUG"
+OECMAKE_C_LINK_FLAGS ?= "${HOST_CC_ARCH} ${TOOLCHAIN_OPTIONS} ${CPPFLAGS} ${LDFLAGS}"
+OECMAKE_CXX_LINK_FLAGS ?= "${HOST_CC_ARCH} ${TOOLCHAIN_OPTIONS} ${CXXFLAGS} ${LDFLAGS}"
+
+def oecmake_map_compiler(compiler, d):
+ args = d.getVar(compiler).split()
+ if args[0] == "ccache":
+ return args[1], args[0]
+ return args[0], ""
+
+# C/C++ Compiler (without cpu arch/tune arguments)
+OECMAKE_C_COMPILER ?= "${@oecmake_map_compiler('CC', d)[0]}"
+OECMAKE_C_COMPILER_LAUNCHER ?= "${@oecmake_map_compiler('CC', d)[1]}"
+OECMAKE_CXX_COMPILER ?= "${@oecmake_map_compiler('CXX', d)[0]}"
+OECMAKE_CXX_COMPILER_LAUNCHER ?= "${@oecmake_map_compiler('CXX', d)[1]}"
+
+# clear compiler vars for allarch to avoid sig hash difference
+OECMAKE_C_COMPILER:allarch = ""
+OECMAKE_C_COMPILER_LAUNCHER:allarch = ""
+OECMAKE_CXX_COMPILER:allarch = ""
+OECMAKE_CXX_COMPILER_LAUNCHER:allarch = ""
+
+OECMAKE_RPATH ?= ""
+OECMAKE_PERLNATIVE_DIR ??= ""
+OECMAKE_EXTRA_ROOT_PATH ?= ""
+
+OECMAKE_FIND_ROOT_PATH_MODE_PROGRAM = "ONLY"
+
+EXTRA_OECMAKE:append = " ${PACKAGECONFIG_CONFARGS}"
+
+export CMAKE_BUILD_PARALLEL_LEVEL
+CMAKE_BUILD_PARALLEL_LEVEL:task-compile = "${@oe.utils.parallel_make(d, False)}"
+CMAKE_BUILD_PARALLEL_LEVEL:task-install = "${@oe.utils.parallel_make(d, True)}"
+
+OECMAKE_TARGET_COMPILE ?= "all"
+OECMAKE_TARGET_INSTALL ?= "install"
+
+def map_host_os_to_system_name(host_os):
+ if host_os.startswith('darwin'):
+ return 'Darwin'
+ if host_os.startswith('mingw'):
+ return 'Windows'
+ if host_os.startswith('linux'):
+ return 'Linux'
+ return host_os
+
+# CMake expects target architectures in the format of uname(2),
+# which do not always match TARGET_ARCH, so all the necessary
+# conversions should happen here.
+def map_host_arch_to_uname_arch(host_arch):
+ if host_arch == "powerpc":
+ return "ppc"
+ if host_arch == "powerpc64le":
+ return "ppc64le"
+ if host_arch == "powerpc64":
+ return "ppc64"
+ return host_arch
+
+
+cmake_do_generate_toolchain_file() {
+ if [ "${BUILD_SYS}" = "${HOST_SYS}" ]; then
+ cmake_crosscompiling="set( CMAKE_CROSSCOMPILING FALSE )"
+ else
+ cmake_sysroot="set( CMAKE_SYSROOT \"${RECIPE_SYSROOT}\" )"
+ fi
+
+ cat > ${WORKDIR}/toolchain.cmake <<EOF
+# CMake system name must be something like "Linux".
+# This is important for cross-compiling.
+$cmake_crosscompiling
+set( CMAKE_SYSTEM_NAME ${@map_host_os_to_system_name(d.getVar('HOST_OS'))} )
+set( CMAKE_SYSTEM_PROCESSOR ${@map_host_arch_to_uname_arch(d.getVar('HOST_ARCH'))} )
+set( CMAKE_C_COMPILER ${OECMAKE_C_COMPILER} )
+set( CMAKE_CXX_COMPILER ${OECMAKE_CXX_COMPILER} )
+set( CMAKE_C_COMPILER_LAUNCHER ${OECMAKE_C_COMPILER_LAUNCHER} )
+set( CMAKE_CXX_COMPILER_LAUNCHER ${OECMAKE_CXX_COMPILER_LAUNCHER} )
+set( CMAKE_ASM_COMPILER ${OECMAKE_C_COMPILER} )
+find_program( CMAKE_AR ${OECMAKE_AR} DOC "Archiver" REQUIRED )
+
+set( CMAKE_C_FLAGS "${OECMAKE_C_FLAGS}" CACHE STRING "CFLAGS" )
+set( CMAKE_CXX_FLAGS "${OECMAKE_CXX_FLAGS}" CACHE STRING "CXXFLAGS" )
+set( CMAKE_ASM_FLAGS "${OECMAKE_C_FLAGS}" CACHE STRING "ASM FLAGS" )
+set( CMAKE_C_FLAGS_RELEASE "${OECMAKE_C_FLAGS_RELEASE}" CACHE STRING "Additional CFLAGS for release" )
+set( CMAKE_CXX_FLAGS_RELEASE "${OECMAKE_CXX_FLAGS_RELEASE}" CACHE STRING "Additional CXXFLAGS for release" )
+set( CMAKE_ASM_FLAGS_RELEASE "${OECMAKE_C_FLAGS_RELEASE}" CACHE STRING "Additional ASM FLAGS for release" )
+set( CMAKE_C_LINK_FLAGS "${OECMAKE_C_LINK_FLAGS}" CACHE STRING "LDFLAGS" )
+set( CMAKE_CXX_LINK_FLAGS "${OECMAKE_CXX_LINK_FLAGS}" CACHE STRING "LDFLAGS" )
+
+# only search in the paths provided so cmake doesnt pick
+# up libraries and tools from the native build machine
+set( CMAKE_FIND_ROOT_PATH ${STAGING_DIR_HOST} ${STAGING_DIR_NATIVE} ${CROSS_DIR} ${OECMAKE_PERLNATIVE_DIR} ${OECMAKE_EXTRA_ROOT_PATH} ${EXTERNAL_TOOLCHAIN} ${HOSTTOOLS_DIR})
+set( CMAKE_FIND_ROOT_PATH_MODE_PACKAGE ONLY )
+set( CMAKE_FIND_ROOT_PATH_MODE_PROGRAM ${OECMAKE_FIND_ROOT_PATH_MODE_PROGRAM} )
+set( CMAKE_FIND_ROOT_PATH_MODE_LIBRARY ONLY )
+set( CMAKE_FIND_ROOT_PATH_MODE_INCLUDE ONLY )
+set( CMAKE_PROGRAM_PATH "/" )
+
+$cmake_sysroot
+
+# Use qt.conf settings
+set( ENV{QT_CONF_PATH} ${WORKDIR}/qt.conf )
+
+# We need to set the rpath to the correct directory as cmake does not provide any
+# directory as rpath by default
+set( CMAKE_INSTALL_RPATH ${OECMAKE_RPATH} )
+
+# Use RPATHs relative to build directory for reproducibility
+set( CMAKE_BUILD_RPATH_USE_ORIGIN ON )
+
+# Use our cmake modules
+list(APPEND CMAKE_MODULE_PATH "${STAGING_DATADIR}/cmake/Modules/")
+
+# add for non /usr/lib libdir, e.g. /usr/lib64
+set( CMAKE_LIBRARY_PATH ${libdir} ${base_libdir})
+
+# add include dir to implicit includes in case it differs from /usr/include
+list(APPEND CMAKE_C_IMPLICIT_INCLUDE_DIRECTORIES ${includedir})
+list(APPEND CMAKE_CXX_IMPLICIT_INCLUDE_DIRECTORIES ${includedir})
+
+EOF
+}
+
+addtask generate_toolchain_file after do_patch before do_configure
+
+CONFIGURE_FILES = "CMakeLists.txt *.cmake"
+
+do_configure[cleandirs] = "${@d.getVar('B') if d.getVar('S') != d.getVar('B') else ''}"
+
+OECMAKE_ARGS = "\
+ -DCMAKE_INSTALL_PREFIX:PATH=${prefix} \
+ -DCMAKE_INSTALL_BINDIR:PATH=${@os.path.relpath(d.getVar('bindir'), d.getVar('prefix') + '/')} \
+ -DCMAKE_INSTALL_SBINDIR:PATH=${@os.path.relpath(d.getVar('sbindir'), d.getVar('prefix') + '/')} \
+ -DCMAKE_INSTALL_LIBEXECDIR:PATH=${@os.path.relpath(d.getVar('libexecdir'), d.getVar('prefix') + '/')} \
+ -DCMAKE_INSTALL_SYSCONFDIR:PATH=${sysconfdir} \
+ -DCMAKE_INSTALL_SHAREDSTATEDIR:PATH=${@os.path.relpath(d.getVar('sharedstatedir'), d. getVar('prefix') + '/')} \
+ -DCMAKE_INSTALL_LOCALSTATEDIR:PATH=${localstatedir} \
+ -DCMAKE_INSTALL_LIBDIR:PATH=${@os.path.relpath(d.getVar('libdir'), d.getVar('prefix') + '/')} \
+ -DCMAKE_INSTALL_INCLUDEDIR:PATH=${@os.path.relpath(d.getVar('includedir'), d.getVar('prefix') + '/')} \
+ -DCMAKE_INSTALL_DATAROOTDIR:PATH=${@os.path.relpath(d.getVar('datadir'), d.getVar('prefix') + '/')} \
+ -DPYTHON_EXECUTABLE:PATH=${PYTHON} \
+ -DPython_EXECUTABLE:PATH=${PYTHON} \
+ -DPython3_EXECUTABLE:PATH=${PYTHON} \
+ -DLIB_SUFFIX=${@d.getVar('baselib').replace('lib', '')} \
+ -DCMAKE_INSTALL_SO_NO_EXE=0 \
+ -DCMAKE_TOOLCHAIN_FILE:FILEPATH=${WORKDIR}/toolchain.cmake \
+ -DCMAKE_NO_SYSTEM_FROM_IMPORTED=1 \
+ -DCMAKE_EXPORT_NO_PACKAGE_REGISTRY=ON \
+ -DFETCHCONTENT_FULLY_DISCONNECTED=ON \
+ -DCMAKE_EXPORT_COMPILE_COMMANDS:BOOL=ON \
+"
+
+cmake_do_configure() {
+ if [ "${OECMAKE_BUILDPATH}" ]; then
+ bbnote "cmake.bbclass no longer uses OECMAKE_BUILDPATH. The default behaviour is now out-of-tree builds with B=WORKDIR/build."
+ fi
+
+ if [ "${S}" = "${B}" ]; then
+ find ${B} -name CMakeFiles -or -name Makefile -or -name cmake_install.cmake -or -name CMakeCache.txt -delete
+ fi
+
+ # Just like autotools cmake can use a site file to cache result that need generated binaries to run
+ if [ -e ${WORKDIR}/site-file.cmake ] ; then
+ oecmake_sitefile="-C ${WORKDIR}/site-file.cmake"
+ else
+ oecmake_sitefile=
+ fi
+
+ cmake \
+ ${OECMAKE_GENERATOR_ARGS} \
+ $oecmake_sitefile \
+ ${OECMAKE_SOURCEPATH} \
+ ${OECMAKE_ARGS} \
+ ${EXTRA_OECMAKE} \
+ -Wno-dev
+}
+
+# To disable verbose cmake logs for a given recipe or globally config metadata e.g. local.conf
+# add following
+#
+# CMAKE_VERBOSE = ""
+#
+
+CMAKE_VERBOSE ??= "VERBOSE=1"
+
+# Then run do_compile again
+cmake_runcmake_build() {
+ bbnote ${DESTDIR:+DESTDIR=${DESTDIR} }${CMAKE_VERBOSE} cmake --build '${B}' "$@" -- ${EXTRA_OECMAKE_BUILD}
+ eval ${DESTDIR:+DESTDIR=${DESTDIR} }${CMAKE_VERBOSE} cmake --build '${B}' "$@" -- ${EXTRA_OECMAKE_BUILD}
+}
+
+# Install an already-generated project binary tree. Not checking the compile
+# dependencies again is particularly important for SDK use cases.
+cmake_runcmake_install() {
+ bbnote ${DESTDIR:+DESTDIR=${DESTDIR} }${CMAKE_VERBOSE} cmake --install '${B}'
+ eval ${DESTDIR:+DESTDIR=${DESTDIR} }${CMAKE_VERBOSE} cmake --install '${B}'
+}
+
+cmake_do_compile() {
+ cmake_runcmake_build --target ${OECMAKE_TARGET_COMPILE}
+}
+
+cmake_do_install() {
+ if [ "${OECMAKE_TARGET_INSTALL}" = "install" ]; then
+ DESTDIR='${D}' cmake_runcmake_install
+ else
+ # Legacy path which supports also custom install targets
+ DESTDIR='${D}' cmake_runcmake_build --target ${OECMAKE_TARGET_INSTALL}
+ fi
+}
+
+EXPORT_FUNCTIONS do_configure do_compile do_install do_generate_toolchain_file
diff --git a/meta/classes/cml1.bbclass b/meta/classes-recipe/cml1.bbclass
index d319d66ab2..03e5fe6f47 100644
--- a/meta/classes/cml1.bbclass
+++ b/meta/classes-recipe/cml1.bbclass
@@ -1,3 +1,9 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
# returns all the elements from the src uri that are .cfg files
def find_cfgs(d):
sources=src_patches(d, True)
@@ -15,7 +21,6 @@ cml1_do_configure() {
}
EXPORT_FUNCTIONS do_configure
-addtask configure after do_unpack do_patch before do_compile
inherit terminal
@@ -27,10 +32,15 @@ CROSS_CURSES_INC = '-DCURSES_LOC="<curses.h>"'
TERMINFO = "${STAGING_DATADIR_NATIVE}/terminfo"
KCONFIG_CONFIG_COMMAND ??= "menuconfig"
+KCONFIG_CONFIG_ENABLE_MENUCONFIG ??= "true"
KCONFIG_CONFIG_ROOTDIR ??= "${B}"
python do_menuconfig() {
import shutil
+ if not bb.utils.to_boolean(d.getVar("KCONFIG_CONFIG_ENABLE_MENUCONFIG")):
+ bb.fatal("do_menuconfig is disabled, please check KCONFIG_CONFIG_ENABLE_MENUCONFIG variable.")
+ return
+
config = os.path.join(d.getVar('KCONFIG_CONFIG_ROOTDIR'), ".config")
configorig = os.path.join(d.getVar('KCONFIG_CONFIG_ROOTDIR'), ".config.orig")
@@ -48,19 +58,17 @@ python do_menuconfig() {
# ensure that environment variables are overwritten with this tasks 'd' values
d.appendVar("OE_TERMINAL_EXPORTS", " PKG_CONFIG_DIR PKG_CONFIG_PATH PKG_CONFIG_LIBDIR PKG_CONFIG_SYSROOT_DIR")
- oe_terminal("sh -c \"make %s; if [ \\$? -ne 0 ]; then echo 'Command failed.'; printf 'Press any key to continue... '; read r; fi\"" % d.getVar('KCONFIG_CONFIG_COMMAND'),
+ oe_terminal("sh -c 'make %s; if [ \\$? -ne 0 ]; then echo \"Command failed.\"; printf \"Press any key to continue... \"; read r; fi'" % d.getVar('KCONFIG_CONFIG_COMMAND'),
d.getVar('PN') + ' Configuration', d)
- # FIXME this check can be removed when the minimum bitbake version has been bumped
- if hasattr(bb.build, 'write_taint'):
- try:
- newmtime = os.path.getmtime(config)
- except OSError:
- newmtime = 0
+ try:
+ newmtime = os.path.getmtime(config)
+ except OSError:
+ newmtime = 0
- if newmtime > mtime:
- bb.note("Configuration changed, recompile will be forced")
- bb.build.write_taint('do_compile', d)
+ if newmtime > mtime:
+ bb.plain("Changed configuration saved at:\n %s\nRecompile will be forced" % config)
+ bb.build.write_taint('do_compile', d)
}
do_menuconfig[depends] += "ncurses-native:do_populate_sysroot"
do_menuconfig[nostamp] = "1"
@@ -99,3 +107,9 @@ python do_diffconfig() {
do_diffconfig[nostamp] = "1"
do_diffconfig[dirs] = "${KCONFIG_CONFIG_ROOTDIR}"
addtask diffconfig
+
+do_showconfig() {
+ bbplain "Config file written to ${KCONFIG_CONFIG_ROOTDIR}/.config"
+}
+do_showconfig[nostamp] = "1"
+addtask showconfig after do_configure
diff --git a/meta/classes/compress_doc.bbclass b/meta/classes-recipe/compress_doc.bbclass
index 379b6c169e..d603caf858 100644
--- a/meta/classes/compress_doc.bbclass
+++ b/meta/classes-recipe/compress_doc.bbclass
@@ -1,3 +1,9 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
# Compress man pages in ${mandir} and info pages in ${infodir}
#
# 1. The doc will be compressed to gz format by default.
diff --git a/meta/classes/core-image.bbclass b/meta/classes-recipe/core-image.bbclass
index 84fd3eeb38..40fc15cb04 100644
--- a/meta/classes/core-image.bbclass
+++ b/meta/classes-recipe/core-image.bbclass
@@ -1,6 +1,8 @@
# Common code for generating core reference images
#
# Copyright (C) 2007-2011 Linux Foundation
+#
+# SPDX-License-Identifier: MIT
# IMAGE_FEATURES control content of the core reference images
#
@@ -29,6 +31,7 @@
# - allow-empty-password
# - allow-root-login
# - post-install-logging
+# - serial-autologin-root - with 'empty-root-password': autologin 'root' on the serial console
# - dev-pkgs - development packages (headers, etc.) for all installed packages in the rootfs
# - dbg-pkgs - debug symbol packages for all installed packages in the rootfs
# - lic-pkgs - license packages for all installed pacakges in the rootfs, requires
@@ -59,6 +62,10 @@ FEATURE_PACKAGES_hwcodecs = "${MACHINE_HWCODECS}"
# IMAGE_FEATURES_REPLACES_foo = 'bar1 bar2'
# Including image feature foo would replace the image features bar1 and bar2
IMAGE_FEATURES_REPLACES_ssh-server-openssh = "ssh-server-dropbear"
+# Do not install openssh complementary packages if either packagegroup-core-ssh-dropbear or dropbear
+# is installed # to avoid openssh-dropbear conflict
+# see [Yocto #14858] for more information
+PACKAGE_EXCLUDE_COMPLEMENTARY:append = "${@bb.utils.contains_any('PACKAGE_INSTALL', 'packagegroup-core-ssh-dropbear dropbear', ' openssh', '' , d)}"
# IMAGE_FEATURES_CONFLICTS_foo = 'bar1 bar2'
# An error exception would be raised if both image features foo and bar1(or bar2) are included
diff --git a/meta/classes/cpan-base.bbclass b/meta/classes-recipe/cpan-base.bbclass
index 93d11e1bee..1db0a4ded6 100644
--- a/meta/classes/cpan-base.bbclass
+++ b/meta/classes-recipe/cpan-base.bbclass
@@ -1,4 +1,10 @@
#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+#
# cpan-base providers various perl related information needed for building
# cpan modules
#
diff --git a/meta/classes/cpan.bbclass b/meta/classes-recipe/cpan.bbclass
index 18f1b9d575..bb76a5b326 100644
--- a/meta/classes/cpan.bbclass
+++ b/meta/classes-recipe/cpan.bbclass
@@ -1,4 +1,10 @@
#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+#
# This is for perl modules that use the old Makefile.PL build system
#
inherit cpan-base perlnative
diff --git a/meta/classes/cpan_build.bbclass b/meta/classes-recipe/cpan_build.bbclass
index f3fb4666ef..026859b6c7 100644
--- a/meta/classes/cpan_build.bbclass
+++ b/meta/classes-recipe/cpan_build.bbclass
@@ -1,4 +1,10 @@
#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+#
# This is for perl modules that use the new Build.PL build system
#
inherit cpan-base perlnative
diff --git a/meta/classes/cross-canadian.bbclass b/meta/classes-recipe/cross-canadian.bbclass
index a0e9d23836..1670217d69 100644
--- a/meta/classes/cross-canadian.bbclass
+++ b/meta/classes-recipe/cross-canadian.bbclass
@@ -1,4 +1,8 @@
#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
# NOTE - When using this class the user is responsible for ensuring that
# TRANSLATED_TARGET_ARCH is added into PN. This ensures that if the TARGET_ARCH
# is changed, another nativesdk xxx-canadian-cross can be installed
diff --git a/meta/classes/cross.bbclass b/meta/classes-recipe/cross.bbclass
index 9d951076a7..93de9a5274 100644
--- a/meta/classes/cross.bbclass
+++ b/meta/classes-recipe/cross.bbclass
@@ -1,3 +1,9 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
inherit relocatable
# Cross packages are built indirectly via dependency,
diff --git a/meta/classes/crosssdk.bbclass b/meta/classes-recipe/crosssdk.bbclass
index 04aecb694e..824b1bcff4 100644
--- a/meta/classes/crosssdk.bbclass
+++ b/meta/classes-recipe/crosssdk.bbclass
@@ -1,3 +1,9 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
inherit cross
CLASSOVERRIDE = "class-crosssdk"
diff --git a/meta/classes/deploy.bbclass b/meta/classes-recipe/deploy.bbclass
index 7fbffe996b..f56fe98d6d 100644
--- a/meta/classes/deploy.bbclass
+++ b/meta/classes-recipe/deploy.bbclass
@@ -1,3 +1,9 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
DEPLOYDIR = "${WORKDIR}/deploy-${PN}"
SSTATETASKS += "do_deploy"
do_deploy[sstate-inputdirs] = "${DEPLOYDIR}"
diff --git a/meta/classes/devicetree.bbclass b/meta/classes-recipe/devicetree.bbclass
index 2a62ae7bc8..bd50d7fa1d 100644
--- a/meta/classes/devicetree.bbclass
+++ b/meta/classes-recipe/devicetree.bbclass
@@ -1,4 +1,10 @@
-# This bbclass implements device tree compliation for user provided device tree
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+# This bbclass implements device tree compilation for user provided device tree
# sources. The compilation of the device tree sources is the same as the kernel
# device tree compilation process, this includes being able to include sources
# from the kernel such as soc dtsi files or header files such as gpio.h. In
@@ -47,8 +53,10 @@ KERNEL_INCLUDE ??= " \
DT_INCLUDE[doc] = "Search paths to be made available to both the device tree compiler and preprocessor for inclusion."
DT_INCLUDE ?= "${DT_FILES_PATH} ${KERNEL_INCLUDE}"
-DT_FILES_PATH[doc] = "Defaults to source directory, can be used to select dts files that are not in source (e.g. generated)."
+DT_FILES_PATH[doc] = "Path to the directory containing dts files to build. Defaults to source directory."
DT_FILES_PATH ?= "${S}"
+DT_FILES[doc] = "Space-separated list of dts or dtb files (relative to DT_FILES_PATH) to build. If empty, all dts files are built."
+DT_FILES ?= ""
DT_PADDING_SIZE[doc] = "Size of padding on the device tree blob, used as extra space typically for additional properties during boot."
DT_PADDING_SIZE ??= "0x3000"
@@ -119,9 +127,12 @@ def devicetree_compile(dtspath, includes, d):
subprocess.run(dtcargs, check = True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
python devicetree_do_compile() {
+ import re
includes = expand_includes("DT_INCLUDE", d)
+ dtfiles = d.getVar("DT_FILES").split()
+ dtfiles = [ re.sub(r"\.dtbo?$", ".dts", dtfile) for dtfile in dtfiles ]
listpath = d.getVar("DT_FILES_PATH")
- for dts in os.listdir(listpath):
+ for dts in dtfiles or os.listdir(listpath):
dtspath = os.path.join(listpath, dts)
try:
if not(os.path.isfile(dtspath)) or not(dts.endswith(".dts") or devicetree_source_is_overlay(dtspath)):
diff --git a/meta/classes/devupstream.bbclass b/meta/classes-recipe/devupstream.bbclass
index ba6dc4136c..d941763fb7 100644
--- a/meta/classes/devupstream.bbclass
+++ b/meta/classes-recipe/devupstream.bbclass
@@ -1,3 +1,9 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
# Class for use in BBCLASSEXTEND to make it easier to have a single recipe that
# can build both stable tarballs and snapshots from upstream source
# repositories.
@@ -40,7 +46,7 @@ python devupstream_virtclass_handler () {
pv = d.getVar("PV")
proto_marker = "+" + uri.scheme
if proto_marker not in pv and not d.getVar("PV:class-devupstream"):
- d.setVar("PV", pv + proto_marker + "${SRCPV}")
+ d.setVar("PV", pv + proto_marker)
if variant == "native":
pn = d.getVar("PN")
diff --git a/meta/classes-recipe/distro_features_check.bbclass b/meta/classes-recipe/distro_features_check.bbclass
new file mode 100644
index 0000000000..1f2674fd6e
--- /dev/null
+++ b/meta/classes-recipe/distro_features_check.bbclass
@@ -0,0 +1,13 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+# Temporarily provide fallback to the old name of the class
+
+python __anonymous() {
+ bb.warn("distro_features_check.bbclass is deprecated, please use features_check.bbclass instead")
+}
+
+inherit features_check
diff --git a/meta/classes/dos2unix.bbclass b/meta/classes-recipe/dos2unix.bbclass
index 3fc17e2196..18e89b1cf2 100644
--- a/meta/classes/dos2unix.bbclass
+++ b/meta/classes-recipe/dos2unix.bbclass
@@ -1,3 +1,9 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
# Class for use to convert all CRLF line terminators to LF
# provided that some projects are being developed/maintained
# on Windows so they have different line terminators(CRLF) vs
diff --git a/meta/classes/features_check.bbclass b/meta/classes-recipe/features_check.bbclass
index 3ef6b35baa..163a7bc3fc 100644
--- a/meta/classes/features_check.bbclass
+++ b/meta/classes-recipe/features_check.bbclass
@@ -11,6 +11,9 @@
#
# Copyright 2019 (C) Texas Instruments Inc.
# Copyright 2013 (C) O.S. Systems Software LTDA.
+#
+# SPDX-License-Identifier: MIT
+
python () {
if d.getVar('PARSE_ALL_RECIPES', False):
diff --git a/meta/classes/fontcache.bbclass b/meta/classes-recipe/fontcache.bbclass
index 442bfc7392..6f4978369d 100644
--- a/meta/classes/fontcache.bbclass
+++ b/meta/classes-recipe/fontcache.bbclass
@@ -1,4 +1,10 @@
#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+#
# This class will generate the proper postinst/postrm scriptlets for font
# packages.
#
@@ -7,6 +13,7 @@ PACKAGE_WRITE_DEPS += "qemu-native"
inherit qemu
FONT_PACKAGES ??= "${PN}"
+FONT_PACKAGES:class-native = ""
FONT_EXTRA_RDEPENDS ?= "${MLPREFIX}fontconfig-utils"
FONTCONFIG_CACHE_DIR ?= "${localstatedir}/cache/fontconfig"
FONTCONFIG_CACHE_PARAMS ?= "-v"
diff --git a/meta/classes/fs-uuid.bbclass b/meta/classes-recipe/fs-uuid.bbclass
index 9b53dfba7a..e215f06c80 100644
--- a/meta/classes/fs-uuid.bbclass
+++ b/meta/classes-recipe/fs-uuid.bbclass
@@ -1,10 +1,16 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
# Extract UUID from ${ROOTFS}, which must have been built
# by the time that this function gets called. Only works
# on ext file systems and depends on tune2fs.
def get_rootfs_uuid(d):
import subprocess
rootfs = d.getVar('ROOTFS')
- output = subprocess.check_output(['tune2fs', '-l', rootfs])
+ output = subprocess.check_output(['tune2fs', '-l', rootfs], text=True)
for line in output.split('\n'):
if line.startswith('Filesystem UUID:'):
uuid = line.split()[-1]
diff --git a/meta/classes/gconf.bbclass b/meta/classes-recipe/gconf.bbclass
index 9d3668edd3..b81851bc78 100644
--- a/meta/classes/gconf.bbclass
+++ b/meta/classes-recipe/gconf.bbclass
@@ -1,3 +1,9 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
DEPENDS += "gconf"
PACKAGE_WRITE_DEPS += "gconf-native"
diff --git a/meta/classes/gettext.bbclass b/meta/classes-recipe/gettext.bbclass
index f11cb04456..c313885d52 100644
--- a/meta/classes/gettext.bbclass
+++ b/meta/classes-recipe/gettext.bbclass
@@ -1,3 +1,9 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
def gettext_dependencies(d):
if d.getVar('INHIBIT_DEFAULT_DEPS') and not oe.utils.inherits(d, 'cross-canadian'):
return ""
diff --git a/meta/classes-recipe/gi-docgen.bbclass b/meta/classes-recipe/gi-docgen.bbclass
new file mode 100644
index 0000000000..b178d1c387
--- /dev/null
+++ b/meta/classes-recipe/gi-docgen.bbclass
@@ -0,0 +1,32 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+# gi-docgen is a new gnome documentation generator, which
+# seems to be a successor to gtk-doc:
+# https://gitlab.gnome.org/GNOME/gi-docgen
+
+# True if api-documentation and gobject-introspection-data are in DISTRO_FEATURES,
+# and qemu-user is in MACHINE_FEATURES, False otherwise.
+GIDOCGEN_ENABLED ?= "${@bb.utils.contains('DISTRO_FEATURES', 'api-documentation gobject-introspection-data', \
+ bb.utils.contains('MACHINE_FEATURES', 'qemu-usermode', 'True', 'False', d), 'False', d)}"
+
+# When building native recipes, disable gi-docgen, as it is not necessary,
+# pulls in additional dependencies, and makes build times longer
+GIDOCGEN_ENABLED:class-native = "False"
+GIDOCGEN_ENABLED:class-nativesdk = "False"
+
+# meson: default option name to enable/disable gi-docgen. This matches most
+# projects' configuration. In doubts - check meson_options.txt in project's
+# source path.
+GIDOCGEN_MESON_OPTION ?= 'gtk_doc'
+GIDOCGEN_MESON_ENABLE_FLAG ?= 'true'
+GIDOCGEN_MESON_DISABLE_FLAG ?= 'false'
+
+# Auto enable/disable based on GIDOCGEN_ENABLED
+EXTRA_OEMESON:prepend = "-D${GIDOCGEN_MESON_OPTION}=${@bb.utils.contains('GIDOCGEN_ENABLED', 'True', '${GIDOCGEN_MESON_ENABLE_FLAG}', '${GIDOCGEN_MESON_DISABLE_FLAG}', d)} "
+
+DEPENDS:append = "${@' gi-docgen-native gi-docgen' if d.getVar('GIDOCGEN_ENABLED') == 'True' else ''}"
+
diff --git a/meta/classes/gio-module-cache.bbclass b/meta/classes-recipe/gio-module-cache.bbclass
index 021eeb1cf8..d12e03c4a0 100644
--- a/meta/classes/gio-module-cache.bbclass
+++ b/meta/classes-recipe/gio-module-cache.bbclass
@@ -1,3 +1,9 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
PACKAGE_WRITE_DEPS += "qemu-native"
inherit qemu
diff --git a/meta/classes-recipe/github-releases.bbclass b/meta/classes-recipe/github-releases.bbclass
new file mode 100644
index 0000000000..ed83b83731
--- /dev/null
+++ b/meta/classes-recipe/github-releases.bbclass
@@ -0,0 +1,3 @@
+GITHUB_BASE_URI ?= "https://github.com/${BPN}/${BPN}/releases/"
+UPSTREAM_CHECK_URI ?= "${GITHUB_BASE_URI}"
+UPSTREAM_CHECK_REGEX ?= "releases/tag/v?(?P<pver>\d+(\.\d+)+)"
diff --git a/meta/classes/gnomebase.bbclass b/meta/classes-recipe/gnomebase.bbclass
index 9a5bd9a232..74073321b8 100644
--- a/meta/classes/gnomebase.bbclass
+++ b/meta/classes-recipe/gnomebase.bbclass
@@ -1,5 +1,11 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
def gnome_verdir(v):
- return ".".join(v.split(".")[:-1])
+ return ".".join(v.split(".")[:-1]) or v
GNOME_COMPRESS_TYPE ?= "xz"
@@ -21,8 +27,9 @@ FILES:${PN} += "${datadir}/application-registry \
FILES:${PN}-doc += "${datadir}/devhelp"
-GNOMEBASEBUILDCLASS ??= "autotools"
-inherit ${GNOMEBASEBUILDCLASS} pkgconfig
+GNOMEBASEBUILDCLASS ??= "meson"
+inherit pkgconfig
+inherit_defer ${GNOMEBASEBUILDCLASS}
do_install:append() {
rm -rf ${D}${localstatedir}/lib/scrollkeeper/*
diff --git a/meta/classes/go-mod.bbclass b/meta/classes-recipe/go-mod.bbclass
index 674d2434e0..ca3a690d05 100644
--- a/meta/classes/go-mod.bbclass
+++ b/meta/classes-recipe/go-mod.bbclass
@@ -1,6 +1,12 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
# Handle Go Modules support
#
-# When using Go Modules, the the current working directory MUST be at or below
+# When using Go Modules, the current working directory MUST be at or below
# the location of the 'go.mod' file when the go tool is used, and there is no
# way to tell it to look elsewhere. It will automatically look upwards for the
# file, but not downwards.
@@ -18,3 +24,7 @@ inherit go
GO_WORKDIR ?= "${GO_IMPORT}"
do_compile[dirs] += "${B}/src/${GO_WORKDIR}"
+
+export GOMODCACHE = "${B}/.mod"
+
+do_compile[cleandirs] += "${B}/.mod"
diff --git a/meta/classes/go-ptest.bbclass b/meta/classes-recipe/go-ptest.bbclass
index b282ff7374..54fcbb535d 100644
--- a/meta/classes/go-ptest.bbclass
+++ b/meta/classes-recipe/go-ptest.bbclass
@@ -1,3 +1,9 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
inherit go ptest
do_compile_ptest_base() {
diff --git a/meta/classes/go.bbclass b/meta/classes-recipe/go.bbclass
index 1a9a0bc1d4..cc3564c36a 100644
--- a/meta/classes/go.bbclass
+++ b/meta/classes-recipe/go.bbclass
@@ -1,3 +1,9 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
inherit goarch
inherit linuxloader
@@ -31,7 +37,7 @@ GOMIPS:mips:class-target[export] = "1"
DEPENDS_GOLANG:class-target = "virtual/${TUNE_PKGARCH}-go virtual/${TARGET_PREFIX}go-runtime"
DEPENDS_GOLANG:class-native = "go-native"
-DEPENDS_GOLANG:class-nativesdk = "virtual/${TARGET_PREFIX}go-crosssdk virtual/${TARGET_PREFIX}go-runtime"
+DEPENDS_GOLANG:class-nativesdk = "virtual/${TARGET_PREFIX}go virtual/${TARGET_PREFIX}go-runtime"
DEPENDS:append = " ${DEPENDS_GOLANG}"
@@ -45,7 +51,10 @@ GO_LINKMODE ?= ""
GO_LINKMODE:class-nativesdk = "--linkmode=external"
GO_LINKMODE:class-native = "--linkmode=external"
GO_EXTRA_LDFLAGS ?= ""
-GO_LDFLAGS ?= '-ldflags="${GO_RPATH} ${GO_LINKMODE} -I ${@get_linuxloader(d)} ${GO_EXTRA_LDFLAGS} -extldflags '${GO_EXTLDFLAGS}'"'
+GO_LINUXLOADER ?= "-I ${@get_linuxloader(d)}"
+# Use system loader. If uninative is used, the uninative loader will be patched automatically
+GO_LINUXLOADER:class-native = ""
+GO_LDFLAGS ?= '-ldflags="${GO_RPATH} ${GO_LINKMODE} ${GO_LINUXLOADER} ${GO_EXTRA_LDFLAGS} -extldflags '${GO_EXTLDFLAGS}'"'
export GOBUILDFLAGS ?= "-v ${GO_LDFLAGS} -trimpath"
export GOPATH_OMIT_IN_ACTIONID ?= "1"
export GOPTESTBUILDFLAGS ?= "${GOBUILDFLAGS} -c"
@@ -69,6 +78,7 @@ GO_INSTALL_FILTEROUT ?= "${GO_IMPORT}/vendor/"
B = "${WORKDIR}/build"
export GOPATH = "${B}"
export GOENV = "off"
+export GOPROXY ??= "https://proxy.golang.org,direct"
export GOTMPDIR ?= "${WORKDIR}/build-tmp"
GOTMPDIR[vardepvalue] = ""
@@ -123,7 +133,7 @@ go_do_install() {
tar -C ${B} -cf - --exclude-vcs --exclude '*.test' --exclude 'testdata' pkg | \
tar -C ${D}${libdir}/go --no-same-owner -xf -
- if [ -n "`ls ${B}/${GO_BUILD_BINDIR}/`" ]; then
+ if ls ${B}/${GO_BUILD_BINDIR}/* >/dev/null 2>/dev/null ; then
install -d ${D}${bindir}
install -m 0755 ${B}/${GO_BUILD_BINDIR}/* ${D}${bindir}/
fi
diff --git a/meta/classes/goarch.bbclass b/meta/classes-recipe/goarch.bbclass
index 92fec16b82..6899ec28e4 100644
--- a/meta/classes/goarch.bbclass
+++ b/meta/classes-recipe/goarch.bbclass
@@ -1,3 +1,9 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
BUILD_GOOS = "${@go_map_os(d.getVar('BUILD_OS'), d)}"
BUILD_GOARCH = "${@go_map_arch(d.getVar('BUILD_ARCH'), d)}"
BUILD_GOTUPLE = "${BUILD_GOOS}_${BUILD_GOARCH}"
@@ -32,13 +38,13 @@ BASE_GOARM:armv5 = '5'
# Go supports dynamic linking on a limited set of architectures.
# See the supportsDynlink function in go/src/cmd/compile/internal/gc/main.go
GO_DYNLINK = ""
-GO_DYNLINK:arm ?= "1"
-GO_DYNLINK:aarch64 ?= "1"
-GO_DYNLINK:x86 ?= "1"
-GO_DYNLINK:x86-64 ?= "1"
-GO_DYNLINK:powerpc64 ?= "1"
-GO_DYNLINK:powerpc64le ?= "1"
-GO_DYNLINK:class-native ?= ""
+GO_DYNLINK:arm = ""
+GO_DYNLINK:aarch64 = ""
+GO_DYNLINK:x86 = ""
+GO_DYNLINK:x86-64 = ""
+GO_DYNLINK:powerpc64 = ""
+GO_DYNLINK:powerpc64le = ""
+GO_DYNLINK:class-native = ""
GO_DYNLINK:class-nativesdk = ""
# define here because everybody inherits this class
@@ -48,6 +54,7 @@ COMPATIBLE_HOST:linux-muslx32 = "null"
COMPATIBLE_HOST:powerpc = "null"
COMPATIBLE_HOST:powerpc64 = "null"
COMPATIBLE_HOST:mipsarchn32 = "null"
+COMPATIBLE_HOST:riscv32 = "null"
ARM_INSTRUCTION_SET:armv4 = "arm"
ARM_INSTRUCTION_SET:armv5 = "arm"
@@ -61,31 +68,10 @@ SECURITY_NOPIE_CFLAGS ??= ""
CCACHE_DISABLE ?= "1"
def go_map_arch(a, d):
- import re
- if re.match('i.86', a):
- return '386'
- elif a == 'x86_64':
- return 'amd64'
- elif re.match('arm.*', a):
- return 'arm'
- elif re.match('aarch64.*', a):
- return 'arm64'
- elif re.match('mips64el.*', a):
- return 'mips64le'
- elif re.match('mips64.*', a):
- return 'mips64'
- elif a == 'mips':
- return 'mips'
- elif a == 'mipsel':
- return 'mipsle'
- elif re.match('p(pc|owerpc)(64le)', a):
- return 'ppc64le'
- elif re.match('p(pc|owerpc)(64)', a):
- return 'ppc64'
- elif a == 'riscv64':
- return 'riscv64'
- else:
+ arch = oe.go.map_arch(a)
+ if not arch:
raise bb.parse.SkipRecipe("Unsupported CPU architecture: %s" % a)
+ return arch
def go_map_arm(a, d):
if a.startswith("arm"):
diff --git a/meta/classes/gobject-introspection-data.bbclass b/meta/classes-recipe/gobject-introspection-data.bbclass
index 2ef684626a..aa04c70ca6 100644
--- a/meta/classes/gobject-introspection-data.bbclass
+++ b/meta/classes-recipe/gobject-introspection-data.bbclass
@@ -1,3 +1,9 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
# This variable is set to True if gobject-introspection-data is in
# DISTRO_FEATURES and qemu-usermode is in MACHINE_FEATURES, and False otherwise.
#
diff --git a/meta/classes/gobject-introspection.bbclass b/meta/classes-recipe/gobject-introspection.bbclass
index 7bf9feb0d6..d0052cd623 100644
--- a/meta/classes/gobject-introspection.bbclass
+++ b/meta/classes-recipe/gobject-introspection.bbclass
@@ -1,3 +1,9 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
# Inherit this class in recipes to enable building their introspection files
# python3native is inherited to prevent introspection tools being run with
@@ -29,12 +35,10 @@ EXTRA_OEMESON:prepend:class-nativesdk = "${@['', '${GIRMESONBUILD}'][d.getVar('G
# Generating introspection data depends on a combination of native and target
# introspection tools, and qemu to run the target tools.
-DEPENDS:append:class-target = " gobject-introspection gobject-introspection-native qemu-native"
+DEPENDS:append:class-target = " ${@bb.utils.contains('GI_DATA_ENABLED', 'True', 'gobject-introspection qemu-native', '', d)}"
-# Even though introspection is disabled on -native, gobject-introspection package is still
-# needed for m4 macros.
-DEPENDS:append:class-native = " gobject-introspection-native"
-DEPENDS:append:class-nativesdk = " gobject-introspection-native"
+# Even when introspection is disabled, the gobject-introspection package is still needed for m4 macros.
+DEPENDS:append = " gobject-introspection-native"
# This is used by introspection tools to find .gir includes
export XDG_DATA_DIRS = "${STAGING_DATADIR}:${STAGING_LIBDIR}"
@@ -43,7 +47,12 @@ do_configure:prepend:class-target () {
# introspection.m4 pre-packaged with upstream tarballs does not yet
# have our fixes
mkdir -p ${S}/m4
- cp ${STAGING_DIR_TARGET}/${datadir}/aclocal/introspection.m4 ${S}/m4
+ cp ${STAGING_DIR_NATIVE}/${datadir}/aclocal/introspection.m4 ${S}/m4
+}
+
+do_compile:prepend() {
+ # This prevents g-ir-scanner from writing cache data to $HOME
+ export GI_SCANNER_DISABLE_CACHE=1
}
# .typelib files are needed at runtime and so they go to the main package (so
diff --git a/meta/classes/grub-efi-cfg.bbclass b/meta/classes-recipe/grub-efi-cfg.bbclass
index ea21b3de3d..52e85a3bb0 100644
--- a/meta/classes/grub-efi-cfg.bbclass
+++ b/meta/classes-recipe/grub-efi-cfg.bbclass
@@ -1,8 +1,7 @@
# grub-efi.bbclass
# Copyright (c) 2011, Intel Corporation.
-# All rights reserved.
#
-# Released under the MIT license (see packages/COPYING)
+# SPDX-License-Identifier: MIT
# Provide grub-efi specific functions for building bootable images.
diff --git a/meta/classes-recipe/grub-efi.bbclass b/meta/classes-recipe/grub-efi.bbclass
new file mode 100644
index 0000000000..4afd12195f
--- /dev/null
+++ b/meta/classes-recipe/grub-efi.bbclass
@@ -0,0 +1,14 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+inherit grub-efi-cfg
+require conf/image-uefi.conf
+
+efi_populate() {
+ efi_populate_common "$1" grub-efi
+
+ install -m 0644 ${GRUB_CFG} ${DEST}${EFIDIR}/grub.cfg
+}
diff --git a/meta/classes/gsettings.bbclass b/meta/classes-recipe/gsettings.bbclass
index 3fa5bd40b3..adb027ea0a 100644
--- a/meta/classes/gsettings.bbclass
+++ b/meta/classes-recipe/gsettings.bbclass
@@ -1,3 +1,9 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
# A bbclass to handle installed GSettings (glib) schemas, updated the compiled
# form on package install and remove.
#
diff --git a/meta/classes-recipe/gtk-doc.bbclass b/meta/classes-recipe/gtk-doc.bbclass
new file mode 100644
index 0000000000..9d3911966b
--- /dev/null
+++ b/meta/classes-recipe/gtk-doc.bbclass
@@ -0,0 +1,72 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+# Helper class to pull in the right gtk-doc dependencies and configure
+# gtk-doc to enable or disable documentation building (which requries the
+# use of usermode qemu).
+
+# This variable is set to True if api-documentation is in
+# DISTRO_FEATURES and qemu-usermode is in MACHINE_FEATURES, and False otherwise.
+#
+# It should be used in recipes to determine whether gtk-doc based documentation should be built,
+# so that qemu use can be avoided when necessary.
+GTKDOC_ENABLED:class-native = "False"
+GTKDOC_ENABLED ?= "${@bb.utils.contains('DISTRO_FEATURES', 'api-documentation', \
+ bb.utils.contains('MACHINE_FEATURES', 'qemu-usermode', 'True', 'False', d), 'False', d)}"
+
+# meson: default option name to enable/disable gtk-doc. This matches most
+# project's configuration. In doubts - check meson_options.txt in project's
+# source path.
+GTKDOC_MESON_OPTION ?= 'docs'
+GTKDOC_MESON_ENABLE_FLAG ?= 'true'
+GTKDOC_MESON_DISABLE_FLAG ?= 'false'
+
+# Auto enable/disable based on GTKDOC_ENABLED
+EXTRA_OECONF:prepend = "${@bb.utils.contains('GTKDOC_ENABLED', 'True', '--enable-gtk-doc --enable-gtk-doc-html --disable-gtk-doc-pdf', \
+ '--disable-gtk-doc', d)} "
+EXTRA_OEMESON:prepend = "-D${GTKDOC_MESON_OPTION}=${@bb.utils.contains('GTKDOC_ENABLED', 'True', '${GTKDOC_MESON_ENABLE_FLAG}', '${GTKDOC_MESON_DISABLE_FLAG}', d)} "
+
+# Even though gtkdoc is disabled on -native, gtk-doc package is still
+# needed for m4 macros.
+DEPENDS:append = " gtk-doc-native"
+
+export STAGING_DIR_HOST
+
+inherit python3native pkgconfig qemu
+DEPENDS:append = "${@' qemu-native' if d.getVar('GTKDOC_ENABLED') == 'True' else ''}"
+
+do_compile:prepend:class-target () {
+ if [ ${GTKDOC_ENABLED} = True ]; then
+ # Write out a qemu wrapper that will be given to gtkdoc-scangobj so that it
+ # can run target helper binaries through that.
+ qemu_binary="${@qemu_wrapper_cmdline(d, '$STAGING_DIR_HOST', ['\\$GIR_EXTRA_LIBS_PATH','$STAGING_DIR_HOST/${libdir}','$STAGING_DIR_HOST/${base_libdir}'])}"
+ cat > ${B}/gtkdoc-qemuwrapper << EOF
+#!/bin/sh
+# Use a modules directory which doesn't exist so we don't load random things
+# which may then get deleted (or their dependencies) and potentially segfault
+export GIO_MODULE_DIR=${STAGING_LIBDIR}/gio/modules-dummy
+
+GIR_EXTRA_LIBS_PATH=\`find ${B} -name *.so -printf "%h\n"|sort|uniq| tr '\n' ':'\`\$GIR_EXTRA_LIBS_PATH
+GIR_EXTRA_LIBS_PATH=\`find ${B} -name .libs| tr '\n' ':'\`\$GIR_EXTRA_LIBS_PATH
+
+# meson sets this wrongly (only to libs in build-dir), qemu_wrapper_cmdline() and GIR_EXTRA_LIBS_PATH take care of it properly
+unset LD_LIBRARY_PATH
+
+if [ -d ".libs" ]; then
+ $qemu_binary ".libs/\$@"
+else
+ $qemu_binary "\$@"
+fi
+
+if [ \$? -ne 0 ]; then
+ echo "If the above error message is about missing .so libraries, then setting up GIR_EXTRA_LIBS_PATH in the recipe should help."
+ echo "(typically like this: GIR_EXTRA_LIBS_PATH=\"$""{B}/something/.libs\" )"
+ exit 1
+fi
+EOF
+ chmod +x ${B}/gtkdoc-qemuwrapper
+ fi
+}
diff --git a/meta/classes/gtk-icon-cache.bbclass b/meta/classes-recipe/gtk-icon-cache.bbclass
index 6808339b90..9ecb49916c 100644
--- a/meta/classes/gtk-icon-cache.bbclass
+++ b/meta/classes-recipe/gtk-icon-cache.bbclass
@@ -1,9 +1,15 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
FILES:${PN} += "${datadir}/icons/hicolor"
GTKIC_VERSION ??= '3'
GTKPN = "${@ 'gtk4' if d.getVar('GTKIC_VERSION') == '4' else 'gtk+3' }"
-GTKIC_CMD = "${@ 'gtk-update-icon-cache-3.0.0' if d.getVar('GTKIC_VERSION') == '4' else 'gtk4-update-icon-cache' }"
+GTKIC_CMD = "${@ 'gtk4-update-icon-cache' if d.getVar('GTKIC_VERSION') == '4' else 'gtk-update-icon-cache-3.0' }"
#gtk+3/gtk4 require GTK3DISTROFEATURES, DEPENDS on it make all the
#recipes inherit this class require GTK3DISTROFEATURES
diff --git a/meta/classes/gtk-immodules-cache.bbclass b/meta/classes-recipe/gtk-immodules-cache.bbclass
index 2107517540..8fbe1dd1fb 100644
--- a/meta/classes/gtk-immodules-cache.bbclass
+++ b/meta/classes-recipe/gtk-immodules-cache.bbclass
@@ -1,3 +1,9 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
# This class will update the inputmethod module cache for virtual keyboards
#
# Usage: Set GTKIMMODULES_PACKAGES to the packages that needs to update the inputmethod modules
diff --git a/meta/classes-recipe/image-artifact-names.bbclass b/meta/classes-recipe/image-artifact-names.bbclass
new file mode 100644
index 0000000000..bc76ff0e16
--- /dev/null
+++ b/meta/classes-recipe/image-artifact-names.bbclass
@@ -0,0 +1,41 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+##################################################################
+# Specific image creation and rootfs population info.
+##################################################################
+
+IMAGE_BASENAME ?= "${PN}"
+IMAGE_VERSION_SUFFIX ?= "-${DATETIME}"
+IMAGE_VERSION_SUFFIX[vardepsexclude] += "DATETIME SOURCE_DATE_EPOCH"
+IMAGE_NAME ?= "${IMAGE_LINK_NAME}${IMAGE_VERSION_SUFFIX}"
+IMAGE_LINK_NAME ?= "${IMAGE_BASENAME}${IMAGE_MACHINE_SUFFIX}${IMAGE_NAME_SUFFIX}"
+
+# This needs to stay in sync with IMAGE_LINK_NAME, but with INITRAMFS_IMAGE instead of IMAGE_BASENAME
+# and without ${IMAGE_NAME_SUFFIX} which all initramfs images should set to empty
+INITRAMFS_IMAGE_NAME ?= "${@['${INITRAMFS_IMAGE}${IMAGE_MACHINE_SUFFIX}', ''][d.getVar('INITRAMFS_IMAGE') == '']}"
+
+# The default DEPLOY_DIR_IMAGE is ${MACHINE} directory:
+# meta/conf/bitbake.conf:DEPLOY_DIR_IMAGE ?= "${DEPLOY_DIR}/images/${MACHINE}"
+# so many people find it unnecessary to include this suffix to every image
+# stored there, but other people often fetch various images for different
+# MACHINEs to the same downloads directory and then the suffix is very helpful
+# add separate variable for projects to decide which scheme works best for them
+# without understanding the IMAGE_NAME/IMAGE_LINK_NAME structure.
+IMAGE_MACHINE_SUFFIX ??= "-${MACHINE}"
+
+# IMAGE_NAME is the base name for everything produced when building images.
+# The actual image that contains the rootfs has an additional suffix (.rootfs
+# by default) followed by additional suffices which describe the format (.ext4,
+# .ext4.xz, etc.).
+IMAGE_NAME_SUFFIX ??= ".rootfs"
+
+python () {
+ if bb.data.inherits_class('deploy', d) and d.getVar("IMAGE_VERSION_SUFFIX") == "-${DATETIME}":
+ import datetime
+ d.setVar("IMAGE_VERSION_SUFFIX", "-" + datetime.datetime.fromtimestamp(int(d.getVar("SOURCE_DATE_EPOCH")), datetime.timezone.utc).strftime('%Y%m%d%H%M%S'))
+ d.setVarFlag("IMAGE_VERSION_SUFFIX", "vardepvalue", "")
+}
diff --git a/meta/classes-recipe/image-combined-dbg.bbclass b/meta/classes-recipe/image-combined-dbg.bbclass
new file mode 100644
index 0000000000..729313739c
--- /dev/null
+++ b/meta/classes-recipe/image-combined-dbg.bbclass
@@ -0,0 +1,15 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+IMAGE_PREPROCESS_COMMAND:append = " combine_dbg_image"
+
+combine_dbg_image () {
+ if [ "${IMAGE_GEN_DEBUGFS}" = "1" -a -e ${IMAGE_ROOTFS}-dbg ]; then
+ # copy target files into -dbg rootfs, so it can be used for
+ # debug purposes directly
+ tar -C ${IMAGE_ROOTFS} -cf - . | tar -C ${IMAGE_ROOTFS}-dbg -xf -
+ fi
+}
diff --git a/meta/classes/image-container.bbclass b/meta/classes-recipe/image-container.bbclass
index 3d1993576a..d24b030453 100644
--- a/meta/classes/image-container.bbclass
+++ b/meta/classes-recipe/image-container.bbclass
@@ -1,3 +1,9 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
ROOTFS_BOOTSTRAP_INSTALL = ""
IMAGE_TYPES_MASKED += "container"
IMAGE_TYPEDEP:container = "tar.bz2"
diff --git a/meta/classes/image-live.bbclass b/meta/classes-recipe/image-live.bbclass
index 2c948190cf..d2e95ef51c 100644
--- a/meta/classes/image-live.bbclass
+++ b/meta/classes-recipe/image-live.bbclass
@@ -1,5 +1,6 @@
-# Copyright (C) 2004, Advanced Micro Devices, Inc. All Rights Reserved
-# Released under the MIT license (see packages/COPYING)
+# Copyright (C) 2004, Advanced Micro Devices, Inc.
+#
+# SPDX-License-Identifier: MIT
# Creates a bootable image using syslinux, your kernel and an optional
# initrd
@@ -30,14 +31,14 @@ do_bootimg[depends] += "dosfstools-native:do_populate_sysroot \
virtual/kernel:do_deploy \
${MLPREFIX}syslinux:do_populate_sysroot \
syslinux-native:do_populate_sysroot \
- ${@'%s:do_image_%s' % (d.getVar('PN'), d.getVar('LIVE_ROOTFS_TYPE').replace('-', '_')) if d.getVar('ROOTFS') else ''} \
+ ${@'%s:do_image_%s' % (d.getVar('PN'), d.getVar('LIVE_ROOTFS_TYPE').replace('-', '_').split('.')[0]) if d.getVar('ROOTFS') else ''} \
"
LABELS_LIVE ?= "boot install"
ROOT_LIVE ?= "root=/dev/ram0"
INITRD_IMAGE_LIVE ?= "${MLPREFIX}core-image-minimal-initramfs"
-INITRD_LIVE ?= "${DEPLOY_DIR_IMAGE}/${INITRD_IMAGE_LIVE}-${MACHINE}.${INITRAMFS_FSTYPES}"
+INITRD_LIVE ?= "${DEPLOY_DIR_IMAGE}/${INITRD_IMAGE_LIVE}${IMAGE_MACHINE_SUFFIX}.${@d.getVar('INITRAMFS_FSTYPES').split()[0]}"
LIVE_ROOTFS_TYPE ?= "ext4"
ROOTFS ?= "${IMGDEPLOYDIR}/${IMAGE_LINK_NAME}.${LIVE_ROOTFS_TYPE}"
@@ -259,6 +260,5 @@ python do_bootimg() {
bb.build.exec_func('create_symlinks', d)
}
do_bootimg[subimages] = "hddimg iso"
-do_bootimg[imgsuffix] = "."
addtask bootimg before do_image_complete after do_rootfs
diff --git a/meta/classes/image-postinst-intercepts.bbclass b/meta/classes-recipe/image-postinst-intercepts.bbclass
index ed30bbd98d..fc15926384 100644
--- a/meta/classes/image-postinst-intercepts.bbclass
+++ b/meta/classes-recipe/image-postinst-intercepts.bbclass
@@ -1,3 +1,9 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
# Gather existing and candidate postinst intercepts from BBPATH
POSTINST_INTERCEPTS_DIR ?= "${COREBASE}/scripts/postinst-intercepts"
POSTINST_INTERCEPTS_PATHS ?= "${@':'.join('%s/postinst-intercepts' % p for p in '${BBPATH}'.split(':'))}:${POSTINST_INTERCEPTS_DIR}"
diff --git a/meta/classes/image.bbclass b/meta/classes-recipe/image.bbclass
index 2139a7e576..28be6c6362 100644
--- a/meta/classes/image.bbclass
+++ b/meta/classes-recipe/image.bbclass
@@ -1,3 +1,8 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
IMAGE_CLASSES ??= ""
@@ -9,18 +14,19 @@ ROOTFS_BOOTSTRAP_INSTALL = "run-postinsts"
IMGCLASSES = "rootfs_${IMAGE_PKGTYPE} image_types ${IMAGE_CLASSES}"
# Only Linux SDKs support populate_sdk_ext, fall back to populate_sdk_base
# in the non-Linux SDK_OS case, such as mingw32
-IMGCLASSES += "${@['populate_sdk_base', 'populate_sdk_ext']['linux' in d.getVar("SDK_OS")]}"
+inherit populate_sdk_base
+IMGCLASSES += "${@['', 'populate_sdk_ext']['linux' in d.getVar("SDK_OS")]}"
IMGCLASSES += "${@bb.utils.contains_any('IMAGE_FSTYPES', 'live iso hddimg', 'image-live', '', d)}"
IMGCLASSES += "${@bb.utils.contains('IMAGE_FSTYPES', 'container', 'image-container', '', d)}"
IMGCLASSES += "image_types_wic"
IMGCLASSES += "rootfs-postcommands"
IMGCLASSES += "image-postinst-intercepts"
IMGCLASSES += "overlayfs-etc"
-inherit ${IMGCLASSES}
+inherit_defer ${IMGCLASSES}
TOOLCHAIN_TARGET_TASK += "${PACKAGE_INSTALL}"
TOOLCHAIN_TARGET_TASK_ATTEMPTONLY += "${PACKAGE_INSTALL_ATTEMPTONLY}"
-POPULATE_SDK_POST_TARGET_COMMAND += "rootfs_sysroot_relativelinks; "
+POPULATE_SDK_POST_TARGET_COMMAND += "rootfs_sysroot_relativelinks"
LICENSE ?= "MIT"
PACKAGES = ""
@@ -34,7 +40,7 @@ INHIBIT_DEFAULT_DEPS = "1"
# IMAGE_FEATURES may contain any available package group
IMAGE_FEATURES ?= ""
IMAGE_FEATURES[type] = "list"
-IMAGE_FEATURES[validitems] += "debug-tweaks read-only-rootfs read-only-rootfs-delayed-postinsts stateless-rootfs empty-root-password allow-empty-password allow-root-login post-install-logging overlayfs-etc"
+IMAGE_FEATURES[validitems] += "debug-tweaks read-only-rootfs read-only-rootfs-delayed-postinsts stateless-rootfs empty-root-password allow-empty-password allow-root-login serial-autologin-root post-install-logging overlayfs-etc"
# Generate companion debugfs?
IMAGE_GEN_DEBUGFS ?= "0"
@@ -91,6 +97,7 @@ USE_DEPMOD ?= "1"
PID = "${@os.getpid()}"
PACKAGE_ARCH = "${MACHINE_ARCH}"
+SSTATE_ARCHS_TUNEPKG = "${@all_multilib_tune_values(d, 'TUNE_PKGARCH')}"
LDCONFIGDEPEND ?= "ldconfig-native:do_populate_sysroot"
LDCONFIGDEPEND:libc-musl = ""
@@ -115,8 +122,7 @@ def rootfs_command_variables(d):
python () {
variables = rootfs_command_variables(d)
for var in variables:
- if d.getVar(var, False):
- d.setVarFlag(var, 'func', '1')
+ d.setVarFlag(var, 'vardeps', d.getVar(var))
}
def rootfs_variables(d):
@@ -177,8 +183,7 @@ python () {
IMAGE_POSTPROCESS_COMMAND ?= ""
-# some default locales
-IMAGE_LINGUAS ?= "de-de fr-fr en-gb"
+IMAGE_LINGUAS ??= ""
LINGUAS_INSTALL ?= "${@" ".join(map(lambda s: "locale-base-%s" % s, d.getVar('IMAGE_LINGUAS').split()))}"
@@ -198,6 +203,7 @@ fakeroot python do_rootfs () {
from oe.rootfs import create_rootfs
from oe.manifest import create_manifest
import logging
+ import oe.packagedata
logger = d.getVar('BB_TASK_LOGGER', False)
if logger:
@@ -242,9 +248,9 @@ fakeroot python do_rootfs () {
# otherwise, the multilib renaming could step in and squash any fixups that
# may have occurred.
pn = d.getVar('PN')
- runtime_mapping_rename("PACKAGE_INSTALL", pn, d)
- runtime_mapping_rename("PACKAGE_INSTALL_ATTEMPTONLY", pn, d)
- runtime_mapping_rename("BAD_RECOMMENDATIONS", pn, d)
+ oe.packagedata.runtime_mapping_rename("PACKAGE_INSTALL", pn, d)
+ oe.packagedata.runtime_mapping_rename("PACKAGE_INSTALL_ATTEMPTONLY", pn, d)
+ oe.packagedata.runtime_mapping_rename("BAD_RECOMMENDATIONS", pn, d)
# Generate the initial manifest
create_manifest(d)
@@ -314,7 +320,7 @@ fakeroot python do_image_qa () {
except oe.utils.ImageQAFailed as e:
qamsg = qamsg + '\tImage QA function %s failed: %s\n' % (e.name, e.description)
except Exception as e:
- qamsg = qamsg + '\tImage QA function %s failed\n' % cmd
+ qamsg = qamsg + '\tImage QA function %s failed: %s\n' % (cmd, e)
if qamsg:
imgname = d.getVar('IMAGE_NAME')
@@ -441,7 +447,7 @@ python () {
localdata.delVar('DATE')
localdata.delVar('TMPDIR')
localdata.delVar('IMAGE_VERSION_SUFFIX')
- vardepsexclude = (d.getVarFlag('IMAGE_CMD:' + realt, 'vardepsexclude', True) or '').split()
+ vardepsexclude = (d.getVarFlag('IMAGE_CMD:' + realt, 'vardepsexclude') or '').split()
for dep in vardepsexclude:
localdata.delVar(dep)
@@ -475,14 +481,14 @@ python () {
if subimage not in subimages:
subimages.append(subimage)
if type not in alltypes:
- rm_tmp_images.add(localdata.expand("${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type}"))
+ rm_tmp_images.add(localdata.expand("${IMAGE_NAME}.${type}"))
for bt in basetypes[t]:
gen_conversion_cmds(bt)
localdata.setVar('type', realt)
if t not in alltypes:
- rm_tmp_images.add(localdata.expand("${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type}"))
+ rm_tmp_images.add(localdata.expand("${IMAGE_NAME}.${type}"))
else:
subimages.append(realt)
@@ -589,13 +595,12 @@ python create_symlinks() {
manifest_name = d.getVar('IMAGE_MANIFEST')
taskname = d.getVar("BB_CURRENTTASK")
subimages = (d.getVarFlag("do_" + taskname, 'subimages', False) or "").split()
- imgsuffix = d.getVarFlag("do_" + taskname, 'imgsuffix') or d.expand("${IMAGE_NAME_SUFFIX}.")
if not link_name:
return
for type in subimages:
dst = os.path.join(deploy_dir, link_name + "." + type)
- src = img_name + imgsuffix + type
+ src = img_name + "." + type
if os.path.exists(os.path.join(deploy_dir, src)):
bb.note("Creating symlink: %s -> %s" % (dst, src))
if os.path.islink(dst):
@@ -605,7 +610,7 @@ python create_symlinks() {
bb.note("Skipping symlink, source does not exist: %s -> %s" % (dst, src))
}
-MULTILIBRE_ALLOW_REP =. "${base_bindir}|${base_sbindir}|${bindir}|${sbindir}|${libexecdir}|${sysconfdir}|${nonarch_base_libdir}/udev|/lib/modules/[^/]*/modules.*|"
+MULTILIBRE_ALLOW_REP += "${base_bindir} ${base_sbindir} ${bindir} ${sbindir} ${libexecdir} ${sysconfdir} ${nonarch_base_libdir}/udev /lib/modules/[^/]*/modules.*"
MULTILIB_CHECK_FILE = "${WORKDIR}/multilib_check.py"
MULTILIB_TEMP_ROOTFS = "${WORKDIR}/multilib"
@@ -653,8 +658,8 @@ create_merged_usr_symlinks_sdk() {
create_merged_usr_symlinks ${SDK_OUTPUT}${SDKTARGETSYSROOT}
}
-ROOTFS_PREPROCESS_COMMAND += "${@bb.utils.contains('DISTRO_FEATURES', 'usrmerge', 'create_merged_usr_symlinks_rootfs; ', '',d)}"
-POPULATE_SDK_PRE_TARGET_COMMAND += "${@bb.utils.contains('DISTRO_FEATURES', 'usrmerge', 'create_merged_usr_symlinks_sdk; ', '',d)}"
+ROOTFS_PREPROCESS_COMMAND += "${@bb.utils.contains('DISTRO_FEATURES', 'usrmerge', 'create_merged_usr_symlinks_rootfs', '',d)}"
+POPULATE_SDK_PRE_TARGET_COMMAND += "${@bb.utils.contains('DISTRO_FEATURES', 'usrmerge', 'create_merged_usr_symlinks_sdk', '',d)}"
reproducible_final_image_task () {
if [ "$REPRODUCIBLE_TIMESTAMP_ROOTFS" = "" ]; then
@@ -674,6 +679,6 @@ systemd_preset_all () {
fi
}
-IMAGE_PREPROCESS_COMMAND:append = " ${@ 'systemd_preset_all;' if bb.utils.contains('DISTRO_FEATURES', 'systemd', True, False, d) and not bb.utils.contains('IMAGE_FEATURES', 'stateless-rootfs', True, False, d) else ''} reproducible_final_image_task; "
+IMAGE_PREPROCESS_COMMAND:append = " ${@ 'systemd_preset_all' if bb.utils.contains('DISTRO_FEATURES', 'systemd', True, False, d) and not bb.utils.contains('IMAGE_FEATURES', 'stateless-rootfs', True, False, d) else ''} reproducible_final_image_task "
CVE_PRODUCT = ""
diff --git a/meta/classes-recipe/image_types.bbclass b/meta/classes-recipe/image_types.bbclass
new file mode 100644
index 0000000000..913cb8788c
--- /dev/null
+++ b/meta/classes-recipe/image_types.bbclass
@@ -0,0 +1,387 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+# The default aligment of the size of the rootfs is set to 1KiB. In case
+# you're using the SD card emulation of a QEMU system simulator you may
+# set this value to 2048 (2MiB alignment).
+IMAGE_ROOTFS_ALIGNMENT ?= "1"
+
+def imagetypes_getdepends(d):
+ def adddep(depstr, deps):
+ for d in (depstr or "").split():
+ # Add task dependency if not already present
+ if ":" not in d:
+ d += ":do_populate_sysroot"
+ deps.add(d)
+
+ # Take a type in the form of foo.bar.car and split it into the items
+ # needed for the image deps "foo", and the conversion deps ["bar", "car"]
+ def split_types(typestring):
+ types = typestring.split(".")
+ return types[0], types[1:]
+
+ fstypes = set((d.getVar('IMAGE_FSTYPES') or "").split())
+ fstypes |= set((d.getVar('IMAGE_FSTYPES_DEBUGFS') or "").split())
+
+ deprecated = set()
+ deps = set()
+ for typestring in fstypes:
+ basetype, resttypes = split_types(typestring)
+
+ var = "IMAGE_DEPENDS_%s" % basetype
+ if d.getVar(var) is not None:
+ deprecated.add(var)
+
+ for typedepends in (d.getVar("IMAGE_TYPEDEP:%s" % basetype) or "").split():
+ base, rest = split_types(typedepends)
+ resttypes += rest
+
+ var = "IMAGE_DEPENDS_%s" % base
+ if d.getVar(var) is not None:
+ deprecated.add(var)
+
+ for ctype in resttypes:
+ adddep(d.getVar("CONVERSION_DEPENDS_%s" % ctype), deps)
+ adddep(d.getVar("COMPRESS_DEPENDS_%s" % ctype), deps)
+
+ if deprecated:
+ bb.fatal('Deprecated variable(s) found: "%s". '
+ 'Use do_image_<type>[depends] += "<recipe>:<task>" instead' % ', '.join(deprecated))
+
+ # Sort the set so that ordering is consistant
+ return " ".join(sorted(deps))
+
+XZ_COMPRESSION_LEVEL ?= "-6"
+XZ_INTEGRITY_CHECK ?= "crc32"
+
+ZIP_COMPRESSION_LEVEL ?= "-9"
+
+7ZIP_COMPRESSION_LEVEL ?= "9"
+7ZIP_COMPRESSION_METHOD ?= "BZip2"
+7ZIP_EXTENSION ?= "7z"
+
+JFFS2_SUM_EXTRA_ARGS ?= ""
+IMAGE_CMD:jffs2 = "mkfs.jffs2 --root=${IMAGE_ROOTFS} --faketime --output=${IMGDEPLOYDIR}/${IMAGE_NAME}.jffs2 ${EXTRA_IMAGECMD}"
+
+IMAGE_CMD:cramfs = "mkfs.cramfs ${IMAGE_ROOTFS} ${IMGDEPLOYDIR}/${IMAGE_NAME}.cramfs ${EXTRA_IMAGECMD}"
+
+oe_mkext234fs () {
+ fstype=$1
+ extra_imagecmd=""
+
+ if [ $# -gt 1 ]; then
+ shift
+ extra_imagecmd=$@
+ fi
+
+ # If generating an empty image the size of the sparse block should be large
+ # enough to allocate an ext4 filesystem using 4096 bytes per inode, this is
+ # about 60K, so dd needs a minimum count of 60, with bs=1024 (bytes per IO)
+ eval local COUNT=\"0\"
+ eval local MIN_COUNT=\"60\"
+ if [ $ROOTFS_SIZE -lt $MIN_COUNT ]; then
+ eval COUNT=\"$MIN_COUNT\"
+ fi
+ # Create a sparse image block
+ bbdebug 1 Executing "dd if=/dev/zero of=${IMGDEPLOYDIR}/${IMAGE_NAME}.$fstype seek=$ROOTFS_SIZE count=$COUNT bs=1024"
+ dd if=/dev/zero of=${IMGDEPLOYDIR}/${IMAGE_NAME}.$fstype seek=$ROOTFS_SIZE count=$COUNT bs=1024
+ bbdebug 1 "Actual Rootfs size: `du -s ${IMAGE_ROOTFS}`"
+ bbdebug 1 "Actual Partition size: `stat -c '%s' ${IMGDEPLOYDIR}/${IMAGE_NAME}.$fstype`"
+ bbdebug 1 Executing "mkfs.$fstype -F $extra_imagecmd ${IMGDEPLOYDIR}/${IMAGE_NAME}.$fstype -d ${IMAGE_ROOTFS}"
+ mkfs.$fstype -F $extra_imagecmd ${IMGDEPLOYDIR}/${IMAGE_NAME}.$fstype -d ${IMAGE_ROOTFS}
+ # Error codes 0-3 indicate successfull operation of fsck (no errors or errors corrected)
+ fsck.$fstype -pvfD ${IMGDEPLOYDIR}/${IMAGE_NAME}.$fstype || [ $? -le 3 ]
+}
+
+IMAGE_CMD:ext2 = "oe_mkext234fs ext2 ${EXTRA_IMAGECMD}"
+IMAGE_CMD:ext3 = "oe_mkext234fs ext3 ${EXTRA_IMAGECMD}"
+IMAGE_CMD:ext4 = "oe_mkext234fs ext4 ${EXTRA_IMAGECMD}"
+
+MIN_BTRFS_SIZE ?= "16384"
+IMAGE_CMD:btrfs () {
+ size=${ROOTFS_SIZE}
+ if [ ${size} -lt ${MIN_BTRFS_SIZE} ] ; then
+ size=${MIN_BTRFS_SIZE}
+ bbwarn "Rootfs size is too small for BTRFS. Filesystem will be extended to ${size}K"
+ fi
+ dd if=/dev/zero of=${IMGDEPLOYDIR}/${IMAGE_NAME}.btrfs seek=${size} count=0 bs=1024
+ mkfs.btrfs ${EXTRA_IMAGECMD} -r ${IMAGE_ROOTFS} ${IMGDEPLOYDIR}/${IMAGE_NAME}.btrfs
+}
+
+oe_mksquashfs () {
+ local comp=$1
+ local suffix=$2
+
+ # Use the bitbake reproducible timestamp instead of the hardcoded squashfs one
+ export SOURCE_DATE_EPOCH=$(stat -c '%Y' ${IMAGE_ROOTFS})
+ mksquashfs ${IMAGE_ROOTFS} ${IMGDEPLOYDIR}/${IMAGE_NAME}.squashfs${comp:+-}${suffix:-$comp} ${EXTRA_IMAGECMD} -noappend ${comp:+-comp }$comp
+}
+IMAGE_CMD:squashfs = "oe_mksquashfs"
+IMAGE_CMD:squashfs-xz = "oe_mksquashfs xz"
+IMAGE_CMD:squashfs-lzo = "oe_mksquashfs lzo"
+IMAGE_CMD:squashfs-lz4 = "oe_mksquashfs lz4"
+IMAGE_CMD:squashfs-zst = "oe_mksquashfs zstd zst"
+
+IMAGE_CMD:erofs = "mkfs.erofs ${EXTRA_IMAGECMD} ${IMGDEPLOYDIR}/${IMAGE_NAME}.erofs ${IMAGE_ROOTFS}"
+IMAGE_CMD:erofs-lz4 = "mkfs.erofs -zlz4 ${EXTRA_IMAGECMD} ${IMGDEPLOYDIR}/${IMAGE_NAME}.erofs-lz4 ${IMAGE_ROOTFS}"
+IMAGE_CMD:erofs-lz4hc = "mkfs.erofs -zlz4hc ${EXTRA_IMAGECMD} ${IMGDEPLOYDIR}/${IMAGE_NAME}.erofs-lz4hc ${IMAGE_ROOTFS}"
+
+# Note that vfat can't handle all types of files that a real linux file system
+# can (e.g. device files, symlinks, etc.) and therefore it not suitable for all
+# use cases
+oe_mkvfatfs () {
+ mkfs.vfat $@ -C ${IMGDEPLOYDIR}/${IMAGE_NAME}.vfat ${ROOTFS_SIZE}
+ mcopy -i "${IMGDEPLOYDIR}/${IMAGE_NAME}.vfat" -vsmpQ ${IMAGE_ROOTFS}/* ::/
+}
+
+IMAGE_CMD:vfat = "oe_mkvfatfs ${EXTRA_IMAGECMD}"
+
+IMAGE_CMD_TAR ?= "tar"
+# ignore return code 1 "file changed as we read it" as other tasks(e.g. do_image_wic) may be hardlinking rootfs
+IMAGE_CMD:tar = "${IMAGE_CMD_TAR} --sort=name --format=posix --numeric-owner -cf ${IMGDEPLOYDIR}/${IMAGE_NAME}.tar -C ${IMAGE_ROOTFS} . || [ $? -eq 1 ]"
+
+do_image_cpio[cleandirs] += "${WORKDIR}/cpio_append"
+IMAGE_CMD:cpio () {
+ (cd ${IMAGE_ROOTFS} && find . | sort | cpio --reproducible -o -H newc >${IMGDEPLOYDIR}/${IMAGE_NAME}.cpio)
+ # We only need the /init symlink if we're building the real
+ # image. The -dbg image doesn't need it! By being clever
+ # about this we also avoid 'touch' below failing, as it
+ # might be trying to touch /sbin/init on the host since both
+ # the normal and the -dbg image share the same WORKDIR
+ if [ "${IMAGE_BUILDING_DEBUGFS}" != "true" ]; then
+ if [ ! -L ${IMAGE_ROOTFS}/init ] && [ ! -e ${IMAGE_ROOTFS}/init ]; then
+ if [ -L ${IMAGE_ROOTFS}/sbin/init ] || [ -e ${IMAGE_ROOTFS}/sbin/init ]; then
+ ln -sf /sbin/init ${WORKDIR}/cpio_append/init
+ touch -h -r ${IMAGE_ROOTFS}/sbin/init ${WORKDIR}/cpio_append/init
+ else
+ touch -r ${IMAGE_ROOTFS} ${WORKDIR}/cpio_append/init
+ fi
+ (cd ${WORKDIR}/cpio_append && echo ./init | cpio --reproducible -oA -H newc -F ${IMGDEPLOYDIR}/${IMAGE_NAME}.cpio)
+ fi
+ fi
+}
+
+UBI_VOLNAME ?= "${MACHINE}-rootfs"
+UBI_VOLTYPE ?= "dynamic"
+UBI_IMGTYPE ?= "ubifs"
+
+write_ubi_config() {
+ local vname="$1"
+
+ cat <<EOF > ubinize${vname}-${IMAGE_NAME}.cfg
+[ubifs]
+mode=ubi
+image=${IMGDEPLOYDIR}/${IMAGE_NAME}${vname}.${UBI_IMGTYPE}
+vol_id=0
+vol_type=${UBI_VOLTYPE}
+vol_name=${UBI_VOLNAME}
+vol_flags=autoresize
+EOF
+}
+
+multiubi_mkfs() {
+ local mkubifs_args="$1"
+ local ubinize_args="$2"
+
+ # Added prompt error message for ubi and ubifs image creation.
+ if [ -z "$mkubifs_args" ] || [ -z "$ubinize_args" ]; then
+ bbfatal "MKUBIFS_ARGS and UBINIZE_ARGS have to be set, see http://www.linux-mtd.infradead.org/faq/ubifs.html for details"
+ fi
+
+ if [ -z "$3" ]; then
+ local vname=""
+ else
+ local vname="_$3"
+ fi
+ write_ubi_config "${vname}"
+
+ if [ -n "$vname" ]; then
+ mkfs.ubifs -r ${IMAGE_ROOTFS} -o ${IMGDEPLOYDIR}/${IMAGE_NAME}${vname}.ubifs ${mkubifs_args}
+ fi
+ ubinize -o ${IMGDEPLOYDIR}/${IMAGE_NAME}${vname}.ubi ${ubinize_args} ubinize${vname}-${IMAGE_NAME}.cfg
+
+ # Cleanup cfg file
+ mv ubinize${vname}-${IMAGE_NAME}.cfg ${IMGDEPLOYDIR}/
+
+ # Create own symlinks for 'named' volumes
+ if [ -n "$vname" ]; then
+ cd ${IMGDEPLOYDIR}
+ if [ -e ${IMAGE_NAME}${vname}.ubifs ]; then
+ ln -sf ${IMAGE_NAME}${vname}.ubifs \
+ ${IMAGE_LINK_NAME}${vname}.ubifs
+ fi
+ if [ -e ${IMAGE_NAME}${vname}.ubi ]; then
+ ln -sf ${IMAGE_NAME}${vname}.ubi \
+ ${IMAGE_LINK_NAME}${vname}.ubi
+ fi
+ cd -
+ fi
+}
+
+MULTIUBI_ARGS = "MKUBIFS_ARGS UBINIZE_ARGS"
+
+IMAGE_CMD:multiubi () {
+ ${@' '.join(['%s_%s="%s";' % (arg, name, d.getVar('%s_%s' % (arg, name))) for arg in d.getVar('MULTIUBI_ARGS').split() for name in d.getVar('MULTIUBI_BUILD').split()])}
+ # Split MKUBIFS_ARGS_<name> and UBINIZE_ARGS_<name>
+ for name in ${MULTIUBI_BUILD}; do
+ eval local mkubifs_args=\"\$MKUBIFS_ARGS_${name}\"
+ eval local ubinize_args=\"\$UBINIZE_ARGS_${name}\"
+
+ multiubi_mkfs "${mkubifs_args}" "${ubinize_args}" "${name}"
+ done
+}
+
+IMAGE_CMD:ubi () {
+ multiubi_mkfs "${MKUBIFS_ARGS}" "${UBINIZE_ARGS}"
+}
+IMAGE_TYPEDEP:ubi = "${UBI_IMGTYPE}"
+
+IMAGE_CMD:ubifs = "mkfs.ubifs -r ${IMAGE_ROOTFS} -o ${IMGDEPLOYDIR}/${IMAGE_NAME}.ubifs ${MKUBIFS_ARGS}"
+
+MIN_F2FS_SIZE ?= "524288"
+IMAGE_CMD:f2fs () {
+ # We need to add additional smarts here form devices smaller than 1.5G
+ # Need to scale appropriately between 40M -> 1.5G as the "overprovision
+ # ratio" goes down as the device gets bigger (70% -> 4.5%), below about
+ # 500M the standard IMAGE_OVERHEAD_FACTOR does not work, so add additional
+ # space here when under 500M
+ size=${ROOTFS_SIZE}
+ if [ ${size} -lt ${MIN_F2FS_SIZE} ] ; then
+ size=${MIN_F2FS_SIZE}
+ bbwarn "Rootfs size is too small for F2FS. Filesystem will be extended to ${size}K"
+ fi
+ dd if=/dev/zero of=${IMGDEPLOYDIR}/${IMAGE_NAME}.f2fs seek=${size} count=0 bs=1024
+ mkfs.f2fs ${EXTRA_IMAGECMD} ${IMGDEPLOYDIR}/${IMAGE_NAME}.f2fs
+ sload.f2fs -f ${IMAGE_ROOTFS} ${IMGDEPLOYDIR}/${IMAGE_NAME}.f2fs
+}
+
+EXTRA_IMAGECMD = ""
+
+inherit siteinfo kernel-arch image-artifact-names
+
+JFFS2_ENDIANNESS ?= "${@oe.utils.conditional('SITEINFO_ENDIANNESS', 'le', '-l', '-b', d)}"
+JFFS2_ERASEBLOCK ?= "0x40000"
+EXTRA_IMAGECMD:jffs2 ?= "--pad ${JFFS2_ENDIANNESS} --eraseblock=${JFFS2_ERASEBLOCK} --no-cleanmarkers"
+
+# Change these if you want default mkfs behavior (i.e. create minimal inode number)
+EXTRA_IMAGECMD:ext2 ?= "-i 4096"
+EXTRA_IMAGECMD:ext3 ?= "-i 4096"
+EXTRA_IMAGECMD:ext4 ?= "-i 4096"
+EXTRA_IMAGECMD:btrfs ?= "-n 4096 --shrink"
+EXTRA_IMAGECMD:f2fs ?= ""
+
+# If a specific FAT size is needed, set it here (e.g. "-F 32"/"-F 16"/"-F 12")
+# otherwise mkfs.vfat will automatically pick one.
+EXTRA_IMAGECMD:vfat ?= ""
+
+do_image_cpio[depends] += "cpio-native:do_populate_sysroot"
+do_image_jffs2[depends] += "mtd-utils-native:do_populate_sysroot"
+do_image_cramfs[depends] += "util-linux-native:do_populate_sysroot"
+do_image_ext2[depends] += "e2fsprogs-native:do_populate_sysroot"
+do_image_ext3[depends] += "e2fsprogs-native:do_populate_sysroot"
+do_image_ext4[depends] += "e2fsprogs-native:do_populate_sysroot"
+do_image_btrfs[depends] += "btrfs-tools-native:do_populate_sysroot"
+do_image_squashfs[depends] += "squashfs-tools-native:do_populate_sysroot"
+do_image_squashfs_xz[depends] += "squashfs-tools-native:do_populate_sysroot"
+do_image_squashfs_lzo[depends] += "squashfs-tools-native:do_populate_sysroot"
+do_image_squashfs_lz4[depends] += "squashfs-tools-native:do_populate_sysroot"
+do_image_squashfs_zst[depends] += "squashfs-tools-native:do_populate_sysroot"
+do_image_ubi[depends] += "mtd-utils-native:do_populate_sysroot"
+do_image_ubifs[depends] += "mtd-utils-native:do_populate_sysroot"
+do_image_multiubi[depends] += "mtd-utils-native:do_populate_sysroot"
+do_image_f2fs[depends] += "f2fs-tools-native:do_populate_sysroot"
+do_image_erofs[depends] += "erofs-utils-native:do_populate_sysroot"
+do_image_erofs_lz4[depends] += "erofs-utils-native:do_populate_sysroot"
+do_image_erofs_lz4hc[depends] += "erofs-utils-native:do_populate_sysroot"
+do_image_vfat[depends] += "dosfstools-native:do_populate_sysroot mtools-native:do_populate_sysroot"
+
+# This variable is available to request which values are suitable for IMAGE_FSTYPES
+IMAGE_TYPES = " \
+ jffs2 jffs2.sum \
+ cramfs \
+ ext2 ext2.gz ext2.bz2 ext2.lzma \
+ ext3 ext3.gz \
+ ext4 ext4.gz \
+ btrfs \
+ vfat \
+ squashfs squashfs-xz squashfs-lzo squashfs-lz4 squashfs-zst \
+ ubi ubifs multiubi \
+ tar tar.gz tar.bz2 tar.xz tar.lz4 tar.zst \
+ cpio cpio.gz cpio.xz cpio.lzma cpio.lz4 cpio.zst \
+ wic wic.gz wic.bz2 wic.lzma wic.zst \
+ container \
+ f2fs \
+ erofs erofs-lz4 erofs-lz4hc \
+"
+# These image types are x86 specific as they need syslinux
+IMAGE_TYPES:append:x86 = " hddimg iso"
+IMAGE_TYPES:append:x86-64 = " hddimg iso"
+
+# Compression is a special case of conversion. The old variable
+# names are still supported for backward-compatibility. When defining
+# new compression or conversion commands, use CONVERSIONTYPES and
+# CONVERSION_CMD/DEPENDS.
+COMPRESSIONTYPES ?= ""
+
+CONVERSIONTYPES = "gz bz2 lzma xz lz4 lzo zip 7zip zst sum md5sum sha1sum sha224sum sha256sum sha384sum sha512sum bmap u-boot vmdk vhd vhdx vdi qcow2 base64 gzsync zsync ${COMPRESSIONTYPES}"
+CONVERSION_CMD:lzma = "lzma -k -f -7 ${IMAGE_NAME}.${type}"
+CONVERSION_CMD:gz = "gzip -f -9 -n -c --rsyncable ${IMAGE_NAME}.${type} > ${IMAGE_NAME}.${type}.gz"
+CONVERSION_CMD:bz2 = "pbzip2 -f -k ${IMAGE_NAME}.${type}"
+CONVERSION_CMD:xz = "xz -f -k -c ${XZ_COMPRESSION_LEVEL} ${XZ_DEFAULTS} --check=${XZ_INTEGRITY_CHECK} ${IMAGE_NAME}.${type} > ${IMAGE_NAME}.${type}.xz"
+CONVERSION_CMD:lz4 = "lz4 -9 -z -l ${IMAGE_NAME}.${type} ${IMAGE_NAME}.${type}.lz4"
+CONVERSION_CMD:lzo = "lzop -9 ${IMAGE_NAME}.${type}"
+CONVERSION_CMD:zip = "zip ${ZIP_COMPRESSION_LEVEL} ${IMAGE_NAME}.${type}.zip ${IMAGE_NAME}.${type}"
+CONVERSION_CMD:7zip = "7za a -mx=${7ZIP_COMPRESSION_LEVEL} -mm=${7ZIP_COMPRESSION_METHOD} ${IMAGE_NAME}.${type}.${7ZIP_EXTENSION} ${IMAGE_NAME}.${type}"
+CONVERSION_CMD:zst = "zstd -f -k -c ${ZSTD_DEFAULTS} ${IMAGE_NAME}.${type} > ${IMAGE_NAME}.${type}.zst"
+CONVERSION_CMD:sum = "sumtool -i ${IMAGE_NAME}.${type} -o ${IMAGE_NAME}.${type}.sum ${JFFS2_SUM_EXTRA_ARGS}"
+CONVERSION_CMD:md5sum = "md5sum ${IMAGE_NAME}.${type} > ${IMAGE_NAME}.${type}.md5sum"
+CONVERSION_CMD:sha1sum = "sha1sum ${IMAGE_NAME}.${type} > ${IMAGE_NAME}.${type}.sha1sum"
+CONVERSION_CMD:sha224sum = "sha224sum ${IMAGE_NAME}.${type} > ${IMAGE_NAME}.${type}.sha224sum"
+CONVERSION_CMD:sha256sum = "sha256sum ${IMAGE_NAME}.${type} > ${IMAGE_NAME}.${type}.sha256sum"
+CONVERSION_CMD:sha384sum = "sha384sum ${IMAGE_NAME}.${type} > ${IMAGE_NAME}.${type}.sha384sum"
+CONVERSION_CMD:sha512sum = "sha512sum ${IMAGE_NAME}.${type} > ${IMAGE_NAME}.${type}.sha512sum"
+CONVERSION_CMD:bmap = "bmaptool create ${IMAGE_NAME}.${type} -o ${IMAGE_NAME}.${type}.bmap"
+CONVERSION_CMD:u-boot = "mkimage -A ${UBOOT_ARCH} -O linux -T ramdisk -C none -n ${IMAGE_NAME} -d ${IMAGE_NAME}.${type} ${IMAGE_NAME}.${type}.u-boot"
+CONVERSION_CMD:vmdk = "qemu-img convert -O vmdk ${IMAGE_NAME}.${type} ${IMAGE_NAME}.${type}.vmdk"
+CONVERSION_CMD:vhdx = "qemu-img convert -O vhdx -o subformat=dynamic ${IMAGE_NAME}.${type} ${IMAGE_NAME}.${type}.vhdx"
+CONVERSION_CMD:vhd = "qemu-img convert -O vpc -o subformat=fixed ${IMAGE_NAME}.${type} ${IMAGE_NAME}.${type}.vhd"
+CONVERSION_CMD:vdi = "qemu-img convert -O vdi ${IMAGE_NAME}.${type} ${IMAGE_NAME}.${type}.vdi"
+CONVERSION_CMD:qcow2 = "qemu-img convert -O qcow2 ${IMAGE_NAME}.${type} ${IMAGE_NAME}.${type}.qcow2"
+CONVERSION_CMD:base64 = "base64 ${IMAGE_NAME}.${type} > ${IMAGE_NAME}.${type}.base64"
+CONVERSION_CMD:zsync = "zsyncmake_curl ${IMAGE_NAME}.${type}"
+CONVERSION_CMD:gzsync = "zsyncmake_curl -z ${IMAGE_NAME}.${type}"
+CONVERSION_DEPENDS_lzma = "xz-native"
+CONVERSION_DEPENDS_gz = "pigz-native"
+CONVERSION_DEPENDS_bz2 = "pbzip2-native"
+CONVERSION_DEPENDS_xz = "xz-native"
+CONVERSION_DEPENDS_lz4 = "lz4-native"
+CONVERSION_DEPENDS_lzo = "lzop-native"
+CONVERSION_DEPENDS_zip = "zip-native"
+CONVERSION_DEPENDS_7zip = "p7zip-native"
+CONVERSION_DEPENDS_zst = "zstd-native"
+CONVERSION_DEPENDS_sum = "mtd-utils-native"
+CONVERSION_DEPENDS_bmap = "bmaptool-native"
+CONVERSION_DEPENDS_u-boot = "u-boot-tools-native"
+CONVERSION_DEPENDS_vmdk = "qemu-system-native"
+CONVERSION_DEPENDS_vdi = "qemu-system-native"
+CONVERSION_DEPENDS_qcow2 = "qemu-system-native"
+CONVERSION_DEPENDS_base64 = "coreutils-native"
+CONVERSION_DEPENDS_vhdx = "qemu-system-native"
+CONVERSION_DEPENDS_vhd = "qemu-system-native"
+CONVERSION_DEPENDS_zsync = "zsync-curl-native"
+CONVERSION_DEPENDS_gzsync = "zsync-curl-native"
+
+RUNNABLE_IMAGE_TYPES ?= "ext2 ext3 ext4"
+RUNNABLE_MACHINE_PATTERNS ?= "qemu"
+
+DEPLOYABLE_IMAGE_TYPES ?= "hddimg iso"
+
+# The IMAGE_TYPES_MASKED variable will be used to mask out from the IMAGE_FSTYPES,
+# images that will not be built at do_rootfs time: vmdk, vhd, vhdx, vdi, qcow2, hddimg, iso, etc.
+IMAGE_TYPES_MASKED ?= ""
+
+# bmap requires python3 to be in the PATH
+EXTRANATIVEPATH += "${@'python3-native' if d.getVar('IMAGE_FSTYPES').find('.bmap') else ''}"
diff --git a/meta/classes/image_types_wic.bbclass b/meta/classes-recipe/image_types_wic.bbclass
index 79f8d93b94..cf3be909b3 100644
--- a/meta/classes/image_types_wic.bbclass
+++ b/meta/classes-recipe/image_types_wic.bbclass
@@ -1,3 +1,9 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
# The WICVARS variable is used to define list of bitbake variables used in wic code
# variables from this list is written to <image>.env file
WICVARS ?= "\
@@ -32,7 +38,7 @@ WICVARS ?= "\
TARGET_SYS \
"
-inherit ${@bb.utils.contains('INITRAMFS_IMAGE_BUNDLE', '1', 'kernel-artifact-names', '', d)}
+inherit_defer ${@bb.utils.contains('INITRAMFS_IMAGE_BUNDLE', '1', 'kernel-artifact-names', '', d)}
WKS_FILE ??= "${IMAGE_BASENAME}.${MACHINE}.wks"
WKS_FILES ?= "${WKS_FILE} ${IMAGE_BASENAME}.wks"
@@ -65,7 +71,24 @@ IMAGE_CMD:wic () {
bbfatal "No kickstart files from WKS_FILES were found: ${WKS_FILES}. Please set WKS_FILE or WKS_FILES appropriately."
fi
BUILDDIR="${TOPDIR}" PSEUDO_UNLOAD=1 wic create "$wks" --vars "${STAGING_DIR}/${MACHINE}/imgdata/" -e "${IMAGE_BASENAME}" -o "$build_wic/" -w "$tmp_wic" ${WIC_CREATE_EXTRA_ARGS}
- mv "$build_wic/$(basename "${wks%.wks}")"*.direct "$out${IMAGE_NAME_SUFFIX}.wic"
+
+ # look to see if the user specifies a custom imager
+ IMAGER=direct
+ eval set -- "${WIC_CREATE_EXTRA_ARGS} --"
+ while [ 1 ]; do
+ case "$1" in
+ --imager|-i)
+ shift
+ IMAGER=$1
+ ;;
+ --)
+ shift
+ break
+ ;;
+ esac
+ shift
+ done
+ mv "$build_wic/$(basename "${wks%.wks}")"*.${IMAGER} "$out.wic"
}
IMAGE_CMD:wic[vardepsexclude] = "WKS_FULL_PATH WKS_FILES TOPDIR"
do_image_wic[cleandirs] = "${WORKDIR}/build-wic"
@@ -83,7 +106,9 @@ do_image_wic[recrdeptask] += "do_deploy"
do_image_wic[deptask] += "do_image_complete"
WKS_FILE_DEPENDS_DEFAULT = '${@bb.utils.contains_any("BUILD_ARCH", [ 'x86_64', 'i686' ], "syslinux-native", "",d)}'
-WKS_FILE_DEPENDS_DEFAULT += "bmap-tools-native cdrtools-native btrfs-tools-native squashfs-tools-native e2fsprogs-native erofs-utils-native"
+WKS_FILE_DEPENDS_DEFAULT += "bmaptool-native cdrtools-native btrfs-tools-native squashfs-tools-native e2fsprogs-native erofs-utils-native"
+# Unified kernel images need objcopy
+WKS_FILE_DEPENDS_DEFAULT += "virtual/${TARGET_PREFIX}binutils"
WKS_FILE_DEPENDS_BOOTLOADERS = ""
WKS_FILE_DEPENDS_BOOTLOADERS:x86 = "syslinux grub-efi systemd-boot os-release"
WKS_FILE_DEPENDS_BOOTLOADERS:x86-64 = "syslinux grub-efi systemd-boot os-release"
diff --git a/meta/classes/kernel-arch.bbclass b/meta/classes-recipe/kernel-arch.bbclass
index 07ec242e63..b32f6137a2 100644
--- a/meta/classes/kernel-arch.bbclass
+++ b/meta/classes-recipe/kernel-arch.bbclass
@@ -1,4 +1,10 @@
#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+#
# set the ARCH environment variable for kernel compilation (including
# modules). return value must match one of the architecture directories
# in the kernel source "arch" directory
@@ -13,6 +19,7 @@ valid_archs = "alpha cris ia64 \
sh sh64 um h8300 \
parisc s390 v850 \
avr32 blackfin \
+ loongarch64 \
microblaze \
nios2 arc riscv xtensa"
@@ -28,6 +35,7 @@ def map_kernel_arch(a, d):
elif re.match('aarch64_be$', a): return 'arm64'
elif re.match('aarch64_ilp32$', a): return 'arm64'
elif re.match('aarch64_be_ilp32$', a): return 'arm64'
+ elif re.match('loongarch(32|64|)$', a): return 'loongarch'
elif re.match('mips(isa|)(32|64|)(r6|)(el|)$', a): return 'mips'
elif re.match('mcf', a): return 'm68k'
elif re.match('riscv(32|64|)(eb|)$', a): return 'riscv'
@@ -60,9 +68,13 @@ TARGET_LD_KERNEL_ARCH ?= ""
HOST_LD_KERNEL_ARCH ?= "${TARGET_LD_KERNEL_ARCH}"
TARGET_AR_KERNEL_ARCH ?= ""
HOST_AR_KERNEL_ARCH ?= "${TARGET_AR_KERNEL_ARCH}"
+TARGET_OBJCOPY_KERNEL_ARCH ?= ""
+HOST_OBJCOPY_KERNEL_ARCH ?= "${TARGET_OBJCOPY_KERNEL_ARCH}"
-KERNEL_CC = "${CCACHE}${HOST_PREFIX}gcc ${HOST_CC_KERNEL_ARCH} -fuse-ld=bfd ${DEBUG_PREFIX_MAP} -fdebug-prefix-map=${STAGING_KERNEL_DIR}=${KERNEL_SRC_PATH}"
-KERNEL_LD = "${CCACHE}${HOST_PREFIX}ld.bfd ${HOST_LD_KERNEL_ARCH}"
-KERNEL_AR = "${CCACHE}${HOST_PREFIX}ar ${HOST_AR_KERNEL_ARCH}"
-TOOLCHAIN = "gcc"
-
+KERNEL_CC = "${CCACHE}${HOST_PREFIX}gcc ${HOST_CC_KERNEL_ARCH} -fuse-ld=bfd ${DEBUG_PREFIX_MAP} -fdebug-prefix-map=${STAGING_KERNEL_DIR}=${KERNEL_SRC_PATH} -fdebug-prefix-map=${STAGING_KERNEL_BUILDDIR}=${KERNEL_SRC_PATH}"
+KERNEL_LD = "${HOST_PREFIX}ld.bfd ${HOST_LD_KERNEL_ARCH}"
+KERNEL_AR = "${HOST_PREFIX}ar ${HOST_AR_KERNEL_ARCH}"
+KERNEL_OBJCOPY = "${HOST_PREFIX}objcopy ${HOST_OBJCOPY_KERNEL_ARCH}"
+# Code in package.py can't handle options on KERNEL_STRIP
+KERNEL_STRIP = "${HOST_PREFIX}strip"
+TOOLCHAIN ?= "gcc"
diff --git a/meta/classes/kernel-artifact-names.bbclass b/meta/classes-recipe/kernel-artifact-names.bbclass
index e77107c893..1a7611a15e 100644
--- a/meta/classes/kernel-artifact-names.bbclass
+++ b/meta/classes-recipe/kernel-artifact-names.bbclass
@@ -1,3 +1,9 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
##################################################################
# Specific kernel creation info
# for recipes/bbclasses which need to reuse some of the kernel
@@ -6,7 +12,7 @@
inherit image-artifact-names
-KERNEL_ARTIFACT_NAME ?= "${PKGE}-${PKGV}-${PKGR}-${MACHINE}${IMAGE_VERSION_SUFFIX}"
+KERNEL_ARTIFACT_NAME ?= "${PKGE}-${PKGV}-${PKGR}${IMAGE_MACHINE_SUFFIX}${IMAGE_VERSION_SUFFIX}"
KERNEL_ARTIFACT_LINK_NAME ?= "${MACHINE}"
KERNEL_ARTIFACT_BIN_EXT ?= ".bin"
diff --git a/meta/classes-recipe/kernel-devicetree.bbclass b/meta/classes-recipe/kernel-devicetree.bbclass
new file mode 100644
index 0000000000..eff052b402
--- /dev/null
+++ b/meta/classes-recipe/kernel-devicetree.bbclass
@@ -0,0 +1,139 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+# Support for device tree generation
+python () {
+ if not bb.data.inherits_class('nopackages', d):
+ d.appendVar("PACKAGES", " ${KERNEL_PACKAGE_NAME}-devicetree")
+ if d.getVar('KERNEL_DEVICETREE_BUNDLE') == '1':
+ d.appendVar("PACKAGES", " ${KERNEL_PACKAGE_NAME}-image-zimage-bundle")
+}
+
+# recursivly search for devicetree files
+FILES:${KERNEL_PACKAGE_NAME}-devicetree = " \
+ /${KERNEL_DTBDEST}/**/*.dtb \
+ /${KERNEL_DTBDEST}/**/*.dtbo \
+"
+
+FILES:${KERNEL_PACKAGE_NAME}-image-zimage-bundle = "/${KERNEL_IMAGEDEST}/zImage-*.dtb.bin"
+
+# Generate kernel+devicetree bundle
+KERNEL_DEVICETREE_BUNDLE ?= "0"
+
+# dtc flags passed via DTC_FLAGS env variable
+KERNEL_DTC_FLAGS ?= ""
+
+normalize_dtb () {
+ dtb="$1"
+ if echo $dtb | grep -q '/dts/'; then
+ bbwarn "$dtb contains the full path to the the dts file, but only the dtb name should be used."
+ dtb=`basename $dtb | sed 's,\.dts$,.dtb,g'`
+ fi
+ echo "$dtb"
+}
+
+get_real_dtb_path_in_kernel () {
+ dtb="$1"
+ dtb_path="${B}/arch/${ARCH}/boot/dts/$dtb"
+ if [ ! -e "$dtb_path" ]; then
+ dtb_path="${B}/arch/${ARCH}/boot/$dtb"
+ fi
+ echo "$dtb_path"
+}
+
+do_configure:append() {
+ if [ "${KERNEL_DEVICETREE_BUNDLE}" = "1" ]; then
+ if echo ${KERNEL_IMAGETYPE_FOR_MAKE} | grep -q 'zImage'; then
+ case "${ARCH}" in
+ "arm")
+ config="${B}/.config"
+ if ! grep -q 'CONFIG_ARM_APPENDED_DTB=y' $config; then
+ bbwarn 'CONFIG_ARM_APPENDED_DTB is NOT enabled in the kernel. Enabling it to allow the kernel to boot with the Device Tree appended!'
+ sed -i "/CONFIG_ARM_APPENDED_DTB[ =]/d" $config
+ echo "CONFIG_ARM_APPENDED_DTB=y" >> $config
+ echo "# CONFIG_ARM_ATAG_DTB_COMPAT is not set" >> $config
+ fi
+ ;;
+ *)
+ bberror "KERNEL_DEVICETREE_BUNDLE is not supported for ${ARCH}. Currently it is only supported for 'ARM'."
+ esac
+ else
+ bberror 'The KERNEL_DEVICETREE_BUNDLE requires the KERNEL_IMAGETYPE to contain zImage.'
+ fi
+ fi
+}
+
+do_compile:append() {
+ if [ -n "${KERNEL_DTC_FLAGS}" ]; then
+ export DTC_FLAGS="${KERNEL_DTC_FLAGS}"
+ fi
+
+ for dtbf in ${KERNEL_DEVICETREE}; do
+ dtb=`normalize_dtb "$dtbf"`
+ oe_runmake $dtb CC="${KERNEL_CC} $cc_extra " LD="${KERNEL_LD}" OBJCOPY="${KERNEL_OBJCOPY}" STRIP="${KERNEL_STRIP}" ${KERNEL_EXTRA_ARGS}
+ done
+}
+
+do_install:append() {
+ install -d ${D}/${KERNEL_DTBDEST}
+ for dtbf in ${KERNEL_DEVICETREE}; do
+ dtb=`normalize_dtb "$dtbf"`
+ dtb_path=`get_real_dtb_path_in_kernel "$dtb"`
+ if "${@'false' if oe.types.boolean(d.getVar('KERNEL_DTBVENDORED')) else 'true'}"; then
+ dtb_ext=${dtb##*.}
+ dtb_base_name=`basename $dtb .$dtb_ext`
+ dtb=$dtb_base_name.$dtb_ext
+ fi
+ install -Dm 0644 $dtb_path ${D}/${KERNEL_DTBDEST}/$dtb
+ done
+}
+
+do_deploy:append() {
+ for dtbf in ${KERNEL_DEVICETREE}; do
+ dtb=`normalize_dtb "$dtbf"`
+ dtb_ext=${dtb##*.}
+ dtb_base_name=`basename $dtb .$dtb_ext`
+ install -d $deployDir
+ if "${@'false' if oe.types.boolean(d.getVar('KERNEL_DTBVENDORED')) else 'true'}"; then
+ dtb=$dtb_base_name.$dtb_ext
+ fi
+ install -m 0644 ${D}/${KERNEL_DTBDEST}/$dtb $deployDir/$dtb_base_name.$dtb_ext
+ if [ -n "${KERNEL_DTB_NAME}" ] ; then
+ ln -sf $dtb_base_name.$dtb_ext $deployDir/$dtb_base_name-${KERNEL_DTB_NAME}.$dtb_ext
+ fi
+ if [ -n "${KERNEL_DTB_LINK_NAME}" ] ; then
+ ln -sf $dtb_base_name.$dtb_ext $deployDir/$dtb_base_name-${KERNEL_DTB_LINK_NAME}.$dtb_ext
+ fi
+ for type in ${KERNEL_IMAGETYPE_FOR_MAKE}; do
+ if [ "$type" = "zImage" ] && [ "${KERNEL_DEVICETREE_BUNDLE}" = "1" ]; then
+ cat ${D}/${KERNEL_IMAGEDEST}/$type \
+ $deployDir/$dtb_base_name.$dtb_ext \
+ > $deployDir/$type-$dtb_base_name.$dtb_ext${KERNEL_DTB_BIN_EXT}
+ if [ -n "${KERNEL_DTB_NAME}" ]; then
+ ln -sf $type-$dtb_base_name.$dtb_ext${KERNEL_DTB_BIN_EXT} \
+ $deployDir/$type-$dtb_base_name-${KERNEL_DTB_NAME}.$dtb_ext${KERNEL_DTB_BIN_EXT}
+ fi
+ if [ -n "${KERNEL_DTB_LINK_NAME}" ]; then
+ ln -sf $type-$dtb_base_name.$dtb_ext${KERNEL_DTB_BIN_EXT} \
+ $deployDir/$type-$dtb_base_name-${KERNEL_DTB_LINK_NAME}.$dtb_ext${KERNEL_DTB_BIN_EXT}
+ fi
+ if [ -e "${KERNEL_OUTPUT_DIR}/${type}.initramfs" ]; then
+ cat ${KERNEL_OUTPUT_DIR}/${type}.initramfs \
+ $deployDir/$dtb_base_name.$dtb_ext \
+ > $deployDir/${type}-${INITRAMFS_NAME}-$dtb_base_name.$dtb_ext${KERNEL_DTB_BIN_EXT}
+ if [ -n "${KERNEL_DTB_NAME}" ]; then
+ ln -sf ${type}-${INITRAMFS_NAME}-$dtb_base_name.$dtb_ext${KERNEL_DTB_BIN_EXT} \
+ $deployDir/${type}-${INITRAMFS_NAME}-$dtb_base_name-${KERNEL_DTB_NAME}.$dtb_ext${KERNEL_DTB_BIN_EXT}
+ fi
+ if [ -n "${KERNEL_DTB_LINK_NAME}" ]; then
+ ln -sf ${type}-${INITRAMFS_NAME}-$dtb_base_name.$dtb_ext${KERNEL_DTB_BIN_EXT} \
+ $deployDir/${type}-${INITRAMFS_NAME}-$dtb_base_name-${KERNEL_DTB_LINK_NAME}.$dtb_ext${KERNEL_DTB_BIN_EXT}
+ fi
+ fi
+ fi
+ done
+ done
+}
diff --git a/meta/classes/kernel-fitimage.bbclass b/meta/classes-recipe/kernel-fitimage.bbclass
index 7e09b075ff..4b74ddc201 100644
--- a/meta/classes/kernel-fitimage.bbclass
+++ b/meta/classes-recipe/kernel-fitimage.bbclass
@@ -1,4 +1,10 @@
-inherit kernel-uboot kernel-artifact-names uboot-sign
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+inherit kernel-uboot kernel-artifact-names uboot-config
def get_fit_replacement_type(d):
kerneltypes = d.getVar('KERNEL_IMAGETYPES') or ""
@@ -44,21 +50,37 @@ python __anonymous () {
d.appendVarFlag('do_assemble_fitimage', 'depends', ' virtual/dtb:do_populate_sysroot')
d.appendVarFlag('do_assemble_fitimage_initramfs', 'depends', ' virtual/dtb:do_populate_sysroot')
d.setVar('EXTERNAL_KERNEL_DEVICETREE', "${RECIPE_SYSROOT}/boot/devicetree")
-
- # Verified boot will sign the fitImage and append the public key to
- # U-Boot dtb. We ensure the U-Boot dtb is deployed before assembling
- # the fitImage:
- if d.getVar('UBOOT_SIGN_ENABLE') == "1" and d.getVar('UBOOT_DTB_BINARY'):
- uboot_pn = d.getVar('PREFERRED_PROVIDER_u-boot') or 'u-boot'
- d.appendVarFlag('do_assemble_fitimage', 'depends', ' %s:do_populate_sysroot' % uboot_pn)
- if d.getVar('INITRAMFS_IMAGE_BUNDLE') == "1":
- d.appendVarFlag('do_assemble_fitimage_initramfs', 'depends', ' %s:do_populate_sysroot' % uboot_pn)
}
# Description string
FIT_DESC ?= "Kernel fitImage for ${DISTRO_NAME}/${PV}/${MACHINE}"
+# Kernel fitImage Hash Algo
+FIT_HASH_ALG ?= "sha256"
+
+# Kernel fitImage Signature Algo
+FIT_SIGN_ALG ?= "rsa2048"
+
+# Kernel / U-Boot fitImage Padding Algo
+FIT_PAD_ALG ?= "pkcs-1.5"
+
+# Generate keys for signing Kernel fitImage
+FIT_GENERATE_KEYS ?= "0"
+
+# Size of private keys in number of bits
+FIT_SIGN_NUMBITS ?= "2048"
+
+# args to openssl genrsa (Default is just the public exponent)
+FIT_KEY_GENRSA_ARGS ?= "-F4"
+
+# args to openssl req (Default is -batch for non interactive mode and
+# -new for new certificate)
+FIT_KEY_REQ_ARGS ?= "-batch -new"
+
+# Standard format for public key certificate
+FIT_KEY_SIGN_PKCS ?= "-x509"
+
# Sign individual images as well
FIT_SIGN_INDIVIDUAL ?= "0"
@@ -67,6 +89,13 @@ FIT_CONF_PREFIX[doc] = "Prefix to use for FIT configuration node name"
FIT_SUPPORTED_INITRAMFS_FSTYPES ?= "cpio.lz4 cpio.lzo cpio.lzma cpio.xz cpio.zst cpio.gz ext2.gz cpio"
+# Allow user to select the default DTB for FIT image when multiple dtb's exists.
+FIT_CONF_DEFAULT_DTB ?= ""
+
+# length of address in number of <u32> cells
+# ex: 1 32bits address, 2 64bits address
+FIT_ADDRESS_CELLS ?= "1"
+
# Keys used to sign individually image nodes.
# The keys to sign image nodes must be different from those used to sign
# configuration nodes, otherwise the "required" property, from
@@ -85,7 +114,7 @@ fitimage_emit_fit_header() {
/ {
description = "${FIT_DESC}";
- #address-cells = <1>;
+ #address-cells = <${FIT_ADDRESS_CELLS}>;
EOF
}
@@ -148,7 +177,7 @@ fitimage_emit_section_kernel() {
kernel-$2 {
description = "Linux kernel";
data = /incbin/("$3");
- type = "kernel";
+ type = "${UBOOT_MKIMAGE_KERNEL_TYPE}";
arch = "${UBOOT_ARCH}";
os = "linux";
compression = "$4";
@@ -333,6 +362,27 @@ EOF
}
#
+# echoes symlink destination if it points below directory
+#
+# $1 ... file that's a potential symlink
+# $2 ... expected parent directory
+symlink_points_below() {
+ file="$2/$1"
+ dir=$2
+
+ if ! [ -L "$file" ]; then
+ return
+ fi
+
+ realpath="$(realpath --relative-to=$dir $file)"
+ if [ -z "${realpath%%../*}" ]; then
+ return
+ fi
+
+ echo "$realpath"
+}
+
+#
# Emit the fitImage ITS configuration section
#
# $1 ... .its filename
@@ -342,10 +392,12 @@ EOF
# $5 ... u-boot script ID
# $6 ... config ID
# $7 ... default flag
+# $8 ... default DTB image name
fitimage_emit_section_config() {
conf_csum="${FIT_HASH_ALG}"
conf_sign_algo="${FIT_SIGN_ALG}"
+ conf_padding_algo="${FIT_PAD_ALG}"
if [ "${UBOOT_SIGN_ENABLE}" = "1" ] ; then
conf_sign_keyname="${UBOOT_SIGN_KEYNAME}"
fi
@@ -357,6 +409,7 @@ fitimage_emit_section_config() {
bootscr_id="$5"
config_id="$6"
default_flag="$7"
+ default_dtb_image="$8"
# Test if we have any DTBs at all
sep=""
@@ -368,6 +421,23 @@ fitimage_emit_section_config() {
bootscr_line=""
setup_line=""
default_line=""
+ compatible_line=""
+
+ dtb_image_sect=$(symlink_points_below $dtb_image "${EXTERNAL_KERNEL_DEVICETREE}")
+ if [ -z "$dtb_image_sect" ]; then
+ dtb_image_sect=$dtb_image
+ fi
+
+ dtb_path="${EXTERNAL_KERNEL_DEVICETREE}/${dtb_image_sect}"
+ if [ -e "$dtb_path" ]; then
+ compat=$(fdtget -t s "$dtb_path" / compatible | sed 's/ /", "/g')
+ if [ -n "$compat" ]; then
+ compatible_line="compatible = \"$compat\";"
+ fi
+ fi
+
+ dtb_image=$(echo $dtb_image | tr '/' '_')
+ dtb_image_sect=$(echo "${dtb_image_sect}" | tr '/' '_')
# conf node name is selected based on dtb ID if it is present,
# otherwise its selected based on kernel ID
@@ -386,7 +456,7 @@ fitimage_emit_section_config() {
if [ -n "$dtb_image" ]; then
conf_desc="$conf_desc${sep}FDT blob"
sep=", "
- fdt_line="fdt = \"fdt-$dtb_image\";"
+ fdt_line="fdt = \"fdt-$dtb_image_sect\";"
fi
if [ -n "$ramdisk_id" ]; then
@@ -410,7 +480,13 @@ fitimage_emit_section_config() {
# default node is selected based on dtb ID if it is present,
# otherwise its selected based on kernel ID
if [ -n "$dtb_image" ]; then
- default_line="default = \"${FIT_CONF_PREFIX}$dtb_image\";"
+ # Select default node as user specified dtb when
+ # multiple dtb exists.
+ if [ -n "$default_dtb_image" ]; then
+ default_line="default = \"${FIT_CONF_PREFIX}$default_dtb_image\";"
+ else
+ default_line="default = \"${FIT_CONF_PREFIX}$dtb_image\";"
+ fi
else
default_line="default = \"${FIT_CONF_PREFIX}$kernel_id\";"
fi
@@ -420,6 +496,7 @@ fitimage_emit_section_config() {
$default_line
$conf_node {
description = "$default_flag $conf_desc";
+ $compatible_line
$kernel_line
$fdt_line
$ramdisk_line
@@ -465,6 +542,7 @@ EOF
signature-1 {
algo = "$conf_csum,$conf_sign_algo";
key-name-hint = "$conf_sign_keyname";
+ padding = "$conf_padding_algo";
$sign_line
};
EOF
@@ -488,6 +566,7 @@ fitimage_assemble() {
ramdiskcount=$3
setupcount=""
bootscr_id=""
+ default_dtb_image=""
rm -f $1 arch/${ARCH}/boot/$2
if [ -n "${UBOOT_SIGN_IMG_KEYNAME}" -a "${UBOOT_SIGN_KEYNAME}" = "${UBOOT_SIGN_IMG_KEYNAME}" ]; then
@@ -521,26 +600,60 @@ fitimage_assemble() {
continue
fi
- DTB_PATH="arch/${ARCH}/boot/dts/$DTB"
+ DTB_PATH="${KERNEL_OUTPUT_DIR}/dts/$DTB"
if [ ! -e "$DTB_PATH" ]; then
- DTB_PATH="arch/${ARCH}/boot/$DTB"
+ DTB_PATH="${KERNEL_OUTPUT_DIR}/$DTB"
+ fi
+
+ # Strip off the path component from the filename
+ if "${@'false' if oe.types.boolean(d.getVar('KERNEL_DTBVENDORED')) else 'true'}"; then
+ DTB=`basename $DTB`
+ fi
+
+ # Set the default dtb image if it exists in the devicetree.
+ if [ ${FIT_CONF_DEFAULT_DTB} = $DTB ];then
+ default_dtb_image=$(echo "$DTB" | tr '/' '_')
fi
DTB=$(echo "$DTB" | tr '/' '_')
+
+ # Skip DTB if we've picked it up previously
+ echo "$DTBS" | tr ' ' '\n' | grep -xq "$DTB" && continue
+
DTBS="$DTBS $DTB"
+ DTB=$(echo $DTB | tr '/' '_')
fitimage_emit_section_dtb $1 $DTB $DTB_PATH
done
fi
if [ -n "${EXTERNAL_KERNEL_DEVICETREE}" ]; then
dtbcount=1
- for DTB in $(find "${EXTERNAL_KERNEL_DEVICETREE}" \( -name '*.dtb' -o -name '*.dtbo' \) -printf '%P\n' | sort); do
+ for DTB in $(find "${EXTERNAL_KERNEL_DEVICETREE}" -name '*.dtb' -printf '%P\n' | sort) \
+ $(find "${EXTERNAL_KERNEL_DEVICETREE}" -name '*.dtbo' -printf '%P\n' | sort); do
+ # Set the default dtb image if it exists in the devicetree.
+ if [ ${FIT_CONF_DEFAULT_DTB} = $DTB ];then
+ default_dtb_image=$(echo "$DTB" | tr '/' '_')
+ fi
+
DTB=$(echo "$DTB" | tr '/' '_')
+
+ # Skip DTB/DTBO if we've picked it up previously
+ echo "$DTBS" | tr ' ' '\n' | grep -xq "$DTB" && continue
+
DTBS="$DTBS $DTB"
+
+ # Also skip if a symlink. We'll later have each config section point at it
+ [ $(symlink_points_below $DTB "${EXTERNAL_KERNEL_DEVICETREE}") ] && continue
+
+ DTB=$(echo $DTB | tr '/' '_')
fitimage_emit_section_dtb $1 $DTB "${EXTERNAL_KERNEL_DEVICETREE}/$DTB"
done
fi
+ if [ -n "${FIT_CONF_DEFAULT_DTB}" ] && [ -z $default_dtb_image ]; then
+ bbwarn "${FIT_CONF_DEFAULT_DTB} is not available in the list of device trees."
+ fi
+
#
# Step 3: Prepare a u-boot script section
#
@@ -558,9 +671,9 @@ fitimage_assemble() {
#
# Step 4: Prepare a setup section. (For x86)
#
- if [ -e arch/${ARCH}/boot/setup.bin ]; then
+ if [ -e ${KERNEL_OUTPUT_DIR}/setup.bin ]; then
setupcount=1
- fitimage_emit_section_setup $1 $setupcount arch/${ARCH}/boot/setup.bin
+ fitimage_emit_section_setup $1 $setupcount ${KERNEL_OUTPUT_DIR}/setup.bin
fi
#
@@ -613,15 +726,15 @@ fitimage_assemble() {
for DTB in ${DTBS}; do
dtb_ext=${DTB##*.}
if [ "$dtb_ext" = "dtbo" ]; then
- fitimage_emit_section_config $1 "" "$DTB" "" "$bootscr_id" "" "`expr $i = $dtbcount`"
+ fitimage_emit_section_config $1 "" "$DTB" "" "$bootscr_id" "" "`expr $i = $dtbcount`" "$default_dtb_image"
else
- fitimage_emit_section_config $1 $kernelcount "$DTB" "$ramdiskcount" "$bootscr_id" "$setupcount" "`expr $i = $dtbcount`"
+ fitimage_emit_section_config $1 $kernelcount "$DTB" "$ramdiskcount" "$bootscr_id" "$setupcount" "`expr $i = $dtbcount`" "$default_dtb_image"
fi
i=`expr $i + 1`
done
else
defaultconfigcount=1
- fitimage_emit_section_config $1 $kernelcount "" "$ramdiskcount" "$bootscr_id" "$setupcount" $defaultconfigcount
+ fitimage_emit_section_config $1 $kernelcount "" "$ramdiskcount" "$bootscr_id" "$setupcount" $defaultconfigcount "$default_dtb_image"
fi
fitimage_emit_section_maint $1 sectend
@@ -634,24 +747,16 @@ fitimage_assemble() {
${UBOOT_MKIMAGE} \
${@'-D "${UBOOT_MKIMAGE_DTCOPTS}"' if len('${UBOOT_MKIMAGE_DTCOPTS}') else ''} \
-f $1 \
- arch/${ARCH}/boot/$2
+ ${KERNEL_OUTPUT_DIR}/$2
#
- # Step 8: Sign the image and add public key to U-Boot dtb
+ # Step 8: Sign the image
#
if [ "x${UBOOT_SIGN_ENABLE}" = "x1" ] ; then
- add_key_to_u_boot=""
- if [ -n "${UBOOT_DTB_BINARY}" ]; then
- # The u-boot.dtb is a symlink to UBOOT_DTB_IMAGE, so we need copy
- # both of them, and don't dereference the symlink.
- cp -P ${STAGING_DATADIR}/u-boot*.dtb ${B}
- add_key_to_u_boot="-K ${B}/${UBOOT_DTB_BINARY}"
- fi
${UBOOT_MKIMAGE_SIGN} \
${@'-D "${UBOOT_MKIMAGE_DTCOPTS}"' if len('${UBOOT_MKIMAGE_DTCOPTS}') else ''} \
-F -k "${UBOOT_SIGN_KEYDIR}" \
- $add_key_to_u_boot \
- -r arch/${ARCH}/boot/$2 \
+ -r ${KERNEL_OUTPUT_DIR}/$2 \
${UBOOT_MKIMAGE_SIGN_ARGS}
fi
}
@@ -659,18 +764,30 @@ fitimage_assemble() {
do_assemble_fitimage() {
if echo ${KERNEL_IMAGETYPES} | grep -wq "fitImage"; then
cd ${B}
- fitimage_assemble fit-image.its fitImage ""
+ fitimage_assemble fit-image.its fitImage-none ""
+ if [ "${INITRAMFS_IMAGE_BUNDLE}" != "1" ]; then
+ ln -sf fitImage-none ${B}/${KERNEL_OUTPUT_DIR}/fitImage
+ fi
fi
}
addtask assemble_fitimage before do_install after do_compile
+SYSROOT_DIRS:append = " /sysroot-only"
+do_install:append() {
+ if echo ${KERNEL_IMAGETYPES} | grep -wq "fitImage" && \
+ [ "${UBOOT_SIGN_ENABLE}" = "1" ]; then
+ install -D ${B}/${KERNEL_OUTPUT_DIR}/fitImage-none ${D}/sysroot-only/fitImage
+ fi
+}
+
do_assemble_fitimage_initramfs() {
if echo ${KERNEL_IMAGETYPES} | grep -wq "fitImage" && \
test -n "${INITRAMFS_IMAGE}" ; then
cd ${B}
if [ "${INITRAMFS_IMAGE_BUNDLE}" = "1" ]; then
- fitimage_assemble fit-image-${INITRAMFS_IMAGE}.its fitImage ""
+ fitimage_assemble fit-image-${INITRAMFS_IMAGE}.its fitImage-bundle ""
+ ln -sf fitImage-bundle ${B}/${KERNEL_OUTPUT_DIR}/fitImage
else
fitimage_assemble fit-image-${INITRAMFS_IMAGE}.its fitImage-${INITRAMFS_IMAGE} 1
fi
@@ -754,42 +871,11 @@ kernel_do_deploy:append() {
if [ "${INITRAMFS_IMAGE_BUNDLE}" != "1" ]; then
bbnote "Copying fitImage-${INITRAMFS_IMAGE} file..."
- install -m 0644 ${B}/arch/${ARCH}/boot/fitImage-${INITRAMFS_IMAGE} "$deployDir/fitImage-${INITRAMFS_IMAGE_NAME}-${KERNEL_FIT_NAME}${KERNEL_FIT_BIN_EXT}"
+ install -m 0644 ${B}/${KERNEL_OUTPUT_DIR}/fitImage-${INITRAMFS_IMAGE} "$deployDir/fitImage-${INITRAMFS_IMAGE_NAME}-${KERNEL_FIT_NAME}${KERNEL_FIT_BIN_EXT}"
if [ -n "${KERNEL_FIT_LINK_NAME}" ] ; then
ln -snf fitImage-${INITRAMFS_IMAGE_NAME}-${KERNEL_FIT_NAME}${KERNEL_FIT_BIN_EXT} "$deployDir/fitImage-${INITRAMFS_IMAGE_NAME}-${KERNEL_FIT_LINK_NAME}"
fi
fi
fi
fi
- if [ "${UBOOT_SIGN_ENABLE}" = "1" -o "${UBOOT_FITIMAGE_ENABLE}" = "1" ] && \
- [ -n "${UBOOT_DTB_BINARY}" ] ; then
- # UBOOT_DTB_IMAGE is a realfile, but we can't use
- # ${UBOOT_DTB_IMAGE} since it contains ${PV} which is aimed
- # for u-boot, but we are in kernel env now.
- install -m 0644 ${B}/u-boot-${MACHINE}*.dtb "$deployDir/"
- fi
- if [ "${UBOOT_FITIMAGE_ENABLE}" = "1" -a -n "${UBOOT_BINARY}" -a -n "${SPL_DTB_BINARY}" ] ; then
- # If we're also creating and/or signing the uboot fit, now we need to
- # deploy it, it's its file, as well as u-boot-spl.dtb
- install -m 0644 ${B}/u-boot-spl-${MACHINE}*.dtb "$deployDir/"
- bbnote "Copying u-boot-fitImage file..."
- install -m 0644 ${B}/u-boot-fitImage-* "$deployDir/"
- bbnote "Copying u-boot-its file..."
- install -m 0644 ${B}/u-boot-its-* "$deployDir/"
- fi
-}
-
-# The function below performs the following in case of initramfs bundles:
-# - Removes do_assemble_fitimage. FIT generation is done through
-# do_assemble_fitimage_initramfs. do_assemble_fitimage is not needed
-# and should not be part of the tasks to be executed.
-# - Since do_kernel_generate_rsa_keys is inserted by default
-# between do_compile and do_assemble_fitimage, this is
-# not suitable in case of initramfs bundles. do_kernel_generate_rsa_keys
-# should be between do_bundle_initramfs and do_assemble_fitimage_initramfs.
-python () {
- if d.getVar('INITRAMFS_IMAGE_BUNDLE') == "1":
- bb.build.deltask('do_assemble_fitimage', d)
- bb.build.deltask('kernel_generate_rsa_keys', d)
- bb.build.addtask('kernel_generate_rsa_keys', 'do_assemble_fitimage_initramfs', 'do_bundle_initramfs', d)
}
diff --git a/meta/classes/kernel-grub.bbclass b/meta/classes-recipe/kernel-grub.bbclass
index 44b2015468..2325e635e1 100644
--- a/meta/classes/kernel-grub.bbclass
+++ b/meta/classes-recipe/kernel-grub.bbclass
@@ -1,4 +1,10 @@
#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+#
# While installing a rpm to update kernel on a deployed target, it will update
# the boot area and the boot menu with the kernel as the priority but allow
# you to fall back to the original kernel as well.
diff --git a/meta/classes-recipe/kernel-module-split.bbclass b/meta/classes-recipe/kernel-module-split.bbclass
new file mode 100644
index 0000000000..9487365eb7
--- /dev/null
+++ b/meta/classes-recipe/kernel-module-split.bbclass
@@ -0,0 +1,188 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+pkg_postinst:modules () {
+if [ -z "$D" ]; then
+ depmod -a ${KERNEL_VERSION}
+else
+ # image.bbclass will call depmodwrapper after everything is installed,
+ # no need to do it here as well
+ :
+fi
+}
+
+pkg_postrm:modules () {
+if [ -z "$D" ]; then
+ depmod -a ${KERNEL_VERSION}
+else
+ depmodwrapper -a -b $D ${KERNEL_VERSION} ${KERNEL_PACKAGE_NAME}
+fi
+}
+
+autoload_postinst_fragment() {
+if [ x"$D" = "x" ]; then
+ modprobe %s || true
+fi
+}
+
+PACKAGE_WRITE_DEPS += "kmod-native depmodwrapper-cross"
+
+modulesloaddir ??= "${@bb.utils.contains('DISTRO_FEATURES', 'systemd', '${nonarch_libdir}', '${sysconfdir}', d)}/modules-load.d"
+modprobedir ??= "${@bb.utils.contains('DISTRO_FEATURES', 'systemd', '${nonarch_base_libdir}', '${sysconfdir}', d)}/modprobe.d"
+
+KERNEL_SPLIT_MODULES ?= "1"
+PACKAGESPLITFUNCS =+ "split_kernel_module_packages"
+
+KERNEL_MODULES_META_PACKAGE ?= "${@ d.getVar("KERNEL_PACKAGE_NAME") or "kernel" }-modules"
+
+KERNEL_MODULE_PACKAGE_PREFIX ?= ""
+KERNEL_MODULE_PACKAGE_SUFFIX ?= "-${KERNEL_VERSION}"
+KERNEL_MODULE_PROVIDE_VIRTUAL ?= "1"
+
+python split_kernel_module_packages () {
+ import re
+
+ modinfoexp = re.compile("([^=]+)=(.*)")
+
+ def extract_modinfo(file):
+ import tempfile, subprocess
+ tempfile.tempdir = d.getVar("WORKDIR")
+ compressed = re.match( r'.*\.(gz|xz|zst)$', file)
+ tf = tempfile.mkstemp()
+ tmpfile = tf[1]
+ if compressed:
+ tmpkofile = tmpfile + ".ko"
+ if compressed.group(1) == 'gz':
+ cmd = "gunzip -dc %s > %s" % (file, tmpkofile)
+ subprocess.check_call(cmd, shell=True)
+ elif compressed.group(1) == 'xz':
+ cmd = "xz -dc %s > %s" % (file, tmpkofile)
+ subprocess.check_call(cmd, shell=True)
+ elif compressed.group(1) == 'zst':
+ cmd = "zstd -dc %s > %s" % (file, tmpkofile)
+ subprocess.check_call(cmd, shell=True)
+ else:
+ msg = "Cannot decompress '%s'" % file
+ raise msg
+ cmd = "%s -j .modinfo -O binary %s %s" % (d.getVar("OBJCOPY"), tmpkofile, tmpfile)
+ else:
+ cmd = "%s -j .modinfo -O binary %s %s" % (d.getVar("OBJCOPY"), file, tmpfile)
+ subprocess.check_call(cmd, shell=True)
+ # errors='replace': Some old kernel versions contain invalid utf-8 characters in mod descriptions (like 0xf6, 'ö')
+ with open(tmpfile, errors='replace') as f:
+ l = f.read().split("\000")
+ os.close(tf[0])
+ os.unlink(tmpfile)
+ if compressed:
+ os.unlink(tmpkofile)
+ vals = {}
+ for i in l:
+ m = modinfoexp.match(i)
+ if not m:
+ continue
+ vals[m.group(1)] = m.group(2)
+ return vals
+
+ def frob_metadata(file, pkg, pattern, format, basename):
+ vals = extract_modinfo(file)
+
+ dvar = d.getVar('PKGD')
+
+ # If autoloading is requested, output ${modulesloaddir}/<name>.conf and append
+ # appropriate modprobe commands to the postinst
+ autoloadlist = (d.getVar("KERNEL_MODULE_AUTOLOAD") or "").split()
+ autoload = d.getVar('module_autoload_%s' % basename)
+ if autoload and autoload == basename:
+ bb.warn("module_autoload_%s was replaced by KERNEL_MODULE_AUTOLOAD for cases where basename == module name, please drop it" % basename)
+ if autoload and basename not in autoloadlist:
+ bb.warn("module_autoload_%s is defined but '%s' isn't included in KERNEL_MODULE_AUTOLOAD, please add it there" % (basename, basename))
+ if basename in autoloadlist:
+ conf = '%s/%s.conf' % (d.getVar('modulesloaddir'), basename)
+ name = '%s%s' % (dvar, conf)
+ os.makedirs(os.path.dirname(name), exist_ok=True)
+ with open(name, 'w') as f:
+ if autoload:
+ for m in autoload.split():
+ f.write('%s\n' % m)
+ else:
+ f.write('%s\n' % basename)
+ conf2append = ' %s' % conf
+ d.appendVar('FILES:%s' % pkg, conf2append)
+ d.appendVar('CONFFILES:%s' % pkg, conf2append)
+ postinst = d.getVar('pkg_postinst:%s' % pkg)
+ if not postinst:
+ bb.fatal("pkg_postinst:%s not defined" % pkg)
+ postinst += d.getVar('autoload_postinst_fragment') % (autoload or basename)
+ d.setVar('pkg_postinst:%s' % pkg, postinst)
+
+ # Write out any modconf fragment
+ modconflist = (d.getVar("KERNEL_MODULE_PROBECONF") or "").split()
+ modconf = d.getVar('module_conf_%s' % basename)
+ if modconf and basename in modconflist:
+ conf = '%s/%s.conf' % (d.getVar('modprobedir'), basename)
+ name = '%s%s' % (dvar, conf)
+ os.makedirs(os.path.dirname(name), exist_ok=True)
+ with open(name, 'w') as f:
+ f.write("%s\n" % modconf)
+ conf2append = ' %s' % conf
+ d.appendVar('FILES:%s' % pkg, conf2append)
+ d.appendVar('CONFFILES:%s' % pkg, conf2append)
+
+ elif modconf:
+ bb.error("Please ensure module %s is listed in KERNEL_MODULE_PROBECONF since module_conf_%s is set" % (basename, basename))
+
+ if "description" in vals:
+ old_desc = d.getVar('DESCRIPTION:' + pkg) or ""
+ d.setVar('DESCRIPTION:' + pkg, old_desc + "; " + vals["description"])
+
+ rdepends = bb.utils.explode_dep_versions2(d.getVar('RDEPENDS:' + pkg) or "")
+ modinfo_deps = []
+ if "depends" in vals and vals["depends"] != "":
+ for dep in vals["depends"].split(","):
+ on = legitimize_package_name(dep)
+ dependency_pkg = format % on
+ modinfo_deps.append(dependency_pkg)
+ for dep in modinfo_deps:
+ if not dep in rdepends:
+ rdepends[dep] = []
+ d.setVar('RDEPENDS:' + pkg, bb.utils.join_deps(rdepends, commasep=False))
+
+ # Avoid automatic -dev recommendations for modules ending with -dev.
+ d.setVarFlag('RRECOMMENDS:' + pkg, 'nodeprrecs', 1)
+
+ # Provide virtual package without postfix
+ providevirt = d.getVar('KERNEL_MODULE_PROVIDE_VIRTUAL')
+ if providevirt == "1":
+ postfix = format.split('%s')[1]
+ d.setVar('RPROVIDES:' + pkg, pkg.replace(postfix, ''))
+
+ kernel_package_name = d.getVar("KERNEL_PACKAGE_NAME") or "kernel"
+ kernel_version = d.getVar("KERNEL_VERSION")
+
+ metapkg = d.getVar('KERNEL_MODULES_META_PACKAGE')
+ splitmods = d.getVar('KERNEL_SPLIT_MODULES')
+ postinst = d.getVar('pkg_postinst:modules')
+ postrm = d.getVar('pkg_postrm:modules')
+
+ if splitmods != '1':
+ d.appendVar('FILES:' + metapkg, '%s %s %s/modules' %
+ (d.getVar('modulesloaddir'), d.getVar('modprobedir'), d.getVar("nonarch_base_libdir")))
+ d.appendVar('pkg_postinst:%s' % metapkg, postinst)
+ d.prependVar('pkg_postrm:%s' % metapkg, postrm);
+ return
+
+ module_regex = r'^(.*)\.k?o(?:\.(gz|xz|zst))?$'
+
+ module_pattern_prefix = d.getVar('KERNEL_MODULE_PACKAGE_PREFIX')
+ module_pattern_suffix = d.getVar('KERNEL_MODULE_PACKAGE_SUFFIX')
+ module_pattern = module_pattern_prefix + kernel_package_name + '-module-%s' + module_pattern_suffix
+
+ modules = do_split_packages(d, root='${nonarch_base_libdir}/modules', file_regex=module_regex, output_pattern=module_pattern, description='%s kernel module', postinst=postinst, postrm=postrm, recursive=True, hook=frob_metadata, extra_depends='%s-%s' % (kernel_package_name, kernel_version))
+ if modules:
+ d.appendVar('RDEPENDS:' + metapkg, ' '+' '.join(modules))
+}
+
+do_package[vardeps] += '${@" ".join(map(lambda s: "module_conf_" + s, (d.getVar("KERNEL_MODULE_PROBECONF") or "").split()))}'
diff --git a/meta/classes-recipe/kernel-uboot.bbclass b/meta/classes-recipe/kernel-uboot.bbclass
new file mode 100644
index 0000000000..30a85ccc28
--- /dev/null
+++ b/meta/classes-recipe/kernel-uboot.bbclass
@@ -0,0 +1,49 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+# fitImage kernel compression algorithm
+FIT_KERNEL_COMP_ALG ?= "gzip"
+FIT_KERNEL_COMP_ALG_EXTENSION ?= ".gz"
+
+# Kernel image type passed to mkimage (i.e. kernel kernel_noload...)
+UBOOT_MKIMAGE_KERNEL_TYPE ?= "kernel"
+
+uboot_prep_kimage() {
+ if [ -e arch/${ARCH}/boot/compressed/vmlinux ]; then
+ vmlinux_path="arch/${ARCH}/boot/compressed/vmlinux"
+ linux_suffix=""
+ linux_comp="none"
+ elif [ -e arch/${ARCH}/boot/vmlinuz.bin ]; then
+ rm -f linux.bin
+ cp -l arch/${ARCH}/boot/vmlinuz.bin linux.bin
+ vmlinux_path=""
+ linux_suffix=""
+ linux_comp="none"
+ else
+ vmlinux_path="vmlinux"
+ # Use vmlinux.initramfs for linux.bin when INITRAMFS_IMAGE_BUNDLE set
+ # As per the implementation in kernel.bbclass.
+ # See do_bundle_initramfs function
+ if [ "${INITRAMFS_IMAGE_BUNDLE}" = "1" ] && [ -e vmlinux.initramfs ]; then
+ vmlinux_path="vmlinux.initramfs"
+ fi
+ linux_suffix="${FIT_KERNEL_COMP_ALG_EXTENSION}"
+ linux_comp="${FIT_KERNEL_COMP_ALG}"
+ fi
+
+ [ -n "${vmlinux_path}" ] && ${KERNEL_OBJCOPY} -O binary -R .note -R .comment -S "${vmlinux_path}" linux.bin
+
+ if [ "${linux_comp}" != "none" ] ; then
+ if [ "${linux_comp}" = "gzip" ] ; then
+ gzip -9 linux.bin
+ elif [ "${linux_comp}" = "lzo" ] ; then
+ lzop -9 linux.bin
+ fi
+ mv -f "linux.bin${linux_suffix}" linux.bin
+ fi
+
+ echo "${linux_comp}"
+}
diff --git a/meta/classes/kernel-uimage.bbclass b/meta/classes-recipe/kernel-uimage.bbclass
index cedb4fa070..1a599e656c 100644
--- a/meta/classes/kernel-uimage.bbclass
+++ b/meta/classes-recipe/kernel-uimage.bbclass
@@ -1,3 +1,9 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
inherit kernel-uboot
python __anonymous () {
@@ -30,6 +36,6 @@ do_uboot_mkimage() {
awk '$3=="${UBOOT_ENTRYSYMBOL}" {print "0x"$1;exit}'`
fi
- uboot-mkimage -A ${UBOOT_ARCH} -O linux -T kernel -C "${linux_comp}" -a ${UBOOT_LOADADDRESS} -e $ENTRYPOINT -n "${DISTRO_NAME}/${PV}/${MACHINE}" -d linux.bin ${B}/arch/${ARCH}/boot/uImage
+ uboot-mkimage -A ${UBOOT_ARCH} -O linux -T ${UBOOT_MKIMAGE_KERNEL_TYPE} -C "${linux_comp}" -a ${UBOOT_LOADADDRESS} -e $ENTRYPOINT -n "${DISTRO_NAME}/${PV}/${MACHINE}" -d linux.bin ${B}/arch/${ARCH}/boot/uImage
rm -f linux.bin
}
diff --git a/meta/classes/kernel-yocto.bbclass b/meta/classes-recipe/kernel-yocto.bbclass
index b276ded775..9a86616dad 100644
--- a/meta/classes/kernel-yocto.bbclass
+++ b/meta/classes-recipe/kernel-yocto.bbclass
@@ -1,3 +1,9 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
# remove tasks that modify the source tree in case externalsrc is inherited
SRCTREECOVEREDTASKS += "do_validate_branches do_kernel_configcheck do_kernel_checkout do_fetch do_unpack do_patch"
PATCH_GIT_USER_EMAIL ?= "kernel-yocto@oe"
@@ -170,12 +176,32 @@ do_kernel_metadata() {
# kernel source tree, where they'll be used later.
check_git_config
patches="${@" ".join(find_patches(d,'kernel-meta'))}"
- for p in $patches; do
+ if [ -n "$patches" ]; then
(
- cd ${WORKDIR}/kernel-meta
- git am -s $p
- )
- done
+ cd ${WORKDIR}/kernel-meta
+
+ # take the SRC_URI patches, and create a series file
+ # this is required to support some better processing
+ # of issues with the patches
+ rm -f series
+ for p in $patches; do
+ cp $p .
+ echo "$(basename $p)" >> series
+ done
+
+ # process the series with kgit-s2q, which is what is
+ # handling the rest of the kernel. This allows us
+ # more flexibility for handling failures or advanced
+ # mergeing functinoality
+ message=$(kgit-s2q --gen -v --patches ${WORKDIR}/kernel-meta 2>&1)
+ if [ $? -ne 0 ]; then
+ # setup to try the patch again
+ kgit-s2q --prev
+ bberror "Problem applying patches to: ${WORKDIR}/kernel-meta"
+ bbfatal_log "\n($message)"
+ fi
+ )
+ fi
fi
sccs_from_src_uri="${@" ".join(find_sccs(d))}"
@@ -206,7 +232,7 @@ do_kernel_metadata() {
# SRC_URI. If they were supplied, we convert them into include directives
# for the update part of the process
for f in ${feat_dirs}; do
- if [ -d "${WORKDIR}/$f/meta" ]; then
+ if [ -d "${WORKDIR}/$f/kernel-meta" ]; then
includes="$includes -I${WORKDIR}/$f/kernel-meta"
elif [ -d "${WORKDIR}/../oe-local-files/$f" ]; then
includes="$includes -I${WORKDIR}/../oe-local-files/$f"
@@ -310,6 +336,8 @@ do_kernel_metadata() {
bbnote "KERNEL_FEATURES: $KERNEL_FEATURES_FINAL"
bbnote "Final scc/cfg list: $sccs_defconfig $bsp_definition $sccs $KERNEL_FEATURES_FINAL"
fi
+
+ set -e
}
do_patch() {
@@ -343,6 +371,8 @@ do_patch() {
fi
done
fi
+
+ set -e
}
do_kernel_checkout() {
@@ -398,9 +428,11 @@ do_kernel_checkout() {
git init
check_git_config
git add .
- git commit -q -m "baseline commit: creating repo for ${PN}-${PV}"
+ git commit -q -n -m "baseline commit: creating repo for ${PN}-${PV}"
git clean -d -f
fi
+
+ set -e
}
do_kernel_checkout[dirs] = "${S} ${WORKDIR}"
@@ -443,7 +475,7 @@ do_kernel_configme() {
bbfatal_log "Could not find configuration queue (${meta_dir}/config.queue)"
fi
- CFLAGS="${CFLAGS} ${TOOLCHAIN_OPTIONS}" HOSTCC="${BUILD_CC} ${BUILD_CFLAGS} ${BUILD_LDFLAGS}" HOSTCPP="${BUILD_CPP}" CC="${KERNEL_CC}" LD="${KERNEL_LD}" ARCH=${ARCH} merge_config.sh -O ${B} ${config_flags} ${configs} > ${meta_dir}/cfg/merge_config_build.log 2>&1
+ CFLAGS="${CFLAGS} ${TOOLCHAIN_OPTIONS}" HOSTCC="${BUILD_CC} ${BUILD_CFLAGS} ${BUILD_LDFLAGS}" HOSTCPP="${BUILD_CPP}" CC="${KERNEL_CC}" LD="${KERNEL_LD}" OBJCOPY="${KERNEL_OBJCOPY}" STRIP="${KERNEL_STRIP}" ARCH=${ARCH} merge_config.sh -O ${B} ${config_flags} ${configs} > ${meta_dir}/cfg/merge_config_build.log 2>&1
if [ $? -ne 0 -o ! -f ${B}/.config ]; then
bberror "Could not generate a .config for ${KMACHINE}-${LINUX_KERNEL_TYPE}"
if [ ${KCONF_AUDIT_LEVEL} -gt 1 ]; then
@@ -477,6 +509,8 @@ python do_config_analysis() {
env['PATH'] = "%s:%s%s" % (d.getVar('PATH'), s, "/scripts/util/")
env['LD'] = d.getVar('KERNEL_LD')
env['CC'] = d.getVar('KERNEL_CC')
+ env['OBJCOPY'] = d.getVar('KERNEL_OBJCOPY')
+ env['STRIP'] = d.getVar('KERNEL_STRIP')
env['ARCH'] = d.getVar('ARCH')
env['srctree'] = s
@@ -494,7 +528,7 @@ python do_config_analysis() {
try:
analysis = subprocess.check_output(['symbol_why.py', '--dotconfig', '{}'.format( d.getVar('B') + '/.config' ), '--blame', c], cwd=s, env=env ).decode('utf-8')
except subprocess.CalledProcessError as e:
- bb.fatal( "config analysis failed: %s" % e.output.decode('utf-8'))
+ bb.fatal( "config analysis failed when running '%s': %s" % (" ".join(e.cmd), e.output.decode('utf-8')))
outfile = d.getVar( 'CONFIG_ANALYSIS_FILE' )
@@ -502,7 +536,7 @@ python do_config_analysis() {
try:
analysis = subprocess.check_output(['symbol_why.py', '--dotconfig', '{}'.format( d.getVar('B') + '/.config' ), '--summary', '--extended', '--sanity', c], cwd=s, env=env ).decode('utf-8')
except subprocess.CalledProcessError as e:
- bb.fatal( "config analysis failed: %s" % e.output.decode('utf-8'))
+ bb.fatal( "config analysis failed when running '%s': %s" % (" ".join(e.cmd), e.output.decode('utf-8')))
outfile = d.getVar( 'CONFIG_AUDIT_FILE' )
@@ -538,6 +572,8 @@ python do_kernel_configcheck() {
env['PATH'] = "%s:%s%s" % (d.getVar('PATH'), s, "/scripts/util/")
env['LD'] = d.getVar('KERNEL_LD')
env['CC'] = d.getVar('KERNEL_CC')
+ env['OBJCOPY'] = d.getVar('KERNEL_OBJCOPY')
+ env['STRIP'] = d.getVar('KERNEL_STRIP')
env['ARCH'] = d.getVar('ARCH')
env['srctree'] = s
@@ -563,7 +599,7 @@ python do_kernel_configcheck() {
try:
analysis = subprocess.check_output(['symbol_why.py', '--dotconfig', '{}'.format( d.getVar('B') + '/.config' ), '--mismatches', extra_params], cwd=s, env=env ).decode('utf-8')
except subprocess.CalledProcessError as e:
- bb.fatal( "config analysis failed: %s" % e.output.decode('utf-8'))
+ bb.fatal( "config analysis failed when running '%s': %s" % (" ".join(e.cmd), e.output.decode('utf-8')))
if analysis:
outfile = "{}/{}/cfg/mismatch.txt".format( s, kmeta )
@@ -585,7 +621,7 @@ python do_kernel_configcheck() {
try:
analysis = subprocess.check_output(['symbol_why.py', '--dotconfig', '{}'.format( d.getVar('B') + '/.config' ), '--invalid', extra_params], cwd=s, env=env ).decode('utf-8')
except subprocess.CalledProcessError as e:
- bb.fatal( "config analysis failed: %s" % e.output.decode('utf-8'))
+ bb.fatal( "config analysis failed when running '%s': %s" % (" ".join(e.cmd), e.output.decode('utf-8')))
if analysis:
outfile = "{}/{}/cfg/invalid.txt".format(s,kmeta)
@@ -604,7 +640,7 @@ python do_kernel_configcheck() {
try:
analysis = subprocess.check_output(['symbol_why.py', '--dotconfig', '{}'.format( d.getVar('B') + '/.config' ), '--sanity'], cwd=s, env=env ).decode('utf-8')
except subprocess.CalledProcessError as e:
- bb.fatal( "config analysis failed: %s" % e.output.decode('utf-8'))
+ bb.fatal( "config analysis failed when running '%s': %s" % (" ".join(e.cmd), e.output.decode('utf-8')))
if analysis:
outfile = "{}/{}/cfg/redefinition.txt".format(s,kmeta)
@@ -695,6 +731,8 @@ do_validate_branches() {
kgit-s2q --clean
fi
fi
+
+ set -e
}
OE_TERMINAL_EXPORTS += "KBUILD_OUTPUT"
diff --git a/meta/classes/kernel.bbclass b/meta/classes-recipe/kernel.bbclass
index 83785c3b5b..b084d6d69d 100644
--- a/meta/classes/kernel.bbclass
+++ b/meta/classes-recipe/kernel.bbclass
@@ -1,3 +1,9 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
inherit linux-kernel-base kernel-module-split
COMPATIBLE_HOST = ".*-linux"
@@ -27,7 +33,6 @@ INHIBIT_DEFAULT_DEPS = "1"
KERNEL_IMAGETYPE ?= "zImage"
INITRAMFS_IMAGE ?= ""
-INITRAMFS_IMAGE_NAME ?= "${@['${INITRAMFS_IMAGE}-${MACHINE}', ''][d.getVar('INITRAMFS_IMAGE') == '']}"
INITRAMFS_TASK ?= ""
INITRAMFS_IMAGE_BUNDLE ?= ""
INITRAMFS_DEPLOY_DIR_IMAGE ?= "${DEPLOY_DIR_IMAGE}"
@@ -106,11 +111,11 @@ python __anonymous () {
d.appendVar('RDEPENDS:%s-image' % kname, ' %s-modules (= ${EXTENDPKGV})' % kname)
d.appendVar('RDEPENDS:%s-image-%s' % (kname, typelower), ' %s-modules-${KERNEL_VERSION_PKG_NAME} (= ${EXTENDPKGV})' % kname)
d.setVar('PKG:%s-modules' % kname, '%s-modules-${KERNEL_VERSION_PKG_NAME}' % kname)
- d.appendVar('RPROVIDES:%s-modules' % kname, '%s-modules-${KERNEL_VERSION_PKG_NAME}' % kname)
+ d.appendVar('RPROVIDES:%s-modules' % kname, ' %s-modules-${KERNEL_VERSION_PKG_NAME}' % kname)
d.setVar('PKG:%s-image-%s' % (kname,typelower), '%s-image-%s-${KERNEL_VERSION_PKG_NAME}' % (kname, typelower))
d.setVar('ALLOW_EMPTY:%s-image-%s' % (kname, typelower), '1')
- d.setVar('pkg_postinst:%s-image-%s' % (kname,typelower), """set +e
+ d.prependVar('pkg_postinst:%s-image-%s' % (kname,typelower), """set +e
if [ -n "$D" ]; then
ln -sf %s-${KERNEL_VERSION} $D/${KERNEL_IMAGEDEST}/%s > /dev/null 2>&1
else
@@ -166,7 +171,7 @@ set -e
# image types.
KERNEL_CLASSES ?= " kernel-uimage "
-inherit ${KERNEL_CLASSES}
+inherit_defer ${KERNEL_CLASSES}
# Old style kernels may set ${S} = ${WORKDIR}/git for example
# We need to move these over to STAGING_KERNEL_DIR. We can't just
@@ -176,13 +181,14 @@ do_unpack[cleandirs] += " ${S} ${STAGING_KERNEL_DIR} ${B} ${STAGING_KERNEL_BUILD
do_clean[cleandirs] += " ${S} ${STAGING_KERNEL_DIR} ${B} ${STAGING_KERNEL_BUILDDIR}"
python do_symlink_kernsrc () {
s = d.getVar("S")
- if s[-1] == '/':
- # drop trailing slash, so that os.symlink(kernsrc, s) doesn't use s as directory name and fail
- s=s[:-1]
kernsrc = d.getVar("STAGING_KERNEL_DIR")
if s != kernsrc:
bb.utils.mkdirhier(kernsrc)
bb.utils.remove(kernsrc, recurse=True)
+ if s[-1] == '/':
+ # drop trailing slash, so that os.symlink(kernsrc, s) doesn't use s as
+ # directory name and fail
+ s = s[:-1]
if d.getVar("EXTERNALSRC"):
# With EXTERNALSRC S will not be wiped so we can symlink to it
os.symlink(s, kernsrc)
@@ -204,15 +210,14 @@ PACKAGES_DYNAMIC += "^${KERNEL_PACKAGE_NAME}-firmware-.*"
export OS = "${TARGET_OS}"
export CROSS_COMPILE = "${TARGET_PREFIX}"
-export KBUILD_BUILD_VERSION = "1"
-export KBUILD_BUILD_USER ?= "oe-user"
-export KBUILD_BUILD_HOST ?= "oe-host"
KERNEL_RELEASE ?= "${KERNEL_VERSION}"
# The directory where built kernel lies in the kernel tree
KERNEL_OUTPUT_DIR ?= "arch/${ARCH}/boot"
KERNEL_IMAGEDEST ?= "boot"
+KERNEL_DTBDEST ?= "${KERNEL_IMAGEDEST}"
+KERNEL_DTBVENDORED ?= "0"
#
# configuration
@@ -231,8 +236,11 @@ UBOOT_LOADADDRESS ?= "${UBOOT_ENTRYPOINT}"
# Some Linux kernel configurations need additional parameters on the command line
KERNEL_EXTRA_ARGS ?= ""
-EXTRA_OEMAKE = " HOSTCC="${BUILD_CC}" HOSTCFLAGS="${BUILD_CFLAGS}" HOSTLDFLAGS="${BUILD_LDFLAGS}" HOSTCPP="${BUILD_CPP}""
-EXTRA_OEMAKE += " HOSTCXX="${BUILD_CXX}" HOSTCXXFLAGS="${BUILD_CXXFLAGS}" PAHOLE=false"
+EXTRA_OEMAKE += ' CC="${KERNEL_CC}" LD="${KERNEL_LD}" OBJCOPY="${KERNEL_OBJCOPY}" STRIP="${KERNEL_STRIP}"'
+EXTRA_OEMAKE += ' HOSTCC="${BUILD_CC}" HOSTCFLAGS="${BUILD_CFLAGS}" HOSTLDFLAGS="${BUILD_LDFLAGS}" HOSTCPP="${BUILD_CPP}"'
+EXTRA_OEMAKE += ' HOSTCXX="${BUILD_CXX}" HOSTCXXFLAGS="${BUILD_CXXFLAGS}"'
+# Only for newer kernels (5.19+), native pkg-config variables are set for older kernels when building kernel and modules
+EXTRA_OEMAKE += ' HOSTPKG_CONFIG="pkg-config-native"'
KERNEL_ALT_IMAGETYPE ??= ""
@@ -329,6 +337,10 @@ kernel_do_transform_bundled_initramfs() {
}
do_transform_bundled_initramfs[dirs] = "${B}"
+python do_package:prepend () {
+ d.setVar('STRIP', d.getVar('KERNEL_STRIP').strip())
+}
+
python do_devshell:prepend () {
os.environ["LDFLAGS"] = ''
}
@@ -360,6 +372,10 @@ kernel_do_compile() {
export KBUILD_BUILD_TIMESTAMP="$ts"
export KCONFIG_NOTIMESTAMP=1
bbnote "KBUILD_BUILD_TIMESTAMP: $ts"
+ else
+ ts=`LC_ALL=C date`
+ export KBUILD_BUILD_TIMESTAMP="$ts"
+ bbnote "KBUILD_BUILD_TIMESTAMP: $ts"
fi
# The $use_alternate_initrd is only set from
# do_bundle_initramfs() This variable is specifically for the
@@ -375,7 +391,7 @@ kernel_do_compile() {
use_alternate_initrd=CONFIG_INITRAMFS_SOURCE=${B}/usr/${INITRAMFS_IMAGE_NAME}.cpio
fi
for typeformake in ${KERNEL_IMAGETYPE_FOR_MAKE} ; do
- oe_runmake ${typeformake} CC="${KERNEL_CC}" LD="${KERNEL_LD}" ${KERNEL_EXTRA_ARGS} $use_alternate_initrd
+ oe_runmake ${PARALLEL_MAKE} ${typeformake} ${KERNEL_EXTRA_ARGS} $use_alternate_initrd
done
}
@@ -391,6 +407,13 @@ addtask transform_kernel after do_compile before do_install
do_compile_kernelmodules() {
unset CFLAGS CPPFLAGS CXXFLAGS LDFLAGS MACHINE
+
+ # setup native pkg-config variables (kconfig scripts call pkg-config directly, cannot generically be overriden to pkg-config-native)
+ export PKG_CONFIG_DIR="${STAGING_DIR_NATIVE}${libdir_native}/pkgconfig"
+ export PKG_CONFIG_PATH="$PKG_CONFIG_DIR:${STAGING_DATADIR_NATIVE}/pkgconfig"
+ export PKG_CONFIG_LIBDIR="$PKG_CONFIG_DIR"
+ export PKG_CONFIG_SYSROOT_DIR=""
+
if [ "${KERNEL_DEBUG_TIMESTAMPS}" != "1" ]; then
# kernel sources do not use do_unpack, so SOURCE_DATE_EPOCH may not
# be set....
@@ -405,11 +428,15 @@ do_compile_kernelmodules() {
export KBUILD_BUILD_TIMESTAMP="$ts"
export KCONFIG_NOTIMESTAMP=1
bbnote "KBUILD_BUILD_TIMESTAMP: $ts"
+ else
+ ts=`LC_ALL=C date`
+ export KBUILD_BUILD_TIMESTAMP="$ts"
+ bbnote "KBUILD_BUILD_TIMESTAMP: $ts"
fi
if (grep -q -i -e '^CONFIG_MODULES=y$' ${B}/.config); then
- oe_runmake -C ${B} ${PARALLEL_MAKE} modules CC="${KERNEL_CC}" LD="${KERNEL_LD}" ${KERNEL_EXTRA_ARGS}
+ oe_runmake -C ${B} ${PARALLEL_MAKE} modules ${KERNEL_EXTRA_ARGS}
- # Module.symvers gets updated during the
+ # Module.symvers gets updated during the
# building of the kernel modules. We need to
# update this in the shared workdir since some
# external kernel modules has a dependency on
@@ -433,10 +460,10 @@ kernel_do_install() {
unset CFLAGS CPPFLAGS CXXFLAGS LDFLAGS MACHINE
if (grep -q -i -e '^CONFIG_MODULES=y$' .config); then
oe_runmake DEPMOD=echo MODLIB=${D}${nonarch_base_libdir}/modules/${KERNEL_VERSION} INSTALL_FW_PATH=${D}${nonarch_base_libdir}/firmware modules_install
- rm "${D}${nonarch_base_libdir}/modules/${KERNEL_VERSION}/build"
- rm "${D}${nonarch_base_libdir}/modules/${KERNEL_VERSION}/source"
- # If the kernel/ directory is empty remove it to prevent QA issues
- rmdir --ignore-fail-on-non-empty "${D}${nonarch_base_libdir}/modules/${KERNEL_VERSION}/kernel"
+ rm -f "${D}${nonarch_base_libdir}/modules/${KERNEL_VERSION}/build"
+ rm -f "${D}${nonarch_base_libdir}/modules/${KERNEL_VERSION}/source"
+ # Remove empty module directories to prevent QA issues
+ find "${D}${nonarch_base_libdir}/modules/${KERNEL_VERSION}/kernel" -type d -empty -delete
else
bbnote "no modules to install"
fi
@@ -465,9 +492,7 @@ kernel_do_install() {
install -m 0644 System.map ${D}/${KERNEL_IMAGEDEST}/System.map-${KERNEL_VERSION}
install -m 0644 .config ${D}/${KERNEL_IMAGEDEST}/config-${KERNEL_VERSION}
install -m 0644 vmlinux ${D}/${KERNEL_IMAGEDEST}/vmlinux-${KERNEL_VERSION}
- [ -e Module.symvers ] && install -m 0644 Module.symvers ${D}/${KERNEL_IMAGEDEST}/Module.symvers-${KERNEL_VERSION}
- install -d ${D}${sysconfdir}/modules-load.d
- install -d ${D}${sysconfdir}/modprobe.d
+ ! [ -e Module.symvers ] || install -m 0644 Module.symvers ${D}/${KERNEL_IMAGEDEST}/Module.symvers-${KERNEL_VERSION}
}
# Must be ran no earlier than after do_kernel_checkout or else Makefile won't be in ${S}/Makefile
@@ -532,10 +557,11 @@ do_shared_workdir () {
#
echo "${KERNEL_VERSION}" > $kerneldir/${KERNEL_PACKAGE_NAME}-abiversion
+ echo "${KERNEL_LOCALVERSION}" > $kerneldir/${KERNEL_PACKAGE_NAME}-localversion
# Copy files required for module builds
cp System.map $kerneldir/System.map-${KERNEL_VERSION}
- [ -e Module.symvers ] && cp Module.symvers $kerneldir/
+ ! [ -e Module.symvers ] || cp Module.symvers $kerneldir/
cp .config $kerneldir/
mkdir -p $kerneldir/include/config
cp include/config/kernel.release $kerneldir/include/config/kernel.release
@@ -584,14 +610,28 @@ do_shared_workdir () {
cp tools/objtool/objtool ${kerneldir}/tools/objtool/
fi
fi
+
+ # When building with CONFIG_MODVERSIONS=y and CONFIG_RANDSTRUCT=y we need
+ # to copy the build assets generated for the randstruct seed to
+ # STAGING_KERNEL_BUILDDIR, otherwise the out-of-tree modules build will
+ # generate those assets which will result in a different
+ # RANDSTRUCT_HASHED_SEED
+ if [ -d scripts/basic ]; then
+ mkdir -p ${kerneldir}/scripts
+ cp -r scripts/basic ${kerneldir}/scripts
+ fi
+
+ if [ -d scripts/gcc-plugins ]; then
+ mkdir -p ${kerneldir}/scripts
+ cp -r scripts/gcc-plugins ${kerneldir}/scripts
+ fi
+
}
# We don't need to stage anything, not the modules/firmware since those would clash with linux-firmware
-sysroot_stage_all () {
- :
-}
+SYSROOT_DIRS = ""
-KERNEL_CONFIG_COMMAND ?= "oe_runmake_call -C ${S} CC="${KERNEL_CC}" LD="${KERNEL_LD}" O=${B} olddefconfig || oe_runmake -C ${S} O=${B} CC="${KERNEL_CC}" LD="${KERNEL_LD}" oldnoconfig"
+KERNEL_CONFIG_COMMAND ?= "oe_runmake_call -C ${S} O=${B} olddefconfig || oe_runmake -C ${S} O=${B} oldnoconfig"
python check_oldest_kernel() {
oldest_kernel = d.getVar('OLDEST_KERNEL')
@@ -605,14 +645,33 @@ python check_oldest_kernel() {
}
check_oldest_kernel[vardepsexclude] += "OLDEST_KERNEL KERNEL_VERSION"
-do_configure[prefuncs] += "check_oldest_kernel"
+do_compile[postfuncs] += "check_oldest_kernel"
+
+KERNEL_LOCALVERSION ??= ""
+
+# 6.3+ requires the variable LOCALVERSION to be set to not get a "+" in
+# the local version. Having it empty means nothing will be added, and any
+# value will be appended to the local kernel version. This replaces the
+# use of .scmversion file for setting a localversion without using
+# the CONFIG_LOCALVERSION option.
+#
+# Note: This class saves the value of localversion to a file
+# so other recipes like make-mod-scripts can restore it via the
+# helper function get_kernellocalversion_file
+export LOCALVERSION="${KERNEL_LOCALVERSION}"
kernel_do_configure() {
# fixes extra + in /lib/modules/2.6.37+
# $ scripts/setlocalversion . => +
# $ make kernelversion => 2.6.37
# $ make kernelrelease => 2.6.37+
- touch ${B}/.scmversion ${S}/.scmversion
+ # See kernel-arch.bbclass for post v6.3 removal of the extra
+ # + in localversion. .scmversion is no longer used, and the
+ # variable LOCALVERSION must be used
+ if [ ! -e ${B}/.scmversion -a ! -e ${S}/.scmversion ]; then
+ echo ${KERNEL_LOCALVERSION} > ${B}/.scmversion
+ echo ${KERNEL_LOCALVERSION} > ${S}/.scmversion
+ fi
if [ "${S}" != "${B}" ] && [ -f "${S}/.config" ] && [ ! -f "${B}/.config" ]; then
mv "${S}/.config" "${B}/.config"
@@ -634,9 +693,10 @@ do_savedefconfig() {
do_savedefconfig[nostamp] = "1"
addtask savedefconfig after do_configure
-inherit cml1
+inherit cml1 pkgconfig
-KCONFIG_CONFIG_COMMAND:append = " PAHOLE=false LD='${KERNEL_LD}' HOSTLDFLAGS='${BUILD_LDFLAGS}'"
+# Need LD, HOSTLDFLAGS and more for config operations
+KCONFIG_CONFIG_COMMAND:append = " ${EXTRA_OEMAKE}"
EXPORT_FUNCTIONS do_compile do_transform_kernel do_transform_bundled_initramfs do_install do_configure
@@ -669,13 +729,13 @@ pkg_postinst:${KERNEL_PACKAGE_NAME}-base () {
mkdir -p $D/lib/modules/${KERNEL_VERSION}
fi
if [ -n "$D" ]; then
- depmodwrapper -a -b $D ${KERNEL_VERSION}
+ depmodwrapper -a -b $D ${KERNEL_VERSION} ${KERNEL_PACKAGE_NAME}
else
depmod -a ${KERNEL_VERSION}
fi
}
-PACKAGESPLITFUNCS:prepend = "split_kernel_packages "
+PACKAGESPLITFUNCS =+ "split_kernel_packages"
python split_kernel_packages () {
do_split_packages(d, root='${nonarch_base_libdir}/firmware', file_regex=r'^(.*)\.(bin|fw|cis|csp|dsp)$', output_pattern='${KERNEL_PACKAGE_NAME}-firmware-%s', description='Firmware for %s', recursive=True, extra_depends='')
@@ -706,7 +766,7 @@ addtask kernel_link_images after do_compile before do_strip
python do_strip() {
import shutil
- strip = d.getVar('STRIP')
+ strip = d.getVar('KERNEL_STRIP')
extra_sections = d.getVar('KERNEL_IMAGE_STRIP_EXTRA_SECTIONS')
kernel_image = d.getVar('B') + "/" + d.getVar('KERNEL_OUTPUT_DIR') + "/vmlinux"
diff --git a/meta/classes-recipe/kernelsrc.bbclass b/meta/classes-recipe/kernelsrc.bbclass
new file mode 100644
index 0000000000..ecb02dc9ed
--- /dev/null
+++ b/meta/classes-recipe/kernelsrc.bbclass
@@ -0,0 +1,17 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+S = "${STAGING_KERNEL_DIR}"
+deltask do_fetch
+deltask do_unpack
+do_patch[depends] += "virtual/kernel:do_shared_workdir"
+do_patch[noexec] = "1"
+do_package[depends] += "virtual/kernel:do_populate_sysroot"
+KERNEL_VERSION = "${@get_kernelversion_file("${STAGING_KERNEL_BUILDDIR}")}"
+LOCAL_VERSION = "${@get_kernellocalversion_file("${STAGING_KERNEL_BUILDDIR}")}"
+
+inherit linux-kernel-base
+
diff --git a/meta/classes-recipe/lib_package.bbclass b/meta/classes-recipe/lib_package.bbclass
new file mode 100644
index 0000000000..6d110155e5
--- /dev/null
+++ b/meta/classes-recipe/lib_package.bbclass
@@ -0,0 +1,12 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+#
+# ${PN}-bin is defined in bitbake.conf
+#
+# We need to allow the other packages to be greedy with what they
+# want out of /usr/bin and /usr/sbin before ${PN}-bin gets greedy.
+#
+PACKAGE_BEFORE_PN = "${PN}-bin"
diff --git a/meta/classes/libc-package.bbclass b/meta/classes-recipe/libc-package.bbclass
index 13ef8cdc0d..c06a2ce90a 100644
--- a/meta/classes/libc-package.bbclass
+++ b/meta/classes-recipe/libc-package.bbclass
@@ -1,4 +1,10 @@
#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+#
# This class knows how to package up [e]glibc. Its shared since prebuild binary toolchains
# may need packaging and its pointless to duplicate this code.
#
@@ -45,6 +51,7 @@ PACKAGE_NO_GCONV ?= "0"
OVERRIDES:append = ":${TARGET_ARCH}-${TARGET_OS}"
locale_base_postinst_ontarget() {
+mkdir ${libdir}/locale
localedef --inputfile=${datadir}/i18n/locales/%s --charmap=%s %s
}
@@ -261,7 +268,8 @@ python package_do_split_gconvs () {
"riscv32": " --uint32-align=4 --little-endian ", \
"i586": " --uint32-align=4 --little-endian ", \
"i686": " --uint32-align=4 --little-endian ", \
- "x86_64": " --uint32-align=4 --little-endian " }
+ "x86_64": " --uint32-align=4 --little-endian ", \
+ "loongarch64": " --uint32-align=4 --little-endian " }
if target_arch in locale_arch_options:
localedef_opts = locale_arch_options[target_arch]
@@ -270,7 +278,7 @@ python package_do_split_gconvs () {
bb.fatal("unknown arch:" + target_arch + " for locale_arch_options")
localedef_opts += " --force --no-hard-links --no-archive --prefix=%s \
- --inputfile=%s/%s/i18n/locales/%s --charmap=%s %s/%s" \
+ --inputfile=%s/%s/i18n/locales/%s --charmap=%s %s/%s --no-warnings=ascii" \
% (treedir, treedir, datadir, locale, encoding, outputpath, name)
cmd = "PATH=\"%s\" I18NPATH=\"%s\" GCONV_PATH=\"%s\" cross-localedef %s" % \
diff --git a/meta/classes/license_image.bbclass b/meta/classes-recipe/license_image.bbclass
index 3213ea758e..19b3dc55ba 100644
--- a/meta/classes/license_image.bbclass
+++ b/meta/classes-recipe/license_image.bbclass
@@ -1,3 +1,9 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
ROOTFS_LICENSE_DIR = "${IMAGE_ROOTFS}/usr/share/common-licenses"
# This requires LICENSE_CREATE_PACKAGE=1 to work too
@@ -12,7 +18,7 @@ python() {
python write_package_manifest() {
# Get list of installed packages
- license_image_dir = d.expand('${LICENSE_DIRECTORY}/${IMAGE_NAME}')
+ license_image_dir = d.expand('${LICENSE_DIRECTORY}/${SSTATE_PKGARCH}/${IMAGE_NAME}')
bb.utils.mkdirhier(license_image_dir)
from oe.rootfs import image_list_installed_packages
from oe.utils import format_pkg_list
@@ -43,7 +49,7 @@ python license_create_manifest() {
pkg_dic[pkg_name]["LICENSE"] = pkg_dic[pkg_name][pkg_lic_name]
rootfs_license_manifest = os.path.join(d.getVar('LICENSE_DIRECTORY'),
- d.getVar('IMAGE_NAME'), 'license.manifest')
+ d.getVar('SSTATE_PKGARCH'), d.getVar('IMAGE_NAME'), 'license.manifest')
write_license_files(d, rootfs_license_manifest, pkg_dic, rootfs=True)
}
@@ -53,6 +59,8 @@ def write_license_files(d, license_manifest, pkg_dic, rootfs=True):
bad_licenses = (d.getVar("INCOMPATIBLE_LICENSE") or "").split()
bad_licenses = expand_wildcard_licenses(d, bad_licenses)
+ pkgarchs = d.getVar("SSTATE_ARCHS").split()
+ pkgarchs.reverse()
exceptions = (d.getVar("INCOMPATIBLE_LICENSE_EXCEPTIONS") or "").split()
with open(license_manifest, "w") as license_file:
@@ -92,9 +100,13 @@ def write_license_files(d, license_manifest, pkg_dic, rootfs=True):
license_file.write("FILES: %s\n\n" % pkg_dic[pkg]["FILES"])
for lic in pkg_dic[pkg]["LICENSES"]:
- lic_file = os.path.join(d.getVar('LICENSE_DIRECTORY'),
- pkg_dic[pkg]["PN"], "generic_%s" %
- re.sub(r'\+', '', lic))
+ for pkgarch in pkgarchs:
+ lic_file = os.path.join(d.getVar('LICENSE_DIRECTORY'),
+ pkgarch,
+ pkg_dic[pkg]["PN"], "generic_%s" %
+ re.sub(r'\+', '', lic))
+ if os.path.exists(lic_file):
+ break
# add explicity avoid of CLOSED license because isn't generic
if lic == "CLOSED":
continue
@@ -124,8 +136,13 @@ def write_license_files(d, license_manifest, pkg_dic, rootfs=True):
for pkg in sorted(pkg_dic):
pkg_rootfs_license_dir = os.path.join(rootfs_license_dir, pkg)
bb.utils.mkdirhier(pkg_rootfs_license_dir)
- pkg_license_dir = os.path.join(d.getVar('LICENSE_DIRECTORY'),
- pkg_dic[pkg]["PN"])
+ for pkgarch in pkgarchs:
+ pkg_license_dir = os.path.join(d.getVar('LICENSE_DIRECTORY'),
+ pkgarch, pkg_dic[pkg]["PN"])
+ if os.path.exists(pkg_license_dir):
+ break
+ if not os.path.exists(pkg_license_dir ):
+ bb.fatal("Couldn't find license information for dependency %s" % pkg)
pkg_manifest_licenses = [canonical_license(d, lic) \
for lic in pkg_dic[pkg]["LICENSES"]]
@@ -177,7 +194,7 @@ def write_license_files(d, license_manifest, pkg_dic, rootfs=True):
os.lchown(p, 0, 0)
os.chmod(p, stat.S_IRWXU | stat.S_IRGRP | stat.S_IXGRP | stat.S_IROTH | stat.S_IXOTH)
-
+write_license_files[vardepsexclude] = "SSTATE_ARCHS"
def license_deployed_manifest(d):
"""
@@ -189,6 +206,8 @@ def license_deployed_manifest(d):
dep_dic = {}
man_dic = {}
lic_dir = d.getVar("LICENSE_DIRECTORY")
+ pkgarchs = d.getVar("SSTATE_ARCHS").split()
+ pkgarchs.reverse()
dep_dic = get_deployed_dependencies(d)
for dep in dep_dic.keys():
@@ -198,12 +217,19 @@ def license_deployed_manifest(d):
man_dic[dep]["PN"] = dep
man_dic[dep]["FILES"] = \
" ".join(get_deployed_files(dep_dic[dep]))
- with open(os.path.join(lic_dir, dep, "recipeinfo"), "r") as f:
+
+ for pkgarch in pkgarchs:
+ licfile = os.path.join(lic_dir, pkgarch, dep, "recipeinfo")
+ if os.path.exists(licfile):
+ break
+ if not os.path.exists(licfile):
+ bb.fatal("Couldn't find license information for dependency %s" % dep)
+ with open(licfile, "r") as f:
for line in f.readlines():
key,val = line.split(": ", 1)
man_dic[dep][key] = val[:-1]
- lic_manifest_dir = os.path.join(d.getVar('LICENSE_DIRECTORY'),
+ lic_manifest_dir = os.path.join(d.getVar('LICENSE_DIRECTORY'), d.getVar('SSTATE_PKGARCH'),
d.getVar('IMAGE_NAME'))
bb.utils.mkdirhier(lic_manifest_dir)
image_license_manifest = os.path.join(lic_manifest_dir, 'image_license.manifest')
@@ -211,7 +237,7 @@ def license_deployed_manifest(d):
link_name = d.getVar('IMAGE_LINK_NAME')
if link_name:
- lic_manifest_symlink_dir = os.path.join(d.getVar('LICENSE_DIRECTORY'),
+ lic_manifest_symlink_dir = os.path.join(d.getVar('LICENSE_DIRECTORY'), d.getVar('SSTATE_PKGARCH'),
link_name)
# remove old symlink
if os.path.islink(lic_manifest_symlink_dir):
@@ -221,6 +247,8 @@ def license_deployed_manifest(d):
if lic_manifest_dir != lic_manifest_symlink_dir:
os.symlink(lic_manifest_dir, lic_manifest_symlink_dir)
+license_deployed_manifest[vardepsexclude] = "SSTATE_ARCHS"
+
def get_deployed_dependencies(d):
"""
Get all the deployed dependencies of an image
@@ -229,7 +257,7 @@ def get_deployed_dependencies(d):
deploy = {}
# Get all the dependencies for the current task (rootfs).
taskdata = d.getVar("BB_TASKDEPDATA", False)
- pn = d.getVar("PN", True)
+ pn = d.getVar("PN")
depends = list(set([dep[0] for dep
in list(taskdata.values())
if not dep[0].endswith("-native") and not dep[0] == pn]))
@@ -249,7 +277,7 @@ def get_deployed_dependencies(d):
break
return deploy
-get_deployed_dependencies[vardepsexclude] = "BB_TASKDEPDATA"
+get_deployed_dependencies[vardepsexclude] = "BB_TASKDEPDATA SSTATE_ARCHS"
def get_deployed_files(man_file):
"""
@@ -266,7 +294,7 @@ def get_deployed_files(man_file):
dep_files.append(os.path.basename(f))
return dep_files
-ROOTFS_POSTPROCESS_COMMAND:prepend = "write_package_manifest; license_create_manifest; "
+ROOTFS_POSTPROCESS_COMMAND:prepend = "write_package_manifest license_create_manifest "
do_rootfs[recrdeptask] += "do_populate_lic"
python do_populate_lic_deploy() {
diff --git a/meta/classes/linux-dummy.bbclass b/meta/classes-recipe/linux-dummy.bbclass
index 9a06a509dd..9291533cf9 100644
--- a/meta/classes/linux-dummy.bbclass
+++ b/meta/classes-recipe/linux-dummy.bbclass
@@ -1,3 +1,8 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
python __anonymous () {
if d.getVar('PREFERRED_PROVIDER_virtual/kernel') == 'linux-dummy':
diff --git a/meta/classes-recipe/linux-kernel-base.bbclass b/meta/classes-recipe/linux-kernel-base.bbclass
new file mode 100644
index 0000000000..e2187a73f0
--- /dev/null
+++ b/meta/classes-recipe/linux-kernel-base.bbclass
@@ -0,0 +1,62 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+# parse kernel ABI version out of <linux/version.h>
+def get_kernelversion_headers(p):
+ import re
+
+ fn = p + '/include/linux/utsrelease.h'
+ if not os.path.isfile(fn):
+ # after 2.6.33-rc1
+ fn = p + '/include/generated/utsrelease.h'
+ if not os.path.isfile(fn):
+ fn = p + '/include/linux/version.h'
+
+ try:
+ f = open(fn, 'r')
+ except IOError:
+ return None
+
+ l = f.readlines()
+ f.close()
+ r = re.compile("#define UTS_RELEASE \"(.*)\"")
+ for s in l:
+ m = r.match(s)
+ if m:
+ return m.group(1)
+ return None
+
+
+def get_kernelversion_file(p):
+ fn = p + '/kernel-abiversion'
+
+ try:
+ with open(fn, 'r') as f:
+ return f.readlines()[0].strip()
+ except IOError:
+ return None
+
+def get_kernellocalversion_file(p):
+ fn = p + '/kernel-localversion'
+
+ try:
+ with open(fn, 'r') as f:
+ return f.readlines()[0].strip()
+ except IOError:
+ return ""
+
+ return ""
+
+def linux_module_packages(s, d):
+ suffix = ""
+ return " ".join(map(lambda s: "kernel-module-%s%s" % (s.lower().replace('_', '-').replace('@', '+'), suffix), s.split()))
+
+export KBUILD_BUILD_VERSION = "1"
+export KBUILD_BUILD_USER ?= "oe-user"
+export KBUILD_BUILD_HOST ?= "oe-host"
+
+# that's all
+
diff --git a/meta/classes/linuxloader.bbclass b/meta/classes-recipe/linuxloader.bbclass
index 4447c8847c..2ea1b62254 100644
--- a/meta/classes/linuxloader.bbclass
+++ b/meta/classes-recipe/linuxloader.bbclass
@@ -1,3 +1,9 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
def get_musl_loader_arch(d):
import re
ldso_arch = "NotSupported"
@@ -40,6 +46,8 @@ def get_glibc_loader(d):
dynamic_loader = "${base_libdir}/ld-linux-mipsn8.so.1"
elif targetarch.startswith("mips"):
dynamic_loader = "${base_libdir}/ld.so.1"
+ elif targetarch.startswith("loongarch64"):
+ dynamic_loader = "${base_libdir}/ld-linux-loongarch-lp64d.so.1"
elif targetarch == "powerpc64le":
dynamic_loader = "${base_libdir}/ld64.so.2"
elif targetarch == "powerpc64":
diff --git a/meta/classes/live-vm-common.bbclass b/meta/classes-recipe/live-vm-common.bbclass
index 74e7074a53..d90cc67ebc 100644
--- a/meta/classes/live-vm-common.bbclass
+++ b/meta/classes-recipe/live-vm-common.bbclass
@@ -1,3 +1,9 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
# Some of the vars for vm and live image are conflicted, this function
# is used for fixing the problem.
def set_live_vm_vars(d, suffix):
@@ -62,8 +68,8 @@ efi_hddimg_populate() {
efi_populate $1
}
-inherit ${EFI_CLASS}
-inherit ${PCBIOS_CLASS}
+inherit_defer ${EFI_CLASS}
+inherit_defer ${PCBIOS_CLASS}
populate_kernel() {
dest=$1
diff --git a/meta/classes-recipe/manpages.bbclass b/meta/classes-recipe/manpages.bbclass
new file mode 100644
index 0000000000..e9ca2f895b
--- /dev/null
+++ b/meta/classes-recipe/manpages.bbclass
@@ -0,0 +1,41 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+# Inherit this class to enable or disable building and installation of manpages
+# depending on whether 'api-documentation' is in DISTRO_FEATURES. Such building
+# tends to pull in the entire XML stack and other tools, so it's not enabled
+# by default.
+PACKAGECONFIG:append:class-target = " ${@bb.utils.contains('DISTRO_FEATURES', 'api-documentation', 'manpages', '', d)}"
+
+inherit qemu
+
+# usually manual files are packaged to ${PN}-doc except man-pages
+MAN_PKG ?= "${PN}-doc"
+
+# only add man-db to RDEPENDS when manual files are built and installed
+RDEPENDS:${MAN_PKG} += "${@bb.utils.contains('PACKAGECONFIG', 'manpages', 'man-db', '', d)}"
+
+pkg_postinst:${MAN_PKG}:append () {
+ # only update manual page index caches when manual files are built and installed
+ if ${@bb.utils.contains('PACKAGECONFIG', 'manpages', 'true', 'false', d)}; then
+ if test -n "$D"; then
+ if ${@bb.utils.contains('MACHINE_FEATURES', 'qemu-usermode', 'true', 'false', d)}; then
+ $INTERCEPT_DIR/postinst_intercept update_mandb ${PKG} mlprefix=${MLPREFIX} binprefix=${MLPREFIX} bindir=${bindir} sysconfdir=${sysconfdir} mandir=${mandir}
+ else
+ $INTERCEPT_DIR/postinst_intercept delay_to_first_boot ${PKG} mlprefix=${MLPREFIX}
+ fi
+ else
+ mandb -q
+ fi
+ fi
+}
+
+pkg_postrm:${MAN_PKG}:append () {
+ # only update manual page index caches when manual files are built and installed
+ if ${@bb.utils.contains('PACKAGECONFIG', 'manpages', 'true', 'false', d)}; then
+ mandb -q
+ fi
+}
diff --git a/meta/classes/meson-routines.bbclass b/meta/classes-recipe/meson-routines.bbclass
index be3aeedeba..a944a8fff1 100644
--- a/meta/classes/meson-routines.bbclass
+++ b/meta/classes-recipe/meson-routines.bbclass
@@ -1,3 +1,9 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
inherit siteinfo
def meson_array(var, d):
@@ -17,6 +23,8 @@ def meson_cpu_family(var, d):
return 'arm'
elif arch == 'aarch64_be':
return 'aarch64'
+ elif arch == 'loongarch64':
+ return 'loongarch64'
elif arch == 'mipsel':
return 'mips'
elif arch == 'mips64el':
diff --git a/meta/classes/meson.bbclass b/meta/classes-recipe/meson.bbclass
index 0bfe945811..03fa2c06eb 100644
--- a/meta/classes/meson.bbclass
+++ b/meta/classes-recipe/meson.bbclass
@@ -1,3 +1,9 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
inherit python3native meson-routines qemu
DEPENDS:append = " meson-native ninja-native"
@@ -14,6 +20,9 @@ do_configure[cleandirs] = "${B}"
# Where the meson.build build configuration is
MESON_SOURCEPATH = "${S}"
+# The target to build in do_compile. If unset the default targets are built.
+MESON_TARGET ?= ""
+
def noprefix(var, d):
return d.getVar(var).replace(d.getVar('prefix') + '/', '', 1)
@@ -52,24 +61,25 @@ def rust_tool(d, target_var):
return "rust = %s" % repr(cmd)
addtask write_config before do_configure
-do_write_config[vardeps] += "CC CXX LD AR NM STRIP READELF CFLAGS CXXFLAGS LDFLAGS RUSTC RUSTFLAGS"
+do_write_config[vardeps] += "CC CXX AR NM STRIP READELF OBJCOPY CFLAGS CXXFLAGS LDFLAGS RUSTC RUSTFLAGS EXEWRAPPER_ENABLED"
do_write_config() {
# This needs to be Py to split the args into single-element lists
cat >${WORKDIR}/meson.cross <<EOF
[binaries]
c = ${@meson_array('CC', d)}
cpp = ${@meson_array('CXX', d)}
+cython = 'cython3'
ar = ${@meson_array('AR', d)}
nm = ${@meson_array('NM', d)}
strip = ${@meson_array('STRIP', d)}
readelf = ${@meson_array('READELF', d)}
objcopy = ${@meson_array('OBJCOPY', d)}
-pkgconfig = 'pkg-config'
-llvm-config = 'llvm-config${LLVMVERSION}'
+pkg-config = 'pkg-config'
+llvm-config = 'llvm-config'
cups-config = 'cups-config'
g-ir-scanner = '${STAGING_BINDIR}/g-ir-scanner-wrapper'
g-ir-compiler = '${STAGING_BINDIR}/g-ir-compiler-wrapper'
-${@rust_tool(d, "HOST_SYS")}
+${@rust_tool(d, "RUST_HOST_SYS")}
${@"exe_wrapper = '${WORKDIR}/meson-qemuwrapper'" if d.getVar('EXEWRAPPER_ENABLED') == 'True' else ""}
[built-in options]
@@ -80,6 +90,7 @@ cpp_link_args = ${@meson_array('LDFLAGS', d)}
[properties]
needs_exe_wrapper = true
+sys_root = '${STAGING_DIR_HOST}'
[host_machine]
system = '${@meson_operating_system('HOST_OS', d)}'
@@ -98,13 +109,15 @@ EOF
[binaries]
c = ${@meson_array('BUILD_CC', d)}
cpp = ${@meson_array('BUILD_CXX', d)}
+cython = 'cython3'
ar = ${@meson_array('BUILD_AR', d)}
nm = ${@meson_array('BUILD_NM', d)}
strip = ${@meson_array('BUILD_STRIP', d)}
readelf = ${@meson_array('BUILD_READELF', d)}
objcopy = ${@meson_array('BUILD_OBJCOPY', d)}
-pkgconfig = 'pkg-config-native'
-${@rust_tool(d, "BUILD_SYS")}
+llvm-config = '${STAGING_BINDIR_NATIVE}/llvm-config'
+pkg-config = 'pkg-config-native'
+${@rust_tool(d, "RUST_BUILD_SYS")}
[built-in options]
c_args = ${@meson_array('BUILD_CFLAGS', d)}
@@ -140,21 +153,8 @@ meson_do_configure() {
# https://github.com/mesonbuild/meson/commit/ef9aeb188ea2bc7353e59916c18901cde90fa2b3
unset LD
- # sstate.bbclass no longer removes empty directories to avoid a race (see
- # commit 4f94d929 "sstate/staging: Handle directory creation race issue").
- # Unfortunately Python apparently treats an empty egg-info directory as if
- # the version it previously contained still exists and fails if a newer
- # version is required, which Meson does. To avoid this, make sure there are
- # no empty egg-info directories from previous versions left behind. Ignore
- # all errors from rmdir since the egg-info may be a file rather than a
- # directory.
- rmdir ${STAGING_LIBDIR_NATIVE}/${PYTHON_DIR}/site-packages/*.egg-info 2>/dev/null || :
-
- # Work around "Meson fails if /tmp is mounted with noexec #2972"
- mkdir -p "${B}/meson-private/tmp"
- export TMPDIR="${B}/meson-private/tmp"
bbnote Executing meson ${EXTRA_OEMESON}...
- if ! meson ${MESONOPTS} "${MESON_SOURCEPATH}" "${B}" ${MESON_CROSS_FILE} ${EXTRA_OEMESON}; then
+ if ! meson setup ${MESONOPTS} "${MESON_SOURCEPATH}" "${B}" ${MESON_CROSS_FILE} ${EXTRA_OEMESON}; then
bbfatal_log meson failed
fi
}
@@ -171,11 +171,11 @@ do_configure[postfuncs] += "meson_do_qa_configure"
do_compile[progress] = "outof:^\[(\d+)/(\d+)\]\s+"
meson_do_compile() {
- ninja -v ${PARALLEL_MAKE}
+ meson compile -v ${PARALLEL_MAKE} ${MESON_TARGET}
}
meson_do_install() {
- DESTDIR='${D}' ninja -v ${PARALLEL_MAKEINST} install
+ meson install --destdir ${D} --no-rebuild
}
EXPORT_FUNCTIONS do_configure do_compile do_install
diff --git a/meta/classes/mime-xdg.bbclass b/meta/classes-recipe/mime-xdg.bbclass
index 271f48dd72..cbdcb4c7e9 100644
--- a/meta/classes/mime-xdg.bbclass
+++ b/meta/classes-recipe/mime-xdg.bbclass
@@ -1,4 +1,8 @@
#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
# This class creates mime <-> application associations based on entry
# 'MimeType' in *.desktop files
#
diff --git a/meta/classes/mime.bbclass b/meta/classes-recipe/mime.bbclass
index 8d176a884e..9b13f62bda 100644
--- a/meta/classes/mime.bbclass
+++ b/meta/classes-recipe/mime.bbclass
@@ -1,4 +1,10 @@
#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+#
# This class is used by recipes installing mime types
#
diff --git a/meta/classes/module-base.bbclass b/meta/classes-recipe/module-base.bbclass
index 27bd69ff33..2a225881ba 100644
--- a/meta/classes/module-base.bbclass
+++ b/meta/classes-recipe/module-base.bbclass
@@ -1,3 +1,9 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
inherit kernel-arch
# We do the dependency this way because the output is not preserved
@@ -14,6 +20,7 @@ export CROSS_COMPILE = "${TARGET_PREFIX}"
export KBUILD_OUTPUT = "${STAGING_KERNEL_BUILDDIR}"
export KERNEL_VERSION = "${@oe.utils.read_file('${STAGING_KERNEL_BUILDDIR}/kernel-abiversion')}"
+export LOCALVERSION = "${@oe.utils.read_file('${STAGING_KERNEL_BUILDDIR}/kernel-localversion')}"
KERNEL_OBJECT_SUFFIX = ".ko"
# kernel modules are generally machine specific
diff --git a/meta/classes/module.bbclass b/meta/classes-recipe/module.bbclass
index a09ec3ed1e..f2f0b25a2d 100644
--- a/meta/classes/module.bbclass
+++ b/meta/classes-recipe/module.bbclass
@@ -1,3 +1,9 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
inherit module-base kernel-module-split pkgconfig
EXTRA_OEMAKE += "KERNEL_SRC=${STAGING_KERNEL_DIR}"
@@ -14,6 +20,10 @@ python __anonymous () {
d.setVar('KBUILD_EXTRA_SYMBOLS', " ".join(extra_symbols))
}
+python do_package:prepend () {
+ os.environ['STRIP'] = d.getVar('KERNEL_STRIP')
+}
+
python do_devshell:prepend () {
os.environ['CFLAGS'] = ''
os.environ['CPPFLAGS'] = ''
@@ -26,6 +36,8 @@ python do_devshell:prepend () {
os.environ['CC'] = d.getVar('KERNEL_CC')
os.environ['LD'] = d.getVar('KERNEL_LD')
os.environ['AR'] = d.getVar('KERNEL_AR')
+ os.environ['OBJCOPY'] = d.getVar('KERNEL_OBJCOPY')
+ os.environ['STRIP'] = d.getVar('KERNEL_STRIP')
os.environ['O'] = d.getVar('STAGING_KERNEL_BUILDDIR')
kbuild_extra_symbols = d.getVar('KBUILD_EXTRA_SYMBOLS')
if kbuild_extra_symbols:
@@ -39,7 +51,8 @@ module_do_compile() {
oe_runmake KERNEL_PATH=${STAGING_KERNEL_DIR} \
KERNEL_VERSION=${KERNEL_VERSION} \
CC="${KERNEL_CC}" LD="${KERNEL_LD}" \
- AR="${KERNEL_AR}" \
+ AR="${KERNEL_AR}" OBJCOPY="${KERNEL_OBJCOPY}" \
+ STRIP="${KERNEL_STRIP}" \
O=${STAGING_KERNEL_BUILDDIR} \
KBUILD_EXTRA_SYMBOLS="${KBUILD_EXTRA_SYMBOLS}" \
${MAKE_TARGETS}
@@ -49,7 +62,8 @@ module_do_install() {
unset CFLAGS CPPFLAGS CXXFLAGS LDFLAGS
oe_runmake DEPMOD=echo MODLIB="${D}${nonarch_base_libdir}/modules/${KERNEL_VERSION}" \
INSTALL_FW_PATH="${D}${nonarch_base_libdir}/firmware" \
- CC="${KERNEL_CC}" LD="${KERNEL_LD}" \
+ CC="${KERNEL_CC}" LD="${KERNEL_LD}" OBJCOPY="${KERNEL_OBJCOPY}" \
+ STRIP="${KERNEL_STRIP}" \
O=${STAGING_KERNEL_BUILDDIR} \
${MODULES_INSTALL_TARGET}
diff --git a/meta/classes/multilib_header.bbclass b/meta/classes-recipe/multilib_header.bbclass
index efbc24f59b..33f7e027f0 100644
--- a/meta/classes/multilib_header.bbclass
+++ b/meta/classes-recipe/multilib_header.bbclass
@@ -1,3 +1,9 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
inherit siteinfo
# If applicable on the architecture, this routine will rename the header and
diff --git a/meta/classes-recipe/multilib_script.bbclass b/meta/classes-recipe/multilib_script.bbclass
new file mode 100644
index 0000000000..e6f0249529
--- /dev/null
+++ b/meta/classes-recipe/multilib_script.bbclass
@@ -0,0 +1,41 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+#
+# Recipe needs to set MULTILIB_SCRIPTS in the form <pkgname>:<scriptname>, e.g.
+# MULTILIB_SCRIPTS = "${PN}-dev:${bindir}/file1 ${PN}:${base_bindir}/file2"
+# to indicate which script files to process from which packages.
+#
+
+inherit update-alternatives
+
+MULTILIB_SUFFIX = "${@d.getVar('base_libdir',1).split('/')[-1]}"
+
+PACKAGE_PREPROCESS_FUNCS += "multilibscript_rename"
+
+multilibscript_rename() {
+ :
+}
+
+python () {
+ # Do nothing if multilib isn't being used
+ if not d.getVar("MULTILIB_VARIANTS"):
+ return
+ # Do nothing for native/cross
+ if bb.data.inherits_class('native', d) or bb.data.inherits_class('cross', d):
+ return
+
+ for entry in (d.getVar("MULTILIB_SCRIPTS", False) or "").split():
+ pkg, script = entry.split(":")
+ epkg = d.expand(pkg)
+ escript = d.expand(script)
+ scriptname = os.path.basename(escript)
+ d.appendVar("ALTERNATIVE:" + epkg, " " + scriptname + " ")
+ d.setVarFlag("ALTERNATIVE_LINK_NAME", scriptname, escript)
+ d.setVarFlag("ALTERNATIVE_TARGET", scriptname, escript + "-${MULTILIB_SUFFIX}")
+ d.appendVar("multilibscript_rename", "\n mv ${PKGD}" + escript + " ${PKGD}" + escript + "-${MULTILIB_SUFFIX}")
+ d.appendVar("FILES:" + epkg, " " + escript + "-${MULTILIB_SUFFIX}")
+}
diff --git a/meta/classes/native.bbclass b/meta/classes-recipe/native.bbclass
index fc7422c5d7..84a3ec65da 100644
--- a/meta/classes/native.bbclass
+++ b/meta/classes-recipe/native.bbclass
@@ -1,3 +1,9 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
# We want native packages to be relocatable
inherit relocatable
@@ -23,6 +29,8 @@ TARGET_CFLAGS = "${BUILD_CFLAGS}"
TARGET_CXXFLAGS = "${BUILD_CXXFLAGS}"
TARGET_LDFLAGS = "${BUILD_LDFLAGS}"
TARGET_FPU = ""
+TUNE_FEATURES = ""
+ABIEXTENSION = ""
HOST_ARCH = "${BUILD_ARCH}"
HOST_OS = "${BUILD_OS}"
@@ -69,7 +77,7 @@ exec_prefix = "${STAGING_DIR_NATIVE}${prefix_native}"
bindir = "${STAGING_BINDIR_NATIVE}"
sbindir = "${STAGING_SBINDIR_NATIVE}"
-base_libdir = "${STAGING_LIBDIR_NATIVE}"
+base_libdir = "${STAGING_BASE_LIBDIR_NATIVE}"
libdir = "${STAGING_LIBDIR_NATIVE}"
includedir = "${STAGING_INCDIR_NATIVE}"
sysconfdir = "${STAGING_ETCDIR_NATIVE}"
@@ -131,7 +139,7 @@ python native_virtclass_handler () {
if "native" not in classextend:
return
- def map_dependencies(varname, d, suffix = "", selfref=True):
+ def map_dependencies(varname, d, suffix = "", selfref=True, regex=False):
if suffix:
varname = varname + ":" + suffix
deps = d.getVar(varname)
@@ -140,7 +148,9 @@ python native_virtclass_handler () {
deps = bb.utils.explode_deps(deps)
newdeps = []
for dep in deps:
- if dep == pn:
+ if regex and dep.startswith("^") and dep.endswith("$"):
+ newdeps.append(dep[:-1].replace(pn, bpn) + "-native$")
+ elif dep == pn:
if not selfref:
continue
newdeps.append(dep)
@@ -153,7 +163,7 @@ python native_virtclass_handler () {
newdeps.append(dep.replace(pn, bpn) + "-native")
else:
newdeps.append(dep)
- d.setVar(varname, " ".join(newdeps), parsing=True)
+ d.setVar(varname, " ".join(newdeps))
map_dependencies("DEPENDS", e.data, selfref=False)
for pkg in e.data.getVar("PACKAGES", False).split():
@@ -163,6 +173,7 @@ python native_virtclass_handler () {
map_dependencies("RPROVIDES", e.data, pkg)
map_dependencies("RREPLACES", e.data, pkg)
map_dependencies("PACKAGES", e.data)
+ map_dependencies("PACKAGES_DYNAMIC", e.data, regex=True)
provides = e.data.getVar("PROVIDES")
nprovides = []
diff --git a/meta/classes/nativesdk.bbclass b/meta/classes-recipe/nativesdk.bbclass
index f8e9607513..de6debda93 100644
--- a/meta/classes/nativesdk.bbclass
+++ b/meta/classes-recipe/nativesdk.bbclass
@@ -1,3 +1,9 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
# SDK packages are built either explicitly by the user,
# or indirectly via dependency. No need to be in 'world'.
EXCLUDE_FROM_WORLD = "1"
@@ -9,7 +15,10 @@ NATIVESDKLIBC ?= "libc-glibc"
LIBCOVERRIDE = ":${NATIVESDKLIBC}"
CLASSOVERRIDE = "class-nativesdk"
MACHINEOVERRIDES = ""
-MACHINE_FEATURES = ""
+
+MACHINE_FEATURES = "${SDK_MACHINE_FEATURES}"
+DISTRO_FEATURES_BACKFILL = ""
+MACHINE_FEATURES_BACKFILL = ""
MULTILIBS = ""
@@ -55,6 +64,7 @@ TARGET_CXXFLAGS = "${BUILDSDK_CXXFLAGS}"
TARGET_LDFLAGS = "${BUILDSDK_LDFLAGS}"
TARGET_FPU = ""
EXTRA_OECONF_GCC_FLOAT = ""
+TUNE_FEATURES = ""
CPPFLAGS = "${BUILDSDK_CPPFLAGS}"
CFLAGS = "${BUILDSDK_CFLAGS}"
diff --git a/meta/classes/nopackages.bbclass b/meta/classes-recipe/nopackages.bbclass
index 7a4f632d71..9ea7273530 100644
--- a/meta/classes/nopackages.bbclass
+++ b/meta/classes-recipe/nopackages.bbclass
@@ -1,3 +1,9 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
deltask do_package
deltask do_package_write_rpm
deltask do_package_write_ipk
diff --git a/meta/classes-recipe/npm.bbclass b/meta/classes-recipe/npm.bbclass
new file mode 100644
index 0000000000..91da3295f2
--- /dev/null
+++ b/meta/classes-recipe/npm.bbclass
@@ -0,0 +1,352 @@
+# Copyright (C) 2020 Savoir-Faire Linux
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+# This bbclass builds and installs an npm package to the target. The package
+# sources files should be fetched in the calling recipe by using the SRC_URI
+# variable. The ${S} variable should be updated depending of your fetcher.
+#
+# Usage:
+# SRC_URI = "..."
+# inherit npm
+#
+# Optional variables:
+# NPM_ARCH:
+# Override the auto generated npm architecture.
+#
+# NPM_INSTALL_DEV:
+# Set to 1 to also install devDependencies.
+
+inherit python3native
+
+DEPENDS:prepend = "nodejs-native nodejs-oe-cache-native "
+RDEPENDS:${PN}:append:class-target = " nodejs"
+
+EXTRA_OENPM = ""
+
+NPM_INSTALL_DEV ?= "0"
+
+NPM_NODEDIR ?= "${RECIPE_SYSROOT_NATIVE}${prefix_native}"
+
+## must match mapping in nodejs.bb (openembedded-meta)
+def map_nodejs_arch(a, d):
+ import re
+
+ if re.match('i.86$', a): return 'ia32'
+ elif re.match('x86_64$', a): return 'x64'
+ elif re.match('aarch64$', a): return 'arm64'
+ elif re.match('(powerpc64|powerpc64le|ppc64le)$', a): return 'ppc64'
+ elif re.match('powerpc$', a): return 'ppc'
+ return a
+
+NPM_ARCH ?= "${@map_nodejs_arch(d.getVar("TARGET_ARCH"), d)}"
+
+NPM_PACKAGE = "${WORKDIR}/npm-package"
+NPM_CACHE = "${WORKDIR}/npm-cache"
+NPM_BUILD = "${WORKDIR}/npm-build"
+NPM_REGISTRY = "${WORKDIR}/npm-registry"
+
+def npm_global_configs(d):
+ """Get the npm global configuration"""
+ configs = []
+ # Ensure no network access is done
+ configs.append(("offline", "true"))
+ configs.append(("proxy", "http://invalid"))
+ configs.append(("fund", False))
+ configs.append(("audit", False))
+ # Configure the cache directory
+ configs.append(("cache", d.getVar("NPM_CACHE")))
+ return configs
+
+## 'npm pack' runs 'prepare' and 'prepack' scripts. Support for
+## 'ignore-scripts' which prevents this behavior has been removed
+## from nodejs 16. Use simple 'tar' instead of.
+def npm_pack(env, srcdir, workdir):
+ """Emulate 'npm pack' on a specified directory"""
+ import subprocess
+ import os
+ import json
+
+ src = os.path.join(srcdir, 'package.json')
+ with open(src) as f:
+ j = json.load(f)
+
+ # base does not really matter and is for documentation purposes
+ # only. But the 'version' part must exist because other parts of
+ # the bbclass rely on it.
+ base = j['name'].split('/')[-1]
+ tarball = os.path.join(workdir, "%s-%s.tgz" % (base, j['version']));
+
+ # TODO: real 'npm pack' does not include directories while 'tar'
+ # does. But this does not seem to matter...
+ subprocess.run(['tar', 'czf', tarball,
+ '--exclude', './node-modules',
+ '--exclude-vcs',
+ '--transform', r's,^\./,package/,',
+ '--mtime', '1985-10-26T08:15:00.000Z',
+ '.'],
+ check = True, cwd = srcdir)
+
+ return (tarball, j)
+
+python npm_do_configure() {
+ """
+ Step one: configure the npm cache and the main npm package
+
+ Every dependencies have been fetched and patched in the source directory.
+ They have to be packed (this remove unneeded files) and added to the npm
+ cache to be available for the next step.
+
+ The main package and its associated manifest file and shrinkwrap file have
+ to be configured to take into account these cached dependencies.
+ """
+ import base64
+ import copy
+ import json
+ import re
+ import shlex
+ import stat
+ import tempfile
+ from bb.fetch2.npm import NpmEnvironment
+ from bb.fetch2.npm import npm_unpack
+ from bb.fetch2.npm import npm_package
+ from bb.fetch2.npmsw import foreach_dependencies
+ from bb.progress import OutOfProgressHandler
+ from oe.npm_registry import NpmRegistry
+
+ bb.utils.remove(d.getVar("NPM_CACHE"), recurse=True)
+ bb.utils.remove(d.getVar("NPM_PACKAGE"), recurse=True)
+
+ env = NpmEnvironment(d, configs=npm_global_configs(d))
+ registry = NpmRegistry(d.getVar('NPM_REGISTRY'), d.getVar('NPM_CACHE'))
+
+ def _npm_cache_add(tarball, pkg):
+ """Add tarball to local registry and register it in the
+ cache"""
+ registry.add_pkg(tarball, pkg)
+
+ def _npm_integrity(tarball):
+ """Return the npm integrity of a specified tarball"""
+ sha512 = bb.utils.sha512_file(tarball)
+ return "sha512-" + base64.b64encode(bytes.fromhex(sha512)).decode()
+
+ # Manage the manifest file and shrinkwrap files
+ orig_manifest_file = d.expand("${S}/package.json")
+ orig_shrinkwrap_file = d.expand("${S}/npm-shrinkwrap.json")
+ cached_manifest_file = d.expand("${NPM_PACKAGE}/package.json")
+ cached_shrinkwrap_file = d.expand("${NPM_PACKAGE}/npm-shrinkwrap.json")
+
+ with open(orig_manifest_file, "r") as f:
+ orig_manifest = json.load(f)
+
+ cached_manifest = copy.deepcopy(orig_manifest)
+ cached_manifest.pop("dependencies", None)
+ cached_manifest.pop("devDependencies", None)
+
+ has_shrinkwrap_file = True
+
+ try:
+ with open(orig_shrinkwrap_file, "r") as f:
+ orig_shrinkwrap = json.load(f)
+ except IOError:
+ has_shrinkwrap_file = False
+
+ if has_shrinkwrap_file:
+ cached_shrinkwrap = copy.deepcopy(orig_shrinkwrap)
+ for package in orig_shrinkwrap["packages"]:
+ if package != "":
+ cached_shrinkwrap["packages"].pop(package, None)
+ cached_shrinkwrap["packages"][""].pop("dependencies", None)
+ cached_shrinkwrap["packages"][""].pop("devDependencies", None)
+ cached_shrinkwrap["packages"][""].pop("peerDependencies", None)
+
+ # Manage the dependencies
+ progress = OutOfProgressHandler(d, r"^(\d+)/(\d+)$")
+ progress_total = 1 # also count the main package
+ progress_done = 0
+
+ def _count_dependency(name, params, destsuffix):
+ nonlocal progress_total
+ progress_total += 1
+
+ def _cache_dependency(name, params, destsuffix):
+ with tempfile.TemporaryDirectory() as tmpdir:
+ # Add the dependency to the npm cache
+ destdir = os.path.join(d.getVar("S"), destsuffix)
+ (tarball, pkg) = npm_pack(env, destdir, tmpdir)
+ _npm_cache_add(tarball, pkg)
+ # Add its signature to the cached shrinkwrap
+ dep = params
+ dep["version"] = pkg['version']
+ dep["integrity"] = _npm_integrity(tarball)
+ if params.get("dev", False):
+ dep["dev"] = True
+ if "devDependencies" not in cached_shrinkwrap["packages"][""]:
+ cached_shrinkwrap["packages"][""]["devDependencies"] = {}
+ cached_shrinkwrap["packages"][""]["devDependencies"][name] = pkg['version']
+
+ else:
+ if "dependencies" not in cached_shrinkwrap["packages"][""]:
+ cached_shrinkwrap["packages"][""]["dependencies"] = {}
+ cached_shrinkwrap["packages"][""]["dependencies"][name] = pkg['version']
+
+ cached_shrinkwrap["packages"][destsuffix] = dep
+ # Display progress
+ nonlocal progress_done
+ progress_done += 1
+ progress.write("%d/%d" % (progress_done, progress_total))
+
+ dev = bb.utils.to_boolean(d.getVar("NPM_INSTALL_DEV"), False)
+
+ if has_shrinkwrap_file:
+ foreach_dependencies(orig_shrinkwrap, _count_dependency, dev)
+ foreach_dependencies(orig_shrinkwrap, _cache_dependency, dev)
+
+ # Manage Peer Dependencies
+ if has_shrinkwrap_file:
+ packages = orig_shrinkwrap.get("packages", {})
+ peer_deps = packages.get("", {}).get("peerDependencies", {})
+ package_runtime_dependencies = d.getVar("RDEPENDS:%s" % d.getVar("PN"))
+
+ for peer_dep in peer_deps:
+ peer_dep_yocto_name = npm_package(peer_dep)
+ if peer_dep_yocto_name not in package_runtime_dependencies:
+ bb.warn(peer_dep + " is a peer dependencie that is not in RDEPENDS variable. " +
+ "Please add this peer dependencie to the RDEPENDS variable as %s and generate its recipe with devtool"
+ % peer_dep_yocto_name)
+
+ # Configure the main package
+ with tempfile.TemporaryDirectory() as tmpdir:
+ (tarball, _) = npm_pack(env, d.getVar("S"), tmpdir)
+ npm_unpack(tarball, d.getVar("NPM_PACKAGE"), d)
+
+ # Configure the cached manifest file and cached shrinkwrap file
+ def _update_manifest(depkey):
+ for name in orig_manifest.get(depkey, {}):
+ version = cached_shrinkwrap["packages"][""][depkey][name]
+ if depkey not in cached_manifest:
+ cached_manifest[depkey] = {}
+ cached_manifest[depkey][name] = version
+
+ if has_shrinkwrap_file:
+ _update_manifest("dependencies")
+
+ if dev:
+ if has_shrinkwrap_file:
+ _update_manifest("devDependencies")
+
+ os.chmod(cached_manifest_file, os.stat(cached_manifest_file).st_mode | stat.S_IWUSR)
+ with open(cached_manifest_file, "w") as f:
+ json.dump(cached_manifest, f, indent=2)
+
+ if has_shrinkwrap_file:
+ with open(cached_shrinkwrap_file, "w") as f:
+ json.dump(cached_shrinkwrap, f, indent=2)
+}
+
+python npm_do_compile() {
+ """
+ Step two: install the npm package
+
+ Use the configured main package and the cached dependencies to run the
+ installation process. The installation is done in a directory which is
+ not the destination directory yet.
+
+ A combination of 'npm pack' and 'npm install' is used to ensure that the
+ installed files are actual copies instead of symbolic links (which is the
+ default npm behavior).
+ """
+ import shlex
+ import tempfile
+ from bb.fetch2.npm import NpmEnvironment
+
+ bb.utils.remove(d.getVar("NPM_BUILD"), recurse=True)
+
+ with tempfile.TemporaryDirectory() as tmpdir:
+ args = []
+ configs = npm_global_configs(d)
+
+ if bb.utils.to_boolean(d.getVar("NPM_INSTALL_DEV"), False):
+ configs.append(("also", "development"))
+ else:
+ configs.append(("only", "production"))
+
+ # Report as many logs as possible for debugging purpose
+ configs.append(("loglevel", "silly"))
+
+ # Configure the installation to be done globally in the build directory
+ configs.append(("global", "true"))
+ configs.append(("prefix", d.getVar("NPM_BUILD")))
+
+ # Add node-gyp configuration
+ configs.append(("arch", d.getVar("NPM_ARCH")))
+ configs.append(("release", "true"))
+ configs.append(("nodedir", d.getVar("NPM_NODEDIR")))
+ configs.append(("python", d.getVar("PYTHON")))
+
+ env = NpmEnvironment(d, configs)
+
+ # Add node-pre-gyp configuration
+ args.append(("target_arch", d.getVar("NPM_ARCH")))
+ args.append(("build-from-source", "true"))
+
+ # Don't install peer dependencies as they should be in RDEPENDS variable
+ args.append(("legacy-peer-deps", "true"))
+
+ # Pack and install the main package
+ (tarball, _) = npm_pack(env, d.getVar("NPM_PACKAGE"), tmpdir)
+ cmd = "npm install %s %s" % (shlex.quote(tarball), d.getVar("EXTRA_OENPM"))
+ env.run(cmd, args=args)
+}
+
+npm_do_install() {
+ # Step three: final install
+ #
+ # The previous installation have to be filtered to remove some extra files.
+
+ rm -rf ${D}
+
+ # Copy the entire lib and bin directories
+ install -d ${D}/${nonarch_libdir}
+ cp --no-preserve=ownership --recursive ${NPM_BUILD}/lib/. ${D}/${nonarch_libdir}
+
+ if [ -d "${NPM_BUILD}/bin" ]
+ then
+ install -d ${D}/${bindir}
+ cp --no-preserve=ownership --recursive ${NPM_BUILD}/bin/. ${D}/${bindir}
+ fi
+
+ # If the package (or its dependencies) uses node-gyp to build native addons,
+ # object files, static libraries or other temporary files can be hidden in
+ # the lib directory. To reduce the package size and to avoid QA issues
+ # (staticdev with static library files) these files must be removed.
+ local GYP_REGEX=".*/build/Release/[^/]*.node"
+
+ # Remove any node-gyp directory in ${D} to remove temporary build files
+ for GYP_D_FILE in $(find ${D} -regex "${GYP_REGEX}")
+ do
+ local GYP_D_DIR=${GYP_D_FILE%/Release/*}
+
+ rm --recursive --force ${GYP_D_DIR}
+ done
+
+ # Copy only the node-gyp release files
+ for GYP_B_FILE in $(find ${NPM_BUILD} -regex "${GYP_REGEX}")
+ do
+ local GYP_D_FILE=${D}/${prefix}/${GYP_B_FILE#${NPM_BUILD}}
+
+ install -d ${GYP_D_FILE%/*}
+ install -m 755 ${GYP_B_FILE} ${GYP_D_FILE}
+ done
+
+ # Remove the shrinkwrap file which does not need to be packed
+ rm -f ${D}/${nonarch_libdir}/node_modules/*/npm-shrinkwrap.json
+ rm -f ${D}/${nonarch_libdir}/node_modules/@*/*/npm-shrinkwrap.json
+}
+
+FILES:${PN} += " \
+ ${bindir} \
+ ${nonarch_libdir} \
+"
+
+EXPORT_FUNCTIONS do_configure do_compile do_install
diff --git a/meta/classes/overlayfs-etc.bbclass b/meta/classes-recipe/overlayfs-etc.bbclass
index 91afee695c..d339fbbeee 100644
--- a/meta/classes/overlayfs-etc.bbclass
+++ b/meta/classes-recipe/overlayfs-etc.bbclass
@@ -1,3 +1,9 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
# Class for setting up /etc in overlayfs
#
# In order to have /etc directory in overlayfs a special handling at early boot stage is required
@@ -25,7 +31,7 @@
# Regardless which mode you choose, update and migration strategy of configuration files under /etc
# overlay is out of scope of this class
-ROOTFS_POSTPROCESS_COMMAND += '${@bb.utils.contains("IMAGE_FEATURES", "overlayfs-etc", "create_overlayfs_etc_preinit;", "", d)}'
+ROOTFS_POSTPROCESS_COMMAND += '${@bb.utils.contains("IMAGE_FEATURES", "overlayfs-etc", "create_overlayfs_etc_preinit", "", d)}'
IMAGE_FEATURES_CONFLICTS_overlayfs-etc = "${@ 'package-management' if bb.utils.to_boolean(d.getVar('OVERLAYFS_ETC_USE_ORIG_INIT_NAME'), True) else ''}"
OVERLAYFS_ETC_MOUNT_POINT ??= ""
@@ -34,6 +40,8 @@ OVERLAYFS_ETC_DEVICE ??= ""
OVERLAYFS_ETC_USE_ORIG_INIT_NAME ??= "1"
OVERLAYFS_ETC_MOUNT_OPTIONS ??= "defaults"
OVERLAYFS_ETC_INIT_TEMPLATE ??= "${COREBASE}/meta/files/overlayfs-etc-preinit.sh.in"
+OVERLAYFS_ETC_EXPOSE_LOWER ??= "0"
+OVERLAYFS_ETC_CREATE_MOUNT_DIRS ??= "1"
python create_overlayfs_etc_preinit() {
overlayEtcMountPoint = d.getVar("OVERLAYFS_ETC_MOUNT_POINT")
@@ -54,13 +62,17 @@ python create_overlayfs_etc_preinit() {
preinitPath = oe.path.join(d.getVar("IMAGE_ROOTFS"), d.getVar("base_sbindir"), "preinit")
initBaseName = oe.path.join(d.getVar("base_sbindir"), "init")
origInitNameSuffix = ".orig"
+ exposeLower = oe.types.boolean(d.getVar('OVERLAYFS_ETC_EXPOSE_LOWER'))
+ createMoundDirs = oe.types.boolean(d.getVar('OVERLAYFS_ETC_CREATE_MOUNT_DIRS'))
args = {
'OVERLAYFS_ETC_MOUNT_POINT': overlayEtcMountPoint,
'OVERLAYFS_ETC_MOUNT_OPTIONS': d.getVar('OVERLAYFS_ETC_MOUNT_OPTIONS'),
'OVERLAYFS_ETC_FSTYPE': overlayEtcFsType,
'OVERLAYFS_ETC_DEVICE': overlayEtcDevice,
- 'SBIN_INIT_NAME': initBaseName + origInitNameSuffix if useOrigInit else initBaseName
+ 'SBIN_INIT_NAME': initBaseName + origInitNameSuffix if useOrigInit else initBaseName,
+ 'OVERLAYFS_ETC_EXPOSE_LOWER': "true" if exposeLower else "false",
+ 'CREATE_MOUNT_DIRS': "true" if createMoundDirs else "false"
}
if useOrigInit:
diff --git a/meta/classes/overlayfs.bbclass b/meta/classes-recipe/overlayfs.bbclass
index f7069edd41..a82763ec10 100644
--- a/meta/classes/overlayfs.bbclass
+++ b/meta/classes-recipe/overlayfs.bbclass
@@ -1,3 +1,9 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
# Class for generation of overlayfs mount units
#
# It's often desired in Embedded System design to have a read-only rootfs.
@@ -96,7 +102,11 @@ python do_create_overlayfs_units() {
overlayMountPoints = d.getVarFlags("OVERLAYFS_MOUNT_POINT")
for mountPoint in overlayMountPoints:
bb.debug(1, "Process variable flag %s" % mountPoint)
- for lower in d.getVarFlag('OVERLAYFS_WRITABLE_PATHS', mountPoint).split():
+ lowerList = d.getVarFlag('OVERLAYFS_WRITABLE_PATHS', mountPoint)
+ if not lowerList:
+ bb.note("No mount points defined for %s flag, skipping" % (mountPoint))
+ continue
+ for lower in lowerList.split():
bb.debug(1, "Prepare mount unit for %s with data mount point %s" %
(lower, d.getVarFlag('OVERLAYFS_MOUNT_POINT', mountPoint)))
prepareUnits(d.getVarFlag('OVERLAYFS_MOUNT_POINT', mountPoint), lower)
@@ -128,4 +138,5 @@ do_install:append() {
done
}
+do_create_overlayfs_units[vardeps] += "OVERLAYFS_WRITABLE_PATHS"
addtask create_overlayfs_units before do_install
diff --git a/meta/classes/packagegroup.bbclass b/meta/classes-recipe/packagegroup.bbclass
index 557b1b6382..cf6fc354a8 100644
--- a/meta/classes/packagegroup.bbclass
+++ b/meta/classes-recipe/packagegroup.bbclass
@@ -1,3 +1,9 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
# Class for packagegroup (package group) recipes
# By default, only the packagegroup package itself is in PACKAGES.
@@ -16,7 +22,7 @@ PACKAGE_ARCH_EXPANDED := "${PACKAGE_ARCH}"
LICENSE ?= "MIT"
-inherit ${@oe.utils.ifelse(d.getVar('PACKAGE_ARCH_EXPANDED') == 'all', 'allarch', '')}
+inherit_defer ${@oe.utils.ifelse(d.getVar('PACKAGE_ARCH_EXPANDED') == 'all', 'allarch', '')}
# This automatically adds -dbg and -dev flavours of all PACKAGES
# to the list. Their dependencies (RRECOMMENDS) are handled as usual
@@ -48,6 +54,9 @@ deltask do_compile
deltask do_install
deltask do_populate_sysroot
+do_create_runtime_spdx[deptask] = "do_create_spdx"
+do_create_runtime_spdx[rdeptask] = ""
+
INHIBIT_DEFAULT_DEPS = "1"
python () {
diff --git a/meta/classes/perl-version.bbclass b/meta/classes-recipe/perl-version.bbclass
index 84b67b8180..74e33175d9 100644
--- a/meta/classes/perl-version.bbclass
+++ b/meta/classes-recipe/perl-version.bbclass
@@ -1,3 +1,9 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
PERL_OWN_DIR = ""
# Determine the staged version of perl from the perl configuration file
@@ -20,9 +26,6 @@ def get_perl_version(d):
return m.group(1)
return None
-PERLVERSION := "${@get_perl_version(d)}"
-PERLVERSION[vardepvalue] = ""
-
# Determine the staged arch of perl from the perl configuration file
# Assign vardepvalue, because otherwise signature is changed before and after
@@ -43,9 +46,6 @@ def get_perl_arch(d):
return m.group(1)
return None
-PERLARCH := "${@get_perl_arch(d)}"
-PERLARCH[vardepvalue] = ""
-
# Determine the staged arch of perl-native from the perl configuration file
# Assign vardepvalue, because otherwise signature is changed before and after
# perl is built (from None to real version in config.sh).
diff --git a/meta/classes-recipe/perlnative.bbclass b/meta/classes-recipe/perlnative.bbclass
new file mode 100644
index 0000000000..d56ec4ae72
--- /dev/null
+++ b/meta/classes-recipe/perlnative.bbclass
@@ -0,0 +1,9 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+EXTRANATIVEPATH += "perl-native"
+DEPENDS += "perl-native"
+OECMAKE_PERLNATIVE_DIR = "${STAGING_BINDIR_NATIVE}/perl-native"
diff --git a/meta/classes/pixbufcache.bbclass b/meta/classes-recipe/pixbufcache.bbclass
index 886bf195b3..107e38885e 100644
--- a/meta/classes/pixbufcache.bbclass
+++ b/meta/classes-recipe/pixbufcache.bbclass
@@ -1,4 +1,10 @@
#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+#
# This class will generate the proper postinst/postrm scriptlets for pixbuf
# packages.
#
diff --git a/meta/classes-recipe/pkgconfig.bbclass b/meta/classes-recipe/pkgconfig.bbclass
new file mode 100644
index 0000000000..1e1f3824dd
--- /dev/null
+++ b/meta/classes-recipe/pkgconfig.bbclass
@@ -0,0 +1,8 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+DEPENDS:prepend = "pkgconfig-native "
+
diff --git a/meta/classes-recipe/populate_sdk.bbclass b/meta/classes-recipe/populate_sdk.bbclass
new file mode 100644
index 0000000000..caeef5d2b2
--- /dev/null
+++ b/meta/classes-recipe/populate_sdk.bbclass
@@ -0,0 +1,13 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+# The majority of populate_sdk is located in populate_sdk_base
+# This chunk simply facilitates compatibility with SDK only recipes.
+
+inherit populate_sdk_base
+
+addtask populate_sdk after do_install before do_build
+
diff --git a/meta/classes/populate_sdk_base.bbclass b/meta/classes-recipe/populate_sdk_base.bbclass
index 16f929bf59..81896d808f 100644
--- a/meta/classes/populate_sdk_base.bbclass
+++ b/meta/classes-recipe/populate_sdk_base.bbclass
@@ -1,3 +1,9 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
PACKAGES = ""
inherit image-postinst-intercepts image-artifact-names
@@ -9,7 +15,7 @@ COMPLEMENTARY_GLOB[staticdev-pkgs] = '*-staticdev'
COMPLEMENTARY_GLOB[doc-pkgs] = '*-doc'
COMPLEMENTARY_GLOB[dbg-pkgs] = '*-dbg'
COMPLEMENTARY_GLOB[src-pkgs] = '*-src'
-COMPLEMENTARY_GLOB[ptest-pkgs] = '*-ptest'
+COMPLEMENTARY_GLOB[ptest-pkgs] = '*-ptest ${MLPREFIX}ptest-runner'
COMPLEMENTARY_GLOB[bash-completion-pkgs] = '*-bash-completion'
def complementary_globs(featurevar, d):
@@ -31,7 +37,7 @@ SDK_PACKAGE_ARCHS += "sdk-provides-dummy-${SDKPKGSUFFIX}"
# List of locales to install, or "all" for all of them, or unset for none.
SDKIMAGE_LINGUAS ?= "all"
-inherit rootfs_${IMAGE_PKGTYPE}
+inherit_defer rootfs_${IMAGE_PKGTYPE}
SDK_DIR = "${WORKDIR}/sdk"
SDK_OUTPUT = "${SDK_DIR}/image"
@@ -43,9 +49,24 @@ B:task-populate-sdk = "${SDK_DIR}"
SDKTARGETSYSROOT = "${SDKPATH}/sysroots/${REAL_MULTIMACH_TARGET_SYS}"
-TOOLCHAIN_HOST_TASK ?= "nativesdk-packagegroup-sdk-host packagegroup-cross-canadian-${MACHINE}"
+SDK_TOOLCHAIN_LANGS ??= ""
+SDK_TOOLCHAIN_LANGS:remove:sdkmingw32 = "rust"
+# libstd-rs doesn't build for mips n32 with compiler constraint errors
+SDK_TOOLCHAIN_LANGS:remove:mipsarchn32 = "rust"
+
+TOOLCHAIN_HOST_TASK ?= " \
+ nativesdk-packagegroup-sdk-host \
+ packagegroup-cross-canadian-${MACHINE} \
+ ${@bb.utils.contains('SDK_TOOLCHAIN_LANGS', 'go', 'packagegroup-go-cross-canadian-${MACHINE}', '', d)} \
+ ${@bb.utils.contains('SDK_TOOLCHAIN_LANGS', 'rust', 'packagegroup-rust-cross-canadian-${MACHINE}', '', d)} \
+"
TOOLCHAIN_HOST_TASK_ATTEMPTONLY ?= ""
-TOOLCHAIN_TARGET_TASK ?= "${@multilib_pkg_extend(d, 'packagegroup-core-standalone-sdk-target')} target-sdk-provides-dummy"
+TOOLCHAIN_TARGET_TASK ?= " \
+ ${@multilib_pkg_extend(d, 'packagegroup-core-standalone-sdk-target')} \
+ ${@bb.utils.contains('SDK_TOOLCHAIN_LANGS', 'go', multilib_pkg_extend(d, 'packagegroup-go-sdk-target'), '', d)} \
+ ${@bb.utils.contains('SDK_TOOLCHAIN_LANGS', 'rust', multilib_pkg_extend(d, 'libstd-rs'), '', d)} \
+ target-sdk-provides-dummy \
+"
TOOLCHAIN_TARGET_TASK_ATTEMPTONLY ?= ""
TOOLCHAIN_OUTPUTNAME ?= "${SDK_NAME}-toolchain-${SDK_VERSION}"
@@ -53,6 +74,8 @@ TOOLCHAIN_OUTPUTNAME ?= "${SDK_NAME}-toolchain-${SDK_VERSION}"
SDK_ARCHIVE_TYPE ?= "tar.xz"
SDK_XZ_COMPRESSION_LEVEL ?= "-9"
SDK_XZ_OPTIONS ?= "${XZ_DEFAULTS} ${SDK_XZ_COMPRESSION_LEVEL}"
+SDK_ZIP_OPTIONS ?= "-y"
+
# To support different sdk type according to SDK_ARCHIVE_TYPE, now support zip and tar.xz
python () {
@@ -60,7 +83,7 @@ python () {
d.setVar('SDK_ARCHIVE_DEPENDS', 'zip-native')
# SDK_ARCHIVE_CMD used to generate archived sdk ${TOOLCHAIN_OUTPUTNAME}.${SDK_ARCHIVE_TYPE} from input dir ${SDK_OUTPUT}/${SDKPATH} to output dir ${SDKDEPLOYDIR}
# recommand to cd into input dir first to avoid archive with buildpath
- d.setVar('SDK_ARCHIVE_CMD', 'cd ${SDK_OUTPUT}/${SDKPATH}; zip -r -y ${SDKDEPLOYDIR}/${TOOLCHAIN_OUTPUTNAME}.${SDK_ARCHIVE_TYPE} .')
+ d.setVar('SDK_ARCHIVE_CMD', 'cd ${SDK_OUTPUT}/${SDKPATH}; zip -r ${SDK_ZIP_OPTIONS} ${SDKDEPLOYDIR}/${TOOLCHAIN_OUTPUTNAME}.${SDK_ARCHIVE_TYPE} .')
else:
d.setVar('SDK_ARCHIVE_DEPENDS', 'xz-native')
d.setVar('SDK_ARCHIVE_CMD', 'cd ${SDK_OUTPUT}/${SDKPATH}; tar ${SDKTAROPTS} -cf - . | xz ${SDK_XZ_OPTIONS} > ${SDKDEPLOYDIR}/${TOOLCHAIN_OUTPUTNAME}.${SDK_ARCHIVE_TYPE}')
@@ -129,16 +152,17 @@ python write_host_sdk_manifest () {
output.write(format_pkg_list(pkgs, 'ver'))
}
-POPULATE_SDK_POST_TARGET_COMMAND:append = " write_sdk_test_data ; "
-POPULATE_SDK_POST_TARGET_COMMAND:append:task-populate-sdk = " write_target_sdk_manifest; sdk_prune_dirs; "
-POPULATE_SDK_POST_HOST_COMMAND:append:task-populate-sdk = " write_host_sdk_manifest; "
+POPULATE_SDK_POST_TARGET_COMMAND:append = " write_sdk_test_data"
+POPULATE_SDK_POST_TARGET_COMMAND:append:task-populate-sdk = " write_target_sdk_manifest sdk_prune_dirs"
+POPULATE_SDK_POST_HOST_COMMAND:append:task-populate-sdk = " write_host_sdk_manifest"
-SDK_PACKAGING_COMMAND = "${@'${SDK_PACKAGING_FUNC};' if '${SDK_PACKAGING_FUNC}' else ''}"
-SDK_POSTPROCESS_COMMAND = " create_sdk_files; check_sdk_sysroots; archive_sdk; ${SDK_PACKAGING_COMMAND} "
+SDK_PACKAGING_COMMAND = "${@'${SDK_PACKAGING_FUNC}' if '${SDK_PACKAGING_FUNC}' else ''}"
+SDK_POSTPROCESS_COMMAND = "create_sdk_files check_sdk_sysroots archive_sdk ${SDK_PACKAGING_COMMAND}"
def populate_sdk_common(d):
from oe.sdk import populate_sdk
from oe.manifest import create_manifest, Manifest
+ import oe.packagedata
# Handle package exclusions
excl_pkgs = (d.getVar("PACKAGE_EXCLUDE") or "").split()
@@ -161,13 +185,13 @@ def populate_sdk_common(d):
d.setVar("PACKAGE_INSTALL_ATTEMPTONLY", ' '.join(inst_attempt_pkgs))
pn = d.getVar('PN')
- runtime_mapping_rename("TOOLCHAIN_TARGET_TASK", pn, d)
- runtime_mapping_rename("TOOLCHAIN_TARGET_TASK_ATTEMPTONLY", pn, d)
+ oe.packagedata.runtime_mapping_rename("TOOLCHAIN_TARGET_TASK", pn, d)
+ oe.packagedata.runtime_mapping_rename("TOOLCHAIN_TARGET_TASK_ATTEMPTONLY", pn, d)
ld = bb.data.createCopy(d)
ld.setVar("PKGDATA_DIR", "${STAGING_DIR}/${SDK_ARCH}-${SDKPKGSUFFIX}${SDK_VENDOR}-${SDK_OS}/pkgdata")
- runtime_mapping_rename("TOOLCHAIN_HOST_TASK", pn, ld)
- runtime_mapping_rename("TOOLCHAIN_HOST_TASK_ATTEMPTONLY", pn, ld)
+ oe.packagedata.runtime_mapping_rename("TOOLCHAIN_HOST_TASK", pn, ld)
+ oe.packagedata.runtime_mapping_rename("TOOLCHAIN_HOST_TASK_ATTEMPTONLY", pn, ld)
d.setVar("TOOLCHAIN_HOST_TASK", ld.getVar("TOOLCHAIN_HOST_TASK"))
d.setVar("TOOLCHAIN_HOST_TASK_ATTEMPTONLY", ld.getVar("TOOLCHAIN_HOST_TASK_ATTEMPTONLY"))
@@ -184,7 +208,7 @@ fakeroot python do_populate_sdk() {
}
SSTATETASKS += "do_populate_sdk"
SSTATE_SKIP_CREATION:task-populate-sdk = '1'
-do_populate_sdk[cleandirs] = "${SDKDEPLOYDIR}"
+do_populate_sdk[cleandirs] += "${SDKDEPLOYDIR}"
do_populate_sdk[sstate-inputdirs] = "${SDKDEPLOYDIR}"
do_populate_sdk[sstate-outputdirs] = "${SDK_DEPLOY}"
do_populate_sdk[stamp-extra-info] = "${MACHINE_ARCH}${SDKMACHINE}"
@@ -261,7 +285,7 @@ python check_sdk_sysroots() {
dir_walk(SCAN_ROOT)
}
-SDKTAROPTS = "--owner=root --group=root"
+SDKTAROPTS = "--owner=root --group=root --clamp-mtime --mtime=@${SOURCE_DATE_EPOCH}"
fakeroot archive_sdk() {
# Package it up
@@ -348,8 +372,7 @@ do_populate_sdk[vardeps] += "${@sdk_variables(d)}"
python () {
variables = sdk_command_variables(d)
for var in variables:
- if d.getVar(var, False):
- d.setVarFlag(var, 'func', '1')
+ d.setVarFlag(var, 'vardeps', d.getVar(var))
}
do_populate_sdk[file-checksums] += "${TOOLCHAIN_SHAR_REL_TMPL}:True \
diff --git a/meta/classes/populate_sdk_ext.bbclass b/meta/classes-recipe/populate_sdk_ext.bbclass
index e2019f9bbf..f5687e5899 100644
--- a/meta/classes/populate_sdk_ext.bbclass
+++ b/meta/classes-recipe/populate_sdk_ext.bbclass
@@ -1,3 +1,9 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
# Extensible SDK
inherit populate_sdk_base
@@ -114,7 +120,7 @@ python write_host_sdk_ext_manifest () {
f.write("%s %s %s\n" % (info[1], info[2], info[3]))
}
-SDK_POSTPROCESS_COMMAND:append:task-populate-sdk-ext = "write_target_sdk_ext_manifest; write_host_sdk_ext_manifest; "
+SDK_POSTPROCESS_COMMAND:append:task-populate-sdk-ext = " write_target_sdk_ext_manifest write_host_sdk_ext_manifest"
SDK_TITLE:task-populate-sdk-ext = "${@d.getVar('DISTRO_NAME') or d.getVar('DISTRO')} Extensible SDK"
@@ -180,12 +186,7 @@ def create_filtered_tasklist(d, sdkbasepath, tasklistfile, conf_initpath):
if os.path.exists(localconf + '.bak'):
os.replace(localconf + '.bak', localconf)
-python copy_buildsystem () {
- import re
- import shutil
- import glob
- import oe.copy_buildsystem
-
+def copy_bitbake_and_layers(d, baseoutpath, derivative):
oe_init_env_script = d.getVar('OE_INIT_ENV_SCRIPT')
conf_bbpath = ''
@@ -194,13 +195,7 @@ python copy_buildsystem () {
# Copy in all metadata layers + bitbake (as repositories)
buildsystem = oe.copy_buildsystem.BuildSystem('extensible SDK', d)
- baseoutpath = d.getVar('SDK_OUTPUT') + '/' + d.getVar('SDKPATH')
- #check if custome templateconf path is set
- use_custom_templateconf = d.getVar('SDK_CUSTOM_TEMPLATECONF')
-
- # Determine if we're building a derivative extensible SDK (from devtool build-sdk)
- derivative = (d.getVar('SDK_DERIVATIVE') or '') == '1'
if derivative:
workspace_name = 'orig-workspace'
else:
@@ -214,20 +209,23 @@ python copy_buildsystem () {
if os.path.exists(os.path.join(baseoutpath, relpath)):
conf_initpath = relpath
- relpath = os.path.join('layers', path, 'scripts', 'devtool')
+ relpath = os.path.join('layers', path, 'scripts', 'esdk-tools', 'devtool')
if os.path.exists(os.path.join(baseoutpath, relpath)):
- scriptrelpath = os.path.dirname(relpath)
+ esdk_tools_path = os.path.dirname(relpath)
relpath = os.path.join('layers', path, 'meta')
if os.path.exists(os.path.join(baseoutpath, relpath, 'lib', 'oe')):
core_meta_subdir = relpath
d.setVar('oe_init_build_env_path', conf_initpath)
- d.setVar('scriptrelpath', scriptrelpath)
+ d.setVar('esdk_tools_path', esdk_tools_path)
+
+ return (conf_initpath, conf_bbpath, core_meta_subdir, sdkbblayers)
+def write_devtool_config(d, baseoutpath, conf_bbpath, conf_initpath, core_meta_subdir):
# Write out config file for devtool
import configparser
- config = configparser.SafeConfigParser()
+ config = configparser.ConfigParser()
config.add_section('General')
config.set('General', 'bitbake_subdir', conf_bbpath)
config.set('General', 'init_path', conf_initpath)
@@ -241,15 +239,17 @@ python copy_buildsystem () {
with open(os.path.join(baseoutpath, 'conf', 'devtool.conf'), 'w') as f:
config.write(f)
+def write_unlocked_sigs(d, baseoutpath):
unlockedsigs = os.path.join(baseoutpath, 'conf', 'unlocked-sigs.inc')
with open(unlockedsigs, 'w') as f:
pass
+def write_bblayers_conf(d, baseoutpath, sdkbblayers):
# Create a layer for new recipes / appends
bbpath = d.getVar('BBPATH')
env = os.environ.copy()
env['PYTHONDONTWRITEBYTECODE'] = '1'
- bb.process.run(['devtool', '--bbpath', bbpath, '--basepath', baseoutpath, 'create-workspace', '--create-only', os.path.join(baseoutpath, 'workspace')], env=env)
+ bb.process.run(['devtool', '--bbpath', bbpath, '--basepath', baseoutpath, 'create-workspace', '--layerseries', d.getVar("LAYERSERIES_CORENAMES"), '--create-only', os.path.join(baseoutpath, 'workspace')], env=env)
# Create bblayers.conf
bb.utils.mkdirhier(baseoutpath + '/conf')
@@ -273,6 +273,9 @@ python copy_buildsystem () {
f.write(' $' + '{SDKBASEMETAPATH}/workspace \\\n')
f.write(' "\n')
+def copy_uninative(d, baseoutpath):
+ import shutil
+
# Copy uninative tarball
# For now this is where uninative.bbclass expects the tarball
if bb.data.inherits_class('uninative', d):
@@ -282,6 +285,12 @@ python copy_buildsystem () {
bb.utils.mkdirhier(uninative_outdir)
shutil.copy(uninative_file, uninative_outdir)
+ return uninative_checksum
+
+def write_local_conf(d, baseoutpath, derivative, core_meta_subdir, uninative_checksum):
+ #check if custome templateconf path is set
+ use_custom_templateconf = d.getVar('SDK_CUSTOM_TEMPLATECONF')
+
env_passthrough = (d.getVar('BB_ENV_PASSTHROUGH_ADDITIONS') or '').split()
env_passthrough_values = {}
@@ -363,7 +372,8 @@ python copy_buildsystem () {
f.write('BUILDCFG_HEADER = ""\n\n')
# Write METADATA_REVISION
- f.write('METADATA_REVISION = "%s"\n\n' % d.getVar('METADATA_REVISION'))
+ # Needs distro override so it can override the value set in the bbclass code (later than local.conf)
+ f.write('METADATA_REVISION:%s = "%s"\n\n' % (d.getVar('DISTRO'), d.getVar('METADATA_REVISION')))
f.write('# Provide a flag to indicate we are in the EXT_SDK Context\n')
f.write('WITHIN_EXT_SDK = "1"\n\n')
@@ -401,10 +411,9 @@ python copy_buildsystem () {
if os.path.exists(builddir + dest_stub):
shutil.copyfile(builddir + dest_stub, baseoutpath + dest_stub)
- if os.path.exists(builddir + '/cache/bb_unihashes.dat'):
- bb.parse.siggen.save_unitaskhashes()
- bb.utils.mkdirhier(os.path.join(baseoutpath, 'cache'))
- shutil.copyfile(builddir + '/cache/bb_unihashes.dat', baseoutpath + '/cache/bb_unihashes.dat')
+ cachedir = os.path.join(baseoutpath, 'cache')
+ bb.utils.mkdirhier(cachedir)
+ bb.parse.siggen.copy_unitaskhashes(cachedir)
# If PR Service is in use, we need to export this as well
bb.note('Do we have a pr database?')
@@ -433,7 +442,8 @@ python copy_buildsystem () {
else:
# Write a templateconf.cfg
with open(baseoutpath + '/conf/templateconf.cfg', 'w') as f:
- f.write('meta/conf\n')
+ f.write('meta/conf/templates/default\n')
+ os.makedirs(os.path.join(baseoutpath, core_meta_subdir, 'conf/templates/default'), exist_ok=True)
# Ensure any variables set from the external environment (by way of
# BB_ENV_PASSTHROUGH_ADDITIONS) are set in the SDK's configuration
@@ -450,6 +460,9 @@ python copy_buildsystem () {
f.write(line)
f.write('\n')
+def prepare_locked_cache(d, baseoutpath, derivative, conf_initpath):
+ import shutil
+
# Filter the locked signatures file to just the sstate tasks we are interested in
excluded_targets = get_sdk_install_targets(d, images_only=True)
sigfile = d.getVar('WORKDIR') + '/locked-sigs.inc'
@@ -481,7 +494,7 @@ python copy_buildsystem () {
bb.utils.remove(sstate_out, True)
# uninative.bbclass sets NATIVELSBSTRING to 'universal%s' % oe.utils.host_gcc_version(d)
- fixedlsbstring = "universal%s" % oe.utils.host_gcc_version(d)
+ fixedlsbstring = "universal%s" % oe.utils.host_gcc_version(d) if bb.data.inherits_class('uninative', d) else ""
sdk_include_toolchain = (d.getVar('SDK_INCLUDE_TOOLCHAIN') == '1')
sdk_ext_type = d.getVar('SDK_EXT_TYPE')
@@ -492,10 +505,9 @@ python copy_buildsystem () {
else:
tasklistfn = None
- if os.path.exists(builddir + '/cache/bb_unihashes.dat'):
- bb.parse.siggen.save_unitaskhashes()
- bb.utils.mkdirhier(os.path.join(baseoutpath, 'cache'))
- shutil.copyfile(builddir + '/cache/bb_unihashes.dat', baseoutpath + '/cache/bb_unihashes.dat')
+ cachedir = os.path.join(baseoutpath, 'cache')
+ bb.utils.mkdirhier(cachedir)
+ bb.parse.siggen.copy_unitaskhashes(cachedir)
# Add packagedata if enabled
if d.getVar('SDK_INCLUDE_PKGDATA') == '1':
@@ -554,6 +566,9 @@ python copy_buildsystem () {
f = os.path.join(root, name)
os.remove(f)
+def write_manifest(d, baseoutpath):
+ import glob
+
# Write manifest file
# Note: at the moment we cannot include the env setup script here to keep
# it updated, since it gets modified during SDK installation (see
@@ -577,6 +592,32 @@ python copy_buildsystem () {
continue
chksum = bb.utils.sha256_file(fn)
f.write('%s\t%s\n' % (chksum, os.path.relpath(fn, baseoutpath)))
+
+
+python copy_buildsystem () {
+ import oe.copy_buildsystem
+
+ baseoutpath = d.getVar('SDK_OUTPUT') + '/' + d.getVar('SDKPATH')
+
+ # Determine if we're building a derivative extensible SDK (from devtool build-sdk)
+ derivative = (d.getVar('SDK_DERIVATIVE') or '') == '1'
+
+ conf_initpath, conf_bbpath, core_meta_subdir, sdkbblayers = copy_bitbake_and_layers(d, baseoutpath, derivative)
+
+ write_devtool_config(d, baseoutpath, conf_bbpath, conf_initpath, core_meta_subdir)
+
+ write_unlocked_sigs(d, baseoutpath)
+
+ write_bblayers_conf(d, baseoutpath, sdkbblayers)
+
+ uninative_checksum = copy_uninative(d, baseoutpath)
+
+ write_local_conf(d, baseoutpath, derivative, core_meta_subdir, uninative_checksum)
+
+ prepare_locked_cache(d, baseoutpath, derivative, conf_initpath)
+
+ write_manifest(d, baseoutpath)
+
}
def get_current_buildtools(d):
@@ -621,21 +662,6 @@ def get_sdk_required_utilities(buildtools_fn, d):
return ' '.join(sanity_required_utilities)
install_tools() {
- install -d ${SDK_OUTPUT}/${SDKPATHNATIVE}${bindir_nativesdk}
- scripts="devtool recipetool oe-find-native-sysroot runqemu* wic"
- for script in $scripts; do
- for scriptfn in `find ${SDK_OUTPUT}/${SDKPATH}/${scriptrelpath} -maxdepth 1 -executable -name "$script"`; do
- targetscriptfn="${SDK_OUTPUT}/${SDKPATHNATIVE}${bindir_nativesdk}/$(basename $scriptfn)"
- test -e ${targetscriptfn} || ln -rs ${scriptfn} ${targetscriptfn}
- done
- done
- # We can't use the same method as above because files in the sysroot won't exist at this point
- # (they get populated from sstate on installation)
- unfsd_path="${SDK_OUTPUT}/${SDKPATHNATIVE}${bindir_nativesdk}/unfsd"
- if [ "${SDK_INCLUDE_TOOLCHAIN}" = "1" -a ! -e $unfsd_path ] ; then
- binrelpath=${@os.path.relpath(d.getVar('STAGING_BINDIR_NATIVE'), d.getVar('TMPDIR'))}
- ln -rs ${SDK_OUTPUT}/${SDKPATH}/tmp/$binrelpath/unfsd $unfsd_path
- fi
touch ${SDK_OUTPUT}/${SDKPATH}/.devtoolbase
# find latest buildtools-tarball and install it
@@ -714,7 +740,7 @@ sdk_ext_postinst() {
# A bit of another hack, but we need this in the path only for devtool
# so put it at the end of $PATH.
- echo "export PATH=$target_sdk_dir/sysroots/${SDK_SYS}${bindir_nativesdk}:\$PATH" >> $env_setup_script
+ echo "export PATH=\"$target_sdk_dir/${esdk_tools_path}:\$PATH\"" >> $env_setup_script
echo "printf 'SDK environment now set up; additionally you may now run devtool to perform development tasks.\nRun devtool --help for further details.\n'" >> $env_setup_script
@@ -727,7 +753,7 @@ sdk_ext_postinst() {
# current working directory when first ran, nor will it set $1 when
# sourcing a script. That is why this has to look so ugly.
LOGFILE="$target_sdk_dir/preparing_build_system.log"
- sh -c ". buildtools/environment-setup* > $LOGFILE && cd $target_sdk_dir/`dirname ${oe_init_build_env_path}` && set $target_sdk_dir && . $target_sdk_dir/${oe_init_build_env_path} $target_sdk_dir >> $LOGFILE && python3 $target_sdk_dir/ext-sdk-prepare.py $LOGFILE '${SDK_INSTALL_TARGETS}'" || { echo "printf 'ERROR: this SDK was not fully installed and needs reinstalling\n'" >> $env_setup_script ; exit 1 ; }
+ sh -c ". buildtools/environment-setup* > $LOGFILE 2>&1 && cd $target_sdk_dir/`dirname ${oe_init_build_env_path}` && set $target_sdk_dir && . $target_sdk_dir/${oe_init_build_env_path} $target_sdk_dir >> $LOGFILE 2>&1 && python3 $target_sdk_dir/ext-sdk-prepare.py $LOGFILE '${SDK_INSTALL_TARGETS}'" || { echo "printf 'ERROR: this SDK was not fully installed and needs reinstalling\n'" >> $env_setup_script ; exit 1 ; }
fi
if [ -e $target_sdk_dir/ext-sdk-prepare.py ]; then
rm $target_sdk_dir/ext-sdk-prepare.py
@@ -737,7 +763,7 @@ sdk_ext_postinst() {
SDK_POST_INSTALL_COMMAND:task-populate-sdk-ext = "${sdk_ext_postinst}"
-SDK_POSTPROCESS_COMMAND:prepend:task-populate-sdk-ext = "copy_buildsystem; install_tools; "
+SDK_POSTPROCESS_COMMAND:prepend:task-populate-sdk-ext = "copy_buildsystem install_tools "
SDK_INSTALL_TARGETS = ""
fakeroot python do_populate_sdk_ext() {
diff --git a/meta/classes-recipe/ptest-cargo.bbclass b/meta/classes-recipe/ptest-cargo.bbclass
new file mode 100644
index 0000000000..c46df362bf
--- /dev/null
+++ b/meta/classes-recipe/ptest-cargo.bbclass
@@ -0,0 +1,138 @@
+inherit cargo ptest
+
+RUST_TEST_ARGS ??= ""
+RUST_TEST_ARGS[doc] = "Arguments to give to the test binaries (e.g. --shuffle)"
+
+# I didn't find a cleaner way to share data between compile and install tasks
+CARGO_TEST_BINARIES_FILES ?= "${B}/test_binaries_list"
+
+# Sadly, generated test binaries have no deterministic names (https://github.com/rust-lang/cargo/issues/1924)
+# This forces us to parse the cargo output in json format to find those test binaries.
+python do_compile_ptest_cargo() {
+ import subprocess
+ import json
+
+ cargo = bb.utils.which(d.getVar("PATH"), d.getVar("CARGO", True))
+ cargo_build_flags = d.getVar("CARGO_BUILD_FLAGS", True)
+ rust_flags = d.getVar("RUSTFLAGS", True)
+ manifest_path = d.getVar("CARGO_MANIFEST_PATH", True)
+ project_manifest_path = os.path.normpath(manifest_path)
+ manifest_dir = os.path.dirname(manifest_path)
+
+ env = os.environ.copy()
+ env['RUSTFLAGS'] = rust_flags
+ cmd = f"{cargo} build --tests --message-format json {cargo_build_flags}"
+ bb.note(f"Building tests with cargo ({cmd})")
+
+ try:
+ proc = subprocess.Popen(cmd, shell=True, env=env, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, text=True)
+ except subprocess.CalledProcessError as e:
+ bb.fatal(f"Cannot build test with cargo: {e}")
+
+ lines = []
+ for line in proc.stdout:
+ data = line.strip('\n')
+ lines.append(data)
+ bb.note(data)
+ proc.communicate()
+ if proc.returncode != 0:
+ bb.fatal(f"Unable to compile test with cargo, '{cmd}' failed")
+
+ # Definition of the format: https://doc.rust-lang.org/cargo/reference/external-tools.html#json-messages
+ test_bins = []
+ for line in lines:
+ try:
+ data = json.loads(line)
+ except json.JSONDecodeError:
+ # skip lines that are not a json
+ pass
+ else:
+ try:
+ # Filter the test packages coming from the current project:
+ # - test binaries from the root manifest
+ # - test binaries from sub manifest of the current project if any
+ current_manifest_path = os.path.normpath(data['manifest_path'])
+ common_path = os.path.commonpath([current_manifest_path, project_manifest_path])
+ if common_path in [manifest_dir, current_manifest_path]:
+ if (data['target']['test'] or data['target']['doctest']) and data['executable']:
+ test_bins.append(data['executable'])
+ except (KeyError, ValueError) as e:
+ # skip lines that do not meet the requirements
+ pass
+
+ # All rust project will generate at least one unit test binary
+ # It will just run a test suite with 0 tests, if the project didn't define some
+ # So it is not expected to have an empty list here
+ if not test_bins:
+ bb.fatal("Unable to find any test binaries")
+
+ cargo_test_binaries_file = d.getVar('CARGO_TEST_BINARIES_FILES', True)
+ bb.note(f"Found {len(test_bins)} tests, write their paths into {cargo_test_binaries_file}")
+ with open(cargo_test_binaries_file, "w") as f:
+ for test_bin in test_bins:
+ f.write(f"{test_bin}\n")
+
+}
+
+python do_install_ptest_cargo() {
+ import shutil
+
+ dest_dir = d.getVar("D", True)
+ pn = d.getVar("PN", True)
+ ptest_path = d.getVar("PTEST_PATH", True)
+ cargo_test_binaries_file = d.getVar('CARGO_TEST_BINARIES_FILES', True)
+ rust_test_args = d.getVar('RUST_TEST_ARGS') or ""
+
+ ptest_dir = os.path.join(dest_dir, ptest_path.lstrip('/'))
+ os.makedirs(ptest_dir, exist_ok=True)
+
+ test_bins = []
+ with open(cargo_test_binaries_file, "r") as f:
+ for line in f.readlines():
+ test_bins.append(line.strip('\n'))
+
+ test_paths = []
+ for test_bin in test_bins:
+ shutil.copy2(test_bin, ptest_dir)
+ test_paths.append(os.path.join(ptest_path, os.path.basename(test_bin)))
+
+ ptest_script = os.path.join(ptest_dir, "run-ptest")
+ if os.path.exists(ptest_script):
+ with open(ptest_script, "a") as f:
+ f.write(f"\necho \"\"\n")
+ f.write(f"echo \"## starting to run rust tests ##\"\n")
+ for test_path in test_paths:
+ f.write(f"{test_path} {rust_test_args}\n")
+ else:
+ with open(ptest_script, "a") as f:
+ f.write("#!/bin/sh\n")
+ for test_path in test_paths:
+ f.write(f"{test_path} {rust_test_args}\n")
+ os.chmod(ptest_script, 0o755)
+
+ # this is chown -R root:root ${D}${PTEST_PATH}
+ for root, dirs, files in os.walk(ptest_dir):
+ for d in dirs:
+ shutil.chown(os.path.join(root, d), "root", "root")
+ for f in files:
+ shutil.chown(os.path.join(root, f), "root", "root")
+}
+
+do_install_ptest_cargo[dirs] = "${B}"
+do_install_ptest_cargo[doc] = "Create or update the run-ptest script with rust test binaries generated"
+do_compile_ptest_cargo[dirs] = "${B}"
+do_compile_ptest_cargo[doc] = "Generate rust test binaries through cargo"
+
+addtask compile_ptest_cargo after do_compile before do_compile_ptest_base
+addtask install_ptest_cargo after do_install_ptest_base before do_package
+
+python () {
+ if not bb.data.inherits_class('native', d) and not bb.data.inherits_class('cross', d):
+ d.setVarFlag('do_install_ptest_cargo', 'fakeroot', '1')
+ d.setVarFlag('do_install_ptest_cargo', 'umask', '022')
+
+ # Remove all '*ptest_cargo' tasks when ptest is not enabled
+ if not(d.getVar('PTEST_ENABLED') == "1"):
+ for i in ['do_compile_ptest_cargo', 'do_install_ptest_cargo']:
+ bb.build.deltask(i, d)
+}
diff --git a/meta/classes/ptest-gnome.bbclass b/meta/classes-recipe/ptest-gnome.bbclass
index 18bd3dbff9..d4ad22d85d 100644
--- a/meta/classes/ptest-gnome.bbclass
+++ b/meta/classes-recipe/ptest-gnome.bbclass
@@ -1,3 +1,9 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
inherit ptest
EXTRA_OECONF:append = " ${@bb.utils.contains('PTEST_ENABLED', '1', '--enable-installed-tests', '--disable-installed-tests', d)}"
diff --git a/meta/classes/ptest-perl.bbclass b/meta/classes-recipe/ptest-perl.bbclass
index 5dd72c9dad..c283fdd1fc 100644
--- a/meta/classes/ptest-perl.bbclass
+++ b/meta/classes-recipe/ptest-perl.bbclass
@@ -1,3 +1,9 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
inherit ptest
FILESEXTRAPATHS:prepend := "${COREBASE}/meta/files:"
diff --git a/meta/classes/ptest.bbclass b/meta/classes-recipe/ptest.bbclass
index 1ec23c0923..348d729aef 100644
--- a/meta/classes/ptest.bbclass
+++ b/meta/classes-recipe/ptest.bbclass
@@ -1,3 +1,9 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
SUMMARY:${PN}-ptest ?= "${SUMMARY} - Package test files"
DESCRIPTION:${PN}-ptest ?= "${DESCRIPTION} \
This package contains a test directory ${PTEST_PATH} for package test purposes."
@@ -5,6 +11,10 @@ This package contains a test directory ${PTEST_PATH} for package test purposes."
PTEST_PATH ?= "${libdir}/${BPN}/ptest"
PTEST_BUILD_HOST_FILES ?= "Makefile"
PTEST_BUILD_HOST_PATTERN ?= ""
+PTEST_PARALLEL_MAKE ?= "${PARALLEL_MAKE}"
+PTEST_PARALLEL_MAKEINST ?= "${PARALLEL_MAKEINST}"
+EXTRA_OEMAKE:prepend:task-compile-ptest-base = "${PTEST_PARALLEL_MAKE} "
+EXTRA_OEMAKE:prepend:task-install-ptest-base = "${PTEST_PARALLEL_MAKEINST} "
FILES:${PN}-ptest += "${PTEST_PATH}"
SECTION:${PN}-ptest = "devel"
@@ -46,9 +56,9 @@ do_install_ptest_base() {
if [ -f ${WORKDIR}/run-ptest ]; then
install -D ${WORKDIR}/run-ptest ${D}${PTEST_PATH}/run-ptest
fi
- if grep -q install-ptest: Makefile; then
- oe_runmake DESTDIR=${D}${PTEST_PATH} install-ptest
- fi
+
+ grep -q install-ptest: Makefile 2>/dev/null && oe_runmake DESTDIR=${D}${PTEST_PATH} install-ptest
+
do_install_ptest
chown -R root:root ${D}${PTEST_PATH}
@@ -128,5 +138,5 @@ def package_qa_check_missing_ptest(pn, d, messages):
return
enabled_ptests = " ".join([d.getVar('PTESTS_FAST'), d.getVar('PTESTS_SLOW'), d.getVar('PTESTS_PROBLEMS')]).split()
- if (pn + "-ptest").replace(d.getVar('MLPREFIX'), '') not in enabled_ptests:
+ if pn.replace(d.getVar('MLPREFIX'), '') not in enabled_ptests:
oe.qa.handle_error("missing-ptest", "supports ptests but is not included in oe-core's ptest-packagelists.inc", d)
diff --git a/meta/classes-recipe/pypi.bbclass b/meta/classes-recipe/pypi.bbclass
new file mode 100644
index 0000000000..c6bbe8119a
--- /dev/null
+++ b/meta/classes-recipe/pypi.bbclass
@@ -0,0 +1,43 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+def pypi_package(d):
+ bpn = d.getVar('BPN')
+ if bpn.startswith('python-'):
+ return bpn[7:]
+ elif bpn.startswith('python3-'):
+ return bpn[8:]
+ return bpn
+
+# The PyPi package name (defaults to PN without the python3- prefix)
+PYPI_PACKAGE ?= "${@pypi_package(d)}"
+# The file extension of the source archive
+PYPI_PACKAGE_EXT ?= "tar.gz"
+# An optional prefix for the download file in the case of name collisions
+PYPI_ARCHIVE_NAME_PREFIX ?= ""
+
+def pypi_src_uri(d):
+ """
+ Construct a source URL as per https://warehouse.pypa.io/api-reference/integration-guide.html#predictable-urls.
+ """
+ package = d.getVar('PYPI_PACKAGE')
+ archive_name = d.expand('${PYPI_PACKAGE}-${PV}.${PYPI_PACKAGE_EXT}')
+ archive_downloadname = d.getVar('PYPI_ARCHIVE_NAME_PREFIX') + archive_name
+ return 'https://files.pythonhosted.org/packages/source/%s/%s/%s;downloadfilename=%s' % (package[0], package, archive_name, archive_downloadname)
+
+PYPI_SRC_URI ?= "${@pypi_src_uri(d)}"
+
+HOMEPAGE ?= "https://pypi.python.org/pypi/${PYPI_PACKAGE}/"
+SECTION = "devel/python"
+SRC_URI:prepend = "${PYPI_SRC_URI} "
+S = "${WORKDIR}/${PYPI_PACKAGE}-${PV}"
+
+# Replace any '_' characters in the pypi URI with '-'s to follow the PyPi website naming conventions
+UPSTREAM_CHECK_PYPI_PACKAGE ?= "${@d.getVar('PYPI_PACKAGE').replace('_', '-')}"
+UPSTREAM_CHECK_URI ?= "https://pypi.org/project/${UPSTREAM_CHECK_PYPI_PACKAGE}/"
+UPSTREAM_CHECK_REGEX ?= "/${UPSTREAM_CHECK_PYPI_PACKAGE}/(?P<pver>(\d+[\.\-_]*)+)/"
+
+CVE_PRODUCT ?= "python:${PYPI_PACKAGE}"
diff --git a/meta/classes-recipe/python3-dir.bbclass b/meta/classes-recipe/python3-dir.bbclass
new file mode 100644
index 0000000000..3d07de99b8
--- /dev/null
+++ b/meta/classes-recipe/python3-dir.bbclass
@@ -0,0 +1,11 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+PYTHON_BASEVERSION = "3.12"
+PYTHON_ABI = ""
+PYTHON_DIR = "python${PYTHON_BASEVERSION}"
+PYTHON_PN = "python3"
+PYTHON_SITEPACKAGES_DIR = "${libdir}/${PYTHON_DIR}/site-packages"
diff --git a/meta/classes/python3native.bbclass b/meta/classes-recipe/python3native.bbclass
index 3783c0c47e..654a002fdb 100644
--- a/meta/classes/python3native.bbclass
+++ b/meta/classes-recipe/python3native.bbclass
@@ -1,3 +1,9 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
inherit python3-dir
PYTHON="${STAGING_BINDIR_NATIVE}/python3-native/python3"
diff --git a/meta/classes-recipe/python3targetconfig.bbclass b/meta/classes-recipe/python3targetconfig.bbclass
new file mode 100644
index 0000000000..08bc619398
--- /dev/null
+++ b/meta/classes-recipe/python3targetconfig.bbclass
@@ -0,0 +1,41 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+inherit python3native
+
+EXTRA_PYTHON_DEPENDS ?= ""
+EXTRA_PYTHON_DEPENDS:class-target = "python3"
+DEPENDS:append = " ${EXTRA_PYTHON_DEPENDS}"
+
+setup_target_config() {
+ export _PYTHON_SYSCONFIGDATA_NAME="_sysconfigdata"
+ export PYTHONPATH=${STAGING_LIBDIR}/python-sysconfigdata:$PYTHONPATH
+ export PATH=${STAGING_EXECPREFIXDIR}/python-target-config/:$PATH
+}
+
+do_configure:prepend:class-target() {
+ setup_target_config
+}
+
+do_compile:prepend:class-target() {
+ setup_target_config
+}
+
+do_install:prepend:class-target() {
+ setup_target_config
+}
+
+do_configure:prepend:class-nativesdk() {
+ setup_target_config
+}
+
+do_compile:prepend:class-nativesdk() {
+ setup_target_config
+}
+
+do_install:prepend:class-nativesdk() {
+ setup_target_config
+}
diff --git a/meta/classes-recipe/python_flit_core.bbclass b/meta/classes-recipe/python_flit_core.bbclass
new file mode 100644
index 0000000000..a0b1feb70a
--- /dev/null
+++ b/meta/classes-recipe/python_flit_core.bbclass
@@ -0,0 +1,14 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+inherit python_pep517 python3native python3-dir setuptools3-base
+
+DEPENDS += "python3 python3-flit-core-native"
+
+python_flit_core_do_manual_build () {
+ cd ${PEP517_SOURCE_PATH}
+ nativepython3 -m flit_core.wheel --outdir ${PEP517_WHEEL_PATH} .
+}
diff --git a/meta/classes-recipe/python_hatchling.bbclass b/meta/classes-recipe/python_hatchling.bbclass
new file mode 100644
index 0000000000..b5a3c3feea
--- /dev/null
+++ b/meta/classes-recipe/python_hatchling.bbclass
@@ -0,0 +1,27 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+inherit python_pep517 python3native python3-dir setuptools3-base
+
+DEPENDS += "python3-hatchling-native"
+
+# delete nested, empty directories from the python site-packages path. Make
+# sure that we remove the native ones for target builds as well
+hatchling_rm_emptydirs:class-target () {
+ find ${STAGING_LIBDIR}/${PYTHON_DIR}/site-packages/* -depth -type d -empty -delete
+ find ${STAGING_LIBDIR_NATIVE}/${PYTHON_DIR}/site-packages/* -depth -type d -empty -delete
+}
+
+hatchling_rm_emptydirs:class-native () {
+ find ${STAGING_LIBDIR_NATIVE}/${PYTHON_DIR}/site-packages/* -depth -type d -empty -delete
+}
+
+# Define a default empty version of hatchling_rm_emptydirs to appease bitbake
+hatchling_rm_emptydirs () {
+ :
+}
+
+do_prepare_recipe_sysroot[postfuncs] += " hatchling_rm_emptydirs"
diff --git a/meta/classes-recipe/python_maturin.bbclass b/meta/classes-recipe/python_maturin.bbclass
new file mode 100644
index 0000000000..c39d6c6e37
--- /dev/null
+++ b/meta/classes-recipe/python_maturin.bbclass
@@ -0,0 +1,17 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+inherit python_pyo3 python_setuptools_build_meta
+
+DEPENDS += "python3-maturin-native"
+
+python_maturin_do_configure() {
+ python_pyo3_do_configure
+ cargo_common_do_configure
+ python_pep517_do_configure
+}
+
+EXPORT_FUNCTIONS do_configure
diff --git a/meta/classes-recipe/python_mesonpy.bbclass b/meta/classes-recipe/python_mesonpy.bbclass
new file mode 100644
index 0000000000..131fa74bed
--- /dev/null
+++ b/meta/classes-recipe/python_mesonpy.bbclass
@@ -0,0 +1,52 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+inherit meson setuptools3-base python3targetconfig python_pep517
+
+# meson_do_qa_configure does the wrong thing here because
+# mesonpy runs "meson setup ..." in do_compile context.
+# Make it a dummy function.
+meson_do_qa_configure () {
+ :
+}
+
+# This prevents the meson error:
+# ERROR: Got argument buildtype as both -Dbuildtype and --buildtype. Pick one.
+MESONOPTS:remove = "--buildtype ${MESON_BUILDTYPE}"
+
+CONFIGURE_FILES = "pyproject.toml"
+
+DEPENDS += "python3-wheel-native python3-meson-python-native"
+
+def mesonpy_get_args(d):
+ vars = ['MESONOPTS', 'MESON_CROSS_FILE', 'EXTRA_OEMESON']
+ varlist = []
+ for var in vars:
+ value = d.getVar(var)
+ vallist = value.split()
+ for elem in vallist:
+ varlist.append("-Csetup-args=" + elem)
+ return ' '.join(varlist)
+
+PEP517_BUILD_OPTS = "-Cbuilddir='${B}' ${@mesonpy_get_args(d)}"
+
+# Python pyx -> c -> so build leaves absolute build paths in the code
+INSANE_SKIP:${PN} += "buildpaths"
+INSANE_SKIP:${PN}-src += "buildpaths"
+
+python_mesonpy_do_configure () {
+ python_pep517_do_configure
+}
+
+python_mesonpy_do_compile () {
+ python_pep517_do_compile
+}
+
+python_mesonpy_do_install () {
+ python_pep517_do_install
+}
+
+EXPORT_FUNCTIONS do_configure do_compile do_install
diff --git a/meta/classes/python_pep517.bbclass b/meta/classes-recipe/python_pep517.bbclass
index 34ffdc9c0d..c30674c8ec 100644
--- a/meta/classes/python_pep517.bbclass
+++ b/meta/classes-recipe/python_pep517.bbclass
@@ -1,20 +1,26 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
# Common infrastructure for Python packages that use PEP-517 compliant packaging.
# https://www.python.org/dev/peps/pep-0517/
#
# This class will build a wheel in do_compile, and use pypa/installer to install
# it in do_install.
-DEPENDS:append = " python3-installer-native"
+DEPENDS:append = " python3-build-native python3-installer-native"
# Where to execute the build process from
PEP517_SOURCE_PATH ?= "${S}"
-# The PEP517 build API entry point
-PEP517_BUILD_API ?= "unset"
-
# The directory where wheels will be written
PEP517_WHEEL_PATH ?= "${WORKDIR}/dist"
+# Other options to pass to build
+PEP517_BUILD_OPTS ?= ""
+
# The interpreter to use for installed scripts
PEP517_INSTALL_PYTHON = "python3"
PEP517_INSTALL_PYTHON:class-native = "nativepython3"
@@ -31,13 +37,12 @@ python_pep517_do_configure () {
# When we have Python 3.11 we can parse pyproject.toml to determine the build
# API entry point directly
python_pep517_do_compile () {
- cd ${PEP517_SOURCE_PATH}
- nativepython3 -c "import ${PEP517_BUILD_API} as api; api.build_wheel('${PEP517_WHEEL_PATH}')"
+ nativepython3 -m build --no-isolation --wheel --outdir ${PEP517_WHEEL_PATH} ${PEP517_SOURCE_PATH} ${PEP517_BUILD_OPTS}
}
do_compile[cleandirs] += "${PEP517_WHEEL_PATH}"
python_pep517_do_install () {
- COUNT=$(find ${PEP517_WHEEL_PATH} -name '*.whl' | wc -l)
+ COUNT=$(find ${PEP517_WHEEL_PATH} -name '*.whl' -maxdepth 1 | wc -l)
if test $COUNT -eq 0; then
bbfatal No wheels found in ${PEP517_WHEEL_PATH}
elif test $COUNT -gt 1; then
diff --git a/meta/classes-recipe/python_poetry_core.bbclass b/meta/classes-recipe/python_poetry_core.bbclass
new file mode 100644
index 0000000000..c7dc5d0382
--- /dev/null
+++ b/meta/classes-recipe/python_poetry_core.bbclass
@@ -0,0 +1,9 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+inherit python_pep517 python3native setuptools3-base
+
+DEPENDS += "python3-poetry-core-native"
diff --git a/meta/classes/python_pyo3.bbclass b/meta/classes-recipe/python_pyo3.bbclass
index 10cc3a0645..9a32eac6fd 100644
--- a/meta/classes/python_pyo3.bbclass
+++ b/meta/classes-recipe/python_pyo3.bbclass
@@ -1,4 +1,10 @@
#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+#
# This class helps make sure that Python extensions built with PyO3
# and setuptools_rust properly set up the environment for cross compilation
#
@@ -8,7 +14,7 @@ inherit cargo python3-dir siteinfo
export PYO3_CROSS="1"
export PYO3_CROSS_PYTHON_VERSION="${PYTHON_BASEVERSION}"
export PYO3_CROSS_LIB_DIR="${STAGING_LIBDIR}"
-export CARGO_BUILD_TARGET="${HOST_SYS}"
+export CARGO_BUILD_TARGET="${RUST_HOST_SYS}"
export RUSTFLAGS
export PYO3_PYTHON="${PYTHON}"
export PYO3_CONFIG_FILE="${WORKDIR}/pyo3.config"
diff --git a/meta/classes-recipe/python_setuptools3_rust.bbclass b/meta/classes-recipe/python_setuptools3_rust.bbclass
new file mode 100644
index 0000000000..d3d7590cbe
--- /dev/null
+++ b/meta/classes-recipe/python_setuptools3_rust.bbclass
@@ -0,0 +1,17 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+inherit python_pyo3 python_setuptools_build_meta
+
+DEPENDS += "python3-setuptools-rust-native"
+
+python_setuptools3_rust_do_configure() {
+ python_pyo3_do_configure
+ cargo_common_do_configure
+ python_pep517_do_configure
+}
+
+EXPORT_FUNCTIONS do_configure
diff --git a/meta/classes-recipe/python_setuptools_build_meta.bbclass b/meta/classes-recipe/python_setuptools_build_meta.bbclass
new file mode 100644
index 0000000000..4c84d1e8d0
--- /dev/null
+++ b/meta/classes-recipe/python_setuptools_build_meta.bbclass
@@ -0,0 +1,9 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+inherit setuptools3-base python_pep517
+
+DEPENDS += "python3-setuptools-native python3-wheel-native"
diff --git a/meta/classes/qemu.bbclass b/meta/classes-recipe/qemu.bbclass
index 7493ac34d4..dbb5ee0b66 100644
--- a/meta/classes/qemu.bbclass
+++ b/meta/classes-recipe/qemu.bbclass
@@ -1,4 +1,10 @@
#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+#
# This class contains functions for recipes that need QEMU or test for its
# existence.
#
@@ -28,7 +34,7 @@ def qemu_wrapper_cmdline(data, rootfs_path, library_paths):
if qemu_binary == "qemu-allarch":
qemu_binary = "qemuwrapper"
- qemu_options = data.getVar("QEMU_OPTIONS")
+ qemu_options = data.getVar("QEMU_OPTIONS") or ""
return "PSEUDO_UNLOAD=1 " + qemu_binary + " " + qemu_options + " -L " + rootfs_path\
+ " -E LD_LIBRARY_PATH=" + ":".join(library_paths) + " "
diff --git a/meta/classes/qemuboot.bbclass b/meta/classes-recipe/qemuboot.bbclass
index ad8489902a..895fd38d68 100644
--- a/meta/classes/qemuboot.bbclass
+++ b/meta/classes-recipe/qemuboot.bbclass
@@ -1,3 +1,9 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
# Help runqemu boot target board, "QB" means Qemu Boot, the following
# vars can be set in conf files, such as <bsp.conf> to make it can be
# boot by runqemu:
@@ -7,6 +13,7 @@
# QB_OPT_APPEND: options to append to qemu, e.g., "-device usb-mouse"
#
# QB_DEFAULT_KERNEL: default kernel to boot, e.g., "bzImage"
+# e.g., "bzImage-initramfs-qemux86-64.bin" if INITRAMFS_IMAGE_BUNDLE is set to 1.
#
# QB_DEFAULT_FSTYPE: default FSTYPE to boot, e.g., "ext4"
#
@@ -55,8 +62,8 @@
# QB_SLIRP_OPT: network option for SLIRP mode, e.g., -netdev user,id=net0"
#
# QB_CMDLINE_IP_SLIRP: If QB_NETWORK_DEVICE adds more than one network interface to qemu, usually the
-# ip= kernel comand line argument needs to be changed accordingly. Details are documented
-# in the kernel docuemntation https://www.kernel.org/doc/Documentation/filesystems/nfs/nfsroot.txt
+# ip= kernel command line argument needs to be changed accordingly. Details are documented
+# in the kernel documentation https://www.kernel.org/doc/Documentation/filesystems/nfs/nfsroot.txt
# Example to configure only the first interface: "ip=eth0:dhcp"
# QB_CMDLINE_IP_TAP: This parameter is similar to the QB_CMDLINE_IP_SLIRP parameter. Since the tap interface requires
# static IP configuration @CLIENT@ and @GATEWAY@ place holders are replaced by the IP and the gateway
@@ -79,6 +86,8 @@
# without the need to specify a dedicated qemu configuration
#
# QB_GRAPHICS: QEMU video card type (e.g. "-vga std")
+# QB_NFSROOTFS_EXTRA_OPT: extra options to be appended to the nfs rootfs options in kernel boot arg, e.g.,
+# "wsize=4096,rsize=4096"
#
# Usage:
# IMAGE_CLASSES += "qemuboot"
@@ -87,15 +96,28 @@
QB_MEM ?= "-m 256"
QB_SMP ?= ""
QB_SERIAL_OPT ?= "-serial mon:stdio -serial null"
-QB_DEFAULT_KERNEL ?= "${KERNEL_IMAGETYPE}"
+QB_DEFAULT_KERNEL ?= "${@bb.utils.contains("INITRAMFS_IMAGE_BUNDLE", "1", "${KERNEL_IMAGETYPE}-${INITRAMFS_LINK_NAME}.bin", "${KERNEL_IMAGETYPE}", d)}"
QB_DEFAULT_FSTYPE ?= "ext4"
QB_RNG ?= "-object rng-random,filename=/dev/urandom,id=rng0 -device virtio-rng-pci,rng=rng0"
QB_OPT_APPEND ?= ""
QB_NETWORK_DEVICE ?= "-device virtio-net-pci,netdev=net0,mac=@MAC@"
+
+# qemurunner needs ip information first, so append QB_NO_PNI
+#
+QB_NO_PNI ?= "${@bb.utils.contains('DISTRO_FEATURES', 'pni-names', '', 'net.ifnames=0', d)}"
QB_CMDLINE_IP_SLIRP ?= "ip=dhcp"
-QB_CMDLINE_IP_TAP ?= "ip=192.168.7.@CLIENT@::192.168.7.@GATEWAY@:255.255.255.0::eth0:off:8.8.8.8"
+QB_CMDLINE_IP_TAP ?= "ip=192.168.7.@CLIENT@::192.168.7.@GATEWAY@:255.255.255.0::eth0:off:8.8.8.8 ${QB_NO_PNI}"
+
QB_ROOTFS_EXTRA_OPT ?= ""
QB_GRAPHICS ?= ""
+QB_NFSROOTFS_EXTRA_OPT ?= ""
+
+# With 6.5+ (specifically, if DMA_BOUNCE_UNALIGNED_KMALLOC is set) the SW IO TLB
+# is used, and it defaults to 64MB. This is too much when there's only 256MB of
+# RAM, so request 0 slabs and lets the kernel round up to the appropriate minimum
+# (1MB, typically). In virtual hardware there's very little need for these bounce
+# buffers, so the 64MB would be mostly wasted.
+QB_KERNEL_CMDLINE_APPEND:append = " swiotlb=0"
# This should be kept align with ROOT_VM
QB_DRIVE_TYPE ?= "/dev/sd"
@@ -133,7 +155,7 @@ python do_write_qemuboot_conf() {
# contains all tools required by runqemu
if k == 'STAGING_BINDIR_NATIVE':
val = os.path.join(d.getVar('BASE_WORKDIR'), d.getVar('BUILD_SYS'),
- 'qemu-helper-native/1.0-r1/recipe-sysroot-native/usr/bin/')
+ 'qemu-helper-native/1.0/recipe-sysroot-native/usr/bin/')
else:
val = d.getVar(k)
if val is None:
@@ -163,3 +185,5 @@ python do_write_qemuboot_conf() {
os.remove(qemuboot_link)
os.symlink(os.path.basename(qemuboot), qemuboot_link)
}
+
+EXTRA_IMAGEDEPENDS += "qemu-system-native qemu-helper-native:do_addto_recipe_sysroot"
diff --git a/meta/classes-recipe/rootfs-postcommands.bbclass b/meta/classes-recipe/rootfs-postcommands.bbclass
new file mode 100644
index 0000000000..e81b69a239
--- /dev/null
+++ b/meta/classes-recipe/rootfs-postcommands.bbclass
@@ -0,0 +1,549 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+# Zap the root password if debug-tweaks and empty-root-password features are not enabled
+ROOTFS_POSTPROCESS_COMMAND += '${@bb.utils.contains_any("IMAGE_FEATURES", [ 'debug-tweaks', 'empty-root-password' ], "", "zap_empty_root_password ",d)}'
+
+# Allow dropbear/openssh to accept logins from accounts with an empty password string if debug-tweaks or allow-empty-password is enabled
+ROOTFS_POSTPROCESS_COMMAND += '${@bb.utils.contains_any("IMAGE_FEATURES", [ 'debug-tweaks', 'allow-empty-password' ], "ssh_allow_empty_password ", "",d)}'
+
+# Allow dropbear/openssh to accept root logins if debug-tweaks or allow-root-login is enabled
+ROOTFS_POSTPROCESS_COMMAND += '${@bb.utils.contains_any("IMAGE_FEATURES", [ 'debug-tweaks', 'allow-root-login' ], "ssh_allow_root_login ", "",d)}'
+
+# Autologin the root user on the serial console, if empty-root-password and serial-autologin-root are active
+ROOTFS_POSTPROCESS_COMMAND += '${@bb.utils.contains("IMAGE_FEATURES", [ 'empty-root-password', 'serial-autologin-root' ], "serial_autologin_root ", "",d)}'
+
+# Enable postinst logging if debug-tweaks or post-install-logging is enabled
+ROOTFS_POSTPROCESS_COMMAND += '${@bb.utils.contains_any("IMAGE_FEATURES", [ 'debug-tweaks', 'post-install-logging' ], "postinst_enable_logging ", "",d)}'
+
+# Create /etc/timestamp during image construction to give a reasonably sane default time setting
+ROOTFS_POSTPROCESS_COMMAND += "rootfs_update_timestamp "
+
+# Tweak files in /etc if read-only-rootfs is enabled
+ROOTFS_POSTPROCESS_COMMAND += '${@bb.utils.contains("IMAGE_FEATURES", "read-only-rootfs", "read_only_rootfs_hook ", "",d)}'
+
+# We also need to do the same for the kernel boot parameters,
+# otherwise kernel or initramfs end up mounting the rootfs read/write
+# (the default) if supported by the underlying storage.
+#
+# We do this with :append because the default value might get set later with ?=
+# and we don't want to disable such a default that by setting a value here.
+APPEND:append = '${@bb.utils.contains("IMAGE_FEATURES", "read-only-rootfs", " ro", "", d)}'
+
+# Generates test data file with data store variables expanded in json format
+ROOTFS_POSTPROCESS_COMMAND += "write_image_test_data "
+
+# Write manifest
+IMAGE_MANIFEST = "${IMGDEPLOYDIR}/${IMAGE_NAME}.manifest"
+ROOTFS_POSTUNINSTALL_COMMAND =+ "write_image_manifest"
+# Set default postinst log file
+POSTINST_LOGFILE ?= "${localstatedir}/log/postinstall.log"
+# Set default target for systemd images
+SYSTEMD_DEFAULT_TARGET ?= '${@bb.utils.contains_any("IMAGE_FEATURES", [ "x11-base", "weston" ], "graphical.target", "multi-user.target", d)}'
+ROOTFS_POSTPROCESS_COMMAND += '${@bb.utils.contains("DISTRO_FEATURES", "systemd", "set_systemd_default_target systemd_sysusers_check", "", d)}'
+
+ROOTFS_POSTPROCESS_COMMAND += 'empty_var_volatile'
+
+ROOTFS_POSTPROCESS_COMMAND += '${@bb.utils.contains("DISTRO_FEATURES", "overlayfs", "overlayfs_qa_check overlayfs_postprocess", "", d)}'
+
+inherit image-artifact-names
+
+# Sort the user and group entries in /etc by ID in order to make the content
+# deterministic. Package installs are not deterministic, causing the ordering
+# of entries to change between builds. In case that this isn't desired,
+# the command can be overridden.
+SORT_PASSWD_POSTPROCESS_COMMAND ??= "tidy_shadowutils_files"
+ROOTFS_POSTPROCESS_COMMAND += '${SORT_PASSWD_POSTPROCESS_COMMAND}'
+
+#
+# Note that useradd-staticids.bbclass has to be used to ensure that
+# the numeric IDs of dynamically created entries remain stable.
+#
+ROOTFS_POSTPROCESS_COMMAND += 'rootfs_reproducible'
+
+# Resolve the ID as described in the sysusers.d(5) manual: ID can be a numeric
+# uid, a couple uid:gid or uid:groupname or it is '-' meaning leaving it
+# automatic or it can be a path. In the latter, the uid/gid matches the
+# user/group owner of that file.
+def resolve_sysusers_id(d, sid):
+ # If the id is a path, the uid/gid matchs to the target's uid/gid in the
+ # rootfs.
+ if '/' in sid:
+ try:
+ osstat = os.stat(os.path.join(d.getVar('IMAGE_ROOTFS'), sid))
+ except FileNotFoundError:
+ bb.error('sysusers.d: file %s is required but it does not exist in the rootfs', sid)
+ return ('-', '-')
+ return (osstat.st_uid, osstat.st_gid)
+ # Else it is a uid:gid or uid:groupname syntax
+ if ':' in sid:
+ return sid.split(':')
+ else:
+ return (sid, '-')
+
+# Check a user exists in the rootfs password file and return its properties
+def check_user_exists(d, uname=None, uid=None):
+ with open(os.path.join(d.getVar('IMAGE_ROOTFS'), 'etc/passwd'), 'r') as pwfile:
+ for line in pwfile:
+ (name, _, u_id, gid, comment, homedir, ushell) = line.strip().split(':')
+ if uname == name or uid == u_id:
+ return (name, u_id, gid, comment or '-', homedir or '/', ushell or '-')
+ return None
+
+# Check a group exists in the rootfs group file and return its properties
+def check_group_exists(d, gname=None, gid=None):
+ with open(os.path.join(d.getVar('IMAGE_ROOTFS'), 'etc/group'), 'r') as gfile:
+ for line in gfile:
+ (name, _, g_id, _) = line.strip().split(':')
+ if name == gname or g_id == gid:
+ return (name, g_id)
+ return None
+
+def compare_users(user, e_user):
+ # user and e_user must not have None values. Unset values must be '-'.
+ (name, uid, gid, comment, homedir, ushell) = user
+ (e_name, e_uid, e_gid, e_comment, e_homedir, e_ushell) = e_user
+ # Ignore 'uid', 'gid' or 'homedir' if they are not set
+ # Ignore 'shell' and 'ushell' if one is not set
+ return name == e_name \
+ and (uid == '-' or uid == e_uid) \
+ and (gid == '-' or gid == e_gid) \
+ and (homedir == '-' or e_homedir == '-' or homedir == e_homedir) \
+ and (ushell == '-' or e_ushell == '-' or ushell == e_ushell)
+
+# Open sysusers.d configuration files and parse each line to check the users and
+# groups are already defined in /etc/passwd and /etc/groups with similar
+# properties. Refer to the sysusers.d(5) manual for its syntax.
+python systemd_sysusers_check() {
+ import glob
+ import re
+
+ pattern_comment = r'(-|\"[^:\"]+\")'
+ pattern_word = r'[^\s]+'
+ pattern_line = r'(' + pattern_word + r')\s+(' + pattern_word + r')\s+(' + pattern_word + r')(\s+' \
+ + pattern_comment + r')?' + r'(\s+(' + pattern_word + r'))?' + r'(\s+(' + pattern_word + r'))?'
+
+ for conffile in glob.glob(os.path.join(d.getVar('IMAGE_ROOTFS'), 'usr/lib/sysusers.d/*.conf')):
+ with open(conffile, 'r') as f:
+ for line in f:
+ line = line.strip()
+ if not len(line) or line[0] == '#': continue
+ ret = re.fullmatch(pattern_line, line.strip())
+ if not ret: continue
+ (stype, sname, sid, _, scomment, _, shomedir, _, sshell) = ret.groups()
+ if stype == 'u':
+ if sid:
+ (suid, sgid) = resolve_sysusers_id(d, sid)
+ if sgid.isalpha():
+ sgid = check_group_exists(d, gname=sgid)
+ elif sgid.isdigit():
+ check_group_exists(d, gid=sgid)
+ else:
+ sgid = '-'
+ else:
+ suid = '-'
+ sgid = '-'
+ scomment = scomment.replace('"', '') if scomment else '-'
+ shomedir = shomedir or '-'
+ sshell = sshell or '-'
+ e_user = check_user_exists(d, uname=sname)
+ if not e_user:
+ bb.warn('User %s has never been defined' % sname)
+ elif not compare_users((sname, suid, sgid, scomment, shomedir, sshell), e_user):
+ bb.warn('User %s has been defined as (%s) but sysusers.d expects it as (%s)'
+ % (sname, ', '.join(e_user),
+ ', '.join((sname, suid, sgid, scomment, shomedir, sshell))))
+ elif stype == 'g':
+ gid = sid or '-'
+ if '/' in gid:
+ (_, gid) = resolve_sysusers_id(d, sid)
+ e_group = check_group_exists(d, gname=sname)
+ if not e_group:
+ bb.warn('Group %s has never been defined' % sname)
+ elif gid != '-':
+ (_, e_gid) = e_group
+ if gid != e_gid:
+ bb.warn('Group %s has been defined with id (%s) but sysusers.d expects gid (%s)'
+ % (sname, e_gid, gid))
+ elif stype == 'm':
+ check_user_exists(d, sname)
+ check_group_exists(d, sid)
+}
+
+#
+# A hook function to support read-only-rootfs IMAGE_FEATURES
+#
+read_only_rootfs_hook () {
+ # Tweak the mount option and fs_passno for rootfs in fstab
+ if [ -f ${IMAGE_ROOTFS}/etc/fstab ]; then
+ sed -i -e '/^[#[:space:]]*\/dev\/root/{s/defaults/ro/;s/\([[:space:]]*[[:digit:]]\)\([[:space:]]*\)[[:digit:]]$/\1\20/}' ${IMAGE_ROOTFS}/etc/fstab
+ fi
+
+ # Tweak the "mount -o remount,rw /" command in busybox-inittab inittab
+ if [ -f ${IMAGE_ROOTFS}/etc/inittab ]; then
+ sed -i 's|/bin/mount -o remount,rw /|/bin/mount -o remount,ro /|' ${IMAGE_ROOTFS}/etc/inittab
+ fi
+
+ # If we're using openssh and the /etc/ssh directory has no pre-generated keys,
+ # we should configure openssh to use the configuration file /etc/ssh/sshd_config_readonly
+ # and the keys under /var/run/ssh.
+ # If overlayfs-etc is used this is not done as /etc is treated as writable
+ # If stateless-rootfs is enabled this is always done as we don't want to save keys then
+ if ${@ 'true' if not bb.utils.contains('IMAGE_FEATURES', 'overlayfs-etc', True, False, d) or bb.utils.contains('IMAGE_FEATURES', 'stateless-rootfs', True, False, d) else 'false'}; then
+ if [ -d ${IMAGE_ROOTFS}/etc/ssh ]; then
+ if [ -e ${IMAGE_ROOTFS}/etc/ssh/ssh_host_rsa_key ]; then
+ echo "SYSCONFDIR=\${SYSCONFDIR:-/etc/ssh}" >> ${IMAGE_ROOTFS}/etc/default/ssh
+ echo "SSHD_OPTS=" >> ${IMAGE_ROOTFS}/etc/default/ssh
+ else
+ echo "SYSCONFDIR=\${SYSCONFDIR:-/var/run/ssh}" >> ${IMAGE_ROOTFS}/etc/default/ssh
+ echo "SSHD_OPTS='-f /etc/ssh/sshd_config_readonly'" >> ${IMAGE_ROOTFS}/etc/default/ssh
+ fi
+ fi
+
+ # Also tweak the key location for dropbear in the same way.
+ if [ -d ${IMAGE_ROOTFS}/etc/dropbear ]; then
+ if [ ! -e ${IMAGE_ROOTFS}/etc/dropbear/dropbear_rsa_host_key ]; then
+ echo "DROPBEAR_RSAKEY_DIR=/var/lib/dropbear" >> ${IMAGE_ROOTFS}/etc/default/dropbear
+ fi
+ fi
+ fi
+
+ if ${@bb.utils.contains("DISTRO_FEATURES", "sysvinit", "true", "false", d)}; then
+ # Change the value of ROOTFS_READ_ONLY in /etc/default/rcS to yes
+ if [ -e ${IMAGE_ROOTFS}/etc/default/rcS ]; then
+ sed -i 's/ROOTFS_READ_ONLY=no/ROOTFS_READ_ONLY=yes/' ${IMAGE_ROOTFS}/etc/default/rcS
+ fi
+ # Run populate-volatile.sh at rootfs time to set up basic files
+ # and directories to support read-only rootfs.
+ if [ -x ${IMAGE_ROOTFS}/etc/init.d/populate-volatile.sh ]; then
+ ${IMAGE_ROOTFS}/etc/init.d/populate-volatile.sh
+ fi
+ fi
+
+ if ${@bb.utils.contains("DISTRO_FEATURES", "systemd", "true", "false", d)}; then
+ # Create machine-id
+ # 20:12 < mezcalero> koen: you have three options: a) run systemd-machine-id-setup at install time, b) have / read-only and an empty file there (for stateless) and c) boot with / writable
+ touch ${IMAGE_ROOTFS}${sysconfdir}/machine-id
+ fi
+}
+
+#
+# This function disallows empty root passwords
+#
+zap_empty_root_password () {
+ if [ -e ${IMAGE_ROOTFS}/etc/shadow ]; then
+ sed --follow-symlinks -i 's%^root::%root:*:%' ${IMAGE_ROOTFS}/etc/shadow
+ fi
+ if [ -e ${IMAGE_ROOTFS}/etc/passwd ]; then
+ sed --follow-symlinks -i 's%^root::%root:*:%' ${IMAGE_ROOTFS}/etc/passwd
+ fi
+}
+
+#
+# allow dropbear/openssh to accept logins from accounts with an empty password string
+#
+ssh_allow_empty_password () {
+ for config in sshd_config sshd_config_readonly; do
+ if [ -e ${IMAGE_ROOTFS}${sysconfdir}/ssh/$config ]; then
+ sed -i 's/^[#[:space:]]*PermitEmptyPasswords.*/PermitEmptyPasswords yes/' ${IMAGE_ROOTFS}${sysconfdir}/ssh/$config
+ fi
+ done
+
+ if [ -e ${IMAGE_ROOTFS}${sbindir}/dropbear ] ; then
+ if grep -q DROPBEAR_EXTRA_ARGS ${IMAGE_ROOTFS}${sysconfdir}/default/dropbear 2>/dev/null ; then
+ if ! grep -q "DROPBEAR_EXTRA_ARGS=.*-B" ${IMAGE_ROOTFS}${sysconfdir}/default/dropbear ; then
+ sed -i 's/^DROPBEAR_EXTRA_ARGS="*\([^"]*\)"*/DROPBEAR_EXTRA_ARGS="\1 -B"/' ${IMAGE_ROOTFS}${sysconfdir}/default/dropbear
+ fi
+ else
+ printf '\nDROPBEAR_EXTRA_ARGS="-B"\n' >> ${IMAGE_ROOTFS}${sysconfdir}/default/dropbear
+ fi
+ fi
+
+ if [ -d ${IMAGE_ROOTFS}${sysconfdir}/pam.d ] ; then
+ for f in `find ${IMAGE_ROOTFS}${sysconfdir}/pam.d/* -type f -exec test -e {} \; -print`
+ do
+ sed -i 's/nullok_secure/nullok/' $f
+ done
+ fi
+}
+
+#
+# allow dropbear/openssh to accept root logins
+#
+ssh_allow_root_login () {
+ for config in sshd_config sshd_config_readonly; do
+ if [ -e ${IMAGE_ROOTFS}${sysconfdir}/ssh/$config ]; then
+ sed -i 's/^[#[:space:]]*PermitRootLogin.*/PermitRootLogin yes/' ${IMAGE_ROOTFS}${sysconfdir}/ssh/$config
+ fi
+ done
+
+ if [ -e ${IMAGE_ROOTFS}${sbindir}/dropbear ] ; then
+ if grep -q DROPBEAR_EXTRA_ARGS ${IMAGE_ROOTFS}${sysconfdir}/default/dropbear 2>/dev/null ; then
+ sed -i '/^DROPBEAR_EXTRA_ARGS=/ s/-w//' ${IMAGE_ROOTFS}${sysconfdir}/default/dropbear
+ sed -i '/^# Disallow root/d' ${IMAGE_ROOTFS}${sysconfdir}/default/dropbear
+ fi
+ fi
+}
+
+#
+# Autologin the 'root' user on the serial terminal,
+# if empty-root-password' AND 'serial-autologin-root are enabled
+#
+serial_autologin_root () {
+ if ${@bb.utils.contains("DISTRO_FEATURES", "sysvinit", "true", "false", d)}; then
+ # add autologin option to util-linux getty only
+ sed -i 's/options="/&--autologin root /' \
+ "${IMAGE_ROOTFS}${base_bindir}/start_getty"
+ elif ${@bb.utils.contains("DISTRO_FEATURES", "systemd", "true", "false", d)}; then
+ if [ -e ${IMAGE_ROOTFS}${systemd_system_unitdir}/serial-getty@.service ]; then
+ sed -i '/^\s*ExecStart\b/ s/getty /&--autologin root /' \
+ "${IMAGE_ROOTFS}${systemd_system_unitdir}/serial-getty@.service"
+ fi
+ fi
+}
+
+python tidy_shadowutils_files () {
+ import rootfspostcommands
+ rootfspostcommands.tidy_shadowutils_files(d.expand('${IMAGE_ROOTFS}${sysconfdir}'))
+}
+
+python sort_passwd () {
+ """
+ Deprecated in the favour of tidy_shadowutils_files.
+ """
+ import rootfspostcommands
+ bb.warn('[sort_passwd] You are using a deprecated function for '
+ 'SORT_PASSWD_POSTPROCESS_COMMAND. The default one is now called '
+ '"tidy_shadowutils_files".')
+ rootfspostcommands.tidy_shadowutils_files(d.expand('${IMAGE_ROOTFS}${sysconfdir}'))
+}
+
+#
+# Enable postinst logging
+#
+postinst_enable_logging () {
+ mkdir -p ${IMAGE_ROOTFS}${sysconfdir}/default
+ echo "POSTINST_LOGGING=1" >> ${IMAGE_ROOTFS}${sysconfdir}/default/postinst
+ echo "LOGFILE=${POSTINST_LOGFILE}" >> ${IMAGE_ROOTFS}${sysconfdir}/default/postinst
+}
+
+#
+# Modify systemd default target
+#
+set_systemd_default_target () {
+ if [ -d ${IMAGE_ROOTFS}${sysconfdir}/systemd/system -a -e ${IMAGE_ROOTFS}${systemd_system_unitdir}/${SYSTEMD_DEFAULT_TARGET} ]; then
+ ln -sf ${systemd_system_unitdir}/${SYSTEMD_DEFAULT_TARGET} ${IMAGE_ROOTFS}${sysconfdir}/systemd/system/default.target
+ fi
+}
+
+# If /var/volatile is not empty, we have seen problems where programs such as the
+# journal make assumptions based on the contents of /var/volatile. The journal
+# would then write to /var/volatile before it was mounted, thus hiding the
+# items previously written.
+#
+# This change is to attempt to fix those types of issues in a way that doesn't
+# affect users that may not be using /var/volatile.
+empty_var_volatile () {
+ if [ -e ${IMAGE_ROOTFS}/etc/fstab ]; then
+ match=`awk '$1 !~ "#" && $2 ~ /\/var\/volatile/{print $2}' ${IMAGE_ROOTFS}/etc/fstab 2> /dev/null`
+ if [ -n "$match" ]; then
+ find ${IMAGE_ROOTFS}/var/volatile -mindepth 1 -delete
+ fi
+ fi
+}
+
+# Turn any symbolic /sbin/init link into a file
+remove_init_link () {
+ if [ -h ${IMAGE_ROOTFS}/sbin/init ]; then
+ LINKFILE=${IMAGE_ROOTFS}`readlink ${IMAGE_ROOTFS}/sbin/init`
+ rm ${IMAGE_ROOTFS}/sbin/init
+ cp $LINKFILE ${IMAGE_ROOTFS}/sbin/init
+ fi
+}
+
+python write_image_manifest () {
+ from oe.rootfs import image_list_installed_packages
+ from oe.utils import format_pkg_list
+
+ deploy_dir = d.getVar('IMGDEPLOYDIR')
+ link_name = d.getVar('IMAGE_LINK_NAME')
+ manifest_name = d.getVar('IMAGE_MANIFEST')
+
+ if not manifest_name:
+ return
+
+ pkgs = image_list_installed_packages(d)
+ with open(manifest_name, 'w+') as image_manifest:
+ image_manifest.write(format_pkg_list(pkgs, "ver"))
+
+ if os.path.exists(manifest_name) and link_name:
+ manifest_link = deploy_dir + "/" + link_name + ".manifest"
+ if manifest_link != manifest_name:
+ if os.path.lexists(manifest_link):
+ os.remove(manifest_link)
+ os.symlink(os.path.basename(manifest_name), manifest_link)
+}
+
+# Can be used to create /etc/timestamp during image construction to give a reasonably
+# sane default time setting
+rootfs_update_timestamp () {
+ if [ "${REPRODUCIBLE_TIMESTAMP_ROOTFS}" != "" ]; then
+ # Convert UTC into %4Y%2m%2d%2H%2M%2S
+ sformatted=`date -u -d @${REPRODUCIBLE_TIMESTAMP_ROOTFS} +%4Y%2m%2d%2H%2M%2S`
+ else
+ sformatted=`date -u +%4Y%2m%2d%2H%2M%2S`
+ fi
+ echo $sformatted > ${IMAGE_ROOTFS}/etc/timestamp
+ bbnote "rootfs_update_timestamp: set /etc/timestamp to $sformatted"
+}
+
+# Prevent X from being started
+rootfs_no_x_startup () {
+ if [ -f ${IMAGE_ROOTFS}/etc/init.d/xserver-nodm ]; then
+ chmod a-x ${IMAGE_ROOTFS}/etc/init.d/xserver-nodm
+ fi
+}
+
+rootfs_trim_schemas () {
+ for schema in ${IMAGE_ROOTFS}/etc/gconf/schemas/*.schemas
+ do
+ # Need this in case no files exist
+ if [ -e $schema ]; then
+ oe-trim-schemas $schema > $schema.new
+ mv $schema.new $schema
+ fi
+ done
+}
+
+rootfs_check_host_user_contaminated () {
+ contaminated="${S}/host-user-contaminated.txt"
+ HOST_USER_UID="$(PSEUDO_UNLOAD=1 id -u)"
+ HOST_USER_GID="$(PSEUDO_UNLOAD=1 id -g)"
+
+ find "${IMAGE_ROOTFS}" -path "${IMAGE_ROOTFS}/home" -prune -o \
+ -user "$HOST_USER_UID" -print -o -group "$HOST_USER_GID" -print >"$contaminated"
+
+ sed -e "s,${IMAGE_ROOTFS},," $contaminated | while read line; do
+ bbwarn "Path in the rootfs is owned by the same user or group as the user running bitbake:" $line `ls -lan ${IMAGE_ROOTFS}/$line`
+ done
+
+ if [ -s "$contaminated" ]; then
+ bbwarn "/etc/passwd:" `cat ${IMAGE_ROOTFS}/etc/passwd`
+ bbwarn "/etc/group:" `cat ${IMAGE_ROOTFS}/etc/group`
+ fi
+}
+
+# Make any absolute links in a sysroot relative
+rootfs_sysroot_relativelinks () {
+ sysroot-relativelinks.py ${SDK_OUTPUT}/${SDKTARGETSYSROOT}
+}
+
+# Generated test data json file
+python write_image_test_data() {
+ from oe.data import export2json
+
+ deploy_dir = d.getVar('IMGDEPLOYDIR')
+ link_name = d.getVar('IMAGE_LINK_NAME')
+ testdata_name = os.path.join(deploy_dir, "%s.testdata.json" % d.getVar('IMAGE_NAME'))
+
+ searchString = "%s/"%(d.getVar("TOPDIR")).replace("//","/")
+ export2json(d, testdata_name, searchString=searchString, replaceString="")
+
+ if os.path.exists(testdata_name) and link_name:
+ testdata_link = os.path.join(deploy_dir, "%s.testdata.json" % link_name)
+ if testdata_link != testdata_name:
+ if os.path.lexists(testdata_link):
+ os.remove(testdata_link)
+ os.symlink(os.path.basename(testdata_name), testdata_link)
+}
+write_image_test_data[vardepsexclude] += "TOPDIR"
+
+# Check for unsatisfied recommendations (RRECOMMENDS)
+python rootfs_log_check_recommends() {
+ log_path = d.expand("${T}/log.do_rootfs")
+ with open(log_path, 'r') as log:
+ for line in log:
+ if 'log_check' in line:
+ continue
+
+ if 'unsatisfied recommendation for' in line:
+ bb.warn('[log_check] %s: %s' % (d.getVar('PN'), line))
+}
+
+# Perform any additional adjustments needed to make rootf binary reproducible
+rootfs_reproducible () {
+ if [ "${REPRODUCIBLE_TIMESTAMP_ROOTFS}" != "" ]; then
+ # Convert UTC into %4Y%2m%2d%2H%2M%2S
+ sformatted=`date -u -d @${REPRODUCIBLE_TIMESTAMP_ROOTFS} +%4Y%2m%2d%2H%2M%2S`
+ echo $sformatted > ${IMAGE_ROOTFS}/etc/version
+ bbnote "rootfs_reproducible: set /etc/version to $sformatted"
+
+ if [ -d ${IMAGE_ROOTFS}${sysconfdir}/gconf ]; then
+ find ${IMAGE_ROOTFS}${sysconfdir}/gconf -name '%gconf.xml' -print0 | xargs -0r \
+ sed -i -e 's@\bmtime="[0-9][0-9]*"@mtime="'${REPRODUCIBLE_TIMESTAMP_ROOTFS}'"@g'
+ fi
+ fi
+}
+
+# Perform a dumb check for unit existence, not its validity
+python overlayfs_qa_check() {
+ from oe.overlayfs import mountUnitName
+
+ overlayMountPoints = d.getVarFlags("OVERLAYFS_MOUNT_POINT") or {}
+ imagepath = d.getVar("IMAGE_ROOTFS")
+ sysconfdir = d.getVar("sysconfdir")
+ searchpaths = [oe.path.join(imagepath, sysconfdir, "systemd", "system"),
+ oe.path.join(imagepath, d.getVar("systemd_system_unitdir"))]
+ fstabpath = oe.path.join(imagepath, sysconfdir, "fstab")
+
+ if not any(os.path.exists(path) for path in [*searchpaths, fstabpath]):
+ return
+
+ fstabDevices = []
+ if os.path.isfile(fstabpath):
+ with open(fstabpath, 'r') as f:
+ for line in f:
+ if line[0] == '#':
+ continue
+ path = line.split(maxsplit=2)
+ if len(path) > 2:
+ fstabDevices.append(path[1])
+
+ allUnitExist = True;
+ for mountPoint in overlayMountPoints:
+ qaSkip = (d.getVarFlag("OVERLAYFS_QA_SKIP", mountPoint) or "").split()
+ if "mount-configured" in qaSkip:
+ continue
+
+ mountPath = d.getVarFlag('OVERLAYFS_MOUNT_POINT', mountPoint)
+ if mountPath in fstabDevices:
+ continue
+
+ mountUnit = mountUnitName(mountPath)
+ if any(os.path.isfile(oe.path.join(dirpath, mountUnit))
+ for dirpath in searchpaths):
+ continue
+
+ bb.warn(f'Mount path {mountPath} not found in fstab and unit '
+ f'{mountUnit} not found in systemd unit directories.')
+ bb.warn(f'Skip this check by setting OVERLAYFS_QA_SKIP[{mountPoint}] = '
+ '"mount-configured"')
+ allUnitExist = False;
+
+ if not allUnitExist:
+ bb.fatal('Not all mount paths and units are installed in the image')
+}
+
+python overlayfs_postprocess() {
+ import shutil
+
+ # install helper script
+ helperScriptName = "overlayfs-create-dirs.sh"
+ helperScriptSource = oe.path.join(d.getVar("COREBASE"), "meta/files", helperScriptName)
+ helperScriptDest = oe.path.join(d.getVar("IMAGE_ROOTFS"), "/usr/sbin/", helperScriptName)
+ shutil.copyfile(helperScriptSource, helperScriptDest)
+ os.chmod(helperScriptDest, 0o755)
+}
diff --git a/meta/classes/rootfs_deb.bbclass b/meta/classes-recipe/rootfs_deb.bbclass
index 0469ba7059..c5c6426abb 100644
--- a/meta/classes/rootfs_deb.bbclass
+++ b/meta/classes-recipe/rootfs_deb.bbclass
@@ -1,6 +1,8 @@
#
# Copyright 2006-2007 Openedhand Ltd.
#
+# SPDX-License-Identifier: MIT
+#
ROOTFS_PKGMANAGE = "dpkg apt"
diff --git a/meta/classes/rootfs_ipk.bbclass b/meta/classes-recipe/rootfs_ipk.bbclass
index 245c256a6f..87fff53a58 100644
--- a/meta/classes/rootfs_ipk.bbclass
+++ b/meta/classes-recipe/rootfs_ipk.bbclass
@@ -1,4 +1,10 @@
#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+#
# Creates a root filesystem out of IPKs
#
# This rootfs can be mounted via root-nfs or it can be put into an cramfs/jffs etc.
@@ -23,7 +29,7 @@ OPKG_POSTPROCESS_COMMANDS = ""
OPKGLIBDIR ??= "${localstatedir}/lib"
-MULTILIBRE_ALLOW_REP = "${OPKGLIBDIR}/opkg|/usr/lib/opkg"
+MULTILIBRE_ALLOW_REP += "${OPKGLIBDIR}/opkg /usr/lib/opkg"
python () {
diff --git a/meta/classes/rootfs_rpm.bbclass b/meta/classes-recipe/rootfs_rpm.bbclass
index bec4d63ed6..55f1cc92ca 100644
--- a/meta/classes/rootfs_rpm.bbclass
+++ b/meta/classes-recipe/rootfs_rpm.bbclass
@@ -1,4 +1,10 @@
#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+#
# Creates a root filesystem out of rpm packages
#
@@ -14,11 +20,9 @@ IMAGE_ROOTFS_EXTRA_SPACE:append = "${@bb.utils.contains("PACKAGE_INSTALL", "dnf"
# Dnf is python based, so be sure python3-native is available to us.
EXTRANATIVEPATH += "python3-native"
-# opkg is needed for update-alternatives
RPMROOTFSDEPENDS = "rpm-native:do_populate_sysroot \
dnf-native:do_populate_sysroot \
- createrepo-c-native:do_populate_sysroot \
- opkg-native:do_populate_sysroot"
+ createrepo-c-native:do_populate_sysroot"
do_rootfs[depends] += "${RPMROOTFSDEPENDS}"
do_populate_sdk[depends] += "${RPMROOTFSDEPENDS}"
diff --git a/meta/classes/rootfsdebugfiles.bbclass b/meta/classes-recipe/rootfsdebugfiles.bbclass
index 85c7ec7434..4c2fc1de25 100644
--- a/meta/classes/rootfsdebugfiles.bbclass
+++ b/meta/classes-recipe/rootfsdebugfiles.bbclass
@@ -1,3 +1,9 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
# This class installs additional files found on the build host
# directly into the rootfs.
#
@@ -28,7 +34,7 @@
ROOTFS_DEBUG_FILES ?= ""
ROOTFS_DEBUG_FILES[doc] = "Lists additional files or directories to be installed with 'cp -a' in the format 'source1 target1;source2 target2;...'"
-ROOTFS_POSTPROCESS_COMMAND += "rootfs_debug_files;"
+ROOTFS_POSTPROCESS_COMMAND += "rootfs_debug_files"
rootfs_debug_files () {
#!/bin/sh -e
echo "${ROOTFS_DEBUG_FILES}" | sed -e 's/;/\n/g' | while read source target mode; do
diff --git a/meta/classes-recipe/rust-common.bbclass b/meta/classes-recipe/rust-common.bbclass
new file mode 100644
index 0000000000..6940093e59
--- /dev/null
+++ b/meta/classes-recipe/rust-common.bbclass
@@ -0,0 +1,196 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+inherit python3native
+inherit rust-target-config
+
+# Common variables used by all Rust builds
+export rustlibdir = "${libdir}/rustlib/${RUST_HOST_SYS}/lib"
+FILES:${PN} += "${rustlibdir}/*.so"
+FILES:${PN}-dev += "${rustlibdir}/*.rlib ${rustlibdir}/*.rmeta"
+FILES:${PN}-dbg += "${rustlibdir}/.debug"
+
+RUSTLIB = "-L ${STAGING_DIR_HOST}${rustlibdir}"
+RUST_DEBUG_REMAP = "--remap-path-prefix=${WORKDIR}=${TARGET_DBGSRC_DIR}"
+RUSTFLAGS += "${RUSTLIB} ${RUST_DEBUG_REMAP}"
+RUSTLIB_DEP ??= "libstd-rs"
+RUST_PANIC_STRATEGY ??= "unwind"
+
+def target_is_armv7(d):
+ '''Determine if target is armv7'''
+ # TUNE_FEATURES may include arm* even if the target is not arm
+ # in the case of *-native packages
+ if d.getVar('TARGET_ARCH') != 'arm':
+ return False
+
+ feat = d.getVar('TUNE_FEATURES')
+ feat = frozenset(feat.split())
+ mach_overrides = d.getVar('MACHINEOVERRIDES')
+ mach_overrides = frozenset(mach_overrides.split(':'))
+
+ v7=frozenset(['armv7a', 'armv7r', 'armv7m', 'armv7ve'])
+ if mach_overrides.isdisjoint(v7) and feat.isdisjoint(v7):
+ return False
+ else:
+ return True
+target_is_armv7[vardepvalue] = "${@target_is_armv7(d)}"
+
+# Responsible for taking Yocto triples and converting it to Rust triples
+def rust_base_triple(d, thing):
+ '''
+ Mangle bitbake's *_SYS into something that rust might support (see
+ rust/mk/cfg/* for a list)
+
+ Note that os is assumed to be some linux form
+ '''
+
+ # The llvm-target for armv7 is armv7-unknown-linux-gnueabihf
+ if d.getVar('{}_ARCH'.format(thing)) == d.getVar('TARGET_ARCH') and target_is_armv7(d):
+ arch = "armv7"
+ else:
+ arch = oe.rust.arch_to_rust_arch(d.getVar('{}_ARCH'.format(thing)))
+
+ # Substituting "unknown" when vendor is empty will match rust's standard
+ # targets when building native recipes (including rust-native itself)
+ vendor = d.getVar('{}_VENDOR'.format(thing)) or "-unknown"
+
+ # Default to glibc
+ libc = "-gnu"
+ os = d.getVar('{}_OS'.format(thing))
+ # This catches ARM targets and appends the necessary hard float bits
+ if os == "linux-gnueabi" or os == "linux-musleabi":
+ libc = bb.utils.contains('TUNE_FEATURES', 'callconvention-hard', 'hf', '', d)
+ elif os == "linux-gnux32" or os == "linux-muslx32":
+ libc = ""
+ elif "musl" in os:
+ libc = "-musl"
+ os = "linux"
+ elif "elf" in os:
+ libc = "-elf"
+ os = "none"
+ elif "eabi" in os:
+ libc = "-eabi"
+ os = "none"
+
+ return arch + vendor + '-' + os + libc
+
+
+# In some cases uname and the toolchain differ on their idea of the arch name
+RUST_BUILD_ARCH = "${@oe.rust.arch_to_rust_arch(d.getVar('BUILD_ARCH'))}"
+
+# Naming explanation
+# Yocto
+# - BUILD_SYS - Yocto triple of the build environment
+# - HOST_SYS - What we're building for in Yocto
+# - TARGET_SYS - What we're building for in Yocto
+#
+# So when building '-native' packages BUILD_SYS == HOST_SYS == TARGET_SYS
+# When building packages for the image HOST_SYS == TARGET_SYS
+# This is a gross over simplification as there are other modes but
+# currently this is all that's supported.
+#
+# Rust
+# - TARGET - the system where the binary will run
+# - HOST - the system where the binary is being built
+#
+# Rust additionally will use two additional cases:
+# - undecorated (e.g. CC) - equivalent to TARGET
+# - triple suffix (e.g. CC:x86_64_unknown_linux_gnu) - both
+# see: https://github.com/rust-lang/cc-rs
+# The way that Rust's internal triples and Yocto triples are mapped together
+# its likely best to not use the triple suffix due to potential confusion.
+
+RUST_BUILD_SYS = "${@rust_base_triple(d, 'BUILD')}"
+RUST_BUILD_SYS[vardepvalue] = "${RUST_BUILD_SYS}"
+RUST_HOST_SYS = "${@rust_base_triple(d, 'HOST')}"
+RUST_HOST_SYS[vardepvalue] = "${RUST_HOST_SYS}"
+RUST_TARGET_SYS = "${@rust_base_triple(d, 'TARGET')}"
+RUST_TARGET_SYS[vardepvalue] = "${RUST_TARGET_SYS}"
+
+# wrappers to get around the fact that Rust needs a single
+# binary but Yocto's compiler and linker commands have
+# arguments. Technically the archiver is always one command but
+# this is necessary for builds that determine the prefix and then
+# use those commands based on the prefix.
+WRAPPER_DIR = "${WORKDIR}/wrapper"
+RUST_BUILD_CC = "${WRAPPER_DIR}/build-rust-cc"
+RUST_BUILD_CXX = "${WRAPPER_DIR}/build-rust-cxx"
+RUST_BUILD_CCLD = "${WRAPPER_DIR}/build-rust-ccld"
+RUST_BUILD_AR = "${WRAPPER_DIR}/build-rust-ar"
+RUST_TARGET_CC = "${WRAPPER_DIR}/target-rust-cc"
+RUST_TARGET_CXX = "${WRAPPER_DIR}/target-rust-cxx"
+RUST_TARGET_CCLD = "${WRAPPER_DIR}/target-rust-ccld"
+RUST_TARGET_AR = "${WRAPPER_DIR}/target-rust-ar"
+
+create_wrapper_rust () {
+ file="$1"
+ shift
+ extras="$1"
+ shift
+ crate_cc_extras="$1"
+ shift
+
+ cat <<- EOF > "${file}"
+ #!/usr/bin/env python3
+ import os, sys
+ orig_binary = "$@"
+ extras = "${extras}"
+
+ # Apply a required subset of CC crate compiler flags
+ # when we build a target recipe for a non-bare-metal target.
+ # https://github.com/rust-lang/cc-rs/blob/main/src/lib.rs#L1614
+ if "CRATE_CC_NO_DEFAULTS" in os.environ.keys() and \
+ "TARGET" in os.environ.keys() and not "-none-" in os.environ["TARGET"]:
+ orig_binary += "${crate_cc_extras}"
+
+ binary = orig_binary.split()[0]
+ args = orig_binary.split() + sys.argv[1:]
+ if extras:
+ args.append(extras)
+ os.execvp(binary, args)
+ EOF
+ chmod +x "${file}"
+}
+
+WRAPPER_TARGET_CC = "${CC}"
+WRAPPER_TARGET_CXX = "${CXX}"
+WRAPPER_TARGET_CCLD = "${CCLD}"
+WRAPPER_TARGET_LDFLAGS = "${LDFLAGS}"
+WRAPPER_TARGET_EXTRALD = ""
+# see recipes-devtools/gcc/gcc/0018-Add-ssp_nonshared-to-link-commandline-for-musl-targe.patch
+# we need to link with ssp_nonshared on musl to avoid "undefined reference to `__stack_chk_fail_local'"
+# when building MACHINE=qemux86 for musl
+WRAPPER_TARGET_EXTRALD:libc-musl = "-lssp_nonshared"
+WRAPPER_TARGET_AR = "${AR}"
+
+# compiler is used by gcc-rs
+# linker is used by rustc/cargo
+# archiver is used by the build of libstd-rs
+do_rust_create_wrappers () {
+ mkdir -p "${WRAPPER_DIR}"
+
+ # Yocto Build / Rust Host C compiler
+ create_wrapper_rust "${RUST_BUILD_CC}" "" "${CRATE_CC_FLAGS}" "${BUILD_CC}"
+ # Yocto Build / Rust Host C++ compiler
+ create_wrapper_rust "${RUST_BUILD_CXX}" "" "${CRATE_CC_FLAGS}" "${BUILD_CXX}"
+ # Yocto Build / Rust Host linker
+ create_wrapper_rust "${RUST_BUILD_CCLD}" "" "" "${BUILD_CCLD}" "${BUILD_LDFLAGS}"
+ # Yocto Build / Rust Host archiver
+ create_wrapper_rust "${RUST_BUILD_AR}" "" "" "${BUILD_AR}"
+
+ # Yocto Target / Rust Target C compiler
+ create_wrapper_rust "${RUST_TARGET_CC}" "${WRAPPER_TARGET_EXTRALD}" "${CRATE_CC_FLAGS}" "${WRAPPER_TARGET_CC}" "${WRAPPER_TARGET_LDFLAGS}"
+ # Yocto Target / Rust Target C++ compiler
+ create_wrapper_rust "${RUST_TARGET_CXX}" "${WRAPPER_TARGET_EXTRALD}" "${CRATE_CC_FLAGS}" "${WRAPPER_TARGET_CXX}" "${CXXFLAGS}"
+ # Yocto Target / Rust Target linker
+ create_wrapper_rust "${RUST_TARGET_CCLD}" "${WRAPPER_TARGET_EXTRALD}" "" "${WRAPPER_TARGET_CCLD}" "${WRAPPER_TARGET_LDFLAGS}"
+ # Yocto Target / Rust Target archiver
+ create_wrapper_rust "${RUST_TARGET_AR}" "" "" "${WRAPPER_TARGET_AR}"
+
+}
+
+addtask rust_create_wrappers before do_configure after do_patch do_prepare_recipe_sysroot
+do_rust_create_wrappers[dirs] += "${WRAPPER_DIR}"
diff --git a/meta/classes-recipe/rust-target-config.bbclass b/meta/classes-recipe/rust-target-config.bbclass
new file mode 100644
index 0000000000..926b0630b1
--- /dev/null
+++ b/meta/classes-recipe/rust-target-config.bbclass
@@ -0,0 +1,437 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+# Right now this is focused on arm-specific tune features.
+# We get away with this for now as one can only use x86-64 as the build host
+# (not arm).
+# Note that TUNE_FEATURES is _always_ refering to the target, so we really
+# don't want to use this for the host/build.
+def llvm_features_from_tune(d):
+ f = []
+ feat = d.getVar('TUNE_FEATURES')
+ if not feat:
+ return []
+ feat = frozenset(feat.split())
+
+ mach_overrides = d.getVar('MACHINEOVERRIDES')
+ mach_overrides = frozenset(mach_overrides.split(':'))
+
+ if 'vfpv4' in feat:
+ f.append("+vfp4")
+ elif 'vfpv3' in feat:
+ f.append("+vfp3")
+ elif 'vfpv3d16' in feat:
+ f.append("+vfp3")
+ f.append("-d32")
+ elif 'vfpv2' in feat or 'vfp' in feat:
+ f.append("+vfp2")
+
+ if 'neon' in feat:
+ f.append("+neon")
+ elif target_is_armv7(d):
+ f.append("-neon")
+
+ if 'mips32' in feat:
+ f.append("+mips32")
+
+ if 'mips32r2' in feat:
+ f.append("+mips32r2")
+
+ if target_is_armv7(d):
+ f.append('+v7')
+
+ if ('armv6' in mach_overrides) or ('armv6' in feat):
+ f.append("+v6")
+ if 'armv5te' in feat:
+ f.append("+strict-align")
+ f.append("+v5te")
+ elif 'armv5' in feat:
+ f.append("+strict-align")
+ f.append("+v5")
+
+ if ('armv4' in mach_overrides) or ('armv4' in feat):
+ f.append("+strict-align")
+
+ if 'dsp' in feat:
+ f.append("+dsp")
+
+ if 'thumb' in feat:
+ if d.getVar('ARM_THUMB_OPT') == "thumb":
+ if target_is_armv7(d):
+ f.append('+thumb2')
+ f.append("+thumb-mode")
+
+ if 'cortexa5' in feat:
+ f.append("+a5")
+ if 'cortexa7' in feat:
+ f.append("+a7")
+ if 'cortexa9' in feat:
+ f.append("+a9")
+ if 'cortexa15' in feat:
+ f.append("+a15")
+ if 'cortexa17' in feat:
+ f.append("+a17")
+ if ('riscv64' in feat) or ('riscv32' in feat):
+ f.append("+a,+c,+d,+f,+m")
+ return f
+llvm_features_from_tune[vardepvalue] = "${@llvm_features_from_tune(d)}"
+
+# TARGET_CC_ARCH changes from build/cross/target so it'll do the right thing
+# this should go away when https://github.com/rust-lang/rust/pull/31709 is
+# stable (1.9.0?)
+def llvm_features_from_cc_arch(d):
+ f = []
+ feat = d.getVar('TARGET_CC_ARCH')
+ if not feat:
+ return []
+ feat = frozenset(feat.split())
+
+ if '-mmmx' in feat:
+ f.append("+mmx")
+ if '-msse' in feat:
+ f.append("+sse")
+ if '-msse2' in feat:
+ f.append("+sse2")
+ if '-msse3' in feat:
+ f.append("+sse3")
+ if '-mssse3' in feat:
+ f.append("+ssse3")
+ if '-msse4.1' in feat:
+ f.append("+sse4.1")
+ if '-msse4.2' in feat:
+ f.append("+sse4.2")
+ if '-msse4a' in feat:
+ f.append("+sse4a")
+ if '-mavx' in feat:
+ f.append("+avx")
+ if '-mavx2' in feat:
+ f.append("+avx2")
+
+ return f
+
+def llvm_features_from_target_fpu(d):
+ # TARGET_FPU can be hard or soft. +soft-float tell llvm to use soft float
+ # ABI. There is no option for hard.
+
+ fpu = d.getVar('TARGET_FPU')
+ return ["+soft-float"] if fpu == "soft" else []
+
+def llvm_features(d):
+ return ','.join(llvm_features_from_tune(d) +
+ llvm_features_from_cc_arch(d) +
+ llvm_features_from_target_fpu(d))
+
+llvm_features[vardepvalue] = "${@llvm_features(d)}"
+
+## arm-unknown-linux-gnueabihf
+DATA_LAYOUT[arm-eabi] = "e-m:e-p:32:32-i64:64-v128:64:128-a:0:32-n32-S64"
+TARGET_ENDIAN[arm-eabi] = "little"
+TARGET_POINTER_WIDTH[arm-eabi] = "32"
+TARGET_C_INT_WIDTH[arm-eabi] = "32"
+MAX_ATOMIC_WIDTH[arm-eabi] = "64"
+FEATURES[arm-eabi] = "+v6,+vfp2"
+
+## armv7-unknown-linux-gnueabihf
+DATA_LAYOUT[armv7-eabi] = "e-m:e-p:32:32-i64:64-v128:64:128-a:0:32-n32-S64"
+TARGET_ENDIAN[armv7-eabi] = "little"
+TARGET_POINTER_WIDTH[armv7-eabi] = "32"
+TARGET_C_INT_WIDTH[armv7-eabi] = "32"
+MAX_ATOMIC_WIDTH[armv7-eabi] = "64"
+FEATURES[armv7-eabi] = "+v7,+vfp2,+thumb2"
+
+## aarch64-unknown-linux-{gnu, musl}
+DATA_LAYOUT[aarch64] = "e-m:e-i8:8:32-i16:16:32-i64:64-i128:128-n32:64-S128"
+TARGET_ENDIAN[aarch64] = "little"
+TARGET_POINTER_WIDTH[aarch64] = "64"
+TARGET_C_INT_WIDTH[aarch64] = "32"
+MAX_ATOMIC_WIDTH[aarch64] = "128"
+
+## x86_64-unknown-linux-{gnu, musl}
+DATA_LAYOUT[x86_64] = "e-m:e-i64:64-f80:128-n8:16:32:64-S128"
+TARGET_ENDIAN[x86_64] = "little"
+TARGET_POINTER_WIDTH[x86_64] = "64"
+TARGET_C_INT_WIDTH[x86_64] = "32"
+MAX_ATOMIC_WIDTH[x86_64] = "64"
+
+## x86_64-unknown-linux-gnux32
+DATA_LAYOUT[x86_64-x32] = "e-m:e-p:32:32-p270:32:32-p271:32:32-p272:64:64-i64:64-f80:128-n8:16:32:64-S128"
+TARGET_ENDIAN[x86_64-x32] = "little"
+TARGET_POINTER_WIDTH[x86_64-x32] = "32"
+TARGET_C_INT_WIDTH[x86_64-x32] = "32"
+MAX_ATOMIC_WIDTH[x86_64-x32] = "64"
+
+## i686-unknown-linux-{gnu, musl}
+DATA_LAYOUT[i686] = "e-m:e-p:32:32-f64:32:64-f80:32-n8:16:32-S128"
+TARGET_ENDIAN[i686] = "little"
+TARGET_POINTER_WIDTH[i686] = "32"
+TARGET_C_INT_WIDTH[i686] = "32"
+MAX_ATOMIC_WIDTH[i686] = "64"
+
+## XXX: a bit of a hack so qemux86 builds, clone of i686-unknown-linux-{gnu, musl} above
+DATA_LAYOUT[i586] = "e-m:e-p:32:32-f64:32:64-f80:32-n8:16:32-S128"
+TARGET_ENDIAN[i586] = "little"
+TARGET_POINTER_WIDTH[i586] = "32"
+TARGET_C_INT_WIDTH[i586] = "32"
+MAX_ATOMIC_WIDTH[i586] = "64"
+
+## mips-unknown-linux-{gnu, musl}
+DATA_LAYOUT[mips] = "E-m:m-p:32:32-i8:8:32-i16:16:32-i64:64-n32-S64"
+TARGET_ENDIAN[mips] = "big"
+TARGET_POINTER_WIDTH[mips] = "32"
+TARGET_C_INT_WIDTH[mips] = "32"
+MAX_ATOMIC_WIDTH[mips] = "32"
+
+## mipsel-unknown-linux-{gnu, musl}
+DATA_LAYOUT[mipsel] = "e-m:m-p:32:32-i8:8:32-i16:16:32-i64:64-n32-S64"
+TARGET_ENDIAN[mipsel] = "little"
+TARGET_POINTER_WIDTH[mipsel] = "32"
+TARGET_C_INT_WIDTH[mipsel] = "32"
+MAX_ATOMIC_WIDTH[mipsel] = "32"
+
+## mips64-unknown-linux-{gnu, musl}
+DATA_LAYOUT[mips64] = "E-m:e-i8:8:32-i16:16:32-i64:64-n32:64-S128"
+TARGET_ENDIAN[mips64] = "big"
+TARGET_POINTER_WIDTH[mips64] = "64"
+TARGET_C_INT_WIDTH[mips64] = "64"
+MAX_ATOMIC_WIDTH[mips64] = "64"
+
+## mips64-n32-unknown-linux-{gnu, musl}
+DATA_LAYOUT[mips64-n32] = "E-m:e-p:32:32-i8:8:32-i16:16:32-i64:64-n32:64-S128"
+TARGET_ENDIAN[mips64-n32] = "big"
+TARGET_POINTER_WIDTH[mips64-n32] = "32"
+TARGET_C_INT_WIDTH[mips64-n32] = "32"
+MAX_ATOMIC_WIDTH[mips64-n32] = "64"
+
+## mips64el-unknown-linux-{gnu, musl}
+DATA_LAYOUT[mips64el] = "e-m:e-i8:8:32-i16:16:32-i64:64-n32:64-S128"
+TARGET_ENDIAN[mips64el] = "little"
+TARGET_POINTER_WIDTH[mips64el] = "64"
+TARGET_C_INT_WIDTH[mips64el] = "64"
+MAX_ATOMIC_WIDTH[mips64el] = "64"
+
+## powerpc-unknown-linux-{gnu, musl}
+DATA_LAYOUT[powerpc] = "E-m:e-p:32:32-i64:64-n32"
+TARGET_ENDIAN[powerpc] = "big"
+TARGET_POINTER_WIDTH[powerpc] = "32"
+TARGET_C_INT_WIDTH[powerpc] = "32"
+MAX_ATOMIC_WIDTH[powerpc] = "32"
+
+## powerpc64-unknown-linux-{gnu, musl}
+DATA_LAYOUT[powerpc64] = "E-m:e-i64:64-n32:64-S128-v256:256:256-v512:512:512"
+TARGET_ENDIAN[powerpc64] = "big"
+TARGET_POINTER_WIDTH[powerpc64] = "64"
+TARGET_C_INT_WIDTH[powerpc64] = "64"
+MAX_ATOMIC_WIDTH[powerpc64] = "64"
+
+## powerpc64le-unknown-linux-{gnu, musl}
+DATA_LAYOUT[powerpc64le] = "e-m:e-i64:64-n32:64-v256:256:256-v512:512:512"
+TARGET_ENDIAN[powerpc64le] = "little"
+TARGET_POINTER_WIDTH[powerpc64le] = "64"
+TARGET_C_INT_WIDTH[powerpc64le] = "64"
+MAX_ATOMIC_WIDTH[powerpc64le] = "64"
+
+## riscv32gc-unknown-linux-{gnu, musl}
+DATA_LAYOUT[riscv32gc] = "e-m:e-p:32:32-i64:64-n32-S128"
+TARGET_ENDIAN[riscv32gc] = "little"
+TARGET_POINTER_WIDTH[riscv32gc] = "32"
+TARGET_C_INT_WIDTH[riscv32gc] = "32"
+MAX_ATOMIC_WIDTH[riscv32gc] = "32"
+
+## riscv64gc-unknown-linux-{gnu, musl}
+DATA_LAYOUT[riscv64gc] = "e-m:e-p:64:64-i64:64-i128:128-n64-S128"
+TARGET_ENDIAN[riscv64gc] = "little"
+TARGET_POINTER_WIDTH[riscv64gc] = "64"
+TARGET_C_INT_WIDTH[riscv64gc] = "64"
+MAX_ATOMIC_WIDTH[riscv64gc] = "64"
+
+## loongarch64-unknown-linux-{gnu, musl}
+DATA_LAYOUT[loongarch64] = "e-m:e-i8:8:32-i16:16:32-i64:64-n32:64-S128"
+TARGET_ENDIAN[loongarch64] = "little"
+TARGET_POINTER_WIDTH[loongarch64] = "64"
+TARGET_C_INT_WIDTH[loongarch64] = "32"
+MAX_ATOMIC_WIDTH[loongarch64] = "64"
+FEATURES[loongarch64] = "+d"
+
+# Convert a normal arch (HOST_ARCH, TARGET_ARCH, BUILD_ARCH, etc) to something
+# rust's internals won't choke on.
+def arch_to_rust_target_arch(arch):
+ if arch == "i586" or arch == "i686":
+ return "x86"
+ elif arch == "mipsel":
+ return "mips"
+ elif arch == "mip64sel":
+ return "mips64"
+ elif arch == "armv7":
+ return "arm"
+ elif arch == "powerpc64le":
+ return "powerpc64"
+ elif arch == "riscv32gc":
+ return "riscv32"
+ elif arch == "riscv64gc":
+ return "riscv64"
+ else:
+ return arch
+
+# Convert a rust target string to a llvm-compatible triplet
+def rust_sys_to_llvm_target(sys):
+ if sys.startswith('riscv32gc-'):
+ return sys.replace('riscv32gc-', 'riscv32-', 1)
+ if sys.startswith('riscv64gc-'):
+ return sys.replace('riscv64gc-', 'riscv64-', 1)
+ return sys
+
+# generates our target CPU value
+def llvm_cpu(d):
+ cpu = d.getVar('PACKAGE_ARCH')
+ target = d.getVar('TRANSLATED_TARGET_ARCH')
+
+ trans = {}
+ trans['corei7-64'] = "corei7"
+ trans['core2-32'] = "core2"
+ trans['x86-64'] = "x86-64"
+ trans['i686'] = "i686"
+ trans['i586'] = "i586"
+ trans['mips64'] = "mips64"
+ trans['mips64el'] = "mips64"
+ trans['powerpc64le'] = "ppc64le"
+ trans['powerpc64'] = "ppc64"
+ trans['riscv64'] = "generic-rv64"
+ trans['riscv32'] = "generic-rv32"
+ trans['loongarch64'] = "la464"
+
+ if target in ["mips", "mipsel", "powerpc"]:
+ feat = frozenset(d.getVar('TUNE_FEATURES').split())
+ if "mips32r2" in feat:
+ trans['mipsel'] = "mips32r2"
+ trans['mips'] = "mips32r2"
+ elif "mips32" in feat:
+ trans['mipsel'] = "mips32"
+ trans['mips'] = "mips32"
+ elif "ppc7400" in feat:
+ trans['powerpc'] = "7400"
+
+ try:
+ return trans[cpu]
+ except:
+ return trans.get(target, "generic")
+
+llvm_cpu[vardepvalue] = "${@llvm_cpu(d)}"
+
+def rust_gen_target(d, thing, wd, arch):
+ import json
+
+ build_sys = d.getVar('BUILD_SYS')
+ target_sys = d.getVar('TARGET_SYS')
+
+ sys = d.getVar('{}_SYS'.format(thing))
+ prefix = d.getVar('{}_PREFIX'.format(thing))
+ rustsys = d.getVar('RUST_{}_SYS'.format(thing))
+
+ abi = None
+ cpu = "generic"
+ features = ""
+
+ # Need to apply the target tuning consitently, only if the triplet applies to the target
+ # and not in the native case
+ if sys == target_sys and sys != build_sys:
+ abi = d.getVar('ABIEXTENSION')
+ cpu = llvm_cpu(d)
+ if bb.data.inherits_class('native', d):
+ features = ','.join(llvm_features_from_cc_arch(d))
+ else:
+ features = llvm_features(d) or ""
+ # arm and armv7 have different targets in llvm
+ if arch == "arm" and target_is_armv7(d):
+ arch = 'armv7'
+
+ rust_arch = oe.rust.arch_to_rust_arch(arch)
+
+ if abi:
+ arch_abi = "{}-{}".format(rust_arch, abi)
+ else:
+ arch_abi = rust_arch
+
+ features = features or d.getVarFlag('FEATURES', arch_abi) or ""
+ features = features.strip()
+
+ # build tspec
+ tspec = {}
+ tspec['llvm-target'] = rust_sys_to_llvm_target(rustsys)
+ tspec['data-layout'] = d.getVarFlag('DATA_LAYOUT', arch_abi)
+ if tspec['data-layout'] is None:
+ bb.fatal("No rust target defined for %s" % arch_abi)
+ tspec['max-atomic-width'] = int(d.getVarFlag('MAX_ATOMIC_WIDTH', arch_abi))
+ tspec['target-pointer-width'] = d.getVarFlag('TARGET_POINTER_WIDTH', arch_abi)
+ tspec['target-c-int-width'] = d.getVarFlag('TARGET_C_INT_WIDTH', arch_abi)
+ tspec['target-endian'] = d.getVarFlag('TARGET_ENDIAN', arch_abi)
+ tspec['arch'] = arch_to_rust_target_arch(rust_arch)
+ if "baremetal" in d.getVar('TCLIBC'):
+ tspec['os'] = "none"
+ else:
+ tspec['os'] = "linux"
+ if "musl" in tspec['llvm-target']:
+ tspec['env'] = "musl"
+ else:
+ tspec['env'] = "gnu"
+ if "riscv64" in tspec['llvm-target']:
+ tspec['llvm-abiname'] = "lp64d"
+ if "riscv32" in tspec['llvm-target']:
+ tspec['llvm-abiname'] = "ilp32d"
+ if "loongarch64" in tspec['llvm-target']:
+ tspec['llvm-abiname'] = "lp64d"
+ tspec['vendor'] = "unknown"
+ tspec['target-family'] = "unix"
+ tspec['linker'] = "{}{}gcc".format(d.getVar('CCACHE'), prefix)
+ tspec['cpu'] = cpu
+ if features != "":
+ tspec['features'] = features
+ tspec['dynamic-linking'] = True
+ tspec['executables'] = True
+ tspec['linker-is-gnu'] = True
+ tspec['linker-flavor'] = "gcc"
+ tspec['has-rpath'] = True
+ tspec['position-independent-executables'] = True
+ tspec['panic-strategy'] = d.getVar("RUST_PANIC_STRATEGY")
+
+ # write out the target spec json file
+ with open(wd + rustsys + '.json', 'w') as f:
+ json.dump(tspec, f, indent=4)
+
+# These are accounted for in tmpdir path names so don't need to be in the task sig
+rust_gen_target[vardepsexclude] += "ABIEXTENSION llvm_cpu"
+
+do_rust_gen_targets[vardeps] += "DATA_LAYOUT TARGET_ENDIAN TARGET_POINTER_WIDTH TARGET_C_INT_WIDTH MAX_ATOMIC_WIDTH FEATURES"
+
+RUST_TARGETS_DIR = "${WORKDIR}/rust-targets/"
+export RUST_TARGET_PATH = "${RUST_TARGETS_DIR}"
+
+python do_rust_gen_targets () {
+ wd = d.getVar('RUST_TARGETS_DIR')
+ # Order of BUILD, HOST, TARGET is important in case the files overwrite, most specific last
+ rust_gen_target(d, 'BUILD', wd, d.getVar('BUILD_ARCH'))
+ rust_gen_target(d, 'HOST', wd, d.getVar('HOST_ARCH'))
+ rust_gen_target(d, 'TARGET', wd, d.getVar('TARGET_ARCH'))
+}
+
+addtask rust_gen_targets after do_patch before do_compile
+do_rust_gen_targets[dirs] += "${RUST_TARGETS_DIR}"
+
+# For building target C dependecies use only compiler parameters defined in OE
+# and ignore the CC crate defaults which conflicts with OE ones in some cases.
+# https://github.com/rust-lang/cc-rs#external-configuration-via-environment-variables
+# Some CC crate compiler flags are still required.
+# We apply them conditionally in rust wrappers.
+
+CRATE_CC_FLAGS:class-native = ""
+CRATE_CC_FLAGS:class-nativesdk = ""
+CRATE_CC_FLAGS:class-target = " -ffunction-sections -fdata-sections -fPIC"
+
+do_compile:prepend:class-target() {
+ export CRATE_CC_NO_DEFAULTS=1
+}
+do_install:prepend:class-target() {
+ export CRATE_CC_NO_DEFAULTS=1
+}
diff --git a/meta/classes/rust.bbclass b/meta/classes-recipe/rust.bbclass
index 5c8938d09f..dae25cac2a 100644
--- a/meta/classes/rust.bbclass
+++ b/meta/classes-recipe/rust.bbclass
@@ -1,8 +1,14 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
inherit rust-common
RUSTC = "rustc"
-RUSTC_ARCHFLAGS += "--target=${HOST_SYS} ${RUSTFLAGS}"
+RUSTC_ARCHFLAGS += "--target=${RUST_HOST_SYS} ${RUSTFLAGS}"
def rust_base_dep(d):
# Taken from meta/classes/base.bbclass `base_dep_prepend` and modified to
@@ -10,7 +16,7 @@ def rust_base_dep(d):
deps = ""
if not d.getVar('INHIBIT_DEFAULT_RUST_DEPS'):
if (d.getVar('HOST_SYS') != d.getVar('BUILD_SYS')):
- deps += " virtual/${TARGET_PREFIX}rust ${RUSTLIB_DEP}"
+ deps += " rust-native ${RUSTLIB_DEP}"
else:
deps += " rust-native"
return deps
@@ -37,7 +43,7 @@ HOST_CFLAGS ?= "${CFLAGS}"
HOST_CXXFLAGS ?= "${CXXFLAGS}"
HOST_CPPFLAGS ?= "${CPPFLAGS}"
-rustlib_suffix="${TUNE_ARCH}${TARGET_VENDOR}-${TARGET_OS}/rustlib/${HOST_SYS}/lib"
+rustlib_suffix="${TUNE_ARCH}${TARGET_VENDOR}-${TARGET_OS}/rustlib/${RUST_HOST_SYS}/lib"
# Native sysroot standard library path
rustlib_src="${prefix}/lib/${rustlib_suffix}"
# Host sysroot standard library path
diff --git a/meta/classes-recipe/scons.bbclass b/meta/classes-recipe/scons.bbclass
new file mode 100644
index 0000000000..d20a78dc6e
--- /dev/null
+++ b/meta/classes-recipe/scons.bbclass
@@ -0,0 +1,40 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+inherit python3native
+
+DEPENDS += "python3-scons-native"
+
+EXTRA_OESCONS ?= ""
+# This value below is derived from $(getconf ARG_MAX)
+SCONS_MAXLINELENGTH ?= "MAXLINELENGTH=2097152"
+EXTRA_OESCONS:append = " ${SCONS_MAXLINELENGTH}"
+do_configure() {
+ if [ -n "${CONFIGURESTAMPFILE}" -a "${S}" = "${B}" ]; then
+ if [ -e "${CONFIGURESTAMPFILE}" -a "`cat ${CONFIGURESTAMPFILE}`" != "${BB_TASKHASH}" -a "${CLEANBROKEN}" != "1" ]; then
+ ${STAGING_BINDIR_NATIVE}/scons --directory=${S} --clean PREFIX=${prefix} prefix=${prefix} ${EXTRA_OESCONS}
+ fi
+
+ mkdir -p `dirname ${CONFIGURESTAMPFILE}`
+ echo ${BB_TASKHASH} > ${CONFIGURESTAMPFILE}
+ fi
+}
+
+scons_do_compile() {
+ ${STAGING_BINDIR_NATIVE}/scons --directory=${S} ${PARALLEL_MAKE} PREFIX=${prefix} prefix=${prefix} ${EXTRA_OESCONS} || \
+ die "scons build execution failed."
+}
+
+scons_do_install() {
+ ${STAGING_BINDIR_NATIVE}/scons --directory=${S} install_root=${D}${prefix} PREFIX=${prefix} prefix=${prefix} ${EXTRA_OESCONS} install || \
+ die "scons install execution failed."
+}
+
+do_configure[vardepsexclude] = "SCONS_MAXLINELENGTH"
+do_compile[vardepsexclude] = "SCONS_MAXLINELENGTH"
+do_install[vardepsexclude] = "SCONS_MAXLINELENGTH"
+
+EXPORT_FUNCTIONS do_compile do_install
diff --git a/meta/classes-recipe/setuptools3-base.bbclass b/meta/classes-recipe/setuptools3-base.bbclass
new file mode 100644
index 0000000000..27af6abc58
--- /dev/null
+++ b/meta/classes-recipe/setuptools3-base.bbclass
@@ -0,0 +1,30 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+DEPENDS:append:class-target = " python3-native python3"
+DEPENDS:append:class-nativesdk = " python3-native python3"
+RDEPENDS:${PN}:append:class-target = " python3-core"
+
+export STAGING_INCDIR
+export STAGING_LIBDIR
+
+# LDSHARED is the ld *command* used to create shared library
+export LDSHARED = "${CCLD} -shared"
+# LDXXSHARED is the ld *command* used to create shared library of C++
+# objects
+export LDCXXSHARED = "${CXX} -shared"
+# CCSHARED are the C *flags* used to create objects to go into a shared
+# library (module)
+export CCSHARED = "-fPIC -DPIC"
+# LINKFORSHARED are the flags passed to the $(CC) command that links
+# the python executable
+export LINKFORSHARED = "${SECURITY_CFLAGS} -Xlinker -export-dynamic"
+
+FILES:${PN} += "${PYTHON_SITEPACKAGES_DIR}"
+FILES:${PN}-staticdev += "${PYTHON_SITEPACKAGES_DIR}/*.a"
+FILES:${PN}-dev += "${PYTHON_SITEPACKAGES_DIR}/*.la"
+
+inherit python3native python3targetconfig
diff --git a/meta/classes-recipe/setuptools3.bbclass b/meta/classes-recipe/setuptools3.bbclass
new file mode 100644
index 0000000000..d71a089539
--- /dev/null
+++ b/meta/classes-recipe/setuptools3.bbclass
@@ -0,0 +1,38 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+inherit setuptools3-base python_pep517
+
+DEPENDS += "python3-setuptools-native python3-wheel-native"
+
+SETUPTOOLS_BUILD_ARGS ?= ""
+
+SETUPTOOLS_SETUP_PATH ?= "${S}"
+
+setuptools3_do_configure() {
+ :
+}
+
+setuptools3_do_compile() {
+ cd ${SETUPTOOLS_SETUP_PATH}
+ NO_FETCH_BUILD=1 \
+ STAGING_INCDIR=${STAGING_INCDIR} \
+ STAGING_LIBDIR=${STAGING_LIBDIR} \
+ ${STAGING_BINDIR_NATIVE}/python3-native/python3 setup.py \
+ bdist_wheel --verbose --dist-dir ${PEP517_WHEEL_PATH} ${SETUPTOOLS_BUILD_ARGS} || \
+ bbfatal_log "'python3 setup.py bdist_wheel ${SETUPTOOLS_BUILD_ARGS}' execution failed."
+}
+setuptools3_do_compile[vardepsexclude] = "MACHINE"
+do_compile[cleandirs] += "${PEP517_WHEEL_PATH}"
+
+# This could be removed in the future but some recipes in meta-oe still use it
+setuptools3_do_install() {
+ python_pep517_do_install
+}
+
+EXPORT_FUNCTIONS do_configure do_compile do_install
+
+export LDSHARED="${CCLD} -shared"
diff --git a/meta/classes/setuptools3_legacy.bbclass b/meta/classes-recipe/setuptools3_legacy.bbclass
index 5a99daadb5..264b1f5cfb 100644
--- a/meta/classes/setuptools3_legacy.bbclass
+++ b/meta/classes-recipe/setuptools3_legacy.bbclass
@@ -1,3 +1,9 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
# This class is for packages which use the deprecated setuptools behaviour,
# specifically custom install tasks which don't work correctly with bdist_wheel.
# This behaviour is deprecated in setuptools[1] and won't work in the future, so
@@ -32,9 +38,9 @@ setuptools3_legacy_do_compile() {
NO_FETCH_BUILD=1 \
STAGING_INCDIR=${STAGING_INCDIR} \
STAGING_LIBDIR=${STAGING_LIBDIR} \
- ${STAGING_BINDIR_NATIVE}/${PYTHON_PN}-native/${PYTHON_PN} setup.py \
+ ${STAGING_BINDIR_NATIVE}/python3-native/python3 setup.py \
build --build-base=${B} ${SETUPTOOLS_BUILD_ARGS} || \
- bbfatal_log "'${PYTHON_PN} setup.py build ${SETUPTOOLS_BUILD_ARGS}' execution failed."
+ bbfatal_log "'python3 setup.py build ${SETUPTOOLS_BUILD_ARGS}' execution failed."
}
setuptools3_legacy_do_compile[vardepsexclude] = "MACHINE"
@@ -43,10 +49,10 @@ setuptools3_legacy_do_install() {
install -d ${D}${PYTHON_SITEPACKAGES_DIR}
STAGING_INCDIR=${STAGING_INCDIR} \
STAGING_LIBDIR=${STAGING_LIBDIR} \
- PYTHONPATH=${D}${PYTHON_SITEPACKAGES_DIR} \
- ${STAGING_BINDIR_NATIVE}/${PYTHON_PN}-native/${PYTHON_PN} setup.py \
+ PYTHONPATH=${D}${PYTHON_SITEPACKAGES_DIR}:$PYTHONPATH \
+ ${STAGING_BINDIR_NATIVE}/python3-native/python3 setup.py \
build --build-base=${B} install --skip-build ${SETUPTOOLS_INSTALL_ARGS} || \
- bbfatal_log "'${PYTHON_PN} setup.py install ${SETUPTOOLS_INSTALL_ARGS}' execution failed."
+ bbfatal_log "'python3 setup.py install ${SETUPTOOLS_INSTALL_ARGS}' execution failed."
# support filenames with *spaces*
find ${D} -name "*.py" -exec grep -q ${D} {} \; \
diff --git a/meta/classes/siteinfo.bbclass b/meta/classes-recipe/siteinfo.bbclass
index 3555d5a663..68aefb8eda 100644
--- a/meta/classes/siteinfo.bbclass
+++ b/meta/classes-recipe/siteinfo.bbclass
@@ -1,3 +1,9 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
# This class exists to provide information about the targets that
# may be needed by other classes and/or recipes. If you add a new
# target this will probably need to be updated.
@@ -33,6 +39,8 @@ def siteinfo_data_for_machine(arch, os, d):
"i686": "endian-little bit-32 ix86-common",
"ia64": "endian-little bit-64",
"lm32": "endian-big bit-32",
+ "loongarch32": "endian-little bit-32 loongarch",
+ "loongarch64": "endian-little bit-64 loongarch",
"m68k": "endian-big bit-32",
"microblaze": "endian-big bit-32 microblaze-common",
"microblazeel": "endian-little bit-32 microblaze-common",
@@ -65,6 +73,8 @@ def siteinfo_data_for_machine(arch, os, d):
osinfo = {
"darwin": "common-darwin",
"darwin9": "common-darwin",
+ "darwin19": "common-darwin",
+ "darwin21": "common-darwin",
"linux": "common-linux common-glibc",
"linux-gnu": "common-linux common-glibc",
"linux-gnu_ilp32": "common-linux common-glibc",
@@ -91,6 +101,8 @@ def siteinfo_data_for_machine(arch, os, d):
"arm-linux-musleabi": "arm-linux",
"armeb-linux-gnueabi": "armeb-linux",
"armeb-linux-musleabi": "armeb-linux",
+ "loongarch32-linux": "loongarch32-linux",
+ "loongarch64-linux": "loongarch64-linux",
"microblazeel-linux" : "microblaze-linux",
"microblazeel-linux-musl" : "microblaze-linux",
"mips-linux-musl": "mips-linux",
@@ -120,6 +132,8 @@ def siteinfo_data_for_machine(arch, os, d):
"x86_64-cygwin": "bit-64",
"x86_64-darwin": "bit-64",
"x86_64-darwin9": "bit-64",
+ "x86_64-darwin19": "bit-64",
+ "x86_64-darwin21": "bit-64",
"x86_64-linux": "bit-64",
"x86_64-linux-musl": "x86_64-linux bit-64",
"x86_64-linux-muslx32": "bit-32 ix86-common x32-linux",
diff --git a/meta/classes/syslinux.bbclass b/meta/classes-recipe/syslinux.bbclass
index 894f6b3718..be3b898b4d 100644
--- a/meta/classes/syslinux.bbclass
+++ b/meta/classes-recipe/syslinux.bbclass
@@ -1,6 +1,6 @@
# syslinux.bbclass
-# Copyright (C) 2004-2006, Advanced Micro Devices, Inc. All Rights Reserved
-# Released under the MIT license (see packages/COPYING)
+# Copyright (C) 2004-2006, Advanced Micro Devices, Inc.
+# SPDX-License-Identifier: MIT
# Provide syslinux specific functions for building bootable images.
diff --git a/meta/classes/systemd-boot-cfg.bbclass b/meta/classes-recipe/systemd-boot-cfg.bbclass
index b3e0e6ad41..12da41ebad 100644
--- a/meta/classes/systemd-boot-cfg.bbclass
+++ b/meta/classes-recipe/systemd-boot-cfg.bbclass
@@ -1,3 +1,9 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
SYSTEMD_BOOT_CFG ?= "${S}/loader.conf"
SYSTEMD_BOOT_ENTRIES ?= ""
SYSTEMD_BOOT_TIMEOUT ?= "10"
@@ -29,7 +35,7 @@ python build_efi_cfg() {
bb.fatal('Unable to open %s' % cfile)
cfgfile.write('# Automatically created by OE\n')
- cfgfile.write('default %s\n' % (labels.split()[0]))
+ cfgfile.write('default %s.conf\n' % (labels.split()[0]))
timeout = d.getVar('SYSTEMD_BOOT_TIMEOUT')
if timeout:
cfgfile.write('timeout %s\n' % timeout)
diff --git a/meta/classes/systemd-boot.bbclass b/meta/classes-recipe/systemd-boot.bbclass
index 57ec0acbc5..5aa32dd997 100644
--- a/meta/classes/systemd-boot.bbclass
+++ b/meta/classes-recipe/systemd-boot.bbclass
@@ -1,6 +1,6 @@
# Copyright (C) 2016 Intel Corporation
#
-# Released under the MIT license (see COPYING.MIT)
+# SPDX-License-Identifier: MIT
# systemd-boot.bbclass - The "systemd-boot" is essentially the gummiboot merged into systemd.
# The original standalone gummiboot project is dead without any more
diff --git a/meta/classes/systemd.bbclass b/meta/classes-recipe/systemd.bbclass
index 09ec52792d..48b364c1d4 100644
--- a/meta/classes/systemd.bbclass
+++ b/meta/classes-recipe/systemd.bbclass
@@ -1,3 +1,9 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
# The list of packages that should have systemd packaging scripts added. For
# each entry, optionally have a SYSTEMD_SERVICE:[package] that lists the service
# files in this package. If this variable isn't set, [package].service is used.
@@ -146,6 +152,7 @@ python systemd_populate_packages() {
def systemd_check_services():
searchpaths = [oe.path.join(d.getVar("sysconfdir"), "systemd", "system"),]
searchpaths.append(d.getVar("systemd_system_unitdir"))
+ searchpaths.append(d.getVar("systemd_user_unitdir"))
systemd_packages = d.getVar('SYSTEMD_PACKAGES')
keys = 'Also'
@@ -163,7 +170,7 @@ python systemd_populate_packages() {
base = service[:at] + '@' + service[ext:]
for path in searchpaths:
- if os.path.exists(oe.path.join(d.getVar("D"), path, service)):
+ if os.path.lexists(oe.path.join(d.getVar("D"), path, service)):
path_found = path
break
elif base is not None:
@@ -199,7 +206,7 @@ python systemd_populate_packages() {
systemd_check_services()
}
-PACKAGESPLITFUNCS:prepend = "systemd_populate_packages "
+PACKAGESPLITFUNCS =+ "systemd_populate_packages"
python rm_systemd_unitdir (){
import shutil
diff --git a/meta/classes/testexport.bbclass b/meta/classes-recipe/testexport.bbclass
index 1b0fb44a4a..572f5d9e76 100644
--- a/meta/classes/testexport.bbclass
+++ b/meta/classes-recipe/testexport.bbclass
@@ -1,19 +1,20 @@
# Copyright (C) 2016 Intel Corporation
#
-# Released under the MIT license (see COPYING.MIT)
-#
+# SPDX-License-Identifier: MIT
#
# testexport.bbclass allows to execute runtime test outside OE environment.
# Most of the tests are commands run on target image over ssh.
# To use it add testexport to global inherit and call your target image with -c testexport
# You can try it out like this:
# - First build an image. i.e. core-image-sato
-# - Add INHERIT += "testexport" in local.conf
+# - Add IMAGE_CLASSES += "testexport" in local.conf
# - Then bitbake core-image-sato -c testexport. That will generate the directory structure
# to execute the runtime tests using runexported.py.
#
# For more information on TEST_SUITES check testimage class.
+inherit testimage
+
TEST_LOG_DIR ?= "${WORKDIR}/testexport"
TEST_EXPORT_DIR ?= "${TMPDIR}/testexport/${PN}"
TEST_EXPORT_PACKAGED_DIR ?= "packages/packaged"
@@ -23,10 +24,9 @@ TEST_TARGET ?= "simpleremote"
TEST_TARGET_IP ?= ""
TEST_SERVER_IP ?= ""
-TEST_EXPORT_SDK_PACKAGES ?= ""
+require conf/testexport.conf
+
TEST_EXPORT_SDK_ENABLED ?= "0"
-TEST_EXPORT_SDK_NAME ?= "testexport-tools-nativesdk"
-TEST_EXPORT_SDK_DIR ?= "sdk"
TEST_EXPORT_DEPENDS = ""
TEST_EXPORT_DEPENDS += "${@bb.utils.contains('IMAGE_PKGTYPE', 'rpm', 'cpio-native:do_populate_sysroot', '', d)}"
@@ -61,16 +61,12 @@ def testexport_main(d):
d.getVar("TEST_TARGET"), None, d.getVar("TEST_TARGET_IP"),
d.getVar("TEST_SERVER_IP"))
- host_dumper = OERuntimeTestContextExecutor.getHostDumper(
- d.getVar("testimage_dump_host"), d.getVar("TESTIMAGE_DUMP_DIR"))
-
image_manifest = "%s.manifest" % image_name
image_packages = OERuntimeTestContextExecutor.readPackagesManifest(image_manifest)
extract_dir = d.getVar("TEST_EXTRACTED_DIR")
- tc = OERuntimeTestContext(td, logger, target, host_dumper,
- image_packages, extract_dir)
+ tc = OERuntimeTestContext(td, logger, target, image_packages, extract_dir)
copy_needed_files(d, tc)
@@ -178,5 +174,3 @@ def testexport_create_tarball(d, tar_name, src_dir):
tar.add(base_name)
tar.close()
os.chdir(current_dir)
-
-inherit testimage
diff --git a/meta/classes/testimage.bbclass b/meta/classes-recipe/testimage.bbclass
index 8ffaeab284..ed0d87b7a7 100644
--- a/meta/classes/testimage.bbclass
+++ b/meta/classes-recipe/testimage.bbclass
@@ -1,6 +1,6 @@
# Copyright (C) 2013 Intel Corporation
#
-# Released under the MIT license (see COPYING.MIT)
+# SPDX-License-Identifier: MIT
inherit metadata_scm
inherit image-artifact-names
@@ -18,6 +18,18 @@ inherit image-artifact-names
TESTIMAGE_AUTO ??= "0"
+# When any test fails, TESTIMAGE_FAILED_QA ARTIFACTS will be parsed and for
+# each entry in it, if artifact pointed by path description exists on target,
+# it will be retrieved onto host
+
+TESTIMAGE_FAILED_QA_ARTIFACTS = "\
+ ${localstatedir}/log \
+ ${sysconfdir}/version \
+ ${sysconfdir}/os-release"
+
+# If some ptests are run and fail, retrieve corresponding directories
+TESTIMAGE_FAILED_QA_ARTIFACTS += "${@bb.utils.contains('DISTRO_FEATURES', 'ptest', '${libdir}/${MCNAME}/ptest', '', d)}"
+
# You can set (or append to) TEST_SUITES in local.conf to select the tests
# which you want to run for your target.
# The test names are the module names in meta/lib/oeqa/runtime/cases.
@@ -98,34 +110,7 @@ TESTIMAGELOCK:qemuall = ""
TESTIMAGE_DUMP_DIR ?= "${LOG_DIR}/runtime-hostdump/"
-TESTIMAGE_UPDATE_VARS ?= "DL_DIR WORKDIR DEPLOY_DIR"
-
-testimage_dump_target () {
- top -bn1
- ps
- free
- df
- # The next command will export the default gateway IP
- export DEFAULT_GATEWAY=$(ip route | awk '/default/ { print $3}')
- ping -c3 $DEFAULT_GATEWAY
- dmesg
- netstat -an
- ip address
- # Next command will dump logs from /var/log/
- find /var/log/ -type f 2>/dev/null -exec echo "====================" \; -exec echo {} \; -exec echo "====================" \; -exec cat {} \; -exec echo "" \;
-}
-
-testimage_dump_host () {
- top -bn1
- iostat -x -z -N -d -p ALL 20 2
- ps -ef
- free
- df
- memstat
- dmesg
- ip -s link
- netstat -an
-}
+TESTIMAGE_UPDATE_VARS ?= "DL_DIR WORKDIR DEPLOY_DIR_IMAGE IMAGE_LINK_NAME"
testimage_dump_monitor () {
query-status
@@ -164,13 +149,6 @@ def get_testimage_configuration(d, test_type, machine):
return configuration
get_testimage_configuration[vardepsexclude] = "DATETIME"
-def get_testimage_json_result_dir(d):
- json_result_dir = os.path.join(d.getVar("LOG_DIR"), 'oeqa')
- custom_json_result_dir = d.getVar("OEQA_JSON_RESULT_DIR")
- if custom_json_result_dir:
- json_result_dir = custom_json_result_dir
- return json_result_dir
-
def get_testimage_result_id(configuration):
return '%s_%s_%s_%s' % (configuration['TEST_TYPE'], configuration['IMAGE_BASENAME'], configuration['MACHINE'], configuration['STARTTIME'])
@@ -189,14 +167,9 @@ def get_testimage_boot_patterns(d):
search_login_succeeded,search_cmd_finished\n Make sure your TESTIMAGE_BOOT_PATTERNS=%s \
contains an accepted flag.' % d.getVar('TESTIMAGE_BOOT_PATTERNS'))
return
- # We know boot prompt is searched through in binary format, others might be expressions
- if flag == 'search_reached_prompt':
- boot_patterns[flag] = flagval.encode()
- else:
- boot_patterns[flag] = flagval.encode().decode('unicode-escape')
+ boot_patterns[flag] = flagval.encode().decode('unicode-escape')
return boot_patterns
-
def testimage_main(d):
import os
import json
@@ -210,6 +183,8 @@ def testimage_main(d):
from oeqa.core.target.qemu import supported_fstypes
from oeqa.core.utils.test import getSuiteCases
from oeqa.utils import make_logger_bitbake_compatible
+ from oeqa.utils import get_json_result_dir
+ from oeqa.utils.postactions import run_failed_tests_post_actions
def sigterm_exception(signum, stackframe):
"""
@@ -240,12 +215,13 @@ def testimage_main(d):
with open(tdname, "r") as f:
td = json.load(f)
except FileNotFoundError as err:
- bb.fatal('File %s not found (%s).\nHave you built the image with INHERIT += "testimage" in the conf/local.conf?' % (tdname, err))
+ bb.fatal('File %s not found (%s).\nHave you built the image with IMAGE_CLASSES += "testimage" in the conf/local.conf?' % (tdname, err))
# Some variables need to be updates (mostly paths) with the
# ones of the current environment because some tests require them.
for var in d.getVar('TESTIMAGE_UPDATE_VARS').split():
td[var] = d.getVar(var)
+ td['ORIGPATH'] = d.getVar("BB_ORIGENV").getVar("PATH")
image_manifest = "%s.manifest" % image_name
image_packages = OERuntimeTestContextExecutor.readPackagesManifest(image_manifest)
@@ -296,7 +272,7 @@ def testimage_main(d):
ovmf = d.getVar("QEMU_USE_OVMF")
slirp = False
- if d.getVar("QEMU_USE_SLIRP"):
+ if bb.utils.contains('TEST_RUNQEMUPARAMS', 'slirp', True, False, d):
slirp = True
# TODO: We use the current implementation of qemu runner because of
@@ -326,7 +302,6 @@ def testimage_main(d):
target_kwargs['serialcontrol_cmd'] = d.getVar("TEST_SERIALCONTROL_CMD") or None
target_kwargs['serialcontrol_extra_args'] = d.getVar("TEST_SERIALCONTROL_EXTRA_ARGS") or ""
target_kwargs['testimage_dump_monitor'] = d.getVar("testimage_dump_monitor") or ""
- target_kwargs['testimage_dump_target'] = d.getVar("testimage_dump_target") or ""
def export_ssh_agent(d):
import os
@@ -343,19 +318,24 @@ def testimage_main(d):
# runtime use network for download projects for build
export_proxies(d)
- # we need the host dumper in test context
- host_dumper = OERuntimeTestContextExecutor.getHostDumper(
- d.getVar("testimage_dump_host"),
- d.getVar("TESTIMAGE_DUMP_DIR"))
+ if slirp:
+ # Default to 127.0.0.1 and let the runner identify the port forwarding
+ # (as OEQemuTarget does), but allow overriding.
+ target_ip = d.getVar("TEST_TARGET_IP") or "127.0.0.1"
+ # Default to 10.0.2.2 as this is the IP that the guest has with the
+ # default qemu slirp networking configuration, but allow overriding.
+ server_ip = d.getVar("TEST_SERVER_IP") or "10.0.2.2"
+ else:
+ target_ip = d.getVar("TEST_TARGET_IP")
+ server_ip = d.getVar("TEST_SERVER_IP")
# the robot dance
target = OERuntimeTestContextExecutor.getTarget(
- d.getVar("TEST_TARGET"), logger, d.getVar("TEST_TARGET_IP"),
- d.getVar("TEST_SERVER_IP"), **target_kwargs)
+ d.getVar("TEST_TARGET"), logger, target_ip,
+ server_ip, **target_kwargs)
# test context
- tc = OERuntimeTestContext(td, logger, target, host_dumper,
- image_packages, extract_dir)
+ tc = OERuntimeTestContext(td, logger, target, image_packages, extract_dir)
# Load tests before starting the target
test_paths = get_runtime_paths(d)
@@ -387,6 +367,8 @@ def testimage_main(d):
pass
results = tc.runTests()
complete = True
+ if results.hasAnyFailingTest():
+ run_failed_tests_post_actions(d, tc)
except (KeyboardInterrupt, BlockingIOError) as err:
if isinstance(err, KeyboardInterrupt):
bb.error('testimage interrupted, shutting down...')
@@ -402,14 +384,14 @@ def testimage_main(d):
# Show results (if we have them)
if results:
configuration = get_testimage_configuration(d, 'runtime', machine)
- results.logDetails(get_testimage_json_result_dir(d),
+ results.logDetails(get_json_result_dir(d),
configuration,
get_testimage_result_id(configuration),
dump_streams=d.getVar('TESTREPORT_FULLLOGS'))
results.logSummary(pn)
# Copy additional logs to tmp/log/oeqa so it's easier to find them
- targetdir = os.path.join(get_testimage_json_result_dir(d), d.getVar("PN"))
+ targetdir = os.path.join(get_json_result_dir(d), d.getVar("PN"))
os.makedirs(targetdir, exist_ok=True)
os.symlink(bootlog, os.path.join(targetdir, os.path.basename(bootlog)))
os.symlink(d.getVar("BB_LOGFILE"), os.path.join(targetdir, os.path.basename(d.getVar("BB_LOGFILE") + "." + d.getVar('DATETIME'))))
@@ -472,7 +454,7 @@ def create_rpm_index(d):
package_list = glob.glob(idx_path + "*/*.rpm")
for pkg in package_list:
- if not os.path.basename(pkg).startswith(("rpm", "run-postinsts", "busybox", "bash", "update-alternatives", "libc6", "curl", "musl")):
+ if not os.path.basename(pkg).startswith(("dnf-test-", "busybox", "update-alternatives", "libc6", "musl")):
bb.utils.remove(pkg)
bb.utils.unlockfile(lf)
diff --git a/meta/classes/testsdk.bbclass b/meta/classes-recipe/testsdk.bbclass
index 8b2e74f606..fd82e6ef41 100644
--- a/meta/classes/testsdk.bbclass
+++ b/meta/classes-recipe/testsdk.bbclass
@@ -1,6 +1,6 @@
# Copyright (C) 2013 - 2016 Intel Corporation
#
-# Released under the MIT license (see COPYING.MIT)
+# SPDX-License-Identifier: MIT
# testsdk.bbclass enables testing for SDK and Extensible SDK
#
diff --git a/meta/classes/texinfo.bbclass b/meta/classes-recipe/texinfo.bbclass
index 68c9d4fb70..380247faf5 100644
--- a/meta/classes/texinfo.bbclass
+++ b/meta/classes-recipe/texinfo.bbclass
@@ -1,3 +1,9 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
# This class is inherited by recipes whose upstream packages invoke the
# texinfo utilities at build-time. Native and cross recipes are made to use the
# dummy scripts provided by texinfo-dummy-native, for improved performance.
diff --git a/meta/classes/toolchain-scripts-base.bbclass b/meta/classes-recipe/toolchain-scripts-base.bbclass
index 2489b9dbeb..d24a986e02 100644
--- a/meta/classes/toolchain-scripts-base.bbclass
+++ b/meta/classes-recipe/toolchain-scripts-base.bbclass
@@ -1,3 +1,9 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
#This function create a version information file
toolchain_create_sdk_version () {
local versionfile=$1
diff --git a/meta/classes/toolchain-scripts.bbclass b/meta/classes-recipe/toolchain-scripts.bbclass
index 1d7c703748..6bfe0b6de0 100644
--- a/meta/classes/toolchain-scripts.bbclass
+++ b/meta/classes-recipe/toolchain-scripts.bbclass
@@ -1,3 +1,9 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
inherit toolchain-scripts-base siteinfo kernel-arch
# We want to be able to change the value of MULTIMACH_TARGET_SYS, because it
@@ -31,7 +37,7 @@ toolchain_create_sdk_env_script () {
echo '# http://tldp.org/HOWTO/Program-Library-HOWTO/shared-libraries.html#AEN80' >> $script
echo '# http://xahlee.info/UnixResource_dir/_/ldpath.html' >> $script
echo '# Only disable this check if you are absolutely know what you are doing!' >> $script
- echo 'if [ ! -z "$LD_LIBRARY_PATH" ]; then' >> $script
+ echo 'if [ ! -z "${LD_LIBRARY_PATH:-}" ]; then' >> $script
echo " echo \"Your environment is misconfigured, you probably need to 'unset LD_LIBRARY_PATH'\"" >> $script
echo " echo \"but please check why this was set in the first place and that it's safe to unset.\"" >> $script
echo ' echo "The SDK will not operate correctly in most cases when LD_LIBRARY_PATH is set."' >> $script
@@ -47,7 +53,7 @@ toolchain_create_sdk_env_script () {
for i in ${CANADIANEXTRAOS}; do
EXTRAPATH="$EXTRAPATH:$sdkpathnative$bindir/${TARGET_ARCH}${TARGET_VENDOR}-$i"
done
- echo "export PATH=$sdkpathnative$bindir:$sdkpathnative$sbindir:$sdkpathnative$base_bindir:$sdkpathnative$base_sbindir:$sdkpathnative$bindir/../${HOST_SYS}/bin:$sdkpathnative$bindir/${TARGET_SYS}"$EXTRAPATH':$PATH' >> $script
+ echo "export PATH=$sdkpathnative$bindir:$sdkpathnative$sbindir:$sdkpathnative$base_bindir:$sdkpathnative$base_sbindir:$sdkpathnative$bindir/../${HOST_SYS}/bin:$sdkpathnative$bindir/${TARGET_SYS}"$EXTRAPATH':"$PATH"' >> $script
echo 'export PKG_CONFIG_SYSROOT_DIR=$SDKTARGETSYSROOT' >> $script
echo 'export PKG_CONFIG_PATH=$SDKTARGETSYSROOT'"$libdir"'/pkgconfig:$SDKTARGETSYSROOT'"$prefix"'/share/pkgconfig' >> $script
echo 'export CONFIG_SITE=${SDKPATH}/site-config-'"${multimach_target_sys}" >> $script
@@ -63,24 +69,46 @@ toolchain_create_sdk_env_script () {
toolchain_shared_env_script
}
-# This function creates an environment-setup-script in the TMPDIR which enables
+# This function creates an environment-setup-script in B which enables
# a OE-core IDE to integrate with the build tree
# Caller must ensure CONFIG_SITE is setup
toolchain_create_tree_env_script () {
- script=${TMPDIR}/environment-setup-${REAL_MULTIMACH_TARGET_SYS}
+ script=${B}/environment-setup-${REAL_MULTIMACH_TARGET_SYS}
rm -f $script
touch $script
+ echo 'standalone_sysroot_target="${STAGING_DIR}/${MACHINE}"' >> $script
+ echo 'standalone_sysroot_native="${STAGING_DIR}/${BUILD_ARCH}"' >> $script
echo 'orig=`pwd`; cd ${COREBASE}; . ./oe-init-build-env ${TOPDIR}; cd $orig' >> $script
- echo 'export PATH=${STAGING_DIR_NATIVE}/usr/bin:${STAGING_BINDIR_TOOLCHAIN}:$PATH' >> $script
- echo 'export PKG_CONFIG_SYSROOT_DIR=${PKG_CONFIG_SYSROOT_DIR}' >> $script
- echo 'export PKG_CONFIG_PATH=${PKG_CONFIG_PATH}' >> $script
+ echo 'export PATH=$standalone_sysroot_native/${bindir_native}:$standalone_sysroot_native/${bindir_native}/${TARGET_SYS}:$PATH' >> $script
+ echo 'export PKG_CONFIG_SYSROOT_DIR=$standalone_sysroot_target' >> $script
+ echo 'export PKG_CONFIG_PATH=$standalone_sysroot_target'"$libdir"'/pkgconfig:$standalone_sysroot_target'"$prefix"'/share/pkgconfig' >> $script
echo 'export CONFIG_SITE="${CONFIG_SITE}"' >> $script
- echo 'export SDKTARGETSYSROOT=${STAGING_DIR_TARGET}' >> $script
- echo 'export OECORE_NATIVE_SYSROOT="${STAGING_DIR_NATIVE}"' >> $script
- echo 'export OECORE_TARGET_SYSROOT="${STAGING_DIR_TARGET}"' >> $script
- echo 'export OECORE_ACLOCAL_OPTS="-I ${STAGING_DIR_NATIVE}/usr/share/aclocal"' >> $script
+ echo 'export SDKTARGETSYSROOT=$standalone_sysroot_target' >> $script
+ echo 'export OECORE_NATIVE_SYSROOT=$standalone_sysroot_native' >> $script
+ echo 'export OECORE_TARGET_SYSROOT=$standalone_sysroot_target' >> $script
+ echo 'export OECORE_ACLOCAL_OPTS="-I $standalone_sysroot_native/usr/share/aclocal"' >> $script
+ echo 'export OECORE_BASELIB="${baselib}"' >> $script
+ echo 'export OECORE_TARGET_ARCH="${TARGET_ARCH}"' >>$script
+ echo 'export OECORE_TARGET_OS="${TARGET_OS}"' >>$script
toolchain_shared_env_script
+
+ cat >> $script <<EOF
+
+if [ -d "\$OECORE_NATIVE_SYSROOT/${datadir}/post-relocate-setup.d/" ]; then
+ for s in \$OECORE_NATIVE_SYSROOT/${datadir}/post-relocate-setup.d/*; do
+ if [ ! -x \$s ]; then
+ continue
+ fi
+ \$s "\$1"
+ status=\$?
+ if [ \$status != 0 ]; then
+ echo "post-relocate command \"\$s \$1\" failed with status \$status" >&2
+ exit \$status
+ fi
+ done
+fi
+EOF
}
toolchain_shared_env_script () {
diff --git a/meta/classes/uboot-config.bbclass b/meta/classes-recipe/uboot-config.bbclass
index b9ad35821a..e55fc38b7c 100644
--- a/meta/classes/uboot-config.bbclass
+++ b/meta/classes-recipe/uboot-config.bbclass
@@ -10,12 +10,18 @@
# UBOOT_MACHINE = "config"
#
# Copyright 2013, 2014 (C) O.S. Systems Software LTDA.
+#
+# SPDX-License-Identifier: MIT
+
def removesuffix(s, suffix):
if suffix and s.endswith(suffix):
return s[:-len(suffix)]
return s
+UBOOT_ENTRYPOINT ?= "20008000"
+UBOOT_LOADADDRESS ?= "${UBOOT_ENTRYPOINT}"
+
# Some versions of u-boot use .bin and others use .img. By default use .bin
# but enable individual recipes to change this value.
UBOOT_SUFFIX ??= "bin"
@@ -59,10 +65,6 @@ UBOOT_ENV_BINARY ?= "${UBOOT_ENV}.${UBOOT_ENV_SUFFIX}"
UBOOT_ENV_IMAGE ?= "${UBOOT_ENV}-${MACHINE}-${PV}-${PR}.${UBOOT_ENV_SUFFIX}"
UBOOT_ENV_SYMLINK ?= "${UBOOT_ENV}-${MACHINE}.${UBOOT_ENV_SUFFIX}"
-# Default name of u-boot initial env, but enable individual recipes to change
-# this value.
-UBOOT_INITIAL_ENV ?= "${PN}-initial-env"
-
# U-Boot EXTLINUX variables. U-Boot searches for /boot/extlinux/extlinux.conf
# to find EXTLINUX conf file.
UBOOT_EXTLINUX_INSTALL_DIR ?= "/boot/extlinux"
@@ -77,6 +79,9 @@ SPL_MKIMAGE_DTCOPTS ??= ""
UBOOT_MKIMAGE ?= "uboot-mkimage"
UBOOT_MKIMAGE_SIGN ?= "${UBOOT_MKIMAGE}"
+# Signature activation - this requires KERNEL_IMAGETYPE = "fitImage"
+UBOOT_SIGN_ENABLE ?= "0"
+
# Arguments passed to mkimage for signing
UBOOT_MKIMAGE_SIGN_ARGS ?= ""
SPL_MKIMAGE_SIGN_ARGS ?= ""
@@ -85,6 +90,9 @@ SPL_MKIMAGE_SIGN_ARGS ?= ""
UBOOT_DTB ?= ""
UBOOT_DTB_BINARY ??= ""
+# uboot-fit_check_sign command
+UBOOT_FIT_CHECK_SIGN ?= "uboot-fit_check_sign"
+
python () {
ubootmachine = d.getVar("UBOOT_MACHINE")
ubootconfigflags = d.getVarFlags('UBOOT_CONFIG')
@@ -109,8 +117,10 @@ python () {
if len(ubootconfig) > 0:
for config in ubootconfig:
+ found = False
for f, v in ubootconfigflags.items():
if config == f:
+ found = True
items = v.split(',')
if items[0] and len(items) > 3:
raise bb.parse.SkipRecipe('Only config,images,binary can be specified!')
@@ -126,4 +136,13 @@ python () {
bb.debug(1, "Appending '%s' to UBOOT_BINARIES." % ubootbinary)
d.appendVar('UBOOT_BINARIES', ' ' + ubootbinary)
break
+
+ if not found:
+ raise bb.parse.SkipRecipe("The selected UBOOT_CONFIG key %s has no match in %s." % (ubootconfig, ubootconfigflags.keys()))
+
+ if len(ubootconfig) == 1:
+ d.setVar('KCONFIG_CONFIG_ROOTDIR', os.path.join(d.getVar("B"), d.getVar("UBOOT_MACHINE").strip()))
+ else:
+ # Disable menuconfig for multiple configs
+ d.setVar('KCONFIG_CONFIG_ENABLE_MENUCONFIG', "false")
}
diff --git a/meta/classes/uboot-extlinux-config.bbclass b/meta/classes-recipe/uboot-extlinux-config.bbclass
index dcebe7ff31..0413e760bd 100644
--- a/meta/classes/uboot-extlinux-config.bbclass
+++ b/meta/classes-recipe/uboot-extlinux-config.bbclass
@@ -6,6 +6,8 @@
#
# External variables:
#
+# UBOOT_EXTLINUX - Set to "1" to enable generation
+# of extlinux.conf using this class.
# UBOOT_EXTLINUX_CONSOLE - Set to "console=ttyX" to change kernel boot
# default console.
# UBOOT_EXTLINUX_LABELS - A list of targets for the automatic config.
@@ -20,7 +22,10 @@
# UBOOT_EXTLINUX_TIMEOUT - Timeout before DEFAULT selection is made.
# Measured in 1/10 of a second.
# UBOOT_EXTLINUX_DEFAULT_LABEL - Target to be selected by default after
-# the timeout period
+# the timeout period.
+# UBOOT_EXTLINUX_MENU_TITLE - Menu title. If empty, MENU TITLE entry
+# will not be added to the output file.
+# UBOOT_EXTLINUX_CONFIG - Output file.
#
# If there's only one label system will boot automatically and menu won't be
# created. If you want to use more than one labels, e.g linux and alternate,
@@ -33,11 +38,11 @@
# UBOOT_EXTLINUX_DEFAULT_LABEL ??= "Linux Default"
# UBOOT_EXTLINUX_TIMEOUT ??= "30"
#
-# UBOOT_EXTLINUX_KERNEL_IMAGE_default ??= "../zImage"
-# UBOOT_EXTLINUX_MENU_DESCRIPTION_default ??= "Linux Default"
+# UBOOT_EXTLINUX_KERNEL_IMAGE:default ??= "../zImage"
+# UBOOT_EXTLINUX_MENU_DESCRIPTION:default ??= "Linux Default"
#
-# UBOOT_EXTLINUX_KERNEL_IMAGE_fallback ??= "../zImage-fallback"
-# UBOOT_EXTLINUX_MENU_DESCRIPTION_fallback ??= "Linux Fallback"
+# UBOOT_EXTLINUX_KERNEL_IMAGE:fallback ??= "../zImage-fallback"
+# UBOOT_EXTLINUX_MENU_DESCRIPTION:fallback ??= "Linux Fallback"
#
# Results:
#
@@ -54,7 +59,7 @@
# APPEND root=/dev/mmcblk2p2 rootwait rw console=${console}
#
# Copyright (C) 2016, O.S. Systems Software LTDA. All Rights Reserved
-# Released under the MIT license (see packages/COPYING)
+# SPDX-License-Identifier: MIT
#
# The kernel has an internal default console, which you can override with
# a console=...some_tty...
@@ -65,6 +70,7 @@ UBOOT_EXTLINUX_FDTDIR ??= "../"
UBOOT_EXTLINUX_KERNEL_IMAGE ??= "../${KERNEL_IMAGETYPE}"
UBOOT_EXTLINUX_KERNEL_ARGS ??= "rootwait rw"
UBOOT_EXTLINUX_MENU_DESCRIPTION:linux ??= "${DISTRO_NAME}"
+UBOOT_EXTLINUX_MENU_TITLE ??= "Select the boot mode"
UBOOT_EXTLINUX_CONFIG = "${B}/extlinux.conf"
@@ -92,10 +98,11 @@ python do_create_extlinux_config() {
with open(cfile, 'w') as cfgfile:
cfgfile.write('# Generic Distro Configuration file generated by OpenEmbedded\n')
- if len(labels.split()) > 1:
- cfgfile.write('menu title Select the boot mode\n')
+ menu_title = localdata.getVar('UBOOT_EXTLINUX_MENU_TITLE')
+ if len(labels.split()) > 1 and menu_title:
+ cfgfile.write('MENU TITLE %s\n' % (menu_title))
- timeout = localdata.getVar('UBOOT_EXTLINUX_TIMEOUT')
+ timeout = localdata.getVar('UBOOT_EXTLINUX_TIMEOUT')
if timeout:
cfgfile.write('TIMEOUT %s\n' % (timeout))
@@ -152,7 +159,7 @@ python do_create_extlinux_config() {
bb.fatal('Unable to open %s' % (cfile))
}
UBOOT_EXTLINUX_VARS = "CONSOLE MENU_DESCRIPTION ROOT KERNEL_IMAGE FDTDIR FDT KERNEL_ARGS INITRD"
-do_create_extlinux_config[vardeps] += "${@' '.join(['UBOOT_EXTLINUX_%s_%s' % (v, l) for v in d.getVar('UBOOT_EXTLINUX_VARS').split() for l in d.getVar('UBOOT_EXTLINUX_LABELS').split()])}"
+do_create_extlinux_config[vardeps] += "${@' '.join(['UBOOT_EXTLINUX_%s:%s' % (v, l) for v in d.getVar('UBOOT_EXTLINUX_VARS').split() for l in d.getVar('UBOOT_EXTLINUX_LABELS').split()])}"
do_create_extlinux_config[vardepsexclude] += "OVERRIDES"
addtask create_extlinux_config before do_install do_deploy after do_compile
diff --git a/meta/classes-recipe/uboot-sign.bbclass b/meta/classes-recipe/uboot-sign.bbclass
new file mode 100644
index 0000000000..7a0b8047e4
--- /dev/null
+++ b/meta/classes-recipe/uboot-sign.bbclass
@@ -0,0 +1,466 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+# This file is part of U-Boot verified boot support and is intended to be
+# inherited from the u-boot recipe.
+#
+# The signature procedure requires the user to generate an RSA key and
+# certificate in a directory and to define the following variable:
+#
+# UBOOT_SIGN_KEYDIR = "/keys/directory"
+# UBOOT_SIGN_KEYNAME = "dev" # keys name in keydir (eg. "dev.crt", "dev.key")
+# UBOOT_MKIMAGE_DTCOPTS = "-I dts -O dtb -p 2000"
+# UBOOT_SIGN_ENABLE = "1"
+#
+# As verified boot depends on fitImage generation, following is also required:
+#
+# KERNEL_CLASSES ?= " kernel-fitimage "
+# KERNEL_IMAGETYPE ?= "fitImage"
+#
+# The signature support is limited to the use of CONFIG_OF_SEPARATE in U-Boot.
+#
+# For more details on signature process, please refer to U-Boot documentation.
+
+# We need some variables from u-boot-config
+inherit uboot-config
+
+# Enable use of a U-Boot fitImage
+UBOOT_FITIMAGE_ENABLE ?= "0"
+
+# Signature activation - this requires UBOOT_FITIMAGE_ENABLE = "1"
+SPL_SIGN_ENABLE ?= "0"
+
+# Default value for deployment filenames.
+UBOOT_DTB_IMAGE ?= "u-boot-${MACHINE}-${PV}-${PR}.dtb"
+UBOOT_DTB_BINARY ?= "u-boot.dtb"
+UBOOT_DTB_SIGNED ?= "${UBOOT_DTB_BINARY}-signed"
+UBOOT_DTB_SYMLINK ?= "u-boot-${MACHINE}.dtb"
+UBOOT_NODTB_IMAGE ?= "u-boot-nodtb-${MACHINE}-${PV}-${PR}.bin"
+UBOOT_NODTB_BINARY ?= "u-boot-nodtb.bin"
+UBOOT_NODTB_SYMLINK ?= "u-boot-nodtb-${MACHINE}.bin"
+UBOOT_ITS_IMAGE ?= "u-boot-its-${MACHINE}-${PV}-${PR}"
+UBOOT_ITS ?= "u-boot.its"
+UBOOT_ITS_SYMLINK ?= "u-boot-its-${MACHINE}"
+UBOOT_FITIMAGE_IMAGE ?= "u-boot-fitImage-${MACHINE}-${PV}-${PR}"
+UBOOT_FITIMAGE_BINARY ?= "u-boot-fitImage"
+UBOOT_FITIMAGE_SYMLINK ?= "u-boot-fitImage-${MACHINE}"
+SPL_DIR ?= "spl"
+SPL_DTB_IMAGE ?= "u-boot-spl-${MACHINE}-${PV}-${PR}.dtb"
+SPL_DTB_BINARY ?= "u-boot-spl.dtb"
+SPL_DTB_SIGNED ?= "${SPL_DTB_BINARY}-signed"
+SPL_DTB_SYMLINK ?= "u-boot-spl-${MACHINE}.dtb"
+SPL_NODTB_IMAGE ?= "u-boot-spl-nodtb-${MACHINE}-${PV}-${PR}.bin"
+SPL_NODTB_BINARY ?= "u-boot-spl-nodtb.bin"
+SPL_NODTB_SYMLINK ?= "u-boot-spl-nodtb-${MACHINE}.bin"
+
+# U-Boot fitImage description
+UBOOT_FIT_DESC ?= "U-Boot fitImage for ${DISTRO_NAME}/${PV}/${MACHINE}"
+
+# U-Boot fitImage Hash Algo
+UBOOT_FIT_HASH_ALG ?= "sha256"
+
+# U-Boot fitImage Signature Algo
+UBOOT_FIT_SIGN_ALG ?= "rsa2048"
+
+# Generate keys for signing U-Boot fitImage
+UBOOT_FIT_GENERATE_KEYS ?= "0"
+
+# Size of private keys in number of bits
+UBOOT_FIT_SIGN_NUMBITS ?= "2048"
+
+# args to openssl genrsa (Default is just the public exponent)
+UBOOT_FIT_KEY_GENRSA_ARGS ?= "-F4"
+
+# args to openssl req (Default is -batch for non interactive mode and
+# -new for new certificate)
+UBOOT_FIT_KEY_REQ_ARGS ?= "-batch -new"
+
+# Standard format for public key certificate
+UBOOT_FIT_KEY_SIGN_PKCS ?= "-x509"
+
+# length of address in number of <u32> cells
+# ex: 1 32bits address, 2 64bits address
+UBOOT_FIT_ADDRESS_CELLS ?= "1"
+
+# This is only necessary for determining the signing configuration
+KERNEL_PN = "${PREFERRED_PROVIDER_virtual/kernel}"
+
+UBOOT_FIT_UBOOT_LOADADDRESS ?= "${UBOOT_LOADADDRESS}"
+UBOOT_FIT_UBOOT_ENTRYPOINT ?= "${UBOOT_ENTRYPOINT}"
+
+python() {
+ # We need u-boot-tools-native if we're creating a U-Boot fitImage
+ sign = d.getVar('UBOOT_SIGN_ENABLE') == '1'
+ if d.getVar('UBOOT_FITIMAGE_ENABLE') == '1' or sign:
+ d.appendVar('DEPENDS', " u-boot-tools-native dtc-native")
+ if sign:
+ d.appendVar('DEPENDS', " " + d.getVar('KERNEL_PN'))
+}
+
+concat_dtb() {
+ type="$1"
+ binary="$2"
+
+ if [ -e "${UBOOT_DTB_BINARY}" ]; then
+ # Re-sign the kernel in order to add the keys to our dtb
+ ${UBOOT_MKIMAGE_SIGN} \
+ ${@'-D "${UBOOT_MKIMAGE_DTCOPTS}"' if len('${UBOOT_MKIMAGE_DTCOPTS}') else ''} \
+ -F -k "${UBOOT_SIGN_KEYDIR}" \
+ -K "${UBOOT_DTB_BINARY}" \
+ -r ${B}/fitImage-linux \
+ ${UBOOT_MKIMAGE_SIGN_ARGS}
+ # Verify the kernel image and u-boot dtb
+ ${UBOOT_FIT_CHECK_SIGN} \
+ -k "${UBOOT_DTB_BINARY}" \
+ -f ${B}/fitImage-linux
+ cp ${UBOOT_DTB_BINARY} ${UBOOT_DTB_SIGNED}
+ fi
+
+ # If we're not using a signed u-boot fit, concatenate SPL w/o DTB & U-Boot DTB
+ # with public key (otherwise U-Boot will be packaged by uboot_fitimage_assemble)
+ if [ "${SPL_SIGN_ENABLE}" != "1" ] ; then
+ if [ "x${UBOOT_SUFFIX}" = "ximg" -o "x${UBOOT_SUFFIX}" = "xrom" ] && \
+ [ -e "${UBOOT_DTB_BINARY}" ]; then
+ oe_runmake EXT_DTB="${UBOOT_DTB_SIGNED}" ${UBOOT_MAKE_TARGET}
+ if [ -n "${binary}" ]; then
+ cp ${binary} ${UBOOT_BINARYNAME}-${type}.${UBOOT_SUFFIX}
+ fi
+ elif [ -e "${UBOOT_NODTB_BINARY}" -a -e "${UBOOT_DTB_BINARY}" ]; then
+ if [ -n "${binary}" ]; then
+ cat ${UBOOT_NODTB_BINARY} ${UBOOT_DTB_SIGNED} | tee ${binary} > \
+ ${UBOOT_BINARYNAME}-${type}.${UBOOT_SUFFIX}
+ else
+ cat ${UBOOT_NODTB_BINARY} ${UBOOT_DTB_SIGNED} > ${UBOOT_BINARY}
+ fi
+ else
+ bbwarn "Failure while adding public key to u-boot binary. Verified boot won't be available."
+ fi
+ fi
+}
+
+deploy_dtb() {
+ type="$1"
+
+ if [ -n "${type}" ]; then
+ uboot_dtb_binary="u-boot-${type}-${PV}-${PR}.dtb"
+ uboot_nodtb_binary="u-boot-nodtb-${type}-${PV}-${PR}.bin"
+ else
+ uboot_dtb_binary="${UBOOT_DTB_IMAGE}"
+ uboot_nodtb_binary="${UBOOT_NODTB_IMAGE}"
+ fi
+
+ if [ -e "${UBOOT_DTB_SIGNED}" ]; then
+ install -Dm644 ${UBOOT_DTB_SIGNED} ${DEPLOYDIR}/${uboot_dtb_binary}
+ if [ -n "${type}" ]; then
+ ln -sf ${uboot_dtb_binary} ${DEPLOYDIR}/${UBOOT_DTB_IMAGE}
+ fi
+ fi
+
+ if [ -f "${UBOOT_NODTB_BINARY}" ]; then
+ install -Dm644 ${UBOOT_NODTB_BINARY} ${DEPLOYDIR}/${uboot_nodtb_binary}
+ if [ -n "${type}" ]; then
+ ln -sf ${uboot_nodtb_binary} ${DEPLOYDIR}/${UBOOT_NODTB_IMAGE}
+ fi
+ fi
+}
+
+concat_spl_dtb() {
+ if [ -e "${SPL_DIR}/${SPL_NODTB_BINARY}" -a -e "${SPL_DIR}/${SPL_DTB_BINARY}" ] ; then
+ cat ${SPL_DIR}/${SPL_NODTB_BINARY} ${SPL_DIR}/${SPL_DTB_SIGNED} > "${SPL_BINARY}"
+ else
+ bbwarn "Failure while adding public key to spl binary. Verified U-Boot boot won't be available."
+ fi
+}
+
+deploy_spl_dtb() {
+ type="$1"
+
+ if [ -n "${type}" ]; then
+ spl_dtb_binary="u-boot-spl-${type}-${PV}-${PR}.dtb"
+ spl_nodtb_binary="u-boot-spl-nodtb-${type}-${PV}-${PR}.bin"
+ else
+ spl_dtb_binary="${SPL_DTB_IMAGE}"
+ spl_nodtb_binary="${SPL_NODTB_IMAGE}"
+ fi
+
+ if [ -e "${SPL_DIR}/${SPL_DTB_SIGNED}" ] ; then
+ install -Dm644 ${SPL_DIR}/${SPL_DTB_SIGNED} ${DEPLOYDIR}/${spl_dtb_binary}
+ if [ -n "${type}" ]; then
+ ln -sf ${spl_dtb_binary} ${DEPLOYDIR}/${SPL_DTB_IMAGE}
+ fi
+ fi
+
+ if [ -f "${SPL_DIR}/${SPL_NODTB_BINARY}" ] ; then
+ install -Dm644 ${SPL_DIR}/${SPL_NODTB_BINARY} ${DEPLOYDIR}/${spl_nodtb_binary}
+ if [ -n "${type}" ]; then
+ ln -sf ${spl_nodtb_binary} ${DEPLOYDIR}/${SPL_NODTB_IMAGE}
+ fi
+ fi
+
+ # For backwards compatibility...
+ install -Dm644 ${SPL_BINARY} ${DEPLOYDIR}/${SPL_IMAGE}
+}
+
+do_uboot_generate_rsa_keys() {
+ if [ "${SPL_SIGN_ENABLE}" = "0" ] && [ "${UBOOT_FIT_GENERATE_KEYS}" = "1" ]; then
+ bbwarn "UBOOT_FIT_GENERATE_KEYS is set to 1 eventhough SPL_SIGN_ENABLE is set to 0. The keys will not be generated as they won't be used."
+ fi
+
+ if [ "${SPL_SIGN_ENABLE}" = "1" ] && [ "${UBOOT_FIT_GENERATE_KEYS}" = "1" ]; then
+
+ # Generate keys only if they don't already exist
+ if [ ! -f "${SPL_SIGN_KEYDIR}/${SPL_SIGN_KEYNAME}".key ] || \
+ [ ! -f "${SPL_SIGN_KEYDIR}/${SPL_SIGN_KEYNAME}".crt ]; then
+
+ # make directory if it does not already exist
+ mkdir -p "${SPL_SIGN_KEYDIR}"
+
+ echo "Generating RSA private key for signing U-Boot fitImage"
+ openssl genrsa ${UBOOT_FIT_KEY_GENRSA_ARGS} -out \
+ "${SPL_SIGN_KEYDIR}/${SPL_SIGN_KEYNAME}".key \
+ "${UBOOT_FIT_SIGN_NUMBITS}"
+
+ echo "Generating certificate for signing U-Boot fitImage"
+ openssl req ${UBOOT_FIT_KEY_REQ_ARGS} "${UBOOT_FIT_KEY_SIGN_PKCS}" \
+ -key "${SPL_SIGN_KEYDIR}/${SPL_SIGN_KEYNAME}".key \
+ -out "${SPL_SIGN_KEYDIR}/${SPL_SIGN_KEYNAME}".crt
+ fi
+ fi
+
+}
+
+addtask uboot_generate_rsa_keys before do_uboot_assemble_fitimage after do_compile
+
+# Create a ITS file for the U-boot FIT, for use when
+# we want to sign it so that the SPL can verify it
+uboot_fitimage_assemble() {
+ rm -f ${UBOOT_ITS} ${UBOOT_FITIMAGE_BINARY}
+
+ # First we create the ITS script
+ cat << EOF >> ${UBOOT_ITS}
+/dts-v1/;
+
+/ {
+ description = "${UBOOT_FIT_DESC}";
+ #address-cells = <${UBOOT_FIT_ADDRESS_CELLS}>;
+
+ images {
+ uboot {
+ description = "U-Boot image";
+ data = /incbin/("${UBOOT_NODTB_BINARY}");
+ type = "standalone";
+ os = "u-boot";
+ arch = "${UBOOT_ARCH}";
+ compression = "none";
+ load = <${UBOOT_FIT_UBOOT_LOADADDRESS}>;
+ entry = <${UBOOT_FIT_UBOOT_ENTRYPOINT}>;
+EOF
+
+ if [ "${SPL_SIGN_ENABLE}" = "1" ] ; then
+ cat << EOF >> ${UBOOT_ITS}
+ signature {
+ algo = "${UBOOT_FIT_HASH_ALG},${UBOOT_FIT_SIGN_ALG}";
+ key-name-hint = "${SPL_SIGN_KEYNAME}";
+ };
+EOF
+ fi
+
+ cat << EOF >> ${UBOOT_ITS}
+ };
+ fdt {
+ description = "U-Boot FDT";
+ data = /incbin/("${UBOOT_DTB_BINARY}");
+ type = "flat_dt";
+ arch = "${UBOOT_ARCH}";
+ compression = "none";
+EOF
+
+ if [ "${SPL_SIGN_ENABLE}" = "1" ] ; then
+ cat << EOF >> ${UBOOT_ITS}
+ signature {
+ algo = "${UBOOT_FIT_HASH_ALG},${UBOOT_FIT_SIGN_ALG}";
+ key-name-hint = "${SPL_SIGN_KEYNAME}";
+ };
+EOF
+ fi
+
+ cat << EOF >> ${UBOOT_ITS}
+ };
+ };
+
+ configurations {
+ default = "conf";
+ conf {
+ description = "Boot with signed U-Boot FIT";
+ loadables = "uboot";
+ fdt = "fdt";
+ };
+ };
+};
+EOF
+
+ #
+ # Assemble the U-boot FIT image
+ #
+ ${UBOOT_MKIMAGE} \
+ ${@'-D "${SPL_MKIMAGE_DTCOPTS}"' if len('${SPL_MKIMAGE_DTCOPTS}') else ''} \
+ -f ${UBOOT_ITS} \
+ ${UBOOT_FITIMAGE_BINARY}
+
+ if [ "${SPL_SIGN_ENABLE}" = "1" ] ; then
+ #
+ # Sign the U-boot FIT image and add public key to SPL dtb
+ #
+ ${UBOOT_MKIMAGE_SIGN} \
+ ${@'-D "${SPL_MKIMAGE_DTCOPTS}"' if len('${SPL_MKIMAGE_DTCOPTS}') else ''} \
+ -F -k "${SPL_SIGN_KEYDIR}" \
+ -K "${SPL_DIR}/${SPL_DTB_BINARY}" \
+ -r ${UBOOT_FITIMAGE_BINARY} \
+ ${SPL_MKIMAGE_SIGN_ARGS}
+ #
+ # Verify the U-boot FIT image and SPL dtb
+ #
+ ${UBOOT_FIT_CHECK_SIGN} \
+ -k "${SPL_DIR}/${SPL_DTB_BINARY}" \
+ -f ${UBOOT_FITIMAGE_BINARY}
+ fi
+
+ if [ -e "${SPL_DIR}/${SPL_DTB_BINARY}" ]; then
+ cp ${SPL_DIR}/${SPL_DTB_BINARY} ${SPL_DIR}/${SPL_DTB_SIGNED}
+ fi
+}
+
+uboot_assemble_fitimage_helper() {
+ type="$1"
+ binary="$2"
+
+ if [ "${UBOOT_SIGN_ENABLE}" = "1" -a -n "${UBOOT_DTB_BINARY}" ] ; then
+ concat_dtb $type $binary
+ fi
+
+ if [ "${UBOOT_FITIMAGE_ENABLE}" = "1" -a -n "${SPL_DTB_BINARY}" ]; then
+ uboot_fitimage_assemble
+ fi
+
+ if [ "${SPL_SIGN_ENABLE}" = "1" -a -n "${SPL_DTB_BINARY}" ] ; then
+ concat_spl_dtb
+ fi
+}
+
+do_uboot_assemble_fitimage() {
+ if [ "${UBOOT_SIGN_ENABLE}" = "1" ] ; then
+ cp "${STAGING_DIR_HOST}/sysroot-only/fitImage" "${B}/fitImage-linux"
+ fi
+
+ if [ -n "${UBOOT_CONFIG}" ]; then
+ unset i j k
+ for config in ${UBOOT_MACHINE}; do
+ i=$(expr $i + 1);
+ for type in ${UBOOT_CONFIG}; do
+ j=$(expr $j + 1);
+ if [ $j -eq $i ]; then
+ break;
+ fi
+ done
+
+ for binary in ${UBOOT_BINARIES}; do
+ k=$(expr $j + 1);
+ if [ $k -eq $i ]; then
+ break;
+ fi
+ done
+
+ cd ${B}/${config}
+ uboot_assemble_fitimage_helper ${type} ${binary}
+ done
+ else
+ cd ${B}
+ uboot_assemble_fitimage_helper "" ${UBOOT_BINARY}
+ fi
+}
+
+addtask uboot_assemble_fitimage before do_install do_deploy after do_compile
+
+deploy_helper() {
+ type="$1"
+
+ if [ "${UBOOT_SIGN_ENABLE}" = "1" -a -n "${UBOOT_DTB_SIGNED}" ] ; then
+ deploy_dtb $type
+ fi
+
+ if [ "${UBOOT_FITIMAGE_ENABLE}" = "1" -a -n "${SPL_DTB_BINARY}" ]; then
+ if [ -n "${type}" ]; then
+ uboot_its_image="u-boot-its-${type}-${PV}-${PR}"
+ uboot_fitimage_image="u-boot-fitImage-${type}-${PV}-${PR}"
+ else
+ uboot_its_image="${UBOOT_ITS_IMAGE}"
+ uboot_fitimage_image="${UBOOT_FITIMAGE_IMAGE}"
+ fi
+
+ install -Dm644 ${UBOOT_FITIMAGE_BINARY} ${DEPLOYDIR}/$uboot_fitimage_image
+ install -Dm644 ${UBOOT_ITS} ${DEPLOYDIR}/$uboot_its_image
+
+ if [ -n "${type}" ]; then
+ ln -sf $uboot_its_image ${DEPLOYDIR}/${UBOOT_ITS_IMAGE}
+ ln -sf $uboot_fitimage_image ${DEPLOYDIR}/${UBOOT_FITIMAGE_IMAGE}
+ fi
+ fi
+
+ if [ "${SPL_SIGN_ENABLE}" = "1" -a -n "${SPL_DTB_SIGNED}" ] ; then
+ deploy_spl_dtb $type
+ fi
+}
+
+do_deploy:prepend() {
+ if [ -n "${UBOOT_CONFIG}" ]; then
+ unset i j k
+ for config in ${UBOOT_MACHINE}; do
+ i=$(expr $i + 1);
+ for type in ${UBOOT_CONFIG}; do
+ j=$(expr $j + 1);
+ if [ $j -eq $i ]; then
+ cd ${B}/${config}
+ deploy_helper ${type}
+ fi
+ done
+ unset j
+ done
+ unset i
+ else
+ cd ${B}
+ deploy_helper ""
+ fi
+
+ if [ "${UBOOT_SIGN_ENABLE}" = "1" -a -n "${UBOOT_DTB_BINARY}" ] ; then
+ ln -sf ${UBOOT_DTB_IMAGE} ${DEPLOYDIR}/${UBOOT_DTB_BINARY}
+ ln -sf ${UBOOT_DTB_IMAGE} ${DEPLOYDIR}/${UBOOT_DTB_SYMLINK}
+ ln -sf ${UBOOT_NODTB_IMAGE} ${DEPLOYDIR}/${UBOOT_NODTB_SYMLINK}
+ ln -sf ${UBOOT_NODTB_IMAGE} ${DEPLOYDIR}/${UBOOT_NODTB_BINARY}
+ fi
+
+ if [ "${UBOOT_FITIMAGE_ENABLE}" = "1" ] ; then
+ ln -sf ${UBOOT_ITS_IMAGE} ${DEPLOYDIR}/${UBOOT_ITS}
+ ln -sf ${UBOOT_ITS_IMAGE} ${DEPLOYDIR}/${UBOOT_ITS_SYMLINK}
+ ln -sf ${UBOOT_FITIMAGE_IMAGE} ${DEPLOYDIR}/${UBOOT_FITIMAGE_BINARY}
+ ln -sf ${UBOOT_FITIMAGE_IMAGE} ${DEPLOYDIR}/${UBOOT_FITIMAGE_SYMLINK}
+ fi
+
+ if [ "${SPL_SIGN_ENABLE}" = "1" -a -n "${SPL_DTB_BINARY}" ] ; then
+ ln -sf ${SPL_DTB_IMAGE} ${DEPLOYDIR}/${SPL_DTB_SYMLINK}
+ ln -sf ${SPL_DTB_IMAGE} ${DEPLOYDIR}/${SPL_DTB_BINARY}
+ ln -sf ${SPL_NODTB_IMAGE} ${DEPLOYDIR}/${SPL_NODTB_SYMLINK}
+ ln -sf ${SPL_NODTB_IMAGE} ${DEPLOYDIR}/${SPL_NODTB_BINARY}
+ fi
+}
+
+do_deploy:append() {
+ # If we're creating a u-boot fitImage, point u-boot.bin
+ # symlink since it might get used by image recipes
+ if [ "${UBOOT_FITIMAGE_ENABLE}" = "1" ] ; then
+ ln -sf ${UBOOT_FITIMAGE_IMAGE} ${DEPLOYDIR}/${UBOOT_BINARY}
+ ln -sf ${UBOOT_FITIMAGE_IMAGE} ${DEPLOYDIR}/${UBOOT_SYMLINK}
+ fi
+}
diff --git a/meta/classes/update-alternatives.bbclass b/meta/classes-recipe/update-alternatives.bbclass
index fc1ffd828c..b153e1b297 100644
--- a/meta/classes/update-alternatives.bbclass
+++ b/meta/classes-recipe/update-alternatives.bbclass
@@ -1,5 +1,11 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
# This class is used to help the alternatives system which is useful when
-# multiple sources provide same command. You can use update-alternatives
+# multiple sources provide the same command. You can use update-alternatives
# command directly in your recipe, but in most cases this class simplifies
# that job.
#
@@ -29,7 +35,7 @@
# A non-default link to create for a target
# ALTERNATIVE_TARGET[name] = "target"
#
-# This is the name of the binary as it's been install by do_install
+# This is the name of the binary as it's been installed by do_install
# i.e. ALTERNATIVE_TARGET[sh] = "/bin/bash"
#
# A package specific link for a target
@@ -62,7 +68,7 @@ ALTERNATIVE_PRIORITY = "10"
# We need special processing for vardeps because it can not work on
# modified flag values. So we aggregate the flags into a new variable
-# and include that vairable in the set.
+# and include that variable in the set.
UPDALTVARS = "ALTERNATIVE ALTERNATIVE_LINK_NAME ALTERNATIVE_TARGET ALTERNATIVE_PRIORITY"
PACKAGE_WRITE_DEPS += "virtual/update-alternatives-native"
@@ -80,10 +86,10 @@ def gen_updatealternativesvardeps(d):
for p in pkgs:
for v in vars:
- for flag in sorted((d.getVarFlags("%s_%s" % (v,p)) or {}).keys()):
+ for flag in sorted((d.getVarFlags("%s:%s" % (v,p)) or {}).keys()):
if flag == "doc" or flag == "vardeps" or flag == "vardepsexp":
continue
- d.appendVar('%s_VARDEPS_%s' % (v,p), ' %s:%s' % (flag, d.getVarFlag('%s_%s' % (v,p), flag, False)))
+ d.appendVar('%s_VARDEPS_%s' % (v,p), ' %s:%s' % (flag, d.getVarFlag('%s:%s' % (v,p), flag, False)))
def ua_extend_depends(d):
if not 'virtual/update-alternatives' in d.getVar('PROVIDES'):
@@ -259,7 +265,7 @@ def update_alternatives_alt_targets(d, pkg):
return updates
-PACKAGESPLITFUNCS:prepend = "populate_packages_updatealternatives "
+PACKAGESPLITFUNCS =+ "populate_packages_updatealternatives"
python populate_packages_updatealternatives () {
if not update_alternatives_enabled(d):
diff --git a/meta/classes/update-rc.d.bbclass b/meta/classes-recipe/update-rc.d.bbclass
index 0a3a608662..a19e704741 100644
--- a/meta/classes/update-rc.d.bbclass
+++ b/meta/classes-recipe/update-rc.d.bbclass
@@ -1,3 +1,9 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
UPDATERCPN ?= "${PN}"
DEPENDS:append:class-target = "${@bb.utils.contains('DISTRO_FEATURES', 'sysvinit', ' update-rc.d initscripts', '', d)}"
@@ -62,8 +68,8 @@ python __anonymous() {
update_rc_after_parse(d)
}
-PACKAGESPLITFUNCS:prepend = "${@bb.utils.contains('DISTRO_FEATURES', 'sysvinit', 'populate_packages_updatercd ', '', d)}"
-PACKAGESPLITFUNCS:remove:class-nativesdk = "populate_packages_updatercd "
+PACKAGESPLITFUNCS =+ "${@bb.utils.contains('DISTRO_FEATURES', 'sysvinit', 'populate_packages_updatercd', '', d)}"
+PACKAGESPLITFUNCS:remove:class-nativesdk = "populate_packages_updatercd"
populate_packages_updatercd[vardeps] += "updatercd_prerm updatercd_postrm updatercd_postinst"
populate_packages_updatercd[vardepsexclude] += "OVERRIDES"
diff --git a/meta/classes/upstream-version-is-even.bbclass b/meta/classes-recipe/upstream-version-is-even.bbclass
index 256c752423..19587cb12c 100644
--- a/meta/classes/upstream-version-is-even.bbclass
+++ b/meta/classes-recipe/upstream-version-is-even.bbclass
@@ -1,3 +1,9 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
# This class ensures that the upstream version check only
# accepts even minor versions (i.e. 3.0.x, 3.2.x, 3.4.x, etc.)
# This scheme is used by Gnome and a number of other projects
diff --git a/meta/classes/vala.bbclass b/meta/classes-recipe/vala.bbclass
index bfcceff7cf..460ddb36f0 100644
--- a/meta/classes/vala.bbclass
+++ b/meta/classes-recipe/vala.bbclass
@@ -1,3 +1,9 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
# Everyone needs vala-native and targets need vala, too,
# because that is where target builds look for .vapi files.
#
diff --git a/meta/classes/waf.bbclass b/meta/classes-recipe/waf.bbclass
index 464564afa1..01707c8e2c 100644
--- a/meta/classes/waf.bbclass
+++ b/meta/classes-recipe/waf.bbclass
@@ -1,3 +1,9 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
# avoids build breaks when using no-static-libs.inc
DISABLE_STATIC = ""
@@ -48,8 +54,21 @@ python waf_preconfigure() {
wafbin = os.path.join(subsrcdir, 'waf')
try:
result = subprocess.check_output([python, wafbin, '--version'], cwd=subsrcdir, stderr=subprocess.STDOUT)
- version = result.decode('utf-8').split()[1]
- if bb.utils.vercmp_string_op(version, "1.8.7", ">="):
+ # Output looks like:
+ # # output from lower modules (e.g. warnings, ...)
+ # waf X.Y.Z ...
+ # So, look for the line starting with "waf "
+ version = None
+ for line in result.decode('utf-8').split("\n"):
+ if line.startswith("waf "):
+ version = line.split()[1]
+ break
+
+ if not version or not bb.utils.is_semver(version):
+ bb.warn("Unable to parse \"waf --version\" output. Assuming waf version without bindir/libdir support.")
+ bb.warn("waf·--version·output = \n%s" % result.decode('utf-8'))
+ elif bb.utils.vercmp_string_op(version, "1.8.7", ">="):
+ bb.note("waf version is high enough to add --bindir and --libdir")
d.setVar("WAF_EXTRA_CONF", "--bindir=${bindir} --libdir=${libdir}")
except subprocess.CalledProcessError as e:
bb.warn("Unable to execute waf --version, exit code %d. Assuming waf version without bindir/libdir support." % e.returncode)
diff --git a/meta/classes-recipe/xmlcatalog.bbclass b/meta/classes-recipe/xmlcatalog.bbclass
new file mode 100644
index 0000000000..d3ef7ff43c
--- /dev/null
+++ b/meta/classes-recipe/xmlcatalog.bbclass
@@ -0,0 +1,36 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+# Note that this recipe only handles XML catalogues in the native sysroot, and doesn't
+# yet support catalogue management in the target sysroot or on the target itself.
+# (https://bugzilla.yoctoproject.org/13271)
+
+# A whitespace-separated list of XML catalogs to be registered, for example
+# "${sysconfdir}/xml/docbook-xml.xml".
+XMLCATALOGS ?= ""
+
+DEPENDS:append = " libxml2-native"
+
+SYSROOT_PREPROCESS_FUNCS:append:class-native = " xmlcatalog_sstate_postinst"
+
+xmlcatalog_complete() {
+ ROOTCATALOG="${STAGING_ETCDIR_NATIVE}/xml/catalog"
+ if [ ! -f $ROOTCATALOG ]; then
+ mkdir --parents $(dirname $ROOTCATALOG)
+ xmlcatalog --noout --create $ROOTCATALOG
+ fi
+ for CATALOG in ${XMLCATALOGS}; do
+ xmlcatalog --noout --add nextCatalog unused file://$CATALOG $ROOTCATALOG
+ done
+}
+
+xmlcatalog_sstate_postinst() {
+ mkdir -p ${SYSROOT_DESTDIR}${bindir}
+ dest=${SYSROOT_DESTDIR}${bindir}/postinst-${PN}-xmlcatalog
+ echo '#!/bin/sh' > $dest
+ echo '${xmlcatalog_complete}' >> $dest
+ chmod 0755 $dest
+}
diff --git a/meta/classes/archiver.bbclass b/meta/classes/archiver.bbclass
index c19c770d11..2d0bbfbd42 100644
--- a/meta/classes/archiver.bbclass
+++ b/meta/classes/archiver.bbclass
@@ -1,5 +1,9 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
#
# This bbclass is used for creating archive for:
# 1) original (or unpacked) source: ARCHIVER_MODE[src] = "original"
@@ -55,9 +59,10 @@ ARCHIVER_MODE[compression] ?= "xz"
DEPLOY_DIR_SRC ?= "${DEPLOY_DIR}/sources"
ARCHIVER_TOPDIR ?= "${WORKDIR}/archiver-sources"
-ARCHIVER_OUTDIR = "${ARCHIVER_TOPDIR}/${TARGET_SYS}/${PF}/"
+ARCHIVER_ARCH = "${TARGET_SYS}"
+ARCHIVER_OUTDIR = "${ARCHIVER_TOPDIR}/${ARCHIVER_ARCH}/${PF}/"
ARCHIVER_RPMTOPDIR ?= "${WORKDIR}/deploy-sources-rpm"
-ARCHIVER_RPMOUTDIR = "${ARCHIVER_RPMTOPDIR}/${TARGET_SYS}/${PF}/"
+ARCHIVER_RPMOUTDIR = "${ARCHIVER_RPMTOPDIR}/${ARCHIVER_ARCH}/${PF}/"
ARCHIVER_WORKDIR = "${WORKDIR}/archiver-work/"
# When producing a combined mirror directory, allow duplicates for the case
@@ -68,39 +73,48 @@ SSTATE_ALLOW_OVERLAP_FILES += "${DEPLOY_DIR_SRC}/mirror"
do_dumpdata[dirs] = "${ARCHIVER_OUTDIR}"
do_ar_recipe[dirs] = "${ARCHIVER_OUTDIR}"
do_ar_original[dirs] = "${ARCHIVER_OUTDIR} ${ARCHIVER_WORKDIR}"
-do_deploy_archives[dirs] = "${WORKDIR}"
# This is a convenience for the shell script to use it
-
-python () {
- pn = d.getVar('PN')
- assume_provided = (d.getVar("ASSUME_PROVIDED") or "").split()
- if pn in assume_provided:
- for p in d.getVar("PROVIDES").split():
- if p != pn:
- pn = p
- break
+def include_package(d, pn):
included, reason = copyleft_should_include(d)
if not included:
bb.debug(1, 'archiver: %s is excluded: %s' % (pn, reason))
- return
+ return False
+
else:
bb.debug(1, 'archiver: %s is included: %s' % (pn, reason))
-
# glibc-locale: do_fetch, do_unpack and do_patch tasks have been deleted,
# so avoid archiving source here.
if pn.startswith('glibc-locale'):
- return
+ return False
# We just archive gcc-source for all the gcc related recipes
if d.getVar('BPN') in ['gcc', 'libgcc'] \
and not pn.startswith('gcc-source'):
bb.debug(1, 'archiver: %s is excluded, covered by gcc-source' % pn)
+ return False
+
+ return True
+
+python () {
+ pn = d.getVar('PN')
+ assume_provided = (d.getVar("ASSUME_PROVIDED") or "").split()
+ if pn in assume_provided:
+ for p in d.getVar("PROVIDES").split():
+ if p != pn:
+ pn = p
+ break
+
+ if not include_package(d, pn):
return
+ # TARGET_SYS in ARCHIVER_ARCH will break the stamp for gcc-source in multiconfig
+ if pn.startswith('gcc-source'):
+ d.setVar('ARCHIVER_ARCH', "allarch")
+
def hasTask(task):
return bool(d.getVarFlag(task, "task", False)) and not bool(d.getVarFlag(task, "noexec", False))
@@ -387,19 +401,11 @@ python do_ar_mirror() {
subprocess.check_call(cmd, shell=True)
}
-def exclude_useless_paths(tarinfo):
- if tarinfo.isdir():
- if tarinfo.name.endswith('/temp') or tarinfo.name.endswith('/patches') or tarinfo.name.endswith('/.pc'):
- return None
- elif tarinfo.name == 'temp' or tarinfo.name == 'patches' or tarinfo.name == '.pc':
- return None
- return tarinfo
-
def create_tarball(d, srcdir, suffix, ar_outdir):
"""
create the tarball from srcdir
"""
- import tarfile
+ import subprocess
# Make sure we are only creating a single tarball for gcc sources
if (d.getVar('SRC_URI') == ""):
@@ -411,6 +417,16 @@ def create_tarball(d, srcdir, suffix, ar_outdir):
srcdir = os.path.realpath(srcdir)
compression_method = d.getVarFlag('ARCHIVER_MODE', 'compression')
+ if compression_method == "xz":
+ compression_cmd = "xz %s" % d.getVar('XZ_DEFAULTS')
+ # To keep compatibility with ARCHIVER_MODE[compression]
+ elif compression_method == "gz":
+ compression_cmd = "gzip"
+ elif compression_method == "bz2":
+ compression_cmd = "bzip2"
+ else:
+ bb.fatal("Unsupported compression_method: %s" % compression_method)
+
bb.utils.mkdirhier(ar_outdir)
if suffix:
filename = '%s-%s.tar.%s' % (d.getVar('PF'), suffix, compression_method)
@@ -419,9 +435,11 @@ def create_tarball(d, srcdir, suffix, ar_outdir):
tarname = os.path.join(ar_outdir, filename)
bb.note('Creating %s' % tarname)
- tar = tarfile.open(tarname, 'w:%s' % compression_method)
- tar.add(srcdir, arcname=os.path.basename(srcdir), filter=exclude_useless_paths)
- tar.close()
+ dirname = os.path.dirname(srcdir)
+ basename = os.path.basename(srcdir)
+ exclude = "--exclude=temp --exclude=patches --exclude='.pc'"
+ tar_cmd = "tar %s -cf - %s | %s > %s" % (exclude, basename, compression_cmd, tarname)
+ subprocess.check_call(tar_cmd, cwd=dirname, shell=True)
# creating .diff.gz between source.orig and source
def create_diff_gz(d, src_orig, src, ar_outdir):
@@ -454,8 +472,8 @@ def create_diff_gz(d, src_orig, src, ar_outdir):
os.chdir(cwd)
def is_work_shared(d):
- pn = d.getVar('PN')
- return bb.data.inherits_class('kernel', d) or pn.startswith('gcc-source')
+ sharedworkdir = os.path.join(d.getVar('TMPDIR'), 'work-shared')
+ return d.getVar('S').startswith(sharedworkdir)
# Run do_unpack and do_patch
python do_unpack_and_patch() {
@@ -579,7 +597,7 @@ python do_dumpdata () {
SSTATETASKS += "do_deploy_archives"
do_deploy_archives () {
- echo "Deploying source archive files from ${ARCHIVER_TOPDIR} to ${DEPLOY_DIR_SRC}."
+ bbnote "Deploying source archive files from ${ARCHIVER_TOPDIR} to ${DEPLOY_DIR_SRC}."
}
python do_deploy_archives_setscene () {
sstate_setscene(d)
diff --git a/meta/classes/autotools-brokensep.bbclass b/meta/classes/autotools-brokensep.bbclass
deleted file mode 100644
index 71cf97a391..0000000000
--- a/meta/classes/autotools-brokensep.bbclass
+++ /dev/null
@@ -1,5 +0,0 @@
-# Autotools class for recipes where separate build dir doesn't work
-# Ideally we should fix software so it does work. Standard autotools supports
-# this.
-inherit autotools
-B = "${S}"
diff --git a/meta/classes/buildhistory.bbclass b/meta/classes/buildhistory.bbclass
index 8db79a4829..fd53e92402 100644
--- a/meta/classes/buildhistory.bbclass
+++ b/meta/classes/buildhistory.bbclass
@@ -6,8 +6,10 @@
# Copyright (C) 2011-2016 Intel Corporation
# Copyright (C) 2007-2011 Koen Kooi <koen@openembedded.org>
#
+# SPDX-License-Identifier: MIT
+#
-inherit image-artifact-names
+IMAGE_CLASSES += "image-artifact-names"
BUILDHISTORY_FEATURES ?= "image package sdk"
BUILDHISTORY_DIR ?= "${TOPDIR}/buildhistory"
@@ -508,7 +510,7 @@ buildhistory_get_installed() {
# Set correct pkgdatadir
pkgdatadir=${PKGDATA_DIR}
- if [ "$2" == "sdk" ] && [ "$3" == "host" ]; then
+ if [ "$2" = "sdk" ] && [ "$3" = "host" ] ; then
pkgdatadir="${PKGDATA_DIR_SDK}"
fi
@@ -689,28 +691,28 @@ python buildhistory_get_extra_sdkinfo() {
# By using ROOTFS_POSTUNINSTALL_COMMAND we get in after uninstallation of
# unneeded packages but before the removal of packaging files
-ROOTFS_POSTUNINSTALL_COMMAND += "buildhistory_list_installed_image ;"
-ROOTFS_POSTUNINSTALL_COMMAND += "buildhistory_get_image_installed ;"
-ROOTFS_POSTUNINSTALL_COMMAND[vardepvalueexclude] .= "| buildhistory_list_installed_image ;| buildhistory_get_image_installed ;"
+ROOTFS_POSTUNINSTALL_COMMAND += "buildhistory_list_installed_image"
+ROOTFS_POSTUNINSTALL_COMMAND += "buildhistory_get_image_installed"
+ROOTFS_POSTUNINSTALL_COMMAND[vardepvalueexclude] .= "| buildhistory_list_installed_image| buildhistory_get_image_installed"
ROOTFS_POSTUNINSTALL_COMMAND[vardepsexclude] += "buildhistory_list_installed_image buildhistory_get_image_installed"
-IMAGE_POSTPROCESS_COMMAND += "buildhistory_get_imageinfo ;"
-IMAGE_POSTPROCESS_COMMAND[vardepvalueexclude] .= "| buildhistory_get_imageinfo ;"
+IMAGE_POSTPROCESS_COMMAND += "buildhistory_get_imageinfo"
+IMAGE_POSTPROCESS_COMMAND[vardepvalueexclude] .= "| buildhistory_get_imageinfo"
IMAGE_POSTPROCESS_COMMAND[vardepsexclude] += "buildhistory_get_imageinfo"
# We want these to be the last run so that we get called after complementary package installation
-POPULATE_SDK_POST_TARGET_COMMAND:append = " buildhistory_list_installed_sdk_target;"
-POPULATE_SDK_POST_TARGET_COMMAND:append = " buildhistory_get_sdk_installed_target;"
-POPULATE_SDK_POST_TARGET_COMMAND[vardepvalueexclude] .= "| buildhistory_list_installed_sdk_target;| buildhistory_get_sdk_installed_target;"
+POPULATE_SDK_POST_TARGET_COMMAND:append = " buildhistory_list_installed_sdk_target"
+POPULATE_SDK_POST_TARGET_COMMAND:append = " buildhistory_get_sdk_installed_target"
+POPULATE_SDK_POST_TARGET_COMMAND[vardepvalueexclude] .= "| buildhistory_list_installed_sdk_target| buildhistory_get_sdk_installed_target"
POPULATE_SDK_POST_TARGET_COMMAND[vardepsexclude] += "buildhistory_list_installed_sdk_target buildhistory_get_sdk_installed_target"
-POPULATE_SDK_POST_HOST_COMMAND:append = " buildhistory_list_installed_sdk_host;"
-POPULATE_SDK_POST_HOST_COMMAND:append = " buildhistory_get_sdk_installed_host;"
-POPULATE_SDK_POST_HOST_COMMAND[vardepvalueexclude] .= "| buildhistory_list_installed_sdk_host;| buildhistory_get_sdk_installed_host;"
+POPULATE_SDK_POST_HOST_COMMAND:append = " buildhistory_list_installed_sdk_host"
+POPULATE_SDK_POST_HOST_COMMAND:append = " buildhistory_get_sdk_installed_host"
+POPULATE_SDK_POST_HOST_COMMAND[vardepvalueexclude] .= "| buildhistory_list_installed_sdk_host| buildhistory_get_sdk_installed_host"
POPULATE_SDK_POST_HOST_COMMAND[vardepsexclude] += "buildhistory_list_installed_sdk_host buildhistory_get_sdk_installed_host"
-SDK_POSTPROCESS_COMMAND:append = " buildhistory_get_sdkinfo ; buildhistory_get_extra_sdkinfo; "
-SDK_POSTPROCESS_COMMAND[vardepvalueexclude] .= "| buildhistory_get_sdkinfo ; buildhistory_get_extra_sdkinfo; "
+SDK_POSTPROCESS_COMMAND:append = " buildhistory_get_sdkinfo buildhistory_get_extra_sdkinfo"
+SDK_POSTPROCESS_COMMAND[vardepvalueexclude] .= "| buildhistory_get_sdkinfo buildhistory_get_extra_sdkinfo"
SDK_POSTPROCESS_COMMAND[vardepsexclude] += "buildhistory_get_sdkinfo buildhistory_get_extra_sdkinfo"
python buildhistory_write_sigs() {
@@ -741,30 +743,10 @@ def buildhistory_get_build_id(d):
statusheader = d.getVar('BUILDCFG_HEADER')
return('\n%s\n%s\n' % (statusheader, '\n'.join(statuslines)))
-def buildhistory_get_modified(path):
- # copied from get_layer_git_status() in image-buildinfo.bbclass
- import subprocess
- try:
- subprocess.check_output("""cd %s; export PSEUDO_UNLOAD=1; set -e;
- git diff --quiet --no-ext-diff
- git diff --quiet --no-ext-diff --cached""" % path,
- shell=True,
- stderr=subprocess.STDOUT)
- return ""
- except subprocess.CalledProcessError as ex:
- # Silently treat errors as "modified", without checking for the
- # (expected) return code 1 in a modified git repo. For example, we get
- # output and a 129 return code when a layer isn't a git repo at all.
- return " -- modified"
-
def buildhistory_get_metadata_revs(d):
- # We want an easily machine-readable format here, so get_layers_branch_rev isn't quite what we want
- layers = (d.getVar("BBLAYERS") or "").split()
- medadata_revs = ["%-17s = %s:%s%s" % (os.path.basename(i), \
- base_get_metadata_git_branch(i, None).strip(), \
- base_get_metadata_git_revision(i, None), \
- buildhistory_get_modified(i)) \
- for i in layers]
+ # We want an easily machine-readable format here
+ revisions = oe.buildcfg.get_layer_revisions(d)
+ medadata_revs = ["%-17s = %s:%s%s" % (r[1], r[2], r[3], r[4]) for r in revisions]
return '\n'.join(medadata_revs)
def outputvars(vars, listvars, d):
diff --git a/meta/classes/buildstats-summary.bbclass b/meta/classes/buildstats-summary.bbclass
index f9b241b6c5..12e8f17836 100644
--- a/meta/classes/buildstats-summary.bbclass
+++ b/meta/classes/buildstats-summary.bbclass
@@ -1,3 +1,9 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
# Summarize sstate usage at the end of the build
python buildstats_summary () {
import collections
diff --git a/meta/classes/cargo.bbclass b/meta/classes/cargo.bbclass
deleted file mode 100644
index 4a780a501f..0000000000
--- a/meta/classes/cargo.bbclass
+++ /dev/null
@@ -1,90 +0,0 @@
-##
-## Purpose:
-## This class is used by any recipes that are built using
-## Cargo.
-
-inherit cargo_common
-
-# the binary we will use
-CARGO = "cargo"
-
-# We need cargo to compile for the target
-BASEDEPENDS:append = " cargo-native"
-
-# Ensure we get the right rust variant
-DEPENDS:append:class-target = " virtual/${TARGET_PREFIX}rust ${RUSTLIB_DEP}"
-DEPENDS:append:class-nativesdk = " virtual/${TARGET_PREFIX}rust ${RUSTLIB_DEP}"
-DEPENDS:append:class-native = " rust-native"
-
-# Enable build separation
-B = "${WORKDIR}/build"
-
-# In case something fails in the build process, give a bit more feedback on
-# where the issue occured
-export RUST_BACKTRACE = "1"
-
-# The directory of the Cargo.toml relative to the root directory, per default
-# assume there's a Cargo.toml directly in the root directory
-CARGO_SRC_DIR ??= ""
-
-# The actual path to the Cargo.toml
-MANIFEST_PATH ??= "${S}/${CARGO_SRC_DIR}/Cargo.toml"
-
-RUSTFLAGS ??= ""
-BUILD_MODE = "${@['--release', ''][d.getVar('DEBUG_BUILD') == '1']}"
-CARGO_BUILD_FLAGS = "-v --target ${HOST_SYS} ${BUILD_MODE} --manifest-path=${MANIFEST_PATH}"
-
-# This is based on the content of CARGO_BUILD_FLAGS and generally will need to
-# change if CARGO_BUILD_FLAGS changes.
-BUILD_DIR = "${@['release', 'debug'][d.getVar('DEBUG_BUILD') == '1']}"
-CARGO_TARGET_SUBDIR="${HOST_SYS}/${BUILD_DIR}"
-oe_cargo_build () {
- export RUSTFLAGS="${RUSTFLAGS}"
- export RUST_TARGET_PATH="${RUST_TARGET_PATH}"
- bbnote "cargo = $(which ${CARGO})"
- bbnote "rustc = $(which ${RUSTC})"
- bbnote "${CARGO} build ${CARGO_BUILD_FLAGS} $@"
- "${CARGO}" build ${CARGO_BUILD_FLAGS} "$@"
-}
-
-do_compile[progress] = "outof:\s+(\d+)/(\d+)"
-cargo_do_compile () {
- oe_cargo_fix_env
- oe_cargo_build
-}
-
-cargo_do_install () {
- local have_installed=false
- for tgt in "${B}/target/${CARGO_TARGET_SUBDIR}/"*; do
- case $tgt in
- *.so|*.rlib)
- install -d "${D}${rustlibdir}"
- install -m755 "$tgt" "${D}${rustlibdir}"
- have_installed=true
- ;;
- *examples)
- if [ -d "$tgt" ]; then
- for example in "$tgt/"*; do
- if [ -f "$example" ] && [ -x "$example" ]; then
- install -d "${D}${bindir}"
- install -m755 "$example" "${D}${bindir}"
- have_installed=true
- fi
- done
- fi
- ;;
- *)
- if [ -f "$tgt" ] && [ -x "$tgt" ]; then
- install -d "${D}${bindir}"
- install -m755 "$tgt" "${D}${bindir}"
- have_installed=true
- fi
- ;;
- esac
- done
- if ! $have_installed; then
- die "Did not find anything to install"
- fi
-}
-
-EXPORT_FUNCTIONS do_compile do_install
diff --git a/meta/classes/cargo_common.bbclass b/meta/classes/cargo_common.bbclass
deleted file mode 100644
index 39f32829fd..0000000000
--- a/meta/classes/cargo_common.bbclass
+++ /dev/null
@@ -1,124 +0,0 @@
-##
-## Purpose:
-## This class is to support building with cargo. It
-## must be different than cargo.bbclass because Rust
-## now builds with Cargo but cannot use cargo.bbclass
-## due to dependencies and assumptions in cargo.bbclass
-## that Rust & Cargo are already installed. So this
-## is used by cargo.bbclass and Rust
-##
-
-# add crate fetch support
-inherit rust-common
-
-# Where we download our registry and dependencies to
-export CARGO_HOME = "${WORKDIR}/cargo_home"
-
-# The pkg-config-rs library used by cargo build scripts disables itself when
-# cross compiling unless this is defined. We set up pkg-config appropriately
-# for cross compilation, so tell it we know better than it.
-export PKG_CONFIG_ALLOW_CROSS = "1"
-
-# Don't instruct cargo to use crates downloaded by bitbake. Some rust packages,
-# for example the rust compiler itself, come with their own vendored sources.
-# Specifying two [source.crates-io] will not work.
-CARGO_DISABLE_BITBAKE_VENDORING ?= "0"
-
-# Used by libstd-rs to point to the vendor dir included in rustc src
-CARGO_VENDORING_DIRECTORY ?= "${CARGO_HOME}/bitbake"
-
-CARGO_RUST_TARGET_CCLD ?= "${RUST_TARGET_CCLD}"
-cargo_common_do_configure () {
- mkdir -p ${CARGO_HOME}/bitbake
-
- cat <<- EOF > ${CARGO_HOME}/config
- # EXTRA_OECARGO_PATHS
- paths = [
- $(for p in ${EXTRA_OECARGO_PATHS}; do echo \"$p\",; done)
- ]
- EOF
-
- cat <<- EOF >> ${CARGO_HOME}/config
-
- # Local mirror vendored by bitbake
- [source.bitbake]
- directory = "${CARGO_VENDORING_DIRECTORY}"
- EOF
-
- if [ ${CARGO_DISABLE_BITBAKE_VENDORING} = "0" ]; then
- cat <<- EOF >> ${CARGO_HOME}/config
-
- [source.crates-io]
- replace-with = "bitbake"
- local-registry = "/nonexistant"
- EOF
- fi
-
- cat <<- EOF >> ${CARGO_HOME}/config
-
- [http]
- # Multiplexing can't be enabled because http2 can't be enabled
- # in curl-native without dependency loops
- multiplexing = false
-
- # Ignore the hard coded and incorrect path to certificates
- cainfo = "${STAGING_ETCDIR_NATIVE}/ssl/certs/ca-certificates.crt"
-
- EOF
-
- cat <<- EOF >> ${CARGO_HOME}/config
-
- # HOST_SYS
- [target.${HOST_SYS}]
- linker = "${CARGO_RUST_TARGET_CCLD}"
- EOF
-
- if [ "${HOST_SYS}" != "${BUILD_SYS}" ]; then
- cat <<- EOF >> ${CARGO_HOME}/config
-
- # BUILD_SYS
- [target.${BUILD_SYS}]
- linker = "${RUST_BUILD_CCLD}"
- EOF
- fi
-
- # Put build output in build directory preferred by bitbake instead of
- # inside source directory unless they are the same
- if [ "${B}" != "${S}" ]; then
- cat <<- EOF >> ${CARGO_HOME}/config
-
- [build]
- # Use out of tree build destination to avoid poluting the source tree
- target-dir = "${B}/target"
- EOF
- fi
-
- cat <<- EOF >> ${CARGO_HOME}/config
-
- [term]
- progress.when = 'always'
- progress.width = 80
- EOF
-}
-
-oe_cargo_fix_env () {
- export CC="${RUST_TARGET_CC}"
- export CXX="${RUST_TARGET_CXX}"
- export CFLAGS="${CFLAGS}"
- export CXXFLAGS="${CXXFLAGS}"
- export AR="${AR}"
- export TARGET_CC="${RUST_TARGET_CC}"
- export TARGET_CXX="${RUST_TARGET_CXX}"
- export TARGET_CFLAGS="${CFLAGS}"
- export TARGET_CXXFLAGS="${CXXFLAGS}"
- export TARGET_AR="${AR}"
- export HOST_CC="${RUST_BUILD_CC}"
- export HOST_CXX="${RUST_BUILD_CXX}"
- export HOST_CFLAGS="${BUILD_CFLAGS}"
- export HOST_CXXFLAGS="${BUILD_CXXFLAGS}"
- export HOST_AR="${BUILD_AR}"
-}
-
-EXTRA_OECARGO_PATHS ??= ""
-
-EXPORT_FUNCTIONS do_configure
diff --git a/meta/classes/ccache.bbclass b/meta/classes/ccache.bbclass
index 4532894c57..262db6672c 100644
--- a/meta/classes/ccache.bbclass
+++ b/meta/classes/ccache.bbclass
@@ -1,4 +1,10 @@
#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+#
# Usage:
# - Enable ccache
# Add the following line to a conffile such as conf/local.conf:
@@ -22,6 +28,11 @@
# be shared between different builds.
CCACHE_TOP_DIR ?= "${TMPDIR}/ccache"
+# ccache-native and cmake-native have a circular dependency
+# that affects other native recipes, but not all.
+# Allows to use ccache in specified native recipes.
+CCACHE_NATIVE_RECIPES_ALLOWED ?= ""
+
# ccahe removes CCACHE_BASEDIR from file path, so that hashes will be the same
# in different builds.
export CCACHE_BASEDIR ?= "${TMPDIR}"
@@ -48,9 +59,9 @@ python() {
Enable ccache for the recipe
"""
pn = d.getVar('PN')
- # quilt-native doesn't need ccache since no c files
- if not (bb.data.inherits_class("native", d) or
- bb.utils.to_boolean(d.getVar('CCACHE_DISABLE'))):
+ if (pn in d.getVar('CCACHE_NATIVE_RECIPES_ALLOWED') or
+ not (bb.data.inherits_class("native", d) or
+ bb.utils.to_boolean(d.getVar('CCACHE_DISABLE')))):
d.appendVar('DEPENDS', ' ccache-native')
d.setVar('CCACHE', 'ccache ')
}
diff --git a/meta/classes/ccmake.bbclass b/meta/classes/ccmake.bbclass
index df5134a108..c5b4bf6260 100644
--- a/meta/classes/ccmake.bbclass
+++ b/meta/classes/ccmake.bbclass
@@ -1,3 +1,9 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
inherit terminal
python do_ccmake() {
diff --git a/meta/classes/chrpath.bbclass b/meta/classes/chrpath.bbclass
index 26b984c4db..16729dcf61 100644
--- a/meta/classes/chrpath.bbclass
+++ b/meta/classes/chrpath.bbclass
@@ -1,3 +1,9 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
CHRPATH_BIN ?= "chrpath"
PREPROCESS_RELOCATE_DIRS ?= ""
@@ -56,7 +62,7 @@ def process_file_linux(cmd, fpath, rootdir, baseprefix, tmpdir, d, break_hardlin
def process_file_darwin(cmd, fpath, rootdir, baseprefix, tmpdir, d, break_hardlinks = False):
import subprocess as sub
- p = sub.Popen([d.expand("${HOST_PREFIX}otool"), '-L', fpath],stdout=sub.PIPE,stderr=sub.PIPE)
+ p = sub.Popen([d.expand("${HOST_PREFIX}otool"), '-L', fpath],stdout=sub.PIPE,stderr=sub.PIPE, text=True)
out, err = p.communicate()
# If returned successfully, process stdout for results
if p.returncode != 0:
diff --git a/meta/classes/cmake.bbclass b/meta/classes/cmake.bbclass
deleted file mode 100644
index d9bcddbdbb..0000000000
--- a/meta/classes/cmake.bbclass
+++ /dev/null
@@ -1,217 +0,0 @@
-# Path to the CMake file to process.
-OECMAKE_SOURCEPATH ??= "${S}"
-
-DEPENDS:prepend = "cmake-native "
-B = "${WORKDIR}/build"
-
-# What CMake generator to use.
-# The supported options are "Unix Makefiles" or "Ninja".
-OECMAKE_GENERATOR ?= "Ninja"
-
-python() {
- generator = d.getVar("OECMAKE_GENERATOR")
- if "Unix Makefiles" in generator:
- args = "-G '" + generator + "' -DCMAKE_MAKE_PROGRAM=" + d.getVar("MAKE")
- d.setVar("OECMAKE_GENERATOR_ARGS", args)
- d.setVarFlag("do_compile", "progress", "percent")
- elif "Ninja" in generator:
- args = "-G '" + generator + "' -DCMAKE_MAKE_PROGRAM=ninja"
- d.appendVar("DEPENDS", " ninja-native")
- d.setVar("OECMAKE_GENERATOR_ARGS", args)
- d.setVarFlag("do_compile", "progress", r"outof:^\[(\d+)/(\d+)\]\s+")
- else:
- bb.fatal("Unknown CMake Generator %s" % generator)
-}
-OECMAKE_AR ?= "${AR}"
-
-# Compiler flags
-OECMAKE_C_FLAGS ?= "${HOST_CC_ARCH} ${TOOLCHAIN_OPTIONS} ${CFLAGS}"
-OECMAKE_CXX_FLAGS ?= "${HOST_CC_ARCH} ${TOOLCHAIN_OPTIONS} ${CXXFLAGS}"
-OECMAKE_C_FLAGS_RELEASE ?= "-DNDEBUG"
-OECMAKE_CXX_FLAGS_RELEASE ?= "-DNDEBUG"
-OECMAKE_C_LINK_FLAGS ?= "${HOST_CC_ARCH} ${TOOLCHAIN_OPTIONS} ${CPPFLAGS} ${LDFLAGS}"
-OECMAKE_CXX_LINK_FLAGS ?= "${HOST_CC_ARCH} ${TOOLCHAIN_OPTIONS} ${CXXFLAGS} ${LDFLAGS}"
-
-def oecmake_map_compiler(compiler, d):
- args = d.getVar(compiler).split()
- if args[0] == "ccache":
- return args[1], args[0]
- return args[0], ""
-
-# C/C++ Compiler (without cpu arch/tune arguments)
-OECMAKE_C_COMPILER ?= "${@oecmake_map_compiler('CC', d)[0]}"
-OECMAKE_C_COMPILER_LAUNCHER ?= "${@oecmake_map_compiler('CC', d)[1]}"
-OECMAKE_CXX_COMPILER ?= "${@oecmake_map_compiler('CXX', d)[0]}"
-OECMAKE_CXX_COMPILER_LAUNCHER ?= "${@oecmake_map_compiler('CXX', d)[1]}"
-
-# clear compiler vars for allarch to avoid sig hash difference
-OECMAKE_C_COMPILER_allarch = ""
-OECMAKE_C_COMPILER_LAUNCHER_allarch = ""
-OECMAKE_CXX_COMPILER_allarch = ""
-OECMAKE_CXX_COMPILER_LAUNCHER_allarch = ""
-
-OECMAKE_RPATH ?= ""
-OECMAKE_PERLNATIVE_DIR ??= ""
-OECMAKE_EXTRA_ROOT_PATH ?= ""
-
-OECMAKE_FIND_ROOT_PATH_MODE_PROGRAM = "ONLY"
-OECMAKE_FIND_ROOT_PATH_MODE_PROGRAM:class-native = "BOTH"
-
-EXTRA_OECMAKE:append = " ${PACKAGECONFIG_CONFARGS}"
-
-export CMAKE_BUILD_PARALLEL_LEVEL
-CMAKE_BUILD_PARALLEL_LEVEL:task-compile = "${@oe.utils.parallel_make(d, False)}"
-CMAKE_BUILD_PARALLEL_LEVEL:task-install = "${@oe.utils.parallel_make(d, True)}"
-
-OECMAKE_TARGET_COMPILE ?= "all"
-OECMAKE_TARGET_INSTALL ?= "install"
-
-def map_host_os_to_system_name(host_os):
- if host_os.startswith('mingw'):
- return 'Windows'
- if host_os.startswith('linux'):
- return 'Linux'
- return host_os
-
-# CMake expects target architectures in the format of uname(2),
-# which do not always match TARGET_ARCH, so all the necessary
-# conversions should happen here.
-def map_host_arch_to_uname_arch(host_arch):
- if host_arch == "powerpc":
- return "ppc"
- if host_arch == "powerpc64le":
- return "ppc64le"
- if host_arch == "powerpc64":
- return "ppc64"
- return host_arch
-
-cmake_do_generate_toolchain_file() {
- if [ "${BUILD_SYS}" = "${HOST_SYS}" ]; then
- cmake_crosscompiling="set( CMAKE_CROSSCOMPILING FALSE )"
- fi
- cat > ${WORKDIR}/toolchain.cmake <<EOF
-# CMake system name must be something like "Linux".
-# This is important for cross-compiling.
-$cmake_crosscompiling
-set( CMAKE_SYSTEM_NAME ${@map_host_os_to_system_name(d.getVar('HOST_OS'))} )
-set( CMAKE_SYSTEM_PROCESSOR ${@map_host_arch_to_uname_arch(d.getVar('HOST_ARCH'))} )
-set( CMAKE_C_COMPILER ${OECMAKE_C_COMPILER} )
-set( CMAKE_CXX_COMPILER ${OECMAKE_CXX_COMPILER} )
-set( CMAKE_C_COMPILER_LAUNCHER ${OECMAKE_C_COMPILER_LAUNCHER} )
-set( CMAKE_CXX_COMPILER_LAUNCHER ${OECMAKE_CXX_COMPILER_LAUNCHER} )
-set( CMAKE_ASM_COMPILER ${OECMAKE_C_COMPILER} )
-find_program( CMAKE_AR ${OECMAKE_AR} DOC "Archiver" REQUIRED )
-
-set( CMAKE_C_FLAGS "${OECMAKE_C_FLAGS}" CACHE STRING "CFLAGS" )
-set( CMAKE_CXX_FLAGS "${OECMAKE_CXX_FLAGS}" CACHE STRING "CXXFLAGS" )
-set( CMAKE_ASM_FLAGS "${OECMAKE_C_FLAGS}" CACHE STRING "ASM FLAGS" )
-set( CMAKE_C_FLAGS_RELEASE "${OECMAKE_C_FLAGS_RELEASE}" CACHE STRING "Additional CFLAGS for release" )
-set( CMAKE_CXX_FLAGS_RELEASE "${OECMAKE_CXX_FLAGS_RELEASE}" CACHE STRING "Additional CXXFLAGS for release" )
-set( CMAKE_ASM_FLAGS_RELEASE "${OECMAKE_C_FLAGS_RELEASE}" CACHE STRING "Additional ASM FLAGS for release" )
-set( CMAKE_C_LINK_FLAGS "${OECMAKE_C_LINK_FLAGS}" CACHE STRING "LDFLAGS" )
-set( CMAKE_CXX_LINK_FLAGS "${OECMAKE_CXX_LINK_FLAGS}" CACHE STRING "LDFLAGS" )
-
-# only search in the paths provided so cmake doesnt pick
-# up libraries and tools from the native build machine
-set( CMAKE_FIND_ROOT_PATH ${STAGING_DIR_HOST} ${STAGING_DIR_NATIVE} ${CROSS_DIR} ${OECMAKE_PERLNATIVE_DIR} ${OECMAKE_EXTRA_ROOT_PATH} ${EXTERNAL_TOOLCHAIN} ${HOSTTOOLS_DIR})
-set( CMAKE_FIND_ROOT_PATH_MODE_PACKAGE ONLY )
-set( CMAKE_FIND_ROOT_PATH_MODE_PROGRAM ${OECMAKE_FIND_ROOT_PATH_MODE_PROGRAM} )
-set( CMAKE_FIND_ROOT_PATH_MODE_LIBRARY ONLY )
-set( CMAKE_FIND_ROOT_PATH_MODE_INCLUDE ONLY )
-set( CMAKE_PROGRAM_PATH "/" )
-
-# Use qt.conf settings
-set( ENV{QT_CONF_PATH} ${WORKDIR}/qt.conf )
-
-# We need to set the rpath to the correct directory as cmake does not provide any
-# directory as rpath by default
-set( CMAKE_INSTALL_RPATH ${OECMAKE_RPATH} )
-
-# Use RPATHs relative to build directory for reproducibility
-set( CMAKE_BUILD_RPATH_USE_ORIGIN ON )
-
-# Use our cmake modules
-list(APPEND CMAKE_MODULE_PATH "${STAGING_DATADIR}/cmake/Modules/")
-
-# add for non /usr/lib libdir, e.g. /usr/lib64
-set( CMAKE_LIBRARY_PATH ${libdir} ${base_libdir})
-
-# add include dir to implicit includes in case it differs from /usr/include
-list(APPEND CMAKE_C_IMPLICIT_INCLUDE_DIRECTORIES ${includedir})
-list(APPEND CMAKE_CXX_IMPLICIT_INCLUDE_DIRECTORIES ${includedir})
-
-EOF
-}
-
-addtask generate_toolchain_file after do_patch before do_configure
-
-CONFIGURE_FILES = "CMakeLists.txt"
-
-do_configure[cleandirs] = "${@d.getVar('B') if d.getVar('S') != d.getVar('B') else ''}"
-
-cmake_do_configure() {
- if [ "${OECMAKE_BUILDPATH}" ]; then
- bbnote "cmake.bbclass no longer uses OECMAKE_BUILDPATH. The default behaviour is now out-of-tree builds with B=WORKDIR/build."
- fi
-
- if [ "${S}" = "${B}" ]; then
- find ${B} -name CMakeFiles -or -name Makefile -or -name cmake_install.cmake -or -name CMakeCache.txt -delete
- fi
-
- # Just like autotools cmake can use a site file to cache result that need generated binaries to run
- if [ -e ${WORKDIR}/site-file.cmake ] ; then
- oecmake_sitefile="-C ${WORKDIR}/site-file.cmake"
- else
- oecmake_sitefile=
- fi
-
- cmake \
- ${OECMAKE_GENERATOR_ARGS} \
- $oecmake_sitefile \
- ${OECMAKE_SOURCEPATH} \
- -DCMAKE_INSTALL_PREFIX:PATH=${prefix} \
- -DCMAKE_INSTALL_BINDIR:PATH=${@os.path.relpath(d.getVar('bindir'), d.getVar('prefix') + '/')} \
- -DCMAKE_INSTALL_SBINDIR:PATH=${@os.path.relpath(d.getVar('sbindir'), d.getVar('prefix') + '/')} \
- -DCMAKE_INSTALL_LIBEXECDIR:PATH=${@os.path.relpath(d.getVar('libexecdir'), d.getVar('prefix') + '/')} \
- -DCMAKE_INSTALL_SYSCONFDIR:PATH=${sysconfdir} \
- -DCMAKE_INSTALL_SHAREDSTATEDIR:PATH=${@os.path.relpath(d.getVar('sharedstatedir'), d. getVar('prefix') + '/')} \
- -DCMAKE_INSTALL_LOCALSTATEDIR:PATH=${localstatedir} \
- -DCMAKE_INSTALL_LIBDIR:PATH=${@os.path.relpath(d.getVar('libdir'), d.getVar('prefix') + '/')} \
- -DCMAKE_INSTALL_INCLUDEDIR:PATH=${@os.path.relpath(d.getVar('includedir'), d.getVar('prefix') + '/')} \
- -DCMAKE_INSTALL_DATAROOTDIR:PATH=${@os.path.relpath(d.getVar('datadir'), d.getVar('prefix') + '/')} \
- -DPYTHON_EXECUTABLE:PATH=${PYTHON} \
- -DPython_EXECUTABLE:PATH=${PYTHON} \
- -DPython3_EXECUTABLE:PATH=${PYTHON} \
- -DLIB_SUFFIX=${@d.getVar('baselib').replace('lib', '')} \
- -DCMAKE_INSTALL_SO_NO_EXE=0 \
- -DCMAKE_TOOLCHAIN_FILE=${WORKDIR}/toolchain.cmake \
- -DCMAKE_NO_SYSTEM_FROM_IMPORTED=1 \
- -DCMAKE_EXPORT_NO_PACKAGE_REGISTRY=ON \
- -DFETCHCONTENT_FULLY_DISCONNECTED=ON \
- ${EXTRA_OECMAKE} \
- -Wno-dev
-}
-
-# To disable verbose cmake logs for a given recipe or globally config metadata e.g. local.conf
-# add following
-#
-# CMAKE_VERBOSE = ""
-#
-
-CMAKE_VERBOSE ??= "VERBOSE=1"
-
-# Then run do_compile again
-cmake_runcmake_build() {
- bbnote ${DESTDIR:+DESTDIR=${DESTDIR} }${CMAKE_VERBOSE} cmake --build '${B}' "$@" -- ${EXTRA_OECMAKE_BUILD}
- eval ${DESTDIR:+DESTDIR=${DESTDIR} }${CMAKE_VERBOSE} cmake --build '${B}' "$@" -- ${EXTRA_OECMAKE_BUILD}
-}
-
-cmake_do_compile() {
- cmake_runcmake_build --target ${OECMAKE_TARGET_COMPILE}
-}
-
-cmake_do_install() {
- DESTDIR='${D}' cmake_runcmake_build --target ${OECMAKE_TARGET_INSTALL}
-}
-
-EXPORT_FUNCTIONS do_configure do_compile do_install do_generate_toolchain_file
diff --git a/meta/classes/copyleft_compliance.bbclass b/meta/classes/copyleft_compliance.bbclass
index eabf12ce7a..9ff9956fe9 100644
--- a/meta/classes/copyleft_compliance.bbclass
+++ b/meta/classes/copyleft_compliance.bbclass
@@ -1,3 +1,9 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
# Deploy sources for recipes for compliance with copyleft-style licenses
# Defaults to using symlinks, as it's a quick operation, and one can easily
# follow the links when making use of the files (e.g. tar with the -h arg).
diff --git a/meta/classes/copyleft_filter.bbclass b/meta/classes/copyleft_filter.bbclass
index c36bce431a..83cd90060d 100644
--- a/meta/classes/copyleft_filter.bbclass
+++ b/meta/classes/copyleft_filter.bbclass
@@ -1,10 +1,14 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
# Filter the license, the copyleft_should_include returns True for the
# COPYLEFT_LICENSE_INCLUDE recipe, and False for the
# COPYLEFT_LICENSE_EXCLUDE.
#
# By default, includes all GPL and LGPL, and excludes CLOSED and Proprietary.
-#
-# vi:sts=4:sw=4:et
COPYLEFT_LICENSE_INCLUDE ?= 'GPL* LGPL* AGPL*'
COPYLEFT_LICENSE_INCLUDE[type] = 'list'
diff --git a/meta/classes/create-spdx-2.2.bbclass b/meta/classes/create-spdx-2.2.bbclass
new file mode 100644
index 0000000000..486efadba9
--- /dev/null
+++ b/meta/classes/create-spdx-2.2.bbclass
@@ -0,0 +1,1158 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
+DEPLOY_DIR_SPDX ??= "${DEPLOY_DIR}/spdx"
+
+# The product name that the CVE database uses. Defaults to BPN, but may need to
+# be overriden per recipe (for example tiff.bb sets CVE_PRODUCT=libtiff).
+CVE_PRODUCT ??= "${BPN}"
+CVE_VERSION ??= "${PV}"
+
+SPDXDIR ??= "${WORKDIR}/spdx"
+SPDXDEPLOY = "${SPDXDIR}/deploy"
+SPDXWORK = "${SPDXDIR}/work"
+SPDXIMAGEWORK = "${SPDXDIR}/image-work"
+SPDXSDKWORK = "${SPDXDIR}/sdk-work"
+SPDXDEPS = "${SPDXDIR}/deps.json"
+
+SPDX_TOOL_NAME ??= "oe-spdx-creator"
+SPDX_TOOL_VERSION ??= "1.0"
+
+SPDXRUNTIMEDEPLOY = "${SPDXDIR}/runtime-deploy"
+
+SPDX_INCLUDE_SOURCES ??= "0"
+SPDX_ARCHIVE_SOURCES ??= "0"
+SPDX_ARCHIVE_PACKAGED ??= "0"
+
+SPDX_UUID_NAMESPACE ??= "sbom.openembedded.org"
+SPDX_NAMESPACE_PREFIX ??= "http://spdx.org/spdxdoc"
+SPDX_PRETTY ??= "0"
+
+SPDX_LICENSES ??= "${COREBASE}/meta/files/spdx-licenses.json"
+
+SPDX_CUSTOM_ANNOTATION_VARS ??= ""
+
+SPDX_ORG ??= "OpenEmbedded ()"
+SPDX_SUPPLIER ??= "Organization: ${SPDX_ORG}"
+SPDX_SUPPLIER[doc] = "The SPDX PackageSupplier field for SPDX packages created from \
+ this recipe. For SPDX documents create using this class during the build, this \
+ is the contact information for the person or organization who is doing the \
+ build."
+
+def extract_licenses(filename):
+ import re
+
+ lic_regex = re.compile(rb'^\W*SPDX-License-Identifier:\s*([ \w\d.()+-]+?)(?:\s+\W*)?$', re.MULTILINE)
+
+ try:
+ with open(filename, 'rb') as f:
+ size = min(15000, os.stat(filename).st_size)
+ txt = f.read(size)
+ licenses = re.findall(lic_regex, txt)
+ if licenses:
+ ascii_licenses = [lic.decode('ascii') for lic in licenses]
+ return ascii_licenses
+ except Exception as e:
+ bb.warn(f"Exception reading {filename}: {e}")
+ return None
+
+def get_doc_namespace(d, doc):
+ import uuid
+ namespace_uuid = uuid.uuid5(uuid.NAMESPACE_DNS, d.getVar("SPDX_UUID_NAMESPACE"))
+ return "%s/%s-%s" % (d.getVar("SPDX_NAMESPACE_PREFIX"), doc.name, str(uuid.uuid5(namespace_uuid, doc.name)))
+
+def create_annotation(d, comment):
+ from datetime import datetime, timezone
+
+ creation_time = datetime.now(tz=timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ")
+ annotation = oe.spdx.SPDXAnnotation()
+ annotation.annotationDate = creation_time
+ annotation.annotationType = "OTHER"
+ annotation.annotator = "Tool: %s - %s" % (d.getVar("SPDX_TOOL_NAME"), d.getVar("SPDX_TOOL_VERSION"))
+ annotation.comment = comment
+ return annotation
+
+def recipe_spdx_is_native(d, recipe):
+ return any(a.annotationType == "OTHER" and
+ a.annotator == "Tool: %s - %s" % (d.getVar("SPDX_TOOL_NAME"), d.getVar("SPDX_TOOL_VERSION")) and
+ a.comment == "isNative" for a in recipe.annotations)
+
+def is_work_shared_spdx(d):
+ return bb.data.inherits_class('kernel', d) or ('work-shared' in d.getVar('WORKDIR'))
+
+def get_json_indent(d):
+ if d.getVar("SPDX_PRETTY") == "1":
+ return 2
+ return None
+
+python() {
+ import json
+ if d.getVar("SPDX_LICENSE_DATA"):
+ return
+
+ with open(d.getVar("SPDX_LICENSES"), "r") as f:
+ data = json.load(f)
+ # Transform the license array to a dictionary
+ data["licenses"] = {l["licenseId"]: l for l in data["licenses"]}
+ d.setVar("SPDX_LICENSE_DATA", data)
+}
+
+def convert_license_to_spdx(lic, document, d, existing={}):
+ from pathlib import Path
+ import oe.spdx
+
+ license_data = d.getVar("SPDX_LICENSE_DATA")
+ extracted = {}
+
+ def add_extracted_license(ident, name):
+ nonlocal document
+
+ if name in extracted:
+ return
+
+ extracted_info = oe.spdx.SPDXExtractedLicensingInfo()
+ extracted_info.name = name
+ extracted_info.licenseId = ident
+ extracted_info.extractedText = None
+
+ if name == "PD":
+ # Special-case this.
+ extracted_info.extractedText = "Software released to the public domain"
+ else:
+ # Seach for the license in COMMON_LICENSE_DIR and LICENSE_PATH
+ for directory in [d.getVar('COMMON_LICENSE_DIR')] + (d.getVar('LICENSE_PATH') or '').split():
+ try:
+ with (Path(directory) / name).open(errors="replace") as f:
+ extracted_info.extractedText = f.read()
+ break
+ except FileNotFoundError:
+ pass
+ if extracted_info.extractedText is None:
+ # If it's not SPDX or PD, then NO_GENERIC_LICENSE must be set
+ filename = d.getVarFlag('NO_GENERIC_LICENSE', name)
+ if filename:
+ filename = d.expand("${S}/" + filename)
+ with open(filename, errors="replace") as f:
+ extracted_info.extractedText = f.read()
+ else:
+ bb.fatal("Cannot find any text for license %s" % name)
+
+ extracted[name] = extracted_info
+ document.hasExtractedLicensingInfos.append(extracted_info)
+
+ def convert(l):
+ if l == "(" or l == ")":
+ return l
+
+ if l == "&":
+ return "AND"
+
+ if l == "|":
+ return "OR"
+
+ if l == "CLOSED":
+ return "NONE"
+
+ spdx_license = d.getVarFlag("SPDXLICENSEMAP", l) or l
+ if spdx_license in license_data["licenses"]:
+ return spdx_license
+
+ try:
+ spdx_license = existing[l]
+ except KeyError:
+ spdx_license = "LicenseRef-" + l
+ add_extracted_license(spdx_license, l)
+
+ return spdx_license
+
+ lic_split = lic.replace("(", " ( ").replace(")", " ) ").replace("|", " | ").replace("&", " & ").split()
+
+ return ' '.join(convert(l) for l in lic_split)
+
+def process_sources(d):
+ pn = d.getVar('PN')
+ assume_provided = (d.getVar("ASSUME_PROVIDED") or "").split()
+ if pn in assume_provided:
+ for p in d.getVar("PROVIDES").split():
+ if p != pn:
+ pn = p
+ break
+
+ # glibc-locale: do_fetch, do_unpack and do_patch tasks have been deleted,
+ # so avoid archiving source here.
+ if pn.startswith('glibc-locale'):
+ return False
+ if d.getVar('PN') == "libtool-cross":
+ return False
+ if d.getVar('PN') == "libgcc-initial":
+ return False
+ if d.getVar('PN') == "shadow-sysroot":
+ return False
+
+ # We just archive gcc-source for all the gcc related recipes
+ if d.getVar('BPN') in ['gcc', 'libgcc']:
+ bb.debug(1, 'spdx: There is bug in scan of %s is, do nothing' % pn)
+ return False
+
+ return True
+
+
+def add_package_files(d, doc, spdx_pkg, topdir, get_spdxid, get_types, *, archive=None, ignore_dirs=[], ignore_top_level_dirs=[]):
+ from pathlib import Path
+ import oe.spdx
+ import hashlib
+
+ source_date_epoch = d.getVar("SOURCE_DATE_EPOCH")
+ if source_date_epoch:
+ source_date_epoch = int(source_date_epoch)
+
+ sha1s = []
+ spdx_files = []
+
+ file_counter = 1
+ for subdir, dirs, files in os.walk(topdir):
+ dirs[:] = [d for d in dirs if d not in ignore_dirs]
+ if subdir == str(topdir):
+ dirs[:] = [d for d in dirs if d not in ignore_top_level_dirs]
+
+ for file in files:
+ filepath = Path(subdir) / file
+ filename = str(filepath.relative_to(topdir))
+
+ if not filepath.is_symlink() and filepath.is_file():
+ spdx_file = oe.spdx.SPDXFile()
+ spdx_file.SPDXID = get_spdxid(file_counter)
+ for t in get_types(filepath):
+ spdx_file.fileTypes.append(t)
+ spdx_file.fileName = filename
+
+ if archive is not None:
+ with filepath.open("rb") as f:
+ info = archive.gettarinfo(fileobj=f)
+ info.name = filename
+ info.uid = 0
+ info.gid = 0
+ info.uname = "root"
+ info.gname = "root"
+
+ if source_date_epoch is not None and info.mtime > source_date_epoch:
+ info.mtime = source_date_epoch
+
+ archive.addfile(info, f)
+
+ sha1 = bb.utils.sha1_file(filepath)
+ sha1s.append(sha1)
+ spdx_file.checksums.append(oe.spdx.SPDXChecksum(
+ algorithm="SHA1",
+ checksumValue=sha1,
+ ))
+ spdx_file.checksums.append(oe.spdx.SPDXChecksum(
+ algorithm="SHA256",
+ checksumValue=bb.utils.sha256_file(filepath),
+ ))
+
+ if "SOURCE" in spdx_file.fileTypes:
+ extracted_lics = extract_licenses(filepath)
+ if extracted_lics:
+ spdx_file.licenseInfoInFiles = extracted_lics
+
+ doc.files.append(spdx_file)
+ doc.add_relationship(spdx_pkg, "CONTAINS", spdx_file)
+ spdx_pkg.hasFiles.append(spdx_file.SPDXID)
+
+ spdx_files.append(spdx_file)
+
+ file_counter += 1
+
+ sha1s.sort()
+ verifier = hashlib.sha1()
+ for v in sha1s:
+ verifier.update(v.encode("utf-8"))
+ spdx_pkg.packageVerificationCode.packageVerificationCodeValue = verifier.hexdigest()
+
+ return spdx_files
+
+
+def add_package_sources_from_debug(d, package_doc, spdx_package, package, package_files, sources):
+ from pathlib import Path
+ import hashlib
+ import oe.packagedata
+ import oe.spdx
+
+ debug_search_paths = [
+ Path(d.getVar('PKGD')),
+ Path(d.getVar('STAGING_DIR_TARGET')),
+ Path(d.getVar('STAGING_DIR_NATIVE')),
+ Path(d.getVar('STAGING_KERNEL_DIR')),
+ ]
+
+ pkg_data = oe.packagedata.read_subpkgdata_extended(package, d)
+
+ if pkg_data is None:
+ return
+
+ for file_path, file_data in pkg_data["files_info"].items():
+ if not "debugsrc" in file_data:
+ continue
+
+ for pkg_file in package_files:
+ if file_path.lstrip("/") == pkg_file.fileName.lstrip("/"):
+ break
+ else:
+ bb.fatal("No package file found for %s in %s; SPDX found: %s" % (str(file_path), package,
+ " ".join(p.fileName for p in package_files)))
+ continue
+
+ for debugsrc in file_data["debugsrc"]:
+ ref_id = "NOASSERTION"
+ for search in debug_search_paths:
+ if debugsrc.startswith("/usr/src/kernel"):
+ debugsrc_path = search / debugsrc.replace('/usr/src/kernel/', '')
+ else:
+ debugsrc_path = search / debugsrc.lstrip("/")
+ if not debugsrc_path.exists():
+ continue
+
+ file_sha256 = bb.utils.sha256_file(debugsrc_path)
+
+ if file_sha256 in sources:
+ source_file = sources[file_sha256]
+
+ doc_ref = package_doc.find_external_document_ref(source_file.doc.documentNamespace)
+ if doc_ref is None:
+ doc_ref = oe.spdx.SPDXExternalDocumentRef()
+ doc_ref.externalDocumentId = "DocumentRef-dependency-" + source_file.doc.name
+ doc_ref.spdxDocument = source_file.doc.documentNamespace
+ doc_ref.checksum.algorithm = "SHA1"
+ doc_ref.checksum.checksumValue = source_file.doc_sha1
+ package_doc.externalDocumentRefs.append(doc_ref)
+
+ ref_id = "%s:%s" % (doc_ref.externalDocumentId, source_file.file.SPDXID)
+ else:
+ bb.debug(1, "Debug source %s with SHA256 %s not found in any dependency" % (str(debugsrc_path), file_sha256))
+ break
+ else:
+ bb.debug(1, "Debug source %s not found" % debugsrc)
+
+ package_doc.add_relationship(pkg_file, "GENERATED_FROM", ref_id, comment=debugsrc)
+
+add_package_sources_from_debug[vardepsexclude] += "STAGING_KERNEL_DIR"
+
+def collect_dep_recipes(d, doc, spdx_recipe):
+ import json
+ from pathlib import Path
+ import oe.sbom
+ import oe.spdx
+
+ deploy_dir_spdx = Path(d.getVar("DEPLOY_DIR_SPDX"))
+ spdx_deps_file = Path(d.getVar("SPDXDEPS"))
+ package_archs = d.getVar("SSTATE_ARCHS").split()
+ package_archs.reverse()
+
+ dep_recipes = []
+
+ with spdx_deps_file.open("r") as f:
+ deps = json.load(f)
+
+ for dep_pn, dep_hashfn in deps:
+ dep_recipe_path = oe.sbom.doc_find_by_hashfn(deploy_dir_spdx, package_archs, "recipe-" + dep_pn, dep_hashfn)
+ if not dep_recipe_path:
+ bb.fatal("Cannot find any SPDX file for recipe %s, %s" % (dep_pn, dep_hashfn))
+
+ spdx_dep_doc, spdx_dep_sha1 = oe.sbom.read_doc(dep_recipe_path)
+
+ for pkg in spdx_dep_doc.packages:
+ if pkg.name == dep_pn:
+ spdx_dep_recipe = pkg
+ break
+ else:
+ continue
+
+ dep_recipes.append(oe.sbom.DepRecipe(spdx_dep_doc, spdx_dep_sha1, spdx_dep_recipe))
+
+ dep_recipe_ref = oe.spdx.SPDXExternalDocumentRef()
+ dep_recipe_ref.externalDocumentId = "DocumentRef-dependency-" + spdx_dep_doc.name
+ dep_recipe_ref.spdxDocument = spdx_dep_doc.documentNamespace
+ dep_recipe_ref.checksum.algorithm = "SHA1"
+ dep_recipe_ref.checksum.checksumValue = spdx_dep_sha1
+
+ doc.externalDocumentRefs.append(dep_recipe_ref)
+
+ doc.add_relationship(
+ "%s:%s" % (dep_recipe_ref.externalDocumentId, spdx_dep_recipe.SPDXID),
+ "BUILD_DEPENDENCY_OF",
+ spdx_recipe
+ )
+
+ return dep_recipes
+
+collect_dep_recipes[vardepsexclude] = "SSTATE_ARCHS"
+
+def collect_dep_sources(d, dep_recipes):
+ import oe.sbom
+
+ sources = {}
+ for dep in dep_recipes:
+ # Don't collect sources from native recipes as they
+ # match non-native sources also.
+ if recipe_spdx_is_native(d, dep.recipe):
+ continue
+ recipe_files = set(dep.recipe.hasFiles)
+
+ for spdx_file in dep.doc.files:
+ if spdx_file.SPDXID not in recipe_files:
+ continue
+
+ if "SOURCE" in spdx_file.fileTypes:
+ for checksum in spdx_file.checksums:
+ if checksum.algorithm == "SHA256":
+ sources[checksum.checksumValue] = oe.sbom.DepSource(dep.doc, dep.doc_sha1, dep.recipe, spdx_file)
+ break
+
+ return sources
+
+def add_download_packages(d, doc, recipe):
+ import os.path
+ from bb.fetch2 import decodeurl, CHECKSUM_LIST
+ import bb.process
+ import oe.spdx
+ import oe.sbom
+
+ for download_idx, src_uri in enumerate(d.getVar('SRC_URI').split()):
+ f = bb.fetch2.FetchData(src_uri, d)
+
+ for name in f.names:
+ package = oe.spdx.SPDXPackage()
+ package.name = "%s-source-%d" % (d.getVar("PN"), download_idx + 1)
+ package.SPDXID = oe.sbom.get_download_spdxid(d, download_idx + 1)
+
+ if f.type == "file":
+ continue
+
+ uri = f.type
+ proto = getattr(f, "proto", None)
+ if proto is not None:
+ uri = uri + "+" + proto
+ uri = uri + "://" + f.host + f.path
+
+ if f.method.supports_srcrev():
+ uri = uri + "@" + f.revisions[name]
+
+ if f.method.supports_checksum(f):
+ for checksum_id in CHECKSUM_LIST:
+ if checksum_id.upper() not in oe.spdx.SPDXPackage.ALLOWED_CHECKSUMS:
+ continue
+
+ expected_checksum = getattr(f, "%s_expected" % checksum_id)
+ if expected_checksum is None:
+ continue
+
+ c = oe.spdx.SPDXChecksum()
+ c.algorithm = checksum_id.upper()
+ c.checksumValue = expected_checksum
+ package.checksums.append(c)
+
+ package.downloadLocation = uri
+ doc.packages.append(package)
+ doc.add_relationship(doc, "DESCRIBES", package)
+ # In the future, we might be able to do more fancy dependencies,
+ # but this should be sufficient for now
+ doc.add_relationship(package, "BUILD_DEPENDENCY_OF", recipe)
+
+def collect_direct_deps(d, dep_task):
+ current_task = "do_" + d.getVar("BB_CURRENTTASK")
+ pn = d.getVar("PN")
+
+ taskdepdata = d.getVar("BB_TASKDEPDATA", False)
+
+ for this_dep in taskdepdata.values():
+ if this_dep[0] == pn and this_dep[1] == current_task:
+ break
+ else:
+ bb.fatal(f"Unable to find this {pn}:{current_task} in taskdepdata")
+
+ deps = set()
+ for dep_name in this_dep[3]:
+ dep_data = taskdepdata[dep_name]
+ if dep_data[1] == dep_task and dep_data[0] != pn:
+ deps.add((dep_data[0], dep_data[7]))
+
+ return sorted(deps)
+
+collect_direct_deps[vardepsexclude] += "BB_TASKDEPDATA"
+collect_direct_deps[vardeps] += "DEPENDS"
+
+python do_collect_spdx_deps() {
+ # This task calculates the build time dependencies of the recipe, and is
+ # required because while a task can deptask on itself, those dependencies
+ # do not show up in BB_TASKDEPDATA. To work around that, this task does the
+ # deptask on do_create_spdx and writes out the dependencies it finds, then
+ # do_create_spdx reads in the found dependencies when writing the actual
+ # SPDX document
+ import json
+ from pathlib import Path
+
+ spdx_deps_file = Path(d.getVar("SPDXDEPS"))
+
+ deps = collect_direct_deps(d, "do_create_spdx")
+
+ with spdx_deps_file.open("w") as f:
+ json.dump(deps, f)
+}
+# NOTE: depending on do_unpack is a hack that is necessary to get it's dependencies for archive the source
+addtask do_collect_spdx_deps after do_unpack
+do_collect_spdx_deps[depends] += "${PATCHDEPENDENCY}"
+do_collect_spdx_deps[deptask] = "do_create_spdx"
+do_collect_spdx_deps[dirs] = "${SPDXDIR}"
+
+python do_create_spdx() {
+ from datetime import datetime, timezone
+ import oe.sbom
+ import oe.spdx
+ import uuid
+ from pathlib import Path
+ from contextlib import contextmanager
+ import oe.cve_check
+
+ @contextmanager
+ def optional_tarfile(name, guard, mode="w"):
+ import tarfile
+ import bb.compress.zstd
+
+ num_threads = int(d.getVar("BB_NUMBER_THREADS"))
+
+ if guard:
+ name.parent.mkdir(parents=True, exist_ok=True)
+ with bb.compress.zstd.open(name, mode=mode + "b", num_threads=num_threads) as f:
+ with tarfile.open(fileobj=f, mode=mode + "|") as tf:
+ yield tf
+ else:
+ yield None
+
+
+ deploy_dir_spdx = Path(d.getVar("DEPLOY_DIR_SPDX"))
+ spdx_workdir = Path(d.getVar("SPDXWORK"))
+ include_sources = d.getVar("SPDX_INCLUDE_SOURCES") == "1"
+ archive_sources = d.getVar("SPDX_ARCHIVE_SOURCES") == "1"
+ archive_packaged = d.getVar("SPDX_ARCHIVE_PACKAGED") == "1"
+ pkg_arch = d.getVar("SSTATE_PKGARCH")
+
+ creation_time = datetime.now(tz=timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ")
+
+ doc = oe.spdx.SPDXDocument()
+
+ doc.name = "recipe-" + d.getVar("PN")
+ doc.documentNamespace = get_doc_namespace(d, doc)
+ doc.creationInfo.created = creation_time
+ doc.creationInfo.comment = "This document was created by analyzing recipe files during the build."
+ doc.creationInfo.licenseListVersion = d.getVar("SPDX_LICENSE_DATA")["licenseListVersion"]
+ doc.creationInfo.creators.append("Tool: OpenEmbedded Core create-spdx.bbclass")
+ doc.creationInfo.creators.append("Organization: %s" % d.getVar("SPDX_ORG"))
+ doc.creationInfo.creators.append("Person: N/A ()")
+
+ recipe = oe.spdx.SPDXPackage()
+ recipe.name = d.getVar("PN")
+ recipe.versionInfo = d.getVar("PV")
+ recipe.SPDXID = oe.sbom.get_recipe_spdxid(d)
+ recipe.supplier = d.getVar("SPDX_SUPPLIER")
+ if bb.data.inherits_class("native", d) or bb.data.inherits_class("cross", d):
+ recipe.annotations.append(create_annotation(d, "isNative"))
+
+ homepage = d.getVar("HOMEPAGE")
+ if homepage:
+ recipe.homepage = homepage
+
+ license = d.getVar("LICENSE")
+ if license:
+ recipe.licenseDeclared = convert_license_to_spdx(license, doc, d)
+
+ summary = d.getVar("SUMMARY")
+ if summary:
+ recipe.summary = summary
+
+ description = d.getVar("DESCRIPTION")
+ if description:
+ recipe.description = description
+
+ if d.getVar("SPDX_CUSTOM_ANNOTATION_VARS"):
+ for var in d.getVar('SPDX_CUSTOM_ANNOTATION_VARS').split():
+ recipe.annotations.append(create_annotation(d, var + "=" + d.getVar(var)))
+
+ # Some CVEs may be patched during the build process without incrementing the version number,
+ # so querying for CVEs based on the CPE id can lead to false positives. To account for this,
+ # save the CVEs fixed by patches to source information field in the SPDX.
+ patched_cves = oe.cve_check.get_patched_cves(d)
+ patched_cves = list(patched_cves)
+ patched_cves = ' '.join(patched_cves)
+ if patched_cves:
+ recipe.sourceInfo = "CVEs fixed: " + patched_cves
+
+ cpe_ids = oe.cve_check.get_cpe_ids(d.getVar("CVE_PRODUCT"), d.getVar("CVE_VERSION"))
+ if cpe_ids:
+ for cpe_id in cpe_ids:
+ cpe = oe.spdx.SPDXExternalReference()
+ cpe.referenceCategory = "SECURITY"
+ cpe.referenceType = "http://spdx.org/rdf/references/cpe23Type"
+ cpe.referenceLocator = cpe_id
+ recipe.externalRefs.append(cpe)
+
+ doc.packages.append(recipe)
+ doc.add_relationship(doc, "DESCRIBES", recipe)
+
+ add_download_packages(d, doc, recipe)
+
+ if process_sources(d) and include_sources:
+ recipe_archive = deploy_dir_spdx / "recipes" / (doc.name + ".tar.zst")
+ with optional_tarfile(recipe_archive, archive_sources) as archive:
+ spdx_get_src(d)
+
+ add_package_files(
+ d,
+ doc,
+ recipe,
+ spdx_workdir,
+ lambda file_counter: "SPDXRef-SourceFile-%s-%d" % (d.getVar("PN"), file_counter),
+ lambda filepath: ["SOURCE"],
+ ignore_dirs=[".git"],
+ ignore_top_level_dirs=["temp"],
+ archive=archive,
+ )
+
+ if archive is not None:
+ recipe.packageFileName = str(recipe_archive.name)
+
+ dep_recipes = collect_dep_recipes(d, doc, recipe)
+
+ doc_sha1 = oe.sbom.write_doc(d, doc, pkg_arch, "recipes", indent=get_json_indent(d))
+ dep_recipes.append(oe.sbom.DepRecipe(doc, doc_sha1, recipe))
+
+ recipe_ref = oe.spdx.SPDXExternalDocumentRef()
+ recipe_ref.externalDocumentId = "DocumentRef-recipe-" + recipe.name
+ recipe_ref.spdxDocument = doc.documentNamespace
+ recipe_ref.checksum.algorithm = "SHA1"
+ recipe_ref.checksum.checksumValue = doc_sha1
+
+ sources = collect_dep_sources(d, dep_recipes)
+ found_licenses = {license.name:recipe_ref.externalDocumentId + ":" + license.licenseId for license in doc.hasExtractedLicensingInfos}
+
+ if not recipe_spdx_is_native(d, recipe):
+ bb.build.exec_func("read_subpackage_metadata", d)
+
+ pkgdest = Path(d.getVar("PKGDEST"))
+ for package in d.getVar("PACKAGES").split():
+ if not oe.packagedata.packaged(package, d):
+ continue
+
+ package_doc = oe.spdx.SPDXDocument()
+ pkg_name = d.getVar("PKG:%s" % package) or package
+ package_doc.name = pkg_name
+ package_doc.documentNamespace = get_doc_namespace(d, package_doc)
+ package_doc.creationInfo.created = creation_time
+ package_doc.creationInfo.comment = "This document was created by analyzing packages created during the build."
+ package_doc.creationInfo.licenseListVersion = d.getVar("SPDX_LICENSE_DATA")["licenseListVersion"]
+ package_doc.creationInfo.creators.append("Tool: OpenEmbedded Core create-spdx.bbclass")
+ package_doc.creationInfo.creators.append("Organization: %s" % d.getVar("SPDX_ORG"))
+ package_doc.creationInfo.creators.append("Person: N/A ()")
+ package_doc.externalDocumentRefs.append(recipe_ref)
+
+ package_license = d.getVar("LICENSE:%s" % package) or d.getVar("LICENSE")
+
+ spdx_package = oe.spdx.SPDXPackage()
+
+ spdx_package.SPDXID = oe.sbom.get_package_spdxid(pkg_name)
+ spdx_package.name = pkg_name
+ spdx_package.versionInfo = d.getVar("PV")
+ spdx_package.licenseDeclared = convert_license_to_spdx(package_license, package_doc, d, found_licenses)
+ spdx_package.supplier = d.getVar("SPDX_SUPPLIER")
+
+ package_doc.packages.append(spdx_package)
+
+ package_doc.add_relationship(spdx_package, "GENERATED_FROM", "%s:%s" % (recipe_ref.externalDocumentId, recipe.SPDXID))
+ package_doc.add_relationship(package_doc, "DESCRIBES", spdx_package)
+
+ package_archive = deploy_dir_spdx / "packages" / (package_doc.name + ".tar.zst")
+ with optional_tarfile(package_archive, archive_packaged) as archive:
+ package_files = add_package_files(
+ d,
+ package_doc,
+ spdx_package,
+ pkgdest / package,
+ lambda file_counter: oe.sbom.get_packaged_file_spdxid(pkg_name, file_counter),
+ lambda filepath: ["BINARY"],
+ ignore_top_level_dirs=['CONTROL', 'DEBIAN'],
+ archive=archive,
+ )
+
+ if archive is not None:
+ spdx_package.packageFileName = str(package_archive.name)
+
+ add_package_sources_from_debug(d, package_doc, spdx_package, package, package_files, sources)
+
+ oe.sbom.write_doc(d, package_doc, pkg_arch, "packages", indent=get_json_indent(d))
+}
+do_create_spdx[vardepsexclude] += "BB_NUMBER_THREADS"
+# NOTE: depending on do_unpack is a hack that is necessary to get it's dependencies for archive the source
+addtask do_create_spdx after do_package do_packagedata do_unpack do_collect_spdx_deps before do_populate_sdk do_build do_rm_work
+
+SSTATETASKS += "do_create_spdx"
+do_create_spdx[sstate-inputdirs] = "${SPDXDEPLOY}"
+do_create_spdx[sstate-outputdirs] = "${DEPLOY_DIR_SPDX}"
+
+python do_create_spdx_setscene () {
+ sstate_setscene(d)
+}
+addtask do_create_spdx_setscene
+
+do_create_spdx[dirs] = "${SPDXWORK}"
+do_create_spdx[cleandirs] = "${SPDXDEPLOY} ${SPDXWORK}"
+do_create_spdx[depends] += "${PATCHDEPENDENCY}"
+
+def collect_package_providers(d):
+ from pathlib import Path
+ import oe.sbom
+ import oe.spdx
+ import json
+
+ deploy_dir_spdx = Path(d.getVar("DEPLOY_DIR_SPDX"))
+
+ providers = {}
+
+ deps = collect_direct_deps(d, "do_create_spdx")
+ deps.append((d.getVar("PN"), d.getVar("BB_HASHFILENAME")))
+
+ for dep_pn, dep_hashfn in deps:
+ localdata = d
+ recipe_data = oe.packagedata.read_pkgdata(dep_pn, localdata)
+ if not recipe_data:
+ localdata = bb.data.createCopy(d)
+ localdata.setVar("PKGDATA_DIR", "${PKGDATA_DIR_SDK}")
+ recipe_data = oe.packagedata.read_pkgdata(dep_pn, localdata)
+
+ for pkg in recipe_data.get("PACKAGES", "").split():
+
+ pkg_data = oe.packagedata.read_subpkgdata_dict(pkg, localdata)
+ rprovides = set(n for n, _ in bb.utils.explode_dep_versions2(pkg_data.get("RPROVIDES", "")).items())
+ rprovides.add(pkg)
+
+ if "PKG" in pkg_data:
+ pkg = pkg_data["PKG"]
+ rprovides.add(pkg)
+
+ for r in rprovides:
+ providers[r] = (pkg, dep_hashfn)
+
+ return providers
+
+collect_package_providers[vardepsexclude] += "BB_TASKDEPDATA"
+
+python do_create_runtime_spdx() {
+ from datetime import datetime, timezone
+ import oe.sbom
+ import oe.spdx
+ import oe.packagedata
+ from pathlib import Path
+
+ deploy_dir_spdx = Path(d.getVar("DEPLOY_DIR_SPDX"))
+ spdx_deploy = Path(d.getVar("SPDXRUNTIMEDEPLOY"))
+ is_native = bb.data.inherits_class("native", d) or bb.data.inherits_class("cross", d)
+
+ creation_time = datetime.now(tz=timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ")
+
+ providers = collect_package_providers(d)
+ pkg_arch = d.getVar("SSTATE_PKGARCH")
+ package_archs = d.getVar("SSTATE_ARCHS").split()
+ package_archs.reverse()
+
+ if not is_native:
+ bb.build.exec_func("read_subpackage_metadata", d)
+
+ dep_package_cache = {}
+
+ pkgdest = Path(d.getVar("PKGDEST"))
+ for package in d.getVar("PACKAGES").split():
+ localdata = bb.data.createCopy(d)
+ pkg_name = d.getVar("PKG:%s" % package) or package
+ localdata.setVar("PKG", pkg_name)
+ localdata.setVar('OVERRIDES', d.getVar("OVERRIDES", False) + ":" + package)
+
+ if not oe.packagedata.packaged(package, localdata):
+ continue
+
+ pkg_spdx_path = oe.sbom.doc_path(deploy_dir_spdx, pkg_name, pkg_arch, "packages")
+
+ package_doc, package_doc_sha1 = oe.sbom.read_doc(pkg_spdx_path)
+
+ for p in package_doc.packages:
+ if p.name == pkg_name:
+ spdx_package = p
+ break
+ else:
+ bb.fatal("Package '%s' not found in %s" % (pkg_name, pkg_spdx_path))
+
+ runtime_doc = oe.spdx.SPDXDocument()
+ runtime_doc.name = "runtime-" + pkg_name
+ runtime_doc.documentNamespace = get_doc_namespace(localdata, runtime_doc)
+ runtime_doc.creationInfo.created = creation_time
+ runtime_doc.creationInfo.comment = "This document was created by analyzing package runtime dependencies."
+ runtime_doc.creationInfo.licenseListVersion = d.getVar("SPDX_LICENSE_DATA")["licenseListVersion"]
+ runtime_doc.creationInfo.creators.append("Tool: OpenEmbedded Core create-spdx.bbclass")
+ runtime_doc.creationInfo.creators.append("Organization: %s" % d.getVar("SPDX_ORG"))
+ runtime_doc.creationInfo.creators.append("Person: N/A ()")
+
+ package_ref = oe.spdx.SPDXExternalDocumentRef()
+ package_ref.externalDocumentId = "DocumentRef-package-" + package
+ package_ref.spdxDocument = package_doc.documentNamespace
+ package_ref.checksum.algorithm = "SHA1"
+ package_ref.checksum.checksumValue = package_doc_sha1
+
+ runtime_doc.externalDocumentRefs.append(package_ref)
+
+ runtime_doc.add_relationship(
+ runtime_doc.SPDXID,
+ "AMENDS",
+ "%s:%s" % (package_ref.externalDocumentId, package_doc.SPDXID)
+ )
+
+ deps = bb.utils.explode_dep_versions2(localdata.getVar("RDEPENDS") or "")
+ seen_deps = set()
+ for dep, _ in deps.items():
+ if dep in seen_deps:
+ continue
+
+ if dep not in providers:
+ continue
+
+ (dep, dep_hashfn) = providers[dep]
+
+ if not oe.packagedata.packaged(dep, localdata):
+ continue
+
+ dep_pkg_data = oe.packagedata.read_subpkgdata_dict(dep, d)
+ dep_pkg = dep_pkg_data["PKG"]
+
+ if dep in dep_package_cache:
+ (dep_spdx_package, dep_package_ref) = dep_package_cache[dep]
+ else:
+ dep_path = oe.sbom.doc_find_by_hashfn(deploy_dir_spdx, package_archs, dep_pkg, dep_hashfn)
+ if not dep_path:
+ bb.fatal("No SPDX file found for package %s, %s" % (dep_pkg, dep_hashfn))
+
+ spdx_dep_doc, spdx_dep_sha1 = oe.sbom.read_doc(dep_path)
+
+ for pkg in spdx_dep_doc.packages:
+ if pkg.name == dep_pkg:
+ dep_spdx_package = pkg
+ break
+ else:
+ bb.fatal("Package '%s' not found in %s" % (dep_pkg, dep_path))
+
+ dep_package_ref = oe.spdx.SPDXExternalDocumentRef()
+ dep_package_ref.externalDocumentId = "DocumentRef-runtime-dependency-" + spdx_dep_doc.name
+ dep_package_ref.spdxDocument = spdx_dep_doc.documentNamespace
+ dep_package_ref.checksum.algorithm = "SHA1"
+ dep_package_ref.checksum.checksumValue = spdx_dep_sha1
+
+ dep_package_cache[dep] = (dep_spdx_package, dep_package_ref)
+
+ runtime_doc.externalDocumentRefs.append(dep_package_ref)
+
+ runtime_doc.add_relationship(
+ "%s:%s" % (dep_package_ref.externalDocumentId, dep_spdx_package.SPDXID),
+ "RUNTIME_DEPENDENCY_OF",
+ "%s:%s" % (package_ref.externalDocumentId, spdx_package.SPDXID)
+ )
+ seen_deps.add(dep)
+
+ oe.sbom.write_doc(d, runtime_doc, pkg_arch, "runtime", spdx_deploy, indent=get_json_indent(d))
+}
+
+do_create_runtime_spdx[vardepsexclude] += "OVERRIDES SSTATE_ARCHS"
+
+addtask do_create_runtime_spdx after do_create_spdx before do_build do_rm_work
+SSTATETASKS += "do_create_runtime_spdx"
+do_create_runtime_spdx[sstate-inputdirs] = "${SPDXRUNTIMEDEPLOY}"
+do_create_runtime_spdx[sstate-outputdirs] = "${DEPLOY_DIR_SPDX}"
+
+python do_create_runtime_spdx_setscene () {
+ sstate_setscene(d)
+}
+addtask do_create_runtime_spdx_setscene
+
+do_create_runtime_spdx[dirs] = "${SPDXRUNTIMEDEPLOY}"
+do_create_runtime_spdx[cleandirs] = "${SPDXRUNTIMEDEPLOY}"
+do_create_runtime_spdx[rdeptask] = "do_create_spdx"
+
+def spdx_get_src(d):
+ """
+ save patched source of the recipe in SPDX_WORKDIR.
+ """
+ import shutil
+ spdx_workdir = d.getVar('SPDXWORK')
+ spdx_sysroot_native = d.getVar('STAGING_DIR_NATIVE')
+ pn = d.getVar('PN')
+
+ workdir = d.getVar("WORKDIR")
+
+ try:
+ # The kernel class functions require it to be on work-shared, so we dont change WORKDIR
+ if not is_work_shared_spdx(d):
+ # Change the WORKDIR to make do_unpack do_patch run in another dir.
+ d.setVar('WORKDIR', spdx_workdir)
+ # Restore the original path to recipe's native sysroot (it's relative to WORKDIR).
+ d.setVar('STAGING_DIR_NATIVE', spdx_sysroot_native)
+
+ # The changed 'WORKDIR' also caused 'B' changed, create dir 'B' for the
+ # possibly requiring of the following tasks (such as some recipes's
+ # do_patch required 'B' existed).
+ bb.utils.mkdirhier(d.getVar('B'))
+
+ bb.build.exec_func('do_unpack', d)
+ # Copy source of kernel to spdx_workdir
+ if is_work_shared_spdx(d):
+ share_src = d.getVar('WORKDIR')
+ d.setVar('WORKDIR', spdx_workdir)
+ d.setVar('STAGING_DIR_NATIVE', spdx_sysroot_native)
+ src_dir = spdx_workdir + "/" + d.getVar('PN')+ "-" + d.getVar('PV') + "-" + d.getVar('PR')
+ bb.utils.mkdirhier(src_dir)
+ if bb.data.inherits_class('kernel',d):
+ share_src = d.getVar('STAGING_KERNEL_DIR')
+ cmd_copy_share = "cp -rf " + share_src + "/* " + src_dir + "/"
+ cmd_copy_shared_res = os.popen(cmd_copy_share).read()
+ bb.note("cmd_copy_shared_result = " + cmd_copy_shared_res)
+
+ git_path = src_dir + "/.git"
+ if os.path.exists(git_path):
+ shutils.rmtree(git_path)
+
+ # Make sure gcc and kernel sources are patched only once
+ if not (d.getVar('SRC_URI') == "" or is_work_shared_spdx(d)):
+ bb.build.exec_func('do_patch', d)
+
+ # Some userland has no source.
+ if not os.path.exists( spdx_workdir ):
+ bb.utils.mkdirhier(spdx_workdir)
+ finally:
+ d.setVar("WORKDIR", workdir)
+
+spdx_get_src[vardepsexclude] += "STAGING_KERNEL_DIR"
+
+do_rootfs[recrdeptask] += "do_create_spdx do_create_runtime_spdx"
+do_rootfs[cleandirs] += "${SPDXIMAGEWORK}"
+
+ROOTFS_POSTUNINSTALL_COMMAND =+ "image_combine_spdx"
+
+do_populate_sdk[recrdeptask] += "do_create_spdx do_create_runtime_spdx"
+do_populate_sdk[cleandirs] += "${SPDXSDKWORK}"
+POPULATE_SDK_POST_HOST_COMMAND:append:task-populate-sdk = " sdk_host_combine_spdx"
+POPULATE_SDK_POST_TARGET_COMMAND:append:task-populate-sdk = " sdk_target_combine_spdx"
+
+python image_combine_spdx() {
+ import os
+ import oe.sbom
+ from pathlib import Path
+ from oe.rootfs import image_list_installed_packages
+
+ image_name = d.getVar("IMAGE_NAME")
+ image_link_name = d.getVar("IMAGE_LINK_NAME")
+ imgdeploydir = Path(d.getVar("IMGDEPLOYDIR"))
+ img_spdxid = oe.sbom.get_image_spdxid(image_name)
+ packages = image_list_installed_packages(d)
+
+ combine_spdx(d, image_name, imgdeploydir, img_spdxid, packages, Path(d.getVar("SPDXIMAGEWORK")))
+
+ def make_image_link(target_path, suffix):
+ if image_link_name:
+ link = imgdeploydir / (image_link_name + suffix)
+ if link != target_path:
+ link.symlink_to(os.path.relpath(target_path, link.parent))
+
+ spdx_tar_path = imgdeploydir / (image_name + ".spdx.tar.zst")
+ make_image_link(spdx_tar_path, ".spdx.tar.zst")
+}
+
+python sdk_host_combine_spdx() {
+ sdk_combine_spdx(d, "host")
+}
+
+python sdk_target_combine_spdx() {
+ sdk_combine_spdx(d, "target")
+}
+
+def sdk_combine_spdx(d, sdk_type):
+ import oe.sbom
+ from pathlib import Path
+ from oe.sdk import sdk_list_installed_packages
+
+ sdk_name = d.getVar("TOOLCHAIN_OUTPUTNAME") + "-" + sdk_type
+ sdk_deploydir = Path(d.getVar("SDKDEPLOYDIR"))
+ sdk_spdxid = oe.sbom.get_sdk_spdxid(sdk_name)
+ sdk_packages = sdk_list_installed_packages(d, sdk_type == "target")
+ combine_spdx(d, sdk_name, sdk_deploydir, sdk_spdxid, sdk_packages, Path(d.getVar('SPDXSDKWORK')))
+
+def combine_spdx(d, rootfs_name, rootfs_deploydir, rootfs_spdxid, packages, spdx_workdir):
+ import os
+ import oe.spdx
+ import oe.sbom
+ import io
+ import json
+ from datetime import timezone, datetime
+ from pathlib import Path
+ import tarfile
+ import bb.compress.zstd
+
+ providers = collect_package_providers(d)
+ package_archs = d.getVar("SSTATE_ARCHS").split()
+ package_archs.reverse()
+
+ creation_time = datetime.now(tz=timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ")
+ deploy_dir_spdx = Path(d.getVar("DEPLOY_DIR_SPDX"))
+ source_date_epoch = d.getVar("SOURCE_DATE_EPOCH")
+
+ doc = oe.spdx.SPDXDocument()
+ doc.name = rootfs_name
+ doc.documentNamespace = get_doc_namespace(d, doc)
+ doc.creationInfo.created = creation_time
+ doc.creationInfo.comment = "This document was created by analyzing the source of the Yocto recipe during the build."
+ doc.creationInfo.licenseListVersion = d.getVar("SPDX_LICENSE_DATA")["licenseListVersion"]
+ doc.creationInfo.creators.append("Tool: OpenEmbedded Core create-spdx.bbclass")
+ doc.creationInfo.creators.append("Organization: %s" % d.getVar("SPDX_ORG"))
+ doc.creationInfo.creators.append("Person: N/A ()")
+
+ image = oe.spdx.SPDXPackage()
+ image.name = d.getVar("PN")
+ image.versionInfo = d.getVar("PV")
+ image.SPDXID = rootfs_spdxid
+ image.supplier = d.getVar("SPDX_SUPPLIER")
+
+ doc.packages.append(image)
+
+ for name in sorted(packages.keys()):
+ if name not in providers:
+ bb.fatal("Unable to find SPDX provider for '%s'" % name)
+
+ pkg_name, pkg_hashfn = providers[name]
+
+ pkg_spdx_path = oe.sbom.doc_find_by_hashfn(deploy_dir_spdx, package_archs, pkg_name, pkg_hashfn)
+ if not pkg_spdx_path:
+ bb.fatal("No SPDX file found for package %s, %s" % (pkg_name, pkg_hashfn))
+
+ pkg_doc, pkg_doc_sha1 = oe.sbom.read_doc(pkg_spdx_path)
+
+ for p in pkg_doc.packages:
+ if p.name == name:
+ pkg_ref = oe.spdx.SPDXExternalDocumentRef()
+ pkg_ref.externalDocumentId = "DocumentRef-%s" % pkg_doc.name
+ pkg_ref.spdxDocument = pkg_doc.documentNamespace
+ pkg_ref.checksum.algorithm = "SHA1"
+ pkg_ref.checksum.checksumValue = pkg_doc_sha1
+
+ doc.externalDocumentRefs.append(pkg_ref)
+ doc.add_relationship(image, "CONTAINS", "%s:%s" % (pkg_ref.externalDocumentId, p.SPDXID))
+ break
+ else:
+ bb.fatal("Unable to find package with name '%s' in SPDX file %s" % (name, pkg_spdx_path))
+
+ runtime_spdx_path = oe.sbom.doc_find_by_hashfn(deploy_dir_spdx, package_archs, "runtime-" + name, pkg_hashfn)
+ if not runtime_spdx_path:
+ bb.fatal("No runtime SPDX document found for %s, %s" % (name, pkg_hashfn))
+
+ runtime_doc, runtime_doc_sha1 = oe.sbom.read_doc(runtime_spdx_path)
+
+ runtime_ref = oe.spdx.SPDXExternalDocumentRef()
+ runtime_ref.externalDocumentId = "DocumentRef-%s" % runtime_doc.name
+ runtime_ref.spdxDocument = runtime_doc.documentNamespace
+ runtime_ref.checksum.algorithm = "SHA1"
+ runtime_ref.checksum.checksumValue = runtime_doc_sha1
+
+ # "OTHER" isn't ideal here, but I can't find a relationship that makes sense
+ doc.externalDocumentRefs.append(runtime_ref)
+ doc.add_relationship(
+ image,
+ "OTHER",
+ "%s:%s" % (runtime_ref.externalDocumentId, runtime_doc.SPDXID),
+ comment="Runtime dependencies for %s" % name
+ )
+ bb.utils.mkdirhier(spdx_workdir)
+ image_spdx_path = spdx_workdir / (rootfs_name + ".spdx.json")
+
+ with image_spdx_path.open("wb") as f:
+ doc.to_json(f, sort_keys=True, indent=get_json_indent(d))
+
+ num_threads = int(d.getVar("BB_NUMBER_THREADS"))
+
+ visited_docs = set()
+
+ index = {"documents": []}
+
+ spdx_tar_path = rootfs_deploydir / (rootfs_name + ".spdx.tar.zst")
+ with bb.compress.zstd.open(spdx_tar_path, "w", num_threads=num_threads) as f:
+ with tarfile.open(fileobj=f, mode="w|") as tar:
+ def collect_spdx_document(path):
+ nonlocal tar
+ nonlocal deploy_dir_spdx
+ nonlocal source_date_epoch
+ nonlocal index
+
+ if path in visited_docs:
+ return
+
+ visited_docs.add(path)
+
+ with path.open("rb") as f:
+ doc, sha1 = oe.sbom.read_doc(f)
+ f.seek(0)
+
+ if doc.documentNamespace in visited_docs:
+ return
+
+ bb.note("Adding SPDX document %s" % path)
+ visited_docs.add(doc.documentNamespace)
+ info = tar.gettarinfo(fileobj=f)
+
+ info.name = doc.name + ".spdx.json"
+ info.uid = 0
+ info.gid = 0
+ info.uname = "root"
+ info.gname = "root"
+
+ if source_date_epoch is not None and info.mtime > int(source_date_epoch):
+ info.mtime = int(source_date_epoch)
+
+ tar.addfile(info, f)
+
+ index["documents"].append({
+ "filename": info.name,
+ "documentNamespace": doc.documentNamespace,
+ "sha1": sha1,
+ })
+
+ for ref in doc.externalDocumentRefs:
+ ref_path = oe.sbom.doc_find_by_namespace(deploy_dir_spdx, package_archs, ref.spdxDocument)
+ if not ref_path:
+ bb.fatal("Cannot find any SPDX file for document %s" % ref.spdxDocument)
+ collect_spdx_document(ref_path)
+
+ collect_spdx_document(image_spdx_path)
+
+ index["documents"].sort(key=lambda x: x["filename"])
+
+ index_str = io.BytesIO(json.dumps(
+ index,
+ sort_keys=True,
+ indent=get_json_indent(d),
+ ).encode("utf-8"))
+
+ info = tarfile.TarInfo()
+ info.name = "index.json"
+ info.size = len(index_str.getvalue())
+ info.uid = 0
+ info.gid = 0
+ info.uname = "root"
+ info.gname = "root"
+
+ tar.addfile(info, fileobj=index_str)
+
+combine_spdx[vardepsexclude] += "BB_NUMBER_THREADS SSTATE_ARCHS"
diff --git a/meta/classes/create-spdx.bbclass b/meta/classes/create-spdx.bbclass
index 37b6b569a1..19c6c0ff0b 100644
--- a/meta/classes/create-spdx.bbclass
+++ b/meta/classes/create-spdx.bbclass
@@ -1,1013 +1,8 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: GPL-2.0-only
#
-
-DEPLOY_DIR_SPDX ??= "${DEPLOY_DIR}/spdx/${MACHINE}"
-
-# The product name that the CVE database uses. Defaults to BPN, but may need to
-# be overriden per recipe (for example tiff.bb sets CVE_PRODUCT=libtiff).
-CVE_PRODUCT ??= "${BPN}"
-CVE_VERSION ??= "${PV}"
-
-SPDXDIR ??= "${WORKDIR}/spdx"
-SPDXDEPLOY = "${SPDXDIR}/deploy"
-SPDXWORK = "${SPDXDIR}/work"
-
-SPDX_TOOL_NAME ??= "oe-spdx-creator"
-SPDX_TOOL_VERSION ??= "1.0"
-
-SPDXRUNTIMEDEPLOY = "${SPDXDIR}/runtime-deploy"
-
-SPDX_INCLUDE_SOURCES ??= "0"
-SPDX_INCLUDE_PACKAGED ??= "0"
-SPDX_ARCHIVE_SOURCES ??= "0"
-SPDX_ARCHIVE_PACKAGED ??= "0"
-
-SPDX_UUID_NAMESPACE ??= "sbom.openembedded.org"
-SPDX_NAMESPACE_PREFIX ??= "http://spdx.org/spdxdoc"
-
-SPDX_LICENSES ??= "${COREBASE}/meta/files/spdx-licenses.json"
-
-SPDX_ORG ??= "OpenEmbedded ()"
-SPDX_SUPPLIER ??= "Organization: ${SPDX_ORG}"
-SPDX_SUPPLIER[doc] = "The SPDX PackageSupplier field for SPDX packages created from \
- this recipe. For SPDX documents create using this class during the build, this \
- is the contact information for the person or organization who is doing the \
- build."
-
-def extract_licenses(filename):
- import re
-
- lic_regex = re.compile(rb'^\W*SPDX-License-Identifier:\s*([ \w\d.()+-]+?)(?:\s+\W*)?$', re.MULTILINE)
-
- try:
- with open(filename, 'rb') as f:
- size = min(15000, os.stat(filename).st_size)
- txt = f.read(size)
- licenses = re.findall(lic_regex, txt)
- if licenses:
- ascii_licenses = [lic.decode('ascii') for lic in licenses]
- return ascii_licenses
- except Exception as e:
- bb.warn(f"Exception reading {filename}: {e}")
- return None
-
-def get_doc_namespace(d, doc):
- import uuid
- namespace_uuid = uuid.uuid5(uuid.NAMESPACE_DNS, d.getVar("SPDX_UUID_NAMESPACE"))
- return "%s/%s-%s" % (d.getVar("SPDX_NAMESPACE_PREFIX"), doc.name, str(uuid.uuid5(namespace_uuid, doc.name)))
-
-def create_annotation(d, comment):
- from datetime import datetime, timezone
-
- creation_time = datetime.now(tz=timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ")
- annotation = oe.spdx.SPDXAnnotation()
- annotation.annotationDate = creation_time
- annotation.annotationType = "OTHER"
- annotation.annotator = "Tool: %s - %s" % (d.getVar("SPDX_TOOL_NAME"), d.getVar("SPDX_TOOL_VERSION"))
- annotation.comment = comment
- return annotation
-
-def recipe_spdx_is_native(d, recipe):
- return any(a.annotationType == "OTHER" and
- a.annotator == "Tool: %s - %s" % (d.getVar("SPDX_TOOL_NAME"), d.getVar("SPDX_TOOL_VERSION")) and
- a.comment == "isNative" for a in recipe.annotations)
-
-def is_work_shared_spdx(d):
- return bb.data.inherits_class('kernel', d) or ('work-shared' in d.getVar('WORKDIR'))
-
-python() {
- import json
- if d.getVar("SPDX_LICENSE_DATA"):
- return
-
- with open(d.getVar("SPDX_LICENSES"), "r") as f:
- data = json.load(f)
- # Transform the license array to a dictionary
- data["licenses"] = {l["licenseId"]: l for l in data["licenses"]}
- d.setVar("SPDX_LICENSE_DATA", data)
-}
-
-def convert_license_to_spdx(lic, document, d, existing={}):
- from pathlib import Path
- import oe.spdx
-
- license_data = d.getVar("SPDX_LICENSE_DATA")
- extracted = {}
-
- def add_extracted_license(ident, name):
- nonlocal document
-
- if name in extracted:
- return
-
- extracted_info = oe.spdx.SPDXExtractedLicensingInfo()
- extracted_info.name = name
- extracted_info.licenseId = ident
- extracted_info.extractedText = None
-
- if name == "PD":
- # Special-case this.
- extracted_info.extractedText = "Software released to the public domain"
- else:
- # Seach for the license in COMMON_LICENSE_DIR and LICENSE_PATH
- for directory in [d.getVar('COMMON_LICENSE_DIR')] + (d.getVar('LICENSE_PATH') or '').split():
- try:
- with (Path(directory) / name).open(errors="replace") as f:
- extracted_info.extractedText = f.read()
- break
- except FileNotFoundError:
- pass
- if extracted_info.extractedText is None:
- # If it's not SPDX or PD, then NO_GENERIC_LICENSE must be set
- filename = d.getVarFlag('NO_GENERIC_LICENSE', name)
- if filename:
- filename = d.expand("${S}/" + filename)
- with open(filename, errors="replace") as f:
- extracted_info.extractedText = f.read()
- else:
- bb.error("Cannot find any text for license %s" % name)
-
- extracted[name] = extracted_info
- document.hasExtractedLicensingInfos.append(extracted_info)
-
- def convert(l):
- if l == "(" or l == ")":
- return l
-
- if l == "&":
- return "AND"
-
- if l == "|":
- return "OR"
-
- if l == "CLOSED":
- return "NONE"
-
- spdx_license = d.getVarFlag("SPDXLICENSEMAP", l) or l
- if spdx_license in license_data["licenses"]:
- return spdx_license
-
- try:
- spdx_license = existing[l]
- except KeyError:
- spdx_license = "LicenseRef-" + l
- add_extracted_license(spdx_license, l)
-
- return spdx_license
-
- lic_split = lic.replace("(", " ( ").replace(")", " ) ").split()
-
- return ' '.join(convert(l) for l in lic_split)
-
-def process_sources(d):
- pn = d.getVar('PN')
- assume_provided = (d.getVar("ASSUME_PROVIDED") or "").split()
- if pn in assume_provided:
- for p in d.getVar("PROVIDES").split():
- if p != pn:
- pn = p
- break
-
- # glibc-locale: do_fetch, do_unpack and do_patch tasks have been deleted,
- # so avoid archiving source here.
- if pn.startswith('glibc-locale'):
- return False
- if d.getVar('PN') == "libtool-cross":
- return False
- if d.getVar('PN') == "libgcc-initial":
- return False
- if d.getVar('PN') == "shadow-sysroot":
- return False
-
- # We just archive gcc-source for all the gcc related recipes
- if d.getVar('BPN') in ['gcc', 'libgcc']:
- bb.debug(1, 'spdx: There is bug in scan of %s is, do nothing' % pn)
- return False
-
- return True
-
-
-def add_package_files(d, doc, spdx_pkg, topdir, get_spdxid, get_types, *, archive=None, ignore_dirs=[], ignore_top_level_dirs=[]):
- from pathlib import Path
- import oe.spdx
- import hashlib
-
- source_date_epoch = d.getVar("SOURCE_DATE_EPOCH")
- if source_date_epoch:
- source_date_epoch = int(source_date_epoch)
-
- sha1s = []
- spdx_files = []
-
- file_counter = 1
- for subdir, dirs, files in os.walk(topdir):
- dirs[:] = [d for d in dirs if d not in ignore_dirs]
- if subdir == str(topdir):
- dirs[:] = [d for d in dirs if d not in ignore_top_level_dirs]
-
- for file in files:
- filepath = Path(subdir) / file
- filename = str(filepath.relative_to(topdir))
-
- if filepath.is_file() and not filepath.is_symlink():
- spdx_file = oe.spdx.SPDXFile()
- spdx_file.SPDXID = get_spdxid(file_counter)
- for t in get_types(filepath):
- spdx_file.fileTypes.append(t)
- spdx_file.fileName = filename
-
- if archive is not None:
- with filepath.open("rb") as f:
- info = archive.gettarinfo(fileobj=f)
- info.name = filename
- info.uid = 0
- info.gid = 0
- info.uname = "root"
- info.gname = "root"
-
- if source_date_epoch is not None and info.mtime > source_date_epoch:
- info.mtime = source_date_epoch
-
- archive.addfile(info, f)
-
- sha1 = bb.utils.sha1_file(filepath)
- sha1s.append(sha1)
- spdx_file.checksums.append(oe.spdx.SPDXChecksum(
- algorithm="SHA1",
- checksumValue=sha1,
- ))
- spdx_file.checksums.append(oe.spdx.SPDXChecksum(
- algorithm="SHA256",
- checksumValue=bb.utils.sha256_file(filepath),
- ))
-
- if "SOURCE" in spdx_file.fileTypes:
- extracted_lics = extract_licenses(filepath)
- if extracted_lics:
- spdx_file.licenseInfoInFiles = extracted_lics
-
- doc.files.append(spdx_file)
- doc.add_relationship(spdx_pkg, "CONTAINS", spdx_file)
- spdx_pkg.hasFiles.append(spdx_file.SPDXID)
-
- spdx_files.append(spdx_file)
-
- file_counter += 1
-
- sha1s.sort()
- verifier = hashlib.sha1()
- for v in sha1s:
- verifier.update(v.encode("utf-8"))
- spdx_pkg.packageVerificationCode.packageVerificationCodeValue = verifier.hexdigest()
-
- return spdx_files
-
-
-def add_package_sources_from_debug(d, package_doc, spdx_package, package, package_files, sources):
- from pathlib import Path
- import hashlib
- import oe.packagedata
- import oe.spdx
-
- debug_search_paths = [
- Path(d.getVar('PKGD')),
- Path(d.getVar('STAGING_DIR_TARGET')),
- Path(d.getVar('STAGING_DIR_NATIVE')),
- Path(d.getVar('STAGING_KERNEL_DIR')),
- ]
-
- pkg_data = oe.packagedata.read_subpkgdata_extended(package, d)
-
- if pkg_data is None:
- return
-
- for file_path, file_data in pkg_data["files_info"].items():
- if not "debugsrc" in file_data:
- continue
-
- for pkg_file in package_files:
- if file_path.lstrip("/") == pkg_file.fileName.lstrip("/"):
- break
- else:
- bb.fatal("No package file found for %s" % str(file_path))
- continue
-
- for debugsrc in file_data["debugsrc"]:
- ref_id = "NOASSERTION"
- for search in debug_search_paths:
- if debugsrc.startswith("/usr/src/kernel"):
- debugsrc_path = search / debugsrc.replace('/usr/src/kernel/', '')
- else:
- debugsrc_path = search / debugsrc.lstrip("/")
- if not debugsrc_path.exists():
- continue
-
- file_sha256 = bb.utils.sha256_file(debugsrc_path)
-
- if file_sha256 in sources:
- source_file = sources[file_sha256]
-
- doc_ref = package_doc.find_external_document_ref(source_file.doc.documentNamespace)
- if doc_ref is None:
- doc_ref = oe.spdx.SPDXExternalDocumentRef()
- doc_ref.externalDocumentId = "DocumentRef-dependency-" + source_file.doc.name
- doc_ref.spdxDocument = source_file.doc.documentNamespace
- doc_ref.checksum.algorithm = "SHA1"
- doc_ref.checksum.checksumValue = source_file.doc_sha1
- package_doc.externalDocumentRefs.append(doc_ref)
-
- ref_id = "%s:%s" % (doc_ref.externalDocumentId, source_file.file.SPDXID)
- else:
- bb.debug(1, "Debug source %s with SHA256 %s not found in any dependency" % (str(debugsrc_path), file_sha256))
- break
- else:
- bb.debug(1, "Debug source %s not found" % debugsrc)
-
- package_doc.add_relationship(pkg_file, "GENERATED_FROM", ref_id, comment=debugsrc)
-
-def collect_dep_recipes(d, doc, spdx_recipe):
- from pathlib import Path
- import oe.sbom
- import oe.spdx
-
- deploy_dir_spdx = Path(d.getVar("DEPLOY_DIR_SPDX"))
-
- dep_recipes = []
- taskdepdata = d.getVar("BB_TASKDEPDATA", False)
- deps = sorted(set(
- dep[0] for dep in taskdepdata.values() if
- dep[1] == "do_create_spdx" and dep[0] != d.getVar("PN")
- ))
- for dep_pn in deps:
- dep_recipe_path = deploy_dir_spdx / "recipes" / ("recipe-%s.spdx.json" % dep_pn)
-
- spdx_dep_doc, spdx_dep_sha1 = oe.sbom.read_doc(dep_recipe_path)
-
- for pkg in spdx_dep_doc.packages:
- if pkg.name == dep_pn:
- spdx_dep_recipe = pkg
- break
- else:
- continue
-
- dep_recipes.append(oe.sbom.DepRecipe(spdx_dep_doc, spdx_dep_sha1, spdx_dep_recipe))
-
- dep_recipe_ref = oe.spdx.SPDXExternalDocumentRef()
- dep_recipe_ref.externalDocumentId = "DocumentRef-dependency-" + spdx_dep_doc.name
- dep_recipe_ref.spdxDocument = spdx_dep_doc.documentNamespace
- dep_recipe_ref.checksum.algorithm = "SHA1"
- dep_recipe_ref.checksum.checksumValue = spdx_dep_sha1
-
- doc.externalDocumentRefs.append(dep_recipe_ref)
-
- doc.add_relationship(
- "%s:%s" % (dep_recipe_ref.externalDocumentId, spdx_dep_recipe.SPDXID),
- "BUILD_DEPENDENCY_OF",
- spdx_recipe
- )
-
- return dep_recipes
-
-collect_dep_recipes[vardepsexclude] += "BB_TASKDEPDATA"
-
-
-def collect_dep_sources(d, dep_recipes):
- import oe.sbom
-
- sources = {}
- for dep in dep_recipes:
- # Don't collect sources from native recipes as they
- # match non-native sources also.
- if recipe_spdx_is_native(d, dep.recipe):
- continue
- recipe_files = set(dep.recipe.hasFiles)
-
- for spdx_file in dep.doc.files:
- if spdx_file.SPDXID not in recipe_files:
- continue
-
- if "SOURCE" in spdx_file.fileTypes:
- for checksum in spdx_file.checksums:
- if checksum.algorithm == "SHA256":
- sources[checksum.checksumValue] = oe.sbom.DepSource(dep.doc, dep.doc_sha1, dep.recipe, spdx_file)
- break
-
- return sources
-
-
-python do_create_spdx() {
- from datetime import datetime, timezone
- import oe.sbom
- import oe.spdx
- import uuid
- from pathlib import Path
- from contextlib import contextmanager
- import oe.cve_check
-
- @contextmanager
- def optional_tarfile(name, guard, mode="w"):
- import tarfile
- import bb.compress.zstd
-
- num_threads = int(d.getVar("BB_NUMBER_THREADS"))
-
- if guard:
- name.parent.mkdir(parents=True, exist_ok=True)
- with bb.compress.zstd.open(name, mode=mode + "b", num_threads=num_threads) as f:
- with tarfile.open(fileobj=f, mode=mode + "|") as tf:
- yield tf
- else:
- yield None
-
-
- deploy_dir_spdx = Path(d.getVar("DEPLOY_DIR_SPDX"))
- spdx_workdir = Path(d.getVar("SPDXWORK"))
- include_packaged = d.getVar("SPDX_INCLUDE_PACKAGED") == "1"
- include_sources = d.getVar("SPDX_INCLUDE_SOURCES") == "1"
- archive_sources = d.getVar("SPDX_ARCHIVE_SOURCES") == "1"
- archive_packaged = d.getVar("SPDX_ARCHIVE_PACKAGED") == "1"
-
- creation_time = datetime.now(tz=timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ")
-
- doc = oe.spdx.SPDXDocument()
-
- doc.name = "recipe-" + d.getVar("PN")
- doc.documentNamespace = get_doc_namespace(d, doc)
- doc.creationInfo.created = creation_time
- doc.creationInfo.comment = "This document was created by analyzing recipe files during the build."
- doc.creationInfo.licenseListVersion = d.getVar("SPDX_LICENSE_DATA")["licenseListVersion"]
- doc.creationInfo.creators.append("Tool: OpenEmbedded Core create-spdx.bbclass")
- doc.creationInfo.creators.append("Organization: %s" % d.getVar("SPDX_ORG"))
- doc.creationInfo.creators.append("Person: N/A ()")
-
- recipe = oe.spdx.SPDXPackage()
- recipe.name = d.getVar("PN")
- recipe.versionInfo = d.getVar("PV")
- recipe.SPDXID = oe.sbom.get_recipe_spdxid(d)
- recipe.packageSupplier = d.getVar("SPDX_SUPPLIER")
- if bb.data.inherits_class("native", d) or bb.data.inherits_class("cross", d):
- recipe.annotations.append(create_annotation(d, "isNative"))
-
- for s in d.getVar('SRC_URI').split():
- if not s.startswith("file://"):
- recipe.downloadLocation = s
- break
- else:
- recipe.downloadLocation = "NOASSERTION"
-
- homepage = d.getVar("HOMEPAGE")
- if homepage:
- recipe.homepage = homepage
-
- license = d.getVar("LICENSE")
- if license:
- recipe.licenseDeclared = convert_license_to_spdx(license, doc, d)
-
- summary = d.getVar("SUMMARY")
- if summary:
- recipe.summary = summary
-
- description = d.getVar("DESCRIPTION")
- if description:
- recipe.description = description
-
- # Some CVEs may be patched during the build process without incrementing the version number,
- # so querying for CVEs based on the CPE id can lead to false positives. To account for this,
- # save the CVEs fixed by patches to source information field in the SPDX.
- patched_cves = oe.cve_check.get_patched_cves(d)
- patched_cves = list(patched_cves)
- patched_cves = ' '.join(patched_cves)
- if patched_cves:
- recipe.sourceInfo = "CVEs fixed: " + patched_cves
-
- cpe_ids = oe.cve_check.get_cpe_ids(d.getVar("CVE_PRODUCT"), d.getVar("CVE_VERSION"))
- if cpe_ids:
- for cpe_id in cpe_ids:
- cpe = oe.spdx.SPDXExternalReference()
- cpe.referenceCategory = "SECURITY"
- cpe.referenceType = "http://spdx.org/rdf/references/cpe23Type"
- cpe.referenceLocator = cpe_id
- recipe.externalRefs.append(cpe)
-
- doc.packages.append(recipe)
- doc.add_relationship(doc, "DESCRIBES", recipe)
-
- if process_sources(d) and include_sources:
- recipe_archive = deploy_dir_spdx / "recipes" / (doc.name + ".tar.zst")
- with optional_tarfile(recipe_archive, archive_sources) as archive:
- spdx_get_src(d)
-
- add_package_files(
- d,
- doc,
- recipe,
- spdx_workdir,
- lambda file_counter: "SPDXRef-SourceFile-%s-%d" % (d.getVar("PN"), file_counter),
- lambda filepath: ["SOURCE"],
- ignore_dirs=[".git"],
- ignore_top_level_dirs=["temp"],
- archive=archive,
- )
-
- if archive is not None:
- recipe.packageFileName = str(recipe_archive.name)
-
- dep_recipes = collect_dep_recipes(d, doc, recipe)
-
- doc_sha1 = oe.sbom.write_doc(d, doc, "recipes")
- dep_recipes.append(oe.sbom.DepRecipe(doc, doc_sha1, recipe))
-
- recipe_ref = oe.spdx.SPDXExternalDocumentRef()
- recipe_ref.externalDocumentId = "DocumentRef-recipe-" + recipe.name
- recipe_ref.spdxDocument = doc.documentNamespace
- recipe_ref.checksum.algorithm = "SHA1"
- recipe_ref.checksum.checksumValue = doc_sha1
-
- sources = collect_dep_sources(d, dep_recipes)
- found_licenses = {license.name:recipe_ref.externalDocumentId + ":" + license.licenseId for license in doc.hasExtractedLicensingInfos}
-
- if not recipe_spdx_is_native(d, recipe):
- bb.build.exec_func("read_subpackage_metadata", d)
-
- pkgdest = Path(d.getVar("PKGDEST"))
- for package in d.getVar("PACKAGES").split():
- if not oe.packagedata.packaged(package, d):
- continue
-
- package_doc = oe.spdx.SPDXDocument()
- pkg_name = d.getVar("PKG:%s" % package) or package
- package_doc.name = pkg_name
- package_doc.documentNamespace = get_doc_namespace(d, package_doc)
- package_doc.creationInfo.created = creation_time
- package_doc.creationInfo.comment = "This document was created by analyzing packages created during the build."
- package_doc.creationInfo.licenseListVersion = d.getVar("SPDX_LICENSE_DATA")["licenseListVersion"]
- package_doc.creationInfo.creators.append("Tool: OpenEmbedded Core create-spdx.bbclass")
- package_doc.creationInfo.creators.append("Organization: %s" % d.getVar("SPDX_ORG"))
- package_doc.creationInfo.creators.append("Person: N/A ()")
- package_doc.externalDocumentRefs.append(recipe_ref)
-
- package_license = d.getVar("LICENSE:%s" % package) or d.getVar("LICENSE")
-
- spdx_package = oe.spdx.SPDXPackage()
-
- spdx_package.SPDXID = oe.sbom.get_package_spdxid(pkg_name)
- spdx_package.name = pkg_name
- spdx_package.versionInfo = d.getVar("PV")
- spdx_package.licenseDeclared = convert_license_to_spdx(package_license, package_doc, d, found_licenses)
- spdx_package.packageSupplier = d.getVar("SPDX_SUPPLIER")
-
- package_doc.packages.append(spdx_package)
-
- package_doc.add_relationship(spdx_package, "GENERATED_FROM", "%s:%s" % (recipe_ref.externalDocumentId, recipe.SPDXID))
- package_doc.add_relationship(package_doc, "DESCRIBES", spdx_package)
-
- package_archive = deploy_dir_spdx / "packages" / (package_doc.name + ".tar.zst")
- with optional_tarfile(package_archive, archive_packaged) as archive:
- package_files = add_package_files(
- d,
- package_doc,
- spdx_package,
- pkgdest / package,
- lambda file_counter: oe.sbom.get_packaged_file_spdxid(pkg_name, file_counter),
- lambda filepath: ["BINARY"],
- archive=archive,
- )
-
- if archive is not None:
- spdx_package.packageFileName = str(package_archive.name)
-
- add_package_sources_from_debug(d, package_doc, spdx_package, package, package_files, sources)
-
- oe.sbom.write_doc(d, package_doc, "packages")
-}
-# NOTE: depending on do_unpack is a hack that is necessary to get it's dependencies for archive the source
-addtask do_create_spdx after do_package do_packagedata do_unpack before do_populate_sdk do_build do_rm_work
-
-SSTATETASKS += "do_create_spdx"
-do_create_spdx[sstate-inputdirs] = "${SPDXDEPLOY}"
-do_create_spdx[sstate-outputdirs] = "${DEPLOY_DIR_SPDX}"
-
-python do_create_spdx_setscene () {
- sstate_setscene(d)
-}
-addtask do_create_spdx_setscene
-
-do_create_spdx[dirs] = "${SPDXWORK}"
-do_create_spdx[cleandirs] = "${SPDXDEPLOY} ${SPDXWORK}"
-do_create_spdx[depends] += "${PATCHDEPENDENCY}"
-do_create_spdx[deptask] = "do_create_spdx"
-
-def collect_package_providers(d):
- from pathlib import Path
- import oe.sbom
- import oe.spdx
- import json
-
- deploy_dir_spdx = Path(d.getVar("DEPLOY_DIR_SPDX"))
-
- providers = {}
-
- taskdepdata = d.getVar("BB_TASKDEPDATA", False)
- deps = sorted(set(
- dep[0] for dep in taskdepdata.values() if dep[0] != d.getVar("PN")
- ))
- deps.append(d.getVar("PN"))
-
- for dep_pn in deps:
- recipe_data = oe.packagedata.read_pkgdata(dep_pn, d)
-
- for pkg in recipe_data.get("PACKAGES", "").split():
-
- pkg_data = oe.packagedata.read_subpkgdata_dict(pkg, d)
- rprovides = set(n for n, _ in bb.utils.explode_dep_versions2(pkg_data.get("RPROVIDES", "")).items())
- rprovides.add(pkg)
-
- for r in rprovides:
- providers[r] = pkg
-
- return providers
-
-collect_package_providers[vardepsexclude] += "BB_TASKDEPDATA"
-
-python do_create_runtime_spdx() {
- from datetime import datetime, timezone
- import oe.sbom
- import oe.spdx
- import oe.packagedata
- from pathlib import Path
-
- deploy_dir_spdx = Path(d.getVar("DEPLOY_DIR_SPDX"))
- spdx_deploy = Path(d.getVar("SPDXRUNTIMEDEPLOY"))
- is_native = bb.data.inherits_class("native", d) or bb.data.inherits_class("cross", d)
-
- creation_time = datetime.now(tz=timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ")
-
- providers = collect_package_providers(d)
-
- if not is_native:
- bb.build.exec_func("read_subpackage_metadata", d)
-
- dep_package_cache = {}
-
- pkgdest = Path(d.getVar("PKGDEST"))
- for package in d.getVar("PACKAGES").split():
- localdata = bb.data.createCopy(d)
- pkg_name = d.getVar("PKG:%s" % package) or package
- localdata.setVar("PKG", pkg_name)
- localdata.setVar('OVERRIDES', d.getVar("OVERRIDES", False) + ":" + package)
-
- if not oe.packagedata.packaged(package, localdata):
- continue
-
- pkg_spdx_path = deploy_dir_spdx / "packages" / (pkg_name + ".spdx.json")
-
- package_doc, package_doc_sha1 = oe.sbom.read_doc(pkg_spdx_path)
-
- for p in package_doc.packages:
- if p.name == pkg_name:
- spdx_package = p
- break
- else:
- bb.fatal("Package '%s' not found in %s" % (pkg_name, pkg_spdx_path))
-
- runtime_doc = oe.spdx.SPDXDocument()
- runtime_doc.name = "runtime-" + pkg_name
- runtime_doc.documentNamespace = get_doc_namespace(localdata, runtime_doc)
- runtime_doc.creationInfo.created = creation_time
- runtime_doc.creationInfo.comment = "This document was created by analyzing package runtime dependencies."
- runtime_doc.creationInfo.licenseListVersion = d.getVar("SPDX_LICENSE_DATA")["licenseListVersion"]
- runtime_doc.creationInfo.creators.append("Tool: OpenEmbedded Core create-spdx.bbclass")
- runtime_doc.creationInfo.creators.append("Organization: %s" % d.getVar("SPDX_ORG"))
- runtime_doc.creationInfo.creators.append("Person: N/A ()")
-
- package_ref = oe.spdx.SPDXExternalDocumentRef()
- package_ref.externalDocumentId = "DocumentRef-package-" + package
- package_ref.spdxDocument = package_doc.documentNamespace
- package_ref.checksum.algorithm = "SHA1"
- package_ref.checksum.checksumValue = package_doc_sha1
-
- runtime_doc.externalDocumentRefs.append(package_ref)
-
- runtime_doc.add_relationship(
- runtime_doc.SPDXID,
- "AMENDS",
- "%s:%s" % (package_ref.externalDocumentId, package_doc.SPDXID)
- )
-
- deps = bb.utils.explode_dep_versions2(localdata.getVar("RDEPENDS") or "")
- seen_deps = set()
- for dep, _ in deps.items():
- if dep in seen_deps:
- continue
-
- if dep not in providers:
- continue
-
- dep = providers[dep]
-
- if not oe.packagedata.packaged(dep, localdata):
- continue
-
- dep_pkg_data = oe.packagedata.read_subpkgdata_dict(dep, d)
- dep_pkg = dep_pkg_data["PKG"]
-
- if dep in dep_package_cache:
- (dep_spdx_package, dep_package_ref) = dep_package_cache[dep]
- else:
- dep_path = deploy_dir_spdx / "packages" / ("%s.spdx.json" % dep_pkg)
-
- spdx_dep_doc, spdx_dep_sha1 = oe.sbom.read_doc(dep_path)
-
- for pkg in spdx_dep_doc.packages:
- if pkg.name == dep_pkg:
- dep_spdx_package = pkg
- break
- else:
- bb.fatal("Package '%s' not found in %s" % (dep_pkg, dep_path))
-
- dep_package_ref = oe.spdx.SPDXExternalDocumentRef()
- dep_package_ref.externalDocumentId = "DocumentRef-runtime-dependency-" + spdx_dep_doc.name
- dep_package_ref.spdxDocument = spdx_dep_doc.documentNamespace
- dep_package_ref.checksum.algorithm = "SHA1"
- dep_package_ref.checksum.checksumValue = spdx_dep_sha1
-
- dep_package_cache[dep] = (dep_spdx_package, dep_package_ref)
-
- runtime_doc.externalDocumentRefs.append(dep_package_ref)
-
- runtime_doc.add_relationship(
- "%s:%s" % (dep_package_ref.externalDocumentId, dep_spdx_package.SPDXID),
- "RUNTIME_DEPENDENCY_OF",
- "%s:%s" % (package_ref.externalDocumentId, spdx_package.SPDXID)
- )
- seen_deps.add(dep)
-
- oe.sbom.write_doc(d, runtime_doc, "runtime", spdx_deploy)
-}
-
-addtask do_create_runtime_spdx after do_create_spdx before do_build do_rm_work
-SSTATETASKS += "do_create_runtime_spdx"
-do_create_runtime_spdx[sstate-inputdirs] = "${SPDXRUNTIMEDEPLOY}"
-do_create_runtime_spdx[sstate-outputdirs] = "${DEPLOY_DIR_SPDX}"
-
-python do_create_runtime_spdx_setscene () {
- sstate_setscene(d)
-}
-addtask do_create_runtime_spdx_setscene
-
-do_create_runtime_spdx[dirs] = "${SPDXRUNTIMEDEPLOY}"
-do_create_runtime_spdx[cleandirs] = "${SPDXRUNTIMEDEPLOY}"
-do_create_runtime_spdx[rdeptask] = "do_create_spdx"
-
-def spdx_get_src(d):
- """
- save patched source of the recipe in SPDX_WORKDIR.
- """
- import shutil
- spdx_workdir = d.getVar('SPDXWORK')
- spdx_sysroot_native = d.getVar('STAGING_DIR_NATIVE')
- pn = d.getVar('PN')
-
- workdir = d.getVar("WORKDIR")
-
- try:
- # The kernel class functions require it to be on work-shared, so we dont change WORKDIR
- if not is_work_shared_spdx(d):
- # Change the WORKDIR to make do_unpack do_patch run in another dir.
- d.setVar('WORKDIR', spdx_workdir)
- # Restore the original path to recipe's native sysroot (it's relative to WORKDIR).
- d.setVar('STAGING_DIR_NATIVE', spdx_sysroot_native)
-
- # The changed 'WORKDIR' also caused 'B' changed, create dir 'B' for the
- # possibly requiring of the following tasks (such as some recipes's
- # do_patch required 'B' existed).
- bb.utils.mkdirhier(d.getVar('B'))
-
- bb.build.exec_func('do_unpack', d)
- # Copy source of kernel to spdx_workdir
- if is_work_shared_spdx(d):
- d.setVar('WORKDIR', spdx_workdir)
- d.setVar('STAGING_DIR_NATIVE', spdx_sysroot_native)
- src_dir = spdx_workdir + "/" + d.getVar('PN')+ "-" + d.getVar('PV') + "-" + d.getVar('PR')
- bb.utils.mkdirhier(src_dir)
- if bb.data.inherits_class('kernel',d):
- share_src = d.getVar('STAGING_KERNEL_DIR')
- cmd_copy_share = "cp -rf " + share_src + "/* " + src_dir + "/"
- cmd_copy_kernel_result = os.popen(cmd_copy_share).read()
- bb.note("cmd_copy_kernel_result = " + cmd_copy_kernel_result)
-
- git_path = src_dir + "/.git"
- if os.path.exists(git_path):
- shutils.rmtree(git_path)
-
- # Make sure gcc and kernel sources are patched only once
- if not (d.getVar('SRC_URI') == "" or is_work_shared_spdx(d)):
- bb.build.exec_func('do_patch', d)
-
- # Some userland has no source.
- if not os.path.exists( spdx_workdir ):
- bb.utils.mkdirhier(spdx_workdir)
- finally:
- d.setVar("WORKDIR", workdir)
-
-do_rootfs[recrdeptask] += "do_create_spdx do_create_runtime_spdx"
-
-ROOTFS_POSTUNINSTALL_COMMAND =+ "image_combine_spdx ; "
-
-do_populate_sdk[recrdeptask] += "do_create_spdx do_create_runtime_spdx"
-POPULATE_SDK_POST_HOST_COMMAND:append:task-populate-sdk = " sdk_host_combine_spdx; "
-POPULATE_SDK_POST_TARGET_COMMAND:append:task-populate-sdk = " sdk_target_combine_spdx; "
-
-python image_combine_spdx() {
- import os
- import oe.sbom
- from pathlib import Path
- from oe.rootfs import image_list_installed_packages
-
- image_name = d.getVar("IMAGE_NAME")
- image_link_name = d.getVar("IMAGE_LINK_NAME")
- imgdeploydir = Path(d.getVar("IMGDEPLOYDIR"))
- img_spdxid = oe.sbom.get_image_spdxid(image_name)
- packages = image_list_installed_packages(d)
-
- combine_spdx(d, image_name, imgdeploydir, img_spdxid, packages)
-
- def make_image_link(target_path, suffix):
- if image_link_name:
- link = imgdeploydir / (image_link_name + suffix)
- if link != target_path:
- link.symlink_to(os.path.relpath(target_path, link.parent))
-
- image_spdx_path = imgdeploydir / (image_name + ".spdx.json")
- make_image_link(image_spdx_path, ".spdx.json")
- spdx_tar_path = imgdeploydir / (image_name + ".spdx.tar.zst")
- make_image_link(spdx_tar_path, ".spdx.tar.zst")
- spdx_index_path = imgdeploydir / (image_name + ".spdx.index.json")
- make_image_link(spdx_index_path, ".spdx.index.json")
-}
-
-python sdk_host_combine_spdx() {
- sdk_combine_spdx(d, "host")
-}
-
-python sdk_target_combine_spdx() {
- sdk_combine_spdx(d, "target")
-}
-
-def sdk_combine_spdx(d, sdk_type):
- import oe.sbom
- from pathlib import Path
- from oe.sdk import sdk_list_installed_packages
-
- sdk_name = d.getVar("SDK_NAME") + "-" + sdk_type
- sdk_deploydir = Path(d.getVar("SDKDEPLOYDIR"))
- sdk_spdxid = oe.sbom.get_sdk_spdxid(sdk_name)
- sdk_packages = sdk_list_installed_packages(d, sdk_type == "target")
- combine_spdx(d, sdk_name, sdk_deploydir, sdk_spdxid, sdk_packages)
-
-def combine_spdx(d, rootfs_name, rootfs_deploydir, rootfs_spdxid, packages):
- import os
- import oe.spdx
- import oe.sbom
- import io
- import json
- from datetime import timezone, datetime
- from pathlib import Path
- import tarfile
- import bb.compress.zstd
-
- creation_time = datetime.now(tz=timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ")
- deploy_dir_spdx = Path(d.getVar("DEPLOY_DIR_SPDX"))
- source_date_epoch = d.getVar("SOURCE_DATE_EPOCH")
-
- doc = oe.spdx.SPDXDocument()
- doc.name = rootfs_name
- doc.documentNamespace = get_doc_namespace(d, doc)
- doc.creationInfo.created = creation_time
- doc.creationInfo.comment = "This document was created by analyzing the source of the Yocto recipe during the build."
- doc.creationInfo.licenseListVersion = d.getVar("SPDX_LICENSE_DATA")["licenseListVersion"]
- doc.creationInfo.creators.append("Tool: OpenEmbedded Core create-spdx.bbclass")
- doc.creationInfo.creators.append("Organization: %s" % d.getVar("SPDX_ORG"))
- doc.creationInfo.creators.append("Person: N/A ()")
-
- image = oe.spdx.SPDXPackage()
- image.name = d.getVar("PN")
- image.versionInfo = d.getVar("PV")
- image.SPDXID = rootfs_spdxid
- image.packageSupplier = d.getVar("SPDX_SUPPLIER")
-
- doc.packages.append(image)
-
- for name in sorted(packages.keys()):
- pkg_spdx_path = deploy_dir_spdx / "packages" / (name + ".spdx.json")
- pkg_doc, pkg_doc_sha1 = oe.sbom.read_doc(pkg_spdx_path)
-
- for p in pkg_doc.packages:
- if p.name == name:
- pkg_ref = oe.spdx.SPDXExternalDocumentRef()
- pkg_ref.externalDocumentId = "DocumentRef-%s" % pkg_doc.name
- pkg_ref.spdxDocument = pkg_doc.documentNamespace
- pkg_ref.checksum.algorithm = "SHA1"
- pkg_ref.checksum.checksumValue = pkg_doc_sha1
-
- doc.externalDocumentRefs.append(pkg_ref)
- doc.add_relationship(image, "CONTAINS", "%s:%s" % (pkg_ref.externalDocumentId, p.SPDXID))
- break
- else:
- bb.fatal("Unable to find package with name '%s' in SPDX file %s" % (name, pkg_spdx_path))
-
- runtime_spdx_path = deploy_dir_spdx / "runtime" / ("runtime-" + name + ".spdx.json")
- runtime_doc, runtime_doc_sha1 = oe.sbom.read_doc(runtime_spdx_path)
-
- runtime_ref = oe.spdx.SPDXExternalDocumentRef()
- runtime_ref.externalDocumentId = "DocumentRef-%s" % runtime_doc.name
- runtime_ref.spdxDocument = runtime_doc.documentNamespace
- runtime_ref.checksum.algorithm = "SHA1"
- runtime_ref.checksum.checksumValue = runtime_doc_sha1
-
- # "OTHER" isn't ideal here, but I can't find a relationship that makes sense
- doc.externalDocumentRefs.append(runtime_ref)
- doc.add_relationship(
- image,
- "OTHER",
- "%s:%s" % (runtime_ref.externalDocumentId, runtime_doc.SPDXID),
- comment="Runtime dependencies for %s" % name
- )
-
- image_spdx_path = rootfs_deploydir / (rootfs_name + ".spdx.json")
-
- with image_spdx_path.open("wb") as f:
- doc.to_json(f, sort_keys=True)
-
- num_threads = int(d.getVar("BB_NUMBER_THREADS"))
-
- visited_docs = set()
-
- index = {"documents": []}
-
- spdx_tar_path = rootfs_deploydir / (rootfs_name + ".spdx.tar.zst")
- with bb.compress.zstd.open(spdx_tar_path, "w", num_threads=num_threads) as f:
- with tarfile.open(fileobj=f, mode="w|") as tar:
- def collect_spdx_document(path):
- nonlocal tar
- nonlocal deploy_dir_spdx
- nonlocal source_date_epoch
- nonlocal index
-
- if path in visited_docs:
- return
-
- visited_docs.add(path)
-
- with path.open("rb") as f:
- doc, sha1 = oe.sbom.read_doc(f)
- f.seek(0)
-
- if doc.documentNamespace in visited_docs:
- return
-
- bb.note("Adding SPDX document %s" % path)
- visited_docs.add(doc.documentNamespace)
- info = tar.gettarinfo(fileobj=f)
-
- info.name = doc.name + ".spdx.json"
- info.uid = 0
- info.gid = 0
- info.uname = "root"
- info.gname = "root"
-
- if source_date_epoch is not None and info.mtime > int(source_date_epoch):
- info.mtime = int(source_date_epoch)
-
- tar.addfile(info, f)
-
- index["documents"].append({
- "filename": info.name,
- "documentNamespace": doc.documentNamespace,
- "sha1": sha1,
- })
-
- for ref in doc.externalDocumentRefs:
- ref_path = deploy_dir_spdx / "by-namespace" / ref.spdxDocument.replace("/", "_")
- collect_spdx_document(ref_path)
-
- collect_spdx_document(image_spdx_path)
-
- index["documents"].sort(key=lambda x: x["filename"])
-
- index_str = io.BytesIO(json.dumps(index, sort_keys=True).encode("utf-8"))
-
- info = tarfile.TarInfo()
- info.name = "index.json"
- info.size = len(index_str.getvalue())
- info.uid = 0
- info.gid = 0
- info.uname = "root"
- info.gname = "root"
-
- tar.addfile(info, fileobj=index_str)
-
- spdx_index_path = rootfs_deploydir / (rootfs_name + ".spdx.index.json")
- with spdx_index_path.open("w") as f:
- json.dump(index, f, sort_keys=True)
+# Include this class when you don't care what version of SPDX you get; it will
+# be updated to the latest stable version that is supported
+inherit create-spdx-2.2
diff --git a/meta/classes/cve-check.bbclass b/meta/classes/cve-check.bbclass
index 7cd98ae462..56ba8bceef 100644
--- a/meta/classes/cve-check.bbclass
+++ b/meta/classes/cve-check.bbclass
@@ -1,3 +1,9 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
# This class is used to check recipes against public CVEs.
#
# In order to use this class just inherit the class in the
@@ -26,7 +32,7 @@ CVE_PRODUCT ??= "${BPN}"
CVE_VERSION ??= "${PV}"
CVE_CHECK_DB_DIR ?= "${DL_DIR}/CVE_CHECK"
-CVE_CHECK_DB_FILE ?= "${CVE_CHECK_DB_DIR}/nvdcve_1.1.db"
+CVE_CHECK_DB_FILE ?= "${CVE_CHECK_DB_DIR}/nvdcve_2-1.db"
CVE_CHECK_DB_FILE_LOCK ?= "${CVE_CHECK_DB_FILE}.lock"
CVE_CHECK_LOG ?= "${T}/cve.log"
@@ -42,13 +48,16 @@ CVE_CHECK_LOG_JSON ?= "${T}/cve.json"
CVE_CHECK_DIR ??= "${DEPLOY_DIR}/cve"
CVE_CHECK_RECIPE_FILE ?= "${CVE_CHECK_DIR}/${PN}"
CVE_CHECK_RECIPE_FILE_JSON ?= "${CVE_CHECK_DIR}/${PN}_cve.json"
-CVE_CHECK_MANIFEST ?= "${DEPLOY_DIR_IMAGE}/${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.cve"
-CVE_CHECK_MANIFEST_JSON ?= "${DEPLOY_DIR_IMAGE}/${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.json"
+CVE_CHECK_MANIFEST ?= "${IMGDEPLOYDIR}/${IMAGE_NAME}.cve"
+CVE_CHECK_MANIFEST_JSON ?= "${IMGDEPLOYDIR}/${IMAGE_NAME}.json"
CVE_CHECK_COPY_FILES ??= "1"
CVE_CHECK_CREATE_MANIFEST ??= "1"
+# Report Patched or Ignored CVEs
CVE_CHECK_REPORT_PATCHED ??= "1"
+CVE_CHECK_SHOW_WARNINGS ??= "1"
+
# Provide text output
CVE_CHECK_FORMAT_TEXT ??= "1"
@@ -61,12 +70,28 @@ CVE_CHECK_COVERAGE ??= "1"
# Skip CVE Check for packages (PN)
CVE_CHECK_SKIP_RECIPE ?= ""
-# Ingore the check for a given list of CVEs. If a CVE is found,
-# then it is considered patched. The value is a string containing
-# space separated CVE values:
+# Replace NVD DB check status for a given CVE. Each of CVE has to be mentioned
+# separately with optional detail and description for this status.
+#
+# CVE_STATUS[CVE-1234-0001] = "not-applicable-platform: Issue only applies on Windows"
+# CVE_STATUS[CVE-1234-0002] = "fixed-version: Fixed externally"
+#
+# Settings the same status and reason for multiple CVEs is possible
+# via CVE_STATUS_GROUPS variable.
#
-# CVE_CHECK_IGNORE = 'CVE-2014-2524 CVE-2018-1234'
+# CVE_STATUS_GROUPS = "CVE_STATUS_WIN CVE_STATUS_PATCHED"
#
+# CVE_STATUS_WIN = "CVE-1234-0001 CVE-1234-0003"
+# CVE_STATUS_WIN[status] = "not-applicable-platform: Issue only applies on Windows"
+# CVE_STATUS_PATCHED = "CVE-1234-0002 CVE-1234-0004"
+# CVE_STATUS_PATCHED[status] = "fixed-version: Fixed externally"
+#
+# All possible CVE statuses could be found in cve-check-map.conf
+# CVE_CHECK_STATUSMAP[not-applicable-platform] = "Ignored"
+# CVE_CHECK_STATUSMAP[fixed-version] = "Patched"
+#
+# CVE_CHECK_IGNORE is deprecated and CVE_STATUS has to be used instead.
+# Keep CVE_CHECK_IGNORE until other layers migrate to new variables
CVE_CHECK_IGNORE ?= ""
# Layers to be excluded
@@ -79,10 +104,28 @@ CVE_CHECK_LAYER_INCLUDELIST ??= ""
# set to "alphabetical" for version using single alphabetical character as increment release
CVE_VERSION_SUFFIX ??= ""
+python () {
+ # Fallback all CVEs from CVE_CHECK_IGNORE to CVE_STATUS
+ cve_check_ignore = d.getVar("CVE_CHECK_IGNORE")
+ if cve_check_ignore:
+ bb.warn("CVE_CHECK_IGNORE is deprecated in favor of CVE_STATUS")
+ for cve in (d.getVar("CVE_CHECK_IGNORE") or "").split():
+ d.setVarFlag("CVE_STATUS", cve, "ignored")
+
+ # Process CVE_STATUS_GROUPS to set multiple statuses and optional detail or description at once
+ for cve_status_group in (d.getVar("CVE_STATUS_GROUPS") or "").split():
+ cve_group = d.getVar(cve_status_group)
+ if cve_group is not None:
+ for cve in cve_group.split():
+ d.setVarFlag("CVE_STATUS", cve, d.getVarFlag(cve_status_group, "status"))
+ else:
+ bb.warn("CVE_STATUS_GROUPS contains undefined variable %s" % cve_status_group)
+}
+
def generate_json_report(d, out_path, link_path):
if os.path.exists(d.getVar("CVE_CHECK_SUMMARY_INDEX_PATH")):
import json
- from oe.cve_check import cve_check_merge_jsons
+ from oe.cve_check import cve_check_merge_jsons, update_symlinks
bb.note("Generating JSON CVE summary")
index_file = d.getVar("CVE_CHECK_SUMMARY_INDEX_PATH")
@@ -95,17 +138,17 @@ def generate_json_report(d, out_path, link_path):
cve_check_merge_jsons(summary, data)
filename = f.readline()
+ summary["package"].sort(key=lambda d: d['name'])
+
with open(out_path, "w") as f:
json.dump(summary, f, indent=2)
- if link_path != out_path:
- if os.path.exists(os.path.realpath(link_path)):
- os.remove(link_path)
- os.symlink(os.path.basename(out_path), link_path)
+ update_symlinks(out_path, link_path)
python cve_save_summary_handler () {
import shutil
import datetime
+ from oe.cve_check import update_symlinks
cve_tmp_file = d.getVar("CVE_CHECK_TMP_FILE")
@@ -118,14 +161,9 @@ python cve_save_summary_handler () {
if os.path.exists(cve_tmp_file):
shutil.copyfile(cve_tmp_file, cve_summary_file)
-
- if cve_summary_file and os.path.exists(cve_summary_file):
- cvefile_link = os.path.join(cvelogpath, cve_summary_name)
- # if the paths are the same don't create the link
- if cvefile_link != cve_summary_file:
- if os.path.exists(os.path.realpath(cvefile_link)):
- os.remove(cvefile_link)
- os.symlink(os.path.basename(cve_summary_file), cvefile_link)
+ cvefile_link = os.path.join(cvelogpath, cve_summary_name)
+ update_symlinks(cve_summary_file, cvefile_link)
+ bb.plain("Complete CVE report summary created at: %s" % cvefile_link)
if d.getVar("CVE_CHECK_FORMAT_JSON") == "1":
json_summary_link_name = os.path.join(cvelogpath, d.getVar("CVE_CHECK_SUMMARY_FILE_NAME_JSON"))
@@ -143,22 +181,23 @@ python do_cve_check () {
"""
from oe.cve_check import get_patched_cves
- if os.path.exists(d.getVar("CVE_CHECK_DB_FILE")):
- try:
- patched_cves = get_patched_cves(d)
- except FileNotFoundError:
- bb.fatal("Failure in searching patches")
- ignored, patched, unpatched, status = check_cves(d, patched_cves)
- if patched or unpatched or (d.getVar("CVE_CHECK_COVERAGE") == "1" and status):
- cve_data = get_cve_info(d, patched + unpatched)
- cve_write_data(d, patched, unpatched, ignored, cve_data, status)
- else:
- bb.note("No CVE database found, skipping CVE check")
+ with bb.utils.fileslocked([d.getVar("CVE_CHECK_DB_FILE_LOCK")], shared=True):
+ if os.path.exists(d.getVar("CVE_CHECK_DB_FILE")):
+ try:
+ patched_cves = get_patched_cves(d)
+ except FileNotFoundError:
+ bb.fatal("Failure in searching patches")
+ ignored, patched, unpatched, status = check_cves(d, patched_cves)
+ if patched or unpatched or (d.getVar("CVE_CHECK_COVERAGE") == "1" and status):
+ cve_data = get_cve_info(d, patched + unpatched + ignored)
+ cve_write_data(d, patched, unpatched, ignored, cve_data, status)
+ else:
+ bb.note("No CVE database found, skipping CVE check")
}
addtask cve_check before do_build
-do_cve_check[depends] = "cve-update-db-native:do_fetch"
+do_cve_check[depends] = "cve-update-nvd2-native:do_fetch"
do_cve_check[nostamp] = "1"
python cve_check_cleanup () {
@@ -170,7 +209,7 @@ python cve_check_cleanup () {
}
addhandler cve_check_cleanup
-cve_check_cleanup[eventmask] = "bb.cooker.CookerExit"
+cve_check_cleanup[eventmask] = "bb.event.BuildCompleted"
python cve_check_write_rootfs_manifest () {
"""
@@ -178,7 +217,9 @@ python cve_check_write_rootfs_manifest () {
"""
import shutil
- from oe.cve_check import cve_check_merge_jsons
+ import json
+ from oe.rootfs import image_list_installed_packages
+ from oe.cve_check import cve_check_merge_jsons, update_symlinks
if d.getVar("CVE_CHECK_COPY_FILES") == "1":
deploy_file = d.getVar("CVE_CHECK_RECIPE_FILE")
@@ -188,55 +229,88 @@ python cve_check_write_rootfs_manifest () {
if os.path.exists(deploy_file_json):
bb.utils.remove(deploy_file_json)
- if os.path.exists(d.getVar("CVE_CHECK_TMP_FILE")):
- bb.note("Writing rootfs CVE manifest")
- deploy_dir = d.getVar("DEPLOY_DIR_IMAGE")
- link_name = d.getVar("IMAGE_LINK_NAME")
+ # Create a list of relevant recipies
+ recipies = set()
+ for pkg in list(image_list_installed_packages(d)):
+ pkg_info = os.path.join(d.getVar('PKGDATA_DIR'),
+ 'runtime-reverse', pkg)
+ pkg_data = oe.packagedata.read_pkgdatafile(pkg_info)
+ recipies.add(pkg_data["PN"])
+
+ bb.note("Writing rootfs CVE manifest")
+ deploy_dir = d.getVar("IMGDEPLOYDIR")
+ link_name = d.getVar("IMAGE_LINK_NAME")
+
+ json_data = {"version":"1", "package": []}
+ text_data = ""
+ enable_json = d.getVar("CVE_CHECK_FORMAT_JSON") == "1"
+ enable_text = d.getVar("CVE_CHECK_FORMAT_TEXT") == "1"
+
+ save_pn = d.getVar("PN")
+
+ for pkg in recipies:
+ # To be able to use the CVE_CHECK_RECIPE_FILE variable we have to evaluate
+ # it with the different PN names set each time.
+ d.setVar("PN", pkg)
+ if enable_text:
+ pkgfilepath = d.getVar("CVE_CHECK_RECIPE_FILE")
+ if os.path.exists(pkgfilepath):
+ with open(pkgfilepath) as pfile:
+ text_data += pfile.read()
+
+ if enable_json:
+ pkgfilepath = d.getVar("CVE_CHECK_RECIPE_FILE_JSON")
+ if os.path.exists(pkgfilepath):
+ with open(pkgfilepath) as j:
+ data = json.load(j)
+ cve_check_merge_jsons(json_data, data)
+
+ d.setVar("PN", save_pn)
+
+ if enable_text:
+ link_path = os.path.join(deploy_dir, "%s.cve" % link_name)
manifest_name = d.getVar("CVE_CHECK_MANIFEST")
- cve_tmp_file = d.getVar("CVE_CHECK_TMP_FILE")
-
- bb.utils.mkdirhier(os.path.dirname(manifest_name))
- shutil.copyfile(cve_tmp_file, manifest_name)
-
- if manifest_name and os.path.exists(manifest_name):
- manifest_link = os.path.join(deploy_dir, "%s.cve" % link_name)
- # if they are the same don't create the link
- if manifest_link != manifest_name:
- # If we already have another manifest, update symlinks
- if os.path.exists(os.path.realpath(manifest_link)):
- os.remove(manifest_link)
- os.symlink(os.path.basename(manifest_name), manifest_link)
- bb.plain("Image CVE report stored in: %s" % manifest_name)
-
- if d.getVar("CVE_CHECK_FORMAT_JSON") == "1":
- link_path = os.path.join(deploy_dir, "%s.json" % link_name)
- manifest_path = d.getVar("CVE_CHECK_MANIFEST_JSON")
- bb.note("Generating JSON CVE manifest")
- generate_json_report(d, manifest_path, link_path)
- bb.plain("Image CVE JSON report stored in: %s" % link_path)
+
+ with open(manifest_name, "w") as f:
+ f.write(text_data)
+
+ update_symlinks(manifest_name, link_path)
+ bb.plain("Image CVE report stored in: %s" % manifest_name)
+
+ if enable_json:
+ link_path = os.path.join(deploy_dir, "%s.json" % link_name)
+ manifest_name = d.getVar("CVE_CHECK_MANIFEST_JSON")
+
+ with open(manifest_name, "w") as f:
+ json.dump(json_data, f, indent=2)
+
+ update_symlinks(manifest_name, link_path)
+ bb.plain("Image CVE JSON report stored in: %s" % manifest_name)
}
-ROOTFS_POSTPROCESS_COMMAND:prepend = "${@'cve_check_write_rootfs_manifest; ' if d.getVar('CVE_CHECK_CREATE_MANIFEST') == '1' else ''}"
+ROOTFS_POSTPROCESS_COMMAND:prepend = "${@'cve_check_write_rootfs_manifest ' if d.getVar('CVE_CHECK_CREATE_MANIFEST') == '1' else ''}"
do_rootfs[recrdeptask] += "${@'do_cve_check' if d.getVar('CVE_CHECK_CREATE_MANIFEST') == '1' else ''}"
+do_populate_sdk[recrdeptask] += "${@'do_cve_check' if d.getVar('CVE_CHECK_CREATE_MANIFEST') == '1' else ''}"
def check_cves(d, patched_cves):
"""
Connect to the NVD database and find unpatched cves.
"""
- from oe.cve_check import Version
+ from oe.cve_check import Version, convert_cve_version, decode_cve_status
pn = d.getVar("PN")
real_pv = d.getVar("PV")
suffix = d.getVar("CVE_VERSION_SUFFIX")
cves_unpatched = []
+ cves_ignored = []
cves_status = []
cves_in_recipe = False
# CVE_PRODUCT can contain more than one product (eg. curl/libcurl)
products = d.getVar("CVE_PRODUCT").split()
# If this has been unset then we're not scanning for CVEs here (for example, image recipes)
if not products:
- return ([], [], [], {})
+ return ([], [], [], [])
pv = d.getVar("CVE_VERSION").split("+git")[0]
# If the recipe has been skipped/ignored we return empty lists
@@ -244,7 +318,12 @@ def check_cves(d, patched_cves):
bb.note("Recipe has been skipped by cve-check")
return ([], [], [], [])
- cve_ignore = d.getVar("CVE_CHECK_IGNORE").split()
+ # Convert CVE_STATUS into ignored CVEs and check validity
+ cve_ignore = []
+ for cve in (d.getVarFlags("CVE_STATUS") or {}):
+ decoded_status, _, _ = decode_cve_status(d, cve)
+ if decoded_status == "Ignored":
+ cve_ignore.append(cve)
import sqlite3
db_file = d.expand("file:${CVE_CHECK_DB_FILE}?mode=ro")
@@ -259,13 +338,13 @@ def check_cves(d, patched_cves):
vendor = "%"
# Find all relevant CVE IDs.
- for cverow in conn.execute("SELECT DISTINCT ID FROM PRODUCTS WHERE PRODUCT IS ? AND VENDOR LIKE ?", (product, vendor)):
+ cve_cursor = conn.execute("SELECT DISTINCT ID FROM PRODUCTS WHERE PRODUCT IS ? AND VENDOR LIKE ?", (product, vendor))
+ for cverow in cve_cursor:
cve = cverow[0]
if cve in cve_ignore:
- bb.note("%s-%s has been ignored for %s" % (product, pv, cve))
- # TODO: this should be in the report as 'ignored'
- patched_cves.add(cve)
+ bb.note("%s-%s ignores %s" % (product, pv, cve))
+ cves_ignored.append(cve)
continue
elif cve in patched_cves:
bb.note("%s has been patched" % (cve))
@@ -277,9 +356,17 @@ def check_cves(d, patched_cves):
cves_in_recipe = True
vulnerable = False
- for row in conn.execute("SELECT * FROM PRODUCTS WHERE ID IS ? AND PRODUCT IS ? AND VENDOR LIKE ?", (cve, product, vendor)):
+ ignored = False
+
+ product_cursor = conn.execute("SELECT * FROM PRODUCTS WHERE ID IS ? AND PRODUCT IS ? AND VENDOR LIKE ?", (cve, product, vendor))
+ for row in product_cursor:
(_, _, _, version_start, operator_start, version_end, operator_end) = row
#bb.debug(2, "Evaluating row " + str(row))
+ if cve in cve_ignore:
+ ignored = True
+
+ version_start = convert_cve_version(version_start)
+ version_end = convert_cve_version(version_end)
if (operator_start == '=' and pv == version_start) or version_start == '-':
vulnerable = True
@@ -312,25 +399,33 @@ def check_cves(d, patched_cves):
vulnerable = vulnerable_start or vulnerable_end
if vulnerable:
- bb.note("%s-%s is vulnerable to %s" % (pn, real_pv, cve))
- cves_unpatched.append(cve)
+ if ignored:
+ bb.note("%s is ignored in %s-%s" % (cve, pn, real_pv))
+ cves_ignored.append(cve)
+ else:
+ bb.note("%s-%s is vulnerable to %s" % (pn, real_pv, cve))
+ cves_unpatched.append(cve)
break
+ product_cursor.close()
if not vulnerable:
bb.note("%s-%s is not vulnerable to %s" % (pn, real_pv, cve))
- # TODO: not patched but not vulnerable
patched_cves.add(cve)
+ cve_cursor.close()
if not cves_in_product:
bb.note("No CVE records found for product %s, pn %s" % (product, pn))
cves_status.append([product, False])
conn.close()
+ diff_ignore = list(set(cve_ignore) - set(cves_ignored))
+ if diff_ignore:
+ oe.qa.handle_error("cve_status_not_in_db", "Found CVE (%s) with CVE_STATUS set that are not found in database for this component" % " ".join(diff_ignore), d)
if not cves_in_recipe:
bb.note("No CVE records for products in recipe %s" % (pn))
- return (list(cve_ignore), list(patched_cves), cves_unpatched, cves_status)
+ return (list(cves_ignored), list(patched_cves), cves_unpatched, cves_status)
def get_cve_info(d, cves):
"""
@@ -344,14 +439,16 @@ def get_cve_info(d, cves):
conn = sqlite3.connect(db_file, uri=True)
for cve in cves:
- for row in conn.execute("SELECT * FROM NVD WHERE ID IS ?", (cve,)):
+ cursor = conn.execute("SELECT * FROM NVD WHERE ID IS ?", (cve,))
+ for row in cursor:
cve_data[row[0]] = {}
cve_data[row[0]]["summary"] = row[1]
cve_data[row[0]]["scorev2"] = row[2]
cve_data[row[0]]["scorev3"] = row[3]
cve_data[row[0]]["modified"] = row[4]
cve_data[row[0]]["vector"] = row[5]
-
+ cve_data[row[0]]["vectorString"] = row[6]
+ cursor.close()
conn.close()
return cve_data
@@ -361,6 +458,8 @@ def cve_write_data_text(d, patched, unpatched, ignored, cve_data):
CVE manifest if enabled.
"""
+ from oe.cve_check import decode_cve_status
+
cve_file = d.getVar("CVE_CHECK_LOG")
fdir_name = d.getVar("FILE_DIRNAME")
layer = fdir_name.split("/")[-3]
@@ -368,6 +467,8 @@ def cve_write_data_text(d, patched, unpatched, ignored, cve_data):
include_layers = d.getVar("CVE_CHECK_LAYER_INCLUDELIST").split()
exclude_layers = d.getVar("CVE_CHECK_LAYER_EXCLUDELIST").split()
+ report_all = d.getVar("CVE_CHECK_REPORT_PATCHED") == "1"
+
if exclude_layers and layer in exclude_layers:
return
@@ -375,7 +476,7 @@ def cve_write_data_text(d, patched, unpatched, ignored, cve_data):
return
# Early exit, the text format does not report packages without CVEs
- if not patched+unpatched:
+ if not patched+unpatched+ignored:
return
nvd_link = "https://nvd.nist.gov/vuln/detail/"
@@ -385,45 +486,55 @@ def cve_write_data_text(d, patched, unpatched, ignored, cve_data):
for cve in sorted(cve_data):
is_patched = cve in patched
- if is_patched and (d.getVar("CVE_CHECK_REPORT_PATCHED") != "1"):
+ is_ignored = cve in ignored
+
+ status = "Unpatched"
+ if (is_patched or is_ignored) and not report_all:
continue
+ if is_ignored:
+ status = "Ignored"
+ elif is_patched:
+ status = "Patched"
+ else:
+ # default value of status is Unpatched
+ unpatched_cves.append(cve)
+
write_string += "LAYER: %s\n" % layer
write_string += "PACKAGE NAME: %s\n" % d.getVar("PN")
write_string += "PACKAGE VERSION: %s%s\n" % (d.getVar("EXTENDPE"), d.getVar("PV"))
write_string += "CVE: %s\n" % cve
- if cve in ignored:
- write_string += "CVE STATUS: Ignored\n"
- elif is_patched:
- write_string += "CVE STATUS: Patched\n"
- else:
- unpatched_cves.append(cve)
- write_string += "CVE STATUS: Unpatched\n"
+ write_string += "CVE STATUS: %s\n" % status
+ _, detail, description = decode_cve_status(d, cve)
+ if detail:
+ write_string += "CVE DETAIL: %s\n" % detail
+ if description:
+ write_string += "CVE DESCRIPTION: %s\n" % description
write_string += "CVE SUMMARY: %s\n" % cve_data[cve]["summary"]
write_string += "CVSS v2 BASE SCORE: %s\n" % cve_data[cve]["scorev2"]
write_string += "CVSS v3 BASE SCORE: %s\n" % cve_data[cve]["scorev3"]
write_string += "VECTOR: %s\n" % cve_data[cve]["vector"]
+ write_string += "VECTORSTRING: %s\n" % cve_data[cve]["vectorString"]
write_string += "MORE INFORMATION: %s%s\n\n" % (nvd_link, cve)
- if unpatched_cves:
+ if unpatched_cves and d.getVar("CVE_CHECK_SHOW_WARNINGS") == "1":
bb.warn("Found unpatched CVE (%s), for more information check %s" % (" ".join(unpatched_cves),cve_file))
- if write_string:
- with open(cve_file, "w") as f:
- bb.note("Writing file %s with CVE information" % cve_file)
- f.write(write_string)
+ with open(cve_file, "w") as f:
+ bb.note("Writing file %s with CVE information" % cve_file)
+ f.write(write_string)
- if d.getVar("CVE_CHECK_COPY_FILES") == "1":
- deploy_file = d.getVar("CVE_CHECK_RECIPE_FILE")
- bb.utils.mkdirhier(os.path.dirname(deploy_file))
- with open(deploy_file, "w") as f:
- f.write(write_string)
+ if d.getVar("CVE_CHECK_COPY_FILES") == "1":
+ deploy_file = d.getVar("CVE_CHECK_RECIPE_FILE")
+ bb.utils.mkdirhier(os.path.dirname(deploy_file))
+ with open(deploy_file, "w") as f:
+ f.write(write_string)
- if d.getVar("CVE_CHECK_CREATE_MANIFEST") == "1":
- cvelogpath = d.getVar("CVE_CHECK_SUMMARY_DIR")
- bb.utils.mkdirhier(cvelogpath)
+ if d.getVar("CVE_CHECK_CREATE_MANIFEST") == "1":
+ cvelogpath = d.getVar("CVE_CHECK_SUMMARY_DIR")
+ bb.utils.mkdirhier(cvelogpath)
- with open(d.getVar("CVE_CHECK_TMP_FILE"), "a") as f:
- f.write("%s" % write_string)
+ with open(d.getVar("CVE_CHECK_TMP_FILE"), "a") as f:
+ f.write("%s" % write_string)
def cve_check_write_json_output(d, output, direct_file, deploy_file, manifest_file):
"""
@@ -460,6 +571,8 @@ def cve_write_data_json(d, patched, unpatched, ignored, cve_data, cve_status):
Prepare CVE data for the JSON format, then write it.
"""
+ from oe.cve_check import decode_cve_status
+
output = {"version":"1", "package": []}
nvd_link = "https://nvd.nist.gov/vuln/detail/"
@@ -469,6 +582,8 @@ def cve_write_data_json(d, patched, unpatched, ignored, cve_data, cve_status):
include_layers = d.getVar("CVE_CHECK_LAYER_INCLUDELIST").split()
exclude_layers = d.getVar("CVE_CHECK_LAYER_EXCLUDELIST").split()
+ report_all = d.getVar("CVE_CHECK_REPORT_PATCHED") == "1"
+
if exclude_layers and layer in exclude_layers:
return
@@ -495,10 +610,11 @@ def cve_write_data_json(d, patched, unpatched, ignored, cve_data, cve_status):
for cve in sorted(cve_data):
is_patched = cve in patched
+ is_ignored = cve in ignored
status = "Unpatched"
- if is_patched and (d.getVar("CVE_CHECK_REPORT_PATCHED") != "1"):
+ if (is_patched or is_ignored) and not report_all:
continue
- if cve in ignored:
+ if is_ignored:
status = "Ignored"
elif is_patched:
status = "Patched"
@@ -514,9 +630,15 @@ def cve_write_data_json(d, patched, unpatched, ignored, cve_data, cve_status):
"scorev2" : cve_data[cve]["scorev2"],
"scorev3" : cve_data[cve]["scorev3"],
"vector" : cve_data[cve]["vector"],
+ "vectorString" : cve_data[cve]["vectorString"],
"status" : status,
"link": issue_link
}
+ _, detail, description = decode_cve_status(d, cve)
+ if detail:
+ cve_item["detail"] = detail
+ if description:
+ cve_item["description"] = description
cve_list.append(cve_item)
package_data["issue"] = cve_list
diff --git a/meta/classes/devtool-source.bbclass b/meta/classes/devtool-source.bbclass
index 41900e651f..4158c20c7e 100644
--- a/meta/classes/devtool-source.bbclass
+++ b/meta/classes/devtool-source.bbclass
@@ -1,3 +1,9 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
# Development tool - source extraction helper class
#
# NOTE: this class is intended for use by devtool and should not be
@@ -226,6 +232,9 @@ python devtool_post_patch() {
bb.process.run('git rebase devtool-no-overrides', cwd=srcsubdir)
bb.process.run('git checkout %s' % devbranch, cwd=srcsubdir)
bb.process.run('git tag -f devtool-patched', cwd=srcsubdir)
+ if os.path.exists(os.path.join(srcsubdir, '.gitmodules')):
+ bb.process.run('git submodule foreach --recursive "git tag -f devtool-patched"', cwd=srcsubdir)
+
}
python devtool_post_configure() {
diff --git a/meta/classes/distro_features_check.bbclass b/meta/classes/distro_features_check.bbclass
deleted file mode 100644
index 8124a8ca27..0000000000
--- a/meta/classes/distro_features_check.bbclass
+++ /dev/null
@@ -1,7 +0,0 @@
-# Temporarily provide fallback to the old name of the class
-
-python __anonymous() {
- bb.warn("distro_features_check.bbclass is deprecated, please use features_check.bbclass instead")
-}
-
-inherit features_check
diff --git a/meta/classes/distrooverrides.bbclass b/meta/classes/distrooverrides.bbclass
index bf3a2b2090..8d9d7cda7d 100644
--- a/meta/classes/distrooverrides.bbclass
+++ b/meta/classes/distrooverrides.bbclass
@@ -1,3 +1,9 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
# Turns certain DISTRO_FEATURES into overrides with the same
# name plus a df- prefix. Ensures that these special
# distro features remain set also for native and nativesdk
diff --git a/meta/classes/externalsrc.bbclass b/meta/classes/externalsrc.bbclass
index 90792a737b..70e27a8d35 100644
--- a/meta/classes/externalsrc.bbclass
+++ b/meta/classes/externalsrc.bbclass
@@ -2,7 +2,8 @@
# Author: Richard Purdie
# Some code and influence taken from srctree.bbclass:
# Copyright (C) 2009 Chris Larson <clarson@kergoth.com>
-# Released under the MIT license (see COPYING.MIT for the terms)
+#
+# SPDX-License-Identifier: MIT
#
# externalsrc.bbclass enables use of an existing source tree, usually external to
# the build system to build a piece of software rather than the usual fetch/unpack/patch
@@ -60,22 +61,21 @@ python () {
if externalsrcbuild:
d.setVar('B', externalsrcbuild)
else:
- d.setVar('B', '${WORKDIR}/${BPN}-${PV}/')
+ d.setVar('B', '${WORKDIR}/${BPN}-${PV}')
+ bb.fetch.get_hashvalue(d)
local_srcuri = []
fetch = bb.fetch2.Fetch((d.getVar('SRC_URI') or '').split(), d)
for url in fetch.urls:
url_data = fetch.ud[url]
parm = url_data.parm
- if (url_data.type == 'file' or
- url_data.type == 'npmsw' or url_data.type == 'crate' or
- 'type' in parm and parm['type'] == 'kmeta'):
+ if url_data.type in ['file', 'npmsw', 'crate'] or parm.get('type') in ['kmeta', 'git-dependency']:
local_srcuri.append(url)
d.setVar('SRC_URI', ' '.join(local_srcuri))
- # Dummy value because the default function can't be called with blank SRC_URI
- d.setVar('SRCPV', '999')
+ # sstate is never going to work for external source trees, disable it
+ d.setVar('SSTATE_SKIP_CREATION', '1')
if d.getVar('CONFIGUREOPT_DEPTRACK') == '--disable-dependency-tracking':
d.setVar('CONFIGUREOPT_DEPTRACK', '')
@@ -83,28 +83,28 @@ python () {
tasks = filter(lambda k: d.getVarFlag(k, "task"), d.keys())
for task in tasks:
- if task.endswith("_setscene"):
- # sstate is never going to work for external source trees, disable it
- bb.build.deltask(task, d)
- elif os.path.realpath(d.getVar('S')) == os.path.realpath(d.getVar('B')):
+ if os.path.realpath(d.getVar('S')) == os.path.realpath(d.getVar('B')):
# Since configure will likely touch ${S}, ensure only we lock so one task has access at a time
d.appendVarFlag(task, "lockfiles", " ${S}/singletask.lock")
- for funcname in [task, "base_" + task, "kernel_" + task]:
+ for v in d.keys():
+ cleandirs = d.getVarFlag(v, "cleandirs", False)
+ if cleandirs:
# We do not want our source to be wiped out, ever (kernel.bbclass does this for do_clean)
- cleandirs = oe.recipeutils.split_var_value(d.getVarFlag(funcname, 'cleandirs', False) or '')
+ cleandirs = oe.recipeutils.split_var_value(cleandirs)
setvalue = False
for cleandir in cleandirs[:]:
if oe.path.is_path_parent(externalsrc, d.expand(cleandir)):
cleandirs.remove(cleandir)
setvalue = True
if setvalue:
- d.setVarFlag(funcname, 'cleandirs', ' '.join(cleandirs))
+ d.setVarFlag(v, 'cleandirs', ' '.join(cleandirs))
fetch_tasks = ['do_fetch', 'do_unpack']
# If we deltask do_patch, there's no dependency to ensure do_unpack gets run, so add one
# Note that we cannot use d.appendVarFlag() here because deps is expected to be a list object, not a string
d.setVarFlag('do_configure', 'deps', (d.getVarFlag('do_configure', 'deps', False) or []) + ['do_unpack'])
+ d.setVarFlag('do_populate_lic', 'deps', (d.getVarFlag('do_populate_lic', 'deps', False) or []) + ['do_unpack'])
for task in d.getVar("SRCTREECOVEREDTASKS").split():
if local_srcuri and task in fetch_tasks:
@@ -126,6 +126,9 @@ python () {
d.setVarFlag('do_compile', 'file-checksums', '${@srctree_hash_files(d)}')
d.setVarFlag('do_configure', 'file-checksums', '${@srctree_configure_hash_files(d)}')
+ d.appendVarFlag('do_compile', 'prefuncs', ' fetcher_hashes_dummyfunc')
+ d.appendVarFlag('do_configure', 'prefuncs', ' fetcher_hashes_dummyfunc')
+
# We don't want the workdir to go away
d.appendVar('RM_WORK_EXCLUDE', ' ' + d.getVar('PN'))
@@ -209,8 +212,8 @@ def srctree_hash_files(d, srcdir=None):
try:
git_dir = os.path.join(s_dir,
subprocess.check_output(['git', '-C', s_dir, 'rev-parse', '--git-dir'], stderr=subprocess.DEVNULL).decode("utf-8").rstrip())
- top_git_dir = os.path.join(s_dir, subprocess.check_output(['git', '-C', d.getVar("TOPDIR"), 'rev-parse', '--git-dir'],
- stderr=subprocess.DEVNULL).decode("utf-8").rstrip())
+ top_git_dir = os.path.join(d.getVar("TOPDIR"),
+ subprocess.check_output(['git', '-C', d.getVar("TOPDIR"), 'rev-parse', '--git-dir'], stderr=subprocess.DEVNULL).decode("utf-8").rstrip())
if git_dir == top_git_dir:
git_dir = None
except subprocess.CalledProcessError:
@@ -227,15 +230,16 @@ def srctree_hash_files(d, srcdir=None):
env['GIT_INDEX_FILE'] = tmp_index.name
subprocess.check_output(['git', 'add', '-A', '.'], cwd=s_dir, env=env)
git_sha1 = subprocess.check_output(['git', 'write-tree'], cwd=s_dir, env=env).decode("utf-8")
- submodule_helper = subprocess.check_output(['git', 'submodule--helper', 'list'], cwd=s_dir, env=env).decode("utf-8")
- for line in submodule_helper.splitlines():
- module_dir = os.path.join(s_dir, line.rsplit(maxsplit=1)[1])
- if os.path.isdir(module_dir):
- proc = subprocess.Popen(['git', 'add', '-A', '.'], cwd=module_dir, env=env, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
- proc.communicate()
- proc = subprocess.Popen(['git', 'write-tree'], cwd=module_dir, env=env, stdout=subprocess.PIPE, stderr=subprocess.DEVNULL)
- stdout, _ = proc.communicate()
- git_sha1 += stdout.decode("utf-8")
+ if os.path.exists(os.path.join(s_dir, ".gitmodules")) and os.path.getsize(os.path.join(s_dir, ".gitmodules")) > 0:
+ submodule_helper = subprocess.check_output(["git", "config", "--file", ".gitmodules", "--get-regexp", "path"], cwd=s_dir, env=env).decode("utf-8")
+ for line in submodule_helper.splitlines():
+ module_dir = os.path.join(s_dir, line.rsplit(maxsplit=1)[1])
+ if os.path.isdir(module_dir):
+ proc = subprocess.Popen(['git', 'add', '-A', '.'], cwd=module_dir, env=env, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
+ proc.communicate()
+ proc = subprocess.Popen(['git', 'write-tree'], cwd=module_dir, env=env, stdout=subprocess.PIPE, stderr=subprocess.DEVNULL)
+ stdout, _ = proc.communicate()
+ git_sha1 += stdout.decode("utf-8")
sha1 = hashlib.sha1(git_sha1.encode("utf-8")).hexdigest()
with open(oe_hash_file, 'w') as fobj:
fobj.write(sha1)
@@ -249,6 +253,8 @@ def srctree_configure_hash_files(d):
Get the list of files that should trigger do_configure to re-execute,
based on the value of CONFIGURE_FILES
"""
+ import fnmatch
+
in_files = (d.getVar('CONFIGURE_FILES') or '').split()
out_items = []
search_files = []
@@ -260,8 +266,8 @@ def srctree_configure_hash_files(d):
if search_files:
s_dir = d.getVar('EXTERNALSRC')
for root, _, files in os.walk(s_dir):
- for f in files:
- if f in search_files:
+ for p in search_files:
+ for f in fnmatch.filter(files, p):
out_items.append('%s:True' % os.path.join(root, f))
return ' '.join(out_items)
diff --git a/meta/classes/extrausers.bbclass b/meta/classes/extrausers.bbclass
index a8ef660b30..c825c06df9 100644
--- a/meta/classes/extrausers.bbclass
+++ b/meta/classes/extrausers.bbclass
@@ -1,3 +1,9 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
# This bbclass is used for image level user/group configuration.
# Inherit this class if you want to make EXTRA_USERS_PARAMS effective.
@@ -17,7 +23,7 @@ inherit useradd_base
PACKAGE_INSTALL:append = " ${@['', 'base-passwd shadow'][bool(d.getVar('EXTRA_USERS_PARAMS'))]}"
# Image level user / group settings
-ROOTFS_POSTPROCESS_COMMAND:append = " set_user_group;"
+ROOTFS_POSTPROCESS_COMMAND:append = " set_user_group"
# Image level user / group settings
set_user_group () {
diff --git a/meta/classes/gi-docgen.bbclass b/meta/classes/gi-docgen.bbclass
deleted file mode 100644
index 15581ca127..0000000000
--- a/meta/classes/gi-docgen.bbclass
+++ /dev/null
@@ -1,24 +0,0 @@
-# gi-docgen is a new gnome documentation generator, which
-# seems to be a successor to gtk-doc:
-# https://gitlab.gnome.org/GNOME/gi-docgen
-
-# This variable is set to True if api-documentation is in
-# DISTRO_FEATURES, and False otherwise.
-GIDOCGEN_ENABLED ?= "${@bb.utils.contains('DISTRO_FEATURES', 'api-documentation', 'True', 'False', d)}"
-# When building native recipes, disable gi-docgen, as it is not necessary,
-# pulls in additional dependencies, and makes build times longer
-GIDOCGEN_ENABLED:class-native = "False"
-GIDOCGEN_ENABLED:class-nativesdk = "False"
-
-# meson: default option name to enable/disable gi-docgen. This matches most
-# projects' configuration. In doubts - check meson_options.txt in project's
-# source path.
-GIDOCGEN_MESON_OPTION ?= 'gtk_doc'
-GIDOCGEN_MESON_ENABLE_FLAG ?= 'true'
-GIDOCGEN_MESON_DISABLE_FLAG ?= 'false'
-
-# Auto enable/disable based on GIDOCGEN_ENABLED
-EXTRA_OEMESON:prepend = "-D${GIDOCGEN_MESON_OPTION}=${@bb.utils.contains('GIDOCGEN_ENABLED', 'True', '${GIDOCGEN_MESON_ENABLE_FLAG}', '${GIDOCGEN_MESON_DISABLE_FLAG}', d)} "
-
-DEPENDS:append = "${@' gi-docgen-native gi-docgen' if d.getVar('GIDOCGEN_ENABLED') == 'True' else ''}"
-
diff --git a/meta/classes/glide.bbclass b/meta/classes/glide.bbclass
deleted file mode 100644
index 2db4ac6846..0000000000
--- a/meta/classes/glide.bbclass
+++ /dev/null
@@ -1,9 +0,0 @@
-# Handle Glide Vendor Package Management use
-#
-# Copyright 2018 (C) O.S. Systems Software LTDA.
-
-DEPENDS:append = " glide-native"
-
-do_compile:prepend() {
- ( cd ${B}/src/${GO_IMPORT} && glide install )
-}
diff --git a/meta/classes/go-vendor.bbclass b/meta/classes/go-vendor.bbclass
new file mode 100644
index 0000000000..1bbb99ac79
--- /dev/null
+++ b/meta/classes/go-vendor.bbclass
@@ -0,0 +1,211 @@
+#
+# Copyright 2023 (C) Weidmueller GmbH & Co KG
+# Author: Lukas Funke <lukas.funke@weidmueller.com>
+#
+# Handle Go vendor support for offline builds
+#
+# When importing Go modules, Go downloads the imported modules using
+# a network (proxy) connection ahead of the compile stage. This contradicts
+# the yocto build concept of fetching every source ahead of build-time
+# and supporting offline builds.
+#
+# To support offline builds, we use Go 'vendoring': module dependencies are
+# downloaded during the fetch-phase and unpacked into the modules 'vendor'
+# folder. Additionally a manifest file is generated for the 'vendor' folder
+#
+
+inherit go-mod
+
+def go_src_uri(repo, version, path=None, subdir=None, \
+ vcs='git', replaces=None, pathmajor=None):
+
+ destsuffix = "git/src/import/vendor.fetch"
+ module_path = repo if not path else path
+
+ src_uri = "{}://{};name={}".format(vcs, repo, module_path.replace('/', '.'))
+ src_uri += ";destsuffix={}/{}@{}".format(destsuffix, repo, version)
+
+ if vcs == "git":
+ src_uri += ";nobranch=1;protocol=https"
+
+ src_uri += ";go_module_path={}".format(module_path)
+
+ if replaces:
+ src_uri += ";go_module_replacement={}".format(replaces)
+ if subdir:
+ src_uri += ";go_subdir={}".format(subdir)
+ if pathmajor:
+ src_uri += ";go_pathmajor={}".format(pathmajor)
+ src_uri += ";is_go_dependency=1"
+
+ return src_uri
+
+python do_vendor_unlink() {
+ go_import = d.getVar('GO_IMPORT')
+ source_dir = d.getVar('S')
+ linkname = os.path.join(source_dir, *['src', go_import, 'vendor'])
+
+ os.unlink(linkname)
+}
+
+addtask vendor_unlink before do_package after do_install
+
+python do_go_vendor() {
+ import shutil
+
+ src_uri = (d.getVar('SRC_URI') or "").split()
+
+ if not src_uri:
+ bb.fatal("SRC_URI is empty")
+
+ default_destsuffix = "git/src/import/vendor.fetch"
+ fetcher = bb.fetch2.Fetch(src_uri, d)
+ go_import = d.getVar('GO_IMPORT')
+ source_dir = d.getVar('S')
+
+ linkname = os.path.join(source_dir, *['src', go_import, 'vendor'])
+ vendor_dir = os.path.join(source_dir, *['src', 'import', 'vendor'])
+ import_dir = os.path.join(source_dir, *['src', 'import', 'vendor.fetch'])
+
+ if os.path.exists(vendor_dir):
+ # Nothing to do except re-establish link to actual vendor folder
+ if not os.path.exists(linkname):
+ os.symlink(vendor_dir, linkname)
+ return
+
+ bb.utils.mkdirhier(vendor_dir)
+
+ modules = {}
+
+ for url in fetcher.urls:
+ srcuri = fetcher.ud[url].host + fetcher.ud[url].path
+
+ # Skip non Go module src uris
+ if not fetcher.ud[url].parm.get('is_go_dependency'):
+ continue
+
+ destsuffix = fetcher.ud[url].parm.get('destsuffix')
+ # We derive the module repo / version in the following manner (exmaple):
+ #
+ # destsuffix = git/src/import/vendor.fetch/github.com/foo/bar@v1.2.3
+ # p = github.com/foo/bar@v1.2.3
+ # repo = github.com/foo/bar
+ # version = v1.2.3
+
+ p = destsuffix[len(default_destsuffix)+1:]
+ repo, version = p.split('@')
+
+ module_path = fetcher.ud[url].parm.get('go_module_path')
+
+ subdir = fetcher.ud[url].parm.get('go_subdir')
+ subdir = None if not subdir else subdir
+
+ pathMajor = fetcher.ud[url].parm.get('go_pathmajor')
+ pathMajor = None if not pathMajor else pathMajor.strip('/')
+
+ if not (repo, version) in modules:
+ modules[(repo, version)] = {
+ "repo_path": os.path.join(import_dir, p),
+ "module_path": module_path,
+ "subdir": subdir,
+ "pathMajor": pathMajor }
+
+ for module_key, module in modules.items():
+
+ # only take the version which is explicitly listed
+ # as a dependency in the go.mod
+ module_path = module['module_path']
+ rootdir = module['repo_path']
+ subdir = module['subdir']
+ pathMajor = module['pathMajor']
+
+ src = rootdir
+
+ if subdir:
+ src = os.path.join(rootdir, subdir)
+
+ # If the module is released at major version 2 or higher, the module
+ # path must end with a major version suffix like /v2.
+ # This may or may not be part of the subdirectory name
+ #
+ # https://go.dev/ref/mod#modules-overview
+ if pathMajor:
+ tmp = os.path.join(src, pathMajor)
+ # source directory including major version path may or may not exist
+ if os.path.exists(tmp):
+ src = tmp
+
+ dst = os.path.join(vendor_dir, module_path)
+
+ bb.debug(1, "cp %s --> %s" % (src, dst))
+ shutil.copytree(src, dst, symlinks=True, dirs_exist_ok=True, \
+ ignore=shutil.ignore_patterns(".git", \
+ "vendor", \
+ "*._test.go"))
+
+ # If the root directory has a LICENSE file but not the subdir
+ # we copy the root license to the sub module since the license
+ # applies to all modules in the repository
+ # see https://go.dev/ref/mod#vcs-license
+ if subdir:
+ rootdirLicese = os.path.join(rootdir, "LICENSE")
+ subdirLicense = os.path.join(src, "LICENSE")
+
+ if not os.path.exists(subdir) and \
+ os.path.exists(rootdirLicese):
+ shutil.copy2(rootdirLicese, subdirLicense)
+
+ # Copy vendor manifest
+ modules_txt_src = os.path.join(d.getVar('WORKDIR'), "modules.txt")
+ bb.debug(1, "cp %s --> %s" % (modules_txt_src, vendor_dir))
+ shutil.copy2(modules_txt_src, vendor_dir)
+
+ # Clean up vendor dir
+ # We only require the modules in the modules_txt file
+ fetched_paths = set([os.path.relpath(x[0], vendor_dir) for x in os.walk(vendor_dir)])
+
+ # Remove toplevel dir
+ fetched_paths.remove('.')
+
+ vendored_paths = set()
+ replaced_paths = dict()
+ with open(modules_txt_src) as f:
+ for line in f:
+ if not line.startswith("#"):
+ line = line.strip()
+ vendored_paths.add(line)
+
+ # Add toplevel dirs into vendored dir, as we want to keep them
+ topdir = os.path.dirname(line)
+ while len(topdir):
+ if not topdir in vendored_paths:
+ vendored_paths.add(topdir)
+
+ topdir = os.path.dirname(topdir)
+ else:
+ replaced_module = line.split("=>")
+ if len(replaced_module) > 1:
+ # This module has been replaced, use a local path
+ # we parse the line that has a pattern "# module-name [module-version] => local-path
+ actual_path = replaced_module[1].strip()
+ vendored_name = replaced_module[0].split()[1]
+ bb.debug(1, "added vendored name %s for actual path %s" % (vendored_name, actual_path))
+ replaced_paths[vendored_name] = actual_path
+
+ for path in fetched_paths:
+ if path not in vendored_paths:
+ realpath = os.path.join(vendor_dir, path)
+ if os.path.exists(realpath):
+ shutil.rmtree(realpath)
+
+ for vendored_name, replaced_path in replaced_paths.items():
+ symlink_target = os.path.join(source_dir, *['src', go_import, replaced_path])
+ symlink_name = os.path.join(vendor_dir, vendored_name)
+ bb.debug(1, "vendored name %s, symlink name %s" % (vendored_name, symlink_name))
+ os.symlink(symlink_target, symlink_name)
+
+ # Create a symlink to the actual directory
+ os.symlink(vendor_dir, linkname)
+}
+
+addtask go_vendor before do_patch after do_unpack
diff --git a/meta/classes/grub-efi.bbclass b/meta/classes/grub-efi.bbclass
deleted file mode 100644
index 8fc6999e52..0000000000
--- a/meta/classes/grub-efi.bbclass
+++ /dev/null
@@ -1,8 +0,0 @@
-inherit grub-efi-cfg
-require conf/image-uefi.conf
-
-efi_populate() {
- efi_populate_common "$1" grub-efi
-
- install -m 0644 ${GRUB_CFG} ${DEST}${EFIDIR}/grub.cfg
-}
diff --git a/meta/classes/gtk-doc.bbclass b/meta/classes/gtk-doc.bbclass
deleted file mode 100644
index 07b46ac829..0000000000
--- a/meta/classes/gtk-doc.bbclass
+++ /dev/null
@@ -1,83 +0,0 @@
-# Helper class to pull in the right gtk-doc dependencies and configure
-# gtk-doc to enable or disable documentation building (which requries the
-# use of usermode qemu).
-
-# This variable is set to True if api-documentation is in
-# DISTRO_FEATURES and qemu-usermode is in MACHINE_FEATURES, and False otherwise.
-#
-# It should be used in recipes to determine whether gtk-doc based documentation should be built,
-# so that qemu use can be avoided when necessary.
-GTKDOC_ENABLED:class-native = "False"
-GTKDOC_ENABLED ?= "${@bb.utils.contains('DISTRO_FEATURES', 'api-documentation', \
- bb.utils.contains('MACHINE_FEATURES', 'qemu-usermode', 'True', 'False', d), 'False', d)}"
-
-# meson: default option name to enable/disable gtk-doc. This matches most
-# project's configuration. In doubts - check meson_options.txt in project's
-# source path.
-GTKDOC_MESON_OPTION ?= 'docs'
-GTKDOC_MESON_ENABLE_FLAG ?= 'true'
-GTKDOC_MESON_DISABLE_FLAG ?= 'false'
-
-# Auto enable/disable based on GTKDOC_ENABLED
-EXTRA_OECONF:prepend:class-target = "${@bb.utils.contains('GTKDOC_ENABLED', 'True', '--enable-gtk-doc --enable-gtk-doc-html --disable-gtk-doc-pdf', \
- '--disable-gtk-doc', d)} "
-EXTRA_OEMESON:prepend:class-target = "-D${GTKDOC_MESON_OPTION}=${@bb.utils.contains('GTKDOC_ENABLED', 'True', '${GTKDOC_MESON_ENABLE_FLAG}', '${GTKDOC_MESON_DISABLE_FLAG}', d)} "
-
-# When building native recipes, disable gtkdoc, as it is not necessary,
-# pulls in additional dependencies, and makes build times longer
-EXTRA_OECONF:prepend:class-native = "--disable-gtk-doc "
-EXTRA_OECONF:prepend:class-nativesdk = "--disable-gtk-doc "
-EXTRA_OEMESON:prepend:class-native = "-D${GTKDOC_MESON_OPTION}=${GTKDOC_MESON_DISABLE_FLAG} "
-EXTRA_OEMESON:prepend:class-nativesdk = "-D${GTKDOC_MESON_OPTION}=${GTKDOC_MESON_DISABLE_FLAG} "
-
-# Even though gtkdoc is disabled on -native, gtk-doc package is still
-# needed for m4 macros.
-DEPENDS:append = " gtk-doc-native"
-
-# The documentation directory, where the infrastructure will be copied.
-# gtkdocize has a default of "." so to handle out-of-tree builds set this to $S.
-GTKDOC_DOCDIR ?= "${S}"
-
-export STAGING_DIR_HOST
-
-inherit python3native pkgconfig qemu
-DEPENDS:append = "${@' qemu-native' if d.getVar('GTKDOC_ENABLED') == 'True' else ''}"
-
-do_configure:prepend () {
- # Need to use ||true as this is only needed if configure.ac both exists
- # and uses GTK_DOC_CHECK.
- gtkdocize --srcdir ${S} --docdir ${GTKDOC_DOCDIR} || true
-}
-
-do_compile:prepend:class-target () {
- if [ ${GTKDOC_ENABLED} = True ]; then
- # Write out a qemu wrapper that will be given to gtkdoc-scangobj so that it
- # can run target helper binaries through that.
- qemu_binary="${@qemu_wrapper_cmdline(d, '$STAGING_DIR_HOST', ['\\$GIR_EXTRA_LIBS_PATH','$STAGING_DIR_HOST/${libdir}','$STAGING_DIR_HOST/${base_libdir}'])}"
- cat > ${B}/gtkdoc-qemuwrapper << EOF
-#!/bin/sh
-# Use a modules directory which doesn't exist so we don't load random things
-# which may then get deleted (or their dependencies) and potentially segfault
-export GIO_MODULE_DIR=${STAGING_LIBDIR}/gio/modules-dummy
-
-GIR_EXTRA_LIBS_PATH=\`find ${B} -name *.so -printf "%h\n"|sort|uniq| tr '\n' ':'\`\$GIR_EXTRA_LIBS_PATH
-GIR_EXTRA_LIBS_PATH=\`find ${B} -name .libs| tr '\n' ':'\`\$GIR_EXTRA_LIBS_PATH
-
-# meson sets this wrongly (only to libs in build-dir), qemu_wrapper_cmdline() and GIR_EXTRA_LIBS_PATH take care of it properly
-unset LD_LIBRARY_PATH
-
-if [ -d ".libs" ]; then
- $qemu_binary ".libs/\$@"
-else
- $qemu_binary "\$@"
-fi
-
-if [ \$? -ne 0 ]; then
- echo "If the above error message is about missing .so libraries, then setting up GIR_EXTRA_LIBS_PATH in the recipe should help."
- echo "(typically like this: GIR_EXTRA_LIBS_PATH=\"$""{B}/something/.libs\" )"
- exit 1
-fi
-EOF
- chmod +x ${B}/gtkdoc-qemuwrapper
- fi
-}
diff --git a/meta/classes/icecc.bbclass b/meta/classes/icecc.bbclass
index 9b912a3083..159cae20f8 100644
--- a/meta/classes/icecc.bbclass
+++ b/meta/classes/icecc.bbclass
@@ -1,32 +1,38 @@
-# IceCream distributed compiling support
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+# Icecream distributed compiling support
#
# Stages directories with symlinks from gcc/g++ to icecc, for both
# native and cross compilers. Depending on each configure or compile,
# the directories are added at the head of the PATH list and ICECC_CXX
-# and ICEC_CC are set.
+# and ICECC_CC are set.
#
# For the cross compiler, creates a tar.gz of our toolchain and sets
# ICECC_VERSION accordingly.
#
# The class now handles all 3 different compile 'stages' (i.e native ,cross-kernel and target) creating the
# necessary environment tar.gz file to be used by the remote machines.
-# It also supports meta-toolchain generation
+# It also supports meta-toolchain generation.
#
# If ICECC_PATH is not set in local.conf then the class will try to locate it using 'bb.utils.which'
-# but nothing is sure ;)
+# but nothing is sure. ;)
#
# If ICECC_ENV_EXEC is set in local.conf, then it should point to the icecc-create-env script provided by the user
-# or the default one provided by icecc-create-env.bb will be used
-# (NOTE that this is a modified version of the script need it and *not the one that comes with icecc*
+# or the default one provided by icecc-create-env_0.1.bb will be used.
+# (NOTE that this is a modified version of the needed script and *not the one that comes with icecream*).
#
-# User can specify if specific recipes or recipes belonging to class should not use icecc to distribute
-# compile jobs to remote machines, but handled locally, by defining ICECC_CLASS_DISABLE and ICECC_RECIPE_DISABLE
+# User can specify if specific recipes or recipes inheriting specific classes should not use icecc to distribute
+# compile jobs to remote machines, but handle them locally by defining ICECC_CLASS_DISABLE and ICECC_RECIPE_DISABLE
# with the appropriate values in local.conf. In addition the user can force to enable icecc for recipes
# which set an empty PARALLEL_MAKE variable by defining ICECC_RECIPE_ENABLE.
#
#########################################################################################
-#Error checking is kept to minimum so double check any parameters you pass to the class
-###########################################################################################
+# Error checking is kept to minimum so double check any parameters you pass to the class
+#########################################################################################
BB_BASEHASH_IGNORE_VARS += "ICECC_PARALLEL_MAKE ICECC_DISABLED ICECC_RECIPE_DISABLE \
ICECC_CLASS_DISABLE ICECC_RECIPE_ENABLE ICECC_PATH ICECC_ENV_EXEC \
@@ -44,7 +50,7 @@ HOSTTOOLS_NONFATAL += "icecc patchelf"
# invalidate the version on the compile nodes. Changing it will cause a new
# environment to be created.
#
-# A useful thing to do for testing Icecream changes locally is to add a
+# A useful thing to do for testing icecream changes locally is to add a
# subversion in local.conf:
# ICECC_ENV_VERSION:append = "-my-ver-1"
ICECC_ENV_VERSION = "2"
@@ -66,16 +72,16 @@ CXXFLAGS += "${ICECC_CFLAGS}"
ICECC_ENV_DEBUG ??= ""
# Disable recipe list contains a list of recipes that can not distribute
-# compile tasks for one reason or the other. When adding new entry, please
+# compile tasks for one reason or the other. When adding a new entry, please
# document why (how it failed) so that we can re-evaluate it later e.g. when
-# there is new version
+# there is a new version.
#
# libgcc-initial - fails with CPP sanity check error if host sysroot contains
-# cross gcc built for another target tune/variant
+# cross gcc built for another target tune/variant.
# pixman - prng_state: TLS reference mismatches non-TLS reference, possibly due to
-# pragma omp threadprivate(prng_state)
+# pragma omp threadprivate(prng_state).
# systemtap - _HelperSDT.c undefs macros and uses the identifiers in macros emitting
-# inline assembly
+# inline assembly.
# target-sdk-provides-dummy - ${HOST_PREFIX} is empty which triggers the "NULL
# prefix" error.
ICECC_RECIPE_DISABLE += "\
@@ -85,10 +91,10 @@ ICECC_RECIPE_DISABLE += "\
target-sdk-provides-dummy \
"
-# Classes that should not use icecc. When adding new entry, please
-# document why (how it failed) so that we can re-evaluate it later
+# Classes that should not use icecc. When adding a new entry, please
+# document why (how it failed) so that we can re-evaluate it later.
#
-# image - Image aren't compiling, but the testing framework for images captures
+# image - images aren't compiling, but the testing framework for images captures
# PARALLEL_MAKE as part of the test environment. Many tests won't use
# icecream, but leaving the high level of parallelism can cause them to
# consume an unnecessary amount of resources.
@@ -97,7 +103,7 @@ ICECC_CLASS_DISABLE += "\
"
def get_icecc_dep(d):
- # INHIBIT_DEFAULT_DEPS doesn't apply to the patch command. Whether or not
+ # INHIBIT_DEFAULT_DEPS doesn't apply to the patch command. Whether or not
# we need that built is the responsibility of the patch function / class, not
# the application.
if not d.getVar('INHIBIT_DEFAULT_DEPS'):
@@ -253,7 +259,7 @@ def icecc_get_tool_link(tool, d):
def icecc_get_path_tool(tool, d):
# This is a little ugly, but we want to make sure we add an actual
# compiler to the toolchain, not ccache. Some distros (e.g. Fedora)
- # have ccache enabled by default using symlinks PATH, meaning ccache
+ # have ccache enabled by default using symlinks in PATH, meaning ccache
# would be found first when looking for the compiler.
paths = os.getenv("PATH").split(':')
while True:
@@ -374,7 +380,6 @@ set_icecc_env() {
fi
for compiler in $compilers; do
ln -sf $ICECC_BIN $ICE_PATH/symlinks/$compiler
- rm -f $ICE_PATH/$compiler
cat <<-__EOF__ > $ICE_PATH/$compiler
#!/bin/sh -e
export ICECC_VERSION=$ICECC_VERSION
@@ -423,32 +428,34 @@ set_icecc_env() {
bbnote "Using icecc tarball: $ICECC_VERSION"
}
-do_configure[network] = "1"
do_configure:prepend() {
set_icecc_env
}
-do_compile[network] = "1"
do_compile:prepend() {
set_icecc_env
}
-do_compile_kernelmodules[network] = "1"
do_compile_kernelmodules:prepend() {
set_icecc_env
}
-do_install[network] = "1"
do_install:prepend() {
set_icecc_env
}
-# IceCream is not (currently) supported in the extensible SDK
+# Icecream is not (currently) supported in the extensible SDK
ICECC_SDK_HOST_TASK = "nativesdk-icecc-toolchain"
ICECC_SDK_HOST_TASK:task-populate-sdk-ext = ""
-# Don't include IceCream in uninative tarball
+# Don't include icecream in uninative tarball
ICECC_SDK_HOST_TASK:pn-uninative-tarball = ""
# Add the toolchain scripts to the SDK
TOOLCHAIN_HOST_TASK:append = " ${ICECC_SDK_HOST_TASK}"
+
+python () {
+ if d.getVar('ICECC_DISABLED') != "1":
+ for task in ['do_configure', 'do_compile', 'do_compile_kernelmodules', 'do_install']:
+ d.setVarFlag(task, 'network', '1')
+}
diff --git a/meta/classes/image-artifact-names.bbclass b/meta/classes/image-artifact-names.bbclass
deleted file mode 100644
index f5769e520f..0000000000
--- a/meta/classes/image-artifact-names.bbclass
+++ /dev/null
@@ -1,22 +0,0 @@
-##################################################################
-# Specific image creation and rootfs population info.
-##################################################################
-
-IMAGE_BASENAME ?= "${PN}"
-IMAGE_VERSION_SUFFIX ?= "-${DATETIME}"
-IMAGE_VERSION_SUFFIX[vardepsexclude] += "DATETIME SOURCE_DATE_EPOCH"
-IMAGE_NAME ?= "${IMAGE_BASENAME}-${MACHINE}${IMAGE_VERSION_SUFFIX}"
-IMAGE_LINK_NAME ?= "${IMAGE_BASENAME}-${MACHINE}"
-
-# IMAGE_NAME is the base name for everything produced when building images.
-# The actual image that contains the rootfs has an additional suffix (.rootfs
-# by default) followed by additional suffices which describe the format (.ext4,
-# .ext4.xz, etc.).
-IMAGE_NAME_SUFFIX ??= ".rootfs"
-
-python () {
- if bb.data.inherits_class('deploy', d) and d.getVar("IMAGE_VERSION_SUFFIX") == "-${DATETIME}":
- import datetime
- d.setVar("IMAGE_VERSION_SUFFIX", "-" + datetime.datetime.fromtimestamp(int(d.getVar("SOURCE_DATE_EPOCH")), datetime.timezone.utc).strftime('%Y%m%d%H%M%S'))
- d.setVarFlag("IMAGE_VERSION_SUFFIX", "vardepvalue", "")
-}
diff --git a/meta/classes/image-buildinfo.bbclass b/meta/classes/image-buildinfo.bbclass
index 94c585d4cd..b83ce650ad 100644
--- a/meta/classes/image-buildinfo.bbclass
+++ b/meta/classes/image-buildinfo.bbclass
@@ -1,10 +1,10 @@
#
-# Writes build information to target filesystem on /etc/build
+# Writes build information to target filesystem on /etc/buildinfo
#
# Copyright (C) 2014 Intel Corporation
# Author: Alejandro Enedino Hernandez Samaniego <alejandro.hernandez@intel.com>
#
-# Licensed under the MIT license, see COPYING.MIT for details
+# SPDX-License-Identifier: MIT
#
# Usage: add INHERIT += "image-buildinfo" to your conf file
#
@@ -13,7 +13,8 @@
IMAGE_BUILDINFO_VARS ?= "DISTRO DISTRO_VERSION"
# Desired location of the output file in the image.
-IMAGE_BUILDINFO_FILE ??= "${sysconfdir}/build"
+IMAGE_BUILDINFO_FILE ??= "${sysconfdir}/buildinfo"
+SDK_BUILDINFO_FILE ??= "/buildinfo"
# From buildhistory.bbclass
def image_buildinfo_outputvars(vars, d):
@@ -26,30 +27,10 @@ def image_buildinfo_outputvars(vars, d):
ret += "%s = %s\n" % (var, value)
return ret.rstrip('\n')
-# Gets git branch's status (clean or dirty)
-def get_layer_git_status(path):
- import subprocess
- try:
- subprocess.check_output("""cd %s; export PSEUDO_UNLOAD=1; set -e;
- git diff --quiet --no-ext-diff
- git diff --quiet --no-ext-diff --cached""" % path,
- shell=True,
- stderr=subprocess.STDOUT)
- return ""
- except subprocess.CalledProcessError as ex:
- # Silently treat errors as "modified", without checking for the
- # (expected) return code 1 in a modified git repo. For example, we get
- # output and a 129 return code when a layer isn't a git repo at all.
- return "-- modified"
-
# Returns layer revisions along with their respective status
def get_layer_revs(d):
- layers = (d.getVar("BBLAYERS") or "").split()
- medadata_revs = ["%-17s = %s:%s %s" % (os.path.basename(i), \
- base_get_metadata_git_branch(i, None).strip(), \
- base_get_metadata_git_revision(i, None), \
- get_layer_git_status(i)) \
- for i in layers]
+ revisions = oe.buildcfg.get_layer_revisions(d)
+ medadata_revs = ["%-17s = %s:%s%s" % (r[1], r[2], r[3], r[4]) for r in revisions]
return '\n'.join(medadata_revs)
def buildinfo_target(d):
@@ -60,11 +41,12 @@ def buildinfo_target(d):
vars = (d.getVar("IMAGE_BUILDINFO_VARS") or "")
return image_buildinfo_outputvars(vars, d)
-# Write build information to target filesystem
-python buildinfo () {
+python buildinfo() {
if not d.getVar('IMAGE_BUILDINFO_FILE'):
return
- with open(d.expand('${IMAGE_ROOTFS}${IMAGE_BUILDINFO_FILE}'), 'w') as build:
+ destfile = d.expand('${BUILDINFODEST}${IMAGE_BUILDINFO_FILE}')
+ bb.utils.mkdirhier(os.path.dirname(destfile))
+ with open(destfile, 'w') as build:
build.writelines((
'''-----------------------
Build Configuration: |
@@ -82,4 +64,18 @@ Layer Revisions: |
))
}
-IMAGE_PREPROCESS_COMMAND += "buildinfo;"
+# Write build information to target filesystem
+python buildinfo_image () {
+ d.setVar("BUILDINFODEST", "${IMAGE_ROOTFS}")
+ bb.build.exec_func("buildinfo", d)
+}
+
+python buildinfo_sdk () {
+ d.setVar("BUILDINFODEST", "${SDK_OUTPUT}/${SDKPATH}")
+ d.setVar("IMAGE_BUILDINFO_FILE", d.getVar("SDK_BUILDINFO_FILE"))
+ bb.build.exec_func("buildinfo", d)
+}
+
+IMAGE_PREPROCESS_COMMAND += "buildinfo_image"
+POPULATE_SDK_PRE_TARGET_COMMAND += "buildinfo_sdk"
+
diff --git a/meta/classes/image-combined-dbg.bbclass b/meta/classes/image-combined-dbg.bbclass
deleted file mode 100644
index e5dc61f857..0000000000
--- a/meta/classes/image-combined-dbg.bbclass
+++ /dev/null
@@ -1,9 +0,0 @@
-IMAGE_PREPROCESS_COMMAND:append = " combine_dbg_image; "
-
-combine_dbg_image () {
- if [ "${IMAGE_GEN_DEBUGFS}" = "1" -a -e ${IMAGE_ROOTFS}-dbg ]; then
- # copy target files into -dbg rootfs, so it can be used for
- # debug purposes directly
- tar -C ${IMAGE_ROOTFS} -cf - . | tar -C ${IMAGE_ROOTFS}-dbg -xf -
- fi
-}
diff --git a/meta/classes/image_types.bbclass b/meta/classes/image_types.bbclass
deleted file mode 100644
index 0ffea91195..0000000000
--- a/meta/classes/image_types.bbclass
+++ /dev/null
@@ -1,349 +0,0 @@
-# The default aligment of the size of the rootfs is set to 1KiB. In case
-# you're using the SD card emulation of a QEMU system simulator you may
-# set this value to 2048 (2MiB alignment).
-IMAGE_ROOTFS_ALIGNMENT ?= "1"
-
-def imagetypes_getdepends(d):
- def adddep(depstr, deps):
- for d in (depstr or "").split():
- # Add task dependency if not already present
- if ":" not in d:
- d += ":do_populate_sysroot"
- deps.add(d)
-
- # Take a type in the form of foo.bar.car and split it into the items
- # needed for the image deps "foo", and the conversion deps ["bar", "car"]
- def split_types(typestring):
- types = typestring.split(".")
- return types[0], types[1:]
-
- fstypes = set((d.getVar('IMAGE_FSTYPES') or "").split())
- fstypes |= set((d.getVar('IMAGE_FSTYPES_DEBUGFS') or "").split())
-
- deprecated = set()
- deps = set()
- for typestring in fstypes:
- basetype, resttypes = split_types(typestring)
-
- var = "IMAGE_DEPENDS_%s" % basetype
- if d.getVar(var) is not None:
- deprecated.add(var)
-
- for typedepends in (d.getVar("IMAGE_TYPEDEP:%s" % basetype) or "").split():
- base, rest = split_types(typedepends)
- resttypes += rest
-
- var = "IMAGE_DEPENDS_%s" % base
- if d.getVar(var) is not None:
- deprecated.add(var)
-
- for ctype in resttypes:
- adddep(d.getVar("CONVERSION_DEPENDS_%s" % ctype), deps)
- adddep(d.getVar("COMPRESS_DEPENDS_%s" % ctype), deps)
-
- if deprecated:
- bb.fatal('Deprecated variable(s) found: "%s". '
- 'Use do_image_<type>[depends] += "<recipe>:<task>" instead' % ', '.join(deprecated))
-
- # Sort the set so that ordering is consistant
- return " ".join(sorted(deps))
-
-XZ_COMPRESSION_LEVEL ?= "-9"
-XZ_INTEGRITY_CHECK ?= "crc32"
-
-ZIP_COMPRESSION_LEVEL ?= "-9"
-
-ZSTD_COMPRESSION_LEVEL ?= "-3"
-
-JFFS2_SUM_EXTRA_ARGS ?= ""
-IMAGE_CMD:jffs2 = "mkfs.jffs2 --root=${IMAGE_ROOTFS} --faketime --output=${IMGDEPLOYDIR}/${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.jffs2 ${EXTRA_IMAGECMD}"
-
-IMAGE_CMD:cramfs = "mkfs.cramfs ${IMAGE_ROOTFS} ${IMGDEPLOYDIR}/${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.cramfs ${EXTRA_IMAGECMD}"
-
-oe_mkext234fs () {
- fstype=$1
- extra_imagecmd=""
-
- if [ $# -gt 1 ]; then
- shift
- extra_imagecmd=$@
- fi
-
- # If generating an empty image the size of the sparse block should be large
- # enough to allocate an ext4 filesystem using 4096 bytes per inode, this is
- # about 60K, so dd needs a minimum count of 60, with bs=1024 (bytes per IO)
- eval local COUNT=\"0\"
- eval local MIN_COUNT=\"60\"
- if [ $ROOTFS_SIZE -lt $MIN_COUNT ]; then
- eval COUNT=\"$MIN_COUNT\"
- fi
- # Create a sparse image block
- bbdebug 1 Executing "dd if=/dev/zero of=${IMGDEPLOYDIR}/${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.$fstype seek=$ROOTFS_SIZE count=$COUNT bs=1024"
- dd if=/dev/zero of=${IMGDEPLOYDIR}/${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.$fstype seek=$ROOTFS_SIZE count=$COUNT bs=1024
- bbdebug 1 "Actual Rootfs size: `du -s ${IMAGE_ROOTFS}`"
- bbdebug 1 "Actual Partition size: `stat -c '%s' ${IMGDEPLOYDIR}/${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.$fstype`"
- bbdebug 1 Executing "mkfs.$fstype -F $extra_imagecmd ${IMGDEPLOYDIR}/${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.$fstype -d ${IMAGE_ROOTFS}"
- mkfs.$fstype -F $extra_imagecmd ${IMGDEPLOYDIR}/${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.$fstype -d ${IMAGE_ROOTFS}
- # Error codes 0-3 indicate successfull operation of fsck (no errors or errors corrected)
- fsck.$fstype -pvfD ${IMGDEPLOYDIR}/${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.$fstype || [ $? -le 3 ]
-}
-
-IMAGE_CMD:ext2 = "oe_mkext234fs ext2 ${EXTRA_IMAGECMD}"
-IMAGE_CMD:ext3 = "oe_mkext234fs ext3 ${EXTRA_IMAGECMD}"
-IMAGE_CMD:ext4 = "oe_mkext234fs ext4 ${EXTRA_IMAGECMD}"
-
-MIN_BTRFS_SIZE ?= "16384"
-IMAGE_CMD:btrfs () {
- size=${ROOTFS_SIZE}
- if [ ${size} -lt ${MIN_BTRFS_SIZE} ] ; then
- size=${MIN_BTRFS_SIZE}
- bbwarn "Rootfs size is too small for BTRFS. Filesystem will be extended to ${size}K"
- fi
- dd if=/dev/zero of=${IMGDEPLOYDIR}/${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.btrfs seek=${size} count=0 bs=1024
- mkfs.btrfs ${EXTRA_IMAGECMD} -r ${IMAGE_ROOTFS} ${IMGDEPLOYDIR}/${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.btrfs
-}
-
-IMAGE_CMD:squashfs = "mksquashfs ${IMAGE_ROOTFS} ${IMGDEPLOYDIR}/${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.squashfs ${EXTRA_IMAGECMD} -noappend"
-IMAGE_CMD:squashfs-xz = "mksquashfs ${IMAGE_ROOTFS} ${IMGDEPLOYDIR}/${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.squashfs-xz ${EXTRA_IMAGECMD} -noappend -comp xz"
-IMAGE_CMD:squashfs-lzo = "mksquashfs ${IMAGE_ROOTFS} ${IMGDEPLOYDIR}/${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.squashfs-lzo ${EXTRA_IMAGECMD} -noappend -comp lzo"
-IMAGE_CMD:squashfs-lz4 = "mksquashfs ${IMAGE_ROOTFS} ${IMGDEPLOYDIR}/${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.squashfs-lz4 ${EXTRA_IMAGECMD} -noappend -comp lz4"
-IMAGE_CMD:squashfs-zst = "mksquashfs ${IMAGE_ROOTFS} ${IMGDEPLOYDIR}/${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.squashfs-zst ${EXTRA_IMAGECMD} -noappend -comp zstd"
-
-IMAGE_CMD:erofs = "mkfs.erofs ${EXTRA_IMAGECMD} ${IMGDEPLOYDIR}/${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.erofs ${IMAGE_ROOTFS}"
-IMAGE_CMD:erofs-lz4 = "mkfs.erofs -zlz4 ${EXTRA_IMAGECMD} ${IMGDEPLOYDIR}/${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.erofs-lz4 ${IMAGE_ROOTFS}"
-IMAGE_CMD:erofs-lz4hc = "mkfs.erofs -zlz4hc ${EXTRA_IMAGECMD} ${IMGDEPLOYDIR}/${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.erofs-lz4hc ${IMAGE_ROOTFS}"
-
-
-IMAGE_CMD_TAR ?= "tar"
-# ignore return code 1 "file changed as we read it" as other tasks(e.g. do_image_wic) may be hardlinking rootfs
-IMAGE_CMD:tar = "${IMAGE_CMD_TAR} --sort=name --format=posix --numeric-owner -cf ${IMGDEPLOYDIR}/${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.tar -C ${IMAGE_ROOTFS} . || [ $? -eq 1 ]"
-
-do_image_cpio[cleandirs] += "${WORKDIR}/cpio_append"
-IMAGE_CMD:cpio () {
- (cd ${IMAGE_ROOTFS} && find . | sort | cpio --reproducible -o -H newc >${IMGDEPLOYDIR}/${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.cpio)
- # We only need the /init symlink if we're building the real
- # image. The -dbg image doesn't need it! By being clever
- # about this we also avoid 'touch' below failing, as it
- # might be trying to touch /sbin/init on the host since both
- # the normal and the -dbg image share the same WORKDIR
- if [ "${IMAGE_BUILDING_DEBUGFS}" != "true" ]; then
- if [ ! -L ${IMAGE_ROOTFS}/init ] && [ ! -e ${IMAGE_ROOTFS}/init ]; then
- if [ -L ${IMAGE_ROOTFS}/sbin/init ] || [ -e ${IMAGE_ROOTFS}/sbin/init ]; then
- ln -sf /sbin/init ${WORKDIR}/cpio_append/init
- else
- touch ${WORKDIR}/cpio_append/init
- fi
- (cd ${WORKDIR}/cpio_append && echo ./init | cpio -oA -H newc -F ${IMGDEPLOYDIR}/${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.cpio)
- fi
- fi
-}
-
-UBI_VOLNAME ?= "${MACHINE}-rootfs"
-UBI_VOLTYPE ?= "dynamic"
-UBI_IMGTYPE ?= "ubifs"
-
-write_ubi_config() {
- if [ -z "$1" ]; then
- local vname=""
- else
- local vname="_$1"
- fi
-
- cat <<EOF > ubinize${vname}-${IMAGE_NAME}.cfg
-[ubifs]
-mode=ubi
-image=${IMGDEPLOYDIR}/${IMAGE_NAME}${vname}${IMAGE_NAME_SUFFIX}.${UBI_IMGTYPE}
-vol_id=0
-vol_type=${UBI_VOLTYPE}
-vol_name=${UBI_VOLNAME}
-vol_flags=autoresize
-EOF
-}
-
-multiubi_mkfs() {
- local mkubifs_args="$1"
- local ubinize_args="$2"
-
- # Added prompt error message for ubi and ubifs image creation.
- if [ -z "$mkubifs_args" ] || [ -z "$ubinize_args" ]; then
- bbfatal "MKUBIFS_ARGS and UBINIZE_ARGS have to be set, see http://www.linux-mtd.infradead.org/faq/ubifs.html for details"
- fi
-
- write_ubi_config "$3"
-
- if [ -n "$vname" ]; then
- mkfs.ubifs -r ${IMAGE_ROOTFS} -o ${IMGDEPLOYDIR}/${IMAGE_NAME}${vname}${IMAGE_NAME_SUFFIX}.ubifs ${mkubifs_args}
- fi
- ubinize -o ${IMGDEPLOYDIR}/${IMAGE_NAME}${vname}${IMAGE_NAME_SUFFIX}.ubi ${ubinize_args} ubinize${vname}-${IMAGE_NAME}.cfg
-
- # Cleanup cfg file
- mv ubinize${vname}-${IMAGE_NAME}.cfg ${IMGDEPLOYDIR}/
-
- # Create own symlinks for 'named' volumes
- if [ -n "$vname" ]; then
- cd ${IMGDEPLOYDIR}
- if [ -e ${IMAGE_NAME}${vname}${IMAGE_NAME_SUFFIX}.ubifs ]; then
- ln -sf ${IMAGE_NAME}${vname}${IMAGE_NAME_SUFFIX}.ubifs \
- ${IMAGE_LINK_NAME}${vname}.ubifs
- fi
- if [ -e ${IMAGE_NAME}${vname}${IMAGE_NAME_SUFFIX}.ubi ]; then
- ln -sf ${IMAGE_NAME}${vname}${IMAGE_NAME_SUFFIX}.ubi \
- ${IMAGE_LINK_NAME}${vname}.ubi
- fi
- cd -
- fi
-}
-
-IMAGE_CMD:multiubi () {
- # Split MKUBIFS_ARGS_<name> and UBINIZE_ARGS_<name>
- for name in ${MULTIUBI_BUILD}; do
- eval local mkubifs_args=\"\$MKUBIFS_ARGS_${name}\"
- eval local ubinize_args=\"\$UBINIZE_ARGS_${name}\"
-
- multiubi_mkfs "${mkubifs_args}" "${ubinize_args}" "${name}"
- done
-}
-
-IMAGE_CMD:ubi () {
- multiubi_mkfs "${MKUBIFS_ARGS}" "${UBINIZE_ARGS}"
-}
-IMAGE_TYPEDEP:ubi = "${UBI_IMGTYPE}"
-
-IMAGE_CMD:ubifs = "mkfs.ubifs -r ${IMAGE_ROOTFS} -o ${IMGDEPLOYDIR}/${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.ubifs ${MKUBIFS_ARGS}"
-
-MIN_F2FS_SIZE ?= "524288"
-IMAGE_CMD:f2fs () {
- # We need to add additional smarts here form devices smaller than 1.5G
- # Need to scale appropriately between 40M -> 1.5G as the "overprovision
- # ratio" goes down as the device gets bigger (70% -> 4.5%), below about
- # 500M the standard IMAGE_OVERHEAD_FACTOR does not work, so add additional
- # space here when under 500M
- size=${ROOTFS_SIZE}
- if [ ${size} -lt ${MIN_F2FS_SIZE} ] ; then
- size=${MIN_F2FS_SIZE}
- bbwarn "Rootfs size is too small for F2FS. Filesystem will be extended to ${size}K"
- fi
- dd if=/dev/zero of=${IMGDEPLOYDIR}/${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.f2fs seek=${size} count=0 bs=1024
- mkfs.f2fs ${EXTRA_IMAGECMD} ${IMGDEPLOYDIR}/${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.f2fs
- sload.f2fs -f ${IMAGE_ROOTFS} ${IMGDEPLOYDIR}/${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.f2fs
-}
-
-EXTRA_IMAGECMD = ""
-
-inherit siteinfo kernel-arch image-artifact-names
-
-JFFS2_ENDIANNESS ?= "${@oe.utils.conditional('SITEINFO_ENDIANNESS', 'le', '-l', '-b', d)}"
-JFFS2_ERASEBLOCK ?= "0x40000"
-EXTRA_IMAGECMD:jffs2 ?= "--pad ${JFFS2_ENDIANNESS} --eraseblock=${JFFS2_ERASEBLOCK} --no-cleanmarkers"
-
-# Change these if you want default mkfs behavior (i.e. create minimal inode number)
-EXTRA_IMAGECMD:ext2 ?= "-i 4096"
-EXTRA_IMAGECMD:ext3 ?= "-i 4096"
-EXTRA_IMAGECMD:ext4 ?= "-i 4096"
-EXTRA_IMAGECMD:btrfs ?= "-n 4096 --shrink"
-EXTRA_IMAGECMD:f2fs ?= ""
-
-do_image_cpio[depends] += "cpio-native:do_populate_sysroot"
-do_image_jffs2[depends] += "mtd-utils-native:do_populate_sysroot"
-do_image_cramfs[depends] += "util-linux-native:do_populate_sysroot"
-do_image_ext2[depends] += "e2fsprogs-native:do_populate_sysroot"
-do_image_ext3[depends] += "e2fsprogs-native:do_populate_sysroot"
-do_image_ext4[depends] += "e2fsprogs-native:do_populate_sysroot"
-do_image_btrfs[depends] += "btrfs-tools-native:do_populate_sysroot"
-do_image_squashfs[depends] += "squashfs-tools-native:do_populate_sysroot"
-do_image_squashfs_xz[depends] += "squashfs-tools-native:do_populate_sysroot"
-do_image_squashfs_lzo[depends] += "squashfs-tools-native:do_populate_sysroot"
-do_image_squashfs_lz4[depends] += "squashfs-tools-native:do_populate_sysroot"
-do_image_squashfs_zst[depends] += "squashfs-tools-native:do_populate_sysroot"
-do_image_ubi[depends] += "mtd-utils-native:do_populate_sysroot"
-do_image_ubifs[depends] += "mtd-utils-native:do_populate_sysroot"
-do_image_multiubi[depends] += "mtd-utils-native:do_populate_sysroot"
-do_image_f2fs[depends] += "f2fs-tools-native:do_populate_sysroot"
-do_image_erofs[depends] += "erofs-utils-native:do_populate_sysroot"
-do_image_erofs_lz4[depends] += "erofs-utils-native:do_populate_sysroot"
-do_image_erofs_lz4hc[depends] += "erofs-utils-native:do_populate_sysroot"
-
-# This variable is available to request which values are suitable for IMAGE_FSTYPES
-IMAGE_TYPES = " \
- jffs2 jffs2.sum \
- cramfs \
- ext2 ext2.gz ext2.bz2 ext2.lzma \
- ext3 ext3.gz \
- ext4 ext4.gz \
- btrfs \
- squashfs squashfs-xz squashfs-lzo squashfs-lz4 squashfs-zst \
- ubi ubifs multiubi \
- tar tar.gz tar.bz2 tar.xz tar.lz4 tar.zst \
- cpio cpio.gz cpio.xz cpio.lzma cpio.lz4 cpio.zst \
- wic wic.gz wic.bz2 wic.lzma wic.zst \
- container \
- f2fs \
- erofs erofs-lz4 erofs-lz4hc \
-"
-# These image types are x86 specific as they need syslinux
-IMAGE_TYPES:append:x86 = " hddimg iso"
-IMAGE_TYPES:append:x86-64 = " hddimg iso"
-
-# Compression is a special case of conversion. The old variable
-# names are still supported for backward-compatibility. When defining
-# new compression or conversion commands, use CONVERSIONTYPES and
-# CONVERSION_CMD/DEPENDS.
-COMPRESSIONTYPES ?= ""
-
-CONVERSIONTYPES = "gz bz2 lzma xz lz4 lzo zip zst sum md5sum sha1sum sha224sum sha256sum sha384sum sha512sum bmap u-boot vmdk vhd vhdx vdi qcow2 base64 gzsync zsync ${COMPRESSIONTYPES}"
-CONVERSION_CMD:lzma = "lzma -k -f -7 ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type}"
-CONVERSION_CMD:gz = "gzip -f -9 -n -c --rsyncable ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type} > ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type}.gz"
-CONVERSION_CMD:bz2 = "pbzip2 -f -k ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type}"
-CONVERSION_CMD:xz = "xz -f -k -c ${XZ_COMPRESSION_LEVEL} ${XZ_DEFAULTS} --check=${XZ_INTEGRITY_CHECK} ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type} > ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type}.xz"
-CONVERSION_CMD:lz4 = "lz4 -9 -z -l ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type} ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type}.lz4"
-CONVERSION_CMD:lzo = "lzop -9 ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type}"
-CONVERSION_CMD:zip = "zip ${ZIP_COMPRESSION_LEVEL} ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type}.zip ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type}"
-CONVERSION_CMD:zst = "zstd -f -k -T0 -c ${ZSTD_COMPRESSION_LEVEL} ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type} > ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type}.zst"
-CONVERSION_CMD:sum = "sumtool -i ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type} -o ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type}.sum ${JFFS2_SUM_EXTRA_ARGS}"
-CONVERSION_CMD:md5sum = "md5sum ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type} > ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type}.md5sum"
-CONVERSION_CMD:sha1sum = "sha1sum ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type} > ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type}.sha1sum"
-CONVERSION_CMD:sha224sum = "sha224sum ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type} > ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type}.sha224sum"
-CONVERSION_CMD:sha256sum = "sha256sum ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type} > ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type}.sha256sum"
-CONVERSION_CMD:sha384sum = "sha384sum ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type} > ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type}.sha384sum"
-CONVERSION_CMD:sha512sum = "sha512sum ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type} > ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type}.sha512sum"
-CONVERSION_CMD:bmap = "bmaptool create ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type} -o ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type}.bmap"
-CONVERSION_CMD:u-boot = "mkimage -A ${UBOOT_ARCH} -O linux -T ramdisk -C none -n ${IMAGE_NAME} -d ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type} ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type}.u-boot"
-CONVERSION_CMD:vmdk = "qemu-img convert -O vmdk ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type} ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type}.vmdk"
-CONVERSION_CMD:vhdx = "qemu-img convert -O vhdx -o subformat=dynamic ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type} ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type}.vhdx"
-CONVERSION_CMD:vhd = "qemu-img convert -O vpc -o subformat=fixed ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type} ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type}.vhd"
-CONVERSION_CMD:vdi = "qemu-img convert -O vdi ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type} ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type}.vdi"
-CONVERSION_CMD:qcow2 = "qemu-img convert -O qcow2 ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type} ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type}.qcow2"
-CONVERSION_CMD:base64 = "base64 ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type} > ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type}.base64"
-CONVERSION_CMD:zsync = "zsyncmake_curl ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type}"
-CONVERSION_CMD:gzsync = "zsyncmake_curl -z ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type}"
-CONVERSION_DEPENDS_lzma = "xz-native"
-CONVERSION_DEPENDS_gz = "pigz-native"
-CONVERSION_DEPENDS_bz2 = "pbzip2-native"
-CONVERSION_DEPENDS_xz = "xz-native"
-CONVERSION_DEPENDS_lz4 = "lz4-native"
-CONVERSION_DEPENDS_lzo = "lzop-native"
-CONVERSION_DEPENDS_zip = "zip-native"
-CONVERSION_DEPENDS_zst = "zstd-native"
-CONVERSION_DEPENDS_sum = "mtd-utils-native"
-CONVERSION_DEPENDS_bmap = "bmap-tools-native"
-CONVERSION_DEPENDS_u-boot = "u-boot-tools-native"
-CONVERSION_DEPENDS_vmdk = "qemu-system-native"
-CONVERSION_DEPENDS_vdi = "qemu-system-native"
-CONVERSION_DEPENDS_qcow2 = "qemu-system-native"
-CONVERSION_DEPENDS_base64 = "coreutils-native"
-CONVERSION_DEPENDS_vhdx = "qemu-system-native"
-CONVERSION_DEPENDS_vhd = "qemu-system-native"
-CONVERSION_DEPENDS_zsync = "zsync-curl-native"
-CONVERSION_DEPENDS_gzsync = "zsync-curl-native"
-
-RUNNABLE_IMAGE_TYPES ?= "ext2 ext3 ext4"
-RUNNABLE_MACHINE_PATTERNS ?= "qemu"
-
-DEPLOYABLE_IMAGE_TYPES ?= "hddimg iso"
-
-# The IMAGE_TYPES_MASKED variable will be used to mask out from the IMAGE_FSTYPES,
-# images that will not be built at do_rootfs time: vmdk, vhd, vhdx, vdi, qcow2, hddimg, iso, etc.
-IMAGE_TYPES_MASKED ?= ""
-
-# bmap requires python3 to be in the PATH
-EXTRANATIVEPATH += "${@'python3-native' if d.getVar('IMAGE_FSTYPES').find('.bmap') else ''}"
diff --git a/meta/classes/kernel-devicetree.bbclass b/meta/classes/kernel-devicetree.bbclass
deleted file mode 100644
index b4338da1b1..0000000000
--- a/meta/classes/kernel-devicetree.bbclass
+++ /dev/null
@@ -1,113 +0,0 @@
-# Support for device tree generation
-python () {
- if not bb.data.inherits_class('nopackages', d):
- d.appendVar("PACKAGES", " ${KERNEL_PACKAGE_NAME}-devicetree")
- if d.getVar('KERNEL_DEVICETREE_BUNDLE') == '1':
- d.appendVar("PACKAGES", " ${KERNEL_PACKAGE_NAME}-image-zimage-bundle")
-}
-
-FILES:${KERNEL_PACKAGE_NAME}-devicetree = "/${KERNEL_IMAGEDEST}/*.dtb /${KERNEL_IMAGEDEST}/*.dtbo"
-FILES:${KERNEL_PACKAGE_NAME}-image-zimage-bundle = "/${KERNEL_IMAGEDEST}/zImage-*.dtb.bin"
-
-# Generate kernel+devicetree bundle
-KERNEL_DEVICETREE_BUNDLE ?= "0"
-
-# dtc flags passed via DTC_FLAGS env variable
-KERNEL_DTC_FLAGS ?= ""
-
-normalize_dtb () {
- dtb="$1"
- if echo $dtb | grep -q '/dts/'; then
- bbwarn "$dtb contains the full path to the the dts file, but only the dtb name should be used."
- dtb=`basename $dtb | sed 's,\.dts$,.dtb,g'`
- fi
- echo "$dtb"
-}
-
-get_real_dtb_path_in_kernel () {
- dtb="$1"
- dtb_path="${B}/arch/${ARCH}/boot/dts/$dtb"
- if [ ! -e "$dtb_path" ]; then
- dtb_path="${B}/arch/${ARCH}/boot/$dtb"
- fi
- echo "$dtb_path"
-}
-
-do_configure:append() {
- if [ "${KERNEL_DEVICETREE_BUNDLE}" = "1" ]; then
- if echo ${KERNEL_IMAGETYPE_FOR_MAKE} | grep -q 'zImage'; then
- case "${ARCH}" in
- "arm")
- config="${B}/.config"
- if ! grep -q 'CONFIG_ARM_APPENDED_DTB=y' $config; then
- bbwarn 'CONFIG_ARM_APPENDED_DTB is NOT enabled in the kernel. Enabling it to allow the kernel to boot with the Device Tree appended!'
- sed -i "/CONFIG_ARM_APPENDED_DTB[ =]/d" $config
- echo "CONFIG_ARM_APPENDED_DTB=y" >> $config
- echo "# CONFIG_ARM_ATAG_DTB_COMPAT is not set" >> $config
- fi
- ;;
- *)
- bberror "KERNEL_DEVICETREE_BUNDLE is not supported for ${ARCH}. Currently it is only supported for 'ARM'."
- esac
- else
- bberror 'The KERNEL_DEVICETREE_BUNDLE requires the KERNEL_IMAGETYPE to contain zImage.'
- fi
- fi
-}
-
-do_compile:append() {
- if [ -n "${KERNEL_DTC_FLAGS}" ]; then
- export DTC_FLAGS="${KERNEL_DTC_FLAGS}"
- fi
-
- for dtbf in ${KERNEL_DEVICETREE}; do
- dtb=`normalize_dtb "$dtbf"`
- oe_runmake $dtb CC="${KERNEL_CC} $cc_extra " LD="${KERNEL_LD}" ${KERNEL_EXTRA_ARGS}
- done
-}
-
-do_install:append() {
- for dtbf in ${KERNEL_DEVICETREE}; do
- dtb=`normalize_dtb "$dtbf"`
- dtb_ext=${dtb##*.}
- dtb_base_name=`basename $dtb .$dtb_ext`
- dtb_path=`get_real_dtb_path_in_kernel "$dtb"`
- install -m 0644 $dtb_path ${D}/${KERNEL_IMAGEDEST}/$dtb_base_name.$dtb_ext
- done
-}
-
-do_deploy:append() {
- for dtbf in ${KERNEL_DEVICETREE}; do
- dtb=`normalize_dtb "$dtbf"`
- dtb_ext=${dtb##*.}
- dtb_base_name=`basename $dtb .$dtb_ext`
- install -d $deployDir
- install -m 0644 ${D}/${KERNEL_IMAGEDEST}/$dtb_base_name.$dtb_ext $deployDir/$dtb_base_name-${KERNEL_DTB_NAME}.$dtb_ext
- if [ "${KERNEL_IMAGETYPE_SYMLINK}" = "1" ] ; then
- ln -sf $dtb_base_name-${KERNEL_DTB_NAME}.$dtb_ext $deployDir/$dtb_base_name.$dtb_ext
- fi
- if [ -n "${KERNEL_DTB_LINK_NAME}" ] ; then
- ln -sf $dtb_base_name-${KERNEL_DTB_NAME}.$dtb_ext $deployDir/$dtb_base_name-${KERNEL_DTB_LINK_NAME}.$dtb_ext
- fi
- for type in ${KERNEL_IMAGETYPE_FOR_MAKE}; do
- if [ "$type" = "zImage" ] && [ "${KERNEL_DEVICETREE_BUNDLE}" = "1" ]; then
- cat ${D}/${KERNEL_IMAGEDEST}/$type \
- $deployDir/$dtb_base_name-${KERNEL_DTB_NAME}.$dtb_ext \
- > $deployDir/$type-$dtb_base_name-${KERNEL_DTB_NAME}.$dtb_ext${KERNEL_DTB_BIN_EXT}
- if [ -n "${KERNEL_DTB_LINK_NAME}" ]; then
- ln -sf $type-$dtb_base_name-${KERNEL_DTB_NAME}.$dtb_ext${KERNEL_DTB_BIN_EXT} \
- $deployDir/$type-$dtb_base_name-${KERNEL_DTB_LINK_NAME}.$dtb_ext${KERNEL_DTB_BIN_EXT}
- fi
- if [ -e "${KERNEL_OUTPUT_DIR}/${type}.initramfs" ]; then
- cat ${KERNEL_OUTPUT_DIR}/${type}.initramfs \
- $deployDir/$dtb_base_name-${KERNEL_DTB_NAME}.$dtb_ext \
- > $deployDir/${type}-${INITRAMFS_NAME}-$dtb_base_name-${KERNEL_DTB_NAME}.$dtb_ext${KERNEL_DTB_BIN_EXT}
- if [ -n "${KERNEL_DTB_LINK_NAME}" ]; then
- ln -sf ${type}-${INITRAMFS_NAME}-$dtb_base_name-${KERNEL_DTB_NAME}.$dtb_ext${KERNEL_DTB_BIN_EXT} \
- $deployDir/${type}-${INITRAMFS_NAME}-$dtb_base_name-${KERNEL_DTB_LINK_NAME}.$dtb_ext${KERNEL_DTB_BIN_EXT}
- fi
- fi
- fi
- done
- done
-}
diff --git a/meta/classes/kernel-module-split.bbclass b/meta/classes/kernel-module-split.bbclass
deleted file mode 100644
index a29c294810..0000000000
--- a/meta/classes/kernel-module-split.bbclass
+++ /dev/null
@@ -1,191 +0,0 @@
-pkg_postinst:modules () {
-if [ -z "$D" ]; then
- depmod -a ${KERNEL_VERSION}
-else
- # image.bbclass will call depmodwrapper after everything is installed,
- # no need to do it here as well
- :
-fi
-}
-
-pkg_postrm:modules () {
-if [ -z "$D" ]; then
- depmod -a ${KERNEL_VERSION}
-else
- depmodwrapper -a -b $D ${KERNEL_VERSION}
-fi
-}
-
-autoload_postinst_fragment() {
-if [ x"$D" = "x" ]; then
- modprobe %s || true
-fi
-}
-
-PACKAGE_WRITE_DEPS += "kmod-native depmodwrapper-cross"
-
-do_install:append() {
- install -d ${D}${sysconfdir}/modules-load.d/ ${D}${sysconfdir}/modprobe.d/
-}
-
-KERNEL_SPLIT_MODULES ?= "1"
-PACKAGESPLITFUNCS:prepend = "split_kernel_module_packages "
-
-KERNEL_MODULES_META_PACKAGE ?= "${@ d.getVar("KERNEL_PACKAGE_NAME") or "kernel" }-modules"
-
-KERNEL_MODULE_PACKAGE_PREFIX ?= ""
-KERNEL_MODULE_PACKAGE_SUFFIX ?= "-${KERNEL_VERSION}"
-KERNEL_MODULE_PROVIDE_VIRTUAL ?= "1"
-
-python split_kernel_module_packages () {
- import re
-
- modinfoexp = re.compile("([^=]+)=(.*)")
-
- def extract_modinfo(file):
- import tempfile, subprocess
- tempfile.tempdir = d.getVar("WORKDIR")
- compressed = re.match( r'.*\.(gz|xz|zst)$', file)
- tf = tempfile.mkstemp()
- tmpfile = tf[1]
- if compressed:
- tmpkofile = tmpfile + ".ko"
- if compressed.group(1) == 'gz':
- cmd = "gunzip -dc %s > %s" % (file, tmpkofile)
- subprocess.check_call(cmd, shell=True)
- elif compressed.group(1) == 'xz':
- cmd = "xz -dc %s > %s" % (file, tmpkofile)
- subprocess.check_call(cmd, shell=True)
- elif compressed.group(1) == 'zst':
- cmd = "zstd -dc %s > %s" % (file, tmpkofile)
- subprocess.check_call(cmd, shell=True)
- else:
- msg = "Cannot decompress '%s'" % file
- raise msg
- cmd = "%sobjcopy -j .modinfo -O binary %s %s" % (d.getVar("HOST_PREFIX") or "", tmpkofile, tmpfile)
- else:
- cmd = "%sobjcopy -j .modinfo -O binary %s %s" % (d.getVar("HOST_PREFIX") or "", file, tmpfile)
- subprocess.check_call(cmd, shell=True)
- # errors='replace': Some old kernel versions contain invalid utf-8 characters in mod descriptions (like 0xf6, 'ö')
- f = open(tmpfile, errors='replace')
- l = f.read().split("\000")
- f.close()
- os.close(tf[0])
- os.unlink(tmpfile)
- if compressed:
- os.unlink(tmpkofile)
- vals = {}
- for i in l:
- m = modinfoexp.match(i)
- if not m:
- continue
- vals[m.group(1)] = m.group(2)
- return vals
-
- def frob_metadata(file, pkg, pattern, format, basename):
- vals = extract_modinfo(file)
-
- dvar = d.getVar('PKGD')
-
- # If autoloading is requested, output /etc/modules-load.d/<name>.conf and append
- # appropriate modprobe commands to the postinst
- autoloadlist = (d.getVar("KERNEL_MODULE_AUTOLOAD") or "").split()
- autoload = d.getVar('module_autoload_%s' % basename)
- if autoload and autoload == basename:
- bb.warn("module_autoload_%s was replaced by KERNEL_MODULE_AUTOLOAD for cases where basename == module name, please drop it" % basename)
- if autoload and basename not in autoloadlist:
- bb.warn("module_autoload_%s is defined but '%s' isn't included in KERNEL_MODULE_AUTOLOAD, please add it there" % (basename, basename))
- if basename in autoloadlist:
- name = '%s/etc/modules-load.d/%s.conf' % (dvar, basename)
- f = open(name, 'w')
- if autoload:
- for m in autoload.split():
- f.write('%s\n' % m)
- else:
- f.write('%s\n' % basename)
- f.close()
- postinst = d.getVar('pkg_postinst:%s' % pkg)
- if not postinst:
- bb.fatal("pkg_postinst:%s not defined" % pkg)
- postinst += d.getVar('autoload_postinst_fragment') % (autoload or basename)
- d.setVar('pkg_postinst:%s' % pkg, postinst)
-
- # Write out any modconf fragment
- modconflist = (d.getVar("KERNEL_MODULE_PROBECONF") or "").split()
- modconf = d.getVar('module_conf_%s' % basename)
- if modconf and basename in modconflist:
- name = '%s/etc/modprobe.d/%s.conf' % (dvar, basename)
- f = open(name, 'w')
- f.write("%s\n" % modconf)
- f.close()
- elif modconf:
- bb.error("Please ensure module %s is listed in KERNEL_MODULE_PROBECONF since module_conf_%s is set" % (basename, basename))
-
- files = d.getVar('FILES:%s' % pkg)
- files = "%s /etc/modules-load.d/%s.conf /etc/modprobe.d/%s.conf" % (files, basename, basename)
- d.setVar('FILES:%s' % pkg, files)
-
- conffiles = d.getVar('CONFFILES:%s' % pkg)
- conffiles = "%s /etc/modules-load.d/%s.conf /etc/modprobe.d/%s.conf" % (conffiles, basename, basename)
- d.setVar('CONFFILES:%s' % pkg, conffiles)
-
- if "description" in vals:
- old_desc = d.getVar('DESCRIPTION:' + pkg) or ""
- d.setVar('DESCRIPTION:' + pkg, old_desc + "; " + vals["description"])
-
- rdepends = bb.utils.explode_dep_versions2(d.getVar('RDEPENDS:' + pkg) or "")
- modinfo_deps = []
- if "depends" in vals and vals["depends"] != "":
- for dep in vals["depends"].split(","):
- on = legitimize_package_name(dep)
- dependency_pkg = format % on
- modinfo_deps.append(dependency_pkg)
- for dep in modinfo_deps:
- if not dep in rdepends:
- rdepends[dep] = []
- d.setVar('RDEPENDS:' + pkg, bb.utils.join_deps(rdepends, commasep=False))
-
- # Avoid automatic -dev recommendations for modules ending with -dev.
- d.setVarFlag('RRECOMMENDS:' + pkg, 'nodeprrecs', 1)
-
- # Provide virtual package without postfix
- providevirt = d.getVar('KERNEL_MODULE_PROVIDE_VIRTUAL')
- if providevirt == "1":
- postfix = format.split('%s')[1]
- d.setVar('RPROVIDES:' + pkg, pkg.replace(postfix, ''))
-
- kernel_package_name = d.getVar("KERNEL_PACKAGE_NAME") or "kernel"
- kernel_version = d.getVar("KERNEL_VERSION")
-
- metapkg = d.getVar('KERNEL_MODULES_META_PACKAGE')
- splitmods = d.getVar('KERNEL_SPLIT_MODULES')
- postinst = d.getVar('pkg_postinst:modules')
- postrm = d.getVar('pkg_postrm:modules')
-
- if splitmods != '1':
- etcdir = d.getVar('sysconfdir')
- d.appendVar('FILES:' + metapkg, '%s/modules-load.d/ %s/modprobe.d/ %s/modules/' % (etcdir, etcdir, d.getVar("nonarch_base_libdir")))
- d.appendVar('pkg_postinst:%s' % metapkg, postinst)
- d.prependVar('pkg_postrm:%s' % metapkg, postrm);
- return
-
- module_regex = r'^(.*)\.k?o(?:\.(gz|xz|zst))?$'
-
- module_pattern_prefix = d.getVar('KERNEL_MODULE_PACKAGE_PREFIX')
- module_pattern_suffix = d.getVar('KERNEL_MODULE_PACKAGE_SUFFIX')
- module_pattern = module_pattern_prefix + kernel_package_name + '-module-%s' + module_pattern_suffix
-
- modules = do_split_packages(d, root='${nonarch_base_libdir}/modules', file_regex=module_regex, output_pattern=module_pattern, description='%s kernel module', postinst=postinst, postrm=postrm, recursive=True, hook=frob_metadata, extra_depends='%s-%s' % (kernel_package_name, kernel_version))
- if modules:
- d.appendVar('RDEPENDS:' + metapkg, ' '+' '.join(modules))
-
- # If modules-load.d and modprobe.d are empty at this point, remove them to
- # avoid warnings. removedirs only raises an OSError if an empty
- # directory cannot be removed.
- dvar = d.getVar('PKGD')
- for dir in ["%s/etc/modprobe.d" % (dvar), "%s/etc/modules-load.d" % (dvar), "%s/etc" % (dvar)]:
- if len(os.listdir(dir)) == 0:
- os.rmdir(dir)
-}
-
-do_package[vardeps] += '${@" ".join(map(lambda s: "module_conf_" + s, (d.getVar("KERNEL_MODULE_PROBECONF") or "").split()))}'
diff --git a/meta/classes/kernel-uboot.bbclass b/meta/classes/kernel-uboot.bbclass
deleted file mode 100644
index 2daa068298..0000000000
--- a/meta/classes/kernel-uboot.bbclass
+++ /dev/null
@@ -1,34 +0,0 @@
-# fitImage kernel compression algorithm
-FIT_KERNEL_COMP_ALG ?= "gzip"
-FIT_KERNEL_COMP_ALG_EXTENSION ?= ".gz"
-
-uboot_prep_kimage() {
- if [ -e arch/${ARCH}/boot/compressed/vmlinux ]; then
- vmlinux_path="arch/${ARCH}/boot/compressed/vmlinux"
- linux_suffix=""
- linux_comp="none"
- elif [ -e arch/${ARCH}/boot/vmlinuz.bin ]; then
- rm -f linux.bin
- cp -l arch/${ARCH}/boot/vmlinuz.bin linux.bin
- vmlinux_path=""
- linux_suffix=""
- linux_comp="none"
- else
- vmlinux_path="vmlinux"
- linux_suffix="${FIT_KERNEL_COMP_ALG_EXTENSION}"
- linux_comp="${FIT_KERNEL_COMP_ALG}"
- fi
-
- [ -n "${vmlinux_path}" ] && ${OBJCOPY} -O binary -R .note -R .comment -S "${vmlinux_path}" linux.bin
-
- if [ "${linux_comp}" != "none" ] ; then
- if [ "${linux_comp}" = "gzip" ] ; then
- gzip -9 linux.bin
- elif [ "${linux_comp}" = "lzo" ] ; then
- lzop -9 linux.bin
- fi
- mv -f "linux.bin${linux_suffix}" linux.bin
- fi
-
- echo "${linux_comp}"
-}
diff --git a/meta/classes/kernelsrc.bbclass b/meta/classes/kernelsrc.bbclass
deleted file mode 100644
index a951ba3325..0000000000
--- a/meta/classes/kernelsrc.bbclass
+++ /dev/null
@@ -1,10 +0,0 @@
-S = "${STAGING_KERNEL_DIR}"
-deltask do_fetch
-deltask do_unpack
-do_patch[depends] += "virtual/kernel:do_shared_workdir"
-do_patch[noexec] = "1"
-do_package[depends] += "virtual/kernel:do_populate_sysroot"
-KERNEL_VERSION = "${@get_kernelversion_file("${STAGING_KERNEL_BUILDDIR}")}"
-
-inherit linux-kernel-base
-
diff --git a/meta/classes/lib_package.bbclass b/meta/classes/lib_package.bbclass
deleted file mode 100644
index 8849f59042..0000000000
--- a/meta/classes/lib_package.bbclass
+++ /dev/null
@@ -1,7 +0,0 @@
-#
-# ${PN}-bin is defined in bitbake.conf
-#
-# We need to allow the other packages to be greedy with what they
-# want out of /usr/bin and /usr/sbin before ${PN}-bin gets greedy.
-#
-PACKAGE_BEFORE_PN = "${PN}-bin"
diff --git a/meta/classes/linux-kernel-base.bbclass b/meta/classes/linux-kernel-base.bbclass
deleted file mode 100644
index ba59222c24..0000000000
--- a/meta/classes/linux-kernel-base.bbclass
+++ /dev/null
@@ -1,41 +0,0 @@
-# parse kernel ABI version out of <linux/version.h>
-def get_kernelversion_headers(p):
- import re
-
- fn = p + '/include/linux/utsrelease.h'
- if not os.path.isfile(fn):
- # after 2.6.33-rc1
- fn = p + '/include/generated/utsrelease.h'
- if not os.path.isfile(fn):
- fn = p + '/include/linux/version.h'
-
- try:
- f = open(fn, 'r')
- except IOError:
- return None
-
- l = f.readlines()
- f.close()
- r = re.compile("#define UTS_RELEASE \"(.*)\"")
- for s in l:
- m = r.match(s)
- if m:
- return m.group(1)
- return None
-
-
-def get_kernelversion_file(p):
- fn = p + '/kernel-abiversion'
-
- try:
- with open(fn, 'r') as f:
- return f.readlines()[0].strip()
- except IOError:
- return None
-
-def linux_module_packages(s, d):
- suffix = ""
- return " ".join(map(lambda s: "kernel-module-%s%s" % (s.lower().replace('_', '-').replace('@', '+'), suffix), s.split()))
-
-# that's all
-
diff --git a/meta/classes/manpages.bbclass b/meta/classes/manpages.bbclass
deleted file mode 100644
index 5e09c77fe6..0000000000
--- a/meta/classes/manpages.bbclass
+++ /dev/null
@@ -1,45 +0,0 @@
-# Inherit this class to enable or disable building and installation of manpages
-# depending on whether 'api-documentation' is in DISTRO_FEATURES. Such building
-# tends to pull in the entire XML stack and other tools, so it's not enabled
-# by default.
-PACKAGECONFIG:append:class-target = " ${@bb.utils.contains('DISTRO_FEATURES', 'api-documentation', 'manpages', '', d)}"
-
-inherit qemu
-
-# usually manual files are packaged to ${PN}-doc except man-pages
-MAN_PKG ?= "${PN}-doc"
-
-# only add man-db to RDEPENDS when manual files are built and installed
-RDEPENDS:${MAN_PKG} += "${@bb.utils.contains('PACKAGECONFIG', 'manpages', 'man-db', '', d)}"
-
-pkg_postinst:${MAN_PKG}:append () {
- # only update manual page index caches when manual files are built and installed
- if ${@bb.utils.contains('PACKAGECONFIG', 'manpages', 'true', 'false', d)}; then
- if test -n "$D"; then
- if ${@bb.utils.contains('MACHINE_FEATURES', 'qemu-usermode', 'true', 'false', d)}; then
- sed "s:\(\s\)/:\1$D/:g" $D${sysconfdir}/man_db.conf | ${@qemu_run_binary(d, '$D', '${bindir}/mandb')} -C - -u -q $D${mandir}
- chown -R root:root $D${mandir}
-
- mkdir -p $D${localstatedir}/cache/man
- cd $D${mandir}
- find . -name index.db | while read index; do
- mkdir -p $D${localstatedir}/cache/man/$(dirname ${index})
- mv ${index} $D${localstatedir}/cache/man/${index}
- chown man:man $D${localstatedir}/cache/man/${index}
- done
- cd -
- else
- $INTERCEPT_DIR/postinst_intercept delay_to_first_boot ${PKG} mlprefix=${MLPREFIX}
- fi
- else
- mandb -q
- fi
- fi
-}
-
-pkg_postrm:${MAN_PKG}:append () {
- # only update manual page index caches when manual files are built and installed
- if ${@bb.utils.contains('PACKAGECONFIG', 'manpages', 'true', 'false', d)}; then
- mandb -q
- fi
-}
diff --git a/meta/classes/mcextend.bbclass b/meta/classes/mcextend.bbclass
index 0f8f962298..a489eeb3c7 100644
--- a/meta/classes/mcextend.bbclass
+++ b/meta/classes/mcextend.bbclass
@@ -1,3 +1,9 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
python mcextend_virtclass_handler () {
cls = e.data.getVar("BBEXTENDCURR")
variant = e.data.getVar("BBEXTENDVARIANT")
diff --git a/meta/classes/metadata_scm.bbclass b/meta/classes/metadata_scm.bbclass
index 47cb969b8d..6842119b6b 100644
--- a/meta/classes/metadata_scm.bbclass
+++ b/meta/classes/metadata_scm.bbclass
@@ -1,44 +1,10 @@
-def base_detect_revision(d):
- path = base_get_scmbasepath(d)
- return base_get_metadata_git_revision(path, d)
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
-def base_detect_branch(d):
- path = base_get_scmbasepath(d)
- return base_get_metadata_git_branch(path, d)
-
-def base_get_scmbasepath(d):
- return os.path.join(d.getVar('COREBASE'), 'meta')
-
-def base_get_metadata_svn_revision(path, d):
- # This only works with older subversion. For newer versions
- # this function will need to be fixed by someone interested
- revision = "<unknown>"
- try:
- with open("%s/.svn/entries" % path) as f:
- revision = f.readlines()[3].strip()
- except (IOError, IndexError):
- pass
- return revision
-
-def base_get_metadata_git_branch(path, d):
- import bb.process
-
- try:
- rev, _ = bb.process.run('git rev-parse --abbrev-ref HEAD', cwd=path)
- except bb.process.ExecutionError:
- rev = '<unknown>'
- return rev.strip()
-
-def base_get_metadata_git_revision(path, d):
- import bb.process
-
- try:
- rev, _ = bb.process.run('git rev-parse HEAD', cwd=path)
- except bb.process.ExecutionError:
- rev = '<unknown>'
- return rev.strip()
-
-METADATA_BRANCH := "${@base_detect_branch(d)}"
+METADATA_BRANCH := "${@oe.buildcfg.detect_branch(d)}"
METADATA_BRANCH[vardepvalue] = "${METADATA_BRANCH}"
-METADATA_REVISION := "${@base_detect_revision(d)}"
+METADATA_REVISION := "${@oe.buildcfg.detect_revision(d)}"
METADATA_REVISION[vardepvalue] = "${METADATA_REVISION}"
diff --git a/meta/classes/migrate_localcount.bbclass b/meta/classes/migrate_localcount.bbclass
index 810a541316..1d00c110e2 100644
--- a/meta/classes/migrate_localcount.bbclass
+++ b/meta/classes/migrate_localcount.bbclass
@@ -1,3 +1,9 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
PRSERV_DUMPDIR ??= "${LOG_DIR}/db"
LOCALCOUNT_DUMPFILE ??= "${PRSERV_DUMPDIR}/prserv-localcount-exports.inc"
diff --git a/meta/classes/multilib.bbclass b/meta/classes/multilib.bbclass
index 5859ca8d21..b6c09969b1 100644
--- a/meta/classes/multilib.bbclass
+++ b/meta/classes/multilib.bbclass
@@ -1,3 +1,9 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
python multilib_virtclass_handler () {
cls = e.data.getVar("BBEXTENDCURR")
variant = e.data.getVar("BBEXTENDVARIANT")
@@ -24,6 +30,9 @@ python multilib_virtclass_handler () {
if val:
e.data.setVar(name + "_MULTILIB_ORIGINAL", val)
+ # We nearly don't need this but dependencies on NON_MULTILIB_RECIPES don't work without it
+ d.setVar("SSTATE_ARCHS_TUNEPKG", "${@all_multilib_tune_values(d, 'TUNE_PKGARCH')}")
+
overrides = e.data.getVar("OVERRIDES", False)
pn = e.data.getVar("PN", False)
overrides = overrides.replace("pn-${PN}", "pn-${PN}:pn-" + pn)
@@ -45,6 +54,7 @@ python multilib_virtclass_handler () {
e.data.setVar("RECIPE_SYSROOT", "${WORKDIR}/recipe-sysroot")
e.data.setVar("STAGING_DIR_TARGET", "${WORKDIR}/recipe-sysroot")
e.data.setVar("STAGING_DIR_HOST", "${WORKDIR}/recipe-sysroot")
+ e.data.setVar("RECIPE_SYSROOT_MANIFEST_SUBDIR", "nativesdk-" + variant)
e.data.setVar("MLPREFIX", variant + "-")
override = ":virtclass-multilib-" + variant
e.data.setVar("OVERRIDES", e.data.getVar("OVERRIDES", False) + override)
@@ -130,6 +140,7 @@ python multilib_virtclass_handler_postkeyexp () {
return
clsextend.map_depends_variable("DEPENDS")
+ clsextend.map_depends_variable("PACKAGE_WRITE_DEPS")
clsextend.map_variable("PROVIDES")
if bb.data.inherits_class('cross-canadian', d):
diff --git a/meta/classes/multilib_global.bbclass b/meta/classes/multilib_global.bbclass
index e06307d057..6095d278dd 100644
--- a/meta/classes/multilib_global.bbclass
+++ b/meta/classes/multilib_global.bbclass
@@ -1,3 +1,9 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
def preferred_ml_updates(d):
# If any of PREFERRED_PROVIDER, PREFERRED_RPROVIDER, REQUIRED_VERSION
# or PREFERRED_VERSION are set, we need to mirror these variables in
@@ -189,6 +195,7 @@ python multilib_virtclass_handler_global () {
# from a copy of the datastore
localdata = bb.data.createCopy(d)
localdata.delVar("KERNEL_VERSION")
+ localdata.delVar("KERNEL_VERSION_PKG_NAME")
variants = (e.data.getVar("MULTILIB_VARIANTS") or "").split()
diff --git a/meta/classes/multilib_script.bbclass b/meta/classes/multilib_script.bbclass
deleted file mode 100644
index 41597341cd..0000000000
--- a/meta/classes/multilib_script.bbclass
+++ /dev/null
@@ -1,34 +0,0 @@
-#
-# Recipe needs to set MULTILIB_SCRIPTS in the form <pkgname>:<scriptname>, e.g.
-# MULTILIB_SCRIPTS = "${PN}-dev:${bindir}/file1 ${PN}:${base_bindir}/file2"
-# to indicate which script files to process from which packages.
-#
-
-inherit update-alternatives
-
-MULTILIB_SUFFIX = "${@d.getVar('base_libdir',1).split('/')[-1]}"
-
-PACKAGE_PREPROCESS_FUNCS += "multilibscript_rename"
-
-multilibscript_rename() {
- :
-}
-
-python () {
- # Do nothing if multilib isn't being used
- if not d.getVar("MULTILIB_VARIANTS"):
- return
- # Do nothing for native/cross
- if bb.data.inherits_class('native', d) or bb.data.inherits_class('cross', d):
- return
-
- for entry in (d.getVar("MULTILIB_SCRIPTS", False) or "").split():
- pkg, script = entry.split(":")
- epkg = d.expand(pkg)
- scriptname = os.path.basename(script)
- d.appendVar("ALTERNATIVE:" + epkg, " " + scriptname + " ")
- d.setVarFlag("ALTERNATIVE_LINK_NAME", scriptname, script)
- d.setVarFlag("ALTERNATIVE_TARGET", scriptname, script + "-${MULTILIB_SUFFIX}")
- d.appendVar("multilibscript_rename", "\n mv ${PKGD}" + script + " ${PKGD}" + script + "-${MULTILIB_SUFFIX}")
- d.appendVar("FILES:" + epkg, " " + script + "-${MULTILIB_SUFFIX}")
-}
diff --git a/meta/classes/npm.bbclass b/meta/classes/npm.bbclass
deleted file mode 100644
index dbfc2e728e..0000000000
--- a/meta/classes/npm.bbclass
+++ /dev/null
@@ -1,317 +0,0 @@
-# Copyright (C) 2020 Savoir-Faire Linux
-#
-# SPDX-License-Identifier: GPL-2.0-only
-#
-# This bbclass builds and installs an npm package to the target. The package
-# sources files should be fetched in the calling recipe by using the SRC_URI
-# variable. The ${S} variable should be updated depending of your fetcher.
-#
-# Usage:
-# SRC_URI = "..."
-# inherit npm
-#
-# Optional variables:
-# NPM_ARCH:
-# Override the auto generated npm architecture.
-#
-# NPM_INSTALL_DEV:
-# Set to 1 to also install devDependencies.
-
-inherit python3native
-
-DEPENDS:prepend = "nodejs-native "
-RDEPENDS:${PN}:append:class-target = " nodejs"
-
-EXTRA_OENPM = ""
-
-NPM_INSTALL_DEV ?= "0"
-
-NPM_NODEDIR ?= "${RECIPE_SYSROOT_NATIVE}${prefix_native}"
-
-def npm_target_arch_map(target_arch):
- """Maps arch names to npm arch names"""
- import re
- if re.match("p(pc|owerpc)(|64)", target_arch):
- return "ppc"
- elif re.match("i.86$", target_arch):
- return "ia32"
- elif re.match("x86_64$", target_arch):
- return "x64"
- elif re.match("arm64$", target_arch):
- return "arm"
- return target_arch
-
-NPM_ARCH ?= "${@npm_target_arch_map(d.getVar("TARGET_ARCH"))}"
-
-NPM_PACKAGE = "${WORKDIR}/npm-package"
-NPM_CACHE = "${WORKDIR}/npm-cache"
-NPM_BUILD = "${WORKDIR}/npm-build"
-
-def npm_global_configs(d):
- """Get the npm global configuration"""
- configs = []
- # Ensure no network access is done
- configs.append(("offline", "true"))
- configs.append(("proxy", "http://invalid"))
- # Configure the cache directory
- configs.append(("cache", d.getVar("NPM_CACHE")))
- return configs
-
-def npm_pack(env, srcdir, workdir):
- """Run 'npm pack' on a specified directory"""
- import shlex
- cmd = "npm pack %s" % shlex.quote(srcdir)
- args = [("ignore-scripts", "true")]
- tarball = env.run(cmd, args=args, workdir=workdir).strip("\n")
- return os.path.join(workdir, tarball)
-
-python npm_do_configure() {
- """
- Step one: configure the npm cache and the main npm package
-
- Every dependencies have been fetched and patched in the source directory.
- They have to be packed (this remove unneeded files) and added to the npm
- cache to be available for the next step.
-
- The main package and its associated manifest file and shrinkwrap file have
- to be configured to take into account these cached dependencies.
- """
- import base64
- import copy
- import json
- import re
- import shlex
- import stat
- import tempfile
- from bb.fetch2.npm import NpmEnvironment
- from bb.fetch2.npm import npm_unpack
- from bb.fetch2.npmsw import foreach_dependencies
- from bb.progress import OutOfProgressHandler
-
- bb.utils.remove(d.getVar("NPM_CACHE"), recurse=True)
- bb.utils.remove(d.getVar("NPM_PACKAGE"), recurse=True)
-
- env = NpmEnvironment(d, configs=npm_global_configs(d))
-
- def _npm_cache_add(tarball):
- """Run 'npm cache add' for a specified tarball"""
- cmd = "npm cache add %s" % shlex.quote(tarball)
- env.run(cmd)
-
- def _npm_integrity(tarball):
- """Return the npm integrity of a specified tarball"""
- sha512 = bb.utils.sha512_file(tarball)
- return "sha512-" + base64.b64encode(bytes.fromhex(sha512)).decode()
-
- def _npm_version(tarball):
- """Return the version of a specified tarball"""
- regex = r"-(\d+\.\d+\.\d+(-.*)?(\+.*)?)\.tgz"
- return re.search(regex, tarball).group(1)
-
- def _npmsw_dependency_dict(orig, deptree):
- """
- Return the sub dictionary in the 'orig' dictionary corresponding to the
- 'deptree' dependency tree. This function follows the shrinkwrap file
- format.
- """
- ptr = orig
- for dep in deptree:
- if "dependencies" not in ptr:
- ptr["dependencies"] = {}
- ptr = ptr["dependencies"]
- if dep not in ptr:
- ptr[dep] = {}
- ptr = ptr[dep]
- return ptr
-
- # Manage the manifest file and shrinkwrap files
- orig_manifest_file = d.expand("${S}/package.json")
- orig_shrinkwrap_file = d.expand("${S}/npm-shrinkwrap.json")
- cached_manifest_file = d.expand("${NPM_PACKAGE}/package.json")
- cached_shrinkwrap_file = d.expand("${NPM_PACKAGE}/npm-shrinkwrap.json")
-
- with open(orig_manifest_file, "r") as f:
- orig_manifest = json.load(f)
-
- cached_manifest = copy.deepcopy(orig_manifest)
- cached_manifest.pop("dependencies", None)
- cached_manifest.pop("devDependencies", None)
-
- has_shrinkwrap_file = True
-
- try:
- with open(orig_shrinkwrap_file, "r") as f:
- orig_shrinkwrap = json.load(f)
- except IOError:
- has_shrinkwrap_file = False
-
- if has_shrinkwrap_file:
- cached_shrinkwrap = copy.deepcopy(orig_shrinkwrap)
- cached_shrinkwrap.pop("dependencies", None)
-
- # Manage the dependencies
- progress = OutOfProgressHandler(d, r"^(\d+)/(\d+)$")
- progress_total = 1 # also count the main package
- progress_done = 0
-
- def _count_dependency(name, params, deptree):
- nonlocal progress_total
- progress_total += 1
-
- def _cache_dependency(name, params, deptree):
- destsubdirs = [os.path.join("node_modules", dep) for dep in deptree]
- destsuffix = os.path.join(*destsubdirs)
- with tempfile.TemporaryDirectory() as tmpdir:
- # Add the dependency to the npm cache
- destdir = os.path.join(d.getVar("S"), destsuffix)
- tarball = npm_pack(env, destdir, tmpdir)
- _npm_cache_add(tarball)
- # Add its signature to the cached shrinkwrap
- dep = _npmsw_dependency_dict(cached_shrinkwrap, deptree)
- dep["version"] = _npm_version(tarball)
- dep["integrity"] = _npm_integrity(tarball)
- if params.get("dev", False):
- dep["dev"] = True
- # Display progress
- nonlocal progress_done
- progress_done += 1
- progress.write("%d/%d" % (progress_done, progress_total))
-
- dev = bb.utils.to_boolean(d.getVar("NPM_INSTALL_DEV"), False)
-
- if has_shrinkwrap_file:
- foreach_dependencies(orig_shrinkwrap, _count_dependency, dev)
- foreach_dependencies(orig_shrinkwrap, _cache_dependency, dev)
-
- # Configure the main package
- with tempfile.TemporaryDirectory() as tmpdir:
- tarball = npm_pack(env, d.getVar("S"), tmpdir)
- npm_unpack(tarball, d.getVar("NPM_PACKAGE"), d)
-
- # Configure the cached manifest file and cached shrinkwrap file
- def _update_manifest(depkey):
- for name in orig_manifest.get(depkey, {}):
- version = cached_shrinkwrap["dependencies"][name]["version"]
- if depkey not in cached_manifest:
- cached_manifest[depkey] = {}
- cached_manifest[depkey][name] = version
-
- if has_shrinkwrap_file:
- _update_manifest("dependencies")
-
- if dev:
- if has_shrinkwrap_file:
- _update_manifest("devDependencies")
-
- os.chmod(cached_manifest_file, os.stat(cached_manifest_file).st_mode | stat.S_IWUSR)
- with open(cached_manifest_file, "w") as f:
- json.dump(cached_manifest, f, indent=2)
-
- if has_shrinkwrap_file:
- with open(cached_shrinkwrap_file, "w") as f:
- json.dump(cached_shrinkwrap, f, indent=2)
-}
-
-python npm_do_compile() {
- """
- Step two: install the npm package
-
- Use the configured main package and the cached dependencies to run the
- installation process. The installation is done in a directory which is
- not the destination directory yet.
-
- A combination of 'npm pack' and 'npm install' is used to ensure that the
- installed files are actual copies instead of symbolic links (which is the
- default npm behavior).
- """
- import shlex
- import tempfile
- from bb.fetch2.npm import NpmEnvironment
-
- bb.utils.remove(d.getVar("NPM_BUILD"), recurse=True)
-
- with tempfile.TemporaryDirectory() as tmpdir:
- args = []
- configs = npm_global_configs(d)
-
- if bb.utils.to_boolean(d.getVar("NPM_INSTALL_DEV"), False):
- configs.append(("also", "development"))
- else:
- configs.append(("only", "production"))
-
- # Report as many logs as possible for debugging purpose
- configs.append(("loglevel", "silly"))
-
- # Configure the installation to be done globally in the build directory
- configs.append(("global", "true"))
- configs.append(("prefix", d.getVar("NPM_BUILD")))
-
- # Add node-gyp configuration
- configs.append(("arch", d.getVar("NPM_ARCH")))
- configs.append(("release", "true"))
- configs.append(("nodedir", d.getVar("NPM_NODEDIR")))
- configs.append(("python", d.getVar("PYTHON")))
-
- env = NpmEnvironment(d, configs)
-
- # Add node-pre-gyp configuration
- args.append(("target_arch", d.getVar("NPM_ARCH")))
- args.append(("build-from-source", "true"))
-
- # Pack and install the main package
- tarball = npm_pack(env, d.getVar("NPM_PACKAGE"), tmpdir)
- cmd = "npm install %s %s" % (shlex.quote(tarball), d.getVar("EXTRA_OENPM"))
- env.run(cmd, args=args)
-}
-
-npm_do_install() {
- # Step three: final install
- #
- # The previous installation have to be filtered to remove some extra files.
-
- rm -rf ${D}
-
- # Copy the entire lib and bin directories
- install -d ${D}/${nonarch_libdir}
- cp --no-preserve=ownership --recursive ${NPM_BUILD}/lib/. ${D}/${nonarch_libdir}
-
- if [ -d "${NPM_BUILD}/bin" ]
- then
- install -d ${D}/${bindir}
- cp --no-preserve=ownership --recursive ${NPM_BUILD}/bin/. ${D}/${bindir}
- fi
-
- # If the package (or its dependencies) uses node-gyp to build native addons,
- # object files, static libraries or other temporary files can be hidden in
- # the lib directory. To reduce the package size and to avoid QA issues
- # (staticdev with static library files) these files must be removed.
- local GYP_REGEX=".*/build/Release/[^/]*.node"
-
- # Remove any node-gyp directory in ${D} to remove temporary build files
- for GYP_D_FILE in $(find ${D} -regex "${GYP_REGEX}")
- do
- local GYP_D_DIR=${GYP_D_FILE%/Release/*}
-
- rm --recursive --force ${GYP_D_DIR}
- done
-
- # Copy only the node-gyp release files
- for GYP_B_FILE in $(find ${NPM_BUILD} -regex "${GYP_REGEX}")
- do
- local GYP_D_FILE=${D}/${prefix}/${GYP_B_FILE#${NPM_BUILD}}
-
- install -d ${GYP_D_FILE%/*}
- install -m 755 ${GYP_B_FILE} ${GYP_D_FILE}
- done
-
- # Remove the shrinkwrap file which does not need to be packed
- rm -f ${D}/${nonarch_libdir}/node_modules/*/npm-shrinkwrap.json
- rm -f ${D}/${nonarch_libdir}/node_modules/@*/*/npm-shrinkwrap.json
-}
-
-FILES:${PN} += " \
- ${bindir} \
- ${nonarch_libdir} \
-"
-
-EXPORT_FUNCTIONS do_configure do_compile do_install
diff --git a/meta/classes/oelint.bbclass b/meta/classes/oelint.bbclass
index 2589d34059..458a25ecc3 100644
--- a/meta/classes/oelint.bbclass
+++ b/meta/classes/oelint.bbclass
@@ -1,3 +1,9 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
addtask lint before do_build
do_lint[nostamp] = "1"
python do_lint() {
diff --git a/meta/classes/own-mirrors.bbclass b/meta/classes/own-mirrors.bbclass
index ef972740ce..36c7f8e3f3 100644
--- a/meta/classes/own-mirrors.bbclass
+++ b/meta/classes/own-mirrors.bbclass
@@ -1,3 +1,9 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
PREMIRRORS:prepend = " \
cvs://.*/.* ${SOURCE_MIRROR_URL} \
svn://.*/.* ${SOURCE_MIRROR_URL} \
@@ -11,4 +17,6 @@ https?://.*/.* ${SOURCE_MIRROR_URL} \
ftp://.*/.* ${SOURCE_MIRROR_URL} \
npm://.*/?.* ${SOURCE_MIRROR_URL} \
s3://.*/.* ${SOURCE_MIRROR_URL} \
+crate://.*/.* ${SOURCE_MIRROR_URL} \
+gs://.*/.* ${SOURCE_MIRROR_URL} \
"
diff --git a/meta/classes/package.bbclass b/meta/classes/package.bbclass
deleted file mode 100644
index 62050a18b8..0000000000
--- a/meta/classes/package.bbclass
+++ /dev/null
@@ -1,2522 +0,0 @@
-#
-# Packaging process
-#
-# Executive summary: This class iterates over the functions listed in PACKAGEFUNCS
-# Taking D and splitting it up into the packages listed in PACKAGES, placing the
-# resulting output in PKGDEST.
-#
-# There are the following default steps but PACKAGEFUNCS can be extended:
-#
-# a) package_convert_pr_autoinc - convert AUTOINC in PKGV to ${PRSERV_PV_AUTOINC}
-#
-# b) perform_packagecopy - Copy D into PKGD
-#
-# c) package_do_split_locales - Split out the locale files, updates FILES and PACKAGES
-#
-# d) split_and_strip_files - split the files into runtime and debug and strip them.
-# Debug files include debug info split, and associated sources that end up in -dbg packages
-#
-# e) fixup_perms - Fix up permissions in the package before we split it.
-#
-# f) populate_packages - Split the files in PKGD into separate packages in PKGDEST/<pkgname>
-# Also triggers the binary stripping code to put files in -dbg packages.
-#
-# g) package_do_filedeps - Collect perfile run-time dependency metadata
-# The data is stores in FILER{PROVIDES,DEPENDS}_file_pkg variables with
-# a list of affected files in FILER{PROVIDES,DEPENDS}FLIST_pkg
-#
-# h) package_do_shlibs - Look at the shared libraries generated and autotmatically add any
-# dependencies found. Also stores the package name so anyone else using this library
-# knows which package to depend on.
-#
-# i) package_do_pkgconfig - Keep track of which packages need and provide which .pc files
-#
-# j) read_shlibdeps - Reads the stored shlibs information into the metadata
-#
-# k) package_depchains - Adds automatic dependencies to -dbg and -dev packages
-#
-# l) emit_pkgdata - saves the packaging data into PKGDATA_DIR for use in later
-# packaging steps
-
-inherit packagedata
-inherit chrpath
-inherit package_pkgdata
-inherit insane
-
-PKGD = "${WORKDIR}/package"
-PKGDEST = "${WORKDIR}/packages-split"
-
-LOCALE_SECTION ?= ''
-
-ALL_MULTILIB_PACKAGE_ARCHS = "${@all_multilib_tune_values(d, 'PACKAGE_ARCHS')}"
-
-# rpm is used for the per-file dependency identification
-# dwarfsrcfiles is used to determine the list of debug source files
-PACKAGE_DEPENDS += "rpm-native dwarfsrcfiles-native"
-
-
-# If your postinstall can execute at rootfs creation time rather than on
-# target but depends on a native/cross tool in order to execute, you need to
-# list that tool in PACKAGE_WRITE_DEPS. Target package dependencies belong
-# in the package dependencies as normal, this is just for native/cross support
-# tools at rootfs build time.
-PACKAGE_WRITE_DEPS ??= ""
-
-def legitimize_package_name(s):
- """
- Make sure package names are legitimate strings
- """
- import re
-
- def fixutf(m):
- cp = m.group(1)
- if cp:
- return ('\\u%s' % cp).encode('latin-1').decode('unicode_escape')
-
- # Handle unicode codepoints encoded as <U0123>, as in glibc locale files.
- s = re.sub(r'<U([0-9A-Fa-f]{1,4})>', fixutf, s)
-
- # Remaining package name validity fixes
- return s.lower().replace('_', '-').replace('@', '+').replace(',', '+').replace('/', '-')
-
-def do_split_packages(d, root, file_regex, output_pattern, description, postinst=None, recursive=False, hook=None, extra_depends=None, aux_files_pattern=None, postrm=None, allow_dirs=False, prepend=False, match_path=False, aux_files_pattern_verbatim=None, allow_links=False, summary=None):
- """
- Used in .bb files to split up dynamically generated subpackages of a
- given package, usually plugins or modules.
-
- Arguments:
- root -- the path in which to search
- file_regex -- regular expression to match searched files. Use
- parentheses () to mark the part of this expression
- that should be used to derive the module name (to be
- substituted where %s is used in other function
- arguments as noted below)
- output_pattern -- pattern to use for the package names. Must include %s.
- description -- description to set for each package. Must include %s.
- postinst -- postinstall script to use for all packages (as a
- string)
- recursive -- True to perform a recursive search - default False
- hook -- a hook function to be called for every match. The
- function will be called with the following arguments
- (in the order listed):
- f: full path to the file/directory match
- pkg: the package name
- file_regex: as above
- output_pattern: as above
- modulename: the module name derived using file_regex
- extra_depends -- extra runtime dependencies (RDEPENDS) to be set for
- all packages. The default value of None causes a
- dependency on the main package (${PN}) - if you do
- not want this, pass '' for this parameter.
- aux_files_pattern -- extra item(s) to be added to FILES for each
- package. Can be a single string item or a list of
- strings for multiple items. Must include %s.
- postrm -- postrm script to use for all packages (as a string)
- allow_dirs -- True allow directories to be matched - default False
- prepend -- if True, prepend created packages to PACKAGES instead
- of the default False which appends them
- match_path -- match file_regex on the whole relative path to the
- root rather than just the file name
- aux_files_pattern_verbatim -- extra item(s) to be added to FILES for
- each package, using the actual derived module name
- rather than converting it to something legal for a
- package name. Can be a single string item or a list
- of strings for multiple items. Must include %s.
- allow_links -- True to allow symlinks to be matched - default False
- summary -- Summary to set for each package. Must include %s;
- defaults to description if not set.
-
- """
-
- dvar = d.getVar('PKGD')
- root = d.expand(root)
- output_pattern = d.expand(output_pattern)
- extra_depends = d.expand(extra_depends)
-
- # If the root directory doesn't exist, don't error out later but silently do
- # no splitting.
- if not os.path.exists(dvar + root):
- return []
-
- ml = d.getVar("MLPREFIX")
- if ml:
- if not output_pattern.startswith(ml):
- output_pattern = ml + output_pattern
-
- newdeps = []
- for dep in (extra_depends or "").split():
- if dep.startswith(ml):
- newdeps.append(dep)
- else:
- newdeps.append(ml + dep)
- if newdeps:
- extra_depends = " ".join(newdeps)
-
-
- packages = d.getVar('PACKAGES').split()
- split_packages = set()
-
- if postinst:
- postinst = '#!/bin/sh\n' + postinst + '\n'
- if postrm:
- postrm = '#!/bin/sh\n' + postrm + '\n'
- if not recursive:
- objs = os.listdir(dvar + root)
- else:
- objs = []
- for walkroot, dirs, files in os.walk(dvar + root):
- for file in files:
- relpath = os.path.join(walkroot, file).replace(dvar + root + '/', '', 1)
- if relpath:
- objs.append(relpath)
-
- if extra_depends == None:
- extra_depends = d.getVar("PN")
-
- if not summary:
- summary = description
-
- for o in sorted(objs):
- import re, stat
- if match_path:
- m = re.match(file_regex, o)
- else:
- m = re.match(file_regex, os.path.basename(o))
-
- if not m:
- continue
- f = os.path.join(dvar + root, o)
- mode = os.lstat(f).st_mode
- if not (stat.S_ISREG(mode) or (allow_links and stat.S_ISLNK(mode)) or (allow_dirs and stat.S_ISDIR(mode))):
- continue
- on = legitimize_package_name(m.group(1))
- pkg = output_pattern % on
- split_packages.add(pkg)
- if not pkg in packages:
- if prepend:
- packages = [pkg] + packages
- else:
- packages.append(pkg)
- oldfiles = d.getVar('FILES:' + pkg)
- newfile = os.path.join(root, o)
- # These names will be passed through glob() so if the filename actually
- # contains * or ? (rare, but possible) we need to handle that specially
- newfile = newfile.replace('*', '[*]')
- newfile = newfile.replace('?', '[?]')
- if not oldfiles:
- the_files = [newfile]
- if aux_files_pattern:
- if type(aux_files_pattern) is list:
- for fp in aux_files_pattern:
- the_files.append(fp % on)
- else:
- the_files.append(aux_files_pattern % on)
- if aux_files_pattern_verbatim:
- if type(aux_files_pattern_verbatim) is list:
- for fp in aux_files_pattern_verbatim:
- the_files.append(fp % m.group(1))
- else:
- the_files.append(aux_files_pattern_verbatim % m.group(1))
- d.setVar('FILES:' + pkg, " ".join(the_files))
- else:
- d.setVar('FILES:' + pkg, oldfiles + " " + newfile)
- if extra_depends != '':
- d.appendVar('RDEPENDS:' + pkg, ' ' + extra_depends)
- if not d.getVar('DESCRIPTION:' + pkg):
- d.setVar('DESCRIPTION:' + pkg, description % on)
- if not d.getVar('SUMMARY:' + pkg):
- d.setVar('SUMMARY:' + pkg, summary % on)
- if postinst:
- d.setVar('pkg_postinst:' + pkg, postinst)
- if postrm:
- d.setVar('pkg_postrm:' + pkg, postrm)
- if callable(hook):
- hook(f, pkg, file_regex, output_pattern, m.group(1))
-
- d.setVar('PACKAGES', ' '.join(packages))
- return list(split_packages)
-
-PACKAGE_DEPENDS += "file-native"
-
-python () {
- if d.getVar('PACKAGES') != '':
- deps = ""
- for dep in (d.getVar('PACKAGE_DEPENDS') or "").split():
- deps += " %s:do_populate_sysroot" % dep
- if d.getVar('PACKAGE_MINIDEBUGINFO') == '1':
- deps += ' xz-native:do_populate_sysroot'
- d.appendVarFlag('do_package', 'depends', deps)
-
- # shlibs requires any DEPENDS to have already packaged for the *.list files
- d.appendVarFlag('do_package', 'deptask', " do_packagedata")
-}
-
-# Get a list of files from file vars by searching files under current working directory
-# The list contains symlinks, directories and normal files.
-def files_from_filevars(filevars):
- import os,glob
- cpath = oe.cachedpath.CachedPath()
- files = []
- for f in filevars:
- if os.path.isabs(f):
- f = '.' + f
- if not f.startswith("./"):
- f = './' + f
- globbed = glob.glob(f)
- if globbed:
- if [ f ] != globbed:
- files += globbed
- continue
- files.append(f)
-
- symlink_paths = []
- for ind, f in enumerate(files):
- # Handle directory symlinks. Truncate path to the lowest level symlink
- parent = ''
- for dirname in f.split('/')[:-1]:
- parent = os.path.join(parent, dirname)
- if dirname == '.':
- continue
- if cpath.islink(parent):
- bb.warn("FILES contains file '%s' which resides under a "
- "directory symlink. Please fix the recipe and use the "
- "real path for the file." % f[1:])
- symlink_paths.append(f)
- files[ind] = parent
- f = parent
- break
-
- if not cpath.islink(f):
- if cpath.isdir(f):
- newfiles = [ os.path.join(f,x) for x in os.listdir(f) ]
- if newfiles:
- files += newfiles
-
- return files, symlink_paths
-
-# Called in package_<rpm,ipk,deb>.bbclass to get the correct list of configuration files
-def get_conffiles(pkg, d):
- pkgdest = d.getVar('PKGDEST')
- root = os.path.join(pkgdest, pkg)
- cwd = os.getcwd()
- os.chdir(root)
-
- conffiles = d.getVar('CONFFILES:%s' % pkg);
- if conffiles == None:
- conffiles = d.getVar('CONFFILES')
- if conffiles == None:
- conffiles = ""
- conffiles = conffiles.split()
- conf_orig_list = files_from_filevars(conffiles)[0]
-
- # Remove links and directories from conf_orig_list to get conf_list which only contains normal files
- conf_list = []
- for f in conf_orig_list:
- if os.path.isdir(f):
- continue
- if os.path.islink(f):
- continue
- if not os.path.exists(f):
- continue
- conf_list.append(f)
-
- # Remove the leading './'
- for i in range(0, len(conf_list)):
- conf_list[i] = conf_list[i][1:]
-
- os.chdir(cwd)
- return conf_list
-
-def checkbuildpath(file, d):
- tmpdir = d.getVar('TMPDIR')
- with open(file) as f:
- file_content = f.read()
- if tmpdir in file_content:
- return True
-
- return False
-
-def parse_debugsources_from_dwarfsrcfiles_output(dwarfsrcfiles_output):
- debugfiles = {}
-
- for line in dwarfsrcfiles_output.splitlines():
- if line.startswith("\t"):
- debugfiles[os.path.normpath(line.split()[0])] = ""
-
- return debugfiles.keys()
-
-def source_info(file, d, fatal=True):
- import subprocess
-
- cmd = ["dwarfsrcfiles", file]
- try:
- output = subprocess.check_output(cmd, universal_newlines=True, stderr=subprocess.STDOUT)
- retval = 0
- except subprocess.CalledProcessError as exc:
- output = exc.output
- retval = exc.returncode
-
- # 255 means a specific file wasn't fully parsed to get the debug file list, which is not a fatal failure
- if retval != 0 and retval != 255:
- msg = "dwarfsrcfiles failed with exit code %s (cmd was %s)%s" % (retval, cmd, ":\n%s" % output if output else "")
- if fatal:
- bb.fatal(msg)
- bb.note(msg)
-
- debugsources = parse_debugsources_from_dwarfsrcfiles_output(output)
-
- return list(debugsources)
-
-def splitdebuginfo(file, dvar, dv, d):
- # Function to split a single file into two components, one is the stripped
- # target system binary, the other contains any debugging information. The
- # two files are linked to reference each other.
- #
- # return a mapping of files:debugsources
-
- import stat
- import subprocess
-
- src = file[len(dvar):]
- dest = dv["libdir"] + os.path.dirname(src) + dv["dir"] + "/" + os.path.basename(src) + dv["append"]
- debugfile = dvar + dest
- sources = []
-
- # Split the file...
- bb.utils.mkdirhier(os.path.dirname(debugfile))
- #bb.note("Split %s -> %s" % (file, debugfile))
- # Only store off the hard link reference if we successfully split!
-
- dvar = d.getVar('PKGD')
- objcopy = d.getVar("OBJCOPY")
-
- newmode = None
- if not os.access(file, os.W_OK) or os.access(file, os.R_OK):
- origmode = os.stat(file)[stat.ST_MODE]
- newmode = origmode | stat.S_IWRITE | stat.S_IREAD
- os.chmod(file, newmode)
-
- # We need to extract the debug src information here...
- if dv["srcdir"]:
- sources = source_info(file, d)
-
- bb.utils.mkdirhier(os.path.dirname(debugfile))
-
- subprocess.check_output([objcopy, '--only-keep-debug', file, debugfile], stderr=subprocess.STDOUT)
-
- # Set the debuglink to have the view of the file path on the target
- subprocess.check_output([objcopy, '--add-gnu-debuglink', debugfile, file], stderr=subprocess.STDOUT)
-
- if newmode:
- os.chmod(file, origmode)
-
- return (file, sources)
-
-def splitstaticdebuginfo(file, dvar, dv, d):
- # Unlike the function above, there is no way to split a static library
- # two components. So to get similar results we will copy the unmodified
- # static library (containing the debug symbols) into a new directory.
- # We will then strip (preserving symbols) the static library in the
- # typical location.
- #
- # return a mapping of files:debugsources
-
- import stat
-
- src = file[len(dvar):]
- dest = dv["staticlibdir"] + os.path.dirname(src) + dv["staticdir"] + "/" + os.path.basename(src) + dv["staticappend"]
- debugfile = dvar + dest
- sources = []
-
- # Copy the file...
- bb.utils.mkdirhier(os.path.dirname(debugfile))
- #bb.note("Copy %s -> %s" % (file, debugfile))
-
- dvar = d.getVar('PKGD')
-
- newmode = None
- if not os.access(file, os.W_OK) or os.access(file, os.R_OK):
- origmode = os.stat(file)[stat.ST_MODE]
- newmode = origmode | stat.S_IWRITE | stat.S_IREAD
- os.chmod(file, newmode)
-
- # We need to extract the debug src information here...
- if dv["srcdir"]:
- sources = source_info(file, d)
-
- bb.utils.mkdirhier(os.path.dirname(debugfile))
-
- # Copy the unmodified item to the debug directory
- shutil.copy2(file, debugfile)
-
- if newmode:
- os.chmod(file, origmode)
-
- return (file, sources)
-
-def inject_minidebuginfo(file, dvar, dv, d):
- # Extract just the symbols from debuginfo into minidebuginfo,
- # compress it with xz and inject it back into the binary in a .gnu_debugdata section.
- # https://sourceware.org/gdb/onlinedocs/gdb/MiniDebugInfo.html
-
- import subprocess
-
- readelf = d.getVar('READELF')
- nm = d.getVar('NM')
- objcopy = d.getVar('OBJCOPY')
-
- minidebuginfodir = d.expand('${WORKDIR}/minidebuginfo')
-
- src = file[len(dvar):]
- dest = dv["libdir"] + os.path.dirname(src) + dv["dir"] + "/" + os.path.basename(src) + dv["append"]
- debugfile = dvar + dest
- minidebugfile = minidebuginfodir + src + '.minidebug'
- bb.utils.mkdirhier(os.path.dirname(minidebugfile))
-
- # If we didn't produce debuginfo for any reason, we can't produce minidebuginfo either
- # so skip it.
- if not os.path.exists(debugfile):
- bb.debug(1, 'ELF file {} has no debuginfo, skipping minidebuginfo injection'.format(file))
- return
-
- # Find non-allocated PROGBITS, NOTE, and NOBITS sections in the debuginfo.
- # We will exclude all of these from minidebuginfo to save space.
- remove_section_names = []
- for line in subprocess.check_output([readelf, '-W', '-S', debugfile], universal_newlines=True).splitlines():
- fields = line.split()
- if len(fields) < 8:
- continue
- name = fields[0]
- type = fields[1]
- flags = fields[7]
- # .debug_ sections will be removed by objcopy -S so no need to explicitly remove them
- if name.startswith('.debug_'):
- continue
- if 'A' not in flags and type in ['PROGBITS', 'NOTE', 'NOBITS']:
- remove_section_names.append(name)
-
- # List dynamic symbols in the binary. We can exclude these from minidebuginfo
- # because they are always present in the binary.
- dynsyms = set()
- for line in subprocess.check_output([nm, '-D', file, '--format=posix', '--defined-only'], universal_newlines=True).splitlines():
- dynsyms.add(line.split()[0])
-
- # Find all function symbols from debuginfo which aren't in the dynamic symbols table.
- # These are the ones we want to keep in minidebuginfo.
- keep_symbols_file = minidebugfile + '.symlist'
- found_any_symbols = False
- with open(keep_symbols_file, 'w') as f:
- for line in subprocess.check_output([nm, debugfile, '--format=sysv', '--defined-only'], universal_newlines=True).splitlines():
- fields = line.split('|')
- if len(fields) < 7:
- continue
- name = fields[0].strip()
- type = fields[3].strip()
- if type == 'FUNC' and name not in dynsyms:
- f.write('{}\n'.format(name))
- found_any_symbols = True
-
- if not found_any_symbols:
- bb.debug(1, 'ELF file {} contains no symbols, skipping minidebuginfo injection'.format(file))
- return
-
- bb.utils.remove(minidebugfile)
- bb.utils.remove(minidebugfile + '.xz')
-
- subprocess.check_call([objcopy, '-S'] +
- ['--remove-section={}'.format(s) for s in remove_section_names] +
- ['--keep-symbols={}'.format(keep_symbols_file), debugfile, minidebugfile])
-
- subprocess.check_call(['xz', '--keep', minidebugfile])
-
- subprocess.check_call([objcopy, '--add-section', '.gnu_debugdata={}.xz'.format(minidebugfile), file])
-
-def copydebugsources(debugsrcdir, sources, d):
- # The debug src information written out to sourcefile is further processed
- # and copied to the destination here.
-
- import stat
- import subprocess
-
- if debugsrcdir and sources:
- sourcefile = d.expand("${WORKDIR}/debugsources.list")
- bb.utils.remove(sourcefile)
-
- # filenames are null-separated - this is an artefact of the previous use
- # of rpm's debugedit, which was writing them out that way, and the code elsewhere
- # is still assuming that.
- debuglistoutput = '\0'.join(sources) + '\0'
- with open(sourcefile, 'a') as sf:
- sf.write(debuglistoutput)
-
- dvar = d.getVar('PKGD')
- strip = d.getVar("STRIP")
- objcopy = d.getVar("OBJCOPY")
- workdir = d.getVar("WORKDIR")
- workparentdir = os.path.dirname(os.path.dirname(workdir))
- workbasedir = os.path.basename(os.path.dirname(workdir)) + "/" + os.path.basename(workdir)
-
- # If build path exists in sourcefile, it means toolchain did not use
- # -fdebug-prefix-map to compile
- if checkbuildpath(sourcefile, d):
- localsrc_prefix = workparentdir + "/"
- else:
- localsrc_prefix = "/usr/src/debug/"
-
- nosuchdir = []
- basepath = dvar
- for p in debugsrcdir.split("/"):
- basepath = basepath + "/" + p
- if not cpath.exists(basepath):
- nosuchdir.append(basepath)
- bb.utils.mkdirhier(basepath)
- cpath.updatecache(basepath)
-
- # Ignore files from the recipe sysroots (target and native)
- processdebugsrc = "LC_ALL=C ; sort -z -u '%s' | egrep -v -z '((<internal>|<built-in>)$|/.*recipe-sysroot.*/)' | "
- # We need to ignore files that are not actually ours
- # we do this by only paying attention to items from this package
- processdebugsrc += "fgrep -zw '%s' | "
- # Remove prefix in the source paths
- processdebugsrc += "sed 's#%s##g' | "
- processdebugsrc += "(cd '%s' ; cpio -pd0mlL --no-preserve-owner '%s%s' 2>/dev/null)"
-
- cmd = processdebugsrc % (sourcefile, workbasedir, localsrc_prefix, workparentdir, dvar, debugsrcdir)
- try:
- subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)
- except subprocess.CalledProcessError:
- # Can "fail" if internal headers/transient sources are attempted
- pass
-
- # cpio seems to have a bug with -lL together and symbolic links are just copied, not dereferenced.
- # Work around this by manually finding and copying any symbolic links that made it through.
- cmd = "find %s%s -type l -print0 -delete | sed s#%s%s/##g | (cd '%s' ; cpio -pd0mL --no-preserve-owner '%s%s')" % \
- (dvar, debugsrcdir, dvar, debugsrcdir, workparentdir, dvar, debugsrcdir)
- subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)
-
- # The copy by cpio may have resulted in some empty directories! Remove these
- cmd = "find %s%s -empty -type d -delete" % (dvar, debugsrcdir)
- subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)
-
- # Also remove debugsrcdir if its empty
- for p in nosuchdir[::-1]:
- if os.path.exists(p) and not os.listdir(p):
- os.rmdir(p)
-
-#
-# Package data handling routines
-#
-
-def get_package_mapping (pkg, basepkg, d, depversions=None):
- import oe.packagedata
-
- data = oe.packagedata.read_subpkgdata(pkg, d)
- key = "PKG:%s" % pkg
-
- if key in data:
- if bb.data.inherits_class('allarch', d) and bb.data.inherits_class('packagegroup', d) and pkg != data[key]:
- bb.error("An allarch packagegroup shouldn't depend on packages which are dynamically renamed (%s to %s)" % (pkg, data[key]))
- # Have to avoid undoing the write_extra_pkgs(global_variants...)
- if bb.data.inherits_class('allarch', d) and not d.getVar('MULTILIB_VARIANTS') \
- and data[key] == basepkg:
- return pkg
- if depversions == []:
- # Avoid returning a mapping if the renamed package rprovides its original name
- rprovkey = "RPROVIDES:%s" % pkg
- if rprovkey in data:
- if pkg in bb.utils.explode_dep_versions2(data[rprovkey]):
- bb.note("%s rprovides %s, not replacing the latter" % (data[key], pkg))
- return pkg
- # Do map to rewritten package name
- return data[key]
-
- return pkg
-
-def get_package_additional_metadata (pkg_type, d):
- base_key = "PACKAGE_ADD_METADATA"
- for key in ("%s_%s" % (base_key, pkg_type.upper()), base_key):
- if d.getVar(key, False) is None:
- continue
- d.setVarFlag(key, "type", "list")
- if d.getVarFlag(key, "separator") is None:
- d.setVarFlag(key, "separator", "\\n")
- metadata_fields = [field.strip() for field in oe.data.typed_value(key, d)]
- return "\n".join(metadata_fields).strip()
-
-def runtime_mapping_rename (varname, pkg, d):
- #bb.note("%s before: %s" % (varname, d.getVar(varname)))
-
- new_depends = {}
- deps = bb.utils.explode_dep_versions2(d.getVar(varname) or "")
- for depend, depversions in deps.items():
- new_depend = get_package_mapping(depend, pkg, d, depversions)
- if depend != new_depend:
- bb.note("package name mapping done: %s -> %s" % (depend, new_depend))
- new_depends[new_depend] = deps[depend]
-
- d.setVar(varname, bb.utils.join_deps(new_depends, commasep=False))
-
- #bb.note("%s after: %s" % (varname, d.getVar(varname)))
-
-#
-# Used by do_packagedata (and possibly other routines post do_package)
-#
-
-PRSERV_ACTIVE = "${@bool(d.getVar("PRSERV_HOST"))}"
-PRSERV_ACTIVE[vardepvalue] = "${PRSERV_ACTIVE}"
-package_get_auto_pr[vardepsexclude] = "BB_TASKDEPDATA"
-package_get_auto_pr[vardeps] += "PRSERV_ACTIVE"
-python package_get_auto_pr() {
- import oe.prservice
-
- def get_do_package_hash(pn):
- if d.getVar("BB_RUNTASK") != "do_package":
- taskdepdata = d.getVar("BB_TASKDEPDATA", False)
- for dep in taskdepdata:
- if taskdepdata[dep][1] == "do_package" and taskdepdata[dep][0] == pn:
- return taskdepdata[dep][6]
- return None
-
- # Support per recipe PRSERV_HOST
- pn = d.getVar('PN')
- host = d.getVar("PRSERV_HOST_" + pn)
- if not (host is None):
- d.setVar("PRSERV_HOST", host)
-
- pkgv = d.getVar("PKGV")
-
- # PR Server not active, handle AUTOINC
- if not d.getVar('PRSERV_HOST'):
- d.setVar("PRSERV_PV_AUTOINC", "0")
- return
-
- auto_pr = None
- pv = d.getVar("PV")
- version = d.getVar("PRAUTOINX")
- pkgarch = d.getVar("PACKAGE_ARCH")
- checksum = get_do_package_hash(pn)
-
- # If do_package isn't in the dependencies, we can't get the checksum...
- if not checksum:
- bb.warn('Task %s requested do_package unihash, but it was not available.' % d.getVar('BB_RUNTASK'))
- #taskdepdata = d.getVar("BB_TASKDEPDATA", False)
- #for dep in taskdepdata:
- # bb.warn('%s:%s = %s' % (taskdepdata[dep][0], taskdepdata[dep][1], taskdepdata[dep][6]))
- return
-
- if d.getVar('PRSERV_LOCKDOWN'):
- auto_pr = d.getVar('PRAUTO_' + version + '_' + pkgarch) or d.getVar('PRAUTO_' + version) or None
- if auto_pr is None:
- bb.fatal("Can NOT get PRAUTO from lockdown exported file")
- d.setVar('PRAUTO',str(auto_pr))
- return
-
- try:
- conn = oe.prservice.prserv_make_conn(d)
- if conn is not None:
- if "AUTOINC" in pkgv:
- srcpv = bb.fetch2.get_srcrev(d)
- base_ver = "AUTOINC-%s" % version[:version.find(srcpv)]
- value = conn.getPR(base_ver, pkgarch, srcpv)
- d.setVar("PRSERV_PV_AUTOINC", str(value))
-
- auto_pr = conn.getPR(version, pkgarch, checksum)
- conn.close()
- except Exception as e:
- bb.fatal("Can NOT get PRAUTO, exception %s" % str(e))
- if auto_pr is None:
- bb.fatal("Can NOT get PRAUTO from remote PR service")
- d.setVar('PRAUTO',str(auto_pr))
-}
-
-#
-# Package functions suitable for inclusion in PACKAGEFUNCS
-#
-
-python package_convert_pr_autoinc() {
- pkgv = d.getVar("PKGV")
-
- # Adjust pkgv as necessary...
- if 'AUTOINC' in pkgv:
- d.setVar("PKGV", pkgv.replace("AUTOINC", "${PRSERV_PV_AUTOINC}"))
-
- # Change PRSERV_PV_AUTOINC and EXTENDPRAUTO usage to special values
- d.setVar('PRSERV_PV_AUTOINC', '@PRSERV_PV_AUTOINC@')
- d.setVar('EXTENDPRAUTO', '@EXTENDPRAUTO@')
-}
-
-LOCALEBASEPN ??= "${PN}"
-
-python package_do_split_locales() {
- if (d.getVar('PACKAGE_NO_LOCALE') == '1'):
- bb.debug(1, "package requested not splitting locales")
- return
-
- packages = (d.getVar('PACKAGES') or "").split()
-
- datadir = d.getVar('datadir')
- if not datadir:
- bb.note("datadir not defined")
- return
-
- dvar = d.getVar('PKGD')
- pn = d.getVar('LOCALEBASEPN')
-
- if pn + '-locale' in packages:
- packages.remove(pn + '-locale')
-
- localedir = os.path.join(dvar + datadir, 'locale')
-
- if not cpath.isdir(localedir):
- bb.debug(1, "No locale files in this package")
- return
-
- locales = os.listdir(localedir)
-
- summary = d.getVar('SUMMARY') or pn
- description = d.getVar('DESCRIPTION') or ""
- locale_section = d.getVar('LOCALE_SECTION')
- mlprefix = d.getVar('MLPREFIX') or ""
- for l in sorted(locales):
- ln = legitimize_package_name(l)
- pkg = pn + '-locale-' + ln
- packages.append(pkg)
- d.setVar('FILES:' + pkg, os.path.join(datadir, 'locale', l))
- d.setVar('RRECOMMENDS:' + pkg, '%svirtual-locale-%s' % (mlprefix, ln))
- d.setVar('RPROVIDES:' + pkg, '%s-locale %s%s-translation' % (pn, mlprefix, ln))
- d.setVar('SUMMARY:' + pkg, '%s - %s translations' % (summary, l))
- d.setVar('DESCRIPTION:' + pkg, '%s This package contains language translation files for the %s locale.' % (description, l))
- if locale_section:
- d.setVar('SECTION:' + pkg, locale_section)
-
- d.setVar('PACKAGES', ' '.join(packages))
-
- # Disabled by RP 18/06/07
- # Wildcards aren't supported in debian
- # They break with ipkg since glibc-locale* will mean that
- # glibc-localedata-translit* won't install as a dependency
- # for some other package which breaks meta-toolchain
- # Probably breaks since virtual-locale- isn't provided anywhere
- #rdep = (d.getVar('RDEPENDS:%s' % pn) or "").split()
- #rdep.append('%s-locale*' % pn)
- #d.setVar('RDEPENDS:%s' % pn, ' '.join(rdep))
-}
-
-python perform_packagecopy () {
- import subprocess
- import shutil
-
- dest = d.getVar('D')
- dvar = d.getVar('PKGD')
-
- # Start by package population by taking a copy of the installed
- # files to operate on
- # Preserve sparse files and hard links
- cmd = 'tar --exclude=./sysroot-only -cf - -C %s -p -S . | tar -xf - -C %s' % (dest, dvar)
- subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)
-
- # replace RPATHs for the nativesdk binaries, to make them relocatable
- if bb.data.inherits_class('nativesdk', d) or bb.data.inherits_class('cross-canadian', d):
- rpath_replace (dvar, d)
-}
-perform_packagecopy[cleandirs] = "${PKGD}"
-perform_packagecopy[dirs] = "${PKGD}"
-
-# We generate a master list of directories to process, we start by
-# seeding this list with reasonable defaults, then load from
-# the fs-perms.txt files
-python fixup_perms () {
- import pwd, grp
-
- # init using a string with the same format as a line as documented in
- # the fs-perms.txt file
- # <path> <mode> <uid> <gid> <walk> <fmode> <fuid> <fgid>
- # <path> link <link target>
- #
- # __str__ can be used to print out an entry in the input format
- #
- # if fs_perms_entry.path is None:
- # an error occurred
- # if fs_perms_entry.link, you can retrieve:
- # fs_perms_entry.path = path
- # fs_perms_entry.link = target of link
- # if not fs_perms_entry.link, you can retrieve:
- # fs_perms_entry.path = path
- # fs_perms_entry.mode = expected dir mode or None
- # fs_perms_entry.uid = expected uid or -1
- # fs_perms_entry.gid = expected gid or -1
- # fs_perms_entry.walk = 'true' or something else
- # fs_perms_entry.fmode = expected file mode or None
- # fs_perms_entry.fuid = expected file uid or -1
- # fs_perms_entry_fgid = expected file gid or -1
- class fs_perms_entry():
- def __init__(self, line):
- lsplit = line.split()
- if len(lsplit) == 3 and lsplit[1].lower() == "link":
- self._setlink(lsplit[0], lsplit[2])
- elif len(lsplit) == 8:
- self._setdir(lsplit[0], lsplit[1], lsplit[2], lsplit[3], lsplit[4], lsplit[5], lsplit[6], lsplit[7])
- else:
- msg = "Fixup Perms: invalid config line %s" % line
- oe.qa.handle_error("perm-config", msg, d)
- self.path = None
- self.link = None
-
- def _setdir(self, path, mode, uid, gid, walk, fmode, fuid, fgid):
- self.path = os.path.normpath(path)
- self.link = None
- self.mode = self._procmode(mode)
- self.uid = self._procuid(uid)
- self.gid = self._procgid(gid)
- self.walk = walk.lower()
- self.fmode = self._procmode(fmode)
- self.fuid = self._procuid(fuid)
- self.fgid = self._procgid(fgid)
-
- def _setlink(self, path, link):
- self.path = os.path.normpath(path)
- self.link = link
-
- def _procmode(self, mode):
- if not mode or (mode and mode == "-"):
- return None
- else:
- return int(mode,8)
-
- # Note uid/gid -1 has special significance in os.lchown
- def _procuid(self, uid):
- if uid is None or uid == "-":
- return -1
- elif uid.isdigit():
- return int(uid)
- else:
- return pwd.getpwnam(uid).pw_uid
-
- def _procgid(self, gid):
- if gid is None or gid == "-":
- return -1
- elif gid.isdigit():
- return int(gid)
- else:
- return grp.getgrnam(gid).gr_gid
-
- # Use for debugging the entries
- def __str__(self):
- if self.link:
- return "%s link %s" % (self.path, self.link)
- else:
- mode = "-"
- if self.mode:
- mode = "0%o" % self.mode
- fmode = "-"
- if self.fmode:
- fmode = "0%o" % self.fmode
- uid = self._mapugid(self.uid)
- gid = self._mapugid(self.gid)
- fuid = self._mapugid(self.fuid)
- fgid = self._mapugid(self.fgid)
- return "%s %s %s %s %s %s %s %s" % (self.path, mode, uid, gid, self.walk, fmode, fuid, fgid)
-
- def _mapugid(self, id):
- if id is None or id == -1:
- return "-"
- else:
- return "%d" % id
-
- # Fix the permission, owner and group of path
- def fix_perms(path, mode, uid, gid, dir):
- if mode and not os.path.islink(path):
- #bb.note("Fixup Perms: chmod 0%o %s" % (mode, dir))
- os.chmod(path, mode)
- # -1 is a special value that means don't change the uid/gid
- # if they are BOTH -1, don't bother to lchown
- if not (uid == -1 and gid == -1):
- #bb.note("Fixup Perms: lchown %d:%d %s" % (uid, gid, dir))
- os.lchown(path, uid, gid)
-
- # Return a list of configuration files based on either the default
- # files/fs-perms.txt or the contents of FILESYSTEM_PERMS_TABLES
- # paths are resolved via BBPATH
- def get_fs_perms_list(d):
- str = ""
- bbpath = d.getVar('BBPATH')
- fs_perms_tables = d.getVar('FILESYSTEM_PERMS_TABLES') or ""
- for conf_file in fs_perms_tables.split():
- confpath = bb.utils.which(bbpath, conf_file)
- if confpath:
- str += " %s" % bb.utils.which(bbpath, conf_file)
- else:
- bb.warn("cannot find %s specified in FILESYSTEM_PERMS_TABLES" % conf_file)
- return str
-
-
-
- dvar = d.getVar('PKGD')
-
- fs_perms_table = {}
- fs_link_table = {}
-
- # By default all of the standard directories specified in
- # bitbake.conf will get 0755 root:root.
- target_path_vars = [ 'base_prefix',
- 'prefix',
- 'exec_prefix',
- 'base_bindir',
- 'base_sbindir',
- 'base_libdir',
- 'datadir',
- 'sysconfdir',
- 'servicedir',
- 'sharedstatedir',
- 'localstatedir',
- 'infodir',
- 'mandir',
- 'docdir',
- 'bindir',
- 'sbindir',
- 'libexecdir',
- 'libdir',
- 'includedir',
- 'oldincludedir' ]
-
- for path in target_path_vars:
- dir = d.getVar(path) or ""
- if dir == "":
- continue
- fs_perms_table[dir] = fs_perms_entry(d.expand("%s 0755 root root false - - -" % (dir)))
-
- # Now we actually load from the configuration files
- for conf in get_fs_perms_list(d).split():
- if not os.path.exists(conf):
- continue
- with open(conf) as f:
- for line in f:
- if line.startswith('#'):
- continue
- lsplit = line.split()
- if len(lsplit) == 0:
- continue
- if len(lsplit) != 8 and not (len(lsplit) == 3 and lsplit[1].lower() == "link"):
- msg = "Fixup perms: %s invalid line: %s" % (conf, line)
- oe.qa.handle_error("perm-line", msg, d)
- continue
- entry = fs_perms_entry(d.expand(line))
- if entry and entry.path:
- if entry.link:
- fs_link_table[entry.path] = entry
- if entry.path in fs_perms_table:
- fs_perms_table.pop(entry.path)
- else:
- fs_perms_table[entry.path] = entry
- if entry.path in fs_link_table:
- fs_link_table.pop(entry.path)
-
- # Debug -- list out in-memory table
- #for dir in fs_perms_table:
- # bb.note("Fixup Perms: %s: %s" % (dir, str(fs_perms_table[dir])))
- #for link in fs_link_table:
- # bb.note("Fixup Perms: %s: %s" % (link, str(fs_link_table[link])))
-
- # We process links first, so we can go back and fixup directory ownership
- # for any newly created directories
- # Process in sorted order so /run gets created before /run/lock, etc.
- for entry in sorted(fs_link_table.values(), key=lambda x: x.link):
- link = entry.link
- dir = entry.path
- origin = dvar + dir
- if not (cpath.exists(origin) and cpath.isdir(origin) and not cpath.islink(origin)):
- continue
-
- if link[0] == "/":
- target = dvar + link
- ptarget = link
- else:
- target = os.path.join(os.path.dirname(origin), link)
- ptarget = os.path.join(os.path.dirname(dir), link)
- if os.path.exists(target):
- msg = "Fixup Perms: Unable to correct directory link, target already exists: %s -> %s" % (dir, ptarget)
- oe.qa.handle_error("perm-link", msg, d)
- continue
-
- # Create path to move directory to, move it, and then setup the symlink
- bb.utils.mkdirhier(os.path.dirname(target))
- #bb.note("Fixup Perms: Rename %s -> %s" % (dir, ptarget))
- bb.utils.rename(origin, target)
- #bb.note("Fixup Perms: Link %s -> %s" % (dir, link))
- os.symlink(link, origin)
-
- for dir in fs_perms_table:
- origin = dvar + dir
- if not (cpath.exists(origin) and cpath.isdir(origin)):
- continue
-
- fix_perms(origin, fs_perms_table[dir].mode, fs_perms_table[dir].uid, fs_perms_table[dir].gid, dir)
-
- if fs_perms_table[dir].walk == 'true':
- for root, dirs, files in os.walk(origin):
- for dr in dirs:
- each_dir = os.path.join(root, dr)
- fix_perms(each_dir, fs_perms_table[dir].mode, fs_perms_table[dir].uid, fs_perms_table[dir].gid, dir)
- for f in files:
- each_file = os.path.join(root, f)
- fix_perms(each_file, fs_perms_table[dir].fmode, fs_perms_table[dir].fuid, fs_perms_table[dir].fgid, dir)
-}
-
-def package_debug_vars(d):
- # We default to '.debug' style
- if d.getVar('PACKAGE_DEBUG_SPLIT_STYLE') == 'debug-file-directory':
- # Single debug-file-directory style debug info
- debug_vars = {
- "append": ".debug",
- "staticappend": "",
- "dir": "",
- "staticdir": "",
- "libdir": "/usr/lib/debug",
- "staticlibdir": "/usr/lib/debug-static",
- "srcdir": "/usr/src/debug",
- }
- elif d.getVar('PACKAGE_DEBUG_SPLIT_STYLE') == 'debug-without-src':
- # Original OE-core, a.k.a. ".debug", style debug info, but without sources in /usr/src/debug
- debug_vars = {
- "append": "",
- "staticappend": "",
- "dir": "/.debug",
- "staticdir": "/.debug-static",
- "libdir": "",
- "staticlibdir": "",
- "srcdir": "",
- }
- elif d.getVar('PACKAGE_DEBUG_SPLIT_STYLE') == 'debug-with-srcpkg':
- debug_vars = {
- "append": "",
- "staticappend": "",
- "dir": "/.debug",
- "staticdir": "/.debug-static",
- "libdir": "",
- "staticlibdir": "",
- "srcdir": "/usr/src/debug",
- }
- else:
- # Original OE-core, a.k.a. ".debug", style debug info
- debug_vars = {
- "append": "",
- "staticappend": "",
- "dir": "/.debug",
- "staticdir": "/.debug-static",
- "libdir": "",
- "staticlibdir": "",
- "srcdir": "/usr/src/debug",
- }
-
- return debug_vars
-
-python split_and_strip_files () {
- import stat, errno
- import subprocess
-
- dvar = d.getVar('PKGD')
- pn = d.getVar('PN')
- hostos = d.getVar('HOST_OS')
-
- oldcwd = os.getcwd()
- os.chdir(dvar)
-
- dv = package_debug_vars(d)
-
- #
- # First lets figure out all of the files we may have to process ... do this only once!
- #
- elffiles = {}
- symlinks = {}
- staticlibs = []
- inodes = {}
- libdir = os.path.abspath(dvar + os.sep + d.getVar("libdir"))
- baselibdir = os.path.abspath(dvar + os.sep + d.getVar("base_libdir"))
- skipfiles = (d.getVar("INHIBIT_PACKAGE_STRIP_FILES") or "").split()
- if (d.getVar('INHIBIT_PACKAGE_STRIP') != '1' or \
- d.getVar('INHIBIT_PACKAGE_DEBUG_SPLIT') != '1'):
- checkelf = {}
- checkelflinks = {}
- for root, dirs, files in cpath.walk(dvar):
- for f in files:
- file = os.path.join(root, f)
-
- # Skip debug files
- if dv["append"] and file.endswith(dv["append"]):
- continue
- if dv["dir"] and dv["dir"] in os.path.dirname(file[len(dvar):]):
- continue
-
- if file in skipfiles:
- continue
-
- if oe.package.is_static_lib(file):
- staticlibs.append(file)
- continue
-
- try:
- ltarget = cpath.realpath(file, dvar, False)
- s = cpath.lstat(ltarget)
- except OSError as e:
- (err, strerror) = e.args
- if err != errno.ENOENT:
- raise
- # Skip broken symlinks
- continue
- if not s:
- continue
- # Check its an executable
- if (s[stat.ST_MODE] & stat.S_IXUSR) or (s[stat.ST_MODE] & stat.S_IXGRP) \
- or (s[stat.ST_MODE] & stat.S_IXOTH) \
- or ((file.startswith(libdir) or file.startswith(baselibdir)) \
- and (".so" in f or ".node" in f)) \
- or (f.startswith('vmlinux') or ".ko" in f):
-
- if cpath.islink(file):
- checkelflinks[file] = ltarget
- continue
- # Use a reference of device ID and inode number to identify files
- file_reference = "%d_%d" % (s.st_dev, s.st_ino)
- checkelf[file] = (file, file_reference)
-
- results = oe.utils.multiprocess_launch(oe.package.is_elf, checkelflinks.values(), d)
- results_map = {}
- for (ltarget, elf_file) in results:
- results_map[ltarget] = elf_file
- for file in checkelflinks:
- ltarget = checkelflinks[file]
- # If it's a symlink, and points to an ELF file, we capture the readlink target
- if results_map[ltarget]:
- target = os.readlink(file)
- #bb.note("Sym: %s (%d)" % (ltarget, results_map[ltarget]))
- symlinks[file] = target
-
- results = oe.utils.multiprocess_launch(oe.package.is_elf, checkelf.keys(), d)
-
- # Sort results by file path. This ensures that the files are always
- # processed in the same order, which is important to make sure builds
- # are reproducible when dealing with hardlinks
- results.sort(key=lambda x: x[0])
-
- for (file, elf_file) in results:
- # It's a file (or hardlink), not a link
- # ...but is it ELF, and is it already stripped?
- if elf_file & 1:
- if elf_file & 2:
- if 'already-stripped' in (d.getVar('INSANE_SKIP:' + pn) or "").split():
- bb.note("Skipping file %s from %s for already-stripped QA test" % (file[len(dvar):], pn))
- else:
- msg = "File '%s' from %s was already stripped, this will prevent future debugging!" % (file[len(dvar):], pn)
- oe.qa.handle_error("already-stripped", msg, d)
- continue
-
- # At this point we have an unstripped elf file. We need to:
- # a) Make sure any file we strip is not hardlinked to anything else outside this tree
- # b) Only strip any hardlinked file once (no races)
- # c) Track any hardlinks between files so that we can reconstruct matching debug file hardlinks
-
- # Use a reference of device ID and inode number to identify files
- file_reference = checkelf[file][1]
- if file_reference in inodes:
- os.unlink(file)
- os.link(inodes[file_reference][0], file)
- inodes[file_reference].append(file)
- else:
- inodes[file_reference] = [file]
- # break hardlink
- bb.utils.break_hardlinks(file)
- elffiles[file] = elf_file
- # Modified the file so clear the cache
- cpath.updatecache(file)
-
- def strip_pkgd_prefix(f):
- nonlocal dvar
-
- if f.startswith(dvar):
- return f[len(dvar):]
-
- return f
-
- #
- # First lets process debug splitting
- #
- if (d.getVar('INHIBIT_PACKAGE_DEBUG_SPLIT') != '1'):
- results = oe.utils.multiprocess_launch(splitdebuginfo, list(elffiles), d, extraargs=(dvar, dv, d))
-
- if dv["srcdir"] and not hostos.startswith("mingw"):
- if (d.getVar('PACKAGE_DEBUG_STATIC_SPLIT') == '1'):
- results = oe.utils.multiprocess_launch(splitstaticdebuginfo, staticlibs, d, extraargs=(dvar, dv, d))
- else:
- for file in staticlibs:
- results.append( (file,source_info(file, d)) )
-
- d.setVar("PKGDEBUGSOURCES", {strip_pkgd_prefix(f): sorted(s) for f, s in results})
-
- sources = set()
- for r in results:
- sources.update(r[1])
-
- # Hardlink our debug symbols to the other hardlink copies
- for ref in inodes:
- if len(inodes[ref]) == 1:
- continue
-
- target = inodes[ref][0][len(dvar):]
- for file in inodes[ref][1:]:
- src = file[len(dvar):]
- dest = dv["libdir"] + os.path.dirname(src) + dv["dir"] + "/" + os.path.basename(target) + dv["append"]
- fpath = dvar + dest
- ftarget = dvar + dv["libdir"] + os.path.dirname(target) + dv["dir"] + "/" + os.path.basename(target) + dv["append"]
- bb.utils.mkdirhier(os.path.dirname(fpath))
- # Only one hardlink of separated debug info file in each directory
- if not os.access(fpath, os.R_OK):
- #bb.note("Link %s -> %s" % (fpath, ftarget))
- os.link(ftarget, fpath)
-
- # Create symlinks for all cases we were able to split symbols
- for file in symlinks:
- src = file[len(dvar):]
- dest = dv["libdir"] + os.path.dirname(src) + dv["dir"] + "/" + os.path.basename(src) + dv["append"]
- fpath = dvar + dest
- # Skip it if the target doesn't exist
- try:
- s = os.stat(fpath)
- except OSError as e:
- (err, strerror) = e.args
- if err != errno.ENOENT:
- raise
- continue
-
- ltarget = symlinks[file]
- lpath = os.path.dirname(ltarget)
- lbase = os.path.basename(ltarget)
- ftarget = ""
- if lpath and lpath != ".":
- ftarget += lpath + dv["dir"] + "/"
- ftarget += lbase + dv["append"]
- if lpath.startswith(".."):
- ftarget = os.path.join("..", ftarget)
- bb.utils.mkdirhier(os.path.dirname(fpath))
- #bb.note("Symlink %s -> %s" % (fpath, ftarget))
- os.symlink(ftarget, fpath)
-
- # Process the dv["srcdir"] if requested...
- # This copies and places the referenced sources for later debugging...
- copydebugsources(dv["srcdir"], sources, d)
- #
- # End of debug splitting
- #
-
- #
- # Now lets go back over things and strip them
- #
- if (d.getVar('INHIBIT_PACKAGE_STRIP') != '1'):
- strip = d.getVar("STRIP")
- sfiles = []
- for file in elffiles:
- elf_file = int(elffiles[file])
- #bb.note("Strip %s" % file)
- sfiles.append((file, elf_file, strip))
- if (d.getVar('PACKAGE_STRIP_STATIC') == '1' or d.getVar('PACKAGE_DEBUG_STATIC_SPLIT') == '1'):
- for f in staticlibs:
- sfiles.append((f, 16, strip))
-
- oe.utils.multiprocess_launch(oe.package.runstrip, sfiles, d)
-
- # Build "minidebuginfo" and reinject it back into the stripped binaries
- if d.getVar('PACKAGE_MINIDEBUGINFO') == '1':
- oe.utils.multiprocess_launch(inject_minidebuginfo, list(elffiles), d,
- extraargs=(dvar, dv, d))
-
- #
- # End of strip
- #
- os.chdir(oldcwd)
-}
-
-python populate_packages () {
- import glob, re
-
- workdir = d.getVar('WORKDIR')
- outdir = d.getVar('DEPLOY_DIR')
- dvar = d.getVar('PKGD')
- packages = d.getVar('PACKAGES').split()
- pn = d.getVar('PN')
-
- bb.utils.mkdirhier(outdir)
- os.chdir(dvar)
-
- autodebug = not (d.getVar("NOAUTOPACKAGEDEBUG") or False)
-
- split_source_package = (d.getVar('PACKAGE_DEBUG_SPLIT_STYLE') == 'debug-with-srcpkg')
-
- # If debug-with-srcpkg mode is enabled then add the source package if it
- # doesn't exist and add the source file contents to the source package.
- if split_source_package:
- src_package_name = ('%s-src' % d.getVar('PN'))
- if not src_package_name in packages:
- packages.append(src_package_name)
- d.setVar('FILES:%s' % src_package_name, '/usr/src/debug')
-
- # Sanity check PACKAGES for duplicates
- # Sanity should be moved to sanity.bbclass once we have the infrastructure
- package_dict = {}
-
- for i, pkg in enumerate(packages):
- if pkg in package_dict:
- msg = "%s is listed in PACKAGES multiple times, this leads to packaging errors." % pkg
- oe.qa.handle_error("packages-list", msg, d)
- # Ensure the source package gets the chance to pick up the source files
- # before the debug package by ordering it first in PACKAGES. Whether it
- # actually picks up any source files is controlled by
- # PACKAGE_DEBUG_SPLIT_STYLE.
- elif pkg.endswith("-src"):
- package_dict[pkg] = (10, i)
- elif autodebug and pkg.endswith("-dbg"):
- package_dict[pkg] = (30, i)
- else:
- package_dict[pkg] = (50, i)
- packages = sorted(package_dict.keys(), key=package_dict.get)
- d.setVar('PACKAGES', ' '.join(packages))
- pkgdest = d.getVar('PKGDEST')
-
- seen = []
-
- # os.mkdir masks the permissions with umask so we have to unset it first
- oldumask = os.umask(0)
-
- debug = []
- for root, dirs, files in cpath.walk(dvar):
- dir = root[len(dvar):]
- if not dir:
- dir = os.sep
- for f in (files + dirs):
- path = "." + os.path.join(dir, f)
- if "/.debug/" in path or "/.debug-static/" in path or path.endswith("/.debug"):
- debug.append(path)
-
- for pkg in packages:
- root = os.path.join(pkgdest, pkg)
- bb.utils.mkdirhier(root)
-
- filesvar = d.getVar('FILES:%s' % pkg) or ""
- if "//" in filesvar:
- msg = "FILES variable for package %s contains '//' which is invalid. Attempting to fix this but you should correct the metadata.\n" % pkg
- oe.qa.handle_error("files-invalid", msg, d)
- filesvar.replace("//", "/")
-
- origfiles = filesvar.split()
- files, symlink_paths = files_from_filevars(origfiles)
-
- if autodebug and pkg.endswith("-dbg"):
- files.extend(debug)
-
- for file in files:
- if (not cpath.islink(file)) and (not cpath.exists(file)):
- continue
- if file in seen:
- continue
- seen.append(file)
-
- def mkdir(src, dest, p):
- src = os.path.join(src, p)
- dest = os.path.join(dest, p)
- fstat = cpath.stat(src)
- os.mkdir(dest)
- os.chmod(dest, fstat.st_mode)
- os.chown(dest, fstat.st_uid, fstat.st_gid)
- if p not in seen:
- seen.append(p)
- cpath.updatecache(dest)
-
- def mkdir_recurse(src, dest, paths):
- if cpath.exists(dest + '/' + paths):
- return
- while paths.startswith("./"):
- paths = paths[2:]
- p = "."
- for c in paths.split("/"):
- p = os.path.join(p, c)
- if not cpath.exists(os.path.join(dest, p)):
- mkdir(src, dest, p)
-
- if cpath.isdir(file) and not cpath.islink(file):
- mkdir_recurse(dvar, root, file)
- continue
-
- mkdir_recurse(dvar, root, os.path.dirname(file))
- fpath = os.path.join(root,file)
- if not cpath.islink(file):
- os.link(file, fpath)
- continue
- ret = bb.utils.copyfile(file, fpath)
- if ret is False or ret == 0:
- bb.fatal("File population failed")
-
- # Check if symlink paths exist
- for file in symlink_paths:
- if not os.path.exists(os.path.join(root,file)):
- bb.fatal("File '%s' cannot be packaged into '%s' because its "
- "parent directory structure does not exist. One of "
- "its parent directories is a symlink whose target "
- "directory is not included in the package." %
- (file, pkg))
-
- os.umask(oldumask)
- os.chdir(workdir)
-
- # Handle excluding packages with incompatible licenses
- package_list = []
- for pkg in packages:
- licenses = d.getVar('_exclude_incompatible-' + pkg)
- if licenses:
- msg = "Excluding %s from packaging as it has incompatible license(s): %s" % (pkg, licenses)
- oe.qa.handle_error("incompatible-license", msg, d)
- else:
- package_list.append(pkg)
- d.setVar('PACKAGES', ' '.join(package_list))
-
- unshipped = []
- for root, dirs, files in cpath.walk(dvar):
- dir = root[len(dvar):]
- if not dir:
- dir = os.sep
- for f in (files + dirs):
- path = os.path.join(dir, f)
- if ('.' + path) not in seen:
- unshipped.append(path)
-
- if unshipped != []:
- msg = pn + ": Files/directories were installed but not shipped in any package:"
- if "installed-vs-shipped" in (d.getVar('INSANE_SKIP:' + pn) or "").split():
- bb.note("Package %s skipping QA tests: installed-vs-shipped" % pn)
- else:
- for f in unshipped:
- msg = msg + "\n " + f
- msg = msg + "\nPlease set FILES such that these items are packaged. Alternatively if they are unneeded, avoid installing them or delete them within do_install.\n"
- msg = msg + "%s: %d installed and not shipped files." % (pn, len(unshipped))
- oe.qa.handle_error("installed-vs-shipped", msg, d)
-}
-populate_packages[dirs] = "${D}"
-
-python package_fixsymlinks () {
- import errno
- pkgdest = d.getVar('PKGDEST')
- packages = d.getVar("PACKAGES", False).split()
-
- dangling_links = {}
- pkg_files = {}
- for pkg in packages:
- dangling_links[pkg] = []
- pkg_files[pkg] = []
- inst_root = os.path.join(pkgdest, pkg)
- for path in pkgfiles[pkg]:
- rpath = path[len(inst_root):]
- pkg_files[pkg].append(rpath)
- rtarget = cpath.realpath(path, inst_root, True, assume_dir = True)
- if not cpath.lexists(rtarget):
- dangling_links[pkg].append(os.path.normpath(rtarget[len(inst_root):]))
-
- newrdepends = {}
- for pkg in dangling_links:
- for l in dangling_links[pkg]:
- found = False
- bb.debug(1, "%s contains dangling link %s" % (pkg, l))
- for p in packages:
- if l in pkg_files[p]:
- found = True
- bb.debug(1, "target found in %s" % p)
- if p == pkg:
- break
- if pkg not in newrdepends:
- newrdepends[pkg] = []
- newrdepends[pkg].append(p)
- break
- if found == False:
- bb.note("%s contains dangling symlink to %s" % (pkg, l))
-
- for pkg in newrdepends:
- rdepends = bb.utils.explode_dep_versions2(d.getVar('RDEPENDS:' + pkg) or "")
- for p in newrdepends[pkg]:
- if p not in rdepends:
- rdepends[p] = []
- d.setVar('RDEPENDS:' + pkg, bb.utils.join_deps(rdepends, commasep=False))
-}
-
-
-python package_package_name_hook() {
- """
- A package_name_hook function can be used to rewrite the package names by
- changing PKG. For an example, see debian.bbclass.
- """
- pass
-}
-
-EXPORT_FUNCTIONS package_name_hook
-
-
-PKGDESTWORK = "${WORKDIR}/pkgdata"
-
-PKGDATA_VARS = "PN PE PV PR PKGE PKGV PKGR LICENSE DESCRIPTION SUMMARY RDEPENDS RPROVIDES RRECOMMENDS RSUGGESTS RREPLACES RCONFLICTS SECTION PKG ALLOW_EMPTY FILES CONFFILES FILES_INFO PACKAGE_ADD_METADATA pkg_postinst pkg_postrm pkg_preinst pkg_prerm"
-
-python emit_pkgdata() {
- from glob import glob
- import json
- import bb.compress.zstd
-
- def process_postinst_on_target(pkg, mlprefix):
- pkgval = d.getVar('PKG:%s' % pkg)
- if pkgval is None:
- pkgval = pkg
-
- defer_fragment = """
-if [ -n "$D" ]; then
- $INTERCEPT_DIR/postinst_intercept delay_to_first_boot %s mlprefix=%s
- exit 0
-fi
-""" % (pkgval, mlprefix)
-
- postinst = d.getVar('pkg_postinst:%s' % pkg)
- postinst_ontarget = d.getVar('pkg_postinst_ontarget:%s' % pkg)
-
- if postinst_ontarget:
- bb.debug(1, 'adding deferred pkg_postinst_ontarget() to pkg_postinst() for %s' % pkg)
- if not postinst:
- postinst = '#!/bin/sh\n'
- postinst += defer_fragment
- postinst += postinst_ontarget
- d.setVar('pkg_postinst:%s' % pkg, postinst)
-
- def add_set_e_to_scriptlets(pkg):
- for scriptlet_name in ('pkg_preinst', 'pkg_postinst', 'pkg_prerm', 'pkg_postrm'):
- scriptlet = d.getVar('%s:%s' % (scriptlet_name, pkg))
- if scriptlet:
- scriptlet_split = scriptlet.split('\n')
- if scriptlet_split[0].startswith("#!"):
- scriptlet = scriptlet_split[0] + "\nset -e\n" + "\n".join(scriptlet_split[1:])
- else:
- scriptlet = "set -e\n" + "\n".join(scriptlet_split[0:])
- d.setVar('%s:%s' % (scriptlet_name, pkg), scriptlet)
-
- def write_if_exists(f, pkg, var):
- def encode(str):
- import codecs
- c = codecs.getencoder("unicode_escape")
- return c(str)[0].decode("latin1")
-
- val = d.getVar('%s:%s' % (var, pkg))
- if val:
- f.write('%s:%s: %s\n' % (var, pkg, encode(val)))
- return val
- val = d.getVar('%s' % (var))
- if val:
- f.write('%s: %s\n' % (var, encode(val)))
- return val
-
- def write_extra_pkgs(variants, pn, packages, pkgdatadir):
- for variant in variants:
- with open("%s/%s-%s" % (pkgdatadir, variant, pn), 'w') as fd:
- fd.write("PACKAGES: %s\n" % ' '.join(
- map(lambda pkg: '%s-%s' % (variant, pkg), packages.split())))
-
- def write_extra_runtime_pkgs(variants, packages, pkgdatadir):
- for variant in variants:
- for pkg in packages.split():
- ml_pkg = "%s-%s" % (variant, pkg)
- subdata_file = "%s/runtime/%s" % (pkgdatadir, ml_pkg)
- with open(subdata_file, 'w') as fd:
- fd.write("PKG:%s: %s" % (ml_pkg, pkg))
-
- packages = d.getVar('PACKAGES')
- pkgdest = d.getVar('PKGDEST')
- pkgdatadir = d.getVar('PKGDESTWORK')
-
- data_file = pkgdatadir + d.expand("/${PN}")
- with open(data_file, 'w') as fd:
- fd.write("PACKAGES: %s\n" % packages)
-
- pkgdebugsource = d.getVar("PKGDEBUGSOURCES") or []
-
- pn = d.getVar('PN')
- global_variants = (d.getVar('MULTILIB_GLOBAL_VARIANTS') or "").split()
- variants = (d.getVar('MULTILIB_VARIANTS') or "").split()
-
- if bb.data.inherits_class('kernel', d) or bb.data.inherits_class('module-base', d):
- write_extra_pkgs(variants, pn, packages, pkgdatadir)
-
- if bb.data.inherits_class('allarch', d) and not variants \
- and not bb.data.inherits_class('packagegroup', d):
- write_extra_pkgs(global_variants, pn, packages, pkgdatadir)
-
- workdir = d.getVar('WORKDIR')
-
- for pkg in packages.split():
- pkgval = d.getVar('PKG:%s' % pkg)
- if pkgval is None:
- pkgval = pkg
- d.setVar('PKG:%s' % pkg, pkg)
-
- extended_data = {
- "files_info": {}
- }
-
- pkgdestpkg = os.path.join(pkgdest, pkg)
- files = {}
- files_extra = {}
- total_size = 0
- seen = set()
- for f in pkgfiles[pkg]:
- fpath = os.sep + os.path.relpath(f, pkgdestpkg)
-
- fstat = os.lstat(f)
- files[fpath] = fstat.st_size
-
- extended_data["files_info"].setdefault(fpath, {})
- extended_data["files_info"][fpath]['size'] = fstat.st_size
-
- if fstat.st_ino not in seen:
- seen.add(fstat.st_ino)
- total_size += fstat.st_size
-
- if fpath in pkgdebugsource:
- extended_data["files_info"][fpath]['debugsrc'] = pkgdebugsource[fpath]
- del pkgdebugsource[fpath]
-
- d.setVar('FILES_INFO:' + pkg , json.dumps(files, sort_keys=True))
-
- process_postinst_on_target(pkg, d.getVar("MLPREFIX"))
- add_set_e_to_scriptlets(pkg)
-
- subdata_file = pkgdatadir + "/runtime/%s" % pkg
- with open(subdata_file, 'w') as sf:
- for var in (d.getVar('PKGDATA_VARS') or "").split():
- val = write_if_exists(sf, pkg, var)
-
- write_if_exists(sf, pkg, 'FILERPROVIDESFLIST')
- for dfile in sorted((d.getVar('FILERPROVIDESFLIST:' + pkg) or "").split()):
- write_if_exists(sf, pkg, 'FILERPROVIDES:' + dfile)
-
- write_if_exists(sf, pkg, 'FILERDEPENDSFLIST')
- for dfile in sorted((d.getVar('FILERDEPENDSFLIST:' + pkg) or "").split()):
- write_if_exists(sf, pkg, 'FILERDEPENDS:' + dfile)
-
- sf.write('%s:%s: %d\n' % ('PKGSIZE', pkg, total_size))
-
- subdata_extended_file = pkgdatadir + "/extended/%s.json.zstd" % pkg
- num_threads = int(d.getVar("BB_NUMBER_THREADS"))
- with bb.compress.zstd.open(subdata_extended_file, "wt", encoding="utf-8", num_threads=num_threads) as f:
- json.dump(extended_data, f, sort_keys=True, separators=(",", ":"))
-
- # Symlinks needed for rprovides lookup
- rprov = d.getVar('RPROVIDES:%s' % pkg) or d.getVar('RPROVIDES')
- if rprov:
- for p in bb.utils.explode_deps(rprov):
- subdata_sym = pkgdatadir + "/runtime-rprovides/%s/%s" % (p, pkg)
- bb.utils.mkdirhier(os.path.dirname(subdata_sym))
- oe.path.symlink("../../runtime/%s" % pkg, subdata_sym, True)
-
- allow_empty = d.getVar('ALLOW_EMPTY:%s' % pkg)
- if not allow_empty:
- allow_empty = d.getVar('ALLOW_EMPTY')
- root = "%s/%s" % (pkgdest, pkg)
- os.chdir(root)
- g = glob('*')
- if g or allow_empty == "1":
- # Symlinks needed for reverse lookups (from the final package name)
- subdata_sym = pkgdatadir + "/runtime-reverse/%s" % pkgval
- oe.path.symlink("../runtime/%s" % pkg, subdata_sym, True)
-
- packagedfile = pkgdatadir + '/runtime/%s.packaged' % pkg
- open(packagedfile, 'w').close()
-
- if bb.data.inherits_class('kernel', d) or bb.data.inherits_class('module-base', d):
- write_extra_runtime_pkgs(variants, packages, pkgdatadir)
-
- if bb.data.inherits_class('allarch', d) and not variants \
- and not bb.data.inherits_class('packagegroup', d):
- write_extra_runtime_pkgs(global_variants, packages, pkgdatadir)
-
-}
-emit_pkgdata[dirs] = "${PKGDESTWORK}/runtime ${PKGDESTWORK}/runtime-reverse ${PKGDESTWORK}/runtime-rprovides ${PKGDESTWORK}/extended"
-emit_pkgdata[vardepsexclude] = "BB_NUMBER_THREADS"
-
-ldconfig_postinst_fragment() {
-if [ x"$D" = "x" ]; then
- if [ -x /sbin/ldconfig ]; then /sbin/ldconfig ; fi
-fi
-}
-
-RPMDEPS = "${STAGING_LIBDIR_NATIVE}/rpm/rpmdeps --alldeps --define '__font_provides %{nil}'"
-
-# Collect perfile run-time dependency metadata
-# Output:
-# FILERPROVIDESFLIST:pkg - list of all files w/ deps
-# FILERPROVIDES:filepath:pkg - per file dep
-#
-# FILERDEPENDSFLIST:pkg - list of all files w/ deps
-# FILERDEPENDS:filepath:pkg - per file dep
-
-python package_do_filedeps() {
- if d.getVar('SKIP_FILEDEPS') == '1':
- return
-
- pkgdest = d.getVar('PKGDEST')
- packages = d.getVar('PACKAGES')
- rpmdeps = d.getVar('RPMDEPS')
-
- def chunks(files, n):
- return [files[i:i+n] for i in range(0, len(files), n)]
-
- pkglist = []
- for pkg in packages.split():
- if d.getVar('SKIP_FILEDEPS:' + pkg) == '1':
- continue
- if pkg.endswith('-dbg') or pkg.endswith('-doc') or pkg.find('-locale-') != -1 or pkg.find('-localedata-') != -1 or pkg.find('-gconv-') != -1 or pkg.find('-charmap-') != -1 or pkg.startswith('kernel-module-') or pkg.endswith('-src'):
- continue
- for files in chunks(pkgfiles[pkg], 100):
- pkglist.append((pkg, files, rpmdeps, pkgdest))
-
- processed = oe.utils.multiprocess_launch(oe.package.filedeprunner, pkglist, d)
-
- provides_files = {}
- requires_files = {}
-
- for result in processed:
- (pkg, provides, requires) = result
-
- if pkg not in provides_files:
- provides_files[pkg] = []
- if pkg not in requires_files:
- requires_files[pkg] = []
-
- for file in sorted(provides):
- provides_files[pkg].append(file)
- key = "FILERPROVIDES:" + file + ":" + pkg
- d.appendVar(key, " " + " ".join(provides[file]))
-
- for file in sorted(requires):
- requires_files[pkg].append(file)
- key = "FILERDEPENDS:" + file + ":" + pkg
- d.appendVar(key, " " + " ".join(requires[file]))
-
- for pkg in requires_files:
- d.setVar("FILERDEPENDSFLIST:" + pkg, " ".join(sorted(requires_files[pkg])))
- for pkg in provides_files:
- d.setVar("FILERPROVIDESFLIST:" + pkg, " ".join(sorted(provides_files[pkg])))
-}
-
-SHLIBSDIRS = "${WORKDIR_PKGDATA}/${MLPREFIX}shlibs2"
-SHLIBSWORKDIR = "${PKGDESTWORK}/${MLPREFIX}shlibs2"
-
-python package_do_shlibs() {
- import itertools
- import re, pipes
- import subprocess
-
- exclude_shlibs = d.getVar('EXCLUDE_FROM_SHLIBS', False)
- if exclude_shlibs:
- bb.note("not generating shlibs")
- return
-
- lib_re = re.compile(r"^.*\.so")
- libdir_re = re.compile(r".*/%s$" % d.getVar('baselib'))
-
- packages = d.getVar('PACKAGES')
-
- shlib_pkgs = []
- exclusion_list = d.getVar("EXCLUDE_PACKAGES_FROM_SHLIBS")
- if exclusion_list:
- for pkg in packages.split():
- if pkg not in exclusion_list.split():
- shlib_pkgs.append(pkg)
- else:
- bb.note("not generating shlibs for %s" % pkg)
- else:
- shlib_pkgs = packages.split()
-
- hostos = d.getVar('HOST_OS')
-
- workdir = d.getVar('WORKDIR')
-
- ver = d.getVar('PKGV')
- if not ver:
- msg = "PKGV not defined"
- oe.qa.handle_error("pkgv-undefined", msg, d)
- return
-
- pkgdest = d.getVar('PKGDEST')
-
- shlibswork_dir = d.getVar('SHLIBSWORKDIR')
-
- def linux_so(file, pkg, pkgver, d):
- needs_ldconfig = False
- needed = set()
- sonames = set()
- renames = []
- ldir = os.path.dirname(file).replace(pkgdest + "/" + pkg, '')
- cmd = d.getVar('OBJDUMP') + " -p " + pipes.quote(file) + " 2>/dev/null"
- fd = os.popen(cmd)
- lines = fd.readlines()
- fd.close()
- rpath = tuple()
- for l in lines:
- m = re.match(r"\s+RPATH\s+([^\s]*)", l)
- if m:
- rpaths = m.group(1).replace("$ORIGIN", ldir).split(":")
- rpath = tuple(map(os.path.normpath, rpaths))
- for l in lines:
- m = re.match(r"\s+NEEDED\s+([^\s]*)", l)
- if m:
- dep = m.group(1)
- if dep not in needed:
- needed.add((dep, file, rpath))
- m = re.match(r"\s+SONAME\s+([^\s]*)", l)
- if m:
- this_soname = m.group(1)
- prov = (this_soname, ldir, pkgver)
- if not prov in sonames:
- # if library is private (only used by package) then do not build shlib for it
- import fnmatch
- if not private_libs or len([i for i in private_libs if fnmatch.fnmatch(this_soname, i)]) == 0:
- sonames.add(prov)
- if libdir_re.match(os.path.dirname(file)):
- needs_ldconfig = True
- if needs_ldconfig and snap_symlinks and (os.path.basename(file) != this_soname):
- renames.append((file, os.path.join(os.path.dirname(file), this_soname)))
- return (needs_ldconfig, needed, sonames, renames)
-
- def darwin_so(file, needed, sonames, renames, pkgver):
- if not os.path.exists(file):
- return
- ldir = os.path.dirname(file).replace(pkgdest + "/" + pkg, '')
-
- def get_combinations(base):
- #
- # Given a base library name, find all combinations of this split by "." and "-"
- #
- combos = []
- options = base.split(".")
- for i in range(1, len(options) + 1):
- combos.append(".".join(options[0:i]))
- options = base.split("-")
- for i in range(1, len(options) + 1):
- combos.append("-".join(options[0:i]))
- return combos
-
- if (file.endswith('.dylib') or file.endswith('.so')) and not pkg.endswith('-dev') and not pkg.endswith('-dbg') and not pkg.endswith('-src'):
- # Drop suffix
- name = os.path.basename(file).rsplit(".",1)[0]
- # Find all combinations
- combos = get_combinations(name)
- for combo in combos:
- if not combo in sonames:
- prov = (combo, ldir, pkgver)
- sonames.add(prov)
- if file.endswith('.dylib') or file.endswith('.so'):
- rpath = []
- p = subprocess.Popen([d.expand("${HOST_PREFIX}otool"), '-l', file], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
- out, err = p.communicate()
- # If returned successfully, process stdout for results
- if p.returncode == 0:
- for l in out.split("\n"):
- l = l.strip()
- if l.startswith('path '):
- rpath.append(l.split()[1])
-
- p = subprocess.Popen([d.expand("${HOST_PREFIX}otool"), '-L', file], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
- out, err = p.communicate()
- # If returned successfully, process stdout for results
- if p.returncode == 0:
- for l in out.split("\n"):
- l = l.strip()
- if not l or l.endswith(":"):
- continue
- if "is not an object file" in l:
- continue
- name = os.path.basename(l.split()[0]).rsplit(".", 1)[0]
- if name and name not in needed[pkg]:
- needed[pkg].add((name, file, tuple()))
-
- def mingw_dll(file, needed, sonames, renames, pkgver):
- if not os.path.exists(file):
- return
-
- if file.endswith(".dll"):
- # assume all dlls are shared objects provided by the package
- sonames.add((os.path.basename(file), os.path.dirname(file).replace(pkgdest + "/" + pkg, ''), pkgver))
-
- if (file.endswith(".dll") or file.endswith(".exe")):
- # use objdump to search for "DLL Name: .*\.dll"
- p = subprocess.Popen([d.expand("${HOST_PREFIX}objdump"), "-p", file], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
- out, err = p.communicate()
- # process the output, grabbing all .dll names
- if p.returncode == 0:
- for m in re.finditer(r"DLL Name: (.*?\.dll)$", out.decode(), re.MULTILINE | re.IGNORECASE):
- dllname = m.group(1)
- if dllname:
- needed[pkg].add((dllname, file, tuple()))
-
- if d.getVar('PACKAGE_SNAP_LIB_SYMLINKS') == "1":
- snap_symlinks = True
- else:
- snap_symlinks = False
-
- needed = {}
-
- shlib_provider = oe.package.read_shlib_providers(d)
-
- for pkg in shlib_pkgs:
- private_libs = d.getVar('PRIVATE_LIBS:' + pkg) or d.getVar('PRIVATE_LIBS') or ""
- private_libs = private_libs.split()
- needs_ldconfig = False
- bb.debug(2, "calculating shlib provides for %s" % pkg)
-
- pkgver = d.getVar('PKGV:' + pkg)
- if not pkgver:
- pkgver = d.getVar('PV_' + pkg)
- if not pkgver:
- pkgver = ver
-
- needed[pkg] = set()
- sonames = set()
- renames = []
- linuxlist = []
- for file in pkgfiles[pkg]:
- soname = None
- if cpath.islink(file):
- continue
- if hostos == "darwin" or hostos == "darwin8":
- darwin_so(file, needed, sonames, renames, pkgver)
- elif hostos.startswith("mingw"):
- mingw_dll(file, needed, sonames, renames, pkgver)
- elif os.access(file, os.X_OK) or lib_re.match(file):
- linuxlist.append(file)
-
- if linuxlist:
- results = oe.utils.multiprocess_launch(linux_so, linuxlist, d, extraargs=(pkg, pkgver, d))
- for r in results:
- ldconfig = r[0]
- needed[pkg] |= r[1]
- sonames |= r[2]
- renames.extend(r[3])
- needs_ldconfig = needs_ldconfig or ldconfig
-
- for (old, new) in renames:
- bb.note("Renaming %s to %s" % (old, new))
- bb.utils.rename(old, new)
- pkgfiles[pkg].remove(old)
-
- shlibs_file = os.path.join(shlibswork_dir, pkg + ".list")
- if len(sonames):
- with open(shlibs_file, 'w') as fd:
- for s in sorted(sonames):
- if s[0] in shlib_provider and s[1] in shlib_provider[s[0]]:
- (old_pkg, old_pkgver) = shlib_provider[s[0]][s[1]]
- if old_pkg != pkg:
- bb.warn('%s-%s was registered as shlib provider for %s, changing it to %s-%s because it was built later' % (old_pkg, old_pkgver, s[0], pkg, pkgver))
- bb.debug(1, 'registering %s-%s as shlib provider for %s' % (pkg, pkgver, s[0]))
- fd.write(s[0] + ':' + s[1] + ':' + s[2] + '\n')
- if s[0] not in shlib_provider:
- shlib_provider[s[0]] = {}
- shlib_provider[s[0]][s[1]] = (pkg, pkgver)
- if needs_ldconfig:
- bb.debug(1, 'adding ldconfig call to postinst for %s' % pkg)
- postinst = d.getVar('pkg_postinst:%s' % pkg)
- if not postinst:
- postinst = '#!/bin/sh\n'
- postinst += d.getVar('ldconfig_postinst_fragment')
- d.setVar('pkg_postinst:%s' % pkg, postinst)
- bb.debug(1, 'LIBNAMES: pkg %s sonames %s' % (pkg, sonames))
-
- assumed_libs = d.getVar('ASSUME_SHLIBS')
- if assumed_libs:
- libdir = d.getVar("libdir")
- for e in assumed_libs.split():
- l, dep_pkg = e.split(":")
- lib_ver = None
- dep_pkg = dep_pkg.rsplit("_", 1)
- if len(dep_pkg) == 2:
- lib_ver = dep_pkg[1]
- dep_pkg = dep_pkg[0]
- if l not in shlib_provider:
- shlib_provider[l] = {}
- shlib_provider[l][libdir] = (dep_pkg, lib_ver)
-
- libsearchpath = [d.getVar('libdir'), d.getVar('base_libdir')]
-
- for pkg in shlib_pkgs:
- bb.debug(2, "calculating shlib requirements for %s" % pkg)
-
- private_libs = d.getVar('PRIVATE_LIBS:' + pkg) or d.getVar('PRIVATE_LIBS') or ""
- private_libs = private_libs.split()
-
- deps = list()
- for n in needed[pkg]:
- # if n is in private libraries, don't try to search provider for it
- # this could cause problem in case some abc.bb provides private
- # /opt/abc/lib/libfoo.so.1 and contains /usr/bin/abc depending on system library libfoo.so.1
- # but skipping it is still better alternative than providing own
- # version and then adding runtime dependency for the same system library
- import fnmatch
- if private_libs and len([i for i in private_libs if fnmatch.fnmatch(n[0], i)]) > 0:
- bb.debug(2, '%s: Dependency %s covered by PRIVATE_LIBS' % (pkg, n[0]))
- continue
- if n[0] in shlib_provider.keys():
- shlib_provider_map = shlib_provider[n[0]]
- matches = set()
- for p in itertools.chain(list(n[2]), sorted(shlib_provider_map.keys()), libsearchpath):
- if p in shlib_provider_map:
- matches.add(p)
- if len(matches) > 1:
- matchpkgs = ', '.join([shlib_provider_map[match][0] for match in matches])
- bb.error("%s: Multiple shlib providers for %s: %s (used by files: %s)" % (pkg, n[0], matchpkgs, n[1]))
- elif len(matches) == 1:
- (dep_pkg, ver_needed) = shlib_provider_map[matches.pop()]
-
- bb.debug(2, '%s: Dependency %s requires package %s (used by files: %s)' % (pkg, n[0], dep_pkg, n[1]))
-
- if dep_pkg == pkg:
- continue
-
- if ver_needed:
- dep = "%s (>= %s)" % (dep_pkg, ver_needed)
- else:
- dep = dep_pkg
- if not dep in deps:
- deps.append(dep)
- continue
- bb.note("Couldn't find shared library provider for %s, used by files: %s" % (n[0], n[1]))
-
- deps_file = os.path.join(pkgdest, pkg + ".shlibdeps")
- if os.path.exists(deps_file):
- os.remove(deps_file)
- if deps:
- with open(deps_file, 'w') as fd:
- for dep in sorted(deps):
- fd.write(dep + '\n')
-}
-
-python package_do_pkgconfig () {
- import re
-
- packages = d.getVar('PACKAGES')
- workdir = d.getVar('WORKDIR')
- pkgdest = d.getVar('PKGDEST')
-
- shlibs_dirs = d.getVar('SHLIBSDIRS').split()
- shlibswork_dir = d.getVar('SHLIBSWORKDIR')
-
- pc_re = re.compile(r'(.*)\.pc$')
- var_re = re.compile(r'(.*)=(.*)')
- field_re = re.compile(r'(.*): (.*)')
-
- pkgconfig_provided = {}
- pkgconfig_needed = {}
- for pkg in packages.split():
- pkgconfig_provided[pkg] = []
- pkgconfig_needed[pkg] = []
- for file in sorted(pkgfiles[pkg]):
- m = pc_re.match(file)
- if m:
- pd = bb.data.init()
- name = m.group(1)
- pkgconfig_provided[pkg].append(os.path.basename(name))
- if not os.access(file, os.R_OK):
- continue
- with open(file, 'r') as f:
- lines = f.readlines()
- for l in lines:
- m = var_re.match(l)
- if m:
- name = m.group(1)
- val = m.group(2)
- pd.setVar(name, pd.expand(val))
- continue
- m = field_re.match(l)
- if m:
- hdr = m.group(1)
- exp = pd.expand(m.group(2))
- if hdr == 'Requires':
- pkgconfig_needed[pkg] += exp.replace(',', ' ').split()
-
- for pkg in packages.split():
- pkgs_file = os.path.join(shlibswork_dir, pkg + ".pclist")
- if pkgconfig_provided[pkg] != []:
- with open(pkgs_file, 'w') as f:
- for p in sorted(pkgconfig_provided[pkg]):
- f.write('%s\n' % p)
-
- # Go from least to most specific since the last one found wins
- for dir in reversed(shlibs_dirs):
- if not os.path.exists(dir):
- continue
- for file in sorted(os.listdir(dir)):
- m = re.match(r'^(.*)\.pclist$', file)
- if m:
- pkg = m.group(1)
- with open(os.path.join(dir, file)) as fd:
- lines = fd.readlines()
- pkgconfig_provided[pkg] = []
- for l in lines:
- pkgconfig_provided[pkg].append(l.rstrip())
-
- for pkg in packages.split():
- deps = []
- for n in pkgconfig_needed[pkg]:
- found = False
- for k in pkgconfig_provided.keys():
- if n in pkgconfig_provided[k]:
- if k != pkg and not (k in deps):
- deps.append(k)
- found = True
- if found == False:
- bb.note("couldn't find pkgconfig module '%s' in any package" % n)
- deps_file = os.path.join(pkgdest, pkg + ".pcdeps")
- if len(deps):
- with open(deps_file, 'w') as fd:
- for dep in deps:
- fd.write(dep + '\n')
-}
-
-def read_libdep_files(d):
- pkglibdeps = {}
- packages = d.getVar('PACKAGES').split()
- for pkg in packages:
- pkglibdeps[pkg] = {}
- for extension in ".shlibdeps", ".pcdeps", ".clilibdeps":
- depsfile = d.expand("${PKGDEST}/" + pkg + extension)
- if os.access(depsfile, os.R_OK):
- with open(depsfile) as fd:
- lines = fd.readlines()
- for l in lines:
- l.rstrip()
- deps = bb.utils.explode_dep_versions2(l)
- for dep in deps:
- if not dep in pkglibdeps[pkg]:
- pkglibdeps[pkg][dep] = deps[dep]
- return pkglibdeps
-
-python read_shlibdeps () {
- pkglibdeps = read_libdep_files(d)
-
- packages = d.getVar('PACKAGES').split()
- for pkg in packages:
- rdepends = bb.utils.explode_dep_versions2(d.getVar('RDEPENDS:' + pkg) or "")
- for dep in sorted(pkglibdeps[pkg]):
- # Add the dep if it's not already there, or if no comparison is set
- if dep not in rdepends:
- rdepends[dep] = []
- for v in pkglibdeps[pkg][dep]:
- if v not in rdepends[dep]:
- rdepends[dep].append(v)
- d.setVar('RDEPENDS:' + pkg, bb.utils.join_deps(rdepends, commasep=False))
-}
-
-python package_depchains() {
- """
- For a given set of prefix and postfix modifiers, make those packages
- RRECOMMENDS on the corresponding packages for its RDEPENDS.
-
- Example: If package A depends upon package B, and A's .bb emits an
- A-dev package, this would make A-dev Recommends: B-dev.
-
- If only one of a given suffix is specified, it will take the RRECOMMENDS
- based on the RDEPENDS of *all* other packages. If more than one of a given
- suffix is specified, its will only use the RDEPENDS of the single parent
- package.
- """
-
- packages = d.getVar('PACKAGES')
- postfixes = (d.getVar('DEPCHAIN_POST') or '').split()
- prefixes = (d.getVar('DEPCHAIN_PRE') or '').split()
-
- def pkg_adddeprrecs(pkg, base, suffix, getname, depends, d):
-
- #bb.note('depends for %s is %s' % (base, depends))
- rreclist = bb.utils.explode_dep_versions2(d.getVar('RRECOMMENDS:' + pkg) or "")
-
- for depend in sorted(depends):
- if depend.find('-native') != -1 or depend.find('-cross') != -1 or depend.startswith('virtual/'):
- #bb.note("Skipping %s" % depend)
- continue
- if depend.endswith('-dev'):
- depend = depend[:-4]
- if depend.endswith('-dbg'):
- depend = depend[:-4]
- pkgname = getname(depend, suffix)
- #bb.note("Adding %s for %s" % (pkgname, depend))
- if pkgname not in rreclist and pkgname != pkg:
- rreclist[pkgname] = []
-
- #bb.note('setting: RRECOMMENDS:%s=%s' % (pkg, ' '.join(rreclist)))
- d.setVar('RRECOMMENDS:%s' % pkg, bb.utils.join_deps(rreclist, commasep=False))
-
- def pkg_addrrecs(pkg, base, suffix, getname, rdepends, d):
-
- #bb.note('rdepends for %s is %s' % (base, rdepends))
- rreclist = bb.utils.explode_dep_versions2(d.getVar('RRECOMMENDS:' + pkg) or "")
-
- for depend in sorted(rdepends):
- if depend.find('virtual-locale-') != -1:
- #bb.note("Skipping %s" % depend)
- continue
- if depend.endswith('-dev'):
- depend = depend[:-4]
- if depend.endswith('-dbg'):
- depend = depend[:-4]
- pkgname = getname(depend, suffix)
- #bb.note("Adding %s for %s" % (pkgname, depend))
- if pkgname not in rreclist and pkgname != pkg:
- rreclist[pkgname] = []
-
- #bb.note('setting: RRECOMMENDS:%s=%s' % (pkg, ' '.join(rreclist)))
- d.setVar('RRECOMMENDS:%s' % pkg, bb.utils.join_deps(rreclist, commasep=False))
-
- def add_dep(list, dep):
- if dep not in list:
- list.append(dep)
-
- depends = []
- for dep in bb.utils.explode_deps(d.getVar('DEPENDS') or ""):
- add_dep(depends, dep)
-
- rdepends = []
- for pkg in packages.split():
- for dep in bb.utils.explode_deps(d.getVar('RDEPENDS:' + pkg) or ""):
- add_dep(rdepends, dep)
-
- #bb.note('rdepends is %s' % rdepends)
-
- def post_getname(name, suffix):
- return '%s%s' % (name, suffix)
- def pre_getname(name, suffix):
- return '%s%s' % (suffix, name)
-
- pkgs = {}
- for pkg in packages.split():
- for postfix in postfixes:
- if pkg.endswith(postfix):
- if not postfix in pkgs:
- pkgs[postfix] = {}
- pkgs[postfix][pkg] = (pkg[:-len(postfix)], post_getname)
-
- for prefix in prefixes:
- if pkg.startswith(prefix):
- if not prefix in pkgs:
- pkgs[prefix] = {}
- pkgs[prefix][pkg] = (pkg[:-len(prefix)], pre_getname)
-
- if "-dbg" in pkgs:
- pkglibdeps = read_libdep_files(d)
- pkglibdeplist = []
- for pkg in pkglibdeps:
- for k in pkglibdeps[pkg]:
- add_dep(pkglibdeplist, k)
- dbgdefaultdeps = ((d.getVar('DEPCHAIN_DBGDEFAULTDEPS') == '1') or (bb.data.inherits_class('packagegroup', d)))
-
- for suffix in pkgs:
- for pkg in pkgs[suffix]:
- if d.getVarFlag('RRECOMMENDS:' + pkg, 'nodeprrecs'):
- continue
- (base, func) = pkgs[suffix][pkg]
- if suffix == "-dev":
- pkg_adddeprrecs(pkg, base, suffix, func, depends, d)
- elif suffix == "-dbg":
- if not dbgdefaultdeps:
- pkg_addrrecs(pkg, base, suffix, func, pkglibdeplist, d)
- continue
- if len(pkgs[suffix]) == 1:
- pkg_addrrecs(pkg, base, suffix, func, rdepends, d)
- else:
- rdeps = []
- for dep in bb.utils.explode_deps(d.getVar('RDEPENDS:' + base) or ""):
- add_dep(rdeps, dep)
- pkg_addrrecs(pkg, base, suffix, func, rdeps, d)
-}
-
-# Since bitbake can't determine which variables are accessed during package
-# iteration, we need to list them here:
-PACKAGEVARS = "FILES RDEPENDS RRECOMMENDS SUMMARY DESCRIPTION RSUGGESTS RPROVIDES RCONFLICTS PKG ALLOW_EMPTY pkg_postinst pkg_postrm pkg_postinst_ontarget INITSCRIPT_NAME INITSCRIPT_PARAMS DEBIAN_NOAUTONAME ALTERNATIVE PKGE PKGV PKGR USERADD_PARAM GROUPADD_PARAM CONFFILES SYSTEMD_SERVICE LICENSE SECTION pkg_preinst pkg_prerm RREPLACES GROUPMEMS_PARAM SYSTEMD_AUTO_ENABLE SKIP_FILEDEPS PRIVATE_LIBS PACKAGE_ADD_METADATA"
-
-def gen_packagevar(d, pkgvars="PACKAGEVARS"):
- ret = []
- pkgs = (d.getVar("PACKAGES") or "").split()
- vars = (d.getVar(pkgvars) or "").split()
- for v in vars:
- ret.append(v)
- for p in pkgs:
- for v in vars:
- ret.append(v + ":" + p)
-
- # Ensure that changes to INCOMPATIBLE_LICENSE re-run do_package for
- # affected recipes.
- ret.append('_exclude_incompatible-%s' % p)
- return " ".join(ret)
-
-PACKAGE_PREPROCESS_FUNCS ?= ""
-# Functions for setting up PKGD
-PACKAGEBUILDPKGD ?= " \
- package_prepare_pkgdata \
- perform_packagecopy \
- ${PACKAGE_PREPROCESS_FUNCS} \
- split_and_strip_files \
- fixup_perms \
- "
-# Functions which split PKGD up into separate packages
-PACKAGESPLITFUNCS ?= " \
- package_do_split_locales \
- populate_packages"
-# Functions which process metadata based on split packages
-PACKAGEFUNCS += " \
- package_fixsymlinks \
- package_name_hook \
- package_do_filedeps \
- package_do_shlibs \
- package_do_pkgconfig \
- read_shlibdeps \
- package_depchains \
- emit_pkgdata"
-
-python do_package () {
- # Change the following version to cause sstate to invalidate the package
- # cache. This is useful if an item this class depends on changes in a
- # way that the output of this class changes. rpmdeps is a good example
- # as any change to rpmdeps requires this to be rerun.
- # PACKAGE_BBCLASS_VERSION = "4"
-
- # Init cachedpath
- global cpath
- cpath = oe.cachedpath.CachedPath()
-
- ###########################################################################
- # Sanity test the setup
- ###########################################################################
-
- packages = (d.getVar('PACKAGES') or "").split()
- if len(packages) < 1:
- bb.debug(1, "No packages to build, skipping do_package")
- return
-
- workdir = d.getVar('WORKDIR')
- outdir = d.getVar('DEPLOY_DIR')
- dest = d.getVar('D')
- dvar = d.getVar('PKGD')
- pn = d.getVar('PN')
-
- if not workdir or not outdir or not dest or not dvar or not pn:
- msg = "WORKDIR, DEPLOY_DIR, D, PN and PKGD all must be defined, unable to package"
- oe.qa.handle_error("var-undefined", msg, d)
- return
-
- bb.build.exec_func("package_convert_pr_autoinc", d)
-
- ###########################################################################
- # Optimisations
- ###########################################################################
-
- # Continually expanding complex expressions is inefficient, particularly
- # when we write to the datastore and invalidate the expansion cache. This
- # code pre-expands some frequently used variables
-
- def expandVar(x, d):
- d.setVar(x, d.getVar(x))
-
- for x in 'PN', 'PV', 'BPN', 'TARGET_SYS', 'EXTENDPRAUTO':
- expandVar(x, d)
-
- ###########################################################################
- # Setup PKGD (from D)
- ###########################################################################
-
- for f in (d.getVar('PACKAGEBUILDPKGD') or '').split():
- bb.build.exec_func(f, d)
-
- ###########################################################################
- # Split up PKGD into PKGDEST
- ###########################################################################
-
- cpath = oe.cachedpath.CachedPath()
-
- for f in (d.getVar('PACKAGESPLITFUNCS') or '').split():
- bb.build.exec_func(f, d)
-
- ###########################################################################
- # Process PKGDEST
- ###########################################################################
-
- # Build global list of files in each split package
- global pkgfiles
- pkgfiles = {}
- packages = d.getVar('PACKAGES').split()
- pkgdest = d.getVar('PKGDEST')
- for pkg in packages:
- pkgfiles[pkg] = []
- for walkroot, dirs, files in cpath.walk(pkgdest + "/" + pkg):
- for file in files:
- pkgfiles[pkg].append(walkroot + os.sep + file)
-
- for f in (d.getVar('PACKAGEFUNCS') or '').split():
- bb.build.exec_func(f, d)
-
- oe.qa.exit_if_errors(d)
-}
-
-do_package[dirs] = "${SHLIBSWORKDIR} ${D}"
-do_package[vardeps] += "${PACKAGEBUILDPKGD} ${PACKAGESPLITFUNCS} ${PACKAGEFUNCS} ${@gen_packagevar(d)}"
-addtask package after do_install
-
-SSTATETASKS += "do_package"
-do_package[cleandirs] = "${PKGDEST} ${PKGDESTWORK}"
-do_package[sstate-plaindirs] = "${PKGD} ${PKGDEST} ${PKGDESTWORK}"
-do_package_setscene[dirs] = "${STAGING_DIR}"
-
-python do_package_setscene () {
- sstate_setscene(d)
-}
-addtask do_package_setscene
-
-# Copy from PKGDESTWORK to tempdirectory as tempdirectory can be cleaned at both
-# do_package_setscene and do_packagedata_setscene leading to races
-python do_packagedata () {
- bb.build.exec_func("package_get_auto_pr", d)
-
- src = d.expand("${PKGDESTWORK}")
- dest = d.expand("${WORKDIR}/pkgdata-pdata-input")
- oe.path.copyhardlinktree(src, dest)
-
- bb.build.exec_func("packagedata_translate_pr_autoinc", d)
-}
-do_packagedata[cleandirs] += "${WORKDIR}/pkgdata-pdata-input"
-
-# Translate the EXTENDPRAUTO and AUTOINC to the final values
-packagedata_translate_pr_autoinc() {
- find ${WORKDIR}/pkgdata-pdata-input -type f | xargs --no-run-if-empty \
- sed -e 's,@PRSERV_PV_AUTOINC@,${PRSERV_PV_AUTOINC},g' \
- -e 's,@EXTENDPRAUTO@,${EXTENDPRAUTO},g' -i
-}
-
-addtask packagedata before do_build after do_package
-
-SSTATETASKS += "do_packagedata"
-do_packagedata[sstate-inputdirs] = "${WORKDIR}/pkgdata-pdata-input"
-do_packagedata[sstate-outputdirs] = "${PKGDATA_DIR}"
-do_packagedata[stamp-extra-info] = "${MACHINE_ARCH}"
-
-python do_packagedata_setscene () {
- sstate_setscene(d)
-}
-addtask do_packagedata_setscene
-
-#
-# Helper functions for the package writing classes
-#
-
-def mapping_rename_hook(d):
- """
- Rewrite variables to account for package renaming in things
- like debian.bbclass or manual PKG variable name changes
- """
- pkg = d.getVar("PKG")
- runtime_mapping_rename("RDEPENDS", pkg, d)
- runtime_mapping_rename("RRECOMMENDS", pkg, d)
- runtime_mapping_rename("RSUGGESTS", pkg, d)
diff --git a/meta/classes/package_tar.bbclass b/meta/classes/package_tar.bbclass
deleted file mode 100644
index d6c1b306fc..0000000000
--- a/meta/classes/package_tar.bbclass
+++ /dev/null
@@ -1,71 +0,0 @@
-inherit package
-
-IMAGE_PKGTYPE ?= "tar"
-
-python do_package_tar () {
- import subprocess
-
- oldcwd = os.getcwd()
-
- workdir = d.getVar('WORKDIR')
- if not workdir:
- bb.error("WORKDIR not defined, unable to package")
- return
-
- outdir = d.getVar('DEPLOY_DIR_TAR')
- if not outdir:
- bb.error("DEPLOY_DIR_TAR not defined, unable to package")
- return
-
- dvar = d.getVar('D')
- if not dvar:
- bb.error("D not defined, unable to package")
- return
-
- packages = d.getVar('PACKAGES')
- if not packages:
- bb.debug(1, "PACKAGES not defined, nothing to package")
- return
-
- pkgdest = d.getVar('PKGDEST')
-
- bb.utils.mkdirhier(outdir)
- bb.utils.mkdirhier(dvar)
-
- for pkg in packages.split():
- localdata = bb.data.createCopy(d)
- root = "%s/%s" % (pkgdest, pkg)
-
- overrides = localdata.getVar('OVERRIDES', False)
- localdata.setVar('OVERRIDES', '%s:%s' % (overrides, pkg))
-
- bb.utils.mkdirhier(root)
- basedir = os.path.dirname(root)
- tarfn = localdata.expand("${DEPLOY_DIR_TAR}/${PKG}-${PKGV}-${PKGR}.tar.gz")
- os.chdir(root)
- dlist = os.listdir(root)
- if not dlist:
- bb.note("Not creating empty archive for %s-%s-%s" % (pkg, localdata.getVar('PKGV'), localdata.getVar('PKGR')))
- continue
- args = "tar -cz --exclude=CONTROL --exclude=DEBIAN -f".split()
- ret = subprocess.call(args + [tarfn] + dlist)
- if ret != 0:
- bb.error("Creation of tar %s failed." % tarfn)
-
- os.chdir(oldcwd)
-}
-
-python () {
- if d.getVar('PACKAGES') != '':
- deps = ' tar-native:do_populate_sysroot virtual/fakeroot-native:do_populate_sysroot'
- d.appendVarFlag('do_package_write_tar', 'depends', deps)
- d.setVarFlag('do_package_write_tar', 'fakeroot', "1")
-}
-
-
-python do_package_write_tar () {
- bb.build.exec_func("read_subpackage_metadata", d)
- bb.build.exec_func("do_package_tar", d)
-}
-do_package_write_tar[dirs] = "${D}"
-addtask package_write_tar before do_build after do_packagedata do_package
diff --git a/meta/classes/perlnative.bbclass b/meta/classes/perlnative.bbclass
deleted file mode 100644
index cc8de8b381..0000000000
--- a/meta/classes/perlnative.bbclass
+++ /dev/null
@@ -1,3 +0,0 @@
-EXTRANATIVEPATH += "perl-native"
-DEPENDS += "perl-native"
-OECMAKE_PERLNATIVE_DIR = "${STAGING_BINDIR_NATIVE}/perl-native"
diff --git a/meta/classes/pkgconfig.bbclass b/meta/classes/pkgconfig.bbclass
deleted file mode 100644
index fa94527ce9..0000000000
--- a/meta/classes/pkgconfig.bbclass
+++ /dev/null
@@ -1,2 +0,0 @@
-DEPENDS:prepend = "pkgconfig-native "
-
diff --git a/meta/classes/populate_sdk.bbclass b/meta/classes/populate_sdk.bbclass
deleted file mode 100644
index f64a911b72..0000000000
--- a/meta/classes/populate_sdk.bbclass
+++ /dev/null
@@ -1,7 +0,0 @@
-# The majority of populate_sdk is located in populate_sdk_base
-# This chunk simply facilitates compatibility with SDK only recipes.
-
-inherit populate_sdk_base
-
-addtask populate_sdk after do_install before do_build
-
diff --git a/meta/classes/prexport.bbclass b/meta/classes/prexport.bbclass
index 6dcf99e29f..e5098e3308 100644
--- a/meta/classes/prexport.bbclass
+++ b/meta/classes/prexport.bbclass
@@ -1,3 +1,9 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
PRSERV_DUMPOPT_VERSION = "${PRAUTOINX}"
PRSERV_DUMPOPT_PKGARCH = ""
PRSERV_DUMPOPT_CHECKSUM = ""
diff --git a/meta/classes/primport.bbclass b/meta/classes/primport.bbclass
index 8ed45f03f0..00924174c1 100644
--- a/meta/classes/primport.bbclass
+++ b/meta/classes/primport.bbclass
@@ -1,3 +1,9 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
python primport_handler () {
import bb.event
if not e.data:
diff --git a/meta/classes/pypi.bbclass b/meta/classes/pypi.bbclass
deleted file mode 100644
index 5fa7b8a6ae..0000000000
--- a/meta/classes/pypi.bbclass
+++ /dev/null
@@ -1,28 +0,0 @@
-def pypi_package(d):
- bpn = d.getVar('BPN')
- if bpn.startswith('python-'):
- return bpn[7:]
- elif bpn.startswith('python3-'):
- return bpn[8:]
- return bpn
-
-PYPI_PACKAGE ?= "${@pypi_package(d)}"
-PYPI_PACKAGE_EXT ?= "tar.gz"
-PYPI_ARCHIVE_NAME ?= "${PYPI_PACKAGE}-${PV}.${PYPI_PACKAGE_EXT}"
-
-def pypi_src_uri(d):
- package = d.getVar('PYPI_PACKAGE')
- archive_name = d.getVar('PYPI_ARCHIVE_NAME')
- return 'https://files.pythonhosted.org/packages/source/%s/%s/%s' % (package[0], package, archive_name)
-
-PYPI_SRC_URI ?= "${@pypi_src_uri(d)}"
-
-HOMEPAGE ?= "https://pypi.python.org/pypi/${PYPI_PACKAGE}/"
-SECTION = "devel/python"
-SRC_URI:prepend = "${PYPI_SRC_URI} "
-S = "${WORKDIR}/${PYPI_PACKAGE}-${PV}"
-
-UPSTREAM_CHECK_URI ?= "https://pypi.org/project/${PYPI_PACKAGE}/"
-UPSTREAM_CHECK_REGEX ?= "/${PYPI_PACKAGE}/(?P<pver>(\d+[\.\-_]*)+)/"
-
-CVE_PRODUCT ?= "python:${PYPI_PACKAGE}"
diff --git a/meta/classes/python3-dir.bbclass b/meta/classes/python3-dir.bbclass
deleted file mode 100644
index ff03e584d4..0000000000
--- a/meta/classes/python3-dir.bbclass
+++ /dev/null
@@ -1,5 +0,0 @@
-PYTHON_BASEVERSION = "3.10"
-PYTHON_ABI = ""
-PYTHON_DIR = "python${PYTHON_BASEVERSION}"
-PYTHON_PN = "python3"
-PYTHON_SITEPACKAGES_DIR = "${libdir}/${PYTHON_DIR}/site-packages"
diff --git a/meta/classes/python3targetconfig.bbclass b/meta/classes/python3targetconfig.bbclass
deleted file mode 100644
index 2476858cae..0000000000
--- a/meta/classes/python3targetconfig.bbclass
+++ /dev/null
@@ -1,29 +0,0 @@
-inherit python3native
-
-EXTRA_PYTHON_DEPENDS ?= ""
-EXTRA_PYTHON_DEPENDS:class-target = "python3"
-DEPENDS:append = " ${EXTRA_PYTHON_DEPENDS}"
-
-do_configure:prepend:class-target() {
- export _PYTHON_SYSCONFIGDATA_NAME="_sysconfigdata"
-}
-
-do_compile:prepend:class-target() {
- export _PYTHON_SYSCONFIGDATA_NAME="_sysconfigdata"
-}
-
-do_install:prepend:class-target() {
- export _PYTHON_SYSCONFIGDATA_NAME="_sysconfigdata"
-}
-
-do_configure:prepend:class-nativesdk() {
- export _PYTHON_SYSCONFIGDATA_NAME="_sysconfigdata"
-}
-
-do_compile:prepend:class-nativesdk() {
- export _PYTHON_SYSCONFIGDATA_NAME="_sysconfigdata"
-}
-
-do_install:prepend:class-nativesdk() {
- export _PYTHON_SYSCONFIGDATA_NAME="_sysconfigdata"
-}
diff --git a/meta/classes/python_flit_core.bbclass b/meta/classes/python_flit_core.bbclass
deleted file mode 100644
index 96652aa204..0000000000
--- a/meta/classes/python_flit_core.bbclass
+++ /dev/null
@@ -1,5 +0,0 @@
-inherit python_pep517 python3native python3-dir setuptools3-base
-
-DEPENDS += "python3 python3-flit-core-native"
-
-PEP517_BUILD_API = "flit_core.buildapi"
diff --git a/meta/classes/python_poetry_core.bbclass b/meta/classes/python_poetry_core.bbclass
deleted file mode 100644
index 577663b8f1..0000000000
--- a/meta/classes/python_poetry_core.bbclass
+++ /dev/null
@@ -1,5 +0,0 @@
-inherit python_pep517 python3native setuptools3-base
-
-DEPENDS += "python3-poetry-core-native"
-
-PEP517_BUILD_API = "poetry.core.masonry.api"
diff --git a/meta/classes/python_setuptools3_rust.bbclass b/meta/classes/python_setuptools3_rust.bbclass
deleted file mode 100644
index f12e5d0cbd..0000000000
--- a/meta/classes/python_setuptools3_rust.bbclass
+++ /dev/null
@@ -1,11 +0,0 @@
-inherit python_pyo3 setuptools3
-
-DEPENDS += "python3-setuptools-rust-native"
-
-python_setuptools3_rust_do_configure() {
- python_pyo3_do_configure
- cargo_common_do_configure
- setuptools3_do_configure
-}
-
-EXPORT_FUNCTIONS do_configure
diff --git a/meta/classes/python_setuptools_build_meta.bbclass b/meta/classes/python_setuptools_build_meta.bbclass
deleted file mode 100644
index b2bba35a0b..0000000000
--- a/meta/classes/python_setuptools_build_meta.bbclass
+++ /dev/null
@@ -1,5 +0,0 @@
-inherit setuptools3-base python_pep517
-
-DEPENDS += "python3-setuptools-native python3-wheel-native"
-
-PEP517_BUILD_API = "setuptools.build_meta"
diff --git a/meta/classes/recipe_sanity.bbclass b/meta/classes/recipe_sanity.bbclass
index 7fa4a849ea..a5cc4315fb 100644
--- a/meta/classes/recipe_sanity.bbclass
+++ b/meta/classes/recipe_sanity.bbclass
@@ -1,3 +1,9 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
def __note(msg, d):
bb.note("%s: recipe_sanity: %s" % (d.getVar("P"), msg))
@@ -10,7 +16,7 @@ def bad_runtime_vars(cfgdata, d):
for var in d.getVar("__recipe_sanity_badruntimevars").split():
val = d.getVar(var, False)
if val and val != cfgdata.get(var):
- __note("%s should be %s_${PN}" % (var, var), d)
+ __note("%s should be %s:${PN}" % (var, var), d)
__recipe_sanity_reqvars = "DESCRIPTION"
__recipe_sanity_reqdiffvars = ""
diff --git a/meta/classes/relative_symlinks.bbclass b/meta/classes/relative_symlinks.bbclass
index 3157737347..9ee20e0d09 100644
--- a/meta/classes/relative_symlinks.bbclass
+++ b/meta/classes/relative_symlinks.bbclass
@@ -1,3 +1,9 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
do_install[postfuncs] += "install_relative_symlinks"
python install_relative_symlinks () {
diff --git a/meta/classes/relocatable.bbclass b/meta/classes/relocatable.bbclass
index af04be5cca..d0a623fb0a 100644
--- a/meta/classes/relocatable.bbclass
+++ b/meta/classes/relocatable.bbclass
@@ -1,3 +1,9 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
inherit chrpath
SYSROOT_PREPROCESS_FUNCS += "relocatable_binaries_preprocess relocatable_native_pcfiles"
diff --git a/meta/classes/remove-libtool.bbclass b/meta/classes/remove-libtool.bbclass
index 3fd0cd58f9..8e987388c8 100644
--- a/meta/classes/remove-libtool.bbclass
+++ b/meta/classes/remove-libtool.bbclass
@@ -1,3 +1,9 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
# This class removes libtool .la files after do_install
REMOVE_LIBTOOL_LA ?= "1"
diff --git a/meta/classes/report-error.bbclass b/meta/classes/report-error.bbclass
index de48e4ff0f..1452513a66 100644
--- a/meta/classes/report-error.bbclass
+++ b/meta/classes/report-error.bbclass
@@ -4,7 +4,8 @@
# Copyright (C) 2013 Intel Corporation
# Author: Andreea Brandusa Proca <andreea.b.proca@intel.com>
#
-# Licensed under the MIT license, see COPYING.MIT for details
+# SPDX-License-Identifier: MIT
+#
ERR_REPORT_DIR ?= "${LOG_DIR}/error-report"
@@ -38,6 +39,19 @@ def get_conf_data(e, filename):
jsonstring=jsonstring + line
return jsonstring
+def get_common_data(e):
+ data = {}
+ data['machine'] = e.data.getVar("MACHINE")
+ data['build_sys'] = e.data.getVar("BUILD_SYS")
+ data['distro'] = e.data.getVar("DISTRO")
+ data['target_sys'] = e.data.getVar("TARGET_SYS")
+ data['branch_commit'] = str(oe.buildcfg.detect_branch(e.data)) + ": " + str(oe.buildcfg.detect_revision(e.data))
+ data['bitbake_version'] = e.data.getVar("BB_VERSION")
+ data['layer_version'] = get_layers_branch_rev(e.data)
+ data['local_conf'] = get_conf_data(e, 'local.conf')
+ data['auto_conf'] = get_conf_data(e, 'auto.conf')
+ return data
+
python errorreport_handler () {
import json
import codecs
@@ -55,19 +69,10 @@ python errorreport_handler () {
if isinstance(e, bb.event.BuildStarted):
bb.utils.mkdirhier(logpath)
data = {}
- machine = e.data.getVar("MACHINE")
- data['machine'] = machine
- data['build_sys'] = e.data.getVar("BUILD_SYS")
+ data = get_common_data(e)
data['nativelsb'] = nativelsb()
- data['distro'] = e.data.getVar("DISTRO")
- data['target_sys'] = e.data.getVar("TARGET_SYS")
data['failures'] = []
data['component'] = " ".join(e.getPkgs())
- data['branch_commit'] = str(base_detect_branch(e.data)) + ": " + str(base_detect_revision(e.data))
- data['bitbake_version'] = e.data.getVar("BB_VERSION")
- data['layer_version'] = get_layers_branch_rev(e.data)
- data['local_conf'] = get_conf_data(e, 'local.conf')
- data['auto_conf'] = get_conf_data(e, 'auto.conf')
lock = bb.utils.lockfile(datafile + '.lock')
errorreport_savedata(e, data, "error-report.txt")
bb.utils.unlockfile(lock)
@@ -106,6 +111,37 @@ python errorreport_handler () {
errorreport_savedata(e, jsondata, "error-report.txt")
bb.utils.unlockfile(lock)
+ elif isinstance(e, bb.event.NoProvider):
+ bb.utils.mkdirhier(logpath)
+ data = {}
+ data = get_common_data(e)
+ data['nativelsb'] = nativelsb()
+ data['failures'] = []
+ data['component'] = str(e._item)
+ taskdata={}
+ taskdata['log'] = str(e)
+ taskdata['package'] = str(e._item)
+ taskdata['task'] = "Nothing provides " + "'" + str(e._item) + "'"
+ data['failures'].append(taskdata)
+ lock = bb.utils.lockfile(datafile + '.lock')
+ errorreport_savedata(e, data, "error-report.txt")
+ bb.utils.unlockfile(lock)
+
+ elif isinstance(e, bb.event.ParseError):
+ bb.utils.mkdirhier(logpath)
+ data = {}
+ data = get_common_data(e)
+ data['nativelsb'] = nativelsb()
+ data['failures'] = []
+ data['component'] = "parse"
+ taskdata={}
+ taskdata['log'] = str(e._msg)
+ taskdata['task'] = str(e._msg)
+ data['failures'].append(taskdata)
+ lock = bb.utils.lockfile(datafile + '.lock')
+ errorreport_savedata(e, data, "error-report.txt")
+ bb.utils.unlockfile(lock)
+
elif isinstance(e, bb.event.BuildCompleted):
lock = bb.utils.lockfile(datafile + '.lock')
jsondata = json.loads(errorreport_getdata(e))
@@ -119,4 +155,4 @@ python errorreport_handler () {
}
addhandler errorreport_handler
-errorreport_handler[eventmask] = "bb.event.BuildStarted bb.event.BuildCompleted bb.build.TaskFailed"
+errorreport_handler[eventmask] = "bb.event.BuildStarted bb.event.BuildCompleted bb.build.TaskFailed bb.event.NoProvider bb.event.ParseError"
diff --git a/meta/classes/rm_work.bbclass b/meta/classes/rm_work.bbclass
index 5f12d5aaeb..52ecfafb72 100644
--- a/meta/classes/rm_work.bbclass
+++ b/meta/classes/rm_work.bbclass
@@ -1,4 +1,10 @@
#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+#
# Removes source after build
#
# To use it add that line to conf/local.conf:
@@ -27,6 +33,13 @@ BB_SCHEDULER ?= "completion"
BB_TASK_IONICE_LEVEL:task-rm_work = "3.0"
do_rm_work () {
+ # Force using the HOSTTOOLS 'rm' - otherwise the SYSROOT_NATIVE 'rm' can be selected depending on PATH
+ # Avoids race-condition accessing 'rm' when deleting WORKDIR folders at the end of this function
+ RM_BIN="$(PATH=${HOSTTOOLS_DIR} command -v rm)"
+ if [ -z "${RM_BIN}" ]; then
+ bbfatal "Binary 'rm' not found in HOSTTOOLS_DIR, cannot remove WORKDIR data."
+ fi
+
# If the recipe name is in the RM_WORK_EXCLUDE, skip the recipe.
for p in ${RM_WORK_EXCLUDE}; do
if [ "$p" = "${PN}" ]; then
@@ -44,55 +57,58 @@ do_rm_work () {
# Change normal stamps into setscene stamps as they better reflect the
# fact WORKDIR is now empty
# Also leave noexec stamps since setscene stamps don't cover them
- cd `dirname ${STAMP}`
- for i in `basename ${STAMP}`*
- do
- case $i in
- *sigdata*|*sigbasedata*)
- # Save/skip anything that looks like a signature data file.
- ;;
- *do_image_complete_setscene*|*do_image_qa_setscene*)
- # Ensure we don't 'stack' setscene extensions to these stamps with the sections below
- ;;
- *do_image_complete*)
- # Promote do_image_complete stamps to setscene versions (ahead of *do_image* below)
- mv $i `echo $i | sed -e "s#do_image_complete#do_image_complete_setscene#"`
- ;;
- *do_image_qa*)
- # Promote do_image_qa stamps to setscene versions (ahead of *do_image* below)
- mv $i `echo $i | sed -e "s#do_image_qa#do_image_qa_setscene#"`
- ;;
- *do_package_write*|*do_rootfs*|*do_image*|*do_bootimg*|*do_write_qemuboot_conf*|*do_build*)
- ;;
- *do_addto_recipe_sysroot*)
- # Preserve recipe-sysroot-native if do_addto_recipe_sysroot has been used
- excludes="$excludes recipe-sysroot-native"
- ;;
- *do_package|*do_package.*|*do_package_setscene.*)
- # We remove do_package entirely, including any
- # sstate version since otherwise we'd need to leave 'plaindirs' around
- # such as 'packages' and 'packages-split' and these can be large. No end
- # of chain tasks depend directly on do_package anymore.
- rm -f -- $i;
- ;;
- *_setscene*)
- # Skip stamps which are already setscene versions
- ;;
- *)
- # For everything else: if suitable, promote the stamp to a setscene
- # version, otherwise remove it
- for j in ${SSTATETASKS} do_shared_workdir
- do
- case $i in
- *$j|*$j.*)
- mv $i `echo $i | sed -e "s#${j}#${j}_setscene#"`
- break
- ;;
- esac
- done
- rm -f -- $i
- esac
- done
+ STAMPDIR=`dirname ${STAMP}`
+ if test -d $STAMPDIR; then
+ cd $STAMPDIR
+ for i in `basename ${STAMP}`*
+ do
+ case $i in
+ *sigdata*|*sigbasedata*)
+ # Save/skip anything that looks like a signature data file.
+ ;;
+ *do_image_complete_setscene*|*do_image_qa_setscene*)
+ # Ensure we don't 'stack' setscene extensions to these stamps with the sections below
+ ;;
+ *do_image_complete*)
+ # Promote do_image_complete stamps to setscene versions (ahead of *do_image* below)
+ mv $i `echo $i | sed -e "s#do_image_complete#do_image_complete_setscene#"`
+ ;;
+ *do_image_qa*)
+ # Promote do_image_qa stamps to setscene versions (ahead of *do_image* below)
+ mv $i `echo $i | sed -e "s#do_image_qa#do_image_qa_setscene#"`
+ ;;
+ *do_package_write*|*do_rootfs*|*do_image*|*do_bootimg*|*do_write_qemuboot_conf*|*do_build*)
+ ;;
+ *do_addto_recipe_sysroot*)
+ # Preserve recipe-sysroot-native if do_addto_recipe_sysroot has been used
+ excludes="$excludes recipe-sysroot-native"
+ ;;
+ *do_package|*do_package.*|*do_package_setscene.*)
+ # We remove do_package entirely, including any
+ # sstate version since otherwise we'd need to leave 'plaindirs' around
+ # such as 'packages' and 'packages-split' and these can be large. No end
+ # of chain tasks depend directly on do_package anymore.
+ "${RM_BIN}" -f -- $i;
+ ;;
+ *_setscene*)
+ # Skip stamps which are already setscene versions
+ ;;
+ *)
+ # For everything else: if suitable, promote the stamp to a setscene
+ # version, otherwise remove it
+ for j in ${SSTATETASKS} do_shared_workdir
+ do
+ case $i in
+ *$j|*$j.*)
+ mv $i `echo $i | sed -e "s#${j}#${j}_setscene#"`
+ break
+ ;;
+ esac
+ done
+ "${RM_BIN}" -f -- $i
+ esac
+ done
+ fi
cd ${WORKDIR}
for dir in *
@@ -100,12 +116,14 @@ do_rm_work () {
# Retain only logs and other files in temp, safely ignore
# failures of removing pseudo folers on NFS2/3 server.
if [ $dir = 'pseudo' ]; then
- rm -rf -- $dir 2> /dev/null || true
+ "${RM_BIN}" -rf -- $dir 2> /dev/null || true
elif ! echo "$excludes" | grep -q -w "$dir"; then
- rm -rf -- $dir
+ "${RM_BIN}" -rf -- $dir
fi
done
}
+do_rm_work[vardepsexclude] += "SSTATETASKS"
+
do_rm_work_all () {
:
}
@@ -172,7 +190,7 @@ python inject_rm_work() {
# other recipes and thus will typically run much later than completion of
# work in the recipe itself.
# In practice, addtask() here merely updates the dependencies.
- bb.build.addtask('do_rm_work', 'do_build', ' '.join(deps), d)
+ bb.build.addtask('do_rm_work', 'do_rm_work_all do_build', ' '.join(deps), d)
# Always update do_build_without_rm_work dependencies.
bb.build.addtask('do_build_without_rm_work', '', ' '.join(deps), d)
diff --git a/meta/classes/rm_work_and_downloads.bbclass b/meta/classes/rm_work_and_downloads.bbclass
index 15e6091b9d..2695a3807f 100644
--- a/meta/classes/rm_work_and_downloads.bbclass
+++ b/meta/classes/rm_work_and_downloads.bbclass
@@ -1,8 +1,7 @@
# Author: Patrick Ohly <patrick.ohly@intel.com>
# Copyright: Copyright (C) 2015 Intel Corporation
#
-# This file is licensed under the MIT license, see COPYING.MIT in
-# this source distribution for the terms.
+# SPDX-License-Identifier: MIT
# This class is used like rm_work:
# INHERIT += "rm_work_and_downloads"
diff --git a/meta/classes/rootfs-postcommands.bbclass b/meta/classes/rootfs-postcommands.bbclass
deleted file mode 100644
index d302c23cf4..0000000000
--- a/meta/classes/rootfs-postcommands.bbclass
+++ /dev/null
@@ -1,424 +0,0 @@
-
-# Zap the root password if debug-tweaks feature is not enabled
-ROOTFS_POSTPROCESS_COMMAND += '${@bb.utils.contains_any("IMAGE_FEATURES", [ 'debug-tweaks', 'empty-root-password' ], "", "zap_empty_root_password; ",d)}'
-
-# Allow dropbear/openssh to accept logins from accounts with an empty password string if debug-tweaks or allow-empty-password is enabled
-ROOTFS_POSTPROCESS_COMMAND += '${@bb.utils.contains_any("IMAGE_FEATURES", [ 'debug-tweaks', 'allow-empty-password' ], "ssh_allow_empty_password; ", "",d)}'
-
-# Allow dropbear/openssh to accept root logins if debug-tweaks or allow-root-login is enabled
-ROOTFS_POSTPROCESS_COMMAND += '${@bb.utils.contains_any("IMAGE_FEATURES", [ 'debug-tweaks', 'allow-root-login' ], "ssh_allow_root_login; ", "",d)}'
-
-# Enable postinst logging if debug-tweaks is enabled
-ROOTFS_POSTPROCESS_COMMAND += '${@bb.utils.contains_any("IMAGE_FEATURES", [ 'debug-tweaks', 'post-install-logging' ], "postinst_enable_logging; ", "",d)}'
-
-# Create /etc/timestamp during image construction to give a reasonably sane default time setting
-ROOTFS_POSTPROCESS_COMMAND += "rootfs_update_timestamp; "
-
-# Tweak the mount options for rootfs in /etc/fstab if read-only-rootfs is enabled
-ROOTFS_POSTPROCESS_COMMAND += '${@bb.utils.contains("IMAGE_FEATURES", "read-only-rootfs", "read_only_rootfs_hook; ", "",d)}'
-
-# We also need to do the same for the kernel boot parameters,
-# otherwise kernel or initramfs end up mounting the rootfs read/write
-# (the default) if supported by the underlying storage.
-#
-# We do this with :append because the default value might get set later with ?=
-# and we don't want to disable such a default that by setting a value here.
-APPEND:append = '${@bb.utils.contains("IMAGE_FEATURES", "read-only-rootfs", " ro", "", d)}'
-
-# Generates test data file with data store variables expanded in json format
-ROOTFS_POSTPROCESS_COMMAND += "write_image_test_data; "
-
-# Write manifest
-IMAGE_MANIFEST = "${IMGDEPLOYDIR}/${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.manifest"
-ROOTFS_POSTUNINSTALL_COMMAND =+ "write_image_manifest ; "
-# Set default postinst log file
-POSTINST_LOGFILE ?= "${localstatedir}/log/postinstall.log"
-# Set default target for systemd images
-SYSTEMD_DEFAULT_TARGET ?= '${@bb.utils.contains_any("IMAGE_FEATURES", [ "x11-base", "weston" ], "graphical.target", "multi-user.target", d)}'
-ROOTFS_POSTPROCESS_COMMAND += '${@bb.utils.contains("DISTRO_FEATURES", "systemd", "set_systemd_default_target; systemd_create_users;", "", d)}'
-
-ROOTFS_POSTPROCESS_COMMAND += 'empty_var_volatile;'
-
-ROOTFS_POSTPROCESS_COMMAND += '${@bb.utils.contains("DISTRO_FEATURES", "overlayfs", "overlayfs_qa_check;", "", d)}'
-
-inherit image-artifact-names
-
-# Sort the user and group entries in /etc by ID in order to make the content
-# deterministic. Package installs are not deterministic, causing the ordering
-# of entries to change between builds. In case that this isn't desired,
-# the command can be overridden.
-#
-# Note that useradd-staticids.bbclass has to be used to ensure that
-# the numeric IDs of dynamically created entries remain stable.
-#
-# We want this to run as late as possible, in particular after
-# systemd_sysusers_create and set_user_group. Using :append is not
-# enough for that, set_user_group is added that way and would end
-# up running after us.
-SORT_PASSWD_POSTPROCESS_COMMAND ??= " sort_passwd; "
-python () {
- d.appendVar('ROOTFS_POSTPROCESS_COMMAND', '${SORT_PASSWD_POSTPROCESS_COMMAND}')
- d.appendVar('ROOTFS_POSTPROCESS_COMMAND', 'rootfs_reproducible;')
-}
-
-systemd_create_users () {
- for conffile in ${IMAGE_ROOTFS}/usr/lib/sysusers.d/*.conf; do
- [ -e $conffile ] || continue
- grep -v "^#" $conffile | sed -e '/^$/d' | while read type name id comment; do
- if [ "$type" = "u" ]; then
- useradd_params="--shell /sbin/nologin"
- [ "$id" != "-" ] && useradd_params="$useradd_params --uid $id"
- [ "$comment" != "-" ] && useradd_params="$useradd_params --comment $comment"
- useradd_params="$useradd_params --system $name"
- eval useradd --root ${IMAGE_ROOTFS} $useradd_params || true
- elif [ "$type" = "g" ]; then
- groupadd_params=""
- [ "$id" != "-" ] && groupadd_params="$groupadd_params --gid $id"
- groupadd_params="$groupadd_params --system $name"
- eval groupadd --root ${IMAGE_ROOTFS} $groupadd_params || true
- elif [ "$type" = "m" ]; then
- group=$id
- eval groupadd --root ${IMAGE_ROOTFS} --system $group || true
- eval useradd --root ${IMAGE_ROOTFS} --shell /sbin/nologin --system $name --no-user-group || true
- eval usermod --root ${IMAGE_ROOTFS} -a -G $group $name
- fi
- done
- done
-}
-
-#
-# A hook function to support read-only-rootfs IMAGE_FEATURES
-#
-read_only_rootfs_hook () {
- # Tweak the mount option and fs_passno for rootfs in fstab
- if [ -f ${IMAGE_ROOTFS}/etc/fstab ]; then
- sed -i -e '/^[#[:space:]]*\/dev\/root/{s/defaults/ro/;s/\([[:space:]]*[[:digit:]]\)\([[:space:]]*\)[[:digit:]]$/\1\20/}' ${IMAGE_ROOTFS}/etc/fstab
- fi
-
- # Tweak the "mount -o remount,rw /" command in busybox-inittab inittab
- if [ -f ${IMAGE_ROOTFS}/etc/inittab ]; then
- sed -i 's|/bin/mount -o remount,rw /|/bin/mount -o remount,ro /|' ${IMAGE_ROOTFS}/etc/inittab
- fi
-
- # If we're using openssh and the /etc/ssh directory has no pre-generated keys,
- # we should configure openssh to use the configuration file /etc/ssh/sshd_config_readonly
- # and the keys under /var/run/ssh.
- if [ -d ${IMAGE_ROOTFS}/etc/ssh ]; then
- if [ -e ${IMAGE_ROOTFS}/etc/ssh/ssh_host_rsa_key ]; then
- echo "SYSCONFDIR=\${SYSCONFDIR:-/etc/ssh}" >> ${IMAGE_ROOTFS}/etc/default/ssh
- echo "SSHD_OPTS=" >> ${IMAGE_ROOTFS}/etc/default/ssh
- else
- echo "SYSCONFDIR=\${SYSCONFDIR:-/var/run/ssh}" >> ${IMAGE_ROOTFS}/etc/default/ssh
- echo "SSHD_OPTS='-f /etc/ssh/sshd_config_readonly'" >> ${IMAGE_ROOTFS}/etc/default/ssh
- fi
- fi
-
- # Also tweak the key location for dropbear in the same way.
- if [ -d ${IMAGE_ROOTFS}/etc/dropbear ]; then
- if [ ! -e ${IMAGE_ROOTFS}/etc/dropbear/dropbear_rsa_host_key ]; then
- echo "DROPBEAR_RSAKEY_DIR=/var/lib/dropbear" >> ${IMAGE_ROOTFS}/etc/default/dropbear
- fi
- fi
-
- if ${@bb.utils.contains("DISTRO_FEATURES", "sysvinit", "true", "false", d)}; then
- # Change the value of ROOTFS_READ_ONLY in /etc/default/rcS to yes
- if [ -e ${IMAGE_ROOTFS}/etc/default/rcS ]; then
- sed -i 's/ROOTFS_READ_ONLY=no/ROOTFS_READ_ONLY=yes/' ${IMAGE_ROOTFS}/etc/default/rcS
- fi
- # Run populate-volatile.sh at rootfs time to set up basic files
- # and directories to support read-only rootfs.
- if [ -x ${IMAGE_ROOTFS}/etc/init.d/populate-volatile.sh ]; then
- ${IMAGE_ROOTFS}/etc/init.d/populate-volatile.sh
- fi
- fi
-
- if ${@bb.utils.contains("DISTRO_FEATURES", "systemd", "true", "false", d)}; then
- # Create machine-id
- # 20:12 < mezcalero> koen: you have three options: a) run systemd-machine-id-setup at install time, b) have / read-only and an empty file there (for stateless) and c) boot with / writable
- touch ${IMAGE_ROOTFS}${sysconfdir}/machine-id
- fi
-}
-
-#
-# This function is intended to disallow empty root password if 'debug-tweaks' is not in IMAGE_FEATURES.
-#
-zap_empty_root_password () {
- if [ -e ${IMAGE_ROOTFS}/etc/shadow ]; then
- sed -i 's%^root::%root:*:%' ${IMAGE_ROOTFS}/etc/shadow
- fi
- if [ -e ${IMAGE_ROOTFS}/etc/passwd ]; then
- sed -i 's%^root::%root:*:%' ${IMAGE_ROOTFS}/etc/passwd
- fi
-}
-
-#
-# allow dropbear/openssh to accept logins from accounts with an empty password string
-#
-ssh_allow_empty_password () {
- for config in sshd_config sshd_config_readonly; do
- if [ -e ${IMAGE_ROOTFS}${sysconfdir}/ssh/$config ]; then
- sed -i 's/^[#[:space:]]*PermitEmptyPasswords.*/PermitEmptyPasswords yes/' ${IMAGE_ROOTFS}${sysconfdir}/ssh/$config
- fi
- done
-
- if [ -e ${IMAGE_ROOTFS}${sbindir}/dropbear ] ; then
- if grep -q DROPBEAR_EXTRA_ARGS ${IMAGE_ROOTFS}${sysconfdir}/default/dropbear 2>/dev/null ; then
- if ! grep -q "DROPBEAR_EXTRA_ARGS=.*-B" ${IMAGE_ROOTFS}${sysconfdir}/default/dropbear ; then
- sed -i 's/^DROPBEAR_EXTRA_ARGS="*\([^"]*\)"*/DROPBEAR_EXTRA_ARGS="\1 -B"/' ${IMAGE_ROOTFS}${sysconfdir}/default/dropbear
- fi
- else
- printf '\nDROPBEAR_EXTRA_ARGS="-B"\n' >> ${IMAGE_ROOTFS}${sysconfdir}/default/dropbear
- fi
- fi
-
- if [ -d ${IMAGE_ROOTFS}${sysconfdir}/pam.d ] ; then
- for f in `find ${IMAGE_ROOTFS}${sysconfdir}/pam.d/* -type f -exec test -e {} \; -print`
- do
- sed -i 's/nullok_secure/nullok/' $f
- done
- fi
-}
-
-#
-# allow dropbear/openssh to accept root logins
-#
-ssh_allow_root_login () {
- for config in sshd_config sshd_config_readonly; do
- if [ -e ${IMAGE_ROOTFS}${sysconfdir}/ssh/$config ]; then
- sed -i 's/^[#[:space:]]*PermitRootLogin.*/PermitRootLogin yes/' ${IMAGE_ROOTFS}${sysconfdir}/ssh/$config
- fi
- done
-
- if [ -e ${IMAGE_ROOTFS}${sbindir}/dropbear ] ; then
- if grep -q DROPBEAR_EXTRA_ARGS ${IMAGE_ROOTFS}${sysconfdir}/default/dropbear 2>/dev/null ; then
- sed -i '/^DROPBEAR_EXTRA_ARGS=/ s/-w//' ${IMAGE_ROOTFS}${sysconfdir}/default/dropbear
- fi
- fi
-}
-
-python sort_passwd () {
- import rootfspostcommands
- rootfspostcommands.sort_passwd(d.expand('${IMAGE_ROOTFS}${sysconfdir}'))
-}
-
-#
-# Enable postinst logging if debug-tweaks is enabled
-#
-postinst_enable_logging () {
- mkdir -p ${IMAGE_ROOTFS}${sysconfdir}/default
- echo "POSTINST_LOGGING=1" >> ${IMAGE_ROOTFS}${sysconfdir}/default/postinst
- echo "LOGFILE=${POSTINST_LOGFILE}" >> ${IMAGE_ROOTFS}${sysconfdir}/default/postinst
-}
-
-#
-# Modify systemd default target
-#
-set_systemd_default_target () {
- if [ -d ${IMAGE_ROOTFS}${sysconfdir}/systemd/system -a -e ${IMAGE_ROOTFS}${systemd_system_unitdir}/${SYSTEMD_DEFAULT_TARGET} ]; then
- ln -sf ${systemd_system_unitdir}/${SYSTEMD_DEFAULT_TARGET} ${IMAGE_ROOTFS}${sysconfdir}/systemd/system/default.target
- fi
-}
-
-# If /var/volatile is not empty, we have seen problems where programs such as the
-# journal make assumptions based on the contents of /var/volatile. The journal
-# would then write to /var/volatile before it was mounted, thus hiding the
-# items previously written.
-#
-# This change is to attempt to fix those types of issues in a way that doesn't
-# affect users that may not be using /var/volatile.
-empty_var_volatile () {
- if [ -e ${IMAGE_ROOTFS}/etc/fstab ]; then
- match=`awk '$1 !~ "#" && $2 ~ /\/var\/volatile/{print $2}' ${IMAGE_ROOTFS}/etc/fstab 2> /dev/null`
- if [ -n "$match" ]; then
- find ${IMAGE_ROOTFS}/var/volatile -mindepth 1 -delete
- fi
- fi
-}
-
-# Turn any symbolic /sbin/init link into a file
-remove_init_link () {
- if [ -h ${IMAGE_ROOTFS}/sbin/init ]; then
- LINKFILE=${IMAGE_ROOTFS}`readlink ${IMAGE_ROOTFS}/sbin/init`
- rm ${IMAGE_ROOTFS}/sbin/init
- cp $LINKFILE ${IMAGE_ROOTFS}/sbin/init
- fi
-}
-
-make_zimage_symlink_relative () {
- if [ -L ${IMAGE_ROOTFS}/boot/zImage ]; then
- (cd ${IMAGE_ROOTFS}/boot/ && for i in `ls zImage-* | sort`; do ln -sf $i zImage; done)
- fi
-}
-
-python write_image_manifest () {
- from oe.rootfs import image_list_installed_packages
- from oe.utils import format_pkg_list
-
- deploy_dir = d.getVar('IMGDEPLOYDIR')
- link_name = d.getVar('IMAGE_LINK_NAME')
- manifest_name = d.getVar('IMAGE_MANIFEST')
-
- if not manifest_name:
- return
-
- pkgs = image_list_installed_packages(d)
- with open(manifest_name, 'w+') as image_manifest:
- image_manifest.write(format_pkg_list(pkgs, "ver"))
-
- if os.path.exists(manifest_name) and link_name:
- manifest_link = deploy_dir + "/" + link_name + ".manifest"
- if manifest_link != manifest_name:
- if os.path.lexists(manifest_link):
- os.remove(manifest_link)
- os.symlink(os.path.basename(manifest_name), manifest_link)
-}
-
-# Can be used to create /etc/timestamp during image construction to give a reasonably
-# sane default time setting
-rootfs_update_timestamp () {
- if [ "${REPRODUCIBLE_TIMESTAMP_ROOTFS}" != "" ]; then
- # Convert UTC into %4Y%2m%2d%2H%2M%2S
- sformatted=`date -u -d @${REPRODUCIBLE_TIMESTAMP_ROOTFS} +%4Y%2m%2d%2H%2M%2S`
- else
- sformatted=`date -u +%4Y%2m%2d%2H%2M%2S`
- fi
- echo $sformatted > ${IMAGE_ROOTFS}/etc/timestamp
- bbnote "rootfs_update_timestamp: set /etc/timestamp to $sformatted"
-}
-
-# Prevent X from being started
-rootfs_no_x_startup () {
- if [ -f ${IMAGE_ROOTFS}/etc/init.d/xserver-nodm ]; then
- chmod a-x ${IMAGE_ROOTFS}/etc/init.d/xserver-nodm
- fi
-}
-
-rootfs_trim_schemas () {
- for schema in ${IMAGE_ROOTFS}/etc/gconf/schemas/*.schemas
- do
- # Need this in case no files exist
- if [ -e $schema ]; then
- oe-trim-schemas $schema > $schema.new
- mv $schema.new $schema
- fi
- done
-}
-
-rootfs_check_host_user_contaminated () {
- contaminated="${WORKDIR}/host-user-contaminated.txt"
- HOST_USER_UID="$(PSEUDO_UNLOAD=1 id -u)"
- HOST_USER_GID="$(PSEUDO_UNLOAD=1 id -g)"
-
- find "${IMAGE_ROOTFS}" -path "${IMAGE_ROOTFS}/home" -prune -o \
- -user "$HOST_USER_UID" -print -o -group "$HOST_USER_GID" -print >"$contaminated"
-
- sed -e "s,${IMAGE_ROOTFS},," $contaminated | while read line; do
- bbwarn "Path in the rootfs is owned by the same user or group as the user running bitbake:" $line `ls -lan ${IMAGE_ROOTFS}/$line`
- done
-
- if [ -s "$contaminated" ]; then
- bbwarn "/etc/passwd:" `cat ${IMAGE_ROOTFS}/etc/passwd`
- bbwarn "/etc/group:" `cat ${IMAGE_ROOTFS}/etc/group`
- fi
-}
-
-# Make any absolute links in a sysroot relative
-rootfs_sysroot_relativelinks () {
- sysroot-relativelinks.py ${SDK_OUTPUT}/${SDKTARGETSYSROOT}
-}
-
-# Generated test data json file
-python write_image_test_data() {
- from oe.data import export2json
-
- deploy_dir = d.getVar('IMGDEPLOYDIR')
- link_name = d.getVar('IMAGE_LINK_NAME')
- testdata_name = os.path.join(deploy_dir, "%s.testdata.json" % d.getVar('IMAGE_NAME'))
-
- searchString = "%s/"%(d.getVar("TOPDIR")).replace("//","/")
- export2json(d, testdata_name, searchString=searchString, replaceString="")
-
- if os.path.exists(testdata_name) and link_name:
- testdata_link = os.path.join(deploy_dir, "%s.testdata.json" % link_name)
- if testdata_link != testdata_name:
- if os.path.lexists(testdata_link):
- os.remove(testdata_link)
- os.symlink(os.path.basename(testdata_name), testdata_link)
-}
-write_image_test_data[vardepsexclude] += "TOPDIR"
-
-# Check for unsatisfied recommendations (RRECOMMENDS)
-python rootfs_log_check_recommends() {
- log_path = d.expand("${T}/log.do_rootfs")
- with open(log_path, 'r') as log:
- for line in log:
- if 'log_check' in line:
- continue
-
- if 'unsatisfied recommendation for' in line:
- bb.warn('[log_check] %s: %s' % (d.getVar('PN'), line))
-}
-
-# Perform any additional adjustments needed to make rootf binary reproducible
-rootfs_reproducible () {
- if [ "${REPRODUCIBLE_TIMESTAMP_ROOTFS}" != "" ]; then
- # Convert UTC into %4Y%2m%2d%2H%2M%2S
- sformatted=`date -u -d @${REPRODUCIBLE_TIMESTAMP_ROOTFS} +%4Y%2m%2d%2H%2M%2S`
- echo $sformatted > ${IMAGE_ROOTFS}/etc/version
- bbnote "rootfs_reproducible: set /etc/version to $sformatted"
-
- if [ -d ${IMAGE_ROOTFS}${sysconfdir}/gconf ]; then
- find ${IMAGE_ROOTFS}${sysconfdir}/gconf -name '%gconf.xml' -print0 | xargs -0r \
- sed -i -e 's@\bmtime="[0-9][0-9]*"@mtime="'${REPRODUCIBLE_TIMESTAMP_ROOTFS}'"@g'
- fi
- fi
-}
-
-# Perform a dumb check for unit existence, not its validity
-python overlayfs_qa_check() {
- from oe.overlayfs import mountUnitName
-
- overlayMountPoints = d.getVarFlags("OVERLAYFS_MOUNT_POINT") or {}
- imagepath = d.getVar("IMAGE_ROOTFS")
- sysconfdir = d.getVar("sysconfdir")
- searchpaths = [oe.path.join(imagepath, sysconfdir, "systemd", "system"),
- oe.path.join(imagepath, d.getVar("systemd_system_unitdir"))]
- fstabpath = oe.path.join(imagepath, sysconfdir, "fstab")
-
- if not any(os.path.exists(path) for path in [*searchpaths, fstabpath]):
- return
-
- fstabDevices = []
- if os.path.isfile(fstabpath):
- with open(fstabpath, 'r') as f:
- for line in f:
- if line[0] == '#':
- continue
- path = line.split(maxsplit=2)
- if len(path) > 2:
- fstabDevices.append(path[1])
-
- allUnitExist = True;
- for mountPoint in overlayMountPoints:
- qaSkip = (d.getVarFlag("OVERLAYFS_QA_SKIP", mountPoint) or "").split()
- if "mount-configured" in qaSkip:
- continue
-
- mountPath = d.getVarFlag('OVERLAYFS_MOUNT_POINT', mountPoint)
- if mountPath in fstabDevices:
- continue
-
- mountUnit = mountUnitName(mountPath)
- if any(os.path.isfile(oe.path.join(dirpath, mountUnit))
- for dirpath in searchpaths):
- continue
-
- bb.warn(f'Mount path {mountPath} not found in fstab and unit '
- f'{mountUnit} not found in systemd unit directories.')
- bb.warn(f'Skip this check by setting OVERLAYFS_QA_SKIP[{mountPoint}] = '
- '"mount-configured"')
- allUnitExist = False;
-
- if not allUnitExist:
- bb.fatal('Not all mount paths and units are installed in the image')
-}
diff --git a/meta/classes/rust-bin.bbclass b/meta/classes/rust-bin.bbclass
deleted file mode 100644
index c87343b3cf..0000000000
--- a/meta/classes/rust-bin.bbclass
+++ /dev/null
@@ -1,149 +0,0 @@
-inherit rust
-
-RDEPENDS:${PN}:append:class-target = " ${RUSTLIB_DEP}"
-
-RUSTC_ARCHFLAGS += "-C opt-level=3 -g -L ${STAGING_DIR_HOST}/${rustlibdir} -C linker=${RUST_TARGET_CCLD}"
-EXTRA_OEMAKE += 'RUSTC_ARCHFLAGS="${RUSTC_ARCHFLAGS}"'
-
-# Some libraries alias with the standard library but libstd is configured to
-# make it difficult or imposisble to use its version. Unfortunately libstd
-# must be explicitly overridden using extern.
-OVERLAP_LIBS = "\
- libc \
- log \
- getopts \
- rand \
-"
-def get_overlap_deps(d):
- deps = d.getVar("DEPENDS").split()
- overlap_deps = []
- for o in d.getVar("OVERLAP_LIBS").split():
- l = len([o for dep in deps if (o + '-rs' in dep)])
- if l > 0:
- overlap_deps.append(o)
- return " ".join(overlap_deps)
-OVERLAP_DEPS = "${@get_overlap_deps(d)}"
-
-# Prevents multiple static copies of standard library modules
-# See https://github.com/rust-lang/rust/issues/19680
-RUSTC_PREFER_DYNAMIC = "-C prefer-dynamic"
-RUSTC_FLAGS += "${RUSTC_PREFER_DYNAMIC}"
-
-CRATE_NAME ?= "${@d.getVar('BPN').replace('-rs', '').replace('-', '_')}"
-BINNAME ?= "${BPN}"
-LIBNAME ?= "lib${CRATE_NAME}-rs"
-CRATE_TYPE ?= "dylib"
-BIN_SRC ?= "${S}/src/main.rs"
-LIB_SRC ?= "${S}/src/lib.rs"
-
-rustbindest ?= "${bindir}"
-rustlibdest ?= "${rustlibdir}"
-RUST_RPATH_ABS ?= "${rustlibdir}:${rustlib}"
-
-def relative_rpaths(paths, base):
- relpaths = set()
- for p in paths.split(':'):
- if p == base:
- relpaths.add('$ORIGIN')
- continue
- relpaths.add(os.path.join('$ORIGIN', os.path.relpath(p, base)))
- return '-rpath=' + ':'.join(relpaths) if len(relpaths) else ''
-
-RUST_LIB_RPATH_FLAGS ?= "${@relative_rpaths(d.getVar('RUST_RPATH_ABS', True), d.getVar('rustlibdest', True))}"
-RUST_BIN_RPATH_FLAGS ?= "${@relative_rpaths(d.getVar('RUST_RPATH_ABS', True), d.getVar('rustbindest', True))}"
-
-def libfilename(d):
- if d.getVar('CRATE_TYPE', True) == 'dylib':
- return d.getVar('LIBNAME', True) + '.so'
- else:
- return d.getVar('LIBNAME', True) + '.rlib'
-
-def link_args(d, bin):
- linkargs = []
- if bin:
- rpaths = d.getVar('RUST_BIN_RPATH_FLAGS', False)
- else:
- rpaths = d.getVar('RUST_LIB_RPATH_FLAGS', False)
- if d.getVar('CRATE_TYPE', True) == 'dylib':
- linkargs.append('-soname')
- linkargs.append(libfilename(d))
- if len(rpaths):
- linkargs.append(rpaths)
- if len(linkargs):
- return ' '.join(['-Wl,' + arg for arg in linkargs])
- else:
- return ''
-
-get_overlap_externs () {
- externs=
- for dep in ${OVERLAP_DEPS}; do
- extern=$(ls ${STAGING_DIR_HOST}/${rustlibdir}/lib$dep-rs.{so,rlib} 2>/dev/null \
- | awk '{print $1}');
- if [ -n "$extern" ]; then
- externs="$externs --extern $dep=$extern"
- else
- echo "$dep in depends but no such library found in ${rustlibdir}!" >&2
- exit 1
- fi
- done
- echo "$externs"
-}
-
-do_configure () {
-}
-
-oe_runrustc () {
- export RUST_TARGET_PATH="${RUST_TARGET_PATH}"
- bbnote ${RUSTC} ${RUSTC_ARCHFLAGS} ${RUSTC_FLAGS} "$@"
- "${RUSTC}" ${RUSTC_ARCHFLAGS} ${RUSTC_FLAGS} "$@"
-}
-
-oe_compile_rust_lib () {
- rm -rf ${LIBNAME}.{rlib,so}
- local -a link_args
- if [ -n '${@link_args(d, False)}' ]; then
- link_args[0]='-C'
- link_args[1]='link-args=${@link_args(d, False)}'
- fi
- oe_runrustc $(get_overlap_externs) \
- "${link_args[@]}" \
- ${LIB_SRC} \
- -o ${@libfilename(d)} \
- --crate-name=${CRATE_NAME} --crate-type=${CRATE_TYPE} \
- "$@"
-}
-oe_compile_rust_lib[vardeps] += "get_overlap_externs"
-
-oe_compile_rust_bin () {
- rm -rf ${BINNAME}
- local -a link_args
- if [ -n '${@link_args(d, True)}' ]; then
- link_args[0]='-C'
- link_args[1]='link-args=${@link_args(d, True)}'
- fi
- oe_runrustc $(get_overlap_externs) \
- "${link_args[@]}" \
- ${BIN_SRC} -o ${BINNAME} "$@"
-}
-oe_compile_rust_bin[vardeps] += "get_overlap_externs"
-
-oe_install_rust_lib () {
- for lib in $(ls ${LIBNAME}.{so,rlib} 2>/dev/null); do
- echo Installing $lib
- install -D -m 755 $lib ${D}/${rustlibdest}/$lib
- done
-}
-
-oe_install_rust_bin () {
- echo Installing ${BINNAME}
- install -D -m 755 ${BINNAME} ${D}/${rustbindest}/${BINNAME}
-}
-
-do_rust_bin_fixups() {
- for f in `find ${PKGD} -name '*.so*'`; do
- echo "Strip rust note: $f"
- ${OBJCOPY} -R .note.rustc $f $f
- done
-}
-PACKAGE_PREPROCESS_FUNCS += "do_rust_bin_fixups"
-
diff --git a/meta/classes/rust-common.bbclass b/meta/classes/rust-common.bbclass
deleted file mode 100644
index cb811ac5da..0000000000
--- a/meta/classes/rust-common.bbclass
+++ /dev/null
@@ -1,189 +0,0 @@
-inherit python3native
-
-# Common variables used by all Rust builds
-export rustlibdir = "${libdir}/rust"
-FILES:${PN} += "${rustlibdir}/*.so"
-FILES:${PN}-dev += "${rustlibdir}/*.rlib ${rustlibdir}/*.rmeta"
-FILES:${PN}-dbg += "${rustlibdir}/.debug"
-
-RUSTLIB = "-L ${STAGING_LIBDIR}/rust"
-RUST_DEBUG_REMAP = "--remap-path-prefix=${WORKDIR}=/usr/src/debug/${PN}/${EXTENDPE}${PV}-${PR}"
-RUSTFLAGS += "${RUSTLIB} ${RUST_DEBUG_REMAP}"
-RUSTLIB_DEP ?= "libstd-rs"
-export RUST_TARGET_PATH = "${STAGING_LIBDIR_NATIVE}/rustlib"
-RUST_PANIC_STRATEGY ?= "unwind"
-
-# Native builds are not effected by TCLIBC. Without this, rust-native
-# thinks it's "target" (i.e. x86_64-linux) is a musl target.
-RUST_LIBC = "${TCLIBC}"
-RUST_LIBC:class-crosssdk = "glibc"
-RUST_LIBC:class-native = "glibc"
-
-def determine_libc(d, thing):
- '''Determine which libc something should target'''
-
- # BUILD is never musl, TARGET may be musl or glibc,
- # HOST could be musl, but only if a compiler is built to be run on
- # target in which case HOST_SYS != BUILD_SYS.
- if thing == 'TARGET':
- libc = d.getVar('RUST_LIBC')
- elif thing == 'BUILD' and (d.getVar('HOST_SYS') != d.getVar('BUILD_SYS')):
- libc = d.getVar('RUST_LIBC')
- else:
- libc = d.getVar('RUST_LIBC:class-native')
-
- return libc
-
-def target_is_armv7(d):
- '''Determine if target is armv7'''
- # TUNE_FEATURES may include arm* even if the target is not arm
- # in the case of *-native packages
- if d.getVar('TARGET_ARCH') != 'arm':
- return False
-
- feat = d.getVar('TUNE_FEATURES')
- feat = frozenset(feat.split())
- mach_overrides = d.getVar('MACHINEOVERRIDES')
- mach_overrides = frozenset(mach_overrides.split(':'))
-
- v7=frozenset(['armv7a', 'armv7r', 'armv7m', 'armv7ve'])
- if mach_overrides.isdisjoint(v7) and feat.isdisjoint(v7):
- return False
- else:
- return True
-target_is_armv7[vardepvalue] = "${@target_is_armv7(d)}"
-
-# Responsible for taking Yocto triples and converting it to Rust triples
-def rust_base_triple(d, thing):
- '''
- Mangle bitbake's *_SYS into something that rust might support (see
- rust/mk/cfg/* for a list)
-
- Note that os is assumed to be some linux form
- '''
-
- # The llvm-target for armv7 is armv7-unknown-linux-gnueabihf
- if thing == "TARGET" and target_is_armv7(d):
- arch = "armv7"
- else:
- arch = oe.rust.arch_to_rust_arch(d.getVar('{}_ARCH'.format(thing)))
-
- # All the Yocto targets are Linux and are 'unknown'
- vendor = "-unknown"
- os = d.getVar('{}_OS'.format(thing))
- libc = determine_libc(d, thing)
-
- # Prefix with a dash and convert glibc -> gnu
- if libc == "glibc":
- libc = "-gnu"
- elif libc == "musl":
- libc = "-musl"
-
- # Don't double up musl (only appears to be the case on aarch64)
- if os == "linux-musl":
- if libc != "-musl":
- bb.fatal("{}_OS was '{}' but TCLIBC was not 'musl'".format(thing, os))
- os = "linux"
-
- # This catches ARM targets and appends the necessary hard float bits
- if os == "linux-gnueabi" or os == "linux-musleabi":
- libc = bb.utils.contains('TUNE_FEATURES', 'callconvention-hard', 'hf', '', d)
- return arch + vendor + '-' + os + libc
-
-
-# In some cases uname and the toolchain differ on their idea of the arch name
-RUST_BUILD_ARCH = "${@oe.rust.arch_to_rust_arch(d.getVar('BUILD_ARCH'))}"
-
-# Naming explanation
-# Yocto
-# - BUILD_SYS - Yocto triple of the build environment
-# - HOST_SYS - What we're building for in Yocto
-# - TARGET_SYS - What we're building for in Yocto
-#
-# So when building '-native' packages BUILD_SYS == HOST_SYS == TARGET_SYS
-# When building packages for the image HOST_SYS == TARGET_SYS
-# This is a gross over simplification as there are other modes but
-# currently this is all that's supported.
-#
-# Rust
-# - TARGET - the system where the binary will run
-# - HOST - the system where the binary is being built
-#
-# Rust additionally will use two additional cases:
-# - undecorated (e.g. CC) - equivalent to TARGET
-# - triple suffix (e.g. CC:x86_64_unknown_linux_gnu) - both
-# see: https://github.com/alexcrichton/gcc-rs
-# The way that Rust's internal triples and Yocto triples are mapped together
-# its likely best to not use the triple suffix due to potential confusion.
-
-RUST_BUILD_SYS = "${@rust_base_triple(d, 'BUILD')}"
-RUST_BUILD_SYS[vardepvalue] = "${RUST_BUILD_SYS}"
-RUST_HOST_SYS = "${@rust_base_triple(d, 'HOST')}"
-RUST_HOST_SYS[vardepvalue] = "${RUST_HOST_SYS}"
-RUST_TARGET_SYS = "${@rust_base_triple(d, 'TARGET')}"
-RUST_TARGET_SYS[vardepvalue] = "${RUST_TARGET_SYS}"
-
-# wrappers to get around the fact that Rust needs a single
-# binary but Yocto's compiler and linker commands have
-# arguments. Technically the archiver is always one command but
-# this is necessary for builds that determine the prefix and then
-# use those commands based on the prefix.
-WRAPPER_DIR = "${WORKDIR}/wrapper"
-RUST_BUILD_CC = "${WRAPPER_DIR}/build-rust-cc"
-RUST_BUILD_CXX = "${WRAPPER_DIR}/build-rust-cxx"
-RUST_BUILD_CCLD = "${WRAPPER_DIR}/build-rust-ccld"
-RUST_BUILD_AR = "${WRAPPER_DIR}/build-rust-ar"
-RUST_TARGET_CC = "${WRAPPER_DIR}/target-rust-cc"
-RUST_TARGET_CXX = "${WRAPPER_DIR}/target-rust-cxx"
-RUST_TARGET_CCLD = "${WRAPPER_DIR}/target-rust-ccld"
-RUST_TARGET_AR = "${WRAPPER_DIR}/target-rust-ar"
-
-create_wrapper () {
- file="$1"
- shift
-
- cat <<- EOF > "${file}"
- #!/usr/bin/env python3
- import os, sys
- orig_binary = "$@"
- binary = orig_binary.split()[0]
- args = orig_binary.split() + sys.argv[1:]
- os.execvp(binary, args)
- EOF
- chmod +x "${file}"
-}
-
-export WRAPPER_TARGET_CC = "${CC}"
-export WRAPPER_TARGET_CXX = "${CXX}"
-export WRAPPER_TARGET_CCLD = "${CCLD}"
-export WRAPPER_TARGET_LDFLAGS = "${LDFLAGS}"
-export WRAPPER_TARGET_AR = "${AR}"
-
-# compiler is used by gcc-rs
-# linker is used by rustc/cargo
-# archiver is used by the build of libstd-rs
-do_rust_create_wrappers () {
- mkdir -p "${WRAPPER_DIR}"
-
- # Yocto Build / Rust Host C compiler
- create_wrapper "${RUST_BUILD_CC}" "${BUILD_CC}"
- # Yocto Build / Rust Host C++ compiler
- create_wrapper "${RUST_BUILD_CXX}" "${BUILD_CXX}"
- # Yocto Build / Rust Host linker
- create_wrapper "${RUST_BUILD_CCLD}" "${BUILD_CCLD}" "${BUILD_LDFLAGS}"
- # Yocto Build / Rust Host archiver
- create_wrapper "${RUST_BUILD_AR}" "${BUILD_AR}"
-
- # Yocto Target / Rust Target C compiler
- create_wrapper "${RUST_TARGET_CC}" "${WRAPPER_TARGET_CC}" "${WRAPPER_TARGET_LDFLAGS}"
- # Yocto Target / Rust Target C++ compiler
- create_wrapper "${RUST_TARGET_CXX}" "${WRAPPER_TARGET_CXX}"
- # Yocto Target / Rust Target linker
- create_wrapper "${RUST_TARGET_CCLD}" "${WRAPPER_TARGET_CCLD}" "${WRAPPER_TARGET_LDFLAGS}"
- # Yocto Target / Rust Target archiver
- create_wrapper "${RUST_TARGET_AR}" "${WRAPPER_TARGET_AR}"
-
-}
-
-addtask rust_create_wrappers before do_configure after do_patch do_prepare_recipe_sysroot
-do_rust_create_wrappers[dirs] += "${WRAPPER_DIR}"
diff --git a/meta/classes/scons.bbclass b/meta/classes/scons.bbclass
deleted file mode 100644
index 80f8382107..0000000000
--- a/meta/classes/scons.bbclass
+++ /dev/null
@@ -1,28 +0,0 @@
-inherit python3native
-
-DEPENDS += "python3-scons-native"
-
-EXTRA_OESCONS ?= ""
-
-do_configure() {
- if [ -n "${CONFIGURESTAMPFILE}" -a "${S}" = "${B}" ]; then
- if [ -e "${CONFIGURESTAMPFILE}" -a "`cat ${CONFIGURESTAMPFILE}`" != "${BB_TASKHASH}" -a "${CLEANBROKEN}" != "1" ]; then
- ${STAGING_BINDIR_NATIVE}/scons --directory=${S} --clean PREFIX=${prefix} prefix=${prefix} ${EXTRA_OESCONS}
- fi
-
- mkdir -p `dirname ${CONFIGURESTAMPFILE}`
- echo ${BB_TASKHASH} > ${CONFIGURESTAMPFILE}
- fi
-}
-
-scons_do_compile() {
- ${STAGING_BINDIR_NATIVE}/scons --directory=${S} ${PARALLEL_MAKE} PREFIX=${prefix} prefix=${prefix} ${EXTRA_OESCONS} || \
- die "scons build execution failed."
-}
-
-scons_do_install() {
- ${STAGING_BINDIR_NATIVE}/scons --directory=${S} install_root=${D}${prefix} PREFIX=${prefix} prefix=${prefix} ${EXTRA_OESCONS} install || \
- die "scons install execution failed."
-}
-
-EXPORT_FUNCTIONS do_compile do_install
diff --git a/meta/classes/setuptools3-base.bbclass b/meta/classes/setuptools3-base.bbclass
deleted file mode 100644
index 15abe1dd63..0000000000
--- a/meta/classes/setuptools3-base.bbclass
+++ /dev/null
@@ -1,31 +0,0 @@
-DEPENDS:append:class-target = " ${PYTHON_PN}-native ${PYTHON_PN}"
-DEPENDS:append:class-nativesdk = " ${PYTHON_PN}-native ${PYTHON_PN}"
-RDEPENDS:${PN}:append:class-target = " ${PYTHON_PN}-core"
-
-export STAGING_INCDIR
-export STAGING_LIBDIR
-
-# LDSHARED is the ld *command* used to create shared library
-export LDSHARED = "${CCLD} -shared"
-# LDXXSHARED is the ld *command* used to create shared library of C++
-# objects
-export LDCXXSHARED = "${CXX} -shared"
-# CCSHARED are the C *flags* used to create objects to go into a shared
-# library (module)
-export CCSHARED = "-fPIC -DPIC"
-# LINKFORSHARED are the flags passed to the $(CC) command that links
-# the python executable
-export LINKFORSHARED = "${SECURITY_CFLAGS} -Xlinker -export-dynamic"
-
-FILES:${PN} += "${libdir}/* ${libdir}/${PYTHON_DIR}/*"
-
-FILES:${PN}-staticdev += "\
- ${PYTHON_SITEPACKAGES_DIR}/*.a \
-"
-FILES:${PN}-dev += "\
- ${datadir}/pkgconfig \
- ${libdir}/pkgconfig \
- ${PYTHON_SITEPACKAGES_DIR}/*.la \
-"
-inherit python3native python3targetconfig
-
diff --git a/meta/classes/setuptools3.bbclass b/meta/classes/setuptools3.bbclass
deleted file mode 100644
index 556bc801af..0000000000
--- a/meta/classes/setuptools3.bbclass
+++ /dev/null
@@ -1,33 +0,0 @@
-inherit setuptools3-base python_pep517
-
-# bdist_wheel builds in ./dist
-#B = "${WORKDIR}/build"
-
-SETUPTOOLS_BUILD_ARGS ?= ""
-
-SETUPTOOLS_SETUP_PATH ?= "${S}"
-
-setuptools3_do_configure() {
- :
-}
-
-setuptools3_do_compile() {
- cd ${SETUPTOOLS_SETUP_PATH}
- NO_FETCH_BUILD=1 \
- STAGING_INCDIR=${STAGING_INCDIR} \
- STAGING_LIBDIR=${STAGING_LIBDIR} \
- ${STAGING_BINDIR_NATIVE}/${PYTHON_PN}-native/${PYTHON_PN} setup.py \
- bdist_wheel --verbose --dist-dir ${PEP517_WHEEL_PATH} ${SETUPTOOLS_BUILD_ARGS} || \
- bbfatal_log "'${PYTHON_PN} setup.py bdist_wheel ${SETUPTOOLS_BUILD_ARGS}' execution failed."
-}
-setuptools3_do_compile[vardepsexclude] = "MACHINE"
-do_compile[cleandirs] += "${PEP517_WHEEL_PATH}"
-
-setuptools3_do_install() {
- python_pep517_do_install
-}
-
-EXPORT_FUNCTIONS do_configure do_compile do_install
-
-export LDSHARED="${CCLD} -shared"
-DEPENDS += "python3-setuptools-native python3-wheel-native"
diff --git a/meta/classes/sign_ipk.bbclass b/meta/classes/sign_ipk.bbclass
index e5057b7799..51c24b38b2 100644
--- a/meta/classes/sign_ipk.bbclass
+++ b/meta/classes/sign_ipk.bbclass
@@ -1,3 +1,9 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
# Class for generating signed IPK packages.
#
# Configuration variables used by this class:
diff --git a/meta/classes/sign_package_feed.bbclass b/meta/classes/sign_package_feed.bbclass
index f1504c2225..e9d664750c 100644
--- a/meta/classes/sign_package_feed.bbclass
+++ b/meta/classes/sign_package_feed.bbclass
@@ -1,3 +1,9 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
# Class for signing package feeds
#
# Related configuration variables that will be used after this class is
diff --git a/meta/classes/sign_rpm.bbclass b/meta/classes/sign_rpm.bbclass
index 73a55a512d..ee0c4808fa 100644
--- a/meta/classes/sign_rpm.bbclass
+++ b/meta/classes/sign_rpm.bbclass
@@ -1,3 +1,9 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
# Class for generating signed RPM packages.
#
# Configuration variables used by this class:
diff --git a/meta/classes/siteconfig.bbclass b/meta/classes/siteconfig.bbclass
index 0cfa5a6834..953cafd285 100644
--- a/meta/classes/siteconfig.bbclass
+++ b/meta/classes/siteconfig.bbclass
@@ -1,3 +1,9 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
python siteconfig_do_siteconfig () {
shared_state = sstate_state_fromvars(d)
if shared_state['task'] != 'populate_sysroot':
diff --git a/meta/classes/terminal.bbclass b/meta/classes/terminal.bbclass
index a564ee7494..2dfc7db255 100644
--- a/meta/classes/terminal.bbclass
+++ b/meta/classes/terminal.bbclass
@@ -1,3 +1,9 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
OE_TERMINAL ?= 'auto'
OE_TERMINAL[type] = 'choice'
OE_TERMINAL[choices] = 'auto none \
diff --git a/meta/classes/toaster.bbclass b/meta/classes/toaster.bbclass
index f365c09142..03c4f3a930 100644
--- a/meta/classes/toaster.bbclass
+++ b/meta/classes/toaster.bbclass
@@ -3,7 +3,7 @@
#
# Copyright (C) 2013 Intel Corporation
#
-# Released under the MIT license (see COPYING.MIT)
+# SPDX-License-Identifier: MIT
#
# This bbclass is designed to extract data used by OE-Core during the build process,
# for recording in the Toaster system.
diff --git a/meta/classes/typecheck.bbclass b/meta/classes/typecheck.bbclass
index 72da932232..160f7a024b 100644
--- a/meta/classes/typecheck.bbclass
+++ b/meta/classes/typecheck.bbclass
@@ -1,3 +1,9 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
# Check types of bitbake configuration variables
#
# See oe.types for details.
diff --git a/meta/classes/uboot-sign.bbclass b/meta/classes/uboot-sign.bbclass
deleted file mode 100644
index 4ca8118eb2..0000000000
--- a/meta/classes/uboot-sign.bbclass
+++ /dev/null
@@ -1,494 +0,0 @@
-# This file is part of U-Boot verified boot support and is intended to be
-# inherited from u-boot recipe and from kernel-fitimage.bbclass.
-#
-# The signature procedure requires the user to generate an RSA key and
-# certificate in a directory and to define the following variable:
-#
-# UBOOT_SIGN_KEYDIR = "/keys/directory"
-# UBOOT_SIGN_KEYNAME = "dev" # keys name in keydir (eg. "dev.crt", "dev.key")
-# UBOOT_MKIMAGE_DTCOPTS = "-I dts -O dtb -p 2000"
-# UBOOT_SIGN_ENABLE = "1"
-#
-# As verified boot depends on fitImage generation, following is also required:
-#
-# KERNEL_CLASSES ?= " kernel-fitimage "
-# KERNEL_IMAGETYPE ?= "fitImage"
-#
-# The signature support is limited to the use of CONFIG_OF_SEPARATE in U-Boot.
-#
-# The tasks sequence is set as below, using DEPLOY_IMAGE_DIR as common place to
-# treat the device tree blob:
-#
-# * u-boot:do_install:append
-# Install UBOOT_DTB_BINARY to datadir, so that kernel can use it for
-# signing, and kernel will deploy UBOOT_DTB_BINARY after signs it.
-#
-# * virtual/kernel:do_assemble_fitimage
-# Sign the image
-#
-# * u-boot:do_deploy[postfuncs]
-# Deploy files like UBOOT_DTB_IMAGE, UBOOT_DTB_SYMLINK and others.
-#
-# For more details on signature process, please refer to U-Boot documentation.
-
-# We need some variables from u-boot-config
-inherit uboot-config
-
-# Enable use of a U-Boot fitImage
-UBOOT_FITIMAGE_ENABLE ?= "0"
-
-# Signature activation - these require their respective fitImages
-UBOOT_SIGN_ENABLE ?= "0"
-SPL_SIGN_ENABLE ?= "0"
-
-# Default value for deployment filenames.
-UBOOT_DTB_IMAGE ?= "u-boot-${MACHINE}-${PV}-${PR}.dtb"
-UBOOT_DTB_BINARY ?= "u-boot.dtb"
-UBOOT_DTB_SYMLINK ?= "u-boot-${MACHINE}.dtb"
-UBOOT_NODTB_IMAGE ?= "u-boot-nodtb-${MACHINE}-${PV}-${PR}.bin"
-UBOOT_NODTB_BINARY ?= "u-boot-nodtb.bin"
-UBOOT_NODTB_SYMLINK ?= "u-boot-nodtb-${MACHINE}.bin"
-UBOOT_ITS_IMAGE ?= "u-boot-its-${MACHINE}-${PV}-${PR}"
-UBOOT_ITS ?= "u-boot.its"
-UBOOT_ITS_SYMLINK ?= "u-boot-its-${MACHINE}"
-UBOOT_FITIMAGE_IMAGE ?= "u-boot-fitImage-${MACHINE}-${PV}-${PR}"
-UBOOT_FITIMAGE_BINARY ?= "u-boot-fitImage"
-UBOOT_FITIMAGE_SYMLINK ?= "u-boot-fitImage-${MACHINE}"
-SPL_DIR ?= "spl"
-SPL_DTB_IMAGE ?= "u-boot-spl-${MACHINE}-${PV}-${PR}.dtb"
-SPL_DTB_BINARY ?= "u-boot-spl.dtb"
-SPL_DTB_SYMLINK ?= "u-boot-spl-${MACHINE}.dtb"
-SPL_NODTB_IMAGE ?= "u-boot-spl-nodtb-${MACHINE}-${PV}-${PR}.bin"
-SPL_NODTB_BINARY ?= "u-boot-spl-nodtb.bin"
-SPL_NODTB_SYMLINK ?= "u-boot-spl-nodtb-${MACHINE}.bin"
-
-# U-Boot fitImage description
-UBOOT_FIT_DESC ?= "U-Boot fitImage for ${DISTRO_NAME}/${PV}/${MACHINE}"
-
-# Kernel / U-Boot fitImage Hash Algo
-FIT_HASH_ALG ?= "sha256"
-UBOOT_FIT_HASH_ALG ?= "sha256"
-
-# Kernel / U-Boot fitImage Signature Algo
-FIT_SIGN_ALG ?= "rsa2048"
-UBOOT_FIT_SIGN_ALG ?= "rsa2048"
-
-# Generate keys for signing Kernel / U-Boot fitImage
-FIT_GENERATE_KEYS ?= "0"
-UBOOT_FIT_GENERATE_KEYS ?= "0"
-
-# Size of private keys in number of bits
-FIT_SIGN_NUMBITS ?= "2048"
-UBOOT_FIT_SIGN_NUMBITS ?= "2048"
-
-# args to openssl genrsa (Default is just the public exponent)
-FIT_KEY_GENRSA_ARGS ?= "-F4"
-UBOOT_FIT_KEY_GENRSA_ARGS ?= "-F4"
-
-# args to openssl req (Default is -batch for non interactive mode and
-# -new for new certificate)
-FIT_KEY_REQ_ARGS ?= "-batch -new"
-UBOOT_FIT_KEY_REQ_ARGS ?= "-batch -new"
-
-# Standard format for public key certificate
-FIT_KEY_SIGN_PKCS ?= "-x509"
-UBOOT_FIT_KEY_SIGN_PKCS ?= "-x509"
-
-# Functions on this bbclass can apply to either U-boot or Kernel,
-# depending on the scenario
-UBOOT_PN = "${@d.getVar('PREFERRED_PROVIDER_u-boot') or 'u-boot'}"
-KERNEL_PN = "${@d.getVar('PREFERRED_PROVIDER_virtual/kernel')}"
-
-# We need u-boot-tools-native if we're creating a U-Boot fitImage
-python() {
- if d.getVar('UBOOT_FITIMAGE_ENABLE') == '1':
- depends = d.getVar("DEPENDS")
- depends = "%s u-boot-tools-native dtc-native" % depends
- d.setVar("DEPENDS", depends)
-}
-
-concat_dtb_helper() {
- if [ -e "${UBOOT_DTB_BINARY}" ]; then
- ln -sf ${UBOOT_DTB_IMAGE} ${DEPLOYDIR}/${UBOOT_DTB_BINARY}
- ln -sf ${UBOOT_DTB_IMAGE} ${DEPLOYDIR}/${UBOOT_DTB_SYMLINK}
- fi
-
- if [ -f "${UBOOT_NODTB_BINARY}" ]; then
- install ${UBOOT_NODTB_BINARY} ${DEPLOYDIR}/${UBOOT_NODTB_IMAGE}
- ln -sf ${UBOOT_NODTB_IMAGE} ${DEPLOYDIR}/${UBOOT_NODTB_SYMLINK}
- ln -sf ${UBOOT_NODTB_IMAGE} ${DEPLOYDIR}/${UBOOT_NODTB_BINARY}
- fi
-
- # If we're not using a signed u-boot fit, concatenate SPL w/o DTB & U-Boot DTB
- # with public key (otherwise it will be deployed by the equivalent
- # concat_spl_dtb_helper function - cf. kernel-fitimage.bbclass for more details)
- if [ "${SPL_SIGN_ENABLE}" != "1" ] ; then
- deployed_uboot_dtb_binary='${DEPLOY_DIR_IMAGE}/${UBOOT_DTB_IMAGE}'
- if [ "x${UBOOT_SUFFIX}" = "ximg" -o "x${UBOOT_SUFFIX}" = "xrom" ] && \
- [ -e "$deployed_uboot_dtb_binary" ]; then
- oe_runmake EXT_DTB=$deployed_uboot_dtb_binary
- install ${UBOOT_BINARY} ${DEPLOYDIR}/${UBOOT_IMAGE}
- elif [ -e "${DEPLOYDIR}/${UBOOT_NODTB_IMAGE}" -a -e "$deployed_uboot_dtb_binary" ]; then
- cd ${DEPLOYDIR}
- cat ${UBOOT_NODTB_IMAGE} $deployed_uboot_dtb_binary | tee ${B}/${CONFIG_B_PATH}/${UBOOT_BINARY} > ${UBOOT_IMAGE}
-
- if [ -n "${UBOOT_CONFIG}" ]
- then
- for config in ${UBOOT_MACHINE}; do
- i=$(expr $i + 1);
- for type in ${UBOOT_CONFIG}; do
- j=$(expr $j + 1);
- if [ $j -eq $i ]
- then
- cp ${UBOOT_IMAGE} ${B}/${CONFIG_B_PATH}/u-boot-$type.${UBOOT_SUFFIX}
- fi
- done
- done
- fi
- else
- bbwarn "Failure while adding public key to u-boot binary. Verified boot won't be available."
- fi
- fi
-}
-
-concat_spl_dtb_helper() {
-
- # We only deploy symlinks to the u-boot-spl.dtb,as the KERNEL_PN will
- # be responsible for deploying the real file
- if [ -e "${SPL_DIR}/${SPL_DTB_BINARY}" ] ; then
- ln -sf ${SPL_DTB_IMAGE} ${DEPLOYDIR}/${SPL_DTB_SYMLINK}
- ln -sf ${SPL_DTB_IMAGE} ${DEPLOYDIR}/${SPL_DTB_BINARY}
- fi
-
- # Concatenate the SPL nodtb binary and u-boot.dtb
- deployed_spl_dtb_binary='${DEPLOY_DIR_IMAGE}/${SPL_DTB_IMAGE}'
- if [ -e "${DEPLOYDIR}/${SPL_NODTB_IMAGE}" -a -e "$deployed_spl_dtb_binary" ] ; then
- cd ${DEPLOYDIR}
- cat ${SPL_NODTB_IMAGE} $deployed_spl_dtb_binary | tee ${B}/${CONFIG_B_PATH}/${SPL_BINARY} > ${SPL_IMAGE}
- else
- bbwarn "Failure while adding public key to spl binary. Verified U-Boot boot won't be available."
- fi
-}
-
-
-concat_dtb() {
- if [ "${UBOOT_SIGN_ENABLE}" = "1" -a "${PN}" = "${UBOOT_PN}" -a -n "${UBOOT_DTB_BINARY}" ]; then
- mkdir -p ${DEPLOYDIR}
- if [ -n "${UBOOT_CONFIG}" ]; then
- for config in ${UBOOT_MACHINE}; do
- CONFIG_B_PATH="$config"
- cd ${B}/$config
- concat_dtb_helper
- done
- else
- CONFIG_B_PATH=""
- cd ${B}
- concat_dtb_helper
- fi
- fi
-}
-
-concat_spl_dtb() {
- if [ "${SPL_SIGN_ENABLE}" = "1" -a "${PN}" = "${UBOOT_PN}" -a -n "${SPL_DTB_BINARY}" ]; then
- mkdir -p ${DEPLOYDIR}
- if [ -n "${UBOOT_CONFIG}" ]; then
- for config in ${UBOOT_MACHINE}; do
- CONFIG_B_PATH="$config"
- cd ${B}/$config
- concat_spl_dtb_helper
- done
- else
- CONFIG_B_PATH=""
- cd ${B}
- concat_spl_dtb_helper
- fi
- fi
-}
-
-
-# Install UBOOT_DTB_BINARY to datadir, so that kernel can use it for
-# signing, and kernel will deploy UBOOT_DTB_BINARY after signs it.
-install_helper() {
- if [ -f "${UBOOT_DTB_BINARY}" ]; then
- # UBOOT_DTB_BINARY is a symlink to UBOOT_DTB_IMAGE, so we
- # need both of them.
- install -Dm 0644 ${UBOOT_DTB_BINARY} ${D}${datadir}/${UBOOT_DTB_IMAGE}
- ln -sf ${UBOOT_DTB_IMAGE} ${D}${datadir}/${UBOOT_DTB_BINARY}
- else
- bbwarn "${UBOOT_DTB_BINARY} not found"
- fi
-}
-
-# Install SPL dtb and u-boot nodtb to datadir,
-install_spl_helper() {
- if [ -f "${SPL_DIR}/${SPL_DTB_BINARY}" ]; then
- install -Dm 0644 ${SPL_DIR}/${SPL_DTB_BINARY} ${D}${datadir}/${SPL_DTB_IMAGE}
- ln -sf ${SPL_DTB_IMAGE} ${D}${datadir}/${SPL_DTB_BINARY}
- else
- bbwarn "${SPL_DTB_BINARY} not found"
- fi
- if [ -f "${UBOOT_NODTB_BINARY}" ] ; then
- install -Dm 0644 ${UBOOT_NODTB_BINARY} ${D}${datadir}/${UBOOT_NODTB_IMAGE}
- ln -sf ${UBOOT_NODTB_IMAGE} ${D}${datadir}/${UBOOT_NODTB_BINARY}
- else
- bbwarn "${UBOOT_NODTB_BINARY} not found"
- fi
-
- # We need to install a 'stub' u-boot-fitimage + its to datadir,
- # so that the KERNEL_PN can use the correct filename when
- # assembling and deploying them
- touch ${D}/${datadir}/${UBOOT_FITIMAGE_IMAGE}
- touch ${D}/${datadir}/${UBOOT_ITS_IMAGE}
-}
-
-do_install:append() {
- if [ "${PN}" = "${UBOOT_PN}" ]; then
- if [ -n "${UBOOT_CONFIG}" ]; then
- for config in ${UBOOT_MACHINE}; do
- cd ${B}/$config
- if [ "${UBOOT_SIGN_ENABLE}" = "1" -o "${UBOOT_FITIMAGE_ENABLE}" = "1" ] && \
- [ -n "${UBOOT_DTB_BINARY}" ]; then
- install_helper
- fi
- if [ "${UBOOT_FITIMAGE_ENABLE}" = "1" -a -n "${SPL_DTB_BINARY}" ]; then
- install_spl_helper
- fi
- done
- else
- cd ${B}
- if [ "${UBOOT_SIGN_ENABLE}" = "1" -o "${UBOOT_FITIMAGE_ENABLE}" = "1" ] && \
- [ -n "${UBOOT_DTB_BINARY}" ]; then
- install_helper
- fi
- if [ "${UBOOT_FITIMAGE_ENABLE}" = "1" -a -n "${SPL_DTB_BINARY}" ]; then
- install_spl_helper
- fi
- fi
- fi
-}
-
-do_uboot_generate_rsa_keys() {
- if [ "${SPL_SIGN_ENABLE}" = "0" ] && [ "${UBOOT_FIT_GENERATE_KEYS}" = "1" ]; then
- bbwarn "UBOOT_FIT_GENERATE_KEYS is set to 1 eventhough SPL_SIGN_ENABLE is set to 0. The keys will not be generated as they won't be used."
- fi
-
- if [ "${SPL_SIGN_ENABLE}" = "1" ] && [ "${UBOOT_FIT_GENERATE_KEYS}" = "1" ]; then
-
- # Generate keys only if they don't already exist
- if [ ! -f "${SPL_SIGN_KEYDIR}/${SPL_SIGN_KEYNAME}".key ] || \
- [ ! -f "${SPL_SIGN_KEYDIR}/${SPL_SIGN_KEYNAME}".crt ]; then
-
- # make directory if it does not already exist
- mkdir -p "${SPL_SIGN_KEYDIR}"
-
- echo "Generating RSA private key for signing U-Boot fitImage"
- openssl genrsa ${UBOOT_FIT_KEY_GENRSA_ARGS} -out \
- "${SPL_SIGN_KEYDIR}/${SPL_SIGN_KEYNAME}".key \
- "${UBOOT_FIT_SIGN_NUMBITS}"
-
- echo "Generating certificate for signing U-Boot fitImage"
- openssl req ${FIT_KEY_REQ_ARGS} "${UBOOT_FIT_KEY_SIGN_PKCS}" \
- -key "${SPL_SIGN_KEYDIR}/${SPL_SIGN_KEYNAME}".key \
- -out "${SPL_SIGN_KEYDIR}/${SPL_SIGN_KEYNAME}".crt
- fi
- fi
-
-}
-
-addtask uboot_generate_rsa_keys before do_uboot_assemble_fitimage after do_compile
-
-# Create a ITS file for the U-boot FIT, for use when
-# we want to sign it so that the SPL can verify it
-uboot_fitimage_assemble() {
- uboot_its="$1"
- uboot_nodtb_bin="$2"
- uboot_dtb="$3"
- uboot_bin="$4"
- spl_dtb="$5"
- uboot_csum="${UBOOT_FIT_HASH_ALG}"
- uboot_sign_algo="${UBOOT_FIT_SIGN_ALG}"
- uboot_sign_keyname="${SPL_SIGN_KEYNAME}"
-
- rm -f $uboot_its $uboot_bin
-
- # First we create the ITS script
- cat << EOF >> $uboot_its
-/dts-v1/;
-
-/ {
- description = "${UBOOT_FIT_DESC}";
- #address-cells = <1>;
-
- images {
- uboot {
- description = "U-Boot image";
- data = /incbin/("$uboot_nodtb_bin");
- type = "standalone";
- os = "u-boot";
- arch = "${UBOOT_ARCH}";
- compression = "none";
- load = <${UBOOT_LOADADDRESS}>;
- entry = <${UBOOT_ENTRYPOINT}>;
-EOF
-
- if [ "${SPL_SIGN_ENABLE}" = "1" ] ; then
- cat << EOF >> $uboot_its
- signature {
- algo = "$uboot_csum,$uboot_sign_algo";
- key-name-hint = "$uboot_sign_keyname";
- };
-EOF
- fi
-
- cat << EOF >> $uboot_its
- };
- fdt {
- description = "U-Boot FDT";
- data = /incbin/("$uboot_dtb");
- type = "flat_dt";
- arch = "${UBOOT_ARCH}";
- compression = "none";
-EOF
-
- if [ "${SPL_SIGN_ENABLE}" = "1" ] ; then
- cat << EOF >> $uboot_its
- signature {
- algo = "$uboot_csum,$uboot_sign_algo";
- key-name-hint = "$uboot_sign_keyname";
- };
-EOF
- fi
-
- cat << EOF >> $uboot_its
- };
- };
-
- configurations {
- default = "conf";
- conf {
- description = "Boot with signed U-Boot FIT";
- loadables = "uboot";
- fdt = "fdt";
- };
- };
-};
-EOF
-
- #
- # Assemble the U-boot FIT image
- #
- ${UBOOT_MKIMAGE} \
- ${@'-D "${SPL_MKIMAGE_DTCOPTS}"' if len('${SPL_MKIMAGE_DTCOPTS}') else ''} \
- -f $uboot_its \
- $uboot_bin
-
- if [ "${SPL_SIGN_ENABLE}" = "1" ] ; then
- #
- # Sign the U-boot FIT image and add public key to SPL dtb
- #
- ${UBOOT_MKIMAGE_SIGN} \
- ${@'-D "${SPL_MKIMAGE_DTCOPTS}"' if len('${SPL_MKIMAGE_DTCOPTS}') else ''} \
- -F -k "${SPL_SIGN_KEYDIR}" \
- -K "$spl_dtb" \
- -r $uboot_bin \
- ${SPL_MKIMAGE_SIGN_ARGS}
- fi
-
-}
-
-do_uboot_assemble_fitimage() {
- # This function runs in KERNEL_PN context. The reason for that is that we need to
- # support the scenario where UBOOT_SIGN_ENABLE is placing the Kernel fitImage's
- # pubkey in the u-boot.dtb file, so that we can use it when building the U-Boot
- # fitImage itself.
- if [ "${UBOOT_FITIMAGE_ENABLE}" = "1" ] && \
- [ -n "${SPL_DTB_BINARY}" -a "${PN}" = "${KERNEL_PN}" ] ; then
- if [ "${UBOOT_SIGN_ENABLE}" != "1" ]; then
- # If we're not signing the Kernel fitImage, that means
- # we need to copy the u-boot.dtb from staging ourselves
- cp -P ${STAGING_DATADIR}/u-boot*.dtb ${B}
- fi
- # As we are in the kernel context, we need to copy u-boot-spl.dtb from staging first.
- # Unfortunately, need to glob on top of ${SPL_DTB_BINARY} since _IMAGE and _SYMLINK
- # will contain U-boot's PV
- # Similarly, we need to get the filename for the 'stub' u-boot-fitimage + its in
- # staging so that we can use it for creating the image with the correct filename
- # in the KERNEL_PN context.
- # As for the u-boot.dtb (with fitimage's pubkey), it should come from the dependent
- # do_assemble_fitimage task
- cp -P ${STAGING_DATADIR}/u-boot-spl*.dtb ${B}
- cp -P ${STAGING_DATADIR}/u-boot-nodtb*.bin ${B}
- rm -rf ${B}/u-boot-fitImage-* ${B}/u-boot-its-*
- kernel_uboot_fitimage_name=`basename ${STAGING_DATADIR}/u-boot-fitImage-*`
- kernel_uboot_its_name=`basename ${STAGING_DATADIR}/u-boot-its-*`
- cd ${B}
- uboot_fitimage_assemble $kernel_uboot_its_name ${UBOOT_NODTB_BINARY} \
- ${UBOOT_DTB_BINARY} $kernel_uboot_fitimage_name \
- ${SPL_DTB_BINARY}
- fi
-}
-
-addtask uboot_assemble_fitimage before do_deploy after do_compile
-
-do_deploy:prepend:pn-${UBOOT_PN}() {
- if [ "${UBOOT_SIGN_ENABLE}" = "1" -a -n "${UBOOT_DTB_BINARY}" ] ; then
- concat_dtb
- fi
-
- if [ "${UBOOT_FITIMAGE_ENABLE}" = "1" ] ; then
- # Deploy the u-boot-nodtb binary and symlinks...
- if [ -f "${SPL_DIR}/${SPL_NODTB_BINARY}" ] ; then
- echo "Copying u-boot-nodtb binary..."
- install -m 0644 ${SPL_DIR}/${SPL_NODTB_BINARY} ${DEPLOYDIR}/${SPL_NODTB_IMAGE}
- ln -sf ${SPL_NODTB_IMAGE} ${DEPLOYDIR}/${SPL_NODTB_SYMLINK}
- ln -sf ${SPL_NODTB_IMAGE} ${DEPLOYDIR}/${SPL_NODTB_BINARY}
- fi
-
-
- # We only deploy the symlinks to the uboot-fitImage and uboot-its
- # images, as the KERNEL_PN will take care of deploying the real file
- ln -sf ${UBOOT_FITIMAGE_IMAGE} ${DEPLOYDIR}/${UBOOT_FITIMAGE_BINARY}
- ln -sf ${UBOOT_FITIMAGE_IMAGE} ${DEPLOYDIR}/${UBOOT_FITIMAGE_SYMLINK}
- ln -sf ${UBOOT_ITS_IMAGE} ${DEPLOYDIR}/${UBOOT_ITS}
- ln -sf ${UBOOT_ITS_IMAGE} ${DEPLOYDIR}/${UBOOT_ITS_SYMLINK}
- fi
-
- if [ "${SPL_SIGN_ENABLE}" = "1" -a -n "${SPL_DTB_BINARY}" ] ; then
- concat_spl_dtb
- fi
-
-
-}
-
-do_deploy:append:pn-${UBOOT_PN}() {
- # If we're creating a u-boot fitImage, point u-boot.bin
- # symlink since it might get used by image recipes
- if [ "${UBOOT_FITIMAGE_ENABLE}" = "1" ] ; then
- ln -sf ${UBOOT_FITIMAGE_IMAGE} ${DEPLOYDIR}/${UBOOT_BINARY}
- ln -sf ${UBOOT_FITIMAGE_IMAGE} ${DEPLOYDIR}/${UBOOT_SYMLINK}
- fi
-}
-
-python () {
- if ( (d.getVar('UBOOT_SIGN_ENABLE') == '1'
- or d.getVar('UBOOT_FITIMAGE_ENABLE') == '1')
- and d.getVar('PN') == d.getVar('UBOOT_PN')
- and d.getVar('UBOOT_DTB_BINARY')):
-
- # Make "bitbake u-boot -cdeploy" deploys the signed u-boot.dtb
- # and/or the U-Boot fitImage
- d.appendVarFlag('do_deploy', 'depends', ' %s:do_deploy' % d.getVar('KERNEL_PN'))
-
- if d.getVar('UBOOT_FITIMAGE_ENABLE') == '1' and d.getVar('PN') == d.getVar('KERNEL_PN'):
- # As the U-Boot fitImage is created by the KERNEL_PN, we need
- # to make sure that the u-boot-spl.dtb and u-boot-spl-nodtb.bin
- # files are in the staging dir for it's use
- d.appendVarFlag('do_uboot_assemble_fitimage', 'depends', ' %s:do_populate_sysroot' % d.getVar('UBOOT_PN'))
-
- # If the Kernel fitImage is being signed, we need to
- # create the U-Boot fitImage after it
- if d.getVar('UBOOT_SIGN_ENABLE') == '1':
- d.appendVarFlag('do_uboot_assemble_fitimage', 'depends', ' %s:do_assemble_fitimage' % d.getVar('KERNEL_PN'))
- d.appendVarFlag('do_uboot_assemble_fitimage', 'depends', ' %s:do_assemble_fitimage_initramfs' % d.getVar('KERNEL_PN'))
-
-}
diff --git a/meta/classes/useradd-staticids.bbclass b/meta/classes/useradd-staticids.bbclass
index 3acf59cd46..1dbcba2bf1 100644
--- a/meta/classes/useradd-staticids.bbclass
+++ b/meta/classes/useradd-staticids.bbclass
@@ -1,3 +1,9 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
# In order to support a deterministic set of 'dynamic' users/groups,
# we need a function to reformat the params based on a static file
def update_useradd_static_config(d):
@@ -41,7 +47,7 @@ def update_useradd_static_config(d):
def handle_missing_id(id, type, pkg, files, var, value):
# For backwards compatibility we accept "1" in addition to "error"
error_dynamic = d.getVar('USERADD_ERROR_DYNAMIC')
- msg = "%s - %s: %sname %s does not have a static ID defined." % (d.getVar('PN'), pkg, type, id)
+ msg = 'Recipe %s, package %s: %sname "%s" does not have a static ID defined.' % (d.getVar('PN'), pkg, type, id)
if files:
msg += " Add %s to one of these files: %s" % (id, files)
else:
diff --git a/meta/classes/useradd.bbclass b/meta/classes/useradd.bbclass
index 20771a0ce5..16a65ac323 100644
--- a/meta/classes/useradd.bbclass
+++ b/meta/classes/useradd.bbclass
@@ -1,3 +1,9 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
inherit useradd_base
# base-passwd-cross provides the default passwd and group files in the
@@ -97,6 +103,18 @@ fi
}
useradd_sysroot () {
+ user_group_groupmems_add_sysroot user
+}
+
+groupadd_sysroot () {
+ user_group_groupmems_add_sysroot group
+}
+
+groupmemsadd_sysroot () {
+ user_group_groupmems_add_sysroot groupmems
+}
+
+user_group_groupmems_add_sysroot () {
# Pseudo may (do_prepare_recipe_sysroot) or may not (do_populate_sysroot_setscene) be running
# at this point so we're explicit about the environment so pseudo can load if
# not already present.
@@ -125,9 +143,15 @@ useradd_sysroot () {
fi
# Add groups and users defined for all recipe packages
- GROUPADD_PARAM="${@get_all_cmd_params(d, 'groupadd')}"
- USERADD_PARAM="${@get_all_cmd_params(d, 'useradd')}"
- GROUPMEMS_PARAM="${@get_all_cmd_params(d, 'groupmems')}"
+ if test "$1" = "group"; then
+ GROUPADD_PARAM="${@get_all_cmd_params(d, 'groupadd')}"
+ elif test "$1" = "user"; then
+ USERADD_PARAM="${@get_all_cmd_params(d, 'useradd')}"
+ elif test "$1" = "groupmems"; then
+ GROUPMEMS_PARAM="${@get_all_cmd_params(d, 'groupmems')}"
+ elif test "x$1" = "x"; then
+ bbwarn "missing type of passwd db action"
+ fi
# Tell the system to use the environment vars
UA_SYSROOT=1
@@ -142,25 +166,30 @@ useradd_sysroot () {
EXTRA_STAGING_FIXMES += "PSEUDO_SYSROOT PSEUDO_LOCALSTATEDIR LOGFIFO"
python useradd_sysroot_sstate () {
- scriptfile = None
- task = d.getVar("BB_CURRENTTASK")
- if task == "package_setscene":
- bb.build.exec_func("useradd_sysroot", d)
- elif task == "prepare_recipe_sysroot":
- # Used to update this recipe's own sysroot so the user/groups are available to do_install
- scriptfile = d.expand("${RECIPE_SYSROOT}${bindir}/postinst-useradd-${PN}")
- bb.build.exec_func("useradd_sysroot", d)
- elif task == "populate_sysroot":
- # Used when installed in dependent task sysroots
- scriptfile = d.expand("${SYSROOT_DESTDIR}${bindir}/postinst-useradd-${PN}")
-
- if scriptfile:
- bb.utils.mkdirhier(os.path.dirname(scriptfile))
- with open(scriptfile, 'w') as script:
- script.write("#!/bin/sh\n")
- bb.data.emit_func("useradd_sysroot", script, d)
- script.write("useradd_sysroot\n")
- os.chmod(scriptfile, 0o755)
+ for type, sort_prefix in [("group", "01"), ("user", "02"), ("groupmems", "03")]:
+ scriptfile = None
+ task = d.getVar("BB_CURRENTTASK")
+ if task == "package_setscene":
+ bb.build.exec_func(type + "add_sysroot", d)
+ elif task == "prepare_recipe_sysroot":
+ # Used to update this recipe's own sysroot so the user/groups are available to do_install
+
+ # If do_populate_sysroot is triggered and we write the file here, there would be an overlapping
+ # files. See usergrouptests.UserGroupTests.test_add_task_between_p_sysroot_and_package
+ scriptfile = d.expand("${RECIPE_SYSROOT}${bindir}/postinst-useradd-" + sort_prefix + type + "-${PN}-recipedebug")
+
+ bb.build.exec_func(type + "add_sysroot", d)
+ elif task == "populate_sysroot":
+ # Used when installed in dependent task sysroots
+ scriptfile = d.expand("${SYSROOT_DESTDIR}${bindir}/postinst-useradd-" + sort_prefix + type + "-${PN}")
+
+ if scriptfile:
+ bb.utils.mkdirhier(os.path.dirname(scriptfile))
+ with open(scriptfile, 'w') as script:
+ script.write("#!/bin/sh -e\n")
+ bb.data.emit_func(type + "add_sysroot", script, d)
+ script.write(type + "add_sysroot\n")
+ os.chmod(scriptfile, 0o755)
}
do_prepare_recipe_sysroot[postfuncs] += "${SYSROOTFUNC}"
@@ -171,9 +200,11 @@ SYSROOT_PREPROCESS_FUNCS += "${SYSROOTFUNC}"
SSTATEPREINSTFUNCS:append:class-target = " useradd_sysroot_sstate"
+USERADD_DEPENDS ??= ""
+DEPENDS += "${USERADD_DEPENDS}"
do_package_setscene[depends] += "${USERADDSETSCENEDEPS}"
do_populate_sysroot_setscene[depends] += "${USERADDSETSCENEDEPS}"
-USERADDSETSCENEDEPS:class-target = "${MLPREFIX}base-passwd:do_populate_sysroot_setscene pseudo-native:do_populate_sysroot_setscene shadow-native:do_populate_sysroot_setscene ${MLPREFIX}shadow-sysroot:do_populate_sysroot_setscene"
+USERADDSETSCENEDEPS:class-target = "${MLPREFIX}base-passwd:do_populate_sysroot_setscene pseudo-native:do_populate_sysroot_setscene shadow-native:do_populate_sysroot_setscene ${MLPREFIX}shadow-sysroot:do_populate_sysroot_setscene ${@' '.join(['%s:do_populate_sysroot_setscene' % pkg for pkg in d.getVar("USERADD_DEPENDS").split()])}"
USERADDSETSCENEDEPS = ""
# Recipe parse-time sanity checks
@@ -184,7 +215,7 @@ def update_useradd_after_parse(d):
bb.fatal("%s inherits useradd but doesn't set USERADD_PACKAGES" % d.getVar('FILE', False))
for pkg in useradd_packages.split():
- d.appendVarFlag("do_populate_sysroot", "vardeps", "USERADD_PARAM:%s GROUPADD_PARAM:%s GROUPMEMS_PARAM:%s" % (pkg, pkg, pkg))
+ d.appendVarFlag("do_populate_sysroot", "vardeps", " USERADD_PARAM:%s GROUPADD_PARAM:%s GROUPMEMS_PARAM:%s" % (pkg, pkg, pkg))
if not d.getVar('USERADD_PARAM:%s' % pkg) and not d.getVar('GROUPADD_PARAM:%s' % pkg) and not d.getVar('GROUPMEMS_PARAM:%s' % pkg):
bb.fatal("%s inherits useradd but doesn't set USERADD_PARAM, GROUPADD_PARAM or GROUPMEMS_PARAM for package %s" % (d.getVar('FILE', False), pkg))
@@ -256,4 +287,4 @@ fakeroot python populate_packages:prepend () {
# Use the following to extend the useradd with custom functions
USERADDEXTENSION ?= ""
-inherit ${USERADDEXTENSION}
+inherit_defer ${USERADDEXTENSION}
diff --git a/meta/classes/useradd_base.bbclass b/meta/classes/useradd_base.bbclass
index 7f5b9b7219..5e1c699118 100644
--- a/meta/classes/useradd_base.bbclass
+++ b/meta/classes/useradd_base.bbclass
@@ -1,3 +1,9 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
# This bbclass provides basic functionality for user/group settings.
# This bbclass is intended to be inherited by useradd.bbclass and
# extrausers.bbclass.
@@ -154,7 +160,7 @@ perform_passwd_expire () {
local username=`echo "$opts" | awk '{ print $NF }'`
local user_exists="`grep "^$username:" $rootdir/etc/passwd || true`"
if test "x$user_exists" != "x"; then
- eval flock -x $rootdir${sysconfdir} -c \"$PSEUDO sed -i \''s/^\('$username':[^:]*\):[^:]*:/\1:0:/'\' $rootdir/etc/shadow \" || true
+ eval flock -x $rootdir${sysconfdir} -c \"$PSEUDO sed --follow-symlinks -i \''s/^\('$username':[^:]*\):[^:]*:/\1:0:/'\' $rootdir/etc/shadow \" || true
local passwd_lastchanged="`grep "^$username:" $rootdir/etc/shadow | cut -d: -f3`"
if test "x$passwd_lastchanged" != "x0"; then
bbfatal "${PN}: passwd --expire operation did not succeed."
diff --git a/meta/classes/xmlcatalog.bbclass b/meta/classes/xmlcatalog.bbclass
deleted file mode 100644
index be155b7bc2..0000000000
--- a/meta/classes/xmlcatalog.bbclass
+++ /dev/null
@@ -1,26 +0,0 @@
-DEPENDS = "libxml2-native"
-
-# A whitespace-separated list of XML catalogs to be registered, for example
-# "${sysconfdir}/xml/docbook-xml.xml".
-XMLCATALOGS ?= ""
-
-SYSROOT_PREPROCESS_FUNCS:append = " xmlcatalog_sstate_postinst"
-
-xmlcatalog_complete() {
- ROOTCATALOG="${STAGING_ETCDIR_NATIVE}/xml/catalog"
- if [ ! -f $ROOTCATALOG ]; then
- mkdir --parents $(dirname $ROOTCATALOG)
- xmlcatalog --noout --create $ROOTCATALOG
- fi
- for CATALOG in ${XMLCATALOGS}; do
- xmlcatalog --noout --add nextCatalog unused file://$CATALOG $ROOTCATALOG
- done
-}
-
-xmlcatalog_sstate_postinst() {
- mkdir -p ${SYSROOT_DESTDIR}${bindir}
- dest=${SYSROOT_DESTDIR}${bindir}/postinst-${PN}-xmlcatalog
- echo '#!/bin/sh' > $dest
- echo '${xmlcatalog_complete}' >> $dest
- chmod 0755 $dest
-}
diff --git a/meta/classes/yocto-check-layer.bbclass b/meta/classes/yocto-check-layer.bbclass
index 329d3f8edb..404f5fd9f2 100644
--- a/meta/classes/yocto-check-layer.bbclass
+++ b/meta/classes/yocto-check-layer.bbclass
@@ -1,4 +1,10 @@
#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+#
# This class is used by yocto-check-layer script for additional per-recipe tests
# The first test ensures that the layer has no recipes skipping 'installed-vs-shipped' QA checks
#
diff --git a/meta/conf/abi_version.conf b/meta/conf/abi_version.conf
index c716bd7feb..13c2c45b42 100644
--- a/meta/conf/abi_version.conf
+++ b/meta/conf/abi_version.conf
@@ -12,4 +12,4 @@ OELAYOUT_ABI = "15"
# a reset of the equivalence, for example when reproducibility issues break the
# existing match data. Distros can also append to this value for the same effect.
#
-HASHEQUIV_HASH_VERSION = "14"
+HASHEQUIV_HASH_VERSION = "16"
diff --git a/meta/conf/bitbake.conf b/meta/conf/bitbake.conf
index 0e939aca4f..ba8bd5f975 100644
--- a/meta/conf/bitbake.conf
+++ b/meta/conf/bitbake.conf
@@ -31,6 +31,7 @@ export sysconfdir = "${base_prefix}/etc"
export servicedir = "${base_prefix}/srv"
export sharedstatedir = "${base_prefix}/com"
export localstatedir = "${base_prefix}/var"
+runtimedir = "${base_prefix}/run"
export datadir = "${prefix}/share"
export infodir = "${datadir}/info"
export mandir = "${datadir}/man"
@@ -46,7 +47,6 @@ export sbindir = "${exec_prefix}/sbin"
export libdir = "${exec_prefix}/${baselib}"
export libexecdir = "${exec_prefix}/libexec"
export includedir = "${exec_prefix}/include"
-export oldincludedir = "${exec_prefix}/include"
localedir = "${libdir}/locale"
# Linkage between native/cross/nativesdk layouts
@@ -90,6 +90,10 @@ ROOT_HOME ??= "/home/root"
# If set to boolean false ('no', 'n', 'false', 'f', '0'), /var/log is on persistent storage.
VOLATILE_LOG_DIR ?= "yes"
+# if set to 'yes': /tmp links to /var/tmp which links to /var/volatile/tmp
+# otherwise: /tmp is on persistent storage
+VOLATILE_TMP_DIR ?= "yes"
+
BB_RENAMED_VARIABLES[PNBLACKLIST] = "SKIP_RECIPE"
BB_RENAMED_VARIABLES[CVE_CHECK_PN_WHITELIST] = "CVE_CHECK_SKIP_RECIPE"
BB_RENAMED_VARIABLES[CVE_CHECK_WHITELIST] = "CVE_CHECK_IGNORE"
@@ -233,7 +237,7 @@ ASSUME_PROVIDED = "\
PN = "${@bb.parse.vars_from_file(d.getVar('FILE', False),d)[0] or 'defaultpkgname'}"
PV = "${@bb.parse.vars_from_file(d.getVar('FILE', False),d)[1] or '1.0'}"
-PR = "${@bb.parse.vars_from_file(d.getVar('FILE', False),d)[2] or 'r0'}"
+PR = "r0"
PE = ""
PF = "${PN}-${EXTENDPE}${PV}-${PR}"
EXTENDPE = "${@['','${PE}_'][int(d.getVar('PE') or 0) > 0]}"
@@ -349,7 +353,8 @@ FILES:${PN}-dev = "${includedir} ${FILES_SOLIBSDEV} ${libdir}/*.la \
${libdir}/cmake ${datadir}/cmake"
SECTION:${PN}-dev = "devel"
ALLOW_EMPTY:${PN}-dev = "1"
-RDEPENDS:${PN}-dev = "${PN} (= ${EXTENDPKGV})"
+DEV_PKG_DEPENDENCY = "${PN} (= ${EXTENDPKGV})"
+RRECOMMENDS:${PN}-dev = "${DEV_PKG_DEPENDENCY}"
FILES:${PN}-staticdev = "${libdir}/*.a ${base_libdir}/*.a ${libdir}/${BPN}/*.a"
SECTION:${PN}-staticdev = "devel"
@@ -378,28 +383,28 @@ FILESEXTRAPATHS ?= "__default:"
# the builtin definitions will be used. Builtin definitions included:
# base_prefix, prefix, exec_prefix, base_bindir, base_sbindir, base_libdir,
# datadir, sysconfdir, servicedir, sharedstatedir, localstatedir, infodir,
-# mandir, docdir, bindir, sbindir, libexecdir, libdir, includedir and
-# oldincludedir
+# mandir, docdir, bindir, sbindir, libexecdir, libdir and includedir
FILESYSTEM_PERMS_TABLES ?= "${@'files/fs-perms.txt' if oe.types.boolean(d.getVar('VOLATILE_LOG_DIR')) else 'files/fs-perms-persistent-log.txt'}"
##################################################################
# General work and output directories for the build system.
##################################################################
-TCMODE ?= "default"
-TCLIBC ?= "glibc"
+TCMODE ??= "default"
+TCLIBC ??= "glibc"
+TC_CXX_RUNTIME ??= "gnu"
TMPDIR ?= "${TOPDIR}/tmp"
-CACHE = "${TMPDIR}/cache/${TCMODE}-${TCLIBC}${@['', '/' + str(d.getVar('MACHINE'))][bool(d.getVar('MACHINE'))]}${@['', '/' + str(d.getVar('SDKMACHINE'))][bool(d.getVar('SDKMACHINE'))]}"
+CACHE = "${TMPDIR}/cache"
# The persistent cache should be shared by all builds
PERSISTENT_DIR = "${TOPDIR}/cache"
LOG_DIR = "${TMPDIR}/log"
STAMPS_DIR ?= "${TMPDIR}/stamps"
-STAMP = "${STAMPS_DIR}/${MULTIMACH_TARGET_SYS}/${PN}/${EXTENDPE}${PV}-${PR}"
+STAMP = "${STAMPS_DIR}/${MULTIMACH_TARGET_SYS}/${PN}/${PV}"
STAMPCLEAN = "${STAMPS_DIR}/${MULTIMACH_TARGET_SYS}/${PN}/*-*"
BASE_WORKDIR ?= "${TMPDIR}/work"
-WORKDIR = "${BASE_WORKDIR}/${MULTIMACH_TARGET_SYS}/${PN}/${EXTENDPE}${PV}-${PR}"
+WORKDIR = "${BASE_WORKDIR}/${MULTIMACH_TARGET_SYS}/${PN}/${PV}"
T = "${WORKDIR}/temp"
D = "${WORKDIR}/image"
S = "${WORKDIR}/${BP}"
@@ -438,7 +443,6 @@ STAGING_DIR_TARGET = "${RECIPE_SYSROOT}"
# Setting DEPLOY_DIR outside of TMPDIR is helpful, when you are using
# packaged staging and/or multimachine.
DEPLOY_DIR ?= "${TMPDIR}/deploy"
-DEPLOY_DIR_TAR = "${DEPLOY_DIR}/tar"
DEPLOY_DIR_IPK = "${DEPLOY_DIR}/ipk"
DEPLOY_DIR_RPM = "${DEPLOY_DIR}/rpm"
DEPLOY_DIR_DEB = "${DEPLOY_DIR}/deb"
@@ -453,7 +457,7 @@ PKGDATA_DIR_SDK = "${TMPDIR}/pkgdata/${SDK_SYS}"
##################################################################
SDK_NAME_PREFIX ?= "oecore"
-SDK_NAME = "${SDK_NAME_PREFIX}-${SDK_ARCH}-${TUNE_PKGARCH}"
+SDK_NAME = "${SDK_NAME_PREFIX}-${IMAGE_BASENAME}-${SDK_ARCH}-${TUNE_PKGARCH}-${MACHINE}"
SDKPATH = "/usr/local/oe-sdk-hardcoded-buildpath"
SDKPATHNATIVE = "${SDKPATH}/sysroots/${SDK_SYS}"
# The path to default to installing the SDK to
@@ -463,12 +467,7 @@ SDKPATHINSTALL = "/usr/local/${SDK_NAME_PREFIX}-${SDK_ARCH}"
# Kernel info.
##################################################################
-OLDEST_KERNEL = "3.2.0"
-OLDEST_KERNEL:aarch64 = "3.14"
-OLDEST_KERNEL:nios2 = "3.19"
-OLDEST_KERNEL:powerpc64le = "3.10.0"
-OLDEST_KERNEL:riscv32 = "5.4"
-OLDEST_KERNEL:riscv64 = "4.15"
+OLDEST_KERNEL = "5.15"
# SDK_OLDEST_KERNEL can't be set using overrides since there are
# none for the SDK architecture. Best to set it from a machine-sdk
@@ -515,7 +514,7 @@ HOSTTOOLS_DIR = "${TMPDIR}/hosttools"
# Tools needed to run builds with OE-Core
HOSTTOOLS += " \
- [ ar as awk basename bash bzip2 cat chgrp chmod chown chrpath cmp comm cp cpio \
+ [ ar as awk basename bash bunzip2 bzip2 cat chgrp chmod chown chrpath cmp comm cp cpio \
cpp cut date dd diff diffstat dirname du echo egrep env expand expr false \
fgrep file find flock g++ gawk gcc getconf getopt git grep gunzip gzip \
head hostname iconv id install ld ldd ln ls lz4c make md5sum mkdir mkfifo mknod \
@@ -523,12 +522,15 @@ HOSTTOOLS += " \
python3 pzstd ranlib readelf readlink realpath rm rmdir rpcgen sed seq sh \
sha1sum sha224sum sha256sum sha384sum sha512sum \
sleep sort split stat strings strip tail tar tee test touch tr true uname \
- uniq wc wget which xargs zstd \
+ uniq unzstd wc wget which xargs zstd \
"
# Tools needed to run testimage runtime image testing
HOSTTOOLS += "${@'ip ping ps scp ssh stty' if (bb.utils.contains_any('IMAGE_CLASSES', 'testimage testsdk', True, False, d) or any(x in (d.getVar("BBINCLUDED") or "") for x in ["testimage.bbclass", "testsdk.bbclass"])) else ''}"
+# Used by archiver.bbclass when compression is xz
+HOSTTOOLS += "${@'xz' if (('archiver.bbclass' in (d.getVar('BBINCLUDED') or '')) and (d.getVarFlag('ARCHIVER_MODE', 'compression') == 'xz')) else ''}"
+
# Link to these if present
HOSTTOOLS_NONFATAL += "aws gcc-ar gpg gpg-agent ld.bfd ld.gold nc pigz sftp socat ssh sudo"
@@ -541,6 +543,12 @@ HOSTTOOLS_NONFATAL += "bzr"
# Used by ssh fetcher
HOSTTOOLS_NONFATAL += "scp"
+# Used by Mercurial fetcher
+HOSTTOOLS_NONFATAL += "hg"
+
+# Used by gcp fetcher
+HOSTTOOLS_NONFATAL += "gsutil"
+
# Link to git-lfs if present
HOSTTOOLS_NONFATAL += "git-lfs"
@@ -550,7 +558,7 @@ TOOLCHAIN_OPTIONS = " --sysroot=${STAGING_DIR_TARGET}"
export CC = "${CCACHE}${HOST_PREFIX}gcc ${HOST_CC_ARCH}${TOOLCHAIN_OPTIONS}"
export CXX = "${CCACHE}${HOST_PREFIX}g++ ${HOST_CC_ARCH}${TOOLCHAIN_OPTIONS}"
-export FC = "${CCACHE}${HOST_PREFIX}gfortran ${HOST_CC_ARCH}${TOOLCHAIN_OPTIONS}"
+export FC = "${HOST_PREFIX}gfortran ${HOST_CC_ARCH}${TOOLCHAIN_OPTIONS}"
export CPP = "${HOST_PREFIX}gcc -E${TOOLCHAIN_OPTIONS} ${HOST_CC_ARCH}"
export LD = "${HOST_PREFIX}ld${TOOLCHAIN_OPTIONS} ${HOST_LD_ARCH}"
export CCLD = "${CC}"
@@ -567,7 +575,7 @@ PYTHON = "${@sys.executable}"
export BUILD_CC = "${CCACHE}${BUILD_PREFIX}gcc ${BUILD_CC_ARCH}"
export BUILD_CXX = "${CCACHE}${BUILD_PREFIX}g++ ${BUILD_CC_ARCH}"
-export BUILD_FC = "${CCACHE}${BUILD_PREFIX}gfortran ${BUILD_CC_ARCH}"
+export BUILD_FC = "${BUILD_PREFIX}gfortran ${BUILD_CC_ARCH}"
export BUILD_CPP = "${BUILD_PREFIX}gcc ${BUILD_CC_ARCH} -E"
export BUILD_LD = "${BUILD_PREFIX}ld ${BUILD_LD_ARCH}"
export BUILD_CCLD = "${BUILD_PREFIX}gcc ${BUILD_CC_ARCH}"
@@ -643,11 +651,16 @@ EXTRA_OEMAKE:prepend:task-install = "${PARALLEL_MAKEINST} "
##################################################################
# Optimization flags.
##################################################################
+TARGET_DBGSRC_DIR ?= "/usr/src/debug/${PN}/${PV}"
# Beware: applied last to first
-DEBUG_PREFIX_MAP ?= "-fmacro-prefix-map=${WORKDIR}=/usr/src/debug/${PN}/${EXTENDPE}${PV}-${PR} \
- -fdebug-prefix-map=${WORKDIR}=/usr/src/debug/${PN}/${EXTENDPE}${PV}-${PR} \
- -fdebug-prefix-map=${STAGING_DIR_HOST}= \
- -fdebug-prefix-map=${STAGING_DIR_NATIVE}= \
+DEBUG_PREFIX_MAP ?= "-fcanon-prefix-map \
+ -fmacro-prefix-map=${S}=${TARGET_DBGSRC_DIR} \
+ -fdebug-prefix-map=${S}=${TARGET_DBGSRC_DIR} \
+ -fmacro-prefix-map=${B}=${TARGET_DBGSRC_DIR} \
+ -fdebug-prefix-map=${B}=${TARGET_DBGSRC_DIR} \
+ -fdebug-prefix-map=${STAGING_DIR_HOST}= \
+ -fmacro-prefix-map=${STAGING_DIR_HOST}= \
+ -fdebug-prefix-map=${STAGING_DIR_NATIVE}= \
"
DEBUG_FLAGS ?= "-g -feliminate-unused-debug-types ${DEBUG_PREFIX_MAP}"
@@ -671,7 +684,7 @@ export PYTHONHASHSEED = "0"
export PERL_HASH_SEED = "0"
export SOURCE_DATE_EPOCH ?= "${@get_source_date_epoch_value(d)}"
# A SOURCE_DATE_EPOCH of '0' might be misinterpreted as no SDE
-export SOURCE_DATE_EPOCH_FALLBACK ??= "1302044400"
+SOURCE_DATE_EPOCH_FALLBACK ??= "1302044400"
REPRODUCIBLE_TIMESTAMP_ROOTFS ??= "1520598896"
##################################################################
@@ -727,10 +740,7 @@ SRC_URI[vardepsexclude] += "\
SRCDATE = "${DATE}"
SRCREV ??= "INVALID"
AUTOREV = "${@bb.fetch2.get_autorev(d)}"
-AUTOREV[vardepvalue] = "${SRCPV}"
-# Set Dynamically in base.bbclass
-# SRCPV = "${@bb.fetch2.get_srcrev(d)}"
-SRCPV[vardepvalue] = "${SRCPV}"
+SRCPV = ""
SRC_URI = ""
@@ -798,7 +808,8 @@ DISTRO_NAME ??= "OpenEmbedded"
# And finally '<foo>:forcevariable' overrides any standard variable, with the highest priority.
# This works for functions as well, they are really just variables.
#
-OVERRIDES = "${TARGET_OS}:${TRANSLATED_TARGET_ARCH}:pn-${PN}:${MACHINEOVERRIDES}:${DISTROOVERRIDES}:${CLASSOVERRIDE}${LIBCOVERRIDE}:forcevariable"
+OVERRIDES = "${TARGET_OS}:${TRANSLATED_TARGET_ARCH}:pn-${PN}:layer-${FILE_LAYERNAME}:${MACHINEOVERRIDES}:${DISTROOVERRIDES}:${CLASSOVERRIDE}${LIBCOVERRIDE}:forcevariable"
+FILE_LAYERNAME ??= "config"
LIBCOVERRIDE ?= ""
CLASSOVERRIDE ?= "class-target"
DISTROOVERRIDES ?= "${@d.getVar('DISTRO') or ''}"
@@ -822,6 +833,8 @@ include conf/distro/defaultsetup.conf
include conf/documentation.conf
include conf/licenses.conf
require conf/sanity.conf
+require conf/cve-check-map.conf
+include conf/bblock.conf
##################################################################
# Weak variables (usually to retain backwards compatibility)
@@ -867,6 +880,10 @@ XZ_DEFAULTS[vardepsexclude] += "XZ_MEMLIMIT XZ_THREADS"
ZSTD_THREADS ?= "${@oe.utils.cpu_count(at_least=2)}"
ZSTD_THREADS[vardepvalue] = "1"
+ZSTD_COMPRESSION_LEVEL ?= "-3"
+ZSTD_DEFAULTS ?= "--threads=${ZSTD_THREADS} ${ZSTD_COMPRESSION_LEVEL}"
+ZSTD_DEFAULTS[vardepsexclude] = "ZSTD_THREADS"
+
# Limit the number of threads that OpenMP libraries will use. Otherwise they
# may fallback to using all CPUs
export OMP_NUM_THREADS = "${BB_NUMBER_THREADS}"
@@ -881,6 +898,8 @@ OES_BITBAKE_CONF = "1"
##################################################################
MACHINE_FEATURES ?= ""
+SDK_MACHINE_FEATURES ?= ""
+
DISTRO_FEATURES ?= ""
DISTRO_EXTRA_RDEPENDS ?= ""
@@ -895,7 +914,7 @@ IMAGE_FEATURES += "${EXTRA_IMAGE_FEATURES}"
# Native distro features (will always be used for -native, even if they
# are not enabled for target)
-DISTRO_FEATURES_NATIVE ?= "x11 ipv6 xattr"
+DISTRO_FEATURES_NATIVE ?= "acl x11 ipv6 xattr"
DISTRO_FEATURES_NATIVESDK ?= "x11"
# Normally target distro features will not be applied to native builds:
@@ -909,8 +928,7 @@ MACHINE_FEATURES_BACKFILL = "rtc qemu-usermode"
COMBINED_FEATURES = "${@oe.utils.set_intersect('DISTRO_FEATURES', 'MACHINE_FEATURES', d)}"
COMBINED_FEATURES[vardeps] += "DISTRO_FEATURES MACHINE_FEATURES"
-SERIAL_CONSOLE ??= ""
-SERIAL_CONSOLES ??= "${@d.getVar('SERIAL_CONSOLE').replace(' ', ';')}"
+SERIAL_CONSOLES ??= ""
NO_RECOMMENDATIONS ??= ""
BAD_RECOMMENDATIONS ?= ""
@@ -924,10 +942,11 @@ SHELL[unexport] = "1"
TRANSLATED_TARGET_ARCH ??= "${@d.getVar('TARGET_ARCH').replace("_", "-")}"
# Set a default umask to use for tasks for determinism
-BB_DEFAULT_UMASK = "022"
+BB_DEFAULT_UMASK ??= "022"
# Complete output from bitbake
BB_CONSOLELOG ?= "${LOG_DIR}/cooker/${MACHINE}/${DATETIME}.log"
+BB_DEFAULT_EVENTLOG ?= "${LOG_DIR}/eventlog/${DATETIME}.json"
# Setup our default hash policy
BB_SIGNATURE_HANDLER ?= "OEBasicHash"
@@ -942,19 +961,20 @@ BB_HASHEXCLUDE_COMMON ?= "TMPDIR FILE PATH PWD BB_TASKHASH BBPATH BBSERVER DL_DI
SSTATE_HASHEQUIV_OWNER CCACHE_TOP_DIR BB_HASHSERVE GIT_CEILING_DIRECTORIES \
OMP_NUM_THREADS BB_CURRENTTASK"
BB_BASEHASH_IGNORE_VARS ?= "${BB_HASHEXCLUDE_COMMON} PSEUDO_IGNORE_PATHS BUILDHISTORY_DIR \
- SSTATE_DIR SOURCE_DATE_EPOCH"
+ SSTATE_DIR SOURCE_DATE_EPOCH RUST_BUILD_SYS RUST_HOST_SYS RUST_TARGET_SYS"
BB_HASHCONFIG_IGNORE_VARS ?= "${BB_HASHEXCLUDE_COMMON} DATE TIME SSH_AGENT_PID \
SSH_AUTH_SOCK PSEUDO_BUILD BB_ENV_PASSTHROUGH_ADDITIONS DISABLE_SANITY_CHECKS \
PARALLEL_MAKE BB_NUMBER_THREADS BB_ORIGENV BB_INVALIDCONF BBINCLUDED \
GIT_PROXY_COMMAND ALL_PROXY all_proxy NO_PROXY no_proxy FTP_PROXY ftp_proxy \
HTTP_PROXY http_proxy HTTPS_PROXY https_proxy SOCKS5_USER SOCKS5_PASSWD \
- BB_SETSCENE_ENFORCE BB_CMDLINE BB_SERVER_TIMEOUT"
+ BB_SETSCENE_ENFORCE BB_CMDLINE BB_SERVER_TIMEOUT BB_NICE_LEVEL"
BB_SIGNATURE_EXCLUDE_FLAGS ?= "doc deps depends \
lockfiles vardepsexclude vardeps vardepvalue vardepvalueexclude \
file-checksums python task nostamp \
sstate-lockfile-shared prefuncs postfuncs export_func deptask rdeptask \
recrdeptask nodeprrecs stamp-extra-info sstate-outputdirs filename lineno \
progress mcdepends number_threads"
+BB_HASH_CODEPARSER_VALS = "LOGFIFO=/ T=/ WORKDIR=/ DATE=1234 TIME=1234 PV=0.0-1 PN=no-pn METADATA_REVISION=1234 SRC_URI="
MLPREFIX ??= ""
MULTILIB_VARIANTS ??= ""
@@ -964,3 +984,10 @@ MULTILIB_VARIANTS ??= ""
# what it would be anyway if the signature generator (e.g. OEEquivHash) doesn't
# support unihashes.
BB_UNIHASH ?= "${BB_TASKHASH}"
+
+oe.sstatesig.find_sstate_manifest[vardepsexclude] = "BBEXTENDCURR BBEXTENDVARIANT OVERRIDES PACKAGE_EXTRA_ARCHS"
+oe.utils.get_multilib_datastore[vardepsexclude] = "DEFAULTTUNE_MULTILIB_ORIGINAL OVERRIDES"
+oe.path.format_display[vardepsexclude] = "TOPDIR"
+oe.utils.get_bb_number_threads[vardepsexclude] = "BB_NUMBER_THREADS"
+oe.packagedata.emit_pkgdata[vardepsexclude] = "BB_NUMBER_THREADS"
+oe.packagedata.read_subpkgdata_extended[vardepsexclude] = "BB_NUMBER_THREADS"
diff --git a/meta/conf/ccache.conf b/meta/conf/ccache.conf
index 931012dec9..4406ae561b 100644
--- a/meta/conf/ccache.conf
+++ b/meta/conf/ccache.conf
@@ -1,2 +1 @@
max_size = 0
-cache_dir_levels = 1
diff --git a/meta/conf/conf-notes.txt b/meta/conf/conf-notes.txt
deleted file mode 100644
index cfd1f1977b..0000000000
--- a/meta/conf/conf-notes.txt
+++ /dev/null
@@ -1,19 +0,0 @@
-
-### Shell environment set up for builds. ###
-
-You can now run 'bitbake <target>'
-
-Common targets are:
- core-image-minimal
- core-image-full-cmdline
- core-image-sato
- core-image-weston
- meta-toolchain
- meta-ide-support
-
-You can also run generated qemu images with a command like 'runqemu qemux86-64'.
-
-Other commonly useful commands are:
- - 'devtool' and 'recipetool' handle common recipe tasks
- - 'bitbake-layers' handles common layer tasks
- - 'oe-pkgdata-util' handles common target package tasks
diff --git a/meta/conf/cve-check-map.conf b/meta/conf/cve-check-map.conf
new file mode 100644
index 0000000000..17b0f15571
--- /dev/null
+++ b/meta/conf/cve-check-map.conf
@@ -0,0 +1,28 @@
+# Possible options for CVE statuses
+
+# used by this class internally when fix is detected (NVD DB version check or CVE patch file)
+CVE_CHECK_STATUSMAP[patched] = "Patched"
+# use when this class does not detect backported patch (e.g. vendor kernel repo with cherry-picked CVE patch)
+CVE_CHECK_STATUSMAP[backported-patch] = "Patched"
+# use when NVD DB does not mention patched versions of stable/LTS branches which have upstream CVE backports
+CVE_CHECK_STATUSMAP[cpe-stable-backport] = "Patched"
+# use when NVD DB does not mention correct version or does not mention any verion at all
+CVE_CHECK_STATUSMAP[fixed-version] = "Patched"
+
+# used internally by this class if CVE vulnerability is detected which is not marked as fixed or ignored
+CVE_CHECK_STATUSMAP[unpatched] = "Unpatched"
+# use when CVE is confirmed by upstream but fix is still not available
+CVE_CHECK_STATUSMAP[vulnerable-investigating] = "Unpatched"
+
+# used for migration from old concept, do not use for new vulnerabilities
+CVE_CHECK_STATUSMAP[ignored] = "Ignored"
+# use when NVD DB wrongly indicates vulnerability which is actually for a different component
+CVE_CHECK_STATUSMAP[cpe-incorrect] = "Ignored"
+# use when upstream does not accept the report as a vulnerability (e.g. works as designed)
+CVE_CHECK_STATUSMAP[disputed] = "Ignored"
+# use when vulnerability depends on build or runtime configuration which is not used
+CVE_CHECK_STATUSMAP[not-applicable-config] = "Ignored"
+# use when vulnerability affects other platform (e.g. Windows or Debian)
+CVE_CHECK_STATUSMAP[not-applicable-platform] = "Ignored"
+# use when upstream acknowledged the vulnerability but does not plan to fix it
+CVE_CHECK_STATUSMAP[upstream-wontfix] = "Ignored"
diff --git a/meta/conf/distro/defaultsetup.conf b/meta/conf/distro/defaultsetup.conf
index f6894f3ab5..90b68057ad 100644
--- a/meta/conf/distro/defaultsetup.conf
+++ b/meta/conf/distro/defaultsetup.conf
@@ -2,7 +2,7 @@ include conf/distro/include/default-providers.inc
include conf/distro/include/default-versions.inc
include conf/distro/include/default-distrovars.inc
include conf/distro/include/maintainers.inc
-
+include conf/distro/include/time64.inc
require conf/distro/include/tcmode-${TCMODE}.inc
require conf/distro/include/tclibc-${TCLIBC}.inc
@@ -14,7 +14,7 @@ TMPDIR .= "${TCLIBCAPPEND}"
USER_CLASSES ?= ""
PACKAGE_CLASSES ?= "package_ipk"
-INHERIT_DISTRO ?= "debian devshell sstate license remove-libtool"
+INHERIT_DISTRO ?= "debian devshell sstate license remove-libtool create-spdx"
INHERIT += "${PACKAGE_CLASSES} ${USER_CLASSES} ${INHERIT_DISTRO}"
INIT_MANAGER ??= "none"
diff --git a/meta/conf/distro/include/cve-extra-exclusions.inc b/meta/conf/distro/include/cve-extra-exclusions.inc
index 6c19cd293d..fcef6a14fb 100644
--- a/meta/conf/distro/include/cve-extra-exclusions.inc
+++ b/meta/conf/distro/include/cve-extra-exclusions.inc
@@ -15,62 +15,70 @@
# the aim of sharing that work and ensuring we don't duplicate it.
#
-
-# strace https://web.nvd.nist.gov/view/vuln/detail?vulnId=CVE-2000-0006
-# CVE is more than 20 years old with no resolution evident
-# broken links in CVE database references make resolution impractical
-CVE_CHECK_IGNORE += "CVE-2000-0006"
-
-# epiphany https://web.nvd.nist.gov/view/vuln/detail?vulnId=CVE-2005-0238
-# The issue here is spoofing of domain names using characters from other character sets.
-# There has been much discussion amongst the epiphany and webkit developers and
-# whilst there are improvements about how domains are handled and displayed to the user
-# there is unlikely ever to be a single fix to webkit or epiphany which addresses this
-# problem. Ignore this CVE as there isn't any mitigation or fix or way to progress this further
-# we can seem to take.
-CVE_CHECK_IGNORE += "CVE-2005-0238"
-
-# glibc https://web.nvd.nist.gov/view/vuln/detail?vulnId=CVE-2010-4756
-# Issue is memory exhaustion via glob() calls, e.g. from within an ftp server
-# Best discussion in https://bugzilla.redhat.com/show_bug.cgi?id=681681
-# Upstream don't see it as a security issue, ftp servers shouldn't be passing
-# this to libc glob. Exclude as upstream have no plans to add BSD's GLOB_LIMIT or similar
-CVE_CHECK_IGNORE += "CVE-2010-4756"
-
-# go https://web.nvd.nist.gov/view/vuln/detail?vulnId=CVE-2020-29509
-# go https://web.nvd.nist.gov/view/vuln/detail?vulnId=CVE-2020-29511
-# The encoding/xml package in go can potentially be used for security exploits if not used correctly
-# CVE applies to a netapp product as well as flagging a general issue. We don't ship anything
-# exposing this interface in an exploitable way
-CVE_CHECK_IGNORE += "CVE-2020-29509 CVE-2020-29511"
+# strace https://nvd.nist.gov/vuln/detail/CVE-2000-0006
+CVE_STATUS[CVE-2000-0006] = "upstream-wontfix: CVE is more than 20 years old \
+with no resolution evident. Broken links in CVE database references make resolution impractical."
+
+# epiphany https://nvd.nist.gov/vuln/detail/CVE-2005-0238
+CVE_STATUS[CVE-2005-0238] = "upstream-wontfix: \
+The issue here is spoofing of domain names using characters from other character sets. \
+There has been much discussion amongst the epiphany and webkit developers and \
+whilst there are improvements about how domains are handled and displayed to the user \
+there is unlikely ever to be a single fix to webkit or epiphany which addresses this \
+problem. There isn't any mitigation or fix or way to progress this further."
+
+# glibc https://nvd.nist.gov/vuln/detail/CVE-2010-4756
+CVE_STATUS[CVE-2010-4756] = "upstream-wontfix: \
+Issue is memory exhaustion via glob() calls, e.g. from within an ftp server \
+Best discussion in https://bugzilla.redhat.com/show_bug.cgi?id=681681 \
+Upstream don't see it as a security issue, ftp servers shouldn't be passing \
+this to libc glob. Upstream have no plans to add BSD's GLOB_LIMIT or similar."
+
+# go https://nvd.nist.gov/vuln/detail/CVE-2020-29509
+# go https://nvd.nist.gov/vuln/detail/CVE-2020-29511
+CVE_STATUS_GROUPS += "CVE_STATUS_GO"
+CVE_STATUS_GO = "CVE-2020-29509 CVE-2020-29511"
+CVE_STATUS_GO[status] = "not-applicable-config: \
+The encoding/xml package in go can potentially be used for security exploits if not used correctly \
+CVE applies to a netapp product as well as flagging a general issue. We don't ship anything \
+exposing this interface in an exploitable way"
# db
-# Since Oracle relicensed bdb, the open source community is slowly but surely replacing bdb with
-# supported and open source friendly alternatives. As a result these CVEs are unlikely to ever be fixed.
-CVE_CHECK_IGNORE += "CVE-2015-2583 CVE-2015-2624 CVE-2015-2626 CVE-2015-2640 CVE-2015-2654 \
+CVE_STATUS_GROUPS += "CVE_STATUS_DB"
+CVE_STATUS_DB = "CVE-2015-2583 CVE-2015-2624 CVE-2015-2626 CVE-2015-2640 CVE-2015-2654 \
CVE-2015-2656 CVE-2015-4754 CVE-2015-4764 CVE-2015-4774 CVE-2015-4775 CVE-2015-4776 CVE-2015-4777 \
CVE-2015-4778 CVE-2015-4779 CVE-2015-4780 CVE-2015-4781 CVE-2015-4782 CVE-2015-4783 CVE-2015-4784 \
CVE-2015-4785 CVE-2015-4786 CVE-2015-4787 CVE-2015-4788 CVE-2015-4789 CVE-2015-4790 CVE-2016-0682 \
CVE-2016-0689 CVE-2016-0692 CVE-2016-0694 CVE-2016-3418 CVE-2020-2981"
+CVE_STATUS_DB[status] = "upstream-wontfix: Since Oracle relicensed bdb, the open source community is slowly but surely \
+replacing bdb with supported and open source friendly alternatives. As a result this CVE is unlikely to ever be fixed."
-#### CPE update pending ####
-
-# groff:groff-native https://web.nvd.nist.gov/view/vuln/detail?vulnId=CVE-2000-0803
-# Appears it was fixed in https://git.savannah.gnu.org/cgit/groff.git/commit/?id=07f95f1674217275ed4612f1dcaa95a88435c6a7
-# so from 1.17 onwards. Reported to the database for update by RP 2021/5/9. Update accepted 2021/5/10.
-#CVE_CHECK_IGNORE += "CVE-2000-0803"
-
-
-
-#### Upstream still working on ####
-
-# qemu:qemu-native:qemu-system-native https://web.nvd.nist.gov/view/vuln/detail?vulnId=CVE-2021-20255
-# There was a proposed patch https://lists.gnu.org/archive/html/qemu-devel/2021-02/msg06098.html
-# however qemu maintainers are sure the patch is incorrect and should not be applied.
-
-# wget https://web.nvd.nist.gov/view/vuln/detail?vulnId=CVE-2021-31879
-# https://mail.gnu.org/archive/html/bug-wget/2021-02/msg00002.html
-# No response upstream as of 2021/5/12
-
-
-
+# Kernel CVEs that are generic but can't be added to the kernel's hand-maintained cve-exclusion.inc
+# or machine-maintained cve-exclusion_VERSION.inc files, such as issues that describe TCP/IP design
+# flaws or processor-specific exploits that can't be mitigated.
+#
+# For OE-Core our policy is to stay as close to the kernel stable releases as we can. This should
+# ensure the bulk of the major kernel CVEs are fixed and we don't dive into each individual issue
+# as the stable maintainers are much more able to do that.
+CVE_STATUS[CVE-1999-0524] = "ignored: issue is that ICMP exists, can be filewalled if required"
+CVE_STATUS[CVE-2008-4609] = "ignored: describes design flaws in TCP"
+CVE_STATUS[CVE-2010-4563] = "ignored: low impact, only enables detection of hosts which are sniffing network traffic"
+CVE_STATUS[CVE-2011-0640] = "ignored: requires physical access and any mitigation would mean USB is impractical to use"
+
+# qemu:qemu-native:qemu-system-native https://nvd.nist.gov/vuln/detail/CVE-2021-20255
+CVE_STATUS[CVE-2021-20255] = "upstream-wontfix: \
+There was a proposed patch https://lists.gnu.org/archive/html/qemu-devel/2021-02/msg06098.html \
+qemu maintainers say the patch is incorrect and should not be applied \
+The issue is of low impact, at worst sitting in an infinite loop rather than exploitable."
+
+# qemu:qemu-native:qemu-system-native https://nvd.nist.gov/vuln/detail/CVE-2019-12067
+CVE_STATUS[CVE-2019-12067] = "upstream-wontfix: \
+There was a proposed patch but rejected by upstream qemu. It is unclear if the issue can \
+still be reproduced or where exactly any bug is. \
+We'll pick up any fix when upstream accepts one."
+
+# nasm:nasm-native https://nvd.nist.gov/vuln/detail/CVE-2020-18974
+CVE_STATUS[CVE-2020-18974] = "upstream-wontfix: \
+It is a fuzzing related buffer overflow. It is of low impact since most devices \
+wouldn't expose an assembler. The upstream is inactive and there is little to be \
+done about the bug, ignore from an OE perspective."
diff --git a/meta/conf/distro/include/default-distrovars.inc b/meta/conf/distro/include/default-distrovars.inc
index 9f4617be01..7554081e8b 100644
--- a/meta/conf/distro/include/default-distrovars.inc
+++ b/meta/conf/distro/include/default-distrovars.inc
@@ -4,7 +4,10 @@ OEINCLUDELOGS ?= "yes"
KERNEL_CONSOLE ?= "ttyS0"
KEEPUIMAGE ??= "yes"
-IMAGE_LINGUAS ?= "en-us en-gb"
+DEFAULT_IMAGE_LINGUAS = "en-us en-gb"
+DEFAULT_IMAGE_LINGUAS:libc-glibc = "c en-us en-gb"
+IMAGE_LINGUAS ?= "${DEFAULT_IMAGE_LINGUAS}"
+
ENABLE_BINARY_LOCALE_GENERATION ?= "1"
LOCALE_UTF8_ONLY ?= "0"
LOCALE_UTF8_IS_DEFAULT ?= "1"
@@ -16,7 +19,13 @@ DISTRO_FEATURES_DEFAULT:remove:riscv32 = "seccomp"
# seccomp is not yet ported to ARC
DISTRO_FEATURES_DEFAULT:remove:arc = "seccomp"
-DISTRO_FEATURES_DEFAULT ?= "acl alsa bluetooth debuginfod ext2 ipv4 ipv6 largefile pcmcia usbgadget usbhost wifi xattr nfs zeroconf pci 3g nfc x11 vfat seccomp"
+# seccomp is not yet ported to microblaze
+DISTRO_FEATURES_DEFAULT:remove:microblaze = "seccomp"
+
+# seccomp is not yet ported to loongarch64
+DISTRO_FEATURES_DEFAULT:remove:loongarch64 = "seccomp"
+
+DISTRO_FEATURES_DEFAULT ?= "acl alsa bluetooth debuginfod ext2 ipv4 ipv6 pcmcia usbgadget usbhost wifi xattr nfs zeroconf pci 3g nfc x11 vfat seccomp"
DISTRO_FEATURES ?= "${DISTRO_FEATURES_DEFAULT}"
IMAGE_FEATURES ?= ""
diff --git a/meta/conf/distro/include/default-providers.inc b/meta/conf/distro/include/default-providers.inc
index 6defdca12d..d18173c744 100644
--- a/meta/conf/distro/include/default-providers.inc
+++ b/meta/conf/distro/include/default-providers.inc
@@ -5,6 +5,7 @@ PREFERRED_PROVIDER_virtual/xserver ?= "xserver-xorg"
PREFERRED_PROVIDER_virtual/xserver-xf86 ?= "xserver-xorg"
PREFERRED_PROVIDER_virtual/egl ?= "mesa"
PREFERRED_PROVIDER_virtual/libgl ?= "mesa"
+PREFERRED_PROVIDER_virtual/libglx ?= "mesa"
PREFERRED_PROVIDER_virtual/libgl-native ?= "mesa-native"
PREFERRED_PROVIDER_virtual/nativesdk-libgl ?= "nativesdk-mesa"
PREFERRED_PROVIDER_virtual/libgles1 ?= "mesa"
@@ -23,7 +24,6 @@ PREFERRED_PROVIDER_virtual/make-native ?= "make-native"
# Default virtual runtime providers
#
VIRTUAL-RUNTIME_update-alternatives ?= "update-alternatives-opkg"
-VIRTUAL-RUNTIME_apm ?= "apm"
VIRTUAL-RUNTIME_alsa-state ?= "alsa-state"
VIRTUAL-RUNTIME_getopt ?= "util-linux-getopt"
VIRTUAL-RUNTIME_base-utils ?= "busybox"
diff --git a/meta/conf/distro/include/distro_alias.inc b/meta/conf/distro/include/distro_alias.inc
index e43c0acd80..e6131c815a 100644
--- a/meta/conf/distro/include/distro_alias.inc
+++ b/meta/conf/distro/include/distro_alias.inc
@@ -317,7 +317,6 @@ DISTRO_PN_ALIAS:pn-sysprof = "Fedora=sysprof Debian=sysprof"
DISTRO_PN_ALIAS:pn-systemd-compat-units = "Fedora=systemd Ubuntu=systemd"
DISTRO_PN_ALIAS:pn-systemd-systemctl = "OE-Core"
DISTRO_PN_ALIAS:pn-systemd-systemdctl = "Fedora=systemd Ubuntu=systemd"
-DISTRO_PN_ALIAS:pn-systemtap-uprobes = "Ubuntu=systemtap Debian=systemtap"
DISTRO_PN_ALIAS:pn-sysvinit-inittab = "OE-Core"
DISTRO_PN_ALIAS:pn-tar-replacement = "Fedora=tar Ubuntu=tar"
DISTRO_PN_ALIAS:pn-tcf-agent = "Windriver upstream=http://www.eclipse.org/dsdp/tm/"
diff --git a/meta/conf/distro/include/init-manager-systemd.inc b/meta/conf/distro/include/init-manager-systemd.inc
index 7867d90028..0a76647459 100644
--- a/meta/conf/distro/include/init-manager-systemd.inc
+++ b/meta/conf/distro/include/init-manager-systemd.inc
@@ -1,7 +1,9 @@
# Use systemd for system initialization
-DISTRO_FEATURES:append = " systemd"
+DISTRO_FEATURES:append = " systemd usrmerge"
DISTRO_FEATURES_BACKFILL_CONSIDERED:append = " sysvinit"
VIRTUAL-RUNTIME_init_manager ??= "systemd"
VIRTUAL-RUNTIME_initscripts ??= "systemd-compat-units"
VIRTUAL-RUNTIME_login_manager ??= "shadow-base"
VIRTUAL-RUNTIME_dev_manager ??= "systemd"
+# systemd hardcodes /root in its source codes, other values are not offically supported
+ROOT_HOME ?= "/root"
diff --git a/meta/conf/distro/include/maintainers.inc b/meta/conf/distro/include/maintainers.inc
index 3990d1d507..20eb3a0446 100644
--- a/meta/conf/distro/include/maintainers.inc
+++ b/meta/conf/distro/include/maintainers.inc
@@ -38,18 +38,15 @@ RECIPE_MAINTAINER:pn-alsa-tools = "Michael Opdenacker <michael.opdenacker@bootli
RECIPE_MAINTAINER:pn-alsa-topology-conf = "Michael Opdenacker <michael.opdenacker@bootlin.com>"
RECIPE_MAINTAINER:pn-alsa-ucm-conf = "Michael Opdenacker <michael.opdenacker@bootlin.com>"
RECIPE_MAINTAINER:pn-alsa-utils = "Michael Opdenacker <michael.opdenacker@bootlin.com>"
-RECIPE_MAINTAINER:pn-alsa-utils-scripts = "Michael Opdenacker <michael.opdenacker@bootlin.com>"
-RECIPE_MAINTAINER:pn-apmd = "Anuj Mittal <anuj.mittal@intel.com>"
+RECIPE_MAINTAINER:pn-appstream = "Markus Volk <f_l_k@t-online.de>"
RECIPE_MAINTAINER:pn-apr = "Hongxu Jia <hongxu.jia@windriver.com>"
RECIPE_MAINTAINER:pn-apr-util = "Hongxu Jia <hongxu.jia@windriver.com>"
-RECIPE_MAINTAINER:pn-apt = "Aníbal Limón <limon.anibal@gmail.com>"
+RECIPE_MAINTAINER:pn-apt = "Unassigned <unassigned@yoctoproject.org>"
RECIPE_MAINTAINER:pn-argp-standalone = "Khem Raj <raj.khem@gmail.com>"
RECIPE_MAINTAINER:pn-asciidoc = "Yi Zhao <yi.zhao@windriver.com>"
RECIPE_MAINTAINER:pn-aspell = "Anuj Mittal <anuj.mittal@intel.com>"
RECIPE_MAINTAINER:pn-at = "Chen Qi <Qi.Chen@windriver.com>"
-RECIPE_MAINTAINER:pn-at-spi2-atk = "Tim Orling <tim.orling@konsulko.com>"
RECIPE_MAINTAINER:pn-at-spi2-core = "Tim Orling <tim.orling@konsulko.com>"
-RECIPE_MAINTAINER:pn-atk = "Tim Orling <tim.orling@konsulko.com>"
RECIPE_MAINTAINER:pn-attr = "Chen Qi <Qi.Chen@windriver.com>"
RECIPE_MAINTAINER:pn-autoconf = "Robert Yang <liezhi.yang@windriver.com>"
RECIPE_MAINTAINER:pn-autoconf-archive = "Robert Yang <liezhi.yang@windriver.com>"
@@ -57,12 +54,13 @@ RECIPE_MAINTAINER:pn-automake = "Robert Yang <liezhi.yang@windriver.com>"
RECIPE_MAINTAINER:pn-avahi = "Yi Zhao <yi.zhao@windriver.com>"
RECIPE_MAINTAINER:pn-babeltrace = "Alexander Kanavin <alex.kanavin@gmail.com>"
RECIPE_MAINTAINER:pn-babeltrace2 = "Alexander Kanavin <alex.kanavin@gmail.com>"
+RECIPE_MAINTAINER:pn-baremetal-helloworld = "Alejandro Hernandez <alejandro@enedino.org>"
RECIPE_MAINTAINER:pn-base-files = "Anuj Mittal <anuj.mittal@intel.com>"
RECIPE_MAINTAINER:pn-base-passwd = "Anuj Mittal <anuj.mittal@intel.com>"
RECIPE_MAINTAINER:pn-bash = "Hongxu Jia <hongxu.jia@windriver.com>"
RECIPE_MAINTAINER:pn-bash-completion = "Alexander Kanavin <alex.kanavin@gmail.com>"
RECIPE_MAINTAINER:pn-bc = "Anuj Mittal <anuj.mittal@intel.com>"
-RECIPE_MAINTAINER:pn-bind = "Armin Kuster <akuster808@gmail.com>"
+RECIPE_MAINTAINER:pn-bind = "Unassigned <unassigned@yoctoproject.org>"
RECIPE_MAINTAINER:pn-binutils = "Khem Raj <raj.khem@gmail.com>"
RECIPE_MAINTAINER:pn-binutils-cross-${TARGET_ARCH} = "Khem Raj <raj.khem@gmail.com>"
RECIPE_MAINTAINER:pn-binutils-cross-canadian-${TRANSLATED_TARGET_ARCH} = "Khem Raj <raj.khem@gmail.com>"
@@ -72,7 +70,7 @@ RECIPE_MAINTAINER:pn-bison = "Chen Qi <Qi.Chen@windriver.com>"
RECIPE_MAINTAINER:pn-blktool = "Yi Zhao <yi.zhao@windriver.com>"
RECIPE_MAINTAINER:pn-blktrace = "Alexander Kanavin <alex.kanavin@gmail.com>"
RECIPE_MAINTAINER:pn-bluez5 = "Anuj Mittal <anuj.mittal@intel.com>"
-RECIPE_MAINTAINER:pn-bmap-tools = "Anuj Mittal <anuj.mittal@intel.com>"
+RECIPE_MAINTAINER:pn-bmaptool = "Trevor Woerner <twoerner@gmail.com>"
RECIPE_MAINTAINER:pn-boost = "Alexander Kanavin <alex.kanavin@gmail.com>"
RECIPE_MAINTAINER:pn-boost-build-native = "Alexander Kanavin <alex.kanavin@gmail.com>"
RECIPE_MAINTAINER:pn-bootchart2 = "Alexander Kanavin <alex.kanavin@gmail.com>"
@@ -85,24 +83,25 @@ RECIPE_MAINTAINER:pn-buildtools-extended-tarball = "Richard Purdie <richard.purd
RECIPE_MAINTAINER:pn-buildtools-tarball = "Richard Purdie <richard.purdie@linuxfoundation.org>"
RECIPE_MAINTAINER:pn-buildtools-docs-tarball = "Richard Purdie <richard.purdie@linuxfoundation.org>"
RECIPE_MAINTAINER:pn-buildtools-make-tarball = "Richard Purdie <richard.purdie@linuxfoundation.org>"
-RECIPE_MAINTAINER:pn-busybox = "Andrej Valek <andrej.valek@siemens.com>"
+RECIPE_MAINTAINER:pn-busybox = "Andrej Valek <andrej.v@skyrain.eu>"
RECIPE_MAINTAINER:pn-busybox-inittab = "Denys Dmytriyenko <denis@denix.org>"
RECIPE_MAINTAINER:pn-bzip2 = "Denys Dmytriyenko <denis@denix.org>"
RECIPE_MAINTAINER:pn-ca-certificates = "Alexander Kanavin <alex.kanavin@gmail.com>"
RECIPE_MAINTAINER:pn-cairo = "Anuj Mittal <anuj.mittal@intel.com>"
RECIPE_MAINTAINER:pn-cargo = "Randy MacLeod <Randy.MacLeod@windriver.com>"
-RECIPE_MAINTAINER:pn-cargo-cross-canadian-${TRANSLATED_TARGET_ARCH} = "Randy MacLeod <Randy.MacLeod@windriver.com>"
+RECIPE_MAINTAINER:pn-cargo-c-native = "Frederic Martinsons <frederic.martinsons@gmail.com>"
RECIPE_MAINTAINER:pn-cantarell-fonts = "Alexander Kanavin <alex.kanavin@gmail.com>"
RECIPE_MAINTAINER:pn-ccache = "Robert Yang <liezhi.yang@windriver.com>"
RECIPE_MAINTAINER:pn-cdrtools-native = "Yi Zhao <yi.zhao@windriver.com>"
RECIPE_MAINTAINER:pn-chrpath = "Yi Zhao <yi.zhao@windriver.com>"
-RECIPE_MAINTAINER:pn-cmake = "Pascal Bach <pascal.bach@siemens.com>"
-RECIPE_MAINTAINER:pn-cmake-native = "Pascal Bach <pascal.bach@siemens.com>"
+RECIPE_MAINTAINER:pn-cmake = "Unassigned <unassigned@yoctoproject.org>"
+RECIPE_MAINTAINER:pn-cmake-native = "Unassigned <unassigned@yoctoproject.org>"
RECIPE_MAINTAINER:pn-connman = "Changhyeok Bae <changhyeok.bae@gmail.com>"
RECIPE_MAINTAINER:pn-connman-conf = "Ross Burton <ross.burton@arm.com>"
RECIPE_MAINTAINER:pn-connman-gnome = "Ross Burton <ross.burton@arm.com>"
RECIPE_MAINTAINER:pn-consolekit = "Chen Qi <Qi.Chen@windriver.com>"
RECIPE_MAINTAINER:pn-core-image-base = "Richard Purdie <richard.purdie@linuxfoundation.org>"
+RECIPE_MAINTAINER:pn-core-image-initramfs-boot = "Ross Burton <ross.burton@arm.com>"
RECIPE_MAINTAINER:pn-core-image-minimal = "Richard Purdie <richard.purdie@linuxfoundation.org>"
RECIPE_MAINTAINER:pn-core-image-minimal-dev = "Richard Purdie <richard.purdie@linuxfoundation.org>"
RECIPE_MAINTAINER:pn-core-image-minimal-initramfs = "Richard Purdie <richard.purdie@linuxfoundation.org>"
@@ -131,13 +130,14 @@ RECIPE_MAINTAINER:pn-cryptodev-module = "Robert Yang <liezhi.yang@windriver.com>
RECIPE_MAINTAINER:pn-cryptodev-tests = "Robert Yang <liezhi.yang@windriver.com>"
RECIPE_MAINTAINER:pn-cups = "Chen Qi <Qi.Chen@windriver.com>"
RECIPE_MAINTAINER:pn-curl = "Robert Joslyn <robert.joslyn@redrectangle.org>"
-RECIPE_MAINTAINER:pn-cve-update-db-native = "Ross Burton <ross.burton@arm.com>"
+RECIPE_MAINTAINER:pn-cve-update-nvd2-native = "Ross Burton <ross.burton@arm.com>"
RECIPE_MAINTAINER:pn-cwautomacros = "Ross Burton <ross.burton@arm.com>"
RECIPE_MAINTAINER:pn-db = "Unassigned <unassigned@yoctoproject.org>"
RECIPE_MAINTAINER:pn-dbus = "Chen Qi <Qi.Chen@windriver.com>"
RECIPE_MAINTAINER:pn-dbus-glib = "Chen Qi <Qi.Chen@windriver.com>"
RECIPE_MAINTAINER:pn-dbus-wait = "Chen Qi <Qi.Chen@windriver.com>"
RECIPE_MAINTAINER:pn-debianutils = "Yi Zhao <yi.zhao@windriver.com>"
+RECIPE_MAINTAINER:pn-debugedit = "Chen Qi <Qi.Chen@windriver.com>"
RECIPE_MAINTAINER:pn-dejagnu = "Nathan Rossi <nathan@nathanrossi.com>"
RECIPE_MAINTAINER:pn-depmodwrapper-cross = "Unassigned <unassigned@yoctoproject.org>"
RECIPE_MAINTAINER:pn-desktop-file-utils = "Alexander Kanavin <alex.kanavin@gmail.com>"
@@ -153,7 +153,7 @@ RECIPE_MAINTAINER:pn-docbook-xml-dtd4 = "Yi Zhao <yi.zhao@windriver.com>"
RECIPE_MAINTAINER:pn-docbook-xsl-stylesheets = "Yi Zhao <yi.zhao@windriver.com>"
RECIPE_MAINTAINER:pn-dos2unix = "Khem Raj <raj.khem@gmail.com>"
RECIPE_MAINTAINER:pn-dosfstools = "Yi Zhao <yi.zhao@windriver.com>"
-RECIPE_MAINTAINER:pn-dpkg = "Aníbal Limón <limon.anibal@gmail.com>"
+RECIPE_MAINTAINER:pn-dpkg = "Unassigned <unassigned@yoctoproject.org>"
RECIPE_MAINTAINER:pn-dropbear = "Yi Zhao <yi.zhao@windriver.com>"
RECIPE_MAINTAINER:pn-dtc = "Wang Mingyu <wangmy@fujitsu.com>"
RECIPE_MAINTAINER:pn-dwarfsrcfiles = "Alexander Kanavin <alex.kanavin@gmail.com>"
@@ -161,12 +161,12 @@ RECIPE_MAINTAINER:pn-e2fsprogs = "Robert Yang <liezhi.yang@windriver.com>"
RECIPE_MAINTAINER:pn-ed = "Alexander Kanavin <alex.kanavin@gmail.com>"
RECIPE_MAINTAINER:pn-efivar = "Ross Burton <ross.burton@arm.com>"
RECIPE_MAINTAINER:pn-efibootmgr = "Ross Burton <ross.burton@arm.com>"
-RECIPE_MAINTAINER:pn-elfutils = "Zang Ruochen <zangrc.fnst@fujitsu.com>"
-RECIPE_MAINTAINER:pn-ell = "Zang Ruochen <zangrc.fnst@fujitsu.com>"
+RECIPE_MAINTAINER:pn-elfutils = "Zang Ruochen <zangruochen@loongson.cn>"
+RECIPE_MAINTAINER:pn-ell = "Zang Ruochen <zangruochen@loongson.cn>"
RECIPE_MAINTAINER:pn-enchant2 = "Anuj Mittal <anuj.mittal@intel.com>"
RECIPE_MAINTAINER:pn-encodings = "Unassigned <unassigned@yoctoproject.org>"
RECIPE_MAINTAINER:pn-epiphany = "Alexander Kanavin <alex.kanavin@gmail.com>"
-RECIPE_MAINTAINER:pn-erofs-utils = "Richard Weinberger <richard@nod.at>"
+RECIPE_MAINTAINER:pn-erofs-utils = "Unassigned <unassigned@yoctoproject.org>"
RECIPE_MAINTAINER:pn-ethtool = "Changhyeok Bae <changhyeok.bae@gmail.com>"
RECIPE_MAINTAINER:pn-eudev = "Anuj Mittal <anuj.mittal@intel.com>"
RECIPE_MAINTAINER:pn-expat = "Yi Zhao <yi.zhao@windriver.com>"
@@ -174,7 +174,7 @@ RECIPE_MAINTAINER:pn-expect = "Alexander Kanavin <alex.kanavin@gmail.com>"
RECIPE_MAINTAINER:pn-ffmpeg = "Alexander Kanavin <alex.kanavin@gmail.com>"
RECIPE_MAINTAINER:pn-file = "Yi Zhao <yi.zhao@windriver.com>"
RECIPE_MAINTAINER:pn-findutils = "Chen Qi <Qi.Chen@windriver.com>"
-RECIPE_MAINTAINER:pn-flac = "Unassigned <unassigned@yoctoproject.org>"
+RECIPE_MAINTAINER:pn-flac = "Michael Opdenacker <michael.opdenacker@bootlin.com>"
RECIPE_MAINTAINER:pn-flex = "Chen Qi <Qi.Chen@windriver.com>"
RECIPE_MAINTAINER:pn-font-alias = "Unassigned <unassigned@yoctoproject.org>"
RECIPE_MAINTAINER:pn-font-util = "Unassigned <unassigned@yoctoproject.org>"
@@ -190,7 +190,7 @@ RECIPE_MAINTAINER:pn-gcc-cross-canadian-${TRANSLATED_TARGET_ARCH} = "Khem Raj <r
RECIPE_MAINTAINER:pn-gcc-crosssdk-${SDK_SYS} = "Khem Raj <raj.khem@gmail.com>"
RECIPE_MAINTAINER:pn-gcc-runtime = "Khem Raj <raj.khem@gmail.com>"
RECIPE_MAINTAINER:pn-gcc-sanitizers = "Khem Raj <raj.khem@gmail.com>"
-RECIPE_MAINTAINER:pn-gcc-source-12.1.0 = "Khem Raj <raj.khem@gmail.com>"
+RECIPE_MAINTAINER:pn-gcc-source-13.2.0 = "Khem Raj <raj.khem@gmail.com>"
RECIPE_MAINTAINER:pn-gconf = "Ross Burton <ross.burton@arm.com>"
RECIPE_MAINTAINER:pn-gcr = "Alexander Kanavin <alex.kanavin@gmail.com>"
RECIPE_MAINTAINER:pn-gdb = "Khem Raj <raj.khem@gmail.com>"
@@ -210,16 +210,15 @@ RECIPE_MAINTAINER:pn-glibc = "Khem Raj <raj.khem@gmail.com>"
RECIPE_MAINTAINER:pn-glibc-locale = "Khem Raj <raj.khem@gmail.com>"
RECIPE_MAINTAINER:pn-glibc-mtrace = "Khem Raj <raj.khem@gmail.com>"
RECIPE_MAINTAINER:pn-glibc-scripts = "Khem Raj <raj.khem@gmail.com>"
-RECIPE_MAINTAINER:pn-glibc-tests = "Lukasz Majewski <lukma@denx.de>"
+RECIPE_MAINTAINER:pn-glibc-y2038-tests = "Lukasz Majewski <lukma@denx.de>"
RECIPE_MAINTAINER:pn-glibc-testsuite = "Khem Raj <raj.khem@gmail.com>"
-RECIPE_MAINTAINER:pn-glide = "Otavio Salvador <otavio.salvador@ossystems.com.br>"
RECIPE_MAINTAINER:pn-gmp = "Khem Raj <raj.khem@gmail.com>"
RECIPE_MAINTAINER:pn-glslang = "Jose Quaresma <quaresma.jose@gmail.com>"
RECIPE_MAINTAINER:pn-gnome-desktop-testing = "Ross Burton <ross.burton@arm.com>"
RECIPE_MAINTAINER:pn-gnu-config = "Robert Yang <liezhi.yang@windriver.com>"
RECIPE_MAINTAINER:pn-gnu-efi = "Yi Zhao <yi.zhao@windriver.com>"
RECIPE_MAINTAINER:pn-gnupg = "Hongxu Jia <hongxu.jia@windriver.com>"
-RECIPE_MAINTAINER:pn-gnutls = "Unassigned <unassigned@yoctoproject.org>"
+RECIPE_MAINTAINER:pn-gnutls = "Simone Weiß <simone.p.weiss@posteo.net>"
RECIPE_MAINTAINER:pn-go = "Khem Raj <raj.khem@gmail.com>"
RECIPE_MAINTAINER:pn-go-binary-native = "Khem Raj <raj.khem@gmail.com>"
RECIPE_MAINTAINER:pn-go-cross-${TUNE_PKGARCH} = "Khem Raj <raj.khem@gmail.com>"
@@ -233,6 +232,7 @@ RECIPE_MAINTAINER:pn-gperf = "Alexander Kanavin <alex.kanavin@gmail.com>"
RECIPE_MAINTAINER:pn-gpgme = "Hongxu Jia <hongxu.jia@windriver.com>"
RECIPE_MAINTAINER:pn-gptfdisk = "Alexander Kanavin <alex.kanavin@gmail.com>"
RECIPE_MAINTAINER:pn-gcompat = "Khem Raj <raj.khem@gmail.com>"
+RECIPE_MAINTAINER:pn-graphene = "Markus Volk <f_l_k@t-online.de>"
RECIPE_MAINTAINER:pn-grep = "Denys Dmytriyenko <denis@denix.org>"
RECIPE_MAINTAINER:pn-groff = "Hongxu Jia <hongxu.jia@windriver.com>"
RECIPE_MAINTAINER:pn-grub = "Anuj Mittal <anuj.mittal@intel.com>"
@@ -253,6 +253,7 @@ RECIPE_MAINTAINER:pn-gstreamer1.0-python = "Anuj Mittal <anuj.mittal@intel.com>"
RECIPE_MAINTAINER:pn-gstreamer1.0-rtsp-server = "Anuj Mittal <anuj.mittal@intel.com>"
RECIPE_MAINTAINER:pn-gstreamer1.0-vaapi = "Anuj Mittal <anuj.mittal@intel.com>"
RECIPE_MAINTAINER:pn-gtk+3 = "Ross Burton <ross.burton@arm.com>"
+RECIPE_MAINTAINER:pn-gtk4 = "Markus Volk <f_l_k@t-online.de>"
RECIPE_MAINTAINER:pn-gtk-doc = "Alexander Kanavin <alex.kanavin@gmail.com>"
RECIPE_MAINTAINER:pn-gzip = "Denys Dmytriyenko <denis@denix.org>"
RECIPE_MAINTAINER:pn-harfbuzz = "Anuj Mittal <anuj.mittal@intel.com>"
@@ -262,6 +263,7 @@ RECIPE_MAINTAINER:pn-hicolor-icon-theme = "Anuj Mittal <anuj.mittal@intel.com>"
RECIPE_MAINTAINER:pn-hwlatdetect = "Alexander Kanavin <alex.kanavin@gmail.com>"
RECIPE_MAINTAINER:pn-i2c-tools = "Anuj Mittal <anuj.mittal@intel.com>"
RECIPE_MAINTAINER:pn-icecc-create-env = "Joshua Watt <JPEWhacker@gmail.com>"
+RECIPE_MAINTAINER:pn-icon-naming-utils = "Richard Purdie <richard.purdie@linuxfoundation.org>"
RECIPE_MAINTAINER:pn-icu = "Alexander Kanavin <alex.kanavin@gmail.com>"
RECIPE_MAINTAINER:pn-ifupdown = "Anuj Mittal <anuj.mittal@intel.com>"
RECIPE_MAINTAINER:pn-igt-gpu-tools = "Anuj Mittal <anuj.mittal@intel.com>"
@@ -276,13 +278,16 @@ RECIPE_MAINTAINER:pn-initramfs-live-install = "Anuj Mittal <anuj.mittal@intel.co
RECIPE_MAINTAINER:pn-initramfs-live-install-efi = "Anuj Mittal <anuj.mittal@intel.com>"
RECIPE_MAINTAINER:pn-initramfs-live-install-efi-testfs = "Anuj Mittal <anuj.mittal@intel.com>"
RECIPE_MAINTAINER:pn-initramfs-live-install-testfs = "Anuj Mittal <anuj.mittal@intel.com>"
+RECIPE_MAINTAINER:pn-initramfs-module-install = "Anuj Mittal <anuj.mittal@intel.com>"
+RECIPE_MAINTAINER:pn-initramfs-module-install-efi = "Anuj Mittal <anuj.mittal@intel.com>"
+RECIPE_MAINTAINER:pn-initramfs-module-setup-live = "Anuj Mittal <anuj.mittal@intel.com>"
RECIPE_MAINTAINER:pn-initscripts = "Anuj Mittal <anuj.mittal@intel.com>"
RECIPE_MAINTAINER:pn-intltool = "Alexander Kanavin <alex.kanavin@gmail.com>"
RECIPE_MAINTAINER:pn-iproute2 = "Changhyeok Bae <changhyeok.bae@gmail.com>"
RECIPE_MAINTAINER:pn-iptables = "Changhyeok Bae <changhyeok.bae@gmail.com>"
RECIPE_MAINTAINER:pn-iputils = "Changhyeok Bae <changhyeok.bae@gmail.com>"
-RECIPE_MAINTAINER:pn-iso-codes = "Wang Mingyu <wangmy@cn.ujitsu.com>"
-RECIPE_MAINTAINER:pn-itstool = "Andreas Müller <schnitzeltony@gmail.com>"
+RECIPE_MAINTAINER:pn-iso-codes = "Wang Mingyu <wangmy@cn.fujitsu.com>"
+RECIPE_MAINTAINER:pn-itstool = "Unassigned <unassigned@yoctoproject.org>"
RECIPE_MAINTAINER:pn-iw = "Changhyeok Bae <changhyeok.bae@gmail.com>"
RECIPE_MAINTAINER:pn-libjpeg-turbo = "Anuj Mittal <anuj.mittal@intel.com>"
RECIPE_MAINTAINER:pn-json-c = "Yi Zhao <yi.zhao@windriver.com>"
@@ -295,18 +300,20 @@ RECIPE_MAINTAINER:pn-kernel-devsrc = "Bruce Ashfield <bruce.ashfield@gmail.com>"
RECIPE_MAINTAINER:pn-kexec-tools = "Unassigned <unassigned@yoctoproject.org>"
RECIPE_MAINTAINER:pn-keymaps = "Alexander Kanavin <alex.kanavin@gmail.com>"
RECIPE_MAINTAINER:pn-kmod = "Chen Qi <Qi.Chen@windriver.com>"
-RECIPE_MAINTAINER:pn-kmscube = "Carlos Rafael Giani <dv@pseudoterminal.org>"
+RECIPE_MAINTAINER:pn-kmscube = "Carlos Rafael Giani <crg7475@mailbox.org>"
RECIPE_MAINTAINER:pn-l3afpad = "Anuj Mittal <anuj.mittal@intel.com>"
RECIPE_MAINTAINER:pn-lame = "Michael Opdenacker <michael.opdenacker@bootlin.com>"
RECIPE_MAINTAINER:pn-ldconfig-native = "Khem Raj <raj.khem@gmail.com>"
RECIPE_MAINTAINER:pn-less = "Yi Zhao <yi.zhao@windriver.com>"
RECIPE_MAINTAINER:pn-liba52 = "Unassigned <unassigned@yoctoproject.org>"
RECIPE_MAINTAINER:pn-libacpi = "Anuj Mittal <anuj.mittal@intel.com>"
+RECIPE_MAINTAINER:pn-libadwaita = "Alexander Kanavin <alex.kanavin@gmail.com>"
RECIPE_MAINTAINER:pn-libaio = "Alexander Kanavin <alex.kanavin@gmail.com>"
RECIPE_MAINTAINER:pn-libarchive = "Otavio Salvador <otavio.salvador@ossystems.com.br>"
RECIPE_MAINTAINER:pn-libassuan = "Unassigned <unassigned@yoctoproject.org>"
RECIPE_MAINTAINER:pn-libatomic-ops = "Anuj Mittal <anuj.mittal@intel.com>"
RECIPE_MAINTAINER:pn-libbsd = "Yi Zhao <yi.zhao@windriver.com>"
+RECIPE_MAINTAINER:pn-libc-test = "Unassigned <unassigned@yoctoproject.org>"
RECIPE_MAINTAINER:pn-libcap = "Yi Zhao <yi.zhao@windriver.com>"
RECIPE_MAINTAINER:pn-libcap-ng = "Yi Zhao <yi.zhao@windriver.com>"
RECIPE_MAINTAINER:pn-libcap-ng-python = "Yi Zhao <yi.zhao@windriver.com>"
@@ -314,16 +321,13 @@ RECIPE_MAINTAINER:pn-libcgroup = "Alexander Kanavin <alex.kanavin@gmail.com>"
RECIPE_MAINTAINER:pn-libcheck = "Yi Zhao <yi.zhao@windriver.com>"
RECIPE_MAINTAINER:pn-libcomps = "Alexander Kanavin <alex.kanavin@gmail.com>"
RECIPE_MAINTAINER:pn-libconvert-asn1-perl = "Tim Orling <tim.orling@konsulko.com>"
-RECIPE_MAINTAINER:pn-libcroco = "Anuj Mittal <anuj.mittal@intel.com>"
RECIPE_MAINTAINER:pn-libdaemon = "Alexander Kanavin <alex.kanavin@gmail.com>"
RECIPE_MAINTAINER:pn-libdazzle = "Alexander Kanavin <alex.kanavin@gmail.com>"
-RECIPE_MAINTAINER:pn-libdmx = "Unassigned <unassigned@yoctoproject.org>"
RECIPE_MAINTAINER:pn-libdnf = "Alexander Kanavin <alex.kanavin@gmail.com>"
RECIPE_MAINTAINER:pn-libdrm = "Otavio Salvador <otavio.salvador@ossystems.com.br>"
RECIPE_MAINTAINER:pn-libedit = "Khem Raj <raj.khem@gmail.com>"
RECIPE_MAINTAINER:pn-libepoxy = "Anuj Mittal <anuj.mittal@intel.com>"
RECIPE_MAINTAINER:pn-liberation-fonts = "Alexander Kanavin <alex.kanavin@gmail.com>"
-RECIPE_MAINTAINER:pn-liberror-perl = "Tim Orling <tim.orling@konsulko.com>"
RECIPE_MAINTAINER:pn-libevdev = "Anuj Mittal <anuj.mittal@intel.com>"
RECIPE_MAINTAINER:pn-libevent = "Anuj Mittal <anuj.mittal@intel.com>"
RECIPE_MAINTAINER:pn-libexif = "Alexander Kanavin <alex.kanavin@gmail.com>"
@@ -337,7 +341,7 @@ RECIPE_MAINTAINER:pn-libgcc-initial = "Khem Raj <raj.khem@gmail.com>"
RECIPE_MAINTAINER:pn-libgcrypt = "Hongxu Jia <hongxu.jia@windriver.com>"
RECIPE_MAINTAINER:pn-libgfortran = "Khem Raj <raj.khem@gmail.com>"
RECIPE_MAINTAINER:pn-libgit2 = "Unassigned <unassigned@yoctoproject.org>"
-RECIPE_MAINTAINER:pn-libgloss = "Alejandro Hernandez <aehs29@gmail.com>"
+RECIPE_MAINTAINER:pn-libgloss = "Alejandro Hernandez <alejandro@enedino.org>"
RECIPE_MAINTAINER:pn-libglu = "Ross Burton <ross.burton@arm.com>"
RECIPE_MAINTAINER:pn-libgpg-error = "Hongxu Jia <hongxu.jia@windriver.com>"
RECIPE_MAINTAINER:pn-libgudev = "Ross Burton <ross.burton@arm.com>"
@@ -370,6 +374,7 @@ RECIPE_MAINTAINER:pn-libpcre = "Yi Zhao <yi.zhao@windriver.com>"
RECIPE_MAINTAINER:pn-libpcre2 = "Unassigned <unassigned@yoctoproject.org>"
RECIPE_MAINTAINER:pn-libpipeline = "Wang Mingyu <wangmy@fujitsu.com>"
RECIPE_MAINTAINER:pn-libpng = "Anuj Mittal <anuj.mittal@intel.com>"
+RECIPE_MAINTAINER:pn-libportal = "Alexander Kanavin <alex.kanavin@gmail.com>"
RECIPE_MAINTAINER:pn-libproxy = "Anuj Mittal <anuj.mittal@intel.com>"
RECIPE_MAINTAINER:pn-libpthread-stubs = "Alexander Kanavin <alex.kanavin@gmail.com>"
RECIPE_MAINTAINER:pn-libptytty = "Alexander Kanavin <alex.kanavin@gmail.com>"
@@ -379,8 +384,9 @@ RECIPE_MAINTAINER:pn-librsvg = "Anuj Mittal <anuj.mittal@intel.com>"
RECIPE_MAINTAINER:pn-libstd-rs = "Randy MacLeod <Randy.MacLeod@windriver.com>"
RECIPE_MAINTAINER:pn-libsamplerate0 = "Unassigned <unassigned@yoctoproject.org>"
RECIPE_MAINTAINER:pn-libsdl2 = "Yi Zhao <yi.zhao@windriver.com>"
-RECIPE_MAINTAINER:pn-libseccomp = "Unassigned <unassigned@yoctoproject.org>"
+RECIPE_MAINTAINER:pn-libseccomp = "Simone Weiß <simone.p.weiss@posteo.net>"
RECIPE_MAINTAINER:pn-libsecret = "Alexander Kanavin <alex.kanavin@gmail.com>"
+RECIPE_MAINTAINER:pn-libslirp = "Unassigned <unassigned@yoctoproject.org>"
RECIPE_MAINTAINER:pn-libsm = "Unassigned <unassigned@yoctoproject.org>"
RECIPE_MAINTAINER:pn-libsndfile1 = "Unassigned <unassigned@yoctoproject.org>"
RECIPE_MAINTAINER:pn-libsolv = "Anuj Mittal <anuj.mittal@intel.com>"
@@ -389,13 +395,17 @@ RECIPE_MAINTAINER:pn-libsoup-2.4 = "Anuj Mittal <anuj.mittal@intel.com>"
RECIPE_MAINTAINER:pn-libssh2 = "Unassigned <unassigned@yoctoproject.org>"
RECIPE_MAINTAINER:pn-libssp-nonshared = "Khem Raj <raj.khem@gmail.com>"
RECIPE_MAINTAINER:pn-libtasn1 = "Anuj Mittal <anuj.mittal@intel.com>"
+RECIPE_MAINTAINER:pn-libtest-fatal-perl = "Tim Orling <tim.orling@konsulko.com>"
RECIPE_MAINTAINER:pn-libtest-needs-perl = "Tim Orling <tim.orling@konsulko.com>"
+RECIPE_MAINTAINER:pn-libtest-warnings-perl = "Tim Orling <tim.orling@konsulko.com>"
RECIPE_MAINTAINER:pn-libtheora = "Alexander Kanavin <alex.kanavin@gmail.com>"
RECIPE_MAINTAINER:pn-libtimedate-perl = "Tim Orling <tim.orling@konsulko.com>"
RECIPE_MAINTAINER:pn-libtirpc = "Anuj Mittal <anuj.mittal@intel.com>"
RECIPE_MAINTAINER:pn-libtool = "Robert Yang <liezhi.yang@windriver.com>"
RECIPE_MAINTAINER:pn-libtool-cross = "Robert Yang <liezhi.yang@windriver.com>"
RECIPE_MAINTAINER:pn-libtool-native = "Robert Yang <liezhi.yang@windriver.com>"
+RECIPE_MAINTAINER:pn-libtraceevent = "Bruce Ashfield <bruce.ashfield@gmail.com>"
+RECIPE_MAINTAINER:pn-libtry-tiny-perl = "Tim Orling <tim.orling@konsulko.com>"
RECIPE_MAINTAINER:pn-libucontext = "Khem Raj <raj.khem@gmail.com>"
RECIPE_MAINTAINER:pn-libunistring = "Anuj Mittal <anuj.mittal@intel.com>"
RECIPE_MAINTAINER:pn-libunwind = "Bruce Ashfield <bruce.ashfield@gmail.com>"
@@ -403,17 +413,18 @@ RECIPE_MAINTAINER:pn-liburcu = "Wang Mingyu <wangmy@fujitsu.com>"
RECIPE_MAINTAINER:pn-liburi-perl = "Tim Orling <tim.orling@konsulko.com>"
RECIPE_MAINTAINER:pn-libusb1 = "Anuj Mittal <anuj.mittal@intel.com>"
RECIPE_MAINTAINER:pn-libubootenv = "Stefano Babic <sbabic@denx.de>"
-RECIPE_MAINTAINER:pn-libuv = "Armin Kuster <akuster@mvista.com>"
+RECIPE_MAINTAINER:pn-libuv = "Unassigned <unassigned@yoctoproject.org>"
RECIPE_MAINTAINER:pn-libva = "Anuj Mittal <anuj.mittal@intel.com>"
RECIPE_MAINTAINER:pn-libva-initial = "Anuj Mittal <anuj.mittal@intel.com>"
RECIPE_MAINTAINER:pn-libva-utils = "Anuj Mittal <anuj.mittal@intel.com>"
-RECIPE_MAINTAINER:pn-libvorbis = "Zang Ruochen <zangrc.fnst@fujitsu.com>"
+RECIPE_MAINTAINER:pn-libvorbis = "Zang Ruochen <zangruochen@loongson.cn>"
RECIPE_MAINTAINER:pn-libwebp = "Alexander Kanavin <alex.kanavin@gmail.com>"
RECIPE_MAINTAINER:pn-libwpe = "Alexander Kanavin <alex.kanavin@gmail.com>"
RECIPE_MAINTAINER:pn-libx11 = "Unassigned <unassigned@yoctoproject.org>"
+RECIPE_MAINTAINER:pn-libx11-compose-data = "Unassigned <unassigned@yoctoproject.org>"
RECIPE_MAINTAINER:pn-libxau = "Unassigned <unassigned@yoctoproject.org>"
RECIPE_MAINTAINER:pn-libxcb = "Unassigned <unassigned@yoctoproject.org>"
-RECIPE_MAINTAINER:pn-libxcvt = "Oleksandr Kravchuk <open.source@oleksandr-kravchuk.com>"
+RECIPE_MAINTAINER:pn-libxcvt = "Unassigned <unassigned@yoctoproject.org>"
RECIPE_MAINTAINER:pn-libxcomposite = "Unassigned <unassigned@yoctoproject.org>"
RECIPE_MAINTAINER:pn-libxcursor = "Unassigned <unassigned@yoctoproject.org>"
RECIPE_MAINTAINER:pn-libxcrypt = "Khem Raj <raj.khem@gmail.com>"
@@ -436,6 +447,7 @@ RECIPE_MAINTAINER:pn-libxml-sax-base-perl = "Tim Orling <tim.orling@konsulko.com
RECIPE_MAINTAINER:pn-libxml-sax-perl = "Tim Orling <tim.orling@konsulko.com>"
RECIPE_MAINTAINER:pn-libxml-simple-perl = "Tim Orling <tim.orling@konsulko.com>"
RECIPE_MAINTAINER:pn-libxml2 = "Hongxu Jia <hongxu.jia@windriver.com>"
+RECIPE_MAINTAINER:pn-libxmlb = "Markus Volk <f_l_k@t-online.de>"
RECIPE_MAINTAINER:pn-libxmu = "Unassigned <unassigned@yoctoproject.org>"
RECIPE_MAINTAINER:pn-libxpm = "Unassigned <unassigned@yoctoproject.org>"
RECIPE_MAINTAINER:pn-libxrandr = "Unassigned <unassigned@yoctoproject.org>"
@@ -472,7 +484,8 @@ RECIPE_MAINTAINER:pn-lua = "Alexander Kanavin <alex.kanavin@gmail.com>"
RECIPE_MAINTAINER:pn-lz4 = "Denys Dmytriyenko <denis@denix.org>"
RECIPE_MAINTAINER:pn-lzo = "Denys Dmytriyenko <denis@denix.org>"
RECIPE_MAINTAINER:pn-lzip = "Denys Dmytriyenko <denis@denix.org>"
-RECIPE_MAINTAINER:pn-lzop = "Denys Dmytriyenko <denis@denix.org>"
+RECIPE_MAINTAINER:pn-lzlib = "Denys Dmytriyenko <denis@denix.org>"
+RECIPE_MAINTAINER:pn-lzop = "Marek Vasut <marex@denx.de>"
RECIPE_MAINTAINER:pn-m4 = "Robert Yang <liezhi.yang@windriver.com>"
RECIPE_MAINTAINER:pn-m4-native = "Robert Yang <liezhi.yang@windriver.com>"
RECIPE_MAINTAINER:pn-make = "Robert Yang <liezhi.yang@windriver.com>"
@@ -519,6 +532,7 @@ RECIPE_MAINTAINER:pn-mtd-utils = "Denys Dmytriyenko <denis@denix.org>"
RECIPE_MAINTAINER:pn-mtdev = "Anuj Mittal <anuj.mittal@intel.com>"
RECIPE_MAINTAINER:pn-mtools = "Anuj Mittal <anuj.mittal@intel.com>"
RECIPE_MAINTAINER:pn-musl = "Khem Raj <raj.khem@gmail.com>"
+RECIPE_MAINTAINER:pn-musl-legacy-error = "Khem Raj <raj.khem@gmail.com>"
RECIPE_MAINTAINER:pn-musl-locales = "Khem Raj <raj.khem@gmail.com>"
RECIPE_MAINTAINER:pn-musl-obstack = "Khem Raj <raj.khem@gmail.com>"
RECIPE_MAINTAINER:pn-musl-utils = "Khem Raj <raj.khem@gmail.com>"
@@ -529,7 +543,7 @@ RECIPE_MAINTAINER:pn-nativesdk-libtool = "Richard Purdie <richard.purdie@linuxfo
RECIPE_MAINTAINER:pn-nativesdk-packagegroup-sdk-host = "Ross Burton <ross.burton@arm.com>"
RECIPE_MAINTAINER:pn-nativesdk-qemu-helper = "Richard Purdie <richard.purdie@linuxfoundation.org>"
RECIPE_MAINTAINER:pn-nativesdk-sdk-provides-dummy = "Richard Purdie <richard.purdie@linuxfoundation.org>"
-RECIPE_MAINTAINER:pn-newlib = "Alejandro Hernandez <aehs29@gmail.com>"
+RECIPE_MAINTAINER:pn-newlib = "Alejandro Hernandez <alejandro@enedino.org>"
RECIPE_MAINTAINER:pn-ncurses = "Hongxu Jia <hongxu.jia@windriver.com>"
RECIPE_MAINTAINER:pn-neard = "Anuj Mittal <anuj.mittal@intel.com>"
RECIPE_MAINTAINER:pn-net-tools = "Unassigned <unassigned@yoctoproject.org>"
@@ -543,17 +557,17 @@ RECIPE_MAINTAINER:pn-npth = "Alexander Kanavin <alex.kanavin@gmail.com>"
RECIPE_MAINTAINER:pn-nss-myhostname = "Anuj Mittal <anuj.mittal@intel.com>"
RECIPE_MAINTAINER:pn-numactl = "Richard Purdie <richard.purdie@linuxfoundation.org>"
RECIPE_MAINTAINER:pn-ofono = "Ross Burton <ross.burton@arm.com>"
-RECIPE_MAINTAINER:pn-opensbi = "Alistair Francis <alistair.francis@wdc.com>"
+RECIPE_MAINTAINER:pn-opensbi = "Unassigned <unassigned@yoctoproject.org>"
RECIPE_MAINTAINER:pn-openssh = "Unassigned <unassigned@yoctoproject.org>"
RECIPE_MAINTAINER:pn-openssl = "Alexander Kanavin <alex.kanavin@gmail.com>"
-RECIPE_MAINTAINER:pn-opkg = "Alejandro del Castillo <alejandro.delcastillo@ni.com>"
-RECIPE_MAINTAINER:pn-opkg-arch-config = "Alejandro del Castillo <alejandro.delcastillo@ni.com>"
-RECIPE_MAINTAINER:pn-opkg-keyrings = "Alejandro del Castillo <alejandro.delcastillo@ni.com>"
-RECIPE_MAINTAINER:pn-opkg-utils = "Alejandro del Castillo <alejandro.delcastillo@ni.com>"
+RECIPE_MAINTAINER:pn-opkg = "Alex Stewart <alex.stewart@ni.com>"
+RECIPE_MAINTAINER:pn-opkg-arch-config = "Alex Stewart <alex.stewart@ni.com>"
+RECIPE_MAINTAINER:pn-opkg-keyrings = "Alex Stewart <alex.stewart@ni.com>"
+RECIPE_MAINTAINER:pn-opkg-utils = "Alex Stewart <alex.stewart@ni.com>"
RECIPE_MAINTAINER:pn-orc = "Anuj Mittal <anuj.mittal@intel.com>"
RECIPE_MAINTAINER:pn-os-release = "Ross Burton <ross.burton@arm.com>"
-RECIPE_MAINTAINER:pn-ovmf = "Ricardo Neri <ricardo.neri-calderon@linux.intel.com>"
-RECIPE_MAINTAINER:pn-ovmf-shell-image = "Ricardo Neri <ricardo.neri-calderon@linux.intel.com>"
+RECIPE_MAINTAINER:pn-ovmf = "Unassigned <unassigned@yoctoproject.org>"
+RECIPE_MAINTAINER:pn-ovmf-shell-image = "Unassigned <unassigned@yoctoproject.org>"
RECIPE_MAINTAINER:pn-p11-kit = "Alexander Kanavin <alex.kanavin@gmail.com>"
RECIPE_MAINTAINER:pn-package-index = "Ross Burton <ross.burton@arm.com>"
RECIPE_MAINTAINER:pn-pango = "Ross Burton <ross.burton@arm.com>"
@@ -587,55 +601,72 @@ RECIPE_MAINTAINER:pn-ptest-runner = "Ross Burton <ross.burton@arm.com>"
RECIPE_MAINTAINER:pn-pulseaudio = "Unassigned <unassigned@yoctoproject.org>"
RECIPE_MAINTAINER:pn-pulseaudio-client-conf-sato = "Unassigned <unassigned@yoctoproject.org>"
RECIPE_MAINTAINER:pn-puzzles = "Anuj Mittal <anuj.mittal@intel.com>"
-RECIPE_MAINTAINER:pn-python3 = "Oleksandr Kravchuk <open.source@oleksandr-kravchuk.com>"
+RECIPE_MAINTAINER:pn-python3 = "Trevor Gamblin <tgamblin@baylibre.com>"
RECIPE_MAINTAINER:pn-python3-alabaster = "Tim Orling <tim.orling@konsulko.com>"
-RECIPE_MAINTAINER:pn-python3-async = "Oleksandr Kravchuk <open.source@oleksandr-kravchuk.com>"
RECIPE_MAINTAINER:pn-python3-asn1crypto = "Tim Orling <tim.orling@konsulko.com>"
RECIPE_MAINTAINER:pn-python3-atomicwrites = "Tim Orling <tim.orling@konsulko.com>"
RECIPE_MAINTAINER:pn-python3-attrs = "Tim Orling <tim.orling@konsulko.com>"
RECIPE_MAINTAINER:pn-python3-babel = "Tim Orling <tim.orling@konsulko.com>"
RECIPE_MAINTAINER:pn-python3-bcrypt = "Tim Orling <tim.orling@konsulko.com>"
+RECIPE_MAINTAINER:pn-python3-beartype = "Marta Rybczynska <mrybczynska@syslinbit.com>"
+RECIPE_MAINTAINER:pn-python3-booleanpy = "zhengrq.fnst <zhengrq.fnst@fujitsu.com>"
+RECIPE_MAINTAINER:pn-python3-build = "Ross Burton <ross.burton@arm.com>"
+RECIPE_MAINTAINER:pn-python3-calver = "Trevor Gamblin <tgamblin@baylibre.com>"
RECIPE_MAINTAINER:pn-python3-certifi = "Tim Orling <tim.orling@konsulko.com>"
RECIPE_MAINTAINER:pn-python3-cffi = "Tim Orling <tim.orling@konsulko.com>"
RECIPE_MAINTAINER:pn-python3-chardet = "Tim Orling <tim.orling@konsulko.com>"
+RECIPE_MAINTAINER:pn-python3-click = "Wang Mingyu <wangmy@fujitsu.com>"
RECIPE_MAINTAINER:pn-python3-cryptography = "Tim Orling <tim.orling@konsulko.com>"
RECIPE_MAINTAINER:pn-python3-cryptography-vectors = "Tim Orling <tim.orling@konsulko.com>"
-RECIPE_MAINTAINER:pn-python3-cython = "Oleksandr Kravchuk <open.source@oleksandr-kravchuk.com>"
-RECIPE_MAINTAINER:pn-python3-dbus = "Zang Ruochen <zangrc.fnst@fujitsu.com>"
-RECIPE_MAINTAINER:pn-python3-dbusmock = "Oleksandr Kravchuk <open.source@oleksandr-kravchuk.com>"
-RECIPE_MAINTAINER:pn-python3-docutils = "Oleksandr Kravchuk <open.source@oleksandr-kravchuk.com>"
+RECIPE_MAINTAINER:pn-python3-cython = "Trevor Gamblin <tgamblin@baylibre.com>"
+RECIPE_MAINTAINER:pn-python3-dbus = "Zang Ruochen <zangruochen@loongson.cn>"
+RECIPE_MAINTAINER:pn-python3-dbusmock = "Trevor Gamblin <tgamblin@baylibre.com>"
+RECIPE_MAINTAINER:pn-python3-docutils = "Trevor Gamblin <tgamblin@baylibre.com>"
+RECIPE_MAINTAINER:pn-python3-dtc = "Trevor Gamblin <tgamblin@baylibre.com>"
RECIPE_MAINTAINER:pn-python3-dtschema = "Bruce Ashfield <bruce.ashfield@gmail.com>"
RECIPE_MAINTAINER:pn-python3-dtschema-wrapper = "Bruce Ashfield <bruce.ashfield@gmail.com>"
+RECIPE_MAINTAINER:pn-python3-editables = "Ross Burton <ross.burton@arm.com>"
RECIPE_MAINTAINER:pn-python3-pycryptodome = "Joshua Watt <JPEWhacker@gmail.com>"
RECIPE_MAINTAINER:pn-python3-pycryptodomex = "Joshua Watt <JPEWhacker@gmail.com>"
+RECIPE_MAINTAINER:pn-python3-pyproject-metadata = "Tim Orling <tim.orling@konsulko.com>"
RECIPE_MAINTAINER:pn-python3-pyrsistent = "Bruce Ashfield <bruce.ashfield@gmail.com>"
-RECIPE_MAINTAINER:pn-python3-extras = "Oleksandr Kravchuk <open.source@oleksandr-kravchuk.com>"
+RECIPE_MAINTAINER:pn-python3-extras = "Trevor Gamblin <tgamblin@baylibre.com>"
RECIPE_MAINTAINER:pn-python3-flit-core = "Tim Orling <tim.orling@konsulko.com>"
-RECIPE_MAINTAINER:pn-python3-git = "Oleksandr Kravchuk <open.source@oleksandr-kravchuk.com>"
-RECIPE_MAINTAINER:pn-python3-gitdb = "Oleksandr Kravchuk <open.source@oleksandr-kravchuk.com>"
+RECIPE_MAINTAINER:pn-python3-git = "Trevor Gamblin <tgamblin@baylibre.com>"
+RECIPE_MAINTAINER:pn-python3-gitdb = "Trevor Gamblin <tgamblin@baylibre.com>"
+RECIPE_MAINTAINER:pn-python3-hatchling = "Ross Burton <ross.burton@arm.com>"
+RECIPE_MAINTAINER:pn-python3-hatch-fancy-pypi-readme = "Ross Burton <ross.burton@arm.com>"
+RECIPE_MAINTAINER:pn-python3-hatch-vcs = "Ross Burton <ross.burton@arm.com>"
RECIPE_MAINTAINER:pn-python3-hypothesis = "Tim Orling <tim.orling@konsulko.com>"
RECIPE_MAINTAINER:pn-python3-idna = "Bruce Ashfield <bruce.ashfield@gmail.com>"
RECIPE_MAINTAINER:pn-python3-imagesize = "Tim Orling <tim.orling@konsulko.com>"
RECIPE_MAINTAINER:pn-python3-importlib-metadata = "Tim Orling <tim.orling@konsulko.com>"
RECIPE_MAINTAINER:pn-python3-iniconfig = "Tim Orling <tim.orling@konsulko.com>"
-RECIPE_MAINTAINER:pn-python3-iniparse = "Oleksandr Kravchuk <open.source@oleksandr-kravchuk.com>"
+RECIPE_MAINTAINER:pn-python3-iniparse = "Trevor Gamblin <tgamblin@baylibre.com>"
RECIPE_MAINTAINER:pn-python3-iso8601 = "Tim Orling <tim.orling@konsulko.com>"
+RECIPE_MAINTAINER:pn-python3-isodate = "Leon Anavi <leon.anavi@konsulko.com>"
RECIPE_MAINTAINER:pn-python3-installer = "Ross Burton <ross.burton@arm.com>"
RECIPE_MAINTAINER:pn-python3-jinja2 = "Richard Purdie <richard.purdie@linuxfoundation.org>"
RECIPE_MAINTAINER:pn-python3-jsonpointer = "Bruce Ashfield <bruce.ashfield@gmail.com>"
RECIPE_MAINTAINER:pn-python3-jsonschema = "Bruce Ashfield <bruce.ashfield@gmail.com>"
+RECIPE_MAINTAINER:pn-python3-jsonschema-specifications = "Bruce Ashfield <bruce.ashfield@gmail.com>"
+RECIPE_MAINTAINER:pn-python3-license-expression = "Wang Mingyu <wangmy@fujitsu.com>"
RECIPE_MAINTAINER:pn-python3-libarchive-c = "Joshua Watt <JPEWhacker@gmail.com>"
+RECIPE_MAINTAINER:pn-python3-lxml = "Khem Raj <raj.khem@gmail.com>"
RECIPE_MAINTAINER:pn-python3-magic = "Joshua Watt <JPEWhacker@gmail.com>"
-RECIPE_MAINTAINER:pn-python3-mako = "Oleksandr Kravchuk <open.source@oleksandr-kravchuk.com>"
+RECIPE_MAINTAINER:pn-python3-mako = "Trevor Gamblin <tgamblin@baylibre.com>"
RECIPE_MAINTAINER:pn-python3-markdown = "Alexander Kanavin <alex.kanavin@gmail.com>"
RECIPE_MAINTAINER:pn-python3-markupsafe = "Richard Purdie <richard.purdie@linuxfoundation.org>"
+RECIPE_MAINTAINER:pn-python3-maturin = "Tim Orling <tim.orling@konsulko.com>"
+RECIPE_MAINTAINER:pn-python3-meson-python = "Tim Orling <tim.orling@konsulko.com>"
RECIPE_MAINTAINER:pn-python3-more-itertools = "Tim Orling <tim.orling@konsulko.com>"
RECIPE_MAINTAINER:pn-python3-ndg-httpsclient = "Tim Orling <tim.orling@konsulko.com>"
-RECIPE_MAINTAINER:pn-python3-numpy = "Oleksandr Kravchuk <open.source@oleksandr-kravchuk.com>"
+RECIPE_MAINTAINER:pn-python3-numpy = "Trevor Gamblin <tgamblin@baylibre.com>"
RECIPE_MAINTAINER:pn-python3-packaging = "Tim Orling <tim.orling@konsulko.com>"
RECIPE_MAINTAINER:pn-python3-pathlib2 = "Tim Orling <tim.orling@konsulko.com>"
-RECIPE_MAINTAINER:pn-python3-pbr = "Zang Ruochen <zangrc.fnst@fujitsu.com>"
-RECIPE_MAINTAINER:pn-python3-pip = "Zang Ruochen <zangrc.fnst@fujitsu.com>"
+RECIPE_MAINTAINER:pn-python3-pathspec = "Ross Burton <ross.burton@arm.com>"
+RECIPE_MAINTAINER:pn-python3-pbr = "Zang Ruochen <zangruochen@loongson.cn>"
+RECIPE_MAINTAINER:pn-python3-pip = "Zang Ruochen <zangruochen@loongson.cn>"
RECIPE_MAINTAINER:pn-python3-pluggy = "Tim Orling <tim.orling@konsulko.com>"
RECIPE_MAINTAINER:pn-python3-ply = "Tim Orling <tim.orling@konsulko.com>"
RECIPE_MAINTAINER:pn-python3-poetry-core = "Tim Orling <tim.orling@konsulko.com>"
@@ -643,54 +674,65 @@ RECIPE_MAINTAINER:pn-python3-pretend = "Tim Orling <tim.orling@konsulko.com>"
RECIPE_MAINTAINER:pn-python3-psutil = "Tim Orling <tim.orling@konsulko.com>"
RECIPE_MAINTAINER:pn-python3-py = "Tim Orling <tim.orling@konsulko.com>"
RECIPE_MAINTAINER:pn-python3-pyasn1 = "Tim Orling <tim.orling@konsulko.com>"
-RECIPE_MAINTAINER:pn-python3-pycairo = "Zang Ruochen <zangrc.fnst@fujitsu.com>"
+RECIPE_MAINTAINER:pn-python3-pycairo = "Zang Ruochen <zangruochen@loongson.cn>"
RECIPE_MAINTAINER:pn-python3-pycparser = "Tim Orling <tim.orling@konsulko.com>"
RECIPE_MAINTAINER:pn-python3-pyelftools = "Joshua Watt <JPEWhacker@gmail.com>"
-RECIPE_MAINTAINER:pn-python3-pygments = "Oleksandr Kravchuk <open.source@oleksandr-kravchuk.com>"
-RECIPE_MAINTAINER:pn-python3-pygobject = "Zang Ruochen <zangrc.fnst@fujitsu.com>"
+RECIPE_MAINTAINER:pn-python3-pygments = "Tim Orling <tim.orling@konsulko.com>"
+RECIPE_MAINTAINER:pn-python3-pygobject = "Zang Ruochen <zangruochen@loongson.cn>"
RECIPE_MAINTAINER:pn-python3-pyopenssl = "Tim Orling <tim.orling@konsulko.com>"
-RECIPE_MAINTAINER:pn-python3-pyparsing = "Oleksandr Kravchuk <open.source@oleksandr-kravchuk.com>"
+RECIPE_MAINTAINER:pn-python3-pyparsing = "Trevor Gamblin <tgamblin@baylibre.com>"
+RECIPE_MAINTAINER:pn-python3-pyproject-hooks = "Ross Burton <ross.burton@arm.com>"
RECIPE_MAINTAINER:pn-python3-pysocks = "Tim Orling <tim.orling@konsulko.com>"
RECIPE_MAINTAINER:pn-python3-pytest = "Tim Orling <tim.orling@konsulko.com>"
RECIPE_MAINTAINER:pn-python3-pytest-runner = "Tim Orling <tim.orling@konsulko.com>"
RECIPE_MAINTAINER:pn-python3-pytest-subtests = "Tim Orling <tim.orling@konsulko.com>"
RECIPE_MAINTAINER:pn-python3-pytz = "Tim Orling <tim.orling@konsulko.com>"
RECIPE_MAINTAINER:pn-python3-pyyaml = "Tim Orling <tim.orling@konsulko.com>"
+RECIPE_MAINTAINER:pn-python3-rdflib = "Wang Mingyu <wangmy@fujitsu.com>"
+RECIPE_MAINTAINER:pn-python3-referencing = "Tim Orling <tim.orling@konsulko.com>"
RECIPE_MAINTAINER:pn-python3-requests = "Tim Orling <tim.orling@konsulko.com>"
RECIPE_MAINTAINER:pn-python3-rfc3339-validator = "Bruce Ashfield <bruce.ashfield@gmail.com>"
RECIPE_MAINTAINER:pn-python3-rfc3986-validator = "Bruce Ashfield <bruce.ashfield@gmail.com>"
RECIPE_MAINTAINER:pn-python3-rfc3987 = "Bruce Ashfield <bruce.ashfield@gmail.com>"
+RECIPE_MAINTAINER:pn-python3-rpds-py = "Tim Orling <tim.orling@konsulko.com>"
RECIPE_MAINTAINER:pn-python3-ruamel-yaml = "Bruce Ashfield <bruce.ashfield@gmail.com>"
RECIPE_MAINTAINER:pn-python3-scons = "Tim Orling <tim.orling@konsulko.com>"
RECIPE_MAINTAINER:pn-python3-semantic-version = "Tim Orling <tim.orling@konsulko.com>"
-RECIPE_MAINTAINER:pn-python3-setuptools = "Oleksandr Kravchuk <open.source@oleksandr-kravchuk.com>"
+RECIPE_MAINTAINER:pn-python3-setuptools = "Unassigned <unassigned@yoctoproject.org>"
RECIPE_MAINTAINER:pn-python3-setuptools-rust = "Tim Orling <tim.orling@konsulko.com>"
RECIPE_MAINTAINER:pn-python3-setuptools-scm = "Tim Orling <tim.orling@konsulko.com>"
-RECIPE_MAINTAINER:pn-python3-six = "Zang Ruochen <zangrc.fnst@fujitsu.com>"
+RECIPE_MAINTAINER:pn-python3-six = "Zang Ruochen <zangruochen@loongson.cn>"
RECIPE_MAINTAINER:pn-python3-smartypants = "Alexander Kanavin <alex.kanavin@gmail.com>"
-RECIPE_MAINTAINER:pn-python3-smmap = "Oleksandr Kravchuk <open.source@oleksandr-kravchuk.com>"
+RECIPE_MAINTAINER:pn-python3-smmap = "Unassigned <unassigned@yoctoproject.org>"
RECIPE_MAINTAINER:pn-python3-snowballstemmer = "Tim Orling <tim.orling@konsulko.com>"
RECIPE_MAINTAINER:pn-python3-sortedcontainers = "Tim Orling <tim.orling@konsulko.com>"
+RECIPE_MAINTAINER:pn-python3-spdx-tools = "Marta Rybczynska <mrybczynska@syslinbit.com>"
RECIPE_MAINTAINER:pn-python3-sphinx = "Tim Orling <tim.orling@konsulko.com>"
-RECIPE_MAINTAINER:pn-python3-sphinxcontrib-qthelp = "Tim Orling <tim.orling@konsulko.com>"
+RECIPE_MAINTAINER:pn-python3-sphinxcontrib-applehelp = "Tim Orling <tim.orling@konsulko.com>"
RECIPE_MAINTAINER:pn-python3-sphinxcontrib-devhelp = "Tim Orling <tim.orling@konsulko.com>"
RECIPE_MAINTAINER:pn-python3-sphinxcontrib-htmlhelp = "Tim Orling <tim.orling@konsulko.com>"
-RECIPE_MAINTAINER:pn-python3-sphinxcontrib-serializinghtml = "Tim Orling <tim.orling@konsulko.com>"
RECIPE_MAINTAINER:pn-python3-sphinxcontrib-jsmath = "Tim Orling <tim.orling@konsulko.com>"
-RECIPE_MAINTAINER:pn-python3-sphinxcontrib-applehelp = "Tim Orling <tim.orling@konsulko.com>"
+RECIPE_MAINTAINER:pn-python3-sphinxcontrib-jquery = "Tim Orling <tim.orling@konsulko.com>"
+RECIPE_MAINTAINER:pn-python3-sphinxcontrib-qthelp = "Tim Orling <tim.orling@konsulko.com>"
+RECIPE_MAINTAINER:pn-python3-sphinxcontrib-serializinghtml = "Tim Orling <tim.orling@konsulko.com>"
RECIPE_MAINTAINER:pn-python3-sphinx-rtd-theme = "Tim Orling <tim.orling@konsulko.com>"
-RECIPE_MAINTAINER:pn-python3-strict-rfc3339 = "Bruce Ashfield <bruce.ashfield@gmail.com>"
-RECIPE_MAINTAINER:pn-python3-subunit = "Oleksandr Kravchuk <open.source@oleksandr-kravchuk.com>"
-RECIPE_MAINTAINER:pn-python3-testtools = "Oleksandr Kravchuk <open.source@oleksandr-kravchuk.com>"
+RECIPE_MAINTAINER:pn-python3-subunit = "Unassigned <unassigned@yoctoproject.org>"
+RECIPE_MAINTAINER:pn-python3-testtools = "Unassigned <unassigned@yoctoproject.org>"
RECIPE_MAINTAINER:pn-python3-toml = "Tim Orling <tim.orling@konsulko.com>"
RECIPE_MAINTAINER:pn-python3-tomli = "Tim Orling <tim.orling@konsulko.com>"
+RECIPE_MAINTAINER:pn-python3-trove-classifiers = "Trevor Gamblin <tgamblin@baylibre.com>"
RECIPE_MAINTAINER:pn-python3-typing-extensions = "Tim Orling <tim.orling@konsulko.com>"
RECIPE_MAINTAINER:pn-python3-typogrify = "Alexander Kanavin <alex.kanavin@gmail.com>"
+RECIPE_MAINTAINER:pn-python3-unittest-automake-output = "Ross Burton <ross.burton@arm.com>"
+RECIPE_MAINTAINER:pn-python3-uritools = "Marta Rybczynska <mrybczynska@syslinbit.com>"
RECIPE_MAINTAINER:pn-python3-urllib3 = "Tim Orling <tim.orling@konsulko.com>"
RECIPE_MAINTAINER:pn-python3-vcversioner = "Bruce Ashfield <bruce.ashfield@gmail.com>"
RECIPE_MAINTAINER:pn-python3-wcwidth = "Tim Orling <tim.orling@konsulko.com>"
RECIPE_MAINTAINER:pn-python3-webcolors = "Bruce Ashfield <bruce.ashfield@gmail.com>"
+RECIPE_MAINTAINER:pn-python3-websockets = "Tim Orling <tim.orling@konsulko.com>"
RECIPE_MAINTAINER:pn-python3-wheel = "Tim Orling <tim.orling@konsulko.com>"
+RECIPE_MAINTAINER:pn-python3-xmltodict = "Leon Anavi <leon.anavi@konsulko.com>"
+RECIPE_MAINTAINER:pn-python3-yamllint = "Tim Orling <tim.orling@konsulko.com>"
RECIPE_MAINTAINER:pn-python3-zipp = "Tim Orling <tim.orling@konsulko.com>"
RECIPE_MAINTAINER:pn-qemu = "Richard Purdie <richard.purdie@linuxfoundation.org>"
RECIPE_MAINTAINER:pn-qemu-helper-native = "Richard Purdie <richard.purdie@linuxfoundation.org>"
@@ -702,7 +744,7 @@ RECIPE_MAINTAINER:pn-quilt-native = "Robert Yang <liezhi.yang@windriver.com>"
RECIPE_MAINTAINER:pn-quota = "Anuj Mittal <anuj.mittal@intel.com>"
RECIPE_MAINTAINER:pn-re2c = "Khem Raj <raj.khem@gmail.com>"
RECIPE_MAINTAINER:pn-readline = "Hongxu Jia <hongxu.jia@windriver.com>"
-RECIPE_MAINTAINER:pn-repo = "Jasper Orschulko <Jasper.Orschulko@iris-sensing.com>"
+RECIPE_MAINTAINER:pn-repo = "Unassigned <unassigned@yoctoproject.org>"
RECIPE_MAINTAINER:pn-resolvconf = "Chen Qi <Qi.Chen@windriver.com>"
RECIPE_MAINTAINER:pn-rgb = "Unassigned <unassigned@yoctoproject.org>"
RECIPE_MAINTAINER:pn-rpcbind = "Hongxu Jia <hongxu.jia@windriver.com>"
@@ -714,14 +756,11 @@ RECIPE_MAINTAINER:pn-rt-tests = "Alexander Kanavin <alex.kanavin@gmail.com>"
RECIPE_MAINTAINER:pn-ruby = "Ross Burton <ross.burton@arm.com>"
RECIPE_MAINTAINER:pn-run-postinsts = "Ross Burton <ross.burton@arm.com>"
RECIPE_MAINTAINER:pn-rust = "Randy MacLeod <Randy.MacLeod@windriver.com>"
-RECIPE_MAINTAINER:pn-rust-cross-${TUNE_PKGARCH}-${TCLIBC} = "Randy MacLeod <Randy.MacLeod@windriver.com>"
-RECIPE_MAINTAINER:pn-rust-crosssdk-${SDK_ARCH}-glibc = "Randy MacLeod <Randy.MacLeod@windriver.com>"
RECIPE_MAINTAINER:pn-rust-cross-canadian-${TRANSLATED_TARGET_ARCH} = "Randy MacLeod <Randy.MacLeod@windriver.com>"
-RECIPE_MAINTAINER:pn-rust-hello-world = "Randy MacLeod <Randy.MacLeod@windriver.com>"
RECIPE_MAINTAINER:pn-rust-llvm = "Randy MacLeod <Randy.MacLeod@windriver.com>"
-RECIPE_MAINTAINER:pn-rust-tools-cross-canadian-${TRANSLATED_TARGET_ARCH} = "Randy MacLeod <Randy.MacLeod@windriver.com>"
RECIPE_MAINTAINER:pn-rxvt-unicode = "Unassigned <unassigned@yoctoproject.org>"
RECIPE_MAINTAINER:pn-sato-screenshot = "Ross Burton <ross.burton@arm.com>"
+RECIPE_MAINTAINER:pn-sato-icon-theme = "Richard Purdie <richard.purdie@linuxfoundation.org>"
RECIPE_MAINTAINER:pn-sbc = "Unassigned <unassigned@yoctoproject.org>"
RECIPE_MAINTAINER:pn-screen = "Anuj Mittal <anuj.mittal@intel.com>"
RECIPE_MAINTAINER:pn-seatd = "Alexander Kanavin <alex.kanavin@gmail.com>"
@@ -759,17 +798,19 @@ RECIPE_MAINTAINER:pn-systemd = "Chen Qi <Qi.Chen@windriver.com>"
RECIPE_MAINTAINER:pn-systemd-boot = "Chen Qi <Qi.Chen@windriver.com>"
RECIPE_MAINTAINER:pn-systemd-bootchart = "Chen Qi <Qi.Chen@windriver.com>"
RECIPE_MAINTAINER:pn-systemd-bootconf = "Chen Qi <Qi.Chen@windriver.com>"
+RECIPE_MAINTAINER:pn-systemd-boot-native = "Viswanath Kraleti <quic_vkraleti@quicinc.com>"
RECIPE_MAINTAINER:pn-systemd-conf = "Chen Qi <Qi.Chen@windriver.com>"
RECIPE_MAINTAINER:pn-systemd-compat-units = "Chen Qi <Qi.Chen@windriver.com>"
+RECIPE_MAINTAINER:pn-systemd-machine-units = "Chen Qi <Qi.Chen@windriver.com>"
RECIPE_MAINTAINER:pn-systemd-serialgetty = "Chen Qi <Qi.Chen@windriver.com>"
RECIPE_MAINTAINER:pn-systemd-systemctl-native = "Chen Qi <Qi.Chen@windriver.com>"
RECIPE_MAINTAINER:pn-systemtap = "Victor Kamensky <victor.kamensky7@gmail.com>"
RECIPE_MAINTAINER:pn-systemtap-native = "Victor Kamensky <victor.kamensky7@gmail.com>"
-RECIPE_MAINTAINER:pn-systemtap-uprobes = "Victor Kamensky <victor.kamensky7@gmail.com>"
RECIPE_MAINTAINER:pn-sysvinit = "Ross Burton <ross.burton@arm.com>"
RECIPE_MAINTAINER:pn-sysvinit-inittab = "Ross Burton <ross.burton@arm.com>"
RECIPE_MAINTAINER:pn-taglib = "Anuj Mittal <anuj.mittal@intel.com>"
RECIPE_MAINTAINER:pn-tar = "Chen Qi <Qi.Chen@windriver.com>"
+RECIPE_MAINTAINER:pn-target-sdk-provides-dummy = "Richard Purdie <richard.purdie@linuxfoundation.org>"
RECIPE_MAINTAINER:pn-tcf-agent = "Anuj Mittal <anuj.mittal@intel.com>"
RECIPE_MAINTAINER:pn-tcl = "Yi Zhao <yi.zhao@windriver.com>"
RECIPE_MAINTAINER:pn-tcp-wrappers = "Robert Yang <liezhi.yang@windriver.com>"
@@ -779,6 +820,7 @@ RECIPE_MAINTAINER:pn-texinfo-dummy-native = "Anuj Mittal <anuj.mittal@intel.com>
RECIPE_MAINTAINER:pn-tiff = "Alexander Kanavin <alex.kanavin@gmail.com>"
RECIPE_MAINTAINER:pn-time = "Robert Yang <liezhi.yang@windriver.com>"
RECIPE_MAINTAINER:pn-ttf-bitstream-vera = "Anuj Mittal <anuj.mittal@intel.com>"
+RECIPE_MAINTAINER:pn-ttyrun = "Ross Burton <ross.burton@arm.com>"
RECIPE_MAINTAINER:pn-tzcode-native = "Unassigned <unassigned@yoctoproject.org>"
RECIPE_MAINTAINER:pn-tzdata = "Unassigned <unassigned@yoctoproject.org>"
RECIPE_MAINTAINER:pn-u-boot = "Marek Vasut <marek.vasut@gmail.com>"
@@ -794,6 +836,7 @@ RECIPE_MAINTAINER:pn-usbutils = "Alexander Kanavin <alex.kanavin@gmail.com>"
RECIPE_MAINTAINER:pn-util-linux = "Chen Qi <Qi.Chen@windriver.com>"
RECIPE_MAINTAINER:pn-util-linux-libuuid = "Chen Qi <Qi.Chen@windriver.com>"
RECIPE_MAINTAINER:pn-util-macros = "Unassigned <unassigned@yoctoproject.org>"
+RECIPE_MAINTAINER:pn-utfcpp = "Anuj Mittal <anuj.mittal@intel.com>"
RECIPE_MAINTAINER:pn-v86d = "Alexander Kanavin <alex.kanavin@gmail.com>"
RECIPE_MAINTAINER:pn-vala = "Alexander Kanavin <alex.kanavin@gmail.com>"
RECIPE_MAINTAINER:pn-valgrind = "Alexander Kanavin <alex.kanavin@gmail.com>"
@@ -806,6 +849,9 @@ RECIPE_MAINTAINER:pn-vulkan-headers = "Anuj Mittal <anuj.mittal@intel.com>"
RECIPE_MAINTAINER:pn-vulkan-loader = "Anuj Mittal <anuj.mittal@intel.com>"
RECIPE_MAINTAINER:pn-vulkan-samples = "Ross Burton <ross.burton@arm.com>"
RECIPE_MAINTAINER:pn-vulkan-tools = "Anuj Mittal <anuj.mittal@intel.com>"
+RECIPE_MAINTAINER:pn-vulkan-utility-libraries = "Anuj Mittal <anuj.mittal@intel.com>"
+RECIPE_MAINTAINER:pn-vulkan-validation-layers = "Vincent Davis Jr <vince@underview.tech>"
+RECIPE_MAINTAINER:pn-vulkan-volk = "Anuj Mittal <anuj.mittal@intel.com>"
RECIPE_MAINTAINER:pn-waffle = "Ross Burton <ross.burton@arm.com>"
RECIPE_MAINTAINER:pn-watchdog = "Alexander Kanavin <alex.kanavin@gmail.com>"
RECIPE_MAINTAINER:pn-watchdog-config = "Alexander Kanavin <alex.kanavin@gmail.com>"
@@ -818,7 +864,7 @@ RECIPE_MAINTAINER:pn-weston-init = "Denys Dmytriyenko <denis@denix.org>"
RECIPE_MAINTAINER:pn-wget = "Yi Zhao <yi.zhao@windriver.com>"
RECIPE_MAINTAINER:pn-which = "Anuj Mittal <anuj.mittal@intel.com>"
RECIPE_MAINTAINER:pn-wic-tools = "Anuj Mittal <anuj.mittal@intel.com>"
-RECIPE_MAINTAINER:pn-wireless-regdb = "Adrian Bunk <bunk@kernel.org>"
+RECIPE_MAINTAINER:pn-wireless-regdb = "Unassigned <unassigned@yoctoproject.org>"
RECIPE_MAINTAINER:pn-wpa-supplicant = "Changhyeok Bae <changhyeok.bae@gmail.com>"
RECIPE_MAINTAINER:pn-wpebackend-fdo = "Alexander Kanavin <alex.kanavin@gmail.com>"
RECIPE_MAINTAINER:pn-x11perf = "Unassigned <unassigned@yoctoproject.org>"
@@ -826,6 +872,7 @@ RECIPE_MAINTAINER:pn-x264 = "Anuj Mittal <anuj.mittal@intel.com>"
RECIPE_MAINTAINER:pn-xauth = "Unassigned <unassigned@yoctoproject.org>"
RECIPE_MAINTAINER:pn-xcb-proto = "Unassigned <unassigned@yoctoproject.org>"
RECIPE_MAINTAINER:pn-xcb-util = "Unassigned <unassigned@yoctoproject.org>"
+RECIPE_MAINTAINER:pn-xcb-util-cursor = "Unassigned <unassigned@yoctoproject.org>"
RECIPE_MAINTAINER:pn-xcb-util-image = "Unassigned <unassigned@yoctoproject.org>"
RECIPE_MAINTAINER:pn-xcb-util-keysyms = "Unassigned <unassigned@yoctoproject.org>"
RECIPE_MAINTAINER:pn-xcb-util-renderutil = "Unassigned <unassigned@yoctoproject.org>"
@@ -836,7 +883,6 @@ RECIPE_MAINTAINER:pn-xdpyinfo = "Unassigned <unassigned@yoctoproject.org>"
RECIPE_MAINTAINER:pn-xev = "Unassigned <unassigned@yoctoproject.org>"
RECIPE_MAINTAINER:pn-xeyes = "Unassigned <unassigned@yoctoproject.org>"
RECIPE_MAINTAINER:pn-xf86-input-evdev = "Unassigned <unassigned@yoctoproject.org>"
-RECIPE_MAINTAINER:pn-xf86-input-keyboard = "Unassigned <unassigned@yoctoproject.org>"
RECIPE_MAINTAINER:pn-xf86-input-libinput = "Unassigned <unassigned@yoctoproject.org>"
RECIPE_MAINTAINER:pn-xf86-input-mouse = "Unassigned <unassigned@yoctoproject.org>"
RECIPE_MAINTAINER:pn-xf86-input-synaptics = "Unassigned <unassigned@yoctoproject.org>"
diff --git a/meta/conf/distro/include/no-gplv3.inc b/meta/conf/distro/include/no-gplv3.inc
new file mode 100644
index 0000000000..b3eb936d94
--- /dev/null
+++ b/meta/conf/distro/include/no-gplv3.inc
@@ -0,0 +1,30 @@
+#
+# This include file is to document commonly used configuration options to minimise
+# GPLv3 content in images.
+#
+# The intent is to allow some images/configurations to build, not everything. There
+# will be a cost in reduced functionality.
+
+# Settings for full-cmdline
+RDEPENDS:packagegroup-core-full-cmdline-utils:remove = "bash bc coreutils cpio ed findutils gawk grep mc mc-shell mc-helpers mc-helpers-perl sed tar time"
+RDEPENDS:packagegroup-core-full-cmdline-dev-utils:remove = "diffutils m4 make patch"
+RDEPENDS:packagegroup-core-full-cmdline-multiuser:remove = "gzip"
+
+# Settings for weston
+# direct gpl3 dependencies
+RRECOMMENDS:packagegroup-base-vfat:remove = "dosfstools"
+PACKAGECONFIG:remove:pn-bluez5 = "readline"
+
+# dnf pulls in gpg which is gpl3; it also pulls in python3-rpm which pulls in rpm-build which pulls in bash
+# so install rpm but not dnf
+IMAGE_FEATURES:remove:pn-core-image-weston = "package-management"
+CORE_IMAGE_EXTRA_INSTALL:pn-core-image-weston += "rpm"
+IMAGE_FEATURES:remove:pn-core-image-full-cmdline = "package-management"
+CORE_IMAGE_EXTRA_INSTALL:pn-core-image-full-cmdline += "rpm"
+
+# matchbox-terminal depends on vte, which is gpl3
+CORE_IMAGE_BASE_INSTALL:remove:pn-core-image-weston = "matchbox-terminal"
+
+# Some python-tests use bash outside of ptest
+RDEPENDS:${PN}-tests:remove:class-target:pn-python3 = "${MLPREFIX}bash"
+INSANE_SKIP:${PN}-tests:pn-python3 = "file-rdeps"
diff --git a/meta/conf/distro/include/no-static-libs.inc b/meta/conf/distro/include/no-static-libs.inc
index ee67383460..75359928a1 100644
--- a/meta/conf/distro/include/no-static-libs.inc
+++ b/meta/conf/distro/include/no-static-libs.inc
@@ -18,6 +18,8 @@ DISABLE_STATIC:pn-nativesdk-openssl = ""
DISABLE_STATIC:pn-gcc-runtime = ""
# libusb1-native is used to build static dfu-util-native
DISABLE_STATIC:pn-libusb1-native = ""
+# needed by rust
+DISABLE_STATIC:pn-musl = ""
EXTRA_OECONF:append = "${DISABLE_STATIC}"
diff --git a/meta/conf/distro/include/ptest-packagelists.inc b/meta/conf/distro/include/ptest-packagelists.inc
index a1ead90649..5975db25cc 100644
--- a/meta/conf/distro/include/ptest-packagelists.inc
+++ b/meta/conf/distro/include/ptest-packagelists.inc
@@ -4,127 +4,152 @@
# ptests which take less than ~30s each
#
PTESTS_FAST = "\
- acl-ptest \
- apr-ptest \
- apr-util-ptest \
- attr-ptest \
- bluez5-ptest \
- bzip2-ptest \
- diffstat-ptest \
- diffutils-ptest \
- elfutils-ptest \
- ethtool-ptest \
- expat-ptest \
- flex-ptest \
- gawk-ptest \
- gdbm-ptest \
- gdk-pixbuf-ptest \
- gettext-ptest \
- glib-networking-ptest \
- gzip-ptest \
- json-glib-ptest \
- libconvert-asn1-perl-ptest \
- liberror-perl-ptest \
- libgpg-error-ptest\
- libnl-ptest \
- libmodule-build-perl-ptest \
- libpcre-ptest \
- libssh2-ptest \
- libtimedate-perl-ptest \
- libtest-needs-perl-ptest \
- liburi-perl-ptest \
- libusb1-ptest \
- libxml-namespacesupport-perl-ptest \
- libxml-perl-ptest \
- libxml-parser-perl-ptest \
- libxml-sax-perl-ptest \
- libxml-sax-base-perl-ptest \
- libxml-simple-perl-ptest \
- libxml2-ptest \
- lua-ptest \
- lzo-ptest \
- m4-ptest \
- nettle-ptest \
- openssl-ptest \
- opkg-ptest \
- pango-ptest \
- parted-ptest \
- popt-ptest \
- python3-atomicwrites-ptest \
- python3-bcrypt-ptest \
- python3-hypothesis-ptest \
- python3-jinja2-ptest \
- python3-jsonpointer-ptest \
- python3-markupsafe-ptest \
- python3-more-itertools-ptest \
- python3-pluggy-ptest \
- python3-pyasn1-ptest \
- python3-pytz-ptest \
- python3-wcwidth-ptest \
- python3-webcolors-ptest \
- qemu-ptest \
- quilt-ptest \
- sed-ptest \
- slang-ptest \
- wayland-ptest \
- zlib-ptest \
+ acl \
+ apr-util \
+ attr \
+ babeltrace \
+ babeltrace2 \
+ bc \
+ bluez5 \
+ busybox \
+ cpio \
+ diffstat \
+ diffutils \
+ ethtool \
+ expat \
+ expect \
+ findutils \
+ flex \
+ gawk \
+ gdbm \
+ gdk-pixbuf \
+ glib-networking \
+ gzip \
+ json-c \
+ json-glib \
+ libconvert-asn1-perl \
+ libgpg-error\
+ libnl \
+ libpcre \
+ libssh2 \
+ libtimedate-perl \
+ libtest-fatal-perl \
+ libtest-needs-perl \
+ libtest-warnings-perl \
+ libtry-tiny-perl \
+ liburi-perl \
+ libusb1 \
+ libxml-namespacesupport-perl \
+ libxml-perl \
+ libxml-parser-perl \
+ libxml-sax-perl \
+ libxml-sax-base-perl \
+ libxml-simple-perl \
+ libxml2 \
+ libxmlb \
+ logrotate \
+ lua \
+ lzo \
+ m4 \
+ nettle \
+ opkg \
+ pango \
+ popt \
+ python3-atomicwrites \
+ python3-attrs \
+ python3-bcrypt \
+ python3-calver \
+ python3-hypothesis \
+ python3-jinja2 \
+ python3-jsonpointer \
+ python3-license-expression \
+ python3-markupsafe \
+ python3-more-itertools \
+ python3-pluggy \
+ python3-pyasn1 \
+ python3-pytz \
+ python3-pyyaml \
+ python3-trove-classifiers \
+ python3-wcwidth \
+ python3-webcolors \
+ qemu \
+ quilt \
+ sed \
+ slang \
+ wayland \
+ xz \
+ zlib \
+ libexif \
"
-PTESTS_FAST:append:libc-glibc = " glibc-tests-ptest"
-PTESTS_PROBLEMS:remove:libc-glibc = "glibc-tests-ptest"
-PTESTS_FAST:remove:mips64 = "qemu-ptest"
-PTESTS_PROBLEMS:append:mips64 = " qemu-ptest"
-PTESTS_FAST:remove:riscv32 = "qemu-ptest"
-PTESTS_PROBLEMS:append:riscv32 = " qemu-ptest"
+PTESTS_FAST:append:libc-glibc = " glibc-y2038-tests"
+PTESTS_PROBLEMS:remove:libc-glibc = "glibc-y2038-tests"
+PTESTS_FAST:remove:mips64 = "qemu"
+PTESTS_PROBLEMS:append:mips64 = " qemu"
+PTESTS_FAST:remove:riscv32 = "qemu"
+PTESTS_PROBLEMS:append:riscv32 = " qemu"
PTESTS_SLOW = "\
- babeltrace-ptest \
- babeltrace2-ptest \
- busybox-ptest \
- coreutils-ptest \
- dbus-ptest \
- e2fsprogs-ptest \
- findutils-ptest \
- glib-2.0-ptest \
- gstreamer1.0-ptest \
- libevent-ptest \
- libgcrypt-ptest \
- lttng-tools-ptest \
- openssh-ptest \
- openssl-ptest \
- perl-ptest \
- python3-cryptography-ptest \
- python3-ptest \
- strace-ptest \
- tcl-ptest \
- util-linux-ptest \
- valgrind-ptest \
+ apr \
+ bzip2 \
+ coreutils \
+ curl \
+ dbus \
+ e2fsprogs \
+ elfutils \
+ gettext \
+ glib-2.0 \
+ gnutls \
+ gstreamer1.0 \
+ less \
+ libevent \
+ libgcrypt \
+ libmodule-build-perl \
+ lttng-tools \
+ openssh \
+ openssl \
+ parted \
+ perl \
+ python3-cryptography \
+ python3 \
+ python3-click \
+ python3-xmltodict \
+ strace \
+ tar \
+ tcl \
+ util-linux \
+ valgrind \
+ lz4 \
+ libseccomp \
"
-PTESTS_SLOW:remove:riscv64 = "valgrind-ptest"
-PTESTS_PROBLEMS:append:riscv64 = "valgrind-ptest"
+# python3 ptests hang on qemuriscv64
+PTESTS_SLOW:remove:riscv64 = "valgrind python3"
+PTESTS_PROBLEMS:append:riscv64 = " valgrind python3"
+PTESTS_SLOW:remove:riscv32 = "lttng-tools strace valgrind"
+PTESTS_PROBLEMS:append:riscv32 = " lttng-tools strace valgrind"
+PTESTS_SLOW:append:libc-musl = " libc-test"
+PTESTS_SLOW:remove:x86 = "valgrind"
+PTESTS_PROBLEMS:append:x86 = " valgrind"
-# ruby-ptest \ # Timeout
-# lz4-ptest \ # Needs a rewrite
-# rt-tests-ptest \ # Needs to be checked whether it runs at all
-# bash-ptest \ # Test outcomes are non-deterministic by design
-# ifupdown-ptest \ # Tested separately in lib/oeqa/selftest/cases/imagefeatures.py
-# mdadm-ptest \ # Tests rely on non-deterministic sleep() amounts
-# libinput-ptest \ # Tests need an unloaded system to be reliable
-# libpam-ptest \ # Needs pam DISTRO_FEATURE
-# numactl-ptest \ # qemu not (yet) configured for numa; all tests are skipped
-# libseccomp-ptest \ # tests failed: 38; add to slow tests once addressed
-# python3-numpy-ptest \ # requires even more RAM and (possibly) disk space; multiple failures
+# ruby \ # Timeout
+# rt-tests \ # Needs to be checked whether it runs at all
+# bash \ # Test outcomes are non-deterministic by design
+# ifupdown \ # Tested separately in lib/oeqa/selftest/cases/imagefeatures.py
+# libinput \ # Tests need an unloaded system to be reliable
+# libpam \ # Needs pam DISTRO_FEATURE
+# mdadm \ # tests are flaky in AB.
+# numactl \ # qemu not (yet) configured for numa; all tests are skipped
+# python3-numpy \ # requires even more RAM and (possibly) disk space; multiple failures
PTESTS_PROBLEMS = "\
- ruby-ptest \
- lz4-ptest \
- rt-tests-ptest \
- bash-ptest \
- ifupdown-ptest \
- mdadm-ptest \
- libinput-ptest \
- libpam-ptest \
- libseccomp-ptest \
- numactl-ptest \
- python3-numpy-ptest \
+ ruby \
+ rt-tests \
+ bash \
+ ifupdown \
+ libinput \
+ libpam \
+ mdadm \
+ numactl \
+ python3-license-expression \
+ python3-numpy \
"
diff --git a/meta/conf/distro/include/security_flags.inc b/meta/conf/distro/include/security_flags.inc
index 8374cb8544..d97a6edb0f 100644
--- a/meta/conf/distro/include/security_flags.inc
+++ b/meta/conf/distro/include/security_flags.inc
@@ -61,6 +61,7 @@ TARGET_LDFLAGS:append:class-cross-canadian = " ${SECURITY_LDFLAGS}"
SECURITY_STACK_PROTECTOR:pn-gcc-runtime = ""
SECURITY_STACK_PROTECTOR:pn-glibc = ""
SECURITY_STACK_PROTECTOR:pn-glibc-testsuite = ""
+SECURITY_STACK_PROTECTOR:pn-ltp = ""
# All xorg module drivers need to be linked this way as well and are
# handled in recipes-graphics/xorg-driver/xorg-driver-common.inc
SECURITY_LDFLAGS:pn-xserver-xorg = "${SECURITY_X_LDFLAGS}"
@@ -68,4 +69,3 @@ SECURITY_LDFLAGS:pn-xserver-xorg = "${SECURITY_X_LDFLAGS}"
TARGET_CC_ARCH:append:pn-binutils = " ${SELECTED_OPTIMIZATION}"
TARGET_CC_ARCH:append:pn-gcc = " ${SELECTED_OPTIMIZATION}"
TARGET_CC_ARCH:append:pn-gdb = " ${SELECTED_OPTIMIZATION}"
-TARGET_CC_ARCH:append:pn-perf = " ${SELECTED_OPTIMIZATION}"
diff --git a/meta/conf/distro/include/tcmode-default.inc b/meta/conf/distro/include/tcmode-default.inc
index 1179ba172f..643394f3eb 100644
--- a/meta/conf/distro/include/tcmode-default.inc
+++ b/meta/conf/distro/include/tcmode-default.inc
@@ -8,7 +8,7 @@ PREFERRED_PROVIDER_virtual/${TARGET_PREFIX}g++ = "gcc-cross-${TARGET_ARCH}"
PREFERRED_PROVIDER_virtual/${TARGET_PREFIX}compilerlibs = "gcc-runtime"
PREFERRED_PROVIDER_gdb = "gdb"
-PREFERRED_PROVIDER_virtual/${SDK_PREFIX}binutils-crosssdk ?= "binutils-crosssdk-${SDK_SYS}"
+PREFERRED_PROVIDER_virtual/${SDK_PREFIX}binutils ?= "binutils-crosssdk-${SDK_SYS}"
PREFERRED_PROVIDER_virtual/${SDK_PREFIX}gcc = "gcc-crosssdk-${SDK_SYS}"
PREFERRED_PROVIDER_virtual/${SDK_PREFIX}g++ = "gcc-crosssdk-${SDK_SYS}"
PREFERRED_PROVIDER_virtual/${SDK_PREFIX}compilerlibs = "nativesdk-gcc-runtime"
@@ -16,18 +16,15 @@ PREFERRED_PROVIDER_virtual/${SDK_PREFIX}compilerlibs = "nativesdk-gcc-runtime"
# Default libc config
PREFERRED_PROVIDER_virtual/gettext ??= "gettext"
-GCCVERSION ?= "12.%"
+GCCVERSION ?= "13.%"
SDKGCCVERSION ?= "${GCCVERSION}"
-BINUVERSION ?= "2.38%"
-GDBVERSION ?= "12.%"
-GLIBCVERSION ?= "2.35"
-LINUXLIBCVERSION ?= "5.16%"
-QEMUVERSION ?= "7.0%"
-GOVERSION ?= "1.18%"
-# This can not use wildcards like 8.0.% since it is also used in mesa to denote
-# llvm version being used, so always bump it with llvm recipe version bump
-LLVMVERSION ?= "14.0.3"
-RUSTVERSION ?= "1.60%"
+BINUVERSION ?= "2.42%"
+GDBVERSION ?= "14.%"
+GLIBCVERSION ?= "2.39%"
+LINUXLIBCVERSION ?= "6.6%"
+QEMUVERSION ?= "8.2%"
+GOVERSION ?= "1.22%"
+RUSTVERSION ?= "1.75%"
PREFERRED_VERSION_gcc ?= "${GCCVERSION}"
PREFERRED_VERSION_gcc-cross-${TARGET_ARCH} ?= "${GCCVERSION}"
@@ -79,10 +76,6 @@ PREFERRED_VERSION_go-runtime ?= "${GOVERSION}"
PREFERRED_VERSION_nativesdk-go ?= "${GOVERSION}"
PREFERRED_VERSION_nativesdk-go-runtime ?= "${GOVERSION}"
-PREFERRED_VERSION_llvm = "${LLVMVERSION}"
-PREFERRED_VERSION_llvm-native = "${LLVMVERSION}"
-PREFERRED_VERSION_nativesdk-llvm = "${LLVMVERSION}"
-
# Rust toolchain preferred versions:
PREFERRED_VERSION_cargo ?= "${RUSTVERSION}"
diff --git a/meta/conf/distro/include/time64.inc b/meta/conf/distro/include/time64.inc
new file mode 100644
index 0000000000..2e85753e55
--- /dev/null
+++ b/meta/conf/distro/include/time64.inc
@@ -0,0 +1,49 @@
+# To simulate Y2038 occurring in qemu, add to your build configuration:
+# QB_OPT_APPEND:append = " -rtc base=2040-02-02"
+#
+# Note that this does result in ptest failures on qemux86:
+# perl python3 dbus openssl glibc-tests openssh curl glib-2.0 tcl libmodule-build-perl
+# and a subset of those occurs in qemux86-64 as well:
+# curl python3 openssl openssl tcl python3-cryptography
+#
+# Working to address those (before Y2038 rolls in) will be appreciated.
+
+GLIBC_64BIT_TIME_FLAGS = " -D_TIME_BITS=64 -D_FILE_OFFSET_BITS=64"
+
+# Only needed for some 32-bit architectures, some relatively newer
+# architectures do not need it ( e.g. riscv32 )
+TARGET_CC_ARCH:append:arm = "${GLIBC_64BIT_TIME_FLAGS}"
+TARGET_CC_ARCH:append:armeb = "${GLIBC_64BIT_TIME_FLAGS}"
+TARGET_CC_ARCH:append:mipsarcho32 = "${GLIBC_64BIT_TIME_FLAGS}"
+TARGET_CC_ARCH:append:powerpc = "${@bb.utils.contains('TUNE_FEATURES', 'm32', '${GLIBC_64BIT_TIME_FLAGS}', '', d)}"
+TARGET_CC_ARCH:append:x86 = "${@bb.utils.contains('TUNE_FEATURES', 'm32', '${GLIBC_64BIT_TIME_FLAGS}', '', d)}"
+
+GLIBC_64BIT_TIME_FLAGS:pn-glibc = ""
+GLIBC_64BIT_TIME_FLAGS:pn-glibc-y2038-tests = ""
+GLIBC_64BIT_TIME_FLAGS:pn-glibc-testsuite = ""
+# pipewire-v4l2 explicitly sets _FILE_OFFSET_BITS=32 to get access to
+# both 32 and 64 bit file APIs. But it does not handle the time side?
+# Needs further investigation
+GLIBC_64BIT_TIME_FLAGS:pn-pipewire = ""
+# Pulseaudio override certain LFS64 functions e.g. open64 and intentionally
+# undefines _FILE_OFFSET_BITS, which wont work when _TIME_BITS=64 is set
+# See https://gitlab.freedesktop.org/pulseaudio/pulseaudio/-/issues/3770
+GLIBC_64BIT_TIME_FLAGS:pn-pulseaudio = ""
+# Undefines _FILE_OFFSET_BITS on purpose in
+# libsanitizer/sanitizer_common/sanitizer_platform_limits_posix.cpp
+GLIBC_64BIT_TIME_FLAGS:pn-gcc-sanitizers = ""
+
+# Caused by the flags exceptions above
+INSANE_SKIP:append:pn-gcc-sanitizers = " 32bit-time"
+INSANE_SKIP:append:pn-glibc = " 32bit-time"
+INSANE_SKIP:append:pn-glibc-y2038-tests = " 32bit-time"
+INSANE_SKIP:append:pn-pulseaudio = " 32bit-time"
+
+# Strace has tests that call 32 bit API directly, which is fair enough, e.g.
+# /usr/lib/strace/ptest/tests/ioctl_termios uses 32-bit api 'ioctl'
+INSANE_SKIP:append:pn-strace = " 32bit-time"
+
+# Additionally cargo_common class (i.e. everything written in rust)
+# has the same INSANE_SKIP setting.
+# Please check the comment in meta/classes-recipe/cargo_common.bbclass
+# for information about why, and the overall Y2038 situation in rust.
diff --git a/meta/conf/distro/include/yocto-uninative.inc b/meta/conf/distro/include/yocto-uninative.inc
index 411fe45a24..4ac66fd506 100644
--- a/meta/conf/distro/include/yocto-uninative.inc
+++ b/meta/conf/distro/include/yocto-uninative.inc
@@ -6,10 +6,10 @@
# to the distro running on the build machine.
#
-UNINATIVE_MAXGLIBCVERSION = "2.35"
-UNINATIVE_VERSION = "3.6"
+UNINATIVE_MAXGLIBCVERSION = "2.39"
+UNINATIVE_VERSION = "4.4"
UNINATIVE_URL ?= "http://downloads.yoctoproject.org/releases/uninative/${UNINATIVE_VERSION}/"
-UNINATIVE_CHECKSUM[aarch64] ?= "d64831cf2792c8e470c2e42230660e1a8e5de56a579cdd59978791f663c2f3ed"
-UNINATIVE_CHECKSUM[i686] ?= "2f0ee9b66b1bb2c85e2b592fb3c9c7f5d77399fa638d74961330cdb8de34ca3b"
-UNINATIVE_CHECKSUM[x86_64] ?= "9bfc4c970495b3716b2f9e52c4df9f968c02463a9a95000f6657fbc3fde1f098"
+UNINATIVE_CHECKSUM[aarch64] ?= "b61876130f494f75092f21086b4a64ea5fb064045769bf1d32e9cb6af17ea8ec"
+UNINATIVE_CHECKSUM[i686] ?= "9f28627828f0082cc0344eede4d9a861a9a064bfa8f36e072e46212f0fe45fcc"
+UNINATIVE_CHECKSUM[x86_64] ?= "d81c54284be2bb886931fc87281d58177a2cd381cf99d1981f8923039a72a302"
diff --git a/meta/conf/documentation.conf b/meta/conf/documentation.conf
index ab2addb321..b0591881ba 100644
--- a/meta/conf/documentation.conf
+++ b/meta/conf/documentation.conf
@@ -28,7 +28,7 @@ do_kernel_configcheck[doc] = "Validates the kernel configuration for a linux-yoc
do_kernel_configme[doc] = "Assembles the kernel configuration for a linux-yocto style kernel"
do_kernel_link_images[doc] = "Creates a symbolic link in arch/$arch/boot for vmlinux and vmlinuz kernel images"
do_listtasks[doc] = "Lists all defined tasks for a target"
-do_menuconfig[doc] = "Runs 'make menuconfig' for the kernel"
+do_menuconfig[doc] = "Runs 'make menuconfig' in the compilation directory"
do_package[doc] = "Analyzes the content of the holding area and splits it into subsets based on available packages and files"
do_package_index[doc] = "Creates or updates the index in the Package Feed area"
do_package_qa[doc] = "Runs QA checks on packaged files"
@@ -106,6 +106,7 @@ BUSYBOX_SPLIT_SUID[doc] = "For the BusyBox recipe, specifies whether to split th
#C
CACHE[doc] = "The directory holding the cache of the metadata."
+CCACHE_NATIVE_RECIPES_ALLOWED[doc] = "A list of native recipes to use Ccache, this variable is useful to bypass a circular dependency between ccache-native and cmake-native that inhibits to use Ccache in other native recipes."
CFLAGS[doc] = "Flags passed to the C compiler for the target system. This variable evaluates to the same as TARGET_CFLAGS."
CLASSOVERRIDE[doc] = "An internal variable specifying the special class override that should currently apply (e.g. "class-target", "class-native", and so forth)."
CLEANBROKEN[doc] = "Specifies if 'make clean' does not work for a recipe (and therefore the build system should not try to use it during do_configure)"
@@ -253,6 +254,7 @@ KERNEL_MODULE_PROBECONF[doc] = "Lists kernel modules for which the build system
KERNEL_PACKAGE_NAME[doc] = "Name prefix for kernel packages. Defaults to 'kernel'."
KERNEL_PATH[doc] = "The location of the kernel sources. This variable is set to the value of the STAGING_KERNEL_DIR within the module class (module.bbclass)."
KERNEL_SRC[doc] = "The location of the kernel sources. This variable is set to the value of the STAGING_KERNEL_DIR within the module class (module.bbclass)."
+KERNEL_LOCALVERSION[doc] = "Appends a string to the name of the local version of the kernel image."
KFEATURE_DESCRIPTION[doc] = "Provides a short description of a configuration fragment. You use this variable in the .scc file that describes a configuration fragment file."
KMACHINE[doc] = "The machine as known by the kernel."
KTYPE[doc] = "Defines the kernel type to be used in assembling the configuration."
@@ -269,6 +271,7 @@ LICENSE_PATH[doc] = "Path to additional licenses used during the build."
LINUX_KERNEL_TYPE[doc] = "Defines the kernel type to be used in assembling the configuration."
LINUX_VERSION[doc] = "The Linux version from kernel.org on which the Linux kernel image being built using the OpenEmbedded build system is based. You define this variable in the kernel recipe."
LINUX_VERSION_EXTENSION[doc] = "A string extension compiled into the version string of the Linux kernel built with the OpenEmbedded build system. You define this variable in the kernel recipe."
+LOCALE_PATHS[doc] = "Whitespace separated list of paths that are scanned to construct locale packages. The list already contains ${datadir}/locale by default. Note that all subdirectories in these paths are assumed to be locales."
LOCALE_UTF8_IS_DEFAULT[doc] = "If set, locale names are renamed such that those lacking an explicit encoding (e.g. en_US) will always be UTF-8, and non-UTF-8 encodings are renamed to, e.g., en_US.ISO-8859-1. Otherwise, the encoding is specified by glibc's SUPPORTED file. Not supported for precompiled locales."
LOG_DIR[doc] = "Specifies the directory to which the OpenEmbedded build system writes overall log files. The default directory is ${TMPDIR}/log"
@@ -376,9 +379,7 @@ SDKIMAGE_FEATURES[doc] = "Equivalent to IMAGE_FEATURES. However, this variable a
SDKMACHINE[doc] = "Specifies the architecture (i.e. i686 or x86_64) for which to build SDK and ADT items."
SECTION[doc] = "The section in which packages should be categorized. Package management utilities can make use of this variable."
SELECTED_OPTIMIZATION[doc] = "The variable takes the value of FULL_OPTIMIZATION unless DEBUG_BUILD = '1'. In this case, the value of DEBUG_OPTIMIZATION is used."
-SERIAL_CONSOLE[doc] = "The speed and device for the serial port used to attach the serial console. This variable is given to the kernel as the 'console' parameter. After booting occurs, getty is started on that port so remote login is possible."
SERIAL_CONSOLES[doc] = "Defines the serial consoles (TTYs) to enable using getty."
-SERIAL_CONSOLES_CHECK[doc] = "Similar to SERIAL_CONSOLES except the device is checked for existence before attempting to enable it. Supported only by SysVinit."
SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS[doc] = "A list of recipe dependencies that should not be used to determine signatures of tasks from one recipe when they depend on tasks from another recipe."
SIGGEN_EXCLUDERECIPES_ABISAFE[doc] = "A list of recipes that are completely stable and will never change."
SITEINFO_BITS[doc] = "Specifies the number of bits for the target system CPU."
@@ -393,7 +394,6 @@ SPECIAL_PKGSUFFIX[doc] = "A list of prefixes for PN used by the OpenEmbedded bui
SRC_URI[doc] = "The list of source files - local or remote. This variable tells the OpenEmbedded build system what bits to pull in for the build and how to pull them in."
SRC_URI_OVERRIDES_PACKAGE_ARCH[doc] = "By default, the OpenEmbedded build system automatically detects whether SRC_URI contains files that are machine-specific. If so, the build system automatically changes PACKAGE_ARCH. Setting this variable to '0' disables this behavior."
SRCDATE[doc] = "The date of the source code used to build the package. This variable applies only if the source was fetched from a Source Code Manager (SCM)."
-SRCPV[doc] = "Returns the version string of the current package. This string is used to help define the value of PV."
SRCREV[doc] = "The revision of the source code used to build the package. This variable applies to Subversion, Git, Mercurial and Bazaar only."
SSTATE_DIR[doc] = "The directory for the shared state cache."
SSTATE_MIRRORS[doc] = "Configures the OpenEmbedded build system to search other mirror locations for prebuilt cache data objects before building out the data. You can specify a filesystem directory or a remote URL such as HTTP or FTP."
diff --git a/meta/conf/image-uefi.conf b/meta/conf/image-uefi.conf
index 04a151f3a4..aec91bcfe2 100644
--- a/meta/conf/image-uefi.conf
+++ b/meta/conf/image-uefi.conf
@@ -1,12 +1,16 @@
# Location of EFI files inside EFI System Partition
EFIDIR ?= "/EFI/BOOT"
+# Location of UKI inside EFI System Partition
+EFI_UKI_DIR ?= "/EFI/Linux"
+
# Prefix where ESP is mounted inside rootfs. Set to empty if package is going
# to be installed to ESP directly
EFI_PREFIX ?= "/boot"
# Location inside rootfs.
EFI_FILES_PATH = "${EFI_PREFIX}${EFIDIR}"
+EFI_UKI_PATH = "${EFI_PREFIX}${EFI_UKI_DIR}"
# The EFI name for the architecture
EFI_ARCH ?= "INVALID"
@@ -16,6 +20,7 @@ EFI_ARCH:aarch64 = "aa64"
EFI_ARCH:arm = "arm"
EFI_ARCH:riscv32 = "riscv32"
EFI_ARCH:riscv64 = "riscv64"
+EFI_ARCH:loongarch64 = "loongarch64"
# Determine name of bootloader image
EFI_BOOT_IMAGE ?= "boot${EFI_ARCH}.efi"
diff --git a/meta/conf/layer.conf b/meta/conf/layer.conf
index 076cba58d1..f2bca0aa5b 100644
--- a/meta/conf/layer.conf
+++ b/meta/conf/layer.conf
@@ -7,12 +7,12 @@ BBFILE_COLLECTIONS += "core"
BBFILE_PATTERN_core = "^${LAYERDIR}/"
BBFILE_PRIORITY_core = "5"
-LAYERSERIES_CORENAMES = "kirkstone langdale"
+LAYERSERIES_CORENAMES = "scarthgap styhead"
# This should only be incremented on significant changes that will
# cause compatibility issues with other layers
-LAYERVERSION_core = "14"
-LAYERSERIES_COMPAT_core = "kirkstone langdale"
+LAYERVERSION_core = "15"
+LAYERSERIES_COMPAT_core = "styhead"
BBLAYERS_LAYERINDEX_NAME_core = "openembedded-core"
@@ -69,6 +69,7 @@ SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS += " \
initramfs-module-install->grub \
initramfs-module-install->parted \
initramfs-module-install->util-linux \
+ initramfs-module-setup-live->udev-extraconf \
grub-efi->grub-bootconf \
liberation-fonts->fontconfig \
cantarell-fonts->fontconfig \
@@ -82,8 +83,11 @@ SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS += " \
weston-init->kbd \
connman->xl2tpd \
lttng-tools->lttng-modules \
+ sato-icon-theme->gdk-pixbuf \
+ sato-icon-theme->gtk+3 \
adwaita-icon-theme->gdk-pixbuf \
adwaita-icon-theme->gtk+3 \
+ run-postinsts->util-linux \
"
# Avoid adding bison-native to the sysroot without a specific
@@ -123,3 +127,8 @@ SSTATE_EXCLUDEDEPS_SYSROOT += ".*->autoconf-archive-native"
# Avoid empty path entries
BITBAKEPATH := "${@os.path.dirname(bb.utils.which(d.getVar('PATH'),'bitbake'))}"
PATH := "${@'${BITBAKEPATH}:' if '${BITBAKEPATH}' != '' else ''}${HOSTTOOLS_DIR}"
+
+# Only OE-Core should set/change this
+BB_GLOBAL_PYMODULES = "os sys time"
+
+addpylib ${LAYERDIR}/lib oe
diff --git a/meta/conf/machine-sdk/aarch64.conf b/meta/conf/machine-sdk/aarch64.conf
index 4a285cf6cb..d2023e2174 100644
--- a/meta/conf/machine-sdk/aarch64.conf
+++ b/meta/conf/machine-sdk/aarch64.conf
@@ -1,2 +1,4 @@
SDK_ARCH = "aarch64"
ABIEXTENSION:class-nativesdk = ""
+
+SDK_MACHINE_FEATURES = "qemu-usermode"
diff --git a/meta/conf/machine-sdk/i586.conf b/meta/conf/machine-sdk/i586.conf
index 05e826965e..ccee3a4881 100644
--- a/meta/conf/machine-sdk/i586.conf
+++ b/meta/conf/machine-sdk/i586.conf
@@ -2,3 +2,4 @@ SDK_ARCH = "i586"
SDK_CC_ARCH = "-march=i586"
ABIEXTENSION:class-nativesdk = ""
+SDK_MACHINE_FEATURES = "qemu-usermode"
diff --git a/meta/conf/machine-sdk/i686.conf b/meta/conf/machine-sdk/i686.conf
index 22f580e70f..abb7a4bc1a 100644
--- a/meta/conf/machine-sdk/i686.conf
+++ b/meta/conf/machine-sdk/i686.conf
@@ -1,3 +1,5 @@
SDK_ARCH = "i686"
SDK_CC_ARCH = "-march=i686"
ABIEXTENSION:class-nativesdk = ""
+
+SDK_MACHINE_FEATURES = "qemu-usermode"
diff --git a/meta/conf/machine-sdk/loongarch64.conf b/meta/conf/machine-sdk/loongarch64.conf
new file mode 100644
index 0000000000..6bb4c7a4ea
--- /dev/null
+++ b/meta/conf/machine-sdk/loongarch64.conf
@@ -0,0 +1,4 @@
+SDK_ARCH = "loongarch64"
+ABIEXTENSION:class-nativesdk = ""
+
+SDK_MACHINE_FEATURES = "qemu-usermode"
diff --git a/meta/conf/machine-sdk/ppc64.conf b/meta/conf/machine-sdk/ppc64.conf
index b41325dfe7..8ba3fccc67 100644
--- a/meta/conf/machine-sdk/ppc64.conf
+++ b/meta/conf/machine-sdk/ppc64.conf
@@ -1,2 +1,4 @@
SDK_ARCH = "ppc64"
ABIEXTENSION:class-nativesdk = ""
+
+SDK_MACHINE_FEATURES = "qemu-usermode"
diff --git a/meta/conf/machine-sdk/ppc64le.conf b/meta/conf/machine-sdk/ppc64le.conf
index db1f863496..3ee12faf57 100644
--- a/meta/conf/machine-sdk/ppc64le.conf
+++ b/meta/conf/machine-sdk/ppc64le.conf
@@ -1,2 +1,4 @@
SDK_ARCH = "ppc64le"
ABIEXTENSION:class-nativesdk = ""
+
+SDK_MACHINE_FEATURES = "qemu-usermode"
diff --git a/meta/conf/machine-sdk/riscv64.conf b/meta/conf/machine-sdk/riscv64.conf
new file mode 100644
index 0000000000..a43fe433fe
--- /dev/null
+++ b/meta/conf/machine-sdk/riscv64.conf
@@ -0,0 +1,4 @@
+SDK_ARCH = "riscv64"
+ABIEXTENSION:class-nativesdk = ""
+
+SDK_MACHINE_FEATURES = "qemu-usermode"
diff --git a/meta/conf/machine-sdk/x86_64.conf b/meta/conf/machine-sdk/x86_64.conf
index 2ab607e65c..357dab0b88 100644
--- a/meta/conf/machine-sdk/x86_64.conf
+++ b/meta/conf/machine-sdk/x86_64.conf
@@ -1,3 +1,6 @@
SDK_ARCH = "x86_64"
+SDK_CC_ARCH = "-march=x86-64"
ABIEXTENSION:class-crosssdk = ""
ABIEXTENSION:class-nativesdk = ""
+
+SDK_MACHINE_FEATURES = "qemu-usermode"
diff --git a/meta/conf/machine/include/arm/arch-arm64.inc b/meta/conf/machine/include/arm/arch-arm64.inc
index 0e2efb5a40..832d0000ac 100644
--- a/meta/conf/machine/include/arm/arch-arm64.inc
+++ b/meta/conf/machine/include/arm/arch-arm64.inc
@@ -37,3 +37,8 @@ TUNE_ARCH = "${@bb.utils.contains('TUNE_FEATURES', 'aarch64', '${TUNE_ARCH_64}',
TUNE_PKGARCH = "${@bb.utils.contains('TUNE_FEATURES', 'aarch64', '${TUNE_PKGARCH_64}', '${TUNE_PKGARCH_32}', d)}"
ABIEXTENSION = "${@bb.utils.contains('TUNE_FEATURES', 'aarch64', '${ABIEXTENSION_64}', '${ABIEXTENSION_32}', d)}"
TARGET_FPU = "${@bb.utils.contains('TUNE_FEATURES', 'aarch64', '${TARGET_FPU_64}', '${TARGET_FPU_32}', d)}"
+
+# Emit branch protection (PAC/BTI) instructions. On hardware that doesn't
+# support these they're meaningless NOP instructions, so there's very little
+# reason not to.
+TUNE_CCARGS .= "${@bb.utils.contains('TUNE_FEATURES', 'aarch64', ' -mbranch-protection=standard', '', d)}"
diff --git a/meta/conf/machine/include/arm/arch-armv8-1a.inc b/meta/conf/machine/include/arm/arch-armv8-1a.inc
new file mode 100644
index 0000000000..be8e814a39
--- /dev/null
+++ b/meta/conf/machine/include/arm/arch-armv8-1a.inc
@@ -0,0 +1,18 @@
+DEFAULTTUNE ?= "armv8-1a"
+
+TUNEVALID[armv8-1a] = "Enable instructions for ARMv8.1-a"
+TUNE_CCARGS_MARCH .= "${@bb.utils.contains('TUNE_FEATURES', 'armv8-1a', ' -march=armv8.1-a', '', d)}"
+# TUNE crypto will be handled by arch-armv8a.inc below
+MACHINEOVERRIDES =. "${@bb.utils.contains('TUNE_FEATURES', 'armv8-1a', 'armv8-1a:', '', d)}"
+
+require conf/machine/include/arm/arch-armv8a.inc
+
+AVAILTUNES += "armv8-1a armv8-1a-crypto"
+ARMPKGARCH:tune-armv8-1a ?= "armv8-1a"
+ARMPKGARCH:tune-armv8-1a-crypto ?= "armv8-1a"
+TUNE_FEATURES:tune-armv8-1a = "aarch64 armv8-1a"
+TUNE_FEATURES:tune-armv8-1a-crypto = "${TUNE_FEATURES:tune-armv8-1a} crypto"
+PACKAGE_EXTRA_ARCHS:tune-armv8-1a = "${PACKAGE_EXTRA_ARCHS:tune-armv8a} armv8-1a"
+PACKAGE_EXTRA_ARCHS:tune-armv8-1a-crypto = "${PACKAGE_EXTRA_ARCHS:tune-armv8-1a} armv8-1a-crypto"
+BASE_LIB:tune-armv8-1a = "lib64"
+BASE_LIB:tune-armv8-1a-crypto = "lib64"
diff --git a/meta/conf/machine/include/arm/arch-armv8-2a.inc b/meta/conf/machine/include/arm/arch-armv8-2a.inc
index 95368b0af7..2b5fae1406 100644
--- a/meta/conf/machine/include/arm/arch-armv8-2a.inc
+++ b/meta/conf/machine/include/arm/arch-armv8-2a.inc
@@ -6,6 +6,7 @@ TUNE_CCARGS_MARCH .= "${@bb.utils.contains('TUNE_FEATURES', 'armv8-2a', ' -march
MACHINEOVERRIDES =. "${@bb.utils.contains('TUNE_FEATURES', 'armv8-2a', 'armv8-2a:', '', d)}"
require conf/machine/include/arm/arch-armv8a.inc
+require conf/machine/include/arm/feature-arm-sve.inc
# Little Endian base configs
AVAILTUNES += "armv8-2a armv8-2a-crypto"
diff --git a/meta/conf/machine/include/arm/arch-armv8-3a.inc b/meta/conf/machine/include/arm/arch-armv8-3a.inc
new file mode 100644
index 0000000000..49493fb3b5
--- /dev/null
+++ b/meta/conf/machine/include/arm/arch-armv8-3a.inc
@@ -0,0 +1,22 @@
+DEFAULTTUNE ?= "armv8-3a"
+
+TUNEVALID[armv8-3a] = "Enable instructions for ARMv8.3-a"
+TUNE_CCARGS_MARCH .= "${@bb.utils.contains('TUNE_FEATURES', 'armv8-3a', ' -march=armv8.3-a', '', d)}"
+# TUNE crypto will be handled by arch-armv8a.inc below
+MACHINEOVERRIDES =. "${@bb.utils.contains('TUNE_FEATURES', 'armv8-3a', 'armv8-3a:', '', d)}"
+
+require conf/machine/include/arm/arch-armv8a.inc
+
+AVAILTUNES += "armv8-3a armv8-3a-crypto armv8-3a-crypto-sve"
+ARMPKGARCH:tune-armv8-3a ?= "armv8-3a"
+ARMPKGARCH:tune-armv8-3a-crypto ?= "armv8-3a"
+ARMPKGARCH:tune-armv8-3a-crypto-sve ?= "armv8-3a"
+TUNE_FEATURES:tune-armv8-3a = "aarch64 armv8-3a"
+TUNE_FEATURES:tune-armv8-3a-crypto = "${TUNE_FEATURES:tune-armv8-3a} crypto"
+TUNE_FEATURES:tune-armv8-3a-crypto-sve = "${TUNE_FEATURES:tune-armv8-3a-crypto} sve"
+PACKAGE_EXTRA_ARCHS:tune-armv8-3a = "${PACKAGE_EXTRA_ARCHS:tune-armv8a} armv8-3a"
+PACKAGE_EXTRA_ARCHS:tune-armv8-3a-crypto = "${PACKAGE_EXTRA_ARCHS:tune-armv8-3a} armv8-3a-crypto"
+PACKAGE_EXTRA_ARCHS:tune-armv8-3a-crypto-sve = "${PACKAGE_EXTRA_ARCHS:tune-armv8-3a-crypto} armv8-3a-crypto-sve"
+BASE_LIB:tune-armv8-3a = "lib64"
+BASE_LIB:tune-armv8-3a-crypto = "lib64"
+BASE_LIB:tune-armv8-3a-crypto-sve = "lib64"
diff --git a/meta/conf/machine/include/arm/arch-armv8-4a.inc b/meta/conf/machine/include/arm/arch-armv8-4a.inc
index 29feddb64c..b61d50daa5 100644
--- a/meta/conf/machine/include/arm/arch-armv8-4a.inc
+++ b/meta/conf/machine/include/arm/arch-armv8-4a.inc
@@ -6,14 +6,19 @@ TUNE_CCARGS_MARCH .= "${@bb.utils.contains('TUNE_FEATURES', 'armv8-4a', ' -march
MACHINEOVERRIDES =. "${@bb.utils.contains('TUNE_FEATURES', 'armv8-4a', 'armv8-4a:', '', d)}"
require conf/machine/include/arm/arch-armv8a.inc
+require conf/machine/include/arm/feature-arm-sve.inc
# Little Endian base configs
-AVAILTUNES += "armv8-4a armv8-4a-crypto"
+AVAILTUNES += "armv8-4a armv8-4a-crypto armv8-4a-crypto-sve"
ARMPKGARCH:tune-armv8-4a ?= "armv8-4a"
ARMPKGARCH:tune-armv8-4a-crypto ?= "armv8-4a"
+ARMPKGARCH:tune-armv8-4a-crypto-sve ?= "armv8-4a"
TUNE_FEATURES:tune-armv8-4a = "aarch64 armv8-4a"
TUNE_FEATURES:tune-armv8-4a-crypto = "${TUNE_FEATURES:tune-armv8-4a} crypto"
+TUNE_FEATURES:tune-armv8-4a-crypto-sve = "${TUNE_FEATURES:tune-armv8-4a-crypto} sve"
PACKAGE_EXTRA_ARCHS:tune-armv8-4a = "${PACKAGE_EXTRA_ARCHS:tune-armv8a} armv8-4a"
PACKAGE_EXTRA_ARCHS:tune-armv8-4a-crypto = "${PACKAGE_EXTRA_ARCHS:tune-armv8-4a} armv8-4a-crypto"
+PACKAGE_EXTRA_ARCHS:tune-armv8-4a-crypto-sve = "${PACKAGE_EXTRA_ARCHS:tune-armv8-4a-crypto} armv8-4a-crypto-sve"
BASE_LIB:tune-armv8-4a = "lib64"
BASE_LIB:tune-armv8-4a-crypto = "lib64"
+BASE_LIB:tune-armv8-4a-crypto-sve = "lib64"
diff --git a/meta/conf/machine/include/arm/arch-armv8-5a.inc b/meta/conf/machine/include/arm/arch-armv8-5a.inc
index a1bcb7fb9a..176bc9033c 100644
--- a/meta/conf/machine/include/arm/arch-armv8-5a.inc
+++ b/meta/conf/machine/include/arm/arch-armv8-5a.inc
@@ -6,14 +6,19 @@ TUNE_CCARGS_MARCH .= "${@bb.utils.contains('TUNE_FEATURES', 'armv8-5a', ' -march
MACHINEOVERRIDES =. "${@bb.utils.contains('TUNE_FEATURES', 'armv8-5a', 'armv8-5a:', '', d)}"
require conf/machine/include/arm/arch-armv8a.inc
+require conf/machine/include/arm/feature-arm-sve.inc
# Little Endian base configs
-AVAILTUNES += "armv8-5a armv8-5a-crypto"
+AVAILTUNES += "armv8-5a armv8-5a-crypto armv8-5a-crypto-sve"
ARMPKGARCH:tune-armv8-5a ?= "armv8-5a"
ARMPKGARCH:tune-armv8-5a-crypto ?= "armv8-5a"
+ARMPKGARCH:tune-armv8-5a-crypto-sve ?= "armv8-5a"
TUNE_FEATURES:tune-armv8-5a = "aarch64 armv8-5a"
TUNE_FEATURES:tune-armv8-5a-crypto = "${TUNE_FEATURES:tune-armv8-5a} crypto"
+TUNE_FEATURES:tune-armv8-5a-crypto-sve = "${TUNE_FEATURES:tune-armv8-5a-crypto} sve"
PACKAGE_EXTRA_ARCHS:tune-armv8-5a = "${PACKAGE_EXTRA_ARCHS:tune-armv8a} armv8-5a"
PACKAGE_EXTRA_ARCHS:tune-armv8-5a-crypto = "${PACKAGE_EXTRA_ARCHS:tune-armv8-5a} armv8-5a-crypto"
+PACKAGE_EXTRA_ARCHS:tune-armv8-5a-crypto-sve = "${PACKAGE_EXTRA_ARCHS:tune-armv8-5a-crypto} armv8-5a-crypto-sve"
BASE_LIB:tune-armv8-5a = "lib64"
BASE_LIB:tune-armv8-5a-crypto = "lib64"
+BASE_LIB:tune-armv8-5a-crypto-sve = "lib64"
diff --git a/meta/conf/machine/include/arm/arch-armv8-6a.inc b/meta/conf/machine/include/arm/arch-armv8-6a.inc
new file mode 100644
index 0000000000..27f85325ca
--- /dev/null
+++ b/meta/conf/machine/include/arm/arch-armv8-6a.inc
@@ -0,0 +1,22 @@
+DEFAULTTUNE ?= "armv8-6a"
+
+TUNEVALID[armv8-6a] = "Enable instructions for ARMv8.6-a"
+TUNE_CCARGS_MARCH .= "${@bb.utils.contains('TUNE_FEATURES', 'armv8-6a', ' -march=armv8.6-a', '', d)}"
+# TUNE crypto will be handled by arch-armv8a.inc below
+MACHINEOVERRIDES =. "${@bb.utils.contains('TUNE_FEATURES', 'armv8-6a', 'armv8-6a:', '', d)}"
+
+require conf/machine/include/arm/arch-armv8a.inc
+
+AVAILTUNES += "armv8-6a armv8-6a-crypto armv8-6a-crypto-sve"
+ARMPKGARCH:tune-armv8-6a ?= "armv8-6a"
+ARMPKGARCH:tune-armv8-6a-crypto ?= "armv8-6a"
+ARMPKGARCH:tune-armv8-6a-crypto-sve ?= "armv8-6a"
+TUNE_FEATURES:tune-armv8-6a = "aarch64 armv8-6a"
+TUNE_FEATURES:tune-armv8-6a-crypto = "${TUNE_FEATURES:tune-armv8-6a} crypto"
+TUNE_FEATURES:tune-armv8-6a-crypto-sve = "${TUNE_FEATURES:tune-armv8-6a-crypto} sve"
+PACKAGE_EXTRA_ARCHS:tune-armv8-6a = "${PACKAGE_EXTRA_ARCHS:tune-armv8a} armv8-6a"
+PACKAGE_EXTRA_ARCHS:tune-armv8-6a-crypto = "${PACKAGE_EXTRA_ARCHS:tune-armv8-6a} armv8-6a-crypto"
+PACKAGE_EXTRA_ARCHS:tune-armv8-6a-crypto-sve = "${PACKAGE_EXTRA_ARCHS:tune-armv8-6a-crypto} armv8-6a-crypto-sve"
+BASE_LIB:tune-armv8-6a = "lib64"
+BASE_LIB:tune-armv8-6a-crypto = "lib64"
+BASE_LIB:tune-armv8-6a-crypto-sve = "lib64"
diff --git a/meta/conf/machine/include/arm/arch-armv8r.inc b/meta/conf/machine/include/arm/arch-armv8r.inc
index 0dcfd34c1a..2fea8d2517 100644
--- a/meta/conf/machine/include/arm/arch-armv8r.inc
+++ b/meta/conf/machine/include/arm/arch-armv8r.inc
@@ -10,6 +10,7 @@ MACHINEOVERRIDES =. "${@bb.utils.contains('TUNE_FEATURES', 'armv8r', 'armv8r:',
require conf/machine/include/arm/arch-arm64.inc
require conf/machine/include/arm/feature-arm-crc.inc
require conf/machine/include/arm/feature-arm-crypto.inc
+require conf/machine/include/arm/feature-arm-sve.inc
# All ARMv8 has floating point hardware built in. Null it here to avoid any confusion for 32bit.
TARGET_FPU_32 = ""
diff --git a/meta/conf/machine/include/arm/arch-armv9a.inc b/meta/conf/machine/include/arm/arch-armv9a.inc
new file mode 100644
index 0000000000..e8d7c08a63
--- /dev/null
+++ b/meta/conf/machine/include/arm/arch-armv9a.inc
@@ -0,0 +1,19 @@
+DEFAULTTUNE ?= "armv9a"
+
+TUNEVALID[armv9a] = "Enable instructions for ARMv9-a"
+TUNE_CCARGS_MARCH .= "${@bb.utils.contains('TUNE_FEATURES', 'armv9a', ' -march=armv9-a', '', d)}"
+MACHINEOVERRIDES =. "${@bb.utils.contains('TUNE_FEATURES', 'armv9a', 'armv9a:', '', d)}"
+
+require conf/machine/include/arm/arch-arm64.inc
+require conf/machine/include/arm/feature-arm-crypto.inc
+
+# Little Endian base configs
+AVAILTUNES += "armv9a armv9a-crypto"
+ARMPKGARCH:tune-armv9a ?= "armv9a"
+ARMPKGARCH:tune-armv9a-crypto ?= "armv9a"
+TUNE_FEATURES:tune-armv9a = "aarch64 armv9a"
+TUNE_FEATURES:tune-armv9a-crypto = "${TUNE_FEATURES:tune-armv9a} crypto"
+PACKAGE_EXTRA_ARCHS:tune-armv9a = "aarch64 armv9a"
+PACKAGE_EXTRA_ARCHS:tune-armv9a-crypto = "${PACKAGE_EXTRA_ARCHS:tune-armv9a} armv9a-crypto"
+BASE_LIB:tune-armv9a = "lib64"
+BASE_LIB:tune-armv9a-crypto = "lib64"
diff --git a/meta/conf/machine/include/arm/armv7r/tune-cortexr5.inc b/meta/conf/machine/include/arm/armv7r/tune-cortexr5.inc
index ecaaa0d846..4c8985292b 100644
--- a/meta/conf/machine/include/arm/armv7r/tune-cortexr5.inc
+++ b/meta/conf/machine/include/arm/armv7r/tune-cortexr5.inc
@@ -12,3 +12,8 @@ AVAILTUNES += "cortexr5"
ARMPKGARCH:tune-cortexr5 = "cortexr5"
TUNE_FEATURES:tune-cortexr5 = "${TUNE_FEATURES:tune-armv7r-vfpv3d16} cortexr5 idiv"
PACKAGE_EXTRA_ARCHS:tune-cortexr5 = "${PACKAGE_EXTRA_ARCHS:tune-armv7r-vfpv3d16} cortexr5-vfpv3d16"
+
+AVAILTUNES += "cortexr5hf"
+ARMPKGARCH:tune-cortexr5hf = "cortexr5"
+TUNE_FEATURES:tune-cortexr5hf = "${TUNE_FEATURES:tune-cortexr5} callconvention-hard"
+PACKAGE_EXTRA_ARCHS:tune-cortexr5hf = "cortexr5hf-vfpv3d16"
diff --git a/meta/conf/machine/include/arm/armv8-1m/tune-cortexm55.inc b/meta/conf/machine/include/arm/armv8-1m/tune-cortexm55.inc
index 493ad67b21..0a115be8a4 100644
--- a/meta/conf/machine/include/arm/armv8-1m/tune-cortexm55.inc
+++ b/meta/conf/machine/include/arm/armv8-1m/tune-cortexm55.inc
@@ -10,5 +10,6 @@ require conf/machine/include/arm/arch-armv8-1m-main.inc
AVAILTUNES += "cortexm55"
ARMPKGARCH:tune-cortexm55 = "cortexm55"
-TUNE_FEATURES:tune-cortexm55 = "${TUNE_FEATURES:tune-armv8-1m-main} cortexm55"
+# We do not want -march since -mcpu is added above to cover for it
+TUNE_FEATURES:tune-cortexm55 = "cortexm55"
PACKAGE_EXTRA_ARCHS:tune-cortexm55 = "${PACKAGE_EXTRA_ARCHS:tune-armv8-1m-main} cortexm55"
diff --git a/meta/conf/machine/include/arm/armv8-2a/tune-cortexa55.inc b/meta/conf/machine/include/arm/armv8-2a/tune-cortexa55.inc
index d130b4b90a..5e63b45ae0 100644
--- a/meta/conf/machine/include/arm/armv8-2a/tune-cortexa55.inc
+++ b/meta/conf/machine/include/arm/armv8-2a/tune-cortexa55.inc
@@ -8,6 +8,7 @@ require conf/machine/include/arm/arch-armv8-2a.inc
# Little Endian base configs
AVAILTUNES += "cortexa55"
ARMPKGARCH:tune-cortexa55 = "cortexa55"
-TUNE_FEATURES:tune-cortexa55 = "${TUNE_FEATURES:tune-armv8-2a-crypto} cortexa55"
+# We do not want -march since -mcpu is added above to cover for it
+TUNE_FEATURES:tune-cortexa55 = "aarch64 crypto cortexa55"
PACKAGE_EXTRA_ARCHS:tune-cortexa55 = "${PACKAGE_EXTRA_ARCHS:tune-armv8-2a-crypto} cortexa55"
BASE_LIB:tune-cortexa55 = "lib64"
diff --git a/meta/conf/machine/include/arm/armv8-2a/tune-cortexa65.inc b/meta/conf/machine/include/arm/armv8-2a/tune-cortexa65.inc
index c7e86887b5..ba96d0452e 100644
--- a/meta/conf/machine/include/arm/armv8-2a/tune-cortexa65.inc
+++ b/meta/conf/machine/include/arm/armv8-2a/tune-cortexa65.inc
@@ -11,6 +11,7 @@ require conf/machine/include/arm/arch-armv8-2a.inc
# Little Endian base configs
AVAILTUNES += "cortexa65"
ARMPKGARCH:tune-cortexa65 = "cortexa65"
-TUNE_FEATURES:tune-cortexa65 = "${TUNE_FEATURES:tune-armv8-2a-crypto} cortexa65"
+# We do not want -march since -mcpu is added above to cover for it
+TUNE_FEATURES:tune-cortexa65 = "aarch64 crypto cortexa65"
PACKAGE_EXTRA_ARCHS:tune-cortexa65 = "${PACKAGE_EXTRA_ARCHS:tune-armv8-2a-crypto} cortexa65"
BASE_LIB:tune-cortexa65 = "lib64"
diff --git a/meta/conf/machine/include/arm/armv8-2a/tune-cortexa65ae.inc b/meta/conf/machine/include/arm/armv8-2a/tune-cortexa65ae.inc
index dad6d1b174..cc92147441 100644
--- a/meta/conf/machine/include/arm/armv8-2a/tune-cortexa65ae.inc
+++ b/meta/conf/machine/include/arm/armv8-2a/tune-cortexa65ae.inc
@@ -11,6 +11,7 @@ require conf/machine/include/arm/arch-armv8-2a.inc
# Little Endian base configs
AVAILTUNES += "cortexa65ae"
ARMPKGARCH:tune-cortexa65ae = "cortexa65ae"
-TUNE_FEATURES:tune-cortexa65ae = "${TUNE_FEATURES:tune-armv8-2a-crypto} cortexa65ae"
+# We do not want -march since -mcpu is added above to cover for it
+TUNE_FEATURES:tune-cortexa65ae = "aarch64 crypto cortexa65ae"
PACKAGE_EXTRA_ARCHS:tune-cortexa65ae = "${PACKAGE_EXTRA_ARCHS:tune-armv8-2a-crypto} cortexa65ae"
BASE_LIB:tune-cortexa65ae = "lib64"
diff --git a/meta/conf/machine/include/arm/armv8-2a/tune-cortexa75-cortexa55.inc b/meta/conf/machine/include/arm/armv8-2a/tune-cortexa75-cortexa55.inc
index 3a47e8278d..e18b2cb6e0 100644
--- a/meta/conf/machine/include/arm/armv8-2a/tune-cortexa75-cortexa55.inc
+++ b/meta/conf/machine/include/arm/armv8-2a/tune-cortexa75-cortexa55.inc
@@ -12,7 +12,8 @@ require conf/machine/include/arm/arch-armv8-2a.inc
AVAILTUNES += "cortexa75-cortexa55 cortexa75-cortexa55-crypto"
ARMPKGARCH:tune-cortexa75-cortexa55 = "cortexa75-cortexa55"
ARMPKGARCH:tune-cortexa75-cortexa55-crypto = "cortexa75-cortexa55-crypto"
-TUNE_FEATURES:tune-cortexa75-cortexa55 = "${TUNE_FEATURES:tune-armv8-2a} cortexa75-cortexa55"
+# We do not want -march since -mcpu is added above to cover for it
+TUNE_FEATURES:tune-cortexa75-cortexa55 = "aarch64 cortexa75-cortexa55"
TUNE_FEATURES:tune-cortexa75-cortexa55-crypto = "${TUNE_FEATURES:tune-cortexa75-cortexa55} crypto"
PACKAGE_EXTRA_ARCHS:tune-cortexa75-cortexa55 = "${PACKAGE_EXTRA_ARCHS:tune-armv8-2a} cortexa75-cortexa55"
PACKAGE_EXTRA_ARCHS:tune-cortexa75-cortexa55-crypto = "${PACKAGE_EXTRA_ARCHS:tune-armv8-2a-crypto} cortexa75-cortexa55 cortexa75-cortexa55-crypto"
diff --git a/meta/conf/machine/include/arm/armv8-2a/tune-cortexa75.inc b/meta/conf/machine/include/arm/armv8-2a/tune-cortexa75.inc
index 2d9a1159f1..453be2e6fd 100644
--- a/meta/conf/machine/include/arm/armv8-2a/tune-cortexa75.inc
+++ b/meta/conf/machine/include/arm/armv8-2a/tune-cortexa75.inc
@@ -11,6 +11,7 @@ require conf/machine/include/arm/arch-armv8-2a.inc
# Little Endian base configs
AVAILTUNES += "cortexa75"
ARMPKGARCH:tune-cortexa75 = "cortexa75"
-TUNE_FEATURES:tune-cortexa75 = "${TUNE_FEATURES:tune-armv8-2a-crypto} cortexa75"
+# We do not want -march since -mcpu is added above to cover for it
+TUNE_FEATURES:tune-cortexa75 = "aarch64 crypto cortexa75"
PACKAGE_EXTRA_ARCHS:tune-cortexa75 = "${PACKAGE_EXTRA_ARCHS:tune-armv8-2a-crypto} cortexa75"
BASE_LIB:tune-cortexa75 = "lib64"
diff --git a/meta/conf/machine/include/arm/armv8-2a/tune-cortexa76-cortexa55.inc b/meta/conf/machine/include/arm/armv8-2a/tune-cortexa76-cortexa55.inc
index f4c99ad6bb..7daf9d91a8 100644
--- a/meta/conf/machine/include/arm/armv8-2a/tune-cortexa76-cortexa55.inc
+++ b/meta/conf/machine/include/arm/armv8-2a/tune-cortexa76-cortexa55.inc
@@ -12,7 +12,8 @@ require conf/machine/include/arm/arch-armv8-2a.inc
AVAILTUNES += "cortexa76-cortexa55 cortexa76-cortexa55-crypto"
ARMPKGARCH:tune-cortexa76-cortexa55 = "cortexa76-cortexa55"
ARMPKGARCH:tune-cortexa76-cortexa55-crypto = "cortexa76-cortexa55-crypto"
-TUNE_FEATURES:tune-cortexa76-cortexa55 = "${TUNE_FEATURES:tune-armv8-2a} cortexa76-cortexa55"
+# We do not want -march since -mcpu is added above to cover for it
+TUNE_FEATURES:tune-cortexa76-cortexa55 = "aarch64 cortexa76-cortexa55"
TUNE_FEATURES:tune-cortexa76-cortexa55-crypto = "${TUNE_FEATURES:tune-cortexa76-cortexa55} crypto"
PACKAGE_EXTRA_ARCHS:tune-cortexa76-cortexa55 = "${PACKAGE_EXTRA_ARCHS:tune-armv8-2a} cortexa76-cortexa55"
PACKAGE_EXTRA_ARCHS:tune-cortexa76-cortexa55-crypto = "${PACKAGE_EXTRA_ARCHS:tune-armv8-2a-crypto} cortexa76-cortexa55 cortexa76-cortexa55-crypto"
diff --git a/meta/conf/machine/include/arm/armv8-2a/tune-cortexa76.inc b/meta/conf/machine/include/arm/armv8-2a/tune-cortexa76.inc
index 0dfdb8c5e4..14ed81214d 100644
--- a/meta/conf/machine/include/arm/armv8-2a/tune-cortexa76.inc
+++ b/meta/conf/machine/include/arm/armv8-2a/tune-cortexa76.inc
@@ -11,6 +11,7 @@ require conf/machine/include/arm/arch-armv8-2a.inc
# Little Endian base configs
AVAILTUNES += "cortexa76"
ARMPKGARCH:tune-cortexa76 = "cortexa76"
-TUNE_FEATURES:tune-cortexa76 = "${TUNE_FEATURES:tune-armv8-2a-crypto} cortexa76"
+# We do not want -march since -mcpu is added above to cover for it
+TUNE_FEATURES:tune-cortexa76 = "aarch64 crypto cortexa76"
PACKAGE_EXTRA_ARCHS:tune-cortexa76 = "${PACKAGE_EXTRA_ARCHS:tune-armv8-2a-crypto} cortexa76"
BASE_LIB:tune-cortexa76 = "lib64"
diff --git a/meta/conf/machine/include/arm/armv8-2a/tune-cortexa76ae.inc b/meta/conf/machine/include/arm/armv8-2a/tune-cortexa76ae.inc
index b2863dca68..191863bac8 100644
--- a/meta/conf/machine/include/arm/armv8-2a/tune-cortexa76ae.inc
+++ b/meta/conf/machine/include/arm/armv8-2a/tune-cortexa76ae.inc
@@ -11,6 +11,7 @@ require conf/machine/include/arm/arch-armv8-2a.inc
# Little Endian base configs
AVAILTUNES += "cortexa76ae"
ARMPKGARCH:tune-cortexa76ae = "cortexa76ae"
-TUNE_FEATURES:tune-cortexa76ae = "${TUNE_FEATURES:tune-armv8-2a-crypto} cortexa76ae"
+# We do not want -march since -mcpu is added above to cover for it
+TUNE_FEATURES:tune-cortexa76ae = "aarch64 crypto cortexa76ae"
PACKAGE_EXTRA_ARCHS:tune-cortexa76ae = "${PACKAGE_EXTRA_ARCHS:tune-armv8-2a-crypto} cortexa76ae"
BASE_LIB:tune-cortexa76ae = "lib64"
diff --git a/meta/conf/machine/include/arm/armv8-2a/tune-cortexa77.inc b/meta/conf/machine/include/arm/armv8-2a/tune-cortexa77.inc
index 654b1f6323..1522fd6abd 100644
--- a/meta/conf/machine/include/arm/armv8-2a/tune-cortexa77.inc
+++ b/meta/conf/machine/include/arm/armv8-2a/tune-cortexa77.inc
@@ -11,6 +11,7 @@ require conf/machine/include/arm/arch-armv8-2a.inc
# Little Endian base configs
AVAILTUNES += "cortexa77"
ARMPKGARCH:tune-cortexa77 = "cortexa77"
-TUNE_FEATURES:tune-cortexa77 = "${TUNE_FEATURES:tune-armv8-2a-crypto} cortexa77"
+# We do not want -march since -mcpu is added above to cover for it
+TUNE_FEATURES:tune-cortexa77 = "aarch64 crypto cortexa77"
PACKAGE_EXTRA_ARCHS:tune-cortexa77 = "${PACKAGE_EXTRA_ARCHS:tune-armv8-2a-crypto} cortexa77"
BASE_LIB:tune-cortexa77 = "lib64"
diff --git a/meta/conf/machine/include/arm/armv8-2a/tune-cortexa78.inc b/meta/conf/machine/include/arm/armv8-2a/tune-cortexa78.inc
new file mode 100644
index 0000000000..198b94c679
--- /dev/null
+++ b/meta/conf/machine/include/arm/armv8-2a/tune-cortexa78.inc
@@ -0,0 +1,17 @@
+#
+# Tune Settings for Cortex-A78
+#
+DEFAULTTUNE ?= "cortexa78"
+
+TUNEVALID[cortexa78] = "Enable Cortex-A78 specific processor optimizations"
+TUNE_CCARGS .= "${@bb.utils.contains('TUNE_FEATURES', 'cortexa78', ' -mcpu=cortex-a78', '', d)}"
+
+require conf/machine/include/arm/arch-armv8-2a.inc
+
+# Little Endian base configs
+AVAILTUNES += "cortexa78"
+ARMPKGARCH:tune-cortexa78 = "cortexa78"
+# We do not want -march since -mcpu is added above to cover for it
+TUNE_FEATURES:tune-cortexa78 = "aarch64 crypto cortexa78"
+PACKAGE_EXTRA_ARCHS:tune-cortexa78 = "${PACKAGE_EXTRA_ARCHS:tune-armv8-2a-crypto} cortexa78"
+BASE_LIB:tune-cortexa78 = "lib64"
diff --git a/meta/conf/machine/include/arm/armv8-2a/tune-cortexa78ae.inc b/meta/conf/machine/include/arm/armv8-2a/tune-cortexa78ae.inc
new file mode 100644
index 0000000000..fe68bda9a0
--- /dev/null
+++ b/meta/conf/machine/include/arm/armv8-2a/tune-cortexa78ae.inc
@@ -0,0 +1,15 @@
+#
+# Tune Settings for cortex-a78ae
+#
+DEFAULTTUNE ?= "cortexa78ae"
+
+TUNEVALID[cortexa78ae] = "Enable cortex-a78ae specific processor optimizations"
+TUNE_CCARGS .= "${@bb.utils.contains('TUNE_FEATURES', 'cortexa78ae', ' -mcpu=cortex-a78ae', '', d)}"
+
+require conf/machine/include/arm/arch-armv8-2a.inc
+
+AVAILTUNES += "cortexa78ae"
+ARMPKGARCH:tune-cortexa78ae = "cortexa78ae"
+TUNE_FEATURES:tune-cortexa78ae = "aarch64 crypto cortexa78ae"
+PACKAGE_EXTRA_ARCHS:tune-cortexa78ae = "${PACKAGE_EXTRA_ARCHS:tune-armv8-2a-crypto} cortexa78ae"
+BASE_LIB:tune-cortexa78ae = "lib64"
diff --git a/meta/conf/machine/include/arm/armv8-2a/tune-cortexa78c.inc b/meta/conf/machine/include/arm/armv8-2a/tune-cortexa78c.inc
new file mode 100644
index 0000000000..cb1fe91dfc
--- /dev/null
+++ b/meta/conf/machine/include/arm/armv8-2a/tune-cortexa78c.inc
@@ -0,0 +1,15 @@
+#
+# Tune Settings for cortex-a78c
+#
+DEFAULTTUNE ?= "cortexa78c"
+
+TUNEVALID[cortexa78c] = "Enable cortex-a78c specific processor optimizations"
+TUNE_CCARGS .= "${@bb.utils.contains('TUNE_FEATURES', 'cortexa78c', ' -mcpu=cortex-a78c', '', d)}"
+
+require conf/machine/include/arm/arch-armv8-2a.inc
+
+AVAILTUNES += "cortexa78c"
+ARMPKGARCH:tune-cortexa78c = "cortexa78c"
+TUNE_FEATURES:tune-cortexa78c = "aarch64 crypto cortexa78c"
+PACKAGE_EXTRA_ARCHS:tune-cortexa78c = "${PACKAGE_EXTRA_ARCHS:tune-armv8-2a-crypto} cortexa78c"
+BASE_LIB:tune-cortexa78c = "lib64"
diff --git a/meta/conf/machine/include/arm/armv8-2a/tune-cortexx1.inc b/meta/conf/machine/include/arm/armv8-2a/tune-cortexx1.inc
new file mode 100644
index 0000000000..0a99156e62
--- /dev/null
+++ b/meta/conf/machine/include/arm/armv8-2a/tune-cortexx1.inc
@@ -0,0 +1,15 @@
+#
+# Tune Settings for cortex-x1
+#
+DEFAULTTUNE ?= "cortexx1"
+
+TUNEVALID[cortexx1] = "Enable cortex-x1 specific processor optimizations"
+TUNE_CCARGS .= "${@bb.utils.contains('TUNE_FEATURES', 'cortexx1', ' -mcpu=cortex-x1', '', d)}"
+
+require conf/machine/include/arm/arch-armv8-2a.inc
+
+AVAILTUNES += "cortexx1"
+ARMPKGARCH:tune-cortexx1 = "cortexx1"
+TUNE_FEATURES:tune-cortexx1 = "aarch64 crypto cortexx1"
+PACKAGE_EXTRA_ARCHS:tune-cortexx1 = "${PACKAGE_EXTRA_ARCHS:tune-armv8-2a-crypto} cortexx1"
+BASE_LIB:tune-cortexx1 = "lib64"
diff --git a/meta/conf/machine/include/arm/armv8-2a/tune-cortexx1c.inc b/meta/conf/machine/include/arm/armv8-2a/tune-cortexx1c.inc
new file mode 100644
index 0000000000..2a16d1695d
--- /dev/null
+++ b/meta/conf/machine/include/arm/armv8-2a/tune-cortexx1c.inc
@@ -0,0 +1,15 @@
+#
+# Tune Settings for cortex-x1c
+#
+DEFAULTTUNE ?= "cortexx1c"
+
+TUNEVALID[cortexx1c] = "Enable cortex-x1c specific processor optimizations"
+TUNE_CCARGS .= "${@bb.utils.contains('TUNE_FEATURES', 'cortexx1c', ' -mcpu=cortex-x1c', '', d)}"
+
+require conf/machine/include/arm/arch-armv8-2a.inc
+
+AVAILTUNES += "cortexx1c"
+ARMPKGARCH:tune-cortexx1c = "cortexx1c"
+TUNE_FEATURES:tune-cortexx1c = "aarch64 crypto cortexx1c"
+PACKAGE_EXTRA_ARCHS:tune-cortexx1c = "${PACKAGE_EXTRA_ARCHS:tune-armv8-2a-crypto} cortexx1c"
+BASE_LIB:tune-cortexx1c = "lib64"
diff --git a/meta/conf/machine/include/arm/armv8-2a/tune-neoversee1.inc b/meta/conf/machine/include/arm/armv8-2a/tune-neoversee1.inc
index 15ed595bde..e906cf965c 100644
--- a/meta/conf/machine/include/arm/armv8-2a/tune-neoversee1.inc
+++ b/meta/conf/machine/include/arm/armv8-2a/tune-neoversee1.inc
@@ -11,6 +11,7 @@ require conf/machine/include/arm/arch-armv8-2a.inc
# Little Endian base configs
AVAILTUNES += "neoversee1"
ARMPKGARCH:tune-neoversee1 = "neoversee1"
-TUNE_FEATURES:tune-neoversee1 = "${TUNE_FEATURES:tune-armv8-2a-crypto} neoversee1"
+# We do not want -march since -mcpu is added above to cover for it
+TUNE_FEATURES:tune-neoversee1 = "aarch64 crypto neoversee1"
PACKAGE_EXTRA_ARCHS:tune-neoversee1 = "${PACKAGE_EXTRA_ARCHS:tune-armv8-2a-crypto} neoversee1"
BASE_LIB:tune-neoversee1 = "lib64"
diff --git a/meta/conf/machine/include/arm/armv8-2a/tune-neoversen1.inc b/meta/conf/machine/include/arm/armv8-2a/tune-neoversen1.inc
index 9d181ef4d5..55f054713f 100644
--- a/meta/conf/machine/include/arm/armv8-2a/tune-neoversen1.inc
+++ b/meta/conf/machine/include/arm/armv8-2a/tune-neoversen1.inc
@@ -12,6 +12,6 @@ require conf/machine/include/arm/arch-armv8-2a.inc
# Little Endian base configs
AVAILTUNES += "neoversen1"
ARMPKGARCH:tune-neoversen1 = "neoversen1"
-TUNE_FEATURES:tune-neoversen1 = "${TUNE_FEATURES:tune-armv8-2a-crypto} neoversen1"
+TUNE_FEATURES:tune-neoversen1 = "aarch64 crypto neoversen1"
PACKAGE_EXTRA_ARCHS:tune-neoversen1 = "${PACKAGE_EXTRA_ARCHS:tune-armv8-2a-crypto} neoversen1"
BASE_LIB:tune-neoversen1 = "lib64"
diff --git a/meta/conf/machine/include/arm/armv8-2a/tune-octeontx2.inc b/meta/conf/machine/include/arm/armv8-2a/tune-octeontx2.inc
index 2cac70ea52..e9d7a59c51 100644
--- a/meta/conf/machine/include/arm/armv8-2a/tune-octeontx2.inc
+++ b/meta/conf/machine/include/arm/armv8-2a/tune-octeontx2.inc
@@ -8,6 +8,7 @@ require conf/machine/include/arm/arch-armv8-2a.inc
# Little Endian base configs
AVAILTUNES += "octeontx2"
ARMPKGARCH:tune-octeontx2 = "octeontx2"
-TUNE_FEATURES:tune-octeontx2 = "${TUNE_FEATURES:tune-armv8-2a-crypto} octeontx2"
+# We do not want -march since -mcpu is added above to cover for it
+TUNE_FEATURES:tune-octeontx2 = "aarch64 crypto octeontx2"
PACKAGE_EXTRA_ARCHS:tune-octeontx2 = "${PACKAGE_EXTRA_ARCHS:tune-armv8-2a-crypto} octeontx2"
BASE_LIB:tune-octeontx2 = "lib64"
diff --git a/meta/conf/machine/include/arm/armv8-4a/tune-neoverse512tvb.inc b/meta/conf/machine/include/arm/armv8-4a/tune-neoverse512tvb.inc
new file mode 100644
index 0000000000..450bf74896
--- /dev/null
+++ b/meta/conf/machine/include/arm/armv8-4a/tune-neoverse512tvb.inc
@@ -0,0 +1,15 @@
+#
+# Tune Settings for neoverse-512tvb
+#
+DEFAULTTUNE ?= "neoverse512tvb"
+
+TUNEVALID[neoverse512tvb] = "Enable neoverse-512tvb specific processor optimizations"
+TUNE_CCARGS .= "${@bb.utils.contains('TUNE_FEATURES', 'neoverse512tvb', ' -mcpu=neoverse-512tvb', '', d)}"
+
+require conf/machine/include/arm/arch-armv8-4a.inc
+
+AVAILTUNES += "neoverse512tvb"
+ARMPKGARCH:tune-neoverse512tvb = "neoverse512tvb"
+TUNE_FEATURES:tune-neoverse512tvb = "aarch64 crypto neoverse512tvb"
+PACKAGE_EXTRA_ARCHS:tune-neoverse512tvb = "${PACKAGE_EXTRA_ARCHS:tune-armv8-4a-crypto} neoverse512tvb"
+BASE_LIB:tune-neoverse512tvb = "lib64"
diff --git a/meta/conf/machine/include/arm/armv8-4a/tune-neoversev1.inc b/meta/conf/machine/include/arm/armv8-4a/tune-neoversev1.inc
new file mode 100644
index 0000000000..2b4da7db3c
--- /dev/null
+++ b/meta/conf/machine/include/arm/armv8-4a/tune-neoversev1.inc
@@ -0,0 +1,15 @@
+#
+# Tune Settings for neoverse-v1
+#
+DEFAULTTUNE ?= "neoversev1"
+
+TUNEVALID[neoversev1] = "Enable neoverse-v1 specific processor optimizations"
+TUNE_CCARGS .= "${@bb.utils.contains('TUNE_FEATURES', 'neoversev1', ' -mcpu=neoverse-v1', '', d)}"
+
+require conf/machine/include/arm/arch-armv8-4a.inc
+
+AVAILTUNES += "neoversev1"
+ARMPKGARCH:tune-neoversev1 = "neoversev1"
+TUNE_FEATURES:tune-neoversev1 = "aarch64 crypto neoversev1"
+PACKAGE_EXTRA_ARCHS:tune-neoversev1 = "${PACKAGE_EXTRA_ARCHS:tune-armv8-4a-crypto} neoversev1"
+BASE_LIB:tune-neoversev1 = "lib64"
diff --git a/meta/conf/machine/include/arm/armv8-m/tune-cortexm23.inc b/meta/conf/machine/include/arm/armv8-m/tune-cortexm23.inc
index 25780bc080..e83e0ba68a 100644
--- a/meta/conf/machine/include/arm/armv8-m/tune-cortexm23.inc
+++ b/meta/conf/machine/include/arm/armv8-m/tune-cortexm23.inc
@@ -10,5 +10,6 @@ require conf/machine/include/arm/arch-armv8m-base.inc
AVAILTUNES += "cortexm23"
ARMPKGARCH:tune-cortexm23 = "cortexm23"
-TUNE_FEATURES:tune-cortexm23 = "${TUNE_FEATURES:tune-armv8m-base} cortexm23"
+# We do not want -march since -mcpu is added above to cover for it
+TUNE_FEATURES:tune-cortexm23 = "cortexm23"
PACKAGE_EXTRA_ARCHS:tune-cortexm23 = "${PACKAGE_EXTRA_ARCHS:tune-armv8m-base} cortexm23"
diff --git a/meta/conf/machine/include/arm/armv8-m/tune-cortexm33.inc b/meta/conf/machine/include/arm/armv8-m/tune-cortexm33.inc
index 04d1fe2bde..606900d7a2 100644
--- a/meta/conf/machine/include/arm/armv8-m/tune-cortexm33.inc
+++ b/meta/conf/machine/include/arm/armv8-m/tune-cortexm33.inc
@@ -13,5 +13,6 @@ require conf/machine/include/arm/arch-armv8m-main.inc
# be fixed in GCC
AVAILTUNES += "cortexm33"
ARMPKGARCH:tune-cortexm33 = "cortexm33"
-TUNE_FEATURES:tune-cortexm33 = "${TUNE_FEATURES:tune-armv8m-maine-vfpv5spd16} cortexm33"
+# We do not want -march since -mcpu is added above to cover for it
+TUNE_FEATURES:tune-cortexm33 = "vfpv5spd16 dsp cortexm33"
PACKAGE_EXTRA_ARCHS:tune-cortexm33 = "${PACKAGE_EXTRA_ARCHS:tune-armv8m-maine-vfpv5spd16} cortexm33e-fpv5-spd16"
diff --git a/meta/conf/machine/include/arm/armv8-m/tune-cortexm35p.inc b/meta/conf/machine/include/arm/armv8-m/tune-cortexm35p.inc
index 60e978facd..4394adab0b 100644
--- a/meta/conf/machine/include/arm/armv8-m/tune-cortexm35p.inc
+++ b/meta/conf/machine/include/arm/armv8-m/tune-cortexm35p.inc
@@ -13,5 +13,6 @@ require conf/machine/include/arm/arch-armv8m-main.inc
# be fixed in GCC
AVAILTUNES += "cortexm35p"
ARMPKGARCH:tune-cortexm35p = "cortexm35p"
-TUNE_FEATURES:tune-cortexm35p = "${TUNE_FEATURES:tune-armv8m-maine-vfpv5spd16} cortexm35p"
+# We do not want -march since -mcpu is added above to cover for it
+TUNE_FEATURES:tune-cortexm35p = "vfpv5spd16 dsp cortexm35p"
PACKAGE_EXTRA_ARCHS:tune-cortexm35p = "${PACKAGE_EXTRA_ARCHS:tune-armv8m-maine-vfpv5spd16} cortexm35pe-fpv5-spd16"
diff --git a/meta/conf/machine/include/arm/armv8a/tune-cortexa32.inc b/meta/conf/machine/include/arm/armv8a/tune-cortexa32.inc
index c7b01a2906..25bdf12b18 100644
--- a/meta/conf/machine/include/arm/armv8a/tune-cortexa32.inc
+++ b/meta/conf/machine/include/arm/armv8a/tune-cortexa32.inc
@@ -9,7 +9,8 @@ require conf/machine/include/arm/arch-armv8a.inc
AVAILTUNES += "cortexa32 cortexa32-crypto"
ARMPKGARCH:tune-cortexa32 = "cortexa32"
ARMPKGARCH:tune-cortexa32-crypto = "cortexa32"
-TUNE_FEATURES:tune-cortexa32 = "armv8a cortexa32 crc callconvention-hard neon"
+# We do not want -march since -mcpu is added above to cover for it
+TUNE_FEATURES:tune-cortexa32 = "aarch64 cortexa32 crc callconvention-hard neon"
TUNE_FEATURES:tune-cortexa32-crypto = "${TUNE_FEATURES:tune-cortexa32} crypto"
PACKAGE_EXTRA_ARCHS:tune-cortexa32 = "${PACKAGE_EXTRA_ARCHS:tune-armv8a-crc} cortexa32 cortexa32hf-neon"
PACKAGE_EXTRA_ARCHS:tune-cortexa32-crypto = "${PACKAGE_EXTRA_ARCHS:tune-armv8a-crc-crypto} cortexa32 cortexa32hf-neon cortexa32hf-neon-crypto"
diff --git a/meta/conf/machine/include/arm/armv8a/tune-cortexa34.inc b/meta/conf/machine/include/arm/armv8a/tune-cortexa34.inc
index 55dd845b00..c195d73378 100644
--- a/meta/conf/machine/include/arm/armv8a/tune-cortexa34.inc
+++ b/meta/conf/machine/include/arm/armv8a/tune-cortexa34.inc
@@ -12,7 +12,8 @@ require conf/machine/include/arm/arch-armv8a.inc
AVAILTUNES += "cortexa34 cortexa34-crypto"
ARMPKGARCH:tune-cortexa34 = "cortexa34"
ARMPKGARCH:tune-cortexa34-crypto = "cortexa34"
-TUNE_FEATURES:tune-cortexa34 = "${TUNE_FEATURES:tune-armv8a-crc} cortexa34"
+# We do not want -march since -mcpu is added above to cover for it
+TUNE_FEATURES:tune-cortexa34 = "aarch64 crc cortexa34"
TUNE_FEATURES:tune-cortexa34-crypto = "${TUNE_FEATURES:tune-cortexa34} crypto"
PACKAGE_EXTRA_ARCHS:tune-cortexa34 = "${PACKAGE_EXTRA_ARCHS:tune-armv8a-crc} cortexa34"
PACKAGE_EXTRA_ARCHS:tune-cortexa34-crypto = "${PACKAGE_EXTRA_ARCHS:tune-armv8a-crc-crypto} cortexa34 cortexa34-crypto"
diff --git a/meta/conf/machine/include/arm/armv8a/tune-cortexa35.inc b/meta/conf/machine/include/arm/armv8a/tune-cortexa35.inc
index 33afb19386..d811c84455 100644
--- a/meta/conf/machine/include/arm/armv8a/tune-cortexa35.inc
+++ b/meta/conf/machine/include/arm/armv8a/tune-cortexa35.inc
@@ -9,7 +9,8 @@ require conf/machine/include/arm/arch-armv8a.inc
AVAILTUNES += "cortexa35 cortexa35-crypto"
ARMPKGARCH:tune-cortexa35 = "cortexa35"
ARMPKGARCH:tune-cortexa35-crypto = "cortexa35"
-TUNE_FEATURES:tune-cortexa35 = "${TUNE_FEATURES:tune-armv8a-crc} cortexa35"
+# We do not want -march since -mcpu is added above to cover for it
+TUNE_FEATURES:tune-cortexa35 = "aarch64 crc cortexa35"
TUNE_FEATURES:tune-cortexa35-crypto = "${TUNE_FEATURES:tune-cortexa35} crypto"
PACKAGE_EXTRA_ARCHS:tune-cortexa35 = "${PACKAGE_EXTRA_ARCHS:tune-armv8a-crc} cortexa35"
PACKAGE_EXTRA_ARCHS:tune-cortexa35-crypto = "${PACKAGE_EXTRA_ARCHS:tune-armv8a-crc-crypto} cortexa35 cortexa35-crypto"
diff --git a/meta/conf/machine/include/arm/armv8a/tune-cortexa53.inc b/meta/conf/machine/include/arm/armv8a/tune-cortexa53.inc
index a534ad358d..a88575eb15 100644
--- a/meta/conf/machine/include/arm/armv8a/tune-cortexa53.inc
+++ b/meta/conf/machine/include/arm/armv8a/tune-cortexa53.inc
@@ -9,7 +9,8 @@ require conf/machine/include/arm/arch-armv8a.inc
AVAILTUNES += "cortexa53 cortexa53-crypto"
ARMPKGARCH:tune-cortexa53 = "cortexa53"
ARMPKGARCH:tune-cortexa53-crypto = "cortexa53-crypto"
-TUNE_FEATURES:tune-cortexa53 = "${TUNE_FEATURES:tune-armv8a-crc} cortexa53"
+# We do not want -march since -mcpu is added above to cover for it
+TUNE_FEATURES:tune-cortexa53 = "aarch64 crc cortexa53"
TUNE_FEATURES:tune-cortexa53-crypto = "${TUNE_FEATURES:tune-cortexa53} crypto"
PACKAGE_EXTRA_ARCHS:tune-cortexa53 = "${PACKAGE_EXTRA_ARCHS:tune-armv8a-crc} cortexa53"
PACKAGE_EXTRA_ARCHS:tune-cortexa53-crypto = "${PACKAGE_EXTRA_ARCHS:tune-armv8a-crc-crypto} cortexa53 cortexa53-crypto"
diff --git a/meta/conf/machine/include/arm/armv8a/tune-cortexa57-cortexa53.inc b/meta/conf/machine/include/arm/armv8a/tune-cortexa57-cortexa53.inc
index 7de671a2e5..052d1173c9 100644
--- a/meta/conf/machine/include/arm/armv8a/tune-cortexa57-cortexa53.inc
+++ b/meta/conf/machine/include/arm/armv8a/tune-cortexa57-cortexa53.inc
@@ -9,6 +9,7 @@ require conf/machine/include/arm/arch-armv8a.inc
# Little Endian base configs
AVAILTUNES += "cortexa57-cortexa53"
ARMPKGARCH:tune-cortexa57-cortexa53 = "cortexa57-cortexa53"
-TUNE_FEATURES:tune-cortexa57-cortexa53 = "${TUNE_FEATURES:tune-armv8a-crc} cortexa57-cortexa53"
+# We do not want -march since -mcpu is added above to cover for it
+TUNE_FEATURES:tune-cortexa57-cortexa53 = "aarch64 crc cortexa57-cortexa53"
PACKAGE_EXTRA_ARCHS:tune-cortexa57-cortexa53 = "${PACKAGE_EXTRA_ARCHS:tune-armv8a-crc} cortexa57-cortexa53"
BASE_LIB:tune-cortexa57-cortexa53 = "lib64"
diff --git a/meta/conf/machine/include/arm/armv8a/tune-cortexa57.inc b/meta/conf/machine/include/arm/armv8a/tune-cortexa57.inc
index 37650d8798..b0de20f836 100644
--- a/meta/conf/machine/include/arm/armv8a/tune-cortexa57.inc
+++ b/meta/conf/machine/include/arm/armv8a/tune-cortexa57.inc
@@ -9,7 +9,8 @@ require conf/machine/include/arm/arch-armv8a.inc
AVAILTUNES += "cortexa57 cortexa57-crypto"
ARMPKGARCH:tune-cortexa57 = "cortexa57"
ARMPKGARCH:tune-cortexa57-crypto = "cortexa57-crypto"
-TUNE_FEATURES:tune-cortexa57 = "${TUNE_FEATURES:tune-armv8a-crc} cortexa57"
+# We do not want -march since -mcpu is added above to cover for it
+TUNE_FEATURES:tune-cortexa57 = "aarch64 crc cortexa57"
TUNE_FEATURES:tune-cortexa57-crypto = "${TUNE_FEATURES:tune-cortexa57} crypto"
PACKAGE_EXTRA_ARCHS:tune-cortexa57 = "${PACKAGE_EXTRA_ARCHS:tune-armv8a-crc} cortexa57"
PACKAGE_EXTRA_ARCHS:tune-cortexa57-crypto = "${PACKAGE_EXTRA_ARCHS:tune-armv8a-crc-crypto} cortexa57 cortexa57-crypto"
diff --git a/meta/conf/machine/include/arm/armv8a/tune-cortexa72-cortexa53.inc b/meta/conf/machine/include/arm/armv8a/tune-cortexa72-cortexa53.inc
index a77ef59d62..ff188aec5f 100644
--- a/meta/conf/machine/include/arm/armv8a/tune-cortexa72-cortexa53.inc
+++ b/meta/conf/machine/include/arm/armv8a/tune-cortexa72-cortexa53.inc
@@ -10,7 +10,8 @@ require conf/machine/include/arm/arch-armv8a.inc
AVAILTUNES += "cortexa72-cortexa53 cortexa72-cortexa53-crypto"
ARMPKGARCH:tune-cortexa72-cortexa53 = "cortexa72-cortexa53"
ARMPKGARCH:tune-cortexa72-cortexa53-crypto = "cortexa72-cortexa53-crypto"
-TUNE_FEATURES:tune-cortexa72-cortexa53 = "${TUNE_FEATURES:tune-armv8a-crc} cortexa72-cortexa53"
+# We do not want -march since -mcpu is added above to cover for it
+TUNE_FEATURES:tune-cortexa72-cortexa53 = "aarch64 crc cortexa72-cortexa53"
TUNE_FEATURES:tune-cortexa72-cortexa53-crypto = "${TUNE_FEATURES:tune-cortexa72-cortexa53} crypto"
PACKAGE_EXTRA_ARCHS:tune-cortexa72-cortexa53 = "${PACKAGE_EXTRA_ARCHS:tune-armv8a-crc} cortexa72-cortexa53"
PACKAGE_EXTRA_ARCHS:tune-cortexa72-cortexa53-crypto = "${PACKAGE_EXTRA_ARCHS:tune-armv8a-crc-crypto} cortexa72-cortexa53 cortexa72-cortexa53-crypto"
diff --git a/meta/conf/machine/include/arm/armv8a/tune-cortexa72.inc b/meta/conf/machine/include/arm/armv8a/tune-cortexa72.inc
index b0a017e444..cbb6418c06 100644
--- a/meta/conf/machine/include/arm/armv8a/tune-cortexa72.inc
+++ b/meta/conf/machine/include/arm/armv8a/tune-cortexa72.inc
@@ -9,7 +9,8 @@ require conf/machine/include/arm/arch-armv8a.inc
AVAILTUNES += "cortexa72 cortexa72-crypto"
ARMPKGARCH:tune-cortexa72 = "cortexa72"
ARMPKGARCH:tune-cortexa72-crypto = "cortexa72"
-TUNE_FEATURES:tune-cortexa72 = "${TUNE_FEATURES:tune-armv8a-crc} cortexa72"
+# We do not want -march since -mcpu is added above to cover for it
+TUNE_FEATURES:tune-cortexa72 = "aarch64 crc cortexa72"
TUNE_FEATURES:tune-cortexa72-crypto = "${TUNE_FEATURES:tune-cortexa72} crypto"
PACKAGE_EXTRA_ARCHS:tune-cortexa72 = "${PACKAGE_EXTRA_ARCHS:tune-armv8a-crc} cortexa72"
PACKAGE_EXTRA_ARCHS:tune-cortexa72-crypto = "${PACKAGE_EXTRA_ARCHS:tune-armv8a-crc-crypto} cortexa72 cortexa72-crypto"
diff --git a/meta/conf/machine/include/arm/armv8a/tune-cortexa73-cortexa35.inc b/meta/conf/machine/include/arm/armv8a/tune-cortexa73-cortexa35.inc
index 869670bf0c..4f4f25f511 100644
--- a/meta/conf/machine/include/arm/armv8a/tune-cortexa73-cortexa35.inc
+++ b/meta/conf/machine/include/arm/armv8a/tune-cortexa73-cortexa35.inc
@@ -13,7 +13,8 @@ require conf/machine/include/arm/arch-armv8a.inc
AVAILTUNES += "cortexa73-cortexa35 cortexa73-cortexa35-crypto"
ARMPKGARCH:tune-cortexa73-cortexa35 = "cortexa73-cortexa35"
ARMPKGARCH:tune-cortexa73-cortexa35-crypto = "cortexa73-cortexa35-crypto"
-TUNE_FEATURES:tune-cortexa73-cortexa35 = "${TUNE_FEATURES:tune-armv8a-crc} cortexa73-cortexa35"
+# We do not want -march since -mcpu is added above to cover for it
+TUNE_FEATURES:tune-cortexa73-cortexa35 = "aarch64 crc cortexa73-cortexa35"
TUNE_FEATURES:tune-cortexa73-cortexa35-crypto = "${TUNE_FEATURES:tune-cortexa73-cortexa35} crypto"
PACKAGE_EXTRA_ARCHS:tune-cortexa73-cortexa35 = "${PACKAGE_EXTRA_ARCHS:tune-armv8a-crc} cortexa73-cortexa35"
PACKAGE_EXTRA_ARCHS:tune-cortexa73-cortexa35-crypto = "${PACKAGE_EXTRA_ARCHS:tune-armv8a-crc-crypto} cortexa73-cortexa35 cortexa73-cortexa35-crypto"
diff --git a/meta/conf/machine/include/arm/armv8a/tune-cortexa73-cortexa53.inc b/meta/conf/machine/include/arm/armv8a/tune-cortexa73-cortexa53.inc
index 9cebffd54d..1d152ed83b 100644
--- a/meta/conf/machine/include/arm/armv8a/tune-cortexa73-cortexa53.inc
+++ b/meta/conf/machine/include/arm/armv8a/tune-cortexa73-cortexa53.inc
@@ -10,7 +10,8 @@ require conf/machine/include/arm/arch-armv8a.inc
AVAILTUNES += "cortexa73-cortexa53 cortexa73-cortexa53-crypto"
ARMPKGARCH:tune-cortexa73-cortexa53 = "cortexa73-cortexa53"
ARMPKGARCH:tune-cortexa73-cortexa53-crypto = "cortexa73-cortexa53-crypto"
-TUNE_FEATURES:tune-cortexa73-cortexa53 = "${TUNE_FEATURES:tune-armv8a-crc} cortexa73-cortexa53"
+# We do not want -march since -mcpu is added above to cover for it
+TUNE_FEATURES:tune-cortexa73-cortexa53 = "aarch64 crc cortexa73-cortexa53"
TUNE_FEATURES:tune-cortexa73-cortexa53-crypto = "${TUNE_FEATURES:tune-cortexa73-cortexa53} crypto"
PACKAGE_EXTRA_ARCHS:tune-cortexa73-cortexa53 = "${PACKAGE_EXTRA_ARCHS:tune-armv8a-crc} cortexa73-cortexa53"
PACKAGE_EXTRA_ARCHS:tune-cortexa73-cortexa53-crypto = "${PACKAGE_EXTRA_ARCHS:tune-armv8a-crc-crypto} cortexa73-cortexa53 cortexa73-cortexa53-crypto"
diff --git a/meta/conf/machine/include/arm/armv8a/tune-cortexa73.inc b/meta/conf/machine/include/arm/armv8a/tune-cortexa73.inc
index 13876e7245..b3b06a4f09 100644
--- a/meta/conf/machine/include/arm/armv8a/tune-cortexa73.inc
+++ b/meta/conf/machine/include/arm/armv8a/tune-cortexa73.inc
@@ -12,7 +12,8 @@ require conf/machine/include/arm/arch-armv8a.inc
AVAILTUNES += "cortexa73 cortexa73-crypto"
ARMPKGARCH:tune-cortexa73 = "cortexa73"
ARMPKGARCH:tune-cortexa73-crypto = "cortexa73"
-TUNE_FEATURES:tune-cortexa73 = "${TUNE_FEATURES:tune-armv8a-crc} cortexa73"
+# We do not want -march since -mcpu is added above to cover for it
+TUNE_FEATURES:tune-cortexa73 = "aarch64 crc cortexa73"
TUNE_FEATURES:tune-cortexa73-crypto = "${TUNE_FEATURES:tune-cortexa73} crypto"
PACKAGE_EXTRA_ARCHS:tune-cortexa73 = "${PACKAGE_EXTRA_ARCHS:tune-armv8a-crc} cortexa73"
PACKAGE_EXTRA_ARCHS:tune-cortexa73-crypto = "${PACKAGE_EXTRA_ARCHS:tune-armv8a-crc-crypto} cortexa73 cortexa73-crypto"
diff --git a/meta/conf/machine/include/arm/armv8r/tune-cortexr52.inc b/meta/conf/machine/include/arm/armv8r/tune-cortexr52.inc
index 3a97cf8ee8..89f0e09450 100644
--- a/meta/conf/machine/include/arm/armv8r/tune-cortexr52.inc
+++ b/meta/conf/machine/include/arm/armv8r/tune-cortexr52.inc
@@ -10,5 +10,11 @@ require conf/machine/include/arm/arch-armv8r.inc
AVAILTUNES += "cortexr52"
ARMPKGARCH:tune-cortexr52 = "cortexr52"
-TUNE_FEATURES:tune-cortexr52 = "${TUNE_FEATURES:tune-armv8r-crc-simd} cortexr52"
+# We do not want -march since -mcpu is added above to cover for it
+TUNE_FEATURES:tune-cortexr52 = "aarch64 crc simd cortexr52"
PACKAGE_EXTRA_ARCHS:tune-cortexr52 = "${PACKAGE_EXTRA_ARCHS:tune-armv8r-crc-simd} cortexr52"
+
+AVAILTUNES += "cortexr52hf"
+ARMPKGARCH:tune-cortexr52hf = "cortexr52"
+TUNE_FEATURES:tune-cortexr52hf = "${TUNE_FEATURES:tune-cortexr52} callconvention-hard"
+PACKAGE_EXTRA_ARCHS:tune-cortexr52hf = "cortexr52hf"
diff --git a/meta/conf/machine/include/arm/armv8r/tune-cortexr82.inc b/meta/conf/machine/include/arm/armv8r/tune-cortexr82.inc
new file mode 100644
index 0000000000..84b2471c6b
--- /dev/null
+++ b/meta/conf/machine/include/arm/armv8r/tune-cortexr82.inc
@@ -0,0 +1,15 @@
+#
+# Tune Settings for cortex-r82
+#
+DEFAULTTUNE ?= "cortexr82"
+
+TUNEVALID[cortexr82] = "Enable cortex-r82 specific processor optimizations"
+TUNE_CCARGS .= "${@bb.utils.contains('TUNE_FEATURES', 'cortexr82', ' -mcpu=cortex-r82', '', d)}"
+
+require conf/machine/include/arm/arch-armv8r.inc
+
+AVAILTUNES += "cortexr82"
+ARMPKGARCH:tune-cortexr82 = "cortexr82"
+TUNE_FEATURES:tune-cortexr82 = "${TUNE_FEATURES:tune-armv8r-crc-simd} cortexr82"
+PACKAGE_EXTRA_ARCHS:tune-cortexr82 = "${PACKAGE_EXTRA_ARCHS:tune-armv8r-crc-simd} cortexr82"
+BASE_LIB:tune-cortexr82 = "lib64"
diff --git a/meta/conf/machine/include/arm/armv9a/tune-cortexa510.inc b/meta/conf/machine/include/arm/armv9a/tune-cortexa510.inc
new file mode 100644
index 0000000000..09219ec7f1
--- /dev/null
+++ b/meta/conf/machine/include/arm/armv9a/tune-cortexa510.inc
@@ -0,0 +1,15 @@
+#
+# Tune Settings for cortex-a510
+#
+DEFAULTTUNE ?= "cortexa510"
+
+TUNEVALID[cortexa510] = "Enable cortex-a510 specific processor optimizations"
+TUNE_CCARGS .= "${@bb.utils.contains('TUNE_FEATURES', 'cortexa510', ' -mcpu=cortex-a510', '', d)}"
+
+require conf/machine/include/arm/arch-armv9a.inc
+
+AVAILTUNES += "cortexa510"
+ARMPKGARCH:tune-cortexa510 = "cortexa510"
+TUNE_FEATURES:tune-cortexa510 = "aarch64 crypto cortexa510"
+PACKAGE_EXTRA_ARCHS:tune-cortexa510 = "${PACKAGE_EXTRA_ARCHS:tune-armv9a-crypto} cortexa510"
+BASE_LIB:tune-cortexa510 = "lib64"
diff --git a/meta/conf/machine/include/arm/armv9a/tune-cortexa710.inc b/meta/conf/machine/include/arm/armv9a/tune-cortexa710.inc
new file mode 100644
index 0000000000..19743d67db
--- /dev/null
+++ b/meta/conf/machine/include/arm/armv9a/tune-cortexa710.inc
@@ -0,0 +1,15 @@
+#
+# Tune Settings for cortex-a710
+#
+DEFAULTTUNE ?= "cortexa710"
+
+TUNEVALID[cortexa710] = "Enable cortex-a710 specific processor optimizations"
+TUNE_CCARGS .= "${@bb.utils.contains('TUNE_FEATURES', 'cortexa710', ' -mcpu=cortex-a710', '', d)}"
+
+require conf/machine/include/arm/arch-armv9a.inc
+
+AVAILTUNES += "cortexa710"
+ARMPKGARCH:tune-cortexa710 = "cortexa710"
+TUNE_FEATURES:tune-cortexa710 = "aarch64 crypto cortexa710"
+PACKAGE_EXTRA_ARCHS:tune-cortexa710 = "${PACKAGE_EXTRA_ARCHS:tune-armv9a-crypto} cortexa710"
+BASE_LIB:tune-cortexa710 = "lib64"
diff --git a/meta/conf/machine/include/arm/armv9a/tune-cortexa715.inc b/meta/conf/machine/include/arm/armv9a/tune-cortexa715.inc
new file mode 100644
index 0000000000..2f6d8c6f8f
--- /dev/null
+++ b/meta/conf/machine/include/arm/armv9a/tune-cortexa715.inc
@@ -0,0 +1,15 @@
+#
+# Tune Settings for cortex-a715
+#
+DEFAULTTUNE ?= "cortexa715"
+
+TUNEVALID[cortexa715] = "Enable cortex-a715 specific processor optimizations"
+TUNE_CCARGS .= "${@bb.utils.contains('TUNE_FEATURES', 'cortexa715', ' -mcpu=cortex-a715', '', d)}"
+
+require conf/machine/include/arm/arch-armv9a.inc
+
+AVAILTUNES += "cortexa715"
+ARMPKGARCH:tune-cortexa715 = "cortexa715"
+TUNE_FEATURES:tune-cortexa715 = "aarch64 crypto cortexa715"
+PACKAGE_EXTRA_ARCHS:tune-cortexa715 = "${PACKAGE_EXTRA_ARCHS:tune-armv9a-crypto} cortexa715"
+BASE_LIB:tune-cortexa715 = "lib64"
diff --git a/meta/conf/machine/include/arm/armv9a/tune-cortexx2.inc b/meta/conf/machine/include/arm/armv9a/tune-cortexx2.inc
new file mode 100644
index 0000000000..c116e30ff5
--- /dev/null
+++ b/meta/conf/machine/include/arm/armv9a/tune-cortexx2.inc
@@ -0,0 +1,15 @@
+#
+# Tune Settings for cortex-x2
+#
+DEFAULTTUNE ?= "cortexx2"
+
+TUNEVALID[cortexx2] = "Enable cortex-x2 specific processor optimizations"
+TUNE_CCARGS .= "${@bb.utils.contains('TUNE_FEATURES', 'cortexx2', ' -mcpu=cortex-x2', '', d)}"
+
+require conf/machine/include/arm/arch-armv9a.inc
+
+AVAILTUNES += "cortexx2"
+ARMPKGARCH:tune-cortexx2 = "cortexx2"
+TUNE_FEATURES:tune-cortexx2 = "aarch64 crypto cortexx2"
+PACKAGE_EXTRA_ARCHS:tune-cortexx2 = "${PACKAGE_EXTRA_ARCHS:tune-armv9a-crypto} cortexx2"
+BASE_LIB:tune-cortexx2 = "lib64"
diff --git a/meta/conf/machine/include/arm/armv9a/tune-cortexx3.inc b/meta/conf/machine/include/arm/armv9a/tune-cortexx3.inc
new file mode 100644
index 0000000000..7982079ef8
--- /dev/null
+++ b/meta/conf/machine/include/arm/armv9a/tune-cortexx3.inc
@@ -0,0 +1,15 @@
+#
+# Tune Settings for cortex-x3
+#
+DEFAULTTUNE ?= "cortexx3"
+
+TUNEVALID[cortexx3] = "Enable cortex-x3 specific processor optimizations"
+TUNE_CCARGS .= "${@bb.utils.contains('TUNE_FEATURES', 'cortexx3', ' -mcpu=cortex-x3', '', d)}"
+
+require conf/machine/include/arm/arch-armv9a.inc
+
+AVAILTUNES += "cortexx3"
+ARMPKGARCH:tune-cortexx3 = "cortexx3"
+TUNE_FEATURES:tune-cortexx3 = "aarch64 crypto cortexx3"
+PACKAGE_EXTRA_ARCHS:tune-cortexx3 = "${PACKAGE_EXTRA_ARCHS:tune-armv9a-crypto} cortexx3"
+BASE_LIB:tune-cortexx3 = "lib64"
diff --git a/meta/conf/machine/include/arm/armv9a/tune-neoversen2.inc b/meta/conf/machine/include/arm/armv9a/tune-neoversen2.inc
index 36355f7bed..ad60a3c9f3 100644
--- a/meta/conf/machine/include/arm/armv9a/tune-neoversen2.inc
+++ b/meta/conf/machine/include/arm/armv9a/tune-neoversen2.inc
@@ -6,17 +6,16 @@ DEFAULTTUNE ?= "neoversen2"
TUNEVALID[neoversen2] = "Enable Neoverse-N2 specific processor optimizations"
TUNE_CCARGS .= "${@bb.utils.contains('TUNE_FEATURES', 'neoversen2', ' -mcpu=neoverse-n2', '', d)}"
-# Even though the Neoverse N2 core implemnts the Arm v9.0-A architecture,
-# but the support of it in GCC is based on the Arm v8.5-A architecture.
-require conf/machine/include/arm/arch-armv8-5a.inc
+require conf/machine/include/arm/arch-armv9a.inc
# Little Endian base configs
AVAILTUNES += "neoversen2 neoversen2-crypto"
ARMPKGARCH:tune-neoversen2 = "neoversen2"
ARMPKGARCH:tune-neoversen2-crypto = "neoversen2-crypto"
-TUNE_FEATURES:tune-neoversen2 = "${TUNE_FEATURES:tune-armv8-5a} neoversen2"
+# We do not want -march since -mcpu is added above to cover for it
+TUNE_FEATURES:tune-neoversen2 = "aarch64 neoversen2"
TUNE_FEATURES:tune-neoversen2-crypto = "${TUNE_FEATURES:tune-neoversen2} crypto"
-PACKAGE_EXTRA_ARCHS:tune-neoversen2 = "${PACKAGE_EXTRA_ARCHS:tune-armv8-5a} neoversen2"
-PACKAGE_EXTRA_ARCHS:tune-neoversen2-crypto = "${PACKAGE_EXTRA_ARCHS:tune-armv8-5a-crypto} neoversen2 neoversen2-crypto"
+PACKAGE_EXTRA_ARCHS:tune-neoversen2 = "${PACKAGE_EXTRA_ARCHS:tune-armv9a} neoversen2"
+PACKAGE_EXTRA_ARCHS:tune-neoversen2-crypto = "${PACKAGE_EXTRA_ARCHS:tune-armv9a-crypto} neoversen2 neoversen2-crypto"
BASE_LIB:tune-neoversen2 = "lib64"
BASE_LIB:tune-neoversen2-crypto = "lib64"
diff --git a/meta/conf/machine/include/arm/armv9a/tune-neoversev2.inc b/meta/conf/machine/include/arm/armv9a/tune-neoversev2.inc
new file mode 100644
index 0000000000..5d1e108468
--- /dev/null
+++ b/meta/conf/machine/include/arm/armv9a/tune-neoversev2.inc
@@ -0,0 +1,15 @@
+#
+# Tune Settings for neoverse-v2
+#
+DEFAULTTUNE ?= "neoversev2"
+
+TUNEVALID[neoversev2] = "Enable neoverse-v2 specific processor optimizations"
+TUNE_CCARGS .= "${@bb.utils.contains('TUNE_FEATURES', 'neoversev2', ' -mcpu=neoverse-v2', '', d)}"
+
+require conf/machine/include/arm/arch-armv9a.inc
+
+AVAILTUNES += "neoversev2"
+ARMPKGARCH:tune-neoversev2 = "neoversev2"
+TUNE_FEATURES:tune-neoversev2 = "aarch64 crypto neoversev2"
+PACKAGE_EXTRA_ARCHS:tune-neoversev2 = "${PACKAGE_EXTRA_ARCHS:tune-armv9a-crypto} neoversev2"
+BASE_LIB:tune-neoversev2 = "lib64"
diff --git a/meta/conf/machine/include/arm/feature-arm-sve.inc b/meta/conf/machine/include/arm/feature-arm-sve.inc
new file mode 100644
index 0000000000..bdae3d8fc3
--- /dev/null
+++ b/meta/conf/machine/include/arm/feature-arm-sve.inc
@@ -0,0 +1,8 @@
+# Scalable Vector Extension (SVE) for Armv8-A and R
+# Enabled by default for Armv9
+
+TUNEVALID[sve] = "Enable SVE instructions for ARMv8"
+TUNE_CCARGS_MARCH_OPTS .= "${@bb.utils.contains('TUNE_FEATURES', 'sve', '+sve', '', d)}"
+
+TUNEVALID[sve2] = "Enable SVE2 instructions for ARMv8"
+TUNE_CCARGS_MARCH_OPTS .= "${@bb.utils.contains('TUNE_FEATURES', 'sve2', '+sve2', '', d)}"
diff --git a/meta/conf/machine/include/arm/feature-arm-vfp.inc b/meta/conf/machine/include/arm/feature-arm-vfp.inc
index 678888e638..d020100daa 100644
--- a/meta/conf/machine/include/arm/feature-arm-vfp.inc
+++ b/meta/conf/machine/include/arm/feature-arm-vfp.inc
@@ -5,11 +5,12 @@
TUNEVALID[vfp] = "Enable Vector Floating Point (vfp) unit."
TUNE_CCARGS_MFPU .= "${@bb.utils.contains('TUNE_FEATURES', 'vfp', ' vfp', '', d)}"
+# simd is special, we don't pass this to the -mfpu, it's implied
TUNE_CCARGS .= "${@ (' -mfpu=%s' % d.getVar('TUNE_CCARGS_MFPU').split()[-1]) if (d.getVar('TUNE_CCARGS_MFPU') != '') else ''}"
# The following deals with both vfpv3-d16 and vfpv4-d16
ARMPKGSFX_FPU = "${@ ('-%s' % d.getVar('TUNE_CCARGS_MFPU').split()[-1].replace('-d16', 'd16')) if (d.getVar('TUNE_CCARGS_MFPU') != '') else ''}"
TUNEVALID[callconvention-hard] = "Enable EABI hard float call convention, requires VFP."
-TUNE_CCARGS_MFLOAT = "${@ bb.utils.contains('TUNE_FEATURES', 'callconvention-hard', 'hard', 'softfp', d) if (d.getVar('TUNE_CCARGS_MFPU') != '') else '' }"
+TUNE_CCARGS_MFLOAT = "${@ bb.utils.contains('TUNE_FEATURES', 'callconvention-hard', 'hard', 'softfp', d) if (d.getVar('TUNE_CCARGS_MFPU') != '' or bb.utils.contains('TUNE_FEATURES', 'simd', True, False, d)) else '' }"
TUNE_CCARGS .= "${@ ' -mfloat-abi=${TUNE_CCARGS_MFLOAT}' if (d.getVar('TUNE_CCARGS_MFLOAT') != '') else ''}"
ARMPKGSFX_EABI = "${@ 'hf' if (d.getVar('TUNE_CCARGS_MFLOAT') == 'hard') else ''}"
diff --git a/meta/conf/machine/include/loongarch/arch-loongarch.inc b/meta/conf/machine/include/loongarch/arch-loongarch.inc
new file mode 100644
index 0000000000..d0a51b34c7
--- /dev/null
+++ b/meta/conf/machine/include/loongarch/arch-loongarch.inc
@@ -0,0 +1,7 @@
+# LoongArch Architecture definition
+
+DEFAULTTUNE ?= "loongarch64"
+
+TUNE_ARCH = "${TUNE_ARCH:tune-${DEFAULTTUNE}}"
+TUNE_PKGARCH = "${TUNE_PKGARCH:tune-${DEFAULTTUNE}}"
+TUNE_CCARGS:append = "${@bb.utils.contains('TUNE_FEATURES', 'loongarch64', ' -march=loongarch64 -mabi=lp64d', ' ', d)}"
diff --git a/meta/conf/machine/include/loongarch/qemuloongarch.inc b/meta/conf/machine/include/loongarch/qemuloongarch.inc
new file mode 100644
index 0000000000..e1bcfabc43
--- /dev/null
+++ b/meta/conf/machine/include/loongarch/qemuloongarch.inc
@@ -0,0 +1,35 @@
+PREFERRED_PROVIDER_virtual/bootloader ?= "u-boot"
+
+require conf/machine/include/qemu.inc
+require conf/machine/include/loongarch/tune-loongarch.inc
+
+MACHINE_FEATURES = "screen keyboard ext2 ext3 serial"
+
+KERNEL_IMAGETYPE = "vmlinuz"
+KERNEL_IMAGETYPES += "vmlinuz"
+KEEPUIMAGE = "no"
+
+SERIAL_CONSOLES ?= "115200;ttyS0 115200;hvc0"
+
+IMAGE_FSTYPES += "ext4 wic.qcow2"
+
+WKS_FILE ?= "qemuloongarch.wks"
+
+MACHINE_EXTRA_RRECOMMENDS += " kernel-modules"
+
+#EXTRA_IMAGEDEPENDS += "opensbi"
+
+UBOOT_ENTRYPOINT_loongarch32 = "0x80400000"
+UBOOT_ENTRYPOINT_loongarch64 = "0x80200000"
+
+# qemuboot options
+QB_KERNEL_CMDLINE_APPEND = "earlycon=sbi"
+QB_MACHINE = "-machine virt"
+QB_DEFAULT_BIOS = "fw_jump.elf"
+QB_TAP_OPT = "-netdev tap,id=net0,ifname=@TAP@,script=no,downscript=no"
+QB_NETWORK_DEVICE = "-device virtio-net-device,netdev=net0,mac=@MAC@"
+QB_ROOTFS_OPT = "-drive id=disk0,file=@ROOTFS@,if=none,format=raw -device virtio-blk-device,drive=disk0"
+QB_SERIAL_OPT = "-device virtio-serial-pci -chardev null,id=virtcon -device virtconsole,chardev=virtcon"
+QB_TCPSERIAL_OPT = " -device virtio-serial-pci -chardev socket,id=virtcon,port=@PORT@,host=127.0.0.1,nodelay=on -device virtconsole,chardev=virtcon"
+# Add the 'virtio-rng-pci' device otherwise the guest may run out of entropy
+QB_OPT_APPEND = " -object rng-random,filename=/dev/urandom,id=rng0 -device virtio-rng-device,rng=rng0"
diff --git a/meta/conf/machine/include/loongarch/tune-loongarch.inc b/meta/conf/machine/include/loongarch/tune-loongarch.inc
new file mode 100644
index 0000000000..f02ddda474
--- /dev/null
+++ b/meta/conf/machine/include/loongarch/tune-loongarch.inc
@@ -0,0 +1,13 @@
+require conf/machine/include/loongarch/arch-loongarch.inc
+
+TUNEVALID[loongarch64] = "Enable 64-bit LoongArch optimizations"
+
+TUNEVALID[littleendian] = "Little endian mode"
+
+AVAILTUNES += "loongarch64"
+
+# Default
+TUNE_FEATURES:tune-loongarch64 = "loongarch64"
+TUNE_ARCH:tune-loongarch64 = "loongarch64"
+TUNE_PKGARCH:tune-loongarch64 = "loongarch64"
+PACKAGE_EXTRA_ARCHS:tune-loongarch64 = "loongarch64"
diff --git a/meta/conf/machine/include/microblaze/feature-microblaze-versions.inc b/meta/conf/machine/include/microblaze/feature-microblaze-versions.inc
index 5c37f49abb..658e87b8cd 100644
--- a/meta/conf/machine/include/microblaze/feature-microblaze-versions.inc
+++ b/meta/conf/machine/include/microblaze/feature-microblaze-versions.inc
@@ -16,7 +16,7 @@ def microblaze_current_version(d, gcc = False):
# find the current version, and convert it to major/minor integers
version = None
for t in (d.getVar("TUNE_FEATURES") or "").split():
- m = re.search("^v(\d+)\.(\d+)", t)
+ m = re.search(r"^v(\d+)\.(\d+)", t)
if m:
version = int(m.group(1)), int(m.group(2))
break
diff --git a/meta/conf/machine/include/mips/arch-mips.inc b/meta/conf/machine/include/mips/arch-mips.inc
index e48ddd2d6b..e39cdcab5d 100644
--- a/meta/conf/machine/include/mips/arch-mips.inc
+++ b/meta/conf/machine/include/mips/arch-mips.inc
@@ -26,6 +26,8 @@ MACHINE_FEATURES_BACKFILL_CONSIDERED:append = " ${@bb.utils.contains('TUNE_FEATU
TUNEVALID[n64] = "MIPS64 n64 ABI"
TUNECONFLICTS[n64] = "o32 n32"
TUNE_CCARGS .= "${@bb.utils.contains('TUNE_FEATURES', 'n64', ' -mabi=64', '', d)}"
+LD64ARG = "${@bb.utils.contains('TUNE_FEATURES', 'bigendian', '-m elf64btsmip', '-m elf64ltsmip', d)}"
+TUNE_LDARGS += "${@bb.utils.contains('TUNE_FEATURES', 'n64', '${LD64ARG}', '', d)}"
# Floating point
TUNEVALID[fpu-hard] = "Use hardware FPU"
diff --git a/meta/conf/machine/include/mips/tune-mips64r2.inc b/meta/conf/machine/include/mips/tune-mips64r2.inc
index c644f40918..e9ca4201ff 100644
--- a/meta/conf/machine/include/mips/tune-mips64r2.inc
+++ b/meta/conf/machine/include/mips/tune-mips64r2.inc
@@ -12,11 +12,13 @@ TUNE_FEATURES:tune-mips64r2 = "${TUNE_FEATURES:tune-mips64} mips64r2"
BASE_LIB:tune-mips64r2 = "lib64"
MIPSPKGSFX_VARIANT:tune-mips64r2 = "mips64r2"
PACKAGE_EXTRA_ARCHS:tune-mips64r2 = "mips64 mips64r2"
+QEMU_EXTRAOPTIONS_mips64r2 = " -cpu MIPS64R2-generic"
TUNE_FEATURES:tune-mips64r2el = "${TUNE_FEATURES:tune-mips64el} mips64r2"
BASE_LIB:tune-mips64r2el = "lib64"
MIPSPKGSFX_VARIANT:tune-mips64r2el = "mips64r2el"
PACKAGE_EXTRA_ARCHS:tune-mips64r2el = "mips64el mips64r2el"
+QEMU_EXTRAOPTIONS_mips64r2el = " -cpu MIPS64R2-generic"
# MIPS 64r2 Soft Float
AVAILTUNES += "mips64r2-nf mips64r2el-nf"
@@ -25,11 +27,13 @@ TUNE_FEATURES:tune-mips64r2-nf = "${TUNE_FEATURES:tune-mips64-nf} mips64r2"
BASE_LIB:tune-mips64r2-nf = "lib64"
MIPSPKGSFX_VARIANT:tune-mips64r2-nf = "${TUNE_ARCH}"
PACKAGE_EXTRA_ARCHS:tune-mips64r2-nf = "mips64-nf mips64r2-nf"
+QEMU_EXTRAOPTIONS_mips64r2-nf = " -cpu MIPS64R2-generic"
TUNE_FEATURES:tune-mips64r2el-nf = "${TUNE_FEATURES:tune-mips64el-nf} mips64r2"
BASE_LIB:tune-mips64r2el-nf = "lib64"
MIPSPKGSFX_VARIANT:tune-mips64r2el-nf = "${TUNE_ARCH}"
PACKAGE_EXTRA_ARCHS:tune-mips64r2el-nf = "mips64el-nf mips64r2el-nf"
+QEMU_EXTRAOPTIONS_mips64r2el-nf = " -cpu MIPS64R2-generic"
# MIPS 64r2 n32
AVAILTUNES += "mips64r2-n32 mips64r2el-n32"
@@ -38,11 +42,13 @@ TUNE_FEATURES:tune-mips64r2-n32 = "${TUNE_FEATURES:tune-mips64-n32} mips64r2"
BASE_LIB:tune-mips64r2-n32 = "lib32"
MIPSPKGSFX_VARIANT:tune-mips64r2-n32 = "${TUNE_ARCH}"
PACKAGE_EXTRA_ARCHS:tune-mips64r2-n32 = "mips64-n32 mips64r2-n32"
+QEMU_EXTRAOPTIONS_mips64r2-n32 = " -cpu MIPS64R2-generic"
TUNE_FEATURES:tune-mips64r2el-n32 = "${TUNE_FEATURES:tune-mips64el-n32} mips64r2"
BASE_LIB:tune-mips64r2el-n32 = "lib32"
MIPSPKGSFX_VARIANT:tune-mips64r2el-n32 = "${TUNE_ARCH}"
PACKAGE_EXTRA_ARCHS:tune-mips64r2el-n32 = "mips64el-n32 mips64r2el-n32"
+QEMU_EXTRAOPTIONS_mips64r2el-n32 = " -cpu MIPS64R2-generic"
# MIPS 64r2 n32 and Soft Float
AVAILTUNES += "mips64r2-nf-n32 mips64r2el-nf-n32"
@@ -51,11 +57,13 @@ TUNE_FEATURES:tune-mips64r2-nf-n32 = "${TUNE_FEATURES:tune-mips64-nf-n32} mips64
BASE_LIB:tune-mips64r2-nf-n32 = "lib32"
MIPSPKGSFX_VARIANT:tune-mips64r2-nf-n32 = "${TUNE_ARCH}"
PACKAGE_EXTRA_ARCHS:tune-mips64r2-nf-n32 = "mips64-nf-n32 mips64r2-nf-n32"
+QEMU_EXTRAOPTIONS_mips64r2-nf-n32 = " -cpu MIPS64R2-generic"
TUNE_FEATURES:tune-mips64r2el-nf-n32 = "${TUNE_FEATURES:tune-mips64el-nf-n32} mips64r2"
BASE_LIB:tune-mips64r2el-nf-n32 = "lib32"
MIPSPKGSFX_VARIANT:tune-mips64r2el-nf-n32 = "${TUNE_ARCH}"
PACKAGE_EXTRA_ARCHS:tune-mips64r2el-nf-n32 = "mips64el-nf-n32 mips64r2el-nf-n32"
+QEMU_EXTRAOPTIONS_mips64r2el-nf-32 = " -cpu MIPS64R2-generic"
# MIPS 64r2 o32
AVAILTUNES += "mips64r2-o32 mips64r2el-o32"
@@ -64,11 +72,13 @@ TUNE_FEATURES:tune-mips64r2-o32 = "${TUNE_FEATURES:tune-mips64-o32} mips64r2"
BASE_LIB:tune-mips64r2-o32 = "lib"
MIPSPKGSFX_VARIANT:tune-mips64r2-o32 = "${TUNE_ARCH}"
PACKAGE_EXTRA_ARCHS:tune-mips64r2-o32 = "mips mips64-o32 mips64r2-o32"
+QEMU_EXTRAOPTIONS_mips64r2-o32 = " -cpu MIPS64R2-generic"
TUNE_FEATURES:tune-mips64r2el-o32 = "${TUNE_FEATURES:tune-mips64el-o32} mips64r2"
BASE_LIB:tune-mips64r2el-o32 = "lib"
MIPSPKGSFX_VARIANT:tune-mips64r2el-o32 = "${TUNE_ARCH}"
PACKAGE_EXTRA_ARCHS:tune-mips64r2el-o32 = "mipsel mips64el-o32 mips64r2el-o32"
+QEMU_EXTRAOPTIONS_mips64r2el-o32 = " -cpu MIPS64R2-generic"
# MIPS 64r2 o32 and Soft Float
AVAILTUNES += "mips64r2-nf-o32 mips64r2el-nf-o32"
@@ -77,8 +87,10 @@ TUNE_FEATURES:tune-mips64r2-nf-o32 = "${TUNE_FEATURES:tune-mips64-nf-o32} mips64
BASE_LIB:tune-mips64r2-nf-o32 = "lib"
MIPSPKGSFX_VARIANT:tune-mips64r2-nf-o32 = "${TUNE_ARCH}"
PACKAGE_EXTRA_ARCHS:tune-mips64r2-nf-o32 = "mips-nf mips64r2-nf-o32"
+QEMU_EXTRAOPTIONS_mips64r2-nf-o32 = " -cpu MIPS64R2-generic"
TUNE_FEATURES:tune-mips64r2el-nf-o32 = "${TUNE_FEATURES:tune-mips64el-nf-o32} mips64r2"
BASE_LIB:tune-mips64r2el-nf-o32 = "lib"
MIPSPKGSFX_VARIANT:tune-mips64r2el-nf-o32 = "${TUNE_ARCH}"
PACKAGE_EXTRA_ARCHS:tune-mips64r2el-nf-o32 = "mipsel-nf mips64r2el-nf-o32"
+QEMU_EXTRAOPTIONS_mips64r2el-nf-o32 = " -cpu MIPS64R2-generic"
diff --git a/meta/conf/machine/include/powerpc/tune-power8.inc b/meta/conf/machine/include/powerpc/tune-power8.inc
new file mode 100644
index 0000000000..ee10f2428f
--- /dev/null
+++ b/meta/conf/machine/include/powerpc/tune-power8.inc
@@ -0,0 +1,31 @@
+DEFAULTTUNE ?= "ppc64p8le"
+
+require conf/machine/include/powerpc/arch-powerpc64.inc
+
+TUNEVALID[power8] = "Enable IBM Power8 specific processor optimizations"
+TUNE_CCARGS .= "${@bb.utils.contains('TUNE_FEATURES', 'power8', ' -mcpu=power8', '', d)}"
+
+AVAILTUNES += "ppcp8 ppc64p8 ppcp8le ppc64p8le"
+
+TUNE_FEATURES:tune-ppcp8 = "m32 fpu-hard power8 altivec bigendian"
+BASE_LIB:tune-ppcp8 = "lib"
+TUNE_PKGARCH:tune-ppcp8 = "ppcp8"
+PACKAGE_EXTRA_ARCHS:tune-ppcp8 = "${PACKAGE_EXTRA_ARCHS:tune-powerpc} ppcp8"
+
+TUNE_FEATURES:tune-ppc64p8 = "m64 fpu-hard power8 altivec bigendian"
+BASE_LIB:tune-ppc64p8 = "lib64"
+TUNE_PKGARCH:tune-ppc64p8 = "ppc64p8"
+PACKAGE_EXTRA_ARCHS:tune-ppc64p8 = "${PACKAGE_EXTRA_ARCHS:tune-powerpc64} ppc64p8"
+
+TUNE_FEATURES:tune-ppcp8le = "m32 fpu-hard power8 altivec"
+BASE_LIB:tune-ppcp8le = "lib"
+TUNE_PKGARCH:tune-ppcp8le = "ppcp8le"
+PACKAGE_EXTRA_ARCHS:tune-ppcp8le = "${PACKAGE_EXTRA_ARCHS:tune-powerpcle} ppcp8le"
+
+TUNE_FEATURES:tune-ppc64p8le = "m64 fpu-hard power8 altivec"
+BASE_LIB:tune-ppc64p8le = "lib64"
+TUNE_PKGARCH:tune-ppc64p8le = "ppc64p8le"
+PACKAGE_EXTRA_ARCHS:tune-ppc64p8le = "${PACKAGE_EXTRA_ARCHS:tune-powerpc64le} ppc64p8le"
+
+# glibc configure options to get power8 specific library
+GLIBC_EXTRA_OECONF += "${@bb.utils.contains('TUNE_FEATURES', 'power8', '--with-cpu=power8', '', d)}"
diff --git a/meta/conf/machine/include/qemu.inc b/meta/conf/machine/include/qemu.inc
index 0d71bcbbad..bb7aec7675 100644
--- a/meta/conf/machine/include/qemu.inc
+++ b/meta/conf/machine/include/qemu.inc
@@ -8,6 +8,7 @@ PREFERRED_PROVIDER_virtual/libgles3 ?= "mesa"
XSERVER ?= "xserver-xorg \
${@bb.utils.contains('DISTRO_FEATURES', 'opengl', 'mesa-driver-swrast xserver-xorg-extension-glx', '', d)} \
xf86-video-fbdev \
+ xf86-video-modesetting \
"
MACHINE_FEATURES = "alsa bluetooth usbgadget screen vfat"
@@ -22,8 +23,6 @@ RDEPENDS:${KERNEL_PACKAGE_NAME}-base = ""
# Use a common kernel recipe for all QEMU machines
PREFERRED_PROVIDER_virtual/kernel ??= "linux-yocto"
-EXTRA_IMAGEDEPENDS += "qemu-system-native qemu-helper-native:do_addto_recipe_sysroot"
-
# Provide the nfs server kernel module for all qemu images
KERNEL_FEATURES:append:pn-linux-yocto = " features/nfsd/nfsd-enable.scc"
KERNEL_FEATURES:append:pn-linux-yocto-rt = " features/nfsd/nfsd-enable.scc"
diff --git a/meta/conf/machine/include/riscv/qemuriscv.inc b/meta/conf/machine/include/riscv/qemuriscv.inc
index 1d32b4a582..d01d988eee 100644
--- a/meta/conf/machine/include/riscv/qemuriscv.inc
+++ b/meta/conf/machine/include/riscv/qemuriscv.inc
@@ -3,14 +3,13 @@ PREFERRED_PROVIDER_virtual/bootloader ?= "u-boot"
require conf/machine/include/qemu.inc
require conf/machine/include/riscv/tune-riscv.inc
-MACHINE_FEATURES = "screen keyboard ext2 ext3 serial"
+MACHINE_FEATURES += "keyboard ext2 ext3 serial"
KERNEL_IMAGETYPE = "Image"
KERNEL_IMAGETYPES += "uImage"
KEEPUIMAGE = "no"
SERIAL_CONSOLES ?= "115200;ttyS0 115200;hvc0"
-SERIAL_CONSOLES_CHECK = "${SERIAL_CONSOLES}"
IMAGE_FSTYPES += "ext4 wic.qcow2"
@@ -26,7 +25,7 @@ UBOOT_ENTRYPOINT:riscv32 = "0x80400000"
UBOOT_ENTRYPOINT:riscv64 = "0x80200000"
# qemuboot options
-QB_SMP = "-smp 4"
+QB_SMP ?= "-smp 4"
QB_KERNEL_CMDLINE_APPEND = "earlycon=sbi"
QB_MACHINE = "-machine virt"
QB_DEFAULT_BIOS = "fw_jump.elf"
@@ -34,6 +33,7 @@ QB_TAP_OPT = "-netdev tap,id=net0,ifname=@TAP@,script=no,downscript=no"
QB_NETWORK_DEVICE = "-device virtio-net-device,netdev=net0,mac=@MAC@"
QB_ROOTFS_OPT = "-drive id=disk0,file=@ROOTFS@,if=none,format=raw -device virtio-blk-device,drive=disk0"
QB_SERIAL_OPT = "-device virtio-serial-device -chardev null,id=virtcon -device virtconsole,chardev=virtcon"
-QB_TCPSERIAL_OPT = " -device virtio-serial-device -chardev socket,id=virtcon,port=@PORT@,host=127.0.0.1 -device virtconsole,chardev=virtcon"
+QB_TCPSERIAL_OPT = " -device virtio-serial-device -chardev socket,id=virtcon,port=@PORT@,host=127.0.0.1,nodelay=on -device virtconsole,chardev=virtcon"
QB_GRAPHICS = "-device bochs-display"
-QB_OPT_APPEND = "-device virtio-tablet-pci -device virtio-keyboard-pci"
+QB_OPT_APPEND = "-device qemu-xhci -device usb-tablet -device usb-kbd"
+QB_OPT_APPEND:riscv32 = "-device virtio-tablet-pci -device virtio-keyboard-pci"
diff --git a/meta/conf/machine/include/riscv/tune-riscv.inc b/meta/conf/machine/include/riscv/tune-riscv.inc
index 659801496c..804712077e 100644
--- a/meta/conf/machine/include/riscv/tune-riscv.inc
+++ b/meta/conf/machine/include/riscv/tune-riscv.inc
@@ -10,7 +10,7 @@ TUNEVALID[riscv64nc] = "Enable 64-bit RISC-V optimizations without compressed in
TUNEVALID[bigendian] = "Big endian mode"
-AVAILTUNES += "riscv64 riscv32 riscv64nf riscv32nf"
+AVAILTUNES += "riscv64 riscv32 riscv64nc riscv64nf riscv32nf"
# Default
TUNE_FEATURES:tune-riscv64 = "riscv64"
diff --git a/meta/conf/machine/include/x86/qemuboot-x86.inc b/meta/conf/machine/include/x86/qemuboot-x86.inc
index 3953679366..6ae03633ae 100644
--- a/meta/conf/machine/include/x86/qemuboot-x86.inc
+++ b/meta/conf/machine/include/x86/qemuboot-x86.inc
@@ -1,11 +1,11 @@
# For runqemu
IMAGE_CLASSES += "qemuboot"
-QB_SMP = "-smp 4"
-QB_CPU:x86 = "-cpu IvyBridge -machine q35,i8042=off"
-QB_CPU_KVM:x86 = "-cpu IvyBridge -machine q35,i8042=off"
+QB_SMP ?= "-smp 4"
+QB_CPU:x86 ?= "-cpu IvyBridge -machine q35,i8042=off"
+QB_CPU_KVM:x86 ?= "-cpu IvyBridge -machine q35,i8042=off"
-QB_CPU:x86-64 = "-cpu IvyBridge -machine q35,i8042=off"
-QB_CPU_KVM:x86-64 = "-cpu IvyBridge -machine q35,i8042=off"
+QB_CPU:x86-64 ?= "-cpu IvyBridge -machine q35,i8042=off"
+QB_CPU_KVM:x86-64 ?= "-cpu IvyBridge -machine q35,i8042=off"
QB_AUDIO_DRV = "alsa"
QB_AUDIO_OPT = "-device AC97"
diff --git a/meta/conf/machine/include/x86/tune-core2.inc b/meta/conf/machine/include/x86/tune-core2.inc
index 97b7c1b188..082fd4efc3 100644
--- a/meta/conf/machine/include/x86/tune-core2.inc
+++ b/meta/conf/machine/include/x86/tune-core2.inc
@@ -21,18 +21,18 @@ TUNE_FEATURES:tune-core2-32 = "${TUNE_FEATURES:tune-x86} core2"
BASE_LIB:tune-core2-32 = "lib"
TUNE_PKGARCH:tune-core2-32 = "core2-32"
PACKAGE_EXTRA_ARCHS:tune-core2-32 = "${PACKAGE_EXTRA_ARCHS:tune-i686} core2-32"
-QEMU_EXTRAOPTIONS_core2-32 = " -cpu n270"
+QEMU_EXTRAOPTIONS_core2-32 = " -cpu Nehalem,check=false"
AVAILTUNES += "core2-64"
TUNE_FEATURES:tune-core2-64 = "${TUNE_FEATURES:tune-x86-64} core2"
BASE_LIB:tune-core2-64 = "lib64"
TUNE_PKGARCH:tune-core2-64 = "core2-64"
PACKAGE_EXTRA_ARCHS:tune-core2-64 = "${PACKAGE_EXTRA_ARCHS:tune-x86-64} core2-64"
-QEMU_EXTRAOPTIONS_core2-64 = " -cpu core2duo"
+QEMU_EXTRAOPTIONS_core2-64 = " -cpu Nehalem,check=false"
AVAILTUNES += "core2-64-x32"
TUNE_FEATURES:tune-core2-64-x32 = "${TUNE_FEATURES:tune-x86-64-x32} core2"
BASE_LIB:tune-core2-64-x32 = "libx32"
TUNE_PKGARCH:tune-core2-64-x32 = "core2-64-x32"
PACKAGE_EXTRA_ARCHS:tune-core2-64-x32 = "${PACKAGE_EXTRA_ARCHS:tune-x86-64-x32} core2-64-x32"
-QEMU_EXTRAOPTIONS_core2-64-x32 = " -cpu core2duo"
+QEMU_EXTRAOPTIONS_core2-64-x32 = " -cpu Nehalem,check=false"
diff --git a/meta/conf/machine/include/x86/tune-x86-64-v3.inc b/meta/conf/machine/include/x86/tune-x86-64-v3.inc
new file mode 100644
index 0000000000..254f03c590
--- /dev/null
+++ b/meta/conf/machine/include/x86/tune-x86-64-v3.inc
@@ -0,0 +1,31 @@
+# Settings for the GCC(1) cpu-type "x86-64-v3":
+#
+# CPUs with AVX, AVX2, BMI1, BMI2, F16C, FMA, LZCNT, MOVBE, XSAVE.
+# (but not AVX512).
+# See https://www.phoronix.com/news/GCC-11-x86-64-Feature-Levels for details.
+#
+# This tune is recommended for Intel Haswell/AMD Excavator CPUs (and later).
+#
+DEFAULTTUNE ?= "x86-64-v3"
+
+# Include the previous tune to pull in PACKAGE_EXTRA_ARCHS
+require conf/machine/include/x86/tune-corei7.inc
+
+# Extra tune features
+TUNEVALID[x86-64-v3] = "Enable x86-64-v3 specific processor optimizations"
+TUNE_CCARGS .= "${@bb.utils.contains('TUNE_FEATURES', 'x86-64-v3', ' -march=x86-64-v3', '', d)}"
+
+# Extra tune selections
+AVAILTUNES += "x86-64-v3"
+TUNE_FEATURES:tune-x86-64-v3 = "${TUNE_FEATURES:tune-x86-64} x86-64-v3"
+BASE_LIB:tune-x86-64-v3 = "lib64"
+TUNE_PKGARCH:tune-x86-64-v3 = "x86-64-v3"
+PACKAGE_EXTRA_ARCHS:tune-x86-64-v3 = "${PACKAGE_EXTRA_ARCHS:tune-corei7-64} x86-64-v3"
+QEMU_EXTRAOPTIONS_x86-64-v3 = " -cpu Skylake-Client,check=false"
+
+AVAILTUNES += "x86-64-v3-x32"
+TUNE_FEATURES:tune-x86-64-v3-x32 = "${TUNE_FEATURES:tune-x86-64-x32} x86-64-v3"
+BASE_LIB:tune-x86-64-v3-x32 = "libx32"
+TUNE_PKGARCH:tune-x86-64-v3-x32 = "x86-64-v3-x32"
+PACKAGE_EXTRA_ARCHS:tune-x86-64-v3-x32 = "${PACKAGE_EXTRA_ARCHS:tune-corei7-64-x32} x86-64-v3-x32"
+QEMU_EXTRAOPTIONS_x86-64-v3-x32 = " -cpu Skylake-Client,check=false"
diff --git a/meta/conf/machine/include/x86/x86-base.inc b/meta/conf/machine/include/x86/x86-base.inc
index b70924f7d9..fc6c39148d 100644
--- a/meta/conf/machine/include/x86/x86-base.inc
+++ b/meta/conf/machine/include/x86/x86-base.inc
@@ -18,7 +18,6 @@ SERIAL_CONSOLES ?= "115200;ttyS0"
# kernel-related variables
#
PREFERRED_PROVIDER_virtual/kernel ??= "linux-yocto"
-PREFERRED_VERSION_linux-yocto ??= "5.15%"
#
# XSERVER subcomponents, used to build the XSERVER variable
diff --git a/meta/conf/machine/qemuarm.conf b/meta/conf/machine/qemuarm.conf
index 1bd4e3e154..943ce7c16a 100644
--- a/meta/conf/machine/qemuarm.conf
+++ b/meta/conf/machine/qemuarm.conf
@@ -10,15 +10,12 @@ KERNEL_IMAGETYPE = "zImage"
UBOOT_MACHINE ?= "qemu_arm_defconfig"
SERIAL_CONSOLES ?= "115200;ttyAMA0 115200;hvc0"
-SERIAL_CONSOLES_CHECK = "${SERIAL_CONSOLES}"
# For runqemu
QB_SYSTEM_NAME = "qemu-system-arm"
QB_MACHINE = "-machine virt,highmem=off"
QB_CPU = "-cpu cortex-a15"
-QB_SMP = "-smp 4"
-# Standard Serial console
-QB_KERNEL_CMDLINE_APPEND = "vmalloc=256"
+QB_SMP ?= "-smp 4"
# For graphics to work we need to define the VGA device as well as the necessary USB devices
QB_GRAPHICS = "-device virtio-gpu-pci"
QB_OPT_APPEND = "-device qemu-xhci -device usb-tablet -device usb-kbd"
@@ -29,6 +26,6 @@ QB_NETWORK_DEVICE = "-device virtio-net-device,netdev=net0,mac=@MAC@"
QB_ROOTFS_OPT = "-drive id=disk0,file=@ROOTFS@,if=none,format=raw -device virtio-blk-device,drive=disk0"
# Virtio serial console
QB_SERIAL_OPT = "-device virtio-serial-device -chardev null,id=virtcon -device virtconsole,chardev=virtcon"
-QB_TCPSERIAL_OPT = "-device virtio-serial-device -chardev socket,id=virtcon,port=@PORT@,host=127.0.0.1 -device virtconsole,chardev=virtcon"
+QB_TCPSERIAL_OPT = "-device virtio-serial-device -chardev socket,id=virtcon,port=@PORT@,host=127.0.0.1,nodelay=on -device virtconsole,chardev=virtcon"
KMACHINE:qemuarm = "qemuarma15"
diff --git a/meta/conf/machine/qemuarm64.conf b/meta/conf/machine/qemuarm64.conf
index ffc51c803f..a096d964db 100644
--- a/meta/conf/machine/qemuarm64.conf
+++ b/meta/conf/machine/qemuarm64.conf
@@ -10,13 +10,12 @@ KERNEL_IMAGETYPE = "Image"
UBOOT_MACHINE ?= "qemu_arm64_defconfig"
SERIAL_CONSOLES ?= "115200;ttyAMA0 115200;hvc0"
-SERIAL_CONSOLES_CHECK = "${SERIAL_CONSOLES}"
# For runqemu
QB_SYSTEM_NAME = "qemu-system-aarch64"
QB_MACHINE = "-machine virt"
QB_CPU = "-cpu cortex-a57"
-QB_SMP = "-smp 4"
+QB_SMP ?= "-smp 4"
QB_CPU_KVM = "-cpu host -machine gic-version=3"
# For graphics to work we need to define the VGA device as well as the necessary USB devices
QB_GRAPHICS = "-device virtio-gpu-pci"
@@ -28,4 +27,4 @@ QB_NETWORK_DEVICE = "-device virtio-net-pci,netdev=net0,mac=@MAC@"
QB_ROOTFS_OPT = "-drive id=disk0,file=@ROOTFS@,if=none,format=raw -device virtio-blk-pci,drive=disk0"
# Virtio serial console
QB_SERIAL_OPT = "-device virtio-serial-pci -chardev null,id=virtcon -device virtconsole,chardev=virtcon"
-QB_TCPSERIAL_OPT = "-device virtio-serial-pci -chardev socket,id=virtcon,port=@PORT@,host=127.0.0.1 -device virtconsole,chardev=virtcon"
+QB_TCPSERIAL_OPT = "-device virtio-serial-pci -chardev socket,id=virtcon,port=@PORT@,host=127.0.0.1,nodelay=on -device virtconsole,chardev=virtcon"
diff --git a/meta/conf/machine/qemuarmv5.conf b/meta/conf/machine/qemuarmv5.conf
index abdae5f361..ecd2873d91 100644
--- a/meta/conf/machine/qemuarmv5.conf
+++ b/meta/conf/machine/qemuarmv5.conf
@@ -12,10 +12,8 @@ SERIAL_CONSOLES ?= "115200;ttyAMA0 115200;ttyAMA1"
# For runqemu
QB_SYSTEM_NAME = "qemu-system-arm"
QB_MACHINE = "-machine versatilepb"
-QB_KERNEL_CMDLINE_APPEND = "vmalloc=256"
QB_GRAPHICS = "-device virtio-gpu-pci"
QB_OPT_APPEND = "-device qemu-xhci -device usb-tablet -device usb-kbd"
-QB_DTB = "${@oe.utils.version_less_or_equal('PREFERRED_VERSION_linux-yocto', '4.7', '', 'zImage-versatile-pb.dtb', d)}"
+QB_DTB ?= "zImage-versatile-pb.dtb"
-PREFERRED_VERSION_linux-yocto ??= "5.15%"
KMACHINE:qemuarmv5 = "arm-versatile-926ejs"
diff --git a/meta/conf/machine/qemuloongarch64.conf b/meta/conf/machine/qemuloongarch64.conf
new file mode 100644
index 0000000000..675d525afd
--- /dev/null
+++ b/meta/conf/machine/qemuloongarch64.conf
@@ -0,0 +1,11 @@
+#@TYPE: Machine
+#@NAME: generic loongarch64 machine
+#@DESCRIPTION: Machine configuration for running a generic loongarch64
+
+require conf/machine/include/loongarch/qemuloongarch.inc
+
+XVISOR_PLAT = "loongarch/virt64"
+
+EXTRA_IMAGEDEPENDS += "u-boot"
+UBOOT_MACHINE = "qemu-loongarch64_smode_defconfig"
+UBOOT_ELF = "u-boot"
diff --git a/meta/conf/machine/qemuppc64.conf b/meta/conf/machine/qemuppc64.conf
index 304f06a30d..2fbd26a6f9 100644
--- a/meta/conf/machine/qemuppc64.conf
+++ b/meta/conf/machine/qemuppc64.conf
@@ -13,8 +13,9 @@ SERIAL_CONSOLES ?= "115200;hvc0"
QB_SYSTEM_NAME = "qemu-system-ppc64"
QB_MACHINE = "-machine pseries"
QB_CPU = "-cpu POWER9"
-QB_SMP = "-smp 2"
+QB_SMP ?= "-smp 2"
+QB_NFSROOTFS_EXTRA_OPT = "wsize=524288,rsize=524288"
QB_KERNEL_CMDLINE_APPEND = "console=hvc0 nohugevmalloc"
#QB_OPT_APPEND += "-device qemu-xhci -device usb-tablet -device usb-kbd"
QB_OPT_APPEND = "-usb -device usb-tablet"
diff --git a/meta/conf/machine/qemux86-64.conf b/meta/conf/machine/qemux86-64.conf
index 901353499c..14873a3b4f 100644
--- a/meta/conf/machine/qemux86-64.conf
+++ b/meta/conf/machine/qemux86-64.conf
@@ -10,7 +10,7 @@ PREFERRED_PROVIDER_virtual/libgles3 ?= "mesa"
require conf/machine/include/qemu.inc
DEFAULTTUNE ?= "core2-64"
-require conf/machine/include/x86/tune-core2.inc
+require conf/machine/include/x86/tune-x86-64-v3.inc
require conf/machine/include/x86/qemuboot-x86.inc
UBOOT_MACHINE ?= "qemu-x86_64_defconfig"
diff --git a/meta/conf/multilib.conf b/meta/conf/multilib.conf
index 7f3b9463ef..ef3605a73d 100644
--- a/meta/conf/multilib.conf
+++ b/meta/conf/multilib.conf
@@ -2,6 +2,7 @@
baselib = "${@d.getVar('BASE_LIB:tune-' + (d.getVar('DEFAULTTUNE') or 'INVALID')) or d.getVar('BASELIB')}"
MULTILIB_VARIANTS = "${@extend_variants(d,'MULTILIBS','multilib')}"
+MULTILIB_VARIANTS[vardeps] += "MULTILIBS"
MULTILIB_SAVE_VARNAME = "DEFAULTTUNE TARGET_ARCH TARGET_SYS TARGET_VENDOR"
MULTILIBS ??= "multilib:lib32"
diff --git a/meta/conf/sanity.conf b/meta/conf/sanity.conf
index e6cb2b45fe..d2f56a3fb0 100644
--- a/meta/conf/sanity.conf
+++ b/meta/conf/sanity.conf
@@ -3,7 +3,7 @@
# See sanity.bbclass
#
# Expert users can confirm their sanity with "touch conf/sanity.conf"
-BB_MIN_VERSION = "1.53.1"
+BB_MIN_VERSION = "2.7.3"
SANITY_ABIFILE = "${TMPDIR}/abi_version"
diff --git a/meta/conf/templates/default/conf-summary.txt b/meta/conf/templates/default/conf-summary.txt
new file mode 100644
index 0000000000..e49172fa0c
--- /dev/null
+++ b/meta/conf/templates/default/conf-summary.txt
@@ -0,0 +1 @@
+This is the default build configuration for the openembedded-core layer.
diff --git a/meta/conf/testexport.conf b/meta/conf/testexport.conf
new file mode 100644
index 0000000000..8880f108fb
--- /dev/null
+++ b/meta/conf/testexport.conf
@@ -0,0 +1,3 @@
+TEST_EXPORT_SDK_PACKAGES ?= ""
+TEST_EXPORT_SDK_DIR ?= "sdk"
+TEST_EXPORT_SDK_NAME ?= "testexport-tools-nativesdk"
diff --git a/meta/files/common-licenses/LGPL-3.0-with-zeromq-exception b/meta/files/common-licenses/LGPL-3.0-with-zeromq-exception
new file mode 100644
index 0000000000..02e943c4ac
--- /dev/null
+++ b/meta/files/common-licenses/LGPL-3.0-with-zeromq-exception
@@ -0,0 +1,181 @@
+ GNU LESSER GENERAL PUBLIC LICENSE
+ Version 3, 29 June 2007
+
+ Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/>
+ Everyone is permitted to copy and distribute verbatim copies
+ of this license document, but changing it is not allowed.
+
+
+ This version of the GNU Lesser General Public License incorporates
+the terms and conditions of version 3 of the GNU General Public
+License, supplemented by the additional permissions listed below.
+
+ 0. Additional Definitions.
+
+ As used herein, "this License" refers to version 3 of the GNU Lesser
+General Public License, and the "GNU GPL" refers to version 3 of the GNU
+General Public License.
+
+ "The Library" refers to a covered work governed by this License,
+other than an Application or a Combined Work as defined below.
+
+ An "Application" is any work that makes use of an interface provided
+by the Library, but which is not otherwise based on the Library.
+Defining a subclass of a class defined by the Library is deemed a mode
+of using an interface provided by the Library.
+
+ A "Combined Work" is a work produced by combining or linking an
+Application with the Library. The particular version of the Library
+with which the Combined Work was made is also called the "Linked
+Version".
+
+ The "Minimal Corresponding Source" for a Combined Work means the
+Corresponding Source for the Combined Work, excluding any source code
+for portions of the Combined Work that, considered in isolation, are
+based on the Application, and not on the Linked Version.
+
+ The "Corresponding Application Code" for a Combined Work means the
+object code and/or source code for the Application, including any data
+and utility programs needed for reproducing the Combined Work from the
+Application, but excluding the System Libraries of the Combined Work.
+
+ 1. Exception to Section 3 of the GNU GPL.
+
+ You may convey a covered work under sections 3 and 4 of this License
+without being bound by section 3 of the GNU GPL.
+
+ 2. Conveying Modified Versions.
+
+ If you modify a copy of the Library, and, in your modifications, a
+facility refers to a function or data to be supplied by an Application
+that uses the facility (other than as an argument passed when the
+facility is invoked), then you may convey a copy of the modified
+version:
+
+ a) under this License, provided that you make a good faith effort to
+ ensure that, in the event an Application does not supply the
+ function or data, the facility still operates, and performs
+ whatever part of its purpose remains meaningful, or
+
+ b) under the GNU GPL, with none of the additional permissions of
+ this License applicable to that copy.
+
+ 3. Object Code Incorporating Material from Library Header Files.
+
+ The object code form of an Application may incorporate material from
+a header file that is part of the Library. You may convey such object
+code under terms of your choice, provided that, if the incorporated
+material is not limited to numerical parameters, data structure
+layouts and accessors, or small macros, inline functions and templates
+(ten or fewer lines in length), you do both of the following:
+
+ a) Give prominent notice with each copy of the object code that the
+ Library is used in it and that the Library and its use are
+ covered by this License.
+
+ b) Accompany the object code with a copy of the GNU GPL and this license
+ document.
+
+ 4. Combined Works.
+
+ You may convey a Combined Work under terms of your choice that,
+taken together, effectively do not restrict modification of the
+portions of the Library contained in the Combined Work and reverse
+engineering for debugging such modifications, if you also do each of
+the following:
+
+ a) Give prominent notice with each copy of the Combined Work that
+ the Library is used in it and that the Library and its use are
+ covered by this License.
+
+ b) Accompany the Combined Work with a copy of the GNU GPL and this license
+ document.
+
+ c) For a Combined Work that displays copyright notices during
+ execution, include the copyright notice for the Library among
+ these notices, as well as a reference directing the user to the
+ copies of the GNU GPL and this license document.
+
+ d) Do one of the following:
+
+ 0) Convey the Minimal Corresponding Source under the terms of this
+ License, and the Corresponding Application Code in a form
+ suitable for, and under terms that permit, the user to
+ recombine or relink the Application with a modified version of
+ the Linked Version to produce a modified Combined Work, in the
+ manner specified by section 6 of the GNU GPL for conveying
+ Corresponding Source.
+
+ 1) Use a suitable shared library mechanism for linking with the
+ Library. A suitable mechanism is one that (a) uses at run time
+ a copy of the Library already present on the user's computer
+ system, and (b) will operate properly with a modified version
+ of the Library that is interface-compatible with the Linked
+ Version.
+
+ e) Provide Installation Information, but only if you would otherwise
+ be required to provide such information under section 6 of the
+ GNU GPL, and only to the extent that such information is
+ necessary to install and execute a modified version of the
+ Combined Work produced by recombining or relinking the
+ Application with a modified version of the Linked Version. (If
+ you use option 4d0, the Installation Information must accompany
+ the Minimal Corresponding Source and Corresponding Application
+ Code. If you use option 4d1, you must provide the Installation
+ Information in the manner specified by section 6 of the GNU GPL
+ for conveying Corresponding Source.)
+
+ 5. Combined Libraries.
+
+ You may place library facilities that are a work based on the
+Library side by side in a single library together with other library
+facilities that are not Applications and are not covered by this
+License, and convey such a combined library under terms of your
+choice, if you do both of the following:
+
+ a) Accompany the combined library with a copy of the same work based
+ on the Library, uncombined with any other library facilities,
+ conveyed under the terms of this License.
+
+ b) Give prominent notice with the combined library that part of it
+ is a work based on the Library, and explaining where to find the
+ accompanying uncombined form of the same work.
+
+ 6. Revised Versions of the GNU Lesser General Public License.
+
+ The Free Software Foundation may publish revised and/or new versions
+of the GNU Lesser General Public License from time to time. Such new
+versions will be similar in spirit to the present version, but may
+differ in detail to address new problems or concerns.
+
+ Each version is given a distinguishing version number. If the
+Library as you received it specifies that a certain numbered version
+of the GNU Lesser General Public License "or any later version"
+applies to it, you have the option of following the terms and
+conditions either of that published version or of any later version
+published by the Free Software Foundation. If the Library as you
+received it does not specify a version number of the GNU Lesser
+General Public License, you may choose any version of the GNU Lesser
+General Public License ever published by the Free Software Foundation.
+
+ If the Library as you received it specifies that a proxy can decide
+whether future versions of the GNU Lesser General Public License shall
+apply, that proxy's public statement of acceptance of any version is
+permanent authorization for you to choose that version for the
+Library.
+
+--------------------------------------------------------------------------------
+
+ SPECIAL EXCEPTION GRANTED BY COPYRIGHT HOLDERS
+
+As a special exception, copyright holders give you permission to link this
+library with independent modules to produce an executable, regardless of
+the license terms of these independent modules, and to copy and distribute
+the resulting executable under terms of your choice, provided that you also
+meet, for each linked independent module, the terms and conditions of
+the license of that module. An independent module is a module which is not
+derived from or based on this library. If you modify this library, you must
+extend this exception to your version of the library.
+
+Note: this exception relieves you of any obligations under sections 4 and 5
+of this license, and section 6 of the GNU General Public License.
diff --git a/meta/files/ext-sdk-prepare.py b/meta/files/ext-sdk-prepare.py
index d191e5e19c..89b0403089 100644
--- a/meta/files/ext-sdk-prepare.py
+++ b/meta/files/ext-sdk-prepare.py
@@ -71,7 +71,7 @@ def main():
ret = run_command_interruptible('BB_SETSCENE_ENFORCE=1 bitbake --quiet %s' % ' '.join(sdk_targets))
if not ret:
- ret = run_command_interruptible('bitbake --quiet build-sysroots')
+ ret = run_command_interruptible('bitbake --quiet build-sysroots -c build_native_sysroot && bitbake --quiet build-sysroots -c build_target_sysroot')
lastlog = get_last_consolelog()
if lastlog:
with open(lastlog, 'r') as f:
diff --git a/meta/files/fs-perms-persistent-log.txt b/meta/files/fs-perms-persistent-log.txt
index 518c1be3c9..61f0a6e26a 100644
--- a/meta/files/fs-perms-persistent-log.txt
+++ b/meta/files/fs-perms-persistent-log.txt
@@ -38,7 +38,6 @@ ${datadir}/locale 0755 root root true 0644 root root
# Cleanup headers
${includedir} 0755 root root true 0644 root root
-${oldincludedir} 0755 root root true 0644 root root
# Cleanup debug src
/usr/src/debug 0755 root root true 0644 root root
diff --git a/meta/files/fs-perms.txt b/meta/files/fs-perms.txt
index daa4aed840..48191f504c 100644
--- a/meta/files/fs-perms.txt
+++ b/meta/files/fs-perms.txt
@@ -38,7 +38,6 @@ ${datadir}/locale 0755 root root true 0644 root root
# Cleanup headers
${includedir} 0755 root root true 0644 root root
-${oldincludedir} 0755 root root true 0644 root root
# Cleanup debug src
/usr/src/debug 0755 root root true 0644 root root
diff --git a/meta/files/layers.example.json b/meta/files/layers.example.json
new file mode 100644
index 0000000000..0a6a6a7b48
--- /dev/null
+++ b/meta/files/layers.example.json
@@ -0,0 +1,48 @@
+{
+ "sources": {
+ "meta-alex": {
+ "contains_this_file": true,
+ "git-remote": {
+ "branch": "master",
+ "describe": "",
+ "remotes": {
+ "remote-alex": {
+ "uri": "https://github.com/kanavin/meta-alex"
+ }
+ },
+ "rev": "05b25605fb8b2399e4706d7323828676bf0da0b5"
+ },
+ "path": "meta-alex"
+ },
+ "meta-intel": {
+ "git-remote": {
+ "branch": "master",
+ "describe": "15.0-hardknott-3.3-310-g0a96edae",
+ "remotes": {
+ "origin": {
+ "uri": "git://git.yoctoproject.org/meta-intel"
+ }
+ },
+ "rev": "0a96edae609a3f48befac36af82cf1eed6786b4a"
+ },
+ "path": "meta-intel"
+ },
+ "poky": {
+ "git-remote": {
+ "branch": "akanavin/setup-layers",
+ "describe": "4.1_M1-374-g9dda719b2a",
+ "remotes": {
+ "origin": {
+ "uri": "git://git.yoctoproject.org/poky"
+ },
+ "poky-contrib": {
+ "uri": "ssh://git@push.yoctoproject.org/poky-contrib"
+ }
+ },
+ "rev": "9dda719b2a4727a4d43a6ab8d9e23f8ca68790ec"
+ },
+ "path": "poky"
+ }
+ },
+ "version": "1.0"
+}
diff --git a/meta/files/layers.schema.json b/meta/files/layers.schema.json
new file mode 100644
index 0000000000..659ee8da49
--- /dev/null
+++ b/meta/files/layers.schema.json
@@ -0,0 +1,76 @@
+{
+ "description": "OpenEmbedder Layer Setup Manifest",
+ "type": "object",
+ "additionalProperties": false,
+ "required": [
+ "version"
+ ],
+ "properties": {
+ "version": {
+ "description": "The version of this document; currently '1.0'",
+ "enum": ["1.0"]
+ },
+ "sources": {
+ "description": "The dict of layer sources",
+ "type": "object",
+ "patternProperties": { ".*" : {
+ "type": "object",
+ "description": "The upstream source from which a set of layers may be fetched",
+ "additionalProperties": false,
+ "required": [
+ "path"
+ ],
+ "properties": {
+ "path": {
+ "description": "The path where this layer source will be placed when fetching",
+ "type": "string"
+ },
+ "contains_this_file": {
+ "description": "Whether the directory with the layer source also contains this json description. Tools may want to skip the checkout of the source then.",
+ "type": "boolean"
+ },
+ "git-remote": {
+ "description": "A remote git source from which to fetch",
+ "type": "object",
+ "additionalProperties": false,
+ "required": [
+ "rev"
+ ],
+ "properties": {
+ "branch": {
+ "description": "The git branch to fetch (optional)",
+ "type": "string"
+ },
+ "rev": {
+ "description": "The git revision to checkout",
+ "type": "string"
+ },
+ "describe": {
+ "description": "The output of 'git describe' (human readable description of the revision using tags in revision history).",
+ "type": "string"
+ },
+ "remotes": {
+ "description": "The dict of git remotes to add to this repository",
+ "type": "object",
+ "patternProperties": { ".*" : {
+ "description": "A git remote",
+ "type": "object",
+ "addtionalProperties": false,
+ "required": [
+ "uri"
+ ],
+ "properties": {
+ "uri": {
+ "description": "The URI for the remote",
+ "type": "string"
+ }
+ }
+ }}
+ }
+ }
+ }
+ }
+ }
+ }}
+ }
+}
diff --git a/meta/files/overlayfs-create-dirs.service.in b/meta/files/overlayfs-create-dirs.service.in
index 61b2b9321b..c949a6dc73 100644
--- a/meta/files/overlayfs-create-dirs.service.in
+++ b/meta/files/overlayfs-create-dirs.service.in
@@ -6,8 +6,7 @@ DefaultDependencies=no
[Service]
Type=oneshot
-ExecStart=mkdir -p {DATA_MOUNT_POINT}/upper{LOWERDIR}
-ExecStart=mkdir -p {DATA_MOUNT_POINT}/workdir{LOWERDIR}
+ExecStart=/usr/sbin/overlayfs-create-dirs.sh {LOWERDIR} {DATA_MOUNT_POINT}
RemainAfterExit=true
StandardOutput=journal
diff --git a/meta/files/overlayfs-create-dirs.sh b/meta/files/overlayfs-create-dirs.sh
new file mode 100644
index 0000000000..9954c34352
--- /dev/null
+++ b/meta/files/overlayfs-create-dirs.sh
@@ -0,0 +1,11 @@
+#!/bin/sh
+# This script is intended to be used sorely by overlayfs-create-dirs.service
+# Usage: overlayfs-create-dirs.sh <LOWERDIR> <DATA_MOUNT_POINT>
+
+lowerdir=$1
+datamountpoint=$2
+mkdir -p ${datamountpoint}/upper${lowerdir}
+mkdir -p ${datamountpoint}/workdir${lowerdir}
+if [ -d "$lowerdir" ]; then
+ chown $(stat -c "%U:%G" ${lowerdir}) ${datamountpoint}/upper${lowerdir}
+fi
diff --git a/meta/files/overlayfs-etc-preinit.sh.in b/meta/files/overlayfs-etc-preinit.sh.in
index 43c9b04eb9..b05e3957a3 100644
--- a/meta/files/overlayfs-etc-preinit.sh.in
+++ b/meta/files/overlayfs-etc-preinit.sh.in
@@ -3,31 +3,46 @@
echo "PREINIT: Start"
PATH=/sbin:/bin:/usr/sbin:/usr/bin
-mount -o remount,rw /
+if {CREATE_MOUNT_DIRS}; then
+ mount -o remount,rw /
-mkdir -p /proc
-mkdir -p /sys
-mkdir -p /run
-mkdir -p /var/run
+ mkdir -p /proc
+ mkdir -p /sys
+ mkdir -p /run
+ mkdir -p /var/run
+ mkdir -p {OVERLAYFS_ETC_MOUNT_POINT}
+fi
mount -t proc proc /proc
mount -t sysfs sysfs /sys
[ -z "$CONSOLE" ] && CONSOLE="/dev/console"
-mkdir -p {OVERLAYFS_ETC_MOUNT_POINT}
+BASE_OVERLAY_ETC_DIR={OVERLAYFS_ETC_MOUNT_POINT}/overlay-etc
+UPPER_DIR=$BASE_OVERLAY_ETC_DIR/upper
+WORK_DIR=$BASE_OVERLAY_ETC_DIR/work
+LOWER_DIR=$BASE_OVERLAY_ETC_DIR/lower
+
if mount -n -t {OVERLAYFS_ETC_FSTYPE} \
-o {OVERLAYFS_ETC_MOUNT_OPTIONS} \
{OVERLAYFS_ETC_DEVICE} {OVERLAYFS_ETC_MOUNT_POINT}
then
- mkdir -p {OVERLAYFS_ETC_MOUNT_POINT}/overlay-etc/upper
- mkdir -p {OVERLAYFS_ETC_MOUNT_POINT}/overlay-etc/work
+ mkdir -p $UPPER_DIR
+ mkdir -p $WORK_DIR
+
+ if {OVERLAYFS_ETC_EXPOSE_LOWER}; then
+ mkdir -p $LOWER_DIR
+
+ # provide read-only access to original /etc content
+ mount -o bind,ro /etc $LOWER_DIR
+ fi
+
mount -n -t overlay \
- -o upperdir={OVERLAYFS_ETC_MOUNT_POINT}/overlay-etc/upper \
+ -o upperdir=$UPPER_DIR \
-o lowerdir=/etc \
- -o workdir={OVERLAYFS_ETC_MOUNT_POINT}/overlay-etc/work \
+ -o workdir=$WORK_DIR \
-o index=off,xino=off,redirect_dir=off,metacopy=off \
- {OVERLAYFS_ETC_MOUNT_POINT}/overlay-etc/upper /etc || \
+ $UPPER_DIR /etc || \
echo "PREINIT: Mounting etc-overlay failed!"
else
echo "PREINIT: Mounting </data> failed!"
diff --git a/meta/files/screenshot-tests/core-image-sato-qemuarm.png b/meta/files/screenshot-tests/core-image-sato-qemuarm.png
new file mode 100644
index 0000000000..5156eb2f16
--- /dev/null
+++ b/meta/files/screenshot-tests/core-image-sato-qemuarm.png
Binary files differ
diff --git a/meta/files/screenshot-tests/core-image-sato-qemuarm64.png b/meta/files/screenshot-tests/core-image-sato-qemuarm64.png
new file mode 100644
index 0000000000..372964e383
--- /dev/null
+++ b/meta/files/screenshot-tests/core-image-sato-qemuarm64.png
Binary files differ
diff --git a/meta/files/screenshot-tests/core-image-sato-qemumips.png b/meta/files/screenshot-tests/core-image-sato-qemumips.png
new file mode 100644
index 0000000000..372964e383
--- /dev/null
+++ b/meta/files/screenshot-tests/core-image-sato-qemumips.png
Binary files differ
diff --git a/meta/files/screenshot-tests/core-image-sato-qemumips64.png b/meta/files/screenshot-tests/core-image-sato-qemumips64.png
new file mode 100644
index 0000000000..372964e383
--- /dev/null
+++ b/meta/files/screenshot-tests/core-image-sato-qemumips64.png
Binary files differ
diff --git a/meta/files/screenshot-tests/core-image-sato-qemuppc.png b/meta/files/screenshot-tests/core-image-sato-qemuppc.png
new file mode 100644
index 0000000000..17f7d1b578
--- /dev/null
+++ b/meta/files/screenshot-tests/core-image-sato-qemuppc.png
Binary files differ
diff --git a/meta/files/screenshot-tests/core-image-sato-qemuriscv64.png b/meta/files/screenshot-tests/core-image-sato-qemuriscv64.png
new file mode 100644
index 0000000000..4f8980f6f5
--- /dev/null
+++ b/meta/files/screenshot-tests/core-image-sato-qemuriscv64.png
Binary files differ
diff --git a/meta/files/screenshot-tests/core-image-sato-qemux86-64.png b/meta/files/screenshot-tests/core-image-sato-qemux86-64.png
new file mode 100644
index 0000000000..201c523bde
--- /dev/null
+++ b/meta/files/screenshot-tests/core-image-sato-qemux86-64.png
Binary files differ
diff --git a/meta/files/screenshot-tests/core-image-sato-qemux86.png b/meta/files/screenshot-tests/core-image-sato-qemux86.png
new file mode 100644
index 0000000000..201c523bde
--- /dev/null
+++ b/meta/files/screenshot-tests/core-image-sato-qemux86.png
Binary files differ
diff --git a/meta/files/screenshot-tests/core-image-sato-sdk-qemuarm.png b/meta/files/screenshot-tests/core-image-sato-sdk-qemuarm.png
new file mode 120000
index 0000000000..9ba26a030c
--- /dev/null
+++ b/meta/files/screenshot-tests/core-image-sato-sdk-qemuarm.png
@@ -0,0 +1 @@
+core-image-sato-qemuarm.png \ No newline at end of file
diff --git a/meta/files/screenshot-tests/core-image-sato-sdk-qemuarm64.png b/meta/files/screenshot-tests/core-image-sato-sdk-qemuarm64.png
new file mode 120000
index 0000000000..67c3d41f2b
--- /dev/null
+++ b/meta/files/screenshot-tests/core-image-sato-sdk-qemuarm64.png
@@ -0,0 +1 @@
+core-image-sato-qemuarm64.png \ No newline at end of file
diff --git a/meta/files/screenshot-tests/core-image-sato-sdk-qemumips.png b/meta/files/screenshot-tests/core-image-sato-sdk-qemumips.png
new file mode 120000
index 0000000000..167227c235
--- /dev/null
+++ b/meta/files/screenshot-tests/core-image-sato-sdk-qemumips.png
@@ -0,0 +1 @@
+core-image-sato-qemumips.png \ No newline at end of file
diff --git a/meta/files/screenshot-tests/core-image-sato-sdk-qemumips64.png b/meta/files/screenshot-tests/core-image-sato-sdk-qemumips64.png
new file mode 120000
index 0000000000..70ba35c255
--- /dev/null
+++ b/meta/files/screenshot-tests/core-image-sato-sdk-qemumips64.png
@@ -0,0 +1 @@
+core-image-sato-qemumips64.png \ No newline at end of file
diff --git a/meta/files/screenshot-tests/core-image-sato-sdk-qemuppc.png b/meta/files/screenshot-tests/core-image-sato-sdk-qemuppc.png
new file mode 120000
index 0000000000..bdc87d3b1d
--- /dev/null
+++ b/meta/files/screenshot-tests/core-image-sato-sdk-qemuppc.png
@@ -0,0 +1 @@
+core-image-sato-qemuppc.png \ No newline at end of file
diff --git a/meta/files/screenshot-tests/core-image-sato-sdk-qemuriscv64.png b/meta/files/screenshot-tests/core-image-sato-sdk-qemuriscv64.png
new file mode 120000
index 0000000000..87b4e66023
--- /dev/null
+++ b/meta/files/screenshot-tests/core-image-sato-sdk-qemuriscv64.png
@@ -0,0 +1 @@
+core-image-sato-qemuriscv64.png \ No newline at end of file
diff --git a/meta/files/screenshot-tests/core-image-sato-sdk-qemux86-64.png b/meta/files/screenshot-tests/core-image-sato-sdk-qemux86-64.png
new file mode 120000
index 0000000000..4ae853c070
--- /dev/null
+++ b/meta/files/screenshot-tests/core-image-sato-sdk-qemux86-64.png
@@ -0,0 +1 @@
+core-image-sato-qemux86-64.png \ No newline at end of file
diff --git a/meta/files/screenshot-tests/core-image-sato-sdk-qemux86.png b/meta/files/screenshot-tests/core-image-sato-sdk-qemux86.png
new file mode 120000
index 0000000000..90d707fab5
--- /dev/null
+++ b/meta/files/screenshot-tests/core-image-sato-sdk-qemux86.png
@@ -0,0 +1 @@
+core-image-sato-qemux86.png \ No newline at end of file
diff --git a/meta/files/toolchain-shar-relocate.sh b/meta/files/toolchain-shar-relocate.sh
index cee9adbf39..b017714df0 100644
--- a/meta/files/toolchain-shar-relocate.sh
+++ b/meta/files/toolchain-shar-relocate.sh
@@ -1,11 +1,14 @@
-if ! xargs --version > /dev/null 2>&1; then
- echo "xargs is required by the relocation script, please install it first. Abort!"
- exit 1
-fi
+for cmd in xargs file; do
+ if ! command -v $cmd > /dev/null 2>&1; then
+ echo "The command '$cmd' is required by the relocation script, please install it first. Abort!"
+ exit 1
+ fi
+done
# fix dynamic loader paths in all ELF SDK binaries
+# allow symlinks to be accessed via the find command too
native_sysroot=$($SUDO_EXEC cat $env_setup_script |grep 'OECORE_NATIVE_SYSROOT='|cut -d'=' -f2|tr -d '"')
-dl_path=$($SUDO_EXEC find $native_sysroot/lib -maxdepth 1 -name "ld-linux*")
+dl_path=$($SUDO_EXEC find $native_sysroot/lib/ -maxdepth 1 -name "ld-linux*")
if [ "$dl_path" = "" ] ; then
echo "SDK could not be set up. Relocate script unable to find ld-linux.so. Abort!"
exit 1
diff --git a/meta/lib/bblayers/buildconf.py b/meta/lib/bblayers/buildconf.py
new file mode 100644
index 0000000000..722cf0723c
--- /dev/null
+++ b/meta/lib/bblayers/buildconf.py
@@ -0,0 +1,84 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
+import logging
+import os
+import sys
+
+from bblayers.common import LayerPlugin
+
+logger = logging.getLogger('bitbake-layers')
+
+sys.path.insert(0, os.path.dirname(os.path.dirname(__file__)))
+
+import oe.buildcfg
+
+def plugin_init(plugins):
+ return BuildConfPlugin()
+
+class BuildConfPlugin(LayerPlugin):
+ notes_fixme = """FIXME: Please place here the detailed instructions for using this build configuration.
+They will be shown to the users when they set up their builds via TEMPLATECONF.
+"""
+ summary_fixme = """FIXME: Please place here the short summary of what this build configuration is for.
+It will be shown to the users when they set up their builds via TEMPLATECONF.
+"""
+
+ def _save_conf(self, templatename, templatepath, oecorepath, relpaths_to_oecore):
+ confdir = os.path.join(os.environ["BBPATH"], "conf")
+ destdir = os.path.join(templatepath, "conf", "templates", templatename)
+ os.makedirs(destdir, exist_ok=True)
+
+ with open(os.path.join(confdir, "local.conf")) as src:
+ with open(os.path.join(destdir, "local.conf.sample"), 'w') as dest:
+ dest.write(src.read())
+
+ with open(os.path.join(confdir, "bblayers.conf")) as src:
+ with open(os.path.join(destdir, "bblayers.conf.sample"), 'w') as dest:
+ bblayers_data = src.read()
+
+ for (abspath, relpath) in relpaths_to_oecore:
+ bblayers_data = bblayers_data.replace(abspath, "##OEROOT##/" + relpath)
+ dest.write(bblayers_data)
+
+ with open(os.path.join(destdir, "conf-summary.txt"), 'w') as dest:
+ dest.write(self.summary_fixme)
+ with open(os.path.join(destdir, "conf-notes.txt"), 'w') as dest:
+ dest.write(self.notes_fixme)
+
+ logger.info("""Configuration template placed into {}
+Please review the files in there, and particularly provide a configuration summary in {}
+and notes in {}
+You can try out the configuration with
+TEMPLATECONF={} . {}/oe-init-build-env build-try-{}"""
+.format(destdir, os.path.join(destdir, "conf-summary.txt"), os.path.join(destdir, "conf-notes.txt"), destdir, oecorepath, templatename))
+
+ def do_save_build_conf(self, args):
+ """ Save the currently active build configuration (conf/local.conf, conf/bblayers.conf) as a template into a layer.\n This template can later be used for setting up builds via TEMPLATECONF. """
+ layers = oe.buildcfg.get_layer_revisions(self.tinfoil.config_data)
+ targetlayer = None
+ oecore = None
+
+ for l in layers:
+ if os.path.abspath(l[0]) == os.path.abspath(args.layerpath):
+ targetlayer = l[0]
+ if l[1] == 'meta':
+ oecore = os.path.dirname(l[0])
+
+ if not targetlayer:
+ logger.error("Layer {} not in one of the currently enabled layers:\n{}".format(args.layerpath, "\n".join([l[0] for l in layers])))
+ elif not oecore:
+ logger.error("Openembedded-core not in one of the currently enabled layers:\n{}".format("\n".join([l[0] for l in layers])))
+ else:
+ relpaths_to_oecore = [(l[0], os.path.relpath(l[0], start=oecore)) for l in layers]
+ self._save_conf(args.templatename, targetlayer, oecore, relpaths_to_oecore)
+
+ def register_commands(self, sp):
+ parser_build_conf = self.add_command(sp, 'save-build-conf', self.do_save_build_conf, parserecipes=False)
+ parser_build_conf.add_argument('layerpath',
+ help='The path to the layer where the configuration template should be saved.')
+ parser_build_conf.add_argument('templatename',
+ help='The name of the configuration template.')
diff --git a/meta/lib/bblayers/create.py b/meta/lib/bblayers/create.py
index 7ddb777dc7..517554c587 100644
--- a/meta/lib/bblayers/create.py
+++ b/meta/lib/bblayers/create.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: GPL-2.0-only
#
@@ -10,6 +12,7 @@ import shutil
import bb.utils
from bblayers.common import LayerPlugin
+from bblayers.action import ActionPlugin
logger = logging.getLogger('bitbake-layers')
@@ -51,7 +54,7 @@ class CreatePlugin(LayerPlugin):
shutil.copy(license_src, license_dst)
# Get the compat value for core layer.
- compat = self.tinfoil.config_data.getVar('LAYERSERIES_COMPAT_core') or ""
+ compat = self.tinfoil.config_data.getVar('LAYERSERIES_CORENAMES') or ""
# Create the layer.conf from templates/layer.conf
layerconf_template = read_template('layer.conf').format(
@@ -67,11 +70,19 @@ class CreatePlugin(LayerPlugin):
with open(os.path.join(example, args.examplerecipe + '_%s.bb') % args.version, 'w') as fd:
fd.write(example_template)
- logger.plain('Add your new layer with \'bitbake-layers add-layer %s\'' % args.layerdir)
+ if args.add_layer:
+ # Add the layer to bblayers.conf
+ args.layerdir = [layerdir]
+ ActionPlugin.do_add_layer(self, args)
+ logger.plain('Layer added %s' % args.layerdir)
+
+ else:
+ logger.plain('Add your new layer with \'bitbake-layers add-layer %s\'' % args.layerdir)
def register_commands(self, sp):
parser_create_layer = self.add_command(sp, 'create-layer', self.do_create_layer, parserecipes=False)
parser_create_layer.add_argument('layerdir', help='Layer directory to create')
+ parser_create_layer.add_argument('--add-layer', '-a', action='store_true', help='Add the layer to bblayers.conf after creation')
parser_create_layer.add_argument('--layerid', '-i', help='Layer id to use if different from layername')
parser_create_layer.add_argument('--priority', '-p', default=6, help='Priority of recipes in layer')
parser_create_layer.add_argument('--example-recipe-name', '-e', dest='examplerecipe', default='example', help='Filename of the example recipe')
diff --git a/meta/lib/bblayers/makesetup.py b/meta/lib/bblayers/makesetup.py
new file mode 100644
index 0000000000..99d5973760
--- /dev/null
+++ b/meta/lib/bblayers/makesetup.py
@@ -0,0 +1,102 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
+import logging
+import os
+import sys
+
+import bb.utils
+
+from bblayers.common import LayerPlugin
+
+logger = logging.getLogger('bitbake-layers')
+
+sys.path.insert(0, os.path.dirname(os.path.dirname(__file__)))
+
+import oe.buildcfg
+
+def plugin_init(plugins):
+ return MakeSetupPlugin()
+
+class MakeSetupPlugin(LayerPlugin):
+
+ def _get_remotes_with_url(self, repo_path):
+ remotes = {}
+ for r in oe.buildcfg.get_metadata_git_remotes(repo_path):
+ remotes[r] = {'uri':oe.buildcfg.get_metadata_git_remote_url(repo_path, r)}
+ return remotes
+
+ def _is_submodule(self, repo_path):
+ # This is slightly brittle: git does not offer a way to tell whether
+ # a given repo dir is a submodule checkout, so we need to rely on .git
+ # being a file (rather than a dir like it is in standalone checkouts).
+ # The file typically contains a gitdir pointer to elsewhere.
+ return os.path.isfile(os.path.join(repo_path,".git"))
+
+ def make_repo_config(self, destdir):
+ """ This is a helper function for the writer plugins that discovers currently configured layers.
+ The writers do not have to use it, but it can save a bit of work and avoid duplicated code, hence it is
+ available here. """
+ repos = {}
+ layers = oe.buildcfg.get_layer_revisions(self.tinfoil.config_data)
+ destdir_repo = oe.buildcfg.get_metadata_git_toplevel(destdir)
+
+ for (l_path, l_name, l_branch, l_rev, l_ismodified) in layers:
+ if l_name == 'workspace':
+ continue
+ if l_ismodified:
+ logger.error("Layer {name} in {path} has uncommitted modifications or is not in a git repository.".format(name=l_name,path=l_path))
+ return
+ repo_path = oe.buildcfg.get_metadata_git_toplevel(l_path)
+
+ if self._is_submodule(repo_path):
+ continue
+ if repo_path not in repos.keys():
+ repos[repo_path] = {'path':os.path.basename(repo_path),'git-remote':{
+ 'rev':l_rev,
+ 'branch':l_branch,
+ 'remotes':self._get_remotes_with_url(repo_path),
+ 'describe':oe.buildcfg.get_metadata_git_describe(repo_path)}}
+ if repo_path == destdir_repo:
+ repos[repo_path]['contains_this_file'] = True
+ if not repos[repo_path]['git-remote']['remotes'] and not repos[repo_path]['contains_this_file']:
+ logger.error("Layer repository in {path} does not have any remotes configured. Please add at least one with 'git remote add'.".format(path=repo_path))
+ return
+
+ top_path = os.path.commonpath([os.path.dirname(r) for r in repos.keys()])
+
+ repos_nopaths = {}
+ for r in repos.keys():
+ r_nopath = os.path.basename(r)
+ repos_nopaths[r_nopath] = repos[r]
+ r_relpath = os.path.relpath(r, top_path)
+ repos_nopaths[r_nopath]['path'] = r_relpath
+ return repos_nopaths
+
+ def do_make_setup(self, args):
+ """ Writes out a configuration file and/or a script that replicate the directory structure and revisions of the layers in a current build. """
+ for p in self.plugins:
+ if str(p) == args.writer:
+ p.do_write(self, args)
+
+ def register_commands(self, sp):
+ parser_setup_layers = self.add_command(sp, 'create-layers-setup', self.do_make_setup, parserecipes=False)
+ parser_setup_layers.add_argument('destdir',
+ help='Directory where to write the output\n(if it is inside one of the layers, the layer becomes a bootstrap repository and thus will be excluded from fetching).')
+ parser_setup_layers.add_argument('--output-prefix', '-o',
+ help='File name prefix for the output files, if the default (setup-layers) is undesirable.')
+
+ self.plugins = []
+
+ for path in (self.tinfoil.config_data.getVar('BBPATH').split(':')):
+ pluginpath = os.path.join(path, 'lib', 'bblayers', 'setupwriters')
+ bb.utils.load_plugins(logger, self.plugins, pluginpath)
+
+ parser_setup_layers.add_argument('--writer', '-w', choices=[str(p) for p in self.plugins], help="Choose the output format (defaults to oe-setup-layers).\n\nCurrently supported options are:\noe-setup-layers - a self-contained python script and a json config for it.\n\n", default="oe-setup-layers")
+
+ for plugin in self.plugins:
+ if hasattr(plugin, 'register_arguments'):
+ plugin.register_arguments(parser_setup_layers)
diff --git a/meta/lib/bblayers/setupwriters/oe-setup-layers.py b/meta/lib/bblayers/setupwriters/oe-setup-layers.py
new file mode 100644
index 0000000000..59ca968ff3
--- /dev/null
+++ b/meta/lib/bblayers/setupwriters/oe-setup-layers.py
@@ -0,0 +1,117 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
+import logging
+import os
+import json
+import stat
+
+logger = logging.getLogger('bitbake-layers')
+
+def plugin_init(plugins):
+ return OeSetupLayersWriter()
+
+class OeSetupLayersWriter():
+
+ def __str__(self):
+ return "oe-setup-layers"
+
+ def _write_python(self, input, output):
+ with open(input) as f:
+ script = f.read()
+ with open(output, 'w') as f:
+ f.write(script)
+ st = os.stat(output)
+ os.chmod(output, st.st_mode | stat.S_IEXEC | stat.S_IXGRP | stat.S_IXOTH)
+
+ def _write_json(self, repos, output):
+ with open(output, 'w') as f:
+ json.dump(repos, f, sort_keys=True, indent=4)
+
+ def _read_repo_config(self, json_path):
+ with open(json_path) as f:
+ json_config = json.load(f)
+
+ supported_versions = ["1.0"]
+ if json_config["version"] not in supported_versions:
+ err = "File {} has version {}, which is not in supported versions: {}".format(json_path, json_config["version"], supported_versions)
+ logger.error(err)
+ raise Exception(err)
+
+ return json_config
+
+ def _modify_repo_config(self, json_config, args):
+ sources = json_config['sources']
+ for pair in args.custom_references:
+ try:
+ repo, rev = pair.split(':', maxsplit=1)
+ except ValueError:
+ err = "Invalid custom reference specified: '{}'. Provide one using 'REPOSITORY:REFERENCE'.".format(pair)
+ logger.error(err)
+ raise Exception(err)
+ if not repo in sources.keys():
+ err = "Repository {} does not exist in setup-layers config".format(repo)
+ logger.error(err)
+ raise Exception(err)
+
+ layer_remote = json_config['sources'][repo]['git-remote']
+ layer_remote['rev'] = rev
+ # Clear describe
+ layer_remote['describe'] = ''
+
+ def do_write(self, parent, args):
+ """ Writes out a python script and a json config that replicate the directory structure and revisions of the layers in a current build. """
+ output = args.output_prefix or "setup-layers"
+ output = os.path.join(os.path.abspath(args.destdir), output)
+
+ if args.update:
+ # Modify existing layers setup
+ if args.custom_references is None:
+ err = "No custom reference specified. Please provide one using '--use-custom-reference REPOSITORY:REFERENCE'."
+ logger.error(err)
+ raise Exception(err)
+
+ json = self._read_repo_config(output + ".json")
+ if not 'sources' in json.keys():
+ err = "File {}.json does not contain valid layer sources.".format(output)
+ logger.error(err)
+ raise Exception(err)
+
+ else:
+ # Create new layers setup
+ if not os.path.exists(args.destdir):
+ os.makedirs(args.destdir)
+ repos = parent.make_repo_config(args.destdir)
+ json = {"version":"1.0","sources":repos}
+ if not repos:
+ err = "Could not determine layer sources"
+ logger.error(err)
+ raise Exception(err)
+
+ if args.custom_references is not None:
+ self._modify_repo_config(json, args)
+
+ self._write_json(json, output + ".json")
+ logger.info('Created {}.json'.format(output))
+ if not args.json_only:
+ self._write_python(os.path.join(os.path.dirname(__file__),'../../../../scripts/oe-setup-layers'), output)
+ logger.info('Created {}'.format(output))
+
+ def register_arguments(self, parser):
+ parser.add_argument('--json-only', action='store_true',
+ help='When using the oe-setup-layers writer, write only the layer configuruation in json format. Otherwise, also a copy of scripts/oe-setup-layers (from oe-core or poky) is provided, which is a self contained python script that fetches all the needed layers and sets them to correct revisions using the data from the json.')
+
+ parser.add_argument('--update', '-u',
+ action='store_true',
+ help=("Instead of writing a new json file, update an existing layer setup json file with custom references provided via the '--use-custom-reference' option."
+ "\nThis will only update repositories for which a custom reference is specified, all other repositores will be left unchanged."))
+ parser.add_argument('--use-custom-reference', '-r',
+ action='append',
+ dest='custom_references',
+ metavar='REPOSITORY:REFERENCE',
+ help=("A pair consisting of a repository and a custom reference to use for it (by default the currently checked out commit id would be written out)."
+ "\nThis value can be any reference that 'git checkout' would accept, and is not checked for validity."
+ "\nThis option can be used multiple times."))
diff --git a/meta/lib/buildstats.py b/meta/lib/buildstats.py
index c52b6c3b72..1ffe679801 100644
--- a/meta/lib/buildstats.py
+++ b/meta/lib/buildstats.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: GPL-2.0-only
#
# Implements system state sampling. Called by buildstats.bbclass.
@@ -14,13 +16,27 @@ class SystemStats:
bn = d.getVar('BUILDNAME')
bsdir = os.path.join(d.getVar('BUILDSTATS_BASE'), bn)
bb.utils.mkdirhier(bsdir)
+ file_handlers = [('diskstats', self._reduce_diskstats),
+ ('meminfo', self._reduce_meminfo),
+ ('stat', self._reduce_stat)]
+
+ # Some hosts like openSUSE have readable /proc/pressure files
+ # but throw errors when these files are opened. Catch these error
+ # and ensure that the reduce_proc_pressure directory is not created.
+ if os.path.exists("/proc/pressure"):
+ try:
+ with open('/proc/pressure/cpu', 'rb') as source:
+ source.read()
+ pressuredir = os.path.join(bsdir, 'reduced_proc_pressure')
+ bb.utils.mkdirhier(pressuredir)
+ file_handlers.extend([('pressure/cpu', self._reduce_pressure),
+ ('pressure/io', self._reduce_pressure),
+ ('pressure/memory', self._reduce_pressure)])
+ except Exception:
+ pass
self.proc_files = []
- for filename, handler in (
- ('diskstats', self._reduce_diskstats),
- ('meminfo', self._reduce_meminfo),
- ('stat', self._reduce_stat),
- ):
+ for filename, handler in (file_handlers):
# The corresponding /proc files might not exist on the host.
# For example, /proc/diskstats is not available in virtualized
# environments like Linux-VServer. Silently skip collecting
@@ -37,24 +53,32 @@ class SystemStats:
# Last time that we sampled /proc data resp. recorded disk monitoring data.
self.last_proc = 0
self.last_disk_monitor = 0
- # Minimum number of seconds between recording a sample. This
- # becames relevant when we get called very often while many
- # short tasks get started. Sampling during quiet periods
+ # Minimum number of seconds between recording a sample. This becames relevant when we get
+ # called very often while many short tasks get started. Sampling during quiet periods
# depends on the heartbeat event, which fires less often.
- self.min_seconds = 1
+ # By default, the Heartbeat events occur roughly once every second but the actual time
+ # between these events deviates by a few milliseconds, in most cases. Hence
+ # pick a somewhat arbitary tolerance such that we sample a large majority
+ # of the Heartbeat events. This ignores rare events that fall outside the minimum
+ # and may lead an extra sample in a given second every so often. However, it allows for fairly
+ # consistent intervals between samples without missing many events.
+ self.tolerance = 0.01
+ self.min_seconds = 1.0 - self.tolerance
self.meminfo_regex = re.compile(rb'^(MemTotal|MemFree|Buffers|Cached|SwapTotal|SwapFree):\s*(\d+)')
self.diskstats_regex = re.compile(rb'^([hsv]d.|mtdblock\d|mmcblk\d|cciss/c\d+d\d+.*)$')
self.diskstats_ltime = None
self.diskstats_data = None
self.stat_ltimes = None
+ # Last time we sampled /proc/pressure. All resources stored in a single dict with the key as filename
+ self.last_pressure = {"pressure/cpu": None, "pressure/io": None, "pressure/memory": None}
def close(self):
self.monitor_disk.close()
for _, output, _ in self.proc_files:
output.close()
- def _reduce_meminfo(self, time, data):
+ def _reduce_meminfo(self, time, data, filename):
"""
Extracts 'MemTotal', 'MemFree', 'Buffers', 'Cached', 'SwapTotal', 'SwapFree'
and writes their values into a single line, in that order.
@@ -75,7 +99,7 @@ class SystemStats:
disk = linetokens[2]
return self.diskstats_regex.match(disk)
- def _reduce_diskstats(self, time, data):
+ def _reduce_diskstats(self, time, data, filename):
relevant_tokens = filter(self._diskstats_is_relevant_line, map(lambda x: x.split(), data.split(b'\n')))
diskdata = [0] * 3
reduced = None
@@ -104,10 +128,10 @@ class SystemStats:
return reduced
- def _reduce_nop(self, time, data):
+ def _reduce_nop(self, time, data, filename):
return (time, data)
- def _reduce_stat(self, time, data):
+ def _reduce_stat(self, time, data, filename):
if not data:
return None
# CPU times {user, nice, system, idle, io_wait, irq, softirq} from first line
@@ -126,14 +150,41 @@ class SystemStats:
self.stat_ltimes = times
return reduced
+ def _reduce_pressure(self, time, data, filename):
+ """
+ Return reduced pressure: {avg10, avg60, avg300} and delta total compared to the previous sample
+ for the cpu, io and memory resources. A common function is used for all 3 resources since the
+ format of the /proc/pressure file is the same in each case.
+ """
+ if not data:
+ return None
+ tokens = data.split(b'\n', 1)[0].split()
+ avg10 = float(tokens[1].split(b'=')[1])
+ avg60 = float(tokens[2].split(b'=')[1])
+ avg300 = float(tokens[3].split(b'=')[1])
+ total = int(tokens[4].split(b'=')[1])
+
+ reduced = None
+ if self.last_pressure[filename]:
+ delta = total - self.last_pressure[filename]
+ reduced = (time, (avg10, avg60, avg300, delta))
+ self.last_pressure[filename] = total
+ return reduced
+
def sample(self, event, force):
+ """
+ Collect and log proc or disk_monitor stats periodically.
+ Return True if a new sample is collected and hence the value last_proc or last_disk_monitor
+ is changed.
+ """
+ retval = False
now = time.time()
if (now - self.last_proc > self.min_seconds) or force:
for filename, output, handler in self.proc_files:
with open(os.path.join('/proc', filename), 'rb') as input:
data = input.read()
if handler:
- reduced = handler(now, data)
+ reduced = handler(now, data, filename)
else:
reduced = (now, data)
if reduced:
@@ -150,6 +201,7 @@ class SystemStats:
data +
b'\n')
self.last_proc = now
+ retval = True
if isinstance(event, bb.event.MonitorDiskEvent) and \
((now - self.last_disk_monitor > self.min_seconds) or force):
@@ -159,3 +211,5 @@ class SystemStats:
for dev, sample in event.disk_usage.items()]).encode('ascii') +
b'\n')
self.last_disk_monitor = now
+ retval = True
+ return retval \ No newline at end of file
diff --git a/meta/lib/oe/__init__.py b/meta/lib/oe/__init__.py
index 4e7c09da04..6eb536ad28 100644
--- a/meta/lib/oe/__init__.py
+++ b/meta/lib/oe/__init__.py
@@ -1,6 +1,12 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: GPL-2.0-only
#
from pkgutil import extend_path
__path__ = extend_path(__path__, __name__)
+
+BBIMPORTS = ["data", "path", "utils", "types", "package", "packagedata", \
+ "packagegroup", "sstatesig", "lsb", "cachedpath", "license", \
+ "qa", "reproducible", "rust", "buildcfg", "go"]
diff --git a/meta/lib/oe/buildcfg.py b/meta/lib/oe/buildcfg.py
new file mode 100644
index 0000000000..27b059b834
--- /dev/null
+++ b/meta/lib/oe/buildcfg.py
@@ -0,0 +1,79 @@
+
+import os
+import subprocess
+import bb.process
+
+def detect_revision(d):
+ path = get_scmbasepath(d)
+ return get_metadata_git_revision(path)
+
+def detect_branch(d):
+ path = get_scmbasepath(d)
+ return get_metadata_git_branch(path)
+
+def get_scmbasepath(d):
+ return os.path.join(d.getVar('COREBASE'), 'meta')
+
+def get_metadata_git_branch(path):
+ try:
+ rev, _ = bb.process.run('git rev-parse --abbrev-ref HEAD', cwd=path)
+ except bb.process.ExecutionError:
+ rev = '<unknown>'
+ return rev.strip()
+
+def get_metadata_git_revision(path):
+ try:
+ rev, _ = bb.process.run('git rev-parse HEAD', cwd=path)
+ except bb.process.ExecutionError:
+ rev = '<unknown>'
+ return rev.strip()
+
+def get_metadata_git_toplevel(path):
+ try:
+ toplevel, _ = bb.process.run('git rev-parse --show-toplevel', cwd=path)
+ except bb.process.ExecutionError:
+ return ""
+ return toplevel.strip()
+
+def get_metadata_git_remotes(path):
+ try:
+ remotes_list, _ = bb.process.run('git remote', cwd=path)
+ remotes = remotes_list.split()
+ except bb.process.ExecutionError:
+ remotes = []
+ return remotes
+
+def get_metadata_git_remote_url(path, remote):
+ try:
+ uri, _ = bb.process.run('git remote get-url {remote}'.format(remote=remote), cwd=path)
+ except bb.process.ExecutionError:
+ return ""
+ return uri.strip()
+
+def get_metadata_git_describe(path):
+ try:
+ describe, _ = bb.process.run('git describe --tags', cwd=path)
+ except bb.process.ExecutionError:
+ return ""
+ return describe.strip()
+
+def is_layer_modified(path):
+ try:
+ subprocess.check_output("""cd %s; export PSEUDO_UNLOAD=1; set -e;
+ git diff --quiet --no-ext-diff
+ git diff --quiet --no-ext-diff --cached""" % path,
+ shell=True,
+ stderr=subprocess.STDOUT)
+ return ""
+ except subprocess.CalledProcessError as ex:
+ # Silently treat errors as "modified", without checking for the
+ # (expected) return code 1 in a modified git repo. For example, we get
+ # output and a 129 return code when a layer isn't a git repo at all.
+ return " -- modified"
+
+def get_layer_revisions(d):
+ layers = (d.getVar("BBLAYERS") or "").split()
+ revisions = []
+ for i in layers:
+ revisions.append((i, os.path.basename(i), get_metadata_git_branch(i).strip(), get_metadata_git_revision(i), is_layer_modified(i)))
+ return revisions
diff --git a/meta/lib/oe/buildhistory_analysis.py b/meta/lib/oe/buildhistory_analysis.py
index b1856846b6..4edad01580 100644
--- a/meta/lib/oe/buildhistory_analysis.py
+++ b/meta/lib/oe/buildhistory_analysis.py
@@ -562,7 +562,7 @@ def compare_siglists(a_blob, b_blob, taskdiff=False):
elif not hash2 in hashfiles:
out.append("Unable to find matching sigdata for %s with hash %s" % (desc, hash2))
else:
- out2 = bb.siggen.compare_sigfiles(hashfiles[hash1], hashfiles[hash2], recursecb, collapsed=True)
+ out2 = bb.siggen.compare_sigfiles(hashfiles[hash1]['path'], hashfiles[hash2]['path'], recursecb, collapsed=True)
for line in out2:
m = hashlib.sha256()
m.update(line.encode('utf-8'))
diff --git a/meta/lib/oe/cachedpath.py b/meta/lib/oe/cachedpath.py
index 254257a83f..0138b791d4 100644
--- a/meta/lib/oe/cachedpath.py
+++ b/meta/lib/oe/cachedpath.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: GPL-2.0-only
#
# Based on standard python library functions but avoid
diff --git a/meta/lib/oe/classextend.py b/meta/lib/oe/classextend.py
index e08d788b75..5161d33d2d 100644
--- a/meta/lib/oe/classextend.py
+++ b/meta/lib/oe/classextend.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: GPL-2.0-only
#
@@ -30,6 +32,9 @@ class ClassExtender(object):
if name.endswith("-" + self.extname):
name = name.replace("-" + self.extname, "")
if name.startswith("virtual/"):
+ # Assume large numbers of dashes means a triplet is present and we don't need to convert
+ if name.count("-") >= 3 and name.endswith(("-go", "-binutils", "-gcc", "-g++")):
+ return name
subs = name.split("/", 1)[1]
if not subs.startswith(self.extname):
return "virtual/" + self.extname + "-" + subs
@@ -148,9 +153,7 @@ class NativesdkClassExtender(ClassExtender):
def map_depends(self, dep):
if dep.startswith(self.extname):
return dep
- if dep.endswith(("-gcc", "-g++")):
- return dep + "-crosssdk"
- elif dep.endswith(("-native", "-native-runtime")) or ('nativesdk-' in dep) or ('-cross-' in dep) or ('-crosssdk-' in dep):
+ if dep.endswith(("-native", "-native-runtime")) or ('nativesdk-' in dep) or ('-cross-' in dep) or ('-crosssdk-' in dep):
return dep
else:
return self.extend_name(dep)
diff --git a/meta/lib/oe/classutils.py b/meta/lib/oe/classutils.py
index 08bb66b365..ec3f6ad720 100644
--- a/meta/lib/oe/classutils.py
+++ b/meta/lib/oe/classutils.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: GPL-2.0-only
#
diff --git a/meta/lib/oe/copy_buildsystem.py b/meta/lib/oe/copy_buildsystem.py
index 79642fd76a..81abfbf9e2 100644
--- a/meta/lib/oe/copy_buildsystem.py
+++ b/meta/lib/oe/copy_buildsystem.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: GPL-2.0-only
#
# This class should provide easy access to the different aspects of the
@@ -20,7 +22,7 @@ def _smart_copy(src, dest):
mode = os.stat(src).st_mode
if stat.S_ISDIR(mode):
bb.utils.mkdirhier(dest)
- cmd = "tar --exclude='.git' --exclude='__pycache__' --xattrs --xattrs-include='*' -chf - -C %s -p . \
+ cmd = "tar --exclude='.git' --exclude='__pycache__' --xattrs --xattrs-include='*' -cf - -C %s -p . \
| tar --xattrs --xattrs-include='*' -xf - -C %s" % (src, dest)
subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)
else:
diff --git a/meta/lib/oe/cve_check.py b/meta/lib/oe/cve_check.py
index dc7d2e2826..ed5c714cb8 100644
--- a/meta/lib/oe/cve_check.py
+++ b/meta/lib/oe/cve_check.py
@@ -1,3 +1,9 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
import collections
import re
import itertools
@@ -73,33 +79,33 @@ def get_patched_cves(d):
import re
import oe.patch
- pn = d.getVar("PN")
- cve_match = re.compile("CVE:( CVE\-\d{4}\-\d+)+")
+ cve_match = re.compile(r"CVE:( CVE-\d{4}-\d+)+")
# Matches the last "CVE-YYYY-ID" in the file name, also if written
# in lowercase. Possible to have multiple CVE IDs in a single
# file name, but only the last one will be detected from the file name.
# However, patch files contents addressing multiple CVE IDs are supported
# (cve_match regular expression)
-
- cve_file_name_match = re.compile(".*([Cc][Vv][Ee]\-\d{4}\-\d+)")
+ cve_file_name_match = re.compile(r".*(CVE-\d{4}-\d+)", re.IGNORECASE)
patched_cves = set()
- bb.debug(2, "Looking for patches that solves CVEs for %s" % pn)
- for url in oe.patch.src_patches(d):
+ patches = oe.patch.src_patches(d)
+ bb.debug(2, "Scanning %d patches for CVEs" % len(patches))
+ for url in patches:
patch_file = bb.fetch.decodeurl(url)[2]
- # Remote compressed patches may not be unpacked, so silently ignore them
- if not os.path.isfile(patch_file):
- bb.warn("%s does not exist, cannot extract CVE list" % patch_file)
- continue
-
# Check patch file name for CVE ID
fname_match = cve_file_name_match.search(patch_file)
if fname_match:
cve = fname_match.group(1).upper()
patched_cves.add(cve)
- bb.debug(2, "Found CVE %s from patch file name %s" % (cve, patch_file))
+ bb.debug(2, "Found %s from patch file name %s" % (cve, patch_file))
+
+ # Remote patches won't be present and compressed patches won't be
+ # unpacked, so say we're not scanning them
+ if not os.path.isfile(patch_file):
+ bb.note("%s is remote or compressed, not scanning content" % patch_file)
+ continue
with open(patch_file, "r", encoding="utf-8") as f:
try:
@@ -124,6 +130,13 @@ def get_patched_cves(d):
if not fname_match and not text_match:
bb.debug(2, "Patch %s doesn't solve CVEs" % patch_file)
+ # Search for additional patched CVEs
+ for cve in (d.getVarFlags("CVE_STATUS") or {}):
+ decoded_status, _, _ = decode_cve_status(d, cve)
+ if decoded_status == "Patched":
+ bb.debug(2, "CVE %s is additionally patched" % cve)
+ patched_cves.add(cve)
+
return patched_cves
@@ -143,7 +156,7 @@ def get_cpe_ids(cve_product, version):
else:
vendor = "*"
- cpe_id = f'cpe:2.3:a:{vendor}:{product}:{version}:*:*:*:*:*:*:*'
+ cpe_id = 'cpe:2.3:*:{}:{}:{}:*:*:*:*:*:*:*'.format(vendor, product, version)
cpe_ids.append(cpe_id)
return cpe_ids
@@ -159,7 +172,74 @@ def cve_check_merge_jsons(output, data):
for product in output["package"]:
if product["name"] == data["package"][0]["name"]:
- bb.error("Error adding the same package twice")
+ bb.error("Error adding the same package %s twice" % product["name"])
return
output["package"].append(data["package"][0])
+
+def update_symlinks(target_path, link_path):
+ """
+ Update a symbolic link link_path to point to target_path.
+ Remove the link and recreate it if exist and is different.
+ """
+ if link_path != target_path and os.path.exists(target_path):
+ if os.path.exists(os.path.realpath(link_path)):
+ os.remove(link_path)
+ os.symlink(os.path.basename(target_path), link_path)
+
+
+def convert_cve_version(version):
+ """
+ This function converts from CVE format to Yocto version format.
+ eg 8.3_p1 -> 8.3p1, 6.2_rc1 -> 6.2-rc1
+
+ Unless it is redefined using CVE_VERSION in the recipe,
+ cve_check uses the version in the name of the recipe (${PV})
+ to check vulnerabilities against a CVE in the database downloaded from NVD.
+
+ When the version has an update, i.e.
+ "p1" in OpenSSH 8.3p1,
+ "-rc1" in linux kernel 6.2-rc1,
+ the database stores the version as version_update (8.3_p1, 6.2_rc1).
+ Therefore, we must transform this version before comparing to the
+ recipe version.
+
+ In this case, the parameter of the function is 8.3_p1.
+ If the version uses the Release Candidate format, "rc",
+ this function replaces the '_' by '-'.
+ If the version uses the Update format, "p",
+ this function removes the '_' completely.
+ """
+ import re
+
+ matches = re.match('^([0-9.]+)_((p|rc)[0-9]+)$', version)
+
+ if not matches:
+ return version
+
+ version = matches.group(1)
+ update = matches.group(2)
+
+ if matches.group(3) == "rc":
+ return version + '-' + update
+
+ return version + update
+
+def decode_cve_status(d, cve):
+ """
+ Convert CVE_STATUS into status, detail and description.
+ """
+ status = d.getVarFlag("CVE_STATUS", cve)
+ if not status:
+ return ("", "", "")
+
+ status_split = status.split(':', 1)
+ detail = status_split[0]
+ description = status_split[1].strip() if (len(status_split) > 1) else ""
+
+ status_mapping = d.getVarFlag("CVE_CHECK_STATUSMAP", detail)
+ if status_mapping is None:
+ bb.warn('Invalid detail "%s" for CVE_STATUS[%s] = "%s", fallback to Unpatched' % (detail, cve, status))
+ status_mapping = "Unpatched"
+
+ return (status_mapping, detail, description)
diff --git a/meta/lib/oe/data.py b/meta/lib/oe/data.py
index 602130a904..37121cfad2 100644
--- a/meta/lib/oe/data.py
+++ b/meta/lib/oe/data.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: GPL-2.0-only
#
diff --git a/meta/lib/oe/distro_check.py b/meta/lib/oe/distro_check.py
index 4b2a9bec01..3494520f40 100644
--- a/meta/lib/oe/distro_check.py
+++ b/meta/lib/oe/distro_check.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: GPL-2.0-only
#
diff --git a/meta/lib/oe/elf.py b/meta/lib/oe/elf.py
index 46c884a775..eab2349a4f 100644
--- a/meta/lib/oe/elf.py
+++ b/meta/lib/oe/elf.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: GPL-2.0-only
#
@@ -19,6 +21,7 @@ def machine_dict(d):
"x86_64": (62, 0, 0, True, 64),
"epiphany": (4643, 0, 0, True, 32),
"lm32": (138, 0, 0, False, 32),
+ "loongarch64":(258, 0, 0, True, 64),
"mips": ( 8, 0, 0, False, 32),
"mipsel": ( 8, 0, 0, True, 32),
"microblaze": (189, 0, 0, False, 32),
@@ -43,6 +46,7 @@ def machine_dict(d):
"ia64": (50, 0, 0, True, 64),
"alpha": (36902, 0, 0, True, 64),
"hppa": (15, 3, 0, False, 32),
+ "loongarch64":(258, 0, 0, True, 64),
"m68k": ( 4, 0, 0, False, 32),
"mips": ( 8, 0, 0, False, 32),
"mipsel": ( 8, 0, 0, True, 32),
diff --git a/meta/lib/oe/go.py b/meta/lib/oe/go.py
new file mode 100644
index 0000000000..dfd957d157
--- /dev/null
+++ b/meta/lib/oe/go.py
@@ -0,0 +1,34 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+import re
+
+def map_arch(a):
+ if re.match('i.86', a):
+ return '386'
+ elif a == 'x86_64':
+ return 'amd64'
+ elif re.match('arm.*', a):
+ return 'arm'
+ elif re.match('aarch64.*', a):
+ return 'arm64'
+ elif re.match('mips64el.*', a):
+ return 'mips64le'
+ elif re.match('mips64.*', a):
+ return 'mips64'
+ elif a == 'mips':
+ return 'mips'
+ elif a == 'mipsel':
+ return 'mipsle'
+ elif re.match('p(pc|owerpc)(64le)', a):
+ return 'ppc64le'
+ elif re.match('p(pc|owerpc)(64)', a):
+ return 'ppc64'
+ elif a == 'riscv64':
+ return 'riscv64'
+ elif a == 'loongarch64':
+ return 'loong64'
+ return ''
diff --git a/meta/lib/oe/gpg_sign.py b/meta/lib/oe/gpg_sign.py
index aa9bb49f2c..ede6186c84 100644
--- a/meta/lib/oe/gpg_sign.py
+++ b/meta/lib/oe/gpg_sign.py
@@ -1,13 +1,16 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: GPL-2.0-only
#
"""Helper module for GPG signing"""
-import os
import bb
-import subprocess
+import os
import shlex
+import subprocess
+import tempfile
class LocalSigner(object):
"""Class for handling local (on the build host) signing"""
@@ -71,8 +74,6 @@ class LocalSigner(object):
cmd += ['--homedir', self.gpg_path]
if armor:
cmd += ['--armor']
- if output_suffix:
- cmd += ['-o', input_file + "." + output_suffix]
if use_sha256:
cmd += ['--digest-algo', "SHA256"]
@@ -81,19 +82,27 @@ class LocalSigner(object):
if self.gpg_version > (2,1,):
cmd += ['--pinentry-mode', 'loopback']
- cmd += [input_file]
-
try:
if passphrase_file:
with open(passphrase_file) as fobj:
passphrase = fobj.readline();
- job = subprocess.Popen(cmd, stdin=subprocess.PIPE, stderr=subprocess.PIPE)
- (_, stderr) = job.communicate(passphrase.encode("utf-8"))
+ if not output_suffix:
+ output_suffix = 'asc' if armor else 'sig'
+ output_file = input_file + "." + output_suffix
+ with tempfile.TemporaryDirectory(dir=os.path.dirname(output_file)) as tmp_dir:
+ tmp_file = os.path.join(tmp_dir, os.path.basename(output_file))
+ cmd += ['-o', tmp_file]
+
+ cmd += [input_file]
+
+ job = subprocess.Popen(cmd, stdin=subprocess.PIPE, stderr=subprocess.PIPE)
+ (_, stderr) = job.communicate(passphrase.encode("utf-8"))
- if job.returncode:
- bb.fatal("GPG exited with code %d: %s" % (job.returncode, stderr.decode("utf-8")))
+ if job.returncode:
+ bb.fatal("GPG exited with code %d: %s" % (job.returncode, stderr.decode("utf-8")))
+ os.rename(tmp_file, output_file)
except IOError as e:
bb.error("IO error (%s): %s" % (e.errno, e.strerror))
raise Exception("Failed to sign '%s'" % input_file)
diff --git a/meta/lib/oe/license.py b/meta/lib/oe/license.py
index 99cfa5f733..d9c8d94da4 100644
--- a/meta/lib/oe/license.py
+++ b/meta/lib/oe/license.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: GPL-2.0-only
#
"""Code for parsing OpenEmbedded license strings"""
diff --git a/meta/lib/oe/lsb.py b/meta/lib/oe/lsb.py
index 43e46380d7..3ec03e5042 100644
--- a/meta/lib/oe/lsb.py
+++ b/meta/lib/oe/lsb.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: GPL-2.0-only
#
diff --git a/meta/lib/oe/maketype.py b/meta/lib/oe/maketype.py
index d36082c535..7a83bdf602 100644
--- a/meta/lib/oe/maketype.py
+++ b/meta/lib/oe/maketype.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: GPL-2.0-only
#
"""OpenEmbedded variable typing support
diff --git a/meta/lib/oe/manifest.py b/meta/lib/oe/manifest.py
index 1a058dcd73..61f18adc4a 100644
--- a/meta/lib/oe/manifest.py
+++ b/meta/lib/oe/manifest.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: GPL-2.0-only
#
diff --git a/meta/lib/oe/npm_registry.py b/meta/lib/oe/npm_registry.py
new file mode 100644
index 0000000000..d97ced7cda
--- /dev/null
+++ b/meta/lib/oe/npm_registry.py
@@ -0,0 +1,175 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+import bb
+import json
+import subprocess
+
+_ALWAYS_SAFE = frozenset('ABCDEFGHIJKLMNOPQRSTUVWXYZ'
+ 'abcdefghijklmnopqrstuvwxyz'
+ '0123456789'
+ '_.-~()')
+
+MISSING_OK = object()
+
+REGISTRY = "https://registry.npmjs.org"
+
+# we can not use urllib.parse here because npm expects lowercase
+# hex-chars but urllib generates uppercase ones
+def uri_quote(s, safe = '/'):
+ res = ""
+ safe_set = set(safe)
+ for c in s:
+ if c in _ALWAYS_SAFE or c in safe_set:
+ res += c
+ else:
+ res += '%%%02x' % ord(c)
+ return res
+
+class PackageJson:
+ def __init__(self, spec):
+ self.__spec = spec
+
+ @property
+ def name(self):
+ return self.__spec['name']
+
+ @property
+ def version(self):
+ return self.__spec['version']
+
+ @property
+ def empty_manifest(self):
+ return {
+ 'name': self.name,
+ 'description': self.__spec.get('description', ''),
+ 'versions': {},
+ }
+
+ def base_filename(self):
+ return uri_quote(self.name, safe = '@')
+
+ def as_manifest_entry(self, tarball_uri):
+ res = {}
+
+ ## NOTE: 'npm install' requires more than basic meta information;
+ ## e.g. it takes 'bin' from this manifest entry but not the actual
+ ## 'package.json'
+ for (idx,dflt) in [('name', None),
+ ('description', ""),
+ ('version', None),
+ ('bin', MISSING_OK),
+ ('man', MISSING_OK),
+ ('scripts', MISSING_OK),
+ ('directories', MISSING_OK),
+ ('dependencies', MISSING_OK),
+ ('devDependencies', MISSING_OK),
+ ('optionalDependencies', MISSING_OK),
+ ('license', "unknown")]:
+ if idx in self.__spec:
+ res[idx] = self.__spec[idx]
+ elif dflt == MISSING_OK:
+ pass
+ elif dflt != None:
+ res[idx] = dflt
+ else:
+ raise Exception("%s-%s: missing key %s" % (self.name,
+ self.version,
+ idx))
+
+ res['dist'] = {
+ 'tarball': tarball_uri,
+ }
+
+ return res
+
+class ManifestImpl:
+ def __init__(self, base_fname, spec):
+ self.__base = base_fname
+ self.__spec = spec
+
+ def load(self):
+ try:
+ with open(self.filename, "r") as f:
+ res = json.load(f)
+ except IOError:
+ res = self.__spec.empty_manifest
+
+ return res
+
+ def save(self, meta):
+ with open(self.filename, "w") as f:
+ json.dump(meta, f, indent = 2)
+
+ @property
+ def filename(self):
+ return self.__base + ".meta"
+
+class Manifest:
+ def __init__(self, base_fname, spec):
+ self.__base = base_fname
+ self.__spec = spec
+ self.__lockf = None
+ self.__impl = None
+
+ def __enter__(self):
+ self.__lockf = bb.utils.lockfile(self.__base + ".lock")
+ self.__impl = ManifestImpl(self.__base, self.__spec)
+ return self.__impl
+
+ def __exit__(self, exc_type, exc_val, exc_tb):
+ bb.utils.unlockfile(self.__lockf)
+
+class NpmCache:
+ def __init__(self, cache):
+ self.__cache = cache
+
+ @property
+ def path(self):
+ return self.__cache
+
+ def run(self, type, key, fname):
+ subprocess.run(['oe-npm-cache', self.__cache, type, key, fname],
+ check = True)
+
+class NpmRegistry:
+ def __init__(self, path, cache):
+ self.__path = path
+ self.__cache = NpmCache(cache + '/_cacache')
+ bb.utils.mkdirhier(self.__path)
+ bb.utils.mkdirhier(self.__cache.path)
+
+ @staticmethod
+ ## This function is critical and must match nodejs expectations
+ def _meta_uri(spec):
+ return REGISTRY + '/' + uri_quote(spec.name, safe = '@')
+
+ @staticmethod
+ ## Exact return value does not matter; just make it look like a
+ ## usual registry url
+ def _tarball_uri(spec):
+ return '%s/%s/-/%s-%s.tgz' % (REGISTRY,
+ uri_quote(spec.name, safe = '@'),
+ uri_quote(spec.name, safe = '@/'),
+ spec.version)
+
+ def add_pkg(self, tarball, pkg_json):
+ pkg_json = PackageJson(pkg_json)
+ base = os.path.join(self.__path, pkg_json.base_filename())
+
+ with Manifest(base, pkg_json) as manifest:
+ meta = manifest.load()
+ tarball_uri = self._tarball_uri(pkg_json)
+
+ meta['versions'][pkg_json.version] = pkg_json.as_manifest_entry(tarball_uri)
+
+ manifest.save(meta)
+
+ ## Cache entries are a little bit dependent on the nodejs
+ ## version; version specific cache implementation must
+ ## mitigate differences
+ self.__cache.run('meta', self._meta_uri(pkg_json), manifest.filename);
+ self.__cache.run('tgz', tarball_uri, tarball);
diff --git a/meta/lib/oe/overlayfs.py b/meta/lib/oe/overlayfs.py
index b5d5e88e80..8b88900f71 100644
--- a/meta/lib/oe/overlayfs.py
+++ b/meta/lib/oe/overlayfs.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: GPL-2.0-only
#
# This file contains common functions for overlayfs and its QA check
@@ -38,7 +40,11 @@ def unitFileList(d):
bb.fatal("Missing required mount point for OVERLAYFS_MOUNT_POINT[%s] in your MACHINE configuration" % mountPoint)
for mountPoint in overlayMountPoints:
- for path in d.getVarFlag('OVERLAYFS_WRITABLE_PATHS', mountPoint).split():
+ mountPointList = d.getVarFlag('OVERLAYFS_WRITABLE_PATHS', mountPoint)
+ if not mountPointList:
+ bb.debug(1, "No mount points defined for %s flag, don't add to file list", mountPoint)
+ continue
+ for path in mountPointList.split():
fileList.append(mountUnitName(path))
fileList.append(helperUnitName(path))
diff --git a/meta/lib/oe/package.py b/meta/lib/oe/package.py
index 7d387ee81d..1511ba47c4 100644
--- a/meta/lib/oe/package.py
+++ b/meta/lib/oe/package.py
@@ -1,11 +1,22 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: GPL-2.0-only
#
+import errno
+import fnmatch
+import itertools
+import os
+import shlex
+import re
+import glob
import stat
import mmap
import subprocess
+import oe.cachedpath
+
def runstrip(arg):
# Function to strip a single file, called from split_and_strip_files below
# A working 'file' (one which works on the target architecture)
@@ -30,7 +41,7 @@ def runstrip(arg):
stripcmd = [strip]
skip_strip = False
- # kernel module
+ # kernel module
if elftype & 16:
if is_kernel_module_signed(file):
bb.debug(1, "Skip strip on signed module %s" % file)
@@ -103,7 +114,7 @@ def is_static_lib(path):
return start == magic
return False
-def strip_execs(pn, dstdir, strip_cmd, libdir, base_libdir, d, qa_already_stripped=False):
+def strip_execs(pn, dstdir, strip_cmd, libdir, base_libdir, max_process, qa_already_stripped=False):
"""
Strip executable code (like executables, shared libraries) _in_place_
- Based on sysroot_strip in staging.bbclass
@@ -111,6 +122,7 @@ def strip_execs(pn, dstdir, strip_cmd, libdir, base_libdir, d, qa_already_stripp
:param strip_cmd: Strip command (usually ${STRIP})
:param libdir: ${libdir} - strip .so files in this directory
:param base_libdir: ${base_libdir} - strip .so files in this directory
+ :param max_process: number of stripping processes started in parallel
:param qa_already_stripped: Set to True if already-stripped' in ${INSANE_SKIP}
This is for proper logging and messages only.
"""
@@ -153,7 +165,7 @@ def strip_execs(pn, dstdir, strip_cmd, libdir, base_libdir, d, qa_already_stripp
# ...but is it ELF, and is it already stripped?
checkelf.append(file)
inodecache[file] = s.st_ino
- results = oe.utils.multiprocess_launch(is_elf, checkelf, d)
+ results = oe.utils.multiprocess_launch_mp(is_elf, checkelf, max_process)
for (file, elf_file) in results:
#elf_file = is_elf(file)
if elf_file & 1:
@@ -181,7 +193,7 @@ def strip_execs(pn, dstdir, strip_cmd, libdir, base_libdir, d, qa_already_stripp
elf_file = int(elffiles[file])
sfiles.append((file, elf_file, strip_cmd))
- oe.utils.multiprocess_launch(runstrip, sfiles, d)
+ oe.utils.multiprocess_launch_mp(runstrip, sfiles, max_process)
def file_translate(file):
@@ -290,3 +302,1725 @@ def read_shlib_providers(d):
shlib_provider[s[0]] = {}
shlib_provider[s[0]][s[1]] = (dep_pkg, s[2])
return shlib_provider
+
+# We generate a master list of directories to process, we start by
+# seeding this list with reasonable defaults, then load from
+# the fs-perms.txt files
+def fixup_perms(d):
+ import pwd, grp
+
+ cpath = oe.cachedpath.CachedPath()
+ dvar = d.getVar('PKGD')
+
+ # init using a string with the same format as a line as documented in
+ # the fs-perms.txt file
+ # <path> <mode> <uid> <gid> <walk> <fmode> <fuid> <fgid>
+ # <path> link <link target>
+ #
+ # __str__ can be used to print out an entry in the input format
+ #
+ # if fs_perms_entry.path is None:
+ # an error occurred
+ # if fs_perms_entry.link, you can retrieve:
+ # fs_perms_entry.path = path
+ # fs_perms_entry.link = target of link
+ # if not fs_perms_entry.link, you can retrieve:
+ # fs_perms_entry.path = path
+ # fs_perms_entry.mode = expected dir mode or None
+ # fs_perms_entry.uid = expected uid or -1
+ # fs_perms_entry.gid = expected gid or -1
+ # fs_perms_entry.walk = 'true' or something else
+ # fs_perms_entry.fmode = expected file mode or None
+ # fs_perms_entry.fuid = expected file uid or -1
+ # fs_perms_entry_fgid = expected file gid or -1
+ class fs_perms_entry():
+ def __init__(self, line):
+ lsplit = line.split()
+ if len(lsplit) == 3 and lsplit[1].lower() == "link":
+ self._setlink(lsplit[0], lsplit[2])
+ elif len(lsplit) == 8:
+ self._setdir(lsplit[0], lsplit[1], lsplit[2], lsplit[3], lsplit[4], lsplit[5], lsplit[6], lsplit[7])
+ else:
+ msg = "Fixup Perms: invalid config line %s" % line
+ oe.qa.handle_error("perm-config", msg, d)
+ self.path = None
+ self.link = None
+
+ def _setdir(self, path, mode, uid, gid, walk, fmode, fuid, fgid):
+ self.path = os.path.normpath(path)
+ self.link = None
+ self.mode = self._procmode(mode)
+ self.uid = self._procuid(uid)
+ self.gid = self._procgid(gid)
+ self.walk = walk.lower()
+ self.fmode = self._procmode(fmode)
+ self.fuid = self._procuid(fuid)
+ self.fgid = self._procgid(fgid)
+
+ def _setlink(self, path, link):
+ self.path = os.path.normpath(path)
+ self.link = link
+
+ def _procmode(self, mode):
+ if not mode or (mode and mode == "-"):
+ return None
+ else:
+ return int(mode,8)
+
+ # Note uid/gid -1 has special significance in os.lchown
+ def _procuid(self, uid):
+ if uid is None or uid == "-":
+ return -1
+ elif uid.isdigit():
+ return int(uid)
+ else:
+ return pwd.getpwnam(uid).pw_uid
+
+ def _procgid(self, gid):
+ if gid is None or gid == "-":
+ return -1
+ elif gid.isdigit():
+ return int(gid)
+ else:
+ return grp.getgrnam(gid).gr_gid
+
+ # Use for debugging the entries
+ def __str__(self):
+ if self.link:
+ return "%s link %s" % (self.path, self.link)
+ else:
+ mode = "-"
+ if self.mode:
+ mode = "0%o" % self.mode
+ fmode = "-"
+ if self.fmode:
+ fmode = "0%o" % self.fmode
+ uid = self._mapugid(self.uid)
+ gid = self._mapugid(self.gid)
+ fuid = self._mapugid(self.fuid)
+ fgid = self._mapugid(self.fgid)
+ return "%s %s %s %s %s %s %s %s" % (self.path, mode, uid, gid, self.walk, fmode, fuid, fgid)
+
+ def _mapugid(self, id):
+ if id is None or id == -1:
+ return "-"
+ else:
+ return "%d" % id
+
+ # Fix the permission, owner and group of path
+ def fix_perms(path, mode, uid, gid, dir):
+ if mode and not os.path.islink(path):
+ #bb.note("Fixup Perms: chmod 0%o %s" % (mode, dir))
+ os.chmod(path, mode)
+ # -1 is a special value that means don't change the uid/gid
+ # if they are BOTH -1, don't bother to lchown
+ if not (uid == -1 and gid == -1):
+ #bb.note("Fixup Perms: lchown %d:%d %s" % (uid, gid, dir))
+ os.lchown(path, uid, gid)
+
+ # Return a list of configuration files based on either the default
+ # files/fs-perms.txt or the contents of FILESYSTEM_PERMS_TABLES
+ # paths are resolved via BBPATH
+ def get_fs_perms_list(d):
+ str = ""
+ bbpath = d.getVar('BBPATH')
+ fs_perms_tables = d.getVar('FILESYSTEM_PERMS_TABLES') or ""
+ for conf_file in fs_perms_tables.split():
+ confpath = bb.utils.which(bbpath, conf_file)
+ if confpath:
+ str += " %s" % bb.utils.which(bbpath, conf_file)
+ else:
+ bb.warn("cannot find %s specified in FILESYSTEM_PERMS_TABLES" % conf_file)
+ return str
+
+ fs_perms_table = {}
+ fs_link_table = {}
+
+ # By default all of the standard directories specified in
+ # bitbake.conf will get 0755 root:root.
+ target_path_vars = [ 'base_prefix',
+ 'prefix',
+ 'exec_prefix',
+ 'base_bindir',
+ 'base_sbindir',
+ 'base_libdir',
+ 'datadir',
+ 'sysconfdir',
+ 'servicedir',
+ 'sharedstatedir',
+ 'localstatedir',
+ 'infodir',
+ 'mandir',
+ 'docdir',
+ 'bindir',
+ 'sbindir',
+ 'libexecdir',
+ 'libdir',
+ 'includedir' ]
+
+ for path in target_path_vars:
+ dir = d.getVar(path) or ""
+ if dir == "":
+ continue
+ fs_perms_table[dir] = fs_perms_entry(d.expand("%s 0755 root root false - - -" % (dir)))
+
+ # Now we actually load from the configuration files
+ for conf in get_fs_perms_list(d).split():
+ if not os.path.exists(conf):
+ continue
+ with open(conf) as f:
+ for line in f:
+ if line.startswith('#'):
+ continue
+ lsplit = line.split()
+ if len(lsplit) == 0:
+ continue
+ if len(lsplit) != 8 and not (len(lsplit) == 3 and lsplit[1].lower() == "link"):
+ msg = "Fixup perms: %s invalid line: %s" % (conf, line)
+ oe.qa.handle_error("perm-line", msg, d)
+ continue
+ entry = fs_perms_entry(d.expand(line))
+ if entry and entry.path:
+ if entry.link:
+ fs_link_table[entry.path] = entry
+ if entry.path in fs_perms_table:
+ fs_perms_table.pop(entry.path)
+ else:
+ fs_perms_table[entry.path] = entry
+ if entry.path in fs_link_table:
+ fs_link_table.pop(entry.path)
+
+ # Debug -- list out in-memory table
+ #for dir in fs_perms_table:
+ # bb.note("Fixup Perms: %s: %s" % (dir, str(fs_perms_table[dir])))
+ #for link in fs_link_table:
+ # bb.note("Fixup Perms: %s: %s" % (link, str(fs_link_table[link])))
+
+ # We process links first, so we can go back and fixup directory ownership
+ # for any newly created directories
+ # Process in sorted order so /run gets created before /run/lock, etc.
+ for entry in sorted(fs_link_table.values(), key=lambda x: x.link):
+ link = entry.link
+ dir = entry.path
+ origin = dvar + dir
+ if not (cpath.exists(origin) and cpath.isdir(origin) and not cpath.islink(origin)):
+ continue
+
+ if link[0] == "/":
+ target = dvar + link
+ ptarget = link
+ else:
+ target = os.path.join(os.path.dirname(origin), link)
+ ptarget = os.path.join(os.path.dirname(dir), link)
+ if os.path.exists(target):
+ msg = "Fixup Perms: Unable to correct directory link, target already exists: %s -> %s" % (dir, ptarget)
+ oe.qa.handle_error("perm-link", msg, d)
+ continue
+
+ # Create path to move directory to, move it, and then setup the symlink
+ bb.utils.mkdirhier(os.path.dirname(target))
+ #bb.note("Fixup Perms: Rename %s -> %s" % (dir, ptarget))
+ bb.utils.rename(origin, target)
+ #bb.note("Fixup Perms: Link %s -> %s" % (dir, link))
+ os.symlink(link, origin)
+
+ for dir in fs_perms_table:
+ origin = dvar + dir
+ if not (cpath.exists(origin) and cpath.isdir(origin)):
+ continue
+
+ fix_perms(origin, fs_perms_table[dir].mode, fs_perms_table[dir].uid, fs_perms_table[dir].gid, dir)
+
+ if fs_perms_table[dir].walk == 'true':
+ for root, dirs, files in os.walk(origin):
+ for dr in dirs:
+ each_dir = os.path.join(root, dr)
+ fix_perms(each_dir, fs_perms_table[dir].mode, fs_perms_table[dir].uid, fs_perms_table[dir].gid, dir)
+ for f in files:
+ each_file = os.path.join(root, f)
+ fix_perms(each_file, fs_perms_table[dir].fmode, fs_perms_table[dir].fuid, fs_perms_table[dir].fgid, dir)
+
+# Get a list of files from file vars by searching files under current working directory
+# The list contains symlinks, directories and normal files.
+def files_from_filevars(filevars):
+ cpath = oe.cachedpath.CachedPath()
+ files = []
+ for f in filevars:
+ if os.path.isabs(f):
+ f = '.' + f
+ if not f.startswith("./"):
+ f = './' + f
+ globbed = glob.glob(f, recursive=True)
+ if globbed:
+ if [ f ] != globbed:
+ files += globbed
+ continue
+ files.append(f)
+
+ symlink_paths = []
+ for ind, f in enumerate(files):
+ # Handle directory symlinks. Truncate path to the lowest level symlink
+ parent = ''
+ for dirname in f.split('/')[:-1]:
+ parent = os.path.join(parent, dirname)
+ if dirname == '.':
+ continue
+ if cpath.islink(parent):
+ bb.warn("FILES contains file '%s' which resides under a "
+ "directory symlink. Please fix the recipe and use the "
+ "real path for the file." % f[1:])
+ symlink_paths.append(f)
+ files[ind] = parent
+ f = parent
+ break
+
+ if not cpath.islink(f):
+ if cpath.isdir(f):
+ newfiles = [ os.path.join(f,x) for x in os.listdir(f) ]
+ if newfiles:
+ files += newfiles
+
+ return files, symlink_paths
+
+# Called in package_<rpm,ipk,deb>.bbclass to get the correct list of configuration files
+def get_conffiles(pkg, d):
+ pkgdest = d.getVar('PKGDEST')
+ root = os.path.join(pkgdest, pkg)
+ cwd = os.getcwd()
+ os.chdir(root)
+
+ conffiles = d.getVar('CONFFILES:%s' % pkg);
+ if conffiles == None:
+ conffiles = d.getVar('CONFFILES')
+ if conffiles == None:
+ conffiles = ""
+ conffiles = conffiles.split()
+ conf_orig_list = files_from_filevars(conffiles)[0]
+
+ # Remove links and directories from conf_orig_list to get conf_list which only contains normal files
+ conf_list = []
+ for f in conf_orig_list:
+ if os.path.isdir(f):
+ continue
+ if os.path.islink(f):
+ continue
+ if not os.path.exists(f):
+ continue
+ conf_list.append(f)
+
+ # Remove the leading './'
+ for i in range(0, len(conf_list)):
+ conf_list[i] = conf_list[i][1:]
+
+ os.chdir(cwd)
+ return sorted(conf_list)
+
+def legitimize_package_name(s):
+ """
+ Make sure package names are legitimate strings
+ """
+
+ def fixutf(m):
+ cp = m.group(1)
+ if cp:
+ return ('\\u%s' % cp).encode('latin-1').decode('unicode_escape')
+
+ # Handle unicode codepoints encoded as <U0123>, as in glibc locale files.
+ s = re.sub(r'<U([0-9A-Fa-f]{1,4})>', fixutf, s)
+
+ # Remaining package name validity fixes
+ return s.lower().replace('_', '-').replace('@', '+').replace(',', '+').replace('/', '-')
+
+def split_locales(d):
+ cpath = oe.cachedpath.CachedPath()
+ if (d.getVar('PACKAGE_NO_LOCALE') == '1'):
+ bb.debug(1, "package requested not splitting locales")
+ return
+
+ packages = (d.getVar('PACKAGES') or "").split()
+
+ dvar = d.getVar('PKGD')
+ pn = d.getVar('LOCALEBASEPN')
+
+ try:
+ locale_index = packages.index(pn + '-locale')
+ packages.pop(locale_index)
+ except ValueError:
+ locale_index = len(packages)
+
+ localepaths = []
+ locales = set()
+ for localepath in (d.getVar('LOCALE_PATHS') or "").split():
+ localedir = dvar + localepath
+ if not cpath.isdir(localedir):
+ bb.debug(1, 'No locale files in %s' % localepath)
+ continue
+
+ localepaths.append(localepath)
+ with os.scandir(localedir) as it:
+ for entry in it:
+ if entry.is_dir():
+ locales.add(entry.name)
+
+ if len(locales) == 0:
+ bb.debug(1, "No locale files in this package")
+ return
+
+ summary = d.getVar('SUMMARY') or pn
+ description = d.getVar('DESCRIPTION') or ""
+ locale_section = d.getVar('LOCALE_SECTION')
+ mlprefix = d.getVar('MLPREFIX') or ""
+ for l in sorted(locales):
+ ln = legitimize_package_name(l)
+ pkg = pn + '-locale-' + ln
+ packages.insert(locale_index, pkg)
+ locale_index += 1
+ files = []
+ for localepath in localepaths:
+ files.append(os.path.join(localepath, l))
+ d.setVar('FILES:' + pkg, " ".join(files))
+ d.setVar('RRECOMMENDS:' + pkg, '%svirtual-locale-%s' % (mlprefix, ln))
+ d.setVar('RPROVIDES:' + pkg, '%s-locale %s%s-translation' % (pn, mlprefix, ln))
+ d.setVar('SUMMARY:' + pkg, '%s - %s translations' % (summary, l))
+ d.setVar('DESCRIPTION:' + pkg, '%s This package contains language translation files for the %s locale.' % (description, l))
+ if locale_section:
+ d.setVar('SECTION:' + pkg, locale_section)
+
+ d.setVar('PACKAGES', ' '.join(packages))
+
+ # Disabled by RP 18/06/07
+ # Wildcards aren't supported in debian
+ # They break with ipkg since glibc-locale* will mean that
+ # glibc-localedata-translit* won't install as a dependency
+ # for some other package which breaks meta-toolchain
+ # Probably breaks since virtual-locale- isn't provided anywhere
+ #rdep = (d.getVar('RDEPENDS:%s' % pn) or "").split()
+ #rdep.append('%s-locale*' % pn)
+ #d.setVar('RDEPENDS:%s' % pn, ' '.join(rdep))
+
+def package_debug_vars(d):
+ # We default to '.debug' style
+ if d.getVar('PACKAGE_DEBUG_SPLIT_STYLE') == 'debug-file-directory':
+ # Single debug-file-directory style debug info
+ debug_vars = {
+ "append": ".debug",
+ "staticappend": "",
+ "dir": "",
+ "staticdir": "",
+ "libdir": "/usr/lib/debug",
+ "staticlibdir": "/usr/lib/debug-static",
+ "srcdir": "/usr/src/debug",
+ }
+ elif d.getVar('PACKAGE_DEBUG_SPLIT_STYLE') == 'debug-without-src':
+ # Original OE-core, a.k.a. ".debug", style debug info, but without sources in /usr/src/debug
+ debug_vars = {
+ "append": "",
+ "staticappend": "",
+ "dir": "/.debug",
+ "staticdir": "/.debug-static",
+ "libdir": "",
+ "staticlibdir": "",
+ "srcdir": "",
+ }
+ elif d.getVar('PACKAGE_DEBUG_SPLIT_STYLE') == 'debug-with-srcpkg':
+ debug_vars = {
+ "append": "",
+ "staticappend": "",
+ "dir": "/.debug",
+ "staticdir": "/.debug-static",
+ "libdir": "",
+ "staticlibdir": "",
+ "srcdir": "/usr/src/debug",
+ }
+ else:
+ # Original OE-core, a.k.a. ".debug", style debug info
+ debug_vars = {
+ "append": "",
+ "staticappend": "",
+ "dir": "/.debug",
+ "staticdir": "/.debug-static",
+ "libdir": "",
+ "staticlibdir": "",
+ "srcdir": "/usr/src/debug",
+ }
+
+ return debug_vars
+
+
+def parse_debugsources_from_dwarfsrcfiles_output(dwarfsrcfiles_output):
+ debugfiles = {}
+
+ for line in dwarfsrcfiles_output.splitlines():
+ if line.startswith("\t"):
+ debugfiles[os.path.normpath(line.split()[0])] = ""
+
+ return debugfiles.keys()
+
+def source_info(file, d, fatal=True):
+ cmd = ["dwarfsrcfiles", file]
+ try:
+ output = subprocess.check_output(cmd, universal_newlines=True, stderr=subprocess.STDOUT)
+ retval = 0
+ except subprocess.CalledProcessError as exc:
+ output = exc.output
+ retval = exc.returncode
+
+ # 255 means a specific file wasn't fully parsed to get the debug file list, which is not a fatal failure
+ if retval != 0 and retval != 255:
+ msg = "dwarfsrcfiles failed with exit code %s (cmd was %s)%s" % (retval, cmd, ":\n%s" % output if output else "")
+ if fatal:
+ bb.fatal(msg)
+ bb.note(msg)
+
+ debugsources = parse_debugsources_from_dwarfsrcfiles_output(output)
+
+ return list(debugsources)
+
+def splitdebuginfo(file, dvar, dv, d):
+ # Function to split a single file into two components, one is the stripped
+ # target system binary, the other contains any debugging information. The
+ # two files are linked to reference each other.
+ #
+ # return a mapping of files:debugsources
+
+ src = file[len(dvar):]
+ dest = dv["libdir"] + os.path.dirname(src) + dv["dir"] + "/" + os.path.basename(src) + dv["append"]
+ debugfile = dvar + dest
+ sources = []
+
+ if file.endswith(".ko") and file.find("/lib/modules/") != -1:
+ if oe.package.is_kernel_module_signed(file):
+ bb.debug(1, "Skip strip on signed module %s" % file)
+ return (file, sources)
+
+ # Split the file...
+ bb.utils.mkdirhier(os.path.dirname(debugfile))
+ #bb.note("Split %s -> %s" % (file, debugfile))
+ # Only store off the hard link reference if we successfully split!
+
+ dvar = d.getVar('PKGD')
+ objcopy = d.getVar("OBJCOPY")
+
+ newmode = None
+ if not os.access(file, os.W_OK) or os.access(file, os.R_OK):
+ origmode = os.stat(file)[stat.ST_MODE]
+ newmode = origmode | stat.S_IWRITE | stat.S_IREAD
+ os.chmod(file, newmode)
+
+ # We need to extract the debug src information here...
+ if dv["srcdir"]:
+ sources = source_info(file, d)
+
+ bb.utils.mkdirhier(os.path.dirname(debugfile))
+
+ subprocess.check_output([objcopy, '--only-keep-debug', file, debugfile], stderr=subprocess.STDOUT)
+
+ # Set the debuglink to have the view of the file path on the target
+ subprocess.check_output([objcopy, '--add-gnu-debuglink', debugfile, file], stderr=subprocess.STDOUT)
+
+ if newmode:
+ os.chmod(file, origmode)
+
+ return (file, sources)
+
+def splitstaticdebuginfo(file, dvar, dv, d):
+ # Unlike the function above, there is no way to split a static library
+ # two components. So to get similar results we will copy the unmodified
+ # static library (containing the debug symbols) into a new directory.
+ # We will then strip (preserving symbols) the static library in the
+ # typical location.
+ #
+ # return a mapping of files:debugsources
+
+ src = file[len(dvar):]
+ dest = dv["staticlibdir"] + os.path.dirname(src) + dv["staticdir"] + "/" + os.path.basename(src) + dv["staticappend"]
+ debugfile = dvar + dest
+ sources = []
+
+ # Copy the file...
+ bb.utils.mkdirhier(os.path.dirname(debugfile))
+ #bb.note("Copy %s -> %s" % (file, debugfile))
+
+ dvar = d.getVar('PKGD')
+
+ newmode = None
+ if not os.access(file, os.W_OK) or os.access(file, os.R_OK):
+ origmode = os.stat(file)[stat.ST_MODE]
+ newmode = origmode | stat.S_IWRITE | stat.S_IREAD
+ os.chmod(file, newmode)
+
+ # We need to extract the debug src information here...
+ if dv["srcdir"]:
+ sources = source_info(file, d)
+
+ bb.utils.mkdirhier(os.path.dirname(debugfile))
+
+ # Copy the unmodified item to the debug directory
+ shutil.copy2(file, debugfile)
+
+ if newmode:
+ os.chmod(file, origmode)
+
+ return (file, sources)
+
+def inject_minidebuginfo(file, dvar, dv, d):
+ # Extract just the symbols from debuginfo into minidebuginfo,
+ # compress it with xz and inject it back into the binary in a .gnu_debugdata section.
+ # https://sourceware.org/gdb/onlinedocs/gdb/MiniDebugInfo.html
+
+ readelf = d.getVar('READELF')
+ nm = d.getVar('NM')
+ objcopy = d.getVar('OBJCOPY')
+
+ minidebuginfodir = d.expand('${WORKDIR}/minidebuginfo')
+
+ src = file[len(dvar):]
+ dest = dv["libdir"] + os.path.dirname(src) + dv["dir"] + "/" + os.path.basename(src) + dv["append"]
+ debugfile = dvar + dest
+ minidebugfile = minidebuginfodir + src + '.minidebug'
+ bb.utils.mkdirhier(os.path.dirname(minidebugfile))
+
+ # If we didn't produce debuginfo for any reason, we can't produce minidebuginfo either
+ # so skip it.
+ if not os.path.exists(debugfile):
+ bb.debug(1, 'ELF file {} has no debuginfo, skipping minidebuginfo injection'.format(file))
+ return
+
+ # minidebuginfo does not make sense to apply to ELF objects other than
+ # executables and shared libraries, skip applying the minidebuginfo
+ # generation for objects like kernel modules.
+ for line in subprocess.check_output([readelf, '-h', debugfile], universal_newlines=True).splitlines():
+ if not line.strip().startswith("Type:"):
+ continue
+ elftype = line.split(":")[1].strip()
+ if not any(elftype.startswith(i) for i in ["EXEC", "DYN"]):
+ bb.debug(1, 'ELF file {} is not executable/shared, skipping minidebuginfo injection'.format(file))
+ return
+ break
+
+ # Find non-allocated PROGBITS, NOTE, and NOBITS sections in the debuginfo.
+ # We will exclude all of these from minidebuginfo to save space.
+ remove_section_names = []
+ for line in subprocess.check_output([readelf, '-W', '-S', debugfile], universal_newlines=True).splitlines():
+ # strip the leading " [ 1]" section index to allow splitting on space
+ if ']' not in line:
+ continue
+ fields = line[line.index(']') + 1:].split()
+ if len(fields) < 7:
+ continue
+ name = fields[0]
+ type = fields[1]
+ flags = fields[6]
+ # .debug_ sections will be removed by objcopy -S so no need to explicitly remove them
+ if name.startswith('.debug_'):
+ continue
+ if 'A' not in flags and type in ['PROGBITS', 'NOTE', 'NOBITS']:
+ remove_section_names.append(name)
+
+ # List dynamic symbols in the binary. We can exclude these from minidebuginfo
+ # because they are always present in the binary.
+ dynsyms = set()
+ for line in subprocess.check_output([nm, '-D', file, '--format=posix', '--defined-only'], universal_newlines=True).splitlines():
+ dynsyms.add(line.split()[0])
+
+ # Find all function symbols from debuginfo which aren't in the dynamic symbols table.
+ # These are the ones we want to keep in minidebuginfo.
+ keep_symbols_file = minidebugfile + '.symlist'
+ found_any_symbols = False
+ with open(keep_symbols_file, 'w') as f:
+ for line in subprocess.check_output([nm, debugfile, '--format=sysv', '--defined-only'], universal_newlines=True).splitlines():
+ fields = line.split('|')
+ if len(fields) < 7:
+ continue
+ name = fields[0].strip()
+ type = fields[3].strip()
+ if type == 'FUNC' and name not in dynsyms:
+ f.write('{}\n'.format(name))
+ found_any_symbols = True
+
+ if not found_any_symbols:
+ bb.debug(1, 'ELF file {} contains no symbols, skipping minidebuginfo injection'.format(file))
+ return
+
+ bb.utils.remove(minidebugfile)
+ bb.utils.remove(minidebugfile + '.xz')
+
+ subprocess.check_call([objcopy, '-S'] +
+ ['--remove-section={}'.format(s) for s in remove_section_names] +
+ ['--keep-symbols={}'.format(keep_symbols_file), debugfile, minidebugfile])
+
+ subprocess.check_call(['xz', '--keep', minidebugfile])
+
+ subprocess.check_call([objcopy, '--add-section', '.gnu_debugdata={}.xz'.format(minidebugfile), file])
+
+def copydebugsources(debugsrcdir, sources, d):
+ # The debug src information written out to sourcefile is further processed
+ # and copied to the destination here.
+
+ cpath = oe.cachedpath.CachedPath()
+
+ if debugsrcdir and sources:
+ sourcefile = d.expand("${WORKDIR}/debugsources.list")
+ bb.utils.remove(sourcefile)
+
+ # filenames are null-separated - this is an artefact of the previous use
+ # of rpm's debugedit, which was writing them out that way, and the code elsewhere
+ # is still assuming that.
+ debuglistoutput = '\0'.join(sources) + '\0'
+ with open(sourcefile, 'a') as sf:
+ sf.write(debuglistoutput)
+
+ dvar = d.getVar('PKGD')
+ strip = d.getVar("STRIP")
+ objcopy = d.getVar("OBJCOPY")
+ workdir = d.getVar("WORKDIR")
+ sdir = d.getVar("S")
+ cflags = d.expand("${CFLAGS}")
+
+ prefixmap = {}
+ for flag in cflags.split():
+ if not flag.startswith("-fdebug-prefix-map"):
+ continue
+ if "recipe-sysroot" in flag:
+ continue
+ flag = flag.split("=")
+ prefixmap[flag[1]] = flag[2]
+
+ nosuchdir = []
+ basepath = dvar
+ for p in debugsrcdir.split("/"):
+ basepath = basepath + "/" + p
+ if not cpath.exists(basepath):
+ nosuchdir.append(basepath)
+ bb.utils.mkdirhier(basepath)
+ cpath.updatecache(basepath)
+
+ for pmap in prefixmap:
+ # Ignore files from the recipe sysroots (target and native)
+ cmd = "LC_ALL=C ; sort -z -u '%s' | egrep -v -z '((<internal>|<built-in>)$|/.*recipe-sysroot.*/)' | " % sourcefile
+ # We need to ignore files that are not actually ours
+ # we do this by only paying attention to items from this package
+ cmd += "fgrep -zw '%s' | " % prefixmap[pmap]
+ # Remove prefix in the source paths
+ cmd += "sed 's#%s/##g' | " % (prefixmap[pmap])
+ cmd += "(cd '%s' ; cpio -pd0mlL --no-preserve-owner '%s%s' 2>/dev/null)" % (pmap, dvar, prefixmap[pmap])
+
+ try:
+ subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)
+ except subprocess.CalledProcessError:
+ # Can "fail" if internal headers/transient sources are attempted
+ pass
+ # cpio seems to have a bug with -lL together and symbolic links are just copied, not dereferenced.
+ # Work around this by manually finding and copying any symbolic links that made it through.
+ cmd = "find %s%s -type l -print0 -delete | sed s#%s%s/##g | (cd '%s' ; cpio -pd0mL --no-preserve-owner '%s%s')" % \
+ (dvar, prefixmap[pmap], dvar, prefixmap[pmap], pmap, dvar, prefixmap[pmap])
+ subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)
+
+ # debugsources.list may be polluted from the host if we used externalsrc,
+ # cpio uses copy-pass and may have just created a directory structure
+ # matching the one from the host, if thats the case move those files to
+ # debugsrcdir to avoid host contamination.
+ # Empty dir structure will be deleted in the next step.
+
+ # Same check as above for externalsrc
+ if workdir not in sdir:
+ if os.path.exists(dvar + debugsrcdir + sdir):
+ cmd = "mv %s%s%s/* %s%s" % (dvar, debugsrcdir, sdir, dvar,debugsrcdir)
+ subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)
+
+ # The copy by cpio may have resulted in some empty directories! Remove these
+ cmd = "find %s%s -empty -type d -delete" % (dvar, debugsrcdir)
+ subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)
+
+ # Also remove debugsrcdir if its empty
+ for p in nosuchdir[::-1]:
+ if os.path.exists(p) and not os.listdir(p):
+ os.rmdir(p)
+
+
+def process_split_and_strip_files(d):
+ cpath = oe.cachedpath.CachedPath()
+
+ dvar = d.getVar('PKGD')
+ pn = d.getVar('PN')
+ hostos = d.getVar('HOST_OS')
+
+ oldcwd = os.getcwd()
+ os.chdir(dvar)
+
+ dv = package_debug_vars(d)
+
+ #
+ # First lets figure out all of the files we may have to process ... do this only once!
+ #
+ elffiles = {}
+ symlinks = {}
+ staticlibs = []
+ inodes = {}
+ libdir = os.path.abspath(dvar + os.sep + d.getVar("libdir"))
+ baselibdir = os.path.abspath(dvar + os.sep + d.getVar("base_libdir"))
+ skipfiles = (d.getVar("INHIBIT_PACKAGE_STRIP_FILES") or "").split()
+ if (d.getVar('INHIBIT_PACKAGE_STRIP') != '1' or \
+ d.getVar('INHIBIT_PACKAGE_DEBUG_SPLIT') != '1'):
+ checkelf = {}
+ checkelflinks = {}
+ for root, dirs, files in cpath.walk(dvar):
+ for f in files:
+ file = os.path.join(root, f)
+
+ # Skip debug files
+ if dv["append"] and file.endswith(dv["append"]):
+ continue
+ if dv["dir"] and dv["dir"] in os.path.dirname(file[len(dvar):]):
+ continue
+
+ if file in skipfiles:
+ continue
+
+ if oe.package.is_static_lib(file):
+ staticlibs.append(file)
+ continue
+
+ try:
+ ltarget = cpath.realpath(file, dvar, False)
+ s = cpath.lstat(ltarget)
+ except OSError as e:
+ (err, strerror) = e.args
+ if err != errno.ENOENT:
+ raise
+ # Skip broken symlinks
+ continue
+ if not s:
+ continue
+ # Check its an executable
+ if (s[stat.ST_MODE] & stat.S_IXUSR) or (s[stat.ST_MODE] & stat.S_IXGRP) \
+ or (s[stat.ST_MODE] & stat.S_IXOTH) \
+ or ((file.startswith(libdir) or file.startswith(baselibdir)) \
+ and (".so" in f or ".node" in f)) \
+ or (f.startswith('vmlinux') or ".ko" in f):
+
+ if cpath.islink(file):
+ checkelflinks[file] = ltarget
+ continue
+ # Use a reference of device ID and inode number to identify files
+ file_reference = "%d_%d" % (s.st_dev, s.st_ino)
+ checkelf[file] = (file, file_reference)
+
+ results = oe.utils.multiprocess_launch(oe.package.is_elf, checkelflinks.values(), d)
+ results_map = {}
+ for (ltarget, elf_file) in results:
+ results_map[ltarget] = elf_file
+ for file in checkelflinks:
+ ltarget = checkelflinks[file]
+ # If it's a symlink, and points to an ELF file, we capture the readlink target
+ if results_map[ltarget]:
+ target = os.readlink(file)
+ #bb.note("Sym: %s (%d)" % (ltarget, results_map[ltarget]))
+ symlinks[file] = target
+
+ results = oe.utils.multiprocess_launch(oe.package.is_elf, checkelf.keys(), d)
+
+ # Sort results by file path. This ensures that the files are always
+ # processed in the same order, which is important to make sure builds
+ # are reproducible when dealing with hardlinks
+ results.sort(key=lambda x: x[0])
+
+ for (file, elf_file) in results:
+ # It's a file (or hardlink), not a link
+ # ...but is it ELF, and is it already stripped?
+ if elf_file & 1:
+ if elf_file & 2:
+ if 'already-stripped' in (d.getVar('INSANE_SKIP:' + pn) or "").split():
+ bb.note("Skipping file %s from %s for already-stripped QA test" % (file[len(dvar):], pn))
+ else:
+ msg = "File '%s' from %s was already stripped, this will prevent future debugging!" % (file[len(dvar):], pn)
+ oe.qa.handle_error("already-stripped", msg, d)
+ continue
+
+ # At this point we have an unstripped elf file. We need to:
+ # a) Make sure any file we strip is not hardlinked to anything else outside this tree
+ # b) Only strip any hardlinked file once (no races)
+ # c) Track any hardlinks between files so that we can reconstruct matching debug file hardlinks
+
+ # Use a reference of device ID and inode number to identify files
+ file_reference = checkelf[file][1]
+ if file_reference in inodes:
+ os.unlink(file)
+ os.link(inodes[file_reference][0], file)
+ inodes[file_reference].append(file)
+ else:
+ inodes[file_reference] = [file]
+ # break hardlink
+ bb.utils.break_hardlinks(file)
+ elffiles[file] = elf_file
+ # Modified the file so clear the cache
+ cpath.updatecache(file)
+
+ def strip_pkgd_prefix(f):
+ nonlocal dvar
+
+ if f.startswith(dvar):
+ return f[len(dvar):]
+
+ return f
+
+ #
+ # First lets process debug splitting
+ #
+ if (d.getVar('INHIBIT_PACKAGE_DEBUG_SPLIT') != '1'):
+ results = oe.utils.multiprocess_launch(splitdebuginfo, list(elffiles), d, extraargs=(dvar, dv, d))
+
+ if dv["srcdir"] and not hostos.startswith("mingw"):
+ if (d.getVar('PACKAGE_DEBUG_STATIC_SPLIT') == '1'):
+ results = oe.utils.multiprocess_launch(splitstaticdebuginfo, staticlibs, d, extraargs=(dvar, dv, d))
+ else:
+ for file in staticlibs:
+ results.append( (file,source_info(file, d)) )
+
+ d.setVar("PKGDEBUGSOURCES", {strip_pkgd_prefix(f): sorted(s) for f, s in results})
+
+ sources = set()
+ for r in results:
+ sources.update(r[1])
+
+ # Hardlink our debug symbols to the other hardlink copies
+ for ref in inodes:
+ if len(inodes[ref]) == 1:
+ continue
+
+ target = inodes[ref][0][len(dvar):]
+ for file in inodes[ref][1:]:
+ src = file[len(dvar):]
+ dest = dv["libdir"] + os.path.dirname(src) + dv["dir"] + "/" + os.path.basename(target) + dv["append"]
+ fpath = dvar + dest
+ ftarget = dvar + dv["libdir"] + os.path.dirname(target) + dv["dir"] + "/" + os.path.basename(target) + dv["append"]
+ bb.utils.mkdirhier(os.path.dirname(fpath))
+ # Only one hardlink of separated debug info file in each directory
+ if not os.access(fpath, os.R_OK):
+ #bb.note("Link %s -> %s" % (fpath, ftarget))
+ os.link(ftarget, fpath)
+
+ # Create symlinks for all cases we were able to split symbols
+ for file in symlinks:
+ src = file[len(dvar):]
+ dest = dv["libdir"] + os.path.dirname(src) + dv["dir"] + "/" + os.path.basename(src) + dv["append"]
+ fpath = dvar + dest
+ # Skip it if the target doesn't exist
+ try:
+ s = os.stat(fpath)
+ except OSError as e:
+ (err, strerror) = e.args
+ if err != errno.ENOENT:
+ raise
+ continue
+
+ ltarget = symlinks[file]
+ lpath = os.path.dirname(ltarget)
+ lbase = os.path.basename(ltarget)
+ ftarget = ""
+ if lpath and lpath != ".":
+ ftarget += lpath + dv["dir"] + "/"
+ ftarget += lbase + dv["append"]
+ if lpath.startswith(".."):
+ ftarget = os.path.join("..", ftarget)
+ bb.utils.mkdirhier(os.path.dirname(fpath))
+ #bb.note("Symlink %s -> %s" % (fpath, ftarget))
+ os.symlink(ftarget, fpath)
+
+ # Process the dv["srcdir"] if requested...
+ # This copies and places the referenced sources for later debugging...
+ copydebugsources(dv["srcdir"], sources, d)
+ #
+ # End of debug splitting
+ #
+
+ #
+ # Now lets go back over things and strip them
+ #
+ if (d.getVar('INHIBIT_PACKAGE_STRIP') != '1'):
+ strip = d.getVar("STRIP")
+ sfiles = []
+ for file in elffiles:
+ elf_file = int(elffiles[file])
+ #bb.note("Strip %s" % file)
+ sfiles.append((file, elf_file, strip))
+ if (d.getVar('PACKAGE_STRIP_STATIC') == '1' or d.getVar('PACKAGE_DEBUG_STATIC_SPLIT') == '1'):
+ for f in staticlibs:
+ sfiles.append((f, 16, strip))
+
+ oe.utils.multiprocess_launch(oe.package.runstrip, sfiles, d)
+
+ # Build "minidebuginfo" and reinject it back into the stripped binaries
+ if bb.utils.contains('DISTRO_FEATURES', 'minidebuginfo', True, False, d):
+ oe.utils.multiprocess_launch(inject_minidebuginfo, list(elffiles), d,
+ extraargs=(dvar, dv, d))
+
+ #
+ # End of strip
+ #
+ os.chdir(oldcwd)
+
+
+def populate_packages(d):
+ cpath = oe.cachedpath.CachedPath()
+
+ workdir = d.getVar('WORKDIR')
+ outdir = d.getVar('DEPLOY_DIR')
+ dvar = d.getVar('PKGD')
+ packages = d.getVar('PACKAGES').split()
+ pn = d.getVar('PN')
+
+ bb.utils.mkdirhier(outdir)
+ os.chdir(dvar)
+
+ autodebug = not (d.getVar("NOAUTOPACKAGEDEBUG") or False)
+
+ split_source_package = (d.getVar('PACKAGE_DEBUG_SPLIT_STYLE') == 'debug-with-srcpkg')
+
+ # If debug-with-srcpkg mode is enabled then add the source package if it
+ # doesn't exist and add the source file contents to the source package.
+ if split_source_package:
+ src_package_name = ('%s-src' % d.getVar('PN'))
+ if not src_package_name in packages:
+ packages.append(src_package_name)
+ d.setVar('FILES:%s' % src_package_name, '/usr/src/debug')
+
+ # Sanity check PACKAGES for duplicates
+ # Sanity should be moved to sanity.bbclass once we have the infrastructure
+ package_dict = {}
+
+ for i, pkg in enumerate(packages):
+ if pkg in package_dict:
+ msg = "%s is listed in PACKAGES multiple times, this leads to packaging errors." % pkg
+ oe.qa.handle_error("packages-list", msg, d)
+ # Ensure the source package gets the chance to pick up the source files
+ # before the debug package by ordering it first in PACKAGES. Whether it
+ # actually picks up any source files is controlled by
+ # PACKAGE_DEBUG_SPLIT_STYLE.
+ elif pkg.endswith("-src"):
+ package_dict[pkg] = (10, i)
+ elif autodebug and pkg.endswith("-dbg"):
+ package_dict[pkg] = (30, i)
+ else:
+ package_dict[pkg] = (50, i)
+ packages = sorted(package_dict.keys(), key=package_dict.get)
+ d.setVar('PACKAGES', ' '.join(packages))
+ pkgdest = d.getVar('PKGDEST')
+
+ seen = []
+
+ # os.mkdir masks the permissions with umask so we have to unset it first
+ oldumask = os.umask(0)
+
+ debug = []
+ for root, dirs, files in cpath.walk(dvar):
+ dir = root[len(dvar):]
+ if not dir:
+ dir = os.sep
+ for f in (files + dirs):
+ path = "." + os.path.join(dir, f)
+ if "/.debug/" in path or "/.debug-static/" in path or path.endswith("/.debug"):
+ debug.append(path)
+
+ for pkg in packages:
+ root = os.path.join(pkgdest, pkg)
+ bb.utils.mkdirhier(root)
+
+ filesvar = d.getVar('FILES:%s' % pkg) or ""
+ if "//" in filesvar:
+ msg = "FILES variable for package %s contains '//' which is invalid. Attempting to fix this but you should correct the metadata.\n" % pkg
+ oe.qa.handle_error("files-invalid", msg, d)
+ filesvar.replace("//", "/")
+
+ origfiles = filesvar.split()
+ files, symlink_paths = oe.package.files_from_filevars(origfiles)
+
+ if autodebug and pkg.endswith("-dbg"):
+ files.extend(debug)
+
+ for file in files:
+ if (not cpath.islink(file)) and (not cpath.exists(file)):
+ continue
+ if file in seen:
+ continue
+ seen.append(file)
+
+ def mkdir(src, dest, p):
+ src = os.path.join(src, p)
+ dest = os.path.join(dest, p)
+ fstat = cpath.stat(src)
+ os.mkdir(dest)
+ os.chmod(dest, fstat.st_mode)
+ os.chown(dest, fstat.st_uid, fstat.st_gid)
+ if p not in seen:
+ seen.append(p)
+ cpath.updatecache(dest)
+
+ def mkdir_recurse(src, dest, paths):
+ if cpath.exists(dest + '/' + paths):
+ return
+ while paths.startswith("./"):
+ paths = paths[2:]
+ p = "."
+ for c in paths.split("/"):
+ p = os.path.join(p, c)
+ if not cpath.exists(os.path.join(dest, p)):
+ mkdir(src, dest, p)
+
+ if cpath.isdir(file) and not cpath.islink(file):
+ mkdir_recurse(dvar, root, file)
+ continue
+
+ mkdir_recurse(dvar, root, os.path.dirname(file))
+ fpath = os.path.join(root,file)
+ if not cpath.islink(file):
+ os.link(file, fpath)
+ continue
+ ret = bb.utils.copyfile(file, fpath)
+ if ret is False or ret == 0:
+ bb.fatal("File population failed")
+
+ # Check if symlink paths exist
+ for file in symlink_paths:
+ if not os.path.exists(os.path.join(root,file)):
+ bb.fatal("File '%s' cannot be packaged into '%s' because its "
+ "parent directory structure does not exist. One of "
+ "its parent directories is a symlink whose target "
+ "directory is not included in the package." %
+ (file, pkg))
+
+ os.umask(oldumask)
+ os.chdir(workdir)
+
+ # Handle excluding packages with incompatible licenses
+ package_list = []
+ for pkg in packages:
+ licenses = d.getVar('_exclude_incompatible-' + pkg)
+ if licenses:
+ msg = "Excluding %s from packaging as it has incompatible license(s): %s" % (pkg, licenses)
+ oe.qa.handle_error("incompatible-license", msg, d)
+ else:
+ package_list.append(pkg)
+ d.setVar('PACKAGES', ' '.join(package_list))
+
+ unshipped = []
+ for root, dirs, files in cpath.walk(dvar):
+ dir = root[len(dvar):]
+ if not dir:
+ dir = os.sep
+ for f in (files + dirs):
+ path = os.path.join(dir, f)
+ if ('.' + path) not in seen:
+ unshipped.append(path)
+
+ if unshipped != []:
+ msg = pn + ": Files/directories were installed but not shipped in any package:"
+ if "installed-vs-shipped" in (d.getVar('INSANE_SKIP:' + pn) or "").split():
+ bb.note("Package %s skipping QA tests: installed-vs-shipped" % pn)
+ else:
+ for f in unshipped:
+ msg = msg + "\n " + f
+ msg = msg + "\nPlease set FILES such that these items are packaged. Alternatively if they are unneeded, avoid installing them or delete them within do_install.\n"
+ msg = msg + "%s: %d installed and not shipped files." % (pn, len(unshipped))
+ oe.qa.handle_error("installed-vs-shipped", msg, d)
+
+def process_fixsymlinks(pkgfiles, d):
+ cpath = oe.cachedpath.CachedPath()
+ pkgdest = d.getVar('PKGDEST')
+ packages = d.getVar("PACKAGES", False).split()
+
+ dangling_links = {}
+ pkg_files = {}
+ for pkg in packages:
+ dangling_links[pkg] = []
+ pkg_files[pkg] = []
+ inst_root = os.path.join(pkgdest, pkg)
+ for path in pkgfiles[pkg]:
+ rpath = path[len(inst_root):]
+ pkg_files[pkg].append(rpath)
+ rtarget = cpath.realpath(path, inst_root, True, assume_dir = True)
+ if not cpath.lexists(rtarget):
+ dangling_links[pkg].append(os.path.normpath(rtarget[len(inst_root):]))
+
+ newrdepends = {}
+ for pkg in dangling_links:
+ for l in dangling_links[pkg]:
+ found = False
+ bb.debug(1, "%s contains dangling link %s" % (pkg, l))
+ for p in packages:
+ if l in pkg_files[p]:
+ found = True
+ bb.debug(1, "target found in %s" % p)
+ if p == pkg:
+ break
+ if pkg not in newrdepends:
+ newrdepends[pkg] = []
+ newrdepends[pkg].append(p)
+ break
+ if found == False:
+ bb.note("%s contains dangling symlink to %s" % (pkg, l))
+
+ for pkg in newrdepends:
+ rdepends = bb.utils.explode_dep_versions2(d.getVar('RDEPENDS:' + pkg) or "")
+ for p in newrdepends[pkg]:
+ if p not in rdepends:
+ rdepends[p] = []
+ d.setVar('RDEPENDS:' + pkg, bb.utils.join_deps(rdepends, commasep=False))
+
+def process_filedeps(pkgfiles, d):
+ """
+ Collect perfile run-time dependency metadata
+ Output:
+ FILERPROVIDESFLIST:pkg - list of all files w/ deps
+ FILERPROVIDES:filepath:pkg - per file dep
+
+ FILERDEPENDSFLIST:pkg - list of all files w/ deps
+ FILERDEPENDS:filepath:pkg - per file dep
+ """
+ if d.getVar('SKIP_FILEDEPS') == '1':
+ return
+
+ pkgdest = d.getVar('PKGDEST')
+ packages = d.getVar('PACKAGES')
+ rpmdeps = d.getVar('RPMDEPS')
+
+ def chunks(files, n):
+ return [files[i:i+n] for i in range(0, len(files), n)]
+
+ pkglist = []
+ for pkg in packages.split():
+ if d.getVar('SKIP_FILEDEPS:' + pkg) == '1':
+ continue
+ if pkg.endswith('-dbg') or pkg.endswith('-doc') or pkg.find('-locale-') != -1 or pkg.find('-localedata-') != -1 or pkg.find('-gconv-') != -1 or pkg.find('-charmap-') != -1 or pkg.startswith('kernel-module-') or pkg.endswith('-src'):
+ continue
+ for files in chunks(pkgfiles[pkg], 100):
+ pkglist.append((pkg, files, rpmdeps, pkgdest))
+
+ processed = oe.utils.multiprocess_launch(oe.package.filedeprunner, pkglist, d)
+
+ provides_files = {}
+ requires_files = {}
+
+ for result in processed:
+ (pkg, provides, requires) = result
+
+ if pkg not in provides_files:
+ provides_files[pkg] = []
+ if pkg not in requires_files:
+ requires_files[pkg] = []
+
+ for file in sorted(provides):
+ provides_files[pkg].append(file)
+ key = "FILERPROVIDES:" + file + ":" + pkg
+ d.appendVar(key, " " + " ".join(provides[file]))
+
+ for file in sorted(requires):
+ requires_files[pkg].append(file)
+ key = "FILERDEPENDS:" + file + ":" + pkg
+ d.appendVar(key, " " + " ".join(requires[file]))
+
+ for pkg in requires_files:
+ d.setVar("FILERDEPENDSFLIST:" + pkg, " ".join(sorted(requires_files[pkg])))
+ for pkg in provides_files:
+ d.setVar("FILERPROVIDESFLIST:" + pkg, " ".join(sorted(provides_files[pkg])))
+
+def process_shlibs(pkgfiles, d):
+ cpath = oe.cachedpath.CachedPath()
+
+ exclude_shlibs = d.getVar('EXCLUDE_FROM_SHLIBS', False)
+ if exclude_shlibs:
+ bb.note("not generating shlibs")
+ return
+
+ lib_re = re.compile(r"^.*\.so")
+ libdir_re = re.compile(r".*/%s$" % d.getVar('baselib'))
+
+ packages = d.getVar('PACKAGES')
+
+ shlib_pkgs = []
+ exclusion_list = d.getVar("EXCLUDE_PACKAGES_FROM_SHLIBS")
+ if exclusion_list:
+ for pkg in packages.split():
+ if pkg not in exclusion_list.split():
+ shlib_pkgs.append(pkg)
+ else:
+ bb.note("not generating shlibs for %s" % pkg)
+ else:
+ shlib_pkgs = packages.split()
+
+ hostos = d.getVar('HOST_OS')
+
+ workdir = d.getVar('WORKDIR')
+
+ ver = d.getVar('PKGV')
+ if not ver:
+ msg = "PKGV not defined"
+ oe.qa.handle_error("pkgv-undefined", msg, d)
+ return
+
+ pkgdest = d.getVar('PKGDEST')
+
+ shlibswork_dir = d.getVar('SHLIBSWORKDIR')
+
+ def linux_so(file, pkg, pkgver, d):
+ needs_ldconfig = False
+ needed = set()
+ sonames = set()
+ renames = []
+ ldir = os.path.dirname(file).replace(pkgdest + "/" + pkg, '')
+ cmd = d.getVar('OBJDUMP') + " -p " + shlex.quote(file) + " 2>/dev/null"
+ fd = os.popen(cmd)
+ lines = fd.readlines()
+ fd.close()
+ rpath = tuple()
+ for l in lines:
+ m = re.match(r"\s+RPATH\s+([^\s]*)", l)
+ if m:
+ rpaths = m.group(1).replace("$ORIGIN", ldir).split(":")
+ rpath = tuple(map(os.path.normpath, rpaths))
+ for l in lines:
+ m = re.match(r"\s+NEEDED\s+([^\s]*)", l)
+ if m:
+ dep = m.group(1)
+ if dep not in needed:
+ needed.add((dep, file, rpath))
+ m = re.match(r"\s+SONAME\s+([^\s]*)", l)
+ if m:
+ this_soname = m.group(1)
+ prov = (this_soname, ldir, pkgver)
+ if not prov in sonames:
+ # if library is private (only used by package) then do not build shlib for it
+ if not private_libs or len([i for i in private_libs if fnmatch.fnmatch(this_soname, i)]) == 0:
+ sonames.add(prov)
+ if libdir_re.match(os.path.dirname(file)):
+ needs_ldconfig = True
+ if needs_ldconfig and snap_symlinks and (os.path.basename(file) != this_soname):
+ renames.append((file, os.path.join(os.path.dirname(file), this_soname)))
+ return (needs_ldconfig, needed, sonames, renames)
+
+ def darwin_so(file, needed, sonames, renames, pkgver):
+ if not os.path.exists(file):
+ return
+ ldir = os.path.dirname(file).replace(pkgdest + "/" + pkg, '')
+
+ def get_combinations(base):
+ #
+ # Given a base library name, find all combinations of this split by "." and "-"
+ #
+ combos = []
+ options = base.split(".")
+ for i in range(1, len(options) + 1):
+ combos.append(".".join(options[0:i]))
+ options = base.split("-")
+ for i in range(1, len(options) + 1):
+ combos.append("-".join(options[0:i]))
+ return combos
+
+ if (file.endswith('.dylib') or file.endswith('.so')) and not pkg.endswith('-dev') and not pkg.endswith('-dbg') and not pkg.endswith('-src'):
+ # Drop suffix
+ name = os.path.basename(file).rsplit(".",1)[0]
+ # Find all combinations
+ combos = get_combinations(name)
+ for combo in combos:
+ if not combo in sonames:
+ prov = (combo, ldir, pkgver)
+ sonames.add(prov)
+ if file.endswith('.dylib') or file.endswith('.so'):
+ rpath = []
+ p = subprocess.Popen([d.expand("${HOST_PREFIX}otool"), '-l', file], stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True)
+ out, err = p.communicate()
+ # If returned successfully, process stdout for results
+ if p.returncode == 0:
+ for l in out.split("\n"):
+ l = l.strip()
+ if l.startswith('path '):
+ rpath.append(l.split()[1])
+
+ p = subprocess.Popen([d.expand("${HOST_PREFIX}otool"), '-L', file], stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True)
+ out, err = p.communicate()
+ # If returned successfully, process stdout for results
+ if p.returncode == 0:
+ for l in out.split("\n"):
+ l = l.strip()
+ if not l or l.endswith(":"):
+ continue
+ if "is not an object file" in l:
+ continue
+ name = os.path.basename(l.split()[0]).rsplit(".", 1)[0]
+ if name and name not in needed[pkg]:
+ needed[pkg].add((name, file, tuple()))
+
+ def mingw_dll(file, needed, sonames, renames, pkgver):
+ if not os.path.exists(file):
+ return
+
+ if file.endswith(".dll"):
+ # assume all dlls are shared objects provided by the package
+ sonames.add((os.path.basename(file), os.path.dirname(file).replace(pkgdest + "/" + pkg, ''), pkgver))
+
+ if (file.endswith(".dll") or file.endswith(".exe")):
+ # use objdump to search for "DLL Name: .*\.dll"
+ p = subprocess.Popen([d.expand("${OBJDUMP}"), "-p", file], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ out, err = p.communicate()
+ # process the output, grabbing all .dll names
+ if p.returncode == 0:
+ for m in re.finditer(r"DLL Name: (.*?\.dll)$", out.decode(), re.MULTILINE | re.IGNORECASE):
+ dllname = m.group(1)
+ if dllname:
+ needed[pkg].add((dllname, file, tuple()))
+
+ if d.getVar('PACKAGE_SNAP_LIB_SYMLINKS') == "1":
+ snap_symlinks = True
+ else:
+ snap_symlinks = False
+
+ needed = {}
+
+ shlib_provider = oe.package.read_shlib_providers(d)
+
+ for pkg in shlib_pkgs:
+ private_libs = d.getVar('PRIVATE_LIBS:' + pkg) or d.getVar('PRIVATE_LIBS') or ""
+ private_libs = private_libs.split()
+ needs_ldconfig = False
+ bb.debug(2, "calculating shlib provides for %s" % pkg)
+
+ pkgver = d.getVar('PKGV:' + pkg)
+ if not pkgver:
+ pkgver = d.getVar('PV_' + pkg)
+ if not pkgver:
+ pkgver = ver
+
+ needed[pkg] = set()
+ sonames = set()
+ renames = []
+ linuxlist = []
+ for file in pkgfiles[pkg]:
+ soname = None
+ if cpath.islink(file):
+ continue
+ if hostos.startswith("darwin"):
+ darwin_so(file, needed, sonames, renames, pkgver)
+ elif hostos.startswith("mingw"):
+ mingw_dll(file, needed, sonames, renames, pkgver)
+ elif os.access(file, os.X_OK) or lib_re.match(file):
+ linuxlist.append(file)
+
+ if linuxlist:
+ results = oe.utils.multiprocess_launch(linux_so, linuxlist, d, extraargs=(pkg, pkgver, d))
+ for r in results:
+ ldconfig = r[0]
+ needed[pkg] |= r[1]
+ sonames |= r[2]
+ renames.extend(r[3])
+ needs_ldconfig = needs_ldconfig or ldconfig
+
+ for (old, new) in renames:
+ bb.note("Renaming %s to %s" % (old, new))
+ bb.utils.rename(old, new)
+ pkgfiles[pkg].remove(old)
+
+ shlibs_file = os.path.join(shlibswork_dir, pkg + ".list")
+ if len(sonames):
+ with open(shlibs_file, 'w') as fd:
+ for s in sorted(sonames):
+ if s[0] in shlib_provider and s[1] in shlib_provider[s[0]]:
+ (old_pkg, old_pkgver) = shlib_provider[s[0]][s[1]]
+ if old_pkg != pkg:
+ bb.warn('%s-%s was registered as shlib provider for %s, changing it to %s-%s because it was built later' % (old_pkg, old_pkgver, s[0], pkg, pkgver))
+ bb.debug(1, 'registering %s-%s as shlib provider for %s' % (pkg, pkgver, s[0]))
+ fd.write(s[0] + ':' + s[1] + ':' + s[2] + '\n')
+ if s[0] not in shlib_provider:
+ shlib_provider[s[0]] = {}
+ shlib_provider[s[0]][s[1]] = (pkg, pkgver)
+ if needs_ldconfig:
+ bb.debug(1, 'adding ldconfig call to postinst for %s' % pkg)
+ postinst = d.getVar('pkg_postinst:%s' % pkg)
+ if not postinst:
+ postinst = '#!/bin/sh\n'
+ postinst += d.getVar('ldconfig_postinst_fragment')
+ d.setVar('pkg_postinst:%s' % pkg, postinst)
+ bb.debug(1, 'LIBNAMES: pkg %s sonames %s' % (pkg, sonames))
+
+ assumed_libs = d.getVar('ASSUME_SHLIBS')
+ if assumed_libs:
+ libdir = d.getVar("libdir")
+ for e in assumed_libs.split():
+ l, dep_pkg = e.split(":")
+ lib_ver = None
+ dep_pkg = dep_pkg.rsplit("_", 1)
+ if len(dep_pkg) == 2:
+ lib_ver = dep_pkg[1]
+ dep_pkg = dep_pkg[0]
+ if l not in shlib_provider:
+ shlib_provider[l] = {}
+ shlib_provider[l][libdir] = (dep_pkg, lib_ver)
+
+ libsearchpath = [d.getVar('libdir'), d.getVar('base_libdir')]
+
+ for pkg in shlib_pkgs:
+ bb.debug(2, "calculating shlib requirements for %s" % pkg)
+
+ private_libs = d.getVar('PRIVATE_LIBS:' + pkg) or d.getVar('PRIVATE_LIBS') or ""
+ private_libs = private_libs.split()
+
+ deps = list()
+ for n in needed[pkg]:
+ # if n is in private libraries, don't try to search provider for it
+ # this could cause problem in case some abc.bb provides private
+ # /opt/abc/lib/libfoo.so.1 and contains /usr/bin/abc depending on system library libfoo.so.1
+ # but skipping it is still better alternative than providing own
+ # version and then adding runtime dependency for the same system library
+ if private_libs and len([i for i in private_libs if fnmatch.fnmatch(n[0], i)]) > 0:
+ bb.debug(2, '%s: Dependency %s covered by PRIVATE_LIBS' % (pkg, n[0]))
+ continue
+ if n[0] in shlib_provider.keys():
+ shlib_provider_map = shlib_provider[n[0]]
+ matches = set()
+ for p in itertools.chain(list(n[2]), sorted(shlib_provider_map.keys()), libsearchpath):
+ if p in shlib_provider_map:
+ matches.add(p)
+ if len(matches) > 1:
+ matchpkgs = ', '.join([shlib_provider_map[match][0] for match in matches])
+ bb.error("%s: Multiple shlib providers for %s: %s (used by files: %s)" % (pkg, n[0], matchpkgs, n[1]))
+ elif len(matches) == 1:
+ (dep_pkg, ver_needed) = shlib_provider_map[matches.pop()]
+
+ bb.debug(2, '%s: Dependency %s requires package %s (used by files: %s)' % (pkg, n[0], dep_pkg, n[1]))
+
+ if dep_pkg == pkg:
+ continue
+
+ if ver_needed:
+ dep = "%s (>= %s)" % (dep_pkg, ver_needed)
+ else:
+ dep = dep_pkg
+ if not dep in deps:
+ deps.append(dep)
+ continue
+ bb.note("Couldn't find shared library provider for %s, used by files: %s" % (n[0], n[1]))
+
+ deps_file = os.path.join(pkgdest, pkg + ".shlibdeps")
+ if os.path.exists(deps_file):
+ os.remove(deps_file)
+ if deps:
+ with open(deps_file, 'w') as fd:
+ for dep in sorted(deps):
+ fd.write(dep + '\n')
+
+def process_pkgconfig(pkgfiles, d):
+ packages = d.getVar('PACKAGES')
+ workdir = d.getVar('WORKDIR')
+ pkgdest = d.getVar('PKGDEST')
+
+ shlibs_dirs = d.getVar('SHLIBSDIRS').split()
+ shlibswork_dir = d.getVar('SHLIBSWORKDIR')
+
+ pc_re = re.compile(r'(.*)\.pc$')
+ var_re = re.compile(r'(.*)=(.*)')
+ field_re = re.compile(r'(.*): (.*)')
+
+ pkgconfig_provided = {}
+ pkgconfig_needed = {}
+ for pkg in packages.split():
+ pkgconfig_provided[pkg] = []
+ pkgconfig_needed[pkg] = []
+ for file in sorted(pkgfiles[pkg]):
+ m = pc_re.match(file)
+ if m:
+ pd = bb.data.init()
+ name = m.group(1)
+ pkgconfig_provided[pkg].append(os.path.basename(name))
+ if not os.access(file, os.R_OK):
+ continue
+ with open(file, 'r') as f:
+ lines = f.readlines()
+ for l in lines:
+ m = field_re.match(l)
+ if m:
+ hdr = m.group(1)
+ exp = pd.expand(m.group(2))
+ if hdr == 'Requires':
+ pkgconfig_needed[pkg] += exp.replace(',', ' ').split()
+ continue
+ m = var_re.match(l)
+ if m:
+ name = m.group(1)
+ val = m.group(2)
+ pd.setVar(name, pd.expand(val))
+
+ for pkg in packages.split():
+ pkgs_file = os.path.join(shlibswork_dir, pkg + ".pclist")
+ if pkgconfig_provided[pkg] != []:
+ with open(pkgs_file, 'w') as f:
+ for p in sorted(pkgconfig_provided[pkg]):
+ f.write('%s\n' % p)
+
+ # Go from least to most specific since the last one found wins
+ for dir in reversed(shlibs_dirs):
+ if not os.path.exists(dir):
+ continue
+ for file in sorted(os.listdir(dir)):
+ m = re.match(r'^(.*)\.pclist$', file)
+ if m:
+ pkg = m.group(1)
+ with open(os.path.join(dir, file)) as fd:
+ lines = fd.readlines()
+ pkgconfig_provided[pkg] = []
+ for l in lines:
+ pkgconfig_provided[pkg].append(l.rstrip())
+
+ for pkg in packages.split():
+ deps = []
+ for n in pkgconfig_needed[pkg]:
+ found = False
+ for k in pkgconfig_provided.keys():
+ if n in pkgconfig_provided[k]:
+ if k != pkg and not (k in deps):
+ deps.append(k)
+ found = True
+ if found == False:
+ bb.note("couldn't find pkgconfig module '%s' in any package" % n)
+ deps_file = os.path.join(pkgdest, pkg + ".pcdeps")
+ if len(deps):
+ with open(deps_file, 'w') as fd:
+ for dep in deps:
+ fd.write(dep + '\n')
+
+def read_libdep_files(d):
+ pkglibdeps = {}
+ packages = d.getVar('PACKAGES').split()
+ for pkg in packages:
+ pkglibdeps[pkg] = {}
+ for extension in ".shlibdeps", ".pcdeps", ".clilibdeps":
+ depsfile = d.expand("${PKGDEST}/" + pkg + extension)
+ if os.access(depsfile, os.R_OK):
+ with open(depsfile) as fd:
+ lines = fd.readlines()
+ for l in lines:
+ l.rstrip()
+ deps = bb.utils.explode_dep_versions2(l)
+ for dep in deps:
+ if not dep in pkglibdeps[pkg]:
+ pkglibdeps[pkg][dep] = deps[dep]
+ return pkglibdeps
+
+def process_depchains(pkgfiles, d):
+ """
+ For a given set of prefix and postfix modifiers, make those packages
+ RRECOMMENDS on the corresponding packages for its RDEPENDS.
+
+ Example: If package A depends upon package B, and A's .bb emits an
+ A-dev package, this would make A-dev Recommends: B-dev.
+
+ If only one of a given suffix is specified, it will take the RRECOMMENDS
+ based on the RDEPENDS of *all* other packages. If more than one of a given
+ suffix is specified, its will only use the RDEPENDS of the single parent
+ package.
+ """
+
+ packages = d.getVar('PACKAGES')
+ postfixes = (d.getVar('DEPCHAIN_POST') or '').split()
+ prefixes = (d.getVar('DEPCHAIN_PRE') or '').split()
+
+ def pkg_adddeprrecs(pkg, base, suffix, getname, depends, d):
+
+ #bb.note('depends for %s is %s' % (base, depends))
+ rreclist = bb.utils.explode_dep_versions2(d.getVar('RRECOMMENDS:' + pkg) or "")
+
+ for depend in sorted(depends):
+ if depend.find('-native') != -1 or depend.find('-cross') != -1 or depend.startswith('virtual/'):
+ #bb.note("Skipping %s" % depend)
+ continue
+ if depend.endswith('-dev'):
+ depend = depend[:-4]
+ if depend.endswith('-dbg'):
+ depend = depend[:-4]
+ pkgname = getname(depend, suffix)
+ #bb.note("Adding %s for %s" % (pkgname, depend))
+ if pkgname not in rreclist and pkgname != pkg:
+ rreclist[pkgname] = []
+
+ #bb.note('setting: RRECOMMENDS:%s=%s' % (pkg, ' '.join(rreclist)))
+ d.setVar('RRECOMMENDS:%s' % pkg, bb.utils.join_deps(rreclist, commasep=False))
+
+ def pkg_addrrecs(pkg, base, suffix, getname, rdepends, d):
+
+ #bb.note('rdepends for %s is %s' % (base, rdepends))
+ rreclist = bb.utils.explode_dep_versions2(d.getVar('RRECOMMENDS:' + pkg) or "")
+
+ for depend in sorted(rdepends):
+ if depend.find('virtual-locale-') != -1:
+ #bb.note("Skipping %s" % depend)
+ continue
+ if depend.endswith('-dev'):
+ depend = depend[:-4]
+ if depend.endswith('-dbg'):
+ depend = depend[:-4]
+ pkgname = getname(depend, suffix)
+ #bb.note("Adding %s for %s" % (pkgname, depend))
+ if pkgname not in rreclist and pkgname != pkg:
+ rreclist[pkgname] = []
+
+ #bb.note('setting: RRECOMMENDS:%s=%s' % (pkg, ' '.join(rreclist)))
+ d.setVar('RRECOMMENDS:%s' % pkg, bb.utils.join_deps(rreclist, commasep=False))
+
+ def add_dep(list, dep):
+ if dep not in list:
+ list.append(dep)
+
+ depends = []
+ for dep in bb.utils.explode_deps(d.getVar('DEPENDS') or ""):
+ add_dep(depends, dep)
+
+ rdepends = []
+ for pkg in packages.split():
+ for dep in bb.utils.explode_deps(d.getVar('RDEPENDS:' + pkg) or ""):
+ add_dep(rdepends, dep)
+
+ #bb.note('rdepends is %s' % rdepends)
+
+ def post_getname(name, suffix):
+ return '%s%s' % (name, suffix)
+ def pre_getname(name, suffix):
+ return '%s%s' % (suffix, name)
+
+ pkgs = {}
+ for pkg in packages.split():
+ for postfix in postfixes:
+ if pkg.endswith(postfix):
+ if not postfix in pkgs:
+ pkgs[postfix] = {}
+ pkgs[postfix][pkg] = (pkg[:-len(postfix)], post_getname)
+
+ for prefix in prefixes:
+ if pkg.startswith(prefix):
+ if not prefix in pkgs:
+ pkgs[prefix] = {}
+ pkgs[prefix][pkg] = (pkg[:-len(prefix)], pre_getname)
+
+ if "-dbg" in pkgs:
+ pkglibdeps = read_libdep_files(d)
+ pkglibdeplist = []
+ for pkg in pkglibdeps:
+ for k in pkglibdeps[pkg]:
+ add_dep(pkglibdeplist, k)
+ dbgdefaultdeps = ((d.getVar('DEPCHAIN_DBGDEFAULTDEPS') == '1') or (bb.data.inherits_class('packagegroup', d)))
+
+ for suffix in pkgs:
+ for pkg in pkgs[suffix]:
+ if d.getVarFlag('RRECOMMENDS:' + pkg, 'nodeprrecs'):
+ continue
+ (base, func) = pkgs[suffix][pkg]
+ if suffix == "-dev":
+ pkg_adddeprrecs(pkg, base, suffix, func, depends, d)
+ elif suffix == "-dbg":
+ if not dbgdefaultdeps:
+ pkg_addrrecs(pkg, base, suffix, func, pkglibdeplist, d)
+ continue
+ if len(pkgs[suffix]) == 1:
+ pkg_addrrecs(pkg, base, suffix, func, rdepends, d)
+ else:
+ rdeps = []
+ for dep in bb.utils.explode_deps(d.getVar('RDEPENDS:' + base) or ""):
+ add_dep(rdeps, dep)
+ pkg_addrrecs(pkg, base, suffix, func, rdeps, d)
diff --git a/meta/lib/oe/package_manager/__init__.py b/meta/lib/oe/package_manager/__init__.py
index 80bc1a6bc6..6774cdb794 100644
--- a/meta/lib/oe/package_manager/__init__.py
+++ b/meta/lib/oe/package_manager/__init__.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: GPL-2.0-only
#
@@ -120,7 +122,8 @@ def generate_locale_archive(d, rootfs, target_arch, localedir):
"riscv32": ["--uint32-align=4", "--little-endian"],
"i586": ["--uint32-align=4", "--little-endian"],
"i686": ["--uint32-align=4", "--little-endian"],
- "x86_64": ["--uint32-align=4", "--little-endian"]
+ "x86_64": ["--uint32-align=4", "--little-endian"],
+ "loongarch64": ["--uint32-align=4", "--little-endian"]
}
if target_arch in locale_arch_options:
arch_options = locale_arch_options[target_arch]
@@ -266,7 +269,7 @@ class PackageManager(object, metaclass=ABCMeta):
pass
@abstractmethod
- def install(self, pkgs, attempt_only=False):
+ def install(self, pkgs, attempt_only=False, hard_depends_only=False):
"""
Install a list of packages. 'pkgs' is a list object. If 'attempt_only' is
True, installation failures are ignored.
@@ -396,7 +399,7 @@ class PackageManager(object, metaclass=ABCMeta):
bb.note("Installing complementary packages ... %s (skipped already provided packages %s)" % (
' '.join(install_pkgs),
' '.join(skip_pkgs)))
- self.install(install_pkgs)
+ self.install(install_pkgs, hard_depends_only=True)
except subprocess.CalledProcessError as e:
bb.fatal("Could not compute complementary packages list. Command "
"'%s' returned %d:\n%s" %
@@ -467,7 +470,10 @@ def create_packages_dir(d, subrepo_dir, deploydir, taskname, filterbydependencie
# Detect bitbake -b usage
nodeps = d.getVar("BB_LIMITEDDEPS") or False
if nodeps or not filterbydependencies:
- oe.path.symlink(deploydir, subrepo_dir, True)
+ for arch in d.getVar("ALL_MULTILIB_PACKAGE_ARCHS").split() + d.getVar("ALL_MULTILIB_PACKAGE_ARCHS").replace("-", "_").split():
+ target = os.path.join(deploydir + "/" + arch)
+ if os.path.exists(target):
+ oe.path.symlink(target, subrepo_dir + "/" + arch, True)
return
start = None
diff --git a/meta/lib/oe/package_manager/deb/__init__.py b/meta/lib/oe/package_manager/deb/__init__.py
index 86ddb130ad..0c23c884c1 100644
--- a/meta/lib/oe/package_manager/deb/__init__.py
+++ b/meta/lib/oe/package_manager/deb/__init__.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: GPL-2.0-only
#
@@ -80,15 +82,15 @@ class DpkgIndexer(Indexer):
return
oe.utils.multiprocess_launch(create_index, index_cmds, self.d)
- if self.d.getVar('PACKAGE_FEED_SIGN', True) == '1':
- signer = get_signer(self.d, self.d.getVar('PACKAGE_FEED_GPG_BACKEND', True))
+ if self.d.getVar('PACKAGE_FEED_SIGN') == '1':
+ signer = get_signer(self.d, self.d.getVar('PACKAGE_FEED_GPG_BACKEND'))
else:
signer = None
if signer:
for f in index_sign_files:
signer.detach_sign(f,
- self.d.getVar('PACKAGE_FEED_GPG_NAME', True),
- self.d.getVar('PACKAGE_FEED_GPG_PASSPHRASE_FILE', True),
+ self.d.getVar('PACKAGE_FEED_GPG_NAME'),
+ self.d.getVar('PACKAGE_FEED_GPG_PASSPHRASE_FILE'),
output_suffix="gpg",
use_sha256=True)
@@ -289,14 +291,18 @@ class DpkgPM(OpkgDpkgPM):
self.deploy_dir_unlock()
- def install(self, pkgs, attempt_only=False):
+ def install(self, pkgs, attempt_only=False, hard_depends_only=False):
if attempt_only and len(pkgs) == 0:
return
os.environ['APT_CONFIG'] = self.apt_conf_file
- cmd = "%s %s install --allow-downgrades --allow-remove-essential --allow-change-held-packages --allow-unauthenticated --no-remove %s" % \
- (self.apt_get_cmd, self.apt_args, ' '.join(pkgs))
+ extra_args = ""
+ if hard_depends_only:
+ extra_args = "--no-install-recommends"
+
+ cmd = "%s %s install --allow-downgrades --allow-remove-essential --allow-change-held-packages --allow-unauthenticated --no-remove %s %s" % \
+ (self.apt_get_cmd, self.apt_args, extra_args, ' '.join(pkgs))
try:
bb.note("Installing the following packages: %s" % ' '.join(pkgs))
diff --git a/meta/lib/oe/package_manager/deb/manifest.py b/meta/lib/oe/package_manager/deb/manifest.py
index d8eab24a06..72983bae98 100644
--- a/meta/lib/oe/package_manager/deb/manifest.py
+++ b/meta/lib/oe/package_manager/deb/manifest.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: GPL-2.0-only
#
diff --git a/meta/lib/oe/package_manager/deb/rootfs.py b/meta/lib/oe/package_manager/deb/rootfs.py
index 8fbaca11d6..1e25b64ed9 100644
--- a/meta/lib/oe/package_manager/deb/rootfs.py
+++ b/meta/lib/oe/package_manager/deb/rootfs.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: GPL-2.0-only
#
diff --git a/meta/lib/oe/package_manager/deb/sdk.py b/meta/lib/oe/package_manager/deb/sdk.py
index f4b0b6510a..6f3005053e 100644
--- a/meta/lib/oe/package_manager/deb/sdk.py
+++ b/meta/lib/oe/package_manager/deb/sdk.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: GPL-2.0-only
#
@@ -67,7 +69,12 @@ class PkgSdk(Sdk):
self.target_pm.run_pre_post_installs()
+ env_bkp = os.environ.copy()
+ os.environ['PATH'] = self.d.expand("${COREBASE}/scripts/nativesdk-intercept") + \
+ os.pathsep + os.environ["PATH"]
+
self.target_pm.run_intercepts(populate_sdk='target')
+ os.environ.update(env_bkp)
execute_pre_post_process(self.d, self.d.getVar("POPULATE_SDK_POST_TARGET_COMMAND"))
diff --git a/meta/lib/oe/package_manager/ipk/__init__.py b/meta/lib/oe/package_manager/ipk/__init__.py
index 4cd3963111..8cc9953a02 100644
--- a/meta/lib/oe/package_manager/ipk/__init__.py
+++ b/meta/lib/oe/package_manager/ipk/__init__.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: GPL-2.0-only
#
@@ -14,6 +16,7 @@ class OpkgIndexer(Indexer):
]
opkg_index_cmd = bb.utils.which(os.getenv('PATH'), "opkg-make-index")
+ opkg_index_cmd_extra_params = self.d.getVar('OPKG_MAKE_INDEX_EXTRA_PARAMS') or ""
if self.d.getVar('PACKAGE_FEED_SIGN') == '1':
signer = get_signer(self.d, self.d.getVar('PACKAGE_FEED_GPG_BACKEND'))
else:
@@ -39,8 +42,8 @@ class OpkgIndexer(Indexer):
if not os.path.exists(pkgs_file):
open(pkgs_file, "w").close()
- index_cmds.add('%s --checksum md5 --checksum sha256 -r %s -p %s -m %s' %
- (opkg_index_cmd, pkgs_file, pkgs_file, pkgs_dir))
+ index_cmds.add('%s --checksum md5 --checksum sha256 -r %s -p %s -m %s %s' %
+ (opkg_index_cmd, pkgs_file, pkgs_file, pkgs_dir, opkg_index_cmd_extra_params))
index_sign_files.add(pkgs_file)
@@ -102,12 +105,14 @@ class OpkgDpkgPM(PackageManager):
This method extracts the common parts for Opkg and Dpkg
"""
- try:
- output = subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True).decode("utf-8")
- except subprocess.CalledProcessError as e:
+ proc = subprocess.run(cmd, capture_output=True, encoding="utf-8", shell=True)
+ if proc.returncode:
bb.fatal("Unable to list available packages. Command '%s' "
- "returned %d:\n%s" % (cmd, e.returncode, e.output.decode("utf-8")))
- return opkg_query(output)
+ "returned %d:\n%s" % (cmd, proc.returncode, proc.stderr))
+ elif proc.stderr:
+ bb.note("Command '%s' returned stderr: %s" % (cmd, proc.stderr))
+
+ return opkg_query(proc.stdout)
def extract(self, pkg, pkg_info):
"""
@@ -129,7 +134,7 @@ class OpkgDpkgPM(PackageManager):
tmp_dir = tempfile.mkdtemp()
current_dir = os.getcwd()
os.chdir(tmp_dir)
- data_tar = 'data.tar.xz'
+ data_tar = 'data.tar.zst'
try:
cmd = [ar_cmd, 'x', pkg_path]
@@ -243,7 +248,7 @@ class OpkgPM(OpkgDpkgPM):
"""
if (self.d.getVar('FEED_DEPLOYDIR_BASE_URI') or "") != "":
for arch in self.pkg_archs.split():
- cfg_file_name = os.path.join(self.target_rootfs,
+ cfg_file_name = oe.path.join(self.target_rootfs,
self.d.getVar("sysconfdir"),
"opkg",
"local-%s-feed.conf" % arch)
@@ -337,7 +342,7 @@ class OpkgPM(OpkgDpkgPM):
self.deploy_dir_unlock()
- def install(self, pkgs, attempt_only=False):
+ def install(self, pkgs, attempt_only=False, hard_depends_only=False):
if not pkgs:
return
@@ -346,6 +351,8 @@ class OpkgPM(OpkgDpkgPM):
cmd += " --add-exclude %s" % exclude
for bad_recommendation in (self.d.getVar("BAD_RECOMMENDATIONS") or "").split():
cmd += " --add-ignore-recommends %s" % bad_recommendation
+ if hard_depends_only:
+ cmd += " --no-install-recommends"
cmd += " install "
cmd += " ".join(pkgs)
@@ -443,15 +450,16 @@ class OpkgPM(OpkgDpkgPM):
cmd = "%s %s --noaction install %s " % (self.opkg_cmd,
opkg_args,
' '.join(pkgs))
- try:
- output = subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True)
- except subprocess.CalledProcessError as e:
+ proc = subprocess.run(cmd, capture_output=True, encoding="utf-8", shell=True)
+ if proc.returncode:
bb.fatal("Unable to dummy install packages. Command '%s' "
- "returned %d:\n%s" % (cmd, e.returncode, e.output.decode("utf-8")))
+ "returned %d:\n%s" % (cmd, proc.returncode, proc.stderr))
+ elif proc.stderr:
+ bb.note("Command '%s' returned stderr: %s" % (cmd, proc.stderr))
bb.utils.remove(temp_rootfs, True)
- return output
+ return proc.stdout
def backup_packaging_data(self):
# Save the opkglib for increment ipk image generation
@@ -498,6 +506,6 @@ class OpkgPM(OpkgDpkgPM):
"trying to extract the package." % pkg)
tmp_dir = super(OpkgPM, self).extract(pkg, pkg_info)
- bb.utils.remove(os.path.join(tmp_dir, "data.tar.xz"))
+ bb.utils.remove(os.path.join(tmp_dir, "data.tar.zst"))
return tmp_dir
diff --git a/meta/lib/oe/package_manager/ipk/manifest.py b/meta/lib/oe/package_manager/ipk/manifest.py
index ae451c5c70..3549d7428d 100644
--- a/meta/lib/oe/package_manager/ipk/manifest.py
+++ b/meta/lib/oe/package_manager/ipk/manifest.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: GPL-2.0-only
#
@@ -62,7 +64,7 @@ class PkgManifest(Manifest):
if len(pkgs_to_install) == 0:
return
- output = pm.dummy_install(pkgs_to_install).decode('utf-8')
+ output = pm.dummy_install(pkgs_to_install)
with open(self.full_manifest, 'w+') as manifest:
pkg_re = re.compile('^Installing ([^ ]+) [^ ].*')
diff --git a/meta/lib/oe/package_manager/ipk/rootfs.py b/meta/lib/oe/package_manager/ipk/rootfs.py
index 10a831994e..ba93eb62ea 100644
--- a/meta/lib/oe/package_manager/ipk/rootfs.py
+++ b/meta/lib/oe/package_manager/ipk/rootfs.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: GPL-2.0-only
#
@@ -163,7 +165,7 @@ class PkgRootfs(DpkgOpkgRootfs):
"""
def _multilib_sanity_test(self, dirs):
- allow_replace = self.d.getVar("MULTILIBRE_ALLOW_REP")
+ allow_replace = "|".join((self.d.getVar("MULTILIBRE_ALLOW_REP") or "").split())
if allow_replace is None:
allow_replace = ""
diff --git a/meta/lib/oe/package_manager/ipk/sdk.py b/meta/lib/oe/package_manager/ipk/sdk.py
index e2ca415c8e..3acd55f548 100644
--- a/meta/lib/oe/package_manager/ipk/sdk.py
+++ b/meta/lib/oe/package_manager/ipk/sdk.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: GPL-2.0-only
#
@@ -61,12 +63,19 @@ class PkgSdk(Sdk):
self.target_pm.install_complementary(self.d.getVar('SDKIMAGE_INSTALL_COMPLEMENTARY'))
+ env_bkp = os.environ.copy()
+ os.environ['PATH'] = self.d.expand("${COREBASE}/scripts/nativesdk-intercept") + \
+ os.pathsep + os.environ["PATH"]
+
self.target_pm.run_intercepts(populate_sdk='target')
+ os.environ.update(env_bkp)
execute_pre_post_process(self.d, self.d.getVar("POPULATE_SDK_POST_TARGET_COMMAND"))
if not bb.utils.contains("SDKIMAGE_FEATURES", "package-management", True, False, self.d):
self.target_pm.remove_packaging_data()
+ else:
+ self.target_pm.remove_lists()
bb.note("Installing NATIVESDK packages")
self._populate_sysroot(self.host_pm, self.host_manifest)
@@ -78,6 +87,8 @@ class PkgSdk(Sdk):
if not bb.utils.contains("SDKIMAGE_FEATURES", "package-management", True, False, self.d):
self.host_pm.remove_packaging_data()
+ else:
+ self.host_pm.remove_lists()
target_sysconfdir = os.path.join(self.sdk_target_sysroot, self.sysconfdir)
host_sysconfdir = os.path.join(self.sdk_host_sysroot, self.sysconfdir)
diff --git a/meta/lib/oe/package_manager/rpm/__init__.py b/meta/lib/oe/package_manager/rpm/__init__.py
index b392581069..f40c880af4 100644
--- a/meta/lib/oe/package_manager/rpm/__init__.py
+++ b/meta/lib/oe/package_manager/rpm/__init__.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: GPL-2.0-only
#
@@ -96,11 +98,15 @@ class RpmPM(PackageManager):
archs = ["sdk_provides_dummy_target"] + archs
confdir = "%s/%s" %(self.target_rootfs, "etc/dnf/vars/")
bb.utils.mkdirhier(confdir)
- open(confdir + "arch", 'w').write(":".join(archs))
+ with open(confdir + "arch", 'w') as f:
+ f.write(":".join(archs))
+
distro_codename = self.d.getVar('DISTRO_CODENAME')
- open(confdir + "releasever", 'w').write(distro_codename if distro_codename is not None else '')
+ with open(confdir + "releasever", 'w') as f:
+ f.write(distro_codename if distro_codename is not None else '')
- open(oe.path.join(self.target_rootfs, "etc/dnf/dnf.conf"), 'w').write("")
+ with open(oe.path.join(self.target_rootfs, "etc/dnf/dnf.conf"), 'w') as f:
+ f.write("")
def _configure_rpm(self):
@@ -110,14 +116,17 @@ class RpmPM(PackageManager):
platformconfdir = "%s/%s" %(self.target_rootfs, "etc/rpm/")
rpmrcconfdir = "%s/%s" %(self.target_rootfs, "etc/")
bb.utils.mkdirhier(platformconfdir)
- open(platformconfdir + "platform", 'w').write("%s-pc-linux" % self.primary_arch)
+ with open(platformconfdir + "platform", 'w') as f:
+ f.write("%s-pc-linux" % self.primary_arch)
with open(rpmrcconfdir + "rpmrc", 'w') as f:
f.write("arch_compat: %s: %s\n" % (self.primary_arch, self.archs if len(self.archs) > 0 else self.primary_arch))
f.write("buildarch_compat: %s: noarch\n" % self.primary_arch)
- open(platformconfdir + "macros", 'w').write("%_transaction_color 7\n")
+ with open(platformconfdir + "macros", 'w') as f:
+ f.write("%_transaction_color 7\n")
if self.d.getVar('RPM_PREFER_ELF_ARCH'):
- open(platformconfdir + "macros", 'a').write("%%_prefer_color %s" % (self.d.getVar('RPM_PREFER_ELF_ARCH')))
+ with open(platformconfdir + "macros", 'a') as f:
+ f.write("%%_prefer_color %s" % (self.d.getVar('RPM_PREFER_ELF_ARCH')))
if self.d.getVar('RPM_SIGN_PACKAGES') == '1':
signer = get_signer(self.d, self.d.getVar('RPM_GPG_BACKEND'))
@@ -164,13 +173,13 @@ class RpmPM(PackageManager):
repo_uri = uri + "/" + arch
repo_id = "oe-remote-repo" + "-".join(urlparse(repo_uri).path.split("/"))
repo_name = "OE Remote Repo:" + " ".join(urlparse(repo_uri).path.split("/"))
- open(oe.path.join(self.target_rootfs, "etc", "yum.repos.d", repo_base + ".repo"), 'a').write(
- "[%s]\nname=%s\nbaseurl=%s\n%s\n" % (repo_id, repo_name, repo_uri, gpg_opts))
+ with open(oe.path.join(self.target_rootfs, "etc", "yum.repos.d", repo_base + ".repo"), 'a') as f:
+ f.write("[%s]\nname=%s\nbaseurl=%s\n%s\n" % (repo_id, repo_name, repo_uri, gpg_opts))
else:
repo_name = "OE Remote Repo:" + " ".join(urlparse(uri).path.split("/"))
repo_uri = uri
- open(oe.path.join(self.target_rootfs, "etc", "yum.repos.d", repo_base + ".repo"), 'w').write(
- "[%s]\nname=%s\nbaseurl=%s\n%s" % (repo_base, repo_name, repo_uri, gpg_opts))
+ with open(oe.path.join(self.target_rootfs, "etc", "yum.repos.d", repo_base + ".repo"), 'w') as f:
+ f.write("[%s]\nname=%s\nbaseurl=%s\n%s" % (repo_base, repo_name, repo_uri, gpg_opts))
def _prepare_pkg_transaction(self):
os.environ['D'] = self.target_rootfs
@@ -181,7 +190,7 @@ class RpmPM(PackageManager):
os.environ['NATIVE_ROOT'] = self.d.getVar('STAGING_DIR_NATIVE')
- def install(self, pkgs, attempt_only = False):
+ def install(self, pkgs, attempt_only=False, hard_depends_only=False):
if len(pkgs) == 0:
return
self._prepare_pkg_transaction()
@@ -192,7 +201,7 @@ class RpmPM(PackageManager):
output = self._invoke_dnf((["--skip-broken"] if attempt_only else []) +
(["-x", ",".join(exclude_pkgs)] if len(exclude_pkgs) > 0 else []) +
- (["--setopt=install_weak_deps=False"] if self.d.getVar('NO_RECOMMENDATIONS') == "1" else []) +
+ (["--setopt=install_weak_deps=False"] if (hard_depends_only or self.d.getVar('NO_RECOMMENDATIONS') == "1") else []) +
(["--nogpgcheck"] if self.d.getVar('RPM_SIGN_PACKAGES') != '1' else ["--setopt=gpgcheck=True"]) +
["install"] +
pkgs)
@@ -329,7 +338,8 @@ class RpmPM(PackageManager):
return e.output.decode("utf-8")
def dump_install_solution(self, pkgs):
- open(self.solution_manifest, 'w').write(" ".join(pkgs))
+ with open(self.solution_manifest, 'w') as f:
+ f.write(" ".join(pkgs))
return pkgs
def load_old_install_solution(self):
@@ -363,7 +373,8 @@ class RpmPM(PackageManager):
bb.utils.mkdirhier(target_path)
num = self._script_num_prefix(target_path)
saved_script_name = oe.path.join(target_path, "%d-%s" % (num, pkg))
- open(saved_script_name, 'w').write(output)
+ with open(saved_script_name, 'w') as f:
+ f.write(output)
os.chmod(saved_script_name, 0o755)
def _handle_intercept_failure(self, registered_pkgs):
@@ -375,11 +386,12 @@ class RpmPM(PackageManager):
self.save_rpmpostinst(pkg)
def extract(self, pkg):
- output = self._invoke_dnf(["repoquery", "--queryformat", "%{location}", pkg])
+ output = self._invoke_dnf(["repoquery", "--location", pkg])
pkg_name = output.splitlines()[-1]
if not pkg_name.endswith(".rpm"):
bb.fatal("dnf could not find package %s in repository: %s" %(pkg, output))
- pkg_path = oe.path.join(self.rpm_repo_dir, pkg_name)
+ # Strip file: prefix
+ pkg_path = pkg_name[5:]
cpio_cmd = bb.utils.which(os.getenv("PATH"), "cpio")
rpm2cpio_cmd = bb.utils.which(os.getenv("PATH"), "rpm2cpio")
diff --git a/meta/lib/oe/package_manager/rpm/manifest.py b/meta/lib/oe/package_manager/rpm/manifest.py
index e6604b301f..6ee7c329f0 100644
--- a/meta/lib/oe/package_manager/rpm/manifest.py
+++ b/meta/lib/oe/package_manager/rpm/manifest.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: GPL-2.0-only
#
diff --git a/meta/lib/oe/package_manager/rpm/rootfs.py b/meta/lib/oe/package_manager/rpm/rootfs.py
index 00d07cd9cc..3ba5396320 100644
--- a/meta/lib/oe/package_manager/rpm/rootfs.py
+++ b/meta/lib/oe/package_manager/rpm/rootfs.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: GPL-2.0-only
#
@@ -108,7 +110,7 @@ class PkgRootfs(Rootfs):
if self.progress_reporter:
self.progress_reporter.next_stage()
- self._setup_dbg_rootfs(['/etc', '/var/lib/rpm', '/var/cache/dnf', '/var/lib/dnf'])
+ self._setup_dbg_rootfs(['/etc/rpm', '/etc/rpmrc', '/etc/dnf', '/var/lib/rpm', '/var/cache/dnf', '/var/lib/dnf'])
execute_pre_post_process(self.d, rpm_post_process_cmds)
diff --git a/meta/lib/oe/package_manager/rpm/sdk.py b/meta/lib/oe/package_manager/rpm/sdk.py
index c5f232431f..ea79fe050b 100644
--- a/meta/lib/oe/package_manager/rpm/sdk.py
+++ b/meta/lib/oe/package_manager/rpm/sdk.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: GPL-2.0-only
#
@@ -65,7 +67,12 @@ class PkgSdk(Sdk):
self.target_pm.install_complementary(self.d.getVar('SDKIMAGE_INSTALL_COMPLEMENTARY'))
+ env_bkp = os.environ.copy()
+ os.environ['PATH'] = self.d.expand("${COREBASE}/scripts/nativesdk-intercept") + \
+ os.pathsep + os.environ["PATH"]
+
self.target_pm.run_intercepts(populate_sdk='target')
+ os.environ.update(env_bkp)
execute_pre_post_process(self.d, self.d.getVar("POPULATE_SDK_POST_TARGET_COMMAND"))
@@ -110,5 +117,6 @@ class PkgSdk(Sdk):
for f in glob.glob(os.path.join(self.sdk_output, "etc", "rpm*")):
self.movefile(f, native_sysconf_dir)
for f in glob.glob(os.path.join(self.sdk_output, "etc", "dnf", "*")):
- self.movefile(f, native_sysconf_dir)
+ self.mkdirhier(native_sysconf_dir + "/dnf")
+ self.movefile(f, native_sysconf_dir + "/dnf")
self.remove(os.path.join(self.sdk_output, "etc"), True)
diff --git a/meta/lib/oe/packagedata.py b/meta/lib/oe/packagedata.py
index 212f048bc6..2d1d6ddeb7 100644
--- a/meta/lib/oe/packagedata.py
+++ b/meta/lib/oe/packagedata.py
@@ -1,9 +1,16 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: GPL-2.0-only
#
import codecs
import os
+import json
+import bb.compress.zstd
+import oe.path
+
+from glob import glob
def packaged(pkg, d):
return os.access(get_subpkgedata_fn(pkg, d) + '.packaged', os.R_OK)
@@ -108,3 +115,252 @@ def recipename(pkg, d):
"""Return the recipe name for the given binary package name."""
return pkgmap(d).get(pkg)
+
+def foreach_runtime_provider_pkgdata(d, rdep, include_rdep=False):
+ pkgdata_dir = d.getVar("PKGDATA_DIR")
+ possibles = set()
+ try:
+ possibles |= set(os.listdir("%s/runtime-rprovides/%s/" % (pkgdata_dir, rdep)))
+ except OSError:
+ pass
+
+ if include_rdep:
+ possibles.add(rdep)
+
+ for p in sorted(list(possibles)):
+ rdep_data = read_subpkgdata(p, d)
+ yield p, rdep_data
+
+def get_package_mapping(pkg, basepkg, d, depversions=None):
+ import oe.packagedata
+
+ data = oe.packagedata.read_subpkgdata(pkg, d)
+ key = "PKG:%s" % pkg
+
+ if key in data:
+ if bb.data.inherits_class('allarch', d) and bb.data.inherits_class('packagegroup', d) and pkg != data[key]:
+ bb.error("An allarch packagegroup shouldn't depend on packages which are dynamically renamed (%s to %s)" % (pkg, data[key]))
+ # Have to avoid undoing the write_extra_pkgs(global_variants...)
+ if bb.data.inherits_class('allarch', d) and not d.getVar('MULTILIB_VARIANTS') \
+ and data[key] == basepkg:
+ return pkg
+ if depversions == []:
+ # Avoid returning a mapping if the renamed package rprovides its original name
+ rprovkey = "RPROVIDES:%s" % pkg
+ if rprovkey in data:
+ if pkg in bb.utils.explode_dep_versions2(data[rprovkey]):
+ bb.note("%s rprovides %s, not replacing the latter" % (data[key], pkg))
+ return pkg
+ # Do map to rewritten package name
+ return data[key]
+
+ return pkg
+
+def get_package_additional_metadata(pkg_type, d):
+ base_key = "PACKAGE_ADD_METADATA"
+ for key in ("%s_%s" % (base_key, pkg_type.upper()), base_key):
+ if d.getVar(key, False) is None:
+ continue
+ d.setVarFlag(key, "type", "list")
+ if d.getVarFlag(key, "separator") is None:
+ d.setVarFlag(key, "separator", "\\n")
+ metadata_fields = [field.strip() for field in oe.data.typed_value(key, d)]
+ return "\n".join(metadata_fields).strip()
+
+def runtime_mapping_rename(varname, pkg, d):
+ #bb.note("%s before: %s" % (varname, d.getVar(varname)))
+
+ new_depends = {}
+ deps = bb.utils.explode_dep_versions2(d.getVar(varname) or "")
+ for depend, depversions in deps.items():
+ new_depend = get_package_mapping(depend, pkg, d, depversions)
+ if depend != new_depend:
+ bb.note("package name mapping done: %s -> %s" % (depend, new_depend))
+ new_depends[new_depend] = deps[depend]
+
+ d.setVar(varname, bb.utils.join_deps(new_depends, commasep=False))
+
+ #bb.note("%s after: %s" % (varname, d.getVar(varname)))
+
+def emit_pkgdata(pkgfiles, d):
+ def process_postinst_on_target(pkg, mlprefix):
+ pkgval = d.getVar('PKG:%s' % pkg)
+ if pkgval is None:
+ pkgval = pkg
+
+ defer_fragment = """
+if [ -n "$D" ]; then
+ $INTERCEPT_DIR/postinst_intercept delay_to_first_boot %s mlprefix=%s
+ exit 0
+fi
+""" % (pkgval, mlprefix)
+
+ postinst = d.getVar('pkg_postinst:%s' % pkg)
+ postinst_ontarget = d.getVar('pkg_postinst_ontarget:%s' % pkg)
+
+ if postinst_ontarget:
+ bb.debug(1, 'adding deferred pkg_postinst_ontarget() to pkg_postinst() for %s' % pkg)
+ if not postinst:
+ postinst = '#!/bin/sh\n'
+ postinst += defer_fragment
+ postinst += postinst_ontarget
+ d.setVar('pkg_postinst:%s' % pkg, postinst)
+
+ def add_set_e_to_scriptlets(pkg):
+ for scriptlet_name in ('pkg_preinst', 'pkg_postinst', 'pkg_prerm', 'pkg_postrm'):
+ scriptlet = d.getVar('%s:%s' % (scriptlet_name, pkg))
+ if scriptlet:
+ scriptlet_split = scriptlet.split('\n')
+ if scriptlet_split[0].startswith("#!"):
+ scriptlet = scriptlet_split[0] + "\nset -e\n" + "\n".join(scriptlet_split[1:])
+ else:
+ scriptlet = "set -e\n" + "\n".join(scriptlet_split[0:])
+ d.setVar('%s:%s' % (scriptlet_name, pkg), scriptlet)
+
+ def write_if_exists(f, pkg, var):
+ def encode(str):
+ import codecs
+ c = codecs.getencoder("unicode_escape")
+ return c(str)[0].decode("latin1")
+
+ val = d.getVar('%s:%s' % (var, pkg))
+ if val:
+ f.write('%s:%s: %s\n' % (var, pkg, encode(val)))
+ return val
+ val = d.getVar('%s' % (var))
+ if val:
+ f.write('%s: %s\n' % (var, encode(val)))
+ return val
+
+ def write_extra_pkgs(variants, pn, packages, pkgdatadir):
+ for variant in variants:
+ with open("%s/%s-%s" % (pkgdatadir, variant, pn), 'w') as fd:
+ fd.write("PACKAGES: %s\n" % ' '.join(
+ map(lambda pkg: '%s-%s' % (variant, pkg), packages.split())))
+
+ def write_extra_runtime_pkgs(variants, packages, pkgdatadir):
+ for variant in variants:
+ for pkg in packages.split():
+ ml_pkg = "%s-%s" % (variant, pkg)
+ subdata_file = "%s/runtime/%s" % (pkgdatadir, ml_pkg)
+ with open(subdata_file, 'w') as fd:
+ fd.write("PKG:%s: %s" % (ml_pkg, pkg))
+
+ packages = d.getVar('PACKAGES')
+ pkgdest = d.getVar('PKGDEST')
+ pkgdatadir = d.getVar('PKGDESTWORK')
+
+ data_file = pkgdatadir + d.expand("/${PN}")
+ with open(data_file, 'w') as fd:
+ fd.write("PACKAGES: %s\n" % packages)
+
+ pkgdebugsource = d.getVar("PKGDEBUGSOURCES") or []
+
+ pn = d.getVar('PN')
+ global_variants = (d.getVar('MULTILIB_GLOBAL_VARIANTS') or "").split()
+ variants = (d.getVar('MULTILIB_VARIANTS') or "").split()
+
+ if bb.data.inherits_class('kernel', d) or bb.data.inherits_class('module-base', d):
+ write_extra_pkgs(variants, pn, packages, pkgdatadir)
+
+ if bb.data.inherits_class('allarch', d) and not variants \
+ and not bb.data.inherits_class('packagegroup', d):
+ write_extra_pkgs(global_variants, pn, packages, pkgdatadir)
+
+ workdir = d.getVar('WORKDIR')
+
+ for pkg in packages.split():
+ pkgval = d.getVar('PKG:%s' % pkg)
+ if pkgval is None:
+ pkgval = pkg
+ d.setVar('PKG:%s' % pkg, pkg)
+
+ extended_data = {
+ "files_info": {}
+ }
+
+ pkgdestpkg = os.path.join(pkgdest, pkg)
+ files = {}
+ files_extra = {}
+ total_size = 0
+ seen = set()
+ for f in pkgfiles[pkg]:
+ fpath = os.sep + os.path.relpath(f, pkgdestpkg)
+
+ fstat = os.lstat(f)
+ files[fpath] = fstat.st_size
+
+ extended_data["files_info"].setdefault(fpath, {})
+ extended_data["files_info"][fpath]['size'] = fstat.st_size
+
+ if fstat.st_ino not in seen:
+ seen.add(fstat.st_ino)
+ total_size += fstat.st_size
+
+ if fpath in pkgdebugsource:
+ extended_data["files_info"][fpath]['debugsrc'] = pkgdebugsource[fpath]
+ del pkgdebugsource[fpath]
+
+ d.setVar('FILES_INFO:' + pkg , json.dumps(files, sort_keys=True))
+
+ process_postinst_on_target(pkg, d.getVar("MLPREFIX"))
+ add_set_e_to_scriptlets(pkg)
+
+ subdata_file = pkgdatadir + "/runtime/%s" % pkg
+ with open(subdata_file, 'w') as sf:
+ for var in (d.getVar('PKGDATA_VARS') or "").split():
+ val = write_if_exists(sf, pkg, var)
+
+ write_if_exists(sf, pkg, 'FILERPROVIDESFLIST')
+ for dfile in sorted((d.getVar('FILERPROVIDESFLIST:' + pkg) or "").split()):
+ write_if_exists(sf, pkg, 'FILERPROVIDES:' + dfile)
+
+ write_if_exists(sf, pkg, 'FILERDEPENDSFLIST')
+ for dfile in sorted((d.getVar('FILERDEPENDSFLIST:' + pkg) or "").split()):
+ write_if_exists(sf, pkg, 'FILERDEPENDS:' + dfile)
+
+ sf.write('%s:%s: %d\n' % ('PKGSIZE', pkg, total_size))
+
+ subdata_extended_file = pkgdatadir + "/extended/%s.json.zstd" % pkg
+ num_threads = int(d.getVar("BB_NUMBER_THREADS"))
+ with bb.compress.zstd.open(subdata_extended_file, "wt", encoding="utf-8", num_threads=num_threads) as f:
+ json.dump(extended_data, f, sort_keys=True, separators=(",", ":"))
+
+ # Symlinks needed for rprovides lookup
+ rprov = d.getVar('RPROVIDES:%s' % pkg) or d.getVar('RPROVIDES')
+ if rprov:
+ for p in bb.utils.explode_deps(rprov):
+ subdata_sym = pkgdatadir + "/runtime-rprovides/%s/%s" % (p, pkg)
+ bb.utils.mkdirhier(os.path.dirname(subdata_sym))
+ oe.path.relsymlink(subdata_file, subdata_sym, True)
+
+ allow_empty = d.getVar('ALLOW_EMPTY:%s' % pkg)
+ if not allow_empty:
+ allow_empty = d.getVar('ALLOW_EMPTY')
+ root = "%s/%s" % (pkgdest, pkg)
+ os.chdir(root)
+ g = glob('*')
+ if g or allow_empty == "1":
+ # Symlinks needed for reverse lookups (from the final package name)
+ subdata_sym = pkgdatadir + "/runtime-reverse/%s" % pkgval
+ oe.path.relsymlink(subdata_file, subdata_sym, True)
+
+ packagedfile = pkgdatadir + '/runtime/%s.packaged' % pkg
+ open(packagedfile, 'w').close()
+
+ if bb.data.inherits_class('kernel', d) or bb.data.inherits_class('module-base', d):
+ write_extra_runtime_pkgs(variants, packages, pkgdatadir)
+
+ if bb.data.inherits_class('allarch', d) and not variants \
+ and not bb.data.inherits_class('packagegroup', d):
+ write_extra_runtime_pkgs(global_variants, packages, pkgdatadir)
+
+def mapping_rename_hook(d):
+ """
+ Rewrite variables to account for package renaming in things
+ like debian.bbclass or manual PKG variable name changes
+ """
+ pkg = d.getVar("PKG")
+ oe.packagedata.runtime_mapping_rename("RDEPENDS", pkg, d)
+ oe.packagedata.runtime_mapping_rename("RRECOMMENDS", pkg, d)
+ oe.packagedata.runtime_mapping_rename("RSUGGESTS", pkg, d)
diff --git a/meta/lib/oe/packagegroup.py b/meta/lib/oe/packagegroup.py
index 8fcaecde82..7b7594751a 100644
--- a/meta/lib/oe/packagegroup.py
+++ b/meta/lib/oe/packagegroup.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: GPL-2.0-only
#
diff --git a/meta/lib/oe/patch.py b/meta/lib/oe/patch.py
index 9034fcae03..60a0cc8291 100644
--- a/meta/lib/oe/patch.py
+++ b/meta/lib/oe/patch.py
@@ -1,10 +1,14 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: GPL-2.0-only
#
+import os
+import shlex
+import subprocess
import oe.path
import oe.types
-import subprocess
class NotFoundError(bb.BBHandledException):
def __init__(self, path):
@@ -25,8 +29,6 @@ class CmdError(bb.BBHandledException):
def runcmd(args, dir = None):
- import pipes
-
if dir:
olddir = os.path.abspath(os.curdir)
if not os.path.exists(dir):
@@ -35,7 +37,7 @@ def runcmd(args, dir = None):
# print("cwd: %s -> %s" % (olddir, dir))
try:
- args = [ pipes.quote(str(arg)) for arg in args ]
+ args = [ shlex.quote(str(arg)) for arg in args ]
cmd = " ".join(args)
# print("cmd: %s" % cmd)
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
@@ -215,7 +217,7 @@ class PatchTree(PatchSet):
with open(self.seriespath, 'w') as f:
for p in patches:
f.write(p)
-
+
def Import(self, patch, force = None):
""""""
PatchSet.Import(self, patch, force)
@@ -292,17 +294,18 @@ class PatchTree(PatchSet):
self.Pop(all=True)
class GitApplyTree(PatchTree):
- patch_line_prefix = '%% original patch'
- ignore_commit_prefix = '%% ignore'
+ notes_ref = "refs/notes/devtool"
+ original_patch = 'original patch'
+ ignore_commit = 'ignore'
def __init__(self, dir, d):
PatchTree.__init__(self, dir, d)
self.commituser = d.getVar('PATCH_GIT_USER_NAME')
self.commitemail = d.getVar('PATCH_GIT_USER_EMAIL')
- if not self._isInitialized():
+ if not self._isInitialized(d):
self._initRepo()
- def _isInitialized(self):
+ def _isInitialized(self, d):
cmd = "git rev-parse --show-toplevel"
try:
output = runcmd(cmd.split(), self.dir).strip()
@@ -310,8 +313,8 @@ class GitApplyTree(PatchTree):
## runcmd returned non-zero which most likely means 128
## Not a git directory
return False
- ## Make sure repo is in builddir to not break top-level git repos
- return os.path.samefile(output, self.dir)
+ ## Make sure repo is in builddir to not break top-level git repos, or under workdir
+ return os.path.samefile(output, self.dir) or oe.path.is_path_parent(d.getVar('WORKDIR'), output)
def _initRepo(self):
runcmd("git init".split(), self.dir)
@@ -450,7 +453,7 @@ class GitApplyTree(PatchTree):
# Prepare git command
cmd = ["git"]
GitApplyTree.gitCommandUserOptions(cmd, commituser, commitemail)
- cmd += ["commit", "-F", tmpfile]
+ cmd += ["commit", "-F", tmpfile, "--no-verify"]
# git doesn't like plain email addresses as authors
if author and '<' in author:
cmd.append('--author="%s"' % author)
@@ -459,44 +462,131 @@ class GitApplyTree(PatchTree):
return (tmpfile, cmd)
@staticmethod
- def extractPatches(tree, startcommit, outdir, paths=None):
+ def addNote(repo, ref, key, value=None):
+ note = key + (": %s" % value if value else "")
+ notes_ref = GitApplyTree.notes_ref
+ runcmd(["git", "config", "notes.rewriteMode", "ignore"], repo)
+ runcmd(["git", "config", "notes.displayRef", notes_ref, notes_ref], repo)
+ runcmd(["git", "config", "notes.rewriteRef", notes_ref, notes_ref], repo)
+ runcmd(["git", "notes", "--ref", notes_ref, "append", "-m", note, ref], repo)
+
+ @staticmethod
+ def removeNote(repo, ref, key):
+ notes = GitApplyTree.getNotes(repo, ref)
+ notes = {k: v for k, v in notes.items() if k != key and not k.startswith(key + ":")}
+ runcmd(["git", "notes", "--ref", GitApplyTree.notes_ref, "remove", "--ignore-missing", ref], repo)
+ for note, value in notes.items():
+ GitApplyTree.addNote(repo, ref, note, value)
+
+ @staticmethod
+ def getNotes(repo, ref):
+ import re
+
+ note = None
+ try:
+ note = runcmd(["git", "notes", "--ref", GitApplyTree.notes_ref, "show", ref], repo)
+ prefix = ""
+ except CmdError:
+ note = runcmd(['git', 'show', '-s', '--format=%B', ref], repo)
+ prefix = "%% "
+
+ note_re = re.compile(r'^%s(.*?)(?::\s*(.*))?$' % prefix)
+ notes = dict()
+ for line in note.splitlines():
+ m = note_re.match(line)
+ if m:
+ notes[m.group(1)] = m.group(2)
+
+ return notes
+
+ @staticmethod
+ def commitIgnored(subject, dir=None, files=None, d=None):
+ if files:
+ runcmd(['git', 'add'] + files, dir)
+ cmd = ["git"]
+ GitApplyTree.gitCommandUserOptions(cmd, d=d)
+ cmd += ["commit", "-m", subject, "--no-verify"]
+ runcmd(cmd, dir)
+ GitApplyTree.addNote(dir, "HEAD", GitApplyTree.ignore_commit)
+
+ @staticmethod
+ def extractPatches(tree, startcommits, outdir, paths=None):
import tempfile
import shutil
tempdir = tempfile.mkdtemp(prefix='oepatch')
try:
- shellcmd = ["git", "format-patch", "--no-signature", "--no-numbered", startcommit, "-o", tempdir]
- if paths:
- shellcmd.append('--')
- shellcmd.extend(paths)
- out = runcmd(["sh", "-c", " ".join(shellcmd)], tree)
- if out:
- for srcfile in out.split():
- for encoding in ['utf-8', 'latin-1']:
- patchlines = []
- outfile = None
- try:
- with open(srcfile, 'r', encoding=encoding) as f:
- for line in f:
- if line.startswith(GitApplyTree.patch_line_prefix):
- outfile = line.split()[-1].strip()
- continue
- if line.startswith(GitApplyTree.ignore_commit_prefix):
- continue
- patchlines.append(line)
- except UnicodeDecodeError:
+ for name, rev in startcommits.items():
+ shellcmd = ["git", "format-patch", "--no-signature", "--no-numbered", rev, "-o", tempdir]
+ if paths:
+ shellcmd.append('--')
+ shellcmd.extend(paths)
+ out = runcmd(["sh", "-c", " ".join(shellcmd)], os.path.join(tree, name))
+ if out:
+ for srcfile in out.split():
+ # This loop, which is used to remove any line that
+ # starts with "%% original patch", is kept for backwards
+ # compatibility. If/when that compatibility is dropped,
+ # it can be replaced with code to just read the first
+ # line of the patch file to get the SHA-1, and the code
+ # below that writes the modified patch file can be
+ # replaced with a simple file move.
+ for encoding in ['utf-8', 'latin-1']:
+ patchlines = []
+ try:
+ with open(srcfile, 'r', encoding=encoding, newline='') as f:
+ for line in f:
+ if line.startswith("%% " + GitApplyTree.original_patch):
+ continue
+ patchlines.append(line)
+ except UnicodeDecodeError:
+ continue
+ break
+ else:
+ raise PatchError('Unable to find a character encoding to decode %s' % srcfile)
+
+ sha1 = patchlines[0].split()[1]
+ notes = GitApplyTree.getNotes(os.path.join(tree, name), sha1)
+ if GitApplyTree.ignore_commit in notes:
continue
- break
- else:
- raise PatchError('Unable to find a character encoding to decode %s' % srcfile)
-
- if not outfile:
- outfile = os.path.basename(srcfile)
- with open(os.path.join(outdir, outfile), 'w') as of:
- for line in patchlines:
- of.write(line)
+ outfile = notes.get(GitApplyTree.original_patch, os.path.basename(srcfile))
+
+ bb.utils.mkdirhier(os.path.join(outdir, name))
+ with open(os.path.join(outdir, name, outfile), 'w') as of:
+ for line in patchlines:
+ of.write(line)
finally:
shutil.rmtree(tempdir)
+ def _need_dirty_check(self):
+ fetch = bb.fetch2.Fetch([], self.d)
+ check_dirtyness = False
+ for url in fetch.urls:
+ url_data = fetch.ud[url]
+ parm = url_data.parm
+ # a git url with subpath param will surely be dirty
+ # since the git tree from which we clone will be emptied
+ # from all files that are not in the subpath
+ if url_data.type == 'git' and parm.get('subpath'):
+ check_dirtyness = True
+ return check_dirtyness
+
+ def _commitpatch(self, patch, patchfilevar):
+ output = ""
+ # Add all files
+ shellcmd = ["git", "add", "-f", "-A", "."]
+ output += runcmd(["sh", "-c", " ".join(shellcmd)], self.dir)
+ # Exclude the patches directory
+ shellcmd = ["git", "reset", "HEAD", self.patchdir]
+ output += runcmd(["sh", "-c", " ".join(shellcmd)], self.dir)
+ # Commit the result
+ (tmpfile, shellcmd) = self.prepareCommit(patch['file'], self.commituser, self.commitemail)
+ try:
+ shellcmd.insert(0, patchfilevar)
+ output += runcmd(["sh", "-c", " ".join(shellcmd)], self.dir)
+ finally:
+ os.remove(tmpfile)
+ return output
+
def _applypatch(self, patch, force = False, reverse = False, run = True):
import shutil
@@ -511,27 +601,26 @@ class GitApplyTree(PatchTree):
return runcmd(["sh", "-c", " ".join(shellcmd)], self.dir)
- # Add hooks which add a pointer to the original patch file name in the commit message
reporoot = (runcmd("git rev-parse --show-toplevel".split(), self.dir) or '').strip()
if not reporoot:
raise Exception("Cannot get repository root for directory %s" % self.dir)
- hooks_dir = os.path.join(reporoot, '.git', 'hooks')
- hooks_dir_backup = hooks_dir + '.devtool-orig'
- if os.path.lexists(hooks_dir_backup):
- raise Exception("Git hooks backup directory already exists: %s" % hooks_dir_backup)
- if os.path.lexists(hooks_dir):
- shutil.move(hooks_dir, hooks_dir_backup)
- os.mkdir(hooks_dir)
- commithook = os.path.join(hooks_dir, 'commit-msg')
- applyhook = os.path.join(hooks_dir, 'applypatch-msg')
- with open(commithook, 'w') as f:
- # NOTE: the formatting here is significant; if you change it you'll also need to
- # change other places which read it back
- f.write('echo "\n%s: $PATCHFILE" >> $1' % GitApplyTree.patch_line_prefix)
- os.chmod(commithook, 0o755)
- shutil.copy2(commithook, applyhook)
+
+ patch_applied = True
try:
patchfilevar = 'PATCHFILE="%s"' % os.path.basename(patch['file'])
+ if self._need_dirty_check():
+ # Check dirtyness of the tree
+ try:
+ output = runcmd(["git", "--work-tree=%s" % reporoot, "status", "--short"])
+ except CmdError:
+ pass
+ else:
+ if output:
+ # The tree is dirty, no need to try to apply patches with git anymore
+ # since they fail, fallback directly to patch
+ output = PatchTree._applypatch(self, patch, force, reverse, run)
+ output += self._commitpatch(patch, patchfilevar)
+ return output
try:
shellcmd = [patchfilevar, "git", "--work-tree=%s" % reporoot]
self.gitCommandUserOptions(shellcmd, self.commituser, self.commitemail)
@@ -558,24 +647,14 @@ class GitApplyTree(PatchTree):
except CmdError:
# Fall back to patch
output = PatchTree._applypatch(self, patch, force, reverse, run)
- # Add all files
- shellcmd = ["git", "add", "-f", "-A", "."]
- output += runcmd(["sh", "-c", " ".join(shellcmd)], self.dir)
- # Exclude the patches directory
- shellcmd = ["git", "reset", "HEAD", self.patchdir]
- output += runcmd(["sh", "-c", " ".join(shellcmd)], self.dir)
- # Commit the result
- (tmpfile, shellcmd) = self.prepareCommit(patch['file'], self.commituser, self.commitemail)
- try:
- shellcmd.insert(0, patchfilevar)
- output += runcmd(["sh", "-c", " ".join(shellcmd)], self.dir)
- finally:
- os.remove(tmpfile)
+ output += self._commitpatch(patch, patchfilevar)
return output
+ except:
+ patch_applied = False
+ raise
finally:
- shutil.rmtree(hooks_dir)
- if os.path.lexists(hooks_dir_backup):
- shutil.move(hooks_dir_backup, hooks_dir)
+ if patch_applied:
+ GitApplyTree.addNote(self.dir, "HEAD", GitApplyTree.original_patch, os.path.basename(patch['file']))
class QuiltTree(PatchSet):
@@ -598,6 +677,8 @@ class QuiltTree(PatchSet):
def Clean(self):
try:
+ # make sure that patches/series file exists before quilt pop to keep quilt-0.67 happy
+ open(os.path.join(self.dir, "patches","series"), 'a').close()
self._runcmd(["pop", "-a", "-f"])
oe.path.remove(os.path.join(self.dir, "patches","series"))
except Exception:
@@ -734,8 +815,9 @@ class NOOPResolver(Resolver):
self.patchset.Push()
except Exception:
import sys
- os.chdir(olddir)
raise
+ finally:
+ os.chdir(olddir)
# Patch resolver which relies on the user doing all the work involved in the
# resolution, with the exception of refreshing the remote copy of the patch
@@ -795,9 +877,9 @@ class UserResolver(Resolver):
# User did not fix the problem. Abort.
raise PatchError("Patch application failed, and user did not fix and refresh the patch.")
except Exception:
- os.chdir(olddir)
raise
- os.chdir(olddir)
+ finally:
+ os.chdir(olddir)
def patch_path(url, fetch, workdir, expand=True):
@@ -917,4 +999,3 @@ def should_apply(parm, d):
return False, "applies to later version"
return True, None
-
diff --git a/meta/lib/oe/path.py b/meta/lib/oe/path.py
index c8d8ad05b9..5d21cdcbdf 100644
--- a/meta/lib/oe/path.py
+++ b/meta/lib/oe/path.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: GPL-2.0-only
#
@@ -123,7 +125,8 @@ def copyhardlinktree(src, dst):
if os.path.isdir(src):
if len(glob.glob('%s/.??*' % src)) > 0:
source = './.??* '
- source += './*'
+ if len(glob.glob('%s/**' % src)) > 0:
+ source += './*'
s_dir = src
else:
source = src
@@ -169,6 +172,9 @@ def symlink(source, destination, force=False):
if e.errno != errno.EEXIST or os.readlink(destination) != source:
raise
+def relsymlink(target, name, force=False):
+ symlink(os.path.relpath(target, os.path.dirname(name)), name, force=force)
+
def find(dir, **walkoptions):
""" Given a directory, recurses into that directory,
returning all files as absolute paths. """
diff --git a/meta/lib/oe/prservice.py b/meta/lib/oe/prservice.py
index 339f7aebca..c41242c878 100644
--- a/meta/lib/oe/prservice.py
+++ b/meta/lib/oe/prservice.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: GPL-2.0-only
#
@@ -76,8 +78,7 @@ def prserv_export_tofile(d, metainfo, datainfo, lockdown, nomax=False):
bb.utils.mkdirhier(d.getVar('PRSERV_DUMPDIR'))
df = d.getVar('PRSERV_DUMPFILE')
#write data
- lf = bb.utils.lockfile("%s.lock" % df)
- with open(df, "a") as f:
+ with open(df, "a") as f, bb.utils.fileslocked(["%s.lock" % df]) as locks:
if metainfo:
#dump column info
f.write("#PR_core_ver = \"%s\"\n\n" % metainfo['core_ver']);
@@ -111,7 +112,6 @@ def prserv_export_tofile(d, metainfo, datainfo, lockdown, nomax=False):
if not nomax:
for i in idx:
f.write("PRAUTO_%s_%s = \"%s\"\n" % (str(datainfo[idx[i]]['version']),str(datainfo[idx[i]]['pkgarch']),str(datainfo[idx[i]]['value'])))
- bb.utils.unlockfile(lf)
def prserv_check_avail(d):
host_params = list([_f for _f in (d.getVar("PRSERV_HOST") or '').split(':') if _f])
diff --git a/meta/lib/oe/qa.py b/meta/lib/oe/qa.py
index 89acd3ead0..f8ae3c743f 100644
--- a/meta/lib/oe/qa.py
+++ b/meta/lib/oe/qa.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: GPL-2.0-only
#
@@ -211,6 +213,23 @@ def exit_with_message_if_errors(message, d):
def exit_if_errors(d):
exit_with_message_if_errors("Fatal QA errors were found, failing task.", d)
+def check_upstream_status(fullpath):
+ import re
+ kinda_status_re = re.compile(r"^.*upstream.*status.*$", re.IGNORECASE | re.MULTILINE)
+ strict_status_re = re.compile(r"^Upstream-Status: (Pending|Submitted|Denied|Inappropriate|Backport|Inactive-Upstream)( .+)?$", re.MULTILINE)
+ guidelines = "https://docs.yoctoproject.org/contributor-guide/recipe-style-guide.html#patch-upstream-status"
+
+ with open(fullpath, encoding='utf-8', errors='ignore') as f:
+ file_content = f.read()
+ match_kinda = kinda_status_re.search(file_content)
+ match_strict = strict_status_re.search(file_content)
+
+ if not match_strict:
+ if match_kinda:
+ return "Malformed Upstream-Status in patch\n%s\nPlease correct according to %s :\n%s" % (fullpath, guidelines, match_kinda.group(0))
+ else:
+ return "Missing Upstream-Status in patch\n%s\nPlease add according to %s ." % (fullpath, guidelines)
+
if __name__ == "__main__":
import sys
diff --git a/meta/lib/oe/recipeutils.py b/meta/lib/oe/recipeutils.py
index 872ff97b89..de1fbdd3a8 100644
--- a/meta/lib/oe/recipeutils.py
+++ b/meta/lib/oe/recipeutils.py
@@ -24,9 +24,9 @@ from collections import OrderedDict, defaultdict
from bb.utils import vercmp_string
# Help us to find places to insert values
-recipe_progression = ['SUMMARY', 'DESCRIPTION', 'AUTHOR', 'HOMEPAGE', 'BUGTRACKER', 'SECTION', 'LICENSE', 'LICENSE_FLAGS', 'LIC_FILES_CHKSUM', 'PROVIDES', 'DEPENDS', 'PR', 'PV', 'SRCREV', 'SRCPV', 'SRC_URI', 'S', 'do_fetch()', 'do_unpack()', 'do_patch()', 'EXTRA_OECONF', 'EXTRA_OECMAKE', 'EXTRA_OESCONS', 'do_configure()', 'EXTRA_OEMAKE', 'do_compile()', 'do_install()', 'do_populate_sysroot()', 'INITSCRIPT', 'USERADD', 'GROUPADD', 'PACKAGES', 'FILES', 'RDEPENDS', 'RRECOMMENDS', 'RSUGGESTS', 'RPROVIDES', 'RREPLACES', 'RCONFLICTS', 'ALLOW_EMPTY', 'populate_packages()', 'do_package()', 'do_deploy()', 'BBCLASSEXTEND']
+recipe_progression = ['SUMMARY', 'DESCRIPTION', 'HOMEPAGE', 'BUGTRACKER', 'SECTION', 'LICENSE', 'LICENSE_FLAGS', 'LIC_FILES_CHKSUM', 'PROVIDES', 'DEPENDS', 'PR', 'PV', 'SRCREV', 'SRC_URI', 'S', 'do_fetch()', 'do_unpack()', 'do_patch()', 'EXTRA_OECONF', 'EXTRA_OECMAKE', 'EXTRA_OESCONS', 'do_configure()', 'EXTRA_OEMAKE', 'do_compile()', 'do_install()', 'do_populate_sysroot()', 'INITSCRIPT', 'USERADD', 'GROUPADD', 'PACKAGES', 'FILES', 'RDEPENDS', 'RRECOMMENDS', 'RSUGGESTS', 'RPROVIDES', 'RREPLACES', 'RCONFLICTS', 'ALLOW_EMPTY', 'populate_packages()', 'do_package()', 'do_deploy()', 'BBCLASSEXTEND']
# Variables that sometimes are a bit long but shouldn't be wrapped
-nowrap_vars = ['SUMMARY', 'HOMEPAGE', 'BUGTRACKER', r'SRC_URI\[(.+\.)?md5sum\]', r'SRC_URI\[(.+\.)?sha256sum\]']
+nowrap_vars = ['SUMMARY', 'HOMEPAGE', 'BUGTRACKER', r'SRC_URI\[(.+\.)?md5sum\]', r'SRC_URI\[(.+\.)?sha[0-9]+sum\]']
list_vars = ['SRC_URI', 'LIC_FILES_CHKSUM']
meta_vars = ['SUMMARY', 'DESCRIPTION', 'HOMEPAGE', 'BUGTRACKER', 'SECTION']
@@ -421,8 +421,6 @@ def copy_recipe_files(d, tgt_dir, whole_dir=False, download=True, all_variants=F
# Ensure we handle class-target if we're dealing with one of the variants
variants.append('target')
for variant in variants:
- if variant.startswith("devupstream"):
- localdata.setVar('SRCPV', 'git')
localdata.setVar('CLASSOVERRIDE', 'class-%s' % variant)
fetch_urls(localdata)
@@ -666,19 +664,23 @@ def get_bbappend_path(d, destlayerdir, wildcardver=False):
return (appendpath, pathok)
-def bbappend_recipe(rd, destlayerdir, srcfiles, install=None, wildcardver=False, machine=None, extralines=None, removevalues=None, redirect_output=None):
+def bbappend_recipe(rd, destlayerdir, srcfiles, install=None, wildcardver=False, machine=None, extralines=None, removevalues=None, redirect_output=None, params=None, update_original_recipe=False):
"""
Writes a bbappend file for a recipe
Parameters:
rd: data dictionary for the recipe
destlayerdir: base directory of the layer to place the bbappend in
(subdirectory path from there will be determined automatically)
- srcfiles: dict of source files to add to SRC_URI, where the value
- is the full path to the file to be added, and the value is the
- original filename as it would appear in SRC_URI or None if it
- isn't already present. You may pass None for this parameter if
- you simply want to specify your own content via the extralines
- parameter.
+ srcfiles: dict of source files to add to SRC_URI, where the key
+ is the full path to the file to be added, and the value is a
+ dict with following optional keys:
+ path: the original filename as it would appear in SRC_URI
+ or None if it isn't already present.
+ patchdir: the patchdir parameter
+ newname: the name to give to the new added file. None to use
+ the default value: basename(path)
+ You may pass None for this parameter if you simply want to specify
+ your own content via the extralines parameter.
install: dict mapping entries in srcfiles to a tuple of two elements:
install path (*without* ${D} prefix) and permission value (as a
string, e.g. '0644').
@@ -696,18 +698,32 @@ def bbappend_recipe(rd, destlayerdir, srcfiles, install=None, wildcardver=False,
redirect_output:
If specified, redirects writing the output file to the
specified directory (for dry-run purposes)
+ params:
+ Parameters to use when adding entries to SRC_URI. If specified,
+ should be a list of dicts with the same length as srcfiles.
+ update_original_recipe:
+ Force to update the original recipe instead of creating/updating
+ a bbapend. destlayerdir must contain the original recipe
"""
if not removevalues:
removevalues = {}
- # Determine how the bbappend should be named
- appendpath, pathok = get_bbappend_path(rd, destlayerdir, wildcardver)
- if not appendpath:
- bb.error('Unable to determine layer directory containing %s' % recipefile)
- return (None, None)
- if not pathok:
- bb.warn('Unable to determine correct subdirectory path for bbappend file - check that what %s adds to BBFILES also matches .bbappend files. Using %s for now, but until you fix this the bbappend will not be applied.' % (os.path.join(destlayerdir, 'conf', 'layer.conf'), os.path.dirname(appendpath)))
+ recipefile = rd.getVar('FILE')
+ if update_original_recipe:
+ if destlayerdir not in recipefile:
+ bb.error("destlayerdir %s doesn't contain the original recipe (%s), cannot update it" % (destlayerdir, recipefile))
+ return (None, None)
+
+ appendpath = recipefile
+ else:
+ # Determine how the bbappend should be named
+ appendpath, pathok = get_bbappend_path(rd, destlayerdir, wildcardver)
+ if not appendpath:
+ bb.error('Unable to determine layer directory containing %s' % recipefile)
+ return (None, None)
+ if not pathok:
+ bb.warn('Unable to determine correct subdirectory path for bbappend file - check that what %s adds to BBFILES also matches .bbappend files. Using %s for now, but until you fix this the bbappend will not be applied.' % (os.path.join(destlayerdir, 'conf', 'layer.conf'), os.path.dirname(appendpath)))
appenddir = os.path.dirname(appendpath)
if not redirect_output:
@@ -752,7 +768,7 @@ def bbappend_recipe(rd, destlayerdir, srcfiles, install=None, wildcardver=False,
bbappendlines.append((varname, op, value))
destsubdir = rd.getVar('PN')
- if srcfiles:
+ if not update_original_recipe and srcfiles:
bbappendlines.append(('FILESEXTRAPATHS:prepend', ':=', '${THISDIR}/${PN}:'))
appendoverride = ''
@@ -762,20 +778,38 @@ def bbappend_recipe(rd, destlayerdir, srcfiles, install=None, wildcardver=False,
copyfiles = {}
if srcfiles:
instfunclines = []
- for newfile, origsrcfile in srcfiles.items():
- srcfile = origsrcfile
+ for i, (newfile, param) in enumerate(srcfiles.items()):
srcurientry = None
- if not srcfile:
- srcfile = os.path.basename(newfile)
+ if not 'path' in param or not param['path']:
+ if 'newname' in param and param['newname']:
+ srcfile = param['newname']
+ else:
+ srcfile = os.path.basename(newfile)
srcurientry = 'file://%s' % srcfile
+ oldentry = None
+ for uri in rd.getVar('SRC_URI').split():
+ if srcurientry in uri:
+ oldentry = uri
+ if params and params[i]:
+ srcurientry = '%s;%s' % (srcurientry, ';'.join('%s=%s' % (k,v) for k,v in params[i].items()))
# Double-check it's not there already
# FIXME do we care if the entry is added by another bbappend that might go away?
if not srcurientry in rd.getVar('SRC_URI').split():
if machine:
+ if oldentry:
+ appendline('SRC_URI:remove%s' % appendoverride, '=', ' ' + oldentry)
appendline('SRC_URI:append%s' % appendoverride, '=', ' ' + srcurientry)
else:
+ if oldentry:
+ if update_original_recipe:
+ removevalues['SRC_URI'] = oldentry
+ else:
+ appendline('SRC_URI:remove', '=', oldentry)
appendline('SRC_URI', '+=', srcurientry)
- copyfiles[newfile] = srcfile
+ param['path'] = srcfile
+ else:
+ srcfile = param['path']
+ copyfiles[newfile] = param
if install:
institem = install.pop(newfile, None)
if institem:
@@ -795,6 +829,8 @@ def bbappend_recipe(rd, destlayerdir, srcfiles, install=None, wildcardver=False,
# multiple times per operation when we're handling overrides)
if os.path.exists(appendpath) and not os.path.exists(outfile):
shutil.copy2(appendpath, outfile)
+ elif update_original_recipe:
+ outfile = recipefile
else:
bb.note('Writing append file %s' % appendpath)
outfile = appendpath
@@ -898,7 +934,12 @@ def bbappend_recipe(rd, destlayerdir, srcfiles, install=None, wildcardver=False,
outdir = redirect_output
else:
outdir = appenddir
- for newfile, srcfile in copyfiles.items():
+ for newfile, param in copyfiles.items():
+ srcfile = param['path']
+ patchdir = param.get('patchdir', ".")
+
+ if patchdir != ".":
+ newfile = os.path.join(os.path.split(newfile)[0], patchdir, os.path.split(newfile)[1])
filedest = os.path.join(outdir, destsubdir, os.path.basename(srcfile))
if os.path.abspath(newfile) != os.path.abspath(filedest):
if newfile.startswith(tempfile.gettempdir()):
@@ -942,10 +983,9 @@ def replace_dir_vars(path, d):
path = path.replace(dirpath, '${%s}' % dirvars[dirpath])
return path
-def get_recipe_pv_without_srcpv(pv, uri_type):
+def get_recipe_pv_with_pfx_sfx(pv, uri_type):
"""
- Get PV without SRCPV common in SCM's for now only
- support git.
+ Get PV separating prefix and suffix components.
Returns tuple with pv, prefix and suffix.
"""
@@ -953,7 +993,7 @@ def get_recipe_pv_without_srcpv(pv, uri_type):
sfx = ''
if uri_type == 'git':
- git_regex = re.compile(r"(?P<pfx>v?)(?P<ver>.*?)(?P<sfx>\+[^\+]*(git)?r?(AUTOINC\+))(?P<rev>.*)")
+ git_regex = re.compile(r"(?P<pfx>v?)(?P<ver>.*?)(?P<sfx>\+[^\+]*(git)?r?(AUTOINC\+)?)(?P<rev>.*)")
m = git_regex.match(pv)
if m:
@@ -1005,7 +1045,7 @@ def get_recipe_upstream_version(rd):
src_uri = src_uris.split()[0]
uri_type, _, _, _, _, _ = decodeurl(src_uri)
- (pv, pfx, sfx) = get_recipe_pv_without_srcpv(rd.getVar('PV'), uri_type)
+ (pv, pfx, sfx) = get_recipe_pv_with_pfx_sfx(rd.getVar('PV'), uri_type)
ru['current_version'] = pv
manual_upstream_version = rd.getVar("RECIPE_UPSTREAM_VERSION")
@@ -1033,7 +1073,7 @@ def get_recipe_upstream_version(rd):
revision = ud.method.latest_revision(ud, rd, 'default')
upversion = pv
if revision != rd.getVar("SRCREV"):
- upversion = upversion + "-new-commits-available"
+ upversion = upversion + "-new-commits-available"
else:
pupver = ud.method.latest_versionstring(ud, rd)
(upversion, revision) = pupver
diff --git a/meta/lib/oe/reproducible.py b/meta/lib/oe/reproducible.py
index 35b8be6d08..448befce33 100644
--- a/meta/lib/oe/reproducible.py
+++ b/meta/lib/oe/reproducible.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: GPL-2.0-only
#
import os
@@ -113,7 +115,8 @@ def get_source_date_epoch_from_git(d, sourcedir):
return None
bb.debug(1, "git repository: %s" % gitpath)
- p = subprocess.run(['git', '--git-dir', gitpath, 'log', '-1', '--pretty=%ct'], check=True, stdout=subprocess.PIPE)
+ p = subprocess.run(['git', '-c', 'log.showSignature=false', '--git-dir', gitpath, 'log', '-1', '--pretty=%ct'],
+ check=True, stdout=subprocess.PIPE)
return int(p.stdout.decode('utf-8'))
def get_source_date_epoch_from_youngest_file(d, sourcedir):
@@ -128,6 +131,9 @@ def get_source_date_epoch_from_youngest_file(d, sourcedir):
files = [f for f in files if not f[0] == '.']
for fname in files:
+ if fname == "singletask.lock":
+ # Ignore externalsrc/devtool lockfile [YOCTO #14921]
+ continue
filename = os.path.join(root, fname)
try:
mtime = int(os.lstat(filename).st_mtime)
@@ -152,7 +158,6 @@ def fixed_source_date_epoch(d):
def get_source_date_epoch(d, sourcedir):
return (
get_source_date_epoch_from_git(d, sourcedir) or
- get_source_date_epoch_from_known_files(d, sourcedir) or
get_source_date_epoch_from_youngest_file(d, sourcedir) or
fixed_source_date_epoch(d) # Last resort
)
diff --git a/meta/lib/oe/rootfs.py b/meta/lib/oe/rootfs.py
index 98cf3f244d..8cd48f9450 100644
--- a/meta/lib/oe/rootfs.py
+++ b/meta/lib/oe/rootfs.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: GPL-2.0-only
#
from abc import ABCMeta, abstractmethod
@@ -104,7 +106,7 @@ class Rootfs(object, metaclass=ABCMeta):
def _cleanup(self):
pass
- def _setup_dbg_rootfs(self, dirs):
+ def _setup_dbg_rootfs(self, package_paths):
gen_debugfs = self.d.getVar('IMAGE_GEN_DEBUGFS') or '0'
if gen_debugfs != '1':
return
@@ -120,11 +122,12 @@ class Rootfs(object, metaclass=ABCMeta):
bb.utils.mkdirhier(self.image_rootfs)
bb.note(" Copying back package database...")
- for dir in dirs:
- if not os.path.isdir(self.image_rootfs + '-orig' + dir):
- continue
- bb.utils.mkdirhier(self.image_rootfs + os.path.dirname(dir))
- shutil.copytree(self.image_rootfs + '-orig' + dir, self.image_rootfs + dir, symlinks=True)
+ for path in package_paths:
+ bb.utils.mkdirhier(self.image_rootfs + os.path.dirname(path))
+ if os.path.isdir(self.image_rootfs + '-orig' + path):
+ shutil.copytree(self.image_rootfs + '-orig' + path, self.image_rootfs + path, symlinks=True)
+ elif os.path.isfile(self.image_rootfs + '-orig' + path):
+ shutil.copyfile(self.image_rootfs + '-orig' + path, self.image_rootfs + path)
# Copy files located in /usr/lib/debug or /usr/src/debug
for dir in ["/usr/lib/debug", "/usr/src/debug"]:
@@ -160,6 +163,13 @@ class Rootfs(object, metaclass=ABCMeta):
bb.note(" Install extra debug packages...")
self.pm.install(extra_debug_pkgs.split(), True)
+ bb.note(" Removing package database...")
+ for path in package_paths:
+ if os.path.isdir(self.image_rootfs + path):
+ shutil.rmtree(self.image_rootfs + path)
+ elif os.path.isfile(self.image_rootfs + path):
+ os.remove(self.image_rootfs + path)
+
bb.note(" Rename debug rootfs...")
try:
shutil.rmtree(self.image_rootfs + '-dbg')
@@ -171,14 +181,8 @@ class Rootfs(object, metaclass=ABCMeta):
bb.utils.rename(self.image_rootfs + '-orig', self.image_rootfs)
def _exec_shell_cmd(self, cmd):
- fakerootcmd = self.d.getVar('FAKEROOT')
- if fakerootcmd is not None:
- exec_cmd = [fakerootcmd, cmd]
- else:
- exec_cmd = cmd
-
try:
- subprocess.check_output(exec_cmd, stderr=subprocess.STDOUT)
+ subprocess.check_output(cmd, stderr=subprocess.STDOUT)
except subprocess.CalledProcessError as e:
return("Command '%s' returned %d:\n%s" % (e.cmd, e.returncode, e.output))
@@ -190,6 +194,18 @@ class Rootfs(object, metaclass=ABCMeta):
post_process_cmds = self.d.getVar("ROOTFS_POSTPROCESS_COMMAND")
rootfs_post_install_cmds = self.d.getVar('ROOTFS_POSTINSTALL_COMMAND')
+ def make_last(command, commands):
+ commands = commands.split()
+ if command in commands:
+ commands.remove(command)
+ commands.append(command)
+ return "".join(commands)
+
+ # We want this to run as late as possible, in particular after
+ # systemd_sysusers_create and set_user_group. Using :append is not enough
+ make_last("tidy_shadowutils_files", post_process_cmds)
+ make_last("rootfs_reproducible", post_process_cmds)
+
execute_pre_post_process(self.d, pre_process_cmds)
if self.progress_reporter:
@@ -311,7 +327,7 @@ class Rootfs(object, metaclass=ABCMeta):
def _check_for_kernel_modules(self, modules_dir):
for root, dirs, files in os.walk(modules_dir, topdown=True):
for name in files:
- found_ko = name.endswith((".ko", ".ko.gz", ".ko.xz"))
+ found_ko = name.endswith((".ko", ".ko.gz", ".ko.xz", ".ko.zst"))
if found_ko:
return found_ko
return False
@@ -323,17 +339,30 @@ class Rootfs(object, metaclass=ABCMeta):
bb.note("No Kernel Modules found, not running depmod")
return
- kernel_abi_ver_file = oe.path.join(self.d.getVar('PKGDATA_DIR'), "kernel-depmod",
- 'kernel-abiversion')
- if not os.path.exists(kernel_abi_ver_file):
- bb.fatal("No kernel-abiversion file found (%s), cannot run depmod, aborting" % kernel_abi_ver_file)
+ pkgdatadir = self.d.getVar('PKGDATA_DIR')
+
+ # PKGDATA_DIR can include multiple kernels so we run depmod for each
+ # one of them.
+ for direntry in os.listdir(pkgdatadir):
+ match = re.match('(.*)-depmod', direntry)
+ if not match:
+ continue
+ kernel_package_name = match.group(1)
+
+ kernel_abi_ver_file = oe.path.join(pkgdatadir, direntry, kernel_package_name + '-abiversion')
+ if not os.path.exists(kernel_abi_ver_file):
+ bb.fatal("No kernel-abiversion file found (%s), cannot run depmod, aborting" % kernel_abi_ver_file)
+
+ with open(kernel_abi_ver_file) as f:
+ kernel_ver = f.read().strip(' \n')
- kernel_ver = open(kernel_abi_ver_file).read().strip(' \n')
- versioned_modules_dir = os.path.join(self.image_rootfs, modules_dir, kernel_ver)
+ versioned_modules_dir = os.path.join(self.image_rootfs, modules_dir, kernel_ver)
- bb.utils.mkdirhier(versioned_modules_dir)
+ bb.utils.mkdirhier(versioned_modules_dir)
- self._exec_shell_cmd(['depmodwrapper', '-a', '-b', self.image_rootfs, kernel_ver])
+ bb.note("Running depmodwrapper for %s ..." % versioned_modules_dir)
+ if self._exec_shell_cmd(['depmodwrapper', '-a', '-b', self.image_rootfs, kernel_ver, kernel_package_name]):
+ bb.fatal("Kernel modules dependency generation failed")
"""
Create devfs:
@@ -382,6 +411,10 @@ def create_rootfs(d, manifest_dir=None, progress_reporter=None, logcatcher=None)
def image_list_installed_packages(d, rootfs_dir=None):
+ # Theres no rootfs for baremetal images
+ if bb.data.inherits_class('baremetal-image', d):
+ return ""
+
if not rootfs_dir:
rootfs_dir = d.getVar('IMAGE_ROOTFS')
diff --git a/meta/lib/oe/rust.py b/meta/lib/oe/rust.py
index ec70b34805..185553eeeb 100644
--- a/meta/lib/oe/rust.py
+++ b/meta/lib/oe/rust.py
@@ -1,5 +1,13 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
# Handle mismatches between `uname -m`-style output and Rust's arch names
def arch_to_rust_arch(arch):
if arch == "ppc64le":
return "powerpc64le"
+ if arch in ('riscv32', 'riscv64'):
+ return arch + 'gc'
return arch
diff --git a/meta/lib/oe/sbom.py b/meta/lib/oe/sbom.py
index 3372f13a9d..fd4b6895d8 100644
--- a/meta/lib/oe/sbom.py
+++ b/meta/lib/oe/sbom.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: GPL-2.0-only
#
@@ -12,6 +14,10 @@ def get_recipe_spdxid(d):
return "SPDXRef-%s-%s" % ("Recipe", d.getVar("PN"))
+def get_download_spdxid(d, idx):
+ return "SPDXRef-Download-%s-%d" % (d.getVar("PN"), idx)
+
+
def get_package_spdxid(pkg):
return "SPDXRef-Package-%s" % pkg
@@ -32,18 +38,54 @@ def get_sdk_spdxid(sdk):
return "SPDXRef-SDK-%s" % sdk
-def write_doc(d, spdx_doc, subdir, spdx_deploy=None):
+def _doc_path_by_namespace(spdx_deploy, arch, doc_namespace):
+ return spdx_deploy / "by-namespace" / arch / doc_namespace.replace("/", "_")
+
+
+def doc_find_by_namespace(spdx_deploy, search_arches, doc_namespace):
+ for pkgarch in search_arches:
+ p = _doc_path_by_namespace(spdx_deploy, pkgarch, doc_namespace)
+ if os.path.exists(p):
+ return p
+ return None
+
+
+def _doc_path_by_hashfn(spdx_deploy, arch, doc_name, hashfn):
+ return (
+ spdx_deploy / "by-hash" / arch / hashfn.split()[1] / (doc_name + ".spdx.json")
+ )
+
+
+def doc_find_by_hashfn(spdx_deploy, search_arches, doc_name, hashfn):
+ for pkgarch in search_arches:
+ p = _doc_path_by_hashfn(spdx_deploy, pkgarch, doc_name, hashfn)
+ if os.path.exists(p):
+ return p
+ return None
+
+
+def doc_path(spdx_deploy, doc_name, arch, subdir):
+ return spdx_deploy / arch / subdir / (doc_name + ".spdx.json")
+
+
+def write_doc(d, spdx_doc, arch, subdir, spdx_deploy=None, indent=None):
from pathlib import Path
if spdx_deploy is None:
spdx_deploy = Path(d.getVar("SPDXDEPLOY"))
- dest = spdx_deploy / subdir / (spdx_doc.name + ".spdx.json")
+ dest = doc_path(spdx_deploy, spdx_doc.name, arch, subdir)
dest.parent.mkdir(exist_ok=True, parents=True)
with dest.open("wb") as f:
- doc_sha1 = spdx_doc.to_json(f, sort_keys=True)
+ doc_sha1 = spdx_doc.to_json(f, sort_keys=True, indent=indent)
+
+ l = _doc_path_by_namespace(spdx_deploy, arch, spdx_doc.documentNamespace)
+ l.parent.mkdir(exist_ok=True, parents=True)
+ l.symlink_to(os.path.relpath(dest, l.parent))
- l = spdx_deploy / "by-namespace" / spdx_doc.documentNamespace.replace("/", "_")
+ l = _doc_path_by_hashfn(
+ spdx_deploy, arch, spdx_doc.name, d.getVar("BB_HASHFILENAME")
+ )
l.parent.mkdir(exist_ok=True, parents=True)
l.symlink_to(os.path.relpath(dest, l.parent))
diff --git a/meta/lib/oe/sdk.py b/meta/lib/oe/sdk.py
index 27347667e8..3dc3672210 100644
--- a/meta/lib/oe/sdk.py
+++ b/meta/lib/oe/sdk.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: GPL-2.0-only
#
@@ -68,7 +70,7 @@ class Sdk(object, metaclass=ABCMeta):
#FIXME: using umbrella exc catching because bb.utils method raises it
except Exception as e:
bb.debug(1, "printing the stack trace\n %s" %traceback.format_exc())
- bb.error("unable to place %s in final SDK location" % sourcefile)
+ bb.fatal("unable to place %s in final SDK location" % sourcefile)
def mkdirhier(self, dirpath):
try:
diff --git a/meta/lib/oe/spdx.py b/meta/lib/oe/spdx.py
index 14ca706895..7aaf2af5ed 100644
--- a/meta/lib/oe/spdx.py
+++ b/meta/lib/oe/spdx.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: GPL-2.0-only
#
@@ -214,11 +216,23 @@ class SPDXPackageVerificationCode(SPDXObject):
class SPDXPackage(SPDXObject):
+ ALLOWED_CHECKSUMS = [
+ "SHA1",
+ "SHA224",
+ "SHA256",
+ "SHA384",
+ "SHA512",
+ "MD2",
+ "MD4",
+ "MD5",
+ "MD6",
+ ]
+
name = _String()
SPDXID = _String()
versionInfo = _String()
downloadLocation = _String(default="NOASSERTION")
- packageSupplier = _String(default="NOASSERTION")
+ supplier = _String(default="NOASSERTION")
homepage = _String()
licenseConcluded = _String(default="NOASSERTION")
licenseDeclared = _String(default="NOASSERTION")
@@ -232,6 +246,7 @@ class SPDXPackage(SPDXObject):
hasFiles = _StringList()
packageFileName = _String()
annotations = _ObjectList(SPDXAnnotation)
+ checksums = _ObjectList(SPDXChecksum)
class SPDXFile(SPDXObject):
diff --git a/meta/lib/oe/sstatesig.py b/meta/lib/oe/sstatesig.py
index 7150bd0929..a46e5502ab 100644
--- a/meta/lib/oe/sstatesig.py
+++ b/meta/lib/oe/sstatesig.py
@@ -1,9 +1,12 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: GPL-2.0-only
#
import bb.siggen
import bb.runqueue
import oe
+import netrc
def sstate_rundepfilter(siggen, fn, recipename, task, dep, depname, dataCaches):
# Return True if we should keep the dependency, False to drop it
@@ -28,6 +31,12 @@ def sstate_rundepfilter(siggen, fn, recipename, task, dep, depname, dataCaches):
depmc, _, deptaskname, depmcfn = bb.runqueue.split_tid_mcfn(dep)
mc, _ = bb.runqueue.split_mc(fn)
+ # We can skip the rm_work task signature to avoid running the task
+ # when we remove some tasks from the dependencie chain
+ # i.e INHERIT:remove = "create-spdx" will trigger the do_rm_work
+ if task == "do_rm_work":
+ return False
+
# (Almost) always include our own inter-task dependencies (unless it comes
# from a mcdepends). The exception is the special
# do_kernel_configme->do_unpack_and_patch dependency from archiver.bbclass.
@@ -84,15 +93,6 @@ def sstate_lockedsigs(d):
sigs[pn][task] = [h, siggen_lockedsigs_var]
return sigs
-class SignatureGeneratorOEBasic(bb.siggen.SignatureGeneratorBasic):
- name = "OEBasic"
- def init_rundepcheck(self, data):
- self.abisaferecipes = (data.getVar("SIGGEN_EXCLUDERECIPES_ABISAFE") or "").split()
- self.saferecipedeps = (data.getVar("SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS") or "").split()
- pass
- def rundep_check(self, fn, recipename, task, dep, depname, dataCaches = None):
- return sstate_rundepfilter(self, fn, recipename, task, dep, depname, dataCaches)
-
class SignatureGeneratorOEBasicHashMixIn(object):
supports_multiconfig_datacaches = True
@@ -105,6 +105,8 @@ class SignatureGeneratorOEBasicHashMixIn(object):
self.lockedhashfn = {}
self.machine = data.getVar("MACHINE")
self.mismatch_msgs = []
+ self.mismatch_number = 0
+ self.lockedsigs_msgs = ""
self.unlockedrecipes = (data.getVar("SIGGEN_UNLOCKED_RECIPES") or
"").split()
self.unlockedrecipes = { k: "" for k in self.unlockedrecipes }
@@ -141,9 +143,10 @@ class SignatureGeneratorOEBasicHashMixIn(object):
super().set_taskdata(data[3:])
def dump_sigs(self, dataCache, options):
- sigfile = os.getcwd() + "/locked-sigs.inc"
- bb.plain("Writing locked sigs to %s" % sigfile)
- self.dump_lockedsigs(sigfile)
+ if 'lockedsigs' in options:
+ sigfile = os.getcwd() + "/locked-sigs.inc"
+ bb.plain("Writing locked sigs to %s" % sigfile)
+ self.dump_lockedsigs(sigfile)
return super(bb.siggen.SignatureGeneratorBasicHash, self).dump_sigs(dataCache, options)
@@ -188,6 +191,7 @@ class SignatureGeneratorOEBasicHashMixIn(object):
#bb.warn("Using %s %s %s" % (recipename, task, h))
if h != h_locked and h_locked != unihash:
+ self.mismatch_number += 1
self.mismatch_msgs.append('The %s:%s sig is computed to be %s, but the sig is locked to %s in %s'
% (recipename, task, h, h_locked, var))
@@ -202,10 +206,10 @@ class SignatureGeneratorOEBasicHashMixIn(object):
return self.lockedhashes[tid]
return super().get_stampfile_hash(tid)
- def get_unihash(self, tid):
+ def get_cached_unihash(self, tid):
if tid in self.lockedhashes and self.lockedhashes[tid] and not self._internal:
return self.lockedhashes[tid]
- return super().get_unihash(tid)
+ return super().get_cached_unihash(tid)
def dump_sigtask(self, fn, task, stampbase, runtime):
tid = fn + ":" + task
@@ -216,6 +220,9 @@ class SignatureGeneratorOEBasicHashMixIn(object):
def dump_lockedsigs(self, sigfile, taskfilter=None):
types = {}
for tid in self.runtaskdeps:
+ # Bitbake changed this to a tuple in newer versions
+ if isinstance(tid, tuple):
+ tid = tid[1]
if taskfilter:
if not tid in taskfilter:
continue
@@ -265,6 +272,15 @@ class SignatureGeneratorOEBasicHashMixIn(object):
warn_msgs = []
error_msgs = []
sstate_missing_msgs = []
+ info_msgs = None
+
+ if self.lockedsigs:
+ if len(self.lockedsigs) > 10:
+ self.lockedsigs_msgs = "There are %s recipes with locked tasks (%s task(s) have non matching signature)" % (len(self.lockedsigs), self.mismatch_number)
+ else:
+ self.lockedsigs_msgs = "The following recipes have locked tasks:"
+ for pn in self.lockedsigs:
+ self.lockedsigs_msgs += " %s" % (pn)
for tid in sq_data['hash']:
if tid not in found:
@@ -277,7 +293,9 @@ class SignatureGeneratorOEBasicHashMixIn(object):
% (pn, taskname, sq_data['hash'][tid]))
checklevel = d.getVar("SIGGEN_LOCKEDSIGS_TASKSIG_CHECK")
- if checklevel == 'warn':
+ if checklevel == 'info':
+ info_msgs = self.lockedsigs_msgs
+ if checklevel == 'warn' or checklevel == 'info':
warn_msgs += self.mismatch_msgs
elif checklevel == 'error':
error_msgs += self.mismatch_msgs
@@ -288,6 +306,8 @@ class SignatureGeneratorOEBasicHashMixIn(object):
elif checklevel == 'error':
error_msgs += sstate_missing_msgs
+ if info_msgs:
+ bb.note(info_msgs)
if warn_msgs:
bb.warn("\n".join(warn_msgs))
if error_msgs:
@@ -307,9 +327,21 @@ class SignatureGeneratorOEEquivHash(SignatureGeneratorOEBasicHashMixIn, bb.sigge
self.method = data.getVar('SSTATE_HASHEQUIV_METHOD')
if not self.method:
bb.fatal("OEEquivHash requires SSTATE_HASHEQUIV_METHOD to be set")
+ self.max_parallel = int(data.getVar('BB_HASHSERVE_MAX_PARALLEL') or 1)
+ self.username = data.getVar("BB_HASHSERVE_USERNAME")
+ self.password = data.getVar("BB_HASHSERVE_PASSWORD")
+ if not self.username or not self.password:
+ try:
+ n = netrc.netrc()
+ auth = n.authenticators(self.server)
+ if auth is not None:
+ self.username, _, self.password = auth
+ except FileNotFoundError:
+ pass
+ except netrc.NetrcParseError as e:
+ bb.warn("Error parsing %s:%s: %s" % (e.filename, str(e.lineno), e.msg))
# Insert these classes into siggen's namespace so it can see and select them
-bb.siggen.SignatureGeneratorOEBasic = SignatureGeneratorOEBasic
bb.siggen.SignatureGeneratorOEBasicHash = SignatureGeneratorOEBasicHash
bb.siggen.SignatureGeneratorOEEquivHash = SignatureGeneratorOEEquivHash
@@ -323,14 +355,14 @@ def find_siginfo(pn, taskname, taskhashlist, d):
if not taskname:
# We have to derive pn and taskname
key = pn
- splitit = key.split('.bb:')
- taskname = splitit[1]
- pn = os.path.basename(splitit[0]).split('_')[0]
- if key.startswith('virtual:native:'):
- pn = pn + '-native'
+ if key.startswith("mc:"):
+ # mc:<mc>:<pn>:<task>
+ _, _, pn, taskname = key.split(':', 3)
+ else:
+ # <pn>:<task>
+ pn, taskname = key.split(':', 1)
hashfiles = {}
- filedates = {}
def get_hashval(siginfo):
if siginfo.endswith('.siginfo'):
@@ -338,6 +370,9 @@ def find_siginfo(pn, taskname, taskhashlist, d):
else:
return siginfo.rpartition('.')[2]
+ def get_time(fullpath):
+ return os.stat(fullpath).st_mtime
+
# First search in stamps dir
localdata = d.createCopy()
localdata.setVar('MULTIMACH_TARGET_SYS', '*')
@@ -353,24 +388,21 @@ def find_siginfo(pn, taskname, taskhashlist, d):
filespec = '%s.%s.sigdata.*' % (stamp, taskname)
foundall = False
import glob
+ bb.debug(1, "Calling glob.glob on {}".format(filespec))
for fullpath in glob.glob(filespec):
match = False
if taskhashlist:
for taskhash in taskhashlist:
if fullpath.endswith('.%s' % taskhash):
- hashfiles[taskhash] = fullpath
+ hashfiles[taskhash] = {'path':fullpath, 'sstate':False, 'time':get_time(fullpath)}
if len(hashfiles) == len(taskhashlist):
foundall = True
break
else:
- try:
- filedates[fullpath] = os.stat(fullpath).st_mtime
- except OSError:
- continue
hashval = get_hashval(fullpath)
- hashfiles[hashval] = fullpath
+ hashfiles[hashval] = {'path':fullpath, 'sstate':False, 'time':get_time(fullpath)}
- if not taskhashlist or (len(filedates) < 2 and not foundall):
+ if not taskhashlist or (len(hashfiles) < 2 and not foundall):
# That didn't work, look in sstate-cache
hashes = taskhashlist or ['?' * 64]
localdata = bb.data.createCopy(d)
@@ -379,6 +411,9 @@ def find_siginfo(pn, taskname, taskhashlist, d):
localdata.setVar('TARGET_VENDOR', '*')
localdata.setVar('TARGET_OS', '*')
localdata.setVar('PN', pn)
+ # gcc-source is a special case, same as with local stamps above
+ if pn.startswith("gcc-source"):
+ localdata.setVar('PN', "gcc")
localdata.setVar('PV', '*')
localdata.setVar('PR', '*')
localdata.setVar('BB_TASKHASH', hashval)
@@ -390,24 +425,18 @@ def find_siginfo(pn, taskname, taskhashlist, d):
localdata.setVar('SSTATE_EXTRAPATH', "${NATIVELSBSTRING}/")
filespec = '%s.siginfo' % localdata.getVar('SSTATE_PKG')
+ bb.debug(1, "Calling glob.glob on {}".format(filespec))
matchedfiles = glob.glob(filespec)
for fullpath in matchedfiles:
actual_hashval = get_hashval(fullpath)
if actual_hashval in hashfiles:
continue
- hashfiles[hashval] = fullpath
- if not taskhashlist:
- try:
- filedates[fullpath] = os.stat(fullpath).st_mtime
- except:
- continue
+ hashfiles[actual_hashval] = {'path':fullpath, 'sstate':True, 'time':get_time(fullpath)}
- if taskhashlist:
- return hashfiles
- else:
- return filedates
+ return hashfiles
bb.siggen.find_siginfo = find_siginfo
+bb.siggen.find_siginfo_version = 2
def sstate_get_manifest_filename(task, d):
@@ -452,11 +481,15 @@ def find_sstate_manifest(taskdata, taskdata2, taskname, d, multilibcache):
pkgarchs.append('allarch')
pkgarchs.append('${SDK_ARCH}_${SDK_ARCH}-${SDKPKGSUFFIX}')
+ searched_manifests = []
+
for pkgarch in pkgarchs:
manifest = d2.expand("${SSTATE_MANIFESTS}/manifest-%s-%s.%s" % (pkgarch, taskdata, taskname))
if os.path.exists(manifest):
return manifest, d2
- bb.fatal("Manifest %s not found in %s (variant '%s')?" % (manifest, d2.expand(" ".join(pkgarchs)), variant))
+ searched_manifests.append(manifest)
+ bb.fatal("The sstate manifest for task '%s:%s' (multilib variant '%s') could not be found.\nThe pkgarchs considered were: %s.\nBut none of these manifests exists:\n %s"
+ % (taskdata, taskname, variant, d2.expand(", ".join(pkgarchs)),"\n ".join(searched_manifests)))
return None, d2
def OEOuthashBasic(path, sigfile, task, d):
@@ -576,9 +609,9 @@ def OEOuthashBasic(path, sigfile, task, d):
update_hash(" %10s" % pwd.getpwuid(s.st_uid).pw_name)
update_hash(" %10s" % grp.getgrgid(s.st_gid).gr_name)
except KeyError as e:
- bb.warn("KeyError in %s" % path)
msg = ("KeyError: %s\nPath %s is owned by uid %d, gid %d, which doesn't match "
- "any user/group on target. This may be due to host contamination." % (e, path, s.st_uid, s.st_gid))
+ "any user/group on target. This may be due to host contamination." %
+ (e, os.path.abspath(path), s.st_uid, s.st_gid))
raise Exception(msg).with_traceback(e.__traceback__)
if include_timestamps:
@@ -641,6 +674,10 @@ def OEOuthashBasic(path, sigfile, task, d):
if f == 'fixmepath':
continue
process(os.path.join(root, f))
+
+ for dir in dirs:
+ if os.path.islink(os.path.join(root, dir)):
+ process(os.path.join(root, dir))
finally:
os.chdir(prev_dir)
diff --git a/meta/lib/oe/terminal.py b/meta/lib/oe/terminal.py
index de8dcebf94..4412bc14c1 100644
--- a/meta/lib/oe/terminal.py
+++ b/meta/lib/oe/terminal.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: GPL-2.0-only
#
import logging
@@ -102,6 +104,10 @@ class Rxvt(XTerminal):
command = 'rxvt -T "{title}" -e {command}'
priority = 1
+class URxvt(XTerminal):
+ command = 'urxvt -T "{title}" -e {command}'
+ priority = 1
+
class Screen(Terminal):
command = 'screen -D -m -t "{title}" -S devshell {command}'
diff --git a/meta/lib/oe/types.py b/meta/lib/oe/types.py
index bbbabafbf6..b929afb1f3 100644
--- a/meta/lib/oe/types.py
+++ b/meta/lib/oe/types.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: GPL-2.0-only
#
diff --git a/meta/lib/oe/useradd.py b/meta/lib/oe/useradd.py
index 3caa3f851a..54aa86feb5 100644
--- a/meta/lib/oe/useradd.py
+++ b/meta/lib/oe/useradd.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: GPL-2.0-only
#
import argparse
diff --git a/meta/lib/oe/utils.py b/meta/lib/oe/utils.py
index 46fc76c261..14a7d07ef0 100644
--- a/meta/lib/oe/utils.py
+++ b/meta/lib/oe/utils.py
@@ -1,10 +1,13 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: GPL-2.0-only
#
import subprocess
import multiprocessing
import traceback
+import errno
def read_file(filename):
try:
@@ -256,16 +259,23 @@ def execute_pre_post_process(d, cmds):
if cmds is None:
return
- for cmd in cmds.strip().split(';'):
- cmd = cmd.strip()
- if cmd != '':
- bb.note("Executing %s ..." % cmd)
- bb.build.exec_func(cmd, d)
+ cmds = cmds.replace(";", " ")
+
+ for cmd in cmds.split():
+ bb.note("Executing %s ..." % cmd)
+ bb.build.exec_func(cmd, d)
+
+def get_bb_number_threads(d):
+ return int(d.getVar("BB_NUMBER_THREADS") or os.cpu_count() or 1)
-# For each item in items, call the function 'target' with item as the first
+def multiprocess_launch(target, items, d, extraargs=None):
+ max_process = get_bb_number_threads(d)
+ return multiprocess_launch_mp(target, items, max_process, extraargs)
+
+# For each item in items, call the function 'target' with item as the first
# argument, extraargs as the other arguments and handle any exceptions in the
# parent thread
-def multiprocess_launch(target, items, d, extraargs=None):
+def multiprocess_launch_mp(target, items, max_process, extraargs=None):
class ProcessLaunch(multiprocessing.Process):
def __init__(self, *args, **kwargs):
@@ -300,7 +310,6 @@ def multiprocess_launch(target, items, d, extraargs=None):
self.update()
return self._result
- max_process = int(d.getVar("BB_NUMBER_THREADS") or os.cpu_count() or 1)
launched = []
errors = []
results = []
@@ -473,70 +482,6 @@ def get_multilib_datastore(variant, d):
localdata.setVar("MLPREFIX", "")
return localdata
-#
-# Python 2.7 doesn't have threaded pools (just multiprocessing)
-# so implement a version here
-#
-
-from queue import Queue
-from threading import Thread
-
-class ThreadedWorker(Thread):
- """Thread executing tasks from a given tasks queue"""
- def __init__(self, tasks, worker_init, worker_end, name=None):
- Thread.__init__(self, name=name)
- self.tasks = tasks
- self.daemon = True
-
- self.worker_init = worker_init
- self.worker_end = worker_end
-
- def run(self):
- from queue import Empty
-
- if self.worker_init is not None:
- self.worker_init(self)
-
- while True:
- try:
- func, args, kargs = self.tasks.get(block=False)
- except Empty:
- if self.worker_end is not None:
- self.worker_end(self)
- break
-
- try:
- func(self, *args, **kargs)
- except Exception as e:
- # Eat all exceptions
- bb.mainlogger.debug("Worker task raised %s" % e, exc_info=e)
- finally:
- self.tasks.task_done()
-
-class ThreadedPool:
- """Pool of threads consuming tasks from a queue"""
- def __init__(self, num_workers, num_tasks, worker_init=None, worker_end=None, name="ThreadedPool-"):
- self.tasks = Queue(num_tasks)
- self.workers = []
-
- for i in range(num_workers):
- worker = ThreadedWorker(self.tasks, worker_init, worker_end, name=name + str(i))
- self.workers.append(worker)
-
- def start(self):
- for worker in self.workers:
- worker.start()
-
- def add_task(self, func, *args, **kargs):
- """Add a task to the queue"""
- self.tasks.put((func, args, kargs))
-
- def wait_completion(self):
- """Wait for completion of all the tasks in the queue"""
- self.tasks.join()
- for worker in self.workers:
- worker.join()
-
class ImageQAFailed(Exception):
def __init__(self, description, name=None, logfile=None):
self.description = description
@@ -584,3 +529,14 @@ def directory_size(root, blocksize=4096):
total += sum(roundup(getsize(os.path.join(root, name))) for name in files)
total += roundup(getsize(root))
return total
+
+# Update the mtime of a file, skip if permission/read-only issues
+def touch(filename):
+ try:
+ os.utime(filename, None)
+ except PermissionError:
+ pass
+ except OSError as e:
+ # Handle read-only file systems gracefully
+ if e.errno != errno.EROFS:
+ raise e
diff --git a/meta/lib/oeqa/buildperf/base.py b/meta/lib/oeqa/buildperf/base.py
index 5f1805d86c..5d656c781a 100644
--- a/meta/lib/oeqa/buildperf/base.py
+++ b/meta/lib/oeqa/buildperf/base.py
@@ -444,7 +444,7 @@ class BuildPerfTestCase(unittest.TestCase):
buildstats = []
for fname in os.listdir(bs_dir):
recipe_dir = os.path.join(bs_dir, fname)
- if not os.path.isdir(recipe_dir):
+ if not os.path.isdir(recipe_dir) or fname == "reduced_proc_pressure":
continue
name, epoch, version, revision = split_nevr(fname)
recipe_bs = OrderedDict((('name', name),
diff --git a/meta/lib/oeqa/controllers/__init__.py b/meta/lib/oeqa/controllers/__init__.py
index cc3836c4bf..0fc905be9a 100644
--- a/meta/lib/oeqa/controllers/__init__.py
+++ b/meta/lib/oeqa/controllers/__init__.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: GPL-2.0-only
#
# Enable other layers to have modules in the same named directory
diff --git a/meta/lib/oeqa/controllers/testtargetloader.py b/meta/lib/oeqa/controllers/testtargetloader.py
index 23101c7371..209ff7061a 100644
--- a/meta/lib/oeqa/controllers/testtargetloader.py
+++ b/meta/lib/oeqa/controllers/testtargetloader.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: GPL-2.0-only
#
diff --git a/meta/lib/oeqa/core/context.py b/meta/lib/oeqa/core/context.py
index 2abe353d27..9313271f58 100644
--- a/meta/lib/oeqa/core/context.py
+++ b/meta/lib/oeqa/core/context.py
@@ -81,7 +81,7 @@ class OETestContext(object):
def runTests(self, processes=None, skips=[]):
self.runner = self.runnerClass(self, descriptions=False, verbosity=2)
- # Dinamically skip those tests specified though arguments
+ # Dynamically skip those tests specified though arguments
self.skipTests(skips)
self._run_start_time = time.time()
diff --git a/meta/lib/oeqa/core/decorator/data.py b/meta/lib/oeqa/core/decorator/data.py
index 3ce10e5499..5444b2cb75 100644
--- a/meta/lib/oeqa/core/decorator/data.py
+++ b/meta/lib/oeqa/core/decorator/data.py
@@ -186,6 +186,16 @@ class skipIfNotQemu(OETestDecorator):
self.case.skipTest('Test only runs on qemu machines')
@registerDecorator
+class skipIfNotQemuUsermode(OETestDecorator):
+ """
+ Skip test if MACHINE_FEATURES does not contain qemu-usermode
+ """
+ def setUpDecorator(self):
+ self.logger.debug("Checking if MACHINE_FEATURES does not contain qemu-usermode")
+ if 'qemu-usermode' not in self.case.td.get('MACHINE_FEATURES', '').split():
+ self.case.skipTest('Test requires qemu-usermode in MACHINE_FEATURES')
+
+@registerDecorator
class skipIfQemu(OETestDecorator):
"""
Skip test if MACHINE is qemu*
@@ -194,3 +204,27 @@ class skipIfQemu(OETestDecorator):
self.logger.debug("Checking if qemu MACHINE")
if self.case.td.get('MACHINE', '').startswith('qemu'):
self.case.skipTest('Test only runs on real hardware')
+
+@registerDecorator
+class skipIfArch(OETestDecorator):
+ """
+ Skip test if HOST_ARCH is present in the tuple specified.
+ """
+
+ attrs = ('archs',)
+ def setUpDecorator(self):
+ arch = self.case.td['HOST_ARCH']
+ if arch in self.archs:
+ self.case.skipTest('Test skipped on %s' % arch)
+
+@registerDecorator
+class skipIfNotArch(OETestDecorator):
+ """
+ Skip test if HOST_ARCH is not present in the tuple specified.
+ """
+
+ attrs = ('archs',)
+ def setUpDecorator(self):
+ arch = self.case.td['HOST_ARCH']
+ if arch not in self.archs:
+ self.case.skipTest('Test skipped on %s' % arch)
diff --git a/meta/lib/oeqa/core/loader.py b/meta/lib/oeqa/core/loader.py
index 11978213b8..d12d5a055c 100644
--- a/meta/lib/oeqa/core/loader.py
+++ b/meta/lib/oeqa/core/loader.py
@@ -37,7 +37,7 @@ def _find_duplicated_modules(suite, directory):
if path:
raise ImportError("Duplicated %s module found in %s" % (module, path))
-def _built_modules_dict(modules):
+def _built_modules_dict(modules, logger):
modules_dict = {}
if modules == None:
@@ -48,6 +48,9 @@ def _built_modules_dict(modules):
# characters, whereas class names do
m = re.match(r'^([0-9a-z_.]+)(?:\.(\w[^.]*)(?:\.([^.]+))?)?$', module, flags=re.ASCII)
if not m:
+ logger.warn("module '%s' was skipped from selected modules, "\
+ "because it doesn't match with module name assumptions: "\
+ "package and module names do not contain upper case characters, whereas class names do" % module)
continue
module_name, class_name, test_name = m.groups()
@@ -58,6 +61,8 @@ def _built_modules_dict(modules):
modules_dict[module_name][class_name] = []
if test_name and test_name not in modules_dict[module_name][class_name]:
modules_dict[module_name][class_name].append(test_name)
+ if modules and not modules_dict:
+ raise OEQATestNotFound("All selected modules were skipped, this would trigger selftest with all tests and -r ignored.")
return modules_dict
@@ -71,7 +76,7 @@ class OETestLoader(unittest.TestLoader):
*args, **kwargs):
self.tc = tc
- self.modules = _built_modules_dict(modules)
+ self.modules = _built_modules_dict(modules, tc.logger)
self.tests = tests
self.modules_required = modules_required
@@ -311,6 +316,9 @@ class OETestLoader(unittest.TestLoader):
module_name_small in self.modules) \
else False
+ if any(c.isupper() for c in module.__name__):
+ raise SystemExit("Module '%s' contains uppercase characters and this isn't supported. Please fix the module name." % module.__name__)
+
return (load_module, load_underscore)
diff --git a/meta/lib/oeqa/core/runner.py b/meta/lib/oeqa/core/runner.py
index d50690ab37..a86a706bd9 100644
--- a/meta/lib/oeqa/core/runner.py
+++ b/meta/lib/oeqa/core/runner.py
@@ -44,6 +44,7 @@ class OETestResult(_TestResult):
self.endtime = {}
self.progressinfo = {}
self.extraresults = {}
+ self.shownmsg = []
# Inject into tc so that TestDepends decorator can see results
tc.results = self
@@ -74,6 +75,7 @@ class OETestResult(_TestResult):
for (scase, msg) in getattr(self, t):
if test.id() == scase.id():
self.tc.logger.info(str(msg))
+ self.shownmsg.append(test.id())
break
def logSummary(self, component, context_msg=''):
@@ -169,7 +171,6 @@ class OETestResult(_TestResult):
def logDetails(self, json_file_dir=None, configuration=None, result_id=None,
dump_streams=False):
- self.tc.logger.info("RESULTS:")
result = self.extraresults
logs = {}
@@ -193,6 +194,10 @@ class OETestResult(_TestResult):
report = {'status': status}
if log:
report['log'] = log
+ # Class setup failures wouldn't enter stopTest so would never display
+ if case.id() not in self.shownmsg:
+ self.tc.logger.info("Failure (%s) for %s:\n" % (status, case.id()) + log)
+
if duration:
report['duration'] = duration
@@ -215,6 +220,7 @@ class OETestResult(_TestResult):
report['stderr'] = stderr
result[case.id()] = report
+ self.tc.logger.info("RESULTS:")
for i in ['PASSED', 'SKIPPED', 'EXPECTEDFAIL', 'ERROR', 'FAILED', 'UNKNOWN']:
if i not in logs:
continue
@@ -229,6 +235,10 @@ class OETestResult(_TestResult):
# Override as we unexpected successes aren't failures for us
return (len(self.failures) == len(self.errors) == 0)
+ def hasAnyFailingTest(self):
+ # Account for expected failures
+ return not self.wasSuccessful() or len(self.expectedFailures)
+
class OEListTestsResult(object):
def wasSuccessful(self):
return True
diff --git a/meta/lib/oeqa/core/target/qemu.py b/meta/lib/oeqa/core/target/qemu.py
index 79fd724f7d..d93b3ac94a 100644
--- a/meta/lib/oeqa/core/target/qemu.py
+++ b/meta/lib/oeqa/core/target/qemu.py
@@ -14,15 +14,13 @@ from collections import defaultdict
from .ssh import OESSHTarget
from oeqa.utils.qemurunner import QemuRunner
-from oeqa.utils.dump import MonitorDumper
-from oeqa.utils.dump import TargetDumper
supported_fstypes = ['ext3', 'ext4', 'cpio.gz', 'wic']
class OEQemuTarget(OESSHTarget):
def __init__(self, logger, server_ip, timeout=300, user='root',
port=None, machine='', rootfs='', kernel='', kvm=False, slirp=False,
- dump_dir='', dump_host_cmds='', display='', bootlog='',
+ dump_dir='', display='', bootlog='',
tmpdir='', dir_image='', boottime=60, serial_ports=2,
boot_patterns = defaultdict(str), ovmf=False, tmpfsdir=None, **kwargs):
@@ -44,18 +42,9 @@ class OEQemuTarget(OESSHTarget):
self.runner = QemuRunner(machine=machine, rootfs=rootfs, tmpdir=tmpdir,
deploy_dir_image=dir_image, display=display,
logfile=bootlog, boottime=boottime,
- use_kvm=kvm, use_slirp=slirp, dump_dir=dump_dir,
- dump_host_cmds=dump_host_cmds, logger=logger,
+ use_kvm=kvm, use_slirp=slirp, dump_dir=dump_dir, logger=logger,
serial_ports=serial_ports, boot_patterns = boot_patterns,
use_ovmf=ovmf, tmpfsdir=tmpfsdir)
- dump_monitor_cmds = kwargs.get("testimage_dump_monitor")
- self.monitor_dumper = MonitorDumper(dump_monitor_cmds, dump_dir, self.runner)
- if self.monitor_dumper:
- self.monitor_dumper.create_dir("qmp")
-
- dump_target_cmds = kwargs.get("testimage_dump_target")
- self.target_dumper = TargetDumper(dump_target_cmds, dump_dir, self.runner)
- self.target_dumper.create_dir("qemu")
def start(self, params=None, extra_bootparams=None, runqemuparams=''):
if self.use_slirp and not self.server_ip:
diff --git a/meta/lib/oeqa/core/target/ssh.py b/meta/lib/oeqa/core/target/ssh.py
index f956a7744f..09cdd14c75 100644
--- a/meta/lib/oeqa/core/target/ssh.py
+++ b/meta/lib/oeqa/core/target/ssh.py
@@ -34,17 +34,20 @@ class OESSHTarget(OETarget):
self.timeout = timeout
self.user = user
ssh_options = [
+ '-o', 'ServerAliveCountMax=2',
+ '-o', 'ServerAliveInterval=30',
'-o', 'UserKnownHostsFile=/dev/null',
'-o', 'StrictHostKeyChecking=no',
'-o', 'LogLevel=ERROR'
]
+ scp_options = [
+ '-r'
+ ]
self.ssh = ['ssh', '-l', self.user ] + ssh_options
- self.scp = ['scp'] + ssh_options
+ self.scp = ['scp'] + ssh_options + scp_options
if port:
self.ssh = self.ssh + [ '-p', port ]
self.scp = self.scp + [ '-P', port ]
- self._monitor_dumper = None
- self.target_dumper = None
def start(self, **kwargs):
pass
@@ -52,15 +55,6 @@ class OESSHTarget(OETarget):
def stop(self, **kwargs):
pass
- @property
- def monitor_dumper(self):
- return self._monitor_dumper
-
- @monitor_dumper.setter
- def monitor_dumper(self, dumper):
- self._monitor_dumper = dumper
- self.monitor_dumper.dump_monitor()
-
def _run(self, command, timeout=None, ignore_status=True):
"""
Runs command in target using SSHProcess.
@@ -78,7 +72,7 @@ class OESSHTarget(OETarget):
return (status, output)
- def run(self, command, timeout=None):
+ def run(self, command, timeout=None, ignore_status=True):
"""
Runs command in target.
@@ -97,16 +91,9 @@ class OESSHTarget(OETarget):
else:
processTimeout = self.timeout
- status, output = self._run(sshCmd, processTimeout, True)
+ status, output = self._run(sshCmd, processTimeout, ignore_status)
self.logger.debug('Command: %s\nStatus: %d Output: %s\n' % (command, status, output))
- if (status == 255) and (('No route to host') in output):
- if self.monitor_dumper:
- self.monitor_dumper.dump_monitor()
- if status == 255:
- if self.target_dumper:
- self.target_dumper.dump_target()
- if self.monitor_dumper:
- self.monitor_dumper.dump_monitor()
+
return (status, output)
def copyTo(self, localSrc, remoteDst):
@@ -224,27 +211,41 @@ def SSHCall(command, logger, timeout=None, **opts):
def run():
nonlocal output
nonlocal process
+ output_raw = b''
starttime = time.time()
process = subprocess.Popen(command, **options)
+ has_timeout = False
if timeout:
endtime = starttime + timeout
eof = False
- while time.time() < endtime and not eof:
- logger.debug('time: %s, endtime: %s' % (time.time(), endtime))
+ os.set_blocking(process.stdout.fileno(), False)
+ while not has_timeout and not eof:
try:
+ logger.debug('Waiting for process output: time: %s, endtime: %s' % (time.time(), endtime))
if select.select([process.stdout], [], [], 5)[0] != []:
- reader = codecs.getreader('utf-8')(process.stdout, 'ignore')
- data = reader.read(1024, 4096)
+ # wait a bit for more data, tries to avoid reading single characters
+ time.sleep(0.2)
+ data = process.stdout.read()
if not data:
- process.stdout.close()
eof = True
else:
- output += data
- logger.debug('Partial data from SSH call: %s' % data)
+ output_raw += data
+ # ignore errors to capture as much as possible
+ logger.debug('Partial data from SSH call:\n%s' % data.decode('utf-8', errors='ignore'))
endtime = time.time() + timeout
except InterruptedError:
+ logger.debug('InterruptedError')
+ continue
+ except BlockingIOError:
+ logger.debug('BlockingIOError')
continue
+ if time.time() >= endtime:
+ logger.debug('SSHCall has timeout! Time: %s, endtime: %s' % (time.time(), endtime))
+ has_timeout = True
+
+ process.stdout.close()
+
# process hasn't returned yet
if not eof:
process.terminate()
@@ -252,16 +253,42 @@ def SSHCall(command, logger, timeout=None, **opts):
try:
process.kill()
except OSError:
+ logger.debug('OSError when killing process')
pass
endtime = time.time() - starttime
lastline = ("\nProcess killed - no output for %d seconds. Total"
" running time: %d seconds." % (timeout, endtime))
- logger.debug('Received data from SSH call %s ' % lastline)
+ logger.debug('Received data from SSH call:\n%s ' % lastline)
output += lastline
+ process.wait()
else:
- output = process.communicate()[0].decode('utf-8', errors='ignore')
- logger.debug('Data from SSH call: %s' % output.rstrip())
+ output_raw = process.communicate()[0]
+
+ output = output_raw.decode('utf-8', errors='ignore')
+ logger.debug('Data from SSH call:\n%s' % output.rstrip())
+
+ # timout or not, make sure process exits and is not hanging
+ if process.returncode == None:
+ try:
+ process.wait(timeout=5)
+ except TimeoutExpired:
+ try:
+ process.kill()
+ except OSError:
+ logger.debug('OSError')
+ pass
+ process.wait()
+
+ if has_timeout:
+ # Version of openssh before 8.6_p1 returns error code 0 when killed
+ # by a signal, when the timeout occurs we will receive a 0 error
+ # code because the process is been terminated and it's wrong because
+ # that value means success, but the process timed out.
+ # Afterwards, from version 8.6_p1 onwards, the returned code is 255.
+ # Fix this behaviour by checking the return code
+ if process.returncode == 0:
+ process.returncode = 255
options = {
"stdout": subprocess.PIPE,
@@ -288,6 +315,9 @@ def SSHCall(command, logger, timeout=None, **opts):
# whilst running and ensure we don't leave a process behind.
if process.poll() is None:
process.kill()
+ if process.returncode == None:
+ process.wait()
logger.debug('Something went wrong, killing SSH process')
raise
- return (process.wait(), output.rstrip())
+
+ return (process.returncode, output.rstrip())
diff --git a/meta/lib/oeqa/core/utils/concurrencytest.py b/meta/lib/oeqa/core/utils/concurrencytest.py
index 161a2f6e90..d10f8f7f04 100644
--- a/meta/lib/oeqa/core/utils/concurrencytest.py
+++ b/meta/lib/oeqa/core/utils/concurrencytest.py
@@ -1,5 +1,7 @@
#!/usr/bin/env python3
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: GPL-2.0-or-later
#
# Modified for use in OE by Richard Purdie, 2018
@@ -57,6 +59,7 @@ class BBThreadsafeForwardingResult(ThreadsafeForwardingResult):
self.outputbuf = output
self.finalresult = finalresult
self.finalresult.buffer = True
+ self.target = target
def _add_result_with_semaphore(self, method, test, *args, **kwargs):
self.semaphore.acquire()
@@ -65,13 +68,14 @@ class BBThreadsafeForwardingResult(ThreadsafeForwardingResult):
self.result.starttime[test.id()] = self._test_start.timestamp()
self.result.threadprogress[self.threadnum].append(test.id())
totalprogress = sum(len(x) for x in self.result.threadprogress.values())
- self.result.progressinfo[test.id()] = "%s: %s/%s %s/%s (%ss) (%s)" % (
+ self.result.progressinfo[test.id()] = "%s: %s/%s %s/%s (%ss) (%s failed) (%s)" % (
self.threadnum,
len(self.result.threadprogress[self.threadnum]),
self.totalinprocess,
totalprogress,
self.totaltests,
"{0:.2f}".format(time.time()-self._test_start.timestamp()),
+ self.target.failed_tests,
test.id())
finally:
self.semaphore.release()
@@ -189,11 +193,12 @@ class dummybuf(object):
#
class ConcurrentTestSuite(unittest.TestSuite):
- def __init__(self, suite, processes, setupfunc, removefunc):
+ def __init__(self, suite, processes, setupfunc, removefunc, bb_vars):
super(ConcurrentTestSuite, self).__init__([suite])
self.processes = processes
self.setupfunc = setupfunc
self.removefunc = removefunc
+ self.bb_vars = bb_vars
def run(self, result):
testservers, totaltests = fork_for_tests(self.processes, self)
@@ -239,7 +244,7 @@ class ConcurrentTestSuite(unittest.TestSuite):
def fork_for_tests(concurrency_num, suite):
testservers = []
if 'BUILDDIR' in os.environ:
- selftestdir = get_test_layer()
+ selftestdir = get_test_layer(suite.bb_vars['BBLAYERS'])
test_blocks = partition_tests(suite, concurrency_num)
# Clear the tests from the original suite so it doesn't keep them alive
@@ -259,7 +264,7 @@ def fork_for_tests(concurrency_num, suite):
ourpid = os.getpid()
try:
newbuilddir = None
- stream = os.fdopen(c2pwrite, 'wb', 1)
+ stream = os.fdopen(c2pwrite, 'wb')
os.close(c2pread)
(builddir, newbuilddir) = suite.setupfunc("-st-" + str(ourpid), selftestdir, process_suite)
@@ -304,7 +309,7 @@ def fork_for_tests(concurrency_num, suite):
os._exit(0)
else:
os.close(c2pwrite)
- stream = os.fdopen(c2pread, 'rb', 1)
+ stream = os.fdopen(c2pread, 'rb')
# Collect stdout/stderr into an io buffer
output = io.BytesIO()
testserver = ProtocolTestCase(stream, passthrough=output)
diff --git a/meta/lib/oeqa/files/maturin/guessing-game/Cargo.toml b/meta/lib/oeqa/files/maturin/guessing-game/Cargo.toml
new file mode 100644
index 0000000000..de95025e86
--- /dev/null
+++ b/meta/lib/oeqa/files/maturin/guessing-game/Cargo.toml
@@ -0,0 +1,20 @@
+[package]
+name = "guessing-game"
+version = "0.1.0"
+edition = "2021"
+
+# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
+
+[lib]
+name = "guessing_game"
+# "cdylib" is necessary to produce a shared library for Python to import from.
+crate-type = ["cdylib"]
+
+[dependencies]
+rand = "0.8.4"
+
+[dependencies.pyo3]
+version = "0.19.0"
+# "abi3-py38" tells pyo3 (and maturin) to build using the stable ABI with minimum Python version 3.8
+features = ["abi3-py38"]
+
diff --git a/meta/lib/oeqa/files/maturin/guessing-game/LICENSE-APACHE b/meta/lib/oeqa/files/maturin/guessing-game/LICENSE-APACHE
new file mode 100644
index 0000000000..16fe87b06e
--- /dev/null
+++ b/meta/lib/oeqa/files/maturin/guessing-game/LICENSE-APACHE
@@ -0,0 +1,201 @@
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+END OF TERMS AND CONDITIONS
+
+APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+Copyright [yyyy] [name of copyright owner]
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
diff --git a/meta/lib/oeqa/files/maturin/guessing-game/LICENSE-MIT b/meta/lib/oeqa/files/maturin/guessing-game/LICENSE-MIT
new file mode 100644
index 0000000000..c4a9a58791
--- /dev/null
+++ b/meta/lib/oeqa/files/maturin/guessing-game/LICENSE-MIT
@@ -0,0 +1,25 @@
+Copyright (c) 2018 konstin
+
+Permission is hereby granted, free of charge, to any
+person obtaining a copy of this software and associated
+documentation files (the "Software"), to deal in the
+Software without restriction, including without
+limitation the rights to use, copy, modify, merge,
+publish, distribute, sublicense, and/or sell copies of
+the Software, and to permit persons to whom the Software
+is furnished to do so, subject to the following
+conditions:
+
+The above copyright notice and this permission notice
+shall be included in all copies or substantial portions
+of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
+ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
+TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
+PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
+SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
+IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+DEALINGS IN THE SOFTWARE.
diff --git a/meta/lib/oeqa/files/maturin/guessing-game/pyproject.toml b/meta/lib/oeqa/files/maturin/guessing-game/pyproject.toml
new file mode 100644
index 0000000000..ff35abc472
--- /dev/null
+++ b/meta/lib/oeqa/files/maturin/guessing-game/pyproject.toml
@@ -0,0 +1,8 @@
+[build-system]
+requires = ["maturin>=1.0,<2.0"]
+build-backend = "maturin"
+
+[tool.maturin]
+# "extension-module" tells pyo3 we want to build an extension module (skips linking against libpython.so)
+features = ["pyo3/extension-module"]
+
diff --git a/meta/lib/oeqa/files/maturin/guessing-game/src/lib.rs b/meta/lib/oeqa/files/maturin/guessing-game/src/lib.rs
new file mode 100644
index 0000000000..6828466ed1
--- /dev/null
+++ b/meta/lib/oeqa/files/maturin/guessing-game/src/lib.rs
@@ -0,0 +1,48 @@
+use pyo3::prelude::*;
+use rand::Rng;
+use std::cmp::Ordering;
+use std::io;
+
+#[pyfunction]
+fn guess_the_number() {
+ println!("Guess the number!");
+
+ let secret_number = rand::thread_rng().gen_range(1..101);
+
+ loop {
+ println!("Please input your guess.");
+
+ let mut guess = String::new();
+
+ io::stdin()
+ .read_line(&mut guess)
+ .expect("Failed to read line");
+
+ let guess: u32 = match guess.trim().parse() {
+ Ok(num) => num,
+ Err(_) => continue,
+ };
+
+ println!("You guessed: {}", guess);
+
+ match guess.cmp(&secret_number) {
+ Ordering::Less => println!("Too small!"),
+ Ordering::Greater => println!("Too big!"),
+ Ordering::Equal => {
+ println!("You win!");
+ break;
+ }
+ }
+ }
+}
+
+/// A Python module implemented in Rust. The name of this function must match
+/// the `lib.name` setting in the `Cargo.toml`, else Python will not be able to
+/// import the module.
+#[pymodule]
+fn guessing_game(_py: Python, m: &PyModule) -> PyResult<()> {
+ m.add_function(wrap_pyfunction!(guess_the_number, m)?)?;
+
+ Ok(())
+}
+
diff --git a/meta/lib/oeqa/files/test.rs b/meta/lib/oeqa/files/test.rs
new file mode 100644
index 0000000000..f79c691f08
--- /dev/null
+++ b/meta/lib/oeqa/files/test.rs
@@ -0,0 +1,2 @@
+fn main() {
+}
diff --git a/meta/lib/oeqa/oetest.py b/meta/lib/oeqa/oetest.py
index 9c84466dd0..bcb6a878c7 100644
--- a/meta/lib/oeqa/oetest.py
+++ b/meta/lib/oeqa/oetest.py
@@ -28,7 +28,7 @@ try:
import oeqa.sdkext
except ImportError:
pass
-from oeqa.utils.decorators import LogResults, gettag, getResults
+from oeqa.utils.decorators import LogResults, gettag
logger = logging.getLogger("BitBake")
@@ -57,7 +57,6 @@ def filterByTagExp(testsuite, tagexp):
@LogResults
class oeTest(unittest.TestCase):
- pscmd = "ps"
longMessage = True
@classmethod
@@ -110,20 +109,6 @@ class oeRuntimeTest(oeTest):
def tearDown(self):
# Uninstall packages in the DUT
self.tc.install_uninstall_packages(self.id(), False)
-
- res = getResults()
- # If a test fails or there is an exception dump
- # for QemuTarget only
- if (type(self.target).__name__ == "QemuTarget" and
- (self.id() in res.getErrorList() or
- self.id() in res.getFailList())):
- self.tc.host_dumper.create_dir(self._testMethodName)
- self.tc.host_dumper.dump_host()
- self.target.target_dumper.dump_target(
- self.tc.host_dumper.dump_dir)
- print ("%s dump data stored in %s" % (self._testMethodName,
- self.tc.host_dumper.dump_dir))
-
self.tearDownLocal()
# Method to be run after tearDown and implemented by child classes
@@ -256,7 +241,7 @@ class TestContext(object):
modules = []
for test in self.testslist:
- if re.search("\w+\.\w+\.test_\S+", test):
+ if re.search(r"\w+\.\w+\.test_\S+", test):
test = '.'.join(t.split('.')[:3])
module = pkgutil.get_loader(test)
modules.append(module)
@@ -398,11 +383,6 @@ class RuntimeTestContext(TestContext):
def _get_test_suites_required(self):
return [t for t in self.d.getVar("TEST_SUITES").split() if t != "auto"]
- def loadTests(self):
- super(RuntimeTestContext, self).loadTests()
- if oeTest.hasPackage("procps"):
- oeRuntimeTest.pscmd = "ps -ef"
-
def extract_packages(self):
"""
Find packages that will be needed during runtime.
diff --git a/meta/lib/oeqa/runtime/cases/_qemutiny.py b/meta/lib/oeqa/runtime/cases/_qemutiny.py
index 6886e36502..816fd4a7cb 100644
--- a/meta/lib/oeqa/runtime/cases/_qemutiny.py
+++ b/meta/lib/oeqa/runtime/cases/_qemutiny.py
@@ -1,12 +1,19 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
from oeqa.runtime.case import OERuntimeTestCase
+from oeqa.core.target.qemu import OEQemuTarget
class QemuTinyTest(OERuntimeTestCase):
def test_boot_tiny(self):
- status, output = self.target.run_serial('uname -a')
- msg = "Cannot detect poky tiny boot!"
- self.assertTrue("yocto-tiny" in output, msg)
+ # Until the target has explicit run_serial support, check that the
+ # target is the qemu runner
+ if isinstance(self.target, OEQemuTarget):
+ status, output = self.target.runner.run_serial('uname -a')
+ self.assertIn("Linux", output)
+ else:
+ self.skipTest("Target %s is not OEQemuTarget" % self.target)
diff --git a/meta/lib/oeqa/runtime/cases/apt.py b/meta/lib/oeqa/runtime/cases/apt.py
index 574a34f148..8000645843 100644
--- a/meta/lib/oeqa/runtime/cases/apt.py
+++ b/meta/lib/oeqa/runtime/cases/apt.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
@@ -37,9 +39,9 @@ class AptRepoTest(AptTest):
self.target.run('cd %s; echo deb [ allow-insecure=yes ] %s/all ./ > sources.list' % (apt_get_sourceslist_dir, apt_get_source_server))
def setup_source_config_for_package_install_signed(self):
- apt_get_source_server = 'http:\/\/%s:%s' % (self.tc.target.server_ip, self.repo_server.port)
+ apt_get_source_server = 'http://%s:%s' % (self.tc.target.server_ip, self.repo_server.port)
apt_get_sourceslist_dir = '/etc/apt/'
- self.target.run("cd %s; cp sources.list sources.list.bak; sed -i 's/\[trusted=yes\] http:\/\/bogus_ip:bogus_port/%s/g' sources.list" % (apt_get_sourceslist_dir, apt_get_source_server))
+ self.target.run("cd %s; cp sources.list sources.list.bak; sed -i 's|\[trusted=yes\] http://bogus_ip:bogus_port|%s|g' sources.list" % (apt_get_sourceslist_dir, apt_get_source_server))
def cleanup_source_config_for_package_install(self):
apt_get_sourceslist_dir = '/etc/apt/'
diff --git a/meta/lib/oeqa/runtime/cases/boot.py b/meta/lib/oeqa/runtime/cases/boot.py
index e1ad88a174..dcee3311f7 100644
--- a/meta/lib/oeqa/runtime/cases/boot.py
+++ b/meta/lib/oeqa/runtime/cases/boot.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
diff --git a/meta/lib/oeqa/runtime/cases/buildcpio.py b/meta/lib/oeqa/runtime/cases/buildcpio.py
index e29bf16ccb..7be734cb4f 100644
--- a/meta/lib/oeqa/runtime/cases/buildcpio.py
+++ b/meta/lib/oeqa/runtime/cases/buildcpio.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
@@ -12,7 +14,7 @@ class BuildCpioTest(OERuntimeTestCase):
@classmethod
def setUpClass(cls):
- uri = 'https://downloads.yoctoproject.org/mirror/sources/cpio-2.13.tar.gz'
+ uri = 'https://downloads.yoctoproject.org/mirror/sources/cpio-2.15.tar.gz'
cls.project = TargetBuildProject(cls.tc.target,
uri,
dl_dir = cls.tc.td['DL_DIR'])
@@ -27,7 +29,6 @@ class BuildCpioTest(OERuntimeTestCase):
@OEHasPackage(['autoconf'])
def test_cpio(self):
self.project.download_archive()
- self.project.run_configure('--disable-maintainer-mode',
- 'sed -i -e "/char \*program_name/d" src/global.c;')
+ self.project.run_configure()
self.project.run_make()
self.project.run_install()
diff --git a/meta/lib/oeqa/runtime/cases/buildgalculator.py b/meta/lib/oeqa/runtime/cases/buildgalculator.py
index e5cc3e2888..2cfb3243dc 100644
--- a/meta/lib/oeqa/runtime/cases/buildgalculator.py
+++ b/meta/lib/oeqa/runtime/cases/buildgalculator.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
diff --git a/meta/lib/oeqa/runtime/cases/buildlzip.py b/meta/lib/oeqa/runtime/cases/buildlzip.py
index bc70b41461..44f4f1be71 100644
--- a/meta/lib/oeqa/runtime/cases/buildlzip.py
+++ b/meta/lib/oeqa/runtime/cases/buildlzip.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
diff --git a/meta/lib/oeqa/runtime/cases/connman.py b/meta/lib/oeqa/runtime/cases/connman.py
index f0d15fac9b..a488752e3f 100644
--- a/meta/lib/oeqa/runtime/cases/connman.py
+++ b/meta/lib/oeqa/runtime/cases/connman.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
diff --git a/meta/lib/oeqa/runtime/cases/date.py b/meta/lib/oeqa/runtime/cases/date.py
index bd6537400e..a2523de67a 100644
--- a/meta/lib/oeqa/runtime/cases/date.py
+++ b/meta/lib/oeqa/runtime/cases/date.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
diff --git a/meta/lib/oeqa/runtime/cases/df.py b/meta/lib/oeqa/runtime/cases/df.py
index bb155c9cf9..43e0ebf9ea 100644
--- a/meta/lib/oeqa/runtime/cases/df.py
+++ b/meta/lib/oeqa/runtime/cases/df.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
diff --git a/meta/lib/oeqa/runtime/cases/dnf.py b/meta/lib/oeqa/runtime/cases/dnf.py
index f40c63026e..3ccb18ce83 100644
--- a/meta/lib/oeqa/runtime/cases/dnf.py
+++ b/meta/lib/oeqa/runtime/cases/dnf.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
@@ -73,48 +75,43 @@ class DnfRepoTest(DnfTest):
def test_dnf_makecache(self):
self.dnf_with_repo('makecache')
-
-# Does not work when repo is specified on the command line
-# @OETestDepends(['dnf.DnfRepoTest.test_dnf_makecache'])
-# def test_dnf_repolist(self):
-# self.dnf_with_repo('repolist')
-
@OETestDepends(['dnf.DnfRepoTest.test_dnf_makecache'])
def test_dnf_repoinfo(self):
self.dnf_with_repo('repoinfo')
@OETestDepends(['dnf.DnfRepoTest.test_dnf_makecache'])
def test_dnf_install(self):
- output = self.dnf_with_repo('list run-postinsts-dev')
- if 'Installed Packages' in output:
- self.dnf_with_repo('remove -y run-postinsts-dev')
- self.dnf_with_repo('install -y run-postinsts-dev')
+ self.dnf_with_repo('remove -y dnf-test-*')
+ self.dnf_with_repo('install -y dnf-test-dep')
@OETestDepends(['dnf.DnfRepoTest.test_dnf_install'])
def test_dnf_install_dependency(self):
- self.dnf_with_repo('remove -y run-postinsts')
- self.dnf_with_repo('install -y run-postinsts-dev')
+ self.dnf_with_repo('remove -y dnf-test-*')
+ self.dnf_with_repo('install -y dnf-test-main')
+ output = self.dnf('list --installed dnf-test-*')
+ self.assertIn("dnf-test-main.", output)
+ self.assertIn("dnf-test-dep.", output)
@OETestDepends(['dnf.DnfRepoTest.test_dnf_install_dependency'])
def test_dnf_install_from_disk(self):
- self.dnf_with_repo('remove -y run-postinsts-dev')
- self.dnf_with_repo('install -y --downloadonly run-postinsts-dev')
- status, output = self.target.run('find /var/cache/dnf -name run-postinsts-dev*rpm', 1500)
+ self.dnf_with_repo('remove -y dnf-test-dep')
+ self.dnf_with_repo('install -y --downloadonly dnf-test-dep')
+ status, output = self.target.run('find /var/cache/dnf -name dnf-test-dep*rpm')
self.assertEqual(status, 0, output)
self.dnf_with_repo('install -y %s' % output)
@OETestDepends(['dnf.DnfRepoTest.test_dnf_install_from_disk'])
def test_dnf_install_from_http(self):
- output = subprocess.check_output('%s %s -name run-postinsts-dev*' % (bb.utils.which(os.getenv('PATH'), "find"),
+ output = subprocess.check_output('%s %s -name dnf-test-dep*' % (bb.utils.which(os.getenv('PATH'), "find"),
os.path.join(self.tc.td['WORKDIR'], 'oe-testimage-repo')), shell=True).decode("utf-8")
rpm_path = output.split("/")[-2] + "/" + output.split("/")[-1]
url = 'http://%s:%s/%s' %(self.target.server_ip, self.repo_server.port, rpm_path)
- self.dnf_with_repo('remove -y run-postinsts-dev')
+ self.dnf_with_repo('remove -y dnf-test-dep')
self.dnf_with_repo('install -y %s' % url)
@OETestDepends(['dnf.DnfRepoTest.test_dnf_install'])
def test_dnf_reinstall(self):
- self.dnf_with_repo('reinstall -y run-postinsts-dev')
+ self.dnf_with_repo('reinstall -y dnf-test-main')
@OETestDepends(['dnf.DnfRepoTest.test_dnf_makecache'])
@skipIfInDataVar('DISTRO_FEATURES', 'usrmerge', 'Test run when not enable usrmerge')
@@ -137,55 +134,40 @@ class DnfRepoTest(DnfTest):
self.target.run('cp -r /etc/dnf %s/etc' % rootpath, 1500)
self.target.run('cp /bin/sh %s/bin' % rootpath, 1500)
self.target.run('mount -o bind /dev %s/dev/' % rootpath, 1500)
- self.dnf_with_repo('install --installroot=%s -v -y --rpmverbosity=debug busybox run-postinsts' % rootpath)
+ self.dnf_with_repo('install --installroot=%s -v -y --rpmverbosity=debug busybox' % rootpath)
status, output = self.target.run('test -e %s/var/cache/dnf' % rootpath, 1500)
self.assertEqual(0, status, output)
status, output = self.target.run('test -e %s/bin/busybox' % rootpath, 1500)
self.assertEqual(0, status, output)
@OETestDepends(['dnf.DnfRepoTest.test_dnf_makecache'])
- @skipIfNotInDataVar('DISTRO_FEATURES', 'usrmerge', 'Test run when enable usrmege')
+ @skipIfNotInDataVar('DISTRO_FEATURES', 'usrmerge', 'Test run when enable usrmerge')
@OEHasPackage('busybox')
def test_dnf_installroot_usrmerge(self):
rootpath = '/home/root/chroot/test'
#Copy necessary files to avoid errors with not yet installed tools on
#installroot directory.
- self.target.run('mkdir -p %s/etc' % rootpath, 1500)
- self.target.run('mkdir -p %s/usr/bin %s/usr/sbin' % (rootpath, rootpath), 1500)
- self.target.run('ln -sf -r %s/usr/bin %s/bin' % (rootpath, rootpath), 1500)
- self.target.run('ln -sf -r %s/usr/sbin %s/sbin' % (rootpath, rootpath), 1500)
- self.target.run('mkdir -p %s/dev' % rootpath, 1500)
+ self.target.run('mkdir -p %s/etc' % rootpath)
+ self.target.run('mkdir -p %s/usr/bin %s/usr/sbin' % (rootpath, rootpath))
+ self.target.run('ln -sf usr/bin %s/bin' % (rootpath))
+ self.target.run('ln -sf usr/sbin %s/sbin' % (rootpath))
+ self.target.run('mkdir -p %s/dev' % rootpath)
#Handle different architectures lib dirs
- self.target.run('mkdir -p %s/usr/lib' % rootpath, 1500)
- self.target.run('mkdir -p %s/usr/libx32' % rootpath, 1500)
- self.target.run('mkdir -p %s/usr/lib64' % rootpath, 1500)
- self.target.run('cp /lib/libtinfo.so.5 %s/usr/lib' % rootpath, 1500)
- self.target.run('cp /libx32/libtinfo.so.5 %s/usr/libx32' % rootpath, 1500)
- self.target.run('cp /lib64/libtinfo.so.5 %s/usr/lib64' % rootpath, 1500)
- self.target.run('ln -sf -r %s/lib %s/usr/lib' % (rootpath,rootpath), 1500)
- self.target.run('ln -sf -r %s/libx32 %s/usr/libx32' % (rootpath,rootpath), 1500)
- self.target.run('ln -sf -r %s/lib64 %s/usr/lib64' % (rootpath,rootpath), 1500)
- self.target.run('cp -r /etc/rpm %s/etc' % rootpath, 1500)
- self.target.run('cp -r /etc/dnf %s/etc' % rootpath, 1500)
- self.target.run('cp /bin/sh %s/bin' % rootpath, 1500)
- self.target.run('mount -o bind /dev %s/dev/' % rootpath, 1500)
- self.dnf_with_repo('install --installroot=%s -v -y --rpmverbosity=debug busybox run-postinsts' % rootpath)
- status, output = self.target.run('test -e %s/var/cache/dnf' % rootpath, 1500)
+ self.target.run("for l in /lib*; do mkdir -p %s/usr/$l; ln -s usr/$l %s/$l; done" % (rootpath, rootpath))
+ self.target.run('cp -r /etc/rpm %s/etc' % rootpath)
+ self.target.run('cp -r /etc/dnf %s/etc' % rootpath)
+ self.target.run('cp /bin/busybox %s/bin/sh' % rootpath)
+ self.target.run('mount -o bind /dev %s/dev/' % rootpath)
+ self.dnf_with_repo('install --installroot=%s -v -y --rpmverbosity=debug busybox' % rootpath)
+ status, output = self.target.run('test -e %s/var/cache/dnf' % rootpath)
self.assertEqual(0, status, output)
- status, output = self.target.run('test -e %s/bin/busybox' % rootpath, 1500)
+ status, output = self.target.run('test -e %s/bin/busybox' % rootpath)
self.assertEqual(0, status, output)
@OETestDepends(['dnf.DnfRepoTest.test_dnf_makecache'])
def test_dnf_exclude(self):
- excludepkg = 'curl-dev'
- self.dnf_with_repo('install -y curl*')
- self.dnf('list %s' % excludepkg, 0)
- #Avoid remove dependencies to skip some errors on different archs and images
- self.dnf_with_repo('remove --setopt=clean_requirements_on_remove=0 -y curl*')
- #check curl-dev is not installed adter removing all curl occurrences
- status, output = self.target.run('dnf list --installed | grep %s'% excludepkg, 1500)
- self.assertEqual(1, status, "%s was not removed, is listed as installed"%excludepkg)
- self.dnf_with_repo('install -y --exclude=%s --exclude=curl-staticdev curl*' % excludepkg)
- #check curl-dev is not installed after being excluded
- status, output = self.target.run('dnf list --installed | grep %s'% excludepkg , 1500)
- self.assertEqual(1, status, "%s was not excluded, is listed as installed"%excludepkg)
+ self.dnf_with_repo('remove -y dnf-test-*')
+ self.dnf_with_repo('install -y --exclude=dnf-test-dep dnf-test-*')
+ output = self.dnf('list --installed dnf-test-*')
+ self.assertIn("dnf-test-main.", output)
+ self.assertNotIn("dnf-test-dev.", output)
diff --git a/meta/lib/oeqa/runtime/cases/ethernet_ip_connman.py b/meta/lib/oeqa/runtime/cases/ethernet_ip_connman.py
index b93ee29941..eac8f2d082 100644
--- a/meta/lib/oeqa/runtime/cases/ethernet_ip_connman.py
+++ b/meta/lib/oeqa/runtime/cases/ethernet_ip_connman.py
@@ -1,3 +1,8 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
from oeqa.runtime.case import OERuntimeTestCase
from oeqa.core.decorator.depends import OETestDepends
from oeqa.core.decorator.data import skipIfQemu
diff --git a/meta/lib/oeqa/runtime/cases/gcc.py b/meta/lib/oeqa/runtime/cases/gcc.py
index 1b6e431bf4..17b1483e8d 100644
--- a/meta/lib/oeqa/runtime/cases/gcc.py
+++ b/meta/lib/oeqa/runtime/cases/gcc.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
diff --git a/meta/lib/oeqa/runtime/cases/gi.py b/meta/lib/oeqa/runtime/cases/gi.py
index 42bd100a31..78c7ddda2c 100644
--- a/meta/lib/oeqa/runtime/cases/gi.py
+++ b/meta/lib/oeqa/runtime/cases/gi.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
diff --git a/meta/lib/oeqa/runtime/cases/go.py b/meta/lib/oeqa/runtime/cases/go.py
index 89ba2c3ecb..39a80f4dca 100644
--- a/meta/lib/oeqa/runtime/cases/go.py
+++ b/meta/lib/oeqa/runtime/cases/go.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
@@ -16,4 +18,4 @@ class GoHelloworldTest(OERuntimeTestCase):
self.assertEqual(status, 0, msg=msg)
msg = 'Incorrect output: %s' % output
- self.assertEqual(output, "Hello, Go examples!", msg=msg)
+ self.assertEqual(output, "Hello, world!", msg=msg)
diff --git a/meta/lib/oeqa/runtime/cases/gstreamer.py b/meta/lib/oeqa/runtime/cases/gstreamer.py
index f735f82e3b..2295769cfd 100644
--- a/meta/lib/oeqa/runtime/cases/gstreamer.py
+++ b/meta/lib/oeqa/runtime/cases/gstreamer.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
diff --git a/meta/lib/oeqa/runtime/cases/kernelmodule.py b/meta/lib/oeqa/runtime/cases/kernelmodule.py
index 47fd2f850c..9c42fcc586 100644
--- a/meta/lib/oeqa/runtime/cases/kernelmodule.py
+++ b/meta/lib/oeqa/runtime/cases/kernelmodule.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
diff --git a/meta/lib/oeqa/runtime/cases/ksample.py b/meta/lib/oeqa/runtime/cases/ksample.py
index c69e3fe4ac..b6848762e3 100644
--- a/meta/lib/oeqa/runtime/cases/ksample.py
+++ b/meta/lib/oeqa/runtime/cases/ksample.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
diff --git a/meta/lib/oeqa/runtime/cases/ldd.py b/meta/lib/oeqa/runtime/cases/ldd.py
index 9c2caa8f65..f6841c6675 100644
--- a/meta/lib/oeqa/runtime/cases/ldd.py
+++ b/meta/lib/oeqa/runtime/cases/ldd.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
diff --git a/meta/lib/oeqa/runtime/cases/login.py b/meta/lib/oeqa/runtime/cases/login.py
new file mode 100644
index 0000000000..e1bc60d49b
--- /dev/null
+++ b/meta/lib/oeqa/runtime/cases/login.py
@@ -0,0 +1,116 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+import shutil
+import subprocess
+import tempfile
+import time
+import os
+from datetime import datetime
+from oeqa.runtime.case import OERuntimeTestCase
+from oeqa.runtime.decorator.package import OEHasPackage
+
+### Status of qemu images.
+# - runqemu qemuppc64 comes up blank. (skip)
+# - qemuarmv5 comes up with multiple heads but sending "head" to screendump.
+# seems to create a png with a bad header? (skip for now, but come back to fix)
+# - qemuriscv32 and qemuloongarch64 doesn't work with testimage apparently? (skip)
+# - qemumips64 is missing mouse icon.
+# - qemumips takes forever to render and is missing mouse icon.
+# - qemuarm and qemuppc are odd as they don't resize so we need to just set width.
+# - All images have home and screen flipper icons not always rendered fully at first.
+# the sleep seems to help this out some, depending on machine load.
+###
+
+class LoginTest(OERuntimeTestCase):
+ @OEHasPackage(['matchbox-desktop', 'dbus-wait'])
+ def test_screenshot(self):
+ if self.td.get('MACHINE') in ("qemuppc64", "qemuarmv5", "qemuriscv32", "qemuriscv64", "qemuloongarch64"):
+ self.skipTest("{0} is not currently supported.".format(self.td.get('MACHINE')))
+
+ pn = self.td.get('PN')
+
+ ourenv = os.environ.copy()
+ origpath = self.td.get("ORIGPATH")
+ if origpath:
+ ourenv['PATH'] = ourenv['PATH'] + ":" + origpath
+
+ for cmd in ["identify.im7", "convert.im7", "compare.im7"]:
+ try:
+ subprocess.check_output(["which", cmd], env=ourenv)
+ except subprocess.CalledProcessError:
+ self.skipTest("%s (from imagemagick) not available" % cmd)
+
+
+ # Store images so we can debug them if needed
+ saved_screenshots_dir = self.td.get('T') + "/saved-screenshots/"
+
+ ###
+ # This is a really horrible way of doing this but I've not found the
+ # right event to determine "The system is loaded and screen is rendered"
+ #
+ # Using dbus-wait for matchbox is the wrong answer because while it
+ # ensures the system is up, it doesn't mean the screen is rendered.
+ #
+ # Checking the qmp socket doesn't work afaik either.
+ #
+ # One way to do this is to do compares of known good screendumps until
+ # we either get expected or close to expected or we time out. Part of the
+ # issue here with that is that there is a very fine difference in the
+ # diff between a screendump where the icons haven't loaded yet and
+ # one where they won't load. I'll look at that next, but, for now, this.
+ #
+ # Which is ugly and I hate it but it 'works' for various definitions of
+ # 'works'.
+ ###
+ # RP: if the signal is sent before we run this, it will never be seen and we'd timeout
+ #status, output = self.target.run('dbus-wait org.matchbox_project.desktop Loaded')
+ #if status != 0 or "Timeout" in output:
+ # self.fail('dbus-wait failed (%s, %s). This could mean that the image never loaded the matchbox desktop.' % (status, output))
+
+ # Start taking screenshots every 2 seconds until diff=0 or timeout is 60 seconds
+ timeout = time.time() + 60
+ diff = True
+ with tempfile.NamedTemporaryFile(prefix="oeqa-screenshot-login", suffix=".png") as t:
+ while diff != 0 and time.time() < timeout:
+ time.sleep(2)
+ ret = self.target.runner.run_monitor("screendump", args={"filename": t.name, "format":"png"})
+
+ # Find out size of image so we can determine where to blank out clock.
+ # qemuarm and qemuppc are odd as it doesn't resize the window and returns
+ # incorrect widths
+ if self.td.get('MACHINE') == "qemuarm" or self.td.get('MACHINE') == "qemuppc":
+ width = "640"
+ else:
+ cmd = "identify.im7 -ping -format '%w' {0}".format(t.name)
+ width = subprocess.check_output(cmd, shell=True, env=ourenv).decode()
+
+ rblank = int(float(width))
+ lblank = rblank-80
+
+ # Use the meta-oe version of convert, along with it's suffix. This blanks out the clock.
+ cmd = "convert.im7 {0} -fill white -draw 'rectangle {1},4 {2},28' {3}".format(t.name, str(rblank), str(lblank), t.name)
+ convert_out=subprocess.check_output(cmd, shell=True, env=ourenv).decode()
+
+ bb.utils.mkdirhier(saved_screenshots_dir)
+ savedfile = "{0}/saved-{1}-{2}-{3}.png".format(saved_screenshots_dir, \
+ datetime.timestamp(datetime.now()), \
+ pn, \
+ self.td.get('MACHINE'))
+ shutil.copy2(t.name, savedfile)
+
+ refimage = self.td.get('COREBASE') + "/meta/files/screenshot-tests/" + pn + "-" + self.td.get('MACHINE') +".png"
+ if not os.path.exists(refimage):
+ self.skipTest("No reference image for comparision (%s)" % refimage)
+
+ cmd = "compare.im7 -metric MSE {0} {1} /dev/null".format(t.name, refimage)
+ compare_out = subprocess.run(cmd, shell=True, capture_output=True, text=True, env=ourenv)
+ diff=float(compare_out.stderr.replace("(", "").replace(")","").split()[1])
+ if diff > 0:
+ # Keep a copy of the failed screenshot so we can see what happened.
+ self.fail("Screenshot diff is {0}. Failed image stored in {1}".format(str(diff), savedfile))
+ else:
+ self.assertEqual(0, diff, "Screenshot diff is {0}.".format(str(diff)))
diff --git a/meta/lib/oeqa/runtime/cases/logrotate.py b/meta/lib/oeqa/runtime/cases/logrotate.py
index 2bff08f9da..6ad980cb6a 100644
--- a/meta/lib/oeqa/runtime/cases/logrotate.py
+++ b/meta/lib/oeqa/runtime/cases/logrotate.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
diff --git a/meta/lib/oeqa/runtime/cases/ltp.py b/meta/lib/oeqa/runtime/cases/ltp.py
index a66d5d13d7..f588a93200 100644
--- a/meta/lib/oeqa/runtime/cases/ltp.py
+++ b/meta/lib/oeqa/runtime/cases/ltp.py
@@ -57,37 +57,47 @@ class LtpTestBase(OERuntimeTestCase):
class LtpTest(LtpTestBase):
- ltp_groups = ["math", "syscalls", "dio", "io", "mm", "ipc", "sched", "nptl", "pty", "containers", "controllers", "filecaps", "cap_bounds", "fcntl-locktests", "connectors", "commands", "net.ipv6_lib", "input","fs_perms_simple"]
+ ltp_groups = ["math", "syscalls", "dio", "io", "mm", "ipc", "sched", "nptl", "pty", "containers", "controllers", "filecaps", "cap_bounds", "fcntl-locktests", "commands", "net.ipv6_lib", "input","fs_perms_simple", "cve", "crypto", "ima", "net.nfs", "net_stress.ipsec_icmp", "net.ipv6", "numa", "uevent", "ltp-aiodio.part1", "ltp-aiodio.part2", "ltp-aiodio.part3", "ltp-aiodio.part4"]
- ltp_fs = ["fs", "fsx", "fs_bind"]
+ ltp_fs = ["fs", "fs_bind"]
# skip kernel cpuhotplug
ltp_kernel = ["power_management_tests", "hyperthreading ", "kernel_misc", "hugetlb"]
ltp_groups += ltp_fs
def runltp(self, ltp_group):
- cmd = '/opt/ltp/runltp -f %s -p -q -r /opt/ltp -l /opt/ltp/results/%s -I 1 -d /opt/ltp' % (ltp_group, ltp_group)
+ # LTP appends to log files, so ensure we start with a clean log
+ self.target.deleteFiles("/opt/ltp/results/", ltp_group)
+
+ cmd = '/opt/ltp/runltp -f %s -q -r /opt/ltp -l /opt/ltp/results/%s -I 1 -d /opt/ltp' % (ltp_group, ltp_group)
+
starttime = time.time()
- (status, output) = self.target.run(cmd)
+ (status, output) = self.target.run(cmd, timeout=1200)
endtime = time.time()
+ # status of 1 is 'just' tests failing. 255 likely was a command output timeout
+ if status and status != 1:
+ msg = 'Command %s returned exit code %s' % (cmd, status)
+ self.target.logger.warning(msg)
+
+ # Write the console log to disk for convenience
with open(os.path.join(self.ltptest_log_dir, "%s-raw.log" % ltp_group), 'w') as f:
f.write(output)
+ # Also put the console log into the test result JSON
self.extras['ltpresult.rawlogs']['log'] = self.extras['ltpresult.rawlogs']['log'] + output
- # copy nice log from DUT
- dst = os.path.join(self.ltptest_log_dir, "%s" % ltp_group )
+ # Copy the machine-readable test results locally so we can parse it
+ dst = os.path.join(self.ltptest_log_dir, ltp_group)
remote_src = "/opt/ltp/results/%s" % ltp_group
(status, output) = self.target.copyFrom(remote_src, dst, True)
- msg = 'File could not be copied. Output: %s' % output
if status:
+ msg = 'File could not be copied. Output: %s' % output
self.target.logger.warning(msg)
parser = LtpParser()
results, sections = parser.parse(dst)
- runtime = int(endtime-starttime)
- sections['duration'] = runtime
+ sections['duration'] = int(endtime-starttime)
self.sections[ltp_group] = sections
failed_tests = {}
diff --git a/meta/lib/oeqa/runtime/cases/maturin.py b/meta/lib/oeqa/runtime/cases/maturin.py
new file mode 100644
index 0000000000..4e6384fe5e
--- /dev/null
+++ b/meta/lib/oeqa/runtime/cases/maturin.py
@@ -0,0 +1,58 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+import os
+
+from oeqa.runtime.case import OERuntimeTestCase
+from oeqa.core.decorator.depends import OETestDepends
+from oeqa.runtime.decorator.package import OEHasPackage
+
+
+class MaturinTest(OERuntimeTestCase):
+ @OETestDepends(['ssh.SSHTest.test_ssh', 'python.PythonTest.test_python3'])
+ @OEHasPackage(['python3-maturin'])
+ def test_maturin_list_python(self):
+ status, output = self.target.run("maturin list-python")
+ self.assertEqual(status, 0)
+ _, py_major = self.target.run("python3 -c 'import sys; print(sys.version_info.major)'")
+ _, py_minor = self.target.run("python3 -c 'import sys; print(sys.version_info.minor)'")
+ python_version = "%s.%s" % (py_major, py_minor)
+ self.assertEqual(output, "🐍 1 python interpreter found:\n"
+ " - CPython %s at /usr/bin/python%s" % (python_version, python_version))
+
+
+class MaturinDevelopTest(OERuntimeTestCase):
+ @classmethod
+ def setUp(cls):
+ dst = '/tmp'
+ src = os.path.join(cls.tc.files_dir, "maturin/guessing-game")
+ cls.tc.target.copyTo(src, dst)
+
+ @classmethod
+ def tearDown(cls):
+ cls.tc.target.run('rm -rf %s' % '/tmp/guessing-game/target')
+
+ @OETestDepends(['ssh.SSHTest.test_ssh', 'python.PythonTest.test_python3'])
+ @OEHasPackage(['python3-maturin'])
+ def test_maturin_develop(self):
+ """
+ This test case requires:
+ (1) that a .venv can been created.
+ (2) DNS nameserver to resolve crate URIs for fetching
+ (3) a functional 'rustc' and 'cargo'
+ """
+ targetdir = os.path.join("/tmp", "guessing-game")
+ self.target.run("cd %s; python3 -m venv .venv" % targetdir)
+ self.target.run("echo 'nameserver 8.8.8.8' > /etc/resolv.conf")
+ cmd = "cd %s; maturin develop" % targetdir
+ status, output = self.target.run(cmd)
+ self.assertRegex(output, r"🔗 Found pyo3 bindings with abi3 support for Python ≥ 3.8")
+ self.assertRegex(output, r"🐍 Not using a specific python interpreter")
+ self.assertRegex(output, r"📡 Using build options features from pyproject.toml")
+ self.assertRegex(output, r"Compiling guessing-game v0.1.0")
+ self.assertRegex(output, r"📦 Built wheel for abi3 Python ≥ 3.8")
+ self.assertRegex(output, r"🛠 Installed guessing-game-0.1.0")
+ self.assertEqual(status, 0)
diff --git a/meta/lib/oeqa/runtime/cases/multilib.py b/meta/lib/oeqa/runtime/cases/multilib.py
index 0d1b9ae2c9..68556e45c5 100644
--- a/meta/lib/oeqa/runtime/cases/multilib.py
+++ b/meta/lib/oeqa/runtime/cases/multilib.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
diff --git a/meta/lib/oeqa/runtime/cases/oe_syslog.py b/meta/lib/oeqa/runtime/cases/oe_syslog.py
index 150b70d9f0..adb876160d 100644
--- a/meta/lib/oeqa/runtime/cases/oe_syslog.py
+++ b/meta/lib/oeqa/runtime/cases/oe_syslog.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
@@ -114,7 +116,7 @@ class SyslogTestConfig(OERuntimeTestCase):
@OETestDepends(['oe_syslog.SyslogTestConfig.test_syslog_logger'])
@OEHasPackage(["busybox-syslog"])
@skipIfDataVar('VIRTUAL-RUNTIME_init_manager', 'systemd',
- 'Not appropiate for systemd image')
+ 'Not appropriate for systemd image')
def test_syslog_startup_config(self):
cmd = 'echo "LOGFILE=/var/log/test" >> /etc/syslog-startup.conf'
self.target.run(cmd)
diff --git a/meta/lib/oeqa/runtime/cases/opkg.py b/meta/lib/oeqa/runtime/cases/opkg.py
index 9cfee1cd88..a29c93e59a 100644
--- a/meta/lib/oeqa/runtime/cases/opkg.py
+++ b/meta/lib/oeqa/runtime/cases/opkg.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
diff --git a/meta/lib/oeqa/runtime/cases/pam.py b/meta/lib/oeqa/runtime/cases/pam.py
index a482ded945..b3e8b56c3c 100644
--- a/meta/lib/oeqa/runtime/cases/pam.py
+++ b/meta/lib/oeqa/runtime/cases/pam.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
diff --git a/meta/lib/oeqa/runtime/cases/parselogs-ignores-common.txt b/meta/lib/oeqa/runtime/cases/parselogs-ignores-common.txt
new file mode 100644
index 0000000000..f91abbc941
--- /dev/null
+++ b/meta/lib/oeqa/runtime/cases/parselogs-ignores-common.txt
@@ -0,0 +1,62 @@
+# Xserver explains what the short codes mean
+(WW) warning, (EE) error, (NI) not implemented, (??) unknown.
+
+# Xserver warns if compiled with ACPI but no acpid running
+Open ACPI failed (/var/run/acpid.socket) (No such file or directory)
+
+# Some machines (eg qemux86) don't enable PAE (they probably should though)
+NX (Execute Disable) protection cannot be enabled: non-PAE kernel!
+
+# Connman's pacrunner warns if external connectivity isn't available
+Failed to find URL:http://ipv4.connman.net/online/status.html
+Failed to find URL:http://ipv6.connman.net/online/status.html
+
+# x86 on 6.6+ outputs this message, it is informational, not an error
+ACPI: _OSC evaluation for CPUs failed, trying _PDC
+
+# These should be reviewed to see if they are still needed
+dma timeout
+can\'t add hid device:
+usbhid: probe of
+_OSC failed (AE_ERROR)
+_OSC failed (AE_SUPPORT)
+AE_ALREADY_EXISTS
+ACPI _OSC request failed (AE_SUPPORT)
+can\'t disable ASPM
+Failed to load module "vesa"
+Failed to load module "modesetting"
+Failed to load module "glx"
+Failed to load module "fbdev"
+Failed to load module "ati"
+[drm] Cannot find any crtc or sizes
+_OSC failed (AE_NOT_FOUND); disabling ASPM
+hd.: possibly failed opcode
+NETLINK INITIALIZATION FAILED
+kernel: Cannot find map file
+omap_hwmod: debugss: _wait_target_disable failed
+VGA arbiter: cannot open kernel arbiter, no multi-card support
+Online check failed for
+netlink init failed
+Fast TSC calibration
+controller can't do DEVSLP, turning off
+stmmac_dvr_probe: warning: cannot get CSR clock
+error: couldn\'t mount because of unsupported optional features
+GPT: Use GNU Parted to correct GPT errors
+Cannot set xattr user.Librepo.DownloadInProgress
+Failed to read /var/lib/nfs/statd/state: Success
+error retry time-out =
+logind: cannot setup systemd-logind helper (-61), using legacy fallback
+Failed to rename network interface
+Failed to process device, ignoring: Device or resource busy
+Cannot find a map file
+[rdrand]: Initialization Failed
+[rndr ]: Initialization Failed
+[pulseaudio] authkey.c: Failed to open cookie file
+[pulseaudio] authkey.c: Failed to load authentication key
+was skipped because of a failed condition check
+was skipped because all trigger condition checks failed
+xf86OpenConsole: Switching VT failed
+Failed to read LoaderConfigTimeoutOneShot variable, ignoring: Operation not supported
+Failed to read LoaderEntryOneShot variable, ignoring: Operation not supported
+Direct firmware load for regulatory.db
+failed to load regulatory.db
diff --git a/meta/lib/oeqa/runtime/cases/parselogs-ignores-mipsarch.txt b/meta/lib/oeqa/runtime/cases/parselogs-ignores-mipsarch.txt
new file mode 100644
index 0000000000..2c0bd9a247
--- /dev/null
+++ b/meta/lib/oeqa/runtime/cases/parselogs-ignores-mipsarch.txt
@@ -0,0 +1,2 @@
+# These should be reviewed to see if they are still needed
+cacheinfo: Failed to find cpu0 device node
diff --git a/meta/lib/oeqa/runtime/cases/parselogs-ignores-qemuall.txt b/meta/lib/oeqa/runtime/cases/parselogs-ignores-qemuall.txt
new file mode 100644
index 0000000000..b0c0fc9ddf
--- /dev/null
+++ b/meta/lib/oeqa/runtime/cases/parselogs-ignores-qemuall.txt
@@ -0,0 +1,27 @@
+# psplash
+FBIOPUT_VSCREENINFO failed, double buffering disabled
+
+# PCI host bridge to bus 0000:00
+# pci_bus 0000:00: root bus resource [mem 0x10000000-0x17ffffff]
+# pci_bus 0000:00: root bus resource [io 0x1000-0x1fffff]
+# pci_bus 0000:00: No busn resource found for root bus, will use [bus 00-ff]
+# pci 0000:00:00.0: [2046:ab11] type 00 class 0x100000
+# pci 0000:00:00.0: [Firmware Bug]: reg 0x10: invalid BAR (can't size)
+# pci 0000:00:00.0: [Firmware Bug]: reg 0x14: invalid BAR (can't size)
+# pci 0000:00:00.0: [Firmware Bug]: reg 0x18: invalid BAR (can't size)
+# pci 0000:00:00.0: [Firmware Bug]: reg 0x1c: invalid BAR (can't size)
+# pci 0000:00:00.0: [Firmware Bug]: reg 0x20: invalid BAR (can't size)
+# pci 0000:00:00.0: [Firmware Bug]: reg 0x24: invalid BAR (can't size)
+invalid BAR (can't size)
+
+# These should be reviewed to see if they are still needed
+wrong ELF class
+fail to add MMCONFIG information, can't access extended PCI configuration space under this bridge
+can't claim BAR
+amd_nb: Cannot enumerate AMD northbridges
+tsc: HPET/PMTIMER calibration failed
+modeset(0): Failed to initialize the DRI2 extension
+glamor initialization failed
+blk_update_request: I/O error, dev fd0, sector 0 op 0x0:(READ)
+floppy: error
+failed to IDENTIFY (I/O error, err_mask=0x4)
diff --git a/meta/lib/oeqa/runtime/cases/parselogs-ignores-qemuarm64.txt b/meta/lib/oeqa/runtime/cases/parselogs-ignores-qemuarm64.txt
new file mode 100644
index 0000000000..260cdde620
--- /dev/null
+++ b/meta/lib/oeqa/runtime/cases/parselogs-ignores-qemuarm64.txt
@@ -0,0 +1,6 @@
+# These should be reviewed to see if they are still needed
+Fatal server error:
+(EE) Server terminated with error (1). Closing log file.
+dmi: Firmware registration failed.
+irq: type mismatch, failed to map hwirq-27 for /intc
+logind: failed to get session seat \ No newline at end of file
diff --git a/meta/lib/oeqa/runtime/cases/parselogs-ignores-qemuarmv5.txt b/meta/lib/oeqa/runtime/cases/parselogs-ignores-qemuarmv5.txt
new file mode 100644
index 0000000000..ed91107b7d
--- /dev/null
+++ b/meta/lib/oeqa/runtime/cases/parselogs-ignores-qemuarmv5.txt
@@ -0,0 +1,19 @@
+# Code is 2 JENT_ECOARSETIME: Timer too coarse for RNG.
+jitterentropy: Initialization failed with host not compliant with requirements: 2
+
+# These should be reviewed to see if they are still needed
+mmci-pl18x: probe of fpga:05 failed with error -22
+mmci-pl18x: probe of fpga:0b failed with error -22
+
+OF: amba_device_add() failed (-19) for /amba/smc@10100000
+OF: amba_device_add() failed (-19) for /amba/mpmc@10110000
+OF: amba_device_add() failed (-19) for /amba/sctl@101e0000
+OF: amba_device_add() failed (-19) for /amba/watchdog@101e1000
+OF: amba_device_add() failed (-19) for /amba/sci@101f0000
+OF: amba_device_add() failed (-19) for /amba/spi@101f4000
+OF: amba_device_add() failed (-19) for /amba/ssp@101f4000
+OF: amba_device_add() failed (-19) for /amba/fpga/sci@a000
+Failed to initialize '/amba/timer@101e3000': -22
+
+clcd-pl11x: probe of 10120000.display failed with error -2
+arm-charlcd 10008000.lcd: error -ENXIO: IRQ index 0 not found
diff --git a/meta/lib/oeqa/runtime/cases/parselogs-ignores-qemuppc.txt b/meta/lib/oeqa/runtime/cases/parselogs-ignores-qemuppc.txt
new file mode 100644
index 0000000000..d9b58b58f1
--- /dev/null
+++ b/meta/lib/oeqa/runtime/cases/parselogs-ignores-qemuppc.txt
@@ -0,0 +1,6 @@
+# These should be reviewed to see if they are still needed
+PCI 0000:00 Cannot reserve Legacy IO [io 0x0000-0x0fff]
+host side 80-wire cable detection failed, limiting max speed
+mode "640x480" test failed
+can't handle BAR above 4GB
+Cannot reserve Legacy IO \ No newline at end of file
diff --git a/meta/lib/oeqa/runtime/cases/parselogs-ignores-qemuppc64.txt b/meta/lib/oeqa/runtime/cases/parselogs-ignores-qemuppc64.txt
new file mode 100644
index 0000000000..b736a2aeb7
--- /dev/null
+++ b/meta/lib/oeqa/runtime/cases/parselogs-ignores-qemuppc64.txt
@@ -0,0 +1,4 @@
+# These should be reviewed to see if they are still needed
+vio vio: uevent: failed to send synthetic uevent
+synth uevent: /devices/vio: failed to send uevent
+PCI 0000:00 Cannot reserve Legacy IO [io 0x10000-0x10fff] \ No newline at end of file
diff --git a/meta/lib/oeqa/runtime/cases/parselogs-ignores-qemux86.txt b/meta/lib/oeqa/runtime/cases/parselogs-ignores-qemux86.txt
new file mode 100644
index 0000000000..ebb76f1221
--- /dev/null
+++ b/meta/lib/oeqa/runtime/cases/parselogs-ignores-qemux86.txt
@@ -0,0 +1,2 @@
+# These should be reviewed to see if they are still needed
+Failed to access perfctr msr (MSR
diff --git a/meta/lib/oeqa/runtime/cases/parselogs-ignores-x86.txt b/meta/lib/oeqa/runtime/cases/parselogs-ignores-x86.txt
new file mode 100644
index 0000000000..5985247daf
--- /dev/null
+++ b/meta/lib/oeqa/runtime/cases/parselogs-ignores-x86.txt
@@ -0,0 +1,10 @@
+# These should be reviewed to see if they are still needed
+[drm:psb_do_init] *ERROR* Debug is
+wrong ELF class
+Could not enable PowerButton event
+probe of LNXPWRBN:00 failed with error -22
+pmd_set_huge: Cannot satisfy
+failed to setup card detect gpio
+amd_nb: Cannot enumerate AMD northbridges
+failed to retrieve link info, disabling eDP
+Direct firmware load for iwlwifi
diff --git a/meta/lib/oeqa/runtime/cases/parselogs-ignores-x86_64.txt b/meta/lib/oeqa/runtime/cases/parselogs-ignores-x86_64.txt
new file mode 120000
index 0000000000..404e384c32
--- /dev/null
+++ b/meta/lib/oeqa/runtime/cases/parselogs-ignores-x86_64.txt
@@ -0,0 +1 @@
+parselogs-ignores-x86.txt \ No newline at end of file
diff --git a/meta/lib/oeqa/runtime/cases/parselogs.py b/meta/lib/oeqa/runtime/cases/parselogs.py
index 1f9365f3a8..6966923c94 100644
--- a/meta/lib/oeqa/runtime/cases/parselogs.py
+++ b/meta/lib/oeqa/runtime/cases/parselogs.py
@@ -1,216 +1,49 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
+import collections
import os
+import sys
-from subprocess import check_output
from shutil import rmtree
from oeqa.runtime.case import OERuntimeTestCase
from oeqa.core.decorator.depends import OETestDepends
-from oeqa.core.decorator.data import skipIfDataVar
-from oeqa.runtime.decorator.package import OEHasPackage
-
-#in the future these lists could be moved outside of module
-errors = ["error", "cannot", "can\'t", "failed"]
-
-common_errors = [
- "(WW) warning, (EE) error, (NI) not implemented, (??) unknown.",
- "dma timeout",
- "can\'t add hid device:",
- "usbhid: probe of ",
- "_OSC failed (AE_ERROR)",
- "_OSC failed (AE_SUPPORT)",
- "AE_ALREADY_EXISTS",
- "ACPI _OSC request failed (AE_SUPPORT)",
- "can\'t disable ASPM",
- "Failed to load module \"vesa\"",
- "Failed to load module vesa",
- "Failed to load module \"modesetting\"",
- "Failed to load module modesetting",
- "Failed to load module \"glx\"",
- "Failed to load module \"fbdev\"",
- "Failed to load module fbdev",
- "Failed to load module glx",
- "[drm] Cannot find any crtc or sizes",
- "_OSC failed (AE_NOT_FOUND); disabling ASPM",
- "Open ACPI failed (/var/run/acpid.socket) (No such file or directory)",
- "NX (Execute Disable) protection cannot be enabled: non-PAE kernel!",
- "hd.: possibly failed opcode",
- 'NETLINK INITIALIZATION FAILED',
- 'kernel: Cannot find map file',
- 'omap_hwmod: debugss: _wait_target_disable failed',
- 'VGA arbiter: cannot open kernel arbiter, no multi-card support',
- 'Failed to find URL:http://ipv4.connman.net/online/status.html',
- 'Online check failed for',
- 'netlink init failed',
- 'Fast TSC calibration',
- "BAR 0-9",
- "Failed to load module \"ati\"",
- "controller can't do DEVSLP, turning off",
- "stmmac_dvr_probe: warning: cannot get CSR clock",
- "error: couldn\'t mount because of unsupported optional features",
- "GPT: Use GNU Parted to correct GPT errors",
- "Cannot set xattr user.Librepo.DownloadInProgress",
- "Failed to read /var/lib/nfs/statd/state: Success",
- "error retry time-out =",
- "logind: cannot setup systemd-logind helper (-61), using legacy fallback",
- "Failed to rename network interface",
- "Failed to process device, ignoring: Device or resource busy",
- "Cannot find a map file",
- "[rdrand]: Initialization Failed",
- "[rndr ]: Initialization Failed",
- "[pulseaudio] authkey.c: Failed to open cookie file",
- "[pulseaudio] authkey.c: Failed to load authentication key",
- "was skipped because of a failed condition check",
- "was skipped because all trigger condition checks failed",
- ]
-video_related = [
-]
+# importlib.resources.open_text in Python <3.10 doesn't search all directories
+# when a package is split across multiple directories. Until we can rely on
+# 3.10+, reimplement the searching logic.
+if sys.version_info < (3, 10):
+ def _open_text(package, resource):
+ import importlib, pathlib
+ module = importlib.import_module(package)
+ for path in module.__path__:
+ candidate = pathlib.Path(path) / resource
+ if candidate.exists():
+ return candidate.open(encoding='utf-8')
+ raise FileNotFoundError
+else:
+ from importlib.resources import open_text as _open_text
-x86_common = [
- '[drm:psb_do_init] *ERROR* Debug is',
- 'wrong ELF class',
- 'Could not enable PowerButton event',
- 'probe of LNXPWRBN:00 failed with error -22',
- 'pmd_set_huge: Cannot satisfy',
- 'failed to setup card detect gpio',
- 'amd_nb: Cannot enumerate AMD northbridges',
- 'failed to retrieve link info, disabling eDP',
- 'Direct firmware load for iwlwifi',
- 'Direct firmware load for regulatory.db',
- 'failed to load regulatory.db',
-] + common_errors
-qemux86_common = [
- 'wrong ELF class',
- "fail to add MMCONFIG information, can't access extended PCI configuration space under this bridge.",
- "can't claim BAR ",
- 'amd_nb: Cannot enumerate AMD northbridges',
- 'tsc: HPET/PMTIMER calibration failed',
- "modeset(0): Failed to initialize the DRI2 extension",
- "glamor initialization failed",
- "blk_update_request: I/O error, dev fd0, sector 0 op 0x0:(READ)",
- "floppy: error",
- 'failed to IDENTIFY (I/O error, err_mask=0x4)',
-] + common_errors
+class ParseLogsTest(OERuntimeTestCase):
-ignore_errors = {
- 'default' : common_errors,
- 'qemux86' : [
- 'Failed to access perfctr msr (MSR',
- 'pci 0000:00:00.0: [Firmware Bug]: reg 0x..: invalid BAR (can\'t size)',
- ] + qemux86_common,
- 'qemux86-64' : qemux86_common,
- 'qemumips' : [
- 'Failed to load module "glx"',
- 'pci 0000:00:00.0: [Firmware Bug]: reg 0x..: invalid BAR (can\'t size)',
- 'cacheinfo: Failed to find cpu0 device node',
- ] + common_errors,
- 'qemumips64' : [
- 'pci 0000:00:00.0: [Firmware Bug]: reg 0x..: invalid BAR (can\'t size)',
- 'cacheinfo: Failed to find cpu0 device node',
- ] + common_errors,
- 'qemuppc' : [
- 'PCI 0000:00 Cannot reserve Legacy IO [io 0x0000-0x0fff]',
- 'host side 80-wire cable detection failed, limiting max speed',
- 'mode "640x480" test failed',
- 'Failed to load module "glx"',
- 'can\'t handle BAR above 4GB',
- 'Cannot reserve Legacy IO',
- ] + common_errors,
- 'qemuppc64' : [
- 'vio vio: uevent: failed to send synthetic uevent',
- 'synth uevent: /devices/vio: failed to send uevent',
- 'PCI 0000:00 Cannot reserve Legacy IO [io 0x10000-0x10fff]',
- ] + common_errors,
- 'qemuarmv5' : [
- 'mmci-pl18x: probe of fpga:05 failed with error -22',
- 'mmci-pl18x: probe of fpga:0b failed with error -22',
- 'Failed to load module "glx"',
- 'OF: amba_device_add() failed (-19) for /amba/smc@10100000',
- 'OF: amba_device_add() failed (-19) for /amba/mpmc@10110000',
- 'OF: amba_device_add() failed (-19) for /amba/sctl@101e0000',
- 'OF: amba_device_add() failed (-19) for /amba/watchdog@101e1000',
- 'OF: amba_device_add() failed (-19) for /amba/sci@101f0000',
- 'OF: amba_device_add() failed (-19) for /amba/spi@101f4000',
- 'OF: amba_device_add() failed (-19) for /amba/ssp@101f4000',
- 'OF: amba_device_add() failed (-19) for /amba/fpga/sci@a000',
- 'Failed to initialize \'/amba/timer@101e3000\': -22',
- 'jitterentropy: Initialization failed with host not compliant with requirements: 2',
- 'clcd-pl11x: probe of 10120000.display failed with error -2',
- ] + common_errors,
- 'qemuarm64' : [
- 'Fatal server error:',
- '(EE) Server terminated with error (1). Closing log file.',
- 'dmi: Firmware registration failed.',
- 'irq: type mismatch, failed to map hwirq-27 for /intc',
- 'logind: failed to get session seat',
- ] + common_errors,
- 'intel-core2-32' : [
- 'ACPI: No _BQC method, cannot determine initial brightness',
- '[Firmware Bug]: ACPI: No _BQC method, cannot determine initial brightness',
- '(EE) Failed to load module "psb"',
- '(EE) Failed to load module psb',
- '(EE) Failed to load module "psbdrv"',
- '(EE) Failed to load module psbdrv',
- '(EE) open /dev/fb0: No such file or directory',
- '(EE) AIGLX: reverting to software rendering',
- 'dmi: Firmware registration failed.',
- 'ioremap error for 0x78',
- ] + x86_common,
- 'intel-corei7-64' : [
- 'can\'t set Max Payload Size to 256',
- 'intel_punit_ipc: can\'t request region for resource',
- '[drm] parse error at position 4 in video mode \'efifb\'',
- 'ACPI Error: Could not enable RealTimeClock event',
- 'ACPI Warning: Could not enable fixed event - RealTimeClock',
- 'hci_intel INT33E1:00: Unable to retrieve gpio',
- 'hci_intel: probe of INT33E1:00 failed',
- 'can\'t derive routing for PCI INT A',
- 'failed to read out thermal zone',
- 'Bluetooth: hci0: Setting Intel event mask failed',
- 'ttyS2 - failed to request DMA',
- 'Bluetooth: hci0: Failed to send firmware data (-38)',
- 'atkbd serio0: Failed to enable keyboard on isa0060/serio0',
- ] + x86_common,
- 'genericx86' : x86_common,
- 'genericx86-64' : [
- 'Direct firmware load for i915',
- 'Failed to load firmware i915',
- 'Failed to fetch GuC',
- 'Failed to initialize GuC',
- 'Failed to load DMC firmware',
- 'The driver is built-in, so to load the firmware you need to',
- ] + x86_common,
- 'edgerouter' : [
- 'not creating \'/sys/firmware/fdt\'',
- 'Failed to find cpu0 device node',
- 'Fatal server error:',
- 'Server terminated with error',
- ] + common_errors,
- 'beaglebone-yocto' : [
- 'Direct firmware load for regulatory.db',
- 'failed to load regulatory.db',
- 'l4_wkup_cm',
- 'Failed to load module "glx"',
- 'Failed to make EGL context current',
- 'glamor initialization failed',
- ] + common_errors,
-}
+ # Which log files should be collected
+ log_locations = ["/var/log/", "/var/log/dmesg", "/tmp/dmesg_output.log"]
-log_locations = ["/var/log/","/var/log/dmesg", "/tmp/dmesg_output.log"]
+ # The keywords that identify error messages in the log files
+ errors = ["error", "cannot", "can't", "failed"]
-class ParseLogsTest(OERuntimeTestCase):
+ # A list of error messages that should be ignored
+ ignore_errors = []
@classmethod
def setUpClass(cls):
- cls.errors = errors
-
# When systemd is enabled we need to notice errors on
# circular dependencies in units.
- if 'systemd' in cls.td.get('DISTRO_FEATURES', ''):
+ if 'systemd' in cls.td.get('DISTRO_FEATURES'):
cls.errors.extend([
'Found ordering cycle on',
'Breaking ordering cycle by deleting job',
@@ -218,48 +51,22 @@ class ParseLogsTest(OERuntimeTestCase):
'Ordering cycle found, skipping',
])
- cls.ignore_errors = ignore_errors
- cls.log_locations = log_locations
- cls.msg = ''
- is_lsb, _ = cls.tc.target.run("which LSB_Test.sh")
- if is_lsb == 0:
- for machine in cls.ignore_errors:
- cls.ignore_errors[machine] = cls.ignore_errors[machine] \
- + video_related
-
- def getMachine(self):
- return self.td.get('MACHINE', '')
-
- def getWorkdir(self):
- return self.td.get('WORKDIR', '')
-
- # Get some information on the CPU of the machine to display at the
- # beginning of the output. This info might be useful in some cases.
- def getHardwareInfo(self):
- hwi = ""
- cmd = ('cat /proc/cpuinfo | grep "model name" | head -n1 | '
- " awk 'BEGIN{FS=\":\"}{print $2}'")
- _, cpu_name = self.target.run(cmd)
-
- cmd = ('cat /proc/cpuinfo | grep "cpu cores" | head -n1 | '
- "awk {'print $4'}")
- _, cpu_physical_cores = self.target.run(cmd)
-
- cmd = 'cat /proc/cpuinfo | grep "processor" | wc -l'
- _, cpu_logical_cores = self.target.run(cmd)
-
- _, cpu_arch = self.target.run('uname -m')
+ cls.errors = [s.casefold() for s in cls.errors]
- hwi += 'Machine information: \n'
- hwi += '*******************************\n'
- hwi += 'Machine name: ' + self.getMachine() + '\n'
- hwi += 'CPU: ' + str(cpu_name) + '\n'
- hwi += 'Arch: ' + str(cpu_arch)+ '\n'
- hwi += 'Physical cores: ' + str(cpu_physical_cores) + '\n'
- hwi += 'Logical cores: ' + str(cpu_logical_cores) + '\n'
- hwi += '*******************************\n'
+ cls.load_machine_ignores()
- return hwi
+ @classmethod
+ def load_machine_ignores(cls):
+ # Add TARGET_ARCH explicitly as not every machine has that in MACHINEOVERRDES (eg qemux86-64)
+ for candidate in ["common", cls.td.get("TARGET_ARCH")] + cls.td.get("MACHINEOVERRIDES").split(":"):
+ try:
+ name = f"parselogs-ignores-{candidate}.txt"
+ for line in _open_text("oeqa.runtime.cases", name):
+ line = line.strip()
+ if line and not line.startswith("#"):
+ cls.ignore_errors.append(line.casefold())
+ except FileNotFoundError:
+ pass
# Go through the log locations provided and if it's a folder
# create a list with all the .log files in it, if it's a file
@@ -267,23 +74,23 @@ class ParseLogsTest(OERuntimeTestCase):
def getLogList(self, log_locations):
logs = []
for location in log_locations:
- status, _ = self.target.run('test -f ' + str(location))
+ status, _ = self.target.run('test -f %s' % location)
if status == 0:
- logs.append(str(location))
+ logs.append(location)
else:
- status, _ = self.target.run('test -d ' + str(location))
+ status, _ = self.target.run('test -d %s' % location)
if status == 0:
- cmd = 'find ' + str(location) + '/*.log -maxdepth 1 -type f'
+ cmd = 'find %s -name \\*.log -maxdepth 1 -type f' % location
status, output = self.target.run(cmd)
if status == 0:
output = output.splitlines()
for logfile in output:
- logs.append(os.path.join(location, str(logfile)))
+ logs.append(os.path.join(location, logfile))
return logs
# Copy the log files to be parsed locally
def transfer_logs(self, log_list):
- workdir = self.getWorkdir()
+ workdir = self.td.get('WORKDIR')
self.target_logs = workdir + '/' + 'target_logs'
target_logs = self.target_logs
if os.path.exists(target_logs):
@@ -300,65 +107,55 @@ class ParseLogsTest(OERuntimeTestCase):
logs = [f for f in dir_files if os.path.isfile(f)]
return logs
- # Build the grep command to be used with filters and exclusions
- def build_grepcmd(self, errors, ignore_errors, log):
- grepcmd = 'grep '
- grepcmd += '-Ei "'
- for error in errors:
- grepcmd += r'\<' + error + r'\>' + '|'
- grepcmd = grepcmd[:-1]
- grepcmd += '" ' + str(log) + " | grep -Eiv \'"
-
- try:
- errorlist = ignore_errors[self.getMachine()]
- except KeyError:
- self.msg += 'No ignore list found for this machine, using default\n'
- errorlist = ignore_errors['default']
-
- for ignore_error in errorlist:
- ignore_error = ignore_error.replace('(', r'\(')
- ignore_error = ignore_error.replace(')', r'\)')
- ignore_error = ignore_error.replace("'", '.')
- ignore_error = ignore_error.replace('?', r'\?')
- ignore_error = ignore_error.replace('[', r'\[')
- ignore_error = ignore_error.replace(']', r'\]')
- ignore_error = ignore_error.replace('*', r'\*')
- ignore_error = ignore_error.replace('0-9', '[0-9]')
- grepcmd += ignore_error + '|'
- grepcmd = grepcmd[:-1]
- grepcmd += "\'"
-
- return grepcmd
-
- # Grep only the errors so that their context could be collected.
- # Default context is 10 lines before and after the error itself
- def parse_logs(self, errors, ignore_errors, logs,
- lines_before = 10, lines_after = 10):
- results = {}
- rez = []
- grep_output = ''
+ def get_context(self, lines, index, before=6, after=3):
+ """
+ Given a set of lines and the index of the line that is important, return
+ a number of lines surrounding that line.
+ """
+ last = len(lines)
+
+ start = index - before
+ end = index + after + 1
+
+ if start < 0:
+ end -= start
+ start = 0
+ if end > last:
+ start -= end - last
+ end = last
+
+ return lines[start:end]
+
+ def test_get_context(self):
+ """
+ A test case for the test case.
+ """
+ lines = list(range(0,10))
+ self.assertEqual(self.get_context(lines, 0, 2, 1), [0, 1, 2, 3])
+ self.assertEqual(self.get_context(lines, 5, 2, 1), [3, 4, 5, 6])
+ self.assertEqual(self.get_context(lines, 9, 2, 1), [6, 7, 8, 9])
+
+ def parse_logs(self, logs, lines_before=10, lines_after=10):
+ """
+ Search the log files @logs looking for error lines (marked by
+ @self.errors), ignoring anything listed in @self.ignore_errors.
+
+ Returns a dictionary of log filenames to a dictionary of error lines to
+ the error context (controlled by @lines_before and @lines_after).
+ """
+ results = collections.defaultdict(dict)
for log in logs:
- result = None
- thegrep = self.build_grepcmd(errors, ignore_errors, log)
+ with open(log) as f:
+ lines = f.readlines()
- try:
- result = check_output(thegrep, shell=True).decode('utf-8')
- except:
- pass
+ for i, line in enumerate(lines):
+ line = line.strip()
+ line_lower = line.casefold()
- if result is not None:
- results[log] = {}
- rez = result.splitlines()
-
- for xrez in rez:
- try:
- cmd = ['grep', '-F', xrez, '-B', str(lines_before)]
- cmd += ['-A', str(lines_after), log]
- grep_output = check_output(cmd).decode('utf-8')
- except:
- pass
- results[log][xrez]=grep_output
+ if any(keyword in line_lower for keyword in self.errors):
+ if not any(ignore in line_lower for ignore in self.ignore_errors):
+ results[log][line] = "".join(self.get_context(lines, i, lines_before, lines_after))
return results
@@ -371,17 +168,18 @@ class ParseLogsTest(OERuntimeTestCase):
def test_parselogs(self):
self.write_dmesg()
log_list = self.get_local_log_list(self.log_locations)
- result = self.parse_logs(self.errors, self.ignore_errors, log_list)
- print(self.getHardwareInfo())
+ result = self.parse_logs(log_list)
+
errcount = 0
+ self.msg = ""
for log in result:
self.msg += 'Log: ' + log + '\n'
self.msg += '-----------------------\n'
for error in result[log]:
errcount += 1
- self.msg += 'Central error: ' + str(error) + '\n'
+ self.msg += 'Central error: ' + error + '\n'
self.msg += '***********************\n'
- self.msg += result[str(log)][str(error)] + '\n'
+ self.msg += result[log][error] + '\n'
self.msg += '***********************\n'
self.msg += '%s errors found in logs.' % errcount
self.assertEqual(errcount, 0, msg=self.msg)
diff --git a/meta/lib/oeqa/runtime/cases/perl.py b/meta/lib/oeqa/runtime/cases/perl.py
index 2c6b3b7846..f11b300836 100644
--- a/meta/lib/oeqa/runtime/cases/perl.py
+++ b/meta/lib/oeqa/runtime/cases/perl.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
diff --git a/meta/lib/oeqa/runtime/cases/ping.py b/meta/lib/oeqa/runtime/cases/ping.py
index 498f80d0a5..f72460e7f3 100644
--- a/meta/lib/oeqa/runtime/cases/ping.py
+++ b/meta/lib/oeqa/runtime/cases/ping.py
@@ -1,8 +1,11 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
from subprocess import Popen, PIPE
+from time import sleep
from oeqa.runtime.case import OERuntimeTestCase
from oeqa.core.decorator.oetimeout import OETimeout
@@ -14,6 +17,7 @@ class PingTest(OERuntimeTestCase):
def test_ping(self):
output = ''
count = 0
+ self.assertNotEqual(len(self.target.ip), 0, msg="No target IP address set")
try:
while count < 5:
cmd = 'ping -c 1 %s' % self.target.ip
@@ -23,6 +27,7 @@ class PingTest(OERuntimeTestCase):
count += 1
else:
count = 0
+ sleep(1)
except OEQATimeoutError:
self.fail("Ping timeout error for address %s, count %s, output: %s" % (self.target.ip, count, output))
msg = ('Expected 5 consecutive, got %d.\n'
diff --git a/meta/lib/oeqa/runtime/cases/ptest.py b/meta/lib/oeqa/runtime/cases/ptest.py
index 00742da2b5..fbaeb84d00 100644
--- a/meta/lib/oeqa/runtime/cases/ptest.py
+++ b/meta/lib/oeqa/runtime/cases/ptest.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
@@ -81,17 +83,20 @@ class PtestRunnerTest(OERuntimeTestCase):
extras['ptestresult.sections'] = sections
+ zerolength = []
trans = str.maketrans("()", "__")
for section in results:
for test in results[section]:
result = results[section][test]
testname = "ptestresult." + (section or "No-section") + "." + "_".join(test.translate(trans).split())
extras[testname] = {'status': result}
+ if not results[section]:
+ zerolength.append(section)
failed_tests = {}
for section in sections:
- if 'exitcode' in sections[section].keys():
+ if 'exitcode' in sections[section].keys() or 'timeout' in sections[section].keys():
failed_tests[section] = sections[section]["log"]
for section in results:
@@ -105,7 +110,10 @@ class PtestRunnerTest(OERuntimeTestCase):
failmsg = "ERROR: Processes were killed by the OOM Killer:\n%s\n" % output
if failed_tests:
- failmsg = failmsg + "Failed ptests:\n%s" % pprint.pformat(failed_tests)
+ failmsg = failmsg + "\nFailed ptests:\n%s\n" % pprint.pformat(failed_tests)
+
+ if zerolength:
+ failmsg = failmsg + "\nptests which had no test results:\n%s" % pprint.pformat(zerolength)
if failmsg:
self.logger.warning("There were failing ptests.")
diff --git a/meta/lib/oeqa/runtime/cases/python.py b/meta/lib/oeqa/runtime/cases/python.py
index ec54f1e1db..5d6d133480 100644
--- a/meta/lib/oeqa/runtime/cases/python.py
+++ b/meta/lib/oeqa/runtime/cases/python.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
diff --git a/meta/lib/oeqa/runtime/cases/rpm.py b/meta/lib/oeqa/runtime/cases/rpm.py
index a4339116bf..ea5619ffea 100644
--- a/meta/lib/oeqa/runtime/cases/rpm.py
+++ b/meta/lib/oeqa/runtime/cases/rpm.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
@@ -49,21 +51,20 @@ class RpmBasicTest(OERuntimeTestCase):
msg = 'status: %s. Cannot run rpm -qa: %s' % (status, output)
self.assertEqual(status, 0, msg=msg)
- def check_no_process_for_user(u):
- _, output = self.target.run(self.tc.target_cmds['ps'])
- if u + ' ' in output:
- return False
- else:
- return True
+ def wait_for_no_process_for_user(u, timeout = 120):
+ timeout_at = time.time() + timeout
+ while time.time() < timeout_at:
+ _, output = self.target.run(self.tc.target_cmds['ps'])
+ if u + ' ' not in output:
+ return
+ time.sleep(1)
+ user_pss = [ps for ps in output.split("\n") if u + ' ' in ps]
+ msg = "User %s has processes still running: %s" % (u, "\n".join(user_pss))
+ self.fail(msg=msg)
def unset_up_test_user(u):
# ensure no test1 process in running
- timeout = time.time() + 30
- while time.time() < timeout:
- if check_no_process_for_user(u):
- break
- else:
- time.sleep(1)
+ wait_for_no_process_for_user(u)
status, output = self.target.run('userdel -r %s' % u)
msg = 'Failed to erase user: %s' % output
self.assertTrue(status == 0, msg=msg)
@@ -79,21 +80,24 @@ class RpmBasicTest(OERuntimeTestCase):
class RpmInstallRemoveTest(OERuntimeTestCase):
- @classmethod
- def setUpClass(cls):
- pkgarch = cls.td['TUNE_PKGARCH'].replace('-', '_')
- rpmdir = os.path.join(cls.tc.td['DEPLOY_DIR'], 'rpm', pkgarch)
+ def _find_test_file(self):
+ pkgarch = self.td['TUNE_PKGARCH'].replace('-', '_')
+ rpmdir = os.path.join(self.tc.td['DEPLOY_DIR'], 'rpm', pkgarch)
# Pick base-passwd-doc as a test file to get installed, because it's small
# and it will always be built for standard targets
rpm_doc = 'base-passwd-doc-*.%s.rpm' % pkgarch
if not os.path.exists(rpmdir):
- return
+ self.fail("Rpm directory {} does not exist".format(rpmdir))
for f in fnmatch.filter(os.listdir(rpmdir), rpm_doc):
- cls.test_file = os.path.join(rpmdir, f)
- cls.dst = '/tmp/base-passwd-doc.rpm'
+ self.test_file = os.path.join(rpmdir, f)
+ break
+ else:
+ self.fail("Couldn't find the test rpm file {} in {}".format(rpm_doc, rpmdir))
+ self.dst = '/tmp/base-passwd-doc.rpm'
@OETestDepends(['rpm.RpmBasicTest.test_rpm_query'])
def test_rpm_install(self):
+ self._find_test_file()
self.tc.target.copyTo(self.test_file, self.dst)
status, output = self.target.run('rpm -ivh /tmp/base-passwd-doc.rpm')
msg = 'Failed to install base-passwd-doc package: %s' % output
@@ -116,6 +120,7 @@ class RpmInstallRemoveTest(OERuntimeTestCase):
Author: Alexander Kanavin <alex.kanavin@gmail.com>
AutomatedBy: Daniel Istrate <daniel.alexandrux.istrate@intel.com>
"""
+ self._find_test_file()
db_files_cmd = 'ls /var/lib/rpm/rpmdb.sqlite*'
check_log_cmd = "grep RPM /var/log/messages | wc -l"
diff --git a/meta/lib/oeqa/runtime/cases/rt.py b/meta/lib/oeqa/runtime/cases/rt.py
new file mode 100644
index 0000000000..15ab4dbbbb
--- /dev/null
+++ b/meta/lib/oeqa/runtime/cases/rt.py
@@ -0,0 +1,19 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+from oeqa.runtime.case import OERuntimeTestCase
+from oeqa.core.decorator.depends import OETestDepends
+
+class RtTest(OERuntimeTestCase):
+ @OETestDepends(['ssh.SSHTest.test_ssh'])
+ def test_is_rt(self):
+ """
+ Check that the kernel has CONFIG_PREEMPT_RT enabled.
+ """
+ status, output = self.target.run("uname -a")
+ self.assertEqual(status, 0, msg=output)
+ # Split so we don't get a substring false-positive
+ self.assertIn("PREEMPT_RT", output.split())
diff --git a/meta/lib/oeqa/runtime/cases/rtc.py b/meta/lib/oeqa/runtime/cases/rtc.py
index c4e6681324..6e45c5db4f 100644
--- a/meta/lib/oeqa/runtime/cases/rtc.py
+++ b/meta/lib/oeqa/runtime/cases/rtc.py
@@ -1,5 +1,11 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
from oeqa.runtime.case import OERuntimeTestCase
from oeqa.core.decorator.depends import OETestDepends
+from oeqa.core.decorator.data import skipIfFeature
from oeqa.runtime.decorator.package import OEHasPackage
import re
@@ -16,12 +22,14 @@ class RTCTest(OERuntimeTestCase):
self.logger.debug('Starting systemd-timesyncd daemon')
self.target.run('systemctl enable --now --runtime systemd-timesyncd')
+ @skipIfFeature('read-only-rootfs',
+ 'Test does not work with read-only-rootfs in IMAGE_FEATURES')
@OETestDepends(['ssh.SSHTest.test_ssh'])
@OEHasPackage(['coreutils', 'busybox'])
def test_rtc(self):
(status, output) = self.target.run('hwclock -r')
self.assertEqual(status, 0, msg='Failed to get RTC time, output: %s' % output)
-
+
(status, current_datetime) = self.target.run('date +"%m%d%H%M%Y"')
self.assertEqual(status, 0, msg='Failed to get system current date & time, output: %s' % current_datetime)
@@ -32,7 +40,6 @@ class RTCTest(OERuntimeTestCase):
(status, output) = self.target.run('date %s' % current_datetime)
self.assertEqual(status, 0, msg='Failed to reset system date & time, output: %s' % output)
-
+
(status, output) = self.target.run('hwclock -w')
self.assertEqual(status, 0, msg='Failed to reset RTC time, output: %s' % output)
-
diff --git a/meta/lib/oeqa/runtime/cases/runlevel.py b/meta/lib/oeqa/runtime/cases/runlevel.py
index 3a4df8ace1..6734b0f5ed 100644
--- a/meta/lib/oeqa/runtime/cases/runlevel.py
+++ b/meta/lib/oeqa/runtime/cases/runlevel.py
@@ -1,3 +1,8 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
from oeqa.runtime.case import OERuntimeTestCase
from oeqa.core.decorator.depends import OETestDepends
diff --git a/meta/lib/oeqa/runtime/cases/rust.py b/meta/lib/oeqa/runtime/cases/rust.py
index b3d6cf7f37..123c942012 100644
--- a/meta/lib/oeqa/runtime/cases/rust.py
+++ b/meta/lib/oeqa/runtime/cases/rust.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
@@ -6,14 +8,57 @@ from oeqa.runtime.case import OERuntimeTestCase
from oeqa.core.decorator.depends import OETestDepends
from oeqa.runtime.decorator.package import OEHasPackage
-class RustHelloworldTest(OERuntimeTestCase):
+class RustCompileTest(OERuntimeTestCase):
+
+ @classmethod
+ def setUp(cls):
+ dst = '/tmp/'
+ src = os.path.join(cls.tc.files_dir, 'test.rs')
+ cls.tc.target.copyTo(src, dst)
+
+ @classmethod
+ def tearDown(cls):
+ files = '/tmp/test.rs /tmp/test'
+ cls.tc.target.run('rm %s' % files)
+ dirs = '/tmp/hello'
+ cls.tc.target.run('rm -r %s' % dirs)
+
+ @OETestDepends(['ssh.SSHTest.test_ssh'])
+ @OEHasPackage('rust')
+ @OEHasPackage('openssh-scp')
+ def test_rust_compile(self):
+ status, output = self.target.run('rustc /tmp/test.rs -o /tmp/test')
+ msg = 'rust compile failed, output: %s' % output
+ self.assertEqual(status, 0, msg=msg)
+
+ status, output = self.target.run('/tmp/test')
+ msg = 'running compiled file failed, output: %s' % output
+ self.assertEqual(status, 0, msg=msg)
+
+ @OETestDepends(['ssh.SSHTest.test_ssh'])
+ @OEHasPackage('cargo')
+ @OEHasPackage('openssh-scp')
+ def test_cargo_compile(self):
+ status, output = self.target.run('cargo new /tmp/hello')
+ msg = 'cargo new failed, output: %s' % output
+ self.assertEqual(status, 0, msg=msg)
+
+ status, output = self.target.run('cargo build --manifest-path=/tmp/hello/Cargo.toml')
+ msg = 'cargo build failed, output: %s' % output
+ self.assertEqual(status, 0, msg=msg)
+
+ status, output = self.target.run('cargo run --manifest-path=/tmp/hello/Cargo.toml')
+ msg = 'running compiled file failed, output: %s' % output
+ self.assertEqual(status, 0, msg=msg)
+
+class RustCLibExampleTest(OERuntimeTestCase):
@OETestDepends(['ssh.SSHTest.test_ssh'])
- @OEHasPackage(['rust-hello-world'])
- def test_rusthelloworld(self):
- cmd = "rust-hello-world"
+ @OEHasPackage('rust-c-lib-example-bin')
+ def test_rust_c_lib_example(self):
+ cmd = "rust-c-lib-example-bin test"
status, output = self.target.run(cmd)
msg = 'Exit status was not 0. Output: %s' % output
self.assertEqual(status, 0, msg=msg)
msg = 'Incorrect output: %s' % output
- self.assertEqual(output, "Hello, world!", msg=msg)
+ self.assertEqual(output, "Hello world in rust from C!", msg=msg)
diff --git a/meta/lib/oeqa/runtime/cases/scons.py b/meta/lib/oeqa/runtime/cases/scons.py
index 3c7c7f7270..4a8d4d40ba 100644
--- a/meta/lib/oeqa/runtime/cases/scons.py
+++ b/meta/lib/oeqa/runtime/cases/scons.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
diff --git a/meta/lib/oeqa/runtime/cases/scp.py b/meta/lib/oeqa/runtime/cases/scp.py
index 3a5f292152..ee97b8ef66 100644
--- a/meta/lib/oeqa/runtime/cases/scp.py
+++ b/meta/lib/oeqa/runtime/cases/scp.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
@@ -23,7 +25,7 @@ class ScpTest(OERuntimeTestCase):
os.remove(cls.tmp_path)
@OETestDepends(['ssh.SSHTest.test_ssh'])
- @OEHasPackage(['openssh-scp', 'dropbear'])
+ @OEHasPackage(['openssh-scp'])
def test_scp_file(self):
dst = '/tmp/test_scp_file'
diff --git a/meta/lib/oeqa/runtime/cases/skeletoninit.py b/meta/lib/oeqa/runtime/cases/skeletoninit.py
index a12f1e9aae..d0fdcbded9 100644
--- a/meta/lib/oeqa/runtime/cases/skeletoninit.py
+++ b/meta/lib/oeqa/runtime/cases/skeletoninit.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
@@ -15,7 +17,7 @@ class SkeletonBasicTest(OERuntimeTestCase):
@OETestDepends(['ssh.SSHTest.test_ssh'])
@OEHasPackage(['service'])
@skipIfDataVar('VIRTUAL-RUNTIME_init_manager', 'systemd',
- 'Not appropiate for systemd image')
+ 'Not appropriate for systemd image')
def test_skeleton_availability(self):
status, output = self.target.run('ls /etc/init.d/skeleton')
msg = 'skeleton init script not found. Output:\n%s' % output
diff --git a/meta/lib/oeqa/runtime/cases/ssh.py b/meta/lib/oeqa/runtime/cases/ssh.py
index e31224b3af..cdbef59500 100644
--- a/meta/lib/oeqa/runtime/cases/ssh.py
+++ b/meta/lib/oeqa/runtime/cases/ssh.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
@@ -11,6 +13,9 @@ class SSHTest(OERuntimeTestCase):
@OETestDepends(['ping.PingTest.test_ping'])
@OEHasPackage(['dropbear', 'openssh-sshd'])
def test_ssh(self):
+ (status, output) = self.target.run('sleep 20', timeout=2)
+ msg='run() timed out but return code was zero.'
+ self.assertNotEqual(status, 0, msg=msg)
(status, output) = self.target.run('uname -a')
self.assertEqual(status, 0, msg='SSH Test failed: %s' % output)
(status, output) = self.target.run('cat /etc/controllerimage')
diff --git a/meta/lib/oeqa/runtime/cases/stap.py b/meta/lib/oeqa/runtime/cases/stap.py
index 480eaabf2d..3be4162108 100644
--- a/meta/lib/oeqa/runtime/cases/stap.py
+++ b/meta/lib/oeqa/runtime/cases/stap.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
diff --git a/meta/lib/oeqa/runtime/cases/storage.py b/meta/lib/oeqa/runtime/cases/storage.py
index 972ef8210c..b05622fea8 100644
--- a/meta/lib/oeqa/runtime/cases/storage.py
+++ b/meta/lib/oeqa/runtime/cases/storage.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
diff --git a/meta/lib/oeqa/runtime/cases/suspend.py b/meta/lib/oeqa/runtime/cases/suspend.py
index 0382d48f45..a625cc5901 100644
--- a/meta/lib/oeqa/runtime/cases/suspend.py
+++ b/meta/lib/oeqa/runtime/cases/suspend.py
@@ -1,3 +1,8 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
from oeqa.runtime.case import OERuntimeTestCase
from oeqa.core.decorator.depends import OETestDepends
from oeqa.core.decorator.data import skipIfQemu
diff --git a/meta/lib/oeqa/runtime/cases/systemd.py b/meta/lib/oeqa/runtime/cases/systemd.py
index 7c44abe8ed..5481e1d840 100644
--- a/meta/lib/oeqa/runtime/cases/systemd.py
+++ b/meta/lib/oeqa/runtime/cases/systemd.py
@@ -1,8 +1,11 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
import re
+import threading
import time
from oeqa.runtime.case import OERuntimeTestCase
@@ -66,8 +69,8 @@ class SystemdBasicTests(SystemdTest):
"""
endtime = time.time() + (60 * 2)
while True:
- status, output = self.target.run('SYSTEMD_BUS_TIMEOUT=240s systemctl --state=activating')
- if "0 loaded units listed" in output:
+ status, output = self.target.run('SYSTEMD_BUS_TIMEOUT=240s systemctl is-system-running')
+ if "running" in output or "degraded" in output:
return (True, '')
if time.time() >= endtime:
return (False, output)
@@ -134,6 +137,27 @@ class SystemdServiceTests(SystemdTest):
status = self.target.run('mount -oro,remount /')[0]
self.assertTrue(status == 0, msg='Remounting / as r/o failed')
+ @OETestDepends(['systemd.SystemdBasicTests.test_systemd_basic'])
+ @skipIfNotFeature('minidebuginfo', 'Test requires minidebuginfo to be in DISTRO_FEATURES')
+ @OEHasPackage(['busybox'])
+ def test_systemd_coredump_minidebuginfo(self):
+ """
+ Verify that call-stacks generated by systemd-coredump contain symbolicated call-stacks,
+ extracted from the minidebuginfo metadata (.gnu_debugdata elf section).
+ """
+ t_thread = threading.Thread(target=self.target.run, args=("ulimit -c unlimited && sleep 1000",))
+ t_thread.start()
+ time.sleep(1)
+
+ status, output = self.target.run('pidof sleep')
+ # cause segfault on purpose
+ self.target.run('kill -SEGV %s' % output)
+ self.assertEqual(status, 0, msg = 'Not able to find process that runs sleep, output : %s' % output)
+
+ (status, output) = self.target.run('coredumpctl info')
+ self.assertEqual(status, 0, msg='MiniDebugInfo Test failed: %s' % output)
+ self.assertEqual('sleep_for_duration (busybox.nosuid' in output, True, msg='Call stack is missing minidebuginfo symbols (functions shown as "n/a"): %s' % output)
+
class SystemdJournalTests(SystemdTest):
@OETestDepends(['systemd.SystemdBasicTests.test_systemd_basic'])
@@ -152,7 +176,7 @@ class SystemdJournalTests(SystemdTest):
"""
# The expression chain that uniquely identifies the time boot message.
- expr_items=['Startup finished', 'kernel', 'userspace','\.$']
+ expr_items=['Startup finished', 'kernel', 'userspace', r'\.$']
try:
output = self.journalctl(args='-o cat --reverse')
except AssertionError:
diff --git a/meta/lib/oeqa/runtime/cases/terminal.py b/meta/lib/oeqa/runtime/cases/terminal.py
index 8fcca99f47..96ba3c3195 100644
--- a/meta/lib/oeqa/runtime/cases/terminal.py
+++ b/meta/lib/oeqa/runtime/cases/terminal.py
@@ -1,3 +1,8 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
from oeqa.runtime.case import OERuntimeTestCase
from oeqa.core.decorator.depends import OETestDepends
from oeqa.runtime.decorator.package import OEHasPackage
diff --git a/meta/lib/oeqa/runtime/cases/usb_hid.py b/meta/lib/oeqa/runtime/cases/usb_hid.py
index 8743174370..6f23d2ff51 100644
--- a/meta/lib/oeqa/runtime/cases/usb_hid.py
+++ b/meta/lib/oeqa/runtime/cases/usb_hid.py
@@ -1,3 +1,8 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
from oeqa.runtime.case import OERuntimeTestCase
from oeqa.core.decorator.depends import OETestDepends
from oeqa.core.decorator.data import skipIfQemu
diff --git a/meta/lib/oeqa/runtime/cases/weston.py b/meta/lib/oeqa/runtime/cases/weston.py
index b81cc299ef..ee4d336482 100644
--- a/meta/lib/oeqa/runtime/cases/weston.py
+++ b/meta/lib/oeqa/runtime/cases/weston.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
@@ -77,3 +79,11 @@ class WestonTest(OERuntimeTestCase):
self.target.run('kill -9 %s' % w)
__, weston_log = self.target.run('cat %s' % self.weston_log_file)
self.assertTrue(new_wl_processes, msg='Could not get new weston-desktop-shell processes (%s, try_cnt:%s) weston log: %s' % (new_wl_processes, try_cnt, weston_log))
+
+ @skipIfNotFeature('x11', 'Test requires x11 to be in DISTRO_FEATURES')
+ @OEHasPackage(['weston'])
+ def test_weston_supports_xwayland(self):
+ cmd ='cat %s | grep "xserver listening on display"' % self.weston_log_file
+ status, output = self.target.run(cmd)
+ msg = ('xwayland does not appear to be running')
+ self.assertEqual(status, 0, msg=msg)
diff --git a/meta/lib/oeqa/runtime/cases/x32lib.py b/meta/lib/oeqa/runtime/cases/x32lib.py
index f419c8f181..014da4b386 100644
--- a/meta/lib/oeqa/runtime/cases/x32lib.py
+++ b/meta/lib/oeqa/runtime/cases/x32lib.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
diff --git a/meta/lib/oeqa/runtime/cases/xorg.py b/meta/lib/oeqa/runtime/cases/xorg.py
index d6845587c2..09afb1e3d1 100644
--- a/meta/lib/oeqa/runtime/cases/xorg.py
+++ b/meta/lib/oeqa/runtime/cases/xorg.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
diff --git a/meta/lib/oeqa/runtime/context.py b/meta/lib/oeqa/runtime/context.py
index 8092dd0bae..cb7227a8df 100644
--- a/meta/lib/oeqa/runtime/context.py
+++ b/meta/lib/oeqa/runtime/context.py
@@ -10,7 +10,6 @@ import sys
from oeqa.core.context import OETestContext, OETestContextExecutor
from oeqa.core.target.ssh import OESSHTarget
from oeqa.core.target.qemu import OEQemuTarget
-from oeqa.utils.dump import HostDumper
from oeqa.runtime.loader import OERuntimeTestLoader
@@ -20,12 +19,11 @@ class OERuntimeTestContext(OETestContext):
os.path.dirname(os.path.abspath(__file__)), "files")
def __init__(self, td, logger, target,
- host_dumper, image_packages, extract_dir):
+ image_packages, extract_dir):
super(OERuntimeTestContext, self).__init__(td, logger)
self.target = target
self.image_packages = image_packages
- self.host_dumper = host_dumper
self.extract_dir = extract_dir
self._set_target_cmds()
@@ -67,11 +65,11 @@ class OERuntimeTestContextExecutor(OETestContextExecutor):
% self.default_target_type)
runtime_group.add_argument('--target-ip', action='store',
default=self.default_target_ip,
- help="IP address of device under test, default: %s" \
+ help="IP address and optionally ssh port (default 22) of device under test, for example '192.168.0.7:22'. Default: %s" \
% self.default_target_ip)
runtime_group.add_argument('--server-ip', action='store',
default=self.default_target_ip,
- help="IP address of device under test, default: %s" \
+ help="IP address of the test host from test target machine, default: %s" \
% self.default_server_ip)
runtime_group.add_argument('--host-dumper-dir', action='store',
@@ -199,10 +197,6 @@ class OERuntimeTestContextExecutor(OETestContextExecutor):
return image_packages
- @staticmethod
- def getHostDumper(cmds, directory):
- return HostDumper(cmds, directory)
-
def _process_args(self, logger, args):
if not args.packages_manifest:
raise TypeError('Manifest file not provided')
@@ -215,9 +209,6 @@ class OERuntimeTestContextExecutor(OETestContextExecutor):
self.tc_kwargs['init']['target'] = \
OERuntimeTestContextExecutor.getTarget(args.target_type,
None, args.target_ip, args.server_ip, **target_kwargs)
- self.tc_kwargs['init']['host_dumper'] = \
- OERuntimeTestContextExecutor.getHostDumper(None,
- args.host_dumper_dir)
self.tc_kwargs['init']['image_packages'] = \
OERuntimeTestContextExecutor.readPackagesManifest(
args.packages_manifest)
diff --git a/meta/lib/oeqa/runtime/decorator/package.py b/meta/lib/oeqa/runtime/decorator/package.py
index 8aba3f325b..b78ac9fc38 100644
--- a/meta/lib/oeqa/runtime/decorator/package.py
+++ b/meta/lib/oeqa/runtime/decorator/package.py
@@ -38,11 +38,12 @@ class OEHasPackage(OETestDecorator):
if isinstance(self.need_pkgs, str):
self.need_pkgs = [self.need_pkgs,]
+ mlprefix = self.case.td.get("MLPREFIX")
for pkg in self.need_pkgs:
if pkg.startswith('!'):
- unneed_pkgs.add(pkg[1:])
+ unneed_pkgs.add(mlprefix + pkg[1:])
else:
- need_pkgs.add(pkg)
+ need_pkgs.add(mlprefix + pkg)
if unneed_pkgs:
msg = 'Checking if %s is not installed' % ', '.join(unneed_pkgs)
diff --git a/meta/lib/oeqa/sdk/buildtools-cases/build.py b/meta/lib/oeqa/sdk/buildtools-cases/build.py
index aee2e5a8c0..c85c32496b 100644
--- a/meta/lib/oeqa/sdk/buildtools-cases/build.py
+++ b/meta/lib/oeqa/sdk/buildtools-cases/build.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
diff --git a/meta/lib/oeqa/sdk/buildtools-cases/gcc.py b/meta/lib/oeqa/sdk/buildtools-cases/gcc.py
index 36ba15b134..a62c4d0bc4 100644
--- a/meta/lib/oeqa/sdk/buildtools-cases/gcc.py
+++ b/meta/lib/oeqa/sdk/buildtools-cases/gcc.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
diff --git a/meta/lib/oeqa/sdk/buildtools-cases/https.py b/meta/lib/oeqa/sdk/buildtools-cases/https.py
index 35e549eb40..4525e3d758 100644
--- a/meta/lib/oeqa/sdk/buildtools-cases/https.py
+++ b/meta/lib/oeqa/sdk/buildtools-cases/https.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
diff --git a/meta/lib/oeqa/sdk/buildtools-cases/sanity.py b/meta/lib/oeqa/sdk/buildtools-cases/sanity.py
index 64baaa8f84..a55d456656 100644
--- a/meta/lib/oeqa/sdk/buildtools-cases/sanity.py
+++ b/meta/lib/oeqa/sdk/buildtools-cases/sanity.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
@@ -19,4 +21,4 @@ class SanityTests(OESDKTestCase):
# Canonicalise the location of this command
tool_path = os.path.realpath(self._run("command -v %s" % command).strip())
# Assert that the tool was found inside the SDK root
- self.assertEquals(os.path.commonprefix((sdk_base, tool_path)), sdk_base)
+ self.assertEqual(os.path.commonprefix((sdk_base, tool_path)), sdk_base)
diff --git a/meta/lib/oeqa/sdk/buildtools-docs-cases/build.py b/meta/lib/oeqa/sdk/buildtools-docs-cases/build.py
index 5b0eca046f..6e3ee94292 100644
--- a/meta/lib/oeqa/sdk/buildtools-docs-cases/build.py
+++ b/meta/lib/oeqa/sdk/buildtools-docs-cases/build.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
diff --git a/meta/lib/oeqa/sdk/cases/assimp.py b/meta/lib/oeqa/sdk/cases/assimp.py
index f166758e49..e986838aea 100644
--- a/meta/lib/oeqa/sdk/cases/assimp.py
+++ b/meta/lib/oeqa/sdk/cases/assimp.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
@@ -23,18 +25,21 @@ class BuildAssimp(OESDKTestCase):
def test_assimp(self):
with tempfile.TemporaryDirectory(prefix="assimp", dir=self.tc.sdk_dir) as testdir:
- tarball = self.fetch(testdir, self.td["DL_DIR"], "https://github.com/assimp/assimp/archive/v4.1.0.tar.gz")
+ tarball = self.fetch(testdir, self.td["DL_DIR"], "https://github.com/assimp/assimp/archive/v5.3.1.tar.gz")
dirs = {}
- dirs["source"] = os.path.join(testdir, "assimp-4.1.0")
+ dirs["source"] = os.path.join(testdir, "assimp-5.3.1")
dirs["build"] = os.path.join(testdir, "build")
dirs["install"] = os.path.join(testdir, "install")
subprocess.check_output(["tar", "xf", tarball, "-C", testdir], stderr=subprocess.STDOUT)
self.assertTrue(os.path.isdir(dirs["source"]))
+ # Apply the zlib patch https://github.com/madler/zlib/commit/a566e156b3fa07b566ddbf6801b517a9dba04fa3
+ # this sed wont be needed once assimp moves its zlib copy to v1.3.1+
+ self._run("sed -i '/# ifdef _FILE_OFFSET_BITS/I,+2 d' {source}/contrib/zlib/gzguts.h".format(**dirs))
os.makedirs(dirs["build"])
- self._run("cd {build} && cmake -DCMAKE_VERBOSE_MAKEFILE:BOOL=ON {source}".format(**dirs))
+ self._run("cd {build} && cmake -DCMAKE_VERBOSE_MAKEFILE:BOOL=ON -DASSIMP_BUILD_ZLIB=ON {source}".format(**dirs))
self._run("cmake --build {build} -- -j".format(**dirs))
self._run("cmake --build {build} --target install -- DESTDIR={install}".format(**dirs))
- self.check_elf(os.path.join(dirs["install"], "usr", "local", "lib", "libassimp.so.4.1.0"))
+ self.check_elf(os.path.join(dirs["install"], "usr", "local", "lib", "libassimp.so.5.3.0"))
diff --git a/meta/lib/oeqa/sdk/cases/buildcpio.py b/meta/lib/oeqa/sdk/cases/buildcpio.py
index e7fc211a47..51003b19cd 100644
--- a/meta/lib/oeqa/sdk/cases/buildcpio.py
+++ b/meta/lib/oeqa/sdk/cases/buildcpio.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
@@ -17,10 +19,10 @@ class BuildCpioTest(OESDKTestCase):
"""
def test_cpio(self):
with tempfile.TemporaryDirectory(prefix="cpio-", dir=self.tc.sdk_dir) as testdir:
- tarball = self.fetch(testdir, self.td["DL_DIR"], "https://ftp.gnu.org/gnu/cpio/cpio-2.13.tar.gz")
+ tarball = self.fetch(testdir, self.td["DL_DIR"], "https://ftp.gnu.org/gnu/cpio/cpio-2.15.tar.gz")
dirs = {}
- dirs["source"] = os.path.join(testdir, "cpio-2.13")
+ dirs["source"] = os.path.join(testdir, "cpio-2.15")
dirs["build"] = os.path.join(testdir, "build")
dirs["install"] = os.path.join(testdir, "install")
@@ -28,8 +30,7 @@ class BuildCpioTest(OESDKTestCase):
self.assertTrue(os.path.isdir(dirs["source"]))
os.makedirs(dirs["build"])
- self._run("sed -i -e '/char.*program_name/d' {source}/src/global.c".format(**dirs))
- self._run("cd {build} && {source}/configure --disable-maintainer-mode $CONFIGURE_FLAGS".format(**dirs))
+ self._run("cd {build} && {source}/configure $CONFIGURE_FLAGS".format(**dirs))
self._run("cd {build} && make -j".format(**dirs))
self._run("cd {build} && make install DESTDIR={install}".format(**dirs))
diff --git a/meta/lib/oeqa/sdk/cases/buildepoxy.py b/meta/lib/oeqa/sdk/cases/buildepoxy.py
index f69f720cd6..147ee3e0ee 100644
--- a/meta/lib/oeqa/sdk/cases/buildepoxy.py
+++ b/meta/lib/oeqa/sdk/cases/buildepoxy.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
@@ -16,7 +18,8 @@ class EpoxyTest(OESDKTestCase):
Test that Meson builds correctly.
"""
def setUp(self):
- if not (self.tc.hasHostPackage("nativesdk-meson")):
+ if not (self.tc.hasHostPackage("nativesdk-meson") or
+ self.tc.hasHostPackage("meson-native")):
raise unittest.SkipTest("EpoxyTest class: SDK doesn't contain Meson")
def test_epoxy(self):
@@ -32,7 +35,7 @@ class EpoxyTest(OESDKTestCase):
self.assertTrue(os.path.isdir(dirs["source"]))
os.makedirs(dirs["build"])
- log = self._run("meson -Degl=no -Dglx=no -Dx11=false {build} {source}".format(**dirs))
+ log = self._run("meson --warnlevel 1 -Degl=no -Dglx=no -Dx11=false {build} {source}".format(**dirs))
# Check that Meson thinks we're doing a cross build and not a native
self.assertIn("Build type: cross build", log)
self._run("ninja -C {build} -v".format(**dirs))
diff --git a/meta/lib/oeqa/sdk/cases/buildgalculator.py b/meta/lib/oeqa/sdk/cases/buildgalculator.py
index eb3c8ddf39..178f07472d 100644
--- a/meta/lib/oeqa/sdk/cases/buildgalculator.py
+++ b/meta/lib/oeqa/sdk/cases/buildgalculator.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
@@ -19,7 +21,8 @@ class GalculatorTest(OESDKTestCase):
if not (self.tc.hasTargetPackage("gtk+3", multilib=True) or \
self.tc.hasTargetPackage("libgtk-3.0", multilib=True)):
raise unittest.SkipTest("GalculatorTest class: SDK don't support gtk+3")
- if not (self.tc.hasHostPackage("nativesdk-gettext-dev")):
+ if not (self.tc.hasHostPackage("nativesdk-gettext-dev") or
+ self.tc.hasHostPackage("gettext-native")):
raise unittest.SkipTest("GalculatorTest class: SDK doesn't contain gettext")
def test_galculator(self):
diff --git a/meta/lib/oeqa/sdk/cases/buildlzip.py b/meta/lib/oeqa/sdk/cases/buildlzip.py
index 49ae756bf3..b4b7d85b88 100644
--- a/meta/lib/oeqa/sdk/cases/buildlzip.py
+++ b/meta/lib/oeqa/sdk/cases/buildlzip.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
diff --git a/meta/lib/oeqa/sdk/cases/gcc.py b/meta/lib/oeqa/sdk/cases/gcc.py
index eb08eadd28..fc28b9c3d4 100644
--- a/meta/lib/oeqa/sdk/cases/gcc.py
+++ b/meta/lib/oeqa/sdk/cases/gcc.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
diff --git a/meta/lib/oeqa/sdk/cases/maturin.py b/meta/lib/oeqa/sdk/cases/maturin.py
new file mode 100644
index 0000000000..ea10f568b2
--- /dev/null
+++ b/meta/lib/oeqa/sdk/cases/maturin.py
@@ -0,0 +1,79 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+import os
+import shutil
+import unittest
+
+from oeqa.core.utils.path import remove_safe
+from oeqa.sdk.case import OESDKTestCase
+from oeqa.utils.subprocesstweak import errors_have_output
+
+errors_have_output()
+
+
+class MaturinTest(OESDKTestCase):
+ def setUp(self):
+ if not (
+ self.tc.hasHostPackage("nativesdk-python3-maturin")
+ or self.tc.hasHostPackage("python3-maturin-native")
+ ):
+ raise unittest.SkipTest("No python3-maturin package in the SDK")
+
+ def test_maturin_list_python(self):
+ py_major = self._run("python3 -c 'import sys; print(sys.version_info.major)'")
+ py_minor = self._run("python3 -c 'import sys; print(sys.version_info.minor)'")
+ python_version = "%s.%s" % (py_major.strip(), py_minor.strip())
+ cmd = "maturin list-python"
+ output = self._run(cmd)
+ self.assertRegex(output, r"^🐍 1 python interpreter found:\n")
+ self.assertRegex(
+ output,
+ r" - CPython %s (.+)/usr/bin/python%s$" % (python_version, python_version),
+ )
+
+
+class MaturinDevelopTest(OESDKTestCase):
+ @classmethod
+ def setUpClass(self):
+ targetdir = os.path.join(self.tc.sdk_dir, "guessing-game")
+ try:
+ shutil.rmtree(targetdir)
+ except FileNotFoundError:
+ pass
+ shutil.copytree(
+ os.path.join(self.tc.files_dir, "maturin/guessing-game"), targetdir
+ )
+
+ def setUp(self):
+ machine = self.td.get("MACHINE")
+ if not (
+ self.tc.hasHostPackage("nativesdk-python3-maturin")
+ or self.tc.hasHostPackage("python3-maturin-native")
+ ):
+ raise unittest.SkipTest("No python3-maturin package in the SDK")
+ if not (
+ self.tc.hasHostPackage("packagegroup-rust-cross-canadian-%s" % machine)
+ ):
+ raise unittest.SkipTest(
+ "Testing 'maturin develop' requires Rust cross-canadian in the SDK"
+ )
+
+ def test_maturin_develop(self):
+ """
+ This test case requires:
+ (1) that a .venv can been created.
+ (2) a functional 'rustc' and 'cargo'
+ """
+ self._run("cd %s/guessing-game; python3 -m venv .venv" % self.tc.sdk_dir)
+ cmd = "cd %s/guessing-game; maturin develop" % self.tc.sdk_dir
+ output = self._run(cmd)
+ self.assertRegex(output, r"🔗 Found pyo3 bindings with abi3 support for Python ≥ 3.8")
+ self.assertRegex(output, r"🐍 Not using a specific python interpreter")
+ self.assertRegex(output, r"📡 Using build options features from pyproject.toml")
+ self.assertRegex(output, r"Compiling guessing-game v0.1.0")
+ self.assertRegex(output, r"📦 Built wheel for abi3 Python ≥ 3.8")
+ self.assertRegex(output, r"🛠 Installed guessing-game-0.1.0")
diff --git a/meta/lib/oeqa/sdk/cases/perl.py b/meta/lib/oeqa/sdk/cases/perl.py
index 14d76d820f..8eab4442e8 100644
--- a/meta/lib/oeqa/sdk/cases/perl.py
+++ b/meta/lib/oeqa/sdk/cases/perl.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
diff --git a/meta/lib/oeqa/sdk/cases/python.py b/meta/lib/oeqa/sdk/cases/python.py
index a334abce5f..5ea992b9f3 100644
--- a/meta/lib/oeqa/sdk/cases/python.py
+++ b/meta/lib/oeqa/sdk/cases/python.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
@@ -8,17 +10,6 @@ from oeqa.sdk.case import OESDKTestCase
from oeqa.utils.subprocesstweak import errors_have_output
errors_have_output()
-class Python2Test(OESDKTestCase):
- def setUp(self):
- if not (self.tc.hasHostPackage("nativesdk-python-core") or
- self.tc.hasHostPackage("python-core-native")):
- raise unittest.SkipTest("No python package in the SDK")
-
- def test_python2(self):
- cmd = "python -c \"import codecs; print(codecs.encode('Uryyb, jbeyq', 'rot13'))\""
- output = self._run(cmd)
- self.assertEqual(output, "Hello, world\n")
-
class Python3Test(OESDKTestCase):
def setUp(self):
if not (self.tc.hasHostPackage("nativesdk-python3-core") or
diff --git a/meta/lib/oeqa/sdk/cases/rust.py b/meta/lib/oeqa/sdk/cases/rust.py
new file mode 100644
index 0000000000..f5d437bb19
--- /dev/null
+++ b/meta/lib/oeqa/sdk/cases/rust.py
@@ -0,0 +1,57 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+import os
+import shutil
+import unittest
+
+from oeqa.core.utils.path import remove_safe
+from oeqa.sdk.case import OESDKTestCase
+
+from oeqa.utils.subprocesstweak import errors_have_output
+errors_have_output()
+
+class RustCompileTest(OESDKTestCase):
+ td_vars = ['MACHINE']
+
+ @classmethod
+ def setUpClass(self):
+ targetdir = os.path.join(self.tc.sdk_dir, "hello")
+ try:
+ shutil.rmtree(targetdir)
+ except FileNotFoundError:
+ pass
+ shutil.copytree(os.path.join(self.tc.sdk_files_dir, "rust/hello"), targetdir)
+
+ def setUp(self):
+ machine = self.td.get("MACHINE")
+ if not self.tc.hasHostPackage("packagegroup-rust-cross-canadian-%s" % machine):
+ raise unittest.SkipTest("RustCompileTest class: SDK doesn't contain a Rust cross-canadian toolchain")
+
+ def test_cargo_build(self):
+ self._run('cd %s/hello; cargo build' % self.tc.sdk_dir)
+
+class RustHostCompileTest(OESDKTestCase):
+ td_vars = ['MACHINE', 'SDK_SYS']
+
+ @classmethod
+ def setUpClass(self):
+ targetdir = os.path.join(self.tc.sdk_dir, "hello")
+ try:
+ shutil.rmtree(targetdir)
+ except FileNotFoundError:
+ pass
+ shutil.copytree(os.path.join(self.tc.sdk_files_dir, "rust/hello"), targetdir)
+
+ def setUp(self):
+ machine = self.td.get("MACHINE")
+ if not self.tc.hasHostPackage("packagegroup-rust-cross-canadian-%s" % machine):
+ raise unittest.SkipTest("RustCompileTest class: SDK doesn't contain a Rust cross-canadian toolchain")
+
+ def test_cargo_build(self):
+ sdksys = self.td.get("SDK_SYS")
+ self._run('cd %s/hello; cargo build --target %s-gnu' % (self.tc.sdk_dir, sdksys))
+ self._run('cd %s/hello; cargo run --target %s-gnu' % (self.tc.sdk_dir, sdksys))
diff --git a/meta/lib/oeqa/sdk/files/rust/hello/Cargo.toml b/meta/lib/oeqa/sdk/files/rust/hello/Cargo.toml
new file mode 100644
index 0000000000..fe619478a6
--- /dev/null
+++ b/meta/lib/oeqa/sdk/files/rust/hello/Cargo.toml
@@ -0,0 +1,6 @@
+[package]
+name = "hello"
+version = "0.1.0"
+edition = "2021"
+
+[dependencies]
diff --git a/meta/lib/oeqa/sdk/files/rust/hello/build.rs b/meta/lib/oeqa/sdk/files/rust/hello/build.rs
new file mode 100644
index 0000000000..b1a533d5df
--- /dev/null
+++ b/meta/lib/oeqa/sdk/files/rust/hello/build.rs
@@ -0,0 +1,3 @@
+/* This is the simplest build script just to invoke host compiler
+ in the build process. */
+fn main() {}
diff --git a/meta/lib/oeqa/sdk/files/rust/hello/src/main.rs b/meta/lib/oeqa/sdk/files/rust/hello/src/main.rs
new file mode 100644
index 0000000000..a06c03f82a
--- /dev/null
+++ b/meta/lib/oeqa/sdk/files/rust/hello/src/main.rs
@@ -0,0 +1,3 @@
+fn main() {
+ println!("Hello, OpenEmbedded world!");
+}
diff --git a/meta/lib/oeqa/sdk/testmetaidesupport.py b/meta/lib/oeqa/sdk/testmetaidesupport.py
new file mode 100644
index 0000000000..00ef30e82e
--- /dev/null
+++ b/meta/lib/oeqa/sdk/testmetaidesupport.py
@@ -0,0 +1,45 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+class TestSDK(object):
+ def run(self, d):
+ import json
+ import logging
+ from oeqa.sdk.context import OESDKTestContext, OESDKTestContextExecutor
+ from oeqa.utils import make_logger_bitbake_compatible
+
+ pn = d.getVar("PN")
+
+ logger = make_logger_bitbake_compatible(logging.getLogger("BitBake"))
+
+ sdk_dir = d.expand("${WORKDIR}/testsdk/")
+ bb.utils.remove(sdk_dir, True)
+ bb.utils.mkdirhier(sdk_dir)
+
+ sdk_envs = OESDKTestContextExecutor._get_sdk_environs(d.getVar("DEPLOY_DIR_IMAGE"))
+ tdname = d.expand("${DEPLOY_DIR_IMAGE}/${PN}.testdata.json")
+ test_data = json.load(open(tdname, "r"))
+
+ host_pkg_manifest = {"cmake-native":"", "gcc-cross":"", "gettext-native":"", "meson-native":"", "perl-native":"", "python3-core-native":"", }
+ target_pkg_manifest = {"gtk+3":""}
+
+ for s in sdk_envs:
+ bb.plain("meta-ide-support based SDK testing environment: %s" % s)
+
+ sdk_env = sdk_envs[s]
+
+ tc = OESDKTestContext(td=test_data, logger=logger, sdk_dir=sdk_dir,
+ sdk_env=sdk_env, target_pkg_manifest=target_pkg_manifest,
+ host_pkg_manifest=host_pkg_manifest)
+
+ tc.loadTests(OESDKTestContextExecutor.default_cases)
+
+ results = tc.runTests()
+ if results:
+ results.logSummary(pn)
+
+ if (not results) or (not results.wasSuccessful()):
+ bb.fatal('%s - FAILED' % (pn,), forcelog=True)
diff --git a/meta/lib/oeqa/sdk/testsdk.py b/meta/lib/oeqa/sdk/testsdk.py
index 35e40187bc..518b09febb 100644
--- a/meta/lib/oeqa/sdk/testsdk.py
+++ b/meta/lib/oeqa/sdk/testsdk.py
@@ -23,14 +23,6 @@ class TestSDKBase(object):
return configuration
@staticmethod
- def get_sdk_json_result_dir(d):
- json_result_dir = os.path.join(d.getVar("LOG_DIR"), 'oeqa')
- custom_json_result_dir = d.getVar("OEQA_JSON_RESULT_DIR")
- if custom_json_result_dir:
- json_result_dir = custom_json_result_dir
- return json_result_dir
-
- @staticmethod
def get_sdk_result_id(configuration):
return '%s_%s_%s_%s_%s' % (configuration['TEST_TYPE'], configuration['IMAGE_BASENAME'], configuration['SDKMACHINE'], configuration['MACHINE'], configuration['STARTTIME'])
@@ -72,6 +64,7 @@ class TestSDK(TestSDKBase):
from bb.utils import export_proxies
from oeqa.utils import make_logger_bitbake_compatible
+ from oeqa.utils import get_json_result_dir
pn = d.getVar("PN")
logger = make_logger_bitbake_compatible(logging.getLogger("BitBake"))
@@ -79,6 +72,9 @@ class TestSDK(TestSDKBase):
# sdk use network for download projects for build
export_proxies(d)
+ # We need the original PATH for testing the eSDK, not with our manipulations
+ os.environ['PATH'] = d.getVar("BB_ORIGENV", False).getVar("PATH")
+
tcname = self.get_tcname(d)
if not os.path.exists(tcname):
@@ -131,7 +127,7 @@ class TestSDK(TestSDKBase):
component = "%s %s" % (pn, self.context_executor_class.name)
context_msg = "%s:%s" % (os.path.basename(tcname), os.path.basename(sdk_env))
configuration = self.get_sdk_configuration(d, self.test_type)
- result.logDetails(self.get_sdk_json_result_dir(d),
+ result.logDetails(get_json_result_dir(d),
configuration,
self.get_sdk_result_id(configuration))
result.logSummary(component, context_msg)
diff --git a/meta/lib/oeqa/sdkext/cases/devtool.py b/meta/lib/oeqa/sdkext/cases/devtool.py
index a5c6a76e02..5ffb732556 100644
--- a/meta/lib/oeqa/sdkext/cases/devtool.py
+++ b/meta/lib/oeqa/sdkext/cases/devtool.py
@@ -112,7 +112,7 @@ class SdkUpdateTest(OESDKExtTestCase):
cmd = 'oe-publish-sdk %s %s' % (tcname_new, self.publish_dir)
subprocess.check_output(cmd, shell=True)
- self.http_service = HTTPService(self.publish_dir)
+ self.http_service = HTTPService(self.publish_dir, logger=self.logger)
self.http_service.start()
self.http_url = "http://127.0.0.1:%d" % self.http_service.port
diff --git a/meta/lib/oeqa/sdkext/testsdk.py b/meta/lib/oeqa/sdkext/testsdk.py
index 159f0d135b..9d5a99d900 100644
--- a/meta/lib/oeqa/sdkext/testsdk.py
+++ b/meta/lib/oeqa/sdkext/testsdk.py
@@ -16,6 +16,7 @@ class TestSDKExt(TestSDKBase):
from bb.utils import export_proxies
from oeqa.utils import avoid_paths_in_environ, make_logger_bitbake_compatible, subprocesstweak
from oeqa.sdkext.context import OESDKExtTestContext, OESDKExtTestContextExecutor
+ from oeqa.utils import get_json_result_dir
pn = d.getVar("PN")
logger = make_logger_bitbake_compatible(logging.getLogger("BitBake"))
@@ -91,7 +92,7 @@ class TestSDKExt(TestSDKBase):
component = "%s %s" % (pn, OESDKExtTestContextExecutor.name)
context_msg = "%s:%s" % (os.path.basename(tcname), os.path.basename(sdk_env))
configuration = self.get_sdk_configuration(d, 'sdkext')
- result.logDetails(self.get_sdk_json_result_dir(d),
+ result.logDetails(get_json_result_dir(d),
configuration,
self.get_sdk_result_id(configuration))
result.logSummary(component, context_msg)
diff --git a/meta/lib/oeqa/selftest/case.py b/meta/lib/oeqa/selftest/case.py
index dcad4f76ec..da35b25f68 100644
--- a/meta/lib/oeqa/selftest/case.py
+++ b/meta/lib/oeqa/selftest/case.py
@@ -117,10 +117,6 @@ class OESelftestTestCase(OETestCase):
if e.errno != errno.ENOENT:
raise
- if self.tc.custommachine:
- machine_conf = 'MACHINE ??= "%s"\n' % self.tc.custommachine
- self.set_machine_config(machine_conf)
-
# tests might need their own setup
# but if they overwrite this one they have to call
# super each time, so let's give them an alternative
@@ -178,19 +174,11 @@ class OESelftestTestCase(OETestCase):
self.logger.debug("Writing to: %s\n%s\n" % (dest_path, data))
ftools.write_file(dest_path, data)
- if not multiconfig and self.tc.custommachine and 'MACHINE' in data:
- machine = get_bb_var('MACHINE')
- self.logger.warning('MACHINE overridden: %s' % machine)
-
def append_config(self, data):
"""Append to <builddir>/conf/selftest.inc"""
self.logger.debug("Appending to: %s\n%s\n" % (self.testinc_path, data))
ftools.append_file(self.testinc_path, data)
- if self.tc.custommachine and 'MACHINE' in data:
- machine = get_bb_var('MACHINE')
- self.logger.warning('MACHINE overridden: %s' % machine)
-
def remove_config(self, data):
"""Remove data from <builddir>/conf/selftest.inc"""
self.logger.debug("Removing from: %s\n%s\n" % (self.testinc_path, data))
@@ -249,6 +237,13 @@ class OESelftestTestCase(OETestCase):
self.logger.debug("Writing to: %s\n%s\n" % (self.machineinc_path, data))
ftools.write_file(self.machineinc_path, data)
+ def disable_class(self, classname):
+ destfile = "%s/classes/%s.bbclass" % (self.builddir, classname)
+ os.makedirs(os.path.dirname(destfile), exist_ok=True)
+ self.track_for_cleanup(destfile)
+ self.logger.debug("Creating empty class: %s\n" % (destfile))
+ ftools.write_file(destfile, "")
+
# check does path exist
def assertExists(self, expr, msg=None):
if not os.path.exists(expr):
diff --git a/meta/lib/oeqa/selftest/cases/_sstatetests_noauto.py b/meta/lib/oeqa/selftest/cases/_sstatetests_noauto.py
index bff6e7740c..2c9584d329 100644
--- a/meta/lib/oeqa/selftest/cases/_sstatetests_noauto.py
+++ b/meta/lib/oeqa/selftest/cases/_sstatetests_noauto.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
@@ -7,7 +9,7 @@ import shutil
import oeqa.utils.ftools as ftools
from oeqa.utils.commands import runCmd, bitbake, get_bb_var
-from oeqa.selftest.cases.sstate import SStateBase
+from oeqa.selftest.cases.sstatetests import SStateBase
class RebuildFromSState(SStateBase):
diff --git a/meta/lib/oeqa/selftest/cases/archiver.py b/meta/lib/oeqa/selftest/cases/archiver.py
index 75195241b7..3cb888c506 100644
--- a/meta/lib/oeqa/selftest/cases/archiver.py
+++ b/meta/lib/oeqa/selftest/cases/archiver.py
@@ -1,9 +1,12 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
import os
import glob
+import re
from oeqa.utils.commands import bitbake, get_bb_vars
from oeqa.selftest.case import OESelftestTestCase
@@ -117,7 +120,38 @@ class Archiver(OESelftestTestCase):
excluded_present = len(glob.glob(src_path_target + '/%s-*/*' % target_recipes[1]))
self.assertFalse(excluded_present, 'Recipe %s was not excluded.' % target_recipes[1])
+ def test_archiver_multiconfig_shared_unpack_and_patch(self):
+ """
+ Test that shared recipes in original mode with diff enabled works in multiconfig,
+ otherwise it will not build when using the same TMP dir.
+ """
+
+ features = 'BBMULTICONFIG = "mc1 mc2"\n'
+ features += 'INHERIT += "archiver"\n'
+ features += 'ARCHIVER_MODE[src] = "original"\n'
+ features += 'ARCHIVER_MODE[diff] = "1"\n'
+ self.write_config(features)
+ # We can use any machine in multiconfig as long as they are different
+ self.write_config('MACHINE = "qemuarm"\n', 'mc1')
+ self.write_config('MACHINE = "qemux86"\n', 'mc2')
+
+ task = 'do_unpack_and_patch'
+ # Use gcc-source as it is a shared recipe (appends the pv to the pn)
+ pn = 'gcc-source-%s' % get_bb_vars(['PV'], 'gcc')['PV']
+
+ # Generate the tasks signatures
+ bitbake('mc:mc1:%s mc:mc2:%s -c %s -S lockedsigs' % (pn, pn, task))
+
+ # Check the tasks signatures
+ # To be machine agnostic the tasks needs to generate the same signature for each machine
+ locked_sigs_inc = "%s/locked-sigs.inc" % self.builddir
+ locked_sigs = open(locked_sigs_inc).read()
+ task_sigs = re.findall(r"%s:%s:.*" % (pn, task), locked_sigs)
+ uniq_sigs = set(task_sigs)
+ self.assertFalse(len(uniq_sigs) - 1, \
+ 'The task "%s" of the recipe "%s" has different signatures in "%s" for each machine in multiconfig' \
+ % (task, pn, locked_sigs_inc))
def test_archiver_srpm_mode(self):
"""
diff --git a/meta/lib/oeqa/selftest/cases/baremetal.py b/meta/lib/oeqa/selftest/cases/baremetal.py
new file mode 100644
index 0000000000..cadaea2f1a
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/baremetal.py
@@ -0,0 +1,14 @@
+
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+from oeqa.selftest.case import OESelftestTestCase
+from oeqa.utils.commands import bitbake
+
+class BaremetalTest(OESelftestTestCase):
+ def test_baremetal(self):
+ self.write_config('TCLIBC = "baremetal"')
+ bitbake('baremetal-helloworld')
diff --git a/meta/lib/oeqa/selftest/cases/bblayers.py b/meta/lib/oeqa/selftest/cases/bblayers.py
index 7d74833f61..695d17377d 100644
--- a/meta/lib/oeqa/selftest/cases/bblayers.py
+++ b/meta/lib/oeqa/selftest/cases/bblayers.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
@@ -6,12 +8,18 @@ import os
import re
import oeqa.utils.ftools as ftools
-from oeqa.utils.commands import runCmd, get_bb_var, get_bb_vars
+from oeqa.utils.commands import runCmd, get_bb_var, get_bb_vars, bitbake
from oeqa.selftest.case import OESelftestTestCase
class BitbakeLayers(OESelftestTestCase):
+ @classmethod
+ def setUpClass(cls):
+ super(BitbakeLayers, cls).setUpClass()
+ bitbake("python3-jsonschema-native")
+ bitbake("-c addto_recipe_sysroot python3-jsonschema-native")
+
def test_bitbakelayers_layerindexshowdepends(self):
result = runCmd('bitbake-layers layerindex-show-depends meta-poky')
find_in_contents = re.search("openembedded-core", result.output)
@@ -46,7 +54,7 @@ class BitbakeLayers(OESelftestTestCase):
bb_file = os.path.join(testoutdir, recipe_path, recipe_file)
self.assertTrue(os.path.isfile(bb_file), msg = "Cannot find xcursor-transparent-theme_0.1.1.bb in the test_bitbakelayers_flatten local dir.")
contents = ftools.read_file(bb_file)
- find_in_contents = re.search("##### bbappended from meta-selftest #####\n(.*\n)*include test_recipe.inc", contents)
+ find_in_contents = re.search(r"##### bbappended from meta-selftest #####\n(.*\n)*include test_recipe.inc", contents)
self.assertTrue(find_in_contents, msg = "Flattening layers did not work. bitbake-layers flatten output: %s" % result.output)
def test_bitbakelayers_add_remove(self):
@@ -77,8 +85,9 @@ class BitbakeLayers(OESelftestTestCase):
result = runCmd('bitbake-layers show-recipes -i image')
self.assertIn('core-image-minimal', result.output)
self.assertNotIn('mtd-utils:', result.output)
- result = runCmd('bitbake-layers show-recipes -i cmake,pkgconfig')
+ result = runCmd('bitbake-layers show-recipes -i meson,pkgconfig')
self.assertIn('libproxy:', result.output)
+ result = runCmd('bitbake-layers show-recipes -i cmake,pkgconfig')
self.assertNotIn('mtd-utils:', result.output) # doesn't inherit either
self.assertNotIn('wget:', result.output) # doesn't inherit cmake
self.assertNotIn('waffle:', result.output) # doesn't inherit pkgconfig
@@ -111,6 +120,11 @@ class BitbakeLayers(OESelftestTestCase):
self.assertEqual(bb_vars['BBFILE_PRIORITY_%s' % layername], str(priority), 'BBFILE_PRIORITY_%s != %d' % (layername, priority))
+ result = runCmd('bitbake-layers save-build-conf {} {}'.format(layerpath, "buildconf-1"))
+ for f in ('local.conf.sample', 'bblayers.conf.sample', 'conf-summary.txt', 'conf-notes.txt'):
+ fullpath = os.path.join(layerpath, "conf", "templates", "buildconf-1", f)
+ self.assertTrue(os.path.exists(fullpath), "Template configuration file {} not found".format(fullpath))
+
def get_recipe_basename(self, recipe):
recipe_file = ""
result = runCmd("bitbake-layers show-recipes -f %s" % recipe)
@@ -121,3 +135,108 @@ class BitbakeLayers(OESelftestTestCase):
self.assertTrue(os.path.isfile(recipe_file), msg = "Can't find recipe file for %s" % recipe)
return os.path.basename(recipe_file)
+
+ def validate_layersjson(self, json):
+ python = os.path.join(get_bb_var('STAGING_BINDIR', 'python3-jsonschema-native'), 'nativepython3')
+ jsonvalidator = os.path.join(get_bb_var('STAGING_BINDIR', 'python3-jsonschema-native'), 'jsonschema')
+ jsonschema = os.path.join(get_bb_var('COREBASE'), 'meta/files/layers.schema.json')
+ result = runCmd("{} {} -i {} {}".format(python, jsonvalidator, json, jsonschema))
+
+ def test_validate_examplelayersjson(self):
+ json = os.path.join(get_bb_var('COREBASE'), "meta/files/layers.example.json")
+ self.validate_layersjson(json)
+
+ def test_bitbakelayers_setup(self):
+ result = runCmd('bitbake-layers create-layers-setup {}'.format(self.testlayer_path))
+ jsonfile = os.path.join(self.testlayer_path, "setup-layers.json")
+ self.validate_layersjson(jsonfile)
+
+ # The revision-under-test may not necessarily be available on the remote server,
+ # so replace it with a revision that has a yocto-4.1 tag.
+ import json
+ with open(jsonfile) as f:
+ data = json.load(f)
+ for s in data['sources']:
+ data['sources'][s]['git-remote']['rev'] = '5200799866b92259e855051112520006e1aaaac0'
+ with open(jsonfile, 'w') as f:
+ json.dump(data, f)
+
+ testcheckoutdir = os.path.join(self.builddir, 'test-layer-checkout')
+ result = runCmd('{}/setup-layers --destdir {}'.format(self.testlayer_path, testcheckoutdir))
+ layers_json = os.path.join(testcheckoutdir, ".oe-layers.json")
+ self.assertTrue(os.path.exists(layers_json), "File {} not found in test layer checkout".format(layers_json))
+
+ # As setup-layers checkout out an old revision of poky, there is no setup-build symlink,
+ # and we need to run oe-setup-build directly from the current poky tree under test
+ oe_setup_build = os.path.join(get_bb_var('COREBASE'), 'scripts/oe-setup-build')
+ oe_setup_build_l = os.path.join(testcheckoutdir, 'setup-build')
+ os.symlink(oe_setup_build,oe_setup_build_l)
+
+ cmd = '{} --layerlist {} list -v'.format(oe_setup_build_l, layers_json)
+ result = runCmd(cmd)
+ cond = "conf/templates/default" in result.output
+ self.assertTrue(cond, "Incorrect output from {}: {}".format(cmd, result.output))
+
+ # rather than hardcode the build setup cmdline here, let's actually run what the tool suggests to the user
+ conf = None
+ if 'poky-default' in result.output:
+ conf = 'poky-default'
+ elif 'meta-default' in result.output:
+ conf = 'meta-default'
+ self.assertIsNotNone(conf, "Could not find the configuration to set up a build in the output: {}".format(result.output))
+
+ cmd = '{} --layerlist {} setup -c {} --no-shell'.format(oe_setup_build_l, layers_json, conf)
+ result = runCmd(cmd)
+
+ def test_bitbakelayers_updatelayer(self):
+ result = runCmd('bitbake-layers create-layers-setup {}'.format(self.testlayer_path))
+ jsonfile = os.path.join(self.testlayer_path, "setup-layers.json")
+ self.validate_layersjson(jsonfile)
+
+ import json
+ with open(jsonfile) as f:
+ data = json.load(f)
+ repos = []
+ for s in data['sources']:
+ repos.append(s)
+
+ self.assertTrue(len(repos) > 1, "Not enough repositories available")
+ self.validate_layersjson(jsonfile)
+
+ test_ref_1 = 'ref_1'
+ test_ref_2 = 'ref_2'
+
+ # Create a new layers setup using custom references
+ result = runCmd('bitbake-layers create-layers-setup --use-custom-reference {first_repo}:{test_ref} --use-custom-reference {second_repo}:{test_ref} {path}'
+ .format(first_repo=repos[0], second_repo=repos[1], test_ref=test_ref_1, path=self.testlayer_path))
+ self.validate_layersjson(jsonfile)
+
+ with open(jsonfile) as f:
+ data = json.load(f)
+ first_rev_1 = data['sources'][repos[0]]['git-remote']['rev']
+ first_desc_1 = data['sources'][repos[0]]['git-remote']['describe']
+ second_rev_1 = data['sources'][repos[1]]['git-remote']['rev']
+ second_desc_1 = data['sources'][repos[1]]['git-remote']['describe']
+
+ self.assertEqual(first_rev_1, test_ref_1, "Revision not set correctly: '{}'".format(first_rev_1))
+ self.assertEqual(first_desc_1, '', "Describe not cleared: '{}'".format(first_desc_1))
+ self.assertEqual(second_rev_1, test_ref_1, "Revision not set correctly: '{}'".format(second_rev_1))
+ self.assertEqual(second_desc_1, '', "Describe not cleared: '{}'".format(second_desc_1))
+
+ # Update one of the repositories in the layers setup using a different custom reference
+ # This should only update the selected repository, everything else should remain as is
+ result = runCmd('bitbake-layers create-layers-setup --update --use-custom-reference {first_repo}:{test_ref} {path}'
+ .format(first_repo=repos[0], test_ref=test_ref_2, path=self.testlayer_path))
+ self.validate_layersjson(jsonfile)
+
+ with open(jsonfile) as f:
+ data = json.load(f)
+ first_rev_2 = data['sources'][repos[0]]['git-remote']['rev']
+ first_desc_2 = data['sources'][repos[0]]['git-remote']['describe']
+ second_rev_2 = data['sources'][repos[1]]['git-remote']['rev']
+ second_desc_2 = data['sources'][repos[1]]['git-remote']['describe']
+
+ self.assertEqual(first_rev_2, test_ref_2, "Revision not set correctly: '{}'".format(first_rev_2))
+ self.assertEqual(first_desc_2, '', "Describe not cleared: '{}'".format(first_desc_2))
+ self.assertEqual(second_rev_2, second_rev_1, "Revision should not be updated: '{}'".format(second_rev_2))
+ self.assertEqual(second_desc_2, second_desc_1, "Describe should not be updated: '{}'".format(second_desc_2))
diff --git a/meta/lib/oeqa/selftest/cases/bblock.py b/meta/lib/oeqa/selftest/cases/bblock.py
new file mode 100644
index 0000000000..2b62d2a0aa
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/bblock.py
@@ -0,0 +1,203 @@
+#
+# Copyright (c) 2023 BayLibre, SAS
+# Author: Julien Stepahn <jstephan@baylibre.com>
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
+import os
+import re
+import bb.tinfoil
+
+import oeqa.utils.ftools as ftools
+from oeqa.utils.commands import runCmd, get_bb_var, get_bb_vars, bitbake
+
+from oeqa.selftest.case import OESelftestTestCase
+
+
+class BBLock(OESelftestTestCase):
+ @classmethod
+ def setUpClass(cls):
+ super(BBLock, cls).setUpClass()
+ cls.lockfile = cls.builddir + "/conf/bblock.conf"
+
+ def unlock_recipes(self, recipes=None, tasks=None):
+ cmd = "bblock -r "
+ if recipes:
+ cmd += " ".join(recipes)
+ if tasks:
+ cmd += " -t " + ",".join(tasks)
+ result = runCmd(cmd)
+
+ if recipes:
+ # ensure all signatures are removed from lockfile
+ contents = ftools.read_file(self.lockfile)
+ for recipe in recipes:
+ for task in tasks:
+ find_in_contents = re.search(
+ 'SIGGEN_LOCKEDSIGS_.+\s\+=\s"%s:%s:.*"' % (recipe, task),
+ contents,
+ )
+ self.assertFalse(
+ find_in_contents,
+ msg="%s:%s should not be present into bblock.conf anymore"
+ % (recipe, task),
+ )
+ self.assertExists(self.lockfile)
+ else:
+ self.assertNotExists(self.lockfile)
+
+ def lock_recipes(self, recipes, tasks=None):
+ cmd = "bblock " + " ".join(recipes)
+ if tasks:
+ cmd += " -t " + ",".join(tasks)
+
+ result = runCmd(cmd)
+
+ self.assertExists(self.lockfile)
+
+ # ensure all signatures are added to lockfile
+ contents = ftools.read_file(self.lockfile)
+ for recipe in recipes:
+ if tasks:
+ for task in tasks:
+ find_in_contents = re.search(
+ 'SIGGEN_LOCKEDSIGS_.+\s\+=\s"%s:%s:.*"' % (recipe, task),
+ contents,
+ )
+ self.assertTrue(
+ find_in_contents,
+ msg="%s:%s was not added into bblock.conf. bblock output: %s"
+ % (recipe, task, result.output),
+ )
+
+ def modify_tasks(self, recipes, tasks):
+ task_append = ""
+ for recipe in recipes:
+ bb_vars = get_bb_vars(["PV"], recipe)
+ recipe_pv = bb_vars["PV"]
+ recipe_append_file = recipe + "_" + recipe_pv + ".bbappend"
+
+ os.mkdir(os.path.join(self.testlayer_path, "recipes-test", recipe))
+ recipe_append_path = os.path.join(
+ self.testlayer_path, "recipes-test", recipe, recipe_append_file
+ )
+
+ for task in tasks:
+ task_append += "%s:append() {\n#modify task hash \n}\n" % task
+ ftools.write_file(recipe_append_path, task_append)
+ self.add_command_to_tearDown(
+ "rm -rf %s" % os.path.join(self.testlayer_path, "recipes-test", recipe)
+ )
+
+ def test_lock_single_recipe_single_task(self):
+ recipes = ["quilt"]
+ tasks = ["do_compile"]
+ self._run_test(recipes, tasks)
+
+ def test_lock_single_recipe_multiple_tasks(self):
+ recipes = ["quilt"]
+ tasks = ["do_compile", "do_install"]
+ self._run_test(recipes, tasks)
+
+ def test_lock_single_recipe_all_tasks(self):
+ recipes = ["quilt"]
+ self._run_test(recipes, None)
+
+ def test_lock_multiple_recipe_single_task(self):
+ recipes = ["quilt", "bc"]
+ tasks = ["do_compile"]
+ self._run_test(recipes, tasks)
+
+ def test_lock_architecture_specific(self):
+ # unlock all recipes and ensure no bblock.conf file exist
+ self.unlock_recipes()
+
+ recipes = ["quilt"]
+ tasks = ["do_compile"]
+
+ # lock quilt's do_compile task for another machine
+ if self.td["MACHINE"] == "qemux86-64":
+ machine = "qemuarm"
+ else:
+ machine = "qemux86-64"
+
+ self.write_config('MACHINE = "%s"\n' % machine)
+
+ self.lock_recipes(recipes, tasks)
+
+ self.write_config('MACHINE = "%s"\n' % self.td["MACHINE"])
+ # modify quilt's do_compile task
+ self.modify_tasks(recipes, tasks)
+
+ # build quilt using the default machine
+ # No Note/Warning should be emitted since sig is locked for another machine
+ # (quilt package is architecture dependant)
+ info_message = "NOTE: The following recipes have locked tasks: " + recipes[0]
+ warn_message = "The %s:%s sig is computed to be" % (recipes[0], tasks[0])
+ result = bitbake(recipes[0] + " -n")
+ self.assertNotIn(info_message, result.output)
+ self.assertNotIn(warn_message, result.output)
+
+ # unlock all recipes
+ self.unlock_recipes()
+
+ def _run_test(self, recipes, tasks=None):
+ # unlock all recipes and ensure no bblock.conf file exist
+ self.unlock_recipes()
+
+ self.write_config('BB_SIGNATURE_HANDLER = "OEBasicHash"')
+
+ # lock tasks for recipes
+ result = self.lock_recipes(recipes, tasks)
+
+ if not tasks:
+ tasks = []
+ result = bitbake("-c listtasks " + recipes[0])
+ with bb.tinfoil.Tinfoil() as tinfoil:
+ tinfoil.prepare(config_only=False, quiet=2)
+ d = tinfoil.parse_recipe(recipes[0])
+
+ for line in result.output.splitlines():
+ if line.startswith("do_"):
+ task = line.split()[0]
+ if "setscene" in task:
+ continue
+ if d.getVarFlag(task, "nostamp"):
+ continue
+ tasks.append(task)
+
+ # build recipes. At this stage we should have a Note about recipes
+ # having locked task's sig, but no warning since sig still match
+ info_message = "NOTE: The following recipes have locked tasks: " + " ".join(
+ recipes
+ )
+ for recipe in recipes:
+ result = bitbake(recipe + " -n")
+ self.assertIn(info_message, result.output)
+ for task in tasks:
+ warn_message = "The %s:%s sig is computed to be" % (recipe, task)
+ self.assertNotIn(warn_message, result.output)
+
+ # modify all tasks that are locked to trigger a sig change then build the recipes
+ # at this stage we should have a Note as before, but also a Warning for all
+ # locked tasks indicating the sig mismatch
+ self.modify_tasks(recipes, tasks)
+ for recipe in recipes:
+ result = bitbake(recipe + " -n")
+ self.assertIn(info_message, result.output)
+ for task in tasks:
+ warn_message = "The %s:%s sig is computed to be" % (recipe, task)
+ self.assertIn(warn_message, result.output)
+
+ # unlock all tasks and rebuild, no more Note/Warning should remain
+ self.unlock_recipes(recipes, tasks)
+ for recipe in recipes:
+ result = bitbake(recipe + " -n")
+ self.assertNotIn(info_message, result.output)
+ for task in tasks:
+ warn_message = "The %s:%s sig is computed to be" % (recipe, task)
+ self.assertNotIn(warn_message, result.output)
+
+ # unlock all recipes
+ self.unlock_recipes()
diff --git a/meta/lib/oeqa/selftest/cases/bblogging.py b/meta/lib/oeqa/selftest/cases/bblogging.py
index 317e68b82f..040c6db089 100644
--- a/meta/lib/oeqa/selftest/cases/bblogging.py
+++ b/meta/lib/oeqa/selftest/cases/bblogging.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
@@ -103,16 +105,14 @@ class BitBakeLogging(OESelftestTestCase):
self.write_config('BBINCLUDELOGS = ""')
result = bitbake("logging-test -c pythontest_exit -f -v", ignore_status = True)
self.assertIn("ERROR: Logfile of failure stored in:", result.output)
- # python tasks don't log output with -v currently
- #self.assertCount(result.output, "This is python stdout", 1)
+ self.assertCount(result.output, "This is python stdout", 1)
def test_python_exit_loggingD(self):
# logs, verbose
self.write_config('BBINCLUDELOGS = "yes"')
result = bitbake("logging-test -c pythontest_exit -f -v", ignore_status = True)
self.assertIn("ERROR: Logfile of failure stored in:", result.output)
- # python tasks don't log output with -v currently
- #self.assertCount(result.output, "This is python stdout", 1)
+ self.assertCount(result.output, "This is python stdout", 1)
def test_python_exec_func_python_loggingA(self):
# no logs, no verbose
@@ -137,8 +137,7 @@ class BitBakeLogging(OESelftestTestCase):
result = bitbake("logging-test -c pythontest_exec_func_python -f -v",
ignore_status = True)
self.assertIn("ERROR: Logfile of failure stored in:", result.output)
- # python tasks don't log output with -v currently
- #self.assertCount(result.output, "This is python stdout", 1)
+ self.assertCount(result.output, "This is python stdout", 1)
def test_python_exec_func_python_loggingD(self):
# logs, verbose
@@ -146,8 +145,7 @@ class BitBakeLogging(OESelftestTestCase):
result = bitbake("logging-test -c pythontest_exec_func_python -f -v",
ignore_status = True)
self.assertIn("ERROR: Logfile of failure stored in:", result.output)
- # python tasks don't log output with -v currently
- #self.assertCount(result.output, "This is python stdout", 1)
+ self.assertCount(result.output, "This is python stdout", 1)
def test_python_fatal_loggingA(self):
# no logs, no verbose
@@ -171,8 +169,7 @@ class BitBakeLogging(OESelftestTestCase):
self.write_config('BBINCLUDELOGS = ""')
result = bitbake("logging-test -c pythontest_fatal -f -v", ignore_status = True)
self.assertIn("ERROR: Logfile of failure stored in:", result.output)
- # python tasks don't log output with -v currently
- #self.assertCount(result.output, "This is python fatal test stdout", 1)
+ self.assertCount(result.output, "This is python fatal test stdout", 1)
self.assertCount(result.output, "This is a fatal error", 1)
def test_python_fatal_loggingD(self):
@@ -180,7 +177,6 @@ class BitBakeLogging(OESelftestTestCase):
self.write_config('BBINCLUDELOGS = "yes"')
result = bitbake("logging-test -c pythontest_fatal -f -v", ignore_status = True)
self.assertIn("ERROR: Logfile of failure stored in:", result.output)
- # python tasks don't log output with -v currently
- #self.assertCount(result.output, "This is python fatal test stdout", 1)
+ self.assertCount(result.output, "This is python fatal test stdout", 1)
self.assertCount(result.output, "This is a fatal error", 1)
diff --git a/meta/lib/oeqa/selftest/cases/bbtests.py b/meta/lib/oeqa/selftest/cases/bbtests.py
index cfac7afcf4..98e9f81661 100644
--- a/meta/lib/oeqa/selftest/cases/bbtests.py
+++ b/meta/lib/oeqa/selftest/cases/bbtests.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
@@ -39,7 +41,7 @@ class BitbakeTests(OESelftestTestCase):
def test_event_handler(self):
self.write_config("INHERIT += \"test_events\"")
- result = bitbake('m4-native')
+ result = bitbake('selftest-hello-native')
find_build_started = re.search(r"NOTE: Test for bb\.event\.BuildStarted(\n.*)*NOTE: Executing.*Tasks", result.output)
find_build_completed = re.search(r"Tasks Summary:.*(\n.*)*NOTE: Test for bb\.event\.BuildCompleted", result.output)
self.assertTrue(find_build_started, msg = "Match failed in:\n%s" % result.output)
@@ -47,11 +49,11 @@ class BitbakeTests(OESelftestTestCase):
self.assertNotIn('Test for bb.event.InvalidEvent', result.output)
def test_local_sstate(self):
- bitbake('m4-native')
- bitbake('m4-native -cclean')
- result = bitbake('m4-native')
- find_setscene = re.search("m4-native.*do_.*_setscene", result.output)
- self.assertTrue(find_setscene, msg = "No \"m4-native.*do_.*_setscene\" message found during bitbake m4-native. bitbake output: %s" % result.output )
+ bitbake('selftest-hello-native')
+ bitbake('selftest-hello-native -cclean')
+ result = bitbake('selftest-hello-native')
+ find_setscene = re.search("selftest-hello-native.*do_.*_setscene", result.output)
+ self.assertTrue(find_setscene, msg = "No \"selftest-hello-native.*do_.*_setscene\" message found during bitbake selftest-hello-native. bitbake output: %s" % result.output )
def test_bitbake_invalid_recipe(self):
result = bitbake('-b asdf', ignore_status=True)
@@ -145,12 +147,10 @@ INHERIT:remove = \"report-error\"
""")
self.track_for_cleanup(os.path.join(self.builddir, "download-selftest"))
- bitbake('-ccleanall man-db')
result = bitbake('-c fetch man-db', ignore_status=True)
- bitbake('-ccleanall man-db')
self.delete_recipeinc('man-db')
self.assertEqual(result.status, 1, msg="Command succeded when it should have failed. bitbake output: %s" % result.output)
- self.assertIn('Fetcher failure: Unable to find file file://invalid anywhere. The paths that were searched were:', result.output)
+ self.assertIn('Unable to get checksum for man-db SRC_URI entry invalid: file could not be found', result.output)
def test_rename_downloaded_file(self):
# TODO unique dldir instead of using cleanall
@@ -175,7 +175,7 @@ SSTATE_DIR = \"${TOPDIR}/download-selftest\"
self.assertIn('localconf', result.output)
def test_dry_run(self):
- result = runCmd('bitbake -n m4-native')
+ result = runCmd('bitbake -n selftest-hello-native')
self.assertEqual(0, result.status, "bitbake dry run didn't run as expected. %s" % result.output)
def test_just_parse(self):
@@ -188,6 +188,10 @@ SSTATE_DIR = \"${TOPDIR}/download-selftest\"
self.assertTrue(find, "No version returned for searched recipe. bitbake output: %s" % result.output)
def test_prefile(self):
+ # Test when the prefile does not exist
+ result = runCmd('bitbake -r conf/prefile.conf', ignore_status=True)
+ self.assertEqual(1, result.status, "bitbake didn't error and should have when a specified prefile didn't exist: %s" % result.output)
+ # Test when the prefile exists
preconf = os.path.join(self.builddir, 'conf/prefile.conf')
self.track_for_cleanup(preconf)
ftools.write_file(preconf ,"TEST_PREFILE=\"prefile\"")
@@ -198,6 +202,10 @@ SSTATE_DIR = \"${TOPDIR}/download-selftest\"
self.assertIn('localconf', result.output)
def test_postfile(self):
+ # Test when the postfile does not exist
+ result = runCmd('bitbake -R conf/postfile.conf', ignore_status=True)
+ self.assertEqual(1, result.status, "bitbake didn't error and should have when a specified postfile didn't exist: %s" % result.output)
+ # Test when the postfile exists
postconf = os.path.join(self.builddir, 'conf/postfile.conf')
self.track_for_cleanup(postconf)
ftools.write_file(postconf , "TEST_POSTFILE=\"postfile\"")
@@ -224,16 +232,21 @@ INHERIT:remove = \"report-error\"
self.assertLess(errorpos,continuepos, msg = "bitbake didn't pass do_fail_task. bitbake output: %s" % result.output)
def test_non_gplv3(self):
- self.write_config('INCOMPATIBLE_LICENSE = "GPL-3.0-or-later"')
+ self.write_config('''INCOMPATIBLE_LICENSE = "GPL-3.0-or-later"
+require conf/distro/include/no-gplv3.inc
+''')
result = bitbake('selftest-ed', ignore_status=True)
self.assertEqual(result.status, 0, "Bitbake failed, exit code %s, output %s" % (result.status, result.output))
lic_dir = get_bb_var('LICENSE_DIRECTORY')
- self.assertFalse(os.path.isfile(os.path.join(lic_dir, 'selftest-ed/generic_GPL-3.0-or-later')))
- self.assertTrue(os.path.isfile(os.path.join(lic_dir, 'selftest-ed/generic_GPL-2.0-or-later')))
+ arch = get_bb_var('SSTATE_PKGARCH')
+ filename = os.path.join(lic_dir, arch, 'selftest-ed', 'generic_GPL-3.0-or-later')
+ self.assertFalse(os.path.isfile(filename), msg="License file %s exists and shouldn't" % filename)
+ filename = os.path.join(lic_dir, arch, 'selftest-ed', 'generic_GPL-2.0-or-later')
+ self.assertTrue(os.path.isfile(filename), msg="License file %s doesn't exist" % filename)
def test_setscene_only(self):
""" Bitbake option to restore from sstate only within a build (i.e. execute no real tasks, only setscene)"""
- test_recipe = 'ed'
+ test_recipe = 'selftest-hello-native'
bitbake(test_recipe)
bitbake('-c clean %s' % test_recipe)
@@ -246,7 +259,7 @@ INHERIT:remove = \"report-error\"
'Executed tasks were: %s' % (task, str(tasks)))
def test_skip_setscene(self):
- test_recipe = 'ed'
+ test_recipe = 'selftest-hello-native'
bitbake(test_recipe)
bitbake('-c clean %s' % test_recipe)
@@ -350,4 +363,15 @@ INHERIT:remove = \"report-error\"
self.write_config("DISTROOVERRIDES .= \":gitunpack-enable-recipe\"")
result = bitbake('gitunpackoffline-fail -c fetch', ignore_status=True)
- self.assertTrue("Recipe uses a floating tag/branch without a fixed SRCREV" in result.output, msg = "Recipe without PV set to SRCPV should have failed: %s" % result.output)
+ self.assertTrue(re.search("Recipe uses a floating tag/branch .* for repo .* without a fixed SRCREV yet doesn't call bb.fetch2.get_srcrev()", result.output), msg = "Recipe without PV set to SRCPV should have failed: %s" % result.output)
+
+ def test_unexpanded_variable_in_path(self):
+ """
+ Test that bitbake fails if directory contains unexpanded bitbake variable in the name
+ """
+ recipe_name = "gitunpackoffline"
+ self.write_config('PV:pn-gitunpackoffline:append = "+${UNDEFVAL}"')
+ result = bitbake('{}'.format(recipe_name), ignore_status=True)
+ self.assertGreater(result.status, 0, "Build should have failed if ${ is in the path")
+ self.assertTrue(re.search("ERROR: Directory name /.* contains unexpanded bitbake variable. This may cause build failures and WORKDIR polution",
+ result.output), msg = "mkdirhier with unexpanded variable should have failed: %s" % result.output)
diff --git a/meta/lib/oeqa/selftest/cases/binutils.py b/meta/lib/oeqa/selftest/cases/binutils.py
index 3b0b44b390..1688eabe4e 100644
--- a/meta/lib/oeqa/selftest/cases/binutils.py
+++ b/meta/lib/oeqa/selftest/cases/binutils.py
@@ -1,5 +1,10 @@
+#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
+#
import os
+import time
from oeqa.core.decorator import OETestTag
from oeqa.core.case import OEPTestResultTestCase
from oeqa.selftest.case import OESelftestTestCase
@@ -32,15 +37,19 @@ class BinutilsCrossSelfTest(OESelftestTestCase, OEPTestResultTestCase):
bb_vars = get_bb_vars(["B", "TARGET_SYS", "T"], recipe)
builddir, target_sys, tdir = bb_vars["B"], bb_vars["TARGET_SYS"], bb_vars["T"]
+ start_time = time.time()
+
bitbake("{0} -c check".format(recipe))
+ end_time = time.time()
+
sumspath = os.path.join(builddir, suite, "{0}.sum".format(suite))
if not os.path.exists(sumspath):
sumspath = os.path.join(builddir, suite, "testsuite", "{0}.sum".format(suite))
logpath = os.path.splitext(sumspath)[0] + ".log"
ptestsuite = "binutils-{}".format(suite) if suite != "binutils" else suite
- self.ptest_section(ptestsuite, logfile = logpath)
+ self.ptest_section(ptestsuite, duration = int(end_time - start_time), logfile = logpath)
with open(sumspath, "r") as f:
for test, result in parse_values(f):
self.ptest_result(ptestsuite, test, result)
diff --git a/meta/lib/oeqa/selftest/cases/buildhistory.py b/meta/lib/oeqa/selftest/cases/buildhistory.py
index d865da6252..2d55994916 100644
--- a/meta/lib/oeqa/selftest/cases/buildhistory.py
+++ b/meta/lib/oeqa/selftest/cases/buildhistory.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
diff --git a/meta/lib/oeqa/selftest/cases/buildoptions.py b/meta/lib/oeqa/selftest/cases/buildoptions.py
index ad604d6ae2..31dafaa9c5 100644
--- a/meta/lib/oeqa/selftest/cases/buildoptions.py
+++ b/meta/lib/oeqa/selftest/cases/buildoptions.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
@@ -9,8 +11,10 @@ import shutil
import tempfile
from oeqa.selftest.case import OESelftestTestCase
from oeqa.selftest.cases.buildhistory import BuildhistoryBase
+from oeqa.core.decorator.data import skipIfMachine
from oeqa.utils.commands import bitbake, get_bb_var, get_bb_vars
import oeqa.utils.ftools as ftools
+from oeqa.core.decorator import OETestTag
class ImageOptionsTests(OESelftestTestCase):
@@ -201,6 +205,7 @@ class ToolchainOptions(OESelftestTestCase):
self.write_config(features)
bitbake('fortran-helloworld')
+@OETestTag("yocto-mirrors")
class SourceMirroring(OESelftestTestCase):
# Can we download everything from the Yocto Sources Mirror over http only
def test_yocto_source_mirror(self):
diff --git a/meta/lib/oeqa/selftest/cases/c_cpp.py b/meta/lib/oeqa/selftest/cases/c_cpp.py
new file mode 100644
index 0000000000..9a70ce29f5
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/c_cpp.py
@@ -0,0 +1,60 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+from oeqa.selftest.case import OESelftestTestCase
+from oeqa.core.decorator.data import skipIfNotQemuUsermode
+from oeqa.utils.commands import bitbake
+
+
+class CCppTests(OESelftestTestCase):
+
+ @skipIfNotQemuUsermode()
+ def _qemu_usermode(self, recipe_name):
+ self.add_command_to_tearDown("bitbake -c clean %s" % recipe_name)
+ bitbake("%s -c run_tests" % recipe_name)
+
+ @skipIfNotQemuUsermode()
+ def _qemu_usermode_failing(self, recipe_name):
+ config = 'PACKAGECONFIG:pn-%s = "failing_test"' % recipe_name
+ self.write_config(config)
+ self.add_command_to_tearDown("bitbake -c clean %s" % recipe_name)
+ result = bitbake("%s -c run_tests" % recipe_name, ignore_status=True)
+ self.assertNotEqual(0, result.status, "command: %s is expected to fail but passed, status: %s, output: %s, error: %s" % (
+ result.command, result.status, result.output, result.error))
+
+
+class CMakeTests(CCppTests):
+ def test_cmake_qemu(self):
+ """Test for cmake-qemu.bbclass good case
+
+ compile the cmake-example and verify the CTests pass in qemu-user.
+ qemu-user is configured by CMAKE_CROSSCOMPILING_EMULATOR.
+ """
+ self._qemu_usermode("cmake-example")
+
+ def test_cmake_qemu_failing(self):
+ """Test for cmake-qemu.bbclass bad case
+
+ Break the comparison in the test code and verify the CTests do not pass.
+ """
+ self._qemu_usermode_failing("cmake-example")
+
+
+class MesonTests(CCppTests):
+ def test_meson_qemu(self):
+ """Test the qemu-user feature of the meson.bbclass good case
+
+ compile the meson-example and verify the Unit Test pass in qemu-user.
+ qemu-user is configured by meson's exe_wrapper option.
+ """
+ self._qemu_usermode("meson-example")
+
+ def test_meson_qemu_failing(self):
+ """Test the qemu-user feature of the meson.bbclass bad case
+
+ Break the comparison in the test code and verify the Unit Test does not pass in qemu-user.
+ """
+ self._qemu_usermode_failing("meson-example")
diff --git a/meta/lib/oeqa/selftest/cases/containerimage.py b/meta/lib/oeqa/selftest/cases/containerimage.py
index e0aea1a1ef..23c0a1408a 100644
--- a/meta/lib/oeqa/selftest/cases/containerimage.py
+++ b/meta/lib/oeqa/selftest/cases/containerimage.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
diff --git a/meta/lib/oeqa/selftest/cases/cve_check.py b/meta/lib/oeqa/selftest/cases/cve_check.py
index 2f26f606d7..60cecd1328 100644
--- a/meta/lib/oeqa/selftest/cases/cve_check.py
+++ b/meta/lib/oeqa/selftest/cases/cve_check.py
@@ -1,3 +1,9 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
import json
import os
from oeqa.selftest.case import OESelftestTestCase
@@ -48,6 +54,25 @@ class CVECheck(OESelftestTestCase):
self.assertTrue( result ,msg="Failed to compare version with suffix '1.0_patch2' < '1.0_patch3'")
+ def test_convert_cve_version(self):
+ from oe.cve_check import convert_cve_version
+
+ # Default format
+ self.assertEqual(convert_cve_version("8.3"), "8.3")
+ self.assertEqual(convert_cve_version(""), "")
+
+ # OpenSSL format version
+ self.assertEqual(convert_cve_version("1.1.1t"), "1.1.1t")
+
+ # OpenSSH format
+ self.assertEqual(convert_cve_version("8.3_p1"), "8.3p1")
+ self.assertEqual(convert_cve_version("8.3_p22"), "8.3p22")
+
+ # Linux kernel format
+ self.assertEqual(convert_cve_version("6.2_rc8"), "6.2-rc8")
+ self.assertEqual(convert_cve_version("6.2_rc31"), "6.2-rc31")
+
+
def test_recipe_report_json(self):
config = """
INHERIT += "cve-check"
@@ -117,3 +142,101 @@ CVE_CHECK_FORMAT_JSON = "1"
self.assertEqual(report["version"], "1")
self.assertEqual(len(report["package"]), 1)
self.assertEqual(report["package"][0]["name"], recipename)
+
+
+ def test_recipe_report_json_unpatched(self):
+ config = """
+INHERIT += "cve-check"
+CVE_CHECK_FORMAT_JSON = "1"
+CVE_CHECK_REPORT_PATCHED = "0"
+"""
+ self.write_config(config)
+
+ vars = get_bb_vars(["CVE_CHECK_SUMMARY_DIR", "CVE_CHECK_SUMMARY_FILE_NAME_JSON"])
+ summary_json = os.path.join(vars["CVE_CHECK_SUMMARY_DIR"], vars["CVE_CHECK_SUMMARY_FILE_NAME_JSON"])
+ recipe_json = os.path.join(vars["CVE_CHECK_SUMMARY_DIR"], "m4-native_cve.json")
+
+ try:
+ os.remove(summary_json)
+ os.remove(recipe_json)
+ except FileNotFoundError:
+ pass
+
+ bitbake("m4-native -c cve_check")
+
+ def check_m4_json(filename):
+ with open(filename) as f:
+ report = json.load(f)
+ self.assertEqual(report["version"], "1")
+ self.assertEqual(len(report["package"]), 1)
+ package = report["package"][0]
+ self.assertEqual(package["name"], "m4-native")
+ #m4 had only Patched CVEs, so the issues array will be empty
+ self.assertEqual(package["issue"], [])
+
+ self.assertExists(summary_json)
+ check_m4_json(summary_json)
+ self.assertExists(recipe_json)
+ check_m4_json(recipe_json)
+
+
+ def test_recipe_report_json_ignored(self):
+ config = """
+INHERIT += "cve-check"
+CVE_CHECK_FORMAT_JSON = "1"
+CVE_CHECK_REPORT_PATCHED = "1"
+"""
+ self.write_config(config)
+
+ vars = get_bb_vars(["CVE_CHECK_SUMMARY_DIR", "CVE_CHECK_SUMMARY_FILE_NAME_JSON"])
+ summary_json = os.path.join(vars["CVE_CHECK_SUMMARY_DIR"], vars["CVE_CHECK_SUMMARY_FILE_NAME_JSON"])
+ recipe_json = os.path.join(vars["CVE_CHECK_SUMMARY_DIR"], "logrotate_cve.json")
+
+ try:
+ os.remove(summary_json)
+ os.remove(recipe_json)
+ except FileNotFoundError:
+ pass
+
+ bitbake("logrotate -c cve_check")
+
+ def check_m4_json(filename):
+ with open(filename) as f:
+ report = json.load(f)
+ self.assertEqual(report["version"], "1")
+ self.assertEqual(len(report["package"]), 1)
+ package = report["package"][0]
+ self.assertEqual(package["name"], "logrotate")
+ found_cves = {}
+ for issue in package["issue"]:
+ found_cves[issue["id"]] = {
+ "status" : issue["status"],
+ "detail" : issue["detail"] if "detail" in issue else "",
+ "description" : issue["description"] if "description" in issue else ""
+ }
+ # m4 CVE should not be in logrotate
+ self.assertNotIn("CVE-2008-1687", found_cves)
+ # logrotate has both Patched and Ignored CVEs
+ self.assertIn("CVE-2011-1098", found_cves)
+ self.assertEqual(found_cves["CVE-2011-1098"]["status"], "Patched")
+ self.assertEqual(len(found_cves["CVE-2011-1098"]["detail"]), 0)
+ self.assertEqual(len(found_cves["CVE-2011-1098"]["description"]), 0)
+ detail = "not-applicable-platform"
+ description = "CVE is debian, gentoo or SUSE specific on the way logrotate was installed/used"
+ self.assertIn("CVE-2011-1548", found_cves)
+ self.assertEqual(found_cves["CVE-2011-1548"]["status"], "Ignored")
+ self.assertEqual(found_cves["CVE-2011-1548"]["detail"], detail)
+ self.assertEqual(found_cves["CVE-2011-1548"]["description"], description)
+ self.assertIn("CVE-2011-1549", found_cves)
+ self.assertEqual(found_cves["CVE-2011-1549"]["status"], "Ignored")
+ self.assertEqual(found_cves["CVE-2011-1549"]["detail"], detail)
+ self.assertEqual(found_cves["CVE-2011-1549"]["description"], description)
+ self.assertIn("CVE-2011-1550", found_cves)
+ self.assertEqual(found_cves["CVE-2011-1550"]["status"], "Ignored")
+ self.assertEqual(found_cves["CVE-2011-1550"]["detail"], detail)
+ self.assertEqual(found_cves["CVE-2011-1550"]["description"], description)
+
+ self.assertExists(summary_json)
+ check_m4_json(summary_json)
+ self.assertExists(recipe_json)
+ check_m4_json(recipe_json)
diff --git a/meta/lib/oeqa/selftest/cases/debuginfod.py b/meta/lib/oeqa/selftest/cases/debuginfod.py
new file mode 100644
index 0000000000..505b4be837
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/debuginfod.py
@@ -0,0 +1,158 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+import os
+import socketserver
+import subprocess
+import time
+import urllib
+import pathlib
+
+from oeqa.core.decorator import OETestTag
+from oeqa.selftest.case import OESelftestTestCase
+from oeqa.utils.commands import bitbake, get_bb_var, runqemu
+
+
+class Debuginfod(OESelftestTestCase):
+
+ def wait_for_debuginfod(self, port):
+ """
+ debuginfod takes time to scan the packages and requesting too early may
+ result in a test failure if the right packages haven't been scanned yet.
+
+ Request the metrics endpoint periodically and wait for there to be no
+ busy scanning threads.
+
+ Returns if debuginfod is ready, raises an exception if not within the
+ timeout.
+ """
+
+ # Wait two minutes
+ countdown = 24
+ delay = 5
+ latest = None
+
+ while countdown:
+ self.logger.info("waiting...")
+ time.sleep(delay)
+
+ self.logger.info("polling server")
+ if self.debuginfod.poll():
+ self.logger.info("server dead")
+ self.debuginfod.communicate()
+ self.fail("debuginfod terminated unexpectedly")
+ self.logger.info("server alive")
+
+ try:
+ with urllib.request.urlopen("http://localhost:%d/metrics" % port, timeout=10) as f:
+ for line in f.read().decode("ascii").splitlines():
+ key, value = line.rsplit(" ", 1)
+ if key == "thread_busy{role=\"scan\"}":
+ latest = int(value)
+ self.logger.info("Waiting for %d scan jobs to finish" % latest)
+ if latest == 0:
+ return
+ except urllib.error.URLError as e:
+ # TODO: how to catch just timeouts?
+ self.logger.error(e)
+
+ countdown -= 1
+
+ raise TimeoutError("Cannot connect debuginfod, still %d scan jobs running" % latest)
+
+ def start_debuginfod(self):
+ # We assume that the caller has already bitbake'd elfutils-native:do_addto_recipe_sysroot
+
+ # Save some useful paths for later
+ native_sysroot = pathlib.Path(get_bb_var("RECIPE_SYSROOT_NATIVE", "elfutils-native"))
+ native_bindir = native_sysroot / "usr" / "bin"
+ self.debuginfod = native_bindir / "debuginfod"
+ self.debuginfod_find = native_bindir / "debuginfod-find"
+
+ cmd = [
+ self.debuginfod,
+ "--verbose",
+ # In-memory database, this is a one-shot test
+ "--database=:memory:",
+ # Don't use all the host cores
+ "--concurrency=8",
+ "--connection-pool=8",
+ # Disable rescanning, this is a one-shot test
+ "--rescan-time=0",
+ "--groom-time=0",
+ get_bb_var("DEPLOY_DIR"),
+ ]
+
+ format = get_bb_var("PACKAGE_CLASSES").split()[0]
+ if format == "package_deb":
+ cmd.append("--scan-deb-dir")
+ elif format == "package_ipk":
+ cmd.append("--scan-deb-dir")
+ elif format == "package_rpm":
+ cmd.append("--scan-rpm-dir")
+ else:
+ self.fail("Unknown package class %s" % format)
+
+ # Find a free port. Racey but the window is small.
+ with socketserver.TCPServer(("localhost", 0), None) as s:
+ self.port = s.server_address[1]
+ cmd.append("--port=%d" % self.port)
+
+ self.logger.info(f"Starting server {cmd}")
+ self.debuginfod = subprocess.Popen(cmd, env={})
+ self.wait_for_debuginfod(self.port)
+
+
+ def test_debuginfod_native(self):
+ """
+ Test debuginfod outside of qemu, by building a package and looking up a
+ binary's debuginfo using elfutils-native.
+ """
+
+ self.write_config("""
+TMPDIR = "${TOPDIR}/tmp-debuginfod"
+DISTRO_FEATURES:append = " debuginfod"
+""")
+ bitbake("elfutils-native:do_addto_recipe_sysroot xz xz:do_package")
+
+ try:
+ self.start_debuginfod()
+
+ env = os.environ.copy()
+ env["DEBUGINFOD_URLS"] = "http://localhost:%d/" % self.port
+
+ pkgs = pathlib.Path(get_bb_var("PKGDEST", "xz"))
+ cmd = (self.debuginfod_find, "debuginfo", pkgs / "xz" / "usr" / "bin" / "xz.xz")
+ self.logger.info(f"Starting client {cmd}")
+ output = subprocess.check_output(cmd, env=env, text=True)
+ # This should be more comprehensive
+ self.assertIn("/.cache/debuginfod_client/", output)
+ finally:
+ self.debuginfod.kill()
+
+ @OETestTag("runqemu")
+ def test_debuginfod_qemu(self):
+ """
+ Test debuginfod-find inside a qemu, talking to a debuginfod on the host.
+ """
+
+ self.write_config("""
+TMPDIR = "${TOPDIR}/tmp-debuginfod"
+DISTRO_FEATURES:append = " debuginfod"
+CORE_IMAGE_EXTRA_INSTALL += "elfutils xz"
+ """)
+ bitbake("core-image-minimal elfutils-native:do_addto_recipe_sysroot")
+
+ try:
+ self.start_debuginfod()
+
+ with runqemu("core-image-minimal", runqemuparams="nographic") as qemu:
+ cmd = "DEBUGINFOD_URLS=http://%s:%d/ debuginfod-find debuginfo /usr/bin/xz" % (qemu.server_ip, self.port)
+ self.logger.info(f"Starting client {cmd}")
+ status, output = qemu.run_serial(cmd)
+ # This should be more comprehensive
+ self.assertIn("/.cache/debuginfod_client/", output)
+ finally:
+ self.debuginfod.kill()
diff --git a/meta/lib/oeqa/selftest/cases/devtool.py b/meta/lib/oeqa/selftest/cases/devtool.py
index 3eea2b1a0e..bc1e40ef83 100644
--- a/meta/lib/oeqa/selftest/cases/devtool.py
+++ b/meta/lib/oeqa/selftest/cases/devtool.py
@@ -1,13 +1,18 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
+import errno
import os
import re
import shutil
import tempfile
import glob
import fnmatch
+import unittest
+import json
from oeqa.selftest.case import OESelftestTestCase
from oeqa.utils.commands import runCmd, bitbake, get_bb_var, create_temp_layer
@@ -24,6 +29,9 @@ def setUpModule():
corecopydir = os.path.join(templayerdir, 'core-copy')
bblayers_conf = os.path.join(os.environ['BUILDDIR'], 'conf', 'bblayers.conf')
edited_layers = []
+ # make sure user doesn't have a local workspace
+ result = runCmd('bitbake-layers show-layers')
+ assert "workspacelayer" not in result.output, "Devtool test suite cannot be run with a local workspace directory"
# We need to take a copy of the meta layer so we can modify it and not
# have any races against other tests that might be running in parallel
@@ -38,10 +46,17 @@ def setUpModule():
canonical_layerpath = os.path.realpath(canonical_layerpath) + '/'
edited_layers.append(layerpath)
oldmetapath = os.path.realpath(layerpath)
+
+ # when downloading poky from tar.gz some tests will be skipped (BUG 12389)
+ try:
+ runCmd('git rev-parse --is-inside-work-tree', cwd=canonical_layerpath)
+ except:
+ raise unittest.SkipTest("devtool tests require folder to be a git repo")
+
result = runCmd('git rev-parse --show-toplevel', cwd=canonical_layerpath)
oldreporoot = result.output.rstrip()
newmetapath = os.path.join(corecopydir, os.path.relpath(oldmetapath, oldreporoot))
- runCmd('git clone %s %s' % (oldreporoot, corecopydir), cwd=templayerdir)
+ runCmd('git clone file://%s %s' % (oldreporoot, corecopydir), cwd=templayerdir)
# Now we need to copy any modified files
# You might ask "why not just copy the entire tree instead of
# cloning and doing this?" - well, the problem with that is
@@ -218,6 +233,75 @@ class DevtoolTestCase(OESelftestTestCase):
filelist.append(' '.join(splitline))
return filelist
+ def _check_diff(self, diffoutput, addlines, removelines):
+ """Check output from 'git diff' matches expectation"""
+ remaining_addlines = addlines[:]
+ remaining_removelines = removelines[:]
+ for line in diffoutput.splitlines():
+ if line.startswith('+++') or line.startswith('---'):
+ continue
+ elif line.startswith('+'):
+ matched = False
+ for item in addlines:
+ if re.match(item, line[1:].strip()):
+ matched = True
+ remaining_addlines.remove(item)
+ break
+ self.assertTrue(matched, 'Unexpected diff add line: %s' % line)
+ elif line.startswith('-'):
+ matched = False
+ for item in removelines:
+ if re.match(item, line[1:].strip()):
+ matched = True
+ remaining_removelines.remove(item)
+ break
+ self.assertTrue(matched, 'Unexpected diff remove line: %s' % line)
+ if remaining_addlines:
+ self.fail('Expected added lines not found: %s' % remaining_addlines)
+ if remaining_removelines:
+ self.fail('Expected removed lines not found: %s' % remaining_removelines)
+
+ def _check_runqemu_prerequisites(self):
+ """Check runqemu is available
+
+ Whilst some tests would seemingly be better placed as a runtime test,
+ unfortunately the runtime tests run under bitbake and you can't run
+ devtool within bitbake (since devtool needs to run bitbake itself).
+ Additionally we are testing build-time functionality as well, so
+ really this has to be done as an oe-selftest test.
+ """
+ machine = get_bb_var('MACHINE')
+ if not machine.startswith('qemu'):
+ self.skipTest('This test only works with qemu machines')
+ if not os.path.exists('/etc/runqemu-nosudo'):
+ self.skipTest('You must set up tap devices with scripts/runqemu-gen-tapdevs before running this test')
+ result = runCmd('PATH="$PATH:/sbin:/usr/sbin" ip tuntap show', ignore_status=True)
+ if result.status != 0:
+ result = runCmd('PATH="$PATH:/sbin:/usr/sbin" ifconfig -a', ignore_status=True)
+ if result.status != 0:
+ self.skipTest('Failed to determine if tap devices exist with ifconfig or ip: %s' % result.output)
+ for line in result.output.splitlines():
+ if line.startswith('tap'):
+ break
+ else:
+ self.skipTest('No tap devices found - you must set up tap devices with scripts/runqemu-gen-tapdevs before running this test')
+
+ def _test_devtool_add_git_url(self, git_url, version, pn, resulting_src_uri):
+ self.track_for_cleanup(self.workspacedir)
+ self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
+ result = runCmd('devtool add --version %s %s %s' % (version, pn, git_url))
+ self.assertExists(os.path.join(self.workspacedir, 'conf', 'layer.conf'), 'Workspace directory not created')
+ # Check the recipe name is correct
+ recipefile = get_bb_var('FILE', pn)
+ self.assertIn('%s_git.bb' % pn, recipefile, 'Recipe file incorrectly named')
+ self.assertIn(recipefile, result.output)
+ # Test devtool status
+ result = runCmd('devtool status')
+ self.assertIn(pn, result.output)
+ self.assertIn(recipefile, result.output)
+ checkvars = {}
+ checkvars['SRC_URI'] = resulting_src_uri
+ self._test_recipe_contents(recipefile, checkvars, [])
class DevtoolBase(DevtoolTestCase):
@@ -230,6 +314,7 @@ class DevtoolBase(DevtoolTestCase):
cls.sstate_conf = 'SSTATE_DIR = "%s"\n' % cls.devtool_sstate
cls.sstate_conf += ('SSTATE_MIRRORS += "file://.* file:///%s/PATH"\n'
% cls.original_sstate)
+ cls.sstate_conf += ('BB_HASHSERVE_UPSTREAM = "hashserv.yocto.io:8687"\n')
@classmethod
def tearDownClass(cls):
@@ -311,6 +396,38 @@ class DevtoolAddTests(DevtoolBase):
bindir = bindir[1:]
self.assertTrue(os.path.isfile(os.path.join(installdir, bindir, 'pv')), 'pv binary not found in D')
+ def test_devtool_add_binary(self):
+ # Create a binary package containing a known test file
+ tempdir = tempfile.mkdtemp(prefix='devtoolqa')
+ self.track_for_cleanup(tempdir)
+ pn = 'tst-bin'
+ pv = '1.0'
+ test_file_dir = "var/lib/%s/" % pn
+ test_file_name = "test_file"
+ test_file_content = "TEST CONTENT"
+ test_file_package_root = os.path.join(tempdir, pn)
+ test_file_dir_full = os.path.join(test_file_package_root, test_file_dir)
+ bb.utils.mkdirhier(test_file_dir_full)
+ with open(os.path.join(test_file_dir_full, test_file_name), "w") as f:
+ f.write(test_file_content)
+ bin_package_path = os.path.join(tempdir, "%s.tar.gz" % pn)
+ runCmd("tar czf %s -C %s ." % (bin_package_path, test_file_package_root))
+
+ # Test devtool add -b on the binary package
+ self.track_for_cleanup(self.workspacedir)
+ self.add_command_to_tearDown('bitbake -c cleansstate %s' % pn)
+ self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
+ result = runCmd('devtool add -b %s %s' % (pn, bin_package_path))
+ self.assertExists(os.path.join(self.workspacedir, 'conf', 'layer.conf'), 'Workspace directory not created')
+
+ # Build the resulting recipe
+ result = runCmd('devtool build %s' % pn)
+ installdir = get_bb_var('D', pn)
+ self.assertTrue(installdir, 'Could not query installdir variable')
+
+ # Check that a known file from the binary package has indeed been installed
+ self.assertTrue(os.path.isfile(os.path.join(installdir, test_file_dir, test_file_name)), '%s not found in D' % test_file_name)
+
def test_devtool_add_git_local(self):
# We need dbus built so that DEPENDS recognition works
bitbake('dbus')
@@ -346,12 +463,28 @@ class DevtoolAddTests(DevtoolBase):
checkvars['LICENSE'] = 'GPL-2.0-only'
checkvars['LIC_FILES_CHKSUM'] = 'file://COPYING;md5=b234ee4d69f5fce4486a80fdaf4a4263'
checkvars['S'] = '${WORKDIR}/git'
- checkvars['PV'] = '0.1+git${SRCPV}'
+ checkvars['PV'] = '0.1+git'
checkvars['SRC_URI'] = 'git://git.yoctoproject.org/git/dbus-wait;protocol=https;branch=master'
checkvars['SRCREV'] = srcrev
checkvars['DEPENDS'] = set(['dbus'])
self._test_recipe_contents(recipefile, checkvars, [])
+ def test_devtool_add_git_style1(self):
+ version = 'v3.1.0'
+ pn = 'mbedtls'
+ # this will trigger reformat_git_uri with branch parameter in url
+ git_url = "'git://git@github.com/ARMmbed/mbedtls.git;branch=mbedtls-2.28;protocol=https'"
+ resulting_src_uri = "git://git@github.com/ARMmbed/mbedtls.git;branch=mbedtls-2.28;protocol=https"
+ self._test_devtool_add_git_url(git_url, version, pn, resulting_src_uri)
+
+ def test_devtool_add_git_style2(self):
+ version = 'v3.1.0'
+ pn = 'mbedtls'
+ # this will trigger reformat_git_uri with branch parameter in url
+ git_url = "'git://git@github.com/ARMmbed/mbedtls.git;protocol=https'"
+ resulting_src_uri = "gitsm://git@github.com/ARMmbed/mbedtls.git;protocol=https;branch=master"
+ self._test_devtool_add_git_url(git_url, version, pn, resulting_src_uri)
+
def test_devtool_add_library(self):
# Fetch source
tempdir = tempfile.mkdtemp(prefix='devtoolqa')
@@ -412,7 +545,7 @@ class DevtoolAddTests(DevtoolBase):
self.track_for_cleanup(self.workspacedir)
self.add_command_to_tearDown('bitbake -c cleansstate %s' % testrecipe)
self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
- result = runCmd('devtool add %s %s -f %s' % (testrecipe, srcdir, url))
+ result = runCmd('devtool add --no-pypi %s %s -f %s' % (testrecipe, srcdir, url))
self.assertExists(os.path.join(self.workspacedir, 'conf', 'layer.conf'), 'Workspace directory not created. %s' % result.output)
self.assertTrue(os.path.isfile(os.path.join(srcdir, 'setup.py')), 'Unable to find setup.py in source directory')
self.assertTrue(os.path.isdir(os.path.join(srcdir, '.git')), 'git repository for external source tree was not created')
@@ -431,7 +564,7 @@ class DevtoolAddTests(DevtoolBase):
result = runCmd('devtool reset -n %s' % testrecipe)
shutil.rmtree(srcdir)
fakever = '1.9'
- result = runCmd('devtool add %s %s -f %s -V %s' % (testrecipe, srcdir, url, fakever))
+ result = runCmd('devtool add --no-pypi %s %s -f %s -V %s' % (testrecipe, srcdir, url, fakever))
self.assertTrue(os.path.isfile(os.path.join(srcdir, 'setup.py')), 'Unable to find setup.py in source directory')
# Test devtool status
result = runCmd('devtool status')
@@ -469,7 +602,7 @@ class DevtoolAddTests(DevtoolBase):
self.assertIn('_git.bb', recipefile, 'Recipe file incorrectly named')
checkvars = {}
checkvars['S'] = '${WORKDIR}/git'
- checkvars['PV'] = '1.0+git${SRCPV}'
+ checkvars['PV'] = '1.0+git'
checkvars['SRC_URI'] = url_branch
checkvars['SRCREV'] = '${AUTOREV}'
self._test_recipe_contents(recipefile, checkvars, [])
@@ -488,7 +621,7 @@ class DevtoolAddTests(DevtoolBase):
self.assertIn('_git.bb', recipefile, 'Recipe file incorrectly named')
checkvars = {}
checkvars['S'] = '${WORKDIR}/git'
- checkvars['PV'] = '1.5+git${SRCPV}'
+ checkvars['PV'] = '1.5+git'
checkvars['SRC_URI'] = url_branch
checkvars['SRCREV'] = checkrev
self._test_recipe_contents(recipefile, checkvars, [])
@@ -512,7 +645,7 @@ class DevtoolAddTests(DevtoolBase):
result = runCmd('devtool status')
self.assertIn(testrecipe, result.output)
self.assertIn(srcdir, result.output)
- # Check recipe
+ # Check recipedevtool add
recipefile = get_bb_var('FILE', testrecipe)
self.assertIn('%s_%s.bb' % (testrecipe, testver), recipefile, 'Recipe file incorrectly named')
checkvars = {}
@@ -544,6 +677,19 @@ class DevtoolAddTests(DevtoolBase):
# Test devtool build
result = runCmd('devtool build %s' % pn)
+ def test_devtool_add_python_egg_requires(self):
+ # Fetch source
+ tempdir = tempfile.mkdtemp(prefix='devtoolqa')
+ self.track_for_cleanup(tempdir)
+ testver = '0.14.0'
+ url = 'https://files.pythonhosted.org/packages/e9/9e/25d59f5043cf763833b2581c8027fa92342c4cf8ee523b498ecdf460c16d/uvicorn-%s.tar.gz' % testver
+ testrecipe = 'python3-uvicorn'
+ srcdir = os.path.join(tempdir, testrecipe)
+ # Test devtool add
+ self.track_for_cleanup(self.workspacedir)
+ self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
+ result = runCmd('devtool add %s %s -f %s' % (testrecipe, srcdir, url))
+
class DevtoolModifyTests(DevtoolBase):
def test_devtool_modify(self):
@@ -705,6 +851,7 @@ class DevtoolModifyTests(DevtoolBase):
self.assertTrue(bbclassextended, 'None of these recipes are BBCLASSEXTENDed to native - need to adjust testrecipes list: %s' % ', '.join(testrecipes))
self.assertTrue(inheritnative, 'None of these recipes do "inherit native" - need to adjust testrecipes list: %s' % ', '.join(testrecipes))
+
def test_devtool_modify_localfiles_only(self):
# Check preconditions
testrecipe = 'base-files'
@@ -771,6 +918,122 @@ class DevtoolModifyTests(DevtoolBase):
# Try building
bitbake(testrecipe)
+ def test_devtool_modify_git_no_extract(self):
+ # Check preconditions
+ testrecipe = 'psplash'
+ src_uri = get_bb_var('SRC_URI', testrecipe)
+ self.assertIn('git://', src_uri, 'This test expects the %s recipe to be a git recipe' % testrecipe)
+ # Clean up anything in the workdir/sysroot/sstate cache
+ bitbake('%s -c cleansstate' % testrecipe)
+ # Try modifying a recipe
+ tempdir = tempfile.mkdtemp(prefix='devtoolqa')
+ self.track_for_cleanup(tempdir)
+ self.track_for_cleanup(self.workspacedir)
+ self.add_command_to_tearDown('bitbake -c clean %s' % testrecipe)
+ self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
+ result = runCmd('git clone https://git.yoctoproject.org/psplash %s && devtool modify -n %s %s' % (tempdir, testrecipe, tempdir))
+ self.assertExists(os.path.join(self.workspacedir, 'conf', 'layer.conf'), 'Workspace directory not created. devtool output: %s' % result.output)
+ matches = glob.glob(os.path.join(self.workspacedir, 'appends', 'psplash_*.bbappend'))
+ self.assertTrue(matches, 'bbappend not created')
+ # Test devtool status
+ result = runCmd('devtool status')
+ self.assertIn(testrecipe, result.output)
+ self.assertIn(tempdir, result.output)
+
+ def test_devtool_modify_git_crates_subpath(self):
+ # This tests two things in devtool context:
+ # - that we support local git dependencies for cargo based recipe
+ # - that we support patches in SRC_URI when git url contains subpath parameter
+
+ # Check preconditions:
+ # recipe inherits cargo
+ # git:// uri with a subpath as the main package
+ # some crate:// in SRC_URI
+ # others git:// in SRC_URI
+ # cointains a patch
+ testrecipe = 'hello-rs'
+ bb_vars = get_bb_vars(['SRC_URI', 'FILE', 'WORKDIR', 'CARGO_HOME'], testrecipe)
+ recipefile = bb_vars['FILE']
+ workdir = bb_vars['WORKDIR']
+ cargo_home = bb_vars['CARGO_HOME']
+ src_uri = bb_vars['SRC_URI'].split()
+ self.assertTrue(src_uri[0].startswith('git://'),
+ 'This test expects the %s recipe to have a git repo has its main uri' % testrecipe)
+ self.assertIn(';subpath=', src_uri[0],
+ 'This test expects the %s recipe to have a git uri with subpath' % testrecipe)
+ self.assertTrue(any([uri.startswith('crate://') for uri in src_uri]),
+ 'This test expects the %s recipe to have some crates in its src uris' % testrecipe)
+ self.assertGreaterEqual(sum(map(lambda x:x.startswith('git://'), src_uri)), 2,
+ 'This test expects the %s recipe to have several git:// uris' % testrecipe)
+ self.assertTrue(any([uri.startswith('file://') and '.patch' in uri for uri in src_uri]),
+ 'This test expects the %s recipe to have a patch in its src uris' % testrecipe)
+
+ self._test_recipe_contents(recipefile, {}, ['ptest-cargo'])
+
+ # Clean up anything in the workdir/sysroot/sstate cache
+ bitbake('%s -c cleansstate' % testrecipe)
+ # Try modifying a recipe
+ tempdir = tempfile.mkdtemp(prefix='devtoolqa')
+ self.track_for_cleanup(tempdir)
+ self.track_for_cleanup(self.workspacedir)
+ self.add_command_to_tearDown('bitbake -c clean %s' % testrecipe)
+ self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
+ result = runCmd('devtool modify %s -x %s' % (testrecipe, tempdir))
+ self.assertExists(os.path.join(tempdir, 'Cargo.toml'), 'Extracted source could not be found')
+ self.assertExists(os.path.join(self.workspacedir, 'conf', 'layer.conf'), 'Workspace directory not created. devtool output: %s' % result.output)
+ matches = glob.glob(os.path.join(self.workspacedir, 'appends', '%s_*.bbappend' % testrecipe))
+ self.assertTrue(matches, 'bbappend not created')
+ # Test devtool status
+ result = runCmd('devtool status')
+ self.assertIn(testrecipe, result.output)
+ self.assertIn(tempdir, result.output)
+ # Check git repo
+ self._check_src_repo(tempdir)
+ # Check that the patch is correctly applied.
+ # The last commit message in the tree must contain the following note:
+ # Notes (devtool):
+ # original patch: <patchname>
+ # ..
+ patchname = None
+ for uri in src_uri:
+ if uri.startswith('file://') and '.patch' in uri:
+ patchname = uri.replace("file://", "").partition('.patch')[0] + '.patch'
+ self.assertIsNotNone(patchname)
+ result = runCmd('git -C %s log -1' % tempdir)
+ self.assertIn("Notes (devtool):\n original patch: %s" % patchname, result.output)
+
+ # Configure the recipe to check that the git dependencies are correctly patched in cargo config
+ bitbake('-c configure %s' % testrecipe)
+
+ cargo_config_path = os.path.join(cargo_home, 'config')
+ with open(cargo_config_path, "r") as f:
+ cargo_config_contents = [line.strip('\n') for line in f.readlines()]
+
+ # Get back git dependencies of the recipe (ignoring the main one)
+ # and check that they are all correctly patched to be fetched locally
+ git_deps = [uri for uri in src_uri if uri.startswith("git://")][1:]
+ for git_dep in git_deps:
+ raw_url, _, raw_parms = git_dep.partition(";")
+ parms = {}
+ for parm in raw_parms.split(";"):
+ name_parm, _, value_parm = parm.partition('=')
+ parms[name_parm]=value_parm
+ self.assertIn('protocol', parms, 'git dependencies uri should contain the "protocol" parameter')
+ self.assertIn('name', parms, 'git dependencies uri should contain the "name" parameter')
+ self.assertIn('destsuffix', parms, 'git dependencies uri should contain the "destsuffix" parameter')
+ self.assertIn('type', parms, 'git dependencies uri should contain the "type" parameter')
+ self.assertEqual(parms['type'], 'git-dependency', 'git dependencies uri should have "type=git-dependency"')
+ raw_url = raw_url.replace("git://", '%s://' % parms['protocol'])
+ patch_line = '[patch."%s"]' % raw_url
+ path_patched = os.path.join(workdir, parms['destsuffix'])
+ path_override_line = '%s = { path = "%s" }' % (parms['name'], path_patched)
+ # Would have been better to use tomllib to read this file :/
+ self.assertIn(patch_line, cargo_config_contents)
+ self.assertIn(path_override_line, cargo_config_contents)
+
+ # Try to package the recipe
+ bitbake('-c package_qa %s' % testrecipe)
+
def test_devtool_modify_localfiles(self):
# Check preconditions
testrecipe = 'lighttpd'
@@ -836,12 +1099,43 @@ class DevtoolModifyTests(DevtoolBase):
runCmd('git -C %s checkout %s' % (tempdir, branch))
with open(source, "rt") as f:
content = f.read()
- self.assertEquals(content, expected)
- check('devtool', 'This is a test for something\n')
+ self.assertEqual(content, expected)
+ if self.td["MACHINE"] == "qemux86":
+ check('devtool', 'This is a test for qemux86\n')
+ elif self.td["MACHINE"] == "qemuarm":
+ check('devtool', 'This is a test for qemuarm\n')
+ else:
+ check('devtool', 'This is a test for something\n')
check('devtool-no-overrides', 'This is a test for something\n')
check('devtool-override-qemuarm', 'This is a test for qemuarm\n')
check('devtool-override-qemux86', 'This is a test for qemux86\n')
+ def test_devtool_modify_multiple_sources(self):
+ # This test check that recipes fetching several sources can be used with devtool modify/build
+ # Check preconditions
+ testrecipe = 'bzip2'
+ src_uri = get_bb_var('SRC_URI', testrecipe)
+ src1 = 'https://' in src_uri
+ src2 = 'git://' in src_uri
+ self.assertTrue(src1 and src2, 'This test expects the %s recipe to fetch both a git source and a tarball and it seems that it no longer does' % testrecipe)
+ # Clean up anything in the workdir/sysroot/sstate cache
+ bitbake('%s -c cleansstate' % testrecipe)
+ # Try modifying a recipe
+ tempdir = tempfile.mkdtemp(prefix='devtoolqa')
+ self.track_for_cleanup(tempdir)
+ self.track_for_cleanup(self.workspacedir)
+ self.add_command_to_tearDown('bitbake -c clean %s' % testrecipe)
+ self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
+ result = runCmd('devtool modify %s -x %s' % (testrecipe, tempdir))
+ self.assertEqual(result.status, 0, "Could not modify recipe %s. Output: %s" % (testrecipe, result.output))
+ # Test devtool status
+ result = runCmd('devtool status')
+ self.assertIn(testrecipe, result.output)
+ self.assertIn(tempdir, result.output)
+ # Try building
+ result = bitbake(testrecipe)
+ self.assertEqual(result.status, 0, "Bitbake failed, exit code %s, output %s" % (result.status, result.output))
+
class DevtoolUpdateTests(DevtoolBase):
def test_devtool_update_recipe(self):
@@ -871,14 +1165,15 @@ class DevtoolUpdateTests(DevtoolBase):
result = runCmd('git commit -m "Add a new file"', cwd=tempdir)
self.add_command_to_tearDown('cd %s; rm %s/*.patch; git checkout %s %s' % (os.path.dirname(recipefile), testrecipe, testrecipe, os.path.basename(recipefile)))
result = runCmd('devtool update-recipe %s' % testrecipe)
+ result = runCmd('git add minicom', cwd=os.path.dirname(recipefile))
expected_status = [(' M', '.*/%s$' % os.path.basename(recipefile)),
- ('??', '.*/0001-Change-the-README.patch$'),
- ('??', '.*/0002-Add-a-new-file.patch$')]
+ ('A ', '.*/0001-Change-the-README.patch$'),
+ ('A ', '.*/0002-Add-a-new-file.patch$')]
self._check_repo_status(os.path.dirname(recipefile), expected_status)
def test_devtool_update_recipe_git(self):
# Check preconditions
- testrecipe = 'mtd-utils'
+ testrecipe = 'mtd-utils-selftest'
bb_vars = get_bb_vars(['FILE', 'SRC_URI'], testrecipe)
recipefile = bb_vars['FILE']
src_uri = bb_vars['SRC_URI']
@@ -917,23 +1212,7 @@ class DevtoolUpdateTests(DevtoolBase):
srcurilines[0] = 'SRC_URI = "' + srcurilines[0]
srcurilines.append('"')
removelines = ['SRCREV = ".*"'] + srcurilines
- for line in result.output.splitlines():
- if line.startswith('+++') or line.startswith('---'):
- continue
- elif line.startswith('+'):
- matched = False
- for item in addlines:
- if re.match(item, line[1:].strip()):
- matched = True
- break
- self.assertTrue(matched, 'Unexpected diff add line: %s' % line)
- elif line.startswith('-'):
- matched = False
- for item in removelines:
- if re.match(item, line[1:].strip()):
- matched = True
- break
- self.assertTrue(matched, 'Unexpected diff remove line: %s' % line)
+ self._check_diff(result.output, addlines, removelines)
# Now try with auto mode
runCmd('cd %s; git checkout %s %s' % (os.path.dirname(recipefile), testrecipe, os.path.basename(recipefile)))
result = runCmd('devtool update-recipe %s' % testrecipe)
@@ -1015,10 +1294,11 @@ class DevtoolUpdateTests(DevtoolBase):
def test_devtool_update_recipe_append_git(self):
# Check preconditions
- testrecipe = 'mtd-utils'
- bb_vars = get_bb_vars(['FILE', 'SRC_URI'], testrecipe)
+ testrecipe = 'mtd-utils-selftest'
+ bb_vars = get_bb_vars(['FILE', 'SRC_URI', 'LAYERSERIES_CORENAMES'], testrecipe)
recipefile = bb_vars['FILE']
src_uri = bb_vars['SRC_URI']
+ corenames = bb_vars['LAYERSERIES_CORENAMES']
self.assertIn('git://', src_uri, 'This test expects the %s recipe to be a git recipe' % testrecipe)
for entry in src_uri.split():
if entry.startswith('git://'):
@@ -1049,7 +1329,7 @@ class DevtoolUpdateTests(DevtoolBase):
f.write('BBFILE_PATTERN_oeselftesttemplayer = "^${LAYERDIR}/"\n')
f.write('BBFILE_PRIORITY_oeselftesttemplayer = "999"\n')
f.write('BBFILE_PATTERN_IGNORE_EMPTY_oeselftesttemplayer = "1"\n')
- f.write('LAYERSERIES_COMPAT_oeselftesttemplayer = "${LAYERSERIES_COMPAT_core}"\n')
+ f.write('LAYERSERIES_COMPAT_oeselftesttemplayer = "%s"\n' % corenames)
self.add_command_to_tearDown('bitbake-layers remove-layer %s || true' % templayerdir)
result = runCmd('bitbake-layers add-layer %s' % templayerdir, cwd=self.builddir)
# Create the bbappend
@@ -1267,7 +1547,7 @@ class DevtoolUpdateTests(DevtoolBase):
# Modify one file
srctree = os.path.join(self.workspacedir, 'sources', testrecipe)
runCmd('echo "Another line" >> README', cwd=srctree)
- runCmd('git commit -a --amend --no-edit', cwd=srctree)
+ runCmd('git commit -a --amend --no-edit --no-verify', cwd=srctree)
self.add_command_to_tearDown('cd %s; rm %s/*; git checkout %s %s' % (os.path.dirname(recipefile), testrecipe, testrecipe, os.path.basename(recipefile)))
result = runCmd('devtool update-recipe %s' % testrecipe)
expected_status = [(' M', '.*/%s/readme.patch.gz$' % testrecipe)]
@@ -1303,6 +1583,121 @@ class DevtoolUpdateTests(DevtoolBase):
expected_status = []
self._check_repo_status(os.path.dirname(recipefile), expected_status)
+ def test_devtool_finish_modify_git_subdir(self):
+ # Check preconditions
+ testrecipe = 'dos2unix'
+ self.append_config('ERROR_QA:remove:pn-dos2unix = "patch-status"\n')
+ bb_vars = get_bb_vars(['SRC_URI', 'S', 'WORKDIR', 'FILE'], testrecipe)
+ self.assertIn('git://', bb_vars['SRC_URI'], 'This test expects the %s recipe to be a git recipe' % testrecipe)
+ workdir_git = '%s/git/' % bb_vars['WORKDIR']
+ if not bb_vars['S'].startswith(workdir_git):
+ self.fail('This test expects the %s recipe to be building from a subdirectory of the git repo' % testrecipe)
+ subdir = bb_vars['S'].split(workdir_git, 1)[1]
+ # Clean up anything in the workdir/sysroot/sstate cache
+ bitbake('%s -c cleansstate' % testrecipe)
+ # Try modifying a recipe
+ tempdir = tempfile.mkdtemp(prefix='devtoolqa')
+ self.track_for_cleanup(tempdir)
+ self.track_for_cleanup(self.workspacedir)
+ self.add_command_to_tearDown('bitbake -c clean %s' % testrecipe)
+ self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
+ result = runCmd('devtool modify %s -x %s' % (testrecipe, tempdir))
+ testsrcfile = os.path.join(tempdir, subdir, 'dos2unix.c')
+ self.assertExists(testsrcfile, 'Extracted source could not be found')
+ self.assertExists(os.path.join(self.workspacedir, 'conf', 'layer.conf'), 'Workspace directory not created. devtool output: %s' % result.output)
+ self.assertNotExists(os.path.join(tempdir, subdir, '.git'), 'Subdirectory has been initialised as a git repo')
+ # Check git repo
+ self._check_src_repo(tempdir)
+ # Modify file
+ runCmd("sed -i '1s:^:/* Add a comment */\\n:' %s" % testsrcfile)
+ result = runCmd('git commit -a -m "Add a comment"', cwd=tempdir)
+ # Now try updating original recipe
+ recipefile = bb_vars['FILE']
+ recipedir = os.path.dirname(recipefile)
+ self.add_command_to_tearDown('cd %s; rm -f %s/*.patch; git checkout .' % (recipedir, testrecipe))
+ result = runCmd('devtool update-recipe %s' % testrecipe)
+ expected_status = [(' M', '.*/%s$' % os.path.basename(recipefile)),
+ ('??', '.*/%s/%s/$' % (testrecipe, testrecipe))]
+ self._check_repo_status(os.path.dirname(recipefile), expected_status)
+ result = runCmd('git diff %s' % os.path.basename(recipefile), cwd=os.path.dirname(recipefile))
+ removelines = ['SRC_URI = "git://.*"']
+ addlines = [
+ 'SRC_URI = "git://.* \\\\',
+ 'file://0001-Add-a-comment.patch;patchdir=.. \\\\',
+ '"'
+ ]
+ self._check_diff(result.output, addlines, removelines)
+ # Put things back so we can run devtool finish on a different layer
+ runCmd('cd %s; rm -f %s/*.patch; git checkout .' % (recipedir, testrecipe))
+ # Run devtool finish
+ res = re.search('recipes-.*', recipedir)
+ self.assertTrue(res, 'Unable to find recipe subdirectory')
+ recipesubdir = res[0]
+ self.add_command_to_tearDown('rm -rf %s' % os.path.join(self.testlayer_path, recipesubdir))
+ result = runCmd('devtool finish %s meta-selftest' % testrecipe)
+ # Check bbappend file contents
+ appendfn = os.path.join(self.testlayer_path, recipesubdir, '%s_%%.bbappend' % testrecipe)
+ with open(appendfn, 'r') as f:
+ appendlines = f.readlines()
+ expected_appendlines = [
+ 'FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n',
+ '\n',
+ 'SRC_URI += "file://0001-Add-a-comment.patch;patchdir=.."\n',
+ '\n'
+ ]
+ self.assertEqual(appendlines, expected_appendlines)
+ self.assertExists(os.path.join(os.path.dirname(appendfn), testrecipe, '0001-Add-a-comment.patch'))
+ # Try building
+ bitbake('%s -c patch' % testrecipe)
+
+ def test_devtool_git_submodules(self):
+ # This tests if we can add a patch in a git submodule and extract it properly using devtool finish
+ # Check preconditions
+ self.assertTrue(not os.path.exists(self.workspacedir), 'This test cannot be run with a workspace directory under the build directory')
+ self.track_for_cleanup(self.workspacedir)
+ recipe = 'vulkan-samples'
+ src_uri = get_bb_var('SRC_URI', recipe)
+ self.assertIn('gitsm://', src_uri, 'This test expects the %s recipe to be a git recipe with submodules' % recipe)
+ oldrecipefile = get_bb_var('FILE', recipe)
+ recipedir = os.path.dirname(oldrecipefile)
+ result = runCmd('git status --porcelain .', cwd=recipedir)
+ if result.output.strip():
+ self.fail('Recipe directory for %s contains uncommitted changes' % recipe)
+ self.assertIn('/meta/', recipedir)
+ tempdir = tempfile.mkdtemp(prefix='devtoolqa')
+ self.track_for_cleanup(tempdir)
+ self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
+ result = runCmd('devtool modify %s %s' % (recipe, tempdir))
+ self.assertExists(os.path.join(tempdir, 'CMakeLists.txt'), 'Extracted source could not be found')
+ # Test devtool status
+ result = runCmd('devtool status')
+ self.assertIn(recipe, result.output)
+ self.assertIn(tempdir, result.output)
+ # Modify a source file in a submodule, (grab the first one)
+ result = runCmd('git submodule --quiet foreach \'echo $sm_path\'', cwd=tempdir)
+ submodule = result.output.splitlines()[0]
+ submodule_path = os.path.join(tempdir, submodule)
+ runCmd('echo "#This is a first comment" >> testfile', cwd=submodule_path)
+ result = runCmd('git status --porcelain . ', cwd=submodule_path)
+ self.assertIn("testfile", result.output)
+ runCmd('git add testfile; git commit -m "Adding a new file"', cwd=submodule_path)
+
+ # Try finish to the original layer
+ self.add_command_to_tearDown('rm -rf %s ; cd %s ; git checkout %s' % (recipedir, os.path.dirname(recipedir), recipedir))
+ runCmd('devtool finish -f %s meta' % recipe)
+ result = runCmd('devtool status')
+ self.assertNotIn(recipe, result.output, 'Recipe should have been reset by finish but wasn\'t')
+ self.assertNotExists(os.path.join(self.workspacedir, 'recipes', recipe), 'Recipe directory should not exist after finish')
+ expected_status = [(' M', '.*/%s$' % os.path.basename(oldrecipefile)),
+ ('??', '.*/.*-Adding-a-new-file.patch$')]
+ self._check_repo_status(recipedir, expected_status)
+ # Make sure the patch is added to the recipe with the correct "patchdir" option
+ result = runCmd('git diff .', cwd=recipedir)
+ addlines = [
+ 'file://0001-Adding-a-new-file.patch;patchdir=%s \\\\' % submodule
+ ]
+ self._check_diff(result.output, addlines, [])
+
class DevtoolExtractTests(DevtoolBase):
def test_devtool_extract(self):
@@ -1353,28 +1748,7 @@ class DevtoolExtractTests(DevtoolBase):
@OETestTag("runqemu")
def test_devtool_deploy_target(self):
- # NOTE: Whilst this test would seemingly be better placed as a runtime test,
- # unfortunately the runtime tests run under bitbake and you can't run
- # devtool within bitbake (since devtool needs to run bitbake itself).
- # Additionally we are testing build-time functionality as well, so
- # really this has to be done as an oe-selftest test.
- #
- # Check preconditions
- machine = get_bb_var('MACHINE')
- if not machine.startswith('qemu'):
- self.skipTest('This test only works with qemu machines')
- if not os.path.exists('/etc/runqemu-nosudo'):
- self.skipTest('You must set up tap devices with scripts/runqemu-gen-tapdevs before running this test')
- result = runCmd('PATH="$PATH:/sbin:/usr/sbin" ip tuntap show', ignore_status=True)
- if result.status != 0:
- result = runCmd('PATH="$PATH:/sbin:/usr/sbin" ifconfig -a', ignore_status=True)
- if result.status != 0:
- self.skipTest('Failed to determine if tap devices exist with ifconfig or ip: %s' % result.output)
- for line in result.output.splitlines():
- if line.startswith('tap'):
- break
- else:
- self.skipTest('No tap devices found - you must set up tap devices with scripts/runqemu-gen-tapdevs before running this test')
+ self._check_runqemu_prerequisites()
self.assertTrue(not os.path.exists(self.workspacedir), 'This test cannot be run with a workspace directory under the build directory')
# Definitions
testrecipe = 'mdadm'
@@ -1560,6 +1934,54 @@ class DevtoolUpgradeTests(DevtoolBase):
self.assertNotIn(recipe, result.output)
self.assertNotExists(os.path.join(self.workspacedir, 'recipes', recipe), 'Recipe directory should not exist after resetting')
+ def test_devtool_upgrade_drop_md5sum(self):
+ # Check preconditions
+ self.assertTrue(not os.path.exists(self.workspacedir), 'This test cannot be run with a workspace directory under the build directory')
+ self.track_for_cleanup(self.workspacedir)
+ self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
+ # For the moment, we are using a real recipe.
+ recipe = 'devtool-upgrade-test3'
+ version = '1.6.0'
+ oldrecipefile = get_bb_var('FILE', recipe)
+ tempdir = tempfile.mkdtemp(prefix='devtoolqa')
+ self.track_for_cleanup(tempdir)
+ # Check upgrade. Code does not check if new PV is older or newer that current PV, so, it may be that
+ # we are downgrading instead of upgrading.
+ result = runCmd('devtool upgrade %s %s -V %s' % (recipe, tempdir, version))
+ # Check new recipe file is present
+ newrecipefile = os.path.join(self.workspacedir, 'recipes', recipe, '%s_%s.bb' % (recipe, version))
+ self.assertExists(newrecipefile, 'Recipe file should exist after upgrade')
+ # Check recipe got changed as expected
+ with open(oldrecipefile + '.upgraded', 'r') as f:
+ desiredlines = f.readlines()
+ with open(newrecipefile, 'r') as f:
+ newlines = f.readlines()
+ self.assertEqual(desiredlines, newlines)
+
+ def test_devtool_upgrade_all_checksums(self):
+ # Check preconditions
+ self.assertTrue(not os.path.exists(self.workspacedir), 'This test cannot be run with a workspace directory under the build directory')
+ self.track_for_cleanup(self.workspacedir)
+ self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
+ # For the moment, we are using a real recipe.
+ recipe = 'devtool-upgrade-test4'
+ version = '1.6.0'
+ oldrecipefile = get_bb_var('FILE', recipe)
+ tempdir = tempfile.mkdtemp(prefix='devtoolqa')
+ self.track_for_cleanup(tempdir)
+ # Check upgrade. Code does not check if new PV is older or newer that current PV, so, it may be that
+ # we are downgrading instead of upgrading.
+ result = runCmd('devtool upgrade %s %s -V %s' % (recipe, tempdir, version))
+ # Check new recipe file is present
+ newrecipefile = os.path.join(self.workspacedir, 'recipes', recipe, '%s_%s.bb' % (recipe, version))
+ self.assertExists(newrecipefile, 'Recipe file should exist after upgrade')
+ # Check recipe got changed as expected
+ with open(oldrecipefile + '.upgraded', 'r') as f:
+ desiredlines = f.readlines()
+ with open(newrecipefile, 'r') as f:
+ newlines = f.readlines()
+ self.assertEqual(desiredlines, newlines)
+
def test_devtool_layer_plugins(self):
"""Test that devtool can use plugins from other layers.
@@ -1578,7 +2000,15 @@ class DevtoolUpgradeTests(DevtoolBase):
for p in paths:
dstdir = os.path.join(dstdir, p)
if not os.path.exists(dstdir):
- os.makedirs(dstdir)
+ try:
+ os.makedirs(dstdir)
+ except PermissionError:
+ return False
+ except OSError as e:
+ if e.errno == errno.EROFS:
+ return False
+ else:
+ raise e
if p == "lib":
# Can race with other tests
self.add_command_to_tearDown('rmdir --ignore-fail-on-non-empty %s' % dstdir)
@@ -1586,8 +2016,12 @@ class DevtoolUpgradeTests(DevtoolBase):
self.track_for_cleanup(dstdir)
dstfile = os.path.join(dstdir, os.path.basename(srcfile))
if srcfile != dstfile:
- shutil.copy(srcfile, dstfile)
+ try:
+ shutil.copy(srcfile, dstfile)
+ except PermissionError:
+ return False
self.track_for_cleanup(dstfile)
+ return True
def test_devtool_load_plugin(self):
"""Test that devtool loads only the first found plugin in BBPATH."""
@@ -1605,15 +2039,17 @@ class DevtoolUpgradeTests(DevtoolBase):
plugincontent = fh.readlines()
try:
self.assertIn('meta-selftest', srcfile, 'wrong bbpath plugin found')
- for path in searchpath:
- self._copy_file_with_cleanup(srcfile, path, 'lib', 'devtool')
+ searchpath = [
+ path for path in searchpath
+ if self._copy_file_with_cleanup(srcfile, path, 'lib', 'devtool')
+ ]
result = runCmd("devtool --quiet count")
self.assertEqual(result.output, '1')
result = runCmd("devtool --quiet multiloaded")
self.assertEqual(result.output, "no")
for path in searchpath:
result = runCmd("devtool --quiet bbdir")
- self.assertEqual(result.output, path)
+ self.assertEqual(os.path.realpath(result.output), os.path.realpath(path))
os.unlink(os.path.join(result.output, 'lib', 'devtool', 'bbpath.py'))
finally:
with open(srcfile, 'w') as fh:
@@ -1794,6 +2230,52 @@ class DevtoolUpgradeTests(DevtoolBase):
if files:
self.fail('Unexpected file(s) copied next to bbappend: %s' % ', '.join(files))
+ def test_devtool_finish_update_patch(self):
+ # This test uses a modified version of the sysdig recipe from meta-oe.
+ # - The patches have been renamed.
+ # - The dependencies are commented out since the recipe is not being
+ # built.
+ #
+ # The sysdig recipe is interesting in that it fetches two different Git
+ # repositories, and there are patches for both. This leads to that
+ # devtool will create ignore commits as it uses Git submodules to keep
+ # track of the second repository.
+ #
+ # This test will verify that the ignored commits actually are ignored
+ # when a commit in between is modified. It will also verify that the
+ # updated patch keeps its original name.
+
+ # Check preconditions
+ self.assertTrue(not os.path.exists(self.workspacedir), 'This test cannot be run with a workspace directory under the build directory')
+ # Try modifying a recipe
+ self.track_for_cleanup(self.workspacedir)
+ recipe = 'sysdig-selftest'
+ recipefile = get_bb_var('FILE', recipe)
+ recipedir = os.path.dirname(recipefile)
+ result = runCmd('git status --porcelain .', cwd=recipedir)
+ if result.output.strip():
+ self.fail('Recipe directory for %s contains uncommitted changes' % recipe)
+ tempdir = tempfile.mkdtemp(prefix='devtoolqa')
+ self.track_for_cleanup(tempdir)
+ self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
+ result = runCmd('devtool modify %s %s' % (recipe, tempdir))
+ self.add_command_to_tearDown('cd %s; rm %s/*; git checkout %s %s' % (recipedir, recipe, recipe, os.path.basename(recipefile)))
+ self.assertExists(os.path.join(tempdir, 'CMakeLists.txt'), 'Extracted source could not be found')
+ # Make a change to one of the existing commits
+ result = runCmd('echo "# A comment " >> CMakeLists.txt', cwd=tempdir)
+ result = runCmd('git status --porcelain', cwd=tempdir)
+ self.assertIn('M CMakeLists.txt', result.output)
+ result = runCmd('git commit --fixup HEAD^ CMakeLists.txt', cwd=tempdir)
+ result = runCmd('git show -s --format=%s', cwd=tempdir)
+ self.assertIn('fixup! cmake: Pass PROBE_NAME via CFLAGS', result.output)
+ result = runCmd('GIT_SEQUENCE_EDITOR=true git rebase -i --autosquash devtool-base', cwd=tempdir)
+ result = runCmd('devtool finish %s meta-selftest' % recipe)
+ result = runCmd('devtool status')
+ self.assertNotIn(recipe, result.output, 'Recipe should have been reset by finish but wasn\'t')
+ self.assertNotExists(os.path.join(self.workspacedir, 'recipes', recipe), 'Recipe directory should not exist after finish')
+ expected_status = [(' M', '.*/0099-cmake-Pass-PROBE_NAME-via-CFLAGS.patch$')]
+ self._check_repo_status(recipedir, expected_status)
+
def test_devtool_rename(self):
# Check preconditions
self.assertTrue(not os.path.exists(self.workspacedir), 'This test cannot be run with a workspace directory under the build directory')
@@ -1830,7 +2312,6 @@ class DevtoolUpgradeTests(DevtoolBase):
self._test_recipe_contents(newrecipefile, checkvars, [])
# Try again - change just name this time
result = runCmd('devtool reset -n %s' % newrecipename)
- shutil.rmtree(newsrctree)
add_recipe()
newrecipefile = os.path.join(self.workspacedir, 'recipes', newrecipename, '%s_%s.bb' % (newrecipename, recipever))
result = runCmd('devtool rename %s %s' % (recipename, newrecipename))
@@ -1843,7 +2324,6 @@ class DevtoolUpgradeTests(DevtoolBase):
self._test_recipe_contents(newrecipefile, checkvars, [])
# Try again - change just version this time
result = runCmd('devtool reset -n %s' % newrecipename)
- shutil.rmtree(newsrctree)
add_recipe()
newrecipefile = os.path.join(self.workspacedir, 'recipes', recipename, '%s_%s.bb' % (recipename, newrecipever))
result = runCmd('devtool rename %s -V %s' % (recipename, newrecipever))
@@ -1914,7 +2394,9 @@ class DevtoolUpgradeTests(DevtoolBase):
#Modify the kernel source
modfile = os.path.join(tempdir, 'init/version.c')
- runCmd("sed -i 's/Linux/LiNuX/g' %s" % (modfile))
+ # Moved to uts.h in 6.1 onwards
+ modfile2 = os.path.join(tempdir, 'include/linux/uts.h')
+ runCmd("sed -i 's/Linux/LiNuX/g' %s %s" % (modfile, modfile2))
#Modify the configuration
codeconfigfile = os.path.join(tempdir, '.config.new')
@@ -1929,3 +2411,518 @@ class DevtoolUpgradeTests(DevtoolBase):
#Step 4.5
runCmd("grep %s %s" % (modconfopt, codeconfigfile))
+
+
+class DevtoolIdeSdkTests(DevtoolBase):
+ def _write_bb_config(self, recipe_names):
+ """Helper to write the bitbake local.conf file"""
+ conf_lines = [
+ 'IMAGE_CLASSES += "image-combined-dbg"',
+ 'IMAGE_GEN_DEBUGFS = "1"',
+ 'IMAGE_INSTALL:append = " gdbserver %s"' % ' '.join(
+ [r + '-ptest' for r in recipe_names])
+ ]
+ self.write_config("\n".join(conf_lines))
+
+ def _check_workspace(self):
+ """Check if a workspace directory is available and setup the cleanup"""
+ self.assertTrue(not os.path.exists(self.workspacedir),
+ 'This test cannot be run with a workspace directory under the build directory')
+ self.track_for_cleanup(self.workspacedir)
+ self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
+
+ def _workspace_scripts_dir(self, recipe_name):
+ return os.path.realpath(os.path.join(self.builddir, 'workspace', 'ide-sdk', recipe_name, 'scripts'))
+
+ def _sources_scripts_dir(self, src_dir):
+ return os.path.realpath(os.path.join(src_dir, 'oe-scripts'))
+
+ def _workspace_gdbinit_dir(self, recipe_name):
+ return os.path.realpath(os.path.join(self.builddir, 'workspace', 'ide-sdk', recipe_name, 'scripts', 'gdbinit'))
+
+ def _sources_gdbinit_dir(self, src_dir):
+ return os.path.realpath(os.path.join(src_dir, 'oe-gdbinit'))
+
+ def _devtool_ide_sdk_recipe(self, recipe_name, build_file, testimage):
+ """Setup a recipe for working with devtool ide-sdk
+
+ Basically devtool modify -x followed by some tests
+ """
+ tempdir = tempfile.mkdtemp(prefix='devtoolqa')
+ self.track_for_cleanup(tempdir)
+ self.add_command_to_tearDown('bitbake -c clean %s' % recipe_name)
+
+ result = runCmd('devtool modify %s -x %s' % (recipe_name, tempdir))
+ self.assertExists(os.path.join(tempdir, build_file),
+ 'Extracted source could not be found')
+ self.assertExists(os.path.join(self.workspacedir, 'conf',
+ 'layer.conf'), 'Workspace directory not created')
+ matches = glob.glob(os.path.join(self.workspacedir,
+ 'appends', recipe_name + '.bbappend'))
+ self.assertTrue(matches, 'bbappend not created %s' % result.output)
+
+ # Test devtool status
+ result = runCmd('devtool status')
+ self.assertIn(recipe_name, result.output)
+ self.assertIn(tempdir, result.output)
+ self._check_src_repo(tempdir)
+
+ # Usually devtool ide-sdk would initiate the build of the SDK.
+ # But there is a circular dependency with starting Qemu and passing the IP of runqemu to devtool ide-sdk.
+ if testimage:
+ bitbake("%s qemu-native qemu-helper-native" % testimage)
+ deploy_dir_image = get_bb_var('DEPLOY_DIR_IMAGE')
+ self.add_command_to_tearDown('bitbake -c clean %s' % testimage)
+ self.add_command_to_tearDown(
+ 'rm -f %s/%s*' % (deploy_dir_image, testimage))
+
+ return tempdir
+
+ def _get_recipe_ids(self, recipe_name):
+ """IDs needed to write recipe specific config entries into IDE config files"""
+ package_arch = get_bb_var('PACKAGE_ARCH', recipe_name)
+ recipe_id = recipe_name + "-" + package_arch
+ recipe_id_pretty = recipe_name + ": " + package_arch
+ return (recipe_id, recipe_id_pretty)
+
+ def _verify_install_script_code(self, tempdir, recipe_name):
+ """Verify the scripts referred by the tasks.json file are fine.
+
+ This function does not depend on Qemu. Therefore it verifies the scripts
+ exists and the delete step works as expected. But it does not try to
+ deploy to Qemu.
+ """
+ recipe_id, recipe_id_pretty = self._get_recipe_ids(recipe_name)
+ with open(os.path.join(tempdir, '.vscode', 'tasks.json')) as tasks_j:
+ tasks_d = json.load(tasks_j)
+ tasks = tasks_d["tasks"]
+ task_install = next(
+ (task for task in tasks if task["label"] == "install && deploy-target %s" % recipe_id_pretty), None)
+ self.assertIsNot(task_install, None)
+ # execute only the bb_run_do_install script since the deploy would require e.g. Qemu running.
+ i_and_d_script = "install_and_deploy_" + recipe_id
+ i_and_d_script_path = os.path.join(
+ self._workspace_scripts_dir(recipe_name), i_and_d_script)
+ self.assertExists(i_and_d_script_path)
+ del_script = "delete_package_dirs_" + recipe_id
+ del_script_path = os.path.join(
+ self._workspace_scripts_dir(recipe_name), del_script)
+ self.assertExists(del_script_path)
+ runCmd(del_script_path, cwd=tempdir)
+
+ def _devtool_ide_sdk_qemu(self, tempdir, qemu, recipe_name, example_exe):
+ """Verify deployment and execution in Qemu system work for one recipe.
+
+ This function checks the entire SDK workflow: changing the code, recompiling
+ it and deploying it back to Qemu, and checking that the changes have been
+ incorporated into the provided binaries. It also runs the tests of the recipe.
+ """
+ recipe_id, _ = self._get_recipe_ids(recipe_name)
+ i_and_d_script = "install_and_deploy_" + recipe_id
+ install_deploy_cmd = os.path.join(
+ self._workspace_scripts_dir(recipe_name), i_and_d_script)
+ self.assertExists(install_deploy_cmd,
+ '%s script not found' % install_deploy_cmd)
+ runCmd(install_deploy_cmd)
+
+ MAGIC_STRING_ORIG = "Magic: 123456789"
+ MAGIC_STRING_NEW = "Magic: 987654321"
+ ptest_cmd = "ptest-runner " + recipe_name
+
+ # validate that SSH is working
+ status, _ = qemu.run("uname")
+ self.assertEqual(
+ status, 0, msg="Failed to connect to the SSH server on Qemu")
+
+ # Verify the unmodified example prints the magic string
+ status, output = qemu.run(example_exe)
+ self.assertEqual(status, 0, msg="%s failed: %s" %
+ (example_exe, output))
+ self.assertIn(MAGIC_STRING_ORIG, output)
+
+ # Verify the unmodified ptests work
+ status, output = qemu.run(ptest_cmd)
+ self.assertEqual(status, 0, msg="%s failed: %s" % (ptest_cmd, output))
+ self.assertIn("PASS: cpp-example-lib", output)
+
+ # Verify remote debugging works
+ self._gdb_cross_debugging(
+ qemu, recipe_name, example_exe, MAGIC_STRING_ORIG)
+
+ # Replace the Magic String in the code, compile and deploy to Qemu
+ cpp_example_lib_hpp = os.path.join(tempdir, 'cpp-example-lib.hpp')
+ with open(cpp_example_lib_hpp, 'r') as file:
+ cpp_code = file.read()
+ cpp_code = cpp_code.replace(MAGIC_STRING_ORIG, MAGIC_STRING_NEW)
+ with open(cpp_example_lib_hpp, 'w') as file:
+ file.write(cpp_code)
+ runCmd(install_deploy_cmd, cwd=tempdir)
+
+ # Verify the modified example prints the modified magic string
+ status, output = qemu.run(example_exe)
+ self.assertEqual(status, 0, msg="%s failed: %s" %
+ (example_exe, output))
+ self.assertNotIn(MAGIC_STRING_ORIG, output)
+ self.assertIn(MAGIC_STRING_NEW, output)
+
+ # Verify the modified example ptests work
+ status, output = qemu.run(ptest_cmd)
+ self.assertEqual(status, 0, msg="%s failed: %s" % (ptest_cmd, output))
+ self.assertIn("PASS: cpp-example-lib", output)
+
+ # Verify remote debugging works wit the modified magic string
+ self._gdb_cross_debugging(
+ qemu, recipe_name, example_exe, MAGIC_STRING_NEW)
+
+ def _gdb_cross(self):
+ """Verify gdb-cross is provided by devtool ide-sdk"""
+ target_arch = self.td["TARGET_ARCH"]
+ target_sys = self.td["TARGET_SYS"]
+ gdb_recipe = "gdb-cross-" + target_arch
+ gdb_binary = target_sys + "-gdb"
+
+ native_sysroot = get_bb_var("RECIPE_SYSROOT_NATIVE", gdb_recipe)
+ r = runCmd("%s --version" % gdb_binary,
+ native_sysroot=native_sysroot, target_sys=target_sys)
+ self.assertEqual(r.status, 0)
+ self.assertIn("GNU gdb", r.output)
+
+ def _gdb_cross_debugging(self, qemu, recipe_name, example_exe, magic_string):
+ """Verify gdb-cross is working
+
+ Test remote debugging:
+ break main
+ run
+ continue
+ break CppExample::print_json()
+ continue
+ print CppExample::test_string.compare("cpp-example-lib Magic: 123456789")
+ $1 = 0
+ print CppExample::test_string.compare("cpp-example-lib Magic: 123456789aaa")
+ $2 = -3
+ list cpp-example-lib.hpp:13,13
+ 13 inline static const std::string test_string = "cpp-example-lib Magic: 123456789";
+ continue
+ """
+ sshargs = '-o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no'
+ gdbserver_script = os.path.join(self._workspace_scripts_dir(
+ recipe_name), 'gdbserver_1234_usr-bin-' + example_exe + '_m')
+ gdb_script = os.path.join(self._workspace_scripts_dir(
+ recipe_name), 'gdb_1234_usr-bin-' + example_exe)
+
+ # Start a gdbserver
+ r = runCmd(gdbserver_script)
+ self.assertEqual(r.status, 0)
+
+ # Check there is a gdbserver running
+ r = runCmd('ssh %s root@%s %s' % (sshargs, qemu.ip, 'ps'))
+ self.assertEqual(r.status, 0)
+ self.assertIn("gdbserver ", r.output)
+
+ # Check the pid file is correct
+ test_cmd = "cat /proc/$(cat /tmp/gdbserver_1234_usr-bin-" + \
+ example_exe + "/pid)/cmdline"
+ r = runCmd('ssh %s root@%s %s' % (sshargs, qemu.ip, test_cmd))
+ self.assertEqual(r.status, 0)
+ self.assertIn("gdbserver", r.output)
+
+ # Test remote debugging works
+ gdb_batch_cmd = " --batch -ex 'break main' -ex 'run'"
+ gdb_batch_cmd += " -ex 'break CppExample::print_json()' -ex 'continue'"
+ gdb_batch_cmd += " -ex 'print CppExample::test_string.compare(\"cpp-example-lib %s\")'" % magic_string
+ gdb_batch_cmd += " -ex 'print CppExample::test_string.compare(\"cpp-example-lib %saaa\")'" % magic_string
+ gdb_batch_cmd += " -ex 'list cpp-example-lib.hpp:13,13'"
+ gdb_batch_cmd += " -ex 'continue'"
+ r = runCmd(gdb_script + gdb_batch_cmd)
+ self.logger.debug("%s %s returned: %s", gdb_script,
+ gdb_batch_cmd, r.output)
+ self.assertEqual(r.status, 0)
+ self.assertIn("Breakpoint 1, main", r.output)
+ self.assertIn("$1 = 0", r.output) # test.string.compare equal
+ self.assertIn("$2 = -3", r.output) # test.string.compare longer
+ self.assertIn(
+ 'inline static const std::string test_string = "cpp-example-lib %s";' % magic_string, r.output)
+ self.assertIn("exited normally", r.output)
+
+ # Stop the gdbserver
+ r = runCmd(gdbserver_script + ' stop')
+ self.assertEqual(r.status, 0)
+
+ # Check there is no gdbserver running
+ r = runCmd('ssh %s root@%s %s' % (sshargs, qemu.ip, 'ps'))
+ self.assertEqual(r.status, 0)
+ self.assertNotIn("gdbserver ", r.output)
+
+ def _verify_cmake_preset(self, tempdir):
+ """Verify the generated cmake preset works as expected
+
+ Check if compiling works
+ Check if unit tests can be executed in qemu (not qemu-system)
+ """
+ with open(os.path.join(tempdir, 'CMakeUserPresets.json')) as cmake_preset_j:
+ cmake_preset_d = json.load(cmake_preset_j)
+ config_presets = cmake_preset_d["configurePresets"]
+ self.assertEqual(len(config_presets), 1)
+ cmake_exe = config_presets[0]["cmakeExecutable"]
+ preset_name = config_presets[0]["name"]
+
+ # Verify the wrapper for cmake native is available
+ self.assertExists(cmake_exe)
+
+ # Verify the cmake preset generated by devtool ide-sdk is available
+ result = runCmd('%s --list-presets' % cmake_exe, cwd=tempdir)
+ self.assertIn(preset_name, result.output)
+
+ # Verify cmake re-uses the o files compiled by bitbake
+ result = runCmd('%s --build --preset %s' %
+ (cmake_exe, preset_name), cwd=tempdir)
+ self.assertIn("ninja: no work to do.", result.output)
+
+ # Verify the unit tests work (in Qemu user mode)
+ result = runCmd('%s --build --preset %s --target test' %
+ (cmake_exe, preset_name), cwd=tempdir)
+ self.assertIn("100% tests passed", result.output)
+
+ # Verify re-building and testing works again
+ result = runCmd('%s --build --preset %s --target clean' %
+ (cmake_exe, preset_name), cwd=tempdir)
+ self.assertIn("Cleaning", result.output)
+ result = runCmd('%s --build --preset %s' %
+ (cmake_exe, preset_name), cwd=tempdir)
+ self.assertIn("Building", result.output)
+ self.assertIn("Linking", result.output)
+ result = runCmd('%s --build --preset %s --target test' %
+ (cmake_exe, preset_name), cwd=tempdir)
+ self.assertIn("Running tests...", result.output)
+ self.assertIn("100% tests passed", result.output)
+
+ @OETestTag("runqemu")
+ def test_devtool_ide_sdk_none_qemu(self):
+ """Start qemu-system and run tests for multiple recipes. ide=none is used."""
+ recipe_names = ["cmake-example", "meson-example"]
+ testimage = "oe-selftest-image"
+
+ self._check_workspace()
+ self._write_bb_config(recipe_names)
+ self._check_runqemu_prerequisites()
+
+ # Verify deployment to Qemu (system mode) works
+ bitbake(testimage)
+ with runqemu(testimage, runqemuparams="nographic") as qemu:
+ # cmake-example recipe
+ recipe_name = "cmake-example"
+ example_exe = "cmake-example"
+ build_file = "CMakeLists.txt"
+ tempdir = self._devtool_ide_sdk_recipe(
+ recipe_name, build_file, testimage)
+ bitbake_sdk_cmd = 'devtool ide-sdk %s %s -t root@%s -c --ide=none' % (
+ recipe_name, testimage, qemu.ip)
+ runCmd(bitbake_sdk_cmd)
+ self._gdb_cross()
+ self._verify_cmake_preset(tempdir)
+ self._devtool_ide_sdk_qemu(tempdir, qemu, recipe_name, example_exe)
+ # Verify the oe-scripts sym-link is valid
+ self.assertEqual(self._workspace_scripts_dir(
+ recipe_name), self._sources_scripts_dir(tempdir))
+
+ # meson-example recipe
+ recipe_name = "meson-example"
+ example_exe = "mesonex"
+ build_file = "meson.build"
+ tempdir = self._devtool_ide_sdk_recipe(
+ recipe_name, build_file, testimage)
+ bitbake_sdk_cmd = 'devtool ide-sdk %s %s -t root@%s -c --ide=none' % (
+ recipe_name, testimage, qemu.ip)
+ runCmd(bitbake_sdk_cmd)
+ self._gdb_cross()
+ self._devtool_ide_sdk_qemu(tempdir, qemu, recipe_name, example_exe)
+ # Verify the oe-scripts sym-link is valid
+ self.assertEqual(self._workspace_scripts_dir(
+ recipe_name), self._sources_scripts_dir(tempdir))
+
+ def test_devtool_ide_sdk_code_cmake(self):
+ """Verify a cmake recipe works with ide=code mode"""
+ recipe_name = "cmake-example"
+ build_file = "CMakeLists.txt"
+ testimage = "oe-selftest-image"
+
+ self._check_workspace()
+ self._write_bb_config([recipe_name])
+ tempdir = self._devtool_ide_sdk_recipe(
+ recipe_name, build_file, testimage)
+ bitbake_sdk_cmd = 'devtool ide-sdk %s %s -t root@192.168.17.17 -c --ide=code' % (
+ recipe_name, testimage)
+ runCmd(bitbake_sdk_cmd)
+ self._verify_cmake_preset(tempdir)
+ self._verify_install_script_code(tempdir, recipe_name)
+ self._gdb_cross()
+
+ def test_devtool_ide_sdk_code_meson(self):
+ """Verify a meson recipe works with ide=code mode"""
+ recipe_name = "meson-example"
+ build_file = "meson.build"
+ testimage = "oe-selftest-image"
+
+ self._check_workspace()
+ self._write_bb_config([recipe_name])
+ tempdir = self._devtool_ide_sdk_recipe(
+ recipe_name, build_file, testimage)
+ bitbake_sdk_cmd = 'devtool ide-sdk %s %s -t root@192.168.17.17 -c --ide=code' % (
+ recipe_name, testimage)
+ runCmd(bitbake_sdk_cmd)
+
+ with open(os.path.join(tempdir, '.vscode', 'settings.json')) as settings_j:
+ settings_d = json.load(settings_j)
+ meson_exe = settings_d["mesonbuild.mesonPath"]
+ meson_build_folder = settings_d["mesonbuild.buildFolder"]
+
+ # Verify the wrapper for meson native is available
+ self.assertExists(meson_exe)
+
+ # Verify meson re-uses the o files compiled by bitbake
+ result = runCmd('%s compile -C %s' %
+ (meson_exe, meson_build_folder), cwd=tempdir)
+ self.assertIn("ninja: no work to do.", result.output)
+
+ # Verify the unit tests work (in Qemu)
+ runCmd('%s test -C %s' % (meson_exe, meson_build_folder), cwd=tempdir)
+
+ # Verify re-building and testing works again
+ result = runCmd('%s compile -C %s --clean' %
+ (meson_exe, meson_build_folder), cwd=tempdir)
+ self.assertIn("Cleaning...", result.output)
+ result = runCmd('%s compile -C %s' %
+ (meson_exe, meson_build_folder), cwd=tempdir)
+ self.assertIn("Linking target", result.output)
+ runCmd('%s test -C %s' % (meson_exe, meson_build_folder), cwd=tempdir)
+
+ self._verify_install_script_code(tempdir, recipe_name)
+ self._gdb_cross()
+
+ def test_devtool_ide_sdk_shared_sysroots(self):
+ """Verify the shared sysroot SDK"""
+
+ # Handle the workspace (which is not needed by this test case)
+ self._check_workspace()
+
+ result_init = runCmd(
+ 'devtool ide-sdk -m shared oe-selftest-image cmake-example meson-example --ide=code')
+ bb_vars = get_bb_vars(
+ ['REAL_MULTIMACH_TARGET_SYS', 'DEPLOY_DIR_IMAGE', 'COREBASE'], "meta-ide-support")
+ environment_script = 'environment-setup-%s' % bb_vars['REAL_MULTIMACH_TARGET_SYS']
+ deploydir = bb_vars['DEPLOY_DIR_IMAGE']
+ environment_script_path = os.path.join(deploydir, environment_script)
+ cpp_example_src = os.path.join(
+ bb_vars['COREBASE'], 'meta-selftest', 'recipes-test', 'cpp', 'files')
+
+ # Verify the cross environment script is available
+ self.assertExists(environment_script_path)
+
+ def runCmdEnv(cmd, cwd):
+ cmd = '/bin/sh -c ". %s > /dev/null && %s"' % (
+ environment_script_path, cmd)
+ return runCmd(cmd, cwd)
+
+ # Verify building the C++ example works with CMake
+ tempdir_cmake = tempfile.mkdtemp(prefix='devtoolqa')
+ self.track_for_cleanup(tempdir_cmake)
+
+ result_cmake = runCmdEnv("which cmake", cwd=tempdir_cmake)
+ cmake_native = os.path.normpath(result_cmake.output.strip())
+ self.assertExists(cmake_native)
+
+ runCmdEnv('cmake %s' % cpp_example_src, cwd=tempdir_cmake)
+ runCmdEnv('cmake --build %s' % tempdir_cmake, cwd=tempdir_cmake)
+
+ # Verify the printed note really referres to a cmake executable
+ cmake_native_code = ""
+ for line in result_init.output.splitlines():
+ m = re.search(r'"cmake.cmakePath": "(.*)"', line)
+ if m:
+ cmake_native_code = m.group(1)
+ break
+ self.assertExists(cmake_native_code)
+ self.assertEqual(cmake_native, cmake_native_code)
+
+ # Verify building the C++ example works with Meson
+ tempdir_meson = tempfile.mkdtemp(prefix='devtoolqa')
+ self.track_for_cleanup(tempdir_meson)
+
+ result_cmake = runCmdEnv("which meson", cwd=tempdir_meson)
+ meson_native = os.path.normpath(result_cmake.output.strip())
+ self.assertExists(meson_native)
+
+ runCmdEnv('meson setup %s' % tempdir_meson, cwd=cpp_example_src)
+ runCmdEnv('meson compile', cwd=tempdir_meson)
+
+ def test_devtool_ide_sdk_plugins(self):
+ """Test that devtool ide-sdk can use plugins from other layers."""
+
+ # We need a workspace layer and a modified recipe (but no image)
+ modified_recipe_name = "meson-example"
+ modified_build_file = "meson.build"
+ testimage = "oe-selftest-image"
+ shared_recipe_name = "cmake-example"
+
+ self._check_workspace()
+ self._write_bb_config([modified_recipe_name])
+ tempdir = self._devtool_ide_sdk_recipe(
+ modified_recipe_name, modified_build_file, None)
+
+ IDE_RE = re.compile(r'.*--ide \{(.*)\}.*')
+
+ def get_ides_from_help(help_str):
+ m = IDE_RE.search(help_str)
+ return m.group(1).split(',')
+
+ # verify the default plugins are available but the foo plugin is not
+ result = runCmd('devtool ide-sdk -h')
+ found_ides = get_ides_from_help(result.output)
+ self.assertIn('code', found_ides)
+ self.assertIn('none', found_ides)
+ self.assertNotIn('foo', found_ides)
+
+ shared_config_file = os.path.join(tempdir, 'shared-config.txt')
+ shared_config_str = 'Dummy shared IDE config'
+ modified_config_file = os.path.join(tempdir, 'modified-config.txt')
+ modified_config_str = 'Dummy modified IDE config'
+
+ # Generate a foo plugin in the workspace layer
+ plugin_dir = os.path.join(
+ self.workspacedir, 'lib', 'devtool', 'ide_plugins')
+ os.makedirs(plugin_dir)
+ plugin_code = 'from devtool.ide_plugins import IdeBase\n\n'
+ plugin_code += 'class IdeFoo(IdeBase):\n'
+ plugin_code += ' def setup_shared_sysroots(self, shared_env):\n'
+ plugin_code += ' with open("%s", "w") as config_file:\n' % shared_config_file
+ plugin_code += ' config_file.write("%s")\n\n' % shared_config_str
+ plugin_code += ' def setup_modified_recipe(self, args, image_recipe, modified_recipe):\n'
+ plugin_code += ' with open("%s", "w") as config_file:\n' % modified_config_file
+ plugin_code += ' config_file.write("%s")\n\n' % modified_config_str
+ plugin_code += 'def register_ide_plugin(ide_plugins):\n'
+ plugin_code += ' ide_plugins["foo"] = IdeFoo\n'
+
+ plugin_py = os.path.join(plugin_dir, 'ide_foo.py')
+ with open(plugin_py, 'w') as plugin_file:
+ plugin_file.write(plugin_code)
+
+ # Verify the foo plugin is available as well
+ result = runCmd('devtool ide-sdk -h')
+ found_ides = get_ides_from_help(result.output)
+ self.assertIn('code', found_ides)
+ self.assertIn('none', found_ides)
+ self.assertIn('foo', found_ides)
+
+ # Verify the foo plugin generates a shared config
+ result = runCmd(
+ 'devtool ide-sdk -m shared --skip-bitbake --ide foo %s' % shared_recipe_name)
+ with open(shared_config_file) as shared_config:
+ shared_config_new = shared_config.read()
+ self.assertEqual(shared_config_str, shared_config_new)
+
+ # Verify the foo plugin generates a modified config
+ result = runCmd('devtool ide-sdk --skip-bitbake --ide foo %s %s' %
+ (modified_recipe_name, testimage))
+ with open(modified_config_file) as modified_config:
+ modified_config_new = modified_config.read()
+ self.assertEqual(modified_config_str, modified_config_new)
diff --git a/meta/lib/oeqa/selftest/cases/distrodata.py b/meta/lib/oeqa/selftest/cases/distrodata.py
index b80d091c1c..ad952c004b 100644
--- a/meta/lib/oeqa/selftest/cases/distrodata.py
+++ b/meta/lib/oeqa/selftest/cases/distrodata.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
@@ -37,42 +39,6 @@ but their recipes claim otherwise by setting UPSTREAM_VERSION_UNKNOWN. Please re
""" + "\n".join(regressed_successes)
self.assertTrue(len(regressed_failures) == 0 and len(regressed_successes) == 0, msg)
- def test_missing_homepg(self):
- """
- Summary: Test for oe-core recipes that don't have a HOMEPAGE or DESCRIPTION
- Expected: All oe-core recipes should have a DESCRIPTION entry
- Expected: All oe-core recipes should have a HOMEPAGE entry except for recipes that are not fetched from external sources.
- Product: oe-core
- """
- with bb.tinfoil.Tinfoil() as tinfoil:
- tinfoil.prepare(config_only=False)
- no_description = []
- no_homepage = []
- for fn in tinfoil.all_recipe_files(variants=False):
- if not '/meta/recipes-' in fn:
- # We are only interested in OE-Core
- continue
- rd = tinfoil.parse_recipe_file(fn, appends=False)
- pn = rd.getVar('BPN')
- srcfile = rd.getVar('SRC_URI').split()
- #Since DESCRIPTION defaults to SUMMARY if not set, we are only interested in recipes without DESCRIPTION or SUMMARY
- if not (rd.getVar('SUMMARY') or rd.getVar('DESCRIPTION')):
- no_description.append((pn, fn))
- if not rd.getVar('HOMEPAGE'):
- if srcfile and srcfile[0].startswith('file') or not rd.getVar('SRC_URI'):
- # We are only interested in recipes SRC_URI fetched from external sources
- continue
- no_homepage.append((pn, fn))
- if no_homepage:
- self.fail("""
-The following recipes do not have a HOMEPAGE. Please add an entry for HOMEPAGE in the recipe.
-""" + "\n".join(['%s (%s)' % i for i in no_homepage]))
-
- if no_description:
- self.fail("""
-The following recipes do not have a DESCRIPTION. Please add an entry for DESCRIPTION in the recipe.
-""" + "\n".join(['%s (%s)' % i for i in no_description]))
-
def test_maintainers(self):
"""
Summary: Test that oe-core recipes have a maintainer and entries in maintainers list have a recipe
@@ -82,15 +48,15 @@ The following recipes do not have a DESCRIPTION. Please add an entry for DESCRIP
Author: Alexander Kanavin <alex.kanavin@gmail.com>
"""
def is_exception(pkg):
- exceptions = ["packagegroup-", "initramfs-", "systemd-machine-units", "target-sdk-provides-dummy"]
+ exceptions = ["packagegroup-",]
for i in exceptions:
if i in pkg:
return True
return False
def is_maintainer_exception(entry):
- exceptions = ["musl", "newlib", "linux-yocto", "linux-dummy", "mesa-gl", "libgfortran",
- "cve-update-db-native", "rust"]
+ exceptions = ["musl", "newlib", "linux-yocto", "linux-dummy", "mesa-gl", "libgfortran", "libx11-compose-data",
+ "cve-update-nvd2-native",]
for i in exceptions:
if i in entry:
return True
diff --git a/meta/lib/oeqa/selftest/cases/eSDK.py b/meta/lib/oeqa/selftest/cases/eSDK.py
deleted file mode 100644
index 3ea0f66357..0000000000
--- a/meta/lib/oeqa/selftest/cases/eSDK.py
+++ /dev/null
@@ -1,120 +0,0 @@
-#
-# SPDX-License-Identifier: MIT
-#
-
-import tempfile
-import shutil
-import os
-import glob
-import time
-from oeqa.selftest.case import OESelftestTestCase
-from oeqa.utils.commands import runCmd, bitbake, get_bb_vars
-
-class oeSDKExtSelfTest(OESelftestTestCase):
- """
- # Bugzilla Test Plan: 6033
- # This code is planned to be part of the automation for eSDK containig
- # Install libraries and headers, image generation binary feeds, sdk-update.
- """
-
- @staticmethod
- def get_esdk_environment(env_eSDK, tmpdir_eSDKQA):
- # XXX: at this time use the first env need to investigate
- # what environment load oe-selftest, i586, x86_64
- pattern = os.path.join(tmpdir_eSDKQA, 'environment-setup-*')
- return glob.glob(pattern)[0]
-
- @staticmethod
- def run_esdk_cmd(env_eSDK, tmpdir_eSDKQA, cmd, postconfig=None, **options):
- if postconfig:
- esdk_conf_file = os.path.join(tmpdir_eSDKQA, 'conf', 'local.conf')
- with open(esdk_conf_file, 'a+') as f:
- f.write(postconfig)
- if not options:
- options = {}
- if not 'shell' in options:
- options['shell'] = True
-
- runCmd("cd %s; unset BBPATH; unset BUILDDIR; . %s; %s" % (tmpdir_eSDKQA, env_eSDK, cmd), **options)
-
- @staticmethod
- def generate_eSDK(image):
- pn_task = '%s -c populate_sdk_ext' % image
- bitbake(pn_task)
-
- @staticmethod
- def get_eSDK_toolchain(image):
- pn_task = '%s -c populate_sdk_ext' % image
-
- bb_vars = get_bb_vars(['SDK_DEPLOY', 'TOOLCHAINEXT_OUTPUTNAME'], pn_task)
- sdk_deploy = bb_vars['SDK_DEPLOY']
- toolchain_name = bb_vars['TOOLCHAINEXT_OUTPUTNAME']
- return os.path.join(sdk_deploy, toolchain_name + '.sh')
-
- @staticmethod
- def update_configuration(cls, image, tmpdir_eSDKQA, env_eSDK, ext_sdk_path):
- sstate_dir = os.path.join(os.environ['BUILDDIR'], 'sstate-cache')
-
- oeSDKExtSelfTest.generate_eSDK(cls.image)
-
- cls.ext_sdk_path = oeSDKExtSelfTest.get_eSDK_toolchain(cls.image)
- runCmd("%s -y -d \"%s\"" % (cls.ext_sdk_path, cls.tmpdir_eSDKQA))
-
- cls.env_eSDK = oeSDKExtSelfTest.get_esdk_environment('', cls.tmpdir_eSDKQA)
-
- sstate_config="""
-ESDK_LOCALCONF_ALLOW = "SSTATE_MIRRORS"
-SSTATE_MIRRORS = "file://.* file://%s/PATH"
-CORE_IMAGE_EXTRA_INSTALL = "perl"
- """ % sstate_dir
-
- with open(os.path.join(cls.tmpdir_eSDKQA, 'conf', 'local.conf'), 'a+') as f:
- f.write(sstate_config)
-
- @classmethod
- def setUpClass(cls):
- super(oeSDKExtSelfTest, cls).setUpClass()
- cls.image = 'core-image-minimal'
-
- bb_vars = get_bb_vars(['SSTATE_DIR', 'WORKDIR'], cls.image)
- bb.utils.mkdirhier(bb_vars["WORKDIR"])
- cls.tmpdirobj = tempfile.TemporaryDirectory(prefix="selftest-esdk-", dir=bb_vars["WORKDIR"])
- cls.tmpdir_eSDKQA = cls.tmpdirobj.name
-
- oeSDKExtSelfTest.generate_eSDK(cls.image)
-
- # Install eSDK
- cls.ext_sdk_path = oeSDKExtSelfTest.get_eSDK_toolchain(cls.image)
- runCmd("%s -y -d \"%s\"" % (cls.ext_sdk_path, cls.tmpdir_eSDKQA))
-
- cls.env_eSDK = oeSDKExtSelfTest.get_esdk_environment('', cls.tmpdir_eSDKQA)
-
- # Configure eSDK to use sstate mirror from poky
- sstate_config="""
-ESDK_LOCALCONF_ALLOW = "SSTATE_MIRRORS"
-SSTATE_MIRRORS = "file://.* file://%s/PATH"
- """ % bb_vars["SSTATE_DIR"]
- with open(os.path.join(cls.tmpdir_eSDKQA, 'conf', 'local.conf'), 'a+') as f:
- f.write(sstate_config)
-
- @classmethod
- def tearDownClass(cls):
- for i in range(0, 10):
- if os.path.exists(os.path.join(cls.tmpdir_eSDKQA, 'bitbake.lock')) or os.path.exists(os.path.join(cls.tmpdir_eSDKQA, 'cache/hashserv.db-wal')):
- time.sleep(1)
- else:
- break
- cls.tmpdirobj.cleanup()
- super().tearDownClass()
-
- def test_install_libraries_headers(self):
- pn_sstate = 'bc'
- bitbake(pn_sstate)
- cmd = "devtool sdk-install %s " % pn_sstate
- oeSDKExtSelfTest.run_esdk_cmd(self.env_eSDK, self.tmpdir_eSDKQA, cmd)
-
- def test_image_generation_binary_feeds(self):
- image = 'core-image-minimal'
- cmd = "devtool build-image %s" % image
- oeSDKExtSelfTest.run_esdk_cmd(self.env_eSDK, self.tmpdir_eSDKQA, cmd)
-
diff --git a/meta/lib/oeqa/selftest/cases/efibootpartition.py b/meta/lib/oeqa/selftest/cases/efibootpartition.py
index 26de3a07c9..fa74103dec 100644
--- a/meta/lib/oeqa/selftest/cases/efibootpartition.py
+++ b/meta/lib/oeqa/selftest/cases/efibootpartition.py
@@ -5,42 +5,29 @@
# SPDX-License-Identifier: MIT
#
-import re
-
from oeqa.selftest.case import OESelftestTestCase
-from oeqa.utils.commands import bitbake, runqemu, get_bb_var
+from oeqa.utils.commands import bitbake, runqemu
+from oeqa.core.decorator.data import skipIfNotMachine
+import oe.types
class GenericEFITest(OESelftestTestCase):
"""EFI booting test class"""
+ @skipIfNotMachine("qemux86-64", "test is qemux86-64 specific currently")
+ def test_boot_efi(self):
+ cmd = "runqemu nographic serial wic ovmf"
+ if oe.types.qemu_use_kvm(self.td.get('QEMU_USE_KVM', 0), self.td["TARGET_ARCH"]):
+ cmd += " kvm"
+ image = "core-image-minimal"
- cmd_common = "runqemu nographic serial wic ovmf"
- efi_provider = "systemd-boot"
- image = "core-image-minimal"
- machine = "qemux86-64"
- recipes_built = False
-
- @classmethod
- def setUpLocal(self):
- super(GenericEFITest, self).setUpLocal(self)
-
- self.write_config(self,
-"""
-EFI_PROVIDER = "%s"
+ self.write_config("""
+EFI_PROVIDER = "systemd-boot"
IMAGE_FSTYPES:pn-%s:append = " wic"
-MACHINE = "%s"
MACHINE_FEATURES:append = " efi"
WKS_FILE = "efi-bootdisk.wks.in"
IMAGE_INSTALL:append = " grub-efi systemd-boot kernel-image-bzimage"
"""
-% (self.efi_provider, self.image, self.machine))
- if not self.recipes_built:
- bitbake("ovmf")
- bitbake(self.image)
- self.recipes_built = True
+% (image))
- @classmethod
- def test_boot_efi(self):
- """Test generic boot partition with qemu"""
- cmd = "%s %s" % (self.cmd_common, self.machine)
- with runqemu(self.image, ssh=False, launch_cmd=cmd) as qemu:
+ bitbake(image + " ovmf")
+ with runqemu(image, ssh=False, launch_cmd=cmd) as qemu:
self.assertTrue(qemu.runner.logged, "Failed: %s" % cmd)
diff --git a/meta/lib/oeqa/selftest/cases/esdk.py b/meta/lib/oeqa/selftest/cases/esdk.py
new file mode 100644
index 0000000000..9f5de2cde7
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/esdk.py
@@ -0,0 +1,122 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+import tempfile
+import shutil
+import os
+import glob
+import time
+from oeqa.selftest.case import OESelftestTestCase
+from oeqa.utils.commands import runCmd, bitbake, get_bb_vars
+
+class oeSDKExtSelfTest(OESelftestTestCase):
+ """
+ # Bugzilla Test Plan: 6033
+ # This code is planned to be part of the automation for eSDK containig
+ # Install libraries and headers, image generation binary feeds, sdk-update.
+ """
+
+ @staticmethod
+ def get_esdk_environment(env_eSDK, tmpdir_eSDKQA):
+ # XXX: at this time use the first env need to investigate
+ # what environment load oe-selftest, i586, x86_64
+ pattern = os.path.join(tmpdir_eSDKQA, 'environment-setup-*')
+ return glob.glob(pattern)[0]
+
+ @staticmethod
+ def run_esdk_cmd(env_eSDK, tmpdir_eSDKQA, cmd, postconfig=None, **options):
+ if postconfig:
+ esdk_conf_file = os.path.join(tmpdir_eSDKQA, 'conf', 'local.conf')
+ with open(esdk_conf_file, 'a+') as f:
+ f.write(postconfig)
+ if not options:
+ options = {}
+ if not 'shell' in options:
+ options['shell'] = True
+
+ runCmd("cd %s; unset BBPATH; unset BUILDDIR; . %s; %s" % (tmpdir_eSDKQA, env_eSDK, cmd), **options)
+
+ @staticmethod
+ def generate_eSDK(image):
+ pn_task = '%s -c populate_sdk_ext' % image
+ bitbake(pn_task)
+
+ @staticmethod
+ def get_eSDK_toolchain(image):
+ pn_task = '%s -c populate_sdk_ext' % image
+
+ bb_vars = get_bb_vars(['SDK_DEPLOY', 'TOOLCHAINEXT_OUTPUTNAME'], pn_task)
+ sdk_deploy = bb_vars['SDK_DEPLOY']
+ toolchain_name = bb_vars['TOOLCHAINEXT_OUTPUTNAME']
+ return os.path.join(sdk_deploy, toolchain_name + '.sh')
+
+ @staticmethod
+ def update_configuration(cls, image, tmpdir_eSDKQA, env_eSDK, ext_sdk_path):
+ sstate_dir = os.path.join(os.environ['BUILDDIR'], 'sstate-cache')
+
+ oeSDKExtSelfTest.generate_eSDK(cls.image)
+
+ cls.ext_sdk_path = oeSDKExtSelfTest.get_eSDK_toolchain(cls.image)
+ runCmd("%s -y -d \"%s\"" % (cls.ext_sdk_path, cls.tmpdir_eSDKQA))
+
+ cls.env_eSDK = oeSDKExtSelfTest.get_esdk_environment('', cls.tmpdir_eSDKQA)
+
+ sstate_config="""
+ESDK_LOCALCONF_ALLOW = "SSTATE_MIRRORS"
+SSTATE_MIRRORS = "file://.* file://%s/PATH"
+CORE_IMAGE_EXTRA_INSTALL = "perl"
+ """ % sstate_dir
+
+ with open(os.path.join(cls.tmpdir_eSDKQA, 'conf', 'local.conf'), 'a+') as f:
+ f.write(sstate_config)
+
+ @classmethod
+ def setUpClass(cls):
+ super(oeSDKExtSelfTest, cls).setUpClass()
+ cls.image = 'core-image-minimal'
+
+ bb_vars = get_bb_vars(['SSTATE_DIR', 'WORKDIR'], cls.image)
+ bb.utils.mkdirhier(bb_vars["WORKDIR"])
+ cls.tmpdirobj = tempfile.TemporaryDirectory(prefix="selftest-esdk-", dir=bb_vars["WORKDIR"])
+ cls.tmpdir_eSDKQA = cls.tmpdirobj.name
+
+ oeSDKExtSelfTest.generate_eSDK(cls.image)
+
+ # Install eSDK
+ cls.ext_sdk_path = oeSDKExtSelfTest.get_eSDK_toolchain(cls.image)
+ runCmd("%s -y -d \"%s\"" % (cls.ext_sdk_path, cls.tmpdir_eSDKQA))
+
+ cls.env_eSDK = oeSDKExtSelfTest.get_esdk_environment('', cls.tmpdir_eSDKQA)
+
+ # Configure eSDK to use sstate mirror from poky
+ sstate_config="""
+ESDK_LOCALCONF_ALLOW = "SSTATE_MIRRORS"
+SSTATE_MIRRORS = "file://.* file://%s/PATH"
+ """ % bb_vars["SSTATE_DIR"]
+ with open(os.path.join(cls.tmpdir_eSDKQA, 'conf', 'local.conf'), 'a+') as f:
+ f.write(sstate_config)
+
+ @classmethod
+ def tearDownClass(cls):
+ for i in range(0, 10):
+ if os.path.exists(os.path.join(cls.tmpdir_eSDKQA, 'bitbake.lock')) or os.path.exists(os.path.join(cls.tmpdir_eSDKQA, 'cache/hashserv.db-wal')):
+ time.sleep(1)
+ else:
+ break
+ cls.tmpdirobj.cleanup()
+ super().tearDownClass()
+
+ def test_install_libraries_headers(self):
+ pn_sstate = 'bc'
+ bitbake(pn_sstate)
+ cmd = "devtool sdk-install %s " % pn_sstate
+ oeSDKExtSelfTest.run_esdk_cmd(self.env_eSDK, self.tmpdir_eSDKQA, cmd)
+
+ def test_image_generation_binary_feeds(self):
+ image = 'core-image-minimal'
+ cmd = "devtool build-image %s" % image
+ oeSDKExtSelfTest.run_esdk_cmd(self.env_eSDK, self.tmpdir_eSDKQA, cmd)
+
diff --git a/meta/lib/oeqa/selftest/cases/externalsrc.py b/meta/lib/oeqa/selftest/cases/externalsrc.py
new file mode 100644
index 0000000000..1d800dc82c
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/externalsrc.py
@@ -0,0 +1,44 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+import os
+import shutil
+import tempfile
+
+from oeqa.selftest.case import OESelftestTestCase
+from oeqa.utils.commands import get_bb_var, runCmd
+
+class ExternalSrc(OESelftestTestCase):
+ # test that srctree_hash_files does not crash
+ # we should be actually checking do_compile[file-checksums] but oeqa currently does not support it
+ # so we check only that a recipe with externalsrc can be parsed
+ def test_externalsrc_srctree_hash_files(self):
+ test_recipe = "git-submodule-test"
+ git_url = "git://git.yoctoproject.org/git-submodule-test"
+ externalsrc_dir = tempfile.TemporaryDirectory(prefix="externalsrc").name
+
+ self.write_config(
+ """
+INHERIT += "externalsrc"
+EXTERNALSRC:pn-%s = "%s"
+""" % (test_recipe, externalsrc_dir)
+ )
+
+ # test with git without submodules
+ runCmd('git clone %s %s' % (git_url, externalsrc_dir))
+ os.unlink(externalsrc_dir + "/.gitmodules")
+ open(".gitmodules", 'w').close() # local file .gitmodules in cwd should not affect externalsrc parsing
+ self.assertEqual(get_bb_var("S", test_recipe), externalsrc_dir, msg = "S does not equal to EXTERNALSRC")
+ os.unlink(".gitmodules")
+
+ # test with git with submodules
+ runCmd('git checkout .gitmodules', cwd=externalsrc_dir)
+ runCmd('git submodule update --init --recursive', cwd=externalsrc_dir)
+ self.assertEqual(get_bb_var("S", test_recipe), externalsrc_dir, msg = "S does not equal to EXTERNALSRC")
+
+ # test without git
+ shutil.rmtree(os.path.join(externalsrc_dir, ".git"))
+ self.assertEqual(get_bb_var("S", test_recipe), externalsrc_dir, msg = "S does not equal to EXTERNALSRC")
diff --git a/meta/lib/oeqa/selftest/cases/fetch.py b/meta/lib/oeqa/selftest/cases/fetch.py
index be14272e63..44099176fc 100644
--- a/meta/lib/oeqa/selftest/cases/fetch.py
+++ b/meta/lib/oeqa/selftest/cases/fetch.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
@@ -34,6 +36,7 @@ PREMIRRORS:forcevariable = ""
# No mirrors and broken git, should fail
features = """
DL_DIR = "%s"
+SRC_URI:pn-dbus-wait = "git://git.yoctoproject.org/dbus-wait;branch=master;protocol=git"
GIT_PROXY_COMMAND = "false"
MIRRORS:forcevariable = ""
PREMIRRORS:forcevariable = ""
@@ -46,6 +49,7 @@ PREMIRRORS:forcevariable = ""
# Broken git but a specific mirror
features = """
DL_DIR = "%s"
+SRC_URI:pn-dbus-wait = "git://git.yoctoproject.org/dbus-wait;branch=master;protocol=git"
GIT_PROXY_COMMAND = "false"
MIRRORS:forcevariable = "git://.*/.* http://downloads.yoctoproject.org/mirror/sources/"
""" % dldir
@@ -99,7 +103,7 @@ class Dependencies(OESelftestTestCase):
r = """
LICENSE="CLOSED"
- SRC_URI="git://example.com/repo;branch=master"
+ SRC_URI="git://example.com/repo;branch=master;rev=ffffffffffffffffffffffffffffffffffffffff"
"""
f = self.write_recipe(textwrap.dedent(r), tempdir)
d = tinfoil.parse_recipe_file(f)
diff --git a/meta/lib/oeqa/selftest/cases/fitimage.py b/meta/lib/oeqa/selftest/cases/fitimage.py
index e6bfd1257e..347c065377 100644
--- a/meta/lib/oeqa/selftest/cases/fitimage.py
+++ b/meta/lib/oeqa/selftest/cases/fitimage.py
@@ -1,9 +1,11 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
from oeqa.selftest.case import OESelftestTestCase
-from oeqa.utils.commands import runCmd, bitbake, get_bb_var
+from oeqa.utils.commands import runCmd, bitbake, get_bb_var, get_bb_vars
import os
import re
@@ -31,6 +33,8 @@ KERNEL_CLASSES = " kernel-fitimage "
# RAM disk variables including load address and entrypoint for kernel and RAM disk
IMAGE_FSTYPES += "cpio.gz"
INITRAMFS_IMAGE = "core-image-minimal"
+# core-image-minimal is used as initramfs here, drop the rootfs suffix
+IMAGE_NAME_SUFFIX:pn-core-image-minimal = ""
UBOOT_RD_LOADADDRESS = "0x88000000"
UBOOT_RD_ENTRYPOINT = "0x88000000"
UBOOT_LOADADDRESS = "0x80080000"
@@ -40,15 +44,14 @@ FIT_DESC = "A model description"
self.write_config(config)
# fitImage is created as part of linux recipe
- bitbake("virtual/kernel")
+ image = "virtual/kernel"
+ bitbake(image)
+ bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'INITRAMFS_IMAGE_NAME', 'KERNEL_FIT_LINK_NAME'], image)
- image_type = "core-image-minimal"
- deploy_dir_image = get_bb_var('DEPLOY_DIR_IMAGE')
- machine = get_bb_var('MACHINE')
- fitimage_its_path = os.path.join(deploy_dir_image,
- "fitImage-its-%s-%s-%s" % (image_type, machine, machine))
- fitimage_path = os.path.join(deploy_dir_image,
- "fitImage-%s-%s-%s" % (image_type, machine, machine))
+ fitimage_its_path = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'],
+ "fitImage-its-%s-%s" % (bb_vars['INITRAMFS_IMAGE_NAME'], bb_vars['KERNEL_FIT_LINK_NAME']))
+ fitimage_path = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'],
+ "fitImage-%s-%s" % (bb_vars['INITRAMFS_IMAGE_NAME'], bb_vars['KERNEL_FIT_LINK_NAME']))
self.assertTrue(os.path.exists(fitimage_its_path),
"%s image tree source doesn't exist" % (fitimage_its_path))
@@ -121,15 +124,14 @@ UBOOT_MKIMAGE_SIGN_ARGS = "-c 'a smart comment'"
self.write_config(config)
# fitImage is created as part of linux recipe
- bitbake("virtual/kernel")
+ image = "virtual/kernel"
+ bitbake(image)
+ bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'KERNEL_FIT_LINK_NAME'], image)
- image_type = "core-image-minimal"
- deploy_dir_image = get_bb_var('DEPLOY_DIR_IMAGE')
- machine = get_bb_var('MACHINE')
- fitimage_its_path = os.path.join(deploy_dir_image,
- "fitImage-its-%s" % (machine,))
- fitimage_path = os.path.join(deploy_dir_image,
- "fitImage-%s.bin" % (machine,))
+ fitimage_its_path = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'],
+ "fitImage-its-%s" % (bb_vars['KERNEL_FIT_LINK_NAME']))
+ fitimage_path = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'],
+ "fitImage-%s.bin" % (bb_vars['KERNEL_FIT_LINK_NAME']))
self.assertTrue(os.path.exists(fitimage_its_path),
"%s image tree source doesn't exist" % (fitimage_its_path))
@@ -202,7 +204,7 @@ UBOOT_MKIMAGE_SIGN_ARGS = "-c 'a smart comment'"
signed_sections = {}
for line in result.output.splitlines():
if line.startswith((' Configuration', ' Image')):
- in_signed = re.search('\((.*)\)', line).groups()[0]
+ in_signed = re.search(r'\((.*)\)', line).groups()[0]
elif re.match('^ *', line) in (' ', ''):
in_signed = None
elif in_signed:
@@ -275,8 +277,8 @@ FIT_SIGN_INDIVIDUAL = "1"
"""
self.write_config(config)
- # The U-Boot fitImage is created as part of linux recipe
- bitbake("virtual/kernel")
+ # The U-Boot fitImage is created as part of the U-Boot recipe
+ bitbake("virtual/bootloader")
deploy_dir_image = get_bb_var('DEPLOY_DIR_IMAGE')
machine = get_bb_var('MACHINE')
@@ -348,7 +350,8 @@ UBOOT_LOADADDRESS = "0x80080000"
UBOOT_ENTRYPOINT = "0x80080000"
UBOOT_FIT_DESC = "A model description"
KERNEL_IMAGETYPES += " fitImage "
-KERNEL_CLASSES = " kernel-fitimage test-mkimage-wrapper "
+KERNEL_CLASSES = " kernel-fitimage "
+INHERIT += "test-mkimage-wrapper"
UBOOT_SIGN_ENABLE = "1"
FIT_GENERATE_KEYS = "1"
UBOOT_SIGN_KEYDIR = "${TOPDIR}/signing-keys"
@@ -359,8 +362,8 @@ UBOOT_MKIMAGE_SIGN_ARGS = "-c 'a smart U-Boot comment'"
"""
self.write_config(config)
- # The U-Boot fitImage is created as part of linux recipe
- bitbake("virtual/kernel")
+ # The U-Boot fitImage is created as part of the U-Boot recipe
+ bitbake("virtual/bootloader")
deploy_dir_image = get_bb_var('DEPLOY_DIR_IMAGE')
machine = get_bb_var('MACHINE')
@@ -430,7 +433,8 @@ UBOOT_MACHINE = "am57xx_evm_defconfig"
SPL_BINARY = "MLO"
# The kernel-fitimage class is a dependency even if we're only
# creating/signing the U-Boot fitImage
-KERNEL_CLASSES = " kernel-fitimage test-mkimage-wrapper "
+KERNEL_CLASSES = " kernel-fitimage"
+INHERIT += "test-mkimage-wrapper"
# Enable creation and signing of the U-Boot fitImage
UBOOT_FITIMAGE_ENABLE = "1"
SPL_SIGN_ENABLE = "1"
@@ -449,8 +453,8 @@ UBOOT_FIT_HASH_ALG = "sha256"
"""
self.write_config(config)
- # The U-Boot fitImage is created as part of linux recipe
- bitbake("virtual/kernel")
+ # The U-Boot fitImage is created as part of the U-Boot recipe
+ bitbake("virtual/bootloader")
image_type = "core-image-minimal"
deploy_dir_image = get_bb_var('DEPLOY_DIR_IMAGE')
@@ -521,7 +525,7 @@ UBOOT_FIT_HASH_ALG = "sha256"
signed_sections = {}
for line in result.output.splitlines():
if line.startswith((' Image')):
- in_signed = re.search('\((.*)\)', line).groups()[0]
+ in_signed = re.search(r'\((.*)\)', line).groups()[0]
elif re.match(' \w', line):
in_signed = None
elif in_signed:
@@ -538,7 +542,7 @@ UBOOT_FIT_HASH_ALG = "sha256"
self.assertEqual(len(value), 512, 'Signature value for section %s not expected length' % signed_section)
# Check for SPL_MKIMAGE_SIGN_ARGS
- result = runCmd('bitbake -e virtual/kernel | grep ^T=')
+ result = runCmd('bitbake -e virtual/bootloader | grep ^T=')
tempdir = result.output.split('=', 1)[1].strip().strip('')
result = runCmd('grep "a smart U-Boot comment" %s/run.do_uboot_assemble_fitimage' % tempdir, ignore_status=True)
self.assertEqual(result.status, 0, 'SPL_MKIMAGE_SIGN_ARGS value did not get used')
@@ -593,7 +597,8 @@ UBOOT_EXTLINUX = "0"
UBOOT_FIT_GENERATE_KEYS = "1"
UBOOT_FIT_HASH_ALG = "sha256"
KERNEL_IMAGETYPES += " fitImage "
-KERNEL_CLASSES = " kernel-fitimage test-mkimage-wrapper "
+KERNEL_CLASSES = " kernel-fitimage "
+INHERIT += "test-mkimage-wrapper"
UBOOT_SIGN_ENABLE = "1"
FIT_GENERATE_KEYS = "1"
UBOOT_SIGN_KEYDIR = "${TOPDIR}/signing-keys"
@@ -603,8 +608,8 @@ FIT_SIGN_INDIVIDUAL = "1"
"""
self.write_config(config)
- # The U-Boot fitImage is created as part of linux recipe
- bitbake("virtual/kernel")
+ # The U-Boot fitImage is created as part of the U-Boot recipe
+ bitbake("virtual/bootloader")
image_type = "core-image-minimal"
deploy_dir_image = get_bb_var('DEPLOY_DIR_IMAGE')
@@ -675,7 +680,7 @@ FIT_SIGN_INDIVIDUAL = "1"
signed_sections = {}
for line in result.output.splitlines():
if line.startswith((' Image')):
- in_signed = re.search('\((.*)\)', line).groups()[0]
+ in_signed = re.search(r'\((.*)\)', line).groups()[0]
elif re.match(' \w', line):
in_signed = None
elif in_signed:
@@ -692,7 +697,7 @@ FIT_SIGN_INDIVIDUAL = "1"
self.assertEqual(len(value), 512, 'Signature value for section %s not expected length' % signed_section)
# Check for SPL_MKIMAGE_SIGN_ARGS
- result = runCmd('bitbake -e virtual/kernel | grep ^T=')
+ result = runCmd('bitbake -e virtual/bootloader | grep ^T=')
tempdir = result.output.split('=', 1)[1].strip().strip('')
result = runCmd('grep "a smart cascaded U-Boot comment" %s/run.do_uboot_assemble_fitimage' % tempdir, ignore_status=True)
self.assertEqual(result.status, 0, 'SPL_MKIMAGE_SIGN_ARGS value did not get used')
@@ -738,6 +743,7 @@ UBOOT_LOADADDRESS = "0x80000000"
UBOOT_DTB_LOADADDRESS = "0x82000000"
UBOOT_ARCH = "arm"
UBOOT_MKIMAGE_DTCOPTS = "-I dts -O dtb -p 2000"
+UBOOT_MKIMAGE_KERNEL_TYPE = "kernel"
UBOOT_EXTLINUX = "0"
FIT_GENERATE_KEYS = "1"
KERNEL_IMAGETYPE_REPLACEMENT = "zImage"
@@ -763,6 +769,7 @@ FIT_HASH_ALG = "sha256"
kernel_load = str(get_bb_var('UBOOT_LOADADDRESS'))
kernel_entry = str(get_bb_var('UBOOT_ENTRYPOINT'))
+ kernel_type = str(get_bb_var('UBOOT_MKIMAGE_KERNEL_TYPE'))
kernel_compression = str(get_bb_var('FIT_KERNEL_COMP_ALG'))
uboot_arch = str(get_bb_var('UBOOT_ARCH'))
fit_hash_alg = str(get_bb_var('FIT_HASH_ALG'))
@@ -775,7 +782,7 @@ FIT_HASH_ALG = "sha256"
'kernel-1 {',
'description = "Linux kernel";',
'data = /incbin/("linux.bin");',
- 'type = "kernel";',
+ 'type = "' + kernel_type + '";',
'arch = "' + uboot_arch + '";',
'os = "linux";',
'compression = "' + kernel_compression + '";',
diff --git a/meta/lib/oeqa/selftest/cases/gcc.py b/meta/lib/oeqa/selftest/cases/gcc.py
index b9ea03ae62..89360178fe 100644
--- a/meta/lib/oeqa/selftest/cases/gcc.py
+++ b/meta/lib/oeqa/selftest/cases/gcc.py
@@ -1,5 +1,10 @@
+#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
+#
import os
+import time
from oeqa.core.decorator import OETestTag
from oeqa.core.case import OEPTestResultTestCase
from oeqa.selftest.case import OESelftestTestCase
@@ -39,8 +44,13 @@ class GccSelfTestBase(OESelftestTestCase, OEPTestResultTestCase):
self.write_config("\n".join(features))
recipe = "gcc-runtime"
+
+ start_time = time.time()
+
bitbake("{} -c check".format(recipe))
+ end_time = time.time()
+
bb_vars = get_bb_vars(["B", "TARGET_SYS"], recipe)
builddir, target_sys = bb_vars["B"], bb_vars["TARGET_SYS"]
@@ -54,7 +64,7 @@ class GccSelfTestBase(OESelftestTestCase, OEPTestResultTestCase):
ptestsuite = "gcc-{}".format(suite) if suite != "gcc" else suite
ptestsuite = ptestsuite + "-user" if ssh is None else ptestsuite
- self.ptest_section(ptestsuite, logfile = logpath)
+ self.ptest_section(ptestsuite, duration = int(end_time - start_time), logfile = logpath)
with open(sumspath, "r") as f:
for test, result in parse_values(f):
self.ptest_result(ptestsuite, test, result)
diff --git a/meta/lib/oeqa/selftest/cases/gdbserver.py b/meta/lib/oeqa/selftest/cases/gdbserver.py
new file mode 100644
index 0000000000..9da97ae780
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/gdbserver.py
@@ -0,0 +1,67 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+import os
+import time
+import tempfile
+import shutil
+import concurrent.futures
+
+from oeqa.selftest.case import OESelftestTestCase
+from oeqa.utils.commands import bitbake, get_bb_var, get_bb_vars , runqemu, runCmd
+
+class GdbServerTest(OESelftestTestCase):
+ def test_gdb_server(self):
+ target_arch = self.td["TARGET_ARCH"]
+ target_sys = self.td["TARGET_SYS"]
+
+ features = """
+IMAGE_GEN_DEBUGFS = "1"
+IMAGE_FSTYPES_DEBUGFS = "tar.bz2"
+CORE_IMAGE_EXTRA_INSTALL = "gdbserver"
+ """
+ self.write_config(features)
+
+ gdb_recipe = "gdb-cross-" + target_arch
+ gdb_binary = target_sys + "-gdb"
+
+ bitbake("core-image-minimal %s:do_addto_recipe_sysroot" % gdb_recipe)
+
+ native_sysroot = get_bb_var("RECIPE_SYSROOT_NATIVE", gdb_recipe)
+ r = runCmd("%s --version" % gdb_binary, native_sysroot=native_sysroot, target_sys=target_sys)
+ self.assertEqual(r.status, 0)
+ self.assertIn("GNU gdb", r.output)
+ image = 'core-image-minimal'
+ bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'IMAGE_LINK_NAME'], image)
+
+ with tempfile.TemporaryDirectory(prefix="debugfs-") as debugfs:
+ filename = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], "%s-dbg.tar.bz2" % bb_vars['IMAGE_LINK_NAME'])
+ shutil.unpack_archive(filename, debugfs)
+ filename = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], "%s.tar.bz2" % bb_vars['IMAGE_LINK_NAME'])
+ shutil.unpack_archive(filename, debugfs)
+
+ with runqemu("core-image-minimal", runqemuparams="nographic") as qemu:
+ status, output = qemu.run_serial("kmod --help")
+ self.assertIn("modprobe", output)
+
+ with concurrent.futures.ThreadPoolExecutor(max_workers=1) as executor:
+ def run_gdb():
+ for _ in range(5):
+ time.sleep(2)
+ cmd = "%s --batch -ex 'set sysroot %s' -ex \"target extended-remote %s:9999\" -ex \"info line kmod_help\"" % (gdb_binary, debugfs, qemu.ip)
+ self.logger.warning("starting gdb %s" % cmd)
+ r = runCmd(cmd, native_sysroot=native_sysroot, target_sys=target_sys)
+ self.assertEqual(0, r.status)
+ line_re = r"Line \d+ of \"/usr/src/debug/kmod/.*/tools/kmod.c\" starts at address 0x[0-9A-Fa-f]+ <kmod_help>"
+ self.assertRegex(r.output, line_re)
+ break
+ else:
+ self.fail("Timed out connecting to gdb")
+ future = executor.submit(run_gdb)
+
+ status, output = qemu.run_serial("gdbserver --once :9999 kmod --help")
+ self.assertEqual(status, 1)
+ # The future either returns None, or raises an exception
+ future.result()
diff --git a/meta/lib/oeqa/selftest/cases/git.py b/meta/lib/oeqa/selftest/cases/git.py
deleted file mode 100644
index f12874dc7d..0000000000
--- a/meta/lib/oeqa/selftest/cases/git.py
+++ /dev/null
@@ -1,15 +0,0 @@
-from oeqa.selftest.case import OESelftestTestCase
-from oeqa.utils.commands import bitbake
-
-class GitCheck(OESelftestTestCase):
- def test_git_intercept(self):
- """
- Git binaries with CVE-2022-24765 fixed will refuse to operate on a
- repository which is owned by a different user. This breaks our
- do_install task as that runs inside pseudo, so the git repository is
- owned by the build user but git is running as (fake)root.
-
- We have an intercept which disables pseudo, so verify that it works.
- """
- bitbake("git-submodule-test -c test_git_as_user")
- bitbake("git-submodule-test -c test_git_as_root")
diff --git a/meta/lib/oeqa/selftest/cases/gitarchivetests.py b/meta/lib/oeqa/selftest/cases/gitarchivetests.py
new file mode 100644
index 0000000000..71382089c1
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/gitarchivetests.py
@@ -0,0 +1,136 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+import os
+import sys
+basepath = os.path.abspath(os.path.dirname(__file__) + '/../../../../../')
+lib_path = basepath + '/scripts/lib'
+sys.path = sys.path + [lib_path]
+import oeqa.utils.gitarchive as ga
+from oeqa.utils.git import GitError
+import tempfile
+import shutil
+import scriptutils
+import logging
+from oeqa.selftest.case import OESelftestTestCase
+
+logger = scriptutils.logger_create('resulttool')
+
+def create_fake_repository(commit, tag_list=[], add_remote=True):
+ """ Create a testing git directory
+
+ Initialize a simple git repository with one initial commit, and as many
+ tags on this commit as listed in tag_list
+ Returns both git directory path and gitarchive git object
+ If commit is true, fake data will be commited, otherwise it will stay in staging area
+ If commit is true and tag_lsit is non empty, all tags in tag_list will be
+ created on the initial commit
+ Fake remote will also be added to make git ls-remote work
+ """
+ fake_data_file = "fake_data.txt"
+ tempdir = tempfile.mkdtemp(prefix='fake_results.')
+ repo = ga.init_git_repo(tempdir, False, False, logger)
+ if add_remote:
+ repo.run_cmd(["remote", "add", "origin", "."])
+ with open(os.path.join(tempdir, fake_data_file), "w") as fake_data:
+ fake_data.write("Fake data")
+ if commit:
+ repo.run_cmd(["add", fake_data_file])
+ repo.run_cmd(["commit", "-m", "\"Add fake data\""])
+ for tag in tag_list:
+ repo.run_cmd(["tag", tag])
+
+ return tempdir, repo
+
+def delete_fake_repository(path):
+ shutil.rmtree(path)
+
+def tag_exists(git_obj, target_tag):
+ for tag in git_obj.run_cmd(["tag"]).splitlines():
+ if target_tag == tag:
+ return True
+ return False
+
+class GitArchiveTests(OESelftestTestCase):
+ TEST_BRANCH="main"
+ TEST_COMMIT="0f7d5df"
+ TEST_COMMIT_COUNT="42"
+
+ @classmethod
+ def setUpClass(cls):
+ super().setUpClass()
+ cls.log = logging.getLogger('gitarchivetests')
+ cls.log.setLevel(logging.DEBUG)
+
+ def test_create_first_test_tag(self):
+ path, git_obj = create_fake_repository(False)
+ keywords = {'commit': self.TEST_COMMIT, 'branch': self.TEST_BRANCH, "commit_count": self.TEST_COMMIT_COUNT}
+ target_tag = f"{self.TEST_BRANCH}/{self.TEST_COMMIT_COUNT}-g{self.TEST_COMMIT}/0"
+
+ ga.gitarchive(path, path, True, False,
+ "Results of {branch}:{commit}", "branch: {branch}\ncommit: {commit}", "{branch}",
+ False, "{branch}/{commit_count}-g{commit}/{tag_number}",
+ 'Test run #{tag_number} of {branch}:{commit}', '',
+ [], [], False, keywords, logger)
+ self.assertTrue(tag_exists(git_obj, target_tag), msg=f"Tag {target_tag} has not been created")
+ delete_fake_repository(path)
+
+ def test_create_second_test_tag(self):
+ first_tag = f"{self.TEST_BRANCH}/{self.TEST_COMMIT_COUNT}-g{self.TEST_COMMIT}/0"
+ second_tag = f"{self.TEST_BRANCH}/{self.TEST_COMMIT_COUNT}-g{self.TEST_COMMIT}/1"
+ keywords = {'commit': self.TEST_COMMIT, 'branch': self.TEST_BRANCH, "commit_count": self.TEST_COMMIT_COUNT}
+
+ path, git_obj = create_fake_repository(True, [first_tag])
+ ga.gitarchive(path, path, True, False,
+ "Results of {branch}:{commit}", "branch: {branch}\ncommit: {commit}", "{branch}",
+ False, "{branch}/{commit_count}-g{commit}/{tag_number}",
+ 'Test run #{tag_number} of {branch}:{commit}', '',
+ [], [], False, keywords, logger)
+ self.assertTrue(tag_exists(git_obj, second_tag), msg=f"Second tag {second_tag} has not been created")
+ delete_fake_repository(path)
+
+ def test_get_revs_on_branch(self):
+ fake_tags_list=["main/10-g0f7d5df/0", "main/10-g0f7d5df/1", "foo/20-g2468f5d/0"]
+ tag_name = "{branch}/{commit_number}-g{commit}/{tag_number}"
+
+ path, git_obj = create_fake_repository(True, fake_tags_list)
+ revs = ga.get_test_revs(logger, git_obj, tag_name, branch="main")
+ self.assertEqual(len(revs), 1)
+ self.assertEqual(revs[0].commit, "0f7d5df")
+ self.assertEqual(len(revs[0].tags), 2)
+ self.assertEqual(revs[0].tags, ['main/10-g0f7d5df/0', 'main/10-g0f7d5df/1'])
+ delete_fake_repository(path)
+
+ def test_get_tags_without_valid_remote(self):
+ url = 'git://git.yoctoproject.org/poky'
+ path, git_obj = create_fake_repository(False, None, False)
+
+ tags = ga.get_tags(git_obj, self.log, pattern="yocto-*", url=url)
+ """Test for some well established tags (released tags)"""
+ self.assertIn("yocto-4.0", tags)
+ self.assertIn("yocto-4.1", tags)
+ self.assertIn("yocto-4.2", tags)
+ delete_fake_repository(path)
+
+ def test_get_tags_with_only_local_tag(self):
+ fake_tags_list=["main/10-g0f7d5df/0", "main/10-g0f7d5df/1", "foo/20-g2468f5d/0"]
+ path, git_obj = create_fake_repository(True, fake_tags_list, False)
+
+ """No remote is configured and no url is passed: get_tags must fall
+ back to local tags
+ """
+ tags = ga.get_tags(git_obj, self.log)
+ self.assertCountEqual(tags, fake_tags_list)
+ delete_fake_repository(path)
+
+ def test_get_tags_without_valid_remote_and_wrong_url(self):
+ url = 'git://git.foo.org/bar'
+ path, git_obj = create_fake_repository(False, None, False)
+
+ """Test for some well established tags (released tags)"""
+ with self.assertRaises(GitError):
+ tags = ga.get_tags(git_obj, self.log, pattern="yocto-*", url=url)
+ delete_fake_repository(path)
diff --git a/meta/lib/oeqa/selftest/cases/glibc.py b/meta/lib/oeqa/selftest/cases/glibc.py
index 6fc98e9cb4..bd56b2f6e7 100644
--- a/meta/lib/oeqa/selftest/cases/glibc.py
+++ b/meta/lib/oeqa/selftest/cases/glibc.py
@@ -1,5 +1,10 @@
+#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
+#
import os
+import time
import contextlib
from oeqa.core.decorator import OETestTag
from oeqa.core.case import OEPTestResultTestCase
@@ -24,15 +29,19 @@ class GlibcSelfTestBase(OESelftestTestCase, OEPTestResultTestCase):
features.append('TOOLCHAIN_TEST_HOST_USER = "root"')
features.append('TOOLCHAIN_TEST_HOST_PORT = "22"')
# force single threaded test execution
- features.append('EGLIBCPARALLELISM_task-check:pn-glibc-testsuite = "PARALLELMFLAGS="-j1""')
+ features.append('EGLIBCPARALLELISM:task-check:pn-glibc-testsuite = "PARALLELMFLAGS="-j1""')
self.write_config("\n".join(features))
+ start_time = time.time()
+
bitbake("glibc-testsuite -c check")
+ end_time = time.time()
+
builddir = get_bb_var("B", "glibc-testsuite")
ptestsuite = "glibc-user" if ssh is None else "glibc"
- self.ptest_section(ptestsuite)
+ self.ptest_section(ptestsuite, duration = int(end_time - start_time))
with open(os.path.join(builddir, "tests.sum"), "r", errors='replace') as f:
for test, result in parse_values(f):
self.ptest_result(ptestsuite, test, result)
@@ -41,7 +50,7 @@ class GlibcSelfTestBase(OESelftestTestCase, OEPTestResultTestCase):
with contextlib.ExitStack() as s:
# use the base work dir, as the nfs mount, since the recipe directory may not exist
tmpdir = get_bb_var("BASE_WORKDIR")
- nfsport, mountport = s.enter_context(unfs_server(tmpdir))
+ nfsport, mountport = s.enter_context(unfs_server(tmpdir, udp = False))
# build core-image-minimal with required packages
default_installed_packages = [
@@ -61,7 +70,7 @@ class GlibcSelfTestBase(OESelftestTestCase, OEPTestResultTestCase):
bitbake("core-image-minimal")
# start runqemu
- qemu = s.enter_context(runqemu("core-image-minimal", runqemuparams = "nographic"))
+ qemu = s.enter_context(runqemu("core-image-minimal", runqemuparams = "nographic", qemuparams = "-m 1024"))
# validate that SSH is working
status, _ = qemu.run("uname")
@@ -70,7 +79,7 @@ class GlibcSelfTestBase(OESelftestTestCase, OEPTestResultTestCase):
# setup nfs mount
if qemu.run("mkdir -p \"{0}\"".format(tmpdir))[0] != 0:
raise Exception("Failed to setup NFS mount directory on target")
- mountcmd = "mount -o noac,nfsvers=3,port={0},udp,mountport={1} \"{2}:{3}\" \"{3}\"".format(nfsport, mountport, qemu.server_ip, tmpdir)
+ mountcmd = "mount -o noac,nfsvers=3,port={0},mountport={1} \"{2}:{3}\" \"{3}\"".format(nfsport, mountport, qemu.server_ip, tmpdir)
status, output = qemu.run(mountcmd)
if status != 0:
raise Exception("Failed to setup NFS mount on target ({})".format(repr(output)))
diff --git a/meta/lib/oeqa/selftest/cases/gotoolchain.py b/meta/lib/oeqa/selftest/cases/gotoolchain.py
index c809d7c9b1..ee2cf4b09a 100644
--- a/meta/lib/oeqa/selftest/cases/gotoolchain.py
+++ b/meta/lib/oeqa/selftest/cases/gotoolchain.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
@@ -43,12 +45,6 @@ class oeGoToolchainSelfTest(OESelftestTestCase):
@classmethod
def tearDownClass(cls):
- # Go creates file which are readonly
- for dirpath, dirnames, filenames in os.walk(cls.tmpdir_SDKQA):
- for filename in filenames + dirnames:
- f = os.path.join(dirpath, filename)
- if not os.path.islink(f):
- os.chmod(f, 0o775)
shutil.rmtree(cls.tmpdir_SDKQA, ignore_errors=True)
super(oeGoToolchainSelfTest, cls).tearDownClass()
@@ -56,6 +52,9 @@ class oeGoToolchainSelfTest(OESelftestTestCase):
cmd = "cd %s/src/%s/%s; " % (self.go_path, proj, name)
cmd = cmd + ". %s; " % self.env_SDK
cmd = cmd + "export GOPATH=%s; " % self.go_path
+ cmd = cmd + "export GOFLAGS=-modcacherw; "
+ cmd = cmd + "export CGO_ENABLED=1; "
+ cmd = cmd + "export GOPROXY=https://proxy.golang.org,direct; "
cmd = cmd + "${CROSS_COMPILE}go %s" % gocmd
return runCmd(cmd).status
diff --git a/meta/lib/oeqa/selftest/cases/image_typedep.py b/meta/lib/oeqa/selftest/cases/image_typedep.py
index 5b182a8f94..17c98baf14 100644
--- a/meta/lib/oeqa/selftest/cases/image_typedep.py
+++ b/meta/lib/oeqa/selftest/cases/image_typedep.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
diff --git a/meta/lib/oeqa/selftest/cases/imagefeatures.py b/meta/lib/oeqa/selftest/cases/imagefeatures.py
index 6d010b3e3a..dc88c222bd 100644
--- a/meta/lib/oeqa/selftest/cases/imagefeatures.py
+++ b/meta/lib/oeqa/selftest/cases/imagefeatures.py
@@ -1,10 +1,12 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
from oeqa.selftest.case import OESelftestTestCase
from oeqa.core.decorator import OETestTag
-from oeqa.utils.commands import runCmd, bitbake, get_bb_var, runqemu
+from oeqa.utils.commands import runCmd, bitbake, get_bb_var, get_bb_vars, runqemu
from oeqa.utils.sshcontrol import SSHControl
import glob
import os
@@ -100,12 +102,11 @@ class ImageFeatures(OESelftestTestCase):
features = 'IMAGE_FSTYPES += " ext4 ext4.bmap ext4.bmap.gz"'
self.write_config(features)
- image_name = 'core-image-minimal'
- bitbake(image_name)
+ image = 'core-image-minimal'
+ bitbake(image)
+ bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'IMAGE_LINK_NAME'], image)
- deploy_dir_image = get_bb_var('DEPLOY_DIR_IMAGE')
- link_name = get_bb_var('IMAGE_LINK_NAME', image_name)
- image_path = os.path.join(deploy_dir_image, "%s.ext4" % link_name)
+ image_path = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], "%s.ext4" % bb_vars['IMAGE_LINK_NAME'])
bmap_path = "%s.bmap" % image_path
gzip_path = "%s.gz" % bmap_path
@@ -118,8 +119,8 @@ class ImageFeatures(OESelftestTestCase):
image_stat = os.stat(image_path)
self.assertGreater(image_stat.st_size, image_stat.st_blocks * 512)
- # check if the resulting gzip is valid
- self.assertTrue(runCmd('gzip -t %s' % gzip_path))
+ # check if the resulting gzip is valid, --force is needed in case gzip_path is a symlink
+ self.assertTrue(runCmd('gzip --test --force %s' % gzip_path))
def test_hypervisor_fmts(self):
"""
@@ -134,17 +135,16 @@ class ImageFeatures(OESelftestTestCase):
img_types = [ 'vmdk', 'vdi', 'qcow2' ]
features = ""
for itype in img_types:
- features += 'IMAGE_FSTYPES += "wic.%s"\n' % itype
+ features += 'IMAGE_FSTYPES += "ext4.%s"\n' % itype
self.write_config(features)
- image_name = 'core-image-minimal'
- bitbake(image_name)
+ image = 'core-image-minimal'
+ bitbake(image)
+ bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'IMAGE_LINK_NAME'], image)
- deploy_dir_image = get_bb_var('DEPLOY_DIR_IMAGE')
- link_name = get_bb_var('IMAGE_LINK_NAME', image_name)
for itype in img_types:
- image_path = os.path.join(deploy_dir_image, "%s.wic.%s" %
- (link_name, itype))
+ image_path = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], "%s.ext4.%s" %
+ (bb_vars['IMAGE_LINK_NAME'], itype))
# check if result image file is in deploy directory
self.assertTrue(os.path.exists(image_path))
@@ -164,24 +164,22 @@ class ImageFeatures(OESelftestTestCase):
"""
Summary: Check for chaining many CONVERSION_CMDs together
Expected: 1. core-image-minimal can be built with
- ext4.bmap.gz.bz2.lzo.xz.u-boot and also create a
+ ext4.bmap.gz.bz2.zst.xz.u-boot and also create a
sha256sum
2. The above image has a valid sha256sum
Product: oe-core
Author: Tom Rini <trini@konsulko.com>
"""
- conv = "ext4.bmap.gz.bz2.lzo.xz.u-boot"
+ conv = "ext4.bmap.gz.bz2.zst.xz.u-boot"
features = 'IMAGE_FSTYPES += "%s %s.sha256sum"' % (conv, conv)
self.write_config(features)
- image_name = 'core-image-minimal'
- bitbake(image_name)
-
- deploy_dir_image = get_bb_var('DEPLOY_DIR_IMAGE')
- link_name = get_bb_var('IMAGE_LINK_NAME', image_name)
- image_path = os.path.join(deploy_dir_image, "%s.%s" %
- (link_name, conv))
+ image = 'core-image-minimal'
+ bitbake(image)
+ bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'IMAGE_LINK_NAME'], image)
+ image_path = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], "%s.%s" %
+ (bb_vars['IMAGE_LINK_NAME'], conv))
# check if resulting image is in the deploy directory
self.assertTrue(os.path.exists(image_path))
@@ -189,7 +187,7 @@ class ImageFeatures(OESelftestTestCase):
# check if the resulting sha256sum agrees
self.assertTrue(runCmd('cd %s;sha256sum -c %s.%s.sha256sum' %
- (deploy_dir_image, link_name, conv)))
+ (bb_vars['DEPLOY_DIR_IMAGE'], bb_vars['IMAGE_LINK_NAME'], conv)))
def test_image_fstypes(self):
"""
@@ -198,26 +196,43 @@ class ImageFeatures(OESelftestTestCase):
Product: oe-core
Author: Ed Bartosh <ed.bartosh@linux.intel.com>
"""
- image_name = 'core-image-minimal'
+ image = 'core-image-minimal'
- all_image_types = set(get_bb_var("IMAGE_TYPES", image_name).split())
- skip_image_types = set(('container', 'elf', 'f2fs', 'multiubi', 'tar.zst', 'wic.zst'))
+ all_image_types = set(get_bb_var("IMAGE_TYPES", image).split())
+ skip_image_types = set(('container', 'elf', 'f2fs', 'tar.zst', 'wic.zst', 'squashfs-lzo', 'vfat'))
img_types = all_image_types - skip_image_types
- config = 'IMAGE_FSTYPES += "%s"\n'\
- 'MKUBIFS_ARGS ?= "-m 2048 -e 129024 -c 2047"\n'\
- 'UBINIZE_ARGS ?= "-m 2048 -p 128KiB -s 512"' % ' '.join(img_types)
+ config = """
+IMAGE_FSTYPES += "%s"
+WKS_FILE = "wictestdisk.wks"
+MKUBIFS_ARGS ?= "-m 2048 -e 129024 -c 2047"
+UBINIZE_ARGS ?= "-m 2048 -p 128KiB -s 512"
+MULTIUBI_BUILD += "mtd_2_128"
+MKUBIFS_ARGS_mtd_2_128 ?= "-m 2048 -e 129024 -c 2047"
+UBINIZE_ARGS_mtd_2_128 ?= "-m 2048 -p 128KiB -s 512"
+MULTIUBI_BUILD += "mtd_4_256"
+MKUBIFS_ARGS_mtd_4_256 ?= "-m 4096 -e 253952 -c 4096"
+UBINIZE_ARGS_mtd_4_256 ?= "-m 4096 -p 256KiB"
+""" % ' '.join(img_types)
self.write_config(config)
- bitbake(image_name)
+ bitbake(image)
+ bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'IMAGE_LINK_NAME', 'MULTIUBI_BUILD'], image)
- deploy_dir_image = get_bb_var('DEPLOY_DIR_IMAGE')
- link_name = get_bb_var('IMAGE_LINK_NAME', image_name)
for itype in img_types:
- image_path = os.path.join(deploy_dir_image, "%s.%s" % (link_name, itype))
- # check if result image is in deploy directory
- self.assertTrue(os.path.exists(image_path),
- "%s image %s doesn't exist" % (itype, image_path))
+ if itype == 'multiubi':
+ # For multiubi build we need to manage MULTIUBI_BUILD entry to append
+ # specific name to IMAGE_LINK_NAME
+ for vname in bb_vars['MULTIUBI_BUILD'].split():
+ image_path = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], "%s_%s.ubifs" % (bb_vars['IMAGE_LINK_NAME'], vname))
+ # check if result image is in deploy directory
+ self.assertTrue(os.path.exists(image_path),
+ "%s image %s doesn't exist" % (itype, image_path))
+ else:
+ image_path = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], "%s.%s" % (bb_vars['IMAGE_LINK_NAME'], itype))
+ # check if result image is in deploy directory
+ self.assertTrue(os.path.exists(image_path),
+ "%s image %s doesn't exist" % (itype, image_path))
def test_useradd_static(self):
config = """
@@ -235,7 +250,7 @@ USERADD_GID_TABLES += "files/static-group"
DISTRO_FEATURES:append = " pam opengl wayland"
# Switch to systemd
-DISTRO_FEATURES:append = " systemd"
+DISTRO_FEATURES:append = " systemd usrmerge"
VIRTUAL-RUNTIME_init_manager = "systemd"
VIRTUAL-RUNTIME_initscripts = ""
VIRTUAL-RUNTIME_syslog = ""
@@ -266,20 +281,20 @@ SKIP_RECIPE[busybox] = "Don't build this"
Yeoh Ee Peng <ee.peng.yeoh@intel.com>
"""
- image_name = 'core-image-minimal'
+ image = 'core-image-minimal'
+ image_fstypes_debugfs = 'tar.bz2'
features = 'IMAGE_GEN_DEBUGFS = "1"\n'
- features += 'IMAGE_FSTYPES_DEBUGFS = "tar.bz2"\n'
- features += 'MACHINE = "genericx86-64"\n'
+ features += 'IMAGE_FSTYPES_DEBUGFS = "%s"\n' % image_fstypes_debugfs
self.write_config(features)
- bitbake(image_name)
- deploy_dir_image = get_bb_var('DEPLOY_DIR_IMAGE')
- dbg_tar_file = os.path.join(deploy_dir_image, "*-dbg.rootfs.tar.bz2")
- debug_files = glob.glob(dbg_tar_file)
- self.assertNotEqual(len(debug_files), 0, 'debug filesystem not generated at %s' % dbg_tar_file)
- result = runCmd('cd %s; tar xvf %s' % (deploy_dir_image, dbg_tar_file))
+ bitbake(image)
+ bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'IMAGE_LINK_NAME'], image)
+
+ dbg_tar_file = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], "%s-dbg.%s" % (bb_vars['IMAGE_LINK_NAME'], image_fstypes_debugfs))
+ self.assertTrue(os.path.exists(dbg_tar_file), 'debug filesystem not generated at %s' % dbg_tar_file)
+ result = runCmd('cd %s; tar xvf %s' % (bb_vars['DEPLOY_DIR_IMAGE'], dbg_tar_file))
self.assertEqual(result.status, 0, msg='Failed to extract %s: %s' % (dbg_tar_file, result.output))
- result = runCmd('find %s -name %s' % (deploy_dir_image, "udevadm"))
+ result = runCmd('find %s -name %s' % (bb_vars['DEPLOY_DIR_IMAGE'], "udevadm"))
self.assertTrue("udevadm" in result.output, msg='Failed to find udevadm: %s' % result.output)
dbg_symbols_targets = result.output.splitlines()
self.assertTrue(dbg_symbols_targets, msg='Failed to split udevadm: %s' % dbg_symbols_targets)
@@ -289,9 +304,33 @@ SKIP_RECIPE[busybox] = "Don't build this"
def test_empty_image(self):
"""Test creation of image with no packages"""
- bitbake('test-empty-image')
- res_dir = get_bb_var('DEPLOY_DIR_IMAGE')
- images = os.path.join(res_dir, "test-empty-image-*.manifest")
- result = glob.glob(images)
- with open(result[1],"r") as f:
+ image = 'test-empty-image'
+ bitbake(image)
+ bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'IMAGE_LINK_NAME'], image)
+ manifest = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], "%s.manifest" % bb_vars['IMAGE_LINK_NAME'])
+ self.assertTrue(os.path.exists(manifest))
+
+ with open(manifest, "r") as f:
self.assertEqual(len(f.read().strip()),0)
+
+ def test_mandb(self):
+ """
+ Test that an image containing manpages has working man and apropos commands.
+ """
+ config = """
+DISTRO_FEATURES:append = " api-documentation"
+CORE_IMAGE_EXTRA_INSTALL = "man-pages kmod-doc"
+"""
+ self.write_config(config)
+ bitbake("core-image-minimal")
+
+ with runqemu('core-image-minimal', ssh=False, runqemuparams='nographic') as qemu:
+ # This manpage is provided by man-pages
+ status, output = qemu.run_serial("apropos 8859")
+ self.assertEqual(status, 1, 'Failed to run apropos: %s' % (output))
+ self.assertIn("iso_8859_15", output)
+
+ # This manpage is provided by kmod
+ status, output = qemu.run_serial("man --pager=cat modprobe")
+ self.assertEqual(status, 1, 'Failed to run man: %s' % (output))
+ self.assertIn("force-modversion", output)
diff --git a/meta/lib/oeqa/selftest/cases/incompatible_lic.py b/meta/lib/oeqa/selftest/cases/incompatible_lic.py
index 6279d74245..f4af67a239 100644
--- a/meta/lib/oeqa/selftest/cases/incompatible_lic.py
+++ b/meta/lib/oeqa/selftest/cases/incompatible_lic.py
@@ -1,3 +1,8 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
from oeqa.selftest.case import OESelftestTestCase
from oeqa.utils.commands import bitbake
@@ -108,6 +113,7 @@ INCOMPATIBLE_LICENSE:pn-core-image-minimal = "GPL-3.0* LGPL-3.0*"
raise AssertionError(result.output)
def test_bash_and_license(self):
+ self.disable_class("create-spdx")
self.write_config(self.default_config() + '\nLICENSE:append:pn-bash = " & SomeLicense"')
error_msg = "ERROR: core-image-minimal-1.0-r0 do_rootfs: Package bash cannot be installed into the image because it has incompatible license(s): GPL-3.0-or-later"
@@ -116,6 +122,7 @@ INCOMPATIBLE_LICENSE:pn-core-image-minimal = "GPL-3.0* LGPL-3.0*"
raise AssertionError(result.output)
def test_bash_or_license(self):
+ self.disable_class("create-spdx")
self.write_config(self.default_config() + '\nLICENSE:append:pn-bash = " | SomeLicense"')
bitbake('core-image-minimal')
@@ -134,23 +141,11 @@ INCOMPATIBLE_LICENSE:pn-core-image-minimal = "GPL-3.0* LGPL-3.0*"
def test_core_image_full_cmdline_weston(self):
self.write_config("""
-INHERIT += "testimage"
+IMAGE_CLASSES += "testimage"
INCOMPATIBLE_LICENSE:pn-core-image-full-cmdline = "GPL-3.0* LGPL-3.0*"
INCOMPATIBLE_LICENSE:pn-core-image-weston = "GPL-3.0* LGPL-3.0*"
-# Settings for full-cmdline
-RDEPENDS:packagegroup-core-full-cmdline-utils:remove = "bash bc coreutils cpio ed findutils gawk grep mc mc-fish mc-helpers mc-helpers-perl sed tar time"
-RDEPENDS:packagegroup-core-full-cmdline-dev-utils:remove = "diffutils m4 make patch"
-RDEPENDS:packagegroup-core-full-cmdline-multiuser:remove = "gzip"
-# Settings for weston
-# direct gpl3 dependencies
-RRECOMMENDS:packagegroup-base-vfat:remove = "dosfstools"
-PACKAGECONFIG:remove:pn-bluez5 = "readline"
-# dnf pulls in gpg which is gpl3; it also pulls in python3-rpm which pulls in rpm-build which pulls in bash
-# so install rpm but not dnf
-IMAGE_FEATURES:remove:pn-core-image-weston = "package-management"
-CORE_IMAGE_EXTRA_INSTALL:pn-core-image-weston += "rpm"
-# matchbox-terminal depends on vte, which is gpl3
-CORE_IMAGE_BASE_INSTALL:remove:pn-core-image-weston = "matchbox-terminal"
+
+require conf/distro/include/no-gplv3.inc
""")
bitbake('core-image-full-cmdline core-image-weston')
bitbake('-c testimage core-image-full-cmdline core-image-weston')
diff --git a/meta/lib/oeqa/selftest/cases/intercept.py b/meta/lib/oeqa/selftest/cases/intercept.py
new file mode 100644
index 0000000000..12583c3099
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/intercept.py
@@ -0,0 +1,21 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+from oeqa.selftest.case import OESelftestTestCase
+from oeqa.utils.commands import bitbake
+
+class GitCheck(OESelftestTestCase):
+ def test_git_intercept(self):
+ """
+ Git binaries with CVE-2022-24765 fixed will refuse to operate on a
+ repository which is owned by a different user. This breaks our
+ do_install task as that runs inside pseudo, so the git repository is
+ owned by the build user but git is running as (fake)root.
+
+ We have an intercept which disables pseudo, so verify that it works.
+ """
+ bitbake("git-submodule-test -c test_git_as_user")
+ bitbake("git-submodule-test -c test_git_as_root")
diff --git a/meta/lib/oeqa/selftest/cases/kerneldevelopment.py b/meta/lib/oeqa/selftest/cases/kerneldevelopment.py
index b1623a1885..b1f78a0cd1 100644
--- a/meta/lib/oeqa/selftest/cases/kerneldevelopment.py
+++ b/meta/lib/oeqa/selftest/cases/kerneldevelopment.py
@@ -1,3 +1,9 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
import os
from oeqa.selftest.case import OESelftestTestCase
from oeqa.utils.commands import runCmd, get_bb_var
@@ -58,6 +64,7 @@ class KernelDev(OESelftestTestCase):
recipe_append = os.path.join(self.recipeskernel_dir, 'linux-yocto_%.bbappend')
with open(recipe_append, 'w+') as fh:
fh.write('SRC_URI += "file://%s"\n' % patch_name)
+ fh.write('ERROR_QA:remove:pn-linux-yocto = "patch-status"\n')
fh.write('FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"')
runCmd('bitbake virtual/kernel -c clean')
diff --git a/meta/lib/oeqa/selftest/cases/layerappend.py b/meta/lib/oeqa/selftest/cases/layerappend.py
index 8fb1e6c530..379ed589ad 100644
--- a/meta/lib/oeqa/selftest/cases/layerappend.py
+++ b/meta/lib/oeqa/selftest/cases/layerappend.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
diff --git a/meta/lib/oeqa/selftest/cases/liboe.py b/meta/lib/oeqa/selftest/cases/liboe.py
index afe8f8809f..d5ffffdcb4 100644
--- a/meta/lib/oeqa/selftest/cases/liboe.py
+++ b/meta/lib/oeqa/selftest/cases/liboe.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
@@ -97,6 +99,6 @@ class LibOE(OESelftestTestCase):
dstcnt = len(os.listdir(dst))
srccnt = len(os.listdir(src))
- self.assertEquals(dstcnt, len(testfiles), "Number of files in dst (%s) differs from number of files in src(%s)." % (dstcnt, srccnt))
+ self.assertEqual(dstcnt, len(testfiles), "Number of files in dst (%s) differs from number of files in src(%s)." % (dstcnt, srccnt))
oe.path.remove(testloc)
diff --git a/meta/lib/oeqa/selftest/cases/lic_checksum.py b/meta/lib/oeqa/selftest/cases/lic_checksum.py
index 8f1226e6a5..2d0b805b90 100644
--- a/meta/lib/oeqa/selftest/cases/lic_checksum.py
+++ b/meta/lib/oeqa/selftest/cases/lic_checksum.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
@@ -26,6 +28,7 @@ LIC_FILES_CHKSUM = "file://%s;md5=d41d8cd98f00b204e9800998ecf8427e"
SRC_URI = "file://%s;md5=d41d8cd98f00b204e9800998ecf8427e"
""" % (urllib.parse.quote(lic_path), urllib.parse.quote(lic_path)))
result = bitbake(bitbake_cmd)
+ self.delete_recipeinc('emptytest')
# Verify that changing a license file that has an absolute path causes
@@ -51,5 +54,6 @@ SRC_URI = "file://%s;md5=d41d8cd98f00b204e9800998ecf8427e"
f.write("data")
result = bitbake(bitbake_cmd, ignore_status=True)
+ self.delete_recipeinc('emptytest')
if error_msg not in result.output:
raise AssertionError(result.output)
diff --git a/meta/lib/oeqa/selftest/cases/locales.py b/meta/lib/oeqa/selftest/cases/locales.py
new file mode 100644
index 0000000000..4ca8ffb7aa
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/locales.py
@@ -0,0 +1,54 @@
+#
+# SPDX-License-Identifier: MIT
+#
+
+from oeqa.selftest.case import OESelftestTestCase
+from oeqa.core.decorator import OETestTag
+from oeqa.utils.commands import bitbake, runqemu
+
+class LocalesTest(OESelftestTestCase):
+
+ @OETestTag("runqemu")
+
+ def run_locales_test(self, binary_enabled):
+ features = []
+ features.append('EXTRA_IMAGE_FEATURES = "empty-root-password allow-empty-password allow-root-login"')
+ features.append('IMAGE_INSTALL:append = " glibc-utils localedef"')
+ features.append('GLIBC_GENERATE_LOCALES = "en_US.UTF-8 fr_FR.UTF-8"')
+ features.append('IMAGE_LINGUAS:append = " en-us fr-fr"')
+ if binary_enabled:
+ features.append('ENABLE_BINARY_LOCALE_GENERATION = "1"')
+ else:
+ features.append('ENABLE_BINARY_LOCALE_GENERATION = "0"')
+ self.write_config("\n".join(features))
+
+ # Build a core-image-minimal
+ bitbake('core-image-minimal')
+
+ with runqemu("core-image-minimal", ssh=False, runqemuparams='nographic') as qemu:
+ cmd = "locale -a"
+ status, output = qemu.run_serial(cmd)
+ # output must includes fr_FR or fr_FR.UTF-8
+ self.assertEqual(status, 1, msg='locale test command failed: output: %s' % output)
+ self.assertIn("fr_FR", output, msg='locale -a test failed: output: %s' % output)
+
+ cmd = "localedef --list-archive -v"
+ status, output = qemu.run_serial(cmd)
+ # output must includes fr_FR.utf8
+ self.assertEqual(status, 1, msg='localedef test command failed: output: %s' % output)
+ self.assertIn("fr_FR.utf8", output, msg='localedef test failed: output: %s' % output)
+
+ def test_locales_on(self):
+ """
+ Summary: Test the locales are generated
+ Expected: 1. Check the locale exist in the locale-archive
+ 2. Check the locale exist for the glibc
+ 3. Check the locale can be generated
+ Product: oe-core
+ Author: Louis Rannou <lrannou@baylibre.com>
+ AutomatedBy: Louis Rannou <lrannou@baylibre.com>
+ """
+ self.run_locales_test(True)
+
+ def test_locales_off(self):
+ self.run_locales_test(False)
diff --git a/meta/lib/oeqa/selftest/cases/manifest.py b/meta/lib/oeqa/selftest/cases/manifest.py
index 0a04c13a85..07a6c80489 100644
--- a/meta/lib/oeqa/selftest/cases/manifest.py
+++ b/meta/lib/oeqa/selftest/cases/manifest.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
diff --git a/meta/lib/oeqa/selftest/cases/meta_ide.py b/meta/lib/oeqa/selftest/cases/meta_ide.py
index 6f10d30dc9..ffe0d2604d 100644
--- a/meta/lib/oeqa/selftest/cases/meta_ide.py
+++ b/meta/lib/oeqa/selftest/cases/meta_ide.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
@@ -16,13 +18,15 @@ class MetaIDE(OESelftestTestCase):
def setUpClass(cls):
super(MetaIDE, cls).setUpClass()
bitbake('meta-ide-support')
- bb_vars = get_bb_vars(['MULTIMACH_TARGET_SYS', 'TMPDIR', 'COREBASE'])
+ bitbake('build-sysroots -c build_native_sysroot')
+ bitbake('build-sysroots -c build_target_sysroot')
+ bb_vars = get_bb_vars(['MULTIMACH_TARGET_SYS', 'DEPLOY_DIR_IMAGE', 'COREBASE'])
cls.environment_script = 'environment-setup-%s' % bb_vars['MULTIMACH_TARGET_SYS']
- cls.tmpdir = bb_vars['TMPDIR']
- cls.environment_script_path = '%s/%s' % (cls.tmpdir, cls.environment_script)
+ cls.deploydir = bb_vars['DEPLOY_DIR_IMAGE']
+ cls.environment_script_path = '%s/%s' % (cls.deploydir, cls.environment_script)
cls.corebasedir = bb_vars['COREBASE']
cls.tmpdir_metaideQA = tempfile.mkdtemp(prefix='metaide')
-
+
@classmethod
def tearDownClass(cls):
shutil.rmtree(cls.tmpdir_metaideQA, ignore_errors=True)
@@ -40,12 +44,17 @@ class MetaIDE(OESelftestTestCase):
def test_meta_ide_can_build_cpio_project(self):
dl_dir = self.td.get('DL_DIR', None)
self.project = SDKBuildProject(self.tmpdir_metaideQA + "/cpio/", self.environment_script_path,
- "https://ftp.gnu.org/gnu/cpio/cpio-2.13.tar.gz",
+ "https://ftp.gnu.org/gnu/cpio/cpio-2.15.tar.gz",
self.tmpdir_metaideQA, self.td['DATETIME'], dl_dir=dl_dir)
self.project.download_archive()
- self.assertEqual(self.project.run_configure('$CONFIGURE_FLAGS --disable-maintainer-mode','sed -i -e "/char \*program_name/d" src/global.c;'), 0,
+ self.assertEqual(self.project.run_configure('$CONFIGURE_FLAGS'), 0,
msg="Running configure failed")
self.assertEqual(self.project.run_make(), 0,
msg="Running make failed")
self.assertEqual(self.project.run_install(), 0,
msg="Running make install failed")
+
+ def test_meta_ide_can_run_sdk_tests(self):
+ bitbake('-c populate_sysroot gtk+3')
+ bitbake('build-sysroots -c build_target_sysroot')
+ bitbake('-c testsdk meta-ide-support')
diff --git a/meta/lib/oeqa/selftest/cases/minidebuginfo.py b/meta/lib/oeqa/selftest/cases/minidebuginfo.py
new file mode 100644
index 0000000000..2919f07939
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/minidebuginfo.py
@@ -0,0 +1,44 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+import os
+import subprocess
+import tempfile
+import shutil
+
+from oeqa.selftest.case import OESelftestTestCase
+from oeqa.utils.commands import bitbake, get_bb_var, get_bb_vars, runCmd
+
+
+class Minidebuginfo(OESelftestTestCase):
+ def test_minidebuginfo(self):
+ target_sys = get_bb_var("TARGET_SYS")
+ binutils = "binutils-cross-{}".format(get_bb_var("TARGET_ARCH"))
+
+ image = 'core-image-minimal'
+ bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'IMAGE_LINK_NAME', 'READELF'], image)
+
+ self.write_config("""
+DISTRO_FEATURES:append = " minidebuginfo"
+IMAGE_FSTYPES = "tar.bz2"
+""")
+ bitbake("{} {}:do_addto_recipe_sysroot".format(image, binutils))
+
+ native_sysroot = get_bb_var("RECIPE_SYSROOT_NATIVE", binutils)
+
+ # confirm that executables and shared libraries contain an ELF section
+ # ".gnu_debugdata" which stores minidebuginfo.
+ with tempfile.TemporaryDirectory(prefix = "unpackfs-") as unpackedfs:
+ filename = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], "{}.tar.bz2".format(bb_vars['IMAGE_LINK_NAME']))
+ shutil.unpack_archive(filename, unpackedfs)
+
+ r = runCmd([bb_vars['READELF'], "-W", "-S", os.path.join(unpackedfs, "bin", "busybox")],
+ native_sysroot = native_sysroot, target_sys = target_sys)
+ self.assertIn(".gnu_debugdata", r.output)
+
+ r = runCmd([bb_vars['READELF'], "-W", "-S", os.path.join(unpackedfs, "lib", "libc.so.6")],
+ native_sysroot = native_sysroot, target_sys = target_sys)
+ self.assertIn(".gnu_debugdata", r.output)
+
diff --git a/meta/lib/oeqa/selftest/cases/multiconfig.py b/meta/lib/oeqa/selftest/cases/multiconfig.py
index baae9b456f..f509cbf607 100644
--- a/meta/lib/oeqa/selftest/cases/multiconfig.py
+++ b/meta/lib/oeqa/selftest/cases/multiconfig.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
@@ -70,3 +72,16 @@ TMPDIR = "${TOPDIR}/tmp-mc-tiny"
result = bitbake('mc:test:multiconfig-test-parse -c showvar')
self.assertIn('MCTESTVAR=test2', result.output.splitlines())
+
+ def test_multiconfig_inlayer(self):
+ """
+ Test that a multiconfig from meta-selftest works.
+ """
+
+ config = """
+BBMULTICONFIG = "muslmc"
+"""
+ self.write_config(config)
+
+ # Build a core-image-minimal, only dry run needed to check config is present
+ bitbake('mc:muslmc:bash -n')
diff --git a/meta/lib/oeqa/selftest/cases/newlib.py b/meta/lib/oeqa/selftest/cases/newlib.py
index 999e3e78b0..fe57aa51f2 100644
--- a/meta/lib/oeqa/selftest/cases/newlib.py
+++ b/meta/lib/oeqa/selftest/cases/newlib.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
diff --git a/meta/lib/oeqa/selftest/cases/oelib/buildhistory.py b/meta/lib/oeqa/selftest/cases/oelib/buildhistory.py
index 802a91a488..042ccdd2b4 100644
--- a/meta/lib/oeqa/selftest/cases/oelib/buildhistory.py
+++ b/meta/lib/oeqa/selftest/cases/oelib/buildhistory.py
@@ -1,8 +1,11 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
import os
+import sys
from oeqa.selftest.case import OESelftestTestCase
import tempfile
import operator
@@ -11,15 +14,14 @@ from oeqa.utils.commands import get_bb_var
class TestBlobParsing(OESelftestTestCase):
def setUp(self):
- import time
self.repo_path = tempfile.mkdtemp(prefix='selftest-buildhistory',
dir=get_bb_var('TOPDIR'))
try:
from git import Repo
self.repo = Repo.init(self.repo_path)
- except ImportError:
- self.skipTest('Python module GitPython is not present')
+ except ImportError as e:
+ self.skipTest('Python module GitPython is not present (%s) (%s)' % (e, sys.path))
self.test_file = "test"
self.var_map = {}
@@ -28,6 +30,16 @@ class TestBlobParsing(OESelftestTestCase):
import shutil
shutil.rmtree(self.repo_path)
+ @property
+ def heads_default(self):
+ """
+ Support repos defaulting to master or to main branch
+ """
+ try:
+ return self.repo.heads.main
+ except AttributeError:
+ return self.repo.heads.master
+
def commit_vars(self, to_add={}, to_remove = [], msg="A commit message"):
if len(to_add) == 0 and len(to_remove) == 0:
return
@@ -65,10 +77,10 @@ class TestBlobParsing(OESelftestTestCase):
changesmap = { "foo-2" : ("2", "8"), "bar" : ("","4"), "bar-2" : ("","5")}
self.commit_vars(to_add = { "foo" : "1", "foo-2" : "2", "foo-3" : "3" })
- blob1 = self.repo.heads.master.commit.tree.blobs[0]
+ blob1 = self.heads_default.commit.tree.blobs[0]
self.commit_vars(to_add = { "foo-2" : "8", "bar" : "4", "bar-2" : "5" })
- blob2 = self.repo.heads.master.commit.tree.blobs[0]
+ blob2 = self.heads_default.commit.tree.blobs[0]
change_records = compare_dict_blobs(os.path.join(self.repo_path, self.test_file),
blob1, blob2, False, False)
@@ -84,10 +96,10 @@ class TestBlobParsing(OESelftestTestCase):
defaultmap = { x : ("default", "1") for x in ["PKG", "PKGE", "PKGV", "PKGR"]}
self.commit_vars(to_add = { "foo" : "1" })
- blob1 = self.repo.heads.master.commit.tree.blobs[0]
+ blob1 = self.heads_default.commit.tree.blobs[0]
self.commit_vars(to_add = { "PKG" : "1", "PKGE" : "1", "PKGV" : "1", "PKGR" : "1" })
- blob2 = self.repo.heads.master.commit.tree.blobs[0]
+ blob2 = self.heads_default.commit.tree.blobs[0]
change_records = compare_dict_blobs(os.path.join(self.repo_path, self.test_file),
blob1, blob2, False, False)
diff --git a/meta/lib/oeqa/selftest/cases/oelib/elf.py b/meta/lib/oeqa/selftest/cases/oelib/elf.py
index 5a5f9b4fdf..7bf550b6fd 100644
--- a/meta/lib/oeqa/selftest/cases/oelib/elf.py
+++ b/meta/lib/oeqa/selftest/cases/oelib/elf.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
diff --git a/meta/lib/oeqa/selftest/cases/oelib/license.py b/meta/lib/oeqa/selftest/cases/oelib/license.py
index 3b359396b6..5eea12e761 100644
--- a/meta/lib/oeqa/selftest/cases/oelib/license.py
+++ b/meta/lib/oeqa/selftest/cases/oelib/license.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
diff --git a/meta/lib/oeqa/selftest/cases/oelib/path.py b/meta/lib/oeqa/selftest/cases/oelib/path.py
index a1cfa08c09..b963e447e3 100644
--- a/meta/lib/oeqa/selftest/cases/oelib/path.py
+++ b/meta/lib/oeqa/selftest/cases/oelib/path.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
diff --git a/meta/lib/oeqa/selftest/cases/oelib/types.py b/meta/lib/oeqa/selftest/cases/oelib/types.py
index 7eb49e6f95..58318b18b2 100644
--- a/meta/lib/oeqa/selftest/cases/oelib/types.py
+++ b/meta/lib/oeqa/selftest/cases/oelib/types.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
diff --git a/meta/lib/oeqa/selftest/cases/oelib/utils.py b/meta/lib/oeqa/selftest/cases/oelib/utils.py
index bbf67bf9c9..0cb46425a0 100644
--- a/meta/lib/oeqa/selftest/cases/oelib/utils.py
+++ b/meta/lib/oeqa/selftest/cases/oelib/utils.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
diff --git a/meta/lib/oeqa/selftest/cases/oescripts.py b/meta/lib/oeqa/selftest/cases/oescripts.py
index cd687816c8..f69efccfee 100644
--- a/meta/lib/oeqa/selftest/cases/oescripts.py
+++ b/meta/lib/oeqa/selftest/cases/oescripts.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
@@ -21,7 +23,7 @@ class BuildhistoryDiffTests(BuildhistoryBase):
pkgv = result.output.rstrip()
result = runCmd("buildhistory-diff -p %s" % get_bb_var('BUILDHISTORY_DIR'))
expected_endlines = [
- "xcursor-transparent-theme-dev: RDEPENDS: removed \"xcursor-transparent-theme (['= %s-r1'])\", added \"xcursor-transparent-theme (['= %s-r0'])\"" % (pkgv, pkgv),
+ "xcursor-transparent-theme-dev: RRECOMMENDS: removed \"xcursor-transparent-theme (['= %s-r1'])\", added \"xcursor-transparent-theme (['= %s-r0'])\"" % (pkgv, pkgv),
"xcursor-transparent-theme-staticdev: RDEPENDS: removed \"xcursor-transparent-theme-dev (['= %s-r1'])\", added \"xcursor-transparent-theme-dev (['= %s-r0'])\"" % (pkgv, pkgv)
]
for line in result.output.splitlines():
@@ -34,18 +36,16 @@ class BuildhistoryDiffTests(BuildhistoryBase):
if expected_endlines:
self.fail('Missing expected line endings:\n %s' % '\n '.join(expected_endlines))
-class OEScriptTests(OESelftestTestCase):
- scripts_dir = os.path.join(get_bb_var('COREBASE'), 'scripts')
-
@unittest.skipUnless(importlib.util.find_spec("cairo"), "Python cairo module is not present")
-class OEPybootchartguyTests(OEScriptTests):
+class OEPybootchartguyTests(OESelftestTestCase):
@classmethod
def setUpClass(cls):
- super(OEScriptTests, cls).setUpClass()
+ super().setUpClass()
bitbake("core-image-minimal -c rootfs -f")
cls.tmpdir = get_bb_var('TMPDIR')
cls.buildstats = cls.tmpdir + "/buildstats/" + sorted(os.listdir(cls.tmpdir + "/buildstats"))[-1]
+ cls.scripts_dir = os.path.join(get_bb_var('COREBASE'), 'scripts')
def test_pybootchartguy_help(self):
runCmd('%s/pybootchartgui/pybootchartgui.py --help' % self.scripts_dir)
@@ -63,7 +63,12 @@ class OEPybootchartguyTests(OEScriptTests):
self.assertTrue(os.path.exists(self.tmpdir + "/charts.pdf"))
-class OEGitproxyTests(OEScriptTests):
+class OEGitproxyTests(OESelftestTestCase):
+
+ @classmethod
+ def setUpClass(cls):
+ super().setUpClass()
+ cls.scripts_dir = os.path.join(get_bb_var('COREBASE'), 'scripts')
def test_oegitproxy_help(self):
try:
@@ -121,15 +126,22 @@ class OEGitproxyTests(OEScriptTests):
class OeRunNativeTest(OESelftestTestCase):
def test_oe_run_native(self):
bitbake("qemu-helper-native -c addto_recipe_sysroot")
- result = runCmd("oe-run-native qemu-helper-native tunctl -h")
- self.assertIn("Delete: tunctl -d device-name [-f tun-clone-device]", result.output)
+ result = runCmd("oe-run-native qemu-helper-native qemu-oe-bridge-helper --help")
+ self.assertIn("Helper function to find and exec qemu-bridge-helper", result.output)
+
+class OEListPackageconfigTests(OESelftestTestCase):
+
+ @classmethod
+ def setUpClass(cls):
+ super().setUpClass()
+ cls.scripts_dir = os.path.join(get_bb_var('COREBASE'), 'scripts')
-class OEListPackageconfigTests(OEScriptTests):
#oe-core.scripts.List_all_the_PACKAGECONFIG's_flags
def check_endlines(self, results, expected_endlines):
for line in results.output.splitlines():
for el in expected_endlines:
- if line.split() == el.split():
+ if line and line.split()[0] == el.split()[0] and \
+ ' '.join(sorted(el.split())) in ' '.join(sorted(line.split())):
expected_endlines.remove(el)
break
@@ -145,7 +157,7 @@ class OEListPackageconfigTests(OEScriptTests):
results = runCmd('%s/contrib/list-packageconfig-flags.py' % self.scripts_dir)
expected_endlines = []
expected_endlines.append("RECIPE NAME PACKAGECONFIG FLAGS")
- expected_endlines.append("pinentry gtk2 libcap ncurses qt secret")
+ expected_endlines.append("pinentry gtk2 ncurses qt secret")
expected_endlines.append("tar acl selinux")
self.check_endlines(results, expected_endlines)
@@ -163,11 +175,10 @@ class OEListPackageconfigTests(OEScriptTests):
def test_packageconfig_flags_option_all(self):
results = runCmd('%s/contrib/list-packageconfig-flags.py -a' % self.scripts_dir)
expected_endlines = []
- expected_endlines.append("pinentry-1.2.0")
- expected_endlines.append("PACKAGECONFIG ncurses libcap")
+ expected_endlines.append("pinentry-1.2.1")
+ expected_endlines.append("PACKAGECONFIG ncurses")
expected_endlines.append("PACKAGECONFIG[qt] --enable-pinentry-qt, --disable-pinentry-qt, qtbase-native qtbase")
expected_endlines.append("PACKAGECONFIG[gtk2] --enable-pinentry-gtk2, --disable-pinentry-gtk2, gtk+ glib-2.0")
- expected_endlines.append("PACKAGECONFIG[libcap] --with-libcap, --without-libcap, libcap")
expected_endlines.append("PACKAGECONFIG[ncurses] --enable-ncurses --with-ncurses-include-dir=${STAGING_INCDIR}, --disable-ncurses, ncurses")
expected_endlines.append("PACKAGECONFIG[secret] --enable-libsecret, --disable-libsecret, libsecret")
@@ -177,7 +188,7 @@ class OEListPackageconfigTests(OEScriptTests):
results = runCmd('%s/contrib/list-packageconfig-flags.py -p' % self.scripts_dir)
expected_endlines = []
expected_endlines.append("RECIPE NAME PACKAGECONFIG FLAGS")
- expected_endlines.append("pinentry gtk2 libcap ncurses qt secret")
+ expected_endlines.append("pinentry gtk2 ncurses qt secret")
self.check_endlines(results, expected_endlines)
diff --git a/meta/lib/oeqa/selftest/cases/overlayfs.py b/meta/lib/oeqa/selftest/cases/overlayfs.py
index 96beb8b869..e31063567b 100644
--- a/meta/lib/oeqa/selftest/cases/overlayfs.py
+++ b/meta/lib/oeqa/selftest/cases/overlayfs.py
@@ -1,10 +1,13 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
from oeqa.selftest.case import OESelftestTestCase
from oeqa.utils.commands import bitbake, runqemu
from oeqa.core.decorator import OETestTag
+from oeqa.core.decorator.data import skipIfNotMachine
def getline_qemu(out, line):
for l in out.split('\n'):
@@ -55,7 +58,7 @@ inherit overlayfs
config = """
IMAGE_INSTALL:append = " overlayfs-user"
-DISTRO_FEATURES:append = " systemd overlayfs"
+DISTRO_FEATURES:append = " systemd overlayfs usrmerge"
"""
self.write_config(config)
@@ -76,7 +79,7 @@ DISTRO_FEATURES:append = " systemd overlayfs"
config = """
IMAGE_INSTALL:append = " overlayfs-user"
-DISTRO_FEATURES += "systemd overlayfs"
+DISTRO_FEATURES:append = " systemd overlayfs usrmerge"
OVERLAYFS_QA_SKIP[mnt-overlay] = "mount-configured"
"""
@@ -94,7 +97,7 @@ OVERLAYFS_QA_SKIP[mnt-overlay] = "mount-configured"
config = """
IMAGE_INSTALL:append = " overlayfs-user"
-DISTRO_FEATURES:append = " systemd overlayfs"
+DISTRO_FEATURES:append = " systemd overlayfs usrmerge"
"""
self.write_config(config)
@@ -112,7 +115,7 @@ DISTRO_FEATURES:append = " systemd overlayfs"
config = """
IMAGE_INSTALL:append = " overlayfs-user"
-DISTRO_FEATURES:append = " systemd overlayfs"
+DISTRO_FEATURES:append = " systemd overlayfs usrmerge"
"""
wrong_machine_config = """
@@ -136,10 +139,10 @@ OVERLAYFS_MOUNT_POINT[usr-share-overlay] = "/usr/share/overlay"
config = """
IMAGE_INSTALL:append = " overlayfs-user systemd-machine-units"
-DISTRO_FEATURES:append = " systemd overlayfs"
+DISTRO_FEATURES:append = " overlayfs"
# Use systemd as init manager
-VIRTUAL-RUNTIME_init_manager = "systemd"
+INIT_MANAGER = "systemd"
# enable overlayfs in the kernel
KERNEL_EXTRA_FEATURES:append = " features/overlayfs/overlayfs.scc"
@@ -271,10 +274,8 @@ class OverlayFSEtcRunTimeTests(OESelftestTestCase):
"""
configBase = """
-DISTRO_FEATURES:append = " systemd"
-
# Use systemd as init manager
-VIRTUAL-RUNTIME_init_manager = "systemd"
+INIT_MANAGER = "systemd"
# enable overlayfs in the kernel
KERNEL_EXTRA_FEATURES:append = " features/overlayfs/overlayfs.scc"
@@ -313,10 +314,8 @@ OVERLAYFS_ETC_DEVICE = "/dev/mmcblk0p1"
"""
config = """
-DISTRO_FEATURES:append = " systemd"
-
# Use systemd as init manager
-VIRTUAL-RUNTIME_init_manager = "systemd"
+INIT_MANAGER = "systemd"
# enable overlayfs in the kernel
KERNEL_EXTRA_FEATURES:append = " features/overlayfs/overlayfs.scc"
@@ -331,28 +330,18 @@ EXTRA_IMAGE_FEATURES += "package-management"
self.assertTrue("overlayfs-etc" in res.output, msg=res.output)
self.assertTrue("package-management" in res.output, msg=res.output)
- def test_image_feature_is_missing_class_included(self):
- configAppend = """
-INHERIT += "overlayfs-etc"
-"""
- self.run_check_image_feature(configAppend)
-
+ # https://bugzilla.yoctoproject.org/show_bug.cgi?id=14963
+ @skipIfNotMachine("qemux86-64", "tests are qemux86-64 specific currently")
def test_image_feature_is_missing(self):
- self.run_check_image_feature()
-
- def run_check_image_feature(self, appendToConfig=""):
"""
Summary: Overlayfs-etc class is not applied when image feature is not set
- even if we inherit it directly,
Expected: Image is created successfully but /etc is not an overlay
Author: Vyacheslav Yurkov <uvv.mail@gmail.com>
"""
- config = f"""
-DISTRO_FEATURES:append = " systemd"
-
+ config = """
# Use systemd as init manager
-VIRTUAL-RUNTIME_init_manager = "systemd"
+INIT_MANAGER = "systemd"
# enable overlayfs in the kernel
KERNEL_EXTRA_FEATURES:append = " features/overlayfs/overlayfs.scc"
@@ -364,7 +353,7 @@ EXTRA_IMAGE_FEATURES += "read-only-rootfs"
# Image configuration for overlayfs-etc
OVERLAYFS_ETC_MOUNT_POINT = "/data"
OVERLAYFS_ETC_DEVICE = "/dev/sda3"
-{appendToConfig}
+OVERLAYFS_ROOTFS_TYPE = "ext4"
"""
self.write_config(config)
@@ -377,45 +366,32 @@ OVERLAYFS_ETC_DEVICE = "/dev/sda3"
line = getline_qemu(output, "upperdir=/data/overlay-etc/upper")
self.assertFalse(line, msg=output)
+ @skipIfNotMachine("qemux86-64", "tests are qemux86-64 specific currently")
def test_sbin_init_preinit(self):
- self.run_sbin_init(False)
+ self.run_sbin_init(False, "ext4")
+ @skipIfNotMachine("qemux86-64", "tests are qemux86-64 specific currently")
def test_sbin_init_original(self):
- self.run_sbin_init(True)
+ self.run_sbin_init(True, "ext4")
+
+ @skipIfNotMachine("qemux86-64", "tests are qemux86-64 specific currently")
+ def test_sbin_init_read_only(self):
+ self.run_sbin_init(True, "squashfs")
- def run_sbin_init(self, origInit):
+ def run_sbin_init(self, origInit, rootfsType):
"""
Summary: Confirm we can replace original init and mount overlay on top of /etc
Expected: Image is created successfully and /etc is mounted as an overlay
Author: Vyacheslav Yurkov <uvv.mail@gmail.com>
"""
- config = """
-DISTRO_FEATURES:append = " systemd"
-
-# Use systemd as init manager
-VIRTUAL-RUNTIME_init_manager = "systemd"
-
-# enable overlayfs in the kernel
-KERNEL_EXTRA_FEATURES:append = " features/overlayfs/overlayfs.scc"
-
-IMAGE_FSTYPES += "wic"
-OVERLAYFS_INIT_OPTION = "{OVERLAYFS_INIT_OPTION}"
-WKS_FILE = "overlayfs_etc.wks.in"
-
-EXTRA_IMAGE_FEATURES += "read-only-rootfs"
-# Image configuration for overlayfs-etc
-EXTRA_IMAGE_FEATURES += "overlayfs-etc"
-IMAGE_FEATURES:remove = "package-management"
-OVERLAYFS_ETC_MOUNT_POINT = "/data"
-OVERLAYFS_ETC_FSTYPE = "ext4"
-OVERLAYFS_ETC_DEVICE = "/dev/sda3"
-OVERLAYFS_ETC_USE_ORIG_INIT_NAME = "{OVERLAYFS_ETC_USE_ORIG_INIT_NAME}"
-"""
+ config = self.get_working_config()
args = {
'OVERLAYFS_INIT_OPTION': "" if origInit else "init=/sbin/preinit",
- 'OVERLAYFS_ETC_USE_ORIG_INIT_NAME': int(origInit == True)
+ 'OVERLAYFS_ETC_USE_ORIG_INIT_NAME': int(origInit == True),
+ 'OVERLAYFS_ROOTFS_TYPE': rootfsType,
+ 'OVERLAYFS_ETC_CREATE_MOUNT_DIRS': int(rootfsType == "ext4")
}
self.write_config(config.format(**args))
@@ -432,6 +408,11 @@ OVERLAYFS_ETC_USE_ORIG_INIT_NAME = "{OVERLAYFS_ETC_USE_ORIG_INIT_NAME}"
line = getline_qemu(output, "upperdir=/data/overlay-etc/upper")
self.assertTrue(line and line.startswith("/data/overlay-etc/upper on /etc type overlay"), msg=output)
+ # check that lower layer is not available
+ status, output = qemu.run_serial("ls -1 /data/overlay-etc/lower")
+ line = getline_qemu(output, "No such file or directory")
+ self.assertTrue(line, msg=output)
+
status, output = qemu.run_serial("touch " + testFile)
status, output = qemu.run_serial("sync")
status, output = qemu.run_serial("ls -1 " + testFile)
@@ -443,3 +424,79 @@ OVERLAYFS_ETC_USE_ORIG_INIT_NAME = "{OVERLAYFS_ETC_USE_ORIG_INIT_NAME}"
status, output = qemu.run_serial("ls -1 " + testFile)
line = getline_qemu(output, testFile)
self.assertTrue(line and line.startswith(testFile), msg=output)
+
+ @skipIfNotMachine("qemux86-64", "tests are qemux86-64 specific currently")
+ def test_lower_layer_access(self):
+ """
+ Summary: Test that lower layer of /etc is available read-only when configured
+ Expected: Can't write to lower layer. The files on lower and upper different after
+ modification
+ Author: Vyacheslav Yurkov <uvv.mail@gmail.com>
+ """
+
+ config = self.get_working_config()
+
+ configLower = """
+OVERLAYFS_ETC_EXPOSE_LOWER = "1"
+IMAGE_INSTALL:append = " overlayfs-user"
+"""
+ testFile = "lower-layer-test.txt"
+
+ args = {
+ 'OVERLAYFS_INIT_OPTION': "",
+ 'OVERLAYFS_ETC_USE_ORIG_INIT_NAME': 1,
+ 'OVERLAYFS_ROOTFS_TYPE': "ext4",
+ 'OVERLAYFS_ETC_CREATE_MOUNT_DIRS': 1
+ }
+
+ self.write_config(config.format(**args))
+
+ self.append_config(configLower)
+ bitbake('core-image-minimal')
+
+ with runqemu('core-image-minimal', image_fstype='wic') as qemu:
+ status, output = qemu.run_serial("echo \"Modified in upper\" > /etc/" + testFile)
+ status, output = qemu.run_serial("diff /etc/" + testFile + " /data/overlay-etc/lower/" + testFile)
+ line = getline_qemu(output, "Modified in upper")
+ self.assertTrue(line, msg=output)
+ line = getline_qemu(output, "Original file")
+ self.assertTrue(line, msg=output)
+
+ status, output = qemu.run_serial("touch /data/overlay-etc/lower/ro-test.txt")
+ line = getline_qemu(output, "Read-only file system")
+ self.assertTrue(line, msg=output)
+
+ def get_working_config(self):
+ return """
+# Use systemd as init manager
+INIT_MANAGER = "systemd"
+
+# enable overlayfs in the kernel
+KERNEL_EXTRA_FEATURES:append = " \
+ features/overlayfs/overlayfs.scc \
+ cfg/fs/squashfs.scc"
+
+IMAGE_FSTYPES += "wic"
+OVERLAYFS_INIT_OPTION = "{OVERLAYFS_INIT_OPTION}"
+OVERLAYFS_ROOTFS_TYPE = "{OVERLAYFS_ROOTFS_TYPE}"
+OVERLAYFS_ETC_CREATE_MOUNT_DIRS = "{OVERLAYFS_ETC_CREATE_MOUNT_DIRS}"
+WKS_FILE = "overlayfs_etc.wks.in"
+
+EXTRA_IMAGE_FEATURES += "read-only-rootfs"
+# Image configuration for overlayfs-etc
+EXTRA_IMAGE_FEATURES += "overlayfs-etc"
+IMAGE_FEATURES:remove = "package-management"
+OVERLAYFS_ETC_MOUNT_POINT = "/data"
+OVERLAYFS_ETC_FSTYPE = "ext4"
+OVERLAYFS_ETC_DEVICE = "/dev/sda3"
+OVERLAYFS_ETC_USE_ORIG_INIT_NAME = "{OVERLAYFS_ETC_USE_ORIG_INIT_NAME}"
+
+ROOTFS_POSTPROCESS_COMMAND += "{OVERLAYFS_ROOTFS_TYPE}_rootfs"
+
+ext4_rootfs() {{
+}}
+
+squashfs_rootfs() {{
+ mkdir -p ${{IMAGE_ROOTFS}}/data
+}}
+"""
diff --git a/meta/lib/oeqa/selftest/cases/package.py b/meta/lib/oeqa/selftest/cases/package.py
index 51d835259e..1aa6c03f8a 100644
--- a/meta/lib/oeqa/selftest/cases/package.py
+++ b/meta/lib/oeqa/selftest/cases/package.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
@@ -87,6 +89,13 @@ class VersionOrdering(OESelftestTestCase):
self.assertEqual(status - 100, sort, "%s %s (%d) failed" % (ver1, ver2, sort))
class PackageTests(OESelftestTestCase):
+ # Verify that a recipe cannot rename a package into an existing one
+ def test_package_name_conflict(self):
+ res = bitbake("packagenameconflict", ignore_status=True)
+ self.assertNotEqual(res.status, 0)
+ err = "package name already exists"
+ self.assertTrue(err in res.output)
+
# Verify that a recipe which sets up hardlink files has those preserved into split packages
# Also test file sparseness is preserved
def test_preserve_sparse_hardlinks(self):
@@ -133,8 +142,10 @@ class PackageTests(OESelftestTestCase):
self.logger.error("No debugging symbols found. GDB result:\n%s" % output)
return False
- # Check debugging symbols works correctly
- elif re.match(r"Breakpoint 1.*hello\.c.*4", l):
+ # Check debugging symbols works correctly. Don't look for a
+ # source file as optimisation can put the breakpoint inside
+ # stdio.h.
+ elif "Breakpoint 1 at" in l:
return True
self.logger.error("GDB result:\n%d: %s", status, output)
@@ -149,25 +160,25 @@ class PackageTests(OESelftestTestCase):
self.fail('GDB %s failed' % binary)
def test_preserve_ownership(self):
- import os, stat, oe.cachedpath
features = 'IMAGE_INSTALL:append = " selftest-chown"\n'
self.write_config(features)
bitbake("core-image-minimal")
- sysconfdir = get_bb_var('sysconfdir', 'selftest-chown')
- def check_ownership(qemu, gid, uid, path):
+ def check_ownership(qemu, expected_gid, expected_uid, path):
self.logger.info("Check ownership of %s", path)
- status, output = qemu.run_serial(r'/bin/stat -c "%U %G" ' + path, timeout=60)
- output = output.split(" ")
- if output[0] != uid or output[1] != gid :
- self.logger.error("Incrrect ownership %s [%s:%s]", path, output[0], output[1])
- return False
- return True
+ status, output = qemu.run_serial('stat -c "%U %G" ' + path)
+ self.assertEqual(status, 1, "stat failed: " + output)
+ try:
+ uid, gid = output.split()
+ self.assertEqual(uid, expected_uid)
+ self.assertEqual(gid, expected_gid)
+ except ValueError:
+ self.fail("Cannot parse output: " + output)
+ sysconfdir = get_bb_var('sysconfdir', 'selftest-chown')
with runqemu('core-image-minimal') as qemu:
for path in [ sysconfdir + "/selftest-chown/file",
sysconfdir + "/selftest-chown/dir",
sysconfdir + "/selftest-chown/symlink",
sysconfdir + "/selftest-chown/fifotest/fifo"]:
- if not check_ownership(qemu, "test", "test", path):
- self.fail('Test ownership %s failed' % path)
+ check_ownership(qemu, "test", "test", path)
diff --git a/meta/lib/oeqa/selftest/cases/pkgdata.py b/meta/lib/oeqa/selftest/cases/pkgdata.py
index 254abc40c6..d786c33018 100644
--- a/meta/lib/oeqa/selftest/cases/pkgdata.py
+++ b/meta/lib/oeqa/selftest/cases/pkgdata.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
@@ -47,8 +49,8 @@ class OePkgdataUtilTests(OESelftestTestCase):
self.assertGreater(pkgsize, 1, "Size should be greater than 1. %s" % result.output)
def test_find_path(self):
- result = runCmd('oe-pkgdata-util find-path /lib/libz.so.1')
- self.assertEqual(result.output, 'zlib: /lib/libz.so.1')
+ result = runCmd('oe-pkgdata-util find-path /usr/lib/libz.so.1')
+ self.assertEqual(result.output, 'zlib: /usr/lib/libz.so.1')
result = runCmd('oe-pkgdata-util find-path /usr/bin/m4')
self.assertEqual(result.output, 'm4: /usr/bin/m4')
result = runCmd('oe-pkgdata-util find-path /not/exist', ignore_status=True)
@@ -120,8 +122,7 @@ class OePkgdataUtilTests(OESelftestTestCase):
curpkg = line.split(':')[0]
files[curpkg] = []
return files
- bb_vars = get_bb_vars(['base_libdir', 'libdir', 'includedir', 'mandir'])
- base_libdir = bb_vars['base_libdir']
+ bb_vars = get_bb_vars(['libdir', 'includedir', 'mandir'])
libdir = bb_vars['libdir']
includedir = bb_vars['includedir']
mandir = bb_vars['mandir']
@@ -138,7 +139,7 @@ class OePkgdataUtilTests(OESelftestTestCase):
self.assertIn('libz1', list(files.keys()), "listed pkgs. files: %s" %result.output)
self.assertIn('libz-dev', list(files.keys()), "listed pkgs. files: %s" %result.output)
self.assertGreater(len(files['libz1']), 1)
- libspec = os.path.join(base_libdir, 'libz.so.1.*')
+ libspec = os.path.join(libdir, 'libz.so.1.*')
found = False
for fileitem in files['libz1']:
if fnmatch.fnmatchcase(fileitem, libspec):
diff --git a/meta/lib/oeqa/selftest/cases/prservice.py b/meta/lib/oeqa/selftest/cases/prservice.py
index 10158ca7c2..8da3739c57 100644
--- a/meta/lib/oeqa/selftest/cases/prservice.py
+++ b/meta/lib/oeqa/selftest/cases/prservice.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
@@ -12,6 +14,8 @@ from oeqa.selftest.case import OESelftestTestCase
from oeqa.utils.commands import runCmd, bitbake, get_bb_var
from oeqa.utils.network import get_free_port
+import bb.utils
+
class BitbakePrTests(OESelftestTestCase):
@classmethod
@@ -19,6 +23,16 @@ class BitbakePrTests(OESelftestTestCase):
super(BitbakePrTests, cls).setUpClass()
cls.pkgdata_dir = get_bb_var('PKGDATA_DIR')
+ cls.exported_db_path = os.path.join(cls.builddir, 'export.inc')
+ cls.current_db_path = os.path.join(get_bb_var('PERSISTENT_DIR'), 'prserv.sqlite3')
+
+ def cleanup(self):
+ # Ensure any memory resident bitbake is stopped
+ bitbake("-m")
+ # Remove any existing export file or prserv database
+ bb.utils.remove(self.exported_db_path)
+ bb.utils.remove(self.current_db_path + "*")
+
def get_pr_version(self, package_name):
package_data_file = os.path.join(self.pkgdata_dir, 'runtime', package_name)
package_data = ftools.read_file(package_data_file)
@@ -47,6 +61,7 @@ class BitbakePrTests(OESelftestTestCase):
self.assertEqual(res.status, 0, msg=res.output)
def config_pr_tests(self, package_name, package_type='rpm', pr_socket='localhost:0'):
+ self.cleanup()
config_package_data = 'PACKAGE_CLASSES = "package_%s"' % package_type
self.write_config(config_package_data)
config_server_data = 'PRSERV_HOST = "%s"' % pr_socket
@@ -66,24 +81,24 @@ class BitbakePrTests(OESelftestTestCase):
self.assertTrue(pr_2 - pr_1 == 1, "New PR %s did not increment as expected (from %s), difference should be 1" % (pr_2, pr_1))
self.assertTrue(stamp_1 != stamp_2, "Different pkg rev. but same stamp: %s" % stamp_1)
+ self.cleanup()
+
def run_test_pr_export_import(self, package_name, replace_current_db=True):
self.config_pr_tests(package_name)
self.increment_package_pr(package_name)
pr_1 = self.get_pr_version(package_name)
- exported_db_path = os.path.join(self.builddir, 'export.inc')
- export_result = runCmd("bitbake-prserv-tool export %s" % exported_db_path, ignore_status=True)
+ export_result = runCmd("bitbake-prserv-tool export %s" % self.exported_db_path, ignore_status=True)
self.assertEqual(export_result.status, 0, msg="PR Service database export failed: %s" % export_result.output)
- self.assertTrue(os.path.exists(exported_db_path))
+ self.assertTrue(os.path.exists(self.exported_db_path), msg="%s didn't exist, tool output %s" % (self.exported_db_path, export_result.output))
if replace_current_db:
- current_db_path = os.path.join(get_bb_var('PERSISTENT_DIR'), 'prserv.sqlite3')
- self.assertTrue(os.path.exists(current_db_path), msg="Path to current PR Service database is invalid: %s" % current_db_path)
- os.remove(current_db_path)
+ self.assertTrue(os.path.exists(self.current_db_path), msg="Path to current PR Service database is invalid: %s" % self.current_db_path)
+ os.remove(self.current_db_path)
- import_result = runCmd("bitbake-prserv-tool import %s" % exported_db_path, ignore_status=True)
- os.remove(exported_db_path)
+ import_result = runCmd("bitbake-prserv-tool import %s" % self.exported_db_path, ignore_status=True)
+ #os.remove(self.exported_db_path)
self.assertEqual(import_result.status, 0, msg="PR Service database import failed: %s" % import_result.output)
self.increment_package_pr(package_name)
@@ -91,6 +106,8 @@ class BitbakePrTests(OESelftestTestCase):
self.assertTrue(pr_2 - pr_1 == 1, "New PR %s did not increment as expected (from %s), difference should be 1" % (pr_2, pr_1))
+ self.cleanup()
+
def test_import_export_replace_db(self):
self.run_test_pr_export_import('m4')
diff --git a/meta/lib/oeqa/selftest/cases/pseudo.py b/meta/lib/oeqa/selftest/cases/pseudo.py
index 33593d5ce9..3ef8786022 100644
--- a/meta/lib/oeqa/selftest/cases/pseudo.py
+++ b/meta/lib/oeqa/selftest/cases/pseudo.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
diff --git a/meta/lib/oeqa/selftest/cases/recipetool.py b/meta/lib/oeqa/selftest/cases/recipetool.py
index 510dae6bad..aebea42502 100644
--- a/meta/lib/oeqa/selftest/cases/recipetool.py
+++ b/meta/lib/oeqa/selftest/cases/recipetool.py
@@ -1,7 +1,10 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
+import errno
import os
import shutil
import tempfile
@@ -25,6 +28,16 @@ def tearDownModule():
runCmd('rm -rf %s' % templayerdir)
+def needTomllib(test):
+ # This test require python 3.11 or above for the tomllib module or tomli module to be installed
+ try:
+ import tomllib
+ except ImportError:
+ try:
+ import tomli
+ except ImportError:
+ test.skipTest('Test requires python 3.11 or above for tomllib module or tomli module')
+
class RecipetoolBase(devtool.DevtoolTestCase):
def setUpLocal(self):
@@ -35,6 +48,8 @@ class RecipetoolBase(devtool.DevtoolTestCase):
self.testfile = os.path.join(self.tempdir, 'testfile')
with open(self.testfile, 'w') as f:
f.write('Test file\n')
+ config = 'BBMASK += "meta-poky/recipes-core/base-files/base-files_%.bbappend"\n'
+ self.append_config(config)
def tearDownLocal(self):
runCmd('rm -rf %s/recipes-*' % self.templayerdir)
@@ -346,7 +361,6 @@ class RecipetoolCreateTests(RecipetoolBase):
checkvars['LICENSE'] = 'GPL-2.0-only'
checkvars['LIC_FILES_CHKSUM'] = 'file://COPYING;md5=b234ee4d69f5fce4486a80fdaf4a4263'
checkvars['SRC_URI'] = 'https://github.com/logrotate/logrotate/releases/download/${PV}/logrotate-${PV}.tar.xz'
- checkvars['SRC_URI[md5sum]'] = 'a560c57fac87c45b2fc17406cdf79288'
checkvars['SRC_URI[sha256sum]'] = '2e6a401cac9024db2288297e3be1a8ab60e7401ba8e91225218aaf4a27e82a07'
self._test_recipe_contents(recipefile, checkvars, [])
@@ -359,14 +373,14 @@ class RecipetoolCreateTests(RecipetoolBase):
tempsrc = os.path.join(self.tempdir, 'srctree')
os.makedirs(tempsrc)
recipefile = os.path.join(self.tempdir, 'libmatchbox.bb')
- srcuri = 'git://git.yoctoproject.org/libmatchbox'
+ srcuri = 'git://git.yoctoproject.org/libmatchbox;protocol=https'
result = runCmd(['recipetool', 'create', '-o', recipefile, srcuri + ";rev=9f7cf8895ae2d39c465c04cc78e918c157420269", '-x', tempsrc])
self.assertTrue(os.path.isfile(recipefile), 'recipetool did not create recipe file; output:\n%s' % result.output)
checkvars = {}
checkvars['LICENSE'] = 'LGPL-2.1-only'
checkvars['LIC_FILES_CHKSUM'] = 'file://COPYING;md5=7fbc338309ac38fefcd64b04bb903e34'
checkvars['S'] = '${WORKDIR}/git'
- checkvars['PV'] = '1.11+git${SRCPV}'
+ checkvars['PV'] = '1.11+git'
checkvars['SRC_URI'] = srcuri + ';branch=master'
checkvars['DEPENDS'] = set(['libcheck', 'libjpeg-turbo', 'libpng', 'libx11', 'libxext', 'pango'])
inherits = ['autotools', 'pkgconfig']
@@ -404,7 +418,6 @@ class RecipetoolCreateTests(RecipetoolBase):
checkvars = {}
checkvars['LICENSE'] = set(['LGPL-2.1-only', 'MPL-1.1-only'])
checkvars['SRC_URI'] = 'http://taglib.github.io/releases/taglib-${PV}.tar.gz'
- checkvars['SRC_URI[md5sum]'] = 'cee7be0ccfc892fa433d6c837df9522a'
checkvars['SRC_URI[sha256sum]'] = 'b6d1a5a610aae6ff39d93de5efd0fdc787aa9e9dc1e7026fa4c961b26563526b'
checkvars['DEPENDS'] = set(['boost', 'zlib'])
inherits = ['cmake']
@@ -440,44 +453,257 @@ class RecipetoolCreateTests(RecipetoolBase):
self._test_recipe_contents(recipefile, checkvars, inherits)
def test_recipetool_create_github(self):
- # Basic test to see if github URL mangling works
+ # Basic test to see if github URL mangling works. Deliberately use an
+ # older release of Meson at present so we don't need a toml parser.
temprecipe = os.path.join(self.tempdir, 'recipe')
os.makedirs(temprecipe)
- recipefile = os.path.join(temprecipe, 'meson_git.bb')
- srcuri = 'https://github.com/mesonbuild/meson;rev=0.32.0'
- result = runCmd(['recipetool', 'create', '-o', temprecipe, srcuri])
- self.assertTrue(os.path.isfile(recipefile))
+ recipefile = os.path.join(temprecipe, 'python3-meson_git.bb')
+ srcuri = 'https://github.com/mesonbuild/meson;rev=0.52.1'
+ cmd = ['recipetool', 'create', '-o', temprecipe, srcuri]
+ result = runCmd(cmd)
+ self.assertTrue(os.path.isfile(recipefile), msg="recipe %s not created for command %s, output %s" % (recipefile, " ".join(cmd), result.output))
checkvars = {}
- checkvars['LICENSE'] = set(['Apache-2.0'])
- checkvars['SRC_URI'] = 'git://github.com/mesonbuild/meson;protocol=https;branch=master'
+ checkvars['LICENSE'] = set(['Apache-2.0', "Unknown"])
+ checkvars['SRC_URI'] = 'git://github.com/mesonbuild/meson;protocol=https;branch=0.52'
inherits = ['setuptools3']
self._test_recipe_contents(recipefile, checkvars, inherits)
def test_recipetool_create_python3_setuptools(self):
# Test creating python3 package from tarball (using setuptools3 class)
+ # Use the --no-pypi switch to avoid creating a pypi enabled recipe and
+ # and check the created recipe as if it was a more general tarball
temprecipe = os.path.join(self.tempdir, 'recipe')
os.makedirs(temprecipe)
pn = 'python-magic'
pv = '0.4.15'
recipefile = os.path.join(temprecipe, '%s_%s.bb' % (pn, pv))
srcuri = 'https://files.pythonhosted.org/packages/84/30/80932401906eaf787f2e9bd86dc458f1d2e75b064b4c187341f29516945c/python-magic-%s.tar.gz' % pv
- result = runCmd('recipetool create -o %s %s' % (temprecipe, srcuri))
+ result = runCmd('recipetool create --no-pypi -o %s %s' % (temprecipe, srcuri))
self.assertTrue(os.path.isfile(recipefile))
checkvars = {}
checkvars['LICENSE'] = set(['MIT'])
checkvars['LIC_FILES_CHKSUM'] = 'file://LICENSE;md5=16a934f165e8c3245f241e77d401bb88'
checkvars['SRC_URI'] = 'https://files.pythonhosted.org/packages/84/30/80932401906eaf787f2e9bd86dc458f1d2e75b064b4c187341f29516945c/python-magic-${PV}.tar.gz'
- checkvars['SRC_URI[md5sum]'] = 'e384c95a47218f66c6501cd6dd45ff59'
checkvars['SRC_URI[sha256sum]'] = 'f3765c0f582d2dfc72c15f3b5a82aecfae9498bd29ca840d72f37d7bd38bfcd5'
inherits = ['setuptools3']
self._test_recipe_contents(recipefile, checkvars, inherits)
+ def test_recipetool_create_python3_setuptools_pypi_tarball(self):
+ # Test creating python3 package from tarball (using setuptools3 and pypi classes)
+ temprecipe = os.path.join(self.tempdir, 'recipe')
+ os.makedirs(temprecipe)
+ pn = 'python-magic'
+ pv = '0.4.15'
+ recipefile = os.path.join(temprecipe, '%s_%s.bb' % (pn, pv))
+ srcuri = 'https://files.pythonhosted.org/packages/84/30/80932401906eaf787f2e9bd86dc458f1d2e75b064b4c187341f29516945c/python-magic-%s.tar.gz' % pv
+ result = runCmd('recipetool create -o %s %s' % (temprecipe, srcuri))
+ self.assertTrue(os.path.isfile(recipefile))
+ checkvars = {}
+ checkvars['LICENSE'] = set(['MIT'])
+ checkvars['LIC_FILES_CHKSUM'] = 'file://LICENSE;md5=16a934f165e8c3245f241e77d401bb88'
+ checkvars['SRC_URI[sha256sum]'] = 'f3765c0f582d2dfc72c15f3b5a82aecfae9498bd29ca840d72f37d7bd38bfcd5'
+ checkvars['PYPI_PACKAGE'] = pn
+ inherits = ['setuptools3', 'pypi']
+ self._test_recipe_contents(recipefile, checkvars, inherits)
+
+ def test_recipetool_create_python3_setuptools_pypi(self):
+ # Test creating python3 package from pypi url (using setuptools3 and pypi classes)
+ # Intentionnaly using setuptools3 class here instead of any of the pep517 class
+ # to avoid the toml dependency and allows this test to run on host autobuilders
+ # with older version of python
+ temprecipe = os.path.join(self.tempdir, 'recipe')
+ os.makedirs(temprecipe)
+ pn = 'python-magic'
+ pv = '0.4.15'
+ recipefile = os.path.join(temprecipe, '%s_%s.bb' % (pn, pv))
+ # First specify the required version in the url
+ srcuri = 'https://pypi.org/project/%s/%s' % (pn, pv)
+ runCmd('recipetool create -o %s %s' % (temprecipe, srcuri))
+ self.assertTrue(os.path.isfile(recipefile))
+ checkvars = {}
+ checkvars['LICENSE'] = set(['MIT'])
+ checkvars['LIC_FILES_CHKSUM'] = 'file://LICENSE;md5=16a934f165e8c3245f241e77d401bb88'
+ checkvars['SRC_URI[sha256sum]'] = 'f3765c0f582d2dfc72c15f3b5a82aecfae9498bd29ca840d72f37d7bd38bfcd5'
+ checkvars['PYPI_PACKAGE'] = pn
+ inherits = ['setuptools3', "pypi"]
+ self._test_recipe_contents(recipefile, checkvars, inherits)
+
+ # Now specify the version as a recipetool parameter
+ runCmd('rm -rf %s' % recipefile)
+ self.assertFalse(os.path.isfile(recipefile))
+ srcuri = 'https://pypi.org/project/%s' % pn
+ runCmd('recipetool create -o %s %s --version %s' % (temprecipe, srcuri, pv))
+ self.assertTrue(os.path.isfile(recipefile))
+ checkvars = {}
+ checkvars['LICENSE'] = set(['MIT'])
+ checkvars['LIC_FILES_CHKSUM'] = 'file://LICENSE;md5=16a934f165e8c3245f241e77d401bb88'
+ checkvars['SRC_URI[sha256sum]'] = 'f3765c0f582d2dfc72c15f3b5a82aecfae9498bd29ca840d72f37d7bd38bfcd5'
+ checkvars['PYPI_PACKAGE'] = pn
+ inherits = ['setuptools3', "pypi"]
+ self._test_recipe_contents(recipefile, checkvars, inherits)
+
+ # Now, try to grab latest version of the package, so we cannot guess the name of the recipe,
+ # unless hardcoding the latest version but it means we will need to update the test for each release,
+ # so use a regexp
+ runCmd('rm -rf %s' % recipefile)
+ self.assertFalse(os.path.isfile(recipefile))
+ recipefile_re = r'%s_(.*)\.bb' % pn
+ result = runCmd('recipetool create -o %s %s' % (temprecipe, srcuri))
+ dirlist = os.listdir(temprecipe)
+ if len(dirlist) > 1:
+ self.fail('recipetool created more than just one file; output:\n%s\ndirlist:\n%s' % (result.output, str(dirlist)))
+ if len(dirlist) < 1 or not os.path.isfile(os.path.join(temprecipe, dirlist[0])):
+ self.fail('recipetool did not create recipe file; output:\n%s\ndirlist:\n%s' % (result.output, str(dirlist)))
+ import re
+ match = re.match(recipefile_re, dirlist[0])
+ self.assertTrue(match)
+ latest_pv = match.group(1)
+ self.assertTrue(latest_pv != pv)
+ recipefile = os.path.join(temprecipe, '%s_%s.bb' % (pn, latest_pv))
+ # Do not check LIC_FILES_CHKSUM and SRC_URI checksum here to avoid having updating the test on each release
+ checkvars = {}
+ checkvars['LICENSE'] = set(['MIT'])
+ checkvars['PYPI_PACKAGE'] = pn
+ inherits = ['setuptools3', "pypi"]
+ self._test_recipe_contents(recipefile, checkvars, inherits)
+
+ def test_recipetool_create_python3_pep517_setuptools_build_meta(self):
+ # This test require python 3.11 or above for the tomllib module or tomli module to be installed
+ needTomllib(self)
+
+ # Test creating python3 package from tarball (using setuptools.build_meta class)
+ temprecipe = os.path.join(self.tempdir, 'recipe')
+ os.makedirs(temprecipe)
+ pn = 'webcolors'
+ pv = '1.13'
+ recipefile = os.path.join(temprecipe, 'python3-%s_%s.bb' % (pn, pv))
+ srcuri = 'https://files.pythonhosted.org/packages/a1/fb/f95560c6a5d4469d9c49e24cf1b5d4d21ffab5608251c6020a965fb7791c/%s-%s.tar.gz' % (pn, pv)
+ result = runCmd('recipetool create -o %s %s' % (temprecipe, srcuri))
+ self.assertTrue(os.path.isfile(recipefile))
+ checkvars = {}
+ checkvars['SUMMARY'] = 'A library for working with the color formats defined by HTML and CSS.'
+ checkvars['LICENSE'] = set(['BSD-3-Clause'])
+ checkvars['LIC_FILES_CHKSUM'] = 'file://LICENSE;md5=702b1ef12cf66832a88f24c8f2ee9c19'
+ checkvars['SRC_URI[sha256sum]'] = 'c225b674c83fa923be93d235330ce0300373d02885cef23238813b0d5668304a'
+ inherits = ['python_setuptools_build_meta', 'pypi']
+
+ self._test_recipe_contents(recipefile, checkvars, inherits)
+
+ def test_recipetool_create_python3_pep517_poetry_core_masonry_api(self):
+ # This test require python 3.11 or above for the tomllib module or tomli module to be installed
+ needTomllib(self)
+
+ # Test creating python3 package from tarball (using poetry.core.masonry.api class)
+ temprecipe = os.path.join(self.tempdir, 'recipe')
+ os.makedirs(temprecipe)
+ pn = 'iso8601'
+ pv = '2.1.0'
+ recipefile = os.path.join(temprecipe, 'python3-%s_%s.bb' % (pn, pv))
+ srcuri = 'https://files.pythonhosted.org/packages/b9/f3/ef59cee614d5e0accf6fd0cbba025b93b272e626ca89fb70a3e9187c5d15/%s-%s.tar.gz' % (pn, pv)
+ result = runCmd('recipetool create -o %s %s' % (temprecipe, srcuri))
+ self.assertTrue(os.path.isfile(recipefile))
+ checkvars = {}
+ checkvars['SUMMARY'] = 'Simple module to parse ISO 8601 dates'
+ checkvars['LICENSE'] = set(['MIT'])
+ checkvars['LIC_FILES_CHKSUM'] = 'file://LICENSE;md5=aab31f2ef7ba214a5a341eaa47a7f367'
+ checkvars['SRC_URI[sha256sum]'] = '6b1d3829ee8921c4301998c909f7829fa9ed3cbdac0d3b16af2d743aed1ba8df'
+ inherits = ['python_poetry_core', 'pypi']
+
+ self._test_recipe_contents(recipefile, checkvars, inherits)
+
+ def test_recipetool_create_python3_pep517_flit_core_buildapi(self):
+ # This test require python 3.11 or above for the tomllib module or tomli module to be installed
+ needTomllib(self)
+
+ # Test creating python3 package from tarball (using flit_core.buildapi class)
+ temprecipe = os.path.join(self.tempdir, 'recipe')
+ os.makedirs(temprecipe)
+ pn = 'typing-extensions'
+ pv = '4.8.0'
+ recipefile = os.path.join(temprecipe, 'python3-%s_%s.bb' % (pn, pv))
+ srcuri = 'https://files.pythonhosted.org/packages/1f/7a/8b94bb016069caa12fc9f587b28080ac33b4fbb8ca369b98bc0a4828543e/typing_extensions-%s.tar.gz' % pv
+ result = runCmd('recipetool create -o %s %s' % (temprecipe, srcuri))
+ self.assertTrue(os.path.isfile(recipefile))
+ checkvars = {}
+ checkvars['SUMMARY'] = 'Backported and Experimental Type Hints for Python 3.8+'
+ checkvars['LICENSE'] = set(['PSF-2.0'])
+ checkvars['LIC_FILES_CHKSUM'] = 'file://LICENSE;md5=fcf6b249c2641540219a727f35d8d2c2'
+ checkvars['SRC_URI[sha256sum]'] = 'df8e4339e9cb77357558cbdbceca33c303714cf861d1eef15e1070055ae8b7ef'
+ inherits = ['python_flit_core', 'pypi']
+
+ self._test_recipe_contents(recipefile, checkvars, inherits)
+
+ def test_recipetool_create_python3_pep517_hatchling(self):
+ # This test require python 3.11 or above for the tomllib module or tomli module to be installed
+ needTomllib(self)
+
+ # Test creating python3 package from tarball (using hatchling class)
+ temprecipe = os.path.join(self.tempdir, 'recipe')
+ os.makedirs(temprecipe)
+ pn = 'jsonschema'
+ pv = '4.19.1'
+ recipefile = os.path.join(temprecipe, 'python3-%s_%s.bb' % (pn, pv))
+ srcuri = 'https://files.pythonhosted.org/packages/e4/43/087b24516db11722c8687e0caf0f66c7785c0b1c51b0ab951dfde924e3f5/jsonschema-%s.tar.gz' % pv
+ result = runCmd('recipetool create -o %s %s' % (temprecipe, srcuri))
+ self.assertTrue(os.path.isfile(recipefile))
+ checkvars = {}
+ checkvars['SUMMARY'] = 'An implementation of JSON Schema validation for Python'
+ checkvars['HOMEPAGE'] = 'https://github.com/python-jsonschema/jsonschema'
+ checkvars['LICENSE'] = set(['MIT'])
+ checkvars['LIC_FILES_CHKSUM'] = 'file://COPYING;md5=7a60a81c146ec25599a3e1dabb8610a8 file://json/LICENSE;md5=9d4de43111d33570c8fe49b4cb0e01af'
+ checkvars['SRC_URI[sha256sum]'] = 'ec84cc37cfa703ef7cd4928db24f9cb31428a5d0fa77747b8b51a847458e0bbf'
+ inherits = ['python_hatchling', 'pypi']
+
+ self._test_recipe_contents(recipefile, checkvars, inherits)
+
+ def test_recipetool_create_python3_pep517_maturin(self):
+ # This test require python 3.11 or above for the tomllib module or tomli module to be installed
+ needTomllib(self)
+
+ # Test creating python3 package from tarball (using maturin class)
+ temprecipe = os.path.join(self.tempdir, 'recipe')
+ os.makedirs(temprecipe)
+ pn = 'pydantic-core'
+ pv = '2.14.5'
+ recipefile = os.path.join(temprecipe, 'python3-%s_%s.bb' % (pn, pv))
+ srcuri = 'https://files.pythonhosted.org/packages/64/26/cffb93fe9c6b5a91c497f37fae14a4b073ecbc47fc36a9979c7aa888b245/pydantic_core-%s.tar.gz' % pv
+ result = runCmd('recipetool create -o %s %s' % (temprecipe, srcuri))
+ self.assertTrue(os.path.isfile(recipefile))
+ checkvars = {}
+ checkvars['HOMEPAGE'] = 'https://github.com/pydantic/pydantic-core'
+ checkvars['LICENSE'] = set(['MIT'])
+ checkvars['LIC_FILES_CHKSUM'] = 'file://LICENSE;md5=ab599c188b4a314d2856b3a55030c75c'
+ checkvars['SRC_URI[sha256sum]'] = '6d30226dfc816dd0fdf120cae611dd2215117e4f9b124af8c60ab9093b6e8e71'
+ inherits = ['python_maturin', 'pypi']
+
+ self._test_recipe_contents(recipefile, checkvars, inherits)
+
+ def test_recipetool_create_python3_pep517_mesonpy(self):
+ # This test require python 3.11 or above for the tomllib module or tomli module to be installed
+ needTomllib(self)
+
+ # Test creating python3 package from tarball (using mesonpy class)
+ temprecipe = os.path.join(self.tempdir, 'recipe')
+ os.makedirs(temprecipe)
+ pn = 'siphash24'
+ pv = '1.4'
+ recipefile = os.path.join(temprecipe, 'python3-%s_%s.bb' % (pn, pv))
+ srcuri = 'https://files.pythonhosted.org/packages/c2/32/b934a70592f314afcfa86c7f7e388804a8061be65b822e2aa07e573b6477/%s-%s.tar.gz' % (pn, pv)
+ result = runCmd('recipetool create -o %s %s' % (temprecipe, srcuri))
+ self.assertTrue(os.path.isfile(recipefile))
+ checkvars = {}
+ checkvars['SRC_URI[sha256sum]'] = '7fd65e39b2a7c8c4ddc3a168a687f4610751b0ac2ebb518783c0cdfc30bec4a0'
+ inherits = ['python_mesonpy', 'pypi']
+
+ self._test_recipe_contents(recipefile, checkvars, inherits)
+
def test_recipetool_create_github_tarball(self):
- # Basic test to ensure github URL mangling doesn't apply to release tarballs
+ # Basic test to ensure github URL mangling doesn't apply to release tarballs.
+ # Deliberately use an older release of Meson at present so we don't need a toml parser.
temprecipe = os.path.join(self.tempdir, 'recipe')
os.makedirs(temprecipe)
- pv = '0.32.0'
- recipefile = os.path.join(temprecipe, 'meson_%s.bb' % pv)
+ pv = '0.52.1'
+ recipefile = os.path.join(temprecipe, 'python3-meson_%s.bb' % pv)
srcuri = 'https://github.com/mesonbuild/meson/releases/download/%s/meson-%s.tar.gz' % (pv, pv)
result = runCmd('recipetool create -o %s %s' % (temprecipe, srcuri))
self.assertTrue(os.path.isfile(recipefile))
@@ -509,14 +735,252 @@ class RecipetoolCreateTests(RecipetoolBase):
self._test_recipetool_create_git('http://git.yoctoproject.org/git/matchbox-keyboard')
def test_recipetool_create_git_srcuri_master(self):
- self._test_recipetool_create_git('git://git.yoctoproject.org/matchbox-keyboard;branch=master')
+ self._test_recipetool_create_git('git://git.yoctoproject.org/matchbox-keyboard;branch=master;protocol=https')
def test_recipetool_create_git_srcuri_branch(self):
- self._test_recipetool_create_git('git://git.yoctoproject.org/matchbox-keyboard;branch=matchbox-keyboard-0-1')
+ self._test_recipetool_create_git('git://git.yoctoproject.org/matchbox-keyboard;branch=matchbox-keyboard-0-1;protocol=https')
def test_recipetool_create_git_srcbranch(self):
- self._test_recipetool_create_git('git://git.yoctoproject.org/matchbox-keyboard', 'matchbox-keyboard-0-1')
+ self._test_recipetool_create_git('git://git.yoctoproject.org/matchbox-keyboard;protocol=https', 'matchbox-keyboard-0-1')
+
+ def _go_urifiy(self, url, version, modulepath = None, pathmajor = None, subdir = None):
+ modulepath = ",path='%s'" % modulepath if len(modulepath) else ''
+ pathmajor = ",pathmajor='%s'" % pathmajor if len(pathmajor) else ''
+ subdir = ",subdir='%s'" % subdir if len(subdir) else ''
+ return "${@go_src_uri('%s','%s'%s%s%s)}" % (url, version, modulepath, pathmajor, subdir)
+
+ def test_recipetool_create_go(self):
+ # Basic test to check go recipe generation
+ temprecipe = os.path.join(self.tempdir, 'recipe')
+ os.makedirs(temprecipe)
+
+ recipefile = os.path.join(temprecipe, 'edgex-go_git.bb')
+ deps_require_file = os.path.join(temprecipe, 'edgex-go', 'edgex-go-modules.inc')
+ lics_require_file = os.path.join(temprecipe, 'edgex-go', 'edgex-go-licenses.inc')
+ modules_txt_file = os.path.join(temprecipe, 'edgex-go', 'modules.txt')
+
+ srcuri = 'https://github.com/edgexfoundry/edgex-go.git'
+ srcrev = "v3.0.0"
+ srcbranch = "main"
+
+ result = runCmd('recipetool create -o %s %s -S %s -B %s' % (temprecipe, srcuri, srcrev, srcbranch))
+
+ self.maxDiff = None
+ inherits = ['go-vendor']
+
+ checkvars = {}
+ checkvars['GO_IMPORT'] = "github.com/edgexfoundry/edgex-go"
+ checkvars['SRC_URI'] = {'git://${GO_IMPORT};destsuffix=git/src/${GO_IMPORT};nobranch=1;name=${BPN};protocol=https',
+ 'file://modules.txt'}
+ checkvars['LIC_FILES_CHKSUM'] = {'file://src/${GO_IMPORT}/LICENSE;md5=8f8bc924cf73f6a32381e5fd4c58d603'}
+
+ self.assertTrue(os.path.isfile(recipefile))
+ self._test_recipe_contents(recipefile, checkvars, inherits)
+
+ checkvars = {}
+ checkvars['VENDORED_LIC_FILES_CHKSUM'] = set(
+ ['file://src/${GO_IMPORT}/vendor/github.com/Microsoft/go-winio/LICENSE;md5=69205ff73858f2c22b2ca135b557e8ef',
+ 'file://src/${GO_IMPORT}/vendor/github.com/armon/go-metrics/LICENSE;md5=d2d77030c0183e3d1e66d26dc1f243be',
+ 'file://src/${GO_IMPORT}/vendor/github.com/cenkalti/backoff/LICENSE;md5=1571d94433e3f3aa05267efd4dbea68b',
+ 'file://src/${GO_IMPORT}/vendor/github.com/davecgh/go-spew/LICENSE;md5=c06795ed54b2a35ebeeb543cd3a73e56',
+ 'file://src/${GO_IMPORT}/vendor/github.com/eclipse/paho.mqtt.golang/LICENSE;md5=dcdb33474b60c38efd27356d8f2edec7',
+ 'file://src/${GO_IMPORT}/vendor/github.com/eclipse/paho.mqtt.golang/edl-v10;md5=3adfcc70f5aeb7a44f3f9b495aa1fbf3',
+ 'file://src/${GO_IMPORT}/vendor/github.com/edgexfoundry/go-mod-bootstrap/v3/LICENSE;md5=0d6dae39976133b2851fba4c1e1275ff',
+ 'file://src/${GO_IMPORT}/vendor/github.com/edgexfoundry/go-mod-configuration/v3/LICENSE;md5=0d6dae39976133b2851fba4c1e1275ff',
+ 'file://src/${GO_IMPORT}/vendor/github.com/edgexfoundry/go-mod-core-contracts/v3/LICENSE;md5=0d6dae39976133b2851fba4c1e1275ff',
+ 'file://src/${GO_IMPORT}/vendor/github.com/edgexfoundry/go-mod-messaging/v3/LICENSE;md5=0d6dae39976133b2851fba4c1e1275ff',
+ 'file://src/${GO_IMPORT}/vendor/github.com/edgexfoundry/go-mod-registry/v3/LICENSE;md5=0d6dae39976133b2851fba4c1e1275ff',
+ 'file://src/${GO_IMPORT}/vendor/github.com/edgexfoundry/go-mod-secrets/v3/LICENSE;md5=f9fa2f4f8e0ef8cc7b5dd150963eb457',
+ 'file://src/${GO_IMPORT}/vendor/github.com/fatih/color/LICENSE.md;md5=316e6d590bdcde7993fb175662c0dd5a',
+ 'file://src/${GO_IMPORT}/vendor/github.com/fxamacker/cbor/v2/LICENSE;md5=827f5a2fa861382d35a3943adf9ebb86',
+ 'file://src/${GO_IMPORT}/vendor/github.com/go-jose/go-jose/v3/LICENSE;md5=3b83ef96387f14655fc854ddc3c6bd57',
+ 'file://src/${GO_IMPORT}/vendor/github.com/go-jose/go-jose/v3/json/LICENSE;md5=591778525c869cdde0ab5a1bf283cd81',
+ 'file://src/${GO_IMPORT}/vendor/github.com/go-kit/log/LICENSE;md5=5b7c15ad5fffe2ff6e9d58a6c161f082',
+ 'file://src/${GO_IMPORT}/vendor/github.com/go-logfmt/logfmt/LICENSE;md5=98e39517c38127f969de33057067091e',
+ 'file://src/${GO_IMPORT}/vendor/github.com/go-playground/locales/LICENSE;md5=3ccbda375ee345400ad1da85ba522301',
+ 'file://src/${GO_IMPORT}/vendor/github.com/go-playground/universal-translator/LICENSE;md5=2e2b21ef8f61057977d27c727c84bef1',
+ 'file://src/${GO_IMPORT}/vendor/github.com/go-playground/validator/v10/LICENSE;md5=a718a0f318d76f7c5d510cbae84f0b60',
+ 'file://src/${GO_IMPORT}/vendor/github.com/go-redis/redis/v7/LICENSE;md5=58103aa5ea1ee9b7a369c9c4a95ef9b5',
+ 'file://src/${GO_IMPORT}/vendor/github.com/golang/protobuf/LICENSE;md5=939cce1ec101726fa754e698ac871622',
+ 'file://src/${GO_IMPORT}/vendor/github.com/gomodule/redigo/LICENSE;md5=2ee41112a44fe7014dce33e26468ba93',
+ 'file://src/${GO_IMPORT}/vendor/github.com/google/uuid/LICENSE;md5=88073b6dd8ec00fe09da59e0b6dfded1',
+ 'file://src/${GO_IMPORT}/vendor/github.com/gorilla/mux/LICENSE;md5=33fa1116c45f9e8de714033f99edde13',
+ 'file://src/${GO_IMPORT}/vendor/github.com/gorilla/websocket/LICENSE;md5=c007b54a1743d596f46b2748d9f8c044',
+ 'file://src/${GO_IMPORT}/vendor/github.com/hashicorp/consul/api/LICENSE;md5=b8a277a612171b7526e9be072f405ef4',
+ 'file://src/${GO_IMPORT}/vendor/github.com/hashicorp/errwrap/LICENSE;md5=b278a92d2c1509760384428817710378',
+ 'file://src/${GO_IMPORT}/vendor/github.com/hashicorp/go-cleanhttp/LICENSE;md5=65d26fcc2f35ea6a181ac777e42db1ea',
+ 'file://src/${GO_IMPORT}/vendor/github.com/hashicorp/go-hclog/LICENSE;md5=ec7f605b74b9ad03347d0a93a5cc7eb8',
+ 'file://src/${GO_IMPORT}/vendor/github.com/hashicorp/go-immutable-radix/LICENSE;md5=65d26fcc2f35ea6a181ac777e42db1ea',
+ 'file://src/${GO_IMPORT}/vendor/github.com/hashicorp/go-multierror/LICENSE;md5=d44fdeb607e2d2614db9464dbedd4094',
+ 'file://src/${GO_IMPORT}/vendor/github.com/hashicorp/go-rootcerts/LICENSE;md5=65d26fcc2f35ea6a181ac777e42db1ea',
+ 'file://src/${GO_IMPORT}/vendor/github.com/hashicorp/golang-lru/LICENSE;md5=f27a50d2e878867827842f2c60e30bfc',
+ 'file://src/${GO_IMPORT}/vendor/github.com/hashicorp/serf/LICENSE;md5=b278a92d2c1509760384428817710378',
+ 'file://src/${GO_IMPORT}/vendor/github.com/leodido/go-urn/LICENSE;md5=8f50db5538ec1148a9b3d14ed96c3418',
+ 'file://src/${GO_IMPORT}/vendor/github.com/mattn/go-colorable/LICENSE;md5=24ce168f90aec2456a73de1839037245',
+ 'file://src/${GO_IMPORT}/vendor/github.com/mattn/go-isatty/LICENSE;md5=f509beadd5a11227c27b5d2ad6c9f2c6',
+ 'file://src/${GO_IMPORT}/vendor/github.com/mitchellh/consulstructure/LICENSE;md5=96ada10a9e51c98c4656f2cede08c673',
+ 'file://src/${GO_IMPORT}/vendor/github.com/mitchellh/copystructure/LICENSE;md5=56da355a12d4821cda57b8f23ec34bc4',
+ 'file://src/${GO_IMPORT}/vendor/github.com/mitchellh/go-homedir/LICENSE;md5=3f7765c3d4f58e1f84c4313cecf0f5bd',
+ 'file://src/${GO_IMPORT}/vendor/github.com/mitchellh/mapstructure/LICENSE;md5=3f7765c3d4f58e1f84c4313cecf0f5bd',
+ 'file://src/${GO_IMPORT}/vendor/github.com/mitchellh/reflectwalk/LICENSE;md5=3f7765c3d4f58e1f84c4313cecf0f5bd',
+ 'file://src/${GO_IMPORT}/vendor/github.com/nats-io/nats.go/LICENSE;md5=86d3f3a95c324c9479bd8986968f4327',
+ 'file://src/${GO_IMPORT}/vendor/github.com/nats-io/nkeys/LICENSE;md5=86d3f3a95c324c9479bd8986968f4327',
+ 'file://src/${GO_IMPORT}/vendor/github.com/nats-io/nuid/LICENSE;md5=86d3f3a95c324c9479bd8986968f4327',
+ 'file://src/${GO_IMPORT}/vendor/github.com/pmezard/go-difflib/LICENSE;md5=e9a2ebb8de779a07500ddecca806145e',
+ 'file://src/${GO_IMPORT}/vendor/github.com/rcrowley/go-metrics/LICENSE;md5=1bdf5d819f50f141366dabce3be1460f',
+ 'file://src/${GO_IMPORT}/vendor/github.com/spiffe/go-spiffe/v2/LICENSE;md5=86d3f3a95c324c9479bd8986968f4327',
+ 'file://src/${GO_IMPORT}/vendor/github.com/stretchr/objx/LICENSE;md5=d023fd31d3ca39ec61eec65a91732735',
+ 'file://src/${GO_IMPORT}/vendor/github.com/stretchr/testify/LICENSE;md5=188f01994659f3c0d310612333d2a26f',
+ 'file://src/${GO_IMPORT}/vendor/github.com/x448/float16/LICENSE;md5=de8f8e025d57fe7ee0b67f30d571323b',
+ 'file://src/${GO_IMPORT}/vendor/github.com/zeebo/errs/LICENSE;md5=84914ab36fc0eb48edbaa53e66e8d326',
+ 'file://src/${GO_IMPORT}/vendor/golang.org/x/crypto/LICENSE;md5=5d4950ecb7b26d2c5e4e7b4e0dd74707',
+ 'file://src/${GO_IMPORT}/vendor/golang.org/x/mod/LICENSE;md5=5d4950ecb7b26d2c5e4e7b4e0dd74707',
+ 'file://src/${GO_IMPORT}/vendor/golang.org/x/net/LICENSE;md5=5d4950ecb7b26d2c5e4e7b4e0dd74707',
+ 'file://src/${GO_IMPORT}/vendor/golang.org/x/sync/LICENSE;md5=5d4950ecb7b26d2c5e4e7b4e0dd74707',
+ 'file://src/${GO_IMPORT}/vendor/golang.org/x/sys/LICENSE;md5=5d4950ecb7b26d2c5e4e7b4e0dd74707',
+ 'file://src/${GO_IMPORT}/vendor/golang.org/x/text/LICENSE;md5=5d4950ecb7b26d2c5e4e7b4e0dd74707',
+ 'file://src/${GO_IMPORT}/vendor/golang.org/x/tools/LICENSE;md5=5d4950ecb7b26d2c5e4e7b4e0dd74707',
+ 'file://src/${GO_IMPORT}/vendor/google.golang.org/genproto/LICENSE;md5=3b83ef96387f14655fc854ddc3c6bd57',
+ 'file://src/${GO_IMPORT}/vendor/google.golang.org/grpc/LICENSE;md5=3b83ef96387f14655fc854ddc3c6bd57',
+ 'file://src/${GO_IMPORT}/vendor/google.golang.org/protobuf/LICENSE;md5=02d4002e9171d41a8fad93aa7faf3956',
+ 'file://src/${GO_IMPORT}/vendor/gopkg.in/eapache/queue.v1/LICENSE;md5=1bfd4408d3de090ef6b908b0cc45a316',
+ 'file://src/${GO_IMPORT}/vendor/gopkg.in/yaml.v3/LICENSE;md5=3c91c17266710e16afdbb2b6d15c761c'])
+
+ self.assertTrue(os.path.isfile(lics_require_file))
+ self._test_recipe_contents(lics_require_file, checkvars, [])
+
+ dependencies = \
+ [ ('github.com/eclipse/paho.mqtt.golang','v1.4.2', '', '', ''),
+ ('github.com/edgexfoundry/go-mod-bootstrap','v3.0.1','github.com/edgexfoundry/go-mod-bootstrap/v3','/v3', ''),
+ ('github.com/edgexfoundry/go-mod-configuration','v3.0.0','github.com/edgexfoundry/go-mod-configuration/v3','/v3', ''),
+ ('github.com/edgexfoundry/go-mod-core-contracts','v3.0.0','github.com/edgexfoundry/go-mod-core-contracts/v3','/v3', ''),
+ ('github.com/edgexfoundry/go-mod-messaging','v3.0.0','github.com/edgexfoundry/go-mod-messaging/v3','/v3', ''),
+ ('github.com/edgexfoundry/go-mod-secrets','v3.0.1','github.com/edgexfoundry/go-mod-secrets/v3','/v3', ''),
+ ('github.com/fxamacker/cbor','v2.4.0','github.com/fxamacker/cbor/v2','/v2', ''),
+ ('github.com/gomodule/redigo','v1.8.9', '', '', ''),
+ ('github.com/google/uuid','v1.3.0', '', '', ''),
+ ('github.com/gorilla/mux','v1.8.0', '', '', ''),
+ ('github.com/rcrowley/go-metrics','v0.0.0-20201227073835-cf1acfcdf475', '', '', ''),
+ ('github.com/spiffe/go-spiffe','v2.1.4','github.com/spiffe/go-spiffe/v2','/v2', ''),
+ ('github.com/stretchr/testify','v1.8.2', '', '', ''),
+ ('go.googlesource.com/crypto','v0.8.0','golang.org/x/crypto', '', ''),
+ ('gopkg.in/eapache/queue.v1','v1.1.0', '', '', ''),
+ ('gopkg.in/yaml.v3','v3.0.1', '', '', ''),
+ ('github.com/microsoft/go-winio','v0.6.0','github.com/Microsoft/go-winio', '', ''),
+ ('github.com/hashicorp/go-metrics','v0.3.10','github.com/armon/go-metrics', '', ''),
+ ('github.com/cenkalti/backoff','v2.2.1+incompatible', '', '', ''),
+ ('github.com/davecgh/go-spew','v1.1.1', '', '', ''),
+ ('github.com/edgexfoundry/go-mod-registry','v3.0.0','github.com/edgexfoundry/go-mod-registry/v3','/v3', ''),
+ ('github.com/fatih/color','v1.9.0', '', '', ''),
+ ('github.com/go-jose/go-jose','v3.0.0','github.com/go-jose/go-jose/v3','/v3', ''),
+ ('github.com/go-kit/log','v0.2.1', '', '', ''),
+ ('github.com/go-logfmt/logfmt','v0.5.1', '', '', ''),
+ ('github.com/go-playground/locales','v0.14.1', '', '', ''),
+ ('github.com/go-playground/universal-translator','v0.18.1', '', '', ''),
+ ('github.com/go-playground/validator','v10.13.0','github.com/go-playground/validator/v10','/v10', ''),
+ ('github.com/go-redis/redis','v7.3.0','github.com/go-redis/redis/v7','/v7', ''),
+ ('github.com/golang/protobuf','v1.5.2', '', '', ''),
+ ('github.com/gorilla/websocket','v1.4.2', '', '', ''),
+ ('github.com/hashicorp/consul','v1.20.0','github.com/hashicorp/consul/api', '', 'api'),
+ ('github.com/hashicorp/errwrap','v1.0.0', '', '', ''),
+ ('github.com/hashicorp/go-cleanhttp','v0.5.1', '', '', ''),
+ ('github.com/hashicorp/go-hclog','v0.14.1', '', '', ''),
+ ('github.com/hashicorp/go-immutable-radix','v1.3.0', '', '', ''),
+ ('github.com/hashicorp/go-multierror','v1.1.1', '', '', ''),
+ ('github.com/hashicorp/go-rootcerts','v1.0.2', '', '', ''),
+ ('github.com/hashicorp/golang-lru','v0.5.4', '', '', ''),
+ ('github.com/hashicorp/serf','v0.10.1', '', '', ''),
+ ('github.com/leodido/go-urn','v1.2.3', '', '', ''),
+ ('github.com/mattn/go-colorable','v0.1.12', '', '', ''),
+ ('github.com/mattn/go-isatty','v0.0.14', '', '', ''),
+ ('github.com/mitchellh/consulstructure','v0.0.0-20190329231841-56fdc4d2da54', '', '', ''),
+ ('github.com/mitchellh/copystructure','v1.2.0', '', '', ''),
+ ('github.com/mitchellh/go-homedir','v1.1.0', '', '', ''),
+ ('github.com/mitchellh/mapstructure','v1.5.0', '', '', ''),
+ ('github.com/mitchellh/reflectwalk','v1.0.2', '', '', ''),
+ ('github.com/nats-io/nats.go','v1.25.0', '', '', ''),
+ ('github.com/nats-io/nkeys','v0.4.4', '', '', ''),
+ ('github.com/nats-io/nuid','v1.0.1', '', '', ''),
+ ('github.com/pmezard/go-difflib','v1.0.0', '', '', ''),
+ ('github.com/stretchr/objx','v0.5.0', '', '', ''),
+ ('github.com/x448/float16','v0.8.4', '', '', ''),
+ ('github.com/zeebo/errs','v1.3.0', '', '', ''),
+ ('go.googlesource.com/mod','v0.8.0','golang.org/x/mod', '', ''),
+ ('go.googlesource.com/net','v0.9.0','golang.org/x/net', '', ''),
+ ('go.googlesource.com/sync','v0.1.0','golang.org/x/sync', '', ''),
+ ('go.googlesource.com/sys','v0.7.0','golang.org/x/sys', '', ''),
+ ('go.googlesource.com/text','v0.9.0','golang.org/x/text', '', ''),
+ ('go.googlesource.com/tools','v0.6.0','golang.org/x/tools', '', ''),
+ ('github.com/googleapis/go-genproto','v0.0.0-20230223222841-637eb2293923','google.golang.org/genproto', '', ''),
+ ('github.com/grpc/grpc-go','v1.53.0','google.golang.org/grpc', '', ''),
+ ('go.googlesource.com/protobuf','v1.28.1','google.golang.org/protobuf', '', ''),
+ ]
+ src_uri = set()
+ for d in dependencies:
+ src_uri.add(self._go_urifiy(*d))
+
+ checkvars = {}
+ checkvars['GO_DEPENDENCIES_SRC_URI'] = src_uri
+
+ self.assertTrue(os.path.isfile(deps_require_file))
+ self._test_recipe_contents(deps_require_file, checkvars, [])
+
+ def test_recipetool_create_go_replace_modules(self):
+ # Check handling of replaced modules
+ temprecipe = os.path.join(self.tempdir, 'recipe')
+ os.makedirs(temprecipe)
+
+ recipefile = os.path.join(temprecipe, 'openapi-generator_git.bb')
+ deps_require_file = os.path.join(temprecipe, 'openapi-generator', 'go-modules.inc')
+ lics_require_file = os.path.join(temprecipe, 'openapi-generator', 'go-licenses.inc')
+ modules_txt_file = os.path.join(temprecipe, 'openapi-generator', 'modules.txt')
+
+ srcuri = 'https://github.com/OpenAPITools/openapi-generator.git'
+ srcrev = "v7.2.0"
+ srcbranch = "master"
+ srcsubdir = "samples/openapi3/client/petstore/go"
+
+ result = runCmd('recipetool create -o %s %s -S %s -B %s --src-subdir %s' % (temprecipe, srcuri, srcrev, srcbranch, srcsubdir))
+
+ self.maxDiff = None
+ inherits = ['go-vendor']
+
+ checkvars = {}
+ checkvars['GO_IMPORT'] = "github.com/OpenAPITools/openapi-generator/samples/openapi3/client/petstore/go"
+ checkvars['SRC_URI'] = {'git://${GO_IMPORT};destsuffix=git/src/${GO_IMPORT};nobranch=1;name=${BPN};protocol=https',
+ 'file://modules.txt'}
+
+ self.assertNotIn('Traceback', result.output)
+ self.assertIn('No license file was detected for the main module', result.output)
+ self.assertTrue(os.path.isfile(recipefile))
+ self._test_recipe_contents(recipefile, checkvars, inherits)
+
+ # make sure that dependencies don't mention local directory ./go-petstore
+ dependencies = \
+ [ ('github.com/stretchr/testify','v1.8.4', '', '', ''),
+ ('go.googlesource.com/oauth2','v0.10.0','golang.org/x/oauth2', '', ''),
+ ('github.com/davecgh/go-spew','v1.1.1', '', '', ''),
+ ('github.com/golang/protobuf','v1.5.3', '', '', ''),
+ ('github.com/kr/pretty','v0.3.0', '', '', ''),
+ ('github.com/pmezard/go-difflib','v1.0.0', '', '', ''),
+ ('github.com/rogpeppe/go-internal','v1.9.0', '', '', ''),
+ ('go.googlesource.com/net','v0.12.0','golang.org/x/net', '', ''),
+ ('github.com/golang/appengine','v1.6.7','google.golang.org/appengine', '', ''),
+ ('go.googlesource.com/protobuf','v1.31.0','google.golang.org/protobuf', '', ''),
+ ('gopkg.in/check.v1','v1.0.0-20201130134442-10cb98267c6c', '', '', ''),
+ ('gopkg.in/yaml.v3','v3.0.1', '', '', ''),
+ ]
+
+ src_uri = set()
+ for d in dependencies:
+ src_uri.add(self._go_urifiy(*d))
+
+ checkvars = {}
+ checkvars['GO_DEPENDENCIES_SRC_URI'] = src_uri
+
+ self.assertTrue(os.path.isfile(deps_require_file))
+ self._test_recipe_contents(deps_require_file, checkvars, [])
class RecipetoolTests(RecipetoolBase):
@@ -536,7 +1000,15 @@ class RecipetoolTests(RecipetoolBase):
for p in paths:
dstdir = os.path.join(dstdir, p)
if not os.path.exists(dstdir):
- os.makedirs(dstdir)
+ try:
+ os.makedirs(dstdir)
+ except PermissionError:
+ return False
+ except OSError as e:
+ if e.errno == errno.EROFS:
+ return False
+ else:
+ raise e
if p == "lib":
# Can race with other tests
self.add_command_to_tearDown('rmdir --ignore-fail-on-non-empty %s' % dstdir)
@@ -544,8 +1016,12 @@ class RecipetoolTests(RecipetoolBase):
self.track_for_cleanup(dstdir)
dstfile = os.path.join(dstdir, os.path.basename(srcfile))
if srcfile != dstfile:
- shutil.copy(srcfile, dstfile)
+ try:
+ shutil.copy(srcfile, dstfile)
+ except PermissionError:
+ return False
self.track_for_cleanup(dstfile)
+ return True
def test_recipetool_load_plugin(self):
"""Test that recipetool loads only the first found plugin in BBPATH."""
@@ -559,15 +1035,17 @@ class RecipetoolTests(RecipetoolBase):
plugincontent = fh.readlines()
try:
self.assertIn('meta-selftest', srcfile, 'wrong bbpath plugin found')
- for path in searchpath:
- self._copy_file_with_cleanup(srcfile, path, 'lib', 'recipetool')
+ searchpath = [
+ path for path in searchpath
+ if self._copy_file_with_cleanup(srcfile, path, 'lib', 'recipetool')
+ ]
result = runCmd("recipetool --quiet count")
self.assertEqual(result.output, '1')
result = runCmd("recipetool --quiet multiloaded")
self.assertEqual(result.output, "no")
for path in searchpath:
result = runCmd("recipetool --quiet bbdir")
- self.assertEqual(result.output, path)
+ self.assertEqual(os.path.realpath(result.output), os.path.realpath(path))
os.unlink(os.path.join(result.output, 'lib', 'recipetool', 'bbpath.py'))
finally:
with open(srcfile, 'w') as fh:
@@ -579,7 +1057,10 @@ class RecipetoolTests(RecipetoolBase):
commonlicdir = get_bb_var('COMMON_LICENSE_DIR')
- d = bb.tinfoil.TinfoilDataStoreConnector
+ class DataConnectorCopy(bb.tinfoil.TinfoilDataStoreConnector):
+ pass
+
+ d = DataConnectorCopy
d.getVar = Mock(return_value=commonlicdir)
srctree = tempfile.mkdtemp(prefix='recipetoolqa')
@@ -727,9 +1208,9 @@ class RecipetoolAppendsrcBase(RecipetoolBase):
for uri in src_uri:
p = urllib.parse.urlparse(uri)
if p.scheme == 'file':
- return p.netloc + p.path
+ return p.netloc + p.path, uri
- def _test_appendsrcfile(self, testrecipe, filename=None, destdir=None, has_src_uri=True, srcdir=None, newfile=None, options=''):
+ def _test_appendsrcfile(self, testrecipe, filename=None, destdir=None, has_src_uri=True, srcdir=None, newfile=None, remove=None, machine=None , options=''):
if newfile is None:
newfile = self.testfile
@@ -756,12 +1237,40 @@ class RecipetoolAppendsrcBase(RecipetoolBase):
expectedlines = ['FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n',
'\n']
+
+ override = ""
+ if machine:
+ options += ' -m %s' % machine
+ override = ':append:%s' % machine
+ expectedlines.extend(['PACKAGE_ARCH = "${MACHINE_ARCH}"\n',
+ '\n'])
+
+ if remove:
+ for entry in remove:
+ if machine:
+ entry_remove_line = 'SRC_URI:remove:%s = " %s"\n' % (machine, entry)
+ else:
+ entry_remove_line = 'SRC_URI:remove = "%s"\n' % entry
+
+ expectedlines.extend([entry_remove_line,
+ '\n'])
+
if has_src_uri:
uri = 'file://%s' % filename
if expected_subdir:
uri += ';subdir=%s' % expected_subdir
- expectedlines[0:0] = ['SRC_URI += "%s"\n' % uri,
- '\n']
+ if machine:
+ src_uri_line = 'SRC_URI%s = " %s"\n' % (override, uri)
+ else:
+ src_uri_line = 'SRC_URI += "%s"\n' % uri
+
+ expectedlines.extend([src_uri_line, '\n'])
+
+ with open("/tmp/tmp.txt", "w") as file:
+ print(expectedlines, file=file)
+
+ if machine:
+ filename = '%s/%s' % (machine, filename)
return self._try_recipetool_appendsrcfile(testrecipe, newfile, destpath, options, expectedlines, [filename])
@@ -816,18 +1325,46 @@ class RecipetoolAppendsrcTests(RecipetoolAppendsrcBase):
def test_recipetool_appendsrcfile_existing_in_src_uri(self):
testrecipe = 'base-files'
- filepath = self._get_first_file_uri(testrecipe)
+ filepath,_ = self._get_first_file_uri(testrecipe)
self.assertTrue(filepath, 'Unable to test, no file:// uri found in SRC_URI for %s' % testrecipe)
self._test_appendsrcfile(testrecipe, filepath, has_src_uri=False)
- def test_recipetool_appendsrcfile_existing_in_src_uri_diff_params(self):
+ def test_recipetool_appendsrcfile_existing_in_src_uri_diff_params(self, machine=None):
testrecipe = 'base-files'
subdir = 'tmp'
- filepath = self._get_first_file_uri(testrecipe)
+ filepath, srcuri_entry = self._get_first_file_uri(testrecipe)
self.assertTrue(filepath, 'Unable to test, no file:// uri found in SRC_URI for %s' % testrecipe)
- output = self._test_appendsrcfile(testrecipe, filepath, subdir, has_src_uri=False)
- self.assertTrue(any('with different parameters' in l for l in output))
+ self._test_appendsrcfile(testrecipe, filepath, subdir, machine=machine, remove=[srcuri_entry])
+
+ def test_recipetool_appendsrcfile_machine(self):
+ # A very basic test
+ self._test_appendsrcfile('base-files', 'a-file', machine='mymachine')
+
+ # Force cleaning the output of previous test
+ self.tearDownLocal()
+
+ # A more complex test: existing entry in src_uri with different param
+ self.test_recipetool_appendsrcfile_existing_in_src_uri_diff_params(machine='mymachine')
+
+ def test_recipetool_appendsrcfile_update_recipe_basic(self):
+ testrecipe = "mtd-utils-selftest"
+ recipefile = get_bb_var('FILE', testrecipe)
+ self.assertIn('meta-selftest', recipefile, 'This test expect %s recipe to be in meta-selftest')
+ cmd = 'recipetool appendsrcfile -W -u meta-selftest %s %s' % (testrecipe, self.testfile)
+ result = runCmd(cmd)
+ self.assertNotIn('Traceback', result.output)
+ self.add_command_to_tearDown('cd %s; rm -f %s/%s; git checkout .' % (os.path.dirname(recipefile), testrecipe, os.path.basename(self.testfile)))
+
+ expected_status = [(' M', '.*/%s$' % os.path.basename(recipefile)),
+ ('??', '.*/%s/%s$' % (testrecipe, os.path.basename(self.testfile)))]
+ self._check_repo_status(os.path.dirname(recipefile), expected_status)
+ result = runCmd('git diff %s' % os.path.basename(recipefile), cwd=os.path.dirname(recipefile))
+ removelines = []
+ addlines = [
+ 'file://%s \\\\' % os.path.basename(self.testfile),
+ ]
+ self._check_diff(result.output, addlines, removelines)
def test_recipetool_appendsrcfile_replace_file_srcdir(self):
testrecipe = 'bash'
diff --git a/meta/lib/oeqa/selftest/cases/recipeutils.py b/meta/lib/oeqa/selftest/cases/recipeutils.py
index 74b2098ae8..2cb4445f81 100644
--- a/meta/lib/oeqa/selftest/cases/recipeutils.py
+++ b/meta/lib/oeqa/selftest/cases/recipeutils.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
@@ -48,7 +50,7 @@ class RecipeUtilsTests(OESelftestTestCase):
+SRC_URI[md5sum] = "aaaaaa"
SRC_URI[sha256sum] = "ac6894d876e45878faae493b0cf61d0e28ec417334448ac0a6ea2229d8343051"
- RDEPENDS:${PN} += "${PYTHON_PN}-threading"
+ RDEPENDS:${PN} += "python3-threading"
"""
patchlines = []
for f in patches:
diff --git a/meta/lib/oeqa/selftest/cases/reproducible.py b/meta/lib/oeqa/selftest/cases/reproducible.py
index 5042c11d8e..80e830136f 100644
--- a/meta/lib/oeqa/selftest/cases/reproducible.py
+++ b/meta/lib/oeqa/selftest/cases/reproducible.py
@@ -43,13 +43,14 @@ class CompareResult(object):
return (self.status, self.test) < (other.status, other.test)
class PackageCompareResults(object):
- def __init__(self):
+ def __init__(self, exclusions):
self.total = []
self.missing = []
self.different = []
self.different_excluded = []
self.same = []
self.active_exclusions = set()
+ exclude_packages.extend((exclusions or "").split())
def add_result(self, r):
self.total.append(r)
@@ -133,8 +134,10 @@ class ReproducibleTests(OESelftestTestCase):
# targets are the things we want to test the reproducibility of
targets = ['core-image-minimal', 'core-image-sato', 'core-image-full-cmdline', 'core-image-weston', 'world']
+
# sstate targets are things to pull from sstate to potentially cut build/debugging time
sstate_targets = []
+
save_results = False
if 'OEQA_DEBUGGING_SAVED_OUTPUT' in os.environ:
save_results = os.environ['OEQA_DEBUGGING_SAVED_OUTPUT']
@@ -149,11 +152,29 @@ class ReproducibleTests(OESelftestTestCase):
def setUpLocal(self):
super().setUpLocal()
- needed_vars = ['TOPDIR', 'TARGET_PREFIX', 'BB_NUMBER_THREADS']
+ needed_vars = [
+ 'TOPDIR',
+ 'TARGET_PREFIX',
+ 'BB_NUMBER_THREADS',
+ 'BB_HASHSERVE',
+ 'OEQA_REPRODUCIBLE_TEST_PACKAGE',
+ 'OEQA_REPRODUCIBLE_TEST_TARGET',
+ 'OEQA_REPRODUCIBLE_TEST_SSTATE_TARGETS',
+ 'OEQA_REPRODUCIBLE_EXCLUDED_PACKAGES',
+ ]
bb_vars = get_bb_vars(needed_vars)
for v in needed_vars:
setattr(self, v.lower(), bb_vars[v])
+ if bb_vars['OEQA_REPRODUCIBLE_TEST_PACKAGE']:
+ self.package_classes = bb_vars['OEQA_REPRODUCIBLE_TEST_PACKAGE'].split()
+
+ if bb_vars['OEQA_REPRODUCIBLE_TEST_TARGET']:
+ self.targets = bb_vars['OEQA_REPRODUCIBLE_TEST_TARGET'].split()
+
+ if bb_vars['OEQA_REPRODUCIBLE_TEST_SSTATE_TARGETS']:
+ self.sstate_targets = bb_vars['OEQA_REPRODUCIBLE_TEST_SSTATE_TARGETS'].split()
+
self.extraresults = {}
self.extraresults.setdefault('reproducible.rawlogs', {})['log'] = ''
self.extraresults.setdefault('reproducible', {}).setdefault('files', {})
@@ -162,7 +183,7 @@ class ReproducibleTests(OESelftestTestCase):
self.extraresults['reproducible.rawlogs']['log'] += msg
def compare_packages(self, reference_dir, test_dir, diffutils_sysroot):
- result = PackageCompareResults()
+ result = PackageCompareResults(self.oeqa_reproducible_excluded_packages)
old_cwd = os.getcwd()
try:
@@ -202,10 +223,9 @@ class ReproducibleTests(OESelftestTestCase):
config = textwrap.dedent('''\
PACKAGE_CLASSES = "{package_classes}"
- INHIBIT_PACKAGE_STRIP = "1"
TMPDIR = "{tmpdir}"
LICENSE_FLAGS_ACCEPTED = "commercial"
- DISTRO_FEATURES:append = ' systemd pam'
+ DISTRO_FEATURES:append = ' pam'
USERADDEXTENSION = "useradd-staticids"
USERADD_ERROR_DYNAMIC = "skip"
USERADD_UID_TABLES += "files/static-passwd"
@@ -223,7 +243,7 @@ class ReproducibleTests(OESelftestTestCase):
# mirror, forcing a complete build from scratch
config += textwrap.dedent('''\
SSTATE_DIR = "${TMPDIR}/sstate"
- SSTATE_MIRRORS = ""
+ SSTATE_MIRRORS = "file://.*/.*-native.* http://sstate.yoctoproject.org/all/PATH;downloadfilename=PATH file://.*/.*-cross.* http://sstate.yoctoproject.org/all/PATH;downloadfilename=PATH"
''')
self.logger.info("Building %s (sstate%s allowed)..." % (name, '' if use_sstate else ' NOT'))
@@ -290,9 +310,13 @@ class ReproducibleTests(OESelftestTestCase):
self.copy_file(d.reference, '/'.join([save_dir, 'packages-excluded', strip_topdir(d.reference)]))
self.copy_file(d.test, '/'.join([save_dir, 'packages-excluded', strip_topdir(d.test)]))
- if result.missing or result.different:
- fails.append("The following %s packages are missing or different and not in exclusion list: %s" %
- (c, '\n'.join(r.test for r in (result.missing + result.different))))
+ if result.different:
+ fails.append("The following %s packages are different and not in exclusion list:\n%s" %
+ (c, '\n'.join(r.test for r in (result.different))))
+
+ if result.missing and len(self.sstate_targets) == 0:
+ fails.append("The following %s packages are missing and not in exclusion list:\n%s" %
+ (c, '\n'.join(r.test for r in (result.missing))))
# Clean up empty directories
if self.save_results:
diff --git a/meta/lib/oeqa/selftest/cases/resulttooltests.py b/meta/lib/oeqa/selftest/cases/resulttooltests.py
index dac5c46801..c3303f3fbb 100644
--- a/meta/lib/oeqa/selftest/cases/resulttooltests.py
+++ b/meta/lib/oeqa/selftest/cases/resulttooltests.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
@@ -69,7 +71,7 @@ class ResultToolTests(OESelftestTestCase):
self.assertTrue('target_result1' in results['runtime/mydistro/qemux86/image'], msg="Pair not correct:%s" % results)
self.assertTrue('target_result3' in results['runtime/mydistro/qemux86-64/image'], msg="Pair not correct:%s" % results)
- def test_regrresion_can_get_regression_result(self):
+ def test_regression_can_get_regression_result(self):
base_result_data = {'result': {'test1': {'status': 'PASSED'},
'test2': {'status': 'PASSED'},
'test3': {'status': 'FAILED'},
@@ -96,3 +98,278 @@ class ResultToolTests(OESelftestTestCase):
resultutils.append_resultsdata(results, ResultToolTests.target_results_data, configmap=resultutils.flatten_map)
self.assertEqual(len(results[''].keys()), 5, msg="Flattened results not correct %s" % str(results))
+ def test_results_without_metadata_can_be_compared(self):
+ base_configuration = {"configuration": {
+ "TEST_TYPE": "oeselftest",
+ "TESTSERIES": "series1",
+ "IMAGE_BASENAME": "image",
+ "IMAGE_PKGTYPE": "ipk",
+ "DISTRO": "mydistro",
+ "MACHINE": "qemux86",
+ "STARTTIME": 1672527600
+ }, "result": {}}
+ target_configuration = {"configuration": {
+ "TEST_TYPE": "oeselftest",
+ "TESTSERIES": "series1",
+ "IMAGE_BASENAME": "image",
+ "IMAGE_PKGTYPE": "ipk",
+ "DISTRO": "mydistro",
+ "MACHINE": "qemux86",
+ "STARTTIME": 1672527600
+ }, "result": {}}
+ self.assertTrue(regression.can_be_compared(self.logger, base_configuration, target_configuration),
+ msg="incorrect metadata filtering, tests without metadata should be compared")
+
+ def test_target_result_with_missing_metadata_can_not_be_compared(self):
+ base_configuration = {"configuration": {
+ "TEST_TYPE": "oeselftest",
+ "TESTSERIES": "series1",
+ "IMAGE_BASENAME": "image",
+ "IMAGE_PKGTYPE": "ipk",
+ "DISTRO": "mydistro",
+ "MACHINE": "qemux86",
+ "OESELFTEST_METADATA": {
+ "run_all_tests": True,
+ "run_tests": None,
+ "skips": None,
+ "machine": None,
+ "select_tags": ["toolchain-user", "toolchain-system"],
+ "exclude_tags": None
+ }}, "result": {}}
+ target_configuration = {"configuration": {"TEST_TYPE": "oeselftest",
+ "TESTSERIES": "series1",
+ "IMAGE_BASENAME": "image",
+ "IMAGE_PKGTYPE": "ipk",
+ "DISTRO": "mydistro",
+ "MACHINE": "qemux86",
+ "STARTTIME": 1672527600
+ }, "result": {}}
+ self.assertFalse(regression.can_be_compared(self.logger, base_configuration, target_configuration),
+ msg="incorrect metadata filtering, tests should not be compared")
+
+ def test_results_with_matching_metadata_can_be_compared(self):
+ base_configuration = {"configuration": {
+ "TEST_TYPE": "oeselftest",
+ "TESTSERIES": "series1",
+ "IMAGE_BASENAME": "image",
+ "IMAGE_PKGTYPE": "ipk",
+ "DISTRO": "mydistro",
+ "MACHINE": "qemux86",
+ "STARTTIME": 1672527600,
+ "OESELFTEST_METADATA": {"run_all_tests": True,
+ "run_tests": None,
+ "skips": None,
+ "machine": None,
+ "select_tags": ["toolchain-user", "toolchain-system"],
+ "exclude_tags": None}
+ }, "result": {}}
+ target_configuration = {"configuration": {
+ "TEST_TYPE": "oeselftest",
+ "TESTSERIES": "series1",
+ "IMAGE_BASENAME": "image",
+ "IMAGE_PKGTYPE": "ipk",
+ "DISTRO": "mydistro",
+ "MACHINE": "qemux86",
+ "STARTTIME": 1672527600,
+ "OESELFTEST_METADATA": {"run_all_tests": True,
+ "run_tests": None,
+ "skips": None,
+ "machine": None,
+ "select_tags": ["toolchain-user", "toolchain-system"],
+ "exclude_tags": None}
+ }, "result": {}}
+ self.assertTrue(regression.can_be_compared(self.logger, base_configuration, target_configuration),
+ msg="incorrect metadata filtering, tests with matching metadata should be compared")
+
+ def test_results_with_mismatching_metadata_can_not_be_compared(self):
+ base_configuration = {"configuration": {
+ "TEST_TYPE": "oeselftest",
+ "TESTSERIES": "series1",
+ "IMAGE_BASENAME": "image",
+ "IMAGE_PKGTYPE": "ipk",
+ "DISTRO": "mydistro",
+ "MACHINE": "qemux86",
+ "STARTTIME": 1672527600,
+ "OESELFTEST_METADATA": {"run_all_tests": True,
+ "run_tests": None,
+ "skips": None,
+ "machine": None,
+ "select_tags": ["toolchain-user", "toolchain-system"],
+ "exclude_tags": None}
+ }, "result": {}}
+ target_configuration = {"configuration": {
+ "TEST_TYPE": "oeselftest",
+ "TESTSERIES": "series1",
+ "IMAGE_BASENAME": "image",
+ "IMAGE_PKGTYPE": "ipk",
+ "DISTRO": "mydistro",
+ "MACHINE": "qemux86",
+ "STARTTIME": 1672527600,
+ "OESELFTEST_METADATA": {"run_all_tests": True,
+ "run_tests": None,
+ "skips": None,
+ "machine": None,
+ "select_tags": ["machine"],
+ "exclude_tags": None}
+ }, "result": {}}
+ self.assertFalse(regression.can_be_compared(self.logger, base_configuration, target_configuration),
+ msg="incorrect metadata filtering, tests with mismatching metadata should not be compared")
+
+ def test_metadata_matching_is_only_checked_for_relevant_test_type(self):
+ base_configuration = {"configuration": {"TEST_TYPE": "runtime",
+ "TESTSERIES": "series1",
+ "IMAGE_BASENAME": "image",
+ "IMAGE_PKGTYPE": "ipk",
+ "DISTRO": "mydistro",
+ "MACHINE": "qemux86",
+ "STARTTIME": 1672527600,
+ "OESELFTEST_METADATA": {"run_all_tests": True,
+ "run_tests": None,
+ "skips": None,
+ "machine": None,
+ "select_tags": ["toolchain-user", "toolchain-system"],
+ "exclude_tags": None}}, "result": {}}
+ target_configuration = {"configuration": {"TEST_TYPE": "runtime",
+ "TESTSERIES": "series1",
+ "IMAGE_BASENAME": "image",
+ "IMAGE_PKGTYPE": "ipk",
+ "DISTRO": "mydistro",
+ "MACHINE": "qemux86",
+ "STARTTIME": 1672527600,
+ "OESELFTEST_METADATA": {"run_all_tests": True,
+ "run_tests": None,
+ "skips": None,
+ "machine": None,
+ "select_tags": ["machine"],
+ "exclude_tags": None}}, "result": {}}
+ self.assertTrue(regression.can_be_compared(self.logger, base_configuration, target_configuration),
+ msg="incorrect metadata filtering, %s tests should be compared" % base_configuration['configuration']['TEST_TYPE'])
+
+ def test_machine_matches(self):
+ base_configuration = {"configuration": {
+ "TEST_TYPE": "runtime",
+ "MACHINE": "qemux86"}, "result": {}}
+ target_configuration = {"configuration": {
+ "TEST_TYPE": "runtime",
+ "MACHINE": "qemux86"
+ }, "result": {}}
+ self.assertTrue(regression.can_be_compared(self.logger, base_configuration, target_configuration),
+ msg="incorrect machine filtering, identical machine tests should be compared")
+
+ def test_machine_mismatches(self):
+ base_configuration = {"configuration": {
+ "TEST_TYPE": "runtime",
+ "MACHINE": "qemux86"
+ }, "result": {}}
+ target_configuration = {"configuration": {
+ "TEST_TYPE": "runtime",
+ "MACHINE": "qemux86_64"
+ }, "result": {}}
+ self.assertFalse(regression.can_be_compared(self.logger, base_configuration, target_configuration),
+ msg="incorrect machine filtering, mismatching machine tests should not be compared")
+
+ def test_can_not_compare_non_ltp_tests(self):
+ base_configuration = {"configuration": {
+ "TEST_TYPE": "runtime",
+ "MACHINE": "qemux86"
+ }, "result": {
+ "ltpresult_foo": {
+ "status": "PASSED"
+ }}}
+ target_configuration = {"configuration": {
+ "TEST_TYPE": "runtime",
+ "MACHINE": "qemux86_64"
+ }, "result": {
+ "bar": {
+ "status": "PASSED"
+ }}}
+ self.assertFalse(regression.can_be_compared(self.logger, base_configuration, target_configuration),
+ msg="incorrect ltpresult filtering, mismatching ltpresult content should not be compared")
+
+ def test_can_compare_ltp_tests(self):
+ base_configuration = {"configuration": {
+ "TEST_TYPE": "runtime",
+ "MACHINE": "qemux86"
+ }, "result": {
+ "ltpresult_foo": {
+ "status": "PASSED"
+ }}}
+ target_configuration = {"configuration": {
+ "TEST_TYPE": "runtime",
+ "MACHINE": "qemux86"
+ }, "result": {
+ "ltpresult_foo": {
+ "status": "PASSED"
+ }}}
+ self.assertTrue(regression.can_be_compared(self.logger, base_configuration, target_configuration),
+ msg="incorrect ltpresult filtering, matching ltpresult content should be compared")
+
+ def test_can_match_non_static_ptest_names(self):
+ base_configuration = {"a": {
+ "conf_X": {
+ "configuration": {
+ "TEST_TYPE": "runtime",
+ "MACHINE": "qemux86"
+ }, "result": {
+ "ptestresult.lttng-tools.foo_-_bar_-_moo": {
+ "status": "PASSED"
+ },
+ "ptestresult.babeltrace.bar_-_moo_-_foo": {
+ "status": "PASSED"
+ },
+ "ptestresult.babeltrace2.moo_-_foo_-_bar": {
+ "status": "PASSED"
+ },
+ "ptestresult.curl.test_0000__foo_out_of_bar": {
+ "status": "PASSED"
+ },
+ "ptestresult.dbus.test_0000__foo_out_of_bar,_remaining:_00:02,_took_0.032s,_duration:_03:32_": {
+ "status": "PASSED"
+ },
+ "ptestresult.binutils-ld.in testcase /foo/build-st-bar/moo/ctf.exp": {
+ "status": "PASSED"
+ },
+ "ptestresult.gcc-libstdc++-v3.Couldn't create remote directory /tmp/runtest.30975 on target": {
+ "status": "PASSED"
+ },
+ "ptestresult.gcc-libgomp.Couldn't create remote directory /tmp/runtest.3657621 on": {
+ "status": "PASSED"
+ }
+ }}}}
+ target_configuration = {"a": {
+ "conf_Y": {
+ "configuration": {
+ "TEST_TYPE": "runtime",
+ "MACHINE": "qemux86"
+ }, "result": {
+ "ptestresult.lttng-tools.foo_-_yyy_-_zzz": {
+ "status": "PASSED"
+ },
+ "ptestresult.babeltrace.bar_-_zzz_-_xxx": {
+ "status": "PASSED"
+ },
+ "ptestresult.babeltrace2.moo_-_xxx_-_yyy": {
+ "status": "PASSED"
+ },
+ "ptestresult.curl.test_0000__xxx_out_of_yyy": {
+ "status": "PASSED"
+ },
+ "ptestresult.dbus.test_0000__yyy_out_of_zzz,_remaining:_00:03,_took_0.034s,_duration:_03:30_": {
+ "status": "PASSED"
+ },
+ "ptestresult.binutils-ld.in testcase /xxx/build-st-yyy/zzz/ctf.exp": {
+ "status": "PASSED"
+ },
+ "ptestresult.gcc-libstdc++-v3.Couldn't create remote directory /tmp/runtest.45678 on target": {
+ "status": "PASSED"
+ },
+ "ptestresult.gcc-libgomp.Couldn't create remote directory /tmp/runtest.3657621 on": {
+ "status": "PASSED"
+ }
+ }}}}
+ regression.fixup_ptest_names(base_configuration, self.logger)
+ regression.fixup_ptest_names(target_configuration, self.logger)
+ result, resultstring = regression.compare_result(
+ self.logger, "A", "B", base_configuration["a"]["conf_X"], target_configuration["a"]["conf_Y"])
+ self.assertDictEqual(
+ result, {}, msg=f"ptests should be compared: {resultstring}")
diff --git a/meta/lib/oeqa/selftest/cases/rootfspostcommandstests.py b/meta/lib/oeqa/selftest/cases/rootfspostcommandstests.py
new file mode 100644
index 0000000000..44e2c09a6f
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/rootfspostcommandstests.py
@@ -0,0 +1,97 @@
+# SPDX-FileCopyrightText: Huawei Inc.
+#
+# SPDX-License-Identifier: MIT
+
+import os
+import oe
+import unittest
+from oeqa.selftest.case import OESelftestTestCase
+from oeqa.utils.commands import bitbake, get_bb_vars
+
+class ShadowUtilsTidyFiles(OESelftestTestCase):
+ """
+ Check if shadow image rootfs files are tidy.
+
+ The tests are focused on testing the functionality provided by the
+ 'tidy_shadowutils_files' rootfs postprocess command (via
+ SORT_PASSWD_POSTPROCESS_COMMAND).
+ """
+
+ def sysconf_build(self):
+ """
+ Verify if shadow tidy files tests are to be run and if yes, build a
+ test image and return its sysconf rootfs path.
+ """
+
+ test_image = "core-image-minimal"
+
+ config = 'IMAGE_CLASSES += "extrausers"\n'
+ config += 'EXTRA_USERS_PARAMS = "groupadd -g 1000 oeqatester; "\n'
+ config += 'EXTRA_USERS_PARAMS += "useradd -p \'\' -u 1000 -N -g 1000 oeqatester; "\n'
+ self.write_config(config)
+
+ vars = get_bb_vars(("IMAGE_ROOTFS", "SORT_PASSWD_POSTPROCESS_COMMAND", "sysconfdir"),
+ test_image)
+ passwd_postprocess_cmd = vars["SORT_PASSWD_POSTPROCESS_COMMAND"]
+ self.assertIsNotNone(passwd_postprocess_cmd)
+ if (passwd_postprocess_cmd.strip() != 'tidy_shadowutils_files;'):
+ raise unittest.SkipTest("Testcase skipped as 'tidy_shadowutils_files' "
+ "rootfs post process command is not the set SORT_PASSWD_POSTPROCESS_COMMAND.")
+
+ rootfs = vars["IMAGE_ROOTFS"]
+ self.assertIsNotNone(rootfs)
+ sysconfdir = vars["sysconfdir"]
+ bitbake(test_image)
+ self.assertIsNotNone(sysconfdir)
+
+ return oe.path.join(rootfs, sysconfdir)
+
+ def test_shadowutils_backup_files(self):
+ """
+ Test that the rootfs doesn't include any known shadow backup files.
+ """
+
+ backup_files = (
+ 'group-',
+ 'gshadow-',
+ 'passwd-',
+ 'shadow-',
+ 'subgid-',
+ 'subuid-',
+ )
+
+ rootfs_sysconfdir = self.sysconf_build()
+ found = []
+ for backup_file in backup_files:
+ backup_filepath = oe.path.join(rootfs_sysconfdir, backup_file)
+ if os.path.exists(backup_filepath):
+ found.append(backup_file)
+ if (found):
+ raise Exception('The following shadow backup files were found in '
+ 'the rootfs: %s' % found)
+
+ def test_shadowutils_sorted_files(self):
+ """
+ Test that the 'passwd' and the 'group' shadow utils files are ordered
+ by ID.
+ """
+
+ files = (
+ 'passwd',
+ 'group',
+ )
+
+ rootfs_sysconfdir = self.sysconf_build()
+ unsorted = []
+ for file in files:
+ filepath = oe.path.join(rootfs_sysconfdir, file)
+ with open(filepath, 'rb') as f:
+ ids = []
+ lines = f.readlines()
+ for line in lines:
+ entries = line.split(b':')
+ ids.append(int(entries[2]))
+ if (ids != sorted(ids)):
+ unsorted.append(file)
+ if (unsorted):
+ raise Exception("The following files were not sorted by ID as expected: %s" % unsorted)
diff --git a/meta/lib/oeqa/selftest/cases/rpmtests.py b/meta/lib/oeqa/selftest/cases/rpmtests.py
new file mode 100644
index 0000000000..902d7dca3d
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/rpmtests.py
@@ -0,0 +1,14 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+from oeqa.selftest.case import OESelftestTestCase
+from oeqa.utils.commands import bitbake
+
+class BitbakeTests(OESelftestTestCase):
+
+ def test_rpm_filenames(self):
+ test_recipe = "testrpm"
+ bitbake(test_recipe)
diff --git a/meta/lib/oeqa/selftest/cases/runcmd.py b/meta/lib/oeqa/selftest/cases/runcmd.py
index e9612389fe..70047ca0ca 100644
--- a/meta/lib/oeqa/selftest/cases/runcmd.py
+++ b/meta/lib/oeqa/selftest/cases/runcmd.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
@@ -56,11 +58,11 @@ class RunCmdTests(OESelftestTestCase):
self.assertEqual(result.status, 0)
def test_result_assertion(self):
- self.assertRaisesRegexp(AssertionError, "Command 'echo .* false' returned non-zero exit status 1:\nfoobar",
+ self.assertRaisesRegex(AssertionError, "Command 'echo .* false' returned non-zero exit status 1:\nfoobar",
runCmd, "echo foobar >&2; false", shell=True)
def test_result_exception(self):
- self.assertRaisesRegexp(CommandError, "Command 'echo .* false' returned non-zero exit status 1 with output: foobar",
+ self.assertRaisesRegex(CommandError, "Command 'echo .* false' returned non-zero exit status 1 with output: foobar",
runCmd, "echo foobar >&2; false", shell=True, assert_error=False)
def test_output(self):
diff --git a/meta/lib/oeqa/selftest/cases/runqemu.py b/meta/lib/oeqa/selftest/cases/runqemu.py
index c1d277a095..f01e1eec66 100644
--- a/meta/lib/oeqa/selftest/cases/runqemu.py
+++ b/meta/lib/oeqa/selftest/cases/runqemu.py
@@ -4,13 +4,15 @@
# SPDX-License-Identifier: MIT
#
+import os
import re
-import tempfile
import time
import oe.types
from oeqa.core.decorator import OETestTag
+from oeqa.core.decorator.data import skipIfNotArch, skipIfNotMachine
from oeqa.selftest.case import OESelftestTestCase
-from oeqa.utils.commands import bitbake, runqemu, get_bb_var, runCmd
+from oeqa.utils.commands import bitbake, runqemu, get_bb_var
+
@OETestTag("runqemu")
class RunqemuTests(OESelftestTestCase):
@@ -22,23 +24,26 @@ class RunqemuTests(OESelftestTestCase):
def setUpLocal(self):
super(RunqemuTests, self).setUpLocal()
self.recipe = 'core-image-minimal'
- self.machine = 'qemux86-64'
- self.fstypes = "ext4 iso hddimg wic.vmdk wic.qcow2 wic.vdi"
- self.cmd_common = "runqemu nographic"
+ self.machine = self.td['MACHINE']
+ self.image_link_name = get_bb_var('IMAGE_LINK_NAME', self.recipe)
- kvm = oe.types.qemu_use_kvm(get_bb_var('QEMU_USE_KVM'), 'x86_64')
+ self.fstypes = "ext4"
+ if self.td["HOST_ARCH"] in ('i586', 'i686', 'x86_64'):
+ self.fstypes += " iso hddimg"
+ if self.machine == "qemux86-64":
+ self.fstypes += " wic.vmdk wic.qcow2 wic.vdi"
+
+ self.cmd_common = "runqemu nographic"
+ kvm = oe.types.qemu_use_kvm(get_bb_var('QEMU_USE_KVM'), self.td["TARGET_ARCH"])
if kvm:
self.cmd_common += " kvm"
self.write_config(
"""
-MACHINE = "%s"
IMAGE_FSTYPES = "%s"
# 10 means 1 second
SYSLINUX_TIMEOUT = "10"
-"""
-% (self.machine, self.fstypes)
- )
+""" % self.fstypes)
if not RunqemuTests.image_is_ready:
RunqemuTests.deploy_dir_image = get_bb_var('DEPLOY_DIR_IMAGE')
@@ -57,14 +62,17 @@ SYSLINUX_TIMEOUT = "10"
cmd = "%s %s ext4" % (self.cmd_common, self.machine)
with runqemu(self.recipe, ssh=False, launch_cmd=cmd) as qemu:
with open(qemu.qemurunnerlog) as f:
- self.assertIn('rootfs.ext4', f.read(), "Failed: %s" % cmd)
+ regexp = r'\nROOTFS: .*\.ext4]\n'
+ self.assertRegex(f.read(), regexp, "Failed to find '%s' in '%s' after running '%s'" % (regexp, qemu.qemurunnerlog, cmd))
+ @skipIfNotArch(['i586', 'i686', 'x86_64'])
def test_boot_machine_iso(self):
"""Test runqemu machine iso"""
cmd = "%s %s iso" % (self.cmd_common, self.machine)
with runqemu(self.recipe, ssh=False, launch_cmd=cmd) as qemu:
with open(qemu.qemurunnerlog) as f:
- self.assertIn('media=cdrom', f.read(), "Failed: %s" % cmd)
+ text_in = 'media=cdrom'
+ self.assertIn(text_in, f.read(), "Failed to find '%s' in '%s' after running '%s'" % (text_in, qemu.qemurunnerlog, cmd))
def test_boot_recipe_image(self):
"""Test runqemu recipe-image"""
@@ -73,20 +81,24 @@ SYSLINUX_TIMEOUT = "10"
with open(qemu.qemurunnerlog) as f:
self.assertTrue(qemu.runner.logged, "Failed: %s, %s" % (cmd, f.read()))
-
+ # https://bugzilla.yoctoproject.org/show_bug.cgi?id=14963
+ @skipIfNotMachine("qemux86-64", "tests are qemux86-64 specific currently")
def test_boot_recipe_image_vmdk(self):
"""Test runqemu recipe-image vmdk"""
cmd = "%s %s wic.vmdk" % (self.cmd_common, self.recipe)
with runqemu(self.recipe, ssh=False, launch_cmd=cmd) as qemu:
with open(qemu.qemurunnerlog) as f:
- self.assertIn('format=vmdk', f.read(), "Failed: %s" % cmd)
+ text_in = 'format=vmdk'
+ self.assertIn(text_in, f.read(), "Failed to find '%s' in '%s' after running '%s'" % (text_in, qemu.qemurunnerlog, cmd))
+ @skipIfNotMachine("qemux86-64", "tests are qemux86-64 specific currently")
def test_boot_recipe_image_vdi(self):
"""Test runqemu recipe-image vdi"""
cmd = "%s %s wic.vdi" % (self.cmd_common, self.recipe)
with runqemu(self.recipe, ssh=False, launch_cmd=cmd) as qemu:
with open(qemu.qemurunnerlog) as f:
- self.assertIn('format=vdi', f.read(), "Failed: %s" % cmd)
+ text_in = 'format=vdi'
+ self.assertIn(text_in, f.read(), "Failed to find '%s' in '%s' after running '%s'" % (text_in, qemu.qemurunnerlog, cmd))
def test_boot_deploy(self):
"""Test runqemu deploy_dir_image"""
@@ -95,7 +107,7 @@ SYSLINUX_TIMEOUT = "10"
with open(qemu.qemurunnerlog) as f:
self.assertTrue(qemu.runner.logged, "Failed: %s, %s" % (cmd, f.read()))
-
+ @skipIfNotArch(['i586', 'i686', 'x86_64'])
def test_boot_deploy_hddimg(self):
"""Test runqemu deploy_dir_image hddimg"""
cmd = "%s %s hddimg" % (self.cmd_common, self.deploy_dir_image)
@@ -110,6 +122,7 @@ SYSLINUX_TIMEOUT = "10"
with open(qemu.qemurunnerlog) as f:
self.assertIn(' -netdev user', f.read(), "Failed: %s" % cmd)
+ @skipIfNotMachine("qemux86-64", "tests are qemux86-64 specific currently")
def test_boot_machine_slirp_qcow2(self):
"""Test runqemu machine slirp qcow2"""
cmd = "%s slirp wic.qcow2 %s" % (self.cmd_common, self.machine)
@@ -119,7 +132,7 @@ SYSLINUX_TIMEOUT = "10"
def test_boot_qemu_boot(self):
"""Test runqemu /path/to/image.qemuboot.conf"""
- qemuboot_conf = "%s-%s.qemuboot.conf" % (self.recipe, self.machine)
+ qemuboot_conf = "%s.qemuboot.conf" % (self.image_link_name)
qemuboot_conf = os.path.join(self.deploy_dir_image, qemuboot_conf)
if not os.path.exists(qemuboot_conf):
self.skipTest("%s not found" % qemuboot_conf)
@@ -130,7 +143,7 @@ SYSLINUX_TIMEOUT = "10"
def test_boot_rootfs(self):
"""Test runqemu /path/to/rootfs.ext4"""
- rootfs = "%s-%s.ext4" % (self.recipe, self.machine)
+ rootfs = "%s.ext4" % (self.image_link_name)
rootfs = os.path.join(self.deploy_dir_image, rootfs)
if not os.path.exists(rootfs):
self.skipTest("%s not found" % rootfs)
@@ -157,10 +170,11 @@ class QemuTest(OESelftestTestCase):
def setUpClass(cls):
super(QemuTest, cls).setUpClass()
cls.recipe = 'core-image-minimal'
- cls.machine = get_bb_var('MACHINE')
- cls.deploy_dir_image = get_bb_var('DEPLOY_DIR_IMAGE')
+ cls.machine = get_bb_var('MACHINE')
+ cls.deploy_dir_image = get_bb_var('DEPLOY_DIR_IMAGE')
+ cls.image_link_name = get_bb_var('IMAGE_LINK_NAME', cls.recipe)
cls.cmd_common = "runqemu nographic"
- cls.qemuboot_conf = "%s-%s.qemuboot.conf" % (cls.recipe, cls.machine)
+ cls.qemuboot_conf = "%s.qemuboot.conf" % (cls.image_link_name)
cls.qemuboot_conf = os.path.join(cls.deploy_dir_image, cls.qemuboot_conf)
bitbake(cls.recipe)
@@ -191,22 +205,12 @@ class QemuTest(OESelftestTestCase):
qemu_shutdown_succeeded = self._start_qemu_shutdown_check_if_shutdown_succeeded(qemu, shutdown_timeout)
self.assertTrue(qemu_shutdown_succeeded, 'Failed: %s does not shutdown within timeout(%s)' % (self.machine, shutdown_timeout))
- # Need to have portmap/rpcbind running to allow this test to work and
- # current autobuilder setup does not have this.
- def disabled_test_qemu_can_boot_nfs_and_shutdown(self):
- self.assertExists(self.qemuboot_conf)
- bitbake('meta-ide-support')
- rootfs_tar = "%s-%s.tar.bz2" % (self.recipe, self.machine)
+ def test_qemu_can_boot_nfs_and_shutdown(self):
+ rootfs_tar = "%s.tar.bz2" % (self.image_link_name)
rootfs_tar = os.path.join(self.deploy_dir_image, rootfs_tar)
self.assertExists(rootfs_tar)
- tmpdir = tempfile.mkdtemp(prefix='qemu_nfs')
- tmpdir_nfs = os.path.join(tmpdir, 'nfs')
- cmd_extract_nfs = 'runqemu-extract-sdk %s %s' % (rootfs_tar, tmpdir_nfs)
- result = runCmd(cmd_extract_nfs)
- self.assertEqual(0, result.status, "runqemu-extract-sdk didn't run as expected. %s" % result.output)
- cmd = "%s nfs %s %s" % (self.cmd_common, self.qemuboot_conf, tmpdir_nfs)
+ cmd = "%s %s" % (self.cmd_common, rootfs_tar)
shutdown_timeout = 120
with runqemu(self.recipe, ssh=False, launch_cmd=cmd) as qemu:
qemu_shutdown_succeeded = self._start_qemu_shutdown_check_if_shutdown_succeeded(qemu, shutdown_timeout)
self.assertTrue(qemu_shutdown_succeeded, 'Failed: %s does not shutdown within timeout(%s)' % (self.machine, shutdown_timeout))
- runCmd('rm -rf %s' % tmpdir)
diff --git a/meta/lib/oeqa/selftest/cases/runtime_test.py b/meta/lib/oeqa/selftest/cases/runtime_test.py
index 8eacde40ad..12000aac16 100644
--- a/meta/lib/oeqa/selftest/cases/runtime_test.py
+++ b/meta/lib/oeqa/selftest/cases/runtime_test.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
@@ -8,7 +10,7 @@ from oeqa.core.decorator import OETestTag
import os
import tempfile
import oe.lsb
-from oeqa.core.decorator.data import skipIfNotQemu
+from oeqa.core.decorator.data import skipIfNotQemu, skipIfNotMachine
class TestExport(OESelftestTestCase):
@@ -23,7 +25,7 @@ class TestExport(OESelftestTestCase):
Author: Mariano Lopez <mariano.lopez@intel.com>
"""
- features = 'INHERIT += "testexport"\n'
+ features = 'IMAGE_CLASSES += "testexport"\n'
# These aren't the actual IP addresses but testexport class needs something defined
features += 'TEST_SERVER_IP = "192.168.7.1"\n'
features += 'TEST_TARGET_IP = "192.168.7.1"\n'
@@ -64,7 +66,7 @@ class TestExport(OESelftestTestCase):
Author: Mariano Lopez <mariano.lopez@intel.com>
"""
- features = 'INHERIT += "testexport"\n'
+ features = 'IMAGE_CLASSES += "testexport"\n'
# These aren't the actual IP addresses but testexport class needs something defined
features += 'TEST_SERVER_IP = "192.168.7.1"\n'
features += 'TEST_TARGET_IP = "192.168.7.1"\n'
@@ -119,7 +121,7 @@ class TestImage(OESelftestTestCase):
if get_bb_var('DISTRO') == 'poky-tiny':
self.skipTest('core-image-full-cmdline not buildable for poky-tiny')
- features = 'INHERIT += "testimage"\n'
+ features = 'IMAGE_CLASSES += "testimage"\n'
features += 'IMAGE_INSTALL:append = " libssl"\n'
features += 'TEST_SUITES = "ping ssh selftest"\n'
self.write_config(features)
@@ -127,6 +129,22 @@ class TestImage(OESelftestTestCase):
bitbake('core-image-full-cmdline socat')
bitbake('-c testimage core-image-full-cmdline')
+ def test_testimage_slirp(self):
+ """
+ Summary: Check basic testimage functionality with qemu and slirp networking.
+ """
+
+ features = '''
+IMAGE_CLASSES:append = " testimage"
+IMAGE_FEATURES:append = " ssh-server-dropbear"
+IMAGE_ROOTFS_EXTRA_SPACE:append = "${@bb.utils.contains("IMAGE_CLASSES", "testimage", " + 5120", "", d)}"
+TEST_RUNQEMUPARAMS += " slirp"
+'''
+ self.write_config(features)
+
+ bitbake('core-image-minimal')
+ bitbake('-c testimage core-image-minimal')
+
def test_testimage_dnf(self):
"""
Summary: Check package feeds functionality for dnf
@@ -137,7 +155,7 @@ class TestImage(OESelftestTestCase):
if get_bb_var('DISTRO') == 'poky-tiny':
self.skipTest('core-image-full-cmdline not buildable for poky-tiny')
- features = 'INHERIT += "testimage"\n'
+ features = 'IMAGE_CLASSES += "testimage"\n'
features += 'TEST_SUITES = "ping ssh dnf_runtime dnf.DnfBasicTest.test_dnf_help"\n'
# We don't yet know what the server ip and port will be - they will be patched
# in at the start of the on-image test
@@ -172,7 +190,7 @@ class TestImage(OESelftestTestCase):
if get_bb_var('DISTRO') == 'poky-tiny':
self.skipTest('core-image-full-cmdline not buildable for poky-tiny')
- features = 'INHERIT += "testimage"\n'
+ features = 'IMAGE_CLASSES += "testimage"\n'
features += 'TEST_SUITES = "ping ssh apt.AptRepoTest.test_apt_install_from_repo"\n'
# We don't yet know what the server ip and port will be - they will be patched
# in at the start of the on-image test
@@ -200,6 +218,8 @@ class TestImage(OESelftestTestCase):
bitbake('core-image-full-cmdline socat')
bitbake('-c testimage core-image-full-cmdline')
+ # https://bugzilla.yoctoproject.org/show_bug.cgi?id=14966
+ @skipIfNotMachine("qemux86-64", "test needs qemux86-64")
def test_testimage_virgl_gtk_sdl(self):
"""
Summary: Check host-assisted accelerate OpenGL functionality in qemu with gtk and SDL frontends
@@ -222,7 +242,7 @@ class TestImage(OESelftestTestCase):
qemu_packageconfig = get_bb_var('PACKAGECONFIG', 'qemu-system-native')
qemu_distrofeatures = get_bb_var('DISTRO_FEATURES', 'qemu-system-native')
- features = 'INHERIT += "testimage"\n'
+ features = 'IMAGE_CLASSES += "testimage"\n'
if 'gtk+' not in qemu_packageconfig:
features += 'PACKAGECONFIG:append:pn-qemu-system-native = " gtk+"\n'
if 'sdl' not in qemu_packageconfig:
@@ -232,15 +252,16 @@ class TestImage(OESelftestTestCase):
features += 'TEST_SUITES = "ping ssh virgl"\n'
features += 'IMAGE_FEATURES:append = " ssh-server-dropbear"\n'
features += 'IMAGE_INSTALL:append = " kmscube"\n'
- features_gtk = features + 'TEST_RUNQEMUPARAMS = "gtk gl"\n'
+ features_gtk = features + 'TEST_RUNQEMUPARAMS += " gtk gl"\n'
self.write_config(features_gtk)
bitbake('core-image-minimal')
bitbake('-c testimage core-image-minimal')
- features_sdl = features + 'TEST_RUNQEMUPARAMS = "sdl gl"\n'
+ features_sdl = features + 'TEST_RUNQEMUPARAMS += " sdl gl"\n'
self.write_config(features_sdl)
bitbake('core-image-minimal')
bitbake('-c testimage core-image-minimal')
+ @skipIfNotMachine("qemux86-64", "test needs qemux86-64")
def test_testimage_virgl_headless(self):
"""
Summary: Check host-assisted accelerate OpenGL functionality in qemu with egl-headless frontend
@@ -252,28 +273,18 @@ class TestImage(OESelftestTestCase):
import subprocess, os
distro = oe.lsb.distro_identifier()
- if distro and distro in ['debian-9', 'debian-10', 'centos-7', 'centos-8', 'ubuntu-16.04', 'ubuntu-18.04', 'almalinux-8.5', 'almalinux-8.6']:
+ if distro and (distro in ['debian-9', 'debian-10', 'centos-7', 'centos-8', 'ubuntu-16.04', 'ubuntu-18.04'] or
+ distro.startswith('almalinux') or distro.startswith('rocky')):
self.skipTest('virgl headless cannot be tested with %s' %(distro))
- render_hint = """If /dev/dri/renderD* is absent due to lack of suitable GPU, 'modprobe vgem' will create one suitable for mesa llvmpipe software renderer."""
- try:
- content = os.listdir("/dev/dri")
- if len([i for i in content if i.startswith('render')]) == 0:
- self.fail("No render nodes found in /dev/dri: %s. %s" %(content, render_hint))
- except FileNotFoundError:
- self.fail("/dev/dri directory does not exist; no render nodes available on this machine. %s" %(render_hint))
- try:
- dripath = subprocess.check_output("pkg-config --variable=dridriverdir dri", shell=True)
- except subprocess.CalledProcessError as e:
- self.fail("Could not determine the path to dri drivers on the host via pkg-config.\nPlease install Mesa development files (particularly, dri.pc) on the host machine.")
qemu_distrofeatures = get_bb_var('DISTRO_FEATURES', 'qemu-system-native')
- features = 'INHERIT += "testimage"\n'
+ features = 'IMAGE_CLASSES += "testimage"\n'
if 'opengl' not in qemu_distrofeatures:
features += 'DISTRO_FEATURES:append = " opengl"\n'
features += 'TEST_SUITES = "ping ssh virgl"\n'
features += 'IMAGE_FEATURES:append = " ssh-server-dropbear"\n'
features += 'IMAGE_INSTALL:append = " kmscube"\n'
- features += 'TEST_RUNQEMUPARAMS = "egl-headless"\n'
+ features += 'TEST_RUNQEMUPARAMS += " egl-headless"\n'
self.write_config(features)
bitbake('core-image-minimal')
bitbake('-c testimage core-image-minimal')
@@ -299,7 +310,7 @@ class Postinst(OESelftestTestCase):
features += 'IMAGE_FEATURES += "package-management empty-root-password"\n'
features += 'PACKAGE_CLASSES = "%s"\n' % classes
if init_manager == "systemd":
- features += 'DISTRO_FEATURES:append = " systemd"\n'
+ features += 'DISTRO_FEATURES:append = " systemd usrmerge"\n'
features += 'VIRTUAL-RUNTIME_init_manager = "systemd"\n'
features += 'DISTRO_FEATURES_BACKFILL_CONSIDERED = "sysvinit"\n'
features += 'VIRTUAL-RUNTIME_initscripts = ""\n'
diff --git a/meta/lib/oeqa/selftest/cases/rust.py b/meta/lib/oeqa/selftest/cases/rust.py
new file mode 100644
index 0000000000..ad14189c6d
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/rust.py
@@ -0,0 +1,231 @@
+# SPDX-License-Identifier: MIT
+import os
+import subprocess
+import time
+from oeqa.core.decorator import OETestTag
+from oeqa.core.case import OEPTestResultTestCase
+from oeqa.selftest.case import OESelftestTestCase
+from oeqa.utils.commands import runCmd, bitbake, get_bb_var, get_bb_vars, runqemu, Command
+from oeqa.utils.sshcontrol import SSHControl
+
+def parse_results(filename):
+ tests = {}
+ with open(filename, "r") as f:
+ lines = f.readlines()
+ for line in lines:
+ if "..." in line and "test [" in line:
+ test = line.split("test ")[1].split(" ... ")[0]
+ if "] " in test:
+ test = test.split("] ", 1)[1]
+ result = line.split(" ... ")[1].strip()
+ if result == "ok":
+ result = "PASS"
+ elif result == "failed":
+ result = "FAIL"
+ elif "ignored" in result:
+ result = "SKIPPED"
+ if test in tests:
+ if tests[test] != result:
+ print("Duplicate and mismatching result %s for %s" % (result, test))
+ else:
+ print("Duplicate result %s for %s" % (result, test))
+ else:
+ tests[test] = result
+ return tests
+
+# Total time taken for testing is of about 2hr 20min, with PARALLEL_MAKE set to 40 number of jobs.
+@OETestTag("toolchain-system")
+@OETestTag("toolchain-user")
+@OETestTag("runqemu")
+class RustSelfTestSystemEmulated(OESelftestTestCase, OEPTestResultTestCase):
+ def test_rust(self, *args, **kwargs):
+ # Disable Rust Oe-selftest
+ #self.skipTest("The Rust Oe-selftest is disabled.")
+
+ # Skip mips32 target since it is unstable with rust tests
+ machine = get_bb_var('MACHINE')
+ if machine == "qemumips":
+ self.skipTest("The mips32 target is skipped for Rust Oe-selftest.")
+
+ # build remote-test-server before image build
+ recipe = "rust"
+ start_time = time.time()
+ bitbake("{} -c test_compile".format(recipe))
+ builddir = get_bb_var("RUSTSRC", "rust")
+ # build core-image-minimal with required packages
+ default_installed_packages = ["libgcc", "libstdc++", "libatomic", "libgomp"]
+ features = []
+ features.append('IMAGE_FEATURES += "ssh-server-dropbear"')
+ features.append('CORE_IMAGE_EXTRA_INSTALL += "{0}"'.format(" ".join(default_installed_packages)))
+ self.write_config("\n".join(features))
+ bitbake("core-image-minimal")
+
+ # Exclude the test folders that error out while building
+ # TODO: Fix the errors and include them for testing
+ # no-fail-fast: Run all tests regardless of failure.
+ # bless: First runs rustfmt to format the codebase,
+ # then runs tidy checks.
+ exclude_list = [
+ 'compiler/rustc',
+ 'compiler/rustc_interface/src/tests.rs',
+ 'library/panic_abort',
+ 'library/panic_unwind',
+ 'library/test/src/stats/tests.rs',
+ 'src/bootstrap/builder/tests.rs',
+ 'src/doc/rustc',
+ 'src/doc/rustdoc',
+ 'src/doc/unstable-book',
+ 'src/librustdoc',
+ 'src/rustdoc-json-types',
+ 'src/tools/compiletest/src/common.rs',
+ 'src/tools/lint-docs',
+ 'src/tools/rust-analyzer',
+ 'src/tools/rustdoc-themes',
+ 'src/tools/tidy',
+ 'tests/assembly/asm/aarch64-outline-atomics.rs',
+ 'tests/codegen/abi-main-signature-32bit-c-int.rs',
+ 'tests/codegen/abi-repr-ext.rs',
+ 'tests/codegen/abi-x86-interrupt.rs',
+ 'tests/codegen/branch-protection.rs',
+ 'tests/codegen/catch-unwind.rs',
+ 'tests/codegen/cf-protection.rs',
+ 'tests/codegen/enum-bounds-check-derived-idx.rs',
+ 'tests/codegen/force-unwind-tables.rs',
+ 'tests/codegen/intrinsic-no-unnamed-attr.rs',
+ 'tests/codegen/issues/issue-103840.rs',
+ 'tests/codegen/issues/issue-47278.rs',
+ 'tests/codegen/issues/issue-73827-bounds-check-index-in-subexpr.rs',
+ 'tests/codegen/lifetime_start_end.rs',
+ 'tests/codegen/local-generics-in-exe-internalized.rs',
+ 'tests/codegen/match-unoptimized.rs',
+ 'tests/codegen/noalias-rwlockreadguard.rs',
+ 'tests/codegen/non-terminate/nonempty-infinite-loop.rs',
+ 'tests/codegen/noreturn-uninhabited.rs',
+ 'tests/codegen/repr-transparent-aggregates-3.rs',
+ 'tests/codegen/riscv-abi/call-llvm-intrinsics.rs',
+ 'tests/codegen/riscv-abi/riscv64-lp64f-lp64d-abi.rs',
+ 'tests/codegen/riscv-abi/riscv64-lp64d-abi.rs',
+ 'tests/codegen/sse42-implies-crc32.rs',
+ 'tests/codegen/thread-local.rs',
+ 'tests/codegen/uninit-consts.rs',
+ 'tests/pretty/raw-str-nonexpr.rs',
+ 'tests/run-make',
+ 'tests/run-make-fulldeps',
+ 'tests/rustdoc',
+ 'tests/rustdoc-json',
+ 'tests/rustdoc-js-std',
+ 'tests/rustdoc-ui/cfg-test.rs',
+ 'tests/rustdoc-ui/check-cfg-test.rs',
+ 'tests/rustdoc-ui/display-output.rs',
+ 'tests/rustdoc-ui/doc-comment-multi-line-attr.rs',
+ 'tests/rustdoc-ui/doc-comment-multi-line-cfg-attr.rs',
+ 'tests/rustdoc-ui/doc-test-doctest-feature.rs',
+ 'tests/rustdoc-ui/doctest-multiline-crate-attribute.rs',
+ 'tests/rustdoc-ui/doctest-output.rs',
+ 'tests/rustdoc-ui/doc-test-rustdoc-feature.rs',
+ 'tests/rustdoc-ui/failed-doctest-compile-fail.rs',
+ 'tests/rustdoc-ui/issue-80992.rs',
+ 'tests/rustdoc-ui/issue-91134.rs',
+ 'tests/rustdoc-ui/nocapture-fail.rs',
+ 'tests/rustdoc-ui/nocapture.rs',
+ 'tests/rustdoc-ui/no-run-flag.rs',
+ 'tests/rustdoc-ui/run-directory.rs',
+ 'tests/rustdoc-ui/test-no_std.rs',
+ 'tests/rustdoc-ui/test-type.rs',
+ 'tests/rustdoc/unit-return.rs',
+ 'tests/ui/abi/stack-probes-lto.rs',
+ 'tests/ui/abi/stack-probes.rs',
+ 'tests/ui/array-slice-vec/subslice-patterns-const-eval-match.rs',
+ 'tests/ui/asm/x86_64/sym.rs',
+ 'tests/ui/associated-type-bounds/fn-apit.rs',
+ 'tests/ui/associated-type-bounds/fn-dyn-apit.rs',
+ 'tests/ui/associated-type-bounds/fn-wrap-apit.rs',
+ 'tests/ui/debuginfo/debuginfo-emit-llvm-ir-and-split-debuginfo.rs',
+ 'tests/ui/drop/dynamic-drop.rs',
+ 'tests/ui/empty_global_asm.rs',
+ 'tests/ui/functions-closures/fn-help-with-err.rs',
+ 'tests/ui/linkage-attr/issue-10755.rs',
+ 'tests/ui/macros/restricted-shadowing-legacy.rs',
+ 'tests/ui/process/nofile-limit.rs',
+ 'tests/ui/process/process-panic-after-fork.rs',
+ 'tests/ui/process/process-sigpipe.rs',
+ 'tests/ui/simd/target-feature-mixup.rs',
+ 'tests/ui/structs-enums/multiple-reprs.rs',
+ 'src/tools/jsondoclint',
+ 'src/tools/replace-version-placeholder',
+ 'tests/codegen/abi-efiapi.rs',
+ 'tests/codegen/abi-sysv64.rs',
+ 'tests/codegen/align-byval.rs',
+ 'tests/codegen/align-fn.rs',
+ 'tests/codegen/asm-powerpc-clobbers.rs',
+ 'tests/codegen/async-fn-debug-awaitee-field.rs',
+ 'tests/codegen/binary-search-index-no-bound-check.rs',
+ 'tests/codegen/call-metadata.rs',
+ 'tests/codegen/debug-column.rs',
+ 'tests/codegen/debug-limited.rs',
+ 'tests/codegen/debuginfo-generic-closure-env-names.rs',
+ 'tests/codegen/drop.rs',
+ 'tests/codegen/dst-vtable-align-nonzero.rs',
+ 'tests/codegen/enable-lto-unit-splitting.rs',
+ 'tests/codegen/enum/enum-u128.rs',
+ 'tests/codegen/fn-impl-trait-self.rs',
+ 'tests/codegen/inherit_overflow.rs',
+ 'tests/codegen/inline-function-args-debug-info.rs',
+ 'tests/codegen/intrinsics/mask.rs',
+ 'tests/codegen/intrinsics/transmute-niched.rs',
+ 'tests/codegen/issues/issue-73258.rs',
+ 'tests/codegen/issues/issue-75546.rs',
+ 'tests/codegen/issues/issue-77812.rs',
+ 'tests/codegen/issues/issue-98156-const-arg-temp-lifetime.rs',
+ 'tests/codegen/llvm-ident.rs',
+ 'tests/codegen/mainsubprogram.rs',
+ 'tests/codegen/move-operands.rs',
+ 'tests/codegen/repr/transparent-mips64.rs',
+ 'tests/mir-opt/',
+ 'tests/rustdoc-json',
+ 'tests/rustdoc-ui/doc-test-rustdoc-feature.rs',
+ 'tests/rustdoc-ui/no-run-flag.rs',
+ 'tests/ui-fulldeps/',
+ 'tests/ui/numbers-arithmetic/u128.rs'
+ ]
+
+ exclude_fail_tests = " ".join([" --exclude " + item for item in exclude_list])
+ # Add exclude_fail_tests with other test arguments
+ testargs = exclude_fail_tests + " --doc --no-fail-fast --bless"
+
+ # wrap the execution with a qemu instance.
+ # Tests are run with 512 tasks in parallel to execute all tests very quickly
+ with runqemu("core-image-minimal", runqemuparams = "nographic", qemuparams = "-m 512") as qemu:
+ # Copy remote-test-server to image through scp
+ host_sys = get_bb_var("RUST_BUILD_SYS", "rust")
+ ssh = SSHControl(ip=qemu.ip, logfile=qemu.sshlog, user="root")
+ ssh.copy_to(builddir + "/build/" + host_sys + "/stage1-tools-bin/remote-test-server","~/")
+ # Execute remote-test-server on image through background ssh
+ command = '~/remote-test-server --bind 0.0.0.0:12345 -v'
+ sshrun=subprocess.Popen(("ssh", '-o', 'UserKnownHostsFile=/dev/null', '-o', 'StrictHostKeyChecking=no', '-f', "root@%s" % qemu.ip, command), shell=False, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ # Get the values of variables.
+ tcpath = get_bb_var("TARGET_SYS", "rust")
+ targetsys = get_bb_var("RUST_TARGET_SYS", "rust")
+ rustlibpath = get_bb_var("WORKDIR", "rust")
+ tmpdir = get_bb_var("TMPDIR", "rust")
+
+ # Set path for target-poky-linux-gcc, RUST_TARGET_PATH and hosttools.
+ cmd = " export PATH=%s/recipe-sysroot-native/usr/bin:$PATH;" % rustlibpath
+ cmd = cmd + " export TARGET_VENDOR=\"-poky\";"
+ cmd = cmd + " export PATH=%s/recipe-sysroot-native/usr/bin/%s:%s/hosttools:$PATH;" % (rustlibpath, tcpath, tmpdir)
+ cmd = cmd + " export RUST_TARGET_PATH=%s/rust-targets;" % rustlibpath
+ # Trigger testing.
+ cmd = cmd + " export TEST_DEVICE_ADDR=\"%s:12345\";" % qemu.ip
+ cmd = cmd + " cd %s; python3 src/bootstrap/bootstrap.py test %s --target %s" % (builddir, testargs, targetsys)
+ retval = runCmd(cmd)
+ end_time = time.time()
+
+ resultlog = rustlibpath + "/results-log.txt"
+ with open(resultlog, "w") as f:
+ f.write(retval.output)
+
+ ptestsuite = "rust"
+ self.ptest_section(ptestsuite, duration = int(end_time - start_time), logfile=resultlog)
+ test_results = parse_results(resultlog)
+ for test in test_results:
+ self.ptest_result(ptestsuite, test, test_results[test])
diff --git a/meta/lib/oeqa/selftest/cases/selftest.py b/meta/lib/oeqa/selftest/cases/selftest.py
index 7268e25939..a80a8651a5 100644
--- a/meta/lib/oeqa/selftest/cases/selftest.py
+++ b/meta/lib/oeqa/selftest/cases/selftest.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
diff --git a/meta/lib/oeqa/selftest/cases/signing.py b/meta/lib/oeqa/selftest/cases/signing.py
index 6f3d4aeae9..18cce0ba25 100644
--- a/meta/lib/oeqa/selftest/cases/signing.py
+++ b/meta/lib/oeqa/selftest/cases/signing.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
@@ -189,7 +191,7 @@ class LockedSignatures(OESelftestTestCase):
bitbake(test_recipe)
# Generate locked sigs include file
- bitbake('-S none %s' % test_recipe)
+ bitbake('-S lockedsigs %s' % test_recipe)
feature = 'require %s\n' % locked_sigs_file
feature += 'SIGGEN_LOCKEDSIGS_TASKSIG_CHECK = "warn"\n'
diff --git a/meta/lib/oeqa/selftest/cases/spdx.py b/meta/lib/oeqa/selftest/cases/spdx.py
new file mode 100644
index 0000000000..05fc4e390b
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/spdx.py
@@ -0,0 +1,54 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+import json
+import os
+from oeqa.selftest.case import OESelftestTestCase
+from oeqa.utils.commands import bitbake, get_bb_var, runCmd
+
+class SPDXCheck(OESelftestTestCase):
+
+ @classmethod
+ def setUpClass(cls):
+ super(SPDXCheck, cls).setUpClass()
+ bitbake("python3-spdx-tools-native")
+ bitbake("-c addto_recipe_sysroot python3-spdx-tools-native")
+
+ def check_recipe_spdx(self, high_level_dir, spdx_file, target_name):
+ config = """
+INHERIT += "create-spdx"
+"""
+ self.write_config(config)
+
+ deploy_dir = get_bb_var("DEPLOY_DIR")
+ machine_var = get_bb_var("MACHINE")
+ # qemux86-64 creates the directory qemux86_64
+ machine_dir = machine_var.replace("-", "_")
+
+ full_file_path = os.path.join(deploy_dir, "spdx", machine_dir, high_level_dir, spdx_file)
+
+ try:
+ os.remove(full_file_path)
+ except FileNotFoundError:
+ pass
+
+ bitbake("%s -c create_spdx" % target_name)
+
+ def check_spdx_json(filename):
+ with open(filename) as f:
+ report = json.load(f)
+ self.assertNotEqual(report, None)
+ self.assertNotEqual(report["SPDXID"], None)
+
+ python = os.path.join(get_bb_var('STAGING_BINDIR', 'python3-spdx-tools-native'), 'nativepython3')
+ validator = os.path.join(get_bb_var('STAGING_BINDIR', 'python3-spdx-tools-native'), 'pyspdxtools')
+ result = runCmd("{} {} -i {}".format(python, validator, filename))
+
+ self.assertExists(full_file_path)
+ result = check_spdx_json(full_file_path)
+
+ def test_spdx_base_files(self):
+ self.check_recipe_spdx("packages", "base-files.spdx.json", "base-files")
diff --git a/meta/lib/oeqa/selftest/cases/sstate.py b/meta/lib/oeqa/selftest/cases/sstate.py
deleted file mode 100644
index 176766331a..0000000000
--- a/meta/lib/oeqa/selftest/cases/sstate.py
+++ /dev/null
@@ -1,64 +0,0 @@
-#
-# SPDX-License-Identifier: MIT
-#
-
-import datetime
-import os
-import re
-
-from oeqa.selftest.case import OESelftestTestCase
-from oeqa.utils.commands import get_bb_vars
-
-
-class SStateBase(OESelftestTestCase):
-
- def setUpLocal(self):
- super(SStateBase, self).setUpLocal()
- self.temp_sstate_location = None
- needed_vars = ['SSTATE_DIR', 'NATIVELSBSTRING', 'TCLIBC', 'TUNE_ARCH',
- 'TOPDIR', 'TARGET_VENDOR', 'TARGET_OS']
- bb_vars = get_bb_vars(needed_vars)
- self.sstate_path = bb_vars['SSTATE_DIR']
- self.hostdistro = bb_vars['NATIVELSBSTRING']
- self.tclibc = bb_vars['TCLIBC']
- self.tune_arch = bb_vars['TUNE_ARCH']
- self.topdir = bb_vars['TOPDIR']
- self.target_vendor = bb_vars['TARGET_VENDOR']
- self.target_os = bb_vars['TARGET_OS']
- self.distro_specific_sstate = os.path.join(self.sstate_path, self.hostdistro)
-
- # Creates a special sstate configuration with the option to add sstate mirrors
- def config_sstate(self, temp_sstate_location=False, add_local_mirrors=[]):
- self.temp_sstate_location = temp_sstate_location
-
- if self.temp_sstate_location:
- temp_sstate_path = os.path.join(self.builddir, "temp_sstate_%s" % datetime.datetime.now().strftime('%Y%m%d%H%M%S'))
- config_temp_sstate = "SSTATE_DIR = \"%s\"" % temp_sstate_path
- self.append_config(config_temp_sstate)
- self.track_for_cleanup(temp_sstate_path)
- bb_vars = get_bb_vars(['SSTATE_DIR', 'NATIVELSBSTRING'])
- self.sstate_path = bb_vars['SSTATE_DIR']
- self.hostdistro = bb_vars['NATIVELSBSTRING']
- self.distro_specific_sstate = os.path.join(self.sstate_path, self.hostdistro)
-
- if add_local_mirrors:
- config_set_sstate_if_not_set = 'SSTATE_MIRRORS ?= ""'
- self.append_config(config_set_sstate_if_not_set)
- for local_mirror in add_local_mirrors:
- self.assertFalse(os.path.join(local_mirror) == os.path.join(self.sstate_path), msg='Cannot add the current sstate path as a sstate mirror')
- config_sstate_mirror = "SSTATE_MIRRORS += \"file://.* file:///%s/PATH\"" % local_mirror
- self.append_config(config_sstate_mirror)
-
- # Returns a list containing sstate files
- def search_sstate(self, filename_regex, distro_specific=True, distro_nonspecific=True):
- result = []
- for root, dirs, files in os.walk(self.sstate_path):
- if distro_specific and re.search(r"%s/%s/[a-z0-9]{2}/[a-z0-9]{2}$" % (self.sstate_path, self.hostdistro), root):
- for f in files:
- if re.search(filename_regex, f):
- result.append(f)
- if distro_nonspecific and re.search(r"%s/[a-z0-9]{2}/[a-z0-9]{2}$" % self.sstate_path, root):
- for f in files:
- if re.search(filename_regex, f):
- result.append(f)
- return result
diff --git a/meta/lib/oeqa/selftest/cases/sstatetests.py b/meta/lib/oeqa/selftest/cases/sstatetests.py
index 4a32af902f..86d6cd7464 100644
--- a/meta/lib/oeqa/selftest/cases/sstatetests.py
+++ b/meta/lib/oeqa/selftest/cases/sstatetests.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
@@ -7,54 +9,77 @@ import shutil
import glob
import subprocess
import tempfile
+import datetime
+import re
-from oeqa.utils.commands import runCmd, bitbake, get_bb_var, create_temp_layer
-from oeqa.selftest.cases.sstate import SStateBase
-import oe
+from oeqa.utils.commands import runCmd, bitbake, get_bb_var, create_temp_layer, get_bb_vars
+from oeqa.selftest.case import OESelftestTestCase
+from oeqa.core.decorator import OETestTag
+import oe
import bb.siggen
-class SStateTests(SStateBase):
- def test_autorev_sstate_works(self):
- # Test that a git repository which changes is correctly handled by SRCREV = ${AUTOREV}
- # when PV does not contain SRCPV
-
- tempdir = tempfile.mkdtemp(prefix='sstate_autorev')
- tempdldir = tempfile.mkdtemp(prefix='sstate_autorev_dldir')
- self.track_for_cleanup(tempdir)
- self.track_for_cleanup(tempdldir)
- create_temp_layer(tempdir, 'selftestrecipetool')
- self.add_command_to_tearDown('bitbake-layers remove-layer %s' % tempdir)
- self.append_config("DL_DIR = \"%s\"" % tempdldir)
- runCmd('bitbake-layers add-layer %s' % tempdir)
-
- # Use dbus-wait as a local git repo we can add a commit between two builds in
- pn = 'dbus-wait'
- srcrev = '6cc6077a36fe2648a5f993fe7c16c9632f946517'
- url = 'git://git.yoctoproject.org/dbus-wait'
- result = runCmd('git clone %s noname' % url, cwd=tempdir)
- srcdir = os.path.join(tempdir, 'noname')
- result = runCmd('git reset --hard %s' % srcrev, cwd=srcdir)
- self.assertTrue(os.path.isfile(os.path.join(srcdir, 'configure.ac')), 'Unable to find configure script in source directory')
-
- recipefile = os.path.join(tempdir, "recipes-test", "dbus-wait-test", 'dbus-wait-test_git.bb')
- os.makedirs(os.path.dirname(recipefile))
- srcuri = 'git://' + srcdir + ';protocol=file;branch=master'
- result = runCmd(['recipetool', 'create', '-o', recipefile, srcuri])
- self.assertTrue(os.path.isfile(recipefile), 'recipetool did not create recipe file; output:\n%s' % result.output)
-
- with open(recipefile, 'a') as f:
- f.write('SRCREV = "${AUTOREV}"\n')
- f.write('PV = "1.0"\n')
-
- bitbake("dbus-wait-test -c fetch")
- with open(os.path.join(srcdir, "bar.txt"), "w") as f:
- f.write("foo")
- result = runCmd('git add bar.txt; git commit -asm "add bar"', cwd=srcdir)
- bitbake("dbus-wait-test -c unpack")
-
-
- # Test sstate files creation and their location
+# Set to True to preserve stamp files after test execution for debugging failures
+keep_temp_files = False
+
+class SStateBase(OESelftestTestCase):
+
+ def setUpLocal(self):
+ super(SStateBase, self).setUpLocal()
+ self.temp_sstate_location = None
+ needed_vars = ['SSTATE_DIR', 'NATIVELSBSTRING', 'TCLIBC', 'TUNE_ARCH',
+ 'TOPDIR', 'TARGET_VENDOR', 'TARGET_OS']
+ bb_vars = get_bb_vars(needed_vars)
+ self.sstate_path = bb_vars['SSTATE_DIR']
+ self.hostdistro = bb_vars['NATIVELSBSTRING']
+ self.tclibc = bb_vars['TCLIBC']
+ self.tune_arch = bb_vars['TUNE_ARCH']
+ self.topdir = bb_vars['TOPDIR']
+ self.target_vendor = bb_vars['TARGET_VENDOR']
+ self.target_os = bb_vars['TARGET_OS']
+ self.distro_specific_sstate = os.path.join(self.sstate_path, self.hostdistro)
+
+ def track_for_cleanup(self, path):
+ if not keep_temp_files:
+ super().track_for_cleanup(path)
+
+ # Creates a special sstate configuration with the option to add sstate mirrors
+ def config_sstate(self, temp_sstate_location=False, add_local_mirrors=[]):
+ self.temp_sstate_location = temp_sstate_location
+
+ if self.temp_sstate_location:
+ temp_sstate_path = os.path.join(self.builddir, "temp_sstate_%s" % datetime.datetime.now().strftime('%Y%m%d%H%M%S'))
+ config_temp_sstate = "SSTATE_DIR = \"%s\"" % temp_sstate_path
+ self.append_config(config_temp_sstate)
+ self.track_for_cleanup(temp_sstate_path)
+ bb_vars = get_bb_vars(['SSTATE_DIR', 'NATIVELSBSTRING'])
+ self.sstate_path = bb_vars['SSTATE_DIR']
+ self.hostdistro = bb_vars['NATIVELSBSTRING']
+ self.distro_specific_sstate = os.path.join(self.sstate_path, self.hostdistro)
+
+ if add_local_mirrors:
+ config_set_sstate_if_not_set = 'SSTATE_MIRRORS ?= ""'
+ self.append_config(config_set_sstate_if_not_set)
+ for local_mirror in add_local_mirrors:
+ self.assertFalse(os.path.join(local_mirror) == os.path.join(self.sstate_path), msg='Cannot add the current sstate path as a sstate mirror')
+ config_sstate_mirror = "SSTATE_MIRRORS += \"file://.* file:///%s/PATH\"" % local_mirror
+ self.append_config(config_sstate_mirror)
+
+ # Returns a list containing sstate files
+ def search_sstate(self, filename_regex, distro_specific=True, distro_nonspecific=True):
+ result = []
+ for root, dirs, files in os.walk(self.sstate_path):
+ if distro_specific and re.search(r"%s/%s/[a-z0-9]{2}/[a-z0-9]{2}$" % (self.sstate_path, self.hostdistro), root):
+ for f in files:
+ if re.search(filename_regex, f):
+ result.append(f)
+ if distro_nonspecific and re.search(r"%s/[a-z0-9]{2}/[a-z0-9]{2}$" % self.sstate_path, root):
+ for f in files:
+ if re.search(filename_regex, f):
+ result.append(f)
+ return result
+
+ # Test sstate files creation and their location and directory perms
def run_test_sstate_creation(self, targets, distro_specific=True, distro_nonspecific=True, temp_sstate_location=True, should_pass=True):
self.config_sstate(temp_sstate_location, [self.sstate_path])
@@ -63,6 +88,19 @@ class SStateTests(SStateBase):
else:
bitbake(['-ccleansstate'] + targets)
+ # We need to test that the env umask have does not effect sstate directory creation
+ # So, first, we'll get the current umask and set it to something we know incorrect
+ # See: sstate_task_postfunc for correct umask of os.umask(0o002)
+ import os
+ def current_umask():
+ current_umask = os.umask(0)
+ os.umask(current_umask)
+ return current_umask
+
+ orig_umask = current_umask()
+ # Set it to a umask we know will be 'wrong'
+ os.umask(0o022)
+
bitbake(targets)
file_tracker = []
results = self.search_sstate('|'.join(map(str, targets)), distro_specific, distro_nonspecific)
@@ -79,17 +117,18 @@ class SStateTests(SStateBase):
else:
self.assertTrue(not file_tracker , msg="Found sstate files in the wrong place for: %s (found %s)" % (', '.join(map(str, targets)), str(file_tracker)))
- def test_sstate_creation_distro_specific_pass(self):
- self.run_test_sstate_creation(['binutils-cross-'+ self.tune_arch, 'binutils-native'], distro_specific=True, distro_nonspecific=False, temp_sstate_location=True)
-
- def test_sstate_creation_distro_specific_fail(self):
- self.run_test_sstate_creation(['binutils-cross-'+ self.tune_arch, 'binutils-native'], distro_specific=False, distro_nonspecific=True, temp_sstate_location=True, should_pass=False)
+ # Now we'll walk the tree to check the mode and see if things are incorrect.
+ badperms = []
+ for root, dirs, files in os.walk(self.sstate_path):
+ for directory in dirs:
+ if (os.stat(os.path.join(root, directory)).st_mode & 0o777) != 0o775:
+ badperms.append(os.path.join(root, directory))
- def test_sstate_creation_distro_nonspecific_pass(self):
- self.run_test_sstate_creation(['linux-libc-headers'], distro_specific=False, distro_nonspecific=True, temp_sstate_location=True)
+ # Return to original umask
+ os.umask(orig_umask)
- def test_sstate_creation_distro_nonspecific_fail(self):
- self.run_test_sstate_creation(['linux-libc-headers'], distro_specific=True, distro_nonspecific=False, temp_sstate_location=True, should_pass=False)
+ if should_pass:
+ self.assertTrue(badperms , msg="Found sstate directories with the wrong permissions: %s (found %s)" % (', '.join(map(str, targets)), str(badperms)))
# Test the sstate files deletion part of the do_cleansstate task
def run_test_cleansstate_task(self, targets, distro_specific=True, distro_nonspecific=True, temp_sstate_location=True):
@@ -108,20 +147,6 @@ class SStateTests(SStateBase):
archives_removed = self.search_sstate('|'.join(map(str, [s + r'.*?\.tar.zst$' for s in targets])), distro_specific, distro_nonspecific)
self.assertTrue(not archives_removed, msg="do_cleansstate didn't remove .tar.zst sstate files for: %s (%s)" % (', '.join(map(str, targets)), str(archives_removed)))
- def test_cleansstate_task_distro_specific_nonspecific(self):
- targets = ['binutils-cross-'+ self.tune_arch, 'binutils-native']
- targets.append('linux-libc-headers')
- self.run_test_cleansstate_task(targets, distro_specific=True, distro_nonspecific=True, temp_sstate_location=True)
-
- def test_cleansstate_task_distro_nonspecific(self):
- self.run_test_cleansstate_task(['linux-libc-headers'], distro_specific=False, distro_nonspecific=True, temp_sstate_location=True)
-
- def test_cleansstate_task_distro_specific(self):
- targets = ['binutils-cross-'+ self.tune_arch, 'binutils-native']
- targets.append('linux-libc-headers')
- self.run_test_cleansstate_task(targets, distro_specific=True, distro_nonspecific=False, temp_sstate_location=True)
-
-
# Test rebuilding of distro-specific sstate files
def run_test_rebuild_distro_specific_sstate(self, targets, temp_sstate_location=True):
self.config_sstate(temp_sstate_location, [self.sstate_path])
@@ -154,6 +179,105 @@ class SStateTests(SStateBase):
created_once = [x for x in file_tracker_2 if x not in file_tracker_1]
self.assertTrue(created_once == [], msg="The following sstate files were created only in the second run: %s" % ', '.join(map(str, created_once)))
+ def sstate_common_samesigs(self, configA, configB, allarch=False):
+
+ self.write_config(configA)
+ self.track_for_cleanup(self.topdir + "/tmp-sstatesamehash")
+ bitbake("world meta-toolchain -S none")
+ self.write_config(configB)
+ self.track_for_cleanup(self.topdir + "/tmp-sstatesamehash2")
+ bitbake("world meta-toolchain -S none")
+
+ def get_files(d, result):
+ for root, dirs, files in os.walk(d):
+ for name in files:
+ if "meta-environment" in root or "cross-canadian" in root:
+ continue
+ if "do_build" not in name:
+ # 1.4.1+gitAUTOINC+302fca9f4c-r0.do_package_write_ipk.sigdata.f3a2a38697da743f0dbed8b56aafcf79
+ (_, task, _, shash) = name.rsplit(".", 3)
+ result[os.path.join(os.path.basename(root), task)] = shash
+
+ files1 = {}
+ files2 = {}
+ subdirs = sorted(glob.glob(self.topdir + "/tmp-sstatesamehash/stamps/*-nativesdk*-linux"))
+ if allarch:
+ subdirs.extend(sorted(glob.glob(self.topdir + "/tmp-sstatesamehash/stamps/all-*-linux")))
+
+ for subdir in subdirs:
+ nativesdkdir = os.path.basename(subdir)
+ get_files(self.topdir + "/tmp-sstatesamehash/stamps/" + nativesdkdir, files1)
+ get_files(self.topdir + "/tmp-sstatesamehash2/stamps/" + nativesdkdir, files2)
+
+ self.maxDiff = None
+ self.assertEqual(files1, files2)
+
+class SStateTests(SStateBase):
+ def test_autorev_sstate_works(self):
+ # Test that a git repository which changes is correctly handled by SRCREV = ${AUTOREV}
+
+ tempdir = tempfile.mkdtemp(prefix='sstate_autorev')
+ tempdldir = tempfile.mkdtemp(prefix='sstate_autorev_dldir')
+ self.track_for_cleanup(tempdir)
+ self.track_for_cleanup(tempdldir)
+ create_temp_layer(tempdir, 'selftestrecipetool')
+ self.add_command_to_tearDown('bitbake-layers remove-layer %s' % tempdir)
+ self.append_config("DL_DIR = \"%s\"" % tempdldir)
+ runCmd('bitbake-layers add-layer %s' % tempdir)
+
+ # Use dbus-wait as a local git repo we can add a commit between two builds in
+ pn = 'dbus-wait'
+ srcrev = '6cc6077a36fe2648a5f993fe7c16c9632f946517'
+ url = 'git://git.yoctoproject.org/dbus-wait'
+ result = runCmd('git clone %s noname' % url, cwd=tempdir)
+ srcdir = os.path.join(tempdir, 'noname')
+ result = runCmd('git reset --hard %s' % srcrev, cwd=srcdir)
+ self.assertTrue(os.path.isfile(os.path.join(srcdir, 'configure.ac')), 'Unable to find configure script in source directory')
+
+ recipefile = os.path.join(tempdir, "recipes-test", "dbus-wait-test", 'dbus-wait-test_git.bb')
+ os.makedirs(os.path.dirname(recipefile))
+ srcuri = 'git://' + srcdir + ';protocol=file;branch=master'
+ result = runCmd(['recipetool', 'create', '-o', recipefile, srcuri])
+ self.assertTrue(os.path.isfile(recipefile), 'recipetool did not create recipe file; output:\n%s' % result.output)
+
+ with open(recipefile, 'a') as f:
+ f.write('SRCREV = "${AUTOREV}"\n')
+ f.write('PV = "1.0"\n')
+
+ bitbake("dbus-wait-test -c fetch")
+ with open(os.path.join(srcdir, "bar.txt"), "w") as f:
+ f.write("foo")
+ result = runCmd('git add bar.txt; git commit -asm "add bar"', cwd=srcdir)
+ bitbake("dbus-wait-test -c unpack")
+
+class SStateCreation(SStateBase):
+ def test_sstate_creation_distro_specific_pass(self):
+ self.run_test_sstate_creation(['binutils-cross-'+ self.tune_arch, 'binutils-native'], distro_specific=True, distro_nonspecific=False, temp_sstate_location=True)
+
+ def test_sstate_creation_distro_specific_fail(self):
+ self.run_test_sstate_creation(['binutils-cross-'+ self.tune_arch, 'binutils-native'], distro_specific=False, distro_nonspecific=True, temp_sstate_location=True, should_pass=False)
+
+ def test_sstate_creation_distro_nonspecific_pass(self):
+ self.run_test_sstate_creation(['linux-libc-headers'], distro_specific=False, distro_nonspecific=True, temp_sstate_location=True)
+
+ def test_sstate_creation_distro_nonspecific_fail(self):
+ self.run_test_sstate_creation(['linux-libc-headers'], distro_specific=True, distro_nonspecific=False, temp_sstate_location=True, should_pass=False)
+
+class SStateCleanup(SStateBase):
+ def test_cleansstate_task_distro_specific_nonspecific(self):
+ targets = ['binutils-cross-'+ self.tune_arch, 'binutils-native']
+ targets.append('linux-libc-headers')
+ self.run_test_cleansstate_task(targets, distro_specific=True, distro_nonspecific=True, temp_sstate_location=True)
+
+ def test_cleansstate_task_distro_nonspecific(self):
+ self.run_test_cleansstate_task(['linux-libc-headers'], distro_specific=False, distro_nonspecific=True, temp_sstate_location=True)
+
+ def test_cleansstate_task_distro_specific(self):
+ targets = ['binutils-cross-'+ self.tune_arch, 'binutils-native']
+ targets.append('linux-libc-headers')
+ self.run_test_cleansstate_task(targets, distro_specific=True, distro_nonspecific=False, temp_sstate_location=True)
+
+class SStateDistroTests(SStateBase):
def test_rebuild_distro_specific_sstate_cross_native_targets(self):
self.run_test_rebuild_distro_specific_sstate(['binutils-cross-' + self.tune_arch, 'binutils-native'], temp_sstate_location=True)
@@ -163,30 +287,30 @@ class SStateTests(SStateBase):
def test_rebuild_distro_specific_sstate_native_target(self):
self.run_test_rebuild_distro_specific_sstate(['binutils-native'], temp_sstate_location=True)
-
+class SStateCacheManagement(SStateBase):
# Test the sstate-cache-management script. Each element in the global_config list is used with the corresponding element in the target_config list
- # global_config elements are expected to not generate any sstate files that would be removed by sstate-cache-management.sh (such as changing the value of MACHINE)
+ # global_config elements are expected to not generate any sstate files that would be removed by sstate-cache-management.py (such as changing the value of MACHINE)
def run_test_sstate_cache_management_script(self, target, global_config=[''], target_config=[''], ignore_patterns=[]):
self.assertTrue(global_config)
self.assertTrue(target_config)
self.assertTrue(len(global_config) == len(target_config), msg='Lists global_config and target_config should have the same number of elements')
- self.config_sstate(temp_sstate_location=True, add_local_mirrors=[self.sstate_path])
- # If buildhistory is enabled, we need to disable version-going-backwards
- # QA checks for this test. It may report errors otherwise.
- self.append_config('ERROR_QA:remove = "version-going-backwards"')
+ for idx in range(len(target_config)):
+ self.append_config(global_config[idx])
+ self.append_recipeinc(target, target_config[idx])
+ bitbake(target)
+ self.remove_config(global_config[idx])
+ self.remove_recipeinc(target, target_config[idx])
+
+ self.config_sstate(temp_sstate_location=True, add_local_mirrors=[self.sstate_path])
# For now this only checks if random sstate tasks are handled correctly as a group.
# In the future we should add control over what tasks we check for.
- sstate_archs_list = []
expected_remaining_sstate = []
for idx in range(len(target_config)):
self.append_config(global_config[idx])
self.append_recipeinc(target, target_config[idx])
- sstate_arch = get_bb_var('SSTATE_PKGARCH', target)
- if not sstate_arch in sstate_archs_list:
- sstate_archs_list.append(sstate_arch)
if target_config[idx] == target_config[-1]:
target_sstate_before_build = self.search_sstate(target + r'.*?\.tar.zst$')
bitbake("-cclean %s" % target)
@@ -198,7 +322,7 @@ class SStateTests(SStateBase):
self.remove_recipeinc(target, target_config[idx])
self.assertEqual(result.status, 0, msg = "build of %s failed with %s" % (target, result.output))
- runCmd("sstate-cache-management.sh -y --cache-dir=%s --remove-duplicated --extra-archs=%s" % (self.sstate_path, ','.join(map(str, sstate_archs_list))))
+ runCmd("sstate-cache-management.py -y --cache-dir=%s --remove-duplicated" % (self.sstate_path))
actual_remaining_sstate = [x for x in self.search_sstate(target + r'.*?\.tar.zst$') if not any(pattern in x for pattern in ignore_patterns)]
actual_not_expected = [x for x in actual_remaining_sstate if x not in expected_remaining_sstate]
@@ -242,6 +366,7 @@ class SStateTests(SStateBase):
target_config.append('')
self.run_test_sstate_cache_management_script('m4', global_config, target_config, ignore_patterns=['populate_lic'])
+class SStateHashSameSigs(SStateBase):
def test_sstate_32_64_same_hash(self):
"""
The sstate checksums for both native and target should not vary whether
@@ -327,6 +452,7 @@ BB_SIGNATURE_HANDLER = "OEBasicHash"
self.maxDiff = None
self.assertCountEqual(files1, files2)
+class SStateHashSameSigs2(SStateBase):
def test_sstate_allarch_samesigs(self):
"""
The sstate checksums of allarch packages should be independent of whichever
@@ -375,41 +501,7 @@ BB_SIGNATURE_HANDLER = "OEBasicHash"
"""
self.sstate_common_samesigs(configA, configB)
- def sstate_common_samesigs(self, configA, configB, allarch=False):
-
- self.write_config(configA)
- self.track_for_cleanup(self.topdir + "/tmp-sstatesamehash")
- bitbake("world meta-toolchain -S none")
- self.write_config(configB)
- self.track_for_cleanup(self.topdir + "/tmp-sstatesamehash2")
- bitbake("world meta-toolchain -S none")
-
- def get_files(d):
- f = {}
- for root, dirs, files in os.walk(d):
- for name in files:
- if "meta-environment" in root or "cross-canadian" in root:
- continue
- if "do_build" not in name:
- # 1.4.1+gitAUTOINC+302fca9f4c-r0.do_package_write_ipk.sigdata.f3a2a38697da743f0dbed8b56aafcf79
- (_, task, _, shash) = name.rsplit(".", 3)
- f[os.path.join(os.path.basename(root), task)] = shash
- return f
-
- nativesdkdir = os.path.basename(glob.glob(self.topdir + "/tmp-sstatesamehash/stamps/*-nativesdk*-linux")[0])
-
- files1 = get_files(self.topdir + "/tmp-sstatesamehash/stamps/" + nativesdkdir)
- files2 = get_files(self.topdir + "/tmp-sstatesamehash2/stamps/" + nativesdkdir)
- self.maxDiff = None
- self.assertEqual(files1, files2)
-
- if allarch:
- allarchdir = os.path.basename(glob.glob(self.topdir + "/tmp-sstatesamehash/stamps/all-*-linux")[0])
-
- files1 = get_files(self.topdir + "/tmp-sstatesamehash/stamps/" + allarchdir)
- files2 = get_files(self.topdir + "/tmp-sstatesamehash2/stamps/" + allarchdir)
- self.assertEqual(files1, files2)
-
+class SStateHashSameSigs3(SStateBase):
def test_sstate_sametune_samesigs(self):
"""
The sstate checksums of two identical machines (using the same tune) should be the
@@ -444,7 +536,7 @@ BB_SIGNATURE_HANDLER = "OEBasicHash"
f = []
for root, dirs, files in os.walk(d):
for name in files:
- if "meta-environment" in root or "cross-canadian" in root:
+ if "meta-environment" in root or "cross-canadian" in root or 'meta-ide-support' in root:
continue
if "qemux86copy-" in root or "qemux86-" in root:
continue
@@ -497,7 +589,7 @@ BB_SIGNATURE_HANDLER = "OEBasicHash"
self.maxDiff = None
self.assertCountEqual(files1, files2)
-
+class SStateHashSameSigs4(SStateBase):
def test_sstate_noop_samesigs(self):
"""
The sstate checksums of two builds with these variables changed or
@@ -594,6 +686,7 @@ BB_SIGNATURE_HANDLER = "OEBasicHash"
copy_layer_2 = self.topdir + "/meta-copy2/meta"
oe.path.copytree(core_layer, copy_layer_1)
+ os.symlink(os.path.dirname(core_layer) + "/scripts", self.topdir + "/meta-copy1/scripts")
self.write_config("""
TMPDIR = "${TOPDIR}/tmp-sstatesamehash"
""")
@@ -603,6 +696,7 @@ TMPDIR = "${TOPDIR}/tmp-sstatesamehash"
bitbake("bash -S none")
oe.path.copytree(core_layer, copy_layer_2)
+ os.symlink(os.path.dirname(core_layer) + "/scripts", self.topdir + "/meta-copy2/scripts")
self.write_config("""
TMPDIR = "${TOPDIR}/tmp-sstatesamehash2"
""")
@@ -623,3 +717,292 @@ TMPDIR = "${TOPDIR}/tmp-sstatesamehash2"
self.maxDiff = None
self.assertCountEqual(files1, files2)
+class SStateFindSiginfo(SStateBase):
+ def test_sstate_compare_sigfiles_and_find_siginfo(self):
+ """
+ Test the functionality of the find_siginfo: basic function and callback in compare_sigfiles
+ """
+ self.write_config("""
+TMPDIR = \"${TOPDIR}/tmp-sstates-findsiginfo\"
+TCLIBCAPPEND = \"\"
+MACHINE = \"qemux86-64\"
+require conf/multilib.conf
+MULTILIBS = "multilib:lib32"
+DEFAULTTUNE:virtclass-multilib-lib32 = "x86"
+BB_SIGNATURE_HANDLER = "OEBasicHash"
+""")
+ self.track_for_cleanup(self.topdir + "/tmp-sstates-findsiginfo")
+
+ pns = ["binutils", "binutils-native", "lib32-binutils"]
+ target_configs = [
+"""
+TMPVAL1 = "tmpval1"
+TMPVAL2 = "tmpval2"
+do_tmptask1() {
+ echo ${TMPVAL1}
+}
+do_tmptask2() {
+ echo ${TMPVAL2}
+}
+addtask do_tmptask1
+addtask tmptask2 before do_tmptask1
+""",
+"""
+TMPVAL3 = "tmpval3"
+TMPVAL4 = "tmpval4"
+do_tmptask1() {
+ echo ${TMPVAL3}
+}
+do_tmptask2() {
+ echo ${TMPVAL4}
+}
+addtask do_tmptask1
+addtask tmptask2 before do_tmptask1
+"""
+ ]
+
+ for target_config in target_configs:
+ self.write_recipeinc("binutils", target_config)
+ for pn in pns:
+ bitbake("%s -c do_tmptask1 -S none" % pn)
+ self.delete_recipeinc("binutils")
+
+ with bb.tinfoil.Tinfoil() as tinfoil:
+ tinfoil.prepare(config_only=True)
+
+ def find_siginfo(pn, taskname, sigs=None):
+ result = None
+ command_complete = False
+ tinfoil.set_event_mask(["bb.event.FindSigInfoResult",
+ "bb.command.CommandCompleted"])
+ ret = tinfoil.run_command("findSigInfo", pn, taskname, sigs)
+ if ret:
+ while result is None or not command_complete:
+ event = tinfoil.wait_event(1)
+ if event:
+ if isinstance(event, bb.command.CommandCompleted):
+ command_complete = True
+ elif isinstance(event, bb.event.FindSigInfoResult):
+ result = event.result
+ return result
+
+ def recursecb(key, hash1, hash2):
+ nonlocal recursecb_count
+ recursecb_count += 1
+ hashes = [hash1, hash2]
+ hashfiles = find_siginfo(key, None, hashes)
+ self.assertCountEqual(hashes, hashfiles)
+ bb.siggen.compare_sigfiles(hashfiles[hash1]['path'], hashfiles[hash2]['path'], recursecb)
+
+ for pn in pns:
+ recursecb_count = 0
+ matches = find_siginfo(pn, "do_tmptask1")
+ self.assertGreaterEqual(len(matches), 2)
+ latesthashes = sorted(matches.keys(), key=lambda h: matches[h]['time'])[-2:]
+ bb.siggen.compare_sigfiles(matches[latesthashes[-2]]['path'], matches[latesthashes[-1]]['path'], recursecb)
+ self.assertEqual(recursecb_count,1)
+
+class SStatePrintdiff(SStateBase):
+ def run_test_printdiff_changerecipe(self, target, change_recipe, change_bbtask, change_content, expected_sametmp_output, expected_difftmp_output):
+ import time
+ self.write_config("""
+TMPDIR = "${{TOPDIR}}/tmp-sstateprintdiff-sametmp-{}"
+""".format(time.time()))
+ # Use runall do_build to ensure any indirect sstate is created, e.g. tzcode-native on both x86 and
+ # aarch64 hosts since only allarch target recipes depend upon it and it may not be built otherwise.
+ # A bitbake -c cleansstate tzcode-native would cause some of these tests to error for example.
+ bitbake("--runall build --runall deploy_source_date_epoch {}".format(target))
+ bitbake("-S none {}".format(target))
+ bitbake(change_bbtask)
+ self.write_recipeinc(change_recipe, change_content)
+ result_sametmp = bitbake("-S printdiff {}".format(target))
+
+ self.write_config("""
+TMPDIR = "${{TOPDIR}}/tmp-sstateprintdiff-difftmp-{}"
+""".format(time.time()))
+ result_difftmp = bitbake("-S printdiff {}".format(target))
+
+ self.delete_recipeinc(change_recipe)
+ for item in expected_sametmp_output:
+ self.assertIn(item, result_sametmp.output, msg = "Item {} not found in output:\n{}".format(item, result_sametmp.output))
+ for item in expected_difftmp_output:
+ self.assertIn(item, result_difftmp.output, msg = "Item {} not found in output:\n{}".format(item, result_difftmp.output))
+
+ def run_test_printdiff_changeconfig(self, target, change_bbtasks, change_content, expected_sametmp_output, expected_difftmp_output):
+ import time
+ self.write_config("""
+TMPDIR = "${{TOPDIR}}/tmp-sstateprintdiff-sametmp-{}"
+""".format(time.time()))
+ bitbake("--runall build --runall deploy_source_date_epoch {}".format(target))
+ bitbake("-S none {}".format(target))
+ bitbake(" ".join(change_bbtasks))
+ self.append_config(change_content)
+ result_sametmp = bitbake("-S printdiff {}".format(target))
+
+ self.write_config("""
+TMPDIR = "${{TOPDIR}}/tmp-sstateprintdiff-difftmp-{}"
+""".format(time.time()))
+ self.append_config(change_content)
+ result_difftmp = bitbake("-S printdiff {}".format(target))
+
+ for item in expected_sametmp_output:
+ self.assertIn(item, result_sametmp.output, msg = "Item {} not found in output:\n{}".format(item, result_sametmp.output))
+ for item in expected_difftmp_output:
+ self.assertIn(item, result_difftmp.output, msg = "Item {} not found in output:\n{}".format(item, result_difftmp.output))
+
+
+ # Check if printdiff walks the full dependency chain from the image target to where the change is in a specific recipe
+ def test_image_minimal_vs_perlcross(self):
+ expected_output = ("Task perlcross-native:do_install couldn't be used from the cache because:",
+"We need hash",
+"most recent matching task was")
+ expected_sametmp_output = expected_output + (
+"Variable do_install value changed",
+'+ echo "this changes the task signature"')
+ expected_difftmp_output = expected_output
+
+ self.run_test_printdiff_changerecipe("core-image-minimal", "perlcross", "-c do_install perlcross-native",
+"""
+do_install:append() {
+ echo "this changes the task signature"
+}
+""",
+expected_sametmp_output, expected_difftmp_output)
+
+ # Check if changes to gcc-source (which uses tmp/work-shared) are correctly discovered
+ def test_gcc_runtime_vs_gcc_source(self):
+ gcc_source_pn = 'gcc-source-%s' % get_bb_vars(['PV'], 'gcc')['PV']
+
+ expected_output = ("Task {}:do_preconfigure couldn't be used from the cache because:".format(gcc_source_pn),
+"We need hash",
+"most recent matching task was")
+ expected_sametmp_output = expected_output + (
+"Variable do_preconfigure value changed",
+'+ print("this changes the task signature")')
+ expected_difftmp_output = expected_output
+
+ self.run_test_printdiff_changerecipe("gcc-runtime", "gcc-source", "-c do_preconfigure {}".format(gcc_source_pn),
+"""
+python do_preconfigure:append() {
+ print("this changes the task signature")
+}
+""",
+expected_sametmp_output, expected_difftmp_output)
+
+ # Check if changing a really base task definiton is reported against multiple core recipes using it
+ def test_image_minimal_vs_base_do_configure(self):
+ change_bbtasks = ('zstd-native:do_configure',
+'texinfo-dummy-native:do_configure',
+'ldconfig-native:do_configure',
+'gettext-minimal-native:do_configure',
+'tzcode-native:do_configure',
+'makedevs-native:do_configure',
+'pigz-native:do_configure',
+'update-rc.d-native:do_configure',
+'unzip-native:do_configure',
+'gnu-config-native:do_configure')
+
+ expected_output = ["Task {} couldn't be used from the cache because:".format(t) for t in change_bbtasks] + [
+"We need hash",
+"most recent matching task was"]
+
+ expected_sametmp_output = expected_output + [
+"Variable base_do_configure value changed",
+'+ echo "this changes base_do_configure() definiton "']
+ expected_difftmp_output = expected_output
+
+ self.run_test_printdiff_changeconfig("core-image-minimal",change_bbtasks,
+"""
+INHERIT += "base-do-configure-modified"
+""",
+expected_sametmp_output, expected_difftmp_output)
+
+@OETestTag("yocto-mirrors")
+class SStateMirrors(SStateBase):
+ def check_bb_output(self, output, exceptions, check_cdn):
+ def is_exception(object, exceptions):
+ for e in exceptions:
+ if re.search(e, object):
+ return True
+ return False
+
+ output_l = output.splitlines()
+ for l in output_l:
+ if l.startswith("Sstate summary"):
+ for idx, item in enumerate(l.split()):
+ if item == 'Missed':
+ missing_objects = int(l.split()[idx+1])
+ break
+ else:
+ self.fail("Did not find missing objects amount in sstate summary: {}".format(l))
+ break
+ else:
+ self.fail("Did not find 'Sstate summary' line in bitbake output")
+
+ failed_urls = []
+ failed_urls_extrainfo = []
+ for l in output_l:
+ if "SState: Unsuccessful fetch test for" in l and check_cdn:
+ missing_object = l.split()[6]
+ elif "SState: Looked for but didn't find file" in l and not check_cdn:
+ missing_object = l.split()[8]
+ else:
+ missing_object = None
+ if missing_object:
+ if not is_exception(missing_object, exceptions):
+ failed_urls.append(missing_object)
+ else:
+ missing_objects -= 1
+
+ if "urlopen failed for" in l and not is_exception(l, exceptions):
+ failed_urls_extrainfo.append(l)
+
+ self.assertEqual(len(failed_urls), missing_objects, "Amount of reported missing objects does not match failed URLs: {}\nFailed URLs:\n{}\nFetcher diagnostics:\n{}".format(missing_objects, "\n".join(failed_urls), "\n".join(failed_urls_extrainfo)))
+ self.assertEqual(len(failed_urls), 0, "Missing objects in the cache:\n{}\nFetcher diagnostics:\n{}".format("\n".join(failed_urls), "\n".join(failed_urls_extrainfo)))
+
+ def run_test(self, machine, targets, exceptions, check_cdn = True, ignore_errors = False):
+ # sstate is checked for existence of these, but they never get written out to begin with
+ exceptions += ["{}.*image_qa".format(t) for t in targets.split()]
+ exceptions += ["{}.*deploy_source_date_epoch".format(t) for t in targets.split()]
+ exceptions += ["{}.*image_complete".format(t) for t in targets.split()]
+ exceptions += ["linux-yocto.*shared_workdir"]
+ # these get influnced by IMAGE_FSTYPES tweaks in yocto-autobuilder-helper's config.json (on x86-64)
+ # additionally, they depend on noexec (thus, absent stamps) package, install, etc. image tasks,
+ # which makes tracing other changes difficult
+ exceptions += ["{}.*create_spdx".format(t) for t in targets.split()]
+ exceptions += ["{}.*create_runtime_spdx".format(t) for t in targets.split()]
+
+ if check_cdn:
+ self.config_sstate(True)
+ self.append_config("""
+MACHINE = "{}"
+BB_HASHSERVE_UPSTREAM = "hashserv.yocto.io:8687"
+SSTATE_MIRRORS ?= "file://.* http://cdn.jsdelivr.net/yocto/sstate/all/PATH;downloadfilename=PATH"
+""".format(machine))
+ else:
+ self.append_config("""
+MACHINE = "{}"
+""".format(machine))
+ result = bitbake("-DD -n {}".format(targets))
+ bitbake("-S none {}".format(targets))
+ if ignore_errors:
+ return
+ self.check_bb_output(result.output, exceptions, check_cdn)
+
+ def test_cdn_mirror_qemux86_64(self):
+ exceptions = []
+ self.run_test("qemux86-64", "core-image-minimal core-image-full-cmdline core-image-sato-sdk", exceptions, ignore_errors = True)
+ self.run_test("qemux86-64", "core-image-minimal core-image-full-cmdline core-image-sato-sdk", exceptions)
+
+ def test_cdn_mirror_qemuarm64(self):
+ exceptions = []
+ self.run_test("qemuarm64", "core-image-minimal core-image-full-cmdline core-image-sato-sdk", exceptions, ignore_errors = True)
+ self.run_test("qemuarm64", "core-image-minimal core-image-full-cmdline core-image-sato-sdk", exceptions)
+
+ def test_local_cache_qemux86_64(self):
+ exceptions = []
+ self.run_test("qemux86-64", "core-image-minimal core-image-full-cmdline core-image-sato-sdk", exceptions, check_cdn = False)
+
+ def test_local_cache_qemuarm64(self):
+ exceptions = []
+ self.run_test("qemuarm64", "core-image-minimal core-image-full-cmdline core-image-sato-sdk", exceptions, check_cdn = False)
diff --git a/meta/lib/oeqa/selftest/cases/sysroot.py b/meta/lib/oeqa/selftest/cases/sysroot.py
index 315d1a61c2..ef854f6fee 100644
--- a/meta/lib/oeqa/selftest/cases/sysroot.py
+++ b/meta/lib/oeqa/selftest/cases/sysroot.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
@@ -35,3 +37,50 @@ TESTSTRING:pn-sysroot-test-arch1 = "%s"
TESTSTRING:pn-sysroot-test-arch2 = "%s"
""" % (uuid1, uuid2))
bitbake("sysroot-test")
+
+ def test_sysroot_max_shebang(self):
+ """
+ Summary: Check max shebang triggers. To confirm [YOCTO #11053] is closed.
+ Expected: Fail when a shebang bigger than the max shebang-size is reached.
+ Author: Paulo Neves <ptsneves@gmail.com>
+ """
+ expected = "maximum shebang size exceeded, the maximum size is 128. [shebang-size]"
+ res = bitbake("sysroot-shebang-test-native -c populate_sysroot", ignore_status=True)
+ self.assertTrue(expected in res.output, msg=res.output)
+ self.assertTrue(res.status != 0)
+
+ def test_sysroot_la(self):
+ """
+ Summary: Check that workdir paths are not contained in .la files.
+ Expected: Fail when a workdir path is found in the file content.
+ Author: Paulo Neves <ptsneves@gmail.com>
+ """
+ expected = "la-test.la failed sanity test (workdir) in path"
+
+ res = bitbake("sysroot-la-test -c populate_sysroot", ignore_status=True)
+ self.assertTrue(expected in res.output, msg=res.output)
+ self.assertTrue('[la]' in res.output, msg=res.output)
+ self.assertTrue(res.status != 0)
+
+ res = bitbake("sysroot-la-test-native -c populate_sysroot", ignore_status=True)
+ self.assertTrue(expected in res.output, msg=res.output)
+ self.assertTrue('[la]' in res.output, msg=res.output)
+ self.assertTrue(res.status != 0)
+
+ def test_sysroot_pkgconfig(self):
+ """
+ Summary: Check that tmpdir paths are not contained in .pc files.
+ Expected: Fail when a tmpdir path is found in the file content.
+ Author: Paulo Neves <ptsneves@gmail.com>
+ """
+ expected = "test.pc failed sanity test (tmpdir) in path"
+
+ res = bitbake("sysroot-pc-test -c populate_sysroot", ignore_status=True)
+ self.assertTrue('[pkgconfig]' in res.output, msg=res.output)
+ self.assertTrue(expected in res.output, msg=res.output)
+ self.assertTrue(res.status != 0)
+
+ res = bitbake("sysroot-pc-test-native -c populate_sysroot", ignore_status=True)
+ self.assertTrue(expected in res.output, msg=res.output)
+ self.assertTrue('[pkgconfig]' in res.output, msg=res.output)
+ self.assertTrue(res.status != 0)
diff --git a/meta/lib/oeqa/selftest/cases/tinfoil.py b/meta/lib/oeqa/selftest/cases/tinfoil.py
index c81d56d82b..21c8686b2a 100644
--- a/meta/lib/oeqa/selftest/cases/tinfoil.py
+++ b/meta/lib/oeqa/selftest/cases/tinfoil.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
@@ -46,6 +48,17 @@ class TinfoilTests(OESelftestTestCase):
rd = tinfoil.parse_recipe_file(best[3])
self.assertEqual(testrecipe, rd.getVar('PN'))
+ def test_parse_virtual_recipe(self):
+ with bb.tinfoil.Tinfoil() as tinfoil:
+ tinfoil.prepare(config_only=False, quiet=2)
+ testrecipe = 'nativesdk-gcc'
+ best = tinfoil.find_best_provider(testrecipe)
+ if not best:
+ self.fail('Unable to find recipe providing %s' % testrecipe)
+ rd = tinfoil.parse_recipe_file(best[3])
+ self.assertEqual(testrecipe, rd.getVar('PN'))
+ self.assertIsNotNone(rd.getVar('FILE_LAYERNAME'))
+
def test_parse_recipe_copy_expand(self):
with bb.tinfoil.Tinfoil() as tinfoil:
tinfoil.prepare(config_only=False, quiet=2)
@@ -64,6 +77,32 @@ class TinfoilTests(OESelftestTestCase):
localdata.setVar('PN', 'hello')
self.assertEqual('hello', localdata.getVar('BPN'))
+ # The config_data API to parse_recipe_file is used by:
+ # layerindex-web layerindex/update_layer.py
+ def test_parse_recipe_custom_data(self):
+ with bb.tinfoil.Tinfoil() as tinfoil:
+ tinfoil.prepare(config_only=False, quiet=2)
+ localdata = bb.data.createCopy(tinfoil.config_data)
+ localdata.setVar("TESTVAR", "testval")
+ testrecipe = 'mdadm'
+ best = tinfoil.find_best_provider(testrecipe)
+ if not best:
+ self.fail('Unable to find recipe providing %s' % testrecipe)
+ rd = tinfoil.parse_recipe_file(best[3], config_data=localdata)
+ self.assertEqual("testval", rd.getVar('TESTVAR'))
+
+ def test_parse_virtual_recipe_custom_data(self):
+ with bb.tinfoil.Tinfoil() as tinfoil:
+ tinfoil.prepare(config_only=False, quiet=2)
+ localdata = bb.data.createCopy(tinfoil.config_data)
+ localdata.setVar("TESTVAR", "testval")
+ testrecipe = 'nativesdk-gcc'
+ best = tinfoil.find_best_provider(testrecipe)
+ if not best:
+ self.fail('Unable to find recipe providing %s' % testrecipe)
+ rd = tinfoil.parse_recipe_file(best[3], config_data=localdata)
+ self.assertEqual("testval", rd.getVar('TESTVAR'))
+
def test_list_recipes(self):
with bb.tinfoil.Tinfoil() as tinfoil:
tinfoil.prepare(config_only=False, quiet=2)
diff --git a/meta/lib/oeqa/selftest/cases/usergrouptests.py b/meta/lib/oeqa/selftest/cases/usergrouptests.py
new file mode 100644
index 0000000000..3c59b0f290
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/usergrouptests.py
@@ -0,0 +1,57 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+import os
+import shutil
+from oeqa.selftest.case import OESelftestTestCase
+from oeqa.utils.commands import bitbake
+from oeqa.utils.commands import bitbake, get_bb_var, get_test_layer
+
+class UserGroupTests(OESelftestTestCase):
+ def test_group_from_dep_package(self):
+ self.logger.info("Building creategroup2")
+ bitbake(' creategroup2 creategroup1')
+ bitbake(' creategroup2 creategroup1 -c clean')
+ self.logger.info("Packaging creategroup2")
+ self.assertTrue(bitbake(' creategroup2 -c package'))
+
+ def test_add_task_between_p_sysroot_and_package(self):
+ # Test for YOCTO #14961
+ self.assertTrue(bitbake('useraddbadtask -C fetch'))
+
+ def test_postinst_order(self):
+ self.logger.info("Building dcreategroup")
+ self.assertTrue(bitbake(' dcreategroup'))
+
+ def test_static_useradd_from_dynamic(self):
+ metaselftestpath = get_test_layer()
+ self.logger.info("Building core-image-minimal to generate passwd/group file")
+ bitbake(' core-image-minimal')
+ self.logger.info("Setting up useradd-staticids")
+ repropassdir = os.path.join(metaselftestpath, "conf/include")
+ os.makedirs(repropassdir)
+ etcdir=os.path.join(os.path.join(os.path.join(get_bb_var("TMPDIR"), "work"), \
+ os.path.join(get_bb_var("MACHINE").replace("-","_")+"-poky-linux", "core-image-minimal/1.0/rootfs/etc")))
+ shutil.copy(os.path.join(etcdir, "passwd"), os.path.join(repropassdir, "reproducable-passwd"))
+ shutil.copy(os.path.join(etcdir, "group"), os.path.join(repropassdir, "reproducable-group"))
+ # Copy the original local.conf
+ shutil.copyfile(os.path.join(os.environ.get('BUILDDIR'), 'conf/local.conf'), os.path.join(os.environ.get('BUILDDIR'), 'conf/local.conf.orig'))
+
+ self.write_config("USERADDEXTENSION = \"useradd-staticids\"")
+ self.write_config("USERADD_ERROR_DYNAMIC ??= \"error\"")
+ self.write_config("USERADD_UID_TABLES += \"conf/include/reproducible-passwd\"")
+ self.write_config("USERADD_GID_TABLES += \"conf/include/reproducible-group\"")
+ self.logger.info("Rebuild with staticids")
+ bitbake(' core-image-minimal')
+ shutil.copyfile(os.path.join(os.environ.get('BUILDDIR'), 'conf/local.conf.orig'), os.path.join(os.environ.get('BUILDDIR'), 'conf/local.conf'))
+ self.logger.info("Rebuild without staticids")
+ bitbake(' core-image-minimal')
+ self.write_config("USERADDEXTENSION = \"useradd-staticids\"")
+ self.write_config("USERADD_ERROR_DYNAMIC ??= \"error\"")
+ self.write_config("USERADD_UID_TABLES += \"files/static-passwd\"")
+ self.write_config("USERADD_GID_TABLES += \"files/static-group\"")
+ self.logger.info("Rebuild with other staticids")
+ self.assertTrue(bitbake(' core-image-minimal'))
diff --git a/meta/lib/oeqa/selftest/cases/wic.py b/meta/lib/oeqa/selftest/cases/wic.py
index de74c07a03..b616759209 100644
--- a/meta/lib/oeqa/selftest/cases/wic.py
+++ b/meta/lib/oeqa/selftest/cases/wic.py
@@ -15,33 +15,15 @@ import hashlib
from glob import glob
from shutil import rmtree, copy
-from functools import wraps, lru_cache
from tempfile import NamedTemporaryFile
+from tempfile import TemporaryDirectory
from oeqa.selftest.case import OESelftestTestCase
from oeqa.core.decorator import OETestTag
+from oeqa.core.decorator.data import skipIfNotArch
from oeqa.utils.commands import runCmd, bitbake, get_bb_var, get_bb_vars, runqemu
-@lru_cache()
-def get_host_arch():
- return get_bb_var('HOST_ARCH')
-
-
-def only_for_arch(archs):
- """Decorator for wrapping test cases that can be run only for specific target
- architectures. A list of compatible architectures is passed in `archs`.
- """
- def wrapper(func):
- @wraps(func)
- def wrapped_f(*args, **kwargs):
- arch = get_host_arch()
- if archs and arch not in archs:
- raise unittest.SkipTest("Testcase arch dependency not met: %s" % arch)
- return func(*args, **kwargs)
- return wrapped_f
- return wrapper
-
def extract_files(debugfs_output):
"""
extract file names from the output of debugfs -R 'ls -p',
@@ -165,20 +147,87 @@ class CLITests(OESelftestTestCase):
self.assertEqual(1, runCmd('wic', ignore_status=True).status)
class Wic(WicTestCase):
+ def test_skip_kernel_install(self):
+ """Test the functionality of not installing the kernel in the boot directory using the wic plugin"""
+ # create a temporary file for the WKS content
+ with NamedTemporaryFile("w", suffix=".wks") as wks:
+ wks.write(
+ 'part --source bootimg-efi '
+ '--sourceparams="loader=grub-efi,install-kernel-into-boot-dir=false" '
+ '--label boot --active\n'
+ )
+ wks.flush()
+ # create a temporary directory to extract the disk image to
+ with TemporaryDirectory() as tmpdir:
+ img = 'core-image-minimal'
+ # build the image using the WKS file
+ cmd = "wic create %s -e %s -o %s" % (
+ wks.name, img, self.resultdir)
+ runCmd(cmd)
+ wksname = os.path.splitext(os.path.basename(wks.name))[0]
+ out = glob(os.path.join(
+ self.resultdir, "%s-*.direct" % wksname))
+ self.assertEqual(1, len(out))
+ sysroot = get_bb_var('RECIPE_SYSROOT_NATIVE', 'wic-tools')
+ # extract the content of the disk image to the temporary directory
+ cmd = "wic cp %s:1 %s -n %s" % (out[0], tmpdir, sysroot)
+ runCmd(cmd)
+ # check if the kernel is installed or not
+ kimgtype = get_bb_var('KERNEL_IMAGETYPE', img)
+ for file in os.listdir(tmpdir):
+ if file == kimgtype:
+ raise AssertionError(
+ "The kernel image '{}' was found in the partition".format(kimgtype)
+ )
+
+ def test_kernel_install(self):
+ """Test the installation of the kernel to the boot directory in the wic plugin"""
+ # create a temporary file for the WKS content
+ with NamedTemporaryFile("w", suffix=".wks") as wks:
+ wks.write(
+ 'part --source bootimg-efi '
+ '--sourceparams="loader=grub-efi,install-kernel-into-boot-dir=true" '
+ '--label boot --active\n'
+ )
+ wks.flush()
+ # create a temporary directory to extract the disk image to
+ with TemporaryDirectory() as tmpdir:
+ img = 'core-image-minimal'
+ # build the image using the WKS file
+ cmd = "wic create %s -e %s -o %s" % (wks.name, img, self.resultdir)
+ runCmd(cmd)
+ wksname = os.path.splitext(os.path.basename(wks.name))[0]
+ out = glob(os.path.join(self.resultdir, "%s-*.direct" % wksname))
+ self.assertEqual(1, len(out))
+ sysroot = get_bb_var('RECIPE_SYSROOT_NATIVE', 'wic-tools')
+ # extract the content of the disk image to the temporary directory
+ cmd = "wic cp %s:1 %s -n %s" % (out[0], tmpdir, sysroot)
+ runCmd(cmd)
+ # check if the kernel is installed or not
+ kimgtype = get_bb_var('KERNEL_IMAGETYPE', img)
+ found = False
+ for file in os.listdir(tmpdir):
+ if file == kimgtype:
+ found = True
+ break
+ self.assertTrue(
+ found, "The kernel image '{}' was not found in the boot partition".format(kimgtype)
+ )
+
def test_build_image_name(self):
"""Test wic create wictestdisk --image-name=core-image-minimal"""
cmd = "wic create wictestdisk --image-name=core-image-minimal -o %s" % self.resultdir
runCmd(cmd)
self.assertEqual(1, len(glob(os.path.join (self.resultdir, "wictestdisk-*.direct"))))
- @only_for_arch(['i586', 'i686', 'x86_64'])
+ @skipIfNotArch(['i586', 'i686', 'x86_64'])
def test_gpt_image(self):
"""Test creation of core-image-minimal with gpt table and UUID boot"""
cmd = "wic create directdisk-gpt --image-name core-image-minimal -o %s" % self.resultdir
runCmd(cmd)
self.assertEqual(1, len(glob(os.path.join(self.resultdir, "directdisk-*.direct"))))
- @only_for_arch(['i586', 'i686', 'x86_64'])
+ @skipIfNotArch(['i586', 'i686', 'x86_64'])
def test_iso_image(self):
"""Test creation of hybrid iso image with legacy and EFI boot"""
config = 'INITRAMFS_IMAGE = "core-image-minimal-initramfs"\n'\
@@ -192,21 +241,21 @@ class Wic(WicTestCase):
self.assertEqual(1, len(glob(os.path.join(self.resultdir, "HYBRID_ISO_IMG-*.direct"))))
self.assertEqual(1, len(glob(os.path.join (self.resultdir, "HYBRID_ISO_IMG-*.iso"))))
- @only_for_arch(['i586', 'i686', 'x86_64'])
+ @skipIfNotArch(['i586', 'i686', 'x86_64'])
def test_qemux86_directdisk(self):
"""Test creation of qemux-86-directdisk image"""
cmd = "wic create qemux86-directdisk -e core-image-minimal -o %s" % self.resultdir
runCmd(cmd)
self.assertEqual(1, len(glob(os.path.join(self.resultdir, "qemux86-directdisk-*direct"))))
- @only_for_arch(['i586', 'i686', 'x86_64', 'aarch64'])
+ @skipIfNotArch(['i586', 'i686', 'x86_64', 'aarch64'])
def test_mkefidisk(self):
"""Test creation of mkefidisk image"""
cmd = "wic create mkefidisk -e core-image-minimal -o %s" % self.resultdir
runCmd(cmd)
self.assertEqual(1, len(glob(os.path.join(self.resultdir, "mkefidisk-*direct"))))
- @only_for_arch(['i586', 'i686', 'x86_64'])
+ @skipIfNotArch(['i586', 'i686', 'x86_64'])
def test_bootloader_config(self):
"""Test creation of directdisk-bootloader-config image"""
config = 'DEPENDS:pn-core-image-minimal += "syslinux"\n'
@@ -217,7 +266,7 @@ class Wic(WicTestCase):
runCmd(cmd)
self.assertEqual(1, len(glob(os.path.join(self.resultdir, "directdisk-bootloader-config-*direct"))))
- @only_for_arch(['i586', 'i686', 'x86_64', 'aarch64'])
+ @skipIfNotArch(['i586', 'i686', 'x86_64', 'aarch64'])
def test_systemd_bootdisk(self):
"""Test creation of systemd-bootdisk image"""
config = 'MACHINE_FEATURES:append = " efi"\n'
@@ -248,7 +297,7 @@ class Wic(WicTestCase):
self.assertEqual(1, len(glob(os.path.join(self.resultdir, "sdimage-bootpart-*direct"))))
# TODO this doesn't have to be x86-specific
- @only_for_arch(['i586', 'i686', 'x86_64'])
+ @skipIfNotArch(['i586', 'i686', 'x86_64'])
def test_default_output_dir(self):
"""Test default output location"""
for fname in glob("directdisk-*.direct"):
@@ -261,7 +310,7 @@ class Wic(WicTestCase):
runCmd(cmd)
self.assertEqual(1, len(glob("directdisk-*.direct")))
- @only_for_arch(['i586', 'i686', 'x86_64'])
+ @skipIfNotArch(['i586', 'i686', 'x86_64'])
def test_build_artifacts(self):
"""Test wic create directdisk providing all artifacts."""
bb_vars = get_bb_vars(['STAGING_DATADIR', 'RECIPE_SYSROOT_NATIVE'],
@@ -353,7 +402,7 @@ class Wic(WicTestCase):
self.assertEqual(1, len(glob(os.path.join(self.resultdir, "wictestdisk-*.direct"))))
# TODO this doesn't have to be x86-specific
- @only_for_arch(['i586', 'i686', 'x86_64'])
+ @skipIfNotArch(['i586', 'i686', 'x86_64'])
def test_rootfs_indirect_recipes(self):
"""Test usage of rootfs plugin with rootfs recipes"""
runCmd("wic create directdisk-multi-rootfs "
@@ -364,7 +413,7 @@ class Wic(WicTestCase):
self.assertEqual(1, len(glob(os.path.join(self.resultdir, "directdisk-multi-rootfs*.direct"))))
# TODO this doesn't have to be x86-specific
- @only_for_arch(['i586', 'i686', 'x86_64'])
+ @skipIfNotArch(['i586', 'i686', 'x86_64'])
def test_rootfs_artifacts(self):
"""Test usage of rootfs plugin with rootfs paths"""
bb_vars = get_bb_vars(['STAGING_DATADIR', 'RECIPE_SYSROOT_NATIVE'],
@@ -748,7 +797,48 @@ part /etc --source rootfs --fstype=ext4 --change-directory=etc
wicout = glob(os.path.join(self.resultdir, "wictestdisk-*.direct"))
self.assertEqual(1, len(wicout))
size = os.path.getsize(wicout[0])
- self.assertTrue(size > extraspace)
+ self.assertTrue(size > extraspace, msg="Extra space not present (%s vs %s)" % (size, extraspace))
+
+ def test_no_table(self):
+ """Test --no-table wks option."""
+ wks_file = 'temp.wks'
+
+ # Absolute argument.
+ with open(wks_file, 'w') as wks:
+ wks.write("part testspace --no-table --fixed-size 16k --offset 4080k")
+ runCmd("wic create %s --image-name core-image-minimal -o %s" % (wks_file, self.resultdir))
+
+ wicout = glob(os.path.join(self.resultdir, "*.*"))
+
+ self.assertEqual(1, len(wicout))
+ size = os.path.getsize(wicout[0])
+ self.assertEqual(size, 4 * 1024 * 1024)
+
+ os.remove(wks_file)
+
+ def test_partition_hidden_attributes(self):
+ """Test --hidden wks option."""
+ wks_file = 'temp.wks'
+ sysroot = get_bb_var('RECIPE_SYSROOT_NATIVE', 'wic-tools')
+ try:
+ with open(wks_file, 'w') as wks:
+ wks.write("""
+part / --source rootfs --fstype=ext4
+part / --source rootfs --fstype=ext4 --hidden
+bootloader --ptable gpt""")
+
+ runCmd("wic create %s -e core-image-minimal -o %s" \
+ % (wks_file, self.resultdir))
+ wicout = os.path.join(self.resultdir, "*.direct")
+
+ result = runCmd("%s/usr/sbin/sfdisk --part-attrs %s 1" % (sysroot, wicout))
+ self.assertEqual('', result.output)
+ result = runCmd("%s/usr/sbin/sfdisk --part-attrs %s 2" % (sysroot, wicout))
+ self.assertEqual('RequiredPartition', result.output)
+
+ finally:
+ os.remove(wks_file)
+
class Wic2(WicTestCase):
@@ -775,7 +865,7 @@ class Wic2(WicTestCase):
basename = bb_vars['IMAGE_BASENAME']
self.assertEqual(basename, image)
path = os.path.join(imgdatadir, basename) + '.env'
- self.assertTrue(os.path.isfile(path))
+ self.assertTrue(os.path.isfile(path), msg="File %s wasn't generated as expected" % path)
wicvars = set(bb_vars['WICVARS'].split())
# filter out optional variables
@@ -788,7 +878,7 @@ class Wic2(WicTestCase):
# test if variables used by wic present in the .env file
for var in wicvars:
self.assertTrue(var in content, "%s is not in .env file" % var)
- self.assertTrue(content[var])
+ self.assertTrue(content[var], "%s doesn't have a value (%s)" % (var, content[var]))
def test_image_vars_dir_short(self):
"""Test image vars directory selection -v option"""
@@ -817,27 +907,29 @@ class Wic2(WicTestCase):
self.resultdir))
self.assertEqual(1, len(glob(os.path.join(self.resultdir, "wictestdisk-*direct"))))
- @only_for_arch(['i586', 'i686', 'x86_64', 'aarch64'])
+ # TODO this test could also work on aarch64
+ @skipIfNotArch(['i586', 'i686', 'x86_64'])
def test_wic_image_type(self):
"""Test building wic images by bitbake"""
config = 'IMAGE_FSTYPES += "wic"\nWKS_FILE = "wic-image-minimal"\n'\
'MACHINE_FEATURES:append = " efi"\n'
self.append_config(config)
- bitbake('wic-image-minimal')
+ image = 'wic-image-minimal'
+ bitbake(image)
self.remove_config(config)
- deploy_dir = get_bb_var('DEPLOY_DIR_IMAGE')
- machine = self.td['MACHINE']
- prefix = os.path.join(deploy_dir, 'wic-image-minimal-%s.' % machine)
+ bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'IMAGE_LINK_NAME'], image)
+ prefix = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], '%s.' % bb_vars['IMAGE_LINK_NAME'])
+
# check if we have result image and manifests symlinks
# pointing to existing files
for suffix in ('wic', 'manifest'):
path = prefix + suffix
- self.assertTrue(os.path.islink(path))
- self.assertTrue(os.path.isfile(os.path.realpath(path)))
+ self.assertTrue(os.path.islink(path), msg="Link %s wasn't generated as expected" % path)
+ self.assertTrue(os.path.isfile(os.path.realpath(path)), msg="File linked to by %s wasn't generated as expected" % path)
# TODO this should work on aarch64
- @only_for_arch(['i586', 'i686', 'x86_64'])
+ @skipIfNotArch(['i586', 'i686', 'x86_64'])
@OETestTag("runqemu")
def test_qemu(self):
"""Test wic-image-minimal under qemu"""
@@ -853,12 +945,12 @@ class Wic2(WicTestCase):
status, output = qemu.run_serial(cmd)
self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output))
self.assertEqual(output, '4')
- cmd = "grep UUID= /etc/fstab"
+ cmd = "grep UUID=2c71ef06-a81d-4735-9d3a-379b69c6bdba /etc/fstab"
status, output = qemu.run_serial(cmd)
self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output))
self.assertEqual(output, 'UUID=2c71ef06-a81d-4735-9d3a-379b69c6bdba\t/media\text4\tdefaults\t0\t0')
- @only_for_arch(['i586', 'i686', 'x86_64'])
+ @skipIfNotArch(['i586', 'i686', 'x86_64'])
@OETestTag("runqemu")
def test_qemu_efi(self):
"""Test core-image-minimal efi image under qemu"""
@@ -1042,7 +1134,8 @@ class Wic2(WicTestCase):
size = int(size[:-3])
self.assertGreaterEqual(size, 204800)
- @only_for_arch(['i586', 'i686', 'x86_64', 'aarch64'])
+ # TODO this test could also work on aarch64
+ @skipIfNotArch(['i586', 'i686', 'x86_64'])
@OETestTag("runqemu")
def test_rawcopy_plugin_qemu(self):
"""Test rawcopy plugin in qemu"""
@@ -1050,9 +1143,13 @@ class Wic2(WicTestCase):
config = 'IMAGE_FSTYPES = "ext4"\n'
self.append_config(config)
bitbake('core-image-minimal')
+ image_link_name = get_bb_var('IMAGE_LINK_NAME', 'core-image-minimal')
self.remove_config(config)
- config = 'IMAGE_FSTYPES = "wic"\nWKS_FILE = "test_rawcopy_plugin.wks.in"\n'
+ config = 'IMAGE_FSTYPES = "wic"\n' \
+ 'IMAGE_LINK_NAME_CORE_IMAGE_MINIMAL = "%s"\n'\
+ 'WKS_FILE = "test_rawcopy_plugin.wks.in"\n'\
+ % image_link_name
self.append_config(config)
bitbake('core-image-minimal-mtdutils')
self.remove_config(config)
@@ -1066,14 +1163,14 @@ class Wic2(WicTestCase):
def _rawcopy_plugin(self, fstype):
"""Test rawcopy plugin"""
- img = 'core-image-minimal'
- machine = self.td["MACHINE"]
+ image = 'core-image-minimal'
+ bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'IMAGE_LINK_NAME'], image)
params = ',unpack' if fstype.endswith('.gz') else ''
with NamedTemporaryFile("w", suffix=".wks") as wks:
- wks.write('part / --source rawcopy --sourceparams="file=%s-%s.%s%s"\n'\
- % (img, machine, fstype, params))
+ wks.write('part / --source rawcopy --sourceparams="file=%s.%s%s"\n'\
+ % (bb_vars['IMAGE_LINK_NAME'], fstype, params))
wks.flush()
- cmd = "wic create %s -e %s -o %s" % (wks.name, img, self.resultdir)
+ cmd = "wic create %s -e %s -o %s" % (wks.name, image, self.resultdir)
runCmd(cmd)
wksname = os.path.splitext(os.path.basename(wks.name))[0]
out = glob(os.path.join(self.resultdir, "%s-*direct" % wksname))
@@ -1094,13 +1191,12 @@ class Wic2(WicTestCase):
"""Test empty plugin"""
config = 'IMAGE_FSTYPES = "wic"\nWKS_FILE = "test_empty_plugin.wks"\n'
self.append_config(config)
- bitbake('core-image-minimal')
+ image = 'core-image-minimal'
+ bitbake(image)
self.remove_config(config)
- deploy_dir = get_bb_var('DEPLOY_DIR_IMAGE')
- machine = self.td['MACHINE']
-
- image_path = os.path.join(deploy_dir, 'core-image-minimal-%s.wic' % machine)
- self.assertTrue(os.path.exists(image_path))
+ bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'IMAGE_LINK_NAME'], image)
+ image_path = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], '%s.wic' % bb_vars['IMAGE_LINK_NAME'])
+ self.assertTrue(os.path.exists(image_path), msg="Image file %s wasn't generated as expected" % image_path)
sysroot = get_bb_var('RECIPE_SYSROOT_NATIVE', 'wic-tools')
@@ -1109,7 +1205,7 @@ class Wic2(WicTestCase):
result = runCmd("wic ls %s -n %s | awk -F ' ' '{print $1 \" \" $5}' | grep '^2' | wc -w" % (image_path, sysroot))
self.assertEqual('1', result.output)
- @only_for_arch(['i586', 'i686', 'x86_64'])
+ @skipIfNotArch(['i586', 'i686', 'x86_64'])
@OETestTag("runqemu")
def test_biosplusefi_plugin_qemu(self):
"""Test biosplusefi plugin in qemu"""
@@ -1142,7 +1238,7 @@ class Wic2(WicTestCase):
self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output))
self.assertEqual(output, '*')
- @only_for_arch(['i586', 'i686', 'x86_64'])
+ @skipIfNotArch(['i586', 'i686', 'x86_64'])
def test_biosplusefi_plugin(self):
"""Test biosplusefi plugin"""
# Wic generation below may fail depending on the order of the unittests
@@ -1168,8 +1264,28 @@ class Wic2(WicTestCase):
out = glob(os.path.join(self.resultdir, "%s-*.direct" % wksname))
self.assertEqual(1, len(out))
+ @skipIfNotArch(['i586', 'i686', 'x86_64', 'aarch64'])
+ def test_uefi_kernel(self):
+ """ Test uefi-kernel in wic """
+ config = 'IMAGE_EFI_BOOT_FILES="/etc/fstab;testfile"\nIMAGE_FSTYPES = "wic"\nWKS_FILE = "test_uefikernel.wks"\nMACHINE_FEATURES:append = " efi"\n'
+ self.append_config(config)
+ bitbake('core-image-minimal')
+ self.remove_config(config)
+
+ img = 'core-image-minimal'
+ with NamedTemporaryFile("w", suffix=".wks") as wks:
+ wks.writelines(['part /boot --source bootimg-efi --sourceparams="loader=uefi-kernel"\n'
+ 'part / --source rootfs --fstype=ext4 --align 1024 --use-uuid\n'\
+ 'bootloader --timeout=0 --append="console=ttyS0,115200n8"\n'])
+ wks.flush()
+ cmd = "wic create %s -e %s -o %s" % (wks.name, img, self.resultdir)
+ runCmd(cmd)
+ wksname = os.path.splitext(os.path.basename(wks.name))[0]
+ out = glob(os.path.join(self.resultdir, "%s-*.direct" % wksname))
+ self.assertEqual(1, len(out))
+
# TODO this test could also work on aarch64
- @only_for_arch(['i586', 'i686', 'x86_64'])
+ @skipIfNotArch(['i586', 'i686', 'x86_64'])
@OETestTag("runqemu")
def test_efi_plugin_unified_kernel_image_qemu(self):
"""Test efi plugin's Unified Kernel Image feature in qemu"""
@@ -1287,19 +1403,19 @@ class Wic2(WicTestCase):
out = glob(os.path.join(self.resultdir, "%s-*direct" % wksname))
self.assertEqual(1, len(out))
- @only_for_arch(['i586', 'i686', 'x86_64'])
+ @skipIfNotArch(['i586', 'i686', 'x86_64'])
@OETestTag("runqemu")
def test_expand_mbr_image(self):
"""Test wic write --expand command for mbr image"""
# build an image
config = 'IMAGE_FSTYPES = "wic"\nWKS_FILE = "directdisk.wks"\n'
self.append_config(config)
- bitbake('core-image-minimal')
+ image = 'core-image-minimal'
+ bitbake(image)
# get path to the image
- deploy_dir = get_bb_var('DEPLOY_DIR_IMAGE')
- machine = self.td['MACHINE']
- image_path = os.path.join(deploy_dir, 'core-image-minimal-%s.wic' % machine)
+ bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'IMAGE_LINK_NAME'], image)
+ image_path = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], '%s.wic' % bb_vars['IMAGE_LINK_NAME'])
self.remove_config(config)
@@ -1307,7 +1423,7 @@ class Wic2(WicTestCase):
# expand image to 1G
new_image_path = None
with NamedTemporaryFile(mode='wb', suffix='.wic.exp',
- dir=deploy_dir, delete=False) as sparse:
+ dir=bb_vars['DEPLOY_DIR_IMAGE'], delete=False) as sparse:
sparse.truncate(1024 ** 3)
new_image_path = sparse.name
@@ -1321,11 +1437,11 @@ class Wic2(WicTestCase):
orig_sizes = [int(line.split()[3]) for line in orig.output.split('\n')[1:]]
exp_sizes = [int(line.split()[3]) for line in exp.output.split('\n')[1:]]
self.assertEqual(orig_sizes[0], exp_sizes[0]) # first partition is not resized
- self.assertTrue(orig_sizes[1] < exp_sizes[1])
+ self.assertTrue(orig_sizes[1] < exp_sizes[1], msg="Parition size wasn't enlarged (%s vs %s)" % (orig_sizes[1], exp_sizes[1]))
# Check if all free space is partitioned
result = runCmd("%s/usr/sbin/sfdisk -F %s" % (sysroot, new_image_path))
- self.assertTrue("0 B, 0 bytes, 0 sectors" in result.output)
+ self.assertIn("0 B, 0 bytes, 0 sectors", result.output)
os.rename(image_path, image_path + '.bak')
os.rename(new_image_path, image_path)
@@ -1341,6 +1457,68 @@ class Wic2(WicTestCase):
if os.path.exists(image_path + '.bak'):
os.rename(image_path + '.bak', image_path)
+ def test_gpt_partition_name(self):
+ """Test --part-name argument to set partition name in GPT table"""
+ config = 'IMAGE_FSTYPES += "wic"\nWKS_FILE = "test_gpt_partition_name.wks"\n'
+ self.append_config(config)
+ image = 'core-image-minimal'
+ bitbake(image)
+ self.remove_config(config)
+ deploy_dir = get_bb_var('DEPLOY_DIR_IMAGE')
+ bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'IMAGE_LINK_NAME'], image)
+ image_path = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], '%s.wic' % bb_vars['IMAGE_LINK_NAME'])
+
+ sysroot = get_bb_var('RECIPE_SYSROOT_NATIVE', 'wic-tools')
+
+ # Image is created
+ self.assertTrue(os.path.exists(image_path), "image file %s doesn't exist" % image_path)
+
+ # Check the names of the three partitions
+ # as listed in test_gpt_partition_name.wks
+ result = runCmd("%s/usr/sbin/sfdisk --part-label %s 1" % (sysroot, image_path))
+ self.assertEqual('boot-A', result.output)
+ result = runCmd("%s/usr/sbin/sfdisk --part-label %s 2" % (sysroot, image_path))
+ self.assertEqual('root-A', result.output)
+ # When the --part-name is not defined, the partition name is equal to the --label
+ result = runCmd("%s/usr/sbin/sfdisk --part-label %s 3" % (sysroot, image_path))
+ self.assertEqual('ext-space', result.output)
+
+ def test_empty_zeroize_plugin(self):
+ img = 'core-image-minimal'
+ expected_size = [ 1024*1024, # 1M
+ 512*1024, # 512K
+ 2*1024*1024] # 2M
+ # Check combination of sourceparams
+ with NamedTemporaryFile("w", suffix=".wks") as wks:
+ wks.writelines(
+ ['part empty --source empty --sourceparams="fill" --ondisk sda --fixed-size 1M\n',
+ 'part empty --source empty --sourceparams="size=512K" --ondisk sda --size 1M --align 1024\n',
+ 'part empty --source empty --sourceparams="size=2048k,bs=512K" --ondisk sda --size 4M --align 1024\n'
+ ])
+ wks.flush()
+ cmd = "wic create %s -e %s -o %s" % (wks.name, img, self.resultdir)
+ runCmd(cmd)
+ wksname = os.path.splitext(os.path.basename(wks.name))[0]
+ wicout = glob(os.path.join(self.resultdir, "%s-*direct" % wksname))
+ # Skip the complete image and just look at the single partitions
+ for idx, value in enumerate(wicout[1:]):
+ self.logger.info(wicout[idx])
+ # Check if partitions are actually zeroized
+ with open(wicout[idx], mode="rb") as fd:
+ ba = bytearray(fd.read())
+ for b in ba:
+ self.assertEqual(b, 0)
+ self.assertEqual(expected_size[idx], os.path.getsize(wicout[idx]))
+
+ # Check inconsistancy check between "fill" and "--size" parameter
+ with NamedTemporaryFile("w", suffix=".wks") as wks:
+ wks.writelines(['part empty --source empty --sourceparams="fill" --ondisk sda --size 1M\n'])
+ wks.flush()
+ cmd = "wic create %s -e %s -o %s" % (wks.name, img, self.resultdir)
+ result = runCmd(cmd, ignore_status=True)
+ self.assertIn("Source parameter 'fill' only works with the '--fixed-size' option, exiting.", result.output)
+ self.assertNotEqual(0, result.status)
+
class ModifyTests(WicTestCase):
def test_wic_ls(self):
"""Test listing image content using 'wic ls'"""
@@ -1383,7 +1561,7 @@ class ModifyTests(WicTestCase):
# check if file is there
result = runCmd("wic ls %s:1/ -n %s" % (images[0], sysroot))
self.assertEqual(7, len(result.output.split('\n')))
- self.assertTrue(os.path.basename(testfile.name) in result.output)
+ self.assertIn(os.path.basename(testfile.name), result.output)
# prepare directory
testdir = os.path.join(self.resultdir, 'wic-test-cp-dir')
@@ -1397,13 +1575,13 @@ class ModifyTests(WicTestCase):
# check if directory is there
result = runCmd("wic ls %s:1/ -n %s" % (images[0], sysroot))
self.assertEqual(8, len(result.output.split('\n')))
- self.assertTrue(os.path.basename(testdir) in result.output)
+ self.assertIn(os.path.basename(testdir), result.output)
# copy the file from the partition and check if it success
dest = '%s-cp' % testfile.name
runCmd("wic cp %s:1/%s %s -n %s" % (images[0],
os.path.basename(testfile.name), dest, sysroot))
- self.assertTrue(os.path.exists(dest))
+ self.assertTrue(os.path.exists(dest), msg="File %s wasn't generated as expected" % dest)
def test_wic_rm(self):
@@ -1420,7 +1598,7 @@ class ModifyTests(WicTestCase):
# list directory content of the first partition
result = runCmd("wic ls %s:1 -n %s" % (images[0], sysroot))
- self.assertIn('\n%s ' % kerneltype.upper(), result.output)
+ self.assertIn('\n%s ' % kerneltype.upper(), result.output)
self.assertIn('\nEFI <DIR> ', result.output)
# remove file. EFI partitions are case-insensitive so exercise that too
@@ -1447,7 +1625,7 @@ class ModifyTests(WicTestCase):
# list directory content of the second ext4 partition
result = runCmd("wic ls %s:2/ -n %s" % (images[0], sysroot))
self.assertTrue(set(['bin', 'home', 'proc', 'usr', 'var', 'dev', 'lib', 'sbin']).issubset(
- set(line.split()[-1] for line in result.output.split('\n') if line)))
+ set(line.split()[-1] for line in result.output.split('\n') if line)), msg="Expected directories not present %s" % result.output)
def test_wic_cp_ext(self):
"""Test copy files and directories to the ext partition."""
@@ -1462,7 +1640,7 @@ class ModifyTests(WicTestCase):
# list directory content of the ext4 partition
result = runCmd("wic ls %s:2/ -n %s" % (images[0], sysroot))
dirs = set(line.split()[-1] for line in result.output.split('\n') if line)
- self.assertTrue(set(['bin', 'home', 'proc', 'usr', 'var', 'dev', 'lib', 'sbin']).issubset(dirs))
+ self.assertTrue(set(['bin', 'home', 'proc', 'usr', 'var', 'dev', 'lib', 'sbin']).issubset(dirs), msg="Expected directories not present %s" % dirs)
with NamedTemporaryFile("w", suffix=".wic-cp") as testfile:
testfile.write("test")
@@ -1477,12 +1655,12 @@ class ModifyTests(WicTestCase):
# check if the file to copy is in the partition
result = runCmd("wic ls %s:2/etc/ -n %s" % (images[0], sysroot))
- self.assertTrue('fstab' in [line.split()[-1] for line in result.output.split('\n') if line])
+ self.assertIn('fstab', [line.split()[-1] for line in result.output.split('\n') if line])
# copy file from the partition, replace the temporary file content with it and
# check for the file size to validate the copy
runCmd("wic cp %s:2/etc/fstab %s -n %s" % (images[0], testfile.name, sysroot))
- self.assertTrue(os.stat(testfile.name).st_size > 0)
+ self.assertTrue(os.stat(testfile.name).st_size > 0, msg="Filesize not as expected %s" % os.stat(testfile.name).st_size)
def test_wic_rm_ext(self):
@@ -1497,18 +1675,18 @@ class ModifyTests(WicTestCase):
# list directory content of the /etc directory on ext4 partition
result = runCmd("wic ls %s:2/etc/ -n %s" % (images[0], sysroot))
- self.assertTrue('fstab' in [line.split()[-1] for line in result.output.split('\n') if line])
+ self.assertIn('fstab', [line.split()[-1] for line in result.output.split('\n') if line])
# remove file
runCmd("wic rm %s:2/etc/fstab -n %s" % (images[0], sysroot))
# check if it's removed
result = runCmd("wic ls %s:2/etc/ -n %s" % (images[0], sysroot))
- self.assertTrue('fstab' not in [line.split()[-1] for line in result.output.split('\n') if line])
+ self.assertNotIn('fstab', [line.split()[-1] for line in result.output.split('\n') if line])
# remove non-empty directory
runCmd("wic rm -r %s:2/etc/ -n %s" % (images[0], sysroot))
# check if it's removed
result = runCmd("wic ls %s:2/ -n %s" % (images[0], sysroot))
- self.assertTrue('etc' not in [line.split()[-1] for line in result.output.split('\n') if line])
+ self.assertNotIn('etc', [line.split()[-1] for line in result.output.split('\n') if line])
diff --git a/meta/lib/oeqa/selftest/cases/wrapper.py b/meta/lib/oeqa/selftest/cases/wrapper.py
new file mode 100644
index 0000000000..f2be44262c
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/wrapper.py
@@ -0,0 +1,16 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+from oeqa.selftest.case import OESelftestTestCase
+from oeqa.utils.commands import bitbake
+
+class WrapperTests(OESelftestTestCase):
+ def test_shebang_wrapper(self):
+ """
+ Summary: Build a recipe which will fail if the cmdline_shebang_wrapper function is defective.
+ Expected: Exit status to be 0.
+ Author: Paulo Neves <ptsneves@gmail.com>
+ """
+ res = bitbake("cmdline-shebang-wrapper-test -c install", ignore_status=False)
diff --git a/meta/lib/oeqa/selftest/cases/yoctotestresultsquerytests.py b/meta/lib/oeqa/selftest/cases/yoctotestresultsquerytests.py
new file mode 100644
index 0000000000..312edb6431
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/yoctotestresultsquerytests.py
@@ -0,0 +1,39 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+import os
+import sys
+import subprocess
+import shutil
+from oeqa.selftest.case import OESelftestTestCase
+from yocto_testresults_query import get_sha1, create_workdir
+basepath = os.path.abspath(os.path.dirname(__file__) + '/../../../../../')
+lib_path = basepath + '/scripts/lib'
+sys.path = sys.path + [lib_path]
+
+
+class TestResultsQueryTests(OESelftestTestCase):
+ def test_get_sha1(self):
+ test_data_get_sha1 = [
+ {"input": "yocto-4.0", "expected": "00cfdde791a0176c134f31e5a09eff725e75b905"},
+ {"input": "4.1_M1", "expected": "95066dde6861ee08fdb505ab3e0422156cc24fae"},
+ ]
+ for data in test_data_get_sha1:
+ test_name = data["input"]
+ with self.subTest(f"Test SHA1 from {test_name}"):
+ self.assertEqual(
+ get_sha1(basepath, data["input"]), data["expected"])
+
+ def test_create_workdir(self):
+ workdir = create_workdir()
+ try:
+ url = subprocess.check_output(
+ ["git", "-C", workdir, "remote", "get-url", "origin"]).strip().decode("utf-8")
+ except:
+ shutil.rmtree(workdir, ignore_errors=True)
+ self.fail(f"Can not execute git commands in {workdir}")
+ shutil.rmtree(workdir)
+ self.assertEqual(url, "git://git.yoctoproject.org/yocto-testresults")
diff --git a/meta/lib/oeqa/selftest/context.py b/meta/lib/oeqa/selftest/context.py
index 78c7a467e2..57844b289a 100644
--- a/meta/lib/oeqa/selftest/context.py
+++ b/meta/lib/oeqa/selftest/context.py
@@ -16,19 +16,32 @@ from random import choice
import oeqa
import oe
import bb.utils
+import bb.tinfoil
from oeqa.core.context import OETestContext, OETestContextExecutor
from oeqa.core.exception import OEQAPreRun, OEQATestNotFound
from oeqa.utils.commands import runCmd, get_bb_vars, get_test_layer
+OESELFTEST_METADATA=["run_all_tests", "run_tests", "skips", "machine", "select_tags", "exclude_tags"]
+
+def get_oeselftest_metadata(args):
+ result = {}
+ raw_args = vars(args)
+ for metadata in OESELFTEST_METADATA:
+ if metadata in raw_args:
+ result[metadata] = raw_args[metadata]
+
+ return result
+
class NonConcurrentTestSuite(unittest.TestSuite):
- def __init__(self, suite, processes, setupfunc, removefunc):
+ def __init__(self, suite, processes, setupfunc, removefunc, bb_vars):
super().__init__([suite])
self.processes = processes
self.suite = suite
self.setupfunc = setupfunc
self.removefunc = removefunc
+ self.bb_vars = bb_vars
def run(self, result):
(builddir, newbuilddir) = self.setupfunc("-st", None, self.suite)
@@ -57,8 +70,6 @@ class OESelftestTestContext(OETestContext):
def __init__(self, td=None, logger=None, machines=None, config_paths=None, newbuilddir=None, keep_builddir=None):
super(OESelftestTestContext, self).__init__(td, logger)
- self.machines = machines
- self.custommachine = None
self.config_paths = config_paths
self.newbuilddir = newbuilddir
@@ -67,10 +78,15 @@ class OESelftestTestContext(OETestContext):
else:
self.removebuilddir = removebuilddir
+ def set_variables(self, vars):
+ self.bb_vars = vars
+
def setup_builddir(self, suffix, selftestdir, suite):
+ sstatedir = self.bb_vars['SSTATE_DIR']
+
builddir = os.environ['BUILDDIR']
if not selftestdir:
- selftestdir = get_test_layer()
+ selftestdir = get_test_layer(self.bb_vars['BBLAYERS'])
if self.newbuilddir:
newbuilddir = os.path.join(self.newbuilddir, 'build' + suffix)
else:
@@ -86,16 +102,29 @@ class OESelftestTestContext(OETestContext):
oe.path.copytree(builddir + "/cache", newbuilddir + "/cache")
oe.path.copytree(selftestdir, newselftestdir)
+ subprocess.check_output("git init && git add * && git commit -a -m 'initial'", cwd=newselftestdir, shell=True)
+
+ # Tried to used bitbake-layers add/remove but it requires recipe parsing and hence is too slow
+ subprocess.check_output("sed %s/conf/bblayers.conf -i -e 's#%s#%s#g'" % (newbuilddir, selftestdir, newselftestdir), cwd=newbuilddir, shell=True)
+
+ # Relative paths in BBLAYERS only works when the new build dir share the same ascending node
+ if self.newbuilddir:
+ bblayers = subprocess.check_output("bitbake-getvar --value BBLAYERS | tail -1", cwd=builddir, shell=True, text=True)
+ if '..' in bblayers:
+ bblayers_abspath = [os.path.abspath(path) for path in bblayers.split()]
+ with open("%s/conf/bblayers.conf" % newbuilddir, "a") as f:
+ newbblayers = "# new bblayers to be used by selftest in the new build dir '%s'\n" % newbuilddir
+ newbblayers += 'BBLAYERS = "%s"\n' % ' '.join(bblayers_abspath)
+ f.write(newbblayers)
+
for e in os.environ:
if builddir + "/" in os.environ[e]:
os.environ[e] = os.environ[e].replace(builddir + "/", newbuilddir + "/")
if os.environ[e].endswith(builddir):
os.environ[e] = os.environ[e].replace(builddir, newbuilddir)
- subprocess.check_output("git init; git add *; git commit -a -m 'initial'", cwd=newselftestdir, shell=True)
-
- # Tried to used bitbake-layers add/remove but it requires recipe parsing and hence is too slow
- subprocess.check_output("sed %s/conf/bblayers.conf -i -e 's#%s#%s#g'" % (newbuilddir, selftestdir, newselftestdir), cwd=newbuilddir, shell=True)
+ # Set SSTATE_DIR to match the parent SSTATE_DIR
+ subprocess.check_output("echo 'SSTATE_DIR ?= \"%s\"' >> %s/conf/local.conf" % (sstatedir, newbuilddir), cwd=newbuilddir, shell=True)
os.chdir(newbuilddir)
@@ -124,17 +153,11 @@ class OESelftestTestContext(OETestContext):
if processes:
from oeqa.core.utils.concurrencytest import ConcurrentTestSuite
- return ConcurrentTestSuite(suites, processes, self.setup_builddir, self.removebuilddir)
+ return ConcurrentTestSuite(suites, processes, self.setup_builddir, self.removebuilddir, self.bb_vars)
else:
- return NonConcurrentTestSuite(suites, processes, self.setup_builddir, self.removebuilddir)
+ return NonConcurrentTestSuite(suites, processes, self.setup_builddir, self.removebuilddir, self.bb_vars)
def runTests(self, processes=None, machine=None, skips=[]):
- if machine:
- self.custommachine = machine
- if machine == 'random':
- self.custommachine = choice(self.machines)
- self.logger.info('Run tests with custom MACHINE set to: %s' % \
- self.custommachine)
return super(OESelftestTestContext, self).runTests(processes, skips)
def listTests(self, display_type, machine=None):
@@ -154,9 +177,6 @@ class OESelftestTestContextExecutor(OETestContextExecutor):
group.add_argument('-a', '--run-all-tests', default=False,
action="store_true", dest="run_all_tests",
help='Run all (unhidden) tests')
- group.add_argument('-R', '--skip-tests', required=False, action='store',
- nargs='+', dest="skips", default=None,
- help='Run all (unhidden) tests except the ones specified. Format should be <module>[.<class>[.<test_method>]]')
group.add_argument('-r', '--run-tests', required=False, action='store',
nargs='+', dest="run_tests", default=None,
help='Select what tests to run (modules, classes or test methods). Format should be: <module>.<class>.<test_method>')
@@ -171,12 +191,12 @@ class OESelftestTestContextExecutor(OETestContextExecutor):
action="store_true", default=False,
help='List all available tests.')
+ parser.add_argument('-R', '--skip-tests', required=False, action='store',
+ nargs='+', dest="skips", default=None,
+ help='Skip the tests specified. Format should be <module>[.<class>[.<test_method>]]')
parser.add_argument('-j', '--num-processes', dest='processes', action='store',
type=int, help="number of processes to execute in parallel with")
- parser.add_argument('--machine', required=False, choices=['random', 'all'],
- help='Run tests on different machines (random/all).')
-
parser.add_argument('-t', '--select-tag', dest="select_tags",
action='append', default=None,
help='Filter all (unhidden) tests to any that match any of the specified tag(s).')
@@ -191,20 +211,6 @@ class OESelftestTestContextExecutor(OETestContextExecutor):
parser.add_argument('-v', '--verbose', action='store_true')
parser.set_defaults(func=self.run)
- def _get_available_machines(self):
- machines = []
-
- bbpath = self.tc_kwargs['init']['td']['BBPATH'].split(':')
-
- for path in bbpath:
- found_machines = glob.glob(os.path.join(path, 'conf', 'machine', '*.conf'))
- if found_machines:
- for i in found_machines:
- # eg: '/home/<user>/poky/meta-intel/conf/machine/intel-core2-32.conf'
- machines.append(os.path.splitext(os.path.basename(i))[0])
-
- return machines
-
def _get_cases_paths(self, bbpath):
cases_paths = []
for layer in bbpath:
@@ -235,11 +241,10 @@ class OESelftestTestContextExecutor(OETestContextExecutor):
args.list_tests = 'name'
self.tc_kwargs['init']['td'] = bbvars
- self.tc_kwargs['init']['machines'] = self._get_available_machines()
builddir = os.environ.get("BUILDDIR")
self.tc_kwargs['init']['config_paths'] = {}
- self.tc_kwargs['init']['config_paths']['testlayer_path'] = get_test_layer()
+ self.tc_kwargs['init']['config_paths']['testlayer_path'] = get_test_layer(bbvars["BBLAYERS"])
self.tc_kwargs['init']['config_paths']['builddir'] = builddir
self.tc_kwargs['init']['config_paths']['localconf'] = os.path.join(builddir, "conf/local.conf")
self.tc_kwargs['init']['config_paths']['bblayers'] = os.path.join(builddir, "conf/bblayers.conf")
@@ -275,14 +280,14 @@ class OESelftestTestContextExecutor(OETestContextExecutor):
os.chdir(builddir)
if not "meta-selftest" in self.tc.td["BBLAYERS"]:
- self.tc.logger.warning("meta-selftest layer not found in BBLAYERS, adding it")
+ self.tc.logger.info("meta-selftest layer not found in BBLAYERS, adding it")
meta_selftestdir = os.path.join(
self.tc.td["BBLAYERS_FETCH_DIR"], 'meta-selftest')
if os.path.isdir(meta_selftestdir):
- runCmd("bitbake-layers add-layer %s" %meta_selftestdir)
+ runCmd("bitbake-layers add-layer %s" % meta_selftestdir)
# reload data is needed because a meta-selftest layer was add
self.tc.td = get_bb_vars()
- self.tc.config_paths['testlayer_path'] = get_test_layer()
+ self.tc.config_paths['testlayer_path'] = get_test_layer(self.tc.td["BBLAYERS"])
else:
self.tc.logger.error("could not locate meta-selftest in:\n%s" % meta_selftestdir)
raise OEQAPreRun
@@ -320,8 +325,15 @@ class OESelftestTestContextExecutor(OETestContextExecutor):
_add_layer_libs()
- self.tc.logger.info("Running bitbake -e to test the configuration is valid/parsable")
- runCmd("bitbake -e")
+ self.tc.logger.info("Checking base configuration is valid/parsable")
+
+ with bb.tinfoil.Tinfoil(tracking=True) as tinfoil:
+ tinfoil.prepare(quiet=2, config_only=True)
+ d = tinfoil.config_data
+ vars = {}
+ vars['SSTATE_DIR'] = str(d.getVar('SSTATE_DIR'))
+ vars['BBLAYERS'] = str(d.getVar('BBLAYERS'))
+ self.tc.set_variables(vars)
def get_json_result_dir(self, args):
json_result_dir = os.path.join(self.tc.td["LOG_DIR"], 'oeqa')
@@ -334,12 +346,14 @@ class OESelftestTestContextExecutor(OETestContextExecutor):
import platform
from oeqa.utils.metadata import metadata_from_bb
metadata = metadata_from_bb()
+ oeselftest_metadata = get_oeselftest_metadata(args)
configuration = {'TEST_TYPE': 'oeselftest',
'STARTTIME': args.test_start_time,
'MACHINE': self.tc.td["MACHINE"],
'HOST_DISTRO': oe.lsb.distro_identifier().replace(' ', '-'),
'HOST_NAME': metadata['hostname'],
- 'LAYERS': metadata['layers']}
+ 'LAYERS': metadata['layers'],
+ 'OESELFTEST_METADATA': oeselftest_metadata}
return configuration
def get_result_id(self, configuration):
@@ -374,37 +388,14 @@ class OESelftestTestContextExecutor(OETestContextExecutor):
rc = None
try:
- if args.machine:
- logger.info('Custom machine mode enabled. MACHINE set to %s' %
- args.machine)
-
- if args.machine == 'all':
- results = []
- for m in self.tc_kwargs['init']['machines']:
- self.tc_kwargs['run']['machine'] = m
- results.append(self._internal_run(logger, args))
-
- # XXX: the oe-selftest script only needs to know if one
- # machine run fails
- for r in results:
- rc = r
- if not r.wasSuccessful():
- break
-
- else:
- self.tc_kwargs['run']['machine'] = args.machine
- return self._internal_run(logger, args)
-
- else:
- self.tc_kwargs['run']['machine'] = args.machine
- rc = self._internal_run(logger, args)
+ rc = self._internal_run(logger, args)
finally:
config_paths = self.tc_kwargs['init']['config_paths']
output_link = os.path.join(os.path.dirname(args.output_log),
"%s-results.log" % self.name)
if os.path.lexists(output_link):
- os.remove(output_link)
+ os.unlink(output_link)
os.symlink(args.output_log, output_link)
return rc
diff --git a/meta/lib/oeqa/targetcontrol.py b/meta/lib/oeqa/targetcontrol.py
index 1fdff82889..6e8b781973 100644
--- a/meta/lib/oeqa/targetcontrol.py
+++ b/meta/lib/oeqa/targetcontrol.py
@@ -7,18 +7,14 @@
# This module is used by testimage.bbclass for setting up and controlling a target machine.
import os
-import shutil
import subprocess
import bb
-import traceback
-import sys
import logging
from oeqa.utils.sshcontrol import SSHControl
from oeqa.utils.qemurunner import QemuRunner
from oeqa.utils.qemutinyrunner import QemuTinyRunner
from oeqa.utils.dump import TargetDumper
from oeqa.utils.dump import MonitorDumper
-from oeqa.controllers.testtargetloader import TestTargetLoader
from abc import ABCMeta, abstractmethod
class BaseTarget(object, metaclass=ABCMeta):
@@ -42,7 +38,7 @@ class BaseTarget(object, metaclass=ABCMeta):
if os.path.islink(sshloglink):
os.unlink(sshloglink)
os.symlink(self.sshlog, sshloglink)
- self.logger.info("SSH log file: %s" % self.sshlog)
+ self.logger.info("SSH log file: %s" % self.sshlog)
@abstractmethod
def start(self, params=None, ssh=True, extra_bootparams=None):
@@ -107,8 +103,6 @@ class QemuTarget(BaseTarget):
self.rootfs = os.path.join(d.getVar("DEPLOY_DIR_IMAGE"), d.getVar("IMAGE_LINK_NAME") + '.' + self.image_fstype)
self.kernel = os.path.join(d.getVar("DEPLOY_DIR_IMAGE"), d.getVar("KERNEL_IMAGETYPE", False) + '-' + d.getVar('MACHINE', False) + '.bin')
self.qemulog = os.path.join(self.testdir, "qemu_boot_log.%s" % self.datetime)
- dump_target_cmds = d.getVar("testimage_dump_target")
- dump_host_cmds = d.getVar("testimage_dump_host")
dump_monitor_cmds = d.getVar("testimage_dump_monitor")
dump_dir = d.getVar("TESTIMAGE_DUMP_DIR")
if not dump_dir:
@@ -145,12 +139,10 @@ class QemuTarget(BaseTarget):
boottime = int(d.getVar("TEST_QEMUBOOT_TIMEOUT")),
use_kvm = use_kvm,
dump_dir = dump_dir,
- dump_host_cmds = d.getVar("testimage_dump_host"),
logger = logger,
tmpfsdir = d.getVar("RUNQEMU_TMPFS_DIR"),
serial_ports = len(d.getVar("SERIAL_CONSOLES").split()))
- self.target_dumper = TargetDumper(dump_target_cmds, dump_dir, self.runner)
self.monitor_dumper = MonitorDumper(dump_monitor_cmds, dump_dir, self.runner)
if (self.monitor_dumper):
self.monitor_dumper.create_dir("qmp")
@@ -163,7 +155,7 @@ class QemuTarget(BaseTarget):
os.unlink(qemuloglink)
os.symlink(self.qemulog, qemuloglink)
- self.logger.info("rootfs file: %s" % self.rootfs)
+ self.logger.info("rootfs file: %s" % self.rootfs)
self.logger.info("Qemu log file: %s" % self.qemulog)
super(QemuTarget, self).deploy()
@@ -205,7 +197,7 @@ class QemuTarget(BaseTarget):
self.server_ip = self.runner.server_ip
self.connection = SSHControl(ip=self.ip, logfile=self.sshlog)
else:
- raise RuntimError("%s - FAILED to re-start qemu - check the task log and the boot log" % self.pn)
+ raise RuntimeError("%s - FAILED to re-start qemu - check the task log and the boot log" % self.pn)
def run_serial(self, command, timeout=60):
return self.runner.run_serial(command, timeout=timeout)
diff --git a/meta/lib/oeqa/utils/__init__.py b/meta/lib/oeqa/utils/__init__.py
index 6d1ec4cb99..53bdcbf266 100644
--- a/meta/lib/oeqa/utils/__init__.py
+++ b/meta/lib/oeqa/utils/__init__.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
# Enable other layers to have modules in the same named directory
@@ -88,3 +90,10 @@ def load_test_components(logger, executor):
"_executor_class defined." % (comp_name, comp_context))
return components
+
+def get_json_result_dir(d):
+ json_result_dir = os.path.join(d.getVar("LOG_DIR"), 'oeqa')
+ custom_json_result_dir = d.getVar("OEQA_JSON_RESULT_DIR")
+ if custom_json_result_dir:
+ json_result_dir = custom_json_result_dir
+ return json_result_dir \ No newline at end of file
diff --git a/meta/lib/oeqa/utils/commands.py b/meta/lib/oeqa/utils/commands.py
index 024261410e..575e380017 100644
--- a/meta/lib/oeqa/utils/commands.py
+++ b/meta/lib/oeqa/utils/commands.py
@@ -8,11 +8,8 @@
# This module is mainly used by scripts/oe-selftest and modules under meta/oeqa/selftest
# It provides a class and methods for running commands on the host in a convienent way for tests.
-
-
import os
import sys
-import signal
import subprocess
import threading
import time
@@ -21,6 +18,7 @@ from oeqa.utils import CommandError
from oeqa.utils import ftools
import re
import contextlib
+import errno
# Export test doesn't require bb
try:
import bb
@@ -85,7 +83,7 @@ class Command(object):
except OSError as ex:
# It's not an error when the command does not consume all
# of our data. subprocess.communicate() also ignores that.
- if ex.errno != EPIPE:
+ if ex.errno != errno.EPIPE:
raise
# We write in a separate thread because then we can read
@@ -117,7 +115,7 @@ class Command(object):
else:
deadline = time.time() + self.timeout
for thread in self.threads:
- timeout = deadline - time.time()
+ timeout = deadline - time.time()
if timeout < 0:
timeout = 0
thread.join(timeout)
@@ -168,15 +166,22 @@ class Result(object):
def runCmd(command, ignore_status=False, timeout=None, assert_error=True, sync=True,
- native_sysroot=None, limit_exc_output=0, output_log=None, **options):
+ native_sysroot=None, target_sys=None, limit_exc_output=0, output_log=None, **options):
result = Result()
if native_sysroot:
- extra_paths = "%s/sbin:%s/usr/sbin:%s/usr/bin" % \
- (native_sysroot, native_sysroot, native_sysroot)
- nenv = dict(options.get('env', os.environ))
- nenv['PATH'] = extra_paths + ':' + nenv.get('PATH', '')
- options['env'] = nenv
+ new_env = dict(options.get('env', os.environ))
+ paths = new_env["PATH"].split(":")
+ paths = [
+ os.path.join(native_sysroot, "bin"),
+ os.path.join(native_sysroot, "sbin"),
+ os.path.join(native_sysroot, "usr", "bin"),
+ os.path.join(native_sysroot, "usr", "sbin"),
+ ] + paths
+ if target_sys:
+ paths = [os.path.join(native_sysroot, "usr", "bin", target_sys)] + paths
+ new_env["PATH"] = ":".join(paths)
+ options['env'] = new_env
cmd = Command(command, timeout=timeout, output_log=output_log, **options)
cmd.run()
@@ -280,8 +285,10 @@ def get_bb_vars(variables=None, target=None, postconfig=None):
def get_bb_var(var, target=None, postconfig=None):
return get_bb_vars([var], target, postconfig)[var]
-def get_test_layer():
- layers = get_bb_var("BBLAYERS").split()
+def get_test_layer(bblayers=None):
+ if bblayers is None:
+ bblayers = get_bb_var("BBLAYERS")
+ layers = bblayers.split()
testlayer = None
for l in layers:
if '~' in l:
@@ -293,6 +300,7 @@ def get_test_layer():
def create_temp_layer(templayerdir, templayername, priority=999, recipepathspec='recipes-*/*'):
os.makedirs(os.path.join(templayerdir, 'conf'))
+ corenames = get_bb_var('LAYERSERIES_CORENAMES')
with open(os.path.join(templayerdir, 'conf', 'layer.conf'), 'w') as f:
f.write('BBPATH .= ":${LAYERDIR}"\n')
f.write('BBFILES += "${LAYERDIR}/%s/*.bb \\' % recipepathspec)
@@ -301,7 +309,7 @@ def create_temp_layer(templayerdir, templayername, priority=999, recipepathspec=
f.write('BBFILE_PATTERN_%s = "^${LAYERDIR}/"\n' % templayername)
f.write('BBFILE_PRIORITY_%s = "%d"\n' % (templayername, priority))
f.write('BBFILE_PATTERN_IGNORE_EMPTY_%s = "1"\n' % templayername)
- f.write('LAYERSERIES_COMPAT_%s = "${LAYERSERIES_COMPAT_core}"\n' % templayername)
+ f.write('LAYERSERIES_COMPAT_%s = "%s"\n' % (templayername, corenames))
@contextlib.contextmanager
def runqemu(pn, ssh=True, runqemuparams='', image_fstype=None, launch_cmd=None, qemuparams=None, overrides={}, discard_writes=True):
diff --git a/meta/lib/oeqa/utils/decorators.py b/meta/lib/oeqa/utils/decorators.py
index aabf4110cb..ea90164e5e 100644
--- a/meta/lib/oeqa/utils/decorators.py
+++ b/meta/lib/oeqa/utils/decorators.py
@@ -16,91 +16,6 @@ import threading
import signal
from functools import wraps
-#get the "result" object from one of the upper frames provided that one of these upper frames is a unittest.case frame
-class getResults(object):
- def __init__(self):
- #dynamically determine the unittest.case frame and use it to get the name of the test method
- ident = threading.current_thread().ident
- upperf = sys._current_frames()[ident]
- while (upperf.f_globals['__name__'] != 'unittest.case'):
- upperf = upperf.f_back
-
- def handleList(items):
- ret = []
- # items is a list of tuples, (test, failure) or (_ErrorHandler(), Exception())
- for i in items:
- s = i[0].id()
- #Handle the _ErrorHolder objects from skipModule failures
- if "setUpModule (" in s:
- ret.append(s.replace("setUpModule (", "").replace(")",""))
- else:
- ret.append(s)
- # Append also the test without the full path
- testname = s.split('.')[-1]
- if testname:
- ret.append(testname)
- return ret
- self.faillist = handleList(upperf.f_locals['result'].failures)
- self.errorlist = handleList(upperf.f_locals['result'].errors)
- self.skiplist = handleList(upperf.f_locals['result'].skipped)
-
- def getFailList(self):
- return self.faillist
-
- def getErrorList(self):
- return self.errorlist
-
- def getSkipList(self):
- return self.skiplist
-
-class skipIfFailure(object):
-
- def __init__(self,testcase):
- self.testcase = testcase
-
- def __call__(self,f):
- @wraps(f)
- def wrapped_f(*args, **kwargs):
- res = getResults()
- if self.testcase in (res.getFailList() or res.getErrorList()):
- raise unittest.SkipTest("Testcase dependency not met: %s" % self.testcase)
- return f(*args, **kwargs)
- wrapped_f.__name__ = f.__name__
- return wrapped_f
-
-class skipIfSkipped(object):
-
- def __init__(self,testcase):
- self.testcase = testcase
-
- def __call__(self,f):
- @wraps(f)
- def wrapped_f(*args, **kwargs):
- res = getResults()
- if self.testcase in res.getSkipList():
- raise unittest.SkipTest("Testcase dependency not met: %s" % self.testcase)
- return f(*args, **kwargs)
- wrapped_f.__name__ = f.__name__
- return wrapped_f
-
-class skipUnlessPassed(object):
-
- def __init__(self,testcase):
- self.testcase = testcase
-
- def __call__(self,f):
- @wraps(f)
- def wrapped_f(*args, **kwargs):
- res = getResults()
- if self.testcase in res.getSkipList() or \
- self.testcase in res.getFailList() or \
- self.testcase in res.getErrorList():
- raise unittest.SkipTest("Testcase dependency not met: %s" % self.testcase)
- return f(*args, **kwargs)
- wrapped_f.__name__ = f.__name__
- wrapped_f._depends_on = self.testcase
- return wrapped_f
-
class testcase(object):
def __init__(self, test_case):
self.test_case = test_case
diff --git a/meta/lib/oeqa/utils/dump.py b/meta/lib/oeqa/utils/dump.py
index 95a79a571c..d4d271369f 100644
--- a/meta/lib/oeqa/utils/dump.py
+++ b/meta/lib/oeqa/utils/dump.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
@@ -49,9 +51,7 @@ class BaseDumper(object):
self.dump_dir = dump_dir
def _construct_filename(self, command):
- if isinstance(self, HostDumper):
- prefix = "host"
- elif isinstance(self, TargetDumper):
+ if isinstance(self, TargetDumper):
prefix = "target"
elif isinstance(self, MonitorDumper):
prefix = "qmp"
@@ -74,54 +74,56 @@ class BaseDumper(object):
with open(fullname, 'w') as dump_file:
dump_file.write(output)
-class HostDumper(BaseDumper):
- """ Class to get dumps from the host running the tests """
-
- def __init__(self, cmds, parent_dir):
- super(HostDumper, self).__init__(cmds, parent_dir)
-
- def dump_host(self, dump_dir=""):
- if dump_dir:
- self.dump_dir = dump_dir
- env = os.environ.copy()
- env['PATH'] = '/usr/sbin:/sbin:/usr/bin:/bin'
- env['COLUMNS'] = '9999'
- for cmd in self.cmds:
- result = runCmd(cmd, ignore_status=True, env=env)
- self._write_dump(cmd.split()[0], result.output)
-
class TargetDumper(BaseDumper):
- """ Class to get dumps from target, it only works with QemuRunner """
+ """ Class to get dumps from target, it only works with QemuRunner.
+ Will give up permanently after 5 errors from running commands over
+ serial console. This helps to end testing when target is really dead, hanging
+ or unresponsive.
+ """
def __init__(self, cmds, parent_dir, runner):
super(TargetDumper, self).__init__(cmds, parent_dir)
self.runner = runner
+ self.errors = 0
def dump_target(self, dump_dir=""):
+ if self.errors >= 5:
+ print("Too many errors when dumping data from target, assuming it is dead! Will not dump data anymore!")
+ return
if dump_dir:
self.dump_dir = dump_dir
for cmd in self.cmds:
# We can continue with the testing if serial commands fail
try:
(status, output) = self.runner.run_serial(cmd)
+ if status == 0:
+ self.errors = self.errors + 1
self._write_dump(cmd.split()[0], output)
except:
+ self.errors = self.errors + 1
print("Tried to dump info from target but "
"serial console failed")
print("Failed CMD: %s" % (cmd))
class MonitorDumper(BaseDumper):
- """ Class to get dumps via the Qemu Monitor, it only works with QemuRunner """
+ """ Class to get dumps via the Qemu Monitor, it only works with QemuRunner
+ Will stop completely if there are more than 5 errors when dumping monitor data.
+ This helps to end testing when target is really dead, hanging or unresponsive.
+ """
def __init__(self, cmds, parent_dir, runner):
super(MonitorDumper, self).__init__(cmds, parent_dir)
self.runner = runner
+ self.errors = 0
def dump_monitor(self, dump_dir=""):
if self.runner is None:
return
if dump_dir:
self.dump_dir = dump_dir
+ if self.errors >= 5:
+ print("Too many errors when dumping data from qemu monitor, assuming it is dead! Will not dump data anymore!")
+ return
for cmd in self.cmds:
cmd_name = cmd.split()[0]
try:
@@ -135,4 +137,5 @@ class MonitorDumper(BaseDumper):
output = self.runner.run_monitor(cmd_name)
self._write_dump(cmd_name, output)
except Exception as e:
+ self.errors = self.errors + 1
print("Failed to dump QMP CMD: %s with\nException: %s" % (cmd_name, e))
diff --git a/meta/lib/oeqa/utils/ftools.py b/meta/lib/oeqa/utils/ftools.py
index 3093419cc7..a50aaa84c2 100644
--- a/meta/lib/oeqa/utils/ftools.py
+++ b/meta/lib/oeqa/utils/ftools.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
diff --git a/meta/lib/oeqa/utils/gitarchive.py b/meta/lib/oeqa/utils/gitarchive.py
index 6e8040eb5c..10cb267dfa 100644
--- a/meta/lib/oeqa/utils/gitarchive.py
+++ b/meta/lib/oeqa/utils/gitarchive.py
@@ -100,9 +100,44 @@ def git_commit_data(repo, data_dir, branch, message, exclude, notes, log):
if os.path.exists(tmp_index):
os.unlink(tmp_index)
+def get_tags(repo, log, pattern=None, url=None):
+ """ Fetch remote tags from current repository
+
+ A pattern can be provided to filter returned tags list
+ An URL can be provided if local repository has no valid remote configured
+ """
+
+ base_cmd = ['ls-remote', '--refs', '--tags', '-q']
+ cmd = base_cmd.copy()
+
+ # First try to fetch tags from repository configured remote
+ cmd.append('origin')
+ if pattern:
+ cmd.append("refs/tags/"+pattern)
+ try:
+ tags_refs = repo.run_cmd(cmd)
+ tags = ["".join(d.split()[1].split('/', 2)[2:]) for d in tags_refs.splitlines()]
+ except GitError as e:
+ # If it fails, retry with repository url if one is provided
+ if url:
+ log.info("No remote repository configured, use provided url")
+ cmd = base_cmd.copy()
+ cmd.append(url)
+ if pattern:
+ cmd.append(pattern)
+ tags_refs = repo.run_cmd(cmd)
+ tags = ["".join(d.split()[1].split('/', 2)[2:]) for d in tags_refs.splitlines()]
+ else:
+ log.info("Read local tags only, some remote tags may be missed")
+ cmd = ["tag"]
+ if pattern:
+ cmd += ["-l", pattern]
+ tags = repo.run_cmd(cmd).splitlines()
+
+ return tags
def expand_tag_strings(repo, name_pattern, msg_subj_pattern, msg_body_pattern,
- keywords):
+ url, log, keywords):
"""Generate tag name and message, with support for running id number"""
keyws = keywords.copy()
# Tag number is handled specially: if not defined, we autoincrement it
@@ -116,7 +151,7 @@ def expand_tag_strings(repo, name_pattern, msg_subj_pattern, msg_body_pattern,
tag_re = tag_re.format(tag_number='(?P<tag_number>[0-9]{1,5})')
keyws['tag_number'] = 0
- for existing_tag in repo.run_cmd('tag').splitlines():
+ for existing_tag in get_tags(repo, log, url=url):
match = re.match(tag_re, existing_tag)
if match and int(match.group('tag_number')) >= keyws['tag_number']:
@@ -143,7 +178,8 @@ def gitarchive(data_dir, git_dir, no_create, bare, commit_msg_subject, commit_ms
if not no_tag and tagname:
tag_name, tag_msg = expand_tag_strings(data_repo, tagname,
tag_msg_subject,
- tag_msg_body, keywords)
+ tag_msg_body,
+ push, log, keywords)
# Commit data
commit = git_commit_data(data_repo, data_dir, branch_name,
@@ -181,7 +217,7 @@ def get_test_runs(log, repo, tag_name, **kwargs):
# Get a list of all matching tags
tag_pattern = tag_name.format(**str_fields)
- tags = repo.run_cmd(['tag', '-l', tag_pattern]).splitlines()
+ tags = get_tags(repo, log, pattern=tag_pattern)
log.debug("Found %d tags matching pattern '%s'", len(tags), tag_pattern)
# Parse undefined fields from tag names
@@ -199,6 +235,8 @@ def get_test_runs(log, repo, tag_name, **kwargs):
revs = []
for tag in tags:
m = tag_re.match(tag)
+ if not m:
+ continue
groups = m.groupdict()
revs.append([groups[f] for f in undef_fields] + [tag])
@@ -219,7 +257,15 @@ def get_test_revs(log, repo, tag_name, **kwargs):
if not commit in revs:
revs[commit] = TestedRev(commit, commit_num, [tag])
else:
- assert commit_num == revs[commit].commit_number, "Commit numbers do not match"
+ if commit_num != revs[commit].commit_number:
+ # Historically we have incorrect commit counts of '1' in the repo so fix these up
+ if int(revs[commit].commit_number) < 5:
+ tags = revs[commit].tags
+ revs[commit] = TestedRev(commit, commit_num, [tags])
+ elif int(commit_num) < 5:
+ pass
+ else:
+ sys.exit("Commit numbers for commit %s don't match (%s vs %s)" % (commit, commit_num, revs[commit].commit_number))
revs[commit].tags.append(tag)
# Return in sorted table
diff --git a/meta/lib/oeqa/utils/httpserver.py b/meta/lib/oeqa/utils/httpserver.py
index 58d3c3b3f8..80752c1377 100644
--- a/meta/lib/oeqa/utils/httpserver.py
+++ b/meta/lib/oeqa/utils/httpserver.py
@@ -1,11 +1,13 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
import http.server
+import logging
import multiprocessing
import os
-import traceback
import signal
from socketserver import ThreadingMixIn
@@ -13,20 +15,24 @@ class HTTPServer(ThreadingMixIn, http.server.HTTPServer):
def server_start(self, root_dir, logger):
os.chdir(root_dir)
+ self.logger = logger
self.serve_forever()
class HTTPRequestHandler(http.server.SimpleHTTPRequestHandler):
def log_message(self, format_str, *args):
- pass
+ self.server.logger.info(format_str, *args)
-class HTTPService(object):
+class HTTPService:
def __init__(self, root_dir, host='', port=0, logger=None):
self.root_dir = root_dir
self.host = host
self.port = port
- self.logger = logger
+ if logger:
+ self.logger = logger.getChild("HTTPService")
+ else:
+ self.logger = logging.getLogger("HTTPService")
def start(self):
if not os.path.exists(self.root_dir):
@@ -38,6 +44,12 @@ class HTTPService(object):
self.port = self.server.server_port
self.process = multiprocessing.Process(target=self.server.server_start, args=[self.root_dir, self.logger])
+ def handle_error(self, request, client_address):
+ import traceback
+ exception = traceback.format_exc()
+ self.logger.warn("Exception when handling %s: %s" % (request, exception))
+ self.server.handle_error = handle_error
+
# The signal handler from testimage.bbclass can cause deadlocks here
# if the HTTPServer is terminated before it can restore the standard
#signal behaviour
@@ -47,7 +59,7 @@ class HTTPService(object):
signal.signal(signal.SIGTERM, orig)
if self.logger:
- self.logger.info("Started HTTPService on %s:%s" % (self.host, self.port))
+ self.logger.info("Started HTTPService for %s on %s:%s" % (self.root_dir, self.host, self.port))
def stop(self):
@@ -59,3 +71,10 @@ class HTTPService(object):
if self.logger:
self.logger.info("Stopped HTTPService on %s:%s" % (self.host, self.port))
+if __name__ == "__main__":
+ import sys, logging
+
+ logger = logging.getLogger(__name__)
+ logging.basicConfig(level=logging.DEBUG)
+ httpd = HTTPService(sys.argv[1], port=8888, logger=logger)
+ httpd.start()
diff --git a/meta/lib/oeqa/utils/logparser.py b/meta/lib/oeqa/utils/logparser.py
index 879aefca33..496d9e0c90 100644
--- a/meta/lib/oeqa/utils/logparser.py
+++ b/meta/lib/oeqa/utils/logparser.py
@@ -1,8 +1,10 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
-import sys
+import enum
import os
import re
@@ -42,6 +44,8 @@ class PtestParser(object):
result = section_regex['begin'].search(line)
if result:
current_section['name'] = result.group(1)
+ if current_section['name'] not in self.results:
+ self.results[current_section['name']] = {}
continue
result = section_regex['end'].search(line)
@@ -73,9 +77,10 @@ class PtestParser(object):
for t in test_regex:
result = test_regex[t].search(line)
if result:
- if current_section['name'] not in self.results:
- self.results[current_section['name']] = {}
- self.results[current_section['name']][result.group(1).strip()] = t
+ try:
+ self.results[current_section['name']][result.group(1).strip()] = t
+ except KeyError:
+ bb.warn("Result with no section: %s - %s" % (t, result.group(1).strip()))
# Python performance for repeatedly joining long strings is poor, do it all at once at the end.
# For 2.1 million lines in a log this reduces 18 hours to 12s.
@@ -101,30 +106,48 @@ class PtestParser(object):
f.write(status + ": " + test_name + "\n")
-# ltp log parsing
-class LtpParser(object):
- def __init__(self):
- self.results = {}
- self.section = {'duration': "", 'log': ""}
-
+class LtpParser:
+ """
+ Parse the machine-readable LTP log output into a ptest-friendly data structure.
+ """
def parse(self, logfile):
- test_regex = {}
- test_regex['PASSED'] = re.compile(r"PASS")
- test_regex['FAILED'] = re.compile(r"FAIL")
- test_regex['SKIPPED'] = re.compile(r"SKIP")
-
- with open(logfile, errors='replace') as f:
+ results = {}
+ # Aaccumulate the duration here but as the log rounds quick tests down
+ # to 0 seconds this is very much a lower bound. The caller can replace
+ # the value.
+ section = {"duration": 0, "log": ""}
+
+ class LtpExitCode(enum.IntEnum):
+ # Exit codes as defined in ltp/include/tst_res_flags.h
+ TPASS = 0 # Test passed flag
+ TFAIL = 1 # Test failed flag
+ TBROK = 2 # Test broken flag
+ TWARN = 4 # Test warning flag
+ TINFO = 16 # Test information flag
+ TCONF = 32 # Test not appropriate for configuration flag
+
+ with open(logfile, errors="replace") as f:
+ # Lines look like this:
+ # tag=cfs_bandwidth01 stime=1689762564 dur=0 exit=exited stat=32 core=no cu=0 cs=0
for line in f:
- for t in test_regex:
- result = test_regex[t].search(line)
- if result:
- self.results[line.split()[0].strip()] = t
-
- for test in self.results:
- result = self.results[test]
- self.section['log'] = self.section['log'] + ("%s: %s\n" % (result.strip()[:-2], test.strip()))
+ if not line.startswith("tag="):
+ continue
- return self.results, self.section
+ values = dict(s.split("=") for s in line.strip().split())
+
+ section["duration"] += int(values["dur"])
+ exitcode = int(values["stat"])
+ if values["exit"] == "exited" and exitcode == LtpExitCode.TCONF:
+ # Exited normally with the "invalid configuration" code
+ results[values["tag"]] = "SKIPPED"
+ elif exitcode == LtpExitCode.TPASS:
+ # Successful exit
+ results[values["tag"]] = "PASSED"
+ else:
+ # Other exit
+ results[values["tag"]] = "FAILED"
+
+ return results, section
# ltp Compliance log parsing
diff --git a/meta/lib/oeqa/utils/metadata.py b/meta/lib/oeqa/utils/metadata.py
index 8013aa684d..15ec190c4a 100644
--- a/meta/lib/oeqa/utils/metadata.py
+++ b/meta/lib/oeqa/utils/metadata.py
@@ -27,9 +27,9 @@ def metadata_from_bb():
data_dict = get_bb_vars()
# Distro information
- info_dict['distro'] = {'id': data_dict['DISTRO'],
- 'version_id': data_dict['DISTRO_VERSION'],
- 'pretty_name': '%s %s' % (data_dict['DISTRO'], data_dict['DISTRO_VERSION'])}
+ info_dict['distro'] = {'id': data_dict.get('DISTRO', 'NODISTRO'),
+ 'version_id': data_dict.get('DISTRO_VERSION', 'NO_DISTRO_VERSION'),
+ 'pretty_name': '%s %s' % (data_dict.get('DISTRO', 'NODISTRO'), data_dict.get('DISTRO_VERSION', 'NO_DISTRO_VERSION'))}
# Host distro information
os_release = get_os_release()
diff --git a/meta/lib/oeqa/utils/network.py b/meta/lib/oeqa/utils/network.py
index 59d01723a1..da4ffda9a9 100644
--- a/meta/lib/oeqa/utils/network.py
+++ b/meta/lib/oeqa/utils/network.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
diff --git a/meta/lib/oeqa/utils/nfs.py b/meta/lib/oeqa/utils/nfs.py
index a37686c914..903469bfee 100644
--- a/meta/lib/oeqa/utils/nfs.py
+++ b/meta/lib/oeqa/utils/nfs.py
@@ -1,4 +1,8 @@
+#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
+#
import os
import sys
import tempfile
@@ -8,7 +12,7 @@ from oeqa.utils.commands import bitbake, get_bb_var, Command
from oeqa.utils.network import get_free_port
@contextlib.contextmanager
-def unfs_server(directory, logger = None):
+def unfs_server(directory, logger = None, udp = True):
unfs_sysroot = get_bb_var("RECIPE_SYSROOT_NATIVE", "unfs3-native")
if not os.path.exists(os.path.join(unfs_sysroot, "usr", "bin", "unfsd")):
# build native tool
@@ -22,11 +26,11 @@ def unfs_server(directory, logger = None):
exports.write("{0} (rw,no_root_squash,no_all_squash,insecure)\n".format(directory).encode())
# find some ports for the server
- nfsport, mountport = get_free_port(udp = True), get_free_port(udp = True)
+ nfsport, mountport = get_free_port(udp), get_free_port(udp)
nenv = dict(os.environ)
nenv['PATH'] = "{0}/sbin:{0}/usr/sbin:{0}/usr/bin:".format(unfs_sysroot) + nenv.get('PATH', '')
- cmd = Command(["unfsd", "-d", "-p", "-N", "-e", exports.name, "-n", str(nfsport), "-m", str(mountport)],
+ cmd = Command(["unfsd", "-d", "-p", "-e", exports.name, "-n", str(nfsport), "-m", str(mountport)],
bg = True, env = nenv, output_log = logger)
cmd.run()
yield nfsport, mountport
diff --git a/meta/lib/oeqa/utils/package_manager.py b/meta/lib/oeqa/utils/package_manager.py
index 6b67f22fdd..db799b64d6 100644
--- a/meta/lib/oeqa/utils/package_manager.py
+++ b/meta/lib/oeqa/utils/package_manager.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
diff --git a/meta/lib/oeqa/utils/postactions.py b/meta/lib/oeqa/utils/postactions.py
new file mode 100644
index 0000000000..8104400ac2
--- /dev/null
+++ b/meta/lib/oeqa/utils/postactions.py
@@ -0,0 +1,98 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+# Run a set of actions after tests. The runner provides internal data
+# dictionary as well as test context to any action to run.
+
+from oeqa.utils import get_json_result_dir
+
+def create_artifacts_directory(d, tc):
+ import shutil
+
+ local_artifacts_dir = os.path.join(get_json_result_dir(d), "artifacts")
+ if os.path.isdir(local_artifacts_dir):
+ shutil.rmtree(local_artifacts_dir)
+
+ os.makedirs(local_artifacts_dir)
+
+##################################################################
+# Host/target statistics
+##################################################################
+
+def get_target_disk_usage(d, tc):
+ output_file = os.path.join(get_json_result_dir(d), "artifacts", "target_disk_usage.txt")
+ try:
+ (status, output) = tc.target.run('df -hl')
+ with open(output_file, 'w') as f:
+ f.write(output)
+ f.write("\n")
+ except Exception as e:
+ bb.warn(f"Can not get target disk usage: {e}")
+
+def get_host_disk_usage(d, tc):
+ import subprocess
+
+ output_file = os.path.join(get_json_result_dir(d), "artifacts", "host_disk_usage.txt")
+ try:
+ with open(output_file, 'w') as f:
+ output = subprocess.run(['df', '-hl'], check=True, text=True, stdout=f, env={})
+ except Exception as e:
+ bb.warn(f"Can not get host disk usage: {e}")
+
+##################################################################
+# Artifacts retrieval
+##################################################################
+
+def get_artifacts_list(target, raw_list):
+ result = []
+ # Passed list may contains patterns in paths, expand them directly on target
+ for raw_path in raw_list.split():
+ cmd = f"for p in {raw_path}; do if [ -e $p ]; then echo $p; fi; done"
+ try:
+ status, output = target.run(cmd)
+ if status != 0 or not output:
+ raise Exception()
+ result += output.split()
+ except:
+ bb.note(f"No file/directory matching path {raw_path}")
+
+ return result
+
+def retrieve_test_artifacts(target, artifacts_list, target_dir):
+ local_artifacts_dir = os.path.join(target_dir, "artifacts")
+ for artifact_path in artifacts_list:
+ if not os.path.isabs(artifact_path):
+ bb.warn(f"{artifact_path} is not an absolute path")
+ continue
+ try:
+ dest_dir = os.path.join(local_artifacts_dir, os.path.dirname(artifact_path[1:]))
+ os.makedirs(dest_dir, exist_ok=True)
+ target.copyFrom(artifact_path, dest_dir)
+ except Exception as e:
+ bb.warn(f"Can not retrieve {artifact_path} from test target: {e}")
+
+def list_and_fetch_failed_tests_artifacts(d, tc):
+ artifacts_list = get_artifacts_list(tc.target, d.getVar("TESTIMAGE_FAILED_QA_ARTIFACTS"))
+ if not artifacts_list:
+ bb.warn("Could not load artifacts list, skip artifacts retrieval")
+ else:
+ retrieve_test_artifacts(tc.target, artifacts_list, get_json_result_dir(d))
+
+
+##################################################################
+# General post actions runner
+##################################################################
+
+def run_failed_tests_post_actions(d, tc):
+ post_actions=[
+ create_artifacts_directory,
+ list_and_fetch_failed_tests_artifacts,
+ get_target_disk_usage,
+ get_host_disk_usage
+ ]
+
+ for action in post_actions:
+ action(d, tc)
diff --git a/meta/lib/oeqa/utils/qemurunner.py b/meta/lib/oeqa/utils/qemurunner.py
index 76296d50cd..cda43aad8c 100644
--- a/meta/lib/oeqa/utils/qemurunner.py
+++ b/meta/lib/oeqa/utils/qemurunner.py
@@ -19,11 +19,11 @@ import errno
import string
import threading
import codecs
-import logging
import tempfile
-from oeqa.utils.dump import HostDumper
from collections import defaultdict
+from contextlib import contextmanager
import importlib
+import traceback
# Get Unicode non printable control chars
control_range = list(range(0,32))+list(range(127,160))
@@ -31,10 +31,19 @@ control_chars = [chr(x) for x in control_range
if chr(x) not in string.printable]
re_control_char = re.compile('[%s]' % re.escape("".join(control_chars)))
+def getOutput(o):
+ import fcntl
+ fl = fcntl.fcntl(o, fcntl.F_GETFL)
+ fcntl.fcntl(o, fcntl.F_SETFL, fl | os.O_NONBLOCK)
+ try:
+ return os.read(o.fileno(), 1000000).decode("utf-8")
+ except BlockingIOError:
+ return ""
+
class QemuRunner:
- def __init__(self, machine, rootfs, display, tmpdir, deploy_dir_image, logfile, boottime, dump_dir, dump_host_cmds,
- use_kvm, logger, use_slirp=False, serial_ports=2, boot_patterns = defaultdict(str), use_ovmf=False, workdir=None, tmpfsdir=None):
+ def __init__(self, machine, rootfs, display, tmpdir, deploy_dir_image, logfile, boottime, dump_dir, use_kvm, logger, use_slirp=False,
+ serial_ports=2, boot_patterns = defaultdict(str), use_ovmf=False, workdir=None, tmpfsdir=None):
# Popen object for runqemu
self.runqemu = None
@@ -57,6 +66,7 @@ class QemuRunner:
self.boottime = boottime
self.logged = False
self.thread = None
+ self.threadsock = None
self.use_kvm = use_kvm
self.use_ovmf = use_ovmf
self.use_slirp = use_slirp
@@ -69,7 +79,6 @@ class QemuRunner:
if not workdir:
workdir = os.getcwd()
self.qemu_pidfile = workdir + '/pidfile_' + str(os.getpid())
- self.host_dumper = HostDumper(dump_host_cmds, dump_dir)
self.monitorpipe = None
self.logger = logger
@@ -85,7 +94,7 @@ class QemuRunner:
accepted_patterns = ['search_reached_prompt', 'send_login_user', 'search_login_succeeded', 'search_cmd_finished']
default_boot_patterns = defaultdict(str)
# Default to the usual paterns used to communicate with the target
- default_boot_patterns['search_reached_prompt'] = b' login:'
+ default_boot_patterns['search_reached_prompt'] = ' login:'
default_boot_patterns['send_login_user'] = 'root\n'
default_boot_patterns['search_login_succeeded'] = r"root@[a-zA-Z0-9\-]+:~#"
default_boot_patterns['search_cmd_finished'] = r"[a-zA-Z0-9]+@[a-zA-Z0-9\-]+:~#"
@@ -99,6 +108,7 @@ class QemuRunner:
try:
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.setblocking(0)
+ sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
sock.bind(("127.0.0.1",0))
sock.listen(2)
port = sock.getsockname()[1]
@@ -109,33 +119,24 @@ class QemuRunner:
sock.close()
raise
- def log(self, msg):
- if self.logfile:
- # It is needed to sanitize the data received from qemu
- # because is possible to have control characters
- msg = msg.decode("utf-8", errors='ignore')
- msg = re_control_char.sub('', msg)
- self.msg += msg
- with codecs.open(self.logfile, "a", encoding="utf-8") as f:
- f.write("%s" % msg)
-
- def getOutput(self, o):
- import fcntl
- fl = fcntl.fcntl(o, fcntl.F_GETFL)
- fcntl.fcntl(o, fcntl.F_SETFL, fl | os.O_NONBLOCK)
- try:
- return os.read(o.fileno(), 1000000).decode("utf-8")
- except BlockingIOError:
- return ""
+ def decode_qemulog(self, todecode):
+ # Sanitize the data received from qemu as it may contain control characters
+ msg = todecode.decode("utf-8", errors='backslashreplace')
+ msg = re_control_char.sub('', msg)
+ return msg
+ def log(self, msg, extension=""):
+ if self.logfile:
+ with codecs.open(self.logfile + extension, "ab") as f:
+ f.write(msg)
+ self.msg += self.decode_qemulog(msg)
def handleSIGCHLD(self, signum, frame):
if self.runqemu and self.runqemu.poll():
if self.runqemu.returncode:
self.logger.error('runqemu exited with code %d' % self.runqemu.returncode)
- self.logger.error('Output from runqemu:\n%s' % self.getOutput(self.runqemu.stdout))
+ self.logger.error('Output from runqemu:\n%s' % getOutput(self.runqemu.stdout))
self.stop()
- self._dump_host()
def start(self, qemuparams = None, get_ip = True, extra_bootparams = None, runqemuparams='', launch_cmd=None, discard_writes=True):
env = os.environ.copy()
@@ -174,28 +175,32 @@ class QemuRunner:
launch_cmd += ' slirp'
if self.use_ovmf:
launch_cmd += ' ovmf'
- launch_cmd += ' %s %s %s' % (runqemuparams, self.machine, self.rootfs)
+ launch_cmd += ' %s %s' % (runqemuparams, self.machine)
+ if self.rootfs.endswith('.vmdk'):
+ self.logger.debug('Bypassing VMDK rootfs for runqemu')
+ else:
+ launch_cmd += ' %s' % (self.rootfs)
return self.launch(launch_cmd, qemuparams=qemuparams, get_ip=get_ip, extra_bootparams=extra_bootparams, env=env)
def launch(self, launch_cmd, get_ip = True, qemuparams = None, extra_bootparams = None, env = None):
# use logfile to determine the recipe-sysroot-native path and
# then add in the site-packages path components and add that
- # to the python sys.path so qmp.py can be found.
+ # to the python sys.path so the qmp module can be found.
python_path = os.path.dirname(os.path.dirname(self.logfile))
python_path += "/recipe-sysroot-native/usr/lib/qemu-python"
sys.path.append(python_path)
importlib.invalidate_caches()
try:
qmp = importlib.import_module("qmp")
- except:
- self.logger.error("qemurunner: qmp.py missing, please ensure it's installed")
+ except Exception as e:
+ self.logger.error("qemurunner: qmp module missing, please ensure it's installed in %s (%s)" % (python_path, str(e)))
return False
# Path relative to tmpdir used as cwd for qemu below to avoid unix socket path length issues
qmp_file = "." + next(tempfile._get_candidate_names())
qmp_param = ' -S -qmp unix:./%s,server,wait' % (qmp_file)
qmp_port = self.tmpdir + "/" + qmp_file
- # Create a second socket connection for debugging use,
+ # Create a second socket connection for debugging use,
# note this will NOT cause qemu to block waiting for the connection
qmp_file2 = "." + next(tempfile._get_candidate_names())
qmp_param += ' -qmp unix:./%s,server,nowait' % (qmp_file2)
@@ -231,7 +236,7 @@ class QemuRunner:
self.origchldhandler = signal.getsignal(signal.SIGCHLD)
signal.signal(signal.SIGCHLD, self.handleSIGCHLD)
- self.logger.debug('launchcmd=%s'%(launch_cmd))
+ self.logger.debug('launchcmd=%s' % (launch_cmd))
# FIXME: We pass in stdin=subprocess.PIPE here to work around stty
# blocking at the end of the runqemu script when using this within
@@ -268,7 +273,7 @@ class QemuRunner:
os._exit(0)
self.logger.debug("runqemu started, pid is %s" % self.runqemu.pid)
- self.logger.debug("waiting at most %s seconds for qemu pid (%s)" %
+ self.logger.debug("waiting at most %d seconds for qemu pid (%s)" %
(self.runqemutime, time.strftime("%D %H:%M:%S")))
endtime = time.time() + self.runqemutime
while not self.is_alive() and time.time() < endtime:
@@ -279,8 +284,7 @@ class QemuRunner:
if self.runqemu.returncode:
# No point waiting any longer
self.logger.warning('runqemu exited with code %d' % self.runqemu.returncode)
- self._dump_host()
- self.logger.warning("Output from runqemu:\n%s" % self.getOutput(output))
+ self.logger.warning("Output from runqemu:\n%s" % getOutput(output))
self.stop()
return False
time.sleep(0.5)
@@ -292,7 +296,7 @@ class QemuRunner:
self.logger.warning('runqemu exited with code %d' % self.runqemu.returncode)
if not self.is_alive():
- self.logger.error("Qemu pid didn't appear in %s seconds (%s)" %
+ self.logger.error("Qemu pid didn't appear in %d seconds (%s)" %
(self.runqemutime, time.strftime("%D %H:%M:%S")))
qemu_pid = None
@@ -307,8 +311,7 @@ class QemuRunner:
ps = subprocess.Popen(['ps', 'axww', '-o', 'pid,ppid,pri,ni,command '], stdout=subprocess.PIPE).communicate()[0]
processes = ps.decode("utf-8")
self.logger.debug("Running processes:\n%s" % processes)
- self._dump_host()
- op = self.getOutput(output)
+ op = getOutput(output)
self.stop()
if op:
self.logger.error("Output from runqemu:\n%s" % op)
@@ -325,7 +328,8 @@ class QemuRunner:
try:
os.chdir(os.path.dirname(qmp_port))
try:
- self.qmp = qmp.QEMUMonitorProtocol(os.path.basename(qmp_port))
+ from qmp.legacy import QEMUMonitorProtocol
+ self.qmp = QEMUMonitorProtocol(os.path.basename(qmp_port))
except OSError as msg:
self.logger.warning("Failed to initialize qemu monitor socket: %s File: %s" % (msg, msg.filename))
return False
@@ -335,22 +339,24 @@ class QemuRunner:
self.logger.debug("QMP Port does not exist waiting for it to be created")
endtime = time.time() + self.runqemutime
while not os.path.exists(qmp_port) and self.is_alive() and time.time() < endtime:
- self.logger.info("QMP port does not exist yet!")
- time.sleep(0.5)
+ self.logger.info("QMP port does not exist yet!")
+ time.sleep(0.5)
if not os.path.exists(qmp_port) and self.is_alive():
self.logger.warning("QMP Port still does not exist but QEMU is alive")
return False
try:
+ # set timeout value for all QMP calls
+ self.qmp.settimeout(self.runqemutime)
self.qmp.connect()
connect_time = time.time()
- self.logger.info("QMP connected to QEMU at %s and took %s seconds" %
+ self.logger.info("QMP connected to QEMU at %s and took %.2f seconds" %
(time.strftime("%D %H:%M:%S"),
time.time() - launch_time))
except OSError as msg:
self.logger.warning("Failed to connect qemu monitor socket: %s File: %s" % (msg, msg.filename))
return False
- except qmp.QMPConnectError as msg:
+ except qmp.legacy.QMPError as msg:
self.logger.warning("Failed to communicate with qemu monitor: %s" % (msg))
return False
finally:
@@ -378,14 +384,14 @@ class QemuRunner:
# Release the qemu process to continue running
self.run_monitor('cont')
- self.logger.info("QMP released QEMU at %s and took %s seconds from connect" %
+ self.logger.info("QMP released QEMU at %s and took %.2f seconds from connect" %
(time.strftime("%D %H:%M:%S"),
time.time() - connect_time))
# We are alive: qemu is running
- out = self.getOutput(output)
+ out = getOutput(output)
netconf = False # network configuration is not required by default
- self.logger.debug("qemu started in %s seconds - qemu procces pid is %s (%s)" %
+ self.logger.debug("qemu started in %.2f seconds - qemu procces pid is %s (%s)" %
(time.time() - (endtime - self.runqemutime),
self.qemupid, time.strftime("%D %H:%M:%S")))
cmdline = ''
@@ -397,9 +403,10 @@ class QemuRunner:
cmdline = re_control_char.sub(' ', cmdline)
try:
if self.use_slirp:
- tcp_ports = cmdline.split("hostfwd=tcp::")[1]
+ tcp_ports = cmdline.split("hostfwd=tcp:")[1]
+ ip, tcp_ports = tcp_ports.split(":")[:2]
host_port = tcp_ports[:tcp_ports.find('-')]
- self.ip = "localhost:%s" % host_port
+ self.ip = "%s:%s" % (ip, host_port)
else:
ips = re.findall(r"((?:[0-9]{1,3}\.){3}[0-9]{1,3})", cmdline.split("ip=")[1])
self.ip = ips[0]
@@ -408,7 +415,7 @@ class QemuRunner:
except (IndexError, ValueError):
# Try to get network configuration from runqemu output
match = re.match(r'.*Network configuration: (?:ip=)*([0-9.]+)::([0-9.]+):([0-9.]+).*',
- out, re.MULTILINE|re.DOTALL)
+ out, re.MULTILINE | re.DOTALL)
if match:
self.ip, self.server_ip, self.netmask = match.groups()
# network configuration is required as we couldn't get it
@@ -419,16 +426,16 @@ class QemuRunner:
self.logger.error("Couldn't get ip from qemu command line and runqemu output! "
"Here is the qemu command line used:\n%s\n"
"and output from runqemu:\n%s" % (cmdline, out))
- self._dump_host()
self.stop()
return False
self.logger.debug("Target IP: %s" % self.ip)
self.logger.debug("Server IP: %s" % self.server_ip)
+ self.thread = LoggingThread(self.log, self.threadsock, self.logger, self.runqemu.stdout)
+ self.thread.start()
+
if self.serial_ports >= 2:
- self.thread = LoggingThread(self.log, self.threadsock, self.logger)
- self.thread.start()
if not self.thread.connection_established.wait(self.boottime):
self.logger.error("Didn't receive a console connection from qemu. "
"Here is the qemu command line used:\n%s\nand "
@@ -440,7 +447,7 @@ class QemuRunner:
self.logger.debug("Waiting at most %d seconds for login banner (%s)" %
(self.boottime, time.strftime("%D %H:%M:%S")))
endtime = time.time() + self.boottime
- socklist = [self.server_socket]
+ filelist = [self.server_socket]
reachedlogin = False
stopread = False
qemusock = None
@@ -448,38 +455,48 @@ class QemuRunner:
data = b''
while time.time() < endtime and not stopread:
try:
- sread, swrite, serror = select.select(socklist, [], [], 5)
+ sread, swrite, serror = select.select(filelist, [], [], 5)
except InterruptedError:
continue
- for sock in sread:
- if sock is self.server_socket:
+ for file in sread:
+ if file is self.server_socket:
qemusock, addr = self.server_socket.accept()
- qemusock.setblocking(0)
- socklist.append(qemusock)
- socklist.remove(self.server_socket)
+ qemusock.setblocking(False)
+ filelist.append(qemusock)
+ filelist.remove(self.server_socket)
self.logger.debug("Connection from %s:%s" % addr)
else:
- data = data + sock.recv(1024)
+ # try to avoid reading only a single character at a time
+ time.sleep(0.1)
+ if hasattr(file, 'read'):
+ read = file.read(1024)
+ elif hasattr(file, 'recv'):
+ read = file.recv(1024)
+ else:
+ self.logger.error('Invalid file type: %s\n%s' % (file))
+ read = b''
+
+ self.logger.debug2('Partial boot log:\n%s' % (read.decode('utf-8', errors='backslashreplace')))
+ data = data + read
if data:
bootlog += data
- if self.serial_ports < 2:
- # this socket has mixed console/kernel data, log it to logfile
- self.log(data)
-
+ self.log(data, extension = ".2")
data = b''
- if self.boot_patterns['search_reached_prompt'] in bootlog:
+
+ if bytes(self.boot_patterns['search_reached_prompt'], 'utf-8') in bootlog:
+ self.server_socket.close()
self.server_socket = qemusock
stopread = True
reachedlogin = True
- self.logger.debug("Reached login banner in %s seconds (%s)" %
+ self.logger.debug("Reached login banner in %.2f seconds (%s)" %
(time.time() - (endtime - self.boottime),
time.strftime("%D %H:%M:%S")))
else:
# no need to check if reachedlogin unless we support multiple connections
self.logger.debug("QEMU socket disconnected before login banner reached. (%s)" %
time.strftime("%D %H:%M:%S"))
- socklist.remove(sock)
- sock.close()
+ filelist.remove(file)
+ file.close()
stopread = True
if not reachedlogin:
@@ -487,21 +504,33 @@ class QemuRunner:
self.logger.warning("Target didn't reach login banner in %d seconds (%s)" %
(self.boottime, time.strftime("%D %H:%M:%S")))
tail = lambda l: "\n".join(l.splitlines()[-25:])
- bootlog = bootlog.decode("utf-8")
- # in case bootlog is empty, use tail qemu log store at self.msg
- lines = tail(bootlog if bootlog else self.msg)
- self.logger.warning("Last 25 lines of text:\n%s" % lines)
+ bootlog = self.decode_qemulog(bootlog)
+ self.logger.warning("Last 25 lines of login console (%d):\n%s" % (len(bootlog), tail(bootlog)))
+ self.logger.warning("Last 25 lines of all logging (%d):\n%s" % (len(self.msg), tail(self.msg)))
self.logger.warning("Check full boot log: %s" % self.logfile)
- self._dump_host()
self.stop()
+ data = True
+ while data:
+ try:
+ time.sleep(1)
+ data = qemusock.recv(1024)
+ self.log(data, extension = ".2")
+ self.logger.warning('Extra log data read: %s\n' % (data.decode('utf-8', errors='backslashreplace')))
+ except Exception as e:
+ self.logger.warning('Extra log data exception %s' % repr(e))
+ data = None
+ self.thread.serial_lock.release()
return False
+ with self.thread.serial_lock:
+ self.thread.set_serialsock(self.server_socket)
+
# If we are not able to login the tests can continue
try:
(status, output) = self.run_serial(self.boot_patterns['send_login_user'], raw=True, timeout=120)
if re.search(self.boot_patterns['search_login_succeeded'], output):
self.logged = True
- self.logger.debug("Logged as root in serial console")
+ self.logger.debug("Logged in as %s in serial console" % self.boot_patterns['send_login_user'].replace("\n", ""))
if netconf:
# configure guest networking
cmd = "ifconfig eth0 %s netmask %s up\n" % (self.ip, self.netmask)
@@ -512,7 +541,7 @@ class QemuRunner:
self.logger.debug("Couldn't configure guest networking")
else:
self.logger.warning("Couldn't login into serial console"
- " as root using blank password")
+ " as %s using blank password" % self.boot_patterns['send_login_user'].replace("\n", ""))
self.logger.warning("The output:\n%s" % output)
except:
self.logger.warning("Serial console failed while trying to login")
@@ -532,14 +561,17 @@ class QemuRunner:
except OSError as e:
if e.errno != errno.ESRCH:
raise
- endtime = time.time() + self.runqemutime
- while self.runqemu.poll() is None and time.time() < endtime:
- time.sleep(1)
- if self.runqemu.poll() is None:
+ try:
+ outs, errs = self.runqemu.communicate(timeout=self.runqemutime)
+ if outs:
+ self.logger.info("Output from runqemu:\n%s", outs.decode("utf-8"))
+ if errs:
+ self.logger.info("Stderr from runqemu:\n%s", errs.decode("utf-8"))
+ except subprocess.TimeoutExpired:
self.logger.debug("Sending SIGKILL to runqemu")
os.killpg(os.getpgid(self.runqemu.pid), signal.SIGKILL)
if not self.runqemu.stdout.closed:
- self.logger.info("Output from runqemu:\n%s" % self.getOutput(self.runqemu.stdout))
+ self.logger.info("Output from runqemu:\n%s" % getOutput(self.runqemu.stdout))
self.runqemu.stdin.close()
self.runqemu.stdout.close()
self.runqemu_exited = True
@@ -598,8 +630,12 @@ class QemuRunner:
# so it's possible that the file has been created but the content is empty
pidfile_timeout = time.time() + 3
while time.time() < pidfile_timeout:
- with open(self.qemu_pidfile, 'r') as f:
- qemu_pid = f.read().strip()
+ try:
+ with open(self.qemu_pidfile, 'r') as f:
+ qemu_pid = f.read().strip()
+ except FileNotFoundError:
+ # Can be used to detect shutdown so the pid file can disappear
+ return False
# file created but not yet written contents
if not qemu_pid:
time.sleep(0.5)
@@ -612,41 +648,47 @@ class QemuRunner:
def run_monitor(self, command, args=None, timeout=60):
if hasattr(self, 'qmp') and self.qmp:
+ self.qmp.settimeout(timeout)
if args is not None:
- return self.qmp.cmd(command, args)
+ return self.qmp.cmd_raw(command, args)
else:
- return self.qmp.cmd(command)
+ return self.qmp.cmd_raw(command)
def run_serial(self, command, raw=False, timeout=60):
+ # Returns (status, output) where status is 1 on success and 0 on error
+
# We assume target system have echo to get command status
if not raw:
command = "%s; echo $?\n" % command
data = ''
status = 0
- self.server_socket.sendall(command.encode('utf-8'))
- start = time.time()
- end = start + timeout
- while True:
- now = time.time()
- if now >= end:
- data += "<<< run_serial(): command timed out after %d seconds without output >>>\r\n\r\n" % timeout
- break
- try:
- sread, _, _ = select.select([self.server_socket],[],[], end - now)
- except InterruptedError:
- continue
- if sread:
- answer = self.server_socket.recv(1024)
- if answer:
- data += answer.decode('utf-8')
- # Search the prompt to stop
- if re.search(self.boot_patterns['search_cmd_finished'], data):
- break
- else:
- if self.canexit:
- return (1, "")
- raise Exception("No data on serial console socket, connection closed?")
+ with self.thread.serial_lock:
+ self.server_socket.sendall(command.encode('utf-8'))
+ start = time.time()
+ end = start + timeout
+ while True:
+ now = time.time()
+ if now >= end:
+ data += "<<< run_serial(): command timed out after %d seconds without output >>>\r\n\r\n" % timeout
+ break
+ try:
+ sread, _, _ = select.select([self.server_socket],[],[], end - now)
+ except InterruptedError:
+ continue
+ if sread:
+ # try to avoid reading single character at a time
+ time.sleep(0.1)
+ answer = self.server_socket.recv(1024)
+ if answer:
+ data += answer.decode('utf-8')
+ # Search the prompt to stop
+ if re.search(self.boot_patterns['search_cmd_finished'], data):
+ break
+ else:
+ if self.canexit:
+ return (1, "")
+ raise Exception("No data on serial console socket, connection closed?")
if data:
if raw:
@@ -665,21 +707,27 @@ class QemuRunner:
status = 1
return (status, str(data))
-
- def _dump_host(self):
- self.host_dumper.create_dir("qemu")
- self.logger.warning("Qemu ended unexpectedly, dump data from host"
- " is in %s" % self.host_dumper.dump_dir)
- self.host_dumper.dump_host()
+@contextmanager
+def nonblocking_lock(lock):
+ locked = lock.acquire(False)
+ try:
+ yield locked
+ finally:
+ if locked:
+ lock.release()
# This class is for reading data from a socket and passing it to logfunc
# to be processed. It's completely event driven and has a straightforward
# event loop. The mechanism for stopping the thread is a simple pipe which
# will wake up the poll and allow for tearing everything down.
class LoggingThread(threading.Thread):
- def __init__(self, logfunc, sock, logger):
+ def __init__(self, logfunc, sock, logger, qemuoutput):
self.connection_established = threading.Event()
+ self.serial_lock = threading.Lock()
+
self.serversock = sock
+ self.serialsock = None
+ self.qemuoutput = qemuoutput
self.logfunc = logfunc
self.logger = logger
self.readsock = None
@@ -691,9 +739,14 @@ class LoggingThread(threading.Thread):
threading.Thread.__init__(self, target=self.threadtarget)
+ def set_serialsock(self, serialsock):
+ self.serialsock = serialsock
+
def threadtarget(self):
try:
self.eventloop()
+ except Exception as e:
+ self.logger.warning("Exception %s in logging thread" % traceback.format_exception(e))
finally:
self.teardown()
@@ -709,7 +762,8 @@ class LoggingThread(threading.Thread):
def teardown(self):
self.logger.debug("Tearing down logging thread")
- self.close_socket(self.serversock)
+ if self.serversock:
+ self.close_socket(self.serversock)
if self.readsock is not None:
self.close_socket(self.readsock)
@@ -724,27 +778,31 @@ class LoggingThread(threading.Thread):
def eventloop(self):
poll = select.poll()
event_read_mask = self.errorevents | self.readevents
- poll.register(self.serversock.fileno())
+ if self.serversock:
+ poll.register(self.serversock.fileno())
+ serial_registered = False
+ poll.register(self.qemuoutput.fileno())
poll.register(self.readpipe, event_read_mask)
breakout = False
self.running = True
self.logger.debug("Starting thread event loop")
while not breakout:
- events = poll.poll()
- for event in events:
+ events = poll.poll(2)
+ for fd, event in events:
+
# An error occurred, bail out
- if event[1] & self.errorevents:
- raise Exception(self.stringify_event(event[1]))
+ if event & self.errorevents:
+ raise Exception(self.stringify_event(event))
# Event to stop the thread
- if self.readpipe == event[0]:
+ if self.readpipe == fd:
self.logger.debug("Stop event received")
breakout = True
break
# A connection request was received
- elif self.serversock.fileno() == event[0]:
+ elif self.serversock and self.serversock.fileno() == fd:
self.logger.debug("Connection request received")
self.readsock, _ = self.serversock.accept()
self.readsock.setblocking(0)
@@ -755,15 +813,35 @@ class LoggingThread(threading.Thread):
self.connection_established.set()
# Actual data to be logged
- elif self.readsock.fileno() == event[0]:
- data = self.recv(1024)
+ elif self.readsock and self.readsock.fileno() == fd:
+ data = self.recv(1024, self.readsock)
self.logfunc(data)
+ elif self.qemuoutput.fileno() == fd:
+ data = self.qemuoutput.read()
+ self.logger.debug("Data received on qemu stdout %s" % data)
+ self.logfunc(data, ".stdout")
+ elif self.serialsock and self.serialsock.fileno() == fd:
+ if self.serial_lock.acquire(blocking=False):
+ data = self.recv(1024, self.serialsock)
+ self.logger.debug("Data received serial thread %s" % data.decode('utf-8', 'replace'))
+ self.logfunc(data, ".2")
+ self.serial_lock.release()
+ else:
+ serial_registered = False
+ poll.unregister(self.serialsock.fileno())
+
+ if not serial_registered and self.serialsock:
+ with nonblocking_lock(self.serial_lock) as l:
+ if l:
+ serial_registered = True
+ poll.register(self.serialsock.fileno(), event_read_mask)
+
# Since the socket is non-blocking make sure to honor EAGAIN
# and EWOULDBLOCK.
- def recv(self, count):
+ def recv(self, count, sock):
try:
- data = self.readsock.recv(count)
+ data = sock.recv(count)
except socket.error as e:
if e.errno == errno.EAGAIN or e.errno == errno.EWOULDBLOCK:
return b''
@@ -791,6 +869,9 @@ class LoggingThread(threading.Thread):
val = 'POLLHUP'
elif select.POLLNVAL == event:
val = 'POLLNVAL'
+ else:
+ val = "0x%x" % (event)
+
return val
def close_socket(self, sock):
diff --git a/meta/lib/oeqa/utils/subprocesstweak.py b/meta/lib/oeqa/utils/subprocesstweak.py
index b47975a4bc..3e43ed547b 100644
--- a/meta/lib/oeqa/utils/subprocesstweak.py
+++ b/meta/lib/oeqa/utils/subprocesstweak.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
import subprocess
diff --git a/meta/lib/patchtest/README.md b/meta/lib/patchtest/README.md
new file mode 100644
index 0000000000..f66613c0c1
--- /dev/null
+++ b/meta/lib/patchtest/README.md
@@ -0,0 +1,20 @@
+# patchtest selftests for openembedded-core
+
+This directory provides a test suite and selftest script for use with the
+patchtest repository: https://git.yoctoproject.org/patchtest/
+
+To setup for use:
+
+1. Clone https://git.openembedded.org/openembedded-core (this repo) and https://git.openembedded.org/bitbake/
+2. Clone https://git.yoctoproject.org/patchtest
+3. Install the necessary Python modules: in meta/lib/patchtest or the patchtest
+ repo, do `pip install -r requirements.txt`
+4. Add patchtest to PATH: `export PATH=/path/to/patchtest/repo:$PATH`
+5. Initialize the environment: `source oe-init-build-env`
+6. Add meta-selftest to bblayers.conf: `bitbake-layers add-layer
+ /path/to/meta-selftest/` (the selftests use this layer's recipes as test
+ targets)
+7. Finally, run the selftest script: `./meta/lib/patchtest/selftest/selftest`
+
+For more information on using patchtest, see the patchtest repo at
+https://git.yoctoproject.org/patchtest/.
diff --git a/meta/lib/patchtest/data.py b/meta/lib/patchtest/data.py
new file mode 100644
index 0000000000..356259921d
--- /dev/null
+++ b/meta/lib/patchtest/data.py
@@ -0,0 +1,86 @@
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+#
+# patchtestdata: module used to share command line arguments between
+# patchtest & test suite and a data store between test cases
+#
+# Copyright (C) 2016 Intel Corporation
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+# NOTE: Strictly speaking, unit test should be isolated from outside,
+# but patchtest test suites uses command line input data and
+# pretest and test test cases may use the datastore defined
+# on this module
+
+import os
+import argparse
+import collections
+import logging
+
+logger=logging.getLogger('patchtest')
+info=logger.info
+
+default_testdir = os.path.abspath(os.path.dirname(__file__) + "/tests")
+default_repodir = os.path.abspath(os.path.dirname(__file__) + "/../../..")
+
+# Data store commonly used to share values between pre and post-merge tests
+PatchTestDataStore = collections.defaultdict(str)
+
+class PatchTestInput(object):
+ """Abstract the patchtest argument parser"""
+
+ @classmethod
+ def set_namespace(cls):
+ parser = cls.get_parser()
+ parser.parse_args(namespace=cls)
+
+ @classmethod
+ def get_parser(cls):
+ parser = argparse.ArgumentParser()
+
+ target_patch_group = parser.add_mutually_exclusive_group(required=True)
+
+ target_patch_group.add_argument('--patch', metavar='PATCH', dest='patch_path',
+ help='The patch to be tested')
+
+ target_patch_group.add_argument('--directory', metavar='DIRECTORY', dest='patch_path',
+ help='The directory containing patches to be tested')
+
+ parser.add_argument('--repodir', metavar='REPO',
+ default=default_repodir,
+ help="Name of the repository where patch is merged")
+
+ parser.add_argument('--testdir', metavar='TESTDIR',
+ default=default_testdir,
+ help="Directory where test cases are located")
+
+ parser.add_argument('--top-level-directory', '-t',
+ dest='topdir',
+ default=None,
+ help="Top level directory of project (defaults to start directory)")
+
+ parser.add_argument('--pattern', '-p',
+ dest='pattern',
+ default='test*.py',
+ help="Pattern to match test files")
+
+ parser.add_argument('--base-branch', '-b',
+ dest='basebranch',
+ help="Branch name used by patchtest to branch from. By default, it uses the current one.")
+
+ parser.add_argument('--base-commit', '-c',
+ dest='basecommit',
+ help="Commit ID used by patchtest to branch from. By default, it uses HEAD.")
+
+ parser.add_argument('--debug', '-d',
+ action='store_true',
+ help='Enable debug output')
+
+ parser.add_argument('--log-results',
+ action='store_true',
+ help='Enable logging to a file matching the target patch name with ".testresult" appended')
+
+
+ return parser
+
diff --git a/meta/lib/patchtest/patch.py b/meta/lib/patchtest/patch.py
new file mode 100644
index 0000000000..baf6283873
--- /dev/null
+++ b/meta/lib/patchtest/patch.py
@@ -0,0 +1,62 @@
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+#
+# patchtestpatch: PatchTestPatch class which abstracts a patch file
+#
+# Copyright (C) 2016 Intel Corporation
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
+import logging
+import utils
+
+logger = logging.getLogger('patchtest')
+
+class PatchTestPatch(object):
+ MERGE_STATUS_INVALID = 'INVALID'
+ MERGE_STATUS_NOT_MERGED = 'NOTMERGED'
+ MERGE_STATUS_MERGED_SUCCESSFULL = 'PASS'
+ MERGE_STATUS_MERGED_FAIL = 'FAIL'
+ MERGE_STATUS = (MERGE_STATUS_INVALID,
+ MERGE_STATUS_NOT_MERGED,
+ MERGE_STATUS_MERGED_SUCCESSFULL,
+ MERGE_STATUS_MERGED_FAIL)
+
+ def __init__(self, path, forcereload=False):
+ self._path = path
+ self._forcereload = forcereload
+
+ self._contents = None
+ self._branch = None
+ self._merge_status = PatchTestPatch.MERGE_STATUS_NOT_MERGED
+
+ @property
+ def contents(self):
+ if self._forcereload or (not self._contents):
+ logger.debug('Reading %s contents' % self._path)
+ try:
+ with open(self._path, newline='') as _f:
+ self._contents = _f.read()
+ except IOError:
+ logger.warn("Reading the mbox %s failed" % self.resource)
+ return self._contents
+
+ @property
+ def path(self):
+ return self._path
+
+ @property
+ def branch(self):
+ if not self._branch:
+ self._branch = utils.get_branch(self._path)
+ return self._branch
+
+ def setmergestatus(self, status):
+ self._merge_status = status
+
+ def getmergestatus(self):
+ return self._merge_status
+
+ merge_status = property(getmergestatus, setmergestatus)
+
diff --git a/meta/lib/patchtest/repo.py b/meta/lib/patchtest/repo.py
new file mode 100644
index 0000000000..d3788f466d
--- /dev/null
+++ b/meta/lib/patchtest/repo.py
@@ -0,0 +1,174 @@
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+#
+# patchtestrepo: PatchTestRepo class used mainly to control a git repo from patchtest
+#
+# Copyright (C) 2016 Intel Corporation
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
+import os
+import utils
+import logging
+from patch import PatchTestPatch
+
+logger = logging.getLogger('patchtest')
+info=logger.info
+
+class PatchTestRepo(object):
+
+ # prefixes used for temporal branches/stashes
+ prefix = 'patchtest'
+
+ def __init__(self, patch, repodir, commit=None, branch=None):
+ self._repodir = repodir
+ self._patch = PatchTestPatch(patch)
+ self._current_branch = self._get_current_branch()
+
+ # targeted branch defined on the patch may be invalid, so make sure there
+ # is a corresponding remote branch
+ valid_patch_branch = None
+ if self._patch.branch in self.upstream_branches():
+ valid_patch_branch = self._patch.branch
+
+ # Target Branch
+ # Priority (top has highest priority):
+ # 1. branch given at cmd line
+ # 2. branch given at the patch
+ # 3. current branch
+ self._branch = branch or valid_patch_branch or self._current_branch
+
+ # Target Commit
+ # Priority (top has highest priority):
+ # 1. commit given at cmd line
+ # 2. branch given at cmd line
+ # 3. branch given at the patch
+ # 3. current HEAD
+ self._commit = self._get_commitid(commit) or \
+ self._get_commitid(branch) or \
+ self._get_commitid(valid_patch_branch) or \
+ self._get_commitid('HEAD')
+
+ self._workingbranch = "%s_%s" % (PatchTestRepo.prefix, os.getpid())
+
+ # create working branch
+ self._exec({'cmd': ['git', 'checkout', '-b', self._workingbranch, self._commit]})
+
+ self._patchmerged = False
+
+ # Check if patch can be merged using git-am
+ self._patchcanbemerged = True
+ try:
+ self._exec({'cmd': ['git', 'am', '--keep-cr'], 'input': self._patch.contents})
+ except utils.CmdException as ce:
+ self._exec({'cmd': ['git', 'am', '--abort']})
+ self._patchcanbemerged = False
+ finally:
+ # if patch was applied, remove it
+ if self._patchcanbemerged:
+ self._exec({'cmd':['git', 'reset', '--hard', self._commit]})
+
+ # for debugging purposes, print all repo parameters
+ logger.debug("Parameters")
+ logger.debug("\tRepository : %s" % self._repodir)
+ logger.debug("\tTarget Commit : %s" % self._commit)
+ logger.debug("\tTarget Branch : %s" % self._branch)
+ logger.debug("\tWorking branch : %s" % self._workingbranch)
+ logger.debug("\tPatch : %s" % self._patch)
+
+ @property
+ def patch(self):
+ return self._patch.path
+
+ @property
+ def branch(self):
+ return self._branch
+
+ @property
+ def commit(self):
+ return self._commit
+
+ @property
+ def ismerged(self):
+ return self._patchmerged
+
+ @property
+ def canbemerged(self):
+ return self._patchcanbemerged
+
+ def _exec(self, cmds):
+ _cmds = []
+ if isinstance(cmds, dict):
+ _cmds.append(cmds)
+ elif isinstance(cmds, list):
+ _cmds = cmds
+ else:
+ raise utils.CmdException({'cmd':str(cmds)})
+
+ results = []
+ cmdfailure = False
+ try:
+ results = utils.exec_cmds(_cmds, self._repodir)
+ except utils.CmdException as ce:
+ cmdfailure = True
+ raise ce
+ finally:
+ if cmdfailure:
+ for cmd in _cmds:
+ logger.debug("CMD: %s" % ' '.join(cmd['cmd']))
+ else:
+ for result in results:
+ cmd, rc, stdout, stderr = ' '.join(result['cmd']), result['returncode'], result['stdout'], result['stderr']
+ logger.debug("CMD: %s RCODE: %s STDOUT: %s STDERR: %s" % (cmd, rc, stdout, stderr))
+
+ return results
+
+ def _get_current_branch(self, commit='HEAD'):
+ cmd = {'cmd':['git', 'rev-parse', '--abbrev-ref', commit]}
+ cb = self._exec(cmd)[0]['stdout']
+ if cb == commit:
+ logger.warning('You may be detached so patchtest will checkout to master after execution')
+ cb = 'master'
+ return cb
+
+ def _get_commitid(self, commit):
+
+ if not commit:
+ return None
+
+ try:
+ cmd = {'cmd':['git', 'rev-parse', '--short', commit]}
+ return self._exec(cmd)[0]['stdout']
+ except utils.CmdException as ce:
+ # try getting the commit under any remotes
+ cmd = {'cmd':['git', 'remote']}
+ remotes = self._exec(cmd)[0]['stdout']
+ for remote in remotes.splitlines():
+ cmd = {'cmd':['git', 'rev-parse', '--short', '%s/%s' % (remote, commit)]}
+ try:
+ return self._exec(cmd)[0]['stdout']
+ except utils.CmdException:
+ pass
+
+ return None
+
+ def upstream_branches(self):
+ cmd = {'cmd':['git', 'branch', '--remotes']}
+ remote_branches = self._exec(cmd)[0]['stdout']
+
+ # just get the names, without the remote name
+ branches = set(branch.split('/')[-1] for branch in remote_branches.splitlines())
+ return branches
+
+ def merge(self):
+ if self._patchcanbemerged:
+ self._exec({'cmd': ['git', 'am', '--keep-cr'],
+ 'input': self._patch.contents,
+ 'updateenv': {'PTRESOURCE':self._patch.path}})
+ self._patchmerged = True
+
+ def clean(self):
+ self._exec({'cmd':['git', 'checkout', '%s' % self._current_branch]})
+ self._exec({'cmd':['git', 'branch', '-D', self._workingbranch]})
+ self._patchmerged = False
diff --git a/meta/lib/patchtest/requirements.txt b/meta/lib/patchtest/requirements.txt
new file mode 100644
index 0000000000..ba55ff905e
--- /dev/null
+++ b/meta/lib/patchtest/requirements.txt
@@ -0,0 +1,6 @@
+boto3
+git-pw>=2.5.0
+jinja2
+pylint
+pyparsing>=3.0.9
+unidiff
diff --git a/meta/lib/patchtest/selftest/files/TestMbox.test_author_valid.1.fail b/meta/lib/patchtest/selftest/files/TestMbox.test_author_valid.1.fail
new file mode 100644
index 0000000000..0c40cdc1b6
--- /dev/null
+++ b/meta/lib/patchtest/selftest/files/TestMbox.test_author_valid.1.fail
@@ -0,0 +1,32 @@
+From 1fbb446d1849b1208012cbdae5d85d228cdbe4a6 Mon Sep 17 00:00:00 2001
+From: First Last <first.last@example.com>
+Date: Tue, 29 Aug 2023 13:32:24 -0400
+Subject: [PATCH] selftest-hello: add a summary
+
+This patch should fail the selftests because the author address is from the
+invalid "example.com".
+
+Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
+---
+ .../recipes-test/selftest-hello/selftest-hello_1.0.bb | 3 ++-
+ 1 file changed, 2 insertions(+), 1 deletion(-)
+
+diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
+index 547587bef4..491f0a3df7 100644
+--- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
++++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
+@@ -1,3 +1,4 @@
++SUMMARY = "A cool sample"
+ DESCRIPTION = "Simple helloworld application -- selftest variant"
+ SECTION = "examples"
+ LICENSE = "MIT"
+@@ -16,4 +17,4 @@ do_install() {
+ install -m 0755 helloworld ${D}${bindir}
+ }
+
+-BBCLASSEXTEND = "native nativesdk"
+\ No newline at end of file
++BBCLASSEXTEND = "native nativesdk"
+--
+2.41.0
+
diff --git a/meta/lib/patchtest/selftest/files/TestMbox.test_author_valid.1.pass b/meta/lib/patchtest/selftest/files/TestMbox.test_author_valid.1.pass
new file mode 100644
index 0000000000..cbb8ef2cef
--- /dev/null
+++ b/meta/lib/patchtest/selftest/files/TestMbox.test_author_valid.1.pass
@@ -0,0 +1,31 @@
+From 1fbb446d1849b1208012cbdae5d85d228cdbe4a6 Mon Sep 17 00:00:00 2001
+From: First Last <first.last@address.com>
+Date: Tue, 29 Aug 2023 13:32:24 -0400
+Subject: [PATCH] selftest-hello: add a summary
+
+This patch should pass the selftests because the author address is in a valid format.
+
+Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
+---
+ .../recipes-test/selftest-hello/selftest-hello_1.0.bb | 3 ++-
+ 1 file changed, 2 insertions(+), 1 deletion(-)
+
+diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
+index 547587bef4..491f0a3df7 100644
+--- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
++++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
+@@ -1,3 +1,4 @@
++SUMMARY = "A cool sample"
+ DESCRIPTION = "Simple helloworld application -- selftest variant"
+ SECTION = "examples"
+ LICENSE = "MIT"
+@@ -16,4 +17,4 @@ do_install() {
+ install -m 0755 helloworld ${D}${bindir}
+ }
+
+-BBCLASSEXTEND = "native nativesdk"
+\ No newline at end of file
++BBCLASSEXTEND = "native nativesdk"
+--
+2.41.0
+
diff --git a/meta/lib/patchtest/selftest/files/TestMbox.test_author_valid.2.fail b/meta/lib/patchtest/selftest/files/TestMbox.test_author_valid.2.fail
new file mode 100644
index 0000000000..3e2b81bca1
--- /dev/null
+++ b/meta/lib/patchtest/selftest/files/TestMbox.test_author_valid.2.fail
@@ -0,0 +1,31 @@
+From 1fbb446d1849b1208012cbdae5d85d228cdbe4a6 Mon Sep 17 00:00:00 2001
+From: Upgrade Helper <auh@auh.yoctoproject.org>
+Date: Tue, 29 Aug 2023 13:32:24 -0400
+Subject: [PATCH] selftest-hello: add a summary
+
+This patch should fail the selftests because AUH is an invalid sender.
+
+Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
+---
+ .../recipes-test/selftest-hello/selftest-hello_1.0.bb | 3 ++-
+ 1 file changed, 2 insertions(+), 1 deletion(-)
+
+diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
+index 547587bef4..491f0a3df7 100644
+--- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
++++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
+@@ -1,3 +1,4 @@
++SUMMARY = "A cool sample"
+ DESCRIPTION = "Simple helloworld application -- selftest variant"
+ SECTION = "examples"
+ LICENSE = "MIT"
+@@ -16,4 +17,4 @@ do_install() {
+ install -m 0755 helloworld ${D}${bindir}
+ }
+
+-BBCLASSEXTEND = "native nativesdk"
+\ No newline at end of file
++BBCLASSEXTEND = "native nativesdk"
+--
+2.41.0
+
diff --git a/meta/lib/patchtest/selftest/files/TestMbox.test_author_valid.2.pass b/meta/lib/patchtest/selftest/files/TestMbox.test_author_valid.2.pass
new file mode 100644
index 0000000000..f84e1265a7
--- /dev/null
+++ b/meta/lib/patchtest/selftest/files/TestMbox.test_author_valid.2.pass
@@ -0,0 +1,31 @@
+From 1fbb446d1849b1208012cbdae5d85d228cdbe4a6 Mon Sep 17 00:00:00 2001
+From: First Last <averylongemailaddressthatishardtoread.from@address.com>
+Date: Tue, 29 Aug 2023 13:32:24 -0400
+Subject: [PATCH] selftest-hello: add a summary
+
+This patch should pass the selftests because the author address is in a valid format.
+
+Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
+---
+ .../recipes-test/selftest-hello/selftest-hello_1.0.bb | 3 ++-
+ 1 file changed, 2 insertions(+), 1 deletion(-)
+
+diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
+index 547587bef4..491f0a3df7 100644
+--- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
++++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
+@@ -1,3 +1,4 @@
++SUMMARY = "A cool sample"
+ DESCRIPTION = "Simple helloworld application -- selftest variant"
+ SECTION = "examples"
+ LICENSE = "MIT"
+@@ -16,4 +17,4 @@ do_install() {
+ install -m 0755 helloworld ${D}${bindir}
+ }
+
+-BBCLASSEXTEND = "native nativesdk"
+\ No newline at end of file
++BBCLASSEXTEND = "native nativesdk"
+--
+2.41.0
+
diff --git a/meta/lib/patchtest/selftest/files/TestMbox.test_bugzilla_entry_format.fail b/meta/lib/patchtest/selftest/files/TestMbox.test_bugzilla_entry_format.fail
new file mode 100644
index 0000000000..80f409e952
--- /dev/null
+++ b/meta/lib/patchtest/selftest/files/TestMbox.test_bugzilla_entry_format.fail
@@ -0,0 +1,25 @@
+From fdfd605e565d874502522c4b70b786c8c5aa0bad Mon Sep 17 00:00:00 2001
+From: name@somedomain.com <email@address.com>
+Date: Fri, 17 Feb 2017 16:29:21 -0600
+Subject: [PATCH] README: adds 'foo' to the header
+
+This test patch adds 'foo' to the header
+
+[YOCTO 1234]
+
+Signed-off-by: Daniela Plascencia <daniela.plascencia@linux.intel.com>
+---
+ README | 1 +
+ 1 file changed, 1 insertion(+)
+
+diff --git a/README b/README
+index 521916cd4f..cdf29dcea3 100644
+--- a/README
++++ b/README
+@@ -1,3 +1,4 @@
++**** FOO ****
+ OpenEmbedded-Core
+ =================
+
+--
+2.11.0
diff --git a/meta/lib/patchtest/selftest/files/TestMbox.test_bugzilla_entry_format.pass b/meta/lib/patchtest/selftest/files/TestMbox.test_bugzilla_entry_format.pass
new file mode 100644
index 0000000000..2648b03364
--- /dev/null
+++ b/meta/lib/patchtest/selftest/files/TestMbox.test_bugzilla_entry_format.pass
@@ -0,0 +1,25 @@
+From fdfd605e565d874502522c4b70b786c8c5aa0bad Mon Sep 17 00:00:00 2001
+From: name@somedomain.com <email@address.com>
+Date: Fri, 17 Feb 2017 16:29:21 -0600
+Subject: [PATCH] README: adds 'foo' to the header
+
+This test patch adds 'foo' to the header
+
+[YOCTO #1234]
+
+Signed-off-by: Daniela Plascencia <daniela.plascencia@linux.intel.com>
+---
+ README | 1 +
+ 1 file changed, 1 insertion(+)
+
+diff --git a/README b/README
+index 521916cd4f..cdf29dcea3 100644
+--- a/README
++++ b/README
+@@ -1,3 +1,4 @@
++**** FOO ****
+ OpenEmbedded-Core
+ =================
+
+--
+2.11.0
diff --git a/meta/lib/patchtest/selftest/files/TestMbox.test_commit_message_presence.fail b/meta/lib/patchtest/selftest/files/TestMbox.test_commit_message_presence.fail
new file mode 100644
index 0000000000..93ca0f9119
--- /dev/null
+++ b/meta/lib/patchtest/selftest/files/TestMbox.test_commit_message_presence.fail
@@ -0,0 +1,22 @@
+From 0a52a62c9430c05d22cb7f46380488f2280b69bb Mon Sep 17 00:00:00 2001
+From: Trevor Gamblin <tgamblin@baylibre.com>
+Date: Fri, 1 Sep 2023 08:56:14 -0400
+Subject: [PATCH] README.OE-Core.md: add foo
+
+Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
+---
+ README.OE-Core.md | 1 +
+ 1 file changed, 1 insertion(+)
+
+diff --git a/README.OE-Core.md b/README.OE-Core.md
+index 2f2127fb03..48464252c8 100644
+--- a/README.OE-Core.md
++++ b/README.OE-Core.md
+@@ -1,3 +1,4 @@
++** FOO **
+ OpenEmbedded-Core
+ =================
+
+--
+2.41.0
+
diff --git a/meta/lib/patchtest/selftest/files/TestMbox.test_commit_message_presence.pass b/meta/lib/patchtest/selftest/files/TestMbox.test_commit_message_presence.pass
new file mode 100644
index 0000000000..5e3dcbd58b
--- /dev/null
+++ b/meta/lib/patchtest/selftest/files/TestMbox.test_commit_message_presence.pass
@@ -0,0 +1,24 @@
+From 0a52a62c9430c05d22cb7f46380488f2280b69bb Mon Sep 17 00:00:00 2001
+From: Trevor Gamblin <tgamblin@baylibre.com>
+Date: Fri, 1 Sep 2023 08:56:14 -0400
+Subject: [PATCH] README.OE-Core.md: add foo
+
+This is a commit message
+
+Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
+---
+ README.OE-Core.md | 1 +
+ 1 file changed, 1 insertion(+)
+
+diff --git a/README.OE-Core.md b/README.OE-Core.md
+index 2f2127fb03..48464252c8 100644
+--- a/README.OE-Core.md
++++ b/README.OE-Core.md
+@@ -1,3 +1,4 @@
++** FOO **
+ OpenEmbedded-Core
+ =================
+
+--
+2.41.0
+
diff --git a/meta/lib/patchtest/selftest/files/TestMbox.test_mbox_format.1.fail b/meta/lib/patchtest/selftest/files/TestMbox.test_mbox_format.1.fail
new file mode 100644
index 0000000000..9cc4aab38a
--- /dev/null
+++ b/meta/lib/patchtest/selftest/files/TestMbox.test_mbox_format.1.fail
@@ -0,0 +1,36 @@
+From d12db4cfa913b0e7a4b5bd858d3019acc53ce426 Mon Sep 17 00:00:00 2001
+From: Trevor Gamblin <tgamblin@baylibre.com>
+Date: Wed, 30 Aug 2023 12:15:00 -0400
+Subject: [PATCH] selftest-hello: upgrade 1.0 -> 1.1
+
+This test should fail the mbox formatting test and the merge on head
+test.
+
+Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
+---
+ .../{selftest-hello_1.0.bb => selftest-hello_1.1.bb} | 3 ++-
+ 1 file changed, 2 insertions(+), 1 deletion(-)
+ rename meta-selftest/recipes-test/selftest-hello/{selftest-hello_1.0.bb => selftest-hello_1.1.bb} (88%)
+
+diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.1.bb
+similarity index 88%
+rename from meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
+rename to meta-selftest/recipes-test/selftest-hello/selftest-hello_1.1.bb
+index 547587bef4..acc388ec2c 100644
+--- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
++++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.1.bb
+@@ -1,3 +1,4 @@
+%+SUMMARY = "Hello!"
+ DESCRIPTION = "Simple helloworld application -- selftest variant"
+ SECTION = "examples"
+ LICENSE = "MIT"
+@@ -16,4 +17,4 @@ do_install() {
+ install -m 0755 helloworld ${D}${bindir}
+ }
+
+-BBCLASSEXTEND = "native nativesdk"
+\ No newline at end of file
++BBCLASSEXTEND = "native nativesdk"
+--
+2.41.0
+
diff --git a/meta/lib/patchtest/selftest/files/TestMbox.test_mbox_format.2.fail b/meta/lib/patchtest/selftest/files/TestMbox.test_mbox_format.2.fail
new file mode 100644
index 0000000000..eca1c60085
--- /dev/null
+++ b/meta/lib/patchtest/selftest/files/TestMbox.test_mbox_format.2.fail
@@ -0,0 +1,35 @@
+From d12db4cfa913b0e7a4b5bd858d3019acc53ce426 Mon Sep 17 00:00:00 2001
+From: Trevor Gamblin <tgamblin@baylibre.com>
+Date: Wed, 30 Aug 2023 12:15:00 -0400
+Subject: [PATCH] selftest-hello: upgrade 1.0 -> 1.1
+
+This test should fail the merge-on-head and mbox formatting tests.
+
+Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
+---
+ .../{selftest-hello_1.0.bb => selftest-hello_1.1.bb} | 3 ++-
+ 1 file changed, 2 insertions(+), 1 deletion(-)
+ rename meta-selftest/recipes-test/selftest-hello/{selftest-hello_1.0.bb => selftest-hello_1.1.bb} (88%)
+
+diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.1.bb
+similarity index 88%
+rename from meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
+rename to meta-selftest/recipes-test/selftest-hello/selftest-hello_1.1.bb
+index 547587bef4..acc388ec2c 100644
+--- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
++++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.1.bb
+@@ -1,3 +1,4 @@
+%+SUMMARY = "Hello!"
+ DESCRIPTION = "Simple helloworld application -- selftest variant"
+ SECTION = "examples"
+ LICENSE = "MIT"
+@@ -16,4 +17,4 @@ do_install() {
+ install -m 0755 helloworld ${D}${bindir}
+ }
+
+-BBCLASSEXTEND = "native nativesdk"
+\ No newline at end of file
++BBCLASSEXTEND = "native nativesdk"
+--
+2.41.0
+
diff --git a/meta/lib/patchtest/selftest/files/TestMbox.test_mbox_format.pass b/meta/lib/patchtest/selftest/files/TestMbox.test_mbox_format.pass
new file mode 100644
index 0000000000..33940adffc
--- /dev/null
+++ b/meta/lib/patchtest/selftest/files/TestMbox.test_mbox_format.pass
@@ -0,0 +1,33 @@
+From d12db4cfa913b0e7a4b5bd858d3019acc53ce426 Mon Sep 17 00:00:00 2001
+From: Trevor Gamblin <tgamblin@baylibre.com>
+Date: Wed, 30 Aug 2023 12:15:00 -0400
+Subject: [PATCH] selftest-hello: upgrade 1.0 -> 1.1
+
+Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
+---
+ .../{selftest-hello_1.0.bb => selftest-hello_1.1.bb} | 3 ++-
+ 1 file changed, 2 insertions(+), 1 deletion(-)
+ rename meta-selftest/recipes-test/selftest-hello/{selftest-hello_1.0.bb => selftest-hello_1.1.bb} (88%)
+
+diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.1.bb
+similarity index 88%
+rename from meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
+rename to meta-selftest/recipes-test/selftest-hello/selftest-hello_1.1.bb
+index 547587bef4..acc388ec2c 100644
+--- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
++++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.1.bb
+@@ -1,3 +1,4 @@
++SUMMARY = "Hello!"
+ DESCRIPTION = "Simple helloworld application -- selftest variant"
+ SECTION = "examples"
+ LICENSE = "MIT"
+@@ -16,4 +17,4 @@ do_install() {
+ install -m 0755 helloworld ${D}${bindir}
+ }
+
+-BBCLASSEXTEND = "native nativesdk"
+\ No newline at end of file
++BBCLASSEXTEND = "native nativesdk"
+--
+2.41.0
+
diff --git a/meta/lib/patchtest/selftest/files/TestMbox.test_series_merge_on_head.1.skip b/meta/lib/patchtest/selftest/files/TestMbox.test_series_merge_on_head.1.skip
new file mode 100644
index 0000000000..2a72457878
--- /dev/null
+++ b/meta/lib/patchtest/selftest/files/TestMbox.test_series_merge_on_head.1.skip
@@ -0,0 +1,35 @@
+From d12db4cfa913b0e7a4b5bd858d3019acc53ce426 Mon Sep 17 00:00:00 2001
+From: Trevor Gamblin <tgamblin@baylibre.com>
+Date: Wed, 30 Aug 2023 12:15:00 -0400
+Subject: [PATCH] selftest-hello: upgrade 1.0 -> 1.1
+
+This file should pass the test_series_merge_on_head test.
+
+Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
+---
+ .../{selftest-hello_1.0.bb => selftest-hello_1.1.bb} | 3 ++-
+ 1 file changed, 2 insertions(+), 1 deletion(-)
+ rename meta-selftest/recipes-test/selftest-hello/{selftest-hello_1.0.bb => selftest-hello_1.1.bb} (88%)
+
+diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.1.bb
+similarity index 88%
+rename from meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
+rename to meta-selftest/recipes-test/selftest-hello/selftest-hello_1.1.bb
+index 547587bef4..acc388ec2c 100644
+--- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
++++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.1.bb
+@@ -1,3 +1,4 @@
++SUMMARY = "Hello!"
+ DESCRIPTION = "Simple helloworld application -- selftest variant"
+ SECTION = "examples"
+ LICENSE = "MIT"
+@@ -16,4 +17,4 @@ do_install() {
+ install -m 0755 helloworld ${D}${bindir}
+ }
+
+-BBCLASSEXTEND = "native nativesdk"
+\ No newline at end of file
++BBCLASSEXTEND = "native nativesdk"
+--
+2.41.0
+
diff --git a/meta/lib/patchtest/selftest/files/TestMbox.test_series_merge_on_head.2.skip b/meta/lib/patchtest/selftest/files/TestMbox.test_series_merge_on_head.2.skip
new file mode 100644
index 0000000000..49bd1f8ede
--- /dev/null
+++ b/meta/lib/patchtest/selftest/files/TestMbox.test_series_merge_on_head.2.skip
@@ -0,0 +1,41 @@
+From 55208224f492af0ad929555ffc9b95ff1d301c5f Mon Sep 17 00:00:00 2001
+From: Trevor Gamblin <tgamblin@baylibre.com>
+Date: Thu, 17 Aug 2023 15:02:38 -0400
+Subject: [PATCH] python3-dtc: upgrade 1.6.1 -> 1.7.0
+
+Changelog: https://kernel.googlesource.com/pub/scm/utils/dtc/dtc/+log/039a99414e778332d8f9c04cbd3072e1dcc62798
+
+Remove custom PV from the recipe since the relevant functionality is in
+1.7.0:
+
+[tgamblin@megalith dtc]$ git tag --contains c001fc01a43e7a06447c06ea3d50bd60641322b8
+v1.7.0
+
+Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
+Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
+---
+ .../python/{python3-dtc_1.6.1.bb => python3-dtc_1.7.0.bb} | 3 +--
+ 1 file changed, 1 insertion(+), 2 deletions(-)
+ rename meta/recipes-devtools/python/{python3-dtc_1.6.1.bb => python3-dtc_1.7.0.bb} (92%)
+
+diff --git a/meta/recipes-devtools/python/python3-dtc_1.6.1.bb b/meta/recipes-devtools/python/python3-dtc_1.7.0.bb
+similarity index 92%
+rename from meta/recipes-devtools/python/python3-dtc_1.6.1.bb
+rename to meta/recipes-devtools/python/python3-dtc_1.7.0.bb
+index 95ab0be474..85e48d4694 100644
+--- a/meta/recipes-devtools/python/python3-dtc_1.6.1.bb
++++ b/meta/recipes-devtools/python/python3-dtc_1.7.0.bb
+@@ -14,9 +14,8 @@ UPSTREAM_CHECK_GITTAGREGEX = "v(?P<pver>\d+(\.\d+)+)"
+
+ LIC_FILES_CHKSUM = "file://pylibfdt/libfdt.i;beginline=1;endline=6;md5=afda088c974174a29108c8d80b5dce90"
+
+-SRCREV = "c001fc01a43e7a06447c06ea3d50bd60641322b8"
++SRCREV = "039a99414e778332d8f9c04cbd3072e1dcc62798"
+
+-PV = "1.6.1+git"
+ S = "${WORKDIR}/git"
+
+ PYPA_WHEEL = "${S}/dist/libfdt-1.6.2*.whl"
+--
+2.41.0
+
diff --git a/meta/lib/patchtest/selftest/files/TestMbox.test_shortlog_format.fail b/meta/lib/patchtest/selftest/files/TestMbox.test_shortlog_format.fail
new file mode 100644
index 0000000000..cdbbc61b61
--- /dev/null
+++ b/meta/lib/patchtest/selftest/files/TestMbox.test_shortlog_format.fail
@@ -0,0 +1,73 @@
+From 35ccee3cee96fb29514475279248078d88907231 Mon Sep 17 00:00:00 2001
+From: Trevor Gamblin <tgamblin@baylibre.com>
+Date: Tue, 29 Aug 2023 14:12:27 -0400
+Subject: [PATCH] selftest-hello% fix CVE-1234-56789
+
+CVE: CVE-1234-56789
+
+Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
+---
+ .../files/0001-Fix-CVE-1234-56789.patch | 27 +++++++++++++++++++
+ .../selftest-hello/selftest-hello_1.0.bb | 6 +++--
+ 2 files changed, 31 insertions(+), 2 deletions(-)
+ create mode 100644 meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch
+
+diff --git a/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch
+new file mode 100644
+index 0000000000..9219b8db62
+--- /dev/null
++++ b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch
+@@ -0,0 +1,27 @@
++From b26a31186e6ee2eb1f506d5f2f9394d327a0df2f Mon Sep 17 00:00:00 2001
++From: Trevor Gamblin <tgamblin@baylibre.com>
++Date: Tue, 29 Aug 2023 14:08:20 -0400
++Subject: [PATCH] Fix CVE-NOT-REAL
++
++CVE: CVE-1234-56789
++Upstream-Status: Backport(http://example.com/example)
++
++Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
++---
++ strlen.c | 1 +
++ 1 file changed, 1 insertion(+)
++
++diff --git a/strlen.c b/strlen.c
++index 1788f38..83d7918 100644
++--- a/strlen.c
+++++ b/strlen.c
++@@ -8,6 +8,7 @@ int main() {
++
++ printf("%d\n", str_len(string1));
++ printf("%d\n", str_len(string2));
+++ printf("CVE FIXED!!!\n");
++
++ return 0;
++ }
++--
++2.41.0
++
+diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
+index 547587bef4..76975a6729 100644
+--- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
++++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
+@@ -3,7 +3,9 @@ SECTION = "examples"
+ LICENSE = "MIT"
+ LIC_FILES_CHKSUM = "file://${COMMON_LICENSE_DIR}/MIT;md5=0835ade698e0bcf8506ecda2f7b4f302"
+
+-SRC_URI = "file://helloworld.c"
++SRC_URI = "file://helloworld.c \
++ file://CVE-1234-56789.patch \
++ "
+
+ S = "${WORKDIR}"
+
+@@ -16,4 +18,4 @@ do_install() {
+ install -m 0755 helloworld ${D}${bindir}
+ }
+
+-BBCLASSEXTEND = "native nativesdk"
+\ No newline at end of file
++BBCLASSEXTEND = "native nativesdk"
+--
+2.41.0
+
diff --git a/meta/lib/patchtest/selftest/files/TestMbox.test_shortlog_format.pass b/meta/lib/patchtest/selftest/files/TestMbox.test_shortlog_format.pass
new file mode 100644
index 0000000000..ef6017037c
--- /dev/null
+++ b/meta/lib/patchtest/selftest/files/TestMbox.test_shortlog_format.pass
@@ -0,0 +1,73 @@
+From 35ccee3cee96fb29514475279248078d88907231 Mon Sep 17 00:00:00 2001
+From: Trevor Gamblin <tgamblin@baylibre.com>
+Date: Tue, 29 Aug 2023 14:12:27 -0400
+Subject: [PATCH] selftest-hello: fix CVE-1234-56789
+
+CVE: CVE-1234-56789
+
+Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
+---
+ .../files/0001-Fix-CVE-1234-56789.patch | 27 +++++++++++++++++++
+ .../selftest-hello/selftest-hello_1.0.bb | 6 +++--
+ 2 files changed, 31 insertions(+), 2 deletions(-)
+ create mode 100644 meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch
+
+diff --git a/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch
+new file mode 100644
+index 0000000000..9219b8db62
+--- /dev/null
++++ b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch
+@@ -0,0 +1,27 @@
++From b26a31186e6ee2eb1f506d5f2f9394d327a0df2f Mon Sep 17 00:00:00 2001
++From: Trevor Gamblin <tgamblin@baylibre.com>
++Date: Tue, 29 Aug 2023 14:08:20 -0400
++Subject: [PATCH] Fix CVE-NOT-REAL
++
++CVE: CVE-1234-56789
++Upstream-Status: Backport(http://example.com/example)
++
++Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
++---
++ strlen.c | 1 +
++ 1 file changed, 1 insertion(+)
++
++diff --git a/strlen.c b/strlen.c
++index 1788f38..83d7918 100644
++--- a/strlen.c
+++++ b/strlen.c
++@@ -8,6 +8,7 @@ int main() {
++
++ printf("%d\n", str_len(string1));
++ printf("%d\n", str_len(string2));
+++ printf("CVE FIXED!!!\n");
++
++ return 0;
++ }
++--
++2.41.0
++
+diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
+index 547587bef4..76975a6729 100644
+--- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
++++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
+@@ -3,7 +3,9 @@ SECTION = "examples"
+ LICENSE = "MIT"
+ LIC_FILES_CHKSUM = "file://${COMMON_LICENSE_DIR}/MIT;md5=0835ade698e0bcf8506ecda2f7b4f302"
+
+-SRC_URI = "file://helloworld.c"
++SRC_URI = "file://helloworld.c \
++ file://CVE-1234-56789.patch \
++ "
+
+ S = "${WORKDIR}"
+
+@@ -16,4 +18,4 @@ do_install() {
+ install -m 0755 helloworld ${D}${bindir}
+ }
+
+-BBCLASSEXTEND = "native nativesdk"
+\ No newline at end of file
++BBCLASSEXTEND = "native nativesdk"
+--
+2.41.0
+
diff --git a/meta/lib/patchtest/selftest/files/TestMbox.test_shortlog_length.fail b/meta/lib/patchtest/selftest/files/TestMbox.test_shortlog_length.fail
new file mode 100644
index 0000000000..629e78540b
--- /dev/null
+++ b/meta/lib/patchtest/selftest/files/TestMbox.test_shortlog_length.fail
@@ -0,0 +1,73 @@
+From 35ccee3cee96fb29514475279248078d88907231 Mon Sep 17 00:00:00 2001
+From: Trevor Gamblin <tgamblin@baylibre.com>
+Date: Tue, 29 Aug 2023 14:12:27 -0400
+Subject: [PATCH] selftest-hello: this is a very long commit shortlog with way too many words included in it to pass the test
+
+CVE: CVE-1234-56789
+
+Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
+---
+ .../files/0001-Fix-CVE-1234-56789.patch | 27 +++++++++++++++++++
+ .../selftest-hello/selftest-hello_1.0.bb | 6 +++--
+ 2 files changed, 31 insertions(+), 2 deletions(-)
+ create mode 100644 meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch
+
+diff --git a/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch
+new file mode 100644
+index 0000000000..9219b8db62
+--- /dev/null
++++ b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch
+@@ -0,0 +1,27 @@
++From b26a31186e6ee2eb1f506d5f2f9394d327a0df2f Mon Sep 17 00:00:00 2001
++From: Trevor Gamblin <tgamblin@baylibre.com>
++Date: Tue, 29 Aug 2023 14:08:20 -0400
++Subject: [PATCH] Fix CVE-NOT-REAL
++
++CVE: CVE-1234-56789
++Upstream-Status: Backport(http://example.com/example)
++
++Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
++---
++ strlen.c | 1 +
++ 1 file changed, 1 insertion(+)
++
++diff --git a/strlen.c b/strlen.c
++index 1788f38..83d7918 100644
++--- a/strlen.c
+++++ b/strlen.c
++@@ -8,6 +8,7 @@ int main() {
++
++ printf("%d\n", str_len(string1));
++ printf("%d\n", str_len(string2));
+++ printf("CVE FIXED!!!\n");
++
++ return 0;
++ }
++--
++2.41.0
++
+diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
+index 547587bef4..76975a6729 100644
+--- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
++++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
+@@ -3,7 +3,9 @@ SECTION = "examples"
+ LICENSE = "MIT"
+ LIC_FILES_CHKSUM = "file://${COMMON_LICENSE_DIR}/MIT;md5=0835ade698e0bcf8506ecda2f7b4f302"
+
+-SRC_URI = "file://helloworld.c"
++SRC_URI = "file://helloworld.c \
++ file://0001-Fix-CVE-1234-56789.patch \
++ "
+
+ S = "${WORKDIR}"
+
+@@ -16,4 +18,4 @@ do_install() {
+ install -m 0755 helloworld ${D}${bindir}
+ }
+
+-BBCLASSEXTEND = "native nativesdk"
+\ No newline at end of file
++BBCLASSEXTEND = "native nativesdk"
+--
+2.41.0
+
diff --git a/meta/lib/patchtest/selftest/files/TestMbox.test_shortlog_length.pass b/meta/lib/patchtest/selftest/files/TestMbox.test_shortlog_length.pass
new file mode 100644
index 0000000000..ef6017037c
--- /dev/null
+++ b/meta/lib/patchtest/selftest/files/TestMbox.test_shortlog_length.pass
@@ -0,0 +1,73 @@
+From 35ccee3cee96fb29514475279248078d88907231 Mon Sep 17 00:00:00 2001
+From: Trevor Gamblin <tgamblin@baylibre.com>
+Date: Tue, 29 Aug 2023 14:12:27 -0400
+Subject: [PATCH] selftest-hello: fix CVE-1234-56789
+
+CVE: CVE-1234-56789
+
+Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
+---
+ .../files/0001-Fix-CVE-1234-56789.patch | 27 +++++++++++++++++++
+ .../selftest-hello/selftest-hello_1.0.bb | 6 +++--
+ 2 files changed, 31 insertions(+), 2 deletions(-)
+ create mode 100644 meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch
+
+diff --git a/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch
+new file mode 100644
+index 0000000000..9219b8db62
+--- /dev/null
++++ b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch
+@@ -0,0 +1,27 @@
++From b26a31186e6ee2eb1f506d5f2f9394d327a0df2f Mon Sep 17 00:00:00 2001
++From: Trevor Gamblin <tgamblin@baylibre.com>
++Date: Tue, 29 Aug 2023 14:08:20 -0400
++Subject: [PATCH] Fix CVE-NOT-REAL
++
++CVE: CVE-1234-56789
++Upstream-Status: Backport(http://example.com/example)
++
++Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
++---
++ strlen.c | 1 +
++ 1 file changed, 1 insertion(+)
++
++diff --git a/strlen.c b/strlen.c
++index 1788f38..83d7918 100644
++--- a/strlen.c
+++++ b/strlen.c
++@@ -8,6 +8,7 @@ int main() {
++
++ printf("%d\n", str_len(string1));
++ printf("%d\n", str_len(string2));
+++ printf("CVE FIXED!!!\n");
++
++ return 0;
++ }
++--
++2.41.0
++
+diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
+index 547587bef4..76975a6729 100644
+--- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
++++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
+@@ -3,7 +3,9 @@ SECTION = "examples"
+ LICENSE = "MIT"
+ LIC_FILES_CHKSUM = "file://${COMMON_LICENSE_DIR}/MIT;md5=0835ade698e0bcf8506ecda2f7b4f302"
+
+-SRC_URI = "file://helloworld.c"
++SRC_URI = "file://helloworld.c \
++ file://CVE-1234-56789.patch \
++ "
+
+ S = "${WORKDIR}"
+
+@@ -16,4 +18,4 @@ do_install() {
+ install -m 0755 helloworld ${D}${bindir}
+ }
+
+-BBCLASSEXTEND = "native nativesdk"
+\ No newline at end of file
++BBCLASSEXTEND = "native nativesdk"
+--
+2.41.0
+
diff --git a/meta/lib/patchtest/selftest/files/TestMbox.test_signed_off_by_presence.1.fail b/meta/lib/patchtest/selftest/files/TestMbox.test_signed_off_by_presence.1.fail
new file mode 100644
index 0000000000..35d92aeed7
--- /dev/null
+++ b/meta/lib/patchtest/selftest/files/TestMbox.test_signed_off_by_presence.1.fail
@@ -0,0 +1,71 @@
+From 14d72f6973270f78455a8628143f2cff90e8f41e Mon Sep 17 00:00:00 2001
+From: Trevor Gamblin <tgamblin@baylibre.com>
+Date: Tue, 29 Aug 2023 14:12:27 -0400
+Subject: [PATCH] selftest-hello: fix CVE-1234-56789
+
+CVE: CVE-1234-56789
+
+---
+ .../selftest-hello/files/CVE-1234-56789.patch | 27 +++++++++++++++++++
+ .../selftest-hello/selftest-hello_1.0.bb | 6 +++--
+ 2 files changed, 31 insertions(+), 2 deletions(-)
+ create mode 100644 meta-selftest/recipes-test/selftest-hello/files/CVE-1234-56789.patch
+
+diff --git a/meta-selftest/recipes-test/selftest-hello/files/CVE-1234-56789.patch b/meta-selftest/recipes-test/selftest-hello/files/CVE-1234-56789.patch
+new file mode 100644
+index 0000000000..869cfb6fe5
+--- /dev/null
++++ b/meta-selftest/recipes-test/selftest-hello/files/CVE-1234-56789.patch
+@@ -0,0 +1,27 @@
++From b26a31186e6ee2eb1f506d5f2f9394d327a0df2f Mon Sep 17 00:00:00 2001
++From: Trevor Gamblin <tgamblin@baylibre.com>
++Date: Tue, 29 Aug 2023 14:08:20 -0400
++Subject: [PATCH] Fix CVE-NOT-REAL
++
++CVE: CVE-1234-56789
++Upstream-Status: Backport(http://example.com/example)
++
++Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
++---
++ strlen.c | 1 +
++ 1 file changed, 1 insertion(+)
++
++diff --git a/strlen.c b/strlen.c
++index 1788f38..83d7918 100644
++--- a/strlen.c
+++++ b/strlen.c
++@@ -8,6 +8,7 @@ int main() {
++
++ printf("%d\n", str_len(string1));
++ printf("%d\n", str_len(string2));
+++ printf("CVE FIXED!!!\n");
++
++ return 0;
++ }
++--
++2.41.0
+diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
+index 547587bef4..76975a6729 100644
+--- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
++++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
+@@ -3,7 +3,9 @@ SECTION = "examples"
+ LICENSE = "MIT"
+ LIC_FILES_CHKSUM = "file://${COMMON_LICENSE_DIR}/MIT;md5=0835ade698e0bcf8506ecda2f7b4f302"
+
+-SRC_URI = "file://helloworld.c"
++SRC_URI = "file://helloworld.c \
++ file://CVE-1234-56789.patch \
++ "
+
+ S = "${WORKDIR}"
+
+@@ -16,4 +18,4 @@ do_install() {
+ install -m 0755 helloworld ${D}${bindir}
+ }
+
+-BBCLASSEXTEND = "native nativesdk"
+\ No newline at end of file
++BBCLASSEXTEND = "native nativesdk"
+--
+2.41.0
+
diff --git a/meta/lib/patchtest/selftest/files/TestMbox.test_signed_off_by_presence.2.fail b/meta/lib/patchtest/selftest/files/TestMbox.test_signed_off_by_presence.2.fail
new file mode 100644
index 0000000000..68f38dee06
--- /dev/null
+++ b/meta/lib/patchtest/selftest/files/TestMbox.test_signed_off_by_presence.2.fail
@@ -0,0 +1,72 @@
+From 14d72f6973270f78455a8628143f2cff90e8f41e Mon Sep 17 00:00:00 2001
+From: Trevor Gamblin <tgamblin@baylibre.com>
+Date: Tue, 29 Aug 2023 14:12:27 -0400
+Subject: [PATCH] selftest-hello: fix CVE-1234-56789
+
+CVE: CVE-1234-56789
+
+Approved: Trevor Gamblin <tgamblin@baylibre.com>
+---
+ .../selftest-hello/files/CVE-1234-56789.patch | 27 +++++++++++++++++++
+ .../selftest-hello/selftest-hello_1.0.bb | 6 +++--
+ 2 files changed, 31 insertions(+), 2 deletions(-)
+ create mode 100644 meta-selftest/recipes-test/selftest-hello/files/CVE-1234-56789.patch
+
+diff --git a/meta-selftest/recipes-test/selftest-hello/files/CVE-1234-56789.patch b/meta-selftest/recipes-test/selftest-hello/files/CVE-1234-56789.patch
+new file mode 100644
+index 0000000000..869cfb6fe5
+--- /dev/null
++++ b/meta-selftest/recipes-test/selftest-hello/files/CVE-1234-56789.patch
+@@ -0,0 +1,27 @@
++From b26a31186e6ee2eb1f506d5f2f9394d327a0df2f Mon Sep 17 00:00:00 2001
++From: Trevor Gamblin <tgamblin@baylibre.com>
++Date: Tue, 29 Aug 2023 14:08:20 -0400
++Subject: [PATCH] Fix CVE-NOT-REAL
++
++CVE: CVE-1234-56789
++Upstream-Status: Backport(http://example.com/example)
++
++Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
++---
++ strlen.c | 1 +
++ 1 file changed, 1 insertion(+)
++
++diff --git a/strlen.c b/strlen.c
++index 1788f38..83d7918 100644
++--- a/strlen.c
+++++ b/strlen.c
++@@ -8,6 +8,7 @@ int main() {
++
++ printf("%d\n", str_len(string1));
++ printf("%d\n", str_len(string2));
+++ printf("CVE FIXED!!!\n");
++
++ return 0;
++ }
++--
++2.41.0
+diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
+index 547587bef4..76975a6729 100644
+--- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
++++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
+@@ -3,7 +3,9 @@ SECTION = "examples"
+ LICENSE = "MIT"
+ LIC_FILES_CHKSUM = "file://${COMMON_LICENSE_DIR}/MIT;md5=0835ade698e0bcf8506ecda2f7b4f302"
+
+-SRC_URI = "file://helloworld.c"
++SRC_URI = "file://helloworld.c \
++ file://CVE-1234-56789.patch \
++ "
+
+ S = "${WORKDIR}"
+
+@@ -16,4 +18,4 @@ do_install() {
+ install -m 0755 helloworld ${D}${bindir}
+ }
+
+-BBCLASSEXTEND = "native nativesdk"
+\ No newline at end of file
++BBCLASSEXTEND = "native nativesdk"
+--
+2.41.0
+
diff --git a/meta/lib/patchtest/selftest/files/TestMbox.test_signed_off_by_presence.pass b/meta/lib/patchtest/selftest/files/TestMbox.test_signed_off_by_presence.pass
new file mode 100644
index 0000000000..ea34c76f0d
--- /dev/null
+++ b/meta/lib/patchtest/selftest/files/TestMbox.test_signed_off_by_presence.pass
@@ -0,0 +1,72 @@
+From 14d72f6973270f78455a8628143f2cff90e8f41e Mon Sep 17 00:00:00 2001
+From: Trevor Gamblin <tgamblin@baylibre.com>
+Date: Tue, 29 Aug 2023 14:12:27 -0400
+Subject: [PATCH] selftest-hello: fix CVE-1234-56789
+
+CVE: CVE-1234-56789
+
+Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
+---
+ .../selftest-hello/files/CVE-1234-56789.patch | 27 +++++++++++++++++++
+ .../selftest-hello/selftest-hello_1.0.bb | 6 +++--
+ 2 files changed, 31 insertions(+), 2 deletions(-)
+ create mode 100644 meta-selftest/recipes-test/selftest-hello/files/CVE-1234-56789.patch
+
+diff --git a/meta-selftest/recipes-test/selftest-hello/files/CVE-1234-56789.patch b/meta-selftest/recipes-test/selftest-hello/files/CVE-1234-56789.patch
+new file mode 100644
+index 0000000000..869cfb6fe5
+--- /dev/null
++++ b/meta-selftest/recipes-test/selftest-hello/files/CVE-1234-56789.patch
+@@ -0,0 +1,27 @@
++From b26a31186e6ee2eb1f506d5f2f9394d327a0df2f Mon Sep 17 00:00:00 2001
++From: Trevor Gamblin <tgamblin@baylibre.com>
++Date: Tue, 29 Aug 2023 14:08:20 -0400
++Subject: [PATCH] Fix CVE-NOT-REAL
++
++CVE: CVE-1234-56789
++Upstream-Status: Backport(http://example.com/example)
++
++Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
++---
++ strlen.c | 1 +
++ 1 file changed, 1 insertion(+)
++
++diff --git a/strlen.c b/strlen.c
++index 1788f38..83d7918 100644
++--- a/strlen.c
+++++ b/strlen.c
++@@ -8,6 +8,7 @@ int main() {
++
++ printf("%d\n", str_len(string1));
++ printf("%d\n", str_len(string2));
+++ printf("CVE FIXED!!!\n");
++
++ return 0;
++ }
++--
++2.41.0
+diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
+index 547587bef4..76975a6729 100644
+--- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
++++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
+@@ -3,7 +3,9 @@ SECTION = "examples"
+ LICENSE = "MIT"
+ LIC_FILES_CHKSUM = "file://${COMMON_LICENSE_DIR}/MIT;md5=0835ade698e0bcf8506ecda2f7b4f302"
+
+-SRC_URI = "file://helloworld.c"
++SRC_URI = "file://helloworld.c \
++ file://CVE-1234-56789.patch \
++ "
+
+ S = "${WORKDIR}"
+
+@@ -16,4 +18,4 @@ do_install() {
+ install -m 0755 helloworld ${D}${bindir}
+ }
+
+-BBCLASSEXTEND = "native nativesdk"
+\ No newline at end of file
++BBCLASSEXTEND = "native nativesdk"
+--
+2.41.0
+
diff --git a/meta/lib/patchtest/selftest/files/TestMetadata.test_cve_check_ignore.fail b/meta/lib/patchtest/selftest/files/TestMetadata.test_cve_check_ignore.fail
new file mode 100644
index 0000000000..3574463ade
--- /dev/null
+++ b/meta/lib/patchtest/selftest/files/TestMetadata.test_cve_check_ignore.fail
@@ -0,0 +1,30 @@
+From c4ca86b9cca3643097db0328e2f34dccffbba309 Mon Sep 17 00:00:00 2001
+From: =?UTF-8?q?Simone=20Wei=C3=9F?= <simone.p.weiss@posteo.com>
+Date: Sat, 10 Feb 2024 13:18:44 +0100
+Subject: [PATCH] selftest-hello: add CVE_CHECK_IGNORE
+MIME-Version: 1.0
+Content-Type: text/plain; charset=UTF-8
+Content-Transfer-Encoding: 8bit
+
+This should fail the test_cve_tag_format selftest.
+
+Signed-off-by: Simone Weiß <simone.p.weiss@posteo.com>
+---
+ .../recipes-test/selftest-hello/selftest-hello_1.0.bb | 3 ++-
+ 1 file changed, 2 insertions(+), 1 deletion(-)
+
+diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
+index 547587bef4..3ef9b87c34 100644
+--- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
++++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
+@@ -16,4 +16,5 @@ do_install() {
+ install -m 0755 helloworld ${D}${bindir}
+ }
+
+-BBCLASSEXTEND = "native nativesdk"
+\ No newline at end of file
++CVE_CHECK_IGNORE = "CVE-2024-12345"
++BBCLASSEXTEND = "native nativesdk"
+--
+2.39.2
+
diff --git a/meta/lib/patchtest/selftest/files/TestMetadata.test_cve_check_ignore.pass b/meta/lib/patchtest/selftest/files/TestMetadata.test_cve_check_ignore.pass
new file mode 100644
index 0000000000..10f942a6eb
--- /dev/null
+++ b/meta/lib/patchtest/selftest/files/TestMetadata.test_cve_check_ignore.pass
@@ -0,0 +1,31 @@
+From 7d4d3fee0c7111830ee9b2b049ae3ce265b26030 Mon Sep 17 00:00:00 2001
+From: =?UTF-8?q?Simone=20Wei=C3=9F?= <simone.p.weiss@posteo.com>
+Date: Sat, 10 Feb 2024 13:23:56 +0100
+Subject: [PATCH] selftest-hello: add CVE_STATUS
+MIME-Version: 1.0
+Content-Type: text/plain; charset=UTF-8
+Content-Transfer-Encoding: 8bit
+
+This should pass the test_cve_tag_format selftest.
+
+Signed-off-by: Simone Weiß <simone.p.weiss@posteo.com>
+---
+ .../recipes-test/selftest-hello/selftest-hello_1.0.bb | 4 +++-
+ 1 file changed, 3 insertions(+), 1 deletion(-)
+
+diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
+index 547587bef4..9908b3b417 100644
+--- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
++++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
+@@ -16,4 +16,6 @@ do_install() {
+ install -m 0755 helloworld ${D}${bindir}
+ }
+
+-BBCLASSEXTEND = "native nativesdk"
+\ No newline at end of file
++CVE_STATUS[CVE-2024-12345] = "not-applicable-platform: Issue only applies on Windows"
++
++BBCLASSEXTEND = "native nativesdk"
+--
+2.39.2
+
diff --git a/meta/lib/patchtest/selftest/files/TestMetadata.test_lic_files_chksum_modified_not_mentioned.fail b/meta/lib/patchtest/selftest/files/TestMetadata.test_lic_files_chksum_modified_not_mentioned.fail
new file mode 100644
index 0000000000..ab6c52c374
--- /dev/null
+++ b/meta/lib/patchtest/selftest/files/TestMetadata.test_lic_files_chksum_modified_not_mentioned.fail
@@ -0,0 +1,37 @@
+From f89919ea86d38404dd621521680a0162367bb965 Mon Sep 17 00:00:00 2001
+From: Trevor Gamblin <tgamblin@baylibre.com>
+Date: Wed, 6 Sep 2023 09:09:27 -0400
+Subject: [PATCH] selftest-hello: update LIC_FILES_CHKSUM
+
+This test should fail the
+test_metadata_lic_files_chksum.LicFilesChkSum.test_lic_files_chksum_modified_not_mentioned
+test.
+
+Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
+---
+ .../recipes-test/selftest-hello/selftest-hello_1.0.bb | 4 ++--
+ 1 file changed, 2 insertions(+), 2 deletions(-)
+
+diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
+index 547587bef4..65dda40aba 100644
+--- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
++++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
+@@ -1,7 +1,7 @@
+ DESCRIPTION = "Simple helloworld application -- selftest variant"
+ SECTION = "examples"
+ LICENSE = "MIT"
+-LIC_FILES_CHKSUM = "file://${COMMON_LICENSE_DIR}/MIT;md5=0835ade698e0bcf8506ecda2f7b4f302"
++LIC_FILES_CHKSUM = "file://${COMMON_LICENSE_DIR}/MIT;md5=0835ade698e0bcf8506ecda2f7b4f303"
+
+ SRC_URI = "file://helloworld.c"
+
+@@ -16,4 +16,4 @@ do_install() {
+ install -m 0755 helloworld ${D}${bindir}
+ }
+
+-BBCLASSEXTEND = "native nativesdk"
+\ No newline at end of file
++BBCLASSEXTEND = "native nativesdk"
+--
+2.41.0
+
diff --git a/meta/lib/patchtest/selftest/files/TestMetadata.test_lic_files_chksum_modified_not_mentioned.pass b/meta/lib/patchtest/selftest/files/TestMetadata.test_lic_files_chksum_modified_not_mentioned.pass
new file mode 100644
index 0000000000..99d9f144da
--- /dev/null
+++ b/meta/lib/patchtest/selftest/files/TestMetadata.test_lic_files_chksum_modified_not_mentioned.pass
@@ -0,0 +1,39 @@
+From f89919ea86d38404dd621521680a0162367bb965 Mon Sep 17 00:00:00 2001
+From: Trevor Gamblin <tgamblin@baylibre.com>
+Date: Wed, 6 Sep 2023 09:09:27 -0400
+Subject: [PATCH] selftest-hello: update LIC_FILES_CHKSUM
+
+License-Update: Fix checksum
+
+This test should pass the
+test_metadata_lic_files_chksum.LicFilesChkSum.test_lic_files_chksum_modified_not_mentioned
+test.
+
+Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
+---
+ .../recipes-test/selftest-hello/selftest-hello_1.0.bb | 4 ++--
+ 1 file changed, 2 insertions(+), 2 deletions(-)
+
+diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
+index 547587bef4..65dda40aba 100644
+--- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
++++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
+@@ -1,7 +1,7 @@
+ DESCRIPTION = "Simple helloworld application -- selftest variant"
+ SECTION = "examples"
+ LICENSE = "MIT"
+-LIC_FILES_CHKSUM = "file://${COMMON_LICENSE_DIR}/MIT;md5=0835ade698e0bcf8506ecda2f7b4f302"
++LIC_FILES_CHKSUM = "file://${COMMON_LICENSE_DIR}/MIT;md5=0835ade698e0bcf8506ecda2f7b4f303"
+
+ SRC_URI = "file://helloworld.c"
+
+@@ -16,4 +16,4 @@ do_install() {
+ install -m 0755 helloworld ${D}${bindir}
+ }
+
+-BBCLASSEXTEND = "native nativesdk"
+\ No newline at end of file
++BBCLASSEXTEND = "native nativesdk"
+--
+2.41.0
+
diff --git a/meta/lib/patchtest/selftest/files/TestMetadata.test_lic_files_chksum_presence.fail b/meta/lib/patchtest/selftest/files/TestMetadata.test_lic_files_chksum_presence.fail
new file mode 100644
index 0000000000..e14d644bb2
--- /dev/null
+++ b/meta/lib/patchtest/selftest/files/TestMetadata.test_lic_files_chksum_presence.fail
@@ -0,0 +1,53 @@
+From 66430e7c6fbd5187b66560909a510e136fed91c0 Mon Sep 17 00:00:00 2001
+From: Daniela Plascencia <daniela.plascencia@linux.intel.com>
+Date: Thu, 23 Feb 2017 10:34:27 -0600
+Subject: [PATCH] meta: adding hello-yocto recipe
+
+This is a sample recipe
+
+Signed-off-by: Daniela Plascencia <daniela.plascencia@linux.intel.com>
+---
+ .../hello-world/hello-world/hello_world.c | 5 +++++
+ .../hello-world/hello-world_1.0.bb | 18 ++++++++++++++++++
+ 2 files changed, 23 insertions(+)
+ create mode 100644 meta/recipes-devtools/hello-world/hello-world/hello_world.c
+ create mode 100644 meta/recipes-devtools/hello-world/hello-world_1.0.bb
+
+diff --git a/meta/recipes-devtools/hello-world/hello-world/hello_world.c b/meta/recipes-devtools/hello-world/hello-world/hello_world.c
+new file mode 100644
+index 0000000000..0d59f57d4c
+--- /dev/null
++++ b/meta/recipes-devtools/hello-world/hello-world/hello_world.c
+@@ -0,0 +1,5 @@
++#include <stdio.h>
++
++int main(){
++ printf("Hello World\n");
++}
+diff --git a/meta/recipes-devtools/hello-world/hello-world_1.0.bb b/meta/recipes-devtools/hello-world/hello-world_1.0.bb
+new file mode 100644
+index 0000000000..3c990c108a
+--- /dev/null
++++ b/meta/recipes-devtools/hello-world/hello-world_1.0.bb
+@@ -0,0 +1,18 @@
++SUMMARY = "This is a sample summary"
++DESCRIPTION = "This is a sample description"
++HOMEPAGE = "https://sample.com/this-is-a-sample"
++LICENSE = "MIT"
++
++SRC_URI += "file://hello_world.c"
++
++SRC_URI[md5sum] = "4ee21e9dcc9b5b6012c23038734e1632"
++SRC_URI[sha256sum] = "edef2bbde0fbf0d88232782a0eded323f483a0519d6fde9a3b1809056fd35f3e"
++
++do_compile(){
++ ${CC} -o hello_world ../hello_world.c
++}
++
++do_install(){
++ install -d ${D}${bindir}
++ install -m +x hello_world ${D}${bindir}/hello_world
++}
+--
+2.41.0
+
diff --git a/meta/lib/patchtest/selftest/files/TestMetadata.test_lic_files_chksum_presence.pass b/meta/lib/patchtest/selftest/files/TestMetadata.test_lic_files_chksum_presence.pass
new file mode 100644
index 0000000000..b8da16dfe5
--- /dev/null
+++ b/meta/lib/patchtest/selftest/files/TestMetadata.test_lic_files_chksum_presence.pass
@@ -0,0 +1,54 @@
+From 5144d2ba1aa763312c047dd5f8901368cff79da6 Mon Sep 17 00:00:00 2001
+From: Daniela Plascencia <daniela.plascencia@linux.intel.com>
+Date: Thu, 23 Feb 2017 10:34:27 -0600
+Subject: [PATCH] meta: adding hello-yocto recipe
+
+This is a sample recipe
+
+Signed-off-by: Daniela Plascencia <daniela.plascencia@linux.intel.com>
+---
+ .../hello-world/hello-world/hello_world.c | 5 +++++
+ .../hello-world/hello-world_1.0.bb | 19 +++++++++++++++++++
+ 2 files changed, 24 insertions(+)
+ create mode 100644 meta/recipes-devtools/hello-world/hello-world/hello_world.c
+ create mode 100644 meta/recipes-devtools/hello-world/hello-world_1.0.bb
+
+diff --git a/meta/recipes-devtools/hello-world/hello-world/hello_world.c b/meta/recipes-devtools/hello-world/hello-world/hello_world.c
+new file mode 100644
+index 0000000000..0d59f57d4c
+--- /dev/null
++++ b/meta/recipes-devtools/hello-world/hello-world/hello_world.c
+@@ -0,0 +1,5 @@
++#include <stdio.h>
++
++int main(){
++ printf("Hello World\n");
++}
+diff --git a/meta/recipes-devtools/hello-world/hello-world_1.0.bb b/meta/recipes-devtools/hello-world/hello-world_1.0.bb
+new file mode 100644
+index 0000000000..44d888c82a
+--- /dev/null
++++ b/meta/recipes-devtools/hello-world/hello-world_1.0.bb
+@@ -0,0 +1,19 @@
++SUMMARY = "This is a sample summary"
++DESCRIPTION = "This is a sample description"
++HOMEPAGE = "https://sample.com/this-is-a-sample"
++LICENSE = "MIT"
++LIC_FILES_CHKSUM = "file://${COMMON_LICENSE_DIR}/MIT;md5=0835ade698e0bcf8506ecda2f7b4f302"
++
++SRC_URI += "file://hello_world.c"
++
++SRC_URI[md5sum] = "4ee21e9dcc9b5b6012c23038734e1632"
++SRC_URI[sha256sum] = "edef2bbde0fbf0d88232782a0eded323f483a0519d6fde9a3b1809056fd35f3e"
++
++do_compile(){
++ ${CC} -o hello_world ../hello_world.c
++}
++
++do_install(){
++ install -d ${D}${bindir}
++ install -m +x hello_world ${D}${bindir}/hello_world
++}
+--
+2.41.0
+
diff --git a/meta/lib/patchtest/selftest/files/TestMetadata.test_src_uri_left_files.fail b/meta/lib/patchtest/selftest/files/TestMetadata.test_src_uri_left_files.fail
new file mode 100644
index 0000000000..983b6e0c2b
--- /dev/null
+++ b/meta/lib/patchtest/selftest/files/TestMetadata.test_src_uri_left_files.fail
@@ -0,0 +1,35 @@
+From 4ab06b5f81455249cd5e89d2cce9863803b5ecb5 Mon Sep 17 00:00:00 2001
+From: Trevor Gamblin <tgamblin@baylibre.com>
+Date: Fri, 8 Sep 2023 14:41:00 -0400
+Subject: [PATCH] selftest-hello: remove helloworld.c
+
+This should fail the test_src_uri_left_files selftest.
+
+Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
+---
+ .../recipes-test/selftest-hello/selftest-hello_1.0.bb | 4 +---
+ 1 file changed, 1 insertion(+), 3 deletions(-)
+
+diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
+index 547587bef4..f6817f05bc 100644
+--- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
++++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
+@@ -3,8 +3,6 @@ SECTION = "examples"
+ LICENSE = "MIT"
+ LIC_FILES_CHKSUM = "file://${COMMON_LICENSE_DIR}/MIT;md5=0835ade698e0bcf8506ecda2f7b4f302"
+
+-SRC_URI = "file://helloworld.c"
+-
+ S = "${WORKDIR}"
+
+ do_compile() {
+@@ -16,4 +14,4 @@ do_install() {
+ install -m 0755 helloworld ${D}${bindir}
+ }
+
+-BBCLASSEXTEND = "native nativesdk"
+\ No newline at end of file
++BBCLASSEXTEND = "native nativesdk"
+--
+2.41.0
+
diff --git a/meta/lib/patchtest/selftest/files/TestMetadata.test_src_uri_left_files.pass b/meta/lib/patchtest/selftest/files/TestMetadata.test_src_uri_left_files.pass
new file mode 100644
index 0000000000..1f1a77e581
--- /dev/null
+++ b/meta/lib/patchtest/selftest/files/TestMetadata.test_src_uri_left_files.pass
@@ -0,0 +1,51 @@
+From 6c7ac367a873bf827c19b81085c943eace917a99 Mon Sep 17 00:00:00 2001
+From: Trevor Gamblin <tgamblin@baylibre.com>
+Date: Fri, 8 Sep 2023 14:41:00 -0400
+Subject: [PATCH] selftest-hello: remove helloworld.c
+
+This should pass the test_src_uri_left_files selftest.
+
+Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
+---
+ .../recipes-test/selftest-hello/files/helloworld.c | 8 --------
+ .../recipes-test/selftest-hello/selftest-hello_1.0.bb | 4 +---
+ 2 files changed, 1 insertion(+), 11 deletions(-)
+ delete mode 100644 meta-selftest/recipes-test/selftest-hello/files/helloworld.c
+
+diff --git a/meta-selftest/recipes-test/selftest-hello/files/helloworld.c b/meta-selftest/recipes-test/selftest-hello/files/helloworld.c
+deleted file mode 100644
+index fc7169b7b8..0000000000
+--- a/meta-selftest/recipes-test/selftest-hello/files/helloworld.c
++++ /dev/null
+@@ -1,8 +0,0 @@
+-#include <stdio.h>
+-
+-int main(void)
+-{
+- printf("Hello world!\n");
+-
+- return 0;
+-}
+diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
+index 547587bef4..f6817f05bc 100644
+--- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
++++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
+@@ -3,8 +3,6 @@ SECTION = "examples"
+ LICENSE = "MIT"
+ LIC_FILES_CHKSUM = "file://${COMMON_LICENSE_DIR}/MIT;md5=0835ade698e0bcf8506ecda2f7b4f302"
+
+-SRC_URI = "file://helloworld.c"
+-
+ S = "${WORKDIR}"
+
+ do_compile() {
+@@ -16,4 +14,4 @@ do_install() {
+ install -m 0755 helloworld ${D}${bindir}
+ }
+
+-BBCLASSEXTEND = "native nativesdk"
+\ No newline at end of file
++BBCLASSEXTEND = "native nativesdk"
+--
+2.41.0
+
diff --git a/meta/lib/patchtest/selftest/files/TestMetadata.test_summary_presence.fail b/meta/lib/patchtest/selftest/files/TestMetadata.test_summary_presence.fail
new file mode 100644
index 0000000000..2d2b4e683d
--- /dev/null
+++ b/meta/lib/patchtest/selftest/files/TestMetadata.test_summary_presence.fail
@@ -0,0 +1,46 @@
+From e29da5faa74409be394caa09d9f3b7b60f8592b9 Mon Sep 17 00:00:00 2001
+From: Daniela Plascencia <daniela.plascencia@linux.intel.com>
+Date: Thu, 23 Feb 2017 10:34:27 -0600
+Subject: [PATCH] meta: adding hello-yocto recipe
+
+This is a sample recipe
+
+Signed-off-by: Daniela Plascencia <daniela.plascencia@linux.intel.com>
+---
+ meta/recipes-devtools/hello-world/hello-world/hello_world.c | 5 +++++
+ meta/recipes-devtools/hello-world/hello-world_1.0.bb | 12 ++++++++++++
+ 2 files changed, 17 insertions(+)
+ create mode 100644 meta/recipes-devtools/hello-world/hello-world/hello_world.c
+ create mode 100644 meta/recipes-devtools/hello-world/hello-world_1.0.bb
+
+diff --git a/meta/recipes-devtools/hello-world/hello-world/hello_world.c b/meta/recipes-devtools/hello-world/hello-world/hello_world.c
+new file mode 100644
+index 0000000000..0d59f57d4c
+--- /dev/null
++++ b/meta/recipes-devtools/hello-world/hello-world/hello_world.c
+@@ -0,0 +1,5 @@
++#include <stdio.h>
++
++int main(){
++ printf("Hello World\n");
++}
+diff --git a/meta/recipes-devtools/hello-world/hello-world_1.0.bb b/meta/recipes-devtools/hello-world/hello-world_1.0.bb
+new file mode 100644
+index 0000000000..c4e1359217
+--- /dev/null
++++ b/meta/recipes-devtools/hello-world/hello-world_1.0.bb
+@@ -0,0 +1,12 @@
++LICENSE = "CLOSED"
++
++SRC_URI += "file://hello_world.c"
++
++do_compile(){
++ ${CC} -o hello_world ../hello_world.c
++}
++
++do_install(){
++ install -d ${D}${bindir}
++ install -m +x hello_world ${D}${bindir}/hello_world
++}
+--
+2.11.0
diff --git a/meta/lib/patchtest/selftest/files/TestMetadata.test_summary_presence.pass b/meta/lib/patchtest/selftest/files/TestMetadata.test_summary_presence.pass
new file mode 100644
index 0000000000..55f0309b3f
--- /dev/null
+++ b/meta/lib/patchtest/selftest/files/TestMetadata.test_summary_presence.pass
@@ -0,0 +1,49 @@
+From 0cd2fed12ce4b7b071edde12aec4481ad7a6f107 Mon Sep 17 00:00:00 2001
+From: Daniela Plascencia <daniela.plascencia@linux.intel.com>
+Date: Thu, 23 Feb 2017 10:34:27 -0600
+Subject: [PATCH] meta: adding hello-yocto recipe
+
+This is a sample recipe
+
+Signed-off-by: Daniela Plascencia <daniela.plascencia@linux.intel.com>
+---
+ .../hello-world/hello-world/hello_world.c | 5 +++++
+ meta/recipes-devtools/hello-world/hello-world_1.0.bb | 15 +++++++++++++++
+ 2 files changed, 20 insertions(+)
+ create mode 100644 meta/recipes-devtools/hello-world/hello-world/hello_world.c
+ create mode 100644 meta/recipes-devtools/hello-world/hello-world_1.0.bb
+
+diff --git a/meta/recipes-devtools/hello-world/hello-world/hello_world.c b/meta/recipes-devtools/hello-world/hello-world/hello_world.c
+new file mode 100644
+index 0000000000..0d59f57d4c
+--- /dev/null
++++ b/meta/recipes-devtools/hello-world/hello-world/hello_world.c
+@@ -0,0 +1,5 @@
++#include <stdio.h>
++
++int main(){
++ printf("Hello World\n");
++}
+diff --git a/meta/recipes-devtools/hello-world/hello-world_1.0.bb b/meta/recipes-devtools/hello-world/hello-world_1.0.bb
+new file mode 100644
+index 0000000000..c54283eece
+--- /dev/null
++++ b/meta/recipes-devtools/hello-world/hello-world_1.0.bb
+@@ -0,0 +1,15 @@
++SUMMARY = "This is a sample summary"
++DESCRIPTION = "This is a sample description"
++HOMEPAGE = "https://sample.com/this-is-a-sample"
++LICENSE = "CLOSED"
++
++SRC_URI += "file://hello_world.c"
++
++do_compile(){
++ ${CC} -o hello_world ../hello_world.c
++}
++
++do_install(){
++ install -d ${D}${bindir}
++ install -m +x hello_world ${D}${bindir}/hello_world
++}
+--
+2.11.0
diff --git a/meta/lib/patchtest/selftest/files/TestPatch.test_cve_tag_format.fail b/meta/lib/patchtest/selftest/files/TestPatch.test_cve_tag_format.fail
new file mode 100644
index 0000000000..c763a7506e
--- /dev/null
+++ b/meta/lib/patchtest/selftest/files/TestPatch.test_cve_tag_format.fail
@@ -0,0 +1,73 @@
+From 35ccee3cee96fb29514475279248078d88907231 Mon Sep 17 00:00:00 2001
+From: Trevor Gamblin <tgamblin@baylibre.com>
+Date: Tue, 29 Aug 2023 14:12:27 -0400
+Subject: [PATCH] selftest-hello: fix CVE-1234-56789
+
+CVE: CVE-BAD-FORMAT
+
+Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
+---
+ .../files/0001-Fix-CVE-1234-56789.patch | 27 +++++++++++++++++++
+ .../selftest-hello/selftest-hello_1.0.bb | 6 +++--
+ 2 files changed, 31 insertions(+), 2 deletions(-)
+ create mode 100644 meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch
+
+diff --git a/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch
+new file mode 100644
+index 0000000000..9219b8db62
+--- /dev/null
++++ b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch
+@@ -0,0 +1,27 @@
++From b26a31186e6ee2eb1f506d5f2f9394d327a0df2f Mon Sep 17 00:00:00 2001
++From: Trevor Gamblin <tgamblin@baylibre.com>
++Date: Tue, 29 Aug 2023 14:08:20 -0400
++Subject: [PATCH] Fix CVE-NOT-REAL
++
++CVE: CVE-BAD-FORMAT
++Upstream-Status: Backport(http://example.com/example)
++
++Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
++---
++ strlen.c | 1 +
++ 1 file changed, 1 insertion(+)
++
++diff --git a/strlen.c b/strlen.c
++index 1788f38..83d7918 100644
++--- a/strlen.c
+++++ b/strlen.c
++@@ -8,6 +8,7 @@ int main() {
++
++ printf("%d\n", str_len(string1));
++ printf("%d\n", str_len(string2));
+++ printf("CVE FIXED!!!\n");
++
++ return 0;
++ }
++--
++2.41.0
++
+diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
+index 547587bef4..76975a6729 100644
+--- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
++++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
+@@ -3,7 +3,9 @@ SECTION = "examples"
+ LICENSE = "MIT"
+ LIC_FILES_CHKSUM = "file://${COMMON_LICENSE_DIR}/MIT;md5=0835ade698e0bcf8506ecda2f7b4f302"
+
+-SRC_URI = "file://helloworld.c"
++SRC_URI = "file://helloworld.c \
++ file://CVE-1234-56789.patch \
++ "
+
+ S = "${WORKDIR}"
+
+@@ -16,4 +18,4 @@ do_install() {
+ install -m 0755 helloworld ${D}${bindir}
+ }
+
+-BBCLASSEXTEND = "native nativesdk"
+\ No newline at end of file
++BBCLASSEXTEND = "native nativesdk"
+--
+2.41.0
+
diff --git a/meta/lib/patchtest/selftest/files/TestPatch.test_cve_tag_format.pass b/meta/lib/patchtest/selftest/files/TestPatch.test_cve_tag_format.pass
new file mode 100644
index 0000000000..ef6017037c
--- /dev/null
+++ b/meta/lib/patchtest/selftest/files/TestPatch.test_cve_tag_format.pass
@@ -0,0 +1,73 @@
+From 35ccee3cee96fb29514475279248078d88907231 Mon Sep 17 00:00:00 2001
+From: Trevor Gamblin <tgamblin@baylibre.com>
+Date: Tue, 29 Aug 2023 14:12:27 -0400
+Subject: [PATCH] selftest-hello: fix CVE-1234-56789
+
+CVE: CVE-1234-56789
+
+Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
+---
+ .../files/0001-Fix-CVE-1234-56789.patch | 27 +++++++++++++++++++
+ .../selftest-hello/selftest-hello_1.0.bb | 6 +++--
+ 2 files changed, 31 insertions(+), 2 deletions(-)
+ create mode 100644 meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch
+
+diff --git a/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch
+new file mode 100644
+index 0000000000..9219b8db62
+--- /dev/null
++++ b/meta-selftest/recipes-test/selftest-hello/files/0001-Fix-CVE-1234-56789.patch
+@@ -0,0 +1,27 @@
++From b26a31186e6ee2eb1f506d5f2f9394d327a0df2f Mon Sep 17 00:00:00 2001
++From: Trevor Gamblin <tgamblin@baylibre.com>
++Date: Tue, 29 Aug 2023 14:08:20 -0400
++Subject: [PATCH] Fix CVE-NOT-REAL
++
++CVE: CVE-1234-56789
++Upstream-Status: Backport(http://example.com/example)
++
++Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
++---
++ strlen.c | 1 +
++ 1 file changed, 1 insertion(+)
++
++diff --git a/strlen.c b/strlen.c
++index 1788f38..83d7918 100644
++--- a/strlen.c
+++++ b/strlen.c
++@@ -8,6 +8,7 @@ int main() {
++
++ printf("%d\n", str_len(string1));
++ printf("%d\n", str_len(string2));
+++ printf("CVE FIXED!!!\n");
++
++ return 0;
++ }
++--
++2.41.0
++
+diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
+index 547587bef4..76975a6729 100644
+--- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
++++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
+@@ -3,7 +3,9 @@ SECTION = "examples"
+ LICENSE = "MIT"
+ LIC_FILES_CHKSUM = "file://${COMMON_LICENSE_DIR}/MIT;md5=0835ade698e0bcf8506ecda2f7b4f302"
+
+-SRC_URI = "file://helloworld.c"
++SRC_URI = "file://helloworld.c \
++ file://CVE-1234-56789.patch \
++ "
+
+ S = "${WORKDIR}"
+
+@@ -16,4 +18,4 @@ do_install() {
+ install -m 0755 helloworld ${D}${bindir}
+ }
+
+-BBCLASSEXTEND = "native nativesdk"
+\ No newline at end of file
++BBCLASSEXTEND = "native nativesdk"
+--
+2.41.0
+
diff --git a/meta/lib/patchtest/selftest/files/TestPatch.test_signed_off_by_presence.fail b/meta/lib/patchtest/selftest/files/TestPatch.test_signed_off_by_presence.fail
new file mode 100644
index 0000000000..ce8bf7b7d1
--- /dev/null
+++ b/meta/lib/patchtest/selftest/files/TestPatch.test_signed_off_by_presence.fail
@@ -0,0 +1,71 @@
+From 5a2d0ac780a0f4c046fb1a3c3463d3e726f191cb Mon Sep 17 00:00:00 2001
+From: Trevor Gamblin <tgamblin@baylibre.com>
+Date: Tue, 29 Aug 2023 14:12:27 -0400
+Subject: [PATCH] selftest-hello: fix CVE-1234-56789
+
+CVE: CVE-1234-56789
+
+Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
+---
+ .../selftest-hello/files/CVE-1234-56789.patch | 26 +++++++++++++++++++
+ .../selftest-hello/selftest-hello_1.0.bb | 6 +++--
+ 2 files changed, 30 insertions(+), 2 deletions(-)
+ create mode 100644 meta-selftest/recipes-test/selftest-hello/files/CVE-1234-56789.patch
+
+diff --git a/meta-selftest/recipes-test/selftest-hello/files/CVE-1234-56789.patch b/meta-selftest/recipes-test/selftest-hello/files/CVE-1234-56789.patch
+new file mode 100644
+index 0000000000..92a5b65a53
+--- /dev/null
++++ b/meta-selftest/recipes-test/selftest-hello/files/CVE-1234-56789.patch
+@@ -0,0 +1,26 @@
++From b26a31186e6ee2eb1f506d5f2f9394d327a0df2f Mon Sep 17 00:00:00 2001
++From: Trevor Gamblin <tgamblin@baylibre.com>
++Date: Tue, 29 Aug 2023 14:08:20 -0400
++Subject: [PATCH] Fix CVE-NOT-REAL
++
++CVE: CVE-1234-56789
++Upstream-Status: Backport(http://example.com/example)
++
++---
++ strlen.c | 1 +
++ 1 file changed, 1 insertion(+)
++
++diff --git a/strlen.c b/strlen.c
++index 1788f38..83d7918 100644
++--- a/strlen.c
+++++ b/strlen.c
++@@ -8,6 +8,7 @@ int main() {
++
++ printf("%d\n", str_len(string1));
++ printf("%d\n", str_len(string2));
+++ printf("CVE FIXED!!!\n");
++
++ return 0;
++ }
++--
++2.41.0
+diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
+index 547587bef4..76975a6729 100644
+--- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
++++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
+@@ -3,7 +3,9 @@ SECTION = "examples"
+ LICENSE = "MIT"
+ LIC_FILES_CHKSUM = "file://${COMMON_LICENSE_DIR}/MIT;md5=0835ade698e0bcf8506ecda2f7b4f302"
+
+-SRC_URI = "file://helloworld.c"
++SRC_URI = "file://helloworld.c \
++ file://CVE-1234-56789.patch \
++ "
+
+ S = "${WORKDIR}"
+
+@@ -16,4 +18,4 @@ do_install() {
+ install -m 0755 helloworld ${D}${bindir}
+ }
+
+-BBCLASSEXTEND = "native nativesdk"
+\ No newline at end of file
++BBCLASSEXTEND = "native nativesdk"
+--
+2.41.0
+
diff --git a/meta/lib/patchtest/selftest/files/TestPatch.test_signed_off_by_presence.pass b/meta/lib/patchtest/selftest/files/TestPatch.test_signed_off_by_presence.pass
new file mode 100644
index 0000000000..ea34c76f0d
--- /dev/null
+++ b/meta/lib/patchtest/selftest/files/TestPatch.test_signed_off_by_presence.pass
@@ -0,0 +1,72 @@
+From 14d72f6973270f78455a8628143f2cff90e8f41e Mon Sep 17 00:00:00 2001
+From: Trevor Gamblin <tgamblin@baylibre.com>
+Date: Tue, 29 Aug 2023 14:12:27 -0400
+Subject: [PATCH] selftest-hello: fix CVE-1234-56789
+
+CVE: CVE-1234-56789
+
+Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
+---
+ .../selftest-hello/files/CVE-1234-56789.patch | 27 +++++++++++++++++++
+ .../selftest-hello/selftest-hello_1.0.bb | 6 +++--
+ 2 files changed, 31 insertions(+), 2 deletions(-)
+ create mode 100644 meta-selftest/recipes-test/selftest-hello/files/CVE-1234-56789.patch
+
+diff --git a/meta-selftest/recipes-test/selftest-hello/files/CVE-1234-56789.patch b/meta-selftest/recipes-test/selftest-hello/files/CVE-1234-56789.patch
+new file mode 100644
+index 0000000000..869cfb6fe5
+--- /dev/null
++++ b/meta-selftest/recipes-test/selftest-hello/files/CVE-1234-56789.patch
+@@ -0,0 +1,27 @@
++From b26a31186e6ee2eb1f506d5f2f9394d327a0df2f Mon Sep 17 00:00:00 2001
++From: Trevor Gamblin <tgamblin@baylibre.com>
++Date: Tue, 29 Aug 2023 14:08:20 -0400
++Subject: [PATCH] Fix CVE-NOT-REAL
++
++CVE: CVE-1234-56789
++Upstream-Status: Backport(http://example.com/example)
++
++Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
++---
++ strlen.c | 1 +
++ 1 file changed, 1 insertion(+)
++
++diff --git a/strlen.c b/strlen.c
++index 1788f38..83d7918 100644
++--- a/strlen.c
+++++ b/strlen.c
++@@ -8,6 +8,7 @@ int main() {
++
++ printf("%d\n", str_len(string1));
++ printf("%d\n", str_len(string2));
+++ printf("CVE FIXED!!!\n");
++
++ return 0;
++ }
++--
++2.41.0
+diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
+index 547587bef4..76975a6729 100644
+--- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
++++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb
+@@ -3,7 +3,9 @@ SECTION = "examples"
+ LICENSE = "MIT"
+ LIC_FILES_CHKSUM = "file://${COMMON_LICENSE_DIR}/MIT;md5=0835ade698e0bcf8506ecda2f7b4f302"
+
+-SRC_URI = "file://helloworld.c"
++SRC_URI = "file://helloworld.c \
++ file://CVE-1234-56789.patch \
++ "
+
+ S = "${WORKDIR}"
+
+@@ -16,4 +18,4 @@ do_install() {
+ install -m 0755 helloworld ${D}${bindir}
+ }
+
+-BBCLASSEXTEND = "native nativesdk"
+\ No newline at end of file
++BBCLASSEXTEND = "native nativesdk"
+--
+2.41.0
+
diff --git a/meta/lib/patchtest/selftest/selftest b/meta/lib/patchtest/selftest/selftest
new file mode 100755
index 0000000000..6fad50ce61
--- /dev/null
+++ b/meta/lib/patchtest/selftest/selftest
@@ -0,0 +1,94 @@
+#!/usr/bin/env python3
+
+# Test every patch from files folder and output error on failure
+#
+# Copyright (C) 2016 Intel Corporation
+#
+# SPDX-License-Identifier: GPL-2.0-only
+
+import os
+import subprocess
+import sys
+
+currentdir = os.path.dirname(os.path.abspath(__file__))
+patchesdir = os.path.join(currentdir, 'files')
+topdir = os.path.dirname(currentdir)
+parentdir = os.path.dirname(topdir)
+
+# path to the repo root
+repodir = os.path.dirname(os.path.dirname(parentdir))
+
+def print_results(passcount, failcount, skipcount, xpasscount, xfailcount, xskipcount, errorcount):
+ total = passcount + skipcount + failcount + xpasscount + xfailcount + xskipcount + errorcount
+ print("============================================================================")
+ print("Testsuite summary for %s" % os.path.basename(topdir))
+ print("============================================================================")
+ print("# TOTAL: %s" % str(total))
+ print("# XPASS: %s" % str(xpasscount))
+ print("# XFAIL: %s" % str(xfailcount))
+ print("# XSKIP: %s" % str(xskipcount))
+ print("# PASS: %s" % str(passcount))
+ print("# FAIL: %s" % str(failcount))
+ print("# SKIP: %s" % str(skipcount))
+ print("# ERROR: %s" % str(errorcount))
+ print("============================================================================")
+
+# Once the tests are in oe-core, we can remove the testdir param and use os.path.dirname to get relative paths
+def test(root, patch):
+ res = True
+ patchpath = os.path.abspath(os.path.join(root, patch))
+
+ cmd = 'patchtest --repodir %s --testdir %s/tests --patch %s' % (repodir, topdir, patchpath)
+ results = subprocess.check_output(cmd, stderr=subprocess.STDOUT, universal_newlines=True, shell=True)
+
+ return results
+
+if __name__ == '__main__':
+ passcount = 0
+ failcount = 0
+ skipcount = 0
+ xpasscount = 0
+ xfailcount = 0
+ xskipcount = 0
+ errorcount = 0
+
+ results = None
+
+ for root, dirs, patches in os.walk(patchesdir):
+ for patch in patches:
+ results = test(root, patch)
+
+ a = patch.split('.')
+ klass, testname = a[0], a[1]
+ expected_result = a[-1]
+ testid = ".%s.%s" % (klass,testname)
+
+ for resultline in results.splitlines():
+ if testid in resultline:
+ result, _ = resultline.split(':', 1)
+
+ if expected_result.upper() == "FAIL" and result.upper() == "FAIL":
+ xfailcount = xfailcount + 1
+ print("XFAIL: %s (file: %s)" % (testid.strip("."), os.path.basename(patch)))
+ elif expected_result.upper() == "PASS" and result.upper() == "PASS":
+ xpasscount = xpasscount + 1
+ print("XPASS: %s (file: %s)" % (testid.strip("."), os.path.basename(patch)))
+ elif expected_result.upper() == "SKIP" and result.upper() == "SKIP":
+ xskipcount = xskipcount + 1
+ print("XSKIP: %s (file: %s)" % (testid.strip("."), os.path.basename(patch)))
+ else:
+ print("%s: %s (%s)" % (result.upper(), testid.strip("."), os.path.basename(patch)))
+ if result.upper() == "PASS":
+ passcount = passcount + 1
+ elif result.upper() == "FAIL":
+ failcount = failcount + 1
+ elif result.upper() == "SKIP":
+ skipcount = skipcount + 1
+ else:
+ print("Bad result on test %s against %s" % (testid.strip("."), os.path.basename(patch)))
+ errorcount = errorcount + 1
+ break
+ else:
+ print ("No test for=%s" % patch)
+
+ print_results(passcount, failcount, skipcount, xpasscount, xfailcount, xskipcount, errorcount)
diff --git a/meta/lib/patchtest/tests/__init__.py b/meta/lib/patchtest/tests/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/meta/lib/patchtest/tests/__init__.py
diff --git a/meta/lib/patchtest/tests/base.py b/meta/lib/patchtest/tests/base.py
new file mode 100644
index 0000000000..424e61b5be
--- /dev/null
+++ b/meta/lib/patchtest/tests/base.py
@@ -0,0 +1,239 @@
+# Base class to be used by all test cases defined in the suite
+#
+# Copyright (C) 2016 Intel Corporation
+#
+# SPDX-License-Identifier: GPL-2.0-only
+
+import unittest
+import logging
+import json
+import unidiff
+from data import PatchTestInput
+import mailbox
+import collections
+import sys
+import os
+import re
+
+sys.path.insert(0, os.path.join(os.path.dirname(__file__), 'pyparsing'))
+
+logger = logging.getLogger('patchtest')
+debug=logger.debug
+info=logger.info
+warn=logger.warn
+error=logger.error
+
+Commit = collections.namedtuple('Commit', ['author', 'subject', 'commit_message', 'shortlog', 'payload'])
+
+class PatchtestOEError(Exception):
+ """Exception for handling patchtest-oe errors"""
+ def __init__(self, message, exitcode=1):
+ super().__init__(message)
+ self.exitcode = exitcode
+
+class Base(unittest.TestCase):
+ # if unit test fails, fail message will throw at least the following JSON: {"id": <testid>}
+
+ endcommit_messages_regex = re.compile(r'\(From \w+-\w+ rev:|(?<!\S)Signed-off-by|(?<!\S)---\n')
+ patchmetadata_regex = re.compile(r'-{3} \S+|\+{3} \S+|@{2} -\d+,\d+ \+\d+,\d+ @{2} \S+')
+
+
+ @staticmethod
+ def msg_to_commit(msg):
+ payload = msg.get_payload()
+ return Commit(subject=msg['subject'].replace('\n', ' ').replace(' ', ' '),
+ author=msg.get('From'),
+ shortlog=Base.shortlog(msg['subject']),
+ commit_message=Base.commit_message(payload),
+ payload=payload)
+
+ @staticmethod
+ def commit_message(payload):
+ commit_message = payload.__str__()
+ match = Base.endcommit_messages_regex.search(payload)
+ if match:
+ commit_message = payload[:match.start()]
+ return commit_message
+
+ @staticmethod
+ def shortlog(shlog):
+ # remove possible prefix (between brackets) before colon
+ start = shlog.find(']', 0, shlog.find(':'))
+ # remove also newlines and spaces at both sides
+ return shlog[start + 1:].replace('\n', '').strip()
+
+ @classmethod
+ def setUpClass(cls):
+
+ # General objects: mailbox.mbox and patchset
+ cls.mbox = mailbox.mbox(PatchTestInput.repo.patch)
+
+ # Patch may be malformed, so try parsing it
+ cls.unidiff_parse_error = ''
+ cls.patchset = None
+ try:
+ cls.patchset = unidiff.PatchSet.from_filename(PatchTestInput.repo.patch, encoding=u'UTF-8')
+ except unidiff.UnidiffParseError as upe:
+ cls.patchset = []
+ cls.unidiff_parse_error = str(upe)
+
+ # Easy to iterate list of commits
+ cls.commits = []
+ for msg in cls.mbox:
+ if msg['subject'] and msg.get_payload():
+ cls.commits.append(Base.msg_to_commit(msg))
+
+ cls.setUpClassLocal()
+
+ @classmethod
+ def tearDownClass(cls):
+ cls.tearDownClassLocal()
+
+ @classmethod
+ def setUpClassLocal(cls):
+ pass
+
+ @classmethod
+ def tearDownClassLocal(cls):
+ pass
+
+ def fail(self, issue, fix=None, commit=None, data=None):
+ """ Convert to a JSON string failure data"""
+ value = {'id': self.id(),
+ 'issue': issue}
+
+ if fix:
+ value['fix'] = fix
+ if commit:
+ value['commit'] = {'subject': commit.subject,
+ 'shortlog': commit.shortlog}
+
+ # extend return value with other useful info
+ if data:
+ value['data'] = data
+
+ return super(Base, self).fail(json.dumps(value))
+
+ def skip(self, issue, data=None):
+ """ Convert the skip string to JSON"""
+ value = {'id': self.id(),
+ 'issue': issue}
+
+ # extend return value with other useful info
+ if data:
+ value['data'] = data
+
+ return super(Base, self).skipTest(json.dumps(value))
+
+ def shortid(self):
+ return self.id().split('.')[-1]
+
+ def __str__(self):
+ return json.dumps({'id': self.id()})
+
+class Metadata(Base):
+ @classmethod
+ def setUpClassLocal(cls):
+ cls.tinfoil = cls.setup_tinfoil()
+
+ # get info about added/modified/remove recipes
+ cls.added, cls.modified, cls.removed = cls.get_metadata_stats(cls.patchset)
+
+ @classmethod
+ def tearDownClassLocal(cls):
+ cls.tinfoil.shutdown()
+
+ @classmethod
+ def setup_tinfoil(cls, config_only=False):
+ """Initialize tinfoil api from bitbake"""
+
+ # import relevant libraries
+ try:
+ scripts_path = os.path.join(PatchTestInput.repodir, 'scripts', 'lib')
+ if scripts_path not in sys.path:
+ sys.path.insert(0, scripts_path)
+ import scriptpath
+ scriptpath.add_bitbake_lib_path()
+ import bb.tinfoil
+ except ImportError:
+ raise PatchtestOEError('Could not import tinfoil module')
+
+ orig_cwd = os.path.abspath(os.curdir)
+
+ # Load tinfoil
+ tinfoil = None
+ try:
+ builddir = os.environ.get('BUILDDIR')
+ if not builddir:
+ logger.warn('Bitbake environment not loaded?')
+ return tinfoil
+ os.chdir(builddir)
+ tinfoil = bb.tinfoil.Tinfoil()
+ tinfoil.prepare(config_only=config_only)
+ except bb.tinfoil.TinfoilUIException as te:
+ if tinfoil:
+ tinfoil.shutdown()
+ raise PatchtestOEError('Could not prepare properly tinfoil (TinfoilUIException)')
+ except Exception as e:
+ if tinfoil:
+ tinfoil.shutdown()
+ raise e
+ finally:
+ os.chdir(orig_cwd)
+
+ return tinfoil
+
+ @classmethod
+ def get_metadata_stats(cls, patchset):
+ """Get lists of added, modified and removed metadata files"""
+
+ def find_pn(data, path):
+ """Find the PN from data"""
+ pn = None
+ pn_native = None
+ for _path, _pn in data:
+ if path in _path:
+ if 'native' in _pn:
+ # store the native PN but look for the non-native one first
+ pn_native = _pn
+ else:
+ pn = _pn
+ break
+ else:
+ # sent the native PN if found previously
+ if pn_native:
+ return pn_native
+
+ # on renames (usually upgrades), we need to check (FILE) base names
+ # because the unidiff library does not provided the new filename, just the modified one
+ # and tinfoil datastore, once the patch is merged, will contain the new filename
+ path_basename = path.split('_')[0]
+ for _path, _pn in data:
+ _path_basename = _path.split('_')[0]
+ if path_basename == _path_basename:
+ pn = _pn
+ return pn
+
+ if not cls.tinfoil:
+ cls.tinfoil = cls.setup_tinfoil()
+
+ added_paths, modified_paths, removed_paths = [], [], []
+ added, modified, removed = [], [], []
+
+ # get metadata filename additions, modification and removals
+ for patch in patchset:
+ if patch.path.endswith('.bb') or patch.path.endswith('.bbappend') or patch.path.endswith('.inc'):
+ if patch.is_added_file:
+ added_paths.append(os.path.join(os.path.abspath(PatchTestInput.repodir), patch.path))
+ elif patch.is_modified_file:
+ modified_paths.append(os.path.join(os.path.abspath(PatchTestInput.repodir), patch.path))
+ elif patch.is_removed_file:
+ removed_paths.append(os.path.join(os.path.abspath(PatchTestInput.repodir), patch.path))
+
+ data = cls.tinfoil.cooker.recipecaches[''].pkg_fn.items()
+
+ added = [find_pn(data,path) for path in added_paths]
+ modified = [find_pn(data,path) for path in modified_paths]
+ removed = [find_pn(data,path) for path in removed_paths]
+
+ return [a for a in added if a], [m for m in modified if m], [r for r in removed if r]
diff --git a/meta/lib/patchtest/tests/pyparsing/common.py b/meta/lib/patchtest/tests/pyparsing/common.py
new file mode 100644
index 0000000000..cbce4c38bc
--- /dev/null
+++ b/meta/lib/patchtest/tests/pyparsing/common.py
@@ -0,0 +1,26 @@
+# common pyparsing variables
+#
+# Copyright (C) 2016 Intel Corporation
+#
+# SPDX-License-Identifier: GPL-2.0-only
+
+import pyparsing
+
+# general
+colon = pyparsing.Literal(":")
+start = pyparsing.LineStart()
+end = pyparsing.LineEnd()
+at = pyparsing.Literal("@")
+lessthan = pyparsing.Literal("<")
+greaterthan = pyparsing.Literal(">")
+opensquare = pyparsing.Literal("[")
+closesquare = pyparsing.Literal("]")
+inappropriate = pyparsing.CaselessLiteral("Inappropriate")
+submitted = pyparsing.CaselessLiteral("Submitted")
+
+# word related
+nestexpr = pyparsing.nestedExpr(opener='[', closer=']')
+inappropriateinfo = pyparsing.Literal("Inappropriate") + nestexpr
+submittedinfo = pyparsing.Literal("Submitted") + nestexpr
+word = pyparsing.Word(pyparsing.alphas)
+worddot = pyparsing.Word(pyparsing.alphas+".")
diff --git a/meta/lib/patchtest/tests/pyparsing/parse_cve_tags.py b/meta/lib/patchtest/tests/pyparsing/parse_cve_tags.py
new file mode 100644
index 0000000000..f7fb82ec2b
--- /dev/null
+++ b/meta/lib/patchtest/tests/pyparsing/parse_cve_tags.py
@@ -0,0 +1,18 @@
+# signed-off-by pyparsing definition
+#
+# Copyright (C) 2016 Intel Corporation
+#
+# SPDX-License-Identifier: GPL-2.0-only
+
+
+import pyparsing
+import common
+
+name = pyparsing.Regex('\S+.*(?= <)')
+username = pyparsing.OneOrMore(common.worddot)
+domain = pyparsing.OneOrMore(common.worddot)
+cve = pyparsing.Regex('CVE\-\d{4}\-\d+')
+cve_mark = pyparsing.Literal("CVE:")
+
+cve_tag = pyparsing.AtLineStart(cve_mark + cve)
+patch_cve_tag = pyparsing.AtLineStart("+" + cve_mark + cve)
diff --git a/meta/lib/patchtest/tests/pyparsing/parse_shortlog.py b/meta/lib/patchtest/tests/pyparsing/parse_shortlog.py
new file mode 100644
index 0000000000..30d3ab35b3
--- /dev/null
+++ b/meta/lib/patchtest/tests/pyparsing/parse_shortlog.py
@@ -0,0 +1,14 @@
+# subject pyparsing definition
+#
+# Copyright (C) 2016 Intel Corporation
+#
+# SPDX-License-Identifier: GPL-2.0-only
+
+# NOTE:This is an oversimplified syntax of the mbox's summary
+
+import pyparsing
+import common
+
+target = pyparsing.OneOrMore(pyparsing.Word(pyparsing.printables.replace(':','')))
+summary = pyparsing.OneOrMore(pyparsing.Word(pyparsing.printables))
+shortlog = common.start + target + common.colon + summary + common.end
diff --git a/meta/lib/patchtest/tests/pyparsing/parse_signed_off_by.py b/meta/lib/patchtest/tests/pyparsing/parse_signed_off_by.py
new file mode 100644
index 0000000000..692ebec3ff
--- /dev/null
+++ b/meta/lib/patchtest/tests/pyparsing/parse_signed_off_by.py
@@ -0,0 +1,22 @@
+# signed-off-by pyparsing definition
+#
+# Copyright (C) 2016 Intel Corporation
+#
+# SPDX-License-Identifier: GPL-2.0-only
+
+
+import pyparsing
+import common
+
+name = pyparsing.Regex('\S+.*(?= <)')
+username = pyparsing.OneOrMore(common.worddot)
+domain = pyparsing.OneOrMore(common.worddot)
+
+# taken from https://pyparsing-public.wikispaces.com/Helpful+Expressions
+email = pyparsing.Regex(r"(?P<user>[A-Za-z0-9._%+-]+)@(?P<hostname>[A-Za-z0-9.-]+)\.(?P<domain>[A-Za-z]{2,})")
+
+email_enclosed = common.lessthan + email + common.greaterthan
+
+signed_off_by_mark = pyparsing.Literal("Signed-off-by:")
+signed_off_by = pyparsing.AtLineStart(signed_off_by_mark + name + email_enclosed)
+patch_signed_off_by = pyparsing.AtLineStart("+" + signed_off_by_mark + name + email_enclosed)
diff --git a/meta/lib/patchtest/tests/pyparsing/parse_upstream_status.py b/meta/lib/patchtest/tests/pyparsing/parse_upstream_status.py
new file mode 100644
index 0000000000..bc6c427c4c
--- /dev/null
+++ b/meta/lib/patchtest/tests/pyparsing/parse_upstream_status.py
@@ -0,0 +1,24 @@
+# upstream-status pyparsing definition
+#
+# Copyright (C) 2016 Intel Corporation
+#
+# SPDX-License-Identifier: GPL-2.0-only
+
+
+import common
+import pyparsing
+
+upstream_status_literal_valid_status = ["Pending", "Backport", "Denied", "Inappropriate", "Submitted"]
+upstream_status_nonliteral_valid_status = ["Pending", "Backport", "Denied", "Inappropriate [reason]", "Submitted [where]"]
+
+upstream_status_valid_status = pyparsing.Or(
+ [pyparsing.Literal(status) for status in upstream_status_literal_valid_status]
+)
+
+upstream_status_mark = pyparsing.Literal("Upstream-Status")
+inappropriate_status_mark = common.inappropriate
+submitted_status_mark = common.submitted
+
+upstream_status = common.start + upstream_status_mark + common.colon + upstream_status_valid_status
+upstream_status_inappropriate_info = common.start + upstream_status_mark + common.colon + common.inappropriateinfo
+upstream_status_submitted_info = common.start + upstream_status_mark + common.colon + common.submittedinfo
diff --git a/meta/lib/patchtest/tests/test_mbox.py b/meta/lib/patchtest/tests/test_mbox.py
new file mode 100644
index 0000000000..0b623b7d17
--- /dev/null
+++ b/meta/lib/patchtest/tests/test_mbox.py
@@ -0,0 +1,159 @@
+# Checks related to the patch's author
+#
+# Copyright (C) 2016 Intel Corporation
+#
+# SPDX-License-Identifier: GPL-2.0-only
+
+import base
+import collections
+import parse_shortlog
+import parse_signed_off_by
+import pyparsing
+import subprocess
+from data import PatchTestInput
+
+def headlog():
+ output = subprocess.check_output(
+ "cd %s; git log --pretty='%%h#%%aN#%%cD:#%%s' -1" % PatchTestInput.repodir,
+ universal_newlines=True,
+ shell=True
+ )
+ return output.split('#')
+
+class TestMbox(base.Base):
+
+ auh_email = 'auh@auh.yoctoproject.org'
+
+ invalids = [pyparsing.Regex("^Upgrade Helper.+"),
+ pyparsing.Regex(auh_email),
+ pyparsing.Regex("uh@not\.set"),
+ pyparsing.Regex("\S+@example\.com")]
+
+ rexp_detect = pyparsing.Regex('\[\s?YOCTO.*\]')
+ rexp_validation = pyparsing.Regex('\[(\s?YOCTO\s?#\s?(\d+)\s?,?)+\]')
+ revert_shortlog_regex = pyparsing.Regex('Revert\s+".*"')
+ signoff_prog = parse_signed_off_by.signed_off_by
+ revert_shortlog_regex = pyparsing.Regex('Revert\s+".*"')
+ maxlength = 90
+
+ # base paths of main yocto project sub-projects
+ paths = {
+ 'oe-core': ['meta-selftest', 'meta-skeleton', 'meta', 'scripts'],
+ 'bitbake': ['bitbake'],
+ 'documentation': ['documentation'],
+ 'poky': ['meta-poky','meta-yocto-bsp'],
+ 'oe': ['meta-gpe', 'meta-gnome', 'meta-efl', 'meta-networking', 'meta-multimedia','meta-initramfs', 'meta-ruby', 'contrib', 'meta-xfce', 'meta-filesystems', 'meta-perl', 'meta-webserver', 'meta-systemd', 'meta-oe', 'meta-python']
+ }
+
+ # scripts folder is a mix of oe-core and poky, most is oe-core code except:
+ poky_scripts = ['scripts/yocto-bsp', 'scripts/yocto-kernel', 'scripts/yocto-layer', 'scripts/lib/bsp']
+
+ Project = collections.namedtuple('Project', ['name', 'listemail', 'gitrepo', 'paths'])
+
+ bitbake = Project(name='Bitbake', listemail='bitbake-devel@lists.openembedded.org', gitrepo='http://git.openembedded.org/bitbake/', paths=paths['bitbake'])
+ doc = Project(name='Documentantion', listemail='yocto@yoctoproject.org', gitrepo='http://git.yoctoproject.org/cgit/cgit.cgi/yocto-docs/', paths=paths['documentation'])
+ poky = Project(name='Poky', listemail='poky@yoctoproject.org', gitrepo='http://git.yoctoproject.org/cgit/cgit.cgi/poky/', paths=paths['poky'])
+ oe = Project(name='oe', listemail='openembedded-devel@lists.openembedded.org', gitrepo='http://git.openembedded.org/meta-openembedded/', paths=paths['oe'])
+
+
+ def test_signed_off_by_presence(self):
+ for commit in TestMbox.commits:
+ # skip those patches that revert older commits, these do not required the tag presence
+ if self.revert_shortlog_regex.search_string(commit.shortlog):
+ continue
+ if not self.signoff_prog.search_string(commit.payload):
+ self.fail('Mbox is missing Signed-off-by. Add it manually or with "git commit --amend -s"',
+ commit=commit)
+
+ def test_shortlog_format(self):
+ for commit in TestMbox.commits:
+ shortlog = commit.shortlog
+ if not shortlog.strip():
+ self.skip('Empty shortlog, no reason to execute shortlog format test')
+ else:
+ # no reason to re-check on revert shortlogs
+ if shortlog.startswith('Revert "'):
+ continue
+ try:
+ parse_shortlog.shortlog.parseString(shortlog)
+ except pyparsing.ParseException as pe:
+ self.fail('Commit shortlog (first line of commit message) should follow the format "<target>: <summary>"',
+ commit=commit)
+
+ def test_shortlog_length(self):
+ for commit in TestMbox.commits:
+ # no reason to re-check on revert shortlogs
+ shortlog = commit.shortlog
+ if shortlog.startswith('Revert "'):
+ continue
+ l = len(shortlog)
+ if l > self.maxlength:
+ self.fail('Edit shortlog so that it is %d characters or less (currently %d characters)' % (self.maxlength, l),
+ commit=commit)
+
+ def test_series_merge_on_head(self):
+ self.skip("Merge test is disabled for now")
+ if PatchTestInput.repo.branch != "master":
+ self.skip("Skipping merge test since patch is not intended for master branch. Target detected is %s" % PatchTestInput.repo.branch)
+ if not PatchTestInput.repo.ismerged:
+ commithash, author, date, shortlog = headlog()
+ self.fail('Series does not apply on top of target branch %s' % PatchTestInput.repo.branch,
+ data=[('Targeted branch', '%s (currently at %s)' % (PatchTestInput.repo.branch, commithash))])
+
+ def test_target_mailing_list(self):
+ """In case of merge failure, check for other targeted projects"""
+ if PatchTestInput.repo.ismerged:
+ self.skip('Series merged, no reason to check other mailing lists')
+
+ # a meta project may be indicted in the message subject, if this is the case, just fail
+ # TODO: there may be other project with no-meta prefix, we also need to detect these
+ project_regex = pyparsing.Regex("\[(?P<project>meta-.+)\]")
+ for commit in TestMbox.commits:
+ match = project_regex.search_string(commit.subject)
+ if match:
+ self.fail('Series sent to the wrong mailing list or some patches from the series correspond to different mailing lists',
+ commit=commit)
+
+ for patch in self.patchset:
+ folders = patch.path.split('/')
+ base_path = folders[0]
+ for project in [self.bitbake, self.doc, self.oe, self.poky]:
+ if base_path in project.paths:
+ self.fail('Series sent to the wrong mailing list or some patches from the series correspond to different mailing lists',
+ data=[('Suggested ML', '%s [%s]' % (project.listemail, project.gitrepo)),
+ ('Patch\'s path:', patch.path)])
+
+ # check for poky's scripts code
+ if base_path.startswith('scripts'):
+ for poky_file in self.poky_scripts:
+ if patch.path.startswith(poky_file):
+ self.fail('Series sent to the wrong mailing list or some patches from the series correspond to different mailing lists',
+ data=[('Suggested ML', '%s [%s]' % (self.poky.listemail, self.poky.gitrepo)),('Patch\'s path:', patch.path)])
+
+ def test_mbox_format(self):
+ if self.unidiff_parse_error:
+ self.fail('Series has malformed diff lines. Create the series again using git-format-patch and ensure it applies using git am',
+ data=[('Diff line',self.unidiff_parse_error)])
+
+ def test_commit_message_presence(self):
+ for commit in TestMbox.commits:
+ if not commit.commit_message.strip():
+ self.fail('Please include a commit message on your patch explaining the change', commit=commit)
+
+ def test_bugzilla_entry_format(self):
+ for commit in TestMbox.commits:
+ if not self.rexp_detect.search_string(commit.commit_message):
+ self.skip("No bug ID found")
+ elif not self.rexp_validation.search_string(commit.commit_message):
+ self.fail('Bugzilla issue ID is not correctly formatted - specify it with format: "[YOCTO #<bugzilla ID>]"', commit=commit)
+
+ def test_author_valid(self):
+ for commit in self.commits:
+ for invalid in self.invalids:
+ if invalid.search_string(commit.author):
+ self.fail('Invalid author %s. Resend the series with a valid patch author' % commit.author, commit=commit)
+
+ def test_non_auh_upgrade(self):
+ for commit in self.commits:
+ if self.auh_email in commit.payload:
+ self.fail('Invalid author %s. Resend the series with a valid patch author' % self.auh_email, commit=commit)
diff --git a/meta/lib/patchtest/tests/test_metadata.py b/meta/lib/patchtest/tests/test_metadata.py
new file mode 100644
index 0000000000..be609dbd04
--- /dev/null
+++ b/meta/lib/patchtest/tests/test_metadata.py
@@ -0,0 +1,197 @@
+# Checks related to the patch's LIC_FILES_CHKSUM metadata variable
+#
+# Copyright (C) 2016 Intel Corporation
+#
+# SPDX-License-Identifier: GPL-2.0-only
+
+import base
+import os
+import pyparsing
+from data import PatchTestInput, PatchTestDataStore
+
+class TestMetadata(base.Metadata):
+ metadata_lic = 'LICENSE'
+ invalid_license = 'PATCHTESTINVALID'
+ metadata_chksum = 'LIC_FILES_CHKSUM'
+ license_var = 'LICENSE'
+ closed = 'CLOSED'
+ lictag_re = pyparsing.AtLineStart("License-Update:")
+ lic_chksum_added = pyparsing.AtLineStart("+" + metadata_chksum)
+ lic_chksum_removed = pyparsing.AtLineStart("-" + metadata_chksum)
+ add_mark = pyparsing.Regex('\+ ')
+ max_length = 200
+ metadata_src_uri = 'SRC_URI'
+ md5sum = 'md5sum'
+ sha256sum = 'sha256sum'
+ git_regex = pyparsing.Regex('^git\:\/\/.*')
+ metadata_summary = 'SUMMARY'
+ cve_check_ignore_var = 'CVE_CHECK_IGNORE'
+ cve_status_var = 'CVE_STATUS'
+
+ def test_license_presence(self):
+ if not self.added:
+ self.skip('No added recipes, skipping test')
+
+ # TODO: this is a workaround so we can parse the recipe not
+ # containing the LICENSE var: add some default license instead
+ # of INVALID into auto.conf, then remove this line at the end
+ auto_conf = os.path.join(os.environ.get('BUILDDIR'), 'conf', 'auto.conf')
+ open_flag = 'w'
+ if os.path.exists(auto_conf):
+ open_flag = 'a'
+ with open(auto_conf, open_flag) as fd:
+ for pn in self.added:
+ fd.write('LICENSE ??= "%s"\n' % self.invalid_license)
+
+ no_license = False
+ for pn in self.added:
+ rd = self.tinfoil.parse_recipe(pn)
+ license = rd.getVar(self.metadata_lic)
+ if license == self.invalid_license:
+ no_license = True
+ break
+
+ # remove auto.conf line or the file itself
+ if open_flag == 'w':
+ os.remove(auto_conf)
+ else:
+ fd = open(auto_conf, 'r')
+ lines = fd.readlines()
+ fd.close()
+ with open(auto_conf, 'w') as fd:
+ fd.write(''.join(lines[:-1]))
+
+ if no_license:
+ self.fail('Recipe does not have the LICENSE field set.')
+
+ def test_lic_files_chksum_presence(self):
+ if not self.added:
+ self.skip('No added recipes, skipping test')
+
+ for pn in self.added:
+ rd = self.tinfoil.parse_recipe(pn)
+ pathname = rd.getVar('FILE')
+ # we are not interested in images
+ if '/images/' in pathname:
+ continue
+ lic_files_chksum = rd.getVar(self.metadata_chksum)
+ if rd.getVar(self.license_var) == self.closed:
+ continue
+ if not lic_files_chksum:
+ self.fail('%s is missing in newly added recipe' % self.metadata_chksum)
+
+ def test_lic_files_chksum_modified_not_mentioned(self):
+ if not self.modified:
+ self.skip('No modified recipes, skipping test')
+
+ for patch in self.patchset:
+ # for the moment, we are just interested in metadata
+ if patch.path.endswith('.patch'):
+ continue
+ payload = str(patch)
+ if (self.lic_chksum_added.search_string(payload) or self.lic_chksum_removed.search_string(payload)):
+ # if any patch on the series contain reference on the metadata, fail
+ for commit in self.commits:
+ if self.lictag_re.search_string(commit.commit_message):
+ break
+ else:
+ self.fail('LIC_FILES_CHKSUM changed without "License-Update:" tag and description in commit message')
+
+ def test_max_line_length(self):
+ for patch in self.patchset:
+ # for the moment, we are just interested in metadata
+ if patch.path.endswith('.patch'):
+ continue
+ payload = str(patch)
+ for line in payload.splitlines():
+ if self.add_mark.search_string(line):
+ current_line_length = len(line[1:])
+ if current_line_length > self.max_length:
+ self.fail('Patch line too long (current length %s, maximum is %s)' % (current_line_length, self.max_length),
+ data=[('Patch', patch.path), ('Line', '%s ...' % line[0:80])])
+
+ def pretest_src_uri_left_files(self):
+ # these tests just make sense on patches that can be merged
+ if not PatchTestInput.repo.canbemerged:
+ self.skip('Patch cannot be merged')
+ if not self.modified:
+ self.skip('No modified recipes, skipping pretest')
+
+ # get the proper metadata values
+ for pn in self.modified:
+ # we are not interested in images
+ if 'core-image' in pn:
+ continue
+ rd = self.tinfoil.parse_recipe(pn)
+ PatchTestDataStore['%s-%s-%s' % (self.shortid(), self.metadata_src_uri, pn)] = rd.getVar(self.metadata_src_uri)
+
+ def test_src_uri_left_files(self):
+ # these tests just make sense on patches that can be merged
+ if not PatchTestInput.repo.canbemerged:
+ self.skip('Patch cannot be merged')
+ if not self.modified:
+ self.skip('No modified recipes, skipping pretest')
+
+ # get the proper metadata values
+ for pn in self.modified:
+ # we are not interested in images
+ if 'core-image' in pn:
+ continue
+ rd = self.tinfoil.parse_recipe(pn)
+ PatchTestDataStore['%s-%s-%s' % (self.shortid(), self.metadata_src_uri, pn)] = rd.getVar(self.metadata_src_uri)
+
+ for pn in self.modified:
+ pretest_src_uri = PatchTestDataStore['pre%s-%s-%s' % (self.shortid(), self.metadata_src_uri, pn)].split()
+ test_src_uri = PatchTestDataStore['%s-%s-%s' % (self.shortid(), self.metadata_src_uri, pn)].split()
+
+ pretest_files = set([os.path.basename(patch) for patch in pretest_src_uri if patch.startswith('file://')])
+ test_files = set([os.path.basename(patch) for patch in test_src_uri if patch.startswith('file://')])
+
+ # check if files were removed
+ if len(test_files) < len(pretest_files):
+
+ # get removals from patchset
+ filesremoved_from_patchset = set()
+ for patch in self.patchset:
+ if patch.is_removed_file:
+ filesremoved_from_patchset.add(os.path.basename(patch.path))
+
+ # get the deleted files from the SRC_URI
+ filesremoved_from_usr_uri = pretest_files - test_files
+
+ # finally, get those patches removed at SRC_URI and not removed from the patchset
+ # TODO: we are not taking into account renames, so test may raise false positives
+ not_removed = filesremoved_from_usr_uri - filesremoved_from_patchset
+ if not_removed:
+ self.fail('Patches not removed from tree. Remove them and amend the submitted mbox',
+ data=[('Patch', f) for f in not_removed])
+
+ def test_summary_presence(self):
+ if not self.added:
+ self.skip('No added recipes, skipping test')
+
+ for pn in self.added:
+ # we are not interested in images
+ if 'core-image' in pn:
+ continue
+ rd = self.tinfoil.parse_recipe(pn)
+ summary = rd.getVar(self.metadata_summary)
+
+ # "${PN} version ${PN}-${PR}" is the default, so fail if default
+ if summary.startswith('%s version' % pn):
+ self.fail('%s is missing in newly added recipe' % self.metadata_summary)
+
+ def test_cve_check_ignore(self):
+ # Skip if we neither modified a recipe or target branches are not
+ # Nanbield and newer. CVE_CHECK_IGNORE was first deprecated in Nanbield.
+ if not self.modified or PatchTestInput.repo.branch == "kirkstone" or PatchTestInput.repo.branch == "dunfell":
+ self.skip('No modified recipes or older target branch, skipping test')
+ for pn in self.modified:
+ # we are not interested in images
+ if 'core-image' in pn:
+ continue
+ rd = self.tinfoil.parse_recipe(pn)
+ cve_check_ignore = rd.getVar(self.cve_check_ignore_var)
+
+ if cve_check_ignore is not None:
+ self.fail('%s is deprecated and should be replaced by %s' % (self.cve_check_ignore_var, self.cve_status_var))
diff --git a/meta/lib/patchtest/tests/test_patch.py b/meta/lib/patchtest/tests/test_patch.py
new file mode 100644
index 0000000000..d7187a0cb1
--- /dev/null
+++ b/meta/lib/patchtest/tests/test_patch.py
@@ -0,0 +1,103 @@
+# Checks related to the patch's CVE lines
+#
+# Copyright (C) 2016 Intel Corporation
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
+import base
+import os
+import parse_signed_off_by
+import parse_upstream_status
+import pyparsing
+
+class TestPatch(base.Base):
+
+ re_cve_pattern = pyparsing.Regex("CVE\-\d{4}\-\d+")
+ re_cve_payload_tag = pyparsing.Regex("\+CVE:(\s+CVE\-\d{4}\-\d+)+")
+ upstream_status_regex = pyparsing.AtLineStart("+" + "Upstream-Status")
+
+ @classmethod
+ def setUpClassLocal(cls):
+ cls.newpatches = []
+ # get just those relevant patches: new software patches
+ for patch in cls.patchset:
+ if patch.path.endswith('.patch') and patch.is_added_file:
+ cls.newpatches.append(patch)
+
+ cls.mark = str(parse_signed_off_by.signed_off_by_mark).strip('"')
+
+ # match PatchSignedOffBy.mark with '+' preceding it
+ cls.prog = parse_signed_off_by.patch_signed_off_by
+
+ def setUp(self):
+ if self.unidiff_parse_error:
+ self.skip('Parse error %s' % self.unidiff_parse_error)
+
+ self.valid_status = ', '.join(parse_upstream_status.upstream_status_nonliteral_valid_status)
+ self.standard_format = 'Upstream-Status: <Valid status>'
+
+ # we are just interested in series that introduce CVE patches, thus discard other
+ # possibilities: modification to current CVEs, patch directly introduced into the
+ # recipe, upgrades already including the CVE, etc.
+ new_cves = [p for p in self.patchset if p.path.endswith('.patch') and p.is_added_file]
+ if not new_cves:
+ self.skip('No new CVE patches introduced')
+
+ def test_upstream_status_presence_format(self):
+ if not TestPatch.newpatches:
+ self.skip("There are no new software patches, no reason to test Upstream-Status presence/format")
+
+ for newpatch in TestPatch.newpatches:
+ payload = newpatch.__str__()
+ if not self.upstream_status_regex.search_string(payload):
+ self.fail('Added patch file is missing Upstream-Status: <Valid status> in the commit message',
+ data=[('Standard format', self.standard_format), ('Valid status', self.valid_status)])
+ for line in payload.splitlines():
+ if self.patchmetadata_regex.match(line):
+ continue
+ if self.upstream_status_regex.search_string(line):
+ if parse_upstream_status.inappropriate_status_mark.searchString(line):
+ try:
+ parse_upstream_status.upstream_status_inappropriate_info.parseString(line.lstrip('+'))
+ except pyparsing.ParseException as pe:
+ self.fail('Upstream-Status is Inappropriate, but no reason was provided',
+ data=[('Current', pe.pstr), ('Standard format', 'Upstream-Status: Inappropriate [reason]')])
+ elif parse_upstream_status.submitted_status_mark.searchString(line):
+ try:
+ parse_upstream_status.upstream_status_submitted_info.parseString(line.lstrip('+'))
+ except pyparsing.ParseException as pe:
+ self.fail('Upstream-Status is Submitted, but it is not mentioned where',
+ data=[('Current', pe.pstr), ('Standard format', 'Upstream-Status: Submitted [where]')])
+ else:
+ try:
+ parse_upstream_status.upstream_status.parseString(line.lstrip('+'))
+ except pyparsing.ParseException as pe:
+ self.fail('Upstream-Status is in incorrect format',
+ data=[('Current', pe.pstr), ('Standard format', self.standard_format), ('Valid status', self.valid_status)])
+
+ def test_signed_off_by_presence(self):
+ if not TestPatch.newpatches:
+ self.skip("There are no new software patches, no reason to test %s presence" % PatchSignedOffBy.mark)
+
+ for newpatch in TestPatch.newpatches:
+ payload = newpatch.__str__()
+ for line in payload.splitlines():
+ if self.patchmetadata_regex.match(line):
+ continue
+ if TestPatch.prog.search_string(payload):
+ break
+ else:
+ self.fail('A patch file has been added without a Signed-off-by tag: \'%s\'' % os.path.basename(newpatch.path))
+
+ def test_cve_tag_format(self):
+ for commit in TestPatch.commits:
+ if self.re_cve_pattern.search_string(commit.shortlog) or self.re_cve_pattern.search_string(commit.commit_message):
+ tag_found = False
+ for line in commit.payload.splitlines():
+ if self.re_cve_payload_tag.search_string(line):
+ tag_found = True
+ break
+ if not tag_found:
+ self.fail('Missing or incorrectly formatted CVE tag in patch file. Correct or include the CVE tag in the patch with format: "CVE: CVE-YYYY-XXXX"',
+ commit=commit)
diff --git a/meta/lib/patchtest/tests/test_python_pylint.py b/meta/lib/patchtest/tests/test_python_pylint.py
new file mode 100644
index 0000000000..ef315e591c
--- /dev/null
+++ b/meta/lib/patchtest/tests/test_python_pylint.py
@@ -0,0 +1,65 @@
+# Checks related to the python code done with pylint
+#
+# Copyright (C) 2016 Intel Corporation
+#
+# SPDX-License-Identifier: GPL-2.0-only
+
+import base
+from io import StringIO
+from data import PatchTestInput
+from pylint.reporters.text import TextReporter
+import pylint.lint as lint
+
+
+class PyLint(base.Base):
+ pythonpatches = []
+ pylint_pretest = {}
+ pylint_test = {}
+ pylint_options = " -E --disable='E0611, E1101, F0401, E0602' --msg-template='L:{line} F:{module} I:{msg}'"
+
+ @classmethod
+ def setUpClassLocal(cls):
+ # get just those patches touching python files
+ cls.pythonpatches = []
+ for patch in cls.patchset:
+ if patch.path.endswith('.py'):
+ if not patch.is_removed_file:
+ cls.pythonpatches.append(patch)
+
+ def setUp(self):
+ if self.unidiff_parse_error:
+ self.skip('Python-unidiff parse error')
+ if not PyLint.pythonpatches:
+ self.skip('No python related patches, skipping test')
+
+ def pretest_pylint(self):
+ for pythonpatch in self.pythonpatches:
+ if pythonpatch.is_modified_file:
+ pylint_output = StringIO()
+ reporter = TextReporter(pylint_output)
+ lint.Run([self.pylint_options, pythonpatch.path], reporter=reporter, exit=False)
+ for line in pylint_output.readlines():
+ if not '*' in line:
+ if line.strip():
+ self.pylint_pretest[line.strip().split(' ',1)[0]] = line.strip().split(' ',1)[1]
+
+ def test_pylint(self):
+ for pythonpatch in self.pythonpatches:
+ # a condition checking whether a file is renamed or not
+ # unidiff doesn't support this yet
+ if pythonpatch.target_file is not pythonpatch.path:
+ path = pythonpatch.target_file[2:]
+ else:
+ path = pythonpatch.path
+ pylint_output = StringIO()
+ reporter = TextReporter(pylint_output)
+ lint.Run([self.pylint_options, pythonpatch.path], reporter=reporter, exit=False)
+ for line in pylint_output.readlines():
+ if not '*' in line:
+ if line.strip():
+ self.pylint_test[line.strip().split(' ',1)[0]] = line.strip().split(' ',1)[1]
+
+ for issue in self.pylint_test:
+ if self.pylint_test[issue] not in self.pylint_pretest.values():
+ self.fail('Errors in your Python code were encountered. Please check your code with a linter and resubmit',
+ data=[('Output', 'Please, fix the listed issues:'), ('', issue + ' ' + self.pylint_test[issue])])
diff --git a/meta/lib/patchtest/utils.py b/meta/lib/patchtest/utils.py
new file mode 100644
index 0000000000..dd0abc22d9
--- /dev/null
+++ b/meta/lib/patchtest/utils.py
@@ -0,0 +1,168 @@
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+#
+# utils: common methods used by the patchtest framework
+#
+# Copyright (C) 2016 Intel Corporation
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
+import os
+import subprocess
+import logging
+import re
+import mailbox
+
+class CmdException(Exception):
+ """ Simple exception class where its attributes are the ones passed when instantiated """
+ def __init__(self, cmd):
+ self._cmd = cmd
+ def __getattr__(self, name):
+ value = None
+ if self._cmd.has_key(name):
+ value = self._cmd[name]
+ return value
+
+def exec_cmd(cmd, cwd, ignore_error=False, input=None, strip=True, updateenv={}):
+ """
+ Input:
+
+ cmd: dict containing the following keys:
+
+ cmd : the command itself as an array of strings
+ ignore_error: if False, no exception is raised
+ strip: indicates if strip is done on the output (stdout and stderr)
+ input: input data to the command (stdin)
+ updateenv: environment variables to be appended to the current
+ process environment variables
+
+ NOTE: keys 'ignore_error' and 'input' are optional; if not included,
+ the defaults are the ones specify in the arguments
+ cwd: directory where commands are executed
+ ignore_error: raise CmdException if command fails to execute and
+ this value is False
+ input: input data (stdin) for the command
+
+ Output: dict containing the following keys:
+
+ cmd: the same as input
+ ignore_error: the same as input
+ strip: the same as input
+ input: the same as input
+ stdout: Standard output after command's execution
+ stderr: Standard error after command's execution
+ returncode: Return code after command's execution
+
+ """
+ cmddefaults = {
+ 'cmd':'',
+ 'ignore_error':ignore_error,
+ 'strip':strip,
+ 'input':input,
+ 'updateenv':updateenv,
+ }
+
+ # update input values if necessary
+ cmddefaults.update(cmd)
+
+ _cmd = cmddefaults
+
+ if not _cmd['cmd']:
+ raise CmdException({'cmd':None, 'stderr':'no command given'})
+
+ # update the environment
+ env = os.environ
+ env.update(_cmd['updateenv'])
+
+ _command = [e for e in _cmd['cmd']]
+ p = subprocess.Popen(_command,
+ stdin=subprocess.PIPE,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE,
+ universal_newlines=True,
+ cwd=cwd,
+ env=env)
+
+ # execute the command and strip output
+ (_stdout, _stderr) = p.communicate(_cmd['input'])
+ if _cmd['strip']:
+ _stdout, _stderr = map(str.strip, [_stdout, _stderr])
+
+ # generate the result
+ result = _cmd
+ result.update({'cmd':_command,'stdout':_stdout,'stderr':_stderr,'returncode':p.returncode})
+
+ # launch exception if necessary
+ if not _cmd['ignore_error'] and p.returncode:
+ raise CmdException(result)
+
+ return result
+
+def exec_cmds(cmds, cwd):
+ """ Executes commands
+
+ Input:
+ cmds: Array of commands
+ cwd: directory where commands are executed
+
+ Output: Array of output commands
+ """
+ results = []
+ _cmds = cmds
+
+ for cmd in _cmds:
+ result = exec_cmd(cmd, cwd)
+ results.append(result)
+
+ return results
+
+def logger_create(name):
+ logger = logging.getLogger(name)
+ loggerhandler = logging.StreamHandler()
+ loggerhandler.setFormatter(logging.Formatter("%(message)s"))
+ logger.addHandler(loggerhandler)
+ logger.setLevel(logging.INFO)
+ return logger
+
+def get_subject_prefix(path):
+ prefix = ""
+ mbox = mailbox.mbox(path)
+
+ if len(mbox):
+ subject = mbox[0]['subject']
+ if subject:
+ pattern = re.compile(r"(\[.*\])", re.DOTALL)
+ match = pattern.search(subject)
+ if match:
+ prefix = match.group(1)
+
+ return prefix
+
+def valid_branch(branch):
+ """ Check if branch is valid name """
+ lbranch = branch.lower()
+
+ invalid = lbranch.startswith('patch') or \
+ lbranch.startswith('rfc') or \
+ lbranch.startswith('resend') or \
+ re.search(r'^v\d+', lbranch) or \
+ re.search(r'^\d+/\d+', lbranch)
+
+ return not invalid
+
+def get_branch(path):
+ """ Get the branch name from mbox """
+ fullprefix = get_subject_prefix(path)
+ branch, branches, valid_branches = None, [], []
+
+ if fullprefix:
+ prefix = fullprefix.strip('[]')
+ branches = [ b.strip() for b in prefix.split(',')]
+ valid_branches = [b for b in branches if valid_branch(b)]
+
+ if len(valid_branches):
+ branch = valid_branches[0]
+
+ return branch
+
diff --git a/meta/lib/rootfspostcommands.py b/meta/lib/rootfspostcommands.py
index fdb9f5b850..5386eea409 100644
--- a/meta/lib/rootfspostcommands.py
+++ b/meta/lib/rootfspostcommands.py
@@ -1,16 +1,19 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: GPL-2.0-only
#
import os
-def sort_file(filename, mapping):
+def sort_shadowutils_file(filename, mapping):
"""
Sorts a passwd or group file based on the numeric ID in the third column.
If a mapping is given, the name from the first column is mapped via that
dictionary instead (necessary for /etc/shadow and /etc/gshadow). If not,
a new mapping is created on the fly and returned.
"""
+
new_mapping = {}
with open(filename, 'rb+') as f:
lines = f.readlines()
@@ -31,30 +34,57 @@ def sort_file(filename, mapping):
# We overwrite the entire file, i.e. no truncate() necessary.
f.seek(0)
f.write(b''.join(lines))
+
return new_mapping
-def remove_backup(filename):
+def sort_shadowutils_files(sysconfdir):
"""
- Removes the backup file for files like /etc/passwd.
+ Sorts shadow-utils 'passwd' and 'group' files in a rootfs' /etc directory
+ by ID.
"""
- backup_filename = filename + '-'
- if os.path.exists(backup_filename):
- os.unlink(backup_filename)
-def sort_passwd(sysconfdir):
- """
- Sorts passwd and group files in a rootfs /etc directory by ID.
- Backup files are sometimes are inconsistent and then cannot be
- sorted (YOCTO #11043), and more importantly, are not needed in
- the initial rootfs, so they get deleted.
- """
for main, shadow in (('passwd', 'shadow'),
('group', 'gshadow')):
filename = os.path.join(sysconfdir, main)
- remove_backup(filename)
if os.path.exists(filename):
- mapping = sort_file(filename, None)
+ mapping = sort_shadowutils_file(filename, None)
filename = os.path.join(sysconfdir, shadow)
- remove_backup(filename)
if os.path.exists(filename):
- sort_file(filename, mapping)
+ sort_shadowutils_file(filename, mapping)
+
+def remove_shadowutils_backup_file(filename):
+ """
+ Remove shadow-utils backup file for files like /etc/passwd.
+ """
+
+ backup_filename = filename + '-'
+ if os.path.exists(backup_filename):
+ os.unlink(backup_filename)
+
+def remove_shadowutils_backup_files(sysconfdir):
+ """
+ Remove shadow-utils backup files in a rootfs /etc directory. They are not
+ needed in the initial root filesystem and sorting them can be inconsistent
+ (YOCTO #11043).
+ """
+
+ for filename in (
+ 'group',
+ 'gshadow',
+ 'passwd',
+ 'shadow',
+ 'subgid',
+ 'subuid',
+ ):
+ filepath = os.path.join(sysconfdir, filename)
+ remove_shadowutils_backup_file(filepath)
+
+def tidy_shadowutils_files(sysconfdir):
+ """
+ Tidy up shadow-utils files.
+ """
+
+ remove_shadowutils_backup_files(sysconfdir)
+ sort_shadowutils_files(sysconfdir)
+
+ return True
diff --git a/meta/recipes-bsp/acpid/acpid.inc b/meta/recipes-bsp/acpid/acpid.inc
index 98910bab29..7b2f1c71c5 100644
--- a/meta/recipes-bsp/acpid/acpid.inc
+++ b/meta/recipes-bsp/acpid/acpid.inc
@@ -13,7 +13,8 @@ LICENSE = "GPL-2.0-or-later"
SRC_URI = "${SOURCEFORGE_MIRROR}/acpid2/acpid-${PV}.tar.xz \
file://init \
file://acpid.service \
- "
+ file://0001-Replace-stat64-with-stat.patch \
+ "
CVE_PRODUCT = "acpid2"
diff --git a/meta/recipes-bsp/acpid/acpid/0001-Replace-stat64-with-stat.patch b/meta/recipes-bsp/acpid/acpid/0001-Replace-stat64-with-stat.patch
new file mode 100644
index 0000000000..10abfc8388
--- /dev/null
+++ b/meta/recipes-bsp/acpid/acpid/0001-Replace-stat64-with-stat.patch
@@ -0,0 +1,31 @@
+From 4b729235a9e96f120feee7e3746818aad0f3b924 Mon Sep 17 00:00:00 2001
+From: Khem Raj <raj.khem@gmail.com>
+Date: Wed, 14 Dec 2022 15:04:30 -0800
+Subject: [PATCH] Replace stat64 with stat
+
+It already checks for largefile support in configure.ac via
+AC_SYS_LARGEFILE macro, which will ensure that 64bit elements
+are correctly setup for stat APIs on platforms needing large
+file support.
+
+Upstream-Status: Submitted [https://sourceforge.net/p/acpid2/code/merge-requests/5/]
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+---
+ sock.c | 4 ++--
+ 1 file changed, 2 insertions(+), 2 deletions(-)
+
+diff --git a/sock.c b/sock.c
+index 9e04501..3121fb7 100644
+--- a/sock.c
++++ b/sock.c
+@@ -54,8 +54,8 @@ int non_root_clients;
+ static int
+ isfdtype(int fd, int fdtype)
+ {
+- struct stat64 st;
+- if (fstat64(fd, &st) != 0)
++ struct stat st;
++ if (fstat(fd, &st) != 0)
+ return -1;
+ return ((st.st_mode & S_IFMT) == (mode_t)fdtype);
+ }
diff --git a/meta/recipes-bsp/acpid/acpid_2.0.33.bb b/meta/recipes-bsp/acpid/acpid_2.0.33.bb
deleted file mode 100644
index 7094ba2662..0000000000
--- a/meta/recipes-bsp/acpid/acpid_2.0.33.bb
+++ /dev/null
@@ -1,6 +0,0 @@
-require acpid.inc
-
-LIC_FILES_CHKSUM = "file://COPYING;md5=8ca43cbc842c2336e835926c2166c28b \
- file://acpid.h;endline=24;md5=324a9cf225ae69ddaad1bf9d942115b5"
-
-SRC_URI[sha256sum] = "0856f71b3eb34a1b663d0a8e6363dfcbc519e63d847330498898658e2972dbe8"
diff --git a/meta/recipes-bsp/acpid/acpid_2.0.34.bb b/meta/recipes-bsp/acpid/acpid_2.0.34.bb
new file mode 100644
index 0000000000..1e0a6d5f24
--- /dev/null
+++ b/meta/recipes-bsp/acpid/acpid_2.0.34.bb
@@ -0,0 +1,6 @@
+require acpid.inc
+
+LIC_FILES_CHKSUM = "file://COPYING;md5=8ca43cbc842c2336e835926c2166c28b \
+ file://acpid.h;endline=24;md5=324a9cf225ae69ddaad1bf9d942115b5"
+
+SRC_URI[sha256sum] = "2d095c8cfcbc847caec746d62cdc8d0bff1ec1bc72ef7c674c721e04da6ab333"
diff --git a/meta/recipes-bsp/alsa-state/alsa-state.bb b/meta/recipes-bsp/alsa-state/alsa-state.bb
index df546633f1..bd7f610f46 100644
--- a/meta/recipes-bsp/alsa-state/alsa-state.bb
+++ b/meta/recipes-bsp/alsa-state/alsa-state.bb
@@ -8,10 +8,12 @@ SUMMARY = "Alsa scenario files to enable alsa state restoration"
HOMEPAGE = "http://www.alsa-project.org/"
DESCRIPTION = "Alsa Scenario Files - an init script and state files to restore \
sound state at system boot and save it at system shut down."
-LICENSE = "MIT"
-LIC_FILES_CHKSUM = "file://${COREBASE}/meta/COPYING.MIT;md5=3da9cfbcb788c80a0384361b4de20420"
+LICENSE = "MIT & GPL-2.0-or-later"
+LIC_FILES_CHKSUM = " \
+ file://${COREBASE}/meta/COPYING.MIT;md5=3da9cfbcb788c80a0384361b4de20420 \
+ file://alsa-state-init;beginline=3;endline=4;md5=3ff7ecbf534d7d503941abe8e268ef50 \
+"
PV = "0.2.0"
-PR = "r5"
SRC_URI = "\
file://asound.conf \
diff --git a/meta/recipes-bsp/alsa-state/alsa-state/alsa-state-init b/meta/recipes-bsp/alsa-state/alsa-state/alsa-state-init
index eee59cb321..a04cc27004 100755
--- a/meta/recipes-bsp/alsa-state/alsa-state/alsa-state-init
+++ b/meta/recipes-bsp/alsa-state/alsa-state/alsa-state-init
@@ -1,10 +1,9 @@
#! /bin/sh
#
# Copyright Matthias Hentges <devel@hentges.net> (c) 2007
-# License: GPL (see http://www.gnu.org/licenses/gpl.txt for a copy of the license)
+# SPDX-License-Identifier: GPL-2.0-or-later
#
# Filename: alsa-state
-# Date: 20070308 (YMD)
# source function library
. /etc/init.d/functions
diff --git a/meta/recipes-bsp/apmd/apmd/apmd.service b/meta/recipes-bsp/apmd/apmd/apmd.service
deleted file mode 100644
index ffab82334f..0000000000
--- a/meta/recipes-bsp/apmd/apmd/apmd.service
+++ /dev/null
@@ -1,7 +0,0 @@
-[Unit]
-Description=Advanced Power Management daemon
-After=remote-fs.target
-
-[Service]
-EnvironmentFile=-@SYSCONFDIR@/default/apmd
-ExecStart=@SBINDIR@/apmd -P @SYSCONFDIR@/apm/apmd_proxy $APMD
diff --git a/meta/recipes-bsp/apmd/apmd/apmd_proxy b/meta/recipes-bsp/apmd/apmd/apmd_proxy
deleted file mode 100644
index c48ee4e5d5..0000000000
--- a/meta/recipes-bsp/apmd/apmd/apmd_proxy
+++ /dev/null
@@ -1,91 +0,0 @@
-#!/bin/sh
-#
-# apmd_proxy - program dispatcher for APM daemon
-#
-# Written by Craig Markwardt (craigm@lheamail.gsfc.nasa.gov) 21 May 1999
-# Modified for Debian by Avery Pennarun
-#
-# This shell script is called by the APM daemon (apmd) when a power
-# management event occurs. Its first and second arguments describe the
-# event. For example, apmd will call "apmd_proxy suspend system" just
-# before the system is suspended.
-#
-# Here are the possible arguments:
-#
-# start - APM daemon has started
-# stop - APM daemon is shutting down
-# suspend critical - APM system indicates critical suspend (++)
-# suspend system - APM system has requested suspend mode
-# suspend user - User has requested suspend mode
-# standby system - APM system has requested standby mode
-# standby user - User has requested standby mode
-# resume suspend - System has resumed from suspend mode
-# resume standby - System has resumed from standby mode
-# resume critical - System has resumed from critical suspend
-# change battery - APM system reported low battery
-# change power - APM system reported AC/battery change
-# change time - APM system reported time change (*)
-# change capability - APM system reported config. change (+)
-#
-# (*) - APM daemon may be configured to not call these sequences
-# (+) - Available if APM kernel supports it.
-# (++) - "suspend critical" is never passed to apmd from the kernel,
-# so we will never see it here. Scripts that process "resume
-# critical" events need to take this into account.
-#
-# It is the proxy script's responsibility to examine the APM status
-# (via /proc/apm) or other status and to take appropriate actions.
-# For example, the script might unmount network drives before the
-# machine is suspended.
-#
-# In Debian, the usual way of adding functionality to the proxy is to
-# add a script to /etc/apm/event.d. This script will be called by
-# apmd_proxy (via run-parts) with the same arguments.
-#
-# If it is important that a certain set of script be run in a certain
-# order on suspend and in a different order on resume, then put all
-# the scripts in /etc/apm/scripts.d instead of /etc/apm/event.d and
-# symlink to these from /etc/apm/suspend.d, /etc/apm/resume.d and
-# /etc/apm/other.d using names whose lexicographical order is the same
-# as the desired order of execution.
-#
-# If the kernel's APM driver supports it, apmd_proxy can return a non-zero
-# exit status on suspend and standby events, indicating that the suspend
-# or standby event should be rejected.
-#
-# *******************************************************************
-
-set -e
-
-# The following doesn't yet work, because current kernels (up to at least
-# 2.4.20) do not support rejection of APM events. Supporting this would
-# require substantial modifications to the APM driver. We will re-enable
-# this feature if the driver is ever modified. -- cph@debian.org
-#
-#SUSPEND_ON_AC=false
-#[ -r /etc/apm/apmd_proxy.conf ] && . /etc/apm/apmd_proxy.conf
-#
-#if [ "${SUSPEND_ON_AC}" = "false" -a "${2}" = "system" ] \
-# && on_ac_power >/dev/null; then
-# # Reject system suspends and standbys if we are on AC power
-# exit 1 # Reject (NOTE kernel support must be enabled)
-#fi
-
-if [ "${1}" = "suspend" -o "${1}" = "standby" ]; then
- run-parts -a "${1}" -a "${2}" /etc/apm/event.d
- if [ -d /etc/apm/suspend.d ]; then
- run-parts -a "${1}" -a "${2}" /etc/apm/suspend.d
- fi
-elif [ "${1}" = "resume" ]; then
- if [ -d /etc/apm/resume.d ]; then
- run-parts -a "${1}" -a "${2}" /etc/apm/resume.d
- fi
- run-parts -a "${1}" -a "${2}" /etc/apm/event.d
-else
- run-parts -a "${1}" -a "${2}" /etc/apm/event.d
- if [ -d /etc/apm/other.d ]; then
- run-parts -a "${1}" -a "${2}" /etc/apm/other.d
- fi
-fi
-
-exit 0
diff --git a/meta/recipes-bsp/apmd/apmd/apmd_proxy.conf b/meta/recipes-bsp/apmd/apmd/apmd_proxy.conf
deleted file mode 100644
index 751145c522..0000000000
--- a/meta/recipes-bsp/apmd/apmd/apmd_proxy.conf
+++ /dev/null
@@ -1,16 +0,0 @@
-# /etc/apm/apmd_proxy.conf: configuration file for apmd.
-#
-# This file is managed by debconf when installing or reconfiguring the
-# package. It is generated by merging the answers gathered by debconf
-# into the template file "/usr/share/apmd/apmd_proxy.conf".
-
-# The following doesn't yet work, because current kernels (up to at least
-# 2.4.20) do not support rejection of APM events. Supporting this would
-# require substantial modifications to the APM driver. We will re-enable
-# this feature if the driver is ever modified. -- cph@debian.org
-#
-# Set the following to "false" if you want to reject system suspend or
-# system standby requests when the computer is running on AC power.
-# Otherwise set this to "true". Such requests are never rejected when
-# the computer is running on battery power.
-#SUSPEND_ON_AC=true
diff --git a/meta/recipes-bsp/apmd/apmd/default b/meta/recipes-bsp/apmd/apmd/default
deleted file mode 100644
index 4b7965abf8..0000000000
--- a/meta/recipes-bsp/apmd/apmd/default
+++ /dev/null
@@ -1,8 +0,0 @@
-#
-# Default for /etc/init.d/apmd
-#
-
-# As apmd can be called with arguments, we use the following variable
-# to store them, e.g., APMD="-w 5 -p 2".
-# See the manual page apmd(8) for details.
-APMD="--proxy-timeout 30"
diff --git a/meta/recipes-bsp/apmd/apmd/init b/meta/recipes-bsp/apmd/apmd/init
deleted file mode 100755
index c0b41aa9d1..0000000000
--- a/meta/recipes-bsp/apmd/apmd/init
+++ /dev/null
@@ -1,51 +0,0 @@
-#!/bin/sh
-### BEGIN INIT INFO
-# Provides: apmd
-# Required-Start: $remote_fs
-# Required-Stop: $remote_fs
-# Default-Start: 2 3 4 5
-# Default-Stop: 0 1 6
-# Short-Description: Advanced Power Management daemon
-### END INIT INFO
-
-# Source function library.
-. /etc/init.d/functions
-
-PATH=/bin:/usr/bin:/sbin:/usr/sbin
-
-[ -f /etc/default/rcS ] && . /etc/default/rcS
-[ -f /etc/default/apmd ] && . /etc/default/apmd
-
-case "$1" in
- start)
- echo -n "Starting advanced power management daemon: "
- start-stop-daemon -S -x /usr/sbin/apmd -- \
- -P /etc/apm/apmd_proxy $APMD
- if [ $? = 0 ]; then
- echo "apmd."
- else
- echo "(failed.)"
- fi
- ;;
- stop)
- echo -n "Stopping advanced power management daemon: "
- start-stop-daemon -K \
- -x /usr/sbin/apmd
- echo "apmd."
- ;;
- status)
- status /usr/sbin/apmd;
- exit $?
- ;;
- restart|force-reload)
- $0 stop
- $0 start
- exit
- ;;
- *)
- echo "Usage: /etc/init.d/apmd {start|stop|status|restart|force-reload}"
- exit 1
- ;;
-esac
-
-exit 0
diff --git a/meta/recipes-bsp/apmd/apmd/legacy.patch b/meta/recipes-bsp/apmd/apmd/legacy.patch
deleted file mode 100644
index 8871311805..0000000000
--- a/meta/recipes-bsp/apmd/apmd/legacy.patch
+++ /dev/null
@@ -1,133 +0,0 @@
-From 3595933d221f0ba836917debc0776b8723972ec9 Mon Sep 17 00:00:00 2001
-From: Alexander Kanavin <alex.kanavin@gmail.com>
-Date: Tue, 11 Aug 2015 17:40:50 +0300
-Subject: [PATCH 1/3] Patch with fixes provided by Debian.
-
-This patch is taken from
-ftp://ftp.debian.org/debian/pool/main/a/apmd/apmd_3.2.2-15.debian.tar.xz
-
-Upstream-Status: Inappropriate [upstream is dead]
-Signed-off-by: Alexander Kanavin <alex.kanavin@gmail.com>
-
----
- Makefile | 2 +-
- apm.c | 3 ++-
- apm.h | 9 +++++++++
- apmd.c | 15 ++++++++-------
- 4 files changed, 20 insertions(+), 9 deletions(-)
-
-diff --git a/Makefile b/Makefile
-index bf346d9..92fc0fd 100644
---- a/Makefile
-+++ b/Makefile
-@@ -43,7 +43,7 @@ DESTDIR=
-
- CC=gcc
- CFLAGS=-O -g
--XTRACFLAGS=-Wall -pipe -I. -I/usr/src/linux/include \
-+XTRACFLAGS=-Wall -pipe -I. -I/usr/src/linux/include -I/usr/X11R6/include \
- -I/usr/src/linux-2.2/include -I /usr/src/linux-2.0/include \
- -DVERSION=\"$(VERSION)\" \
- -DDEFAULT_PROXY_NAME=\"$(PROXY_DIR)/apmd_proxy\"
-diff --git a/apm.c b/apm.c
-index b21c057..0359b1c 100644
---- a/apm.c
-+++ b/apm.c
-@@ -219,12 +219,13 @@ int main(int argc, char **argv)
- }
- }
-
--
-+#if 0
- if (!(i.apm_flags & APM_32_BIT_SUPPORT))
- {
- fprintf(stderr, "32-bit APM interface not supported\n");
- exit(1);
- }
-+#endif
-
- if (verbose && (i.apm_flags & 0x10))
- printf("APM BIOS Power Management is currently disabled\n");
-diff --git a/apm.h b/apm.h
-index fb24dfd..824cc06 100644
---- a/apm.h
-+++ b/apm.h
-@@ -20,6 +20,13 @@
- * $Id: apm.h,v 1.7 1999/07/05 22:31:11 apenwarr Exp $
- *
- */
-+#ifndef _APM_H
-+#define _APM_H 1
-+
-+#ifndef __KERNEL_STRICT_NAMES
-+#define __KERNEL_STRICT_NAMES
-+#endif
-+
- #include <linux/apm_bios.h>
- #include <sys/types.h>
-
-@@ -93,3 +100,5 @@ extern int apm_reject(int fd);
- #else
- #define apm_reject(fd) (-EINVAL)
- #endif
-+
-+#endif
-diff --git a/apmd.c b/apmd.c
-index 49ed3a1..560f536 100644
---- a/apmd.c
-+++ b/apmd.c
-@@ -343,7 +343,7 @@ static int call_proxy(apm_event_t event)
- /* parent */
- int status, retval;
- ssize_t len;
-- time_t time_limit;
-+ time_t countdown;
-
- if (pid < 0) {
- /* Couldn't fork */
-@@ -356,8 +356,9 @@ static int call_proxy(apm_event_t event)
- /* Capture the child's output, if any, but only until it terminates */
- close(fds[1]);
- fcntl(fds[0], F_SETFL, O_RDONLY|O_NONBLOCK);
-- time_limit = time(0) + proxy_timeout;
-+ countdown = proxy_timeout;
- do {
-+ countdown -= 1;
- while ((len = read(fds[0], line, sizeof(line)-1)) > 0) {
- line[len] = 0;
- APMD_SYSLOG(LOG_INFO, "+ %s", line);
-@@ -372,16 +373,16 @@ static int call_proxy(apm_event_t event)
- goto proxy_done;
- }
-
-- sleep(1);
-+ while (sleep(1) > 0) ;
- } while (
-- (time(0) < time_limit)
-+ (countdown >= 0)
- || (proxy_timeout < 0)
- );
-
- APMD_SYSLOG(LOG_NOTICE, "Proxy has been running more than %d seconds; killing it", proxy_timeout);
-
- kill(pid, SIGTERM);
-- time_limit = time(0) + 5;
-+ countdown = 5;
- do {
- retval = waitpid(pid, &status, WNOHANG);
- if (retval == pid)
-@@ -392,9 +393,9 @@ static int call_proxy(apm_event_t event)
- goto proxy_done;
- }
-
-- sleep(1);
-+ while (sleep(1) > 0) ;
-
-- } while (time(0) < time_limit);
-+ } while (countdown >= 0);
-
- kill(pid, SIGKILL);
- status = __W_EXITCODE(0, SIGKILL);
---
-2.1.4
-
diff --git a/meta/recipes-bsp/apmd/apmd/libtool.patch b/meta/recipes-bsp/apmd/apmd/libtool.patch
deleted file mode 100644
index fd0a952890..0000000000
--- a/meta/recipes-bsp/apmd/apmd/libtool.patch
+++ /dev/null
@@ -1,41 +0,0 @@
-From d5dde7ca91a5aed273d8fe269e1a5194e85c8c79 Mon Sep 17 00:00:00 2001
-From: Scott Garman <scott.a.garman@intel.com>
-Date: Tue, 13 Jul 2010 16:46:46 +0800
-Subject: [PATCH] apmd: upgrade to 3.2.2-14
-
-Add by RP to address "unable to infer tagged configuration" error:
- commit 35de05e61b88c0808a5e885bb0efdf420555d5ad
- Author: Richard Purdie <rpurdie@rpsys.net>
- Date: Sun Jun 1 16:13:38 2008 +0000
-
- apmd: Use libtool --tag options to avoid problems with libtool 2.2.4 (from poky)
-
-However I didn't see same issue with current libtool-2.2.10. Also per my understanding,
-the default tag, if not specified, falls back to CC. So disable it from patching, but
-keep it here. If we encounter similar issue in the future, we could then push upstream
-
-Comment added by Kevin Tian <kevin.tian@intel.com>, 2010-07-16
-
-Upstream-Status: Pending
-
-Signed-off-by: Scott Garman <scott.a.garman@intel.com>
-
----
- Makefile | 4 ++--
- 1 file changed, 2 insertions(+), 2 deletions(-)
-
-diff --git a/Makefile b/Makefile
-index 92fc0fd..8e283dc 100644
---- a/Makefile
-+++ b/Makefile
-@@ -59,8 +59,8 @@ RANLIB=ranlib
- #LDFLAGS=-s
-
- LIBTOOL=libtool --quiet
--LT_COMPILE = $(LIBTOOL) --mode=compile $(CC)
--LT_LINK = $(LIBTOOL) --mode=link $(CC)
-+LT_COMPILE = $(LIBTOOL) --tag=CC --mode=compile $(CC)
-+LT_LINK = $(LIBTOOL) --tag=CC --mode=link $(CC)
- LT_INSTALL = $(LIBTOOL) --mode=install install
- LT_CLEAN = $(LIBTOOL) --mode=clean rm
-
diff --git a/meta/recipes-bsp/apmd/apmd/linkage.patch b/meta/recipes-bsp/apmd/apmd/linkage.patch
deleted file mode 100644
index 3d32c49cd2..0000000000
--- a/meta/recipes-bsp/apmd/apmd/linkage.patch
+++ /dev/null
@@ -1,53 +0,0 @@
-When building use the libtool intermediate .lo files instead of explicitly using
-the .o files. Under libtool foo.lo is the libtool intermediate wrapper, foo.o is
-a static build, and .libs/foo.o is a shared build.
-
-If static libraries have been disabled globally then libtool won't generate them
-and explicit references to foo.o won't be satisfied.
-
-Upstream-Status: Pending
-Signed-off-by: Ross Burton <ross.burton@intel.com>
-
-diff --git a/Makefile b/Makefile
-index bb695c6..5f60146 100644
---- a/Makefile
-+++ b/Makefile
-@@ -28,7 +28,7 @@ endif
-
- .SUFFIXES:
-
--OBJS=apmlib.o
-+OBJS=apmlib.lo
- EXES=apm apmd xapm apmsleep
- HEADERS=apm.h
-
-@@ -66,22 +66,22 @@ all: $(EXES)
-
- $(OBJS): $(HEADERS)
-
--%.o: %.c
-+%.lo: %.c
- $(LT_COMPILE) -c $(CPPFLAGS) $(CFLAGS) $(XTRACFLAGS) $<
-
--%: %.o $(LIBAPM)
-+%: %.lo $(LIBAPM)
- $(LT_LINK) -o $@ $< $(LDFLAGS) $(LIBAPM)
-
--xapm.o: xapm.c
-+xapm.lo: xapm.c
- $(LT_COMPILE) -c $(CPPFLAGS) $(CFLAGS) $(XTRACFLAGS) -DNARROWPROTO $<
-
--apmd: apmd.o
-+apmd: apmd.lo
-
--apmsleep: apmsleep.o
-+apmsleep: apmsleep.lo
-
--apmexists: apmexists.o
-+apmexists: apmexists.lo
-
--xapm: xapm.o $(LIBAPM)
-+xapm: xapm.lo $(LIBAPM)
- $(LT_LINK) -o $@ $< $(LDFLAGS) $(LIBAPM) $(XLDFLAGS) $(XLIBS)
-
- $(LIBAPM): apmlib.lo
diff --git a/meta/recipes-bsp/apmd/apmd/unlinux.patch b/meta/recipes-bsp/apmd/apmd/unlinux.patch
deleted file mode 100644
index ec8206cf17..0000000000
--- a/meta/recipes-bsp/apmd/apmd/unlinux.patch
+++ /dev/null
@@ -1,25 +0,0 @@
-copy OE commit message here:
- commit 9456cdc1cf43e3ba9e6d88c63560c1b6fdee4359
- Author: Michael Krelin <hacker@klever.net>
- Date: Tue May 29 12:27:45 2007 +0000
-
- apmd: prevent build from interferring with host kernel headers. Closes #1257
-
-comment added by Kevin Tian <kevin.tian@intel.com>, 2010-07-13
-
-Upstream-Status: Pending
-
-Signed-off-by: Scott Garman <scott.a.garman@intel.com>
-
---- apmd-3.2.2.orig/Makefile
-+++ apmd-3.2.2/Makefile
-@@ -43,8 +43,7 @@
-
- CC=gcc
- CFLAGS=-O -g
--XTRACFLAGS=-Wall -pipe -I. -I/usr/src/linux/include -I/usr/X11R6/include \
-- -I/usr/src/linux-2.2/include -I /usr/src/linux-2.0/include \
-+XTRACFLAGS=-Wall -pipe -I. \
- -DVERSION=\"$(VERSION)\" \
- -DDEFAULT_PROXY_NAME=\"$(PROXY_DIR)/apmd_proxy\"
- LDFLAGS=
diff --git a/meta/recipes-bsp/apmd/apmd/wexitcode.patch b/meta/recipes-bsp/apmd/apmd/wexitcode.patch
deleted file mode 100644
index c5faa85fa7..0000000000
--- a/meta/recipes-bsp/apmd/apmd/wexitcode.patch
+++ /dev/null
@@ -1,26 +0,0 @@
-Define non-posix W* funcitons
-
-C libraries like musl dont define them
-
-Signed-off-by: Khem Raj <raj.khem@gmail.com>
-Upstream-Status: Pending
-
-Index: apmd-3.2.2.orig/apmd.c
-===================================================================
---- apmd-3.2.2.orig.orig/apmd.c
-+++ apmd-3.2.2.orig/apmd.c
-@@ -55,6 +55,14 @@
- #define MINIMUM_RATE_CALC_TIME 120
- #endif
-
-+#ifndef _POSIX_SOURCE
-+
-+#define __WCOREFLAG 0200
-+#define __WCOREDUMP(x) (_W_INT(x) & __WCOREFLAG)
-+#define __W_EXITCODE(ret, sig) ((ret) << 8 | (sig))
-+
-+#endif
-+
- /*
- * For the verbosity level feature to be useful,
- * we rely on the fact that syslog.h assigns adjacent
diff --git a/meta/recipes-bsp/apmd/apmd_3.2.2-15.bb b/meta/recipes-bsp/apmd/apmd_3.2.2-15.bb
deleted file mode 100644
index 92c35c9896..0000000000
--- a/meta/recipes-bsp/apmd/apmd_3.2.2-15.bb
+++ /dev/null
@@ -1,85 +0,0 @@
-SUMMARY = "Utilities for Advanced Power Management"
-DESCRIPTION = "The Advanced Power Management (APM) support provides \
-access to battery status information and a set of tools for managing \
-notebook power consumption."
-HOMEPAGE = "http://apenwarr.ca/apmd/"
-SECTION = "base"
-LICENSE = "GPL-2.0-or-later"
-LIC_FILES_CHKSUM = "file://COPYING;md5=94d55d512a9ba36caa9b7df079bae19f \
- file://apm.h;beginline=6;endline=18;md5=7d4acc1250910a89f84ce3cc6557c4c2"
-DEPENDS = "libtool-cross"
-
-SRC_URI = "http://snapshot.debian.org/archive/debian/20160728T043443Z/pool/main/a/${BPN}/${BPN}_3.2.2.orig.tar.gz;name=tarball \
- file://legacy.patch \
- file://libtool.patch \
- file://unlinux.patch \
- file://wexitcode.patch \
- file://linkage.patch \
- file://init \
- file://default \
- file://apmd_proxy \
- file://apmd_proxy.conf \
- file://apmd.service"
-
-SRC_URI[tarball.md5sum] = "b1e6309e8331e0f4e6efd311c2d97fa8"
-SRC_URI[tarball.sha256sum] = "7f7d9f60b7766b852881d40b8ff91d8e39fccb0d1d913102a5c75a2dbb52332d"
-
-# for this package we're mostly interested in tracking debian patches,
-# and not in the upstream version where all development has effectively stopped
-UPSTREAM_CHECK_URI = "${DEBIAN_MIRROR}/main/a/apmd/"
-UPSTREAM_CHECK_REGEX = "(?P<pver>((\d+\.*)+)-((\d+\.*)+))\.(diff|debian\.tar)\.(gz|xz)"
-
-S = "${WORKDIR}/apmd-3.2.2.orig"
-
-inherit update-rc.d systemd
-
-INITSCRIPT_NAME = "apmd"
-INITSCRIPT_PARAMS = "defaults"
-
-SYSTEMD_SERVICE:${PN} = "apmd.service"
-SYSTEMD_AUTO_ENABLE = "disable"
-
-EXTRA_OEMAKE = "-e MAKEFLAGS="
-
-do_compile() {
- # apmd doesn't use whole autotools. Just libtool for installation
- oe_runmake apm apmd
-}
-
-do_install() {
- install -d ${D}${sysconfdir}
- install -d ${D}${sysconfdir}/apm
- install -d ${D}${sysconfdir}/apm/event.d
- install -d ${D}${sysconfdir}/apm/other.d
- install -d ${D}${sysconfdir}/apm/suspend.d
- install -d ${D}${sysconfdir}/apm/resume.d
- install -d ${D}${sysconfdir}/apm/scripts.d
- install -d ${D}${sysconfdir}/default
- install -d ${D}${sysconfdir}/init.d
- install -d ${D}${sbindir}
- install -d ${D}${bindir}
- install -d ${D}${libdir}
- install -d ${D}${datadir}/apmd
- install -d ${D}${includedir}
-
- install -m 4755 ${S}/.libs/apm ${D}${bindir}/apm
- install -m 0755 ${S}/.libs/apmd ${D}${sbindir}/apmd
- install -m 0755 ${WORKDIR}/apmd_proxy ${D}${sysconfdir}/apm/
- install -m 0644 ${WORKDIR}/apmd_proxy.conf ${D}${datadir}/apmd/
- install -m 0644 ${WORKDIR}/default ${D}${sysconfdir}/default/apmd
- oe_libinstall -so libapm ${D}${libdir}
- install -m 0644 apm.h ${D}${includedir}
-
- sed -e 's,/usr/sbin,${sbindir},g; s,/etc,${sysconfdir},g;' ${WORKDIR}/init > ${D}${sysconfdir}/init.d/apmd
- chmod 755 ${D}${sysconfdir}/init.d/apmd
-
- install -d ${D}${systemd_system_unitdir}
- install -m 0644 ${WORKDIR}/apmd.service ${D}${systemd_system_unitdir}/
- sed -i -e 's,@SYSCONFDIR@,${sysconfdir},g' \
- -e 's,@SBINDIR@,${sbindir},g' ${D}${systemd_system_unitdir}/apmd.service
-}
-
-PACKAGES =+ "libapm apm"
-
-FILES:libapm = "${libdir}/libapm${SOLIBS}"
-FILES:apm = "${bindir}/apm*"
diff --git a/meta/recipes-bsp/efibootmgr/efibootmgr/0001-remove-extra-decl.patch b/meta/recipes-bsp/efibootmgr/efibootmgr/0001-remove-extra-decl.patch
deleted file mode 100644
index 42f3a8182d..0000000000
--- a/meta/recipes-bsp/efibootmgr/efibootmgr/0001-remove-extra-decl.patch
+++ /dev/null
@@ -1,31 +0,0 @@
-From 99b578501643377e0b1994b2a068b790d189d5ad Mon Sep 17 00:00:00 2001
-From: Peter Jones <pjones@redhat.com>
-Date: Wed, 13 Jun 2018 09:41:01 -0400
-Subject: [PATCH] remove extra decl
-
-Signed-off-by: Peter Jones <pjones@redhat.com>
-
-Upstream-Status: Backport [git://github.com/rhinstaller/efibootmgr.git]
-Signed-off-by: Hongxu Jia <hongxu.jia@windriver.com>
-
----
- src/efibootmgr.c | 3 ---
- 1 file changed, 3 deletions(-)
-
-diff --git a/src/efibootmgr.c b/src/efibootmgr.c
-index de38f01..4e1a680 100644
---- a/src/efibootmgr.c
-+++ b/src/efibootmgr.c
-@@ -1536,9 +1536,6 @@ parse_opts(int argc, char **argv)
- "invalid numeric value %s\n",
- optarg);
- }
-- /* XXX efivar-36 accidentally doesn't have a public
-- * header for this */
-- extern int efi_set_verbose(int verbosity, FILE *errlog);
- efi_set_verbose(opts.verbose - 2, stderr);
- break;
- case 'V':
---
-2.7.4
-
diff --git a/meta/recipes-bsp/efibootmgr/efibootmgr/0001-src-make-compatible-with-efivar-38.patch b/meta/recipes-bsp/efibootmgr/efibootmgr/0001-src-make-compatible-with-efivar-38.patch
deleted file mode 100644
index f8d912391e..0000000000
--- a/meta/recipes-bsp/efibootmgr/efibootmgr/0001-src-make-compatible-with-efivar-38.patch
+++ /dev/null
@@ -1,47 +0,0 @@
-From 07f080184d067c1ebc3fec1b53dd4a06d1a2566a Mon Sep 17 00:00:00 2001
-From: Alexander Kanavin <alex@linutronix.de>
-Date: Mon, 17 Jan 2022 23:24:34 +0100
-Subject: [PATCH] src: make compatible with efivar 38
-
-Upstream-Status: Backport
-Signed-off-by: Alexander Kanavin <alex@linutronix.de>
----
- src/efibootdump.c | 2 +-
- src/efibootmgr.c | 4 ++--
- 2 files changed, 3 insertions(+), 3 deletions(-)
-
-diff --git a/src/efibootdump.c b/src/efibootdump.c
-index eceffd6..09bd76e 100644
---- a/src/efibootdump.c
-+++ b/src/efibootdump.c
-@@ -69,7 +69,7 @@ print_boot_entry(efi_load_option *loadopt, size_t data_size)
- text_path = alloca(text_path_len);
- if (!text_path)
- error(100, "Couldn't allocate memory");
-- rc = efidp_format_device_path(text_path, text_path_len,
-+ rc = efidp_format_device_path((unsigned char *)text_path, text_path_len,
- dp, pathlen);
- if (rc < 0) {
- printf("<bad device path>");
-diff --git a/src/efibootmgr.c b/src/efibootmgr.c
-index 4e1a680..b77b1fb 100644
---- a/src/efibootmgr.c
-+++ b/src/efibootmgr.c
-@@ -949,7 +949,7 @@ show_vars(const char *prefix)
- pathlen = efi_loadopt_pathlen(load_option,
- boot->data_size);
- dp = efi_loadopt_path(load_option, boot->data_size);
-- rc = efidp_format_device_path(text_path, text_path_len,
-+ rc = efidp_format_device_path((unsigned char *)text_path, text_path_len,
- dp, pathlen);
- if (rc < 0)
- error(18, "Could not parse device path");
-@@ -960,7 +960,7 @@ show_vars(const char *prefix)
- if (!text_path)
- error(19, "Could not parse device path");
-
-- rc = efidp_format_device_path(text_path, text_path_len,
-+ rc = efidp_format_device_path((unsigned char *)text_path, text_path_len,
- dp, pathlen);
- if (rc < 0)
- error(20, "Could not parse device path");
diff --git a/meta/recipes-bsp/efibootmgr/efibootmgr/97668ae0bce776a36ea2001dea63d376be8274ac.patch b/meta/recipes-bsp/efibootmgr/efibootmgr/97668ae0bce776a36ea2001dea63d376be8274ac.patch
deleted file mode 100644
index 9525ed8c54..0000000000
--- a/meta/recipes-bsp/efibootmgr/efibootmgr/97668ae0bce776a36ea2001dea63d376be8274ac.patch
+++ /dev/null
@@ -1,83 +0,0 @@
-From 97668ae0bce776a36ea2001dea63d376be8274ac Mon Sep 17 00:00:00 2001
-From: Peter Jones <pjones@redhat.com>
-Date: Wed, 6 Mar 2019 13:08:33 -0500
-Subject: [PATCH] Make sure PKGS= is propogated into the submake for "make
- deps"
-
-When we're doing make deps with "$(CC) -MF", gcc and clang have different
-behavior, both broken in different ways, which we're hitting because of a
-missing -I argument for libefivar's includes. On clang, when a header can't
-be found, it emits a rule with the header as a prerequisite without a path,
-such as efivar.h here:
-
-efibootmgr.o: efibootmgr.c fix_coverity.h efivar.h efiboot.h \
- /home/pjones/devel/github.com/efibootmgr/master/src/include/list.h \
- /home/pjones/devel/github.com/efibootmgr/master/src/include/efi.h \
- /home/pjones/devel/github.com/efibootmgr/master/src/include/unparse_path.h \
- /home/pjones/devel/github.com/efibootmgr/master/src/include/efibootmgr.h \
- error.h
-
-Then the build that utilizes that rule will fail to find the
-prerequisite and tell you something like:
-
-make[1]: *** No rule to make target 'efivar.h', needed by 'efibootmgr.o'. Stop.
-make[1]: Leaving directory '/home/pjones/devel/github.com/efibootmgr/master/src'
-
-With gcc, when a header can't be found, it emits a rule without that header
-as a prerequisite, as such (again with efivar.h):
-
-efibootmgr.o: efibootmgr.c fix_coverity.h \
- /home/pjones/devel/github.com/efibootmgr/master/src/include/list.h \
- /home/pjones/devel/github.com/efibootmgr/master/src/include/efi.h \
- /home/pjones/devel/github.com/efibootmgr/master/src/include/unparse_path.h \
- /home/pjones/devel/github.com/efibootmgr/master/src/include/efi.h \
- /home/pjones/devel/github.com/efibootmgr/master/src/include/efibootmgr.h \
- error.h
-
-And then your build will fail if you haven't adjusted CFLAGS to tell it
-where to find the header.
-
-Both of these would be better just erroring, but at least gcc's doesn't
-insert a *wrong* dependency.
-
-This patch adds "PKGS=efivar efibootmgr popt" for all deps under src/.
-Technically that's overkill, as efibootmgr itself doesn't need popt, but it
-doesn't hurt anything to have the extra part there. The resulting
-.efibootmgr.d file has the prerequisites expressed correctly:
-
-efibootmgr.o: efibootmgr.c fix_coverity.h /usr/include/efivar/efivar.h \
- /usr/include/efivar/efiboot.h \
- /home/pjones/devel/github.com/efibootmgr/master/src/include/list.h \
- /home/pjones/devel/github.com/efibootmgr/master/src/include/efi.h \
- /home/pjones/devel/github.com/efibootmgr/master/src/include/unparse_path.h \
- /home/pjones/devel/github.com/efibootmgr/master/src/include/efi.h \
- /home/pjones/devel/github.com/efibootmgr/master/src/include/efibootmgr.h \
- error.h
-
-This fixes the issue described in github PR #96
-
-Signed-off-by: Peter Jones <pjones@redhat.com>
-Upstream-Status: Backport [https://github.com/rhboot/efibootmgr/commit/97668ae0bce776a36ea2001dea63d376be8274ac]
----
- src/Makefile | 7 ++++++-
- 1 file changed, 6 insertions(+), 1 deletion(-)
-
-diff --git a/src/Makefile b/src/Makefile
-index 258bac1..32fa188 100644
---- a/src/Makefile
-+++ b/src/Makefile
-@@ -31,8 +31,13 @@ efibootdump : PKGS=efivar efiboot popt
- efibootnext : $(call objects-of,$(EFIBOOTNEXT_SOURCES))
- efibootnext : PKGS=efivar efiboot popt
-
-+deps : PKGS=efivar efiboot popt
- deps : $(ALL_SOURCES)
-- $(MAKE) -f $(TOPDIR)/Make.deps deps SOURCES="$(ALL_SOURCES)" SUBDIR_CFLAGS="$(SUBDIR_CFLAGS)"
-+ $(MAKE) -f $(TOPDIR)/Make.deps \
-+ SOURCES="$(ALL_SOURCES)" \
-+ SUBDIR_CFLAGS="$(SUBDIR_CFLAGS)" \
-+ PKGS="$(PKGS)" \
-+ deps
-
- clean :
- @rm -rfv *.o *.a *.so $(TARGETS)
diff --git a/meta/recipes-bsp/efibootmgr/efibootmgr_17.bb b/meta/recipes-bsp/efibootmgr/efibootmgr_17.bb
deleted file mode 100644
index 11d8b9061d..0000000000
--- a/meta/recipes-bsp/efibootmgr/efibootmgr_17.bb
+++ /dev/null
@@ -1,39 +0,0 @@
-DESCRIPTION = "Linux user-space application to modify the EFI Boot Manager."
-SUMMARY = "EFI Boot Manager"
-HOMEPAGE = "https://github.com/rhboot/efibootmgr"
-SECTION = "base"
-
-LICENSE = "GPL-2.0-or-later"
-LIC_FILES_CHKSUM = "file://COPYING;md5=0636e73ff0215e8d672dc4c32c317bb3"
-
-DEPENDS = "efivar popt"
-
-COMPATIBLE_HOST = "(i.86|x86_64|arm|aarch64).*-linux"
-
-SRC_URI = "git://github.com/rhinstaller/efibootmgr.git;protocol=https;branch=master \
- file://0001-remove-extra-decl.patch \
- file://97668ae0bce776a36ea2001dea63d376be8274ac.patch \
- file://0001-src-make-compatible-with-efivar-38.patch \
- "
-SRCREV = "e067160ecef8208e1944002e5d50b275733211fb"
-
-S = "${WORKDIR}/git"
-
-inherit pkgconfig
-
-# The directory under the ESP that the default bootloader is found in. When
-# wic uses a subdirectory, this should use the same one too.
-EFIDIR ?= "/"
-
-EXTRA_OEMAKE += "'EFIDIR=${EFIDIR}'"
-
-CFLAGS += " -Wno-error"
-
-do_install () {
- oe_runmake install DESTDIR="${D}"
-}
-
-CLEANBROKEN = "1"
-# https://github.com/rhboot/efivar/issues/202
-COMPATIBLE_HOST:libc-musl = 'null'
-
diff --git a/meta/recipes-bsp/efibootmgr/efibootmgr_18.bb b/meta/recipes-bsp/efibootmgr/efibootmgr_18.bb
new file mode 100644
index 0000000000..fbd2f5dbc8
--- /dev/null
+++ b/meta/recipes-bsp/efibootmgr/efibootmgr_18.bb
@@ -0,0 +1,32 @@
+DESCRIPTION = "Linux user-space application to modify the EFI Boot Manager."
+SUMMARY = "EFI Boot Manager"
+HOMEPAGE = "https://github.com/rhboot/efibootmgr"
+SECTION = "base"
+
+LICENSE = "GPL-2.0-or-later"
+LIC_FILES_CHKSUM = "file://COPYING;md5=0636e73ff0215e8d672dc4c32c317bb3"
+
+DEPENDS = "efivar popt"
+
+COMPATIBLE_HOST = "(i.86|x86_64|arm|aarch64).*-linux"
+
+SRC_URI = "git://github.com/rhinstaller/efibootmgr.git;protocol=https;branch=main"
+SRCREV = "c3f9f0534e32158f62c43564036878b93b9e0fd6"
+
+S = "${WORKDIR}/git"
+
+inherit pkgconfig
+
+# The directory under the ESP that the default bootloader is found in. When
+# wic uses a subdirectory, this should use the same one too.
+EFIDIR ?= "/"
+
+EXTRA_OEMAKE += "'EFIDIR=${EFIDIR}'"
+
+CFLAGS += " -Wno-error"
+
+do_install () {
+ oe_runmake install DESTDIR="${D}"
+}
+
+CLEANBROKEN = "1"
diff --git a/meta/recipes-bsp/efivar/efivar/0001-src-Makefile-build-util.c-separately-for-makeguids.patch b/meta/recipes-bsp/efivar/efivar/0001-src-Makefile-build-util.c-separately-for-makeguids.patch
deleted file mode 100644
index 02781eb67d..0000000000
--- a/meta/recipes-bsp/efivar/efivar/0001-src-Makefile-build-util.c-separately-for-makeguids.patch
+++ /dev/null
@@ -1,38 +0,0 @@
-From 80f11fcb46f6b52e13501cb323ca1a849c3f6e88 Mon Sep 17 00:00:00 2001
-From: Alexander Kanavin <alex@linutronix.de>
-Date: Tue, 18 Jan 2022 11:53:41 +0100
-Subject: [PATCH] src/Makefile: build util.c separately for makeguids
-
-util.c needs to be built twice when cross-compiling:
-for the build machine to be able to link with
-makeguids which then runs during the same build,
-and then for the actual target.
-
-Upstream-Status: Submitted [https://github.com/rhboot/efivar/pull/203]
-Signed-off-by: Alexander Kanavin <alex@linutronix.de>
----
- src/Makefile | 5 ++++-
- 1 file changed, 4 insertions(+), 1 deletion(-)
-
-diff --git a/src/Makefile b/src/Makefile
-index 0e423c4..b10051b 100644
---- a/src/Makefile
-+++ b/src/Makefile
-@@ -28,10 +28,13 @@ EFIVAR_OBJECTS = $(patsubst %.S,%.o,$(patsubst %.c,%.o,$(EFIVAR_SOURCES)))
- EFISECDB_SOURCES = efisecdb.c guid-symbols.c secdb-dump.c util.c
- EFISECDB_OBJECTS = $(patsubst %.S,%.o,$(patsubst %.c,%.o,$(EFISECDB_SOURCES)))
- GENERATED_SOURCES = include/efivar/efivar-guids.h guid-symbols.c
--MAKEGUIDS_SOURCES = makeguids.c util.c
-+MAKEGUIDS_SOURCES = makeguids.c util-makeguids.c
- MAKEGUIDS_OBJECTS = $(patsubst %.S,%.o,$(patsubst %.c,%.o,$(MAKEGUIDS_SOURCES)))
- MAKEGUIDS_OUTPUT = $(GENERATED_SOURCES) guids.lds
-
-+util-makeguids.c :
-+ cp util.c util-makeguids.c
-+
- ALL_SOURCES=$(LIBEFISEC_SOURCES) $(LIBEFIBOOT_SOURCES) $(LIBEFIVAR_SOURCES) \
- $(MAKEGUIDS_SOURCES) $(GENERATED_SOURCES) $(EFIVAR_SOURCES) \
- $(sort $(wildcard include/efivar/*.h))
---
-2.20.1
-
diff --git a/meta/recipes-bsp/efivar/efivar_38.bb b/meta/recipes-bsp/efivar/efivar_38.bb
deleted file mode 100644
index 68c4b4b914..0000000000
--- a/meta/recipes-bsp/efivar/efivar_38.bb
+++ /dev/null
@@ -1,40 +0,0 @@
-SUMMARY = "Tools to manipulate UEFI variables"
-DESCRIPTION = "efivar provides a simple command line interface to the UEFI variable facility"
-HOMEPAGE = "https://github.com/rhboot/efivar"
-
-LICENSE = "LGPL-2.1-or-later"
-LIC_FILES_CHKSUM = "file://COPYING;md5=6626bb1e20189cfa95f2c508ba286393"
-
-COMPATIBLE_HOST = "(i.86|x86_64|arm|aarch64).*-linux"
-
-SRC_URI = "git://github.com/rhinstaller/efivar.git;branch=main;protocol=https \
- file://0001-docs-do-not-build-efisecdb-manpage.patch \
- file://0001-src-Makefile-build-util.c-separately-for-makeguids.patch \
- "
-SRCREV = "1753149d4176ebfb2b135ac0aaf79340bf0e7a93"
-
-S = "${WORKDIR}/git"
-
-inherit pkgconfig
-
-export CCLD_FOR_BUILD = "${BUILD_CCLD}"
-
-# Upstream uses --add-needed in gcc.specs which gold doesn't support, so
-# enforce BFD.
-LDFLAGS += "-fuse-ld=bfd"
-
-do_compile() {
- oe_runmake ERRORS= HOST_CFLAGS="${BUILD_CFLAGS}" HOST_LDFLAGS="${BUILD_LDFLAGS}"
-}
-
-do_install() {
- oe_runmake install DESTDIR=${D}
-}
-
-BBCLASSEXTEND = "native"
-
-RRECOMMENDS:${PN}:class-target = "kernel-module-efivarfs"
-
-CLEANBROKEN = "1"
-# https://github.com/rhboot/efivar/issues/202
-COMPATIBLE_HOST:libc-musl = 'null'
diff --git a/meta/recipes-bsp/efivar/efivar_39.bb b/meta/recipes-bsp/efivar/efivar_39.bb
new file mode 100644
index 0000000000..aab319be91
--- /dev/null
+++ b/meta/recipes-bsp/efivar/efivar_39.bb
@@ -0,0 +1,34 @@
+SUMMARY = "Tools to manipulate UEFI variables"
+DESCRIPTION = "efivar provides a simple command line interface to the UEFI variable facility"
+HOMEPAGE = "https://github.com/rhboot/efivar"
+
+LICENSE = "LGPL-2.1-or-later"
+LIC_FILES_CHKSUM = "file://COPYING;md5=6626bb1e20189cfa95f2c508ba286393"
+
+COMPATIBLE_HOST = "(i.86|x86_64|arm|aarch64).*-linux"
+
+SRC_URI = "git://github.com/rhinstaller/efivar.git;branch=main;protocol=https \
+ file://0001-docs-do-not-build-efisecdb-manpage.patch \
+ "
+SRCREV = "c47820c37ac26286559ec004de07d48d05f3308c"
+PV .= "+39+git"
+
+S = "${WORKDIR}/git"
+
+inherit pkgconfig
+
+export CCLD_FOR_BUILD = "${BUILD_CCLD}"
+
+do_compile() {
+ oe_runmake ERRORS= HOST_CFLAGS="${BUILD_CFLAGS}" HOST_LDFLAGS="${BUILD_LDFLAGS}"
+}
+
+do_install() {
+ oe_runmake install DESTDIR=${D}
+}
+
+BBCLASSEXTEND = "native"
+
+RRECOMMENDS:${PN}:class-target = "kernel-module-efivarfs"
+
+CLEANBROKEN = "1"
diff --git a/meta/recipes-bsp/formfactor/formfactor_0.0.bb b/meta/recipes-bsp/formfactor/formfactor_0.0.bb
index ea1fa4c754..1eaf30746b 100644
--- a/meta/recipes-bsp/formfactor/formfactor_0.0.bb
+++ b/meta/recipes-bsp/formfactor/formfactor_0.0.bb
@@ -5,7 +5,6 @@ build system cannot obtain from other sources such as the kernel."
SECTION = "base"
LICENSE = "MIT"
LIC_FILES_CHKSUM = "file://${COREBASE}/meta/COPYING.MIT;md5=3da9cfbcb788c80a0384361b4de20420"
-PR = "r45"
SRC_URI = "file://config file://machconfig"
S = "${WORKDIR}"
diff --git a/meta/recipes-bsp/gnu-efi/gnu-efi/0001-riscv64-adjust-type-definitions.patch b/meta/recipes-bsp/gnu-efi/gnu-efi/0001-riscv64-adjust-type-definitions.patch
new file mode 100644
index 0000000000..3475606264
--- /dev/null
+++ b/meta/recipes-bsp/gnu-efi/gnu-efi/0001-riscv64-adjust-type-definitions.patch
@@ -0,0 +1,34 @@
+From 1de509497826faa0ad84b82f5e2c3d21ee613459 Mon Sep 17 00:00:00 2001
+From: Moody Liu <mooodyhunter@outlook.com>
+Date: Sat, 13 May 2023 17:39:16 +0100
+Subject: [PATCH] riscv64: adjust type definitions
+
+CHAR8 needs to be defined while BOOLEAN should be removed
+here to prevent typedef conflicts
+
+Upstream-Status: Backport [https://sourceforge.net/p/gnu-efi/code/ci/1de509497826faa0ad84b82f5e2c3d21ee613459/]
+Signed-off-by: Moody Liu <mooodyhunter@outlook.com>
+---
+ inc/riscv64/efibind.h | 4 +---
+ 1 file changed, 1 insertion(+), 3 deletions(-)
+
+diff --git a/inc/riscv64/efibind.h b/inc/riscv64/efibind.h
+index 4fdf81d..d8b4f39 100644
+--- a/inc/riscv64/efibind.h
++++ b/inc/riscv64/efibind.h
+@@ -32,11 +32,9 @@ typedef uint16_t UINT16;
+ typedef int16_t INT16;
+ typedef uint8_t UINT8;
+ typedef int8_t INT8;
++typedef char CHAR8;
+ typedef wchar_t CHAR16;
+ #define WCHAR CHAR16
+-#ifndef BOOLEAN
+-typedef uint8_t BOOLEAN;
+-#endif
+ #undef VOID
+ typedef void VOID;
+ typedef int64_t INTN;
+--
+2.41.0
+
diff --git a/meta/recipes-bsp/gnu-efi/gnu-efi/0001-riscv64-ignore-unknown-relocs.patch b/meta/recipes-bsp/gnu-efi/gnu-efi/0001-riscv64-ignore-unknown-relocs.patch
new file mode 100644
index 0000000000..5b3c152c5e
--- /dev/null
+++ b/meta/recipes-bsp/gnu-efi/gnu-efi/0001-riscv64-ignore-unknown-relocs.patch
@@ -0,0 +1,32 @@
+From 708f66acfec9a86f237726d45095cbd380fd83ca Mon Sep 17 00:00:00 2001
+From: Callum Farmer <gmbr3@opensuse.org>
+Date: Wed, 21 Jun 2023 11:32:28 +0100
+Subject: [PATCH] riscv64: ignore unknown relocs
+
+Sometimes ld emits relocs such as R_RISCV_64 for unwind symbols
+these don't need to be handled yet so just can be skipped otherwise
+the binary will never load
+
+Upstream-Status: Backport [https://sourceforge.net/p/gnu-efi/code/ci/708f66acfec9a86f237726d45095cbd380fd83ca/]
+Signed-off-by: Callum Farmer <gmbr3@opensuse.org>
+---
+ gnuefi/reloc_riscv64.c | 3 +--
+ 1 file changed, 1 insertion(+), 2 deletions(-)
+
+diff --git a/gnuefi/reloc_riscv64.c b/gnuefi/reloc_riscv64.c
+index 0b02d83..e429602 100644
+--- a/gnuefi/reloc_riscv64.c
++++ b/gnuefi/reloc_riscv64.c
+@@ -81,8 +81,7 @@ EFI_STATUS EFIAPI _relocate(long ldbase, Elf_Dyn *dyn)
+ *addr = ldbase + rel->r_addend;
+ break;
+ default:
+- /* Panic */
+- while (1) ;
++ break;
+ }
+ rel = (Elf_Rela *)((char *)rel + relent);
+ relsz -= relent;
+--
+2.41.0
+
diff --git a/meta/recipes-bsp/gnu-efi/gnu-efi/lib-Makefile-fix-parallel-issue.patch b/meta/recipes-bsp/gnu-efi/gnu-efi/lib-Makefile-fix-parallel-issue.patch
deleted file mode 100644
index dc00b8fa4d..0000000000
--- a/meta/recipes-bsp/gnu-efi/gnu-efi/lib-Makefile-fix-parallel-issue.patch
+++ /dev/null
@@ -1,38 +0,0 @@
-From 3ec8c2a70304eabd5760937a4ec3fbc4068a77ed Mon Sep 17 00:00:00 2001
-From: Robert Yang <liezhi.yang@windriver.com>
-Date: Thu, 23 Apr 2015 01:49:31 -0700
-Subject: [PATCH 2/3] lib/Makefile: fix parallel issue
-
-Fixed:
-Assembler messages:
-Fatal error: can't create runtime/rtlock.o: No such file or directory
-Assembler messages:
-Fatal error: can't create runtime/rtdata.o: No such file or directory
-Assembler messages:
-Fatal error: can't create runtime/vm.o: No such file or directory
-Assembler messages:
-Fatal error: can't create runtime/efirtlib.o: No such file or directory
-
-Upstream-Status: Pending
-
-Signed-off-by: Robert Yang <liezhi.yang@windriver.com>
----
- lib/Makefile | 2 ++
- 1 file changed, 2 insertions(+)
-
-diff --git a/lib/Makefile b/lib/Makefile
-index 048751a..ed39bbb 100644
---- a/lib/Makefile
-+++ b/lib/Makefile
-@@ -74,6 +74,8 @@ all: libsubdirs libefi.a
- libsubdirs:
- for sdir in $(SUBDIRS); do mkdir -p $$sdir; done
-
-+$(OBJS): libsubdirs
-+
- libefi.a: $(OBJS)
- $(AR) $(ARFLAGS) $@ $(OBJS)
-
---
-2.7.4
-
diff --git a/meta/recipes-bsp/gnu-efi/gnu-efi/no-werror.patch b/meta/recipes-bsp/gnu-efi/gnu-efi/no-werror.patch
new file mode 100644
index 0000000000..db2bcf70b7
--- /dev/null
+++ b/meta/recipes-bsp/gnu-efi/gnu-efi/no-werror.patch
@@ -0,0 +1,19 @@
+Do not treat warnings as errors
+
+There are additional warnings found with musl which are
+treated as errors and fails the build, we have more combinations
+then upstream supports to handle
+
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+Upstream-Status: Inappropriate [OE specific]
+--- a/Make.defaults
++++ b/Make.defaults
+@@ -187,7 +187,7 @@ CFLAGS += $(ARCH3264) -g -O2 -Wall -Wex
+ -funsigned-char -fshort-wchar -fno-strict-aliasing \
+ -ffreestanding -fno-stack-protector
+ else
+-CFLAGS += $(ARCH3264) -g -O2 -Wall -Wextra -Wno-pointer-sign -Werror \
++CFLAGS += $(ARCH3264) -g -O2 -Wall -Wextra -Wno-pointer-sign \
+ -funsigned-char -fshort-wchar -fno-strict-aliasing \
+ -ffreestanding -fno-stack-protector -fno-stack-check \
+ -fno-stack-check \
diff --git a/meta/recipes-bsp/gnu-efi/gnu-efi/parallel-make-archives.patch b/meta/recipes-bsp/gnu-efi/gnu-efi/parallel-make-archives.patch
index 8a0138bbe5..63d9b6fc31 100644
--- a/meta/recipes-bsp/gnu-efi/gnu-efi/parallel-make-archives.patch
+++ b/meta/recipes-bsp/gnu-efi/gnu-efi/parallel-make-archives.patch
@@ -1,7 +1,7 @@
-From 48b2cdbcd761105e8ebad412fcbf23db1ac4ef7c Mon Sep 17 00:00:00 2001
+From f56ddb00a656af2e84f839738fad19909ac65047 Mon Sep 17 00:00:00 2001
From: Saul Wold <sgw@linux.intel.com>
Date: Sun, 9 Mar 2014 15:22:15 +0200
-Subject: [PATCH 1/3] Fix parallel make failure for archives
+Subject: [PATCH] Fix parallel make failure for archives
Upstream-Status: Pending
@@ -20,12 +20,16 @@ Signed-off-by: California Sullivan <california.l.sullivan@intel.com>
[Rebased for 3.0.8]
Signed-off-by: Yi Zhao <yi.zhao@windriver.com>
+---
+ lib/Makefile | 2 +-
+ 1 file changed, 1 insertion(+), 1 deletion(-)
+
diff --git a/lib/Makefile b/lib/Makefile
-index 0e6410d..048751a 100644
+index 1fc6a47..54b0ca7 100644
--- a/lib/Makefile
+++ b/lib/Makefile
-@@ -75,7 +75,7 @@ libsubdirs:
- for sdir in $(SUBDIRS); do mkdir -p $$sdir; done
+@@ -77,7 +77,7 @@ libsubdirs:
+ $(OBJS): libsubdirs
libefi.a: $(OBJS)
- $(AR) $(ARFLAGS) $@ $^
@@ -33,6 +37,3 @@ index 0e6410d..048751a 100644
clean:
rm -f libefi.a *~ $(OBJS) */*.o
---
-2.7.4
-
diff --git a/meta/recipes-bsp/gnu-efi/gnu-efi_3.0.14.bb b/meta/recipes-bsp/gnu-efi/gnu-efi_3.0.14.bb
deleted file mode 100644
index c746c2a1b8..0000000000
--- a/meta/recipes-bsp/gnu-efi/gnu-efi_3.0.14.bb
+++ /dev/null
@@ -1,70 +0,0 @@
-SUMMARY = "Libraries for producing EFI binaries"
-HOMEPAGE = "http://sourceforge.net/projects/gnu-efi/"
-DESCRIPTION = "GNU-EFI aims to Develop EFI applications for ARM-64, ARM-32, x86_64, IA-64 (IPF), IA-32 (x86), and MIPS platforms using the GNU toolchain and the EFI development environment."
-SECTION = "devel"
-LICENSE = "GPL-2.0-or-later | BSD-2-Clause"
-LIC_FILES_CHKSUM = "file://gnuefi/crt0-efi-arm.S;beginline=4;endline=16;md5=e582764a4776e60c95bf9ab617343d36 \
- file://gnuefi/crt0-efi-aarch64.S;beginline=4;endline=16;md5=e582764a4776e60c95bf9ab617343d36 \
- file://inc/efishellintf.h;beginline=13;endline=20;md5=202766b79d708eff3cc70fce15fb80c7 \
- file://lib/arm/math.c;beginline=2;endline=15;md5=8ed772501da77b2b3345aa6df8744c9e \
- file://lib/arm/initplat.c;beginline=2;endline=15;md5=8ed772501da77b2b3345aa6df8744c9e \
- file://lib/aarch64/math.c;beginline=2;endline=15;md5=8ed772501da77b2b3345aa6df8744c9e \
- file://lib/aarch64/initplat.c;beginline=2;endline=15;md5=8ed772501da77b2b3345aa6df8744c9e \
- "
-
-SRC_URI = "${SOURCEFORGE_MIRROR}/${BPN}/files/${BP}.tar.bz2 \
- file://parallel-make-archives.patch \
- file://lib-Makefile-fix-parallel-issue.patch \
- file://gnu-efi-3.0.9-fix-clang-build.patch \
- "
-
-SRC_URI[sha256sum] = "b73b643a0d5697d1f396d7431448e886dd805668789578e3e1a28277c9528435"
-
-COMPATIBLE_HOST = "(x86_64.*|i.86.*|aarch64.*|arm.*)-linux"
-COMPATIBLE_HOST:armv4 = 'null'
-
-do_configure:linux-gnux32:prepend() {
- cp ${STAGING_INCDIR}/gnu/stubs-x32.h ${STAGING_INCDIR}/gnu/stubs-64.h
- cp ${STAGING_INCDIR}/bits/long-double-32.h ${STAGING_INCDIR}/bits/long-double-64.h
-}
-
-def gnu_efi_arch(d):
- import re
- tarch = d.getVar("TARGET_ARCH")
- if re.match("i[3456789]86", tarch):
- return "ia32"
- return tarch
-
-EXTRA_OEMAKE = "'ARCH=${@gnu_efi_arch(d)}' 'CC=${CC}' 'AS=${AS}' 'LD=${LD}' 'AR=${AR}' \
- 'RANLIB=${RANLIB}' 'OBJCOPY=${OBJCOPY}' 'PREFIX=${prefix}' 'LIBDIR=${libdir}' \
- "
-
-# gnu-efi's Makefile treats prefix as toolchain prefix, so don't
-# export it.
-prefix[unexport] = "1"
-
-do_install() {
- oe_runmake install INSTALLROOT="${D}"
-}
-
-FILES:${PN} += "${libdir}/*.lds"
-
-# 64-bit binaries are expected for EFI when targeting X32
-INSANE_SKIP:${PN}-dev:append:linux-gnux32 = " arch"
-INSANE_SKIP:${PN}-dev:append:linux-muslx32 = " arch"
-
-BBCLASSEXTEND = "native"
-
-# It doesn't support sse, its make.defaults sets:
-# CFLAGS += -mno-mmx -mno-sse
-# So also remove -mfpmath=sse from TUNE_CCARGS
-TUNE_CCARGS:remove = "-mfpmath=sse"
-
-python () {
- ccargs = d.getVar('TUNE_CCARGS').split()
- if '-mx32' in ccargs:
- # use x86_64 EFI ABI
- ccargs.remove('-mx32')
- ccargs.append('-m64')
- d.setVar('TUNE_CCARGS', ' '.join(ccargs))
-}
diff --git a/meta/recipes-bsp/gnu-efi/gnu-efi_3.0.17.bb b/meta/recipes-bsp/gnu-efi/gnu-efi_3.0.17.bb
new file mode 100644
index 0000000000..43b7cc7529
--- /dev/null
+++ b/meta/recipes-bsp/gnu-efi/gnu-efi_3.0.17.bb
@@ -0,0 +1,75 @@
+SUMMARY = "Libraries for producing EFI binaries"
+HOMEPAGE = "http://sourceforge.net/projects/gnu-efi/"
+DESCRIPTION = "GNU-EFI aims to Develop EFI applications for ARM-64, ARM-32, x86_64, IA-64 (IPF), IA-32 (x86), and MIPS platforms using the GNU toolchain and the EFI development environment."
+SECTION = "devel"
+LICENSE = "GPL-2.0-or-later | BSD-2-Clause"
+LIC_FILES_CHKSUM = "file://gnuefi/crt0-efi-arm.S;beginline=4;endline=16;md5=e582764a4776e60c95bf9ab617343d36 \
+ file://gnuefi/crt0-efi-aarch64.S;beginline=4;endline=16;md5=e582764a4776e60c95bf9ab617343d36 \
+ file://inc/efishellintf.h;beginline=13;endline=20;md5=202766b79d708eff3cc70fce15fb80c7 \
+ file://lib/arm/math.c;beginline=2;endline=15;md5=8ed772501da77b2b3345aa6df8744c9e \
+ file://lib/arm/initplat.c;beginline=2;endline=15;md5=8ed772501da77b2b3345aa6df8744c9e \
+ file://lib/aarch64/math.c;beginline=2;endline=15;md5=8ed772501da77b2b3345aa6df8744c9e \
+ file://lib/aarch64/initplat.c;beginline=2;endline=15;md5=8ed772501da77b2b3345aa6df8744c9e \
+ "
+
+SRC_URI = "${SOURCEFORGE_MIRROR}/${BPN}/files/${BP}.tar.bz2 \
+ file://parallel-make-archives.patch \
+ file://gnu-efi-3.0.9-fix-clang-build.patch \
+ file://0001-riscv64-adjust-type-definitions.patch \
+ file://0001-riscv64-ignore-unknown-relocs.patch \
+ file://no-werror.patch \
+ "
+SRC_URI[sha256sum] = "7807e903349343a7a142ebb934703a2872235e89688cf586c032b0a1087bcaf4"
+
+COMPATIBLE_HOST = "(x86_64.*|i.86.*|aarch64.*|arm.*|riscv64.*)-linux"
+COMPATIBLE_HOST:armv4 = 'null'
+
+do_configure:linux-gnux32:prepend() {
+ cp ${STAGING_INCDIR}/gnu/stubs-x32.h ${STAGING_INCDIR}/gnu/stubs-64.h
+ cp ${STAGING_INCDIR}/bits/long-double-32.h ${STAGING_INCDIR}/bits/long-double-64.h
+}
+
+def gnu_efi_arch(d):
+ import re
+ tarch = d.getVar("TARGET_ARCH")
+ if re.match("i[3456789]86", tarch):
+ return "ia32"
+ return tarch
+
+do_compile:prepend() {
+ unset LDFLAGS
+}
+
+EXTRA_OEMAKE = "'ARCH=${@gnu_efi_arch(d)}' 'CC=${CC}' 'AS=${AS}' 'LD=${LD}' 'AR=${AR}' \
+ 'RANLIB=${RANLIB}' 'OBJCOPY=${OBJCOPY}' 'PREFIX=${prefix}' 'LIBDIR=${libdir}' \
+ "
+
+# gnu-efi's Makefile treats prefix as toolchain prefix, so don't
+# export it.
+prefix[unexport] = "1"
+
+do_install() {
+ oe_runmake install INSTALLROOT="${D}"
+}
+
+FILES:${PN} += "${libdir}/*.lds ${libdir}/gnuefi/apps"
+
+# 64-bit binaries are expected for EFI when targeting X32
+INSANE_SKIP:${PN}-dev:append:linux-gnux32 = " arch"
+INSANE_SKIP:${PN}-dev:append:linux-muslx32 = " arch"
+
+BBCLASSEXTEND = "native"
+
+# It doesn't support sse, its make.defaults sets:
+# CFLAGS += -mno-mmx -mno-sse
+# So also remove -mfpmath=sse from TUNE_CCARGS
+TUNE_CCARGS:remove = "-mfpmath=sse"
+
+python () {
+ ccargs = d.getVar('TUNE_CCARGS').split()
+ if '-mx32' in ccargs:
+ # use x86_64 EFI ABI
+ ccargs.remove('-mx32')
+ ccargs.append('-m64')
+ d.setVar('TUNE_CCARGS', ' '.join(ccargs))
+}
diff --git a/meta/recipes-bsp/grub/files/0001-Disable-mfpmath-sse-as-well-when-SSE-is-disabled.patch b/meta/recipes-bsp/grub/files/0001-Disable-mfpmath-sse-as-well-when-SSE-is-disabled.patch
index 6b73878cc0..05a4697a73 100644
--- a/meta/recipes-bsp/grub/files/0001-Disable-mfpmath-sse-as-well-when-SSE-is-disabled.patch
+++ b/meta/recipes-bsp/grub/files/0001-Disable-mfpmath-sse-as-well-when-SSE-is-disabled.patch
@@ -1,4 +1,4 @@
-From 96d9aa55d29b24e2490d5647a9efc66940fc400f Mon Sep 17 00:00:00 2001
+From 006799e9c4babe8a8340a24501b253e759614a2d Mon Sep 17 00:00:00 2001
From: Khem Raj <raj.khem@gmail.com>
Date: Wed, 13 Jan 2016 19:17:31 +0000
Subject: [PATCH] Disable -mfpmath=sse as well when SSE is disabled
@@ -24,15 +24,16 @@ Signed-off-by: Nitin A Kamble <nitin.a.kamble@intel.com>
Signed-off-by: Khem Raj <raj.khem@gmail.com>
Upstream-Status: Pending
+
---
configure.ac | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/configure.ac b/configure.ac
-index 7656f24..0868ea9 100644
+index cd667a2..8263876 100644
--- a/configure.ac
+++ b/configure.ac
-@@ -824,7 +824,7 @@ fi
+@@ -846,7 +846,7 @@ fi
if ( test "x$target_cpu" = xi386 || test "x$target_cpu" = xx86_64 ) && test "x$platform" != xemu; then
# Some toolchains enable these features by default, but they need
# registers that aren't set up properly in GRUB.
@@ -40,4 +41,4 @@ index 7656f24..0868ea9 100644
+ TARGET_CFLAGS="$TARGET_CFLAGS -mno-mmx -mno-sse -mno-sse2 -mno-sse3 -mno-3dnow -mfpmath=387"
fi
- # GRUB doesn't use float or doubles at all. Yet some toolchains may decide
+ if ( test "x$target_cpu" = xi386 || test "x$target_cpu" = xx86_64 ); then
diff --git a/meta/recipes-bsp/grub/files/0001-RISC-V-Restore-the-typcast-to-long.patch b/meta/recipes-bsp/grub/files/0001-RISC-V-Restore-the-typcast-to-long.patch
index 2f15a91f68..cafa711731 100644
--- a/meta/recipes-bsp/grub/files/0001-RISC-V-Restore-the-typcast-to-long.patch
+++ b/meta/recipes-bsp/grub/files/0001-RISC-V-Restore-the-typcast-to-long.patch
@@ -1,4 +1,4 @@
-From e4c41db74b8972285cbdfe614c95c1ffd97d70e1 Mon Sep 17 00:00:00 2001
+From b47029e8e582d17c6874d2622fe1a5b834377dbb Mon Sep 17 00:00:00 2001
From: Khem Raj <raj.khem@gmail.com>
Date: Fri, 26 Mar 2021 11:59:43 -0700
Subject: [PATCH] RISC-V: Restore the typcast to 64bit type
@@ -17,15 +17,16 @@ Cc: Daniel Kiper <daniel.kiper@oracle.com>
Cc: Chester Lin <clin@suse.com>
Cc: Nikita Ermakov <arei@altlinux.org>
Cc: Alistair Francis <alistair.francis@wdc.com>
+
---
util/grub-mkimagexx.c | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/util/grub-mkimagexx.c b/util/grub-mkimagexx.c
-index 00f49ccaa..ac677d03d 100644
+index e50b295..2f09255 100644
--- a/util/grub-mkimagexx.c
+++ b/util/grub-mkimagexx.c
-@@ -1242,7 +1242,7 @@ SUFFIX (relocate_addrs) (Elf_Ehdr *e, struct section_metadata *smd,
+@@ -1310,7 +1310,7 @@ SUFFIX (relocate_addrs) (Elf_Ehdr *e, struct section_metadata *smd,
*/
sym_addr += addend;
@@ -34,6 +35,3 @@ index 00f49ccaa..ac677d03d 100644
switch (ELF_R_TYPE (info))
{
---
-2.31.1
-
diff --git a/meta/recipes-bsp/grub/files/0001-configure.ac-Use-_zicsr_zifencei-extentions-on-riscv.patch b/meta/recipes-bsp/grub/files/0001-configure.ac-Use-_zicsr_zifencei-extentions-on-riscv.patch
deleted file mode 100644
index c575a31161..0000000000
--- a/meta/recipes-bsp/grub/files/0001-configure.ac-Use-_zicsr_zifencei-extentions-on-riscv.patch
+++ /dev/null
@@ -1,47 +0,0 @@
-From f1217c803cec90813eb834dde7829f4961b2a2e4 Mon Sep 17 00:00:00 2001
-From: Khem Raj <raj.khem@gmail.com>
-Date: Thu, 17 Feb 2022 15:07:02 -0800
-Subject: [PATCH] configure.ac: Use _zicsr_zifencei extentions on riscv
-
-From version 2.38, binutils defaults to ISA spec version 20191213. This
-means that the csr read/write (csrr*/csrw*) instructions and fence.i
-instruction has separated from the `I` extension, become two standalone
-extensions: Zicsr and Zifencei.
-
-The fix is to specify those extensions explicitely in -march. Since we
-are now using binutils 2.38+ in OE this is ok, a more upstreamable fix for
-grub will be to detect these extentions, however thats not easy to
-implement
-
-Upstream-Status: Inappropriate [OE specific]
-Signed-off-by: Khem Raj <raj.khem@gmail.com>
----
- configure.ac | 8 ++++----
- 1 file changed, 4 insertions(+), 4 deletions(-)
-
-diff --git a/configure.ac b/configure.ac
-index c7fc55a..072f2c9 100644
---- a/configure.ac
-+++ b/configure.ac
-@@ -849,14 +849,14 @@ if test x"$platform" != xemu ; then
- [grub_cv_target_cc_soft_float="-mgeneral-regs-only"], [])
- fi
- if test "x$target_cpu" = xriscv32; then
-- CFLAGS="$TARGET_CFLAGS -march=rv32imac -mabi=ilp32 -Werror"
-+ CFLAGS="$TARGET_CFLAGS -march=rv32imac_zicsr_zifencei -mabi=ilp32 -Werror"
- AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[]], [[]])],
-- [grub_cv_target_cc_soft_float="-march=rv32imac -mabi=ilp32"], [])
-+ [grub_cv_target_cc_soft_float="-march=rv32imac_zicsr_zifencei -mabi=ilp32"], [])
- fi
- if test "x$target_cpu" = xriscv64; then
-- CFLAGS="$TARGET_CFLAGS -march=rv64imac -mabi=lp64 -Werror"
-+ CFLAGS="$TARGET_CFLAGS -march=rv64imac_zicsr_zifencei -mabi=lp64 -Werror"
- AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[]], [[]])],
-- [grub_cv_target_cc_soft_float="-march=rv64imac -mabi=lp64"], [])
-+ [grub_cv_target_cc_soft_float="-march=rv64imac_zicsr_zifencei -mabi=lp64"], [])
- fi
- if test "x$target_cpu" = xia64; then
- CFLAGS="$TARGET_CFLAGS -mno-inline-float-divide -mno-inline-sqrt -Werror"
---
-2.35.1
-
diff --git a/meta/recipes-bsp/grub/files/0001-grub.d-10_linux.in-add-oe-s-kernel-name.patch b/meta/recipes-bsp/grub/files/0001-grub.d-10_linux.in-add-oe-s-kernel-name.patch
index 69b04aa56f..69dec7695a 100644
--- a/meta/recipes-bsp/grub/files/0001-grub.d-10_linux.in-add-oe-s-kernel-name.patch
+++ b/meta/recipes-bsp/grub/files/0001-grub.d-10_linux.in-add-oe-s-kernel-name.patch
@@ -1,4 +1,4 @@
-From 8f47ed4aaefba087b6ca76e59c9f832b6a0702bc Mon Sep 17 00:00:00 2001
+From a80592e20f6c4b928a22862f52f268ab9d9908b2 Mon Sep 17 00:00:00 2001
From: Khem Raj <raj.khem@gmail.com>
Date: Wed, 13 Jan 2016 19:28:00 +0000
Subject: [PATCH] grub.d/10_linux.in: add oe's kernel name
@@ -20,10 +20,10 @@ Upstream-Status: Inappropriate [OE specific]
2 files changed, 4 insertions(+), 4 deletions(-)
diff --git a/util/grub.d/10_linux.in b/util/grub.d/10_linux.in
-index 4532266..cba2617 100644
+index cc393be..8545cb6 100644
--- a/util/grub.d/10_linux.in
+++ b/util/grub.d/10_linux.in
-@@ -164,12 +164,12 @@ machine=`uname -m`
+@@ -166,12 +166,12 @@ machine=`uname -m`
case "x$machine" in
xi?86 | xx86_64)
list=
@@ -40,10 +40,10 @@ index 4532266..cba2617 100644
done ;;
esac
diff --git a/util/grub.d/20_linux_xen.in b/util/grub.d/20_linux_xen.in
-index 96179ea..98d16ae 100644
+index 94dd8be..36cd554 100644
--- a/util/grub.d/20_linux_xen.in
+++ b/util/grub.d/20_linux_xen.in
-@@ -154,7 +154,7 @@ EOF
+@@ -181,7 +181,7 @@ EOF
}
linux_list=
diff --git a/meta/recipes-bsp/grub/files/CVE-2021-3981-grub-mkconfig-Restore-umask-for-the-grub.cfg.patch b/meta/recipes-bsp/grub/files/CVE-2021-3981-grub-mkconfig-Restore-umask-for-the-grub.cfg.patch
deleted file mode 100644
index dae26fd8bb..0000000000
--- a/meta/recipes-bsp/grub/files/CVE-2021-3981-grub-mkconfig-Restore-umask-for-the-grub.cfg.patch
+++ /dev/null
@@ -1,49 +0,0 @@
-From 0adec29674561034771c13e446069b41ef41e4d4 Mon Sep 17 00:00:00 2001
-From: Michael Chang <mchang@suse.com>
-Date: Fri, 3 Dec 2021 16:13:28 +0800
-Subject: [PATCH] grub-mkconfig: Restore umask for the grub.cfg
-
-The commit ab2e53c8a (grub-mkconfig: Honor a symlink when generating
-configuration by grub-mkconfig) has inadvertently discarded umask for
-creating grub.cfg in the process of running grub-mkconfig. The resulting
-wrong permission (0644) would allow unprivileged users to read GRUB
-configuration file content. This presents a low confidentiality risk
-as grub.cfg may contain non-secured plain-text passwords.
-
-This patch restores the missing umask and sets the creation file mode
-to 0600 preventing unprivileged access.
-
-Fixes: CVE-2021-3981
-
-Signed-off-by: Michael Chang <mchang@suse.com>
-Reviewed-by: Daniel Kiper <daniel.kiper@oracle.com>
-
-Upstream-Status: Backport
-CVE: CVE-2021-3981
-
-Reference to upstream patch:
-https://git.savannah.gnu.org/cgit/grub.git/commit/?id=0adec29674561034771c13e446069b41ef41e4d4
-
-Signed-off-by: Yongxin Liu <yongxin.liu@windriver.com>
----
- util/grub-mkconfig.in | 3 +++
- 1 file changed, 3 insertions(+)
-
-diff --git a/util/grub-mkconfig.in b/util/grub-mkconfig.in
-index c3ea7612e..62335d027 100644
---- a/util/grub-mkconfig.in
-+++ b/util/grub-mkconfig.in
-@@ -301,7 +301,10 @@ and /etc/grub.d/* files or please file a bug report with
- exit 1
- else
- # none of the children aborted with error, install the new grub.cfg
-+ oldumask=$(umask)
-+ umask 077
- cat ${grub_cfg}.new > ${grub_cfg}
-+ umask $oldumask
- rm -f ${grub_cfg}.new
- fi
- fi
---
-2.31.1
-
diff --git a/meta/recipes-bsp/grub/files/autogen.sh-exclude-pc.patch b/meta/recipes-bsp/grub/files/autogen.sh-exclude-pc.patch
index 1323a54a59..f8dfda90ab 100644
--- a/meta/recipes-bsp/grub/files/autogen.sh-exclude-pc.patch
+++ b/meta/recipes-bsp/grub/files/autogen.sh-exclude-pc.patch
@@ -1,4 +1,4 @@
-From 8790aa8bea736f52341a0430ff3e317d3be0f99b Mon Sep 17 00:00:00 2001
+From 14c1d0459fb3561e627d3a5f6e91a0d2f7b4aa45 Mon Sep 17 00:00:00 2001
From: Naveen Saini <naveen.kumar.saini@intel.com>
Date: Mon, 15 Mar 2021 14:44:15 +0800
Subject: [PATCH] autogen.sh: exclude .pc from po/POTFILES.in
@@ -14,15 +14,16 @@ Upstream-Status: Inappropriate [OE specific]
Signed-off-by: Robert Yang <liezhi.yang@windriver.com>
Signed-off-by: Anuj Mittal <anuj.mittal@intel.com>
Signed-off-by: Naveen Saini <naveen.kumar.saini@intel.com>
+
---
autogen.sh | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/autogen.sh b/autogen.sh
-index 31b0ced7e..c63ae766c 100755
+index 195daa5..773b7b4 100755
--- a/autogen.sh
+++ b/autogen.sh
-@@ -13,7 +13,7 @@ fi
+@@ -26,7 +26,7 @@ fi
export LC_COLLATE=C
unset LC_ALL
@@ -31,6 +32,3 @@ index 31b0ced7e..c63ae766c 100755
find util -iname '*.in' ! -name Makefile.in |sort > po/POTFILES-shell.in
echo "Importing unicode..."
---
-2.17.1
-
diff --git a/meta/recipes-bsp/grub/files/determinism.patch b/meta/recipes-bsp/grub/files/determinism.patch
deleted file mode 100644
index 2828e80975..0000000000
--- a/meta/recipes-bsp/grub/files/determinism.patch
+++ /dev/null
@@ -1,68 +0,0 @@
-From b6f9b3f6fa782807c4a7ec16ee8ef868cdfbf468 Mon Sep 17 00:00:00 2001
-From: Naveen Saini <naveen.kumar.saini@intel.com>
-Date: Mon, 15 Mar 2021 14:56:18 +0800
-Subject: [PATCH] The output in moddep.lst generated from syminfo.lst using
- genmoddep.awk is not deterministic since the order of the dependencies on
- each line can vary depending on how awk sorts the values in the array.
-
-Be deterministic in the output by sorting the dependencies on each line.
-
-Also, the output of the SOURCES lines in grub-core/Makefile.core.am, generated
-from grub-core/Makefile.core.def with gentpl.py is not deterministic due to
-missing sorting of the list used to generate it. Add such a sort.
-
-Also ensure the generated unidata.c file is deterministic by sorting the
-keys of the dict.
-
-Upstream-Status: Pending
-Richard Purdie <richard.purdie@linuxfoundation.org>
-Signed-off-by: Naveen Saini <naveen.kumar.saini@intel.com>
----
- gentpl.py | 1 +
- grub-core/genmoddep.awk | 4 +++-
- util/import_unicode.py | 2 +-
- 3 files changed, 5 insertions(+), 2 deletions(-)
-
-diff --git a/gentpl.py b/gentpl.py
-index c86550d4f..589285192 100644
---- a/gentpl.py
-+++ b/gentpl.py
-@@ -568,6 +568,7 @@ def foreach_platform_value(defn, platform, suffix, closure):
- for group in RMAP[platform]:
- for value in defn.find_all(group + suffix):
- r.append(closure(value))
-+ r.sort()
- return ''.join(r)
-
- def platform_conditional(platform, closure):
-diff --git a/grub-core/genmoddep.awk b/grub-core/genmoddep.awk
-index 04c2863e5..247436392 100644
---- a/grub-core/genmoddep.awk
-+++ b/grub-core/genmoddep.awk
-@@ -59,7 +59,9 @@ END {
- }
- modlist = ""
- depcount[mod] = 0
-- for (depmod in uniqmods) {
-+ n = asorti(uniqmods, w)
-+ for (i = 1; i <= n; i++) {
-+ depmod = w[i]
- modlist = modlist " " depmod;
- inverse_dependencies[depmod] = inverse_dependencies[depmod] " " mod
- depcount[mod]++
-diff --git a/util/import_unicode.py b/util/import_unicode.py
-index 08f80591e..1f434a069 100644
---- a/util/import_unicode.py
-+++ b/util/import_unicode.py
-@@ -174,7 +174,7 @@ infile.close ()
-
- outfile.write ("struct grub_unicode_arabic_shape grub_unicode_arabic_shapes[] = {\n ")
-
--for x in arabicsubst:
-+for x in sorted(arabicsubst):
- try:
- if arabicsubst[x]['join'] == "DUAL":
- outfile.write ("{0x%x, 0x%x, 0x%x, 0x%x, 0x%x},\n " % (arabicsubst[x][0], arabicsubst[x][1], arabicsubst[x][2], arabicsubst[x][3], arabicsubst[x][4]))
---
-2.17.1
-
diff --git a/meta/recipes-bsp/grub/files/grub-module-explicitly-keeps-symbole-.module_license.patch b/meta/recipes-bsp/grub/files/grub-module-explicitly-keeps-symbole-.module_license.patch
index 26890261b7..d9012d1dd6 100644
--- a/meta/recipes-bsp/grub/files/grub-module-explicitly-keeps-symbole-.module_license.patch
+++ b/meta/recipes-bsp/grub/files/grub-module-explicitly-keeps-symbole-.module_license.patch
@@ -1,4 +1,4 @@
-From 917133acc701dbc4636165d3b08d15dc5829a06f Mon Sep 17 00:00:00 2001
+From b316ed326bd492106006d78f5bfcd767b49a4f2e Mon Sep 17 00:00:00 2001
From: Hongxu Jia <hongxu.jia@windriver.com>
Date: Wed, 17 Aug 2016 04:06:34 -0400
Subject: [PATCH] grub module explicitly keeps symbole .module_license
@@ -8,7 +8,7 @@ it stripped symbol table:
---------------
root@localhost:~# objdump -t all_video.mod
-
+
all_video.mod: file format elf64-x86-64
SYMBOL TABLE:
@@ -40,12 +40,13 @@ SYMBOL TABLE:
Upstream-Status: Pending
Signed-off-by: Hongxu Jia <hongxu.jia@windriver.com>
+
---
grub-core/genmod.sh.in | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/grub-core/genmod.sh.in b/grub-core/genmod.sh.in
-index 1250589..dd14308 100644
+index e57c4d9..42bb1ba 100644
--- a/grub-core/genmod.sh.in
+++ b/grub-core/genmod.sh.in
@@ -56,7 +56,7 @@ if test x@TARGET_APPLE_LINKER@ != x1; then
diff --git a/meta/recipes-bsp/grub/grub-efi_2.06.bb b/meta/recipes-bsp/grub/grub-efi_2.12.bb
index 9857e8e036..9857e8e036 100644
--- a/meta/recipes-bsp/grub/grub-efi_2.06.bb
+++ b/meta/recipes-bsp/grub/grub-efi_2.12.bb
diff --git a/meta/recipes-bsp/grub/grub2.inc b/meta/recipes-bsp/grub/grub2.inc
index 45852ab9b1..bb9aacb478 100644
--- a/meta/recipes-bsp/grub/grub2.inc
+++ b/meta/recipes-bsp/grub/grub2.inc
@@ -18,24 +18,27 @@ SRC_URI = "${GNU_MIRROR}/grub/grub-${PV}.tar.gz \
file://autogen.sh-exclude-pc.patch \
file://grub-module-explicitly-keeps-symbole-.module_license.patch \
file://0001-grub.d-10_linux.in-add-oe-s-kernel-name.patch \
- file://determinism.patch \
file://0001-RISC-V-Restore-the-typcast-to-long.patch \
- file://CVE-2021-3981-grub-mkconfig-Restore-umask-for-the-grub.cfg.patch \
- file://0001-configure.ac-Use-_zicsr_zifencei-extentions-on-riscv.patch \
"
-SRC_URI[sha256sum] = "23b64b4c741569f9426ed2e3d0e6780796fca081bee4c99f62aa3f53ae803f5f"
+SRC_URI[sha256sum] = "b30919fa5be280417c17ac561bb1650f60cfb80cc6237fa1e2b6f56154cb9c91"
-# Applies only to RHEL
-CVE_CHECK_IGNORE += "CVE-2019-14865"
-# Applies only to SUSE
-CVE_CHECK_IGNORE += "CVE-2021-46705"
+CVE_STATUS[CVE-2019-14865] = "not-applicable-platform: applies only to RHEL"
+CVE_STATUS[CVE-2021-46705] = "not-applicable-platform: Applies only to SUSE"
+CVE_STATUS[CVE-2023-4001] = "not-applicable-platform: Applies only to RHEL/Fedora"
+CVE_STATUS[CVE-2024-1048] = "not-applicable-platform: Applies only to RHEL/Fedora"
+CVE_STATUS[CVE-2023-4692] = "cpe-incorrect: Fixed in version 2.12 already"
+CVE_STATUS[CVE-2023-4693] = "cpe-incorrect: Fixed in version 2.12 already"
DEPENDS = "flex-native bison-native gettext-native"
-COMPATIBLE_HOST = '(x86_64.*|i.86.*|arm.*|aarch64.*|riscv.*)-(linux.*|freebsd.*)'
-COMPATIBLE_HOST:armv7a = 'null'
-COMPATIBLE_HOST:armv7ve = 'null'
+GRUB_COMPATIBLE_HOST = '(x86_64.*|i.86.*|arm.*|aarch64.*|riscv.*)-(linux.*|freebsd.*)'
+COMPATIBLE_HOST = "${GRUB_COMPATIBLE_HOST}"
+# Grub doesn't support hard float toolchain and won't be able to forcefully
+# disable it on some of the target CPUs. See 'configure.ac' for
+# supported/unsupported CPUs in hardfp.
+COMPATIBLE_HOST:armv7a = "${@'null' if bb.utils.contains('TUNE_CCARGS_MFLOAT', 'hard', True, False, d) else d.getVar('GRUB_COMPATIBLE_HOST')}"
+COMPATIBLE_HOST:armv7ve = "${@'null' if bb.utils.contains('TUNE_CCARGS_MFLOAT', 'hard', True, False, d) else d.getVar('GRUB_COMPATIBLE_HOST')}"
# configure.ac has code to set this automagically from the target tuple
# but the OE freeform one (core2-foo-bar-linux) don't work with that.
@@ -76,6 +79,11 @@ export PYTHON = "python3"
do_configure:prepend() {
cd ${S}
+
+ # Remove in next version.
+ # See: https://git.savannah.gnu.org/cgit/grub.git/commit/?id=b835601c7639ed1890f2d3db91900a8506011a8e
+ echo "depends bli part_gpt" > ${S}/grub-core/extra_deps.lst
+
FROM_BOOTSTRAP=1 ${S}/autogen.sh
cd ${B}
}
diff --git a/meta/recipes-bsp/grub/grub_2.06.bb b/meta/recipes-bsp/grub/grub_2.12.bb
index 05d462785c..05d462785c 100644
--- a/meta/recipes-bsp/grub/grub_2.06.bb
+++ b/meta/recipes-bsp/grub/grub_2.12.bb
diff --git a/meta/recipes-bsp/keymaps/keymaps_1.0.bb b/meta/recipes-bsp/keymaps/keymaps_1.0.bb
index 84d09cb965..3d5d127820 100644
--- a/meta/recipes-bsp/keymaps/keymaps_1.0.bb
+++ b/meta/recipes-bsp/keymaps/keymaps_1.0.bb
@@ -7,7 +7,6 @@ RDEPENDS:${PN} = "kbd"
LICENSE = "GPL-2.0-only"
LIC_FILES_CHKSUM = "file://keymap.sh;beginline=5;endline=5;md5=829e563511c9a1d6d41f17a7a4989d6a"
PACKAGE_ARCH = "${MACHINE_ARCH}"
-PR = "r31"
INHIBIT_DEFAULT_DEPS = "1"
diff --git a/meta/recipes-bsp/libacpi/libacpi_0.2.bb b/meta/recipes-bsp/libacpi/libacpi_0.2.bb
index 6d4be76bab..6aae34b310 100644
--- a/meta/recipes-bsp/libacpi/libacpi_0.2.bb
+++ b/meta/recipes-bsp/libacpi/libacpi_0.2.bb
@@ -5,7 +5,6 @@ SECTION = "base"
HOMEPAGE = "http://www.ngolde.de/libacpi.html"
LICENSE = "MIT"
LIC_FILES_CHKSUM = "file://LICENSE;md5=fec17f82f16630adf2dfb7d2a46f21c5"
-PR = "r6"
SRC_URI = "http://www.ngolde.de/download/libacpi-${PV}.tar.gz \
file://makefile-fix.patch \
diff --git a/meta/recipes-bsp/lrzsz/lrzsz_0.12.20.bb b/meta/recipes-bsp/lrzsz/lrzsz_0.12.20.bb
index a34fb4eb03..63edcbd864 100644
--- a/meta/recipes-bsp/lrzsz/lrzsz_0.12.20.bb
+++ b/meta/recipes-bsp/lrzsz/lrzsz_0.12.20.bb
@@ -10,7 +10,6 @@ LIC_FILES_CHKSUM = "file://COPYING;md5=0636e73ff0215e8d672dc4c32c317bb3 \
file://src/lrz.c;beginline=1;endline=10;md5=5276956373ff7d8758837f6399a1045f"
SECTION = "console/network"
DEPENDS = ""
-PR = "r6"
SRC_URI = "http://www.ohse.de/uwe/releases/lrzsz-${PV}.tar.gz \
file://autotools-update.patch \
diff --git a/meta/recipes-bsp/opensbi/opensbi-payloads.inc b/meta/recipes-bsp/opensbi/opensbi-payloads.inc
index e590a27573..a55679632e 100644
--- a/meta/recipes-bsp/opensbi/opensbi-payloads.inc
+++ b/meta/recipes-bsp/opensbi/opensbi-payloads.inc
@@ -1,13 +1,15 @@
def riscv_get_extra_oemake_image(d):
sbi_payload = d.getVar('RISCV_SBI_PAYLOAD')
- deploy_dir = d.getVar('DEPLOY_DIR_IMAGE')
-
if sbi_payload is None:
return ""
+ deploy_dir = d.getVar('DEPLOY_DIR_IMAGE')
+
return "FW_PAYLOAD_PATH=" + deploy_dir + "/" + sbi_payload
def riscv_get_extra_oemake_fdt(d):
+ if d.getVar('RISCV_SBI_PAYLOAD') is None:
+ return ""
sbi_fdt = d.getVar('RISCV_SBI_FDT')
deploy_dir = d.getVar('DEPLOY_DIR_IMAGE')
@@ -18,11 +20,11 @@ def riscv_get_extra_oemake_fdt(d):
def riscv_get_do_compile_depends(d):
sbi_payload = d.getVar('RISCV_SBI_PAYLOAD') or ""
- sbi_fdt = d.getVar('RISCV_SBI_FDT') or ""
-
- if sbi_payload == "" and sbi_fdt == "":
+ if sbi_payload == "":
return ""
+ sbi_fdt = d.getVar('RISCV_SBI_FDT') or ""
+
if sbi_fdt != "" and 'u-boot.bin' in sbi_payload:
return "virtual/kernel:do_deploy virtual/bootloader:do_deploy"
diff --git a/meta/recipes-bsp/opensbi/opensbi_1.0.bb b/meta/recipes-bsp/opensbi/opensbi_1.0.bb
deleted file mode 100644
index 8430f62543..0000000000
--- a/meta/recipes-bsp/opensbi/opensbi_1.0.bb
+++ /dev/null
@@ -1,48 +0,0 @@
-SUMMARY = "RISC-V Open Source Supervisor Binary Interface (OpenSBI)"
-DESCRIPTION = "OpenSBI aims to provide an open-source and extensible implementation of the RISC-V SBI specification for a platform specific firmware (M-mode) and a general purpose OS, hypervisor or bootloader (S-mode or HS-mode). OpenSBI implementation can be easily extended by RISC-V platform or System-on-Chip vendors to fit a particular hadware configuration."
-HOMEPAGE = "https://github.com/riscv/opensbi"
-LICENSE = "BSD-2-Clause"
-LIC_FILES_CHKSUM = "file://COPYING.BSD;md5=42dd9555eb177f35150cf9aa240b61e5"
-
-require opensbi-payloads.inc
-
-inherit autotools-brokensep deploy
-
-SRCREV = "ce4c0188d96b2c20c2e08d24646a5e517fe15a4b"
-SRC_URI = "git://github.com/riscv/opensbi.git;branch=master;protocol=https \
- "
-
-S = "${WORKDIR}/git"
-
-EXTRA_OEMAKE += "PLATFORM=${RISCV_SBI_PLAT} I=${D} FW_PIC=n CLANG_TARGET= "
-# If RISCV_SBI_PAYLOAD is set then include it as a payload
-EXTRA_OEMAKE:append = " ${@riscv_get_extra_oemake_image(d)}"
-EXTRA_OEMAKE:append = " ${@riscv_get_extra_oemake_fdt(d)}"
-
-# Required if specifying a custom payload
-do_compile[depends] += "${@riscv_get_do_compile_depends(d)}"
-
-do_install:append() {
- # In the future these might be required as a dependency for other packages.
- # At the moment just delete them to avoid warnings
- rm -r ${D}/include
- rm -r ${D}/lib*
- rm -r ${D}/share/opensbi/*/${RISCV_SBI_PLAT}/firmware/payloads
-}
-
-do_deploy () {
- install -m 755 ${D}/share/opensbi/*/${RISCV_SBI_PLAT}/firmware/fw_payload.* ${DEPLOYDIR}/
- install -m 755 ${D}/share/opensbi/*/${RISCV_SBI_PLAT}/firmware/fw_jump.* ${DEPLOYDIR}/
- install -m 755 ${D}/share/opensbi/*/${RISCV_SBI_PLAT}/firmware/fw_dynamic.* ${DEPLOYDIR}/
-}
-
-addtask deploy before do_build after do_install
-
-FILES:${PN} += "/share/opensbi/*/${RISCV_SBI_PLAT}/firmware/fw_jump.*"
-FILES:${PN} += "/share/opensbi/*/${RISCV_SBI_PLAT}/firmware/fw_payload.*"
-FILES:${PN} += "/share/opensbi/*/${RISCV_SBI_PLAT}/firmware/fw_dynamic.*"
-
-COMPATIBLE_HOST = "(riscv64|riscv32).*"
-INHIBIT_PACKAGE_STRIP = "1"
-
-SECURITY_CFLAGS = ""
diff --git a/meta/recipes-bsp/opensbi/opensbi_1.4.bb b/meta/recipes-bsp/opensbi/opensbi_1.4.bb
new file mode 100644
index 0000000000..cf37a41176
--- /dev/null
+++ b/meta/recipes-bsp/opensbi/opensbi_1.4.bb
@@ -0,0 +1,47 @@
+SUMMARY = "RISC-V Open Source Supervisor Binary Interface (OpenSBI)"
+DESCRIPTION = "OpenSBI aims to provide an open-source and extensible implementation of the RISC-V SBI specification for a platform specific firmware (M-mode) and a general purpose OS, hypervisor or bootloader (S-mode or HS-mode). OpenSBI implementation can be easily extended by RISC-V platform or System-on-Chip vendors to fit a particular hadware configuration."
+HOMEPAGE = "https://github.com/riscv/opensbi"
+LICENSE = "BSD-2-Clause"
+LIC_FILES_CHKSUM = "file://COPYING.BSD;md5=42dd9555eb177f35150cf9aa240b61e5"
+
+require opensbi-payloads.inc
+
+inherit autotools-brokensep deploy
+
+SRCREV = "a2b255b88918715173942f2c5e1f97ac9e90c877"
+SRC_URI = "git://github.com/riscv/opensbi.git;branch=master;protocol=https"
+
+S = "${WORKDIR}/git"
+
+TARGET_CC_ARCH += "${LDFLAGS}"
+
+EXTRA_OEMAKE += "PLATFORM=${RISCV_SBI_PLAT} I=${D} FW_PIC=y CLANG_TARGET= "
+# If RISCV_SBI_PAYLOAD is set then include it as a payload
+EXTRA_OEMAKE:append = " ${@riscv_get_extra_oemake_image(d)}"
+EXTRA_OEMAKE:append = " ${@riscv_get_extra_oemake_fdt(d)}"
+
+# Required if specifying a custom payload
+do_compile[depends] += "${@riscv_get_do_compile_depends(d)}"
+
+do_install:append() {
+ # In the future these might be required as a dependency for other packages.
+ # At the moment just delete them to avoid warnings
+ rm -r ${D}/include
+ rm -r ${D}/lib*
+ rm -r ${D}/share/opensbi/*/${RISCV_SBI_PLAT}/firmware/payloads
+}
+
+do_deploy () {
+ install -m 755 ${D}/share/opensbi/*/${RISCV_SBI_PLAT}/firmware/fw_payload.* ${DEPLOYDIR}/
+ install -m 755 ${D}/share/opensbi/*/${RISCV_SBI_PLAT}/firmware/fw_jump.* ${DEPLOYDIR}/
+ install -m 755 ${D}/share/opensbi/*/${RISCV_SBI_PLAT}/firmware/fw_dynamic.* ${DEPLOYDIR}/
+}
+
+addtask deploy before do_build after do_install
+
+FILES:${PN} += "/share/opensbi/*/${RISCV_SBI_PLAT}/firmware/fw_jump.*"
+FILES:${PN} += "/share/opensbi/*/${RISCV_SBI_PLAT}/firmware/fw_payload.*"
+FILES:${PN} += "/share/opensbi/*/${RISCV_SBI_PLAT}/firmware/fw_dynamic.*"
+
+COMPATIBLE_HOST = "(riscv64|riscv32).*"
+INHIBIT_PACKAGE_STRIP = "1"
diff --git a/meta/recipes-bsp/pciutils/pciutils/configure.patch b/meta/recipes-bsp/pciutils/pciutils/configure.patch
index cf55b94808..5015cf4884 100644
--- a/meta/recipes-bsp/pciutils/pciutils/configure.patch
+++ b/meta/recipes-bsp/pciutils/pciutils/configure.patch
@@ -1,4 +1,4 @@
-From 502c01e180d9085fcbeaf2fb46239999c4f335d2 Mon Sep 17 00:00:00 2001
+From 561216c8cbc280aaa9aecf30cb11835a4a0a78ed Mon Sep 17 00:00:00 2001
From: Richard Purdie <rpurdie@linux.intel.com>
Date: Wed, 31 Dec 2008 17:20:38 +0000
Subject: [PATCH] pciutils: Upgarde 2.2.4 -> 3.0.3
@@ -17,27 +17,26 @@ Ionut Radu <ionutx.radu@intel.com>
2017/6/15 - RP - Cleanups and merging patches
Upstream-Status: Inappropriate [embedded specific]
-
---
Makefile | 2 +-
lib/configure | 14 ++++++++++----
2 files changed, 11 insertions(+), 5 deletions(-)
diff --git a/Makefile b/Makefile
-index 9319bb4..78a2d54 100644
+index aaec04e..9c1dab0 100644
--- a/Makefile
+++ b/Makefile
-@@ -108,7 +108,7 @@ example$(EXEEXT): example.o lib/$(PCILIB)
- example.o: example.c $(PCIINC)
+@@ -123,7 +123,7 @@ pcilmr$(EXEEXT): pcilmr.o $(LMROBJS) $(COMMON) lib/$(PCIIMPLIB)
+ pcilmr.o: pcilmr.c $(LMRINC)
%$(EXEEXT): %.o
- $(CC) $(LDFLAGS) $(TARGET_ARCH) $^ $(LDLIBS) -o $@
+ $(CC) $(LDFLAGS) $(TARGET_ARCH) $^ $(LIB_LDLIBS) $(LDLIBS) -o $@
- %.8 %.7 %.5: %.man
- M=`echo $(DATE) | sed 's/-01-/-January-/;s/-02-/-February-/;s/-03-/-March-/;s/-04-/-April-/;s/-05-/-May-/;s/-06-/-June-/;s/-07-/-July-/;s/-08-/-August-/;s/-09-/-September-/;s/-10-/-October-/;s/-11-/-November-/;s/-12-/-December-/;s/\(.*\)-\(.*\)-\(.*\)/\3 \2 \1/'` ; sed <$< >$@ "s/@TODAY@/$$M/;s/@VERSION@/pciutils-$(VERSION)/;s#@IDSDIR@#$(IDSDIR)#;s#@PCI_IDS@#$(PCI_IDS)#"
+ ifdef PCI_OS_WINDOWS
+ comma := ,
diff --git a/lib/configure b/lib/configure
-index 45a416a..1afdaa6 100755
+index 3df057a..c87e71c 100755
--- a/lib/configure
+++ b/lib/configure
@@ -9,6 +9,10 @@ echo_n() {
diff --git a/meta/recipes-bsp/pciutils/pciutils_3.11.1.bb b/meta/recipes-bsp/pciutils/pciutils_3.11.1.bb
new file mode 100644
index 0000000000..044074ccc3
--- /dev/null
+++ b/meta/recipes-bsp/pciutils/pciutils_3.11.1.bb
@@ -0,0 +1,64 @@
+SUMMARY = "PCI utilities"
+DESCRIPTION = 'The PCI Utilities package contains a library for portable access \
+to PCI bus configuration space and several utilities based on this library.'
+HOMEPAGE = "http://atrey.karlin.mff.cuni.cz/~mj/pciutils.shtml"
+SECTION = "console/utils"
+
+LICENSE = "GPL-2.0-or-later"
+LIC_FILES_CHKSUM = "file://COPYING;md5=751419260aa954499f7abaabaa882bbe"
+# Can drop make-native when all systems have make 4.3
+# https://git.savannah.gnu.org/cgit/make.git/commit/?id=b90fabc8d6f34fb37d428dc0fb1b8b1951a9fbed
+# causes space issues in lib/libpci.pc
+DEPENDS = "zlib kmod make-native"
+
+SRC_URI = "${KERNELORG_MIRROR}/software/utils/pciutils/pciutils-${PV}.tar.xz \
+ file://configure.patch"
+
+SRC_URI[sha256sum] = "3f472ad864473de5ba17f765cc96ef5f33e1b730918d3adda6f945a2a9290df4"
+
+inherit multilib_header pkgconfig update-alternatives
+
+PACKAGECONFIG ??= "${@bb.utils.contains('DISTRO_FEATURES', 'systemd', 'hwdb', '', d)}"
+PACKAGECONFIG[hwdb] = "HWDB=yes,HWDB=no,udev"
+
+PCI_CONF_FLAG = "ZLIB=yes DNS=yes SHARED=yes STRIP= LIBDIR=${libdir}"
+
+# see configure.patch
+do_configure () {
+ (
+ cd lib && \
+ # PACKAGECONFIG_CONFARGS for this recipe could only possibly contain 'HWDB=yes/no',
+ # so we put it before ./configure
+ ${PCI_CONF_FLAG} ${PACKAGECONFIG_CONFARGS} ./configure ${PV} ${datadir} ${TARGET_OS} ${TARGET_ARCH}
+ )
+}
+
+export PREFIX = "${prefix}"
+export SBINDIR = "${sbindir}"
+export SHAREDIR = "${datadir}"
+export MANDIR = "${mandir}"
+
+EXTRA_OEMAKE = "-e MAKEFLAGS= ${PCI_CONF_FLAG}"
+
+ASNEEDED = ""
+
+# The configure script breaks if the HOST variable is set
+HOST[unexport] = "1"
+
+do_install () {
+ oe_runmake DESTDIR=${D} install install-lib
+
+ install -d ${D}${bindir}
+
+ oe_multilib_header pci/config.h
+}
+
+PACKAGES =+ "${PN}-ids libpci"
+FILES:${PN}-ids = "${datadir}/pci.ids*"
+FILES:libpci = "${libdir}/libpci.so.*"
+SUMMARY:${PN}-ids = "PCI utilities - device ID database"
+DESCRIPTION:${PN}-ids = "Package providing the PCI device ID database for pciutils."
+RDEPENDS:${PN} += "${PN}-ids"
+
+ALTERNATIVE:${PN} = "lspci"
+ALTERNATIVE_PRIORITY = "100"
diff --git a/meta/recipes-bsp/pciutils/pciutils_3.8.0.bb b/meta/recipes-bsp/pciutils/pciutils_3.8.0.bb
deleted file mode 100644
index f3a67d97e4..0000000000
--- a/meta/recipes-bsp/pciutils/pciutils_3.8.0.bb
+++ /dev/null
@@ -1,61 +0,0 @@
-SUMMARY = "PCI utilities"
-DESCRIPTION = 'The PCI Utilities package contains a library for portable access \
-to PCI bus configuration space and several utilities based on this library.'
-HOMEPAGE = "http://atrey.karlin.mff.cuni.cz/~mj/pciutils.shtml"
-SECTION = "console/utils"
-
-LICENSE = "GPL-2.0-or-later"
-LIC_FILES_CHKSUM = "file://COPYING;md5=751419260aa954499f7abaabaa882bbe"
-# Can drop make-native when all systems have make 4.3
-# https://git.savannah.gnu.org/cgit/make.git/commit/?id=b90fabc8d6f34fb37d428dc0fb1b8b1951a9fbed
-# causes space issues in lib/libpci.pc
-DEPENDS = "zlib kmod make-native"
-
-SRC_URI = "${KERNELORG_MIRROR}/software/utils/pciutils/pciutils-${PV}.tar.xz \
- file://configure.patch"
-
-SRC_URI[sha256sum] = "91edbd0429a84705c9ad156d4ff38ccc724d41ea54c4c5b88e38e996f8a34f05"
-
-inherit multilib_header pkgconfig
-
-PACKAGECONFIG ??= "${@bb.utils.contains('DISTRO_FEATURES', 'systemd', 'hwdb', '', d)}"
-PACKAGECONFIG[hwdb] = "HWDB=yes,HWDB=no,udev"
-
-PCI_CONF_FLAG = "ZLIB=yes DNS=yes SHARED=yes STRIP= LIBDIR=${libdir}"
-
-# see configure.patch
-do_configure () {
- (
- cd lib && \
- # PACKAGECONFIG_CONFARGS for this recipe could only possibly contain 'HWDB=yes/no',
- # so we put it before ./configure
- ${PCI_CONF_FLAG} ${PACKAGECONFIG_CONFARGS} ./configure ${PV} ${datadir} ${TARGET_OS} ${TARGET_ARCH}
- )
-}
-
-export PREFIX = "${prefix}"
-export SBINDIR = "${sbindir}"
-export SHAREDIR = "${datadir}"
-export MANDIR = "${mandir}"
-
-EXTRA_OEMAKE = "-e MAKEFLAGS= ${PCI_CONF_FLAG}"
-
-ASNEEDED = ""
-
-# The configure script breaks if the HOST variable is set
-HOST[unexport] = "1"
-
-do_install () {
- oe_runmake DESTDIR=${D} install install-lib
-
- install -d ${D}${bindir}
-
- oe_multilib_header pci/config.h
-}
-
-PACKAGES =+ "${PN}-ids libpci"
-FILES:${PN}-ids = "${datadir}/pci.ids*"
-FILES:libpci = "${libdir}/libpci.so.*"
-SUMMARY:${PN}-ids = "PCI utilities - device ID database"
-DESCRIPTION:${PN}-ids = "Package providing the PCI device ID database for pciutils."
-RDEPENDS:${PN} += "${PN}-ids"
diff --git a/meta/recipes-bsp/pm-utils/pm-utils_1.4.1.bb b/meta/recipes-bsp/pm-utils/pm-utils_1.4.1.bb
index c6a4bc4932..c7b95e57b0 100644
--- a/meta/recipes-bsp/pm-utils/pm-utils_1.4.1.bb
+++ b/meta/recipes-bsp/pm-utils/pm-utils_1.4.1.bb
@@ -6,7 +6,6 @@ LICENSE = "GPL-2.0-only"
LIC_FILES_CHKSUM = "file://COPYING;md5=94d55d512a9ba36caa9b7df079bae19f \
file://src/pm-pmu.c;beginline=1;endline=22;md5=3c1ddbc54e735fb4a0386e14c78a3147"
-PR = "r1"
SRC_URI = "http://pm-utils.freedesktop.org/releases/pm-utils-${PV}.tar.gz"
@@ -17,11 +16,14 @@ inherit pkgconfig autotools manpages
PACKAGECONFIG[manpages] = "--enable-doc, --disable-doc, libxslt-native xmlto-native"
-RDEPENDS:${PN} = "grep bash"
+RDEPENDS:${PN} = "bash"
+
+EXTRA_OECONF = "--libdir=${nonarch_libdir}"
do_configure:prepend () {
( cd ${S}; autoreconf -f -i -s )
}
-FILES:${PN} += "${libdir}/${BPN}/*"
+FILES:${PN} += "${nonarch_libdir}/${BPN}/*"
FILES:${PN}-dbg += "${datadir}/doc/pm-utils/README.debugging"
+FILES:${PN}-dev += "${nonarch_libdir}/pkgconfig/pm-utils.pc"
diff --git a/meta/recipes-bsp/setserial/setserial/0001-setserial.c-Add-needed-system-headers-for-ioctl-and-.patch b/meta/recipes-bsp/setserial/setserial/0001-setserial.c-Add-needed-system-headers-for-ioctl-and-.patch
new file mode 100644
index 0000000000..10c6ae881b
--- /dev/null
+++ b/meta/recipes-bsp/setserial/setserial/0001-setserial.c-Add-needed-system-headers-for-ioctl-and-.patch
@@ -0,0 +1,41 @@
+From 9bbb342f5d9ad5dc75486fd35ada8e287ba19299 Mon Sep 17 00:00:00 2001
+From: Khem Raj <raj.khem@gmail.com>
+Date: Mon, 15 Aug 2022 13:03:17 -0700
+Subject: [PATCH] setserial.c: Add needed system headers for ioctl() and
+ close() calls
+
+Add int return type for main() function
+
+Fixes
+error: type specifier missing, defaults to 'int'; ISO C99 and later do not support implicit int [-Wimplicit-int]
+error: call to undeclared function 'close'; ISO C99 and later do not support implicit function declarations [-Wimplicit-function-declarat
+ion]
+
+Upstream-Status: Submitted [https://sourceforge.net/p/setserial/discussion/7060/thread/95d874c12c/]
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+---
+ setserial.c | 4 +++-
+ 1 file changed, 3 insertions(+), 1 deletion(-)
+
+diff --git a/setserial.c b/setserial.c
+index bfda8fd..6a95513 100644
+--- a/setserial.c
++++ b/setserial.c
+@@ -16,6 +16,8 @@
+ #include <termios.h>
+ #include <string.h>
+ #include <errno.h>
++#include <unistd.h>
++#include <sys/ioctl.h>
+
+ #ifdef HAVE_ASM_IOCTLS_H
+ #include <asm/ioctls.h>
+@@ -715,7 +717,7 @@ fprintf(stderr, "\t* port\t\tset the I/O port\n");
+ exit(1);
+ }
+
+-main(int argc, char **argv)
++int main(int argc, char **argv)
+ {
+ int get_flag = 0, wild_intr_flag = 0;
+ int c;
diff --git a/meta/recipes-bsp/setserial/setserial_2.17.bb b/meta/recipes-bsp/setserial/setserial_2.17.bb
index 5b31cd183b..98ab45145f 100644
--- a/meta/recipes-bsp/setserial/setserial_2.17.bb
+++ b/meta/recipes-bsp/setserial/setserial_2.17.bb
@@ -1,12 +1,10 @@
SUMMARY = "Controls the configuration of serial ports"
DESCRIPTION = "setserial is a program designed to set and/or report the configuration information associated with a serial port"
HOMEPAGE = "http://setserial.sourceforge.net"
-AUTHOR = "Theodore Ts'o <tytso@mit.edu>"
SECTION = "console/utils"
LICENSE = "GPL-2.0-only"
LIC_FILES_CHKSUM = "file://version.h;beginline=1;endline=6;md5=2e7c59cb9e57e356ae81f50f4e4dfd99"
-PR = "r3"
DEPENDS += "groff-native"
@@ -15,7 +13,8 @@ inherit autotools-brokensep
SRC_URI = "${SOURCEFORGE_MIRROR}/setserial/${BPN}-${PV}.tar.gz \
file://add_stdlib.patch \
file://ldflags.patch \
- "
+ file://0001-setserial.c-Add-needed-system-headers-for-ioctl-and-.patch \
+ "
SRC_URI[md5sum] = "c4867d72c41564318e0107745eb7a0f2"
SRC_URI[sha256sum] = "7e4487d320ac31558563424189435d396ddf77953bb23111a17a3d1487b5794a"
diff --git a/meta/recipes-bsp/u-boot/files/0001-riscv-fix-build-with-binutils-2.38.patch b/meta/recipes-bsp/u-boot/files/0001-riscv-fix-build-with-binutils-2.38.patch
deleted file mode 100644
index 3598329b99..0000000000
--- a/meta/recipes-bsp/u-boot/files/0001-riscv-fix-build-with-binutils-2.38.patch
+++ /dev/null
@@ -1,40 +0,0 @@
-From 26a7f6b1e4c5f715c03e59a623f0d620498b92cf Mon Sep 17 00:00:00 2001
-From: Khem Raj <raj.khem@gmail.com>
-Date: Sun, 13 Feb 2022 21:11:31 -0800
-Subject: [PATCH] riscv: fix build with binutils 2.38
-
-From version 2.38, binutils default to ISA spec version 20191213. This
-means that the csr read/write (csrr*/csrw*) instructions and fence.i
-instruction has separated from the `I` extension, become two standalone
-extensions: Zicsr and Zifencei.
-
-The fix is to specify those extensions explicitely in -march. However as
-older binutils version do not support this, we first need to detect
-that.
-
-Fixes
-arch/riscv/lib/cache.c: Assembler messages:
-arch/riscv/lib/cache.c:12: Error: unrecognized opcode `fence.i'
-
-Upstream-Status: Submitted []
-Signed-off-by: Khem Raj <raj.khem@gmail.com>
----
- arch/riscv/Makefile | 7 ++++++-
- 1 file changed, 6 insertions(+), 1 deletion(-)
-
---- a/arch/riscv/Makefile
-+++ b/arch/riscv/Makefile
-@@ -28,7 +28,12 @@ ifeq ($(CONFIG_CMODEL_MEDANY),y)
- CMODEL = medany
- endif
-
--ARCH_FLAGS = -march=$(ARCH_BASE)$(ARCH_A)$(ARCH_F)$(ARCH_D)$(ARCH_C) -mabi=$(ABI) \
-+# Newer binutils versions default to ISA spec version 20191213 which moves some
-+# instructions from the I extension to the Zicsr and Zifencei extensions.
-+toolchain-need-zicsr-zifencei := $(call cc-option-yn, -march=$(ARCH_BASE)$(ARCH_A)$(ARCH_F)$(ARCH_D)$(ARCH_C)_zicsr_zifencei)
-+zicsr_zifencei-$(toolchain-need-zicsr-zifencei) := _zicsr_zifencei
-+
-+ARCH_FLAGS = -march=$(ARCH_BASE)$(ARCH_A)$(ARCH_F)$(ARCH_D)$(ARCH_C)$(zicsr_zifencei-y) -mabi=$(ABI) \
- -mcmodel=$(CMODEL)
-
- PLATFORM_CPPFLAGS += $(ARCH_FLAGS)
diff --git a/meta/recipes-bsp/u-boot/files/0001-riscv32-Use-double-float-ABI-for-rv32.patch b/meta/recipes-bsp/u-boot/files/0001-riscv32-Use-double-float-ABI-for-rv32.patch
deleted file mode 100644
index 0bf1bef2c9..0000000000
--- a/meta/recipes-bsp/u-boot/files/0001-riscv32-Use-double-float-ABI-for-rv32.patch
+++ /dev/null
@@ -1,44 +0,0 @@
-From 66dfe0fa886f6289add06d1af8642ce2b5302852 Mon Sep 17 00:00:00 2001
-From: Khem Raj <raj.khem@gmail.com>
-Date: Tue, 9 Feb 2021 16:40:12 -0800
-Subject: [PATCH] riscv32: Use double-float ABI for rv32
-
-So it can use libgcc built with OE toolchain
-Fixes
-error: "can't link hard-float modules with soft-float modules"
-
-Signed-off-by: Khem Raj <raj.khem@gmail.com>
-Upstream-Status: Inappropriate [embedded specific]
----
- arch/riscv/Makefile | 6 ++++--
- 1 file changed, 4 insertions(+), 2 deletions(-)
-
---- a/arch/riscv/Makefile
-+++ b/arch/riscv/Makefile
-@@ -5,11 +5,15 @@
-
- ifeq ($(CONFIG_ARCH_RV64I),y)
- ARCH_BASE = rv64im
-- ABI = lp64
-+ ABI = lp64d
-+ ARCH_D = d
-+ ARCH_F = f
- endif
- ifeq ($(CONFIG_ARCH_RV32I),y)
- ARCH_BASE = rv32im
-- ABI = ilp32
-+ ABI = ilp32d
-+ ARCH_D = d
-+ ARCH_F = f
- endif
- ifeq ($(CONFIG_RISCV_ISA_A),y)
- ARCH_A = a
-@@ -24,7 +28,7 @@ ifeq ($(CONFIG_CMODEL_MEDANY),y)
- CMODEL = medany
- endif
-
--ARCH_FLAGS = -march=$(ARCH_BASE)$(ARCH_A)$(ARCH_C) -mabi=$(ABI) \
-+ARCH_FLAGS = -march=$(ARCH_BASE)$(ARCH_A)$(ARCH_F)$(ARCH_D)$(ARCH_C) -mabi=$(ABI) \
- -mcmodel=$(CMODEL)
-
- PLATFORM_CPPFLAGS += $(ARCH_FLAGS)
diff --git a/meta/recipes-bsp/u-boot/libubootenv_0.3.2.bb b/meta/recipes-bsp/u-boot/libubootenv_0.3.2.bb
deleted file mode 100644
index e8f58941cf..0000000000
--- a/meta/recipes-bsp/u-boot/libubootenv_0.3.2.bb
+++ /dev/null
@@ -1,26 +0,0 @@
-SUMMARY = "U-Boot libraries and tools to access environment"
-
-DESCRIPTION = "This package contains tools and libraries to read \
-and modify U-Boot environment. \
-It provides a hardware-independent replacement for fw_printenv/setenv utilities \
-provided by U-Boot"
-
-HOMEPAGE = "https://github.com/sbabic/libubootenv"
-LICENSE = "LGPL-2.1-only"
-LIC_FILES_CHKSUM = "file://Licenses/lgpl-2.1.txt;md5=4fbd65380cdd255951079008b364516c"
-SECTION = "libs"
-
-SRC_URI = "git://github.com/sbabic/libubootenv;protocol=https;branch=master"
-SRCREV = "ba7564f5006d09bec51058cf4f5ac90d4dc18b3c"
-
-S = "${WORKDIR}/git"
-
-inherit cmake lib_package
-
-EXTRA_OECMAKE = "-DCMAKE_BUILD_TYPE=Release"
-
-DEPENDS = "zlib"
-PROVIDES += "u-boot-fw-utils"
-RPROVIDES:${PN}-bin += "u-boot-fw-utils"
-
-BBCLASSEXTEND = "native"
diff --git a/meta/recipes-bsp/u-boot/libubootenv_0.3.5.bb b/meta/recipes-bsp/u-boot/libubootenv_0.3.5.bb
new file mode 100644
index 0000000000..b68bbb430a
--- /dev/null
+++ b/meta/recipes-bsp/u-boot/libubootenv_0.3.5.bb
@@ -0,0 +1,26 @@
+SUMMARY = "U-Boot libraries and tools to access environment"
+
+DESCRIPTION = "This package contains tools and libraries to read \
+and modify U-Boot environment. \
+It provides a hardware-independent replacement for fw_printenv/setenv utilities \
+provided by U-Boot"
+
+HOMEPAGE = "https://github.com/sbabic/libubootenv"
+LICENSE = "LGPL-2.1-or-later"
+LIC_FILES_CHKSUM = "file://LICENSES/LGPL-2.1-or-later.txt;md5=4fbd65380cdd255951079008b364516c"
+SECTION = "libs"
+
+SRC_URI = "git://github.com/sbabic/libubootenv;protocol=https;branch=master"
+SRCREV = "3f4d15e36ceb58085b08dd13f3f2788e9299877b"
+
+S = "${WORKDIR}/git"
+
+inherit cmake lib_package
+
+EXTRA_OECMAKE = "-DCMAKE_BUILD_TYPE=Release"
+
+DEPENDS = "zlib libyaml"
+PROVIDES += "u-boot-fw-utils"
+RPROVIDES:${PN}-bin += "u-boot-fw-utils"
+
+BBCLASSEXTEND = "native"
diff --git a/meta/recipes-bsp/u-boot/u-boot-common.inc b/meta/recipes-bsp/u-boot/u-boot-common.inc
index 8fc33608d0..ca5357392a 100644
--- a/meta/recipes-bsp/u-boot/u-boot-common.inc
+++ b/meta/recipes-bsp/u-boot/u-boot-common.inc
@@ -4,15 +4,15 @@ ARM, MIPS and several other processors, which can be installed in a boot \
ROM and used to initialize and test the hardware or to download and run \
application code."
SECTION = "bootloaders"
-DEPENDS += "flex-native bison-native"
+DEPENDS += "flex-native bison-native python3-setuptools-native"
LICENSE = "GPL-2.0-or-later"
-LIC_FILES_CHKSUM = "file://Licenses/README;md5=5a7450c57ffe5ae63fd732446b988025"
+LIC_FILES_CHKSUM = "file://Licenses/README;md5=2ca5f2c35c8cc335f0a19756634782f1"
PE = "1"
# We use the revision in order to avoid having to fetch it from the
# repo during parse
-SRCREV = "e4b6ebd3de982ae7185dbf689a030e73fd06e0d2"
+SRCREV = "25049ad560826f7dc1c4740883b0016014a59789"
SRC_URI = "git://source.denx.de/u-boot/u-boot.git;protocol=https;branch=master"
diff --git a/meta/recipes-bsp/u-boot/u-boot-configure.inc b/meta/recipes-bsp/u-boot/u-boot-configure.inc
index 04e0894752..378d675364 100644
--- a/meta/recipes-bsp/u-boot/u-boot-configure.inc
+++ b/meta/recipes-bsp/u-boot/u-boot-configure.inc
@@ -8,6 +8,8 @@ inherit uboot-config cml1
DEPENDS += "kern-tools-native"
+CONFIGURE_FILES = "${@d.getVar('UBOOT_MACHINE') or '.config'}"
+
do_configure () {
if [ -n "${UBOOT_CONFIG}" ]; then
unset i j
@@ -26,7 +28,6 @@ do_configure () {
unset j
done
unset i
- DEVTOOL_DISABLE_MENUCONFIG=true
else
if [ -n "${UBOOT_MACHINE}" ]; then
oe_runmake -C ${S} O=${B} ${UBOOT_MACHINE}
diff --git a/meta/recipes-bsp/u-boot/u-boot-tools.inc b/meta/recipes-bsp/u-boot/u-boot-tools.inc
index a8d7fab46d..f3010763c0 100644
--- a/meta/recipes-bsp/u-boot/u-boot-tools.inc
+++ b/meta/recipes-bsp/u-boot/u-boot-tools.inc
@@ -1,10 +1,13 @@
SUMMARY = "U-Boot bootloader tools"
-DEPENDS += "gnutls openssl util-linux"
+DEPENDS += "gnutls openssl util-linux swig-native"
-PROVIDES = "${MLPREFIX}u-boot-mkimage ${MLPREFIX}u-boot-mkenvimage"
-PROVIDES:class-native = "u-boot-mkimage-native u-boot-mkenvimage-native"
+inherit python3native
+export STAGING_INCDIR="${STAGING_INCDIR_NATIVE}"
-PACKAGES += "${PN}-mkimage ${PN}-mkenvimage"
+PROVIDES = "${MLPREFIX}u-boot-mkimage ${MLPREFIX}u-boot-mkenvimage ${MLPREFIX}u-boot-mkeficapsule"
+PROVIDES:class-native = "u-boot-mkimage-native u-boot-mkenvimage-native u-boot-mkeficapsule-native"
+
+PACKAGES += "${PN}-mkimage ${PN}-mkenvimage ${PN}-mkeficapsule"
# Required for backward compatibility with "u-boot-mkimage-xxx.bb"
RPROVIDES:${PN}-mkimage = "u-boot-mkimage"
@@ -21,6 +24,7 @@ SED_CONFIG_EFI:x86-64 = ''
SED_CONFIG_EFI:arm = ''
SED_CONFIG_EFI:armeb = ''
SED_CONFIG_EFI:aarch64 = ''
+SED_CONFIG_EFI:loongarch64 = ''
do_compile () {
# Yes, this is crazy. If you build on a system with git < 2.14 from scratch, the tree will
@@ -66,15 +70,20 @@ do_install () {
# fit_check_sign
install -m 0755 tools/fit_check_sign ${D}${bindir}/uboot-fit_check_sign
ln -sf uboot-fit_check_sign ${D}${bindir}/fit_check_sign
+
+ # mkeficapsule
+ install -m 0755 tools/mkeficapsule ${D}${bindir}/uboot-mkeficapsule
+ ln -sf uboot-mkeficapsule ${D}${bindir}/mkeficapsule
}
ALLOW_EMPTY:${PN} = "1"
FILES:${PN} = ""
FILES:${PN}-mkimage = "${bindir}/uboot-mkimage ${bindir}/mkimage ${bindir}/uboot-dumpimage ${bindir}/dumpimage ${bindir}/uboot-fit_check_sign ${bindir}/fit_check_sign"
FILES:${PN}-mkenvimage = "${bindir}/uboot-mkenvimage ${bindir}/mkenvimage"
+FILES:${PN}-mkeficapsule = "${bindir}/uboot-mkeficapsule ${bindir}/mkeficapsule"
RDEPENDS:${PN}-mkimage += "dtc"
-RDEPENDS:${PN} += "${PN}-mkimage ${PN}-mkenvimage"
+RDEPENDS:${PN} += "${PN}-mkimage ${PN}-mkenvimage ${PN}-mkeficapsule"
RDEPENDS:${PN}:class-native = ""
BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-bsp/u-boot/u-boot-tools_2022.04.bb b/meta/recipes-bsp/u-boot/u-boot-tools_2024.04.bb
index 7eaf721ca8..7eaf721ca8 100644
--- a/meta/recipes-bsp/u-boot/u-boot-tools_2022.04.bb
+++ b/meta/recipes-bsp/u-boot/u-boot-tools_2024.04.bb
diff --git a/meta/recipes-bsp/u-boot/u-boot.inc b/meta/recipes-bsp/u-boot/u-boot.inc
index f022aed732..f5b43f6e36 100644
--- a/meta/recipes-bsp/u-boot/u-boot.inc
+++ b/meta/recipes-bsp/u-boot/u-boot.inc
@@ -5,11 +5,11 @@ PACKAGE_ARCH = "${MACHINE_ARCH}"
DEPENDS += "${@bb.utils.contains('UBOOT_ENV_SUFFIX', 'scr', 'u-boot-mkimage-native', '', d)}"
-inherit uboot-config uboot-extlinux-config uboot-sign deploy cml1 python3native kernel-arch
+inherit uboot-config uboot-extlinux-config uboot-sign deploy python3native kernel-arch
DEPENDS += "swig-native"
-EXTRA_OEMAKE = 'CROSS_COMPILE=${TARGET_PREFIX} CC="${TARGET_PREFIX}gcc ${TOOLCHAIN_OPTIONS}" V=1'
+EXTRA_OEMAKE = 'CROSS_COMPILE=${TARGET_PREFIX} CC="${TARGET_PREFIX}gcc ${TOOLCHAIN_OPTIONS} ${DEBUG_PREFIX_MAP}" V=1'
EXTRA_OEMAKE += 'HOSTCC="${BUILD_CC} ${BUILD_CFLAGS} ${BUILD_LDFLAGS}"'
EXTRA_OEMAKE += 'STAGING_INCDIR=${STAGING_INCDIR_NATIVE} STAGING_LIBDIR=${STAGING_LIBDIR_NATIVE}'
@@ -24,8 +24,19 @@ PACKAGECONFIG[openssl] = ",,openssl-native"
# file already exists it will not be overwritten.
UBOOT_LOCALVERSION ?= ""
+# Default name of u-boot initial env, but enable individual recipes to change
+# this value.
+UBOOT_INITIAL_ENV ?= "${PN}-initial-env"
+
require u-boot-configure.inc
+do_savedefconfig() {
+ bbplain "Saving defconfig to:\n${B}/defconfig"
+ oe_runmake -C ${B} savedefconfig
+}
+do_savedefconfig[nostamp] = "1"
+addtask savedefconfig after do_configure
+UBOOT_ARCH_DIR = "${@'arm' if d.getVar('UBOOT_ARCH').startswith('arm') else d.getVar('UBOOT_ARCH')}"
do_compile () {
if [ "${@bb.utils.filter('DISTRO_FEATURES', 'ld-is-gold', d)}" ]; then
sed -i 's/$(CROSS_COMPILE)ld$/$(CROSS_COMPILE)ld.bfd/g' ${S}/config.mk
@@ -199,6 +210,7 @@ FILES:${PN}-env = " \
FILES:${PN}-extlinux = "${UBOOT_EXTLINUX_INSTALL_DIR}/${UBOOT_EXTLINUX_CONF_NAME}"
RDEPENDS:${PN} += "${@bb.utils.contains('UBOOT_EXTLINUX', '1', '${PN}-extlinux', '', d)}"
+SYSROOT_DIRS += "/boot"
FILES:${PN} = "/boot ${datadir}"
RDEPENDS:${PN} += "${PN}-env"
@@ -305,17 +317,14 @@ do_deploy () {
unset i
else
install -m 644 ${B}/${SPL_BINARY} ${DEPLOYDIR}/${SPL_IMAGE}
- rm -f ${DEPLOYDIR}/${SPL_BINARYNAME} ${DEPLOYDIR}/${SPL_SYMLINK}
ln -sf ${SPL_IMAGE} ${DEPLOYDIR}/${SPL_BINARYNAME}
ln -sf ${SPL_IMAGE} ${DEPLOYDIR}/${SPL_SYMLINK}
fi
fi
-
if [ -n "${UBOOT_ENV}" ]
then
install -m 644 ${WORKDIR}/${UBOOT_ENV_BINARY} ${DEPLOYDIR}/${UBOOT_ENV_IMAGE}
- rm -f ${DEPLOYDIR}/${UBOOT_ENV_BINARY} ${DEPLOYDIR}/${UBOOT_ENV_SYMLINK}
ln -sf ${UBOOT_ENV_IMAGE} ${DEPLOYDIR}/${UBOOT_ENV_BINARY}
ln -sf ${UBOOT_ENV_IMAGE} ${DEPLOYDIR}/${UBOOT_ENV_SYMLINK}
fi
@@ -329,7 +338,7 @@ do_deploy () {
if [ -n "${UBOOT_DTB}" ]
then
- install -m 644 ${B}/arch/${UBOOT_ARCH}/dts/${UBOOT_DTB_BINARY} ${DEPLOYDIR}/
+ install -m 644 ${B}/arch/${UBOOT_ARCH_DIR}/dts/${UBOOT_DTB_BINARY} ${DEPLOYDIR}/
fi
}
diff --git a/meta/recipes-bsp/u-boot/u-boot_2022.04.bb b/meta/recipes-bsp/u-boot/u-boot_2022.04.bb
deleted file mode 100644
index 0d2464d74b..0000000000
--- a/meta/recipes-bsp/u-boot/u-boot_2022.04.bb
+++ /dev/null
@@ -1,9 +0,0 @@
-require u-boot-common.inc
-require u-boot.inc
-
-SRC_URI:append = " file://0001-riscv32-Use-double-float-ABI-for-rv32.patch \
- file://0001-riscv-fix-build-with-binutils-2.38.patch \
- "
-
-DEPENDS += "bc-native dtc-native python3-setuptools-native"
-
diff --git a/meta/recipes-bsp/u-boot/u-boot_2024.04.bb b/meta/recipes-bsp/u-boot/u-boot_2024.04.bb
new file mode 100644
index 0000000000..b15bcaa818
--- /dev/null
+++ b/meta/recipes-bsp/u-boot/u-boot_2024.04.bb
@@ -0,0 +1,5 @@
+require u-boot-common.inc
+require u-boot.inc
+
+DEPENDS += "bc-native dtc-native python3-pyelftools-native"
+
diff --git a/meta/recipes-bsp/usbinit/usbinit.bb b/meta/recipes-bsp/usbinit/usbinit.bb
index ffaca4b58d..3a50b835c2 100644
--- a/meta/recipes-bsp/usbinit/usbinit.bb
+++ b/meta/recipes-bsp/usbinit/usbinit.bb
@@ -7,7 +7,6 @@ HOMEPAGE = "http://linux-sunxi.org/USB_Gadget/Ethernet"
LICENSE = "GPL-2.0-only"
LIC_FILES_CHKSUM = "file://${WORKDIR}/COPYING.GPL;md5=751419260aa954499f7abaabaa882bbe"
-PR = "r3"
SRC_URI = "file://usb-gether \
file://COPYING.GPL"
diff --git a/meta/recipes-bsp/usbutils/usbutils/0001-usbutils.pc.in-Fix-Cflags-entry.patch b/meta/recipes-bsp/usbutils/usbutils/0001-usbutils.pc.in-Fix-Cflags-entry.patch
new file mode 100644
index 0000000000..039dd5cd72
--- /dev/null
+++ b/meta/recipes-bsp/usbutils/usbutils/0001-usbutils.pc.in-Fix-Cflags-entry.patch
@@ -0,0 +1,34 @@
+From f558919e858453a31313a3df35906de2e036940c Mon Sep 17 00:00:00 2001
+From: Fabio Estevam <festevam@denx.de>
+Date: Thu, 4 Jan 2024 03:32:11 +0100
+Subject: [PATCH] usbutils.pc.in: Fix Cflags entry
+
+When updating the usbutils version in OpenEmbedded from 015 to 017,
+the following QA error is seen:
+
+QA Issue: File /usr/lib/pkgconfig/usbutils.pc in package usbutils-dev contains reference to TMPDIR [buildpaths]
+
+As this causes reproducibility problem due to the host PC path being
+leaked, it is treated as error.
+
+Fix it by using the standard Cflags entry.
+
+Upstream-Status: Submitted [https://github.com/gregkh/usbutils/pull/184/commits/3b3e5e1ebea7060bfa118d25a91b816dfa176b31]
+Signed-off-by: Fabio Estevam <festevam@denx.de>
+---
+ usbutils.pc.in | 2 +-
+ 1 file changed, 1 insertion(+), 1 deletion(-)
+
+diff --git a/usbutils.pc.in b/usbutils.pc.in
+index e69778c3195c..0aa0005de951 100644
+--- a/usbutils.pc.in
++++ b/usbutils.pc.in
+@@ -15,4 +15,4 @@ Requires: libusb-1.0 >= 1.0.14 libudev >= 196
+ Conflicts:
+ Libs: -L${libdir}
+ Libs.private: @LIBUSB_LIBS@ @UDEV_LIBS@
+-Cflags: @CFLAGS@ @LIBUSB_CFLAGS@ @UDEV_CFLAGS@
++Cflags: -I${includedir}
+--
+2.34.1
+
diff --git a/meta/recipes-bsp/usbutils/usbutils_014.bb b/meta/recipes-bsp/usbutils/usbutils_014.bb
deleted file mode 100644
index e728f1a190..0000000000
--- a/meta/recipes-bsp/usbutils/usbutils_014.bb
+++ /dev/null
@@ -1,32 +0,0 @@
-SUMMARY = "Host side USB console utilities"
-DESCRIPTION = "Contains the lsusb utility for inspecting the devices connected to the USB bus."
-HOMEPAGE = "http://www.linux-usb.org"
-SECTION = "base"
-
-LICENSE = "GPL-2.0-or-later & (GPL-2.0-only | GPL-3.0-only)"
-# License files went missing in 010, when 011 is released add LICENSES/* back
-LIC_FILES_CHKSUM = "file://lsusb.c;endline=1;md5=7226e442a172bcf25807246d7ef1eba1 \
- file://lsusb.py.in;beginline=2;endline=2;md5=c443ada211d701156e42ea36d41625b3 \
- "
-
-DEPENDS = "libusb1 virtual/libiconv udev"
-
-SRC_URI = "${KERNELORG_MIRROR}/linux/utils/usb/usbutils/usbutils-${PV}.tar.gz \
- "
-SRC_URI[sha256sum] = "59398ab012888dfe0fd12e447b45f36801e9d7b71d9a865fc38e2f549afdb9d0"
-
-inherit autotools pkgconfig update-alternatives
-
-ALTERNATIVE:${PN} = "lsusb"
-ALTERNATIVE_PRIORITY = "100"
-
-# The binaries are mostly GPL-2.0-or-later apart from lsusb.py which is
-# GPL-2.0-only or GPL-3.0-only.
-LICENSE:${PN} = "GPL-2.0-or-later"
-LICENSE:${PN}-python = "GPL-2.0-only | GPL-3.0-only"
-
-RRECOMMENDS:${PN} = "udev-hwdb"
-
-PACKAGE_BEFORE_PN =+ "${PN}-python"
-FILES:${PN}-python += "${bindir}/lsusb.py"
-RDEPENDS:${PN}-python = "python3-core"
diff --git a/meta/recipes-bsp/usbutils/usbutils_017.bb b/meta/recipes-bsp/usbutils/usbutils_017.bb
new file mode 100644
index 0000000000..a2e340ea4f
--- /dev/null
+++ b/meta/recipes-bsp/usbutils/usbutils_017.bb
@@ -0,0 +1,33 @@
+SUMMARY = "Host side USB console utilities"
+DESCRIPTION = "Contains the lsusb utility for inspecting the devices connected to the USB bus."
+HOMEPAGE = "http://www.linux-usb.org"
+SECTION = "base"
+
+LICENSE = "GPL-2.0-or-later & (GPL-2.0-only | GPL-3.0-only)"
+# License files went missing in 010, when 011 is released add LICENSES/* back
+LIC_FILES_CHKSUM = "file://lsusb.c;endline=1;md5=7226e442a172bcf25807246d7ef1eba1 \
+ file://lsusb.py.in;beginline=2;endline=2;md5=c443ada211d701156e42ea36d41625b3 \
+ "
+
+DEPENDS = "libusb1 virtual/libiconv udev"
+
+SRC_URI = "${KERNELORG_MIRROR}/linux/utils/usb/usbutils/usbutils-${PV}.tar.gz \
+ file://0001-usbutils.pc.in-Fix-Cflags-entry.patch \
+ "
+SRC_URI[sha256sum] = "f704c4cb78a060db88b43aac6ebfd3d93c2c5cf1d6dd0e42936faaf00814ab00"
+
+inherit autotools pkgconfig update-alternatives
+
+ALTERNATIVE:${PN} = "lsusb"
+ALTERNATIVE_PRIORITY = "100"
+
+# The binaries are mostly GPL-2.0-or-later apart from lsusb.py which is
+# GPL-2.0-only or GPL-3.0-only.
+LICENSE:${PN} = "GPL-2.0-or-later"
+LICENSE:${PN}-python = "GPL-2.0-only | GPL-3.0-only"
+
+RRECOMMENDS:${PN} = "udev-hwdb"
+
+PACKAGE_BEFORE_PN =+ "${PN}-python"
+FILES:${PN}-python += "${bindir}/lsusb.py"
+RDEPENDS:${PN}-python = "python3-core"
diff --git a/meta/recipes-bsp/v86d/v86d_0.1.10.bb b/meta/recipes-bsp/v86d/v86d_0.1.10.bb
index 5f342b1120..6151f0a7e2 100644
--- a/meta/recipes-bsp/v86d/v86d_0.1.10.bb
+++ b/meta/recipes-bsp/v86d/v86d_0.1.10.bb
@@ -6,9 +6,7 @@ DESCRIPTION = "v86d provides a backend for kernel drivers that need to execute x
LICENSE = "GPL-2.0-only"
LIC_FILES_CHKSUM = "file://README;md5=94ac1971e4f2309dc322d598e7b1f7dd"
-DEPENDS = "virtual/kernel"
RRECOMMENDS:${PN} = "kernel-module-uvesafb"
-PR = "r2"
SRC_URI = "http://snapshot.debian.org/archive/debian/20110427T035506Z/pool/main/v/${BPN}/${BPN}_${PV}.orig.tar.gz \
file://Update-x86emu-from-X.org.patch \
diff --git a/meta/recipes-connectivity/avahi/avahi_0.8.bb b/meta/recipes-connectivity/avahi/avahi_0.8.bb
index 9bb5e5861e..1f18d4491d 100644
--- a/meta/recipes-connectivity/avahi/avahi_0.8.bb
+++ b/meta/recipes-connectivity/avahi/avahi_0.8.bb
@@ -5,9 +5,8 @@ with no specific configuration. This tool implements IPv4LL, "Dynamic Configurat
IPv4 Link-Local Addresses" (IETF RFC3927), a protocol for automatic IP address \
configuration from the link-local 169.254.0.0/16 range without the need for a central \
server.'
-AUTHOR = "Lennart Poettering <lennart@poettering.net>"
HOMEPAGE = "http://avahi.org"
-BUGTRACKER = "https://github.com/lathiat/avahi/issues"
+BUGTRACKER = "https://github.com/avahi/avahi/issues"
SECTION = "network"
# major part is under LGPL-2.1-or-later, but several .dtd, .xsl, initscripts and
@@ -19,23 +18,31 @@ LIC_FILES_CHKSUM = "file://LICENSE;md5=2d5025d4aa3495befef8f17206a5b0a1 \
file://avahi-daemon/main.c;endline=21;md5=9ee77368c5407af77caaef1b07285969 \
file://avahi-client/client.h;endline=23;md5=f4ac741a25c4f434039ba3e18c8674cf"
-SRC_URI = "https://github.com/lathiat/avahi/releases/download/v${PV}/avahi-${PV}.tar.gz \
+SRC_URI = "${GITHUB_BASE_URI}/download/v${PV}/avahi-${PV}.tar.gz \
file://00avahi-autoipd \
file://99avahi-autoipd \
file://initscript.patch \
file://0001-Fix-opening-etc-resolv.conf-error.patch \
file://handle-hup.patch \
file://local-ping.patch \
+ file://invalid-service.patch \
+ file://CVE-2023-1981.patch \
+ file://CVE-2023-38469-1.patch \
+ file://CVE-2023-38469-2.patch \
+ file://CVE-2023-38470-1.patch \
+ file://CVE-2023-38470-2.patch \
+ file://CVE-2023-38471-1.patch \
+ file://CVE-2023-38471-2.patch \
+ file://CVE-2023-38472.patch \
+ file://CVE-2023-38473.patch \
"
-UPSTREAM_CHECK_URI = "https://github.com/lathiat/avahi/releases/"
-SRC_URI[md5sum] = "229c6aa30674fc43c202b22c5f8c2be7"
+GITHUB_BASE_URI = "https://github.com/avahi/avahi/releases/"
SRC_URI[sha256sum] = "060309d7a333d38d951bc27598c677af1796934dbd98e1024e7ad8de798fedda"
-# Issue only affects Debian/SUSE, not us
-CVE_CHECK_IGNORE += "CVE-2021-26720"
+CVE_STATUS[CVE-2021-26720] = "not-applicable-platform: Issue only affects Debian/SUSE"
-DEPENDS = "expat libcap libdaemon glib-2.0"
+DEPENDS = "expat libcap libdaemon glib-2.0 glib-2.0-native"
# For gtk related PACKAGECONFIGs: gtk, gtk3
AVAHI_GTK ?= ""
@@ -48,7 +55,7 @@ PACKAGECONFIG[libdns_sd] = "--enable-compat-libdns_sd --enable-dbus,,dbus"
PACKAGECONFIG[libevent] = "--enable-libevent,--disable-libevent,libevent"
PACKAGECONFIG[qt5] = "--enable-qt5,--disable-qt5,qtbase"
-inherit autotools pkgconfig gettext gobject-introspection
+inherit autotools pkgconfig gettext gobject-introspection github-releases
EXTRA_OECONF = "--with-avahi-priv-access-group=adm \
--disable-stack-protector \
@@ -83,7 +90,6 @@ RRECOMMENDS:${PN}:append:libc-glibc = " libnss-mdns"
do_install() {
autotools_do_install
rm -rf ${D}/run
- rm -rf ${D}${datadir}/dbus-1/interfaces
test -d ${D}${datadir}/dbus-1 && rmdir --ignore-fail-on-non-empty ${D}${datadir}/dbus-1
rm -rf ${D}${libdir}/avahi
@@ -135,7 +141,7 @@ FILES:avahi-daemon = "${sbindir}/avahi-daemon \
${sysconfdir}/avahi/services \
${sysconfdir}/dbus-1 \
${sysconfdir}/init.d/avahi-daemon \
- ${datadir}/avahi/introspection/*.introspect \
+ ${datadir}/dbus-1/interfaces \
${datadir}/avahi/avahi-service.dtd \
${datadir}/avahi/service-types \
${datadir}/dbus-1/system-services"
@@ -147,8 +153,8 @@ FILES:libavahi-glib = "${libdir}/libavahi-glib.so.*"
FILES:libavahi-gobject = "${libdir}/libavahi-gobject.so.* ${libdir}/girepository-1.0/Avahi*.typelib"
FILES:avahi-utils = "${bindir}/avahi-* ${bindir}/b* ${datadir}/applications/b*"
-RDEPENDS:${PN}-dev = "avahi-daemon (= ${EXTENDPKGV}) libavahi-core (= ${EXTENDPKGV})"
-RDEPENDS:${PN}-dev += "${@["", " libavahi-client (= ${EXTENDPKGV})"][bb.utils.contains('PACKAGECONFIG', 'dbus', 1, 0, d)]}"
+DEV_PKG_DEPENDENCY = "avahi-daemon (= ${EXTENDPKGV}) libavahi-core (= ${EXTENDPKGV})"
+DEV_PKG_DEPENDENCY += "${@["", " libavahi-client (= ${EXTENDPKGV})"][bb.utils.contains('PACKAGECONFIG', 'dbus', 1, 0, d)]}"
RDEPENDS:${PN}-dnsconfd = "${PN}-daemon"
RRECOMMENDS:avahi-daemon:append:libc-glibc = " libnss-mdns"
diff --git a/meta/recipes-connectivity/avahi/files/CVE-2023-1981.patch b/meta/recipes-connectivity/avahi/files/CVE-2023-1981.patch
new file mode 100644
index 0000000000..4d7924d13a
--- /dev/null
+++ b/meta/recipes-connectivity/avahi/files/CVE-2023-1981.patch
@@ -0,0 +1,58 @@
+From a2696da2f2c50ac43b6c4903f72290d5c3fa9f6f Mon Sep 17 00:00:00 2001
+From: =?UTF-8?q?Petr=20Men=C5=A1=C3=ADk?= <pemensik@redhat.com>
+Date: Thu, 17 Nov 2022 01:51:53 +0100
+Subject: [PATCH] Emit error if requested service is not found
+
+It currently just crashes instead of replying with error. Check return
+value and emit error instead of passing NULL pointer to reply.
+
+Fixes #375
+
+Upstream-Status: Backport [import from ubuntu https://git.launchpad.net/ubuntu/+source/avahi/tree/debian/patches/CVE-2023-1981.patch?h=ubuntu/jammy-security
+Upstream commit https://github.com/lathiat/avahi/commit/a2696da2f2c50ac43b6c4903f72290d5c3fa9f6f]
+CVE: CVE-2023-1981
+Signed-off-by: Vijay Anusuri <vanusuri@mvista.com>
+---
+ avahi-daemon/dbus-protocol.c | 20 ++++++++++++++------
+ 1 file changed, 14 insertions(+), 6 deletions(-)
+
+diff --git a/avahi-daemon/dbus-protocol.c b/avahi-daemon/dbus-protocol.c
+index 70d7687bc..406d0b441 100644
+--- a/avahi-daemon/dbus-protocol.c
++++ b/avahi-daemon/dbus-protocol.c
+@@ -375,10 +375,14 @@ static DBusHandlerResult dbus_get_alternative_host_name(DBusConnection *c, DBusM
+ }
+
+ t = avahi_alternative_host_name(n);
+- avahi_dbus_respond_string(c, m, t);
+- avahi_free(t);
++ if (t) {
++ avahi_dbus_respond_string(c, m, t);
++ avahi_free(t);
+
+- return DBUS_HANDLER_RESULT_HANDLED;
++ return DBUS_HANDLER_RESULT_HANDLED;
++ } else {
++ return avahi_dbus_respond_error(c, m, AVAHI_ERR_NOT_FOUND, "Hostname not found");
++ }
+ }
+
+ static DBusHandlerResult dbus_get_alternative_service_name(DBusConnection *c, DBusMessage *m, DBusError *error) {
+@@ -389,10 +393,14 @@ static DBusHandlerResult dbus_get_alternative_service_name(DBusConnection *c, DB
+ }
+
+ t = avahi_alternative_service_name(n);
+- avahi_dbus_respond_string(c, m, t);
+- avahi_free(t);
++ if (t) {
++ avahi_dbus_respond_string(c, m, t);
++ avahi_free(t);
+
+- return DBUS_HANDLER_RESULT_HANDLED;
++ return DBUS_HANDLER_RESULT_HANDLED;
++ } else {
++ return avahi_dbus_respond_error(c, m, AVAHI_ERR_NOT_FOUND, "Service not found");
++ }
+ }
+
+ static DBusHandlerResult dbus_create_new_entry_group(DBusConnection *c, DBusMessage *m, DBusError *error) {
diff --git a/meta/recipes-connectivity/avahi/files/CVE-2023-38469-1.patch b/meta/recipes-connectivity/avahi/files/CVE-2023-38469-1.patch
new file mode 100644
index 0000000000..a078f66102
--- /dev/null
+++ b/meta/recipes-connectivity/avahi/files/CVE-2023-38469-1.patch
@@ -0,0 +1,48 @@
+From 72842945085cc3adaccfdfa2853771b0e75ef991 Mon Sep 17 00:00:00 2001
+From: Evgeny Vereshchagin <evvers@ya.ru>
+Date: Mon, 23 Oct 2023 20:29:31 +0000
+Subject: [PATCH] avahi: core: reject overly long TXT resource records
+
+Closes https://github.com/lathiat/avahi/issues/455
+
+Upstream-Status: Backport [https://github.com/lathiat/avahi/commit/a337a1ba7d15853fb56deef1f464529af6e3a1cf]
+CVE: CVE-2023-38469
+
+Signed-off-by: Meenali Gupta <meenali.gupta@windriver.com>
+---
+ avahi-core/rr.c | 9 ++++++++-
+ 1 file changed, 8 insertions(+), 1 deletion(-)
+
+diff --git a/avahi-core/rr.c b/avahi-core/rr.c
+index 7fa0bee..b03a24c 100644
+--- a/avahi-core/rr.c
++++ b/avahi-core/rr.c
+@@ -32,6 +32,7 @@
+ #include <avahi-common/malloc.h>
+ #include <avahi-common/defs.h>
+
++#include "dns.h"
+ #include "rr.h"
+ #include "log.h"
+ #include "util.h"
+@@ -688,11 +689,17 @@ int avahi_record_is_valid(AvahiRecord *r) {
+ case AVAHI_DNS_TYPE_TXT: {
+
+ AvahiStringList *strlst;
++ size_t used = 0;
+
+- for (strlst = r->data.txt.string_list; strlst; strlst = strlst->next)
++ for (strlst = r->data.txt.string_list; strlst; strlst = strlst->next) {
+ if (strlst->size > 255 || strlst->size <= 0)
+ return 0;
+
++ used += 1+strlst->size;
++ if (used > AVAHI_DNS_RDATA_MAX)
++ return 0;
++ }
++
+ return 1;
+ }
+ }
+--
+2.40.0
diff --git a/meta/recipes-connectivity/avahi/files/CVE-2023-38469-2.patch b/meta/recipes-connectivity/avahi/files/CVE-2023-38469-2.patch
new file mode 100644
index 0000000000..f8f60ddca1
--- /dev/null
+++ b/meta/recipes-connectivity/avahi/files/CVE-2023-38469-2.patch
@@ -0,0 +1,65 @@
+From c6cab87df290448a63323c8ca759baa516166237 Mon Sep 17 00:00:00 2001
+From: Evgeny Vereshchagin <evvers@ya.ru>
+Date: Wed, 25 Oct 2023 18:15:42 +0000
+Subject: [PATCH] tests: pass overly long TXT resource records
+
+to make sure they don't crash avahi any more.
+It reproduces https://github.com/lathiat/avahi/issues/455
+
+Canonical notes:
+nickgalanis> removed first hunk since there is no .github dir in this release
+
+Upstream-Status: Backport [import from ubuntu https://git.launchpad.net/ubuntu/+source/avahi/tree/debian/patches/CVE-2023-38469-2.patch?h=ubuntu/jammy-security
+Upstream commit https://github.com/lathiat/avahi/commit/c6cab87df290448a63323c8ca759baa516166237]
+CVE: CVE-2023-38469
+Signed-off-by: Vijay Anusuri <vanusuri@mvista.com>
+---
+ avahi-client/client-test.c | 14 ++++++++++++++
+ 1 files changed, 14 insertions(+)
+
+Index: avahi-0.8/avahi-client/client-test.c
+===================================================================
+--- avahi-0.8.orig/avahi-client/client-test.c
++++ avahi-0.8/avahi-client/client-test.c
+@@ -22,6 +22,7 @@
+ #endif
+
+ #include <stdio.h>
++#include <string.h>
+ #include <assert.h>
+
+ #include <avahi-client/client.h>
+@@ -33,6 +34,8 @@
+ #include <avahi-common/malloc.h>
+ #include <avahi-common/timeval.h>
+
++#include <avahi-core/dns.h>
++
+ static const AvahiPoll *poll_api = NULL;
+ static AvahiSimplePoll *simple_poll = NULL;
+
+@@ -222,6 +225,9 @@ int main (AVAHI_GCC_UNUSED int argc, AVA
+ uint32_t cookie;
+ struct timeval tv;
+ AvahiAddress a;
++ uint8_t rdata[AVAHI_DNS_RDATA_MAX+1];
++ AvahiStringList *txt = NULL;
++ int r;
+
+ simple_poll = avahi_simple_poll_new();
+ poll_api = avahi_simple_poll_get(simple_poll);
+@@ -258,6 +264,14 @@ int main (AVAHI_GCC_UNUSED int argc, AVA
+ printf("%s\n", avahi_strerror(avahi_entry_group_add_service (group, AVAHI_IF_UNSPEC, AVAHI_PROTO_UNSPEC, 0, "Lathiat's Site", "_http._tcp", NULL, NULL, 80, "foo=bar", NULL)));
+ printf("add_record: %d\n", avahi_entry_group_add_record (group, AVAHI_IF_UNSPEC, AVAHI_PROTO_UNSPEC, 0, "TestX", 0x01, 0x10, 120, "\5booya", 6));
+
++ memset(rdata, 1, sizeof(rdata));
++ r = avahi_string_list_parse(rdata, sizeof(rdata), &txt);
++ assert(r >= 0);
++ assert(avahi_string_list_serialize(txt, NULL, 0) == sizeof(rdata));
++ error = avahi_entry_group_add_service_strlst(group, AVAHI_IF_UNSPEC, AVAHI_PROTO_UNSPEC, 0, "TestX", "_qotd._tcp", NULL, NULL, 123, txt);
++ assert(error == AVAHI_ERR_INVALID_RECORD);
++ avahi_string_list_free(txt);
++
+ avahi_entry_group_commit (group);
+
+ domain = avahi_domain_browser_new (avahi, AVAHI_IF_UNSPEC, AVAHI_PROTO_UNSPEC, NULL, AVAHI_DOMAIN_BROWSER_BROWSE, 0, avahi_domain_browser_callback, (char*) "omghai3u");
diff --git a/meta/recipes-connectivity/avahi/files/CVE-2023-38470-1.patch b/meta/recipes-connectivity/avahi/files/CVE-2023-38470-1.patch
new file mode 100644
index 0000000000..91f9e677ac
--- /dev/null
+++ b/meta/recipes-connectivity/avahi/files/CVE-2023-38470-1.patch
@@ -0,0 +1,59 @@
+From af7bfad67ca53a7c4042a4a2d85456b847e9f249 Mon Sep 17 00:00:00 2001
+From: =?UTF-8?q?Petr=20Men=C5=A1=C3=ADk?= <pemensik@redhat.com>
+Date: Tue, 11 Apr 2023 15:29:59 +0200
+Subject: [PATCH] avahi: Ensure each label is at least one byte long
+
+The only allowed exception is single dot, where it should return empty
+string.
+
+Fixes #454.
+
+Upstream-Status: Backport [https://github.com/lathiat/avahi/commit/94cb6489114636940ac683515417990b55b5d66c]
+CVE: CVE-2023-38470
+
+Signed-off-by: Meenali Gupta <meenali.gupta@windriver.com>
+---
+ avahi-common/domain-test.c | 14 ++++++++++++++
+ avahi-common/domain.c | 2 +-
+ 2 files changed, 15 insertions(+), 1 deletion(-)
+
+diff --git a/avahi-common/domain-test.c b/avahi-common/domain-test.c
+index cf763ec..3acc1c1 100644
+--- a/avahi-common/domain-test.c
++++ b/avahi-common/domain-test.c
+@@ -45,6 +45,20 @@ int main(AVAHI_GCC_UNUSED int argc, AVAHI_GCC_UNUSED char *argv[]) {
+ printf("%s\n", s = avahi_normalize_name_strdup("fo\\\\o\\..f oo."));
+ avahi_free(s);
+
++ printf("%s\n", s = avahi_normalize_name_strdup("."));
++ avahi_free(s);
++
++ s = avahi_normalize_name_strdup(",.=.}.=.?-.}.=.?.?.}.}.?.?.?.z.?.?.}.}."
++ "}.?.?.?.r.=.=.}.=.?.}}.}.?.?.?.zM.=.=.?.?.}.}.?.?.}.}.}"
++ ".?.?.?.r.=.=.}.=.?.}}.}.?.?.?.zM.=.=.?.?.}.}.?.?.?.zM.?`"
++ "?.}.}.}.?.?.?.r.=.?.}.=.?.?.}.?.?.?.}.=.?.?.}??.}.}.?.?."
++ "?.z.?.?.}.}.}.?.?.?.r.=.=.}.=.?.}}.}.?.?.?.zM.?`?.}.}.}."
++ "??.?.zM.?`?.}.}.}.?.?.?.r.=.?.}.=.?.?.}.?.?.?.}.=.?.?.}?"
++ "?.}.}.?.?.?.z.?.?.}.}.}.?.?.?.r.=.=.}.=.?.}}.}.?.?.?.zM."
++ "?`?.}.}.}.?.?.?.r.=.=.?.?`.?.?}.}.}.?.?.?.r.=.?.}.=.?.?."
++ "}.?.?.?.}.=.?.?.}");
++ assert(s == NULL);
++
+ printf("%i\n", avahi_domain_equal("\\065aa bbb\\.\\046cc.cc\\\\.dee.fff.", "Aaa BBB\\.\\.cc.cc\\\\.dee.fff"));
+ printf("%i\n", avahi_domain_equal("A", "a"));
+
+diff --git a/avahi-common/domain.c b/avahi-common/domain.c
+index 3b1ab68..e66d241 100644
+--- a/avahi-common/domain.c
++++ b/avahi-common/domain.c
+@@ -201,7 +201,7 @@ char *avahi_normalize_name(const char *s, char *ret_s, size_t size) {
+ }
+
+ if (!empty) {
+- if (size < 1)
++ if (size < 2)
+ return NULL;
+
+ *(r++) = '.';
+--
+2.40.0
diff --git a/meta/recipes-connectivity/avahi/files/CVE-2023-38470-2.patch b/meta/recipes-connectivity/avahi/files/CVE-2023-38470-2.patch
new file mode 100644
index 0000000000..e0736bf210
--- /dev/null
+++ b/meta/recipes-connectivity/avahi/files/CVE-2023-38470-2.patch
@@ -0,0 +1,52 @@
+From 20dec84b2480821704258bc908e7b2bd2e883b24 Mon Sep 17 00:00:00 2001
+From: Evgeny Vereshchagin <evvers@ya.ru>
+Date: Tue, 19 Sep 2023 03:21:25 +0000
+Subject: [PATCH] [common] bail out when escaped labels can't fit into ret
+
+Fixes:
+```
+==93410==ERROR: AddressSanitizer: stack-buffer-overflow on address 0x7f9e76f14c16 at pc 0x00000047208d bp 0x7ffee90a6a00 sp 0x7ffee90a61c8
+READ of size 1110 at 0x7f9e76f14c16 thread T0
+ #0 0x47208c in __interceptor_strlen (out/fuzz-domain+0x47208c) (BuildId: 731b20c1eef22c2104e75a6496a399b10cfc7cba)
+ #1 0x534eb0 in avahi_strdup avahi/avahi-common/malloc.c:167:12
+ #2 0x53862c in avahi_normalize_name_strdup avahi/avahi-common/domain.c:226:12
+```
+and
+```
+fuzz-domain: fuzz/fuzz-domain.c:38: int LLVMFuzzerTestOneInput(const uint8_t *, size_t): Assertion `avahi_domain_equal(s, t)' failed.
+==101571== ERROR: libFuzzer: deadly signal
+ #0 0x501175 in __sanitizer_print_stack_trace (/home/vagrant/avahi/out/fuzz-domain+0x501175) (BuildId: 682bf6400aff9d41b64b6e2cc3ef5ad600216ea8)
+ #1 0x45ad2c in fuzzer::PrintStackTrace() (/home/vagrant/avahi/out/fuzz-domain+0x45ad2c) (BuildId: 682bf6400aff9d41b64b6e2cc3ef5ad600216ea8)
+ #2 0x43fc07 in fuzzer::Fuzzer::CrashCallback() (/home/vagrant/avahi/out/fuzz-domain+0x43fc07) (BuildId: 682bf6400aff9d41b64b6e2cc3ef5ad600216ea8)
+ #3 0x7f1581d7ebaf (/lib64/libc.so.6+0x3dbaf) (BuildId: c9f62793b9e886eb1b95077d4f26fe2b4aa1ac25)
+ #4 0x7f1581dcf883 in __pthread_kill_implementation (/lib64/libc.so.6+0x8e883) (BuildId: c9f62793b9e886eb1b95077d4f26fe2b4aa1ac25)
+ #5 0x7f1581d7eafd in gsignal (/lib64/libc.so.6+0x3dafd) (BuildId: c9f62793b9e886eb1b95077d4f26fe2b4aa1ac25)
+ #6 0x7f1581d6787e in abort (/lib64/libc.so.6+0x2687e) (BuildId: c9f62793b9e886eb1b95077d4f26fe2b4aa1ac25)
+ #7 0x7f1581d6779a in __assert_fail_base.cold (/lib64/libc.so.6+0x2679a) (BuildId: c9f62793b9e886eb1b95077d4f26fe2b4aa1ac25)
+ #8 0x7f1581d77186 in __assert_fail (/lib64/libc.so.6+0x36186) (BuildId: c9f62793b9e886eb1b95077d4f26fe2b4aa1ac25)
+ #9 0x5344a4 in LLVMFuzzerTestOneInput /home/vagrant/avahi/fuzz/fuzz-domain.c:38:9
+```
+
+It's a follow-up to 94cb6489114636940ac683515417990b55b5d66c
+
+Upstream-Status: Backport [import from ubuntu https://git.launchpad.net/ubuntu/+source/avahi/tree/debian/patches/CVE-2023-38470-2.patch?h=ubuntu/jammy-security
+CVE: CVE-2023-38470 #Follow-up patch
+Signed-off-by: Vijay Anusuri <vanusuri@mvista.com>
+---
+ avahi-common/domain.c | 3 ++-
+ 1 file changed, 2 insertions(+), 1 deletion(-)
+
+Index: avahi-0.8/avahi-common/domain.c
+===================================================================
+--- avahi-0.8.orig/avahi-common/domain.c
++++ avahi-0.8/avahi-common/domain.c
+@@ -210,7 +210,8 @@ char *avahi_normalize_name(const char *s
+ } else
+ empty = 0;
+
+- avahi_escape_label(label, strlen(label), &r, &size);
++ if (!(avahi_escape_label(label, strlen(label), &r, &size)))
++ return NULL;
+ }
+
+ return ret_s;
diff --git a/meta/recipes-connectivity/avahi/files/CVE-2023-38471-1.patch b/meta/recipes-connectivity/avahi/files/CVE-2023-38471-1.patch
new file mode 100644
index 0000000000..b3f716495d
--- /dev/null
+++ b/meta/recipes-connectivity/avahi/files/CVE-2023-38471-1.patch
@@ -0,0 +1,73 @@
+From 48d745db7fd554fc33e96ec86d3675ebd530bb8e Mon Sep 17 00:00:00 2001
+From: Michal Sekletar <msekleta@redhat.com>
+Date: Mon, 23 Oct 2023 13:38:35 +0200
+Subject: [PATCH] avahi: core: extract host name using avahi_unescape_label()
+
+Previously we could create invalid escape sequence when we split the
+string on dot. For example, from valid host name "foo\\.bar" we have
+created invalid name "foo\\" and tried to set that as the host name
+which crashed the daemon.
+
+Fixes #453
+
+Upstream-Status: Backport [https://github.com/lathiat/avahi/commit/894f085f402e023a98cbb6f5a3d117bd88d93b09]
+CVE: CVE-2023-38471
+
+Signed-off-by: Meenali Gupta <meenali.gupta@windriver.com>
+---
+ avahi-core/server.c | 27 +++++++++++++++++++++------
+ 1 file changed, 21 insertions(+), 6 deletions(-)
+
+diff --git a/avahi-core/server.c b/avahi-core/server.c
+index e507750..40f1d68 100644
+--- a/avahi-core/server.c
++++ b/avahi-core/server.c
+@@ -1295,7 +1295,11 @@ static void update_fqdn(AvahiServer *s) {
+ }
+
+ int avahi_server_set_host_name(AvahiServer *s, const char *host_name) {
+- char *hn = NULL;
++ char label_escaped[AVAHI_LABEL_MAX*4+1];
++ char label[AVAHI_LABEL_MAX];
++ char *hn = NULL, *h;
++ size_t len;
++
+ assert(s);
+
+ AVAHI_CHECK_VALIDITY(s, !host_name || avahi_is_valid_host_name(host_name), AVAHI_ERR_INVALID_HOST_NAME);
+@@ -1305,17 +1309,28 @@ int avahi_server_set_host_name(AvahiServer *s, const char *host_name) {
+ else
+ hn = avahi_normalize_name_strdup(host_name);
+
+- hn[strcspn(hn, ".")] = 0;
++ h = hn;
++ if (!avahi_unescape_label((const char **)&hn, label, sizeof(label))) {
++ avahi_free(h);
++ return AVAHI_ERR_INVALID_HOST_NAME;
++ }
++
++ avahi_free(h);
++
++ h = label_escaped;
++ len = sizeof(label_escaped);
++ if (!avahi_escape_label(label, strlen(label), &h, &len))
++ return AVAHI_ERR_INVALID_HOST_NAME;
+
+- if (avahi_domain_equal(s->host_name, hn) && s->state != AVAHI_SERVER_COLLISION) {
+- avahi_free(hn);
++ if (avahi_domain_equal(s->host_name, label_escaped) && s->state != AVAHI_SERVER_COLLISION)
+ return avahi_server_set_errno(s, AVAHI_ERR_NO_CHANGE);
+- }
+
+ withdraw_host_rrs(s);
+
+ avahi_free(s->host_name);
+- s->host_name = hn;
++ s->host_name = avahi_strdup(label_escaped);
++ if (!s->host_name)
++ return AVAHI_ERR_NO_MEMORY;
+
+ update_fqdn(s);
+
+--
+2.40.0
diff --git a/meta/recipes-connectivity/avahi/files/CVE-2023-38471-2.patch b/meta/recipes-connectivity/avahi/files/CVE-2023-38471-2.patch
new file mode 100644
index 0000000000..44737bfc2e
--- /dev/null
+++ b/meta/recipes-connectivity/avahi/files/CVE-2023-38471-2.patch
@@ -0,0 +1,52 @@
+From b675f70739f404342f7f78635d6e2dcd85a13460 Mon Sep 17 00:00:00 2001
+From: Evgeny Vereshchagin <evvers@ya.ru>
+Date: Tue, 24 Oct 2023 22:04:51 +0000
+Subject: [PATCH] core: return errors from avahi_server_set_host_name properly
+
+It's a follow-up to 894f085f402e023a98cbb6f5a3d117bd88d93b09
+
+Upstream-Status: Backport [import from ubuntu https://git.launchpad.net/ubuntu/+source/avahi/tree/debian/patches/CVE-2023-38471-2.patch?h=ubuntu/jammy-security
+Upstream commit https://github.com/lathiat/avahi/commit/b675f70739f404342f7f78635d6e2dcd85a13460]
+CVE: CVE-2023-38471 #Follow-up Patch
+Signed-off-by: Vijay Anusuri <vanusuri@mvista.com>
+---
+ avahi-core/server.c | 9 ++++++---
+ 1 file changed, 6 insertions(+), 3 deletions(-)
+
+Index: avahi-0.8/avahi-core/server.c
+===================================================================
+--- avahi-0.8.orig/avahi-core/server.c
++++ avahi-0.8/avahi-core/server.c
+@@ -1309,10 +1309,13 @@ int avahi_server_set_host_name(AvahiServ
+ else
+ hn = avahi_normalize_name_strdup(host_name);
+
++ if (!hn)
++ return avahi_server_set_errno(s, AVAHI_ERR_NO_MEMORY);
++
+ h = hn;
+ if (!avahi_unescape_label((const char **)&hn, label, sizeof(label))) {
+ avahi_free(h);
+- return AVAHI_ERR_INVALID_HOST_NAME;
++ return avahi_server_set_errno(s, AVAHI_ERR_INVALID_HOST_NAME);
+ }
+
+ avahi_free(h);
+@@ -1320,7 +1323,7 @@ int avahi_server_set_host_name(AvahiServ
+ h = label_escaped;
+ len = sizeof(label_escaped);
+ if (!avahi_escape_label(label, strlen(label), &h, &len))
+- return AVAHI_ERR_INVALID_HOST_NAME;
++ return avahi_server_set_errno(s, AVAHI_ERR_INVALID_HOST_NAME);
+
+ if (avahi_domain_equal(s->host_name, label_escaped) && s->state != AVAHI_SERVER_COLLISION)
+ return avahi_server_set_errno(s, AVAHI_ERR_NO_CHANGE);
+@@ -1330,7 +1333,7 @@ int avahi_server_set_host_name(AvahiServ
+ avahi_free(s->host_name);
+ s->host_name = avahi_strdup(label_escaped);
+ if (!s->host_name)
+- return AVAHI_ERR_NO_MEMORY;
++ return avahi_server_set_errno(s, AVAHI_ERR_NO_MEMORY);
+
+ update_fqdn(s);
+
diff --git a/meta/recipes-connectivity/avahi/files/CVE-2023-38472.patch b/meta/recipes-connectivity/avahi/files/CVE-2023-38472.patch
new file mode 100644
index 0000000000..85dbded73b
--- /dev/null
+++ b/meta/recipes-connectivity/avahi/files/CVE-2023-38472.patch
@@ -0,0 +1,46 @@
+From b024ae5749f4aeba03478e6391687c3c9c8dee40 Mon Sep 17 00:00:00 2001
+From: Michal Sekletar <msekleta@redhat.com>
+Date: Thu, 19 Oct 2023 17:36:44 +0200
+Subject: [PATCH] core: make sure there is rdata to process before parsing it
+
+Fixes #452
+
+CVE-2023-38472
+
+Upstream-Status: Backport [import from ubuntu https://git.launchpad.net/ubuntu/+source/avahi/tree/debian/patches/CVE-2023-38472.patch?h=ubuntu/jammy-security
+Upstream commit https://github.com/lathiat/avahi/commit/b024ae5749f4aeba03478e6391687c3c9c8dee40]
+CVE: CVE-2023-38472
+Signed-off-by: Meenali Gupta <meenali.gupta@windriver.com>
+Signed-off-by: Vijay Anusuri <vanusuri@mvista.com>
+---
+ avahi-client/client-test.c | 3 +++
+ avahi-daemon/dbus-entry-group.c | 2 +-
+ 2 files changed, 4 insertions(+), 1 deletion(-)
+
+Index: avahi-0.8/avahi-client/client-test.c
+===================================================================
+--- avahi-0.8.orig/avahi-client/client-test.c
++++ avahi-0.8/avahi-client/client-test.c
+@@ -272,6 +272,9 @@ int main (AVAHI_GCC_UNUSED int argc, AVA
+ assert(error == AVAHI_ERR_INVALID_RECORD);
+ avahi_string_list_free(txt);
+
++ error = avahi_entry_group_add_record (group, AVAHI_IF_UNSPEC, AVAHI_PROTO_UNSPEC, 0, "TestX", 0x01, 0x10, 120, "", 0);
++ assert(error != AVAHI_OK);
++
+ avahi_entry_group_commit (group);
+
+ domain = avahi_domain_browser_new (avahi, AVAHI_IF_UNSPEC, AVAHI_PROTO_UNSPEC, NULL, AVAHI_DOMAIN_BROWSER_BROWSE, 0, avahi_domain_browser_callback, (char*) "omghai3u");
+Index: avahi-0.8/avahi-daemon/dbus-entry-group.c
+===================================================================
+--- avahi-0.8.orig/avahi-daemon/dbus-entry-group.c
++++ avahi-0.8/avahi-daemon/dbus-entry-group.c
+@@ -340,7 +340,7 @@ DBusHandlerResult avahi_dbus_msg_entry_g
+ if (!(r = avahi_record_new_full (name, clazz, type, ttl)))
+ return avahi_dbus_respond_error(c, m, AVAHI_ERR_NO_MEMORY, NULL);
+
+- if (avahi_rdata_parse (r, rdata, size) < 0) {
++ if (!rdata || avahi_rdata_parse (r, rdata, size) < 0) {
+ avahi_record_unref (r);
+ return avahi_dbus_respond_error(c, m, AVAHI_ERR_INVALID_RDATA, NULL);
+ }
diff --git a/meta/recipes-connectivity/avahi/files/CVE-2023-38473.patch b/meta/recipes-connectivity/avahi/files/CVE-2023-38473.patch
new file mode 100644
index 0000000000..707acb60fe
--- /dev/null
+++ b/meta/recipes-connectivity/avahi/files/CVE-2023-38473.patch
@@ -0,0 +1,110 @@
+From 88cbbc48d5efff9726694557ca6c3f698f3affe4 Mon Sep 17 00:00:00 2001
+From: Michal Sekletar <msekleta@redhat.com>
+Date: Wed, 11 Oct 2023 17:45:44 +0200
+Subject: [PATCH] avahi: common: derive alternative host name from its
+ unescaped version
+
+Normalization of input makes sure we don't have to deal with special
+cases like unescaped dot at the end of label.
+
+Fixes #451 #487
+
+Upstream-Status: Backport [https://github.com/lathiat/avahi/commit/b448c9f771bada14ae8de175695a9729f8646797]
+CVE: CVE-2023-38473
+
+Signed-off-by: Meenali Gupta <meenali.gupta@windriver.com>
+---
+ avahi-common/alternative-test.c | 3 +++
+ avahi-common/alternative.c | 27 +++++++++++++++++++--------
+ 2 files changed, 22 insertions(+), 8 deletions(-)
+
+diff --git a/avahi-common/alternative-test.c b/avahi-common/alternative-test.c
+index 9255435..681fc15 100644
+--- a/avahi-common/alternative-test.c
++++ b/avahi-common/alternative-test.c
+@@ -31,6 +31,9 @@ int main(AVAHI_GCC_UNUSED int argc, AVAHI_GCC_UNUSED char *argv[]) {
+ const char* const test_strings[] = {
+ "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX",
+ "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXüüüüüüü",
++ ").",
++ "\\.",
++ "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA\\\\",
+ "gurke",
+ "-",
+ " #",
+diff --git a/avahi-common/alternative.c b/avahi-common/alternative.c
+index b3d39f0..a094e6d 100644
+--- a/avahi-common/alternative.c
++++ b/avahi-common/alternative.c
+@@ -49,15 +49,20 @@ static void drop_incomplete_utf8(char *c) {
+ }
+
+ char *avahi_alternative_host_name(const char *s) {
++ char label[AVAHI_LABEL_MAX], alternative[AVAHI_LABEL_MAX*4+1];
++ char *alt, *r, *ret;
+ const char *e;
+- char *r;
++ size_t len;
+
+ assert(s);
+
+ if (!avahi_is_valid_host_name(s))
+ return NULL;
+
+- if ((e = strrchr(s, '-'))) {
++ if (!avahi_unescape_label(&s, label, sizeof(label)))
++ return NULL;
++
++ if ((e = strrchr(label, '-'))) {
+ const char *p;
+
+ e++;
+@@ -74,19 +79,18 @@ char *avahi_alternative_host_name(const char *s) {
+
+ if (e) {
+ char *c, *m;
+- size_t l;
+ int n;
+
+ n = atoi(e)+1;
+ if (!(m = avahi_strdup_printf("%i", n)))
+ return NULL;
+
+- l = e-s-1;
++ len = e-label-1;
+
+- if (l >= AVAHI_LABEL_MAX-1-strlen(m)-1)
+- l = AVAHI_LABEL_MAX-1-strlen(m)-1;
++ if (len >= AVAHI_LABEL_MAX-1-strlen(m)-1)
++ len = AVAHI_LABEL_MAX-1-strlen(m)-1;
+
+- if (!(c = avahi_strndup(s, l))) {
++ if (!(c = avahi_strndup(label, len))) {
+ avahi_free(m);
+ return NULL;
+ }
+@@ -100,7 +104,7 @@ char *avahi_alternative_host_name(const char *s) {
+ } else {
+ char *c;
+
+- if (!(c = avahi_strndup(s, AVAHI_LABEL_MAX-1-2)))
++ if (!(c = avahi_strndup(label, AVAHI_LABEL_MAX-1-2)))
+ return NULL;
+
+ drop_incomplete_utf8(c);
+@@ -109,6 +113,13 @@ char *avahi_alternative_host_name(const char *s) {
+ avahi_free(c);
+ }
+
++ alt = alternative;
++ len = sizeof(alternative);
++ ret = avahi_escape_label(r, strlen(r), &alt, &len);
++
++ avahi_free(r);
++ r = avahi_strdup(ret);
++
+ assert(avahi_is_valid_host_name(r));
+
+ return r;
+--
+2.40.0
diff --git a/meta/recipes-connectivity/avahi/files/invalid-service.patch b/meta/recipes-connectivity/avahi/files/invalid-service.patch
new file mode 100644
index 0000000000..8f188aff2c
--- /dev/null
+++ b/meta/recipes-connectivity/avahi/files/invalid-service.patch
@@ -0,0 +1,29 @@
+From 46490e95151d415cd22f02565e530eb5efcef680 Mon Sep 17 00:00:00 2001
+From: Asger Hautop Drewsen <asger@princh.com>
+Date: Mon, 9 Aug 2021 14:25:08 +0200
+Subject: [PATCH] Fix avahi-browse: Invalid service type
+
+Invalid service types will stop the browse from completing, or
+in simple terms "my washing machine stops me from printing".
+
+Upstream-Status: Submitted [https://github.com/lathiat/avahi/pull/472]
+Signed-off-by: Ross Burton <ross.burton@arm.com>
+---
+ avahi-core/browse-service.c | 4 +++-
+ 1 file changed, 3 insertions(+), 1 deletion(-)
+
+diff --git a/avahi-core/browse-service.c b/avahi-core/browse-service.c
+index 63e0275a..ac3d2ecb 100644
+--- a/avahi-core/browse-service.c
++++ b/avahi-core/browse-service.c
+@@ -103,7 +103,9 @@ AvahiSServiceBrowser *avahi_s_service_browser_prepare(
+ AVAHI_CHECK_VALIDITY_RETURN_NULL(server, AVAHI_PROTO_VALID(protocol), AVAHI_ERR_INVALID_PROTOCOL);
+ AVAHI_CHECK_VALIDITY_RETURN_NULL(server, !domain || avahi_is_valid_domain_name(domain), AVAHI_ERR_INVALID_DOMAIN_NAME);
+ AVAHI_CHECK_VALIDITY_RETURN_NULL(server, AVAHI_FLAGS_VALID(flags, AVAHI_LOOKUP_USE_WIDE_AREA|AVAHI_LOOKUP_USE_MULTICAST), AVAHI_ERR_INVALID_FLAGS);
+- AVAHI_CHECK_VALIDITY_RETURN_NULL(server, avahi_is_valid_service_type_generic(service_type), AVAHI_ERR_INVALID_SERVICE_TYPE);
++
++ if (!avahi_is_valid_service_type_generic(service_type))
++ service_type = "_invalid._tcp";
+
+ if (!domain)
+ domain = server->domain_name;
diff --git a/meta/recipes-connectivity/bind/bind-9.18.2/0001-avoid-start-failure-with-bind-user.patch b/meta/recipes-connectivity/bind/bind/0001-avoid-start-failure-with-bind-user.patch
index ec1bc7b567..ec1bc7b567 100644
--- a/meta/recipes-connectivity/bind/bind-9.18.2/0001-avoid-start-failure-with-bind-user.patch
+++ b/meta/recipes-connectivity/bind/bind/0001-avoid-start-failure-with-bind-user.patch
diff --git a/meta/recipes-connectivity/bind/bind-9.18.2/0001-named-lwresd-V-and-start-log-hide-build-options.patch b/meta/recipes-connectivity/bind/bind/0001-named-lwresd-V-and-start-log-hide-build-options.patch
index 4c10f33f04..4c10f33f04 100644
--- a/meta/recipes-connectivity/bind/bind-9.18.2/0001-named-lwresd-V-and-start-log-hide-build-options.patch
+++ b/meta/recipes-connectivity/bind/bind/0001-named-lwresd-V-and-start-log-hide-build-options.patch
diff --git a/meta/recipes-connectivity/bind/bind-9.18.2/bind-ensure-searching-for-json-headers-searches-sysr.patch b/meta/recipes-connectivity/bind/bind/bind-ensure-searching-for-json-headers-searches-sysr.patch
index f1abd179e8..38d07cae39 100644
--- a/meta/recipes-connectivity/bind/bind-9.18.2/bind-ensure-searching-for-json-headers-searches-sysr.patch
+++ b/meta/recipes-connectivity/bind/bind/bind-ensure-searching-for-json-headers-searches-sysr.patch
@@ -1,4 +1,4 @@
-From 246087f89e9434b726c7884e4c0964f71084f091 Mon Sep 17 00:00:00 2001
+From 5ae30329f168c1e8d2e0c3831988a4f3e9096e39 Mon Sep 17 00:00:00 2001
From: Paul Gortmaker <paul.gortmaker@windriver.com>
Date: Tue, 9 Jun 2015 11:22:00 -0400
Subject: [PATCH] bind: ensure searching for json headers searches sysroot
@@ -33,10 +33,10 @@ Signed-off-by: Paul Gortmaker <paul.gortmaker@windriver.com>
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/configure.ac b/configure.ac
-index 10e8bf6..bf20690 100644
+index 2ab8ddd..92fe983 100644
--- a/configure.ac
+++ b/configure.ac
-@@ -814,7 +814,7 @@ AS_CASE([$with_lmdb],
+@@ -761,7 +761,7 @@ AS_CASE([$with_lmdb],
[no],[],
[auto|yes], [PKG_CHECK_MODULES([LMDB], [lmdb],
[ac_lib_lmdb_found=yes],
diff --git a/meta/recipes-connectivity/bind/bind-9.18.2/bind9 b/meta/recipes-connectivity/bind/bind/bind9
index 968679ff7f..968679ff7f 100644
--- a/meta/recipes-connectivity/bind/bind-9.18.2/bind9
+++ b/meta/recipes-connectivity/bind/bind/bind9
diff --git a/meta/recipes-connectivity/bind/bind-9.18.2/conf.patch b/meta/recipes-connectivity/bind/bind/conf.patch
index aa3642acec..aa3642acec 100644
--- a/meta/recipes-connectivity/bind/bind-9.18.2/conf.patch
+++ b/meta/recipes-connectivity/bind/bind/conf.patch
diff --git a/meta/recipes-connectivity/bind/bind-9.18.2/generate-rndc-key.sh b/meta/recipes-connectivity/bind/bind/generate-rndc-key.sh
index 633e29c0e6..633e29c0e6 100644
--- a/meta/recipes-connectivity/bind/bind-9.18.2/generate-rndc-key.sh
+++ b/meta/recipes-connectivity/bind/bind/generate-rndc-key.sh
diff --git a/meta/recipes-connectivity/bind/bind-9.18.2/init.d-add-support-for-read-only-rootfs.patch b/meta/recipes-connectivity/bind/bind/init.d-add-support-for-read-only-rootfs.patch
index 11db95ede1..11db95ede1 100644
--- a/meta/recipes-connectivity/bind/bind-9.18.2/init.d-add-support-for-read-only-rootfs.patch
+++ b/meta/recipes-connectivity/bind/bind/init.d-add-support-for-read-only-rootfs.patch
diff --git a/meta/recipes-connectivity/bind/bind-9.18.2/make-etc-initd-bind-stop-work.patch b/meta/recipes-connectivity/bind/bind/make-etc-initd-bind-stop-work.patch
index 146f3e35db..146f3e35db 100644
--- a/meta/recipes-connectivity/bind/bind-9.18.2/make-etc-initd-bind-stop-work.patch
+++ b/meta/recipes-connectivity/bind/bind/make-etc-initd-bind-stop-work.patch
diff --git a/meta/recipes-connectivity/bind/bind-9.18.2/named.service b/meta/recipes-connectivity/bind/bind/named.service
index cda56ef015..cda56ef015 100644
--- a/meta/recipes-connectivity/bind/bind-9.18.2/named.service
+++ b/meta/recipes-connectivity/bind/bind/named.service
diff --git a/meta/recipes-connectivity/bind/bind_9.18.2.bb b/meta/recipes-connectivity/bind/bind_9.18.2.bb
deleted file mode 100644
index 1c77aceb9f..0000000000
--- a/meta/recipes-connectivity/bind/bind_9.18.2.bb
+++ /dev/null
@@ -1,127 +0,0 @@
-SUMMARY = "ISC Internet Domain Name Server"
-HOMEPAGE = "https://www.isc.org/bind/"
-DESCRIPTION = "BIND 9 provides a full-featured Domain Name Server system"
-SECTION = "console/network"
-
-LICENSE = "MPL-2.0"
-LIC_FILES_CHKSUM = "file://COPYRIGHT;md5=9a4a897f202c0710e07f2f2836bc2b62"
-
-DEPENDS = "openssl libcap zlib libuv"
-
-SRC_URI = "https://ftp.isc.org/isc/bind9/${PV}/${BPN}-${PV}.tar.xz \
- file://conf.patch \
- file://named.service \
- file://bind9 \
- file://generate-rndc-key.sh \
- file://make-etc-initd-bind-stop-work.patch \
- file://init.d-add-support-for-read-only-rootfs.patch \
- file://bind-ensure-searching-for-json-headers-searches-sysr.patch \
- file://0001-named-lwresd-V-and-start-log-hide-build-options.patch \
- file://0001-avoid-start-failure-with-bind-user.patch \
- "
-
-SRC_URI[sha256sum] = "2e4b38779bba0a23ee634fdf7c525fd9794c41d692bfd83cda25823a2a3ed969"
-
-UPSTREAM_CHECK_URI = "https://ftp.isc.org/isc/bind9/"
-# follow the ESV versions divisible by 2
-UPSTREAM_CHECK_REGEX = "(?P<pver>9.(\d*[02468])+(\.\d+)+(-P\d+)*)/"
-
-# Issue only affects dhcpd with recent bind versions. We don't ship dhcpd anymore
-# so the issue doesn't affect us.
-CVE_CHECK_IGNORE += "CVE-2019-6470"
-
-inherit autotools update-rc.d systemd useradd pkgconfig multilib_header update-alternatives
-
-# PACKAGECONFIGs readline and libedit should NOT be set at same time
-PACKAGECONFIG ?= "readline"
-PACKAGECONFIG[httpstats] = "--with-libxml2=${STAGING_DIR_HOST}${prefix},--without-libxml2,libxml2"
-PACKAGECONFIG[readline] = "--with-readline=readline,,readline"
-PACKAGECONFIG[libedit] = "--with-readline=libedit,,libedit"
-PACKAGECONFIG[dns-over-http] = "--enable-doh,--disable-doh,nghttp2"
-
-EXTRA_OECONF = " --disable-devpoll --disable-auto-validation --enable-epoll \
- --with-gssapi=no --with-lmdb=no --with-zlib \
- --sysconfdir=${sysconfdir}/bind \
- --with-openssl=${STAGING_DIR_HOST}${prefix} \
- "
-LDFLAGS:append = " -lz"
-
-inherit ${@bb.utils.contains('PACKAGECONFIG', 'python3', 'python3native setuptools3-base', '', d)}
-
-# dhcp needs .la so keep them
-REMOVE_LIBTOOL_LA = "0"
-
-USERADD_PACKAGES = "${PN}"
-USERADD_PARAM:${PN} = "--system --home ${localstatedir}/cache/bind --no-create-home \
- --user-group bind"
-
-INITSCRIPT_NAME = "bind"
-INITSCRIPT_PARAMS = "defaults"
-
-SYSTEMD_SERVICE:${PN} = "named.service"
-
-do_install:append() {
-
- install -d -o bind "${D}${localstatedir}/cache/bind"
- install -d "${D}${sysconfdir}/bind"
- install -d "${D}${sysconfdir}/init.d"
- install -m 644 ${S}/conf/* "${D}${sysconfdir}/bind/"
- install -m 755 "${S}/init.d" "${D}${sysconfdir}/init.d/bind"
- if ${@bb.utils.contains('PACKAGECONFIG', 'python3', 'true', 'false', d)}; then
- sed -i -e '1s,#!.*python3,#! /usr/bin/python3,' \
- ${D}${sbindir}/dnssec-coverage \
- ${D}${sbindir}/dnssec-checkds \
- ${D}${sbindir}/dnssec-keymgr
- fi
-
- # Install systemd related files
- install -d ${D}${sbindir}
- install -m 755 ${WORKDIR}/generate-rndc-key.sh ${D}${sbindir}
- install -d ${D}${systemd_system_unitdir}
- install -m 0644 ${WORKDIR}/named.service ${D}${systemd_system_unitdir}
- sed -i -e 's,@BASE_BINDIR@,${base_bindir},g' \
- -e 's,@SBINDIR@,${sbindir},g' \
- ${D}${systemd_system_unitdir}/named.service
-
- install -d ${D}${sysconfdir}/default
- install -m 0644 ${WORKDIR}/bind9 ${D}${sysconfdir}/default
-
- if ${@bb.utils.contains('DISTRO_FEATURES', 'systemd', 'true', 'false', d)}; then
- install -d ${D}${sysconfdir}/tmpfiles.d
- echo "d /run/named 0755 bind bind - -" > ${D}${sysconfdir}/tmpfiles.d/bind.conf
- fi
-}
-
-CONFFILES:${PN} = " \
- ${sysconfdir}/bind/named.conf \
- ${sysconfdir}/bind/named.conf.local \
- ${sysconfdir}/bind/named.conf.options \
- ${sysconfdir}/bind/db.0 \
- ${sysconfdir}/bind/db.127 \
- ${sysconfdir}/bind/db.empty \
- ${sysconfdir}/bind/db.local \
- ${sysconfdir}/bind/db.root \
- "
-
-ALTERNATIVE:${PN}-utils = "nslookup"
-ALTERNATIVE_LINK_NAME[nslookup] = "${bindir}/nslookup"
-ALTERNATIVE_PRIORITY = "100"
-
-PACKAGE_BEFORE_PN += "${PN}-utils"
-FILES:${PN}-utils = "${bindir}/host ${bindir}/dig ${bindir}/mdig ${bindir}/nslookup ${bindir}/nsupdate"
-FILES:${PN}-dev += "${bindir}/isc-config.h"
-FILES:${PN} += "${sbindir}/generate-rndc-key.sh"
-
-PACKAGE_BEFORE_PN += "${PN}-libs"
-# special arrangement below due to
-# https://github.com/isc-projects/bind9/commit/0e25af628cd776f98c04fc4cc59048f5448f6c88
-FILES_SOLIBSDEV = "${libdir}/*[!0-9].so ${libdir}/libbind9.so"
-FILES:${PN}-libs = "${libdir}/named/*.so* ${libdir}/*-${PV}.so"
-FILES:${PN}-staticdev += "${libdir}/*.la"
-
-PACKAGE_BEFORE_PN += "${@bb.utils.contains('PACKAGECONFIG', 'python3', 'python3-bind', '', d)}"
-FILES:python3-bind = "${sbindir}/dnssec-coverage ${sbindir}/dnssec-checkds \
- ${sbindir}/dnssec-keymgr ${PYTHON_SITEPACKAGES_DIR}"
-
-RDEPENDS:${PN}-dev = ""
-RDEPENDS:python3-bind = "python3-core python3-ply"
diff --git a/meta/recipes-connectivity/bind/bind_9.18.25.bb b/meta/recipes-connectivity/bind/bind_9.18.25.bb
new file mode 100644
index 0000000000..cc35604aba
--- /dev/null
+++ b/meta/recipes-connectivity/bind/bind_9.18.25.bb
@@ -0,0 +1,113 @@
+SUMMARY = "ISC Internet Domain Name Server"
+HOMEPAGE = "https://www.isc.org/bind/"
+DESCRIPTION = "BIND 9 provides a full-featured Domain Name Server system"
+SECTION = "console/network"
+
+LICENSE = "MPL-2.0"
+LIC_FILES_CHKSUM = "file://COPYRIGHT;md5=c7a0b6d9a1b692a5da9af9d503671f43"
+
+DEPENDS = "openssl libcap zlib libuv"
+
+SRC_URI = "https://ftp.isc.org/isc/bind9/${PV}/${BPN}-${PV}.tar.xz \
+ file://conf.patch \
+ file://named.service \
+ file://bind9 \
+ file://generate-rndc-key.sh \
+ file://make-etc-initd-bind-stop-work.patch \
+ file://init.d-add-support-for-read-only-rootfs.patch \
+ file://bind-ensure-searching-for-json-headers-searches-sysr.patch \
+ file://0001-named-lwresd-V-and-start-log-hide-build-options.patch \
+ file://0001-avoid-start-failure-with-bind-user.patch \
+ "
+
+SRC_URI[sha256sum] = "5a4a70432a33d009f0e6e9dbb328aae7a5e27507e98e28bf3c0c6b250ccb2ab3"
+
+UPSTREAM_CHECK_URI = "https://ftp.isc.org/isc/bind9/"
+# follow the ESV versions divisible by 2
+UPSTREAM_CHECK_REGEX = "(?P<pver>9.(\d*[02468])+(\.\d+)+(-P\d+)*)/"
+
+# Issue only affects dhcpd with recent bind versions. We don't ship dhcpd anymore
+# so the issue doesn't affect us.
+CVE_STATUS[CVE-2019-6470] = "not-applicable-config: Issue only affects dhcpd with recent bind versions and we don't ship dhcpd anymore."
+
+inherit autotools update-rc.d systemd useradd pkgconfig multilib_header update-alternatives
+
+# PACKAGECONFIGs readline and libedit should NOT be set at same time
+PACKAGECONFIG ?= "readline"
+PACKAGECONFIG[httpstats] = "--with-libxml2=${STAGING_DIR_HOST}${prefix},--without-libxml2,libxml2"
+PACKAGECONFIG[readline] = "--with-readline=readline,,readline"
+PACKAGECONFIG[libedit] = "--with-readline=libedit,,libedit"
+PACKAGECONFIG[dns-over-http] = "--enable-doh,--disable-doh,nghttp2"
+
+EXTRA_OECONF = " --disable-auto-validation \
+ --with-gssapi=no --with-lmdb=no --with-zlib \
+ --sysconfdir=${sysconfdir}/bind \
+ --with-openssl=${STAGING_DIR_HOST}${prefix} \
+ "
+LDFLAGS:append = " -lz"
+
+# dhcp needs .la so keep them
+REMOVE_LIBTOOL_LA = "0"
+
+USERADD_PACKAGES = "${PN}"
+USERADD_PARAM:${PN} = "--system --home ${localstatedir}/cache/bind --no-create-home \
+ --user-group bind"
+
+INITSCRIPT_NAME = "bind"
+INITSCRIPT_PARAMS = "defaults"
+
+SYSTEMD_SERVICE:${PN} = "named.service"
+
+do_install:append() {
+
+ install -d -o bind "${D}${localstatedir}/cache/bind"
+ install -d "${D}${sysconfdir}/bind"
+ install -d "${D}${sysconfdir}/init.d"
+ install -m 644 ${S}/conf/* "${D}${sysconfdir}/bind/"
+ install -m 755 "${S}/init.d" "${D}${sysconfdir}/init.d/bind"
+
+ # Install systemd related files
+ install -d ${D}${sbindir}
+ install -m 755 ${WORKDIR}/generate-rndc-key.sh ${D}${sbindir}
+ install -d ${D}${systemd_system_unitdir}
+ install -m 0644 ${WORKDIR}/named.service ${D}${systemd_system_unitdir}
+ sed -i -e 's,@BASE_BINDIR@,${base_bindir},g' \
+ -e 's,@SBINDIR@,${sbindir},g' \
+ ${D}${systemd_system_unitdir}/named.service
+
+ install -d ${D}${sysconfdir}/default
+ install -m 0644 ${WORKDIR}/bind9 ${D}${sysconfdir}/default
+
+ if ${@bb.utils.contains('DISTRO_FEATURES', 'systemd', 'true', 'false', d)}; then
+ install -d ${D}${sysconfdir}/tmpfiles.d
+ echo "d /run/named 0755 bind bind - -" > ${D}${sysconfdir}/tmpfiles.d/bind.conf
+ fi
+}
+
+CONFFILES:${PN} = " \
+ ${sysconfdir}/bind/named.conf \
+ ${sysconfdir}/bind/named.conf.local \
+ ${sysconfdir}/bind/named.conf.options \
+ ${sysconfdir}/bind/db.0 \
+ ${sysconfdir}/bind/db.127 \
+ ${sysconfdir}/bind/db.empty \
+ ${sysconfdir}/bind/db.local \
+ ${sysconfdir}/bind/db.root \
+ "
+
+ALTERNATIVE:${PN}-utils = "nslookup"
+ALTERNATIVE_LINK_NAME[nslookup] = "${bindir}/nslookup"
+ALTERNATIVE_PRIORITY = "100"
+
+PACKAGE_BEFORE_PN += "${PN}-utils"
+FILES:${PN}-utils = "${bindir}/host ${bindir}/dig ${bindir}/mdig ${bindir}/nslookup ${bindir}/nsupdate"
+FILES:${PN}-dev += "${bindir}/isc-config.h"
+FILES:${PN} += "${sbindir}/generate-rndc-key.sh"
+
+PACKAGE_BEFORE_PN += "${PN}-libs"
+# special arrangement below due to
+# https://github.com/isc-projects/bind9/commit/0e25af628cd776f98c04fc4cc59048f5448f6c88
+FILES_SOLIBSDEV = "${libdir}/*[!0-9].so ${libdir}/libbind9.so"
+FILES:${PN}-libs = "${libdir}/named/*.so* ${libdir}/*-${PV}.so"
+
+DEV_PKG_DEPENDENCY = ""
diff --git a/meta/recipes-connectivity/bluez5/bluez5.inc b/meta/recipes-connectivity/bluez5/bluez5.inc
index 22dd07b348..a31d7076ba 100644
--- a/meta/recipes-connectivity/bluez5/bluez5.inc
+++ b/meta/recipes-connectivity/bluez5/bluez5.inc
@@ -7,6 +7,7 @@ LIC_FILES_CHKSUM = "file://COPYING;md5=12f884d2ae1ff87c09e5b7ccc2c4ca7e \
file://COPYING.LIB;md5=fb504b67c50331fc78734fed90fb0e09 \
file://src/main.c;beginline=1;endline=24;md5=0ad83ca0dc37ab08af448777c581e7ac"
DEPENDS = "dbus glib-2.0"
+RDEPENDS:${PN} += "dbus"
PROVIDES += "bluez-hcidump"
RPROVIDES:${PN} += "bluez-hcidump"
@@ -53,7 +54,7 @@ SRC_URI = "${KERNELORG_MIRROR}/linux/bluetooth/bluez-${PV}.tar.xz \
${@bb.utils.contains('DISTRO_FEATURES', 'systemd', '', 'file://0001-Allow-using-obexd-without-systemd-in-the-user-sessio.patch', d)} \
file://0001-tests-add-a-target-for-building-tests-without-runnin.patch \
file://0001-test-gatt-Fix-hung-issue.patch \
- file://fix_service.patch \
+ file://0004-src-shared-util.c-include-linux-limits.h.patch \
"
S = "${WORKDIR}/bluez-${PV}"
@@ -65,9 +66,12 @@ EXTRA_OECONF = "\
--enable-test \
--enable-datafiles \
--enable-library \
+ --enable-pie \
--without-zsh-completion-dir \
"
+CFLAGS += "-DFIRMWARE_DIR=\\"${nonarch_base_libdir}/firmware\\""
+
# bluez5 builds a large number of useful utilities but does not
# install them. Specify which ones we want put into ${PN}-noinst-tools.
NOINST_TOOLS_READLINE ??= ""
@@ -83,14 +87,6 @@ do_install:append() {
install -d ${D}${INIT_D_DIR}
install -m 0755 ${WORKDIR}/init ${D}${INIT_D_DIR}/bluetooth
- install -d ${D}${sysconfdir}/bluetooth/
- if [ -f ${S}/profiles/network/network.conf ]; then
- install -m 0644 ${S}/profiles/network/network.conf ${D}/${sysconfdir}/bluetooth/
- fi
- if [ -f ${S}/profiles/input/input.conf ]; then
- install -m 0644 ${S}/profiles/input/input.conf ${D}/${sysconfdir}/bluetooth/
- fi
-
if [ -f ${D}/${sysconfdir}/init.d/bluetooth ]; then
sed -i -e 's#@LIBEXECDIR@#${libexecdir}#g' ${D}/${sysconfdir}/init.d/bluetooth
fi
diff --git a/meta/recipes-connectivity/bluez5/bluez5/0001-test-gatt-Fix-hung-issue.patch b/meta/recipes-connectivity/bluez5/bluez5/0001-test-gatt-Fix-hung-issue.patch
index e90b6a546f..b1e93dbe19 100644
--- a/meta/recipes-connectivity/bluez5/bluez5/0001-test-gatt-Fix-hung-issue.patch
+++ b/meta/recipes-connectivity/bluez5/bluez5/0001-test-gatt-Fix-hung-issue.patch
@@ -1,4 +1,4 @@
-From 61e741654cc2eb167bca212a3bb2ba8f3ba280c1 Mon Sep 17 00:00:00 2001
+From fb583a57f9f4ab956a09e9bb96d89aa13553bf21 Mon Sep 17 00:00:00 2001
From: Mingli Yu <Mingli.Yu@windriver.com>
Date: Fri, 24 Aug 2018 12:04:03 +0800
Subject: [PATCH] test-gatt: Fix hung issue
@@ -21,15 +21,16 @@ no action.
Upstream-Status: Submitted [https://marc.info/?l=linux-bluetooth&m=153508881804635&w=2]
Signed-off-by: Mingli Yu <Mingli.Yu@windriver.com>
+
---
unit/test-gatt.c | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/unit/test-gatt.c b/unit/test-gatt.c
-index c7e28f8..b57373b 100644
+index 5e06d4e..4864d36 100644
--- a/unit/test-gatt.c
+++ b/unit/test-gatt.c
-@@ -4463,7 +4463,7 @@ int main(int argc, char *argv[])
+@@ -4546,7 +4546,7 @@ int main(int argc, char *argv[])
test_server, service_db_1, NULL,
raw_pdu(0x03, 0x00, 0x02),
raw_pdu(0xbf, 0x00),
@@ -38,6 +39,3 @@ index c7e28f8..b57373b 100644
define_test_server("/robustness/unkown-command",
test_server, service_db_1, NULL,
---
-2.7.4
-
diff --git a/meta/recipes-connectivity/bluez5/bluez5/0001-tests-add-a-target-for-building-tests-without-runnin.patch b/meta/recipes-connectivity/bluez5/bluez5/0001-tests-add-a-target-for-building-tests-without-runnin.patch
index 24ddae6b63..881494a354 100644
--- a/meta/recipes-connectivity/bluez5/bluez5/0001-tests-add-a-target-for-building-tests-without-runnin.patch
+++ b/meta/recipes-connectivity/bluez5/bluez5/0001-tests-add-a-target-for-building-tests-without-runnin.patch
@@ -1,19 +1,20 @@
-From 4bdf0f96dcaa945fd29f26d56e5b36d8c23e4c8b Mon Sep 17 00:00:00 2001
+From 738e73b386352fd90f1f26cc1ee75427cf4dc23b Mon Sep 17 00:00:00 2001
From: Alexander Kanavin <alex.kanavin@gmail.com>
Date: Fri, 1 Apr 2016 17:07:34 +0300
Subject: [PATCH] tests: add a target for building tests without running them
Upstream-Status: Inappropriate [oe specific]
Signed-off-by: Alexander Kanavin <alex.kanavin@gmail.com>
+
---
Makefile.am | 3 +++
1 file changed, 3 insertions(+)
diff --git a/Makefile.am b/Makefile.am
-index 1a48a71..ba3b92f 100644
+index e738eb3..dab17dd 100644
--- a/Makefile.am
+++ b/Makefile.am
-@@ -425,6 +425,9 @@ endif
+@@ -710,6 +710,9 @@ endif
TESTS = $(unit_tests)
AM_TESTS_ENVIRONMENT = MALLOC_CHECK_=3 MALLOC_PERTURB_=69
@@ -23,6 +24,3 @@ index 1a48a71..ba3b92f 100644
if DBUS_RUN_SESSION
AM_TESTS_ENVIRONMENT += dbus-run-session --
endif
---
-2.8.0.rc3
-
diff --git a/meta/recipes-connectivity/bluez5/bluez5/0004-src-shared-util.c-include-linux-limits.h.patch b/meta/recipes-connectivity/bluez5/bluez5/0004-src-shared-util.c-include-linux-limits.h.patch
new file mode 100644
index 0000000000..516d859069
--- /dev/null
+++ b/meta/recipes-connectivity/bluez5/bluez5/0004-src-shared-util.c-include-linux-limits.h.patch
@@ -0,0 +1,27 @@
+From b53df61b41088b68c127ac76cc71683ac3453b9d Mon Sep 17 00:00:00 2001
+From: Alexander Kanavin <alex@linutronix.de>
+Date: Mon, 12 Dec 2022 13:10:19 +0100
+Subject: [PATCH] src/shared/util.c: include linux/limits.h
+
+MAX_INPUT is defined in that file. This matters on non-glibc
+systems such as those using musl.
+
+Upstream-Status: Submitted [to linux-bluetooth@vger.kernel.org,luiz.von.dentz@intel.com,frederic.danis@collabora.com]
+Signed-off-by: Alexander Kanavin <alex@linutronix.de>
+
+---
+ src/shared/util.c | 1 +
+ 1 file changed, 1 insertion(+)
+
+diff --git a/src/shared/util.c b/src/shared/util.c
+index c0c2c4a..036dc0d 100644
+--- a/src/shared/util.c
++++ b/src/shared/util.c
+@@ -23,6 +23,7 @@
+ #include <unistd.h>
+ #include <dirent.h>
+ #include <limits.h>
++#include <linux/limits.h>
+ #include <string.h>
+
+ #ifdef HAVE_SYS_RANDOM_H
diff --git a/meta/recipes-connectivity/bluez5/bluez5/fix_service.patch b/meta/recipes-connectivity/bluez5/bluez5/fix_service.patch
deleted file mode 100644
index 96fdf6b299..0000000000
--- a/meta/recipes-connectivity/bluez5/bluez5/fix_service.patch
+++ /dev/null
@@ -1,30 +0,0 @@
-The systemd bluetooth service failed to start because the /var/lib/bluetooth
-path of ReadWritePaths= is created by the bluetooth daemon itself.
-
-The commit systemd: Add more filesystem lockdown (442d211) add ReadWritePaths=/etc/bluetooth
-and ReadOnlyPaths=/var/lib/bluetooth options to the bluetooth systemd service.
-The existing ProtectSystem=full option mounts the /usr, the boot loader
-directories and /etc read-only. This means the two option are useless and could be removed.
-
-Upstream-Status: Submitted [https://github.com/bluez/bluez/issues/329]
-
-Index: bluez-5.64/src/bluetooth.service.in
-===================================================================
---- bluez-5.64.orig/src/bluetooth.service.in
-+++ bluez-5.64/src/bluetooth.service.in
-@@ -15,12 +15,12 @@ LimitNPROC=1
-
- # Filesystem lockdown
- ProtectHome=true
--ProtectSystem=full
-+ProtectSystem=strict
- PrivateTmp=true
- ProtectKernelTunables=true
- ProtectControlGroups=true
--ReadWritePaths=@statedir@
--ReadOnlyPaths=@confdir@
-+ConfigurationDirectory=bluetooth
-+StateDirectory=bluetooth
-
- # Execute Mappings
- MemoryDenyWriteExecute=true
diff --git a/meta/recipes-connectivity/bluez5/bluez5_5.64.bb b/meta/recipes-connectivity/bluez5/bluez5_5.64.bb
deleted file mode 100644
index 4319f9aae8..0000000000
--- a/meta/recipes-connectivity/bluez5/bluez5_5.64.bb
+++ /dev/null
@@ -1,70 +0,0 @@
-require bluez5.inc
-
-SRC_URI[sha256sum] = "ae437e65b6b3070c198bc5b0109fe9cdeb9eaa387380e2072f9de65fe8a1de34"
-
-# These issues have kernel fixes rather than bluez fixes so exclude here
-CVE_CHECK_IGNORE += "CVE-2020-12352 CVE-2020-24490"
-
-# noinst programs in Makefile.tools that are conditional on READLINE
-# support
-NOINST_TOOLS_READLINE ?= " \
- ${@bb.utils.contains('PACKAGECONFIG', 'deprecated', 'attrib/gatttool', '', d)} \
- tools/obex-client-tool \
- tools/obex-server-tool \
- tools/bluetooth-player \
- tools/obexctl \
- tools/btmgmt \
-"
-
-# noinst programs in Makefile.tools that are conditional on TESTING
-# support
-NOINST_TOOLS_TESTING ?= " \
- emulator/btvirt \
- emulator/b1ee \
- emulator/hfp \
- peripheral/btsensor \
- tools/3dsp \
- tools/mgmt-tester \
- tools/gap-tester \
- tools/l2cap-tester \
- tools/sco-tester \
- tools/smp-tester \
- tools/hci-tester \
- tools/rfcomm-tester \
- tools/bnep-tester \
- tools/userchan-tester \
-"
-
-# noinst programs in Makefile.tools that are conditional on TOOLS
-# support
-NOINST_TOOLS_BT ?= " \
- tools/bdaddr \
- tools/avinfo \
- tools/avtest \
- tools/scotest \
- tools/amptest \
- tools/hwdb \
- tools/hcieventmask \
- tools/hcisecfilter \
- tools/btinfo \
- tools/btsnoop \
- tools/btproxy \
- tools/btiotest \
- tools/bneptest \
- tools/mcaptest \
- tools/cltest \
- tools/oobtest \
- tools/advtest \
- tools/seq2bseq \
- tools/nokfw \
- tools/create-image \
- tools/eddystone \
- tools/ibeacon \
- tools/btgatt-client \
- tools/btgatt-server \
- tools/test-runner \
- tools/check-selftest \
- tools/gatt-service \
- profiles/iap/iapd \
- ${@bb.utils.contains('PACKAGECONFIG', 'btpclient', 'tools/btpclient', '', d)} \
-"
diff --git a/meta/recipes-connectivity/bluez5/bluez5_5.72.bb b/meta/recipes-connectivity/bluez5/bluez5_5.72.bb
new file mode 100644
index 0000000000..9fda960ea7
--- /dev/null
+++ b/meta/recipes-connectivity/bluez5/bluez5_5.72.bb
@@ -0,0 +1,69 @@
+require bluez5.inc
+
+SRC_URI[sha256sum] = "499d7fa345a996c1bb650f5c6749e1d929111fa6ece0be0e98687fee6124536e"
+
+CVE_STATUS[CVE-2020-24490] = "cpe-incorrect: This issue has kernel fixes rather than bluez fixes"
+
+# noinst programs in Makefile.tools that are conditional on READLINE
+# support
+NOINST_TOOLS_READLINE ?= " \
+ ${@bb.utils.contains('PACKAGECONFIG', 'deprecated', 'attrib/gatttool', '', d)} \
+ tools/obex-client-tool \
+ tools/obex-server-tool \
+ tools/bluetooth-player \
+ tools/obexctl \
+ tools/btmgmt \
+"
+
+# noinst programs in Makefile.tools that are conditional on TESTING
+# support
+NOINST_TOOLS_TESTING ?= " \
+ emulator/btvirt \
+ emulator/b1ee \
+ emulator/hfp \
+ peripheral/btsensor \
+ tools/3dsp \
+ tools/mgmt-tester \
+ tools/gap-tester \
+ tools/l2cap-tester \
+ tools/sco-tester \
+ tools/smp-tester \
+ tools/hci-tester \
+ tools/rfcomm-tester \
+ tools/bnep-tester \
+ tools/userchan-tester \
+"
+
+# noinst programs in Makefile.tools that are conditional on TOOLS
+# support
+NOINST_TOOLS_BT ?= " \
+ tools/bdaddr \
+ tools/avinfo \
+ tools/avtest \
+ tools/scotest \
+ tools/amptest \
+ tools/hwdb \
+ tools/hcieventmask \
+ tools/hcisecfilter \
+ tools/btinfo \
+ tools/btsnoop \
+ tools/btproxy \
+ tools/btiotest \
+ tools/bneptest \
+ tools/mcaptest \
+ tools/cltest \
+ tools/oobtest \
+ tools/advtest \
+ tools/seq2bseq \
+ tools/nokfw \
+ tools/create-image \
+ tools/eddystone \
+ tools/ibeacon \
+ tools/btgatt-client \
+ tools/btgatt-server \
+ tools/test-runner \
+ tools/check-selftest \
+ tools/gatt-service \
+ profiles/iap/iapd \
+ ${@bb.utils.contains('PACKAGECONFIG', 'btpclient', 'tools/btpclient', '', d)} \
+"
diff --git a/meta/recipes-connectivity/connman/connman-conf.bb b/meta/recipes-connectivity/connman/connman-conf.bb
index 7959ed8e50..a1a0e08faa 100644
--- a/meta/recipes-connectivity/connman/connman-conf.bb
+++ b/meta/recipes-connectivity/connman/connman-conf.bb
@@ -4,7 +4,6 @@ network interface inside qemu machines."
LICENSE = "GPL-2.0-only"
LIC_FILES_CHKSUM = "file://${COREBASE}/meta/files/common-licenses/GPL-2.0-only;md5=801f80980d171dd6425610833a22dbe6"
-PR = "r2"
SRC_URI = "file://main.conf \
"
diff --git a/meta/recipes-connectivity/connman/connman-conf/main.conf b/meta/recipes-connectivity/connman/connman-conf/main.conf
index a394e8f25b..3c9dd396f6 100644
--- a/meta/recipes-connectivity/connman/connman-conf/main.conf
+++ b/meta/recipes-connectivity/connman/connman-conf/main.conf
@@ -1,2 +1,2 @@
[General]
-NetworkInterfaceBlacklist = eth0
+NetworkInterfaceBlacklist = eth,en
diff --git a/meta/recipes-connectivity/connman/connman.inc b/meta/recipes-connectivity/connman/connman.inc
index 5880ecd5d4..7487ca0d0c 100644
--- a/meta/recipes-connectivity/connman/connman.inc
+++ b/meta/recipes-connectivity/connman/connman.inc
@@ -27,11 +27,17 @@ EXTRA_OECONF += "\
--enable-ethernet \
--enable-tools \
--disable-polkit \
+ --runstatedir=/run \
"
+# For smooth operation it would be best to start only one wireless daemon at a time.
+# If wpa-supplicant is running, connman will use it preferentially.
+# Select either wpa-supplicant or iwd
+WIRELESS_DAEMON ??= "wpa-supplicant"
PACKAGECONFIG ??= "wispr iptables client\
- ${@bb.utils.filter('DISTRO_FEATURES', '3g systemd wifi', d)} \
+ ${@bb.utils.filter('DISTRO_FEATURES', '3g systemd', d)} \
${@bb.utils.contains('DISTRO_FEATURES', 'bluetooth', 'bluez', '', d)} \
+ ${@bb.utils.contains('DISTRO_FEATURES', 'wifi', 'wifi ${WIRELESS_DAEMON}', '', d)} \
"
# If you want ConnMan to support VPN, add following statement into
@@ -39,9 +45,11 @@ PACKAGECONFIG ??= "wispr iptables client\
# PACKAGECONFIG:append:pn-connman = " openvpn vpnc l2tp pptp"
PACKAGECONFIG[systemd] = "--with-systemdunitdir=${systemd_system_unitdir}/ --with-tmpfilesdir=${sysconfdir}/tmpfiles.d/,--with-systemdunitdir='' --with-tmpfilesdir=''"
-PACKAGECONFIG[wifi] = "--enable-wifi, --disable-wifi, wpa-supplicant, wpa-supplicant"
+PACKAGECONFIG[wifi] = "--enable-wifi, --disable-wifi"
PACKAGECONFIG[bluez] = "--enable-bluetooth, --disable-bluetooth, bluez5, bluez5"
PACKAGECONFIG[3g] = "--enable-ofono, --disable-ofono, ofono, ofono"
+PACKAGECONFIG[wpa-supplicant] = ",,wpa-supplicant,wpa-supplicant"
+PACKAGECONFIG[iwd] = "--enable-iwd,--disable-iwd,,iwd"
PACKAGECONFIG[tist] = "--enable-tist,--disable-tist,"
PACKAGECONFIG[openvpn] = "--enable-openvpn --with-openvpn=${sbindir}/openvpn,--disable-openvpn,,openvpn"
PACKAGECONFIG[vpnc] = "--enable-vpnc --with-vpnc=${sbindir}/vpnc,--disable-vpnc,,vpnc"
diff --git a/meta/recipes-connectivity/connman/connman/0001-src-log.c-Include-libgen.h-for-basename-API.patch b/meta/recipes-connectivity/connman/connman/0001-src-log.c-Include-libgen.h-for-basename-API.patch
new file mode 100644
index 0000000000..8012606db7
--- /dev/null
+++ b/meta/recipes-connectivity/connman/connman/0001-src-log.c-Include-libgen.h-for-basename-API.patch
@@ -0,0 +1,55 @@
+From cbba6638986c2de763981bf6fc59df6a86fed44f Mon Sep 17 00:00:00 2001
+From: Khem Raj <raj.khem@gmail.com>
+Date: Mon, 1 Jan 2024 17:42:21 -0800
+Subject: [PATCH v2] src/log.c: Include libgen.h for basename API
+
+Use POSIX version of basename. This comes to front with latest musl
+which dropped the declaration from string.h [1] it fails to build with
+clang-17+ because it treats implicit function declaration as error.
+
+Fix it by applying the basename on a copy of string since posix version
+may modify the input string.
+
+[1] https://git.musl-libc.org/cgit/musl/commit/?id=725e17ed6dff4d0cd22487bb64470881e86a92e7
+
+Upstream-Status: Submitted [https://lore.kernel.org/connman/20240102015917.3732089-1-raj.khem@gmail.com/T/#u]
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+---
+
+ src/log.c | 6 ++++--
+ 1 file changed, 4 insertions(+), 2 deletions(-)
+
+diff --git a/src/log.c b/src/log.c
+index 554b046..2df3af7 100644
+--- a/src/log.c
++++ b/src/log.c
+@@ -24,6 +24,7 @@
+ #endif
+
+ #include <stdio.h>
++#include <libgen.h>
+ #include <unistd.h>
+ #include <stdarg.h>
+ #include <stdlib.h>
+@@ -196,6 +197,7 @@ int __connman_log_init(const char *program, const char *debug,
+ const char *program_name, const char *program_version)
+ {
+ static char path[PATH_MAX];
++ char* tmp = strdup(program);
+ int option = LOG_NDELAY | LOG_PID;
+
+ program_exec = program;
+@@ -212,8 +214,8 @@ int __connman_log_init(const char *program, const char *debug,
+ if (backtrace)
+ signal_setup(signal_handler);
+
+- openlog(basename(program), option, LOG_DAEMON);
+-
++ openlog(basename(tmp), option, LOG_DAEMON);
++ free(tmp);
+ syslog(LOG_INFO, "%s version %s", program_name, program_version);
+
+ return 0;
+--
+2.43.0
+
diff --git a/meta/recipes-connectivity/connman/connman/0001-vpn-Adding-support-for-latest-pppd-2.5.0-release.patch b/meta/recipes-connectivity/connman/connman/0001-vpn-Adding-support-for-latest-pppd-2.5.0-release.patch
new file mode 100644
index 0000000000..9e5ac8da15
--- /dev/null
+++ b/meta/recipes-connectivity/connman/connman/0001-vpn-Adding-support-for-latest-pppd-2.5.0-release.patch
@@ -0,0 +1,152 @@
+From af55a6a414d32c12f9ef3cab778385a361e1ad6d Mon Sep 17 00:00:00 2001
+From: =?UTF-8?q?Eivind=20N=C3=A6ss?= <eivnaes@yahoo.com>
+Date: Sat, 25 Mar 2023 20:51:52 +0000
+Subject: [PATCH] vpn: Adding support for latest pppd 2.5.0 release
+
+The API has gone through a significant overhaul, and this change fixes any compile issues.
+1) Fixes to configure.ac itself
+2) Cleanup in pppd plugin itself
+
+Adding a libppp-compat.h file to mask for any differences in the version.
+
+Upstream-Status: Backport [https://git.kernel.org/pub/scm/network/connman/connman.git/commit/?id=a48864a2e5d2a725dfc6eef567108bc13b43857f]
+Signed-off-by: Martin Jansa <Martin.Jansa@gmail.com>
+
+---
+ scripts/libppp-compat.h | 127 ++++++++++++++++++++++++++++++++++++++++
+ 1 file changed, 127 insertions(+)
+ create mode 100644 scripts/libppp-compat.h
+
+diff --git a/scripts/libppp-compat.h b/scripts/libppp-compat.h
+new file mode 100644
+index 0000000..eee1d09
+--- /dev/null
++++ b/scripts/libppp-compat.h
+@@ -0,0 +1,127 @@
++/* Copyright (C) Eivind Naess, eivnaes@yahoo.com */
++/* SPDX-License-Identifier: GPL-2.0-or-later */
++
++#ifndef __LIBPPP_COMPAT_H__
++#define __LIBPPP_COMPAT_H__
++
++/* Define USE_EAPTLS compile with EAP TLS support against older pppd headers,
++ * pppd >= 2.5.0 use PPP_WITH_EAPTLS and is defined in pppdconf.h */
++#define USE_EAPTLS 1
++
++/* Define INET6 to compile with IPv6 support against older pppd headers,
++ * pppd >= 2.5.0 use PPP_WITH_IPV6CP and is defined in pppdconf.h */
++#define INET6 1
++
++/* PPP < 2.5.0 defines and exports VERSION which overlaps with current package VERSION define.
++ * this silly macro magic is to work around that. */
++#undef VERSION
++#include <pppd/pppd.h>
++
++#ifndef PPPD_VERSION
++#define PPPD_VERSION VERSION
++#endif
++
++#include <pppd/fsm.h>
++#include <pppd/ccp.h>
++#include <pppd/eui64.h>
++#include <pppd/ipcp.h>
++#include <pppd/ipv6cp.h>
++#include <pppd/eap.h>
++#include <pppd/upap.h>
++
++#ifdef HAVE_PPPD_CHAP_H
++#include <pppd/chap.h>
++#endif
++
++#ifdef HAVE_PPPD_CHAP_NEW_H
++#include <pppd/chap-new.h>
++#endif
++
++#ifdef HAVE_PPPD_CHAP_MS_H
++#include <pppd/chap_ms.h>
++#endif
++
++#ifndef PPP_PROTO_CHAP
++#define PPP_PROTO_CHAP 0xc223
++#endif
++
++#ifndef PPP_PROTO_EAP
++#define PPP_PROTO_EAP 0xc227
++#endif
++
++
++#if WITH_PPP_VERSION < PPP_VERSION(2,5,0)
++
++static inline bool
++debug_on (void)
++{
++ return debug;
++}
++
++static inline const char
++*ppp_ipparam (void)
++{
++ return ipparam;
++}
++
++static inline int
++ppp_ifunit (void)
++{
++ return ifunit;
++}
++
++static inline const char *
++ppp_ifname (void)
++{
++ return ifname;
++}
++
++static inline int
++ppp_get_mtu (int idx)
++{
++ return netif_get_mtu(idx);
++}
++
++typedef enum ppp_notify
++{
++ NF_PID_CHANGE,
++ NF_PHASE_CHANGE,
++ NF_EXIT,
++ NF_SIGNALED,
++ NF_IP_UP,
++ NF_IP_DOWN,
++ NF_IPV6_UP,
++ NF_IPV6_DOWN,
++ NF_AUTH_UP,
++ NF_LINK_DOWN,
++ NF_FORK,
++ NF_MAX_NOTIFY
++} ppp_notify_t;
++
++typedef void (ppp_notify_fn) (void *ctx, int arg);
++
++static inline void
++ppp_add_notify (ppp_notify_t type, ppp_notify_fn *func, void *ctx)
++{
++ struct notifier **list[NF_MAX_NOTIFY] = {
++ [NF_PID_CHANGE ] = &pidchange,
++ [NF_PHASE_CHANGE] = &phasechange,
++ [NF_EXIT ] = &exitnotify,
++ [NF_SIGNALED ] = &sigreceived,
++ [NF_IP_UP ] = &ip_up_notifier,
++ [NF_IP_DOWN ] = &ip_down_notifier,
++ [NF_IPV6_UP ] = &ipv6_up_notifier,
++ [NF_IPV6_DOWN ] = &ipv6_down_notifier,
++ [NF_AUTH_UP ] = &auth_up_notifier,
++ [NF_LINK_DOWN ] = &link_down_notifier,
++ [NF_FORK ] = &fork_notifier,
++ };
++
++ struct notifier **notify = list[type];
++ if (notify) {
++ add_notifier(notify, func, ctx);
++ }
++}
++
++#endif /* #if WITH_PPP_VERSION < PPP_VERSION(2,5,0) */
++#endif /* #if__LIBPPP_COMPAT_H__ */
diff --git a/meta/recipes-connectivity/connman/connman/0002-resolve-musl-does-not-implement-res_ninit.patch b/meta/recipes-connectivity/connman/connman/0002-resolve-musl-does-not-implement-res_ninit.patch
index 9dca21a02f..aefdd3aa06 100644
--- a/meta/recipes-connectivity/connman/connman/0002-resolve-musl-does-not-implement-res_ninit.patch
+++ b/meta/recipes-connectivity/connman/connman/0002-resolve-musl-does-not-implement-res_ninit.patch
@@ -18,14 +18,6 @@ diff --git a/gweb/gresolv.c b/gweb/gresolv.c
index 954e7cf..2a9bc51 100644
--- a/gweb/gresolv.c
+++ b/gweb/gresolv.c
-@@ -36,6 +36,7 @@
- #include <arpa/inet.h>
- #include <arpa/nameser.h>
- #include <net/if.h>
-+#include <ctype.h>
-
- #include "gresolv.h"
-
@@ -878,8 +879,6 @@ GResolv *g_resolv_new(int index)
resolv->index = index;
resolv->nameserver_list = NULL;
diff --git a/meta/recipes-connectivity/connman/connman/connman b/meta/recipes-connectivity/connman/connman/connman
index 310a696863..a021fd4655 100644
--- a/meta/recipes-connectivity/connman/connman/connman
+++ b/meta/recipes-connectivity/connman/connman/connman
@@ -10,48 +10,11 @@ fi
set -e
-nfsroot=0
-
-exec 9<&0 < /proc/mounts
-while read dev mtpt fstype rest; do
- if test $mtpt = "/" ; then
- case $fstype in
- nfs | nfs4)
- nfsroot=1
- break
- ;;
- *)
- ;;
- esac
- fi
-done
-
do_start() {
- if test $nfsroot -eq 1 ; then
- NET_DEVS=`cat /proc/net/dev | sed -ne 's/^\([a-zA-Z0-9 ]*\):.*$/\1/p'`
- NET_ADDR=`cat /proc/cmdline | sed -ne 's/^.*ip=\([^ :]*\).*$/\1/p'`
-
- if [ ! -z "$NET_ADDR" ]; then
- if [ "$NET_ADDR" = dhcp ]; then
- ethn=`ifconfig | grep "^eth" | sed -e "s/\(eth[0-9]\)\(.*\)/\1/"`
- if [ ! -z "$ethn" ]; then
- EXTRA_PARAM="$EXTRA_PARAM -I $ethn"
- fi
- else
- for i in $NET_DEVS; do
- ADDR=`ifconfig $i | sed 's/addr://g' | sed -ne 's/^.*inet \([0-9.]*\) .*$/\1/p'`
- if [ "$NET_ADDR" = "$ADDR" ]; then
- EXTRA_PARAM="$EXTRA_PARAM -I $i"
- break
- fi
- done
- fi
- fi
- fi
if [ -f @DATADIR@/connman/wired-setup ] ; then
. @DATADIR@/connman/wired-setup
fi
- $DAEMON $EXTRA_PARAM
+ $DAEMON
}
do_stop() {
diff --git a/meta/recipes-connectivity/connman/connman_1.41.bb b/meta/recipes-connectivity/connman/connman_1.41.bb
deleted file mode 100644
index 736b78eaeb..0000000000
--- a/meta/recipes-connectivity/connman/connman_1.41.bb
+++ /dev/null
@@ -1,15 +0,0 @@
-require connman.inc
-
-SRC_URI = "${KERNELORG_MIRROR}/linux/network/${BPN}/${BP}.tar.xz \
- file://0001-plugin.h-Change-visibility-to-default-for-debug-symb.patch \
- file://0001-connman.service-stop-systemd-resolved-when-we-use-co.patch \
- file://connman \
- file://no-version-scripts.patch \
- "
-
-SRC_URI:append:libc-musl = " file://0002-resolve-musl-does-not-implement-res_ninit.patch"
-
-SRC_URI[sha256sum] = "79fb40f4fdd5530c45aa8e592fb16ba23d3674f3a98cf10b89a6576f198de589"
-
-RRECOMMENDS:${PN} = "connman-conf"
-RCONFLICTS:${PN} = "networkmanager"
diff --git a/meta/recipes-connectivity/connman/connman_1.42.bb b/meta/recipes-connectivity/connman/connman_1.42.bb
new file mode 100644
index 0000000000..91ab9895ac
--- /dev/null
+++ b/meta/recipes-connectivity/connman/connman_1.42.bb
@@ -0,0 +1,17 @@
+require connman.inc
+
+SRC_URI = "${KERNELORG_MIRROR}/linux/network/${BPN}/${BP}.tar.xz \
+ file://0001-plugin.h-Change-visibility-to-default-for-debug-symb.patch \
+ file://0001-connman.service-stop-systemd-resolved-when-we-use-co.patch \
+ file://connman \
+ file://no-version-scripts.patch \
+ file://0001-vpn-Adding-support-for-latest-pppd-2.5.0-release.patch \
+ file://0001-src-log.c-Include-libgen.h-for-basename-API.patch \
+ "
+
+SRC_URI:append:libc-musl = " file://0002-resolve-musl-does-not-implement-res_ninit.patch"
+
+SRC_URI[sha256sum] = "a3e6bae46fc081ef2e9dae3caa4f7649de892c3de622c20283ac0ca81423c2aa"
+
+RRECOMMENDS:${PN} = "connman-conf"
+RCONFLICTS:${PN} = "networkmanager"
diff --git a/meta/recipes-connectivity/dhcpcd/dhcpcd_10.0.6.bb b/meta/recipes-connectivity/dhcpcd/dhcpcd_10.0.6.bb
new file mode 100644
index 0000000000..6bde9b1f51
--- /dev/null
+++ b/meta/recipes-connectivity/dhcpcd/dhcpcd_10.0.6.bb
@@ -0,0 +1,61 @@
+SECTION = "console/network"
+SUMMARY = "dhcpcd - a DHCP client"
+DESCRIPTION = "dhcpcd runs on your machine and silently configures your \
+ computer to work on the attached networks without trouble \
+ and mostly without configuration."
+
+HOMEPAGE = "http://roy.marples.name/projects/dhcpcd/"
+
+LICENSE = "BSD-2-Clause"
+LIC_FILES_CHKSUM = "file://LICENSE;md5=ba9c7e534853aaf3de76c905b2410ffd"
+
+SRC_URI = "git://github.com/NetworkConfiguration/dhcpcd;protocol=https;branch=master \
+ file://0001-remove-INCLUDEDIR-to-prevent-build-issues.patch \
+ file://0001-20-resolv.conf-improve-the-sitation-of-working-with-.patch \
+ file://dhcpcd.service \
+ file://dhcpcd@.service \
+ file://0001-dhcpcd.8-Fix-conflict-error-when-enable-multilib.patch \
+ "
+
+SRCREV = "1c8ae59836fa87b4c63c598087f0460ec20ed862"
+S = "${WORKDIR}/git"
+
+inherit pkgconfig autotools-brokensep systemd useradd
+
+SYSTEMD_SERVICE:${PN} = "dhcpcd.service"
+
+PACKAGECONFIG ?= "udev ${@bb.utils.filter('DISTRO_FEATURES', 'ipv6', d)}"
+
+PACKAGECONFIG[udev] = "--with-udev,--without-udev,udev,udev"
+PACKAGECONFIG[ipv6] = "--enable-ipv6,--disable-ipv6"
+# ntp conflicts with chrony
+PACKAGECONFIG[ntp] = "--with-hook=ntp, , ,ntp"
+PACKAGECONFIG[chrony] = "--with-hook=ntp, , ,chrony"
+PACKAGECONFIG[ypbind] = "--with-eghook=yp, , ,ypbind-mt"
+
+# add option to override DBDIR location
+DBDIR ?= "${localstatedir}/lib/${BPN}"
+
+EXTRA_OECONF = "--enable-ipv4 \
+ --dbdir=${DBDIR} \
+ --sbindir=${base_sbindir} \
+ --runstatedir=/run \
+ --enable-privsep \
+ --privsepuser=dhcpcd \
+ --with-hooks \
+ --with-eghooks \
+ "
+
+USERADD_PACKAGES = "${PN}"
+USERADD_PARAM:${PN} = "--system -d ${DBDIR} -M -s /bin/false -U dhcpcd"
+
+do_install:append () {
+ # install systemd unit files
+ install -d ${D}${systemd_system_unitdir}
+ install -m 0644 ${WORKDIR}/dhcpcd*.service ${D}${systemd_system_unitdir}
+
+ chmod 700 ${D}${DBDIR}
+ chown dhcpcd:dhcpcd ${D}${DBDIR}
+}
+
+FILES:${PN}-dbg += "${libdir}/dhcpcd/dev/.debug"
diff --git a/meta/recipes-connectivity/dhcpcd/dhcpcd_9.4.1.bb b/meta/recipes-connectivity/dhcpcd/dhcpcd_9.4.1.bb
deleted file mode 100644
index ab6ffe986c..0000000000
--- a/meta/recipes-connectivity/dhcpcd/dhcpcd_9.4.1.bb
+++ /dev/null
@@ -1,60 +0,0 @@
-SECTION = "console/network"
-SUMMARY = "dhcpcd - a DHCP client"
-DESCRIPTION = "dhcpcd runs on your machine and silently configures your \
- computer to work on the attached networks without trouble \
- and mostly without configuration."
-
-HOMEPAGE = "http://roy.marples.name/projects/dhcpcd/"
-
-LICENSE = "BSD-2-Clause"
-LIC_FILES_CHKSUM = "file://LICENSE;md5=d148485768fe85b9f1072b186a7e9b4d"
-
-UPSTREAM_CHECK_URI = "https://roy.marples.name/downloads/dhcpcd/"
-
-SRC_URI = "https://roy.marples.name/downloads/${BPN}/${BPN}-${PV}.tar.xz \
- file://0001-remove-INCLUDEDIR-to-prevent-build-issues.patch \
- file://dhcpcd.service \
- file://dhcpcd@.service \
- "
-
-SRC_URI[sha256sum] = "819357634efed1ea5cf44ec01b24d3d3f8852fec8b4249925dcc5667c54e376c"
-
-inherit pkgconfig autotools-brokensep systemd useradd
-
-SYSTEMD_SERVICE:${PN} = "dhcpcd.service"
-
-PACKAGECONFIG ?= "udev ${@bb.utils.filter('DISTRO_FEATURES', 'ipv6', d)}"
-
-PACKAGECONFIG[udev] = "--with-udev,--without-udev,udev,udev"
-PACKAGECONFIG[ipv6] = "--enable-ipv6,--disable-ipv6"
-# ntp conflicts with chrony
-PACKAGECONFIG[ntp] = "--with-hook=ntp, , ,ntp"
-PACKAGECONFIG[chrony] = "--with-hook=ntp, , ,chrony"
-PACKAGECONFIG[ypbind] = "--with-eghook=yp, , ,ypbind-mt"
-
-# add option to override DBDIR location
-DBDIR ?= "${localstatedir}/lib/${BPN}"
-
-EXTRA_OECONF = "--enable-ipv4 \
- --dbdir=${DBDIR} \
- --sbindir=${base_sbindir} \
- --runstatedir=/run \
- --enable-privsep \
- --privsepuser=dhcpcd \
- --with-hooks \
- --with-eghooks \
- "
-
-USERADD_PACKAGES = "${PN}"
-USERADD_PARAM:${PN} = "--system -d ${DBDIR} -M -s /bin/false -U dhcpcd"
-
-do_install:append () {
- # install systemd unit files
- install -d ${D}${systemd_system_unitdir}
- install -m 0644 ${WORKDIR}/dhcpcd*.service ${D}${systemd_system_unitdir}
-
- chmod 700 ${D}${DBDIR}
- chown dhcpcd:dhcpcd ${D}${DBDIR}
-}
-
-FILES:${PN}-dbg += "${libdir}/dhcpcd/dev/.debug"
diff --git a/meta/recipes-connectivity/dhcpcd/files/0001-20-resolv.conf-improve-the-sitation-of-working-with-.patch b/meta/recipes-connectivity/dhcpcd/files/0001-20-resolv.conf-improve-the-sitation-of-working-with-.patch
new file mode 100644
index 0000000000..8d1ed6671a
--- /dev/null
+++ b/meta/recipes-connectivity/dhcpcd/files/0001-20-resolv.conf-improve-the-sitation-of-working-with-.patch
@@ -0,0 +1,82 @@
+From 02acc4d875ee81e6fd19ef66d69c9f55b4b4a7e7 Mon Sep 17 00:00:00 2001
+From: Chen Qi <Qi.Chen@windriver.com>
+Date: Wed, 9 Nov 2022 16:33:18 +0800
+Subject: [PATCH] 20-resolv.conf: improve the sitation of working with systemd
+
+systemd's resolvconf implementation ignores the protocol part.
+See https://github.com/systemd/systemd/issues/25032.
+
+When using 'dhcp server + dns server + dhcpcd + systemd', we
+get an integration issue, that is dhcpcd runs 'resolvconf -d eth0.ra',
+yet systemd's resolvconf treats it as eth0. This will delete the
+DNS information set by 'resolvconf -a eth0.dhcp'.
+
+Fortunately, 20-resolv.conf has the ability to build the resolv.conf
+file contents itself. We can just pass the generated contents to
+systemd's resolvconf. This way, the DNS information is not incorrectly
+deleted. Also, it does not cause behavior regression for dhcpcd
+in other cases.
+
+Upstream-Status: Inappropriate [OE Specific]
+This patch has been rejected by dhcpcd upstream.
+See details in https://github.com/NetworkConfiguration/dhcpcd/pull/152
+
+Signed-off-by: Chen Qi <Qi.Chen@windriver.com>
+---
+ hooks/20-resolv.conf | 17 +++++++++++++----
+ 1 file changed, 13 insertions(+), 4 deletions(-)
+
+diff --git a/hooks/20-resolv.conf b/hooks/20-resolv.conf
+index 7c29e276..becc019f 100644
+--- a/hooks/20-resolv.conf
++++ b/hooks/20-resolv.conf
+@@ -11,8 +11,12 @@ nocarrier_roaming_dir="$state_dir/roaming"
+ NL="
+ "
+ : ${resolvconf:=resolvconf}
++resolvconf_from_systemd=false
+ if command -v "$resolvconf" >/dev/null 2>&1; then
+ have_resolvconf=true
++ if [ $(basename $(readlink -f $(which $resolvconf))) = resolvectl ]; then
++ resolvconf_from_systemd=true
++ fi
+ else
+ have_resolvconf=false
+ fi
+@@ -69,8 +73,13 @@ build_resolv_conf()
+ else
+ echo "# /etc/resolv.conf.tail can replace this line" >> "$cf"
+ fi
+- if change_file /etc/resolv.conf "$cf"; then
+- chmod 644 /etc/resolv.conf
++ if $resolvconf_from_systemd; then
++ [ -n "$ifmetric" ] && export IF_METRIC="$ifmetric"
++ "$resolvconf" -a "$ifname" <"$cf"
++ else
++ if change_file /etc/resolv.conf "$cf"; then
++ chmod 644 /etc/resolv.conf
++ fi
+ fi
+ rm -f "$cf"
+ }
+@@ -170,7 +179,7 @@ add_resolv_conf()
+ for x in ${new_domain_name_servers}; do
+ conf="${conf}nameserver $x$NL"
+ done
+- if $have_resolvconf; then
++ if $have_resolvconf && ! $resolvconf_from_systemd; then
+ [ -n "$ifmetric" ] && export IF_METRIC="$ifmetric"
+ printf %s "$conf" | "$resolvconf" -a "$ifname"
+ return $?
+@@ -186,7 +195,7 @@ add_resolv_conf()
+
+ remove_resolv_conf()
+ {
+- if $have_resolvconf; then
++ if $have_resolvconf && ($if_down || ! $resolvconf_from_systemd); then
+ "$resolvconf" -d "$ifname" -f
+ else
+ if [ -e "$resolv_conf_dir/$ifname" ]; then
+--
+2.17.1
+
diff --git a/meta/recipes-connectivity/dhcpcd/files/0001-dhcpcd.8-Fix-conflict-error-when-enable-multilib.patch b/meta/recipes-connectivity/dhcpcd/files/0001-dhcpcd.8-Fix-conflict-error-when-enable-multilib.patch
new file mode 100644
index 0000000000..461d04bd1d
--- /dev/null
+++ b/meta/recipes-connectivity/dhcpcd/files/0001-dhcpcd.8-Fix-conflict-error-when-enable-multilib.patch
@@ -0,0 +1,44 @@
+From 5d5ba8a2b8010db6bee68bd712f829cb737c9ac1 Mon Sep 17 00:00:00 2001
+From: Lei Maohui <leimaohui@fujitsu.com>
+Date: Fri, 10 Mar 2023 03:48:46 +0000
+Subject: [PATCH] dhcpcd.8: Fix conflict error when enable multilib.
+
+Error: Transaction test error:
+ file /usr/share/man/man8/dhcpcd.8 conflicts between attempted
+ installs of dhcpcd-doc-9.4.1-r0.cortexa57 and
+ lib32-dhcpcd-doc-9.4.1-r0.armv7ahf_neon
+
+The differences between the two files are as follows:
+@@ -821,7 +821,7 @@
+ If you always use the same options, put them here.
+ .It Pa /usr/libexec/dhcpcd-run-hooks
+ Bourne shell script that is run to configure or de-configure an interface.
+-.It Pa /usr/lib64/dhcpcd/dev
++.It Pa /usr/lib/dhcpcd/dev
+ Linux
+ .Pa /dev
+ management modules.
+
+It is just a man file, there is no necessary to manage multiple
+versions.
+
+Upstream-Status: Inappropriate [oe specific]
+Signed-off-by: Lei Maohui <leimaohui@fujitsu.com>
+
+---
+ src/dhcpcd.8.in | 2 +-
+ 1 file changed, 1 insertion(+), 1 deletion(-)
+
+diff --git a/src/dhcpcd.8.in b/src/dhcpcd.8.in
+index 93232840..09930a31 100644
+--- a/src/dhcpcd.8.in
++++ b/src/dhcpcd.8.in
+@@ -824,7 +824,7 @@ Configuration file for dhcpcd.
+ If you always use the same options, put them here.
+ .It Pa @SCRIPT@
+ Bourne shell script that is run to configure or de-configure an interface.
+-.It Pa @LIBDIR@/dhcpcd/dev
++.It Pa /usr/<libdir>/dhcpcd/dev
+ Linux
+ .Pa /dev
+ management modules.
diff --git a/meta/recipes-connectivity/dhcpcd/files/0001-remove-INCLUDEDIR-to-prevent-build-issues.patch b/meta/recipes-connectivity/dhcpcd/files/0001-remove-INCLUDEDIR-to-prevent-build-issues.patch
index 37d2344438..c54942be4b 100644
--- a/meta/recipes-connectivity/dhcpcd/files/0001-remove-INCLUDEDIR-to-prevent-build-issues.patch
+++ b/meta/recipes-connectivity/dhcpcd/files/0001-remove-INCLUDEDIR-to-prevent-build-issues.patch
@@ -1,4 +1,4 @@
-From aa9e3982c1e75ad49945a62f5e262279c7a905a4 Mon Sep 17 00:00:00 2001
+From ec9fc4e6086e1dbe0ac2f94a8a088a571596a581 Mon Sep 17 00:00:00 2001
From: Stefano Cappa <stefano.cappa.ks89@gmail.com>
Date: Sun, 13 Jan 2019 01:50:52 +0100
Subject: [PATCH] remove INCLUDEDIR to prevent build issues
@@ -6,15 +6,16 @@ Subject: [PATCH] remove INCLUDEDIR to prevent build issues
Upstream-Status: Pending
Signed-off-by: Stefano Cappa <stefano.cappa.ks89@gmail.com>
+
---
configure | 5 -----
1 file changed, 5 deletions(-)
diff --git a/configure b/configure
-index 6c81e0db..32dea2b4 100755
+index 5237b0e2..7220718b 100755
--- a/configure
+++ b/configure
-@@ -20,7 +20,6 @@ BUILD=
+@@ -26,7 +26,6 @@ BUILD=
HOST=
HOSTCC=
TARGET=
@@ -22,7 +23,7 @@ index 6c81e0db..32dea2b4 100755
DEBUG=
FORK=
STATIC=
-@@ -72,7 +71,6 @@ for x do
+@@ -86,7 +85,6 @@ for x do
--mandir) MANDIR=$var;;
--datadir) DATADIR=$var;;
--with-ccopts|CFLAGS) CFLAGS=$var;;
@@ -30,7 +31,7 @@ index 6c81e0db..32dea2b4 100755
CC) CC=$var;;
CPPFLAGS) CPPFLAGS=$var;;
PKG_CONFIG) PKG_CONFIG=$var;;
-@@ -309,9 +307,6 @@ if [ -n "$CPPFLAGS" ]; then
+@@ -343,9 +341,6 @@ if [ -n "$CPPFLAGS" ]; then
echo "CPPFLAGS=" >>$CONFIG_MK
echo "CPPFLAGS+= $CPPFLAGS" >>$CONFIG_MK
fi
@@ -40,6 +41,3 @@ index 6c81e0db..32dea2b4 100755
if [ -n "$LDFLAGS" ]; then
echo "LDFLAGS=" >>$CONFIG_MK
echo "LDFLAGS+= $LDFLAGS" >>$CONFIG_MK
---
-2.17.2 (Apple Git-113)
-
diff --git a/meta/recipes-connectivity/inetutils/inetutils/0001-ftpd-telnetd-Fix-multiple-definitions-of-errcatch-an.patch b/meta/recipes-connectivity/inetutils/inetutils/0001-ftpd-telnetd-Fix-multiple-definitions-of-errcatch-an.patch
deleted file mode 100644
index 49d319f59d..0000000000
--- a/meta/recipes-connectivity/inetutils/inetutils/0001-ftpd-telnetd-Fix-multiple-definitions-of-errcatch-an.patch
+++ /dev/null
@@ -1,58 +0,0 @@
-From 7d39930468e272c740b0eed3c7e5b7fb3abf29e8 Mon Sep 17 00:00:00 2001
-From: Khem Raj <raj.khem@gmail.com>
-Date: Wed, 5 Aug 2020 10:36:22 -0700
-Subject: [PATCH] ftpd,telnetd: Fix multiple definitions of errcatch and not42
-
-This helps fix build failures when -fno-common option is used
-
-Upstream-Status: Pending
-Signed-off-by: Khem Raj <raj.khem@gmail.com>
-
-Signed-off-by: Khem Raj <raj.khem@gmail.com>
----
- ftpd/extern.h | 2 +-
- ftpd/ftpcmd.c | 1 +
- telnetd/utility.c | 2 +-
- 3 files changed, 3 insertions(+), 2 deletions(-)
-
-diff --git a/ftpd/extern.h b/ftpd/extern.h
-index ab33cf3..91dbbee 100644
---- a/ftpd/extern.h
-+++ b/ftpd/extern.h
-@@ -90,7 +90,7 @@ extern void user (const char *);
- extern char *sgetsave (const char *);
-
- /* Exported from ftpd.c. */
--jmp_buf errcatch;
-+extern jmp_buf errcatch;
- extern struct sockaddr_storage data_dest;
- extern socklen_t data_dest_len;
- extern struct sockaddr_storage his_addr;
-diff --git a/ftpd/ftpcmd.c b/ftpd/ftpcmd.c
-index beb1f06..d272e9d 100644
---- a/ftpd/ftpcmd.c
-+++ b/ftpd/ftpcmd.c
-@@ -106,6 +106,7 @@
- #endif
-
- off_t restart_point;
-+jmp_buf errcatch;
-
- static char cbuf[512]; /* Command Buffer. */
- static char *fromname;
-diff --git a/telnetd/utility.c b/telnetd/utility.c
-index e7ffb8e..46bf91e 100644
---- a/telnetd/utility.c
-+++ b/telnetd/utility.c
-@@ -63,7 +63,7 @@ static int ncc;
- static char ptyibuf[BUFSIZ], *ptyip;
- static int pcc;
-
--int not42;
-+extern int not42;
-
- static int
- readstream (int p, char *ibuf, int bufsize)
---
-2.28.0
-
diff --git a/meta/recipes-connectivity/inetutils/inetutils/fix-buffer-fortify-tfpt.patch b/meta/recipes-connectivity/inetutils/inetutils/fix-buffer-fortify-tfpt.patch
deleted file mode 100644
index a91913cb51..0000000000
--- a/meta/recipes-connectivity/inetutils/inetutils/fix-buffer-fortify-tfpt.patch
+++ /dev/null
@@ -1,25 +0,0 @@
-tftpd: Fix abort on error path
-
-When trying to fetch a non existent file, the app crashes with:
-
-*** buffer overflow detected ***:
-Aborted
-
-
-Upstream-Status: Submitted [https://www.mail-archive.com/bug-inetutils@gnu.org/msg03036.html https://gcc.gnu.org/bugzilla/show_bug.cgi?id=91205]
-Signed-off-by: Ricardo Ribalda Delgado <ricardo@ribalda.com>
-diff --git a/src/tftpd.c b/src/tftpd.c
-index 56002a0..144012f 100644
---- a/src/tftpd.c
-+++ b/src/tftpd.c
-@@ -864,9 +864,8 @@ nak (int error)
- pe->e_msg = strerror (error - 100);
- tp->th_code = EUNDEF; /* set 'undef' errorcode */
- }
-- strcpy (tp->th_msg, pe->e_msg);
- length = strlen (pe->e_msg);
-- tp->th_msg[length] = '\0';
-+ memcpy(tp->th_msg, pe->e_msg, length + 1);
- length += 5;
- if (sendto (peer, buf, length, 0, (struct sockaddr *) &from, fromlen) != length)
- syslog (LOG_ERR, "nak: %m\n");
diff --git a/meta/recipes-connectivity/inetutils/inetutils/fix-disable-ipv6.patch b/meta/recipes-connectivity/inetutils/inetutils/fix-disable-ipv6.patch
deleted file mode 100644
index 603d2baf9d..0000000000
--- a/meta/recipes-connectivity/inetutils/inetutils/fix-disable-ipv6.patch
+++ /dev/null
@@ -1,85 +0,0 @@
-From c7c27ba763c613f83c1561e56448b49315c271c5 Mon Sep 17 00:00:00 2001
-From: Jackie Huang <jackie.huang@windriver.com>
-Date: Wed, 6 Mar 2019 09:36:11 -0500
-Subject: [PATCH] Upstream:
- http://www.mail-archive.com/bug-inetutils@gnu.org/msg02103.html
-
-Upstream-Status: Pending
-
-Signed-off-by: Jackie Huang <jackie.huang@windriver.com>
-
----
- ping/ping_common.h | 20 ++++++++++++++++++++
- 1 file changed, 20 insertions(+)
-
-diff --git a/ping/ping_common.h b/ping/ping_common.h
-index 65e3e60..3e84db0 100644
---- a/ping/ping_common.h
-+++ b/ping/ping_common.h
-@@ -18,10 +18,14 @@
- You should have received a copy of the GNU General Public License
- along with this program. If not, see `http://www.gnu.org/licenses/'. */
-
-+#include <config.h>
-+
- #include <netinet/in_systm.h>
- #include <netinet/in.h>
- #include <netinet/ip.h>
-+#ifdef HAVE_IPV6
- #include <netinet/icmp6.h>
-+#endif
- #include <icmp.h>
- #include <error.h>
- #include <progname.h>
-@@ -63,7 +67,12 @@ struct ping_stat
- want to follow the traditional behaviour of ping. */
- #define DEFAULT_PING_COUNT 0
-
-+#ifdef HAVE_IPV6
- #define PING_HEADER_LEN (USE_IPV6 ? sizeof (struct icmp6_hdr) : ICMP_MINLEN)
-+#else
-+#define PING_HEADER_LEN (ICMP_MINLEN)
-+#endif
-+
- #define PING_TIMING(s) ((s) >= sizeof (struct timeval))
- #define PING_DATALEN (64 - PING_HEADER_LEN) /* default data length */
-
-@@ -78,13 +87,20 @@ struct ping_stat
-
- #define PING_MIN_USER_INTERVAL (200000/PING_PRECISION)
-
-+#ifdef HAVE_IPV6
- /* FIXME: Adjust IPv6 case for options and their consumption. */
- #define _PING_BUFLEN(p, u) ((u)? ((p)->ping_datalen + sizeof (struct icmp6_hdr)) : \
- (MAXIPLEN + (p)->ping_datalen + ICMP_TSLEN))
-
-+#else
-+#define _PING_BUFLEN(p, u) (MAXIPLEN + (p)->ping_datalen + ICMP_TSLEN)
-+#endif
-+
-+#ifdef HAVE_IPV6
- typedef int (*ping_efp6) (int code, void *closure, struct sockaddr_in6 * dest,
- struct sockaddr_in6 * from, struct icmp6_hdr * icmp,
- int datalen);
-+#endif
-
- typedef int (*ping_efp) (int code,
- void *closure,
-@@ -93,13 +109,17 @@ typedef int (*ping_efp) (int code,
- struct ip * ip, icmphdr_t * icmp, int datalen);
-
- union event {
-+#ifdef HAVE_IPV6
- ping_efp6 handler6;
-+#endif
- ping_efp handler;
- };
-
- union ping_address {
- struct sockaddr_in ping_sockaddr;
-+#ifdef HAVE_IPV6
- struct sockaddr_in6 ping_sockaddr6;
-+#endif
- };
-
- typedef struct ping_data PING;
diff --git a/meta/recipes-connectivity/inetutils/inetutils/inetutils-1.8-0001-printf-parse-pull-in-features.h-for-__GLIBC__.patch b/meta/recipes-connectivity/inetutils/inetutils/inetutils-1.8-0001-printf-parse-pull-in-features.h-for-__GLIBC__.patch
deleted file mode 100644
index 2974bd4f94..0000000000
--- a/meta/recipes-connectivity/inetutils/inetutils/inetutils-1.8-0001-printf-parse-pull-in-features.h-for-__GLIBC__.patch
+++ /dev/null
@@ -1,27 +0,0 @@
-From f7f785c21306010b2367572250b2822df5bc7728 Mon Sep 17 00:00:00 2001
-From: Mike Frysinger <vapier at gentoo.org>
-Date: Thu, 18 Nov 2010 16:59:14 -0500
-Subject: [PATCH] printf-parse: pull in features.h for __GLIBC__
-
-Upstream-Status: Pending
-
-Signed-off-by: Mike Frysinger <vapier at gentoo.org>
-
----
- lib/printf-parse.h | 3 +++
- 1 file changed, 3 insertions(+)
-
-diff --git a/lib/printf-parse.h b/lib/printf-parse.h
-index e7d0f82..d7b4534 100644
---- a/lib/printf-parse.h
-+++ b/lib/printf-parse.h
-@@ -28,6 +28,9 @@
-
- #include "printf-args.h"
-
-+#ifdef HAVE_FEATURES_H
-+# include <features.h> /* for __GLIBC__ */
-+#endif
-
- /* Flags */
- #define FLAG_GROUP 1 /* ' flag */
diff --git a/meta/recipes-connectivity/inetutils/inetutils/inetutils-1.8-0003-wchar.patch b/meta/recipes-connectivity/inetutils/inetutils/inetutils-1.8-0003-wchar.patch
deleted file mode 100644
index 1ef7e21073..0000000000
--- a/meta/recipes-connectivity/inetutils/inetutils/inetutils-1.8-0003-wchar.patch
+++ /dev/null
@@ -1,25 +0,0 @@
-From 9089c6eafbf5903174dce87b68476e35db80beb9 Mon Sep 17 00:00:00 2001
-From: Martin Jansa <martin.jansa@gmail.com>
-Date: Wed, 6 Mar 2019 09:36:11 -0500
-Subject: [PATCH] inetutils: Import version 1.9.4
-
-Upstream-Status: Pending
-
----
- lib/wchar.in.h | 3 +++
- 1 file changed, 3 insertions(+)
-
-diff --git a/lib/wchar.in.h b/lib/wchar.in.h
-index cdda680..043866a 100644
---- a/lib/wchar.in.h
-+++ b/lib/wchar.in.h
-@@ -77,6 +77,9 @@
- /* The include_next requires a split double-inclusion guard. */
- #if @HAVE_WCHAR_H@
- # @INCLUDE_NEXT@ @NEXT_WCHAR_H@
-+#else
-+# include <stddef.h>
-+# define MB_CUR_MAX 1
- #endif
-
- #undef _GL_ALREADY_INCLUDING_WCHAR_H
diff --git a/meta/recipes-connectivity/inetutils/inetutils/inetutils-1.9-PATH_PROCNET_DEV.patch b/meta/recipes-connectivity/inetutils/inetutils/inetutils-1.9-PATH_PROCNET_DEV.patch
deleted file mode 100644
index 460ddf9830..0000000000
--- a/meta/recipes-connectivity/inetutils/inetutils/inetutils-1.9-PATH_PROCNET_DEV.patch
+++ /dev/null
@@ -1,37 +0,0 @@
-From 101130f422dd5c01a1459645d7b2a5b8d19720ab Mon Sep 17 00:00:00 2001
-From: Martin Jansa <martin.jansa@gmail.com>
-Date: Wed, 6 Mar 2019 09:36:11 -0500
-Subject: [PATCH] inetutils: define PATH_PROCNET_DEV if not already defined
-MIME-Version: 1.0
-Content-Type: text/plain; charset=UTF-8
-Content-Transfer-Encoding: 8bit
-
-this prevents the following compilation error :
-system/linux.c:401:15: error: 'PATH_PROCNET_DEV' undeclared (first use in this function)
-
-this patch comes from :
- http://repository.timesys.com/buildsources/i/inetutils/inetutils-1.9/
-
-Upstream-Status: Inappropriate [not author]
-
-Signed-of-by: Eric Bénard <eric@eukrea.com>
-
----
- ifconfig/system/linux.c | 4 ++++
- 1 file changed, 4 insertions(+)
-
-diff --git a/ifconfig/system/linux.c b/ifconfig/system/linux.c
-index e453b46..4268ca9 100644
---- a/ifconfig/system/linux.c
-+++ b/ifconfig/system/linux.c
-@@ -53,6 +53,10 @@
- #include "../ifconfig.h"
-
-
-+#ifndef PATH_PROCNET_DEV
-+ #define PATH_PROCNET_DEV "/proc/net/dev"
-+#endif
-+
- /* ARPHRD stuff. */
-
- static void
diff --git a/meta/recipes-connectivity/inetutils/inetutils/inetutils-only-check-pam_appl.h-when-pam-enabled.patch b/meta/recipes-connectivity/inetutils/inetutils/inetutils-only-check-pam_appl.h-when-pam-enabled.patch
deleted file mode 100644
index 2343c03cb4..0000000000
--- a/meta/recipes-connectivity/inetutils/inetutils/inetutils-only-check-pam_appl.h-when-pam-enabled.patch
+++ /dev/null
@@ -1,49 +0,0 @@
-From cc66e842e037fba9f06761f942abe5c4856492b8 Mon Sep 17 00:00:00 2001
-From: Kai Kang <kai.kang@windriver.com>
-Date: Wed, 6 Mar 2019 09:36:11 -0500
-Subject: [PATCH] inetutils: Import version 1.9.4
-
-Only check security/pam_appl.h which is provided by package libpam when pam is
-enabled.
-
-Upstream-Status: Pending
-
-Signed-off-by: Kai Kang <kai.kang@windriver.com>
-
----
- configure.ac | 15 ++++++++++++++-
- 1 file changed, 14 insertions(+), 1 deletion(-)
-
-diff --git a/configure.ac b/configure.ac
-index 5e16c3a..18510a8 100644
---- a/configure.ac
-+++ b/configure.ac
-@@ -182,6 +182,19 @@ AC_SUBST(LIBUTIL)
-
- # See if we have libpam.a. Investigate PAM versus Linux-PAM.
- if test "$with_pam" = yes ; then
-+ AC_CHECK_HEADERS([security/pam_appl.h], [], [], [
-+#include <sys/types.h>
-+#ifdef HAVE_NETINET_IN_SYSTM_H
-+# include <netinet/in_systm.h>
-+#endif
-+#include <netinet/in.h>
-+#ifdef HAVE_NETINET_IP_H
-+# include <netinet/ip.h>
-+#endif
-+#ifdef HAVE_SYS_PARAM_H
-+# include <sys/param.h>
-+#endif
-+])
- AC_CHECK_LIB(dl, dlopen, LIBDL=-ldl)
- AC_CHECK_LIB(pam, pam_authenticate, LIBPAM=-lpam)
- if test "$ac_cv_lib_pam_pam_authenticate" = yes ; then
-@@ -617,7 +630,7 @@ AC_HEADER_DIRENT
- AC_CHECK_HEADERS([arpa/nameser.h arpa/tftp.h fcntl.h features.h \
- glob.h memory.h netinet/ether.h netinet/in_systm.h \
- netinet/ip.h netinet/ip_icmp.h netinet/ip_var.h \
-- security/pam_appl.h shadow.h \
-+ shadow.h \
- stropts.h sys/tty.h \
- sys/utsname.h sys/ptyvar.h sys/msgbuf.h sys/filio.h \
- sys/ioctl_compat.h sys/cdefs.h sys/stream.h sys/mkdev.h \
diff --git a/meta/recipes-connectivity/inetutils/inetutils_2.2.bb b/meta/recipes-connectivity/inetutils/inetutils_2.2.bb
deleted file mode 100644
index 6c9a299b71..0000000000
--- a/meta/recipes-connectivity/inetutils/inetutils_2.2.bb
+++ /dev/null
@@ -1,211 +0,0 @@
-DESCRIPTION = "The GNU inetutils are a collection of common \
-networking utilities and servers including ftp, ftpd, rcp, \
-rexec, rlogin, rlogind, rsh, rshd, syslog, syslogd, talk, \
-talkd, telnet, telnetd, tftp, tftpd, and uucpd."
-HOMEPAGE = "http://www.gnu.org/software/inetutils"
-SECTION = "net"
-DEPENDS = "ncurses netbase readline virtual/crypt"
-
-LICENSE = "GPL-3.0-only"
-
-LIC_FILES_CHKSUM = "file://COPYING;md5=0c7051aef9219dc7237f206c5c4179a7"
-
-SRC_URI[sha256sum] = "d547f69172df73afef691a0f7886280fd781acea28def4ff4b4b212086a89d80"
-SRC_URI = "${GNU_MIRROR}/inetutils/inetutils-${PV}.tar.xz \
- file://inetutils-1.8-0001-printf-parse-pull-in-features.h-for-__GLIBC__.patch \
- file://inetutils-1.8-0003-wchar.patch \
- file://rexec.xinetd.inetutils \
- file://rlogin.xinetd.inetutils \
- file://rsh.xinetd.inetutils \
- file://telnet.xinetd.inetutils \
- file://tftpd.xinetd.inetutils \
- file://inetutils-1.9-PATH_PROCNET_DEV.patch \
- file://inetutils-only-check-pam_appl.h-when-pam-enabled.patch \
-"
-
-inherit autotools gettext update-alternatives texinfo
-
-acpaths = "-I ./m4"
-
-SRC_URI += "${@bb.utils.contains('DISTRO_FEATURES', 'ipv6', '', 'file://fix-disable-ipv6.patch', d)}"
-
-PACKAGECONFIG ??= "ftp uucpd \
- ${@bb.utils.filter('DISTRO_FEATURES', 'pam', d)} \
- ${@bb.utils.contains('DISTRO_FEATURES', 'ipv6', 'ipv6 ping6', '', d)} \
- "
-PACKAGECONFIG[ftp] = "--enable-ftp,--disable-ftp,readline"
-PACKAGECONFIG[uucpd] = "--enable-uucpd,--disable-uucpd,readline"
-PACKAGECONFIG[pam] = "--with-pam,--without-pam,libpam"
-PACKAGECONFIG[ipv6] = "--enable-ipv6,--disable-ipv6 gl_cv_socket_ipv6=no,"
-PACKAGECONFIG[ping6] = "--enable-ping6,--disable-ping6,"
-
-EXTRA_OECONF = "--with-ncurses-include-dir=${STAGING_INCDIR} \
- inetutils_cv_path_login=${base_bindir}/login \
- --with-libreadline-prefix=${STAGING_LIBDIR} \
- --enable-rpath=no \
-"
-
-# These are horrible for security, disable them
-EXTRA_OECONF:append = " --disable-rsh --disable-rshd --disable-rcp \
- --disable-rlogin --disable-rlogind --disable-rexec --disable-rexecd"
-
-do_configure:prepend () {
- export HELP2MAN='true'
- cp ${STAGING_DATADIR_NATIVE}/gettext/config.rpath ${S}/build-aux/config.rpath
- install -m 0755 ${STAGING_DATADIR_NATIVE}/gnu-config/config.guess ${S}
- install -m 0755 ${STAGING_DATADIR_NATIVE}/gnu-config/config.sub ${S}
- rm -f ${S}/glob/configure*
-}
-
-do_install:append () {
- install -m 0755 -d ${D}${base_sbindir}
- install -m 0755 -d ${D}${sbindir}
- install -m 0755 -d ${D}${sysconfdir}/xinetd.d
- if [ "${base_bindir}" != "${bindir}" ] ; then
- install -m 0755 -d ${D}${base_bindir}
- mv ${D}${bindir}/ping* ${D}${base_bindir}/
- mv ${D}${bindir}/hostname ${D}${base_bindir}/
- mv ${D}${bindir}/dnsdomainname ${D}${base_bindir}/
- fi
- mv ${D}${bindir}/ifconfig ${D}${base_sbindir}/
- mv ${D}${libexecdir}/syslogd ${D}${base_sbindir}/
- mv ${D}${libexecdir}/tftpd ${D}${sbindir}/in.tftpd
- mv ${D}${libexecdir}/telnetd ${D}${sbindir}/in.telnetd
- if [ -e ${D}${libexecdir}/rexecd ]; then
- mv ${D}${libexecdir}/rexecd ${D}${sbindir}/in.rexecd
- cp ${WORKDIR}/rexec.xinetd.inetutils ${D}/${sysconfdir}/xinetd.d/rexec
- fi
- if [ -e ${D}${libexecdir}/rlogind ]; then
- mv ${D}${libexecdir}/rlogind ${D}${sbindir}/in.rlogind
- cp ${WORKDIR}/rlogin.xinetd.inetutils ${D}/${sysconfdir}/xinetd.d/rlogin
- fi
- if [ -e ${D}${libexecdir}/rshd ]; then
- mv ${D}${libexecdir}/rshd ${D}${sbindir}/in.rshd
- cp ${WORKDIR}/rsh.xinetd.inetutils ${D}/${sysconfdir}/xinetd.d/rsh
- fi
- if [ -e ${D}${libexecdir}/talkd ]; then
- mv ${D}${libexecdir}/talkd ${D}${sbindir}/in.talkd
- fi
- mv ${D}${libexecdir}/uucpd ${D}${sbindir}/in.uucpd
- mv ${D}${libexecdir}/* ${D}${bindir}/
- cp ${WORKDIR}/telnet.xinetd.inetutils ${D}/${sysconfdir}/xinetd.d/telnet
- cp ${WORKDIR}/tftpd.xinetd.inetutils ${D}/${sysconfdir}/xinetd.d/tftpd
-
- sed -e 's,@SBINDIR@,${sbindir},g' -i ${D}/${sysconfdir}/xinetd.d/*
- if [ -e ${D}${libdir}/charset.alias ]; then
- rm -rf ${D}${libdir}/charset.alias
- fi
- rm -rf ${D}${libexecdir}/
- # remove usr/lib if empty
- rmdir ${D}${libdir} || true
-}
-
-PACKAGES =+ "${PN}-ping ${PN}-ping6 ${PN}-hostname ${PN}-ifconfig \
-${PN}-tftp ${PN}-logger ${PN}-traceroute ${PN}-syslogd \
-${PN}-ftp ${PN}-ftpd ${PN}-tftpd ${PN}-telnet ${PN}-telnetd ${PN}-inetd \
-${PN}-rsh ${PN}-rshd"
-
-# The packages tftpd, telnetd and rshd conflict with the ones
-# provided by netkit, so add the corresponding -dbg packages
-# for them to avoid the confliction between the dbg package
-# of inetutils and netkit.
-PACKAGES =+ "${PN}-tftpd-dbg ${PN}-telnetd-dbg ${PN}-rshd-dbg"
-NOAUTOPACKAGEDEBUG = "1"
-
-ALTERNATIVE_PRIORITY = "79"
-ALTERNATIVE:${PN} = "whois dnsdomainname"
-ALTERNATIVE_LINK_NAME[uucpd] = "${sbindir}/in.uucpd"
-ALTERNATIVE_LINK_NAME[dnsdomainname] = "${base_bindir}/dnsdomainname"
-
-ALTERNATIVE_PRIORITY_${PN}-logger = "60"
-ALTERNATIVE:${PN}-logger = "logger"
-ALTERNATIVE:${PN}-syslogd = "syslogd"
-ALTERNATIVE_LINK_NAME[syslogd] = "${base_sbindir}/syslogd"
-
-ALTERNATIVE:${PN}-ftp = "ftp"
-ALTERNATIVE:${PN}-ftpd = "ftpd"
-ALTERNATIVE:${PN}-tftp = "tftp"
-ALTERNATIVE:${PN}-tftpd = "tftpd"
-ALTERNATIVE_LINK_NAME[tftpd] = "${sbindir}/tftpd"
-ALTERNATIVE_TARGET[tftpd] = "${sbindir}/in.tftpd"
-
-ALTERNATIVE:${PN}-telnet = "telnet"
-ALTERNATIVE:${PN}-telnetd = "telnetd"
-ALTERNATIVE_LINK_NAME[telnetd] = "${sbindir}/telnetd"
-ALTERNATIVE_TARGET[telnetd] = "${sbindir}/in.telnetd"
-
-ALTERNATIVE:${PN}-inetd= "inetd"
-ALTERNATIVE:${PN}-traceroute = "traceroute"
-
-ALTERNATIVE:${PN}-hostname = "hostname"
-ALTERNATIVE_LINK_NAME[hostname] = "${base_bindir}/hostname"
-
-ALTERNATIVE:${PN}-doc = "hostname.1 dnsdomainname.1 logger.1 syslogd.8 \
- tftpd.8 tftp.1 telnetd.8"
-ALTERNATIVE_LINK_NAME[hostname.1] = "${mandir}/man1/hostname.1"
-ALTERNATIVE_LINK_NAME[dnsdomainname.1] = "${mandir}/man1/dnsdomainname.1"
-ALTERNATIVE_LINK_NAME[logger.1] = "${mandir}/man1/logger.1"
-ALTERNATIVE_LINK_NAME[syslogd.8] = "${mandir}/man8/syslogd.8"
-ALTERNATIVE_LINK_NAME[telnetd.8] = "${mandir}/man8/telnetd.8"
-ALTERNATIVE_LINK_NAME[tftpd.8] = "${mandir}/man8/tftpd.8"
-ALTERNATIVE_LINK_NAME[tftp.1] = "${mandir}/man1/tftp.1"
-
-ALTERNATIVE:${PN}-ifconfig = "ifconfig"
-ALTERNATIVE_LINK_NAME[ifconfig] = "${base_sbindir}/ifconfig"
-
-ALTERNATIVE:${PN}-ping = "ping"
-ALTERNATIVE_LINK_NAME[ping] = "${base_bindir}/ping"
-
-ALTERNATIVE:${PN}-ping6 = "${@bb.utils.filter('PACKAGECONFIG', 'ping6', d)}"
-ALTERNATIVE_LINK_NAME[ping6] = "${base_bindir}/ping6"
-
-
-FILES:${PN}-dbg += "${base_bindir}/.debug ${base_sbindir}/.debug ${bindir}/.debug ${sbindir}/.debug"
-FILES:${PN}-ping = "${base_bindir}/ping.${BPN}"
-FILES:${PN}-ping6 = "${base_bindir}/ping6.${BPN}"
-FILES:${PN}-hostname = "${base_bindir}/hostname.${BPN}"
-FILES:${PN}-ifconfig = "${base_sbindir}/ifconfig.${BPN}"
-FILES:${PN}-traceroute = "${bindir}/traceroute.${BPN}"
-FILES:${PN}-logger = "${bindir}/logger.${BPN}"
-
-FILES:${PN}-syslogd = "${base_sbindir}/syslogd.${BPN}"
-RCONFLICTS:${PN}-syslogd = "rsyslog busybox-syslog sysklogd syslog-ng"
-
-FILES:${PN}-ftp = "${bindir}/ftp.${BPN}"
-
-FILES:${PN}-tftp = "${bindir}/tftp.${BPN}"
-FILES:${PN}-telnet = "${bindir}/telnet.${BPN}"
-
-# We make us of RCONFLICTS / RPROVIDES here rather than using the normal
-# alternatives method as this leads to packaging QA issues when using
-# musl as that library does not provide what these applications need to
-# build.
-FILES:${PN}-rsh = "${bindir}/rsh ${bindir}/rlogin ${bindir}/rexec ${bindir}/rcp"
-RCONFLICTS:${PN}-rsh += "netkit-rsh-client"
-RPROVIDES:${PN}-rsh = "rsh"
-
-FILES:${PN}-rshd = "${sbindir}/in.rshd ${sbindir}/in.rlogind ${sbindir}/in.rexecd \
- ${sysconfdir}/xinetd.d/rsh ${sysconfdir}/xinetd.d/rlogin ${sysconfdir}/xinetd.d/rexec"
-FILES:${PN}-rshd-dbg = "${sbindir}/.debug/in.rshd ${sbindir}/.debug/in.rlogind ${sbindir}/.debug/in.rexecd"
-RDEPENDS:${PN}-rshd += "xinetd tcp-wrappers"
-RCONFLICTS:${PN}-rshd += "netkit-rshd-server"
-RPROVIDES:${PN}-rshd = "rshd"
-
-FILES:${PN}-ftpd = "${bindir}/ftpd.${BPN}"
-FILES:${PN}-ftpd-dbg = "${bindir}/.debug/ftpd.${BPN}"
-RDEPENDS:${PN}-ftpd += "xinetd"
-
-FILES:${PN}-tftpd = "${sbindir}/in.tftpd ${sysconfdir}/xinetd.d/tftpd"
-FILES:${PN}-tftpd-dbg = "${sbindir}/.debug/in.tftpd"
-RCONFLICTS:${PN}-tftpd += "netkit-tftpd"
-RDEPENDS:${PN}-tftpd += "xinetd"
-
-FILES:${PN}-telnetd = "${sbindir}/in.telnetd ${sysconfdir}/xinetd.d/telnet"
-FILES:${PN}-telnetd-dbg = "${sbindir}/.debug/in.telnetd"
-RCONFLICTS:${PN}-telnetd += "netkit-telnet"
-RPROVIDES:${PN}-telnetd = "telnetd"
-RDEPENDS:${PN}-telnetd += "xinetd"
-
-FILES:${PN}-inetd = "${bindir}/inetd.${BPN}"
-
-RDEPENDS:${PN} = "xinetd"
diff --git a/meta/recipes-connectivity/inetutils/inetutils_2.5.bb b/meta/recipes-connectivity/inetutils/inetutils_2.5.bb
new file mode 100644
index 0000000000..0f1a0736bd
--- /dev/null
+++ b/meta/recipes-connectivity/inetutils/inetutils_2.5.bb
@@ -0,0 +1,218 @@
+SUMMARY = "The GNU inetutils are a collection of common networking utilities and servers."
+DESCRIPTION = "The GNU inetutils are a collection of common \
+networking utilities and servers including ftp, ftpd, rcp, \
+rexec, rlogin, rlogind, rsh, rshd, syslog, syslogd, talk, \
+talkd, telnet, telnetd, tftp, tftpd, and uucpd."
+HOMEPAGE = "http://www.gnu.org/software/inetutils"
+SECTION = "net"
+DEPENDS = "ncurses netbase readline virtual/crypt"
+
+LICENSE = "GPL-3.0-only"
+
+LIC_FILES_CHKSUM = "file://COPYING;md5=0c7051aef9219dc7237f206c5c4179a7"
+
+SRC_URI[sha256sum] = "87697d60a31e10b5cb86a9f0651e1ec7bee98320d048c0739431aac3d5764fb6"
+SRC_URI = "${GNU_MIRROR}/inetutils/inetutils-${PV}.tar.xz \
+ file://rexec.xinetd.inetutils \
+ file://rlogin.xinetd.inetutils \
+ file://rsh.xinetd.inetutils \
+ file://telnet.xinetd.inetutils \
+ file://tftpd.xinetd.inetutils \
+ "
+
+inherit autotools gettext update-alternatives texinfo
+
+acpaths = "-I ./m4"
+
+PACKAGECONFIG ??= "ftp uucpd \
+ ${@bb.utils.filter('DISTRO_FEATURES', 'pam', d)} \
+ ${@bb.utils.contains('DISTRO_FEATURES', 'ipv6', 'ipv6 ping6', '', d)} \
+ "
+PACKAGECONFIG[ftp] = "--enable-ftp,--disable-ftp,readline"
+PACKAGECONFIG[uucpd] = "--enable-uucpd,--disable-uucpd,readline"
+PACKAGECONFIG[pam] = "--with-pam,--without-pam,libpam"
+PACKAGECONFIG[ipv6] = "--enable-ipv6,--disable-ipv6 gl_cv_socket_ipv6=no,"
+PACKAGECONFIG[ping6] = "--enable-ping6,--disable-ping6,"
+
+EXTRA_OECONF = "--with-ncurses-include-dir=${STAGING_INCDIR} \
+ --with-libreadline-prefix=${STAGING_LIBDIR} \
+ --enable-rpath=no \
+ --with-path-login=${base_bindir}/login \
+ --with-path-cp=${base_bindir}/cp \
+ --with-path-uucico=${libexecdir}/uuico \
+ --with-path-procnet-dev=/proc/net/dev \
+ "
+
+EXTRA_OECONF:append:libc-musl = " --with-path-utmpx=/dev/null/utmpx --with-path-wtmpx=/dev/null/wtmpx"
+
+# These are horrible for security, disable them
+EXTRA_OECONF:append = " --disable-rsh --disable-rshd --disable-rcp \
+ --disable-rlogin --disable-rlogind --disable-rexec --disable-rexecd"
+
+# The configure script guesses many paths in cross builds, check for this happening
+do_configure_cross_check() {
+ if grep "may be incorrect because of cross-compilation" ${B}/config.log; then
+ bberror Default path values used, these must be set explicitly
+ fi
+}
+do_configure[postfuncs] += "do_configure_cross_check"
+
+# The --with-path options are not actually options, so this check needs to be silenced
+ERROR_QA:remove = "unknown-configure-option"
+
+do_configure:prepend () {
+ export HELP2MAN='true'
+}
+
+do_install:append () {
+ install -m 0755 -d ${D}${base_sbindir}
+ install -m 0755 -d ${D}${sbindir}
+ install -m 0755 -d ${D}${sysconfdir}/xinetd.d
+ if [ "${base_bindir}" != "${bindir}" ] ; then
+ install -m 0755 -d ${D}${base_bindir}
+ mv ${D}${bindir}/ping* ${D}${base_bindir}/
+ mv ${D}${bindir}/hostname ${D}${base_bindir}/
+ mv ${D}${bindir}/dnsdomainname ${D}${base_bindir}/
+ fi
+ mv ${D}${bindir}/ifconfig ${D}${base_sbindir}/
+ mv ${D}${libexecdir}/syslogd ${D}${base_sbindir}/
+ mv ${D}${libexecdir}/tftpd ${D}${sbindir}/in.tftpd
+ mv ${D}${libexecdir}/telnetd ${D}${sbindir}/in.telnetd
+ if [ -e ${D}${libexecdir}/rexecd ]; then
+ mv ${D}${libexecdir}/rexecd ${D}${sbindir}/in.rexecd
+ cp ${WORKDIR}/rexec.xinetd.inetutils ${D}/${sysconfdir}/xinetd.d/rexec
+ fi
+ if [ -e ${D}${libexecdir}/rlogind ]; then
+ mv ${D}${libexecdir}/rlogind ${D}${sbindir}/in.rlogind
+ cp ${WORKDIR}/rlogin.xinetd.inetutils ${D}/${sysconfdir}/xinetd.d/rlogin
+ fi
+ if [ -e ${D}${libexecdir}/rshd ]; then
+ mv ${D}${libexecdir}/rshd ${D}${sbindir}/in.rshd
+ cp ${WORKDIR}/rsh.xinetd.inetutils ${D}/${sysconfdir}/xinetd.d/rsh
+ fi
+ if [ -e ${D}${libexecdir}/talkd ]; then
+ mv ${D}${libexecdir}/talkd ${D}${sbindir}/in.talkd
+ fi
+ mv ${D}${libexecdir}/uucpd ${D}${sbindir}/in.uucpd
+ mv ${D}${libexecdir}/* ${D}${bindir}/
+ cp ${WORKDIR}/telnet.xinetd.inetutils ${D}/${sysconfdir}/xinetd.d/telnet
+ cp ${WORKDIR}/tftpd.xinetd.inetutils ${D}/${sysconfdir}/xinetd.d/tftpd
+
+ sed -e 's,@SBINDIR@,${sbindir},g' -i ${D}/${sysconfdir}/xinetd.d/*
+ if [ -e ${D}${libdir}/charset.alias ]; then
+ rm -rf ${D}${libdir}/charset.alias
+ fi
+ rm -rf ${D}${libexecdir}/
+ # remove usr/lib if empty
+ rmdir ${D}${libdir} || true
+}
+
+PACKAGES =+ "${PN}-ping ${PN}-ping6 ${PN}-hostname ${PN}-ifconfig \
+${PN}-tftp ${PN}-logger ${PN}-traceroute ${PN}-syslogd \
+${PN}-ftp ${PN}-ftpd ${PN}-tftpd ${PN}-telnet ${PN}-telnetd ${PN}-inetd \
+${PN}-rsh ${PN}-rshd"
+
+# The packages tftpd, telnetd and rshd conflict with the ones
+# provided by netkit, so add the corresponding -dbg packages
+# for them to avoid the confliction between the dbg package
+# of inetutils and netkit.
+PACKAGES =+ "${PN}-tftpd-dbg ${PN}-telnetd-dbg ${PN}-rshd-dbg"
+NOAUTOPACKAGEDEBUG = "1"
+
+ALTERNATIVE_PRIORITY = "79"
+ALTERNATIVE:${PN} = "whois dnsdomainname"
+ALTERNATIVE_LINK_NAME[uucpd] = "${sbindir}/in.uucpd"
+ALTERNATIVE_LINK_NAME[dnsdomainname] = "${base_bindir}/dnsdomainname"
+
+ALTERNATIVE_PRIORITY_${PN}-logger = "60"
+ALTERNATIVE:${PN}-logger = "logger"
+ALTERNATIVE:${PN}-syslogd = "syslogd"
+ALTERNATIVE_LINK_NAME[syslogd] = "${base_sbindir}/syslogd"
+
+ALTERNATIVE:${PN}-ftp = "ftp"
+ALTERNATIVE:${PN}-ftpd = "ftpd"
+ALTERNATIVE:${PN}-tftp = "tftp"
+ALTERNATIVE:${PN}-tftpd = "tftpd"
+ALTERNATIVE_LINK_NAME[tftpd] = "${sbindir}/tftpd"
+ALTERNATIVE_TARGET[tftpd] = "${sbindir}/in.tftpd"
+
+ALTERNATIVE:${PN}-telnet = "telnet"
+ALTERNATIVE:${PN}-telnetd = "telnetd"
+ALTERNATIVE_LINK_NAME[telnetd] = "${sbindir}/telnetd"
+ALTERNATIVE_TARGET[telnetd] = "${sbindir}/in.telnetd"
+
+ALTERNATIVE:${PN}-inetd= "inetd"
+ALTERNATIVE:${PN}-traceroute = "traceroute"
+
+ALTERNATIVE:${PN}-hostname = "hostname"
+ALTERNATIVE_LINK_NAME[hostname] = "${base_bindir}/hostname"
+
+ALTERNATIVE:${PN}-doc = "hostname.1 dnsdomainname.1 logger.1 syslogd.8 \
+ tftpd.8 tftp.1 telnetd.8"
+ALTERNATIVE_LINK_NAME[hostname.1] = "${mandir}/man1/hostname.1"
+ALTERNATIVE_LINK_NAME[dnsdomainname.1] = "${mandir}/man1/dnsdomainname.1"
+ALTERNATIVE_LINK_NAME[logger.1] = "${mandir}/man1/logger.1"
+ALTERNATIVE_LINK_NAME[syslogd.8] = "${mandir}/man8/syslogd.8"
+ALTERNATIVE_LINK_NAME[telnetd.8] = "${mandir}/man8/telnetd.8"
+ALTERNATIVE_LINK_NAME[tftpd.8] = "${mandir}/man8/tftpd.8"
+ALTERNATIVE_LINK_NAME[tftp.1] = "${mandir}/man1/tftp.1"
+
+ALTERNATIVE:${PN}-ifconfig = "ifconfig"
+ALTERNATIVE_LINK_NAME[ifconfig] = "${base_sbindir}/ifconfig"
+
+ALTERNATIVE:${PN}-ping = "ping"
+ALTERNATIVE_LINK_NAME[ping] = "${base_bindir}/ping"
+
+ALTERNATIVE:${PN}-ping6 = "${@bb.utils.filter('PACKAGECONFIG', 'ping6', d)}"
+ALTERNATIVE_LINK_NAME[ping6] = "${base_bindir}/ping6"
+
+
+FILES:${PN}-dbg += "${base_bindir}/.debug ${base_sbindir}/.debug ${bindir}/.debug ${sbindir}/.debug"
+FILES:${PN}-ping = "${base_bindir}/ping.${BPN}"
+FILES:${PN}-ping6 = "${base_bindir}/ping6.${BPN}"
+FILES:${PN}-hostname = "${base_bindir}/hostname.${BPN}"
+FILES:${PN}-ifconfig = "${base_sbindir}/ifconfig.${BPN}"
+FILES:${PN}-traceroute = "${bindir}/traceroute.${BPN}"
+FILES:${PN}-logger = "${bindir}/logger.${BPN}"
+
+FILES:${PN}-syslogd = "${base_sbindir}/syslogd.${BPN}"
+RCONFLICTS:${PN}-syslogd = "rsyslog busybox-syslog sysklogd syslog-ng"
+
+FILES:${PN}-ftp = "${bindir}/ftp.${BPN}"
+
+FILES:${PN}-tftp = "${bindir}/tftp.${BPN}"
+FILES:${PN}-telnet = "${bindir}/telnet.${BPN}"
+
+# We make us of RCONFLICTS / RPROVIDES here rather than using the normal
+# alternatives method as this leads to packaging QA issues when using
+# musl as that library does not provide what these applications need to
+# build.
+FILES:${PN}-rsh = "${bindir}/rsh ${bindir}/rlogin ${bindir}/rexec ${bindir}/rcp"
+RCONFLICTS:${PN}-rsh += "netkit-rsh-client"
+RPROVIDES:${PN}-rsh = "rsh"
+
+FILES:${PN}-rshd = "${sbindir}/in.rshd ${sbindir}/in.rlogind ${sbindir}/in.rexecd \
+ ${sysconfdir}/xinetd.d/rsh ${sysconfdir}/xinetd.d/rlogin ${sysconfdir}/xinetd.d/rexec"
+FILES:${PN}-rshd-dbg = "${sbindir}/.debug/in.rshd ${sbindir}/.debug/in.rlogind ${sbindir}/.debug/in.rexecd"
+RDEPENDS:${PN}-rshd += "xinetd tcp-wrappers"
+RCONFLICTS:${PN}-rshd += "netkit-rshd-server"
+RPROVIDES:${PN}-rshd = "rshd"
+
+FILES:${PN}-ftpd = "${bindir}/ftpd.${BPN}"
+FILES:${PN}-ftpd-dbg = "${bindir}/.debug/ftpd.${BPN}"
+RDEPENDS:${PN}-ftpd += "xinetd"
+
+FILES:${PN}-tftpd = "${sbindir}/in.tftpd ${sysconfdir}/xinetd.d/tftpd"
+FILES:${PN}-tftpd-dbg = "${sbindir}/.debug/in.tftpd"
+RCONFLICTS:${PN}-tftpd += "netkit-tftpd"
+RDEPENDS:${PN}-tftpd += "xinetd"
+
+FILES:${PN}-telnetd = "${sbindir}/in.telnetd ${sysconfdir}/xinetd.d/telnet"
+FILES:${PN}-telnetd-dbg = "${sbindir}/.debug/in.telnetd"
+RCONFLICTS:${PN}-telnetd += "netkit-telnet"
+RPROVIDES:${PN}-telnetd = "telnetd"
+RDEPENDS:${PN}-telnetd += "xinetd"
+
+FILES:${PN}-inetd = "${bindir}/inetd.${BPN}"
+
+RDEPENDS:${PN} = "xinetd"
diff --git a/meta/recipes-connectivity/iproute2/iproute2.inc b/meta/recipes-connectivity/iproute2/iproute2.inc
deleted file mode 100644
index b1bcc1434c..0000000000
--- a/meta/recipes-connectivity/iproute2/iproute2.inc
+++ /dev/null
@@ -1,91 +0,0 @@
-SUMMARY = "TCP / IP networking and traffic control utilities"
-DESCRIPTION = "Iproute2 is a collection of utilities for controlling \
-TCP / IP networking and traffic control in Linux. Of the utilities ip \
-and tc are the most important. ip controls IPv4 and IPv6 \
-configuration and tc stands for traffic control."
-HOMEPAGE = "http://www.linuxfoundation.org/collaborate/workgroups/networking/iproute2"
-SECTION = "base"
-LICENSE = "GPL-2.0-or-later"
-LIC_FILES_CHKSUM = "file://COPYING;md5=eb723b61539feef013de476e68b5c50a \
- file://ip/ip.c;beginline=3;endline=8;md5=689d691d0410a4b64d3899f8d6e31817"
-
-DEPENDS = "flex-native bison-native iptables libcap"
-
-inherit update-alternatives bash-completion pkgconfig
-
-CLEANBROKEN = "1"
-
-PACKAGECONFIG ??= "tipc elf devlink"
-PACKAGECONFIG[tipc] = ",,libmnl,"
-PACKAGECONFIG[elf] = ",,elfutils,"
-PACKAGECONFIG[devlink] = ",,libmnl,"
-PACKAGECONFIG[rdma] = ",,libmnl,"
-
-IPROUTE2_MAKE_SUBDIRS = "lib tc ip bridge misc genl ${@bb.utils.filter('PACKAGECONFIG', 'devlink tipc rdma', d)}"
-
-EXTRA_OEMAKE = "\
- CC='${CC}' \
- KERNEL_INCLUDE=${STAGING_INCDIR} \
- DOCDIR=${docdir}/iproute2 \
- SUBDIRS='${IPROUTE2_MAKE_SUBDIRS}' \
- SBINDIR='${base_sbindir}' \
- LIBDIR='${libdir}' \
-"
-
-do_configure:append () {
- sh configure ${STAGING_INCDIR}
- # Explicitly disable ATM support
- sed -i -e '/TC_CONFIG_ATM/d' config.mk
-}
-
-do_install () {
- oe_runmake DESTDIR=${D} install
- mv ${D}${base_sbindir}/ip ${D}${base_sbindir}/ip.iproute2
- install -d ${D}${datadir}
- mv ${D}/share/* ${D}${datadir}/ || true
- rm ${D}/share -rf || true
-}
-
-# The .so files in iproute2-tc are modules, not traditional libraries
-INSANE_SKIP:${PN}-tc = "dev-so"
-
-IPROUTE2_PACKAGES =+ "\
- ${PN}-devlink \
- ${PN}-genl \
- ${PN}-ifstat \
- ${PN}-ip \
- ${PN}-lnstat \
- ${PN}-nstat \
- ${PN}-rtacct \
- ${PN}-ss \
- ${PN}-tc \
- ${PN}-tipc \
- ${PN}-rdma \
-"
-
-PACKAGE_BEFORE_PN = "${IPROUTE2_PACKAGES}"
-RDEPENDS:${PN} += "${PN}-ip"
-
-FILES:${PN}-tc = "${base_sbindir}/tc* \
- ${libdir}/tc/*.so"
-FILES:${PN}-lnstat = "${base_sbindir}/lnstat \
- ${base_sbindir}/ctstat \
- ${base_sbindir}/rtstat"
-FILES:${PN}-ifstat = "${base_sbindir}/ifstat"
-FILES:${PN}-ip = "${base_sbindir}/ip.${PN} ${sysconfdir}/iproute2"
-FILES:${PN}-genl = "${base_sbindir}/genl"
-FILES:${PN}-rtacct = "${base_sbindir}/rtacct"
-FILES:${PN}-nstat = "${base_sbindir}/nstat"
-FILES:${PN}-ss = "${base_sbindir}/ss"
-FILES:${PN}-tipc = "${base_sbindir}/tipc"
-FILES:${PN}-devlink = "${base_sbindir}/devlink"
-FILES:${PN}-rdma = "${base_sbindir}/rdma"
-
-ALTERNATIVE:${PN}-ip = "ip"
-ALTERNATIVE_TARGET[ip] = "${base_sbindir}/ip.${BPN}"
-ALTERNATIVE_LINK_NAME[ip] = "${base_sbindir}/ip"
-ALTERNATIVE_PRIORITY = "100"
-
-ALTERNATIVE:${PN}-tc = "tc"
-ALTERNATIVE_LINK_NAME[tc] = "${base_sbindir}/tc"
-ALTERNATIVE_PRIORITY_${PN}-tc = "100"
diff --git a/meta/recipes-connectivity/iproute2/iproute2_5.17.0.bb b/meta/recipes-connectivity/iproute2/iproute2_5.17.0.bb
deleted file mode 100644
index becbaa9190..0000000000
--- a/meta/recipes-connectivity/iproute2/iproute2_5.17.0.bb
+++ /dev/null
@@ -1,11 +0,0 @@
-require iproute2.inc
-
-SRC_URI = "${KERNELORG_MIRROR}/linux/utils/net/${BPN}/${BP}.tar.xz \
- file://0001-libc-compat.h-add-musl-workaround.patch \
- "
-
-SRC_URI[sha256sum] = "6e384f1b42c75e1a9daac57866da37dcff909090ba86eb25a6e764da7893660e"
-
-# CFLAGS are computed in Makefile and reference CCOPTS
-#
-EXTRA_OEMAKE:append = " CCOPTS='${CFLAGS}'"
diff --git a/meta/recipes-connectivity/iproute2/iproute2_6.7.0.bb b/meta/recipes-connectivity/iproute2/iproute2_6.7.0.bb
new file mode 100644
index 0000000000..8c460adf73
--- /dev/null
+++ b/meta/recipes-connectivity/iproute2/iproute2_6.7.0.bb
@@ -0,0 +1,106 @@
+SUMMARY = "TCP / IP networking and traffic control utilities"
+DESCRIPTION = "Iproute2 is a collection of utilities for controlling \
+TCP / IP networking and traffic control in Linux. Of the utilities ip \
+and tc are the most important. ip controls IPv4 and IPv6 \
+configuration and tc stands for traffic control."
+HOMEPAGE = "http://www.linuxfoundation.org/collaborate/workgroups/networking/iproute2"
+SECTION = "base"
+LICENSE = "GPL-2.0-or-later"
+LIC_FILES_CHKSUM = "file://COPYING;md5=eb723b61539feef013de476e68b5c50a \
+ "
+
+DEPENDS = "flex-native bison-native iptables libcap"
+
+SRC_URI = "${KERNELORG_MIRROR}/linux/utils/net/${BPN}/${BP}.tar.xz \
+ file://0001-libc-compat.h-add-musl-workaround.patch \
+ "
+
+SRC_URI[sha256sum] = "ff942dd9828d7d1f867f61fe72ce433078c31e5d8e4a78e20f02cb5892e8841d"
+
+inherit update-alternatives bash-completion pkgconfig
+
+PACKAGECONFIG ??= "tipc elf devlink"
+PACKAGECONFIG[tipc] = ",,libmnl,"
+PACKAGECONFIG[elf] = ",,elfutils,"
+PACKAGECONFIG[devlink] = ",,libmnl,"
+PACKAGECONFIG[rdma] = ",,libmnl,"
+PACKAGECONFIG[selinux] = ",,libselinux"
+
+IPROUTE2_MAKE_SUBDIRS = "lib tc ip bridge misc genl ${@bb.utils.filter('PACKAGECONFIG', 'devlink tipc rdma', d)}"
+
+# CFLAGS are computed in Makefile and reference CCOPTS
+#
+EXTRA_OEMAKE = "\
+ CC='${CC}' \
+ KERNEL_INCLUDE=${STAGING_INCDIR} \
+ DOCDIR=${docdir}/iproute2 \
+ SUBDIRS='${IPROUTE2_MAKE_SUBDIRS}' \
+ SBINDIR='${base_sbindir}' \
+ CONF_USR_DIR='${libdir}/iproute2' \
+ LIBDIR='${libdir}' \
+ CCOPTS='${CFLAGS}' \
+"
+
+do_configure:append () {
+ sh configure ${STAGING_INCDIR}
+ # Explicitly disable ATM support
+ sed -i -e '/TC_CONFIG_ATM/d' config.mk
+}
+
+do_install () {
+ oe_runmake DESTDIR=${D} install
+ mv ${D}${base_sbindir}/ip ${D}${base_sbindir}/ip.iproute2
+ install -d ${D}${datadir}
+ mv ${D}/share/* ${D}${datadir}/ || true
+ rm ${D}/share -rf || true
+}
+
+# The .so files in iproute2-tc are modules, not traditional libraries
+INSANE_SKIP:${PN}-tc = "dev-so"
+
+IPROUTE2_PACKAGES =+ "\
+ ${PN}-bridge \
+ ${PN}-devlink \
+ ${PN}-genl \
+ ${PN}-ifstat \
+ ${PN}-ip \
+ ${PN}-lnstat \
+ ${PN}-nstat \
+ ${PN}-routel \
+ ${PN}-rtacct \
+ ${PN}-ss \
+ ${PN}-tc \
+ ${PN}-tipc \
+ ${PN}-rdma \
+"
+
+PACKAGE_BEFORE_PN = "${IPROUTE2_PACKAGES}"
+RDEPENDS:${PN} += "${PN}-ip"
+
+FILES:${PN}-tc = "${base_sbindir}/tc* \
+ ${libdir}/tc/*.so"
+FILES:${PN}-lnstat = "${base_sbindir}/lnstat \
+ ${base_sbindir}/ctstat \
+ ${base_sbindir}/rtstat"
+FILES:${PN}-ifstat = "${base_sbindir}/ifstat"
+FILES:${PN}-ip = "${base_sbindir}/ip.* ${libdir}/iproute2"
+FILES:${PN}-genl = "${base_sbindir}/genl"
+FILES:${PN}-rtacct = "${base_sbindir}/rtacct"
+FILES:${PN}-nstat = "${base_sbindir}/nstat"
+FILES:${PN}-ss = "${base_sbindir}/ss"
+FILES:${PN}-tipc = "${base_sbindir}/tipc"
+FILES:${PN}-devlink = "${base_sbindir}/devlink"
+FILES:${PN}-rdma = "${base_sbindir}/rdma"
+FILES:${PN}-routel = "${base_sbindir}/routel"
+FILES:${PN}-bridge = "${base_sbindir}/bridge"
+
+RDEPENDS:${PN}-routel = "python3-core"
+
+ALTERNATIVE:${PN}-ip = "ip"
+ALTERNATIVE_TARGET[ip] = "${base_sbindir}/ip.${BPN}"
+ALTERNATIVE_LINK_NAME[ip] = "${base_sbindir}/ip"
+ALTERNATIVE_PRIORITY = "100"
+
+ALTERNATIVE:${PN}-tc = "tc"
+ALTERNATIVE_LINK_NAME[tc] = "${base_sbindir}/tc"
+ALTERNATIVE_PRIORITY_${PN}-tc = "100"
diff --git a/meta/recipes-connectivity/iw/iw_5.16.bb b/meta/recipes-connectivity/iw/iw_5.16.bb
deleted file mode 100644
index cf176a349f..0000000000
--- a/meta/recipes-connectivity/iw/iw_5.16.bb
+++ /dev/null
@@ -1,31 +0,0 @@
-SUMMARY = "nl80211 based CLI configuration utility for wireless devices"
-DESCRIPTION = "iw is a new nl80211 based CLI configuration utility for \
-wireless devices. It supports almost all new drivers that have been added \
-to the kernel recently. "
-HOMEPAGE = "https://wireless.wiki.kernel.org/en/users/documentation/iw"
-SECTION = "base"
-LICENSE = "BSD-2-Clause"
-LIC_FILES_CHKSUM = "file://COPYING;md5=878618a5c4af25e9b93ef0be1a93f774"
-
-DEPENDS = "libnl"
-
-SRC_URI = "http://www.kernel.org/pub/software/network/iw/${BP}.tar.gz \
- file://0001-iw-version.sh-don-t-use-git-describe-for-versioning.patch \
- file://separate-objdir.patch \
-"
-
-SRC_URI[sha256sum] = "9c91f2560b258d9660e656ad37fa5bd100ac255865dcfb26076a576b10d8f3a7"
-
-inherit pkgconfig
-
-EXTRA_OEMAKE = "\
- -f '${S}/Makefile' \
- \
- 'PREFIX=${prefix}' \
- 'SBINDIR=${sbindir}' \
- 'MANDIR=${mandir}' \
-"
-
-do_install() {
- oe_runmake 'DESTDIR=${D}' install
-}
diff --git a/meta/recipes-connectivity/iw/iw_6.7.bb b/meta/recipes-connectivity/iw/iw_6.7.bb
new file mode 100644
index 0000000000..b46b54bc93
--- /dev/null
+++ b/meta/recipes-connectivity/iw/iw_6.7.bb
@@ -0,0 +1,31 @@
+SUMMARY = "nl80211 based CLI configuration utility for wireless devices"
+DESCRIPTION = "iw is a new nl80211 based CLI configuration utility for \
+wireless devices. It supports almost all new drivers that have been added \
+to the kernel recently. "
+HOMEPAGE = "https://wireless.wiki.kernel.org/en/users/documentation/iw"
+SECTION = "base"
+LICENSE = "BSD-2-Clause"
+LIC_FILES_CHKSUM = "file://COPYING;md5=878618a5c4af25e9b93ef0be1a93f774"
+
+DEPENDS = "libnl"
+
+SRC_URI = "http://www.kernel.org/pub/software/network/iw/${BP}.tar.gz \
+ file://0001-iw-version.sh-don-t-use-git-describe-for-versioning.patch \
+ file://separate-objdir.patch \
+"
+
+SRC_URI[sha256sum] = "b3ef3fa85fa1177b11d3e97d6d38cdfe10ee250ca31482b581f3bd0fc79cb015"
+
+inherit pkgconfig
+
+EXTRA_OEMAKE = "\
+ -f '${S}/Makefile' \
+ \
+ 'PREFIX=${prefix}' \
+ 'SBINDIR=${sbindir}' \
+ 'MANDIR=${mandir}' \
+"
+
+do_install() {
+ oe_runmake 'DESTDIR=${D}' install
+}
diff --git a/meta/recipes-connectivity/kea/files/0001-kea-fix-reproducible-build-failure.patch b/meta/recipes-connectivity/kea/files/0001-kea-fix-reproducible-build-failure.patch
new file mode 100644
index 0000000000..8a5bd00302
--- /dev/null
+++ b/meta/recipes-connectivity/kea/files/0001-kea-fix-reproducible-build-failure.patch
@@ -0,0 +1,62 @@
+From f9bcfed5a1d44d9211c5f6eba403a9898c8c9057 Mon Sep 17 00:00:00 2001
+From: Sudip Mukherjee <sudipm.mukherjee@gmail.com>
+Date: Tue, 8 Aug 2023 19:03:13 +0100
+Subject: [PATCH] kea: fix reproducible build failure
+
+New version of Kea has started using path of build-dir instead of
+src-dir which results in reproducible builds failure.
+Use src-dir as is used in v2.2.0
+
+Upstream-Status: Pending
+https://gitlab.isc.org/isc-projects/kea/-/issues/3007
+
+Upstream has confirmed the patch will not be accepted but discussions
+with upstream is still going on, we might have a proper solution later.
+
+Signed-off-by: Sudip Mukherjee <sudipm.mukherjee@gmail.com>
+---
+ src/bin/admin/kea-admin.in | 8 ++++----
+ 1 file changed, 4 insertions(+), 4 deletions(-)
+
+diff --git a/src/bin/admin/kea-admin.in b/src/bin/admin/kea-admin.in
+index 034a0ee..8ab11ab 100644
+--- a/src/bin/admin/kea-admin.in
++++ b/src/bin/admin/kea-admin.in
+@@ -51,14 +51,14 @@ dump_qry=""
+ if test -f "@datarootdir@/@PACKAGE_NAME@/scripts/admin-utils.sh"; then
+ . "@datarootdir@/@PACKAGE_NAME@/scripts/admin-utils.sh"
+ else
+- . "@abs_top_builddir@/src/bin/admin/admin-utils.sh"
++ . "@abs_top_srcdir@/src/bin/admin/admin-utils.sh"
+ fi
+
+ # Find the installed kea-lfc if available. Fallback to sources otherwise.
+ if test -x "@sbindir@/kea-lfc"; then
+ kea_lfc="@sbindir@/kea-lfc"
+ else
+- kea_lfc="@abs_top_builddir@/src/bin/lfc/kea-lfc"
++ kea_lfc="@abs_top_srcdir@/src/bin/lfc/kea-lfc"
+ fi
+
+ # Prints out usage version.
+@@ -355,7 +355,7 @@ mysql_upgrade() {
+ # Check if there are any files in it
+ num_files=$(find "${upgrade_scripts_dir}" -name 'upgrade*.sh' -type f | wc -l)
+ if [ "$num_files" -eq 0 ]; then
+- upgrade_scripts_dir=@abs_top_builddir@/src/share/database/scripts/mysql
++ upgrade_scripts_dir=@abs_top_srcdir@/src/share/database/scripts/mysql
+
+ # Check if the scripts directory exists at all.
+ if [ ! -d ${upgrade_scripts_dir} ]; then
+@@ -405,7 +405,7 @@ pgsql_upgrade() {
+ # Check if there are any files in it
+ num_files=$(find "${upgrade_scripts_dir}" -name 'upgrade*.sh' -type f | wc -l)
+ if [ "$num_files" -eq 0 ]; then
+- upgrade_scripts_dir=@abs_top_builddir@/src/share/database/scripts/pgsql
++ upgrade_scripts_dir=@abs_top_srcdir@/src/share/database/scripts/pgsql
+
+ # Check if the scripts directory exists at all.
+ if [ ! -d ${upgrade_scripts_dir} ]; then
+--
+2.39.2
+
diff --git a/meta/recipes-connectivity/kea/files/fix-multilib-conflict.patch b/meta/recipes-connectivity/kea/files/fix-multilib-conflict.patch
index 78f475a495..5b135b3aee 100644
--- a/meta/recipes-connectivity/kea/files/fix-multilib-conflict.patch
+++ b/meta/recipes-connectivity/kea/files/fix-multilib-conflict.patch
@@ -1,4 +1,4 @@
-From d027b1d85a8c1a0193b6e4a00083d3038d699a59 Mon Sep 17 00:00:00 2001
+From 06ebd1b2ced426c420ed162980eca194f9f918ae Mon Sep 17 00:00:00 2001
From: Kai Kang <kai.kang@windriver.com>
Date: Tue, 22 Sep 2020 15:02:33 +0800
Subject: [PATCH] There are conflict of config files between kea and lib32-kea:
@@ -12,7 +12,7 @@ Subject: [PATCH] There are conflict of config files between kea and lib32-kea:
Because they are all commented out, replace the expanded libdir path with
'$libdir' in the config files to avoid conflict.
-Upstream-Status: Pending
+Upstream-Status: Submitted [https://gitlab.isc.org/isc-projects/kea/-/issues/2602]
Signed-off-by: Kai Kang <kai.kang@windriver.com>
---
@@ -35,10 +35,10 @@ index e6ae8b8..50a3092 100644
// "param1": "foo"
// }
diff --git a/src/bin/keactrl/kea-dhcp4.conf.pre b/src/bin/keactrl/kea-dhcp4.conf.pre
-index 26bf163..49ddb0a 100644
+index 6edb8a1..b2a7385 100644
--- a/src/bin/keactrl/kea-dhcp4.conf.pre
+++ b/src/bin/keactrl/kea-dhcp4.conf.pre
-@@ -252,7 +252,7 @@
+@@ -255,7 +255,7 @@
// // of all devices serviced by Kea, including their identifiers
// // (like MAC address), their location in the network, times
// // when they were active etc.
@@ -47,7 +47,7 @@ index 26bf163..49ddb0a 100644
// "parameters": {
// "path": "/var/lib/kea",
// "base-name": "kea-forensic4"
-@@ -269,7 +269,7 @@
+@@ -272,7 +272,7 @@
// // of specific options or perhaps even a combination of several
// // options and fields to uniquely identify a client. Those scenarios
// // are addressed by the Flexible Identifiers hook application.
diff --git a/meta/recipes-connectivity/kea/files/fix_pid_keactrl.patch b/meta/recipes-connectivity/kea/files/fix_pid_keactrl.patch
index b7c2fd4f0d..63a6a2805b 100644
--- a/meta/recipes-connectivity/kea/files/fix_pid_keactrl.patch
+++ b/meta/recipes-connectivity/kea/files/fix_pid_keactrl.patch
@@ -1,4 +1,4 @@
-From 18f4f6206c248d6169aa67b3ecf16bf54e9292e8 Mon Sep 17 00:00:00 2001
+From c878a356712606549f7f188b62f7d1cae08a176e Mon Sep 17 00:00:00 2001
From: Armin kuster <akuster808@gmail.com>
Date: Wed, 14 Oct 2020 22:48:31 -0700
Subject: [PATCH] Busybox does not support ps -p so use pgrep
@@ -13,10 +13,10 @@ Signed-off-by: Armin kuster <akuster808@gmail.com>
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/src/bin/keactrl/keactrl.in b/src/bin/keactrl/keactrl.in
-index ae5bd8e..e9f9b73 100644
+index 450e997..c353ca9 100644
--- a/src/bin/keactrl/keactrl.in
+++ b/src/bin/keactrl/keactrl.in
-@@ -151,8 +151,8 @@ check_running() {
+@@ -149,8 +149,8 @@ check_running() {
# Get the PID from the PID file (if it exists)
get_pid_from_file "${proc_name}"
if [ ${_pid} -gt 0 ]; then
diff --git a/meta/recipes-connectivity/kea/files/kea-dhcp-ddns.service b/meta/recipes-connectivity/kea/files/kea-dhcp-ddns.service
index 91aa2eb14f..f6059d73cb 100644
--- a/meta/recipes-connectivity/kea/files/kea-dhcp-ddns.service
+++ b/meta/recipes-connectivity/kea/files/kea-dhcp-ddns.service
@@ -6,7 +6,6 @@ After=time-sync.target
[Service]
ExecStartPre=@BASE_BINDIR@/mkdir -p @LOCALSTATEDIR@/run/kea/
-ExecStartPre=@BASE_BINDIR@/mkdir -p @LOCALSTATEDIR@/kea
ExecStart=@SBINDIR@/kea-dhcp-ddns -c @SYSCONFDIR@/kea/kea-dhcp-ddns.conf
[Install]
diff --git a/meta/recipes-connectivity/kea/kea_2.0.2.bb b/meta/recipes-connectivity/kea/kea_2.0.2.bb
deleted file mode 100644
index 13da1f858d..0000000000
--- a/meta/recipes-connectivity/kea/kea_2.0.2.bb
+++ /dev/null
@@ -1,77 +0,0 @@
-SUMMARY = "ISC Kea DHCP Server"
-DESCRIPTION = "Kea is the next generation of DHCP software developed by ISC. It supports both DHCPv4 and DHCPv6 protocols along with their extensions, e.g. prefix delegation and dynamic updates to DNS."
-HOMEPAGE = "http://kea.isc.org"
-SECTION = "connectivity"
-LICENSE = "MPL-2.0 & Apache-2.0"
-LIC_FILES_CHKSUM = "file://COPYING;md5=b4ecee995eeb6780a17dd7e539e97abc"
-
-DEPENDS = "boost log4cplus openssl"
-
-SRC_URI = "http://ftp.isc.org/isc/kea/${PV}/${BP}.tar.gz \
- file://kea-dhcp4.service \
- file://kea-dhcp6.service \
- file://kea-dhcp-ddns.service \
- file://kea-dhcp4-server \
- file://kea-dhcp6-server \
- file://kea-dhcp-ddns-server \
- file://fix-multilib-conflict.patch \
- file://fix_pid_keactrl.patch \
- file://0001-src-lib-log-logger_unittest_support.cc-do-not-write-.patch \
- "
-SRC_URI[sha256sum] = "8d28213bdc8e2bb870a383b30ac1e53d54e1eba43d2f86e5151b08b66aa6cf32"
-
-inherit autotools systemd update-rc.d upstream-version-is-even
-
-INITSCRIPT_NAME = "kea-dhcp4-server"
-INITSCRIPT_PARAMS = "defaults 30"
-
-SYSTEMD_SERVICE:${PN} = "kea-dhcp4.service kea-dhcp6.service kea-dhcp-ddns.service"
-SYSTEMD_AUTO_ENABLE = "disable"
-
-DEBUG_OPTIMIZATION:remove:mips = " -Og"
-DEBUG_OPTIMIZATION:append:mips = " -O"
-BUILD_OPTIMIZATION:remove:mips = " -Og"
-BUILD_OPTIMIZATION:append:mips = " -O"
-
-DEBUG_OPTIMIZATION:remove:mipsel = " -Og"
-DEBUG_OPTIMIZATION:append:mipsel = " -O"
-BUILD_OPTIMIZATION:remove:mipsel = " -Og"
-BUILD_OPTIMIZATION:append:mipsel = " -O"
-
-EXTRA_OECONF = "--with-boost-libs=-lboost_system \
- --with-log4cplus=${STAGING_DIR_TARGET}${prefix} \
- --with-openssl=${STAGING_DIR_TARGET}${prefix}"
-
-do_configure:prepend() {
- # replace abs_top_builddir to avoid introducing the build path
- # don't expand the abs_top_builddir on the target as the abs_top_builddir is meanlingless on the target
- find ${S} -type f -name *.sh.in | xargs sed -i "s:@abs_top_builddir@:@abs_top_builddir_placeholder@:g"
- sed -i "s:@abs_top_srcdir@:@abs_top_srcdir_placeholder@:g" ${S}/src/bin/admin/kea-admin.in
-}
-
-# patch out build host paths for reproducibility
-do_compile:prepend:class-target() {
- sed -i -e "s,${WORKDIR},,g" ${B}/config.report
-}
-
-do_install:append() {
- install -d ${D}${sysconfdir}/init.d
- install -d ${D}${systemd_system_unitdir}
-
- install -m 0644 ${WORKDIR}/kea-dhcp*service ${D}${systemd_system_unitdir}
- install -m 0755 ${WORKDIR}/kea-*-server ${D}${sysconfdir}/init.d
- sed -i -e 's,@SBINDIR@,${sbindir},g' -e 's,@BASE_BINDIR@,${base_bindir},g' \
- -e 's,@LOCALSTATEDIR@,${localstatedir},g' -e 's,@SYSCONFDIR@,${sysconfdir},g' \
- ${D}${systemd_system_unitdir}/kea-dhcp*service ${D}${sbindir}/keactrl
-}
-
-do_install:append() {
- rm -rf "${D}${localstatedir}"
-}
-
-CONFFILES:${PN} = "${sysconfdir}/kea/keactrl.conf"
-
-FILES:${PN}-staticdev += "${libdir}/kea/hooks/*.a ${libdir}/hooks/*.a"
-FILES:${PN} += "${libdir}/hooks/*.so"
-
-PARALLEL_MAKEINST = ""
diff --git a/meta/recipes-connectivity/kea/kea_2.4.1.bb b/meta/recipes-connectivity/kea/kea_2.4.1.bb
new file mode 100644
index 0000000000..c3aa4dc8f0
--- /dev/null
+++ b/meta/recipes-connectivity/kea/kea_2.4.1.bb
@@ -0,0 +1,78 @@
+SUMMARY = "ISC Kea DHCP Server"
+DESCRIPTION = "Kea is the next generation of DHCP software developed by ISC. It supports both DHCPv4 and DHCPv6 protocols along with their extensions, e.g. prefix delegation and dynamic updates to DNS."
+HOMEPAGE = "http://kea.isc.org"
+SECTION = "connectivity"
+LICENSE = "MPL-2.0"
+LIC_FILES_CHKSUM = "file://COPYING;md5=ea061fa0188838072c4248c1318ec131"
+
+DEPENDS = "boost log4cplus openssl"
+
+SRC_URI = "http://ftp.isc.org/isc/kea/${PV}/${BP}.tar.gz \
+ file://kea-dhcp4.service \
+ file://kea-dhcp6.service \
+ file://kea-dhcp-ddns.service \
+ file://kea-dhcp4-server \
+ file://kea-dhcp6-server \
+ file://kea-dhcp-ddns-server \
+ file://fix-multilib-conflict.patch \
+ file://fix_pid_keactrl.patch \
+ file://0001-src-lib-log-logger_unittest_support.cc-do-not-write-.patch \
+ file://0001-kea-fix-reproducible-build-failure.patch \
+ "
+SRC_URI[sha256sum] = "815c61f5c271caa4a1db31dd656eb50a7f6ea973da3690f7c8581408e180131a"
+
+inherit autotools systemd update-rc.d upstream-version-is-even
+
+INITSCRIPT_NAME = "kea-dhcp4-server"
+INITSCRIPT_PARAMS = "defaults 30"
+
+SYSTEMD_SERVICE:${PN} = "kea-dhcp4.service kea-dhcp6.service kea-dhcp-ddns.service"
+SYSTEMD_AUTO_ENABLE = "disable"
+
+DEBUG_OPTIMIZATION:remove:mips = " -Og"
+DEBUG_OPTIMIZATION:append:mips = " -O"
+BUILD_OPTIMIZATION:remove:mips = " -Og"
+BUILD_OPTIMIZATION:append:mips = " -O"
+
+DEBUG_OPTIMIZATION:remove:mipsel = " -Og"
+DEBUG_OPTIMIZATION:append:mipsel = " -O"
+BUILD_OPTIMIZATION:remove:mipsel = " -Og"
+BUILD_OPTIMIZATION:append:mipsel = " -O"
+
+EXTRA_OECONF = "--with-boost-libs=-lboost_system \
+ --with-log4cplus=${STAGING_DIR_TARGET}${prefix} \
+ --with-openssl=${STAGING_DIR_TARGET}${prefix}"
+
+do_configure:prepend() {
+ # replace abs_top_builddir to avoid introducing the build path
+ # don't expand the abs_top_builddir on the target as the abs_top_builddir is meanlingless on the target
+ find ${S} -type f -name *.sh.in | xargs sed -i "s:@abs_top_builddir@:@abs_top_builddir_placeholder@:g"
+ sed -i "s:@abs_top_srcdir@:@abs_top_srcdir_placeholder@:g" ${S}/src/bin/admin/kea-admin.in
+}
+
+# patch out build host paths for reproducibility
+do_compile:prepend:class-target() {
+ sed -i -e "s,${WORKDIR},,g" ${B}/config.report
+}
+
+do_install:append() {
+ install -d ${D}${sysconfdir}/init.d
+ install -d ${D}${systemd_system_unitdir}
+
+ install -m 0644 ${WORKDIR}/kea-dhcp*service ${D}${systemd_system_unitdir}
+ install -m 0755 ${WORKDIR}/kea-*-server ${D}${sysconfdir}/init.d
+ sed -i -e 's,@SBINDIR@,${sbindir},g' -e 's,@BASE_BINDIR@,${base_bindir},g' \
+ -e 's,@LOCALSTATEDIR@,${localstatedir},g' -e 's,@SYSCONFDIR@,${sysconfdir},g' \
+ ${D}${systemd_system_unitdir}/kea-dhcp*service ${D}${sbindir}/keactrl
+}
+
+do_install:append() {
+ rm -rf "${D}${localstatedir}"
+}
+
+CONFFILES:${PN} = "${sysconfdir}/kea/keactrl.conf"
+
+FILES:${PN}-staticdev += "${libdir}/kea/hooks/*.a ${libdir}/hooks/*.a"
+FILES:${PN} += "${libdir}/hooks/*.so"
+
+PARALLEL_MAKEINST = ""
diff --git a/meta/recipes-connectivity/libpcap/libpcap_1.10.1.bb b/meta/recipes-connectivity/libpcap/libpcap_1.10.1.bb
deleted file mode 100644
index dbe2fd8157..0000000000
--- a/meta/recipes-connectivity/libpcap/libpcap_1.10.1.bb
+++ /dev/null
@@ -1,43 +0,0 @@
-SUMMARY = "Interface for user-level network packet capture"
-DESCRIPTION = "Libpcap provides a portable framework for low-level network \
-monitoring. Libpcap can provide network statistics collection, \
-security monitoring and network debugging."
-HOMEPAGE = "http://www.tcpdump.org/"
-BUGTRACKER = "http://sourceforge.net/tracker/?group_id=53067&atid=469577"
-SECTION = "libs/network"
-LICENSE = "BSD-3-Clause"
-LIC_FILES_CHKSUM = "file://LICENSE;md5=5eb289217c160e2920d2e35bddc36453 \
- file://pcap.h;beginline=1;endline=32;md5=39af3510e011f34b8872f120b1dc31d2"
-DEPENDS = "flex-native bison-native"
-
-SRC_URI = "https://www.tcpdump.org/release/${BP}.tar.gz"
-SRC_URI[sha256sum] = "ed285f4accaf05344f90975757b3dbfe772ba41d1c401c2648b7fa45b711bdd4"
-
-inherit autotools binconfig-disabled pkgconfig
-
-BINCONFIG = "${bindir}/pcap-config"
-
-# Explicitly disable dag support. We don't have recipe for it and if enabled here,
-# configure script poisons the include dirs with /usr/local/include even when the
-# support hasn't been detected. Do the same thing for DPDK.
-EXTRA_OECONF = " \
- --with-pcap=linux \
- --without-dag \
- --without-dpdk \
- "
-EXTRA_AUTORECONF += "--exclude=aclocal"
-
-PACKAGECONFIG ??= "${@bb.utils.contains('DISTRO_FEATURES', 'bluetooth', 'bluez5', '', d)} \
- ${@bb.utils.filter('DISTRO_FEATURES', 'ipv6', d)} \
-"
-PACKAGECONFIG[bluez5] = "--enable-bluetooth,--disable-bluetooth,bluez5"
-PACKAGECONFIG[dbus] = "--enable-dbus,--disable-dbus,dbus"
-PACKAGECONFIG[ipv6] = "--enable-ipv6,--disable-ipv6,"
-PACKAGECONFIG[libnl] = "--with-libnl,--without-libnl,libnl"
-
-do_configure:prepend () {
- #remove hardcoded references to /usr/include
- sed 's|\([ "^'\''I]\+\)/usr/include/|\1${STAGING_INCDIR}/|g' -i ${S}/configure.ac
-}
-
-BBCLASSEXTEND = "native"
diff --git a/meta/recipes-connectivity/libpcap/libpcap_1.10.4.bb b/meta/recipes-connectivity/libpcap/libpcap_1.10.4.bb
new file mode 100644
index 0000000000..166654e280
--- /dev/null
+++ b/meta/recipes-connectivity/libpcap/libpcap_1.10.4.bb
@@ -0,0 +1,43 @@
+SUMMARY = "Interface for user-level network packet capture"
+DESCRIPTION = "Libpcap provides a portable framework for low-level network \
+monitoring. Libpcap can provide network statistics collection, \
+security monitoring and network debugging."
+HOMEPAGE = "http://www.tcpdump.org/"
+BUGTRACKER = "http://sourceforge.net/tracker/?group_id=53067&atid=469577"
+SECTION = "libs/network"
+LICENSE = "BSD-3-Clause"
+LIC_FILES_CHKSUM = "file://LICENSE;md5=5eb289217c160e2920d2e35bddc36453 \
+ file://pcap.h;beginline=1;endline=32;md5=39af3510e011f34b8872f120b1dc31d2"
+DEPENDS = "flex-native bison-native"
+
+SRC_URI = "https://www.tcpdump.org/release/${BP}.tar.gz"
+SRC_URI[sha256sum] = "ed19a0383fad72e3ad435fd239d7cd80d64916b87269550159d20e47160ebe5f"
+
+inherit autotools binconfig-disabled pkgconfig
+
+BINCONFIG = "${bindir}/pcap-config"
+
+# Explicitly disable dag support. We don't have recipe for it and if enabled here,
+# configure script poisons the include dirs with /usr/local/include even when the
+# support hasn't been detected. Do the same thing for DPDK.
+EXTRA_OECONF = " \
+ --with-pcap=linux \
+ --without-dag \
+ --without-dpdk \
+ "
+EXTRA_AUTORECONF += "--exclude=aclocal"
+
+PACKAGECONFIG ??= "${@bb.utils.contains('DISTRO_FEATURES', 'bluetooth', 'bluez5', '', d)} \
+ ${@bb.utils.filter('DISTRO_FEATURES', 'ipv6', d)} \
+"
+PACKAGECONFIG[bluez5] = "--enable-bluetooth,--disable-bluetooth,bluez5"
+PACKAGECONFIG[dbus] = "--enable-dbus,--disable-dbus,dbus"
+PACKAGECONFIG[ipv6] = "--enable-ipv6,--disable-ipv6,"
+PACKAGECONFIG[libnl] = "--with-libnl,--without-libnl,libnl"
+
+do_configure:prepend () {
+ #remove hardcoded references to /usr/include
+ sed 's|\([ "^'\''I]\+\)/usr/include/|\1${STAGING_INCDIR}/|g' -i ${S}/configure.ac
+}
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-connectivity/libuv/libuv_1.44.1.bb b/meta/recipes-connectivity/libuv/libuv_1.44.1.bb
deleted file mode 100644
index 4c96d80a65..0000000000
--- a/meta/recipes-connectivity/libuv/libuv_1.44.1.bb
+++ /dev/null
@@ -1,21 +0,0 @@
-SUMMARY = "A multi-platform support library with a focus on asynchronous I/O"
-HOMEPAGE = "https://github.com/libuv/libuv"
-DESCRIPTION = "libuv is a multi-platform support library with a focus on asynchronous I/O. It was primarily developed for use by Node.js, but it's also used by Luvit, Julia, pyuv, and others."
-BUGTRACKER = "https://github.com/libuv/libuv/issues"
-LICENSE = "MIT"
-LIC_FILES_CHKSUM = "file://LICENSE;md5=ad93ca1fffe931537fcf64f6fcce084d"
-
-SRCREV = "e8b7eb6908a847ffbe6ab2eec7428e43a0aa53a2"
-SRC_URI = "git://github.com/libuv/libuv;branch=v1.x;protocol=https"
-UPSTREAM_CHECK_GITTAGREGEX = "v(?P<pver>\d+(\.\d+)+)"
-
-S = "${WORKDIR}/git"
-
-inherit autotools
-
-do_configure() {
- ${S}/autogen.sh || bbnote "${PN} failed to autogen.sh"
- oe_runconf
-}
-
-BBCLASSEXTEND = "native"
diff --git a/meta/recipes-connectivity/libuv/libuv_1.48.0.bb b/meta/recipes-connectivity/libuv/libuv_1.48.0.bb
new file mode 100644
index 0000000000..87a2c22a7c
--- /dev/null
+++ b/meta/recipes-connectivity/libuv/libuv_1.48.0.bb
@@ -0,0 +1,22 @@
+SUMMARY = "A multi-platform support library with a focus on asynchronous I/O"
+HOMEPAGE = "https://github.com/libuv/libuv"
+DESCRIPTION = "libuv is a multi-platform support library with a focus on asynchronous I/O. It was primarily developed for use by Node.js, but it's also used by Luvit, Julia, pyuv, and others."
+BUGTRACKER = "https://github.com/libuv/libuv/issues"
+LICENSE = "MIT"
+LIC_FILES_CHKSUM = "file://LICENSE;md5=74b6f2f7818a4e3a80d03556f71b129b \
+ file://LICENSE-extra;md5=f9307417749e19bd1d6d68a394b49324"
+
+SRCREV = "e9f29cb984231524e3931aa0ae2c5dae1a32884e"
+SRC_URI = "git://github.com/libuv/libuv.git;branch=v1.x;protocol=https"
+UPSTREAM_CHECK_GITTAGREGEX = "v(?P<pver>\d+(\.\d+)+)"
+
+S = "${WORKDIR}/git"
+
+inherit autotools
+
+do_configure() {
+ ${S}/autogen.sh || bbnote "${PN} failed to autogen.sh"
+ oe_runconf
+}
+
+BBCLASSEXTEND = "native"
diff --git a/meta/recipes-connectivity/mobile-broadband-provider-info/mobile-broadband-provider-info_git.bb b/meta/recipes-connectivity/mobile-broadband-provider-info/mobile-broadband-provider-info_git.bb
index e6f216e5cb..a4030b7b32 100644
--- a/meta/recipes-connectivity/mobile-broadband-provider-info/mobile-broadband-provider-info_git.bb
+++ b/meta/recipes-connectivity/mobile-broadband-provider-info/mobile-broadband-provider-info_git.bb
@@ -5,8 +5,8 @@ SECTION = "network"
LICENSE = "PD"
LIC_FILES_CHKSUM = "file://COPYING;md5=87964579b2a8ece4bc6744d2dc9a8b04"
-SRCREV = "3d5c8d0f7e0264768a2c000d0fd4b4d4a991e041"
-PV = "20220511"
+SRCREV = "aae7c68671d225e6d35224613d5b98192b9b2ffe"
+PV = "20230416"
PE = "1"
SRC_URI = "git://gitlab.gnome.org/GNOME/mobile-broadband-provider-info.git;protocol=https;branch=main"
diff --git a/meta/recipes-connectivity/neard/neard_0.18.bb b/meta/recipes-connectivity/neard/neard_0.18.bb
deleted file mode 100644
index 23e999acc4..0000000000
--- a/meta/recipes-connectivity/neard/neard_0.18.bb
+++ /dev/null
@@ -1,51 +0,0 @@
-SUMMARY = "Linux NFC daemon"
-DESCRIPTION = "A daemon for the Linux Near Field Communication stack"
-HOMEPAGE = "http://01.org/linux-nfc"
-LICENSE = "GPL-2.0-only"
-LIC_FILES_CHKSUM = "file://COPYING;md5=12f884d2ae1ff87c09e5b7ccc2c4ca7e \
- file://src/near.h;beginline=1;endline=20;md5=358e4deefef251a4761e1ffacc965d13 \
- "
-
-DEPENDS = "dbus glib-2.0 libnl"
-
-SRC_URI = "git://git.kernel.org/pub/scm/network/nfc/neard.git;protocol=git;branch=master \
- file://neard.in \
- file://Makefile.am-fix-parallel-issue.patch \
- file://Makefile.am-do-not-ship-version.h.patch \
- file://0001-Add-header-dependency-to-nciattach.o.patch \
- "
-
-SRCREV = "c781008d3786e03173f0a0f5dfcc0545c787d7fc"
-
-S = "${WORKDIR}/git"
-
-inherit autotools pkgconfig systemd update-rc.d
-
-PACKAGECONFIG ??= "${@bb.utils.filter('DISTRO_FEATURES', 'systemd', d)}"
-
-PACKAGECONFIG[systemd] = "--enable-systemd --with-systemdsystemunitdir=${systemd_system_unitdir}/ --with-systemduserunitdir=${systemd_unitdir}/user/,--disable-systemd"
-
-EXTRA_OECONF += "--enable-tools"
-
-# This would copy neard start-stop shell and test scripts
-do_install:append() {
- if ${@bb.utils.contains('DISTRO_FEATURES', 'sysvinit', 'true', 'false', d)}; then
- install -d ${D}${sysconfdir}/init.d/
- sed "s:@installpath@:${libexecdir}/nfc:" ${WORKDIR}/neard.in \
- > ${D}${sysconfdir}/init.d/neard
- chmod 0755 ${D}${sysconfdir}/init.d/neard
- fi
-}
-
-RDEPENDS:${PN} = "dbus"
-
-# Bluez & Wifi are not mandatory except for handover
-RRECOMMENDS:${PN} = "\
- ${@bb.utils.contains('DISTRO_FEATURES', 'bluetooth', 'bluez5', '', d)} \
- ${@bb.utils.contains('DISTRO_FEATURES', 'wifi','wpa-supplicant', '', d)} \
- "
-
-INITSCRIPT_NAME = "neard"
-INITSCRIPT_PARAMS = "defaults 64"
-
-SYSTEMD_SERVICE:${PN} = "neard.service"
diff --git a/meta/recipes-connectivity/neard/neard_0.19.bb b/meta/recipes-connectivity/neard/neard_0.19.bb
new file mode 100644
index 0000000000..a98f436b98
--- /dev/null
+++ b/meta/recipes-connectivity/neard/neard_0.19.bb
@@ -0,0 +1,51 @@
+SUMMARY = "Linux NFC daemon"
+DESCRIPTION = "A daemon for the Linux Near Field Communication stack"
+HOMEPAGE = "http://01.org/linux-nfc"
+LICENSE = "GPL-2.0-only"
+LIC_FILES_CHKSUM = "file://COPYING;md5=12f884d2ae1ff87c09e5b7ccc2c4ca7e \
+ file://src/near.h;beginline=1;endline=20;md5=358e4deefef251a4761e1ffacc965d13 \
+ "
+
+DEPENDS = "dbus glib-2.0 libnl autoconf-archive-native"
+
+SRC_URI = "git://git.kernel.org/pub/scm/network/nfc/neard.git;protocol=https;branch=master \
+ file://neard.in \
+ file://Makefile.am-fix-parallel-issue.patch \
+ file://Makefile.am-do-not-ship-version.h.patch \
+ file://0001-Add-header-dependency-to-nciattach.o.patch \
+ "
+
+SRCREV = "a1dc8a75cba999728e154a0f811ab9dd50c809f7"
+
+S = "${WORKDIR}/git"
+
+inherit autotools pkgconfig systemd update-rc.d
+
+PACKAGECONFIG ??= "${@bb.utils.filter('DISTRO_FEATURES', 'systemd', d)}"
+
+PACKAGECONFIG[systemd] = "--enable-systemd --with-systemdsystemunitdir=${systemd_system_unitdir}/ --with-systemduserunitdir=${systemd_unitdir}/user/,--disable-systemd"
+
+EXTRA_OECONF += "--enable-tools"
+
+# This would copy neard start-stop shell and test scripts
+do_install:append() {
+ if ${@bb.utils.contains('DISTRO_FEATURES', 'sysvinit', 'true', 'false', d)}; then
+ install -d ${D}${sysconfdir}/init.d/
+ sed "s:@installpath@:${libexecdir}/nfc:" ${WORKDIR}/neard.in \
+ > ${D}${sysconfdir}/init.d/neard
+ chmod 0755 ${D}${sysconfdir}/init.d/neard
+ fi
+}
+
+RDEPENDS:${PN} = "dbus"
+
+# Bluez & Wifi are not mandatory except for handover
+RRECOMMENDS:${PN} = "\
+ ${@bb.utils.contains('DISTRO_FEATURES', 'bluetooth', 'bluez5', '', d)} \
+ ${@bb.utils.contains('DISTRO_FEATURES', 'wifi','wpa-supplicant', '', d)} \
+ "
+
+INITSCRIPT_NAME = "neard"
+INITSCRIPT_PARAMS = "defaults 64"
+
+SYSTEMD_SERVICE:${PN} = "neard.service"
diff --git a/meta/recipes-connectivity/nfs-utils/nfs-utils/0001-locktest-Makefile.am-Do-not-use-build-flags.patch b/meta/recipes-connectivity/nfs-utils/nfs-utils/0001-locktest-Makefile.am-Do-not-use-build-flags.patch
new file mode 100644
index 0000000000..351407ddcd
--- /dev/null
+++ b/meta/recipes-connectivity/nfs-utils/nfs-utils/0001-locktest-Makefile.am-Do-not-use-build-flags.patch
@@ -0,0 +1,36 @@
+From 9efa7a0d37665d9bb0f46d2407883a5ab42c2b84 Mon Sep 17 00:00:00 2001
+From: Khem Raj <raj.khem@gmail.com>
+Date: Mon, 24 Jul 2023 20:39:16 -0700
+Subject: [PATCH] locktest: Makefile.am: Do not use build flags
+
+Using CFLAGS_FOR_BUILD etc. here means it is using wrong flags
+when thse flags are speficied different than target flags which
+is common when cross-building. It can pass wrong paths to linker
+and it would find incompatible libraries during link since they
+are from host system and target maybe not same as build host.
+
+Fixes subtle errors like
+| aarch64-yoe-linux-ld.lld: error: /mnt/b/yoe/master/build/tmp/work/cortexa72-cortexa53-crypto-yoe-linux/nfs-utils/2.6.3-r0/recipe-sysroot-native/usr/lib/libsqlite3.so is incompatible with elf64-littleaarch64
+
+Upstream-Status: Submitted [https://marc.info/?l=linux-nfs&m=169025681008001&w=2]
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+---
+ tools/locktest/Makefile.am | 3 ---
+ 1 file changed, 3 deletions(-)
+
+diff --git a/tools/locktest/Makefile.am b/tools/locktest/Makefile.am
+index e8914655..2fd36971 100644
+--- a/tools/locktest/Makefile.am
++++ b/tools/locktest/Makefile.am
+@@ -2,8 +2,5 @@
+
+ noinst_PROGRAMS = testlk
+ testlk_SOURCES = testlk.c
+-testlk_CFLAGS=$(CFLAGS_FOR_BUILD)
+-testlk_CPPFLAGS=$(CPPFLAGS_FOR_BUILD)
+-testlk_LDFLAGS=$(LDFLAGS_FOR_BUILD)
+
+ MAINTAINERCLEANFILES = Makefile.in
+--
+2.41.0
+
diff --git a/meta/recipes-connectivity/nfs-utils/nfs-utils/0001-reexport.h-Include-unistd.h-to-compile-with-musl.patch b/meta/recipes-connectivity/nfs-utils/nfs-utils/0001-reexport.h-Include-unistd.h-to-compile-with-musl.patch
new file mode 100644
index 0000000000..57d4660571
--- /dev/null
+++ b/meta/recipes-connectivity/nfs-utils/nfs-utils/0001-reexport.h-Include-unistd.h-to-compile-with-musl.patch
@@ -0,0 +1,34 @@
+From 45597a58e98f351b18db8444292b1cf6dd0cd810 Mon Sep 17 00:00:00 2001
+From: Robert Yang <liezhi.yang@windriver.com>
+Date: Sat, 9 Dec 2023 23:34:08 -0800
+Subject: [PATCH] reexport.h: Include unistd.h to compile with musl
+
+Fixed error when compile with musl
+reexport.c: In function 'reexpdb_init':
+reexport.c:62:17: error: implicit declaration of function 'sleep' [-Werror=implicit-function-declaration]
+ 62 | sleep(1);
+
+
+Upstream-Status: Submitted [https://marc.info/?l=linux-nfs&m=170254661824522&w=2]
+
+Signed-off-by: Robert Yang <liezhi.yang@windriver.com>
+---
+ support/reexport/reexport.h | 1 +
+ 1 files changed, 1 insertions(+)
+
+diff --git a/support/reexport/reexport.h b/support/reexport/reexport.h
+index 85fd59c..02f8684 100644
+--- a/support/reexport/reexport.h
++++ b/support/reexport/reexport.h
+@@ -1,6 +1,8 @@
+ #ifndef REEXPORT_H
+ #define REEXPORT_H
+
++#include <unistd.h>
++
+ #include "nfslib.h"
+
+ enum {
+--
+2.42.0
+
diff --git a/meta/recipes-connectivity/nfs-utils/nfs-utils/0001-tools-locktest-Use-intmax_t-to-print-off_t.patch b/meta/recipes-connectivity/nfs-utils/nfs-utils/0001-tools-locktest-Use-intmax_t-to-print-off_t.patch
new file mode 100644
index 0000000000..7d903e04bc
--- /dev/null
+++ b/meta/recipes-connectivity/nfs-utils/nfs-utils/0001-tools-locktest-Use-intmax_t-to-print-off_t.patch
@@ -0,0 +1,53 @@
+From e2e9251dbeb452f5382179023d8ae18b511167a1 Mon Sep 17 00:00:00 2001
+From: Khem Raj <raj.khem@gmail.com>
+Date: Tue, 25 Jul 2023 23:47:08 -0700
+Subject: [PATCH] tools/locktest: Use intmax_t to print off_t
+
+off_t could be 64bit on 32bit architectures which means using %z printf
+modifier is not enough to print it and compiler will complain about
+format mismatch
+
+Fixes
+| testlk.c:84:66: error: format '%zd' expects argument of type 'signed size_t', but argument 4 has type '__off64_t' {aka 'long long int'} [-Werror=format=]
+| 84 | printf("%s: conflicting lock by %d on (%zd;%zd)\n",
+| | ~~^
+| | |
+| | int
+| | %lld
+| 85 | fname, fl.l_pid, fl.l_start, fl.l_len);
+| | ~~~~~~~~~~
+| | |
+| | __off64_t {aka long long int}
+
+Upstream-Status: Submitted [https://marc.info/?l=linux-nfs&m=169035457128067&w=2]
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+---
+ tools/locktest/testlk.c | 5 +++--
+ 1 file changed, 3 insertions(+), 2 deletions(-)
+
+diff --git a/tools/locktest/testlk.c b/tools/locktest/testlk.c
+index ea51f788..9d4c88c4 100644
+--- a/tools/locktest/testlk.c
++++ b/tools/locktest/testlk.c
+@@ -2,6 +2,7 @@
+ #include <config.h>
+ #endif
+
++#include <stdint.h>
+ #include <stdlib.h>
+ #include <stdio.h>
+ #include <unistd.h>
+@@ -81,8 +82,8 @@ main(int argc, char **argv)
+ if (fl.l_type == F_UNLCK) {
+ printf("%s: no conflicting lock\n", fname);
+ } else {
+- printf("%s: conflicting lock by %d on (%zd;%zd)\n",
+- fname, fl.l_pid, fl.l_start, fl.l_len);
++ printf("%s: conflicting lock by %d on (%jd;%jd)\n",
++ fname, fl.l_pid, (intmax_t)fl.l_start, (intmax_t)fl.l_len);
+ }
+ return 0;
+ }
+--
+2.41.0
+
diff --git a/meta/recipes-connectivity/nfs-utils/nfs-utils/nfs-mountd.service b/meta/recipes-connectivity/nfs-utils/nfs-utils/nfs-mountd.service
index c01415de84..ebfe64b9ce 100644
--- a/meta/recipes-connectivity/nfs-utils/nfs-utils/nfs-mountd.service
+++ b/meta/recipes-connectivity/nfs-utils/nfs-utils/nfs-mountd.service
@@ -12,6 +12,7 @@ ConditionPathExists=@SYSCONFDIR@/exports
EnvironmentFile=-@SYSCONFDIR@/nfs-utils.conf
ExecStart=@SBINDIR@/rpc.mountd -F $MOUNTD_OPTS
LimitNOFILE=@HIGH_RLIMIT_NOFILE@
+StateDirectory=nfs
[Install]
WantedBy=multi-user.target
diff --git a/meta/recipes-connectivity/nfs-utils/nfs-utils/nfs-server.service b/meta/recipes-connectivity/nfs-utils/nfs-utils/nfs-server.service
index 5c845b7e82..15ceee04d0 100644
--- a/meta/recipes-connectivity/nfs-utils/nfs-utils/nfs-server.service
+++ b/meta/recipes-connectivity/nfs-utils/nfs-utils/nfs-server.service
@@ -18,6 +18,7 @@ ExecStopPost=@SBINDIR@/exportfs -au
ExecStopPost=@SBINDIR@/exportfs -f
ExecReload=@SBINDIR@/exportfs -r
RemainAfterExit=yes
+StateDirectory=nfs
[Install]
WantedBy=multi-user.target
diff --git a/meta/recipes-connectivity/nfs-utils/nfs-utils/nfs-statd.service b/meta/recipes-connectivity/nfs-utils/nfs-utils/nfs-statd.service
index 4fa64e1998..b519194121 100644
--- a/meta/recipes-connectivity/nfs-utils/nfs-utils/nfs-statd.service
+++ b/meta/recipes-connectivity/nfs-utils/nfs-utils/nfs-statd.service
@@ -4,11 +4,13 @@ DefaultDependencies=no
Conflicts=umount.target
Requires=nss-lookup.target rpcbind.service
After=network.target nss-lookup.target rpcbind.service
+ConditionPathExists=@SYSCONFDIR@/exports
[Service]
EnvironmentFile=-@SYSCONFDIR@/nfs-utils.conf
ExecStart=@SBINDIR@/rpc.statd -F $STATD_OPTS
LimitNOFILE=@HIGH_RLIMIT_NOFILE@
+StateDirectory=nfs
[Install]
WantedBy=multi-user.target
diff --git a/meta/recipes-connectivity/nfs-utils/nfs-utils_2.6.1.bb b/meta/recipes-connectivity/nfs-utils/nfs-utils_2.6.1.bb
deleted file mode 100644
index bbed5aea59..0000000000
--- a/meta/recipes-connectivity/nfs-utils/nfs-utils_2.6.1.bb
+++ /dev/null
@@ -1,145 +0,0 @@
-SUMMARY = "userspace utilities for kernel nfs"
-DESCRIPTION = "The nfs-utils package provides a daemon for the kernel \
-NFS server and related tools."
-HOMEPAGE = "http://nfs.sourceforge.net/"
-SECTION = "console/network"
-
-LICENSE = "MIT & GPL-2.0-or-later & BSD-3-Clause"
-LIC_FILES_CHKSUM = "file://COPYING;md5=95f3a93a5c3c7888de623b46ea085a84"
-
-# util-linux for libblkid
-DEPENDS = "libcap libevent util-linux sqlite3 libtirpc"
-RDEPENDS:${PN} = "${PN}-client"
-RRECOMMENDS:${PN} = "kernel-module-nfsd"
-
-inherit useradd
-
-USERADD_PACKAGES = "${PN}-client"
-USERADD_PARAM:${PN}-client = "--system --home-dir /var/lib/nfs \
- --shell /bin/false --user-group rpcuser"
-
-SRC_URI = "${KERNELORG_MIRROR}/linux/utils/nfs-utils/${PV}/nfs-utils-${PV}.tar.xz \
- file://nfsserver \
- file://nfscommon \
- file://nfs-utils.conf \
- file://nfs-server.service \
- file://nfs-mountd.service \
- file://nfs-statd.service \
- file://proc-fs-nfsd.mount \
- file://nfs-utils-debianize-start-statd.patch \
- file://bugfix-adjust-statd-service-name.patch \
- file://0001-Makefile.am-fix-undefined-function-for-libnsm.a.patch \
- file://clang-warnings.patch \
- "
-SRC_URI[sha256sum] = "60dfcd94a9f3d72a12bc7058d811787ec87a6d593d70da2123faf9aad3d7a1df"
-
-# Only kernel-module-nfsd is required here (but can be built-in) - the nfsd module will
-# pull in the remainder of the dependencies.
-
-INITSCRIPT_PACKAGES = "${PN} ${PN}-client"
-INITSCRIPT_NAME = "nfsserver"
-INITSCRIPT_PARAMS = "defaults"
-INITSCRIPT_NAME:${PN}-client = "nfscommon"
-INITSCRIPT_PARAMS:${PN}-client = "defaults 19 21"
-
-inherit autotools-brokensep update-rc.d systemd pkgconfig
-
-SYSTEMD_PACKAGES = "${PN} ${PN}-client"
-SYSTEMD_SERVICE:${PN} = "nfs-server.service nfs-mountd.service"
-SYSTEMD_SERVICE:${PN}-client = "nfs-statd.service"
-
-# --enable-uuid is need for cross-compiling
-EXTRA_OECONF = "--with-statduser=rpcuser \
- --enable-mountconfig \
- --enable-libmount-mount \
- --enable-uuid \
- --disable-gss \
- --disable-nfsdcltrack \
- --with-statdpath=/var/lib/nfs/statd \
- --with-rpcgen=${HOSTTOOLS_DIR}/rpcgen \
- "
-
-PACKAGECONFIG ??= "tcp-wrappers \
- ${@bb.utils.filter('DISTRO_FEATURES', 'ipv6', d)} \
-"
-PACKAGECONFIG:remove:libc-musl = "tcp-wrappers"
-PACKAGECONFIG[tcp-wrappers] = "--with-tcp-wrappers,--without-tcp-wrappers,tcp-wrappers"
-PACKAGECONFIG[ipv6] = "--enable-ipv6,--disable-ipv6,"
-# libdevmapper is available in meta-oe
-PACKAGECONFIG[nfsv41] = "--enable-nfsv41,--disable-nfsv41,libdevmapper,libdevmapper"
-# keyutils is available in meta-oe
-PACKAGECONFIG[nfsv4] = "--enable-nfsv4,--disable-nfsv4,keyutils,python3-core"
-
-PACKAGES =+ "${PN}-client ${PN}-mount ${PN}-stats"
-
-CONFFILES:${PN}-client += "${localstatedir}/lib/nfs/etab \
- ${localstatedir}/lib/nfs/rmtab \
- ${localstatedir}/lib/nfs/xtab \
- ${localstatedir}/lib/nfs/statd/state \
- ${sysconfdir}/nfsmount.conf"
-
-FILES:${PN}-client = "${sbindir}/*statd \
- ${sbindir}/rpc.idmapd ${sbindir}/sm-notify \
- ${sbindir}/showmount ${sbindir}/nfsstat \
- ${localstatedir}/lib/nfs \
- ${sysconfdir}/nfs-utils.conf \
- ${sysconfdir}/nfsmount.conf \
- ${sysconfdir}/init.d/nfscommon \
- ${systemd_system_unitdir}/nfs-statd.service"
-RDEPENDS:${PN}-client = "${PN}-mount rpcbind"
-
-FILES:${PN}-mount = "${base_sbindir}/*mount.nfs*"
-
-FILES:${PN}-stats = "${sbindir}/mountstats ${sbindir}/nfsiostat ${sbindir}/nfsdclnts"
-RDEPENDS:${PN}-stats = "python3-core"
-
-FILES:${PN}-staticdev += "${libdir}/libnfsidmap/*.a"
-
-FILES:${PN} += "${systemd_unitdir} ${libdir}/libnfsidmap/"
-
-do_configure:prepend() {
- sed -i -e 's,sbindir = /sbin,sbindir = ${base_sbindir},g' \
- ${S}/utils/mount/Makefile.am
-}
-
-# Make clean needed because the package comes with
-# precompiled 64-bit objects that break the build
-do_compile:prepend() {
- make clean
-}
-
-# Works on systemd only
-HIGH_RLIMIT_NOFILE ??= "4096"
-
-do_install:append () {
- install -d ${D}${sysconfdir}/init.d
- install -m 0755 ${WORKDIR}/nfsserver ${D}${sysconfdir}/init.d/nfsserver
- install -m 0755 ${WORKDIR}/nfscommon ${D}${sysconfdir}/init.d/nfscommon
-
- install -m 0755 ${WORKDIR}/nfs-utils.conf ${D}${sysconfdir}
- install -m 0755 ${S}/utils/mount/nfsmount.conf ${D}${sysconfdir}
-
- install -d ${D}${systemd_system_unitdir}
- install -m 0644 ${WORKDIR}/nfs-server.service ${D}${systemd_system_unitdir}/
- install -m 0644 ${WORKDIR}/nfs-mountd.service ${D}${systemd_system_unitdir}/
- install -m 0644 ${WORKDIR}/nfs-statd.service ${D}${systemd_system_unitdir}/
- sed -i -e 's,@SBINDIR@,${sbindir},g' \
- -e 's,@SYSCONFDIR@,${sysconfdir},g' \
- -e 's,@HIGH_RLIMIT_NOFILE@,${HIGH_RLIMIT_NOFILE},g' \
- ${D}${systemd_system_unitdir}/*.service
- if ${@bb.utils.contains('DISTRO_FEATURES','systemd','true','false',d)}; then
- install -m 0644 ${WORKDIR}/proc-fs-nfsd.mount ${D}${systemd_system_unitdir}/
- install -d ${D}${systemd_system_unitdir}/sysinit.target.wants/
- ln -sf ../proc-fs-nfsd.mount ${D}${systemd_system_unitdir}/sysinit.target.wants/proc-fs-nfsd.mount
- fi
-
- # kernel code as of 3.8 hard-codes this path as a default
- install -d ${D}/var/lib/nfs/v4recovery
-
- # chown the directories and files
- chown -R rpcuser:rpcuser ${D}${localstatedir}/lib/nfs/statd
- chmod 0644 ${D}${localstatedir}/lib/nfs/statd/state
-
- # Make python tools use python 3
- sed -i -e '1s,#!.*python.*,#!${bindir}/python3,' ${D}${sbindir}/mountstats ${D}${sbindir}/nfsiostat
-}
diff --git a/meta/recipes-connectivity/nfs-utils/nfs-utils_2.6.4.bb b/meta/recipes-connectivity/nfs-utils/nfs-utils_2.6.4.bb
new file mode 100644
index 0000000000..2f2644f9a8
--- /dev/null
+++ b/meta/recipes-connectivity/nfs-utils/nfs-utils_2.6.4.bb
@@ -0,0 +1,154 @@
+SUMMARY = "userspace utilities for kernel nfs"
+DESCRIPTION = "The nfs-utils package provides a daemon for the kernel \
+NFS server and related tools."
+HOMEPAGE = "http://nfs.sourceforge.net/"
+SECTION = "console/network"
+
+LICENSE = "MIT & GPL-2.0-or-later & BSD-3-Clause"
+LIC_FILES_CHKSUM = "file://COPYING;md5=95f3a93a5c3c7888de623b46ea085a84"
+
+# util-linux for libblkid
+DEPENDS = "libcap libevent util-linux sqlite3 libtirpc"
+RDEPENDS:${PN} = "${PN}-client"
+RRECOMMENDS:${PN} = "kernel-module-nfsd"
+
+inherit useradd
+
+USERADD_PACKAGES = "${PN}-client"
+USERADD_PARAM:${PN}-client = "--system --home-dir /var/lib/nfs \
+ --shell /bin/false --user-group rpcuser"
+
+SRC_URI = "${KERNELORG_MIRROR}/linux/utils/nfs-utils/${PV}/nfs-utils-${PV}.tar.xz \
+ file://nfsserver \
+ file://nfscommon \
+ file://nfs-utils.conf \
+ file://nfs-server.service \
+ file://nfs-mountd.service \
+ file://nfs-statd.service \
+ file://proc-fs-nfsd.mount \
+ file://nfs-utils-debianize-start-statd.patch \
+ file://bugfix-adjust-statd-service-name.patch \
+ file://0001-Makefile.am-fix-undefined-function-for-libnsm.a.patch \
+ file://clang-warnings.patch \
+ file://0001-locktest-Makefile.am-Do-not-use-build-flags.patch \
+ file://0001-tools-locktest-Use-intmax_t-to-print-off_t.patch \
+ file://0001-reexport.h-Include-unistd.h-to-compile-with-musl.patch \
+ "
+SRC_URI[sha256sum] = "01b3b0fb9c7d0bbabf5114c736542030748c788ec2fd9734744201e9b0a1119d"
+
+# Only kernel-module-nfsd is required here (but can be built-in) - the nfsd module will
+# pull in the remainder of the dependencies.
+
+INITSCRIPT_PACKAGES = "${PN} ${PN}-client"
+INITSCRIPT_NAME = "nfsserver"
+INITSCRIPT_PARAMS = "defaults"
+INITSCRIPT_NAME:${PN}-client = "nfscommon"
+INITSCRIPT_PARAMS:${PN}-client = "defaults 19 21"
+
+inherit autotools-brokensep update-rc.d systemd pkgconfig
+
+SYSTEMD_PACKAGES = "${PN} ${PN}-client"
+SYSTEMD_SERVICE:${PN} = "nfs-server.service nfs-mountd.service"
+SYSTEMD_SERVICE:${PN}-client = "nfs-statd.service"
+
+# --enable-uuid is need for cross-compiling
+EXTRA_OECONF = "--with-statduser=rpcuser \
+ --enable-mountconfig \
+ --enable-libmount-mount \
+ --enable-uuid \
+ --disable-gss \
+ --disable-nfsdcltrack \
+ --with-statdpath=/var/lib/nfs/statd \
+ --with-rpcgen=${HOSTTOOLS_DIR}/rpcgen \
+ "
+
+LDFLAGS:append = " -lsqlite3 -levent"
+
+PACKAGECONFIG ??= "tcp-wrappers \
+ ${@bb.utils.filter('DISTRO_FEATURES', 'ipv6', d)} \
+"
+PACKAGECONFIG:remove:libc-musl = "tcp-wrappers"
+PACKAGECONFIG[tcp-wrappers] = "--with-tcp-wrappers,--without-tcp-wrappers,tcp-wrappers"
+PACKAGECONFIG[ipv6] = "--enable-ipv6,--disable-ipv6,"
+# libdevmapper is available in meta-oe
+PACKAGECONFIG[nfsv41] = "--enable-nfsv41,--disable-nfsv41,libdevmapper,libdevmapper"
+# keyutils is available in meta-oe
+PACKAGECONFIG[nfsv4] = "--enable-nfsv4,--disable-nfsv4,keyutils,python3-core"
+
+PACKAGES =+ "${PN}-client ${PN}-mount ${PN}-stats ${PN}-rpcctl"
+
+CONFFILES:${PN}-client += "${localstatedir}/lib/nfs/etab \
+ ${localstatedir}/lib/nfs/rmtab \
+ ${localstatedir}/lib/nfs/xtab \
+ ${localstatedir}/lib/nfs/statd/state \
+ ${sysconfdir}/nfsmount.conf"
+
+FILES:${PN}-client = "${sbindir}/*statd \
+ ${libdir}/libnfsidmap.so.* \
+ ${sbindir}/rpc.idmapd ${sbindir}/sm-notify \
+ ${sbindir}/showmount ${sbindir}/nfsstat \
+ ${localstatedir}/lib/nfs \
+ ${sysconfdir}/nfs-utils.conf \
+ ${sysconfdir}/nfsmount.conf \
+ ${sysconfdir}/init.d/nfscommon \
+ ${systemd_system_unitdir}/nfs-statd.service"
+RDEPENDS:${PN}-client = "${PN}-mount rpcbind"
+
+FILES:${PN}-mount = "${base_sbindir}/*mount.nfs*"
+
+FILES:${PN}-stats = "${sbindir}/mountstats ${sbindir}/nfsiostat ${sbindir}/nfsdclnts"
+RDEPENDS:${PN}-stats = "python3-core"
+
+FILES:${PN}-rpcctl = "${sbindir}/rpcctl"
+RDEPENDS:${PN}-rpcctl = "python3-core"
+
+FILES:${PN}-staticdev += "${libdir}/libnfsidmap/*.a"
+
+FILES:${PN} += "${systemd_unitdir} ${libdir}/libnfsidmap/ ${nonarch_libdir}/modprobe.d"
+
+do_configure:prepend() {
+ sed -i -e 's,sbindir = /sbin,sbindir = ${base_sbindir},g' \
+ ${S}/utils/mount/Makefile.am
+}
+
+# Make clean needed because the package comes with
+# precompiled 64-bit objects that break the build
+do_compile:prepend() {
+ make clean
+}
+
+# Works on systemd only
+HIGH_RLIMIT_NOFILE ??= "4096"
+
+do_install:append () {
+ install -d ${D}${sysconfdir}/init.d
+ install -m 0755 ${WORKDIR}/nfsserver ${D}${sysconfdir}/init.d/nfsserver
+ install -m 0755 ${WORKDIR}/nfscommon ${D}${sysconfdir}/init.d/nfscommon
+
+ install -m 0755 ${WORKDIR}/nfs-utils.conf ${D}${sysconfdir}
+ install -m 0755 ${S}/utils/mount/nfsmount.conf ${D}${sysconfdir}
+
+ install -d ${D}${systemd_system_unitdir}
+ install -m 0644 ${WORKDIR}/nfs-server.service ${D}${systemd_system_unitdir}/
+ install -m 0644 ${WORKDIR}/nfs-mountd.service ${D}${systemd_system_unitdir}/
+ install -m 0644 ${WORKDIR}/nfs-statd.service ${D}${systemd_system_unitdir}/
+ sed -i -e 's,@SBINDIR@,${sbindir},g' \
+ -e 's,@SYSCONFDIR@,${sysconfdir},g' \
+ -e 's,@HIGH_RLIMIT_NOFILE@,${HIGH_RLIMIT_NOFILE},g' \
+ ${D}${systemd_system_unitdir}/*.service
+ if ${@bb.utils.contains('DISTRO_FEATURES','systemd','true','false',d)}; then
+ install -m 0644 ${WORKDIR}/proc-fs-nfsd.mount ${D}${systemd_system_unitdir}/
+ install -d ${D}${systemd_system_unitdir}/sysinit.target.wants/
+ ln -sf ../proc-fs-nfsd.mount ${D}${systemd_system_unitdir}/sysinit.target.wants/proc-fs-nfsd.mount
+ fi
+
+ # kernel code as of 3.8 hard-codes this path as a default
+ install -d ${D}/var/lib/nfs/v4recovery
+
+ # chown the directories and files
+ chown -R rpcuser:rpcuser ${D}${localstatedir}/lib/nfs/statd
+ chmod 0644 ${D}${localstatedir}/lib/nfs/statd/state
+
+ # Make python tools use python 3
+ sed -i -e '1s,#!.*python.*,#!${bindir}/python3,' ${D}${sbindir}/mountstats ${D}${sbindir}/nfsiostat
+}
diff --git a/meta/recipes-connectivity/ofono/ofono_1.34.bb b/meta/recipes-connectivity/ofono/ofono_1.34.bb
deleted file mode 100644
index 23631747a7..0000000000
--- a/meta/recipes-connectivity/ofono/ofono_1.34.bb
+++ /dev/null
@@ -1,55 +0,0 @@
-SUMMARY = "open source telephony"
-DESCRIPTION = "oFono is a stack for mobile telephony devices on Linux. oFono supports speaking to telephony devices through specific drivers, or with generic AT commands."
-HOMEPAGE = "http://www.ofono.org"
-BUGTRACKER = "https://01.org/jira/browse/OF"
-LICENSE = "GPL-2.0-only"
-LIC_FILES_CHKSUM = "file://COPYING;md5=eb723b61539feef013de476e68b5c50a \
- file://src/ofono.h;beginline=1;endline=20;md5=3ce17d5978ef3445def265b98899c2ee"
-DEPENDS = "dbus glib-2.0 udev mobile-broadband-provider-info ell"
-
-SRC_URI = "\
- ${KERNELORG_MIRROR}/linux/network/${BPN}/${BP}.tar.xz \
- file://ofono \
- file://0001-mbim-add-an-optional-TEMP_FAILURE_RETRY-macro-copy.patch \
- file://0002-mbim-Fix-build-with-ell-0.39-by-restoring-unlikely-m.patch \
-"
-SRC_URI[sha256sum] = "c0b96d3013447ec2bcb74579bef90e4e59c68dbfa4b9c6fbce5d12401a43aac7"
-
-inherit autotools pkgconfig update-rc.d systemd gobject-introspection-data
-
-INITSCRIPT_NAME = "ofono"
-INITSCRIPT_PARAMS = "defaults 22"
-SYSTEMD_SERVICE:${PN} = "ofono.service"
-
-PACKAGECONFIG ??= "\
- ${@bb.utils.filter('DISTRO_FEATURES', 'systemd', d)} \
- ${@bb.utils.contains('DISTRO_FEATURES', 'bluetooth', 'bluez', '', d)} \
-"
-PACKAGECONFIG[systemd] = "--with-systemdunitdir=${systemd_system_unitdir}/,--with-systemdunitdir="
-PACKAGECONFIG[bluez] = "--enable-bluetooth, --disable-bluetooth, bluez5"
-
-EXTRA_OECONF += "--enable-test --enable-external-ell"
-
-do_configure:prepend() {
- bbnote "Removing bundled ell from ${S}/ell to prevent including it"
- rm -rf ${S}/ell
-}
-
-do_install:append() {
- install -d ${D}${sysconfdir}/init.d/
- install -m 0755 ${WORKDIR}/ofono ${D}${sysconfdir}/init.d/ofono
-}
-
-PACKAGES =+ "${PN}-tests"
-
-FILES:${PN} += "${systemd_unitdir}"
-FILES:${PN}-tests = "${libdir}/${BPN}/test"
-
-RDEPENDS:${PN} += "dbus"
-RDEPENDS:${PN}-tests = "\
- python3-core \
- python3-dbus \
- ${@bb.utils.contains('GI_DATA_ENABLED', 'True', 'python3-pygobject', '', d)} \
-"
-
-RRECOMMENDS:${PN} += "kernel-module-tun mobile-broadband-provider-info"
diff --git a/meta/recipes-connectivity/ofono/ofono_2.4.bb b/meta/recipes-connectivity/ofono/ofono_2.4.bb
new file mode 100644
index 0000000000..dae5cc3c25
--- /dev/null
+++ b/meta/recipes-connectivity/ofono/ofono_2.4.bb
@@ -0,0 +1,55 @@
+SUMMARY = "open source telephony"
+DESCRIPTION = "oFono is a stack for mobile telephony devices on Linux. oFono supports speaking to telephony devices through specific drivers, or with generic AT commands."
+HOMEPAGE = "http://www.ofono.org"
+BUGTRACKER = "https://01.org/jira/browse/OF"
+LICENSE = "GPL-2.0-only"
+LIC_FILES_CHKSUM = "file://COPYING;md5=eb723b61539feef013de476e68b5c50a \
+ file://src/ofono.h;beginline=1;endline=20;md5=3ce17d5978ef3445def265b98899c2ee"
+DEPENDS = "dbus glib-2.0 udev mobile-broadband-provider-info ell"
+
+SRC_URI = "\
+ ${KERNELORG_MIRROR}/linux/network/${BPN}/${BP}.tar.xz \
+ file://ofono \
+ file://0001-mbim-add-an-optional-TEMP_FAILURE_RETRY-macro-copy.patch \
+ file://0002-mbim-Fix-build-with-ell-0.39-by-restoring-unlikely-m.patch \
+"
+SRC_URI[sha256sum] = "93580adc1afd1890dc516efb069de0c5cdfef014415256ddfb28ab172df2d11d"
+
+inherit autotools pkgconfig update-rc.d systemd gobject-introspection-data
+
+INITSCRIPT_NAME = "ofono"
+INITSCRIPT_PARAMS = "defaults 22"
+SYSTEMD_SERVICE:${PN} = "ofono.service"
+
+PACKAGECONFIG ??= "\
+ ${@bb.utils.filter('DISTRO_FEATURES', 'systemd', d)} \
+ ${@bb.utils.contains('DISTRO_FEATURES', 'bluetooth', 'bluez', '', d)} \
+"
+PACKAGECONFIG[systemd] = "--with-systemdunitdir=${systemd_system_unitdir}/,--with-systemdunitdir="
+PACKAGECONFIG[bluez] = "--enable-bluetooth, --disable-bluetooth, bluez5"
+
+EXTRA_OECONF += "--enable-test --enable-external-ell"
+
+do_configure:prepend() {
+ bbnote "Removing bundled ell from ${S}/ell to prevent including it"
+ rm -rf ${S}/ell
+}
+
+do_install:append() {
+ install -d ${D}${sysconfdir}/init.d/
+ install -m 0755 ${WORKDIR}/ofono ${D}${sysconfdir}/init.d/ofono
+}
+
+PACKAGES =+ "${PN}-tests"
+
+FILES:${PN} += "${systemd_unitdir}"
+FILES:${PN}-tests = "${libdir}/${BPN}/test"
+
+RDEPENDS:${PN} += "dbus"
+RDEPENDS:${PN}-tests = "\
+ python3-core \
+ python3-dbus \
+ ${@bb.utils.contains('GI_DATA_ENABLED', 'True', 'python3-pygobject', '', d)} \
+"
+
+RRECOMMENDS:${PN} += "kernel-module-tun mobile-broadband-provider-info"
diff --git a/meta/recipes-connectivity/openssh/openssh/0001-regress-banner.sh-log-input-and-output-files-on-erro.patch b/meta/recipes-connectivity/openssh/openssh/0001-regress-banner.sh-log-input-and-output-files-on-erro.patch
new file mode 100644
index 0000000000..8763f30f4b
--- /dev/null
+++ b/meta/recipes-connectivity/openssh/openssh/0001-regress-banner.sh-log-input-and-output-files-on-erro.patch
@@ -0,0 +1,61 @@
+From f5a4dacc987ca548fc86577c2dba121c86da3c34 Mon Sep 17 00:00:00 2001
+From: Mikko Rapeli <mikko.rapeli@linaro.org>
+Date: Mon, 11 Sep 2023 09:55:21 +0100
+Subject: [PATCH] regress/banner.sh: log input and output files on error
+
+Some test environments like yocto with qemu are seeing these
+tests failing. There may be additional error messages in the
+stderr of ssh cloent command. busybox cmp shows this error when
+first input file has less new line characters then second
+input file:
+
+cmp: EOF on /usr/lib/openssh/ptest/regress/banner.in
+
+Logging the full banner.out will show what other error messages
+are captured in addition of the expected banner.
+
+Full log of a failing banner test runs is:
+
+run test banner.sh ...
+test banner: missing banner file
+test banner: size 0
+cmp: EOF on /usr/lib/openssh/ptest/regress/banner.in
+banner size 0 mismatch
+test banner: size 10
+test banner: size 100
+cmp: EOF on /usr/lib/openssh/ptest/regress/banner.in
+banner size 100 mismatch
+test banner: size 1000
+test banner: size 10000
+test banner: size 100000
+test banner: suppress banner (-q)
+FAIL: banner
+return value: 1
+
+See: https://bugzilla.yoctoproject.org/show_bug.cgi?id=15178
+
+Upstream-Status: Denied [https://github.com/openssh/openssh-portable/pull/437]
+
+Signed-off-by: Mikko Rapeli <mikko.rapeli@linaro.org>
+---
+ regress/banner.sh | 4 +++-
+ 1 file changed, 3 insertions(+), 1 deletion(-)
+
+diff --git a/regress/banner.sh b/regress/banner.sh
+index a84feb5a..de84957a 100644
+--- a/regress/banner.sh
++++ b/regress/banner.sh
+@@ -32,7 +32,9 @@ for s in 0 10 100 1000 10000 100000 ; do
+ verbose "test $tid: size $s"
+ ( ${SSH} -F $OBJ/ssh_proxy otherhost true 2>$OBJ/banner.out && \
+ cmp $OBJ/banner.in $OBJ/banner.out ) || \
+- fail "banner size $s mismatch"
++ ( verbose "Contents of $OBJ/banner.in:"; cat $OBJ/banner.in; \
++ verbose "Contents of $OBJ/banner.out:"; cat $OBJ/banner.out; \
++ fail "banner size $s mismatch" )
+ done
+
+ trace "test suppress banner (-q)"
+--
+2.34.1
+
diff --git a/meta/recipes-connectivity/openssh/openssh/0001-systemd-Add-optional-support-for-systemd-sd_notify.patch b/meta/recipes-connectivity/openssh/openssh/0001-systemd-Add-optional-support-for-systemd-sd_notify.patch
new file mode 100644
index 0000000000..f079d936a4
--- /dev/null
+++ b/meta/recipes-connectivity/openssh/openssh/0001-systemd-Add-optional-support-for-systemd-sd_notify.patch
@@ -0,0 +1,96 @@
+From b02ef7621758f06eb686ef4f620636dbad086eda Mon Sep 17 00:00:00 2001
+From: Matt Jolly <Matt.Jolly@footclan.ninja>
+Date: Thu, 2 Feb 2023 21:05:40 +1100
+Subject: [PATCH] systemd: Add optional support for systemd `sd_notify`
+
+This is a rebase of Dennis Lamm's <expeditioneer@gentoo.org>
+patch based on Jakub Jelen's <jjelen@redhat.com> original patch
+
+Upstream-Status: Submitted [https://github.com/openssh/openssh-portable/pull/375/commits/be187435911cde6cc3cef6982a508261074f1e56]
+
+Signed-off-by: Xiangyu Chen <xiangyu.chen@windriver.com>
+---
+ configure.ac | 24 ++++++++++++++++++++++++
+ sshd.c | 13 +++++++++++++
+ 2 files changed, 37 insertions(+)
+
+diff --git a/configure.ac b/configure.ac
+index 82e8bb7..d1145d3 100644
+--- a/configure.ac
++++ b/configure.ac
+@@ -4870,6 +4870,29 @@ AC_SUBST([GSSLIBS])
+ AC_SUBST([K5LIBS])
+ AC_SUBST([CHANNELLIBS])
+
++# Check whether user wants systemd support
++SYSTEMD_MSG="no"
++AC_ARG_WITH(systemd,
++ [ --with-systemd Enable systemd support],
++ [ if test "x$withval" != "xno" ; then
++ AC_PATH_TOOL([PKGCONFIG], [pkg-config], [no])
++ if test "$PKGCONFIG" != "no"; then
++ AC_MSG_CHECKING([for libsystemd])
++ if $PKGCONFIG --exists libsystemd; then
++ SYSTEMD_CFLAGS=`$PKGCONFIG --cflags libsystemd`
++ SYSTEMD_LIBS=`$PKGCONFIG --libs libsystemd`
++ CPPFLAGS="$CPPFLAGS $SYSTEMD_CFLAGS"
++ SSHDLIBS="$SSHDLIBS $SYSTEMD_LIBS"
++ AC_MSG_RESULT([yes])
++ AC_DEFINE(HAVE_SYSTEMD, 1, [Define if you want systemd support.])
++ SYSTEMD_MSG="yes"
++ else
++ AC_MSG_RESULT([no])
++ fi
++ fi
++ fi ]
++)
++
+ # Looking for programs, paths and files
+
+ PRIVSEP_PATH=/var/empty
+@@ -5688,6 +5711,7 @@ echo " libldns support: $LDNS_MSG"
+ echo " Solaris process contract support: $SPC_MSG"
+ echo " Solaris project support: $SP_MSG"
+ echo " Solaris privilege support: $SPP_MSG"
++echo " systemd support: $SYSTEMD_MSG"
+ echo " IP address in \$DISPLAY hack: $DISPLAY_HACK_MSG"
+ echo " Translate v4 in v6 hack: $IPV4_IN6_HACK_MSG"
+ echo " BSD Auth support: $BSD_AUTH_MSG"
+diff --git a/sshd.c b/sshd.c
+index b4f2b97..6820a41 100644
+--- a/sshd.c
++++ b/sshd.c
+@@ -88,6 +88,10 @@
+ #include <prot.h>
+ #endif
+
++#ifdef HAVE_SYSTEMD
++#include <systemd/sd-daemon.h>
++#endif
++
+ #include "xmalloc.h"
+ #include "ssh.h"
+ #include "ssh2.h"
+@@ -308,6 +312,10 @@ static void
+ sighup_restart(void)
+ {
+ logit("Received SIGHUP; restarting.");
++#ifdef HAVE_SYSTEMD
++ /* Signal systemd that we are reloading */
++ sd_notify(0, "RELOADING=1");
++#endif
+ if (options.pid_file != NULL)
+ unlink(options.pid_file);
+ platform_pre_restart();
+@@ -2093,6 +2101,11 @@ main(int ac, char **av)
+ }
+ }
+
++#ifdef HAVE_SYSTEMD
++ /* Signal systemd that we are ready to accept connections */
++ sd_notify(0, "READY=1");
++#endif
++
+ /* Accept a connection and return in a forked child */
+ server_accept_loop(&sock_in, &sock_out,
+ &newsock, config_s);
diff --git a/meta/recipes-connectivity/openssh/openssh/run-ptest b/meta/recipes-connectivity/openssh/openssh/run-ptest
index 8a9b770d59..b2244d725a 100755
--- a/meta/recipes-connectivity/openssh/openssh/run-ptest
+++ b/meta/recipes-connectivity/openssh/openssh/run-ptest
@@ -4,8 +4,22 @@ export TEST_SHELL=sh
export SKIP_UNIT=1
cd regress
+
+# copied from openssh-portable/.github/run_test.sh
+output_failed_logs() {
+ for i in failed*.log; do
+ if [ -f "$i" ]; then
+ echo -------------------------------------------------------------------------
+ echo LOGFILE $i
+ cat $i
+ echo -------------------------------------------------------------------------
+ fi
+ done
+}
+trap output_failed_logs 0
+
sed -i "/\t\tagent-ptrace /d" Makefile
-make -k BUILDDIR=`pwd`/.. .OBJDIR=`pwd` .CURDIR=`pwd` SUDO="sudo" tests \
+make -k BUILDDIR=`pwd`/.. .OBJDIR=`pwd` .CURDIR=`pwd` SUDO="" tests \
| sed -u -e 's/^skipped/SKIP: /g' -e 's/^ok /PASS: /g' -e 's/^failed/FAIL: /g'
SSHAGENT=`which ssh-agent`
diff --git a/meta/recipes-connectivity/openssh/openssh/ssh_config b/meta/recipes-connectivity/openssh/openssh/ssh_config
index e0d023803e..cb2774a163 100644
--- a/meta/recipes-connectivity/openssh/openssh/ssh_config
+++ b/meta/recipes-connectivity/openssh/openssh/ssh_config
@@ -1,4 +1,4 @@
-# $OpenBSD: ssh_config,v 1.33 2017/05/07 23:12:57 djm Exp $
+# $OpenBSD: ssh_config,v 1.35 2020/07/17 03:43:42 dtucker Exp $
# This is the ssh client system-wide configuration file. See
# ssh_config(5) for more information. This file provides defaults for
@@ -17,11 +17,11 @@
# list of available options, their meanings and defaults, please see the
# ssh_config(5) man page.
-Host *
- ForwardAgent yes
- ForwardX11 yes
-# RhostsRSAAuthentication no
-# RSAAuthentication yes
+Include /etc/ssh/ssh_config.d/*.conf
+
+# Host *
+# ForwardAgent no
+# ForwardX11 no
# PasswordAuthentication yes
# HostbasedAuthentication no
# GSSAPIAuthentication no
@@ -36,7 +36,6 @@ Host *
# IdentityFile ~/.ssh/id_ecdsa
# IdentityFile ~/.ssh/id_ed25519
# Port 22
-# Protocol 2
# Ciphers aes128-ctr,aes192-ctr,aes256-ctr,aes128-cbc,3des-cbc
# MACs hmac-md5,hmac-sha1,umac-64@openssh.com
# EscapeChar ~
@@ -46,3 +45,4 @@ Host *
# VisualHostKey no
# ProxyCommand ssh -q -W %h:%p gateway.example.com
# RekeyLimit 1G 1h
+# UserKnownHostsFile ~/.ssh/known_hosts.d/%k
diff --git a/meta/recipes-connectivity/openssh/openssh/sshd.service b/meta/recipes-connectivity/openssh/openssh/sshd.service
new file mode 100644
index 0000000000..2a997b656a
--- /dev/null
+++ b/meta/recipes-connectivity/openssh/openssh/sshd.service
@@ -0,0 +1,17 @@
+[Unit]
+Description=OpenSSH server daemon
+Wants=sshdgenkeys.service
+After=sshdgenkeys.service
+
+[Service]
+Environment="SSHD_OPTS="
+EnvironmentFile=-/etc/default/ssh
+ExecStartPre=@BASE_BINDIR@/mkdir -p /var/run/sshd
+ExecStart=-@SBINDIR@/sshd -D $SSHD_OPTS
+ExecReload=@BASE_BINDIR@/kill -HUP $MAINPID
+KillMode=process
+Restart=on-failure
+RestartSec=42s
+
+[Install]
+WantedBy=multi-user.target
diff --git a/meta/recipes-connectivity/openssh/openssh/sshd_check_keys b/meta/recipes-connectivity/openssh/openssh/sshd_check_keys
index ef117de897..606d1894b5 100644
--- a/meta/recipes-connectivity/openssh/openssh/sshd_check_keys
+++ b/meta/recipes-connectivity/openssh/openssh/sshd_check_keys
@@ -57,8 +57,7 @@ while true ; do
esac
done
-HOST_KEYS=$(sed -n 's/^[ \t]*HostKey[ \t]\+\(.*\)/\1/p' "${sshd_config}")
-[ -z "${HOST_KEYS}" ] && HOST_KEYS="$SYSCONFDIR/ssh_host_rsa_key $SYSCONFDIR/ssh_host_ecdsa_key $SYSCONFDIR/ssh_host_ed25519_key"
+HOST_KEYS=$(sshd -G -f "${sshd_config}" | grep -i '^hostkey ' | cut -f2 -d' ')
for key in ${HOST_KEYS} ; do
[ -f $key ] && continue
diff --git a/meta/recipes-connectivity/openssh/openssh/sshd_config b/meta/recipes-connectivity/openssh/openssh/sshd_config
index 15f061b570..e9eaf93157 100644
--- a/meta/recipes-connectivity/openssh/openssh/sshd_config
+++ b/meta/recipes-connectivity/openssh/openssh/sshd_config
@@ -1,4 +1,4 @@
-# $OpenBSD: sshd_config,v 1.102 2018/02/16 02:32:40 djm Exp $
+# $OpenBSD: sshd_config,v 1.104 2021/07/02 05:11:21 dtucker Exp $
# This is the sshd server system-wide configuration file. See
# sshd_config(5) for more information.
@@ -10,6 +10,8 @@
# possible, but leave them commented. Uncommented options override the
# default value.
+Include /etc/ssh/sshd_config.d/*.conf
+
#Port 22
#AddressFamily any
#ListenAddress 0.0.0.0
@@ -57,9 +59,9 @@ AuthorizedKeysFile .ssh/authorized_keys
#PasswordAuthentication yes
#PermitEmptyPasswords no
-# Change to yes to enable challenge-response passwords (beware issues with
-# some PAM modules and threads)
-ChallengeResponseAuthentication no
+# Change to yes to enable keyboard-interactive authentication (beware issues
+# with some PAM modules and threads)
+KbdInteractiveAuthentication no
# Kerberos options
#KerberosAuthentication no
@@ -73,13 +75,13 @@ ChallengeResponseAuthentication no
# Set this to 'yes' to enable PAM authentication, account processing,
# and session processing. If this is enabled, PAM authentication will
-# be allowed through the ChallengeResponseAuthentication and
+# be allowed through the KbdInteractiveAuthentication and
# PasswordAuthentication. Depending on your PAM configuration,
-# PAM authentication via ChallengeResponseAuthentication may bypass
+# PAM authentication via KbdInteractiveAuthentication may bypass
# the setting of "PermitRootLogin without-password".
# If you just want the PAM account and session checks to run without
# PAM authentication, then enable this but set PasswordAuthentication
-# and ChallengeResponseAuthentication to 'no'.
+# and KbdInteractiveAuthentication to 'no'.
#UsePAM no
#AllowAgentForwarding yes
@@ -92,7 +94,6 @@ ChallengeResponseAuthentication no
#PrintMotd yes
#PrintLastLog yes
#TCPKeepAlive yes
-#UseLogin no
#PermitUserEnvironment no
Compression no
ClientAliveInterval 15
diff --git a/meta/recipes-connectivity/openssh/openssh_9.0p1.bb b/meta/recipes-connectivity/openssh/openssh_9.0p1.bb
deleted file mode 100644
index b63ea2b137..0000000000
--- a/meta/recipes-connectivity/openssh/openssh_9.0p1.bb
+++ /dev/null
@@ -1,183 +0,0 @@
-SUMMARY = "A suite of security-related network utilities based on \
-the SSH protocol including the ssh client and sshd server"
-DESCRIPTION = "Secure rlogin/rsh/rcp/telnet replacement (OpenSSH) \
-Ssh (Secure Shell) is a program for logging into a remote machine \
-and for executing commands on a remote machine."
-HOMEPAGE = "http://www.openssh.com/"
-SECTION = "console/network"
-LICENSE = "BSD-2-Clause & BSD-3-Clause & ISC & MIT"
-LIC_FILES_CHKSUM = "file://LICENCE;md5=8baf365614c9bdd63705f298c9afbfb9"
-
-DEPENDS = "zlib openssl virtual/crypt"
-DEPENDS += "${@bb.utils.contains('DISTRO_FEATURES', 'pam', 'libpam', '', d)}"
-
-SRC_URI = "http://ftp.openbsd.org/pub/OpenBSD/OpenSSH/portable/openssh-${PV}.tar.gz \
- file://sshd_config \
- file://ssh_config \
- file://init \
- ${@bb.utils.contains('DISTRO_FEATURES', 'pam', '${PAM_SRC_URI}', '', d)} \
- file://sshd.socket \
- file://sshd@.service \
- file://sshdgenkeys.service \
- file://volatiles.99_sshd \
- file://run-ptest \
- file://fix-potential-signed-overflow-in-pointer-arithmatic.patch \
- file://sshd_check_keys \
- file://add-test-support-for-busybox.patch \
- "
-SRC_URI[sha256sum] = "03974302161e9ecce32153cfa10012f1e65c8f3750f573a73ab1befd5972a28a"
-
-# This CVE is specific to OpenSSH with the pam opie which we don't build/use here
-CVE_CHECK_IGNORE += "CVE-2007-2768"
-
-# This CVE is specific to OpenSSH server, as used in Fedora and Red Hat Enterprise Linux 7
-# and when running in a Kerberos environment. As such it is not relevant to OpenEmbedded
-CVE_CHECK_IGNORE += "CVE-2014-9278"
-
-# CVE only applies to some distributed RHEL binaries
-CVE_CHECK_IGNORE += "CVE-2008-3844"
-
-PAM_SRC_URI = "file://sshd"
-
-inherit manpages useradd update-rc.d update-alternatives systemd
-
-USERADD_PACKAGES = "${PN}-sshd"
-USERADD_PARAM:${PN}-sshd = "--system --no-create-home --home-dir /var/run/sshd --shell /bin/false --user-group sshd"
-INITSCRIPT_PACKAGES = "${PN}-sshd"
-INITSCRIPT_NAME:${PN}-sshd = "sshd"
-INITSCRIPT_PARAMS:${PN}-sshd = "defaults 9"
-
-SYSTEMD_PACKAGES = "${PN}-sshd"
-SYSTEMD_SERVICE:${PN}-sshd = "sshd.socket"
-
-inherit autotools-brokensep ptest
-
-PACKAGECONFIG ??= "rng-tools"
-PACKAGECONFIG[kerberos] = "--with-kerberos5,--without-kerberos5,krb5"
-PACKAGECONFIG[ldns] = "--with-ldns,--without-ldns,ldns"
-PACKAGECONFIG[libedit] = "--with-libedit,--without-libedit,libedit"
-PACKAGECONFIG[manpages] = "--with-mantype=man,--with-mantype=cat"
-
-# Add RRECOMMENDS to rng-tools for sshd package
-PACKAGECONFIG[rng-tools] = ""
-
-EXTRA_AUTORECONF += "--exclude=aclocal"
-
-# login path is hardcoded in sshd
-EXTRA_OECONF = "'LOGIN_PROGRAM=${base_bindir}/login' \
- ${@bb.utils.contains('DISTRO_FEATURES', 'pam', '--with-pam', '--without-pam', d)} \
- --without-zlib-version-check \
- --with-privsep-path=${localstatedir}/run/sshd \
- --sysconfdir=${sysconfdir}/ssh \
- --with-xauth=${bindir}/xauth \
- --disable-strip \
- "
-
-# musl doesn't implement wtmp/utmp and logwtmp
-EXTRA_OECONF:append:libc-musl = " --disable-wtmp --disable-lastlog"
-
-# Since we do not depend on libbsd, we do not want configure to use it
-# just because it finds libutil.h. But, specifying --disable-libutil
-# causes compile errors, so...
-CACHED_CONFIGUREVARS += "ac_cv_header_bsd_libutil_h=no ac_cv_header_libutil_h=no"
-
-# passwd path is hardcoded in sshd
-CACHED_CONFIGUREVARS += "ac_cv_path_PATH_PASSWD_PROG=${bindir}/passwd"
-
-# We don't want to depend on libblockfile
-CACHED_CONFIGUREVARS += "ac_cv_header_maillock_h=no"
-
-do_configure:prepend () {
- export LD="${CC}"
- install -m 0644 ${WORKDIR}/sshd_config ${B}/
- install -m 0644 ${WORKDIR}/ssh_config ${B}/
-}
-
-do_compile_ptest() {
- oe_runmake regress-binaries regress-unit-binaries
-}
-
-do_install:append () {
- if [ "${@bb.utils.filter('DISTRO_FEATURES', 'pam', d)}" ]; then
- install -D -m 0644 ${WORKDIR}/sshd ${D}${sysconfdir}/pam.d/sshd
- sed -i -e 's:#UsePAM no:UsePAM yes:' ${D}${sysconfdir}/ssh/sshd_config
- fi
-
- if [ "${@bb.utils.filter('DISTRO_FEATURES', 'x11', d)}" ]; then
- sed -i -e 's:#X11Forwarding no:X11Forwarding yes:' ${D}${sysconfdir}/ssh/sshd_config
- fi
-
- install -d ${D}${sysconfdir}/init.d
- install -m 0755 ${WORKDIR}/init ${D}${sysconfdir}/init.d/sshd
- rm -f ${D}${bindir}/slogin ${D}${datadir}/Ssh.bin
- rmdir ${D}${localstatedir}/run/sshd ${D}${localstatedir}/run ${D}${localstatedir}
- install -d ${D}/${sysconfdir}/default/volatiles
- install -m 644 ${WORKDIR}/volatiles.99_sshd ${D}/${sysconfdir}/default/volatiles/99_sshd
- install -m 0755 ${S}/contrib/ssh-copy-id ${D}${bindir}
-
- # Create config files for read-only rootfs
- install -d ${D}${sysconfdir}/ssh
- install -m 644 ${D}${sysconfdir}/ssh/sshd_config ${D}${sysconfdir}/ssh/sshd_config_readonly
- sed -i '/HostKey/d' ${D}${sysconfdir}/ssh/sshd_config_readonly
- echo "HostKey /var/run/ssh/ssh_host_rsa_key" >> ${D}${sysconfdir}/ssh/sshd_config_readonly
- echo "HostKey /var/run/ssh/ssh_host_ecdsa_key" >> ${D}${sysconfdir}/ssh/sshd_config_readonly
- echo "HostKey /var/run/ssh/ssh_host_ed25519_key" >> ${D}${sysconfdir}/ssh/sshd_config_readonly
-
- install -d ${D}${systemd_system_unitdir}
- install -c -m 0644 ${WORKDIR}/sshd.socket ${D}${systemd_system_unitdir}
- install -c -m 0644 ${WORKDIR}/sshd@.service ${D}${systemd_system_unitdir}
- install -c -m 0644 ${WORKDIR}/sshdgenkeys.service ${D}${systemd_system_unitdir}
- sed -i -e 's,@BASE_BINDIR@,${base_bindir},g' \
- -e 's,@SBINDIR@,${sbindir},g' \
- -e 's,@BINDIR@,${bindir},g' \
- -e 's,@LIBEXECDIR@,${libexecdir}/${BPN},g' \
- ${D}${systemd_system_unitdir}/sshd.socket ${D}${systemd_system_unitdir}/*.service
-
- sed -i -e 's,@LIBEXECDIR@,${libexecdir}/${BPN},g' \
- ${D}${sysconfdir}/init.d/sshd
-
- install -D -m 0755 ${WORKDIR}/sshd_check_keys ${D}${libexecdir}/${BPN}/sshd_check_keys
-}
-
-do_install_ptest () {
- sed -i -e "s|^SFTPSERVER=.*|SFTPSERVER=${libexecdir}/sftp-server|" regress/test-exec.sh
- cp -r regress ${D}${PTEST_PATH}
- cp config.h ${D}${PTEST_PATH}
-}
-
-ALLOW_EMPTY:${PN} = "1"
-
-PACKAGES =+ "${PN}-keygen ${PN}-scp ${PN}-ssh ${PN}-sshd ${PN}-sftp ${PN}-misc ${PN}-sftp-server"
-FILES:${PN}-scp = "${bindir}/scp.${BPN}"
-FILES:${PN}-ssh = "${bindir}/ssh.${BPN} ${sysconfdir}/ssh/ssh_config"
-FILES:${PN}-sshd = "${sbindir}/sshd ${sysconfdir}/init.d/sshd ${systemd_system_unitdir}"
-FILES:${PN}-sshd += "${sysconfdir}/ssh/moduli ${sysconfdir}/ssh/sshd_config ${sysconfdir}/ssh/sshd_config_readonly ${sysconfdir}/default/volatiles/99_sshd ${sysconfdir}/pam.d/sshd"
-FILES:${PN}-sshd += "${libexecdir}/${BPN}/sshd_check_keys"
-FILES:${PN}-sftp = "${bindir}/sftp"
-FILES:${PN}-sftp-server = "${libexecdir}/sftp-server"
-FILES:${PN}-misc = "${bindir}/ssh* ${libexecdir}/ssh*"
-FILES:${PN}-keygen = "${bindir}/ssh-keygen"
-
-RDEPENDS:${PN} += "${PN}-scp ${PN}-ssh ${PN}-sshd ${PN}-keygen ${PN}-sftp-server"
-RDEPENDS:${PN}-sshd += "${PN}-keygen ${@bb.utils.contains('DISTRO_FEATURES', 'pam', 'pam-plugin-keyinit pam-plugin-loginuid', '', d)}"
-RRECOMMENDS:${PN}-sshd:append:class-target = "\
- ${@bb.utils.filter('PACKAGECONFIG', 'rng-tools', d)} \
-"
-
-# gdb would make attach-ptrace test pass rather than skip but not worth the build dependencies
-RDEPENDS:${PN}-ptest += "${PN}-sftp ${PN}-misc ${PN}-sftp-server make sed sudo coreutils"
-
-RPROVIDES:${PN}-ssh = "ssh"
-RPROVIDES:${PN}-sshd = "sshd"
-
-RCONFLICTS:${PN} = "dropbear"
-RCONFLICTS:${PN}-sshd = "dropbear"
-
-CONFFILES:${PN}-sshd = "${sysconfdir}/ssh/sshd_config"
-CONFFILES:${PN}-ssh = "${sysconfdir}/ssh/ssh_config"
-
-ALTERNATIVE_PRIORITY = "90"
-ALTERNATIVE:${PN}-scp = "scp"
-ALTERNATIVE:${PN}-ssh = "ssh"
-
-BBCLASSEXTEND += "nativesdk"
diff --git a/meta/recipes-connectivity/openssh/openssh_9.7p1.bb b/meta/recipes-connectivity/openssh/openssh_9.7p1.bb
new file mode 100644
index 0000000000..d1468c59fc
--- /dev/null
+++ b/meta/recipes-connectivity/openssh/openssh_9.7p1.bb
@@ -0,0 +1,202 @@
+SUMMARY = "A suite of security-related network utilities based on \
+the SSH protocol including the ssh client and sshd server"
+DESCRIPTION = "Secure rlogin/rsh/rcp/telnet replacement (OpenSSH) \
+Ssh (Secure Shell) is a program for logging into a remote machine \
+and for executing commands on a remote machine."
+HOMEPAGE = "http://www.openssh.com/"
+SECTION = "console/network"
+LICENSE = "BSD-2-Clause & BSD-3-Clause & ISC & MIT"
+LIC_FILES_CHKSUM = "file://LICENCE;md5=072979064e691d342002f43cd89c0394"
+
+DEPENDS = "zlib openssl virtual/crypt"
+DEPENDS += "${@bb.utils.contains('DISTRO_FEATURES', 'pam', 'libpam', '', d)}"
+
+SRC_URI = "http://ftp.openbsd.org/pub/OpenBSD/OpenSSH/portable/openssh-${PV}.tar.gz \
+ file://sshd_config \
+ file://ssh_config \
+ file://init \
+ ${@bb.utils.contains('DISTRO_FEATURES', 'pam', '${PAM_SRC_URI}', '', d)} \
+ file://sshd.service \
+ file://sshd.socket \
+ file://sshd@.service \
+ file://sshdgenkeys.service \
+ file://volatiles.99_sshd \
+ file://run-ptest \
+ file://fix-potential-signed-overflow-in-pointer-arithmatic.patch \
+ file://sshd_check_keys \
+ file://add-test-support-for-busybox.patch \
+ file://0001-regress-banner.sh-log-input-and-output-files-on-erro.patch \
+ file://0001-systemd-Add-optional-support-for-systemd-sd_notify.patch \
+ "
+SRC_URI[sha256sum] = "490426f766d82a2763fcacd8d83ea3d70798750c7bd2aff2e57dc5660f773ffd"
+
+CVE_STATUS[CVE-2007-2768] = "not-applicable-config: This CVE is specific to OpenSSH with the pam opie which we don't build/use here."
+
+# This CVE is specific to OpenSSH server, as used in Fedora and Red Hat Enterprise Linux 7
+# and when running in a Kerberos environment. As such it is not relevant to OpenEmbedded
+CVE_STATUS[CVE-2014-9278] = "not-applicable-platform: This CVE is specific to OpenSSH server, as used in Fedora and \
+Red Hat Enterprise Linux 7 and when running in a Kerberos environment"
+
+CVE_STATUS[CVE-2008-3844] = "not-applicable-platform: Only applies to some distributed RHEL binaries."
+
+PAM_SRC_URI = "file://sshd"
+
+inherit manpages useradd update-rc.d update-alternatives systemd
+
+USERADD_PACKAGES = "${PN}-sshd"
+USERADD_PARAM:${PN}-sshd = "--system --no-create-home --home-dir /var/run/sshd --shell /bin/false --user-group sshd"
+INITSCRIPT_PACKAGES = "${PN}-sshd"
+INITSCRIPT_NAME:${PN}-sshd = "sshd"
+INITSCRIPT_PARAMS:${PN}-sshd = "defaults 9"
+
+SYSTEMD_PACKAGES = "${PN}-sshd"
+SYSTEMD_SERVICE:${PN}-sshd = "${@bb.utils.contains('PACKAGECONFIG','systemd-sshd-socket-mode','sshd.socket', '', d)} ${@bb.utils.contains('PACKAGECONFIG','systemd-sshd-service-mode','sshd.service', '', d)}"
+
+inherit autotools-brokensep ptest pkgconfig
+DEPENDS += "${@bb.utils.contains('DISTRO_FEATURES', 'systemd', 'systemd', '', d)}"
+
+# systemd-sshd-socket-mode means installing sshd.socket
+# and systemd-sshd-service-mode corresponding to sshd.service
+PACKAGECONFIG ??= "systemd-sshd-socket-mode"
+PACKAGECONFIG[fido2] = "--with-security-key-builtin,--disable-security-key,libfido2"
+PACKAGECONFIG[kerberos] = "--with-kerberos5,--without-kerberos5,krb5"
+PACKAGECONFIG[ldns] = "--with-ldns,--without-ldns,ldns"
+PACKAGECONFIG[libedit] = "--with-libedit,--without-libedit,libedit"
+PACKAGECONFIG[manpages] = "--with-mantype=man,--with-mantype=cat"
+PACKAGECONFIG[systemd-sshd-socket-mode] = ""
+PACKAGECONFIG[systemd-sshd-service-mode] = ""
+
+EXTRA_AUTORECONF += "--exclude=aclocal"
+
+# login path is hardcoded in sshd
+EXTRA_OECONF = "'LOGIN_PROGRAM=${base_bindir}/login' \
+ ${@bb.utils.contains('DISTRO_FEATURES', 'pam', '--with-pam', '--without-pam', d)} \
+ --without-zlib-version-check \
+ --with-privsep-path=${localstatedir}/run/sshd \
+ --sysconfdir=${sysconfdir}/ssh \
+ --with-xauth=${bindir}/xauth \
+ --disable-strip \
+ ${@bb.utils.contains('DISTRO_FEATURES', 'systemd', '--with-systemd', '--without-systemd', d)} \
+ "
+
+# musl doesn't implement wtmp/utmp and logwtmp
+EXTRA_OECONF:append:libc-musl = " --disable-wtmp --disable-lastlog"
+
+# Work around ICE on mips/mips64 starting in 9.6p1
+EXTRA_OECONF:append:mips = " --without-hardening"
+EXTRA_OECONF:append:mips64 = " --without-hardening"
+
+# Work around ICE on powerpc64le starting in 9.6p1
+EXTRA_OECONF:append:powerpc64le = " --without-hardening"
+
+# Since we do not depend on libbsd, we do not want configure to use it
+# just because it finds libutil.h. But, specifying --disable-libutil
+# causes compile errors, so...
+CACHED_CONFIGUREVARS += "ac_cv_header_bsd_libutil_h=no ac_cv_header_libutil_h=no"
+
+# passwd path is hardcoded in sshd
+CACHED_CONFIGUREVARS += "ac_cv_path_PATH_PASSWD_PROG=${bindir}/passwd"
+
+# We don't want to depend on libblockfile
+CACHED_CONFIGUREVARS += "ac_cv_header_maillock_h=no"
+
+do_configure:prepend () {
+ export LD="${CC}"
+ install -m 0644 ${WORKDIR}/sshd_config ${B}/
+ install -m 0644 ${WORKDIR}/ssh_config ${B}/
+}
+
+do_compile_ptest() {
+ oe_runmake regress-binaries regress-unit-binaries
+}
+
+do_install:append () {
+ if [ "${@bb.utils.filter('DISTRO_FEATURES', 'pam', d)}" ]; then
+ install -D -m 0644 ${WORKDIR}/sshd ${D}${sysconfdir}/pam.d/sshd
+ sed -i -e 's:#UsePAM no:UsePAM yes:' ${D}${sysconfdir}/ssh/sshd_config
+ fi
+
+ if [ "${@bb.utils.filter('DISTRO_FEATURES', 'x11', d)}" ]; then
+ sed -i -e 's:#X11Forwarding no:X11Forwarding yes:' ${D}${sysconfdir}/ssh/sshd_config
+ fi
+
+ install -d ${D}${sysconfdir}/init.d
+ install -m 0755 ${WORKDIR}/init ${D}${sysconfdir}/init.d/sshd
+ rm -f ${D}${bindir}/slogin ${D}${datadir}/Ssh.bin
+ rmdir ${D}${localstatedir}/run/sshd ${D}${localstatedir}/run ${D}${localstatedir}
+ install -d ${D}/${sysconfdir}/default/volatiles
+ install -m 644 ${WORKDIR}/volatiles.99_sshd ${D}/${sysconfdir}/default/volatiles/99_sshd
+ install -m 0755 ${S}/contrib/ssh-copy-id ${D}${bindir}
+
+ # Create config files for read-only rootfs
+ install -d ${D}${sysconfdir}/ssh
+ install -m 644 ${D}${sysconfdir}/ssh/sshd_config ${D}${sysconfdir}/ssh/sshd_config_readonly
+ sed -i '/HostKey/d' ${D}${sysconfdir}/ssh/sshd_config_readonly
+ echo "HostKey /var/run/ssh/ssh_host_rsa_key" >> ${D}${sysconfdir}/ssh/sshd_config_readonly
+ echo "HostKey /var/run/ssh/ssh_host_ecdsa_key" >> ${D}${sysconfdir}/ssh/sshd_config_readonly
+ echo "HostKey /var/run/ssh/ssh_host_ed25519_key" >> ${D}${sysconfdir}/ssh/sshd_config_readonly
+
+ install -d ${D}${systemd_system_unitdir}
+ if ${@bb.utils.contains('PACKAGECONFIG','systemd-sshd-socket-mode','true','false',d)}; then
+ install -c -m 0644 ${WORKDIR}/sshd.socket ${D}${systemd_system_unitdir}
+ install -c -m 0644 ${WORKDIR}/sshd@.service ${D}${systemd_system_unitdir}
+ sed -i -e 's,@BASE_BINDIR@,${base_bindir},g' \
+ -e 's,@SBINDIR@,${sbindir},g' \
+ -e 's,@BINDIR@,${bindir},g' \
+ -e 's,@LIBEXECDIR@,${libexecdir}/${BPN},g' \
+ ${D}${systemd_system_unitdir}/sshd.socket
+ fi
+ if ${@bb.utils.contains('PACKAGECONFIG','systemd-sshd-service-mode','true','false',d)}; then
+ install -c -m 0644 ${WORKDIR}/sshd.service ${D}${systemd_system_unitdir}
+ fi
+ install -c -m 0644 ${WORKDIR}/sshdgenkeys.service ${D}${systemd_system_unitdir}
+ sed -i -e 's,@BASE_BINDIR@,${base_bindir},g' \
+ -e 's,@SBINDIR@,${sbindir},g' \
+ -e 's,@BINDIR@,${bindir},g' \
+ -e 's,@LIBEXECDIR@,${libexecdir}/${BPN},g' \
+ ${D}${systemd_system_unitdir}/*.service
+
+ sed -i -e 's,@LIBEXECDIR@,${libexecdir}/${BPN},g' \
+ ${D}${sysconfdir}/init.d/sshd
+
+ install -D -m 0755 ${WORKDIR}/sshd_check_keys ${D}${libexecdir}/${BPN}/sshd_check_keys
+}
+
+do_install_ptest () {
+ sed -i -e "s|^SFTPSERVER=.*|SFTPSERVER=${libexecdir}/sftp-server|" regress/test-exec.sh
+ cp -r regress ${D}${PTEST_PATH}
+ cp config.h ${D}${PTEST_PATH}
+}
+
+ALLOW_EMPTY:${PN} = "1"
+
+PACKAGES =+ "${PN}-keygen ${PN}-scp ${PN}-ssh ${PN}-sshd ${PN}-sftp ${PN}-misc ${PN}-sftp-server"
+FILES:${PN}-scp = "${bindir}/scp.${BPN}"
+FILES:${PN}-ssh = "${bindir}/ssh.${BPN} ${sysconfdir}/ssh/ssh_config"
+FILES:${PN}-sshd = "${sbindir}/sshd ${sysconfdir}/init.d/sshd ${systemd_system_unitdir}"
+FILES:${PN}-sshd += "${sysconfdir}/ssh/moduli ${sysconfdir}/ssh/sshd_config ${sysconfdir}/ssh/sshd_config_readonly ${sysconfdir}/default/volatiles/99_sshd ${sysconfdir}/pam.d/sshd"
+FILES:${PN}-sshd += "${libexecdir}/${BPN}/sshd_check_keys"
+FILES:${PN}-sftp = "${bindir}/sftp"
+FILES:${PN}-sftp-server = "${libexecdir}/sftp-server"
+FILES:${PN}-misc = "${bindir}/ssh* ${libexecdir}/ssh*"
+FILES:${PN}-keygen = "${bindir}/ssh-keygen"
+
+RDEPENDS:${PN} += "${PN}-scp ${PN}-ssh ${PN}-sshd ${PN}-keygen ${PN}-sftp-server"
+RDEPENDS:${PN}-sshd += "${PN}-keygen ${@bb.utils.contains('DISTRO_FEATURES', 'pam', 'pam-plugin-keyinit pam-plugin-loginuid', '', d)}"
+# gdb would make attach-ptrace test pass rather than skip but not worth the build dependencies
+RDEPENDS:${PN}-ptest += "${PN}-sftp ${PN}-misc ${PN}-sftp-server make sed coreutils openssl-bin"
+
+RPROVIDES:${PN}-ssh = "ssh"
+RPROVIDES:${PN}-sshd = "sshd"
+
+RCONFLICTS:${PN} = "dropbear"
+RCONFLICTS:${PN}-sshd = "dropbear"
+
+CONFFILES:${PN}-sshd = "${sysconfdir}/ssh/sshd_config"
+CONFFILES:${PN}-ssh = "${sysconfdir}/ssh/ssh_config"
+
+ALTERNATIVE_PRIORITY = "90"
+ALTERNATIVE:${PN}-scp = "scp"
+ALTERNATIVE:${PN}-ssh = "ssh"
+
+BBCLASSEXTEND += "nativesdk"
diff --git a/meta/recipes-connectivity/openssl/files/environment.d-openssl.sh b/meta/recipes-connectivity/openssl/files/environment.d-openssl.sh
index b9cc24a7ac..6f23490c87 100644
--- a/meta/recipes-connectivity/openssl/files/environment.d-openssl.sh
+++ b/meta/recipes-connectivity/openssl/files/environment.d-openssl.sh
@@ -1 +1,5 @@
export OPENSSL_CONF="$OECORE_NATIVE_SYSROOT/usr/lib/ssl/openssl.cnf"
+export SSL_CERT_DIR="$OECORE_NATIVE_SYSROOT/usr/lib/ssl/certs"
+export SSL_CERT_FILE="$OECORE_NATIVE_SYSROOT/usr/lib/ssl/certs/ca-certificates.crt"
+export OPENSSL_MODULES="$OECORE_NATIVE_SYSROOT/usr/lib/ossl-modules/"
+export OPENSSL_ENGINES="$OECORE_NATIVE_SYSROOT/usr/lib/engines-3"
diff --git a/meta/recipes-connectivity/openssl/openssl/0001-Added-handshake-history-reporting-when-test-fails.patch b/meta/recipes-connectivity/openssl/openssl/0001-Added-handshake-history-reporting-when-test-fails.patch
new file mode 100644
index 0000000000..aa2e5bb800
--- /dev/null
+++ b/meta/recipes-connectivity/openssl/openssl/0001-Added-handshake-history-reporting-when-test-fails.patch
@@ -0,0 +1,374 @@
+From 5ba65051fea0513db0d997f0ab7cafb9826ed74a Mon Sep 17 00:00:00 2001
+From: William Lyu <William.Lyu@windriver.com>
+Date: Fri, 20 Oct 2023 16:22:37 -0400
+Subject: [PATCH] Added handshake history reporting when test fails
+
+Upstream-Status: Submitted [https://github.com/openssl/openssl/pull/22481]
+
+Signed-off-by: William Lyu <William.Lyu@windriver.com>
+---
+ test/helpers/handshake.c | 139 +++++++++++++++++++++++++++++----------
+ test/helpers/handshake.h | 70 +++++++++++++++++++-
+ test/ssl_test.c | 44 +++++++++++++
+ 3 files changed, 218 insertions(+), 35 deletions(-)
+
+diff --git a/test/helpers/handshake.c b/test/helpers/handshake.c
+index e0422469e4..ae2ad59dd4 100644
+--- a/test/helpers/handshake.c
++++ b/test/helpers/handshake.c
+@@ -1,5 +1,5 @@
+ /*
+- * Copyright 2016-2022 The OpenSSL Project Authors. All Rights Reserved.
++ * Copyright 2016-2023 The OpenSSL Project Authors. All Rights Reserved.
+ *
+ * Licensed under the Apache License 2.0 (the "License"). You may not use
+ * this file except in compliance with the License. You can obtain a copy
+@@ -24,6 +24,102 @@
+ #include <netinet/sctp.h>
+ #endif
+
++/* Shamelessly copied from test/helpers/ssl_test_ctx.c */
++/* Maps string names to various enumeration type */
++typedef struct {
++ const char *name;
++ int value;
++} enum_name_map;
++
++static const enum_name_map connect_phase_names[] = {
++ {"Handshake", HANDSHAKE},
++ {"RenegAppData", RENEG_APPLICATION_DATA},
++ {"RenegSetup", RENEG_SETUP},
++ {"RenegHandshake", RENEG_HANDSHAKE},
++ {"AppData", APPLICATION_DATA},
++ {"Shutdown", SHUTDOWN},
++ {"ConnectionDone", CONNECTION_DONE}
++};
++
++static const enum_name_map peer_status_names[] = {
++ {"PeerSuccess", PEER_SUCCESS},
++ {"PeerRetry", PEER_RETRY},
++ {"PeerError", PEER_ERROR},
++ {"PeerWaiting", PEER_WAITING},
++ {"PeerTestFail", PEER_TEST_FAILURE}
++};
++
++static const enum_name_map handshake_status_names[] = {
++ {"HandshakeSuccess", HANDSHAKE_SUCCESS},
++ {"ClientError", CLIENT_ERROR},
++ {"ServerError", SERVER_ERROR},
++ {"InternalError", INTERNAL_ERROR},
++ {"HandshakeRetry", HANDSHAKE_RETRY}
++};
++
++/* Shamelessly copied from test/helpers/ssl_test_ctx.c */
++static const char *enum_name(const enum_name_map *enums, size_t num_enums,
++ int value)
++{
++ size_t i;
++ for (i = 0; i < num_enums; i++) {
++ if (enums[i].value == value) {
++ return enums[i].name;
++ }
++ }
++ return "InvalidValue";
++}
++
++const char *handshake_connect_phase_name(connect_phase_t phase)
++{
++ return enum_name(connect_phase_names, OSSL_NELEM(connect_phase_names),
++ (int)phase);
++}
++
++const char *handshake_status_name(handshake_status_t handshake_status)
++{
++ return enum_name(handshake_status_names, OSSL_NELEM(handshake_status_names),
++ (int)handshake_status);
++}
++
++const char *handshake_peer_status_name(peer_status_t peer_status)
++{
++ return enum_name(peer_status_names, OSSL_NELEM(peer_status_names),
++ (int)peer_status);
++}
++
++static void save_loop_history(HANDSHAKE_HISTORY *history,
++ connect_phase_t phase,
++ handshake_status_t handshake_status,
++ peer_status_t server_status,
++ peer_status_t client_status,
++ int client_turn_count,
++ int is_client_turn)
++{
++ HANDSHAKE_HISTORY_ENTRY *new_entry = NULL;
++
++ /*
++ * Create a new history entry for a handshake loop with statuses given in
++ * the arguments. Potentially evicting the oldest entry when the
++ * ring buffer is full.
++ */
++ ++(history->last_idx);
++ history->last_idx &= MAX_HANDSHAKE_HISTORY_ENTRY_IDX_MASK;
++
++ new_entry = &((history->entries)[history->last_idx]);
++ new_entry->phase = phase;
++ new_entry->handshake_status = handshake_status;
++ new_entry->server_status = server_status;
++ new_entry->client_status = client_status;
++ new_entry->client_turn_count = client_turn_count;
++ new_entry->is_client_turn = is_client_turn;
++
++ /* Evict the oldest handshake loop entry when the ring buffer is full. */
++ if (history->entry_count < MAX_HANDSHAKE_HISTORY_ENTRY) {
++ ++(history->entry_count);
++ }
++}
++
+ HANDSHAKE_RESULT *HANDSHAKE_RESULT_new(void)
+ {
+ HANDSHAKE_RESULT *ret;
+@@ -719,15 +815,6 @@ static void configure_handshake_ssl(SSL *server, SSL *client,
+ SSL_set_post_handshake_auth(client, 1);
+ }
+
+-/* The status for each connection phase. */
+-typedef enum {
+- PEER_SUCCESS,
+- PEER_RETRY,
+- PEER_ERROR,
+- PEER_WAITING,
+- PEER_TEST_FAILURE
+-} peer_status_t;
+-
+ /* An SSL object and associated read-write buffers. */
+ typedef struct peer_st {
+ SSL *ssl;
+@@ -1074,17 +1161,6 @@ static void do_shutdown_step(PEER *peer)
+ }
+ }
+
+-typedef enum {
+- HANDSHAKE,
+- RENEG_APPLICATION_DATA,
+- RENEG_SETUP,
+- RENEG_HANDSHAKE,
+- APPLICATION_DATA,
+- SHUTDOWN,
+- CONNECTION_DONE
+-} connect_phase_t;
+-
+-
+ static int renegotiate_op(const SSL_TEST_CTX *test_ctx)
+ {
+ switch (test_ctx->handshake_mode) {
+@@ -1162,19 +1238,6 @@ static void do_connect_step(const SSL_TEST_CTX *test_ctx, PEER *peer,
+ }
+ }
+
+-typedef enum {
+- /* Both parties succeeded. */
+- HANDSHAKE_SUCCESS,
+- /* Client errored. */
+- CLIENT_ERROR,
+- /* Server errored. */
+- SERVER_ERROR,
+- /* Peers are in inconsistent state. */
+- INTERNAL_ERROR,
+- /* One or both peers not done. */
+- HANDSHAKE_RETRY
+-} handshake_status_t;
+-
+ /*
+ * Determine the handshake outcome.
+ * last_status: the status of the peer to have acted last.
+@@ -1539,6 +1602,10 @@ static HANDSHAKE_RESULT *do_handshake_internal(
+
+ start = time(NULL);
+
++ save_loop_history(&(ret->history),
++ phase, status, server.status, client.status,
++ client_turn_count, client_turn);
++
+ /*
+ * Half-duplex handshake loop.
+ * Client and server speak to each other synchronously in the same process.
+@@ -1560,6 +1627,10 @@ static HANDSHAKE_RESULT *do_handshake_internal(
+ 0 /* server went last */);
+ }
+
++ save_loop_history(&(ret->history),
++ phase, status, server.status, client.status,
++ client_turn_count, client_turn);
++
+ switch (status) {
+ case HANDSHAKE_SUCCESS:
+ client_turn_count = 0;
+diff --git a/test/helpers/handshake.h b/test/helpers/handshake.h
+index 78b03f9f4b..b9967c2623 100644
+--- a/test/helpers/handshake.h
++++ b/test/helpers/handshake.h
+@@ -1,5 +1,5 @@
+ /*
+- * Copyright 2016-2021 The OpenSSL Project Authors. All Rights Reserved.
++ * Copyright 2016-2023 The OpenSSL Project Authors. All Rights Reserved.
+ *
+ * Licensed under the Apache License 2.0 (the "License"). You may not use
+ * this file except in compliance with the License. You can obtain a copy
+@@ -12,6 +12,11 @@
+
+ #include "ssl_test_ctx.h"
+
++#define MAX_HANDSHAKE_HISTORY_ENTRY_BIT 4
++#define MAX_HANDSHAKE_HISTORY_ENTRY (1 << MAX_HANDSHAKE_HISTORY_ENTRY_BIT)
++#define MAX_HANDSHAKE_HISTORY_ENTRY_IDX_MASK \
++ ((1 << MAX_HANDSHAKE_HISTORY_ENTRY_BIT) - 1)
++
+ typedef struct ctx_data_st {
+ unsigned char *npn_protocols;
+ size_t npn_protocols_len;
+@@ -22,6 +27,63 @@ typedef struct ctx_data_st {
+ char *session_ticket_app_data;
+ } CTX_DATA;
+
++typedef enum {
++ HANDSHAKE,
++ RENEG_APPLICATION_DATA,
++ RENEG_SETUP,
++ RENEG_HANDSHAKE,
++ APPLICATION_DATA,
++ SHUTDOWN,
++ CONNECTION_DONE
++} connect_phase_t;
++
++/* The status for each connection phase. */
++typedef enum {
++ PEER_SUCCESS,
++ PEER_RETRY,
++ PEER_ERROR,
++ PEER_WAITING,
++ PEER_TEST_FAILURE
++} peer_status_t;
++
++typedef enum {
++ /* Both parties succeeded. */
++ HANDSHAKE_SUCCESS,
++ /* Client errored. */
++ CLIENT_ERROR,
++ /* Server errored. */
++ SERVER_ERROR,
++ /* Peers are in inconsistent state. */
++ INTERNAL_ERROR,
++ /* One or both peers not done. */
++ HANDSHAKE_RETRY
++} handshake_status_t;
++
++/* Stores the various status information in a handshake loop. */
++typedef struct handshake_history_entry_st {
++ connect_phase_t phase;
++ handshake_status_t handshake_status;
++ peer_status_t server_status;
++ peer_status_t client_status;
++ int client_turn_count;
++ int is_client_turn;
++} HANDSHAKE_HISTORY_ENTRY;
++
++typedef struct handshake_history_st {
++ /* Implemented using ring buffer. */
++ /*
++ * The valid entries are |entries[last_idx]|, |entries[last_idx-1]|,
++ * ..., etc., going up to |entry_count| number of entries. Note that when
++ * the index into the array |entries| becomes < 0, we wrap around to
++ * the end of |entries|.
++ */
++ HANDSHAKE_HISTORY_ENTRY entries[MAX_HANDSHAKE_HISTORY_ENTRY];
++ /* The number of valid entries in |entries| array. */
++ size_t entry_count;
++ /* The index of the last valid entry in the |entries| array. */
++ size_t last_idx;
++} HANDSHAKE_HISTORY;
++
+ typedef struct handshake_result {
+ ssl_test_result_t result;
+ /* These alerts are in the 2-byte format returned by the info_callback. */
+@@ -77,6 +139,8 @@ typedef struct handshake_result {
+ char *cipher;
+ /* session ticket application data */
+ char *result_session_ticket_app_data;
++ /* handshake loop history */
++ HANDSHAKE_HISTORY history;
+ } HANDSHAKE_RESULT;
+
+ HANDSHAKE_RESULT *HANDSHAKE_RESULT_new(void);
+@@ -95,4 +159,8 @@ int configure_handshake_ctx_for_srp(SSL_CTX *server_ctx, SSL_CTX *server2_ctx,
+ CTX_DATA *server2_ctx_data,
+ CTX_DATA *client_ctx_data);
+
++const char *handshake_connect_phase_name(connect_phase_t phase);
++const char *handshake_status_name(handshake_status_t handshake_status);
++const char *handshake_peer_status_name(peer_status_t peer_status);
++
+ #endif /* OSSL_TEST_HANDSHAKE_HELPER_H */
+diff --git a/test/ssl_test.c b/test/ssl_test.c
+index ea608518f9..9d6b093c81 100644
+--- a/test/ssl_test.c
++++ b/test/ssl_test.c
+@@ -26,6 +26,44 @@ static OSSL_LIB_CTX *libctx = NULL;
+ /* Currently the section names are of the form test-<number>, e.g. test-15. */
+ #define MAX_TESTCASE_NAME_LENGTH 100
+
++static void print_handshake_history(const HANDSHAKE_HISTORY *history)
++{
++ size_t first_idx;
++ size_t i;
++ size_t cur_idx;
++ const HANDSHAKE_HISTORY_ENTRY *cur_entry;
++ const char header_template[] = "|%14s|%16s|%16s|%16s|%17s|%14s|";
++ const char body_template[] = "|%14s|%16s|%16s|%16s|%17d|%14s|";
++
++ TEST_info("The following is the server/client state "
++ "in the most recent %d handshake loops.",
++ MAX_HANDSHAKE_HISTORY_ENTRY);
++
++ TEST_note("=================================================="
++ "==================================================");
++ TEST_note(header_template,
++ "phase", "handshake status", "server status",
++ "client status", "client turn count", "is client turn");
++ TEST_note("+--------------+----------------+----------------"
++ "+----------------+-----------------+--------------+");
++
++ first_idx = (history->last_idx - history->entry_count + 1) &
++ MAX_HANDSHAKE_HISTORY_ENTRY_IDX_MASK;
++ for (i = 0; i < history->entry_count; ++i) {
++ cur_idx = (first_idx + i) & MAX_HANDSHAKE_HISTORY_ENTRY_IDX_MASK;
++ cur_entry = &(history->entries)[cur_idx];
++ TEST_note(body_template,
++ handshake_connect_phase_name(cur_entry->phase),
++ handshake_status_name(cur_entry->handshake_status),
++ handshake_peer_status_name(cur_entry->server_status),
++ handshake_peer_status_name(cur_entry->client_status),
++ cur_entry->client_turn_count,
++ cur_entry->is_client_turn ? "true" : "false");
++ }
++ TEST_note("=================================================="
++ "==================================================");
++}
++
+ static const char *print_alert(int alert)
+ {
+ return alert ? SSL_alert_desc_string_long(alert) : "no alert";
+@@ -388,6 +426,12 @@ static int check_test(HANDSHAKE_RESULT *result, SSL_TEST_CTX *test_ctx)
+ ret &= check_client_sign_type(result, test_ctx);
+ ret &= check_client_ca_names(result, test_ctx);
+ }
++
++ /* Print handshake loop history if any check fails. */
++ if (!ret) {
++ print_handshake_history(&(result->history));
++ }
++
+ return ret;
+ }
+
+--
+2.25.1
+
diff --git a/meta/recipes-connectivity/openssl/openssl/0001-Configure-do-not-tweak-mips-cflags.patch b/meta/recipes-connectivity/openssl/openssl/0001-Configure-do-not-tweak-mips-cflags.patch
index 5effa6c6f6..502a7aaf32 100644
--- a/meta/recipes-connectivity/openssl/openssl/0001-Configure-do-not-tweak-mips-cflags.patch
+++ b/meta/recipes-connectivity/openssl/openssl/0001-Configure-do-not-tweak-mips-cflags.patch
@@ -1,6 +1,6 @@
-From 326909baf81a638d51fa8be1d8227518784f5cc4 Mon Sep 17 00:00:00 2001
+From 0377f0d5b5c1079e3b9a80881f4dcc891cbe9f9a Mon Sep 17 00:00:00 2001
From: Alexander Kanavin <alex@linutronix.de>
-Date: Tue, 14 Sep 2021 12:18:25 +0200
+Date: Tue, 30 May 2023 09:11:27 -0700
Subject: [PATCH] Configure: do not tweak mips cflags
This conflicts with mips machine definitons from yocto,
@@ -9,12 +9,15 @@ e.g.
Upstream-Status: Inappropriate [oe-core specific]
Signed-off-by: Alexander Kanavin <alex@linutronix.de>
+
+Refreshed for openssl-3.1.1
+Signed-off-by: Tim Orling <tim.orling@konsulko.com>
---
Configure | 10 ----------
1 file changed, 10 deletions(-)
diff --git a/Configure b/Configure
-index 821e680..0387a74 100755
+index 4569952..adf019b 100755
--- a/Configure
+++ b/Configure
@@ -1422,16 +1422,6 @@ if ($target =~ /^mingw/ && `$config{CC} --target-help 2>&1` =~ m/-mno-cygwin/m)
@@ -22,7 +25,7 @@ index 821e680..0387a74 100755
}
-if ($target =~ /linux.*-mips/ && !$disabled{asm}
-- && !grep { $_ !~ /-m(ips|arch=)/ } (@{$config{CFLAGS}})) {
+- && !grep { $_ =~ /-m(ips|arch=)/ } (@{$config{CFLAGS}})) {
- # minimally required architecture flags for assembly modules
- my $value;
- $value = '-mips2' if ($target =~ /mips32/);
diff --git a/meta/recipes-connectivity/openssl/openssl/0001-buildinfo-strip-sysroot-and-debug-prefix-map-from-co.patch b/meta/recipes-connectivity/openssl/openssl/0001-buildinfo-strip-sysroot-and-debug-prefix-map-from-co.patch
index 60890c666d..bafdbaa46f 100644
--- a/meta/recipes-connectivity/openssl/openssl/0001-buildinfo-strip-sysroot-and-debug-prefix-map-from-co.patch
+++ b/meta/recipes-connectivity/openssl/openssl/0001-buildinfo-strip-sysroot-and-debug-prefix-map-from-co.patch
@@ -34,11 +34,11 @@ Signed-off-by: Khem Raj <raj.khem@gmail.com>
crypto/build.info | 2 +-
2 files changed, 12 insertions(+), 2 deletions(-)
-diff --git a/Configurations/unix-Makefile.tmpl b/Configurations/unix-Makefile.tmpl
-index f88a70f..528cdef 100644
---- a/Configurations/unix-Makefile.tmpl
-+++ b/Configurations/unix-Makefile.tmpl
-@@ -471,13 +471,23 @@ BIN_LDFLAGS={- join(' ', $target{bin_lflags} || (),
+Index: openssl-3.0.4/Configurations/unix-Makefile.tmpl
+===================================================================
+--- openssl-3.0.4.orig/Configurations/unix-Makefile.tmpl
++++ openssl-3.0.4/Configurations/unix-Makefile.tmpl
+@@ -472,13 +472,23 @@ BIN_LDFLAGS={- join(' ', $target{bin_lfl
'$(CNF_LDFLAGS)', '$(LDFLAGS)') -}
BIN_EX_LIBS=$(CNF_EX_LIBS) $(EX_LIBS)
@@ -63,10 +63,10 @@ index f88a70f..528cdef 100644
PERLASM_SCHEME= {- $target{perlasm_scheme} -}
# For x86 assembler: Set PROCESSOR to 386 if you want to support
-diff --git a/crypto/build.info b/crypto/build.info
-index efca6cc..eda433e 100644
---- a/crypto/build.info
-+++ b/crypto/build.info
+Index: openssl-3.0.4/crypto/build.info
+===================================================================
+--- openssl-3.0.4.orig/crypto/build.info
++++ openssl-3.0.4/crypto/build.info
@@ -109,7 +109,7 @@ DEFINE[../libcrypto]=$UPLINKDEF
DEPEND[info.o]=buildinf.h
@@ -74,5 +74,5 @@ index efca6cc..eda433e 100644
-GENERATE[buildinf.h]=../util/mkbuildinf.pl "$(CC) $(LIB_CFLAGS) $(CPPFLAGS_Q)" "$(PLATFORM)"
+GENERATE[buildinf.h]=../util/mkbuildinf.pl "$(CC_Q) $(CFLAGS_Q) $(CPPFLAGS_Q)" "$(PLATFORM)"
- GENERATE[uplink-x86.s]=../ms/uplink-x86.pl
+ GENERATE[uplink-x86.S]=../ms/uplink-x86.pl
GENERATE[uplink-x86_64.s]=../ms/uplink-x86_64.pl
diff --git a/meta/recipes-connectivity/openssl/openssl/CVE-2024-2511.patch b/meta/recipes-connectivity/openssl/openssl/CVE-2024-2511.patch
new file mode 100644
index 0000000000..8772f716d5
--- /dev/null
+++ b/meta/recipes-connectivity/openssl/openssl/CVE-2024-2511.patch
@@ -0,0 +1,120 @@
+From e9d7083e241670332e0443da0f0d4ffb52829f08 Mon Sep 17 00:00:00 2001
+From: Matt Caswell <matt@openssl.org>
+Date: Tue, 5 Mar 2024 15:43:53 +0000
+Subject: [PATCH] Fix unconstrained session cache growth in TLSv1.3
+
+In TLSv1.3 we create a new session object for each ticket that we send.
+We do this by duplicating the original session. If SSL_OP_NO_TICKET is in
+use then the new session will be added to the session cache. However, if
+early data is not in use (and therefore anti-replay protection is being
+used), then multiple threads could be resuming from the same session
+simultaneously. If this happens and a problem occurs on one of the threads,
+then the original session object could be marked as not_resumable. When we
+duplicate the session object this not_resumable status gets copied into the
+new session object. The new session object is then added to the session
+cache even though it is not_resumable.
+
+Subsequently, another bug means that the session_id_length is set to 0 for
+sessions that are marked as not_resumable - even though that session is
+still in the cache. Once this happens the session can never be removed from
+the cache. When that object gets to be the session cache tail object the
+cache never shrinks again and grows indefinitely.
+
+CVE-2024-2511
+
+Reviewed-by: Neil Horman <nhorman@openssl.org>
+Reviewed-by: Tomas Mraz <tomas@openssl.org>
+(Merged from https://github.com/openssl/openssl/pull/24043)
+
+CVE: CVE-2024-2511
+Upstream-Status: Backport [https://github.com/openssl/openssl/commit/e9d7083e241670332e0443da0f0d4ffb52829f08]
+Signed-off-by: Peter Marko <peter.marko@siemens.com>
+---
+ ssl/ssl_lib.c | 5 +++--
+ ssl/ssl_sess.c | 28 ++++++++++++++++++++++------
+ ssl/statem/statem_srvr.c | 5 ++---
+ 3 files changed, 27 insertions(+), 11 deletions(-)
+
+diff --git a/ssl/ssl_lib.c b/ssl/ssl_lib.c
+index 4afb43bc86e54..c51529ddab5bb 100644
+--- a/ssl/ssl_lib.c
++++ b/ssl/ssl_lib.c
+@@ -4457,9 +4457,10 @@ void ssl_update_cache(SSL_CONNECTION *s, int mode)
+
+ /*
+ * If the session_id_length is 0, we are not supposed to cache it, and it
+- * would be rather hard to do anyway :-)
++ * would be rather hard to do anyway :-). Also if the session has already
++ * been marked as not_resumable we should not cache it for later reuse.
+ */
+- if (s->session->session_id_length == 0)
++ if (s->session->session_id_length == 0 || s->session->not_resumable)
+ return;
+
+ /*
+diff --git a/ssl/ssl_sess.c b/ssl/ssl_sess.c
+index 3dcc4d81e5bc6..1fa6d17c46863 100644
+--- a/ssl/ssl_sess.c
++++ b/ssl/ssl_sess.c
+@@ -127,16 +127,11 @@ SSL_SESSION *SSL_SESSION_new(void)
+ return ss;
+ }
+
+-SSL_SESSION *SSL_SESSION_dup(const SSL_SESSION *src)
+-{
+- return ssl_session_dup(src, 1);
+-}
+-
+ /*
+ * Create a new SSL_SESSION and duplicate the contents of |src| into it. If
+ * ticket == 0 then no ticket information is duplicated, otherwise it is.
+ */
+-SSL_SESSION *ssl_session_dup(const SSL_SESSION *src, int ticket)
++static SSL_SESSION *ssl_session_dup_intern(const SSL_SESSION *src, int ticket)
+ {
+ SSL_SESSION *dest;
+
+@@ -265,6 +260,27 @@ SSL_SESSION *ssl_session_dup(const SSL_SESSION *src, int ticket)
+ return NULL;
+ }
+
++SSL_SESSION *SSL_SESSION_dup(const SSL_SESSION *src)
++{
++ return ssl_session_dup_intern(src, 1);
++}
++
++/*
++ * Used internally when duplicating a session which might be already shared.
++ * We will have resumed the original session. Subsequently we might have marked
++ * it as non-resumable (e.g. in another thread) - but this copy should be ok to
++ * resume from.
++ */
++SSL_SESSION *ssl_session_dup(const SSL_SESSION *src, int ticket)
++{
++ SSL_SESSION *sess = ssl_session_dup_intern(src, ticket);
++
++ if (sess != NULL)
++ sess->not_resumable = 0;
++
++ return sess;
++}
++
+ const unsigned char *SSL_SESSION_get_id(const SSL_SESSION *s, unsigned int *len)
+ {
+ if (len)
+diff --git a/ssl/statem/statem_srvr.c b/ssl/statem/statem_srvr.c
+index 853af8c0aa9f9..d5f0ab091dacc 100644
+--- a/ssl/statem/statem_srvr.c
++++ b/ssl/statem/statem_srvr.c
+@@ -2445,9 +2445,8 @@ CON_FUNC_RETURN tls_construct_server_hello(SSL_CONNECTION *s, WPACKET *pkt)
+ * so the following won't overwrite an ID that we're supposed
+ * to send back.
+ */
+- if (s->session->not_resumable ||
+- (!(SSL_CONNECTION_GET_CTX(s)->session_cache_mode & SSL_SESS_CACHE_SERVER)
+- && !s->hit))
++ if (!(SSL_CONNECTION_GET_CTX(s)->session_cache_mode & SSL_SESS_CACHE_SERVER)
++ && !s->hit)
+ s->session->session_id_length = 0;
+
+ if (usetls13) {
diff --git a/meta/recipes-connectivity/openssl/openssl/afalg.patch b/meta/recipes-connectivity/openssl/openssl/afalg.patch
deleted file mode 100644
index b7c0e9697f..0000000000
--- a/meta/recipes-connectivity/openssl/openssl/afalg.patch
+++ /dev/null
@@ -1,31 +0,0 @@
-Don't refuse to build afalgeng if cross-compiling or the host kernel is too old.
-
-Upstream-Status: Submitted [hhttps://github.com/openssl/openssl/pull/7688]
-Signed-off-by: Ross Burton <ross.burton@intel.com>
-
-diff --git a/Configure b/Configure
-index 3baa8ce..9ef52ed 100755
---- a/Configure
-+++ b/Configure
-@@ -1550,20 +1550,7 @@ unless ($disabled{"crypto-mdebug-backtrace"})
- unless ($disabled{afalgeng}) {
- $config{afalgeng}="";
- if (grep { $_ eq 'afalgeng' } @{$target{enable}}) {
-- my $minver = 4*10000 + 1*100 + 0;
-- if ($config{CROSS_COMPILE} eq "") {
-- my $verstr = `uname -r`;
-- my ($ma, $mi1, $mi2) = split("\\.", $verstr);
-- ($mi2) = $mi2 =~ /(\d+)/;
-- my $ver = $ma*10000 + $mi1*100 + $mi2;
-- if ($ver < $minver) {
-- disable('too-old-kernel', 'afalgeng');
-- } else {
-- push @{$config{engdirs}}, "afalg";
-- }
-- } else {
-- disable('cross-compiling', 'afalgeng');
-- }
-+ push @{$config{engdirs}}, "afalg";
- } else {
- disable('not-linux', 'afalgeng');
- }
diff --git a/meta/recipes-connectivity/openssl/openssl/bti.patch b/meta/recipes-connectivity/openssl/openssl/bti.patch
new file mode 100644
index 0000000000..748576c30c
--- /dev/null
+++ b/meta/recipes-connectivity/openssl/openssl/bti.patch
@@ -0,0 +1,58 @@
+From ba8a599395f8b770c76316b5f5b0f3838567014f Mon Sep 17 00:00:00 2001
+From: Tom Cosgrove <tom.cosgrove@arm.com>
+Date: Tue, 26 Mar 2024 13:18:00 +0000
+Subject: [PATCH] aarch64: fix BTI in bsaes assembly code
+
+In Arm systems where BTI is enabled but the Crypto extensions are not (more
+likely in FVPs than in real hardware), the bit-sliced assembler code will
+be used. However, this wasn't annotated with BTI instructions when BTI was
+enabled, so the moment libssl jumps into this code it (correctly) aborts.
+
+Solve this by adding the missing BTI landing pads.
+
+Upstream-Status: Submitted [https://github.com/openssl/openssl/pull/23982]
+Signed-off-by: Ross Burton <ross.burton@arm.com>
+---
+ crypto/aes/asm/bsaes-armv8.pl | 5 ++++-
+ 1 file changed, 4 insertions(+), 1 deletion(-)
+
+diff --git a/crypto/aes/asm/bsaes-armv8.pl b/crypto/aes/asm/bsaes-armv8.pl
+index b3c97e439f..c3c5ff3e05 100644
+--- a/crypto/aes/asm/bsaes-armv8.pl
++++ b/crypto/aes/asm/bsaes-armv8.pl
+@@ -1018,6 +1018,7 @@ _bsaes_key_convert:
+ // Initialisation vector overwritten with last quadword of ciphertext
+ // No output registers, usual AAPCS64 register preservation
+ ossl_bsaes_cbc_encrypt:
++ AARCH64_VALID_CALL_TARGET
+ cmp x2, #128
+ bhs .Lcbc_do_bsaes
+ b AES_cbc_encrypt
+@@ -1270,7 +1271,7 @@ ossl_bsaes_cbc_encrypt:
+ // Output text filled in
+ // No output registers, usual AAPCS64 register preservation
+ ossl_bsaes_ctr32_encrypt_blocks:
+-
++ AARCH64_VALID_CALL_TARGET
+ cmp x2, #8 // use plain AES for
+ blo .Lctr_enc_short // small sizes
+
+@@ -1476,6 +1477,7 @@ ossl_bsaes_ctr32_encrypt_blocks:
+ // Output ciphertext filled in
+ // No output registers, usual AAPCS64 register preservation
+ ossl_bsaes_xts_encrypt:
++ AARCH64_VALID_CALL_TARGET
+ // Stack layout:
+ // sp ->
+ // nrounds*128-96 bytes: key schedule
+@@ -1921,6 +1923,7 @@ ossl_bsaes_xts_encrypt:
+ // Output plaintext filled in
+ // No output registers, usual AAPCS64 register preservation
+ ossl_bsaes_xts_decrypt:
++ AARCH64_VALID_CALL_TARGET
+ // Stack layout:
+ // sp ->
+ // nrounds*128-96 bytes: key schedule
+--
+2.34.1
+
diff --git a/meta/recipes-connectivity/openssl/openssl/run-ptest b/meta/recipes-connectivity/openssl/openssl/run-ptest
index 8dff79101f..c89ec5afa1 100644
--- a/meta/recipes-connectivity/openssl/openssl/run-ptest
+++ b/meta/recipes-connectivity/openssl/openssl/run-ptest
@@ -9,4 +9,4 @@ export TOP=.
# OPENSSL_ENGINES is relative from the test binaries
export OPENSSL_ENGINES=../engines
-perl ./test/run_tests.pl $* | sed -u -r -e '/(.*) \.*.ok/ s/^/PASS: /g' -r -e '/Dubious(.*)/ s/^/FAIL: /g' -e '/(.*) \.*.skipped: (.*)/ s/^/SKIP: /g'
+{ HARNESS_JOBS=4 perl ./test/run_tests.pl $* || echo "FAIL: openssl" ; } | sed -u -r -e '/(.*) \.*.ok/ s/^/PASS: /g' -r -e '/Dubious(.*)/ s/^/FAIL: /g' -e '/(.*) \.*.skipped: (.*)/ s/^/SKIP: /g'
diff --git a/meta/recipes-connectivity/openssl/openssl_3.0.3.bb b/meta/recipes-connectivity/openssl/openssl_3.0.3.bb
deleted file mode 100644
index fd88ae807d..0000000000
--- a/meta/recipes-connectivity/openssl/openssl_3.0.3.bb
+++ /dev/null
@@ -1,258 +0,0 @@
-SUMMARY = "Secure Socket Layer"
-DESCRIPTION = "Secure Socket Layer (SSL) binary and related cryptographic tools."
-HOMEPAGE = "http://www.openssl.org/"
-BUGTRACKER = "http://www.openssl.org/news/vulnerabilities.html"
-SECTION = "libs/network"
-
-LICENSE = "Apache-2.0"
-LIC_FILES_CHKSUM = "file://LICENSE.txt;md5=c75985e733726beaba57bc5253e96d04"
-
-SRC_URI = "http://www.openssl.org/source/openssl-${PV}.tar.gz \
- file://run-ptest \
- file://0001-buildinfo-strip-sysroot-and-debug-prefix-map-from-co.patch \
- file://afalg.patch \
- file://0001-Configure-do-not-tweak-mips-cflags.patch \
- "
-
-SRC_URI:append:class-nativesdk = " \
- file://environment.d-openssl.sh \
- "
-
-SRC_URI[sha256sum] = "ee0078adcef1de5f003c62c80cc96527721609c6f3bb42b7795df31f8b558c0b"
-
-inherit lib_package multilib_header multilib_script ptest perlnative
-MULTILIB_SCRIPTS = "${PN}-bin:${bindir}/c_rehash"
-
-PACKAGECONFIG ?= ""
-PACKAGECONFIG:class-native = ""
-PACKAGECONFIG:class-nativesdk = ""
-
-PACKAGECONFIG[cryptodev-linux] = "enable-devcryptoeng,disable-devcryptoeng,cryptodev-linux,,cryptodev-module"
-PACKAGECONFIG[no-tls1] = "no-tls1"
-PACKAGECONFIG[no-tls1_1] = "no-tls1_1"
-
-B = "${WORKDIR}/build"
-do_configure[cleandirs] = "${B}"
-
-#| ./libcrypto.so: undefined reference to `getcontext'
-#| ./libcrypto.so: undefined reference to `setcontext'
-#| ./libcrypto.so: undefined reference to `makecontext'
-EXTRA_OECONF:append:libc-musl = " no-async"
-EXTRA_OECONF:append:libc-musl:powerpc64 = " no-asm"
-
-# adding devrandom prevents openssl from using getrandom() which is not available on older glibc versions
-# (native versions can be built with newer glibc, but then relocated onto a system with older glibc)
-EXTRA_OECONF:class-native = "--with-rand-seed=os,devrandom"
-EXTRA_OECONF:class-nativesdk = "--with-rand-seed=os,devrandom"
-
-# Relying on hardcoded built-in paths causes openssl-native to not be relocateable from sstate.
-CFLAGS:append:class-native = " -DOPENSSLDIR=/not/builtin -DENGINESDIR=/not/builtin"
-CFLAGS:append:class-nativesdk = " -DOPENSSLDIR=/not/builtin -DENGINESDIR=/not/builtin"
-
-# This allows disabling deprecated or undesirable crypto algorithms.
-# The default is to trust upstream choices.
-DEPRECATED_CRYPTO_FLAGS ?= ""
-
-do_configure () {
- # When we upgrade glibc but not uninative we see obtuse failures in openssl. Make
- # the issue really clear that perl isn't functional due to symbol mismatch issues.
- cat <<- EOF > ${WORKDIR}/perltest
- #!/usr/bin/env perl
- use POSIX;
- EOF
- chmod a+x ${WORKDIR}/perltest
- ${WORKDIR}/perltest
-
- os=${HOST_OS}
- case $os in
- linux-gnueabi |\
- linux-gnuspe |\
- linux-musleabi |\
- linux-muslspe |\
- linux-musl )
- os=linux
- ;;
- *)
- ;;
- esac
- target="$os-${HOST_ARCH}"
- case $target in
- linux-arc)
- target=linux-latomic
- ;;
- linux-arm*)
- target=linux-armv4
- ;;
- linux-aarch64*)
- target=linux-aarch64
- ;;
- linux-i?86 | linux-viac3)
- target=linux-x86
- ;;
- linux-gnux32-x86_64 | linux-muslx32-x86_64 )
- target=linux-x32
- ;;
- linux-gnu64-x86_64)
- target=linux-x86_64
- ;;
- linux-mips | linux-mipsel)
- # specifying TARGET_CC_ARCH prevents openssl from (incorrectly) adding target architecture flags
- target="linux-mips32 ${TARGET_CC_ARCH}"
- ;;
- linux-gnun32-mips*)
- target=linux-mips64
- ;;
- linux-*-mips64 | linux-mips64 | linux-*-mips64el | linux-mips64el)
- target=linux64-mips64
- ;;
- linux-microblaze* | linux-nios2* | linux-sh3 | linux-sh4 | linux-arc*)
- target=linux-generic32
- ;;
- linux-powerpc)
- target=linux-ppc
- ;;
- linux-powerpc64)
- target=linux-ppc64
- ;;
- linux-powerpc64le)
- target=linux-ppc64le
- ;;
- linux-riscv32)
- target=linux-generic32
- ;;
- linux-riscv64)
- target=linux-generic64
- ;;
- linux-sparc | linux-supersparc)
- target=linux-sparcv9
- ;;
- mingw32-x86_64)
- target=mingw64
- ;;
- esac
-
- useprefix=${prefix}
- if [ "x$useprefix" = "x" ]; then
- useprefix=/
- fi
- # WARNING: do not set compiler/linker flags (-I/-D etc.) in EXTRA_OECONF, as they will fully replace the
- # environment variables set by bitbake. Adjust the environment variables instead.
- HASHBANGPERL="/usr/bin/env perl" PERL=perl PERL5LIB="${S}/external/perl/Text-Template-1.46/lib/" \
- perl ${S}/Configure ${EXTRA_OECONF} ${PACKAGECONFIG_CONFARGS} ${DEPRECATED_CRYPTO_FLAGS} --prefix=$useprefix --openssldir=${libdir}/ssl-3 --libdir=${libdir} $target
- perl ${B}/configdata.pm --dump
-}
-
-do_install () {
- oe_runmake DESTDIR="${D}" MANDIR="${mandir}" MANSUFFIX=ssl install
-
- oe_multilib_header openssl/opensslconf.h
- oe_multilib_header openssl/configuration.h
-
- # Create SSL structure for packages such as ca-certificates which
- # contain hard-coded paths to /etc/ssl. Debian does the same.
- install -d ${D}${sysconfdir}/ssl
- mv ${D}${libdir}/ssl-3/certs \
- ${D}${libdir}/ssl-3/private \
- ${D}${libdir}/ssl-3/openssl.cnf \
- ${D}${sysconfdir}/ssl/
-
- # Although absolute symlinks would be OK for the target, they become
- # invalid if native or nativesdk are relocated from sstate.
- ln -sf ${@oe.path.relative('${libdir}/ssl-3', '${sysconfdir}/ssl/certs')} ${D}${libdir}/ssl-3/certs
- ln -sf ${@oe.path.relative('${libdir}/ssl-3', '${sysconfdir}/ssl/private')} ${D}${libdir}/ssl-3/private
- ln -sf ${@oe.path.relative('${libdir}/ssl-3', '${sysconfdir}/ssl/openssl.cnf')} ${D}${libdir}/ssl-3/openssl.cnf
-}
-
-do_install:append:class-native () {
- create_wrapper ${D}${bindir}/openssl \
- OPENSSL_CONF=${libdir}/ssl-3/openssl.cnf \
- SSL_CERT_DIR=${libdir}/ssl-3/certs \
- SSL_CERT_FILE=${libdir}/ssl-3/cert.pem \
- OPENSSL_ENGINES=${libdir}/engines-3 \
- OPENSSL_MODULES=${libdir}/ossl-modules
-}
-
-do_install:append:class-nativesdk () {
- mkdir -p ${D}${SDKPATHNATIVE}/environment-setup.d
- install -m 644 ${WORKDIR}/environment.d-openssl.sh ${D}${SDKPATHNATIVE}/environment-setup.d/openssl.sh
- sed 's|/usr/lib/ssl/|/usr/lib/ssl-3/|g' -i ${D}${SDKPATHNATIVE}/environment-setup.d/openssl.sh
-}
-
-PTEST_BUILD_HOST_FILES += "configdata.pm"
-PTEST_BUILD_HOST_PATTERN = "perl_version ="
-do_install_ptest () {
- install -d ${D}${PTEST_PATH}/test
- install -m755 ${B}/test/p_test.so ${D}${PTEST_PATH}/test
- install -m755 ${B}/test/provider_internal_test.cnf ${D}${PTEST_PATH}/test
-
- # Prune the build tree
- rm -f ${B}/fuzz/*.* ${B}/test/*.*
-
- cp ${S}/Configure ${B}/configdata.pm ${D}${PTEST_PATH}
- sed 's|${S}|${PTEST_PATH}|g' -i ${D}${PTEST_PATH}/configdata.pm
- cp -r ${S}/external ${B}/test ${S}/test ${B}/fuzz ${S}/util ${B}/util ${D}${PTEST_PATH}
-
- # For test_shlibload
- ln -s ${libdir}/libcrypto.so.1.1 ${D}${PTEST_PATH}/
- ln -s ${libdir}/libssl.so.1.1 ${D}${PTEST_PATH}/
-
- install -d ${D}${PTEST_PATH}/apps
- ln -s ${bindir}/openssl ${D}${PTEST_PATH}/apps
- install -m644 ${S}/apps/*.pem ${S}/apps/*.srl ${S}/apps/openssl.cnf ${D}${PTEST_PATH}/apps
- install -m755 ${B}/apps/CA.pl ${D}${PTEST_PATH}/apps
-
- install -d ${D}${PTEST_PATH}/engines
- install -m755 ${B}/engines/dasync.so ${D}${PTEST_PATH}/engines
- install -m755 ${B}/engines/loader_attic.so ${D}${PTEST_PATH}/engines
- install -m755 ${B}/engines/ossltest.so ${D}${PTEST_PATH}/engines
-
- install -d ${D}${PTEST_PATH}/providers
- install -m755 ${B}/providers/legacy.so ${D}${PTEST_PATH}/providers
-
- install -d ${D}${PTEST_PATH}/Configurations
- cp -rf ${S}/Configurations/* ${D}${PTEST_PATH}/Configurations/
-
- # seems to be needed with perl 5.32.1
- install -d ${D}${PTEST_PATH}/util/perl/recipes
- cp ${D}${PTEST_PATH}/test/recipes/tconversion.pl ${D}${PTEST_PATH}/util/perl/recipes/
-
- sed 's|${S}|${PTEST_PATH}|g' -i ${D}${PTEST_PATH}/util/wrap.pl
-}
-
-# Add the openssl.cnf file to the openssl-conf package. Make the libcrypto
-# package RRECOMMENDS on this package. This will enable the configuration
-# file to be installed for both the openssl-bin package and the libcrypto
-# package since the openssl-bin package depends on the libcrypto package.
-
-PACKAGES =+ "libcrypto libssl openssl-conf ${PN}-engines ${PN}-misc ${PN}-ossl-module-legacy"
-
-FILES:libcrypto = "${libdir}/libcrypto${SOLIBS}"
-FILES:libssl = "${libdir}/libssl${SOLIBS}"
-FILES:openssl-conf = "${sysconfdir}/ssl/openssl.cnf \
- ${libdir}/ssl-3/openssl.cnf* \
- "
-FILES:${PN}-engines = "${libdir}/engines-3"
-# ${prefix} comes from what we pass into --prefix at configure time (which is used for INSTALLTOP)
-FILES:${PN}-engines:append:mingw32:class-nativesdk = " ${prefix}${libdir}/engines-3"
-FILES:${PN}-misc = "${libdir}/ssl-3/misc ${bindir}/c_rehash"
-FILES:${PN}-ossl-module-legacy = "${libdir}/ossl-modules/legacy.so"
-FILES:${PN} =+ "${libdir}/ssl-3/* ${libdir}/ossl-modules/"
-FILES:${PN}:append:class-nativesdk = " ${SDKPATHNATIVE}/environment-setup.d/openssl.sh"
-
-CONFFILES:openssl-conf = "${sysconfdir}/ssl/openssl.cnf"
-
-RRECOMMENDS:libcrypto += "openssl-conf ${PN}-ossl-module-legacy"
-RDEPENDS:${PN}-misc = "perl"
-RDEPENDS:${PN}-ptest += "openssl-bin perl perl-modules bash sed"
-
-RDEPENDS:${PN}-bin += "openssl-conf"
-
-BBCLASSEXTEND = "native nativesdk"
-
-CVE_PRODUCT = "openssl:openssl"
-
-CVE_VERSION_SUFFIX = "alphabetical"
-
-# Only affects OpenSSL >= 1.1.1 in combination with Apache < 2.4.37
-# Apache in meta-webserver is already recent enough
-CVE_CHECK_IGNORE += "CVE-2019-0190"
diff --git a/meta/recipes-connectivity/openssl/openssl_3.2.1.bb b/meta/recipes-connectivity/openssl/openssl_3.2.1.bb
new file mode 100644
index 0000000000..d37b68abbb
--- /dev/null
+++ b/meta/recipes-connectivity/openssl/openssl_3.2.1.bb
@@ -0,0 +1,264 @@
+SUMMARY = "Secure Socket Layer"
+DESCRIPTION = "Secure Socket Layer (SSL) binary and related cryptographic tools."
+HOMEPAGE = "http://www.openssl.org/"
+BUGTRACKER = "http://www.openssl.org/news/vulnerabilities.html"
+SECTION = "libs/network"
+
+LICENSE = "Apache-2.0"
+LIC_FILES_CHKSUM = "file://LICENSE.txt;md5=c75985e733726beaba57bc5253e96d04"
+
+SRC_URI = "http://www.openssl.org/source/openssl-${PV}.tar.gz \
+ file://run-ptest \
+ file://0001-buildinfo-strip-sysroot-and-debug-prefix-map-from-co.patch \
+ file://0001-Configure-do-not-tweak-mips-cflags.patch \
+ file://0001-Added-handshake-history-reporting-when-test-fails.patch \
+ file://bti.patch \
+ file://CVE-2024-2511.patch \
+ "
+
+SRC_URI:append:class-nativesdk = " \
+ file://environment.d-openssl.sh \
+ "
+
+SRC_URI[sha256sum] = "83c7329fe52c850677d75e5d0b0ca245309b97e8ecbcfdc1dfdc4ab9fac35b39"
+
+inherit lib_package multilib_header multilib_script ptest perlnative manpages
+MULTILIB_SCRIPTS = "${PN}-bin:${bindir}/c_rehash"
+
+PACKAGECONFIG ?= ""
+PACKAGECONFIG:class-native = ""
+PACKAGECONFIG:class-nativesdk = ""
+
+PACKAGECONFIG[cryptodev-linux] = "enable-devcryptoeng,disable-devcryptoeng,cryptodev-linux,,cryptodev-module"
+PACKAGECONFIG[no-tls1] = "no-tls1"
+PACKAGECONFIG[no-tls1_1] = "no-tls1_1"
+PACKAGECONFIG[manpages] = ""
+
+B = "${WORKDIR}/build"
+do_configure[cleandirs] = "${B}"
+
+#| ./libcrypto.so: undefined reference to `getcontext'
+#| ./libcrypto.so: undefined reference to `setcontext'
+#| ./libcrypto.so: undefined reference to `makecontext'
+EXTRA_OECONF:append:libc-musl = " no-async"
+EXTRA_OECONF:append:libc-musl:powerpc64 = " no-asm"
+
+# adding devrandom prevents openssl from using getrandom() which is not available on older glibc versions
+# (native versions can be built with newer glibc, but then relocated onto a system with older glibc)
+EXTRA_OECONF:class-native = "--with-rand-seed=os,devrandom"
+EXTRA_OECONF:class-nativesdk = "--with-rand-seed=os,devrandom"
+
+# Relying on hardcoded built-in paths causes openssl-native to not be relocateable from sstate.
+CFLAGS:append:class-native = " -DOPENSSLDIR=/not/builtin -DENGINESDIR=/not/builtin"
+CFLAGS:append:class-nativesdk = " -DOPENSSLDIR=/not/builtin -DENGINESDIR=/not/builtin"
+
+# This allows disabling deprecated or undesirable crypto algorithms.
+# The default is to trust upstream choices.
+DEPRECATED_CRYPTO_FLAGS ?= ""
+
+do_configure () {
+ # When we upgrade glibc but not uninative we see obtuse failures in openssl. Make
+ # the issue really clear that perl isn't functional due to symbol mismatch issues.
+ cat <<- EOF > ${WORKDIR}/perltest
+ #!/usr/bin/env perl
+ use POSIX;
+ EOF
+ chmod a+x ${WORKDIR}/perltest
+ ${WORKDIR}/perltest
+
+ os=${HOST_OS}
+ case $os in
+ linux-gnueabi |\
+ linux-gnuspe |\
+ linux-musleabi |\
+ linux-muslspe |\
+ linux-musl )
+ os=linux
+ ;;
+ *)
+ ;;
+ esac
+ target="$os-${HOST_ARCH}"
+ case $target in
+ linux-arc | linux-microblaze*)
+ target=linux-latomic
+ ;;
+ linux-arm*)
+ target=linux-armv4
+ ;;
+ linux-aarch64*)
+ target=linux-aarch64
+ ;;
+ linux-i?86 | linux-viac3)
+ target=linux-x86
+ ;;
+ linux-gnux32-x86_64 | linux-muslx32-x86_64 )
+ target=linux-x32
+ ;;
+ linux-gnu64-x86_64)
+ target=linux-x86_64
+ ;;
+ linux-loongarch64)
+ target=linux64-loongarch64
+ ;;
+ linux-mips | linux-mipsel)
+ # specifying TARGET_CC_ARCH prevents openssl from (incorrectly) adding target architecture flags
+ target="linux-mips32 ${TARGET_CC_ARCH}"
+ ;;
+ linux-gnun32-mips*)
+ target=linux-mips64
+ ;;
+ linux-*-mips64 | linux-mips64 | linux-*-mips64el | linux-mips64el)
+ target=linux64-mips64
+ ;;
+ linux-nios2* | linux-sh3 | linux-sh4 | linux-arc*)
+ target=linux-generic32
+ ;;
+ linux-powerpc)
+ target=linux-ppc
+ ;;
+ linux-powerpc64)
+ target=linux-ppc64
+ ;;
+ linux-powerpc64le)
+ target=linux-ppc64le
+ ;;
+ linux-riscv32)
+ target=linux32-riscv32
+ ;;
+ linux-riscv64)
+ target=linux64-riscv64
+ ;;
+ linux-sparc | linux-supersparc)
+ target=linux-sparcv9
+ ;;
+ mingw32-x86_64)
+ target=mingw64
+ ;;
+ esac
+
+ useprefix=${prefix}
+ if [ "x$useprefix" = "x" ]; then
+ useprefix=/
+ fi
+ # WARNING: do not set compiler/linker flags (-I/-D etc.) in EXTRA_OECONF, as they will fully replace the
+ # environment variables set by bitbake. Adjust the environment variables instead.
+ PERLEXTERNAL="$(realpath ${S}/external/perl/Text-Template-*/lib)"
+ test -d "$PERLEXTERNAL" || bberror "PERLEXTERNAL '$PERLEXTERNAL' not found!"
+ HASHBANGPERL="/usr/bin/env perl" PERL=perl PERL5LIB="$PERLEXTERNAL" \
+ perl ${S}/Configure ${EXTRA_OECONF} ${PACKAGECONFIG_CONFARGS} ${DEPRECATED_CRYPTO_FLAGS} --prefix=$useprefix --openssldir=${libdir}/ssl-3 --libdir=${libdir} $target
+ perl ${B}/configdata.pm --dump
+}
+
+do_install () {
+ oe_runmake DESTDIR="${D}" MANDIR="${mandir}" MANSUFFIX=ssl install_sw install_ssldirs ${@bb.utils.contains('PACKAGECONFIG', 'manpages', 'install_docs', '', d)}
+
+ oe_multilib_header openssl/opensslconf.h
+ oe_multilib_header openssl/configuration.h
+
+ # Create SSL structure for packages such as ca-certificates which
+ # contain hard-coded paths to /etc/ssl. Debian does the same.
+ install -d ${D}${sysconfdir}/ssl
+ mv ${D}${libdir}/ssl-3/certs \
+ ${D}${libdir}/ssl-3/private \
+ ${D}${libdir}/ssl-3/openssl.cnf \
+ ${D}${sysconfdir}/ssl/
+
+ # Although absolute symlinks would be OK for the target, they become
+ # invalid if native or nativesdk are relocated from sstate.
+ ln -sf ${@oe.path.relative('${libdir}/ssl-3', '${sysconfdir}/ssl/certs')} ${D}${libdir}/ssl-3/certs
+ ln -sf ${@oe.path.relative('${libdir}/ssl-3', '${sysconfdir}/ssl/private')} ${D}${libdir}/ssl-3/private
+ ln -sf ${@oe.path.relative('${libdir}/ssl-3', '${sysconfdir}/ssl/openssl.cnf')} ${D}${libdir}/ssl-3/openssl.cnf
+}
+
+do_install:append:class-native () {
+ create_wrapper ${D}${bindir}/openssl \
+ OPENSSL_CONF=${libdir}/ssl-3/openssl.cnf \
+ SSL_CERT_DIR=${libdir}/ssl-3/certs \
+ SSL_CERT_FILE=${libdir}/ssl-3/cert.pem \
+ OPENSSL_ENGINES=${libdir}/engines-3 \
+ OPENSSL_MODULES=${libdir}/ossl-modules
+}
+
+do_install:append:class-nativesdk () {
+ mkdir -p ${D}${SDKPATHNATIVE}/environment-setup.d
+ install -m 644 ${WORKDIR}/environment.d-openssl.sh ${D}${SDKPATHNATIVE}/environment-setup.d/openssl.sh
+ sed 's|/usr/lib/ssl/|/usr/lib/ssl-3/|g' -i ${D}${SDKPATHNATIVE}/environment-setup.d/openssl.sh
+}
+
+PTEST_BUILD_HOST_FILES += "configdata.pm"
+PTEST_BUILD_HOST_PATTERN = "perl_version ="
+do_install_ptest () {
+ install -d ${D}${PTEST_PATH}/test
+ install -m755 ${B}/test/p_test.so ${D}${PTEST_PATH}/test
+ install -m755 ${B}/test/p_minimal.so ${D}${PTEST_PATH}/test
+ install -m755 ${B}/test/provider_internal_test.cnf ${D}${PTEST_PATH}/test
+
+ # Prune the build tree
+ rm -f ${B}/fuzz/*.* ${B}/test/*.*
+
+ cp ${S}/Configure ${B}/configdata.pm ${D}${PTEST_PATH}
+ sed 's|${S}|${PTEST_PATH}|g' -i ${D}${PTEST_PATH}/configdata.pm
+ cp -r ${S}/external ${B}/test ${S}/test ${B}/fuzz ${S}/util ${B}/util ${D}${PTEST_PATH}
+
+ # For test_shlibload
+ ln -s ${libdir}/libcrypto.so.1.1 ${D}${PTEST_PATH}/
+ ln -s ${libdir}/libssl.so.1.1 ${D}${PTEST_PATH}/
+
+ install -d ${D}${PTEST_PATH}/apps
+ ln -s ${bindir}/openssl ${D}${PTEST_PATH}/apps
+ install -m644 ${S}/apps/*.pem ${S}/apps/*.srl ${S}/apps/openssl.cnf ${D}${PTEST_PATH}/apps
+ install -m755 ${B}/apps/CA.pl ${D}${PTEST_PATH}/apps
+
+ install -d ${D}${PTEST_PATH}/engines
+ install -m755 ${B}/engines/dasync.so ${D}${PTEST_PATH}/engines
+ install -m755 ${B}/engines/loader_attic.so ${D}${PTEST_PATH}/engines
+ install -m755 ${B}/engines/ossltest.so ${D}${PTEST_PATH}/engines
+
+ install -d ${D}${PTEST_PATH}/providers
+ install -m755 ${B}/providers/legacy.so ${D}${PTEST_PATH}/providers
+
+ install -d ${D}${PTEST_PATH}/Configurations
+ cp -rf ${S}/Configurations/* ${D}${PTEST_PATH}/Configurations/
+
+ # seems to be needed with perl 5.32.1
+ install -d ${D}${PTEST_PATH}/util/perl/recipes
+ cp ${D}${PTEST_PATH}/test/recipes/tconversion.pl ${D}${PTEST_PATH}/util/perl/recipes/
+
+ sed 's|${S}|${PTEST_PATH}|g' -i ${D}${PTEST_PATH}/util/wrap.pl
+}
+
+# Add the openssl.cnf file to the openssl-conf package. Make the libcrypto
+# package RRECOMMENDS on this package. This will enable the configuration
+# file to be installed for both the openssl-bin package and the libcrypto
+# package since the openssl-bin package depends on the libcrypto package.
+
+PACKAGES =+ "libcrypto libssl openssl-conf ${PN}-engines ${PN}-misc ${PN}-ossl-module-legacy"
+
+FILES:libcrypto = "${libdir}/libcrypto${SOLIBS}"
+FILES:libssl = "${libdir}/libssl${SOLIBS}"
+FILES:openssl-conf = "${sysconfdir}/ssl/openssl.cnf \
+ ${libdir}/ssl-3/openssl.cnf* \
+ "
+FILES:${PN}-engines = "${libdir}/engines-3"
+# ${prefix} comes from what we pass into --prefix at configure time (which is used for INSTALLTOP)
+FILES:${PN}-engines:append:mingw32:class-nativesdk = " ${prefix}${libdir}/engines-3"
+FILES:${PN}-misc = "${libdir}/ssl-3/misc ${bindir}/c_rehash"
+FILES:${PN}-ossl-module-legacy = "${libdir}/ossl-modules/legacy.so"
+FILES:${PN} =+ "${libdir}/ssl-3/* ${libdir}/ossl-modules/"
+FILES:${PN}:append:class-nativesdk = " ${SDKPATHNATIVE}/environment-setup.d/openssl.sh"
+
+CONFFILES:openssl-conf = "${sysconfdir}/ssl/openssl.cnf"
+
+RRECOMMENDS:libcrypto += "openssl-conf ${PN}-ossl-module-legacy"
+RDEPENDS:${PN}-misc = "perl"
+RDEPENDS:${PN}-ptest += "openssl-bin perl perl-modules bash sed"
+
+RDEPENDS:${PN}-bin += "openssl-conf"
+
+BBCLASSEXTEND = "native nativesdk"
+
+CVE_PRODUCT = "openssl:openssl"
+
+CVE_VERSION_SUFFIX = "alphabetical"
+
diff --git a/meta/recipes-connectivity/ppp-dialin/ppp-dialin_0.1.bb b/meta/recipes-connectivity/ppp-dialin/ppp-dialin_0.1.bb
index 8a6c297cb0..099c58bfc7 100644
--- a/meta/recipes-connectivity/ppp-dialin/ppp-dialin_0.1.bb
+++ b/meta/recipes-connectivity/ppp-dialin/ppp-dialin_0.1.bb
@@ -3,7 +3,6 @@ SECTION = "console/network"
DESCRIPTION = "PPP dail-in provides a point to point protocol (PPP), so that other computers can dial up to it and access connected networks."
DEPENDS = "ppp"
RDEPENDS:${PN} = "ppp"
-PR = "r8"
LICENSE = "MIT"
LIC_FILES_CHKSUM = "file://${COREBASE}/meta/COPYING.MIT;md5=3da9cfbcb788c80a0384361b4de20420"
diff --git a/meta/recipes-connectivity/ppp/ppp/0001-ppp-fix-build-against-5.15-headers.patch b/meta/recipes-connectivity/ppp/ppp/0001-ppp-fix-build-against-5.15-headers.patch
deleted file mode 100644
index c91246dbf5..0000000000
--- a/meta/recipes-connectivity/ppp/ppp/0001-ppp-fix-build-against-5.15-headers.patch
+++ /dev/null
@@ -1,36 +0,0 @@
-From aba3273273e826c6dc90f197ca9a3e800e826891 Mon Sep 17 00:00:00 2001
-From: Bruce Ashfield <bruce.ashfield@gmail.com>
-Date: Fri, 5 Nov 2021 12:41:35 -0400
-Subject: [PATCH] ppp: fix build against 5.15 headers
-
-The 5.15 kernel has removed ipx support, along with the userspace
-visible header.
-
-This support wasn't used previously (as it hasn't been very well
-maintained in the kernel for several years), so we can simply
-disable it in our build and wait for upstream to do a release that
-drops the support.
-
-Upstream-Status: Inappropriate [OE-specific configuration/headers]
-
-Signed-off-by: Bruce Ashfield <bruce.ashfield@gmail.com>
----
- pppd/Makefile.linux | 2 +-
- 1 file changed, 1 insertion(+), 1 deletion(-)
-
-diff --git a/pppd/Makefile.linux b/pppd/Makefile.linux
-index 22837c5..23b9b22 100644
---- a/pppd/Makefile.linux
-+++ b/pppd/Makefile.linux
-@@ -91,7 +91,7 @@ MAXOCTETS=y
-
- INCLUDE_DIRS= -I../include
-
--COMPILE_FLAGS= -DHAVE_PATHS_H -DIPX_CHANGE -DHAVE_MMAP -pipe
-+COMPILE_FLAGS= -DHAVE_PATHS_H -DHAVE_MMAP -pipe
-
- CFLAGS= $(COPTS) $(COMPILE_FLAGS) $(INCLUDE_DIRS) '-DDESTDIR="@DESTDIR@"'
-
---
-2.25.1
-
diff --git a/meta/recipes-connectivity/ppp/ppp/makefix.patch b/meta/recipes-connectivity/ppp/ppp/makefix.patch
deleted file mode 100644
index fce068cae0..0000000000
--- a/meta/recipes-connectivity/ppp/ppp/makefix.patch
+++ /dev/null
@@ -1,40 +0,0 @@
-We were seeing reproducibility issues where one host would use the internal
-logwtmp wrapper, another would use the one in libutil. The issue was that in
-some cases the "\#include" was making it to CC, in others, "#include". The
-issue seems to be related to shell escaping.
-
-The root cause looks to be:
-http://git.savannah.gnu.org/cgit/make.git/commit/?id=c6966b323811c37acedff05b576b907b06aea5f4
-
-Instead of relying on shell quoting, use make to indirect the variable
-and avoid the problem.
-
-See https://github.com/paulusmack/ppp/issues/233
-
-Upstream-Status: Backport [https://github.com/paulusmack/ppp/commit/b4430f7092ececdff2504d5f3393a4c6528c3686]
-Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
-
-Index: ppp-2.4.9/pppd/Makefile.linux
-===================================================================
---- ppp-2.4.9.orig/pppd/Makefile.linux
-+++ ppp-2.4.9/pppd/Makefile.linux
-@@ -80,7 +80,8 @@ PLUGIN=y
- #USE_SRP=y
-
- # Use libutil; test if logwtmp is declared in <utmp.h> to detect
--ifeq ($(shell echo '\#include <utmp.h>' | $(CC) -E - 2>/dev/null | grep -q logwtmp && echo yes),yes)
-+UTMPHEADER = "\#include <utmp.h>"
-+ifeq ($(shell echo $(UTMPHEADER) | $(CC) -E - 2>/dev/null | grep -q logwtmp && echo yes),yes)
- USE_LIBUTIL=y
- endif
-
-@@ -143,7 +144,8 @@ CFLAGS += -DHAS_SHADOW
- #LIBS += -lshadow $(LIBS)
- endif
-
--ifeq ($(shell echo '\#include <crypt.h>' | $(CC) -E - >/dev/null 2>&1 && echo yes),yes)
-+CRYPTHEADER = "\#include <crypt.h>"
-+ifeq ($(shell echo $(CRYPTHEADER) | $(CC) -E - >/dev/null 2>&1 && echo yes),yes)
- CFLAGS += -DHAVE_CRYPT_H=1
- LIBS += -lcrypt
- endif
diff --git a/meta/recipes-connectivity/ppp/ppp_2.4.9.bb b/meta/recipes-connectivity/ppp/ppp_2.4.9.bb
deleted file mode 100644
index 700ece61dc..0000000000
--- a/meta/recipes-connectivity/ppp/ppp_2.4.9.bb
+++ /dev/null
@@ -1,99 +0,0 @@
-SUMMARY = "Point-to-Point Protocol (PPP) support"
-DESCRIPTION = "ppp (Paul's PPP Package) is an open source package which implements \
-the Point-to-Point Protocol (PPP) on Linux and Solaris systems."
-SECTION = "console/network"
-HOMEPAGE = "http://samba.org/ppp/"
-BUGTRACKER = "http://ppp.samba.org/cgi-bin/ppp-bugs"
-DEPENDS = "libpcap openssl virtual/crypt"
-LICENSE = "BSD-3-Clause & BSD-3-Clause-Attribution & GPL-2.0-or-later & LGPL-2.0-or-later & PD"
-LIC_FILES_CHKSUM = "file://pppd/ccp.c;beginline=1;endline=29;md5=e2c43fe6e81ff77d87dc9c290a424dea \
- file://pppd/plugins/passprompt.c;beginline=1;endline=10;md5=3bcbcdbf0e369c9a3e0b8c8275b065d8 \
- file://pppd/tdb.c;beginline=1;endline=27;md5=4ca3a9991b011038d085d6675ae7c4e6 \
- file://chat/chat.c;beginline=1;endline=15;md5=0d374b8545ee5c62d7aff1acbd38add2"
-
-SRC_URI = "https://download.samba.org/pub/${BPN}/${BP}.tar.gz \
- file://makefix.patch \
- file://pon \
- file://poff \
- file://init \
- file://ip-up \
- file://ip-down \
- file://08setupdns \
- file://92removedns \
- file://pap \
- file://ppp_on_boot \
- file://provider \
- file://ppp@.service \
- file://0001-ppp-fix-build-against-5.15-headers.patch \
- "
-
-SRC_URI[sha256sum] = "f938b35eccde533ea800b15a7445b2f1137da7f88e32a16898d02dee8adc058d"
-
-inherit autotools-brokensep systemd
-
-TARGET_CC_ARCH += " ${LDFLAGS}"
-EXTRA_OEMAKE = "CC='${CC}' STRIPPROG=${STRIP} MANDIR=${D}${datadir}/man/man8 INCDIR=${D}${includedir} LIBDIR=${D}${libdir}/pppd/${PV} BINDIR=${D}${sbindir}"
-EXTRA_OECONF = "--disable-strip"
-
-# Package Makefile computes CFLAGS, referencing COPTS.
-# Typically hard-coded to '-O2 -g' in the Makefile's.
-#
-EXTRA_OEMAKE += ' COPTS="${CFLAGS} -I${STAGING_INCDIR}/openssl -I${S}/include"'
-
-EXTRA_OECONF:append:libc-musl = " --disable-ipxcp"
-
-do_configure () {
- oe_runconf
-}
-
-do_install:append () {
- make install-etcppp ETCDIR=${D}/${sysconfdir}/ppp
- mkdir -p ${D}${bindir}/ ${D}${sysconfdir}/init.d
- mkdir -p ${D}${sysconfdir}/ppp/ip-up.d/
- mkdir -p ${D}${sysconfdir}/ppp/ip-down.d/
- install -m 0755 ${WORKDIR}/pon ${D}${bindir}/pon
- install -m 0755 ${WORKDIR}/poff ${D}${bindir}/poff
- install -m 0755 ${WORKDIR}/init ${D}${sysconfdir}/init.d/ppp
- install -m 0755 ${WORKDIR}/ip-up ${D}${sysconfdir}/ppp/
- install -m 0755 ${WORKDIR}/ip-down ${D}${sysconfdir}/ppp/
- install -m 0755 ${WORKDIR}/08setupdns ${D}${sysconfdir}/ppp/ip-up.d/
- install -m 0755 ${WORKDIR}/92removedns ${D}${sysconfdir}/ppp/ip-down.d/
- mkdir -p ${D}${sysconfdir}/chatscripts
- mkdir -p ${D}${sysconfdir}/ppp/peers
- install -m 0755 ${WORKDIR}/pap ${D}${sysconfdir}/chatscripts
- install -m 0755 ${WORKDIR}/ppp_on_boot ${D}${sysconfdir}/ppp/ppp_on_boot
- install -m 0755 ${WORKDIR}/provider ${D}${sysconfdir}/ppp/peers/provider
- install -d ${D}${systemd_system_unitdir}
- install -m 0644 ${WORKDIR}/ppp@.service ${D}${systemd_system_unitdir}
- sed -i -e 's,@SBINDIR@,${sbindir},g' \
- ${D}${systemd_system_unitdir}/ppp@.service
- rm -rf ${D}/${mandir}/man8/man8
- chmod u+s ${D}${sbindir}/pppd
-}
-
-do_install:append:libc-musl () {
- install -Dm 0644 ${S}/include/net/ppp_defs.h ${D}${includedir}/net/ppp_defs.h
-}
-
-CONFFILES:${PN} = "${sysconfdir}/ppp/pap-secrets ${sysconfdir}/ppp/chap-secrets ${sysconfdir}/ppp/options"
-PACKAGES =+ "${PN}-oa ${PN}-oe ${PN}-radius ${PN}-winbind ${PN}-minconn ${PN}-password ${PN}-l2tp ${PN}-tools"
-FILES:${PN} = "${sysconfdir} ${bindir} ${sbindir}/chat ${sbindir}/pppd ${systemd_system_unitdir}/ppp@.service"
-FILES:${PN}-oa = "${libdir}/pppd/${PV}/pppoatm.so"
-FILES:${PN}-oe = "${sbindir}/pppoe-discovery ${libdir}/pppd/${PV}/*pppoe.so"
-FILES:${PN}-radius = "${libdir}/pppd/${PV}/radius.so ${libdir}/pppd/${PV}/radattr.so ${libdir}/pppd/${PV}/radrealms.so"
-FILES:${PN}-winbind = "${libdir}/pppd/${PV}/winbind.so"
-FILES:${PN}-minconn = "${libdir}/pppd/${PV}/minconn.so"
-FILES:${PN}-password = "${libdir}/pppd/${PV}/pass*.so"
-FILES:${PN}-l2tp = "${libdir}/pppd/${PV}/*l2tp.so"
-FILES:${PN}-tools = "${sbindir}/pppstats ${sbindir}/pppdump"
-SUMMARY:${PN}-oa = "Plugin for PPP for PPP-over-ATM support"
-SUMMARY:${PN}-oe = "Plugin for PPP for PPP-over-Ethernet support"
-SUMMARY:${PN}-radius = "Plugin for PPP for RADIUS support"
-SUMMARY:${PN}-winbind = "Plugin for PPP to authenticate against Samba or Windows"
-SUMMARY:${PN}-minconn = "Plugin for PPP to set a delay before the idle timeout applies"
-SUMMARY:${PN}-password = "Plugin for PPP to get passwords via a pipe"
-SUMMARY:${PN}-l2tp = "Plugin for PPP for l2tp support"
-SUMMARY:${PN}-tools = "Additional tools for the PPP package"
-
-# Ignore compatibility symlink rp-pppoe.so->pppoe.so
-INSANE_SKIP:${PN}-oe += "dev-so"
diff --git a/meta/recipes-connectivity/ppp/ppp_2.5.0.bb b/meta/recipes-connectivity/ppp/ppp_2.5.0.bb
new file mode 100644
index 0000000000..4b052f8ed9
--- /dev/null
+++ b/meta/recipes-connectivity/ppp/ppp_2.5.0.bb
@@ -0,0 +1,75 @@
+SUMMARY = "Point-to-Point Protocol (PPP) support"
+DESCRIPTION = "ppp (Paul's PPP Package) is an open source package which implements \
+the Point-to-Point Protocol (PPP) on Linux and Solaris systems."
+SECTION = "console/network"
+HOMEPAGE = "http://samba.org/ppp/"
+BUGTRACKER = "http://ppp.samba.org/cgi-bin/ppp-bugs"
+DEPENDS = "libpcap openssl virtual/crypt"
+LICENSE = "BSD-3-Clause & BSD-3-Clause-Attribution & GPL-2.0-or-later & LGPL-2.0-or-later & PD"
+LIC_FILES_CHKSUM = "file://pppd/ccp.c;beginline=1;endline=29;md5=e2c43fe6e81ff77d87dc9c290a424dea \
+ file://pppd/plugins/passprompt.c;beginline=1;endline=10;md5=3bcbcdbf0e369c9a3e0b8c8275b065d8 \
+ file://pppd/tdb.c;beginline=1;endline=27;md5=4ca3a9991b011038d085d6675ae7c4e6 \
+ file://chat/chat.c;beginline=1;endline=15;md5=0d374b8545ee5c62d7aff1acbd38add2"
+
+SRC_URI = "https://download.samba.org/pub/${BPN}/${BP}.tar.gz \
+ file://pon \
+ file://poff \
+ file://init \
+ file://ip-up \
+ file://ip-down \
+ file://08setupdns \
+ file://92removedns \
+ file://pap \
+ file://ppp_on_boot \
+ file://provider \
+ file://ppp@.service \
+ "
+
+SRC_URI[sha256sum] = "5cae0e8075f8a1755f16ca290eb44e6b3545d3f292af4da65ecffe897de636ff"
+
+inherit autotools systemd
+
+EXTRA_OECONF += "--with-openssl=${STAGING_EXECPREFIXDIR}"
+
+do_install:append () {
+ mkdir -p ${D}${bindir}/ ${D}${sysconfdir}/init.d
+ mkdir -p ${D}${sysconfdir}/ppp/ip-up.d/
+ mkdir -p ${D}${sysconfdir}/ppp/ip-down.d/
+ install -m 0755 ${WORKDIR}/pon ${D}${bindir}/pon
+ install -m 0755 ${WORKDIR}/poff ${D}${bindir}/poff
+ install -m 0755 ${WORKDIR}/init ${D}${sysconfdir}/init.d/ppp
+ install -m 0755 ${WORKDIR}/ip-up ${D}${sysconfdir}/ppp/
+ install -m 0755 ${WORKDIR}/ip-down ${D}${sysconfdir}/ppp/
+ install -m 0755 ${WORKDIR}/08setupdns ${D}${sysconfdir}/ppp/ip-up.d/
+ install -m 0755 ${WORKDIR}/92removedns ${D}${sysconfdir}/ppp/ip-down.d/
+ mkdir -p ${D}${sysconfdir}/chatscripts
+ mkdir -p ${D}${sysconfdir}/ppp/peers
+ install -m 0755 ${WORKDIR}/pap ${D}${sysconfdir}/chatscripts
+ install -m 0755 ${WORKDIR}/ppp_on_boot ${D}${sysconfdir}/ppp/ppp_on_boot
+ install -m 0755 ${WORKDIR}/provider ${D}${sysconfdir}/ppp/peers/provider
+ install -d ${D}${systemd_system_unitdir}
+ install -m 0644 ${WORKDIR}/ppp@.service ${D}${systemd_system_unitdir}
+ sed -i -e 's,@SBINDIR@,${sbindir},g' \
+ ${D}${systemd_system_unitdir}/ppp@.service
+}
+
+CONFFILES:${PN} = "${sysconfdir}/ppp/pap-secrets ${sysconfdir}/ppp/chap-secrets ${sysconfdir}/ppp/options"
+PACKAGES =+ "${PN}-oa ${PN}-oe ${PN}-radius ${PN}-winbind ${PN}-minconn ${PN}-password ${PN}-l2tp ${PN}-tools"
+FILES:${PN} = "${sysconfdir} ${bindir} ${sbindir}/chat ${sbindir}/pppd ${systemd_system_unitdir}/ppp@.service"
+FILES:${PN}-oa = "${libdir}/pppd/${PV}/pppoatm.so"
+FILES:${PN}-oe = "${sbindir}/pppoe-discovery ${libdir}/pppd/${PV}/*pppoe.so"
+FILES:${PN}-radius = "${libdir}/pppd/${PV}/radius.so ${libdir}/pppd/${PV}/radattr.so ${libdir}/pppd/${PV}/radrealms.so"
+FILES:${PN}-winbind = "${libdir}/pppd/${PV}/winbind.so"
+FILES:${PN}-minconn = "${libdir}/pppd/${PV}/minconn.so"
+FILES:${PN}-password = "${libdir}/pppd/${PV}/pass*.so"
+FILES:${PN}-l2tp = "${libdir}/pppd/${PV}/*l2tp.so"
+FILES:${PN}-tools = "${sbindir}/pppstats ${sbindir}/pppdump"
+SUMMARY:${PN}-oa = "Plugin for PPP for PPP-over-ATM support"
+SUMMARY:${PN}-oe = "Plugin for PPP for PPP-over-Ethernet support"
+SUMMARY:${PN}-radius = "Plugin for PPP for RADIUS support"
+SUMMARY:${PN}-winbind = "Plugin for PPP to authenticate against Samba or Windows"
+SUMMARY:${PN}-minconn = "Plugin for PPP to set a delay before the idle timeout applies"
+SUMMARY:${PN}-password = "Plugin for PPP to get passwords via a pipe"
+SUMMARY:${PN}-l2tp = "Plugin for PPP for l2tp support"
+SUMMARY:${PN}-tools = "Additional tools for the PPP package"
+
diff --git a/meta/recipes-connectivity/resolvconf/resolvconf/0001-avoid-using-m-option-for-readlink.patch b/meta/recipes-connectivity/resolvconf/resolvconf/0001-avoid-using-m-option-for-readlink.patch
new file mode 100644
index 0000000000..ab32f26754
--- /dev/null
+++ b/meta/recipes-connectivity/resolvconf/resolvconf/0001-avoid-using-m-option-for-readlink.patch
@@ -0,0 +1,37 @@
+From 6bf2bb136a0b3961339369bc08e58b661fba0edb Mon Sep 17 00:00:00 2001
+From: Chen Qi <Qi.Chen@windriver.com>
+Date: Thu, 17 Nov 2022 17:26:30 +0800
+Subject: [PATCH] avoid using -m option for readlink
+
+Use a more widely used option '-f' instead of '-m' here to
+avoid dependency on coreutils.
+
+Looking at the git history of the resolvconf repo, the '-m'
+is deliberately used. And it wants to depend on coreutils.
+But in case of OE, the existence of /etc is ensured, and busybox
+readlink provides '-f' option, so we can just use '-f'. In this
+way, the coreutils dependency is not necessary any more.
+
+Upstream-Status: Inappropriate [OE Specific]
+
+Signed-off-by: Chen Qi <Qi.Chen@windriver.com>
+---
+ etc/resolvconf/update.d/libc | 2 +-
+ 1 file changed, 1 insertion(+), 1 deletion(-)
+
+diff --git a/etc/resolvconf/update.d/libc b/etc/resolvconf/update.d/libc
+index 1c4f6bc..f75d22c 100755
+--- a/etc/resolvconf/update.d/libc
++++ b/etc/resolvconf/update.d/libc
+@@ -57,7 +57,7 @@ fi
+ report_warning() { echo "$0: Warning: $*" >&2 ; }
+
+ resolv_conf_is_symlinked_to_dynamic_file() {
+- [ -L ${ETC}/resolv.conf ] && [ "$(readlink -m ${ETC}/resolv.conf)" = "$DYNAMICRSLVCNFFILE" ]
++ [ -L ${ETC}/resolv.conf ] && [ "$(readlink -f ${ETC}/resolv.conf)" = "$DYNAMICRSLVCNFFILE" ]
+ }
+
+ if ! resolv_conf_is_symlinked_to_dynamic_file ; then
+--
+2.17.1
+
diff --git a/meta/recipes-connectivity/resolvconf/resolvconf_1.91.bb b/meta/recipes-connectivity/resolvconf/resolvconf_1.91.bb
deleted file mode 100644
index 94fd2c1a70..0000000000
--- a/meta/recipes-connectivity/resolvconf/resolvconf_1.91.bb
+++ /dev/null
@@ -1,67 +0,0 @@
-SUMMARY = "name server information handler"
-DESCRIPTION = "Resolvconf is a framework for keeping track of the system's \
-information about currently available nameservers. It sets \
-itself up as the intermediary between programs that supply \
-nameserver information and programs that need nameserver \
-information."
-SECTION = "console/network"
-LICENSE = "GPL-2.0-or-later"
-LIC_FILES_CHKSUM = "file://COPYING;md5=c93c0550bd3173f4504b2cbd8991e50b"
-AUTHOR = "Thomas Hood"
-HOMEPAGE = "http://packages.debian.org/resolvconf"
-RDEPENDS:${PN} = "bash"
-
-SRC_URI = "git://salsa.debian.org/debian/resolvconf.git;protocol=https;branch=unstable \
- file://99_resolvconf \
- "
-
-SRCREV = "859209d573e7aec0e95d812c6b52444591a628d1"
-
-S = "${WORKDIR}/git"
-
-# the package is taken from snapshots.debian.org; that source is static and goes stale
-# so we check the latest upstream from a directory that does get updated
-UPSTREAM_CHECK_URI = "${DEBIAN_MIRROR}/main/r/resolvconf/"
-
-inherit allarch
-
-do_compile () {
- :
-}
-
-do_install () {
- install -d ${D}${sysconfdir}/default/volatiles
- install -m 0644 ${WORKDIR}/99_resolvconf ${D}${sysconfdir}/default/volatiles
- if ${@bb.utils.contains('DISTRO_FEATURES', 'systemd', 'true', 'false', d)}; then
- install -d ${D}${sysconfdir}/tmpfiles.d
- echo "d /run/${BPN}/interface - - - -" \
- > ${D}${sysconfdir}/tmpfiles.d/resolvconf.conf
- fi
- install -d ${D}${base_libdir}/${BPN}
- install -d ${D}${sysconfdir}/${BPN}
- ln -snf ${localstatedir}/run/${BPN} ${D}${sysconfdir}/${BPN}/run
- install -d ${D}${sysconfdir} ${D}${base_sbindir}
- install -d ${D}${mandir}/man8 ${D}${docdir}/${P}
- cp -pPR etc/resolvconf ${D}${sysconfdir}/
- chown -R root:root ${D}${sysconfdir}/
- install -m 0755 bin/resolvconf ${D}${base_sbindir}/
- install -m 0755 bin/list-records ${D}${base_libdir}/${BPN}
- install -d ${D}/${sysconfdir}/network/if-up.d
- install -m 0755 debian/resolvconf.000resolvconf.if-up ${D}/${sysconfdir}/network/if-up.d/000resolvconf
- install -d ${D}/${sysconfdir}/network/if-down.d
- install -m 0755 debian/resolvconf.resolvconf.if-down ${D}/${sysconfdir}/network/if-down.d/resolvconf
- install -m 0644 README ${D}${docdir}/${P}/
- install -m 0644 man/resolvconf.8 ${D}${mandir}/man8/
-}
-
-pkg_postinst:${PN} () {
- if [ -z "$D" ]; then
- if command -v systemd-tmpfiles >/dev/null; then
- systemd-tmpfiles --create ${sysconfdir}/tmpfiles.d/resolvconf.conf
- elif [ -e ${sysconfdir}/init.d/populate-volatile.sh ]; then
- ${sysconfdir}/init.d/populate-volatile.sh update
- fi
- fi
-}
-
-FILES:${PN} += "${base_libdir}/${BPN}"
diff --git a/meta/recipes-connectivity/resolvconf/resolvconf_1.92.bb b/meta/recipes-connectivity/resolvconf/resolvconf_1.92.bb
new file mode 100644
index 0000000000..226cb7ee77
--- /dev/null
+++ b/meta/recipes-connectivity/resolvconf/resolvconf_1.92.bb
@@ -0,0 +1,67 @@
+SUMMARY = "name server information handler"
+DESCRIPTION = "Resolvconf is a framework for keeping track of the system's \
+information about currently available nameservers. It sets \
+itself up as the intermediary between programs that supply \
+nameserver information and programs that need nameserver \
+information."
+SECTION = "console/network"
+LICENSE = "GPL-2.0-or-later"
+LIC_FILES_CHKSUM = "file://COPYING;md5=c93c0550bd3173f4504b2cbd8991e50b"
+HOMEPAGE = "http://packages.debian.org/resolvconf"
+RDEPENDS:${PN} = "bash sed util-linux-flock"
+
+SRC_URI = "git://salsa.debian.org/debian/resolvconf.git;protocol=https;branch=unstable \
+ file://99_resolvconf \
+ file://0001-avoid-using-m-option-for-readlink.patch \
+ "
+
+SRCREV = "86047276c80705c51859a19f0c472102e0822f34"
+
+S = "${WORKDIR}/git"
+
+# the package is taken from snapshots.debian.org; that source is static and goes stale
+# so we check the latest upstream from a directory that does get updated
+UPSTREAM_CHECK_URI = "${DEBIAN_MIRROR}/main/r/resolvconf/"
+
+do_compile () {
+ :
+}
+
+do_install () {
+ install -d ${D}${sysconfdir}/default/volatiles
+ install -m 0644 ${WORKDIR}/99_resolvconf ${D}${sysconfdir}/default/volatiles
+ if ${@bb.utils.contains('DISTRO_FEATURES', 'systemd', 'true', 'false', d)}; then
+ install -d ${D}${sysconfdir}/tmpfiles.d
+ echo "d /run/${BPN}/interface - - - -" \
+ > ${D}${sysconfdir}/tmpfiles.d/resolvconf.conf
+ fi
+ install -d ${D}${base_libdir}/${BPN}
+ install -d ${D}${sysconfdir}/${BPN}
+ install -d ${D}${nonarch_base_libdir}/${BPN}
+ ln -snf ${localstatedir}/run/${BPN} ${D}${sysconfdir}/${BPN}/run
+ install -d ${D}${sysconfdir} ${D}${base_sbindir}
+ install -d ${D}${mandir}/man8 ${D}${docdir}/${P}
+ cp -pPR etc/resolvconf ${D}${sysconfdir}/
+ chown -R root:root ${D}${sysconfdir}/
+ install -m 0755 bin/resolvconf ${D}${base_sbindir}/
+ install -m 0755 bin/normalize-resolvconf ${D}${nonarch_base_libdir}/${BPN}
+ install -m 0755 bin/list-records ${D}${base_libdir}/${BPN}
+ install -d ${D}/${sysconfdir}/network/if-up.d
+ install -m 0755 debian/resolvconf.000resolvconf.if-up ${D}/${sysconfdir}/network/if-up.d/000resolvconf
+ install -d ${D}/${sysconfdir}/network/if-down.d
+ install -m 0755 debian/resolvconf.resolvconf.if-down ${D}/${sysconfdir}/network/if-down.d/resolvconf
+ install -m 0644 README ${D}${docdir}/${P}/
+ install -m 0644 man/resolvconf.8 ${D}${mandir}/man8/
+}
+
+pkg_postinst:${PN} () {
+ if [ -z "$D" ]; then
+ if command -v systemd-tmpfiles >/dev/null; then
+ systemd-tmpfiles --create ${sysconfdir}/tmpfiles.d/resolvconf.conf
+ elif [ -e ${sysconfdir}/init.d/populate-volatile.sh ]; then
+ ${sysconfdir}/init.d/populate-volatile.sh update
+ fi
+ fi
+}
+
+FILES:${PN} += "${base_libdir}/${BPN} ${nonarch_base_libdir}/${BPN}"
diff --git a/meta/recipes-connectivity/slirp/libslirp_git.bb b/meta/recipes-connectivity/slirp/libslirp_git.bb
new file mode 100644
index 0000000000..334b786b9b
--- /dev/null
+++ b/meta/recipes-connectivity/slirp/libslirp_git.bb
@@ -0,0 +1,18 @@
+SUMMARY = "A general purpose TCP-IP emulator"
+DESCRIPTION = "A general purpose TCP-IP emulator used by virtual machine hypervisors to provide virtual networking services."
+HOMEPAGE = "https://gitlab.freedesktop.org/slirp/libslirp"
+LICENSE = "BSD-3-Clause & MIT"
+LIC_FILES_CHKSUM = "file://COPYRIGHT;md5=bca0186b14e6b05e338e729f106db727"
+
+SRC_URI = "git://gitlab.freedesktop.org/slirp/libslirp.git;protocol=https;branch=master"
+SRCREV = "3ad1710a96678fe79066b1469cead4058713a1d9"
+PV = "4.7.0"
+S = "${WORKDIR}/git"
+
+DEPENDS = " \
+ glib-2.0 \
+"
+
+inherit meson pkgconfig
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-connectivity/socat/files/0001-fix-compile-procan.c-failed.patch b/meta/recipes-connectivity/socat/files/0001-fix-compile-procan.c-failed.patch
new file mode 100644
index 0000000000..9051ae1abe
--- /dev/null
+++ b/meta/recipes-connectivity/socat/files/0001-fix-compile-procan.c-failed.patch
@@ -0,0 +1,62 @@
+From 4f887cc665c9a48b83e20ef4abe57afa7e365e0e Mon Sep 17 00:00:00 2001
+From: Hongxu Jia <hongxu.jia@eng.windriver.com>
+Date: Tue, 5 Dec 2023 23:02:22 -0800
+Subject: [PATCH v2] fix compile procan.c failed
+
+1. Compile socat failed if out of tree build (build dir != source dir)
+...
+gcc -c -D CC="gcc" -o procan.o procan.c
+cc1: fatal error: procan.c: No such file or directory
+...
+Explicitly add $srcdir to makefile rule
+
+2. Compile socat failed if multiple words in $(CC), such as CC="gcc -m64"
+...
+from ../socat-1.8.0.0/procan.c:10:
+../socat-1.8.0.0/sysincludes.h:18:10: fatal error: inttypes.h: No such file or directory
+ 18 | #include <inttypes.h> /* uint16_t */
+...
+
+In commit [Procan: print umask, CC, and couple more new infos][1],
+it defeines marcro CC in C source, the space in CC will break
+C source compile. Use first word of $(CC) to defeine marco CC
+
+[1] https://repo.or.cz/socat.git/commit/cd5673dbd0786c94e0b3ace7e35fab14c01e3185
+
+Upstream-Status: Submitted [socat@dest-unreach.org]
+Signed-off-by: Hongxu Jia <hongxu.jia@eng.windriver.com>
+---
+ Makefile.in | 10 +++++-----
+ 1 file changed, 5 insertions(+), 5 deletions(-)
+
+diff --git a/Makefile.in b/Makefile.in
+index c01b1a4..48dad69 100644
+--- a/Makefile.in
++++ b/Makefile.in
+@@ -109,8 +109,8 @@ depend: $(CFILES) $(HFILES)
+ socat: socat.o libxio.a
+ $(CC) $(CFLAGS) $(LDFLAGS) -o $@ socat.o libxio.a $(CLIBS)
+
+-procan.o: procan.c
+- $(CC) $(CFLAGS) -c -D CC=\"$(CC)\" -o $@ procan.c
++procan.o: $(srcdir)/procan.c
++ $(CC) $(CFLAGS) -c -D CC=\"$(firstword $(CC))\" -o $@ $(srcdir)/procan.c
+
+ PROCAN_OBJS=procan_main.o procan.o procan-cdefs.o hostan.o error.o sycls.o sysutils.o utils.o vsnprintf_r.o snprinterr.o
+ procan: $(PROCAN_OBJS)
+@@ -132,9 +132,9 @@ install: progs $(srcdir)/doc/socat.1
+ mkdir -p $(DESTDIR)$(BINDEST)
+ $(INSTALL) -m 755 socat $(DESTDIR)$(BINDEST)/socat1
+ ln -sf socat1 $(DESTDIR)$(BINDEST)/socat
+- $(INSTALL) -m 755 socat-chain.sh $(DESTDIR)$(BINDEST)
+- $(INSTALL) -m 755 socat-mux.sh $(DESTDIR)$(BINDEST)
+- $(INSTALL) -m 755 socat-broker.sh $(DESTDIR)$(BINDEST)
++ $(INSTALL) -m 755 $(srcdir)/socat-chain.sh $(DESTDIR)$(BINDEST)
++ $(INSTALL) -m 755 $(srcdir)/socat-mux.sh $(DESTDIR)$(BINDEST)
++ $(INSTALL) -m 755 $(srcdir)/socat-broker.sh $(DESTDIR)$(BINDEST)
+ $(INSTALL) -m 755 procan $(DESTDIR)$(BINDEST)
+ $(INSTALL) -m 755 filan $(DESTDIR)$(BINDEST)
+ mkdir -p $(DESTDIR)$(MANDEST)/man1
+--
+2.42.0
+
diff --git a/meta/recipes-connectivity/socat/socat/0001-configure.ac-check-getprotobynumber_r-with-AC_TRY_LI.patch b/meta/recipes-connectivity/socat/socat/0001-configure.ac-check-getprotobynumber_r-with-AC_TRY_LI.patch
deleted file mode 100644
index fbfb0816dd..0000000000
--- a/meta/recipes-connectivity/socat/socat/0001-configure.ac-check-getprotobynumber_r-with-AC_TRY_LI.patch
+++ /dev/null
@@ -1,35 +0,0 @@
-From d67d6b4f981db9612d808bd723176a1d2996d53a Mon Sep 17 00:00:00 2001
-From: Alexander Kanavin <alex@linutronix.de>
-Date: Mon, 17 Jan 2022 13:21:32 +0100
-Subject: [PATCH] configure.ac: check getprotobynumber_r with AC_TRY_LINK
-
-AC_TRY_COMPILE won't error out if the function is altogether absent
-(e.g. on linux musl C library), the test needs to link all the way.
-
-Upstream-Status: Submitted [via email to socat@dest-unreach.org]
-Signed-off-by: Alexander Kanavin <alex@linutronix.de>
----
- configure.ac | 6 +++---
- 1 file changed, 3 insertions(+), 3 deletions(-)
-
-diff --git a/configure.ac b/configure.ac
-index d4acc9e..973a7f2 100644
---- a/configure.ac
-+++ b/configure.ac
-@@ -137,13 +137,13 @@ AC_MSG_RESULT($sc_cv_have_prototype_hstrerror)
- # getprotobynumber_r() is not standardized
- AC_MSG_CHECKING(for getprotobynumber_r() variant)
- AC_CACHE_VAL(sc_cv_getprotobynumber_r,
--[AC_TRY_COMPILE([#include <stddef.h>
-+[AC_TRY_LINK([#include <stddef.h>
- #include <netdb.h>],[getprotobynumber_r(1,NULL,NULL,1024,NULL);],
- [sc_cv_getprotobynumber_r=1; tmp_bynum_variant=Linux],
-- [AC_TRY_COMPILE([#include <stddef.h>
-+ [AC_TRY_LINK([#include <stddef.h>
- #include <netdb.h>],[getprotobynumber_r(1,NULL,NULL,1024);],
- [sc_cv_getprotobynumber_r=2; tmp_bynum_variant=Solaris],
-- [AC_TRY_COMPILE([#include <stddef.h>
-+ [AC_TRY_LINK([#include <stddef.h>
- #include <netdb.h>],[getprotobynumber_r(1,NULL,NULL);],
- [sc_cv_getprotobynumber_r=3; tmp_bynum_variant=AIX],
-
diff --git a/meta/recipes-connectivity/socat/socat_1.7.4.3.bb b/meta/recipes-connectivity/socat/socat_1.7.4.3.bb
deleted file mode 100644
index a4a0a8933e..0000000000
--- a/meta/recipes-connectivity/socat/socat_1.7.4.3.bb
+++ /dev/null
@@ -1,53 +0,0 @@
-SUMMARY = "Multipurpose relay for bidirectional data transfer"
-DESCRIPTION = "Socat is a relay for bidirectional data \
-transfer between two independent data channels."
-HOMEPAGE = "http://www.dest-unreach.org/socat/"
-
-SECTION = "console/network"
-
-LICENSE = "GPL-2.0-with-OpenSSL-exception"
-LIC_FILES_CHKSUM = "file://COPYING;md5=b234ee4d69f5fce4486a80fdaf4a4263 \
- file://README;beginline=257;endline=287;md5=82520b052f322ac2b5b3dfdc7c7eea86"
-
-SRC_URI = "http://www.dest-unreach.org/socat/download/socat-${PV}.tar.bz2 \
- file://0001-configure.ac-check-getprotobynumber_r-with-AC_TRY_LI.patch \
- "
-
-SRC_URI[sha256sum] = "d47318104415077635119dfee44bcfb41de3497374a9a001b1aff6e2f0858007"
-
-inherit autotools
-
-EXTRA_AUTORECONF += "--exclude=autoheader"
-
-EXTRA_OECONF += "ac_cv_have_z_modifier=yes \
- ac_cv_header_bsd_libutil_h=no \
- sc_cv_termios_ispeed=no \
- ${TERMBITS_SHIFTS} \
-"
-
-TERMBITS_SHIFTS ?= "sc_cv_sys_crdly_shift=9 \
- sc_cv_sys_tabdly_shift=11 \
- sc_cv_sys_csize_shift=4"
-
-TERMBITS_SHIFTS:powerpc = "sc_cv_sys_crdly_shift=12 \
- sc_cv_sys_tabdly_shift=10 \
- sc_cv_sys_csize_shift=8"
-
-TERMBITS_SHIFTS:powerpc64 = "sc_cv_sys_crdly_shift=12 \
- sc_cv_sys_tabdly_shift=10 \
- sc_cv_sys_csize_shift=8"
-
-PACKAGECONFIG:class-target ??= "tcp-wrappers readline openssl"
-PACKAGECONFIG ??= "readline openssl"
-PACKAGECONFIG[tcp-wrappers] = "--enable-libwrap,--disable-libwrap,tcp-wrappers"
-PACKAGECONFIG[readline] = "--enable-readline,--disable-readline,readline"
-PACKAGECONFIG[openssl] = "--enable-openssl,--disable-openssl,openssl"
-
-CFLAGS += "-fcommon"
-
-do_install:prepend () {
- mkdir -p ${D}${bindir}
- install -d ${D}${bindir} ${D}${mandir}/man1
-}
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-connectivity/socat/socat_1.8.0.0.bb b/meta/recipes-connectivity/socat/socat_1.8.0.0.bb
new file mode 100644
index 0000000000..912605c95c
--- /dev/null
+++ b/meta/recipes-connectivity/socat/socat_1.8.0.0.bb
@@ -0,0 +1,53 @@
+SUMMARY = "Multipurpose relay for bidirectional data transfer"
+DESCRIPTION = "Socat is a relay for bidirectional data \
+transfer between two independent data channels."
+HOMEPAGE = "http://www.dest-unreach.org/socat/"
+
+SECTION = "console/network"
+
+LICENSE = "GPL-2.0-with-OpenSSL-exception"
+LIC_FILES_CHKSUM = "file://COPYING;md5=b234ee4d69f5fce4486a80fdaf4a4263 \
+ file://README;beginline=241;endline=271;md5=338c05eadd013872abb1d6e198e10a3f"
+
+SRC_URI = "http://www.dest-unreach.org/socat/download/socat-${PV}.tar.bz2 \
+ file://0001-fix-compile-procan.c-failed.patch \
+"
+
+SRC_URI[sha256sum] = "e1de683dd22ee0e3a6c6bbff269abe18ab0c9d7eb650204f125155b9005faca7"
+
+inherit autotools
+
+EXTRA_AUTORECONF += "--exclude=autoheader"
+
+EXTRA_OECONF += "ac_cv_have_z_modifier=yes \
+ ac_cv_header_bsd_libutil_h=no \
+ sc_cv_termios_ispeed=no \
+ ${TERMBITS_SHIFTS} \
+"
+
+TERMBITS_SHIFTS ?= "sc_cv_sys_crdly_shift=9 \
+ sc_cv_sys_tabdly_shift=11 \
+ sc_cv_sys_csize_shift=4"
+
+TERMBITS_SHIFTS:powerpc = "sc_cv_sys_crdly_shift=12 \
+ sc_cv_sys_tabdly_shift=10 \
+ sc_cv_sys_csize_shift=8"
+
+TERMBITS_SHIFTS:powerpc64 = "sc_cv_sys_crdly_shift=12 \
+ sc_cv_sys_tabdly_shift=10 \
+ sc_cv_sys_csize_shift=8"
+
+PACKAGECONFIG:class-target ??= "tcp-wrappers readline openssl"
+PACKAGECONFIG ??= "readline openssl"
+PACKAGECONFIG[tcp-wrappers] = "--enable-libwrap,--disable-libwrap,tcp-wrappers"
+PACKAGECONFIG[readline] = "--enable-readline,--disable-readline,readline"
+PACKAGECONFIG[openssl] = "--enable-openssl,--disable-openssl,openssl"
+
+CFLAGS += "-fcommon"
+
+do_install:prepend () {
+ mkdir -p ${D}${bindir}
+ install -d ${D}${bindir} ${D}${mandir}/man1
+}
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-connectivity/wpa-supplicant/wpa-supplicant/0001-PEAP-client-Update-Phase-2-authentication-requiremen.patch b/meta/recipes-connectivity/wpa-supplicant/wpa-supplicant/0001-PEAP-client-Update-Phase-2-authentication-requiremen.patch
new file mode 100644
index 0000000000..620560d3c7
--- /dev/null
+++ b/meta/recipes-connectivity/wpa-supplicant/wpa-supplicant/0001-PEAP-client-Update-Phase-2-authentication-requiremen.patch
@@ -0,0 +1,213 @@
+From f6f7cead3661ceeef54b21f7e799c0afc98537ec Mon Sep 17 00:00:00 2001
+From: Jouni Malinen <j@w1.fi>
+Date: Sat, 8 Jul 2023 19:55:32 +0300
+Subject: [PATCH] PEAP client: Update Phase 2 authentication requirements
+
+The previous PEAP client behavior allowed the server to skip Phase 2
+authentication with the expectation that the server was authenticated
+during Phase 1 through TLS server certificate validation. Various PEAP
+specifications are not exactly clear on what the behavior on this front
+is supposed to be and as such, this ended up being more flexible than
+the TTLS/FAST/TEAP cases. However, this is not really ideal when
+unfortunately common misconfiguration of PEAP is used in deployed
+devices where the server trust root (ca_cert) is not configured or the
+user has an easy option for allowing this validation step to be skipped.
+
+Change the default PEAP client behavior to be to require Phase 2
+authentication to be successfully completed for cases where TLS session
+resumption is not used and the client certificate has not been
+configured. Those two exceptions are the main cases where a deployed
+authentication server might skip Phase 2 and as such, where a more
+strict default behavior could result in undesired interoperability
+issues. Requiring Phase 2 authentication will end up disabling TLS
+session resumption automatically to avoid interoperability issues.
+
+Allow Phase 2 authentication behavior to be configured with a new phase1
+configuration parameter option:
+'phase2_auth' option can be used to control Phase 2 (i.e., within TLS
+tunnel) behavior for PEAP:
+ * 0 = do not require Phase 2 authentication
+ * 1 = require Phase 2 authentication when client certificate
+ (private_key/client_cert) is no used and TLS session resumption was
+ not used (default)
+ * 2 = require Phase 2 authentication in all cases
+
+Signed-off-by: Jouni Malinen <j@w1.fi>
+
+CVE: CVE-2023-52160
+Upstream-Status: Backport [https://w1.fi/cgit/hostap/commit/?id=8e6485a1bcb0baffdea9e55255a81270b768439c]
+
+Signed-off-by: Claus Stovgaard <claus.stovgaard@gmail.com>
+
+---
+ src/eap_peer/eap_config.h | 8 ++++++
+ src/eap_peer/eap_peap.c | 40 +++++++++++++++++++++++++++---
+ src/eap_peer/eap_tls_common.c | 6 +++++
+ src/eap_peer/eap_tls_common.h | 5 ++++
+ wpa_supplicant/wpa_supplicant.conf | 7 ++++++
+ 5 files changed, 63 insertions(+), 3 deletions(-)
+
+diff --git a/src/eap_peer/eap_config.h b/src/eap_peer/eap_config.h
+index 3238f74..047eec2 100644
+--- a/src/eap_peer/eap_config.h
++++ b/src/eap_peer/eap_config.h
+@@ -469,6 +469,14 @@ struct eap_peer_config {
+ * 1 = use cryptobinding if server supports it
+ * 2 = require cryptobinding
+ *
++ * phase2_auth option can be used to control Phase 2 (i.e., within TLS
++ * tunnel) behavior for PEAP:
++ * 0 = do not require Phase 2 authentication
++ * 1 = require Phase 2 authentication when client certificate
++ * (private_key/client_cert) is no used and TLS session resumption was
++ * not used (default)
++ * 2 = require Phase 2 authentication in all cases
++ *
+ * EAP-WSC (WPS) uses following options: pin=Device_Password and
+ * uuid=Device_UUID
+ *
+diff --git a/src/eap_peer/eap_peap.c b/src/eap_peer/eap_peap.c
+index 12e30df..6080697 100644
+--- a/src/eap_peer/eap_peap.c
++++ b/src/eap_peer/eap_peap.c
+@@ -67,6 +67,7 @@ struct eap_peap_data {
+ u8 cmk[20];
+ int soh; /* Whether IF-TNCCS-SOH (Statement of Health; Microsoft NAP)
+ * is enabled. */
++ enum { NO_AUTH, FOR_INITIAL, ALWAYS } phase2_auth;
+ };
+
+
+@@ -114,6 +115,19 @@ static void eap_peap_parse_phase1(struct eap_peap_data *data,
+ wpa_printf(MSG_DEBUG, "EAP-PEAP: Require cryptobinding");
+ }
+
++ if (os_strstr(phase1, "phase2_auth=0")) {
++ data->phase2_auth = NO_AUTH;
++ wpa_printf(MSG_DEBUG,
++ "EAP-PEAP: Do not require Phase 2 authentication");
++ } else if (os_strstr(phase1, "phase2_auth=1")) {
++ data->phase2_auth = FOR_INITIAL;
++ wpa_printf(MSG_DEBUG,
++ "EAP-PEAP: Require Phase 2 authentication for initial connection");
++ } else if (os_strstr(phase1, "phase2_auth=2")) {
++ data->phase2_auth = ALWAYS;
++ wpa_printf(MSG_DEBUG,
++ "EAP-PEAP: Require Phase 2 authentication for all cases");
++ }
+ #ifdef EAP_TNC
+ if (os_strstr(phase1, "tnc=soh2")) {
+ data->soh = 2;
+@@ -142,6 +156,7 @@ static void * eap_peap_init(struct eap_sm *sm)
+ data->force_peap_version = -1;
+ data->peap_outer_success = 2;
+ data->crypto_binding = OPTIONAL_BINDING;
++ data->phase2_auth = FOR_INITIAL;
+
+ if (config && config->phase1)
+ eap_peap_parse_phase1(data, config->phase1);
+@@ -454,6 +469,20 @@ static int eap_tlv_validate_cryptobinding(struct eap_sm *sm,
+ }
+
+
++static bool peap_phase2_sufficient(struct eap_sm *sm,
++ struct eap_peap_data *data)
++{
++ if ((data->phase2_auth == ALWAYS ||
++ (data->phase2_auth == FOR_INITIAL &&
++ !tls_connection_resumed(sm->ssl_ctx, data->ssl.conn) &&
++ !data->ssl.client_cert_conf) ||
++ data->phase2_eap_started) &&
++ !data->phase2_eap_success)
++ return false;
++ return true;
++}
++
++
+ /**
+ * eap_tlv_process - Process a received EAP-TLV message and generate a response
+ * @sm: Pointer to EAP state machine allocated with eap_peer_sm_init()
+@@ -568,6 +597,11 @@ static int eap_tlv_process(struct eap_sm *sm, struct eap_peap_data *data,
+ " - force failed Phase 2");
+ resp_status = EAP_TLV_RESULT_FAILURE;
+ ret->decision = DECISION_FAIL;
++ } else if (!peap_phase2_sufficient(sm, data)) {
++ wpa_printf(MSG_INFO,
++ "EAP-PEAP: Server indicated Phase 2 success, but sufficient Phase 2 authentication has not been completed");
++ resp_status = EAP_TLV_RESULT_FAILURE;
++ ret->decision = DECISION_FAIL;
+ } else {
+ resp_status = EAP_TLV_RESULT_SUCCESS;
+ ret->decision = DECISION_UNCOND_SUCC;
+@@ -887,8 +921,7 @@ continue_req:
+ /* EAP-Success within TLS tunnel is used to indicate
+ * shutdown of the TLS channel. The authentication has
+ * been completed. */
+- if (data->phase2_eap_started &&
+- !data->phase2_eap_success) {
++ if (!peap_phase2_sufficient(sm, data)) {
+ wpa_printf(MSG_DEBUG, "EAP-PEAP: Phase 2 "
+ "Success used to indicate success, "
+ "but Phase 2 EAP was not yet "
+@@ -1199,8 +1232,9 @@ static struct wpabuf * eap_peap_process(struct eap_sm *sm, void *priv,
+ static bool eap_peap_has_reauth_data(struct eap_sm *sm, void *priv)
+ {
+ struct eap_peap_data *data = priv;
++
+ return tls_connection_established(sm->ssl_ctx, data->ssl.conn) &&
+- data->phase2_success;
++ data->phase2_success && data->phase2_auth != ALWAYS;
+ }
+
+
+diff --git a/src/eap_peer/eap_tls_common.c b/src/eap_peer/eap_tls_common.c
+index c1837db..a53eeb1 100644
+--- a/src/eap_peer/eap_tls_common.c
++++ b/src/eap_peer/eap_tls_common.c
+@@ -239,6 +239,12 @@ static int eap_tls_params_from_conf(struct eap_sm *sm,
+
+ sm->ext_cert_check = !!(params->flags & TLS_CONN_EXT_CERT_CHECK);
+
++ if (!phase2)
++ data->client_cert_conf = params->client_cert ||
++ params->client_cert_blob ||
++ params->private_key ||
++ params->private_key_blob;
++
+ return 0;
+ }
+
+diff --git a/src/eap_peer/eap_tls_common.h b/src/eap_peer/eap_tls_common.h
+index 9ac0012..3348634 100644
+--- a/src/eap_peer/eap_tls_common.h
++++ b/src/eap_peer/eap_tls_common.h
+@@ -79,6 +79,11 @@ struct eap_ssl_data {
+ * tls_v13 - Whether TLS v1.3 or newer is used
+ */
+ int tls_v13;
++
++ /**
++ * client_cert_conf: Whether client certificate has been configured
++ */
++ bool client_cert_conf;
+ };
+
+
+diff --git a/wpa_supplicant/wpa_supplicant.conf b/wpa_supplicant/wpa_supplicant.conf
+index 6619d6b..d63f73c 100644
+--- a/wpa_supplicant/wpa_supplicant.conf
++++ b/wpa_supplicant/wpa_supplicant.conf
+@@ -1321,6 +1321,13 @@ fast_reauth=1
+ # * 0 = do not use cryptobinding (default)
+ # * 1 = use cryptobinding if server supports it
+ # * 2 = require cryptobinding
++# 'phase2_auth' option can be used to control Phase 2 (i.e., within TLS
++# tunnel) behavior for PEAP:
++# * 0 = do not require Phase 2 authentication
++# * 1 = require Phase 2 authentication when client certificate
++# (private_key/client_cert) is no used and TLS session resumption was
++# not used (default)
++# * 2 = require Phase 2 authentication in all cases
+ # EAP-WSC (WPS) uses following options: pin=<Device Password> or
+ # pbc=1.
+ #
diff --git a/meta/recipes-connectivity/wpa-supplicant/wpa-supplicant_2.10.bb b/meta/recipes-connectivity/wpa-supplicant/wpa-supplicant_2.10.bb
index 4e27b92bc6..22028ce957 100644
--- a/meta/recipes-connectivity/wpa-supplicant/wpa-supplicant_2.10.bb
+++ b/meta/recipes-connectivity/wpa-supplicant/wpa-supplicant_2.10.bb
@@ -18,6 +18,7 @@ SRC_URI = "http://w1.fi/releases/wpa_supplicant-${PV}.tar.gz \
file://0001-build-Re-enable-options-for-libwpa_client.so-and-wpa.patch \
file://0002-Fix-removal-of-wpa_passphrase-on-make-clean.patch \
file://0001-Install-wpa_passphrase-when-not-disabled.patch \
+ file://0001-PEAP-client-Update-Phase-2-authentication-requiremen.patch \
"
SRC_URI[sha256sum] = "20df7ae5154b3830355f8ab4269123a87affdea59fe74fe9292a91d0d7e17b2f"
@@ -134,4 +135,4 @@ python split_wpa_supplicant_libs () {
d.setVar('RRECOMMENDS:' + pn + '-plugins', ' '.join(split_packages))
d.appendVar('RRECOMMENDS:' + pn + '-dbg', ' ' + ' '.join(split_dbg_packages))
}
-PACKAGESPLITFUNCS:prepend = "split_wpa_supplicant_libs "
+PACKAGESPLITFUNCS += "split_wpa_supplicant_libs"
diff --git a/meta/recipes-core/base-files/base-files/0001-add-nss-resolve-to-nsswitch.patch b/meta/recipes-core/base-files/base-files/0001-add-nss-resolve-to-nsswitch.patch
new file mode 100644
index 0000000000..a6e39e0956
--- /dev/null
+++ b/meta/recipes-core/base-files/base-files/0001-add-nss-resolve-to-nsswitch.patch
@@ -0,0 +1,31 @@
+From 830abe652428d9d31780c3ace121635ad7b64274 Mon Sep 17 00:00:00 2001
+From: Eero Aaltonen <eero.aaltonen@vaisala.com>
+Date: Wed Sep 27 15:50:48 2023 +0300
+Subject: [PATCH] Add nss-resolve to the Name Service Switch (NSS)
+
+Add `nss-resolve` so that `systemd-resolved` is used for name
+resolution with glibc `gethostbyname` calls.
+
+Upstream-Status: Inappropriate [no upstream, configuration].
+
+Signed-off-by: Eero Aaltonen <eero.aaltonen@vaisala.com>
+---
+ nsswitch.conf | 2 +-
+ 1 file changed, 1 insertion(+), 1 deletion(-)
+
+diff --git a/nsswitch.conf b/nsswitch.conf
+index 06f03d2..34b165c 100644
+--- a/nsswitch.conf
++++ b/nsswitch.conf
+@@ -8,7 +8,7 @@ passwd: compat
+ group: compat
+ shadow: compat
+
+-hosts: files dns
++hosts: resolve [!UNAVAIL=return] files dns
+ networks: files
+
+ protocols: db files
+--
+2.25.1
+
diff --git a/meta/recipes-core/base-files/base-files/hosts b/meta/recipes-core/base-files/base-files/hosts
index b94f414d5c..10a5b6c704 100644
--- a/meta/recipes-core/base-files/base-files/hosts
+++ b/meta/recipes-core/base-files/base-files/hosts
@@ -1,4 +1,4 @@
-127.0.0.1 localhost.localdomain localhost
+127.0.0.1 localhost
# The following lines are desirable for IPv6 capable hosts
::1 localhost ip6-localhost ip6-loopback
diff --git a/meta/recipes-core/base-files/base-files/profile b/meta/recipes-core/base-files/base-files/profile
index cc37e1ba77..bded3757cc 100644
--- a/meta/recipes-core/base-files/base-files/profile
+++ b/meta/recipes-core/base-files/base-files/profile
@@ -10,6 +10,12 @@ PATH="/usr/local/bin:/usr/bin:/bin"
# Set the prompt for bash and ash (no other shells known to be in use here)
[ -z "$PS1" ] || PS1='\u@\h:\w\$ '
+# Use the EDITOR not being set as a trigger to call resize later on
+FIRSTTIMESETUP=0
+if [ -z "$EDITOR" ] ; then
+ FIRSTTIMESETUP=1
+fi
+
if [ -d /etc/profile.d ]; then
for i in /etc/profile.d/*.sh; do
if [ -f $i -a -r $i ]; then
@@ -50,17 +56,19 @@ resize() {
}
fi
fi
- # Use the EDITOR not being set as a trigger to call resize
- # and only do this for /dev/tty[A-z] which are typically
+ # only do this for /dev/tty[A-z] which are typically
# serial ports
- if [ -z "$EDITOR" -a "$SHLVL" = 1 ] ; then
+ if [ $FIRSTTIMESETUP -eq 1 -a $SHLVL -eq 1 ] ; then
case $(tty 2>/dev/null) in
/dev/tty[A-z]*) resize >/dev/null;;
esac
fi
fi
-EDITOR="vi" # needed for packages like cron, git-commit
+if [ -z "$EDITOR" ]; then
+ EDITOR="vi" # needed for packages like cron, git-commit
+fi
+
export PATH PS1 OPIEDIR QPEDIR QTDIR EDITOR TERM
umask 022
diff --git a/meta/recipes-core/base-files/base-files_3.0.14.bb b/meta/recipes-core/base-files/base-files_3.0.14.bb
index ff83567bd9..9fab53ce63 100644
--- a/meta/recipes-core/base-files/base-files_3.0.14.bb
+++ b/meta/recipes-core/base-files/base-files_3.0.14.bb
@@ -1,7 +1,6 @@
SUMMARY = "Miscellaneous files for the base system"
DESCRIPTION = "The base-files package creates the basic system directory structure and provides a small set of key configuration files for the system."
SECTION = "base"
-PR = "r89"
LICENSE = "GPL-2.0-only"
LIC_FILES_CHKSUM = "file://licenses/GPL-2;md5=94d55d512a9ba36caa9b7df079bae19f"
# Removed all license related tasks in this recipe as license.bbclass
@@ -24,6 +23,8 @@ SRC_URI = "file://rotation \
file://share/dot.profile \
file://licenses/GPL-2 \
"
+SRC_URI:append:libc-glibc = "${@bb.utils.contains('DISTRO_FEATURES', 'systemd systemd-resolved', ' file://0001-add-nss-resolve-to-nsswitch.patch', '', d)}"
+
S = "${WORKDIR}"
INHIBIT_DEFAULT_DEPS = "1"
@@ -137,6 +138,10 @@ do_install () {
echo ${hostname} > ${D}${sysconfdir}/hostname
echo "127.0.1.1 ${hostname}" >> ${D}${sysconfdir}/hosts
fi
+
+ if ${@bb.utils.contains('DISTRO_FEATURES', 'ipv6', 'false', 'true', d)}; then
+ sed -i '/^::1/s/ localhost//' ${D}${sysconfdir}/hosts
+ fi
}
do_install:append:libc-glibc () {
diff --git a/meta/recipes-core/base-passwd/base-passwd/0001-base-passwd-Add-the-sgx-group.patch b/meta/recipes-core/base-passwd/base-passwd/0001-base-passwd-Add-the-sgx-group.patch
new file mode 100644
index 0000000000..e1340e1b70
--- /dev/null
+++ b/meta/recipes-core/base-passwd/base-passwd/0001-base-passwd-Add-the-sgx-group.patch
@@ -0,0 +1,30 @@
+From 9e57771d138ac423d5139b984b8c869122ce4976 Mon Sep 17 00:00:00 2001
+From: Alex Kiernan <alexk@zuma.ai>
+Date: Fri, 28 Jul 2023 10:28:57 +0100
+Subject: [PATCH] base-passwd: Add the sgx group
+
+To avoid errors from eudev/udev we need an sgx group, but if we add it
+via groupadd that causes shadow login to be brought into an image, which
+causes images which have CONFIG_MULTIUSER unset to fail with `setgid:
+Function not implemented` as shadow's login doesn't implement the
+heuristics which busybox has to handle this kernel configuration.
+
+Upstream-Status: Inappropriate [oe-specific]
+
+Signed-off-by: Alex Kiernan <alex.kiernan@gmail.com>
+---
+ group.master | 1 +
+ 1 file changed, 1 insertion(+)
+
+diff --git a/group.master b/group.master
+index d34d2b832d43..e54fd1d2c6dc 100644
+--- a/group.master
++++ b/group.master
+@@ -34,6 +34,7 @@ video:*:44:
+ sasl:*:45:
+ plugdev:*:46:
+ kvm:*:47:
++sgx:*:48:
+ staff:*:50:
+ games:*:60:
+ shutdown:*:70:
diff --git a/meta/recipes-core/base-passwd/base-passwd/0006-Make-it-possible-to-build-without-debconf-support.patch b/meta/recipes-core/base-passwd/base-passwd/0006-Make-it-possible-to-build-without-debconf-support.patch
deleted file mode 100644
index 6e236993f5..0000000000
--- a/meta/recipes-core/base-passwd/base-passwd/0006-Make-it-possible-to-build-without-debconf-support.patch
+++ /dev/null
@@ -1,129 +0,0 @@
-From 236d6c8c0dd7e15d9a9795813b94bc87ce09eec5 Mon Sep 17 00:00:00 2001
-From: Peter Kjellerstedt <peter.kjellerstedt@axis.com>
-Date: Fri, 29 Apr 2022 19:32:29 +0200
-Subject: [PATCH] Make it possible to build without debconf support
-
-Not all systems have the debconfclient library available.
-
-Upstream-Status: Backport [https://salsa.debian.org/debian/base-passwd/-/commit/c72aa5dd25a952da25e307761f4526db2c8c39ec]
-Signed-off-by: Peter Kjellerstedt <peter.kjellerstedt@axis.com>
----
- Makefile.am | 1 -
- configure.ac | 13 +++++++++++++
- update-passwd.c | 15 +++++++++++++++
- 3 files changed, 28 insertions(+), 1 deletion(-)
-
-diff --git a/Makefile.am b/Makefile.am
-index 223916f..4bdd769 100644
---- a/Makefile.am
-+++ b/Makefile.am
-@@ -3,7 +3,6 @@ SUBDIRS = doc man
- sbin_PROGRAMS = update-passwd
-
- update_passwd_SOURCES = update-passwd.c
--update_passwd_LDADD = -ldebconfclient
-
- pkgdata_DATA = passwd.master group.master
-
-diff --git a/configure.ac b/configure.ac
-index 9d1ace5..1e35ad1 100644
---- a/configure.ac
-+++ b/configure.ac
-@@ -14,6 +14,19 @@ AC_SYS_LARGEFILE
- dnl Scan for things we need
- AC_CHECK_FUNCS([putgrent])
-
-+dnl Check for debconf
-+AC_MSG_CHECKING([whether to enable debconf support])
-+AC_ARG_ENABLE([debconf],
-+ [AS_HELP_STRING([--disable-debconf], [disable support for debconf])],
-+ [],
-+ [enable_debconf=yes])
-+AC_MSG_RESULT($enable_debconf)
-+AS_IF([test "x$enable_debconf" != xno],
-+ [AC_CHECK_LIB([debconfclient], [debconfclient_new], [],
-+ [AC_MSG_ERROR(
-+ [debconf support not available (use --disable-debconf to disable)])])
-+ AC_DEFINE([HAVE_DEBCONF], [1], [Define if you have libdebconfclient])])
-+
- dnl Finally output everything
- AC_CONFIG_FILES([Makefile doc/Makefile man/Makefile])
- AC_OUTPUT
-diff --git a/update-passwd.c b/update-passwd.c
-index 3f3dffa..5b49740 100644
---- a/update-passwd.c
-+++ b/update-passwd.c
-@@ -39,7 +39,9 @@
- #include <stdarg.h>
- #include <ctype.h>
-
-+#ifdef HAVE_DEBCONF
- #include <cdebconf/debconfclient.h>
-+#endif
-
- #define DEFAULT_PASSWD_MASTER "/usr/share/base-passwd/passwd.master"
- #define DEFAULT_GROUP_MASTER "/usr/share/base-passwd/group.master"
-@@ -143,6 +145,7 @@ int flag_debconf = 0;
- const char* user_domain = DEFAULT_DEBCONF_DOMAIN;
- const char* group_domain = DEFAULT_DEBCONF_DOMAIN;
-
-+#ifdef HAVE_DEBCONF
- struct debconfclient* debconf = NULL;
-
- /* Abort the program if talking to debconf fails. Use ret exactly once. */
-@@ -162,6 +165,10 @@ struct debconfclient* debconf = NULL;
- DEBCONF_CHECK(debconf_register(debconf, (template), (question)))
- #define DEBCONF_SUBST(question, var, value) \
- DEBCONF_CHECK(debconf_subst(debconf, (question), (var), (value)))
-+#else
-+#define DEBCONF_REGISTER(template, question)
-+#define DEBCONF_SUBST(question, var, value)
-+#endif
-
-
- /* malloc() with out-of-memory checking.
-@@ -621,6 +628,7 @@ void version() {
- * flag. Aborts the problem on any failure.
- */
- int ask_debconf(const char* priority, const char* question) {
-+#ifdef HAVE_DEBCONF
- int ret;
- const char* response;
-
-@@ -640,6 +648,9 @@ int ask_debconf(const char* priority, const char* question) {
- return 1;
- else
- return 0;
-+#else
-+ return 0;
-+#endif
- }
-
-
-@@ -1427,6 +1438,7 @@ int main(int argc, char** argv) {
- /* If DEBIAN_HAS_FRONTEND is set in the environment, we're running under
- * debconf. Enable debconf prompting unless --dry-run was also given.
- */
-+#ifdef HAVE_DEBCONF
- if (getenv("DEBIAN_HAS_FRONTEND")!=NULL && !opt_dryrun) {
- debconf=debconfclient_new();
- if (debconf==NULL) {
-@@ -1435,6 +1447,7 @@ int main(int argc, char** argv) {
- }
- flag_debconf=1;
- }
-+#endif
-
- if (read_passwd(&master_accounts, master_passwd)!=0)
- return 2;
-@@ -1480,8 +1493,10 @@ int main(int argc, char** argv) {
- if (!unlock_files())
- return 5;
-
-+#ifdef HAVE_DEBCONF
- if (debconf!=NULL)
- debconfclient_delete(debconf);
-+#endif
-
- if (opt_dryrun)
- return flag_dirty;
diff --git a/meta/recipes-core/base-passwd/base-passwd/0007-Add-wheel-group.patch b/meta/recipes-core/base-passwd/base-passwd/0007-Add-wheel-group.patch
new file mode 100644
index 0000000000..d77122789d
--- /dev/null
+++ b/meta/recipes-core/base-passwd/base-passwd/0007-Add-wheel-group.patch
@@ -0,0 +1,20 @@
+
+We need to have a wheel group which has some system privileges to consult the
+systemd journal or manage printers with cups.
+
+Upstream says the group does not exist by default.
+
+Upstream-Status: Inappropriate [enable feature]
+
+Signed-off-by: Louis Rannou <lrannou@baylibre.com>
+Index: base-passwd-3.5.26/group.master
+===================================================================
+--- base-passwd-3.5.29.orig/group.master
++++ base-passwd-3.5.29/group.master
+@@ -38,5 +38,6 @@
+ staff:*:50:
+ games:*:60:
+ shutdown:*:70:
++wheel:*:80:
+ users:*:100:
+ nogroup:*:65534:
diff --git a/meta/recipes-core/base-passwd/base-passwd/0007-Make-it-possible-to-disable-the-generation-of-the-do.patch b/meta/recipes-core/base-passwd/base-passwd/0007-Make-it-possible-to-disable-the-generation-of-the-do.patch
deleted file mode 100644
index 5c63599143..0000000000
--- a/meta/recipes-core/base-passwd/base-passwd/0007-Make-it-possible-to-disable-the-generation-of-the-do.patch
+++ /dev/null
@@ -1,46 +0,0 @@
-From 63e8270141a296843cfe1daba38e1969ac6d75ae Mon Sep 17 00:00:00 2001
-From: Peter Kjellerstedt <peter.kjellerstedt@axis.com>
-Date: Sat, 30 Apr 2022 00:35:34 +0200
-Subject: [PATCH] Make it possible to disable the generation of the
- documentation
-
-Not all systems have docbook and po4a available.
-
-Upstream-Status: Backport [https://salsa.debian.org/debian/base-passwd/-/commit/2a6d16e595c93084e279d0dcbef37d960b44fd1a]
-Signed-off-by: Peter Kjellerstedt <peter.kjellerstedt@axis.com>
----
- Makefile.am | 2 ++
- configure.ac | 9 +++++++++
- 2 files changed, 11 insertions(+)
-
-diff --git a/Makefile.am b/Makefile.am
-index 4bdd769..97b4f42 100644
---- a/Makefile.am
-+++ b/Makefile.am
-@@ -1,4 +1,6 @@
-+if ENABLE_DOCS
- SUBDIRS = doc man
-+endif
-
- sbin_PROGRAMS = update-passwd
-
-diff --git a/configure.ac b/configure.ac
-index 1e35ad1..b98374e 100644
---- a/configure.ac
-+++ b/configure.ac
-@@ -27,6 +27,15 @@ AS_IF([test "x$enable_debconf" != xno],
- [debconf support not available (use --disable-debconf to disable)])])
- AC_DEFINE([HAVE_DEBCONF], [1], [Define if you have libdebconfclient])])
-
-+dnl Check whether to build the documentation
-+AC_MSG_CHECKING([whether to build the documentation])
-+AC_ARG_ENABLE([docs],
-+ [AC_HELP_STRING([--disable-docs], [do not build and install documentation])],
-+ [],
-+ [enable_docs=yes])
-+AC_MSG_RESULT($enable_docs)
-+AM_CONDITIONAL(ENABLE_DOCS, test "x$enable_docs" = xyes)
-+
- dnl Finally output everything
- AC_CONFIG_FILES([Makefile doc/Makefile man/Makefile])
- AC_OUTPUT
diff --git a/meta/recipes-core/base-passwd/base-passwd_3.5.52.bb b/meta/recipes-core/base-passwd/base-passwd_3.5.52.bb
deleted file mode 100644
index f89752c077..0000000000
--- a/meta/recipes-core/base-passwd/base-passwd_3.5.52.bb
+++ /dev/null
@@ -1,122 +0,0 @@
-SUMMARY = "Base system master password/group files"
-DESCRIPTION = "The master copies of the user database files (/etc/passwd and /etc/group). The update-passwd tool is also provided to keep the system databases synchronized with these master files."
-HOMEPAGE = "https://launchpad.net/base-passwd"
-SECTION = "base"
-LICENSE = "GPL-2.0-only"
-LIC_FILES_CHKSUM = "file://COPYING;md5=eb723b61539feef013de476e68b5c50a"
-
-SRC_URI = "https://launchpad.net/debian/+archive/primary/+files/${BPN}_${PV}.tar.xz \
- file://0001-Add-a-shutdown-group.patch \
- file://0002-Use-bin-sh-instead-of-bin-bash-for-the-root-user.patch \
- file://0003-Remove-for-root-since-we-do-not-have-an-etc-shadow.patch \
- file://0004-Add-an-input-group-for-the-dev-input-devices.patch \
- file://0005-Add-kvm-group.patch \
- file://0006-Make-it-possible-to-build-without-debconf-support.patch \
- file://0007-Make-it-possible-to-disable-the-generation-of-the-do.patch \
- "
-
-SRC_URI[sha256sum] = "5dfec6556b5a16ecf14dd3f7c95b591d929270289268123f31a3d6317f95ccea"
-
-# the package is taken from launchpad; that source is static and goes stale
-# so we check the latest upstream from a directory that does get updated
-UPSTREAM_CHECK_URI = "${DEBIAN_MIRROR}/main/b/base-passwd/"
-
-S = "${WORKDIR}/work"
-
-inherit autotools
-
-EXTRA_OECONF += "--disable-debconf --disable-docs"
-
-NOLOGIN ?= "${base_sbindir}/nologin"
-
-do_install () {
- install -d -m 755 ${D}${sbindir}
- install -o root -g root -p -m 755 ${B}/update-passwd ${D}${sbindir}/
- install -d -m 755 ${D}${mandir}/man8 ${D}${mandir}/pl/man8
- install -p -m 644 ${S}/man/update-passwd.8 ${D}${mandir}/man8/
- install -p -m 644 ${S}/man/update-passwd.pl.8 \
- ${D}${mandir}/pl/man8/update-passwd.8
- gzip -9 ${D}${mandir}/man8/* ${D}${mandir}/pl/man8/*
- install -d -m 755 ${D}${datadir}/base-passwd
- install -o root -g root -p -m 644 ${S}/passwd.master ${D}${datadir}/base-passwd/
- sed -i 's#:/root:#:${ROOT_HOME}:#' ${D}${datadir}/base-passwd/passwd.master
- sed -i 's#/usr/sbin/nologin#${NOLOGIN}#' ${D}${datadir}/base-passwd/passwd.master
- install -o root -g root -p -m 644 ${S}/group.master ${D}${datadir}/base-passwd/
-
- install -d -m 755 ${D}${docdir}/${BPN}
- install -p -m 644 ${S}/debian/changelog ${D}${docdir}/${BPN}/
- gzip -9 ${D}${docdir}/${BPN}/*
- install -p -m 644 ${S}/README ${D}${docdir}/${BPN}/
- install -p -m 644 ${S}/debian/copyright ${D}${docdir}/${BPN}/
-}
-
-basepasswd_sysroot_postinst() {
-#!/bin/sh
-
-# Install passwd.master and group.master to sysconfdir
-install -d -m 755 ${STAGING_DIR_TARGET}${sysconfdir}
-for i in passwd group; do
- install -p -m 644 ${STAGING_DIR_TARGET}${datadir}/base-passwd/\$i.master \
- ${STAGING_DIR_TARGET}${sysconfdir}/\$i
-done
-
-# Run any useradd postinsts
-for script in ${STAGING_DIR_TARGET}${bindir}/postinst-useradd-*; do
- if [ -f \$script ]; then
- \$script
- fi
-done
-}
-
-SYSROOT_DIRS += "${sysconfdir}"
-SYSROOT_PREPROCESS_FUNCS += "base_passwd_tweaksysroot"
-
-base_passwd_tweaksysroot () {
- mkdir -p ${SYSROOT_DESTDIR}${bindir}
- dest=${SYSROOT_DESTDIR}${bindir}/postinst-${PN}
- echo "${basepasswd_sysroot_postinst}" > $dest
- chmod 0755 $dest
-}
-
-python populate_packages:prepend() {
- # Add in the preinst function for ${PN}
- # We have to do this here as prior to this, passwd/group.master
- # would be unavailable. We need to create these files at preinst
- # time before the files from the package may be available, hence
- # storing the data from the files in the preinst directly.
-
- f = open(d.expand("${STAGING_DATADIR}/base-passwd/passwd.master"), 'r')
- passwd = "".join(f.readlines())
- f.close()
- f = open(d.expand("${STAGING_DATADIR}/base-passwd/group.master"), 'r')
- group = "".join(f.readlines())
- f.close()
-
- preinst = """#!/bin/sh
-mkdir -p $D${sysconfdir}
-if [ ! -e $D${sysconfdir}/passwd ]; then
-\tcat << 'EOF' > $D${sysconfdir}/passwd
-""" + passwd + """EOF
-fi
-if [ ! -e $D${sysconfdir}/group ]; then
-\tcat << 'EOF' > $D${sysconfdir}/group
-""" + group + """EOF
-fi
-"""
- d.setVar(d.expand('pkg_preinst:${PN}'), preinst)
-}
-
-addtask do_package after do_populate_sysroot
-
-ALLOW_EMPTY:${PN} = "1"
-
-PACKAGES =+ "${PN}-update"
-FILES:${PN}-update = "${sbindir}/* ${datadir}/${PN}"
-
-pkg_postinst:${PN}-update () {
-#!/bin/sh
-if [ -n "$D" ]; then
- exit 0
-fi
-${sbindir}/update-passwd
-}
diff --git a/meta/recipes-core/base-passwd/base-passwd_3.6.3.bb b/meta/recipes-core/base-passwd/base-passwd_3.6.3.bb
new file mode 100644
index 0000000000..bf50b01fd5
--- /dev/null
+++ b/meta/recipes-core/base-passwd/base-passwd_3.6.3.bb
@@ -0,0 +1,125 @@
+SUMMARY = "Base system master password/group files"
+DESCRIPTION = "The master copies of the user database files (/etc/passwd and /etc/group). The update-passwd tool is also provided to keep the system databases synchronized with these master files."
+HOMEPAGE = "https://launchpad.net/base-passwd"
+SECTION = "base"
+LICENSE = "GPL-2.0-only"
+LIC_FILES_CHKSUM = "file://COPYING;md5=eb723b61539feef013de476e68b5c50a"
+
+SRC_URI = "https://launchpad.net/debian/+archive/primary/+files/${BPN}_${PV}.tar.xz \
+ file://0001-Add-a-shutdown-group.patch \
+ file://0002-Use-bin-sh-instead-of-bin-bash-for-the-root-user.patch \
+ file://0003-Remove-for-root-since-we-do-not-have-an-etc-shadow.patch \
+ file://0004-Add-an-input-group-for-the-dev-input-devices.patch \
+ file://0005-Add-kvm-group.patch \
+ file://0007-Add-wheel-group.patch \
+ file://0001-base-passwd-Add-the-sgx-group.patch \
+ "
+
+SRC_URI[sha256sum] = "83575327d8318a419caf2d543341215c046044073d1afec2acc0ac4d8095ff39"
+
+# the package is taken from launchpad; that source is static and goes stale
+# so we check the latest upstream from a directory that does get updated
+UPSTREAM_CHECK_URI = "${DEBIAN_MIRROR}/main/b/base-passwd/"
+
+S = "${WORKDIR}/work"
+
+PACKAGECONFIG = "${@bb.utils.filter('DISTRO_FEATURES', 'selinux', d)}"
+PACKAGECONFIG[selinux] = "--enable-selinux, --disable-selinux, libselinux"
+
+inherit autotools
+
+EXTRA_OECONF += "--disable-debconf --disable-docs"
+
+NOLOGIN ?= "${base_sbindir}/nologin"
+
+do_install () {
+ install -d -m 755 ${D}${sbindir}
+ install -o root -g root -p -m 755 ${B}/update-passwd ${D}${sbindir}/
+ install -d -m 755 ${D}${mandir}/man8 ${D}${mandir}/pl/man8
+ install -p -m 644 ${S}/man/update-passwd.8 ${D}${mandir}/man8/
+ install -p -m 644 ${S}/man/update-passwd.pl.8 \
+ ${D}${mandir}/pl/man8/update-passwd.8
+ gzip -9 ${D}${mandir}/man8/* ${D}${mandir}/pl/man8/*
+ install -d -m 755 ${D}${datadir}/base-passwd
+ install -o root -g root -p -m 644 ${S}/passwd.master ${D}${datadir}/base-passwd/
+ sed -i 's#:/root:#:${ROOT_HOME}:#' ${D}${datadir}/base-passwd/passwd.master
+ sed -i 's#/usr/sbin/nologin#${NOLOGIN}#' ${D}${datadir}/base-passwd/passwd.master
+ install -o root -g root -p -m 644 ${S}/group.master ${D}${datadir}/base-passwd/
+
+ install -d -m 755 ${D}${docdir}/${BPN}
+ install -p -m 644 ${S}/debian/changelog ${D}${docdir}/${BPN}/
+ gzip -9 ${D}${docdir}/${BPN}/*
+ install -p -m 644 ${S}/README ${D}${docdir}/${BPN}/
+ install -p -m 644 ${S}/debian/copyright ${D}${docdir}/${BPN}/
+}
+
+basepasswd_sysroot_postinst() {
+#!/bin/sh -e
+
+# Install passwd.master and group.master to sysconfdir
+install -d -m 755 ${STAGING_DIR_TARGET}${sysconfdir}
+for i in passwd group; do
+ install -p -m 644 ${STAGING_DIR_TARGET}${datadir}/base-passwd/\$i.master \
+ ${STAGING_DIR_TARGET}${sysconfdir}/\$i
+done
+
+# Run any useradd postinsts
+for script in ${STAGING_DIR_TARGET}${bindir}/postinst-useradd-*; do
+ if [ -f \$script ]; then
+ \$script
+ fi
+done
+}
+
+SYSROOT_DIRS += "${sysconfdir}"
+SYSROOT_PREPROCESS_FUNCS += "base_passwd_tweaksysroot"
+
+base_passwd_tweaksysroot () {
+ mkdir -p ${SYSROOT_DESTDIR}${bindir}
+ dest=${SYSROOT_DESTDIR}${bindir}/postinst-${PN}
+ echo "${basepasswd_sysroot_postinst}" > $dest
+ chmod 0755 $dest
+}
+
+python populate_packages:prepend() {
+ # Add in the preinst function for ${PN}
+ # We have to do this here as prior to this, passwd/group.master
+ # would be unavailable. We need to create these files at preinst
+ # time before the files from the package may be available, hence
+ # storing the data from the files in the preinst directly.
+
+ f = open(d.expand("${STAGING_DATADIR}/base-passwd/passwd.master"), 'r')
+ passwd = "".join(f.readlines())
+ f.close()
+ f = open(d.expand("${STAGING_DATADIR}/base-passwd/group.master"), 'r')
+ group = "".join(f.readlines())
+ f.close()
+
+ preinst = """#!/bin/sh
+mkdir -p $D${sysconfdir}
+if [ ! -e $D${sysconfdir}/passwd ]; then
+\tcat << 'EOF' > $D${sysconfdir}/passwd
+""" + passwd + """EOF
+fi
+if [ ! -e $D${sysconfdir}/group ]; then
+\tcat << 'EOF' > $D${sysconfdir}/group
+""" + group + """EOF
+fi
+"""
+ d.setVar(d.expand('pkg_preinst:${PN}'), preinst)
+}
+
+addtask do_package after do_populate_sysroot
+
+ALLOW_EMPTY:${PN} = "1"
+
+PACKAGES =+ "${PN}-update"
+FILES:${PN}-update = "${sbindir}/* ${datadir}/${PN}"
+
+pkg_postinst:${PN}-update () {
+#!/bin/sh
+if [ -n "$D" ]; then
+ exit 0
+fi
+${sbindir}/update-passwd
+}
diff --git a/meta/recipes-core/busybox/busybox-inittab_1.35.0.bb b/meta/recipes-core/busybox/busybox-inittab_1.35.0.bb
deleted file mode 100644
index 868d7a230f..0000000000
--- a/meta/recipes-core/busybox/busybox-inittab_1.35.0.bb
+++ /dev/null
@@ -1,85 +0,0 @@
-SUMMARY = "inittab configuration for BusyBox"
-LICENSE = "GPL-2.0-only"
-LIC_FILES_CHKSUM = "file://${COREBASE}/meta/files/common-licenses/GPL-2.0-only;md5=801f80980d171dd6425610833a22dbe6"
-
-SRC_URI = "file://inittab"
-
-S = "${WORKDIR}"
-
-INHIBIT_DEFAULT_DEPS = "1"
-
-do_compile() {
- :
-}
-
-do_install() {
- install -d ${D}${sysconfdir}
- install -D -m 0644 ${WORKDIR}/inittab ${D}${sysconfdir}/inittab
- tmp="${SERIAL_CONSOLES}"
- [ -n "$tmp" ] && echo >> ${D}${sysconfdir}/inittab
- for i in $tmp
- do
- j=`echo ${i} | sed s/\;/\ /g`
- id=`echo ${i} | sed -e 's/^.*;//' -e 's/;.*//'`
- echo "$id::respawn:${base_sbindir}/getty ${j}" >> ${D}${sysconfdir}/inittab
- done
- if [ "${USE_VT}" = "1" ]; then
- cat <<EOF >>${D}${sysconfdir}/inittab
-# ${base_sbindir}/getty invocations for the runlevels.
-#
-# The "id" field MUST be the same as the last
-# characters of the device (after "tty").
-#
-# Format:
-# <id>:<runlevels>:<action>:<process>
-#
-
-EOF
-
- for n in ${SYSVINIT_ENABLED_GETTYS}
- do
- echo "tty$n:12345:respawn:${base_sbindir}/getty 38400 tty$n" >> ${D}${sysconfdir}/inittab
- done
- echo "" >> ${D}${sysconfdir}/inittab
- fi
-
-}
-
-pkg_postinst:${PN} () {
-# run this on host and on target
-if [ "${SERIAL_CONSOLES_CHECK}" = "" ]; then
- exit 0
-fi
-}
-
-pkg_postinst_ontarget:${PN} () {
-# run this on the target
-if [ -e /proc/consoles ]; then
- tmp="${SERIAL_CONSOLES_CHECK}"
- for i in $tmp
- do
- j=`echo ${i} | sed -e s/^.*\;//g -e s/\:.*//g`
- k=`echo ${i} | sed s/^.*\://g`
- if [ -z "`grep ${j} /proc/consoles`" ]; then
- if [ -z "${k}" ] || [ -z "`grep ${k} /proc/consoles`" ] || [ ! -e /dev/${j} ]; then
- sed -i -e /^.*${j}\ /d -e /^.*${j}$/d /etc/inittab
- fi
- fi
- done
- kill -HUP 1
-else
- exit 1
-fi
-}
-
-# SERIAL_CONSOLES is generally defined by the MACHINE .conf.
-# Set PACKAGE_ARCH appropriately.
-PACKAGE_ARCH = "${MACHINE_ARCH}"
-
-FILES:${PN} = "${sysconfdir}/inittab"
-CONFFILES:${PN} = "${sysconfdir}/inittab"
-
-RCONFLICTS:${PN} = "sysvinit-inittab"
-
-USE_VT ?= "1"
-SYSVINIT_ENABLED_GETTYS ?= "1"
diff --git a/meta/recipes-core/busybox/busybox-inittab_1.36.1.bb b/meta/recipes-core/busybox/busybox-inittab_1.36.1.bb
new file mode 100644
index 0000000000..6904a91930
--- /dev/null
+++ b/meta/recipes-core/busybox/busybox-inittab_1.36.1.bb
@@ -0,0 +1,63 @@
+SUMMARY = "inittab configuration for BusyBox"
+LICENSE = "GPL-2.0-only"
+LIC_FILES_CHKSUM = "file://${COREBASE}/meta/files/common-licenses/GPL-2.0-only;md5=801f80980d171dd6425610833a22dbe6"
+
+SRC_URI = "file://inittab"
+
+S = "${WORKDIR}"
+
+INHIBIT_DEFAULT_DEPS = "1"
+
+do_compile() {
+ :
+}
+
+do_install() {
+ install -d ${D}${sysconfdir}
+ install -D -m 0644 ${WORKDIR}/inittab ${D}${sysconfdir}/inittab
+
+ CONSOLES="${SERIAL_CONSOLES}"
+ for s in $CONSOLES
+ do
+ speed=$(echo $s | cut -d\; -f 1)
+ device=$(echo $s | cut -d\; -f 2)
+ label=$(echo $device | sed -e 's/tty//' | tail --bytes=5)
+
+ echo "$device::respawn:${sbindir}/ttyrun $device ${base_sbindir}/getty $speed $device" >> ${D}${sysconfdir}/inittab
+ done
+
+ if [ "${USE_VT}" = "1" ]; then
+ cat <<EOF >>${D}${sysconfdir}/inittab
+# ${base_sbindir}/getty invocations for the runlevels.
+#
+# The "id" field MUST be the same as the last
+# characters of the device (after "tty").
+#
+# Format:
+# <id>:<runlevels>:<action>:<process>
+#
+
+EOF
+
+ for n in ${SYSVINIT_ENABLED_GETTYS}
+ do
+ echo "tty$n:12345:respawn:${base_sbindir}/getty 38400 tty$n" >> ${D}${sysconfdir}/inittab
+ done
+ echo "" >> ${D}${sysconfdir}/inittab
+ fi
+
+}
+
+
+# SERIAL_CONSOLES is generally defined by the MACHINE .conf.
+# Set PACKAGE_ARCH appropriately.
+PACKAGE_ARCH = "${MACHINE_ARCH}"
+
+FILES:${PN} = "${sysconfdir}/inittab"
+CONFFILES:${PN} = "${sysconfdir}/inittab"
+
+RDEPENDS:${PN} = "ttyrun"
+RCONFLICTS:${PN} = "sysvinit-inittab"
+
+USE_VT ?= "1"
+SYSVINIT_ENABLED_GETTYS ?= "1"
diff --git a/meta/recipes-core/busybox/busybox.inc b/meta/recipes-core/busybox/busybox.inc
index 5f1c473d5e..f5d7c3f9c8 100644
--- a/meta/recipes-core/busybox/busybox.inc
+++ b/meta/recipes-core/busybox/busybox.inc
@@ -34,6 +34,7 @@ INITSCRIPT_PACKAGES = "${PN}-httpd ${PN}-syslog ${PN}-udhcpd ${PN}-mdev ${PN}-hw
INITSCRIPT_NAME:${PN}-httpd = "busybox-httpd"
INITSCRIPT_NAME:${PN}-hwclock = "hwclock.sh"
+INITSCRIPT_PARAMS:${PN}-hwclock = "start 40 S . stop 20 0 1 6 ."
INITSCRIPT_NAME:${PN}-mdev = "mdev"
INITSCRIPT_PARAMS:${PN}-mdev = "start 04 S ."
INITSCRIPT_NAME:${PN}-syslog = "syslog"
@@ -138,19 +139,26 @@ do_configure () {
do_prepare_config
merge_config.sh -m .config ${@" ".join(find_cfgs(d))}
cml1_do_configure
+
+ # Save a copy of .config and autoconf.h.
+ cp .config .config.orig
+ cp include/autoconf.h include/autoconf.h.orig
}
do_compile() {
unset CFLAGS CPPFLAGS CXXFLAGS LDFLAGS
export KCONFIG_NOTIMESTAMP=1
+ # Ensure we start do_compile with the original .config and autoconf.h.
+ # These files should always have matching timestamps.
+ cp .config.orig .config
+ cp include/autoconf.h.orig include/autoconf.h
+
if [ "${BUSYBOX_SPLIT_SUID}" = "1" -a x`grep "CONFIG_FEATURE_INDIVIDUAL=y" .config` = x ]; then
+ # Guard againt interrupted do_compile: clean temporary files.
+ rm -f .config.app.suid .config.app.nosuid .config.disable.apps .config.nonapps
+
# split the .config into two parts, and make two busybox binaries
- if [ -e .config.orig ]; then
- # Need to guard again an interrupted do_compile - restore any backup
- cp .config.orig .config
- fi
- cp .config .config.orig
oe_runmake busybox.cfg.suid
oe_runmake busybox.cfg.nosuid
@@ -187,15 +195,18 @@ do_compile() {
bbfatal "busybox suid binary incorrectly provides /bin/sh"
fi
- # copy .config.orig back to .config, because the install process may check this file
- cp .config.orig .config
# cleanup
- rm .config.orig .config.app.suid .config.app.nosuid .config.disable.apps .config.nonapps
+ rm .config.app.suid .config.app.nosuid .config.disable.apps .config.nonapps
else
oe_runmake busybox_unstripped
cp busybox_unstripped busybox
oe_runmake busybox.links
fi
+
+ # restore original .config and autoconf.h, because the install process
+ # may check these files
+ cp .config.orig .config
+ cp include/autoconf.h.orig include/autoconf.h
}
do_install () {
diff --git a/meta/recipes-core/busybox/busybox/0001-depmod-Ignore-.debug-directories.patch b/meta/recipes-core/busybox/busybox/0001-depmod-Ignore-.debug-directories.patch
index 354f83a4a5..d76118f85b 100644
--- a/meta/recipes-core/busybox/busybox/0001-depmod-Ignore-.debug-directories.patch
+++ b/meta/recipes-core/busybox/busybox/0001-depmod-Ignore-.debug-directories.patch
@@ -21,7 +21,7 @@ index bb42bbe..aa5a2de 100644
/* Arbitrary. Was sb->st_size, but that breaks .gz etc */
size_t len = (64*1024*1024 - 4096);
-+ if (strstr(fname, ".debug") == NULL)
++ if (strstr(fname, ".debug") != NULL)
+ return TRUE;
+
if (strrstr(fname, ".ko") == NULL)
diff --git a/meta/recipes-core/busybox/busybox/defconfig b/meta/recipes-core/busybox/busybox/defconfig
index 5e1e1f5638..f3d545dc3f 100644
--- a/meta/recipes-core/busybox/busybox/defconfig
+++ b/meta/recipes-core/busybox/busybox/defconfig
@@ -1,7 +1,7 @@
#
# Automatically generated make config: don't edit
-# Busybox version: 1.35.0
-# Sun Dec 26 16:55:55 2021
+# Busybox version: 1.36.0
+# Tue Jan 3 14:17:01 2023
#
CONFIG_HAVE_DOT_CONFIG=y
@@ -123,6 +123,9 @@ CONFIG_UNICODE_WIDE_WCHARS=y
# CONFIG_UNICODE_BIDI_SUPPORT is not set
# CONFIG_UNICODE_NEUTRAL_TABLE is not set
# CONFIG_UNICODE_PRESERVE_BROKEN is not set
+# CONFIG_LOOP_CONFIGURE is not set
+# CONFIG_NO_LOOP_CONFIGURE is not set
+CONFIG_TRY_LOOP_CONFIGURE=y
#
# Applets
@@ -338,6 +341,7 @@ CONFIG_FEATURE_TR_CLASSES=y
# CONFIG_FEATURE_TR_EQUIV is not set
CONFIG_TRUE=y
# CONFIG_TRUNCATE is not set
+# CONFIG_TSORT is not set
CONFIG_TTY=y
CONFIG_UNAME=y
CONFIG_UNAME_OSNAME="GNU/Linux"
@@ -831,10 +835,12 @@ CONFIG_MICROCOM=y
CONFIG_RFKILL=y
# CONFIG_RUNLEVEL is not set
# CONFIG_RX is not set
+# CONFIG_SEEDRNG is not set
# CONFIG_SETFATTR is not set
# CONFIG_SETSERIAL is not set
CONFIG_STRINGS=y
CONFIG_TIME=y
+# CONFIG_TREE is not set
CONFIG_TS=y
# CONFIG_TTYSIZE is not set
# CONFIG_UBIATTACH is not set
@@ -1184,7 +1190,7 @@ CONFIG_ASH_CMDCMD=y
# Options common to all shells
#
CONFIG_FEATURE_SH_MATH=y
-# CONFIG_FEATURE_SH_MATH_64 is not set
+CONFIG_FEATURE_SH_MATH_64=y
CONFIG_FEATURE_SH_MATH_BASE=y
CONFIG_FEATURE_SH_EXTRA_QUIET=y
# CONFIG_FEATURE_SH_STANDALONE is not set
diff --git a/meta/recipes-core/busybox/busybox/musl.cfg b/meta/recipes-core/busybox/busybox/musl.cfg
index 6fffc91098..ba63def1ba 100644
--- a/meta/recipes-core/busybox/busybox/musl.cfg
+++ b/meta/recipes-core/busybox/busybox/musl.cfg
@@ -7,5 +7,4 @@
# CONFIG_FEATURE_INETD_RPC is not set
# CONFIG_SELINUXENABLED is not set
# CONFIG_FEATURE_MOUNT_NFS is not set
-# CONFIG_FEATURE_UTMP is not set
diff --git a/meta/recipes-core/busybox/busybox/recognize_connmand.patch b/meta/recipes-core/busybox/busybox/recognize_connmand.patch
index f42c74caad..4f28beb556 100644
--- a/meta/recipes-core/busybox/busybox/recognize_connmand.patch
+++ b/meta/recipes-core/busybox/busybox/recognize_connmand.patch
@@ -4,14 +4,14 @@ Upstream-Status: Inappropriate [OE-Core]
Signed-off-by: Saul Wold <sgw@linux.intel.com>
-Index: busybox-1.22.1/networking/ifupdown.c
+Index: busybox-1.36.0/networking/ifupdown.c
===================================================================
---- busybox-1.22.1.orig/networking/ifupdown.c
-+++ busybox-1.22.1/networking/ifupdown.c
-@@ -521,6 +521,10 @@ struct dhcp_client_t {
+--- busybox-1.36.0.orig/networking/ifupdown.c
++++ busybox-1.36.0/networking/ifupdown.c
+@@ -628,6 +628,10 @@ struct dhcp_client_t {
};
- static const struct dhcp_client_t ext_dhcp_clients[] = {
+ static const struct dhcp_client_t ext_dhcp_clients[] ALIGN_PTR = {
+ { "connmand",
+ "true",
+ "true",
diff --git a/meta/recipes-core/busybox/busybox/sha1sum.cfg b/meta/recipes-core/busybox/busybox/sha1sum.cfg
index 20e72d9263..afd4da4ea1 100644
--- a/meta/recipes-core/busybox/busybox/sha1sum.cfg
+++ b/meta/recipes-core/busybox/busybox/sha1sum.cfg
@@ -1 +1,2 @@
CONFIG_SHA1SUM=y
+CONFIG_SHA1_SMALL=3
diff --git a/meta/recipes-core/busybox/busybox/sha_accel.cfg b/meta/recipes-core/busybox/busybox/sha_accel.cfg
new file mode 100644
index 0000000000..8900305a11
--- /dev/null
+++ b/meta/recipes-core/busybox/busybox/sha_accel.cfg
@@ -0,0 +1,2 @@
+# CONFIG_SHA256_HWACCEL is not set
+# CONFIG_SHA1_HWACCEL is not set
diff --git a/meta/recipes-core/busybox/busybox/start-stop-false.patch b/meta/recipes-core/busybox/busybox/start-stop-false.patch
new file mode 100644
index 0000000000..3aef68329c
--- /dev/null
+++ b/meta/recipes-core/busybox/busybox/start-stop-false.patch
@@ -0,0 +1,35 @@
+It's known that the final start-stop-daemon test fails if /bin/false is
+actually a busybox symlink. Instead of failing, check if false is
+busybox and adapt the expected output to match.
+
+Upstream-Status: Submitted [http://lists.busybox.net/pipermail/busybox/2023-August/090416.html]
+Signed-off-by: Ross Burton <ross.burton@arm.com>
+
+diff --git a/testsuite/start-stop-daemon.tests b/testsuite/start-stop-daemon.tests
+index 0757b1288..aa6e9cc41 100755
+--- a/testsuite/start-stop-daemon.tests
++++ b/testsuite/start-stop-daemon.tests
+@@ -27,10 +27,18 @@ testing "start-stop-daemon without -x and -a" \
+ # but at least it checks that pathname to exec() is correct
+ #
+ # NB: this fails if /bin/false is a busybox symlink:
+-# busybox looks at argv[0] and says "qwerty: applet not found"
+-testing "start-stop-daemon with both -x and -a" \
+- 'start-stop-daemon -S -x /bin/false -a qwerty false 2>&1; echo $?' \
+- "1\n" \
+- "" ""
++# busybox looks at argv[0] and says "qwerty: applet not found", so
++# skip the test if false is busybox.
++case $(readlink /bin/false) in
++ *busybox*)
++ echo "SKIPPED: start-stop-daemon with both -x and -a (need non-busybox false)"
++ ;;
++ *)
++ testing "start-stop-daemon with both -x and -a" \
++ 'start-stop-daemon -S -x /bin/false -a qwerty false 2>&1; echo $?' \
++ "1\n" \
++ "" ""
++ ;;
++esac
+
+ exit $FAILCOUNT
diff --git a/meta/recipes-core/busybox/busybox_1.35.0.bb b/meta/recipes-core/busybox/busybox_1.35.0.bb
deleted file mode 100644
index f2f1b35902..0000000000
--- a/meta/recipes-core/busybox/busybox_1.35.0.bb
+++ /dev/null
@@ -1,55 +0,0 @@
-require busybox.inc
-
-SRC_URI = "https://busybox.net/downloads/busybox-${PV}.tar.bz2;name=tarball \
- file://0001-depmod-Ignore-.debug-directories.patch \
- file://busybox-udhcpc-no_deconfig.patch \
- file://find-touchscreen.sh \
- file://busybox-cron \
- file://busybox-httpd \
- file://busybox-udhcpd \
- file://default.script \
- file://simple.script \
- file://hwclock.sh \
- file://syslog \
- file://syslog-startup.conf \
- file://syslog.conf \
- file://busybox-syslog.default \
- file://mdev \
- file://mdev.conf \
- file://mdev-mount.sh \
- file://defconfig \
- file://busybox-syslog.service.in \
- file://busybox-klogd.service.in \
- file://fail_on_no_media.patch \
- file://run-ptest \
- file://inetd.conf \
- file://inetd \
- file://login-utilities.cfg \
- file://recognize_connmand.patch \
- file://busybox-cross-menuconfig.patch \
- file://mount-via-label.cfg \
- file://sha1sum.cfg \
- file://sha256sum.cfg \
- file://getopts.cfg \
- file://longopts.cfg \
- file://resize.cfg \
- ${@["", "file://init.cfg"][(d.getVar('VIRTUAL-RUNTIME_init_manager') == 'busybox')]} \
- ${@["", "file://rcS.default"][(d.getVar('VIRTUAL-RUNTIME_init_manager') == 'busybox')]} \
- ${@["", "file://mdev.cfg"][(d.getVar('VIRTUAL-RUNTIME_dev_manager') == 'busybox-mdev')]} \
- file://syslog.cfg \
- file://unicode.cfg \
- file://rev.cfg \
- file://pgrep.cfg \
- file://rcS \
- file://rcK \
- file://makefile-libbb-race.patch \
- file://0001-testsuite-check-uudecode-before-using-it.patch \
- file://0001-testsuite-use-www.example.org-for-wget-test-cases.patch \
- file://0001-du-l-works-fix-to-use-145-instead-of-144.patch \
- file://0001-sysctl-ignore-EIO-of-stable_secret-below-proc-sys-ne.patch \
- file://0001-libbb-sockaddr2str-ensure-only-printable-characters-.patch \
- file://0002-nslookup-sanitize-all-printed-strings-with-printable.patch \
- "
-SRC_URI:append:libc-musl = " file://musl.cfg "
-
-SRC_URI[tarball.sha256sum] = "faeeb244c35a348a334f4a59e44626ee870fb07b6884d68c10ae8bc19f83a694"
diff --git a/meta/recipes-core/busybox/busybox_1.36.1.bb b/meta/recipes-core/busybox/busybox_1.36.1.bb
new file mode 100644
index 0000000000..06eb9eb999
--- /dev/null
+++ b/meta/recipes-core/busybox/busybox_1.36.1.bb
@@ -0,0 +1,57 @@
+require busybox.inc
+
+SRC_URI = "https://busybox.net/downloads/busybox-${PV}.tar.bz2;name=tarball \
+ file://0001-depmod-Ignore-.debug-directories.patch \
+ file://busybox-udhcpc-no_deconfig.patch \
+ file://find-touchscreen.sh \
+ file://busybox-cron \
+ file://busybox-httpd \
+ file://busybox-udhcpd \
+ file://default.script \
+ file://simple.script \
+ file://hwclock.sh \
+ file://syslog \
+ file://syslog-startup.conf \
+ file://syslog.conf \
+ file://busybox-syslog.default \
+ file://mdev \
+ file://mdev.conf \
+ file://mdev-mount.sh \
+ file://defconfig \
+ file://busybox-syslog.service.in \
+ file://busybox-klogd.service.in \
+ file://fail_on_no_media.patch \
+ file://run-ptest \
+ file://inetd.conf \
+ file://inetd \
+ file://login-utilities.cfg \
+ file://recognize_connmand.patch \
+ file://busybox-cross-menuconfig.patch \
+ file://mount-via-label.cfg \
+ file://sha1sum.cfg \
+ file://sha256sum.cfg \
+ file://getopts.cfg \
+ file://longopts.cfg \
+ file://resize.cfg \
+ ${@["", "file://init.cfg"][(d.getVar('VIRTUAL-RUNTIME_init_manager') == 'busybox')]} \
+ ${@["", "file://rcS.default"][(d.getVar('VIRTUAL-RUNTIME_init_manager') == 'busybox')]} \
+ ${@["", "file://mdev.cfg"][(d.getVar('VIRTUAL-RUNTIME_dev_manager') == 'busybox-mdev')]} \
+ file://syslog.cfg \
+ file://unicode.cfg \
+ file://rev.cfg \
+ file://pgrep.cfg \
+ file://rcS \
+ file://rcK \
+ file://makefile-libbb-race.patch \
+ file://0001-testsuite-check-uudecode-before-using-it.patch \
+ file://0001-testsuite-use-www.example.org-for-wget-test-cases.patch \
+ file://0001-du-l-works-fix-to-use-145-instead-of-144.patch \
+ file://0001-sysctl-ignore-EIO-of-stable_secret-below-proc-sys-ne.patch \
+ file://0001-libbb-sockaddr2str-ensure-only-printable-characters-.patch \
+ file://0002-nslookup-sanitize-all-printed-strings-with-printable.patch \
+ file://start-stop-false.patch \
+ "
+SRC_URI:append:libc-musl = " file://musl.cfg "
+# TODO http://lists.busybox.net/pipermail/busybox/2023-January/090078.html
+SRC_URI:append:x86 = " file://sha_accel.cfg"
+SRC_URI[tarball.sha256sum] = "b8cc24c9574d809e7279c3be349795c5d5ceb6fdf19ca709f80cde50e47de314"
diff --git a/meta/recipes-core/busybox/files/syslog b/meta/recipes-core/busybox/files/syslog
index 2208613e8c..a4fea30ab7 100644
--- a/meta/recipes-core/busybox/files/syslog
+++ b/meta/recipes-core/busybox/files/syslog
@@ -10,6 +10,8 @@
set -e
+PATH="/bin:/usr/bin:/sbin:/usr/sbin"
+
if [ -f /etc/syslog-startup.conf ]; then
. /etc/syslog-startup.conf
LOG_LOCAL=0
diff --git a/meta/recipes-core/coreutils/coreutils/0001-local.mk-fix-cross-compiling-problem.patch b/meta/recipes-core/coreutils/coreutils/0001-local.mk-fix-cross-compiling-problem.patch
index 66f9a716c9..97a6357ab9 100644
--- a/meta/recipes-core/coreutils/coreutils/0001-local.mk-fix-cross-compiling-problem.patch
+++ b/meta/recipes-core/coreutils/coreutils/0001-local.mk-fix-cross-compiling-problem.patch
@@ -1,4 +1,7 @@
-Subject: local.mk: fix cross compiling problem
+From 7cb2d20cfa2a27191255031d231cd41917dcffe8 Mon Sep 17 00:00:00 2001
+From: Chen Qi <Qi.Chen@windriver.com>
+Date: Mon, 26 Dec 2016 16:10:35 +0800
+Subject: [PATCH] local.mk: fix cross compiling problem
We meet the following error when cross compiling.
| Makefile:3418: *** Recursive variable 'INSTALL' references itself (eventually). Stop.
@@ -12,15 +15,12 @@ Signed-off-by: Chen Qi <Qi.Chen@windriver.com>
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/src/local.mk b/src/local.mk
-index 36dfa4e..c5898cc 100644
+index 96ee941..cdd47d6 100644
--- a/src/local.mk
+++ b/src/local.mk
-@@ -649,4 +649,4 @@ cu_install_program = @INSTALL_PROGRAM@
+@@ -704,4 +704,4 @@ cu_install_program = @INSTALL@
else
cu_install_program = src/ginstall
endif
-INSTALL = $(cu_install_program) -c
+INSTALL_PROGRAM = $(cu_install_program)
---
-2.1.0
-
diff --git a/meta/recipes-core/coreutils/coreutils/remove-usr-local-lib-from-m4.patch b/meta/recipes-core/coreutils/coreutils/remove-usr-local-lib-from-m4.patch
index 1a8a9b9983..718de0ab78 100644
--- a/meta/recipes-core/coreutils/coreutils/remove-usr-local-lib-from-m4.patch
+++ b/meta/recipes-core/coreutils/coreutils/remove-usr-local-lib-from-m4.patch
@@ -1,4 +1,4 @@
-From a26530083a29eeee910bfd606ecc621acecd547a Mon Sep 17 00:00:00 2001
+From f53ffb5b27ab7d4a4c62df00ebd6a1a6936d1709 Mon Sep 17 00:00:00 2001
From: Khem Raj <raj.khem@gmail.com>
Date: Wed, 3 Aug 2011 14:12:30 -0700
Subject: [PATCH] coreutils: Fix build on uclibc
@@ -12,16 +12,15 @@ and make life easier for cross compilation process.
Signed-off-by: Khem Raj <raj.khem@gmail.com>
Upstream-Status: Inappropriate [Upstream does care for AIX while we may not]
-
---
m4/getloadavg.m4 | 12 ------------
1 file changed, 12 deletions(-)
diff --git a/m4/getloadavg.m4 b/m4/getloadavg.m4
-index 8e96965..63782a2 100644
+index 9d0236f..68f7c52 100644
--- a/m4/getloadavg.m4
+++ b/m4/getloadavg.m4
-@@ -41,18 +41,6 @@ AC_CHECK_FUNC([getloadavg], [],
+@@ -46,18 +46,6 @@ if test $ac_cv_func_getloadavg != yes; then
[LIBS="-lutil $LIBS" gl_func_getloadavg_done=yes])
fi
diff --git a/meta/recipes-core/coreutils/coreutils_9.1.bb b/meta/recipes-core/coreutils/coreutils_9.1.bb
deleted file mode 100644
index d57e147a7e..0000000000
--- a/meta/recipes-core/coreutils/coreutils_9.1.bb
+++ /dev/null
@@ -1,210 +0,0 @@
-SUMMARY = "The basic file, shell and text manipulation utilities"
-DESCRIPTION = "The GNU Core Utilities provide the basic file, shell and text \
-manipulation utilities. These are the core utilities which are expected to exist on \
-every system."
-HOMEPAGE = "http://www.gnu.org/software/coreutils/"
-BUGTRACKER = "http://debbugs.gnu.org/coreutils"
-LICENSE = "GPL-3.0-or-later"
-LIC_FILES_CHKSUM = "file://COPYING;md5=1ebbd3e34237af26da5dc08a4e440464 \
- file://src/ls.c;beginline=1;endline=15;md5=1fe89f62614b5e1f5475ec04d5899bc1 \
- "
-DEPENDS = "gmp libcap"
-DEPENDS:class-native = ""
-
-inherit autotools gettext texinfo
-
-SRC_URI = "${GNU_MIRROR}/coreutils/${BP}.tar.xz \
- file://remove-usr-local-lib-from-m4.patch \
- file://0001-local.mk-fix-cross-compiling-problem.patch \
- file://run-ptest \
- "
-
-SRC_URI[sha256sum] = "61a1f410d78ba7e7f37a5a4f50e6d1320aca33375484a3255eddf17a38580423"
-
-# http://git.savannah.gnu.org/cgit/coreutils.git/commit/?id=v8.27-101-gf5d7c0842
-# runcon is not really a sandbox command, use `runcon ... setsid ...` to avoid this particular issue.
-CVE_CHECK_IGNORE += "CVE-2016-2781"
-
-EXTRA_OECONF:class-target = "--enable-install-program=arch,hostname --libexecdir=${libdir}"
-EXTRA_OECONF:class-nativesdk = "--enable-install-program=arch,hostname"
-
-# acl and xattr are not default features
-#
-PACKAGECONFIG:class-target ??= "\
- ${@bb.utils.filter('DISTRO_FEATURES', 'acl xattr', d)} \
-"
-
-# The lib/oe/path.py requires xattr
-PACKAGECONFIG:class-native ??= "xattr"
-
-# oe-core builds need xattr support
-PACKAGECONFIG:class-nativesdk ??= "xattr"
-
-# with, without, depends, rdepends
-#
-PACKAGECONFIG[acl] = "--enable-acl,--disable-acl,acl,"
-PACKAGECONFIG[xattr] = "--enable-xattr,--disable-xattr,attr,"
-PACKAGECONFIG[single-binary] = "--enable-single-binary,--disable-single-binary,,"
-PACKAGECONFIG[selinux] = "--with-selinux,--without-selinux,libselinux"
-
-# [ df mktemp nice printenv base64 gets a special treatment and is not included in this
-bindir_progs = "arch basename chcon cksum comm csplit cut dir dircolors dirname du \
- env expand expr factor fmt fold groups head hostid id install \
- join link logname md5sum mkfifo nl nohup nproc od paste pathchk \
- pinky pr printf ptx readlink realpath runcon seq sha1sum sha224sum sha256sum \
- sha384sum sha512sum shred shuf sort split sum tac tail tee test timeout \
- tr truncate tsort tty unexpand uniq unlink uptime users vdir wc who whoami yes"
-
-# hostname gets a special treatment and is not included in this
-base_bindir_progs = "cat chgrp chmod chown cp date dd echo false hostname kill ln ls mkdir \
- mknod mv pwd rm rmdir sleep stty sync touch true uname stat"
-
-sbindir_progs= "chroot"
-
-# Split stdbuf into its own package, so one can include
-# coreutils-stdbuf without getting the rest of coreutils, but make
-# coreutils itself pull in stdbuf, so IMAGE_INSTALL += "coreutils"
-# always provides all coreutils
-PACKAGE_BEFORE_PN:class-target += "${@bb.utils.contains('PACKAGECONFIG', 'single-binary', '', 'coreutils-stdbuf', d)}"
-FILES:coreutils-stdbuf = "${bindir}/stdbuf ${libdir}/coreutils/libstdbuf.so"
-RDEPENDS:coreutils:class-target += "${@bb.utils.contains('PACKAGECONFIG', 'single-binary', '', 'coreutils-stdbuf', d)}"
-
-# However, when the single-binary PACKAGECONFIG is used, stdbuf
-# functionality is built into the single coreutils binary, so there's
-# no point splitting /usr/bin/stdbuf to its own package. Instead, add
-# an RPROVIDE so that rdepending on coreutils-stdbuf will work
-# regardless of whether single-binary is in effect.
-RPROVIDES:coreutils += "${@bb.utils.contains('PACKAGECONFIG', 'single-binary', 'coreutils-stdbuf', '', d)}"
-
-# Let aclocal use the relative path for the m4 file rather than the
-# absolute since coreutils has a lot of m4 files, otherwise there might
-# be an "Argument list too long" error when it is built in a long/deep
-# directory.
-acpaths = "-I ./m4"
-
-# Deal with a separate builddir failure if src doesn't exist when creating version.c/version.h
-do_compile:prepend () {
- mkdir -p ${B}/src
-}
-
-do_install:class-native() {
- autotools_do_install
- # remove groups to fix conflict with shadow-native
- rm -f ${D}${STAGING_BINDIR_NATIVE}/groups
- # The return is a must since native doesn't need the
- # do_install:append() in the below.
- return
-}
-
-do_install:append() {
- for i in df mktemp nice printenv base64; do mv ${D}${bindir}/$i ${D}${bindir}/$i.${BPN}; done
-
- install -d ${D}${base_bindir}
- [ "${base_bindir}" != "${bindir}" ] && for i in ${base_bindir_progs}; do mv ${D}${bindir}/$i ${D}${base_bindir}/$i.${BPN}; done
-
- install -d ${D}${sbindir}
- [ "${sbindir}" != "${bindir}" ] && for i in ${sbindir_progs}; do mv ${D}${bindir}/$i ${D}${sbindir}/$i.${BPN}; done
-
- # [ requires special handling because [.coreutils will cause the sed stuff
- # in update-alternatives to fail, therefore use lbracket - the name used
- # for the actual source file.
- mv ${D}${bindir}/[ ${D}${bindir}/lbracket.${BPN}
-}
-
-inherit update-alternatives
-
-ALTERNATIVE_PRIORITY = "100"
-# Make hostname's priority higher than busybox but lower than net-tools
-ALTERNATIVE_PRIORITY[hostname] = "90"
-ALTERNATIVE:${PN} = "lbracket ${bindir_progs} ${base_bindir_progs} ${sbindir_progs} base32 base64 nice printenv mktemp df"
-ALTERNATIVE:${PN}-doc = "base64.1 nice.1 mktemp.1 df.1 groups.1 kill.1 uptime.1 stat.1 hostname.1"
-
-ALTERNATIVE_LINK_NAME[hostname.1] = "${mandir}/man1/hostname.1"
-
-ALTERNATIVE_LINK_NAME[base64] = "${base_bindir}/base64"
-ALTERNATIVE_TARGET[base64] = "${bindir}/base64.${BPN}"
-ALTERNATIVE_LINK_NAME[base64.1] = "${mandir}/man1/base64.1"
-
-ALTERNATIVE_LINK_NAME[mktemp] = "${base_bindir}/mktemp"
-ALTERNATIVE_TARGET[mktemp] = "${bindir}/mktemp.${BPN}"
-ALTERNATIVE_LINK_NAME[mktemp.1] = "${mandir}/man1/mktemp.1"
-
-ALTERNATIVE_LINK_NAME[df] = "${base_bindir}/df"
-ALTERNATIVE_TARGET[df] = "${bindir}/df.${BPN}"
-ALTERNATIVE_LINK_NAME[df.1] = "${mandir}/man1/df.1"
-
-ALTERNATIVE_LINK_NAME[nice] = "${base_bindir}/nice"
-ALTERNATIVE_TARGET[nice] = "${bindir}/nice.${BPN}"
-ALTERNATIVE_LINK_NAME[nice.1] = "${mandir}/man1/nice.1"
-
-ALTERNATIVE_LINK_NAME[printenv] = "${base_bindir}/printenv"
-ALTERNATIVE_TARGET[printenv] = "${bindir}/printenv.${BPN}"
-
-ALTERNATIVE_LINK_NAME[lbracket] = "${bindir}/["
-ALTERNATIVE_TARGET[lbracket] = "${bindir}/lbracket.${BPN}"
-
-ALTERNATIVE_LINK_NAME[groups.1] = "${mandir}/man1/groups.1"
-ALTERNATIVE_LINK_NAME[uptime.1] = "${mandir}/man1/uptime.1"
-ALTERNATIVE_LINK_NAME[kill.1] = "${mandir}/man1/kill.1"
-ALTERNATIVE_LINK_NAME[stat.1] = "${mandir}/man1/stat.1"
-
-# The statx() requires glibc >= 2.28 and linux kernel >= 4.11, it doesn't work
-# when glibc >= 2.28 ((Ubuntu 20.04 in docker) and kernel < 4.11 (Host OS
-# CentOS 7), we can check kernel version and disable it, but that would cause
-# two different signatures for coreutils-native, so disable it unconditionally
-# for deterministic build.
-EXTRA_OECONF:append:class-native = " ac_cv_func_statx=0"
-
-python __anonymous() {
- for prog in d.getVar('base_bindir_progs').split():
- d.setVarFlag('ALTERNATIVE_LINK_NAME', prog, '%s/%s' % (d.getVar('base_bindir'), prog))
-
- for prog in d.getVar('sbindir_progs').split():
- d.setVarFlag('ALTERNATIVE_LINK_NAME', prog, '%s/%s' % (d.getVar('sbindir'), prog))
-}
-
-BBCLASSEXTEND = "native nativesdk"
-
-inherit ptest
-
-RDEPENDS:${PN}-ptest += "bash findutils gawk liberror-perl make perl perl-modules python3-core sed shadow"
-
-# -dev automatic dependencies fails as we don't want libmodule-build-perl-dev, its too heavy
-# may need tweaking if DEPENDS changes
-RRECOMMENDS:coreutils-dev[nodeprrecs] = "1"
-RRECOMMENDS:coreutils-dev = "acl-dev attr-dev gmp-dev libcap-dev bash-dev findutils-dev gawk-dev shadow-dev"
-
-do_install_ptest () {
- install -d ${D}${PTEST_PATH}/tests
- cp -r ${S}/tests/* ${D}${PTEST_PATH}/tests
- sed -i 's/ginstall/install/g' `grep -R ginstall ${D}${PTEST_PATH}/tests | awk -F: '{print $1}' | uniq`
- install -d ${D}${PTEST_PATH}/build-aux
- install ${S}/build-aux/test-driver ${D}${PTEST_PATH}/build-aux/
- cp ${B}/Makefile ${D}${PTEST_PATH}/
- cp ${S}/init.cfg ${D}${PTEST_PATH}/
- cp -r ${B}/src ${D}${PTEST_PATH}/
- cp -r ${S}/src/*.c ${D}${PTEST_PATH}/src
- sed -i '/^VPATH/s/= .*$/= ./g' ${D}${PTEST_PATH}/Makefile
- sed -i '/^PROGRAMS/s/^/#/g' ${D}${PTEST_PATH}/Makefile
- sed -i '/^Makefile: /s/^.*$/Makefile:/g' ${D}${PTEST_PATH}/Makefile
- sed -i '/^abs_srcdir/s/= .*$/= \$\{PWD\}/g' ${D}${PTEST_PATH}/Makefile
- sed -i '/^abs_top_builddir/s/= .*$/= \$\{PWD\}/g' ${D}${PTEST_PATH}/Makefile
- sed -i '/^abs_top_srcdir/s/= .*$/= \$\{PWD\}/g' ${D}${PTEST_PATH}/Makefile
- sed -i '/^built_programs/s/ginstall/install/g' ${D}${PTEST_PATH}/Makefile
- chmod -R 777 ${D}${PTEST_PATH}
-
- # Disable subcase stty-pairs.sh, it will cause test framework hang
- sed -i '/stty-pairs.sh/d' ${D}${PTEST_PATH}/Makefile
-
- # Disable subcase tail-2/assert.sh as it has issues on 32-bit systems
- sed -i '/assert.sh/d' ${D}${PTEST_PATH}/Makefile
-
- # Tweak test d_type-check to use python3 instead of python
- sed -i "1s@.*@#!/usr/bin/python3@" ${D}${PTEST_PATH}/tests/d_type-check
- install ${B}/src/getlimits ${D}/${bindir}
-
- # handle multilib
- sed -i s:@libdir@:${libdir}:g ${D}${PTEST_PATH}/run-ptest
-}
-
-FILES:${PN}-ptest += "${bindir}/getlimits"
diff --git a/meta/recipes-core/coreutils/coreutils_9.5.bb b/meta/recipes-core/coreutils/coreutils_9.5.bb
new file mode 100644
index 0000000000..9a5f836ebe
--- /dev/null
+++ b/meta/recipes-core/coreutils/coreutils_9.5.bb
@@ -0,0 +1,219 @@
+SUMMARY = "The basic file, shell and text manipulation utilities"
+DESCRIPTION = "The GNU Core Utilities provide the basic file, shell and text \
+manipulation utilities. These are the core utilities which are expected to exist on \
+every system."
+HOMEPAGE = "http://www.gnu.org/software/coreutils/"
+BUGTRACKER = "http://debbugs.gnu.org/coreutils"
+LICENSE = "GPL-3.0-or-later"
+LIC_FILES_CHKSUM = "file://COPYING;md5=1ebbd3e34237af26da5dc08a4e440464 \
+ file://src/ls.c;beginline=1;endline=15;md5=9ac94aaed7fd46fd8df7147a9e3410cb \
+ "
+DEPENDS = "gmp libcap"
+DEPENDS:class-native = ""
+
+inherit autotools gettext texinfo
+
+SRC_URI = "${GNU_MIRROR}/coreutils/${BP}.tar.xz \
+ file://remove-usr-local-lib-from-m4.patch \
+ file://0001-local.mk-fix-cross-compiling-problem.patch \
+ file://run-ptest \
+ "
+SRC_URI[sha256sum] = "cd328edeac92f6a665de9f323c93b712af1858bc2e0d88f3f7100469470a1b8a"
+
+# http://git.savannah.gnu.org/cgit/coreutils.git/commit/?id=v8.27-101-gf5d7c0842
+#
+CVE_STATUS[CVE-2016-2781] = "disputed: runcon is not really a sandbox command, use `runcon ... setsid ...` to avoid this particular issue."
+
+EXTRA_OECONF:class-target = "--enable-install-program=arch,hostname --libexecdir=${libdir}"
+EXTRA_OECONF:class-nativesdk = "--enable-install-program=arch,hostname"
+
+# acl and xattr are not default features
+#
+PACKAGECONFIG:class-target ??= "\
+ ${@bb.utils.filter('DISTRO_FEATURES', 'acl xattr', d)} \
+"
+
+# The lib/oe/path.py requires xattr
+PACKAGECONFIG:class-native ??= "xattr"
+
+# oe-core builds need xattr support
+PACKAGECONFIG:class-nativesdk ??= "xattr"
+
+# with, without, depends, rdepends
+#
+PACKAGECONFIG[acl] = "--enable-acl,--disable-acl,acl,"
+PACKAGECONFIG[xattr] = "--enable-xattr,--disable-xattr,attr,"
+PACKAGECONFIG[single-binary] = "--enable-single-binary,--disable-single-binary,,"
+PACKAGECONFIG[selinux] = "--with-selinux,--without-selinux,libselinux"
+PACKAGECONFIG[openssl] = "--with-openssl=yes,--with-openssl=no,openssl"
+
+# [ df mktemp nice printenv base64 gets a special treatment and is not included in this
+bindir_progs = "arch basename chcon cksum comm csplit cut dir dircolors dirname du \
+ env expand expr factor fmt fold groups head hostid id install \
+ join link logname md5sum mkfifo nl nohup nproc od paste pathchk \
+ pinky pr printf ptx readlink realpath runcon seq sha1sum sha224sum sha256sum \
+ sha384sum sha512sum shred shuf sort split sum tac tail tee test timeout \
+ tr truncate tsort tty unexpand uniq unlink uptime users vdir wc who whoami yes"
+
+# hostname gets a special treatment and is not included in this
+base_bindir_progs = "cat chgrp chmod chown cp date dd echo false hostname kill ln ls mkdir \
+ mknod mv pwd rm rmdir sleep stty sync touch true uname stat"
+
+sbindir_progs= "chroot"
+
+# Split stdbuf into its own package, so one can include
+# coreutils-stdbuf without getting the rest of coreutils, but make
+# coreutils itself pull in stdbuf, so IMAGE_INSTALL += "coreutils"
+# always provides all coreutils
+PACKAGE_BEFORE_PN:class-target += "${@bb.utils.contains('PACKAGECONFIG', 'single-binary', '', 'coreutils-stdbuf', d)}"
+FILES:coreutils-stdbuf = "${bindir}/stdbuf ${libdir}/coreutils/libstdbuf.so"
+RDEPENDS:coreutils:class-target += "${@bb.utils.contains('PACKAGECONFIG', 'single-binary', '', 'coreutils-stdbuf', d)}"
+
+# However, when the single-binary PACKAGECONFIG is used, stdbuf
+# functionality is built into the single coreutils binary, so there's
+# no point splitting /usr/bin/stdbuf to its own package. Instead, add
+# an RPROVIDE so that rdepending on coreutils-stdbuf will work
+# regardless of whether single-binary is in effect.
+RPROVIDES:coreutils += "${@bb.utils.contains('PACKAGECONFIG', 'single-binary', 'coreutils-stdbuf', '', d)}"
+
+# Let aclocal use the relative path for the m4 file rather than the
+# absolute since coreutils has a lot of m4 files, otherwise there might
+# be an "Argument list too long" error when it is built in a long/deep
+# directory.
+acpaths = "-I ./m4"
+
+# Deal with a separate builddir failure if src doesn't exist when creating version.c/version.h
+do_compile:prepend () {
+ mkdir -p ${B}/src
+}
+
+do_install:class-native() {
+ autotools_do_install
+ # remove groups to fix conflict with shadow-native
+ rm -f ${D}${STAGING_BINDIR_NATIVE}/groups
+ # The return is a must since native doesn't need the
+ # do_install:append() in the below.
+ return
+}
+
+do_install:append() {
+ for i in df mktemp nice printenv base64; do mv ${D}${bindir}/$i ${D}${bindir}/$i.${BPN}; done
+
+ install -d ${D}${base_bindir}
+ [ "${base_bindir}" != "${bindir}" ] && for i in ${base_bindir_progs}; do mv ${D}${bindir}/$i ${D}${base_bindir}/$i.${BPN}; done
+
+ install -d ${D}${sbindir}
+ [ "${sbindir}" != "${bindir}" ] && for i in ${sbindir_progs}; do mv ${D}${bindir}/$i ${D}${sbindir}/$i.${BPN}; done
+
+ # [ requires special handling because [.coreutils will cause the sed stuff
+ # in update-alternatives to fail, therefore use lbracket - the name used
+ # for the actual source file.
+ mv ${D}${bindir}/[ ${D}${bindir}/lbracket.${BPN}
+}
+
+inherit update-alternatives
+
+ALTERNATIVE_PRIORITY = "100"
+# Make hostname's priority higher than busybox but lower than net-tools
+ALTERNATIVE_PRIORITY[hostname] = "90"
+ALTERNATIVE:${PN} = "lbracket ${bindir_progs} ${base_bindir_progs} ${sbindir_progs} base32 base64 nice printenv mktemp df"
+ALTERNATIVE:${PN}-doc = "base64.1 nice.1 mktemp.1 df.1 groups.1 kill.1 uptime.1 stat.1 hostname.1"
+
+ALTERNATIVE_LINK_NAME[hostname.1] = "${mandir}/man1/hostname.1"
+
+ALTERNATIVE_LINK_NAME[base64] = "${base_bindir}/base64"
+ALTERNATIVE_TARGET[base64] = "${bindir}/base64.${BPN}"
+ALTERNATIVE_LINK_NAME[base64.1] = "${mandir}/man1/base64.1"
+
+ALTERNATIVE_LINK_NAME[mktemp] = "${base_bindir}/mktemp"
+ALTERNATIVE_TARGET[mktemp] = "${bindir}/mktemp.${BPN}"
+ALTERNATIVE_LINK_NAME[mktemp.1] = "${mandir}/man1/mktemp.1"
+
+ALTERNATIVE_LINK_NAME[df] = "${base_bindir}/df"
+ALTERNATIVE_TARGET[df] = "${bindir}/df.${BPN}"
+ALTERNATIVE_LINK_NAME[df.1] = "${mandir}/man1/df.1"
+
+ALTERNATIVE_LINK_NAME[nice] = "${base_bindir}/nice"
+ALTERNATIVE_TARGET[nice] = "${bindir}/nice.${BPN}"
+ALTERNATIVE_LINK_NAME[nice.1] = "${mandir}/man1/nice.1"
+
+ALTERNATIVE_LINK_NAME[printenv] = "${base_bindir}/printenv"
+ALTERNATIVE_TARGET[printenv] = "${bindir}/printenv.${BPN}"
+
+ALTERNATIVE_LINK_NAME[lbracket] = "${bindir}/["
+ALTERNATIVE_TARGET[lbracket] = "${bindir}/lbracket.${BPN}"
+
+ALTERNATIVE_LINK_NAME[groups.1] = "${mandir}/man1/groups.1"
+ALTERNATIVE_LINK_NAME[uptime.1] = "${mandir}/man1/uptime.1"
+ALTERNATIVE_LINK_NAME[kill.1] = "${mandir}/man1/kill.1"
+ALTERNATIVE_LINK_NAME[stat.1] = "${mandir}/man1/stat.1"
+
+# The statx() requires glibc >= 2.28 and linux kernel >= 4.11, it doesn't work
+# when glibc >= 2.28 ((Ubuntu 20.04 in docker) and kernel < 4.11 (Host OS
+# CentOS 7), we can check kernel version and disable it, but that would cause
+# two different signatures for coreutils-native, so disable it unconditionally
+# for deterministic build.
+EXTRA_OECONF:append:class-native = " ac_cv_func_statx=0"
+
+python __anonymous() {
+ for prog in d.getVar('base_bindir_progs').split():
+ d.setVarFlag('ALTERNATIVE_LINK_NAME', prog, '%s/%s' % (d.getVar('base_bindir'), prog))
+
+ for prog in d.getVar('sbindir_progs').split():
+ d.setVarFlag('ALTERNATIVE_LINK_NAME', prog, '%s/%s' % (d.getVar('sbindir'), prog))
+}
+
+BBCLASSEXTEND = "native nativesdk"
+
+inherit ptest
+
+RDEPENDS:${PN}-ptest += "bash findutils gawk make perl perl-modules python3-core sed shadow"
+
+# -dev automatic dependencies fails as we don't want libmodule-build-perl-dev, its too heavy
+# may need tweaking if DEPENDS changes
+# Can't use ${PN}-dev here since flags with overrides and key expansion not supported
+RRECOMMENDS:coreutils-dev[nodeprrecs] = "1"
+RRECOMMENDS:${PN}-dev += "acl-dev attr-dev gmp-dev libcap-dev bash-dev findutils-dev gawk-dev shadow-dev"
+
+do_install_ptest () {
+ install -d ${D}${PTEST_PATH}/tests
+ cp -r ${S}/tests/* ${D}${PTEST_PATH}/tests
+ sed -i 's/ginstall/install/g' `grep -R ginstall ${D}${PTEST_PATH}/tests | awk -F: '{print $1}' | uniq`
+ install -d ${D}${PTEST_PATH}/build-aux
+ install ${S}/build-aux/test-driver ${D}${PTEST_PATH}/build-aux/
+ install -Dm 0644 ${B}/lib/config.h ${D}${PTEST_PATH}/lib/config.h
+ cp ${B}/Makefile ${D}${PTEST_PATH}/
+ cp ${S}/init.cfg ${D}${PTEST_PATH}/
+ cp -r ${B}/src ${D}${PTEST_PATH}/
+ cp -r ${S}/src/*.c ${D}${PTEST_PATH}/src
+ sed -i '/^VPATH/s/= .*$/= ./g' ${D}${PTEST_PATH}/Makefile
+ sed -i '/^PROGRAMS/s/^/#/g' ${D}${PTEST_PATH}/Makefile
+ sed -i '/^Makefile: /s/^.*$/Makefile:/g' ${D}${PTEST_PATH}/Makefile
+ sed -i '/^abs_srcdir/s/= .*$/= \$\{PWD\}/g' ${D}${PTEST_PATH}/Makefile
+ sed -i '/^abs_top_builddir/s/= .*$/= \$\{PWD\}/g' ${D}${PTEST_PATH}/Makefile
+ sed -i '/^abs_top_srcdir/s/= .*$/= \$\{PWD\}/g' ${D}${PTEST_PATH}/Makefile
+ sed -i '/^built_programs/s/ginstall/install/g' ${D}${PTEST_PATH}/Makefile
+ sed -i '/^CC =/s/ --sysroot=.*recipe-sysroot/ /g' ${D}${PTEST_PATH}/Makefile
+ chmod -R 777 ${D}${PTEST_PATH}
+
+ # Disable subcase stty-pairs.sh, it will cause test framework hang
+ sed -i '/stty-pairs.sh/d' ${D}${PTEST_PATH}/Makefile
+
+ # Disable subcase tail-2/assert.sh as it has issues on 32-bit systems
+ sed -i '/assert.sh/d' ${D}${PTEST_PATH}/Makefile
+
+ # Tweak test d_type-check to use python3 instead of python
+ sed -i "1s@.*@#!/usr/bin/python3@" ${D}${PTEST_PATH}/tests/d_type-check
+ install ${B}/src/getlimits ${D}/${bindir}
+
+ # handle multilib
+ sed -i s:@libdir@:${libdir}:g ${D}${PTEST_PATH}/run-ptest
+}
+
+do_install_ptest:append:libc-musl () {
+ # these tests fail due to bash on musl systems
+ # xmalloc: cannot allocate 16146 bytes
+ sed -i -e '/tests\/dd\/no-allocate.sh/d' ${D}${PTEST_PATH}/Makefile
+ sed -i -e '/tests\/split\/line-bytes.sh/d' ${D}${PTEST_PATH}/Makefile
+}
+FILES:${PN}-ptest += "${bindir}/getlimits"
diff --git a/meta/recipes-core/dbus-wait/dbus-wait_git.bb b/meta/recipes-core/dbus-wait/dbus-wait_git.bb
index f2eb8b1874..39363b9b3a 100644
--- a/meta/recipes-core/dbus-wait/dbus-wait_git.bb
+++ b/meta/recipes-core/dbus-wait/dbus-wait_git.bb
@@ -7,11 +7,10 @@ LIC_FILES_CHKSUM = "file://COPYING;md5=b234ee4d69f5fce4486a80fdaf4a4263"
DEPENDS = "dbus"
-SRCREV = "6cc6077a36fe2648a5f993fe7c16c9632f946517"
-PV = "0.1+git${SRCPV}"
-PR = "r2"
+SRCREV = "64bc7c8fae61ded0c4e555aa775911f84c56e438"
+PV = "0.1+git"
-SRC_URI = "git://git.yoctoproject.org/${BPN};branch=master"
+SRC_URI = "git://git.yoctoproject.org/${BPN};branch=master;protocol=https"
UPSTREAM_CHECK_COMMITS = "1"
S = "${WORKDIR}/git"
diff --git a/meta/recipes-core/dbus/dbus_1.14.0.bb b/meta/recipes-core/dbus/dbus_1.14.0.bb
deleted file mode 100644
index 7598c45f8e..0000000000
--- a/meta/recipes-core/dbus/dbus_1.14.0.bb
+++ /dev/null
@@ -1,183 +0,0 @@
-SUMMARY = "D-Bus message bus"
-DESCRIPTION = "D-Bus is a message bus system, a simple way for applications to talk to one another. In addition to interprocess communication, D-Bus helps coordinate process lifecycle; it makes it simple and reliable to code a \"single instance\" application or daemon, and to launch applications and daemons on demand when their services are needed."
-HOMEPAGE = "https://dbus.freedesktop.org"
-SECTION = "base"
-
-inherit autotools pkgconfig gettext upstream-version-is-even ptest-gnome
-
-LICENSE = "AFL-2.1 | GPL-2.0-or-later"
-LIC_FILES_CHKSUM = "file://COPYING;md5=10dded3b58148f3f1fd804b26354af3e \
- file://dbus/dbus.h;beginline=6;endline=20;md5=866739837ccd835350af94dccd6457d8"
-
-SRC_URI = "https://dbus.freedesktop.org/releases/dbus/dbus-${PV}.tar.xz \
- file://run-ptest \
- file://tmpdir.patch \
- file://dbus-1.init \
-"
-
-SRC_URI[sha256sum] = "ccd7cce37596e0a19558fd6648d1272ab43f011d80c8635aea8fd0bad58aebd4"
-
-EXTRA_OECONF = "--disable-xml-docs \
- --disable-doxygen-docs \
- --enable-largefile \
- --with-system-socket=/run/dbus/system_bus_socket \
- --enable-tests \
- --enable-checks \
- --enable-asserts \
- "
-EXTRA_OECONF:append:class-target = " SYSTEMCTL=${base_bindir}/systemctl"
-
-PACKAGECONFIG ??= "${@bb.utils.filter('DISTRO_FEATURES', 'systemd x11', d)} \
- user-session \
- "
-PACKAGECONFIG:class-native = ""
-PACKAGECONFIG:class-nativesdk = ""
-
-PACKAGECONFIG[systemd] = "--enable-systemd --with-systemdsystemunitdir=${systemd_system_unitdir},--disable-systemd --without-systemdsystemunitdir,systemd"
-PACKAGECONFIG[x11] = "--enable-x11-autolaunch,--without-x --disable-x11-autolaunch, virtual/libx11 libsm"
-PACKAGECONFIG[user-session] = "--enable-user-session --with-systemduserunitdir=${systemd_user_unitdir},--disable-user-session"
-PACKAGECONFIG[verbose-mode] = "--enable-verbose-mode,,,"
-PACKAGECONFIG[audit] = "--enable-libaudit,--disable-libaudit,audit"
-PACKAGECONFIG[selinux] = "--enable-selinux,--disable-selinux,libselinux"
-
-DEPENDS = "expat virtual/libintl autoconf-archive glib-2.0"
-RDEPENDS:${PN} += "${PN}-common ${PN}-tools"
-RDEPENDS:${PN}:class-native = ""
-
-inherit useradd update-rc.d
-
-INITSCRIPT_NAME = "dbus-1"
-INITSCRIPT_PARAMS = "start 02 5 3 2 . stop 20 0 1 6 ."
-
-python __anonymous() {
- if not bb.utils.contains('DISTRO_FEATURES', 'sysvinit', True, False, d):
- d.setVar("INHIBIT_UPDATERCD_BBCLASS", "1")
-}
-
-PACKAGES =+ "${PN}-lib ${PN}-common ${PN}-tools"
-
-USERADD_PACKAGES = "dbus-common"
-USERADD_PARAM:dbus-common = "--system --home ${localstatedir}/lib/dbus \
- --no-create-home --shell /bin/false \
- --user-group messagebus"
-
-CONFFILES:${PN} = "${sysconfdir}/dbus-1/system.conf ${sysconfdir}/dbus-1/session.conf"
-
-DEBIANNAME:${PN} = "dbus-1"
-
-OLDPKGNAME = "dbus-x11"
-OLDPKGNAME:class-nativesdk = ""
-
-# for compatibility
-RPROVIDES:${PN} = "${OLDPKGNAME}"
-RREPLACES:${PN} += "${OLDPKGNAME}"
-
-FILES:${PN} = "${bindir}/dbus-daemon* \
- ${bindir}/dbus-cleanup-sockets \
- ${bindir}/dbus-launch \
- ${bindir}/dbus-run-session \
- ${libexecdir}/dbus* \
- ${sysconfdir} \
- ${localstatedir} \
- ${systemd_system_unitdir} \
- ${systemd_user_unitdir} \
- ${nonarch_libdir}/tmpfiles.d/dbus.conf \
-"
-FILES:${PN}-common = "${sysconfdir}/dbus-1 \
- ${datadir}/dbus-1/services \
- ${datadir}/dbus-1/system-services \
- ${datadir}/dbus-1/session.d \
- ${datadir}/dbus-1/session.conf \
- ${datadir}/dbus-1/system.d \
- ${datadir}/dbus-1/system.conf \
- ${systemd_system_unitdir}/dbus.socket \
- ${systemd_system_unitdir}/sockets.target.wants \
- ${systemd_user_unitdir}/dbus.socket \
- ${systemd_user_unitdir}/sockets.target.wants \
- ${nonarch_libdir}/sysusers.d/dbus.conf \
-"
-FILES:${PN}-tools = "${bindir}/dbus-uuidgen \
- ${bindir}/dbus-send \
- ${bindir}/dbus-monitor \
- ${bindir}/dbus-update-activation-environment \
-"
-FILES:${PN}-lib = "${libdir}/lib*.so.*"
-RRECOMMENDS:${PN}-lib = "${PN}"
-FILES:${PN}-dev += "${libdir}/dbus-1.0/include ${bindir}/dbus-test-tool ${datadir}/xml/dbus-1"
-
-RDEPENDS:${PN}-ptest += "bash make dbus"
-
-PACKAGE_WRITE_DEPS += "${@bb.utils.contains('DISTRO_FEATURES','systemd sysvinit','systemd-systemctl-native','',d)}"
-pkg_postinst:dbus() {
- # If both systemd and sysvinit are enabled, mask the dbus-1 init script
- if ${@bb.utils.contains('DISTRO_FEATURES','systemd sysvinit','true','false',d)}; then
- if [ -n "$D" ]; then
- OPTS="--root=$D"
- fi
- systemctl $OPTS mask dbus-1.service
- fi
-
- if [ -z "$D" ] && [ -e /etc/init.d/populate-volatile.sh ] ; then
- /etc/init.d/populate-volatile.sh update
- fi
-}
-
-
-do_install() {
- autotools_do_install
-
- if ${@bb.utils.contains('DISTRO_FEATURES', 'sysvinit', 'true', 'false', d)}; then
- install -d ${D}${sysconfdir}/init.d
- sed 's:@bindir@:${bindir}:' < ${WORKDIR}/dbus-1.init >${WORKDIR}/dbus-1.init.sh
- install -m 0755 ${WORKDIR}/dbus-1.init.sh ${D}${sysconfdir}/init.d/dbus-1
- install -d ${D}${sysconfdir}/default/volatiles
- echo "d messagebus messagebus 0755 ${localstatedir}/run/dbus none" \
- > ${D}${sysconfdir}/default/volatiles/99_dbus
- fi
-
- if ${@bb.utils.contains('DISTRO_FEATURES', 'systemd', 'true', 'false', d)}; then
- for i in dbus.target.wants sockets.target.wants multi-user.target.wants; do \
- install -d ${D}${systemd_system_unitdir}/$i; done
- install -m 0644 ${B}/bus/dbus.service ${B}/bus/dbus.socket ${D}${systemd_system_unitdir}/
- ln -fs ../dbus.socket ${D}${systemd_system_unitdir}/dbus.target.wants/dbus.socket
- ln -fs ../dbus.socket ${D}${systemd_system_unitdir}/sockets.target.wants/dbus.socket
- ln -fs ../dbus.service ${D}${systemd_system_unitdir}/multi-user.target.wants/dbus.service
- fi
-
-
- mkdir -p ${D}${localstatedir}/lib/dbus
-
- chown messagebus:messagebus ${D}${localstatedir}/lib/dbus
-
- chown root:messagebus ${D}${libexecdir}/dbus-daemon-launch-helper
- chmod 4755 ${D}${libexecdir}/dbus-daemon-launch-helper
-
- # Remove Red Hat initscript
- rm -rf ${D}${sysconfdir}/rc.d
-
- # Remove empty testexec directory as we don't build tests
- rm -rf ${D}${libdir}/dbus-1.0/test
-
- # Remove /var/run as it is created on startup
- rm -rf ${D}${localstatedir}/run
-}
-
-do_install:class-native() {
- autotools_do_install
-
- # dbus-launch has no X support so lets not install it in case the host
- # has a more featured and useful version
- rm -f ${D}${bindir}/dbus-launch
-}
-
-do_install:class-nativesdk() {
- autotools_do_install
-
- # dbus-launch has no X support so lets not install it in case the host
- # has a more featured and useful version
- rm -f ${D}${bindir}/dbus-launch
-
- # Remove /var/run to avoid QA error
- rm -rf ${D}${localstatedir}/run
-}
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-core/dbus/dbus_1.14.10.bb b/meta/recipes-core/dbus/dbus_1.14.10.bb
new file mode 100644
index 0000000000..4fe7af7512
--- /dev/null
+++ b/meta/recipes-core/dbus/dbus_1.14.10.bb
@@ -0,0 +1,186 @@
+SUMMARY = "D-Bus message bus"
+DESCRIPTION = "D-Bus is a message bus system, a simple way for applications to talk to one another. In addition to interprocess communication, D-Bus helps coordinate process lifecycle; it makes it simple and reliable to code a \"single instance\" application or daemon, and to launch applications and daemons on demand when their services are needed."
+HOMEPAGE = "https://dbus.freedesktop.org"
+SECTION = "base"
+
+inherit autotools pkgconfig gettext upstream-version-is-even ptest-gnome
+
+LICENSE = "AFL-2.1 | GPL-2.0-or-later"
+LIC_FILES_CHKSUM = "file://COPYING;md5=6423dcd74d7be9715b0db247fd889da3 \
+ file://dbus/dbus.h;beginline=6;endline=20;md5=866739837ccd835350af94dccd6457d8 \
+ "
+
+SRC_URI = "https://dbus.freedesktop.org/releases/dbus/dbus-${PV}.tar.xz \
+ file://run-ptest \
+ file://tmpdir.patch \
+ file://dbus-1.init \
+ "
+
+SRC_URI[sha256sum] = "ba1f21d2bd9d339da2d4aa8780c09df32fea87998b73da24f49ab9df1e36a50f"
+
+EXTRA_OECONF = "--disable-xml-docs \
+ --disable-doxygen-docs \
+ --enable-largefile \
+ --with-system-socket=/run/dbus/system_bus_socket \
+ --enable-modular-tests \
+ --enable-checks \
+ --runstatedir=/run \
+ "
+EXTRA_OECONF:append:class-target = " SYSTEMCTL=${base_bindir}/systemctl"
+
+PACKAGECONFIG ??= "${@bb.utils.filter('DISTRO_FEATURES', 'systemd x11', d)} \
+ user-session \
+ "
+PACKAGECONFIG:class-native = ""
+PACKAGECONFIG:class-nativesdk = ""
+
+PACKAGECONFIG[systemd] = "--enable-systemd --with-systemdsystemunitdir=${systemd_system_unitdir},--disable-systemd --without-systemdsystemunitdir,systemd"
+PACKAGECONFIG[x11] = "--enable-x11-autolaunch,--without-x --disable-x11-autolaunch, virtual/libx11 libsm"
+PACKAGECONFIG[user-session] = "--enable-user-session --with-systemduserunitdir=${systemd_user_unitdir},--disable-user-session"
+PACKAGECONFIG[verbose-mode] = "--enable-verbose-mode,,,"
+PACKAGECONFIG[audit] = "--enable-libaudit,--disable-libaudit,audit"
+PACKAGECONFIG[selinux] = "--enable-selinux,--disable-selinux,libselinux"
+
+DEPENDS = "expat virtual/libintl autoconf-archive-native glib-2.0"
+RDEPENDS:${PN} += "${PN}-common ${PN}-tools"
+RDEPENDS:${PN}:class-native = ""
+
+inherit useradd update-rc.d
+
+INITSCRIPT_NAME = "dbus-1"
+INITSCRIPT_PARAMS = "start 02 5 3 2 . stop 20 0 1 6 ."
+
+python __anonymous() {
+ if not bb.utils.contains('DISTRO_FEATURES', 'sysvinit', True, False, d):
+ d.setVar("INHIBIT_UPDATERCD_BBCLASS", "1")
+}
+
+PACKAGES =+ "${PN}-lib ${PN}-common ${PN}-tools"
+
+USERADD_PACKAGES = "dbus-common"
+USERADD_PARAM:dbus-common = "--system --home ${localstatedir}/lib/dbus \
+ --no-create-home --shell /bin/false \
+ --user-group messagebus"
+
+CONFFILES:${PN} = "${sysconfdir}/dbus-1/system.conf ${sysconfdir}/dbus-1/session.conf"
+
+DEBIANNAME:${PN} = "dbus-1"
+
+OLDPKGNAME = "dbus-x11"
+OLDPKGNAME:class-nativesdk = ""
+
+# for compatibility
+RPROVIDES:${PN} = "${OLDPKGNAME}"
+RREPLACES:${PN} += "${OLDPKGNAME}"
+
+FILES:${PN} = "${bindir}/dbus-daemon* \
+ ${bindir}/dbus-cleanup-sockets \
+ ${bindir}/dbus-launch \
+ ${bindir}/dbus-run-session \
+ ${libexecdir}/dbus* \
+ ${sysconfdir} \
+ ${localstatedir} \
+ ${systemd_system_unitdir} \
+ ${systemd_user_unitdir} \
+ ${nonarch_libdir}/tmpfiles.d/dbus.conf \
+"
+FILES:${PN}-common = "${sysconfdir}/dbus-1 \
+ ${datadir}/dbus-1/services \
+ ${datadir}/dbus-1/system-services \
+ ${datadir}/dbus-1/session.d \
+ ${datadir}/dbus-1/session.conf \
+ ${datadir}/dbus-1/system.d \
+ ${datadir}/dbus-1/system.conf \
+ ${systemd_system_unitdir}/dbus.socket \
+ ${systemd_system_unitdir}/sockets.target.wants \
+ ${systemd_user_unitdir}/dbus.socket \
+ ${systemd_user_unitdir}/sockets.target.wants \
+ ${nonarch_libdir}/sysusers.d/dbus.conf \
+"
+FILES:${PN}-tools = "${bindir}/dbus-uuidgen \
+ ${bindir}/dbus-send \
+ ${bindir}/dbus-monitor \
+ ${bindir}/dbus-update-activation-environment \
+"
+FILES:${PN}-lib = "${libdir}/lib*.so.*"
+RRECOMMENDS:${PN}-lib = "${PN}"
+FILES:${PN}-dev += "${libdir}/dbus-1.0/include ${bindir}/dbus-test-tool ${datadir}/xml/dbus-1"
+
+RDEPENDS:${PN}-ptest += "bash make dbus"
+
+PACKAGE_WRITE_DEPS += "${@bb.utils.contains('DISTRO_FEATURES','systemd sysvinit','systemd-systemctl-native','',d)}"
+pkg_postinst:dbus() {
+ # If both systemd and sysvinit are enabled, mask the dbus-1 init script
+ if ${@bb.utils.contains('DISTRO_FEATURES','systemd sysvinit','true','false',d)}; then
+ if [ -n "$D" ]; then
+ OPTS="--root=$D"
+ fi
+ systemctl $OPTS mask dbus-1.service
+ fi
+
+ if [ -z "$D" ] && [ -e /etc/init.d/populate-volatile.sh ] ; then
+ /etc/init.d/populate-volatile.sh update
+ fi
+}
+
+
+do_install() {
+ autotools_do_install
+
+ if ${@bb.utils.contains('DISTRO_FEATURES', 'sysvinit', 'true', 'false', d)}; then
+ install -d ${D}${sysconfdir}/init.d
+ sed 's:@bindir@:${bindir}:' < ${WORKDIR}/dbus-1.init >${WORKDIR}/dbus-1.init.sh
+ install -m 0755 ${WORKDIR}/dbus-1.init.sh ${D}${sysconfdir}/init.d/dbus-1
+ install -d ${D}${sysconfdir}/default/volatiles
+ echo "d messagebus messagebus 0755 /run/dbus none" \
+ > ${D}${sysconfdir}/default/volatiles/99_dbus
+ fi
+
+ if ${@bb.utils.contains('DISTRO_FEATURES', 'systemd', 'true', 'false', d)}; then
+ for i in dbus.target.wants sockets.target.wants multi-user.target.wants; do \
+ install -d ${D}${systemd_system_unitdir}/$i; done
+ install -m 0644 ${B}/bus/dbus.service ${B}/bus/dbus.socket ${D}${systemd_system_unitdir}/
+ ln -fs ../dbus.socket ${D}${systemd_system_unitdir}/dbus.target.wants/dbus.socket
+ ln -fs ../dbus.socket ${D}${systemd_system_unitdir}/sockets.target.wants/dbus.socket
+ ln -fs ../dbus.service ${D}${systemd_system_unitdir}/multi-user.target.wants/dbus.service
+ fi
+
+
+ mkdir -p ${D}${localstatedir}/lib/dbus
+
+ chown messagebus:messagebus ${D}${localstatedir}/lib/dbus
+
+ chown root:messagebus ${D}${libexecdir}/dbus-daemon-launch-helper
+ chmod 4755 ${D}${libexecdir}/dbus-daemon-launch-helper
+
+ # Remove Red Hat initscript
+ rm -rf ${D}${sysconfdir}/rc.d
+
+ # Remove empty testexec directory as we don't build tests
+ rm -rf ${D}${libdir}/dbus-1.0/test
+
+ # Remove /var/run as it is created on startup
+ rm -rf ${D}${localstatedir}/run
+}
+
+do_install:class-native() {
+ autotools_do_install
+
+ # dbus-launch has no X support so lets not install it in case the host
+ # has a more featured and useful version
+ rm -f ${D}${bindir}/dbus-launch
+}
+
+do_install:class-nativesdk() {
+ autotools_do_install
+
+ # dbus-launch has no X support so lets not install it in case the host
+ # has a more featured and useful version
+ rm -f ${D}${bindir}/dbus-launch
+
+ # Remove /var/run to avoid QA error
+ rm -rf ${D}${localstatedir}/run
+}
+BBCLASSEXTEND = "native nativesdk"
+
+CVE_PRODUCT += "d-bus_project:d-bus freedesktop:dbus freedesktop:libdbus"
diff --git a/meta/recipes-core/dropbear/dropbear.inc b/meta/recipes-core/dropbear/dropbear.inc
deleted file mode 100644
index 78f9f9adbd..0000000000
--- a/meta/recipes-core/dropbear/dropbear.inc
+++ /dev/null
@@ -1,126 +0,0 @@
-SUMMARY = "A lightweight SSH and SCP implementation"
-HOMEPAGE = "http://matt.ucc.asn.au/dropbear/dropbear.html"
-DESCRIPTION = "Dropbear is a relatively small SSH server and client. It runs on a variety of POSIX-based platforms. Dropbear is open source software, distributed under a MIT-style license. Dropbear is particularly useful for "embedded"-type Linux (or other Unix) systems, such as wireless routers."
-SECTION = "console/network"
-
-# some files are from other projects and have others license terms:
-# public domain, OpenSSH 3.5p1, OpenSSH3.6.1p2, PuTTY
-LICENSE = "MIT & BSD-3-Clause & BSD-2-Clause & PD"
-LIC_FILES_CHKSUM = "file://LICENSE;md5=25cf44512b7bc8966a48b6b1a9b7605f"
-
-DEPENDS = "zlib virtual/crypt"
-RPROVIDES:${PN} = "ssh sshd"
-RCONFLICTS:${PN} = "openssh-sshd openssh"
-
-DEPENDS += "${@bb.utils.contains('DISTRO_FEATURES', 'pam', 'libpam', '', d)}"
-
-SRC_URI = "http://matt.ucc.asn.au/dropbear/releases/dropbear-${PV}.tar.bz2 \
- file://0001-urandom-xauth-changes-to-options.h.patch \
- file://init \
- file://dropbearkey.service \
- file://dropbear@.service \
- file://dropbear.socket \
- file://dropbear.default \
- ${@bb.utils.contains('DISTRO_FEATURES', 'pam', '${PAM_SRC_URI}', '', d)} \
- ${@bb.utils.contains('PACKAGECONFIG', 'disable-weak-ciphers', 'file://dropbear-disable-weak-ciphers.patch', '', d)} "
-
-PAM_SRC_URI = "file://0005-dropbear-enable-pam.patch \
- file://0006-dropbear-configuration-file.patch \
- file://dropbear"
-
-PAM_PLUGINS = "libpam-runtime \
- pam-plugin-deny \
- pam-plugin-permit \
- pam-plugin-unix \
- "
-RDEPENDS:${PN} += "${@bb.utils.contains('DISTRO_FEATURES', 'pam', '${PAM_PLUGINS}', '', d)}"
-
-inherit autotools update-rc.d systemd
-
-CVE_PRODUCT = "dropbear_ssh"
-
-INITSCRIPT_NAME = "dropbear"
-INITSCRIPT_PARAMS = "defaults 10"
-
-SYSTEMD_SERVICE:${PN} = "dropbear.socket"
-
-SBINCOMMANDS = "dropbear dropbearkey dropbearconvert"
-BINCOMMANDS = "dbclient ssh scp"
-EXTRA_OEMAKE = 'MULTI=1 SCPPROGRESS=1 PROGRAMS="${SBINCOMMANDS} ${BINCOMMANDS}"'
-
-PACKAGECONFIG ?= "disable-weak-ciphers"
-PACKAGECONFIG[system-libtom] = "--disable-bundled-libtom,--enable-bundled-libtom,libtommath libtomcrypt"
-PACKAGECONFIG[disable-weak-ciphers] = ""
-
-EXTRA_OECONF += "\
- ${@bb.utils.contains('DISTRO_FEATURES', 'pam', '--enable-pam', '--disable-pam', d)}"
-
-# This option appends to CFLAGS and LDFLAGS from OE
-# This is causing [textrel] QA warning
-EXTRA_OECONF += "--disable-harden"
-
-# musl does not implement wtmp/logwtmp APIs
-EXTRA_OECONF:append:libc-musl = " --disable-wtmp --disable-lastlog"
-
-do_install() {
- install -d ${D}${sysconfdir} \
- ${D}${sysconfdir}/init.d \
- ${D}${sysconfdir}/default \
- ${D}${sysconfdir}/dropbear \
- ${D}${bindir} \
- ${D}${sbindir} \
- ${D}${localstatedir}
-
- install -m 0644 ${WORKDIR}/dropbear.default ${D}${sysconfdir}/default/dropbear
-
- install -m 0755 dropbearmulti ${D}${sbindir}/
-
- for i in ${BINCOMMANDS}
- do
- # ssh and scp symlinks are created by update-alternatives
- if [ $i = ssh ] || [ $i = scp ]; then continue; fi
- ln -s ${sbindir}/dropbearmulti ${D}${bindir}/$i
- done
- for i in ${SBINCOMMANDS}
- do
- ln -s ./dropbearmulti ${D}${sbindir}/$i
- done
- sed -e 's,/etc,${sysconfdir},g' \
- -e 's,/usr/sbin,${sbindir},g' \
- -e 's,/var,${localstatedir},g' \
- -e 's,/usr/bin,${bindir},g' \
- -e 's,/usr,${prefix},g' ${WORKDIR}/init > ${D}${sysconfdir}/init.d/dropbear
- chmod 755 ${D}${sysconfdir}/init.d/dropbear
- if [ "${@bb.utils.filter('DISTRO_FEATURES', 'pam', d)}" ]; then
- install -d ${D}${sysconfdir}/pam.d
- install -m 0644 ${WORKDIR}/dropbear ${D}${sysconfdir}/pam.d/
- fi
-
- # deal with systemd unit files
- install -d ${D}${systemd_system_unitdir}
- install -m 0644 ${WORKDIR}/dropbearkey.service ${D}${systemd_system_unitdir}
- install -m 0644 ${WORKDIR}/dropbear@.service ${D}${systemd_system_unitdir}
- install -m 0644 ${WORKDIR}/dropbear.socket ${D}${systemd_system_unitdir}
- sed -i -e 's,@BASE_BINDIR@,${base_bindir},g' \
- -e 's,@BINDIR@,${bindir},g' \
- -e 's,@SBINDIR@,${sbindir},g' \
- ${D}${systemd_system_unitdir}/dropbear.socket ${D}${systemd_system_unitdir}/*.service
-}
-
-inherit update-alternatives
-
-ALTERNATIVE_PRIORITY = "20"
-ALTERNATIVE:${PN} = "${@bb.utils.filter('BINCOMMANDS', 'scp ssh', d)}"
-
-ALTERNATIVE_TARGET = "${sbindir}/dropbearmulti"
-
-pkg_postrm:${PN} () {
- if [ -f "${sysconfdir}/dropbear/dropbear_rsa_host_key" ]; then
- rm ${sysconfdir}/dropbear/dropbear_rsa_host_key
- fi
- if [ -f "${sysconfdir}/dropbear/dropbear_dss_host_key" ]; then
- rm ${sysconfdir}/dropbear/dropbear_dss_host_key
- fi
-}
-
-FILES:${PN} += "${bindir}"
diff --git a/meta/recipes-core/dropbear/dropbear/CVE-2023-36328.patch b/meta/recipes-core/dropbear/dropbear/CVE-2023-36328.patch
new file mode 100644
index 0000000000..ec50d69816
--- /dev/null
+++ b/meta/recipes-core/dropbear/dropbear/CVE-2023-36328.patch
@@ -0,0 +1,144 @@
+From beba892bc0d4e4ded4d667ab1d2a94f4d75109a9 Mon Sep 17 00:00:00 2001
+From: czurnieden <czurnieden@gmx.de>
+Date: Fri, 8 Sep 2023 10:07:32 +0000
+Subject: [PATCH] Fix possible integer overflow
+
+CVE: CVE-2023-36328
+
+Upstream-Status: Backport [https://github.com/libtom/libtommath/commit/beba892bc0d4e4ded4d667ab1d2a94f4d75109a9]
+
+Signed-off-by: Yogita Urade <yogita.urade@windriver.com>
+---
+ libtommath/bn_mp_2expt.c | 4 ++++
+ libtommath/bn_mp_grow.c | 4 ++++
+ libtommath/bn_mp_init_size.c | 5 +++++
+ libtommath/bn_mp_mul_2d.c | 4 ++++
+ libtommath/bn_s_mp_mul_digs.c | 4 ++++
+ libtommath/bn_s_mp_mul_digs_fast.c | 4 ++++
+ libtommath/bn_s_mp_mul_high_digs.c | 4 ++++
+ libtommath/bn_s_mp_mul_high_digs_fast.c | 4 ++++
+ 8 files changed, 33 insertions(+)
+
+diff --git a/libtommath/bn_mp_2expt.c b/libtommath/bn_mp_2expt.c
+index 0ae3df1..ca6fbc3 100644
+--- a/libtommath/bn_mp_2expt.c
++++ b/libtommath/bn_mp_2expt.c
+@@ -12,6 +12,10 @@ mp_err mp_2expt(mp_int *a, int b)
+ {
+ mp_err err;
+
++ if (b < 0) {
++ return MP_VAL;
++ }
++
+ /* zero a as per default */
+ mp_zero(a);
+
+diff --git a/libtommath/bn_mp_grow.c b/libtommath/bn_mp_grow.c
+index 9e904c5..2b16826 100644
+--- a/libtommath/bn_mp_grow.c
++++ b/libtommath/bn_mp_grow.c
+@@ -9,6 +9,10 @@ mp_err mp_grow(mp_int *a, int size)
+ int i;
+ mp_digit *tmp;
+
++ if (size < 0) {
++ return MP_VAL;
++ }
++
+ /* if the alloc size is smaller alloc more ram */
+ if (a->alloc < size) {
+ /* reallocate the array a->dp
+diff --git a/libtommath/bn_mp_init_size.c b/libtommath/bn_mp_init_size.c
+index d622687..5fefa96 100644
+--- a/libtommath/bn_mp_init_size.c
++++ b/libtommath/bn_mp_init_size.c
+@@ -6,6 +6,11 @@
+ /* init an mp_init for a given size */
+ mp_err mp_init_size(mp_int *a, int size)
+ {
++
++ if (size < 0) {
++ return MP_VAL;
++ }
++
+ size = MP_MAX(MP_MIN_PREC, size);
+
+ /* alloc mem */
+diff --git a/libtommath/bn_mp_mul_2d.c b/libtommath/bn_mp_mul_2d.c
+index 87354de..2744163 100644
+--- a/libtommath/bn_mp_mul_2d.c
++++ b/libtommath/bn_mp_mul_2d.c
+@@ -9,6 +9,10 @@ mp_err mp_mul_2d(const mp_int *a, int b, mp_int *c)
+ mp_digit d;
+ mp_err err;
+
++ if (b < 0) {
++ return MP_VAL;
++ }
++
+ /* copy */
+ if (a != c) {
+ if ((err = mp_copy(a, c)) != MP_OKAY) {
+diff --git a/libtommath/bn_s_mp_mul_digs.c b/libtommath/bn_s_mp_mul_digs.c
+index 64509d4..2d2f5b0 100644
+--- a/libtommath/bn_s_mp_mul_digs.c
++++ b/libtommath/bn_s_mp_mul_digs.c
+@@ -16,6 +16,10 @@ mp_err s_mp_mul_digs(const mp_int *a, const mp_int *b, mp_int *c, int digs)
+ mp_word r;
+ mp_digit tmpx, *tmpt, *tmpy;
+
++ if (digs < 0) {
++ return MP_VAL;
++ }
++
+ /* can we use the fast multiplier? */
+ if ((digs < MP_WARRAY) &&
+ (MP_MIN(a->used, b->used) < MP_MAXFAST)) {
+diff --git a/libtommath/bn_s_mp_mul_digs_fast.c b/libtommath/bn_s_mp_mul_digs_fast.c
+index b2a287b..d6dd3cc 100644
+--- a/libtommath/bn_s_mp_mul_digs_fast.c
++++ b/libtommath/bn_s_mp_mul_digs_fast.c
+@@ -26,6 +26,10 @@ mp_err s_mp_mul_digs_fast(const mp_int *a, const mp_int *b, mp_int *c, int digs)
+ mp_digit W[MP_WARRAY];
+ mp_word _W;
+
++ if (digs < 0) {
++ return MP_VAL;
++ }
++
+ /* grow the destination as required */
+ if (c->alloc < digs) {
+ if ((err = mp_grow(c, digs)) != MP_OKAY) {
+diff --git a/libtommath/bn_s_mp_mul_high_digs.c b/libtommath/bn_s_mp_mul_high_digs.c
+index 2bb2a50..c9dd355 100644
+--- a/libtommath/bn_s_mp_mul_high_digs.c
++++ b/libtommath/bn_s_mp_mul_high_digs.c
+@@ -15,6 +15,10 @@ mp_err s_mp_mul_high_digs(const mp_int *a, const mp_int *b, mp_int *c, int digs)
+ mp_word r;
+ mp_digit tmpx, *tmpt, *tmpy;
+
++ if (digs < 0) {
++ return MP_VAL;
++ }
++
+ /* can we use the fast multiplier? */
+ if (MP_HAS(S_MP_MUL_HIGH_DIGS_FAST)
+ && ((a->used + b->used + 1) < MP_WARRAY)
+diff --git a/libtommath/bn_s_mp_mul_high_digs_fast.c b/libtommath/bn_s_mp_mul_high_digs_fast.c
+index a2c4fb6..afe3e4b 100644
+--- a/libtommath/bn_s_mp_mul_high_digs_fast.c
++++ b/libtommath/bn_s_mp_mul_high_digs_fast.c
+@@ -19,6 +19,10 @@ mp_err s_mp_mul_high_digs_fast(const mp_int *a, const mp_int *b, mp_int *c, int
+ mp_digit W[MP_WARRAY];
+ mp_word _W;
+
++ if (digs < 0) {
++ return MP_VAL;
++ }
++
+ /* grow the destination as required */
+ pa = a->used + b->used;
+ if (c->alloc < pa) {
+--
+2.35.5
diff --git a/meta/recipes-core/dropbear/dropbear_2022.82.bb b/meta/recipes-core/dropbear/dropbear_2022.82.bb
deleted file mode 100644
index 154a407a19..0000000000
--- a/meta/recipes-core/dropbear/dropbear_2022.82.bb
+++ /dev/null
@@ -1,3 +0,0 @@
-require dropbear.inc
-
-SRC_URI[sha256sum] = "3a038d2bbc02bf28bbdd20c012091f741a3ec5cbe460691811d714876aad75d1"
diff --git a/meta/recipes-core/dropbear/dropbear_2022.83.bb b/meta/recipes-core/dropbear/dropbear_2022.83.bb
new file mode 100644
index 0000000000..528eff1a10
--- /dev/null
+++ b/meta/recipes-core/dropbear/dropbear_2022.83.bb
@@ -0,0 +1,132 @@
+SUMMARY = "A lightweight SSH and SCP implementation"
+HOMEPAGE = "http://matt.ucc.asn.au/dropbear/dropbear.html"
+DESCRIPTION = "Dropbear is a relatively small SSH server and client. It runs on a variety of POSIX-based platforms. Dropbear is open source software, distributed under a MIT-style license. Dropbear is particularly useful for "embedded"-type Linux (or other Unix) systems, such as wireless routers."
+SECTION = "console/network"
+
+# some files are from other projects and have others license terms:
+# public domain, OpenSSH 3.5p1, OpenSSH3.6.1p2, PuTTY
+LICENSE = "MIT & BSD-3-Clause & BSD-2-Clause & PD"
+LIC_FILES_CHKSUM = "file://LICENSE;md5=25cf44512b7bc8966a48b6b1a9b7605f"
+
+DEPENDS = "zlib virtual/crypt"
+RPROVIDES:${PN} = "ssh sshd"
+RCONFLICTS:${PN} = "openssh-sshd openssh"
+
+SRC_URI = "http://matt.ucc.asn.au/dropbear/releases/dropbear-${PV}.tar.bz2 \
+ file://0001-urandom-xauth-changes-to-options.h.patch \
+ file://init \
+ file://dropbearkey.service \
+ file://dropbear@.service \
+ file://dropbear.socket \
+ file://dropbear.default \
+ ${@bb.utils.contains('DISTRO_FEATURES', 'pam', '${PAM_SRC_URI}', '', d)} \
+ ${@bb.utils.contains('PACKAGECONFIG', 'disable-weak-ciphers', 'file://dropbear-disable-weak-ciphers.patch', '', d)} \
+ file://CVE-2023-36328.patch \
+ "
+
+SRC_URI[sha256sum] = "bc5a121ffbc94b5171ad5ebe01be42746d50aa797c9549a4639894a16749443b"
+
+PAM_SRC_URI = "file://0005-dropbear-enable-pam.patch \
+ file://0006-dropbear-configuration-file.patch \
+ file://dropbear"
+
+PAM_PLUGINS = "libpam-runtime \
+ pam-plugin-deny \
+ pam-plugin-permit \
+ pam-plugin-unix \
+ "
+inherit autotools update-rc.d systemd
+
+CVE_PRODUCT = "dropbear_ssh"
+
+INITSCRIPT_NAME = "dropbear"
+INITSCRIPT_PARAMS = "defaults 10"
+
+SYSTEMD_SERVICE:${PN} = "dropbear.socket"
+
+SBINCOMMANDS = "dropbear dropbearkey dropbearconvert"
+BINCOMMANDS = "dbclient ssh scp"
+EXTRA_OEMAKE = 'MULTI=1 SCPPROGRESS=1 PROGRAMS="${SBINCOMMANDS} ${BINCOMMANDS}"'
+
+PACKAGECONFIG ?= "disable-weak-ciphers ${@bb.utils.filter('DISTRO_FEATURES', 'pam', d)}"
+PACKAGECONFIG[pam] = "--enable-pam,--disable-pam,libpam,${PAM_PLUGINS}"
+PACKAGECONFIG[system-libtom] = "--disable-bundled-libtom,--enable-bundled-libtom,libtommath libtomcrypt"
+PACKAGECONFIG[disable-weak-ciphers] = ""
+PACKAGECONFIG[enable-x11-forwarding] = ""
+
+# This option appends to CFLAGS and LDFLAGS from OE
+# This is causing [textrel] QA warning
+EXTRA_OECONF += "--disable-harden"
+
+# musl does not implement wtmp/logwtmp APIs
+EXTRA_OECONF:append:libc-musl = " --disable-wtmp --disable-lastlog"
+
+do_configure:append() {
+ echo "/* Dropbear features */" > ${B}/localoptions.h
+ if ${@bb.utils.contains('PACKAGECONFIG', 'enable-x11-forwarding', 'true', 'false', d)}; then
+ echo "#define DROPBEAR_X11FWD 1" >> ${B}/localoptions.h
+ fi
+}
+
+do_install() {
+ install -d ${D}${sysconfdir} \
+ ${D}${sysconfdir}/init.d \
+ ${D}${sysconfdir}/default \
+ ${D}${sysconfdir}/dropbear \
+ ${D}${bindir} \
+ ${D}${sbindir} \
+ ${D}${localstatedir}
+
+ install -m 0644 ${WORKDIR}/dropbear.default ${D}${sysconfdir}/default/dropbear
+
+ install -m 0755 dropbearmulti ${D}${sbindir}/
+
+ for i in ${BINCOMMANDS}
+ do
+ # ssh and scp symlinks are created by update-alternatives
+ if [ $i = ssh ] || [ $i = scp ]; then continue; fi
+ ln -s ${sbindir}/dropbearmulti ${D}${bindir}/$i
+ done
+ for i in ${SBINCOMMANDS}
+ do
+ ln -s ./dropbearmulti ${D}${sbindir}/$i
+ done
+ sed -e 's,/etc,${sysconfdir},g' \
+ -e 's,/usr/sbin,${sbindir},g' \
+ -e 's,/var,${localstatedir},g' \
+ -e 's,/usr/bin,${bindir},g' \
+ -e 's,/usr,${prefix},g' ${WORKDIR}/init > ${D}${sysconfdir}/init.d/dropbear
+ chmod 755 ${D}${sysconfdir}/init.d/dropbear
+ if [ "${@bb.utils.filter('DISTRO_FEATURES', 'pam', d)}" ]; then
+ install -d ${D}${sysconfdir}/pam.d
+ install -m 0644 ${WORKDIR}/dropbear ${D}${sysconfdir}/pam.d/
+ fi
+
+ # deal with systemd unit files
+ install -d ${D}${systemd_system_unitdir}
+ install -m 0644 ${WORKDIR}/dropbearkey.service ${D}${systemd_system_unitdir}
+ install -m 0644 ${WORKDIR}/dropbear@.service ${D}${systemd_system_unitdir}
+ install -m 0644 ${WORKDIR}/dropbear.socket ${D}${systemd_system_unitdir}
+ sed -i -e 's,@BASE_BINDIR@,${base_bindir},g' \
+ -e 's,@BINDIR@,${bindir},g' \
+ -e 's,@SBINDIR@,${sbindir},g' \
+ ${D}${systemd_system_unitdir}/dropbear.socket ${D}${systemd_system_unitdir}/*.service
+}
+
+inherit update-alternatives
+
+ALTERNATIVE_PRIORITY = "20"
+ALTERNATIVE:${PN} = "${@bb.utils.filter('BINCOMMANDS', 'scp ssh', d)}"
+
+ALTERNATIVE_TARGET = "${sbindir}/dropbearmulti"
+
+pkg_postrm:${PN} () {
+ if [ -f "${sysconfdir}/dropbear/dropbear_rsa_host_key" ]; then
+ rm ${sysconfdir}/dropbear/dropbear_rsa_host_key
+ fi
+ if [ -f "${sysconfdir}/dropbear/dropbear_dss_host_key" ]; then
+ rm ${sysconfdir}/dropbear/dropbear_dss_host_key
+ fi
+}
+
+CONFFILES:${PN} = "${sysconfdir}/default/dropbear"
diff --git a/meta/recipes-core/ell/ell_0.50.bb b/meta/recipes-core/ell/ell_0.50.bb
deleted file mode 100644
index 243ac01530..0000000000
--- a/meta/recipes-core/ell/ell_0.50.bb
+++ /dev/null
@@ -1,23 +0,0 @@
-SUMMARY = "Embedded Linux Library"
-HOMEPAGE = "https://01.org/ell"
-DESCRIPTION = "The Embedded Linux Library (ELL) provides core, \
-low-level functionality for system daemons. It typically has no \
-dependencies other than the Linux kernel, C standard library, and \
-libdl (for dynamic linking). While ELL is designed to be efficient \
-and compact enough for use on embedded Linux platforms, it is not \
-limited to resource-constrained systems."
-SECTION = "libs"
-LICENSE = "LGPL-2.1-only"
-LIC_FILES_CHKSUM = "file://COPYING;md5=fb504b67c50331fc78734fed90fb0e09"
-
-DEPENDS = "dbus"
-
-inherit autotools pkgconfig
-
-SRC_URI = "https://mirrors.edge.kernel.org/pub/linux/libs/${BPN}/${BPN}-${PV}.tar.xz \
- "
-SRC_URI[sha256sum] = "0fe51d51c6eddc2a2784092f1dfdd1143a5ef27f15c274ecfbadd680d3a72fd9"
-
-do_configure:prepend () {
- mkdir -p ${S}/build-aux
-}
diff --git a/meta/recipes-core/ell/ell_0.64.bb b/meta/recipes-core/ell/ell_0.64.bb
new file mode 100644
index 0000000000..c4f16905fd
--- /dev/null
+++ b/meta/recipes-core/ell/ell_0.64.bb
@@ -0,0 +1,22 @@
+SUMMARY = "Embedded Linux Library"
+HOMEPAGE = "https://01.org/ell"
+DESCRIPTION = "The Embedded Linux Library (ELL) provides core, \
+low-level functionality for system daemons. It typically has no \
+dependencies other than the Linux kernel, C standard library, and \
+libdl (for dynamic linking). While ELL is designed to be efficient \
+and compact enough for use on embedded Linux platforms, it is not \
+limited to resource-constrained systems."
+SECTION = "libs"
+LICENSE = "LGPL-2.1-only"
+LIC_FILES_CHKSUM = "file://COPYING;md5=fb504b67c50331fc78734fed90fb0e09"
+
+DEPENDS = "dbus"
+
+inherit autotools pkgconfig
+
+SRC_URI = "https://mirrors.edge.kernel.org/pub/linux/libs/${BPN}/${BPN}-${PV}.tar.xz"
+SRC_URI[sha256sum] = "760f3901078409f66cadf1bb24c8bdc60f13d53f6dd66b88631221d2494f8405"
+
+do_configure:prepend () {
+ mkdir -p ${S}/build-aux
+}
diff --git a/meta/recipes-core/expat/expat/run-ptest b/meta/recipes-core/expat/expat/run-ptest
index dbf602ca80..ff7986db3c 100644
--- a/meta/recipes-core/expat/expat/run-ptest
+++ b/meta/recipes-core/expat/expat/run-ptest
@@ -5,5 +5,5 @@ TIME=$(which time)
echo "runtests"
${TIME} -f 'Execution time: %e s' bash -c "./runtests -v"
echo "runtestspp"
-${TIME} -f 'Execution time: %e s' bash -c "./runtestspp -v"
+${TIME} -f 'Execution time: %e s' bash -c "./runtests_cxx -v"
echo
diff --git a/meta/recipes-core/expat/expat_2.4.8.bb b/meta/recipes-core/expat/expat_2.4.8.bb
deleted file mode 100644
index 980c488640..0000000000
--- a/meta/recipes-core/expat/expat_2.4.8.bb
+++ /dev/null
@@ -1,31 +0,0 @@
-SUMMARY = "A stream-oriented XML parser library"
-DESCRIPTION = "Expat is an XML parser library written in C. It is a stream-oriented parser in which an application registers handlers for things the parser might find in the XML document (like start tags)"
-HOMEPAGE = "https://github.com/libexpat/libexpat"
-SECTION = "libs"
-LICENSE = "MIT"
-
-LIC_FILES_CHKSUM = "file://COPYING;md5=9e2ce3b3c4c0f2670883a23bbd7c37a9"
-
-VERSION_TAG = "${@d.getVar('PV').replace('.', '_')}"
-
-SRC_URI = "https://github.com/libexpat/libexpat/releases/download/R_${VERSION_TAG}/expat-${PV}.tar.bz2 \
- file://run-ptest \
- "
-
-UPSTREAM_CHECK_URI = "https://github.com/libexpat/libexpat/releases/"
-
-SRC_URI[sha256sum] = "a247a7f6bbb21cf2ca81ea4cbb916bfb9717ca523631675f99b3d4a5678dcd16"
-
-EXTRA_OECMAKE:class-native += "-DEXPAT_BUILD_DOCS=OFF"
-
-RDEPENDS:${PN}-ptest += "bash"
-
-inherit cmake lib_package ptest
-
-do_install_ptest:class-target() {
- install -m 755 ${B}/tests/* ${D}${PTEST_PATH}
-}
-
-BBCLASSEXTEND += "native nativesdk"
-
-CVE_PRODUCT = "expat libexpat"
diff --git a/meta/recipes-core/expat/expat_2.6.2.bb b/meta/recipes-core/expat/expat_2.6.2.bb
new file mode 100644
index 0000000000..6c9db91bef
--- /dev/null
+++ b/meta/recipes-core/expat/expat_2.6.2.bb
@@ -0,0 +1,33 @@
+SUMMARY = "A stream-oriented XML parser library"
+DESCRIPTION = "Expat is an XML parser library written in C. It is a stream-oriented parser in which an application registers handlers for things the parser might find in the XML document (like start tags)"
+HOMEPAGE = "https://github.com/libexpat/libexpat"
+SECTION = "libs"
+LICENSE = "MIT"
+
+LIC_FILES_CHKSUM = "file://COPYING;md5=7b3b078238d0901d3b339289117cb7fb"
+
+VERSION_TAG = "${@d.getVar('PV').replace('.', '_')}"
+
+SRC_URI = "${GITHUB_BASE_URI}/download/R_${VERSION_TAG}/expat-${PV}.tar.bz2 \
+ file://run-ptest \
+ "
+
+GITHUB_BASE_URI = "https://github.com/libexpat/libexpat/releases/"
+UPSTREAM_CHECK_REGEX = "releases/tag/R_(?P<pver>.+)"
+
+SRC_URI[sha256sum] = "9c7c1b5dcbc3c237c500a8fb1493e14d9582146dd9b42aa8d3ffb856a3b927e0"
+
+EXTRA_OECMAKE:class-native += "-DEXPAT_BUILD_DOCS=OFF"
+
+RDEPENDS:${PN}-ptest += "bash"
+
+inherit cmake lib_package ptest github-releases
+
+do_install_ptest:class-target() {
+ install -m 755 ${B}/tests/runtests* ${D}${PTEST_PATH}
+ install -m 755 ${B}/tests/benchmark/benchmark ${D}${PTEST_PATH}
+}
+
+BBCLASSEXTEND += "native nativesdk"
+
+CVE_PRODUCT = "expat libexpat"
diff --git a/meta/recipes-core/fts/fts_1.2.7.bb b/meta/recipes-core/fts/fts_1.2.7.bb
index d3b0f31eda..10103830af 100644
--- a/meta/recipes-core/fts/fts_1.2.7.bb
+++ b/meta/recipes-core/fts/fts_1.2.7.bb
@@ -1,7 +1,7 @@
# Copyright (C) 2015 Khem Raj <raj.khem@gmail.com>
# Released under the MIT license (see COPYING.MIT for the terms)
-SUMMARY = "Implementation of ftsfor musl libc packages"
+SUMMARY = "Implementation of fts for musl libc packages"
HOMEPAGE = "https://github.com/pullmoll/musl-fts"
DESCRIPTION = "The musl-fts package implements the fts(3) functions fts_open, fts_read, fts_children, fts_set and fts_close, which are missing in musl libc."
LICENSE = "BSD-3-Clause"
diff --git a/meta/recipes-core/gettext/gettext-0.21/0001-libtextstyle-fix-builds-with-automake-1.16.4-and-new.patch b/meta/recipes-core/gettext/gettext-0.21/0001-libtextstyle-fix-builds-with-automake-1.16.4-and-new.patch
deleted file mode 100644
index 727d1db552..0000000000
--- a/meta/recipes-core/gettext/gettext-0.21/0001-libtextstyle-fix-builds-with-automake-1.16.4-and-new.patch
+++ /dev/null
@@ -1,29 +0,0 @@
-From 2f127ebe425c97b0641fe1bc73247e91e18c2be0 Mon Sep 17 00:00:00 2001
-From: Alexander Kanavin <alex@linutronix.de>
-Date: Thu, 11 Nov 2021 22:06:16 +0100
-Subject: [PATCH] libtextstyle: fix builds with automake 1.16.4 and newer
-
-Upstream-Status: Submitted [http://savannah.gnu.org/bugs/?59929]
-Signed-off-by: Alexander Kanavin <alex@linutronix.de>
----
- libtextstyle/configure.ac | 5 ++---
- 1 file changed, 2 insertions(+), 3 deletions(-)
-
-diff --git a/libtextstyle/configure.ac b/libtextstyle/configure.ac
-index 63b4c2a..cab9025 100644
---- a/libtextstyle/configure.ac
-+++ b/libtextstyle/configure.ac
-@@ -17,11 +17,10 @@ dnl along with this program. If not, see <https://www.gnu.org/licenses/>.
- dnl Process this file with autoconf to produce a configure script.
-
- AC_PREREQ([2.63])
--AC_INIT
-+AC_INIT([libtextstyle],
-+ [0.20.1])
- AC_CONFIG_SRCDIR([version.sh])
- AC_CONFIG_AUX_DIR([build-aux])
--. $srcdir/version.sh
--gl_INIT_PACKAGE([libtextstyle], [$VERSION_NUMBER])
- AM_INIT_AUTOMAKE([1.13 silent-rules])
- AM_CONFIG_HEADER([config.h])
-
diff --git a/meta/recipes-core/gettext/gettext-0.21/0001-msgmerge-29-Add-executable-file-mode-bits.patch b/meta/recipes-core/gettext/gettext-0.21/0001-msgmerge-29-Add-executable-file-mode-bits.patch
deleted file mode 100644
index e42a6c0a4d..0000000000
--- a/meta/recipes-core/gettext/gettext-0.21/0001-msgmerge-29-Add-executable-file-mode-bits.patch
+++ /dev/null
@@ -1,23 +0,0 @@
-From 71988f83b69c4ed98d1b9418da80edf11a931894 Mon Sep 17 00:00:00 2001
-From: Khem Raj <raj.khem@gmail.com>
-Date: Wed, 23 Sep 2020 22:08:34 -0700
-Subject: [PATCH] msgmerge-29: Add executable file mode bits
-
-This script is installed with default mode bits and executed directly
-e.g ./msgmerge-29 when doing make check which results in
-
-/bin/bash: line 9: ./msgmerge-29: Permission denied
-
-Upstream-Status: Pending
-Signed-off-by: Khem Raj <raj.khem@gmail.com>
----
- gettext-tools/tests/msgmerge-29 | 0
- 1 file changed, 0 insertions(+), 0 deletions(-)
- mode change 100644 => 100755 gettext-tools/tests/msgmerge-29
-
-diff --git a/gettext-tools/tests/msgmerge-29 b/gettext-tools/tests/msgmerge-29
-old mode 100644
-new mode 100755
---
-2.28.0
-
diff --git a/meta/recipes-core/gettext/gettext-0.21/mingw.patch b/meta/recipes-core/gettext/gettext-0.21/mingw.patch
deleted file mode 100644
index b062c784f6..0000000000
--- a/meta/recipes-core/gettext/gettext-0.21/mingw.patch
+++ /dev/null
@@ -1,28 +0,0 @@
-From 7cf68dffb2adb76375bfb0781e277510523a1f3e Mon Sep 17 00:00:00 2001
-From: Michele Locati <michele@locati.it>
-Date: Thu, 30 Jul 2020 18:58:02 +0200
-Subject: [PATCH] build: Fix build failure on Cygwin and mingw.
-
-* gettext-tools/woe32dll/gettextsrc-exports.c: Export formatstring_ruby.
-
-Upstream-Status: Backport
-Signed-off-by: Alexander Kanavin <alex.kanavin@gmail.com>
----
- gettext-tools/woe32dll/gettextsrc-exports.c | 1 +
- 1 file changed, 1 insertion(+)
-
-diff --git a/gettext-tools/woe32dll/gettextsrc-exports.c b/gettext-tools/woe32dll/gettextsrc-exports.c
-index 4477ae8..6d76089 100644
---- a/gettext-tools/woe32dll/gettextsrc-exports.c
-+++ b/gettext-tools/woe32dll/gettextsrc-exports.c
-@@ -50,6 +50,7 @@ VARIABLE(formatstring_python)
- VARIABLE(formatstring_python_brace)
- VARIABLE(formatstring_qt)
- VARIABLE(formatstring_qt_plural)
-+VARIABLE(formatstring_ruby)
- VARIABLE(formatstring_scheme)
- VARIABLE(formatstring_sh)
- VARIABLE(formatstring_smalltalk)
---
-1.9.1
-
diff --git a/meta/recipes-core/gettext/gettext-0.21/use-pkgconfig.patch b/meta/recipes-core/gettext/gettext-0.21/use-pkgconfig.patch
deleted file mode 100644
index fc77feb270..0000000000
--- a/meta/recipes-core/gettext/gettext-0.21/use-pkgconfig.patch
+++ /dev/null
@@ -1,699 +0,0 @@
-From 15647f679834be633fb4a9aeff4671b9cb95ccb8 Mon Sep 17 00:00:00 2001
-From: Ross Burton <ross.burton@intel.com>
-Date: Tue, 23 Jan 2018 00:54:13 +0000
-Subject: [PATCH] gettext: beat library detection into shape
-
-For reasons which I just can't fathom gnulib doesn't use the expected tools to
-find libraries but badly reinvents the wheel. This will trivially lead to host
-contamination (explicit searches of /usr/lib) or incorrect RPATHs (bad
-canonicalisation resulting in relative paths).
-
-Simply delete all the crazy, and replace with a single call to pkg-config.
-
-Upstream-Status: Inappropriate [upstream still refuse to consider pkg-config]
-Signed-off-by: Ross Burton <ross.burton@intel.com>
-
----
- gettext-tools/gnulib-m4/libxml.m4 | 99 +----------------
- .../gnulib-local/lib/term-styled-ostream.oo.c | 12 +--
- libtextstyle/gnulib-local/m4/libcroco.m4 | 99 +++--------------
- libtextstyle/gnulib-local/m4/libglib.m4 | 100 +++---------------
- libtextstyle/gnulib-m4/libcroco.m4 | 99 +++--------------
- libtextstyle/gnulib-m4/libglib.m4 | 100 +++---------------
- libtextstyle/lib/term-styled-ostream.c | 12 +--
- libtextstyle/lib/term-styled-ostream.oo.c | 12 +--
- 8 files changed, 83 insertions(+), 450 deletions(-)
-
-diff --git a/gettext-tools/gnulib-m4/libxml.m4 b/gettext-tools/gnulib-m4/libxml.m4
-index 2f80c37..30ce58e 100644
---- a/gettext-tools/gnulib-m4/libxml.m4
-+++ b/gettext-tools/gnulib-m4/libxml.m4
-@@ -13,6 +13,7 @@ dnl gl_LIBXML(FORCE-INCLUDED)
- dnl forces the use of the included or an external libxml.
- AC_DEFUN([gl_LIBXML],
- [
-+ AC_REQUIRE([PKG_PROG_PKG_CONFIG])
- AC_REQUIRE([AM_ICONV_LINK])
-
- ifelse([$1], , [
-@@ -30,100 +31,10 @@ AC_DEFUN([gl_LIBXML],
- INCXML=
- ifelse([$1], [yes], , [
- if test "$gl_cv_libxml_use_included" != yes; then
-- dnl Figure out whether we can use a preinstalled libxml2, or have to use
-- dnl the included one.
-- AC_CACHE_VAL([gl_cv_libxml], [
-- gl_cv_libxml=no
-- gl_cv_LIBXML=
-- gl_cv_LTLIBXML=
-- gl_cv_INCXML=
-- gl_save_LIBS="$LIBS"
-- LIBS="$LIBS $LIBICONV"
-- dnl Search for libxml2 and define LIBXML2, LTLIBXML2 and INCXML2
-- dnl accordingly.
-- dnl Don't use xml2-config nor pkg-config, since it doesn't work when
-- dnl cross-compiling or when the C compiler in use is different from the
-- dnl one that built the library.
-- dnl Use a test program that tries to invoke xmlFree. On Cygwin 1.7.x,
-- dnl libxml2 is built in such a way that uses of xmlFree work fine with
-- dnl -Wl,--enable-auto-import but lead to a link error with
-- dnl -Wl,--disable-auto-import.
-- AC_LIB_LINKFLAGS_BODY([xml2])
-- LIBS="$gl_save_LIBS $LIBXML2 $LIBICONV"
-- AC_TRY_LINK([#include <libxml/xmlversion.h>
-- #include <libxml/xmlmemory.h>
-- #include <libxml/xpath.h>
-- ],
-- [xmlCheckVersion (0);
-- xmlFree ((void *) 0);
-- xmlXPathSetContextNode ((void *)0, (void *)0);
-- ],
-- [gl_cv_libxml=yes
-- gl_cv_LIBXML="$LIBXML2 $LIBICONV"
-- gl_cv_LTLIBXML="$LTLIBXML2 $LTLIBICONV"
-- ])
-- if test "$gl_cv_libxml" != yes; then
-- gl_save_CPPFLAGS="$CPPFLAGS"
-- CPPFLAGS="$CPPFLAGS $INCXML2"
-- AC_TRY_LINK([#include <libxml/xmlversion.h>
-- #include <libxml/xmlmemory.h>
-- #include <libxml/xpath.h>
-- ],
-- [xmlCheckVersion (0);
-- xmlFree ((void *) 0);
-- xmlXPathSetContextNode ((void *)0, (void *)0);
-- ],
-- [gl_cv_libxml=yes
-- gl_cv_LIBXML="$LIBXML2 $LIBICONV"
-- gl_cv_LTLIBXML="$LTLIBXML2 $LTLIBICONV"
-- gl_cv_INCXML="$INCXML2"
-- ])
-- if test "$gl_cv_libxml" != yes; then
-- dnl Often the include files are installed in /usr/include/libxml2.
-- dnl In libxml2-2.5, <libxml/xmlversion.h> is self-contained.
-- dnl In libxml2-2.6, it includes <libxml/xmlexports.h> which is
-- dnl self-contained.
-- libxml2_include_dir=
-- AC_TRY_CPP([#include <libxml2/libxml/xmlexports.h>],
-- [gl_ABSOLUTE_HEADER([libxml2/libxml/xmlexports.h])
-- libxml2_include_dir=`echo "$gl_cv_absolute_libxml2_libxml_xmlexports_h" | sed -e 's,.libxml.xmlexports\.h$,,'`
-- ])
-- if test -z "$libxml2_include_dir"; then
-- AC_TRY_CPP([#include <libxml2/libxml/xmlversion.h>],
-- [gl_ABSOLUTE_HEADER([libxml2/libxml/xmlversion.h])
-- libxml2_include_dir=`echo "$gl_cv_absolute_libxml2_libxml_xmlversion_h" | sed -e 's,.libxml.xmlversion\.h$,,'`
-- ])
-- fi
-- if test -n "$libxml2_include_dir" && test -d "$libxml2_include_dir"; then
-- CPPFLAGS="$gl_save_CPPFLAGS -I$libxml2_include_dir"
-- AC_TRY_LINK([#include <libxml/xmlversion.h>
-- #include <libxml/xmlmemory.h>
-- #include <libxml/xpath.h>
-- ],
-- [xmlCheckVersion (0);
-- xmlFree ((void *) 0);
-- xmlXPathSetContextNode ((void *)0, (void *)0);
-- ],
-- [gl_cv_libxml=yes
-- gl_cv_LIBXML="$LIBXML2 $LIBICONV"
-- gl_cv_LTLIBXML="$LTLIBXML2 $LTLIBICONV"
-- gl_cv_INCXML="-I$libxml2_include_dir"
-- ])
-- fi
-- fi
-- CPPFLAGS="$gl_save_CPPFLAGS"
-- fi
-- LIBS="$gl_save_LIBS"
-- ])
-- AC_MSG_CHECKING([for libxml])
-- AC_MSG_RESULT([$gl_cv_libxml])
-- if test $gl_cv_libxml = yes; then
-- LIBXML="$gl_cv_LIBXML"
-- LTLIBXML="$gl_cv_LTLIBXML"
-- INCXML="$gl_cv_INCXML"
-- else
-- gl_cv_libxml_use_included=yes
-- fi
-+ PKG_CHECK_MODULES([XML], [libxml-2.0])
-+ LIBXML=$XML_LIBS
-+ LTLIBXML=$XML_LIBS
-+ INCXML=$XML_CFLAGS
- fi
- ])
- AC_SUBST([LIBXML])
-diff --git a/libtextstyle/gnulib-local/lib/term-styled-ostream.oo.c b/libtextstyle/gnulib-local/lib/term-styled-ostream.oo.c
-index 2ff978f..5ffb17a 100644
---- a/libtextstyle/gnulib-local/lib/term-styled-ostream.oo.c
-+++ b/libtextstyle/gnulib-local/lib/term-styled-ostream.oo.c
-@@ -22,15 +22,15 @@
-
- #include <stdlib.h>
-
--#include <cr-om-parser.h>
--#include <cr-sel-eng.h>
--#include <cr-style.h>
--#include <cr-rgb.h>
-+#include <libcroco/cr-om-parser.h>
-+#include <libcroco/cr-sel-eng.h>
-+#include <libcroco/cr-style.h>
-+#include <libcroco/cr-rgb.h>
- /* <cr-fonts.h> has a broken double-inclusion guard in libcroco-0.6.1. */
- #ifndef __CR_FONTS_H__
--# include <cr-fonts.h>
-+# include <libcroco/cr-fonts.h>
- #endif
--#include <cr-string.h>
-+#include <libcroco/cr-string.h>
-
- #include "term-ostream.h"
- #include "mem-hash-map.h"
-diff --git a/libtextstyle/gnulib-local/m4/libcroco.m4 b/libtextstyle/gnulib-local/m4/libcroco.m4
-index bc53cc6..10b2455 100644
---- a/libtextstyle/gnulib-local/m4/libcroco.m4
-+++ b/libtextstyle/gnulib-local/m4/libcroco.m4
-@@ -1,99 +1,34 @@
--# libcroco.m4 serial 3
--dnl Copyright (C) 2006-2007, 2019 Free Software Foundation, Inc.
-+# libcroco.m4 serial 2 (gettext-0.17)
-+dnl Copyright (C) 2006, 2015-2016 Free Software Foundation, Inc.
- dnl This file is free software; the Free Software Foundation
- dnl gives unlimited permission to copy and/or distribute it,
- dnl with or without modifications, as long as this notice is preserved.
-
- dnl From Bruno Haible.
-
--dnl gl_LIBCROCO
--dnl gives the user the option to decide whether to use the included or
--dnl an external libcroco.
--dnl gl_LIBCROCO(FORCE-INCLUDED)
--dnl forces the use of the included or an external libcroco.
- AC_DEFUN([gl_LIBCROCO],
- [
-- ifelse([$1], [yes], , [
-- dnl libcroco depends on libglib.
-- AC_REQUIRE([gl_LIBGLIB])
-- ])
-+ AC_REQUIRE([PKG_PROG_PKG_CONFIG])
-+ dnl libcroco depends on libglib.
-+ AC_REQUIRE([gl_LIBGLIB])
-
-- ifelse([$1], , [
-- AC_MSG_CHECKING([whether included libcroco is requested])
-- AC_ARG_WITH([included-libcroco],
-- [ --with-included-libcroco use the libcroco included here],
-- [gl_cv_libcroco_force_included=$withval],
-- [gl_cv_libcroco_force_included=no])
-- AC_MSG_RESULT([$gl_cv_libcroco_force_included])
-- ], [gl_cv_libcroco_force_included=$1])
-+ AC_MSG_CHECKING([whether included libcroco is requested])
-+ AC_ARG_WITH([included-libcroco],
-+ [ --with-included-libcroco use the libcroco included here],
-+ [gl_cv_libcroco_force_included=$withval],
-+ [gl_cv_libcroco_force_included=no])
-+ AC_MSG_RESULT([$gl_cv_libcroco_force_included])
-
- gl_cv_libcroco_use_included="$gl_cv_libcroco_force_included"
- LIBCROCO=
- LTLIBCROCO=
- INCCROCO=
-- ifelse([$1], [yes], , [
-- if test "$gl_cv_libcroco_use_included" != yes; then
-- dnl Figure out whether we can use a preinstalled libcroco-0.6, or have to
-- dnl use the included one.
-- AC_CACHE_VAL([gl_cv_libcroco], [
-- gl_cv_libcroco=no
-- gl_cv_LIBCROCO=
-- gl_cv_LTLIBCROCO=
-- gl_cv_INCCROCO=
-- gl_save_LIBS="$LIBS"
-- dnl Search for libcroco and define LIBCROCO_0_6, LTLIBCROCO_0_6 and
-- dnl INCCROCO_0_6 accordingly.
-- dnl Don't use croco-0.6-config nor pkg-config, since it doesn't work when
-- dnl cross-compiling or when the C compiler in use is different from the
-- dnl one that built the library.
-- AC_LIB_LINKFLAGS_BODY([croco-0.6], [glib-2.0])
-- LIBS="$gl_save_LIBS $LIBCROCO_0_6"
-- AC_TRY_LINK([#include <libcroco-config.h>],
-- [const char *version = LIBCROCO_VERSION; return !version;],
-- [gl_cv_libcroco=yes
-- gl_cv_LIBCROCO="$LIBCROCO_0_6"
-- gl_cv_LTLIBCROCO="$LTLIBCROCO_0_6"
-- ])
-- if test "$gl_cv_libcroco" != yes; then
-- gl_save_CPPFLAGS="$CPPFLAGS"
-- CPPFLAGS="$CPPFLAGS $INCCROCO_0_6"
-- AC_TRY_LINK([#include <libcroco-config.h>],
-- [const char *version = LIBCROCO_VERSION; return !version;],
-- [gl_cv_libcroco=yes
-- gl_cv_LIBCROCO="$LIBCROCO_0_6"
-- gl_cv_LTLIBCROCO="$LTLIBCROCO_0_6"
-- gl_cv_INCCROCO="$INCCROCO_0_6"
-- ])
-- if test "$gl_cv_libcroco" != yes; then
-- dnl Often the include files are installed in
-- dnl /usr/include/libcroco-0.6/libcroco.
-- AC_TRY_LINK([#include <libcroco-0.6/libcroco/libcroco-config.h>],
-- [const char *version = LIBCROCO_VERSION; return !version;],
-- [gl_ABSOLUTE_HEADER([libcroco-0.6/libcroco/libcroco-config.h])
-- libcroco_include_dir=`echo "$gl_cv_absolute_libcroco_0_6_libcroco_libcroco_config_h" | sed -e 's,.libcroco-config\.h$,,'`
-- if test -d "$libcroco_include_dir"; then
-- gl_cv_libcroco=yes
-- gl_cv_LIBCROCO="$LIBCROCO_0_6"
-- gl_cv_LTLIBCROCO="$LTLIBCROCO_0_6"
-- gl_cv_INCCROCO="-I$libcroco_include_dir"
-- fi
-- ])
-- fi
-- CPPFLAGS="$gl_save_CPPFLAGS"
-- fi
-- LIBS="$gl_save_LIBS"
-- ])
-- AC_MSG_CHECKING([for libcroco])
-- AC_MSG_RESULT([$gl_cv_libcroco])
-- if test $gl_cv_libcroco = yes; then
-- LIBCROCO="$gl_cv_LIBCROCO"
-- LTLIBCROCO="$gl_cv_LTLIBCROCO"
-- INCCROCO="$gl_cv_INCCROCO"
-- else
-- gl_cv_libcroco_use_included=yes
-- fi
-- fi
-- ])
-+ if test "$gl_cv_libcroco_use_included" != yes; then
-+ PKG_CHECK_MODULES([CROCO], [libcroco-0.6])
-+ LIBCROCO=$CROCO_LIBS
-+ LTLIBCROCO=$CROCO_LIBS
-+ INCCROCO=$CROCO_CFLAGS
-+ fi
- AC_SUBST([LIBCROCO])
- AC_SUBST([LTLIBCROCO])
- AC_SUBST([INCCROCO])
-diff --git a/libtextstyle/gnulib-local/m4/libglib.m4 b/libtextstyle/gnulib-local/m4/libglib.m4
-index 5853772..767fba2 100644
---- a/libtextstyle/gnulib-local/m4/libglib.m4
-+++ b/libtextstyle/gnulib-local/m4/libglib.m4
-@@ -6,100 +6,26 @@ dnl with or without modifications, as long as this notice is preserved.
-
- dnl From Bruno Haible.
-
--dnl gl_LIBGLIB
--dnl gives the user the option to decide whether to use the included or
--dnl an external libglib.
--dnl gl_LIBGLIB(FORCE-INCLUDED)
--dnl forces the use of the included or an external libglib.
- AC_DEFUN([gl_LIBGLIB],
- [
-- ifelse([$1], , [
-- AC_MSG_CHECKING([whether included glib is requested])
-- AC_ARG_WITH([included-glib],
-- [ --with-included-glib use the glib2 included here],
-- [gl_cv_libglib_force_included=$withval],
-- [gl_cv_libglib_force_included=no])
-- AC_MSG_RESULT([$gl_cv_libglib_force_included])
-- ], [gl_cv_libglib_force_included=$1])
-+ AC_REQUIRE([PKG_PROG_PKG_CONFIG])
-+ AC_MSG_CHECKING([whether included glib is requested])
-+ AC_ARG_WITH([included-glib],
-+ [ --with-included-glib use the glib2 included here],
-+ [gl_cv_libglib_force_included=$withval],
-+ [gl_cv_libglib_force_included=no])
-+ AC_MSG_RESULT([$gl_cv_libglib_force_included])
-
- gl_cv_libglib_use_included="$gl_cv_libglib_force_included"
- LIBGLIB=
- LTLIBGLIB=
- INCGLIB=
-- ifelse([$1], [yes], , [
-- if test "$gl_cv_libglib_use_included" != yes; then
-- dnl Figure out whether we can use a preinstalled libglib-2.0, or have to use
-- dnl the included one.
-- AC_CACHE_VAL([gl_cv_libglib], [
-- gl_cv_libglib=no
-- gl_cv_LIBGLIB=
-- gl_cv_LTLIBGLIB=
-- gl_cv_INCGLIB=
-- gl_save_LIBS="$LIBS"
-- dnl Search for libglib2 and define LIBGLIB_2_0, LTLIBGLIB_2_0 and
-- dnl INCGLIB_2_0 accordingly.
-- dnl Don't use glib-config nor pkg-config, since it doesn't work when
-- dnl cross-compiling or when the C compiler in use is different from the
-- dnl one that built the library.
-- AC_LIB_LINKFLAGS_BODY([glib-2.0])
-- LIBS="$gl_save_LIBS $LIBGLIB_2_0"
-- AC_TRY_LINK([#include <glib.h>
--#ifndef G_BEGIN_DECLS
--error this glib.h includes a glibconfig.h from a glib version 1.x
--#endif
--],
-- [g_string_new ("foo");],
-- [gl_cv_libglib=yes
-- gl_cv_LIBGLIB="$LIBGLIB_2_0"
-- gl_cv_LTLIBGLIB="$LTLIBGLIB_2_0"
-- ])
-- if test "$gl_cv_libglib" != yes; then
-- gl_save_CPPFLAGS="$CPPFLAGS"
-- CPPFLAGS="$CPPFLAGS $INCGLIB_2_0"
-- AC_TRY_LINK([#include <glib.h>
--#ifndef G_BEGIN_DECLS
--error this glib.h includes a glibconfig.h from a glib version 1.x
--#endif
--],
-- [g_string_new ("foo");],
-- [gl_cv_libglib=yes
-- gl_cv_LIBGLIB="$LIBGLIB_2_0"
-- gl_cv_LTLIBGLIB="$LTLIBGLIB_2_0"
-- gl_cv_INCGLIB="$INCGLIB_2_0"
-- ])
-- if test "$gl_cv_libglib" != yes; then
-- dnl Often the include files are installed in /usr/include/glib-2.0
-- dnl and /usr/lib/glib-2.0/include.
-- if test -n "$LIBGLIB_2_0_PREFIX"; then
-- CPPFLAGS="$gl_save_CPPFLAGS -I$LIBGLIB_2_0_PREFIX/include/glib-2.0 -I$LIBGLIB_2_0_PREFIX/$acl_libdirstem/glib-2.0/include"
-- AC_TRY_LINK([#include <glib.h>
--#ifndef G_BEGIN_DECLS
--error this glib.h includes a glibconfig.h from a glib version 1.x
--#endif
--],
-- [g_string_new ("foo");],
-- [gl_cv_libglib=yes
-- gl_cv_LIBGLIB="$LIBGLIB_2_0"
-- gl_cv_LTLIBGLIB="$LTLIBGLIB_2_0"
-- gl_cv_INCGLIB="-I$LIBGLIB_2_0_PREFIX/include/glib-2.0 -I$LIBGLIB_2_0_PREFIX/$acl_libdirstem/glib-2.0/include"
-- ])
-- fi
-- fi
-- CPPFLAGS="$gl_save_CPPFLAGS"
-- fi
-- LIBS="$gl_save_LIBS"
-- ])
-- AC_MSG_CHECKING([for glib])
-- AC_MSG_RESULT([$gl_cv_libglib])
-- if test $gl_cv_libglib = yes; then
-- LIBGLIB="$gl_cv_LIBGLIB"
-- LTLIBGLIB="$gl_cv_LTLIBGLIB"
-- INCGLIB="$gl_cv_INCGLIB"
-- else
-- gl_cv_libglib_use_included=yes
-- fi
-- fi
-- ])
-+ if test "$gl_cv_libglib_use_included" != yes; then
-+ PKG_CHECK_MODULES([GLIB], [glib-2.0])
-+ LIBGLIB="$GLIB_LIBS"
-+ LTLIBGLIB="$GLIB_LIBS"
-+ INCGLIB="$GLIB_CFLAGS"
-+ fi
- AC_SUBST([LIBGLIB])
- AC_SUBST([LTLIBGLIB])
- AC_SUBST([INCGLIB])
-diff --git a/libtextstyle/gnulib-m4/libcroco.m4 b/libtextstyle/gnulib-m4/libcroco.m4
-index bc53cc6..10b2455 100644
---- a/libtextstyle/gnulib-m4/libcroco.m4
-+++ b/libtextstyle/gnulib-m4/libcroco.m4
-@@ -1,99 +1,34 @@
--# libcroco.m4 serial 3
--dnl Copyright (C) 2006-2007, 2019 Free Software Foundation, Inc.
-+# libcroco.m4 serial 2 (gettext-0.17)
-+dnl Copyright (C) 2006, 2015-2016 Free Software Foundation, Inc.
- dnl This file is free software; the Free Software Foundation
- dnl gives unlimited permission to copy and/or distribute it,
- dnl with or without modifications, as long as this notice is preserved.
-
- dnl From Bruno Haible.
-
--dnl gl_LIBCROCO
--dnl gives the user the option to decide whether to use the included or
--dnl an external libcroco.
--dnl gl_LIBCROCO(FORCE-INCLUDED)
--dnl forces the use of the included or an external libcroco.
- AC_DEFUN([gl_LIBCROCO],
- [
-- ifelse([$1], [yes], , [
-- dnl libcroco depends on libglib.
-- AC_REQUIRE([gl_LIBGLIB])
-- ])
-+ AC_REQUIRE([PKG_PROG_PKG_CONFIG])
-+ dnl libcroco depends on libglib.
-+ AC_REQUIRE([gl_LIBGLIB])
-
-- ifelse([$1], , [
-- AC_MSG_CHECKING([whether included libcroco is requested])
-- AC_ARG_WITH([included-libcroco],
-- [ --with-included-libcroco use the libcroco included here],
-- [gl_cv_libcroco_force_included=$withval],
-- [gl_cv_libcroco_force_included=no])
-- AC_MSG_RESULT([$gl_cv_libcroco_force_included])
-- ], [gl_cv_libcroco_force_included=$1])
-+ AC_MSG_CHECKING([whether included libcroco is requested])
-+ AC_ARG_WITH([included-libcroco],
-+ [ --with-included-libcroco use the libcroco included here],
-+ [gl_cv_libcroco_force_included=$withval],
-+ [gl_cv_libcroco_force_included=no])
-+ AC_MSG_RESULT([$gl_cv_libcroco_force_included])
-
- gl_cv_libcroco_use_included="$gl_cv_libcroco_force_included"
- LIBCROCO=
- LTLIBCROCO=
- INCCROCO=
-- ifelse([$1], [yes], , [
-- if test "$gl_cv_libcroco_use_included" != yes; then
-- dnl Figure out whether we can use a preinstalled libcroco-0.6, or have to
-- dnl use the included one.
-- AC_CACHE_VAL([gl_cv_libcroco], [
-- gl_cv_libcroco=no
-- gl_cv_LIBCROCO=
-- gl_cv_LTLIBCROCO=
-- gl_cv_INCCROCO=
-- gl_save_LIBS="$LIBS"
-- dnl Search for libcroco and define LIBCROCO_0_6, LTLIBCROCO_0_6 and
-- dnl INCCROCO_0_6 accordingly.
-- dnl Don't use croco-0.6-config nor pkg-config, since it doesn't work when
-- dnl cross-compiling or when the C compiler in use is different from the
-- dnl one that built the library.
-- AC_LIB_LINKFLAGS_BODY([croco-0.6], [glib-2.0])
-- LIBS="$gl_save_LIBS $LIBCROCO_0_6"
-- AC_TRY_LINK([#include <libcroco-config.h>],
-- [const char *version = LIBCROCO_VERSION; return !version;],
-- [gl_cv_libcroco=yes
-- gl_cv_LIBCROCO="$LIBCROCO_0_6"
-- gl_cv_LTLIBCROCO="$LTLIBCROCO_0_6"
-- ])
-- if test "$gl_cv_libcroco" != yes; then
-- gl_save_CPPFLAGS="$CPPFLAGS"
-- CPPFLAGS="$CPPFLAGS $INCCROCO_0_6"
-- AC_TRY_LINK([#include <libcroco-config.h>],
-- [const char *version = LIBCROCO_VERSION; return !version;],
-- [gl_cv_libcroco=yes
-- gl_cv_LIBCROCO="$LIBCROCO_0_6"
-- gl_cv_LTLIBCROCO="$LTLIBCROCO_0_6"
-- gl_cv_INCCROCO="$INCCROCO_0_6"
-- ])
-- if test "$gl_cv_libcroco" != yes; then
-- dnl Often the include files are installed in
-- dnl /usr/include/libcroco-0.6/libcroco.
-- AC_TRY_LINK([#include <libcroco-0.6/libcroco/libcroco-config.h>],
-- [const char *version = LIBCROCO_VERSION; return !version;],
-- [gl_ABSOLUTE_HEADER([libcroco-0.6/libcroco/libcroco-config.h])
-- libcroco_include_dir=`echo "$gl_cv_absolute_libcroco_0_6_libcroco_libcroco_config_h" | sed -e 's,.libcroco-config\.h$,,'`
-- if test -d "$libcroco_include_dir"; then
-- gl_cv_libcroco=yes
-- gl_cv_LIBCROCO="$LIBCROCO_0_6"
-- gl_cv_LTLIBCROCO="$LTLIBCROCO_0_6"
-- gl_cv_INCCROCO="-I$libcroco_include_dir"
-- fi
-- ])
-- fi
-- CPPFLAGS="$gl_save_CPPFLAGS"
-- fi
-- LIBS="$gl_save_LIBS"
-- ])
-- AC_MSG_CHECKING([for libcroco])
-- AC_MSG_RESULT([$gl_cv_libcroco])
-- if test $gl_cv_libcroco = yes; then
-- LIBCROCO="$gl_cv_LIBCROCO"
-- LTLIBCROCO="$gl_cv_LTLIBCROCO"
-- INCCROCO="$gl_cv_INCCROCO"
-- else
-- gl_cv_libcroco_use_included=yes
-- fi
-- fi
-- ])
-+ if test "$gl_cv_libcroco_use_included" != yes; then
-+ PKG_CHECK_MODULES([CROCO], [libcroco-0.6])
-+ LIBCROCO=$CROCO_LIBS
-+ LTLIBCROCO=$CROCO_LIBS
-+ INCCROCO=$CROCO_CFLAGS
-+ fi
- AC_SUBST([LIBCROCO])
- AC_SUBST([LTLIBCROCO])
- AC_SUBST([INCCROCO])
-diff --git a/libtextstyle/gnulib-m4/libglib.m4 b/libtextstyle/gnulib-m4/libglib.m4
-index 5853772..767fba2 100644
---- a/libtextstyle/gnulib-m4/libglib.m4
-+++ b/libtextstyle/gnulib-m4/libglib.m4
-@@ -6,100 +6,26 @@ dnl with or without modifications, as long as this notice is preserved.
-
- dnl From Bruno Haible.
-
--dnl gl_LIBGLIB
--dnl gives the user the option to decide whether to use the included or
--dnl an external libglib.
--dnl gl_LIBGLIB(FORCE-INCLUDED)
--dnl forces the use of the included or an external libglib.
- AC_DEFUN([gl_LIBGLIB],
- [
-- ifelse([$1], , [
-- AC_MSG_CHECKING([whether included glib is requested])
-- AC_ARG_WITH([included-glib],
-- [ --with-included-glib use the glib2 included here],
-- [gl_cv_libglib_force_included=$withval],
-- [gl_cv_libglib_force_included=no])
-- AC_MSG_RESULT([$gl_cv_libglib_force_included])
-- ], [gl_cv_libglib_force_included=$1])
-+ AC_REQUIRE([PKG_PROG_PKG_CONFIG])
-+ AC_MSG_CHECKING([whether included glib is requested])
-+ AC_ARG_WITH([included-glib],
-+ [ --with-included-glib use the glib2 included here],
-+ [gl_cv_libglib_force_included=$withval],
-+ [gl_cv_libglib_force_included=no])
-+ AC_MSG_RESULT([$gl_cv_libglib_force_included])
-
- gl_cv_libglib_use_included="$gl_cv_libglib_force_included"
- LIBGLIB=
- LTLIBGLIB=
- INCGLIB=
-- ifelse([$1], [yes], , [
-- if test "$gl_cv_libglib_use_included" != yes; then
-- dnl Figure out whether we can use a preinstalled libglib-2.0, or have to use
-- dnl the included one.
-- AC_CACHE_VAL([gl_cv_libglib], [
-- gl_cv_libglib=no
-- gl_cv_LIBGLIB=
-- gl_cv_LTLIBGLIB=
-- gl_cv_INCGLIB=
-- gl_save_LIBS="$LIBS"
-- dnl Search for libglib2 and define LIBGLIB_2_0, LTLIBGLIB_2_0 and
-- dnl INCGLIB_2_0 accordingly.
-- dnl Don't use glib-config nor pkg-config, since it doesn't work when
-- dnl cross-compiling or when the C compiler in use is different from the
-- dnl one that built the library.
-- AC_LIB_LINKFLAGS_BODY([glib-2.0])
-- LIBS="$gl_save_LIBS $LIBGLIB_2_0"
-- AC_TRY_LINK([#include <glib.h>
--#ifndef G_BEGIN_DECLS
--error this glib.h includes a glibconfig.h from a glib version 1.x
--#endif
--],
-- [g_string_new ("foo");],
-- [gl_cv_libglib=yes
-- gl_cv_LIBGLIB="$LIBGLIB_2_0"
-- gl_cv_LTLIBGLIB="$LTLIBGLIB_2_0"
-- ])
-- if test "$gl_cv_libglib" != yes; then
-- gl_save_CPPFLAGS="$CPPFLAGS"
-- CPPFLAGS="$CPPFLAGS $INCGLIB_2_0"
-- AC_TRY_LINK([#include <glib.h>
--#ifndef G_BEGIN_DECLS
--error this glib.h includes a glibconfig.h from a glib version 1.x
--#endif
--],
-- [g_string_new ("foo");],
-- [gl_cv_libglib=yes
-- gl_cv_LIBGLIB="$LIBGLIB_2_0"
-- gl_cv_LTLIBGLIB="$LTLIBGLIB_2_0"
-- gl_cv_INCGLIB="$INCGLIB_2_0"
-- ])
-- if test "$gl_cv_libglib" != yes; then
-- dnl Often the include files are installed in /usr/include/glib-2.0
-- dnl and /usr/lib/glib-2.0/include.
-- if test -n "$LIBGLIB_2_0_PREFIX"; then
-- CPPFLAGS="$gl_save_CPPFLAGS -I$LIBGLIB_2_0_PREFIX/include/glib-2.0 -I$LIBGLIB_2_0_PREFIX/$acl_libdirstem/glib-2.0/include"
-- AC_TRY_LINK([#include <glib.h>
--#ifndef G_BEGIN_DECLS
--error this glib.h includes a glibconfig.h from a glib version 1.x
--#endif
--],
-- [g_string_new ("foo");],
-- [gl_cv_libglib=yes
-- gl_cv_LIBGLIB="$LIBGLIB_2_0"
-- gl_cv_LTLIBGLIB="$LTLIBGLIB_2_0"
-- gl_cv_INCGLIB="-I$LIBGLIB_2_0_PREFIX/include/glib-2.0 -I$LIBGLIB_2_0_PREFIX/$acl_libdirstem/glib-2.0/include"
-- ])
-- fi
-- fi
-- CPPFLAGS="$gl_save_CPPFLAGS"
-- fi
-- LIBS="$gl_save_LIBS"
-- ])
-- AC_MSG_CHECKING([for glib])
-- AC_MSG_RESULT([$gl_cv_libglib])
-- if test $gl_cv_libglib = yes; then
-- LIBGLIB="$gl_cv_LIBGLIB"
-- LTLIBGLIB="$gl_cv_LTLIBGLIB"
-- INCGLIB="$gl_cv_INCGLIB"
-- else
-- gl_cv_libglib_use_included=yes
-- fi
-- fi
-- ])
-+ if test "$gl_cv_libglib_use_included" != yes; then
-+ PKG_CHECK_MODULES([GLIB], [glib-2.0])
-+ LIBGLIB="$GLIB_LIBS"
-+ LTLIBGLIB="$GLIB_LIBS"
-+ INCGLIB="$GLIB_CFLAGS"
-+ fi
- AC_SUBST([LIBGLIB])
- AC_SUBST([LTLIBGLIB])
- AC_SUBST([INCGLIB])
-diff --git a/libtextstyle/lib/term-styled-ostream.c b/libtextstyle/lib/term-styled-ostream.c
-index 5484800..16793fa 100644
---- a/libtextstyle/lib/term-styled-ostream.c
-+++ b/libtextstyle/lib/term-styled-ostream.c
-@@ -28,15 +28,15 @@
-
- #include <stdlib.h>
-
--#include <cr-om-parser.h>
--#include <cr-sel-eng.h>
--#include <cr-style.h>
--#include <cr-rgb.h>
-+#include <libcroco/cr-om-parser.h>
-+#include <libcroco/cr-sel-eng.h>
-+#include <libcroco/cr-style.h>
-+#include <libcroco/cr-rgb.h>
- /* <cr-fonts.h> has a broken double-inclusion guard in libcroco-0.6.1. */
- #ifndef __CR_FONTS_H__
--# include <cr-fonts.h>
-+# include <libcroco/cr-fonts.h>
- #endif
--#include <cr-string.h>
-+#include <libcroco/cr-string.h>
-
- #include "term-ostream.h"
- #include "mem-hash-map.h"
-diff --git a/libtextstyle/lib/term-styled-ostream.oo.c b/libtextstyle/lib/term-styled-ostream.oo.c
-index 2ff978f..5ffb17a 100644
---- a/libtextstyle/lib/term-styled-ostream.oo.c
-+++ b/libtextstyle/lib/term-styled-ostream.oo.c
-@@ -22,15 +22,15 @@
-
- #include <stdlib.h>
-
--#include <cr-om-parser.h>
--#include <cr-sel-eng.h>
--#include <cr-style.h>
--#include <cr-rgb.h>
-+#include <libcroco/cr-om-parser.h>
-+#include <libcroco/cr-sel-eng.h>
-+#include <libcroco/cr-style.h>
-+#include <libcroco/cr-rgb.h>
- /* <cr-fonts.h> has a broken double-inclusion guard in libcroco-0.6.1. */
- #ifndef __CR_FONTS_H__
--# include <cr-fonts.h>
-+# include <libcroco/cr-fonts.h>
- #endif
--#include <cr-string.h>
-+#include <libcroco/cr-string.h>
-
- #include "term-ostream.h"
- #include "mem-hash-map.h"
diff --git a/meta/recipes-core/gettext/gettext-minimal-0.21/Makefile.in.in b/meta/recipes-core/gettext/gettext-minimal-0.21/Makefile.in.in
deleted file mode 100644
index 6b25f0d916..0000000000
--- a/meta/recipes-core/gettext/gettext-minimal-0.21/Makefile.in.in
+++ /dev/null
@@ -1,510 +0,0 @@
-# Makefile for PO directory in any package using GNU gettext.
-# Copyright (C) 1995-2000 Ulrich Drepper <drepper@gnu.ai.mit.edu>
-# Copyright (C) 2000-2020 Free Software Foundation, Inc.
-#
-# Copying and distribution of this file, with or without modification,
-# are permitted in any medium without royalty provided the copyright
-# notice and this notice are preserved. This file is offered as-is,
-# without any warranty.
-#
-# Origin: gettext-0.21
-GETTEXT_MACRO_VERSION = 0.20
-
-PACKAGE = @PACKAGE@
-VERSION = @VERSION@
-PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@
-
-SED = @SED@
-SHELL = /bin/sh
-@SET_MAKE@
-
-srcdir = @srcdir@
-top_srcdir = @top_srcdir@
-VPATH = @srcdir@
-
-prefix = @prefix@
-exec_prefix = @exec_prefix@
-datarootdir = @datarootdir@
-datadir = @datadir@
-localedir = @localedir@
-gettextsrcdir = $(datadir)/gettext/po
-
-INSTALL = @INSTALL@
-INSTALL_DATA = @INSTALL_DATA@
-
-# We use $(mkdir_p).
-# In automake <= 1.9.x, $(mkdir_p) is defined either as "mkdir -p --" or as
-# "$(mkinstalldirs)" or as "$(install_sh) -d". For these automake versions,
-# @install_sh@ does not start with $(SHELL), so we add it.
-# In automake >= 1.10, @mkdir_p@ is derived from ${MKDIR_P}, which is defined
-# either as "/path/to/mkdir -p" or ".../install-sh -c -d". For these automake
-# versions, $(mkinstalldirs) and $(install_sh) are unused.
-mkinstalldirs = $(SHELL) @install_sh@ -d
-install_sh = $(SHELL) @install_sh@
-MKDIR_P = @MKDIR_P@
-mkdir_p = @mkdir_p@
-
-# When building gettext-tools, we prefer to use the built programs
-# rather than installed programs. However, we can't do that when we
-# are cross compiling.
-CROSS_COMPILING = @CROSS_COMPILING@
-
-GMSGFMT_ = @GMSGFMT@
-GMSGFMT_no = @GMSGFMT@
-GMSGFMT_yes = @GMSGFMT_015@
-GMSGFMT = $(GMSGFMT_$(USE_MSGCTXT))
-XGETTEXT_ = @XGETTEXT@
-XGETTEXT_no = @XGETTEXT@
-XGETTEXT_yes = @XGETTEXT_015@
-XGETTEXT = $(XGETTEXT_$(USE_MSGCTXT))
-MSGMERGE = @MSGMERGE@
-MSGMERGE_UPDATE = @MSGMERGE@ --update
-MSGMERGE_FOR_MSGFMT_OPTION = @MSGMERGE_FOR_MSGFMT_OPTION@
-MSGINIT = msginit
-MSGCONV = msgconv
-MSGFILTER = msgfilter
-
-POFILES = @POFILES@
-GMOFILES = @GMOFILES@
-UPDATEPOFILES = @UPDATEPOFILES@
-DUMMYPOFILES = @DUMMYPOFILES@
-DISTFILES.common = Makefile.in.in remove-potcdate.sin \
-$(DISTFILES.common.extra1) $(DISTFILES.common.extra2) $(DISTFILES.common.extra3)
-DISTFILES = $(DISTFILES.common) Makevars POTFILES.in \
-$(POFILES) $(GMOFILES) \
-$(DISTFILES.extra1) $(DISTFILES.extra2) $(DISTFILES.extra3)
-
-POTFILES = \
-
-CATALOGS = @CATALOGS@
-
-POFILESDEPS_ = $(srcdir)/$(DOMAIN).pot
-POFILESDEPS_yes = $(POFILESDEPS_)
-POFILESDEPS_no =
-POFILESDEPS = $(POFILESDEPS_$(PO_DEPENDS_ON_POT))
-
-DISTFILESDEPS_ = update-po
-DISTFILESDEPS_yes = $(DISTFILESDEPS_)
-DISTFILESDEPS_no =
-DISTFILESDEPS = $(DISTFILESDEPS_$(DIST_DEPENDS_ON_UPDATE_PO))
-
-# Makevars gets inserted here. (Don't remove this line!)
-
-all: all-@USE_NLS@
-
-
-.SUFFIXES:
-.SUFFIXES: .po .gmo .sed .sin .nop .po-create .po-update
-
-# The .pot file, stamp-po, .po files, and .gmo files appear in release tarballs.
-# The GNU Coding Standards say in
-# <https://www.gnu.org/prep/standards/html_node/Makefile-Basics.html>:
-# "GNU distributions usually contain some files which are not source files
-# ... . Since these files normally appear in the source directory, they
-# should always appear in the source directory, not in the build directory.
-# So Makefile rules to update them should put the updated files in the
-# source directory."
-# Therefore we put these files in the source directory, not the build directory.
-
-# During .po -> .gmo conversion, take into account the most recent changes to
-# the .pot file. This eliminates the need to update the .po files when the
-# .pot file has changed, which would be troublesome if the .po files are put
-# under version control.
-$(GMOFILES): $(srcdir)/$(DOMAIN).pot
-.po.gmo:
- @lang=`echo $* | sed -e 's,.*/,,'`; \
- test "$(srcdir)" = . && cdcmd="" || cdcmd="cd $(srcdir) && "; \
- echo "$${cdcmd}rm -f $${lang}.gmo && $(MSGMERGE) $(MSGMERGE_FOR_MSGFMT_OPTION) -o $${lang}.1po $${lang}.po $(DOMAIN).pot && $(GMSGFMT) -c --statistics --verbose -o $${lang}.gmo $${lang}.1po && rm -f $${lang}.1po"; \
- cd $(srcdir) && \
- rm -f $${lang}.gmo && \
- $(MSGMERGE) $(MSGMERGE_FOR_MSGFMT_OPTION) -o $${lang}.1po $${lang}.po $(DOMAIN).pot && \
- $(GMSGFMT) -c --statistics --verbose -o t-$${lang}.gmo $${lang}.1po && \
- mv t-$${lang}.gmo $${lang}.gmo && \
- rm -f $${lang}.1po
-
-.sin.sed:
- sed -e '/^#/d' $< > t-$@
- mv t-$@ $@
-
-
-all-yes: $(srcdir)/stamp-po
-all-no:
-
-# Ensure that the gettext macros and this Makefile.in.in are in sync.
-CHECK_MACRO_VERSION = \
- test "$(GETTEXT_MACRO_VERSION)" = "@GETTEXT_MACRO_VERSION@" \
- || { echo "*** error: gettext infrastructure mismatch: using a Makefile.in.in from gettext version $(GETTEXT_MACRO_VERSION) but the autoconf macros are from gettext version @GETTEXT_MACRO_VERSION@" 1>&2; \
- exit 1; \
- }
-
-# $(srcdir)/$(DOMAIN).pot is only created when needed. When xgettext finds no
-# internationalized messages, no $(srcdir)/$(DOMAIN).pot is created (because
-# we don't want to bother translators with empty POT files). We assume that
-# LINGUAS is empty in this case, i.e. $(POFILES) and $(GMOFILES) are empty.
-# In this case, $(srcdir)/stamp-po is a nop (i.e. a phony target).
-
-# $(srcdir)/stamp-po is a timestamp denoting the last time at which the CATALOGS
-# have been loosely updated. Its purpose is that when a developer or translator
-# checks out the package from a version control system, and the $(DOMAIN).pot
-# file is not under version control, "make" will update the $(DOMAIN).pot and
-# the $(CATALOGS), but subsequent invocations of "make" will do nothing. This
-# timestamp would not be necessary if updating the $(CATALOGS) would always
-# touch them; however, the rule for $(POFILES) has been designed to not touch
-# files that don't need to be changed.
-$(srcdir)/stamp-po: $(srcdir)/$(DOMAIN).pot
- @$(CHECK_MACRO_VERSION)
- test ! -f $(srcdir)/$(DOMAIN).pot || \
- test -z "$(GMOFILES)" || $(MAKE) $(GMOFILES)
- @test ! -f $(srcdir)/$(DOMAIN).pot || { \
- echo "touch $(srcdir)/stamp-po" && \
- echo timestamp > $(srcdir)/stamp-poT && \
- mv $(srcdir)/stamp-poT $(srcdir)/stamp-po; \
- }
-
-# Note: Target 'all' must not depend on target '$(DOMAIN).pot-update',
-# otherwise packages like GCC can not be built if only parts of the source
-# have been downloaded.
-
-# This target rebuilds $(DOMAIN).pot; it is an expensive operation.
-# Note that $(DOMAIN).pot is not touched if it doesn't need to be changed.
-# The determination of whether the package xyz is a GNU one is based on the
-# heuristic whether some file in the top level directory mentions "GNU xyz".
-# If GNU 'find' is available, we avoid grepping through monster files.
-$(DOMAIN).pot-update: $(POTFILES) $(srcdir)/POTFILES.in remove-potcdate.sed
- package_gnu="$(PACKAGE_GNU)"; \
- test -n "$$package_gnu" || { \
- if { if (LC_ALL=C find --version) 2>/dev/null | grep GNU >/dev/null; then \
- LC_ALL=C find -L $(top_srcdir) -maxdepth 1 -type f -size -10000000c -exec grep -i 'GNU @PACKAGE@' /dev/null '{}' ';' 2>/dev/null; \
- else \
- LC_ALL=C grep -i 'GNU @PACKAGE@' $(top_srcdir)/* 2>/dev/null; \
- fi; \
- } | grep -v 'libtool:' >/dev/null; then \
- package_gnu=yes; \
- else \
- package_gnu=no; \
- fi; \
- }; \
- if test "$$package_gnu" = "yes"; then \
- package_prefix='GNU '; \
- else \
- package_prefix=''; \
- fi; \
- if test -n '$(MSGID_BUGS_ADDRESS)' || test '$(PACKAGE_BUGREPORT)' = '@'PACKAGE_BUGREPORT'@'; then \
- msgid_bugs_address='$(MSGID_BUGS_ADDRESS)'; \
- else \
- msgid_bugs_address='$(PACKAGE_BUGREPORT)'; \
- fi; \
- case `$(XGETTEXT) --version | sed 1q | sed -e 's,^[^0-9]*,,'` in \
- '' | 0.[0-9] | 0.[0-9].* | 0.1[0-5] | 0.1[0-5].* | 0.16 | 0.16.[0-1]*) \
- $(XGETTEXT) --default-domain=$(DOMAIN) --directory=$(top_srcdir) \
- --add-comments=TRANSLATORS: \
- --files-from=$(srcdir)/POTFILES.in \
- --copyright-holder='$(COPYRIGHT_HOLDER)' \
- --msgid-bugs-address="$$msgid_bugs_address" \
- $(XGETTEXT_OPTIONS) @XGETTEXT_EXTRA_OPTIONS@ \
- ;; \
- *) \
- $(XGETTEXT) --default-domain=$(DOMAIN) --directory=$(top_srcdir) \
- --add-comments=TRANSLATORS: \
- --files-from=$(srcdir)/POTFILES.in \
- --copyright-holder='$(COPYRIGHT_HOLDER)' \
- --package-name="$${package_prefix}@PACKAGE@" \
- --package-version='@VERSION@' \
- --msgid-bugs-address="$$msgid_bugs_address" \
- $(XGETTEXT_OPTIONS) @XGETTEXT_EXTRA_OPTIONS@ \
- ;; \
- esac
- test ! -f $(DOMAIN).po || { \
- if test -f $(srcdir)/$(DOMAIN).pot-header; then \
- sed -e '1,/^#$$/d' < $(DOMAIN).po > $(DOMAIN).1po && \
- cat $(srcdir)/$(DOMAIN).pot-header $(DOMAIN).1po > $(DOMAIN).po && \
- rm -f $(DOMAIN).1po \
- || exit 1; \
- fi; \
- if test -f $(srcdir)/$(DOMAIN).pot; then \
- sed -f remove-potcdate.sed < $(srcdir)/$(DOMAIN).pot > $(DOMAIN).1po && \
- sed -f remove-potcdate.sed < $(DOMAIN).po > $(DOMAIN).2po && \
- if cmp $(DOMAIN).1po $(DOMAIN).2po >/dev/null 2>&1; then \
- rm -f $(DOMAIN).1po $(DOMAIN).2po $(DOMAIN).po; \
- else \
- rm -f $(DOMAIN).1po $(DOMAIN).2po $(srcdir)/$(DOMAIN).pot && \
- mv $(DOMAIN).po $(srcdir)/$(DOMAIN).pot; \
- fi; \
- else \
- mv $(DOMAIN).po $(srcdir)/$(DOMAIN).pot; \
- fi; \
- }
-
-# This rule has no dependencies: we don't need to update $(DOMAIN).pot at
-# every "make" invocation, only create it when it is missing.
-# Only "make $(DOMAIN).pot-update" or "make dist" will force an update.
-$(srcdir)/$(DOMAIN).pot:
- $(MAKE) $(DOMAIN).pot-update
-
-# This target rebuilds a PO file if $(DOMAIN).pot has changed.
-# Note that a PO file is not touched if it doesn't need to be changed.
-$(POFILES): $(POFILESDEPS)
- @test -f $(srcdir)/$(DOMAIN).pot || $(MAKE) $(srcdir)/$(DOMAIN).pot
- @lang=`echo $@ | sed -e 's,.*/,,' -e 's/\.po$$//'`; \
- if test -f "$(srcdir)/$${lang}.po"; then \
- test "$(srcdir)" = . && cdcmd="" || cdcmd="cd $(srcdir) && "; \
- echo "$${cdcmd}$(MSGMERGE_UPDATE) $(MSGMERGE_OPTIONS) --lang=$${lang} --previous $${lang}.po $(DOMAIN).pot"; \
- cd $(srcdir) \
- && { case `$(MSGMERGE_UPDATE) --version | sed 1q | sed -e 's,^[^0-9]*,,'` in \
- '' | 0.[0-9] | 0.[0-9].* | 0.1[0-5] | 0.1[0-5].*) \
- $(MSGMERGE_UPDATE) $(MSGMERGE_OPTIONS) $${lang}.po $(DOMAIN).pot;; \
- 0.1[6-7] | 0.1[6-7].*) \
- $(MSGMERGE_UPDATE) $(MSGMERGE_OPTIONS) --previous $${lang}.po $(DOMAIN).pot;; \
- *) \
- $(MSGMERGE_UPDATE) $(MSGMERGE_OPTIONS) --lang=$${lang} --previous $${lang}.po $(DOMAIN).pot;; \
- esac; \
- }; \
- else \
- $(MAKE) $${lang}.po-create; \
- fi
-
-
-install: install-exec install-data
-install-exec:
-install-data: install-data-@USE_NLS@
- if test "$(PACKAGE)" = "gettext-tools"; then \
- $(mkdir_p) $(DESTDIR)$(gettextsrcdir); \
- for file in $(DISTFILES.common) Makevars.template; do \
- $(INSTALL_DATA) $(srcdir)/$$file \
- $(DESTDIR)$(gettextsrcdir)/$$file; \
- done; \
- for file in Makevars; do \
- rm -f $(DESTDIR)$(gettextsrcdir)/$$file; \
- done; \
- else \
- : ; \
- fi
-install-data-no: all
-install-data-yes: all
- @catalogs='$(CATALOGS)'; \
- for cat in $$catalogs; do \
- cat=`basename $$cat`; \
- lang=`echo $$cat | sed -e 's/\.gmo$$//'`; \
- dir=$(localedir)/$$lang/LC_MESSAGES; \
- $(mkdir_p) $(DESTDIR)$$dir; \
- if test -r $$cat; then realcat=$$cat; else realcat=$(srcdir)/$$cat; fi; \
- $(INSTALL_DATA) $$realcat $(DESTDIR)$$dir/$(DOMAIN).mo; \
- echo "installing $$realcat as $(DESTDIR)$$dir/$(DOMAIN).mo"; \
- for lc in '' $(EXTRA_LOCALE_CATEGORIES); do \
- if test -n "$$lc"; then \
- if (cd $(DESTDIR)$(localedir)/$$lang && LC_ALL=C ls -l -d $$lc 2>/dev/null) | grep ' -> ' >/dev/null; then \
- link=`cd $(DESTDIR)$(localedir)/$$lang && LC_ALL=C ls -l -d $$lc | sed -e 's/^.* -> //'`; \
- mv $(DESTDIR)$(localedir)/$$lang/$$lc $(DESTDIR)$(localedir)/$$lang/$$lc.old; \
- mkdir $(DESTDIR)$(localedir)/$$lang/$$lc; \
- (cd $(DESTDIR)$(localedir)/$$lang/$$lc.old && \
- for file in *; do \
- if test -f $$file; then \
- ln -s ../$$link/$$file $(DESTDIR)$(localedir)/$$lang/$$lc/$$file; \
- fi; \
- done); \
- rm -f $(DESTDIR)$(localedir)/$$lang/$$lc.old; \
- else \
- if test -d $(DESTDIR)$(localedir)/$$lang/$$lc; then \
- :; \
- else \
- rm -f $(DESTDIR)$(localedir)/$$lang/$$lc; \
- mkdir $(DESTDIR)$(localedir)/$$lang/$$lc; \
- fi; \
- fi; \
- rm -f $(DESTDIR)$(localedir)/$$lang/$$lc/$(DOMAIN).mo; \
- ln -s ../LC_MESSAGES/$(DOMAIN).mo $(DESTDIR)$(localedir)/$$lang/$$lc/$(DOMAIN).mo 2>/dev/null || \
- ln $(DESTDIR)$(localedir)/$$lang/LC_MESSAGES/$(DOMAIN).mo $(DESTDIR)$(localedir)/$$lang/$$lc/$(DOMAIN).mo 2>/dev/null || \
- cp -p $(DESTDIR)$(localedir)/$$lang/LC_MESSAGES/$(DOMAIN).mo $(DESTDIR)$(localedir)/$$lang/$$lc/$(DOMAIN).mo; \
- echo "installing $$realcat link as $(DESTDIR)$(localedir)/$$lang/$$lc/$(DOMAIN).mo"; \
- fi; \
- done; \
- done
-
-install-strip: install
-
-installdirs: installdirs-exec installdirs-data
-installdirs-exec:
-installdirs-data: installdirs-data-@USE_NLS@
- if test "$(PACKAGE)" = "gettext-tools"; then \
- $(mkdir_p) $(DESTDIR)$(gettextsrcdir); \
- else \
- : ; \
- fi
-installdirs-data-no:
-installdirs-data-yes:
- @catalogs='$(CATALOGS)'; \
- for cat in $$catalogs; do \
- cat=`basename $$cat`; \
- lang=`echo $$cat | sed -e 's/\.gmo$$//'`; \
- dir=$(localedir)/$$lang/LC_MESSAGES; \
- $(mkdir_p) $(DESTDIR)$$dir; \
- for lc in '' $(EXTRA_LOCALE_CATEGORIES); do \
- if test -n "$$lc"; then \
- if (cd $(DESTDIR)$(localedir)/$$lang && LC_ALL=C ls -l -d $$lc 2>/dev/null) | grep ' -> ' >/dev/null; then \
- link=`cd $(DESTDIR)$(localedir)/$$lang && LC_ALL=C ls -l -d $$lc | sed -e 's/^.* -> //'`; \
- mv $(DESTDIR)$(localedir)/$$lang/$$lc $(DESTDIR)$(localedir)/$$lang/$$lc.old; \
- mkdir $(DESTDIR)$(localedir)/$$lang/$$lc; \
- (cd $(DESTDIR)$(localedir)/$$lang/$$lc.old && \
- for file in *; do \
- if test -f $$file; then \
- ln -s ../$$link/$$file $(DESTDIR)$(localedir)/$$lang/$$lc/$$file; \
- fi; \
- done); \
- rm -f $(DESTDIR)$(localedir)/$$lang/$$lc.old; \
- else \
- if test -d $(DESTDIR)$(localedir)/$$lang/$$lc; then \
- :; \
- else \
- rm -f $(DESTDIR)$(localedir)/$$lang/$$lc; \
- mkdir $(DESTDIR)$(localedir)/$$lang/$$lc; \
- fi; \
- fi; \
- fi; \
- done; \
- done
-
-# Define this as empty until I found a useful application.
-installcheck:
-
-uninstall: uninstall-exec uninstall-data
-uninstall-exec:
-uninstall-data: uninstall-data-@USE_NLS@
- if test "$(PACKAGE)" = "gettext-tools"; then \
- for file in $(DISTFILES.common) Makevars.template; do \
- rm -f $(DESTDIR)$(gettextsrcdir)/$$file; \
- done; \
- else \
- : ; \
- fi
-uninstall-data-no:
-uninstall-data-yes:
- catalogs='$(CATALOGS)'; \
- for cat in $$catalogs; do \
- cat=`basename $$cat`; \
- lang=`echo $$cat | sed -e 's/\.gmo$$//'`; \
- for lc in LC_MESSAGES $(EXTRA_LOCALE_CATEGORIES); do \
- rm -f $(DESTDIR)$(localedir)/$$lang/$$lc/$(DOMAIN).mo; \
- done; \
- done
-
-check: all
-
-info dvi ps pdf html tags TAGS ctags CTAGS ID:
-
-install-dvi install-ps install-pdf install-html:
-
-mostlyclean:
- rm -f remove-potcdate.sed
- rm -f $(srcdir)/stamp-poT
- rm -f core core.* $(DOMAIN).po $(DOMAIN).1po $(DOMAIN).2po *.new.po
- rm -fr *.o
-
-clean: mostlyclean
-
-distclean: clean
- rm -f Makefile Makefile.in POTFILES
-
-maintainer-clean: distclean
- @echo "This command is intended for maintainers to use;"
- @echo "it deletes files that may require special tools to rebuild."
- rm -f $(srcdir)/$(DOMAIN).pot $(srcdir)/stamp-po $(GMOFILES)
-
-distdir = $(top_builddir)/$(PACKAGE)-$(VERSION)/$(subdir)
-dist distdir:
- test -z "$(DISTFILESDEPS)" || $(MAKE) $(DISTFILESDEPS)
- @$(MAKE) dist2
-# This is a separate target because 'update-po' must be executed before.
-dist2: $(srcdir)/stamp-po $(DISTFILES)
- @dists="$(DISTFILES)"; \
- if test "$(PACKAGE)" = "gettext-tools"; then \
- dists="$$dists Makevars.template"; \
- fi; \
- if test -f $(srcdir)/$(DOMAIN).pot; then \
- dists="$$dists $(DOMAIN).pot stamp-po"; \
- else \
- case $(XGETTEXT) in \
- :) echo "Warning: Creating a tarball without '$(DOMAIN).pot', because a suitable 'xgettext' program was not found in PATH." 1>&2;; \
- *) echo "Warning: Creating a tarball without '$(DOMAIN).pot', because 'xgettext' found no strings to extract. Check the contents of the POTFILES.in file and the XGETTEXT_OPTIONS in the Makevars file." 1>&2;; \
- esac; \
- fi; \
- if test -f $(srcdir)/ChangeLog; then \
- dists="$$dists ChangeLog"; \
- fi; \
- for i in 0 1 2 3 4 5 6 7 8 9; do \
- if test -f $(srcdir)/ChangeLog.$$i; then \
- dists="$$dists ChangeLog.$$i"; \
- fi; \
- done; \
- if test -f $(srcdir)/LINGUAS; then dists="$$dists LINGUAS"; fi; \
- for file in $$dists; do \
- if test -f $$file; then \
- cp -p $$file $(distdir) || exit 1; \
- else \
- cp -p $(srcdir)/$$file $(distdir) || exit 1; \
- fi; \
- done
-
-update-po: Makefile
- $(MAKE) $(DOMAIN).pot-update
- test -z "$(UPDATEPOFILES)" || $(MAKE) $(UPDATEPOFILES)
- $(MAKE) update-gmo
-
-# General rule for creating PO files.
-
-.nop.po-create:
- @lang=`echo $@ | sed -e 's/\.po-create$$//'`; \
- echo "File $$lang.po does not exist. If you are a translator, you can create it through 'msginit'." 1>&2; \
- exit 1
-
-# General rule for updating PO files.
-
-.nop.po-update:
- @lang=`echo $@ | sed -e 's/\.po-update$$//'`; \
- if test "$(PACKAGE)" = "gettext-tools" && test "$(CROSS_COMPILING)" != "yes"; then PATH=`pwd`/../src:$$PATH; fi; \
- tmpdir=`pwd`; \
- echo "$$lang:"; \
- test "$(srcdir)" = . && cdcmd="" || cdcmd="cd $(srcdir) && "; \
- echo "$${cdcmd}$(MSGMERGE) $(MSGMERGE_OPTIONS) --lang=$$lang --previous $$lang.po $(DOMAIN).pot -o $$lang.new.po"; \
- cd $(srcdir); \
- if { case `$(MSGMERGE) --version | sed 1q | sed -e 's,^[^0-9]*,,'` in \
- '' | 0.[0-9] | 0.[0-9].* | 0.1[0-5] | 0.1[0-5].*) \
- $(MSGMERGE) $(MSGMERGE_OPTIONS) -o $$tmpdir/$$lang.new.po $$lang.po $(DOMAIN).pot;; \
- 0.1[6-7] | 0.1[6-7].*) \
- $(MSGMERGE) $(MSGMERGE_OPTIONS) --previous -o $$tmpdir/$$lang.new.po $$lang.po $(DOMAIN).pot;; \
- *) \
- $(MSGMERGE) $(MSGMERGE_OPTIONS) --lang=$$lang --previous -o $$tmpdir/$$lang.new.po $$lang.po $(DOMAIN).pot;; \
- esac; \
- }; then \
- if cmp $$lang.po $$tmpdir/$$lang.new.po >/dev/null 2>&1; then \
- rm -f $$tmpdir/$$lang.new.po; \
- else \
- if mv -f $$tmpdir/$$lang.new.po $$lang.po; then \
- :; \
- else \
- echo "msgmerge for $$lang.po failed: cannot move $$tmpdir/$$lang.new.po to $$lang.po" 1>&2; \
- exit 1; \
- fi; \
- fi; \
- else \
- echo "msgmerge for $$lang.po failed!" 1>&2; \
- rm -f $$tmpdir/$$lang.new.po; \
- fi
-
-$(DUMMYPOFILES):
-
-update-gmo: Makefile $(GMOFILES)
- @:
-
-# Recreate Makefile by invoking config.status. Explicitly invoke the shell,
-# because execution permission bits may not work on the current file system.
-# Use @SHELL@, which is the shell determined by autoconf for the use by its
-# scripts, not $(SHELL) which is hardwired to /bin/sh and may be deficient.
-Makefile: Makefile.in.in Makevars $(top_builddir)/config.status @POMAKEFILEDEPS@
- cd $(top_builddir) \
- && @SHELL@ ./config.status $(subdir)/$@.in po-directories
-
-force:
-
-# Tell versions [3.59,3.63) of GNU make not to export all variables.
-# Otherwise a system limit (for SysV at least) may be exceeded.
-.NOEXPORT:
diff --git a/meta/recipes-core/gettext/gettext-minimal-0.21/aclocal/gettext.m4 b/meta/recipes-core/gettext/gettext-minimal-0.21/aclocal/gettext.m4
deleted file mode 100644
index 4f25a27d93..0000000000
--- a/meta/recipes-core/gettext/gettext-minimal-0.21/aclocal/gettext.m4
+++ /dev/null
@@ -1,386 +0,0 @@
-# gettext.m4 serial 71 (gettext-0.20.2)
-dnl Copyright (C) 1995-2014, 2016, 2018-2020 Free Software Foundation, Inc.
-dnl This file is free software; the Free Software Foundation
-dnl gives unlimited permission to copy and/or distribute it,
-dnl with or without modifications, as long as this notice is preserved.
-dnl
-dnl This file can be used in projects which are not available under
-dnl the GNU General Public License or the GNU Lesser General Public
-dnl License but which still want to provide support for the GNU gettext
-dnl functionality.
-dnl Please note that the actual code of the GNU gettext library is covered
-dnl by the GNU Lesser General Public License, and the rest of the GNU
-dnl gettext package is covered by the GNU General Public License.
-dnl They are *not* in the public domain.
-
-dnl Authors:
-dnl Ulrich Drepper <drepper@cygnus.com>, 1995-2000.
-dnl Bruno Haible <haible@clisp.cons.org>, 2000-2006, 2008-2010.
-
-dnl Macro to add for using GNU gettext.
-
-dnl Usage: AM_GNU_GETTEXT([INTLSYMBOL], [NEEDSYMBOL], [INTLDIR]).
-dnl INTLSYMBOL must be one of 'external', 'use-libtool'.
-dnl INTLSYMBOL should be 'external' for packages other than GNU gettext, and
-dnl 'use-libtool' for the packages 'gettext-runtime' and 'gettext-tools'.
-dnl If INTLSYMBOL is 'use-libtool', then a libtool library
-dnl $(top_builddir)/intl/libintl.la will be created (shared and/or static,
-dnl depending on --{enable,disable}-{shared,static} and on the presence of
-dnl AM-DISABLE-SHARED).
-dnl If NEEDSYMBOL is specified and is 'need-ngettext', then GNU gettext
-dnl implementations (in libc or libintl) without the ngettext() function
-dnl will be ignored. If NEEDSYMBOL is specified and is
-dnl 'need-formatstring-macros', then GNU gettext implementations that don't
-dnl support the ISO C 99 <inttypes.h> formatstring macros will be ignored.
-dnl INTLDIR is used to find the intl libraries. If empty,
-dnl the value '$(top_builddir)/intl/' is used.
-dnl
-dnl The result of the configuration is one of three cases:
-dnl 1) GNU gettext, as included in the intl subdirectory, will be compiled
-dnl and used.
-dnl Catalog format: GNU --> install in $(datadir)
-dnl Catalog extension: .mo after installation, .gmo in source tree
-dnl 2) GNU gettext has been found in the system's C library.
-dnl Catalog format: GNU --> install in $(datadir)
-dnl Catalog extension: .mo after installation, .gmo in source tree
-dnl 3) No internationalization, always use English msgid.
-dnl Catalog format: none
-dnl Catalog extension: none
-dnl If INTLSYMBOL is 'external', only cases 2 and 3 can occur.
-dnl The use of .gmo is historical (it was needed to avoid overwriting the
-dnl GNU format catalogs when building on a platform with an X/Open gettext),
-dnl but we keep it in order not to force irrelevant filename changes on the
-dnl maintainers.
-dnl
-AC_DEFUN([AM_GNU_GETTEXT],
-[
- dnl Argument checking.
- ifelse([$1], [], , [ifelse([$1], [external], , [ifelse([$1], [use-libtool], ,
- [errprint([ERROR: invalid first argument to AM_GNU_GETTEXT
-])])])])
- ifelse(ifelse([$1], [], [old])[]ifelse([$1], [no-libtool], [old]), [old],
- [errprint([ERROR: Use of AM_GNU_GETTEXT without [external] argument is no longer supported.
-])])
- ifelse([$2], [], , [ifelse([$2], [need-ngettext], , [ifelse([$2], [need-formatstring-macros], ,
- [errprint([ERROR: invalid second argument to AM_GNU_GETTEXT
-])])])])
- define([gt_included_intl],
- ifelse([$1], [external], [no], [yes]))
- gt_NEEDS_INIT
- AM_GNU_GETTEXT_NEED([$2])
-
- AC_REQUIRE([AM_PO_SUBDIRS])dnl
- ifelse(gt_included_intl, yes, [
- AC_REQUIRE([AM_INTL_SUBDIR])dnl
- ])
-
- dnl Prerequisites of AC_LIB_LINKFLAGS_BODY.
- AC_REQUIRE([AC_LIB_PREPARE_PREFIX])
- AC_REQUIRE([AC_LIB_RPATH])
-
- dnl Sometimes libintl requires libiconv, so first search for libiconv.
- dnl Ideally we would do this search only after the
- dnl if test "$USE_NLS" = "yes"; then
- dnl if { eval "gt_val=\$$gt_func_gnugettext_libc"; test "$gt_val" != "yes"; }; then
- dnl tests. But if configure.in invokes AM_ICONV after AM_GNU_GETTEXT
- dnl the configure script would need to contain the same shell code
- dnl again, outside any 'if'. There are two solutions:
- dnl - Invoke AM_ICONV_LINKFLAGS_BODY here, outside any 'if'.
- dnl - Control the expansions in more detail using AC_PROVIDE_IFELSE.
- dnl Since AC_PROVIDE_IFELSE is not documented, we avoid it.
- ifelse(gt_included_intl, yes, , [
- AC_REQUIRE([AM_ICONV_LINKFLAGS_BODY])
- ])
-
- dnl Sometimes, on Mac OS X, libintl requires linking with CoreFoundation.
- gt_INTL_MACOSX
-
- dnl Set USE_NLS.
- AC_REQUIRE([AM_NLS])
-
- ifelse(gt_included_intl, yes, [
- BUILD_INCLUDED_LIBINTL=no
- USE_INCLUDED_LIBINTL=no
- ])
- LIBINTL=
- LTLIBINTL=
- POSUB=
-
- dnl Add a version number to the cache macros.
- case " $gt_needs " in
- *" need-formatstring-macros "*) gt_api_version=3 ;;
- *" need-ngettext "*) gt_api_version=2 ;;
- *) gt_api_version=1 ;;
- esac
- gt_func_gnugettext_libc="gt_cv_func_gnugettext${gt_api_version}_libc"
- gt_func_gnugettext_libintl="gt_cv_func_gnugettext${gt_api_version}_libintl"
-
- dnl If we use NLS figure out what method
- if test "$USE_NLS" = "yes"; then
- gt_use_preinstalled_gnugettext=no
- ifelse(gt_included_intl, yes, [
- AC_MSG_CHECKING([whether included gettext is requested])
- AC_ARG_WITH([included-gettext],
- [ --with-included-gettext use the GNU gettext library included here],
- nls_cv_force_use_gnu_gettext=$withval,
- nls_cv_force_use_gnu_gettext=no)
- AC_MSG_RESULT([$nls_cv_force_use_gnu_gettext])
-
- nls_cv_use_gnu_gettext="$nls_cv_force_use_gnu_gettext"
- if test "$nls_cv_force_use_gnu_gettext" != "yes"; then
- ])
- dnl User does not insist on using GNU NLS library. Figure out what
- dnl to use. If GNU gettext is available we use this. Else we have
- dnl to fall back to GNU NLS library.
-
- if test $gt_api_version -ge 3; then
- gt_revision_test_code='
-#ifndef __GNU_GETTEXT_SUPPORTED_REVISION
-#define __GNU_GETTEXT_SUPPORTED_REVISION(major) ((major) == 0 ? 0 : -1)
-#endif
-changequote(,)dnl
-typedef int array [2 * (__GNU_GETTEXT_SUPPORTED_REVISION(0) >= 1) - 1];
-changequote([,])dnl
-'
- else
- gt_revision_test_code=
- fi
- if test $gt_api_version -ge 2; then
- gt_expression_test_code=' + * ngettext ("", "", 0)'
- else
- gt_expression_test_code=
- fi
-
- AC_CACHE_CHECK([for GNU gettext in libc], [$gt_func_gnugettext_libc],
- [AC_LINK_IFELSE(
- [AC_LANG_PROGRAM(
- [[
-#include <libintl.h>
-#ifndef __GNU_GETTEXT_SUPPORTED_REVISION
-extern int _nl_msg_cat_cntr;
-extern int *_nl_domain_bindings;
-#define __GNU_GETTEXT_SYMBOL_EXPRESSION (_nl_msg_cat_cntr + *_nl_domain_bindings)
-#else
-#define __GNU_GETTEXT_SYMBOL_EXPRESSION 0
-#endif
-$gt_revision_test_code
- ]],
- [[
-bindtextdomain ("", "");
-return * gettext ("")$gt_expression_test_code + __GNU_GETTEXT_SYMBOL_EXPRESSION
- ]])],
- [eval "$gt_func_gnugettext_libc=yes"],
- [eval "$gt_func_gnugettext_libc=no"])])
-
- if { eval "gt_val=\$$gt_func_gnugettext_libc"; test "$gt_val" != "yes"; }; then
- dnl Sometimes libintl requires libiconv, so first search for libiconv.
- ifelse(gt_included_intl, yes, , [
- AM_ICONV_LINK
- ])
- dnl Search for libintl and define LIBINTL, LTLIBINTL and INCINTL
- dnl accordingly. Don't use AC_LIB_LINKFLAGS_BODY([intl],[iconv])
- dnl because that would add "-liconv" to LIBINTL and LTLIBINTL
- dnl even if libiconv doesn't exist.
- AC_LIB_LINKFLAGS_BODY([intl])
- AC_CACHE_CHECK([for GNU gettext in libintl],
- [$gt_func_gnugettext_libintl],
- [gt_save_CPPFLAGS="$CPPFLAGS"
- CPPFLAGS="$CPPFLAGS $INCINTL"
- gt_save_LIBS="$LIBS"
- LIBS="$LIBS $LIBINTL"
- dnl Now see whether libintl exists and does not depend on libiconv.
- AC_LINK_IFELSE(
- [AC_LANG_PROGRAM(
- [[
-#include <libintl.h>
-#ifndef __GNU_GETTEXT_SUPPORTED_REVISION
-extern int _nl_msg_cat_cntr;
-extern
-#ifdef __cplusplus
-"C"
-#endif
-const char *_nl_expand_alias (const char *);
-#define __GNU_GETTEXT_SYMBOL_EXPRESSION (_nl_msg_cat_cntr + *_nl_expand_alias (""))
-#else
-#define __GNU_GETTEXT_SYMBOL_EXPRESSION 0
-#endif
-$gt_revision_test_code
- ]],
- [[
-bindtextdomain ("", "");
-return * gettext ("")$gt_expression_test_code + __GNU_GETTEXT_SYMBOL_EXPRESSION
- ]])],
- [eval "$gt_func_gnugettext_libintl=yes"],
- [eval "$gt_func_gnugettext_libintl=no"])
- dnl Now see whether libintl exists and depends on libiconv.
- if { eval "gt_val=\$$gt_func_gnugettext_libintl"; test "$gt_val" != yes; } && test -n "$LIBICONV"; then
- LIBS="$LIBS $LIBICONV"
- AC_LINK_IFELSE(
- [AC_LANG_PROGRAM(
- [[
-#include <libintl.h>
-#ifndef __GNU_GETTEXT_SUPPORTED_REVISION
-extern int _nl_msg_cat_cntr;
-extern
-#ifdef __cplusplus
-"C"
-#endif
-const char *_nl_expand_alias (const char *);
-#define __GNU_GETTEXT_SYMBOL_EXPRESSION (_nl_msg_cat_cntr + *_nl_expand_alias (""))
-#else
-#define __GNU_GETTEXT_SYMBOL_EXPRESSION 0
-#endif
-$gt_revision_test_code
- ]],
- [[
-bindtextdomain ("", "");
-return * gettext ("")$gt_expression_test_code + __GNU_GETTEXT_SYMBOL_EXPRESSION
- ]])],
- [LIBINTL="$LIBINTL $LIBICONV"
- LTLIBINTL="$LTLIBINTL $LTLIBICONV"
- eval "$gt_func_gnugettext_libintl=yes"
- ])
- fi
- CPPFLAGS="$gt_save_CPPFLAGS"
- LIBS="$gt_save_LIBS"])
- fi
-
- dnl If an already present or preinstalled GNU gettext() is found,
- dnl use it. But if this macro is used in GNU gettext, and GNU
- dnl gettext is already preinstalled in libintl, we update this
- dnl libintl. (Cf. the install rule in intl/Makefile.in.)
- if { eval "gt_val=\$$gt_func_gnugettext_libc"; test "$gt_val" = "yes"; } \
- || { { eval "gt_val=\$$gt_func_gnugettext_libintl"; test "$gt_val" = "yes"; } \
- && test "$PACKAGE" != gettext-runtime \
- && test "$PACKAGE" != gettext-tools; }; then
- gt_use_preinstalled_gnugettext=yes
- else
- dnl Reset the values set by searching for libintl.
- LIBINTL=
- LTLIBINTL=
- INCINTL=
- fi
-
- ifelse(gt_included_intl, yes, [
- if test "$gt_use_preinstalled_gnugettext" != "yes"; then
- dnl GNU gettext is not found in the C library.
- dnl Fall back on included GNU gettext library.
- nls_cv_use_gnu_gettext=yes
- fi
- fi
-
- if test "$nls_cv_use_gnu_gettext" = "yes"; then
- dnl Mark actions used to generate GNU NLS library.
- BUILD_INCLUDED_LIBINTL=yes
- USE_INCLUDED_LIBINTL=yes
- LIBINTL="ifelse([$3],[],\${top_builddir}/intl,[$3])/libintl.la $LIBICONV $LIBTHREAD"
- LTLIBINTL="ifelse([$3],[],\${top_builddir}/intl,[$3])/libintl.la $LTLIBICONV $LTLIBTHREAD"
- LIBS=`echo " $LIBS " | sed -e 's/ -lintl / /' -e 's/^ //' -e 's/ $//'`
- fi
-
- CATOBJEXT=
- if test "$gt_use_preinstalled_gnugettext" = "yes" \
- || test "$nls_cv_use_gnu_gettext" = "yes"; then
- dnl Mark actions to use GNU gettext tools.
- CATOBJEXT=.gmo
- fi
- ])
-
- if test -n "$INTL_MACOSX_LIBS"; then
- if test "$gt_use_preinstalled_gnugettext" = "yes" \
- || test "$nls_cv_use_gnu_gettext" = "yes"; then
- dnl Some extra flags are needed during linking.
- LIBINTL="$LIBINTL $INTL_MACOSX_LIBS"
- LTLIBINTL="$LTLIBINTL $INTL_MACOSX_LIBS"
- fi
- fi
-
- if test "$gt_use_preinstalled_gnugettext" = "yes" \
- || test "$nls_cv_use_gnu_gettext" = "yes"; then
- AC_DEFINE([ENABLE_NLS], [1],
- [Define to 1 if translation of program messages to the user's native language
- is requested.])
- else
- USE_NLS=no
- fi
- fi
-
- AC_MSG_CHECKING([whether to use NLS])
- AC_MSG_RESULT([$USE_NLS])
- if test "$USE_NLS" = "yes"; then
- AC_MSG_CHECKING([where the gettext function comes from])
- if test "$gt_use_preinstalled_gnugettext" = "yes"; then
- if { eval "gt_val=\$$gt_func_gnugettext_libintl"; test "$gt_val" = "yes"; }; then
- gt_source="external libintl"
- else
- gt_source="libc"
- fi
- else
- gt_source="included intl directory"
- fi
- AC_MSG_RESULT([$gt_source])
- fi
-
- if test "$USE_NLS" = "yes"; then
-
- if test "$gt_use_preinstalled_gnugettext" = "yes"; then
- if { eval "gt_val=\$$gt_func_gnugettext_libintl"; test "$gt_val" = "yes"; }; then
- AC_MSG_CHECKING([how to link with libintl])
- AC_MSG_RESULT([$LIBINTL])
- AC_LIB_APPENDTOVAR([CPPFLAGS], [$INCINTL])
- fi
-
- dnl For backward compatibility. Some packages may be using this.
- AC_DEFINE([HAVE_GETTEXT], [1],
- [Define if the GNU gettext() function is already present or preinstalled.])
- AC_DEFINE([HAVE_DCGETTEXT], [1],
- [Define if the GNU dcgettext() function is already present or preinstalled.])
- fi
-
- dnl We need to process the po/ directory.
- POSUB=po
- fi
-
- ifelse(gt_included_intl, yes, [
- dnl In GNU gettext we have to set BUILD_INCLUDED_LIBINTL to 'yes'
- dnl because some of the testsuite requires it.
- BUILD_INCLUDED_LIBINTL=yes
-
- dnl Make all variables we use known to autoconf.
- AC_SUBST([BUILD_INCLUDED_LIBINTL])
- AC_SUBST([USE_INCLUDED_LIBINTL])
- AC_SUBST([CATOBJEXT])
- ])
-
- dnl For backward compatibility. Some Makefiles may be using this.
- INTLLIBS="$LIBINTL"
- AC_SUBST([INTLLIBS])
-
- dnl Make all documented variables known to autoconf.
- AC_SUBST([LIBINTL])
- AC_SUBST([LTLIBINTL])
- AC_SUBST([POSUB])
-])
-
-
-dnl gt_NEEDS_INIT ensures that the gt_needs variable is initialized.
-m4_define([gt_NEEDS_INIT],
-[
- m4_divert_text([DEFAULTS], [gt_needs=])
- m4_define([gt_NEEDS_INIT], [])
-])
-
-
-dnl Usage: AM_GNU_GETTEXT_NEED([NEEDSYMBOL])
-AC_DEFUN([AM_GNU_GETTEXT_NEED],
-[
- m4_divert_text([INIT_PREPARE], [gt_needs="$gt_needs $1"])
-])
-
-
-dnl Usage: AM_GNU_GETTEXT_VERSION([gettext-version])
-AC_DEFUN([AM_GNU_GETTEXT_VERSION], [])
-
-
-dnl Usage: AM_GNU_GETTEXT_REQUIRE_VERSION([gettext-version])
-AC_DEFUN([AM_GNU_GETTEXT_REQUIRE_VERSION], [])
diff --git a/meta/recipes-core/gettext/gettext-minimal-0.21/aclocal/host-cpu-c-abi.m4 b/meta/recipes-core/gettext/gettext-minimal-0.21/aclocal/host-cpu-c-abi.m4
deleted file mode 100644
index 6db2aa25ae..0000000000
--- a/meta/recipes-core/gettext/gettext-minimal-0.21/aclocal/host-cpu-c-abi.m4
+++ /dev/null
@@ -1,675 +0,0 @@
-# host-cpu-c-abi.m4 serial 13
-dnl Copyright (C) 2002-2020 Free Software Foundation, Inc.
-dnl This file is free software; the Free Software Foundation
-dnl gives unlimited permission to copy and/or distribute it,
-dnl with or without modifications, as long as this notice is preserved.
-
-dnl From Bruno Haible and Sam Steingold.
-
-dnl Sets the HOST_CPU variable to the canonical name of the CPU.
-dnl Sets the HOST_CPU_C_ABI variable to the canonical name of the CPU with its
-dnl C language ABI (application binary interface).
-dnl Also defines __${HOST_CPU}__ and __${HOST_CPU_C_ABI}__ as C macros in
-dnl config.h.
-dnl
-dnl This canonical name can be used to select a particular assembly language
-dnl source file that will interoperate with C code on the given host.
-dnl
-dnl For example:
-dnl * 'i386' and 'sparc' are different canonical names, because code for i386
-dnl will not run on SPARC CPUs and vice versa. They have different
-dnl instruction sets.
-dnl * 'sparc' and 'sparc64' are different canonical names, because code for
-dnl 'sparc' and code for 'sparc64' cannot be linked together: 'sparc' code
-dnl contains 32-bit instructions, whereas 'sparc64' code contains 64-bit
-dnl instructions. A process on a SPARC CPU can be in 32-bit mode or in 64-bit
-dnl mode, but not both.
-dnl * 'mips' and 'mipsn32' are different canonical names, because they use
-dnl different argument passing and return conventions for C functions, and
-dnl although the instruction set of 'mips' is a large subset of the
-dnl instruction set of 'mipsn32'.
-dnl * 'mipsn32' and 'mips64' are different canonical names, because they use
-dnl different sizes for the C types like 'int' and 'void *', and although
-dnl the instruction sets of 'mipsn32' and 'mips64' are the same.
-dnl * The same canonical name is used for different endiannesses. You can
-dnl determine the endianness through preprocessor symbols:
-dnl - 'arm': test __ARMEL__.
-dnl - 'mips', 'mipsn32', 'mips64': test _MIPSEB vs. _MIPSEL.
-dnl - 'powerpc64': test _BIG_ENDIAN vs. _LITTLE_ENDIAN.
-dnl * The same name 'i386' is used for CPUs of type i386, i486, i586
-dnl (Pentium), AMD K7, Pentium II, Pentium IV, etc., because
-dnl - Instructions that do not exist on all of these CPUs (cmpxchg,
-dnl MMX, SSE, SSE2, 3DNow! etc.) are not frequently used. If your
-dnl assembly language source files use such instructions, you will
-dnl need to make the distinction.
-dnl - Speed of execution of the common instruction set is reasonable across
-dnl the entire family of CPUs. If you have assembly language source files
-dnl that are optimized for particular CPU types (like GNU gmp has), you
-dnl will need to make the distinction.
-dnl See <https://en.wikipedia.org/wiki/X86_instruction_listings>.
-AC_DEFUN([gl_HOST_CPU_C_ABI],
-[
- AC_REQUIRE([AC_CANONICAL_HOST])
- AC_REQUIRE([gl_C_ASM])
- AC_CACHE_CHECK([host CPU and C ABI], [gl_cv_host_cpu_c_abi],
- [case "$host_cpu" in
-
-changequote(,)dnl
- i[34567]86 )
-changequote([,])dnl
- gl_cv_host_cpu_c_abi=i386
- ;;
-
- x86_64 )
- # On x86_64 systems, the C compiler may be generating code in one of
- # these ABIs:
- # - 64-bit instruction set, 64-bit pointers, 64-bit 'long': x86_64.
- # - 64-bit instruction set, 64-bit pointers, 32-bit 'long': x86_64
- # with native Windows (mingw, MSVC).
- # - 64-bit instruction set, 32-bit pointers, 32-bit 'long': x86_64-x32.
- # - 32-bit instruction set, 32-bit pointers, 32-bit 'long': i386.
- AC_COMPILE_IFELSE(
- [AC_LANG_SOURCE(
- [[#if (defined __x86_64__ || defined __amd64__ \
- || defined _M_X64 || defined _M_AMD64)
- int ok;
- #else
- error fail
- #endif
- ]])],
- [AC_COMPILE_IFELSE(
- [AC_LANG_SOURCE(
- [[#if defined __ILP32__ || defined _ILP32
- int ok;
- #else
- error fail
- #endif
- ]])],
- [gl_cv_host_cpu_c_abi=x86_64-x32],
- [gl_cv_host_cpu_c_abi=x86_64])],
- [gl_cv_host_cpu_c_abi=i386])
- ;;
-
-changequote(,)dnl
- alphaev[4-8] | alphaev56 | alphapca5[67] | alphaev6[78] )
-changequote([,])dnl
- gl_cv_host_cpu_c_abi=alpha
- ;;
-
- arm* | aarch64 )
- # Assume arm with EABI.
- # On arm64 systems, the C compiler may be generating code in one of
- # these ABIs:
- # - aarch64 instruction set, 64-bit pointers, 64-bit 'long': arm64.
- # - aarch64 instruction set, 32-bit pointers, 32-bit 'long': arm64-ilp32.
- # - 32-bit instruction set, 32-bit pointers, 32-bit 'long': arm or armhf.
- AC_COMPILE_IFELSE(
- [AC_LANG_SOURCE(
- [[#ifdef __aarch64__
- int ok;
- #else
- error fail
- #endif
- ]])],
- [AC_COMPILE_IFELSE(
- [AC_LANG_SOURCE(
- [[#if defined __ILP32__ || defined _ILP32
- int ok;
- #else
- error fail
- #endif
- ]])],
- [gl_cv_host_cpu_c_abi=arm64-ilp32],
- [gl_cv_host_cpu_c_abi=arm64])],
- [# Don't distinguish little-endian and big-endian arm, since they
- # don't require different machine code for simple operations and
- # since the user can distinguish them through the preprocessor
- # defines __ARMEL__ vs. __ARMEB__.
- # But distinguish arm which passes floating-point arguments and
- # return values in integer registers (r0, r1, ...) - this is
- # gcc -mfloat-abi=soft or gcc -mfloat-abi=softfp - from arm which
- # passes them in float registers (s0, s1, ...) and double registers
- # (d0, d1, ...) - this is gcc -mfloat-abi=hard. GCC 4.6 or newer
- # sets the preprocessor defines __ARM_PCS (for the first case) and
- # __ARM_PCS_VFP (for the second case), but older GCC does not.
- echo 'double ddd; void func (double dd) { ddd = dd; }' > conftest.c
- # Look for a reference to the register d0 in the .s file.
- AC_TRY_COMMAND(${CC-cc} $CFLAGS $CPPFLAGS $gl_c_asm_opt conftest.c) >/dev/null 2>&1
- if LC_ALL=C grep 'd0,' conftest.$gl_asmext >/dev/null; then
- gl_cv_host_cpu_c_abi=armhf
- else
- gl_cv_host_cpu_c_abi=arm
- fi
- rm -f conftest*
- ])
- ;;
-
- hppa1.0 | hppa1.1 | hppa2.0* | hppa64 )
- # On hppa, the C compiler may be generating 32-bit code or 64-bit
- # code. In the latter case, it defines _LP64 and __LP64__.
- AC_COMPILE_IFELSE(
- [AC_LANG_SOURCE(
- [[#ifdef __LP64__
- int ok;
- #else
- error fail
- #endif
- ]])],
- [gl_cv_host_cpu_c_abi=hppa64],
- [gl_cv_host_cpu_c_abi=hppa])
- ;;
-
- ia64* )
- # On ia64 on HP-UX, the C compiler may be generating 64-bit code or
- # 32-bit code. In the latter case, it defines _ILP32.
- AC_COMPILE_IFELSE(
- [AC_LANG_SOURCE(
- [[#ifdef _ILP32
- int ok;
- #else
- error fail
- #endif
- ]])],
- [gl_cv_host_cpu_c_abi=ia64-ilp32],
- [gl_cv_host_cpu_c_abi=ia64])
- ;;
-
- mips* )
- # We should also check for (_MIPS_SZPTR == 64), but gcc keeps this
- # at 32.
- AC_COMPILE_IFELSE(
- [AC_LANG_SOURCE(
- [[#if defined _MIPS_SZLONG && (_MIPS_SZLONG == 64)
- int ok;
- #else
- error fail
- #endif
- ]])],
- [gl_cv_host_cpu_c_abi=mips64],
- [# In the n32 ABI, _ABIN32 is defined, _ABIO32 is not defined (but
- # may later get defined by <sgidefs.h>), and _MIPS_SIM == _ABIN32.
- # In the 32 ABI, _ABIO32 is defined, _ABIN32 is not defined (but
- # may later get defined by <sgidefs.h>), and _MIPS_SIM == _ABIO32.
- AC_COMPILE_IFELSE(
- [AC_LANG_SOURCE(
- [[#if (_MIPS_SIM == _ABIN32)
- int ok;
- #else
- error fail
- #endif
- ]])],
- [gl_cv_host_cpu_c_abi=mipsn32],
- [gl_cv_host_cpu_c_abi=mips])])
- ;;
-
- powerpc* )
- # Different ABIs are in use on AIX vs. Mac OS X vs. Linux,*BSD.
- # No need to distinguish them here; the caller may distinguish
- # them based on the OS.
- # On powerpc64 systems, the C compiler may still be generating
- # 32-bit code. And on powerpc-ibm-aix systems, the C compiler may
- # be generating 64-bit code.
- AC_COMPILE_IFELSE(
- [AC_LANG_SOURCE(
- [[#if defined __powerpc64__ || defined _ARCH_PPC64
- int ok;
- #else
- error fail
- #endif
- ]])],
- [# On powerpc64, there are two ABIs on Linux: The AIX compatible
- # one and the ELFv2 one. The latter defines _CALL_ELF=2.
- AC_COMPILE_IFELSE(
- [AC_LANG_SOURCE(
- [[#if defined _CALL_ELF && _CALL_ELF == 2
- int ok;
- #else
- error fail
- #endif
- ]])],
- [gl_cv_host_cpu_c_abi=powerpc64-elfv2],
- [gl_cv_host_cpu_c_abi=powerpc64])
- ],
- [gl_cv_host_cpu_c_abi=powerpc])
- ;;
-
- rs6000 )
- gl_cv_host_cpu_c_abi=powerpc
- ;;
-
- riscv32 | riscv64 )
- # There are 2 architectures (with variants): rv32* and rv64*.
- AC_COMPILE_IFELSE(
- [AC_LANG_SOURCE(
- [[#if __riscv_xlen == 64
- int ok;
- #else
- error fail
- #endif
- ]])],
- [cpu=riscv64],
- [cpu=riscv32])
- # There are 6 ABIs: ilp32, ilp32f, ilp32d, lp64, lp64f, lp64d.
- # Size of 'long' and 'void *':
- AC_COMPILE_IFELSE(
- [AC_LANG_SOURCE(
- [[#if defined __LP64__
- int ok;
- #else
- error fail
- #endif
- ]])],
- [main_abi=lp64],
- [main_abi=ilp32])
- # Float ABIs:
- # __riscv_float_abi_double:
- # 'float' and 'double' are passed in floating-point registers.
- # __riscv_float_abi_single:
- # 'float' are passed in floating-point registers.
- # __riscv_float_abi_soft:
- # No values are passed in floating-point registers.
- AC_COMPILE_IFELSE(
- [AC_LANG_SOURCE(
- [[#if defined __riscv_float_abi_double
- int ok;
- #else
- error fail
- #endif
- ]])],
- [float_abi=d],
- [AC_COMPILE_IFELSE(
- [AC_LANG_SOURCE(
- [[#if defined __riscv_float_abi_single
- int ok;
- #else
- error fail
- #endif
- ]])],
- [float_abi=f],
- [float_abi=''])
- ])
- gl_cv_host_cpu_c_abi="${cpu}-${main_abi}${float_abi}"
- ;;
-
- s390* )
- # On s390x, the C compiler may be generating 64-bit (= s390x) code
- # or 31-bit (= s390) code.
- AC_COMPILE_IFELSE(
- [AC_LANG_SOURCE(
- [[#if defined __LP64__ || defined __s390x__
- int ok;
- #else
- error fail
- #endif
- ]])],
- [gl_cv_host_cpu_c_abi=s390x],
- [gl_cv_host_cpu_c_abi=s390])
- ;;
-
- sparc | sparc64 )
- # UltraSPARCs running Linux have `uname -m` = "sparc64", but the
- # C compiler still generates 32-bit code.
- AC_COMPILE_IFELSE(
- [AC_LANG_SOURCE(
- [[#if defined __sparcv9 || defined __arch64__
- int ok;
- #else
- error fail
- #endif
- ]])],
- [gl_cv_host_cpu_c_abi=sparc64],
- [gl_cv_host_cpu_c_abi=sparc])
- ;;
-
- *)
- gl_cv_host_cpu_c_abi="$host_cpu"
- ;;
- esac
- ])
-
- dnl In most cases, $HOST_CPU and $HOST_CPU_C_ABI are the same.
- HOST_CPU=`echo "$gl_cv_host_cpu_c_abi" | sed -e 's/-.*//'`
- HOST_CPU_C_ABI="$gl_cv_host_cpu_c_abi"
- AC_SUBST([HOST_CPU])
- AC_SUBST([HOST_CPU_C_ABI])
-
- # This was
- # AC_DEFINE_UNQUOTED([__${HOST_CPU}__])
- # AC_DEFINE_UNQUOTED([__${HOST_CPU_C_ABI}__])
- # earlier, but KAI C++ 3.2d doesn't like this.
- sed -e 's/-/_/g' >> confdefs.h <<EOF
-#ifndef __${HOST_CPU}__
-#define __${HOST_CPU}__ 1
-#endif
-#ifndef __${HOST_CPU_C_ABI}__
-#define __${HOST_CPU_C_ABI}__ 1
-#endif
-EOF
- AH_TOP([/* CPU and C ABI indicator */
-#ifndef __i386__
-#undef __i386__
-#endif
-#ifndef __x86_64_x32__
-#undef __x86_64_x32__
-#endif
-#ifndef __x86_64__
-#undef __x86_64__
-#endif
-#ifndef __alpha__
-#undef __alpha__
-#endif
-#ifndef __arm__
-#undef __arm__
-#endif
-#ifndef __armhf__
-#undef __armhf__
-#endif
-#ifndef __arm64_ilp32__
-#undef __arm64_ilp32__
-#endif
-#ifndef __arm64__
-#undef __arm64__
-#endif
-#ifndef __hppa__
-#undef __hppa__
-#endif
-#ifndef __hppa64__
-#undef __hppa64__
-#endif
-#ifndef __ia64_ilp32__
-#undef __ia64_ilp32__
-#endif
-#ifndef __ia64__
-#undef __ia64__
-#endif
-#ifndef __m68k__
-#undef __m68k__
-#endif
-#ifndef __mips__
-#undef __mips__
-#endif
-#ifndef __mipsn32__
-#undef __mipsn32__
-#endif
-#ifndef __mips64__
-#undef __mips64__
-#endif
-#ifndef __powerpc__
-#undef __powerpc__
-#endif
-#ifndef __powerpc64__
-#undef __powerpc64__
-#endif
-#ifndef __powerpc64_elfv2__
-#undef __powerpc64_elfv2__
-#endif
-#ifndef __riscv32__
-#undef __riscv32__
-#endif
-#ifndef __riscv64__
-#undef __riscv64__
-#endif
-#ifndef __riscv32_ilp32__
-#undef __riscv32_ilp32__
-#endif
-#ifndef __riscv32_ilp32f__
-#undef __riscv32_ilp32f__
-#endif
-#ifndef __riscv32_ilp32d__
-#undef __riscv32_ilp32d__
-#endif
-#ifndef __riscv64_ilp32__
-#undef __riscv64_ilp32__
-#endif
-#ifndef __riscv64_ilp32f__
-#undef __riscv64_ilp32f__
-#endif
-#ifndef __riscv64_ilp32d__
-#undef __riscv64_ilp32d__
-#endif
-#ifndef __riscv64_lp64__
-#undef __riscv64_lp64__
-#endif
-#ifndef __riscv64_lp64f__
-#undef __riscv64_lp64f__
-#endif
-#ifndef __riscv64_lp64d__
-#undef __riscv64_lp64d__
-#endif
-#ifndef __s390__
-#undef __s390__
-#endif
-#ifndef __s390x__
-#undef __s390x__
-#endif
-#ifndef __sh__
-#undef __sh__
-#endif
-#ifndef __sparc__
-#undef __sparc__
-#endif
-#ifndef __sparc64__
-#undef __sparc64__
-#endif
-])
-
-])
-
-
-dnl Sets the HOST_CPU_C_ABI_32BIT variable to 'yes' if the C language ABI
-dnl (application binary interface) is a 32-bit one, to 'no' if it is a 64-bit
-dnl one, or to 'unknown' if unknown.
-dnl This is a simplified variant of gl_HOST_CPU_C_ABI.
-AC_DEFUN([gl_HOST_CPU_C_ABI_32BIT],
-[
- AC_REQUIRE([AC_CANONICAL_HOST])
- AC_CACHE_CHECK([32-bit host C ABI], [gl_cv_host_cpu_c_abi_32bit],
- [if test -n "$gl_cv_host_cpu_c_abi"; then
- case "$gl_cv_host_cpu_c_abi" in
- i386 | x86_64-x32 | arm | armhf | arm64-ilp32 | hppa | ia64-ilp32 | mips | mipsn32 | powerpc | riscv*-ilp32* | s390 | sparc)
- gl_cv_host_cpu_c_abi_32bit=yes ;;
- x86_64 | alpha | arm64 | hppa64 | ia64 | mips64 | powerpc64 | powerpc64-elfv2 | riscv*-lp64* | s390x | sparc64 )
- gl_cv_host_cpu_c_abi_32bit=no ;;
- *)
- gl_cv_host_cpu_c_abi_32bit=unknown ;;
- esac
- else
- case "$host_cpu" in
-
- # CPUs that only support a 32-bit ABI.
- arc \
- | bfin \
- | cris* \
- | csky \
- | epiphany \
- | ft32 \
- | h8300 \
- | m68k \
- | microblaze | microblazeel \
- | nds32 | nds32le | nds32be \
- | nios2 | nios2eb | nios2el \
- | or1k* \
- | or32 \
- | sh | sh[1234] | sh[1234]e[lb] \
- | tic6x \
- | xtensa* )
- gl_cv_host_cpu_c_abi_32bit=yes
- ;;
-
- # CPUs that only support a 64-bit ABI.
-changequote(,)dnl
- alpha | alphaev[4-8] | alphaev56 | alphapca5[67] | alphaev6[78] \
- | mmix )
-changequote([,])dnl
- gl_cv_host_cpu_c_abi_32bit=no
- ;;
-
-changequote(,)dnl
- i[34567]86 )
-changequote([,])dnl
- gl_cv_host_cpu_c_abi_32bit=yes
- ;;
-
- x86_64 )
- # On x86_64 systems, the C compiler may be generating code in one of
- # these ABIs:
- # - 64-bit instruction set, 64-bit pointers, 64-bit 'long': x86_64.
- # - 64-bit instruction set, 64-bit pointers, 32-bit 'long': x86_64
- # with native Windows (mingw, MSVC).
- # - 64-bit instruction set, 32-bit pointers, 32-bit 'long': x86_64-x32.
- # - 32-bit instruction set, 32-bit pointers, 32-bit 'long': i386.
- AC_COMPILE_IFELSE(
- [AC_LANG_SOURCE(
- [[#if (defined __x86_64__ || defined __amd64__ \
- || defined _M_X64 || defined _M_AMD64) \
- && !(defined __ILP32__ || defined _ILP32)
- int ok;
- #else
- error fail
- #endif
- ]])],
- [gl_cv_host_cpu_c_abi_32bit=no],
- [gl_cv_host_cpu_c_abi_32bit=yes])
- ;;
-
- arm* | aarch64 )
- # Assume arm with EABI.
- # On arm64 systems, the C compiler may be generating code in one of
- # these ABIs:
- # - aarch64 instruction set, 64-bit pointers, 64-bit 'long': arm64.
- # - aarch64 instruction set, 32-bit pointers, 32-bit 'long': arm64-ilp32.
- # - 32-bit instruction set, 32-bit pointers, 32-bit 'long': arm or armhf.
- AC_COMPILE_IFELSE(
- [AC_LANG_SOURCE(
- [[#if defined __aarch64__ && !(defined __ILP32__ || defined _ILP32)
- int ok;
- #else
- error fail
- #endif
- ]])],
- [gl_cv_host_cpu_c_abi_32bit=no],
- [gl_cv_host_cpu_c_abi_32bit=yes])
- ;;
-
- hppa1.0 | hppa1.1 | hppa2.0* | hppa64 )
- # On hppa, the C compiler may be generating 32-bit code or 64-bit
- # code. In the latter case, it defines _LP64 and __LP64__.
- AC_COMPILE_IFELSE(
- [AC_LANG_SOURCE(
- [[#ifdef __LP64__
- int ok;
- #else
- error fail
- #endif
- ]])],
- [gl_cv_host_cpu_c_abi_32bit=no],
- [gl_cv_host_cpu_c_abi_32bit=yes])
- ;;
-
- ia64* )
- # On ia64 on HP-UX, the C compiler may be generating 64-bit code or
- # 32-bit code. In the latter case, it defines _ILP32.
- AC_COMPILE_IFELSE(
- [AC_LANG_SOURCE(
- [[#ifdef _ILP32
- int ok;
- #else
- error fail
- #endif
- ]])],
- [gl_cv_host_cpu_c_abi_32bit=yes],
- [gl_cv_host_cpu_c_abi_32bit=no])
- ;;
-
- mips* )
- # We should also check for (_MIPS_SZPTR == 64), but gcc keeps this
- # at 32.
- AC_COMPILE_IFELSE(
- [AC_LANG_SOURCE(
- [[#if defined _MIPS_SZLONG && (_MIPS_SZLONG == 64)
- int ok;
- #else
- error fail
- #endif
- ]])],
- [gl_cv_host_cpu_c_abi_32bit=no],
- [gl_cv_host_cpu_c_abi_32bit=yes])
- ;;
-
- powerpc* )
- # Different ABIs are in use on AIX vs. Mac OS X vs. Linux,*BSD.
- # No need to distinguish them here; the caller may distinguish
- # them based on the OS.
- # On powerpc64 systems, the C compiler may still be generating
- # 32-bit code. And on powerpc-ibm-aix systems, the C compiler may
- # be generating 64-bit code.
- AC_COMPILE_IFELSE(
- [AC_LANG_SOURCE(
- [[#if defined __powerpc64__ || defined _ARCH_PPC64
- int ok;
- #else
- error fail
- #endif
- ]])],
- [gl_cv_host_cpu_c_abi_32bit=no],
- [gl_cv_host_cpu_c_abi_32bit=yes])
- ;;
-
- rs6000 )
- gl_cv_host_cpu_c_abi_32bit=yes
- ;;
-
- riscv32 | riscv64 )
- # There are 6 ABIs: ilp32, ilp32f, ilp32d, lp64, lp64f, lp64d.
- # Size of 'long' and 'void *':
- AC_COMPILE_IFELSE(
- [AC_LANG_SOURCE(
- [[#if defined __LP64__
- int ok;
- #else
- error fail
- #endif
- ]])],
- [gl_cv_host_cpu_c_abi_32bit=no],
- [gl_cv_host_cpu_c_abi_32bit=yes])
- ;;
-
- s390* )
- # On s390x, the C compiler may be generating 64-bit (= s390x) code
- # or 31-bit (= s390) code.
- AC_COMPILE_IFELSE(
- [AC_LANG_SOURCE(
- [[#if defined __LP64__ || defined __s390x__
- int ok;
- #else
- error fail
- #endif
- ]])],
- [gl_cv_host_cpu_c_abi_32bit=no],
- [gl_cv_host_cpu_c_abi_32bit=yes])
- ;;
-
- sparc | sparc64 )
- # UltraSPARCs running Linux have `uname -m` = "sparc64", but the
- # C compiler still generates 32-bit code.
- AC_COMPILE_IFELSE(
- [AC_LANG_SOURCE(
- [[#if defined __sparcv9 || defined __arch64__
- int ok;
- #else
- error fail
- #endif
- ]])],
- [gl_cv_host_cpu_c_abi_32bit=no],
- [gl_cv_host_cpu_c_abi_32bit=yes])
- ;;
-
- *)
- gl_cv_host_cpu_c_abi_32bit=unknown
- ;;
- esac
- fi
- ])
-
- HOST_CPU_C_ABI_32BIT="$gl_cv_host_cpu_c_abi_32bit"
-])
diff --git a/meta/recipes-core/gettext/gettext-minimal-0.21/aclocal/iconv.m4 b/meta/recipes-core/gettext/gettext-minimal-0.21/aclocal/iconv.m4
deleted file mode 100644
index e593b7270a..0000000000
--- a/meta/recipes-core/gettext/gettext-minimal-0.21/aclocal/iconv.m4
+++ /dev/null
@@ -1,288 +0,0 @@
-# iconv.m4 serial 21
-dnl Copyright (C) 2000-2002, 2007-2014, 2016-2020 Free Software Foundation,
-dnl Inc.
-dnl This file is free software; the Free Software Foundation
-dnl gives unlimited permission to copy and/or distribute it,
-dnl with or without modifications, as long as this notice is preserved.
-
-dnl From Bruno Haible.
-
-AC_DEFUN([AM_ICONV_LINKFLAGS_BODY],
-[
- dnl Prerequisites of AC_LIB_LINKFLAGS_BODY.
- AC_REQUIRE([AC_LIB_PREPARE_PREFIX])
- AC_REQUIRE([AC_LIB_RPATH])
-
- dnl Search for libiconv and define LIBICONV, LTLIBICONV and INCICONV
- dnl accordingly.
- AC_LIB_LINKFLAGS_BODY([iconv])
-])
-
-AC_DEFUN([AM_ICONV_LINK],
-[
- dnl Some systems have iconv in libc, some have it in libiconv (OSF/1 and
- dnl those with the standalone portable GNU libiconv installed).
- AC_REQUIRE([AC_CANONICAL_HOST]) dnl for cross-compiles
-
- dnl Search for libiconv and define LIBICONV, LTLIBICONV and INCICONV
- dnl accordingly.
- AC_REQUIRE([AM_ICONV_LINKFLAGS_BODY])
-
- dnl Add $INCICONV to CPPFLAGS before performing the following checks,
- dnl because if the user has installed libiconv and not disabled its use
- dnl via --without-libiconv-prefix, he wants to use it. The first
- dnl AC_LINK_IFELSE will then fail, the second AC_LINK_IFELSE will succeed.
- am_save_CPPFLAGS="$CPPFLAGS"
- AC_LIB_APPENDTOVAR([CPPFLAGS], [$INCICONV])
-
- AC_CACHE_CHECK([for iconv], [am_cv_func_iconv], [
- am_cv_func_iconv="no, consider installing GNU libiconv"
- am_cv_lib_iconv=no
- AC_LINK_IFELSE(
- [AC_LANG_PROGRAM(
- [[
-#include <stdlib.h>
-#include <iconv.h>
- ]],
- [[iconv_t cd = iconv_open("","");
- iconv(cd,NULL,NULL,NULL,NULL);
- iconv_close(cd);]])],
- [am_cv_func_iconv=yes])
- if test "$am_cv_func_iconv" != yes; then
- am_save_LIBS="$LIBS"
- LIBS="$LIBS $LIBICONV"
- AC_LINK_IFELSE(
- [AC_LANG_PROGRAM(
- [[
-#include <stdlib.h>
-#include <iconv.h>
- ]],
- [[iconv_t cd = iconv_open("","");
- iconv(cd,NULL,NULL,NULL,NULL);
- iconv_close(cd);]])],
- [am_cv_lib_iconv=yes]
- [am_cv_func_iconv=yes])
- LIBS="$am_save_LIBS"
- fi
- ])
- if test "$am_cv_func_iconv" = yes; then
- AC_CACHE_CHECK([for working iconv], [am_cv_func_iconv_works], [
- dnl This tests against bugs in AIX 5.1, AIX 6.1..7.1, HP-UX 11.11,
- dnl Solaris 10.
- am_save_LIBS="$LIBS"
- if test $am_cv_lib_iconv = yes; then
- LIBS="$LIBS $LIBICONV"
- fi
- am_cv_func_iconv_works=no
- for ac_iconv_const in '' 'const'; do
- AC_RUN_IFELSE(
- [AC_LANG_PROGRAM(
- [[
-#include <iconv.h>
-#include <string.h>
-
-#ifndef ICONV_CONST
-# define ICONV_CONST $ac_iconv_const
-#endif
- ]],
- [[int result = 0;
- /* Test against AIX 5.1 bug: Failures are not distinguishable from successful
- returns. */
- {
- iconv_t cd_utf8_to_88591 = iconv_open ("ISO8859-1", "UTF-8");
- if (cd_utf8_to_88591 != (iconv_t)(-1))
- {
- static ICONV_CONST char input[] = "\342\202\254"; /* EURO SIGN */
- char buf[10];
- ICONV_CONST char *inptr = input;
- size_t inbytesleft = strlen (input);
- char *outptr = buf;
- size_t outbytesleft = sizeof (buf);
- size_t res = iconv (cd_utf8_to_88591,
- &inptr, &inbytesleft,
- &outptr, &outbytesleft);
- if (res == 0)
- result |= 1;
- iconv_close (cd_utf8_to_88591);
- }
- }
- /* Test against Solaris 10 bug: Failures are not distinguishable from
- successful returns. */
- {
- iconv_t cd_ascii_to_88591 = iconv_open ("ISO8859-1", "646");
- if (cd_ascii_to_88591 != (iconv_t)(-1))
- {
- static ICONV_CONST char input[] = "\263";
- char buf[10];
- ICONV_CONST char *inptr = input;
- size_t inbytesleft = strlen (input);
- char *outptr = buf;
- size_t outbytesleft = sizeof (buf);
- size_t res = iconv (cd_ascii_to_88591,
- &inptr, &inbytesleft,
- &outptr, &outbytesleft);
- if (res == 0)
- result |= 2;
- iconv_close (cd_ascii_to_88591);
- }
- }
- /* Test against AIX 6.1..7.1 bug: Buffer overrun. */
- {
- iconv_t cd_88591_to_utf8 = iconv_open ("UTF-8", "ISO-8859-1");
- if (cd_88591_to_utf8 != (iconv_t)(-1))
- {
- static ICONV_CONST char input[] = "\304";
- static char buf[2] = { (char)0xDE, (char)0xAD };
- ICONV_CONST char *inptr = input;
- size_t inbytesleft = 1;
- char *outptr = buf;
- size_t outbytesleft = 1;
- size_t res = iconv (cd_88591_to_utf8,
- &inptr, &inbytesleft,
- &outptr, &outbytesleft);
- if (res != (size_t)(-1) || outptr - buf > 1 || buf[1] != (char)0xAD)
- result |= 4;
- iconv_close (cd_88591_to_utf8);
- }
- }
-#if 0 /* This bug could be worked around by the caller. */
- /* Test against HP-UX 11.11 bug: Positive return value instead of 0. */
- {
- iconv_t cd_88591_to_utf8 = iconv_open ("utf8", "iso88591");
- if (cd_88591_to_utf8 != (iconv_t)(-1))
- {
- static ICONV_CONST char input[] = "\304rger mit b\366sen B\374bchen ohne Augenma\337";
- char buf[50];
- ICONV_CONST char *inptr = input;
- size_t inbytesleft = strlen (input);
- char *outptr = buf;
- size_t outbytesleft = sizeof (buf);
- size_t res = iconv (cd_88591_to_utf8,
- &inptr, &inbytesleft,
- &outptr, &outbytesleft);
- if ((int)res > 0)
- result |= 8;
- iconv_close (cd_88591_to_utf8);
- }
- }
-#endif
- /* Test against HP-UX 11.11 bug: No converter from EUC-JP to UTF-8 is
- provided. */
- {
- /* Try standardized names. */
- iconv_t cd1 = iconv_open ("UTF-8", "EUC-JP");
- /* Try IRIX, OSF/1 names. */
- iconv_t cd2 = iconv_open ("UTF-8", "eucJP");
- /* Try AIX names. */
- iconv_t cd3 = iconv_open ("UTF-8", "IBM-eucJP");
- /* Try HP-UX names. */
- iconv_t cd4 = iconv_open ("utf8", "eucJP");
- if (cd1 == (iconv_t)(-1) && cd2 == (iconv_t)(-1)
- && cd3 == (iconv_t)(-1) && cd4 == (iconv_t)(-1))
- result |= 16;
- if (cd1 != (iconv_t)(-1))
- iconv_close (cd1);
- if (cd2 != (iconv_t)(-1))
- iconv_close (cd2);
- if (cd3 != (iconv_t)(-1))
- iconv_close (cd3);
- if (cd4 != (iconv_t)(-1))
- iconv_close (cd4);
- }
- return result;
-]])],
- [am_cv_func_iconv_works=yes], ,
- [case "$host_os" in
- aix* | hpux*) am_cv_func_iconv_works="guessing no" ;;
- *) am_cv_func_iconv_works="guessing yes" ;;
- esac])
- test "$am_cv_func_iconv_works" = no || break
- done
- LIBS="$am_save_LIBS"
- ])
- case "$am_cv_func_iconv_works" in
- *no) am_func_iconv=no am_cv_lib_iconv=no ;;
- *) am_func_iconv=yes ;;
- esac
- else
- am_func_iconv=no am_cv_lib_iconv=no
- fi
- if test "$am_func_iconv" = yes; then
- AC_DEFINE([HAVE_ICONV], [1],
- [Define if you have the iconv() function and it works.])
- fi
- if test "$am_cv_lib_iconv" = yes; then
- AC_MSG_CHECKING([how to link with libiconv])
- AC_MSG_RESULT([$LIBICONV])
- else
- dnl If $LIBICONV didn't lead to a usable library, we don't need $INCICONV
- dnl either.
- CPPFLAGS="$am_save_CPPFLAGS"
- LIBICONV=
- LTLIBICONV=
- fi
- AC_SUBST([LIBICONV])
- AC_SUBST([LTLIBICONV])
-])
-
-dnl Define AM_ICONV using AC_DEFUN_ONCE for Autoconf >= 2.64, in order to
-dnl avoid warnings like
-dnl "warning: AC_REQUIRE: `AM_ICONV' was expanded before it was required".
-dnl This is tricky because of the way 'aclocal' is implemented:
-dnl - It requires defining an auxiliary macro whose name ends in AC_DEFUN.
-dnl Otherwise aclocal's initial scan pass would miss the macro definition.
-dnl - It requires a line break inside the AC_DEFUN_ONCE and AC_DEFUN expansions.
-dnl Otherwise aclocal would emit many "Use of uninitialized value $1"
-dnl warnings.
-m4_define([gl_iconv_AC_DEFUN],
- m4_version_prereq([2.64],
- [[AC_DEFUN_ONCE(
- [$1], [$2])]],
- [m4_ifdef([gl_00GNULIB],
- [[AC_DEFUN_ONCE(
- [$1], [$2])]],
- [[AC_DEFUN(
- [$1], [$2])]])]))
-gl_iconv_AC_DEFUN([AM_ICONV],
-[
- AM_ICONV_LINK
- if test "$am_cv_func_iconv" = yes; then
- AC_MSG_CHECKING([for iconv declaration])
- AC_CACHE_VAL([am_cv_proto_iconv], [
- AC_COMPILE_IFELSE(
- [AC_LANG_PROGRAM(
- [[
-#include <stdlib.h>
-#include <iconv.h>
-extern
-#ifdef __cplusplus
-"C"
-#endif
-#if defined(__STDC__) || defined(_MSC_VER) || defined(__cplusplus)
-size_t iconv (iconv_t cd, char * *inbuf, size_t *inbytesleft, char * *outbuf, size_t *outbytesleft);
-#else
-size_t iconv();
-#endif
- ]],
- [[]])],
- [am_cv_proto_iconv_arg1=""],
- [am_cv_proto_iconv_arg1="const"])
- am_cv_proto_iconv="extern size_t iconv (iconv_t cd, $am_cv_proto_iconv_arg1 char * *inbuf, size_t *inbytesleft, char * *outbuf, size_t *outbytesleft);"])
- am_cv_proto_iconv=`echo "[$]am_cv_proto_iconv" | tr -s ' ' | sed -e 's/( /(/'`
- AC_MSG_RESULT([
- $am_cv_proto_iconv])
- else
- dnl When compiling GNU libiconv on a system that does not have iconv yet,
- dnl pick the POSIX compliant declaration without 'const'.
- am_cv_proto_iconv_arg1=""
- fi
- AC_DEFINE_UNQUOTED([ICONV_CONST], [$am_cv_proto_iconv_arg1],
- [Define as const if the declaration of iconv() needs const.])
- dnl Also substitute ICONV_CONST in the gnulib generated <iconv.h>.
- m4_ifdef([gl_ICONV_H_DEFAULTS],
- [AC_REQUIRE([gl_ICONV_H_DEFAULTS])
- if test -n "$am_cv_proto_iconv_arg1"; then
- ICONV_CONST="const"
- fi
- ])
-])
diff --git a/meta/recipes-core/gettext/gettext-minimal-0.21/aclocal/intlmacosx.m4 b/meta/recipes-core/gettext/gettext-minimal-0.21/aclocal/intlmacosx.m4
deleted file mode 100644
index ebd9937c1a..0000000000
--- a/meta/recipes-core/gettext/gettext-minimal-0.21/aclocal/intlmacosx.m4
+++ /dev/null
@@ -1,65 +0,0 @@
-# intlmacosx.m4 serial 8 (gettext-0.20.2)
-dnl Copyright (C) 2004-2014, 2016, 2019-2020 Free Software Foundation, Inc.
-dnl This file is free software; the Free Software Foundation
-dnl gives unlimited permission to copy and/or distribute it,
-dnl with or without modifications, as long as this notice is preserved.
-dnl
-dnl This file can be used in projects which are not available under
-dnl the GNU General Public License or the GNU Lesser General Public
-dnl License but which still want to provide support for the GNU gettext
-dnl functionality.
-dnl Please note that the actual code of the GNU gettext library is covered
-dnl by the GNU Lesser General Public License, and the rest of the GNU
-dnl gettext package is covered by the GNU General Public License.
-dnl They are *not* in the public domain.
-
-dnl Checks for special options needed on Mac OS X.
-dnl Defines INTL_MACOSX_LIBS.
-AC_DEFUN([gt_INTL_MACOSX],
-[
- dnl Check for API introduced in Mac OS X 10.4.
- AC_CACHE_CHECK([for CFPreferencesCopyAppValue],
- [gt_cv_func_CFPreferencesCopyAppValue],
- [gt_save_LIBS="$LIBS"
- LIBS="$LIBS -Wl,-framework -Wl,CoreFoundation"
- AC_LINK_IFELSE(
- [AC_LANG_PROGRAM(
- [[#include <CoreFoundation/CFPreferences.h>]],
- [[CFPreferencesCopyAppValue(NULL, NULL)]])],
- [gt_cv_func_CFPreferencesCopyAppValue=yes],
- [gt_cv_func_CFPreferencesCopyAppValue=no])
- LIBS="$gt_save_LIBS"])
- if test $gt_cv_func_CFPreferencesCopyAppValue = yes; then
- AC_DEFINE([HAVE_CFPREFERENCESCOPYAPPVALUE], [1],
- [Define to 1 if you have the Mac OS X function CFPreferencesCopyAppValue in the CoreFoundation framework.])
- fi
- dnl Don't check for the API introduced in Mac OS X 10.5, CFLocaleCopyCurrent,
- dnl because in macOS 10.13.4 it has the following behaviour:
- dnl When two or more languages are specified in the
- dnl "System Preferences > Language & Region > Preferred Languages" panel,
- dnl it returns en_CC where CC is the territory (even when English is not among
- dnl the preferred languages!). What we want instead is what
- dnl CFLocaleCopyCurrent returned in earlier macOS releases and what
- dnl CFPreferencesCopyAppValue still returns, namely ll_CC where ll is the
- dnl first among the preferred languages and CC is the territory.
- AC_CACHE_CHECK([for CFLocaleCopyPreferredLanguages], [gt_cv_func_CFLocaleCopyPreferredLanguages],
- [gt_save_LIBS="$LIBS"
- LIBS="$LIBS -Wl,-framework -Wl,CoreFoundation"
- AC_LINK_IFELSE(
- [AC_LANG_PROGRAM(
- [[#include <CoreFoundation/CFLocale.h>]],
- [[CFLocaleCopyPreferredLanguages();]])],
- [gt_cv_func_CFLocaleCopyPreferredLanguages=yes],
- [gt_cv_func_CFLocaleCopyPreferredLanguages=no])
- LIBS="$gt_save_LIBS"])
- if test $gt_cv_func_CFLocaleCopyPreferredLanguages = yes; then
- AC_DEFINE([HAVE_CFLOCALECOPYPREFERREDLANGUAGES], [1],
- [Define to 1 if you have the Mac OS X function CFLocaleCopyPreferredLanguages in the CoreFoundation framework.])
- fi
- INTL_MACOSX_LIBS=
- if test $gt_cv_func_CFPreferencesCopyAppValue = yes \
- || test $gt_cv_func_CFLocaleCopyPreferredLanguages = yes; then
- INTL_MACOSX_LIBS="-Wl,-framework -Wl,CoreFoundation"
- fi
- AC_SUBST([INTL_MACOSX_LIBS])
-])
diff --git a/meta/recipes-core/gettext/gettext-minimal-0.21/aclocal/lib-ld.m4 b/meta/recipes-core/gettext/gettext-minimal-0.21/aclocal/lib-ld.m4
deleted file mode 100644
index 98c348faff..0000000000
--- a/meta/recipes-core/gettext/gettext-minimal-0.21/aclocal/lib-ld.m4
+++ /dev/null
@@ -1,168 +0,0 @@
-# lib-ld.m4 serial 9
-dnl Copyright (C) 1996-2003, 2009-2020 Free Software Foundation, Inc.
-dnl This file is free software; the Free Software Foundation
-dnl gives unlimited permission to copy and/or distribute it,
-dnl with or without modifications, as long as this notice is preserved.
-
-dnl Subroutines of libtool.m4,
-dnl with replacements s/_*LT_PATH/AC_LIB_PROG/ and s/lt_/acl_/ to avoid
-dnl collision with libtool.m4.
-
-dnl From libtool-2.4. Sets the variable with_gnu_ld to yes or no.
-AC_DEFUN([AC_LIB_PROG_LD_GNU],
-[AC_CACHE_CHECK([if the linker ($LD) is GNU ld], [acl_cv_prog_gnu_ld],
-[# I'd rather use --version here, but apparently some GNU lds only accept -v.
-case `$LD -v 2>&1 </dev/null` in
-*GNU* | *'with BFD'*)
- acl_cv_prog_gnu_ld=yes
- ;;
-*)
- acl_cv_prog_gnu_ld=no
- ;;
-esac])
-with_gnu_ld=$acl_cv_prog_gnu_ld
-])
-
-dnl From libtool-2.4. Sets the variable LD.
-AC_DEFUN([AC_LIB_PROG_LD],
-[AC_REQUIRE([AC_PROG_CC])dnl
-AC_REQUIRE([AC_CANONICAL_HOST])dnl
-
-AC_ARG_WITH([gnu-ld],
- [AS_HELP_STRING([--with-gnu-ld],
- [assume the C compiler uses GNU ld [default=no]])],
- [test "$withval" = no || with_gnu_ld=yes],
- [with_gnu_ld=no])dnl
-
-# Prepare PATH_SEPARATOR.
-# The user is always right.
-if test "${PATH_SEPARATOR+set}" != set; then
- # Determine PATH_SEPARATOR by trying to find /bin/sh in a PATH which
- # contains only /bin. Note that ksh looks also at the FPATH variable,
- # so we have to set that as well for the test.
- PATH_SEPARATOR=:
- (PATH='/bin;/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 \
- && { (PATH='/bin:/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 \
- || PATH_SEPARATOR=';'
- }
-fi
-
-if test -n "$LD"; then
- AC_MSG_CHECKING([for ld])
-elif test "$GCC" = yes; then
- AC_MSG_CHECKING([for ld used by $CC])
-elif test "$with_gnu_ld" = yes; then
- AC_MSG_CHECKING([for GNU ld])
-else
- AC_MSG_CHECKING([for non-GNU ld])
-fi
-if test -n "$LD"; then
- # Let the user override the test with a path.
- :
-else
- AC_CACHE_VAL([acl_cv_path_LD],
- [
- acl_cv_path_LD= # Final result of this test
- ac_prog=ld # Program to search in $PATH
- if test "$GCC" = yes; then
- # Check if gcc -print-prog-name=ld gives a path.
- case $host in
- *-*-mingw*)
- # gcc leaves a trailing carriage return which upsets mingw
- acl_output=`($CC -print-prog-name=ld) 2>&5 | tr -d '\015'` ;;
- *)
- acl_output=`($CC -print-prog-name=ld) 2>&5` ;;
- esac
- case $acl_output in
- # Accept absolute paths.
- [[\\/]]* | ?:[[\\/]]*)
- re_direlt='/[[^/]][[^/]]*/\.\./'
- # Canonicalize the pathname of ld
- acl_output=`echo "$acl_output" | sed 's%\\\\%/%g'`
- while echo "$acl_output" | grep "$re_direlt" > /dev/null 2>&1; do
- acl_output=`echo $acl_output | sed "s%$re_direlt%/%"`
- done
- # Got the pathname. No search in PATH is needed.
- acl_cv_path_LD="$acl_output"
- ac_prog=
- ;;
- "")
- # If it fails, then pretend we aren't using GCC.
- ;;
- *)
- # If it is relative, then search for the first ld in PATH.
- with_gnu_ld=unknown
- ;;
- esac
- fi
- if test -n "$ac_prog"; then
- # Search for $ac_prog in $PATH.
- acl_save_ifs="$IFS"; IFS=$PATH_SEPARATOR
- for ac_dir in $PATH; do
- IFS="$acl_save_ifs"
- test -z "$ac_dir" && ac_dir=.
- if test -f "$ac_dir/$ac_prog" || test -f "$ac_dir/$ac_prog$ac_exeext"; then
- acl_cv_path_LD="$ac_dir/$ac_prog"
- # Check to see if the program is GNU ld. I'd rather use --version,
- # but apparently some variants of GNU ld only accept -v.
- # Break only if it was the GNU/non-GNU ld that we prefer.
- case `"$acl_cv_path_LD" -v 2>&1 </dev/null` in
- *GNU* | *'with BFD'*)
- test "$with_gnu_ld" != no && break
- ;;
- *)
- test "$with_gnu_ld" != yes && break
- ;;
- esac
- fi
- done
- IFS="$acl_save_ifs"
- fi
- case $host in
- *-*-aix*)
- AC_COMPILE_IFELSE(
- [AC_LANG_SOURCE(
- [[#if defined __powerpc64__ || defined _ARCH_PPC64
- int ok;
- #else
- error fail
- #endif
- ]])],
- [# The compiler produces 64-bit code. Add option '-b64' so that the
- # linker groks 64-bit object files.
- case "$acl_cv_path_LD " in
- *" -b64 "*) ;;
- *) acl_cv_path_LD="$acl_cv_path_LD -b64" ;;
- esac
- ], [])
- ;;
- sparc64-*-netbsd*)
- AC_COMPILE_IFELSE(
- [AC_LANG_SOURCE(
- [[#if defined __sparcv9 || defined __arch64__
- int ok;
- #else
- error fail
- #endif
- ]])],
- [],
- [# The compiler produces 32-bit code. Add option '-m elf32_sparc'
- # so that the linker groks 32-bit object files.
- case "$acl_cv_path_LD " in
- *" -m elf32_sparc "*) ;;
- *) acl_cv_path_LD="$acl_cv_path_LD -m elf32_sparc" ;;
- esac
- ])
- ;;
- esac
- ])
- LD="$acl_cv_path_LD"
-fi
-if test -n "$LD"; then
- AC_MSG_RESULT([$LD])
-else
- AC_MSG_RESULT([no])
- AC_MSG_ERROR([no acceptable ld found in \$PATH])
-fi
-AC_LIB_PROG_LD_GNU
-])
diff --git a/meta/recipes-core/gettext/gettext-minimal-0.21/aclocal/lib-link.m4 b/meta/recipes-core/gettext/gettext-minimal-0.21/aclocal/lib-link.m4
deleted file mode 100644
index eecf70ec76..0000000000
--- a/meta/recipes-core/gettext/gettext-minimal-0.21/aclocal/lib-link.m4
+++ /dev/null
@@ -1,800 +0,0 @@
-# lib-link.m4 serial 31
-dnl Copyright (C) 2001-2020 Free Software Foundation, Inc.
-dnl This file is free software; the Free Software Foundation
-dnl gives unlimited permission to copy and/or distribute it,
-dnl with or without modifications, as long as this notice is preserved.
-
-dnl From Bruno Haible.
-
-AC_PREREQ([2.61])
-
-dnl AC_LIB_LINKFLAGS(name [, dependencies]) searches for libname and
-dnl the libraries corresponding to explicit and implicit dependencies.
-dnl Sets and AC_SUBSTs the LIB${NAME} and LTLIB${NAME} variables and
-dnl augments the CPPFLAGS variable.
-dnl Sets and AC_SUBSTs the LIB${NAME}_PREFIX variable to nonempty if libname
-dnl was found in ${LIB${NAME}_PREFIX}/$acl_libdirstem.
-AC_DEFUN([AC_LIB_LINKFLAGS],
-[
- AC_REQUIRE([AC_LIB_PREPARE_PREFIX])
- AC_REQUIRE([AC_LIB_RPATH])
- pushdef([Name],[m4_translit([$1],[./+-], [____])])
- pushdef([NAME],[m4_translit([$1],[abcdefghijklmnopqrstuvwxyz./+-],
- [ABCDEFGHIJKLMNOPQRSTUVWXYZ____])])
- AC_CACHE_CHECK([how to link with lib[]$1], [ac_cv_lib[]Name[]_libs], [
- AC_LIB_LINKFLAGS_BODY([$1], [$2])
- ac_cv_lib[]Name[]_libs="$LIB[]NAME"
- ac_cv_lib[]Name[]_ltlibs="$LTLIB[]NAME"
- ac_cv_lib[]Name[]_cppflags="$INC[]NAME"
- ac_cv_lib[]Name[]_prefix="$LIB[]NAME[]_PREFIX"
- ])
- LIB[]NAME="$ac_cv_lib[]Name[]_libs"
- LTLIB[]NAME="$ac_cv_lib[]Name[]_ltlibs"
- INC[]NAME="$ac_cv_lib[]Name[]_cppflags"
- LIB[]NAME[]_PREFIX="$ac_cv_lib[]Name[]_prefix"
- AC_LIB_APPENDTOVAR([CPPFLAGS], [$INC]NAME)
- AC_SUBST([LIB]NAME)
- AC_SUBST([LTLIB]NAME)
- AC_SUBST([LIB]NAME[_PREFIX])
- dnl Also set HAVE_LIB[]NAME so that AC_LIB_HAVE_LINKFLAGS can reuse the
- dnl results of this search when this library appears as a dependency.
- HAVE_LIB[]NAME=yes
- popdef([NAME])
- popdef([Name])
-])
-
-dnl AC_LIB_HAVE_LINKFLAGS(name, dependencies, includes, testcode, [missing-message])
-dnl searches for libname and the libraries corresponding to explicit and
-dnl implicit dependencies, together with the specified include files and
-dnl the ability to compile and link the specified testcode. The missing-message
-dnl defaults to 'no' and may contain additional hints for the user.
-dnl If found, it sets and AC_SUBSTs HAVE_LIB${NAME}=yes and the LIB${NAME}
-dnl and LTLIB${NAME} variables and augments the CPPFLAGS variable, and
-dnl #defines HAVE_LIB${NAME} to 1. Otherwise, it sets and AC_SUBSTs
-dnl HAVE_LIB${NAME}=no and LIB${NAME} and LTLIB${NAME} to empty.
-dnl Sets and AC_SUBSTs the LIB${NAME}_PREFIX variable to nonempty if libname
-dnl was found in ${LIB${NAME}_PREFIX}/$acl_libdirstem.
-AC_DEFUN([AC_LIB_HAVE_LINKFLAGS],
-[
- AC_REQUIRE([AC_LIB_PREPARE_PREFIX])
- AC_REQUIRE([AC_LIB_RPATH])
- pushdef([Name],[m4_translit([$1],[./+-], [____])])
- pushdef([NAME],[m4_translit([$1],[abcdefghijklmnopqrstuvwxyz./+-],
- [ABCDEFGHIJKLMNOPQRSTUVWXYZ____])])
-
- dnl Search for lib[]Name and define LIB[]NAME, LTLIB[]NAME and INC[]NAME
- dnl accordingly.
- AC_LIB_LINKFLAGS_BODY([$1], [$2])
-
- dnl Add $INC[]NAME to CPPFLAGS before performing the following checks,
- dnl because if the user has installed lib[]Name and not disabled its use
- dnl via --without-lib[]Name-prefix, he wants to use it.
- ac_save_CPPFLAGS="$CPPFLAGS"
- AC_LIB_APPENDTOVAR([CPPFLAGS], [$INC]NAME)
-
- AC_CACHE_CHECK([for lib[]$1], [ac_cv_lib[]Name], [
- ac_save_LIBS="$LIBS"
- dnl If $LIB[]NAME contains some -l options, add it to the end of LIBS,
- dnl because these -l options might require -L options that are present in
- dnl LIBS. -l options benefit only from the -L options listed before it.
- dnl Otherwise, add it to the front of LIBS, because it may be a static
- dnl library that depends on another static library that is present in LIBS.
- dnl Static libraries benefit only from the static libraries listed after
- dnl it.
- case " $LIB[]NAME" in
- *" -l"*) LIBS="$LIBS $LIB[]NAME" ;;
- *) LIBS="$LIB[]NAME $LIBS" ;;
- esac
- AC_LINK_IFELSE(
- [AC_LANG_PROGRAM([[$3]], [[$4]])],
- [ac_cv_lib[]Name=yes],
- [ac_cv_lib[]Name='m4_if([$5], [], [no], [[$5]])'])
- LIBS="$ac_save_LIBS"
- ])
- if test "$ac_cv_lib[]Name" = yes; then
- HAVE_LIB[]NAME=yes
- AC_DEFINE([HAVE_LIB]NAME, 1, [Define if you have the lib][$1 library.])
- AC_MSG_CHECKING([how to link with lib[]$1])
- AC_MSG_RESULT([$LIB[]NAME])
- else
- HAVE_LIB[]NAME=no
- dnl If $LIB[]NAME didn't lead to a usable library, we don't need
- dnl $INC[]NAME either.
- CPPFLAGS="$ac_save_CPPFLAGS"
- LIB[]NAME=
- LTLIB[]NAME=
- LIB[]NAME[]_PREFIX=
- fi
- AC_SUBST([HAVE_LIB]NAME)
- AC_SUBST([LIB]NAME)
- AC_SUBST([LTLIB]NAME)
- AC_SUBST([LIB]NAME[_PREFIX])
- popdef([NAME])
- popdef([Name])
-])
-
-dnl Determine the platform dependent parameters needed to use rpath:
-dnl acl_libext,
-dnl acl_shlibext,
-dnl acl_libname_spec,
-dnl acl_library_names_spec,
-dnl acl_hardcode_libdir_flag_spec,
-dnl acl_hardcode_libdir_separator,
-dnl acl_hardcode_direct,
-dnl acl_hardcode_minus_L.
-AC_DEFUN([AC_LIB_RPATH],
-[
- dnl Complain if config.rpath is missing.
- AC_REQUIRE_AUX_FILE([config.rpath])
- AC_REQUIRE([AC_PROG_CC]) dnl we use $CC, $GCC, $LDFLAGS
- AC_REQUIRE([AC_LIB_PROG_LD]) dnl we use $LD, $with_gnu_ld
- AC_REQUIRE([AC_CANONICAL_HOST]) dnl we use $host
- AC_REQUIRE([AC_CONFIG_AUX_DIR_DEFAULT]) dnl we use $ac_aux_dir
- AC_CACHE_CHECK([for shared library run path origin], [acl_cv_rpath], [
- CC="$CC" GCC="$GCC" LDFLAGS="$LDFLAGS" LD="$LD" with_gnu_ld="$with_gnu_ld" \
- ${CONFIG_SHELL-/bin/sh} "$ac_aux_dir/config.rpath" "$host" > conftest.sh
- . ./conftest.sh
- rm -f ./conftest.sh
- acl_cv_rpath=done
- ])
- wl="$acl_cv_wl"
- acl_libext="$acl_cv_libext"
- acl_shlibext="$acl_cv_shlibext"
- acl_libname_spec="$acl_cv_libname_spec"
- acl_library_names_spec="$acl_cv_library_names_spec"
- acl_hardcode_libdir_flag_spec="$acl_cv_hardcode_libdir_flag_spec"
- acl_hardcode_libdir_separator="$acl_cv_hardcode_libdir_separator"
- acl_hardcode_direct="$acl_cv_hardcode_direct"
- acl_hardcode_minus_L="$acl_cv_hardcode_minus_L"
- dnl Determine whether the user wants rpath handling at all.
- AC_ARG_ENABLE([rpath],
- [ --disable-rpath do not hardcode runtime library paths],
- :, enable_rpath=yes)
-])
-
-dnl AC_LIB_FROMPACKAGE(name, package)
-dnl declares that libname comes from the given package. The configure file
-dnl will then not have a --with-libname-prefix option but a
-dnl --with-package-prefix option. Several libraries can come from the same
-dnl package. This declaration must occur before an AC_LIB_LINKFLAGS or similar
-dnl macro call that searches for libname.
-AC_DEFUN([AC_LIB_FROMPACKAGE],
-[
- pushdef([NAME],[m4_translit([$1],[abcdefghijklmnopqrstuvwxyz./+-],
- [ABCDEFGHIJKLMNOPQRSTUVWXYZ____])])
- define([acl_frompackage_]NAME, [$2])
- popdef([NAME])
- pushdef([PACK],[$2])
- pushdef([PACKUP],[m4_translit(PACK,[abcdefghijklmnopqrstuvwxyz./+-],
- [ABCDEFGHIJKLMNOPQRSTUVWXYZ____])])
- define([acl_libsinpackage_]PACKUP,
- m4_ifdef([acl_libsinpackage_]PACKUP, [m4_defn([acl_libsinpackage_]PACKUP)[, ]],)[lib$1])
- popdef([PACKUP])
- popdef([PACK])
-])
-
-dnl AC_LIB_LINKFLAGS_BODY(name [, dependencies]) searches for libname and
-dnl the libraries corresponding to explicit and implicit dependencies.
-dnl Sets the LIB${NAME}, LTLIB${NAME} and INC${NAME} variables.
-dnl Also, sets the LIB${NAME}_PREFIX variable to nonempty if libname was found
-dnl in ${LIB${NAME}_PREFIX}/$acl_libdirstem.
-AC_DEFUN([AC_LIB_LINKFLAGS_BODY],
-[
- AC_REQUIRE([AC_LIB_PREPARE_MULTILIB])
- pushdef([NAME],[m4_translit([$1],[abcdefghijklmnopqrstuvwxyz./+-],
- [ABCDEFGHIJKLMNOPQRSTUVWXYZ____])])
- pushdef([PACK],[m4_ifdef([acl_frompackage_]NAME, [acl_frompackage_]NAME, lib[$1])])
- pushdef([PACKUP],[m4_translit(PACK,[abcdefghijklmnopqrstuvwxyz./+-],
- [ABCDEFGHIJKLMNOPQRSTUVWXYZ____])])
- pushdef([PACKLIBS],[m4_ifdef([acl_frompackage_]NAME, [acl_libsinpackage_]PACKUP, lib[$1])])
- dnl By default, look in $includedir and $libdir.
- use_additional=yes
- AC_LIB_WITH_FINAL_PREFIX([
- eval additional_includedir=\"$includedir\"
- eval additional_libdir=\"$libdir\"
- eval additional_libdir2=\"$exec_prefix/$acl_libdirstem2\"
- eval additional_libdir3=\"$exec_prefix/$acl_libdirstem3\"
- ])
- AC_ARG_WITH(PACK[-prefix],
-[[ --with-]]PACK[[-prefix[=DIR] search for ]PACKLIBS[ in DIR/include and DIR/lib
- --without-]]PACK[[-prefix don't search for ]PACKLIBS[ in includedir and libdir]],
-[
- if test "X$withval" = "Xno"; then
- use_additional=no
- else
- if test "X$withval" = "X"; then
- AC_LIB_WITH_FINAL_PREFIX([
- eval additional_includedir=\"$includedir\"
- eval additional_libdir=\"$libdir\"
- eval additional_libdir2=\"$exec_prefix/$acl_libdirstem2\"
- eval additional_libdir3=\"$exec_prefix/$acl_libdirstem3\"
- ])
- else
- additional_includedir="$withval/include"
- additional_libdir="$withval/$acl_libdirstem"
- additional_libdir2="$withval/$acl_libdirstem2"
- additional_libdir3="$withval/$acl_libdirstem3"
- fi
- fi
-])
- if test "X$additional_libdir2" = "X$additional_libdir"; then
- additional_libdir2=
- fi
- if test "X$additional_libdir3" = "X$additional_libdir"; then
- additional_libdir3=
- fi
- dnl Search the library and its dependencies in $additional_libdir and
- dnl $LDFLAGS. Using breadth-first-seach.
- LIB[]NAME=
- LTLIB[]NAME=
- INC[]NAME=
- LIB[]NAME[]_PREFIX=
- dnl HAVE_LIB${NAME} is an indicator that LIB${NAME}, LTLIB${NAME} have been
- dnl computed. So it has to be reset here.
- HAVE_LIB[]NAME=
- rpathdirs=
- ltrpathdirs=
- names_already_handled=
- names_next_round='$1 $2'
- while test -n "$names_next_round"; do
- names_this_round="$names_next_round"
- names_next_round=
- for name in $names_this_round; do
- already_handled=
- for n in $names_already_handled; do
- if test "$n" = "$name"; then
- already_handled=yes
- break
- fi
- done
- if test -z "$already_handled"; then
- names_already_handled="$names_already_handled $name"
- dnl See if it was already located by an earlier AC_LIB_LINKFLAGS
- dnl or AC_LIB_HAVE_LINKFLAGS call.
- uppername=`echo "$name" | sed -e 'y|abcdefghijklmnopqrstuvwxyz./+-|ABCDEFGHIJKLMNOPQRSTUVWXYZ____|'`
- eval value=\"\$HAVE_LIB$uppername\"
- if test -n "$value"; then
- if test "$value" = yes; then
- eval value=\"\$LIB$uppername\"
- test -z "$value" || LIB[]NAME="${LIB[]NAME}${LIB[]NAME:+ }$value"
- eval value=\"\$LTLIB$uppername\"
- test -z "$value" || LTLIB[]NAME="${LTLIB[]NAME}${LTLIB[]NAME:+ }$value"
- else
- dnl An earlier call to AC_LIB_HAVE_LINKFLAGS has determined
- dnl that this library doesn't exist. So just drop it.
- :
- fi
- else
- dnl Search the library lib$name in $additional_libdir and $LDFLAGS
- dnl and the already constructed $LIBNAME/$LTLIBNAME.
- found_dir=
- found_la=
- found_so=
- found_a=
- eval libname=\"$acl_libname_spec\" # typically: libname=lib$name
- if test -n "$acl_shlibext"; then
- shrext=".$acl_shlibext" # typically: shrext=.so
- else
- shrext=
- fi
- if test $use_additional = yes; then
- for additional_libdir_variable in additional_libdir additional_libdir2 additional_libdir3; do
- if test "X$found_dir" = "X"; then
- eval dir=\$$additional_libdir_variable
- if test -n "$dir"; then
- dnl The same code as in the loop below:
- dnl First look for a shared library.
- if test -n "$acl_shlibext"; then
- if test -f "$dir/$libname$shrext" && acl_is_expected_elfclass < "$dir/$libname$shrext"; then
- found_dir="$dir"
- found_so="$dir/$libname$shrext"
- else
- if test "$acl_library_names_spec" = '$libname$shrext$versuffix'; then
- ver=`(cd "$dir" && \
- for f in "$libname$shrext".*; do echo "$f"; done \
- | sed -e "s,^$libname$shrext\\\\.,," \
- | sort -t '.' -n -r -k1,1 -k2,2 -k3,3 -k4,4 -k5,5 \
- | sed 1q ) 2>/dev/null`
- if test -n "$ver" && test -f "$dir/$libname$shrext.$ver" && acl_is_expected_elfclass < "$dir/$libname$shrext.$ver"; then
- found_dir="$dir"
- found_so="$dir/$libname$shrext.$ver"
- fi
- else
- eval library_names=\"$acl_library_names_spec\"
- for f in $library_names; do
- if test -f "$dir/$f" && acl_is_expected_elfclass < "$dir/$f"; then
- found_dir="$dir"
- found_so="$dir/$f"
- break
- fi
- done
- fi
- fi
- fi
- dnl Then look for a static library.
- if test "X$found_dir" = "X"; then
- if test -f "$dir/$libname.$acl_libext" && ${AR-ar} -p "$dir/$libname.$acl_libext" | acl_is_expected_elfclass; then
- found_dir="$dir"
- found_a="$dir/$libname.$acl_libext"
- fi
- fi
- if test "X$found_dir" != "X"; then
- if test -f "$dir/$libname.la"; then
- found_la="$dir/$libname.la"
- fi
- fi
- fi
- fi
- done
- fi
- if test "X$found_dir" = "X"; then
- for x in $LDFLAGS $LTLIB[]NAME; do
- AC_LIB_WITH_FINAL_PREFIX([eval x=\"$x\"])
- case "$x" in
- -L*)
- dir=`echo "X$x" | sed -e 's/^X-L//'`
- dnl First look for a shared library.
- if test -n "$acl_shlibext"; then
- if test -f "$dir/$libname$shrext" && acl_is_expected_elfclass < "$dir/$libname$shrext"; then
- found_dir="$dir"
- found_so="$dir/$libname$shrext"
- else
- if test "$acl_library_names_spec" = '$libname$shrext$versuffix'; then
- ver=`(cd "$dir" && \
- for f in "$libname$shrext".*; do echo "$f"; done \
- | sed -e "s,^$libname$shrext\\\\.,," \
- | sort -t '.' -n -r -k1,1 -k2,2 -k3,3 -k4,4 -k5,5 \
- | sed 1q ) 2>/dev/null`
- if test -n "$ver" && test -f "$dir/$libname$shrext.$ver" && acl_is_expected_elfclass < "$dir/$libname$shrext.$ver"; then
- found_dir="$dir"
- found_so="$dir/$libname$shrext.$ver"
- fi
- else
- eval library_names=\"$acl_library_names_spec\"
- for f in $library_names; do
- if test -f "$dir/$f" && acl_is_expected_elfclass < "$dir/$f"; then
- found_dir="$dir"
- found_so="$dir/$f"
- break
- fi
- done
- fi
- fi
- fi
- dnl Then look for a static library.
- if test "X$found_dir" = "X"; then
- if test -f "$dir/$libname.$acl_libext" && ${AR-ar} -p "$dir/$libname.$acl_libext" | acl_is_expected_elfclass; then
- found_dir="$dir"
- found_a="$dir/$libname.$acl_libext"
- fi
- fi
- if test "X$found_dir" != "X"; then
- if test -f "$dir/$libname.la"; then
- found_la="$dir/$libname.la"
- fi
- fi
- ;;
- esac
- if test "X$found_dir" != "X"; then
- break
- fi
- done
- fi
- if test "X$found_dir" != "X"; then
- dnl Found the library.
- LTLIB[]NAME="${LTLIB[]NAME}${LTLIB[]NAME:+ }-L$found_dir -l$name"
- if test "X$found_so" != "X"; then
- dnl Linking with a shared library. We attempt to hardcode its
- dnl directory into the executable's runpath, unless it's the
- dnl standard /usr/lib.
- if test "$enable_rpath" = no \
- || test "X$found_dir" = "X/usr/$acl_libdirstem" \
- || test "X$found_dir" = "X/usr/$acl_libdirstem2" \
- || test "X$found_dir" = "X/usr/$acl_libdirstem3"; then
- dnl No hardcoding is needed.
- LIB[]NAME="${LIB[]NAME}${LIB[]NAME:+ }$found_so"
- else
- dnl Use an explicit option to hardcode DIR into the resulting
- dnl binary.
- dnl Potentially add DIR to ltrpathdirs.
- dnl The ltrpathdirs will be appended to $LTLIBNAME at the end.
- haveit=
- for x in $ltrpathdirs; do
- if test "X$x" = "X$found_dir"; then
- haveit=yes
- break
- fi
- done
- if test -z "$haveit"; then
- ltrpathdirs="$ltrpathdirs $found_dir"
- fi
- dnl The hardcoding into $LIBNAME is system dependent.
- if test "$acl_hardcode_direct" = yes; then
- dnl Using DIR/libNAME.so during linking hardcodes DIR into the
- dnl resulting binary.
- LIB[]NAME="${LIB[]NAME}${LIB[]NAME:+ }$found_so"
- else
- if test -n "$acl_hardcode_libdir_flag_spec" && test "$acl_hardcode_minus_L" = no; then
- dnl Use an explicit option to hardcode DIR into the resulting
- dnl binary.
- LIB[]NAME="${LIB[]NAME}${LIB[]NAME:+ }$found_so"
- dnl Potentially add DIR to rpathdirs.
- dnl The rpathdirs will be appended to $LIBNAME at the end.
- haveit=
- for x in $rpathdirs; do
- if test "X$x" = "X$found_dir"; then
- haveit=yes
- break
- fi
- done
- if test -z "$haveit"; then
- rpathdirs="$rpathdirs $found_dir"
- fi
- else
- dnl Rely on "-L$found_dir".
- dnl But don't add it if it's already contained in the LDFLAGS
- dnl or the already constructed $LIBNAME
- haveit=
- for x in $LDFLAGS $LIB[]NAME; do
- AC_LIB_WITH_FINAL_PREFIX([eval x=\"$x\"])
- if test "X$x" = "X-L$found_dir"; then
- haveit=yes
- break
- fi
- done
- if test -z "$haveit"; then
- LIB[]NAME="${LIB[]NAME}${LIB[]NAME:+ }-L$found_dir"
- fi
- if test "$acl_hardcode_minus_L" != no; then
- dnl FIXME: Not sure whether we should use
- dnl "-L$found_dir -l$name" or "-L$found_dir $found_so"
- dnl here.
- LIB[]NAME="${LIB[]NAME}${LIB[]NAME:+ }$found_so"
- else
- dnl We cannot use $acl_hardcode_runpath_var and LD_RUN_PATH
- dnl here, because this doesn't fit in flags passed to the
- dnl compiler. So give up. No hardcoding. This affects only
- dnl very old systems.
- dnl FIXME: Not sure whether we should use
- dnl "-L$found_dir -l$name" or "-L$found_dir $found_so"
- dnl here.
- LIB[]NAME="${LIB[]NAME}${LIB[]NAME:+ }-l$name"
- fi
- fi
- fi
- fi
- else
- if test "X$found_a" != "X"; then
- dnl Linking with a static library.
- LIB[]NAME="${LIB[]NAME}${LIB[]NAME:+ }$found_a"
- else
- dnl We shouldn't come here, but anyway it's good to have a
- dnl fallback.
- LIB[]NAME="${LIB[]NAME}${LIB[]NAME:+ }-L$found_dir -l$name"
- fi
- fi
- dnl Assume the include files are nearby.
- additional_includedir=
- case "$found_dir" in
- */$acl_libdirstem | */$acl_libdirstem/)
- basedir=`echo "X$found_dir" | sed -e 's,^X,,' -e "s,/$acl_libdirstem/"'*$,,'`
- if test "$name" = '$1'; then
- LIB[]NAME[]_PREFIX="$basedir"
- fi
- additional_includedir="$basedir/include"
- ;;
- */$acl_libdirstem2 | */$acl_libdirstem2/)
- basedir=`echo "X$found_dir" | sed -e 's,^X,,' -e "s,/$acl_libdirstem2/"'*$,,'`
- if test "$name" = '$1'; then
- LIB[]NAME[]_PREFIX="$basedir"
- fi
- additional_includedir="$basedir/include"
- ;;
- */$acl_libdirstem3 | */$acl_libdirstem3/)
- basedir=`echo "X$found_dir" | sed -e 's,^X,,' -e "s,/$acl_libdirstem3/"'*$,,'`
- if test "$name" = '$1'; then
- LIB[]NAME[]_PREFIX="$basedir"
- fi
- additional_includedir="$basedir/include"
- ;;
- esac
- if test "X$additional_includedir" != "X"; then
- dnl Potentially add $additional_includedir to $INCNAME.
- dnl But don't add it
- dnl 1. if it's the standard /usr/include,
- dnl 2. if it's /usr/local/include and we are using GCC on Linux,
- dnl 3. if it's already present in $CPPFLAGS or the already
- dnl constructed $INCNAME,
- dnl 4. if it doesn't exist as a directory.
- if test "X$additional_includedir" != "X/usr/include"; then
- haveit=
- if test "X$additional_includedir" = "X/usr/local/include"; then
- if test -n "$GCC"; then
- case $host_os in
- linux* | gnu* | k*bsd*-gnu) haveit=yes;;
- esac
- fi
- fi
- if test -z "$haveit"; then
- for x in $CPPFLAGS $INC[]NAME; do
- AC_LIB_WITH_FINAL_PREFIX([eval x=\"$x\"])
- if test "X$x" = "X-I$additional_includedir"; then
- haveit=yes
- break
- fi
- done
- if test -z "$haveit"; then
- if test -d "$additional_includedir"; then
- dnl Really add $additional_includedir to $INCNAME.
- INC[]NAME="${INC[]NAME}${INC[]NAME:+ }-I$additional_includedir"
- fi
- fi
- fi
- fi
- fi
- dnl Look for dependencies.
- if test -n "$found_la"; then
- dnl Read the .la file. It defines the variables
- dnl dlname, library_names, old_library, dependency_libs, current,
- dnl age, revision, installed, dlopen, dlpreopen, libdir.
- save_libdir="$libdir"
- case "$found_la" in
- */* | *\\*) . "$found_la" ;;
- *) . "./$found_la" ;;
- esac
- libdir="$save_libdir"
- dnl We use only dependency_libs.
- for dep in $dependency_libs; do
- case "$dep" in
- -L*)
- dependency_libdir=`echo "X$dep" | sed -e 's/^X-L//'`
- dnl Potentially add $dependency_libdir to $LIBNAME and $LTLIBNAME.
- dnl But don't add it
- dnl 1. if it's the standard /usr/lib,
- dnl 2. if it's /usr/local/lib and we are using GCC on Linux,
- dnl 3. if it's already present in $LDFLAGS or the already
- dnl constructed $LIBNAME,
- dnl 4. if it doesn't exist as a directory.
- if test "X$dependency_libdir" != "X/usr/$acl_libdirstem" \
- && test "X$dependency_libdir" != "X/usr/$acl_libdirstem2" \
- && test "X$dependency_libdir" != "X/usr/$acl_libdirstem3"; then
- haveit=
- if test "X$dependency_libdir" = "X/usr/local/$acl_libdirstem" \
- || test "X$dependency_libdir" = "X/usr/local/$acl_libdirstem2" \
- || test "X$dependency_libdir" = "X/usr/local/$acl_libdirstem3"; then
- if test -n "$GCC"; then
- case $host_os in
- linux* | gnu* | k*bsd*-gnu) haveit=yes;;
- esac
- fi
- fi
- if test -z "$haveit"; then
- haveit=
- for x in $LDFLAGS $LIB[]NAME; do
- AC_LIB_WITH_FINAL_PREFIX([eval x=\"$x\"])
- if test "X$x" = "X-L$dependency_libdir"; then
- haveit=yes
- break
- fi
- done
- if test -z "$haveit"; then
- if test -d "$dependency_libdir"; then
- dnl Really add $dependency_libdir to $LIBNAME.
- LIB[]NAME="${LIB[]NAME}${LIB[]NAME:+ }-L$dependency_libdir"
- fi
- fi
- haveit=
- for x in $LDFLAGS $LTLIB[]NAME; do
- AC_LIB_WITH_FINAL_PREFIX([eval x=\"$x\"])
- if test "X$x" = "X-L$dependency_libdir"; then
- haveit=yes
- break
- fi
- done
- if test -z "$haveit"; then
- if test -d "$dependency_libdir"; then
- dnl Really add $dependency_libdir to $LTLIBNAME.
- LTLIB[]NAME="${LTLIB[]NAME}${LTLIB[]NAME:+ }-L$dependency_libdir"
- fi
- fi
- fi
- fi
- ;;
- -R*)
- dir=`echo "X$dep" | sed -e 's/^X-R//'`
- if test "$enable_rpath" != no; then
- dnl Potentially add DIR to rpathdirs.
- dnl The rpathdirs will be appended to $LIBNAME at the end.
- haveit=
- for x in $rpathdirs; do
- if test "X$x" = "X$dir"; then
- haveit=yes
- break
- fi
- done
- if test -z "$haveit"; then
- rpathdirs="$rpathdirs $dir"
- fi
- dnl Potentially add DIR to ltrpathdirs.
- dnl The ltrpathdirs will be appended to $LTLIBNAME at the end.
- haveit=
- for x in $ltrpathdirs; do
- if test "X$x" = "X$dir"; then
- haveit=yes
- break
- fi
- done
- if test -z "$haveit"; then
- ltrpathdirs="$ltrpathdirs $dir"
- fi
- fi
- ;;
- -l*)
- dnl Handle this in the next round.
- names_next_round="$names_next_round "`echo "X$dep" | sed -e 's/^X-l//'`
- ;;
- *.la)
- dnl Handle this in the next round. Throw away the .la's
- dnl directory; it is already contained in a preceding -L
- dnl option.
- names_next_round="$names_next_round "`echo "X$dep" | sed -e 's,^X.*/,,' -e 's,^lib,,' -e 's,\.la$,,'`
- ;;
- *)
- dnl Most likely an immediate library name.
- LIB[]NAME="${LIB[]NAME}${LIB[]NAME:+ }$dep"
- LTLIB[]NAME="${LTLIB[]NAME}${LTLIB[]NAME:+ }$dep"
- ;;
- esac
- done
- fi
- else
- dnl Didn't find the library; assume it is in the system directories
- dnl known to the linker and runtime loader. (All the system
- dnl directories known to the linker should also be known to the
- dnl runtime loader, otherwise the system is severely misconfigured.)
- LIB[]NAME="${LIB[]NAME}${LIB[]NAME:+ }-l$name"
- LTLIB[]NAME="${LTLIB[]NAME}${LTLIB[]NAME:+ }-l$name"
- fi
- fi
- fi
- done
- done
- if test "X$rpathdirs" != "X"; then
- if test -n "$acl_hardcode_libdir_separator"; then
- dnl Weird platform: only the last -rpath option counts, the user must
- dnl pass all path elements in one option. We can arrange that for a
- dnl single library, but not when more than one $LIBNAMEs are used.
- alldirs=
- for found_dir in $rpathdirs; do
- alldirs="${alldirs}${alldirs:+$acl_hardcode_libdir_separator}$found_dir"
- done
- dnl Note: acl_hardcode_libdir_flag_spec uses $libdir and $wl.
- acl_save_libdir="$libdir"
- libdir="$alldirs"
- eval flag=\"$acl_hardcode_libdir_flag_spec\"
- libdir="$acl_save_libdir"
- LIB[]NAME="${LIB[]NAME}${LIB[]NAME:+ }$flag"
- else
- dnl The -rpath options are cumulative.
- for found_dir in $rpathdirs; do
- acl_save_libdir="$libdir"
- libdir="$found_dir"
- eval flag=\"$acl_hardcode_libdir_flag_spec\"
- libdir="$acl_save_libdir"
- LIB[]NAME="${LIB[]NAME}${LIB[]NAME:+ }$flag"
- done
- fi
- fi
- if test "X$ltrpathdirs" != "X"; then
- dnl When using libtool, the option that works for both libraries and
- dnl executables is -R. The -R options are cumulative.
- for found_dir in $ltrpathdirs; do
- LTLIB[]NAME="${LTLIB[]NAME}${LTLIB[]NAME:+ }-R$found_dir"
- done
- fi
- popdef([PACKLIBS])
- popdef([PACKUP])
- popdef([PACK])
- popdef([NAME])
-])
-
-dnl AC_LIB_APPENDTOVAR(VAR, CONTENTS) appends the elements of CONTENTS to VAR,
-dnl unless already present in VAR.
-dnl Works only for CPPFLAGS, not for LIB* variables because that sometimes
-dnl contains two or three consecutive elements that belong together.
-AC_DEFUN([AC_LIB_APPENDTOVAR],
-[
- for element in [$2]; do
- haveit=
- for x in $[$1]; do
- AC_LIB_WITH_FINAL_PREFIX([eval x=\"$x\"])
- if test "X$x" = "X$element"; then
- haveit=yes
- break
- fi
- done
- if test -z "$haveit"; then
- [$1]="${[$1]}${[$1]:+ }$element"
- fi
- done
-])
-
-dnl For those cases where a variable contains several -L and -l options
-dnl referring to unknown libraries and directories, this macro determines the
-dnl necessary additional linker options for the runtime path.
-dnl AC_LIB_LINKFLAGS_FROM_LIBS([LDADDVAR], [LIBSVALUE], [USE-LIBTOOL])
-dnl sets LDADDVAR to linker options needed together with LIBSVALUE.
-dnl If USE-LIBTOOL evaluates to non-empty, linking with libtool is assumed,
-dnl otherwise linking without libtool is assumed.
-AC_DEFUN([AC_LIB_LINKFLAGS_FROM_LIBS],
-[
- AC_REQUIRE([AC_LIB_RPATH])
- AC_REQUIRE([AC_LIB_PREPARE_MULTILIB])
- $1=
- if test "$enable_rpath" != no; then
- if test -n "$acl_hardcode_libdir_flag_spec" && test "$acl_hardcode_minus_L" = no; then
- dnl Use an explicit option to hardcode directories into the resulting
- dnl binary.
- rpathdirs=
- next=
- for opt in $2; do
- if test -n "$next"; then
- dir="$next"
- dnl No need to hardcode the standard /usr/lib.
- if test "X$dir" != "X/usr/$acl_libdirstem" \
- && test "X$dir" != "X/usr/$acl_libdirstem2" \
- && test "X$dir" != "X/usr/$acl_libdirstem3"; then
- rpathdirs="$rpathdirs $dir"
- fi
- next=
- else
- case $opt in
- -L) next=yes ;;
- -L*) dir=`echo "X$opt" | sed -e 's,^X-L,,'`
- dnl No need to hardcode the standard /usr/lib.
- if test "X$dir" != "X/usr/$acl_libdirstem" \
- && test "X$dir" != "X/usr/$acl_libdirstem2" \
- && test "X$dir" != "X/usr/$acl_libdirstem3"; then
- rpathdirs="$rpathdirs $dir"
- fi
- next= ;;
- *) next= ;;
- esac
- fi
- done
- if test "X$rpathdirs" != "X"; then
- if test -n ""$3""; then
- dnl libtool is used for linking. Use -R options.
- for dir in $rpathdirs; do
- $1="${$1}${$1:+ }-R$dir"
- done
- else
- dnl The linker is used for linking directly.
- if test -n "$acl_hardcode_libdir_separator"; then
- dnl Weird platform: only the last -rpath option counts, the user
- dnl must pass all path elements in one option.
- alldirs=
- for dir in $rpathdirs; do
- alldirs="${alldirs}${alldirs:+$acl_hardcode_libdir_separator}$dir"
- done
- acl_save_libdir="$libdir"
- libdir="$alldirs"
- eval flag=\"$acl_hardcode_libdir_flag_spec\"
- libdir="$acl_save_libdir"
- $1="$flag"
- else
- dnl The -rpath options are cumulative.
- for dir in $rpathdirs; do
- acl_save_libdir="$libdir"
- libdir="$dir"
- eval flag=\"$acl_hardcode_libdir_flag_spec\"
- libdir="$acl_save_libdir"
- $1="${$1}${$1:+ }$flag"
- done
- fi
- fi
- fi
- fi
- fi
- AC_SUBST([$1])
-])
diff --git a/meta/recipes-core/gettext/gettext-minimal-0.21/aclocal/lib-prefix.m4 b/meta/recipes-core/gettext/gettext-minimal-0.21/aclocal/lib-prefix.m4
deleted file mode 100644
index c8a0b464c2..0000000000
--- a/meta/recipes-core/gettext/gettext-minimal-0.21/aclocal/lib-prefix.m4
+++ /dev/null
@@ -1,320 +0,0 @@
-# lib-prefix.m4 serial 17
-dnl Copyright (C) 2001-2005, 2008-2020 Free Software Foundation, Inc.
-dnl This file is free software; the Free Software Foundation
-dnl gives unlimited permission to copy and/or distribute it,
-dnl with or without modifications, as long as this notice is preserved.
-
-dnl From Bruno Haible.
-
-dnl AC_LIB_PREFIX adds to the CPPFLAGS and LDFLAGS the flags that are needed
-dnl to access previously installed libraries. The basic assumption is that
-dnl a user will want packages to use other packages he previously installed
-dnl with the same --prefix option.
-dnl This macro is not needed if only AC_LIB_LINKFLAGS is used to locate
-dnl libraries, but is otherwise very convenient.
-AC_DEFUN([AC_LIB_PREFIX],
-[
- AC_BEFORE([$0], [AC_LIB_LINKFLAGS])
- AC_REQUIRE([AC_PROG_CC])
- AC_REQUIRE([AC_CANONICAL_HOST])
- AC_REQUIRE([AC_LIB_PREPARE_MULTILIB])
- AC_REQUIRE([AC_LIB_PREPARE_PREFIX])
- dnl By default, look in $includedir and $libdir.
- use_additional=yes
- AC_LIB_WITH_FINAL_PREFIX([
- eval additional_includedir=\"$includedir\"
- eval additional_libdir=\"$libdir\"
- ])
- AC_ARG_WITH([lib-prefix],
-[[ --with-lib-prefix[=DIR] search for libraries in DIR/include and DIR/lib
- --without-lib-prefix don't search for libraries in includedir and libdir]],
-[
- if test "X$withval" = "Xno"; then
- use_additional=no
- else
- if test "X$withval" = "X"; then
- AC_LIB_WITH_FINAL_PREFIX([
- eval additional_includedir=\"$includedir\"
- eval additional_libdir=\"$libdir\"
- ])
- else
- additional_includedir="$withval/include"
- additional_libdir="$withval/$acl_libdirstem"
- fi
- fi
-])
- if test $use_additional = yes; then
- dnl Potentially add $additional_includedir to $CPPFLAGS.
- dnl But don't add it
- dnl 1. if it's the standard /usr/include,
- dnl 2. if it's already present in $CPPFLAGS,
- dnl 3. if it's /usr/local/include and we are using GCC on Linux,
- dnl 4. if it doesn't exist as a directory.
- if test "X$additional_includedir" != "X/usr/include"; then
- haveit=
- for x in $CPPFLAGS; do
- AC_LIB_WITH_FINAL_PREFIX([eval x=\"$x\"])
- if test "X$x" = "X-I$additional_includedir"; then
- haveit=yes
- break
- fi
- done
- if test -z "$haveit"; then
- if test "X$additional_includedir" = "X/usr/local/include"; then
- if test -n "$GCC"; then
- case $host_os in
- linux* | gnu* | k*bsd*-gnu) haveit=yes;;
- esac
- fi
- fi
- if test -z "$haveit"; then
- if test -d "$additional_includedir"; then
- dnl Really add $additional_includedir to $CPPFLAGS.
- CPPFLAGS="${CPPFLAGS}${CPPFLAGS:+ }-I$additional_includedir"
- fi
- fi
- fi
- fi
- dnl Potentially add $additional_libdir to $LDFLAGS.
- dnl But don't add it
- dnl 1. if it's the standard /usr/lib,
- dnl 2. if it's already present in $LDFLAGS,
- dnl 3. if it's /usr/local/lib and we are using GCC on Linux,
- dnl 4. if it doesn't exist as a directory.
- if test "X$additional_libdir" != "X/usr/$acl_libdirstem"; then
- haveit=
- for x in $LDFLAGS; do
- AC_LIB_WITH_FINAL_PREFIX([eval x=\"$x\"])
- if test "X$x" = "X-L$additional_libdir"; then
- haveit=yes
- break
- fi
- done
- if test -z "$haveit"; then
- if test "X$additional_libdir" = "X/usr/local/$acl_libdirstem"; then
- if test -n "$GCC"; then
- case $host_os in
- linux*) haveit=yes;;
- esac
- fi
- fi
- if test -z "$haveit"; then
- if test -d "$additional_libdir"; then
- dnl Really add $additional_libdir to $LDFLAGS.
- LDFLAGS="${LDFLAGS}${LDFLAGS:+ }-L$additional_libdir"
- fi
- fi
- fi
- fi
- fi
-])
-
-dnl AC_LIB_PREPARE_PREFIX creates variables acl_final_prefix,
-dnl acl_final_exec_prefix, containing the values to which $prefix and
-dnl $exec_prefix will expand at the end of the configure script.
-AC_DEFUN([AC_LIB_PREPARE_PREFIX],
-[
- dnl Unfortunately, prefix and exec_prefix get only finally determined
- dnl at the end of configure.
- if test "X$prefix" = "XNONE"; then
- acl_final_prefix="$ac_default_prefix"
- else
- acl_final_prefix="$prefix"
- fi
- if test "X$exec_prefix" = "XNONE"; then
- acl_final_exec_prefix='${prefix}'
- else
- acl_final_exec_prefix="$exec_prefix"
- fi
- acl_save_prefix="$prefix"
- prefix="$acl_final_prefix"
- eval acl_final_exec_prefix=\"$acl_final_exec_prefix\"
- prefix="$acl_save_prefix"
-])
-
-dnl AC_LIB_WITH_FINAL_PREFIX([statement]) evaluates statement, with the
-dnl variables prefix and exec_prefix bound to the values they will have
-dnl at the end of the configure script.
-AC_DEFUN([AC_LIB_WITH_FINAL_PREFIX],
-[
- acl_save_prefix="$prefix"
- prefix="$acl_final_prefix"
- acl_save_exec_prefix="$exec_prefix"
- exec_prefix="$acl_final_exec_prefix"
- $1
- exec_prefix="$acl_save_exec_prefix"
- prefix="$acl_save_prefix"
-])
-
-dnl AC_LIB_PREPARE_MULTILIB creates
-dnl - a function acl_is_expected_elfclass, that tests whether standard input
-dn; has a 32-bit or 64-bit ELF header, depending on the host CPU ABI,
-dnl - 3 variables acl_libdirstem, acl_libdirstem2, acl_libdirstem3, containing
-dnl the basename of the libdir to try in turn, either "lib" or "lib64" or
-dnl "lib/64" or "lib32" or "lib/sparcv9" or "lib/amd64" or similar.
-AC_DEFUN([AC_LIB_PREPARE_MULTILIB],
-[
- dnl There is no formal standard regarding lib, lib32, and lib64.
- dnl On most glibc systems, the current practice is that on a system supporting
- dnl 32-bit and 64-bit instruction sets or ABIs, 64-bit libraries go under
- dnl $prefix/lib64 and 32-bit libraries go under $prefix/lib. However, on
- dnl Arch Linux based distributions, it's the opposite: 32-bit libraries go
- dnl under $prefix/lib32 and 64-bit libraries go under $prefix/lib.
- dnl We determine the compiler's default mode by looking at the compiler's
- dnl library search path. If at least one of its elements ends in /lib64 or
- dnl points to a directory whose absolute pathname ends in /lib64, we use that
- dnl for 64-bit ABIs. Similarly for 32-bit ABIs. Otherwise we use the default,
- dnl namely "lib".
- dnl On Solaris systems, the current practice is that on a system supporting
- dnl 32-bit and 64-bit instruction sets or ABIs, 64-bit libraries go under
- dnl $prefix/lib/64 (which is a symlink to either $prefix/lib/sparcv9 or
- dnl $prefix/lib/amd64) and 32-bit libraries go under $prefix/lib.
- AC_REQUIRE([AC_CANONICAL_HOST])
- AC_REQUIRE([gl_HOST_CPU_C_ABI_32BIT])
-
- AC_CACHE_CHECK([for ELF binary format], [gl_cv_elf],
- [AC_EGREP_CPP([Extensible Linking Format],
- [#ifdef __ELF__
- Extensible Linking Format
- #endif
- ],
- [gl_cv_elf=yes],
- [gl_cv_elf=no])
- ])
- if test $gl_cv_elf; then
- # Extract the ELF class of a file (5th byte) in decimal.
- # Cf. https://en.wikipedia.org/wiki/Executable_and_Linkable_Format#File_header
- if od -A x < /dev/null >/dev/null 2>/dev/null; then
- # Use POSIX od.
- func_elfclass ()
- {
- od -A n -t d1 -j 4 -N 1
- }
- else
- # Use BSD hexdump.
- func_elfclass ()
- {
- dd bs=1 count=1 skip=4 2>/dev/null | hexdump -e '1/1 "%3d "'
- echo
- }
- fi
-changequote(,)dnl
- case $HOST_CPU_C_ABI_32BIT in
- yes)
- # 32-bit ABI.
- acl_is_expected_elfclass ()
- {
- test "`func_elfclass | sed -e 's/[ ]//g'`" = 1
- }
- ;;
- no)
- # 64-bit ABI.
- acl_is_expected_elfclass ()
- {
- test "`func_elfclass | sed -e 's/[ ]//g'`" = 2
- }
- ;;
- *)
- # Unknown.
- acl_is_expected_elfclass ()
- {
- :
- }
- ;;
- esac
-changequote([,])dnl
- else
- acl_is_expected_elfclass ()
- {
- :
- }
- fi
-
- dnl Allow the user to override the result by setting acl_cv_libdirstems.
- AC_CACHE_CHECK([for the common suffixes of directories in the library search path],
- [acl_cv_libdirstems],
- [dnl Try 'lib' first, because that's the default for libdir in GNU, see
- dnl <https://www.gnu.org/prep/standards/html_node/Directory-Variables.html>.
- acl_libdirstem=lib
- acl_libdirstem2=
- acl_libdirstem3=
- case "$host_os" in
- solaris*)
- dnl See Solaris 10 Software Developer Collection > Solaris 64-bit Developer's Guide > The Development Environment
- dnl <https://docs.oracle.com/cd/E19253-01/816-5138/dev-env/index.html>.
- dnl "Portable Makefiles should refer to any library directories using the 64 symbolic link."
- dnl But we want to recognize the sparcv9 or amd64 subdirectory also if the
- dnl symlink is missing, so we set acl_libdirstem2 too.
- if test $HOST_CPU_C_ABI_32BIT = no; then
- acl_libdirstem2=lib/64
- case "$host_cpu" in
- sparc*) acl_libdirstem3=lib/sparcv9 ;;
- i*86 | x86_64) acl_libdirstem3=lib/amd64 ;;
- esac
- fi
- ;;
- *)
- dnl If $CC generates code for a 32-bit ABI, the libraries are
- dnl surely under $prefix/lib or $prefix/lib32, not $prefix/lib64.
- dnl Similarly, if $CC generates code for a 64-bit ABI, the libraries
- dnl are surely under $prefix/lib or $prefix/lib64, not $prefix/lib32.
- dnl Find the compiler's search path. However, non-system compilers
- dnl sometimes have odd library search paths. But we can't simply invoke
- dnl '/usr/bin/gcc -print-search-dirs' because that would not take into
- dnl account the -m32/-m31 or -m64 options from the $CC or $CFLAGS.
- searchpath=`(LC_ALL=C $CC $CPPFLAGS $CFLAGS -print-search-dirs) 2>/dev/null \
- | sed -n -e 's,^libraries: ,,p' | sed -e 's,^=,,'`
- if test $HOST_CPU_C_ABI_32BIT != no; then
- # 32-bit or unknown ABI.
- if test -d /usr/lib32; then
- acl_libdirstem2=lib32
- fi
- fi
- if test $HOST_CPU_C_ABI_32BIT != yes; then
- # 64-bit or unknown ABI.
- if test -d /usr/lib64; then
- acl_libdirstem3=lib64
- fi
- fi
- if test -n "$searchpath"; then
- acl_save_IFS="${IFS= }"; IFS=":"
- for searchdir in $searchpath; do
- if test -d "$searchdir"; then
- case "$searchdir" in
- */lib32/ | */lib32 ) acl_libdirstem2=lib32 ;;
- */lib64/ | */lib64 ) acl_libdirstem3=lib64 ;;
- */../ | */.. )
- # Better ignore directories of this form. They are misleading.
- ;;
- *) searchdir=`cd "$searchdir" && pwd`
- case "$searchdir" in
- */lib32 ) acl_libdirstem2=lib32 ;;
- */lib64 ) acl_libdirstem3=lib64 ;;
- esac ;;
- esac
- fi
- done
- IFS="$acl_save_IFS"
- if test $HOST_CPU_C_ABI_32BIT = yes; then
- # 32-bit ABI.
- acl_libdirstem3=
- fi
- if test $HOST_CPU_C_ABI_32BIT = no; then
- # 64-bit ABI.
- acl_libdirstem2=
- fi
- fi
- ;;
- esac
- test -n "$acl_libdirstem2" || acl_libdirstem2="$acl_libdirstem"
- test -n "$acl_libdirstem3" || acl_libdirstem3="$acl_libdirstem"
- acl_cv_libdirstems="$acl_libdirstem,$acl_libdirstem2,$acl_libdirstem3"
- ])
- dnl Decompose acl_cv_libdirstems into acl_libdirstem, acl_libdirstem2, and
- dnl acl_libdirstem3.
-changequote(,)dnl
- acl_libdirstem=`echo "$acl_cv_libdirstems" | sed -e 's/,.*//'`
- acl_libdirstem2=`echo "$acl_cv_libdirstems" | sed -e 's/^[^,]*,//' -e 's/,.*//'`
- acl_libdirstem3=`echo "$acl_cv_libdirstems" | sed -e 's/^[^,]*,[^,]*,//' -e 's/,.*//'`
-changequote([,])dnl
-])
diff --git a/meta/recipes-core/gettext/gettext-minimal-0.21/aclocal/nls.m4 b/meta/recipes-core/gettext/gettext-minimal-0.21/aclocal/nls.m4
deleted file mode 100644
index 5a506fc4b5..0000000000
--- a/meta/recipes-core/gettext/gettext-minimal-0.21/aclocal/nls.m4
+++ /dev/null
@@ -1,32 +0,0 @@
-# nls.m4 serial 6 (gettext-0.20.2)
-dnl Copyright (C) 1995-2003, 2005-2006, 2008-2014, 2016, 2019-2020 Free
-dnl Software Foundation, Inc.
-dnl This file is free software; the Free Software Foundation
-dnl gives unlimited permission to copy and/or distribute it,
-dnl with or without modifications, as long as this notice is preserved.
-dnl
-dnl This file can be used in projects which are not available under
-dnl the GNU General Public License or the GNU Lesser General Public
-dnl License but which still want to provide support for the GNU gettext
-dnl functionality.
-dnl Please note that the actual code of the GNU gettext library is covered
-dnl by the GNU Lesser General Public License, and the rest of the GNU
-dnl gettext package is covered by the GNU General Public License.
-dnl They are *not* in the public domain.
-
-dnl Authors:
-dnl Ulrich Drepper <drepper@cygnus.com>, 1995-2000.
-dnl Bruno Haible <haible@clisp.cons.org>, 2000-2003.
-
-AC_PREREQ([2.50])
-
-AC_DEFUN([AM_NLS],
-[
- AC_MSG_CHECKING([whether NLS is requested])
- dnl Default is enabled NLS
- AC_ARG_ENABLE([nls],
- [ --disable-nls do not use Native Language Support],
- USE_NLS=$enableval, USE_NLS=yes)
- AC_MSG_RESULT([$USE_NLS])
- AC_SUBST([USE_NLS])
-])
diff --git a/meta/recipes-core/gettext/gettext-minimal-0.21/aclocal/po.m4 b/meta/recipes-core/gettext/gettext-minimal-0.21/aclocal/po.m4
deleted file mode 100644
index 3778fd7aac..0000000000
--- a/meta/recipes-core/gettext/gettext-minimal-0.21/aclocal/po.m4
+++ /dev/null
@@ -1,450 +0,0 @@
-# po.m4 serial 31 (gettext-0.20.2)
-dnl Copyright (C) 1995-2014, 2016, 2018-2020 Free Software Foundation, Inc.
-dnl This file is free software; the Free Software Foundation
-dnl gives unlimited permission to copy and/or distribute it,
-dnl with or without modifications, as long as this notice is preserved.
-dnl
-dnl This file can be used in projects which are not available under
-dnl the GNU General Public License or the GNU Lesser General Public
-dnl License but which still want to provide support for the GNU gettext
-dnl functionality.
-dnl Please note that the actual code of the GNU gettext library is covered
-dnl by the GNU Lesser General Public License, and the rest of the GNU
-dnl gettext package is covered by the GNU General Public License.
-dnl They are *not* in the public domain.
-
-dnl Authors:
-dnl Ulrich Drepper <drepper@cygnus.com>, 1995-2000.
-dnl Bruno Haible <haible@clisp.cons.org>, 2000-2003.
-
-AC_PREREQ([2.60])
-
-dnl Checks for all prerequisites of the po subdirectory.
-AC_DEFUN([AM_PO_SUBDIRS],
-[
- AC_REQUIRE([AC_PROG_MAKE_SET])dnl
- AC_REQUIRE([AC_PROG_INSTALL])dnl
- AC_REQUIRE([AC_PROG_MKDIR_P])dnl
- AC_REQUIRE([AC_PROG_SED])dnl
- AC_REQUIRE([AM_NLS])dnl
-
- dnl Release version of the gettext macros. This is used to ensure that
- dnl the gettext macros and po/Makefile.in.in are in sync.
- AC_SUBST([GETTEXT_MACRO_VERSION], [0.20])
-
- dnl Perform the following tests also if --disable-nls has been given,
- dnl because they are needed for "make dist" to work.
-
- dnl Search for GNU msgfmt in the PATH.
- dnl The first test excludes Solaris msgfmt and early GNU msgfmt versions.
- dnl The second test excludes FreeBSD msgfmt.
- AM_PATH_PROG_WITH_TEST(MSGFMT, msgfmt,
- [$ac_dir/$ac_word --statistics /dev/null >&]AS_MESSAGE_LOG_FD[ 2>&1 &&
- (if $ac_dir/$ac_word --statistics /dev/null 2>&1 >/dev/null | grep usage >/dev/null; then exit 1; else exit 0; fi)],
- :)
- AC_PATH_PROG([GMSGFMT], [gmsgfmt], [$MSGFMT])
-
- dnl Test whether it is GNU msgfmt >= 0.15.
-changequote(,)dnl
- case `$GMSGFMT --version | sed 1q | sed -e 's,^[^0-9]*,,'` in
- '' | 0.[0-9] | 0.[0-9].* | 0.1[0-4] | 0.1[0-4].*) GMSGFMT_015=: ;;
- *) GMSGFMT_015=$GMSGFMT ;;
- esac
-changequote([,])dnl
- AC_SUBST([GMSGFMT_015])
-
- dnl Search for GNU xgettext 0.12 or newer in the PATH.
- dnl The first test excludes Solaris xgettext and early GNU xgettext versions.
- dnl The second test excludes FreeBSD xgettext.
- AM_PATH_PROG_WITH_TEST(XGETTEXT, xgettext,
- [$ac_dir/$ac_word --omit-header --copyright-holder= --msgid-bugs-address= /dev/null >&]AS_MESSAGE_LOG_FD[ 2>&1 &&
- (if $ac_dir/$ac_word --omit-header --copyright-holder= --msgid-bugs-address= /dev/null 2>&1 >/dev/null | grep usage >/dev/null; then exit 1; else exit 0; fi)],
- :)
- dnl Remove leftover from FreeBSD xgettext call.
- rm -f messages.po
-
- dnl Test whether it is GNU xgettext >= 0.15.
-changequote(,)dnl
- case `$XGETTEXT --version | sed 1q | sed -e 's,^[^0-9]*,,'` in
- '' | 0.[0-9] | 0.[0-9].* | 0.1[0-4] | 0.1[0-4].*) XGETTEXT_015=: ;;
- *) XGETTEXT_015=$XGETTEXT ;;
- esac
-changequote([,])dnl
- AC_SUBST([XGETTEXT_015])
-
- dnl Search for GNU msgmerge 0.11 or newer in the PATH.
- AM_PATH_PROG_WITH_TEST(MSGMERGE, msgmerge,
- [$ac_dir/$ac_word --update -q /dev/null /dev/null >&]AS_MESSAGE_LOG_FD[ 2>&1], :)
-
- dnl Test whether it is GNU msgmerge >= 0.20.
- if LC_ALL=C $MSGMERGE --help | grep ' --for-msgfmt ' >/dev/null; then
- MSGMERGE_FOR_MSGFMT_OPTION='--for-msgfmt'
- else
- dnl Test whether it is GNU msgmerge >= 0.12.
- if LC_ALL=C $MSGMERGE --help | grep ' --no-fuzzy-matching ' >/dev/null; then
- MSGMERGE_FOR_MSGFMT_OPTION='--no-fuzzy-matching --no-location --quiet'
- else
- dnl With these old versions, $(MSGMERGE) $(MSGMERGE_FOR_MSGFMT_OPTION) is
- dnl slow. But this is not a big problem, as such old gettext versions are
- dnl hardly in use any more.
- MSGMERGE_FOR_MSGFMT_OPTION='--no-location --quiet'
- fi
- fi
- AC_SUBST([MSGMERGE_FOR_MSGFMT_OPTION])
-
- dnl Support for AM_XGETTEXT_OPTION.
- test -n "${XGETTEXT_EXTRA_OPTIONS+set}" || XGETTEXT_EXTRA_OPTIONS=
- AC_SUBST([XGETTEXT_EXTRA_OPTIONS])
-
- AC_CONFIG_COMMANDS([po-directories], [[
- for ac_file in $CONFIG_FILES; do
- # Support "outfile[:infile[:infile...]]"
- case "$ac_file" in
- *:*) ac_file=`echo "$ac_file"|sed 's%:.*%%'` ;;
- esac
- # PO directories have a Makefile.in generated from Makefile.in.in.
- case "$ac_file" in */Makefile.in)
- # Adjust a relative srcdir.
- ac_dir=`echo "$ac_file"|sed 's%/[^/][^/]*$%%'`
- ac_dir_suffix=/`echo "$ac_dir"|sed 's%^\./%%'`
- ac_dots=`echo "$ac_dir_suffix"|sed 's%/[^/]*%../%g'`
- # In autoconf-2.13 it is called $ac_given_srcdir.
- # In autoconf-2.50 it is called $srcdir.
- test -n "$ac_given_srcdir" || ac_given_srcdir="$srcdir"
- case "$ac_given_srcdir" in
- .) top_srcdir=`echo $ac_dots|sed 's%/$%%'` ;;
- /*) top_srcdir="$ac_given_srcdir" ;;
- *) top_srcdir="$ac_dots$ac_given_srcdir" ;;
- esac
- # Treat a directory as a PO directory if and only if it has a
- # POTFILES.in file. This allows packages to have multiple PO
- # directories under different names or in different locations.
- if test -f "$ac_given_srcdir/$ac_dir/POTFILES.in"; then
- rm -f "$ac_dir/POTFILES"
- test -n "$as_me" && echo "$as_me: creating $ac_dir/POTFILES" || echo "creating $ac_dir/POTFILES"
- gt_tab=`printf '\t'`
- cat "$ac_given_srcdir/$ac_dir/POTFILES.in" | sed -e "/^#/d" -e "/^[ ${gt_tab}]*\$/d" -e "s,.*, $top_srcdir/& \\\\," | sed -e "\$s/\(.*\) \\\\/\1/" > "$ac_dir/POTFILES"
- POMAKEFILEDEPS="POTFILES.in"
- # ALL_LINGUAS, POFILES, UPDATEPOFILES, DUMMYPOFILES, GMOFILES depend
- # on $ac_dir but don't depend on user-specified configuration
- # parameters.
- if test -f "$ac_given_srcdir/$ac_dir/LINGUAS"; then
- # The LINGUAS file contains the set of available languages.
- if test -n "$OBSOLETE_ALL_LINGUAS"; then
- test -n "$as_me" && echo "$as_me: setting ALL_LINGUAS in configure.in is obsolete" || echo "setting ALL_LINGUAS in configure.in is obsolete"
- fi
- ALL_LINGUAS=`sed -e "/^#/d" -e "s/#.*//" "$ac_given_srcdir/$ac_dir/LINGUAS"`
- POMAKEFILEDEPS="$POMAKEFILEDEPS LINGUAS"
- else
- # The set of available languages was given in configure.in.
- ALL_LINGUAS=$OBSOLETE_ALL_LINGUAS
- fi
- # Compute POFILES
- # as $(foreach lang, $(ALL_LINGUAS), $(srcdir)/$(lang).po)
- # Compute UPDATEPOFILES
- # as $(foreach lang, $(ALL_LINGUAS), $(lang).po-update)
- # Compute DUMMYPOFILES
- # as $(foreach lang, $(ALL_LINGUAS), $(lang).nop)
- # Compute GMOFILES
- # as $(foreach lang, $(ALL_LINGUAS), $(srcdir)/$(lang).gmo)
- case "$ac_given_srcdir" in
- .) srcdirpre= ;;
- *) srcdirpre='$(srcdir)/' ;;
- esac
- POFILES=
- UPDATEPOFILES=
- DUMMYPOFILES=
- GMOFILES=
- for lang in $ALL_LINGUAS; do
- POFILES="$POFILES $srcdirpre$lang.po"
- UPDATEPOFILES="$UPDATEPOFILES $lang.po-update"
- DUMMYPOFILES="$DUMMYPOFILES $lang.nop"
- GMOFILES="$GMOFILES $srcdirpre$lang.gmo"
- done
- # CATALOGS depends on both $ac_dir and the user's LINGUAS
- # environment variable.
- INST_LINGUAS=
- if test -n "$ALL_LINGUAS"; then
- for presentlang in $ALL_LINGUAS; do
- useit=no
- if test "%UNSET%" != "$LINGUAS"; then
- desiredlanguages="$LINGUAS"
- else
- desiredlanguages="$ALL_LINGUAS"
- fi
- for desiredlang in $desiredlanguages; do
- # Use the presentlang catalog if desiredlang is
- # a. equal to presentlang, or
- # b. a variant of presentlang (because in this case,
- # presentlang can be used as a fallback for messages
- # which are not translated in the desiredlang catalog).
- case "$desiredlang" in
- "$presentlang"*) useit=yes;;
- esac
- done
- if test $useit = yes; then
- INST_LINGUAS="$INST_LINGUAS $presentlang"
- fi
- done
- fi
- CATALOGS=
- if test -n "$INST_LINGUAS"; then
- for lang in $INST_LINGUAS; do
- CATALOGS="$CATALOGS $lang.gmo"
- done
- fi
- test -n "$as_me" && echo "$as_me: creating $ac_dir/Makefile" || echo "creating $ac_dir/Makefile"
- sed -e "/^POTFILES =/r $ac_dir/POTFILES" -e "/^# Makevars/r $ac_given_srcdir/$ac_dir/Makevars" -e "s|@POFILES@|$POFILES|g" -e "s|@UPDATEPOFILES@|$UPDATEPOFILES|g" -e "s|@DUMMYPOFILES@|$DUMMYPOFILES|g" -e "s|@GMOFILES@|$GMOFILES|g" -e "s|@CATALOGS@|$CATALOGS|g" -e "s|@POMAKEFILEDEPS@|$POMAKEFILEDEPS|g" "$ac_dir/Makefile.in" > "$ac_dir/Makefile"
- for f in "$ac_given_srcdir/$ac_dir"/Rules-*; do
- if test -f "$f"; then
- case "$f" in
- *.orig | *.bak | *~) ;;
- *) cat "$f" >> "$ac_dir/Makefile" ;;
- esac
- fi
- done
- fi
- ;;
- esac
- done]],
- [# Capture the value of obsolete ALL_LINGUAS because we need it to compute
- # POFILES, UPDATEPOFILES, DUMMYPOFILES, GMOFILES, CATALOGS.
- OBSOLETE_ALL_LINGUAS="$ALL_LINGUAS"
- # Capture the value of LINGUAS because we need it to compute CATALOGS.
- LINGUAS="${LINGUAS-%UNSET%}"
- ])
-])
-
-dnl Postprocesses a Makefile in a directory containing PO files.
-AC_DEFUN([AM_POSTPROCESS_PO_MAKEFILE],
-[
- # When this code is run, in config.status, two variables have already been
- # set:
- # - OBSOLETE_ALL_LINGUAS is the value of LINGUAS set in configure.in,
- # - LINGUAS is the value of the environment variable LINGUAS at configure
- # time.
-
-changequote(,)dnl
- # Adjust a relative srcdir.
- ac_dir=`echo "$ac_file"|sed 's%/[^/][^/]*$%%'`
- ac_dir_suffix=/`echo "$ac_dir"|sed 's%^\./%%'`
- ac_dots=`echo "$ac_dir_suffix"|sed 's%/[^/]*%../%g'`
- # In autoconf-2.13 it is called $ac_given_srcdir.
- # In autoconf-2.50 it is called $srcdir.
- test -n "$ac_given_srcdir" || ac_given_srcdir="$srcdir"
- case "$ac_given_srcdir" in
- .) top_srcdir=`echo $ac_dots|sed 's%/$%%'` ;;
- /*) top_srcdir="$ac_given_srcdir" ;;
- *) top_srcdir="$ac_dots$ac_given_srcdir" ;;
- esac
-
- # Find a way to echo strings without interpreting backslash.
- if test "X`(echo '\t') 2>/dev/null`" = 'X\t'; then
- gt_echo='echo'
- else
- if test "X`(printf '%s\n' '\t') 2>/dev/null`" = 'X\t'; then
- gt_echo='printf %s\n'
- else
- echo_func () {
- cat <<EOT
-$*
-EOT
- }
- gt_echo='echo_func'
- fi
- fi
-
- # A sed script that extracts the value of VARIABLE from a Makefile.
- tab=`printf '\t'`
- sed_x_variable='
-# Test if the hold space is empty.
-x
-s/P/P/
-x
-ta
-# Yes it was empty. Look if we have the expected variable definition.
-/^['"${tab}"' ]*VARIABLE['"${tab}"' ]*=/{
- # Seen the first line of the variable definition.
- s/^['"${tab}"' ]*VARIABLE['"${tab}"' ]*=//
- ba
-}
-bd
-:a
-# Here we are processing a line from the variable definition.
-# Remove comment, more precisely replace it with a space.
-s/#.*$/ /
-# See if the line ends in a backslash.
-tb
-:b
-s/\\$//
-# Print the line, without the trailing backslash.
-p
-tc
-# There was no trailing backslash. The end of the variable definition is
-# reached. Clear the hold space.
-s/^.*$//
-x
-bd
-:c
-# A trailing backslash means that the variable definition continues in the
-# next line. Put a nonempty string into the hold space to indicate this.
-s/^.*$/P/
-x
-:d
-'
-changequote([,])dnl
-
- # Set POTFILES to the value of the Makefile variable POTFILES.
- sed_x_POTFILES=`$gt_echo "$sed_x_variable" | sed -e '/^ *#/d' -e 's/VARIABLE/POTFILES/g'`
- POTFILES=`sed -n -e "$sed_x_POTFILES" < "$ac_file"`
- # Compute POTFILES_DEPS as
- # $(foreach file, $(POTFILES), $(top_srcdir)/$(file))
- POTFILES_DEPS=
- for file in $POTFILES; do
- POTFILES_DEPS="$POTFILES_DEPS "'$(top_srcdir)/'"$file"
- done
- POMAKEFILEDEPS=""
-
- if test -n "$OBSOLETE_ALL_LINGUAS"; then
- test -n "$as_me" && echo "$as_me: setting ALL_LINGUAS in configure.in is obsolete" || echo "setting ALL_LINGUAS in configure.in is obsolete"
- fi
- if test -f "$ac_given_srcdir/$ac_dir/LINGUAS"; then
- # The LINGUAS file contains the set of available languages.
- ALL_LINGUAS=`sed -e "/^#/d" -e "s/#.*//" "$ac_given_srcdir/$ac_dir/LINGUAS"`
- POMAKEFILEDEPS="$POMAKEFILEDEPS LINGUAS"
- else
- # Set ALL_LINGUAS to the value of the Makefile variable LINGUAS.
- sed_x_LINGUAS=`$gt_echo "$sed_x_variable" | sed -e '/^ *#/d' -e 's/VARIABLE/LINGUAS/g'`
- ALL_LINGUAS=`sed -n -e "$sed_x_LINGUAS" < "$ac_file"`
- fi
- # Compute POFILES
- # as $(foreach lang, $(ALL_LINGUAS), $(srcdir)/$(lang).po)
- # Compute UPDATEPOFILES
- # as $(foreach lang, $(ALL_LINGUAS), $(lang).po-update)
- # Compute DUMMYPOFILES
- # as $(foreach lang, $(ALL_LINGUAS), $(lang).nop)
- # Compute GMOFILES
- # as $(foreach lang, $(ALL_LINGUAS), $(srcdir)/$(lang).gmo)
- # Compute PROPERTIESFILES
- # as $(foreach lang, $(ALL_LINGUAS), $(srcdir)/$(DOMAIN)_$(lang).properties)
- # Compute CLASSFILES
- # as $(foreach lang, $(ALL_LINGUAS), $(srcdir)/$(DOMAIN)_$(lang).class)
- # Compute QMFILES
- # as $(foreach lang, $(ALL_LINGUAS), $(srcdir)/$(lang).qm)
- # Compute MSGFILES
- # as $(foreach lang, $(ALL_LINGUAS), $(srcdir)/$(frob $(lang)).msg)
- # Compute RESOURCESDLLFILES
- # as $(foreach lang, $(ALL_LINGUAS), $(srcdir)/$(frob $(lang))/$(DOMAIN).resources.dll)
- case "$ac_given_srcdir" in
- .) srcdirpre= ;;
- *) srcdirpre='$(srcdir)/' ;;
- esac
- POFILES=
- UPDATEPOFILES=
- DUMMYPOFILES=
- GMOFILES=
- PROPERTIESFILES=
- CLASSFILES=
- QMFILES=
- MSGFILES=
- RESOURCESDLLFILES=
- for lang in $ALL_LINGUAS; do
- POFILES="$POFILES $srcdirpre$lang.po"
- UPDATEPOFILES="$UPDATEPOFILES $lang.po-update"
- DUMMYPOFILES="$DUMMYPOFILES $lang.nop"
- GMOFILES="$GMOFILES $srcdirpre$lang.gmo"
- PROPERTIESFILES="$PROPERTIESFILES \$(srcdir)/\$(DOMAIN)_$lang.properties"
- CLASSFILES="$CLASSFILES \$(srcdir)/\$(DOMAIN)_$lang.class"
- QMFILES="$QMFILES $srcdirpre$lang.qm"
- frobbedlang=`echo $lang | sed -e 's/\..*$//' -e 'y/ABCDEFGHIJKLMNOPQRSTUVWXYZ/abcdefghijklmnopqrstuvwxyz/'`
- MSGFILES="$MSGFILES $srcdirpre$frobbedlang.msg"
- frobbedlang=`echo $lang | sed -e 's/_/-/g' -e 's/^sr-CS/sr-SP/' -e 's/@latin$/-Latn/' -e 's/@cyrillic$/-Cyrl/' -e 's/^sr-SP$/sr-SP-Latn/' -e 's/^uz-UZ$/uz-UZ-Latn/'`
- RESOURCESDLLFILES="$RESOURCESDLLFILES $srcdirpre$frobbedlang/\$(DOMAIN).resources.dll"
- done
- # CATALOGS depends on both $ac_dir and the user's LINGUAS
- # environment variable.
- INST_LINGUAS=
- if test -n "$ALL_LINGUAS"; then
- for presentlang in $ALL_LINGUAS; do
- useit=no
- if test "%UNSET%" != "$LINGUAS"; then
- desiredlanguages="$LINGUAS"
- else
- desiredlanguages="$ALL_LINGUAS"
- fi
- for desiredlang in $desiredlanguages; do
- # Use the presentlang catalog if desiredlang is
- # a. equal to presentlang, or
- # b. a variant of presentlang (because in this case,
- # presentlang can be used as a fallback for messages
- # which are not translated in the desiredlang catalog).
- case "$desiredlang" in
- "$presentlang"*) useit=yes;;
- esac
- done
- if test $useit = yes; then
- INST_LINGUAS="$INST_LINGUAS $presentlang"
- fi
- done
- fi
- CATALOGS=
- JAVACATALOGS=
- QTCATALOGS=
- TCLCATALOGS=
- CSHARPCATALOGS=
- if test -n "$INST_LINGUAS"; then
- for lang in $INST_LINGUAS; do
- CATALOGS="$CATALOGS $lang.gmo"
- JAVACATALOGS="$JAVACATALOGS \$(DOMAIN)_$lang.properties"
- QTCATALOGS="$QTCATALOGS $lang.qm"
- frobbedlang=`echo $lang | sed -e 's/\..*$//' -e 'y/ABCDEFGHIJKLMNOPQRSTUVWXYZ/abcdefghijklmnopqrstuvwxyz/'`
- TCLCATALOGS="$TCLCATALOGS $frobbedlang.msg"
- frobbedlang=`echo $lang | sed -e 's/_/-/g' -e 's/^sr-CS/sr-SP/' -e 's/@latin$/-Latn/' -e 's/@cyrillic$/-Cyrl/' -e 's/^sr-SP$/sr-SP-Latn/' -e 's/^uz-UZ$/uz-UZ-Latn/'`
- CSHARPCATALOGS="$CSHARPCATALOGS $frobbedlang/\$(DOMAIN).resources.dll"
- done
- fi
-
- sed -e "s|@POTFILES_DEPS@|$POTFILES_DEPS|g" -e "s|@POFILES@|$POFILES|g" -e "s|@UPDATEPOFILES@|$UPDATEPOFILES|g" -e "s|@DUMMYPOFILES@|$DUMMYPOFILES|g" -e "s|@GMOFILES@|$GMOFILES|g" -e "s|@PROPERTIESFILES@|$PROPERTIESFILES|g" -e "s|@CLASSFILES@|$CLASSFILES|g" -e "s|@QMFILES@|$QMFILES|g" -e "s|@MSGFILES@|$MSGFILES|g" -e "s|@RESOURCESDLLFILES@|$RESOURCESDLLFILES|g" -e "s|@CATALOGS@|$CATALOGS|g" -e "s|@JAVACATALOGS@|$JAVACATALOGS|g" -e "s|@QTCATALOGS@|$QTCATALOGS|g" -e "s|@TCLCATALOGS@|$TCLCATALOGS|g" -e "s|@CSHARPCATALOGS@|$CSHARPCATALOGS|g" -e 's,^#distdir:,distdir:,' < "$ac_file" > "$ac_file.tmp"
- tab=`printf '\t'`
- if grep -l '@TCLCATALOGS@' "$ac_file" > /dev/null; then
- # Add dependencies that cannot be formulated as a simple suffix rule.
- for lang in $ALL_LINGUAS; do
- frobbedlang=`echo $lang | sed -e 's/\..*$//' -e 'y/ABCDEFGHIJKLMNOPQRSTUVWXYZ/abcdefghijklmnopqrstuvwxyz/'`
- cat >> "$ac_file.tmp" <<EOF
-$frobbedlang.msg: $lang.po
-${tab}@echo "\$(MSGFMT) -c --tcl -d \$(srcdir) -l $lang $srcdirpre$lang.po"; \
-${tab}\$(MSGFMT) -c --tcl -d "\$(srcdir)" -l $lang $srcdirpre$lang.po || { rm -f "\$(srcdir)/$frobbedlang.msg"; exit 1; }
-EOF
- done
- fi
- if grep -l '@CSHARPCATALOGS@' "$ac_file" > /dev/null; then
- # Add dependencies that cannot be formulated as a simple suffix rule.
- for lang in $ALL_LINGUAS; do
- frobbedlang=`echo $lang | sed -e 's/_/-/g' -e 's/^sr-CS/sr-SP/' -e 's/@latin$/-Latn/' -e 's/@cyrillic$/-Cyrl/' -e 's/^sr-SP$/sr-SP-Latn/' -e 's/^uz-UZ$/uz-UZ-Latn/'`
- cat >> "$ac_file.tmp" <<EOF
-$frobbedlang/\$(DOMAIN).resources.dll: $lang.po
-${tab}@echo "\$(MSGFMT) -c --csharp -d \$(srcdir) -l $lang $srcdirpre$lang.po -r \$(DOMAIN)"; \
-${tab}\$(MSGFMT) -c --csharp -d "\$(srcdir)" -l $lang $srcdirpre$lang.po -r "\$(DOMAIN)" || { rm -f "\$(srcdir)/$frobbedlang.msg"; exit 1; }
-EOF
- done
- fi
- if test -n "$POMAKEFILEDEPS"; then
- cat >> "$ac_file.tmp" <<EOF
-Makefile: $POMAKEFILEDEPS
-EOF
- fi
- mv "$ac_file.tmp" "$ac_file"
-])
-
-dnl Initializes the accumulator used by AM_XGETTEXT_OPTION.
-AC_DEFUN([AM_XGETTEXT_OPTION_INIT],
-[
- XGETTEXT_EXTRA_OPTIONS=
-])
-
-dnl Registers an option to be passed to xgettext in the po subdirectory.
-AC_DEFUN([AM_XGETTEXT_OPTION],
-[
- AC_REQUIRE([AM_XGETTEXT_OPTION_INIT])
- XGETTEXT_EXTRA_OPTIONS="$XGETTEXT_EXTRA_OPTIONS $1"
-])
diff --git a/meta/recipes-core/gettext/gettext-minimal-0.21/aclocal/progtest.m4 b/meta/recipes-core/gettext/gettext-minimal-0.21/aclocal/progtest.m4
deleted file mode 100644
index f28010aed1..0000000000
--- a/meta/recipes-core/gettext/gettext-minimal-0.21/aclocal/progtest.m4
+++ /dev/null
@@ -1,91 +0,0 @@
-# progtest.m4 serial 8 (gettext-0.20.2)
-dnl Copyright (C) 1996-2003, 2005, 2008-2020 Free Software Foundation, Inc.
-dnl This file is free software; the Free Software Foundation
-dnl gives unlimited permission to copy and/or distribute it,
-dnl with or without modifications, as long as this notice is preserved.
-dnl
-dnl This file can be used in projects which are not available under
-dnl the GNU General Public License or the GNU Lesser General Public
-dnl License but which still want to provide support for the GNU gettext
-dnl functionality.
-dnl Please note that the actual code of the GNU gettext library is covered
-dnl by the GNU Lesser General Public License, and the rest of the GNU
-dnl gettext package is covered by the GNU General Public License.
-dnl They are *not* in the public domain.
-
-dnl Authors:
-dnl Ulrich Drepper <drepper@cygnus.com>, 1996.
-
-AC_PREREQ([2.50])
-
-# Search path for a program which passes the given test.
-
-dnl AM_PATH_PROG_WITH_TEST(VARIABLE, PROG-TO-CHECK-FOR,
-dnl TEST-PERFORMED-ON-FOUND_PROGRAM [, VALUE-IF-NOT-FOUND [, PATH]])
-AC_DEFUN([AM_PATH_PROG_WITH_TEST],
-[
-# Prepare PATH_SEPARATOR.
-# The user is always right.
-if test "${PATH_SEPARATOR+set}" != set; then
- # Determine PATH_SEPARATOR by trying to find /bin/sh in a PATH which
- # contains only /bin. Note that ksh looks also at the FPATH variable,
- # so we have to set that as well for the test.
- PATH_SEPARATOR=:
- (PATH='/bin;/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 \
- && { (PATH='/bin:/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 \
- || PATH_SEPARATOR=';'
- }
-fi
-
-# Find out how to test for executable files. Don't use a zero-byte file,
-# as systems may use methods other than mode bits to determine executability.
-cat >conf$$.file <<_ASEOF
-#! /bin/sh
-exit 0
-_ASEOF
-chmod +x conf$$.file
-if test -x conf$$.file >/dev/null 2>&1; then
- ac_executable_p="test -x"
-else
- ac_executable_p="test -f"
-fi
-rm -f conf$$.file
-
-# Extract the first word of "$2", so it can be a program name with args.
-set dummy $2; ac_word=[$]2
-AC_MSG_CHECKING([for $ac_word])
-AC_CACHE_VAL([ac_cv_path_$1],
-[case "[$]$1" in
- [[\\/]]* | ?:[[\\/]]*)
- ac_cv_path_$1="[$]$1" # Let the user override the test with a path.
- ;;
- *)
- ac_save_IFS="$IFS"; IFS=$PATH_SEPARATOR
- for ac_dir in ifelse([$5], , $PATH, [$5]); do
- IFS="$ac_save_IFS"
- test -z "$ac_dir" && ac_dir=.
- for ac_exec_ext in '' $ac_executable_extensions; do
- if $ac_executable_p "$ac_dir/$ac_word$ac_exec_ext"; then
- echo "$as_me: trying $ac_dir/$ac_word..." >&AS_MESSAGE_LOG_FD
- if [$3]; then
- ac_cv_path_$1="$ac_dir/$ac_word$ac_exec_ext"
- break 2
- fi
- fi
- done
- done
- IFS="$ac_save_IFS"
-dnl If no 4th arg is given, leave the cache variable unset,
-dnl so AC_PATH_PROGS will keep looking.
-ifelse([$4], , , [ test -z "[$]ac_cv_path_$1" && ac_cv_path_$1="$4"
-])dnl
- ;;
-esac])dnl
-$1="$ac_cv_path_$1"
-if test ifelse([$4], , [-n "[$]$1"], ["[$]$1" != "$4"]); then
- AC_MSG_RESULT([$][$1])
-else
- AC_MSG_RESULT([no])
-fi
-AC_SUBST([$1])dnl
-])
diff --git a/meta/recipes-core/gettext/gettext-minimal-0.21/config.rpath b/meta/recipes-core/gettext/gettext-minimal-0.21/config.rpath
deleted file mode 100755
index 24be79cfb6..0000000000
--- a/meta/recipes-core/gettext/gettext-minimal-0.21/config.rpath
+++ /dev/null
@@ -1,684 +0,0 @@
-#! /bin/sh
-# Output a system dependent set of variables, describing how to set the
-# run time search path of shared libraries in an executable.
-#
-# Copyright 1996-2020 Free Software Foundation, Inc.
-# Taken from GNU libtool, 2001
-# Originally by Gordon Matzigkeit <gord@gnu.ai.mit.edu>, 1996
-#
-# This file is free software; the Free Software Foundation gives
-# unlimited permission to copy and/or distribute it, with or without
-# modifications, as long as this notice is preserved.
-#
-# The first argument passed to this file is the canonical host specification,
-# CPU_TYPE-MANUFACTURER-OPERATING_SYSTEM
-# or
-# CPU_TYPE-MANUFACTURER-KERNEL-OPERATING_SYSTEM
-# The environment variables CC, GCC, LDFLAGS, LD, with_gnu_ld
-# should be set by the caller.
-#
-# The set of defined variables is at the end of this script.
-
-# Known limitations:
-# - On IRIX 6.5 with CC="cc", the run time search patch must not be longer
-# than 256 bytes, otherwise the compiler driver will dump core. The only
-# known workaround is to choose shorter directory names for the build
-# directory and/or the installation directory.
-
-# All known linkers require a '.a' archive for static linking (except MSVC,
-# which needs '.lib').
-libext=a
-shrext=.so
-
-host="$1"
-host_cpu=`echo "$host" | sed 's/^\([^-]*\)-\([^-]*\)-\(.*\)$/\1/'`
-host_vendor=`echo "$host" | sed 's/^\([^-]*\)-\([^-]*\)-\(.*\)$/\2/'`
-host_os=`echo "$host" | sed 's/^\([^-]*\)-\([^-]*\)-\(.*\)$/\3/'`
-
-# Code taken from libtool.m4's _LT_CC_BASENAME.
-
-for cc_temp in $CC""; do
- case $cc_temp in
- compile | *[\\/]compile | ccache | *[\\/]ccache ) ;;
- distcc | *[\\/]distcc | purify | *[\\/]purify ) ;;
- \-*) ;;
- *) break;;
- esac
-done
-cc_basename=`echo "$cc_temp" | sed -e 's%^.*/%%'`
-
-# Code taken from libtool.m4's _LT_COMPILER_PIC.
-
-wl=
-if test "$GCC" = yes; then
- wl='-Wl,'
-else
- case "$host_os" in
- aix*)
- wl='-Wl,'
- ;;
- mingw* | cygwin* | pw32* | os2* | cegcc*)
- ;;
- hpux9* | hpux10* | hpux11*)
- wl='-Wl,'
- ;;
- irix5* | irix6* | nonstopux*)
- wl='-Wl,'
- ;;
- linux* | k*bsd*-gnu | kopensolaris*-gnu)
- case $cc_basename in
- ecc*)
- wl='-Wl,'
- ;;
- icc* | ifort*)
- wl='-Wl,'
- ;;
- lf95*)
- wl='-Wl,'
- ;;
- nagfor*)
- wl='-Wl,-Wl,,'
- ;;
- pgcc* | pgf77* | pgf90* | pgf95* | pgfortran*)
- wl='-Wl,'
- ;;
- ccc*)
- wl='-Wl,'
- ;;
- xl* | bgxl* | bgf* | mpixl*)
- wl='-Wl,'
- ;;
- como)
- wl='-lopt='
- ;;
- *)
- case `$CC -V 2>&1 | sed 5q` in
- *Sun\ F* | *Sun*Fortran*)
- wl=
- ;;
- *Sun\ C*)
- wl='-Wl,'
- ;;
- esac
- ;;
- esac
- ;;
- newsos6)
- ;;
- *nto* | *qnx*)
- ;;
- osf3* | osf4* | osf5*)
- wl='-Wl,'
- ;;
- rdos*)
- ;;
- solaris*)
- case $cc_basename in
- f77* | f90* | f95* | sunf77* | sunf90* | sunf95*)
- wl='-Qoption ld '
- ;;
- *)
- wl='-Wl,'
- ;;
- esac
- ;;
- sunos4*)
- wl='-Qoption ld '
- ;;
- sysv4 | sysv4.2uw2* | sysv4.3*)
- wl='-Wl,'
- ;;
- sysv4*MP*)
- ;;
- sysv5* | unixware* | sco3.2v5* | sco5v6* | OpenUNIX*)
- wl='-Wl,'
- ;;
- unicos*)
- wl='-Wl,'
- ;;
- uts4*)
- ;;
- esac
-fi
-
-# Code taken from libtool.m4's _LT_LINKER_SHLIBS.
-
-hardcode_libdir_flag_spec=
-hardcode_libdir_separator=
-hardcode_direct=no
-hardcode_minus_L=no
-
-case "$host_os" in
- cygwin* | mingw* | pw32* | cegcc*)
- # FIXME: the MSVC++ port hasn't been tested in a loooong time
- # When not using gcc, we currently assume that we are using
- # Microsoft Visual C++.
- if test "$GCC" != yes; then
- with_gnu_ld=no
- fi
- ;;
- interix*)
- # we just hope/assume this is gcc and not c89 (= MSVC++)
- with_gnu_ld=yes
- ;;
- openbsd*)
- with_gnu_ld=no
- ;;
-esac
-
-ld_shlibs=yes
-if test "$with_gnu_ld" = yes; then
- # Set some defaults for GNU ld with shared library support. These
- # are reset later if shared libraries are not supported. Putting them
- # here allows them to be overridden if necessary.
- # Unlike libtool, we use -rpath here, not --rpath, since the documented
- # option of GNU ld is called -rpath, not --rpath.
- hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir'
- case "$host_os" in
- aix[3-9]*)
- # On AIX/PPC, the GNU linker is very broken
- if test "$host_cpu" != ia64; then
- ld_shlibs=no
- fi
- ;;
- amigaos*)
- case "$host_cpu" in
- powerpc)
- ;;
- m68k)
- hardcode_libdir_flag_spec='-L$libdir'
- hardcode_minus_L=yes
- ;;
- esac
- ;;
- beos*)
- if $LD --help 2>&1 | grep ': supported targets:.* elf' > /dev/null; then
- :
- else
- ld_shlibs=no
- fi
- ;;
- cygwin* | mingw* | pw32* | cegcc*)
- # hardcode_libdir_flag_spec is actually meaningless, as there is
- # no search path for DLLs.
- hardcode_libdir_flag_spec='-L$libdir'
- if $LD --help 2>&1 | grep 'auto-import' > /dev/null; then
- :
- else
- ld_shlibs=no
- fi
- ;;
- haiku*)
- ;;
- interix[3-9]*)
- hardcode_direct=no
- hardcode_libdir_flag_spec='${wl}-rpath,$libdir'
- ;;
- gnu* | linux* | tpf* | k*bsd*-gnu | kopensolaris*-gnu)
- if $LD --help 2>&1 | grep ': supported targets:.* elf' > /dev/null; then
- :
- else
- ld_shlibs=no
- fi
- ;;
- netbsd*)
- ;;
- solaris*)
- if $LD -v 2>&1 | grep 'BFD 2\.8' > /dev/null; then
- ld_shlibs=no
- elif $LD --help 2>&1 | grep ': supported targets:.* elf' > /dev/null; then
- :
- else
- ld_shlibs=no
- fi
- ;;
- sysv5* | sco3.2v5* | sco5v6* | unixware* | OpenUNIX*)
- case `$LD -v 2>&1` in
- *\ [01].* | *\ 2.[0-9].* | *\ 2.1[0-5].*)
- ld_shlibs=no
- ;;
- *)
- if $LD --help 2>&1 | grep ': supported targets:.* elf' > /dev/null; then
- hardcode_libdir_flag_spec='`test -z "$SCOABSPATH" && echo ${wl}-rpath,$libdir`'
- else
- ld_shlibs=no
- fi
- ;;
- esac
- ;;
- sunos4*)
- hardcode_direct=yes
- ;;
- *)
- if $LD --help 2>&1 | grep ': supported targets:.* elf' > /dev/null; then
- :
- else
- ld_shlibs=no
- fi
- ;;
- esac
- if test "$ld_shlibs" = no; then
- hardcode_libdir_flag_spec=
- fi
-else
- case "$host_os" in
- aix3*)
- # Note: this linker hardcodes the directories in LIBPATH if there
- # are no directories specified by -L.
- hardcode_minus_L=yes
- if test "$GCC" = yes; then
- # Neither direct hardcoding nor static linking is supported with a
- # broken collect2.
- hardcode_direct=unsupported
- fi
- ;;
- aix[4-9]*)
- if test "$host_cpu" = ia64; then
- # On IA64, the linker does run time linking by default, so we don't
- # have to do anything special.
- aix_use_runtimelinking=no
- else
- aix_use_runtimelinking=no
- # Test if we are trying to use run time linking or normal
- # AIX style linking. If -brtl is somewhere in LDFLAGS, we
- # need to do runtime linking.
- case $host_os in aix4.[23]|aix4.[23].*|aix[5-9]*)
- for ld_flag in $LDFLAGS; do
- if (test $ld_flag = "-brtl" || test $ld_flag = "-Wl,-brtl"); then
- aix_use_runtimelinking=yes
- break
- fi
- done
- ;;
- esac
- fi
- hardcode_direct=yes
- hardcode_libdir_separator=':'
- if test "$GCC" = yes; then
- case $host_os in aix4.[012]|aix4.[012].*)
- collect2name=`${CC} -print-prog-name=collect2`
- if test -f "$collect2name" && \
- strings "$collect2name" | grep resolve_lib_name >/dev/null
- then
- # We have reworked collect2
- :
- else
- # We have old collect2
- hardcode_direct=unsupported
- hardcode_minus_L=yes
- hardcode_libdir_flag_spec='-L$libdir'
- hardcode_libdir_separator=
- fi
- ;;
- esac
- fi
- # Begin _LT_AC_SYS_LIBPATH_AIX.
- echo 'int main () { return 0; }' > conftest.c
- ${CC} ${LDFLAGS} conftest.c -o conftest
- aix_libpath=`dump -H conftest 2>/dev/null | sed -n -e '/Import File Strings/,/^$/ { /^0/ { s/^0 *\(.*\)$/\1/; p; }
-}'`
- if test -z "$aix_libpath"; then
- aix_libpath=`dump -HX64 conftest 2>/dev/null | sed -n -e '/Import File Strings/,/^$/ { /^0/ { s/^0 *\(.*\)$/\1/; p; }
-}'`
- fi
- if test -z "$aix_libpath"; then
- aix_libpath="/usr/lib:/lib"
- fi
- rm -f conftest.c conftest
- # End _LT_AC_SYS_LIBPATH_AIX.
- if test "$aix_use_runtimelinking" = yes; then
- hardcode_libdir_flag_spec='${wl}-blibpath:$libdir:'"$aix_libpath"
- else
- if test "$host_cpu" = ia64; then
- hardcode_libdir_flag_spec='${wl}-R $libdir:/usr/lib:/lib'
- else
- hardcode_libdir_flag_spec='${wl}-blibpath:$libdir:'"$aix_libpath"
- fi
- fi
- ;;
- amigaos*)
- case "$host_cpu" in
- powerpc)
- ;;
- m68k)
- hardcode_libdir_flag_spec='-L$libdir'
- hardcode_minus_L=yes
- ;;
- esac
- ;;
- bsdi[45]*)
- ;;
- cygwin* | mingw* | pw32* | cegcc*)
- # When not using gcc, we currently assume that we are using
- # Microsoft Visual C++.
- # hardcode_libdir_flag_spec is actually meaningless, as there is
- # no search path for DLLs.
- hardcode_libdir_flag_spec=' '
- libext=lib
- ;;
- darwin* | rhapsody*)
- hardcode_direct=no
- if { case $cc_basename in ifort*) true;; *) test "$GCC" = yes;; esac; }; then
- :
- else
- ld_shlibs=no
- fi
- ;;
- dgux*)
- hardcode_libdir_flag_spec='-L$libdir'
- ;;
- freebsd2.[01]*)
- hardcode_direct=yes
- hardcode_minus_L=yes
- ;;
- freebsd* | dragonfly*)
- hardcode_libdir_flag_spec='-R$libdir'
- hardcode_direct=yes
- ;;
- hpux9*)
- hardcode_libdir_flag_spec='${wl}+b ${wl}$libdir'
- hardcode_libdir_separator=:
- hardcode_direct=yes
- # hardcode_minus_L: Not really in the search PATH,
- # but as the default location of the library.
- hardcode_minus_L=yes
- ;;
- hpux10*)
- if test "$with_gnu_ld" = no; then
- hardcode_libdir_flag_spec='${wl}+b ${wl}$libdir'
- hardcode_libdir_separator=:
- hardcode_direct=yes
- # hardcode_minus_L: Not really in the search PATH,
- # but as the default location of the library.
- hardcode_minus_L=yes
- fi
- ;;
- hpux11*)
- if test "$with_gnu_ld" = no; then
- hardcode_libdir_flag_spec='${wl}+b ${wl}$libdir'
- hardcode_libdir_separator=:
- case $host_cpu in
- hppa*64*|ia64*)
- hardcode_direct=no
- ;;
- *)
- hardcode_direct=yes
- # hardcode_minus_L: Not really in the search PATH,
- # but as the default location of the library.
- hardcode_minus_L=yes
- ;;
- esac
- fi
- ;;
- irix5* | irix6* | nonstopux*)
- hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir'
- hardcode_libdir_separator=:
- ;;
- netbsd*)
- hardcode_libdir_flag_spec='-R$libdir'
- hardcode_direct=yes
- ;;
- newsos6)
- hardcode_direct=yes
- hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir'
- hardcode_libdir_separator=:
- ;;
- *nto* | *qnx*)
- ;;
- openbsd*)
- if test -f /usr/libexec/ld.so; then
- hardcode_direct=yes
- if test -z "`echo __ELF__ | $CC -E - | grep __ELF__`" || test "$host_os-$host_cpu" = "openbsd2.8-powerpc"; then
- hardcode_libdir_flag_spec='${wl}-rpath,$libdir'
- else
- case "$host_os" in
- openbsd[01].* | openbsd2.[0-7] | openbsd2.[0-7].*)
- hardcode_libdir_flag_spec='-R$libdir'
- ;;
- *)
- hardcode_libdir_flag_spec='${wl}-rpath,$libdir'
- ;;
- esac
- fi
- else
- ld_shlibs=no
- fi
- ;;
- os2*)
- hardcode_libdir_flag_spec='-L$libdir'
- hardcode_minus_L=yes
- ;;
- osf3*)
- hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir'
- hardcode_libdir_separator=:
- ;;
- osf4* | osf5*)
- if test "$GCC" = yes; then
- hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir'
- else
- # Both cc and cxx compiler support -rpath directly
- hardcode_libdir_flag_spec='-rpath $libdir'
- fi
- hardcode_libdir_separator=:
- ;;
- solaris*)
- hardcode_libdir_flag_spec='-R$libdir'
- ;;
- sunos4*)
- hardcode_libdir_flag_spec='-L$libdir'
- hardcode_direct=yes
- hardcode_minus_L=yes
- ;;
- sysv4)
- case $host_vendor in
- sni)
- hardcode_direct=yes # is this really true???
- ;;
- siemens)
- hardcode_direct=no
- ;;
- motorola)
- hardcode_direct=no #Motorola manual says yes, but my tests say they lie
- ;;
- esac
- ;;
- sysv4.3*)
- ;;
- sysv4*MP*)
- if test -d /usr/nec; then
- ld_shlibs=yes
- fi
- ;;
- sysv4*uw2* | sysv5OpenUNIX* | sysv5UnixWare7.[01].[10]* | unixware7* | sco3.2v5.0.[024]*)
- ;;
- sysv5* | sco3.2v5* | sco5v6*)
- hardcode_libdir_flag_spec='`test -z "$SCOABSPATH" && echo ${wl}-R,$libdir`'
- hardcode_libdir_separator=':'
- ;;
- uts4*)
- hardcode_libdir_flag_spec='-L$libdir'
- ;;
- *)
- ld_shlibs=no
- ;;
- esac
-fi
-
-# Check dynamic linker characteristics
-# Code taken from libtool.m4's _LT_SYS_DYNAMIC_LINKER.
-# Unlike libtool.m4, here we don't care about _all_ names of the library, but
-# only about the one the linker finds when passed -lNAME. This is the last
-# element of library_names_spec in libtool.m4, or possibly two of them if the
-# linker has special search rules.
-library_names_spec= # the last element of library_names_spec in libtool.m4
-libname_spec='lib$name'
-case "$host_os" in
- aix3*)
- library_names_spec='$libname.a'
- ;;
- aix[4-9]*)
- library_names_spec='$libname$shrext'
- ;;
- amigaos*)
- case "$host_cpu" in
- powerpc*)
- library_names_spec='$libname$shrext' ;;
- m68k)
- library_names_spec='$libname.a' ;;
- esac
- ;;
- beos*)
- library_names_spec='$libname$shrext'
- ;;
- bsdi[45]*)
- library_names_spec='$libname$shrext'
- ;;
- cygwin* | mingw* | pw32* | cegcc*)
- shrext=.dll
- library_names_spec='$libname.dll.a $libname.lib'
- ;;
- darwin* | rhapsody*)
- shrext=.dylib
- library_names_spec='$libname$shrext'
- ;;
- dgux*)
- library_names_spec='$libname$shrext'
- ;;
- freebsd[23].*)
- library_names_spec='$libname$shrext$versuffix'
- ;;
- freebsd* | dragonfly*)
- library_names_spec='$libname$shrext'
- ;;
- gnu*)
- library_names_spec='$libname$shrext'
- ;;
- haiku*)
- library_names_spec='$libname$shrext'
- ;;
- hpux9* | hpux10* | hpux11*)
- case $host_cpu in
- ia64*)
- shrext=.so
- ;;
- hppa*64*)
- shrext=.sl
- ;;
- *)
- shrext=.sl
- ;;
- esac
- library_names_spec='$libname$shrext'
- ;;
- interix[3-9]*)
- library_names_spec='$libname$shrext'
- ;;
- irix5* | irix6* | nonstopux*)
- library_names_spec='$libname$shrext'
- case "$host_os" in
- irix5* | nonstopux*)
- libsuff= shlibsuff=
- ;;
- *)
- case $LD in
- *-32|*"-32 "|*-melf32bsmip|*"-melf32bsmip ") libsuff= shlibsuff= ;;
- *-n32|*"-n32 "|*-melf32bmipn32|*"-melf32bmipn32 ") libsuff=32 shlibsuff=N32 ;;
- *-64|*"-64 "|*-melf64bmip|*"-melf64bmip ") libsuff=64 shlibsuff=64 ;;
- *) libsuff= shlibsuff= ;;
- esac
- ;;
- esac
- ;;
- linux*oldld* | linux*aout* | linux*coff*)
- ;;
- linux* | k*bsd*-gnu | kopensolaris*-gnu)
- library_names_spec='$libname$shrext'
- ;;
- knetbsd*-gnu)
- library_names_spec='$libname$shrext'
- ;;
- netbsd*)
- library_names_spec='$libname$shrext'
- ;;
- newsos6)
- library_names_spec='$libname$shrext'
- ;;
- *nto* | *qnx*)
- library_names_spec='$libname$shrext'
- ;;
- openbsd*)
- library_names_spec='$libname$shrext$versuffix'
- ;;
- os2*)
- libname_spec='$name'
- shrext=.dll
- library_names_spec='$libname.a'
- ;;
- osf3* | osf4* | osf5*)
- library_names_spec='$libname$shrext'
- ;;
- rdos*)
- ;;
- solaris*)
- library_names_spec='$libname$shrext'
- ;;
- sunos4*)
- library_names_spec='$libname$shrext$versuffix'
- ;;
- sysv4 | sysv4.3*)
- library_names_spec='$libname$shrext'
- ;;
- sysv4*MP*)
- library_names_spec='$libname$shrext'
- ;;
- sysv5* | sco3.2v5* | sco5v6* | unixware* | OpenUNIX* | sysv4*uw2*)
- library_names_spec='$libname$shrext'
- ;;
- tpf*)
- library_names_spec='$libname$shrext'
- ;;
- uts4*)
- library_names_spec='$libname$shrext'
- ;;
-esac
-
-sed_quote_subst='s/\(["`$\\]\)/\\\1/g'
-escaped_wl=`echo "X$wl" | sed -e 's/^X//' -e "$sed_quote_subst"`
-shlibext=`echo "$shrext" | sed -e 's,^\.,,'`
-escaped_libname_spec=`echo "X$libname_spec" | sed -e 's/^X//' -e "$sed_quote_subst"`
-escaped_library_names_spec=`echo "X$library_names_spec" | sed -e 's/^X//' -e "$sed_quote_subst"`
-escaped_hardcode_libdir_flag_spec=`echo "X$hardcode_libdir_flag_spec" | sed -e 's/^X//' -e "$sed_quote_subst"`
-
-LC_ALL=C sed -e 's/^\([a-zA-Z0-9_]*\)=/acl_cv_\1=/' <<EOF
-
-# How to pass a linker flag through the compiler.
-wl="$escaped_wl"
-
-# Static library suffix (normally "a").
-libext="$libext"
-
-# Shared library suffix (normally "so").
-shlibext="$shlibext"
-
-# Format of library name prefix.
-libname_spec="$escaped_libname_spec"
-
-# Library names that the linker finds when passed -lNAME.
-library_names_spec="$escaped_library_names_spec"
-
-# Flag to hardcode \$libdir into a binary during linking.
-# This must work even if \$libdir does not exist.
-hardcode_libdir_flag_spec="$escaped_hardcode_libdir_flag_spec"
-
-# Whether we need a single -rpath flag with a separated argument.
-hardcode_libdir_separator="$hardcode_libdir_separator"
-
-# Set to yes if using DIR/libNAME.so during linking hardcodes DIR into the
-# resulting binary.
-hardcode_direct="$hardcode_direct"
-
-# Set to yes if using the -LDIR flag during linking hardcodes DIR into the
-# resulting binary.
-hardcode_minus_L="$hardcode_minus_L"
-
-EOF
diff --git a/meta/recipes-core/gettext/gettext-minimal-0.21/remove-potcdate.sin b/meta/recipes-core/gettext/gettext-minimal-0.21/remove-potcdate.sin
deleted file mode 100644
index 8c70dfbf14..0000000000
--- a/meta/recipes-core/gettext/gettext-minimal-0.21/remove-potcdate.sin
+++ /dev/null
@@ -1,25 +0,0 @@
-# Sed script that removes the POT-Creation-Date line in the header entry
-# from a POT file.
-#
-# Copyright (C) 2002 Free Software Foundation, Inc.
-# Copying and distribution of this file, with or without modification,
-# are permitted in any medium without royalty provided the copyright
-# notice and this notice are preserved. This file is offered as-is,
-# without any warranty.
-#
-# The distinction between the first and the following occurrences of the
-# pattern is achieved by looking at the hold space.
-/^"POT-Creation-Date: .*"$/{
-x
-# Test if the hold space is empty.
-s/P/P/
-ta
-# Yes it was empty. First occurrence. Remove the line.
-g
-d
-bb
-:a
-# The hold space was nonempty. Following occurrences. Do nothing.
-x
-:b
-}
diff --git a/meta/recipes-core/gettext/gettext-minimal-native_0.21.bb b/meta/recipes-core/gettext/gettext-minimal-native_0.21.bb
deleted file mode 100644
index 7a4dcbec57..0000000000
--- a/meta/recipes-core/gettext/gettext-minimal-native_0.21.bb
+++ /dev/null
@@ -1,30 +0,0 @@
-SUMMARY = "Minimal gettext for supporting native autoconf/automake"
-DESCRIPTION = "Contains the m4 macros sufficient to support building \
-autoconf/automake. This provides a significant build time speedup by \
-the removal of gettext-native from most dependency chains (now only \
-needed for gettext for the target)."
-SRC_URI = "file://aclocal/ \
- file://config.rpath \
- file://Makefile.in.in \
- file://remove-potcdate.sin \
- file://COPYING \
-"
-
-INHIBIT_DEFAULT_DEPS = "1"
-INHIBIT_AUTOTOOLS_DEPS = "1"
-
-LICENSE = "FSF-Unlimited"
-LIC_FILES_CHKSUM = "file://COPYING;md5=4bd090a20bfcd1a18f1f79837b5e3e91"
-
-inherit native
-
-S = "${WORKDIR}"
-
-do_install () {
- install -d ${D}${datadir}/aclocal/
- cp ${WORKDIR}/aclocal/*.m4 ${D}${datadir}/aclocal/
- install -d ${D}${datadir}/gettext/po/
- cp ${WORKDIR}/config.rpath ${D}${datadir}/gettext/
- cp ${WORKDIR}/Makefile.in.in ${D}${datadir}/gettext/po/
- cp ${WORKDIR}/remove-potcdate.sin ${D}${datadir}/gettext/po/
-}
diff --git a/meta/recipes-core/gettext/gettext-minimal-native_0.22.5.bb b/meta/recipes-core/gettext/gettext-minimal-native_0.22.5.bb
new file mode 100644
index 0000000000..db1bbb7e68
--- /dev/null
+++ b/meta/recipes-core/gettext/gettext-minimal-native_0.22.5.bb
@@ -0,0 +1,46 @@
+SUMMARY = "Minimal gettext for supporting native autoconf/automake"
+DESCRIPTION = "Contains the m4 macros sufficient to support building \
+autoconf/automake. This provides a significant build time speedup by \
+the removal of gettext-native from most dependency chains (now only \
+needed for gettext for the target)."
+
+require gettext-sources.inc
+SRC_URI += " \
+ file://COPYING \
+"
+
+INHIBIT_DEFAULT_DEPS = "1"
+INHIBIT_AUTOTOOLS_DEPS = "1"
+
+LICENSE = "FSF-Unlimited"
+LIC_FILES_CHKSUM = "file://../COPYING;md5=4bd090a20bfcd1a18f1f79837b5e3e91"
+
+inherit native
+
+S = "${WORKDIR}/gettext-${PV}"
+
+python get_aclocal_files() {
+ fpath = oe.path.join(d.getVar("S"), "/gettext-tools/m4/Makefile.am")
+ with open(fpath) as f:
+ content = f.read()
+ for l in content.replace("\\\n","").split("\n"):
+ if l.startswith("aclocal_DATA"):
+ aclocal_files = l.split("=")[1]
+ with open(oe.path.join(d.getVar("WORKDIR"),"aclocal-files"),'w') as outf:
+ outf.write(aclocal_files)
+ break
+ else:
+ bb.error("Could not obtain list of installed aclocal files from {}".format(fpath))
+}
+do_install[prefuncs] += "get_aclocal_files"
+
+do_install () {
+ install -d ${D}${datadir}/aclocal/
+ for i in `cat ${WORKDIR}/aclocal-files`; do
+ cp ${S}/gettext-tools/m4/$i ${D}${datadir}/aclocal/
+ done
+ install -d ${D}${datadir}/gettext/po/
+ cp ${S}/build-aux/config.rpath ${D}${datadir}/gettext/
+ cp ${S}/gettext-runtime/po/Makefile.in.in ${D}${datadir}/gettext/po/
+ cp ${S}/gettext-runtime/po/remove-potcdate.sin ${D}${datadir}/gettext/po/
+}
diff --git a/meta/recipes-core/gettext/gettext-minimal-0.21/COPYING b/meta/recipes-core/gettext/gettext-minimal/COPYING
index 3671ab6985..3671ab6985 100644
--- a/meta/recipes-core/gettext/gettext-minimal-0.21/COPYING
+++ b/meta/recipes-core/gettext/gettext-minimal/COPYING
diff --git a/meta/recipes-core/gettext/gettext-sources.inc b/meta/recipes-core/gettext/gettext-sources.inc
new file mode 100644
index 0000000000..fd05837528
--- /dev/null
+++ b/meta/recipes-core/gettext/gettext-sources.inc
@@ -0,0 +1,4 @@
+HOMEPAGE = "http://www.gnu.org/software/gettext/gettext.html"
+SRC_URI = "${GNU_MIRROR}/gettext/gettext-${PV}.tar.gz \
+ "
+SRC_URI[sha256sum] = "ec1705b1e969b83a9f073144ec806151db88127f5e40fe5a94cb6c8fa48996a0"
diff --git a/meta/recipes-core/gettext/gettext-0.21/0001-init-env.in-do-not-add-C-CXX-parameters.patch b/meta/recipes-core/gettext/gettext/0001-init-env.in-do-not-add-C-CXX-parameters.patch
index d45b75869a..33d45a038c 100644
--- a/meta/recipes-core/gettext/gettext-0.21/0001-init-env.in-do-not-add-C-CXX-parameters.patch
+++ b/meta/recipes-core/gettext/gettext/0001-init-env.in-do-not-add-C-CXX-parameters.patch
@@ -1,4 +1,4 @@
-From 9b912a47f790a7b282ec0c2295a188c5d8fb6a7c Mon Sep 17 00:00:00 2001
+From c58c503c532d20883a93ef778327c86dd6de24f6 Mon Sep 17 00:00:00 2001
From: Alexander Kanavin <alex.kanavin@gmail.com>
Date: Fri, 6 Mar 2020 21:04:05 +0000
Subject: [PATCH] init-env.in: do not add C/CXX parameters
diff --git a/meta/recipes-core/gettext/gettext-0.21/0001-tests-autopoint-3-unset-MAKEFLAGS.patch b/meta/recipes-core/gettext/gettext/0001-tests-autopoint-3-unset-MAKEFLAGS.patch
index b0bc6b97a6..94afb255c3 100644
--- a/meta/recipes-core/gettext/gettext-0.21/0001-tests-autopoint-3-unset-MAKEFLAGS.patch
+++ b/meta/recipes-core/gettext/gettext/0001-tests-autopoint-3-unset-MAKEFLAGS.patch
@@ -1,4 +1,4 @@
-From 38b256e5aa7dfeb42acffd89565e53a2c0bab3e3 Mon Sep 17 00:00:00 2001
+From 7006142d199af13628edc687847a9a5bac48b45d Mon Sep 17 00:00:00 2001
From: Alexander Kanavin <alex.kanavin@gmail.com>
Date: Tue, 7 Jan 2020 16:44:38 +0100
Subject: [PATCH] tests/autopoint-3: unset MAKEFLAGS
@@ -13,10 +13,10 @@ Signed-off-by: Alexander Kanavin <alex.kanavin@gmail.com>
1 file changed, 1 insertion(+)
diff --git a/gettext-tools/tests/autopoint-3 b/gettext-tools/tests/autopoint-3
-index e13552b..55188df 100755
+index bd3fbf3..380eaac 100755
--- a/gettext-tools/tests/autopoint-3
+++ b/gettext-tools/tests/autopoint-3
-@@ -126,6 +126,7 @@ test $? = 0 || { cat autopoint.err; Exit 1; }
+@@ -131,6 +131,7 @@ test $? = 0 || { cat autopoint.err; Exit 1; }
${CONFIG_SHELL} ./configure >/dev/null 2>autpoint.err
test $? = 0 || { cat autopoint.err; Exit 1; }
diff --git a/meta/recipes-core/gettext/gettext-0.21/parallel.patch b/meta/recipes-core/gettext/gettext/parallel.patch
index d96a376b7d..8ecba46fd0 100644
--- a/meta/recipes-core/gettext/gettext-0.21/parallel.patch
+++ b/meta/recipes-core/gettext/gettext/parallel.patch
@@ -1,4 +1,4 @@
-From 4a2a0a93b469093b60ffd0bec55d33d1e03d4713 Mon Sep 17 00:00:00 2001
+From 83333bc873c2655ff1de161f6b7db930f22f1cea Mon Sep 17 00:00:00 2001
From: Joe Slater <jslater@windriver.com>
Date: Thu, 7 Jun 2012 16:37:01 -0700
Subject: [PATCH] instal libgettextlib.a before removing it
@@ -10,16 +10,15 @@ the operations.
Upstream-Status: Pending
Signed-off-by: Joe Slater <jslater@windriver.com>
-
---
gettext-tools/gnulib-lib/Makefile.am | 4 ++++
1 file changed, 4 insertions(+)
diff --git a/gettext-tools/gnulib-lib/Makefile.am b/gettext-tools/gnulib-lib/Makefile.am
-index 2126699..d2dd7e4 100644
+index ab729cd..a0d713e 100644
--- a/gettext-tools/gnulib-lib/Makefile.am
+++ b/gettext-tools/gnulib-lib/Makefile.am
-@@ -58,6 +58,10 @@ endif
+@@ -58,6 +58,10 @@ AM_CFLAGS = @WARN_CFLAGS@
# Rules generated and collected by gnulib-tool.
include Makefile.gnulib
diff --git a/meta/recipes-core/gettext/gettext-0.21/run-ptest b/meta/recipes-core/gettext/gettext/run-ptest
index f17f3c87a7..f17f3c87a7 100644
--- a/meta/recipes-core/gettext/gettext-0.21/run-ptest
+++ b/meta/recipes-core/gettext/gettext/run-ptest
diff --git a/meta/recipes-core/gettext/gettext-0.21/serial-tests-config.patch b/meta/recipes-core/gettext/gettext/serial-tests-config.patch
index 93f7c03334..7d39d6086a 100644
--- a/meta/recipes-core/gettext/gettext-0.21/serial-tests-config.patch
+++ b/meta/recipes-core/gettext/gettext/serial-tests-config.patch
@@ -1,4 +1,4 @@
-From ed64a5724ef7d6eb4e9a876f817ea266a536e195 Mon Sep 17 00:00:00 2001
+From aa8dfe0aafd0b53a6c5bac61b356ca92e1430a36 Mon Sep 17 00:00:00 2001
From: "Hongjun.Yang" <hongjun.yang@windriver.com>
Date: Thu, 28 Jul 2016 12:36:15 +0800
Subject: [PATCH] fix for ptest
@@ -8,7 +8,6 @@ Add serial-tests support, ptest need it
Upstream-Status: Inappropriate [oe specific]
Signed-off-by: Changqing Li <changqing.li@windriver.com>
-
---
configure.ac | 2 +-
gettext-runtime/configure.ac | 2 +-
@@ -16,7 +15,7 @@ Signed-off-by: Changqing Li <changqing.li@windriver.com>
3 files changed, 3 insertions(+), 3 deletions(-)
diff --git a/configure.ac b/configure.ac
-index 38db6fd..f019ae0 100644
+index f4e54ea..a47ff14 100644
--- a/configure.ac
+++ b/configure.ac
@@ -22,7 +22,7 @@ AC_INIT([gettext],
@@ -29,7 +28,7 @@ index 38db6fd..f019ae0 100644
dnl Override automake's tar command used for creating distributions.
am__tar='${AMTAR} chf - --format=ustar --owner=root --group=root "$$tardir"'
diff --git a/gettext-runtime/configure.ac b/gettext-runtime/configure.ac
-index de203e7..138a07f 100644
+index 126d069..aa23232 100644
--- a/gettext-runtime/configure.ac
+++ b/gettext-runtime/configure.ac
@@ -22,7 +22,7 @@ AC_INIT([gettext-runtime],
@@ -42,7 +41,7 @@ index de203e7..138a07f 100644
dnl Installation directories.
diff --git a/gettext-tools/configure.ac b/gettext-tools/configure.ac
-index cf1dd73..b544d6d 100644
+index 3610ee3..6f56a23 100644
--- a/gettext-tools/configure.ac
+++ b/gettext-tools/configure.ac
@@ -22,7 +22,7 @@ AC_INIT([gettext-tools],
diff --git a/meta/recipes-core/gettext/gettext/use-pkgconfig.patch b/meta/recipes-core/gettext/gettext/use-pkgconfig.patch
new file mode 100644
index 0000000000..ceb1856118
--- /dev/null
+++ b/meta/recipes-core/gettext/gettext/use-pkgconfig.patch
@@ -0,0 +1,391 @@
+From 6aa1338b916fe72c200b6f160b934be15b6ff590 Mon Sep 17 00:00:00 2001
+From: Ross Burton <ross.burton@intel.com>
+Date: Tue, 23 Jan 2018 00:54:13 +0000
+Subject: [PATCH] gettext: beat library detection into shape
+
+For reasons which I just can't fathom gnulib doesn't use the expected tools to
+find libraries but badly reinvents the wheel. This will trivially lead to host
+contamination (explicit searches of /usr/lib) or incorrect RPATHs (bad
+canonicalisation resulting in relative paths).
+
+Simply delete all the crazy, and replace with a single call to pkg-config.
+
+Upstream-Status: Inappropriate [upstream still refuse to consider pkg-config]
+Signed-off-by: Ross Burton <ross.burton@intel.com>
+---
+ gettext-tools/gnulib-m4/libxml.m4 | 105 ++---------------------
+ libtextstyle/gnulib-local/m4/libglib.m4 | 106 +++---------------------
+ libtextstyle/gnulib-m4/libglib.m4 | 106 +++---------------------
+ 3 files changed, 31 insertions(+), 286 deletions(-)
+
+diff --git a/gettext-tools/gnulib-m4/libxml.m4 b/gettext-tools/gnulib-m4/libxml.m4
+index 0340490..0355388 100644
+--- a/gettext-tools/gnulib-m4/libxml.m4
++++ b/gettext-tools/gnulib-m4/libxml.m4
+@@ -13,6 +13,7 @@ dnl gl_LIBXML(FORCE-INCLUDED)
+ dnl forces the use of the included or an external libxml.
+ AC_DEFUN([gl_LIBXML],
+ [
++ AC_REQUIRE([PKG_PROG_PKG_CONFIG])
+ AC_REQUIRE([AM_ICONV_LINK])
+
+ ifelse([$1], , [
+@@ -30,106 +31,10 @@ AC_DEFUN([gl_LIBXML],
+ INCXML=
+ ifelse([$1], [yes], , [
+ if test "$gl_cv_libxml_use_included" != yes; then
+- dnl Figure out whether we can use a preinstalled libxml2, or have to use
+- dnl the included one.
+- AC_CACHE_VAL([gl_cv_libxml], [
+- gl_cv_libxml=no
+- gl_cv_LIBXML=
+- gl_cv_LTLIBXML=
+- gl_cv_INCXML=
+- gl_save_LIBS="$LIBS"
+- LIBS="$LIBS $LIBICONV"
+- dnl Search for libxml2 and define LIBXML2, LTLIBXML2 and INCXML2
+- dnl accordingly.
+- dnl Don't use xml2-config nor pkg-config, since it doesn't work when
+- dnl cross-compiling or when the C compiler in use is different from the
+- dnl one that built the library.
+- dnl Use a test program that tries to invoke xmlFree. On Cygwin 1.7.x,
+- dnl libxml2 is built in such a way that uses of xmlFree work fine with
+- dnl -Wl,--enable-auto-import but lead to a link error with
+- dnl -Wl,--disable-auto-import.
+- AC_LIB_LINKFLAGS_BODY([xml2])
+- LIBS="$gl_save_LIBS $LIBXML2 $LIBICONV"
+- AC_LINK_IFELSE(
+- [AC_LANG_PROGRAM(
+- [[#include <libxml/xmlversion.h>
+- #include <libxml/xmlmemory.h>
+- #include <libxml/xpath.h>
+- ]],
+- [[xmlCheckVersion (0);
+- xmlFree ((void *) 0);
+- xmlXPathSetContextNode ((void *)0, (void *)0);
+- ]])],
+- [gl_cv_libxml=yes
+- gl_cv_LIBXML="$LIBXML2 $LIBICONV"
+- gl_cv_LTLIBXML="$LTLIBXML2 $LTLIBICONV"
+- ])
+- if test "$gl_cv_libxml" != yes; then
+- gl_save_CPPFLAGS="$CPPFLAGS"
+- CPPFLAGS="$CPPFLAGS $INCXML2"
+- AC_LINK_IFELSE(
+- [AC_LANG_PROGRAM(
+- [[#include <libxml/xmlversion.h>
+- #include <libxml/xmlmemory.h>
+- #include <libxml/xpath.h>
+- ]],
+- [[xmlCheckVersion (0);
+- xmlFree ((void *) 0);
+- xmlXPathSetContextNode ((void *)0, (void *)0);
+- ]])],
+- [gl_cv_libxml=yes
+- gl_cv_LIBXML="$LIBXML2 $LIBICONV"
+- gl_cv_LTLIBXML="$LTLIBXML2 $LTLIBICONV"
+- gl_cv_INCXML="$INCXML2"
+- ])
+- if test "$gl_cv_libxml" != yes; then
+- dnl Often the include files are installed in /usr/include/libxml2.
+- dnl In libxml2-2.5, <libxml/xmlversion.h> is self-contained.
+- dnl In libxml2-2.6, it includes <libxml/xmlexports.h> which is
+- dnl self-contained.
+- libxml2_include_dir=
+- AC_PREPROC_IFELSE([AC_LANG_SOURCE([[#include <libxml2/libxml/xmlexports.h>]])],
+- [gl_ABSOLUTE_HEADER([libxml2/libxml/xmlexports.h])
+- libxml2_include_dir=`echo "$gl_cv_absolute_libxml2_libxml_xmlexports_h" | sed -e 's,.libxml.xmlexports\.h$,,'`
+- ])
+- if test -z "$libxml2_include_dir"; then
+- AC_PREPROC_IFELSE([AC_LANG_SOURCE([[#include <libxml2/libxml/xmlversion.h>]])],
+- [gl_ABSOLUTE_HEADER([libxml2/libxml/xmlversion.h])
+- libxml2_include_dir=`echo "$gl_cv_absolute_libxml2_libxml_xmlversion_h" | sed -e 's,.libxml.xmlversion\.h$,,'`
+- ])
+- fi
+- if test -n "$libxml2_include_dir" && test -d "$libxml2_include_dir"; then
+- CPPFLAGS="$gl_save_CPPFLAGS -I$libxml2_include_dir"
+- AC_LINK_IFELSE(
+- [AC_LANG_PROGRAM(
+- [[#include <libxml/xmlversion.h>
+- #include <libxml/xmlmemory.h>
+- #include <libxml/xpath.h>
+- ]],
+- [[xmlCheckVersion (0);
+- xmlFree ((void *) 0);
+- xmlXPathSetContextNode ((void *)0, (void *)0);
+- ]])],
+- [gl_cv_libxml=yes
+- gl_cv_LIBXML="$LIBXML2 $LIBICONV"
+- gl_cv_LTLIBXML="$LTLIBXML2 $LTLIBICONV"
+- gl_cv_INCXML="-I$libxml2_include_dir"
+- ])
+- fi
+- fi
+- CPPFLAGS="$gl_save_CPPFLAGS"
+- fi
+- LIBS="$gl_save_LIBS"
+- ])
+- AC_MSG_CHECKING([for libxml])
+- AC_MSG_RESULT([$gl_cv_libxml])
+- if test $gl_cv_libxml = yes; then
+- LIBXML="$gl_cv_LIBXML"
+- LTLIBXML="$gl_cv_LTLIBXML"
+- INCXML="$gl_cv_INCXML"
+- else
+- gl_cv_libxml_use_included=yes
+- fi
++ PKG_CHECK_MODULES([XML], [libxml-2.0])
++ LIBXML=$XML_LIBS
++ LTLIBXML=$XML_LIBS
++ INCXML=$XML_CFLAGS
+ fi
+ ])
+ AC_SUBST([LIBXML])
+diff --git a/libtextstyle/gnulib-local/m4/libglib.m4 b/libtextstyle/gnulib-local/m4/libglib.m4
+index dbc9eb8..136e512 100644
+--- a/libtextstyle/gnulib-local/m4/libglib.m4
++++ b/libtextstyle/gnulib-local/m4/libglib.m4
+@@ -6,106 +6,26 @@ dnl with or without modifications, as long as this notice is preserved.
+
+ dnl From Bruno Haible.
+
+-dnl gl_LIBGLIB
+-dnl gives the user the option to decide whether to use the included or
+-dnl an external libglib.
+-dnl gl_LIBGLIB(FORCE-INCLUDED)
+-dnl forces the use of the included or an external libglib.
+ AC_DEFUN([gl_LIBGLIB],
+ [
+- ifelse([$1], , [
+- AC_MSG_CHECKING([whether included glib is requested])
+- AC_ARG_WITH([included-glib],
+- [ --with-included-glib use the glib2 included here],
+- [gl_cv_libglib_force_included=$withval],
+- [gl_cv_libglib_force_included=no])
+- AC_MSG_RESULT([$gl_cv_libglib_force_included])
+- ], [gl_cv_libglib_force_included=$1])
++ AC_REQUIRE([PKG_PROG_PKG_CONFIG])
++ AC_MSG_CHECKING([whether included glib is requested])
++ AC_ARG_WITH([included-glib],
++ [ --with-included-glib use the glib2 included here],
++ [gl_cv_libglib_force_included=$withval],
++ [gl_cv_libglib_force_included=no])
++ AC_MSG_RESULT([$gl_cv_libglib_force_included])
+
+ gl_cv_libglib_use_included="$gl_cv_libglib_force_included"
+ LIBGLIB=
+ LTLIBGLIB=
+ INCGLIB=
+- ifelse([$1], [yes], , [
+- if test "$gl_cv_libglib_use_included" != yes; then
+- dnl Figure out whether we can use a preinstalled libglib-2.0, or have to use
+- dnl the included one.
+- AC_CACHE_VAL([gl_cv_libglib], [
+- gl_cv_libglib=no
+- gl_cv_LIBGLIB=
+- gl_cv_LTLIBGLIB=
+- gl_cv_INCGLIB=
+- gl_save_LIBS="$LIBS"
+- dnl Search for libglib2 and define LIBGLIB_2_0, LTLIBGLIB_2_0 and
+- dnl INCGLIB_2_0 accordingly.
+- dnl Don't use glib-config nor pkg-config, since it doesn't work when
+- dnl cross-compiling or when the C compiler in use is different from the
+- dnl one that built the library.
+- AC_LIB_LINKFLAGS_BODY([glib-2.0])
+- LIBS="$gl_save_LIBS $LIBGLIB_2_0"
+- AC_LINK_IFELSE(
+- [AC_LANG_PROGRAM(
+- [[#include <glib.h>
+- #ifndef G_BEGIN_DECLS
+- error this glib.h includes a glibconfig.h from a glib version 1.x
+- #endif
+- ]],
+- [[g_string_new ("foo");]])],
+- [gl_cv_libglib=yes
+- gl_cv_LIBGLIB="$LIBGLIB_2_0"
+- gl_cv_LTLIBGLIB="$LTLIBGLIB_2_0"
+- ])
+- if test "$gl_cv_libglib" != yes; then
+- gl_save_CPPFLAGS="$CPPFLAGS"
+- CPPFLAGS="$CPPFLAGS $INCGLIB_2_0"
+- AC_LINK_IFELSE(
+- [AC_LANG_PROGRAM(
+- [[#include <glib.h>
+- #ifndef G_BEGIN_DECLS
+- error this glib.h includes a glibconfig.h from a glib version 1.x
+- #endif
+- ]],
+- [[g_string_new ("foo");]])],
+- [gl_cv_libglib=yes
+- gl_cv_LIBGLIB="$LIBGLIB_2_0"
+- gl_cv_LTLIBGLIB="$LTLIBGLIB_2_0"
+- gl_cv_INCGLIB="$INCGLIB_2_0"
+- ])
+- if test "$gl_cv_libglib" != yes; then
+- dnl Often the include files are installed in /usr/include/glib-2.0
+- dnl and /usr/lib/glib-2.0/include.
+- if test -n "$LIBGLIB_2_0_PREFIX"; then
+- CPPFLAGS="$gl_save_CPPFLAGS -I$LIBGLIB_2_0_PREFIX/include/glib-2.0 -I$LIBGLIB_2_0_PREFIX/$acl_libdirstem/glib-2.0/include"
+- AC_LINK_IFELSE(
+- [AC_LANG_PROGRAM(
+- [[#include <glib.h>
+- #ifndef G_BEGIN_DECLS
+- error this glib.h includes a glibconfig.h from a glib version 1.x
+- #endif
+- ]],
+- [[g_string_new ("foo");]])],
+- [gl_cv_libglib=yes
+- gl_cv_LIBGLIB="$LIBGLIB_2_0"
+- gl_cv_LTLIBGLIB="$LTLIBGLIB_2_0"
+- gl_cv_INCGLIB="-I$LIBGLIB_2_0_PREFIX/include/glib-2.0 -I$LIBGLIB_2_0_PREFIX/$acl_libdirstem/glib-2.0/include"
+- ])
+- fi
+- fi
+- CPPFLAGS="$gl_save_CPPFLAGS"
+- fi
+- LIBS="$gl_save_LIBS"
+- ])
+- AC_MSG_CHECKING([for glib])
+- AC_MSG_RESULT([$gl_cv_libglib])
+- if test $gl_cv_libglib = yes; then
+- LIBGLIB="$gl_cv_LIBGLIB"
+- LTLIBGLIB="$gl_cv_LTLIBGLIB"
+- INCGLIB="$gl_cv_INCGLIB"
+- else
+- gl_cv_libglib_use_included=yes
+- fi
+- fi
+- ])
++ if test "$gl_cv_libglib_use_included" != yes; then
++ PKG_CHECK_MODULES([GLIB], [glib-2.0])
++ LIBGLIB="$GLIB_LIBS"
++ LTLIBGLIB="$GLIB_LIBS"
++ INCGLIB="$GLIB_CFLAGS"
++ fi
+ AC_SUBST([LIBGLIB])
+ AC_SUBST([LTLIBGLIB])
+ AC_SUBST([INCGLIB])
+diff --git a/libtextstyle/gnulib-m4/libglib.m4 b/libtextstyle/gnulib-m4/libglib.m4
+index dbc9eb8..136e512 100644
+--- a/libtextstyle/gnulib-m4/libglib.m4
++++ b/libtextstyle/gnulib-m4/libglib.m4
+@@ -6,106 +6,26 @@ dnl with or without modifications, as long as this notice is preserved.
+
+ dnl From Bruno Haible.
+
+-dnl gl_LIBGLIB
+-dnl gives the user the option to decide whether to use the included or
+-dnl an external libglib.
+-dnl gl_LIBGLIB(FORCE-INCLUDED)
+-dnl forces the use of the included or an external libglib.
+ AC_DEFUN([gl_LIBGLIB],
+ [
+- ifelse([$1], , [
+- AC_MSG_CHECKING([whether included glib is requested])
+- AC_ARG_WITH([included-glib],
+- [ --with-included-glib use the glib2 included here],
+- [gl_cv_libglib_force_included=$withval],
+- [gl_cv_libglib_force_included=no])
+- AC_MSG_RESULT([$gl_cv_libglib_force_included])
+- ], [gl_cv_libglib_force_included=$1])
++ AC_REQUIRE([PKG_PROG_PKG_CONFIG])
++ AC_MSG_CHECKING([whether included glib is requested])
++ AC_ARG_WITH([included-glib],
++ [ --with-included-glib use the glib2 included here],
++ [gl_cv_libglib_force_included=$withval],
++ [gl_cv_libglib_force_included=no])
++ AC_MSG_RESULT([$gl_cv_libglib_force_included])
+
+ gl_cv_libglib_use_included="$gl_cv_libglib_force_included"
+ LIBGLIB=
+ LTLIBGLIB=
+ INCGLIB=
+- ifelse([$1], [yes], , [
+- if test "$gl_cv_libglib_use_included" != yes; then
+- dnl Figure out whether we can use a preinstalled libglib-2.0, or have to use
+- dnl the included one.
+- AC_CACHE_VAL([gl_cv_libglib], [
+- gl_cv_libglib=no
+- gl_cv_LIBGLIB=
+- gl_cv_LTLIBGLIB=
+- gl_cv_INCGLIB=
+- gl_save_LIBS="$LIBS"
+- dnl Search for libglib2 and define LIBGLIB_2_0, LTLIBGLIB_2_0 and
+- dnl INCGLIB_2_0 accordingly.
+- dnl Don't use glib-config nor pkg-config, since it doesn't work when
+- dnl cross-compiling or when the C compiler in use is different from the
+- dnl one that built the library.
+- AC_LIB_LINKFLAGS_BODY([glib-2.0])
+- LIBS="$gl_save_LIBS $LIBGLIB_2_0"
+- AC_LINK_IFELSE(
+- [AC_LANG_PROGRAM(
+- [[#include <glib.h>
+- #ifndef G_BEGIN_DECLS
+- error this glib.h includes a glibconfig.h from a glib version 1.x
+- #endif
+- ]],
+- [[g_string_new ("foo");]])],
+- [gl_cv_libglib=yes
+- gl_cv_LIBGLIB="$LIBGLIB_2_0"
+- gl_cv_LTLIBGLIB="$LTLIBGLIB_2_0"
+- ])
+- if test "$gl_cv_libglib" != yes; then
+- gl_save_CPPFLAGS="$CPPFLAGS"
+- CPPFLAGS="$CPPFLAGS $INCGLIB_2_0"
+- AC_LINK_IFELSE(
+- [AC_LANG_PROGRAM(
+- [[#include <glib.h>
+- #ifndef G_BEGIN_DECLS
+- error this glib.h includes a glibconfig.h from a glib version 1.x
+- #endif
+- ]],
+- [[g_string_new ("foo");]])],
+- [gl_cv_libglib=yes
+- gl_cv_LIBGLIB="$LIBGLIB_2_0"
+- gl_cv_LTLIBGLIB="$LTLIBGLIB_2_0"
+- gl_cv_INCGLIB="$INCGLIB_2_0"
+- ])
+- if test "$gl_cv_libglib" != yes; then
+- dnl Often the include files are installed in /usr/include/glib-2.0
+- dnl and /usr/lib/glib-2.0/include.
+- if test -n "$LIBGLIB_2_0_PREFIX"; then
+- CPPFLAGS="$gl_save_CPPFLAGS -I$LIBGLIB_2_0_PREFIX/include/glib-2.0 -I$LIBGLIB_2_0_PREFIX/$acl_libdirstem/glib-2.0/include"
+- AC_LINK_IFELSE(
+- [AC_LANG_PROGRAM(
+- [[#include <glib.h>
+- #ifndef G_BEGIN_DECLS
+- error this glib.h includes a glibconfig.h from a glib version 1.x
+- #endif
+- ]],
+- [[g_string_new ("foo");]])],
+- [gl_cv_libglib=yes
+- gl_cv_LIBGLIB="$LIBGLIB_2_0"
+- gl_cv_LTLIBGLIB="$LTLIBGLIB_2_0"
+- gl_cv_INCGLIB="-I$LIBGLIB_2_0_PREFIX/include/glib-2.0 -I$LIBGLIB_2_0_PREFIX/$acl_libdirstem/glib-2.0/include"
+- ])
+- fi
+- fi
+- CPPFLAGS="$gl_save_CPPFLAGS"
+- fi
+- LIBS="$gl_save_LIBS"
+- ])
+- AC_MSG_CHECKING([for glib])
+- AC_MSG_RESULT([$gl_cv_libglib])
+- if test $gl_cv_libglib = yes; then
+- LIBGLIB="$gl_cv_LIBGLIB"
+- LTLIBGLIB="$gl_cv_LTLIBGLIB"
+- INCGLIB="$gl_cv_INCGLIB"
+- else
+- gl_cv_libglib_use_included=yes
+- fi
+- fi
+- ])
++ if test "$gl_cv_libglib_use_included" != yes; then
++ PKG_CHECK_MODULES([GLIB], [glib-2.0])
++ LIBGLIB="$GLIB_LIBS"
++ LTLIBGLIB="$GLIB_LIBS"
++ INCGLIB="$GLIB_CFLAGS"
++ fi
+ AC_SUBST([LIBGLIB])
+ AC_SUBST([LTLIBGLIB])
+ AC_SUBST([INCGLIB])
diff --git a/meta/recipes-core/gettext/gettext_0.21.bb b/meta/recipes-core/gettext/gettext_0.21.bb
deleted file mode 100644
index 364e6a52a1..0000000000
--- a/meta/recipes-core/gettext/gettext_0.21.bb
+++ /dev/null
@@ -1,218 +0,0 @@
-SUMMARY = "Utilities and libraries for producing multi-lingual messages"
-DESCRIPTION = "GNU gettext is a set of tools that provides a framework to help other programs produce multi-lingual messages. \
-These tools include a set of conventions about how programs should be written to support message catalogs, a directory and file \
-naming organization for the message catalogs themselves, a runtime library supporting the retrieval of translated messages, and \
-a few stand-alone programs to massage in various ways the sets of translatable and already translated strings."
-HOMEPAGE = "http://www.gnu.org/software/gettext/gettext.html"
-SECTION = "libs"
-LICENSE = "GPL-3.0-or-later & LGPL-2.1-or-later"
-LIC_FILES_CHKSUM = "file://COPYING;md5=c678957b0c8e964aa6c70fd77641a71e"
-
-# without libxml in PACKAGECONFIG vendor copy of the lib will be used
-LICENSE:append = " ${@bb.utils.contains('PACKAGECONFIG', 'libxml', '', '& MIT', d)}"
-LIC_FILES_CHKSUM:append = " ${@bb.utils.contains('PACKAGECONFIG', 'libxml', '', 'file://libtextstyle/lib/libxml/COPYING;md5=2044417e2e5006b65a8b9067b683fcf1', d)}"
-# without croco in PACKAGECONFIG vendor copy of the lib will be used
-LIC_FILES_CHKSUM:append = " ${@bb.utils.contains('PACKAGECONFIG', 'croco', '', 'file://libtextstyle/lib/libcroco/libcroco.h;md5=915a46e7307c2f7f8d2b9c503fc434ed;beginline=10;endline=28', d)}"
-# without glib in PACKAGECONFIG vendor copy of the lib will be used
-LIC_FILES_CHKSUM:append = " ${@bb.utils.contains('PACKAGECONFIG', 'glib', '', 'file://libtextstyle/lib/glib/ghash.c;md5=af89a160226edf0b276b6183888037d0;beginline=10;endline=27', d)}"
-
-
-DEPENDS = "gettext-native virtual/libiconv"
-DEPENDS:class-native = "gettext-minimal-native"
-PROVIDES = "virtual/libintl virtual/gettext"
-PROVIDES:class-native = "virtual/gettext-native"
-RCONFLICTS:${PN} = "proxy-libintl"
-SRC_URI = "${GNU_MIRROR}/gettext/gettext-${PV}.tar.gz \
- file://parallel.patch \
- file://use-pkgconfig.patch \
- file://run-ptest \
- file://serial-tests-config.patch \
- file://0001-tests-autopoint-3-unset-MAKEFLAGS.patch \
- file://0001-init-env.in-do-not-add-C-CXX-parameters.patch \
- file://mingw.patch \
- file://0001-msgmerge-29-Add-executable-file-mode-bits.patch \
- file://0001-libtextstyle-fix-builds-with-automake-1.16.4-and-new.patch \
- "
-SRC_URI[sha256sum] = "c77d0da3102aec9c07f43671e60611ebff89a996ef159497ce8e59d075786b12"
-
-inherit autotools texinfo pkgconfig ptest
-
-EXTRA_OECONF += "--without-lispdir \
- --disable-csharp \
- --disable-libasprintf \
- --disable-java \
- --disable-native-java \
- --disable-openmp \
- --disable-acl \
- --without-emacs \
- --without-cvs \
- --without-git \
- --cache-file=${B}/config.cache \
- "
-EXTRA_OECONF:append:class-target = " \
- --with-bisonlocaledir=${datadir}/locale \
- gt_cv_locale_fr_utf8=fr_FR \
- gt_cv_locale_fr=fr_FR.ISO-8859-1 \
- gt_cv_locale_de_utf8=de_DE \
- gt_cv_locale_de=de_DE.ISO-8859-1 \
-"
-
-PACKAGECONFIG ??= "croco glib libxml"
-PACKAGECONFIG:class-native = ""
-PACKAGECONFIG:class-nativesdk = ""
-
-PACKAGECONFIG[croco] = "--without-included-libcroco,--with-included-libcroco,libcroco"
-PACKAGECONFIG[glib] = "--without-included-glib,--with-included-glib,glib-2.0"
-PACKAGECONFIG[libxml] = "--without-included-libxml,--with-included-libxml,libxml2"
-# Need paths here to avoid host contamination but this can cause RPATH warnings
-# or problems if $libdir isn't $prefix/lib.
-PACKAGECONFIG[libunistring] = "--with-libunistring-prefix=${STAGING_LIBDIR}/..,--with-included-libunistring,libunistring"
-PACKAGECONFIG[msgcat-curses] = "--with-libncurses-prefix=${STAGING_LIBDIR}/..,--disable-curses,ncurses,"
-
-acpaths = '-I ${S}/gettext-runtime/m4 \
- -I ${S}/gettext-tools/m4'
-
-do_install:append:libc-musl () {
- rm -f ${D}${libdir}/charset.alias
- rm -f ${D}${includedir}/libintl.h
- rm -f ${D}${libdir}/libintl.la
-}
-
-# these lack the .x behind the .so, but shouldn't be in the -dev package
-# Otherwise you get the following results:
-# 7.4M glibc/images/ep93xx/Angstrom-console-image-glibc-ipk-2008.1-test-20080104-ep93xx.rootfs.tar.gz
-# 25M uclibc/images/ep93xx/Angstrom-console-image-uclibc-ipk-2008.1-test-20080104-ep93xx.rootfs.tar.gz
-# because gettext depends on gettext-dev, which pulls in more -dev packages:
-# 15228 KiB /ep93xx/libstdc++-dev_4.2.2-r2_ep93xx.ipk
-# 1300 KiB /ep93xx/uclibc-dev_0.9.29-r8_ep93xx.ipk
-# 140 KiB /armv4t/gettext-dev_0.14.1-r6_armv4t.ipk
-# 4 KiB /ep93xx/libgcc-s-dev_4.2.2-r2_ep93xx.ipk
-
-PACKAGES =+ "libgettextlib libgettextsrc"
-FILES:libgettextlib = "${libdir}/libgettextlib-*.so*"
-FILES:libgettextsrc = "${libdir}/libgettextsrc-*.so*"
-
-PACKAGES =+ "gettext-runtime gettext-runtime-dev gettext-runtime-doc"
-
-FILES:${PN} += "${libdir}/${BPN}/*"
-
-# The its/Makefile.am has defined:
-# itsdir = $(pkgdatadir)$(PACKAGE_SUFFIX)/its
-# not itsdir = $(pkgdatadir), so use wildcard to match the version.
-FILES:${PN} += "${datadir}/${BPN}-*/*"
-
-FILES:gettext-runtime = "${bindir}/gettext \
- ${bindir}/ngettext \
- ${bindir}/envsubst \
- ${bindir}/gettext.sh \
- ${libdir}/libasprintf.so* \
- ${libdir}/GNU.Gettext.dll \
- "
-FILES:gettext-runtime-dev += "${libdir}/libasprintf.a \
- ${includedir}/autosprintf.h \
- "
-FILES:gettext-runtime-doc = "${mandir}/man1/gettext.* \
- ${mandir}/man1/ngettext.* \
- ${mandir}/man1/envsubst.* \
- ${mandir}/man1/.* \
- ${mandir}/man3/* \
- ${docdir}/gettext/gettext.* \
- ${docdir}/gettext/ngettext.* \
- ${docdir}/gettext/envsubst.* \
- ${docdir}/gettext/*.3.html \
- ${datadir}/gettext/ABOUT-NLS \
- ${docdir}/gettext/csharpdoc/* \
- ${docdir}/libasprintf/autosprintf.html \
- ${infodir}/autosprintf.info \
- "
-
-do_install:append() {
- rm -f ${D}${libdir}/preloadable_libintl.so
-}
-
-do_install:append:class-native () {
- rm ${D}${datadir}/aclocal/*
- rm ${D}${datadir}/gettext/config.rpath
- rm ${D}${datadir}/gettext/po/Makefile.in.in
- rm ${D}${datadir}/gettext/po/remove-potcdate.sin
-
- create_wrapper ${D}${bindir}/msgfmt \
- GETTEXTDATADIR="${STAGING_DATADIR_NATIVE}/gettext-${PV}/"
-
-}
-
-do_compile_ptest() {
- cd ${B}/gettext-tools/tests/
- sed -i '/^buildtest-TESTS: /c buildtest-TESTS: $(TESTS) $(check_PROGRAMS)' Makefile
- oe_runmake buildtest-TESTS
- cd -
-}
-
-do_install_ptest() {
- if [ ${PTEST_ENABLED} = "1" ]; then
- mkdir -p ${D}${PTEST_PATH}/tests
- mkdir -p ${D}${PTEST_PATH}/src
- mkdir -p ${D}${PTEST_PATH}/po
- mkdir -p ${D}${PTEST_PATH}/misc
- mkdir -p ${D}${PTEST_PATH}/its
- mkdir -p ${D}${PTEST_PATH}/styles
- mkdir -p ${D}${PTEST_PATH}/gnulib-lib
- mkdir -p ${D}${PTEST_PATH}/examples
- cp -rf ${S}/gettext-tools/its/* ${D}${PTEST_PATH}/its
- cp -rf ${S}/gettext-tools/styles/* ${D}${PTEST_PATH}/styles
- cp -rf ${S}/gettext-tools/gnulib-lib/gettext.h ${D}${PTEST_PATH}/gnulib-lib
- cp -rf ${S}/gettext-tools/examples/hello-c ${D}${PTEST_PATH}/examples
- cp -rf ${S}/gettext-tools/tests/* ${D}${PTEST_PATH}/tests
- cp -rf ${B}/gettext-tools/tests/.libs/* ${D}${PTEST_PATH}/tests
- cp -rf ${B}/gettext-runtime/intl/.libs/libgnuintl.so.8* ${D}${libdir}/
- cp -rf ${B}/gettext-tools/tests/Makefile ${D}${PTEST_PATH}/tests
- cp -rf ${B}/gettext-tools/tests/init-env ${D}${PTEST_PATH}/tests
- sed -i '/^Makefile:/c Makefile:' ${D}${PTEST_PATH}/tests/Makefile
- sed -i -e 's:lang-c lang-c++:lang-c++:g' ${D}${PTEST_PATH}/tests/Makefile
- install ${S}/gettext-tools/src/msgunfmt.tcl ${D}${PTEST_PATH}/src
- install ${S}/gettext-tools/src/project-id ${D}${PTEST_PATH}/src
- install ${B}/gettext-runtime/src/gettext.sh ${D}${PTEST_PATH}/src
- install ${B}/gettext-runtime/src/ngettext ${D}${PTEST_PATH}/src
- install ${B}/gettext-runtime/src/envsubst ${D}${PTEST_PATH}/src
- install ${B}/gettext-runtime/src/gettext ${D}${PTEST_PATH}/src
- install ${B}/gettext-tools/src/.libs/cldr-plurals ${D}${PTEST_PATH}/src
- install ${S}/gettext-tools/po/gettext-tools.pot ${D}${PTEST_PATH}/po
- install ${B}/gettext-tools/misc/* ${D}${PTEST_PATH}/misc
- find ${D}${PTEST_PATH}/ -name "*.o" -exec rm {} \;
- chmod 0755 ${D}${PTEST_PATH}/tests/lang-vala ${D}${PTEST_PATH}/tests/plural-1 ${D}${PTEST_PATH}/tests/xgettext-tcl-4 \
- ${D}${PTEST_PATH}/tests/xgettext-vala-1 ${D}${PTEST_PATH}/tests/xgettext-po-2
- sed -i -e 's|${DEBUG_PREFIX_MAP}||g' ${D}${PTEST_PATH}/tests/init-env
- fi
-}
-
-RDEPENDS:${PN}-ptest += "make xz"
-RDEPENDS:${PN}-ptest:append:libc-glibc = "\
- glibc-gconv-big5 \
- glibc-charmap-big5 \
- glibc-gconv-cp1251 \
- glibc-charmap-cp1251 \
- glibc-charmap-iso-8859-9 \
- glibc-gconv-iso8859-9 \
- glibc-charmap-koi8-r \
- glibc-gconv-koi8-r \
- glibc-gconv-iso8859-2 \
- glibc-charmap-iso-8859-2 \
- glibc-gconv-iso8859-1 \
- glibc-charmap-iso-8859-1 \
- glibc-gconv-euc-kr \
- glibc-charmap-euc-kr \
- glibc-gconv-euc-jp \
- glibc-charmap-euc-jp \
- locale-base-de-de \
- locale-base-fr-fr \
-"
-
-RRECOMMENDS:${PN}-ptest:append:libc-glibc = "\
- locale-base-de-de.iso-8859-1 \
- locale-base-fr-fr.iso-8859-1 \
-"
-
-INSANE_SKIP:${PN}-ptest += "ldflags"
-INSANE_SKIP:${PN}-ptest += "rpaths"
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-core/gettext/gettext_0.22.5.bb b/meta/recipes-core/gettext/gettext_0.22.5.bb
new file mode 100644
index 0000000000..1a66d37916
--- /dev/null
+++ b/meta/recipes-core/gettext/gettext_0.22.5.bb
@@ -0,0 +1,213 @@
+SUMMARY = "Utilities and libraries for producing multi-lingual messages"
+DESCRIPTION = "GNU gettext is a set of tools that provides a framework to help other programs produce multi-lingual messages. \
+These tools include a set of conventions about how programs should be written to support message catalogs, a directory and file \
+naming organization for the message catalogs themselves, a runtime library supporting the retrieval of translated messages, and \
+a few stand-alone programs to massage in various ways the sets of translatable and already translated strings."
+SECTION = "libs"
+LICENSE = "GPL-3.0-or-later & LGPL-2.1-or-later"
+LIC_FILES_CHKSUM = "file://COPYING;md5=c678957b0c8e964aa6c70fd77641a71e"
+
+# without libxml in PACKAGECONFIG vendor copy of the lib will be used
+LICENSE:append = " ${@bb.utils.contains('PACKAGECONFIG', 'libxml', '', '& MIT', d)}"
+LIC_FILES_CHKSUM:append = " ${@bb.utils.contains('PACKAGECONFIG', 'libxml', '', 'file://libtextstyle/lib/libxml/COPYING;md5=2044417e2e5006b65a8b9067b683fcf1', d)}"
+# without glib in PACKAGECONFIG vendor copy of the lib will be used
+LIC_FILES_CHKSUM:append = " ${@bb.utils.contains('PACKAGECONFIG', 'glib', '', 'file://libtextstyle/lib/glib/ghash.c;md5=e3159f5ac38dfe77af5cc0ee104dab2d;beginline=10;endline=27', d)}"
+
+
+DEPENDS = "gettext-native virtual/libiconv"
+DEPENDS:class-native = "gettext-minimal-native"
+PROVIDES = "virtual/libintl virtual/gettext"
+PROVIDES:class-native = "virtual/gettext-native"
+RCONFLICTS:${PN} = "proxy-libintl"
+
+require gettext-sources.inc
+SRC_URI += " \
+ file://parallel.patch \
+ file://use-pkgconfig.patch \
+ file://run-ptest \
+ file://serial-tests-config.patch \
+ file://0001-tests-autopoint-3-unset-MAKEFLAGS.patch \
+ file://0001-init-env.in-do-not-add-C-CXX-parameters.patch \
+ "
+
+inherit autotools texinfo pkgconfig ptest
+
+EXTRA_OECONF += "--without-lispdir \
+ --disable-csharp \
+ --disable-libasprintf \
+ --disable-java \
+ --disable-native-java \
+ --disable-openmp \
+ --disable-acl \
+ --without-emacs \
+ --without-cvs \
+ --without-git \
+ --without-included-libcroco \
+ --cache-file=${B}/config.cache \
+ "
+EXTRA_OECONF:append:class-target = " \
+ --with-bisonlocaledir=${datadir}/locale \
+ gt_cv_locale_fr_utf8=fr_FR \
+ gt_cv_locale_fr=fr_FR.ISO-8859-1 \
+ gt_cv_locale_de_utf8=de_DE \
+ gt_cv_locale_de=de_DE.ISO-8859-1 \
+"
+
+PACKAGECONFIG ??= "glib libxml"
+PACKAGECONFIG:class-native = ""
+PACKAGECONFIG:class-nativesdk = ""
+
+PACKAGECONFIG[glib] = "--without-included-glib,--with-included-glib,glib-2.0"
+PACKAGECONFIG[libxml] = "--without-included-libxml,--with-included-libxml,libxml2"
+# Need paths here to avoid host contamination but this can cause RPATH warnings
+# or problems if $libdir isn't $prefix/lib.
+PACKAGECONFIG[libunistring] = "--with-libunistring-prefix=${STAGING_LIBDIR}/..,--with-included-libunistring,libunistring"
+PACKAGECONFIG[msgcat-curses] = "--with-libncurses-prefix=${STAGING_LIBDIR}/..,--disable-curses,ncurses,"
+
+acpaths = '-I ${S}/gettext-runtime/m4 \
+ -I ${S}/gettext-tools/m4'
+
+do_install:append:libc-musl () {
+ rm -f ${D}${libdir}/charset.alias
+ rm -f ${D}${includedir}/libintl.h
+ rm -f ${D}${libdir}/libintl.la
+}
+
+# these lack the .x behind the .so, but shouldn't be in the -dev package
+# Otherwise you get the following results:
+# 7.4M glibc/images/ep93xx/Angstrom-console-image-glibc-ipk-2008.1-test-20080104-ep93xx.rootfs.tar.gz
+# 25M uclibc/images/ep93xx/Angstrom-console-image-uclibc-ipk-2008.1-test-20080104-ep93xx.rootfs.tar.gz
+# because gettext depends on gettext-dev, which pulls in more -dev packages:
+# 15228 KiB /ep93xx/libstdc++-dev_4.2.2-r2_ep93xx.ipk
+# 1300 KiB /ep93xx/uclibc-dev_0.9.29-r8_ep93xx.ipk
+# 140 KiB /armv4t/gettext-dev_0.14.1-r6_armv4t.ipk
+# 4 KiB /ep93xx/libgcc-s-dev_4.2.2-r2_ep93xx.ipk
+
+PACKAGES =+ "libgettextlib libgettextsrc"
+FILES:libgettextlib = "${libdir}/libgettextlib-*.so*"
+FILES:libgettextsrc = "${libdir}/libgettextsrc-*.so*"
+
+PACKAGES =+ "gettext-runtime gettext-runtime-dev gettext-runtime-doc"
+
+FILES:${PN} += "${libdir}/${BPN}/*"
+
+# The its/Makefile.am has defined:
+# itsdir = $(pkgdatadir)$(PACKAGE_SUFFIX)/its
+# not itsdir = $(pkgdatadir), so use wildcard to match the version.
+FILES:${PN} += "${datadir}/${BPN}-*/*"
+
+FILES:gettext-runtime = "${bindir}/gettext \
+ ${bindir}/ngettext \
+ ${bindir}/envsubst \
+ ${bindir}/gettext.sh \
+ ${libdir}/libasprintf.so* \
+ ${libdir}/GNU.Gettext.dll \
+ "
+FILES:gettext-runtime-dev += "${libdir}/libasprintf.a \
+ ${includedir}/autosprintf.h \
+ "
+FILES:gettext-runtime-doc = "${mandir}/man1/gettext.* \
+ ${mandir}/man1/ngettext.* \
+ ${mandir}/man1/envsubst.* \
+ ${mandir}/man1/.* \
+ ${mandir}/man3/* \
+ ${docdir}/gettext/gettext.* \
+ ${docdir}/gettext/ngettext.* \
+ ${docdir}/gettext/envsubst.* \
+ ${docdir}/gettext/*.3.html \
+ ${datadir}/gettext/ABOUT-NLS \
+ ${docdir}/gettext/csharpdoc/* \
+ ${docdir}/libasprintf/autosprintf.html \
+ ${infodir}/autosprintf.info \
+ "
+
+do_install:append() {
+ rm -f ${D}${libdir}/preloadable_libintl.so
+}
+
+do_install:append:class-native () {
+ rm ${D}${datadir}/aclocal/*
+ rm ${D}${datadir}/gettext/config.rpath
+ rm ${D}${datadir}/gettext/po/Makefile.in.in
+ rm ${D}${datadir}/gettext/po/remove-potcdate.sin
+
+ create_wrapper ${D}${bindir}/msgfmt \
+ GETTEXTDATADIR="${STAGING_DATADIR_NATIVE}/gettext-${PV}/"
+
+}
+
+do_compile_ptest() {
+ cd ${B}/gettext-tools/tests/
+ sed -i '/^buildtest-TESTS: /c buildtest-TESTS: $(TESTS) $(check_PROGRAMS)' Makefile
+ oe_runmake buildtest-TESTS
+ cd -
+}
+
+do_install_ptest() {
+ if [ ${PTEST_ENABLED} = "1" ]; then
+ mkdir -p ${D}${PTEST_PATH}/tests
+ mkdir -p ${D}${PTEST_PATH}/src
+ mkdir -p ${D}${PTEST_PATH}/po
+ mkdir -p ${D}${PTEST_PATH}/misc
+ mkdir -p ${D}${PTEST_PATH}/its
+ mkdir -p ${D}${PTEST_PATH}/styles
+ mkdir -p ${D}${PTEST_PATH}/gnulib-lib
+ mkdir -p ${D}${PTEST_PATH}/examples
+ cp -rf ${S}/gettext-tools/its/* ${D}${PTEST_PATH}/its
+ cp -rf ${S}/gettext-tools/styles/* ${D}${PTEST_PATH}/styles
+ cp -rf ${S}/gettext-tools/gnulib-lib/gettext.h ${D}${PTEST_PATH}/gnulib-lib
+ cp -rf ${S}/gettext-tools/examples/hello-c ${D}${PTEST_PATH}/examples
+ cp -rf ${S}/gettext-tools/tests/* ${D}${PTEST_PATH}/tests
+ cp -rf ${B}/gettext-tools/tests/.libs/* ${D}${PTEST_PATH}/tests
+ cp -rf ${B}/gettext-runtime/intl/.libs/libgnuintl.so.8* ${D}${libdir}/
+ cp -rf ${B}/gettext-tools/tests/Makefile ${D}${PTEST_PATH}/tests
+ cp -rf ${B}/gettext-tools/tests/init-env ${D}${PTEST_PATH}/tests
+ sed -i '/^Makefile:/c Makefile:' ${D}${PTEST_PATH}/tests/Makefile
+ sed -i -e 's:lang-c lang-c++:lang-c++:g' ${D}${PTEST_PATH}/tests/Makefile
+ install ${S}/gettext-tools/src/msgunfmt.tcl ${D}${PTEST_PATH}/src
+ install ${S}/gettext-tools/src/project-id ${D}${PTEST_PATH}/src
+ install ${B}/gettext-runtime/src/gettext.sh ${D}${PTEST_PATH}/src
+ install ${B}/gettext-runtime/src/ngettext ${D}${PTEST_PATH}/src
+ install ${B}/gettext-runtime/src/envsubst ${D}${PTEST_PATH}/src
+ install ${B}/gettext-runtime/src/gettext ${D}${PTEST_PATH}/src
+ install ${B}/gettext-tools/src/.libs/cldr-plurals ${D}${PTEST_PATH}/src
+ install ${S}/gettext-tools/po/gettext-tools.pot ${D}${PTEST_PATH}/po
+ install ${B}/gettext-tools/misc/* ${D}${PTEST_PATH}/misc
+ find ${D}${PTEST_PATH}/ -name "*.o" -exec rm {} \;
+ chmod 0755 ${D}${PTEST_PATH}/tests/lang-vala ${D}${PTEST_PATH}/tests/plural-1 ${D}${PTEST_PATH}/tests/xgettext-tcl-4 \
+ ${D}${PTEST_PATH}/tests/xgettext-vala-1 ${D}${PTEST_PATH}/tests/xgettext-po-2 ${D}${PTEST_PATH}/tests/xgettext-vala-6
+ sed -i -e 's|${DEBUG_PREFIX_MAP}||g' ${D}${PTEST_PATH}/tests/init-env
+ fi
+}
+
+RDEPENDS:${PN}-ptest += "make xz bash gawk autoconf locale-base-de-de locale-base-fr-fr"
+RDEPENDS:${PN}-ptest:append:libc-glibc = "\
+ glibc-gconv-big5 \
+ glibc-charmap-big5 \
+ glibc-gconv-cp1251 \
+ glibc-charmap-cp1251 \
+ glibc-charmap-iso-8859-9 \
+ glibc-gconv-iso8859-9 \
+ glibc-charmap-koi8-r \
+ glibc-gconv-koi8-r \
+ glibc-gconv-iso8859-2 \
+ glibc-charmap-iso-8859-2 \
+ glibc-gconv-iso8859-1 \
+ glibc-charmap-iso-8859-1 \
+ glibc-gconv-euc-kr \
+ glibc-charmap-euc-kr \
+ glibc-gconv-euc-jp \
+ glibc-charmap-euc-jp \
+ glibc-gconv-gb18030 \
+ glibc-charmap-gb18030 \
+"
+
+RRECOMMENDS:${PN}-ptest:append:libc-glibc = "\
+ locale-base-de-de.iso-8859-1 \
+ locale-base-fr-fr.iso-8859-1 \
+"
+
+INSANE_SKIP:${PN}-ptest += "ldflags"
+INSANE_SKIP:${PN}-ptest += "rpaths"
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-core/glib-2.0/glib-2.0/0001-Do-not-ignore-return-value-of-write.patch b/meta/recipes-core/glib-2.0/glib-2.0/0001-Do-not-ignore-return-value-of-write.patch
deleted file mode 100644
index f3a0069633..0000000000
--- a/meta/recipes-core/glib-2.0/glib-2.0/0001-Do-not-ignore-return-value-of-write.patch
+++ /dev/null
@@ -1,39 +0,0 @@
-From 658c034d92027dc8af5f784cae852123fac79b19 Mon Sep 17 00:00:00 2001
-From: Khem Raj <raj.khem@gmail.com>
-Date: Sat, 16 Apr 2016 13:28:59 -0700
-Subject: [PATCH] Do not ignore return value of write()
-
-gcc warns about ignoring return value when compiling
-with fortify turned on.
-
-assert when write() fails
-
-Upstream-Status: Submitted
-Signed-off-by: Khem Raj <raj.khem@gmail.com>
-
----
- glib/tests/unix.c | 5 +++--
- 1 file changed, 3 insertions(+), 2 deletions(-)
-
-diff --git a/glib/tests/unix.c b/glib/tests/unix.c
-index 7639d06..f941141 100644
---- a/glib/tests/unix.c
-+++ b/glib/tests/unix.c
-@@ -33,14 +33,15 @@ test_pipe (void)
- GError *error = NULL;
- int pipefd[2];
- char buf[1024];
-- gssize bytes_read;
-+ gssize bytes_read, bytes_written;
- gboolean res;
-
- res = g_unix_open_pipe (pipefd, FD_CLOEXEC, &error);
- g_assert (res);
- g_assert_no_error (error);
-
-- write (pipefd[1], "hello", sizeof ("hello"));
-+ bytes_written = write (pipefd[1], "hello", sizeof ("hello"));
-+ g_assert (bytes_written != -1 && "write() failed");
- memset (buf, 0, sizeof (buf));
- bytes_read = read (pipefd[0], buf, sizeof(buf) - 1);
- g_assert_cmpint (bytes_read, >, 0);
diff --git a/meta/recipes-core/glib-2.0/glib-2.0/0001-Do-not-write-bindir-into-pkg-config-files.patch b/meta/recipes-core/glib-2.0/glib-2.0/0001-Do-not-write-bindir-into-pkg-config-files.patch
index 5fe3aa898e..8e6598fbef 100644
--- a/meta/recipes-core/glib-2.0/glib-2.0/0001-Do-not-write-bindir-into-pkg-config-files.patch
+++ b/meta/recipes-core/glib-2.0/glib-2.0/0001-Do-not-write-bindir-into-pkg-config-files.patch
@@ -1,4 +1,4 @@
-From 0797a40627a4cb5439a24b872edc65356dceaaf0 Mon Sep 17 00:00:00 2001
+From e7077aa23bfcd31a8e72e39dc93ce4f854678376 Mon Sep 17 00:00:00 2001
From: Alexander Kanavin <alex.kanavin@gmail.com>
Date: Fri, 15 Feb 2019 11:17:27 +0100
Subject: [PATCH] Do not write $bindir into pkg-config files
@@ -9,53 +9,52 @@ rather than use target paths).
Upstream-Status: Inappropriate [upstream wants the paths in .pc files]
Signed-off-by: Alexander Kanavin <alex.kanavin@gmail.com>
-
---
gio/meson.build | 16 ++++++++--------
glib/meson.build | 6 +++---
2 files changed, 11 insertions(+), 11 deletions(-)
diff --git a/gio/meson.build b/gio/meson.build
-index 532b086..98468a3 100644
+index 5f91586..1a95f4f 100644
--- a/gio/meson.build
+++ b/gio/meson.build
-@@ -820,14 +820,14 @@ pkg.generate(libgio,
- 'schemasdir=' + join_paths('${datadir}', schemas_subdir),
- 'bindir=' + join_paths('${prefix}', get_option('bindir')),
- 'giomoduledir=' + pkgconfig_giomodulesdir,
-- 'gio=' + join_paths('${bindir}', 'gio'),
-- 'gio_querymodules=' + join_paths('${bindir}', 'gio-querymodules'),
-- 'glib_compile_schemas=' + join_paths('${bindir}', 'glib-compile-schemas'),
-- 'glib_compile_resources=' + join_paths('${bindir}', 'glib-compile-resources'),
-- 'gdbus=' + join_paths('${bindir}', 'gdbus'),
-- 'gdbus_codegen=' + join_paths('${bindir}', 'gdbus-codegen'),
-- 'gresource=' + join_paths('${bindir}', 'gresource'),
-- 'gsettings=' + join_paths('${bindir}', 'gsettings')],
-+ 'gio=gio',
-+ 'gio_querymodules=gio-querymodules',
-+ 'glib_compile_schemas=glib-compile-schemas',
-+ 'glib_compile_resources=glib-compile-resources',
-+ 'gdbus=gdbus',
-+ 'gdbus_codegen=gdbus-codegen',
-+ 'gresource=gresource',
-+ 'gsettings=gsettings'],
+@@ -884,14 +884,14 @@ pkg.generate(libgio,
+ 'dtdsdir=' + '${datadir}' / dtds_subdir,
+ 'bindir=' + '${prefix}' / get_option('bindir'),
+ 'giomoduledir=' + pkgconfig_giomodulesdir,
+- 'gio=' + '${bindir}' / 'gio',
+- 'gio_querymodules=' + pkgconfig_multiarch_bindir / 'gio-querymodules',
+- 'glib_compile_schemas=' + pkgconfig_multiarch_bindir / 'glib-compile-schemas',
+- 'glib_compile_resources=' + '${bindir}' / 'glib-compile-resources',
+- 'gdbus=' + '${bindir}' /'gdbus',
+- 'gdbus_codegen=' + '${bindir}' / 'gdbus-codegen',
+- 'gresource=' + '${bindir}' / 'gresource',
+- 'gsettings=' + '${bindir}' / 'gsettings',
++ 'gio=gio',
++ 'gio_querymodules=gio-querymodules',
++ 'glib_compile_schemas=glib-compile-schemas',
++ 'glib_compile_resources=glib-compile-resources',
++ 'gdbus=gdbus',
++ 'gdbus_codegen=gdbus-codegen',
++ 'gresource=gresource',
++ 'gsettings=gsettings',
+ ],
version : glib_version,
install_dir : glib_pkgconfigreldir,
- filebase : 'gio-2.0',
diff --git a/glib/meson.build b/glib/meson.build
-index aaf5f00..1e0992b 100644
+index c26a35e..1d8ca6b 100644
--- a/glib/meson.build
+++ b/glib/meson.build
-@@ -375,9 +375,9 @@ pkg.generate(libglib,
- subdirs : ['glib-2.0'],
- extra_cflags : ['-I${libdir}/glib-2.0/include'] + win32_cflags,
- variables : ['bindir=' + join_paths('${prefix}', get_option('bindir')),
-- 'glib_genmarshal=' + join_paths('${bindir}', 'glib-genmarshal'),
-- 'gobject_query=' + join_paths('${bindir}', 'gobject-query'),
-- 'glib_mkenums=' + join_paths('${bindir}', 'glib-mkenums')],
-+ 'glib_genmarshal=glib-genmarshal',
-+ 'gobject_query=gobject-query',
-+ 'glib_mkenums=glib-mkenums'],
- version : glib_version,
- install_dir : glib_pkgconfigreldir,
- filebase : 'glib-2.0',
+@@ -447,9 +447,9 @@ pkg.generate(libglib,
+ variables : [
+ 'bindir=' + '${prefix}' / get_option('bindir'),
+ 'datadir=' + '${prefix}' / get_option('datadir'),
+- 'glib_genmarshal=' + '${bindir}' / 'glib-genmarshal',
+- 'gobject_query=' + '${bindir}' / 'gobject-query',
+- 'glib_mkenums=' + '${bindir}' / 'glib-mkenums',
++ 'glib_genmarshal=glib-genmarshal',
++ 'gobject_query=gobject-query',
++ 'glib_mkenums=glib-mkenums',
+ 'glib_valgrind_suppressions=' + '${datadir}' /
+ valgrind_suppression_file_install_subdir /
+ fs.name(valgrind_suppression_file),
diff --git a/meta/recipes-core/glib-2.0/glib-2.0/0001-Fix-DATADIRNAME-on-uclibc-Linux.patch b/meta/recipes-core/glib-2.0/glib-2.0/0001-Fix-DATADIRNAME-on-uclibc-Linux.patch
index 16f2d31496..eb9dfdbcf9 100644
--- a/meta/recipes-core/glib-2.0/glib-2.0/0001-Fix-DATADIRNAME-on-uclibc-Linux.patch
+++ b/meta/recipes-core/glib-2.0/glib-2.0/0001-Fix-DATADIRNAME-on-uclibc-Linux.patch
@@ -1,4 +1,4 @@
-From c94e669de98a3892c699bd8d0d2b5164b2de747e Mon Sep 17 00:00:00 2001
+From 9a5d4bf65b658d744d610ee27ecd2ae65b14b158 Mon Sep 17 00:00:00 2001
From: Khem Raj <raj.khem@gmail.com>
Date: Sat, 15 Mar 2014 22:42:29 -0700
Subject: [PATCH] Fix DATADIRNAME on uclibc/Linux
@@ -8,8 +8,6 @@ based systems therefore lets set DATADIRNAME to "share".
Signed-off-by: Khem Raj <raj.khem@gmail.com>
Upstream-Status: Pending
-
-
---
m4macros/glib-gettext.m4 | 4 ++++
1 file changed, 4 insertions(+)
diff --git a/meta/recipes-core/glib-2.0/glib-2.0/0001-Install-gio-querymodules-as-libexec_PROGRAM.patch b/meta/recipes-core/glib-2.0/glib-2.0/0001-Install-gio-querymodules-as-libexec_PROGRAM.patch
index 597864d9ac..ad69f7ec65 100644
--- a/meta/recipes-core/glib-2.0/glib-2.0/0001-Install-gio-querymodules-as-libexec_PROGRAM.patch
+++ b/meta/recipes-core/glib-2.0/glib-2.0/0001-Install-gio-querymodules-as-libexec_PROGRAM.patch
@@ -1,4 +1,4 @@
-From 0015db45cd1bfefc04959dffab5dabeead93136f Mon Sep 17 00:00:00 2001
+From 4933aef791857a5aac650b60af800778658b875b Mon Sep 17 00:00:00 2001
From: Jussi Kukkonen <jussi.kukkonen@intel.com>
Date: Tue, 22 Mar 2016 15:14:58 +0200
Subject: [PATCH] Install gio-querymodules as libexec_PROGRAM
@@ -8,16 +8,15 @@ renamer does not cope with library packages with files in ${bindir}
Signed-off-by: Jussi Kukkonen <jussi.kukkonen@intel.com>
Upstream-Status: Inappropriate [OE specific]
-
---
gio/meson.build | 1 +
1 file changed, 1 insertion(+)
diff --git a/gio/meson.build b/gio/meson.build
-index 2ef60ed..532b086 100644
+index f9fdf6e..5f91586 100644
--- a/gio/meson.build
+++ b/gio/meson.build
-@@ -936,6 +936,7 @@ gio_querymodules = executable('gio-querymodules', 'gio-querymodules.c', 'giomodu
+@@ -1005,6 +1005,7 @@ gio_querymodules = executable('gio-querymodules', 'gio-querymodules.c', 'giomodu
c_args : gio_c_args,
# intl.lib is not compatible with SAFESEH
link_args : noseh_link_args,
diff --git a/meta/recipes-core/glib-2.0/glib-2.0/0001-Remove-the-warning-about-deprecated-paths-in-schemas.patch b/meta/recipes-core/glib-2.0/glib-2.0/0001-Remove-the-warning-about-deprecated-paths-in-schemas.patch
index 6fd93526ce..0e3a62af6a 100644
--- a/meta/recipes-core/glib-2.0/glib-2.0/0001-Remove-the-warning-about-deprecated-paths-in-schemas.patch
+++ b/meta/recipes-core/glib-2.0/glib-2.0/0001-Remove-the-warning-about-deprecated-paths-in-schemas.patch
@@ -1,4 +1,4 @@
-From 4f47b8a8d650d185aa61aec2f56a283522a723c4 Mon Sep 17 00:00:00 2001
+From 8ae2e9c2a04e089306693a021149dc6b7d1bd679 Mon Sep 17 00:00:00 2001
From: Alexander Kanavin <alex.kanavin@gmail.com>
Date: Fri, 12 Jun 2015 17:08:46 +0300
Subject: [PATCH] Remove the warning about deprecated paths in schemas
@@ -9,13 +9,12 @@ messages, and meta/lib/oe/rootfs.py complaints about them.
Upstream-Status: Inappropriate
Signed-off-by: Alexander Kanavin <alex.kanavin@gmail.com>
-
---
gio/glib-compile-schemas.c | 13 -------------
1 file changed, 13 deletions(-)
diff --git a/gio/glib-compile-schemas.c b/gio/glib-compile-schemas.c
-index 7888120..7acbd5b 100644
+index 04ef404..e791ce2 100644
--- a/gio/glib-compile-schemas.c
+++ b/gio/glib-compile-schemas.c
@@ -1232,19 +1232,6 @@ parse_state_start_schema (ParseState *state,
diff --git a/meta/recipes-core/glib-2.0/glib-2.0/0001-Set-host_machine-correctly-when-building-with-mingw3.patch b/meta/recipes-core/glib-2.0/glib-2.0/0001-Set-host_machine-correctly-when-building-with-mingw3.patch
index 59de3fa969..32b4cea409 100644
--- a/meta/recipes-core/glib-2.0/glib-2.0/0001-Set-host_machine-correctly-when-building-with-mingw3.patch
+++ b/meta/recipes-core/glib-2.0/glib-2.0/0001-Set-host_machine-correctly-when-building-with-mingw3.patch
@@ -1,4 +1,4 @@
-From 4f327be49fd6ac5a77da6e48459b37f37a601977 Mon Sep 17 00:00:00 2001
+From c0733f7a91dfe13152abc60c5a3064456b3e9d63 Mon Sep 17 00:00:00 2001
From: Alexander Kanavin <alex.kanavin@gmail.com>
Date: Wed, 13 Feb 2019 15:32:05 +0100
Subject: [PATCH] Set host_machine correctly when building with mingw32
@@ -9,23 +9,22 @@ Signed-off-by: Alexander Kanavin <alex.kanavin@gmail.com>
gio/tests/meson.build | 8 ++++----
glib/tests/meson.build | 2 +-
meson.build | 3 +++
- tests/meson.build | 2 +-
- 4 files changed, 9 insertions(+), 6 deletions(-)
+ 3 files changed, 8 insertions(+), 5 deletions(-)
diff --git a/gio/tests/meson.build b/gio/tests/meson.build
-index abe676767c60..34b347815308 100644
+index 4ef3343..e498e7e 100644
--- a/gio/tests/meson.build
+++ b/gio/tests/meson.build
-@@ -27,7 +27,7 @@ if build_machine.system() == 'linux'
- endif # libutil.length() > 0
- endif # build_machine.system() == 'linux'
+@@ -29,7 +29,7 @@ endif
+
+ test_cpp_args = test_c_args
-if host_machine.system() == 'windows'
+if host_system == 'windows'
common_gio_tests_deps += [iphlpapi_dep, winsock2, cc.find_library ('secur32')]
endif
-@@ -176,7 +176,7 @@ else
+@@ -230,7 +230,7 @@ if have_dbus_daemon
endif
# Test programs buildable on UNIX only
@@ -33,8 +32,8 @@ index abe676767c60..34b347815308 100644
+if host_system != 'windows'
gio_tests += {
'file' : {},
- 'gdbus-peer' : {
-@@ -434,7 +434,7 @@ if host_machine.system() != 'windows'
+ 'gdbus-peer-object-manager' : {},
+@@ -562,7 +562,7 @@ if host_machine.system() != 'windows'
endif # unix
# Test programs buildable on Windows only
@@ -43,7 +42,7 @@ index abe676767c60..34b347815308 100644
gio_tests += {'win32-streams' : {}}
endif
-@@ -504,7 +504,7 @@ if cc.get_id() != 'msvc' and cc.get_id() != 'clang-cl'
+@@ -632,7 +632,7 @@ if cc.get_id() != 'msvc' and cc.get_id() != 'clang-cl'
}
endif
@@ -53,10 +52,10 @@ index abe676767c60..34b347815308 100644
'gdbus-example-unix-fd-client' : {
'install' : false,
diff --git a/glib/tests/meson.build b/glib/tests/meson.build
-index a0c64afe6ae9..48407f99569c 100644
+index d80c86e..5329cda 100644
--- a/glib/tests/meson.build
+++ b/glib/tests/meson.build
-@@ -151,7 +151,7 @@ if glib_conf.has('HAVE_EVENTFD')
+@@ -216,7 +216,7 @@ if glib_conf.has('HAVE_EVENTFD')
}
endif
@@ -66,7 +65,7 @@ index a0c64afe6ae9..48407f99569c 100644
glib_tests += {
'gpoll' : {
diff --git a/meson.build b/meson.build
-index e0b14319fb58..f6756a3c7dfb 100644
+index 813c9b7..6ee775e 100644
--- a/meson.build
+++ b/meson.build
@@ -54,6 +54,9 @@ else
@@ -79,19 +78,3 @@ index e0b14319fb58..f6756a3c7dfb 100644
if host_system == 'darwin'
ios_test_code = '''#include <TargetConditionals.h>
-diff --git a/tests/meson.build b/tests/meson.build
-index 5ff99a4a8258..8a4dad0ccae7 100644
---- a/tests/meson.build
-+++ b/tests/meson.build
-@@ -53,7 +53,7 @@ test_extra_programs = {
- 'assert-msg-test' : {},
- }
-
--if host_machine.system() != 'windows'
-+if host_system != 'windows'
- tests += {
- 'timeloop' : {},
- }
---
-2.34.1
-
diff --git a/meta/recipes-core/glib-2.0/glib-2.0/0001-Switch-from-the-deprecated-distutils-module-to-the-p.patch b/meta/recipes-core/glib-2.0/glib-2.0/0001-Switch-from-the-deprecated-distutils-module-to-the-p.patch
new file mode 100644
index 0000000000..b11c283e6d
--- /dev/null
+++ b/meta/recipes-core/glib-2.0/glib-2.0/0001-Switch-from-the-deprecated-distutils-module-to-the-p.patch
@@ -0,0 +1,34 @@
+From a8eb944a10353403241608a084787f6efcbb2eb7 Mon Sep 17 00:00:00 2001
+From: Jordan Williams <jordan@jwillikers.com>
+Date: Fri, 1 Dec 2023 09:53:50 -0600
+Subject: [PATCH] Switch from the deprecated distutils module to the packaging
+ module
+
+The distutils module was removed in Python 3.12.
+
+Upstream-Status: Backport [https://gitlab.gnome.org/GNOME/glib/-/commit/6ef967a0f930ce37a8c9b5aff969693b34714291]
+
+Signed-off-by: Martin Jansa <martin.jansa@gmail.com>
+---
+ gio/gdbus-2.0/codegen/utils.py | 4 ++--
+ 1 file changed, 2 insertions(+), 2 deletions(-)
+
+diff --git a/gio/gdbus-2.0/codegen/utils.py b/gio/gdbus-2.0/codegen/utils.py
+index 0204610..08f1ba9 100644
+--- a/gio/gdbus-2.0/codegen/utils.py
++++ b/gio/gdbus-2.0/codegen/utils.py
+@@ -19,7 +19,7 @@
+ #
+ # Author: David Zeuthen <davidz@redhat.com>
+
+-import distutils.version
++import packaging.version
+ import os
+ import sys
+
+@@ -166,4 +166,4 @@ def version_cmp_key(key):
+ v = str(key[0])
+ else:
+ v = "0"
+- return (distutils.version.LooseVersion(v), key[1])
++ return (packaging.version.Version(v), key[1])
diff --git a/meta/recipes-core/glib-2.0/glib-2.0/0001-gio-tests-resources.c-comment-out-a-build-host-only-.patch b/meta/recipes-core/glib-2.0/glib-2.0/0001-gio-tests-resources.c-comment-out-a-build-host-only-.patch
index d33fdd4d8b..aee2986033 100644
--- a/meta/recipes-core/glib-2.0/glib-2.0/0001-gio-tests-resources.c-comment-out-a-build-host-only-.patch
+++ b/meta/recipes-core/glib-2.0/glib-2.0/0001-gio-tests-resources.c-comment-out-a-build-host-only-.patch
@@ -1,4 +1,4 @@
-From 92de6c7eb30b961b24a2dce812d5276487b7d23d Mon Sep 17 00:00:00 2001
+From 878e51f82100c698236fda0e069e14ea9249350c Mon Sep 17 00:00:00 2001
From: Alexander Kanavin <alex.kanavin@gmail.com>
Date: Wed, 8 Jan 2020 18:22:46 +0100
Subject: [PATCH] gio/tests/resources.c: comment out a build host-only test
@@ -8,16 +8,15 @@ not cross-compatible (hardcodes ld and objcopy).
Upstream-Status: Inappropriate [oe-core specific]
Signed-off-by: Alexander Kanavin <alex.kanavin@gmail.com>
-
---
gio/tests/resources.c | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/gio/tests/resources.c b/gio/tests/resources.c
-index c44d214..e289a01 100644
+index f567914..b21b616 100644
--- a/gio/tests/resources.c
+++ b/gio/tests/resources.c
-@@ -993,7 +993,7 @@ main (int argc,
+@@ -1068,7 +1068,7 @@ main (int argc,
g_test_add_func ("/resource/automatic", test_resource_automatic);
/* This only uses automatic resources too, so it tests the constructors and destructors */
g_test_add_func ("/resource/module", test_resource_module);
diff --git a/meta/recipes-core/glib-2.0/glib-2.0/0001-meson-Run-atomics-test-on-clang-as-well.patch b/meta/recipes-core/glib-2.0/glib-2.0/0001-meson-Run-atomics-test-on-clang-as-well.patch
index 44482dd2b7..0b10269114 100644
--- a/meta/recipes-core/glib-2.0/glib-2.0/0001-meson-Run-atomics-test-on-clang-as-well.patch
+++ b/meta/recipes-core/glib-2.0/glib-2.0/0001-meson-Run-atomics-test-on-clang-as-well.patch
@@ -1,4 +1,4 @@
-From 4b97f457b7b44117e27d2a218c4b68e7fe3fe4ce Mon Sep 17 00:00:00 2001
+From b4b523160ef550a53705fcc45ac6e10d086ce491 Mon Sep 17 00:00:00 2001
From: Khem Raj <raj.khem@gmail.com>
Date: Sat, 12 Oct 2019 17:46:26 -0700
Subject: [PATCH] meson: Run atomics test on clang as well
@@ -9,16 +9,15 @@ Fixes
Upstream-Status: Pending
Signed-off-by: Khem Raj <raj.khem@gmail.com>
-
---
meson.build | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/meson.build b/meson.build
-index afb6eaa..6aa70f5 100644
+index 6ee775e..8bc5fa7 100644
--- a/meson.build
+++ b/meson.build
-@@ -1692,7 +1692,7 @@ atomicdefine = '''
+@@ -1938,7 +1938,7 @@ atomicdefine = '''
# We know that we can always use real ("lock free") atomic operations with MSVC
if cc.get_id() == 'msvc' or cc.get_id() == 'clang-cl' or cc.links(atomictest, name : 'atomic ops')
have_atomic_lock_free = true
diff --git a/meta/recipes-core/glib-2.0/glib-2.0/0001-meson.build-do-not-enable-pidfd-features-on-native-g.patch b/meta/recipes-core/glib-2.0/glib-2.0/0001-meson.build-do-not-enable-pidfd-features-on-native-g.patch
new file mode 100644
index 0000000000..14dcb278e0
--- /dev/null
+++ b/meta/recipes-core/glib-2.0/glib-2.0/0001-meson.build-do-not-enable-pidfd-features-on-native-g.patch
@@ -0,0 +1,29 @@
+From 294f3e6e9a0a9f4733e85ed6810d1b743055370b Mon Sep 17 00:00:00 2001
+From: Alexander Kanavin <alex@linutronix.de>
+Date: Sat, 16 Sep 2023 22:28:27 +0200
+Subject: [PATCH] meson.build: do not enable pidfd features on native glib
+ builds
+
+We still use host distros like alma 8 with kernels older than 5.4,
+where these features are not implemented.
+
+Upstream-Status: Inappropriate [oe-core specific]
+Signed-off-by: Alexander Kanavin <alex@linutronix.de>
+---
+ meson.build | 3 ++-
+ 1 file changed, 2 insertions(+), 1 deletion(-)
+
+diff --git a/meson.build b/meson.build
+index 8bc5fa7..df1fa60 100644
+--- a/meson.build
++++ b/meson.build
+@@ -981,7 +981,8 @@ if cc.links('''#include <sys/syscall.h>
+ waitid (P_PIDFD, 0, &child_info, WEXITED | WNOHANG);
+ return 0;
+ }''', name : 'pidfd_open(2) system call')
+- glib_conf.set('HAVE_PIDFD', 1)
++ #requires kernel 5.4+
++ #glib_conf.set('HAVE_PIDFD', 1)
+ endif
+
+ # Check for __uint128_t (gcc) by checking for 128-bit division
diff --git a/meta/recipes-core/glib-2.0/glib-2.0/0010-Do-not-hardcode-python-path-into-various-tools.patch b/meta/recipes-core/glib-2.0/glib-2.0/0010-Do-not-hardcode-python-path-into-various-tools.patch
index 1c645f3a9a..6dff5179c7 100644
--- a/meta/recipes-core/glib-2.0/glib-2.0/0010-Do-not-hardcode-python-path-into-various-tools.patch
+++ b/meta/recipes-core/glib-2.0/glib-2.0/0010-Do-not-hardcode-python-path-into-various-tools.patch
@@ -1,11 +1,10 @@
-From 79ce7e545dd3a93f77d2146d50b6fa061fbceed9 Mon Sep 17 00:00:00 2001
+From 50636758c73e5e61212a8f801c6c602b8aab5ba7 Mon Sep 17 00:00:00 2001
From: Alexander Kanavin <alex.kanavin@gmail.com>
Date: Tue, 3 Oct 2017 10:45:55 +0300
Subject: [PATCH] Do not hardcode python path into various tools
Upstream-Status: Inappropriate [oe-core specific]
Signed-off-by: Alexander Kanavin <alex.kanavin@gmail.com>
-
---
gio/gdbus-2.0/codegen/gdbus-codegen.in | 2 +-
gobject/glib-genmarshal.in | 2 +-
@@ -23,7 +22,7 @@ index 67d3675..4e92a7a 100755
# GDBus - GLib D-Bus Library
#
diff --git a/gobject/glib-genmarshal.in b/gobject/glib-genmarshal.in
-index 7380f24..c8abeaa 100755
+index aa5af43..56e8e2e 100755
--- a/gobject/glib-genmarshal.in
+++ b/gobject/glib-genmarshal.in
@@ -1,4 +1,4 @@
@@ -33,7 +32,7 @@ index 7380f24..c8abeaa 100755
# pylint: disable=too-many-lines, missing-docstring, invalid-name
diff --git a/gobject/glib-mkenums.in b/gobject/glib-mkenums.in
-index 91ad779..3ebef62 100755
+index 353e53a..8ed6c39 100755
--- a/gobject/glib-mkenums.in
+++ b/gobject/glib-mkenums.in
@@ -1,4 +1,4 @@
diff --git a/meta/recipes-core/glib-2.0/glib-2.0/Enable-more-tests-while-cross-compiling.patch b/meta/recipes-core/glib-2.0/glib-2.0/Enable-more-tests-while-cross-compiling.patch
deleted file mode 100644
index f5c161fe04..0000000000
--- a/meta/recipes-core/glib-2.0/glib-2.0/Enable-more-tests-while-cross-compiling.patch
+++ /dev/null
@@ -1,123 +0,0 @@
-From 1f3c05529c0c9032ae0a289fb1f088b7541fc9b0 Mon Sep 17 00:00:00 2001
-From: Jussi Kukkonen <jussi.kukkonen@intel.com>
-Date: Mon, 9 Nov 2015 11:07:27 +0200
-Subject: [PATCH] Enable more tests while cross-compiling
-
-Upstream disables a few tests while cross-compiling because their build requires
-running other built binaries. This usually makes sense but in the cross-compile
-case we can depend on glib-2.0-native.
-
-Upstream-Status: Inappropriate [OE specific]
-Signed-off-by: Jussi Kukkonen <jussi.kukkonen@intel.com>
-
----
- gio/tests/meson.build | 24 ++++++++++++------------
- 1 file changed, 12 insertions(+), 12 deletions(-)
-
-diff --git a/gio/tests/meson.build b/gio/tests/meson.build
-index 3ed23a5..5df932a 100644
---- a/gio/tests/meson.build
-+++ b/gio/tests/meson.build
-@@ -253,7 +253,7 @@ if host_machine.system() != 'windows'
- }
- endif
-
-- if have_dbus_daemon
-+ if true
- annotate_args = [
- '--annotate', 'org.project.Bar', 'Key1', 'Value1',
- '--annotate', 'org.project.Bar', 'org.gtk.GDBus.Internal', 'Value2',
-@@ -603,14 +603,14 @@ if installed_tests_enabled
- endforeach
- endif
-
--if not meson.is_cross_build() or meson.has_exe_wrapper()
-+if meson.is_cross_build()
-
- compiler_type = '--compiler=@0@'.format(cc.get_id())
-
- plugin_resources_c = custom_target('plugin-resources.c',
- input : 'test4.gresource.xml',
- output : 'plugin-resources.c',
-- command : [glib_compile_resources,
-+ command : ['glib-compile-resources',
- compiler_type,
- '--target=@OUTPUT@',
- '--sourcedir=' + meson.current_source_dir(),
-@@ -636,7 +636,7 @@ if not meson.is_cross_build() or meson.has_exe_wrapper()
- test_gresource = custom_target('test.gresource',
- input : 'test.gresource.xml',
- output : 'test.gresource',
-- command : [glib_compile_resources,
-+ command : ['glib-compile-resources',
- compiler_type,
- '--target=@OUTPUT@',
- '--sourcedir=' + meson.current_source_dir(),
-@@ -649,7 +649,7 @@ if not meson.is_cross_build() or meson.has_exe_wrapper()
- test_resources2_c = custom_target('test_resources2.c',
- input : 'test3.gresource.xml',
- output : 'test_resources2.c',
-- command : [glib_compile_resources,
-+ command : ['glib-compile-resources',
- compiler_type,
- '--target=@OUTPUT@',
- '--sourcedir=' + meson.current_source_dir(),
-@@ -662,7 +662,7 @@ if not meson.is_cross_build() or meson.has_exe_wrapper()
- test_resources2_h = custom_target('test_resources2.h',
- input : 'test3.gresource.xml',
- output : 'test_resources2.h',
-- command : [glib_compile_resources,
-+ command : ['glib-compile-resources',
- compiler_type,
- '--target=@OUTPUT@',
- '--sourcedir=' + meson.current_source_dir(),
-@@ -676,7 +676,7 @@ if not meson.is_cross_build() or meson.has_exe_wrapper()
- input : 'test2.gresource.xml',
- depends : big_test_resource,
- output : 'test_resources.c',
-- command : [glib_compile_resources,
-+ command : ['glib-compile-resources',
- compiler_type,
- '--target=@OUTPUT@',
- '--sourcedir=' + meson.current_source_dir(),
-@@ -689,7 +689,7 @@ if not meson.is_cross_build() or meson.has_exe_wrapper()
- digit_test_resources_c = custom_target('digit_test_resources.c',
- input : '111_digit_test.gresource.xml',
- output : 'digit_test_resources.c',
-- command : [glib_compile_resources,
-+ command : ['glib-compile-resources',
- compiler_type,
- '--target=@OUTPUT@',
- '--sourcedir=' + meson.current_source_dir(),
-@@ -702,7 +702,7 @@ if not meson.is_cross_build() or meson.has_exe_wrapper()
- digit_test_resources_h = custom_target('digit_test_resources.h',
- input : '111_digit_test.gresource.xml',
- output : 'digit_test_resources.h',
-- command : [glib_compile_resources,
-+ command : ['glib-compile-resources',
- compiler_type,
- '--target=@OUTPUT@',
- '--sourcedir=' + meson.current_source_dir(),
-@@ -744,11 +744,11 @@ if not meson.is_cross_build() or meson.has_exe_wrapper()
-
- ld = find_program('ld', required : false)
-
-- if build_machine.system() == 'linux' and objcopy.found() and objcopy_supports_add_symbol and ld.found()
-+ if not meson.is_cross_build()
- test_gresource_binary = custom_target('test5.gresource',
- input : 'test5.gresource.xml',
- output : 'test5.gresource',
-- command : [glib_compile_resources,
-+ command : ['glib-compile-resources',
- compiler_type,
- '--target=@OUTPUT@',
- '--sourcedir=' + meson.current_source_dir(),
-@@ -762,7 +762,7 @@ if not meson.is_cross_build() or meson.has_exe_wrapper()
- test_resources_binary_c = custom_target('test_resources_binary.c',
- input : 'test5.gresource.xml',
- output : 'test_resources_binary.c',
-- command : [glib_compile_resources,
-+ command : ['glib-compile-resources',
- compiler_type,
- '--target=@OUTPUT@',
- '--sourcedir=' + meson.current_source_dir(),
diff --git a/meta/recipes-core/glib-2.0/glib-2.0/fix-regex.patch b/meta/recipes-core/glib-2.0/glib-2.0/fix-regex.patch
new file mode 100644
index 0000000000..bdfbd55899
--- /dev/null
+++ b/meta/recipes-core/glib-2.0/glib-2.0/fix-regex.patch
@@ -0,0 +1,54 @@
+From cce3ae98a2c1966719daabff5a4ec6cf94a846f6 Mon Sep 17 00:00:00 2001
+From: Philip Withnall <pwithnall@gnome.org>
+Date: Mon, 26 Feb 2024 16:55:44 +0000
+Subject: [PATCH] tests: Remove variable-length lookbehind tests for GRegex
+MIME-Version: 1.0
+Content-Type: text/plain; charset=UTF-8
+Content-Transfer-Encoding: 8bit
+
+PCRE2 10.43 has now introduced support for variable-length lookbehind,
+so these tests now fail if GLib is built against PCRE2 10.43 or higher.
+
+See
+https://github.com/PCRE2Project/pcre2/blob/e8db6fa7137f4c6f66cb87e0a3c9467252ec1ef7/ChangeLog#L94.
+
+Rather than making the tests conditional on the version of PCRE2 in use,
+just remove them. They are mostly testing the PCRE2 code rather than
+any code in GLib, so don’t have much value.
+
+This should fix CI runs on msys2-mingw32, which updated to PCRE2 10.43 2
+days ago.
+
+Signed-off-by: Philip Withnall <pwithnall@gnome.org>
+
+Upstream-Status: Backport [https://gitlab.gnome.org/GNOME/glib/-/commit/cce3ae98a2c1966719daabff5a4ec6cf94a846f6]
+Signed-off-by: Alexander Kanavin <alex@linutronix.de>
+---
+ glib/tests/regex.c | 10 ----------
+ 1 file changed, 10 deletions(-)
+
+diff --git a/glib/tests/regex.c b/glib/tests/regex.c
+index 1082526292..d7a698ec67 100644
+--- a/glib/tests/regex.c
++++ b/glib/tests/regex.c
+@@ -1885,16 +1885,6 @@ test_lookbehind (void)
+ g_match_info_free (match);
+ g_regex_unref (regex);
+
+- regex = g_regex_new ("(?<!dogs?|cats?) x", G_REGEX_OPTIMIZE, G_REGEX_MATCH_DEFAULT, &error);
+- g_assert (regex == NULL);
+- g_assert_error (error, G_REGEX_ERROR, G_REGEX_ERROR_VARIABLE_LENGTH_LOOKBEHIND);
+- g_clear_error (&error);
+-
+- regex = g_regex_new ("(?<=ab(c|de)) foo", G_REGEX_OPTIMIZE, G_REGEX_MATCH_DEFAULT, &error);
+- g_assert (regex == NULL);
+- g_assert_error (error, G_REGEX_ERROR, G_REGEX_ERROR_VARIABLE_LENGTH_LOOKBEHIND);
+- g_clear_error (&error);
+-
+ regex = g_regex_new ("(?<=abc|abde)foo", G_REGEX_OPTIMIZE, G_REGEX_MATCH_DEFAULT, &error);
+ g_assert (regex);
+ g_assert_no_error (error);
+--
+GitLab
+
+
diff --git a/meta/recipes-core/glib-2.0/glib-2.0/memory-monitor.patch b/meta/recipes-core/glib-2.0/glib-2.0/memory-monitor.patch
new file mode 100644
index 0000000000..4f38509da6
--- /dev/null
+++ b/meta/recipes-core/glib-2.0/glib-2.0/memory-monitor.patch
@@ -0,0 +1,361 @@
+From ce840b6b111e1e109e511f6833d6aa419e2b723a Mon Sep 17 00:00:00 2001
+From: Philip Withnall <philip@tecnocode.co.uk>
+Date: Tue, 23 Jan 2024 11:16:52 +0000
+Subject: [PATCH] Merge branch '2887-memory-monitor-tests' into 'main'
+
+tests: Fix race condition in memory-monitor-dbus.test
+
+Closes #2887
+
+See merge request GNOME/glib!3844
+
+Hopefully these commits fix the occasional failures we've been seeing:
+https://bugzilla.yoctoproject.org/show_bug.cgi?id=15362
+
+Upstream-Status: Backport
+Signed-off-by: Ross Burton <ross.burton@arm.com>
+---
+ gio/tests/memory-monitor-dbus.py.in | 64 +++++++++++++-------
+ gio/tests/memory-monitor-portal.py.in | 54 ++++++++++-------
+ gio/tests/power-profile-monitor-dbus.py.in | 35 ++++++-----
+ gio/tests/power-profile-monitor-portal.py.in | 34 ++++++-----
+ 4 files changed, 113 insertions(+), 74 deletions(-)
+
+diff --git a/gio/tests/memory-monitor-dbus.py.in b/gio/tests/memory-monitor-dbus.py.in
+index bf32918..7aae01e 100755
+--- a/gio/tests/memory-monitor-dbus.py.in
++++ b/gio/tests/memory-monitor-dbus.py.in
+@@ -16,7 +16,6 @@ import sys
+ import subprocess
+ import fcntl
+ import os
+-import time
+
+ import taptestrunner
+
+@@ -57,53 +56,74 @@ try:
+ fcntl.fcntl(self.p_mock.stdout, fcntl.F_SETFL, flags | os.O_NONBLOCK)
+ self.last_warning = -1
+ self.dbusmock = dbus.Interface(self.obj_lmm, dbusmock.MOCK_IFACE)
++
++ try:
++ self.wait_for_bus_object('org.freedesktop.LowMemoryMonitor',
++ '/org/freedesktop/LowMemoryMonitor',
++ system_bus=True)
++ except:
++ raise
++
+ self.memory_monitor = Gio.MemoryMonitor.dup_default()
++ assert("GMemoryMonitorDBus" in str(self.memory_monitor))
+ self.memory_monitor.connect("low-memory-warning", self.memory_warning_cb)
+ self.mainloop = GLib.MainLoop()
+ self.main_context = self.mainloop.get_context()
+
++ # The LowMemoryMonitor API is stateless: it doesn’t expose any
++ # properties, just a warning signal. Emit the signal in a loop until
++ # the GMemoryMonitor instance has initialised and synchronised to
++ # the right state.
++ def emit_warning(level):
++ self.dbusmock.EmitWarning(level)
++ return GLib.SOURCE_CONTINUE
++
++ idle_id = GLib.idle_add(emit_warning, 0)
++ while self.last_warning != 0:
++ self.main_context.iteration(True)
++ GLib.source_remove(idle_id)
++
+ def tearDown(self):
+ self.p_mock.terminate()
+ self.p_mock.wait()
+
+- def assertEventually(self, condition, message=None, timeout=50):
++ def assertEventually(self, condition, message=None, timeout=5):
+ '''Assert that condition function eventually returns True.
+
+- Timeout is in deciseconds, defaulting to 50 (5 seconds). message is
++ Timeout is in seconds, defaulting to 5 seconds. message is
+ printed on failure.
+ '''
+- while timeout >= 0:
+- context = GLib.MainContext.default()
+- while context.iteration(False):
+- pass
+- if condition():
+- break
+- timeout -= 1
+- time.sleep(0.1)
+- else:
+- self.fail(message or 'timed out waiting for ' + str(condition))
++ if not message:
++ message = 'timed out waiting for ' + str(condition)
++
++ def timed_out_cb(message):
++ self.fail(message)
++ return GLib.SOURCE_REMOVE
++
++ timeout_source = GLib.timeout_source_new_seconds(timeout)
++ timeout_source.set_callback(timed_out_cb, message)
++ timeout_source.attach(self.main_context)
++
++ while not condition():
++ self.main_context.iteration(True)
++
++ timeout_source.destroy()
+
+ def memory_warning_cb(self, monitor, level):
++ print("Received memory warning signal, level", level)
+ self.last_warning = level
+ self.main_context.wakeup()
+
+ def test_low_memory_warning_signal(self):
+ '''LowMemoryWarning signal'''
+
+- # Wait 2 seconds
+- timeout = 2
+- while timeout > 0:
+- time.sleep(0.5)
+- timeout -= 0.5
+- self.main_context.iteration(False)
+-
+ self.dbusmock.EmitWarning(100)
+ # Wait 2 seconds or until warning
+- self.assertEventually(lambda: self.last_warning == 100, "'100' low-memory warning not received", 20)
++ self.assertEventually(lambda: self.last_warning == 100, "'100' low-memory warning not received", 2)
+
+ self.dbusmock.EmitWarning(255)
+ # Wait 2 seconds or until warning
+- self.assertEventually(lambda: self.last_warning == 255, "'255' low-memory warning not received", 20)
++ self.assertEventually(lambda: self.last_warning == 255, "'255' low-memory warning not received", 2)
+
+ except ImportError as e:
+ @unittest.skip("Cannot import %s" % e.name)
+diff --git a/gio/tests/memory-monitor-portal.py.in b/gio/tests/memory-monitor-portal.py.in
+index 748cee8..f570508 100755
+--- a/gio/tests/memory-monitor-portal.py.in
++++ b/gio/tests/memory-monitor-portal.py.in
+@@ -16,7 +16,6 @@ import sys
+ import subprocess
+ import fcntl
+ import os
+-import time
+
+ import taptestrunner
+
+@@ -80,26 +79,44 @@ try:
+ self.mainloop = GLib.MainLoop()
+ self.main_context = self.mainloop.get_context()
+
++ # The LowMemoryMonitor API is stateless: it doesn’t expose any
++ # properties, just a warning signal. Emit the signal in a loop until
++ # the GMemoryMonitor instance has initialised and synchronised to
++ # the right state.
++ def emit_warning(level):
++ self.dbusmock.EmitWarning(level)
++ return GLib.SOURCE_CONTINUE
++
++ idle_id = GLib.idle_add(self.emit_warning, 0)
++ while self.last_warning != 0:
++ self.main_context.iteration(True)
++ GLib.source_remove(idle_id)
++
+ def tearDown(self):
+ self.p_mock.terminate()
+ self.p_mock.wait()
+
+- def assertEventually(self, condition, message=None, timeout=50):
++ def assertEventually(self, condition, message=None, timeout=5):
+ '''Assert that condition function eventually returns True.
+
+- Timeout is in deciseconds, defaulting to 50 (5 seconds). message is
++ Timeout is in seconds, defaulting to 5 seconds. message is
+ printed on failure.
+ '''
+- while timeout >= 0:
+- context = GLib.MainContext.default()
+- while context.iteration(False):
+- pass
+- if condition():
+- break
+- timeout -= 1
+- time.sleep(0.1)
+- else:
+- self.fail(message or 'timed out waiting for ' + str(condition))
++ if not message:
++ message = 'timed out waiting for ' + str(condition)
++
++ def timed_out_cb(message):
++ self.fail(message)
++ return GLib.SOURCE_REMOVE
++
++ timeout_source = GLib.timeout_source_new_seconds(timeout)
++ timeout_source.set_callback(timed_out_cb, message)
++ timeout_source.attach(self.main_context)
++
++ while not condition():
++ self.main_context.iteration(True)
++
++ timeout_source.destroy()
+
+ def portal_memory_warning_cb(self, monitor, level):
+ self.last_warning = level
+@@ -108,20 +125,13 @@ try:
+ def test_low_memory_warning_portal_signal(self):
+ '''LowMemoryWarning signal'''
+
+- # Wait 2 seconds
+- timeout = 2
+- while timeout > 0:
+- time.sleep(0.5)
+- timeout -= 0.5
+- self.main_context.iteration(False)
+-
+ self.dbusmock.EmitWarning(100)
+ # Wait 2 seconds or until warning
+- self.assertEventually(lambda: self.last_warning == 100, "'100' low-memory warning not received", 20)
++ self.assertEventually(lambda: self.last_warning == 100, "'100' low-memory warning not received", 2)
+
+ self.dbusmock.EmitWarning(255)
+ # Wait 2 seconds or until warning
+- self.assertEventually(lambda: self.last_warning == 255, "'255' low-memory warning not received", 20)
++ self.assertEventually(lambda: self.last_warning == 255, "'255' low-memory warning not received", 2)
+
+ except ImportError as e:
+ @unittest.skip("Cannot import %s" % e.name)
+diff --git a/gio/tests/power-profile-monitor-dbus.py.in b/gio/tests/power-profile-monitor-dbus.py.in
+index 06e594f..f955afc 100755
+--- a/gio/tests/power-profile-monitor-dbus.py.in
++++ b/gio/tests/power-profile-monitor-dbus.py.in
+@@ -16,7 +16,6 @@ import sys
+ import subprocess
+ import fcntl
+ import os
+-import time
+
+ import taptestrunner
+
+@@ -58,6 +57,7 @@ try:
+ self.power_saver_enabled = False
+ self.dbus_props = dbus.Interface(self.obj_ppd, dbus.PROPERTIES_IFACE)
+ self.power_profile_monitor = Gio.PowerProfileMonitor.dup_default()
++ assert("GPowerProfileMonitorDBus" in str(self.power_profile_monitor))
+ self.power_profile_monitor.connect("notify::power-saver-enabled", self.power_saver_enabled_cb)
+ self.mainloop = GLib.MainLoop()
+ self.main_context = self.mainloop.get_context()
+@@ -66,22 +66,27 @@ try:
+ self.p_mock.terminate()
+ self.p_mock.wait()
+
+- def assertEventually(self, condition, message=None, timeout=50):
++ def assertEventually(self, condition, message=None, timeout=5):
+ '''Assert that condition function eventually returns True.
+
+- Timeout is in deciseconds, defaulting to 50 (5 seconds). message is
++ Timeout is in seconds, defaulting to 5 seconds. message is
+ printed on failure.
+ '''
+- while timeout >= 0:
+- context = GLib.MainContext.default()
+- while context.iteration(False):
+- pass
+- if condition():
+- break
+- timeout -= 1
+- time.sleep(0.1)
+- else:
+- self.fail(message or 'timed out waiting for ' + str(condition))
++ if not message:
++ message = 'timed out waiting for ' + str(condition)
++
++ def timed_out_cb(message):
++ self.fail(message)
++ return GLib.SOURCE_REMOVE
++
++ timeout_source = GLib.timeout_source_new_seconds(timeout)
++ timeout_source.set_callback(timed_out_cb, message)
++ timeout_source.attach(self.main_context)
++
++ while not condition():
++ self.main_context.iteration(True)
++
++ timeout_source.destroy()
+
+ def power_saver_enabled_cb(self, spec, data):
+ self.power_saver_enabled = self.power_profile_monitor.get_power_saver_enabled()
+@@ -92,10 +97,10 @@ try:
+
+ self.assertEqual(self.power_profile_monitor.get_power_saver_enabled(), False)
+ self.dbus_props.Set('net.hadess.PowerProfiles', 'ActiveProfile', dbus.String('power-saver', variant_level=1))
+- self.assertEventually(lambda: self.power_saver_enabled == True, "power-saver didn't become enabled", 10)
++ self.assertEventually(lambda: self.power_saver_enabled == True, "power-saver didn't become enabled", 1)
+
+ self.dbus_props.Set('net.hadess.PowerProfiles', 'ActiveProfile', dbus.String('balanced', variant_level=1))
+- self.assertEventually(lambda: self.power_saver_enabled == False, "power-saver didn't become disabled", 10)
++ self.assertEventually(lambda: self.power_saver_enabled == False, "power-saver didn't become disabled", 1)
+
+ except ImportError as e:
+ @unittest.skip("Cannot import %s" % e.name)
+diff --git a/gio/tests/power-profile-monitor-portal.py.in b/gio/tests/power-profile-monitor-portal.py.in
+index 09e9a45..ad2abf6 100755
+--- a/gio/tests/power-profile-monitor-portal.py.in
++++ b/gio/tests/power-profile-monitor-portal.py.in
+@@ -16,7 +16,6 @@ import sys
+ import subprocess
+ import fcntl
+ import os
+-import time
+
+ import taptestrunner
+
+@@ -90,22 +89,27 @@ try:
+ self.p_mock.terminate()
+ self.p_mock.wait()
+
+- def assertEventually(self, condition, message=None, timeout=50):
++ def assertEventually(self, condition, message=None, timeout=5):
+ '''Assert that condition function eventually returns True.
+
+- Timeout is in deciseconds, defaulting to 50 (5 seconds). message is
++ Timeout is in seconds, defaulting to 5 seconds. message is
+ printed on failure.
+ '''
+- while timeout >= 0:
+- context = GLib.MainContext.default()
+- while context.iteration(False):
+- pass
+- if condition():
+- break
+- timeout -= 1
+- time.sleep(0.1)
+- else:
+- self.fail(message or 'timed out waiting for ' + str(condition))
++ if not message:
++ message = 'timed out waiting for ' + str(condition)
++
++ def timed_out_cb(message):
++ self.fail(message)
++ return GLib.SOURCE_REMOVE
++
++ timeout_source = GLib.timeout_source_new_seconds(timeout)
++ timeout_source.set_callback(timed_out_cb, message)
++ timeout_source.attach(self.main_context)
++
++ while not condition():
++ self.main_context.iteration(True)
++
++ timeout_source.destroy()
+
+ def power_saver_enabled_cb(self, spec, data):
+ self.power_saver_enabled = self.power_profile_monitor.get_power_saver_enabled()
+@@ -116,10 +120,10 @@ try:
+
+ self.assertEqual(self.power_profile_monitor.get_power_saver_enabled(), False)
+ self.dbus_props.Set('net.hadess.PowerProfiles', 'ActiveProfile', dbus.String('power-saver', variant_level=1))
+- self.assertEventually(lambda: self.power_saver_enabled == True, "power-saver didn't become enabled", 10)
++ self.assertEventually(lambda: self.power_saver_enabled == True, "power-saver didn't become enabled", 1)
+
+ self.dbus_props.Set('net.hadess.PowerProfiles', 'ActiveProfile', dbus.String('balanced', variant_level=1))
+- self.assertEventually(lambda: self.power_saver_enabled == False, "power-saver didn't become disabled", 10)
++ self.assertEventually(lambda: self.power_saver_enabled == False, "power-saver didn't become disabled", 1)
+
+ def test_power_profile_power_saver_enabled_portal_default(self):
+ '''power-saver-enabled property default value'''
diff --git a/meta/recipes-core/glib-2.0/glib-2.0/meson.cross.d/common-glibc b/meta/recipes-core/glib-2.0/glib-2.0/meson.cross.d/common-glibc
index c4648f58c7..3049e5116e 100644
--- a/meta/recipes-core/glib-2.0/glib-2.0/meson.cross.d/common-glibc
+++ b/meta/recipes-core/glib-2.0/glib-2.0/meson.cross.d/common-glibc
@@ -3,3 +3,4 @@ have_c99_vsnprintf = true
have_c99_snprintf = true
have_unix98_printf = true
va_val_copy = true
+have_strlcpy = true
diff --git a/meta/recipes-core/glib-2.0/glib-2.0/relocate-modules.patch b/meta/recipes-core/glib-2.0/glib-2.0/relocate-modules.patch
index 816b790ce7..3e79bbf679 100644
--- a/meta/recipes-core/glib-2.0/glib-2.0/relocate-modules.patch
+++ b/meta/recipes-core/glib-2.0/glib-2.0/relocate-modules.patch
@@ -1,4 +1,4 @@
-From d52b1b530c5d8a1e70ae45d6e2139e9d3f25207f Mon Sep 17 00:00:00 2001
+From f40e89b3852df37959606ee13b1a14ade81fa886 Mon Sep 17 00:00:00 2001
From: Ross Burton <ross.burton@intel.com>
Date: Fri, 11 Mar 2016 15:35:55 +0000
Subject: [PATCH] glib-2.0: relocate the GIO module directory for native builds
@@ -13,38 +13,32 @@ Signed-off-by: Ross Burton <ross.burton@intel.com>
Port patch to 2.48
Signed-off-by: Jussi Kukkonen <jussi.kukkonen@intel.com>
-
---
- gio/giomodule.c | 12 +++++++++++-
- 1 file changed, 11 insertions(+), 1 deletion(-)
+ gio/giomodule.c | 7 -------
+ 1 file changed, 7 deletions(-)
diff --git a/gio/giomodule.c b/gio/giomodule.c
-index 2a043cc..e2d2310 100644
+index 17fabe6..8021208 100644
--- a/gio/giomodule.c
+++ b/gio/giomodule.c
-@@ -56,6 +56,8 @@
- #ifdef G_OS_WIN32
- #include "gregistrysettingsbackend.h"
- #include "giowin32-priv.h"
-+#else
-+#include <dlfcn.h>
- #endif
- #include <glib/gstdio.h>
-
-@@ -1267,7 +1269,15 @@ get_gio_module_dir (void)
- NULL);
+@@ -1271,11 +1271,6 @@ get_gio_module_dir (void)
g_free (install_dir);
#else
-- module_dir = g_strdup (GIO_MODULE_DIR);
-+ Dl_info info;
-+
-+ if (dladdr (g_io_module_new, &info)) {
-+ char *libdir = g_path_get_dirname (info.dli_fname);
-+ module_dir = g_build_filename (libdir, "gio", "modules", NULL);
-+ g_free (libdir);
-+ } else {
-+ module_dir = g_strdup (GIO_MODULE_DIR);
-+ }
+ module_dir = g_strdup (GIO_MODULE_DIR);
+-#ifdef __APPLE__
+-#include "TargetConditionals.h"
+-/* Only auto-relocate on macOS, not watchOS etc; older macOS SDKs only define TARGET_OS_MAC */
+-#if (defined (TARGET_OS_OSX) && TARGET_OS_OSX) || \
+- (!defined (TARGET_OS_OSX) && defined (TARGET_OS_MAC) && TARGET_OS_MAC)
+ #include <dlfcn.h>
+ {
+ g_autofree gchar *path = NULL;
+@@ -1294,8 +1289,6 @@ get_gio_module_dir (void)
+ }
+ }
+ }
+-#endif
+-#endif
#endif
}
diff --git a/meta/recipes-core/glib-2.0/glib-2.0/run-ptest b/meta/recipes-core/glib-2.0/glib-2.0/run-ptest
index 7a231b514b..831bc3b91f 100644
--- a/meta/recipes-core/glib-2.0/glib-2.0/run-ptest
+++ b/meta/recipes-core/glib-2.0/glib-2.0/run-ptest
@@ -5,5 +5,6 @@ if id -u glib2-test; then
userdel glib2-test
fi
useradd glib2-test
-su glib2-test -c 'gnome-desktop-testing-runner glib'
+cd /tmp
+su glib2-test -c 'G_TEST_TMPDIR=`readlink -f /tmp` gnome-desktop-testing-runner glib'
userdel glib2-test
diff --git a/meta/recipes-core/glib-2.0/glib-2.0/skip-timeout.patch b/meta/recipes-core/glib-2.0/glib-2.0/skip-timeout.patch
new file mode 100644
index 0000000000..cd5ac287c3
--- /dev/null
+++ b/meta/recipes-core/glib-2.0/glib-2.0/skip-timeout.patch
@@ -0,0 +1,32 @@
+From bb11d1a4ae77d93ec0743e54077cf0f990243fa6 Mon Sep 17 00:00:00 2001
+From: Ross Burton <ross.burton@arm.com>
+Date: Thu, 28 Mar 2024 16:27:09 +0000
+Subject: [PATCH] Skip /timeout/rounding test
+
+This test is sensitive to load because it expects certain timeout operations
+to succeed in specific time periods. Whilst these timeouts are fairly large,
+they're still exceeded inside a qemu on a loaded system.
+
+https://bugzilla.yoctoproject.org/show_bug.cgi?id=14464
+
+Upstream-Status: Inappropriate [OE-specific]
+Signed-off-by: Ross Burton <ross.burton@arm.com>
+---
+ glib/tests/timeout.c | 1 -
+ 1 file changed, 1 deletion(-)
+
+diff --git a/glib/tests/timeout.c b/glib/tests/timeout.c
+index 1ae3f3a34..85a715b0f 100644
+--- a/glib/tests/timeout.c
++++ b/glib/tests/timeout.c
+@@ -214,7 +214,6 @@ main (int argc, char *argv[])
+ g_test_add_func ("/timeout/seconds-once", test_seconds_once);
+ g_test_add_func ("/timeout/weeks-overflow", test_weeks_overflow);
+ g_test_add_func ("/timeout/far-future-ready-time", test_far_future_ready_time);
+- g_test_add_func ("/timeout/rounding", test_rounding);
+
+ return g_test_run ();
+ }
+--
+2.34.1
+
diff --git a/meta/recipes-core/glib-2.0/glib-2.0_2.72.1.bb b/meta/recipes-core/glib-2.0/glib-2.0_2.72.1.bb
deleted file mode 100644
index c9ccedd81f..0000000000
--- a/meta/recipes-core/glib-2.0/glib-2.0_2.72.1.bb
+++ /dev/null
@@ -1,53 +0,0 @@
-require glib.inc
-
-PE = "1"
-
-SHRT_VER = "${@oe.utils.trim_version("${PV}", 2)}"
-
-SRC_URI = "${GNOME_MIRROR}/glib/${SHRT_VER}/glib-${PV}.tar.xz \
- file://run-ptest \
- file://0001-Fix-DATADIRNAME-on-uclibc-Linux.patch \
- file://Enable-more-tests-while-cross-compiling.patch \
- file://0001-Remove-the-warning-about-deprecated-paths-in-schemas.patch \
- file://0001-Install-gio-querymodules-as-libexec_PROGRAM.patch \
- file://0001-Do-not-ignore-return-value-of-write.patch \
- file://0010-Do-not-hardcode-python-path-into-various-tools.patch \
- file://0001-Set-host_machine-correctly-when-building-with-mingw3.patch \
- file://0001-Do-not-write-bindir-into-pkg-config-files.patch \
- file://0001-meson-Run-atomics-test-on-clang-as-well.patch \
- file://0001-gio-tests-resources.c-comment-out-a-build-host-only-.patch \
- "
-SRC_URI:append:class-native = " file://relocate-modules.patch"
-
-SRC_URI[sha256sum] = "c07e57147b254cef92ce80a0378dc0c02a4358e7de4702e9f403069781095fe2"
-
-# Find any meson cross files in FILESPATH that are relevant for the current
-# build (using siteinfo) and add them to EXTRA_OEMESON.
-inherit siteinfo
-def find_meson_cross_files(d):
- if bb.data.inherits_class('native', d):
- return ""
-
- thisdir = os.path.normpath(d.getVar("THISDIR"))
- import collections
- sitedata = siteinfo_data(d)
- # filename -> found
- files = collections.OrderedDict()
- for path in d.getVar("FILESPATH").split(":"):
- for element in sitedata:
- filename = os.path.normpath(os.path.join(path, "meson.cross.d", element))
- sanitized_path = filename.replace(thisdir, "${THISDIR}")
- if sanitized_path == filename:
- if os.path.exists(filename):
- bb.error("Cannot add '%s' to --cross-file, because it's not relative to THISDIR '%s' and sstate signature would contain this full path" % (filename, thisdir))
- continue
- files[filename.replace(thisdir, "${THISDIR}")] = os.path.exists(filename)
-
- items = ["--cross-file=" + k for k,v in files.items() if v]
- d.appendVar("EXTRA_OEMESON", " " + " ".join(items))
- items = ["%s:%s" % (k, "True" if v else "False") for k,v in files.items()]
- d.appendVarFlag("do_configure", "file-checksums", " " + " ".join(items))
-
-python () {
- find_meson_cross_files(d)
-}
diff --git a/meta/recipes-core/glib-2.0/glib-2.0_2.78.4.bb b/meta/recipes-core/glib-2.0/glib-2.0_2.78.4.bb
new file mode 100644
index 0000000000..b1669ead75
--- /dev/null
+++ b/meta/recipes-core/glib-2.0/glib-2.0_2.78.4.bb
@@ -0,0 +1,57 @@
+require glib.inc
+
+PE = "1"
+
+SHRT_VER = "${@oe.utils.trim_version("${PV}", 2)}"
+
+SRC_URI = "${GNOME_MIRROR}/glib/${SHRT_VER}/glib-${PV}.tar.xz \
+ file://run-ptest \
+ file://0001-Fix-DATADIRNAME-on-uclibc-Linux.patch \
+ file://0001-Remove-the-warning-about-deprecated-paths-in-schemas.patch \
+ file://0001-Install-gio-querymodules-as-libexec_PROGRAM.patch \
+ file://0010-Do-not-hardcode-python-path-into-various-tools.patch \
+ file://0001-Set-host_machine-correctly-when-building-with-mingw3.patch \
+ file://0001-Do-not-write-bindir-into-pkg-config-files.patch \
+ file://0001-meson-Run-atomics-test-on-clang-as-well.patch \
+ file://0001-gio-tests-resources.c-comment-out-a-build-host-only-.patch \
+ file://0001-Switch-from-the-deprecated-distutils-module-to-the-p.patch \
+ file://memory-monitor.patch \
+ file://fix-regex.patch \
+ file://skip-timeout.patch \
+ "
+SRC_URI:append:class-native = " file://relocate-modules.patch \
+ file://0001-meson.build-do-not-enable-pidfd-features-on-native-g.patch \
+ "
+
+SRC_URI[sha256sum] = "24b8e0672dca120cc32d394bccb85844e732e04fe75d18bb0573b2dbc7548f63"
+
+# Find any meson cross files in FILESPATH that are relevant for the current
+# build (using siteinfo) and add them to EXTRA_OEMESON.
+inherit siteinfo
+def find_meson_cross_files(d):
+ if bb.data.inherits_class('native', d):
+ return ""
+
+ thisdir = os.path.normpath(d.getVar("THISDIR"))
+ import collections
+ sitedata = siteinfo_data(d)
+ # filename -> found
+ files = collections.OrderedDict()
+ for path in d.getVar("FILESPATH").split(":"):
+ for element in sitedata:
+ filename = os.path.normpath(os.path.join(path, "meson.cross.d", element))
+ sanitized_path = filename.replace(thisdir, "${THISDIR}")
+ if sanitized_path == filename:
+ if os.path.exists(filename):
+ bb.error("Cannot add '%s' to --cross-file, because it's not relative to THISDIR '%s' and sstate signature would contain this full path" % (filename, thisdir))
+ continue
+ files[filename.replace(thisdir, "${THISDIR}")] = os.path.exists(filename)
+
+ items = ["--cross-file=" + k for k,v in files.items() if v]
+ d.appendVar("EXTRA_OEMESON", " " + " ".join(items))
+ items = ["%s:%s" % (k, "True" if v else "False") for k,v in files.items()]
+ d.appendVarFlag("do_configure", "file-checksums", " " + " ".join(items))
+
+python () {
+ find_meson_cross_files(d)
+}
diff --git a/meta/recipes-core/glib-2.0/glib.inc b/meta/recipes-core/glib-2.0/glib.inc
index ffddefe22e..fac8875d84 100644
--- a/meta/recipes-core/glib-2.0/glib.inc
+++ b/meta/recipes-core/glib-2.0/glib.inc
@@ -5,10 +5,10 @@ HOMEPAGE = "https://developer.gnome.org/glib/"
# pcre is under BSD;
# docs/reference/COPYING is with a 'public domain'-like license!
LICENSE = "LGPL-2.1-or-later & BSD-3-Clause & PD"
-LIC_FILES_CHKSUM = "file://COPYING;md5=4fbd65380cdd255951079008b364516c \
- file://glib/glib.h;beginline=4;endline=17;md5=b88abb7f3ad09607e71cb9d530155906 \
- file://gmodule/COPYING;md5=4fbd65380cdd255951079008b364516c \
- file://gmodule/gmodule.h;beginline=4;endline=17;md5=b88abb7f3ad09607e71cb9d530155906 \
+LIC_FILES_CHKSUM = "file://COPYING;md5=41890f71f740302b785c27661123bff5 \
+ file://glib/glib.h;beginline=4;endline=17;md5=72f7cc2847407f65d8981ef112e4e630 \
+ file://LICENSES/LGPL-2.1-or-later.txt;md5=41890f71f740302b785c27661123bff5 \
+ file://gmodule/gmodule.h;beginline=4;endline=17;md5=72f7cc2847407f65d8981ef112e4e630 \
file://docs/reference/COPYING;md5=f51a5100c17af6bae00735cd791e1fcc"
BUGTRACKER = "http://bugzilla.gnome.org"
SECTION = "libs"
@@ -21,16 +21,14 @@ DEPENDS = "glib-2.0-native \
virtual/libintl \
virtual/libiconv \
libffi \
- libpcre \
+ libpcre2 \
zlib"
PACKAGES += "${PN}-codegen ${PN}-utils"
LEAD_SONAME = "libglib-2.0.*"
-inherit meson gettext gtk-doc pkgconfig ptest-gnome upstream-version-is-even bash-completion gio-module-cache manpages
-
-DEPENDS:append:class-target = "${@' gtk-doc' if d.getVar('GTKDOC_ENABLED') == 'True' else ''}"
+inherit meson gettext gtk-doc pkgconfig ptest-gnome upstream-version-is-even bash-completion gio-module-cache manpages gobject-introspection-data
GTKDOC_MESON_OPTION = "gtk_doc"
@@ -40,12 +38,11 @@ PACKAGECONFIG ??= "libmount \
${@bb.utils.contains('PTEST_ENABLED', '1', 'tests', '', d)}"
PACKAGECONFIG[libmount] = "-Dlibmount=enabled,-Dlibmount=disabled,util-linux"
PACKAGECONFIG[manpages] = "-Dman=true, -Dman=false, libxslt-native xmlto-native"
-# libelf is auto-detected without a configuration option
-PACKAGECONFIG[libelf] = ",,elfutils"
+PACKAGECONFIG[libelf] = "-Dlibelf=enabled,-Dlibelf=disabled,elfutils"
PACKAGECONFIG[tests] = "-Dinstalled_tests=true,-Dinstalled_tests=false,"
PACKAGECONFIG[selinux] = "-Dselinux=enabled,-Dselinux=disabled,libselinux"
-EXTRA_OEMESON = "-Ddtrace=false -Dfam=false -Dsystemtap=false"
+EXTRA_OEMESON = "-Ddtrace=false -Dsystemtap=false"
do_configure:prepend() {
sed -i -e '1s,#!.*,#!${USRBINPATH}/env python3,' ${S}/gio/gdbus-2.0/codegen/gdbus-codegen.in
@@ -54,13 +51,10 @@ do_configure:prepend() {
FILES:${PN} = "${libdir}/lib*${SOLIBS} \
${libdir}/gio \
${libexecdir}/*gio-querymodules \
+ ${libexecdir}/*gio-launch-desktop \
+ ${datadir}/glib-2.0/dtds \
${datadir}/glib-2.0/schemas"
-FILES:${PN}-utils += "${bindir}/glib-genmarshal \
- ${bindir}/glib-gettextize \
- ${bindir}/glib-mkenums \
- ${bindir}/glib-compile-resources"
-
FILES:${PN}-dev += "${libdir}/glib-2.0/include \
${libdir}/gio/modules/lib*${SOLIBSDEV} \
${libdir}/gio/modules/*.la \
@@ -117,6 +111,11 @@ do_install:append () {
mkdir -p ${D}${libdir}/gio/modules
}
+do_install:append:class-native () {
+ # Link gio-querymodules into ${bindir} as otherwise tools like meson won't find it
+ ln -rs ${D}${libexecdir}/gio-querymodules ${D}${bindir}
+}
+
do_install:append:class-target () {
# Tests are only installed on targets, not native builds. Separating this out
# keeps glib-2.0-native from depending on DISTRO_FEATURES
@@ -130,14 +129,22 @@ do_install:append:class-target () {
mv ${D}${datadir}/installed-tests/glib/static-link.test ${D}${datadir}/installed-tests/glib/${MLPREFIX}static-link.test
fi
fi
+ # https://gitlab.gnome.org/GNOME/glib/-/issues/2810
+ rm -f ${D}${datadir}/installed-tests/glib/thread-pool-slow.test
+}
+do_install:append:class-target:libc-musl () {
+ # Remove failing tests on musl libc systems, this helps set baseline for musl testing
+ # they remain to be rootcaused and fixed but marked known failures here.
+ for t in convert.test collate.test gdatetime.test date.test converter-stream.test option-context.test; do
+ rm -rf ${D}${datadir}/installed-tests/glib/$t
+ done
}
-
# As we do not build python3 for windows, makes no sense to ship the script that's using it
do_install:append:mingw32() {
rm -f ${D}${bindir}/gtester-report
}
-CODEGEN_PYTHON_RDEPENDS = "python3 python3-distutils python3-xml"
+CODEGEN_PYTHON_RDEPENDS = "python3 python3-packaging python3-xml"
CODEGEN_PYTHON_RDEPENDS:mingw32 = ""
RDEPENDS:${PN}-codegen += "${CODEGEN_PYTHON_RDEPENDS}"
@@ -148,6 +155,7 @@ RDEPENDS:${PN}-ptest += "\
coreutils \
libgcc \
dbus \
+ desktop-file-utils \
gnome-desktop-testing \
tzdata \
tzdata-americas \
@@ -167,6 +175,12 @@ RDEPENDS:${PN}-ptest += "\
python3-modules \
${@bb.utils.contains('GI_DATA_ENABLED', 'True', 'python3-dbusmock', '', d)} \
${PN}-codegen \
+ locale-base-de-de \
+ locale-base-es-es \
+ locale-base-en-gb \
+ locale-base-en-us \
+ locale-base-fr-fr \
+ locale-base-ru-ru \
"
RDEPENDS:${PN}-ptest:append:libc-glibc = "\
@@ -181,18 +195,12 @@ RDEPENDS:${PN}-ptest:append:libc-glibc = "\
glibc-gconv-iso8859-15 \
glibc-charmap-invariant \
glibc-localedata-translit-cjk-variants \
- locale-base-tr-tr \
locale-base-lt-lt \
locale-base-ja-jp.euc-jp \
locale-base-fa-ir \
- locale-base-ru-ru \
- locale-base-de-de \
locale-base-hr-hr \
locale-base-el-gr \
- locale-base-fr-fr \
- locale-base-es-es \
- locale-base-en-gb \
- locale-base-en-us \
locale-base-pl-pl \
locale-base-pl-pl.iso-8859-2 \
+ locale-base-tr-tr \
"
diff --git a/meta/recipes-core/glib-networking/glib-networking/eagain.patch b/meta/recipes-core/glib-networking/glib-networking/eagain.patch
new file mode 100644
index 0000000000..6c2e3c634b
--- /dev/null
+++ b/meta/recipes-core/glib-networking/glib-networking/eagain.patch
@@ -0,0 +1,36 @@
+From 5604707bed4b4a4bc4658c7158a18c1774775775 Mon Sep 17 00:00:00 2001
+From: Richard Purdie <richard.purdie@linuxfoundation.org>
+Date: Sat, 6 May 2023 12:18:50 +0100
+Subject: [PATCH] In autobuilder testing we regularly see glib-networking ptest
+ fail with:
+
+GLib-Net:ERROR:/usr/src/debug/glib-networking/2.74.0-r0/tls/tests/connection.c:1944:simul_read_thread: assertion failed (error == NULL): Resource temporarily unavailable (g-io-error-quark, 27)
+Bail out! GLib-Net:ERROR:/usr/src/debug/glib-networking/2.74.0-r0/tls/tests/connection.c:1944:simul_read_thread: assertion failed (error == NULL): Resource temporarily unavailable (g-io-error-quark, 27)
+FAIL: glib-networking/connection-openssl.test (Child process killed by signal 6)
+SUMMARY: total=4; passed=3; skipped=0; failed=1; user=0.9s; system=0.4s; maxrss=10708
+FAIL: glib-networking/connection-openssl.test (Child process killed by signal 6)
+
+The test should probably retry in this situation so test a patch which does this.
+
+Upstream-Status: Pending [testing to see if patch resolves the issue]
+
+Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
+---
+ tls/tests/connection.c | 4 ++++
+ 1 file changed, 4 insertions(+)
+
+diff --git a/tls/tests/connection.c b/tls/tests/connection.c
+index 62a7fbb..3447c80 100644
+--- a/tls/tests/connection.c
++++ b/tls/tests/connection.c
+@@ -2210,6 +2210,10 @@ simul_read_thread (gpointer user_data)
+ test->buf + test->nread,
+ MIN (TEST_DATA_LENGTH / 2, TEST_DATA_LENGTH - test->nread),
+ NULL, &error);
++
++ if (g_error_matches (error, G_IO_ERROR, G_IO_ERROR_WOULD_BLOCK))
++ continue;
++
+ g_assert_no_error (error);
+
+ test->nread += nread;
diff --git a/meta/recipes-core/glib-networking/glib-networking_2.72.0.bb b/meta/recipes-core/glib-networking/glib-networking_2.72.0.bb
deleted file mode 100644
index d578f17aa5..0000000000
--- a/meta/recipes-core/glib-networking/glib-networking_2.72.0.bb
+++ /dev/null
@@ -1,38 +0,0 @@
-SUMMARY = "GLib networking extensions"
-DESCRIPTION = "glib-networking contains the implementations of certain GLib networking features that cannot be implemented directly in GLib itself because of their dependencies."
-HOMEPAGE = "https://gitlab.gnome.org/GNOME/glib-networking/"
-BUGTRACKER = "http://bugzilla.gnome.org"
-
-LICENSE = "LGPL-2.1-only"
-LIC_FILES_CHKSUM = "file://COPYING;md5=4fbd65380cdd255951079008b364516c"
-
-SECTION = "libs"
-DEPENDS = "glib-2.0"
-
-SRC_URI[archive.sha256sum] = "100aaebb369285041de52da422b6b716789d5e4d7549a3a71ba587b932e0823b"
-
-PACKAGECONFIG ??= "openssl ${@bb.utils.contains('PTEST_ENABLED', '1', 'tests', '', d)}"
-
-PACKAGECONFIG[gnutls] = "-Dgnutls=enabled,-Dgnutls=disabled,gnutls"
-PACKAGECONFIG[openssl] = "-Dopenssl=enabled,-Dopenssl=disabled,openssl"
-PACKAGECONFIG[libproxy] = "-Dlibproxy=enabled,-Dlibproxy=disabled,libproxy"
-PACKAGECONFIG[tests] = "-Dinstalled_tests=true,-Dinstalled_tests=false"
-
-EXTRA_OEMESON = "-Dgnome_proxy=disabled"
-
-GNOMEBASEBUILDCLASS = "meson"
-inherit gnomebase gettext upstream-version-is-even gio-module-cache ptest-gnome
-
-SRC_URI += "file://run-ptest"
-
-FILES:${PN} += "\
- ${libdir}/gio/modules/libgio*.so \
- ${datadir}/dbus-1/services/ \
- ${systemd_user_unitdir} \
- "
-FILES:${PN}-dev += "${libdir}/gio/modules/libgio*.la"
-FILES:${PN}-staticdev += "${libdir}/gio/modules/libgio*.a"
-
-RDEPENDS:${PN}-ptest += "bash"
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-core/glib-networking/glib-networking_2.78.1.bb b/meta/recipes-core/glib-networking/glib-networking_2.78.1.bb
new file mode 100644
index 0000000000..5060d9fd7a
--- /dev/null
+++ b/meta/recipes-core/glib-networking/glib-networking_2.78.1.bb
@@ -0,0 +1,45 @@
+SUMMARY = "GLib networking extensions"
+DESCRIPTION = "glib-networking contains the implementations of certain GLib networking features that cannot be implemented directly in GLib itself because of their dependencies."
+HOMEPAGE = "https://gitlab.gnome.org/GNOME/glib-networking/"
+BUGTRACKER = "http://bugzilla.gnome.org"
+
+LICENSE = "LGPL-2.1-or-later"
+LICENSE:append = "${@bb.utils.contains('PACKAGECONFIG', 'openssl', ' & Glib-Networking-OpenSSL-Exception', '', d)}"
+NO_GENERIC_LICENSE[Glib-Networking-OpenSSL-Exception] = "LICENSE_EXCEPTION"
+
+LIC_FILES_CHKSUM = "file://COPYING;md5=4fbd65380cdd255951079008b364516c \
+ file://LICENSE_EXCEPTION;md5=0f5be697951b5e71aff00f4a4ce66be8 \
+ file://tls/base/gtlsconnection-base.c;beginline=7;endline=22;md5=ab641ac307f3337811008ea9afe7059f"
+
+SECTION = "libs"
+DEPENDS = "glib-2.0-native glib-2.0"
+
+SRC_URI[archive.sha256sum] = "e48f2ddbb049832cbb09230529c5e45daca9f0df0eda325f832f7379859bf09f"
+
+# Upstream note that for the openssl backend, half the tests where this backend don't return
+# the expected error code or don't work as expected so default to gnutls
+PACKAGECONFIG ??= "gnutls environment ${@bb.utils.contains('PTEST_ENABLED', '1', 'tests', '', d)}"
+
+PACKAGECONFIG[gnutls] = "-Dgnutls=enabled,-Dgnutls=disabled,gnutls"
+PACKAGECONFIG[openssl] = "-Dopenssl=enabled,-Dopenssl=disabled,openssl"
+PACKAGECONFIG[environment] = "-Denvironment_proxy=enabled,-Denvironment_proxy=disabled"
+PACKAGECONFIG[libproxy] = "-Dlibproxy=enabled,-Dlibproxy=disabled,libproxy"
+PACKAGECONFIG[tests] = "-Dinstalled_tests=true,-Dinstalled_tests=false"
+PACKAGECONFIG[gnomeproxy] = "-Dgnome_proxy=enabled,-Dgnome_proxy=disabled,gsettings-desktop-schemas"
+
+inherit gnomebase gettext upstream-version-is-even gio-module-cache ptest-gnome
+
+SRC_URI += "file://run-ptest"
+SRC_URI += "file://eagain.patch"
+
+FILES:${PN} += "\
+ ${libdir}/gio/modules/libgio*.so \
+ ${datadir}/dbus-1/services/ \
+ ${systemd_user_unitdir} \
+ "
+FILES:${PN}-dev += "${libdir}/gio/modules/libgio*.la"
+FILES:${PN}-staticdev += "${libdir}/gio/modules/libgio*.a"
+
+RDEPENDS:${PN}-ptest += "bash"
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-core/glibc/cross-localedef-native_2.35.bb b/meta/recipes-core/glibc/cross-localedef-native_2.35.bb
deleted file mode 100644
index b7b54e9ccc..0000000000
--- a/meta/recipes-core/glibc/cross-localedef-native_2.35.bb
+++ /dev/null
@@ -1,54 +0,0 @@
-SUMMARY = "Cross locale generation tool for glibc"
-HOMEPAGE = "http://www.gnu.org/software/libc/libc.html"
-SECTION = "libs"
-LICENSE = "LGPL-2.1-only"
-
-LIC_FILES_CHKSUM = "file://LICENSES;md5=1541fd8f5e8f1579512bf05f533371ba \
- file://COPYING;md5=b234ee4d69f5fce4486a80fdaf4a4263 \
- file://posix/rxspencer/COPYRIGHT;md5=dc5485bb394a13b2332ec1c785f5d83a \
- file://COPYING.LIB;md5=4fbd65380cdd255951079008b364516c"
-
-require glibc-version.inc
-
-# Tell autotools that we're working in the localedef directory
-#
-AUTOTOOLS_SCRIPT_PATH = "${S}/localedef"
-
-inherit autotools
-inherit native
-
-FILESEXTRAPATHS =. "${FILE_DIRNAME}/${PN}:${FILE_DIRNAME}/glibc:"
-
-SRC_URI = "${GLIBC_GIT_URI};branch=${SRCBRANCH};name=glibc \
- git://github.com/kraj/localedef;branch=master;name=localedef;destsuffix=git/localedef;protocol=https \
- \
- file://0001-localedef-Add-hardlink-resolver-from-util-linux.patch \
- file://0002-localedef-fix-ups-hardlink-to-make-it-compile.patch \
- \
- file://0010-eglibc-Cross-building-and-testing-instructions.patch \
- file://0011-eglibc-Help-bootstrap-cross-toolchain.patch \
- file://0012-eglibc-Resolve-__fpscr_values-on-SH4.patch \
- file://0013-eglibc-Forward-port-cross-locale-generation-support.patch \
- file://0014-localedef-add-to-archive-uses-a-hard-coded-locale-pa.patch \
- file://0021-Replace-echo-with-printf-builtin-in-nscd-init-script.patch \
- file://0023-timezone-Make-shell-interpreter-overridable-in-tzsel.patch \
- "
-# Makes for a rather long rev (22 characters), but...
-#
-SRCREV_FORMAT = "glibc_localedef"
-
-S = "${WORKDIR}/git"
-
-EXTRA_OECONF = "--with-glibc=${S}"
-
-# We do not need bash to run tzselect script, the default is to use
-# bash but it can be configured by setting KSHELL Makefile variable
-EXTRA_OEMAKE += "KSHELL=/bin/sh"
-
-CFLAGS += "-fgnu89-inline -std=gnu99 -DIS_IN\(x\)='0'"
-
-do_install() {
- install -d ${D}${bindir}
- install -m 0755 ${B}/localedef ${D}${bindir}/cross-localedef
- install -m 0755 ${B}/cross-localedef-hardlink ${D}${bindir}/cross-localedef-hardlink
-}
diff --git a/meta/recipes-core/glibc/cross-localedef-native_2.39.bb b/meta/recipes-core/glibc/cross-localedef-native_2.39.bb
new file mode 100644
index 0000000000..fed6e4ea97
--- /dev/null
+++ b/meta/recipes-core/glibc/cross-localedef-native_2.39.bb
@@ -0,0 +1,54 @@
+SUMMARY = "Cross locale generation tool for glibc"
+HOMEPAGE = "http://www.gnu.org/software/libc/libc.html"
+SECTION = "libs"
+LICENSE = "LGPL-2.1-only"
+
+LIC_FILES_CHKSUM = "file://LICENSES;md5=f77e878d320e99e94ae9a4aea7f491d1 \
+ file://COPYING;md5=b234ee4d69f5fce4486a80fdaf4a4263 \
+ file://posix/rxspencer/COPYRIGHT;md5=dc5485bb394a13b2332ec1c785f5d83a \
+ file://COPYING.LIB;md5=4fbd65380cdd255951079008b364516c"
+
+require glibc-version.inc
+
+# Tell autotools that we're working in the localedef directory
+#
+AUTOTOOLS_SCRIPT_PATH = "${S}/localedef"
+
+inherit autotools
+inherit native
+
+FILESEXTRAPATHS =. "${FILE_DIRNAME}/${PN}:${FILE_DIRNAME}/glibc:"
+
+SRC_URI = "${GLIBC_GIT_URI};branch=${SRCBRANCH};name=glibc \
+ git://github.com/kraj/localedef;branch=master;name=localedef;destsuffix=git/localedef;protocol=https \
+ \
+ file://0001-localedef-Add-hardlink-resolver-from-util-linux.patch \
+ file://0002-localedef-fix-ups-hardlink-to-make-it-compile.patch \
+ \
+ file://0010-eglibc-Cross-building-and-testing-instructions.patch \
+ file://0011-eglibc-Help-bootstrap-cross-toolchain.patch \
+ file://0012-eglibc-Resolve-__fpscr_values-on-SH4.patch \
+ file://0013-eglibc-Forward-port-cross-locale-generation-support.patch \
+ file://0014-localedef-add-to-archive-uses-a-hard-coded-locale-pa.patch \
+ file://0017-Replace-echo-with-printf-builtin-in-nscd-init-script.patch \
+ file://0019-timezone-Make-shell-interpreter-overridable-in-tzsel.patch \
+ "
+# Makes for a rather long rev (22 characters), but...
+#
+SRCREV_FORMAT = "glibc_localedef"
+
+S = "${WORKDIR}/git"
+
+EXTRA_OECONF = "--with-glibc=${S}"
+
+# We do not need bash to run tzselect script, the default is to use
+# bash but it can be configured by setting KSHELL Makefile variable
+EXTRA_OEMAKE += "KSHELL=/bin/sh"
+
+CFLAGS += "-fgnu89-inline -std=gnu99 -DIS_IN\(x\)='0'"
+
+do_install() {
+ install -d ${D}${bindir}
+ install -m 0755 ${B}/localedef ${D}${bindir}/cross-localedef
+ install -m 0755 ${B}/cross-localedef-hardlink ${D}${bindir}/cross-localedef-hardlink
+}
diff --git a/meta/recipes-core/glibc/glibc-common.inc b/meta/recipes-core/glibc/glibc-common.inc
index 90a6a534f3..b9516e77f0 100644
--- a/meta/recipes-core/glibc/glibc-common.inc
+++ b/meta/recipes-core/glibc/glibc-common.inc
@@ -4,7 +4,7 @@ HOMEPAGE = "http://www.gnu.org/software/libc/libc.html"
SECTION = "libs"
LICENSE = "GPL-2.0-only & LGPL-2.1-only"
-LIC_FILES_CHKSUM ?= "file://LICENSES;md5=1541fd8f5e8f1579512bf05f533371ba \
+LIC_FILES_CHKSUM ?= "file://LICENSES;md5=f77e878d320e99e94ae9a4aea7f491d1 \
file://COPYING;md5=b234ee4d69f5fce4486a80fdaf4a4263 \
file://posix/rxspencer/COPYRIGHT;md5=dc5485bb394a13b2332ec1c785f5d83a \
file://COPYING.LIB;md5=4fbd65380cdd255951079008b364516c"
@@ -22,4 +22,4 @@ ARM_INSTRUCTION_SET:armv6 = "arm"
#
COMPATIBLE_HOST:libc-musl:class-target = "null"
-PV = "2.35"
+PV = "2.39+git"
diff --git a/meta/recipes-core/glibc/glibc-locale.inc b/meta/recipes-core/glibc/glibc-locale.inc
index b8de7d3192..c63c9edd76 100644
--- a/meta/recipes-core/glibc/glibc-locale.inc
+++ b/meta/recipes-core/glibc/glibc-locale.inc
@@ -5,14 +5,9 @@ SUMMARY = "Locale data from glibc"
BPN = "glibc"
LOCALEBASEPN = "${MLPREFIX}glibc"
-# glibc-collateral.inc inhibits all default deps, but do_package needs objcopy
-# ERROR: objcopy failed with exit code 127 (cmd was 'i586-webos-linux-objcopy' --only-keep-debug 'glibc-locale/2.17-r0/package/usr/lib/gconv/IBM1166.so' 'glibc-locale/2.17-r0/package/usr/lib/gconv/.debug/IBM1166.so')
-# ERROR: Function failed: split_and_strip_files
-BINUTILSDEP = "virtual/${MLPREFIX}${TARGET_PREFIX}binutils:do_populate_sysroot"
-BINUTILSDEP:class-nativesdk = "virtual/${TARGET_PREFIX}binutils-crosssdk:do_populate_sysroot"
-do_package[depends] += "${BINUTILSDEP}"
-
-DEPENDS += "virtual/libc"
+# Do not inhibit default deps, do_package requires binutils/gcc for
+# objcopy/gcc-nm and glibc-locale depends on virtual/libc directly.
+INHIBIT_DEFAULT_DEPS = ""
# Binary locales are generated at build time if ENABLE_BINARY_LOCALE_GENERATION
# is set. The idea is to avoid running localedef on the target (at first boot)
@@ -25,7 +20,7 @@ ENABLE_BINARY_LOCALE_GENERATION:pn-nativesdk-glibc-locale = "1"
#enable locale generation on these arches
# BINARY_LOCALE_ARCHES is a space separated list of regular expressions
-BINARY_LOCALE_ARCHES ?= "arc arm.* aarch64 i[3-6]86 x86_64 powerpc mips mips64 riscv32 riscv64"
+BINARY_LOCALE_ARCHES ?= "arc arm.* aarch64 i[3-6]86 x86_64 powerpc mips mips64 riscv32 riscv64 loongarch64"
# set "1" to use cross-localedef for locale generation
# set "0" for qemu emulation of native localedef for locale generation
@@ -33,7 +28,7 @@ LOCALE_GENERATION_WITH_CROSS-LOCALEDEF = "1"
PROVIDES = "virtual/libc-locale"
-PACKAGES = "localedef ${PN}-dbg"
+PACKAGES = "localedef ${PN}-dbg ${LOCALEBASEPN}-locale-alias"
PACKAGES_DYNAMIC = "^locale-base-.* \
^glibc-gconv-.* ^glibc-charmap-.* ^glibc-localedata-.* ^glibc-binary-localedata-.* \
@@ -42,22 +37,22 @@ PACKAGES_DYNAMIC = "^locale-base-.* \
# Create a glibc-binaries package
ALLOW_EMPTY:${BPN}-binaries = "1"
PACKAGES += "${BPN}-binaries"
-RRECOMMENDS:${BPN}-binaries = "${@" ".join([p for p in d.getVar('PACKAGES').split() if p.find("glibc-binary") != -1])}"
+RRECOMMENDS:${BPN}-binaries = "${@" ".join([p for p in d.getVar('PACKAGES').split() if p.find("glibc-binary-") != -1])}"
# Create a glibc-charmaps package
ALLOW_EMPTY:${BPN}-charmaps = "1"
PACKAGES += "${BPN}-charmaps"
-RRECOMMENDS:${BPN}-charmaps = "${@" ".join([p for p in d.getVar('PACKAGES').split() if p.find("glibc-charmap") != -1])}"
+RRECOMMENDS:${BPN}-charmaps = "${@" ".join([p for p in d.getVar('PACKAGES').split() if p.find("glibc-charmap-") != -1])}"
# Create a glibc-gconvs package
ALLOW_EMPTY:${BPN}-gconvs = "1"
PACKAGES += "${BPN}-gconvs"
-RRECOMMENDS:${BPN}-gconvs = "${@" ".join([p for p in d.getVar('PACKAGES').split() if p.find("glibc-gconv") != -1])}"
+RRECOMMENDS:${BPN}-gconvs = "${@" ".join([p for p in d.getVar('PACKAGES').split() if p.find("glibc-gconv-") != -1])}"
# Create a glibc-localedatas package
ALLOW_EMPTY:${BPN}-localedatas = "1"
PACKAGES += "${BPN}-localedatas"
-RRECOMMENDS:${BPN}-localedatas = "${@" ".join([p for p in d.getVar('PACKAGES').split() if p.find("glibc-localedata") != -1])}"
+RRECOMMENDS:${BPN}-localedatas = "${@" ".join([p for p in d.getVar('PACKAGES').split() if p.find("glibc-localedata-") != -1])}"
DESCRIPTION:localedef = "glibc: compile locale definition files"
@@ -66,6 +61,7 @@ DESCRIPTION:localedef = "glibc: compile locale definition files"
# Explicitly add ${MLPREFIX} for FILES:glibc-gconv.
FILES:${MLPREFIX}glibc-gconv = "${libdir}/gconv/*"
FILES:localedef = "${bindir}/localedef"
+FILES:${LOCALEBASEPN}-locale-alias = "${datadir}/locale/locale.alias"
LOCALETREESRC = "${COMPONENTS_DIR}/${PACKAGE_ARCH}/glibc-stash-locale"
@@ -87,10 +83,9 @@ do_install() {
if [ ${PACKAGE_NO_GCONV} -eq 0 ]; then
copy_locale_files ${libdir}/gconv 0755
copy_locale_files ${datadir}/i18n 0644
- else
- # Remove the libdir if it is empty when gconv is not copied
- find ${D}${libdir} -type d -empty -delete
fi
+ # Remove empty dirs in libdir when gconv or locales are not copied
+ find ${D}${libdir} -type d -empty -delete
copy_locale_files ${datadir}/locale 0644
install -m 0644 ${LOCALETREESRC}/SUPPORTED ${WORKDIR}/SUPPORTED
}
diff --git a/meta/recipes-core/glibc/glibc-locale_2.35.bb b/meta/recipes-core/glibc/glibc-locale_2.39.bb
index f7702e0358..f7702e0358 100644
--- a/meta/recipes-core/glibc/glibc-locale_2.35.bb
+++ b/meta/recipes-core/glibc/glibc-locale_2.39.bb
diff --git a/meta/recipes-core/glibc/glibc-mtrace_2.35.bb b/meta/recipes-core/glibc/glibc-mtrace_2.39.bb
index 0b69bad46a..0b69bad46a 100644
--- a/meta/recipes-core/glibc/glibc-mtrace_2.35.bb
+++ b/meta/recipes-core/glibc/glibc-mtrace_2.39.bb
diff --git a/meta/recipes-core/glibc/glibc-package.inc b/meta/recipes-core/glibc/glibc-package.inc
index 7f9e7503a1..1ef987be0a 100644
--- a/meta/recipes-core/glibc/glibc-package.inc
+++ b/meta/recipes-core/glibc/glibc-package.inc
@@ -42,7 +42,7 @@ FILES_SOLIBSDEV = "${libdir}/lib*${SOLIBSDEV}"
FILES:${PN}-dev += "${libdir}/libpthread.a ${libdir}/libdl.a ${libdir}/libutil.a ${libdir}/libanl.a ${libdir}/*_nonshared.a ${base_libdir}/*_nonshared.a ${base_libdir}/*.o ${datadir}/aclocal"
RDEPENDS:${PN}-dev = "linux-libc-headers-dev"
FILES:${PN}-staticdev += "${libdir}/*.a ${base_libdir}/*.a"
-FILES:nscd = "${sbindir}/nscd* ${sysconfdir}/init.d/nscd ${systemd_system_unitdir}/nscd* ${sysconfdir}/tmpfiles.d/nscd.conf \
+FILES:nscd = "${sbindir}/nscd* ${sysconfdir}/init.d/nscd ${systemd_system_unitdir}/nscd* ${nonarch_libdir}/tmpfiles.d/nscd.conf \
${sysconfdir}/nscd.conf ${sysconfdir}/default/volatiles/98_nscd ${localstatedir}/db/nscd"
FILES:${PN}-mtrace = "${bindir}/mtrace"
FILES:tzcode = "${bindir}/tzselect ${sbindir}/zic ${bindir}/zdump"
@@ -87,7 +87,7 @@ do_install() {
rmdir --ignore-fail-on-non-empty ${D}${libexecdir}
fi
- oe_multilib_header bits/syscall.h bits/long-double.h bits/floatn.h bits/endianness.h bits/struct_rwlock.h
+ oe_multilib_header bits/syscall.h bits/long-double.h bits/floatn.h bits/endianness.h bits/struct_rwlock.h bits/math-vector.h
if [ -f ${D}${bindir}/mtrace ]; then
sed -i -e '1s,#!.*perl,#! ${USRBINPATH}/env perl,' -e '2s,exec.*perl,exec ${USRBINPATH}/env perl,' ${D}${bindir}/mtrace
@@ -132,9 +132,9 @@ def get_libc_fpu_setting(bb, d):
do_install:append:class-target() {
if ${@bb.utils.contains('DISTRO_FEATURES', 'systemd', 'true', 'false', d)}; then
- install -d ${D}${sysconfdir}/tmpfiles.d
+ install -d ${D}${nonarch_libdir}/tmpfiles.d
echo "d /run/nscd 755 root root -" \
- > ${D}${sysconfdir}/tmpfiles.d/nscd.conf
+ > ${D}${nonarch_libdir}/tmpfiles.d/nscd.conf
fi
if ${@bb.utils.contains('DISTRO_FEATURES', 'sysvinit', 'true', 'false', d)}; then
@@ -280,7 +280,7 @@ python populate_packages:prepend () {
pkg_postinst:nscd () {
if [ -z "$D" ]; then
if command -v systemd-tmpfiles >/dev/null; then
- systemd-tmpfiles --create ${sysconfdir}/tmpfiles.d/nscd.conf
+ systemd-tmpfiles --create ${nonarch_libdir}/tmpfiles.d/nscd.conf
elif [ -e ${sysconfdir}/init.d/populate-volatile.sh ]; then
${sysconfdir}/init.d/populate-volatile.sh update
fi
diff --git a/meta/recipes-core/glibc/glibc-scripts_2.35.bb b/meta/recipes-core/glibc/glibc-scripts_2.39.bb
index 5a89bd8022..5a89bd8022 100644
--- a/meta/recipes-core/glibc/glibc-scripts_2.35.bb
+++ b/meta/recipes-core/glibc/glibc-scripts_2.39.bb
diff --git a/meta/recipes-core/glibc/glibc-tests_2.35.bb b/meta/recipes-core/glibc/glibc-tests_2.35.bb
deleted file mode 100644
index 414f8660de..0000000000
--- a/meta/recipes-core/glibc/glibc-tests_2.35.bb
+++ /dev/null
@@ -1,116 +0,0 @@
-require glibc_${PV}.bb
-require glibc-tests.inc
-
-inherit ptest features_check
-REQUIRED_DISTRO_FEATURES = "ptest"
-
-SRC_URI:append = " \
- file://run-ptest \
-"
-
-SUMMARY = "glibc tests to be run with ptest"
-
-# Erase some variables already set by glibc_${PV}
-python __anonymous() {
- # Remove packages provided by glibc build, we only need a subset of them
- d.setVar("PACKAGES", "${PN} ${PN}-ptest")
-
- d.setVar("PROVIDES", "${PN} ${PN}-ptest")
- d.setVar("RPROVIDES", "${PN} ${PN}-ptest")
-
- d.setVar("BBCLASSEXTEND", "")
- d.setVar("RRECOMMENDS", "")
- d.setVar("SYSTEMD_SERVICE:nscd", "")
- d.setVar("SYSTEMD_PACKAGES", "")
-}
-
-# Remove any leftovers from original glibc recipe
-RPROVIDES:${PN} = "${PN}"
-RRECOMMENDS:${PN} = ""
-RDEPENDS:${PN} = " glibc sed"
-DEPENDS:append = " sed"
-
-# Just build tests for target - do not run them
-do_check:append () {
- oe_runmake -i check run-built-tests=no
-}
-addtask do_check after do_compile before do_install_ptest_base
-
-glibc_strip_build_directory () {
- # Delete all non executable files from build directory
- find ${B} ! -executable -type f -delete
-
- # Remove build dynamic libraries and links to them as
- # those are already installed in the target device
- find ${B} -type f -name "*.so" -delete
- find ${B} -type l -name "*.so*" -delete
-
- # Remove headers (installed with glibc)
- find ${B} -type f -name "*.h" -delete
-
- find ${B} -type f -name "isomac" -delete
- find ${B} -type f -name "annexc" -delete
-}
-
-do_install_ptest_base () {
- glibc_strip_build_directory
-
- ls -r ${B}/*/*-time64 > ${B}/tst_time64
-
- # Remove '-time64' suffix - those tests are also time related
- sed -e "s/-time64$//" ${B}/tst_time64 > ${B}/tst_time_tmp
- tst_time=$(cat ${B}/tst_time_tmp ${B}/tst_time64)
-
- rm ${B}/tst_time_tmp ${B}/tst_time64
- echo "${tst_time}"
-
- # Install build test programs to the image
- install -d ${D}${PTEST_PATH}/tests/glibc-ptest/
-
- for f in "${tst_time}"
- do
- cp -r ${f} ${D}${PTEST_PATH}/tests/glibc-ptest/
- done
-
- install -d ${D}${PTEST_PATH}
- cp ${WORKDIR}/run-ptest ${D}${PTEST_PATH}/
-
-}
-
-# The datadir directory is required to allow core (and reused)
-# glibc cleanup function to finish correctly, as this directory
-# is not created for ptests
-stash_locale_package_cleanup:prepend () {
- mkdir -p ${PKGD}${datadir}
-}
-
-stash_locale_sysroot_cleanup:prepend () {
- mkdir -p ${SYSROOT_DESTDIR}${datadir}
-}
-
-# Prevent the do_package() task to set 'libc6' prefix
-# for glibc tests related packages
-python populate_packages:prepend () {
- if d.getVar('DEBIAN_NAMES'):
- d.setVar('DEBIAN_NAMES', '')
-}
-
-FILES:${PN} = "${PTEST_PATH}/* /usr/src/debug/glibc-tests/*"
-
-EXCLUDE_FROM_SHLIBS = "1"
-
-# Install debug data in .debug and sources in /usr/src/debug
-# It is more handy to have _all_ the sources and symbols in one
-# place (package) as this recipe will be used for validation and
-# debugging.
-PACKAGE_DEBUG_SPLIT_STYLE = ".debug"
-
-# glibc test cases violate by default some Yocto/OE checks (staticdev,
-# textrel)
-# 'debug-files' - add everything (including debug) into one package
-# (no need to install/build *-src package)
-INSANE_SKIP:${PN} += "staticdev textrel debug-files rpaths"
-
-deltask do_stash_locale
-do_install[noexec] = "1"
-do_populate_sysroot[noexec] = "1"
diff --git a/meta/recipes-core/glibc/glibc-testsuite_2.35.bb b/meta/recipes-core/glibc/glibc-testsuite_2.35.bb
deleted file mode 100644
index e8ad2a938b..0000000000
--- a/meta/recipes-core/glibc/glibc-testsuite_2.35.bb
+++ /dev/null
@@ -1,35 +0,0 @@
-require glibc_${PV}.bb
-require glibc-tests.inc
-
-inherit qemu
-
-SRC_URI += "file://check-test-wrapper"
-
-# strip provides
-PROVIDES = ""
-
-DEPENDS += "${@'qemu-native' if d.getVar('TOOLCHAIN_TEST_TARGET') == 'user' else ''}"
-
-TOOLCHAIN_TEST_TARGET ??= "user"
-TOOLCHAIN_TEST_HOST ??= "localhost"
-TOOLCHAIN_TEST_HOST_USER ??= "root"
-TOOLCHAIN_TEST_HOST_PORT ??= "2222"
-
-do_check[nostamp] = "1"
-do_check:append () {
- chmod 0755 ${WORKDIR}/check-test-wrapper
-
- oe_runmake -i \
- QEMU_SYSROOT="${RECIPE_SYSROOT}" \
- QEMU_OPTIONS="${@qemu_target_binary(d)} ${QEMU_OPTIONS}" \
- SSH_HOST="${TOOLCHAIN_TEST_HOST}" \
- SSH_HOST_USER="${TOOLCHAIN_TEST_HOST_USER}" \
- SSH_HOST_PORT="${TOOLCHAIN_TEST_HOST_PORT}" \
- test-wrapper="${WORKDIR}/check-test-wrapper ${TOOLCHAIN_TEST_TARGET}" \
- check
-}
-
-inherit nopackages
-deltask do_stash_locale
-deltask do_install
-deltask do_populate_sysroot
diff --git a/meta/recipes-core/glibc/glibc-testsuite_2.39.bb b/meta/recipes-core/glibc/glibc-testsuite_2.39.bb
new file mode 100644
index 0000000000..2e076f4b0f
--- /dev/null
+++ b/meta/recipes-core/glibc/glibc-testsuite_2.39.bb
@@ -0,0 +1,36 @@
+require glibc_${PV}.bb
+require glibc-tests.inc
+
+inherit qemu
+
+SRC_URI += "file://check-test-wrapper"
+
+# strip provides
+PROVIDES = ""
+
+DEPENDS += "${@'qemu-native' if d.getVar('TOOLCHAIN_TEST_TARGET') == 'user' else ''}"
+
+TOOLCHAIN_TEST_TARGET ??= "user"
+TOOLCHAIN_TEST_HOST ??= "localhost"
+TOOLCHAIN_TEST_HOST_USER ??= "root"
+TOOLCHAIN_TEST_HOST_PORT ??= "2222"
+
+do_check[nostamp] = "1"
+do_check[network] = "1"
+do_check:append () {
+ chmod 0755 ${WORKDIR}/check-test-wrapper
+
+ oe_runmake -i \
+ QEMU_SYSROOT="${RECIPE_SYSROOT}" \
+ QEMU_OPTIONS="${@qemu_target_binary(d)} ${QEMU_OPTIONS}" \
+ SSH_HOST="${TOOLCHAIN_TEST_HOST}" \
+ SSH_HOST_USER="${TOOLCHAIN_TEST_HOST_USER}" \
+ SSH_HOST_PORT="${TOOLCHAIN_TEST_HOST_PORT}" \
+ test-wrapper="${WORKDIR}/check-test-wrapper ${TOOLCHAIN_TEST_TARGET}" \
+ check
+}
+
+inherit nopackages
+deltask do_stash_locale
+deltask do_install
+deltask do_populate_sysroot
diff --git a/meta/recipes-core/glibc/glibc-version.inc b/meta/recipes-core/glibc/glibc-version.inc
index 5fea8b33ef..618a574566 100644
--- a/meta/recipes-core/glibc/glibc-version.inc
+++ b/meta/recipes-core/glibc/glibc-version.inc
@@ -1,8 +1,10 @@
-SRCBRANCH ?= "release/2.35/master"
-PV = "2.35"
-SRCREV_glibc ?= "499a60179657d2945c6ad01bdac90e8427a6310e"
-SRCREV_localedef ?= "794da69788cbf9bf57b59a852f9f11307663fa87"
+SRCBRANCH ?= "release/2.39/master"
+PV = "2.39+git"
+SRCREV_glibc ?= "1b9c1a0047fb26a65a9b2a7b8cd977243f7d353c"
+SRCREV_localedef ?= "fab74f31b3811df543e24b6de47efdf45b538abc"
-GLIBC_GIT_URI ?= "git://sourceware.org/git/glibc.git"
+GLIBC_GIT_URI ?= "git://sourceware.org/git/glibc.git;protocol=https"
UPSTREAM_CHECK_GITTAGREGEX = "(?P<pver>\d+\.\d+(\.(?!90)\d+)*)"
+
+CVE_STATUS[CVE-2023-4911] = "fixed-version: Fixed in stable branch updates"
diff --git a/meta/recipes-core/glibc/glibc-y2038-tests_2.39.bb b/meta/recipes-core/glibc/glibc-y2038-tests_2.39.bb
new file mode 100644
index 0000000000..be49ca4cb7
--- /dev/null
+++ b/meta/recipes-core/glibc/glibc-y2038-tests_2.39.bb
@@ -0,0 +1,119 @@
+require glibc_${PV}.bb
+require glibc-tests.inc
+
+inherit ptest features_check
+REQUIRED_DISTRO_FEATURES = "ptest"
+
+SRC_URI += "\
+ file://run-ptest \
+"
+
+SUMMARY = "glibc tests using time32/time64 interfaces to be run with ptest for the purpose of checking y2038 compatiblity"
+
+# Erase some variables already set by glibc_${PV}
+python __anonymous() {
+ # Remove packages provided by glibc build, we only need a subset of them
+ d.setVar("PACKAGES", "${PN} ${PN}-ptest")
+
+ d.setVar("PROVIDES", "${PN} ${PN}-ptest")
+
+ bbclassextend = d.getVar("BBCLASSEXTEND").replace("nativesdk", "").strip()
+ d.setVar("BBCLASSEXTEND", bbclassextend)
+ d.setVar("RRECOMMENDS", "")
+ d.setVar("SYSTEMD_SERVICE:nscd", "")
+ d.setVar("SYSTEMD_PACKAGES", "")
+}
+
+# Remove any leftovers from original glibc recipe
+RPROVIDES:${PN} = "${PN}"
+RRECOMMENDS:${PN} = ""
+RDEPENDS:${PN} = "glibc libgcc sed bash"
+RDEPENDS:${PN}-ptest = "${PN}"
+DEPENDS += "sed"
+
+export oe_srcdir="${exec_prefix}/src/debug/glibc/${PV}/"
+
+# Just build tests for target - do not run them
+do_check:append () {
+ oe_runmake -i check run-built-tests=no
+}
+addtask do_check after do_compile before do_install_ptest_base
+
+glibc_strip_build_directory () {
+ # Delete all non executable files from build directory
+ find ${B} ! -executable -type f -delete
+
+ # Remove build dynamic libraries and links to them as
+ # those are already installed in the target device
+ find ${B} -type f -name "*.so" -delete
+ find ${B} -type l -name "*.so*" -delete
+
+ # Remove headers (installed with glibc)
+ find ${B} -type f -name "*.h" -delete
+
+ find ${B} -type f -name "isomac" -delete
+ find ${B} -type f -name "annexc" -delete
+}
+
+do_install_ptest_base () {
+ glibc_strip_build_directory
+
+ ls -r ${B}/*/*-time64 > ${B}/tst_time64
+
+ # Remove '-time64' suffix - those tests are also time related
+ sed -e "s/-time64$//" ${B}/tst_time64 > ${B}/tst_time_tmp
+ tst_time=$(cat ${B}/tst_time_tmp ${B}/tst_time64)
+
+ rm ${B}/tst_time_tmp ${B}/tst_time64
+ echo "${tst_time}"
+
+ # Install build test programs to the image
+ install -d ${D}${PTEST_PATH}/tests/glibc-ptest/
+
+ for f in "${tst_time}"
+ do
+ cp -r ${f} ${D}${PTEST_PATH}/tests/glibc-ptest/
+ done
+
+ install -d ${D}${PTEST_PATH}
+ cp ${WORKDIR}/run-ptest ${D}${PTEST_PATH}/
+
+}
+
+# The datadir directory is required to allow core (and reused)
+# glibc cleanup function to finish correctly, as this directory
+# is not created for ptests
+stash_locale_package_cleanup:prepend () {
+ mkdir -p ${PKGD}${datadir}
+}
+
+stash_locale_sysroot_cleanup:prepend () {
+ mkdir -p ${SYSROOT_DESTDIR}${datadir}
+}
+
+# Prevent the do_package() task to set 'libc6' prefix
+# for glibc tests related packages
+python populate_packages:prepend () {
+ if d.getVar('DEBIAN_NAMES'):
+ d.setVar('DEBIAN_NAMES', '')
+}
+
+FILES:${PN} = "${PTEST_PATH}/* /usr/src/debug/${PN}/*"
+
+EXCLUDE_FROM_SHLIBS = "1"
+
+# Install debug data in .debug and sources in /usr/src/debug
+# It is more handy to have _all_ the sources and symbols in one
+# place (package) as this recipe will be used for validation and
+# debugging.
+PACKAGE_DEBUG_SPLIT_STYLE = ".debug"
+
+# glibc test cases violate by default some Yocto/OE checks (staticdev,
+# textrel)
+# 'debug-files' - add everything (including debug) into one package
+# (no need to install/build *-src package)
+INSANE_SKIP:${PN} += "staticdev textrel debug-files rpaths"
+
+deltask do_stash_locale
+do_install[noexec] = "1"
+do_populate_sysroot[noexec] = "1"
diff --git a/meta/recipes-core/glibc/glibc.inc b/meta/recipes-core/glibc/glibc.inc
index fdd241d973..b08a70aa46 100644
--- a/meta/recipes-core/glibc/glibc.inc
+++ b/meta/recipes-core/glibc/glibc.inc
@@ -1,7 +1,7 @@
require glibc-common.inc
require glibc-ld.inc
-DEPENDS = "virtual/${TARGET_PREFIX}gcc libgcc-initial linux-libc-headers"
+DEPENDS = "virtual/${HOST_PREFIX}gcc virtual/${HOST_PREFIX}binutils libgcc-initial linux-libc-headers"
PROVIDES = "virtual/libc"
PROVIDES += "virtual/libintl virtual/libiconv"
diff --git a/meta/recipes-core/glibc/glibc/0001-localedef-Add-hardlink-resolver-from-util-linux.patch b/meta/recipes-core/glibc/glibc/0001-localedef-Add-hardlink-resolver-from-util-linux.patch
index 546fe58214..4f919078dd 100644
--- a/meta/recipes-core/glibc/glibc/0001-localedef-Add-hardlink-resolver-from-util-linux.patch
+++ b/meta/recipes-core/glibc/glibc/0001-localedef-Add-hardlink-resolver-from-util-linux.patch
@@ -1,4 +1,4 @@
-From 8778429a3345bb5c0361332cf5103f394717a396 Mon Sep 17 00:00:00 2001
+From 544d23dea91b2be793c805b9e4bce8cd1d28121f Mon Sep 17 00:00:00 2001
From: Jason Wessel <jason.wessel@windriver.com>
Date: Sat, 7 Dec 2019 09:59:22 -0800
Subject: [PATCH] localedef: Add hardlink resolver from util-linux
diff --git a/meta/recipes-core/glibc/glibc/0002-localedef-fix-ups-hardlink-to-make-it-compile.patch b/meta/recipes-core/glibc/glibc/0002-localedef-fix-ups-hardlink-to-make-it-compile.patch
index 94a05cf954..7c8fa973ec 100644
--- a/meta/recipes-core/glibc/glibc/0002-localedef-fix-ups-hardlink-to-make-it-compile.patch
+++ b/meta/recipes-core/glibc/glibc/0002-localedef-fix-ups-hardlink-to-make-it-compile.patch
@@ -1,4 +1,4 @@
-From 87a69126d97bb8d5d52e34e451b4a7076efd6bed Mon Sep 17 00:00:00 2001
+From ebb1e37285ab541135005cfe945b7a58e4b95040 Mon Sep 17 00:00:00 2001
From: Jason Wessel <jason.wessel@windriver.com>
Date: Sat, 7 Dec 2019 10:01:37 -0800
Subject: [PATCH] localedef: fix-ups hardlink to make it compile
diff --git a/meta/recipes-core/glibc/glibc/0003-nativesdk-glibc-Look-for-host-system-ld.so.cache-as-.patch b/meta/recipes-core/glibc/glibc/0003-nativesdk-glibc-Look-for-host-system-ld.so.cache-as-.patch
index 9a605078b9..bd4b5aa98b 100644
--- a/meta/recipes-core/glibc/glibc/0003-nativesdk-glibc-Look-for-host-system-ld.so.cache-as-.patch
+++ b/meta/recipes-core/glibc/glibc/0003-nativesdk-glibc-Look-for-host-system-ld.so.cache-as-.patch
@@ -1,4 +1,4 @@
-From 752b0d32fc96728ee624dbd62bf23e034d8d2aed Mon Sep 17 00:00:00 2001
+From 9770abfda8e85fe027f95871bc03450d05b1e2c8 Mon Sep 17 00:00:00 2001
From: Khem Raj <raj.khem@gmail.com>
Date: Wed, 18 Mar 2015 01:48:24 +0000
Subject: [PATCH] nativesdk-glibc: Look for host system ld.so.cache as well
@@ -30,10 +30,10 @@ Signed-off-by: Khem Raj <raj.khem@gmail.com>
1 file changed, 8 insertions(+), 8 deletions(-)
diff --git a/elf/dl-load.c b/elf/dl-load.c
-index 721593135e..39c4657fa2 100644
+index ce8fdea302..1f502eb026 100644
--- a/elf/dl-load.c
+++ b/elf/dl-load.c
-@@ -2208,6 +2208,14 @@ _dl_map_object (struct link_map *loader, const char *name,
+@@ -2105,6 +2105,14 @@ _dl_map_object (struct link_map *loader, const char *name,
}
}
@@ -48,7 +48,7 @@ index 721593135e..39c4657fa2 100644
#ifdef USE_LDCONFIG
if (fd == -1
&& (__glibc_likely ((mode & __RTLD_SECURE) == 0)
-@@ -2266,14 +2274,6 @@ _dl_map_object (struct link_map *loader, const char *name,
+@@ -2163,14 +2171,6 @@ _dl_map_object (struct link_map *loader, const char *name,
}
#endif
diff --git a/meta/recipes-core/glibc/glibc/0004-nativesdk-glibc-Fix-buffer-overrun-with-a-relocated-.patch b/meta/recipes-core/glibc/glibc/0004-nativesdk-glibc-Fix-buffer-overrun-with-a-relocated-.patch
index da288d6ccf..19fc561a06 100644
--- a/meta/recipes-core/glibc/glibc/0004-nativesdk-glibc-Fix-buffer-overrun-with-a-relocated-.patch
+++ b/meta/recipes-core/glibc/glibc/0004-nativesdk-glibc-Fix-buffer-overrun-with-a-relocated-.patch
@@ -1,4 +1,4 @@
-From 2f7407697f2a905fedb98037152e7830f73bc6c6 Mon Sep 17 00:00:00 2001
+From 587b92ff99e6d8f59c461ee8beecae39d8818f7e Mon Sep 17 00:00:00 2001
From: Khem Raj <raj.khem@gmail.com>
Date: Wed, 18 Mar 2015 01:50:00 +0000
Subject: [PATCH] nativesdk-glibc: Fix buffer overrun with a relocated SDK
@@ -21,10 +21,10 @@ Signed-off-by: Khem Raj <raj.khem@gmail.com>
1 file changed, 12 insertions(+)
diff --git a/elf/dl-load.c b/elf/dl-load.c
-index 39c4657fa2..daa3af6c51 100644
+index 1f502eb026..c4a543fb00 100644
--- a/elf/dl-load.c
+++ b/elf/dl-load.c
-@@ -1904,7 +1904,19 @@ open_path (const char *name, size_t namelen, int mode,
+@@ -1802,7 +1802,19 @@ open_path (const char *name, size_t namelen, int mode,
given on the command line when rtld is run directly. */
return -1;
diff --git a/meta/recipes-core/glibc/glibc/0005-nativesdk-glibc-Raise-the-size-of-arrays-containing-.patch b/meta/recipes-core/glibc/glibc/0005-nativesdk-glibc-Raise-the-size-of-arrays-containing-.patch
index 14bcaf3ef9..55892417f4 100644
--- a/meta/recipes-core/glibc/glibc/0005-nativesdk-glibc-Raise-the-size-of-arrays-containing-.patch
+++ b/meta/recipes-core/glibc/glibc/0005-nativesdk-glibc-Raise-the-size-of-arrays-containing-.patch
@@ -1,4 +1,4 @@
-From 88a31cd08801df53249963f3b26c7dbcee6ae2f8 Mon Sep 17 00:00:00 2001
+From 49caf586b80ba030a0ee4af9f6128ff2979ea636 Mon Sep 17 00:00:00 2001
From: Khem Raj <raj.khem@gmail.com>
Date: Wed, 18 Mar 2015 01:51:38 +0000
Subject: [PATCH] nativesdk-glibc: Raise the size of arrays containing dl paths
@@ -19,17 +19,17 @@ Signed-off-by: Khem Raj <raj.khem@gmail.com>
elf/dl-load.c | 4 ++--
elf/dl-usage.c | 6 ++++--
elf/interp.c | 2 +-
- elf/ldconfig.c | 3 +++
+ elf/ldconfig.c | 2 ++
elf/rtld.c | 1 +
iconv/gconv_conf.c | 2 +-
sysdeps/generic/dl-cache.h | 4 ----
- 8 files changed, 16 insertions(+), 10 deletions(-)
+ 8 files changed, 15 insertions(+), 10 deletions(-)
diff --git a/elf/dl-cache.c b/elf/dl-cache.c
-index 2b8da8650d..3d9787bda4 100644
+index 85f3f179ed..cc55887c56 100644
--- a/elf/dl-cache.c
+++ b/elf/dl-cache.c
-@@ -355,6 +355,10 @@ search_cache (const char *string_table, uint32_t string_table_size,
+@@ -352,6 +352,10 @@ search_cache (const char *string_table, uint32_t string_table_size,
return best;
}
@@ -41,7 +41,7 @@ index 2b8da8650d..3d9787bda4 100644
_dl_cache_libcmp (const char *p1, const char *p2)
{
diff --git a/elf/dl-load.c b/elf/dl-load.c
-index daa3af6c51..e323952993 100644
+index c4a543fb00..27fb70f09b 100644
--- a/elf/dl-load.c
+++ b/elf/dl-load.c
@@ -117,8 +117,8 @@ enum { ncapstr = 1, max_capstrlen = 0 };
@@ -56,7 +56,7 @@ index daa3af6c51..e323952993 100644
SYSTEM_DIRS_LEN
};
diff --git a/elf/dl-usage.c b/elf/dl-usage.c
-index 5ad3a72559..88f26d3692 100644
+index 5baac4ba8e..60097ad0e2 100644
--- a/elf/dl-usage.c
+++ b/elf/dl-usage.c
@@ -25,6 +25,8 @@
@@ -68,7 +68,7 @@ index 5ad3a72559..88f26d3692 100644
void
_dl_usage (const char *argv0, const char *wrong_option)
{
-@@ -244,7 +246,7 @@ setting environment variables (which would be inherited by subprocesses).\n\
+@@ -185,7 +187,7 @@ setting environment variables (which would be inherited by subprocesses).\n\
--list list all dependencies and how they are resolved\n\
--verify verify that given object really is a dynamically linked\n\
object we can handle\n\
@@ -77,7 +77,7 @@ index 5ad3a72559..88f26d3692 100644
--library-path PATH use given PATH instead of content of the environment\n\
variable LD_LIBRARY_PATH\n\
--glibc-hwcaps-prepend LIST\n\
-@@ -267,7 +269,7 @@ setting environment variables (which would be inherited by subprocesses).\n\
+@@ -204,7 +206,7 @@ setting environment variables (which would be inherited by subprocesses).\n\
\n\
This program interpreter self-identifies as: " RTLD "\n\
",
@@ -85,9 +85,9 @@ index 5ad3a72559..88f26d3692 100644
+ argv0, LD_SO_CACHE);
print_search_path_for_help (state);
print_hwcaps_subdirectories (state);
- print_legacy_hwcap_directories ();
+ _exit (EXIT_SUCCESS);
diff --git a/elf/interp.c b/elf/interp.c
-index 91966702ca..dc86c20e83 100644
+index 8b705824bf..7d094829f3 100644
--- a/elf/interp.c
+++ b/elf/interp.c
@@ -18,5 +18,5 @@
@@ -98,24 +98,23 @@ index 91966702ca..dc86c20e83 100644
+const char __invoke_dynamic_linker__[4096] __attribute__ ((section (".interp")))
= RUNTIME_LINKER;
diff --git a/elf/ldconfig.c b/elf/ldconfig.c
-index 101d56ac8e..33debef60a 100644
+index b64c54b53e..caf7001745 100644
--- a/elf/ldconfig.c
+++ b/elf/ldconfig.c
-@@ -176,6 +176,9 @@ static struct argp argp =
+@@ -150,6 +150,8 @@ static struct argp argp =
options, parse_opt, NULL, doc, NULL, more_help, NULL
};
-+
+extern const char LD_SO_CACHE[4096] __attribute__ ((section (".ldsocache")));
+
- /* Check if string corresponds to an important hardware capability or
- a platform. */
- static int
+ /* Handle program arguments. */
+ static error_t
+ parse_opt (int key, char *arg, struct argp_state *state)
diff --git a/elf/rtld.c b/elf/rtld.c
-index 4b09e84b0d..56d93ff616 100644
+index 4f494b792e..d1c1252188 100644
--- a/elf/rtld.c
+++ b/elf/rtld.c
-@@ -193,6 +193,7 @@ dso_name_valid_for_suid (const char *p)
+@@ -190,6 +190,7 @@ dso_name_valid_for_suid (const char *p)
}
return *p != '\0';
}
@@ -124,7 +123,7 @@ index 4b09e84b0d..56d93ff616 100644
static void
audit_list_init (struct audit_list *list)
diff --git a/iconv/gconv_conf.c b/iconv/gconv_conf.c
-index 077082af66..46b6152455 100644
+index 1063c31a2b..57fa832e52 100644
--- a/iconv/gconv_conf.c
+++ b/iconv/gconv_conf.c
@@ -35,7 +35,7 @@
@@ -137,7 +136,7 @@ index 077082af66..46b6152455 100644
/* Type to represent search path. */
struct path_elem
diff --git a/sysdeps/generic/dl-cache.h b/sysdeps/generic/dl-cache.h
-index 964d50a486..94bf68ca9d 100644
+index 919e49ffc8..cd3f8ba94b 100644
--- a/sysdeps/generic/dl-cache.h
+++ b/sysdeps/generic/dl-cache.h
@@ -34,10 +34,6 @@
diff --git a/meta/recipes-core/glibc/glibc/0006-nativesdk-glibc-Allow-64-bit-atomics-for-x86.patch b/meta/recipes-core/glibc/glibc/0006-nativesdk-glibc-Allow-64-bit-atomics-for-x86.patch
index 493b2daad3..56eaaed4b7 100644
--- a/meta/recipes-core/glibc/glibc/0006-nativesdk-glibc-Allow-64-bit-atomics-for-x86.patch
+++ b/meta/recipes-core/glibc/glibc/0006-nativesdk-glibc-Allow-64-bit-atomics-for-x86.patch
@@ -1,4 +1,4 @@
-From a1fbd7ef1da02f334ff72c52cb11116164649067 Mon Sep 17 00:00:00 2001
+From 3a94365c730d174a3c30c6d9282e6ca12d9ad091 Mon Sep 17 00:00:00 2001
From: Khem Raj <raj.khem@gmail.com>
Date: Thu, 31 Dec 2015 14:35:35 -0800
Subject: [PATCH] nativesdk-glibc: Allow 64 bit atomics for x86
@@ -17,10 +17,10 @@ Signed-off-by: Khem Raj <raj.khem@gmail.com>
1 file changed, 1 insertion(+), 6 deletions(-)
diff --git a/sysdeps/x86/atomic-machine.h b/sysdeps/x86/atomic-machine.h
-index 2692d94a92..9d39bfdbd5 100644
+index cfd395087b..28a937c468 100644
--- a/sysdeps/x86/atomic-machine.h
+++ b/sysdeps/x86/atomic-machine.h
-@@ -52,19 +52,14 @@ typedef uintmax_t uatomic_max_t;
+@@ -26,19 +26,14 @@
#define LOCK_PREFIX "lock;"
#define USE_ATOMIC_COMPILER_BUILTINS 1
diff --git a/meta/recipes-core/glibc/glibc/0007-nativesdk-glibc-Make-relocatable-install-for-locales.patch b/meta/recipes-core/glibc/glibc/0007-nativesdk-glibc-Make-relocatable-install-for-locales.patch
index b40d2bdef6..54c085a714 100644
--- a/meta/recipes-core/glibc/glibc/0007-nativesdk-glibc-Make-relocatable-install-for-locales.patch
+++ b/meta/recipes-core/glibc/glibc/0007-nativesdk-glibc-Make-relocatable-install-for-locales.patch
@@ -1,4 +1,4 @@
-From bf1603b3d73f64de777be00f7e55f2cfef596102 Mon Sep 17 00:00:00 2001
+From 296bdde0683aa55cdea0fd0cab05ff8fbc462b17 Mon Sep 17 00:00:00 2001
From: Khem Raj <raj.khem@gmail.com>
Date: Fri, 3 Aug 2018 09:55:12 -0700
Subject: [PATCH] nativesdk-glibc: Make relocatable install for locales
@@ -19,7 +19,7 @@ Signed-off-by: Khem Raj <raj.khem@gmail.com>
4 files changed, 8 insertions(+), 7 deletions(-)
diff --git a/locale/findlocale.c b/locale/findlocale.c
-index 5986373edd..856ba9afc0 100644
+index 8d6e4e33e3..bfe74f241d 100644
--- a/locale/findlocale.c
+++ b/locale/findlocale.c
@@ -55,7 +55,7 @@ struct __locale_data *const _nl_C[] attribute_hidden =
@@ -41,7 +41,7 @@ index 5986373edd..856ba9afc0 100644
else
/* We really have to load some data. First see whether the name is
diff --git a/locale/loadarchive.c b/locale/loadarchive.c
-index 512769eaec..436619091b 100644
+index 452e3eb6e3..c7467aec42 100644
--- a/locale/loadarchive.c
+++ b/locale/loadarchive.c
@@ -42,7 +42,7 @@
@@ -54,10 +54,10 @@ index 512769eaec..436619091b 100644
/* Size of initial mapping window, optimal if large enough to
cover the header plus the initial locale. */
diff --git a/locale/localeinfo.h b/locale/localeinfo.h
-index b3d4da0185..22f9dc1140 100644
+index ed698faef1..f7efc288a5 100644
--- a/locale/localeinfo.h
+++ b/locale/localeinfo.h
-@@ -331,7 +331,7 @@ _nl_lookup_word (locale_t l, int category, int item)
+@@ -347,7 +347,7 @@ _nl_lookup_word (locale_t l, int category, int item)
}
/* Default search path if no LOCPATH environment variable. */
@@ -67,7 +67,7 @@ index b3d4da0185..22f9dc1140 100644
/* Load the locale data for CATEGORY from the file specified by *NAME.
If *NAME is "", use environment variables as specified by POSIX, and
diff --git a/locale/programs/locale.c b/locale/programs/locale.c
-index e9275d6b83..a9109155e5 100644
+index c7ee1874e8..8281e32236 100644
--- a/locale/programs/locale.c
+++ b/locale/programs/locale.c
@@ -631,6 +631,7 @@ nameentcmp (const void *a, const void *b)
diff --git a/meta/recipes-core/glibc/glibc/0008-nativesdk-glibc-Fall-back-to-faccessat-on-faccess2-r.patch b/meta/recipes-core/glibc/glibc/0008-nativesdk-glibc-Fall-back-to-faccessat-on-faccess2-r.patch
index a47dd5331a..096764009a 100644
--- a/meta/recipes-core/glibc/glibc/0008-nativesdk-glibc-Fall-back-to-faccessat-on-faccess2-r.patch
+++ b/meta/recipes-core/glibc/glibc/0008-nativesdk-glibc-Fall-back-to-faccessat-on-faccess2-r.patch
@@ -1,4 +1,4 @@
-From 78b2e81940561069faf7698931a033784f794e40 Mon Sep 17 00:00:00 2001
+From 70da806febac8b2eead6ddc32451bbc1787a1d7d Mon Sep 17 00:00:00 2001
From: Khem Raj <raj.khem@gmail.com>
Date: Sat, 6 Mar 2021 14:48:56 -0800
Subject: [PATCH] nativesdk-glibc: Fall back to faccessat on faccess2 returns
@@ -14,7 +14,7 @@ Signed-off-by: Khem Raj <raj.khem@gmail.com>
1 file changed, 5 insertions(+), 1 deletion(-)
diff --git a/sysdeps/unix/sysv/linux/faccessat.c b/sysdeps/unix/sysv/linux/faccessat.c
-index 13160d3249..ee3ddc9b79 100644
+index 2fa57fd63d..4d0d5ff0c4 100644
--- a/sysdeps/unix/sysv/linux/faccessat.c
+++ b/sysdeps/unix/sysv/linux/faccessat.c
@@ -30,7 +30,11 @@ __faccessat (int fd, const char *file, int mode, int flag)
diff --git a/meta/recipes-core/glibc/glibc/0009-yes-within-the-path-sets-wrong-config-variables.patch b/meta/recipes-core/glibc/glibc/0009-yes-within-the-path-sets-wrong-config-variables.patch
index 77644a2ee3..364e5cfde8 100644
--- a/meta/recipes-core/glibc/glibc/0009-yes-within-the-path-sets-wrong-config-variables.patch
+++ b/meta/recipes-core/glibc/glibc/0009-yes-within-the-path-sets-wrong-config-variables.patch
@@ -1,4 +1,4 @@
-From f6e96a95212bc1fef57b9594a7dddc0c20639873 Mon Sep 17 00:00:00 2001
+From 2192588942c5bc3b5fa10fc6d7433923f42e9ba0 Mon Sep 17 00:00:00 2001
From: Khem Raj <raj.khem@gmail.com>
Date: Wed, 18 Mar 2015 00:31:06 +0000
Subject: [PATCH] 'yes' within the path sets wrong config variables
@@ -29,10 +29,10 @@ Signed-off-by: Khem Raj <raj.khem@gmail.com>
12 files changed, 28 insertions(+), 28 deletions(-)
diff --git a/sysdeps/aarch64/configure b/sysdeps/aarch64/configure
-index 4c1fac49f3..597314f476 100644
+index ca57edce47..5e91fab023 100644
--- a/sysdeps/aarch64/configure
+++ b/sysdeps/aarch64/configure
-@@ -157,12 +157,12 @@ else
+@@ -165,12 +165,12 @@ else $as_nop
cat confdefs.h - <<_ACEOF >conftest.$ac_ext
/* end confdefs.h. */
#ifdef __AARCH64EB__
@@ -42,16 +42,16 @@ index 4c1fac49f3..597314f476 100644
_ACEOF
if (eval "$ac_cpp conftest.$ac_ext") 2>&5 |
-- $EGREP "yes" >/dev/null 2>&1; then :
-+ $EGREP "is_aarch64_be" >/dev/null 2>&1; then :
+- $EGREP "yes" >/dev/null 2>&1
++ $EGREP "is_aarch64_be" >/dev/null 2>&1
+ then :
libc_cv_aarch64_be=yes
- else
- libc_cv_aarch64_be=no
+ else $as_nop
diff --git a/sysdeps/aarch64/configure.ac b/sysdeps/aarch64/configure.ac
-index 3347c13fa1..4af163c0b6 100644
+index 27874eceb4..8a708f2ef4 100644
--- a/sysdeps/aarch64/configure.ac
+++ b/sysdeps/aarch64/configure.ac
-@@ -17,8 +17,8 @@ AC_DEFINE(SUPPORT_STATIC_PIE)
+@@ -13,8 +13,8 @@ AC_DEFINE(SUPPORT_STATIC_PIE)
# the dynamic linker via %ifdef.
AC_CACHE_CHECK([for big endian],
[libc_cv_aarch64_be],
@@ -63,10 +63,10 @@ index 3347c13fa1..4af163c0b6 100644
], libc_cv_aarch64_be=yes, libc_cv_aarch64_be=no)])
if test $libc_cv_aarch64_be = yes; then
diff --git a/sysdeps/arm/configure b/sysdeps/arm/configure
-index 431e843b2b..e152461138 100644
+index 35e2918922..94d7fbe8bb 100644
--- a/sysdeps/arm/configure
+++ b/sysdeps/arm/configure
-@@ -151,12 +151,12 @@ else
+@@ -161,12 +161,12 @@ else $as_nop
cat confdefs.h - <<_ACEOF >conftest.$ac_ext
/* end confdefs.h. */
#ifdef __ARM_PCS_VFP
@@ -76,16 +76,16 @@ index 431e843b2b..e152461138 100644
_ACEOF
if (eval "$ac_cpp conftest.$ac_ext") 2>&5 |
-- $EGREP "yes" >/dev/null 2>&1; then :
-+ $EGREP "use_arm_pcs_vfp" >/dev/null 2>&1; then :
+- $EGREP "yes" >/dev/null 2>&1
++ $EGREP "use_arm_pcs_vfp" >/dev/null 2>&1
+ then :
libc_cv_arm_pcs_vfp=yes
- else
- libc_cv_arm_pcs_vfp=no
+ else $as_nop
diff --git a/sysdeps/arm/configure.ac b/sysdeps/arm/configure.ac
-index 90cdd69c75..05a262ba00 100644
+index 5172e30bbe..f06dedd7c5 100644
--- a/sysdeps/arm/configure.ac
+++ b/sysdeps/arm/configure.ac
-@@ -15,8 +15,8 @@ AC_DEFINE(PI_STATIC_AND_HIDDEN)
+@@ -10,8 +10,8 @@ GLIBC_PROVIDES dnl See aclocal.m4 in the top level source directory.
# the dynamic linker via %ifdef.
AC_CACHE_CHECK([whether the compiler is using the ARM hard-float ABI],
[libc_cv_arm_pcs_vfp],
@@ -97,10 +97,10 @@ index 90cdd69c75..05a262ba00 100644
], libc_cv_arm_pcs_vfp=yes, libc_cv_arm_pcs_vfp=no)])
if test $libc_cv_arm_pcs_vfp = yes; then
diff --git a/sysdeps/mips/configure b/sysdeps/mips/configure
-index 4e13248c03..f14af952d0 100644
+index 1e8c6711e6..ae52ccd929 100644
--- a/sysdeps/mips/configure
+++ b/sysdeps/mips/configure
-@@ -143,11 +143,11 @@ else
+@@ -158,11 +158,11 @@ else $as_nop
/* end confdefs.h. */
dnl
#ifdef __mips_nan2008
@@ -109,17 +109,17 @@ index 4e13248c03..f14af952d0 100644
#endif
_ACEOF
if (eval "$ac_cpp conftest.$ac_ext") 2>&5 |
-- $EGREP "yes" >/dev/null 2>&1; then :
-+ $EGREP "use_mips_nan2008" >/dev/null 2>&1; then :
+- $EGREP "yes" >/dev/null 2>&1
++ $EGREP "use_mips_nan2008" >/dev/null 2>&1
+ then :
libc_cv_mips_nan2008=yes
- else
- libc_cv_mips_nan2008=no
+ else $as_nop
diff --git a/sysdeps/mips/configure.ac b/sysdeps/mips/configure.ac
-index bcbdaffd9f..ad3057f4cc 100644
+index d3cd780d78..250223d206 100644
--- a/sysdeps/mips/configure.ac
+++ b/sysdeps/mips/configure.ac
@@ -6,9 +6,9 @@ dnl position independent way.
- dnl AC_DEFINE(PI_STATIC_AND_HIDDEN)
+ AC_DEFINE(HIDDEN_VAR_NEEDS_DYNAMIC_RELOC)
AC_CACHE_CHECK([whether the compiler is using the 2008 NaN encoding],
- libc_cv_mips_nan2008, [AC_EGREP_CPP(yes, [dnl
@@ -131,10 +131,10 @@ index bcbdaffd9f..ad3057f4cc 100644
if test x$libc_cv_mips_nan2008 = xyes; then
AC_DEFINE(HAVE_MIPS_NAN2008)
diff --git a/sysdeps/nios2/configure b/sysdeps/nios2/configure
-index 14c8a3a014..dde3814ef2 100644
+index 2fb230cbaa..1959d0a444 100644
--- a/sysdeps/nios2/configure
+++ b/sysdeps/nios2/configure
-@@ -142,12 +142,12 @@ else
+@@ -155,12 +155,12 @@ else $as_nop
cat confdefs.h - <<_ACEOF >conftest.$ac_ext
/* end confdefs.h. */
#ifdef __nios2_big_endian__
@@ -144,13 +144,13 @@ index 14c8a3a014..dde3814ef2 100644
_ACEOF
if (eval "$ac_cpp conftest.$ac_ext") 2>&5 |
-- $EGREP "yes" >/dev/null 2>&1; then :
-+ $EGREP "is_nios2_be" >/dev/null 2>&1; then :
+- $EGREP "yes" >/dev/null 2>&1
++ $EGREP "is_nios2_be" >/dev/null 2>&1
+ then :
libc_cv_nios2_be=yes
- else
- libc_cv_nios2_be=no
+ else $as_nop
diff --git a/sysdeps/nios2/configure.ac b/sysdeps/nios2/configure.ac
-index f05f43802b..dc8639902d 100644
+index f738e9a7ed..4085851cbc 100644
--- a/sysdeps/nios2/configure.ac
+++ b/sysdeps/nios2/configure.ac
@@ -4,8 +4,8 @@ GLIBC_PROVIDES dnl See aclocal.m4 in the top level source directory.
@@ -165,10 +165,10 @@ index f05f43802b..dc8639902d 100644
], libc_cv_nios2_be=yes, libc_cv_nios2_be=no)])
if test $libc_cv_nios2_be = yes; then
diff --git a/sysdeps/unix/sysv/linux/mips/configure b/sysdeps/unix/sysv/linux/mips/configure
-index f25f2a3a65..1b7483e6c6 100644
+index a060901de4..0ac7019438 100644
--- a/sysdeps/unix/sysv/linux/mips/configure
+++ b/sysdeps/unix/sysv/linux/mips/configure
-@@ -414,11 +414,11 @@ else
+@@ -441,11 +441,11 @@ else $as_nop
/* end confdefs.h. */
dnl
#ifdef __mips_nan2008
@@ -177,11 +177,11 @@ index f25f2a3a65..1b7483e6c6 100644
#endif
_ACEOF
if (eval "$ac_cpp conftest.$ac_ext") 2>&5 |
-- $EGREP "yes" >/dev/null 2>&1; then :
-+ $EGREP "use_mips_nan2008" >/dev/null 2>&1; then :
+- $EGREP "yes" >/dev/null 2>&1
++ $EGREP "use_mips_nan2008" >/dev/null 2>&1
+ then :
libc_cv_mips_nan2008=yes
- else
- libc_cv_mips_nan2008=no
+ else $as_nop
diff --git a/sysdeps/unix/sysv/linux/mips/configure.ac b/sysdeps/unix/sysv/linux/mips/configure.ac
index 049a0f4bdf..005526d4e8 100644
--- a/sysdeps/unix/sysv/linux/mips/configure.ac
@@ -199,10 +199,10 @@ index 049a0f4bdf..005526d4e8 100644
libc_mips_nan=
diff --git a/sysdeps/unix/sysv/linux/powerpc/powerpc64/configure b/sysdeps/unix/sysv/linux/powerpc/powerpc64/configure
-index ae7f254da4..874519000b 100644
+index cf1b70c745..0dccf6cd76 100644
--- a/sysdeps/unix/sysv/linux/powerpc/powerpc64/configure
+++ b/sysdeps/unix/sysv/linux/powerpc/powerpc64/configure
-@@ -155,12 +155,12 @@ else
+@@ -168,12 +168,12 @@ else $as_nop
cat confdefs.h - <<_ACEOF >conftest.$ac_ext
/* end confdefs.h. */
#if _CALL_ELF == 2
@@ -212,12 +212,12 @@ index ae7f254da4..874519000b 100644
_ACEOF
if (eval "$ac_cpp conftest.$ac_ext") 2>&5 |
-- $EGREP "yes" >/dev/null 2>&1; then :
-+ $EGREP "use_ppc_elfv2_abi" >/dev/null 2>&1; then :
+- $EGREP "yes" >/dev/null 2>&1
++ $EGREP "use_ppc_elfv2_abi" >/dev/null 2>&1
+ then :
libc_cv_ppc64_elfv2_abi=yes
- else
- libc_cv_ppc64_elfv2_abi=no
-@@ -188,12 +188,12 @@ else
+ else $as_nop
+@@ -203,12 +203,12 @@ else $as_nop
cat confdefs.h - <<_ACEOF >conftest.$ac_ext
/* end confdefs.h. */
#ifdef _CALL_ELF
@@ -227,11 +227,11 @@ index ae7f254da4..874519000b 100644
_ACEOF
if (eval "$ac_cpp conftest.$ac_ext") 2>&5 |
-- $EGREP "yes" >/dev/null 2>&1; then :
-+ $EGREP "is_def_call_elf" >/dev/null 2>&1; then :
+- $EGREP "yes" >/dev/null 2>&1
++ $EGREP "is_def_call_elf" >/dev/null 2>&1
+ then :
libc_cv_ppc64_def_call_elf=yes
- else
- libc_cv_ppc64_def_call_elf=no
+ else $as_nop
diff --git a/sysdeps/unix/sysv/linux/powerpc/powerpc64/configure.ac b/sysdeps/unix/sysv/linux/powerpc/powerpc64/configure.ac
index f9cba6e15d..b21f72f1e4 100644
--- a/sysdeps/unix/sysv/linux/powerpc/powerpc64/configure.ac
diff --git a/meta/recipes-core/glibc/glibc/0010-eglibc-Cross-building-and-testing-instructions.patch b/meta/recipes-core/glibc/glibc/0010-eglibc-Cross-building-and-testing-instructions.patch
index 295fa315d8..d7f15c1cfa 100644
--- a/meta/recipes-core/glibc/glibc/0010-eglibc-Cross-building-and-testing-instructions.patch
+++ b/meta/recipes-core/glibc/glibc/0010-eglibc-Cross-building-and-testing-instructions.patch
@@ -1,4 +1,4 @@
-From d6300e80c7c010fa7ca33e36e826151558cec498 Mon Sep 17 00:00:00 2001
+From ce8b13bdf488058754fce573754cea0b022c37e2 Mon Sep 17 00:00:00 2001
From: Khem Raj <raj.khem@gmail.com>
Date: Wed, 18 Mar 2015 00:42:58 +0000
Subject: [PATCH] eglibc: Cross building and testing instructions
diff --git a/meta/recipes-core/glibc/glibc/0011-eglibc-Help-bootstrap-cross-toolchain.patch b/meta/recipes-core/glibc/glibc/0011-eglibc-Help-bootstrap-cross-toolchain.patch
index 9e00da894d..82a3292655 100644
--- a/meta/recipes-core/glibc/glibc/0011-eglibc-Help-bootstrap-cross-toolchain.patch
+++ b/meta/recipes-core/glibc/glibc/0011-eglibc-Help-bootstrap-cross-toolchain.patch
@@ -1,4 +1,4 @@
-From 1c8044544d2cbdc529910a3ed6eba4b0ce7ae549 Mon Sep 17 00:00:00 2001
+From 65b79161b9e2848a174d3519a03348884f182975 Mon Sep 17 00:00:00 2001
From: Khem Raj <raj.khem@gmail.com>
Date: Wed, 18 Mar 2015 00:49:28 +0000
Subject: [PATCH] eglibc: Help bootstrap cross toolchain
@@ -29,7 +29,7 @@ Signed-off-by: Khem Raj <raj.khem@gmail.com>
create mode 100644 include/stubs-bootstrap.h
diff --git a/Makefile b/Makefile
-index a49870d3d1..81673d7645 100644
+index 7052b46df8..46073abaa8 100644
--- a/Makefile
+++ b/Makefile
@@ -79,9 +79,18 @@ subdir-dirs = include
@@ -52,7 +52,7 @@ index a49870d3d1..81673d7645 100644
ifeq (yes,$(build-shared))
headers += gnu/lib-names.h
endif
-@@ -420,6 +429,16 @@ others: $(common-objpfx)testrun.sh $(common-objpfx)debugglibc.sh
+@@ -421,6 +430,16 @@ others: $(common-objpfx)testrun.sh $(common-objpfx)debugglibc.sh
subdir-stubs := $(foreach dir,$(subdirs),$(common-objpfx)$(dir)/stubs)
@@ -69,7 +69,7 @@ index a49870d3d1..81673d7645 100644
ifndef abi-variants
installed-stubs = $(inst_includedir)/gnu/stubs.h
else
-@@ -446,6 +465,7 @@ $(inst_includedir)/gnu/stubs.h: $(+force)
+@@ -447,6 +466,7 @@ $(inst_includedir)/gnu/stubs.h: $(+force)
install-others-nosubdir: $(installed-stubs)
endif
diff --git a/meta/recipes-core/glibc/glibc/0012-eglibc-Resolve-__fpscr_values-on-SH4.patch b/meta/recipes-core/glibc/glibc/0012-eglibc-Resolve-__fpscr_values-on-SH4.patch
index 03c81bfbd7..e726a9e76f 100644
--- a/meta/recipes-core/glibc/glibc/0012-eglibc-Resolve-__fpscr_values-on-SH4.patch
+++ b/meta/recipes-core/glibc/glibc/0012-eglibc-Resolve-__fpscr_values-on-SH4.patch
@@ -1,4 +1,4 @@
-From e5999ffd1b8690c2902a6406c07f51023a6bf7ec Mon Sep 17 00:00:00 2001
+From 07a5fa22a9a8bbd43982c1b35132f8c2d5276bfe Mon Sep 17 00:00:00 2001
From: Khem Raj <raj.khem@gmail.com>
Date: Wed, 18 Mar 2015 00:55:53 +0000
Subject: [PATCH] eglibc: Resolve __fpscr_values on SH4
@@ -21,7 +21,7 @@ Signed-off-by: Khem Raj <raj.khem@gmail.com>
2 files changed, 12 insertions(+)
diff --git a/sysdeps/unix/sysv/linux/sh/Versions b/sysdeps/unix/sysv/linux/sh/Versions
-index 9c734ff755..974e33b4b1 100644
+index 3ad702e165..931985b5d6 100644
--- a/sysdeps/unix/sysv/linux/sh/Versions
+++ b/sysdeps/unix/sysv/linux/sh/Versions
@@ -3,6 +3,7 @@ libc {
@@ -33,7 +33,7 @@ index 9c734ff755..974e33b4b1 100644
# a*
alphasort64;
diff --git a/sysdeps/unix/sysv/linux/sh/sysdep.S b/sysdeps/unix/sysv/linux/sh/sysdep.S
-index a18fbb2e8b..59421bfbb0 100644
+index d5d8c5e033..f356d02077 100644
--- a/sysdeps/unix/sysv/linux/sh/sysdep.S
+++ b/sysdeps/unix/sysv/linux/sh/sysdep.S
@@ -30,3 +30,14 @@ ENTRY (__syscall_error)
diff --git a/meta/recipes-core/glibc/glibc/0013-eglibc-Forward-port-cross-locale-generation-support.patch b/meta/recipes-core/glibc/glibc/0013-eglibc-Forward-port-cross-locale-generation-support.patch
index 48bb062e09..f36b7f6f4c 100644
--- a/meta/recipes-core/glibc/glibc/0013-eglibc-Forward-port-cross-locale-generation-support.patch
+++ b/meta/recipes-core/glibc/glibc/0013-eglibc-Forward-port-cross-locale-generation-support.patch
@@ -1,4 +1,4 @@
-From 99ae3189430eaa5472b2117e5a999109a6ca9251 Mon Sep 17 00:00:00 2001
+From 33e9867758e830e19d181d5a0aa7f2f3cc4a08b3 Mon Sep 17 00:00:00 2001
From: Khem Raj <raj.khem@gmail.com>
Date: Wed, 18 Mar 2015 01:33:49 +0000
Subject: [PATCH] eglibc: Forward port cross locale generation support
@@ -7,7 +7,7 @@ Upstream-Status: Pending
Signed-off-by: Khem Raj <raj.khem@gmail.com>
---
- locale/Makefile | 3 +-
+ locale/Makefile | 1 +
locale/catnames.c | 46 +++++++++++++++++++++++++++
locale/localeinfo.h | 2 +-
locale/programs/charmap-dir.c | 6 ++++
@@ -19,23 +19,21 @@ Signed-off-by: Khem Raj <raj.khem@gmail.com>
locale/programs/locfile.c | 5 ++-
locale/programs/locfile.h | 59 +++++++++++++++++++++++++++++++++--
locale/setlocale.c | 29 -----------------
- 12 files changed, 167 insertions(+), 68 deletions(-)
+ 12 files changed, 166 insertions(+), 67 deletions(-)
create mode 100644 locale/catnames.c
diff --git a/locale/Makefile b/locale/Makefile
-index b7c60681fa..07c606cde3 100644
+index 2810f28605..05f847f9a6 100644
--- a/locale/Makefile
+++ b/locale/Makefile
-@@ -26,7 +26,8 @@ headers = langinfo.h locale.h bits/locale.h \
- bits/types/locale_t.h bits/types/__locale_t.h
- routines = setlocale findlocale loadlocale loadarchive \
- localeconv nl_langinfo nl_langinfo_l mb_cur_max \
-- newlocale duplocale freelocale uselocale
-+ newlocale duplocale freelocale uselocale \
-+ catnames
- tests = tst-C-locale tst-locname tst-duplocale
- tests-container = tst-localedef-path-norm
- categories = ctype messages monetary numeric time paper name \
+@@ -30,6 +30,7 @@ headers = \
+ locale.h \
+ # headers
+ routines = \
++ catnames \
+ duplocale \
+ findlocale \
+ freelocale \
diff --git a/locale/catnames.c b/locale/catnames.c
new file mode 100644
index 0000000000..538f3f5edb
@@ -89,10 +87,10 @@ index 0000000000..538f3f5edb
+ [LC_ALL] = sizeof ("LC_ALL") - 1
+ };
diff --git a/locale/localeinfo.h b/locale/localeinfo.h
-index 22f9dc1140..fa31b3c5ea 100644
+index f7efc288a5..6ef082eb25 100644
--- a/locale/localeinfo.h
+++ b/locale/localeinfo.h
-@@ -230,7 +230,7 @@ __libc_tsd_define (extern, locale_t, LOCALE)
+@@ -246,7 +246,7 @@ __libc_tsd_define (extern, locale_t, LOCALE)
unused. We can manage this playing some tricks with weak references.
But with thread-local locale settings, it becomes quite ungainly unless
we can use __thread variables. So only in that case do we attempt this. */
@@ -102,7 +100,7 @@ index 22f9dc1140..fa31b3c5ea 100644
# define NL_CURRENT_INDIRECT 1
#endif
diff --git a/locale/programs/charmap-dir.c b/locale/programs/charmap-dir.c
-index 4841bfd05d..ffcba1fd79 100644
+index 36504f238d..56ee97e61b 100644
--- a/locale/programs/charmap-dir.c
+++ b/locale/programs/charmap-dir.c
@@ -18,7 +18,9 @@
@@ -148,7 +146,7 @@ index 4841bfd05d..ffcba1fd79 100644
return NULL;
}
diff --git a/locale/programs/ld-collate.c b/locale/programs/ld-collate.c
-index 06a5203334..84b3ff4166 100644
+index 5048adbd9f..4232834ead 100644
--- a/locale/programs/ld-collate.c
+++ b/locale/programs/ld-collate.c
@@ -352,7 +352,7 @@ new_element (struct locale_collate_t *collate, const char *mbs, size_t mbslen,
@@ -160,7 +158,7 @@ index 06a5203334..84b3ff4166 100644
uint32_t zero = 0;
/* Handle <U0000> as a single character. */
if (nwcs == 0)
-@@ -1783,8 +1783,7 @@ symbol `%s' has the same encoding as"), (*eptr)->name);
+@@ -1776,8 +1776,7 @@ symbol `%s' has the same encoding as"), (*eptr)->name);
if ((*eptr)->nwcs == runp->nwcs)
{
@@ -170,7 +168,7 @@ index 06a5203334..84b3ff4166 100644
if (c == 0)
{
-@@ -2011,9 +2010,9 @@ add_to_tablewc (uint32_t ch, struct element_t *runp)
+@@ -2004,9 +2003,9 @@ add_to_tablewc (uint32_t ch, struct element_t *runp)
one consecutive entry. */
if (runp->wcnext != NULL
&& runp->nwcs == runp->wcnext->nwcs
@@ -183,7 +181,7 @@ index 06a5203334..84b3ff4166 100644
&& (runp->wcs[runp->nwcs - 1]
== runp->wcnext->wcs[runp->nwcs - 1] + 1))
{
-@@ -2037,9 +2036,9 @@ add_to_tablewc (uint32_t ch, struct element_t *runp)
+@@ -2030,9 +2029,9 @@ add_to_tablewc (uint32_t ch, struct element_t *runp)
runp = runp->wcnext;
while (runp->wcnext != NULL
&& runp->nwcs == runp->wcnext->nwcs
@@ -197,7 +195,7 @@ index 06a5203334..84b3ff4166 100644
== runp->wcnext->wcs[runp->nwcs - 1] + 1));
diff --git a/locale/programs/ld-ctype.c b/locale/programs/ld-ctype.c
-index 07b64ac5a1..70b49ab733 100644
+index eb6e7e145c..14736d1cac 100644
--- a/locale/programs/ld-ctype.c
+++ b/locale/programs/ld-ctype.c
@@ -914,7 +914,7 @@ ctype_output (struct localedef_t *locale, const struct charmap_t *charmap,
@@ -229,7 +227,7 @@ index 07b64ac5a1..70b49ab733 100644
handle_digits = 1;
goto read_charclass;
-@@ -3903,8 +3903,7 @@ allocate_arrays (struct locale_ctype_t *ctype, const struct charmap_t *charmap,
+@@ -3876,8 +3876,7 @@ allocate_arrays (struct locale_ctype_t *ctype, const struct charmap_t *charmap,
while (idx < number)
{
@@ -239,7 +237,7 @@ index 07b64ac5a1..70b49ab733 100644
if (res == 0)
{
replace = 1;
-@@ -3941,11 +3940,11 @@ allocate_arrays (struct locale_ctype_t *ctype, const struct charmap_t *charmap,
+@@ -3914,11 +3913,11 @@ allocate_arrays (struct locale_ctype_t *ctype, const struct charmap_t *charmap,
for (size_t cnt = 0; cnt < number; ++cnt)
{
struct translit_to_t *srunp;
@@ -253,7 +251,7 @@ index 07b64ac5a1..70b49ab733 100644
srunp = srunp->next;
}
/* Plus one for the extra NUL character marking the end of
-@@ -3969,18 +3968,18 @@ allocate_arrays (struct locale_ctype_t *ctype, const struct charmap_t *charmap,
+@@ -3942,18 +3941,18 @@ allocate_arrays (struct locale_ctype_t *ctype, const struct charmap_t *charmap,
ctype->translit_from_idx[cnt] = from_len;
ctype->translit_to_idx[cnt] = to_len;
@@ -279,7 +277,7 @@ index 07b64ac5a1..70b49ab733 100644
srunp = srunp->next;
}
diff --git a/locale/programs/ld-time.c b/locale/programs/ld-time.c
-index e6f320d2b3..c6631ad101 100644
+index 1abff3cf53..8a2f2b820a 100644
--- a/locale/programs/ld-time.c
+++ b/locale/programs/ld-time.c
@@ -219,8 +219,10 @@ No definition for %s category found"), "LC_TIME");
@@ -348,20 +346,20 @@ index e6f320d2b3..c6631ad101 100644
diff --git a/locale/programs/linereader.c b/locale/programs/linereader.c
-index a1f22b28ed..cbd3b40ceb 100644
+index 61373d2657..7ec5726377 100644
--- a/locale/programs/linereader.c
+++ b/locale/programs/linereader.c
-@@ -594,7 +594,7 @@ get_string (struct linereader *lr, const struct charmap_t *charmap,
+@@ -776,7 +776,7 @@ get_string (struct linereader *lr, const struct charmap_t *charmap,
{
int return_widestr = lr->return_widestr;
- char *buf;
+ struct lr_buffer lrb;
- wchar_t *buf2 = NULL;
+ uint32_t *buf2 = NULL;
- size_t bufact;
- size_t bufmax = 56;
+
+ lr_buffer_init (&lrb);
diff --git a/locale/programs/localedef.c b/locale/programs/localedef.c
-index f0da25e9e5..5d9e01cda2 100644
+index 907bb5fb25..3106529043 100644
--- a/locale/programs/localedef.c
+++ b/locale/programs/localedef.c
@@ -108,6 +108,7 @@ void (*argp_program_version_hook) (FILE *, struct argp_state *) = print_version;
@@ -407,7 +405,7 @@ index f0da25e9e5..5d9e01cda2 100644
force_output = 1;
break;
diff --git a/locale/programs/locfile.c b/locale/programs/locfile.c
-index 1427b518a9..dafa84a20b 100644
+index 6c6ebf2dd6..cc02ab82bf 100644
--- a/locale/programs/locfile.c
+++ b/locale/programs/locfile.c
@@ -543,6 +543,9 @@ compare_files (const char *filename1, const char *filename2, size_t size,
@@ -430,7 +428,7 @@ index 1427b518a9..dafa84a20b 100644
/* Record that FILE's next element is the 32-bit integer VALUE. */
diff --git a/locale/programs/locfile.h b/locale/programs/locfile.h
-index cbc20fe88d..ae88e6d0af 100644
+index 3afb0a8d29..46785374e8 100644
--- a/locale/programs/locfile.h
+++ b/locale/programs/locfile.h
@@ -70,6 +70,8 @@ extern void write_all_categories (struct localedef_t *definitions,
@@ -519,7 +517,7 @@ index cbc20fe88d..ae88e6d0af 100644
+
#endif /* locfile.h */
diff --git a/locale/setlocale.c b/locale/setlocale.c
-index 19ed85ae8e..f28ca11446 100644
+index 7bd27e5398..2f194bad7c 100644
--- a/locale/setlocale.c
+++ b/locale/setlocale.c
@@ -63,35 +63,6 @@ static char *const _nl_current_used[] =
diff --git a/meta/recipes-core/glibc/glibc/0014-localedef-add-to-archive-uses-a-hard-coded-locale-pa.patch b/meta/recipes-core/glibc/glibc/0014-localedef-add-to-archive-uses-a-hard-coded-locale-pa.patch
index eae1ee8907..daeff7b065 100644
--- a/meta/recipes-core/glibc/glibc/0014-localedef-add-to-archive-uses-a-hard-coded-locale-pa.patch
+++ b/meta/recipes-core/glibc/glibc/0014-localedef-add-to-archive-uses-a-hard-coded-locale-pa.patch
@@ -1,4 +1,4 @@
-From 32c2e23ad29f63f57f544daf1a59259147cd1008 Mon Sep 17 00:00:00 2001
+From f4b1d6429298c0f8a2aa29ff559eb2093ea0188f Mon Sep 17 00:00:00 2001
From: Khem Raj <raj.khem@gmail.com>
Date: Fri, 3 Aug 2018 09:42:06 -0700
Subject: [PATCH] localedef --add-to-archive uses a hard-coded locale path
@@ -18,7 +18,7 @@ Signed-off-by: Khem Raj <raj.khem@gmail.com>
1 file changed, 25 insertions(+), 10 deletions(-)
diff --git a/locale/programs/locarchive.c b/locale/programs/locarchive.c
-index 477499bd40..fe7b5ff60c 100644
+index 8d79a1b6d1..6dc7ecd4e7 100644
--- a/locale/programs/locarchive.c
+++ b/locale/programs/locarchive.c
@@ -339,12 +339,24 @@ enlarge_archive (struct locarhandle *ah, const struct locarhead *head)
diff --git a/meta/recipes-core/glibc/glibc/0015-powerpc-Do-not-ask-compiler-for-finding-arch.patch b/meta/recipes-core/glibc/glibc/0015-powerpc-Do-not-ask-compiler-for-finding-arch.patch
new file mode 100644
index 0000000000..8dd1f0a656
--- /dev/null
+++ b/meta/recipes-core/glibc/glibc/0015-powerpc-Do-not-ask-compiler-for-finding-arch.patch
@@ -0,0 +1,48 @@
+From 2d064c0c1243ea0bf405909285d7cddca92cf097 Mon Sep 17 00:00:00 2001
+From: Khem Raj <raj.khem@gmail.com>
+Date: Fri, 7 Aug 2020 14:31:16 -0700
+Subject: [PATCH] powerpc: Do not ask compiler for finding arch
+
+This does not work well in cross compiling environments like OE
+and moreover it uses its own -mcpu/-march options via cflags
+
+Upstream-Status: Inappropriate [ OE-Specific]
+
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+---
+ sysdeps/powerpc/preconfigure | 5 +----
+ sysdeps/powerpc/preconfigure.ac | 5 +----
+ 2 files changed, 2 insertions(+), 8 deletions(-)
+
+diff --git a/sysdeps/powerpc/preconfigure b/sysdeps/powerpc/preconfigure
+index 4de94089a3..428ad7b162 100644
+--- a/sysdeps/powerpc/preconfigure
++++ b/sysdeps/powerpc/preconfigure
+@@ -29,10 +29,7 @@ esac
+ # directive which shows up, and try using it.
+ case "${machine}:${submachine}" in
+ *powerpc*:)
+- archcpu=`echo "int foo () { return 0; }" \
+- | $CC $CFLAGS $CPPFLAGS -S -frecord-gcc-switches -xc -o - - \
+- | grep -E "mcpu=|.machine" -m 1 \
+- | sed -e "s/.*machine //" -e "s/.*mcpu=\(.*\)\"/\1/"`
++ archcpu=''
+ # Note if you add patterns here you must ensure that an appropriate
+ # directory exists in sysdeps/powerpc. Likewise, if we find a
+ # cpu, don't let the generic configure append extra compiler options.
+diff --git a/sysdeps/powerpc/preconfigure.ac b/sysdeps/powerpc/preconfigure.ac
+index 6c63bd8257..3e925f1d48 100644
+--- a/sysdeps/powerpc/preconfigure.ac
++++ b/sysdeps/powerpc/preconfigure.ac
+@@ -29,10 +29,7 @@ esac
+ # directive which shows up, and try using it.
+ case "${machine}:${submachine}" in
+ *powerpc*:)
+- archcpu=`echo "int foo () { return 0; }" \
+- | $CC $CFLAGS $CPPFLAGS -S -frecord-gcc-switches -xc -o - - \
+- | grep -E "mcpu=|[.]machine" -m 1 \
+- | sed -e "s/.*machine //" -e "s/.*mcpu=\(.*\)\"/\1/"`
++ archcpu=''
+ # Note if you add patterns here you must ensure that an appropriate
+ # directory exists in sysdeps/powerpc. Likewise, if we find a
+ # cpu, don't let the generic configure append extra compiler options.
diff --git a/meta/recipes-core/glibc/glibc/0016-locale-prevent-maybe-uninitialized-errors-with-Os-BZ.patch b/meta/recipes-core/glibc/glibc/0016-locale-prevent-maybe-uninitialized-errors-with-Os-BZ.patch
deleted file mode 100644
index 4e51036ce5..0000000000
--- a/meta/recipes-core/glibc/glibc/0016-locale-prevent-maybe-uninitialized-errors-with-Os-BZ.patch
+++ /dev/null
@@ -1,53 +0,0 @@
-From c59bc6eb421ad3310c43951a11d2561bbf34e95e Mon Sep 17 00:00:00 2001
-From: Martin Jansa <martin.jansa@gmail.com>
-Date: Mon, 17 Dec 2018 21:36:18 +0000
-Subject: [PATCH] locale: prevent maybe-uninitialized errors with -Os [BZ
- #19444]
-
-Fixes following error when building for aarch64 with -Os:
-| In file included from strcoll_l.c:43:
-| strcoll_l.c: In function '__strcoll_l':
-| ../locale/weight.h:31:26: error: 'seq2.back_us' may be used uninitialized in this function [-Werror=maybe-uninitialized]
-| int_fast32_t i = table[*(*cpp)++];
-| ^~~~~~~~~
-| strcoll_l.c:304:18: note: 'seq2.back_us' was declared here
-| coll_seq seq1, seq2;
-| ^~~~
-| In file included from strcoll_l.c:43:
-| ../locale/weight.h:31:26: error: 'seq1.back_us' may be used uninitialized in this function [-Werror=maybe-uninitialized]
-| int_fast32_t i = table[*(*cpp)++];
-| ^~~~~~~~~
-| strcoll_l.c:304:12: note: 'seq1.back_us' was declared here
-| coll_seq seq1, seq2;
-| ^~~~
-
- Partial fix for [BZ #19444]
- * locale/weight.h: Fix build with -Os.
-
-Upstream-Status: Submitted [https://patchwork.ozlabs.org/patch/1014766]
-
-Signed-off-by: Martin Jansa <Martin.Jansa@gmail.com>
-Signed-off-by: Khem Raj <raj.khem@gmail.com>
----
- locale/weight.h | 7 +++++++
- 1 file changed, 7 insertions(+)
-
-diff --git a/locale/weight.h b/locale/weight.h
-index 076529c0ba..2ac83657f7 100644
---- a/locale/weight.h
-+++ b/locale/weight.h
-@@ -27,7 +27,14 @@ findidx (const int32_t *table,
- const unsigned char *extra,
- const unsigned char **cpp, size_t len)
- {
-+ /* With GCC 8 when compiling with -Os the compiler warns that
-+ seq1.back_us and seq2.back_us might be used uninitialized.
-+ This uninitialized use is impossible for the same reason
-+ as described in comments in locale/weightwc.h. */
-+ DIAG_PUSH_NEEDS_COMMENT;
-+ DIAG_IGNORE_Os_NEEDS_COMMENT (8, "-Wmaybe-uninitialized");
- int_fast32_t i = table[*(*cpp)++];
-+ DIAG_POP_NEEDS_COMMENT;
- const unsigned char *cp;
- const unsigned char *usrc;
-
diff --git a/meta/recipes-core/glibc/glibc/0016-wordsize.h-Unify-the-header-between-arm-and-aarch64.patch b/meta/recipes-core/glibc/glibc/0016-wordsize.h-Unify-the-header-between-arm-and-aarch64.patch
new file mode 100644
index 0000000000..066c3b1ea2
--- /dev/null
+++ b/meta/recipes-core/glibc/glibc/0016-wordsize.h-Unify-the-header-between-arm-and-aarch64.patch
@@ -0,0 +1,49 @@
+From b1c374f7ede81a98f2d02def2c7ca17f1001f7cb Mon Sep 17 00:00:00 2001
+From: Khem Raj <raj.khem@gmail.com>
+Date: Fri, 15 May 2020 17:05:45 -0700
+Subject: [PATCH] wordsize.h: Unify the header between arm and aarch64
+
+This helps OE multilibs to not sythesize this header which causes all
+kind of recursions and other issues since wordsize is fundamental header
+and ends up including itself in many case e.g. clang tidy, bpf etc.
+
+Upstream-Status: Inappropriate [ OE-Specific ]
+
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+---
+ sysdeps/aarch64/bits/wordsize.h | 8 ++++++--
+ sysdeps/arm/bits/wordsize.h | 1 +
+ 2 files changed, 7 insertions(+), 2 deletions(-)
+ create mode 120000 sysdeps/arm/bits/wordsize.h
+
+diff --git a/sysdeps/aarch64/bits/wordsize.h b/sysdeps/aarch64/bits/wordsize.h
+index 118e59172d..b4b0692eb5 100644
+--- a/sysdeps/aarch64/bits/wordsize.h
++++ b/sysdeps/aarch64/bits/wordsize.h
+@@ -17,12 +17,16 @@
+ License along with the GNU C Library; if not, see
+ <https://www.gnu.org/licenses/>. */
+
+-#ifdef __LP64__
++#if defined (__aarch64__) && defined (__LP64__)
+ # define __WORDSIZE 64
+-#else
++#elif defined (__aarch64__)
+ # define __WORDSIZE 32
+ # define __WORDSIZE32_SIZE_ULONG 1
+ # define __WORDSIZE32_PTRDIFF_LONG 1
++#else
++# define __WORDSIZE 32
++# define __WORDSIZE32_SIZE_ULONG 0
++# define __WORDSIZE32_PTRDIFF_LONG 0
+ #endif
+
+ #define __WORDSIZE_TIME64_COMPAT32 0
+diff --git a/sysdeps/arm/bits/wordsize.h b/sysdeps/arm/bits/wordsize.h
+new file mode 120000
+index 0000000000..4c4a788ec2
+--- /dev/null
++++ b/sysdeps/arm/bits/wordsize.h
+@@ -0,0 +1 @@
++../../aarch64/bits/wordsize.h
+\ No newline at end of file
diff --git a/meta/recipes-core/glibc/glibc/0017-Replace-echo-with-printf-builtin-in-nscd-init-script.patch b/meta/recipes-core/glibc/glibc/0017-Replace-echo-with-printf-builtin-in-nscd-init-script.patch
new file mode 100644
index 0000000000..d8c0b1b4ad
--- /dev/null
+++ b/meta/recipes-core/glibc/glibc/0017-Replace-echo-with-printf-builtin-in-nscd-init-script.patch
@@ -0,0 +1,79 @@
+From 0aac3b8ee5b13e289b6969da51de384443286a5b Mon Sep 17 00:00:00 2001
+From: Khem Raj <raj.khem@gmail.com>
+Date: Thu, 31 Dec 2015 14:33:02 -0800
+Subject: [PATCH] Replace echo with printf builtin in nscd init script
+
+The nscd init script calls for #! /bin/bash interpreter
+since it uses bash specific extentions namely (translated strings)
+and echo -n command, replace echo with printf and
+switch the shell interpreter to #!/bin/sh.
+
+Upstream-Status: Pending
+Signed-off-by: Ross Burton <ross.burton@arm.com>
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+---
+ nscd/nscd.init | 20 ++++++++++----------
+ 1 file changed, 10 insertions(+), 10 deletions(-)
+
+diff --git a/nscd/nscd.init b/nscd/nscd.init
+index a882da7d8b..857b541381 100644
+--- a/nscd/nscd.init
++++ b/nscd/nscd.init
+@@ -1,4 +1,4 @@
+-#!/bin/bash
++#!/bin/sh
+ #
+ # nscd: Starts the Name Switch Cache Daemon
+ #
+@@ -49,16 +49,16 @@ prog=nscd
+ start () {
+ [ -d /var/run/nscd ] || mkdir /var/run/nscd
+ [ -d /var/db/nscd ] || mkdir /var/db/nscd
+- echo -n $"Starting $prog: "
++ printf "Starting $prog: "
+ daemon /usr/sbin/nscd
+ RETVAL=$?
+- echo
++ printf "\n"
+ [ $RETVAL -eq 0 ] && touch /var/lock/subsys/nscd
+ return $RETVAL
+ }
+
+ stop () {
+- echo -n $"Stopping $prog: "
++ printf "Stopping $prog: "
+ /usr/sbin/nscd -K
+ RETVAL=$?
+ if [ $RETVAL -eq 0 ]; then
+@@ -67,11 +67,11 @@ stop () {
+ # a non-privileged user
+ rm -f /var/run/nscd/nscd.pid
+ rm -f /var/run/nscd/socket
+- success $"$prog shutdown"
++ success "$prog shutdown"
+ else
+- failure $"$prog shutdown"
++ failure "$prog shutdown"
+ fi
+- echo
++ printf "\n"
+ return $RETVAL
+ }
+
+@@ -103,13 +103,13 @@ case "$1" in
+ RETVAL=$?
+ ;;
+ force-reload | reload)
+- echo -n $"Reloading $prog: "
++ printf "Reloading $prog: "
+ killproc /usr/sbin/nscd -HUP
+ RETVAL=$?
+- echo
++ printf "\n"
+ ;;
+ *)
+- echo $"Usage: $0 {start|stop|status|restart|reload|condrestart}"
++ printf "Usage: $0 {start|stop|status|restart|reload|condrestart}\n"
+ RETVAL=1
+ ;;
+ esac
diff --git a/meta/recipes-core/glibc/glibc/0017-readlib-Add-OECORE_KNOWN_INTERPRETER_NAMES-to-known-.patch b/meta/recipes-core/glibc/glibc/0017-readlib-Add-OECORE_KNOWN_INTERPRETER_NAMES-to-known-.patch
deleted file mode 100644
index 77a2bab87d..0000000000
--- a/meta/recipes-core/glibc/glibc/0017-readlib-Add-OECORE_KNOWN_INTERPRETER_NAMES-to-known-.patch
+++ /dev/null
@@ -1,29 +0,0 @@
-From 9f4fcec5662bfa6f8aa6a36dda6f4c05f6e30e51 Mon Sep 17 00:00:00 2001
-From: Khem Raj <raj.khem@gmail.com>
-Date: Wed, 18 Mar 2015 00:11:22 +0000
-Subject: [PATCH] readlib: Add OECORE_KNOWN_INTERPRETER_NAMES to known names
-
-This bolts in a hook for OE to pass its own version of interpreter
-names into glibc especially for multilib case, where it differs from any
-other distros
-
-Upstream-Status: Inappropriate [OE specific]
-
-Signed-off-by: Lianhao Lu <lianhao.lu@intel.com>
-Signed-off-by: Khem Raj <raj.khem@gmail.com>
----
- elf/readlib.c | 1 +
- 1 file changed, 1 insertion(+)
-
-diff --git a/elf/readlib.c b/elf/readlib.c
-index 64b20d7804..50318158fb 100644
---- a/elf/readlib.c
-+++ b/elf/readlib.c
-@@ -49,6 +49,7 @@ static struct known_names interpreters[] =
- #ifdef SYSDEP_KNOWN_INTERPRETER_NAMES
- SYSDEP_KNOWN_INTERPRETER_NAMES
- #endif
-+ OECORE_KNOWN_INTERPRETER_NAMES
- };
-
- static struct known_names known_libs[] =
diff --git a/meta/recipes-core/glibc/glibc/0018-sysdeps-gnu-configure.ac-Set-libc_cv_rootsbindir-onl.patch b/meta/recipes-core/glibc/glibc/0018-sysdeps-gnu-configure.ac-Set-libc_cv_rootsbindir-onl.patch
new file mode 100644
index 0000000000..c81f82f433
--- /dev/null
+++ b/meta/recipes-core/glibc/glibc/0018-sysdeps-gnu-configure.ac-Set-libc_cv_rootsbindir-onl.patch
@@ -0,0 +1,41 @@
+From 7a25d4796411f22f824742092a4c2a08df99752d Mon Sep 17 00:00:00 2001
+From: Khem Raj <raj.khem@gmail.com>
+Date: Wed, 18 Mar 2015 00:27:10 +0000
+Subject: [PATCH] sysdeps/gnu/configure.ac: Set libc_cv_rootsbindir only if its
+ empty
+
+This ensures that it can be set in build environment
+
+Upstream-Status: Pending
+Signed-off-by: Matthieu Crapet <Matthieu.Crapet@ingenico.com>
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+---
+ sysdeps/gnu/configure | 2 +-
+ sysdeps/gnu/configure.ac | 2 +-
+ 2 files changed, 2 insertions(+), 2 deletions(-)
+
+diff --git a/sysdeps/gnu/configure b/sysdeps/gnu/configure
+index 18c9d7945a..ef22cc845d 100644
+--- a/sysdeps/gnu/configure
++++ b/sysdeps/gnu/configure
+@@ -32,7 +32,7 @@ case "$prefix" in
+ else
+ libc_cv_localstatedir=$localstatedir
+ fi
+- libc_cv_rootsbindir=/sbin
++ libc_cv_rootsbindir=${libc_cv_rootsbindir:=/sbin}
+ ;;
+ esac
+
+diff --git a/sysdeps/gnu/configure.ac b/sysdeps/gnu/configure.ac
+index 634fe4de2a..492112e0fd 100644
+--- a/sysdeps/gnu/configure.ac
++++ b/sysdeps/gnu/configure.ac
+@@ -21,6 +21,6 @@ case "$prefix" in
+ else
+ libc_cv_localstatedir=$localstatedir
+ fi
+- libc_cv_rootsbindir=/sbin
++ libc_cv_rootsbindir=${libc_cv_rootsbindir:=/sbin}
+ ;;
+ esac
diff --git a/meta/recipes-core/glibc/glibc/0018-wordsize.h-Unify-the-header-between-arm-and-aarch64.patch b/meta/recipes-core/glibc/glibc/0018-wordsize.h-Unify-the-header-between-arm-and-aarch64.patch
deleted file mode 100644
index 3b2d638b5f..0000000000
--- a/meta/recipes-core/glibc/glibc/0018-wordsize.h-Unify-the-header-between-arm-and-aarch64.patch
+++ /dev/null
@@ -1,70 +0,0 @@
-From 4d6bce6b106d9d9a629aadba74d74cd8a500ccbf Mon Sep 17 00:00:00 2001
-From: Khem Raj <raj.khem@gmail.com>
-Date: Fri, 15 May 2020 17:05:45 -0700
-Subject: [PATCH 18/24] wordsize.h: Unify the header between arm and aarch64
-
-This helps OE multilibs to not sythesize this header which causes all
-kind of recursions and other issues since wordsize is fundamental header
-and ends up including itself in many case e.g. clang tidy, bpf etc.
-
-Upstream-Status: Inappropriate [ OE-Specific ]
-
-Signed-off-by: Khem Raj <raj.khem@gmail.com>
----
- sysdeps/aarch64/bits/wordsize.h | 8 ++++++--
- sysdeps/{aarch64 => arm}/bits/wordsize.h | 10 +++++++---
- 2 files changed, 13 insertions(+), 5 deletions(-)
- copy sysdeps/{aarch64 => arm}/bits/wordsize.h (80%)
-
-diff --git a/sysdeps/aarch64/bits/wordsize.h b/sysdeps/aarch64/bits/wordsize.h
-index 4635431f0e..5ef0ed21f3 100644
---- a/sysdeps/aarch64/bits/wordsize.h
-+++ b/sysdeps/aarch64/bits/wordsize.h
-@@ -17,12 +17,16 @@
- License along with the GNU C Library; if not, see
- <https://www.gnu.org/licenses/>. */
-
--#ifdef __LP64__
-+#if defined (__aarch64__) && defined (__LP64__)
- # define __WORDSIZE 64
--#else
-+#elif defined (__aarch64__)
- # define __WORDSIZE 32
- # define __WORDSIZE32_SIZE_ULONG 1
- # define __WORDSIZE32_PTRDIFF_LONG 1
-+#else
-+# define __WORDSIZE 32
-+# define __WORDSIZE32_SIZE_ULONG 0
-+# define __WORDSIZE32_PTRDIFF_LONG 0
- #endif
-
- #define __WORDSIZE_TIME64_COMPAT32 0
-diff --git a/sysdeps/aarch64/bits/wordsize.h b/sysdeps/arm/bits/wordsize.h
-similarity index 80%
-copy from sysdeps/aarch64/bits/wordsize.h
-copy to sysdeps/arm/bits/wordsize.h
-index 4635431f0e..34fcdef1f1 100644
---- a/sysdeps/aarch64/bits/wordsize.h
-+++ b/sysdeps/arm/bits/wordsize.h
-@@ -17,12 +17,16 @@
- License along with the GNU C Library; if not, see
- <https://www.gnu.org/licenses/>. */
-
--#ifdef __LP64__
-+#if defined (__aarch64__) && defined (__LP64__)
- # define __WORDSIZE 64
--#else
-+#elif defined (__aarch64__)
- # define __WORDSIZE 32
- # define __WORDSIZE32_SIZE_ULONG 1
- # define __WORDSIZE32_PTRDIFF_LONG 1
-+#else
-+# define __WORDSIZE 32
-+# define __WORDSIZE32_SIZE_ULONG 0
-+# define __WORDSIZE32_PTRDIFF_LONG 0
- #endif
-
- #define __WORDSIZE_TIME64_COMPAT32 0
---
-2.34.1
-
diff --git a/meta/recipes-core/glibc/glibc/0019-powerpc-Do-not-ask-compiler-for-finding-arch.patch b/meta/recipes-core/glibc/glibc/0019-powerpc-Do-not-ask-compiler-for-finding-arch.patch
deleted file mode 100644
index 4313c6860f..0000000000
--- a/meta/recipes-core/glibc/glibc/0019-powerpc-Do-not-ask-compiler-for-finding-arch.patch
+++ /dev/null
@@ -1,48 +0,0 @@
-From eb44466ec976d800bb697b10775efa28f22ec216 Mon Sep 17 00:00:00 2001
-From: Khem Raj <raj.khem@gmail.com>
-Date: Fri, 7 Aug 2020 14:31:16 -0700
-Subject: [PATCH] powerpc: Do not ask compiler for finding arch
-
-This does not work well in cross compiling environments like OE
-and moreover it uses its own -mcpu/-march options via cflags
-
-Upstream-Status: Inappropriate [ OE-Specific]
-
-Signed-off-by: Khem Raj <raj.khem@gmail.com>
----
- sysdeps/powerpc/preconfigure | 5 +----
- sysdeps/powerpc/preconfigure.ac | 5 +----
- 2 files changed, 2 insertions(+), 8 deletions(-)
-
-diff --git a/sysdeps/powerpc/preconfigure b/sysdeps/powerpc/preconfigure
-index dfe8e20399..bbff040f0f 100644
---- a/sysdeps/powerpc/preconfigure
-+++ b/sysdeps/powerpc/preconfigure
-@@ -29,10 +29,7 @@ esac
- # directive which shows up, and try using it.
- case "${machine}:${submachine}" in
- *powerpc*:)
-- archcpu=`echo "int foo () { return 0; }" \
-- | $CC $CFLAGS $CPPFLAGS -S -frecord-gcc-switches -xc -o - - \
-- | grep -E "mcpu=|.machine" -m 1 \
-- | sed -e "s/.*machine //" -e "s/.*mcpu=\(.*\)\"/\1/"`
-+ archcpu=''
- # Note if you add patterns here you must ensure that an appropriate
- # directory exists in sysdeps/powerpc. Likewise, if we find a
- # cpu, don't let the generic configure append extra compiler options.
-diff --git a/sysdeps/powerpc/preconfigure.ac b/sysdeps/powerpc/preconfigure.ac
-index 6c63bd8257..3e925f1d48 100644
---- a/sysdeps/powerpc/preconfigure.ac
-+++ b/sysdeps/powerpc/preconfigure.ac
-@@ -29,10 +29,7 @@ esac
- # directive which shows up, and try using it.
- case "${machine}:${submachine}" in
- *powerpc*:)
-- archcpu=`echo "int foo () { return 0; }" \
-- | $CC $CFLAGS $CPPFLAGS -S -frecord-gcc-switches -xc -o - - \
-- | grep -E "mcpu=|[.]machine" -m 1 \
-- | sed -e "s/.*machine //" -e "s/.*mcpu=\(.*\)\"/\1/"`
-+ archcpu=''
- # Note if you add patterns here you must ensure that an appropriate
- # directory exists in sysdeps/powerpc. Likewise, if we find a
- # cpu, don't let the generic configure append extra compiler options.
diff --git a/meta/recipes-core/glibc/glibc/0019-timezone-Make-shell-interpreter-overridable-in-tzsel.patch b/meta/recipes-core/glibc/glibc/0019-timezone-Make-shell-interpreter-overridable-in-tzsel.patch
new file mode 100644
index 0000000000..d724c3e968
--- /dev/null
+++ b/meta/recipes-core/glibc/glibc/0019-timezone-Make-shell-interpreter-overridable-in-tzsel.patch
@@ -0,0 +1,47 @@
+From 6aa1b835d95482287851e02abd3a406cbd0ef8c7 Mon Sep 17 00:00:00 2001
+From: Khem Raj <raj.khem@gmail.com>
+Date: Thu, 9 Dec 2021 15:14:42 -0800
+Subject: [PATCH] timezone: Make shell interpreter overridable in tzselect.ksh
+
+define new macro called KSHELL which can be used to define default shell
+use Bash by default
+
+Upstream-Status: Pending
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+---
+ Makeconfig | 9 +++++++++
+ timezone/Makefile | 1 +
+ 2 files changed, 10 insertions(+)
+
+diff --git a/Makeconfig b/Makeconfig
+index 85e00cef94..643724108a 100644
+--- a/Makeconfig
++++ b/Makeconfig
+@@ -309,6 +309,15 @@ ifndef sysincludedir
+ sysincludedir = /usr/include
+ endif
+
++# The full path name of a Posix-compliant shell, preferably one that supports
++# the Korn shell's 'select' statement as an extension.
++# These days, Bash is the most popular.
++# It should be OK to set this to /bin/sh, on platforms where /bin/sh
++# lacks 'select' or doesn't completely conform to Posix, but /bin/bash
++# is typically nicer if it works.
++ifndef KSHELL
++KSHELL = /bin/bash
++endif
+
+ # Commands to install files.
+ ifndef INSTALL_DATA
+diff --git a/timezone/Makefile b/timezone/Makefile
+index d7acb387ba..ec79326f66 100644
+--- a/timezone/Makefile
++++ b/timezone/Makefile
+@@ -136,6 +136,7 @@ $(objpfx)tzselect: tzselect.ksh $(common-objpfx)config.make
+ -e '/TZVERSION=/s|see_Makefile|"$(version)"|' \
+ -e '/PKGVERSION=/s|=.*|="$(PKGVERSION)"|' \
+ -e '/REPORT_BUGS_TO=/s|=.*|="$(REPORT_BUGS_TO)"|' \
++ -e 's|#!/bin/bash|#!$(KSHELL)|g' \
+ < $< > $@.new
+ chmod 555 $@.new
+ mv -f $@.new $@
diff --git a/meta/recipes-core/glibc/glibc/0020-tzselect.ksh-Use-bin-sh-default-shell-interpreter.patch b/meta/recipes-core/glibc/glibc/0020-tzselect.ksh-Use-bin-sh-default-shell-interpreter.patch
new file mode 100644
index 0000000000..08c60a271b
--- /dev/null
+++ b/meta/recipes-core/glibc/glibc/0020-tzselect.ksh-Use-bin-sh-default-shell-interpreter.patch
@@ -0,0 +1,27 @@
+From ef47e6199986c4951e681ed74f064042db1ae2e1 Mon Sep 17 00:00:00 2001
+From: Khem Raj <raj.khem@gmail.com>
+Date: Wed, 15 Dec 2021 21:47:53 -0800
+Subject: [PATCH] tzselect.ksh: Use /bin/sh default shell interpreter
+
+checkbashism reports no issues with tzselect.ksh, therefore using
+/bin/sh instead of /bin/bash should be safe and portable across systems
+which don't ship bash ( embedded systems )
+
+Upstream-Status: Pending
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+Cc: Adhemerval Zanella <adhemerval.zanella@linaro.org>
+Cc: Paul Eggert <eggert@cs.ucla.edu>
+---
+ timezone/tzselect.ksh | 2 +-
+ 1 file changed, 1 insertion(+), 1 deletion(-)
+
+diff --git a/timezone/tzselect.ksh b/timezone/tzselect.ksh
+index 18fce27e24..cc08efb0fb 100755
+--- a/timezone/tzselect.ksh
++++ b/timezone/tzselect.ksh
+@@ -1,4 +1,4 @@
+-#!/bin/bash
++#!/bin/sh
+ # Ask the user about the time zone, and output the resulting TZ value to stdout.
+ # Interact with the user via stderr and stdin.
+
diff --git a/meta/recipes-core/glibc/glibc/0021-Replace-echo-with-printf-builtin-in-nscd-init-script.patch b/meta/recipes-core/glibc/glibc/0021-Replace-echo-with-printf-builtin-in-nscd-init-script.patch
deleted file mode 100644
index 42c498bbc8..0000000000
--- a/meta/recipes-core/glibc/glibc/0021-Replace-echo-with-printf-builtin-in-nscd-init-script.patch
+++ /dev/null
@@ -1,79 +0,0 @@
-From 77fbd98f551d5b2cd338aa7f524e5ed980edb65e Mon Sep 17 00:00:00 2001
-From: Khem Raj <raj.khem@gmail.com>
-Date: Thu, 31 Dec 2015 14:33:02 -0800
-Subject: [PATCH] Replace echo with printf builtin in nscd init script
-
-The nscd init script calls for #! /bin/bash interpreter
-since it uses bash specific extentions namely (translated strings)
-and echo -n command, replace echo with printf and
-switch the shell interpreter to #!/bin/sh.
-
-Upstream-Status: Submitted [https://patchwork.sourceware.org/project/glibc/patch/20211209203557.1318333-1-raj.khem@gmail.com/]
-Signed-off-by: Ross Burton <ross.burton@arm.com>
-Signed-off-by: Khem Raj <raj.khem@gmail.com>
----
- nscd/nscd.init | 20 ++++++++++----------
- 1 file changed, 10 insertions(+), 10 deletions(-)
-
-diff --git a/nscd/nscd.init b/nscd/nscd.init
-index a882da7d8b..857b541381 100644
---- a/nscd/nscd.init
-+++ b/nscd/nscd.init
-@@ -1,4 +1,4 @@
--#!/bin/bash
-+#!/bin/sh
- #
- # nscd: Starts the Name Switch Cache Daemon
- #
-@@ -49,16 +49,16 @@ prog=nscd
- start () {
- [ -d /var/run/nscd ] || mkdir /var/run/nscd
- [ -d /var/db/nscd ] || mkdir /var/db/nscd
-- echo -n $"Starting $prog: "
-+ printf "Starting $prog: "
- daemon /usr/sbin/nscd
- RETVAL=$?
-- echo
-+ printf "\n"
- [ $RETVAL -eq 0 ] && touch /var/lock/subsys/nscd
- return $RETVAL
- }
-
- stop () {
-- echo -n $"Stopping $prog: "
-+ printf "Stopping $prog: "
- /usr/sbin/nscd -K
- RETVAL=$?
- if [ $RETVAL -eq 0 ]; then
-@@ -67,11 +67,11 @@ stop () {
- # a non-privileged user
- rm -f /var/run/nscd/nscd.pid
- rm -f /var/run/nscd/socket
-- success $"$prog shutdown"
-+ success "$prog shutdown"
- else
-- failure $"$prog shutdown"
-+ failure "$prog shutdown"
- fi
-- echo
-+ printf "\n"
- return $RETVAL
- }
-
-@@ -103,13 +103,13 @@ case "$1" in
- RETVAL=$?
- ;;
- force-reload | reload)
-- echo -n $"Reloading $prog: "
-+ printf "Reloading $prog: "
- killproc /usr/sbin/nscd -HUP
- RETVAL=$?
-- echo
-+ printf "\n"
- ;;
- *)
-- echo $"Usage: $0 {start|stop|status|restart|reload|condrestart}"
-+ printf "Usage: $0 {start|stop|status|restart|reload|condrestart}\n"
- RETVAL=1
- ;;
- esac
diff --git a/meta/recipes-core/glibc/glibc/0021-fix-create-thread-failed-in-unprivileged-process-BZ-.patch b/meta/recipes-core/glibc/glibc/0021-fix-create-thread-failed-in-unprivileged-process-BZ-.patch
new file mode 100644
index 0000000000..d57dce125c
--- /dev/null
+++ b/meta/recipes-core/glibc/glibc/0021-fix-create-thread-failed-in-unprivileged-process-BZ-.patch
@@ -0,0 +1,86 @@
+From 7254706544be5100843fc0a332e836fccffc9ef1 Mon Sep 17 00:00:00 2001
+From: Hongxu Jia <hongxu.jia@windriver.com>
+Date: Sun, 29 Aug 2021 20:49:16 +0800
+Subject: [PATCH] fix create thread failed in unprivileged process [BZ #28287]
+
+Since commit [d8ea0d0168 Add an internal wrapper for clone, clone2 and clone3]
+applied, start a unprivileged container (docker run without --privileged),
+it creates a thread failed in container.
+
+In commit d8ea0d0168, it calls __clone3 if HAVE_CLONE3_WAPPER is defined. If
+__clone3 returns -1 with ENOSYS, fall back to clone or clone2.
+
+As known from [1], cloneXXX fails with EPERM if CLONE_NEWCGROUP,
+CLONE_NEWIPC, CLONE_NEWNET, CLONE_NEWNS, CLONE_NEWPID, or CLONE_NEWUTS
+was specified by an unprivileged process (process without CAP_SYS_ADMIN)
+
+[1] https://man7.org/linux/man-pages/man2/clone3.2.html
+
+So if __clone3 returns -1 with EPERM, fall back to clone or clone2 could
+fix the issue. Here are the test steps:
+
+1) Prepare test code
+cat > conftest.c <<ENDOF
+ #include <pthread.h>
+ #include <stdio.h>
+
+int check_me = 0;
+void* func(void* data) {check_me = 42; printf("start thread: check_me %d\n", check_me); return &check_me;}
+int main()
+{
+ pthread_t t;
+ void *ret;
+ pthread_create (&t, 0, func, 0);
+ pthread_join (t, &ret);
+ printf("check_me %d, p %p\n", check_me, &ret);
+ return (check_me != 42 || ret != &check_me);
+}
+
+ENDOF
+
+2) Compile
+gcc -o conftest -pthread conftest.c
+
+3) Start a container with glibc 2.34 installed
+[skip details]
+docker run -it <container-image-name> bash
+
+4) Run conftest without this patch
+$ ./conftest
+check_me 0, p 0x7ffd91ccd400
+
+5) Run conftest with this patch
+$ ./conftest
+start thread: check_me 42
+check_me 42, p 0x7ffe253c6f20
+
+Upstream-Status: Inappropriate [Rejected by upstream]
+
+Upstream glibc rejected it because the latest docker has resolved the issue [1],
+and upstream glibc does not backward compatibility with old docker[2]
+
+In order to build Yocto with uninative in old docker, we need this local
+patch
+
+[1] https://github.com/moby/moby/commit/9f6b562dd12ef7b1f9e2f8e6f2ab6477790a6594
+[2] https://sourceware.org/pipermail/libc-alpha/2021-August/130590.html
+
+Signed-off-by: Hongxu Jia <hongxu.jia@windriver.com>
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+---
+ sysdeps/unix/sysv/linux/clone-internal.c | 2 +-
+ 1 file changed, 1 insertion(+), 1 deletion(-)
+
+diff --git a/sysdeps/unix/sysv/linux/clone-internal.c b/sysdeps/unix/sysv/linux/clone-internal.c
+index 40ef30303f..f97b1f6900 100644
+--- a/sysdeps/unix/sysv/linux/clone-internal.c
++++ b/sysdeps/unix/sysv/linux/clone-internal.c
+@@ -98,7 +98,7 @@ __clone_internal (struct clone_args *cl_args,
+ #ifdef HAVE_CLONE3_WRAPPER
+ int saved_errno = errno;
+ int ret = __clone3_internal (cl_args, func, arg);
+- if (ret != -1 || errno != ENOSYS)
++ if (ret != -1 || (errno != ENOSYS && errno != EPERM))
+ return ret;
+
+ /* NB: Restore errno since errno may be checked against non-zero
diff --git a/meta/recipes-core/glibc/glibc/0022-Avoid-hardcoded-build-time-paths-in-the-output-binar.patch b/meta/recipes-core/glibc/glibc/0022-Avoid-hardcoded-build-time-paths-in-the-output-binar.patch
new file mode 100644
index 0000000000..b527ddffc8
--- /dev/null
+++ b/meta/recipes-core/glibc/glibc/0022-Avoid-hardcoded-build-time-paths-in-the-output-binar.patch
@@ -0,0 +1,32 @@
+From d39779afc72d34f87f052097592008cc38e20615 Mon Sep 17 00:00:00 2001
+From: Richard Purdie <richard.purdie@linuxfoundation.org>
+Date: Sun, 24 Jul 2022 07:07:29 -0700
+Subject: [PATCH] Avoid hardcoded build time paths in the output binaries
+
+replace the compile definitions with the output locations.
+
+Upstream-Status: Inappropriate [would need reworking somehow to be acceptable upstream]
+
+Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+---
+ support/Makefile | 6 +++---
+ 1 file changed, 3 insertions(+), 3 deletions(-)
+
+diff --git a/support/Makefile b/support/Makefile
+index 362a51f882..56d2b37058 100644
+--- a/support/Makefile
++++ b/support/Makefile
+@@ -228,9 +228,9 @@ libsupport-inhibit-o += .o
+ endif
+
+ CFLAGS-support_paths.c = \
+- -DSRCDIR_PATH=\"`cd .. ; pwd`\" \
+- -DOBJDIR_PATH=\"`cd $(objpfx)/..; pwd`\" \
+- -DOBJDIR_ELF_LDSO_PATH=\"`cd $(objpfx)/..; pwd`/elf/$(rtld-installed-name)\" \
++ -DSRCDIR_PATH=\"$(oe_srcdir)\" \
++ -DOBJDIR_PATH=\"$(libdir)/glibc-tests/ptest/tests/glibc-ptest\" \
++ -DOBJDIR_ELF_LDSO_PATH=\"$(slibdir)/$(rtld-installed-name)\" \
+ -DINSTDIR_PATH=\"$(prefix)\" \
+ -DLIBDIR_PATH=\"$(libdir)\" \
+ -DBINDIR_PATH=\"$(bindir)\" \
diff --git a/meta/recipes-core/glibc/glibc/0022-sysdeps-gnu-configure.ac-Set-libc_cv_rootsbindir-onl.patch b/meta/recipes-core/glibc/glibc/0022-sysdeps-gnu-configure.ac-Set-libc_cv_rootsbindir-onl.patch
deleted file mode 100644
index 5ac9d6d3a1..0000000000
--- a/meta/recipes-core/glibc/glibc/0022-sysdeps-gnu-configure.ac-Set-libc_cv_rootsbindir-onl.patch
+++ /dev/null
@@ -1,39 +0,0 @@
-From 5d1384d86fc44404ca32c6fda2d46ec357337c91 Mon Sep 17 00:00:00 2001
-From: Khem Raj <raj.khem@gmail.com>
-Date: Wed, 18 Mar 2015 00:27:10 +0000
-Subject: [PATCH] sysdeps/gnu/configure.ac: Set libc_cv_rootsbindir only if its empty
-
-This ensures that it can be set in build environment
-
-Upstream-Status: Submitted [https://patchwork.sourceware.org/project/glibc/patch/20211209203557.1318333-2-raj.khem@gmail.com/]
-Signed-off-by: Matthieu Crapet <Matthieu.Crapet@ingenico.com>
-Signed-off-by: Khem Raj <raj.khem@gmail.com>
----
- sysdeps/gnu/configure | 2 +-
- sysdeps/gnu/configure.ac | 2 +-
- 2 files changed, 2 insertions(+), 2 deletions(-)
-
-diff --git a/sysdeps/gnu/configure b/sysdeps/gnu/configure
-index c15d1087e8..d30d6e37ae 100644
---- a/sysdeps/gnu/configure
-+++ b/sysdeps/gnu/configure
-@@ -32,6 +32,6 @@ case "$prefix" in
- else
- libc_cv_localstatedir=$localstatedir
- fi
-- libc_cv_rootsbindir=/sbin
-+ libc_cv_rootsbindir=${libc_cv_rootsbindir:=/sbin}
- ;;
- esac
-diff --git a/sysdeps/gnu/configure.ac b/sysdeps/gnu/configure.ac
-index 634fe4de2a..492112e0fd 100644
---- a/sysdeps/gnu/configure.ac
-+++ b/sysdeps/gnu/configure.ac
-@@ -21,6 +21,6 @@ case "$prefix" in
- else
- libc_cv_localstatedir=$localstatedir
- fi
-- libc_cv_rootsbindir=/sbin
-+ libc_cv_rootsbindir=${libc_cv_rootsbindir:=/sbin}
- ;;
- esac
diff --git a/meta/recipes-core/glibc/glibc/0023-aarch64-configure-Pass-mcpu-along-with-march-to-dete.patch b/meta/recipes-core/glibc/glibc/0023-aarch64-configure-Pass-mcpu-along-with-march-to-dete.patch
new file mode 100644
index 0000000000..f6523c5498
--- /dev/null
+++ b/meta/recipes-core/glibc/glibc/0023-aarch64-configure-Pass-mcpu-along-with-march-to-dete.patch
@@ -0,0 +1,62 @@
+From 73c26018ed0ecd9c807bb363cc2c2ab4aca66a82 Mon Sep 17 00:00:00 2001
+From: Szabolcs Nagy <szabolcs.nagy@arm.com>
+Date: Wed, 13 Mar 2024 14:34:14 +0000
+Subject: [PATCH] aarch64: fix check for SVE support in assembler
+
+Due to GCC bug 110901 -mcpu can override -march setting when compiling
+asm code and thus a compiler targetting a specific cpu can fail the
+configure check even when binutils gas supports SVE.
+
+The workaround is that explicit .arch directive overrides both -mcpu
+and -march, and since that's what the actual SVE memcpy uses the
+configure check should use that too even if the GCC issue is fixed
+independently.
+
+Upstream-Status: Backport [https://sourceware.org/git/?p=glibc.git;a=commit;h=73c26018ed0ecd9c807bb363cc2c2ab4aca66a82]
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+Reviewed-by: Florian Weimer <fweimer@redhat.com>
+---
+ sysdeps/aarch64/configure | 5 +++--
+ sysdeps/aarch64/configure.ac | 5 +++--
+ 2 files changed, 6 insertions(+), 4 deletions(-)
+ mode change 100644 => 100755 sysdeps/aarch64/configure
+
+diff --git a/sysdeps/aarch64/configure b/sysdeps/aarch64/configure
+old mode 100644
+new mode 100755
+index ca57edce47..9606137e8d
+--- a/sysdeps/aarch64/configure
++++ b/sysdeps/aarch64/configure
+@@ -325,9 +325,10 @@ then :
+ printf %s "(cached) " >&6
+ else $as_nop
+ cat > conftest.s <<\EOF
+- ptrue p0.b
++ .arch armv8.2-a+sve
++ ptrue p0.b
+ EOF
+-if { ac_try='${CC-cc} -c -march=armv8.2-a+sve conftest.s 1>&5'
++if { ac_try='${CC-cc} -c conftest.s 1>&5'
+ { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_try\""; } >&5
+ (eval $ac_try) 2>&5
+ ac_status=$?
+diff --git a/sysdeps/aarch64/configure.ac b/sysdeps/aarch64/configure.ac
+index 27874eceb4..56d12d661d 100644
+--- a/sysdeps/aarch64/configure.ac
++++ b/sysdeps/aarch64/configure.ac
+@@ -90,9 +90,10 @@ LIBC_CONFIG_VAR([aarch64-variant-pcs], [$libc_cv_aarch64_variant_pcs])
+ # Check if asm support armv8.2-a+sve
+ AC_CACHE_CHECK([for SVE support in assembler], [libc_cv_aarch64_sve_asm], [dnl
+ cat > conftest.s <<\EOF
+- ptrue p0.b
++ .arch armv8.2-a+sve
++ ptrue p0.b
+ EOF
+-if AC_TRY_COMMAND(${CC-cc} -c -march=armv8.2-a+sve conftest.s 1>&AS_MESSAGE_LOG_FD); then
++if AC_TRY_COMMAND(${CC-cc} -c conftest.s 1>&AS_MESSAGE_LOG_FD); then
+ libc_cv_aarch64_sve_asm=yes
+ else
+ libc_cv_aarch64_sve_asm=no
+--
+2.44.0
+
diff --git a/meta/recipes-core/glibc/glibc/0023-timezone-Make-shell-interpreter-overridable-in-tzsel.patch b/meta/recipes-core/glibc/glibc/0023-timezone-Make-shell-interpreter-overridable-in-tzsel.patch
deleted file mode 100644
index e5e6ceba60..0000000000
--- a/meta/recipes-core/glibc/glibc/0023-timezone-Make-shell-interpreter-overridable-in-tzsel.patch
+++ /dev/null
@@ -1,47 +0,0 @@
-From c0f251c58655e3377fe1c67a026c21ef68d2abcf Mon Sep 17 00:00:00 2001
-From: Khem Raj <raj.khem@gmail.com>
-Date: Thu, 9 Dec 2021 15:14:42 -0800
-Subject: [PATCH] timezone: Make shell interpreter overridable in tzselect.ksh
-
-define new macro called KSHELL which can be used to define default shell
-use Bash by default
-
-Upstream-Status: Submitted [https://patchwork.sourceware.org/project/glibc/patch/20211209234015.1554552-1-raj.khem@gmail.com/]
-Signed-off-by: Khem Raj <raj.khem@gmail.com>
----
- Makeconfig | 9 +++++++++
- timezone/Makefile | 1 +
- 2 files changed, 10 insertions(+)
-
-diff --git a/Makeconfig b/Makeconfig
-index 775bf12b65..7b9a8f0a94 100644
---- a/Makeconfig
-+++ b/Makeconfig
-@@ -293,6 +293,15 @@ ifndef sysincludedir
- sysincludedir = /usr/include
- endif
-
-+# The full path name of a Posix-compliant shell, preferably one that supports
-+# the Korn shell's 'select' statement as an extension.
-+# These days, Bash is the most popular.
-+# It should be OK to set this to /bin/sh, on platforms where /bin/sh
-+# lacks 'select' or doesn't completely conform to Posix, but /bin/bash
-+# is typically nicer if it works.
-+ifndef KSHELL
-+KSHELL = /bin/bash
-+endif
-
- # Commands to install files.
- ifndef INSTALL_DATA
-diff --git a/timezone/Makefile b/timezone/Makefile
-index c624a189b3..dc8f5277de 100644
---- a/timezone/Makefile
-+++ b/timezone/Makefile
-@@ -127,6 +127,7 @@ $(objpfx)tzselect: tzselect.ksh $(common-objpfx)config.make
- -e '/TZVERSION=/s|see_Makefile|"$(version)"|' \
- -e '/PKGVERSION=/s|=.*|="$(PKGVERSION)"|' \
- -e '/REPORT_BUGS_TO=/s|=.*|="$(REPORT_BUGS_TO)"|' \
-+ -e 's|#!/bin/bash|#!$(KSHELL)|g' \
- < $< > $@.new
- chmod 555 $@.new
- mv -f $@.new $@
diff --git a/meta/recipes-core/glibc/glibc/0024-fix-create-thread-failed-in-unprivileged-process-BZ-.patch b/meta/recipes-core/glibc/glibc/0024-fix-create-thread-failed-in-unprivileged-process-BZ-.patch
deleted file mode 100644
index b431ea168d..0000000000
--- a/meta/recipes-core/glibc/glibc/0024-fix-create-thread-failed-in-unprivileged-process-BZ-.patch
+++ /dev/null
@@ -1,88 +0,0 @@
-From 6609858239b8f94e12c19eac0cec425511d1211f Mon Sep 17 00:00:00 2001
-From: Hongxu Jia <hongxu.jia@windriver.com>
-Date: Sun, 29 Aug 2021 20:49:16 +0800
-Subject: [PATCH] fix create thread failed in unprivileged process [BZ #28287]
-
-Since commit [d8ea0d0168 Add an internal wrapper for clone, clone2 and clone3]
-applied, start a unprivileged container (docker run without --privileged),
-it creates a thread failed in container.
-
-In commit d8ea0d0168, it calls __clone3 if HAVE_CLONE3_WAPPER is defined. If
-__clone3 returns -1 with ENOSYS, fall back to clone or clone2.
-
-As known from [1], cloneXXX fails with EPERM if CLONE_NEWCGROUP,
-CLONE_NEWIPC, CLONE_NEWNET, CLONE_NEWNS, CLONE_NEWPID, or CLONE_NEWUTS
-was specified by an unprivileged process (process without CAP_SYS_ADMIN)
-
-[1] https://man7.org/linux/man-pages/man2/clone3.2.html
-
-So if __clone3 returns -1 with EPERM, fall back to clone or clone2 could
-fix the issue. Here are the test steps:
-
-1) Prepare test code
-cat > conftest.c <<ENDOF
- #include <pthread.h>
- #include <stdio.h>
-
-int check_me = 0;
-void* func(void* data) {check_me = 42; printf("start thread: check_me %d\n", check_me); return &check_me;}
-int main()
-{
- pthread_t t;
- void *ret;
- pthread_create (&t, 0, func, 0);
- pthread_join (t, &ret);
- printf("check_me %d, p %p\n", check_me, &ret);
- return (check_me != 42 || ret != &check_me);
-}
-
-ENDOF
-
-2) Compile
-gcc -o conftest -pthread conftest.c
-
-3) Start a container with glibc 2.34 installed
-[skip details]
-docker run -it <container-image-name> bash
-
-4) Run conftest without this patch
-$ ./conftest
-check_me 0, p 0x7ffd91ccd400
-
-5) Run conftest with this patch
-$ ./conftest
-start thread: check_me 42
-check_me 42, p 0x7ffe253c6f20
-
-Upstream-Status: Inappropriate [Rejected by upstream]
-
-Upstream glibc rejected it because the latest docker has resolved the issue [1],
-and upstream glibc does not backward compatibility with old docker[2]
-
-In order to build Yocto with uninative in old docker, we need this local
-patch
-
-[1] https://github.com/moby/moby/commit/9f6b562dd12ef7b1f9e2f8e6f2ab6477790a6594
-[2] https://sourceware.org/pipermail/libc-alpha/2021-August/130590.html
-
-Signed-off-by: Hongxu Jia <hongxu.jia@windriver.com>
----
- sysdeps/unix/sysv/linux/clone-internal.c | 2 +-
- 1 file changed, 1 insertion(+), 1 deletion(-)
-
-diff --git a/sysdeps/unix/sysv/linux/clone-internal.c b/sysdeps/unix/sysv/linux/clone-internal.c
-index a71effcbd3..a0569113aa 100644
---- a/sysdeps/unix/sysv/linux/clone-internal.c
-+++ b/sysdeps/unix/sysv/linux/clone-internal.c
-@@ -52,7 +52,7 @@ __clone_internal (struct clone_args *cl_args,
- /* Try clone3 first. */
- int saved_errno = errno;
- ret = __clone3 (cl_args, sizeof (*cl_args), func, arg);
-- if (ret != -1 || errno != ENOSYS)
-+ if (ret != -1 || (errno != ENOSYS && errno != EPERM))
- return ret;
-
- /* NB: Restore errno since errno may be checked against non-zero
---
-2.27.0
-
diff --git a/meta/recipes-core/glibc/glibc/0024-qemu-stale-process.patch b/meta/recipes-core/glibc/glibc/0024-qemu-stale-process.patch
new file mode 100644
index 0000000000..c0a467fcec
--- /dev/null
+++ b/meta/recipes-core/glibc/glibc/0024-qemu-stale-process.patch
@@ -0,0 +1,45 @@
+glibc: Skip 2 qemu tests that can hang in oe-selftest
+
+qemumips and qemuppc were leaving stale processes behind after
+running glibc oe-selftest. During analysis, it was found that
+it was due to "tst-scm_rights" and "tst-scm_rights-time64" tests.
+Disable them so that there are no stale processes left behind.
+
+[YOCTO #15423]
+https://bugzilla.yoctoproject.org/show_bug.cgi?id=15423
+
+Upstream-Status: Inappropriate [oe-core specific]
+Signed-off-by: Yash Shinde <Yash.Shinde@windriver.com>
+---
+diff --git a/sysdeps/unix/sysv/linux/Makefile b/sysdeps/unix/sysv/linux/Makefile
+--- a/sysdeps/unix/sysv/linux/Makefile 2024-03-18 01:15:49.019202881 -0700
++++ b/sysdeps/unix/sysv/linux/Makefile 2024-03-14 06:26:18.581404107 -0700
+@@ -222,7 +222,6 @@
+ tst-process_mrelease \
+ tst-quota \
+ tst-rlimit-infinity \
+- tst-scm_rights \
+ tst-sigtimedwait \
+ tst-sync_file_range \
+ tst-sysconf-iov_max \
+@@ -233,6 +232,8 @@
+ tst-timerfd \
+ tst-ttyname-direct \
+ tst-ttyname-namespace \
++ # Skip this test to avoid stale qemu process
++ # tst-scm_rights \
+ # tests
+
+ # process_madvise requires CAP_SYS_ADMIN.
+@@ -270,9 +271,10 @@
+ tst-ntp_gettimex-time64 \
+ tst-ppoll-time64 \
+ tst-prctl-time64 \
+- tst-scm_rights-time64 \
+ tst-sigtimedwait-time64 \
+ tst-timerfd-time64 \
++ # Skip this test to avoid stale qemu process
++ # tst-scm_rights-time64 \
+ # tests-time64
+
+ tests-clone-internal = \
diff --git a/meta/recipes-core/glibc/glibc/check-test-wrapper b/meta/recipes-core/glibc/glibc/check-test-wrapper
index 6ec9b9b29e..5cc993f718 100644
--- a/meta/recipes-core/glibc/glibc/check-test-wrapper
+++ b/meta/recipes-core/glibc/glibc/check-test-wrapper
@@ -58,7 +58,7 @@ elif targettype == "ssh":
user = os.environ.get("SSH_HOST_USER", None)
port = os.environ.get("SSH_HOST_PORT", None)
- command = ["ssh", "-o", "UserKnownHostsFile=/dev/null", "-o", "StrictHostKeyChecking=no"]
+ command = ["ssh", "-o", "UserKnownHostsFile=/dev/null", "-o", "StrictHostKeyChecking=no", "-o", "LogLevel=quiet"]
if port:
command += ["-p", str(port)]
if not host:
diff --git a/meta/recipes-core/glibc/glibc/run-ptest b/meta/recipes-core/glibc/glibc/run-ptest
index f637986105..cb71c75682 100755
--- a/meta/recipes-core/glibc/glibc/run-ptest
+++ b/meta/recipes-core/glibc/glibc/run-ptest
@@ -1,4 +1,4 @@
-#!/bin/sh
+#!/bin/bash
# ptest script for glibc - to run time related tests to
# facilitate Y2038 validation
# Run with 'ptest-runner glibc-tests'
@@ -22,12 +22,12 @@ tst_time64=$(ls -r ${PWD}/tests/glibc-ptest/*-time64)
# related
tst_time_tmp=$(sed -e "s/-time64$//" <<< ${tst_time64})
-# Run tests supporting only 32 bit time
-for i in ${tst_time_tmp}
-do
- $i >/dev/null 2>&1
- output
-done
+# Do not run tests supporting only 32 bit time
+#for i in ${tst_time_tmp}
+#do
+# $i >/dev/null 2>&1
+# output
+#done
# Run tests supporting only 64 bit time
for i in ${tst_time64}
diff --git a/meta/recipes-core/glibc/glibc_2.35.bb b/meta/recipes-core/glibc/glibc_2.35.bb
deleted file mode 100644
index 6ea5b1efb5..0000000000
--- a/meta/recipes-core/glibc/glibc_2.35.bb
+++ /dev/null
@@ -1,123 +0,0 @@
-require glibc.inc
-require glibc-version.inc
-
-CVE_CHECK_IGNORE += "CVE-2020-10029 CVE-2021-27645"
-
-# glibc https://web.nvd.nist.gov/view/vuln/detail?vulnId=CVE-2019-1010022
-# glibc https://web.nvd.nist.gov/view/vuln/detail?vulnId=CVE-2019-1010023
-# glibc https://web.nvd.nist.gov/view/vuln/detail?vulnId=CVE-2019-1010024
-# Upstream glibc maintainers dispute there is any issue and have no plans to address it further.
-# "this is being treated as a non-security bug and no real threat."
-CVE_CHECK_IGNORE += "CVE-2019-1010022 CVE-2019-1010023 CVE-2019-1010024"
-
-# glibc https://web.nvd.nist.gov/view/vuln/detail?vulnId=CVE-2019-1010025
-# Allows for ASLR bypass so can bypass some hardening, not an exploit in itself, may allow
-# easier access for another. "ASLR bypass itself is not a vulnerability."
-# Potential patch at https://sourceware.org/bugzilla/show_bug.cgi?id=22853
-CVE_CHECK_IGNORE += "CVE-2019-1010025"
-
-DEPENDS += "gperf-native bison-native make-native"
-
-NATIVESDKFIXES ?= ""
-NATIVESDKFIXES:class-nativesdk = "\
- file://0003-nativesdk-glibc-Look-for-host-system-ld.so.cache-as-.patch \
- file://0004-nativesdk-glibc-Fix-buffer-overrun-with-a-relocated-.patch \
- file://0005-nativesdk-glibc-Raise-the-size-of-arrays-containing-.patch \
- file://0006-nativesdk-glibc-Allow-64-bit-atomics-for-x86.patch \
- file://0007-nativesdk-glibc-Make-relocatable-install-for-locales.patch \
- file://0008-nativesdk-glibc-Fall-back-to-faccessat-on-faccess2-r.patch \
-"
-
-SRC_URI = "${GLIBC_GIT_URI};branch=${SRCBRANCH};name=glibc \
- file://etc/ld.so.conf \
- file://generate-supported.mk \
- file://makedbs.sh \
- \
- ${NATIVESDKFIXES} \
- file://0009-yes-within-the-path-sets-wrong-config-variables.patch \
- file://0010-eglibc-Cross-building-and-testing-instructions.patch \
- file://0011-eglibc-Help-bootstrap-cross-toolchain.patch \
- file://0012-eglibc-Resolve-__fpscr_values-on-SH4.patch \
- file://0013-eglibc-Forward-port-cross-locale-generation-support.patch \
- file://0014-localedef-add-to-archive-uses-a-hard-coded-locale-pa.patch \
- file://0016-locale-prevent-maybe-uninitialized-errors-with-Os-BZ.patch \
- file://0017-readlib-Add-OECORE_KNOWN_INTERPRETER_NAMES-to-known-.patch \
- file://0018-wordsize.h-Unify-the-header-between-arm-and-aarch64.patch \
- file://0019-powerpc-Do-not-ask-compiler-for-finding-arch.patch \
- file://0021-Replace-echo-with-printf-builtin-in-nscd-init-script.patch \
- file://0022-sysdeps-gnu-configure.ac-Set-libc_cv_rootsbindir-onl.patch \
- file://0023-timezone-Make-shell-interpreter-overridable-in-tzsel.patch \
- file://0024-fix-create-thread-failed-in-unprivileged-process-BZ-.patch \
- "
-S = "${WORKDIR}/git"
-B = "${WORKDIR}/build-${TARGET_SYS}"
-
-PACKAGES_DYNAMIC = ""
-
-# the -isystem in bitbake.conf screws up glibc do_stage
-BUILD_CPPFLAGS = "-I${STAGING_INCDIR_NATIVE}"
-TARGET_CPPFLAGS = "-I${STAGING_DIR_TARGET}${includedir}"
-
-GLIBC_BROKEN_LOCALES = ""
-
-GLIBCPIE ??= ""
-
-EXTRA_OECONF = "--enable-kernel=${OLDEST_KERNEL} \
- --disable-profile \
- --disable-debug --without-gd \
- --enable-clocale=gnu \
- --with-headers=${STAGING_INCDIR} \
- --without-selinux \
- --enable-tunables \
- --enable-bind-now \
- --enable-stack-protector=strong \
- --disable-crypt \
- --with-default-link \
- ${@bb.utils.contains_any('SELECTED_OPTIMIZATION', '-O0 -Og', '--disable-werror', '', d)} \
- ${GLIBCPIE} \
- ${GLIBC_EXTRA_OECONF}"
-
-EXTRA_OECONF += "${@get_libc_fpu_setting(bb, d)}"
-
-EXTRA_OECONF:append:x86 = " ${@bb.utils.contains_any('TUNE_FEATURES', 'i586 c3', '--disable-cet', '--enable-cet', d)}"
-EXTRA_OECONF:append:x86-64 = " --enable-cet"
-
-PACKAGECONFIG ??= "nscd memory-tagging"
-PACKAGECONFIG[nscd] = "--enable-nscd,--disable-nscd"
-PACKAGECONFIG[memory-tagging] = "--enable-memory-tagging,--disable-memory-tagging"
-
-do_patch:append() {
- bb.build.exec_func('do_fix_readlib_c', d)
-}
-
-do_fix_readlib_c () {
- sed -i -e 's#OECORE_KNOWN_INTERPRETER_NAMES#${EGLIBC_KNOWN_INTERPRETER_NAMES}#' ${S}/elf/readlib.c
-}
-
-do_configure () {
-# override this function to avoid the autoconf/automake/aclocal/autoheader
-# calls for now
-# don't pass CPPFLAGS into configure, since it upsets the kernel-headers
-# version check and doesn't really help with anything
- (cd ${S} && gnu-configize) || die "failure in running gnu-configize"
- find ${S} -name "configure" | xargs touch
- CPPFLAGS="" oe_runconf
-}
-
-LDFLAGS += "-fuse-ld=bfd"
-do_compile () {
- base_do_compile
- echo "Adjust ldd script"
- if [ -n "${RTLDLIST}" ]
- then
- prevrtld=`cat ${B}/elf/ldd | grep "^RTLDLIST=" | sed 's#^RTLDLIST="\?\([^"]*\)"\?$#\1#'`
- # remove duplicate entries
- newrtld=`echo $(printf '%s\n' ${prevrtld} ${RTLDLIST} | LC_ALL=C sort -u)`
- echo "ldd \"${prevrtld} ${RTLDLIST}\" -> \"${newrtld}\""
- sed -i ${B}/elf/ldd -e "s#^RTLDLIST=.*\$#RTLDLIST=\"${newrtld}\"#"
- fi
-}
-
-require glibc-package.inc
-
-BBCLASSEXTEND = "nativesdk"
diff --git a/meta/recipes-core/glibc/glibc_2.39.bb b/meta/recipes-core/glibc/glibc_2.39.bb
new file mode 100644
index 0000000000..9122472689
--- /dev/null
+++ b/meta/recipes-core/glibc/glibc_2.39.bb
@@ -0,0 +1,128 @@
+require glibc.inc
+require glibc-version.inc
+
+# glibc https://web.nvd.nist.gov/view/vuln/detail?vulnId=CVE-2019-1010022
+# glibc https://web.nvd.nist.gov/view/vuln/detail?vulnId=CVE-2019-1010023
+# glibc https://web.nvd.nist.gov/view/vuln/detail?vulnId=CVE-2019-1010024
+CVE_STATUS_GROUPS = "CVE_STATUS_RECIPE"
+CVE_STATUS_RECIPE = "CVE-2019-1010022 CVE-2019-1010023 CVE-2019-1010024"
+CVE_STATUS_RECIPE[status] = "disputed: \
+Upstream glibc maintainers dispute there is any issue and have no plans to address it further. \
+this is being treated as a non-security bug and no real threat."
+
+# glibc https://web.nvd.nist.gov/view/vuln/detail?vulnId=CVE-2019-1010025
+# Potential patch at https://sourceware.org/bugzilla/show_bug.cgi?id=22853
+CVE_STATUS[CVE-2019-1010025] = "disputed: \
+Allows for ASLR bypass so can bypass some hardening, not an exploit in itself, may allow \
+easier access for another. 'ASLR bypass itself is not a vulnerability.'"
+
+DEPENDS += "gperf-native bison-native"
+
+NATIVESDKFIXES ?= ""
+NATIVESDKFIXES:class-nativesdk = "\
+ file://0003-nativesdk-glibc-Look-for-host-system-ld.so.cache-as-.patch \
+ file://0004-nativesdk-glibc-Fix-buffer-overrun-with-a-relocated-.patch \
+ file://0005-nativesdk-glibc-Raise-the-size-of-arrays-containing-.patch \
+ file://0006-nativesdk-glibc-Allow-64-bit-atomics-for-x86.patch \
+ file://0007-nativesdk-glibc-Make-relocatable-install-for-locales.patch \
+ file://0008-nativesdk-glibc-Fall-back-to-faccessat-on-faccess2-r.patch \
+"
+
+SRC_URI = "${GLIBC_GIT_URI};branch=${SRCBRANCH};name=glibc \
+ file://etc/ld.so.conf \
+ file://generate-supported.mk \
+ file://makedbs.sh \
+ \
+ ${NATIVESDKFIXES} \
+ file://0009-yes-within-the-path-sets-wrong-config-variables.patch \
+ file://0010-eglibc-Cross-building-and-testing-instructions.patch \
+ file://0011-eglibc-Help-bootstrap-cross-toolchain.patch \
+ file://0012-eglibc-Resolve-__fpscr_values-on-SH4.patch \
+ file://0013-eglibc-Forward-port-cross-locale-generation-support.patch \
+ file://0014-localedef-add-to-archive-uses-a-hard-coded-locale-pa.patch \
+ file://0015-powerpc-Do-not-ask-compiler-for-finding-arch.patch \
+ file://0016-wordsize.h-Unify-the-header-between-arm-and-aarch64.patch \
+ file://0017-Replace-echo-with-printf-builtin-in-nscd-init-script.patch \
+ file://0018-sysdeps-gnu-configure.ac-Set-libc_cv_rootsbindir-onl.patch \
+ file://0019-timezone-Make-shell-interpreter-overridable-in-tzsel.patch \
+ file://0020-tzselect.ksh-Use-bin-sh-default-shell-interpreter.patch \
+ file://0021-fix-create-thread-failed-in-unprivileged-process-BZ-.patch \
+ file://0022-Avoid-hardcoded-build-time-paths-in-the-output-binar.patch \
+ file://0023-aarch64-configure-Pass-mcpu-along-with-march-to-dete.patch \
+ file://0024-qemu-stale-process.patch \
+"
+S = "${WORKDIR}/git"
+B = "${WORKDIR}/build-${TARGET_SYS}"
+
+PACKAGES_DYNAMIC = ""
+
+# the -isystem in bitbake.conf screws up glibc do_stage
+BUILD_CPPFLAGS = "-I${STAGING_INCDIR_NATIVE}"
+TARGET_CPPFLAGS = "-I${STAGING_DIR_TARGET}${includedir}"
+
+GLIBC_BROKEN_LOCALES = ""
+
+GLIBCPIE ??= ""
+
+EXTRA_OECONF = "--enable-kernel=${OLDEST_KERNEL} \
+ --disable-profile \
+ --disable-debug --without-gd \
+ --enable-clocale=gnu \
+ --with-headers=${STAGING_INCDIR} \
+ --without-selinux \
+ --enable-bind-now \
+ --enable-stack-protector=strong \
+ --disable-crypt \
+ --with-default-link \
+ --disable-werror \
+ --enable-fortify-source \
+ ${@bb.utils.contains_any('SELECTED_OPTIMIZATION', '-O0 -Og', '--disable-werror', '', d)} \
+ ${GLIBCPIE} \
+ ${GLIBC_EXTRA_OECONF}"
+
+EXTRA_OECONF += "${@get_libc_fpu_setting(bb, d)}"
+
+EXTRA_OECONF:append:x86-64 = " --enable-cet"
+
+PACKAGECONFIG ??= "nscd memory-tagging"
+
+PACKAGECONFIG[nscd] = "--enable-nscd,--disable-nscd"
+PACKAGECONFIG[memory-tagging] = "--enable-memory-tagging,--disable-memory-tagging"
+
+do_patch:append() {
+ bb.build.exec_func('do_fix_readlib_c', d)
+}
+
+do_fix_readlib_c () {
+ sed -i -e 's#OECORE_KNOWN_INTERPRETER_NAMES#${EGLIBC_KNOWN_INTERPRETER_NAMES}#' ${S}/elf/readlib.c
+}
+
+do_configure () {
+# override this function to avoid the autoconf/automake/aclocal/autoheader
+# calls for now
+# don't pass CPPFLAGS into configure, since it upsets the kernel-headers
+# version check and doesn't really help with anything
+ (cd ${S} && gnu-configize) || die "failure in running gnu-configize"
+ find ${S} -name "configure" | xargs touch
+ CPPFLAGS="" LD="${HOST_PREFIX}ld.bfd ${TOOLCHAIN_OPTIONS}" oe_runconf
+}
+
+LDFLAGS += "-fuse-ld=bfd"
+CC += "-fuse-ld=bfd"
+
+do_compile () {
+ base_do_compile
+ echo "Adjust ldd script"
+ if [ -n "${RTLDLIST}" ]
+ then
+ prevrtld=`cat ${B}/elf/ldd | grep "^RTLDLIST=" | sed 's#^RTLDLIST="\?\([^"]*\)"\?$#\1#'`
+ # remove duplicate entries
+ newrtld=`echo $(printf '%s\n' ${prevrtld} ${RTLDLIST} | LC_ALL=C sort -u)`
+ echo "ldd \"${prevrtld} ${RTLDLIST}\" -> \"${newrtld}\""
+ sed -i ${B}/elf/ldd -e "s#^RTLDLIST=.*\$#RTLDLIST=\"${newrtld}\"#"
+ fi
+}
+
+require glibc-package.inc
+
+BBCLASSEXTEND = "nativesdk"
diff --git a/meta/recipes-core/glibc/ldconfig-native-2.12.1/ldconfig-handle-.dynstr-located-in-separate-segment.patch b/meta/recipes-core/glibc/ldconfig-native-2.12.1/ldconfig-handle-.dynstr-located-in-separate-segment.patch
new file mode 100644
index 0000000000..36f04adfde
--- /dev/null
+++ b/meta/recipes-core/glibc/ldconfig-native-2.12.1/ldconfig-handle-.dynstr-located-in-separate-segment.patch
@@ -0,0 +1,178 @@
+From 864054a6cb971688a181316b8227ae0361b4d69e Mon Sep 17 00:00:00 2001
+From: Andreas Schwab <schwab@suse.de>
+Date: Wed, 9 Oct 2019 17:46:47 +0200
+Subject: [PATCH] ldconfig: handle .dynstr located in separate segment (bug
+ 25087)
+
+To determine the load offset of the DT_STRTAB section search for the
+segment containing it, instead of using the load offset of the first
+segment.
+
+Upstream-Status: Backport [https://sourceware.org/git/?p=glibc.git;a=commitdiff;h=58e8f5fd2ba47b6dc47fd4d0a35e4175c7c87aaa]
+
+Backported: ported to support endianness and 32/64 bits.
+Signed-off-by: Fabien Mahot <fabien.mahot@external.desouttertools.com>
+---
+ readelflib.c | 86 +++++++++++++++++++++++++++++++---------------------
+ 1 file changed, 52 insertions(+), 34 deletions(-)
+
+diff --git a/readelflib.c b/readelflib.c
+index a01e1cede3..380aed563d 100644
+--- a/readelflib.c
++++ b/readelflib.c
+@@ -80,7 +80,6 @@ process_elf_file32 (const char *file_name, const char *lib, int *flag,
+ {
+ int i;
+ unsigned int j;
+- Elf32_Addr loadaddr;
+ unsigned int dynamic_addr;
+ size_t dynamic_size;
+ char *program_interpreter;
+@@ -110,7 +109,6 @@ process_elf_file32 (const char *file_name, const char *lib, int *flag,
+ libc5/libc6. */
+ *flag = FLAG_ELF;
+
+- loadaddr = -1;
+ dynamic_addr = 0;
+ dynamic_size = 0;
+ program_interpreter = NULL;
+@@ -121,11 +119,6 @@ process_elf_file32 (const char *file_name, const char *lib, int *flag,
+
+ switch (read32(segment->p_type, be))
+ {
+- case PT_LOAD:
+- if (loadaddr == (Elf32_Addr) -1)
+- loadaddr = read32(segment->p_vaddr, be) - read32(segment->p_offset, be);
+- break;
+-
+ case PT_DYNAMIC:
+ if (dynamic_addr)
+ error (0, 0, _("more than one dynamic segment\n"));
+@@ -188,11 +181,6 @@ process_elf_file32 (const char *file_name, const char *lib, int *flag,
+ }
+
+ }
+- if (loadaddr == (Elf32_Addr) -1)
+- {
+- /* Very strange. */
+- loadaddr = 0;
+- }
+
+ /* Now we can read the dynamic sections. */
+ if (dynamic_size == 0)
+@@ -208,11 +196,32 @@ process_elf_file32 (const char *file_name, const char *lib, int *flag,
+ {
+ check_ptr (dyn_entry);
+ if (read32(dyn_entry->d_tag, be) == DT_STRTAB)
+- {
+- dynamic_strings = (char *) (file_contents + read32(dyn_entry->d_un.d_val, be) - loadaddr);
+- check_ptr (dynamic_strings);
+- break;
+- }
++ {
++ /* Find the file offset of the segment containing the dynamic
++ string table. */
++ Elf32_Off loadoff = -1;
++ for (i = 0, segment = elf_pheader;
++ i < read16(elf_header->e_phnum, be); i++, segment++)
++ {
++ if (read32(segment->p_type, be) == PT_LOAD
++ && read32(dyn_entry->d_un.d_val, be) >= read32(segment->p_vaddr, be)
++ && (read32(dyn_entry->d_un.d_val, be) - read32(segment->p_vaddr, be)
++ < read32(segment->p_filesz, be)))
++ {
++ loadoff = read32(segment->p_vaddr, be) - read32(segment->p_offset, be);
++ break;
++ }
++ }
++ if (loadoff == (Elf32_Off) -1)
++ {
++ /* Very strange. */
++ loadoff = 0;
++ }
++
++ dynamic_strings = (char *) (file_contents + read32(dyn_entry->d_un.d_val, be) - loadoff);
++ check_ptr (dynamic_strings);
++ break;
++ }
+ }
+
+ if (dynamic_strings == NULL)
+@@ -269,7 +278,6 @@ process_elf_file64 (const char *file_name, const char *lib, int *flag,
+ {
+ int i;
+ unsigned int j;
+- Elf64_Addr loadaddr;
+ Elf64_Addr dynamic_addr;
+ Elf64_Xword dynamic_size;
+ char *program_interpreter;
+@@ -347,7 +355,6 @@ process_elf_file64 (const char *file_name, const char *lib, int *flag,
+ break;
+ }
+
+- loadaddr = -1;
+ dynamic_addr = 0;
+ dynamic_size = 0;
+ program_interpreter = NULL;
+@@ -358,11 +365,6 @@ process_elf_file64 (const char *file_name, const char *lib, int *flag,
+
+ switch (read32(segment->p_type, be))
+ {
+- case PT_LOAD:
+- if (loadaddr == (Elf64_Addr) -1)
+- loadaddr = read64(segment->p_vaddr, be) - read64(segment->p_offset, be);
+- break;
+-
+ case PT_DYNAMIC:
+ if (dynamic_addr)
+ error (0, 0, _("more than one dynamic segment\n"));
+@@ -426,11 +428,6 @@ process_elf_file64 (const char *file_name, const char *lib, int *flag,
+ }
+
+ }
+- if (loadaddr == (Elf64_Addr) -1)
+- {
+- /* Very strange. */
+- loadaddr = 0;
+- }
+
+ /* Now we can read the dynamic sections. */
+ if (dynamic_size == 0)
+@@ -446,11 +443,32 @@ process_elf_file64 (const char *file_name, const char *lib, int *flag,
+ {
+ check_ptr (dyn_entry);
+ if (read64(dyn_entry->d_tag, be) == DT_STRTAB)
+- {
+- dynamic_strings = (char *) (file_contents + read64(dyn_entry->d_un.d_val, be) - loadaddr);
+- check_ptr (dynamic_strings);
+- break;
+- }
++ {
++ /* Find the file offset of the segment containing the dynamic
++ string table. */
++ Elf64_Off loadoff = -1;
++ for (i = 0, segment = elf_pheader;
++ i < read16(elf_header->e_phnum, be); i++, segment++)
++ {
++ if (read64(segment->p_type, be) == PT_LOAD
++ && read64(dyn_entry->d_un.d_val, be) >= read64(segment->p_vaddr, be)
++ && (read64(dyn_entry->d_un.d_val, be) - read64(segment->p_vaddr, be)
++ < read64(segment->p_filesz, be)))
++ {
++ loadoff = read64(segment->p_vaddr, be) - read64(segment->p_offset, be);
++ break;
++ }
++ }
++ if (loadoff == (Elf32_Off) -1)
++ {
++ /* Very strange. */
++ loadoff = 0;
++ }
++
++ dynamic_strings = (char *) (file_contents + read64(dyn_entry->d_un.d_val, be) - loadoff);
++ check_ptr (dynamic_strings);
++ break;
++ }
+ }
+
+ if (dynamic_strings == NULL)
diff --git a/meta/recipes-core/glibc/ldconfig-native_2.12.1.bb b/meta/recipes-core/glibc/ldconfig-native_2.12.1.bb
index e867ceb3ec..9ca95d1e52 100644
--- a/meta/recipes-core/glibc/ldconfig-native_2.12.1.bb
+++ b/meta/recipes-core/glibc/ldconfig-native_2.12.1.bb
@@ -16,9 +16,9 @@ SRC_URI = "file://ldconfig-native-2.12.1.tar.bz2 \
file://add-64-bit-flag-for-ELF64-entries.patch \
file://no-aux-cache.patch \
file://add-riscv-support.patch \
+ file://ldconfig-handle-.dynstr-located-in-separate-segment.patch \
"
-PR = "r2"
FILESEXTRAPATHS =. "${FILE_DIRNAME}/${P}:"
diff --git a/meta/recipes-core/ifupdown/files/0001-Define-FNM_EXTMATCH-for-musl.patch b/meta/recipes-core/ifupdown/files/0001-Define-FNM_EXTMATCH-for-musl.patch
index 7bf02ea536..6c4ed526bf 100644
--- a/meta/recipes-core/ifupdown/files/0001-Define-FNM_EXTMATCH-for-musl.patch
+++ b/meta/recipes-core/ifupdown/files/0001-Define-FNM_EXTMATCH-for-musl.patch
@@ -1,4 +1,4 @@
-From ff714d6461569d69b253089110ec659e4ebec248 Mon Sep 17 00:00:00 2001
+From 8dfbcf02e424ba1fdef587d81c9e08a37ab8c1b6 Mon Sep 17 00:00:00 2001
From: Oleksandr Kravchuk <open.source@oleksandr-kravchuk.com>
Date: Tue, 2 Jul 2019 20:10:42 +0200
Subject: [PATCH] Define FNM_EXTMATCH for musl
@@ -24,12 +24,13 @@ function); did you mean 'FNM_NOMATCH'?
Upstream-Status: Submitted [https://salsa.debian.org/debian/ifupdown/merge_requests/5]
Signed-off-by: Oleksandr Kravchuk <open.source@oleksandr-kravchuk.com>
+
---
archcommon.h | 4 ++++
1 file changed, 4 insertions(+)
diff --git a/archcommon.h b/archcommon.h
-index fe99950..f257f9d 100644
+index 818b0b6..ad9cd1a 100644
--- a/archcommon.h
+++ b/archcommon.h
@@ -1,5 +1,9 @@
@@ -42,6 +43,3 @@ index fe99950..f257f9d 100644
bool execable(const char *);
#define iface_is_link() (!_iface_has(ifd->real_iface, ":."))
---
-2.17.1
-
diff --git a/meta/recipes-core/ifupdown/files/0001-Makefile-do-not-use-dpkg-for-determining-OS-type.patch b/meta/recipes-core/ifupdown/files/0001-Makefile-do-not-use-dpkg-for-determining-OS-type.patch
index 57c8d04fd7..6e15a25224 100644
--- a/meta/recipes-core/ifupdown/files/0001-Makefile-do-not-use-dpkg-for-determining-OS-type.patch
+++ b/meta/recipes-core/ifupdown/files/0001-Makefile-do-not-use-dpkg-for-determining-OS-type.patch
@@ -1,10 +1,11 @@
-From 7ace0656bd325f9e7749f2cde641eddc057bc98a Mon Sep 17 00:00:00 2001
+From 7fe516cbaf9fda09d99dcb54d4645367cffc8a4d Mon Sep 17 00:00:00 2001
From: Alexander Kanavin <alex.kanavin@gmail.com>
Date: Thu, 9 Jan 2020 15:38:06 +0100
Subject: [PATCH] Makefile: do not use dpkg for determining OS type
Upstream-Status: Inappropriate [oe-core specific]
Signed-off-by: Alexander Kanavin <alex.kanavin@gmail.com>
+
---
Makefile | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/meta/recipes-core/ifupdown/files/0001-ifupdown-skip-wrong-test-case.patch b/meta/recipes-core/ifupdown/files/0001-ifupdown-skip-wrong-test-case.patch
index c751e4fab0..9ae24da51e 100644
--- a/meta/recipes-core/ifupdown/files/0001-ifupdown-skip-wrong-test-case.patch
+++ b/meta/recipes-core/ifupdown/files/0001-ifupdown-skip-wrong-test-case.patch
@@ -1,4 +1,4 @@
-From 98243deface88614e3f332c4a85d04a9abce55ff Mon Sep 17 00:00:00 2001
+From 782d8a869c266820d0f34974436f244f67afaea7 Mon Sep 17 00:00:00 2001
From: Zqiang <qiang.zhang@windriver.com>
Date: Mon, 19 Apr 2021 14:15:45 +0800
Subject: [PATCH] ifupdown: skip wrong test case
@@ -10,23 +10,21 @@ and are skipped directly.
Upstream-Status: Inappropriate [oe-core specific]
Signed-off-by: Zqiang <qiang.zhang@windriver.com>
+
---
tests/testbuild-linux | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/tests/testbuild-linux b/tests/testbuild-linux
-index 1181ea0..5f148eb 100755
+index 2fa1b8b..2c69856 100755
--- a/tests/testbuild-linux
+++ b/tests/testbuild-linux
@@ -3,7 +3,7 @@
dir=tests/linux
result=true
--for test in 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18; do
-+for test in 1 2 3 4 5 6 7 8 9 10 11 16 17 18; do
+-for test in 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19; do
++for test in 1 2 3 4 5 6 7 8 9 10 11 16 17 18 19; do
if [ -e $dir/testcase.$test ]; then
args="$(cat $dir/testcase.$test | sed -n 's/^# RUN: //p')"
else
---
-2.17.1
-
diff --git a/meta/recipes-core/ifupdown/files/defn2-c-man-don-t-rely-on-dpkg-architecture-to-set-a.patch b/meta/recipes-core/ifupdown/files/defn2-c-man-don-t-rely-on-dpkg-architecture-to-set-a.patch
index d1c3d260ed..7b4b761e6a 100644
--- a/meta/recipes-core/ifupdown/files/defn2-c-man-don-t-rely-on-dpkg-architecture-to-set-a.patch
+++ b/meta/recipes-core/ifupdown/files/defn2-c-man-don-t-rely-on-dpkg-architecture-to-set-a.patch
@@ -1,7 +1,7 @@
-From 40257d65b338b6e2ed9d89d6fa7c7b8701a4c311 Mon Sep 17 00:00:00 2001
+From fc8e8d4cb19ff30e69aa1855332544f1017f974c Mon Sep 17 00:00:00 2001
From: Paul Gortmaker <paul.gortmaker@windriver.com>
Date: Wed, 6 Aug 2014 14:54:12 -0400
-Subject: [PATCH 1/2] defn2[c|man]: don't rely on dpkg-architecture to set arch
+Subject: [PATCH] defn2[c|man]: don't rely on dpkg-architecture to set arch
In yocto we'll always be cross compiling, and we'll always
be building on linux for linux (vs. *BSD, hurd, etc.)
@@ -13,13 +13,14 @@ like the loopback device not being configured/enabled.
Signed-off-by: Paul Gortmaker <paul.gortmaker@windriver.com>
Upstream-Status: Pending
+
---
defn2c.pl | 6 +++---
defn2man.pl | 6 +++---
2 files changed, 6 insertions(+), 6 deletions(-)
diff --git a/defn2c.pl b/defn2c.pl
-index 8798dc2..f2551c7 100755
+index fa7a02e..bb4987d 100755
--- a/defn2c.pl
+++ b/defn2c.pl
@@ -2,9 +2,9 @@
@@ -52,6 +53,3 @@ index 6ddcfdd..c9c4dd0 100755
# declarations
my $line;
---
-2.17.1
-
diff --git a/meta/recipes-core/ifupdown/files/tweak-ptest-script.patch b/meta/recipes-core/ifupdown/files/tweak-ptest-script.patch
index ea88a9086a..fa6bb84df3 100644
--- a/meta/recipes-core/ifupdown/files/tweak-ptest-script.patch
+++ b/meta/recipes-core/ifupdown/files/tweak-ptest-script.patch
@@ -1,14 +1,19 @@
-Tweak tests of ifupdown to make it work with oe-core ptest framework.
+From a6bb2ac5f521c678abbbdb1960d28f750f4357a6 Mon Sep 17 00:00:00 2001
+From: Kai Kang <kai.kang@windriver.com>
+Date: Mon, 3 Feb 2020 17:33:11 +0800
+Subject: [PATCH] Tweak tests of ifupdown to make it work with oe-core ptest
+ framework.
Upstream-Status: Inappropriate [oe-core specific]
Signed-off-by: Kai Kang <kai.kang@windriver.com>
+
---
tests/testbuild-linux | 11 ++++++-----
1 file changed, 6 insertions(+), 5 deletions(-)
diff --git a/tests/testbuild-linux b/tests/testbuild-linux
-index 5f148eb..d9b1698 100755
+index 2c69856..05f0703 100755
--- a/tests/testbuild-linux
+++ b/tests/testbuild-linux
@@ -1,6 +1,7 @@
@@ -19,8 +24,8 @@ index 5f148eb..d9b1698 100755
+dir=$curdir/linux
result=true
- for test in 1 2 3 4 5 6 7 8 9 10 11 16 17 18; do
-@@ -12,7 +13,7 @@ for test in 1 2 3 4 5 6 7 8 9 10 11 16 17 18; do
+ for test in 1 2 3 4 5 6 7 8 9 10 11 16 17 18 19; do
+@@ -12,7 +13,7 @@ for test in 1 2 3 4 5 6 7 8 9 10 11 16 17 18 19; do
echo "Testcase $test: $args"
exitcode=0
@@ -29,7 +34,7 @@ index 5f148eb..d9b1698 100755
>$dir/up-res-out.$test 2>$dir/up-res-err.$test || exitcode=$?
(echo "exit code: $exitcode";
-@@ -20,7 +21,7 @@ for test in 1 2 3 4 5 6 7 8 9 10 11 16 17 18; do
+@@ -20,7 +21,7 @@ for test in 1 2 3 4 5 6 7 8 9 10 11 16 17 18 19; do
echo "====stderr===="; cat $dir/up-res-err.$test) > $dir/up-res.$test
exitcode=0
@@ -38,7 +43,7 @@ index 5f148eb..d9b1698 100755
>$dir/down-res-out.$test 2>$dir/down-res-err.$test || exitcode=$?
(echo "exit code: $exitcode";
-@@ -28,9 +29,9 @@ for test in 1 2 3 4 5 6 7 8 9 10 11 16 17 18; do
+@@ -28,9 +29,9 @@ for test in 1 2 3 4 5 6 7 8 9 10 11 16 17 18 19; do
echo "====stderr===="; cat $dir/down-res-err.$test) > $dir/down-res.$test
if diff -ub $dir/up.$test $dir/up-res.$test && diff -ub $dir/down.$test $dir/down-res.$test; then
@@ -50,5 +55,3 @@ index 5f148eb..d9b1698 100755
result=false
fi
echo "=========="
---
-2.17.1
diff --git a/meta/recipes-core/ifupdown/ifupdown_0.8.37.bb b/meta/recipes-core/ifupdown/ifupdown_0.8.37.bb
deleted file mode 100644
index 57d4152a39..0000000000
--- a/meta/recipes-core/ifupdown/ifupdown_0.8.37.bb
+++ /dev/null
@@ -1,57 +0,0 @@
-SUMMARY = "ifupdown: basic ifup and ifdown used by initscripts"
-HOMEPAGE = "https://salsa.debian.org/debian/ifupdown"
-DESCRIPTION = "High level tools to configure network interfaces \
-This package provides the tools ifup and ifdown which may be used to \
-configure (or, respectively, deconfigure) network interfaces, based on \
-the file /etc/network/interfaces."
-LICENSE = "GPL-2.0-only"
-LIC_FILES_CHKSUM = "file://COPYING;md5=94d55d512a9ba36caa9b7df079bae19f"
-
-SRC_URI = "git://salsa.debian.org/debian/ifupdown.git;protocol=https;branch=master \
- file://defn2-c-man-don-t-rely-on-dpkg-architecture-to-set-a.patch \
- file://99_network \
- file://0001-Define-FNM_EXTMATCH-for-musl.patch \
- file://0001-Makefile-do-not-use-dpkg-for-determining-OS-type.patch \
- file://run-ptest \
- file://0001-ifupdown-skip-wrong-test-case.patch \
- ${@bb.utils.contains('DISTRO_FEATURES', 'ptest', 'file://tweak-ptest-script.patch', '', d)} \
- "
-SRCREV = "2b4138f36ce3ba37186aa01b502273e0c39ab518"
-
-S = "${WORKDIR}/git"
-
-
-inherit ptest update-alternatives
-
-do_compile () {
- chmod a+rx *.pl *.sh
- oe_runmake 'CC=${CC}' "CFLAGS=${CFLAGS} -Wall -W -D'IFUPDOWN_VERSION=\"${PV}\"'"
-}
-
-do_install () {
- install -d ${D}${mandir}/man8 \
- ${D}${mandir}/man5 \
- ${D}${base_sbindir}
-
- # If volatiles are used, then we'll also need /run/network there too.
- install -d ${D}/etc/default/volatiles
- install -m 0644 ${WORKDIR}/99_network ${D}/etc/default/volatiles
-
- install -m 0755 ifup ${D}${base_sbindir}/
- ln ${D}${base_sbindir}/ifup ${D}${base_sbindir}/ifdown
- install -m 0644 ifup.8 ${D}${mandir}/man8
- install -m 0644 interfaces.5 ${D}${mandir}/man5
- cd ${D}${mandir}/man8 && ln -s ifup.8 ifdown.8
-}
-
-do_install_ptest () {
- install -d ${D}${PTEST_PATH}/tests
- cp -r ${S}/tests/testbuild-linux ${D}${PTEST_PATH}/tests/
- cp -r ${S}/tests/linux ${D}${PTEST_PATH}/tests/
-}
-
-ALTERNATIVE_PRIORITY = "100"
-ALTERNATIVE:${PN} = "ifup ifdown"
-
-ALTERNATIVE_LINK_NAME[ifup] = "${base_sbindir}/ifup"
-ALTERNATIVE_LINK_NAME[ifdown] = "${base_sbindir}/ifdown"
diff --git a/meta/recipes-core/ifupdown/ifupdown_0.8.41.bb b/meta/recipes-core/ifupdown/ifupdown_0.8.41.bb
new file mode 100644
index 0000000000..16425ea9e4
--- /dev/null
+++ b/meta/recipes-core/ifupdown/ifupdown_0.8.41.bb
@@ -0,0 +1,62 @@
+SUMMARY = "ifupdown: basic ifup and ifdown used by initscripts"
+HOMEPAGE = "https://salsa.debian.org/debian/ifupdown"
+DESCRIPTION = "High level tools to configure network interfaces \
+This package provides the tools ifup and ifdown which may be used to \
+configure (or, respectively, deconfigure) network interfaces, based on \
+the file /etc/network/interfaces."
+LICENSE = "GPL-2.0-only"
+LIC_FILES_CHKSUM = "file://COPYING;md5=94d55d512a9ba36caa9b7df079bae19f"
+
+SRC_URI = "git://salsa.debian.org/debian/ifupdown.git;protocol=https;branch=master \
+ file://99_network \
+ file://run-ptest \
+ file://0001-Define-FNM_EXTMATCH-for-musl.patch \
+ file://0001-Makefile-do-not-use-dpkg-for-determining-OS-type.patch \
+ file://0001-ifupdown-skip-wrong-test-case.patch \
+ file://defn2-c-man-don-t-rely-on-dpkg-architecture-to-set-a.patch \
+ file://tweak-ptest-script.patch \
+ "
+SRCREV = "369d9d3c13a0c56ad09fd4f13b4a80eb9a94e270"
+
+S = "${WORKDIR}/git"
+
+
+inherit ptest update-alternatives
+
+do_compile () {
+ chmod a+rx *.pl *.sh
+ oe_runmake 'CC=${CC}' "CFLAGS=${CFLAGS} -Wall -W -D'IFUPDOWN_VERSION=\"${PV}\"'"
+}
+
+do_install () {
+ install -d ${D}${mandir}/man8 \
+ ${D}${mandir}/man5 \
+ ${D}${base_sbindir}
+
+ # If volatiles are used, then we'll also need /run/network there too.
+ install -d ${D}/etc/default/volatiles
+ install -m 0644 ${WORKDIR}/99_network ${D}/etc/default/volatiles
+
+ install -m 0755 ifup ${D}${base_sbindir}/
+ ln ${D}${base_sbindir}/ifup ${D}${base_sbindir}/ifdown
+ install -m 0644 ifup.8 ${D}${mandir}/man8
+ install -m 0644 interfaces.5 ${D}${mandir}/man5
+ cd ${D}${mandir}/man8 && ln -s ifup.8 ifdown.8
+
+ install -d ${D}${sysconfdir}/network/if-pre-up.d
+ install -d ${D}${sysconfdir}/network/if-up.d
+ install -d ${D}${sysconfdir}/network/if-down.d
+ install -d ${D}${sysconfdir}/network/if-post-down.d
+}
+
+do_install_ptest () {
+ install -d ${D}${PTEST_PATH}/tests
+ cp -r ${S}/tests/testbuild-linux ${D}${PTEST_PATH}/tests/
+ cp -r ${S}/tests/linux ${D}${PTEST_PATH}/tests/
+}
+
+ALTERNATIVE_PRIORITY = "100"
+ALTERNATIVE:${PN} = "ifup ifdown"
+
+ALTERNATIVE_LINK_NAME[ifup] = "${base_sbindir}/ifup"
+ALTERNATIVE_LINK_NAME[ifdown] = "${base_sbindir}/ifdown"
diff --git a/meta/recipes-core/images/build-appliance-image_15.0.0.bb b/meta/recipes-core/images/build-appliance-image_15.0.0.bb
index 6ed797cafa..4cf55519cc 100644
--- a/meta/recipes-core/images/build-appliance-image_15.0.0.bb
+++ b/meta/recipes-core/images/build-appliance-image_15.0.0.bb
@@ -1,6 +1,6 @@
SUMMARY = "An image containing the build system itself"
DESCRIPTION = "An image containing the build system that you can boot and run using either VirtualBox, VMware Player or VMware Workstation."
-HOMEPAGE = "http://www.yoctoproject.org/documentation/build-appliance"
+HOMEPAGE = "https://docs.yoctoproject.org/overview-manual/yp-intro.html#archived-components"
LICENSE = "MIT"
LIC_FILES_CHKSUM = "file://${COREBASE}/meta/COPYING.MIT;md5=3da9cfbcb788c80a0384361b4de20420"
@@ -11,7 +11,7 @@ IMAGE_INSTALL = "packagegroup-core-boot packagegroup-core-ssh-openssh packagegro
IMAGE_FEATURES += "x11-base package-management splash"
-QB_MEM = '${@bb.utils.contains("DISTRO_FEATURES", "opengl", "-m 512", "-m 256", d)}'
+QB_MEM ?= '${@bb.utils.contains("DISTRO_FEATURES", "opengl", "-m 512", "-m 256", d)}'
# Ensure there's enough space to do a core-image-sato build, with rm_work enabled
IMAGE_ROOTFS_EXTRA_SPACE = "41943040"
@@ -22,9 +22,11 @@ APPEND += "rootfstype=ext4 quiet"
DEPENDS = "zip-native python3-pip-native"
IMAGE_FSTYPES = "wic.vmdk wic.vhd wic.vhdx"
-inherit core-image setuptools3
+inherit core-image setuptools3 features_check
-SRCREV ?= "0674ae7bc46ebfa90c55bbedec6b22dc5f48dacf"
+REQUIRED_DISTRO_FEATURES += "xattr"
+
+SRCREV ?= "17723c6e34096a53fb186cc70cfc604bb30da8b9"
SRC_URI = "git://git.yoctoproject.org/poky;branch=master \
file://Yocto_Build_Appliance.vmx \
file://Yocto_Build_Appliance.vmxf \
@@ -63,6 +65,7 @@ fakeroot do_populate_poky_src () {
echo "INHERIT += \"rm_work\"" >> ${IMAGE_ROOTFS}/home/builder/poky/build/conf/auto.conf
echo "export LC_ALL=en_US.utf8" >> ${IMAGE_ROOTFS}/home/builder/.bashrc
+ echo "export TERM=xterm-color" >> ${IMAGE_ROOTFS}/home/builder/.bashrc
# Also save (for reference only) the actual SRCREV used to create this image
echo "export BA_SRCREV=${SRCREV}" >> ${IMAGE_ROOTFS}/home/builder/.bashrc
@@ -108,7 +111,13 @@ fakeroot do_populate_poky_src () {
chown -R builder:builder ${IMAGE_ROOTFS}/home/builder/.cache
}
-IMAGE_PREPROCESS_COMMAND += "do_populate_poky_src; "
+fakeroot do_tweak_image () {
+ # add a /lib64 symlink
+ # this is needed for building rust-native on a 64-bit build appliance
+ ln -rs ${IMAGE_ROOTFS}/lib ${IMAGE_ROOTFS}/lib64
+}
+
+IMAGE_PREPROCESS_COMMAND += "do_populate_poky_src do_tweak_image"
# For pip usage above
do_image[network] = "1"
@@ -124,9 +133,9 @@ create_bundle_files () {
cd ${WORKDIR}
mkdir -p Yocto_Build_Appliance
cp *.vmx* Yocto_Build_Appliance
- ln -sf ${IMGDEPLOYDIR}/${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.wic.vmdk Yocto_Build_Appliance/Yocto_Build_Appliance.vmdk
- ln -sf ${IMGDEPLOYDIR}/${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.wic.vhdx Yocto_Build_Appliance/Yocto_Build_Appliance.vhdx
- ln -sf ${IMGDEPLOYDIR}/${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.wic.vhd Yocto_Build_Appliance/Yocto_Build_Appliance.vhd
+ ln -sf ${IMGDEPLOYDIR}/${IMAGE_NAME}.wic.vmdk Yocto_Build_Appliance/Yocto_Build_Appliance.vmdk
+ ln -sf ${IMGDEPLOYDIR}/${IMAGE_NAME}.wic.vhdx Yocto_Build_Appliance/Yocto_Build_Appliance.vhdx
+ ln -sf ${IMGDEPLOYDIR}/${IMAGE_NAME}.wic.vhd Yocto_Build_Appliance/Yocto_Build_Appliance.vhd
zip -r ${IMGDEPLOYDIR}/Yocto_Build_Appliance-${DATETIME}.zip Yocto_Build_Appliance
ln -sf Yocto_Build_Appliance-${DATETIME}.zip ${IMGDEPLOYDIR}/Yocto_Build_Appliance.zip
}
diff --git a/meta/recipes-core/images/core-image-initramfs-boot.bb b/meta/recipes-core/images/core-image-initramfs-boot.bb
new file mode 100644
index 0000000000..7258944751
--- /dev/null
+++ b/meta/recipes-core/images/core-image-initramfs-boot.bb
@@ -0,0 +1,22 @@
+SUMMARY = "Basic initramfs to boot a fully-featured rootfs"
+DESCRIPTION = "Small initramfs that contains just udev and init, to find the real rootfs."
+LICENSE = "MIT"
+
+INITRAMFS_SCRIPTS ?= "initramfs-framework-base initramfs-module-udev"
+
+PACKAGE_INSTALL = "${INITRAMFS_SCRIPTS} ${VIRTUAL-RUNTIME_base-utils} base-passwd"
+
+# Ensure the initramfs only contains the bare minimum
+IMAGE_FEATURES = ""
+IMAGE_LINGUAS = ""
+
+# Don't allow the initramfs to contain a kernel, as kernel modules will depend
+# on the kernel image.
+PACKAGE_EXCLUDE = "kernel-image-*"
+
+IMAGE_FSTYPES = "${INITRAMFS_FSTYPES}"
+IMAGE_NAME_SUFFIX ?= ""
+IMAGE_ROOTFS_SIZE = "8192"
+IMAGE_ROOTFS_EXTRA_SPACE = "0"
+
+inherit image
diff --git a/meta/recipes-core/images/core-image-minimal-initramfs.bb b/meta/recipes-core/images/core-image-minimal-initramfs.bb
index 664fe7310e..36e8771c4a 100644
--- a/meta/recipes-core/images/core-image-minimal-initramfs.bb
+++ b/meta/recipes-core/images/core-image-minimal-initramfs.bb
@@ -1,4 +1,5 @@
# Simple initramfs image. Mostly used for live images.
+SUMMARY = "Small image capable of booting a device."
DESCRIPTION = "Small image capable of booting a device. The kernel includes \
the Minimal RAM-based Initial Root Filesystem (initramfs), which finds the \
first 'init' program more efficiently."
@@ -16,7 +17,9 @@ PACKAGE_INSTALL = "${INITRAMFS_SCRIPTS} ${VIRTUAL-RUNTIME_base-utils} udev base-
# Do not pollute the initrd image with rootfs features
IMAGE_FEATURES = ""
-export IMAGE_BASENAME = "${MLPREFIX}core-image-minimal-initramfs"
+# Don't allow the initramfs to contain a kernel
+PACKAGE_EXCLUDE = "kernel-image-*"
+
IMAGE_NAME_SUFFIX ?= ""
IMAGE_LINGUAS = ""
@@ -29,4 +32,4 @@ IMAGE_ROOTFS_SIZE = "8192"
IMAGE_ROOTFS_EXTRA_SPACE = "0"
# Use the same restriction as initramfs-module-install
-COMPATIBLE_HOST = '(x86_64.*|i.86.*|arm.*|aarch64.*)-(linux.*|freebsd.*)'
+COMPATIBLE_HOST = '(x86_64.*|i.86.*|arm.*|aarch64.*|loongarch64.*)-(linux.*|freebsd.*)'
diff --git a/meta/recipes-core/images/core-image-ptest-all.bb b/meta/recipes-core/images/core-image-ptest-all.bb
index 1472451675..d4edb0fd05 100644
--- a/meta/recipes-core/images/core-image-ptest-all.bb
+++ b/meta/recipes-core/images/core-image-ptest-all.bb
@@ -1,25 +1,25 @@
-inherit features_check
+SUMMARY = "Recipe to trigger execution of all ptest images."
+HOMEPAGE = "https://www.yoctoproject.org/"
+
+LICENSE = "MIT"
+
+inherit features_check nopackages
REQUIRED_DISTRO_FEATURES = "ptest"
-require core-image-minimal.bb
require conf/distro/include/ptest-packagelists.inc
-DESCRIPTION += "Also includes ptest packages."
-HOMEPAGE = "https://www.yoctoproject.org/"
-
# Include the full set of ptests
-IMAGE_INSTALL += "${PTESTS_FAST} ${PTESTS_SLOW}"
+PTESTS = "${PTESTS_FAST} ${PTESTS_SLOW}"
+
+do_testimage[noexec] = "1"
+do_testimage[depends] = "${@' '.join(['core-image-ptest-'+x+':do_testimage' for x in d.getVar('PTESTS').split()])}"
-# This image is sufficiently large (~1.8GB) that we need to be careful that it fits in a live
-# image (which has a 4GB limit), so nullify the overhead factor (1.3x out of the
-# box) and explicitly add just 1100MB.
-# strace-ptest in particular needs more than 500MB
-IMAGE_OVERHEAD_FACTOR = "1.0"
-IMAGE_ROOTFS_EXTRA_SPACE = "1124288"
+do_build[depends] = "${@' '.join(['core-image-ptest-'+x+':do_build' for x in d.getVar('PTESTS').split()])}"
-# ptests need more memory than standard to avoid the OOM killer
-# also lttng-tools needs /tmp that has at least 2G
-QB_MEM = "-m 4096"
+# normally image.bbclass would do this
+EXCLUDE_FROM_WORLD = "1"
-# Sadly at the moment the full set of ptests is not robust enough and sporadically fails in random places
-PTEST_EXPECT_FAILURE = "1"
+python () {
+ if bb.utils.contains('IMAGE_CLASSES', 'testimage', True, False, d):
+ bb.build.addtask("do_testimage", "", "", d)
+}
diff --git a/meta/recipes-core/images/core-image-ptest-fast.bb b/meta/recipes-core/images/core-image-ptest-fast.bb
index 40df274c4c..a5364e6cbf 100644
--- a/meta/recipes-core/images/core-image-ptest-fast.bb
+++ b/meta/recipes-core/images/core-image-ptest-fast.bb
@@ -1,24 +1,6 @@
-inherit features_check
-REQUIRED_DISTRO_FEATURES = "ptest"
+require core-image-ptest-all.bb
-require core-image-minimal.bb
-require conf/distro/include/ptest-packagelists.inc
+SUMMARY = "Recipe to trigger execution of all fast ptest images."
-# Include only fast (< 30 sec) ptests
-IMAGE_INSTALL += "${PTESTS_FAST}"
+PTESTS = "${PTESTS_FAST}"
-DESCRIPTION += "Also includes ptest packages with fast execution times to allow for more automated QA."
-HOMEPAGE = "https://www.yoctoproject.org/"
-
-# This image is sufficiently large (~1.8GB) that it can't actually fit in a live
-# image (which has a 4GB limit), so nullify the overhead factor (1.3x out of the
-# box) and explicitly add just 1500MB.
-# strace-ptest in particular needs more than 500MB
-IMAGE_OVERHEAD_FACTOR = "1.0"
-IMAGE_ROOTFS_EXTRA_SPACE = "1524288"
-
-# ptests need more memory than standard to avoid the OOM killer
-QB_MEM = "-m 1024"
-
-# Sadly at the moment the fast set of ptests is not robust enough and sporadically fails in a couple of places
-PTEST_EXPECT_FAILURE = "1"
diff --git a/meta/recipes-core/images/core-image-ptest.bb b/meta/recipes-core/images/core-image-ptest.bb
new file mode 100644
index 0000000000..72081f938d
--- /dev/null
+++ b/meta/recipes-core/images/core-image-ptest.bb
@@ -0,0 +1,44 @@
+inherit features_check
+REQUIRED_DISTRO_FEATURES = "ptest"
+
+require core-image-minimal.bb
+require conf/distro/include/ptest-packagelists.inc
+
+DESCRIPTION += "Also including the ${MCNAME} ptest package."
+SUMMARY ?= "${MCNAME} ptest image."
+HOMEPAGE = "https://www.yoctoproject.org/"
+
+PTESTS = "${PTESTS_SLOW} ${PTESTS_FAST}"
+
+IMAGE_INSTALL:append = " ${MCNAME}-ptest openssh"
+
+BBCLASSEXTEND = "${@' '.join(['mcextend:'+x for x in d.getVar('PTESTS').split()])}"
+
+# The image can sufficiently large (~1.8GB) that we need to be careful that it fits in a live
+# image (which has a 4GB limit), so nullify the overhead factor (1.3x out of the
+# box) and explicitly add up to 1500MB.
+# strace-ptest in particular needs more than 500MB
+IMAGE_OVERHEAD_FACTOR = "1.0"
+IMAGE_ROOTFS_EXTRA_SPACE = "324288"
+IMAGE_ROOTFS_EXTRA_SPACE:virtclass-mcextend-mdadm = "1524288"
+IMAGE_ROOTFS_EXTRA_SPACE:virtclass-mcextend-strace = "1524288"
+IMAGE_ROOTFS_EXTRA_SPACE:virtclass-mcextend-lttng-tools = "1524288"
+
+# tar-ptest in particular needs more space
+IMAGE_ROOTFS_EXTRA_SPACE:virtclass-mcextend-tar = "1524288"
+
+# ptests need more memory than standard to avoid the OOM killer
+QB_MEM = "-m 1024"
+QB_MEM:virtclass-mcextend-lttng-tools = "-m 4096"
+QB_MEM:virtclass-mcextend-python3 = "-m 2048"
+QB_MEM:virtclass-mcextend-python3-cryptography = "-m 5100"
+
+TEST_SUITES = "ping ssh parselogs ptest"
+
+# Sadly at the moment the full set of ptests is not robust enough and sporadically fails in random places
+PTEST_EXPECT_FAILURE = "1"
+
+python () {
+ if not d.getVar("MCNAME"):
+ raise bb.parse.SkipRecipe("No class extension set")
+}
diff --git a/meta/recipes-core/images/core-image-tiny-initramfs.bb b/meta/recipes-core/images/core-image-tiny-initramfs.bb
index a1883de0eb..7a71d55b1e 100644
--- a/meta/recipes-core/images/core-image-tiny-initramfs.bb
+++ b/meta/recipes-core/images/core-image-tiny-initramfs.bb
@@ -1,4 +1,5 @@
# Simple initramfs image artifact generation for tiny images.
+SUMMARY = "Tiny image capable of booting a device."
DESCRIPTION = "Tiny image capable of booting a device. The kernel includes \
the Minimal RAM-based Initial Root Filesystem (initramfs), which finds the \
first 'init' program more efficiently. core-image-tiny-initramfs doesn't \
@@ -12,7 +13,6 @@ PACKAGE_INSTALL = "initramfs-live-boot-tiny packagegroup-core-boot dropbear ${VI
# Do not pollute the initrd image with rootfs features
IMAGE_FEATURES = ""
-export IMAGE_BASENAME = "core-image-tiny-initramfs"
IMAGE_NAME_SUFFIX ?= ""
IMAGE_LINGUAS = ""
@@ -39,6 +39,6 @@ python tinyinitrd () {
init.write(newinit)
}
-IMAGE_PREPROCESS_COMMAND += "tinyinitrd;"
+IMAGE_PREPROCESS_COMMAND += "tinyinitrd"
QB_KERNEL_CMDLINE_APPEND += "debugshell=3 init=/bin/busybox sh init"
diff --git a/meta/recipes-core/init-ifupdown/init-ifupdown-1.0/interfaces b/meta/recipes-core/init-ifupdown/init-ifupdown-1.0/interfaces
index 0acf4cf441..3d0f0c6768 100644
--- a/meta/recipes-core/init-ifupdown/init-ifupdown-1.0/interfaces
+++ b/meta/recipes-core/init-ifupdown/init-ifupdown-1.0/interfaces
@@ -13,11 +13,15 @@ iface wlan0 inet dhcp
iface atml0 inet dhcp
-# Wired or wireless interfaces
+# Wired or wireless interfaces including predictable names
auto eth0
iface eth0 inet dhcp
iface eth1 inet dhcp
+# Busybox ifupdown won't process /en* correctly
+auto /en*=eth
+iface eth inet dhcp
+
# Ethernet/RNDIS gadget (g_ether)
# ... or on host side, usbnet and random hwaddr
iface usb0 inet static
diff --git a/meta/recipes-core/init-ifupdown/init-ifupdown_1.0.bb b/meta/recipes-core/init-ifupdown/init-ifupdown_1.0.bb
index d39323db2f..409b1c0403 100644
--- a/meta/recipes-core/init-ifupdown/init-ifupdown_1.0.bb
+++ b/meta/recipes-core/init-ifupdown/init-ifupdown_1.0.bb
@@ -4,7 +4,6 @@ HOMEPAGE = "http://packages.debian.org/ifupdown"
SECTION = "base"
LICENSE = "GPL-2.0-only"
LIC_FILES_CHKSUM = "file://${WORKDIR}/copyright;md5=3dd6192d306f582dee7687da3d8748ab"
-PR = "r7"
inherit update-rc.d
diff --git a/meta/recipes-core/initrdscripts/initramfs-boot_1.0.bb b/meta/recipes-core/initrdscripts/initramfs-boot_1.0.bb
index ab460ac797..198459f2b2 100644
--- a/meta/recipes-core/initrdscripts/initramfs-boot_1.0.bb
+++ b/meta/recipes-core/initrdscripts/initramfs-boot_1.0.bb
@@ -3,7 +3,6 @@ LICENSE = "MIT"
LIC_FILES_CHKSUM = "file://${COREBASE}/meta/COPYING.MIT;md5=3da9cfbcb788c80a0384361b4de20420"
SRC_URI = "file://init-boot.sh"
-PR = "r2"
S = "${WORKDIR}"
diff --git a/meta/recipes-core/initrdscripts/initramfs-framework/finish b/meta/recipes-core/initrdscripts/initramfs-framework/finish
index f08a920867..ac0de9f996 100755
--- a/meta/recipes-core/initrdscripts/initramfs-framework/finish
+++ b/meta/recipes-core/initrdscripts/initramfs-framework/finish
@@ -26,6 +26,15 @@ finish_run() {
info "Switching root to '$ROOTFS_DIR'..."
+ debug "Moving basic mounts onto rootfs"
+ for dir in `awk '/\/dev.* \/run\/media/{print $2}' /proc/mounts`; do
+ # Parse any OCT or HEX encoded chars such as spaces
+ # in the mount points to actual ASCII chars
+ dir=`printf $dir`
+ mkdir -p "${ROOTFS_DIR}/media/${dir##*/}"
+ mount -n --move "$dir" "${ROOTFS_DIR}/media/${dir##*/}"
+ done
+
debug "Moving /dev, /proc and /sys onto rootfs..."
mount --move /dev $ROOTFS_DIR/dev
mount --move /proc $ROOTFS_DIR/proc
diff --git a/meta/recipes-core/initrdscripts/initramfs-framework/overlayroot b/meta/recipes-core/initrdscripts/initramfs-framework/overlayroot
index d40342dc59..0d41432878 100644
--- a/meta/recipes-core/initrdscripts/initramfs-framework/overlayroot
+++ b/meta/recipes-core/initrdscripts/initramfs-framework/overlayroot
@@ -15,9 +15,11 @@
# accessing the original, unmodified rootfs at /rofs after boot.
#
# It relies on the initramfs-module-rootfs to mount the original
-# root filesystem, and requires 'rootrw=<foo>' to be passed as a
+# root filesystem, and requires 'overlayrootrwdev=<foo>' to be passed as a
# kernel parameter, specifying the device/partition intended to
# use as RW.
+# Mount options of the RW device can be tweaked with 'overlayrootfstype='
+# (defaults to 'ext4') and 'overlayrootfsflags=' ('defaults').
#
# This module needs to be executed after the initramfs-module-rootfs
# since it relies on it to mount the filesystem at initramfs startup
@@ -72,15 +74,19 @@ exit_gracefully() {
eval "finish_run"
}
+# migrate legacy parameter
+if [ ! -z "$bootparam_rootrw" ]; then
+ bootparam_overlayrootrwdev="$bootparam_rootrw"
+fi
-if [ -z "$bootparam_rootrw" ]; then
- exit_gracefully "rootrw= kernel parameter doesn't exist and its required to mount the overlayfs"
+if [ -z "$bootparam_overlayrootrwdev" ]; then
+ exit_gracefully "overlayrootrwdev= kernel parameter doesn't exist and its required to mount the overlayfs"
fi
mkdir -p ${RWMOUNT}
# Mount RW device
-if mount -n -t ${bootparam_rootfstype:-ext4} -o ${bootparam_rootflags:-defaults} ${bootparam_rootrw} ${RWMOUNT}
+if mount -n -t ${bootparam_overlayrootfstype:-ext4} -o ${bootparam_overlayrootfsflags:-defaults} ${bootparam_overlayrootrwdev} ${RWMOUNT}
then
# Set up overlay directories
mkdir -p ${UPPER_DIR}
diff --git a/meta/recipes-core/initrdscripts/initramfs-framework_1.0.bb b/meta/recipes-core/initrdscripts/initramfs-framework_1.0.bb
index 4e76e20026..39ea51ccbd 100644
--- a/meta/recipes-core/initrdscripts/initramfs-framework_1.0.bb
+++ b/meta/recipes-core/initrdscripts/initramfs-framework_1.0.bb
@@ -4,7 +4,6 @@ LIC_FILES_CHKSUM = "file://${COREBASE}/meta/COPYING.MIT;md5=3da9cfbcb788c80a0384
RDEPENDS:${PN} += "${VIRTUAL-RUNTIME_base-utils}"
RRECOMMENDS:${PN} = "${VIRTUAL-RUNTIME_base-utils-syslog}"
-PR = "r4"
inherit allarch
diff --git a/meta/recipes-core/initrdscripts/initramfs-live-boot-tiny_1.0.bb b/meta/recipes-core/initrdscripts/initramfs-live-boot-tiny_1.0.bb
index 59a530506a..847dbc0472 100644
--- a/meta/recipes-core/initrdscripts/initramfs-live-boot-tiny_1.0.bb
+++ b/meta/recipes-core/initrdscripts/initramfs-live-boot-tiny_1.0.bb
@@ -5,7 +5,6 @@ DEPENDS = "virtual/kernel"
RDEPENDS:${PN} = "busybox-mdev"
SRC_URI = "file://init-live.sh"
-PR = "r12"
S = "${WORKDIR}"
diff --git a/meta/recipes-core/initrdscripts/initramfs-live-boot_1.0.bb b/meta/recipes-core/initrdscripts/initramfs-live-boot_1.0.bb
index 8f56d7ab90..b3b991b8fd 100644
--- a/meta/recipes-core/initrdscripts/initramfs-live-boot_1.0.bb
+++ b/meta/recipes-core/initrdscripts/initramfs-live-boot_1.0.bb
@@ -5,7 +5,6 @@ DEPENDS = "virtual/kernel"
RDEPENDS:${PN} = "udev udev-extraconf"
SRC_URI = "file://init-live.sh"
-PR = "r12"
S = "${WORKDIR}"
diff --git a/meta/recipes-core/initrdscripts/initramfs-live-install-efi_1.0.bb b/meta/recipes-core/initrdscripts/initramfs-live-install-efi_1.0.bb
index ecbd567d6c..e10faadfbe 100644
--- a/meta/recipes-core/initrdscripts/initramfs-live-install-efi_1.0.bb
+++ b/meta/recipes-core/initrdscripts/initramfs-live-install-efi_1.0.bb
@@ -3,7 +3,6 @@ LICENSE = "MIT"
LIC_FILES_CHKSUM = "file://${COREBASE}/meta/COPYING.MIT;md5=3da9cfbcb788c80a0384361b4de20420"
SRC_URI = "file://init-install-efi.sh"
-PR = "r1"
RDEPENDS:${PN} = "parted e2fsprogs-mke2fs dosfstools util-linux-blkid ${VIRTUAL-RUNTIME_base-utils}"
RRECOMMENDS:${PN} = "${VIRTUAL-RUNTIME_base-utils-syslog}"
diff --git a/meta/recipes-core/initrdscripts/initramfs-live-install_1.0.bb b/meta/recipes-core/initrdscripts/initramfs-live-install_1.0.bb
index 674d49ecdf..9046d06c02 100644
--- a/meta/recipes-core/initrdscripts/initramfs-live-install_1.0.bb
+++ b/meta/recipes-core/initrdscripts/initramfs-live-install_1.0.bb
@@ -3,7 +3,6 @@ LICENSE = "MIT"
LIC_FILES_CHKSUM = "file://${COREBASE}/meta/COPYING.MIT;md5=3da9cfbcb788c80a0384361b4de20420"
SRC_URI = "file://init-install.sh"
-PR = "r9"
S = "${WORKDIR}"
diff --git a/meta/recipes-core/initrdscripts/initramfs-module-install-efi_1.0.bb b/meta/recipes-core/initrdscripts/initramfs-module-install-efi_1.0.bb
index fb19484555..b7499644f0 100644
--- a/meta/recipes-core/initrdscripts/initramfs-module-install-efi_1.0.bb
+++ b/meta/recipes-core/initrdscripts/initramfs-module-install-efi_1.0.bb
@@ -4,7 +4,6 @@ LIC_FILES_CHKSUM = "file://${COREBASE}/meta/COPYING.MIT;md5=3da9cfbcb788c80a0384
RDEPENDS:${PN} = "initramfs-framework-base parted e2fsprogs-mke2fs dosfstools util-linux-blkid ${VIRTUAL-RUNTIME_base-utils}"
RRECOMMENDS:${PN} = "${VIRTUAL-RUNTIME_base-utils-syslog}"
-PR = "r4"
SRC_URI = "file://init-install-efi.sh"
diff --git a/meta/recipes-core/initrdscripts/initramfs-module-install_1.0.bb b/meta/recipes-core/initrdscripts/initramfs-module-install_1.0.bb
index 4d48d54e9e..11db7124af 100644
--- a/meta/recipes-core/initrdscripts/initramfs-module-install_1.0.bb
+++ b/meta/recipes-core/initrdscripts/initramfs-module-install_1.0.bb
@@ -5,11 +5,10 @@ RDEPENDS:${PN} = "initramfs-framework-base grub parted e2fsprogs-mke2fs util-lin
RRECOMMENDS:${PN} = "${VIRTUAL-RUNTIME_base-utils-syslog}"
# The same restriction as grub
-COMPATIBLE_HOST = '(x86_64.*|i.86.*|arm.*|aarch64.*)-(linux.*|freebsd.*)'
+COMPATIBLE_HOST = '(x86_64.*|i.86.*|arm.*|aarch64.*|loongarch64.*)-(linux.*|freebsd.*)'
COMPATIBLE_HOST:armv7a = 'null'
COMPATIBLE_HOST:armv7ve = 'null'
-PR = "r1"
SRC_URI = "file://init-install.sh"
diff --git a/meta/recipes-core/initrdscripts/initramfs-module-setup-live_1.0.bb b/meta/recipes-core/initrdscripts/initramfs-module-setup-live_1.0.bb
index d8ce024b83..48a779e9aa 100644
--- a/meta/recipes-core/initrdscripts/initramfs-module-setup-live_1.0.bb
+++ b/meta/recipes-core/initrdscripts/initramfs-module-setup-live_1.0.bb
@@ -3,7 +3,6 @@ LICENSE = "MIT"
LIC_FILES_CHKSUM = "file://${COREBASE}/meta/COPYING.MIT;md5=3da9cfbcb788c80a0384361b4de20420"
RDEPENDS:${PN} = "initramfs-framework-base udev-extraconf"
-PR = "r4"
inherit allarch
diff --git a/meta/recipes-core/initscripts/init-system-helpers_1.62.bb b/meta/recipes-core/initscripts/init-system-helpers_1.62.bb
deleted file mode 100644
index d5c05ff8e5..0000000000
--- a/meta/recipes-core/initscripts/init-system-helpers_1.62.bb
+++ /dev/null
@@ -1,41 +0,0 @@
-SUMMARY = "helper tools for all init systems"
-DESCRIPTION = "This package contains helper tools that are necessary for switching between \
-the various init systems that Debian contains (e. g. sysvinit or \
-systemd). An example is deb-systemd-helper, a script that enables systemd unit \
-files without depending on a running systemd. \
-\
-It also includes the \"service\", \"invoke-rc.d\", and \"update-rc.d\" scripts which \
-provide an abstraction for enabling, disabling, starting, and stopping \
-services for all supported Debian init systems as specified by the policy. \
-\
-While this package is maintained by pkg-systemd-maintainers, it is NOT \
-specific to systemd at all. Maintainers of other init systems are welcome to \
-include their helpers in this package."
-HOMEPAGE = "https://salsa.debian.org/debian/init-system-helpers"
-SECTION = "base"
-LICENSE = "BSD-3-Clause & GPL-2.0-only"
-LIC_FILES_CHKSUM = "file://debian/copyright;md5=ee2b1830fcfead84d07bc060ec43e072"
-
-SRCREV = "bbe4b508f3216cdc124683ba449e2295974a6b4a"
-SRC_URI = "git://salsa.debian.org/debian/init-system-helpers.git;protocol=https;branch=master"
-
-S = "${WORKDIR}/git"
-
-do_configure[noexec] = "1"
-do_compile[noexec] = "1"
-
-do_install() {
- install -d -m 0755 ${D}${sbindir}
- install -m 0755 ${S}/script/invoke-rc.d ${D}${sbindir}
- install -m 0755 ${S}/script/service ${D}${sbindir}
-}
-
-PACKAGES += "${PN}-invoke-rc.d ${PN}-service"
-
-FILES:${PN} = ""
-FILES:${PN}-invoke-rc.d = "${sbindir}/invoke-rc.d"
-FILES:${PN}-service = "${sbindir}/service"
-
-ALLOW_EMPTY:${PN} = "1"
-
-RRECOMMENDS:${PN} += "${PN}-invoke-rc.d ${PN}-service"
diff --git a/meta/recipes-core/initscripts/init-system-helpers_1.66.bb b/meta/recipes-core/initscripts/init-system-helpers_1.66.bb
new file mode 100644
index 0000000000..64e08aae68
--- /dev/null
+++ b/meta/recipes-core/initscripts/init-system-helpers_1.66.bb
@@ -0,0 +1,42 @@
+SUMMARY = "helper tools for all init systems"
+DESCRIPTION = "This package contains helper tools that are necessary for switching between \
+the various init systems that Debian contains (e. g. sysvinit or \
+systemd). An example is deb-systemd-helper, a script that enables systemd unit \
+files without depending on a running systemd. \
+\
+It also includes the \"service\", \"invoke-rc.d\", and \"update-rc.d\" scripts which \
+provide an abstraction for enabling, disabling, starting, and stopping \
+services for all supported Debian init systems as specified by the policy. \
+\
+While this package is maintained by pkg-systemd-maintainers, it is NOT \
+specific to systemd at all. Maintainers of other init systems are welcome to \
+include their helpers in this package."
+HOMEPAGE = "https://salsa.debian.org/debian/init-system-helpers"
+SECTION = "base"
+LICENSE = "BSD-3-Clause & GPL-2.0-only"
+LIC_FILES_CHKSUM = "file://debian/copyright;md5=c4ec20aa158fa9de26ee1accf78dcaae"
+
+SRCREV = "a5439f465dc1d1d4e12329208dc321fb806009f4"
+SRC_URI = "git://salsa.debian.org/debian/init-system-helpers.git;protocol=https;branch=master"
+UPSTREAM_CHECK_GITTAGREGEX = "(?P<pver>(\d+(\.\d+)+))(?!_exp)"
+
+S = "${WORKDIR}/git"
+
+do_configure[noexec] = "1"
+do_compile[noexec] = "1"
+
+do_install() {
+ install -d -m 0755 ${D}${sbindir}
+ install -m 0755 ${S}/script/invoke-rc.d ${D}${sbindir}
+ install -m 0755 ${S}/script/service ${D}${sbindir}
+}
+
+PACKAGES += "${PN}-invoke-rc.d ${PN}-service"
+
+FILES:${PN} = ""
+FILES:${PN}-invoke-rc.d = "${sbindir}/invoke-rc.d"
+FILES:${PN}-service = "${sbindir}/service"
+
+ALLOW_EMPTY:${PN} = "1"
+
+RRECOMMENDS:${PN} += "${PN}-invoke-rc.d ${PN}-service"
diff --git a/meta/recipes-core/initscripts/initscripts-1.0/read-only-rootfs-hook.sh b/meta/recipes-core/initscripts/initscripts-1.0/read-only-rootfs-hook.sh
index 6706a117f7..a29773647f 100644
--- a/meta/recipes-core/initscripts/initscripts-1.0/read-only-rootfs-hook.sh
+++ b/meta/recipes-core/initscripts/initscripts-1.0/read-only-rootfs-hook.sh
@@ -37,9 +37,9 @@ if [ "$1" = "start" ] ; then
mkdir -p /var/volatile/.lib-work
# Try to mount using overlay, which is much faster than copying
# files. If that fails, fallback to the slower copy
- if ! mount -t overlay overlay -olowerdir=/var/lib,upperdir=/var/volatile/lib,workdir=/var/volatile/.lib-work /var/lib > /dev/null 2>&1; then
+ if ! mount -t overlay overlay SED_VARLIBMOUNTARGS -olowerdir=/var/lib,upperdir=/var/volatile/lib,workdir=/var/volatile/.lib-work /var/lib > /dev/null 2>&1; then
cp -a /var/lib/* /var/volatile/lib
- mount --bind /var/volatile/lib /var/lib
+ mount SED_VARLIBMOUNTARGS --bind /var/volatile/lib /var/lib
fi
fi
fi
diff --git a/meta/recipes-core/initscripts/initscripts_1.0.bb b/meta/recipes-core/initscripts/initscripts_1.0.bb
index 2244d1b292..e61ac554f3 100644
--- a/meta/recipes-core/initscripts/initscripts_1.0.bb
+++ b/meta/recipes-core/initscripts/initscripts_1.0.bb
@@ -4,7 +4,6 @@ DESCRIPTION = "Initscripts provide the basic system startup initialization scrip
SECTION = "base"
LICENSE = "GPL-2.0-only"
LIC_FILES_CHKSUM = "file://functions;beginline=7;endline=7;md5=829e563511c9a1d6d41f17a7a4989d6a"
-PR = "r155"
INHIBIT_DEFAULT_DEPS = "1"
@@ -59,10 +58,12 @@ FILES:${PN}-functions = "${sysconfdir}/init.d/functions*"
FILES:${PN}-sushell = "${base_sbindir}/sushell"
HALTARGS ?= "-d -f"
+VARLIBMOUNTARGS ?= ""
do_configure() {
sed -i -e "s:SED_HALTARGS:${HALTARGS}:g" ${WORKDIR}/halt
sed -i -e "s:SED_HALTARGS:${HALTARGS}:g" ${WORKDIR}/reboot
+ sed -i -e "s:SED_VARLIBMOUNTARGS:${VARLIBMOUNTARGS}:g" ${WORKDIR}/read-only-rootfs-hook.sh
}
do_install () {
@@ -108,6 +109,9 @@ do_install () {
sed -i -e '\@^d root root 0755 /var/volatile/log none$@ a\l root root 0755 /var/log /var/volatile/log' \
${D}${sysconfdir}/default/volatiles/00_core
fi
+ if [ "${VOLATILE_TMP_DIR}" != "yes" ]; then
+ sed -i -e "/\<tmp\>/d" ${D}${sysconfdir}/default/volatiles/00_core
+ fi
install -m 0755 ${WORKDIR}/dmesg.sh ${D}${sysconfdir}/init.d
install -m 0644 ${WORKDIR}/logrotate-dmesg.conf ${D}${sysconfdir}/
@@ -130,7 +134,7 @@ do_install () {
update-rc.d -r ${D} rmnologin.sh start 99 2 3 4 5 .
update-rc.d -r ${D} sendsigs start 20 0 6 .
update-rc.d -r ${D} urandom start 38 S 0 6 .
- update-rc.d -r ${D} umountnfs.sh start 31 0 1 6 .
+ update-rc.d -r ${D} umountnfs.sh stop 31 0 1 6 .
update-rc.d -r ${D} umountfs start 40 0 6 .
update-rc.d -r ${D} reboot start 90 6 .
update-rc.d -r ${D} halt start 90 0 .
diff --git a/meta/recipes-core/kbd/kbd/0001-Remove-non-free-Agafari-fonts.patch b/meta/recipes-core/kbd/kbd/0001-Remove-non-free-Agafari-fonts.patch
new file mode 100644
index 0000000000..de279b55f9
--- /dev/null
+++ b/meta/recipes-core/kbd/kbd/0001-Remove-non-free-Agafari-fonts.patch
@@ -0,0 +1,73 @@
+From b757e6842f9631757f0d1a6b3833aabffa9ffeee Mon Sep 17 00:00:00 2001
+From: Alexey Gladkov <legion@kernel.org>
+Date: Thu, 29 Feb 2024 17:38:37 +0100
+Subject: [PATCH] Remove non-free Agafari fonts
+
+Based on legal analysis, we are removing non-free fonts for now. If we
+can change the license of these fonts, we will return them back.
+
+From: Stanislav Brabec <sbrabec@suse.com>
+Date: Wed, 28 Feb 2024 16:47:54 +0100
+Subject: kbd: Legal problems of Agafari fonts
+
+ The data/consolefonts/README.Ethiopic contains a notice:
+ Agafari:
+ Donated by the Ethiopian Science and Technology Commission
+ <ncic@padis.gn.apc.org> or <ncic@telecom.net.et> and may be redistributed
+ for non-commercial use under Unix environments only.
+
+ According to our legal review, it makes impossible to distribute these
+ fonts as part of any commercial products, and even makes it impossible to
+ distribute kbd sources as part of any commercial product services.
+
+ Additionally, it makes the whole kbd package incompatible with GPL, so the
+ COPYING file (created during build of the tarball) cannot declare GPL
+ version 2. It also violates section 6 of GPL (no further restrictions).
+
+ That is why several GNU/Linux distributions exclude Agafari from the
+ release. To be on a safe side, SUSE even decided to repack any source
+ tarballs before putting it to their servers.
+
+ This was probably reported to the former kbd maintainer about 20 years ago,
+ but nothing changed over years.
+
+ That is why I recommend removing Agafari fonts and removing the reference
+ to them from README.Ethiopic. Alternatively, you can ask the Ethiopian
+ Science and Technology Commission for re-licensing.
+
+Signed-off-by: Alexey Gladkov <legion@kernel.org>
+
+Upstream-Status: Backport [https://github.com/legionus/kbd/commit/b757e6842f9631757f0d1a6b3833aabffa9ffeee]
+
+[do_configure prepend added to remove binary files]
+
+Signed-off-by: Peter Marko <peter.marko@siemens.com>
+---
+ data/consolefonts/Agafari-12.psfu | Bin 7989 -> 0 bytes
+ data/consolefonts/Agafari-14.psfu | Bin 9013 -> 0 bytes
+ data/consolefonts/Agafari-16.psfu | Bin 10037 -> 0 bytes
+ data/consolefonts/README.Ethiopic | 5 -----
+ 4 files changed, 5 deletions(-)
+ delete mode 100644 data/consolefonts/Agafari-12.psfu
+ delete mode 100644 data/consolefonts/Agafari-14.psfu
+ delete mode 100644 data/consolefonts/Agafari-16.psfu
+
+diff --git a/data/consolefonts/README.Ethiopic b/data/consolefonts/README.Ethiopic
+index 7502722..2810797 100644
+--- a/data/consolefonts/README.Ethiopic
++++ b/data/consolefonts/README.Ethiopic
+@@ -14,11 +14,6 @@ Ethiopic fonts:
+ restrictions below:
+
+
+-Agafari:
+- Donated by the Ethiopian Science and Technology Commission
+- <ncic@padis.gn.apc.org> or <ncic@telecom.net.et> and may be redistributed
+- for non-commercial use under Unix environments only.
+-
+ Goha and GohaClassic:
+ Donated by Yitna Firdyiwek <ybf2u@virgina.edu> of GohaTibeb Associates
+ and may be redistributed without restriction under the GNU GPL 2.0.
+--
+2.30.2
+
diff --git a/meta/recipes-core/kbd/kbd_2.4.0.bb b/meta/recipes-core/kbd/kbd_2.4.0.bb
deleted file mode 100644
index 4b23f1a89d..0000000000
--- a/meta/recipes-core/kbd/kbd_2.4.0.bb
+++ /dev/null
@@ -1,46 +0,0 @@
-SUMMARY = "Keytable files and keyboard utilities"
-HOMEPAGE = "http://www.kbd-project.org/"
-DESCRIPTION = "The kbd project contains tools for managing Linux console (Linux console, virtual terminals, keyboard, etc.) – mainly, what they do is loading console fonts and keyboard maps."
-# everything minus console-fonts is GPL-2.0-or-later
-LICENSE = "GPL-2.0-or-later"
-LIC_FILES_CHKSUM = "file://COPYING;md5=892f569a555ba9c07a568a7c0c4fa63a"
-
-inherit autotools gettext pkgconfig
-
-DEPENDS += "flex-native"
-
-RREPLACES:${PN} = "console-tools"
-RPROVIDES:${PN} = "console-tools"
-RCONFLICTS:${PN} = "console-tools"
-
-SRC_URI = "${KERNELORG_MIRROR}/linux/utils/${BPN}/${BP}.tar.xz \
- "
-
-SRC_URI[sha256sum] = "55f0740458cfd3a84e775e50d7e8b92dc01846db1edad8e2411ccc293ece9b9f"
-
-PACKAGECONFIG ?= "${@bb.utils.filter('DISTRO_FEATURES', 'pam', d)} \
- "
-
-PACKAGECONFIG[pam] = "--enable-vlock, --disable-vlock, libpam,"
-
-PACKAGES += "${PN}-consolefonts ${PN}-keymaps ${PN}-unimaps ${PN}-consoletrans"
-
-FILES:${PN}-consolefonts = "${datadir}/consolefonts"
-FILES:${PN}-consoletrans = "${datadir}/consoletrans"
-FILES:${PN}-keymaps = "${datadir}/keymaps"
-FILES:${PN}-unimaps = "${datadir}/unimaps"
-
-do_install:append () {
- if [ "${@bb.utils.contains('DISTRO_FEATURES', 'pam', 'yes', 'no', d)}" = "yes" ] \
- && [ -f ${D}${sysconfdir}/pam.d/vlock ]; then
- mv -f ${D}${sysconfdir}/pam.d/vlock ${D}${sysconfdir}/pam.d/vlock.kbd
- fi
-}
-
-inherit update-alternatives
-
-ALTERNATIVE:${PN} = "chvt deallocvt fgconsole openvt showkey \
- ${@bb.utils.contains('DISTRO_FEATURES', 'pam', 'vlock','', d)}"
-ALTERNATIVE_PRIORITY = "100"
-
-BBCLASSEXTEND = "native"
diff --git a/meta/recipes-core/kbd/kbd_2.6.4.bb b/meta/recipes-core/kbd/kbd_2.6.4.bb
new file mode 100644
index 0000000000..2331b51e59
--- /dev/null
+++ b/meta/recipes-core/kbd/kbd_2.6.4.bb
@@ -0,0 +1,67 @@
+SUMMARY = "Keytable files and keyboard utilities"
+HOMEPAGE = "http://www.kbd-project.org/"
+DESCRIPTION = "The kbd project contains tools for managing Linux console (Linux console, virtual terminals, keyboard, etc.) – mainly, what they do is loading console fonts and keyboard maps."
+
+# consolefonts and keymaps contain also some public domain and author notice licenses
+LICENSE = "GPL-2.0-or-later & LGPL-2.0-or-later & GPL-3.0-or-later"
+LIC_FILES_CHKSUM = " \
+ file://COPYING;md5=b234ee4d69f5fce4486a80fdaf4a4263 \
+ file://data/keymaps/pine/en.map;beginline=2;endline=15;md5=20914a59c0546a7b77ebf959bc88ad5d \
+"
+LICENSE:${PN} = "GPL-2.0-or-later & LGPL-2.0-or-later"
+LICENSE:${PN}-consolefonts = "GPL-2.0-or-later"
+LICENSE:${PN}-consoletrans = "GPL-2.0-or-later"
+LICENSE:${PN}-keymaps-pine = "GPL-3.0-or-later"
+LICENSE:${PN}-keymaps = "GPL-2.0-or-later"
+LICENSE:${PN}-unimaps = "GPL-2.0-or-later"
+
+inherit autotools gettext pkgconfig
+
+DEPENDS += "flex-native"
+
+RREPLACES:${PN} = "console-tools"
+RPROVIDES:${PN} = "console-tools"
+RCONFLICTS:${PN} = "console-tools"
+
+SRC_URI = "${KERNELORG_MIRROR}/linux/utils/${BPN}/${BP}.tar.xz \
+ file://0001-Remove-non-free-Agafari-fonts.patch \
+ "
+
+SRC_URI[sha256sum] = "519f8d087aecca7e0a33cd084bef92c066eb19731666653dcc70c9d71aa40926"
+
+EXTRA_OECONF = "--disable-tests"
+PACKAGECONFIG ?= "${@bb.utils.filter('DISTRO_FEATURES', 'pam', d)} \
+ "
+
+PACKAGECONFIG[pam] = "--enable-vlock, --disable-vlock, libpam,"
+
+PACKAGES += "${PN}-consolefonts ${PN}-keymaps-pine ${PN}-keymaps ${PN}-unimaps ${PN}-consoletrans"
+
+FILES:${PN}-consolefonts = "${datadir}/consolefonts"
+FILES:${PN}-consoletrans = "${datadir}/consoletrans"
+FILES:${PN}-keymaps-pine = "${datadir}/keymaps/pine"
+FILES:${PN}-keymaps = "${datadir}/keymaps"
+FILES:${PN}-unimaps = "${datadir}/unimaps"
+
+RRECOMMENDS:${PN}-keymaps = "${PN}-keymaps-pine"
+
+# remove this when upgrading to newer version which has integrated
+# https://github.com/legionus/kbd/commit/b757e6842f9631757f0d1a6b3833aabffa9ffeee
+do_configure:prepend() {
+ rm -rf ${S}/data/consolefonts/Agafari-1*
+}
+
+do_install:append () {
+ if [ "${@bb.utils.contains('DISTRO_FEATURES', 'pam', 'yes', 'no', d)}" = "yes" ] \
+ && [ -f ${D}${sysconfdir}/pam.d/vlock ]; then
+ mv -f ${D}${sysconfdir}/pam.d/vlock ${D}${sysconfdir}/pam.d/vlock.kbd
+ fi
+}
+
+inherit update-alternatives
+
+ALTERNATIVE:${PN} = "chvt deallocvt fgconsole openvt showkey \
+ ${@bb.utils.contains('DISTRO_FEATURES', 'pam', 'vlock','', d)}"
+ALTERNATIVE_PRIORITY = "100"
+
+BBCLASSEXTEND = "native"
diff --git a/meta/recipes-core/libcgroup/libcgroup_2.0.2.bb b/meta/recipes-core/libcgroup/libcgroup_2.0.2.bb
deleted file mode 100644
index 7ade372cae..0000000000
--- a/meta/recipes-core/libcgroup/libcgroup_2.0.2.bb
+++ /dev/null
@@ -1,33 +0,0 @@
-SUMMARY = "Linux control group abstraction library"
-HOMEPAGE = "http://libcg.sourceforge.net/"
-DESCRIPTION = "libcgroup is a library that abstracts the control group file system \
-in Linux. Control groups allow you to limit, account and isolate resource usage \
-(CPU, memory, disk I/O, etc.) of groups of processes."
-SECTION = "libs"
-LICENSE = "LGPL-2.1-only"
-LIC_FILES_CHKSUM = "file://COPYING;md5=2d5025d4aa3495befef8f17206a5b0a1"
-
-inherit autotools pkgconfig
-
-DEPENDS = "bison-native flex-native"
-
-SRC_URI = "https://github.com/${BPN}/${BPN}/releases/download/v${PV}/${BP}.tar.gz"
-
-SRC_URI[sha256sum] = "8ef63b32e0aff619547dbb8a25e1f6bab152d7c4864795cf915571a5994d0cf8"
-UPSTREAM_CHECK_URI = "https://github.com/libcgroup/libcgroup/releases/"
-
-DEPENDS:append:libc-musl = " fts "
-EXTRA_OEMAKE:append:libc-musl = " LIBS=-lfts"
-
-PACKAGECONFIG = "${@bb.utils.filter('DISTRO_FEATURES', 'pam', d)}"
-PACKAGECONFIG[pam] = "--enable-pam-module-dir=${base_libdir}/security --enable-pam=yes,--enable-pam=no,libpam"
-
-PACKAGES =+ "cgroups-pam-plugin"
-FILES:cgroups-pam-plugin = "${base_libdir}/security/pam_cgroup.so*"
-FILES:${PN}-dev += "${base_libdir}/security/*.la"
-FILES:${PN}-staticdev += "${base_libdir}/security/pam_cgroup.a"
-
-do_install:append() {
- # Until we ship the test suite, this library isn't useful
- rm -f ${D}${libdir}/libcgroupfortesting.*
-}
diff --git a/meta/recipes-core/libcgroup/libcgroup_3.1.0.bb b/meta/recipes-core/libcgroup/libcgroup_3.1.0.bb
new file mode 100644
index 0000000000..4b4f19e36f
--- /dev/null
+++ b/meta/recipes-core/libcgroup/libcgroup_3.1.0.bb
@@ -0,0 +1,33 @@
+SUMMARY = "Linux control group abstraction library"
+HOMEPAGE = "http://libcg.sourceforge.net/"
+DESCRIPTION = "libcgroup is a library that abstracts the control group file system \
+in Linux. Control groups allow you to limit, account and isolate resource usage \
+(CPU, memory, disk I/O, etc.) of groups of processes."
+SECTION = "libs"
+LICENSE = "LGPL-2.1-only"
+LIC_FILES_CHKSUM = "file://COPYING;md5=4d794c5d710e5b3547a6cc6a6609a641"
+
+inherit autotools pkgconfig github-releases
+
+DEPENDS = "bison-native flex-native"
+DEPENDS:append:libc-musl = " fts"
+
+SRC_URI = "${GITHUB_BASE_URI}/download/v${PV}/${BP}.tar.gz \
+"
+UPSTREAM_CHECK_URI = "https://github.com/libcgroup/libcgroup/tags"
+
+SRC_URI[sha256sum] = "976ec4b1e03c0498308cfd28f1b256b40858f636abc8d1f9db24f0a7ea9e1258"
+
+PACKAGECONFIG = "${@bb.utils.filter('DISTRO_FEATURES', 'pam systemd', d)}"
+PACKAGECONFIG[pam] = "--enable-pam-module-dir=${base_libdir}/security --enable-pam=yes,--enable-pam=no,libpam"
+PACKAGECONFIG[systemd] = "--enable-systemd,--disable-systemd,systemd"
+
+PACKAGES =+ "cgroups-pam-plugin"
+FILES:cgroups-pam-plugin = "${base_libdir}/security/pam_cgroup.so*"
+FILES:${PN}-dev += "${base_libdir}/security/*.la"
+FILES:${PN}-staticdev += "${base_libdir}/security/pam_cgroup.a"
+
+do_install:append() {
+ # Until we ship the test suite, this library isn't useful
+ rm -f ${D}${libdir}/libcgroupfortesting.*
+}
diff --git a/meta/recipes-core/libxcrypt/libxcrypt-compat_4.4.28.bb b/meta/recipes-core/libxcrypt/libxcrypt-compat_4.4.28.bb
deleted file mode 100644
index ec9f9f4fa3..0000000000
--- a/meta/recipes-core/libxcrypt/libxcrypt-compat_4.4.28.bb
+++ /dev/null
@@ -1,18 +0,0 @@
-#
-# This provides libcrypto.so.1 which contains obsolete APIs, needed for uninative in particular
-#
-
-require libxcrypt.inc
-
-PROVIDES = ""
-AUTO_LIBNAME_PKGS = ""
-EXCLUDE_FROM_WORLD = "1"
-
-API = "--enable-obsolete-api"
-
-do_install:append () {
- rm -rf ${D}${includedir}
- rm -rf ${D}${libdir}/pkgconfig
- rm -rf ${D}${datadir}
-}
-
diff --git a/meta/recipes-core/libxcrypt/libxcrypt-compat_4.4.36.bb b/meta/recipes-core/libxcrypt/libxcrypt-compat_4.4.36.bb
new file mode 100644
index 0000000000..d5546ce9ba
--- /dev/null
+++ b/meta/recipes-core/libxcrypt/libxcrypt-compat_4.4.36.bb
@@ -0,0 +1,18 @@
+#
+# This provides libcrypto.so.1 which contains obsolete APIs, needed for uninative in particular
+#
+
+require libxcrypt.inc
+
+PROVIDES = ""
+AUTO_LIBNAME_PKGS = ""
+EXCLUDE_FROM_WORLD = "1"
+
+API = "--enable-obsolete-api"
+
+do_install:append () {
+ rm -rf ${D}${includedir}
+ rm -rf ${D}${libdir}/pkgconfig
+ rm -rf ${D}${libdir}/libcrypt.so
+ rm -rf ${D}${datadir}
+}
diff --git a/meta/recipes-core/libxcrypt/libxcrypt.inc b/meta/recipes-core/libxcrypt/libxcrypt.inc
index 39ba2636ff..ba93d91aef 100644
--- a/meta/recipes-core/libxcrypt/libxcrypt.inc
+++ b/meta/recipes-core/libxcrypt/libxcrypt.inc
@@ -10,19 +10,13 @@ LIC_FILES_CHKSUM = "file://LICENSING;md5=c0a30e2b1502c55a7f37e412cd6c6a4b \
inherit autotools pkgconfig
SRC_URI = "git://github.com/besser82/libxcrypt.git;branch=${SRCBRANCH};protocol=https"
-SRCREV = "50cf2b6dd4fdf04309445f2eec8de7051d953abf"
-SRCBRANCH ?= "develop"
+SRCREV = "f531a36aa916a22ef2ce7d270ba381e264250cbf"
+SRCBRANCH ?= "master"
SRC_URI += "file://fix_cflags_handling.patch"
PROVIDES = "virtual/crypt"
-FILES:${PN} = "${libdir}/libcrypt*.so.* \
- ${libdir}/libcrypt-*.so \
- ${libdir}/libowcrypt*.so.* \
- ${libdir}/libowcrypt-*.so \
-"
-
S = "${WORKDIR}/git"
BUILD_CPPFLAGS = "-I${STAGING_INCDIR_NATIVE}"
diff --git a/meta/recipes-core/libxcrypt/libxcrypt_4.4.28.bb b/meta/recipes-core/libxcrypt/libxcrypt_4.4.36.bb
index 79dba2f6dc..79dba2f6dc 100644
--- a/meta/recipes-core/libxcrypt/libxcrypt_4.4.28.bb
+++ b/meta/recipes-core/libxcrypt/libxcrypt_4.4.36.bb
diff --git a/meta/recipes-core/libxml/libxml2/0001-Make-ptest-run-the-python-tests-if-python-is-enabled.patch b/meta/recipes-core/libxml/libxml2/0001-Make-ptest-run-the-python-tests-if-python-is-enabled.patch
deleted file mode 100644
index 6d9ede6194..0000000000
--- a/meta/recipes-core/libxml/libxml2/0001-Make-ptest-run-the-python-tests-if-python-is-enabled.patch
+++ /dev/null
@@ -1,86 +0,0 @@
-From ea1993d1d9a18c5e61b9cb271892b0a48f508d32 Mon Sep 17 00:00:00 2001
-From: Peter Kjellerstedt <pkj@axis.com>
-Date: Fri, 9 Jun 2017 17:50:46 +0200
-Subject: [PATCH] Make ptest run the python tests if python is enabled
-
-One of the tests (tstLastError.py) needed a minor correction. It might
-be due to the fact that the tests are forced to run with Python 3.
-
-Upstream-Status: Inappropriate [OE specific]
-Signed-off-by: Peter Kjellerstedt <peter.kjellerstedt@axis.com>
----
- Makefile.am | 2 +-
- python/Makefile.am | 9 +++++++++
- python/tests/Makefile.am | 10 ++++++++++
- 3 files changed, 20 insertions(+), 1 deletion(-)
-
-diff --git a/Makefile.am b/Makefile.am
-index b428452b..dc18d6dd 100644
---- a/Makefile.am
-+++ b/Makefile.am
-@@ -203,9 +203,9 @@ install-ptest:
- install $(check_PROGRAMS) $(DESTDIR))
- cp -r $(srcdir)/test $(DESTDIR)
- cp -r $(srcdir)/result $(DESTDIR)
-- cp -r $(srcdir)/python $(DESTDIR)
- cp Makefile $(DESTDIR)
- sed -i -e 's|^Makefile:|_Makefile:|' $(DESTDIR)/Makefile
-+ $(MAKE) -C python install-ptest
-
- runtests: runtest$(EXEEXT) testrecurse$(EXEEXT) testapi$(EXEEXT) \
- testchar$(EXEEXT) testdict$(EXEEXT) runxmlconf$(EXEEXT)
-diff --git a/python/Makefile.am b/python/Makefile.am
-index 34aed96c..ba3ec6a4 100644
---- a/python/Makefile.am
-+++ b/python/Makefile.am
-@@ -48,7 +48,16 @@ GENERATED = libxml2class.py libxml2class.txt $(BUILT_SOURCES)
-
- $(GENERATED): $(srcdir)/generator.py $(API_DESC)
- $(PYTHON) $(srcdir)/generator.py $(srcdir)
-+
-+install-ptest:
-+ cp -r $(srcdir) $(DESTDIR)
-+ sed -e 's|^Makefile:|_Makefile:|' \
-+ -e 's|^\(tests test:\) all|\1|' Makefile >$(DESTDIR)/python/Makefile
-+ $(MAKE) -C tests install-ptest
-+else
-+install-ptest:
- endif
-
-+.PHONY: tests test
- tests test: all
- cd tests && $(MAKE) tests
-diff --git a/python/tests/Makefile.am b/python/tests/Makefile.am
-index 227e24df..3568c2d2 100644
---- a/python/tests/Makefile.am
-+++ b/python/tests/Makefile.am
-@@ -59,6 +59,11 @@ XMLS= \
- CLEANFILES = core tmp.xml *.pyc
-
- if WITH_PYTHON
-+install-ptest:
-+ cp -r $(srcdir) $(DESTDIR)/python
-+ sed -e 's|^Makefile:|_Makefile:|' \
-+ -e 's|^\(srcdir = \).*|\1.|' Makefile >$(DESTDIR)/python/tests/Makefile
-+
- tests: $(PYTESTS)
- @for f in $(XMLS) ; do test -f $$f || $(LN_S) $(srcdir)/$$f . ; done
- @echo "## running Python regression tests"
-@@ -70,9 +75,14 @@ tests: $(PYTESTS)
- if [ "$$?" -ne 0 ] ; then \
- echo "-- $$test" ; \
- echo "$$log" ; \
-+ echo "FAIL: $$test"; \
- exit 1 ; \
-+ else \
-+ echo "PASS: $$test"; \
- fi ; \
- done)
- else
-+install-ptest:
-+
- tests:
- endif
---
-2.25.1
-
diff --git a/meta/recipes-core/libxml/libxml2/fix-execution-of-ptests.patch b/meta/recipes-core/libxml/libxml2/fix-execution-of-ptests.patch
deleted file mode 100644
index ad719d4f5f..0000000000
--- a/meta/recipes-core/libxml/libxml2/fix-execution-of-ptests.patch
+++ /dev/null
@@ -1,33 +0,0 @@
-From 395c0f53ec226aaabedb166e6b3a7f8590b95a5f Mon Sep 17 00:00:00 2001
-From: Hongxu Jia <hongxu.jia@windriver.com>
-Date: Sat, 11 May 2019 20:39:15 +0800
-Subject: [PATCH] Make sure that Makefile doesn't try to compile these tests
- again on the target where the source dependencies won't be available.
-
-Upstream-Status: Inappropriate [cross-compile specific]
-
-Signed-off-by: Anuj Mittal <anuj.mittal@intel.com>
-
-Rebase to 2.9.9
-Signed-off-by: Hongxu Jia <hongxu.jia@windriver.com>
----
- Makefile.am | 3 +--
- 1 file changed, 1 insertion(+), 2 deletions(-)
-
-diff --git a/Makefile.am b/Makefile.am
-index 8f4e43d..5edb930 100644
---- a/Makefile.am
-+++ b/Makefile.am
-@@ -211,8 +211,7 @@ install-ptest:
- sed -i -e 's|^Makefile:|_Makefile:|' $(DESTDIR)/Makefile
- $(MAKE) -C python install-ptest
-
--runtests: runtest$(EXEEXT) testrecurse$(EXEEXT) testapi$(EXEEXT) \
-- testchar$(EXEEXT) testdict$(EXEEXT) runxmlconf$(EXEEXT)
-+runtests:
- [ -d test ] || $(LN_S) $(srcdir)/test .
- [ -d result ] || $(LN_S) $(srcdir)/result .
- $(CHECKER) ./runtest$(EXEEXT) && \
---
-2.7.4
-
diff --git a/meta/recipes-core/libxml/libxml2/install-tests.patch b/meta/recipes-core/libxml/libxml2/install-tests.patch
new file mode 100644
index 0000000000..478eeea81b
--- /dev/null
+++ b/meta/recipes-core/libxml/libxml2/install-tests.patch
@@ -0,0 +1,34 @@
+From 0779511838a8cbd1e0f431c22f28f286a2a37b1b Mon Sep 17 00:00:00 2001
+From: Ross Burton <ross.burton@arm.com>
+Date: Mon, 5 Dec 2022 17:02:32 +0000
+Subject: [PATCH] add yocto-specific install-ptest target
+
+Add a target to install the test suite.
+
+Upstream-Status: Inappropriate
+Signed-off-by: Ross Burton <ross.burton@arm.com>
+---
+ Makefile.am | 10 ++++++++++
+ 1 file changed, 10 insertions(+)
+
+diff --git a/Makefile.am b/Makefile.am
+index 0a49d37..1097c63 100644
+--- a/Makefile.am
++++ b/Makefile.am
+@@ -27,6 +27,16 @@ check_PROGRAMS = \
+ testparser \
+ testrecurse
+
++ptestdir=$(libexecdir)
++install-test-data: $(check_PROGRAMS)
++ install -d $(DESTDIR)$(ptestdir) $(DESTDIR)$(ptestdir)/python/
++ for T in $(check_PROGRAMS); do \
++ $(LIBTOOL) --mode=install $(INSTALL_PROGRAM) $$T $(DESTDIR)$(ptestdir) ;\
++ done
++ cp -r $(srcdir)/test $(DESTDIR)$(ptestdir)
++ cp -r $(srcdir)/result $(DESTDIR)$(ptestdir)
++ cp -r $(srcdir)/python/tests $(DESTDIR)$(ptestdir)/python
++
+ bin_PROGRAMS = xmllint xmlcatalog
+
+ bin_SCRIPTS = xml2-config
diff --git a/meta/recipes-core/libxml/libxml2/libxml-64bit.patch b/meta/recipes-core/libxml/libxml2/libxml-64bit.patch
deleted file mode 100644
index fd8e469dd3..0000000000
--- a/meta/recipes-core/libxml/libxml2/libxml-64bit.patch
+++ /dev/null
@@ -1,28 +0,0 @@
-From 056b14345b1abd76a761ab14538f1bc21302781a Mon Sep 17 00:00:00 2001
-From: Hongxu Jia <hongxu.jia@windriver.com>
-Date: Sat, 11 May 2019 20:26:51 +0800
-Subject: [PATCH] libxml 64bit
-
-Upstream-Status: Backport [from debian: bugs.debian.org/439843]
-Signed-off-by: Hongxu Jia <hongxu.jia@windriver.com>
----
- libxml.h | 3 +++
- 1 file changed, 3 insertions(+)
-
-diff --git a/libxml.h b/libxml.h
-index 64e30f7..4e80d90 100644
---- a/libxml.h
-+++ b/libxml.h
-@@ -15,6 +15,9 @@
- #ifndef _LARGEFILE_SOURCE
- #define _LARGEFILE_SOURCE
- #endif
-+#ifndef _LARGEFILE64_SOURCE
-+#define _LARGEFILE64_SOURCE
-+#endif
- #ifndef _FILE_OFFSET_BITS
- #define _FILE_OFFSET_BITS 64
- #endif
---
-2.7.4
-
diff --git a/meta/recipes-core/libxml/libxml2/libxml-m4-use-pkgconfig.patch b/meta/recipes-core/libxml/libxml2/libxml-m4-use-pkgconfig.patch
deleted file mode 100644
index cc9da88a29..0000000000
--- a/meta/recipes-core/libxml/libxml2/libxml-m4-use-pkgconfig.patch
+++ /dev/null
@@ -1,230 +0,0 @@
-From 7196bce35954c4b46391cb0139aeb15ed628fa54 Mon Sep 17 00:00:00 2001
-From: Tony Tascioglu <tony.tascioglu@windriver.com>
-Date: Fri, 14 May 2021 11:50:35 -0400
-Subject: [PATCH] AM_PATH_XML2 uses xml-config which we disable through
-
-binconfig-disabled.bbclass, so port it to use pkg-config instead.
-
-This cannot be upstreamed, as the original macro supports various
-optional arguments which cannot be supported with a direct call
-to pkg-config.
-
-Upstream-Status: Inappropriate [oe-core specific; see above]
-Signed-off-by: Ross Burton <ross.burton@intel.com>
-
-Rebase to 2.9.9
-Signed-off-by: Hongxu Jia <hongxu.jia@windriver.com>
-
-Updated to apply cleanly to v2.9.12
-Signed-off-by: Tony Tascioglu <tony.tascioglu@windriver.com>
-
-Rebase to 2.9.14
-Signed-off-by: Jiaqing Zhao <jiaqing.zhao@linux.intel.com>
----
- libxml.m4 | 189 ++----------------------------------------------------
- 1 file changed, 5 insertions(+), 184 deletions(-)
-
-diff --git a/libxml.m4 b/libxml.m4
-index fc7790c..1c53585 100644
---- a/libxml.m4
-+++ b/libxml.m4
-@@ -1,191 +1,12 @@
--# Configure paths for LIBXML2
--# Simon Josefsson 2020-02-12
--# Fix autoconf 2.70+ warnings
--# Mike Hommey 2004-06-19
--# use CPPFLAGS instead of CFLAGS
--# Toshio Kuratomi 2001-04-21
--# Adapted from:
--# Configure paths for GLIB
--# Owen Taylor 97-11-3
--
- dnl AM_PATH_XML2([MINIMUM-VERSION, [ACTION-IF-FOUND [, ACTION-IF-NOT-FOUND]]])
- dnl Test for XML, and define XML_CPPFLAGS and XML_LIBS
- dnl
--AC_DEFUN([AM_PATH_XML2],[
--AC_ARG_WITH(xml-prefix,
-- [ --with-xml-prefix=PFX Prefix where libxml is installed (optional)],
-- xml_config_prefix="$withval", xml_config_prefix="")
--AC_ARG_WITH(xml-exec-prefix,
-- [ --with-xml-exec-prefix=PFX Exec prefix where libxml is installed (optional)],
-- xml_config_exec_prefix="$withval", xml_config_exec_prefix="")
--AC_ARG_ENABLE(xmltest,
-- [ --disable-xmltest Do not try to compile and run a test LIBXML program],,
-- enable_xmltest=yes)
--
-- if test x$xml_config_exec_prefix != x ; then
-- xml_config_args="$xml_config_args"
-- if test x${XML2_CONFIG+set} != xset ; then
-- XML2_CONFIG=$xml_config_exec_prefix/bin/xml2-config
-- fi
-- fi
-- if test x$xml_config_prefix != x ; then
-- xml_config_args="$xml_config_args --prefix=$xml_config_prefix"
-- if test x${XML2_CONFIG+set} != xset ; then
-- XML2_CONFIG=$xml_config_prefix/bin/xml2-config
-- fi
-- fi
--
-- AC_PATH_PROG(XML2_CONFIG, xml2-config, no)
-- min_xml_version=ifelse([$1], ,2.0.0,[$1])
-- AC_MSG_CHECKING(for libxml - version >= $min_xml_version)
-- no_xml=""
-- if test "$XML2_CONFIG" = "no" ; then
-- no_xml=yes
-- else
-- XML_CPPFLAGS=`$XML2_CONFIG $xml_config_args --cflags`
-- XML_LIBS=`$XML2_CONFIG $xml_config_args --libs`
-- xml_config_major_version=`$XML2_CONFIG $xml_config_args --version | \
-- sed 's/\([[0-9]]*\).\([[0-9]]*\).\([[0-9]]*\)/\1/'`
-- xml_config_minor_version=`$XML2_CONFIG $xml_config_args --version | \
-- sed 's/\([[0-9]]*\).\([[0-9]]*\).\([[0-9]]*\)/\2/'`
-- xml_config_micro_version=`$XML2_CONFIG $xml_config_args --version | \
-- sed 's/\([[0-9]]*\).\([[0-9]]*\).\([[0-9]]*\)/\3/'`
-- if test "x$enable_xmltest" = "xyes" ; then
-- ac_save_CPPFLAGS="$CPPFLAGS"
-- ac_save_LIBS="$LIBS"
-- CPPFLAGS="$CPPFLAGS $XML_CPPFLAGS"
-- LIBS="$XML_LIBS $LIBS"
--dnl
--dnl Now check if the installed libxml is sufficiently new.
--dnl (Also sanity checks the results of xml2-config to some extent)
--dnl
-- rm -f conf.xmltest
-- AC_RUN_IFELSE(
-- [AC_LANG_SOURCE([[
--#include <stdlib.h>
--#include <stdio.h>
--#include <string.h>
--#include <libxml/xmlversion.h>
--
--int
--main()
--{
-- int xml_major_version, xml_minor_version, xml_micro_version;
-- int major, minor, micro;
-- char *tmp_version;
--
-- system("touch conf.xmltest");
--
-- /* Capture xml2-config output via autoconf/configure variables */
-- /* HP/UX 9 (%@#!) writes to sscanf strings */
-- tmp_version = (char *)strdup("$min_xml_version");
-- if (sscanf(tmp_version, "%d.%d.%d", &major, &minor, &micro) != 3) {
-- printf("%s, bad version string from xml2-config\n", "$min_xml_version");
-- exit(1);
-- }
-- free(tmp_version);
--
-- /* Capture the version information from the header files */
-- tmp_version = (char *)strdup(LIBXML_DOTTED_VERSION);
-- if (sscanf(tmp_version, "%d.%d.%d", &xml_major_version, &xml_minor_version, &xml_micro_version) != 3) {
-- printf("%s, bad version string from libxml includes\n", "LIBXML_DOTTED_VERSION");
-- exit(1);
-- }
-- free(tmp_version);
--
-- /* Compare xml2-config output to the libxml headers */
-- if ((xml_major_version != $xml_config_major_version) ||
-- (xml_minor_version != $xml_config_minor_version) ||
-- (xml_micro_version != $xml_config_micro_version))
-- {
-- printf("*** libxml header files (version %d.%d.%d) do not match\n",
-- xml_major_version, xml_minor_version, xml_micro_version);
-- printf("*** xml2-config (version %d.%d.%d)\n",
-- $xml_config_major_version, $xml_config_minor_version, $xml_config_micro_version);
-- return 1;
-- }
--/* Compare the headers to the library to make sure we match */
-- /* Less than ideal -- doesn't provide us with return value feedback,
-- * only exits if there's a serious mismatch between header and library.
-- */
-- LIBXML_TEST_VERSION;
--
-- /* Test that the library is greater than our minimum version */
-- if ((xml_major_version > major) ||
-- ((xml_major_version == major) && (xml_minor_version > minor)) ||
-- ((xml_major_version == major) && (xml_minor_version == minor) &&
-- (xml_micro_version >= micro)))
-- {
-- return 0;
-- }
-- else
-- {
-- printf("\n*** An old version of libxml (%d.%d.%d) was found.\n",
-- xml_major_version, xml_minor_version, xml_micro_version);
-- printf("*** You need a version of libxml newer than %d.%d.%d.\n",
-- major, minor, micro);
-- printf("***\n");
-- printf("*** If you have already installed a sufficiently new version, this error\n");
-- printf("*** probably means that the wrong copy of the xml2-config shell script is\n");
-- printf("*** being found. The easiest way to fix this is to remove the old version\n");
-- printf("*** of LIBXML, but you can also set the XML2_CONFIG environment to point to the\n");
-- printf("*** correct copy of xml2-config. (In this case, you will have to\n");
-- printf("*** modify your LD_LIBRARY_PATH environment variable, or edit /etc/ld.so.conf\n");
-- printf("*** so that the correct libraries are found at run-time))\n");
-- }
-- return 1;
--}
--]])],, no_xml=yes,[echo $ac_n "cross compiling; assumed OK... $ac_c"])
-- CPPFLAGS="$ac_save_CPPFLAGS"
-- LIBS="$ac_save_LIBS"
-- fi
-- fi
-+AC_DEFUN([AM_PATH_XML2],[
-+ AC_REQUIRE([PKG_PROG_PKG_CONFIG])
-
-- if test "x$no_xml" = x ; then
-- AC_MSG_RESULT(yes (version $xml_config_major_version.$xml_config_minor_version.$xml_config_micro_version))
-- ifelse([$2], , :, [$2])
-- else
-- AC_MSG_RESULT(no)
-- if test "$XML2_CONFIG" = "no" ; then
-- echo "*** The xml2-config script installed by LIBXML could not be found"
-- echo "*** If libxml was installed in PREFIX, make sure PREFIX/bin is in"
-- echo "*** your path, or set the XML2_CONFIG environment variable to the"
-- echo "*** full path to xml2-config."
-- else
-- if test -f conf.xmltest ; then
-- :
-- else
-- echo "*** Could not run libxml test program, checking why..."
-- CPPFLAGS="$CPPFLAGS $XML_CPPFLAGS"
-- LIBS="$LIBS $XML_LIBS"
-- AC_LINK_IFELSE(
-- [AC_LANG_PROGRAM([[
--#include <libxml/xmlversion.h>
--#include <stdio.h>
--]], [[ LIBXML_TEST_VERSION; return 0;]])],
-- [ echo "*** The test program compiled, but did not run. This usually means"
-- echo "*** that the run-time linker is not finding LIBXML or finding the wrong"
-- echo "*** version of LIBXML. If it is not finding LIBXML, you'll need to set your"
-- echo "*** LD_LIBRARY_PATH environment variable, or edit /etc/ld.so.conf to point"
-- echo "*** to the installed location Also, make sure you have run ldconfig if that"
-- echo "*** is required on your system"
-- echo "***"
-- echo "*** If you have an old version installed, it is best to remove it, although"
-- echo "*** you may also be able to get things to work by modifying LD_LIBRARY_PATH" ],
-- [ echo "*** The test program failed to compile or link. See the file config.log for the"
-- echo "*** exact error that occurred. This usually means LIBXML was incorrectly installed"
-- echo "*** or that you have moved LIBXML since it was installed. In the latter case, you"
-- echo "*** may want to edit the xml2-config script: $XML2_CONFIG" ])
-- CPPFLAGS="$ac_save_CPPFLAGS"
-- LIBS="$ac_save_LIBS"
-- fi
-- fi
-+ verdep=ifelse([$1], [], [], [">= $1"])
-+ PKG_CHECK_MODULES(XML, [libxml-2.0 $verdep], [$2], [$3])
-
-- XML_CPPFLAGS=""
-- XML_LIBS=""
-- ifelse([$3], , :, [$3])
-- fi
-+ XML_CPPFLAGS=$XML_CFLAGS
- AC_SUBST(XML_CPPFLAGS)
-- AC_SUBST(XML_LIBS)
-- rm -f conf.xmltest
- ])
---
-2.34.1
-
diff --git a/meta/recipes-core/libxml/libxml2/python-sitepackages-dir.patch b/meta/recipes-core/libxml/libxml2/python-sitepackages-dir.patch
deleted file mode 100644
index 956ff3f33e..0000000000
--- a/meta/recipes-core/libxml/libxml2/python-sitepackages-dir.patch
+++ /dev/null
@@ -1,35 +0,0 @@
-From b038c3452667ed17ddb0e791cd7bdc7f8774ac29 Mon Sep 17 00:00:00 2001
-From: Hongxu Jia <hongxu.jia@windriver.com>
-Date: Sat, 11 May 2019 20:35:20 +0800
-Subject: [PATCH] Allow us to pass in PYTHON_SITE_PACKAGES
-
-The python binary used when building for nativesdk doesn't give us the
-correct path here so we need to be able to specify it ourselves.
-
-Upstream-Status: Inappropriate [config]
-Signed-off-by: Paul Eggleton <paul.eggleton@linux.intel.com>
-
-Rebase to 2.9.9
-
-Signed-off-by: Hongxu Jia <hongxu.jia@windriver.com>
----
- configure.ac | 3 ++-
- 1 file changed, 2 insertions(+), 1 deletion(-)
-
-diff --git a/configure.ac b/configure.ac
-index ca911f3..3bbd654 100644
---- a/configure.ac
-+++ b/configure.ac
-@@ -808,7 +808,8 @@ dnl
-
- PYTHON_VERSION=
- PYTHON_INCLUDES=
--PYTHON_SITE_PACKAGES=
-+# Allow this to be set externally
-+#PYTHON_SITE_PACKAGES=
- PYTHON_TESTS=
- pythondir=
- if test "$with_python" != "no" ; then
---
-2.7.4
-
diff --git a/meta/recipes-core/libxml/libxml2/remove-fuzz-from-ptests.patch b/meta/recipes-core/libxml/libxml2/remove-fuzz-from-ptests.patch
deleted file mode 100644
index 66694af388..0000000000
--- a/meta/recipes-core/libxml/libxml2/remove-fuzz-from-ptests.patch
+++ /dev/null
@@ -1,44 +0,0 @@
-From e49a0d4a8f3f725d6f683854e1cad36a3cd02962 Mon Sep 17 00:00:00 2001
-From: Tony Tascioglu <tony.tascioglu@windriver.com>
-Date: Wed, 19 May 2021 19:43:56 -0400
-Subject: [PATCH] Remove fuzz testing from executing with ptests.
-
-Upstream version 2.9.12 introduced new fuzz-testing and a corresponding
-folder fuzz. These tests are not required for ptests of this package.
-
-This patch removes the fuzz testing targets from the Makefile.
-Otherwise, running the ptests will fail due to the invalid directory.
-
-Upstream-Status: Inappropriate [oe specific]
-Signed-off-by: Tony Tascioglu <tony.tascioglu@windriver.com>
----
- Makefile.am | 5 ++---
- 1 file changed, 2 insertions(+), 3 deletions(-)
-
-diff --git a/Makefile.am b/Makefile.am
-index a9284b95..3d7b344d 100644
---- a/Makefile.am
-+++ b/Makefile.am
-@@ -2,9 +2,9 @@
-
- ACLOCAL_AMFLAGS = -I m4
-
--SUBDIRS = include . doc example fuzz xstc $(PYTHON_SUBDIR)
-+SUBDIRS = include . doc example xstc $(PYTHON_SUBDIR)
-
--DIST_SUBDIRS = include . doc example fuzz python xstc
-+DIST_SUBDIRS = include . doc example python xstc
-
- AM_CPPFLAGS = -I$(top_builddir)/include -I$(srcdir)/include
-
-@@ -210,7 +210,6 @@ runtests: runtest$(EXEEXT) testrecurse$(EXEEXT) testapi$(EXEEXT) \
- $(CHECKER) ./runxmlconf$(EXEEXT)
- @(if [ "$(PYTHON_SUBDIR)" != "" ] ; then cd python ; \
- $(MAKE) tests ; fi)
-- @cd fuzz; $(MAKE) tests
-
- check: all runtests
-
---
-2.25.1
-
diff --git a/meta/recipes-core/libxml/libxml2/run-ptest b/meta/recipes-core/libxml/libxml2/run-ptest
index c313d83263..f252a78f17 100644..100755
--- a/meta/recipes-core/libxml/libxml2/run-ptest
+++ b/meta/recipes-core/libxml/libxml2/run-ptest
@@ -1,4 +1,20 @@
#!/bin/sh
+set -e
+
export LC_ALL=en_US.UTF-8
-make -k runtests
+
+# testModule isn't that useful and hard-codes buildtree, so we don't run that
+TESTS="runtest runsuite testrecurse testchar testdict testThreads runxmlconf testapi"
+
+for T in $TESTS; do
+ echo Running $T
+ ./$T && echo PASS: $T || echo FAIL: $T
+done
+
+if test -d python/tests; then
+ cd python/tests
+ for T in *.py; do
+ python3 ./$T && echo PASS: $T || echo FAIL: $T
+ done
+fi
diff --git a/meta/recipes-core/libxml/libxml2/runtest.patch b/meta/recipes-core/libxml/libxml2/runtest.patch
deleted file mode 100644
index 42bb22cfd4..0000000000
--- a/meta/recipes-core/libxml/libxml2/runtest.patch
+++ /dev/null
@@ -1,849 +0,0 @@
-From 6172ccd1e74bc181f5298f19e240234e12876abe Mon Sep 17 00:00:00 2001
-From: Tony Tascioglu <tony.tascioglu@windriver.com>
-Date: Tue, 11 May 2021 11:57:46 -0400
-Subject: [PATCH] Add 'install-ptest' rule.
-
-Print a standard result line for each test.
-
-The patch needs a rework according to comments in the merge request.
-
-Signed-off-by: Mihaela Sendrea <mihaela.sendrea@enea.com>
-Signed-off-by: Andrej Valek <andrej.valek@siemens.com>
-Upstream-Status: Inappropriate [https://gitlab.gnome.org/GNOME/libxml2/-/merge_requests/137]
-
-Signed-off-by: Hongxu Jia <hongxu.jia@windriver.com>
-Signed-off-by: Tony Tascioglu <tony.tascioglu@windriver.com>
----
- Makefile.am | 9 +++
- runsuite.c | 1 +
- runtest.c | 2 +
- runxmlconf.c | 1 +
- testapi.c | 122 ++++++++++++++++++++++++++-------------
- testchar.c | 156 +++++++++++++++++++++++++++++++++++---------------
- testdict.c | 1 +
- testlimits.c | 1 +
- testrecurse.c | 2 +
- 9 files changed, 210 insertions(+), 85 deletions(-)
-
-diff --git a/Makefile.am b/Makefile.am
-index 05d1671f..ae622745 100644
---- a/Makefile.am
-+++ b/Makefile.am
-@@ -198,6 +198,15 @@ runxmlconf_LDADD= $(LDADDS)
- #testOOM_DEPENDENCIES = $(DEPS)
- #testOOM_LDADD= $(LDADDS)
-
-+install-ptest:
-+ @(if [ -d .libs ] ; then cd .libs; fi; \
-+ install $(check_PROGRAMS) $(DESTDIR))
-+ cp -r $(srcdir)/test $(DESTDIR)
-+ cp -r $(srcdir)/result $(DESTDIR)
-+ cp -r $(srcdir)/python $(DESTDIR)
-+ cp Makefile $(DESTDIR)
-+ sed -i -e 's|^Makefile:|_Makefile:|' $(DESTDIR)/Makefile
-+
- runtests: runtest$(EXEEXT) testrecurse$(EXEEXT) testapi$(EXEEXT) \
- testchar$(EXEEXT) testdict$(EXEEXT) runxmlconf$(EXEEXT)
- [ -d test ] || $(LN_S) $(srcdir)/test .
-diff --git a/runsuite.c b/runsuite.c
-index d24b5ec3..f7ff2521 100644
---- a/runsuite.c
-+++ b/runsuite.c
-@@ -1147,6 +1147,7 @@ main(int argc ATTRIBUTE_UNUSED, char **argv ATTRIBUTE_UNUSED) {
-
- if (logfile != NULL)
- fclose(logfile);
-+ printf("%s: runsuite\n\n", (ret == 0) ? "PASS" : "FAIL");
- return(ret);
- }
- #else /* !SCHEMAS */
-diff --git a/runtest.c b/runtest.c
-index ffa98d04..470f95cb 100644
---- a/runtest.c
-+++ b/runtest.c
-@@ -4508,6 +4508,7 @@ launchTests(testDescPtr tst) {
- xmlCharEncCloseFunc(ebcdicHandler);
- xmlCharEncCloseFunc(eucJpHandler);
-
-+ printf("%s: %s\n", (err == 0) ? "PASS" : "FAIL", tst->desc);
- return(err);
- }
-
-@@ -4588,6 +4589,7 @@ main(int argc ATTRIBUTE_UNUSED, char **argv ATTRIBUTE_UNUSED) {
- xmlCleanupParser();
- xmlMemoryDump();
-
-+ printf("%s: runtest\n\n", (ret == 0) ? "PASS" : "FAIL");
- return(ret);
- }
-
-diff --git a/runxmlconf.c b/runxmlconf.c
-index 70f61017..e882b3a1 100644
---- a/runxmlconf.c
-+++ b/runxmlconf.c
-@@ -595,6 +595,7 @@ main(int argc ATTRIBUTE_UNUSED, char **argv ATTRIBUTE_UNUSED) {
-
- if (logfile != NULL)
- fclose(logfile);
-+ printf("%s: runxmlconf\n", (ret == 0) ? "PASS" : "FAIL");
- return(ret);
- }
-
-diff --git a/testapi.c b/testapi.c
-index ff8b470d..52b51d78 100644
---- a/testapi.c
-+++ b/testapi.c
-@@ -1246,49 +1246,91 @@ static int
- testlibxml2(void)
- {
- int test_ret = 0;
--
-- test_ret += test_HTMLparser();
-- test_ret += test_HTMLtree();
-- test_ret += test_SAX2();
-- test_ret += test_c14n();
-- test_ret += test_catalog();
-- test_ret += test_chvalid();
-- test_ret += test_debugXML();
-- test_ret += test_dict();
-- test_ret += test_encoding();
-- test_ret += test_entities();
-- test_ret += test_hash();
-- test_ret += test_list();
-- test_ret += test_nanoftp();
-- test_ret += test_nanohttp();
-- test_ret += test_parser();
-- test_ret += test_parserInternals();
-- test_ret += test_pattern();
-- test_ret += test_relaxng();
-- test_ret += test_schemasInternals();
-- test_ret += test_schematron();
-- test_ret += test_tree();
-- test_ret += test_uri();
-- test_ret += test_valid();
-- test_ret += test_xinclude();
-- test_ret += test_xmlIO();
-- test_ret += test_xmlautomata();
-- test_ret += test_xmlerror();
-- test_ret += test_xmlmodule();
-- test_ret += test_xmlreader();
-- test_ret += test_xmlregexp();
-- test_ret += test_xmlsave();
-- test_ret += test_xmlschemas();
-- test_ret += test_xmlschemastypes();
-- test_ret += test_xmlstring();
-- test_ret += test_xmlunicode();
-- test_ret += test_xmlwriter();
-- test_ret += test_xpath();
-- test_ret += test_xpathInternals();
-- test_ret += test_xpointer();
-+ int ret = 0;
-+
-+ test_ret += (ret = test_HTMLparser());
-+ printf("%s: HTMLparser\n", (ret == 0) ? "PASS" : "FAIL");
-+ test_ret += (ret = test_HTMLtree());
-+ printf("%s: HTMLtree\n", (ret == 0) ? "PASS" : "FAIL");
-+ test_ret += (ret = test_SAX2());
-+ printf("%s: SAX2\n", (ret == 0) ? "PASS" : "FAIL");
-+ test_ret += (ret = test_c14n());
-+ printf("%s: c14n\n", (ret == 0) ? "PASS" : "FAIL");
-+ test_ret += (ret = test_catalog());
-+ printf("%s: catalog\n", (ret == 0) ? "PASS" : "FAIL");
-+ test_ret += (ret = test_chvalid());
-+ printf("%s: chvalid\n", (ret == 0) ? "PASS" : "FAIL");
-+ test_ret += (ret = test_debugXML());
-+ printf("%s: debugXML\n", (ret == 0) ? "PASS" : "FAIL");
-+ test_ret += (ret = test_dict());
-+ printf("%s: dict\n", (ret == 0) ? "PASS" : "FAIL");
-+ test_ret += (ret = test_encoding());
-+ printf("%s: encoding\n", (ret == 0) ? "PASS" : "FAIL");
-+ test_ret += (ret = test_entities());
-+ printf("%s: entities\n", (ret == 0) ? "PASS" : "FAIL");
-+ test_ret += (ret = test_hash());
-+ printf("%s: hash\n", (ret == 0) ? "PASS" : "FAIL");
-+ test_ret += (ret = test_list());
-+ printf("%s: list\n", (ret == 0) ? "PASS" : "FAIL");
-+ test_ret += (ret = test_nanoftp());
-+ printf("%s: nanoftp\n", (ret == 0) ? "PASS" : "FAIL");
-+ test_ret += (ret = test_nanohttp());
-+ printf("%s: nanohttp\n", (ret == 0) ? "PASS" : "FAIL");
-+ test_ret += (ret = test_parser());
-+ printf("%s: parser\n", (ret == 0) ? "PASS" : "FAIL");
-+ test_ret += (ret = test_parserInternals());
-+ printf("%s: parserInternals\n", (ret == 0) ? "PASS" : "FAIL");
-+ test_ret += (ret = test_pattern());
-+ printf("%s: pattern\n", (ret == 0) ? "PASS" : "FAIL");
-+ test_ret += (ret = test_relaxng());
-+ printf("%s: relaxng\n", (ret == 0) ? "PASS" : "FAIL");
-+ test_ret += (ret = test_schemasInternals());
-+ printf("%s: schemasInternals\n", (ret == 0) ? "PASS" : "FAIL");
-+ test_ret += (ret = test_schematron());
-+ printf("%s: schematron\n", (ret == 0) ? "PASS" : "FAIL");
-+ test_ret += (ret = test_tree());
-+ printf("%s: tree\n", (ret == 0) ? "PASS" : "FAIL");
-+ test_ret += (ret = test_uri());
-+ printf("%s: uri\n", (ret == 0) ? "PASS" : "FAIL");
-+ test_ret += (ret = test_valid());
-+ printf("%s: valid\n", (ret == 0) ? "PASS" : "FAIL");
-+ test_ret += (ret = test_xinclude());
-+ printf("%s: xinclude\n", (ret == 0) ? "PASS" : "FAIL");
-+ test_ret += (ret = test_xmlIO());
-+ printf("%s: xmlIO\n", (ret == 0) ? "PASS" : "FAIL");
-+ test_ret += (ret = test_xmlautomata());
-+ printf("%s: xmlautomata\n", (ret == 0) ? "PASS" : "FAIL");
-+ test_ret += (ret = test_xmlerror());
-+ printf("%s: xmlerror\n", (ret == 0) ? "PASS" : "FAIL");
-+ test_ret += (ret = test_xmlmodule());
-+ printf("%s: xmlmodule\n", (ret == 0) ? "PASS" : "FAIL");
-+ test_ret += (ret = test_xmlreader());
-+ printf("%s: xmlreader\n", (ret == 0) ? "PASS" : "FAIL");
-+ test_ret += (ret = test_xmlregexp());
-+ printf("%s: xmlregexp\n", (ret == 0) ? "PASS" : "FAIL");
-+ test_ret += (ret = test_xmlsave());
-+ printf("%s: xmlsave\n", (ret == 0) ? "PASS" : "FAIL");
-+ test_ret += (ret = test_xmlschemas());
-+ printf("%s: xmlschemas\n", (ret == 0) ? "PASS" : "FAIL");
-+ test_ret += (ret = test_xmlschemastypes());
-+ printf("%s: xmlschemastypes\n", (ret == 0) ? "PASS" : "FAIL");
-+ test_ret += (ret = test_xmlstring());
-+ printf("%s: xmlstring\n", (ret == 0) ? "PASS" : "FAIL");
-+ test_ret += (ret = test_xmlunicode());
-+ printf("%s: xmlunicode\n", (ret == 0) ? "PASS" : "FAIL");
-+ test_ret += (ret = test_xmlwriter());
-+ printf("%s: xmlwriter\n", (ret == 0) ? "PASS" : "FAIL");
-+ test_ret += (ret = test_xpath());
-+ printf("%s: xpath\n", (ret == 0) ? "PASS" : "FAIL");
-+ test_ret += (ret = test_xpathInternals());
-+ printf("%s: xpathInternals\n", (ret == 0) ? "PASS" : "FAIL");
-+ test_ret += (ret = test_xpointer());
-+ printf("%s: xpointer\n", (ret == 0) ? "PASS" : "FAIL");
-
- printf("Total: %d functions, %d tests, %d errors\n",
- function_tests, call_tests, test_ret);
-+
-+ printf("%s: testapi\n\n", (test_ret == 0) ? "PASS" : "FAIL");
- return(test_ret);
- }
-
-diff --git a/testchar.c b/testchar.c
-index 6866a175..7bce0132 100644
---- a/testchar.c
-+++ b/testchar.c
-@@ -23,7 +23,7 @@ static void errorHandler(void *unused, xmlErrorPtr err) {
- char document1[100] = "<doc>XXXX</doc>";
- char document2[100] = "<doc foo='XXXX'/>";
-
--static void testDocumentRangeByte1(xmlParserCtxtPtr ctxt, char *document,
-+static int testDocumentRangeByte1(xmlParserCtxtPtr ctxt, char *document,
- int len, char *data, int forbid1, int forbid2) {
- int i;
- xmlDocPtr res;
-@@ -37,33 +37,41 @@ static void testDocumentRangeByte1(xmlParserCtxtPtr ctxt, char *document,
- res = xmlReadMemory(document, len, "test", NULL, 0);
-
- if ((i == forbid1) || (i == forbid2)) {
-- if ((lastError == 0) || (res != NULL))
-+ if ((lastError == 0) || (res != NULL)) {
- fprintf(stderr,
- "Failed to detect invalid char for Byte 0x%02X: %c\n",
- i, i);
-+ return(1);
-+ }
- }
-
- else if ((i == '<') || (i == '&')) {
-- if ((lastError == 0) || (res != NULL))
-+ if ((lastError == 0) || (res != NULL)) {
- fprintf(stderr,
- "Failed to detect illegal char %c for Byte 0x%02X\n", i, i);
-+ return(1);
-+ }
- }
- else if (((i < 0x20) || (i >= 0x80)) &&
- (i != 0x9) && (i != 0xA) && (i != 0xD)) {
-- if ((lastError != XML_ERR_INVALID_CHAR) && (res != NULL))
-+ if ((lastError != XML_ERR_INVALID_CHAR) && (res != NULL)) {
- fprintf(stderr,
- "Failed to detect invalid char for Byte 0x%02X\n", i);
-+ return(1);
-+ }
- }
- else if (res == NULL) {
- fprintf(stderr,
- "Failed to parse valid char for Byte 0x%02X : %c\n", i, i);
-+ return(1);
- }
- if (res != NULL)
- xmlFreeDoc(res);
- }
-+ return(0);
- }
-
--static void testDocumentRangeByte2(xmlParserCtxtPtr ctxt, char *document,
-+static int testDocumentRangeByte2(xmlParserCtxtPtr ctxt, char *document,
- int len, char *data) {
- int i, j;
- xmlDocPtr res;
-@@ -80,10 +88,12 @@ static void testDocumentRangeByte2(xmlParserCtxtPtr ctxt, char *document,
-
- /* if first bit of first char is set, then second bit must too */
- if ((i & 0x80) && ((i & 0x40) == 0)) {
-- if ((lastError == 0) || (res != NULL))
-+ if ((lastError == 0) || (res != NULL)) {
- fprintf(stderr,
- "Failed to detect invalid char for Bytes 0x%02X 0x%02X\n",
- i, j);
-+ return(1);
-+ }
- }
-
- /*
-@@ -91,10 +101,12 @@ static void testDocumentRangeByte2(xmlParserCtxtPtr ctxt, char *document,
- * bits must be 10
- */
- else if ((i & 0x80) && ((j & 0xC0) != 0x80)) {
-- if ((lastError == 0) || (res != NULL))
-+ if ((lastError == 0) || (res != NULL)) {
- fprintf(stderr,
- "Failed to detect invalid char for Bytes 0x%02X 0x%02X\n",
- i, j);
-+ return(1);
-+ }
- }
-
- /*
-@@ -102,10 +114,12 @@ static void testDocumentRangeByte2(xmlParserCtxtPtr ctxt, char *document,
- * than 0x80, i.e. one of bits 5 to 1 of i must be set
- */
- else if ((i & 0x80) && ((i & 0x1E) == 0)) {
-- if ((lastError == 0) || (res != NULL))
-+ if ((lastError == 0) || (res != NULL)) {
- fprintf(stderr,
- "Failed to detect invalid char for Bytes 0x%02X 0x%02X\n",
- i, j);
-+ return(1);
-+ }
- }
-
- /*
-@@ -113,10 +127,12 @@ static void testDocumentRangeByte2(xmlParserCtxtPtr ctxt, char *document,
- * at least 3 bytes, but we give only 2 !
- */
- else if ((i & 0xE0) == 0xE0) {
-- if ((lastError == 0) || (res != NULL))
-+ if ((lastError == 0) || (res != NULL)) {
- fprintf(stderr,
- "Failed to detect invalid char for Bytes 0x%02X 0x%02X 0x00\n",
- i, j);
-+ return(1);
-+ }
- }
-
- /*
-@@ -125,11 +141,13 @@ static void testDocumentRangeByte2(xmlParserCtxtPtr ctxt, char *document,
- else if ((lastError != 0) || (res == NULL)) {
- fprintf(stderr,
- "Failed to parse document for Bytes 0x%02X 0x%02X\n", i, j);
-+ return(1);
- }
- if (res != NULL)
- xmlFreeDoc(res);
- }
- }
-+ return(0);
- }
-
- /**
-@@ -141,9 +159,10 @@ static void testDocumentRangeByte2(xmlParserCtxtPtr ctxt, char *document,
- * CDATA in text or in attribute values.
- */
-
--static void testDocumentRanges(void) {
-+static int testDocumentRanges(void) {
- xmlParserCtxtPtr ctxt;
- char *data;
-+ int test_ret = 0;
-
- /*
- * Set up a parsing context using the first document as
-@@ -152,7 +171,7 @@ static void testDocumentRanges(void) {
- ctxt = xmlNewParserCtxt();
- if (ctxt == NULL) {
- fprintf(stderr, "Failed to allocate parser context\n");
-- return;
-+ return(1);
- }
-
- printf("testing 1 byte char in document: 1");
-@@ -163,7 +182,7 @@ static void testDocumentRanges(void) {
- data[2] = ' ';
- data[3] = ' ';
- /* test 1 byte injection at beginning of area */
-- testDocumentRangeByte1(ctxt, &document1[0], strlen(document1),
-+ test_ret += testDocumentRangeByte1(ctxt, &document1[0], strlen(document1),
- data, -1, -1);
- printf(" 2");
- fflush(stdout);
-@@ -172,7 +191,7 @@ static void testDocumentRanges(void) {
- data[2] = ' ';
- data[3] = ' ';
- /* test 1 byte injection at end of area */
-- testDocumentRangeByte1(ctxt, &document1[0], strlen(document1),
-+ test_ret += testDocumentRangeByte1(ctxt, &document1[0], strlen(document1),
- data + 3, -1, -1);
-
- printf(" 3");
-@@ -183,7 +202,7 @@ static void testDocumentRanges(void) {
- data[2] = ' ';
- data[3] = ' ';
- /* test 1 byte injection at beginning of area */
-- testDocumentRangeByte1(ctxt, &document2[0], strlen(document2),
-+ test_ret += testDocumentRangeByte1(ctxt, &document2[0], strlen(document2),
- data, '\'', -1);
- printf(" 4");
- fflush(stdout);
-@@ -192,7 +211,7 @@ static void testDocumentRanges(void) {
- data[2] = ' ';
- data[3] = ' ';
- /* test 1 byte injection at end of area */
-- testDocumentRangeByte1(ctxt, &document2[0], strlen(document2),
-+ test_ret += testDocumentRangeByte1(ctxt, &document2[0], strlen(document2),
- data + 3, '\'', -1);
- printf(" done\n");
-
-@@ -204,7 +223,7 @@ static void testDocumentRanges(void) {
- data[2] = ' ';
- data[3] = ' ';
- /* test 2 byte injection at beginning of area */
-- testDocumentRangeByte2(ctxt, &document1[0], strlen(document1),
-+ test_ret += testDocumentRangeByte2(ctxt, &document1[0], strlen(document1),
- data);
- printf(" 2");
- fflush(stdout);
-@@ -213,7 +232,7 @@ static void testDocumentRanges(void) {
- data[2] = ' ';
- data[3] = ' ';
- /* test 2 byte injection at end of area */
-- testDocumentRangeByte2(ctxt, &document1[0], strlen(document1),
-+ test_ret += testDocumentRangeByte2(ctxt, &document1[0], strlen(document1),
- data + 2);
-
- printf(" 3");
-@@ -224,7 +243,7 @@ static void testDocumentRanges(void) {
- data[2] = ' ';
- data[3] = ' ';
- /* test 2 byte injection at beginning of area */
-- testDocumentRangeByte2(ctxt, &document2[0], strlen(document2),
-+ test_ret += testDocumentRangeByte2(ctxt, &document2[0], strlen(document2),
- data);
- printf(" 4");
- fflush(stdout);
-@@ -233,14 +252,15 @@ static void testDocumentRanges(void) {
- data[2] = ' ';
- data[3] = ' ';
- /* test 2 byte injection at end of area */
-- testDocumentRangeByte2(ctxt, &document2[0], strlen(document2),
-+ test_ret += testDocumentRangeByte2(ctxt, &document2[0], strlen(document2),
- data + 2);
- printf(" done\n");
-
- xmlFreeParserCtxt(ctxt);
-+ return(test_ret);
- }
-
--static void testCharRangeByte1(xmlParserCtxtPtr ctxt, char *data) {
-+static int testCharRangeByte1(xmlParserCtxtPtr ctxt, char *data) {
- int i = 0;
- int len, c;
-
-@@ -255,19 +275,25 @@ static void testCharRangeByte1(xmlParserCtxtPtr ctxt, char *data) {
- c = xmlCurrentChar(ctxt, &len);
- if ((i == 0) || (i >= 0x80)) {
- /* we must see an error there */
-- if (lastError != XML_ERR_INVALID_CHAR)
-+ if (lastError != XML_ERR_INVALID_CHAR) {
- fprintf(stderr,
- "Failed to detect invalid char for Byte 0x%02X\n", i);
-+ return(1);
-+ }
- } else if (i == 0xD) {
-- if ((c != 0xA) || (len != 1))
-+ if ((c != 0xA) || (len != 1)) {
- fprintf(stderr, "Failed to convert char for Byte 0x%02X\n", i);
-+ return(1);
-+ }
- } else if ((c != i) || (len != 1)) {
- fprintf(stderr, "Failed to parse char for Byte 0x%02X\n", i);
-+ return(1);
- }
- }
-+ return(0);
- }
-
--static void testCharRangeByte2(xmlParserCtxtPtr ctxt, char *data) {
-+static int testCharRangeByte2(xmlParserCtxtPtr ctxt, char *data) {
- int i, j;
- int len, c;
-
-@@ -284,10 +310,12 @@ static void testCharRangeByte2(xmlParserCtxtPtr ctxt, char *data) {
-
- /* if first bit of first char is set, then second bit must too */
- if ((i & 0x80) && ((i & 0x40) == 0)) {
-- if (lastError != XML_ERR_INVALID_CHAR)
-+ if (lastError != XML_ERR_INVALID_CHAR) {
- fprintf(stderr,
- "Failed to detect invalid char for Bytes 0x%02X 0x%02X\n",
- i, j);
-+ return(1);
-+ }
- }
-
- /*
-@@ -295,10 +323,12 @@ static void testCharRangeByte2(xmlParserCtxtPtr ctxt, char *data) {
- * bits must be 10
- */
- else if ((i & 0x80) && ((j & 0xC0) != 0x80)) {
-- if (lastError != XML_ERR_INVALID_CHAR)
-+ if (lastError != XML_ERR_INVALID_CHAR) {
- fprintf(stderr,
- "Failed to detect invalid char for Bytes 0x%02X 0x%02X: %d\n",
- i, j, c);
-+ return(1);
-+ }
- }
-
- /*
-@@ -306,10 +336,12 @@ static void testCharRangeByte2(xmlParserCtxtPtr ctxt, char *data) {
- * than 0x80, i.e. one of bits 5 to 1 of i must be set
- */
- else if ((i & 0x80) && ((i & 0x1E) == 0)) {
-- if (lastError != XML_ERR_INVALID_CHAR)
-+ if (lastError != XML_ERR_INVALID_CHAR) {
- fprintf(stderr,
- "Failed to detect invalid char for Bytes 0x%02X 0x%02X: %d\n",
- i, j, c);
-+ return(1);
-+ }
- }
-
- /*
-@@ -317,10 +349,12 @@ static void testCharRangeByte2(xmlParserCtxtPtr ctxt, char *data) {
- * at least 3 bytes, but we give only 2 !
- */
- else if ((i & 0xE0) == 0xE0) {
-- if (lastError != XML_ERR_INVALID_CHAR)
-+ if (lastError != XML_ERR_INVALID_CHAR) {
- fprintf(stderr,
- "Failed to detect invalid char for Bytes 0x%02X 0x%02X 0x00\n",
- i, j);
-+ return(1);
-+ }
- }
-
- /*
-@@ -329,6 +363,7 @@ static void testCharRangeByte2(xmlParserCtxtPtr ctxt, char *data) {
- else if ((lastError != 0) || (len != 2)) {
- fprintf(stderr,
- "Failed to parse char for Bytes 0x%02X 0x%02X\n", i, j);
-+ return(1);
- }
-
- /*
-@@ -338,12 +373,14 @@ static void testCharRangeByte2(xmlParserCtxtPtr ctxt, char *data) {
- fprintf(stderr,
- "Failed to parse char for Bytes 0x%02X 0x%02X: expect %d got %d\n",
- i, j, ((j & 0x3F) + ((i & 0x1F) << 6)), c);
-+ return(1);
- }
- }
- }
-+ return(0);
- }
-
--static void testCharRangeByte3(xmlParserCtxtPtr ctxt, char *data) {
-+static int testCharRangeByte3(xmlParserCtxtPtr ctxt, char *data) {
- int i, j, k, K;
- int len, c;
- unsigned char lows[6] = {0, 0x80, 0x81, 0xC1, 0xFF, 0xBF};
-@@ -368,20 +405,24 @@ static void testCharRangeByte3(xmlParserCtxtPtr ctxt, char *data) {
- * at least 4 bytes, but we give only 3 !
- */
- if ((i & 0xF0) == 0xF0) {
-- if (lastError != XML_ERR_INVALID_CHAR)
-+ if (lastError != XML_ERR_INVALID_CHAR) {
- fprintf(stderr,
- "Failed to detect invalid char for Bytes 0x%02X 0x%02X 0x%02X 0x%02X\n",
- i, j, K, data[3]);
-+ return(1);
-+ }
- }
-
- /*
- * The second and the third bytes must start with 10
- */
- else if (((j & 0xC0) != 0x80) || ((K & 0xC0) != 0x80)) {
-- if (lastError != XML_ERR_INVALID_CHAR)
-+ if (lastError != XML_ERR_INVALID_CHAR) {
- fprintf(stderr,
- "Failed to detect invalid char for Bytes 0x%02X 0x%02X 0x%02X\n",
- i, j, K);
-+ return(1);
-+ }
- }
-
- /*
-@@ -390,10 +431,12 @@ static void testCharRangeByte3(xmlParserCtxtPtr ctxt, char *data) {
- * the 6th byte of data[1] must be set
- */
- else if (((i & 0xF) == 0) && ((j & 0x20) == 0)) {
-- if (lastError != XML_ERR_INVALID_CHAR)
-+ if (lastError != XML_ERR_INVALID_CHAR) {
- fprintf(stderr,
- "Failed to detect invalid char for Bytes 0x%02X 0x%02X 0x%02X\n",
- i, j, K);
-+ return(1);
-+ }
- }
-
- /*
-@@ -401,10 +444,12 @@ static void testCharRangeByte3(xmlParserCtxtPtr ctxt, char *data) {
- */
- else if (((value > 0xD7FF) && (value <0xE000)) ||
- ((value > 0xFFFD) && (value <0x10000))) {
-- if (lastError != XML_ERR_INVALID_CHAR)
-+ if (lastError != XML_ERR_INVALID_CHAR) {
- fprintf(stderr,
- "Failed to detect invalid char 0x%04X for Bytes 0x%02X 0x%02X 0x%02X\n",
- value, i, j, K);
-+ return(1);
-+ }
- }
-
- /*
-@@ -414,6 +459,7 @@ static void testCharRangeByte3(xmlParserCtxtPtr ctxt, char *data) {
- fprintf(stderr,
- "Failed to parse char for Bytes 0x%02X 0x%02X 0x%02X\n",
- i, j, K);
-+ return(1);
- }
-
- /*
-@@ -423,13 +469,15 @@ static void testCharRangeByte3(xmlParserCtxtPtr ctxt, char *data) {
- fprintf(stderr,
- "Failed to parse char for Bytes 0x%02X 0x%02X 0x%02X: expect %d got %d\n",
- i, j, data[2], value, c);
-+ return(1);
- }
- }
- }
- }
-+ return(0);
- }
-
--static void testCharRangeByte4(xmlParserCtxtPtr ctxt, char *data) {
-+static int testCharRangeByte4(xmlParserCtxtPtr ctxt, char *data) {
- int i, j, k, K, l, L;
- int len, c;
- unsigned char lows[6] = {0, 0x80, 0x81, 0xC1, 0xFF, 0xBF};
-@@ -458,10 +506,12 @@ static void testCharRangeByte4(xmlParserCtxtPtr ctxt, char *data) {
- * at least 5 bytes, but we give only 4 !
- */
- if ((i & 0xF8) == 0xF8) {
-- if (lastError != XML_ERR_INVALID_CHAR)
-+ if (lastError != XML_ERR_INVALID_CHAR) {
- fprintf(stderr,
- "Failed to detect invalid char for Bytes 0x%02X 0x%02X 0x%02X 0x%02X\n",
- i, j, K, data[3]);
-+ return(1);
-+ }
- }
-
- /*
-@@ -469,10 +519,12 @@ static void testCharRangeByte4(xmlParserCtxtPtr ctxt, char *data) {
- */
- else if (((j & 0xC0) != 0x80) || ((K & 0xC0) != 0x80) ||
- ((L & 0xC0) != 0x80)) {
-- if (lastError != XML_ERR_INVALID_CHAR)
-+ if (lastError != XML_ERR_INVALID_CHAR) {
- fprintf(stderr,
- "Failed to detect invalid char for Bytes 0x%02X 0x%02X 0x%02X 0x%02X\n",
- i, j, K, L);
-+ return(1);
-+ }
- }
-
- /*
-@@ -481,10 +533,12 @@ static void testCharRangeByte4(xmlParserCtxtPtr ctxt, char *data) {
- * the 6 or 5th byte of j must be set
- */
- else if (((i & 0x7) == 0) && ((j & 0x30) == 0)) {
-- if (lastError != XML_ERR_INVALID_CHAR)
-+ if (lastError != XML_ERR_INVALID_CHAR) {
- fprintf(stderr,
- "Failed to detect invalid char for Bytes 0x%02X 0x%02X 0x%02X 0x%02X\n",
- i, j, K, L);
-+ return(1);
-+ }
- }
-
- /*
-@@ -493,10 +547,12 @@ static void testCharRangeByte4(xmlParserCtxtPtr ctxt, char *data) {
- else if (((value > 0xD7FF) && (value <0xE000)) ||
- ((value > 0xFFFD) && (value <0x10000)) ||
- (value > 0x10FFFF)) {
-- if (lastError != XML_ERR_INVALID_CHAR)
-+ if (lastError != XML_ERR_INVALID_CHAR) {
- fprintf(stderr,
- "Failed to detect invalid char 0x%04X for Bytes 0x%02X 0x%02X 0x%02X 0x%02X\n",
- value, i, j, K, L);
-+ return(1);
-+ }
- }
-
- /*
-@@ -506,6 +562,7 @@ static void testCharRangeByte4(xmlParserCtxtPtr ctxt, char *data) {
- fprintf(stderr,
- "Failed to parse char for Bytes 0x%02X 0x%02X 0x%02X\n",
- i, j, K);
-+ return(1);
- }
-
- /*
-@@ -515,11 +572,13 @@ static void testCharRangeByte4(xmlParserCtxtPtr ctxt, char *data) {
- fprintf(stderr,
- "Failed to parse char for Bytes 0x%02X 0x%02X 0x%02X: expect %d got %d\n",
- i, j, data[2], value, c);
-+ return(1);
- }
- }
- }
- }
- }
-+ return(0);
- }
-
- /**
-@@ -530,11 +589,12 @@ static void testCharRangeByte4(xmlParserCtxtPtr ctxt, char *data) {
- * cover the full range of UTF-8 chars accepted by XML-1.0
- */
-
--static void testCharRanges(void) {
-+static int testCharRanges(void) {
- char data[5];
- xmlParserCtxtPtr ctxt;
- xmlParserInputBufferPtr buf;
- xmlParserInputPtr input;
-+ int test_ret = 0;
-
- memset(data, 0, 5);
-
-@@ -545,17 +605,19 @@ static void testCharRanges(void) {
- ctxt = xmlNewParserCtxt();
- if (ctxt == NULL) {
- fprintf(stderr, "Failed to allocate parser context\n");
-- return;
-+ return(1);
- }
- buf = xmlParserInputBufferCreateStatic(data, sizeof(data),
- XML_CHAR_ENCODING_NONE);
- if (buf == NULL) {
- fprintf(stderr, "Failed to allocate input buffer\n");
-+ test_ret = 1;
- goto error;
- }
- input = xmlNewInputStream(ctxt);
- if (input == NULL) {
- xmlFreeParserInputBuffer(buf);
-+ test_ret = 1;
- goto error;
- }
- input->filename = NULL;
-@@ -567,25 +629,28 @@ static void testCharRanges(void) {
-
- printf("testing char range: 1");
- fflush(stdout);
-- testCharRangeByte1(ctxt, data);
-+ test_ret += testCharRangeByte1(ctxt, data);
- printf(" 2");
- fflush(stdout);
-- testCharRangeByte2(ctxt, data);
-+ test_ret += testCharRangeByte2(ctxt, data);
- printf(" 3");
- fflush(stdout);
-- testCharRangeByte3(ctxt, data);
-+ test_ret += testCharRangeByte3(ctxt, data);
- printf(" 4");
- fflush(stdout);
-- testCharRangeByte4(ctxt, data);
-+ test_ret += testCharRangeByte4(ctxt, data);
- printf(" done\n");
- fflush(stdout);
-
- error:
- xmlFreeParserCtxt(ctxt);
-+ return(test_ret);
- }
-
- int main(void) {
-
-+ int ret = 0;
-+
- /*
- * this initialize the library and check potential ABI mismatches
- * between the version it was compiled for and the actual shared
-@@ -602,8 +667,9 @@ int main(void) {
- /*
- * Run the tests
- */
-- testCharRanges();
-- testDocumentRanges();
-+ ret += testCharRanges();
-+ ret += testDocumentRanges();
-+ printf("%s: testchar\n\n", (ret == 0) ? "PASS" : "FAIL");
-
- /*
- * Cleanup function for the XML library.
-diff --git a/testdict.c b/testdict.c
-index 40bebd05..114b9347 100644
---- a/testdict.c
-+++ b/testdict.c
-@@ -440,5 +440,6 @@ int main(void)
- clean_strings();
- xmlCleanupParser();
- xmlMemoryDump();
-+ printf("%s: testdict\n\n", (ret == 0) ? "PASS" : "FAIL");
- return(ret);
- }
-diff --git a/testlimits.c b/testlimits.c
-index 059116a6..f0bee68d 100644
---- a/testlimits.c
-+++ b/testlimits.c
-@@ -1634,5 +1634,6 @@ main(int argc ATTRIBUTE_UNUSED, char **argv ATTRIBUTE_UNUSED) {
- xmlCleanupParser();
- xmlMemoryDump();
-
-+ printf("%s: testlimits\n", (ret == 0) ? "PASS" : "FAIL");
- return(ret);
- }
-diff --git a/testrecurse.c b/testrecurse.c
-index 0cbe25a6..3ecadb40 100644
---- a/testrecurse.c
-+++ b/testrecurse.c
-@@ -892,6 +892,7 @@ launchTests(testDescPtr tst) {
- err++;
- }
- }
-+ printf("%s: %s\n", (err == 0) ? "PASS" : "FAIL", tst->desc);
- return(err);
- }
-
-@@ -961,5 +962,6 @@ main(int argc ATTRIBUTE_UNUSED, char **argv ATTRIBUTE_UNUSED) {
- xmlCleanupParser();
- xmlMemoryDump();
-
-+ printf("%s: testrecurse\n\n", (ret == 0) ? "PASS" : "FAIL");
- return(ret);
- }
---
-2.25.1
-
diff --git a/meta/recipes-core/libxml/libxml2_2.12.6.bb b/meta/recipes-core/libxml/libxml2_2.12.6.bb
new file mode 100644
index 0000000000..14fcff7fa4
--- /dev/null
+++ b/meta/recipes-core/libxml/libxml2_2.12.6.bb
@@ -0,0 +1,101 @@
+SUMMARY = "XML C Parser Library and Toolkit"
+DESCRIPTION = "The XML Parser Library allows for manipulation of XML files. Libxml2 exports Push and Pull type parser interfaces for both XML and HTML. It can do DTD validation at parse time, on a parsed document instance or with an arbitrary DTD. Libxml2 includes complete XPath, XPointer and Xinclude implementations. It also has a SAX like interface, which is designed to be compatible with Expat."
+HOMEPAGE = "https://gitlab.gnome.org/GNOME/libxml2"
+BUGTRACKER = "http://bugzilla.gnome.org/buglist.cgi?product=libxml2"
+SECTION = "libs"
+LICENSE = "MIT"
+LIC_FILES_CHKSUM = "file://Copyright;md5=fec7ecfe714722b2bb0aaff7d200c701 \
+ file://dict.c;beginline=6;endline=15;md5=2b4b7b827d2d8b080372433c4c9c85b6 \
+ file://list.c;beginline=4;endline=13;md5=b9c25b021ccaf287e50060602d20f3a7 \
+ file://trio.c;beginline=5;endline=14;md5=cd4f61e27f88c1d43df112966b1cd28f \
+ "
+
+DEPENDS = "zlib virtual/libiconv"
+
+GNOMEBASEBUILDCLASS = "autotools"
+inherit gnomebase
+
+SRC_URI += "http://www.w3.org/XML/Test/xmlts20130923.tar;subdir=${BP};name=testtar \
+ file://run-ptest \
+ file://install-tests.patch \
+ "
+
+SRC_URI[archive.sha256sum] = "889c593a881a3db5fdd96cc9318c87df34eb648edfc458272ad46fd607353fbb"
+SRC_URI[testtar.sha256sum] = "c6b2d42ee50b8b236e711a97d68e6c4b5c8d83e69a2be4722379f08702ea7273"
+
+# Disputed as a security issue, but fixed in d39f780
+CVE_STATUS[CVE-2023-45322] = "disputed: issue requires memory allocation to fail"
+
+BINCONFIG = "${bindir}/xml2-config"
+
+PACKAGECONFIG ??= "python \
+ ${@bb.utils.filter('DISTRO_FEATURES', 'ipv6', d)} \
+"
+PACKAGECONFIG[python] = "--with-python=${PYTHON},--without-python,python3"
+PACKAGECONFIG[ipv6] = "--enable-ipv6,--disable-ipv6,"
+
+inherit autotools pkgconfig binconfig-disabled ptest
+
+inherit_defer ${@bb.utils.contains('PACKAGECONFIG', 'python', 'python3targetconfig', '', d)}
+
+LDFLAGS:append:riscv64 = "${@bb.utils.contains('DISTRO_FEATURES', 'ld-is-lld ptest', ' -fuse-ld=bfd', '', d)}"
+
+RDEPENDS:${PN}-ptest += "bash make locale-base-en-us ${@bb.utils.contains('PACKAGECONFIG', 'python', 'libgcc python3-core python3-logging python3-shell python3-stringold python3-threading python3-unittest ${PN}-python', '', d)}"
+
+RDEPENDS:${PN}-python += "${@bb.utils.contains('PACKAGECONFIG', 'python', 'python3-core', '', d)}"
+
+RDEPENDS:${PN}-ptest:append:libc-musl = " musl-locales"
+RDEPENDS:${PN}-ptest:append:libc-glibc = " glibc-gconv-ebcdic-us \
+ glibc-gconv-ibm1141 \
+ glibc-gconv-iso8859-5 \
+ glibc-gconv-euc-jp \
+ "
+
+# WARNING: zlib is required for RPM use
+EXTRA_OECONF = "--without-debug --without-legacy --with-catalog --with-c14n --without-lzma --with-fexceptions"
+EXTRA_OECONF:class-native = "--without-legacy --with-c14n --without-lzma --with-zlib"
+EXTRA_OECONF:class-nativesdk = "--without-legacy --with-c14n --without-lzma --with-zlib"
+EXTRA_OECONF:linuxstdbase = "--with-debug --with-legacy --with-c14n --without-lzma --with-zlib"
+
+python populate_packages:prepend () {
+ # autonamer would call this libxml2-2, but we don't want that
+ if d.getVar('DEBIAN_NAMES'):
+ d.setVar('PKG:libxml2', '${MLPREFIX}libxml2')
+}
+
+PACKAGE_BEFORE_PN += "${PN}-utils"
+PACKAGES += "${PN}-python"
+
+FILES:${PN}-staticdev += "${PYTHON_SITEPACKAGES_DIR}/*.a"
+FILES:${PN}-utils = "${bindir}/*"
+FILES:${PN}-python = "${PYTHON_SITEPACKAGES_DIR}"
+
+do_configure:prepend () {
+ # executables take longer to package: these should not be executable
+ find ${S}/xmlconf/ -type f -exec chmod -x {} \+
+}
+
+do_install_ptest () {
+ oe_runmake DESTDIR=${D} ptestdir=${PTEST_PATH} install-test-data
+
+ cp -r ${S}/xmlconf ${D}${PTEST_PATH}
+
+ if ! ${@bb.utils.contains('PACKAGECONFIG', 'python', 'true', 'false', d)}; then
+ rm -rf ${D}${PTEST_DIR}/python
+ fi
+}
+
+# with musl we need to enable icu support explicitly for these tests
+do_install_ptest:append:libc-musl () {
+ rm -rf ${D}/${PTEST_PATH}/test/icu_parse_test.xml
+}
+
+do_install:append:class-native () {
+ # Docs are not needed in the native case
+ rm ${D}${datadir}/gtk-doc -rf
+
+ create_wrapper ${D}${bindir}/xmllint 'XML_CATALOG_FILES=${XML_CATALOG_FILES:-${sysconfdir}/xml/catalog}'
+}
+do_install[vardepsexclude] += "XML_CATALOG_FILES:-${sysconfdir}/xml/catalog"
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-core/libxml/libxml2_2.9.14.bb b/meta/recipes-core/libxml/libxml2_2.9.14.bb
deleted file mode 100644
index 3081ebf92f..0000000000
--- a/meta/recipes-core/libxml/libxml2_2.9.14.bb
+++ /dev/null
@@ -1,110 +0,0 @@
-SUMMARY = "XML C Parser Library and Toolkit"
-DESCRIPTION = "The XML Parser Library allows for manipulation of XML files. Libxml2 exports Push and Pull type parser interfaces for both XML and HTML. It can do DTD validation at parse time, on a parsed document instance or with an arbitrary DTD. Libxml2 includes complete XPath, XPointer and Xinclude implementations. It also has a SAX like interface, which is designed to be compatible with Expat."
-HOMEPAGE = "https://gitlab.gnome.org/GNOME/libxml2"
-BUGTRACKER = "http://bugzilla.gnome.org/buglist.cgi?product=libxml2"
-SECTION = "libs"
-LICENSE = "MIT"
-LIC_FILES_CHKSUM = "file://Copyright;md5=2044417e2e5006b65a8b9067b683fcf1 \
- file://hash.c;beginline=6;endline=15;md5=e77f77b12cb69e203d8b4090a0eee879 \
- file://list.c;beginline=4;endline=13;md5=b9c25b021ccaf287e50060602d20f3a7 \
- file://trio.c;beginline=5;endline=14;md5=cd4f61e27f88c1d43df112966b1cd28f"
-
-DEPENDS = "zlib virtual/libiconv"
-
-inherit gnomebase
-
-SRC_URI += "http://www.w3.org/XML/Test/xmlts20080827.tar.gz;subdir=${BP};name=testtar \
- file://libxml-64bit.patch \
- file://runtest.patch \
- file://run-ptest \
- file://python-sitepackages-dir.patch \
- file://0001-Make-ptest-run-the-python-tests-if-python-is-enabled.patch \
- file://fix-execution-of-ptests.patch \
- file://remove-fuzz-from-ptests.patch \
- file://libxml-m4-use-pkgconfig.patch \
- "
-
-SRC_URI[archive.sha256sum] = "60d74a257d1ccec0475e749cba2f21559e48139efba6ff28224357c7c798dfee"
-SRC_URI[testtar.sha256sum] = "96151685cec997e1f9f3387e3626d61e6284d4d6e66e0e440c209286c03e9cc7"
-
-BINCONFIG = "${bindir}/xml2-config"
-
-PACKAGECONFIG ??= "python \
- ${@bb.utils.filter('DISTRO_FEATURES', 'ipv6', d)} \
-"
-PACKAGECONFIG[python] = "--with-python=${PYTHON},--without-python,python3"
-PACKAGECONFIG[ipv6] = "--enable-ipv6,--disable-ipv6,"
-
-inherit autotools pkgconfig binconfig-disabled ptest
-
-inherit ${@bb.utils.contains('PACKAGECONFIG', 'python', 'python3targetconfig', '', d)}
-
-RDEPENDS:${PN}-ptest += "bash make ${@bb.utils.contains('PACKAGECONFIG', 'python', 'libgcc python3-core python3-logging python3-shell python3-stringold python3-threading python3-unittest ${PN}-python', '', d)}"
-
-RDEPENDS:${PN}-python += "${@bb.utils.contains('PACKAGECONFIG', 'python', 'python3-core', '', d)}"
-
-RDEPENDS:${PN}-ptest:append:libc-glibc = " glibc-gconv-ebcdic-us \
- glibc-gconv-ibm1141 \
- glibc-gconv-iso8859-5 \
- glibc-gconv-euc-jp \
- locale-base-en-us \
- "
-
-export PYTHON_SITE_PACKAGES="${PYTHON_SITEPACKAGES_DIR}"
-
-# WARNING: zlib is required for RPM use
-EXTRA_OECONF = "--without-debug --without-legacy --with-catalog --without-docbook --with-c14n --without-lzma --with-fexceptions"
-EXTRA_OECONF:class-native = "--without-legacy --without-docbook --with-c14n --without-lzma --with-zlib"
-EXTRA_OECONF:class-nativesdk = "--without-legacy --without-docbook --with-c14n --without-lzma --with-zlib"
-EXTRA_OECONF:linuxstdbase = "--with-debug --with-legacy --with-docbook --with-c14n --without-lzma --with-zlib"
-
-python populate_packages:prepend () {
- # autonamer would call this libxml2-2, but we don't want that
- if d.getVar('DEBIAN_NAMES'):
- d.setVar('PKG:libxml2', '${MLPREFIX}libxml2')
-}
-
-PACKAGE_BEFORE_PN += "${PN}-utils"
-PACKAGES += "${PN}-python"
-
-FILES:${PN}-staticdev += "${PYTHON_SITEPACKAGES_DIR}/*.a"
-FILES:${PN}-dev += "${libdir}/xml2Conf.sh"
-FILES:${PN}-utils = "${bindir}/*"
-FILES:${PN}-python = "${PYTHON_SITEPACKAGES_DIR}"
-
-do_configure:prepend () {
- # executables take longer to package: these should not be executable
- find ${S}/xmlconf/ -type f -exec chmod -x {} \+
-}
-
-do_compile_ptest() {
- oe_runmake check-am
-}
-
-do_install_ptest () {
- cp -r ${S}/xmlconf ${D}${PTEST_PATH}
- if [ "${@bb.utils.filter('PACKAGECONFIG', 'python', d)}" ]; then
- sed -i -e 's|^\(PYTHON = \).*|\1${USRBINPATH}/${PYTHON_PN}|' \
- ${D}${PTEST_PATH}/python/tests/Makefile
- grep -lrZ '#!/usr/bin/python' ${D}${PTEST_PATH}/python |
- xargs -0 sed -i -e 's|/usr/bin/python|${USRBINPATH}/${PYTHON_PN}|'
- fi
- #Remove build host references from various Makefiles
- find "${D}${PTEST_PATH}" -name Makefile -type f -exec \
- sed -i \
- -e 's,--sysroot=${STAGING_DIR_TARGET},,g' \
- -e 's|${DEBUG_PREFIX_MAP}||g' \
- -e 's:${HOSTTOOLS_DIR}/::g' \
- -e 's:${RECIPE_SYSROOT_NATIVE}::g' \
- -e 's:${RECIPE_SYSROOT}::g' \
- -e 's:${BASE_WORKDIR}/${MULTIMACH_TARGET_SYS}::g' \
- -e '/^RELDATE/d' \
- {} +
-}
-
-do_install:append:class-native () {
- # Docs are not needed in the native case
- rm ${D}${datadir}/gtk-doc -rf
-}
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-core/meta/build-sysroots.bb b/meta/recipes-core/meta/build-sysroots.bb
index ad22a75eb2..db05c111ab 100644
--- a/meta/recipes-core/meta/build-sysroots.bb
+++ b/meta/recipes-core/meta/build-sysroots.bb
@@ -1,5 +1,6 @@
-INHIBIT_DEFAULT_DEPS = "1"
LICENSE = "MIT"
+SUMMARY = "Build old style sysroot based on everything in the components directory that matches the current MACHINE"
+INHIBIT_DEFAULT_DEPS = "1"
STANDALONE_SYSROOT = "${STAGING_DIR}/${MACHINE}"
STANDALONE_SYSROOT_NATIVE = "${STAGING_DIR}/${BUILD_ARCH}"
@@ -16,6 +17,18 @@ deltask configure
deltask compile
deltask install
deltask populate_sysroot
+deltask create_spdx
+deltask collect_spdx_deps
+deltask create_runtime_spdx
+deltask recipe_qa
+
+do_build_warn () {
+ bbwarn "Native or target sysroot population needs to be explicitly selected; please use
+bitbake -c build_native_sysroot build-sysroots
+bitbake -c build_target_sysroot build-sysroots
+or both."
+}
+addtask do_build_warn before do_build
python do_build_native_sysroot () {
targetsysroot = d.getVar("STANDALONE_SYSROOT")
@@ -26,7 +39,7 @@ python do_build_native_sysroot () {
}
do_build_native_sysroot[cleandirs] = "${STANDALONE_SYSROOT_NATIVE}"
do_build_native_sysroot[nostamp] = "1"
-addtask do_build_native_sysroot before do_build
+addtask do_build_native_sysroot
python do_build_target_sysroot () {
targetsysroot = d.getVar("STANDALONE_SYSROOT")
@@ -37,6 +50,6 @@ python do_build_target_sysroot () {
}
do_build_target_sysroot[cleandirs] = "${STANDALONE_SYSROOT}"
do_build_target_sysroot[nostamp] = "1"
-addtask do_build_target_sysroot before do_build
+addtask do_build_target_sysroot
do_clean[cleandirs] += "${STANDALONE_SYSROOT} ${STANDALONE_SYSROOT_NATIVE}"
diff --git a/meta/recipes-core/meta/buildtools-extended-tarball.bb b/meta/recipes-core/meta/buildtools-extended-tarball.bb
index 83e3fddccc..633f8e6b99 100644
--- a/meta/recipes-core/meta/buildtools-extended-tarball.bb
+++ b/meta/recipes-core/meta/buildtools-extended-tarball.bb
@@ -28,21 +28,13 @@ TOOLCHAIN_HOST_TASK += "\
nativesdk-libtool \
nativesdk-pkgconfig \
nativesdk-glibc-utils \
- nativesdk-glibc-gconv-ibm850 \
- nativesdk-glibc-gconv-iso8859-1 \
- nativesdk-glibc-gconv-utf-16 \
- nativesdk-glibc-gconv-cp1250 \
- nativesdk-glibc-gconv-cp1251 \
- nativesdk-glibc-gconv-cp1252 \
- nativesdk-glibc-gconv-euc-jp \
- nativesdk-glibc-gconv-libjis \
+ nativesdk-glibc-gconvs \
nativesdk-libxcrypt-dev \
nativesdk-parted \
nativesdk-dosfstools \
nativesdk-gptfdisk \
"
-# gconv-cp1250, cp1251 and euc-jp needed for iconv to work in vim builds
-# also copied list from uninative
+# gconvs needed for iconv to work in vim builds
TOOLCHAIN_OUTPUTNAME = "${SDK_ARCH}-buildtools-extended-nativesdk-standalone-${DISTRO_VERSION}"
diff --git a/meta/recipes-core/meta/buildtools-tarball.bb b/meta/recipes-core/meta/buildtools-tarball.bb
index 6b59e4934d..92fbda335d 100644
--- a/meta/recipes-core/meta/buildtools-tarball.bb
+++ b/meta/recipes-core/meta/buildtools-tarball.bb
@@ -11,8 +11,11 @@ TOOLCHAIN_HOST_TASK ?= "\
nativesdk-python3-git \
nativesdk-python3-jinja2 \
nativesdk-python3-testtools \
+ nativesdk-python3-pip \
+ nativesdk-python3-setuptools \
nativesdk-python3-subunit \
nativesdk-python3-pyyaml \
+ nativesdk-python3-websockets \
nativesdk-ncurses-terminfo-base \
nativesdk-chrpath \
nativesdk-tar \
@@ -30,6 +33,7 @@ TOOLCHAIN_HOST_TASK ?= "\
nativesdk-mtools \
nativesdk-zstd \
nativesdk-lz4 \
+ nativesdk-libacl \
"
MULTIMACH_TARGET_SYS = "${SDK_ARCH}-nativesdk${SDK_VENDOR}-${SDK_OS}"
@@ -67,12 +71,17 @@ create_sdk_files:append () {
# Generate new (mini) sdk-environment-setup file
script=${1:-${SDK_OUTPUT}/${SDKPATH}/environment-setup-${SDK_SYS}}
touch $script
- echo 'export PATH=${SDKPATHNATIVE}${bindir_nativesdk}:${SDKPATHNATIVE}${sbindir_nativesdk}:${SDKPATHNATIVE}${base_bindir_nativesdk}:${SDKPATHNATIVE}${base_sbindir_nativesdk}:$PATH' >> $script
+ echo 'export PATH="${SDKPATHNATIVE}${bindir_nativesdk}:${SDKPATHNATIVE}${sbindir_nativesdk}:${SDKPATHNATIVE}${base_bindir_nativesdk}:${SDKPATHNATIVE}${base_sbindir_nativesdk}:$PATH"' >> $script
echo 'export OECORE_NATIVE_SYSROOT="${SDKPATHNATIVE}"' >> $script
if [ -e "${SDK_OUTPUT}${SDKPATHNATIVE}${sysconfdir}/ssl/certs/ca-certificates.crt" ]; then
echo 'export GIT_SSL_CAINFO="${SDKPATHNATIVE}${sysconfdir}/ssl/certs/ca-certificates.crt"' >>$script
echo 'export SSL_CERT_FILE="${SDKPATHNATIVE}${sysconfdir}/ssl/certs/ca-certificates.crt"' >>$script
+ echo 'export REQUESTS_CA_BUNDLE="${SDKPATHNATIVE}${sysconfdir}/ssl/certs/ca-certificates.crt"' >>$script
+ echo 'export CURL_CA_BUNDLE="${SDKPATHNATIVE}${sysconfdir}/ssl/certs/ca-certificates.crt"' >>$script
fi
+ echo 'HOST_PKG_PATH=$(command -p pkg-config --variable=pc_path pkg-config 2>/dev/null)' >>$script
+ echo 'export PKG_CONFIG_LIBDIR=${SDKPATHNATIVE}/${libdir}/pkgconfig:${SDKPATHNATIVE}/${datadir}/pkgconfig:${HOST_PKG_PATH:-/usr/lib/pkgconfig:/usr/share/pkgconfig}' >>$script
+ echo 'unset HOST_PKG_PATH'
toolchain_create_sdk_version ${SDK_OUTPUT}/${SDKPATH}/version-${SDK_SYS}
diff --git a/meta/recipes-core/meta/cve-update-db-native.bb b/meta/recipes-core/meta/cve-update-db-native.bb
deleted file mode 100644
index c8c1cbf115..0000000000
--- a/meta/recipes-core/meta/cve-update-db-native.bb
+++ /dev/null
@@ -1,237 +0,0 @@
-SUMMARY = "Updates the NVD CVE database"
-LICENSE = "MIT"
-
-INHIBIT_DEFAULT_DEPS = "1"
-
-inherit native
-
-deltask do_unpack
-deltask do_patch
-deltask do_configure
-deltask do_compile
-deltask do_install
-deltask do_populate_sysroot
-
-NVDCVE_URL ?= "https://nvd.nist.gov/feeds/json/cve/1.1/nvdcve-1.1-"
-# CVE database update interval, in seconds. By default: once a day (24*60*60).
-# Use 0 to force the update
-CVE_DB_UPDATE_INTERVAL ?= "86400"
-
-python () {
- if not bb.data.inherits_class("cve-check", d):
- raise bb.parse.SkipRecipe("Skip recipe when cve-check class is not loaded.")
-}
-
-python do_fetch() {
- """
- Update NVD database with json data feed
- """
- import bb.utils
- import bb.progress
- import sqlite3, urllib, urllib.parse, gzip
- from datetime import date
-
- bb.utils.export_proxies(d)
-
- YEAR_START = 2002
-
- db_file = d.getVar("CVE_CHECK_DB_FILE")
- db_dir = os.path.dirname(db_file)
-
- if os.path.exists("{0}-journal".format(db_file)):
- # If a journal is present the last update might have been interrupted. In that case,
- # just wipe any leftovers and force the DB to be recreated.
- os.remove("{0}-journal".format(db_file))
-
- if os.path.exists(db_file):
- os.remove(db_file)
-
- # The NVD database changes once a day, so no need to update more frequently
- # Allow the user to force-update
- try:
- import time
- update_interval = int(d.getVar("CVE_DB_UPDATE_INTERVAL"))
- if (update_interval < 0):
- update_interval = 0
- if time.time() - os.path.getmtime(db_file) < update_interval:
- bb.debug(2, "Recently updated, skipping")
- return
-
- except OSError:
- pass
-
- bb.utils.mkdirhier(db_dir)
-
- # Connect to database
- conn = sqlite3.connect(db_file)
- c = conn.cursor()
-
- initialize_db(c)
-
- with bb.progress.ProgressHandler(d) as ph, open(os.path.join(d.getVar("TMPDIR"), 'cve_check'), 'a') as cve_f:
- total_years = date.today().year + 1 - YEAR_START
- for i, year in enumerate(range(YEAR_START, date.today().year + 1)):
- bb.debug(2, "Updating %d" % year)
- ph.update((float(i + 1) / total_years) * 100)
- year_url = (d.getVar('NVDCVE_URL')) + str(year)
- meta_url = year_url + ".meta"
- json_url = year_url + ".json.gz"
-
- # Retrieve meta last modified date
- try:
- response = urllib.request.urlopen(meta_url)
- except urllib.error.URLError as e:
- cve_f.write('Warning: CVE db update error, Unable to fetch CVE data.\n\n')
- bb.warn("Failed to fetch CVE data (%s)" % e.reason)
- return
-
- if response:
- for l in response.read().decode("utf-8").splitlines():
- key, value = l.split(":", 1)
- if key == "lastModifiedDate":
- last_modified = value
- break
- else:
- bb.warn("Cannot parse CVE metadata, update failed")
- return
-
- # Compare with current db last modified date
- c.execute("select DATE from META where YEAR = ?", (year,))
- meta = c.fetchone()
- if not meta or meta[0] != last_modified:
- bb.debug(2, "Updating entries")
- # Clear products table entries corresponding to current year
- c.execute("delete from PRODUCTS where ID like ?", ('CVE-%d%%' % year,))
-
- # Update db with current year json file
- try:
- response = urllib.request.urlopen(json_url)
- if response:
- update_db(c, gzip.decompress(response.read()).decode('utf-8'))
- c.execute("insert or replace into META values (?, ?)", [year, last_modified])
- except urllib.error.URLError as e:
- cve_f.write('Warning: CVE db update error, CVE data is outdated.\n\n')
- bb.warn("Cannot parse CVE data (%s), update failed" % e.reason)
- return
- else:
- bb.debug(2, "Already up to date (last modified %s)" % last_modified)
- # Update success, set the date to cve_check file.
- if year == date.today().year:
- cve_f.write('CVE database update : %s\n\n' % date.today())
-
- conn.commit()
- conn.close()
-}
-
-do_fetch[lockfiles] += "${CVE_CHECK_DB_FILE_LOCK}"
-do_fetch[file-checksums] = ""
-do_fetch[vardeps] = ""
-
-def initialize_db(c):
- c.execute("CREATE TABLE IF NOT EXISTS META (YEAR INTEGER UNIQUE, DATE TEXT)")
-
- c.execute("CREATE TABLE IF NOT EXISTS NVD (ID TEXT UNIQUE, SUMMARY TEXT, \
- SCOREV2 TEXT, SCOREV3 TEXT, MODIFIED INTEGER, VECTOR TEXT)")
-
- c.execute("CREATE TABLE IF NOT EXISTS PRODUCTS (ID TEXT, \
- VENDOR TEXT, PRODUCT TEXT, VERSION_START TEXT, OPERATOR_START TEXT, \
- VERSION_END TEXT, OPERATOR_END TEXT)")
- c.execute("CREATE INDEX IF NOT EXISTS PRODUCT_ID_IDX on PRODUCTS(ID);")
-
-def parse_node_and_insert(c, node, cveId):
- # Parse children node if needed
- for child in node.get('children', ()):
- parse_node_and_insert(c, child, cveId)
-
- def cpe_generator():
- for cpe in node.get('cpe_match', ()):
- if not cpe['vulnerable']:
- return
- cpe23 = cpe.get('cpe23Uri')
- if not cpe23:
- return
- cpe23 = cpe23.split(':')
- if len(cpe23) < 6:
- return
- vendor = cpe23[3]
- product = cpe23[4]
- version = cpe23[5]
-
- if cpe23[6] == '*' or cpe23[6] == '-':
- version_suffix = ""
- else:
- version_suffix = "_" + cpe23[6]
-
- if version != '*' and version != '-':
- # Version is defined, this is a '=' match
- yield [cveId, vendor, product, version + version_suffix, '=', '', '']
- elif version == '-':
- # no version information is available
- yield [cveId, vendor, product, version, '', '', '']
- else:
- # Parse start version, end version and operators
- op_start = ''
- op_end = ''
- v_start = ''
- v_end = ''
-
- if 'versionStartIncluding' in cpe:
- op_start = '>='
- v_start = cpe['versionStartIncluding']
-
- if 'versionStartExcluding' in cpe:
- op_start = '>'
- v_start = cpe['versionStartExcluding']
-
- if 'versionEndIncluding' in cpe:
- op_end = '<='
- v_end = cpe['versionEndIncluding']
-
- if 'versionEndExcluding' in cpe:
- op_end = '<'
- v_end = cpe['versionEndExcluding']
-
- if op_start or op_end or v_start or v_end:
- yield [cveId, vendor, product, v_start, op_start, v_end, op_end]
- else:
- # This is no version information, expressed differently.
- # Save processing by representing as -.
- yield [cveId, vendor, product, '-', '', '', '']
-
- c.executemany("insert into PRODUCTS values (?, ?, ?, ?, ?, ?, ?)", cpe_generator())
-
-def update_db(c, jsondata):
- import json
- root = json.loads(jsondata)
-
- for elt in root['CVE_Items']:
- if not elt['impact']:
- continue
-
- accessVector = None
- cveId = elt['cve']['CVE_data_meta']['ID']
- cveDesc = elt['cve']['description']['description_data'][0]['value']
- date = elt['lastModifiedDate']
- try:
- accessVector = elt['impact']['baseMetricV2']['cvssV2']['accessVector']
- cvssv2 = elt['impact']['baseMetricV2']['cvssV2']['baseScore']
- except KeyError:
- cvssv2 = 0.0
- try:
- accessVector = accessVector or elt['impact']['baseMetricV3']['cvssV3']['attackVector']
- cvssv3 = elt['impact']['baseMetricV3']['cvssV3']['baseScore']
- except KeyError:
- accessVector = accessVector or "UNKNOWN"
- cvssv3 = 0.0
-
- c.execute("insert or replace into NVD values (?, ?, ?, ?, ?, ?)",
- [cveId, cveDesc, cvssv2, cvssv3, date, accessVector])
-
- configurations = elt['configurations']['nodes']
- for config in configurations:
- parse_node_and_insert(c, config, cveId)
-
-
-do_fetch[nostamp] = "1"
-
-EXCLUDE_FROM_WORLD = "1"
diff --git a/meta/recipes-core/meta/cve-update-nvd2-native.bb b/meta/recipes-core/meta/cve-update-nvd2-native.bb
new file mode 100644
index 0000000000..1901641965
--- /dev/null
+++ b/meta/recipes-core/meta/cve-update-nvd2-native.bb
@@ -0,0 +1,377 @@
+SUMMARY = "Updates the NVD CVE database"
+LICENSE = "MIT"
+
+# Important note:
+# This product uses the NVD API but is not endorsed or certified by the NVD.
+
+INHIBIT_DEFAULT_DEPS = "1"
+
+inherit native
+
+deltask do_unpack
+deltask do_patch
+deltask do_configure
+deltask do_compile
+deltask do_install
+deltask do_populate_sysroot
+
+NVDCVE_URL ?= "https://services.nvd.nist.gov/rest/json/cves/2.0"
+
+# If you have a NVD API key (https://nvd.nist.gov/developers/request-an-api-key)
+# then setting this to get higher rate limits.
+NVDCVE_API_KEY ?= ""
+
+# CVE database update interval, in seconds. By default: once a day (24*60*60).
+# Use 0 to force the update
+# Use a negative value to skip the update
+CVE_DB_UPDATE_INTERVAL ?= "86400"
+
+# CVE database incremental update age threshold, in seconds. If the database is
+# older than this threshold, do a full re-download, else, do an incremental
+# update. By default: the maximum allowed value from NVD: 120 days (120*24*60*60)
+# Use 0 to force a full download.
+CVE_DB_INCR_UPDATE_AGE_THRES ?= "10368000"
+
+# Number of attempts for each http query to nvd server before giving up
+CVE_DB_UPDATE_ATTEMPTS ?= "5"
+
+CVE_DB_TEMP_FILE ?= "${CVE_CHECK_DB_DIR}/temp_nvdcve_2.db"
+
+python () {
+ if not bb.data.inherits_class("cve-check", d):
+ raise bb.parse.SkipRecipe("Skip recipe when cve-check class is not loaded.")
+}
+
+python do_fetch() {
+ """
+ Update NVD database with API 2.0
+ """
+ import bb.utils
+ import bb.progress
+ import shutil
+
+ bb.utils.export_proxies(d)
+
+ db_file = d.getVar("CVE_CHECK_DB_FILE")
+ db_dir = os.path.dirname(db_file)
+ db_tmp_file = d.getVar("CVE_DB_TEMP_FILE")
+
+ cleanup_db_download(db_file, db_tmp_file)
+ # By default let's update the whole database (since time 0)
+ database_time = 0
+
+ # The NVD database changes once a day, so no need to update more frequently
+ # Allow the user to force-update
+ try:
+ import time
+ update_interval = int(d.getVar("CVE_DB_UPDATE_INTERVAL"))
+ if update_interval < 0:
+ bb.note("CVE database update skipped")
+ return
+ if time.time() - os.path.getmtime(db_file) < update_interval:
+ bb.note("CVE database recently updated, skipping")
+ return
+ database_time = os.path.getmtime(db_file)
+
+ except OSError:
+ pass
+
+ bb.utils.mkdirhier(db_dir)
+ if os.path.exists(db_file):
+ shutil.copy2(db_file, db_tmp_file)
+
+ if update_db_file(db_tmp_file, d, database_time) == True:
+ # Update downloaded correctly, can swap files
+ shutil.move(db_tmp_file, db_file)
+ else:
+ # Update failed, do not modify the database
+ bb.warn("CVE database update failed")
+ os.remove(db_tmp_file)
+}
+
+do_fetch[lockfiles] += "${CVE_CHECK_DB_FILE_LOCK}"
+do_fetch[file-checksums] = ""
+do_fetch[vardeps] = ""
+
+def cleanup_db_download(db_file, db_tmp_file):
+ """
+ Cleanup the download space from possible failed downloads
+ """
+
+ # Clean up the updates done on the main file
+ # Remove it only if a journal file exists - it means a complete re-download
+ if os.path.exists("{0}-journal".format(db_file)):
+ # If a journal is present the last update might have been interrupted. In that case,
+ # just wipe any leftovers and force the DB to be recreated.
+ os.remove("{0}-journal".format(db_file))
+
+ if os.path.exists(db_file):
+ os.remove(db_file)
+
+ # Clean-up the temporary file downloads, we can remove both journal
+ # and the temporary database
+ if os.path.exists("{0}-journal".format(db_tmp_file)):
+ # If a journal is present the last update might have been interrupted. In that case,
+ # just wipe any leftovers and force the DB to be recreated.
+ os.remove("{0}-journal".format(db_tmp_file))
+
+ if os.path.exists(db_tmp_file):
+ os.remove(db_tmp_file)
+
+def nvd_request_wait(attempt, min_wait):
+ return min ( ( (2 * attempt) + min_wait ) , 30)
+
+def nvd_request_next(url, attempts, api_key, args, min_wait):
+ """
+ Request next part of the NVD database
+ NVD API documentation: https://nvd.nist.gov/developers/vulnerabilities
+ """
+
+ import urllib.request
+ import urllib.parse
+ import gzip
+ import http
+ import time
+
+ request = urllib.request.Request(url + "?" + urllib.parse.urlencode(args))
+ if api_key:
+ request.add_header("apiKey", api_key)
+ bb.note("Requesting %s" % request.full_url)
+
+ for attempt in range(attempts):
+ try:
+ r = urllib.request.urlopen(request)
+
+ if (r.headers['content-encoding'] == 'gzip'):
+ buf = r.read()
+ raw_data = gzip.decompress(buf)
+ else:
+ raw_data = r.read().decode("utf-8")
+
+ r.close()
+
+ except Exception as e:
+ wait_time = nvd_request_wait(attempt, min_wait)
+ bb.note("CVE database: received error (%s)" % (e))
+ bb.note("CVE database: retrying download after %d seconds. attempted (%d/%d)" % (wait_time, attempt+1, attempts))
+ time.sleep(wait_time)
+ pass
+ else:
+ return raw_data
+ else:
+ # We failed at all attempts
+ return None
+
+def update_db_file(db_tmp_file, d, database_time):
+ """
+ Update the given database file
+ """
+ import bb.utils, bb.progress
+ import datetime
+ import sqlite3
+ import json
+
+ # Connect to database
+ conn = sqlite3.connect(db_tmp_file)
+ initialize_db(conn)
+
+ req_args = {'startIndex' : 0}
+
+ incr_update_threshold = int(d.getVar("CVE_DB_INCR_UPDATE_AGE_THRES"))
+ if database_time != 0:
+ database_date = datetime.datetime.fromtimestamp(database_time, tz=datetime.timezone.utc)
+ today_date = datetime.datetime.now(tz=datetime.timezone.utc)
+ delta = today_date - database_date
+ if incr_update_threshold == 0:
+ bb.note("CVE database: forced full update")
+ elif delta < datetime.timedelta(seconds=incr_update_threshold):
+ bb.note("CVE database: performing partial update")
+ # The maximum range for time is 120 days
+ if delta > datetime.timedelta(days=120):
+ bb.error("CVE database: Trying to do an incremental update on a larger than supported range")
+ req_args['lastModStartDate'] = database_date.isoformat()
+ req_args['lastModEndDate'] = today_date.isoformat()
+ else:
+ bb.note("CVE database: file too old, forcing a full update")
+ else:
+ bb.note("CVE database: no preexisting database, do a full download")
+
+ with bb.progress.ProgressHandler(d) as ph, open(os.path.join(d.getVar("TMPDIR"), 'cve_check'), 'a') as cve_f:
+
+ bb.note("Updating entries")
+ index = 0
+ url = d.getVar("NVDCVE_URL")
+ api_key = d.getVar("NVDCVE_API_KEY") or None
+ attempts = int(d.getVar("CVE_DB_UPDATE_ATTEMPTS"))
+
+ # Recommended by NVD
+ wait_time = 6
+ if api_key:
+ wait_time = 2
+
+ while True:
+ req_args['startIndex'] = index
+ raw_data = nvd_request_next(url, attempts, api_key, req_args, wait_time)
+ if raw_data is None:
+ # We haven't managed to download data
+ return False
+
+ data = json.loads(raw_data)
+
+ index = data["startIndex"]
+ total = data["totalResults"]
+ per_page = data["resultsPerPage"]
+ bb.note("Got %d entries" % per_page)
+ for cve in data["vulnerabilities"]:
+ update_db(conn, cve)
+
+ index += per_page
+ ph.update((float(index) / (total+1)) * 100)
+ if index >= total:
+ break
+
+ # Recommended by NVD
+ time.sleep(wait_time)
+
+ # Update success, set the date to cve_check file.
+ cve_f.write('CVE database update : %s\n\n' % datetime.date.today())
+
+ conn.commit()
+ conn.close()
+ return True
+
+def initialize_db(conn):
+ with conn:
+ c = conn.cursor()
+
+ c.execute("CREATE TABLE IF NOT EXISTS META (YEAR INTEGER UNIQUE, DATE TEXT)")
+
+ c.execute("CREATE TABLE IF NOT EXISTS NVD (ID TEXT UNIQUE, SUMMARY TEXT, \
+ SCOREV2 TEXT, SCOREV3 TEXT, MODIFIED INTEGER, VECTOR TEXT, VECTORSTRING TEXT)")
+
+ c.execute("CREATE TABLE IF NOT EXISTS PRODUCTS (ID TEXT, \
+ VENDOR TEXT, PRODUCT TEXT, VERSION_START TEXT, OPERATOR_START TEXT, \
+ VERSION_END TEXT, OPERATOR_END TEXT)")
+ c.execute("CREATE INDEX IF NOT EXISTS PRODUCT_ID_IDX on PRODUCTS(ID);")
+
+ c.close()
+
+def parse_node_and_insert(conn, node, cveId):
+
+ def cpe_generator():
+ for cpe in node.get('cpeMatch', ()):
+ if not cpe['vulnerable']:
+ return
+ cpe23 = cpe.get('criteria')
+ if not cpe23:
+ return
+ cpe23 = cpe23.split(':')
+ if len(cpe23) < 6:
+ return
+ vendor = cpe23[3]
+ product = cpe23[4]
+ version = cpe23[5]
+
+ if cpe23[6] == '*' or cpe23[6] == '-':
+ version_suffix = ""
+ else:
+ version_suffix = "_" + cpe23[6]
+
+ if version != '*' and version != '-':
+ # Version is defined, this is a '=' match
+ yield [cveId, vendor, product, version + version_suffix, '=', '', '']
+ elif version == '-':
+ # no version information is available
+ yield [cveId, vendor, product, version, '', '', '']
+ else:
+ # Parse start version, end version and operators
+ op_start = ''
+ op_end = ''
+ v_start = ''
+ v_end = ''
+
+ if 'versionStartIncluding' in cpe:
+ op_start = '>='
+ v_start = cpe['versionStartIncluding']
+
+ if 'versionStartExcluding' in cpe:
+ op_start = '>'
+ v_start = cpe['versionStartExcluding']
+
+ if 'versionEndIncluding' in cpe:
+ op_end = '<='
+ v_end = cpe['versionEndIncluding']
+
+ if 'versionEndExcluding' in cpe:
+ op_end = '<'
+ v_end = cpe['versionEndExcluding']
+
+ if op_start or op_end or v_start or v_end:
+ yield [cveId, vendor, product, v_start, op_start, v_end, op_end]
+ else:
+ # This is no version information, expressed differently.
+ # Save processing by representing as -.
+ yield [cveId, vendor, product, '-', '', '', '']
+
+ conn.executemany("insert into PRODUCTS values (?, ?, ?, ?, ?, ?, ?)", cpe_generator()).close()
+
+def update_db(conn, elt):
+ """
+ Update a single entry in the on-disk database
+ """
+
+ accessVector = None
+ vectorString = None
+ cveId = elt['cve']['id']
+ if elt['cve']['vulnStatus'] == "Rejected":
+ c = conn.cursor()
+ c.execute("delete from PRODUCTS where ID = ?;", [cveId])
+ c.execute("delete from NVD where ID = ?;", [cveId])
+ c.close()
+ return
+ cveDesc = ""
+ for desc in elt['cve']['descriptions']:
+ if desc['lang'] == 'en':
+ cveDesc = desc['value']
+ date = elt['cve']['lastModified']
+ try:
+ accessVector = elt['cve']['metrics']['cvssMetricV2'][0]['cvssData']['accessVector']
+ vectorString = elt['cve']['metrics']['cvssMetricV2'][0]['cvssData']['vectorString']
+ cvssv2 = elt['cve']['metrics']['cvssMetricV2'][0]['cvssData']['baseScore']
+ except KeyError:
+ cvssv2 = 0.0
+ cvssv3 = None
+ try:
+ accessVector = accessVector or elt['cve']['metrics']['cvssMetricV30'][0]['cvssData']['attackVector']
+ vectorString = vectorString or elt['cve']['metrics']['cvssMetricV30'][0]['cvssData']['vectorString']
+ cvssv3 = elt['cve']['metrics']['cvssMetricV30'][0]['cvssData']['baseScore']
+ except KeyError:
+ pass
+ try:
+ accessVector = accessVector or elt['cve']['metrics']['cvssMetricV31'][0]['cvssData']['attackVector']
+ vectorString = vectorString or elt['cve']['metrics']['cvssMetricV31'][0]['cvssData']['vectorString']
+ cvssv3 = cvssv3 or elt['cve']['metrics']['cvssMetricV31'][0]['cvssData']['baseScore']
+ except KeyError:
+ pass
+ accessVector = accessVector or "UNKNOWN"
+ vectorString = vectorString or "UNKNOWN"
+ cvssv3 = cvssv3 or 0.0
+
+ conn.execute("insert or replace into NVD values (?, ?, ?, ?, ?, ?, ?)",
+ [cveId, cveDesc, cvssv2, cvssv3, date, accessVector, vectorString]).close()
+
+ try:
+ # Remove any pre-existing CVE configuration. Even for partial database
+ # update, those will be repopulated. This ensures that old
+ # configuration is not kept for an updated CVE.
+ conn.execute("delete from PRODUCTS where ID = ?", [cveId]).close()
+ for config in elt['cve']['configurations']:
+ # This is suboptimal as it doesn't handle AND/OR and negate, but is better than nothing
+ for node in config["nodes"]:
+ parse_node_and_insert(conn, node, cveId)
+ except KeyError:
+ bb.note("CVE %s has no configurations" % cveId)
+
+do_fetch[nostamp] = "1"
+
+EXCLUDE_FROM_WORLD = "1"
diff --git a/meta/recipes-core/meta/meta-environment.bb b/meta/recipes-core/meta/meta-environment.bb
index 7118fb2aef..65436bc3e6 100644
--- a/meta/recipes-core/meta/meta-environment.bb
+++ b/meta/recipes-core/meta/meta-environment.bb
@@ -1,6 +1,5 @@
SUMMARY = "Package of environment files for SDK"
LICENSE = "MIT"
-PR = "r8"
EXCLUDE_FROM_WORLD = "1"
diff --git a/meta/recipes-core/meta/meta-ide-support.bb b/meta/recipes-core/meta/meta-ide-support.bb
index 39317d50e0..d85aa120c0 100644
--- a/meta/recipes-core/meta/meta-ide-support.bb
+++ b/meta/recipes-core/meta/meta-ide-support.bb
@@ -2,11 +2,12 @@ SUMMARY = "Integrated Development Environment support"
DESCRIPTION = "Meta package for ensuring the build directory contains all appropriate toolchain packages for using an IDE"
LICENSE = "MIT"
-DEPENDS = "virtual/libc gdb-cross-${TARGET_ARCH} qemu-native qemu-helper-native unfs3-native cmake-native"
-PR = "r3"
+DEPENDS = "virtual/libc gdb-cross-${TARGET_ARCH} qemu-native qemu-helper-native unfs3-native cmake-native autoconf-native automake-native meson-native intltool-native pkgconfig-native"
RM_WORK_EXCLUDE += "${PN}"
-inherit toolchain-scripts nopackages
+inherit toolchain-scripts nopackages deploy testsdk
+
+TESTSDK_CLASS_NAME = "oeqa.sdk.testmetaidesupport.TestSDK"
do_populate_ide_support () {
toolchain_create_tree_env_script
@@ -18,4 +19,22 @@ python () {
d.appendVarFlag("do_populate_ide_support", "file-checksums", " " + " ".join(searched))
}
-addtask populate_ide_support before do_build after do_install
+addtask populate_ide_support before do_deploy after do_install
+
+python do_write_test_data() {
+ from oe.data import export2json
+
+ out_dir = d.getVar('B')
+ testdata_name = os.path.join(out_dir, "%s.testdata.json" % d.getVar('PN'))
+
+ export2json(d, testdata_name)
+}
+addtask write_test_data before do_deploy after do_install
+
+do_deploy () {
+ install ${B}/* ${DEPLOYDIR}
+}
+
+addtask deploy before do_build
+
+do_build[deptask] += "do_prepare_recipe_sysroot"
diff --git a/meta/recipes-core/meta/meta-toolchain.bb b/meta/recipes-core/meta/meta-toolchain.bb
index b02b0665e6..260e03934e 100644
--- a/meta/recipes-core/meta/meta-toolchain.bb
+++ b/meta/recipes-core/meta/meta-toolchain.bb
@@ -1,6 +1,5 @@
SUMMARY = "Meta package for building a installable toolchain"
LICENSE = "MIT"
-PR = "r7"
inherit populate_sdk
diff --git a/meta/recipes-core/meta/meta-world-pkgdata.bb b/meta/recipes-core/meta/meta-world-pkgdata.bb
index b299861375..0438bf6138 100644
--- a/meta/recipes-core/meta/meta-world-pkgdata.bb
+++ b/meta/recipes-core/meta/meta-world-pkgdata.bb
@@ -33,6 +33,8 @@ deltask do_patch
deltask do_configure
deltask do_compile
deltask do_install
+deltask do_create_spdx
+deltask do_create_spdx_runtime
do_prepare_recipe_sysroot[deptask] = ""
diff --git a/meta/recipes-core/meta/nativesdk-buildtools-perl-dummy.bb b/meta/recipes-core/meta/nativesdk-buildtools-perl-dummy.bb
index 4909401c5a..bb4e746237 100644
--- a/meta/recipes-core/meta/nativesdk-buildtools-perl-dummy.bb
+++ b/meta/recipes-core/meta/nativesdk-buildtools-perl-dummy.bb
@@ -36,7 +36,6 @@ DUMMYPROVIDES = "\
/usr/bin/perl \
"
-PR = "r2"
require dummy-sdk-package.inc
diff --git a/meta/recipes-core/meta/signing-keys.bb b/meta/recipes-core/meta/signing-keys.bb
index 03463f95f5..107a39d658 100644
--- a/meta/recipes-core/meta/signing-keys.bb
+++ b/meta/recipes-core/meta/signing-keys.bb
@@ -18,6 +18,8 @@ FILES:${PN}-rpm = "${sysconfdir}/pki/rpm-gpg"
FILES:${PN}-ipk = "${sysconfdir}/pki/ipk-gpg"
FILES:${PN}-packagefeed = "${sysconfdir}/pki/packagefeed-gpg"
+RDEPENDS:${PN}-dev = ""
+
python do_get_public_keys () {
from oe.gpg_sign import get_signer
diff --git a/meta/recipes-core/meta/testexport-tarball.bb b/meta/recipes-core/meta/testexport-tarball.bb
index bb9f8ded48..abdd009252 100644
--- a/meta/recipes-core/meta/testexport-tarball.bb
+++ b/meta/recipes-core/meta/testexport-tarball.bb
@@ -4,7 +4,7 @@ DESCRIPTION = "SDK type target for standalone tarball containing packages define
SUMMARY = "Standalone tarball for test systems with missing software"
LICENSE = "MIT"
-TEST_EXPORT_SDK_PACKAGES ??= ""
+require conf/testexport.conf
TOOLCHAIN_TARGET_TASK ?= ""
diff --git a/meta/recipes-core/meta/uninative-tarball.bb b/meta/recipes-core/meta/uninative-tarball.bb
index a21d08b591..7eebcaf11a 100644
--- a/meta/recipes-core/meta/uninative-tarball.bb
+++ b/meta/recipes-core/meta/uninative-tarball.bb
@@ -3,23 +3,16 @@ LICENSE = "MIT"
TOOLCHAIN_TARGET_TASK = ""
-# ibm850 - mcopy from mtools
-# iso8859-1 - guile
-# utf-16, cp1252 - binutils-windres
TOOLCHAIN_HOST_TASK = "\
nativesdk-glibc \
nativesdk-glibc-dbg \
- nativesdk-glibc-gconv-ibm850 \
- nativesdk-glibc-gconv-iso8859-1 \
- nativesdk-glibc-gconv-utf-16 \
- nativesdk-glibc-gconv-cp1252 \
- nativesdk-glibc-gconv-euc-jp \
- nativesdk-glibc-gconv-libjis \
+ nativesdk-glibc-gconvs \
nativesdk-patchelf \
nativesdk-libxcrypt \
nativesdk-libxcrypt-compat \
nativesdk-libnss-nis \
nativesdk-sdk-provides-dummy \
+ nativesdk-libgcc \
"
INHIBIT_DEFAULT_DEPS = "1"
diff --git a/meta/recipes-core/meta/wic-tools.bb b/meta/recipes-core/meta/wic-tools.bb
index b9580d7e33..76494e7fca 100644
--- a/meta/recipes-core/meta/wic-tools.bb
+++ b/meta/recipes-core/meta/wic-tools.bb
@@ -4,9 +4,10 @@ LICENSE = "MIT"
DEPENDS = "\
parted-native gptfdisk-native dosfstools-native \
- mtools-native bmap-tools-native grub-native cdrtools-native \
+ mtools-native bmaptool-native grub-native cdrtools-native \
btrfs-tools-native squashfs-tools-native pseudo-native \
e2fsprogs-native util-linux-native tar-native erofs-utils-native \
+ virtual/${TARGET_PREFIX}binutils \
"
DEPENDS:append:x86 = " syslinux-native syslinux grub-efi systemd-boot"
DEPENDS:append:x86-64 = " syslinux-native syslinux grub-efi systemd-boot"
diff --git a/meta/recipes-core/musl/bsd-headers.bb b/meta/recipes-core/musl/bsd-headers.bb
index cf8af0da3c..887a816031 100644
--- a/meta/recipes-core/musl/bsd-headers.bb
+++ b/meta/recipes-core/musl/bsd-headers.bb
@@ -27,5 +27,5 @@ do_install() {
#
COMPATIBLE_HOST = ".*-musl.*"
-RDEPENDS:${PN}-dev = ""
+DEV_PKG_DEPENDENCY = ""
RRECOMMENDS:${PN}-dbg = "${PN}-dev (= ${EXTENDPKGV})"
diff --git a/meta/recipes-core/musl/bsd-headers/sys-cdefs.h b/meta/recipes-core/musl/bsd-headers/sys-cdefs.h
index 209a623c0f..841a5da8ba 100644
--- a/meta/recipes-core/musl/bsd-headers/sys-cdefs.h
+++ b/meta/recipes-core/musl/bsd-headers/sys-cdefs.h
@@ -1,3 +1,6 @@
+#ifndef _SYS_CDEFS_H_
+#define _SYS_CDEFS_H_
+
#warning usage of non-standard #include <sys/cdefs.h> is deprecated
#undef __P
@@ -24,3 +27,8 @@
# define __THROW
# define __NTH(fct) fct
#endif
+
+#define __CONCAT(x,y) x ## y
+#define __STRING(x) #x
+
+#endif /* _SYS_CDEFS_H_ */
diff --git a/meta/recipes-core/musl/gcompat/0001-Add-fcntl64-wrapper.patch b/meta/recipes-core/musl/gcompat/0001-Add-fcntl64-wrapper.patch
new file mode 100644
index 0000000000..3f265e273a
--- /dev/null
+++ b/meta/recipes-core/musl/gcompat/0001-Add-fcntl64-wrapper.patch
@@ -0,0 +1,44 @@
+From 37f70f54c74c4ceeb089cbee88311ba00638f211 Mon Sep 17 00:00:00 2001
+From: Khem Raj <raj.khem@gmail.com>
+Date: Fri, 13 Oct 2023 21:02:23 -0700
+Subject: [PATCH] Add fcntl64 wrapper
+
+fixes loadtime errors with pvr precompiled driver for visionfive2
+
+load libpvr_dri_support.so: Error relocating /usr/lib/libpvr_dri_support.so: fcntl64: symbol not found
+
+Upstream-Status: Submitted [https://git.adelielinux.org/adelie/gcompat/-/merge_requests/28]
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+---
+ libgcompat/unistd.c | 11 +++++++++++
+ 1 file changed, 11 insertions(+)
+
+diff --git a/libgcompat/unistd.c b/libgcompat/unistd.c
+index 011fba2..400abf3 100644
+--- a/libgcompat/unistd.c
++++ b/libgcompat/unistd.c
+@@ -1,6 +1,7 @@
+ #include <assert.h> /* assert */
+ #include <fcntl.h> /* O_CREAT */
+ #include <limits.h> /* NGROUPS_MAX */
++#include <stdarg.h> /* va_list, va_start, va_end */
+ #include <stddef.h> /* NULL, size_t */
+ #include <unistd.h> /* confstr, getcwd, getgroups, ... */
+ #include <errno.h> /* ENOSYS, ENOMEM */
+@@ -250,3 +251,13 @@ int __close(int fd)
+ {
+ return close(fd);
+ }
++
++int fcntl64 (int fd, int cmd, ...)
++{
++ int ret;
++ va_list ap;
++ va_start(ap, cmd);
++ ret = fcntl(fd, cmd, ap);
++ va_end(ap);
++ return ret;
++}
+--
+2.42.0
+
diff --git a/meta/recipes-core/musl/gcompat_git.bb b/meta/recipes-core/musl/gcompat_git.bb
index b051e43b6c..40fe8c6a5f 100644
--- a/meta/recipes-core/musl/gcompat_git.bb
+++ b/meta/recipes-core/musl/gcompat_git.bb
@@ -5,15 +5,16 @@ SUMMARY = "A library which provides glibc-compatible APIs for use on musl libc s
HOMEPAGE = "https://git.adelielinux.org/adelie/gcompat"
LICENSE = "NCSA"
-LIC_FILES_CHKSUM = "file://LICENSE;md5=eb33ef4af05a9c7602843afb7adfe792"
+LIC_FILES_CHKSUM = "file://LICENSE;md5=802b1aed7330d90086be4de63a3188e3"
SRC_URI = "git://git.adelielinux.org/adelie/gcompat.git;protocol=https;branch=current \
- "
+ file://0001-Add-fcntl64-wrapper.patch \
+ "
SRC_URI:append:powerpc = "\
file://0001-make-Static-PIE-does-not-work-on-musl-ppc.patch \
"
-PV = "1.0.0+1.1+git${SRCPV}"
-SRCREV = "4d6a5156a6eb7f56b30d93853a872e36dadde81b"
+PV = "1.1.0"
+SRCREV = "b7bfe0b08c52fdc72e0c1d9d4dcb2129f1642bd6"
S = "${WORKDIR}/git"
@@ -42,6 +43,8 @@ do_install () {
install -d ${D}${nonarch_base_libdir}${SITEINFO_BITS}
ln -rs ${D}${GLIBC_LDSO} ${D}${nonarch_base_libdir}${SITEINFO_BITS}/`basename ${GLIBC_LDSO}`
fi
+ install -d ${D}${libdir}
+ ln -sf ${base_libdir}/libgcompat.so.0 ${D}${libdir}/libgcompat.so
}
FILES:${PN} += "${nonarch_base_libdir}${SITEINFO_BITS}"
diff --git a/meta/recipes-core/musl/libc-test/run-libc-ptests b/meta/recipes-core/musl/libc-test/run-libc-ptests
new file mode 100644
index 0000000000..0b4b687dec
--- /dev/null
+++ b/meta/recipes-core/musl/libc-test/run-libc-ptests
@@ -0,0 +1,28 @@
+#!/bin/sh
+
+set -e
+
+cd /opt/libc-test
+make cleanall
+make run || true
+
+echo ""
+echo "--- ptest result ---"
+# libc-test runs tests by module(e.g. src/api) and generates sub-module test
+# report(e.g. src/api/REPORT) first. After all tests finish, it generates the
+# consolidated report file src/REPORT.
+report="/opt/libc-test/src/REPORT"
+if ! [ -f "${report}" ]; then
+ echo "${report} not found!"
+ echo "FAIL: libc-test"
+ exit 1
+# libc-test prints error on failure and prints nothing on success.
+elif grep -q '^FAIL src.*\.exe.*' "${report}"; then
+ # Print test failure in ptest format.
+ # e.g. "FAIL src/api/main.exe [status 1]" -> "FAIL: api_main"
+ grep '^FAIL src.*\.exe.*' "${report}" \
+ | sed 's|^FAIL src/|FAIL: |;s|/|_|;s|\.exe.*\]||'
+ exit 1
+else
+ echo "PASS: libc-test"
+fi
diff --git a/meta/recipes-core/musl/libc-test/run-ptest b/meta/recipes-core/musl/libc-test/run-ptest
new file mode 100644
index 0000000000..53cd34f506
--- /dev/null
+++ b/meta/recipes-core/musl/libc-test/run-ptest
@@ -0,0 +1,3 @@
+#!/bin/sh
+chown -R ptest:ptest /opt/libc-test
+ su -c ./run-libc-ptests ptest
diff --git a/meta/recipes-core/musl/libc-test_git.bb b/meta/recipes-core/musl/libc-test_git.bb
new file mode 100644
index 0000000000..619a959fd2
--- /dev/null
+++ b/meta/recipes-core/musl/libc-test_git.bb
@@ -0,0 +1,57 @@
+SUMMARY = "Musl libc unit tests"
+HOMEPAGE = "https://wiki.musl-libc.org/libc-test.html"
+DESCRIPTION = "libc-test is a collection of unit tests to measure the \
+correctness and robustness of a C/POSIX standard library implementation. It is \
+developed as part of the musl project."
+SECTION = "tests"
+LICENSE = "MIT"
+LIC_FILES_CHKSUM = "file://COPYRIGHT;md5=43ed1245085be90dc934288117d55a3b"
+
+inherit ptest
+
+SRCREV = "18e28496adee3d84fefdda6efcb9c5b8996a2398"
+SRC_URI = " \
+ git://repo.or.cz/libc-test;branch=master;protocol=https \
+ file://run-ptest \
+ file://run-libc-ptests \
+"
+
+PV = "0+git"
+
+S = "${WORKDIR}/git"
+
+# libc-test 'make' or 'make run' command is designed to build and run tests. It
+# reports both build and test failures. The commands should be run on target.
+do_compile() {
+ :
+}
+
+RDEPENDS:${PN} = " \
+ bash \
+ grep \
+ musl \
+ packagegroup-core-buildessential \
+"
+
+RDEPENDS:${PN}-ptest = " \
+ ${PN} \
+ musl-staticdev \
+ sed \
+"
+
+install_path = "/opt/${PN}"
+FILES:${PN} += "${install_path}/*"
+
+do_install () {
+ install -d ${D}${install_path}/
+ cp ${S}/Makefile ${D}${install_path}
+ cp ${S}/config.mak.def ${D}${install_path}/config.mak
+ cp -r ${S}/src ${D}${install_path}
+}
+
+do_install_ptest_base:append() {
+ install -Dm 0755 ${WORKDIR}/run-libc-ptests ${D}${PTEST_PATH}/run-libc-ptests
+}
+
+COMPATIBLE_HOST = "null"
+COMPATIBLE_HOST:libc-musl = "(.*)"
diff --git a/meta/recipes-core/musl/libssp-nonshared.bb b/meta/recipes-core/musl/libssp-nonshared.bb
index 748dacf312..3faf8f00c3 100644
--- a/meta/recipes-core/musl/libssp-nonshared.bb
+++ b/meta/recipes-core/musl/libssp-nonshared.bb
@@ -31,5 +31,5 @@ do_install() {
#
COMPATIBLE_HOST = ".*-musl.*"
RDEPENDS:${PN}-staticdev = ""
-RDEPENDS:${PN}-dev = ""
+DEV_PKG_DEPENDENCY = ""
RRECOMMENDS:${PN}-dbg = "${PN}-staticdev (= ${EXTENDPKGV})"
diff --git a/meta/recipes-core/musl/musl-legacy-error.bb b/meta/recipes-core/musl/musl-legacy-error.bb
new file mode 100644
index 0000000000..5ce5a233ab
--- /dev/null
+++ b/meta/recipes-core/musl/musl-legacy-error.bb
@@ -0,0 +1,26 @@
+# Copyright (C) 2023 Khem Raj <raj.khem@gmail.com>
+# Released under the MIT license (see COPYING.MIT for the terms)
+
+SUMMARY = "error API GNU extention implementation"
+LICENSE = "BSD-2-Clause"
+LIC_FILES_CHKSUM = "file://error.h;beginline=1;md5=2ee396b23e8507fbf8f98af0471a77c6"
+SECTION = "devel"
+
+SRC_URI = "file://error.h"
+
+do_configure[noexec] = "1"
+do_compile[noexec] = "1"
+
+INHIBIT_DEFAULT_DEPS = "1"
+
+S = "${WORKDIR}"
+
+do_install() {
+ install -Dm 0644 ${S}/error.h -t ${D}${includedir}
+}
+#
+# We will skip parsing for non-musl systems
+#
+COMPATIBLE_HOST = ".*-musl.*"
+DEV_PKG_DEPENDENCY = ""
+RRECOMMENDS:${PN}-dbg = "${PN}-dev (= ${EXTENDPKGV})"
diff --git a/meta/recipes-core/musl/musl-legacy-error/error.h b/meta/recipes-core/musl/musl-legacy-error/error.h
new file mode 100644
index 0000000000..9a4e1f8d00
--- /dev/null
+++ b/meta/recipes-core/musl/musl-legacy-error/error.h
@@ -0,0 +1,60 @@
+#ifndef _ERROR_H_
+#define _ERROR_H_
+
+#include <stdarg.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#include <errno.h>
+
+#warning usage of non-standard #include <error.h> is deprecated
+
+static unsigned int error_message_count = 0;
+
+static inline void error(int status, int errnum, const char* format, ...)
+{
+ /* should be fflush(stdout), but that's unspecified if stdout has been closed;
+ * stick with fflush(NULL) for simplicity (glibc checks if the fd is still valid) */
+ fflush(NULL);
+
+ va_list ap;
+ fprintf(stderr, "%s: ", program_invocation_name);
+ va_start(ap, format);
+ vfprintf(stderr, format, ap);
+ va_end(ap);
+ if (errnum)
+ fprintf(stderr, ": %s", strerror(errnum));
+ fprintf(stderr, "\n");
+ error_message_count++;
+ if (status)
+ exit(status);
+}
+
+static int error_one_per_line = 0;
+
+static inline void error_at_line(int status, int errnum, const char *filename,
+ unsigned int linenum, const char *format, ...)
+{
+ va_list ap;
+ if (error_one_per_line) {
+ static const char *old_filename;
+ static int old_linenum;
+ if (linenum == old_linenum && filename == old_filename)
+ return;
+ old_filename = filename;
+ old_linenum = linenum;
+ }
+ fprintf(stderr, "%s: %s:%u: ", program_invocation_name, filename, linenum);
+ va_start(ap, format);
+ vfprintf(stderr, format, ap);
+ va_end(ap);
+ if (errnum)
+ fprintf(stderr, ": %s", strerror(errnum));
+ fprintf(stderr, "\n");
+ error_message_count++;
+ if (status)
+ exit(status);
+}
+
+
+#endif /* _ERROR_H_ */
diff --git a/meta/recipes-core/musl/musl-locales_git.bb b/meta/recipes-core/musl/musl-locales_git.bb
index 92532237e0..1373c60daf 100644
--- a/meta/recipes-core/musl/musl-locales_git.bb
+++ b/meta/recipes-core/musl/musl-locales_git.bb
@@ -9,8 +9,8 @@ LIC_FILES_CHKSUM = "file://LICENSE;md5=cf5713fba707073020b1db2acaa73e78 \
SRC_URI = "git://git.adelielinux.org/adelie/musl-locales;protocol=https;branch=main"
-PV = "1.0+git${SRCPV}"
-SRCREV = "cac3dcdccd1d0c08d17f4919d862737d64b76f79"
+PV = "1.0+git"
+SRCREV = "5663f5bfd30bf9e1e0ba3fc5fe2da6725969f30e"
S = "${WORKDIR}/git"
@@ -35,6 +35,7 @@ LICENSE:locale-base-en-gb = "MIT"
LICENSE:locale-base-en-us = "MIT"
LICENSE:locale-base-es-es = "MIT"
LICENSE:locale-base-fi-fi = "MIT"
+LICENSE:locale-base-fr-ca = "MIT"
LICENSE:locale-base-fr-fr = "MIT"
LICENSE:locale-base-it-it = "MIT"
LICENSE:locale-base-nb-no = "MIT"
@@ -52,6 +53,7 @@ PACKAGES =+ "locale-base-cs-cz \
locale-base-en-us \
locale-base-es-es \
locale-base-fi-fi \
+ locale-base-fr-ca \
locale-base-fr-fr \
locale-base-it-it \
locale-base-nb-no \
@@ -69,6 +71,7 @@ FILES:locale-base-en-gb += "${datadir}/i18n/locales/musl/en_GB.UTF-8"
FILES:locale-base-en-us += "${datadir}/i18n/locales/musl/en_US.UTF-8"
FILES:locale-base-es-es += "${datadir}/i18n/locales/musl/es_ES.UTF-8"
FILES:locale-base-fi-fi += "${datadir}/i18n/locales/musl/fi_FI.UTF-8"
+FILES:locale-base-fr-ca += "${datadir}/i18n/locales/musl/fr_CA.UTF-8"
FILES:locale-base-fr-fr += "${datadir}/i18n/locales/musl/fr_FR.UTF-8"
FILES:locale-base-it-it += "${datadir}/i18n/locales/musl/it_IT.UTF-8"
FILES:locale-base-nb-no += "${datadir}/i18n/locales/musl/nb_NO.UTF-8"
diff --git a/meta/recipes-core/musl/musl-obstack.bb b/meta/recipes-core/musl/musl-obstack.bb
index 4df24c013b..4c71a141b2 100644
--- a/meta/recipes-core/musl/musl-obstack.bb
+++ b/meta/recipes-core/musl/musl-obstack.bb
@@ -8,9 +8,9 @@ LICENSE = "GPL-2.0-or-later"
LIC_FILES_CHKSUM = "file://COPYING;md5=3d23e4eef8243efcaab6f0a438078932"
SECTION = "libs"
-PV = "1.1"
-SRCREV = "d2ad66b0df44a4b784956f7f7f2717131ddc05f4"
-SRC_URI = "git://github.com/pullmoll/musl-obstack;branch=master;protocol=https"
+PV = "1.2.3"
+SRCREV = "f4385255be1615688c6a5f042277304d7ab288b1"
+SRC_URI = "git://github.com/void-linux/musl-obstack;branch=master;protocol=https"
UPSTREAM_CHECK_COMMITS = "1"
diff --git a/meta/recipes-core/musl/musl-utils.bb b/meta/recipes-core/musl/musl-utils.bb
index c42dff3a53..8280333daf 100644
--- a/meta/recipes-core/musl/musl-utils.bb
+++ b/meta/recipes-core/musl/musl-utils.bb
@@ -1,7 +1,7 @@
# Copyright (C) 2018 Khem Raj <raj.khem@gmail.com>
# Released under the MIT license (see COPYING.MIT for the terms)
-DESCRIPTION = "getconf, getent and iconv implementations for musl"
+SUMMARY = "getconf, getent and iconv implementations for musl"
HOMEPAGE = "https://git.alpinelinux.org/cgit/aports/tree/main/musl"
LICENSE = "BSD-2-Clause & GPL-2.0-or-later"
LIC_FILES_CHKSUM = "file://LICENSE;md5=9d08215e611db87b357e8674b4b42564"
diff --git a/meta/recipes-core/musl/musl.inc b/meta/recipes-core/musl/musl.inc
index 66468e92ff..6ca52b186a 100644
--- a/meta/recipes-core/musl/musl.inc
+++ b/meta/recipes-core/musl/musl.inc
@@ -25,6 +25,9 @@ MIPS_INSTRUCTION_SET = "mips"
ARM_INSTRUCTION_SET:armv5 = "arm"
ARM_INSTRUCTION_SET:armv4 = "arm"
+# 1.2.4 doesn't support riscv32
+COMPATIBLE_HOST:riscv32 = "null"
+
# Enable out of tree build
B = "${WORKDIR}/build"
diff --git a/meta/recipes-core/musl/musl/0001-Make-dynamic-linker-a-relative-symlink-to-libc.patch b/meta/recipes-core/musl/musl/0001-Make-dynamic-linker-a-relative-symlink-to-libc.patch
index ba00efe7b3..8b097f3276 100644
--- a/meta/recipes-core/musl/musl/0001-Make-dynamic-linker-a-relative-symlink-to-libc.patch
+++ b/meta/recipes-core/musl/musl/0001-Make-dynamic-linker-a-relative-symlink-to-libc.patch
@@ -1,7 +1,7 @@
-From 0ec74744a4cba7c5fdfaa2685995119a4fca0260 Mon Sep 17 00:00:00 2001
+From f95b6fd0475a95c00e886219271cb5c93838e3c3 Mon Sep 17 00:00:00 2001
From: Amarnath Valluri <amarnath.valluri@intel.com>
Date: Wed, 18 Jan 2017 16:14:37 +0200
-Subject: [PATCH] Make dynamic linker a relative symlink to libc
+Subject: [PATCH 1/2] Make dynamic linker a relative symlink to libc
absolute symlink into $(libdir) fails to load in a cross build
environment, especially when executing qemu in usermode to run target
@@ -13,18 +13,19 @@ V2:
Make use of 'ln -r' to create relative symlinks, as most fo the distros
shipping coreutils 8.16+
+Upstream-Status: Pending
Signed-off-by: Khem Raj <raj.khem@gmail.com>
Signed-off-by: Amarnath Valluri <amarnath.valluri@intel.com>
---
-Upstream-Status: Pending
----
Makefile | 2 +-
tools/install.sh | 8 +++++---
2 files changed, 6 insertions(+), 4 deletions(-)
+diff --git a/Makefile b/Makefile
+index e8cc4436..466d9afd 100644
--- a/Makefile
+++ b/Makefile
-@@ -210,7 +210,7 @@ $(DESTDIR)$(includedir)/%: $(srcdir)/inc
+@@ -210,7 +210,7 @@ $(DESTDIR)$(includedir)/%: $(srcdir)/include/%
$(INSTALL) -D -m 644 $< $@
$(DESTDIR)$(LDSO_PATHNAME): $(DESTDIR)$(libdir)/libc.so
@@ -33,6 +34,8 @@ Upstream-Status: Pending
install-libs: $(ALL_LIBS:lib/%=$(DESTDIR)$(libdir)/%) $(if $(SHARED_LIBS),$(DESTDIR)$(LDSO_PATHNAME),)
+diff --git a/tools/install.sh b/tools/install.sh
+index d913b60b..b6a7f797 100755
--- a/tools/install.sh
+++ b/tools/install.sh
@@ -6,18 +6,20 @@
@@ -58,7 +61,7 @@ Upstream-Status: Pending
m) mode=$OPTARG ;;
?) usage ;;
esac
-@@ -48,7 +50,7 @@ trap 'rm -f "$tmp"' EXIT INT QUIT TERM H
+@@ -48,7 +50,7 @@ trap 'rm -f "$tmp"' EXIT INT QUIT TERM HUP
umask 077
if test "$symlink" ; then
@@ -67,3 +70,6 @@ Upstream-Status: Pending
else
cat < "$1" > "$tmp"
chmod "$mode" "$tmp"
+--
+2.37.2
+
diff --git a/meta/recipes-core/musl/musl/0002-ldso-Use-syslibdir-and-libdir-as-default-pathes-to-l.patch b/meta/recipes-core/musl/musl/0002-ldso-Use-syslibdir-and-libdir-as-default-pathes-to-l.patch
index 0aeb5eb5c2..59bfae5a27 100644
--- a/meta/recipes-core/musl/musl/0002-ldso-Use-syslibdir-and-libdir-as-default-pathes-to-l.patch
+++ b/meta/recipes-core/musl/musl/0002-ldso-Use-syslibdir-and-libdir-as-default-pathes-to-l.patch
@@ -1,7 +1,8 @@
-From 5a2886f81dbca3f2ed28eebe7d27d471da278db8 Mon Sep 17 00:00:00 2001
+From 3cce8716c6c3ae2e0c835caeac3780ec35090b2d Mon Sep 17 00:00:00 2001
From: Serhey Popovych <serhe.popovych@gmail.com>
Date: Tue, 11 Dec 2018 05:44:20 -0500
-Subject: [PATCH] ldso: Use syslibdir and libdir as default pathes to libdirs
+Subject: [PATCH 2/2] ldso: Use syslibdir and libdir as default pathes to
+ libdirs
In absence of /etc/ld-musl-$(ARCH).path ldso uses default path to search
libraries /lib:/usr/local/lib:/usr/lib.
@@ -20,6 +21,8 @@ Signed-off-by: Serhey Popovych <serhe.popovych@gmail.com>
ldso/dynlink.c | 4 +++-
2 files changed, 5 insertions(+), 2 deletions(-)
+diff --git a/Makefile b/Makefile
+index 466d9afd..d2f458fa 100644
--- a/Makefile
+++ b/Makefile
@@ -47,7 +47,8 @@ CFLAGS_AUTO = -Os -pipe
@@ -32,6 +35,8 @@ Signed-off-by: Serhey Popovych <serhe.popovych@gmail.com>
CFLAGS_ALL += $(CPPFLAGS) $(CFLAGS_AUTO) $(CFLAGS)
LDFLAGS_ALL = $(LDFLAGS_AUTO) $(LDFLAGS)
+diff --git a/ldso/dynlink.c b/ldso/dynlink.c
+index cc677952..b0e8815b 100644
--- a/ldso/dynlink.c
+++ b/ldso/dynlink.c
@@ -29,6 +29,8 @@
@@ -40,10 +45,10 @@ Signed-off-by: Serhey Popovych <serhe.popovych@gmail.com>
+#define SYS_PATH_DFLT SYSLIBDIR ":" LIBDIR
+
- static void error(const char *, ...);
-
- #define MAXP2(a,b) (-(-(a)&-(b)))
-@@ -1094,7 +1096,7 @@ static struct dso *load_library(const ch
+ static void error_impl(const char *, ...);
+ static void error_noop(const char *, ...);
+ static void (*error)(const char *, ...) = error_noop;
+@@ -1097,7 +1099,7 @@ static struct dso *load_library(const char *name, struct dso *needed_by)
sys_path = "";
}
}
@@ -52,3 +57,6 @@ Signed-off-by: Serhey Popovych <serhe.popovych@gmail.com>
fd = path_open(name, sys_path, buf, sizeof buf);
}
pathname = buf;
+--
+2.37.2
+
diff --git a/meta/recipes-core/musl/musl/0003-elf.h-add-typedefs-for-Elf64_Relr-and-Elf32_Relr.patch b/meta/recipes-core/musl/musl/0003-elf.h-add-typedefs-for-Elf64_Relr-and-Elf32_Relr.patch
new file mode 100644
index 0000000000..45d40cd5b4
--- /dev/null
+++ b/meta/recipes-core/musl/musl/0003-elf.h-add-typedefs-for-Elf64_Relr-and-Elf32_Relr.patch
@@ -0,0 +1,37 @@
+From 65b0ac0d998bf0f36924a7c27ed9e702b2a5a453 Mon Sep 17 00:00:00 2001
+From: Violet Purcell <vimproved@inventati.org>
+Date: Sat, 4 Nov 2023 12:09:20 -0400
+Subject: [PATCH] elf.h: add typedefs for Elf64_Relr and Elf32_Relr
+
+These were overlooked when DT_RELR was added in commit
+d32dadd60efb9d3b255351a3b532f8e4c3dd0db1, potentially breaking
+software that treats presence of the DT_RELR macro as implying they
+exist.
+
+Upstream-Status: Backport [1.2.5]
+
+Signed-off-by: Zang Ruochen <zangruochen@loongson.cn>
+
+---
+ include/elf.h | 5 +++++
+ 1 file changed, 5 insertions(+)
+
+diff --git a/include/elf.h b/include/elf.h
+index 23f2c4bc..72d17c3a 100644
+--- a/include/elf.h
++++ b/include/elf.h
+@@ -558,6 +558,11 @@ typedef struct {
+
+
+
++typedef Elf32_Word Elf32_Relr;
++typedef Elf64_Xword Elf64_Relr;
++
++
++
+ #define ELF32_R_SYM(val) ((val) >> 8)
+ #define ELF32_R_TYPE(val) ((val) & 0xff)
+ #define ELF32_R_INFO(sym, type) (((sym) << 8) + ((type) & 0xff))
+--
+2.25.1
+
diff --git a/meta/recipes-core/musl/musl_git.bb b/meta/recipes-core/musl/musl_git.bb
index fde5fc0cce..324269a968 100644
--- a/meta/recipes-core/musl/musl_git.bb
+++ b/meta/recipes-core/musl/musl_git.bb
@@ -4,17 +4,16 @@
require musl.inc
inherit linuxloader
-SRCREV = "6e9d2370c7559af80b32a91f20898f41597e093b"
+SRCREV = "79bdacff83a6bd5b70ff5ae5eb8b6de82c2f7c30"
-BASEVER = "1.2.3"
+BASEVER = "1.2.4"
-PV = "${BASEVER}+git${SRCPV}"
+PV = "${BASEVER}+git"
-# mirror is at git://github.com/kraj/musl.git
-
-SRC_URI = "git://git.musl-libc.org/musl;branch=master \
+SRC_URI = "git://git.etalabs.net/git/musl;branch=master;protocol=https \
file://0001-Make-dynamic-linker-a-relative-symlink-to-libc.patch \
file://0002-ldso-Use-syslibdir-and-libdir-as-default-pathes-to-l.patch \
+ file://0003-elf.h-add-typedefs-for-Elf64_Relr-and-Elf32_Relr.patch \
"
S = "${WORKDIR}/git"
diff --git a/meta/recipes-core/ncurses/files/0001-Fix-CVE-2023-29491.patch b/meta/recipes-core/ncurses/files/0001-Fix-CVE-2023-29491.patch
new file mode 100644
index 0000000000..1232c8c2a8
--- /dev/null
+++ b/meta/recipes-core/ncurses/files/0001-Fix-CVE-2023-29491.patch
@@ -0,0 +1,462 @@
+From 3d54a41f12e9aa059f06e66e72d872f2283395b6 Mon Sep 17 00:00:00 2001
+From: Chen Qi <Qi.Chen@windriver.com>
+Date: Sun, 30 Jul 2023 21:14:00 -0700
+Subject: [PATCH] Fix CVE-2023-29491
+
+CVE: CVE-2023-29491
+
+Upstream-Status: Backport [http://ncurses.scripts.mit.edu/?p=ncurses.git;a=commitdiff;h=eb51b1ea1f75a0ec17c9c5937cb28df1e8eeec56]
+
+Signed-off-by: Chen Qi <Qi.Chen@windriver.com>
+---
+ ncurses/tinfo/lib_tgoto.c | 10 +++-
+ ncurses/tinfo/lib_tparm.c | 116 ++++++++++++++++++++++++++++++++-----
+ ncurses/tinfo/read_entry.c | 3 +
+ progs/tic.c | 6 ++
+ progs/tparm_type.c | 9 +++
+ progs/tparm_type.h | 2 +
+ progs/tput.c | 61 ++++++++++++++++---
+ 7 files changed, 185 insertions(+), 22 deletions(-)
+
+diff --git a/ncurses/tinfo/lib_tgoto.c b/ncurses/tinfo/lib_tgoto.c
+index 9cf5e100..c50ed4df 100644
+--- a/ncurses/tinfo/lib_tgoto.c
++++ b/ncurses/tinfo/lib_tgoto.c
+@@ -207,6 +207,14 @@ tgoto(const char *string, int x, int y)
+ result = tgoto_internal(string, x, y);
+ else
+ #endif
+- result = TIPARM_2(string, y, x);
++ if ((result = TIPARM_2(string, y, x)) == NULL) {
++ /*
++ * Because termcap did not provide a more general solution such as
++ * tparm(), it was necessary to handle single-parameter capabilities
++ * using tgoto(). The internal _nc_tiparm() function returns a NULL
++ * for that case; retry for the single-parameter case.
++ */
++ result = TIPARM_1(string, y);
++ }
+ returnPtr(result);
+ }
+diff --git a/ncurses/tinfo/lib_tparm.c b/ncurses/tinfo/lib_tparm.c
+index d9bdfd8f..a10a3877 100644
+--- a/ncurses/tinfo/lib_tparm.c
++++ b/ncurses/tinfo/lib_tparm.c
+@@ -1086,6 +1086,64 @@ tparam_internal(TPARM_STATE *tps, const char *string, TPARM_DATA *data)
+ return (TPS(out_buff));
+ }
+
++#ifdef CUR
++/*
++ * Only a few standard capabilities accept string parameters. The others that
++ * are parameterized accept only numeric parameters.
++ */
++static bool
++check_string_caps(TPARM_DATA *data, const char *string)
++{
++ bool result = FALSE;
++
++#define CHECK_CAP(name) (VALID_STRING(name) && !strcmp(name, string))
++
++ /*
++ * Disallow string parameters unless we can check them against a terminal
++ * description.
++ */
++ if (cur_term != NULL) {
++ int want_type = 0;
++
++ if (CHECK_CAP(pkey_key))
++ want_type = 2; /* function key #1, type string #2 */
++ else if (CHECK_CAP(pkey_local))
++ want_type = 2; /* function key #1, execute string #2 */
++ else if (CHECK_CAP(pkey_xmit))
++ want_type = 2; /* function key #1, transmit string #2 */
++ else if (CHECK_CAP(plab_norm))
++ want_type = 2; /* label #1, show string #2 */
++ else if (CHECK_CAP(pkey_plab))
++ want_type = 6; /* function key #1, type string #2, show string #3 */
++#if NCURSES_XNAMES
++ else {
++ char *check;
++
++ check = tigetstr("Cs");
++ if (CHECK_CAP(check))
++ want_type = 1; /* style #1 */
++
++ check = tigetstr("Ms");
++ if (CHECK_CAP(check))
++ want_type = 3; /* storage unit #1, content #2 */
++ }
++#endif
++
++ if (want_type == data->tparm_type) {
++ result = TRUE;
++ } else {
++ T(("unexpected string-parameter"));
++ }
++ }
++ return result;
++}
++
++#define ValidCap() (myData.tparm_type == 0 || \
++ check_string_caps(&myData, string))
++#else
++#define ValidCap() 1
++#endif
++
+ #if NCURSES_TPARM_VARARGS
+
+ NCURSES_EXPORT(char *)
+@@ -1100,7 +1158,7 @@ tparm(const char *string, ...)
+ tps->tname = "tparm";
+ #endif /* TRACE */
+
+- if (tparm_setup(cur_term, string, &myData) == OK) {
++ if (tparm_setup(cur_term, string, &myData) == OK && ValidCap()) {
+ va_list ap;
+
+ va_start(ap, string);
+@@ -1135,7 +1193,7 @@ tparm(const char *string,
+ tps->tname = "tparm";
+ #endif /* TRACE */
+
+- if (tparm_setup(cur_term, string, &myData) == OK) {
++ if (tparm_setup(cur_term, string, &myData) == OK && ValidCap()) {
+
+ myData.param[0] = a1;
+ myData.param[1] = a2;
+@@ -1166,7 +1224,7 @@ tiparm(const char *string, ...)
+ tps->tname = "tiparm";
+ #endif /* TRACE */
+
+- if (tparm_setup(cur_term, string, &myData) == OK) {
++ if (tparm_setup(cur_term, string, &myData) == OK && ValidCap()) {
+ va_list ap;
+
+ va_start(ap, string);
+@@ -1179,7 +1237,25 @@ tiparm(const char *string, ...)
+ }
+
+ /*
+- * The internal-use flavor ensures that the parameters are numbers, not strings
++ * The internal-use flavor ensures that parameters are numbers, not strings.
++ * In addition to ensuring that they are numbers, it ensures that the parameter
++ * count is consistent with intended usage.
++ *
++ * Unlike the general-purpose tparm/tiparm, these internal calls are fairly
++ * well defined:
++ *
++ * expected == 0 - not applicable
++ * expected == 1 - set color, or vertical/horizontal addressing
++ * expected == 2 - cursor addressing
++ * expected == 4 - initialize color or color pair
++ * expected == 9 - set attributes
++ *
++ * Only for the last case (set attributes) should a parameter be optional.
++ * Also, a capability which calls for more parameters than expected should be
++ * ignored.
++ *
++ * Return a null if the parameter-checks fail. Otherwise, return a pointer to
++ * the formatted capability string.
+ */
+ NCURSES_EXPORT(char *)
+ _nc_tiparm(int expected, const char *string, ...)
+@@ -1189,22 +1265,36 @@ _nc_tiparm(int expected, const char *string, ...)
+ char *result = NULL;
+
+ _nc_tparm_err = 0;
++ T((T_CALLED("_nc_tiparm(%d, %s, ...)"), expected, _nc_visbuf(string)));
+ #ifdef TRACE
+ tps->tname = "_nc_tiparm";
+ #endif /* TRACE */
+
+- if (tparm_setup(cur_term, string, &myData) == OK
+- && myData.num_actual <= expected
+- && myData.tparm_type == 0) {
+- va_list ap;
++ if (tparm_setup(cur_term, string, &myData) == OK && ValidCap()) {
++ if (myData.num_actual == 0) {
++ T(("missing parameter%s, expected %s%d",
++ expected > 1 ? "s" : "",
++ expected == 9 ? "up to " : "",
++ expected));
++ } else if (myData.num_actual > expected) {
++ T(("too many parameters, have %d, expected %d",
++ myData.num_actual,
++ expected));
++ } else if (expected != 9 && myData.num_actual != expected) {
++ T(("expected %d parameters, have %d",
++ myData.num_actual,
++ expected));
++ } else {
++ va_list ap;
+
+- va_start(ap, string);
+- tparm_copy_valist(&myData, FALSE, ap);
+- va_end(ap);
++ va_start(ap, string);
++ tparm_copy_valist(&myData, FALSE, ap);
++ va_end(ap);
+
+- result = tparam_internal(tps, string, &myData);
++ result = tparam_internal(tps, string, &myData);
++ }
+ }
+- return result;
++ returnPtr(result);
+ }
+
+ /*
+diff --git a/ncurses/tinfo/read_entry.c b/ncurses/tinfo/read_entry.c
+index 2b1875ed..341337d2 100644
+--- a/ncurses/tinfo/read_entry.c
++++ b/ncurses/tinfo/read_entry.c
+@@ -323,6 +323,9 @@ _nc_read_termtype(TERMTYPE2 *ptr, char *buffer, int limit)
+ || bool_count < 0
+ || num_count < 0
+ || str_count < 0
++ || bool_count > BOOLCOUNT
++ || num_count > NUMCOUNT
++ || str_count > STRCOUNT
+ || str_size < 0) {
+ returnDB(TGETENT_NO);
+ }
+diff --git a/progs/tic.c b/progs/tic.c
+index 93a0b491..888927e2 100644
+--- a/progs/tic.c
++++ b/progs/tic.c
+@@ -2270,9 +2270,15 @@ check_1_infotocap(const char *name, NCURSES_CONST char *value, int count)
+
+ _nc_reset_tparm(NULL);
+ switch (actual) {
++ case Str:
++ result = TPARM_1(value, strings[1]);
++ break;
+ case Num_Str:
+ result = TPARM_2(value, numbers[1], strings[2]);
+ break;
++ case Str_Str:
++ result = TPARM_2(value, strings[1], strings[2]);
++ break;
+ case Num_Str_Str:
+ result = TPARM_3(value, numbers[1], strings[2], strings[3]);
+ break;
+diff --git a/progs/tparm_type.c b/progs/tparm_type.c
+index 3da4a077..644aa62a 100644
+--- a/progs/tparm_type.c
++++ b/progs/tparm_type.c
+@@ -47,6 +47,7 @@ tparm_type(const char *name)
+ {code, {longname} }, \
+ {code, {ti} }, \
+ {code, {tc} }
++#define XD(code, onlyname) TD(code, onlyname, onlyname, onlyname)
+ TParams result = Numbers;
+ /* *INDENT-OFF* */
+ static const struct {
+@@ -58,6 +59,10 @@ tparm_type(const char *name)
+ TD(Num_Str, "pkey_xmit", "pfx", "px"),
+ TD(Num_Str, "plab_norm", "pln", "pn"),
+ TD(Num_Str_Str, "pkey_plab", "pfxl", "xl"),
++#if NCURSES_XNAMES
++ XD(Str, "Cs"),
++ XD(Str_Str, "Ms"),
++#endif
+ };
+ /* *INDENT-ON* */
+
+@@ -80,12 +85,16 @@ guess_tparm_type(int nparam, char **p_is_s)
+ case 1:
+ if (!p_is_s[0])
+ result = Numbers;
++ if (p_is_s[0])
++ result = Str;
+ break;
+ case 2:
+ if (!p_is_s[0] && !p_is_s[1])
+ result = Numbers;
+ if (!p_is_s[0] && p_is_s[1])
+ result = Num_Str;
++ if (p_is_s[0] && p_is_s[1])
++ result = Str_Str;
+ break;
+ case 3:
+ if (!p_is_s[0] && !p_is_s[1] && !p_is_s[2])
+diff --git a/progs/tparm_type.h b/progs/tparm_type.h
+index 7c102a30..af5bcf0f 100644
+--- a/progs/tparm_type.h
++++ b/progs/tparm_type.h
+@@ -45,8 +45,10 @@
+ typedef enum {
+ Other = -1
+ ,Numbers = 0
++ ,Str
+ ,Num_Str
+ ,Num_Str_Str
++ ,Str_Str
+ } TParams;
+
+ extern TParams tparm_type(const char *name);
+diff --git a/progs/tput.c b/progs/tput.c
+index 4cd0c5ba..41508b72 100644
+--- a/progs/tput.c
++++ b/progs/tput.c
+@@ -1,5 +1,5 @@
+ /****************************************************************************
+- * Copyright 2018-2021,2022 Thomas E. Dickey *
++ * Copyright 2018-2022,2023 Thomas E. Dickey *
+ * Copyright 1998-2016,2017 Free Software Foundation, Inc. *
+ * *
+ * Permission is hereby granted, free of charge, to any person obtaining a *
+@@ -47,12 +47,15 @@
+ #include <transform.h>
+ #include <tty_settings.h>
+
+-MODULE_ID("$Id: tput.c,v 1.99 2022/02/26 23:19:31 tom Exp $")
++MODULE_ID("$Id: tput.c,v 1.102 2023/04/08 16:26:36 tom Exp $")
+
+ #define PUTS(s) fputs(s, stdout)
+
+ const char *_nc_progname = "tput";
+
++static bool opt_v = FALSE; /* quiet, do not show warnings */
++static bool opt_x = FALSE; /* clear scrollback if possible */
++
+ static bool is_init = FALSE;
+ static bool is_reset = FALSE;
+ static bool is_clear = FALSE;
+@@ -81,6 +84,7 @@ usage(const char *optstring)
+ KEEP(" -S << read commands from standard input")
+ KEEP(" -T TERM use this instead of $TERM")
+ KEEP(" -V print curses-version")
++ KEEP(" -v verbose, show warnings")
+ KEEP(" -x do not try to clear scrollback")
+ KEEP("")
+ KEEP("Commands:")
+@@ -148,7 +152,7 @@ exit_code(int token, int value)
+ * Returns nonzero on error.
+ */
+ static int
+-tput_cmd(int fd, TTY * settings, bool opt_x, int argc, char **argv, int *used)
++tput_cmd(int fd, TTY * settings, int argc, char **argv, int *used)
+ {
+ NCURSES_CONST char *name;
+ char *s;
+@@ -231,7 +235,9 @@ tput_cmd(int fd, TTY * settings, bool opt_x, int argc, char **argv, int *used)
+ } else if (VALID_STRING(s)) {
+ if (argc > 1) {
+ int k;
++ int narg;
+ int analyzed;
++ int provided;
+ int popcount;
+ long numbers[1 + NUM_PARM];
+ char *strings[1 + NUM_PARM];
+@@ -271,14 +277,45 @@ tput_cmd(int fd, TTY * settings, bool opt_x, int argc, char **argv, int *used)
+
+ popcount = 0;
+ _nc_reset_tparm(NULL);
++ /*
++ * Count the number of numeric parameters which are provided.
++ */
++ provided = 0;
++ for (narg = 1; narg < argc; ++narg) {
++ char *ending = NULL;
++ long check = strtol(argv[narg], &ending, 10);
++ if (check < 0 || ending == argv[narg] || *ending != '\0')
++ break;
++ provided = narg;
++ }
+ switch (paramType) {
++ case Str:
++ s = TPARM_1(s, strings[1]);
++ analyzed = 1;
++ if (provided == 0 && argc >= 1)
++ provided++;
++ break;
++ case Str_Str:
++ s = TPARM_2(s, strings[1], strings[2]);
++ analyzed = 2;
++ if (provided == 0 && argc >= 1)
++ provided++;
++ if (provided == 1 && argc >= 2)
++ provided++;
++ break;
+ case Num_Str:
+ s = TPARM_2(s, numbers[1], strings[2]);
+ analyzed = 2;
++ if (provided == 1 && argc >= 2)
++ provided++;
+ break;
+ case Num_Str_Str:
+ s = TPARM_3(s, numbers[1], strings[2], strings[3]);
+ analyzed = 3;
++ if (provided == 1 && argc >= 2)
++ provided++;
++ if (provided == 2 && argc >= 3)
++ provided++;
+ break;
+ case Numbers:
+ analyzed = _nc_tparm_analyze(NULL, s, p_is_s, &popcount);
+@@ -316,7 +353,13 @@ tput_cmd(int fd, TTY * settings, bool opt_x, int argc, char **argv, int *used)
+ if (analyzed < popcount) {
+ analyzed = popcount;
+ }
+- *used += analyzed;
++ if (opt_v && (analyzed != provided)) {
++ fprintf(stderr, "%s: %s parameters for \"%s\"\n",
++ _nc_progname,
++ (analyzed < provided ? "extra" : "missing"),
++ argv[0]);
++ }
++ *used += provided;
+ }
+
+ /* use putp() in order to perform padding */
+@@ -339,7 +382,6 @@ main(int argc, char **argv)
+ int used;
+ TTY old_settings;
+ TTY tty_settings;
+- bool opt_x = FALSE; /* clear scrollback if possible */
+ bool is_alias;
+ bool need_tty;
+
+@@ -348,7 +390,7 @@ main(int argc, char **argv)
+
+ term = getenv("TERM");
+
+- while ((c = getopt(argc, argv, is_alias ? "T:Vx" : "ST:Vx")) != -1) {
++ while ((c = getopt(argc, argv, is_alias ? "T:Vvx" : "ST:Vvx")) != -1) {
+ switch (c) {
+ case 'S':
+ cmdline = FALSE;
+@@ -361,6 +403,9 @@ main(int argc, char **argv)
+ case 'V':
+ puts(curses_version());
+ ExitProgram(EXIT_SUCCESS);
++ case 'v': /* verbose */
++ opt_v = TRUE;
++ break;
+ case 'x': /* do not try to clear scrollback */
+ opt_x = TRUE;
+ break;
+@@ -404,7 +449,7 @@ main(int argc, char **argv)
+ usage(NULL);
+ while (argc > 0) {
+ tty_settings = old_settings;
+- code = tput_cmd(fd, &tty_settings, opt_x, argc, argv, &used);
++ code = tput_cmd(fd, &tty_settings, argc, argv, &used);
+ if (code != 0)
+ break;
+ argc -= used;
+@@ -439,7 +484,7 @@ main(int argc, char **argv)
+ while (argnum > 0) {
+ int code;
+ tty_settings = old_settings;
+- code = tput_cmd(fd, &tty_settings, opt_x, argnum, argnow, &used);
++ code = tput_cmd(fd, &tty_settings, argnum, argnow, &used);
+ if (code != 0) {
+ if (result == 0)
+ result = ErrSystem(0); /* will return value >4 */
+--
+2.40.0
+
diff --git a/meta/recipes-core/ncurses/files/0001-Updating-reset-code-ncurses-6.4-patch-20231104.patch b/meta/recipes-core/ncurses/files/0001-Updating-reset-code-ncurses-6.4-patch-20231104.patch
new file mode 100644
index 0000000000..121db6bffe
--- /dev/null
+++ b/meta/recipes-core/ncurses/files/0001-Updating-reset-code-ncurses-6.4-patch-20231104.patch
@@ -0,0 +1,499 @@
+From 135d37072755704b8d018e5de74e62ff3f28c930 Mon Sep 17 00:00:00 2001
+From: Thomas E. Dickey <dickey@invisible-island.net>
+Date: Sun, 5 Nov 2023 05:54:54 +0530
+Subject: [PATCH] Updating reset code - ncurses 6.4 - patch 20231104
+
++ modify reset command to avoid altering clocal if the terminal uses a
+ modem (prompted by discussion with Werner Fink, Michal Suchanek,
+ OpenSUSE #1201384, Debian #60377).
++ build-fixes for --with-caps variations.
++ correct a couple of section-references in INSTALL.
+
+Signed-off-by: Thomas E. Dickey <dickey@invisible-island.net>
+
+Upstream-Status: Backport [https://ncurses.scripts.mit.edu/?p=ncurses.git;a=commitdiff;h=135d37072755704b8d018e5de74e62ff3f28c930]
+
+Signed-off-by: Soumya Sambu <soumya.sambu@windriver.com>
+---
+ INSTALL | 8 +-
+ include/curses.events | 2 +-
+ ncurses/tinfo/lib_tparm.c | 2 +
+ progs/reset_cmd.c | 281 +++++++++++++++++++++-----------------
+ progs/tabs.c | 10 +-
+ progs/tic.c | 4 +
+ 6 files changed, 176 insertions(+), 131 deletions(-)
+
+diff --git a/INSTALL b/INSTALL
+index d9c1dd12..d0a39af0 100644
+--- a/INSTALL
++++ b/INSTALL
+@@ -47,7 +47,7 @@ If you are converting from BSD curses and do not have root access, be sure
+ to read the BSD CONVERSION NOTES section below.
+
+ If you are trying to build applications using gpm with ncurses,
+-read the USING NCURSES WITH GPM section below.
++read the USING GPM section below.
+
+ If you are cross-compiling, see the note below on BUILDING WITH A CROSS-COMPILER.
+
+@@ -79,7 +79,7 @@ INSTALLATION PROCEDURE:
+ The --prefix option to configure changes the root directory for installing
+ ncurses. The default is normally in subdirectories of /usr/local, except
+ for systems where ncurses is normally installed as a system library (see
+- "IF YOU ARE A SYSTEM INTEGRATOR"). Use --prefix=/usr to replace your
++ "FOR SYSTEM INTEGRATORS"). Use --prefix=/usr to replace your
+ default curses distribution.
+
+ The package gets installed beneath the --prefix directory as follows:
+@@ -176,7 +176,7 @@ INSTALLATION PROCEDURE:
+ You can make curses and terminfo fall back to an existing file of termcap
+ definitions by configuring with --enable-termcap. If you do this, the
+ library will search /etc/termcap before the terminfo database, and will
+- also interpret the contents of the TERM environment variable. See the
++ also interpret the contents of the $TERM environment variable. See the
+ section BSD CONVERSION NOTES below.
+
+ 3. Type `make'. Ignore any warnings, no error messages should be produced.
+@@ -1231,7 +1231,7 @@ CONFIGURE OPTIONS:
+ Specify a search-list of terminfo directories which will be compiled
+ into the ncurses library (default: DATADIR/terminfo)
+
+- This is a colon-separated list, like the TERMINFO_DIRS environment
++ This is a colon-separated list, like the $TERMINFO_DIRS environment
+ variable.
+
+ --with-termlib[=XXX]
+diff --git a/include/curses.events b/include/curses.events
+index 25a2583f..468bde18 100644
+--- a/include/curses.events
++++ b/include/curses.events
+@@ -50,6 +50,6 @@ typedef struct
+ extern NCURSES_EXPORT(int) wgetch_events (WINDOW *, _nc_eventlist *) GCC_DEPRECATED(experimental option); /* experimental */
+ extern NCURSES_EXPORT(int) wgetnstr_events (WINDOW *,char *,int,_nc_eventlist *) GCC_DEPRECATED(experimental option); /* experimental */
+
+-#define KEY_EVENT 0633 /* We were interrupted by an event */
++#define KEY_EVENT 0634 /* We were interrupted by an event */
+
+ #endif /* NCURSES_WGETCH_EVENTS */
+diff --git a/ncurses/tinfo/lib_tparm.c b/ncurses/tinfo/lib_tparm.c
+index a10a3877..cd972c0f 100644
+--- a/ncurses/tinfo/lib_tparm.c
++++ b/ncurses/tinfo/lib_tparm.c
+@@ -1113,8 +1113,10 @@ check_string_caps(TPARM_DATA *data, const char *string)
+ want_type = 2; /* function key #1, transmit string #2 */
+ else if (CHECK_CAP(plab_norm))
+ want_type = 2; /* label #1, show string #2 */
++#ifdef pkey_plab
+ else if (CHECK_CAP(pkey_plab))
+ want_type = 6; /* function key #1, type string #2, show string #3 */
++#endif
+ #if NCURSES_XNAMES
+ else {
+ char *check;
+diff --git a/progs/reset_cmd.c b/progs/reset_cmd.c
+index eff3af72..aec4b077 100644
+--- a/progs/reset_cmd.c
++++ b/progs/reset_cmd.c
+@@ -75,6 +75,9 @@ MODULE_ID("$Id: reset_cmd.c,v 1.28 2021/10/02 18:08:44 tom Exp $")
+ # endif
+ #endif
+
++#define set_flags(target, mask) target |= mask
++#define clear_flags(target, mask) target &= ~((unsigned)(mask))
++
+ static FILE *my_file;
+
+ static bool use_reset = FALSE; /* invoked as reset */
+@@ -188,6 +191,79 @@ out_char(int c)
+ #define reset_char(item, value) \
+ tty_settings->c_cc[item] = CHK(tty_settings->c_cc[item], value)
+
++/*
++ * Simplify ifdefs
++ */
++#ifndef BSDLY
++#define BSDLY 0
++#endif
++#ifndef CRDLY
++#define CRDLY 0
++#endif
++#ifndef ECHOCTL
++#define ECHOCTL 0
++#endif
++#ifndef ECHOKE
++#define ECHOKE 0
++#endif
++#ifndef ECHOPRT
++#define ECHOPRT 0
++#endif
++#ifndef FFDLY
++#define FFDLY 0
++#endif
++#ifndef IMAXBEL
++#define IMAXBEL 0
++#endif
++#ifndef IUCLC
++#define IUCLC 0
++#endif
++#ifndef IXANY
++#define IXANY 0
++#endif
++#ifndef NLDLY
++#define NLDLY 0
++#endif
++#ifndef OCRNL
++#define OCRNL 0
++#endif
++#ifndef OFDEL
++#define OFDEL 0
++#endif
++#ifndef OFILL
++#define OFILL 0
++#endif
++#ifndef OLCUC
++#define OLCUC 0
++#endif
++#ifndef ONLCR
++#define ONLCR 0
++#endif
++#ifndef ONLRET
++#define ONLRET 0
++#endif
++#ifndef ONOCR
++#define ONOCR 0
++#endif
++#ifndef OXTABS
++#define OXTABS 0
++#endif
++#ifndef TAB3
++#define TAB3 0
++#endif
++#ifndef TABDLY
++#define TABDLY 0
++#endif
++#ifndef TOSTOP
++#define TOSTOP 0
++#endif
++#ifndef VTDLY
++#define VTDLY 0
++#endif
++#ifndef XCASE
++#define XCASE 0
++#endif
++
+ /*
+ * Reset the terminal mode bits to a sensible state. Very useful after
+ * a child program dies in raw mode.
+@@ -195,6 +271,10 @@ out_char(int c)
+ void
+ reset_tty_settings(int fd, TTY * tty_settings, int noset)
+ {
++ unsigned mask;
++#ifdef TIOCMGET
++ int modem_bits;
++#endif
+ GET_TTY(fd, tty_settings);
+
+ #ifdef TERMIOS
+@@ -228,106 +308,65 @@ reset_tty_settings(int fd, TTY * tty_settings, int noset)
+ reset_char(VWERASE, CWERASE);
+ #endif
+
+- tty_settings->c_iflag &= ~((unsigned) (IGNBRK
+- | PARMRK
+- | INPCK
+- | ISTRIP
+- | INLCR
+- | IGNCR
+-#ifdef IUCLC
+- | IUCLC
+-#endif
+-#ifdef IXANY
+- | IXANY
+-#endif
+- | IXOFF));
+-
+- tty_settings->c_iflag |= (BRKINT
+- | IGNPAR
+- | ICRNL
+- | IXON
+-#ifdef IMAXBEL
+- | IMAXBEL
+-#endif
+- );
+-
+- tty_settings->c_oflag &= ~((unsigned) (0
+-#ifdef OLCUC
+- | OLCUC
+-#endif
+-#ifdef OCRNL
+- | OCRNL
+-#endif
+-#ifdef ONOCR
+- | ONOCR
+-#endif
+-#ifdef ONLRET
+- | ONLRET
+-#endif
+-#ifdef OFILL
+- | OFILL
+-#endif
+-#ifdef OFDEL
+- | OFDEL
+-#endif
+-#ifdef NLDLY
+- | NLDLY
+-#endif
+-#ifdef CRDLY
+- | CRDLY
+-#endif
+-#ifdef TABDLY
+- | TABDLY
+-#endif
+-#ifdef BSDLY
+- | BSDLY
+-#endif
+-#ifdef VTDLY
+- | VTDLY
+-#endif
+-#ifdef FFDLY
+- | FFDLY
+-#endif
+- ));
+-
+- tty_settings->c_oflag |= (OPOST
+-#ifdef ONLCR
+- | ONLCR
+-#endif
+- );
+-
+- tty_settings->c_cflag &= ~((unsigned) (CSIZE
+- | CSTOPB
+- | PARENB
+- | PARODD
+- | CLOCAL));
+- tty_settings->c_cflag |= (CS8 | CREAD);
+- tty_settings->c_lflag &= ~((unsigned) (ECHONL
+- | NOFLSH
+-#ifdef TOSTOP
+- | TOSTOP
+-#endif
+-#ifdef ECHOPTR
+- | ECHOPRT
+-#endif
+-#ifdef XCASE
+- | XCASE
+-#endif
+- ));
+-
+- tty_settings->c_lflag |= (ISIG
+- | ICANON
+- | ECHO
+- | ECHOE
+- | ECHOK
+-#ifdef ECHOCTL
+- | ECHOCTL
+-#endif
+-#ifdef ECHOKE
+- | ECHOKE
+-#endif
+- );
+-#endif
++ clear_flags(tty_settings->c_iflag, (IGNBRK
++ | PARMRK
++ | INPCK
++ | ISTRIP
++ | INLCR
++ | IGNCR
++ | IUCLC
++ | IXANY
++ | IXOFF));
++
++ set_flags(tty_settings->c_iflag, (BRKINT
++ | IGNPAR
++ | ICRNL
++ | IXON
++ | IMAXBEL));
++
++ clear_flags(tty_settings->c_oflag, (0
++ | OLCUC
++ | OCRNL
++ | ONOCR
++ | ONLRET
++ | OFILL
++ | OFDEL
++ | NLDLY
++ | CRDLY
++ | TABDLY
++ | BSDLY
++ | VTDLY
++ | FFDLY));
++
++ set_flags(tty_settings->c_oflag, (OPOST
++ | ONLCR));
++
++ mask = (CSIZE | CSTOPB | PARENB | PARODD);
++#ifdef TIOCMGET
++ /* leave clocal alone if this appears to use a modem */
++ if (ioctl(fd, TIOCMGET, &modem_bits) == -1)
++ mask |= CLOCAL;
++#else
++ /* cannot check - use the behavior from tset */
++ mask |= CLOCAL;
++#endif
++ clear_flags(tty_settings->c_cflag, mask);
++
++ set_flags(tty_settings->c_cflag, (CS8 | CREAD));
++ clear_flags(tty_settings->c_lflag, (ECHONL
++ | NOFLSH
++ | TOSTOP
++ | ECHOPRT
++ | XCASE));
++
++ set_flags(tty_settings->c_lflag, (ISIG
++ | ICANON
++ | ECHO
++ | ECHOE
++ | ECHOK
++ | ECHOCTL
++ | ECHOKE));
++#endif /* TERMIOS */
+
+ if (!noset) {
+ SET_TTY(fd, tty_settings);
+@@ -402,29 +441,23 @@ set_conversions(TTY * tty_settings)
+ #if defined(EXP_WIN32_DRIVER)
+ /* FIXME */
+ #else
+-#ifdef ONLCR
+- tty_settings->c_oflag |= ONLCR;
+-#endif
+- tty_settings->c_iflag |= ICRNL;
+- tty_settings->c_lflag |= ECHO;
+-#ifdef OXTABS
+- tty_settings->c_oflag |= OXTABS;
+-#endif /* OXTABS */
++ set_flags(tty_settings->c_oflag, ONLCR);
++ set_flags(tty_settings->c_iflag, ICRNL);
++ set_flags(tty_settings->c_lflag, ECHO);
++ set_flags(tty_settings->c_oflag, OXTABS);
+
+ /* test used to be tgetflag("NL") */
+ if (VALID_STRING(newline) && newline[0] == '\n' && !newline[1]) {
+ /* Newline, not linefeed. */
+-#ifdef ONLCR
+- tty_settings->c_oflag &= ~((unsigned) ONLCR);
+-#endif
+- tty_settings->c_iflag &= ~((unsigned) ICRNL);
++ clear_flags(tty_settings->c_oflag, ONLCR);
++ clear_flags(tty_settings->c_iflag, ICRNL);
+ }
+-#ifdef OXTABS
++#if OXTABS
+ /* test used to be tgetflag("pt") */
+ if (VALID_STRING(set_tab) && VALID_STRING(clear_all_tabs))
+- tty_settings->c_oflag &= ~OXTABS;
++ clear_flags(tty_settings->c_oflag, OXTABS);
+ #endif /* OXTABS */
+- tty_settings->c_lflag |= (ECHOE | ECHOK);
++ set_flags(tty_settings->c_lflag, (ECHOE | ECHOK));
+ #endif
+ }
+
+@@ -490,7 +523,7 @@ send_init_strings(int fd GCC_UNUSED, TTY * old_settings)
+ bool need_flush = FALSE;
+
+ (void) old_settings;
+-#ifdef TAB3
++#if TAB3
+ if (old_settings != 0 &&
+ old_settings->c_oflag & (TAB3 | ONLCR | OCRNL | ONLRET)) {
+ old_settings->c_oflag &= (TAB3 | ONLCR | OCRNL | ONLRET);
+@@ -512,22 +545,22 @@ send_init_strings(int fd GCC_UNUSED, TTY * old_settings)
+
+ if (VALID_STRING(clear_margins)) {
+ need_flush |= sent_string(clear_margins);
+- } else
++ }
+ #if defined(set_lr_margin)
+- if (VALID_STRING(set_lr_margin)) {
++ else if (VALID_STRING(set_lr_margin)) {
+ need_flush |= sent_string(TIPARM_2(set_lr_margin, 0, columns - 1));
+- } else
++ }
+ #endif
+ #if defined(set_left_margin_parm) && defined(set_right_margin_parm)
+- if (VALID_STRING(set_left_margin_parm)
+- && VALID_STRING(set_right_margin_parm)) {
++ else if (VALID_STRING(set_left_margin_parm)
++ && VALID_STRING(set_right_margin_parm)) {
+ need_flush |= sent_string(TIPARM_1(set_left_margin_parm, 0));
+ need_flush |= sent_string(TIPARM_1(set_right_margin_parm,
+ columns - 1));
+- } else
++ }
+ #endif
+- if (VALID_STRING(set_left_margin)
+- && VALID_STRING(set_right_margin)) {
++ else if (VALID_STRING(set_left_margin)
++ && VALID_STRING(set_right_margin)) {
+ need_flush |= to_left_margin();
+ need_flush |= sent_string(set_left_margin);
+ if (VALID_STRING(parm_right_cursor)) {
+diff --git a/progs/tabs.c b/progs/tabs.c
+index 7378d116..d904330b 100644
+--- a/progs/tabs.c
++++ b/progs/tabs.c
+@@ -370,7 +370,9 @@ do_set_margin(int margin, bool no_op)
+ }
+ tputs(set_left_margin, 1, putch);
+ }
+- } else if (VALID_STRING(set_left_margin_parm)) {
++ }
++#if defined(set_left_margin_parm) && defined(set_right_margin_parm)
++ else if (VALID_STRING(set_left_margin_parm)) {
+ result = TRUE;
+ if (!no_op) {
+ if (VALID_STRING(set_right_margin_parm)) {
+@@ -379,12 +381,16 @@ do_set_margin(int margin, bool no_op)
+ tputs(TIPARM_2(set_left_margin_parm, margin, max_cols), 1, putch);
+ }
+ }
+- } else if (VALID_STRING(set_lr_margin)) {
++ }
++#endif
++#if defined(set_lr_margin)
++ else if (VALID_STRING(set_lr_margin)) {
+ result = TRUE;
+ if (!no_op) {
+ tputs(TIPARM_2(set_lr_margin, margin, max_cols), 1, putch);
+ }
+ }
++#endif
+ return result;
+ }
+
+diff --git a/progs/tic.c b/progs/tic.c
+index 888927e2..78b568fa 100644
+--- a/progs/tic.c
++++ b/progs/tic.c
+@@ -3142,6 +3142,7 @@ guess_ANSI_VTxx(TERMTYPE2 *tp)
+ * In particular, any ECMA-48 terminal should support these, though the details
+ * for u9 are implementation dependent.
+ */
++#if defined(user6) && defined(user7) && defined(user8) && defined(user9)
+ static void
+ check_user_6789(TERMTYPE2 *tp)
+ {
+@@ -3177,6 +3178,9 @@ check_user_6789(TERMTYPE2 *tp)
+ break;
+ }
+ }
++#else
++#define check_user_6789(tp) /* nothing */
++#endif
+
+ /* other sanity-checks (things that we don't want in the normal
+ * logic that reads a terminfo entry)
+--
+2.40.0
diff --git a/meta/recipes-core/ncurses/files/exit_prototype.patch b/meta/recipes-core/ncurses/files/exit_prototype.patch
new file mode 100644
index 0000000000..fd961512e0
--- /dev/null
+++ b/meta/recipes-core/ncurses/files/exit_prototype.patch
@@ -0,0 +1,32 @@
+From 4a769a441d7e57a23017c3037cde3e53fb9f35fe Mon Sep 17 00:00:00 2001
+From: Khem Raj <raj.khem@gmail.com>
+Date: Tue, 30 Aug 2022 15:58:32 -0700
+Subject: [PATCH] Add needed headers for including mbstate_t and exit()
+
+Upstream-Status: Inappropriate [Reconfigure will solve it]
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+
+---
+ configure | 2 ++
+ 1 file changed, 2 insertions(+)
+
+diff --git a/configure b/configure
+index f377f551..163f8899 100755
+--- a/configure
++++ b/configure
+@@ -3423,6 +3423,7 @@ rm -f "conftest.$ac_objext" "conftest.$ac_ext"
+ cat >"conftest.$ac_ext" <<_ACEOF
+ #line 3424 "configure"
+ #include "confdefs.h"
++#include <stdlib.h>
+ $ac_declaration
+ int
+ main (void)
+@@ -13111,6 +13112,7 @@ cat >"conftest.$ac_ext" <<_ACEOF
+ #include <stdlib.h>
+ #include <stdarg.h>
+ #include <stdio.h>
++#include <wchar.h>
+ #ifdef HAVE_LIBUTF8_H
+ #include <libutf8.h>
+ #endif
diff --git a/meta/recipes-core/ncurses/ncurses.inc b/meta/recipes-core/ncurses/ncurses.inc
index 7a7c7dd227..761b6a3d31 100644
--- a/meta/recipes-core/ncurses/ncurses.inc
+++ b/meta/recipes-core/ncurses/ncurses.inc
@@ -2,7 +2,7 @@ SUMMARY = "The New Curses library"
DESCRIPTION = "SVr4 and XSI-Curses compatible curses library and terminfo tools including tic, infocmp, captoinfo. Supports color, multiple highlights, forms-drawing characters, and automatic recognition of keypad and function-key sequences. Extensions include resizable windows and mouse support on both xterm and Linux console using the gpm library."
HOMEPAGE = "http://www.gnu.org/software/ncurses/ncurses.html"
LICENSE = "MIT"
-LIC_FILES_CHKSUM = "file://COPYING;md5=9529289636145d1bf093c96af067695a;endline=27"
+LIC_FILES_CHKSUM = "file://COPYING;md5=c5a4600fdef86384c41ca33ecc70a4b8;endline=27"
SECTION = "libs"
DEPENDS = "ncurses-native"
DEPENDS:class-native = ""
@@ -13,7 +13,7 @@ BINCONFIG = "${bindir}/ncurses5-config ${bindir}/ncursesw5-config \
inherit autotools binconfig-disabled multilib_header pkgconfig
# Upstream has useful patches at times at ftp://invisible-island.net/ncurses/
-SRC_URI = "git://salsa.debian.org/debian/ncurses.git;protocol=https;branch=master"
+SRC_URI = "git://github.com/mirror/ncurses.git;protocol=https;branch=master"
EXTRA_AUTORECONF = "-I m4"
@@ -27,10 +27,8 @@ EXTRASITECONFIG = "CFLAGS='${CFLAGS} -I${SYSROOT_DESTDIR}${includedir}'"
# TODO: remove this variable when widec is supported in every setup?
ENABLE_WIDEC ?= "true"
-# _GNU_SOURCE is required for widec stuff and is detected automatically
-# for target objects. But it must be set manually for native and sdk
-# builds.
-BUILD_CPPFLAGS += "-D_GNU_SOURCE"
+# _GNU_SOURCE is required for widec stuff and is not detected automatically
+CPPFLAGS += "-D_GNU_SOURCE"
# natives don't generally look in base_libdir
base_libdir:class-native = "${libdir}"
diff --git a/meta/recipes-core/ncurses/ncurses_6.3.bb b/meta/recipes-core/ncurses/ncurses_6.3.bb
deleted file mode 100644
index f0256dad22..0000000000
--- a/meta/recipes-core/ncurses/ncurses_6.3.bb
+++ /dev/null
@@ -1,14 +0,0 @@
-require ncurses.inc
-
-SRC_URI += "file://0001-tic-hang.patch \
- file://0002-configure-reproducible.patch \
- file://0003-gen-pkgconfig.in-Do-not-include-LDFLAGS-in-generated.patch \
- "
-# commit id corresponds to the revision in package version
-SRCREV = "51d0fd9cc3edb975f04224f29f777f8f448e8ced"
-S = "${WORKDIR}/git"
-EXTRA_OECONF += "--with-abi-version=5"
-UPSTREAM_CHECK_GITTAGREGEX = "(?P<pver>\d+(\.\d+)+)$"
-
-# This is needed when using patchlevel versions like 6.1+20181013
-#CVE_VERSION = "${@d.getVar("PV").split('+')[0]}.${@d.getVar("PV").split('+')[1]}"
diff --git a/meta/recipes-core/ncurses/ncurses_6.4.bb b/meta/recipes-core/ncurses/ncurses_6.4.bb
new file mode 100644
index 0000000000..2c621525f9
--- /dev/null
+++ b/meta/recipes-core/ncurses/ncurses_6.4.bb
@@ -0,0 +1,17 @@
+require ncurses.inc
+
+SRC_URI += "file://0001-tic-hang.patch \
+ file://0002-configure-reproducible.patch \
+ file://0003-gen-pkgconfig.in-Do-not-include-LDFLAGS-in-generated.patch \
+ file://exit_prototype.patch \
+ file://0001-Fix-CVE-2023-29491.patch \
+ file://0001-Updating-reset-code-ncurses-6.4-patch-20231104.patch \
+ "
+# commit id corresponds to the revision in package version
+SRCREV = "79b9071f2be20a24c7be031655a5638f6032f29f"
+S = "${WORKDIR}/git"
+EXTRA_OECONF += "--with-abi-version=5"
+UPSTREAM_CHECK_GITTAGREGEX = "(?P<pver>\d+(\.\d+)+)$"
+
+# This is needed when using patchlevel versions like 6.1+20181013
+#CVE_VERSION = "${@d.getVar("PV").split('+')[0]}.${@d.getVar("PV").split('+')[1]}"
diff --git a/meta/recipes-core/netbase/netbase_6.3.bb b/meta/recipes-core/netbase/netbase_6.3.bb
deleted file mode 100644
index bec6498cdd..0000000000
--- a/meta/recipes-core/netbase/netbase_6.3.bb
+++ /dev/null
@@ -1,23 +0,0 @@
-SUMMARY = "Basic TCP/IP networking support"
-DESCRIPTION = "This package provides the necessary infrastructure for basic TCP/IP based networking"
-HOMEPAGE = "http://packages.debian.org/netbase"
-SECTION = "base"
-LICENSE = "GPL-2.0-only"
-LIC_FILES_CHKSUM = "file://debian/copyright;md5=3dd6192d306f582dee7687da3d8748ab"
-PE = "1"
-
-SRC_URI = "${DEBIAN_MIRROR}/main/n/${BPN}/${BPN}_${PV}.tar.xz"
-
-inherit allarch
-
-SRC_URI[sha256sum] = "7c42a6a1cafa0c64103c71cab6431fc8613179b2449a1a00e55e3584e860d81c"
-
-UPSTREAM_CHECK_URI = "${DEBIAN_MIRROR}/main/n/netbase/"
-
-do_install () {
- install -d ${D}${sysconfdir}
- install -m 0644 ${S}/etc/rpc ${D}${sysconfdir}/rpc
- install -m 0644 ${S}/etc/protocols ${D}${sysconfdir}/protocols
- install -m 0644 ${S}/etc/services ${D}${sysconfdir}/services
- install -m 0644 ${S}/etc/ethertypes ${D}${sysconfdir}/ethertypes
-}
diff --git a/meta/recipes-core/netbase/netbase_6.4.bb b/meta/recipes-core/netbase/netbase_6.4.bb
new file mode 100644
index 0000000000..5a448a4b91
--- /dev/null
+++ b/meta/recipes-core/netbase/netbase_6.4.bb
@@ -0,0 +1,25 @@
+SUMMARY = "Basic TCP/IP networking support"
+DESCRIPTION = "This package provides the necessary infrastructure for basic TCP/IP based networking"
+HOMEPAGE = "http://packages.debian.org/netbase"
+SECTION = "base"
+LICENSE = "GPL-2.0-only"
+LIC_FILES_CHKSUM = "file://debian/copyright;md5=78dd2c7c6f487348e4a0092c17a19d42"
+PE = "1"
+
+SRC_URI = "${DEBIAN_MIRROR}/main/n/${BPN}/${BPN}_${PV}.tar.xz"
+
+inherit allarch
+
+SRC_URI[sha256sum] = "fa6621826ff1150e581bd90bc3c8a4ecafe5df90404f207db6dcdf2c75f26ad7"
+
+UPSTREAM_CHECK_URI = "${DEBIAN_MIRROR}/main/n/netbase/"
+
+do_install () {
+ install -d ${D}${sysconfdir}
+ install -m 0644 ${S}/etc/rpc ${D}${sysconfdir}/rpc
+ install -m 0644 ${S}/etc/protocols ${D}${sysconfdir}/protocols
+ install -m 0644 ${S}/etc/services ${D}${sysconfdir}/services
+ install -m 0644 ${S}/etc/ethertypes ${D}${sysconfdir}/ethertypes
+}
+
+S = "${WORKDIR}/netbase"
diff --git a/meta/recipes-core/newlib/libgloss/fix-rs6000-crt0.patch b/meta/recipes-core/newlib/libgloss/fix-rs6000-crt0.patch
index c220fb8437..8c29fea8cf 100644
--- a/meta/recipes-core/newlib/libgloss/fix-rs6000-crt0.patch
+++ b/meta/recipes-core/newlib/libgloss/fix-rs6000-crt0.patch
@@ -5,35 +5,41 @@ name for crt0 in specific, so performing all of them results in an error during
do_install, we simply modify the name of the objects so the installation can proceed
and leave it to the user to select which object files to use.
-Signed-off-by: Alejandro Enedino Hernandez Samaniego <alejandr@xilinx.com>
-Index: newlib-3.0.0/libgloss/rs6000/Makefile.in
+01/2023: Rebased for libgloss 4.3.0
+
+Signed-off-by: Alejandro Enedino Hernandez Samaniego <alejandro@enedino.org>
+
+
+Index: git/libgloss/rs6000/Makefile.in
===================================================================
---- newlib-3.0.0.orig/libgloss/rs6000/Makefile.in
-+++ newlib-3.0.0/libgloss/rs6000/Makefile.in
-@@ -358,7 +358,7 @@ install-sim:
- set -e; for x in ${SIM_CRT0} ${SIM_BSP} ${SIM_SCRIPTS}; do ${INSTALL_DATA} $$x $(DESTDIR)${tooldir}/lib${MULTISUBDIR}/$$x; done
+--- git.orig/libgloss/rs6000/Makefile.in
++++ git/libgloss/rs6000/Makefile.in
+@@ -362,7 +362,7 @@ install-sim:
install-mvme:
+ ${mkinstalldirs} ${DESTDIR}${tooldir}/lib${MULTISUBDIR}
- set -e; for x in ${MVME_CRT0} ${MVME_BSP} ${MVME_SCRIPTS}; do ${INSTALL_DATA} $$x $(DESTDIR)${tooldir}/lib${MULTISUBDIR}/$$x; done
+ set -e; for x in ${MVME_CRT0} ${MVME_BSP} ${MVME_SCRIPTS}; do ${INSTALL_DATA} $$x $(DESTDIR)${tooldir}/lib${MULTISUBDIR}/mvme-$$x; done
install-solaris:
- set -e; for x in ${SOLARIS_CRT0} ${SOLARIS_BSP} ${SOLARIS_SCRIPTS}; do ${INSTALL_DATA} $$x $(DESTDIR)${tooldir}/lib${MULTISUBDIR}/$$x; done
-@@ -367,15 +367,15 @@ install-linux:
- set -e; for x in ${LINUX_CRT0} ${LINUX_BSP} ${LINUX_SCRIPTS}; do ${INSTALL_DATA} $$x $(DESTDIR)${tooldir}/lib${MULTISUBDIR}/$$x; done
+ ${mkinstalldirs} ${DESTDIR}${tooldir}/lib${MULTISUBDIR}
+@@ -374,17 +374,17 @@ install-linux:
install-yellowknife:
+ ${mkinstalldirs} ${DESTDIR}${tooldir}/lib${MULTISUBDIR}
- set -e; for x in ${YELLOWKNIFE_CRT0} ${YELLOWKNIFE_BSP}; do ${INSTALL_DATA} $$x $(DESTDIR)${tooldir}/lib${MULTISUBDIR}/$$x; done
+ set -e; for x in ${YELLOWKNIFE_CRT0} ${YELLOWKNIFE_BSP}; do ${INSTALL_DATA} $$x $(DESTDIR)${tooldir}/lib${MULTISUBDIR}/yellowknife-$$x; done
set -e; for x in ${YELLOWKNIFE_SCRIPTS}; do ${INSTALL_DATA} $(srcdir)/$$x $(DESTDIR)${tooldir}/lib${MULTISUBDIR}/$$x; done
install-ads:
+ ${mkinstalldirs} ${DESTDIR}${tooldir}/lib${MULTISUBDIR}
- set -e; for x in ${ADS_CRT0} ${ADS_BSP}; do ${INSTALL_DATA} $$x $(DESTDIR)${tooldir}/lib${MULTISUBDIR}/$$x; done
+ set -e; for x in ${ADS_CRT0} ${ADS_BSP}; do ${INSTALL_DATA} $$x $(DESTDIR)${tooldir}/lib${MULTISUBDIR}/ads-$$x; done
set -e; for x in ${ADS_SCRIPTS}; do ${INSTALL_DATA} $(srcdir)/$$x $(DESTDIR)${tooldir}/lib${MULTISUBDIR}/$$x; done
install-mbx:
+ ${mkinstalldirs} ${DESTDIR}${tooldir}/lib${MULTISUBDIR}
- set -e; for x in ${MBX_CRT0} ${MBX_BSP}; do ${INSTALL_DATA} $$x $(DESTDIR)${tooldir}/lib${MULTISUBDIR}/$$x; done
+ set -e; for x in ${MBX_CRT0} ${MBX_BSP}; do ${INSTALL_DATA} $$x $(DESTDIR)${tooldir}/lib${MULTISUBDIR}/mbx-$$x; done
set -e; for x in ${MBX_SCRIPTS} ${MBX_SPECS}; do ${INSTALL_DATA} $(srcdir)/$$x $(DESTDIR)${tooldir}/lib${MULTISUBDIR}/$$x; done
diff --git a/meta/recipes-core/newlib/libgloss/fix_makefile_include_arm_h.patch b/meta/recipes-core/newlib/libgloss/fix_makefile_include_arm_h.patch
deleted file mode 100644
index 7645be7314..0000000000
--- a/meta/recipes-core/newlib/libgloss/fix_makefile_include_arm_h.patch
+++ /dev/null
@@ -1,30 +0,0 @@
-Upstream-Status: Inappropriate [OE-Specific]
-
-When trying to build libgloss for an arm target, the build system
-complains about missing some include files:
-
-| fatal error: acle-compiat.h: No such file or directory
-| #include "acle-compat.h"
-| ^~~~~~~~~~~~~~~
-| compilation terminated.
-
-These include files come from the newlib source, but since we
-are building libgloss separately from newlib, libgloss is unaware
-of where they are, this patch fixes the INCLUDES so the build system
-can find such files.
-
-Signed-off-by: Alejandro Enedino Hernandez Samaniego <alejandr@xilinx.com>
-
-Index: newlib-3.0.0/libgloss/config/default.mh
-===================================================================
---- newlib-3.0.0.orig/libgloss/config/default.mh
-+++ newlib-3.0.0/libgloss/config/default.mh
-@@ -1,7 +1,7 @@
- NEWLIB_CFLAGS = `if [ -d ${objroot}/newlib ]; then echo -I${objroot}/newlib/targ-include -I${srcroot}/newlib/libc/include; fi`
- NEWLIB_LDFLAGS = `if [ -d ${objroot}/newlib ]; then echo -B${objroot}/newlib/ -L${objroot}/newlib/; fi`
-
--INCLUDES = -I. -I$(srcdir)/..
-+INCLUDES = -I. -I$(srcdir)/.. -I$(srcdir)/../newlib/libc/machine/arm
- # Note that when building the library, ${MULTILIB} is not the way multilib
- # options are passed; they're passed in $(CFLAGS).
- CFLAGS_FOR_TARGET = -O2 -g ${MULTILIB} ${INCLUDES} ${NEWLIB_CFLAGS}
diff --git a/meta/recipes-core/newlib/libgloss/libgloss-build-without-nostdinc.patch b/meta/recipes-core/newlib/libgloss/libgloss-build-without-nostdinc.patch
new file mode 100644
index 0000000000..2b66155eea
--- /dev/null
+++ b/meta/recipes-core/newlib/libgloss/libgloss-build-without-nostdinc.patch
@@ -0,0 +1,30 @@
+Upstream-Status: Inappropriate [OE-Specific]
+
+Since commit e0c0ad82 libgloss started passing the -nostdinc to CC.
+
+They dont want to build against C library headers that are already in
+the system to avoid pollution, however, we purposely build libgloss
+against the newly built newlib C library, thats why we keep newlib
+and libgloss in separate recipes and create a dependency between them.
+
+This causes an issue where bitbake stops finding newlib headers while
+libgloss is being built.
+
+Do not pass -nostdinc to CC to maintain current behavior of
+TCLIBC=newlib
+
+
+Signed-off-by: Alejandro Enedino Hernandez Samaniego <alejandro@enedino.org>
+Index: newlib-3.0.0/libgloss/configure
+===================================================================
+--- newlib-3.0.0.orig/libgloss/configure
++++ newlib-3.0.0/libgloss/configure
+@@ -5106,7 +5106,7 @@ fi
+ fi
+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $libc_cv_compiler_isystem" >&5
+ $as_echo "$libc_cv_compiler_isystem" >&6; }
+-CC="$CC -nostdinc $libc_cv_compiler_isystem"
++CC="$CC $libc_cv_compiler_isystem"
+
+ host_makefile_frag_path=$host_makefile_frag
+
diff --git a/meta/recipes-core/newlib/libgloss_4.2.0.bb b/meta/recipes-core/newlib/libgloss_4.2.0.bb
deleted file mode 100644
index 8c8ee91395..0000000000
--- a/meta/recipes-core/newlib/libgloss_4.2.0.bb
+++ /dev/null
@@ -1,33 +0,0 @@
-require newlib.inc
-
-DEPENDS += "newlib"
-
-FILESEXTRAPATHS:prepend := "${THISDIR}/libgloss:"
-
-SRC_URI:append:powerpc = " file://fix-rs6000-crt0.patch"
-SRC_URI:append:arm = " file://fix_makefile_include_arm_h.patch"
-
-do_configure() {
- ${S}/libgloss/configure ${EXTRA_OECONF}
-}
-
-do_install:prepend() {
- # install doesn't create this itself, avoid install error
- install -d ${D}${prefix}/${TARGET_SYS}/lib
-}
-
-do_install:append() {
- # Move libs to default directories so they can be picked up later
- install -d ${D}${libdir}
- mv -v ${D}${prefix}/${TARGET_SYS}/lib/* ${D}${libdir}
-
- # Remove original directory
- rmdir -p --ignore-fail-on-non-empty ${D}${prefix}/${TARGET_SYS}/lib
-}
-
-# Split packages correctly
-FILES:${PN} += "${libdir}/*.ld ${libdir}/*.specs"
-FILES:${PN}-dev += "${libdir}/cpu-init/*"
-
-INHIBIT_PACKAGE_STRIP = "1"
-INHIBIT_PACKAGE_DEBUG_SPLIT = "1"
diff --git a/meta/recipes-core/newlib/libgloss_git.bb b/meta/recipes-core/newlib/libgloss_git.bb
new file mode 100644
index 0000000000..7e34e33c7a
--- /dev/null
+++ b/meta/recipes-core/newlib/libgloss_git.bb
@@ -0,0 +1,36 @@
+require newlib.inc
+
+DEPENDS += "newlib"
+
+FILESEXTRAPATHS:prepend := "${THISDIR}/libgloss:"
+
+SRC_URI:append = " file://libgloss-build-without-nostdinc.patch"
+SRC_URI:append:powerpc = " file://fix-rs6000-crt0.patch"
+SRC_URI:append:powerpc = " file://fix-rs6000-cflags.patch"
+
+do_configure() {
+ ${S}/libgloss/configure ${EXTRA_OECONF}
+}
+
+do_install:prepend() {
+ # install doesn't create this itself, avoid install error
+ install -d ${D}${prefix}/${TARGET_SYS}/lib
+}
+
+do_install:append() {
+ # Move libs to default directories so they can be picked up later
+ install -d ${D}${libdir}
+ mv -v ${D}${prefix}/${TARGET_SYS}/lib/* ${D}${libdir}
+
+ # Remove original directory
+ rmdir -p --ignore-fail-on-non-empty ${D}${prefix}/${TARGET_SYS}/lib
+}
+
+# Split packages correctly
+FILES:${PN} += "${libdir}/*.ld ${libdir}/*.specs"
+FILES:${PN}-dev += "${libdir}/cpu-init/*"
+# RiscV installation moved the syscall header to this location
+FILES:${PN}-dev += "${prefix}/${TARGET_SYS}/include/machine/*.h"
+
+INHIBIT_PACKAGE_STRIP = "1"
+INHIBIT_PACKAGE_DEBUG_SPLIT = "1"
diff --git a/meta/recipes-core/newlib/newlib.inc b/meta/recipes-core/newlib/newlib.inc
index 27b52a43af..6113f5e831 100644
--- a/meta/recipes-core/newlib/newlib.inc
+++ b/meta/recipes-core/newlib/newlib.inc
@@ -3,31 +3,26 @@ HOMEPAGE = "https://sourceware.org/newlib/"
DESCRIPTION = "C library intended for use on embedded systems. It is a conglomeration of several library parts, all under free software licenses that make them easily usable on embedded products."
SECTION = "libs"
-LICENSE = "GPL-2.0-only & LGPL-3.0-only & GPL-3.0-only & LGPL-2.0-only & BSD-2-Clause & BSD-3-Clause & TCL"
+LICENSE = "GPL-2.0-only & LGPL-3.0-only & GPL-3.0-only & LGPL-2.0-only & BSD-2-Clause & BSD-3-Clause & TCL & Apache-2.0-with-LLVM-exception"
LIC_FILES_CHKSUM = " \
file://COPYING;md5=59530bdf33659b29e73d4adb9f9f6552 \
file://COPYING3.LIB;md5=6a6a8e020838b23406c81b19c1d46df6 \
file://COPYING3;md5=d32239bcb673463ab874e80d47fae504 \
file://COPYING.LIBGLOSS;md5=c0469b6ebb847a75781066be515f032d \
file://COPYING.LIB;md5=2d5025d4aa3495befef8f17206a5b0a1 \
- file://COPYING.NEWLIB;md5=8bb75595dbcc7d45e5c0d116d7bdf6ce \
+ file://COPYING.NEWLIB;md5=4f1a15846ffee91e352418563e1bce27 \
file://newlib/libc/posix/COPYRIGHT;md5=103468ff1982be840fdf4ee9f8b51bbf \
- file://newlib/libc/sys/linux/linuxthreads/LICENSE;md5=73640207fbc79b198c7ffd4ad4d97aa0 \
"
-# Newlib maintainers sometimes publish the source
-# from a specific snapshot, after an upgrade we can
-# delete the following line and keep the empty default
-NEWLIB_SNAPSHOT = ".20211231"
-NEWLIB_SNAPSHOT ?= ""
-SRC_URI = "https://sourceware.org/pub/newlib/newlib-${PV}${NEWLIB_SNAPSHOT}.tar.gz"
-SRC_URI[sha256sum] = "c3a0e8b63bc3bef1aeee4ca3906b53b3b86c8d139867607369cb2915ffc54435"
-
+BASEVER = "4.4.0"
+PV = "${BASEVER}+git"
+SRC_URI = "git://sourceware.org/git/newlib-cygwin.git;protocol=https;branch=main"
+SRCREV="ad11e2587f83d61357a32c61c36d72ea4f39315e"
INHIBIT_DEFAULT_DEPS = "1"
DEPENDS = "virtual/${TARGET_PREFIX}gcc"
-S = "${WORKDIR}/newlib-${PV}${NEWLIB_SNAPSHOT}"
+S = "${WORKDIR}/git"
B = "${WORKDIR}/build"
## disable stdlib
@@ -52,6 +47,12 @@ EXTRA_OECONF = " \
do_configure[cleandirs] = "${B}"
+# We need a dummy limits.h to pass preprocessor checks
+do_configure:append(){
+ install -d ${STAGING_INCDIR}
+ touch ${STAGING_INCDIR}/limits.h
+}
+
do_install() {
oe_runmake install DESTDIR='${D}'
}
diff --git a/meta/recipes-core/newlib/newlib_4.2.0.bb b/meta/recipes-core/newlib/newlib_4.2.0.bb
deleted file mode 100644
index 0542c596ba..0000000000
--- a/meta/recipes-core/newlib/newlib_4.2.0.bb
+++ /dev/null
@@ -1,20 +0,0 @@
-require newlib.inc
-
-PROVIDES += "virtual/libc virtual/libiconv virtual/libintl"
-
-do_configure() {
- export CC_FOR_TARGET="${CC}"
- ${S}/configure ${EXTRA_OECONF}
-}
-
-do_install:append() {
- # Move include files and libs to default directories so they can be picked up later
- mv -v ${D}${prefix}/${TARGET_SYS}/lib ${D}${libdir}
- mv -v ${D}${prefix}/${TARGET_SYS}/include ${D}${includedir}
-
- # Remove original directory
- rmdir ${D}${prefix}/${TARGET_SYS}
-}
-
-# No rpm package is actually created but -dev depends on it, avoid dnf error
-RDEPENDS:${PN}-dev:libc-newlib = ""
diff --git a/meta/recipes-core/newlib/newlib_git.bb b/meta/recipes-core/newlib/newlib_git.bb
new file mode 100644
index 0000000000..fb922d65d1
--- /dev/null
+++ b/meta/recipes-core/newlib/newlib_git.bb
@@ -0,0 +1,20 @@
+require newlib.inc
+
+PROVIDES += "virtual/libc virtual/libiconv virtual/libintl"
+
+do_configure() {
+ export CC_FOR_TARGET="${CC}"
+ ${S}/configure ${EXTRA_OECONF}
+}
+
+do_install:append() {
+ # Move include files and libs to default directories so they can be picked up later
+ mv -v ${D}${prefix}/${TARGET_SYS}/lib ${D}${libdir}
+ mv -v ${D}${prefix}/${TARGET_SYS}/include ${D}${includedir}
+
+ # Remove original directory
+ rmdir ${D}${prefix}/${TARGET_SYS}
+}
+
+# No rpm package is actually created but -dev depends on it, avoid dnf error
+DEV_PKG_DEPENDENCY:libc-newlib = ""
diff --git a/meta/recipes-core/os-release/os-release.bb b/meta/recipes-core/os-release/os-release.bb
index caa2ed25aa..8906906bc3 100644
--- a/meta/recipes-core/os-release/os-release.bb
+++ b/meta/recipes-core/os-release/os-release.bb
@@ -2,6 +2,7 @@ inherit allarch
SUMMARY = "Operating system identification"
DESCRIPTION = "The /usr/lib/os-release file contains operating system identification data."
+HOMEPAGE = "https://www.freedesktop.org/software/systemd/man/os-release.html"
LICENSE = "MIT"
INHIBIT_DEFAULT_DEPS = "1"
@@ -10,10 +11,12 @@ do_unpack[noexec] = "1"
do_patch[noexec] = "1"
do_configure[noexec] = "1"
+# See: https://www.freedesktop.org/software/systemd/man/os-release.html
# Other valid fields: BUILD_ID ID_LIKE ANSI_COLOR CPE_NAME
# HOME_URL SUPPORT_URL BUG_REPORT_URL
OS_RELEASE_FIELDS = "\
- ID ID_LIKE NAME VERSION VERSION_ID PRETTY_NAME DISTRO_CODENAME \
+ ID ID_LIKE NAME VERSION VERSION_ID VERSION_CODENAME PRETTY_NAME \
+ CPE_NAME \
"
OS_RELEASE_UNQUOTED_FIELDS = "ID VERSION_ID VARIANT_ID"
@@ -21,7 +24,22 @@ ID = "${DISTRO}"
NAME = "${DISTRO_NAME}"
VERSION = "${DISTRO_VERSION}${@' (%s)' % DISTRO_CODENAME if 'DISTRO_CODENAME' in d else ''}"
VERSION_ID = "${DISTRO_VERSION}"
+VERSION_CODENAME = "${DISTRO_CODENAME}"
PRETTY_NAME = "${DISTRO_NAME} ${VERSION}"
+
+# The vendor field is hardcoded to "openembedded" deliberately. We'd
+# advise developers leave it as this value to clearly identify the
+# underlying build environment from which the OS was constructed. We
+# understand people will want to identify themselves as the people who
+# built the image, we'd suggest using the DISTRO element to do this, so
+# that is customisable.
+# This end result combines to mean systems can be traced back to both who
+# built them and which system was used, which is ultimately the goal of
+# the CPE.
+
+CPE_DISTRO ??= "${DISTRO}"
+CPE_NAME="cpe:/o:openembedded:${CPE_DISTRO}:${VERSION_ID}"
+
BUILD_ID ?= "${DATETIME}"
BUILD_ID[vardepsexclude] = "DATETIME"
diff --git a/meta/recipes-core/ovmf/ovmf-shell-image.bb b/meta/recipes-core/ovmf/ovmf-shell-image.bb
index 50c4517da3..4d7958eb5f 100644
--- a/meta/recipes-core/ovmf/ovmf-shell-image.bb
+++ b/meta/recipes-core/ovmf/ovmf-shell-image.bb
@@ -1,4 +1,4 @@
-DESCRIPTION = "boot image with UEFI shell and tools"
+SUMMARY = "boot image with UEFI shell and tools"
COMPATIBLE_HOST:class-target='(i.86|x86_64).*'
# For this image recipe, only the wic format with a
diff --git a/meta/recipes-core/ovmf/ovmf/0001-BaseTools-fix-gcc12-warning-1.patch b/meta/recipes-core/ovmf/ovmf/0001-BaseTools-fix-gcc12-warning-1.patch
deleted file mode 100644
index 23366b4d07..0000000000
--- a/meta/recipes-core/ovmf/ovmf/0001-BaseTools-fix-gcc12-warning-1.patch
+++ /dev/null
@@ -1,51 +0,0 @@
-From b0a0b7b605aeb8106e7b50d1efeb746f401498cc Mon Sep 17 00:00:00 2001
-From: Gerd Hoffmann <kraxel@redhat.com>
-Date: Thu, 24 Mar 2022 20:04:35 +0800
-Subject: [PATCH] BaseTools: fix gcc12 warning
-
-Sdk/C/LzmaEnc.c: In function ?LzmaEnc_CodeOneMemBlock?:
-Sdk/C/LzmaEnc.c:2828:19: error: storing the address of local variable ?outStream? in ?*p.rc.outStream? [-Werror=dangling-pointer=]
- 2828 | p->rc.outStream = &outStream.vt;
- | ~~~~~~~~~~~~~~~~^~~~~~~~~~~~~~~
-Sdk/C/LzmaEnc.c:2811:28: note: ?outStream? declared here
- 2811 | CLzmaEnc_SeqOutStreamBuf outStream;
- | ^~~~~~~~~
-Sdk/C/LzmaEnc.c:2811:28: note: ?pp? declared here
-Sdk/C/LzmaEnc.c:2828:19: error: storing the address of local variable ?outStream? in ?*(CLzmaEnc *)pp.rc.outStream? [-Werror=dangling-pointer=]
- 2828 | p->rc.outStream = &outStream.vt;
- | ~~~~~~~~~~~~~~~~^~~~~~~~~~~~~~~
-Sdk/C/LzmaEnc.c:2811:28: note: ?outStream? declared here
- 2811 | CLzmaEnc_SeqOutStreamBuf outStream;
- | ^~~~~~~~~
-Sdk/C/LzmaEnc.c:2811:28: note: ?pp? declared here
-cc1: all warnings being treated as errors
-
-Upstream-Status: Backport [https://github.com/tianocore/edk2/commit/85021f8cf22d1bd4114803c6c610dea5ef0059f1]
-Signed-off-by: Gerd Hoffmann <kraxel@redhat.com>
-Reviewed-by: Bob Feng <bob.c.feng@intel.com>
----
- BaseTools/Source/C/LzmaCompress/Sdk/C/LzmaEnc.c | 3 ++-
- 1 file changed, 2 insertions(+), 1 deletion(-)
-
-diff --git a/BaseTools/Source/C/LzmaCompress/Sdk/C/LzmaEnc.c b/BaseTools/Source/C/LzmaCompress/Sdk/C/LzmaEnc.c
-index 4e9b499f8d..4b9f5fa692 100644
---- a/BaseTools/Source/C/LzmaCompress/Sdk/C/LzmaEnc.c
-+++ b/BaseTools/Source/C/LzmaCompress/Sdk/C/LzmaEnc.c
-@@ -2825,12 +2825,13 @@ SRes LzmaEnc_CodeOneMemBlock(CLzmaEncHandle pp, BoolInt reInit,
-
- nowPos64 = p->nowPos64;
- RangeEnc_Init(&p->rc);
-- p->rc.outStream = &outStream.vt;
-
- if (desiredPackSize == 0)
- return SZ_ERROR_OUTPUT_EOF;
-
-+ p->rc.outStream = &outStream.vt;
- res = LzmaEnc_CodeOneBlock(p, desiredPackSize, *unpackSize);
-+ p->rc.outStream = NULL;
-
- *unpackSize = (UInt32)(p->nowPos64 - nowPos64);
- *destLen -= outStream.rem;
---
-2.36.1
-
diff --git a/meta/recipes-core/ovmf/ovmf/0001-BaseTools-fix-gcc12-warning.patch b/meta/recipes-core/ovmf/ovmf/0001-BaseTools-fix-gcc12-warning.patch
deleted file mode 100644
index 91c01647db..0000000000
--- a/meta/recipes-core/ovmf/ovmf/0001-BaseTools-fix-gcc12-warning.patch
+++ /dev/null
@@ -1,49 +0,0 @@
-From 2435c17e56652479315853cec5b91fb0ea0911a3 Mon Sep 17 00:00:00 2001
-From: Gerd Hoffmann <kraxel@redhat.com>
-Date: Thu, 24 Mar 2022 20:04:34 +0800
-Subject: [PATCH] BaseTools: fix gcc12 warning
-
-GenFfs.c:545:5: error: pointer ?InFileHandle? used after ?fclose? [-Werror=use-after-free]
- 545 | Error(NULL, 0, 4001, "Resource", "memory cannot be allocated of %s", InFileHandle);
- | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-GenFfs.c:544:5: note: call to ?fclose? here
- 544 | fclose (InFileHandle);
- | ^~~~~~~~~~~~~~~~~~~~~
-
-Upstream-Status: Backport [https://github.com/tianocore/edk2/commit/7b005f344e533cd913c3ca05b266f9872df886d1]
-Signed-off-by: Gerd Hoffmann <kraxel@redhat.com>
-Reviewed-by: Bob Feng <bob.c.feng@intel.com>
----
- BaseTools/Source/C/GenFfs/GenFfs.c | 2 +-
- BaseTools/Source/C/GenSec/GenSec.c | 2 +-
- 2 files changed, 2 insertions(+), 2 deletions(-)
-
-diff --git a/BaseTools/Source/C/GenFfs/GenFfs.c b/BaseTools/Source/C/GenFfs/GenFfs.c
-index 949025c333..d78d62ab36 100644
---- a/BaseTools/Source/C/GenFfs/GenFfs.c
-+++ b/BaseTools/Source/C/GenFfs/GenFfs.c
-@@ -542,7 +542,7 @@ GetAlignmentFromFile(char *InFile, UINT32 *Alignment)
- PeFileBuffer = (UINT8 *) malloc (PeFileSize);
- if (PeFileBuffer == NULL) {
- fclose (InFileHandle);
-- Error(NULL, 0, 4001, "Resource", "memory cannot be allocated of %s", InFileHandle);
-+ Error(NULL, 0, 4001, "Resource", "memory cannot be allocated for %s", InFile);
- return EFI_OUT_OF_RESOURCES;
- }
- fread (PeFileBuffer, sizeof (UINT8), PeFileSize, InFileHandle);
-diff --git a/BaseTools/Source/C/GenSec/GenSec.c b/BaseTools/Source/C/GenSec/GenSec.c
-index d54a4f9e0a..b1d05367ec 100644
---- a/BaseTools/Source/C/GenSec/GenSec.c
-+++ b/BaseTools/Source/C/GenSec/GenSec.c
-@@ -1062,7 +1062,7 @@ GetAlignmentFromFile(char *InFile, UINT32 *Alignment)
- PeFileBuffer = (UINT8 *) malloc (PeFileSize);
- if (PeFileBuffer == NULL) {
- fclose (InFileHandle);
-- Error(NULL, 0, 4001, "Resource", "memory cannot be allocated of %s", InFileHandle);
-+ Error(NULL, 0, 4001, "Resource", "memory cannot be allocated for %s", InFile);
- return EFI_OUT_OF_RESOURCES;
- }
- fread (PeFileBuffer, sizeof (UINT8), PeFileSize, InFileHandle);
---
-2.36.1
-
diff --git a/meta/recipes-core/ovmf/ovmf/0001-ovmf-update-path-to-native-BaseTools.patch b/meta/recipes-core/ovmf/ovmf/0001-ovmf-update-path-to-native-BaseTools.patch
index 89d9ffab5e..490d9e8046 100644
--- a/meta/recipes-core/ovmf/ovmf/0001-ovmf-update-path-to-native-BaseTools.patch
+++ b/meta/recipes-core/ovmf/ovmf/0001-ovmf-update-path-to-native-BaseTools.patch
@@ -1,7 +1,7 @@
-From 1125f5a02c2f327aeffe2d6b66a9d816ad2eeec0 Mon Sep 17 00:00:00 2001
+From d8df6b6433351763e1db791dd84d432983d2b249 Mon Sep 17 00:00:00 2001
From: Ricardo Neri <ricardo.neri-calderon@linux.intel.com>
Date: Thu, 9 Jun 2016 02:23:01 -0700
-Subject: [PATCH 1/6] ovmf: update path to native BaseTools
+Subject: [PATCH 1/4] ovmf: update path to native BaseTools
BaseTools is a set of utilities to build EDK-based firmware. These utilities
are used during the build process. Thus, they need to be built natively.
@@ -10,13 +10,13 @@ tools. The BBAKE_EDK_TOOLS_PATH string is used as a pattern to be replaced
with the appropriate location before building.
Signed-off-by: Ricardo Neri <ricardo.neri-calderon@linux.intel.com>
-Upstream-Status: Pending
+Upstream-Status: Inappropriate [oe-core cross compile specific]
---
OvmfPkg/build.sh | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/OvmfPkg/build.sh b/OvmfPkg/build.sh
-index 91b1442ade..1858dae31a 100755
+index b0334fb76e..094f86f096 100755
--- a/OvmfPkg/build.sh
+++ b/OvmfPkg/build.sh
@@ -24,7 +24,7 @@ then
@@ -29,5 +29,5 @@ index 91b1442ade..1858dae31a 100755
source edksetup.sh BaseTools
else
--
-2.32.0
+2.30.2
diff --git a/meta/recipes-core/ovmf/ovmf/0002-BaseTools-makefile-adjust-to-build-in-under-bitbake.patch b/meta/recipes-core/ovmf/ovmf/0002-BaseTools-makefile-adjust-to-build-in-under-bitbake.patch
index f6141c8af5..eeedc9e20f 100644
--- a/meta/recipes-core/ovmf/ovmf/0002-BaseTools-makefile-adjust-to-build-in-under-bitbake.patch
+++ b/meta/recipes-core/ovmf/ovmf/0002-BaseTools-makefile-adjust-to-build-in-under-bitbake.patch
@@ -1,66 +1,69 @@
-From 19d4c7f9812062a683b3ba60b35aac0461190456 Mon Sep 17 00:00:00 2001
+From ac9df4fb92965f1f95a5bdbde5f2f86d0c569711 Mon Sep 17 00:00:00 2001
From: Ricardo Neri <ricardo.neri-calderon@linux.intel.com>
Date: Fri, 26 Jul 2019 17:34:26 -0400
-Subject: [PATCH 2/6] BaseTools: makefile: adjust to build in under bitbake
+Subject: [PATCH] BaseTools: makefile: adjust to build in under bitbake
Prepend the build flags with those of bitbake. This is to build
using the bitbake native sysroot include and library directories.
+Note from Alex: this is not appropriate for upstream submission as
+the recipe already does lots of similar in-place fixups elsewhere, so
+this patch shold be converted to follow that pattern. We're not going
+to fight against how upstream wants to configure the build.
+
Signed-off-by: Ricardo Neri <ricardo.neri@linux.intel.com>
-Upstream-Status: Pending
+Upstream-Status: Inappropriate [needs to be converted to in-recipe fixups]
---
- BaseTools/Source/C/Makefiles/header.makefile | 17 +++++++++--------
- 1 file changed, 9 insertions(+), 8 deletions(-)
+ BaseTools/Source/C/Makefiles/header.makefile | 15 +++++++--------
+ 1 file changed, 7 insertions(+), 8 deletions(-)
diff --git a/BaseTools/Source/C/Makefiles/header.makefile b/BaseTools/Source/C/Makefiles/header.makefile
-index 0df728f327..1299d47c87 100644
+index d369908a09..22c670f316 100644
--- a/BaseTools/Source/C/Makefiles/header.makefile
+++ b/BaseTools/Source/C/Makefiles/header.makefile
-@@ -75,35 +75,36 @@ $(error Bad HOST_ARCH)
- endif
+@@ -85,35 +85,34 @@ endif
INCLUDE = $(TOOL_INCLUDE) -I $(MAKEROOT) -I $(MAKEROOT)/Include/Common -I $(MAKEROOT)/Include/ -I $(MAKEROOT)/Include/IndustryStandard -I $(MAKEROOT)/Common/ -I .. -I . $(ARCH_INCLUDE)
--BUILD_CPPFLAGS = $(INCLUDE)
-+BUILD_CPPFLAGS += $(INCLUDE)
+ INCLUDE += -I $(EDK2_PATH)/MdePkg/Include
+-CPPFLAGS = $(INCLUDE)
++CPPFLAGS += $(INCLUDE)
# keep EXTRA_OPTFLAGS last
BUILD_OPTFLAGS = -O2 $(EXTRA_OPTFLAGS)
ifeq ($(DARWIN),Darwin)
# assume clang or clang compatible flags on OS X
--BUILD_CFLAGS = -MD -fshort-wchar -fno-strict-aliasing -Wall -Werror \
-+BUILD_CFLAGS += -MD -fshort-wchar -fno-strict-aliasing -Wall -Werror \
+-CFLAGS = -MD -fshort-wchar -fno-strict-aliasing -Wall -Werror \
++CFLAGS += -MD -fshort-wchar -fno-strict-aliasing -Wall -Werror \
-Wno-deprecated-declarations -Wno-self-assign -Wno-unused-result -nostdlib -g
else
- ifeq ($(CXX), llvm)
--BUILD_CFLAGS = -MD -fshort-wchar -fno-strict-aliasing -fwrapv \
-+BUILD_CFLAGS += -MD -fshort-wchar -fno-strict-aliasing -fwrapv \
+ ifneq ($(CLANG),)
+-CFLAGS = -MD -fshort-wchar -fno-strict-aliasing -fwrapv \
++CFLAGS += -MD -fshort-wchar -fno-strict-aliasing -fwrapv \
-fno-delete-null-pointer-checks -Wall -Werror \
-Wno-deprecated-declarations -Wno-self-assign \
-Wno-unused-result -nostdlib -g
else
--BUILD_CFLAGS = -MD -fshort-wchar -fno-strict-aliasing -fwrapv \
-+BUILD_CFLAGS += -MD -fshort-wchar -fno-strict-aliasing -fwrapv \
+-CFLAGS = -MD -fshort-wchar -fno-strict-aliasing -fwrapv \
++CFLAGS += -MD -fshort-wchar -fno-strict-aliasing -fwrapv \
-fno-delete-null-pointer-checks -Wall -Werror \
-Wno-deprecated-declarations -Wno-stringop-truncation -Wno-restrict \
-Wno-unused-result -nostdlib -g
endif
endif
- ifeq ($(CXX), llvm)
--BUILD_LFLAGS =
--BUILD_CXXFLAGS = -Wno-deprecated-register -Wno-unused-result
-+BUILD_LFLAGS = $(LDFLAGS)
-+BUILD_CXXFLAGS += -Wno-deprecated-register -Wno-unused-result
+ ifneq ($(CLANG),)
+-LDFLAGS =
+-CXXFLAGS = -Wno-deprecated-register -Wno-unused-result -std=c++14
++CXXFLAGS += -Wno-deprecated-register -Wno-unused-result -std=c++14
else
--BUILD_LFLAGS =
--BUILD_CXXFLAGS = -Wno-unused-result
-+BUILD_LFLAGS = $(LDFLAGS)
-+BUILD_CXXFLAGS += -Wno-unused-result
+-LDFLAGS =
+-CXXFLAGS = -Wno-unused-result
++CXXFLAGS += -Wno-unused-result
endif
+
ifeq ($(HOST_ARCH), IA32)
#
# Snow Leopard is a 32-bit and 64-bit environment. uname -m returns i386, but gcc defaults
--
-2.32.0
+2.30.2
diff --git a/meta/recipes-core/ovmf/ovmf/0003-debug-prefix-map.patch b/meta/recipes-core/ovmf/ovmf/0003-debug-prefix-map.patch
new file mode 100644
index 0000000000..c0c763c1cf
--- /dev/null
+++ b/meta/recipes-core/ovmf/ovmf/0003-debug-prefix-map.patch
@@ -0,0 +1,104 @@
+From 03e536b20d0b72cf078052f6748de8df3836625c Mon Sep 17 00:00:00 2001
+From: Alexander Kanavin <alex.kanavin@gmail.com>
+Date: Mon, 14 Jun 2021 19:56:28 +0200
+Subject: [PATCH 3/4] debug prefix map
+
+We want to pass ${DEBUG_PREFIX_MAP} to gcc commands and also pass in
+ --debug-prefix-map to nasm (we carry a patch to nasm for this). The
+tools definitions file is built by ovmf-native so we need to pass this in
+at target build time when we know the right values so we use the environment.
+
+By using determininistc file paths during the ovmf build, it removes the
+opportunitity for gcc/ld to change the output binaries due to path lengths
+overflowing section sizes and causing small changes in the binary output.
+Previously we relied on the stripped output being the same which isn't always
+the case if the size of the debug symbols varies.
+
+Upstream-Status: Submitted [https://github.com/tianocore/edk2/pull/2202]
+Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
+Signed-off-by: Alexander Kanavin <alex.kanavin@gmail.com>
+---
+ BaseTools/Conf/tools_def.template | 18 +++++++++---------
+ 1 file changed, 9 insertions(+), 9 deletions(-)
+
+diff --git a/BaseTools/Conf/tools_def.template b/BaseTools/Conf/tools_def.template
+index 503a6687c1..10ac38ef9e 100755
+--- a/BaseTools/Conf/tools_def.template
++++ b/BaseTools/Conf/tools_def.template
+@@ -739,7 +739,7 @@ NOOPT_*_*_OBJCOPY_ADDDEBUGFLAG = --add-gnu-debuglink="$(DEBUG_DIR)/$(MODULE_
+ *_*_*_DTCPP_PATH = DEF(DTCPP_BIN)
+ *_*_*_DTC_PATH = DEF(DTC_BIN)
+
+-DEFINE GCC_ALL_CC_FLAGS = -g -Os -fshort-wchar -fno-builtin -fno-strict-aliasing -Wall -Werror -Wno-array-bounds -include AutoGen.h -fno-common
++DEFINE GCC_ALL_CC_FLAGS = -g -Os -fshort-wchar -fno-builtin -fno-strict-aliasing -Wall -Werror -Wno-array-bounds -include AutoGen.h -fno-common ENV(GCC_PREFIX_MAP)
+ DEFINE GCC_ARM_CC_FLAGS = DEF(GCC_ALL_CC_FLAGS) -mlittle-endian -mabi=aapcs -fno-short-enums -funsigned-char -ffunction-sections -fdata-sections -fomit-frame-pointer -Wno-address -mthumb -fno-pic -fno-pie
+ DEFINE GCC_LOONGARCH64_CC_FLAGS = DEF(GCC_ALL_CC_FLAGS) -mabi=lp64d -fno-asynchronous-unwind-tables -fno-plt -Wno-address -fno-short-enums -fsigned-char -ffunction-sections -fdata-sections
+ DEFINE GCC_ARM_CC_XIPFLAGS = -mno-unaligned-access
+@@ -759,8 +759,8 @@ DEFINE GCC_ARM_ASLDLINK_FLAGS = DEF(GCC_ARM_DLINK_FLAGS) -Wl,--entry,Refere
+ DEFINE GCC_AARCH64_ASLDLINK_FLAGS = DEF(GCC_AARCH64_DLINK_FLAGS) -Wl,--entry,ReferenceAcpiTable -u $(IMAGE_ENTRY_POINT) DEF(GCC_ARM_AARCH64_ASLDLINK_FLAGS)
+ DEFINE GCC_LOONGARCH64_ASLDLINK_FLAGS = DEF(GCC_LOONGARCH64_DLINK_FLAGS) -Wl,--entry,ReferenceAcpiTable -u $(IMAGE_ENTRY_POINT)
+ DEFINE GCC_IA32_X64_DLINK_FLAGS = DEF(GCC_IA32_X64_DLINK_COMMON) --entry _$(IMAGE_ENTRY_POINT) --file-alignment 0x20 --section-alignment 0x20 -Map $(DEST_DIR_DEBUG)/$(BASE_NAME).map
+-DEFINE GCC_ASM_FLAGS = -c -x assembler -imacros AutoGen.h
+-DEFINE GCC_PP_FLAGS = -E -x assembler-with-cpp -include AutoGen.h
++DEFINE GCC_ASM_FLAGS = -c -x assembler -imacros AutoGen.h ENV(GCC_PREFIX_MAP)
++DEFINE GCC_PP_FLAGS = -E -x assembler-with-cpp -include AutoGen.h ENV(GCC_PREFIX_MAP)
+ DEFINE GCC_VFRPP_FLAGS = -x c -E -P -DVFRCOMPILE --include $(MODULE_NAME)StrDefs.h
+ DEFINE GCC_ASLPP_FLAGS = -x c -E -include AutoGen.h
+ DEFINE GCC_ASLCC_FLAGS = -x c
+@@ -913,7 +913,7 @@ DEFINE GCC5_LOONGARCH64_PP_FLAGS = -mabi=lp64d -march=loongarch64 DEF(
+ *_GCC48_IA32_DLINK2_FLAGS = DEF(GCC48_IA32_DLINK2_FLAGS)
+ *_GCC48_IA32_RC_FLAGS = DEF(GCC_IA32_RC_FLAGS)
+ *_GCC48_IA32_OBJCOPY_FLAGS =
+-*_GCC48_IA32_NASM_FLAGS = -f elf32
++*_GCC48_IA32_NASM_FLAGS = -f elf32 ENV(NASM_PREFIX_MAP)
+
+ DEBUG_GCC48_IA32_CC_FLAGS = DEF(GCC48_IA32_CC_FLAGS)
+ RELEASE_GCC48_IA32_CC_FLAGS = DEF(GCC48_IA32_CC_FLAGS) -Wno-unused-but-set-variable
+@@ -941,7 +941,7 @@ RELEASE_GCC48_IA32_CC_FLAGS = DEF(GCC48_IA32_CC_FLAGS) -Wno-unused-but-set
+ *_GCC48_X64_DLINK2_FLAGS = DEF(GCC48_X64_DLINK2_FLAGS)
+ *_GCC48_X64_RC_FLAGS = DEF(GCC_X64_RC_FLAGS)
+ *_GCC48_X64_OBJCOPY_FLAGS =
+-*_GCC48_X64_NASM_FLAGS = -f elf64
++*_GCC48_X64_NASM_FLAGS = -f elf64 ENV(NASM_PREFIX_MAP)
+
+ DEBUG_GCC48_X64_CC_FLAGS = DEF(GCC48_X64_CC_FLAGS)
+ RELEASE_GCC48_X64_CC_FLAGS = DEF(GCC48_X64_CC_FLAGS) -Wno-unused-but-set-variable
+@@ -1050,7 +1050,7 @@ RELEASE_GCC48_AARCH64_CC_FLAGS = DEF(GCC48_AARCH64_CC_FLAGS) -Wno-unused-but-s
+ *_GCC49_IA32_DLINK2_FLAGS = DEF(GCC49_IA32_DLINK2_FLAGS)
+ *_GCC49_IA32_RC_FLAGS = DEF(GCC_IA32_RC_FLAGS)
+ *_GCC49_IA32_OBJCOPY_FLAGS =
+-*_GCC49_IA32_NASM_FLAGS = -f elf32
++*_GCC49_IA32_NASM_FLAGS = -f elf32 ENV(NASM_PREFIX_MAP)
+
+ DEBUG_GCC49_IA32_CC_FLAGS = DEF(GCC49_IA32_CC_FLAGS)
+ RELEASE_GCC49_IA32_CC_FLAGS = DEF(GCC49_IA32_CC_FLAGS) -Wno-unused-but-set-variable -Wno-unused-const-variable
+@@ -1078,7 +1078,7 @@ RELEASE_GCC49_IA32_CC_FLAGS = DEF(GCC49_IA32_CC_FLAGS) -Wno-unused-but-set
+ *_GCC49_X64_DLINK2_FLAGS = DEF(GCC49_X64_DLINK2_FLAGS)
+ *_GCC49_X64_RC_FLAGS = DEF(GCC_X64_RC_FLAGS)
+ *_GCC49_X64_OBJCOPY_FLAGS =
+-*_GCC49_X64_NASM_FLAGS = -f elf64
++*_GCC49_X64_NASM_FLAGS = -f elf64 ENV(NASM_PREFIX_MAP)
+
+ DEBUG_GCC49_X64_CC_FLAGS = DEF(GCC49_X64_CC_FLAGS)
+ RELEASE_GCC49_X64_CC_FLAGS = DEF(GCC49_X64_CC_FLAGS) -Wno-unused-but-set-variable -Wno-unused-const-variable
+@@ -1337,7 +1337,7 @@ RELEASE_GCCNOLTO_AARCH64_DLINK_XIPFLAGS = -z common-page-size=0x20
+ *_GCC5_IA32_DLINK2_FLAGS = DEF(GCC5_IA32_DLINK2_FLAGS) -no-pie
+ *_GCC5_IA32_RC_FLAGS = DEF(GCC_IA32_RC_FLAGS)
+ *_GCC5_IA32_OBJCOPY_FLAGS =
+-*_GCC5_IA32_NASM_FLAGS = -f elf32
++*_GCC5_IA32_NASM_FLAGS = -f elf32 ENV(NASM_PREFIX_MAP)
+
+ DEBUG_GCC5_IA32_CC_FLAGS = DEF(GCC5_IA32_CC_FLAGS) -flto
+ DEBUG_GCC5_IA32_DLINK_FLAGS = DEF(GCC5_IA32_X64_DLINK_FLAGS) -flto -Os -Wl,-m,elf_i386,--oformat=elf32-i386
+@@ -1369,7 +1369,7 @@ RELEASE_GCC5_IA32_DLINK_FLAGS = DEF(GCC5_IA32_X64_DLINK_FLAGS) -flto -Os -Wl,
+ *_GCC5_X64_DLINK2_FLAGS = DEF(GCC5_X64_DLINK2_FLAGS)
+ *_GCC5_X64_RC_FLAGS = DEF(GCC_X64_RC_FLAGS)
+ *_GCC5_X64_OBJCOPY_FLAGS =
+-*_GCC5_X64_NASM_FLAGS = -f elf64
++*_GCC5_X64_NASM_FLAGS = -f elf64 ENV(NASM_PREFIX_MAP)
+
+ DEBUG_GCC5_X64_CC_FLAGS = DEF(GCC5_X64_CC_FLAGS) -flto -DUSING_LTO
+ DEBUG_GCC5_X64_DLINK_FLAGS = DEF(GCC5_X64_DLINK_FLAGS) -flto -Os
+--
+2.30.2
+
diff --git a/meta/recipes-core/ovmf/ovmf/0003-ovmf-Update-to-latest.patch b/meta/recipes-core/ovmf/ovmf/0003-ovmf-Update-to-latest.patch
deleted file mode 100644
index d710429899..0000000000
--- a/meta/recipes-core/ovmf/ovmf/0003-ovmf-Update-to-latest.patch
+++ /dev/null
@@ -1,45 +0,0 @@
-From 67267d8cc31df16a3608cad1a17c5f1470ef8bbd Mon Sep 17 00:00:00 2001
-From: Steve Langasek <steve.langasek@ubuntu.com>
-Date: Sat, 10 Jun 2017 01:39:36 -0700
-Subject: [PATCH 3/6] ovmf: Update to latest
-
-Description: pass -fno-stack-protector to all GCC toolchains
- The upstream build rules inexplicably pass -fno-stack-protector only
- when building for i386 and amd64. Add this essential argument to the
- generic rules for gcc 4.4 and later.
-Last-Updated: 2016-04-12
-Upstream-Status: Pending
----
- BaseTools/Conf/tools_def.template | 8 ++++----
- 1 file changed, 4 insertions(+), 4 deletions(-)
-
-diff --git a/BaseTools/Conf/tools_def.template b/BaseTools/Conf/tools_def.template
-index 498696e583..36241b6ede 100755
---- a/BaseTools/Conf/tools_def.template
-+++ b/BaseTools/Conf/tools_def.template
-@@ -1897,10 +1897,10 @@ DEFINE GCC_RISCV64_RC_FLAGS = -I binary -O elf64-littleriscv -B riscv
- # GCC Build Flag for included header file list generation
- DEFINE GCC_DEPS_FLAGS = -MMD -MF $@.deps
-
--DEFINE GCC48_ALL_CC_FLAGS = DEF(GCC_ALL_CC_FLAGS) -ffunction-sections -fdata-sections -DSTRING_ARRAY_NAME=$(BASE_NAME)Strings
-+DEFINE GCC48_ALL_CC_FLAGS = DEF(GCC_ALL_CC_FLAGS) -ffunction-sections -fdata-sections -fno-stack-protector -DSTRING_ARRAY_NAME=$(BASE_NAME)Strings
- DEFINE GCC48_IA32_X64_DLINK_COMMON = -nostdlib -Wl,-n,-q,--gc-sections -z common-page-size=0x20
--DEFINE GCC48_IA32_CC_FLAGS = DEF(GCC48_ALL_CC_FLAGS) -m32 -march=i586 -malign-double -fno-stack-protector -D EFI32 -fno-asynchronous-unwind-tables -Wno-address
--DEFINE GCC48_X64_CC_FLAGS = DEF(GCC48_ALL_CC_FLAGS) -m64 -fno-stack-protector "-DEFIAPI=__attribute__((ms_abi))" -maccumulate-outgoing-args -mno-red-zone -Wno-address -mcmodel=small -fpie -fno-asynchronous-unwind-tables -Wno-address
-+DEFINE GCC48_IA32_CC_FLAGS = DEF(GCC48_ALL_CC_FLAGS) -m32 -march=i586 -malign-double -D EFI32 -fno-asynchronous-unwind-tables -Wno-address
-+DEFINE GCC48_X64_CC_FLAGS = DEF(GCC48_ALL_CC_FLAGS) -m64 "-DEFIAPI=__attribute__((ms_abi))" -maccumulate-outgoing-args -mno-red-zone -Wno-address -mcmodel=small -fpie -fno-asynchronous-unwind-tables -Wno-address
- DEFINE GCC48_IA32_X64_ASLDLINK_FLAGS = DEF(GCC48_IA32_X64_DLINK_COMMON) -Wl,--entry,ReferenceAcpiTable -u ReferenceAcpiTable
- DEFINE GCC48_IA32_X64_DLINK_FLAGS = DEF(GCC48_IA32_X64_DLINK_COMMON) -Wl,--entry,$(IMAGE_ENTRY_POINT) -u $(IMAGE_ENTRY_POINT) -Wl,-Map,$(DEST_DIR_DEBUG)/$(BASE_NAME).map,--whole-archive
- DEFINE GCC48_IA32_DLINK2_FLAGS = -Wl,--defsym=PECOFF_HEADER_SIZE=0x220 DEF(GCC_DLINK2_FLAGS_COMMON)
-@@ -1909,7 +1909,7 @@ DEFINE GCC48_X64_DLINK2_FLAGS = -Wl,--defsym=PECOFF_HEADER_SIZE=0x228 DEF
- DEFINE GCC48_ASM_FLAGS = DEF(GCC_ASM_FLAGS)
- DEFINE GCC48_ARM_ASM_FLAGS = $(ARCHASM_FLAGS) $(PLATFORM_FLAGS) DEF(GCC_ASM_FLAGS) -mlittle-endian
- DEFINE GCC48_AARCH64_ASM_FLAGS = $(ARCHASM_FLAGS) $(PLATFORM_FLAGS) DEF(GCC_ASM_FLAGS) -mlittle-endian
--DEFINE GCC48_ARM_CC_FLAGS = $(ARCHCC_FLAGS) $(PLATFORM_FLAGS) DEF(GCC_ARM_CC_FLAGS) -fstack-protector -mword-relocations
-+DEFINE GCC48_ARM_CC_FLAGS = $(ARCHCC_FLAGS) $(PLATFORM_FLAGS) DEF(GCC_ARM_CC_FLAGS) -mword-relocations
- DEFINE GCC48_ARM_CC_XIPFLAGS = DEF(GCC_ARM_CC_XIPFLAGS)
- DEFINE GCC48_AARCH64_CC_FLAGS = $(ARCHCC_FLAGS) $(PLATFORM_FLAGS) -mcmodel=large DEF(GCC_AARCH64_CC_FLAGS)
- DEFINE GCC48_AARCH64_CC_XIPFLAGS = DEF(GCC_AARCH64_CC_XIPFLAGS)
---
-2.32.0
-
diff --git a/meta/recipes-core/ovmf/ovmf/0004-reproducible.patch b/meta/recipes-core/ovmf/ovmf/0004-reproducible.patch
new file mode 100644
index 0000000000..c3fdc3d863
--- /dev/null
+++ b/meta/recipes-core/ovmf/ovmf/0004-reproducible.patch
@@ -0,0 +1,180 @@
+From c59850367a190d70dec43e0a66f399a4d8a5ffed Mon Sep 17 00:00:00 2001
+From: Alexander Kanavin <alex.kanavin@gmail.com>
+Date: Mon, 14 Jun 2021 19:57:30 +0200
+Subject: [PATCH 4/4] reproducible
+
+This patch fixes various things which make the build more reproducible. Some changes
+here only change intermediate artefacts but that means when you have two build trees
+giving differing results, the differences can be isolated more easily. The issues here
+usually become apparent with longer paths.
+
+This was all debugged with:
+TMPDIR = "${TOPDIR}/tmp"
+vs.
+TMPDIR = "${TOPDIR}/tmp-inital-mylongpath-mylongpath-mylongpath-mylongpath-mylongpath-mylongpath-mylongpath-mylongpath-mylongpath"
+
+The patch specifically:
+
+ * Sorts output in GNUmakefile
+ * Always generates indirect flags files used to avoid pathlength issues else the
+ compile commands suddenly change when using longer paths
+ * Sorts the AutoGenTimeStamp file contents
+ * Makes the TargetDescBlock objects from BuildEngine sortable to allow the makefile fix
+ * Fix ElfConvert within GenFw so that only the basename of the binary being converted
+ is used, else the output from "GenFw XXX.bin" differs from "GenFw /long/path/XXX.bin"
+ with sufficiently long paths
+
+Upstream-Status: Submitted [https://github.com/tianocore/edk2/pull/2176]
+Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
+Signed-off-by: Alexander Kanavin <alex.kanavin@gmail.com>
+---
+ BaseTools/Source/C/GenFw/Elf64Convert.c | 8 ++++---
+ .../Source/Python/AutoGen/BuildEngine.py | 3 +++
+ BaseTools/Source/Python/AutoGen/GenMake.py | 24 +++++++++----------
+ .../Source/Python/AutoGen/ModuleAutoGen.py | 5 +++-
+ 4 files changed, 24 insertions(+), 16 deletions(-)
+
+diff --git a/BaseTools/Source/C/GenFw/Elf64Convert.c b/BaseTools/Source/C/GenFw/Elf64Convert.c
+index 9c17c90b16..fcc7864141 100644
+--- a/BaseTools/Source/C/GenFw/Elf64Convert.c
++++ b/BaseTools/Source/C/GenFw/Elf64Convert.c
+@@ -15,6 +15,8 @@ SPDX-License-Identifier: BSD-2-Clause-Patent
+ #ifndef __GNUC__
+ #include <windows.h>
+ #include <io.h>
++#else
++#define _GNU_SOURCE
+ #endif
+ #include <assert.h>
+ #include <stdio.h>
+@@ -990,7 +992,7 @@ ScanSections64 (
+ }
+ mCoffOffset = mDebugOffset + sizeof(EFI_IMAGE_DEBUG_DIRECTORY_ENTRY) +
+ sizeof(EFI_IMAGE_DEBUG_CODEVIEW_NB10_ENTRY) +
+- strlen(mInImageName) + 1;
++ strlen(basename(mInImageName)) + 1;
+
+ //
+ // Add more space in the .debug data region for the DllCharacteristicsEx
+@@ -2261,7 +2263,7 @@ WriteDebug64 (
+ EFI_IMAGE_DEBUG_CODEVIEW_NB10_ENTRY *Nb10;
+ EFI_IMAGE_DEBUG_EX_DLLCHARACTERISTICS_ENTRY *DllEntry;
+
+- Len = strlen(mInImageName) + 1;
++ Len = strlen(basename(mInImageName)) + 1;
+
+ NtHdr = (EFI_IMAGE_OPTIONAL_HEADER_UNION *)(mCoffFile + mNtHdrOffset);
+ DataDir = &NtHdr->Pe32Plus.OptionalHeader.DataDirectory[EFI_IMAGE_DIRECTORY_ENTRY_DEBUG];
+@@ -2294,7 +2296,7 @@ WriteDebug64 (
+
+ Nb10 = (EFI_IMAGE_DEBUG_CODEVIEW_NB10_ENTRY*)(Dir + 1);
+ Nb10->Signature = CODEVIEW_SIGNATURE_NB10;
+- strcpy ((char *)(Nb10 + 1), mInImageName);
++ strcpy ((char *)(Nb10 + 1), basename(mInImageName));
+ }
+
+ STATIC
+diff --git a/BaseTools/Source/Python/AutoGen/BuildEngine.py b/BaseTools/Source/Python/AutoGen/BuildEngine.py
+index 752a1a1f6a..02054cccf8 100644
+--- a/BaseTools/Source/Python/AutoGen/BuildEngine.py
++++ b/BaseTools/Source/Python/AutoGen/BuildEngine.py
+@@ -70,6 +70,9 @@ class TargetDescBlock(object):
+ else:
+ return str(Other) == self.Target.Path
+
++ def __lt__(self, other):
++ return str(self) < str(other)
++
+ def AddInput(self, Input):
+ if Input not in self.Inputs:
+ self.Inputs.append(Input)
+diff --git a/BaseTools/Source/Python/AutoGen/GenMake.py b/BaseTools/Source/Python/AutoGen/GenMake.py
+index daec9c6d54..0e8cc20efe 100755
+--- a/BaseTools/Source/Python/AutoGen/GenMake.py
++++ b/BaseTools/Source/Python/AutoGen/GenMake.py
+@@ -575,7 +575,7 @@ cleanlib:
+ os.remove(RespFileList)
+
+ # convert source files and binary files to build targets
+- self.ResultFileList = [str(T.Target) for T in MyAgo.CodaTargetList]
++ self.ResultFileList = sorted([str(T.Target) for T in MyAgo.CodaTargetList])
+ if len(self.ResultFileList) == 0 and len(MyAgo.SourceFileList) != 0:
+ EdkLogger.error("build", AUTOGEN_ERROR, "Nothing to build",
+ ExtraData="[%s]" % str(MyAgo))
+@@ -726,7 +726,7 @@ cleanlib:
+ OutputFile = ''
+ DepsFileList = []
+
+- for Cmd in self.GenFfsList:
++ for Cmd in sorted(self.GenFfsList):
+ if Cmd[2]:
+ for CopyCmd in Cmd[2]:
+ Src, Dst = CopyCmd
+@@ -759,7 +759,7 @@ cleanlib:
+ self.BuildTargetList.append('\t%s' % CmdString)
+
+ self.ParseSecCmd(DepsFileList, Cmd[1])
+- for SecOutputFile, SecDepsFile, SecCmd in self.FfsOutputFileList :
++ for SecOutputFile, SecDepsFile, SecCmd in sorted(self.FfsOutputFileList):
+ self.BuildTargetList.append('%s : %s' % (self.ReplaceMacro(SecOutputFile), self.ReplaceMacro(SecDepsFile)))
+ self.BuildTargetList.append('\t%s' % self.ReplaceMacro(SecCmd))
+ self.FfsOutputFileList = []
+@@ -798,13 +798,13 @@ cleanlib:
+
+ def CommandExceedLimit(self):
+ FlagDict = {
+- 'CC' : { 'Macro' : '$(CC_FLAGS)', 'Value' : False},
+- 'PP' : { 'Macro' : '$(PP_FLAGS)', 'Value' : False},
+- 'APP' : { 'Macro' : '$(APP_FLAGS)', 'Value' : False},
+- 'ASLPP' : { 'Macro' : '$(ASLPP_FLAGS)', 'Value' : False},
+- 'VFRPP' : { 'Macro' : '$(VFRPP_FLAGS)', 'Value' : False},
+- 'ASM' : { 'Macro' : '$(ASM_FLAGS)', 'Value' : False},
+- 'ASLCC' : { 'Macro' : '$(ASLCC_FLAGS)', 'Value' : False},
++ 'CC' : { 'Macro' : '$(CC_FLAGS)', 'Value' : True},
++ 'PP' : { 'Macro' : '$(PP_FLAGS)', 'Value' : True},
++ 'APP' : { 'Macro' : '$(APP_FLAGS)', 'Value' : True},
++ 'ASLPP' : { 'Macro' : '$(ASLPP_FLAGS)', 'Value' : True},
++ 'VFRPP' : { 'Macro' : '$(VFRPP_FLAGS)', 'Value' : True},
++ 'ASM' : { 'Macro' : '$(ASM_FLAGS)', 'Value' : True},
++ 'ASLCC' : { 'Macro' : '$(ASLCC_FLAGS)', 'Value' : True},
+ }
+
+ RespDict = {}
+@@ -1007,9 +1007,9 @@ cleanlib:
+ if not self.ObjTargetDict.get(T.Target.SubDir):
+ self.ObjTargetDict[T.Target.SubDir] = set()
+ self.ObjTargetDict[T.Target.SubDir].add(NewFile)
+- for Type in self._AutoGenObject.Targets:
++ for Type in sorted(self._AutoGenObject.Targets):
+ resp_file_number = 0
+- for T in self._AutoGenObject.Targets[Type]:
++ for T in sorted(self._AutoGenObject.Targets[Type]):
+ # Generate related macros if needed
+ if T.GenFileListMacro and T.FileListMacro not in self.FileListMacros:
+ self.FileListMacros[T.FileListMacro] = []
+diff --git a/BaseTools/Source/Python/AutoGen/ModuleAutoGen.py b/BaseTools/Source/Python/AutoGen/ModuleAutoGen.py
+index d05410b329..99b3f64aba 100755
+--- a/BaseTools/Source/Python/AutoGen/ModuleAutoGen.py
++++ b/BaseTools/Source/Python/AutoGen/ModuleAutoGen.py
+@@ -1474,6 +1474,9 @@ class ModuleAutoGen(AutoGen):
+ for File in Files:
+ if File.lower().endswith('.pdb'):
+ AsBuiltInfDict['binary_item'].append('DISPOSABLE|' + File)
++
++ AsBuiltInfDict['binary_item'] = sorted(AsBuiltInfDict['binary_item'])
++
+ HeaderComments = self.Module.HeaderComments
+ StartPos = 0
+ for Index in range(len(HeaderComments)):
+@@ -1749,7 +1752,7 @@ class ModuleAutoGen(AutoGen):
+ if os.path.exists (self.TimeStampPath):
+ os.remove (self.TimeStampPath)
+
+- SaveFileOnChange(self.TimeStampPath, "\n".join(FileSet), False)
++ SaveFileOnChange(self.TimeStampPath, "\n".join(sorted(FileSet)), False)
+
+ # Ignore generating makefile when it is a binary module
+ if self.IsBinaryModule:
+--
+2.30.2
+
diff --git a/meta/recipes-core/ovmf/ovmf/0005-debug-prefix-map.patch b/meta/recipes-core/ovmf/ovmf/0005-debug-prefix-map.patch
deleted file mode 100644
index 9e345f4dda..0000000000
--- a/meta/recipes-core/ovmf/ovmf/0005-debug-prefix-map.patch
+++ /dev/null
@@ -1,104 +0,0 @@
-From 860bb1979f3578bb83257076fe0f3bd33f9d68bf Mon Sep 17 00:00:00 2001
-From: Alexander Kanavin <alex.kanavin@gmail.com>
-Date: Mon, 14 Jun 2021 19:56:28 +0200
-Subject: [PATCH 5/6] debug prefix map
-
-We want to pass ${DEBUG_PREFIX_MAP} to gcc commands and also pass in
- --debug-prefix-map to nasm (we carry a patch to nasm for this). The
-tools definitions file is built by ovmf-native so we need to pass this in
-at target build time when we know the right values so we use the environment.
-
-By using determininistc file paths during the ovmf build, it removes the
-opportunitity for gcc/ld to change the output binaries due to path lengths
-overflowing section sizes and causing small changes in the binary output.
-Previously we relied on the stripped output being the same which isn't always
-the case if the size of the debug symbols varies.
-
-Upstream-Status: Submitted [https://github.com/tianocore/edk2/pull/2202]
-Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
-Signed-off-by: Alexander Kanavin <alex.kanavin@gmail.com>
----
- BaseTools/Conf/tools_def.template | 18 +++++++++---------
- 1 file changed, 9 insertions(+), 9 deletions(-)
-
-diff --git a/BaseTools/Conf/tools_def.template b/BaseTools/Conf/tools_def.template
-index 36241b6ede..947fbf2e8d 100755
---- a/BaseTools/Conf/tools_def.template
-+++ b/BaseTools/Conf/tools_def.template
-@@ -1863,7 +1863,7 @@ NOOPT_*_*_OBJCOPY_ADDDEBUGFLAG = --add-gnu-debuglink=$(DEBUG_DIR)/$(MODULE_N
- *_*_*_DTCPP_PATH = DEF(DTCPP_BIN)
- *_*_*_DTC_PATH = DEF(DTC_BIN)
-
--DEFINE GCC_ALL_CC_FLAGS = -g -Os -fshort-wchar -fno-builtin -fno-strict-aliasing -Wall -Werror -Wno-array-bounds -include AutoGen.h -fno-common
-+DEFINE GCC_ALL_CC_FLAGS = -g -Os -fshort-wchar -fno-builtin -fno-strict-aliasing -Wall -Werror -Wno-array-bounds -include AutoGen.h -fno-common ENV(GCC_PREFIX_MAP)
- DEFINE GCC_IA32_CC_FLAGS = DEF(GCC_ALL_CC_FLAGS) -m32 -malign-double -freorder-blocks -freorder-blocks-and-partition -O2 -mno-stack-arg-probe
- DEFINE GCC_X64_CC_FLAGS = DEF(GCC_ALL_CC_FLAGS) -mno-red-zone -Wno-address -mno-stack-arg-probe
- DEFINE GCC_ARM_CC_FLAGS = DEF(GCC_ALL_CC_FLAGS) -mlittle-endian -mabi=aapcs -fno-short-enums -funsigned-char -ffunction-sections -fdata-sections -fomit-frame-pointer -Wno-address -mthumb -mfloat-abi=soft -fno-pic -fno-pie
-@@ -1881,8 +1881,8 @@ DEFINE GCC_IA32_X64_ASLDLINK_FLAGS = DEF(GCC_IA32_X64_DLINK_COMMON) --entry _Ref
- DEFINE GCC_ARM_ASLDLINK_FLAGS = DEF(GCC_ARM_DLINK_FLAGS) -Wl,--entry,ReferenceAcpiTable -u $(IMAGE_ENTRY_POINT) DEF(GCC_ARM_AARCH64_ASLDLINK_FLAGS)
- DEFINE GCC_AARCH64_ASLDLINK_FLAGS = DEF(GCC_AARCH64_DLINK_FLAGS) -Wl,--entry,ReferenceAcpiTable -u $(IMAGE_ENTRY_POINT) DEF(GCC_ARM_AARCH64_ASLDLINK_FLAGS)
- DEFINE GCC_IA32_X64_DLINK_FLAGS = DEF(GCC_IA32_X64_DLINK_COMMON) --entry _$(IMAGE_ENTRY_POINT) --file-alignment 0x20 --section-alignment 0x20 -Map $(DEST_DIR_DEBUG)/$(BASE_NAME).map
--DEFINE GCC_ASM_FLAGS = -c -x assembler -imacros AutoGen.h
--DEFINE GCC_PP_FLAGS = -E -x assembler-with-cpp -include AutoGen.h
-+DEFINE GCC_ASM_FLAGS = -c -x assembler -imacros AutoGen.h ENV(GCC_PREFIX_MAP)
-+DEFINE GCC_PP_FLAGS = -E -x assembler-with-cpp -include AutoGen.h ENV(GCC_PREFIX_MAP)
- DEFINE GCC_VFRPP_FLAGS = -x c -E -P -DVFRCOMPILE --include $(MODULE_NAME)StrDefs.h
- DEFINE GCC_ASLPP_FLAGS = -x c -E -include AutoGen.h
- DEFINE GCC_ASLCC_FLAGS = -x c
-@@ -2027,7 +2027,7 @@ DEFINE GCC_PP_FLAGS = -E -x assembler-with-cpp -include A
- *_GCC48_IA32_DLINK2_FLAGS = DEF(GCC48_IA32_DLINK2_FLAGS)
- *_GCC48_IA32_RC_FLAGS = DEF(GCC_IA32_RC_FLAGS)
- *_GCC48_IA32_OBJCOPY_FLAGS =
--*_GCC48_IA32_NASM_FLAGS = -f elf32
-+*_GCC48_IA32_NASM_FLAGS = -f elf32 ENV(NASM_PREFIX_MAP)
-
- DEBUG_GCC48_IA32_CC_FLAGS = DEF(GCC48_IA32_CC_FLAGS) -Os
- RELEASE_GCC48_IA32_CC_FLAGS = DEF(GCC48_IA32_CC_FLAGS) -Os -Wno-unused-but-set-variable
-@@ -2055,7 +2055,7 @@ RELEASE_GCC48_IA32_CC_FLAGS = DEF(GCC48_IA32_CC_FLAGS) -Os -Wno-unused-but
- *_GCC48_X64_DLINK2_FLAGS = DEF(GCC48_X64_DLINK2_FLAGS)
- *_GCC48_X64_RC_FLAGS = DEF(GCC_X64_RC_FLAGS)
- *_GCC48_X64_OBJCOPY_FLAGS =
--*_GCC48_X64_NASM_FLAGS = -f elf64
-+*_GCC48_X64_NASM_FLAGS = -f elf64 ENV(NASM_PREFIX_MAP)
-
- DEBUG_GCC48_X64_CC_FLAGS = DEF(GCC48_X64_CC_FLAGS) -Os
- RELEASE_GCC48_X64_CC_FLAGS = DEF(GCC48_X64_CC_FLAGS) -Os -Wno-unused-but-set-variable
-@@ -2167,7 +2167,7 @@ RELEASE_GCC48_AARCH64_CC_FLAGS = DEF(GCC48_AARCH64_CC_FLAGS) -Wno-unused-but-s
- *_GCC49_IA32_DLINK2_FLAGS = DEF(GCC49_IA32_DLINK2_FLAGS)
- *_GCC49_IA32_RC_FLAGS = DEF(GCC_IA32_RC_FLAGS)
- *_GCC49_IA32_OBJCOPY_FLAGS =
--*_GCC49_IA32_NASM_FLAGS = -f elf32
-+*_GCC49_IA32_NASM_FLAGS = -f elf32 ENV(NASM_PREFIX_MAP)
-
- DEBUG_GCC49_IA32_CC_FLAGS = DEF(GCC49_IA32_CC_FLAGS) -Os
- RELEASE_GCC49_IA32_CC_FLAGS = DEF(GCC49_IA32_CC_FLAGS) -Os -Wno-unused-but-set-variable -Wno-unused-const-variable
-@@ -2195,7 +2195,7 @@ RELEASE_GCC49_IA32_CC_FLAGS = DEF(GCC49_IA32_CC_FLAGS) -Os -Wno-unused-but
- *_GCC49_X64_DLINK2_FLAGS = DEF(GCC49_X64_DLINK2_FLAGS)
- *_GCC49_X64_RC_FLAGS = DEF(GCC_X64_RC_FLAGS)
- *_GCC49_X64_OBJCOPY_FLAGS =
--*_GCC49_X64_NASM_FLAGS = -f elf64
-+*_GCC49_X64_NASM_FLAGS = -f elf64 ENV(NASM_PREFIX_MAP)
-
- DEBUG_GCC49_X64_CC_FLAGS = DEF(GCC49_X64_CC_FLAGS) -Os
- RELEASE_GCC49_X64_CC_FLAGS = DEF(GCC49_X64_CC_FLAGS) -Os -Wno-unused-but-set-variable -Wno-unused-const-variable
-@@ -2313,7 +2313,7 @@ RELEASE_GCC49_AARCH64_DLINK_XIPFLAGS = -z common-page-size=0x20
- *_GCC5_IA32_DLINK2_FLAGS = DEF(GCC5_IA32_DLINK2_FLAGS) -no-pie
- *_GCC5_IA32_RC_FLAGS = DEF(GCC_IA32_RC_FLAGS)
- *_GCC5_IA32_OBJCOPY_FLAGS =
--*_GCC5_IA32_NASM_FLAGS = -f elf32
-+*_GCC5_IA32_NASM_FLAGS = -f elf32 ENV(NASM_PREFIX_MAP)
-
- DEBUG_GCC5_IA32_CC_FLAGS = DEF(GCC5_IA32_CC_FLAGS) -flto -Os
- DEBUG_GCC5_IA32_DLINK_FLAGS = DEF(GCC5_IA32_X64_DLINK_FLAGS) -flto -Os -Wl,-m,elf_i386,--oformat=elf32-i386
-@@ -2345,7 +2345,7 @@ RELEASE_GCC5_IA32_DLINK_FLAGS = DEF(GCC5_IA32_X64_DLINK_FLAGS) -flto -Os -Wl,
- *_GCC5_X64_DLINK2_FLAGS = DEF(GCC5_X64_DLINK2_FLAGS)
- *_GCC5_X64_RC_FLAGS = DEF(GCC_X64_RC_FLAGS)
- *_GCC5_X64_OBJCOPY_FLAGS =
--*_GCC5_X64_NASM_FLAGS = -f elf64
-+*_GCC5_X64_NASM_FLAGS = -f elf64 ENV(NASM_PREFIX_MAP)
-
- DEBUG_GCC5_X64_CC_FLAGS = DEF(GCC5_X64_CC_FLAGS) -flto -DUSING_LTO -Os
- DEBUG_GCC5_X64_DLINK_FLAGS = DEF(GCC5_X64_DLINK_FLAGS) -flto -Os
---
-2.32.0
-
diff --git a/meta/recipes-core/ovmf/ovmf/0006-reproducible.patch b/meta/recipes-core/ovmf/ovmf/0006-reproducible.patch
deleted file mode 100644
index 846f408012..0000000000
--- a/meta/recipes-core/ovmf/ovmf/0006-reproducible.patch
+++ /dev/null
@@ -1,180 +0,0 @@
-From 27ed9962f5cb3afcc44d6c96c53277132a999712 Mon Sep 17 00:00:00 2001
-From: Alexander Kanavin <alex.kanavin@gmail.com>
-Date: Mon, 14 Jun 2021 19:57:30 +0200
-Subject: [PATCH 6/6] reproducible
-
-This patch fixes various things which make the build more reproducible. Some changes
-here only change intermediate artefacts but that means when you have two build trees
-giving differing results, the differences can be isolated more easily. The issues here
-usually become apparent with longer paths.
-
-This was all debugged with:
-TMPDIR = "${TOPDIR}/tmp"
-vs.
-TMPDIR = "${TOPDIR}/tmp-inital-mylongpath-mylongpath-mylongpath-mylongpath-mylongpath-mylongpath-mylongpath-mylongpath-mylongpath"
-
-The patch specifically:
-
- * Sorts output in GNUmakefile
- * Always generates indirect flags files used to avoid pathlength issues else the
- compile commands suddenly change when using longer paths
- * Sorts the AutoGenTimeStamp file contents
- * Makes the TargetDescBlock objects from BuildEngine sortable to allow the makefile fix
- * Fix ElfConvert within GenFw so that only the basename of the binary being converted
- is used, else the output from "GenFw XXX.bin" differs from "GenFw /long/path/XXX.bin"
- with sufficiently long paths
-
-Upstream-Status: Submitted [https://github.com/tianocore/edk2/pull/2176]
-Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
-Signed-off-by: Alexander Kanavin <alex.kanavin@gmail.com>
----
- BaseTools/Source/C/GenFw/Elf64Convert.c | 8 ++++---
- .../Source/Python/AutoGen/BuildEngine.py | 3 +++
- BaseTools/Source/Python/AutoGen/GenMake.py | 24 +++++++++----------
- .../Source/Python/AutoGen/ModuleAutoGen.py | 5 +++-
- 4 files changed, 24 insertions(+), 16 deletions(-)
-
-diff --git a/BaseTools/Source/C/GenFw/Elf64Convert.c b/BaseTools/Source/C/GenFw/Elf64Convert.c
-index d097db8632..a87ae6f3d0 100644
---- a/BaseTools/Source/C/GenFw/Elf64Convert.c
-+++ b/BaseTools/Source/C/GenFw/Elf64Convert.c
-@@ -14,6 +14,8 @@ SPDX-License-Identifier: BSD-2-Clause-Patent
- #ifndef __GNUC__
- #include <windows.h>
- #include <io.h>
-+#else
-+#define _GNU_SOURCE
- #endif
- #include <assert.h>
- #include <stdio.h>
-@@ -769,7 +771,7 @@ ScanSections64 (
- }
- mCoffOffset = mDebugOffset + sizeof(EFI_IMAGE_DEBUG_DIRECTORY_ENTRY) +
- sizeof(EFI_IMAGE_DEBUG_CODEVIEW_NB10_ENTRY) +
-- strlen(mInImageName) + 1;
-+ strlen(basename(mInImageName)) + 1;
-
- mCoffOffset = CoffAlign(mCoffOffset);
- if (SectionCount == 0) {
-@@ -1608,7 +1610,7 @@ WriteDebug64 (
- EFI_IMAGE_DEBUG_DIRECTORY_ENTRY *Dir;
- EFI_IMAGE_DEBUG_CODEVIEW_NB10_ENTRY *Nb10;
-
-- Len = strlen(mInImageName) + 1;
-+ Len = strlen(basename(mInImageName)) + 1;
-
- Dir = (EFI_IMAGE_DEBUG_DIRECTORY_ENTRY*)(mCoffFile + mDebugOffset);
- Dir->Type = EFI_IMAGE_DEBUG_TYPE_CODEVIEW;
-@@ -1618,7 +1620,7 @@ WriteDebug64 (
-
- Nb10 = (EFI_IMAGE_DEBUG_CODEVIEW_NB10_ENTRY*)(Dir + 1);
- Nb10->Signature = CODEVIEW_SIGNATURE_NB10;
-- strcpy ((char *)(Nb10 + 1), mInImageName);
-+ strcpy ((char *)(Nb10 + 1), basename(mInImageName));
-
-
- NtHdr = (EFI_IMAGE_OPTIONAL_HEADER_UNION *)(mCoffFile + mNtHdrOffset);
-diff --git a/BaseTools/Source/Python/AutoGen/BuildEngine.py b/BaseTools/Source/Python/AutoGen/BuildEngine.py
-index 722fead75a..8f1c236970 100644
---- a/BaseTools/Source/Python/AutoGen/BuildEngine.py
-+++ b/BaseTools/Source/Python/AutoGen/BuildEngine.py
-@@ -70,6 +70,9 @@ class TargetDescBlock(object):
- else:
- return str(Other) == self.Target.Path
-
-+ def __lt__(self, other):
-+ return str(self) < str(other)
-+
- def AddInput(self, Input):
- if Input not in self.Inputs:
- self.Inputs.append(Input)
-diff --git a/BaseTools/Source/Python/AutoGen/GenMake.py b/BaseTools/Source/Python/AutoGen/GenMake.py
-index 961b2ab1c3..23c1592025 100755
---- a/BaseTools/Source/Python/AutoGen/GenMake.py
-+++ b/BaseTools/Source/Python/AutoGen/GenMake.py
-@@ -575,7 +575,7 @@ cleanlib:
- os.remove(RespFileList)
-
- # convert source files and binary files to build targets
-- self.ResultFileList = [str(T.Target) for T in MyAgo.CodaTargetList]
-+ self.ResultFileList = sorted([str(T.Target) for T in MyAgo.CodaTargetList])
- if len(self.ResultFileList) == 0 and len(MyAgo.SourceFileList) != 0:
- EdkLogger.error("build", AUTOGEN_ERROR, "Nothing to build",
- ExtraData="[%s]" % str(MyAgo))
-@@ -726,7 +726,7 @@ cleanlib:
- OutputFile = ''
- DepsFileList = []
-
-- for Cmd in self.GenFfsList:
-+ for Cmd in sorted(self.GenFfsList):
- if Cmd[2]:
- for CopyCmd in Cmd[2]:
- Src, Dst = CopyCmd
-@@ -759,7 +759,7 @@ cleanlib:
- self.BuildTargetList.append('\t%s' % CmdString)
-
- self.ParseSecCmd(DepsFileList, Cmd[1])
-- for SecOutputFile, SecDepsFile, SecCmd in self.FfsOutputFileList :
-+ for SecOutputFile, SecDepsFile, SecCmd in sorted(self.FfsOutputFileList):
- self.BuildTargetList.append('%s : %s' % (self.ReplaceMacro(SecOutputFile), self.ReplaceMacro(SecDepsFile)))
- self.BuildTargetList.append('\t%s' % self.ReplaceMacro(SecCmd))
- self.FfsOutputFileList = []
-@@ -798,13 +798,13 @@ cleanlib:
-
- def CommandExceedLimit(self):
- FlagDict = {
-- 'CC' : { 'Macro' : '$(CC_FLAGS)', 'Value' : False},
-- 'PP' : { 'Macro' : '$(PP_FLAGS)', 'Value' : False},
-- 'APP' : { 'Macro' : '$(APP_FLAGS)', 'Value' : False},
-- 'ASLPP' : { 'Macro' : '$(ASLPP_FLAGS)', 'Value' : False},
-- 'VFRPP' : { 'Macro' : '$(VFRPP_FLAGS)', 'Value' : False},
-- 'ASM' : { 'Macro' : '$(ASM_FLAGS)', 'Value' : False},
-- 'ASLCC' : { 'Macro' : '$(ASLCC_FLAGS)', 'Value' : False},
-+ 'CC' : { 'Macro' : '$(CC_FLAGS)', 'Value' : True},
-+ 'PP' : { 'Macro' : '$(PP_FLAGS)', 'Value' : True},
-+ 'APP' : { 'Macro' : '$(APP_FLAGS)', 'Value' : True},
-+ 'ASLPP' : { 'Macro' : '$(ASLPP_FLAGS)', 'Value' : True},
-+ 'VFRPP' : { 'Macro' : '$(VFRPP_FLAGS)', 'Value' : True},
-+ 'ASM' : { 'Macro' : '$(ASM_FLAGS)', 'Value' : True},
-+ 'ASLCC' : { 'Macro' : '$(ASLCC_FLAGS)', 'Value' : True},
- }
-
- RespDict = {}
-@@ -1007,9 +1007,9 @@ cleanlib:
- if not self.ObjTargetDict.get(T.Target.SubDir):
- self.ObjTargetDict[T.Target.SubDir] = set()
- self.ObjTargetDict[T.Target.SubDir].add(NewFile)
-- for Type in self._AutoGenObject.Targets:
-+ for Type in sorted(self._AutoGenObject.Targets):
- resp_file_number = 0
-- for T in self._AutoGenObject.Targets[Type]:
-+ for T in sorted(self._AutoGenObject.Targets[Type]):
- # Generate related macros if needed
- if T.GenFileListMacro and T.FileListMacro not in self.FileListMacros:
- self.FileListMacros[T.FileListMacro] = []
-diff --git a/BaseTools/Source/Python/AutoGen/ModuleAutoGen.py b/BaseTools/Source/Python/AutoGen/ModuleAutoGen.py
-index d70b0d7ae8..25dca9a6df 100755
---- a/BaseTools/Source/Python/AutoGen/ModuleAutoGen.py
-+++ b/BaseTools/Source/Python/AutoGen/ModuleAutoGen.py
-@@ -1484,6 +1484,9 @@ class ModuleAutoGen(AutoGen):
- for File in Files:
- if File.lower().endswith('.pdb'):
- AsBuiltInfDict['binary_item'].append('DISPOSABLE|' + File)
-+
-+ AsBuiltInfDict['binary_item'] = sorted(AsBuiltInfDict['binary_item'])
-+
- HeaderComments = self.Module.HeaderComments
- StartPos = 0
- for Index in range(len(HeaderComments)):
-@@ -1759,7 +1762,7 @@ class ModuleAutoGen(AutoGen):
- if os.path.exists (self.TimeStampPath):
- os.remove (self.TimeStampPath)
-
-- SaveFileOnChange(self.TimeStampPath, "\n".join(FileSet), False)
-+ SaveFileOnChange(self.TimeStampPath, "\n".join(sorted(FileSet)), False)
-
- # Ignore generating makefile when it is a binary module
- if self.IsBinaryModule:
---
-2.32.0
-
diff --git a/meta/recipes-core/ovmf/ovmf_git.bb b/meta/recipes-core/ovmf/ovmf_git.bb
index 84e3360a3a..35ca8d1834 100644
--- a/meta/recipes-core/ovmf/ovmf_git.bb
+++ b/meta/recipes-core/ovmf/ovmf_git.bb
@@ -22,17 +22,28 @@ BUILD_CFLAGS += "-Wno-error=stringop-overflow"
SRC_URI = "gitsm://github.com/tianocore/edk2.git;branch=master;protocol=https \
file://0001-ovmf-update-path-to-native-BaseTools.patch \
file://0002-BaseTools-makefile-adjust-to-build-in-under-bitbake.patch \
- file://0003-ovmf-Update-to-latest.patch \
- file://0005-debug-prefix-map.patch \
- file://0006-reproducible.patch \
- file://0001-BaseTools-fix-gcc12-warning.patch \
- file://0001-BaseTools-fix-gcc12-warning-1.patch \
+ file://0003-debug-prefix-map.patch \
+ file://0004-reproducible.patch \
"
-PV = "edk2-stable202202"
-SRCREV = "b24306f15daa2ff8510b06702114724b33895d3c"
+PV = "edk2-stable202402"
+SRCREV = "edc6681206c1a8791981a2f911d2fb8b3d2f5768"
UPSTREAM_CHECK_GITTAGREGEX = "(?P<pver>edk2-stable.*)"
+CVE_PRODUCT = "edk2"
+CVE_VERSION = "${@d.getVar('PV').split('stable')[1]}"
+
+CVE_STATUS[CVE-2014-8271] = "fixed-version: Fixed in svn_16280, which is an unusual versioning breaking version comparison."
+CVE_STATUS[CVE-2014-4859] = "fixed-version: The CPE in the NVD database doesn't reflect correctly the vulnerable versions."
+CVE_STATUS[CVE-2014-4860] = "fixed-version: The CPE in the NVD database doesn't reflect correctly the vulnerable versions."
+CVE_STATUS[CVE-2019-14553] = "fixed-version: The CPE in the NVD database doesn't reflect correctly the vulnerable versions."
+CVE_STATUS[CVE-2019-14559] = "fixed-version: The CPE in the NVD database doesn't reflect correctly the vulnerable versions."
+CVE_STATUS[CVE-2019-14562] = "fixed-version: The CPE in the NVD database doesn't reflect correctly the vulnerable versions."
+CVE_STATUS[CVE-2019-14563] = "fixed-version: The CPE in the NVD database doesn't reflect correctly the vulnerable versions."
+CVE_STATUS[CVE-2019-14575] = "fixed-version: The CPE in the NVD database doesn't reflect correctly the vulnerable versions."
+CVE_STATUS[CVE-2019-14586] = "fixed-version: The CPE in the NVD database doesn't reflect correctly the vulnerable versions."
+CVE_STATUS[CVE-2019-14587] = "fixed-version: The CPE in the NVD database doesn't reflect correctly the vulnerable versions."
+
inherit deploy
PARALLEL_MAKE = ""
@@ -125,7 +136,7 @@ fix_toolchain:append:class-native() {
# --debug-prefix-map to nasm (we carry a patch to nasm for this). The
# tools definitions are built by ovmf-native so we need to pass this in
# at target build time when we know the right values.
-export NASM_PREFIX_MAP = "--debug-prefix-map=${WORKDIR}=/usr/src/debug/ovmf/${EXTENDPE}${PV}-${PR}"
+export NASM_PREFIX_MAP = "--debug-prefix-map=${WORKDIR}=${TARGET_DBGSRC_DIR}"
export GCC_PREFIX_MAP = "${DEBUG_PREFIX_MAP} -Wno-stringop-overflow -Wno-maybe-uninitialized"
GCC_VER="$(${CC} -v 2>&1 | tail -n1 | awk '{print $3}')"
diff --git a/meta/recipes-core/packagegroups/nativesdk-packagegroup-sdk-host.bb b/meta/recipes-core/packagegroups/nativesdk-packagegroup-sdk-host.bb
index 9166a0851f..c1d3c25060 100644
--- a/meta/recipes-core/packagegroups/nativesdk-packagegroup-sdk-host.bb
+++ b/meta/recipes-core/packagegroups/nativesdk-packagegroup-sdk-host.bb
@@ -3,9 +3,9 @@
#
SUMMARY = "Host packages for the standalone SDK or external toolchain"
-PR = "r12"
-inherit packagegroup nativesdk
+inherit packagegroup
+inherit_defer nativesdk
PACKAGEGROUP_DISABLE_COMPLEMENTARY = "1"
diff --git a/meta/recipes-core/packagegroups/packagegroup-base.bb b/meta/recipes-core/packagegroups/packagegroup-base.bb
index 7489ef61b0..70a1035003 100644
--- a/meta/recipes-core/packagegroups/packagegroup-base.bb
+++ b/meta/recipes-core/packagegroups/packagegroup-base.bb
@@ -1,5 +1,4 @@
SUMMARY = "Merge machine and distro options to create a basic machine task/package"
-PR = "r83"
#
# packages which content depend on MACHINE_FEATURES need to be MACHINE_ARCH
@@ -16,7 +15,6 @@ PACKAGES = ' \
\
${@bb.utils.contains("MACHINE_FEATURES", "acpi", "packagegroup-base-acpi", "",d)} \
${@bb.utils.contains("MACHINE_FEATURES", "alsa", "packagegroup-base-alsa", "", d)} \
- ${@bb.utils.contains("MACHINE_FEATURES", "apm", "packagegroup-base-apm", "", d)} \
${@bb.utils.contains("MACHINE_FEATURES", "ext2", "packagegroup-base-ext2", "", d)} \
${@bb.utils.contains("MACHINE_FEATURES", "vfat", "packagegroup-base-vfat", "", d)} \
${@bb.utils.contains("MACHINE_FEATURES", "keyboard", "packagegroup-base-keyboard", "", d)} \
@@ -52,7 +50,6 @@ RDEPENDS:packagegroup-base = "\
packagegroup-machine-base \
\
module-init-tools \
- ${@bb.utils.contains('MACHINE_FEATURES', 'apm', 'packagegroup-base-apm', '',d)} \
${@bb.utils.contains('MACHINE_FEATURES', 'acpi', 'packagegroup-base-acpi', '',d)} \
${@bb.utils.contains('MACHINE_FEATURES', 'keyboard', 'packagegroup-base-keyboard', '',d)} \
${@bb.utils.contains('MACHINE_FEATURES', 'phone', 'packagegroup-base-phone', '',d)} \
@@ -132,7 +129,6 @@ RRECOMMENDS:packagegroup-distro-base = "${DISTRO_EXTRA_RRECOMMENDS}"
#
# packages added by machine config
#
-SUMMARY:packagegroup-machine-base = "${MACHINE} extras"
SUMMARY:packagegroup-machine-base = "Extra packages required to fully support ${MACHINE} hardware"
RDEPENDS:packagegroup-machine-base = "${MACHINE_EXTRA_RDEPENDS}"
RRECOMMENDS:packagegroup-machine-base = "${MACHINE_EXTRA_RRECOMMENDS}"
@@ -149,11 +145,6 @@ SUMMARY:packagegroup-base-acpi = "ACPI support"
RDEPENDS:packagegroup-base-acpi = "\
acpid"
-SUMMARY:packagegroup-base-apm = "APM support"
-RDEPENDS:packagegroup-base-apm = "\
- ${VIRTUAL-RUNTIME_apm} \
- apmd"
-
SUMMARY:packagegroup-base-ext2 = "ext2 filesystem support"
RDEPENDS:packagegroup-base-ext2 = "\
e2fsprogs-e2fsck \
@@ -267,11 +258,14 @@ RRECOMMENDS:packagegroup-base-ipsec = "\
# packagegroup-base-wifi contain everything needed to get WiFi working
# WEP/WPA connection needs to be supported out-of-box
#
+# Choose either 'wpa-supplicant' or 'iwd' as wireless-daemon
+WIRELESS_DAEMON ??= "wpa-supplicant"
SUMMARY:packagegroup-base-wifi = "WiFi support"
RDEPENDS:packagegroup-base-wifi = "\
iw \
wireless-regdb-static \
- wpa-supplicant"
+ ${WIRELESS_DAEMON} \
+"
RRECOMMENDS:packagegroup-base-wifi = "\
${@bb.utils.contains('COMBINED_FEATURES', 'usbhost', 'kernel-module-zd1211rw', '',d)} \
diff --git a/meta/recipes-core/packagegroups/packagegroup-core-boot.bb b/meta/recipes-core/packagegroups/packagegroup-core-boot.bb
index faf7bc0026..fecc3334ea 100644
--- a/meta/recipes-core/packagegroups/packagegroup-core-boot.bb
+++ b/meta/recipes-core/packagegroups/packagegroup-core-boot.bb
@@ -4,7 +4,6 @@
SUMMARY = "Minimal boot requirements"
DESCRIPTION = "The minimal set of packages required to boot the system"
-PR = "r17"
PACKAGE_ARCH = "${MACHINE_ARCH}"
@@ -18,7 +17,6 @@ EFI_PROVIDER ??= "grub-efi"
SYSVINIT_SCRIPTS = "${@bb.utils.contains('MACHINE_FEATURES', 'rtc', '${VIRTUAL-RUNTIME_base-utils-hwclock}', '', d)} \
modutils-initscripts \
- init-ifupdown \
${VIRTUAL-RUNTIME_initscripts} \
"
@@ -38,4 +36,7 @@ RDEPENDS:${PN} = "\
RRECOMMENDS:${PN} = "\
${VIRTUAL-RUNTIME_base-utils-syslog} \
- ${MACHINE_ESSENTIAL_EXTRA_RRECOMMENDS}"
+ ${MACHINE_ESSENTIAL_EXTRA_RRECOMMENDS} \
+ ${@bb.utils.contains("DISTRO_FEATURES", "sysvinit", "init-ifupdown", "", d)} \
+ ${@bb.utils.contains("DISTRO_FEATURES", "sysvinit pni-names", "ifupdown", "", d)} \
+ "
diff --git a/meta/recipes-core/packagegroups/packagegroup-core-nfs.bb b/meta/recipes-core/packagegroups/packagegroup-core-nfs.bb
index 35beb3fc05..b8a73175e8 100644
--- a/meta/recipes-core/packagegroups/packagegroup-core-nfs.bb
+++ b/meta/recipes-core/packagegroups/packagegroup-core-nfs.bb
@@ -3,7 +3,6 @@
#
SUMMARY = "NFS package groups"
-PR = "r2"
inherit packagegroup
diff --git a/meta/recipes-core/packagegroups/packagegroup-core-sdk.bb b/meta/recipes-core/packagegroups/packagegroup-core-sdk.bb
index d70aff22c7..84e1a41d9a 100644
--- a/meta/recipes-core/packagegroups/packagegroup-core-sdk.bb
+++ b/meta/recipes-core/packagegroups/packagegroup-core-sdk.bb
@@ -3,7 +3,6 @@
#
SUMMARY = "Software development tools"
-PR = "r9"
PACKAGE_ARCH = "${MACHINE_ARCH}"
@@ -11,6 +10,11 @@ inherit packagegroup
#PACKAGEFUNCS =+ 'generate_sdk_pkgs'
+TARGET_TOOLCHAIN_LANGS ??= "${SDK_TOOLCHAIN_LANGS}"
+TARGET_TOOLCHAIN_LANGS:remove:sdkmingw32 = "rust"
+# libstd-rs doesn't build for mips n32 with compiler constraint errors
+TARGET_TOOLCHAIN_LANGS:remove:mipsarchn32 = "rust"
+
RDEPENDS:packagegroup-core-sdk = "\
packagegroup-core-buildessential \
coreutils \
@@ -23,7 +27,10 @@ RDEPENDS:packagegroup-core-sdk = "\
less \
ldd \
file \
- tcl"
+ tcl \
+ ${@bb.utils.contains('TARGET_TOOLCHAIN_LANGS', 'go', 'packagegroup-go-sdk-target', '', d)} \
+ ${@bb.utils.contains('TARGET_TOOLCHAIN_LANGS', 'rust', 'packagegroup-rust-sdk-target', '', d)} \
+"
SANITIZERS = "libasan-dev libubsan-dev"
SANITIZERS:arc = ""
diff --git a/meta/recipes-core/packagegroups/packagegroup-core-ssh-dropbear.bb b/meta/recipes-core/packagegroups/packagegroup-core-ssh-dropbear.bb
index 4f844ad925..206292ace4 100644
--- a/meta/recipes-core/packagegroups/packagegroup-core-ssh-dropbear.bb
+++ b/meta/recipes-core/packagegroups/packagegroup-core-ssh-dropbear.bb
@@ -1,6 +1,6 @@
SUMMARY = "Dropbear SSH client/server"
-PR = "r1"
inherit packagegroup
RDEPENDS:${PN} = "dropbear"
+RRECOMMENDS:${PN} = "openssh-sftp-server"
diff --git a/meta/recipes-core/packagegroups/packagegroup-core-ssh-openssh.bb b/meta/recipes-core/packagegroups/packagegroup-core-ssh-openssh.bb
index 846df12bc7..392403f21b 100644
--- a/meta/recipes-core/packagegroups/packagegroup-core-ssh-openssh.bb
+++ b/meta/recipes-core/packagegroups/packagegroup-core-ssh-openssh.bb
@@ -1,5 +1,4 @@
SUMMARY = "OpenSSH SSH client/server"
-PR = "r1"
inherit packagegroup
diff --git a/meta/recipes-core/packagegroups/packagegroup-core-standalone-sdk-target.bb b/meta/recipes-core/packagegroups/packagegroup-core-standalone-sdk-target.bb
index 5ebcbcec82..06fdda90c7 100644
--- a/meta/recipes-core/packagegroups/packagegroup-core-standalone-sdk-target.bb
+++ b/meta/recipes-core/packagegroups/packagegroup-core-standalone-sdk-target.bb
@@ -1,5 +1,4 @@
SUMMARY = "Target packages for the standalone SDK"
-PR = "r8"
PACKAGE_ARCH = "${TUNE_PKGARCH}"
diff --git a/meta/recipes-core/packagegroups/packagegroup-core-tools-debug.bb b/meta/recipes-core/packagegroups/packagegroup-core-tools-debug.bb
index c75850aa64..56ff1d2b06 100644
--- a/meta/recipes-core/packagegroups/packagegroup-core-tools-debug.bb
+++ b/meta/recipes-core/packagegroups/packagegroup-core-tools-debug.bb
@@ -8,7 +8,6 @@ PACKAGE_ARCH = "${TUNE_PKGARCH}"
inherit packagegroup
-PR = "r3"
MTRACE = ""
MTRACE:libc-glibc = "libc-mtrace"
diff --git a/meta/recipes-core/packagegroups/packagegroup-core-tools-profile.bb b/meta/recipes-core/packagegroups/packagegroup-core-tools-profile.bb
index 6330200d2f..b3a24b71de 100644
--- a/meta/recipes-core/packagegroups/packagegroup-core-tools-profile.bb
+++ b/meta/recipes-core/packagegroups/packagegroup-core-tools-profile.bb
@@ -4,22 +4,17 @@
SUMMARY = "Profiling tools"
-PR = "r3"
PACKAGE_ARCH = "${MACHINE_ARCH}"
inherit packagegroup
-PROFILE_TOOLS_X = ""
# sysprof doesn't support aarch64 and nios2
-PROFILE_TOOLS_X:aarch64 = ""
-PROFILE_TOOLS_X:nios2 = ""
PROFILE_TOOLS_SYSTEMD = "${@bb.utils.contains('DISTRO_FEATURES', 'systemd', 'systemd-analyze', '', d)}"
RRECOMMENDS:${PN} = "\
${PERF} \
blktrace \
- ${PROFILE_TOOLS_X} \
${PROFILE_TOOLS_SYSTEMD} \
"
diff --git a/meta/recipes-core/packagegroups/packagegroup-core-tools-testapps.bb b/meta/recipes-core/packagegroups/packagegroup-core-tools-testapps.bb
index e05e329020..34af40a43f 100644
--- a/meta/recipes-core/packagegroups/packagegroup-core-tools-testapps.bb
+++ b/meta/recipes-core/packagegroups/packagegroup-core-tools-testapps.bb
@@ -4,7 +4,6 @@
SUMMARY = "Testing tools/applications"
-PR = "r2"
PACKAGE_ARCH = "${MACHINE_ARCH}"
@@ -25,8 +24,6 @@ GOTOOLS ?= "go-helloworld"
GOTOOLS:powerpc ?= ""
GOTOOLS:riscv32 ?= ""
-RUSTTOOLS ?= "rust-hello-world"
-
GSTEXAMPLES ?= "gst-examples"
GSTEXAMPLES:riscv64 = ""
@@ -59,5 +56,4 @@ RDEPENDS:${PN} = "\
${@bb.utils.contains('DISTRO_FEATURES', 'x11 opengl', "${X11GLTOOLS}", "", d)} \
${@bb.utils.contains('DISTRO_FEATURES', '3g', "${3GTOOLS}", "", d)} \
${GOTOOLS} \
- ${RUSTTOOLS} \
"
diff --git a/meta/recipes-core/packagegroups/packagegroup-go-sdk-target.bb b/meta/recipes-core/packagegroups/packagegroup-go-sdk-target.bb
index 61629d273c..c03918acc8 100644
--- a/meta/recipes-core/packagegroups/packagegroup-go-sdk-target.bb
+++ b/meta/recipes-core/packagegroups/packagegroup-go-sdk-target.bb
@@ -6,7 +6,6 @@ RDEPENDS:${PN} = " \
go \
go-runtime \
go-runtime-dev \
- go-runtime-staticdev \
"
COMPATIBLE_HOST = "^(?!riscv32).*"
diff --git a/meta/recipes-core/packagegroups/packagegroup-rust-cross-canadian.bb b/meta/recipes-core/packagegroups/packagegroup-rust-cross-canadian.bb
index 0d4f5ec9ef..bb10a2d34f 100644
--- a/meta/recipes-core/packagegroups/packagegroup-rust-cross-canadian.bb
+++ b/meta/recipes-core/packagegroups/packagegroup-rust-cross-canadian.bb
@@ -6,13 +6,16 @@ inherit cross-canadian packagegroup
PACKAGEGROUP_DISABLE_COMPLEMENTARY = "1"
RUST="rust-cross-canadian-${TRANSLATED_TARGET_ARCH}"
-CARGO="cargo-cross-canadian-${TRANSLATED_TARGET_ARCH}"
-RUST_TOOLS="rust-tools-cross-canadian-${TRANSLATED_TARGET_ARCH}"
RDEPENDS:${PN} = " \
${@all_multilib_tune_values(d, 'RUST')} \
- ${@all_multilib_tune_values(d, 'CARGO')} \
- rust-cross-canadian-src \
- ${@all_multilib_tune_values(d, 'RUST_TOOLS')} \
+ nativesdk-binutils \
+ nativesdk-gcc \
+ nativesdk-glibc-dev \
+ nativesdk-libgcc-dev \
+ nativesdk-rust \
+ nativesdk-cargo \
+ nativesdk-rust-tools-clippy \
+ nativesdk-rust-tools-rustfmt \
"
diff --git a/meta/recipes-core/packagegroups/packagegroup-rust-sdk-target.bb b/meta/recipes-core/packagegroups/packagegroup-rust-sdk-target.bb
new file mode 100644
index 0000000000..59874c4c2c
--- /dev/null
+++ b/meta/recipes-core/packagegroups/packagegroup-rust-sdk-target.bb
@@ -0,0 +1,14 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+SUMMARY = "Target packages for the Rust SDK"
+
+inherit packagegroup
+
+RDEPENDS:${PN} = " \
+ rust \
+ cargo \
+"
diff --git a/meta/recipes-core/packagegroups/packagegroup-self-hosted.bb b/meta/recipes-core/packagegroups/packagegroup-self-hosted.bb
index 9523aadd15..df71695a97 100644
--- a/meta/recipes-core/packagegroups/packagegroup-self-hosted.bb
+++ b/meta/recipes-core/packagegroups/packagegroup-self-hosted.bb
@@ -4,7 +4,6 @@
SUMMARY = "Self-hosting"
DESCRIPTION = "Packages required to run the build system"
-PR = "r13"
PACKAGE_ARCH = "${TUNE_PKGARCH}"
@@ -41,7 +40,7 @@ RDEPENDS:packagegroup-self-hosted-host-tools = "\
iptables \
lsb-release \
mc \
- mc-fish \
+ mc-shell \
mc-helpers \
mc-helpers-perl \
parted \
@@ -98,11 +97,14 @@ RDEPENDS:packagegroup-self-hosted-sdk:append:libc-glibc = "\
glibc-utils \
rpcsvc-proto \
"
+
+STRACE = "strace"
+STRACE:riscv32 = ""
RDEPENDS:packagegroup-self-hosted-debug = " \
gdb \
gdbserver \
rsync \
- strace \
+ ${STRACE} \
tcf-agent"
@@ -124,12 +126,10 @@ RDEPENDS:packagegroup-self-hosted-extended = "\
grep \
groff \
gzip \
- settings-daemon \
libaio \
libusb1 \
libxml2 \
lsof \
- lzo \
man \
man-pages \
mdadm \
@@ -158,6 +158,7 @@ RDEPENDS:packagegroup-self-hosted-extended = "\
readline \
rpm \
setserial \
+ settings-daemon \
socat \
subversion \
sudo \
@@ -171,22 +172,23 @@ RDEPENDS:packagegroup-self-hosted-extended = "\
wget \
which \
xinetd \
+ xz \
zip \
zlib \
- xz \
+ zstd \
"
RDEPENDS:packagegroup-self-hosted-graphics = "\
+ adwaita-icon-theme \
builder \
+ l3afpad \
libgl \
libgl-dev \
libglu \
libglu-dev \
libx11-dev \
- adwaita-icon-theme \
- xdg-utils \
- l3afpad \
pcmanfm \
vte \
+ xdg-utils \
"
diff --git a/meta/recipes-core/psplash/files/psplash-poky-img.h b/meta/recipes-core/psplash/files/psplash-poky-img.h
deleted file mode 100644
index 8d56aa0201..0000000000
--- a/meta/recipes-core/psplash/files/psplash-poky-img.h
+++ /dev/null
@@ -1,1259 +0,0 @@
-/* GdkPixbuf RGB C-Source image dump 1-byte-run-length-encoded */
-
-#define POKY_IMG_ROWSTRIDE (1920)
-#define POKY_IMG_WIDTH (640)
-#define POKY_IMG_HEIGHT (480)
-#define POKY_IMG_BYTES_PER_PIXEL (3) /* 3:RGB, 4:RGBA */
-#define POKY_IMG_RLE_PIXEL_DATA ((uint8*) \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\237\377\377\377\377\376\377\374\361\376\377\374\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\223\377\377\377\377\376\377\374\361\376" \
- "\377\374\377\377\377\377\377\377\377\377\377\377\377\377\223\377\377" \
- "\377\377\376\377\374\361\376\377\374\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\223\377\377\377\377\376\377\374\361\376\377\374\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\223\377\377\377\377\376" \
- "\377\374\361\376\377\374\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\223\377\377\377\377\376\377\374\361\376\377\374\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\223\377\377\377\377\376\377\374\361" \
- "\376\377\374\377\377\377\377\377\377\377\377\377\377\377\377\223\377" \
- "\377\377\377\376\377\374\361\376\377\374\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\223\377\377\377\377\376\377\374\361\376\377\374" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\223\377\377\377\377" \
- "\376\377\374\361\376\377\374\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\223\377\377\377\377\376\377\374\361\376\377\374\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\223\377\377\377\377\376\377\374" \
- "\361\376\377\374\377\377\377\377\377\377\377\377\377\377\377\377\223" \
- "\377\377\377\377\376\377\374\361\376\377\374\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\223\377\377\377\377\376\377\374\361\376\377" \
- "\374\377\377\377\377\377\377\377\377\377\377\377\377\223\377\377\377" \
- "\377\376\377\374\361\376\377\374\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\223\377\377\377\377\376\377\374\361\376\377\374\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\223\377\377\377\377\376\377" \
- "\374\361\376\377\374\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\223\377\377\377\377\376\377\374\361\376\377\374\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\223\377\377\377\363\376\377\374\3\373\375" \
- "\372\360\366\370\353\360\363\202\347\355\357\5\346\354\356\347\355\357" \
- "\351\356\360\360\366\370\371\373\370\363\376\377\374\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\223\377\377\377\353\376\377\374\10\356" \
- "\363\366\324\331\333\277\304\306\252\262\272\227\237\247}\220\233p\203" \
- "\216ew\202\212]oz\10dv\201n\201\214~\215\223\216\236\244\247\257\267" \
- "\274\301\303\320\326\330\352\357\362\353\376\377\374\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\223\377\377\377\346\376\377\374\5\366" \
- "\370\364\320\326\330\255\265\275\210\227\235j}\210\232]oz\5gy\204\204" \
- "\223\231\250\260\270\314\321\324\355\362\365\346\376\377\374\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\223\377\377\377\342\376\377" \
- "\374\5\373\375\372\327\334\337\253\263\273x\213\227^p{\243]oz\4q\204" \
- "\217\242\252\262\320\326\330\365\372\375\342\376\377\374\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\223\377\377\377\337\376\377\374" \
- "\4\371\373\370\317\324\327\225\245\253ew\202\252]oz\4bt\177\211\230\236" \
- "\306\314\316\367\371\366\337\376\377\374\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\223\377\377\377\335\376\377\374\3\333\340\343\236" \
- "\246\256fx\203\260]oz\3bt\177\225\235\245\320\326\330\335\376\377\374" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\223\377\377\377\332" \
- "\376\377\374\3\367\371\366\276\303\305u\210\224\266]oz\3m\200\213\257" \
- "\267\277\355\362\365\332\376\377\374\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\223\377\377\377\330\376\377\374\3\353\360\363\252\262" \
- "\272fx\203\272]oz\3as~\230\250\256\342\347\352\330\376\377\374\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\223\377\377\377\326\376\377" \
- "\374\3\352\357\362\236\246\256as~\276]oz\3^p{\216\236\244\341\346\351" \
- "\326\376\377\374\377\377\377\377\377\377\377\377\377\377\377\377\223" \
- "\377\377\377\324\376\377\374\3\354\361\364\246\256\266as~\302]oz\3^p" \
- "{\227\237\247\346\354\356\324\376\377\374\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\223\377\377\377\322\376\377\374\3\372\374\371\265" \
- "\276\306fx\203\306]oz\3as~\250\260\270\362\367\371\322\376\377\374\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\223\377\377\377\321\376" \
- "\377\374\2\320\326\330p\203\216\312]oz\2hz\205\302\307\312\321\376\377" \
- "\374\377\377\377\377\377\377\377\377\377\377\377\377\223\377\377\377" \
- "\317\376\377\374\2\356\363\366\231\241\251\316]oz\2\204\223\231\345\352" \
- "\355\317\376\377\374\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\223\377\377\377\316\376\377\374\2\311\316\321i{\206\320]oz\2bt\177\264" \
- "\274\305\316\376\377\374\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\223\377\377\377\314\376\377\374\2\362\367\371\226\246\254\324]o" \
- "z\2\206\225\233\353\360\363\314\376\377\374\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\223\377\377\377\313\376\377\374\2\334\342\344o\202" \
- "\215\326]oz\2gy\204\315\322\325\313\376\377\374\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\223\377\377\377\312\376\377\374\2\302\307\312" \
- "as~\330]oz\3^p{\254\264\274\373\375\372\311\376\377\374\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\223\377\377\377\310\376\377\374\2\372" \
- "\374\371\236\246\256\334]oz\2\210\227\235\360\366\370\310\376\377\374" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\223\377\377\377\307" \
- "\376\377\374\2\360\366\370\205\224\232\336]oz\2s\206\221\347\355\357" \
- "\307\376\377\374\377\377\377\377\377\377\377\377\377\377\377\377\223" \
- "\377\377\377\306\376\377\374\2\347\355\357t\207\222\340]oz\2i{\206\333" \
- "\340\343\306\376\377\374\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\223\377\377\377\305\376\377\374\2\343\350\353j}\210\251]oz\7bt\177" \
- "w\212\226\215\234\243\236\246\256\246\256\266\254\264\274\264\274\305" \
- "\202\300\305\310\7\264\274\305\255\265\275\250\260\270\233\252\261\222" \
- "\241\247~\215\223ew\202\251]oz\2cu\200\322\330\332\305\376\377\374\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\223\377\377\377\304\376" \
- "\377\374\2\337\344\346i{\206\245]oz\5j}\210\230\240\250\270\300\311\322" \
- "\330\332\351\356\360\220\376\377\374\5\354\361\364\326\333\335\300\305" \
- "\310\236\246\256p\203\216\245]oz\2as~\316\323\326\304\376\377\374\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\223\377\377\377\303\376" \
- "\377\374\2\335\343\345gy\204\242]oz\4ew\202\222\241\247\276\303\305\342" \
- "\347\352\232\376\377\374\4\352\357\362\304\311\314\233\243\253j}\210" \
- "\242]oz\2as~\313\320\322\303\376\377\374\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\223\377\377\377\302\376\377\374\2\341\346\351hz" \
- "\205\240]oz\4_q|\211\230\236\307\315\317\370\372\367\240\376\377\374" \
- "\4\373\375\372\320\326\330\231\241\251bt\177\240]oz\2as~\317\324\327" \
- "\302\376\377\374\377\377\377\377\377\377\377\377\377\377\377\377\223" \
- "\377\377\377\301\376\377\374\2\343\350\353i{\206\237]oz\3q\204\217\266" \
- "\277\307\354\361\364\246\376\377\374\3\366\370\364\303\310\313\177\216" \
- "\224\237]oz\2bt\177\325\332\334\301\376\377\374\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\223\377\377\377\300\376\377\374\2\355\362\365" \
- "l\177\212\236]oz\3u\210\224\307\315\317\373\375\372\253\376\377\374\2" \
- "\324\331\333\204\223\231\236]oz\2ew\202\341\346\351\300\376\377\374\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\223\377\377\377\277\376" \
- "\377\374\2\365\372\375\177\216\224\235]oz\2~\215\223\315\322\325\260" \
- "\376\377\374\2\331\336\341\211\230\236\235]oz\2l\177\212\353\360\363" \
- "\277\376\377\374\377\377\377\377\377\377\377\377\377\377\377\377\223" \
- "\377\377\377\277\376\377\374\1\214\233\242\234]oz\2i{\206\305\312\315" \
- "\264\376\377\374\2\324\331\333q\204\217\234]oz\2u\210\224\367\371\366" \
- "\276\376\377\374\377\377\377\377\377\377\377\377\377\377\377\377\223" \
- "\377\377\377\276\376\377\374\1\257\267\277\233]oz\3^p{\242\252\262\362" \
- "\367\371\266\376\377\374\3\372\374\371\261\271\302bt\177\233]oz\1\225" \
- "\235\245\276\376\377\374\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\223\377\377\377\275\376\377\374\2\317\324\327^p{\232]oz\2w\212\226" \
- "\335\343\345\272\376\377\374\2\351\356\360\211\230\236\233]oz\1\264\274" \
- "\305\275\376\377\374\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\223\377\377\377\274\376\377\374\2\347\355\357ew\202\232]oz\2\242\252" \
- "\262\371\373\370\225\376\377\374\10\336\367\374\262\355\370\220\343\361" \
- "m\332\361G\317\353,\311\352(\307\350#\304\346\202\0\301\350\10\0\304" \
- "\352(\307\350,\311\352B\314\347i\326\356\207\341\364\260\353\366\333" \
- "\364\371\226\376\377\374\2\266\277\307as~\231]oz\2_q|\327\334\337\274" \
- "\376\377\374\377\377\377\377\377\377\377\377\377\377\377\377\223\377" \
- "\377\377\273\376\377\374\2\373\375\372}\220\233\231]oz\2ew\202\313\320" \
- "\322\223\376\377\374\4\334\365\372\233\345\364M\323\357\0\304\352\222" \
- "\25\274\344\4\0\301\350J\321\355\211\342\365\314\361\372\223\376\377" \
- "\374\2\332\337\342m\200\213\231]oz\2l\177\212\360\366\370\273\376\377" \
- "\374\377\377\377\377\377\377\377\377\377\377\377\377\223\377\377\377" \
- "\273\376\377\374\1\257\267\277\231]oz\2x\213\227\347\355\357\221\376" \
- "\377\374\3\336\367\374\206\340\363*\310\351\232\25\274\344\3#\304\346" \
- "w\333\356\316\363\374\221\376\377\374\2\360\366\370\214\233\242\231]" \
- "oz\1\227\237\247\273\376\377\374\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\223\377\377\377\272\376\377\374\2\335\343\345_q|\230]oz" \
- "\2\210\227\235\364\371\374\217\376\377\374\3\370\376\377\256\350\364" \
- "B\314\347\240\25\274\344\3*\310\351\233\345\364\367\375\377\217\376\377" \
- "\374\2\373\375\372\242\252\262\231]oz\1\305\312\315\272\376\377\374\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\223\377\377\377\271\376" \
- "\377\374\2\372\374\371w\212\226\230]oz\2\231\241\251\372\374\371\216" \
- "\376\377\374\3\367\375\377\233\345\364(\307\350\244\25\274\344\3\0\302" \
- "\351\206\340\363\356\373\374\217\376\377\374\1\257\267\277\230]oz\2h" \
- "z\205\356\363\366\271\376\377\374\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\223\377\377\377\271\376\377\374\1\257\267\277\230]oz\1\246" \
- "\256\266\217\376\377\374\2\237\350\367\0\304\352\250\25\274\344\3\0\276" \
- "\345\207\341\364\370\376\377\216\376\377\374\2\277\304\306_q|\227]oz" \
- "\1\231\241\251\271\376\377\374\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\223\377\377\377\270\376\377\374\2\346\354\356bt\177\227]oz\1" \
- "\251\261\271\216\376\377\374\2\315\362\3731\313\354\254\25\274\344\2" \
- "(\307\350\262\355\370\216\376\377\374\2\305\312\315^p{\227]oz\1\322\330" \
- "\332\270\376\377\374\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\223\377\377\377\270\376\377\374\1\215\234\243\227]oz\1\233\252\261\215" \
- "\376\377\374\3\367\375\377m\332\361\0\276\345\257\25\274\344\2L\322\356" \
- "\356\373\374\215\376\377\374\1\274\301\303\227]oz\2t\207\222\373\375" \
- "\372\267\376\377\374\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\223\377\377\377\267\376\377\374\1\325\332\334\227]oz\2\214\233\242\373" \
- "\375\372\214\376\377\374\2\333\364\371,\311\352\262\25\274\344\2\0\304" \
- "\352\301\356\366\215\376\377\374\1\252\262\272\227]oz\1\274\301\303\267" \
- "\376\377\374\377\377\377\377\377\377\377\377\377\377\377\377\223\377" \
- "\377\377\267\376\377\374\1v\211\225\226]oz\2\205\224\232\365\372\375" \
- "\214\376\377\374\2\236\347\366\0\276\345\264\25\274\344\3\0\276\345\203" \
- "\334\360\370\376\377\214\376\377\374\1\236\246\256\226]oz\2hz\205\362" \
- "\367\371\266\376\377\374\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\223\377\377\377\266\376\377\374\1\307\315\317\226]oz\2n\201\214" \
- "\360\366\370\214\376\377\374\1m\332\361\270\25\274\344\2I\320\354\366" \
- "\373\376\213\376\377\374\2\373\375\372\203\222\230\226]oz\1\254\264\274" \
- "\266\376\377\374\377\377\377\377\377\377\377\377\377\377\377\377\223" \
- "\377\377\377\265\376\377\374\2\373\375\372s\206\221\225]oz\2as~\333\340" \
- "\343\213\376\377\374\2\370\376\377M\323\357\272\25\274\344\2""1\313\354" \
- "\356\373\374\213\376\377\374\2\353\360\363i{\206\225]oz\2ew\202\357\365" \
- "\367\265\376\377\374\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\223\377\377\377\265\376\377\374\1\305\312\315\226]oz\1\265\276\306\213" \
- "\376\377\374\2\366\373\376B\314\347\274\25\274\344\2(\307\350\336\367" \
- "\374\213\376\377\374\2\322\330\332^p{\225]oz\1\252\262\272\265\376\377" \
- "\374\377\377\377\377\377\377\377\377\377\377\377\377\223\377\377\377" \
- "\265\376\377\374\1u\210\224\225]oz\1\214\233\242\213\376\377\374\2\366" \
- "\373\376B\314\347\276\25\274\344\2#\304\346\337\370\375\213\376\377\374" \
- "\1\253\263\273\225]oz\2fx\203\367\371\366\264\376\377\374\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\223\377\377\377\264\376\377\374" \
- "\1\322\330\332\225]oz\2i{\206\360\366\370\212\376\377\374\2\370\376\377" \
- "I\320\354\300\25\274\344\2*\310\351\357\374\375\212\376\377\374\2\373" \
- "\375\372~\215\223\225]oz\1\264\274\305\264\376\377\374\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\223\377\377\377\264\376\377\374\1\210" \
- "\227\235\225]oz\1\303\310\313\213\376\377\374\1i\326\356\302\25\274\344" \
- "\2B\314\347\370\376\377\212\376\377\374\2\335\343\345^p{\224]oz\2m\200" \
- "\213\373\375\372\263\376\377\374\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\223\377\377\377\263\376\377\374\2\345\352\355^p{\224]oz" \
- "\1~\221\235\213\376\377\374\1y\336\360\304\25\274\344\1M\323\357\213" \
- "\376\377\374\1\242\252\262\225]oz\1\315\322\325\263\376\377\374\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\223\377\377\377\263\376\377" \
- "\374\1\252\262\272\224]oz\2_q|\341\346\351\212\376\377\374\2\274\355" \
- "\373\0\276\345\305\25\274\344\1\220\343\361\212\376\377\374\2\360\366" \
- "\370i{\206\224]oz\1\211\230\236\263\376\377\374\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\223\377\377\377\262\376\377\374\2\372\374\371" \
- "i{\206\224]oz\1\250\260\270\212\376\377\374\2\350\371\377\0\304\352\306" \
- "\25\274\344\2\0\276\345\316\363\374\212\376\377\374\1\303\310\313\224" \
- "]oz\2^p{\352\357\362\262\376\377\374\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\223\377\377\377\262\376\377\374\1\315\322\325\224]o" \
- "z\2dv\201\357\365\367\212\376\377\374\1G\317\353\240\25\274\344\3\0\302" \
- "\351,\311\352J\321\355\203L\322\356\2B\314\347#\304\346\240\25\274\344" \
- "\2*\310\351\367\375\377\212\376\377\374\1q\204\217\224]oz\1\257\267\277" \
- "\262\376\377\374\377\377\377\377\377\377\377\377\377\377\377\377\223" \
- "\377\377\377\262\376\377\374\1\230\240\250\224]oz\1\247\257\267\212\376" \
- "\377\374\1\233\345\364\235\25\274\344\4*\310\351n\333\362\273\354\371" \
- "\360\375\376\210\376\377\374\4\357\374\375\303\361\370\206\340\363/\312" \
- "\353\235\25\274\344\1w\333\356\212\376\377\374\1\303\310\313\224]oz\1" \
- "v\211\225\262\376\377\374\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\223\377\377\377\261\376\377\374\2\365\372\375dv\201\223]oz\2as~" \
- "\353\360\363\211\376\377\374\2\356\373\374\0\302\351\232\25\274\344\3" \
- "\0\276\345m\332\361\335\366\373\220\376\377\374\2\336\367\374(\307\350" \
- "\232\25\274\344\2%\305\347\336\367\374\212\376\377\374\2\372\374\371" \
- "n\201\214\224]oz\1\343\350\353\261\376\377\374\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\223\377\377\377\261\376\377\374\1\313\320\322" \
- "\224]oz\1\233\252\261\212\376\377\374\1k\327\357\231\25\274\344\3\0\276" \
- "\345\206\340\363\357\374\375\222\376\377\374\1k\327\357\232\25\274\344" \
- "\2%\305\347\336\367\374\214\376\377\374\1\276\303\305\224]oz\1\257\267" \
- "\277\261\376\377\374\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\223\377\377\377\261\376\377\374\1\226\246\254\223]oz\2^p{\347\355\357" \
- "\211\376\377\374\2\316\363\374\0\276\345\230\25\274\344\2J\321\355\337" \
- "\370\375\223\376\377\374\1k\327\357\232\25\274\344\2%\305\347\336\367" \
- "\374\215\376\377\374\2\370\372\367i{\206\223]oz\1\177\216\224\261\376" \
- "\377\374\377\377\377\377\377\377\377\377\377\377\377\377\223\377\377" \
- "\377\261\376\377\374\1k~\211\223]oz\1\205\224\232\212\376\377\374\1J" \
- "\321\355\227\25\274\344\2\0\276\345\257\351\365\224\376\377\374\1m\332" \
- "\361\232\25\274\344\2%\305\347\336\367\374\217\376\377\374\1\246\256" \
- "\266\223]oz\2^p{\356\363\366\260\376\377\374\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\223\377\377\377\260\376\377\374\1\335\343\345" \
- "\224]oz\1\303\310\313\211\376\377\374\2\333\364\371\0\276\345\226\25" \
- "\274\344\2#\304\346\316\363\374\224\376\377\374\1m\332\361\232\25\274" \
- "\344\2%\305\347\336\367\374\220\376\377\374\1\335\343\345\224]oz\1\306" \
- "\314\316\260\376\377\374\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\223\377\377\377\260\376\377\374\1\264\274\305\223]oz\2cu\200\367" \
- "\371\366\211\376\377\374\1i\326\356\226\25\274\344\2,\311\352\347\370" \
- "\376\224\376\377\374\1m\332\361\232\25\274\344\2%\305\347\336\367\374" \
- "\222\376\377\374\1t\207\222\223]oz\1\236\246\256\260\376\377\374\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\223\377\377\377\260\376" \
- "\377\374\1\215\234\243\223]oz\1\231\241\251\211\376\377\374\2\347\370" \
- "\376\0\276\345\225\25\274\344\2*\310\351\357\374\375\224\376\377\374" \
- "\1m\332\361\232\25\274\344\2%\305\347\336\367\374\223\376\377\374\1\261" \
- "\271\302\223]oz\1q\204\217\260\376\377\374\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\223\377\377\377\260\376\377\374\1i{\206\223]oz\1" \
- "\317\324\327\211\376\377\374\1\207\341\364\225\25\274\344\2#\304\346" \
- "\337\370\375\224\376\377\374\1m\332\361\232\25\274\344\2%\305\347\336" \
- "\367\374\224\376\377\374\1\351\356\360\224]oz\1\356\363\366\257\376\377" \
- "\374\377\377\377\377\377\377\377\377\377\377\377\377\223\377\377\377" \
- "\257\376\377\374\1\345\352\355\223]oz\2as~\370\372\367\211\376\377\374" \
- "\1,\311\352\224\25\274\344\2\0\276\345\313\360\371\224\376\377\374\1" \
- "m\332\361\232\25\274\344\2%\305\347\336\367\374\226\376\377\374\1q\204" \
- "\217\223]oz\1\314\321\324\257\376\377\374\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\223\377\377\377\257\376\377\374\1\311\316\321\223" \
- "]oz\1\202\221\227\211\376\377\374\1\320\365\376\225\25\274\344\1\207" \
- "\341\364\224\376\377\374\1m\332\361\232\25\274\344\2%\305\347\336\367" \
- "\374\214\376\377\374\2\236\347\366\237\350\367\211\376\377\374\1\236" \
- "\246\256\223]oz\1\255\265\275\257\376\377\374\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\223\377\377\377\257\376\377\374\1\254\264\274" \
- "\223]oz\1\255\265\275\211\376\377\374\1y\336\360\224\25\274\344\2/\312" \
- "\353\370\376\377\223\376\377\374\1m\332\361\232\25\274\344\2%\305\347" \
- "\336\367\374\214\376\377\374\3\236\347\366\0\276\345J\321\355\211\376" \
- "\377\374\1\311\316\321\223]oz\1\215\234\243\257\376\377\374\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\223\377\377\377\257\376\377\374" \
- "\1\214\233\242\223]oz\1\326\333\335\211\376\377\374\1,\311\352\224\25" \
- "\274\344\1\312\357\370\223\376\377\374\1n\333\362\232\25\274\344\2%\305" \
- "\347\336\367\374\214\376\377\374\5\236\347\366\0\276\345\25\274\344\0" \
- "\276\345\367\375\377\210\376\377\374\2\356\363\366^p{\222]oz\1o\202\215" \
- "\257\376\377\374\377\377\377\377\377\377\377\377\377\377\377\377\223" \
- "\377\377\377\257\376\377\374\1o\202\215\222]oz\2as~\366\373\376\210\376" \
- "\377\374\2\357\374\375\0\276\345\223\25\274\344\1J\321\355\223\376\377" \
- "\374\1n\333\362\232\25\274\344\2%\305\347\336\367\374\214\376\377\374" \
- "\2\236\347\366\0\276\345\203\25\274\344\1\274\355\373\211\376\377\374" \
- "\1u\210\224\222]oz\2^p{\370\372\367\256\376\377\374\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\223\377\377\377\256\376\377\374\2\373\375" \
- "\372_q|\222]oz\1u\210\224\211\376\377\374\1\261\354\367\224\25\274\344" \
- "\1\312\357\370\222\376\377\374\1n\333\362\232\25\274\344\2%\305\347\336" \
- "\367\374\214\376\377\374\2\236\347\366\0\276\345\204\25\274\344\1\204" \
- "\335\361\211\376\377\374\1\230\240\250\223]oz\1\342\347\352\256\376\377" \
- "\374\377\377\377\377\377\377\377\377\377\377\377\377\223\377\377\377" \
- "\256\376\377\374\1\351\356\360\223]oz\1\225\235\245\211\376\377\374\1" \
- "n\333\362\223\25\274\344\1,\311\352\222\376\377\374\1n\333\362\232\25" \
- "\274\344\2%\305\347\336\367\374\214\376\377\374\2\236\347\366\0\276\345" \
- "\205\25\274\344\1B\314\347\211\376\377\374\1\257\267\277\223]oz\1\317" \
- "\324\327\256\376\377\374\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\223\377\377\377\256\376\377\374\1\326\333\335\223]oz\1\253\263\273" \
- "\211\376\377\374\1B\314\347\223\25\274\344\1\211\342\365\221\376\377" \
- "\374\1n\333\362\232\25\274\344\2%\305\347\336\367\374\214\376\377\374" \
- "\2\236\347\366\0\276\345\206\25\274\344\1\0\302\351\211\376\377\374\1" \
- "\307\315\317\223]oz\1\276\303\305\256\376\377\374\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\223\377\377\377\256\376\377\374\1\306\314" \
- "\316\223]oz\1\305\312\315\211\376\377\374\1#\304\346\223\25\274\344\1" \
- "\337\370\375\220\376\377\374\1w\333\356\232\25\274\344\2%\305\347\336" \
- "\367\374\214\376\377\374\2\236\347\366\0\276\345\210\25\274\344\1\356" \
- "\373\374\210\376\377\374\1\337\344\346\223]oz\1\253\263\273\256\376\377" \
- "\374\377\377\377\377\377\377\377\377\377\377\377\377\223\377\377\377" \
- "\256\376\377\374\1\274\301\303\223]oz\1\333\340\343\210\376\377\374\2" \
- "\367\375\377\0\276\345\222\25\274\344\1(\307\350\220\376\377\374\1\231" \
- "\342\362\232\25\274\344\2%\305\347\336\367\374\214\376\377\374\2\236" \
- "\347\366\0\276\345\211\25\274\344\1\302\357\367\210\376\377\374\2\364" \
- "\371\374^p{\222]oz\1\236\246\256\256\376\377\374\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\223\377\377\377\256\376\377\374\1\257\267" \
- "\277\223]oz\1\353\360\363\210\376\377\374\1\316\363\374\223\25\274\344" \
- "\1\\\326\355\217\376\377\374\2\320\365\376\31\275\345\231\25\274\344" \
- "\2%\305\347\336\367\374\214\376\377\374\2\236\347\366\0\276\345\212\25" \
- "\274\344\1\237\350\367\211\376\377\374\1gy\204\222]oz\1\216\236\244\256" \
- "\376\377\374\377\377\377\377\377\377\377\377\377\377\377\377\223\377" \
- "\377\377\256\376\377\374\1\242\252\262\223]oz\1\366\370\364\210\376\377" \
- "\374\1\262\355\370\223\25\274\344\1\220\343\361\217\376\377\374\1J\321" \
- "\355\231\25\274\344\2%\305\347\336\367\374\214\376\377\374\2\236\347" \
- "\366\0\276\345\213\25\274\344\1\211\342\365\211\376\377\374\1m\200\213" \
- "\222]oz\1\204\223\231\256\376\377\374\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\223\377\377\377\256\376\377\374\1\226\246\254\223]o" \
- "z\211\376\377\374\1\237\350\367\223\25\274\344\1\262\355\370\216\376" \
- "\377\374\2\350\371\377\0\276\345\230\25\274\344\2%\305\347\336\367\374" \
- "\214\376\377\374\2\236\347\366\0\276\345\214\25\274\344\1n\333\362\211" \
- "\376\377\374\1u\210\224\222]oz\1y\214\230\256\376\377\374\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\223\377\377\377\256\376\377\374" \
- "\1\231\241\251\222]oz\1cu\200\211\376\377\374\1\233\345\364\223\25\274" \
- "\344\1\316\363\374\216\376\377\374\1\235\346\365\230\25\274\344\2%\305" \
- "\347\336\367\374\214\376\377\374\2\236\347\366\0\276\345\215\25\274\344" \
- "\1k\327\357\211\376\377\374\1\200\217\225\222]oz\1u\210\224\256\376\377" \
- "\374\377\377\377\377\377\377\377\377\377\377\377\377\223\377\377\377" \
- "\256\376\377\374\1\222\241\247\222]oz\1j}\210\211\376\377\374\1\220\343" \
- "\361\223\25\274\344\1\336\367\374\216\376\377\374\1w\333\356\227\25\274" \
- "\344\2#\304\346\336\367\374\214\376\377\374\2\236\347\366\0\276\345\216" \
- "\25\274\344\1M\323\357\211\376\377\374\1\210\227\235\222]oz\1s\206\221" \
- "\256\376\377\374\377\377\377\377\377\377\377\377\377\377\377\377\223" \
- "\377\377\377\256\376\377\374\1\211\230\236\222]oz\1n\201\214\211\376" \
- "\377\374\1\206\340\363\223\25\274\344\1\350\371\377\216\376\377\374\1" \
- "i\326\356\226\25\274\344\2#\304\346\336\367\374\214\376\377\374\2\236" \
- "\347\366\0\276\345\217\25\274\344\1M\323\357\211\376\377\374\1\215\234" \
- "\243\222]oz\1m\200\213\256\376\377\374\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\223\377\377\377\256\376\377\374\1\222\241\247\222]o" \
- "z\1l\177\212\211\376\377\374\1\207\341\364\223\25\274\344\1\336\367\374" \
- "\216\376\377\374\1n\333\362\225\25\274\344\2#\304\346\336\367\374\214" \
- "\376\377\374\2\235\346\365\0\276\345\220\25\274\344\1M\323\357\211\376" \
- "\377\374\1\211\230\236\222]oz\1s\206\221\256\376\377\374\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\223\377\377\377\256\376\377\374" \
- "\1\231\241\251\222]oz\1ew\202\211\376\377\374\1\231\342\362\223\25\274" \
- "\344\1\320\365\376\216\376\377\374\1\221\344\362\224\25\274\344\2#\304" \
- "\346\336\367\374\214\376\377\374\2\235\346\365\0\276\345\221\25\274\344" \
- "\1i\326\356\211\376\377\374\1\202\221\227\222]oz\1u\210\224\256\376\377" \
- "\374\377\377\377\377\377\377\377\377\377\377\377\377\223\377\377\377" \
- "\256\376\377\374\1\242\252\262\222]oz\1^p{\211\376\377\374\1\237\350" \
- "\367\223\25\274\344\1\301\356\366\216\376\377\374\1\333\364\371\223\25" \
- "\274\344\2#\304\346\336\367\374\214\376\377\374\2\356\373\374\0\276\345" \
- "\222\25\274\344\1n\333\362\211\376\377\374\1w\212\226\222]oz\1\202\221" \
- "\227\256\376\377\374\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\223\377\377\377\256\376\377\374\1\246\256\266\223]oz\1\370\372\367\210" \
- "\376\377\374\1\260\353\366\223\25\274\344\1\233\345\364\217\376\377\374" \
- "\1,\311\352\221\25\274\344\2#\304\346\336\367\374\215\376\377\374\1\312" \
- "\357\370\223\25\274\344\1\204\335\361\211\376\377\374\1o\202\215\222" \
- "]oz\1\205\224\232\256\376\377\374\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\223\377\377\377\256\376\377\374\1\252\262\272\223]oz\1\355" \
- "\362\365\210\376\377\374\1\314\361\372\223\25\274\344\1m\332\361\217" \
- "\376\377\374\1\273\354\371\220\25\274\344\2#\304\346\336\367\374\216" \
- "\376\377\374\1\233\345\364\223\25\274\344\1\236\347\366\211\376\377\374" \
- "\1i{\206\222]oz\1\211\230\236\256\376\377\374\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\223\377\377\377\256\376\377\374\1\266\277\307" \
- "\223]oz\1\340\345\350\210\376\377\374\1\356\373\374\223\25\274\344\1" \
- ",\311\352\220\376\377\374\1i\326\356\216\25\274\344\2#\304\346\336\367" \
- "\374\217\376\377\374\1\\\326\355\223\25\274\344\1\274\355\373\210\376" \
- "\377\374\2\372\374\371_q|\222]oz\1\226\246\254\256\376\377\374\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\223\377\377\377\256\376\377" \
- "\374\1\307\315\317\223]oz\1\313\320\322\211\376\377\374\1\0\302\351\222" \
- "\25\274\344\2\0\276\345\357\374\375\217\376\377\374\2\370\376\377G\317" \
- "\353\214\25\274\344\2(\307\350\336\367\374\220\376\377\374\1%\305\347" \
- "\223\25\274\344\1\336\367\374\210\376\377\374\1\343\350\353\223]oz\1" \
- "\254\264\274\256\376\377\374\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\223\377\377\377\256\376\377\374\1\327\334\337\223]oz\1\257\267" \
- "\277\211\376\377\374\1B\314\347\223\25\274\344\1\237\350\367\220\376" \
- "\377\374\3\370\376\377n\333\362\31\275\345\211\25\274\344\2J\321\355" \
- "\356\373\374\220\376\377\374\1\315\362\373\223\25\274\344\1\0\301\350" \
- "\211\376\377\374\1\316\323\326\223]oz\1\276\303\305\256\376\377\374\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\223\377\377\377\256\376" \
- "\377\374\1\346\354\356\223]oz\1\225\245\253\211\376\377\374\1k\327\357" \
- "\223\25\274\344\1G\317\353\222\376\377\374\3\320\365\376Z\325\354\0\301" \
- "\350\204\25\274\344\3\0\276\345G\317\353\274\355\373\222\376\377\374" \
- "\1w\333\356\223\25\274\344\1""3\314\355\211\376\377\374\1\264\274\305" \
- "\223]oz\1\315\322\325\256\376\377\374\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\223\377\377\377\256\376\377\374\1\366\373\376\223]o" \
- "z\1z\215\231\211\376\377\374\1\237\350\367\223\25\274\344\2\31\275\345" \
- "\335\366\373\223\376\377\374\2\370\376\377\316\363\374\202\261\354\367" \
- "\2\313\360\371\366\373\376\223\376\377\374\2\370\376\377\0\302\351\223" \
- "\25\274\344\1n\333\362\211\376\377\374\1\230\250\256\223]oz\1\334\342" \
- "\344\256\376\377\374\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\223\377\377\377\257\376\377\374\1m\200\213\222]oz\1ew\202\211\376\377" \
- "\374\1\347\370\376\224\25\274\344\1m\332\361\254\376\377\374\1\233\345" \
- "\364\224\25\274\344\1\261\354\367\211\376\377\374\1}\220\233\223]oz\1" \
- "\366\370\364\256\376\377\374\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\223\377\377\377\257\376\377\374\1\211\230\236\223]oz\1\335\343" \
- "\345\211\376\377\374\1%\305\347\223\25\274\344\2\0\276\345\336\367\374" \
- "\252\376\377\374\2\367\375\377#\304\346\223\25\274\344\2\31\275\345\357" \
- "\374\375\210\376\377\374\2\370\372\367as~\222]oz\1m\200\213\257\376\377" \
- "\374\377\377\377\377\377\377\377\377\377\377\377\377\223\377\377\377" \
- "\257\376\377\374\1\251\261\271\223]oz\1\264\274\305\211\376\377\374\1" \
- "k\327\357\224\25\274\344\1L\322\356\252\376\377\374\1\206\340\363\224" \
- "\25\274\344\1""3\314\355\211\376\377\374\1\322\330\332\223]oz\1\211\230" \
- "\236\257\376\377\374\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\223\377\377\377\257\376\377\374\1\305\312\315\223]oz\1\211\230\236\211" \
- "\376\377\374\1\301\356\366\225\25\274\344\1\262\355\370\250\376\377\374" \
- "\2\334\365\372\0\276\345\224\25\274\344\1\220\343\361\211\376\377\374" \
- "\1\252\262\272\223]oz\1\251\261\271\257\376\377\374\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\223\377\377\377\257\376\377\374\1\337\344" \
- "\346\223]oz\2ew\202\373\375\372\210\376\377\374\2\370\376\377#\304\346" \
- "\224\25\274\344\2\0\302\351\347\370\376\246\376\377\374\2\367\375\377" \
- ",\311\352\224\25\274\344\2\0\276\345\337\370\375\211\376\377\374\1\177" \
- "\216\224\223]oz\1\305\312\315\257\376\377\374\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\223\377\377\377\257\376\377\374\2\373\375\372" \
- "dv\201\223]oz\1\331\336\341\211\376\377\374\1n\333\362\225\25\274\344" \
- "\2B\314\347\370\376\377\245\376\377\374\1i\326\356\225\25\274\344\1B" \
- "\314\347\211\376\377\374\2\362\367\371_q|\223]oz\1\346\354\356\257\376" \
- "\377\374\377\377\377\377\377\377\377\377\377\377\377\377\223\377\377" \
- "\377\260\376\377\374\1\210\227\235\223]oz\1\246\256\266\211\376\377\374" \
- "\1\316\363\374\226\25\274\344\2M\323\357\370\376\377\243\376\377\374" \
- "\1\206\340\363\226\25\274\344\1\256\350\364\211\376\377\374\1\301\306" \
- "\311\223]oz\1k~\211\260\376\377\374\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\223\377\377\377\260\376\377\374\1\257\267\277\223]oz\2i" \
- "{\206\373\375\372\211\376\377\374\1G\317\353\226\25\274\344\2\\\326\355" \
- "\370\376\377\241\376\377\374\1\220\343\361\226\25\274\344\2\0\304\352" \
- "\370\376\377\211\376\377\374\1\204\223\231\223]oz\1\230\240\250\260\376" \
- "\377\374\377\377\377\377\377\377\377\377\377\377\377\377\223\377\377" \
- "\377\260\376\377\374\1\331\336\341\224]oz\1\317\324\327\211\376\377\374" \
- "\1\301\356\366\227\25\274\344\2J\321\355\367\375\377\237\376\377\374" \
- "\1w\333\356\227\25\274\344\1\206\340\363\211\376\377\374\2\351\356\360" \
- "^p{\223]oz\1\277\304\306\260\376\377\374\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\223\377\377\377\260\376\377\374\2\372\374\371cu" \
- "\200\223]oz\1\222\241\247\212\376\377\374\1/\312\353\227\25\274\344\2" \
- ",\311\352\334\365\372\234\376\377\374\2\356\373\374I\320\354\227\25\274" \
- "\344\2\0\301\350\366\373\376\211\376\377\374\1\261\271\302\224]oz\1\343" \
- "\350\353\260\376\377\374\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\223\377\377\377\261\376\377\374\1\216\236\244\223]oz\2cu\200\362" \
- "\367\371\211\376\377\374\1\261\354\367\230\25\274\344\3\0\276\345\233" \
- "\345\364\370\376\377\231\376\377\374\2\262\355\370\0\304\352\230\25\274" \
- "\344\1\206\340\363\212\376\377\374\1s\206\221\223]oz\1q\204\217\261\376" \
- "\377\374\377\377\377\377\377\377\377\377\377\377\377\377\223\377\377" \
- "\377\261\376\377\374\1\305\312\315\224]oz\1\257\267\277\212\376\377\374" \
- "\1B\314\347\231\25\274\344\2""1\313\354\303\361\370\226\376\377\374\2" \
- "\334\365\372J\321\355\231\25\274\344\2#\304\346\366\373\376\211\376\377" \
- "\374\1\315\322\325\224]oz\1\251\261\271\261\376\377\374\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\223\377\377\377\261\376\377\374\2\362" \
- "\367\371as~\223]oz\2hz\205\370\372\367\211\376\377\374\2\334\365\372" \
- "\0\276\345\232\25\274\344\3B\314\347\262\355\370\370\376\377\221\376" \
- "\377\374\3\313\360\371L\322\356\0\276\345\232\25\274\344\1\256\350\364" \
- "\212\376\377\374\1z\215\231\224]oz\1\333\340\343\261\376\377\374\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\223\377\377\377\262\376" \
- "\377\374\1\207\226\234\224]oz\1\270\300\311\212\376\377\374\1w\333\356" \
- "\234\25\274\344\4\0\304\352m\332\361\273\354\371\366\373\376\212\376" \
- "\377\374\4\370\376\377\302\357\367y\336\360(\307\350\234\25\274\344\1" \
- "B\314\347\212\376\377\374\1\324\331\333\224]oz\1k~\211\262\376\377\374" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\223\377\377\377\262" \
- "\376\377\374\1\301\306\311\224]oz\2k~\211\372\374\371\211\376\377\374" \
- "\2\366\373\376(\307\350\236\25\274\344\5\31\275\345,\311\352M\323\357" \
- "y\336\360\211\342\365\202\233\345\364\5\220\343\361\204\335\361Z\325" \
- "\3541\313\354\0\276\345\236\25\274\344\2\0\301\350\336\367\374\212\376" \
- "\377\374\1\205\224\232\224]oz\1\246\256\266\262\376\377\374\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\223\377\377\377\262\376\377\374" \
- "\2\367\371\366dv\201\224]oz\1\274\301\303\212\376\377\374\2\315\362\373" \
- "\0\276\345\307\25\274\344\1\236\347\366\212\376\377\374\1\325\332\334" \
- "\225]oz\1\341\346\351\262\376\377\374\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\223\377\377\377\263\376\377\374\1\236\246\256\224]o" \
- "z\2fx\203\356\363\366\212\376\377\374\1\221\344\362\306\25\274\344\1" \
- "M\323\357\212\376\377\374\2\372\374\371t\207\222\224]oz\1\177\216\224" \
- "\263\376\377\374\377\377\377\377\377\377\377\377\377\377\377\377\223" \
- "\377\377\377\263\376\377\374\1\333\340\343\225]oz\1\226\246\254\213\376" \
- "\377\374\1L\322\356\304\25\274\344\2,\311\352\366\373\376\212\376\377" \
- "\374\1\265\276\306\225]oz\1\302\307\312\263\376\377\374\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\223\377\377\377\264\376\377\374\1u\210" \
- "\224\224]oz\2^p{\331\336\341\212\376\377\374\2\366\373\376/\312\353\302" \
- "\25\274\344\2\0\302\351\336\367\374\212\376\377\374\2\353\360\363dv\201" \
- "\224]oz\2dv\201\364\371\374\263\376\377\374\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\223\377\377\377\264\376\377\374\1\302\307\312\225" \
- "]oz\2w\212\226\372\374\371\212\376\377\374\2\350\371\377%\305\347\300" \
- "\25\274\344\2\0\301\350\314\361\372\213\376\377\374\1\227\237\247\225" \
- "]oz\1\246\256\266\264\376\377\374\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\223\377\377\377\264\376\377\374\2\372\374\371l\177\212\225" \
- "]oz\1\252\262\272\213\376\377\374\2\337\370\375\0\304\352\276\25\274" \
- "\344\2\0\276\345\274\355\373\213\376\377\374\1\305\312\315\225]oz\2a" \
- "s~\353\360\363\264\376\377\374\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\223\377\377\377\265\376\377\374\1\270\300\311\225]oz\2^p{\322" \
- "\330\332\213\376\377\374\2\332\363\370#\304\346\274\25\274\344\2\0\276" \
- "\345\273\354\371\213\376\377\374\2\343\350\353dv\201\225]oz\1\236\246" \
- "\256\265\376\377\374\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\223\377\377\377\265\376\377\374\2\365\372\375hz\205\225]oz\2i{\206\354" \
- "\361\364\213\376\377\374\2\337\370\375*\310\351\272\25\274\344\2\0\302" \
- "\351\314\361\372\213\376\377\374\2\371\373\370x\213\227\225]oz\2_q|\346" \
- "\354\356\265\376\377\374\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\223\377\377\377\266\376\377\374\1\261\271\302\226]oz\2\205\224\232" \
- "\373\375\372\213\376\377\374\2\356\373\374B\314\347\270\25\274\344\2" \
- "%\305\347\335\366\373\214\376\377\374\1\242\252\262\226]oz\1\231\241" \
- "\251\266\376\377\374\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\223\377\377\377\266\376\377\374\2\370\372\367m\200\213\226]oz\1\242" \
- "\252\262\214\376\377\374\2\367\375\377i\326\356\266\25\274\344\2B\314" \
- "\347\350\371\377\214\376\377\374\1\277\304\306\226]oz\2bt\177\351\356" \
- "\360\266\376\377\374\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\223\377\377\377\267\376\377\374\1\305\312\315\227]oz\1\261\271\302\215" \
- "\376\377\374\2\250\347\371\0\301\350\262\25\274\344\3\31\275\345\206" \
- "\340\363\370\376\377\214\376\377\374\2\316\323\326_q|\226]oz\1\251\261" \
- "\271\267\376\377\374\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\223\377\377\377\270\376\377\374\1y\214\230\226]oz\2^p{\301\306\311\215" \
- "\376\377\374\2\335\366\373/\312\353\260\25\274\344\2#\304\346\312\357" \
- "\370\215\376\377\374\2\330\335\340cu\200\226]oz\2hz\205\366\370\364\267" \
- "\376\377\374\377\377\377\377\377\377\377\377\377\377\377\377\223\377" \
- "\377\377\270\376\377\374\1\332\337\342\230\222\241\247\1\353\360\363" \
- "\215\376\377\374\3\370\376\377\231\342\362\0\301\350\254\25\274\344\3" \
- "\0\276\345w\333\356\366\373\376\215\376\377\374\2\365\372\375\236\246" \
- "\256\227\222\241\247\1\307\315\317\270\376\377\374\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\223\377\377\377\341\376\377\374\3\356\373" \
- "\374i\326\356\0\276\345\251\25\274\344\2J\321\355\335\366\373\341\376" \
- "\377\374\377\377\377\377\377\377\377\377\377\377\377\377\223\377\377" \
- "\377\343\376\377\374\3\316\363\374Z\325\354\31\275\345\245\25\274\344" \
- "\2G\317\353\302\357\367\343\376\377\374\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\223\377\377\377\345\376\377\374\3\336\367\374m\332\361" \
- "\0\276\345\240\25\274\344\3\31\275\345M\323\357\316\363\374\345\376\377" \
- "\374\377\377\377\377\377\377\377\377\377\377\377\377\223\377\377\377" \
- "\347\376\377\374\4\366\373\376\237\350\367I\320\354\31\275\345\232\25" \
- "\274\344\4\0\276\345B\314\347\231\342\362\356\373\374\347\376\377\374" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\223\377\377\377\352" \
- "\376\377\374\4\356\373\374\235\346\365Z\325\354#\304\346\224\25\274\344" \
- "\4\0\302\351L\322\356\221\344\362\336\367\374\352\376\377\374\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\223\377\377\377\356\376\377" \
- "\374\6\336\367\374\237\350\367w\333\356M\323\357,\311\352\0\276\345\210" \
- "\25\274\344\6\0\276\345*\310\351J\321\355n\333\362\233\345\364\320\365" \
- "\376\356\376\377\374\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\223\377\377\377\364\376\377\374\10\367\375\377\356\373\374\335\366\373" \
- "\316\363\374\315\362\373\335\366\373\350\371\377\366\373\376\364\376" \
- "\377\374\377\377\377\377\377\377\377\377\377\377\377\377\223\377\377" \
- "\377\377\376\377\374\361\376\377\374\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\223\377\377\377\377\376\377\374\361\376\377\374\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\223\377\377\377\377\376" \
- "\377\374\361\376\377\374\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\223\377\377\377\377\376\377\374\361\376\377\374\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\223\377\377\377\375\376\377\374\202" \
- "\370\376\377\250\376\377\374\1\367\375\377\217\376\377\374\1\367\375" \
- "\377\237\376\377\374\1\367\375\377\230\376\377\374\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\223\377\377\377\374\376\377\374\4\237\350" \
- "\367\0\276\345\31\275\345\203\334\360\245\376\377\374\5\316\363\374#" \
- "\304\346\25\274\344G\317\353\367\375\377\213\376\377\374\5\335\366\373" \
- "(\307\350\25\274\344B\314\347\366\373\376\233\376\377\374\5\356\373\374" \
- "/\312\353\25\274\344*\310\351\337\370\375\226\376\377\374\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\223\377\377\377\374\376\377\374" \
- "\1#\304\346\203\25\274\344\1\357\374\375\244\376\377\374\1i\326\356\203" \
- "\25\274\344\1\260\353\366\213\376\377\374\1n\333\362\203\25\274\344\1" \
- "\235\346\365\233\376\377\374\1\231\342\362\203\25\274\344\1y\336\360" \
- "\226\376\377\374\377\377\377\377\377\377\377\377\377\377\377\377\223" \
- "\377\377\377\374\376\377\374\1\0\301\350\203\25\274\344\1\336\367\374" \
- "\244\376\377\374\1I\320\354\203\25\274\344\1\231\342\362\213\376\377" \
- "\374\1M\323\357\203\25\274\344\1\206\340\363\233\376\377\374\1\206\340" \
- "\363\203\25\274\344\1k\327\357\226\376\377\374\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\223\377\377\377\374\376\377\374\1\0\301\350" \
- "\203\25\274\344\1\336\367\374\244\376\377\374\1I\320\354\203\25\274\344" \
- "\1\231\342\362\213\376\377\374\1M\323\357\203\25\274\344\1\206\340\363" \
- "\233\376\377\374\1\206\340\363\203\25\274\344\1k\327\357\226\376\377" \
- "\374\377\377\377\377\377\377\377\377\377\377\377\377\223\377\377\377" \
- "\374\376\377\374\1\0\301\350\203\25\274\344\1\336\367\374\244\376\377" \
- "\374\1I\320\354\203\25\274\344\1\231\342\362\213\376\377\374\1M\323\357" \
- "\203\25\274\344\1\206\340\363\233\376\377\374\1\206\340\363\203\25\274" \
- "\344\1k\327\357\226\376\377\374\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\223\377\377\377\374\376\377\374\1\0\301\350\203\25\274\344\1" \
- "\336\367\374\244\376\377\374\1I\320\354\203\25\274\344\1\231\342\362" \
- "\213\376\377\374\1M\323\357\203\25\274\344\1\206\340\363\233\376\377" \
- "\374\1\206\340\363\203\25\274\344\1k\327\357\226\376\377\374\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\223\377\377\377\374\376\377" \
- "\374\1\0\301\350\203\25\274\344\1\336\367\374\244\376\377\374\1I\320" \
- "\354\203\25\274\344\1\231\342\362\213\376\377\374\1M\323\357\203\25\274" \
- "\344\1\206\340\363\233\376\377\374\1\206\340\363\203\25\274\344\1k\327" \
- "\357\226\376\377\374\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\223\377\377\377\232\376\377\374\7\311\316\321\211\230\236j}\210cu\200" \
- "n\201\214\233\243\253\333\340\343\206\376\377\374\2\236\246\256dv\201" \
- "\205]oz\4_q|k~\211\222\241\247\337\344\346\210\376\377\374\7\322\330" \
- "\332\222\241\247l\177\212cu\200k~\211\222\241\247\322\330\332\206\376" \
- "\377\374\2\257\267\277gy\204\205]oz\4^p{i{\206\211\230\236\325\332\334" \
- "\210\376\377\374\10\313\360\371\\\326\355%\305\347\0\276\345\0\302\351" \
- "G\317\353\237\350\367\370\376\377\205\376\377\374\2\235\346\365\0\304" \
- "\352\212\25\274\344\4\0\276\345\0\304\352M\323\357\315\362\373\205\376" \
- "\377\374\1\0\301\350\207\25\274\344\4\0\301\350B\314\347\233\345\364" \
- "\370\376\377\207\376\377\374\7\302\357\367M\323\357#\304\346\0\276\345" \
- "\0\304\352J\321\355\260\353\366\210\376\377\374\4\315\362\373M\323\357" \
- "#\304\346\31\275\345\207\25\274\344\1\231\342\362\204\376\377\374\4\323" \
- "\364\367\\\326\355%\305\347\0\276\345\207\25\274\344\1\206\340\363\205" \
- "\376\377\374\10\334\365\372m\332\361(\307\350\0\276\345\0\302\351B\314" \
- "\347\233\345\364\370\376\377\207\376\377\374\4\336\367\374m\332\361(" \
- "\307\350\0\276\345\207\25\274\344\1k\327\357\226\376\377\374\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\223\377\377\377\230\376\377" \
- "\374\2\356\363\366\202\221\227\206]oz\3^p{\247\257\267\373\375\372\203" \
- "\376\377\374\1\327\334\337\212]oz\2_q|\303\310\313\205\376\377\374\2" \
- "\364\371\374\222\241\247\207]oz\2\222\241\247\370\372\367\203\376\377" \
- "\374\2\353\360\363^p{\212]oz\1\254\264\274\205\376\377\374\2\370\376" \
- "\377m\332\361\207\25\274\344\2B\314\347\350\371\377\203\376\377\374\2" \
- "\370\376\377\0\276\345\216\25\274\344\2\31\275\345\235\346\365\204\376" \
- "\377\374\1\0\301\350\212\25\274\344\2B\314\347\357\374\375\204\376\377" \
- "\374\2\367\375\377Z\325\354\207\25\274\344\2G\317\353\357\374\375\205" \
- "\376\377\374\2\235\346\365\31\275\345\212\25\274\344\1\231\342\362\203" \
- "\376\377\374\2\256\350\364\0\276\345\212\25\274\344\1\206\340\363\204" \
- "\376\377\374\2\206\340\363\31\275\345\206\25\274\344\2*\310\351\335\366" \
- "\373\205\376\377\374\2\302\357\367\0\301\350\212\25\274\344\1k\327\357" \
- "\226\376\377\374\377\377\377\377\377\377\377\377\377\377\377\377\223" \
- "\377\377\377\227\376\377\374\2\364\371\374u\210\224\211]oz\1\242\252" \
- "\262\203\376\377\374\1\306\314\316\213]oz\2_q|\325\332\334\203\376\377" \
- "\374\2\373\375\372\210\227\235\211]oz\2\211\230\236\373\375\372\202\376" \
- "\377\374\1\330\335\340\214]oz\1\304\311\314\204\376\377\374\1i\326\356" \
- "\211\25\274\344\2*\310\351\366\373\376\202\376\377\374\1\357\374\375" \
- "\220\25\274\344\2\0\276\345\274\355\373\203\376\377\374\1\0\301\350\213" \
- "\25\274\344\1M\323\357\204\376\377\374\1L\322\356\211\25\274\344\2""3" \
- "\314\355\370\376\377\203\376\377\374\2\313\360\371\0\276\345\213\25\274" \
- "\344\1\231\342\362\202\376\377\374\2\333\364\371\0\276\345\213\25\274" \
- "\344\1\206\340\363\203\376\377\374\1\206\340\363\211\25\274\344\2\0\304" \
- "\352\347\370\376\203\376\377\374\2\356\373\374\0\304\352\213\25\274\344" \
- "\1k\327\357\226\376\377\374\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\223\377\377\377\227\376\377\374\1\251\261\271\213]oz\1\322\330\332" \
- "\202\376\377\374\1\306\314\316\214]oz\1\200\217\225\203\376\377\374\1" \
- "\277\304\306\213]oz\1\277\304\306\202\376\377\374\1\330\335\340\214]" \
- "oz\2j}\210\373\375\372\202\376\377\374\1\273\354\371\213\25\274\344\1" \
- "n\333\362\202\376\377\374\1\357\374\375\221\25\274\344\1""1\313\354\203" \
- "\376\377\374\1\0\301\350\214\25\274\344\1\332\363\370\202\376\377\374" \
- "\1\237\350\367\213\25\274\344\1\206\340\363\203\376\377\374\1I\320\354" \
- "\214\25\274\344\1\231\342\362\202\376\377\374\1M\323\357\214\25\274\344" \
- "\1\206\340\363\202\376\377\374\2\320\365\376\31\275\345\212\25\274\344" \
- "\1L\322\356\203\376\377\374\1\203\334\360\214\25\274\344\1k\327\357\226" \
- "\376\377\374\377\377\377\377\377\377\377\377\377\377\377\377\223\377" \
- "\377\377\226\376\377\374\2\370\372\367cu\200\203]oz\5s\206\221\317\324" \
- "\327\343\350\353\276\303\305ew\202\203]oz\1\210\227\235\202\376\377\374" \
- "\1\306\314\316\203]oz\1y\214\230\203\347\355\357\2\346\354\356\264\274" \
- "\305\203]oz\2^p{\357\365\367\202\376\377\374\1p\203\216\203]oz\5k~\211" \
- "\307\315\317\343\350\353\306\314\316k~\211\203]oz\1p\203\216\202\376" \
- "\377\374\1\330\335\340\203]oz\1k~\211\204\347\355\357\2\304\311\314_" \
- "q|\203]oz\1\334\342\344\202\376\377\374\1B\314\347\203\25\274\344\5\0" \
- "\301\350\233\345\364\333\364\371\260\353\366(\307\350\203\25\274\344" \
- "\4\0\276\345\367\375\377\376\377\374\357\374\375\204\25\274\344\203\335" \
- "\366\373\204\25\274\344\3\302\357\367\335\366\373\206\340\363\204\25" \
- "\274\344\1\356\373\374\202\376\377\374\1\0\301\350\203\25\274\344\1\302" \
- "\357\367\203\335\366\373\2\302\357\367\0\304\352\203\25\274\344\1\207" \
- "\341\364\202\376\377\374\1,\311\352\203\25\274\344\5\0\302\351\236\347" \
- "\366\332\363\370\256\350\364#\304\346\203\25\274\344\5\0\302\351\370" \
- "\376\377\376\377\374\370\376\377\0\276\345\203\25\274\344\1w\333\356" \
- "\204\335\366\373\1B\314\347\203\25\274\344\1\231\342\362\202\376\377" \
- "\374\1\0\276\345\203\25\274\344\2m\332\361\334\365\372\203\335\366\373" \
- "\1I\320\354\203\25\274\344\1\206\340\363\202\376\377\374\1i\326\356\203" \
- "\25\274\344\5\0\276\345\206\340\363\323\364\367\273\354\3711\313\354" \
- "\204\25\274\344\1\335\366\373\202\376\377\374\1,\311\352\203\25\274\344" \
- "\2L\322\356\332\363\370\203\335\366\373\1n\333\362\203\25\274\344\1k" \
- "\327\357\226\376\377\374\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\223\377\377\377\226\376\377\374\1\324\331\333\203]oz\2cu\200\353" \
- "\360\363\203\376\377\374\1\307\315\317\203]oz\4as~\372\374\371\376\377" \
- "\374\306\314\316\203]oz\1\202\221\227\205\376\377\374\1hz\205\203]oz" \
- "\3\341\346\351\376\377\374\347\355\357\203]oz\2^p{\335\343\345\203\376" \
- "\377\374\1\333\340\343\204]oz\3\347\355\357\376\377\374\330\335\340\203" \
- "]oz\1m\200\213\205\376\377\374\1~\215\223\203]oz\4\313\320\322\376\377" \
- "\374\370\376\377\31\275\345\203\25\274\344\1\256\350\364\203\376\377" \
- "\374\2\336\367\374\0\276\345\203\25\274\344\3\274\355\373\376\377\374" \
- "\357\374\375\204\25\274\344\203\376\377\374\204\25\274\344\1\336\367" \
- "\374\202\376\377\374\1\0\301\350\203\25\274\344\1\323\364\367\202\376" \
- "\377\374\1\0\301\350\203\25\274\344\1\336\367\374\204\376\377\374\1n" \
- "\333\362\203\25\274\344\3m\332\361\376\377\374\356\373\374\204\25\274" \
- "\344\1\274\355\373\203\376\377\374\2\323\364\367\0\276\345\203\25\274" \
- "\344\3\315\362\373\376\377\374\335\366\373\203\25\274\344\2\0\276\345" \
- "\370\376\377\204\376\377\374\1I\320\354\203\25\274\344\3\231\342\362" \
- "\376\377\374\366\373\376\204\25\274\344\1\366\373\376\204\376\377\374" \
- "\1M\323\357\203\25\274\344\1\206\340\363\202\376\377\374\1%\305\347\203" \
- "\25\274\344\1\207\341\364\203\376\377\374\2\360\375\376\0\304\352\203" \
- "\25\274\344\1\233\345\364\202\376\377\374\1\0\301\350\203\25\274\344" \
- "\1\323\364\367\204\376\377\374\1\206\340\363\203\25\274\344\1k\327\357" \
- "\226\376\377\374\377\377\377\377\377\377\377\377\377\377\377\377\223" \
- "\377\377\377\226\376\377\374\1\303\310\313\203]oz\1\204\223\231\204\376" \
- "\377\374\2\370\372\367_q|\203]oz\3\354\361\364\376\377\374\306\314\316" \
- "\203]oz\1\202\221\227\205\376\377\374\1m\200\213\203]oz\3\341\346\351" \
- "\376\377\374\326\333\335\203]oz\1n\201\214\205\376\377\374\1l\177\212" \
- "\203]oz\3\327\334\337\376\377\374\330\335\340\203]oz\1m\200\213\205\376" \
- "\377\374\1\202\221\227\203]oz\3\306\314\316\376\377\374\337\370\375\203" \
- "\25\274\344\2\31\275\345\370\376\377\204\376\377\374\1""3\314\355\203" \
- "\25\274\344\3\236\347\366\376\377\374\357\374\375\204\25\274\344\203" \
- "\376\377\374\204\25\274\344\1\336\367\374\202\376\377\374\1#\304\346" \
- "\203\25\274\344\1\316\363\374\202\376\377\374\1\0\301\350\203\25\274" \
- "\344\1\336\367\374\204\376\377\374\1w\333\356\203\25\274\344\3k\327\357" \
- "\376\377\374\315\362\373\203\25\274\344\1\0\301\350\205\376\377\374\1" \
- "*\310\351\203\25\274\344\3\261\354\367\376\377\374\316\363\374\203\25" \
- "\274\344\1\0\301\350\205\376\377\374\1I\320\354\203\25\274\344\3\231" \
- "\342\362\376\377\374\357\374\375\204\25\274\344\205\376\377\374\1M\323" \
- "\357\203\25\274\344\1\206\340\363\202\376\377\374\1\31\275\345\203\25" \
- "\274\344\1\335\366\373\204\376\377\374\1M\323\357\203\25\274\344\1\203" \
- "\334\360\202\376\377\374\1\0\301\350\203\25\274\344\1\336\367\374\204" \
- "\376\377\374\1\206\340\363\203\25\274\344\1k\327\357\226\376\377\374" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\223\377\377\377\226" \
- "\376\377\374\1\277\304\306\203]oz\1\211\230\236\205\376\377\374\1ew\202" \
- "\203]oz\3\352\357\362\376\377\374\306\314\316\203]oz\1\202\221\227\205" \
- "\376\377\374\1m\200\213\203]oz\3\341\346\351\376\377\374\317\324\327" \
- "\203]oz\1as~\205o\202\215\1as~\203]oz\3\317\324\327\376\377\374\330\335" \
- "\340\203]oz\1m\200\213\205\376\377\374\1\202\221\227\203]oz\3\306\314" \
- "\316\376\377\374\336\367\374\203\25\274\344\1\0\276\345\205%\305\347" \
- "\1\0\276\345\203\25\274\344\3\231\342\362\376\377\374\357\374\375\204" \
- "\25\274\344\203\376\377\374\204\25\274\344\1\336\367\374\202\376\377" \
- "\374\1#\304\346\203\25\274\344\1\316\363\374\202\376\377\374\1\0\301" \
- "\350\203\25\274\344\1\336\367\374\204\376\377\374\1w\333\356\203\25\274" \
- "\344\3k\327\357\376\377\374\302\357\367\203\25\274\344\1\31\275\345\205" \
- "%\305\347\1\0\276\345\203\25\274\344\3\237\350\367\376\377\374\316\363" \
- "\374\203\25\274\344\1\0\301\350\205\376\377\374\1I\320\354\203\25\274" \
- "\344\3\231\342\362\376\377\374\357\374\375\204\25\274\344\205\376\377" \
- "\374\1M\323\357\203\25\274\344\1\206\340\363\202\376\377\374\204\25\274" \
- "\344\1#\304\346\204%\305\347\1\0\301\350\203\25\274\344\1w\333\356\202" \
- "\376\377\374\1\0\301\350\203\25\274\344\1\336\367\374\204\376\377\374" \
- "\1\206\340\363\203\25\274\344\1k\327\357\226\376\377\374\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\223\377\377\377\226\376\377\374" \
- "\1\277\304\306\203]oz\1\211\230\236\205\376\377\374\1ew\202\203]oz\3" \
- "\352\357\362\376\377\374\306\314\316\203]oz\1\202\221\227\205\376\377" \
- "\374\1m\200\213\203]oz\3\341\346\351\376\377\374\317\324\327\215]oz\3" \
- "\317\324\327\376\377\374\330\335\340\203]oz\1m\200\213\205\376\377\374" \
- "\1\202\221\227\203]oz\3\306\314\316\376\377\374\336\367\374\215\25\274" \
- "\344\3\231\342\362\376\377\374\357\374\375\204\25\274\344\203\376\377" \
- "\374\204\25\274\344\1\336\367\374\202\376\377\374\1#\304\346\203\25\274" \
- "\344\1\316\363\374\202\376\377\374\1\0\301\350\203\25\274\344\1\336\367" \
- "\374\204\376\377\374\1w\333\356\203\25\274\344\3k\327\357\376\377\374" \
- "\302\357\367\215\25\274\344\3\237\350\367\376\377\374\316\363\374\203" \
- "\25\274\344\1\0\301\350\205\376\377\374\1I\320\354\203\25\274\344\3\231" \
- "\342\362\376\377\374\357\374\375\204\25\274\344\205\376\377\374\1M\323" \
- "\357\203\25\274\344\1\206\340\363\202\376\377\374\215\25\274\344\1w\333" \
- "\356\202\376\377\374\1\0\301\350\203\25\274\344\1\336\367\374\204\376" \
- "\377\374\1\206\340\363\203\25\274\344\1k\327\357\226\376\377\374\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\223\377\377\377\226\376" \
- "\377\374\1\277\304\306\203]oz\1\211\230\236\205\376\377\374\1ew\202\203" \
- "]oz\3\352\357\362\376\377\374\306\314\316\203]oz\1\202\221\227\205\376" \
- "\377\374\1m\200\213\203]oz\3\341\346\351\376\377\374\317\324\327\215" \
- "]oz\3\317\324\327\376\377\374\330\335\340\203]oz\1m\200\213\205\376\377" \
- "\374\1\202\221\227\203]oz\3\306\314\316\376\377\374\336\367\374\215\25" \
- "\274\344\3\231\342\362\376\377\374\357\374\375\204\25\274\344\203\376" \
- "\377\374\204\25\274\344\1\336\367\374\202\376\377\374\1#\304\346\203" \
- "\25\274\344\1\316\363\374\202\376\377\374\1\0\301\350\203\25\274\344" \
- "\1\336\367\374\204\376\377\374\1w\333\356\203\25\274\344\3k\327\357\376" \
- "\377\374\302\357\367\215\25\274\344\3\237\350\367\376\377\374\316\363" \
- "\374\203\25\274\344\1\0\301\350\205\376\377\374\1I\320\354\203\25\274" \
- "\344\3\231\342\362\376\377\374\357\374\375\204\25\274\344\205\376\377" \
- "\374\1M\323\357\203\25\274\344\1\206\340\363\202\376\377\374\215\25\274" \
- "\344\1w\333\356\202\376\377\374\1\0\301\350\203\25\274\344\1\336\367" \
- "\374\204\376\377\374\1\206\340\363\203\25\274\344\1k\327\357\226\376" \
- "\377\374\377\377\377\377\377\377\377\377\377\377\377\377\223\377\377" \
- "\377\226\376\377\374\1\277\304\306\203]oz\1\211\230\236\205\376\377\374" \
- "\1ew\202\203]oz\3\352\357\362\376\377\374\306\314\316\203]oz\1\202\221" \
- "\227\205\376\377\374\1m\200\213\203]oz\3\341\346\351\376\377\374\317" \
- "\324\327\215]oz\3\317\324\327\376\377\374\330\335\340\203]oz\1m\200\213" \
- "\205\376\377\374\1\202\221\227\203]oz\3\306\314\316\376\377\374\336\367" \
- "\374\215\25\274\344\3\231\342\362\376\377\374\357\374\375\204\25\274" \
- "\344\203\376\377\374\204\25\274\344\1\336\367\374\202\376\377\374\1#" \
- "\304\346\203\25\274\344\1\316\363\374\202\376\377\374\1\0\301\350\203" \
- "\25\274\344\1\336\367\374\204\376\377\374\1w\333\356\203\25\274\344\3" \
- "k\327\357\376\377\374\302\357\367\215\25\274\344\3\237\350\367\376\377" \
- "\374\316\363\374\203\25\274\344\1\0\301\350\205\376\377\374\1I\320\354" \
- "\203\25\274\344\3\231\342\362\376\377\374\357\374\375\204\25\274\344" \
- "\205\376\377\374\1M\323\357\203\25\274\344\1\206\340\363\202\376\377" \
- "\374\215\25\274\344\1w\333\356\202\376\377\374\1\0\301\350\203\25\274" \
- "\344\1\336\367\374\204\376\377\374\1\206\340\363\203\25\274\344\1k\327" \
- "\357\226\376\377\374\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\223\377\377\377\226\376\377\374\1\300\305\310\203]oz\1\211\230\236\205" \
- "\376\377\374\1bt\177\203]oz\3\352\357\362\376\377\374\306\314\316\203" \
- "]oz\1\202\221\227\205\376\377\374\1m\200\213\203]oz\3\341\346\351\376" \
- "\377\374\317\324\327\203]oz\1p\203\216\211\335\343\345\3\366\370\364" \
- "\376\377\374\330\335\340\203]oz\1m\200\213\205\376\377\374\1\202\221" \
- "\227\203]oz\3\306\314\316\376\377\374\336\367\374\203\25\274\344\1\0" \
- "\276\345\211\314\361\372\3\350\371\377\376\377\374\357\374\375\204\25" \
- "\274\344\203\376\377\374\204\25\274\344\1\336\367\374\202\376\377\374" \
- "\1#\304\346\203\25\274\344\1\316\363\374\202\376\377\374\1\0\301\350" \
- "\203\25\274\344\1\336\367\374\204\376\377\374\1w\333\356\203\25\274\344" \
- "\3k\327\357\376\377\374\302\357\367\203\25\274\344\1\0\302\351\211\314" \
- "\361\372\3\356\373\374\376\377\374\316\363\374\203\25\274\344\1\0\301" \
- "\350\205\376\377\374\1I\320\354\203\25\274\344\3\231\342\362\376\377" \
- "\374\357\374\375\204\25\274\344\205\376\377\374\1M\323\357\203\25\274" \
- "\344\1\206\340\363\202\376\377\374\204\25\274\344\1\301\356\366\210\314" \
- "\361\372\1\337\370\375\202\376\377\374\1\0\301\350\203\25\274\344\1\336" \
- "\367\374\204\376\377\374\1\206\340\363\203\25\274\344\1k\327\357\226" \
- "\376\377\374\377\377\377\377\377\377\377\377\377\377\377\377\223\377" \
- "\377\377\226\376\377\374\1\314\321\324\203]oz\2l\177\212\372\374\371" \
- "\203\376\377\374\1\333\340\343\203]oz\4^p{\366\373\376\376\377\374\306" \
- "\314\316\203]oz\1\202\221\227\205\376\377\374\1k~\211\203]oz\3\341\346" \
- "\351\376\377\374\320\326\330\203]oz\1t\207\222\213\376\377\374\1\330" \
- "\335\340\203]oz\1m\200\213\205\376\377\374\1\202\221\227\203]oz\3\306" \
- "\314\316\376\377\374\336\367\374\203\25\274\344\1\0\276\345\213\376\377" \
- "\374\1\357\374\375\204\25\274\344\203\376\377\374\204\25\274\344\1\336" \
- "\367\374\202\376\377\374\1#\304\346\203\25\274\344\1\316\363\374\202" \
- "\376\377\374\1\0\301\350\203\25\274\344\1\336\367\374\204\376\377\374" \
- "\1n\333\362\203\25\274\344\3k\327\357\376\377\374\303\361\370\203\25" \
- "\274\344\1\0\304\352\213\376\377\374\1\332\363\370\203\25\274\344\1\0" \
- "\276\345\205\376\377\374\1I\320\354\203\25\274\344\3\231\342\362\376" \
- "\377\374\357\374\375\204\25\274\344\1\370\376\377\204\376\377\374\1M" \
- "\323\357\203\25\274\344\1\206\340\363\202\376\377\374\204\25\274\344" \
- "\1\356\373\374\213\376\377\374\1\0\301\350\203\25\274\344\1\335\366\373" \
- "\204\376\377\374\1\206\340\363\203\25\274\344\1k\327\357\226\376\377" \
- "\374\377\377\377\377\377\377\377\377\377\377\377\377\223\377\377\377" \
- "\226\376\377\374\2\353\360\363^p{\203]oz\5\236\246\256\366\373\376\376" \
- "\377\374\347\355\357w\212\226\203]oz\1~\215\223\202\376\377\374\1\306" \
- "\314\316\203]oz\1\202\221\227\204\376\377\374\2\335\343\345^p{\203]o" \
- "z\3\351\356\360\376\377\374\335\343\345\203]oz\2bt\177\343\350\353\212" \
- "\376\377\374\1\330\335\340\203]oz\1m\200\213\205\376\377\374\1\202\221" \
- "\227\203]oz\3\306\314\316\376\377\374\356\373\374\204\25\274\344\1\274" \
- "\355\373\212\376\377\374\1\357\374\375\204\25\274\344\203\376\377\374" \
- "\204\25\274\344\1\336\367\374\202\376\377\374\1#\304\346\203\25\274\344" \
- "\1\316\363\374\202\376\377\374\1\0\301\350\203\25\274\344\1\336\367\374" \
- "\203\376\377\374\2\367\375\377/\312\353\203\25\274\344\3\204\335\361" \
- "\376\377\374\334\365\372\203\25\274\344\2\0\276\345\314\361\372\212\376" \
- "\377\374\1\356\373\374\204\25\274\344\1\274\355\373\204\376\377\374\1" \
- "I\320\354\203\25\274\344\4\231\342\362\376\377\374\370\376\377\0\276" \
- "\345\203\25\274\344\1\257\351\365\204\376\377\374\1M\323\357\203\25\274" \
- "\344\1\206\340\363\202\376\377\374\1\0\301\350\203\25\274\344\1\233\345" \
- "\364\213\376\377\374\1#\304\346\203\25\274\344\1\207\341\364\204\376" \
- "\377\374\1\206\340\363\203\25\274\344\1k\327\357\226\376\377\374\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\223\377\377\377\227\376" \
- "\377\374\1\211\230\236\204]oz\3cu\200p\203\216^p{\204]oz\1\277\304\306" \
- "\202\376\377\374\1\306\314\316\203]oz\1dv\201\203~\215\223\2v\211\225" \
- "as~\203]oz\1n\201\214\202\376\377\374\2\371\373\370dv\201\203]oz\2bt" \
- "\177w\212\226\205~\215\223\2\203\222\230\304\311\314\202\376\377\374" \
- "\1\330\335\340\203]oz\1m\200\213\205\376\377\374\1\202\221\227\203]o" \
- "z\1\306\314\316\202\376\377\374\1(\307\350\203\25\274\344\2\0\276\345" \
- "*\310\351\205,\311\352\2""1\313\354\207\341\364\202\376\377\374\1\357" \
- "\374\375\204\25\274\344\203\376\377\374\204\25\274\344\1\336\367\374" \
- "\202\376\377\374\1#\304\346\203\25\274\344\1\316\363\374\202\376\377" \
- "\374\1\0\301\350\203\25\274\344\1*\310\351\203,\311\352\1\0\302\351\204" \
- "\25\274\344\4\274\355\373\376\377\374\370\376\377\0\302\351\203\25\274" \
- "\344\2\31\275\345*\310\351\205,\311\352\2B\314\347\231\342\362\203\376" \
- "\377\374\1%\305\347\203\25\274\344\2\0\276\345*\310\351\203,\311\352" \
- "\1\0\301\350\203\25\274\344\1\231\342\362\202\376\377\374\1/\312\353" \
- "\203\25\274\344\2\0\276\345*\310\351\203,\311\352\1\0\301\350\203\25" \
- "\274\344\1\206\340\363\202\376\377\374\1G\317\353\204\25\274\344\1(\307" \
- "\350\205,\311\352\3""1\313\354w\333\356\370\376\377\202\376\377\374\1" \
- "M\323\357\204\25\274\344\1(\307\350\203,\311\352\1\0\304\352\203\25\274" \
- "\344\1k\327\357\226\376\377\374\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\223\377\377\377\227\376\377\374\2\343\350\353ew\202\211]oz\2" \
- "\177\216\224\373\375\372\202\376\377\374\1\306\314\316\214]oz\1\270\300" \
- "\311\203\376\377\374\1\253\263\273\213]oz\4_q|\352\357\362\376\377\374" \
- "\330\335\340\203]oz\1m\200\213\205\376\377\374\1\202\221\227\203]oz\1" \
- "\306\314\316\202\376\377\374\1\233\345\364\214\25\274\344\3\301\356\366" \
- "\376\377\374\357\374\375\204\25\274\344\203\376\377\374\204\25\274\344" \
- "\1\336\367\374\202\376\377\374\1#\304\346\203\25\274\344\1\316\363\374" \
- "\202\376\377\374\1\0\301\350\213\25\274\344\2,\311\352\370\376\377\202" \
- "\376\377\374\1\206\340\363\214\25\274\344\1\316\363\374\202\376\377\374" \
- "\1\233\345\364\214\25\274\344\1\231\342\362\202\376\377\374\1\250\347" \
- "\371\214\25\274\344\1\206\340\363\202\376\377\374\1\273\354\371\214\25" \
- "\274\344\1\236\347\366\202\376\377\374\2\313\360\371\0\276\345\213\25" \
- "\274\344\1k\327\357\226\376\377\374\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\223\377\377\377\230\376\377\374\2\325\332\334gy\204\207" \
- "]oz\2x\213\227\354\361\364\203\376\377\374\1\306\314\316\213]oz\1\225" \
- "\235\245\204\376\377\374\2\372\374\371~\221\235\213]oz\3\333\340\343" \
- "\376\377\374\342\347\352\203]oz\1t\207\222\205\376\377\374\1\211\230" \
- "\236\203]oz\1\317\324\327\203\376\377\374\1M\323\357\213\25\274\344\4" \
- "\237\350\367\376\377\374\367\375\377\0\276\345\202\25\274\344\1\0\276" \
- "\345\203\376\377\374\1\0\301\350\203\25\274\344\1\357\374\375\202\376" \
- "\377\374\1(\307\350\203\25\274\344\1\335\366\373\202\376\377\374\1\0" \
- "\302\351\212\25\274\344\2\0\301\350\333\364\371\203\376\377\374\2\370" \
- "\376\377G\317\353\213\25\274\344\1\273\354\371\203\376\377\374\1M\323" \
- "\357\213\25\274\344\1\237\350\367\203\376\377\374\1k\327\357\213\25\274" \
- "\344\1\231\342\362\203\376\377\374\1w\333\356\213\25\274\344\1\206\340" \
- "\363\203\376\377\374\1\207\341\364\213\25\274\344\1n\333\362\226\376" \
- "\377\374\377\377\377\377\377\377\377\377\377\377\377\377\223\377\377" \
- "\377\231\376\377\374\3\351\356\360\231\241\251bt\177\203]oz\3k~\211\257" \
- "\267\277\370\372\367\204\376\377\374\1\306\314\316\211]oz\3gy\204\261" \
- "\271\302\373\375\372\205\376\377\374\3\371\373\370\252\262\272dv\201" \
- "\210]oz\2x\213\227\372\374\371\202\376\377\374\4}\220\233]oz^p{\266\277" \
- "\307\205\376\377\374\5\311\316\321_q|]ozq\204\217\370\372\367\203\376" \
- "\377\374\3\370\376\377\211\342\365\0\301\350\210\25\274\344\2\0\304\352" \
- "\356\373\374\202\376\377\374\1M\323\357\202\25\274\344\1m\332\361\203" \
- "\376\377\374\1n\333\362\202\25\274\344\1L\322\356\203\376\377\374\1\206" \
- "\340\363\202\25\274\344\2B\314\347\370\376\377\202\376\377\374\1w\333" \
- "\356\210\25\274\344\3\0\276\345L\322\356\336\367\374\205\376\377\374" \
- "\3\370\376\377\204\335\361\0\301\350\210\25\274\344\2(\307\350\366\373" \
- "\376\203\376\377\374\3\370\376\377\211\342\365\0\301\350\210\25\274\344" \
- "\2#\304\346\356\373\374\204\376\377\374\2\233\345\364\0\302\351\210\25" \
- "\274\344\2\0\302\351\337\370\375\204\376\377\374\2\237\350\367#\304\346" \
- "\210\25\274\344\2\0\301\350\333\364\371\204\376\377\374\2\257\351\365" \
- "%\305\347\210\25\274\344\2\0\276\345\314\361\372\226\376\377\374\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\223\377\377\377\233\376" \
- "\377\374\4\365\372\375\335\343\345\325\332\334\343\350\353\207\376\377" \
- "\374\1\306\314\316\203]oz\1u\210\224\204\324\331\333\2\335\343\345\373" \
- "\375\372\211\376\377\374\2\372\374\371\334\342\344\206\324\331\333\2" \
- "\331\336\341\372\374\371\203\376\377\374\3\373\375\372\332\337\342\343" \
- "\350\353\207\376\377\374\3\351\356\360\330\335\340\366\373\376\206\376" \
- "\377\374\2\370\376\377\316\363\374\206\273\354\371\2\302\357\367\366" \
- "\373\376\204\376\377\374\2\314\361\372\315\362\373\205\376\377\374\2" \
- "\316\363\374\313\360\371\205\376\377\374\3\332\363\370\312\357\370\370" \
- "\376\377\204\376\377\374\1\316\363\374\206\273\354\371\2\302\357\367" \
- "\356\373\374\211\376\377\374\2\370\376\377\314\361\372\206\273\354\371" \
- "\2\302\357\367\367\375\377\206\376\377\374\2\370\376\377\316\363\374" \
- "\206\273\354\371\2\302\357\367\366\373\376\206\376\377\374\2\370\376" \
- "\377\316\363\374\206\273\354\371\2\301\356\366\357\374\375\207\376\377" \
- "\374\1\332\363\370\206\273\354\371\2\301\356\366\356\373\374\207\376" \
- "\377\374\2\333\364\371\274\355\373\205\273\354\371\2\274\355\373\350" \
- "\371\377\227\376\377\374\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\223\377\377\377\246\376\377\374\1\306\314\316\203]oz\1\202\221\227" \
- "\377\376\377\374\306\376\377\374\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\223\377\377\377\246\376\377\374\1\306\314\316\203]oz\1\202" \
- "\221\227\377\376\377\374\306\376\377\374\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\223\377\377\377\246\376\377\374\1\306\314\316\203" \
- "]oz\1\202\221\227\377\376\377\374\306\376\377\374\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\223\377\377\377\246\376\377\374\1\306\314" \
- "\316\203]oz\1\202\221\227\377\376\377\374\306\376\377\374\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\223\377\377\377\246\376\377\374" \
- "\1\315\322\325\203]oz\1\211\230\236\377\376\377\374\306\376\377\374\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\223\377\377\377\246\376" \
- "\377\374\2\360\366\370i{\206\202]oz\1\302\307\312\377\376\377\374\306" \
- "\376\377\374\377\377\377\377\377\377\377\377\377\377\377\377\223\377" \
- "\377\377\247\376\377\374\3\357\365\367\313\320\322\335\343\345\377\376" \
- "\377\374\307\376\377\374\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\223\377\377\377\377\376\377\374\361\376\377\374\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\223\377\377\377\377\376\377\374\361" \
- "\376\377\374\377\377\377\377\377\377\377\377\377\377\377\377\223\377" \
- "\377\377\377\376\377\374\361\376\377\374\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\223\377\377\377\377\376\377\374\361\376\377\374" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\223\377\377\377\377" \
- "\376\377\374\361\376\377\374\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\223\377\377\377\377\376\377\374\361\376\377\374\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\223\377\377\377\377\376\377\374" \
- "\361\376\377\374\377\377\377\377\377\377\377\377\377\377\377\377\223" \
- "\377\377\377\377\376\377\374\361\376\377\374\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\223\377\377\377\377\376\377\374\361\376\377" \
- "\374\377\377\377\377\377\377\377\377\377\377\377\377\223\377\377\377" \
- "\377\376\377\374\361\376\377\374\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\223\377\377\377\377\376\377\374\361\376\377\374\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\223\377\377\377\377\376\377" \
- "\374\361\376\377\374\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\223\377\377\377\377\376\377\374\361\376\377\374\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\223\377\377\377\377\376\377\374\361\376" \
- "\377\374\377\377\377\377\377\377\377\377\377\377\377\377\223\377\377" \
- "\377\377\376\377\374\361\376\377\374\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\223\377\377\377\377\376\377\374\361\376\377\374\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\223\377\377\377\377\376" \
- "\377\374\361\376\377\374\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" \
- "\377\377\377\377\232\377\377\377")
-
-
diff --git a/meta/recipes-core/psplash/files/psplash-poky-img.png b/meta/recipes-core/psplash/files/psplash-poky-img.png
new file mode 100644
index 0000000000..239db1186e
--- /dev/null
+++ b/meta/recipes-core/psplash/files/psplash-poky-img.png
Binary files differ
diff --git a/meta/recipes-core/psplash/files/psplash-poky-img.svg b/meta/recipes-core/psplash/files/psplash-poky-img.svg
new file mode 100644
index 0000000000..8d953983a9
--- /dev/null
+++ b/meta/recipes-core/psplash/files/psplash-poky-img.svg
@@ -0,0 +1,116 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<!-- Created with Inkscape (http://www.inkscape.org/) -->
+
+<svg
+ width="1280mm"
+ height="800mm"
+ viewBox="0 0 1280 800"
+ version="1.1"
+ id="svg5"
+ inkscape:version="1.1.2 (0a00cf5339, 2022-02-04)"
+ sodipodi:docname="psplash-poky-img.svg"
+ inkscape:export-filename="/home/mike/work/yocto/poky/meta/recipes-core/psplash/files/psplash-poky-img.png"
+ inkscape:export-xdpi="25.4"
+ inkscape:export-ydpi="25.4"
+ xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
+ xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
+ xmlns="http://www.w3.org/2000/svg"
+ xmlns:svg="http://www.w3.org/2000/svg">
+ <sodipodi:namedview
+ id="namedview7"
+ pagecolor="#ecece1"
+ bordercolor="#666666"
+ borderopacity="1.0"
+ inkscape:pageshadow="2"
+ inkscape:pageopacity="0"
+ inkscape:pagecheckerboard="0"
+ inkscape:document-units="mm"
+ showgrid="false"
+ units="mm"
+ inkscape:zoom="0.093984989"
+ inkscape:cx="2489.7593"
+ inkscape:cy="1005.4797"
+ inkscape:window-width="1654"
+ inkscape:window-height="1016"
+ inkscape:window-x="74"
+ inkscape:window-y="27"
+ inkscape:window-maximized="1"
+ inkscape:current-layer="layer1" />
+ <defs
+ id="defs2" />
+ <g
+ inkscape:label="Layer 1"
+ inkscape:groupmode="layer"
+ id="layer1">
+ <g
+ id="g10"
+ transform="matrix(0.90740741,0,0,-0.90740791,444,460.88448)"
+ style="stroke-width:0.0971934">
+ <g
+ id="g12"
+ transform="scale(0.1)"
+ style="stroke-width:0.0971934">
+ <path
+ inkscape:connector-curvature="0"
+ d="m 4320,994.227 c 0,-45.723 -36.96,-82.813 -82.81,-82.813 -45.72,0 -82.68,37.09 -82.68,82.813 0,45.713 36.96,82.793 82.68,82.793 45.85,0 82.81,-37.08 82.81,-82.793 v 0"
+ style="fill:#4597d9;fill-opacity:1;fill-rule:nonzero;stroke:none;stroke-width:0.0971934"
+ id="path14" />
+ <path
+ inkscape:connector-curvature="0"
+ d="M 796.738,1353.54 676.184,1421.84 433.375,929.832 174.371,1421.84 52.2461,1353.54 360.098,775.184 c -2.223,-5.477 -9.274,-20.625 -21.157,-45.704 -12.011,-23.906 -24.429,-47.792 -37.488,-71.718 -22.859,-41.274 -46.105,-74.84 -70.004,-100.961 -23.906,-27.16 -48.336,-48.84 -73.406,-65.172 C 133.098,475.301 107.105,462.898 79.9375,454.148 53.8125,445.52 27.168,438.461 0,432.98 L 55.5117,317.25 c 17.375,2.23 40.6211,7.719 70.0113,16.332 30.43,7.578 63.477,22.328 99.395,44.02 35.785,21.679 72.227,52.636 109.059,92.859 38.14,40.23 73.406,93.391 105.925,159.609 l 356.836,723.47"
+ style="fill:#000000;fill-opacity:1;fill-rule:nonzero;stroke:none;stroke-width:0.0971934"
+ id="path16" />
+ <path
+ inkscape:connector-curvature="0"
+ d="m 1455.8,993.445 c 0,-38.007 -6.52,-73.281 -19.59,-105.929 -13.07,-32.52 -30.96,-60.868 -53.68,-84.637 -21.81,-22.852 -47.8,-41.27 -78.23,-55.508 -29.4,-12.93 -61.39,-19.453 -96.14,-19.453 -34.74,0 -67.39,6.523 -97.69,19.453 -30.43,14.238 -57.08,32.656 -79.94,55.508 -21.68,23.769 -39.046,52.117 -52.112,84.637 -13.059,32.648 -19.59,67.922 -19.59,105.929 0,38.005 6.531,73.265 19.59,105.895 13.066,32.55 30.432,60.77 52.112,84.68 22.86,23.9 49.51,42.43 79.94,55.36 30.3,14.26 62.95,21.31 97.69,21.31 34.75,0 66.74,-7.05 96.14,-21.31 30.43,-12.93 56.42,-31.46 78.23,-55.36 22.72,-23.91 40.61,-52.13 53.68,-84.68 13.07,-32.63 19.59,-67.89 19.59,-105.895 m 151.51,0 c 0,56.425 -10.32,109.185 -30.95,158.035 -19.59,48.85 -47.28,91.18 -83.07,127.08 -35.92,35.8 -78.23,63.48 -127.08,83.08 -48.85,20.61 -101.62,30.95 -158.05,30.95 -56.42,0 -109.2,-10.34 -158.04,-30.95 -48.84,-19.6 -91.292,-47.28 -127.085,-83.08 -35.789,-35.9 -64.129,-78.23 -84.765,-127.08 -20.633,-48.85 -30.946,-101.61 -30.946,-158.035 0,-56.425 10.313,-109.199 30.946,-158.047 20.636,-47.804 48.976,-89.589 84.765,-125.39 35.793,-35.918 78.245,-64.121 127.085,-84.77 48.84,-19.59 101.62,-29.386 158.04,-29.386 56.43,0 109.2,9.796 158.05,29.386 48.85,20.649 91.16,48.852 127.08,84.77 35.79,35.801 63.48,77.586 83.07,125.39 20.63,48.848 30.95,101.622 30.95,158.047"
+ style="fill:#000000;fill-opacity:1;fill-rule:nonzero;stroke:none;stroke-width:0.0971934"
+ id="path18" />
+ <path
+ inkscape:connector-curvature="0"
+ d="m 3599.29,993.445 c 0,-38.007 -6.54,-73.281 -19.6,-105.929 -13.07,-32.52 -30.95,-60.868 -53.82,-84.637 -21.68,-22.852 -47.67,-41.27 -78.1,-55.508 -29.39,-12.93 -61.39,-19.453 -96.13,-19.453 -34.75,0 -67.4,6.523 -97.83,19.453 -30.3,14.238 -56.94,32.656 -79.8,55.508 -21.69,23.769 -39.06,52.117 -52.12,84.637 -13.06,32.648 -19.6,67.922 -19.6,105.929 0,38.005 6.54,73.265 19.6,105.895 13.06,32.55 30.43,60.77 52.12,84.68 22.86,23.9 49.5,42.43 79.8,55.36 30.43,14.26 63.08,21.31 97.83,21.31 34.74,0 66.74,-7.05 96.13,-21.31 30.43,-12.93 56.42,-31.46 78.1,-55.36 22.87,-23.91 40.75,-52.13 53.82,-84.68 13.06,-32.63 19.6,-67.89 19.6,-105.895 m 151.5,0 c 0,56.425 -10.32,109.185 -30.96,158.035 -19.59,48.85 -47.28,91.18 -83.06,127.08 -35.92,35.8 -78.23,63.48 -127.09,83.08 -48.85,20.61 -101.61,30.95 -158.04,30.95 -56.56,0 -109.19,-10.34 -158.05,-30.95 -48.84,-19.6 -91.3,-47.28 -127.08,-83.08 -35.79,-35.9 -64.13,-78.23 -84.77,-127.08 -20.63,-48.85 -30.95,-101.61 -30.95,-158.035 0,-56.425 10.32,-109.199 30.95,-158.047 20.64,-47.804 48.98,-89.589 84.77,-125.39 35.78,-35.918 78.24,-64.121 127.08,-84.77 48.86,-19.59 101.49,-29.386 158.05,-29.386 56.43,0 109.19,9.796 158.04,29.386 48.86,20.649 91.17,48.852 127.09,84.77 35.78,35.801 63.47,77.586 83.06,125.39 20.64,48.848 30.96,101.622 30.96,158.047"
+ style="fill:#000000;fill-opacity:1;fill-rule:nonzero;stroke:none;stroke-width:0.0971934"
+ id="path20" />
+ <path
+ inkscape:connector-curvature="0"
+ d="m 2676.51,1639.06 -145.11,-81.49 v -178.19 h -392.37 c -283.29,0 -454,-138.02 -454,-389.47 0,-390.531 406.85,-464.199 696.16,-324.57 l -60.99,118.34 c -228.97,-100.039 -482.49,-57.461 -482.49,208.847 0,171.893 89.48,265.933 297.8,265.933 H 2531.4 V 783.68 c 0,-209.899 240.45,-214.59 389.48,-128.535 l -57.34,113.261 c -95.87,-48.593 -187.03,-54.863 -187.03,38.262 v 451.792 h 198.53 v 120.92 h -198.53 v 259.68 0"
+ style="fill:#000000;fill-opacity:1;fill-rule:nonzero;stroke:none;stroke-width:0.0971934"
+ id="path22" />
+ <path
+ inkscape:connector-curvature="0"
+ d="m 1037.58,233.66 h 36.32 c 18.29,0.141 36.18,-12.93 36.18,-31.469 0,-31.089 -35.01,-35.8 -35.01,-35.8 l -37.49,-0.25 z m -52.365,44.41 V 3.91016 h 52.365 l 0.66,115.60984 c 93.38,-5.879 128.26,26.64 128.26,83.96 0,51.469 -46.11,74.59 -90.39,74.59 h -90.895 v 0"
+ style="fill:#000000;fill-opacity:1;fill-rule:nonzero;stroke:none;stroke-width:0.0971934"
+ id="path24" />
+ <path
+ inkscape:connector-curvature="0"
+ d="m 1986.62,140.809 c 0,-13.34 -2.49,-25.879 -7.45,-37.5 -4.58,-11.227 -11.1,-21.1488 -19.6,-29.6488 -8.49,-8.4805 -18.41,-15.1407 -29.78,-20.1211 -11.23,-4.5703 -23.5,-6.9102 -36.57,-6.9102 -13.19,0 -25.47,2.3399 -36.7,6.9102 -11.36,4.9804 -21.29,11.6406 -29.78,20.1211 -8.22,8.5 -14.76,18.4218 -19.59,29.6488 -4.96,11.621 -7.44,24.16 -7.44,37.5 0,13.32 2.48,25.722 7.44,37.089 4.83,11.481 11.37,21.543 19.59,30.043 8.49,8.469 18.42,15.02 29.78,19.719 11.23,4.828 23.51,7.309 36.7,7.309 13.07,0 25.34,-2.481 36.57,-7.309 11.37,-4.699 21.29,-11.25 29.78,-19.719 8.5,-8.5 15.02,-18.562 19.6,-30.043 4.96,-11.367 7.45,-23.769 7.45,-37.089 m 53.16,0 c 0,19.589 -3.8,37.871 -11.5,54.839 -7.45,17.25 -17.9,32.133 -31.35,44.672 -13.06,12.68 -28.61,22.602 -46.63,29.782 -17.76,7.468 -36.96,11.238 -57.47,11.238 -20.63,0 -39.96,-3.77 -57.99,-11.238 -17.76,-7.18 -33.17,-17.102 -46.24,-29.782 -13.19,-12.539 -23.5,-27.422 -31.34,-44.672 -7.46,-16.968 -11.11,-35.25 -11.11,-54.839 0,-19.868 3.65,-38.278 11.11,-55.2582 7.84,-16.9922 18.15,-31.8711 31.34,-44.6602 13.07,-12.6797 28.48,-22.6015 46.24,-29.789 18.03,-7.19144 37.36,-10.839881 57.99,-10.839881 20.51,0 39.71,3.648441 57.47,10.839881 18.02,7.1875 33.57,17.1093 46.63,29.789 13.45,12.7891 23.9,27.668 31.35,44.6602 7.7,16.9802 11.5,35.3902 11.5,55.2582"
+ style="fill:#000000;fill-opacity:1;fill-rule:nonzero;stroke:none;stroke-width:0.0971934"
+ id="path26" />
+ <path
+ inkscape:connector-curvature="0"
+ d="M 2253.58,277.57 V 69.75 c 0,-8.3594 -6.14,-17.6289 -16.32,-17.8906 l -16.32,-0.6602 0.91,-50.937481 15.28,-0.2500002 C 2288.33,-0.769531 2306.08,47.4102 2306.08,69.8906 V 277.16"
+ style="fill:#000000;fill-opacity:1;fill-rule:nonzero;stroke:none;stroke-width:0.0971934"
+ id="path28" />
+ <path
+ inkscape:connector-curvature="0"
+ d="M 2699.76,278.211 H 2524.99 V 4.96875 h 175.55 l -0.78,44.00005 H 2577.5 v 71.3322 h 102.28 v 44 H 2577.5 v 69.109 h 121.47 l 0.79,44.801"
+ style="fill:#000000;fill-opacity:1;fill-rule:nonzero;stroke:none;stroke-width:0.0971934"
+ id="path30" />
+ <path
+ inkscape:connector-curvature="0"
+ d="m 3102.69,212.641 c -89.46,49.109 -177.63,19.32 -177.63,-70.661 0,-72.4995 69.62,-128.2612 184.16,-68.3198 l 16.59,-44.7891 C 3015.83,-31.5898 2871.12,11.4883 2871.12,141.98 c 0,118.2 128.39,179.731 253.12,114.942 l -21.55,-44.281"
+ style="fill:#000000;fill-opacity:1;fill-rule:nonzero;stroke:none;stroke-width:0.0971934"
+ id="path32" />
+ <path
+ inkscape:connector-curvature="0"
+ d="m 3521.83,278.211 0.12,-44.801 h -87.24 V 4.19141 H 3382.2 V 233.41 h -83.71 l 1.17,44.801 h 222.17"
+ style="fill:#000000;fill-opacity:1;fill-rule:nonzero;stroke:none;stroke-width:0.0971934"
+ id="path34" />
+ <path
+ inkscape:connector-curvature="0"
+ d="m 1426.42,233.66 h 36.31 c 18.29,0.141 36.18,-12.93 36.18,-31.469 0,-31.089 -35,-35.8 -35,-35.8 l -37.49,-0.25 z m -52.25,44.41 V 3.91016 h 52.25 V 121.859 h 34.62 L 1521.63,3.91016 h 59.56 L 1508.96,136.102 c 28.61,12.789 43.5,35.269 43.5,67.378 0,53.559 -46.89,74.59 -98.49,74.59 h -79.8 v 0"
+ style="fill:#000000;fill-opacity:1;fill-rule:nonzero;stroke:none;stroke-width:0.0971934"
+ id="path36" />
+ </g>
+ </g>
+ </g>
+</svg>
diff --git a/meta/recipes-core/psplash/files/psplash-start.service b/meta/recipes-core/psplash/files/psplash-start.service
index 36c2bb38e0..bec9368427 100644
--- a/meta/recipes-core/psplash/files/psplash-start.service
+++ b/meta/recipes-core/psplash/files/psplash-start.service
@@ -2,6 +2,7 @@
Description=Start psplash boot splash screen
DefaultDependencies=no
RequiresMountsFor=/run
+ConditionFileIsExecutable=/usr/bin/psplash
[Service]
Type=notify
diff --git a/meta/recipes-core/psplash/files/psplash-systemd.service b/meta/recipes-core/psplash/files/psplash-systemd.service
index 082207f232..e93e3deb35 100644
--- a/meta/recipes-core/psplash/files/psplash-systemd.service
+++ b/meta/recipes-core/psplash/files/psplash-systemd.service
@@ -4,6 +4,7 @@ DefaultDependencies=no
After=psplash-start.service
Requires=psplash-start.service
RequiresMountsFor=/run
+ConditionFileIsExecutable=/usr/bin/psplash
[Service]
ExecStart=/usr/bin/psplash-systemd
diff --git a/meta/recipes-core/psplash/psplash_git.bb b/meta/recipes-core/psplash/psplash_git.bb
index edc0ac1d89..40937098e6 100644
--- a/meta/recipes-core/psplash/psplash_git.bb
+++ b/meta/recipes-core/psplash/psplash_git.bb
@@ -6,17 +6,17 @@ LICENSE = "GPL-2.0-or-later"
LIC_FILES_CHKSUM = "file://psplash.h;beginline=1;endline=8;md5=8f232c1e95929eacab37f00900580224"
DEPENDS = "gdk-pixbuf-native"
-SRCREV = "44afb7506d43cca15582b4c5b90ba5580344d75d"
-PV = "0.1+git${SRCPV}"
+SRCREV = "ecc1913756698d0c87ad8fa10e44b29537f09ad1"
+PV = "0.1+git"
-SRC_URI = "git://git.yoctoproject.org/${BPN};branch=master \
+SRC_URI = "git://git.yoctoproject.org/${BPN};branch=master;protocol=https \
file://psplash-init \
file://psplash-start.service \
file://psplash-systemd.service \
${SPLASH_IMAGES}"
UPSTREAM_CHECK_COMMITS = "1"
-SPLASH_IMAGES = "file://psplash-poky-img.h;outsuffix=default"
+SPLASH_IMAGES = "file://psplash-poky-img.png;outsuffix=default"
python __anonymous() {
oldpkgs = d.getVar("PACKAGES").split()
@@ -58,16 +58,19 @@ python __anonymous() {
d.setVarFlag("ALTERNATIVE_TARGET_%s" % ep, 'psplash', '${bindir}/%s' % p)
d.appendVar("RDEPENDS:%s" % ep, " %s" % pn)
if p == "psplash-default":
- d.appendVar("RRECOMMENDS:%s" % pn, " %s" % ep)
+ d.appendVar("RDEPENDS:%s" % pn, " %s" % ep)
}
S = "${WORKDIR}/git"
inherit autotools pkgconfig update-rc.d update-alternatives systemd
-PACKAGECONFIG ??= "${@bb.utils.filter('DISTRO_FEATURES', 'systemd', d)}"
+PACKAGECONFIG ??= "${@bb.utils.filter('DISTRO_FEATURES', 'systemd', d)} progress-bar fullscreen"
PACKAGECONFIG[systemd] = "--with-systemd,--without-systemd,systemd"
+PACKAGECONFIG[fullscreen] = "--enable-img-fullscreen"
+PACKAGECONFIG[startup-msg] = ",--disable-startup-msg"
+PACKAGECONFIG[progress-bar] = ",--disable-progress-bar"
ALTERNATIVE_PRIORITY = "100"
ALTERNATIVE_LINK_NAME[psplash] = "${bindir}/psplash"
diff --git a/meta/recipes-core/readline/readline.inc b/meta/recipes-core/readline/readline.inc
index 7f2f1a092b..4aefc5636d 100644
--- a/meta/recipes-core/readline/readline.inc
+++ b/meta/recipes-core/readline/readline.inc
@@ -18,7 +18,7 @@ SRC_URI += "file://inputrc"
inherit autotools texinfo
EXTRA_AUTORECONF += "--exclude=autoheader"
-EXTRA_OECONF += "bash_cv_termcap_lib=ncurses"
+EXTRA_OECONF += "bash_cv_termcap_lib=ncurses --with-shared-termcap-library"
LEAD_SONAME = "libreadline.so"
diff --git a/meta/recipes-core/readline/readline/configure-fix.patch b/meta/recipes-core/readline/readline/configure-fix.patch
deleted file mode 100644
index ef3104f8a6..0000000000
--- a/meta/recipes-core/readline/readline/configure-fix.patch
+++ /dev/null
@@ -1,35 +0,0 @@
-Upstream-Status: Pending
-
-Without this it fails to link against libtermcap causing various missing
-symbols issues.
-
-RP - 8/10/08
-
-Support 6.3 which uses configure.ac rather than configure.in.
-Signed-off-by: Hongxu Jia <hongxu.jia@windriver.com>
----
- configure.ac | 8 ++++----
- 1 file changed, 4 insertions(+), 4 deletions(-)
-
-diff --git a/configure.ac b/configure.ac
-index cea8f91..9075b8f 100644
---- a/configure.ac
-+++ b/configure.ac
-@@ -218,10 +218,10 @@ if test -f ${srcdir}/support/shobj-conf; then
- AC_MSG_CHECKING(configuration for building shared libraries)
- eval `TERMCAP_LIB=$TERMCAP_LIB ${CONFIG_SHELL-/bin/sh} ${srcdir}/support/shobj-conf -C "${CC}" -c ${host_cpu} -o ${host_os} -v ${host_vendor}`
-
--# case "$SHLIB_LIBS" in
--# *curses*|*termcap*|*termlib*) ;;
--# *) SHLIB_LIBS="$SHLIB_LIBS $TERMCAP_LIB" ;;
--# esac
-+ case "$SHLIB_LIBS" in
-+ *curses*|*termcap*|*termlib*) ;;
-+ *) SHLIB_LIBS="$SHLIB_LIBS $TERMCAP_LIB" ;;
-+ esac
-
- AC_SUBST(SHOBJ_CC)
- AC_SUBST(SHOBJ_CFLAGS)
---
-1.8.1.2
-
diff --git a/meta/recipes-core/readline/readline/readline82-001.patch b/meta/recipes-core/readline/readline/readline82-001.patch
new file mode 100644
index 0000000000..4e9839db9c
--- /dev/null
+++ b/meta/recipes-core/readline/readline/readline82-001.patch
@@ -0,0 +1,45 @@
+ READLINE PATCH REPORT
+ =====================
+
+Readline-Release: 8.2
+Patch-ID: readline82-001
+
+Bug-Reported-by: Kan-Ru Chen <koster@debian.org>
+Bug-Reference-ID:
+Bug-Reference-URL: https://bugs.debian.org/cgi-bin/bugreport.cgi?bug=1021109
+
+Bug-Description:
+
+Starting a readline application with an invalid locale specification for
+LC_ALL/LANG/LC_CTYPE can cause it crash on the first call to readline.
+
+Patch (apply with `patch -p0'):
+
+Upstream-Status: Submitted [https://ftp.gnu.org/gnu/readline/readline-8.2-patches/]
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+
+*** ../readline-8.2-patched/nls.c 2022-08-15 09:38:51.000000000 -0400
+--- nls.c 2022-10-05 09:23:22.000000000 -0400
+***************
+*** 142,145 ****
+--- 142,149 ----
+ lspec = "";
+ ret = setlocale (LC_CTYPE, lspec); /* ok, since it does not change locale */
++ if (ret == 0 || *ret == 0)
++ ret = setlocale (LC_CTYPE, (char *)NULL);
++ if (ret == 0 || *ret == 0)
++ ret = RL_DEFAULT_LOCALE;
+ #else
+ ret = (lspec == 0 || *lspec == 0) ? RL_DEFAULT_LOCALE : lspec;
+
+*** ../readline-8.2/patchlevel 2013-11-15 08:11:11.000000000 -0500
+--- patchlevel 2014-03-21 08:28:40.000000000 -0400
+***************
+*** 1,3 ****
+ # Do not edit -- exists only for use by patch
+
+! 0
+--- 1,3 ----
+ # Do not edit -- exists only for use by patch
+
+! 1
diff --git a/meta/recipes-core/readline/readline/readline82-002.patch b/meta/recipes-core/readline/readline/readline82-002.patch
new file mode 100644
index 0000000000..5629685dc1
--- /dev/null
+++ b/meta/recipes-core/readline/readline/readline82-002.patch
@@ -0,0 +1,51 @@
+ READLINE PATCH REPORT
+ =====================
+
+Readline-Release: 8.2
+Patch-ID: readline82-002
+
+Bug-Reported-by: srobertson@peratonlabs.com
+Bug-Reference-ID:
+Bug-Reference-URL: https://lists.gnu.org/archive/html/bug-bash/2022-09/msg00049.html
+
+Bug-Description:
+
+It's possible for readline to try to zero out a line that's not null-
+terminated, leading to a memory fault.
+
+Patch (apply with `patch -p0'):
+
+Upstream-Status: Submitted [https://ftp.gnu.org/gnu/readline/readline-8.2-patches/]
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+
+*** ../readline-8.2-patched/display.c 2022-04-05 10:47:31.000000000 -0400
+--- display.c 2022-12-13 13:11:22.000000000 -0500
+***************
+*** 2684,2692 ****
+
+ if (visible_line)
+! {
+! temp = visible_line;
+! while (*temp)
+! *temp++ = '\0';
+! }
+ rl_on_new_line ();
+ forced_display++;
+--- 2735,2740 ----
+
+ if (visible_line)
+! memset (visible_line, 0, line_size);
+!
+ rl_on_new_line ();
+ forced_display++;
+*** ../readline-8.2/patchlevel 2013-11-15 08:11:11.000000000 -0500
+--- patchlevel 2014-03-21 08:28:40.000000000 -0400
+***************
+*** 1,3 ****
+ # Do not edit -- exists only for use by patch
+
+! 1
+--- 1,3 ----
+ # Do not edit -- exists only for use by patch
+
+! 2
diff --git a/meta/recipes-core/readline/readline/readline82-003.patch b/meta/recipes-core/readline/readline/readline82-003.patch
new file mode 100644
index 0000000000..61570bf4f4
--- /dev/null
+++ b/meta/recipes-core/readline/readline/readline82-003.patch
@@ -0,0 +1,46 @@
+ READLINE PATCH REPORT
+ =====================
+
+Readline-Release: 8.2
+Patch-ID: readline82-003
+
+Bug-Reported-by: Stefan Klinger <readline-gnu.org@stefan-klinger.de>
+Bug-Reference-ID:
+Bug-Reference-URL: https://lists.gnu.org/archive/html/bug-readline/2023-08/msg00018.html
+
+Bug-Description:
+
+Patch (apply with `patch -p0'):
+
+The custom color prefix that readline uses to color possible completions
+must have a leading `.'.
+
+Upstream-Status: Submitted [https://ftp.gnu.org/gnu/readline/readline-8.2-patches/]
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+
+*** ../readline-8.2-patched/colors.c 2021-12-08 11:38:25.000000000 -0500
+--- colors.c 2023-08-28 16:40:04.000000000 -0400
+***************
+*** 74,78 ****
+ static void restore_default_color (void);
+
+! #define RL_COLOR_PREFIX_EXTENSION "readline-colored-completion-prefix"
+
+ COLOR_EXT_TYPE *_rl_color_ext_list = 0;
+--- 74,78 ----
+ static void restore_default_color (void);
+
+! #define RL_COLOR_PREFIX_EXTENSION ".readline-colored-completion-prefix"
+
+ COLOR_EXT_TYPE *_rl_color_ext_list = 0;
+*** ../readline-8.2/patchlevel 2013-11-15 08:11:11.000000000 -0500
+--- patchlevel 2014-03-21 08:28:40.000000000 -0400
+***************
+*** 1,3 ****
+ # Do not edit -- exists only for use by patch
+
+! 2
+--- 1,3 ----
+ # Do not edit -- exists only for use by patch
+
+! 3
diff --git a/meta/recipes-core/readline/readline/readline82-004.patch b/meta/recipes-core/readline/readline/readline82-004.patch
new file mode 100644
index 0000000000..cedc3d0fe4
--- /dev/null
+++ b/meta/recipes-core/readline/readline/readline82-004.patch
@@ -0,0 +1,68 @@
+ READLINE PATCH REPORT
+ =====================
+
+Readline-Release: 8.2
+Patch-ID: readline82-004
+
+Bug-Reported-by: Henry Bent <henry.r.bent@gmail.com>
+Bug-Reference-ID:
+Bug-Reference-URL: https://lists.gnu.org/archive/html/bug-bash/2022-11/msg00044.html
+
+Bug-Description:
+
+Patch (apply with `patch -p0'):
+
+There are systems that supply one of select or pselect, but not both.
+
+Upstream-Status: Submitted [https://ftp.gnu.org/gnu/readline/readline-8.2-patches/]
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+
+*** ../readline-8.2-patched/input.c 2022-04-08 15:43:24.000000000 -0400
+--- input.c 2022-11-28 09:41:08.000000000 -0500
+***************
+*** 152,156 ****
+--- 152,158 ----
+ int _rl_timeout_init (void);
+ int _rl_timeout_sigalrm_handler (void);
++ #if defined (RL_TIMEOUT_USE_SELECT)
+ int _rl_timeout_select (int, fd_set *, fd_set *, fd_set *, const struct timeval *, const sigset_t *);
++ #endif
+
+ static void _rl_timeout_handle (void);
+***************
+*** 249,253 ****
+ int chars_avail, k;
+ char input;
+! #if defined(HAVE_SELECT)
+ fd_set readfds, exceptfds;
+ struct timeval timeout;
+--- 251,255 ----
+ int chars_avail, k;
+ char input;
+! #if defined (HAVE_PSELECT) || defined (HAVE_SELECT)
+ fd_set readfds, exceptfds;
+ struct timeval timeout;
+***************
+*** 806,810 ****
+ unsigned char c;
+ int fd;
+! #if defined (HAVE_PSELECT)
+ sigset_t empty_set;
+ fd_set readfds;
+--- 815,819 ----
+ unsigned char c;
+ int fd;
+! #if defined (HAVE_PSELECT) || defined (HAVE_SELECT)
+ sigset_t empty_set;
+ fd_set readfds;
+*** ../readline-8.2/patchlevel 2013-11-15 08:11:11.000000000 -0500
+--- patchlevel 2014-03-21 08:28:40.000000000 -0400
+***************
+*** 1,3 ****
+ # Do not edit -- exists only for use by patch
+
+! 3
+--- 1,3 ----
+ # Do not edit -- exists only for use by patch
+
+! 4
diff --git a/meta/recipes-core/readline/readline/readline82-005.patch b/meta/recipes-core/readline/readline/readline82-005.patch
new file mode 100644
index 0000000000..69c2e4f77a
--- /dev/null
+++ b/meta/recipes-core/readline/readline/readline82-005.patch
@@ -0,0 +1,53 @@
+ READLINE PATCH REPORT
+ =====================
+
+Readline-Release: 8.2
+Patch-ID: readline82-005
+
+Bug-Reported-by: Simon Marchi <simon.marchi@polymtl.ca>
+Bug-Reference-ID:
+Bug-Reference-URL: https://lists.gnu.org/archive/html/bug-readline/2022-09/msg00005.html
+
+Bug-Description:
+
+If an application is using readline in callback mode, and a signal arrives
+after readline checks for it in rl_callback_read_char() but before it
+restores the application's signal handlers, it won't get processed until the
+next time the application calls rl_callback_read_char(). Readline needs to
+check for and resend any pending signals after restoring the application's
+signal handlers.
+
+Patch (apply with `patch -p0'):
+
+Upstream-Status: Submitted [https://ftp.gnu.org/gnu/readline/readline-8.2-patches/]
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+
+*** ../readline-8.2-patched/callback.c 2022-04-29 12:02:56.000000000 -0400
+--- callback.c 2022-10-11 10:59:06.000000000 -0400
+***************
+*** 116,120 ****
+ do { \
+ if (rl_persistent_signal_handlers == 0) \
+! rl_clear_signals (); \
+ return; \
+ } while (0)
+--- 116,123 ----
+ do { \
+ if (rl_persistent_signal_handlers == 0) \
+! { \
+! rl_clear_signals (); \
+! if (_rl_caught_signal) _rl_signal_handler (_rl_caught_signal); \
+! } \
+ return; \
+ } while (0)
+*** ../readline-8.2/patchlevel 2013-11-15 08:11:11.000000000 -0500
+--- patchlevel 2014-03-21 08:28:40.000000000 -0400
+***************
+*** 1,3 ****
+ # Do not edit -- exists only for use by patch
+
+! 4
+--- 1,3 ----
+ # Do not edit -- exists only for use by patch
+
+! 5
diff --git a/meta/recipes-core/readline/readline/readline82-006.patch b/meta/recipes-core/readline/readline/readline82-006.patch
new file mode 100644
index 0000000000..d66afe82c9
--- /dev/null
+++ b/meta/recipes-core/readline/readline/readline82-006.patch
@@ -0,0 +1,102 @@
+ READLINE PATCH REPORT
+ =====================
+
+Readline-Release: 8.2
+Patch-ID: readline82-006
+
+Bug-Reported-by: Tom de Vries <tdevries@suse.de>
+Bug-Reference-ID:
+Bug-Reference-URL: https://lists.gnu.org/archive/html/bug-readline/2022-09/msg00001.html
+
+Bug-Description:
+
+This is a variant of the same issue as the one fixed by patch 5. In this
+case, the signal arrives and is pending before readline calls rl_getc().
+When this happens, the pending signal will be handled by the loop, but may
+alter or destroy some state that the callback uses. Readline needs to treat
+this case the same way it would if a signal interrupts pselect/select, so
+compound operations like searches and reading numeric arguments get cleaned
+up properly.
+
+Patch (apply with `patch -p0'):
+
+Upstream-Status: Submitted [https://ftp.gnu.org/gnu/readline/readline-8.2-patches/]
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+
+*** ../readline-8.2-patched/input.c 2022-12-22 16:15:48.000000000 -0500
+--- input.c 2023-01-10 11:53:45.000000000 -0500
+***************
+*** 812,816 ****
+ rl_getc (FILE *stream)
+ {
+! int result;
+ unsigned char c;
+ int fd;
+--- 812,816 ----
+ rl_getc (FILE *stream)
+ {
+! int result, ostate, osig;
+ unsigned char c;
+ int fd;
+***************
+*** 823,828 ****
+--- 823,842 ----
+ while (1)
+ {
++ osig = _rl_caught_signal;
++ ostate = rl_readline_state;
++
+ RL_CHECK_SIGNALS ();
+
++ #if defined (READLINE_CALLBACKS)
++ /* Do signal handling post-processing here, but just in callback mode
++ for right now because the signal cleanup can change some of the
++ callback state, and we need to either let the application have a
++ chance to react or abort some current operation that gets cleaned
++ up by rl_callback_sigcleanup(). If not, we'll just run through the
++ loop again. */
++ if (osig != 0 && (ostate & RL_STATE_CALLBACK))
++ goto postproc_signal;
++ #endif
++
+ /* We know at this point that _rl_caught_signal == 0 */
+
+***************
+*** 888,891 ****
+--- 902,908 ----
+
+ handle_error:
++ osig = _rl_caught_signal;
++ ostate = rl_readline_state;
++
+ /* If the error that we received was EINTR, then try again,
+ this is simply an interrupted system call to read (). We allow
+***************
+*** 928,933 ****
+--- 945,959 ----
+ #endif /* SIGALRM */
+
++ postproc_signal:
++ /* POSIX says read(2)/pselect(2)/select(2) don't return EINTR for any
++ reason other than being interrupted by a signal, so we can safely
++ call the application's signal event hook. */
+ if (rl_signal_event_hook)
+ (*rl_signal_event_hook) ();
++ #if defined (READLINE_CALLBACKS)
++ else if (osig == SIGINT && (ostate & RL_STATE_CALLBACK) && (ostate & (RL_STATE_ISEARCH|RL_STATE_NSEARCH|RL_STATE_NUMERICARG)))
++ /* just these cases for now */
++ _rl_abort_internal ();
++ #endif
+ }
+ }
+*** ../readline-8.2/patchlevel 2013-11-15 08:11:11.000000000 -0500
+--- patchlevel 2014-03-21 08:28:40.000000000 -0400
+***************
+*** 1,3 ****
+ # Do not edit -- exists only for use by patch
+
+! 5
+--- 1,3 ----
+ # Do not edit -- exists only for use by patch
+
+! 6
diff --git a/meta/recipes-core/readline/readline/readline82-007.patch b/meta/recipes-core/readline/readline/readline82-007.patch
new file mode 100644
index 0000000000..9fa1ccb552
--- /dev/null
+++ b/meta/recipes-core/readline/readline/readline82-007.patch
@@ -0,0 +1,51 @@
+ READLINE PATCH REPORT
+ =====================
+
+Readline-Release: 8.2
+Patch-ID: readline82-007
+
+Bug-Reported-by: Kevin Pulo <kev@pulo.com.au>
+Bug-Reference-ID:
+Bug-Reference-URL: https://lists.gnu.org/archive/html/bug-readline/2022-11/msg00002.html
+
+Bug-Description:
+
+If readline is called with no prompt, it should display a newline if return
+is typed on an empty line. It should still suppress the final newline if
+return is typed on the last (empty) line of a multi-line command.
+
+Patch (apply with `patch -p0'):
+
+Upstream-Status: Submitted [https://ftp.gnu.org/gnu/readline/readline-8.2-patches/]
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+
+*** ../readline-8.2-patched/display.c 2022-04-05 10:47:31.000000000 -0400
+--- display.c 2022-12-13 13:11:22.000000000 -0500
+***************
+*** 3342,3348 ****
+ &last_face[_rl_screenwidth - 1 + woff], 1);
+ }
+! _rl_vis_botlin = 0;
+! if (botline_length > 0 || _rl_last_c_pos > 0)
+ rl_crlf ();
+ fflush (rl_outstream);
+ rl_display_fixed++;
+--- 3394,3400 ----
+ &last_face[_rl_screenwidth - 1 + woff], 1);
+ }
+! if ((_rl_vis_botlin == 0 && botline_length == 0) || botline_length > 0 || _rl_last_c_pos > 0)
+ rl_crlf ();
++ _rl_vis_botlin = 0;
+ fflush (rl_outstream);
+ rl_display_fixed++;
+*** ../readline-8.2/patchlevel 2013-11-15 08:11:11.000000000 -0500
+--- patchlevel 2014-03-21 08:28:40.000000000 -0400
+***************
+*** 1,3 ****
+ # Do not edit -- exists only for use by patch
+
+! 6
+--- 1,3 ----
+ # Do not edit -- exists only for use by patch
+
+! 7
diff --git a/meta/recipes-core/readline/readline/readline82-008.patch b/meta/recipes-core/readline/readline/readline82-008.patch
new file mode 100644
index 0000000000..660cb1e00b
--- /dev/null
+++ b/meta/recipes-core/readline/readline/readline82-008.patch
@@ -0,0 +1,80 @@
+ READLINE PATCH REPORT
+ =====================
+
+Readline-Release: 8.2
+Patch-ID: readline82-008
+
+Bug-Reported-by:
+Bug-Reference-ID:
+Bug-Reference-URL:
+
+Bug-Description:
+
+Add missing prototypes for several function declarations.
+
+Patch (apply with `patch -p0'):
+
+Upstream-Status: Submitted [https://ftp.gnu.org/gnu/readline/readline-8.2-patches/]
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+
+*** ../readline-8.2-patched/text.c Wed Oct 27 11:03:59 2021
+--- text.c Thu Nov 16 16:24:58 2023
+***************
+*** 1765,1770 ****
+ #if defined (READLINE_CALLBACKS)
+ static int
+! _rl_char_search_callback (data)
+! _rl_callback_generic_arg *data;
+ {
+ _rl_callback_func = 0;
+--- 1765,1769 ----
+ #if defined (READLINE_CALLBACKS)
+ static int
+! _rl_char_search_callback (_rl_callback_generic_arg *data)
+ {
+ _rl_callback_func = 0;
+*** ../readline-8.2-patched/bind.c Wed Feb 9 11:02:22 2022
+--- bind.c Thu Nov 16 16:25:17 2023
+***************
+*** 1168,1174 ****
+
+ static int
+! parse_comparison_op (s, indp)
+! const char *s;
+! int *indp;
+ {
+ int i, peekc, op;
+--- 1168,1172 ----
+
+ static int
+! parse_comparison_op (const char *s, int *indp)
+ {
+ int i, peekc, op;
+*** ../readline-8.2-patched/rltty.c Fri Feb 18 11:14:22 2022
+--- rltty.c Thu Nov 16 16:25:36 2023
+***************
+*** 81,86 ****
+ to get the tty settings. */
+ static void
+! set_winsize (tty)
+! int tty;
+ {
+ #if defined (TIOCGWINSZ)
+--- 81,85 ----
+ to get the tty settings. */
+ static void
+! set_winsize (int tty)
+ {
+ #if defined (TIOCGWINSZ)
+
+*** ../readline-8.2/patchlevel 2013-11-15 08:11:11.000000000 -0500
+--- patchlevel 2014-03-21 08:28:40.000000000 -0400
+***************
+*** 1,3 ****
+ # Do not edit -- exists only for use by patch
+
+! 7
+--- 1,3 ----
+ # Do not edit -- exists only for use by patch
+
+! 8
diff --git a/meta/recipes-core/readline/readline/readline82-009.patch b/meta/recipes-core/readline/readline/readline82-009.patch
new file mode 100644
index 0000000000..1fcf7b3535
--- /dev/null
+++ b/meta/recipes-core/readline/readline/readline82-009.patch
@@ -0,0 +1,76 @@
+ READLINE PATCH REPORT
+ =====================
+
+Readline-Release: 8.2
+Patch-ID: readline82-009
+
+Bug-Reported-by: Stefan H. Holek <stefan@epy.co.at>
+Bug-Reference-ID: <50F8DA45-B7F3-4DE1-AB94-19AE42649CDC@epy.co.at>
+Bug-Reference-URL: https://lists.gnu.org/archive/html/bug-readline/2022-10/msg00021.html
+
+Bug-Description:
+
+Fix issue where the directory name portion of the word to be completed (the
+part that is passed to opendir()) requires both tilde expansion and dequoting.
+Readline only performed tilde expansion in this case, so filename completion
+would fail.
+
+Patch (apply with `patch -p0'):
+
+Upstream-Status: Submitted [https://ftp.gnu.org/gnu/readline/readline-8.2-patches/]
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+
+*** ../readline-8.2-patched/complete.c 2022-04-05 10:47:06.000000000 -0400
+--- complete.c 2022-10-26 15:08:51.000000000 -0400
+***************
+*** 2527,2531 ****
+ xfree (dirname);
+ dirname = temp;
+! tilde_dirname = 1;
+ }
+
+--- 2527,2532 ----
+ xfree (dirname);
+ dirname = temp;
+! if (*dirname != '~')
+! tilde_dirname = 1; /* indicate successful tilde expansion */
+ }
+
+***************
+*** 2546,2554 ****
+ users_dirname = savestring (dirname);
+ }
+! else if (tilde_dirname == 0 && rl_completion_found_quote && rl_filename_dequoting_function)
+ {
+! /* delete single and double quotes */
+ xfree (dirname);
+! dirname = savestring (users_dirname);
+ }
+ directory = opendir (dirname);
+--- 2547,2560 ----
+ users_dirname = savestring (dirname);
+ }
+! else if (rl_completion_found_quote && rl_filename_dequoting_function)
+ {
+! /* We already ran users_dirname through the dequoting function.
+! If tilde_dirname == 1, we successfully performed tilde expansion
+! on dirname. Now we need to reconcile those results. We either
+! just copy the already-dequoted users_dirname or tilde expand it
+! if we tilde-expanded dirname. */
+! temp = tilde_dirname ? tilde_expand (users_dirname) : savestring (users_dirname);
+ xfree (dirname);
+! dirname = temp;
+ }
+ directory = opendir (dirname);
+
+*** ../readline-8.2/patchlevel 2013-11-15 08:11:11.000000000 -0500
+--- patchlevel 2014-03-21 08:28:40.000000000 -0400
+***************
+*** 1,3 ****
+ # Do not edit -- exists only for use by patch
+
+! 8
+--- 1,3 ----
+ # Do not edit -- exists only for use by patch
+
+! 9
diff --git a/meta/recipes-core/readline/readline/readline82-010.patch b/meta/recipes-core/readline/readline/readline82-010.patch
new file mode 100644
index 0000000000..6152953e91
--- /dev/null
+++ b/meta/recipes-core/readline/readline/readline82-010.patch
@@ -0,0 +1,70 @@
+ READLINE PATCH REPORT
+ =====================
+
+Readline-Release: 8.2
+Patch-ID: readline82-010
+
+Bug-Reported-by: Martin Castillo <castilma@uni-bremen.de>
+Bug-Reference-ID: <2d42153b-cf65-caba-dff1-cd3bc6268c7e@uni-bremen.de>
+Bug-Reference-URL: https://lists.gnu.org/archive/html/bug-readline/2023-01/msg00000.html
+
+Bug-Description:
+
+Fix the case where text to be completed from the line buffer (quoted) is
+compared to the common prefix of the possible matches (unquoted) and the
+quoting makes the former appear to be longer than the latter. Readline
+assumes the match doesn't add any characters to the word and doesn't display
+multiple matches.
+
+Patch (apply with `patch -p0'):
+
+Upstream-Status: Submitted [https://ftp.gnu.org/gnu/readline/readline-8.2-patches/]
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+
+*** ../readline-8.2-patched/complete.c Tue Apr 5 10:47:06 2022
+--- complete.c Sat Jan 7 14:19:45 2023
+***************
+*** 2032,2038 ****
+ text = rl_copy_text (start, end);
+ matches = gen_completion_matches (text, start, end, our_func, found_quote, quote_char);
+ /* nontrivial_lcd is set if the common prefix adds something to the word
+ being completed. */
+! nontrivial_lcd = matches && compare_match (text, matches[0]) != 0;
+ if (what_to_do == '!' || what_to_do == '@')
+ tlen = strlen (text);
+--- 2038,2060 ----
+ text = rl_copy_text (start, end);
+ matches = gen_completion_matches (text, start, end, our_func, found_quote, quote_char);
++ /* If TEXT contains quote characters, it will be dequoted as part of
++ generating the matches, and the matches will not contain any quote
++ characters. We need to dequote TEXT before performing the comparison.
++ Since compare_match performs the dequoting, and we only want to do it
++ once, we don't call compare_matches after dequoting TEXT; we call
++ strcmp directly. */
+ /* nontrivial_lcd is set if the common prefix adds something to the word
+ being completed. */
+! if (rl_filename_completion_desired && rl_filename_quoting_desired &&
+! rl_completion_found_quote && rl_filename_dequoting_function)
+! {
+! char *t;
+! t = (*rl_filename_dequoting_function) (text, rl_completion_quote_character);
+! xfree (text);
+! text = t;
+! nontrivial_lcd = matches && strcmp (text, matches[0]) != 0;
+! }
+! else
+! nontrivial_lcd = matches && strcmp (text, matches[0]) != 0;
+ if (what_to_do == '!' || what_to_do == '@')
+ tlen = strlen (text);
+
+*** ../readline-8.2/patchlevel 2013-11-15 08:11:11.000000000 -0500
+--- patchlevel 2014-03-21 08:28:40.000000000 -0400
+***************
+*** 1,3 ****
+ # Do not edit -- exists only for use by patch
+
+! 9
+--- 1,3 ----
+ # Do not edit -- exists only for use by patch
+
+! 10
diff --git a/meta/recipes-core/readline/readline_8.1.2.bb b/meta/recipes-core/readline/readline_8.1.2.bb
deleted file mode 100644
index 787f7e734a..0000000000
--- a/meta/recipes-core/readline/readline_8.1.2.bb
+++ /dev/null
@@ -1,7 +0,0 @@
-require readline.inc
-
-SRC_URI += "file://configure-fix.patch \
- file://norpath.patch \
- "
-
-SRC_URI[archive.sha256sum] = "7589a2381a8419e68654a47623ce7dfcb756815c8fee726b98f90bf668af7bc6"
diff --git a/meta/recipes-core/readline/readline_8.2.bb b/meta/recipes-core/readline/readline_8.2.bb
new file mode 100644
index 0000000000..f0dba31251
--- /dev/null
+++ b/meta/recipes-core/readline/readline_8.2.bb
@@ -0,0 +1,17 @@
+require readline.inc
+
+SRC_URI += " file://norpath.patch"
+
+SRC_URI += "file://readline82-001.patch;striplevel=0 \
+ file://readline82-002.patch;striplevel=0 \
+ file://readline82-003.patch;striplevel=0 \
+ file://readline82-004.patch;striplevel=0 \
+ file://readline82-005.patch;striplevel=0 \
+ file://readline82-006.patch;striplevel=0 \
+ file://readline82-007.patch;striplevel=0 \
+ file://readline82-008.patch;striplevel=0 \
+ file://readline82-009.patch;striplevel=0 \
+ file://readline82-010.patch;striplevel=0 \
+ "
+
+SRC_URI[archive.sha256sum] = "3feb7171f16a84ee82ca18a36d7b9be109a52c04f492a053331d7d1095007c35"
diff --git a/meta/recipes-core/seatd/seatd_0.6.4.bb b/meta/recipes-core/seatd/seatd_0.6.4.bb
deleted file mode 100644
index 601736adef..0000000000
--- a/meta/recipes-core/seatd/seatd_0.6.4.bb
+++ /dev/null
@@ -1,35 +0,0 @@
-SUMMARY = "A minimal seat management daemon, and a universal seat management library."
-DESCRIPTION = "Seat management takes care of mediating access to shared devices (graphics, input), without requiring the applications needing access to be root."
-HOMEPAGE = "https://git.sr.ht/~kennylevinsen/seatd"
-
-LICENSE = "MIT"
-
-LIC_FILES_CHKSUM = "file://LICENSE;md5=715a99d2dd552e6188e74d4ed2914d5a"
-
-SRC_URI = "git://git.sr.ht/~kennylevinsen/seatd;protocol=https;nobranch=1 \
- file://init"
-SRCREV = "df13d03f9cfd0dc0c2b3298cd0e5eb4e4a10835e"
-S = "${WORKDIR}/git"
-
-inherit meson pkgconfig update-rc.d
-
-# https://www.openwall.com/lists/musl/2020/01/20/3
-CFLAGS:append:libc-musl:powerpc64le = " -Wno-error=overflow"
-
-PACKAGECONFIG ?= " \
- ${@bb.utils.filter('DISTRO_FEATURES', 'systemd', d)} \
- libseat-builtin \
-"
-
-PACKAGECONFIG[libseat-builtin] = "-Dlibseat-builtin=enabled,-Dlibseat-builtin=disabled"
-PACKAGECONFIG[systemd] = ",,systemd"
-
-do_install:append() {
- if [ "${VIRTUAL-RUNTIME_init_manager}" != "systemd" ]; then
- install -Dm755 ${WORKDIR}/init ${D}/${sysconfdir}/init.d/seatd
- fi
-}
-
-INITSCRIPT_NAME = "seatd"
-INITSCRIPT_PARAMS = "start 9 5 2 . stop 20 0 1 6 ."
-INHIBIT_UPDATERCD_BBCLASS = "${@oe.utils.conditional('VIRTUAL-RUNTIME_init_manager', 'systemd', '1', '', d)}"
diff --git a/meta/recipes-core/seatd/seatd_0.8.0.bb b/meta/recipes-core/seatd/seatd_0.8.0.bb
new file mode 100644
index 0000000000..14c5b1b7ae
--- /dev/null
+++ b/meta/recipes-core/seatd/seatd_0.8.0.bb
@@ -0,0 +1,35 @@
+SUMMARY = "A minimal seat management daemon, and a universal seat management library."
+DESCRIPTION = "Seat management takes care of mediating access to shared devices (graphics, input), without requiring the applications needing access to be root."
+HOMEPAGE = "https://git.sr.ht/~kennylevinsen/seatd"
+
+LICENSE = "MIT"
+
+LIC_FILES_CHKSUM = "file://LICENSE;md5=715a99d2dd552e6188e74d4ed2914d5a"
+
+SRC_URI = "git://git.sr.ht/~kennylevinsen/seatd;protocol=https;branch=master \
+ file://init"
+SRCREV = "3e9ef69f14f630a719dd464f3c90a7932f1c8296"
+S = "${WORKDIR}/git"
+
+inherit meson pkgconfig update-rc.d
+
+# https://www.openwall.com/lists/musl/2020/01/20/3
+CFLAGS:append:libc-musl:powerpc64le = " -Wno-error=overflow"
+
+PACKAGECONFIG ?= " \
+ ${@bb.utils.filter('DISTRO_FEATURES', 'systemd', d)} \
+ libseat-builtin \
+"
+
+PACKAGECONFIG[libseat-builtin] = "-Dlibseat-builtin=enabled,-Dlibseat-builtin=disabled"
+PACKAGECONFIG[systemd] = ",,systemd"
+
+do_install:append() {
+ if [ "${VIRTUAL-RUNTIME_init_manager}" != "systemd" ]; then
+ install -Dm755 ${WORKDIR}/init ${D}/${sysconfdir}/init.d/seatd
+ fi
+}
+
+INITSCRIPT_NAME = "seatd"
+INITSCRIPT_PARAMS = "start 9 5 2 . stop 20 0 1 6 ."
+INHIBIT_UPDATERCD_BBCLASS = "${@oe.utils.conditional('VIRTUAL-RUNTIME_init_manager', 'systemd', '1', '', d)}"
diff --git a/meta/recipes-core/sysfsutils/sysfsutils-2.1.0/obsolete_automake_macros.patch b/meta/recipes-core/sysfsutils/sysfsutils-2.1.0/obsolete_automake_macros.patch
deleted file mode 100644
index 9d828d7026..0000000000
--- a/meta/recipes-core/sysfsutils/sysfsutils-2.1.0/obsolete_automake_macros.patch
+++ /dev/null
@@ -1,15 +0,0 @@
-Upstream-Status: Submitted [http://sourceforge.net/tracker/?func=detail&aid=3600345&group_id=44427&atid=439544]
-
-Signed-off-by: Marko Lindqvist <cazfi74@gmail.com>
-diff -Nurd sysfsutils-2.1.0/configure.ac sysfsutils-2.1.0/configure.ac
---- sysfsutils-2.1.0/configure.ac 2006-08-07 08:08:00.000000000 +0300
-+++ sysfsutils-2.1.0/configure.ac 2013-01-11 08:13:08.651550634 +0200
-@@ -2,7 +2,7 @@
- AC_INIT(sysfsutils, 2.1.0, linux-diag-devel@lists.sourceforge.net)
- AM_INIT_AUTOMAKE
- AC_CONFIG_SRCDIR([config.h.in])
--AM_CONFIG_HEADER([config.h])
-+AC_CONFIG_HEADERS([config.h])
-
- # Checks for KLIBC support (should be before AC_PROG_LIBTOOL and AC_PROG_CC)
- AC_CHECK_KLIBC
diff --git a/meta/recipes-core/sysfsutils/sysfsutils-2.1.0/separatebuild.patch b/meta/recipes-core/sysfsutils/sysfsutils-2.1.0/separatebuild.patch
deleted file mode 100644
index 82e725e2ac..0000000000
--- a/meta/recipes-core/sysfsutils/sysfsutils-2.1.0/separatebuild.patch
+++ /dev/null
@@ -1,65 +0,0 @@
-Upstream-Status: Pending
-
-Fix out of tree build issues so ${B} != ${S} works.
-
-RP 2013/03/18
-
-Index: sysfsutils-2.1.0/cmd/Makefile.am
-===================================================================
---- sysfsutils-2.1.0.orig/cmd/Makefile.am 2013-03-08 08:57:27.224556508 +0000
-+++ sysfsutils-2.1.0/cmd/Makefile.am 2013-03-08 08:57:27.480556502 +0000
-@@ -1,6 +1,6 @@
- bin_PROGRAMS = systool
- systool_SOURCES = systool.c names.c names.h
--INCLUDES = -I../include
-+INCLUDES = -I$(srcdir)/../include
- LDADD = ../lib/libsysfs.la
- EXTRA_CFLAGS = @EXTRA_CFLAGS@
- AM_CFLAGS = -Wall -W -Wstrict-prototypes $(EXTRA_CFLAGS)
-Index: sysfsutils-2.1.0/lib/Makefile.am
-===================================================================
---- sysfsutils-2.1.0.orig/lib/Makefile.am 2013-03-08 08:57:27.224556508 +0000
-+++ sysfsutils-2.1.0/lib/Makefile.am 2013-03-08 08:57:27.480556502 +0000
-@@ -1,7 +1,7 @@
- lib_LTLIBRARIES = libsysfs.la
- libsysfs_la_SOURCES = sysfs_utils.c sysfs_attr.c sysfs_class.c dlist.c \
- sysfs_device.c sysfs_driver.c sysfs_bus.c sysfs_module.c sysfs.h
--INCLUDES = -I../include
-+INCLUDES = -I$(srcdir)/../include
- libsysfs_la_LDFLAGS = -version-info 2:1:0
- EXTRA_CFLAGS = @EXTRA_CLFAGS@
- libsysfs_la_CFLAGS = -Wall -W -Wstrict-prototypes $(EXTRA_CLFAGS)
-Index: sysfsutils-2.1.0/test/Makefile.am
-===================================================================
---- sysfsutils-2.1.0.orig/test/Makefile.am 2013-03-08 08:57:27.224556508 +0000
-+++ sysfsutils-2.1.0/test/Makefile.am 2013-03-08 09:06:48.196543326 +0000
-@@ -2,14 +2,14 @@
- BUILT_SOURCES = test.h
- CLEANFILES = test.h
- test.h:
-- ./create-test
-+ $(srcdir)/create-test $(srcdir)/libsysfs.conf
- get_device_SOURCES = get_device.c
- get_driver_SOURCES = get_driver.c
- get_module_SOURCES = get_module.c
- testlibsysfs_SOURCES = test.c test_attr.c test_bus.c test_class.c \
- test_device.c test_driver.c test_module.c test_utils.c \
- testout.c test-defs.h libsysfs.conf create-test
--INCLUDES = -I../include
-+INCLUDES = -I$(srcdir)/../include
- LDADD = ../lib/libsysfs.la
- EXTRA_CFLAGS = @EXTRA_CLFAGS@
- AM_CFLAGS = -Wall -W -Wstrict-prototypes $(EXTRA_CLFAGS)
-Index: sysfsutils-2.1.0/test/create-test
-===================================================================
---- sysfsutils-2.1.0.orig/test/create-test 2005-11-28 10:22:10.000000000 +0000
-+++ sysfsutils-2.1.0/test/create-test 2013-03-08 09:07:03.372542838 +0000
-@@ -2,7 +2,7 @@
-
- rm -f test.h
-
--conf_file=./libsysfs.conf
-+conf_file=$1
-
- . $conf_file
-
diff --git a/meta/recipes-core/sysfsutils/sysfsutils-2.1.0/sysfsutils-2.0.0-class-dup.patch b/meta/recipes-core/sysfsutils/sysfsutils-2.1.0/sysfsutils-2.0.0-class-dup.patch
deleted file mode 100644
index 1a35b7897a..0000000000
--- a/meta/recipes-core/sysfsutils/sysfsutils-2.1.0/sysfsutils-2.0.0-class-dup.patch
+++ /dev/null
@@ -1,23 +0,0 @@
-Upstream-Status: Backport [from fedora core 9]
-
-This patch is from the Fedora Core 9 sysfsutils-2.1.0-3.fc9 package.
-
-It fixes a problem in the upstream package where not all devices
-will be returned by the function.
-
-The package License indicates this is GPLv2 licensed.
-
-Signed-off-by: Mark Hatle <mark.hatle@windriver.com>
-
-diff -puN lib/sysfs_class.c~sysfsutils_class_dup lib/sysfs_class.c
---- sysfsutils-2.1.0/lib/sysfs_class.c~sysfsutils_class_dup 2006-09-07 17:01:26.000000000 -0500
-+++ sysfsutils-2.1.0-bjking1/lib/sysfs_class.c 2006-09-07 17:01:26.000000000 -0500
-@@ -66,7 +66,7 @@ static int cdev_name_equal(void *a, void
- return 0;
-
- if (strncmp((char *)a, ((struct sysfs_class_device *)b)->name,
-- strlen((char *)a)) == 0)
-+ SYSFS_NAME_LEN) == 0)
- return 1;
-
- return 0;
diff --git a/meta/recipes-core/sysfsutils/sysfsutils_2.1.0.bb b/meta/recipes-core/sysfsutils/sysfsutils_2.1.0.bb
deleted file mode 100644
index c90a02f131..0000000000
--- a/meta/recipes-core/sysfsutils/sysfsutils_2.1.0.bb
+++ /dev/null
@@ -1,31 +0,0 @@
-SUMMARY = "Tools for working with sysfs"
-DESCRIPTION = "Tools for working with the sysfs virtual filesystem. The tool 'systool' can query devices by bus, class and topology."
-HOMEPAGE = "http://linux-diag.sourceforge.net/Sysfsutils.html"
-
-LICENSE = "GPL-2.0-only & LGPL-2.1-only"
-LICENSE:${PN} = "GPL-2.0-only"
-LICENSE:libsysfs = "LGPL-2.1-only"
-LIC_FILES_CHKSUM = "file://COPYING;md5=3d06403ea54c7574a9e581c6478cc393 \
- file://cmd/GPL;md5=d41d4e2e1e108554e0388ea4aecd8d27 \
- file://lib/LGPL;md5=b75d069791103ffe1c0d6435deeff72e"
-PR = "r5"
-
-SRC_URI = "${SOURCEFORGE_MIRROR}/linux-diag/sysfsutils-${PV}.tar.gz \
- file://sysfsutils-2.0.0-class-dup.patch \
- file://obsolete_automake_macros.patch \
- file://separatebuild.patch"
-
-SRC_URI[md5sum] = "14e7dcd0436d2f49aa403f67e1ef7ddc"
-SRC_URI[sha256sum] = "e865de2c1f559fff0d3fc936e660c0efaf7afe662064f2fb97ccad1ec28d208a"
-
-UPSTREAM_CHECK_URI = "http://sourceforge.net/projects/linux-diag/files/sysfsutils/"
-UPSTREAM_CHECK_REGEX = "/sysfsutils/(?P<pver>(\d+[\.\-_]*)+)/"
-
-S = "${WORKDIR}/sysfsutils-${PV}"
-
-inherit autotools
-
-PACKAGES =+ "libsysfs"
-FILES:libsysfs = "${libdir}/lib*${SOLIBS}"
-
-export libdir = "${base_libdir}"
diff --git a/meta/recipes-core/sysfsutils/sysfsutils_2.1.1.bb b/meta/recipes-core/sysfsutils/sysfsutils_2.1.1.bb
new file mode 100644
index 0000000000..86cc06a2cd
--- /dev/null
+++ b/meta/recipes-core/sysfsutils/sysfsutils_2.1.1.bb
@@ -0,0 +1,21 @@
+SUMMARY = "Tools for working with sysfs"
+DESCRIPTION = "Tools for working with the sysfs virtual filesystem. The tool 'systool' can query devices by bus, class and topology."
+HOMEPAGE = "http://linux-diag.sourceforge.net/Sysfsutils.html"
+
+LICENSE = "GPL-2.0-only & LGPL-2.1-only"
+LICENSE:${PN} = "GPL-2.0-only"
+LICENSE:libsysfs = "LGPL-2.1-only"
+LIC_FILES_CHKSUM = "file://COPYING;md5=dcc19fa9307a50017fca61423a7d9754 \
+ file://cmd/GPL;md5=b234ee4d69f5fce4486a80fdaf4a4263 \
+ file://lib/LGPL;md5=4fbd65380cdd255951079008b364516c"
+
+SRC_URI = "git://github.com/linux-ras/sysfsutils.git;protocol=https;branch=master"
+
+SRCREV = "da2f1f8500c0af6663a56ce2bff07f67e60a92e0"
+
+S = "${WORKDIR}/git"
+
+inherit autotools
+
+PACKAGES =+ "libsysfs"
+FILES:libsysfs = "${libdir}/lib*${SOLIBS}"
diff --git a/meta/recipes-core/systemd/systemd-boot-native_255.4.bb b/meta/recipes-core/systemd/systemd-boot-native_255.4.bb
new file mode 100644
index 0000000000..73db59b14e
--- /dev/null
+++ b/meta/recipes-core/systemd/systemd-boot-native_255.4.bb
@@ -0,0 +1,15 @@
+require systemd.inc
+
+inherit native
+
+deltask do_configure
+deltask do_compile
+
+do_install () {
+ install -Dm 0755 ${S}/src/ukify/ukify.py ${D}${bindir}/ukify
+}
+addtask install after do_unpack
+
+PACKAGES = "${PN}"
+
+FILES:${PN} = "${bindir}/ukify"
diff --git a/meta/recipes-core/systemd/systemd-boot_250.5.bb b/meta/recipes-core/systemd/systemd-boot_250.5.bb
deleted file mode 100644
index b67706b731..0000000000
--- a/meta/recipes-core/systemd/systemd-boot_250.5.bb
+++ /dev/null
@@ -1,73 +0,0 @@
-require systemd.inc
-FILESEXTRAPATHS =. "${FILE_DIRNAME}/systemd:"
-
-require conf/image-uefi.conf
-
-DEPENDS = "intltool-native libcap util-linux gnu-efi gperf-native python3-jinja2-native"
-
-inherit meson pkgconfig gettext
-inherit deploy
-
-LDFLAGS:prepend = "${@ " ".join(d.getVar('LD').split()[1:])} "
-
-do_write_config[vardeps] += "CC OBJCOPY"
-do_write_config:append() {
- cat >${WORKDIR}/meson-${PN}.cross <<EOF
-[binaries]
-efi_cc = ${@meson_array('CC', d)}
-objcopy = ${@meson_array('OBJCOPY', d)}
-EOF
-}
-
-EFI_LD = "bfd"
-
-EXTRA_OEMESON += "-Defi=true \
- -Dgnu-efi=true \
- -Defi-includedir=${STAGING_INCDIR}/efi \
- -Defi-libdir=${STAGING_LIBDIR} \
- -Defi-ld=${EFI_LD} \
- -Dman=false \
- --cross-file ${WORKDIR}/meson-${PN}.cross \
- "
-
-# install to the image as boot*.efi if its the EFI_PROVIDER,
-# otherwise install as the full name.
-# This allows multiple bootloaders to coexist in a single image.
-python __anonymous () {
- import re
- target = d.getVar('TARGET_ARCH')
- prefix = "" if d.getVar('EFI_PROVIDER') == "systemd-boot" else "systemd-"
- systemdimage = prefix + d.getVar("EFI_BOOT_IMAGE")
- d.setVar("SYSTEMD_BOOT_IMAGE", systemdimage)
- prefix = "systemd-" if prefix == "" else ""
- d.setVar("SYSTEMD_BOOT_IMAGE_PREFIX", prefix)
-}
-
-FILES:${PN} = "${EFI_FILES_PATH}/${SYSTEMD_BOOT_IMAGE}"
-
-RDEPENDS:${PN} += "virtual-systemd-bootconf"
-
-# Imported from the old gummiboot recipe
-TUNE_CCARGS:remove = "-mfpmath=sse"
-
-COMPATIBLE_HOST = "(aarch64.*|arm.*|x86_64.*|i.86.*)-linux"
-COMPATIBLE_HOST:x86-x32 = "null"
-
-do_compile() {
- ninja \
- src/boot/efi/${SYSTEMD_BOOT_IMAGE_PREFIX}${SYSTEMD_BOOT_IMAGE} \
- src/boot/efi/linux${EFI_ARCH}.efi.stub
-}
-
-do_install() {
- install -d ${D}${EFI_FILES_PATH}
- install ${B}/src/boot/efi/systemd-boot*.efi ${D}${EFI_FILES_PATH}/${SYSTEMD_BOOT_IMAGE}
-}
-
-do_deploy () {
- install ${B}/src/boot/efi/systemd-boot*.efi ${DEPLOYDIR}
- install ${B}/src/boot/efi/linux*.efi.stub ${DEPLOYDIR}
-}
-
-addtask deploy before do_build after do_compile
-
diff --git a/meta/recipes-core/systemd/systemd-boot_255.4.bb b/meta/recipes-core/systemd/systemd-boot_255.4.bb
new file mode 100644
index 0000000000..4ee25ee72f
--- /dev/null
+++ b/meta/recipes-core/systemd/systemd-boot_255.4.bb
@@ -0,0 +1,67 @@
+require systemd.inc
+FILESEXTRAPATHS =. "${FILE_DIRNAME}/systemd:"
+
+require conf/image-uefi.conf
+
+DEPENDS = "intltool-native libcap util-linux gperf-native python3-jinja2-native python3-pyelftools-native"
+
+inherit meson pkgconfig gettext
+inherit deploy
+
+LDFLAGS:prepend = "${@ " ".join(d.getVar('LD').split()[1:])} "
+
+EFI_LD = "bfd"
+LDFLAGS:append = " -fuse-ld=${EFI_LD}"
+
+do_write_config[vardeps] += "EFI_LD"
+do_write_config:append() {
+ cat >${WORKDIR}/meson-${PN}.cross <<EOF
+[binaries]
+c_ld = ${@meson_array('EFI_LD', d)}
+EOF
+}
+
+MESON_CROSS_FILE:append = " --cross-file ${WORKDIR}/meson-${PN}.cross"
+
+MESON_TARGET = "systemd-boot"
+
+EXTRA_OEMESON += "-Defi=true \
+ -Dbootloader=true \
+ -Dman=false \
+ "
+
+# install to the image as boot*.efi if its the EFI_PROVIDER,
+# otherwise install as the full name.
+# This allows multiple bootloaders to coexist in a single image.
+python __anonymous () {
+ import re
+ target = d.getVar('TARGET_ARCH')
+ prefix = "" if d.getVar('EFI_PROVIDER') == "systemd-boot" else "systemd-"
+ systemdimage = prefix + d.getVar("EFI_BOOT_IMAGE")
+ d.setVar("SYSTEMD_BOOT_IMAGE", systemdimage)
+ prefix = "systemd-" if prefix == "" else ""
+ d.setVar("SYSTEMD_BOOT_IMAGE_PREFIX", prefix)
+}
+
+FILES:${PN} = "${EFI_FILES_PATH}/${SYSTEMD_BOOT_IMAGE}"
+
+RDEPENDS:${PN} += "virtual-systemd-bootconf"
+
+CFLAGS:append:libc-musl = " -D__DEFINED_wchar_t"
+
+COMPATIBLE_HOST = "(aarch64.*|arm.*|x86_64.*|i.86.*)-linux"
+COMPATIBLE_HOST:x86-x32 = "null"
+
+do_install() {
+ install -d ${D}${EFI_FILES_PATH}
+ install ${B}/src/boot/efi/systemd-boot*.efi ${D}${EFI_FILES_PATH}/${SYSTEMD_BOOT_IMAGE}
+}
+
+do_deploy () {
+ install ${B}/src/boot/efi/systemd-boot*.efi ${DEPLOYDIR}
+ install ${B}/src/boot/efi/linux*.efi.stub ${DEPLOYDIR}
+ install ${B}/src/boot/efi/addon*.efi.stub ${DEPLOYDIR}
+}
+
+addtask deploy before do_build after do_compile
+
diff --git a/meta/recipes-core/systemd/systemd-compat-units.bb b/meta/recipes-core/systemd/systemd-compat-units.bb
index 55ebf99117..c03d97f9c9 100644
--- a/meta/recipes-core/systemd/systemd-compat-units.bb
+++ b/meta/recipes-core/systemd/systemd-compat-units.bb
@@ -2,7 +2,6 @@ SUMMARY = "Enhances systemd compatilibity with existing SysVinit scripts"
HOMEPAGE = "http://www.freedesktop.org/wiki/Software/systemd"
LICENSE = "MIT"
-PR = "r29"
PACKAGE_WRITE_DEPS += "systemd-systemctl-native"
@@ -14,7 +13,8 @@ INHIBIT_DEFAULT_DEPS = "1"
ALLOW_EMPTY:${PN} = "1"
-REQUIRED_DISTRO_FEATURES = "systemd"
+REQUIRED_DISTRO_FEATURES += "systemd"
+REQUIRED_DISTRO_FEATURES += "usrmerge"
SYSTEMD_DISABLED_SYSV_SERVICES = " \
busybox-udhcpc \
@@ -27,7 +27,8 @@ SYSTEMD_DISABLED_SYSV_SERVICES = " \
pkg_postinst:${PN} () {
- cd $D${sysconfdir}/init.d || exit 0
+ test -d $D${sysconfdir}/init.d || exit 0
+ cd $D${sysconfdir}/init.d
echo "Disabling the following sysv scripts: "
diff --git a/meta/recipes-core/systemd/systemd-conf_1.0.bb b/meta/recipes-core/systemd/systemd-conf_1.0.bb
index 61ce7939d3..2355936631 100644
--- a/meta/recipes-core/systemd/systemd-conf_1.0.bb
+++ b/meta/recipes-core/systemd/systemd-conf_1.0.bb
@@ -5,6 +5,9 @@ DefaultTimeoutStartSec setting."
LICENSE = "MIT"
LIC_FILES_CHKSUM = "file://${COREBASE}/meta/COPYING.MIT;md5=3da9cfbcb788c80a0384361b4de20420"
+inherit features_check
+REQUIRED_DISTRO_FEATURES += "usrmerge"
+
PE = "1"
PACKAGECONFIG ??= "dhcp-ethernet"
diff --git a/meta/recipes-core/systemd/systemd-machine-units_1.0.bb b/meta/recipes-core/systemd/systemd-machine-units_1.0.bb
index 12f27d6ae3..8df7ff7cf1 100644
--- a/meta/recipes-core/systemd/systemd-machine-units_1.0.bb
+++ b/meta/recipes-core/systemd/systemd-machine-units_1.0.bb
@@ -5,9 +5,9 @@ LIC_FILES_CHKSUM = "file://${COMMON_LICENSE_DIR}/MIT;md5=0835ade698e0bcf8506ecda
PACKAGE_ARCH = "${MACHINE_ARCH}"
-PR = "r19"
-inherit systemd
+inherit systemd features_check
+REQUIRED_DISTRO_FEATURES += "usrmerge"
SYSTEMD_SERVICE:${PN} = ""
ALLOW_EMPTY:${PN} = "1"
diff --git a/meta/recipes-core/systemd/systemd-serialgetty.bb b/meta/recipes-core/systemd/systemd-serialgetty.bb
index fd888bb834..44a93ac684 100644
--- a/meta/recipes-core/systemd/systemd-serialgetty.bb
+++ b/meta/recipes-core/systemd/systemd-serialgetty.bb
@@ -3,7 +3,6 @@ HOMEPAGE = "https://www.freedesktop.org/wiki/Software/systemd/"
LICENSE = "GPL-2.0-or-later"
LIC_FILES_CHKSUM = "file://${COREBASE}/meta/files/common-licenses/GPL-2.0-only;md5=801f80980d171dd6425610833a22dbe6"
-PR = "r5"
SERIAL_CONSOLES ?= "115200;ttyS0"
SERIAL_TERM ?= "linux"
@@ -14,7 +13,8 @@ S = "${WORKDIR}"
# As this package is tied to systemd, only build it when we're also building systemd.
inherit features_check
-REQUIRED_DISTRO_FEATURES = "systemd"
+REQUIRED_DISTRO_FEATURES += "systemd"
+REQUIRED_DISTRO_FEATURES += "usrmerge"
do_install() {
if [ ! -z "${SERIAL_CONSOLES}" ] ; then
diff --git a/meta/recipes-core/systemd/systemd-systemctl-native.bb b/meta/recipes-core/systemd/systemd-systemctl-native.bb
index fadc8433d8..54283bcba1 100644
--- a/meta/recipes-core/systemd/systemd-systemctl-native.bb
+++ b/meta/recipes-core/systemd/systemd-systemctl-native.bb
@@ -3,7 +3,6 @@ SUMMARY = "Wrapper for enabling systemd services"
LICENSE = "MIT"
LIC_FILES_CHKSUM = "file://${COREBASE}/meta/COPYING.MIT;md5=3da9cfbcb788c80a0384361b4de20420"
-PR = "r6"
inherit native
diff --git a/meta/recipes-core/systemd/systemd-systemctl/systemctl b/meta/recipes-core/systemd/systemd-systemctl/systemctl
index 6324319a45..2229bc7b6d 100755
--- a/meta/recipes-core/systemd/systemd-systemctl/systemctl
+++ b/meta/recipes-core/systemd/systemd-systemctl/systemctl
@@ -11,6 +11,7 @@ import re
import sys
from collections import namedtuple
+from itertools import chain
from pathlib import Path
version = 1.0
@@ -25,12 +26,19 @@ locations = list()
class SystemdFile():
"""Class representing a single systemd configuration file"""
- def __init__(self, root, path):
+
+ _clearable_keys = ['WantedBy']
+
+ def __init__(self, root, path, instance_unit_name):
self.sections = dict()
self._parse(root, path)
dirname = os.path.basename(path.name) + ".d"
for location in locations:
- for path2 in sorted((root / location / "system" / dirname).glob("*.conf")):
+ files = (root / location / "system" / dirname).glob("*.conf")
+ if instance_unit_name:
+ inst_dirname = instance_unit_name + ".d"
+ files = chain(files, (root / location / "system" / inst_dirname).glob("*.conf"))
+ for path2 in sorted(files):
self._parse(root, path2)
def _parse(self, root, path):
@@ -75,6 +83,14 @@ class SystemdFile():
v = m.group('value')
if k not in section:
section[k] = list()
+
+ # If we come across a "key=" line for a "clearable key", then
+ # forget all preceding assignments. This works because we are
+ # processing files in correct parse order.
+ if k in self._clearable_keys and not v:
+ del section[k]
+ continue
+
section[k].extend(v.split())
def get(self, section, prop):
@@ -179,24 +195,29 @@ class SystemdUnit():
raise SystemdUnitNotFoundError(self.root, unit)
- def _process_deps(self, config, service, location, prop, dirstem):
+ def _process_deps(self, config, service, location, prop, dirstem, instance):
systemdir = self.root / SYSCONFDIR / "systemd" / "system"
target = ROOT / location.relative_to(self.root)
try:
for dependent in config.get('Install', prop):
+ # expand any %i to instance (ignoring escape sequence %%)
+ dependent = re.sub("([^%](%%)*)%i", "\\g<1>{}".format(instance), dependent)
wants = systemdir / "{}.{}".format(dependent, dirstem) / service
add_link(wants, target)
except KeyError:
pass
- def enable(self, caller_unit=None):
+ def enable(self, units_enabled=[]):
# if we're enabling an instance, first extract the actual instance
# then figure out what the template unit is
template = re.match(r"[^@]+@(?P<instance>[^\.]*)\.", self.unit)
+ instance_unit_name = None
if template:
instance = template.group('instance')
+ if instance != "":
+ instance_unit_name = self.unit
unit = re.sub(r"@[^\.]*\.", "@.", self.unit, 1)
else:
instance = None
@@ -208,7 +229,7 @@ class SystemdUnit():
# ignore aliases
return
- config = SystemdFile(self.root, path)
+ config = SystemdFile(self.root, path, instance_unit_name)
if instance == "":
try:
default_instance = config.get('Install', 'DefaultInstance')[0]
@@ -221,14 +242,15 @@ class SystemdUnit():
else:
service = self.unit
- self._process_deps(config, service, path, 'WantedBy', 'wants')
- self._process_deps(config, service, path, 'RequiredBy', 'requires')
+ self._process_deps(config, service, path, 'WantedBy', 'wants', instance)
+ self._process_deps(config, service, path, 'RequiredBy', 'requires', instance)
try:
for also in config.get('Install', 'Also'):
try:
- if caller_unit != also:
- SystemdUnit(self.root, also).enable(unit)
+ units_enabled.append(unit)
+ if also not in units_enabled:
+ SystemdUnit(self.root, also).enable(units_enabled)
except SystemdUnitNotFoundError as e:
sys.exit("Error: Systemctl also enable issue with %s (%s)" % (service, e.unit))
diff --git a/meta/recipes-core/systemd/systemd.inc b/meta/recipes-core/systemd/systemd.inc
index 309105290f..a35db5091e 100644
--- a/meta/recipes-core/systemd/systemd.inc
+++ b/meta/recipes-core/systemd/systemd.inc
@@ -10,12 +10,13 @@ state, maintains mount and automount points and implements an \
elaborate transactional dependency-based service control logic. It can \
work as a drop-in replacement for sysvinit."
-LICENSE = "GPL-2.0-only & LGPL-2.1-only"
+LICENSE = "GPL-2.0-only & LGPL-2.1-or-later"
+LICENSE:libsystemd = "LGPL-2.1-or-later"
LIC_FILES_CHKSUM = "file://LICENSE.GPL2;md5=751419260aa954499f7abaabaa882bbe \
file://LICENSE.LGPL2.1;md5=4fbd65380cdd255951079008b364516c"
-SRCREV = "4a31fa2fb040005b73253da75cf84949b8485175"
-SRCBRANCH = "v250-stable"
+SRCREV = "387a14a7b67b8b76adaed4175e14bb7e39b2f738"
+SRCBRANCH = "v255-stable"
SRC_URI = "git://github.com/systemd/systemd-stable.git;protocol=https;branch=${SRCBRANCH}"
S = "${WORKDIR}/git"
diff --git a/meta/recipes-core/systemd/systemd/00-create-volatile.conf b/meta/recipes-core/systemd/systemd/00-create-volatile.conf
index 87cbe1e7d3..c4277221a2 100644
--- a/meta/recipes-core/systemd/systemd/00-create-volatile.conf
+++ b/meta/recipes-core/systemd/systemd/00-create-volatile.conf
@@ -3,5 +3,6 @@
# inside /var/log.
+d /run/lock 1777 - - -
d /var/volatile/log - - - -
d /var/volatile/tmp 1777 - -
diff --git a/meta/recipes-core/systemd/systemd/0001-Adjust-for-musl-headers.patch b/meta/recipes-core/systemd/systemd/0001-Adjust-for-musl-headers.patch
deleted file mode 100644
index c42c66786f..0000000000
--- a/meta/recipes-core/systemd/systemd/0001-Adjust-for-musl-headers.patch
+++ /dev/null
@@ -1,525 +0,0 @@
-From 9a1841402ce3ef21a10a7314a07a615f8196d406 Mon Sep 17 00:00:00 2001
-From: Khem Raj <raj.khem@gmail.com>
-Date: Fri, 21 Jan 2022 22:19:37 -0800
-Subject: [PATCH] Adjust for musl headers
-
-Upstream-Status: Inappropriate [musl specific]
-
-Signed-off-by: Khem Raj <raj.khem@gmail.com>
-
----
- src/libsystemd-network/sd-dhcp6-client.c | 2 +-
- src/network/netdev/bareudp.c | 2 +-
- src/network/netdev/batadv.c | 2 +-
- src/network/netdev/bond.c | 2 +-
- src/network/netdev/bridge.c | 2 +-
- src/network/netdev/dummy.c | 2 +-
- src/network/netdev/geneve.c | 2 +-
- src/network/netdev/ifb.c | 2 +-
- src/network/netdev/ipoib.c | 2 +-
- src/network/netdev/ipvlan.c | 2 +-
- src/network/netdev/macsec.c | 2 +-
- src/network/netdev/macvlan.c | 2 +-
- src/network/netdev/netdev.c | 2 +-
- src/network/netdev/netdevsim.c | 2 +-
- src/network/netdev/nlmon.c | 2 +-
- src/network/netdev/tunnel.c | 2 +-
- src/network/netdev/vcan.c | 2 +-
- src/network/netdev/veth.c | 2 +-
- src/network/netdev/vlan.c | 2 +-
- src/network/netdev/vrf.c | 2 +-
- src/network/netdev/vxcan.c | 2 +-
- src/network/netdev/vxlan.c | 2 +-
- src/network/netdev/wireguard.c | 2 +-
- src/network/netdev/xfrm.c | 2 +-
- src/network/networkd-bridge-mdb.c | 4 ++--
- src/network/networkd-dhcp-common.c | 3 ++-
- src/network/networkd-dhcp-prefix-delegation.c | 4 ++--
- src/network/networkd-dhcp-server.c | 2 +-
- src/network/networkd-dhcp4.c | 2 +-
- src/network/networkd-link.c | 2 +-
- src/network/networkd-route.c | 8 ++++----
- src/network/networkd-setlink.c | 2 +-
- src/shared/linux/ethtool.h | 3 ++-
- src/shared/netif-util.c | 2 +-
- src/udev/udev-builtin-net_id.c | 2 +-
- 35 files changed, 42 insertions(+), 40 deletions(-)
-
-diff --git a/src/libsystemd-network/sd-dhcp6-client.c b/src/libsystemd-network/sd-dhcp6-client.c
-index 84bc739bba..ff8cb6bf9d 100644
---- a/src/libsystemd-network/sd-dhcp6-client.c
-+++ b/src/libsystemd-network/sd-dhcp6-client.c
-@@ -5,7 +5,7 @@
-
- #include <errno.h>
- #include <sys/ioctl.h>
--#include <linux/if_arp.h>
-+//#include <linux/if_arp.h>
- #include <linux/if_infiniband.h>
-
- #include "sd-dhcp6-client.h"
-diff --git a/src/network/netdev/bareudp.c b/src/network/netdev/bareudp.c
-index 8ff0eb1360..7e06b8d57d 100644
---- a/src/network/netdev/bareudp.c
-+++ b/src/network/netdev/bareudp.c
-@@ -2,7 +2,7 @@
- * Copyright © 2020 VMware, Inc. */
-
- #include <netinet/in.h>
--#include <linux/if_arp.h>
-+//#include <linux/if_arp.h>
-
- #include "bareudp.h"
- #include "netlink-util.h"
-diff --git a/src/network/netdev/batadv.c b/src/network/netdev/batadv.c
-index 15f3aee3a6..ec76428436 100644
---- a/src/network/netdev/batadv.c
-+++ b/src/network/netdev/batadv.c
-@@ -3,7 +3,7 @@
- #include <inttypes.h>
- #include <netinet/in.h>
- #include <linux/genetlink.h>
--#include <linux/if_arp.h>
-+//#include <linux/if_arp.h>
-
- #include "batadv.h"
- #include "fileio.h"
-diff --git a/src/network/netdev/bond.c b/src/network/netdev/bond.c
-index 5d94aa1d68..4e379a326d 100644
---- a/src/network/netdev/bond.c
-+++ b/src/network/netdev/bond.c
-@@ -1,7 +1,7 @@
- /* SPDX-License-Identifier: LGPL-2.1-or-later */
-
- #include <netinet/in.h>
--#include <linux/if_arp.h>
-+//#include <linux/if_arp.h>
-
- #include "alloc-util.h"
- #include "bond.h"
-diff --git a/src/network/netdev/bridge.c b/src/network/netdev/bridge.c
-index b974f2ae0a..9a5f18d556 100644
---- a/src/network/netdev/bridge.c
-+++ b/src/network/netdev/bridge.c
-@@ -2,7 +2,7 @@
-
- #include <net/if.h>
- #include <netinet/in.h>
--#include <linux/if_arp.h>
-+//#include <linux/if_arp.h>
- #include <linux/if_bridge.h>
-
- #include "bridge.h"
-diff --git a/src/network/netdev/dummy.c b/src/network/netdev/dummy.c
-index 00df1d2787..77b506b422 100644
---- a/src/network/netdev/dummy.c
-+++ b/src/network/netdev/dummy.c
-@@ -1,6 +1,6 @@
- /* SPDX-License-Identifier: LGPL-2.1-or-later */
-
--#include <linux/if_arp.h>
-+//#include <linux/if_arp.h>
-
- #include "dummy.h"
-
-diff --git a/src/network/netdev/geneve.c b/src/network/netdev/geneve.c
-index 224c17e979..fb79cc13f6 100644
---- a/src/network/netdev/geneve.c
-+++ b/src/network/netdev/geneve.c
-@@ -2,7 +2,7 @@
-
- #include <net/if.h>
- #include <netinet/in.h>
--#include <linux/if_arp.h>
-+//#include <linux/if_arp.h>
-
- #include "alloc-util.h"
- #include "conf-parser.h"
-diff --git a/src/network/netdev/ifb.c b/src/network/netdev/ifb.c
-index d7ff44cb9e..e037629ae4 100644
---- a/src/network/netdev/ifb.c
-+++ b/src/network/netdev/ifb.c
-@@ -1,7 +1,7 @@
- /* SPDX-License-Identifier: LGPL-2.1-or-later
- * Copyright © 2019 VMware, Inc. */
-
--#include <linux/if_arp.h>
-+//#include <linux/if_arp.h>
-
- #include "ifb.h"
-
-diff --git a/src/network/netdev/ipoib.c b/src/network/netdev/ipoib.c
-index e0ff9e8c62..ab085c1f6d 100644
---- a/src/network/netdev/ipoib.c
-+++ b/src/network/netdev/ipoib.c
-@@ -1,6 +1,6 @@
- /* SPDX-License-Identifier: LGPL-2.1-or-later */
-
--#include <linux/if_arp.h>
-+//#include <linux/if_arp.h>
- #include <linux/if_link.h>
-
- #include "ipoib.h"
-diff --git a/src/network/netdev/ipvlan.c b/src/network/netdev/ipvlan.c
-index d15766cd7b..60728b4f94 100644
---- a/src/network/netdev/ipvlan.c
-+++ b/src/network/netdev/ipvlan.c
-@@ -2,7 +2,7 @@
-
- #include <net/if.h>
- #include <netinet/in.h>
--#include <linux/if_arp.h>
-+//#include <linux/if_arp.h>
-
- #include "conf-parser.h"
- #include "ipvlan.h"
-diff --git a/src/network/netdev/macsec.c b/src/network/netdev/macsec.c
-index f1a566a9ca..1f37927a83 100644
---- a/src/network/netdev/macsec.c
-+++ b/src/network/netdev/macsec.c
-@@ -1,7 +1,7 @@
- /* SPDX-License-Identifier: LGPL-2.1-or-later */
-
- #include <netinet/in.h>
--#include <linux/if_arp.h>
-+//#include <linux/if_arp.h>
- #include <linux/if_ether.h>
- #include <linux/if_macsec.h>
- #include <linux/genetlink.h>
-diff --git a/src/network/netdev/macvlan.c b/src/network/netdev/macvlan.c
-index c41be6e78f..ee2660c5bf 100644
---- a/src/network/netdev/macvlan.c
-+++ b/src/network/netdev/macvlan.c
-@@ -2,7 +2,7 @@
-
- #include <net/if.h>
- #include <netinet/in.h>
--#include <linux/if_arp.h>
-+//#include <linux/if_arp.h>
-
- #include "conf-parser.h"
- #include "macvlan.h"
-diff --git a/src/network/netdev/netdev.c b/src/network/netdev/netdev.c
-index 8e7fe11c18..701ab2bd69 100644
---- a/src/network/netdev/netdev.c
-+++ b/src/network/netdev/netdev.c
-@@ -2,7 +2,7 @@
-
- #include <net/if.h>
- #include <netinet/in.h>
--#include <linux/if_arp.h>
-+//#include <linux/if_arp.h>
- #include <unistd.h>
-
- #include "alloc-util.h"
-diff --git a/src/network/netdev/netdevsim.c b/src/network/netdev/netdevsim.c
-index 15d5c132f9..a3ffa48b15 100644
---- a/src/network/netdev/netdevsim.c
-+++ b/src/network/netdev/netdevsim.c
-@@ -1,6 +1,6 @@
- /* SPDX-License-Identifier: LGPL-2.1-or-later */
-
--#include <linux/if_arp.h>
-+//#include <linux/if_arp.h>
-
- #include "netdevsim.h"
-
-diff --git a/src/network/netdev/nlmon.c b/src/network/netdev/nlmon.c
-index ff372092e6..eef66811f4 100644
---- a/src/network/netdev/nlmon.c
-+++ b/src/network/netdev/nlmon.c
-@@ -1,6 +1,6 @@
- /* SPDX-License-Identifier: LGPL-2.1-or-later */
-
--#include <linux/if_arp.h>
-+//#include <linux/if_arp.h>
-
- #include "nlmon.h"
-
-diff --git a/src/network/netdev/tunnel.c b/src/network/netdev/tunnel.c
-index 97e534fe99..0302c1cb94 100644
---- a/src/network/netdev/tunnel.c
-+++ b/src/network/netdev/tunnel.c
-@@ -2,7 +2,7 @@
-
- #include <netinet/in.h>
- #include <linux/fou.h>
--#include <linux/if_arp.h>
-+//#include <linux/if_arp.h>
- #include <linux/if_tunnel.h>
- #include <linux/ip.h>
- #include <linux/ip6_tunnel.h>
-diff --git a/src/network/netdev/vcan.c b/src/network/netdev/vcan.c
-index 380547ee1e..137c1adf8a 100644
---- a/src/network/netdev/vcan.c
-+++ b/src/network/netdev/vcan.c
-@@ -1,6 +1,6 @@
- /* SPDX-License-Identifier: LGPL-2.1-or-later */
-
--#include <linux/if_arp.h>
-+//#include <linux/if_arp.h>
-
- #include "vcan.h"
-
-diff --git a/src/network/netdev/veth.c b/src/network/netdev/veth.c
-index c946e81fc0..d1a6be73f9 100644
---- a/src/network/netdev/veth.c
-+++ b/src/network/netdev/veth.c
-@@ -3,7 +3,7 @@
- #include <errno.h>
- #include <net/if.h>
- #include <netinet/in.h>
--#include <linux/if_arp.h>
-+//#include <linux/if_arp.h>
- #include <linux/veth.h>
-
- #include "netlink-util.h"
-diff --git a/src/network/netdev/vlan.c b/src/network/netdev/vlan.c
-index af3e77963e..efa4b0a164 100644
---- a/src/network/netdev/vlan.c
-+++ b/src/network/netdev/vlan.c
-@@ -2,7 +2,7 @@
-
- #include <errno.h>
- #include <net/if.h>
--#include <linux/if_arp.h>
-+//#include <linux/if_arp.h>
- #include <linux/if_vlan.h>
-
- #include "parse-util.h"
-diff --git a/src/network/netdev/vrf.c b/src/network/netdev/vrf.c
-index b1b6707441..1c6d1982e1 100644
---- a/src/network/netdev/vrf.c
-+++ b/src/network/netdev/vrf.c
-@@ -2,7 +2,7 @@
-
- #include <net/if.h>
- #include <netinet/in.h>
--#include <linux/if_arp.h>
-+//#include <linux/if_arp.h>
-
- #include "vrf.h"
-
-diff --git a/src/network/netdev/vxcan.c b/src/network/netdev/vxcan.c
-index a0ba048eb1..875f2e5901 100644
---- a/src/network/netdev/vxcan.c
-+++ b/src/network/netdev/vxcan.c
-@@ -1,7 +1,7 @@
- /* SPDX-License-Identifier: LGPL-2.1-or-later */
-
- #include <linux/can/vxcan.h>
--#include <linux/if_arp.h>
-+//#include <linux/if_arp.h>
-
- #include "vxcan.h"
-
-diff --git a/src/network/netdev/vxlan.c b/src/network/netdev/vxlan.c
-index 30b0855598..a065158801 100644
---- a/src/network/netdev/vxlan.c
-+++ b/src/network/netdev/vxlan.c
-@@ -2,7 +2,7 @@
-
- #include <net/if.h>
- #include <netinet/in.h>
--#include <linux/if_arp.h>
-+//#include <linux/if_arp.h>
-
- #include "conf-parser.h"
- #include "alloc-util.h"
-diff --git a/src/network/netdev/wireguard.c b/src/network/netdev/wireguard.c
-index 88f668753a..5fc753384b 100644
---- a/src/network/netdev/wireguard.c
-+++ b/src/network/netdev/wireguard.c
-@@ -6,7 +6,7 @@
- #include <sys/ioctl.h>
- #include <net/if.h>
- #include <netinet/in.h>
--#include <linux/if_arp.h>
-+//#include <linux/if_arp.h>
- #include <linux/ipv6_route.h>
-
- #include "sd-resolve.h"
-diff --git a/src/network/netdev/xfrm.c b/src/network/netdev/xfrm.c
-index ef5e735b2b..419afd75f2 100644
---- a/src/network/netdev/xfrm.c
-+++ b/src/network/netdev/xfrm.c
-@@ -1,6 +1,6 @@
- /* SPDX-License-Identifier: LGPL-2.1-or-later */
-
--#include <linux/if_arp.h>
-+//#include <linux/if_arp.h>
-
- #include "missing_network.h"
- #include "xfrm.h"
-diff --git a/src/network/networkd-bridge-mdb.c b/src/network/networkd-bridge-mdb.c
-index 10025a97ae..a0239ea83a 100644
---- a/src/network/networkd-bridge-mdb.c
-+++ b/src/network/networkd-bridge-mdb.c
-@@ -1,7 +1,5 @@
- /* SPDX-License-Identifier: LGPL-2.1-or-later */
-
--#include <net/if.h>
--#include <linux/if_bridge.h>
-
- #include "netlink-util.h"
- #include "networkd-bridge-mdb.h"
-@@ -11,6 +9,8 @@
- #include "networkd-queue.h"
- #include "string-util.h"
- #include "vlan-util.h"
-+#include <net/if.h>
-+#include <linux/if_bridge.h>
-
- #define STATIC_BRIDGE_MDB_ENTRIES_PER_NETWORK_MAX 1024U
-
-diff --git a/src/network/networkd-dhcp-common.c b/src/network/networkd-dhcp-common.c
-index 7996960bd1..e870b9ba26 100644
---- a/src/network/networkd-dhcp-common.c
-+++ b/src/network/networkd-dhcp-common.c
-@@ -1,7 +1,8 @@
- /* SPDX-License-Identifier: LGPL-2.1-or-later */
-
- #include <netinet/in.h>
--#include <linux/if_arp.h>
-+//#include <linux/if_arp.h>
-+#include <net/if.h>
-
- #include "bus-error.h"
- #include "dhcp-identifier.h"
-diff --git a/src/network/networkd-dhcp-prefix-delegation.c b/src/network/networkd-dhcp-prefix-delegation.c
-index 7be9713d46..e830fcd575 100644
---- a/src/network/networkd-dhcp-prefix-delegation.c
-+++ b/src/network/networkd-dhcp-prefix-delegation.c
-@@ -1,7 +1,5 @@
- /* SPDX-License-Identifier: LGPL-2.1-or-later */
-
--#include <linux/ipv6_route.h>
--
- #include "sd-dhcp6-client.h"
-
- #include "hashmap.h"
-@@ -21,6 +19,8 @@
- #include "strv.h"
- #include "tunnel.h"
-
-+#include <linux/ipv6_route.h>
-+
- bool link_dhcp_pd_is_enabled(Link *link) {
- assert(link);
-
-diff --git a/src/network/networkd-dhcp-server.c b/src/network/networkd-dhcp-server.c
-index 9acfd17d49..3108289602 100644
---- a/src/network/networkd-dhcp-server.c
-+++ b/src/network/networkd-dhcp-server.c
-@@ -1,7 +1,7 @@
- /* SPDX-License-Identifier: LGPL-2.1-or-later */
-
- #include <netinet/in.h>
--#include <linux/if_arp.h>
-+//#include <linux/if_arp.h>
- #include <linux/if.h>
-
- #include "sd-dhcp-server.h"
-diff --git a/src/network/networkd-dhcp4.c b/src/network/networkd-dhcp4.c
-index cb9c428ae9..a35d58f3f1 100644
---- a/src/network/networkd-dhcp4.c
-+++ b/src/network/networkd-dhcp4.c
-@@ -3,7 +3,7 @@
- #include <netinet/in.h>
- #include <netinet/ip.h>
- #include <linux/if.h>
--#include <linux/if_arp.h>
-+//#include <linux/if_arp.h>
-
- #include "alloc-util.h"
- #include "dhcp-client-internal.h"
-diff --git a/src/network/networkd-link.c b/src/network/networkd-link.c
-index b62a154828..75949e6094 100644
---- a/src/network/networkd-link.c
-+++ b/src/network/networkd-link.c
-@@ -3,7 +3,7 @@
- #include <net/if.h>
- #include <netinet/in.h>
- #include <linux/if.h>
--#include <linux/if_arp.h>
-+//#include <linux/if_arp.h>
- #include <linux/if_link.h>
- #include <linux/netdevice.h>
- #include <sys/socket.h>
-diff --git a/src/network/networkd-route.c b/src/network/networkd-route.c
-index ee7a535075..ce6ed64133 100644
---- a/src/network/networkd-route.c
-+++ b/src/network/networkd-route.c
-@@ -1,9 +1,5 @@
- /* SPDX-License-Identifier: LGPL-2.1-or-later */
-
--#include <linux/icmpv6.h>
--#include <linux/ipv6_route.h>
--#include <linux/nexthop.h>
--
- #include "alloc-util.h"
- #include "event-util.h"
- #include "netlink-util.h"
-@@ -21,6 +17,10 @@
- #include "vrf.h"
- #include "wireguard.h"
-
-+#include <linux/icmpv6.h>
-+#include <linux/ipv6_route.h>
-+#include <linux/nexthop.h>
-+
- int route_new(Route **ret) {
- _cleanup_(route_freep) Route *route = NULL;
-
-diff --git a/src/network/networkd-setlink.c b/src/network/networkd-setlink.c
-index e00cc1e589..e392c7e1a2 100644
---- a/src/network/networkd-setlink.c
-+++ b/src/network/networkd-setlink.c
-@@ -2,7 +2,7 @@
-
- #include <netinet/in.h>
- #include <linux/if.h>
--#include <linux/if_arp.h>
-+//#include <linux/if_arp.h>
- #include <linux/if_bridge.h>
-
- #include "missing_network.h"
-diff --git a/src/shared/linux/ethtool.h b/src/shared/linux/ethtool.h
-index 974d4292e7..fe9b8a9e07 100644
---- a/src/shared/linux/ethtool.h
-+++ b/src/shared/linux/ethtool.h
-@@ -16,7 +16,8 @@
-
- #include <linux/kernel.h>
- #include <linux/types.h>
--#include <linux/if_ether.h>
-+#include <netinet/if_ether.h>
-+//#include <linux/if_ether.h>
-
- #ifndef __KERNEL__
- #include <limits.h> /* for INT_MAX */
-diff --git a/src/shared/netif-util.c b/src/shared/netif-util.c
-index 603d4de109..7e3531808a 100644
---- a/src/shared/netif-util.c
-+++ b/src/shared/netif-util.c
-@@ -1,6 +1,6 @@
- /* SPDX-License-Identifier: LGPL-2.1-or-later */
-
--#include <linux/if_arp.h>
-+//#include <linux/if_arp.h>
-
- #include "arphrd-util.h"
- #include "device-util.h"
-diff --git a/src/udev/udev-builtin-net_id.c b/src/udev/udev-builtin-net_id.c
-index 65e003eb15..0b3dc04be0 100644
---- a/src/udev/udev-builtin-net_id.c
-+++ b/src/udev/udev-builtin-net_id.c
-@@ -18,7 +18,7 @@
- #include <stdarg.h>
- #include <unistd.h>
- #include <linux/if.h>
--#include <linux/if_arp.h>
-+//#include <linux/if_arp.h>
- #include <linux/netdevice.h>
- #include <linux/pci_regs.h>
-
diff --git a/meta/recipes-core/systemd/systemd/0001-binfmt-Don-t-install-dependency-links-at-install-tim.patch b/meta/recipes-core/systemd/systemd/0001-binfmt-Don-t-install-dependency-links-at-install-tim.patch
deleted file mode 100644
index 330ad492ba..0000000000
--- a/meta/recipes-core/systemd/systemd/0001-binfmt-Don-t-install-dependency-links-at-install-tim.patch
+++ /dev/null
@@ -1,81 +0,0 @@
-From f9974d7dc289551bfbf823b716fd32b43c54e465 Mon Sep 17 00:00:00 2001
-From: Chen Qi <Qi.Chen@windriver.com>
-Date: Thu, 21 Feb 2019 16:23:24 +0800
-Subject: [PATCH] binfmt: Don't install dependency links at install time for
- the binfmt services
-
-use [Install] blocks so that they get created when the service is enabled
-like a traditional service.
-
-The [Install] blocks were rejected upstream as they don't have a way to
-"enable" it on install without static symlinks which can't be disabled,
-only masked. We however can do that in a postinst.
-
-Upstream-Status: Denied
-
-Signed-off-by: Ross Burton <ross.burton@intel.com>
-Signed-off-by: Khem Raj <raj.khem@gmail.com>
-Signed-off-by: Chen Qi <Qi.Chen@windriver.com>
-[rebased for systemd 243]
-Signed-off-by: Scott Murray <scott.murray@konsulko.com>
-
----
- units/meson.build | 6 ++----
- units/proc-sys-fs-binfmt_misc.automount | 3 +++
- units/systemd-binfmt.service.in | 4 ++++
- 3 files changed, 9 insertions(+), 4 deletions(-)
-
-diff --git a/units/meson.build b/units/meson.build
-index a9bf28f6d9..11d3644168 100644
---- a/units/meson.build
-+++ b/units/meson.build
-@@ -63,8 +63,7 @@ units = [
- ['poweroff.target', '',
- (with_runlevels ? 'runlevel0.target' : '')],
- ['printer.target', ''],
-- ['proc-sys-fs-binfmt_misc.automount', 'ENABLE_BINFMT',
-- 'sysinit.target.wants/'],
-+ ['proc-sys-fs-binfmt_misc.automount', 'ENABLE_BINFMT'],
- ['proc-sys-fs-binfmt_misc.mount', 'ENABLE_BINFMT'],
- ['reboot.target', '',
- 'ctrl-alt-del.target' + (with_runlevels ? ' runlevel6.target' : '')],
-@@ -184,8 +183,7 @@ in_units = [
- ['rescue.service', ''],
- ['serial-getty@.service', ''],
- ['systemd-backlight@.service', 'ENABLE_BACKLIGHT'],
-- ['systemd-binfmt.service', 'ENABLE_BINFMT',
-- 'sysinit.target.wants/'],
-+ ['systemd-binfmt.service', 'ENABLE_BINFMT'],
- ['systemd-bless-boot.service', 'HAVE_GNU_EFI HAVE_BLKID'],
- ['systemd-boot-check-no-failures.service', ''],
- ['systemd-coredump@.service', 'ENABLE_COREDUMP'],
-diff --git a/units/proc-sys-fs-binfmt_misc.automount b/units/proc-sys-fs-binfmt_misc.automount
-index 172c8757ab..f65d8930c6 100644
---- a/units/proc-sys-fs-binfmt_misc.automount
-+++ b/units/proc-sys-fs-binfmt_misc.automount
-@@ -19,3 +19,6 @@ ConditionPathIsReadWrite=/proc/sys/
-
- [Automount]
- Where=/proc/sys/fs/binfmt_misc
-+
-+[Install]
-+WantedBy=sysinit.target
-diff --git a/units/systemd-binfmt.service.in b/units/systemd-binfmt.service.in
-index 96f595ad72..7c010bb224 100644
---- a/units/systemd-binfmt.service.in
-+++ b/units/systemd-binfmt.service.in
-@@ -14,6 +14,7 @@ Documentation=https://www.kernel.org/doc/html/latest/admin-guide/binfmt-misc.htm
- Documentation=https://www.freedesktop.org/wiki/Software/systemd/APIFileSystems
- DefaultDependencies=no
- Conflicts=shutdown.target
-+Wants=proc-sys-fs-binfmt_misc.automount
- After=proc-sys-fs-binfmt_misc.automount
- After=proc-sys-fs-binfmt_misc.mount
- After=local-fs.target
-@@ -31,3 +32,6 @@ RemainAfterExit=yes
- ExecStart={{ROOTLIBEXECDIR}}/systemd-binfmt
- ExecStop={{ROOTLIBEXECDIR}}/systemd-binfmt --unregister
- TimeoutSec=90s
-+
-+[Install]
-+WantedBy=sysinit.target
diff --git a/meta/recipes-core/systemd/systemd/0001-missing_type.h-add-comparison_fn_t.patch b/meta/recipes-core/systemd/systemd/0001-missing_type.h-add-comparison_fn_t.patch
new file mode 100644
index 0000000000..2aa5dee6b5
--- /dev/null
+++ b/meta/recipes-core/systemd/systemd/0001-missing_type.h-add-comparison_fn_t.patch
@@ -0,0 +1,61 @@
+From 01195eb9f7d59139fb45df506ac6b3968c14a57f Mon Sep 17 00:00:00 2001
+From: Chen Qi <Qi.Chen@windriver.com>
+Date: Mon, 25 Feb 2019 13:55:12 +0800
+Subject: [PATCH 01/22] missing_type.h: add comparison_fn_t
+
+Make it work with musl where comparison_fn_t and is not provided.
+
+Upstream-Status: Inappropriate [musl specific]
+
+Signed-off-by: Alex Kiernan <alex.kiernan@gmail.com>
+[Rebased for v244]
+Signed-off-by: Chen Qi <Qi.Chen@windriver.com>
+[Rebased for v242]
+Signed-off-by: Andrej Valek <andrej.valek@siemens.com>
+[Rebased for v250, Drop __compare_fn_t]
+Signed-off-by: Jiaqing Zhao <jiaqing.zhao@linux.intel.com>
+---
+ src/basic/missing_type.h | 4 ++++
+ src/basic/sort-util.h | 1 +
+ src/libsystemd/sd-journal/catalog.c | 1 +
+ 3 files changed, 6 insertions(+)
+
+diff --git a/src/basic/missing_type.h b/src/basic/missing_type.h
+index f6233090a9..6c0456349d 100644
+--- a/src/basic/missing_type.h
++++ b/src/basic/missing_type.h
+@@ -10,3 +10,7 @@
+ #if !HAVE_CHAR16_T
+ #define char16_t uint16_t
+ #endif
++
++#ifndef __GLIBC__
++typedef int (*comparison_fn_t)(const void *, const void *);
++#endif
+diff --git a/src/basic/sort-util.h b/src/basic/sort-util.h
+index 9c818bd747..ef10c8be2c 100644
+--- a/src/basic/sort-util.h
++++ b/src/basic/sort-util.h
+@@ -4,6 +4,7 @@
+ #include <stdlib.h>
+
+ #include "macro.h"
++#include "missing_type.h"
+
+ /* This is the same as glibc's internal __compar_d_fn_t type. glibc exports a public comparison_fn_t, for the
+ * external type __compar_fn_t, but doesn't do anything similar for __compar_d_fn_t. Let's hence do that
+diff --git a/src/libsystemd/sd-journal/catalog.c b/src/libsystemd/sd-journal/catalog.c
+index ae91534198..7f67eea38b 100644
+--- a/src/libsystemd/sd-journal/catalog.c
++++ b/src/libsystemd/sd-journal/catalog.c
+@@ -28,6 +28,7 @@
+ #include "string-util.h"
+ #include "strv.h"
+ #include "tmpfile-util.h"
++#include "missing_type.h"
+
+ const char * const catalog_file_dirs[] = {
+ "/usr/local/lib/systemd/catalog/",
+--
+2.34.1
+
diff --git a/meta/recipes-core/systemd/systemd/0001-pass-correct-parameters-to-getdents64.patch b/meta/recipes-core/systemd/systemd/0001-pass-correct-parameters-to-getdents64.patch
deleted file mode 100644
index 028f50b243..0000000000
--- a/meta/recipes-core/systemd/systemd/0001-pass-correct-parameters-to-getdents64.patch
+++ /dev/null
@@ -1,48 +0,0 @@
-From 8c8899b4641125cfe8e7baee32e5c5f452545d2c Mon Sep 17 00:00:00 2001
-From: Khem Raj <raj.khem@gmail.com>
-Date: Fri, 21 Jan 2022 15:15:11 -0800
-Subject: [PATCH] pass correct parameters to getdents64
-
-Fixes
-../git/src/basic/recurse-dir.c:57:40: error: incompatible pointer types passing 'uint8_t *' (aka 'unsigned char *') to parameter of type 'struct dirent *' [-Werror,-Wincompatible-pointer-types]
- n = getdents64(dir_fd, (uint8_t*) de->buffer + de->buffer_size, bs - de->buffer_size);
- ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-../git/src/basic/stat-util.c:102:28: error: incompatible pointer types passing 'union (unnamed union at ../git/src/basic/stat-util.c:78:9) *' to parameter of type 'struct dirent *' [-Werror,-Wincompatible-pointer-types]
- n = getdents64(fd, &buffer, sizeof(buffer));
- ^~~~~~~
-
-Upstream-Status: Pending
-Signed-off-by: Khem Raj <raj.khem@gmail.com>
-
----
- src/basic/recurse-dir.c | 2 +-
- src/basic/stat-util.c | 2 +-
- 2 files changed, 2 insertions(+), 2 deletions(-)
-
-diff --git a/src/basic/recurse-dir.c b/src/basic/recurse-dir.c
-index efa1797b7b..797285e3be 100644
---- a/src/basic/recurse-dir.c
-+++ b/src/basic/recurse-dir.c
-@@ -54,7 +54,7 @@ int readdir_all(int dir_fd,
- bs = MIN(MALLOC_SIZEOF_SAFE(de) - offsetof(DirectoryEntries, buffer), (size_t) SSIZE_MAX);
- assert(bs > de->buffer_size);
-
-- n = getdents64(dir_fd, (uint8_t*) de->buffer + de->buffer_size, bs - de->buffer_size);
-+ n = getdents64(dir_fd, de->buffer + de->buffer_size, bs - de->buffer_size);
- if (n < 0)
- return -errno;
- if (n == 0)
-diff --git a/src/basic/stat-util.c b/src/basic/stat-util.c
-index c2269844f8..7cd6c7fa42 100644
---- a/src/basic/stat-util.c
-+++ b/src/basic/stat-util.c
-@@ -99,7 +99,7 @@ int dir_is_empty_at(int dir_fd, const char *path) {
- return fd;
- }
-
-- n = getdents64(fd, &buffer, sizeof(buffer));
-+ n = getdents64(fd, (struct dirent *)&buffer, sizeof(buffer));
- if (n < 0)
- return -errno;
-
diff --git a/meta/recipes-core/systemd/systemd/0001-resolve-Use-sockaddr-pointer-type-for-bind.patch b/meta/recipes-core/systemd/systemd/0001-resolve-Use-sockaddr-pointer-type-for-bind.patch
deleted file mode 100644
index 8567283537..0000000000
--- a/meta/recipes-core/systemd/systemd/0001-resolve-Use-sockaddr-pointer-type-for-bind.patch
+++ /dev/null
@@ -1,46 +0,0 @@
-From ad1428f29196bcc88ae382ee67ff705928e2be24 Mon Sep 17 00:00:00 2001
-From: Khem Raj <raj.khem@gmail.com>
-Date: Thu, 5 May 2022 20:25:37 -0700
-Subject: [PATCH] resolve: Use sockaddr pointer type for bind()
-
-bind() expects sockaddr* but SERVER_ADDRESS is sockaddr_in type struct
-
-Fixes errors with clang e.g.
-
-../git/src/resolve/test-resolved-stream.c:112:32: error: incompatible pointer types passing 'struct sockaddr_in *' to parameter of type 'const struct sockaddr *' [-Werror,-Wincompatible-pointer-types]
- assert_se(bind(bindfd, &SERVER_ADDRESS, sizeof(SERVER_ADDRESS)) >= 0);
- ^~~~~~~~~~~~~~~
-../git/src/resolve/test-resolved-stream.c:251:39: error: incompatible pointer types passing 'struct sockaddr_in *' to parameter of type 'const struct sockaddr *' [-Werror,-Wincompatible-pointer-types]
- r = connect(clientfd, &SERVER_ADDRESS, sizeof(SERVER_ADDRESS));
-
-Upstream-Status: Submitted [https://github.com/systemd/systemd/pull/23281]
-Signed-off-by: Khem Raj <raj.khem@gmail.com>
----
- src/resolve/test-resolved-stream.c | 4 ++--
- 1 file changed, 2 insertions(+), 2 deletions(-)
-
-diff --git a/src/resolve/test-resolved-stream.c b/src/resolve/test-resolved-stream.c
-index f12c729e50..504b532002 100644
---- a/src/resolve/test-resolved-stream.c
-+++ b/src/resolve/test-resolved-stream.c
-@@ -109,7 +109,7 @@ static void *tcp_dns_server(void *p) {
-
- assert_se((bindfd = socket(AF_INET, SOCK_STREAM | SOCK_CLOEXEC, 0)) >= 0);
- assert_se(setsockopt(bindfd, SOL_SOCKET, SO_REUSEADDR, &(int){1}, sizeof(int)) >= 0);
-- assert_se(bind(bindfd, &SERVER_ADDRESS, sizeof(SERVER_ADDRESS)) >= 0);
-+ assert_se(bind(bindfd, (struct sockaddr*)&SERVER_ADDRESS, sizeof(SERVER_ADDRESS)) >= 0);
- assert_se(listen(bindfd, 1) >= 0);
- assert_se((acceptfd = accept(bindfd, NULL, NULL)) >= 0);
- server_handle(acceptfd);
-@@ -248,7 +248,7 @@ static void test_dns_stream(bool tls) {
- assert_se((clientfd = socket(AF_INET, SOCK_STREAM | SOCK_CLOEXEC, 0)) >= 0);
-
- for (int i = 0; i < 100; i++) {
-- r = connect(clientfd, &SERVER_ADDRESS, sizeof(SERVER_ADDRESS));
-+ r = connect(clientfd, (struct sockaddr*)&SERVER_ADDRESS, sizeof(SERVER_ADDRESS));
- if (r >= 0)
- break;
- usleep(EVENT_TIMEOUT_USEC / 100);
---
-2.36.0
-
diff --git a/meta/recipes-core/systemd/systemd/0001-systemd.pc.in-use-ROOTPREFIX-without-suffixed-slash.patch b/meta/recipes-core/systemd/systemd/0001-systemd.pc.in-use-ROOTPREFIX-without-suffixed-slash.patch
deleted file mode 100644
index 752824688f..0000000000
--- a/meta/recipes-core/systemd/systemd/0001-systemd.pc.in-use-ROOTPREFIX-without-suffixed-slash.patch
+++ /dev/null
@@ -1,42 +0,0 @@
-From 58860e0f248576a80ff2af256ba42713c186ae93 Mon Sep 17 00:00:00 2001
-From: Khem Raj <raj.khem@gmail.com>
-Date: Tue, 29 Sep 2020 18:01:41 -0700
-Subject: [PATCH] systemd.pc.in: use ROOTPREFIX without suffixed slash
-
-This complements the commit
-https://github.com/poettering/systemd/commit/b612c26ceb9f56af0271fc9f07c1724d2d260a8a
-
-Upstream-Status: Pending
-Signed-off-by: Khem Raj <raj.khem@gmail.com>
----
- src/core/systemd.pc.in | 8 ++++----
- 1 file changed, 4 insertions(+), 4 deletions(-)
-
-diff --git a/src/core/systemd.pc.in b/src/core/systemd.pc.in
-index b5cc8f9..21dbf30 100644
---- a/src/core/systemd.pc.in
-+++ b/src/core/systemd.pc.in
-@@ -65,16 +65,16 @@ systemdshutdowndir=${systemd_shutdown_dir}
- tmpfiles_dir=${prefix}/lib/tmpfiles.d
- tmpfilesdir=${tmpfiles_dir}
-
--sysusers_dir=${rootprefix}/lib/sysusers.d
-+sysusers_dir=${prefix}/lib/sysusers.d
- sysusersdir=${sysusers_dir}
-
--sysctl_dir=${rootprefix}/lib/sysctl.d
-+sysctl_dir=${prefix}/lib/sysctl.d
- sysctldir=${sysctl_dir}
-
--binfmt_dir=${rootprefix}/lib/binfmt.d
-+binfmt_dir=${prefix}/lib/binfmt.d
- binfmtdir=${binfmt_dir}
-
--modules_load_dir=${rootprefix}/lib/modules-load.d
-+modules_load_dir=${prefix}/lib/modules-load.d
- modulesloaddir=${modules_load_dir}
-
- catalog_dir=${prefix}/lib/systemd/catalog
---
-2.25.1
-
diff --git a/meta/recipes-core/systemd/systemd/0001-test-parse-argument-Include-signal.h.patch b/meta/recipes-core/systemd/systemd/0001-test-parse-argument-Include-signal.h.patch
deleted file mode 100644
index 898e8af904..0000000000
--- a/meta/recipes-core/systemd/systemd/0001-test-parse-argument-Include-signal.h.patch
+++ /dev/null
@@ -1,27 +0,0 @@
-From 93c7e482d39cf7765974b3c729d29c1b120a60e3 Mon Sep 17 00:00:00 2001
-From: Khem Raj <raj.khem@gmail.com>
-Date: Mon, 24 May 2021 18:26:27 -0700
-Subject: [PATCH] test-parse-argument: Include signal.h
-
-Fixes
-src/test/test-parse-argument.c:49:29: error: use of undeclared identifier 'SIGABRT'
-
-Upstream-Status: Submitted [https://github.com/systemd/systemd/pull/19718]
-Signed-off-by: Khem Raj <raj.khem@gmail.com>
-
----
- src/test/test-parse-argument.c | 1 +
- 1 file changed, 1 insertion(+)
-
-diff --git a/src/test/test-parse-argument.c b/src/test/test-parse-argument.c
-index cf3d54288a..15104e5282 100644
---- a/src/test/test-parse-argument.c
-+++ b/src/test/test-parse-argument.c
-@@ -5,6 +5,7 @@
- #include "parse-argument.h"
- #include "stdio-util.h"
- #include "tests.h"
-+#include <signal.h>
-
- TEST(parse_json_argument) {
- JsonFormatFlags flags = JSON_FORMAT_PRETTY;
diff --git a/meta/recipes-core/systemd/systemd/0002-Add-sys-stat.h-for-S_IFDIR.patch b/meta/recipes-core/systemd/systemd/0002-Add-sys-stat.h-for-S_IFDIR.patch
deleted file mode 100644
index 8cf0546450..0000000000
--- a/meta/recipes-core/systemd/systemd/0002-Add-sys-stat.h-for-S_IFDIR.patch
+++ /dev/null
@@ -1,27 +0,0 @@
-From 4b731a5e2547b5292f9a774b849e14c0cf7b3955 Mon Sep 17 00:00:00 2001
-From: Khem Raj <raj.khem@gmail.com>
-Date: Fri, 21 Jan 2022 15:17:37 -0800
-Subject: [PATCH] Add sys/stat.h for S_IFDIR
-
-../git/src/shared/mkdir-label.c:13:61: error: use of undeclared identifier 'S_IFDIR'
- r = mac_selinux_create_file_prepare_at(dirfd, path, S_IFDIR);
-
-Upstream-Status: Submitted [https://github.com/systemd/systemd/pull/23441]
-Signed-off-by: Khem Raj <raj.khem@gmail.com>
-
----
- src/shared/mkdir-label.c | 1 +
- 1 file changed, 1 insertion(+)
-
-diff --git a/src/shared/mkdir-label.c b/src/shared/mkdir-label.c
-index d36a6466d7..63b764cd83 100644
---- a/src/shared/mkdir-label.c
-+++ b/src/shared/mkdir-label.c
-@@ -4,6 +4,7 @@
- #include "selinux-util.h"
- #include "smack-util.h"
- #include "user-util.h"
-+#include <sys/stat.h>
-
- int mkdirat_label(int dirfd, const char *path, mode_t mode) {
- int r;
diff --git a/meta/recipes-core/systemd/systemd/0002-add-fallback-parse_printf_format-implementation.patch b/meta/recipes-core/systemd/systemd/0002-add-fallback-parse_printf_format-implementation.patch
new file mode 100644
index 0000000000..900a931632
--- /dev/null
+++ b/meta/recipes-core/systemd/systemd/0002-add-fallback-parse_printf_format-implementation.patch
@@ -0,0 +1,434 @@
+From 872b72739e62123867ce6c4f82aa37de24cc3f75 Mon Sep 17 00:00:00 2001
+From: Alexander Kanavin <alex.kanavin@gmail.com>
+Date: Sat, 22 May 2021 20:26:24 +0200
+Subject: [PATCH 02/22] add fallback parse_printf_format implementation
+
+Upstream-Status: Inappropriate [musl specific]
+
+Signed-off-by: Emil Renner Berthing <systemd@esmil.dk>
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+Signed-off-by: Chen Qi <Qi.Chen@windriver.com>
+[rebased for systemd 243]
+Signed-off-by: Scott Murray <scott.murray@konsulko.com>
+---
+ meson.build | 1 +
+ src/basic/meson.build | 5 +
+ src/basic/parse-printf-format.c | 273 +++++++++++++++++++++++
+ src/basic/parse-printf-format.h | 57 +++++
+ src/basic/stdio-util.h | 2 +-
+ src/libsystemd/sd-journal/journal-send.c | 2 +-
+ 6 files changed, 338 insertions(+), 2 deletions(-)
+ create mode 100644 src/basic/parse-printf-format.c
+ create mode 100644 src/basic/parse-printf-format.h
+
+diff --git a/meson.build b/meson.build
+index 7419e2b0b0..01fd3ffc19 100644
+--- a/meson.build
++++ b/meson.build
+@@ -725,6 +725,7 @@ endif
+ foreach header : ['crypt.h',
+ 'linux/memfd.h',
+ 'linux/vm_sockets.h',
++ 'printf.h',
+ 'sys/auxv.h',
+ 'threads.h',
+ 'valgrind/memcheck.h',
+diff --git a/src/basic/meson.build b/src/basic/meson.build
+index d7450d8b44..c3e3daf4bd 100644
+--- a/src/basic/meson.build
++++ b/src/basic/meson.build
+@@ -183,6 +183,11 @@ endforeach
+
+ basic_sources += generated_gperf_headers
+
++if conf.get('HAVE_PRINTF_H') != 1
++ basic_sources += [files('parse-printf-format.c')]
++endif
++
++
+ ############################################################
+
+ arch_list = [
+diff --git a/src/basic/parse-printf-format.c b/src/basic/parse-printf-format.c
+new file mode 100644
+index 0000000000..49437e5445
+--- /dev/null
++++ b/src/basic/parse-printf-format.c
+@@ -0,0 +1,273 @@
++/*-*- Mode: C; c-basic-offset: 8; indent-tabs-mode: nil -*-*/
++
++/***
++ This file is part of systemd.
++
++ Copyright 2014 Emil Renner Berthing <systemd@esmil.dk>
++
++ With parts from the musl C library
++ Copyright 2005-2014 Rich Felker, et al.
++
++ systemd is free software; you can redistribute it and/or modify it
++ under the terms of the GNU Lesser General Public License as published by
++ the Free Software Foundation; either version 2.1 of the License, or
++ (at your option) any later version.
++
++ systemd is distributed in the hope that it will be useful, but
++ WITHOUT ANY WARRANTY; without even the implied warranty of
++ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
++ Lesser General Public License for more details.
++
++ You should have received a copy of the GNU Lesser General Public License
++ along with systemd; If not, see <http://www.gnu.org/licenses/>.
++***/
++
++#include <stddef.h>
++#include <string.h>
++
++#include "parse-printf-format.h"
++
++static const char *consume_nonarg(const char *fmt)
++{
++ do {
++ if (*fmt == '\0')
++ return fmt;
++ } while (*fmt++ != '%');
++ return fmt;
++}
++
++static const char *consume_num(const char *fmt)
++{
++ for (;*fmt >= '0' && *fmt <= '9'; fmt++)
++ /* do nothing */;
++ return fmt;
++}
++
++static const char *consume_argn(const char *fmt, size_t *arg)
++{
++ const char *p = fmt;
++ size_t val = 0;
++
++ if (*p < '1' || *p > '9')
++ return fmt;
++ do {
++ val = 10*val + (*p++ - '0');
++ } while (*p >= '0' && *p <= '9');
++
++ if (*p != '$')
++ return fmt;
++ *arg = val;
++ return p+1;
++}
++
++static const char *consume_flags(const char *fmt)
++{
++ while (1) {
++ switch (*fmt) {
++ case '#':
++ case '0':
++ case '-':
++ case ' ':
++ case '+':
++ case '\'':
++ case 'I':
++ fmt++;
++ continue;
++ }
++ return fmt;
++ }
++}
++
++enum state {
++ BARE,
++ LPRE,
++ LLPRE,
++ HPRE,
++ HHPRE,
++ BIGLPRE,
++ ZTPRE,
++ JPRE,
++ STOP
++};
++
++enum type {
++ NONE,
++ PTR,
++ INT,
++ UINT,
++ ULLONG,
++ LONG,
++ ULONG,
++ SHORT,
++ USHORT,
++ CHAR,
++ UCHAR,
++ LLONG,
++ SIZET,
++ IMAX,
++ UMAX,
++ PDIFF,
++ UIPTR,
++ DBL,
++ LDBL,
++ MAXTYPE
++};
++
++static const short pa_types[MAXTYPE] = {
++ [NONE] = PA_INT,
++ [PTR] = PA_POINTER,
++ [INT] = PA_INT,
++ [UINT] = PA_INT,
++ [ULLONG] = PA_INT | PA_FLAG_LONG_LONG,
++ [LONG] = PA_INT | PA_FLAG_LONG,
++ [ULONG] = PA_INT | PA_FLAG_LONG,
++ [SHORT] = PA_INT | PA_FLAG_SHORT,
++ [USHORT] = PA_INT | PA_FLAG_SHORT,
++ [CHAR] = PA_CHAR,
++ [UCHAR] = PA_CHAR,
++ [LLONG] = PA_INT | PA_FLAG_LONG_LONG,
++ [SIZET] = PA_INT | PA_FLAG_LONG,
++ [IMAX] = PA_INT | PA_FLAG_LONG_LONG,
++ [UMAX] = PA_INT | PA_FLAG_LONG_LONG,
++ [PDIFF] = PA_INT | PA_FLAG_LONG_LONG,
++ [UIPTR] = PA_INT | PA_FLAG_LONG,
++ [DBL] = PA_DOUBLE,
++ [LDBL] = PA_DOUBLE | PA_FLAG_LONG_DOUBLE
++};
++
++#define S(x) [(x)-'A']
++#define E(x) (STOP + (x))
++
++static const unsigned char states[]['z'-'A'+1] = {
++ { /* 0: bare types */
++ S('d') = E(INT), S('i') = E(INT),
++ S('o') = E(UINT),S('u') = E(UINT),S('x') = E(UINT), S('X') = E(UINT),
++ S('e') = E(DBL), S('f') = E(DBL), S('g') = E(DBL), S('a') = E(DBL),
++ S('E') = E(DBL), S('F') = E(DBL), S('G') = E(DBL), S('A') = E(DBL),
++ S('c') = E(CHAR),S('C') = E(INT),
++ S('s') = E(PTR), S('S') = E(PTR), S('p') = E(UIPTR),S('n') = E(PTR),
++ S('m') = E(NONE),
++ S('l') = LPRE, S('h') = HPRE, S('L') = BIGLPRE,
++ S('z') = ZTPRE, S('j') = JPRE, S('t') = ZTPRE
++ }, { /* 1: l-prefixed */
++ S('d') = E(LONG), S('i') = E(LONG),
++ S('o') = E(ULONG),S('u') = E(ULONG),S('x') = E(ULONG),S('X') = E(ULONG),
++ S('e') = E(DBL), S('f') = E(DBL), S('g') = E(DBL), S('a') = E(DBL),
++ S('E') = E(DBL), S('F') = E(DBL), S('G') = E(DBL), S('A') = E(DBL),
++ S('c') = E(INT), S('s') = E(PTR), S('n') = E(PTR),
++ S('l') = LLPRE
++ }, { /* 2: ll-prefixed */
++ S('d') = E(LLONG), S('i') = E(LLONG),
++ S('o') = E(ULLONG),S('u') = E(ULLONG),
++ S('x') = E(ULLONG),S('X') = E(ULLONG),
++ S('n') = E(PTR)
++ }, { /* 3: h-prefixed */
++ S('d') = E(SHORT), S('i') = E(SHORT),
++ S('o') = E(USHORT),S('u') = E(USHORT),
++ S('x') = E(USHORT),S('X') = E(USHORT),
++ S('n') = E(PTR),
++ S('h') = HHPRE
++ }, { /* 4: hh-prefixed */
++ S('d') = E(CHAR), S('i') = E(CHAR),
++ S('o') = E(UCHAR),S('u') = E(UCHAR),
++ S('x') = E(UCHAR),S('X') = E(UCHAR),
++ S('n') = E(PTR)
++ }, { /* 5: L-prefixed */
++ S('e') = E(LDBL),S('f') = E(LDBL),S('g') = E(LDBL), S('a') = E(LDBL),
++ S('E') = E(LDBL),S('F') = E(LDBL),S('G') = E(LDBL), S('A') = E(LDBL),
++ S('n') = E(PTR)
++ }, { /* 6: z- or t-prefixed (assumed to be same size) */
++ S('d') = E(PDIFF),S('i') = E(PDIFF),
++ S('o') = E(SIZET),S('u') = E(SIZET),
++ S('x') = E(SIZET),S('X') = E(SIZET),
++ S('n') = E(PTR)
++ }, { /* 7: j-prefixed */
++ S('d') = E(IMAX), S('i') = E(IMAX),
++ S('o') = E(UMAX), S('u') = E(UMAX),
++ S('x') = E(UMAX), S('X') = E(UMAX),
++ S('n') = E(PTR)
++ }
++};
++
++size_t parse_printf_format(const char *fmt, size_t n, int *types)
++{
++ size_t i = 0;
++ size_t last = 0;
++
++ memset(types, 0, n);
++
++ while (1) {
++ size_t arg;
++ unsigned int state;
++
++ fmt = consume_nonarg(fmt);
++ if (*fmt == '\0')
++ break;
++ if (*fmt == '%') {
++ fmt++;
++ continue;
++ }
++ arg = 0;
++ fmt = consume_argn(fmt, &arg);
++ /* flags */
++ fmt = consume_flags(fmt);
++ /* width */
++ if (*fmt == '*') {
++ size_t warg = 0;
++ fmt = consume_argn(fmt+1, &warg);
++ if (warg == 0)
++ warg = ++i;
++ if (warg > last)
++ last = warg;
++ if (warg <= n && types[warg-1] == NONE)
++ types[warg-1] = INT;
++ } else
++ fmt = consume_num(fmt);
++ /* precision */
++ if (*fmt == '.') {
++ fmt++;
++ if (*fmt == '*') {
++ size_t parg = 0;
++ fmt = consume_argn(fmt+1, &parg);
++ if (parg == 0)
++ parg = ++i;
++ if (parg > last)
++ last = parg;
++ if (parg <= n && types[parg-1] == NONE)
++ types[parg-1] = INT;
++ } else {
++ if (*fmt == '-')
++ fmt++;
++ fmt = consume_num(fmt);
++ }
++ }
++ /* length modifier and conversion specifier */
++ state = BARE;
++ do {
++ unsigned char c = *fmt++;
++
++ if (c < 'A' || c > 'z')
++ continue;
++ state = states[state]S(c);
++ if (state == 0)
++ continue;
++ } while (state < STOP);
++
++ if (state == E(NONE))
++ continue;
++
++ if (arg == 0)
++ arg = ++i;
++ if (arg > last)
++ last = arg;
++ if (arg <= n)
++ types[arg-1] = state - STOP;
++ }
++
++ if (last > n)
++ last = n;
++ for (i = 0; i < last; i++)
++ types[i] = pa_types[types[i]];
++
++ return last;
++}
+diff --git a/src/basic/parse-printf-format.h b/src/basic/parse-printf-format.h
+new file mode 100644
+index 0000000000..47be7522d7
+--- /dev/null
++++ b/src/basic/parse-printf-format.h
+@@ -0,0 +1,57 @@
++/*-*- Mode: C; c-basic-offset: 8; indent-tabs-mode: nil -*-*/
++
++/***
++ This file is part of systemd.
++
++ Copyright 2014 Emil Renner Berthing <systemd@esmil.dk>
++
++ With parts from the GNU C Library
++ Copyright 1991-2014 Free Software Foundation, Inc.
++
++ systemd is free software; you can redistribute it and/or modify it
++ under the terms of the GNU Lesser General Public License as published by
++ the Free Software Foundation; either version 2.1 of the License, or
++ (at your option) any later version.
++
++ systemd is distributed in the hope that it will be useful, but
++ WITHOUT ANY WARRANTY; without even the implied warranty of
++ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
++ Lesser General Public License for more details.
++
++ You should have received a copy of the GNU Lesser General Public License
++ along with systemd; If not, see <http://www.gnu.org/licenses/>.
++***/
++
++#pragma once
++
++#include "config.h"
++
++#if HAVE_PRINTF_H
++#include <printf.h>
++#else
++
++#include <stddef.h>
++
++enum { /* C type: */
++ PA_INT, /* int */
++ PA_CHAR, /* int, cast to char */
++ PA_WCHAR, /* wide char */
++ PA_STRING, /* const char *, a '\0'-terminated string */
++ PA_WSTRING, /* const wchar_t *, wide character string */
++ PA_POINTER, /* void * */
++ PA_FLOAT, /* float */
++ PA_DOUBLE, /* double */
++ PA_LAST
++};
++
++/* Flag bits that can be set in a type returned by `parse_printf_format'. */
++#define PA_FLAG_MASK 0xff00
++#define PA_FLAG_LONG_LONG (1 << 8)
++#define PA_FLAG_LONG_DOUBLE PA_FLAG_LONG_LONG
++#define PA_FLAG_LONG (1 << 9)
++#define PA_FLAG_SHORT (1 << 10)
++#define PA_FLAG_PTR (1 << 11)
++
++size_t parse_printf_format(const char *fmt, size_t n, int *types);
++
++#endif /* HAVE_PRINTF_H */
+diff --git a/src/basic/stdio-util.h b/src/basic/stdio-util.h
+index 4e93ac90c9..f9deb6f662 100644
+--- a/src/basic/stdio-util.h
++++ b/src/basic/stdio-util.h
+@@ -1,12 +1,12 @@
+ /* SPDX-License-Identifier: LGPL-2.1-or-later */
+ #pragma once
+
+-#include <printf.h>
+ #include <stdarg.h>
+ #include <stdio.h>
+ #include <sys/types.h>
+
+ #include "macro.h"
++#include "parse-printf-format.h"
+
+ _printf_(3, 4)
+ static inline char *snprintf_ok(char *buf, size_t len, const char *format, ...) {
+diff --git a/src/libsystemd/sd-journal/journal-send.c b/src/libsystemd/sd-journal/journal-send.c
+index be23b2fe75..69a2eb6404 100644
+--- a/src/libsystemd/sd-journal/journal-send.c
++++ b/src/libsystemd/sd-journal/journal-send.c
+@@ -2,7 +2,6 @@
+
+ #include <errno.h>
+ #include <fcntl.h>
+-#include <printf.h>
+ #include <stddef.h>
+ #include <sys/un.h>
+ #include <unistd.h>
+@@ -28,6 +27,7 @@
+ #include "stdio-util.h"
+ #include "string-util.h"
+ #include "tmpfile-util.h"
++#include "parse-printf-format.h"
+
+ #define SNDBUF_SIZE (8*1024*1024)
+
+--
+2.34.1
+
diff --git a/meta/recipes-core/systemd/systemd/0002-binfmt-Don-t-install-dependency-links-at-install-tim.patch b/meta/recipes-core/systemd/systemd/0002-binfmt-Don-t-install-dependency-links-at-install-tim.patch
new file mode 100644
index 0000000000..be231cf6b2
--- /dev/null
+++ b/meta/recipes-core/systemd/systemd/0002-binfmt-Don-t-install-dependency-links-at-install-tim.patch
@@ -0,0 +1,79 @@
+From 29a58009a172e369ad7166e16dab2f4945c6b0d2 Mon Sep 17 00:00:00 2001
+From: Chen Qi <Qi.Chen@windriver.com>
+Date: Thu, 21 Feb 2019 16:23:24 +0800
+Subject: [PATCH 1/2] binfmt: Don't install dependency links at install time
+ for the binfmt services
+
+use [Install] blocks so that they get created when the service is enabled
+like a traditional service.
+
+The [Install] blocks were rejected upstream as they don't have a way to
+"enable" it on install without static symlinks which can't be disabled,
+only masked. We however can do that in a postinst.
+
+Upstream-Status: Denied
+
+Signed-off-by: Ross Burton <ross.burton@intel.com>
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+Signed-off-by: Chen Qi <Qi.Chen@windriver.com>
+[rebased for systemd 243]
+Signed-off-by: Scott Murray <scott.murray@konsulko.com>
+---
+ units/meson.build | 2 --
+ units/proc-sys-fs-binfmt_misc.automount | 3 +++
+ units/systemd-binfmt.service.in | 4 ++++
+ 3 files changed, 7 insertions(+), 2 deletions(-)
+
+diff --git a/units/meson.build b/units/meson.build
+index e7bfb7f838..1d5ec4b178 100644
+--- a/units/meson.build
++++ b/units/meson.build
+@@ -154,7 +154,6 @@ units = [
+ {
+ 'file' : 'proc-sys-fs-binfmt_misc.automount',
+ 'conditions' : ['ENABLE_BINFMT'],
+- 'symlinks' : ['sysinit.target.wants/'],
+ },
+ {
+ 'file' : 'proc-sys-fs-binfmt_misc.mount',
+@@ -251,7 +250,6 @@ units = [
+ {
+ 'file' : 'systemd-binfmt.service.in',
+ 'conditions' : ['ENABLE_BINFMT'],
+- 'symlinks' : ['sysinit.target.wants/'],
+ },
+ {
+ 'file' : 'systemd-bless-boot.service.in',
+diff --git a/units/proc-sys-fs-binfmt_misc.automount b/units/proc-sys-fs-binfmt_misc.automount
+index 5d212015a5..6c2900ca77 100644
+--- a/units/proc-sys-fs-binfmt_misc.automount
++++ b/units/proc-sys-fs-binfmt_misc.automount
+@@ -22,3 +22,6 @@ Before=shutdown.target
+
+ [Automount]
+ Where=/proc/sys/fs/binfmt_misc
++
++[Install]
++WantedBy=sysinit.target
+diff --git a/units/systemd-binfmt.service.in b/units/systemd-binfmt.service.in
+index 6861c76674..531e9fbd90 100644
+--- a/units/systemd-binfmt.service.in
++++ b/units/systemd-binfmt.service.in
+@@ -14,6 +14,7 @@ Documentation=https://docs.kernel.org/admin-guide/binfmt-misc.html
+ Documentation=https://www.freedesktop.org/wiki/Software/systemd/APIFileSystems
+ DefaultDependencies=no
+ Conflicts=shutdown.target
++Wants=proc-sys-fs-binfmt_misc.automount
+ After=proc-sys-fs-binfmt_misc.automount
+ After=proc-sys-fs-binfmt_misc.mount
+ After=local-fs.target
+@@ -31,3 +32,6 @@ RemainAfterExit=yes
+ ExecStart={{LIBEXECDIR}}/systemd-binfmt
+ ExecStop={{LIBEXECDIR}}/systemd-binfmt --unregister
+ TimeoutSec=90s
++
++[Install]
++WantedBy=sysinit.target
+--
+2.34.1
+
diff --git a/meta/recipes-core/systemd/systemd/0002-don-t-use-glibc-specific-qsort_r.patch b/meta/recipes-core/systemd/systemd/0002-don-t-use-glibc-specific-qsort_r.patch
deleted file mode 100644
index d109860e1a..0000000000
--- a/meta/recipes-core/systemd/systemd/0002-don-t-use-glibc-specific-qsort_r.patch
+++ /dev/null
@@ -1,163 +0,0 @@
-From c542d2d93cf536e91d4edb8791fdc0de732b0a52 Mon Sep 17 00:00:00 2001
-From: Chen Qi <Qi.Chen@windriver.com>
-Date: Mon, 25 Feb 2019 13:41:41 +0800
-Subject: [PATCH] don't use glibc-specific qsort_r
-
-Upstream-Status: Inappropriate [musl specific]
-
-Signed-off-by: Khem Raj <raj.khem@gmail.com>
-[Rebased for v241]
-Signed-off-by: Chen Qi <Qi.Chen@windriver.com>
-[Rebased for v242]
-Signed-off-by: Andrej Valek <andrej.valek@siemens.com>
-[Rebased for v247]
-Signed-off-by: Luca Boccassi <luca.boccassi@microsoft.com>
-
----
- src/basic/sort-util.h | 14 --------------
- src/shared/format-table.c | 36 ++++++++++++++++++++++++------------
- src/shared/hwdb-util.c | 19 ++++++++++++++-----
- 3 files changed, 38 insertions(+), 31 deletions(-)
-
-diff --git a/src/basic/sort-util.h b/src/basic/sort-util.h
-index 02a6784d99..cb448df109 100644
---- a/src/basic/sort-util.h
-+++ b/src/basic/sort-util.h
-@@ -61,18 +61,4 @@ static inline void _qsort_safe(void *base, size_t nmemb, size_t size, comparison
- _qsort_safe((p), (n), sizeof((p)[0]), (comparison_fn_t) _func_); \
- })
-
--static inline void qsort_r_safe(void *base, size_t nmemb, size_t size, comparison_userdata_fn_t compar, void *userdata) {
-- if (nmemb <= 1)
-- return;
--
-- assert(base);
-- qsort_r(base, nmemb, size, compar, userdata);
--}
--
--#define typesafe_qsort_r(p, n, func, userdata) \
-- ({ \
-- int (*_func_)(const typeof(p[0])*, const typeof(p[0])*, typeof(userdata)) = func; \
-- qsort_r_safe((p), (n), sizeof((p)[0]), (comparison_userdata_fn_t) _func_, userdata); \
-- })
--
- int cmp_int(const int *a, const int *b);
-diff --git a/src/shared/format-table.c b/src/shared/format-table.c
-index b95680b365..5ffa208615 100644
---- a/src/shared/format-table.c
-+++ b/src/shared/format-table.c
-@@ -1324,30 +1324,32 @@ static int cell_data_compare(TableData *a, size_t index_a, TableData *b, size_t
- return CMP(index_a, index_b);
- }
-
--static int table_data_compare(const size_t *a, const size_t *b, Table *t) {
-+static Table *user_table;
-+static int table_data_compare(const void *x, const void *y) {
-+ const size_t *a = x, *b=y;
- int r;
-
-- assert(t);
-- assert(t->sort_map);
-+ assert(user_table);
-+ assert(user_table->sort_map);
-
- /* Make sure the header stays at the beginning */
-- if (*a < t->n_columns && *b < t->n_columns)
-+ if (*a < user_table->n_columns && *b < user_table->n_columns)
- return 0;
-- if (*a < t->n_columns)
-+ if (*a < user_table->n_columns)
- return -1;
-- if (*b < t->n_columns)
-+ if (*b < user_table->n_columns)
- return 1;
-
- /* Order other lines by the sorting map */
-- for (size_t i = 0; i < t->n_sort_map; i++) {
-+ for (size_t i = 0; i < user_table->n_sort_map; i++) {
- TableData *d, *dd;
-
-- d = t->data[*a + t->sort_map[i]];
-- dd = t->data[*b + t->sort_map[i]];
-+ d = user_table->data[*a + user_table->sort_map[i]];
-+ dd = user_table->data[*b + user_table->sort_map[i]];
-
- r = cell_data_compare(d, *a, dd, *b);
- if (r != 0)
-- return t->reverse_map && t->reverse_map[t->sort_map[i]] ? -r : r;
-+ return user_table->reverse_map && user_table->reverse_map[user_table->sort_map[i]] ? -r : r;
- }
-
- /* Order identical lines by the order there were originally added in */
-@@ -2009,7 +2011,12 @@ int table_print(Table *t, FILE *f) {
- for (size_t i = 0; i < n_rows; i++)
- sorted[i] = i * t->n_columns;
-
-- typesafe_qsort_r(sorted, n_rows, table_data_compare, t);
-+ if (n_rows <= 1)
-+ return 0;
-+ assert(sorted);
-+ user_table = t;
-+ qsort(sorted, n_rows, sizeof(size_t), table_data_compare);
-+ user_table = NULL;
- }
-
- if (t->display_map)
-@@ -2647,7 +2654,12 @@ int table_to_json(Table *t, JsonVariant **ret) {
- for (size_t i = 0; i < n_rows; i++)
- sorted[i] = i * t->n_columns;
-
-- typesafe_qsort_r(sorted, n_rows, table_data_compare, t);
-+ if (n_rows <= 1)
-+ return 0;
-+ assert(sorted);
-+ user_table = t;
-+ qsort(sorted, n_rows, sizeof(size_t), table_data_compare);
-+ user_table = NULL;
- }
-
- if (t->display_map)
-diff --git a/src/shared/hwdb-util.c b/src/shared/hwdb-util.c
-index fe4785f3e5..827e1639c3 100644
---- a/src/shared/hwdb-util.c
-+++ b/src/shared/hwdb-util.c
-@@ -127,9 +127,13 @@ static struct trie* trie_free(struct trie *trie) {
-
- DEFINE_TRIVIAL_CLEANUP_FUNC(struct trie*, trie_free);
-
--static int trie_values_cmp(const struct trie_value_entry *a, const struct trie_value_entry *b, struct trie *trie) {
-- return strcmp(trie->strings->buf + a->key_off,
-- trie->strings->buf + b->key_off);
-+static struct trie *trie_node_add_value_trie;
-+static int trie_values_cmp(const void *v1, const void *v2) {
-+ const struct trie_value_entry *a = v1;
-+ const struct trie_value_entry *b = v2;
-+
-+ return strcmp(trie_node_add_value_trie->strings->buf + a->key_off,
-+ trie_node_add_value_trie->strings->buf + b->key_off);
- }
-
- static int trie_node_add_value(struct trie *trie, struct trie_node *node,
-@@ -157,7 +161,10 @@ static int trie_node_add_value(struct trie *trie, struct trie_node *node,
- .value_off = v,
- };
-
-- val = typesafe_bsearch_r(&search, node->values, node->values_count, trie_values_cmp, trie);
-+ trie_node_add_value_trie = trie;
-+ val = bsearch(&search, node->values, node->values_count, sizeof(struct trie_value_entry), trie_values_cmp);
-+ trie_node_add_value_trie = NULL;
-+
- if (val) {
- /* At this point we have 2 identical properties on the same match-string.
- * Since we process files in order, we just replace the previous value. */
-@@ -183,7 +190,9 @@ static int trie_node_add_value(struct trie *trie, struct trie_node *node,
- .line_number = line_number,
- };
- node->values_count++;
-- typesafe_qsort_r(node->values, node->values_count, trie_values_cmp, trie);
-+ trie_node_add_value_trie = trie;
-+ qsort(node->values, node->values_count, sizeof(struct trie_value_entry), trie_values_cmp);
-+ trie_node_add_value_trie = NULL;
- return 0;
- }
-
diff --git a/meta/recipes-core/systemd/systemd/0003-implment-systemd-sysv-install-for-OE.patch b/meta/recipes-core/systemd/systemd/0003-implment-systemd-sysv-install-for-OE.patch
deleted file mode 100644
index c6204786b3..0000000000
--- a/meta/recipes-core/systemd/systemd/0003-implment-systemd-sysv-install-for-OE.patch
+++ /dev/null
@@ -1,41 +0,0 @@
-From f9078501a1495c9991431d1435d081cd2e830328 Mon Sep 17 00:00:00 2001
-From: Khem Raj <raj.khem@gmail.com>
-Date: Sat, 5 Sep 2015 06:31:47 +0000
-Subject: [PATCH] implment systemd-sysv-install for OE
-
-Use update-rc.d for enabling/disabling and status command
-to check the status of the sysv service
-
-Upstream-Status: Inappropriate [OE-Specific]
-
-Signed-off-by: Khem Raj <raj.khem@gmail.com>
-
----
- src/systemctl/systemd-sysv-install.SKELETON | 6 +++---
- 1 file changed, 3 insertions(+), 3 deletions(-)
-
-diff --git a/src/systemctl/systemd-sysv-install.SKELETON b/src/systemctl/systemd-sysv-install.SKELETON
-index 8c16cf99913f..9f078a121469 100755
---- a/src/systemctl/systemd-sysv-install.SKELETON
-+++ b/src/systemctl/systemd-sysv-install.SKELETON
-@@ -32,17 +32,17 @@ case "$1" in
- enable)
- # call the command to enable SysV init script $NAME here
- # (consider optional $ROOT)
-- echo "IMPLEMENT ME: enabling SysV init.d script $NAME"
-+ update-rc.d -f $NAME defaults
- ;;
- disable)
- # call the command to disable SysV init script $NAME here
- # (consider optional $ROOT)
-- echo "IMPLEMENT ME: disabling SysV init.d script $NAME"
-+ update-rc.d -f $NAME remove
- ;;
- is-enabled)
- # exit with 0 if $NAME is enabled, non-zero if it is disabled
- # (consider optional $ROOT)
-- echo "IMPLEMENT ME: checking SysV init.d script $NAME"
-+ /etc/init.d/$NAME status
- ;;
- *)
- usage ;;
diff --git a/meta/recipes-core/systemd/systemd/0003-missing_type.h-add-__compare_fn_t-and-comparison_fn_.patch b/meta/recipes-core/systemd/systemd/0003-missing_type.h-add-__compare_fn_t-and-comparison_fn_.patch
deleted file mode 100644
index dfe4164900..0000000000
--- a/meta/recipes-core/systemd/systemd/0003-missing_type.h-add-__compare_fn_t-and-comparison_fn_.patch
+++ /dev/null
@@ -1,79 +0,0 @@
-From a4f51ef07375466f79cb148ff1178ed11f808f0a Mon Sep 17 00:00:00 2001
-From: Chen Qi <Qi.Chen@windriver.com>
-Date: Mon, 25 Feb 2019 13:55:12 +0800
-Subject: [PATCH] missing_type.h: add __compare_fn_t and comparison_fn_t
-
-Make it work with musl where comparison_fn_t and __compare_fn_t
-is not provided.
-
-Revisit this when upgrading to 251+ systemd since systemd does not need
-__compare_fn_t anymore
-
-Upstream-Status: Inappropriate [musl specific]
-
-Signed-off-by: Alex Kiernan <alex.kiernan@gmail.com>
-[Rebased for v244]
-Signed-off-by: Chen Qi <Qi.Chen@windriver.com>
-[Rebased for v242]
-Signed-off-by: Andrej Valek <andrej.valek@siemens.com>
-
----
- src/basic/missing_type.h | 9 +++++++++
- src/basic/sort-util.h | 1 +
- src/core/kmod-setup.c | 1 +
- src/libsystemd/sd-journal/catalog.c | 1 +
- 4 files changed, 12 insertions(+)
-
-diff --git a/src/basic/missing_type.h b/src/basic/missing_type.h
-index f6233090a9..aeaf6ad5ec 100644
---- a/src/basic/missing_type.h
-+++ b/src/basic/missing_type.h
-@@ -10,3 +10,12 @@
- #if !HAVE_CHAR16_T
- #define char16_t uint16_t
- #endif
-+
-+#ifndef __GLIBC__
-+typedef int (*comparison_fn_t)(const void *, const void *);
-+#endif
-+
-+#ifndef __COMPAR_FN_T
-+#define __COMPAR_FN_T
-+typedef int (*__compar_fn_t)(const void *, const void *);
-+#endif
-diff --git a/src/basic/sort-util.h b/src/basic/sort-util.h
-index cb448df109..fd738a65ab 100644
---- a/src/basic/sort-util.h
-+++ b/src/basic/sort-util.h
-@@ -4,6 +4,7 @@
- #include <stdlib.h>
-
- #include "macro.h"
-+#include "missing_type.h"
-
- /* This is the same as glibc's internal __compar_d_fn_t type. glibc exports a public comparison_fn_t, for the
- * external type __compar_fn_t, but doesn't do anything similar for __compar_d_fn_t. Let's hence do that
-diff --git a/src/core/kmod-setup.c b/src/core/kmod-setup.c
-index d054668b8e..d3bf55acbe 100644
---- a/src/core/kmod-setup.c
-+++ b/src/core/kmod-setup.c
-@@ -10,6 +10,7 @@
- #include "macro.h"
- #include "recurse-dir.h"
- #include "string-util.h"
-+#include "missing_type.h"
-
- #if HAVE_KMOD
- #include "module-util.h"
-diff --git a/src/libsystemd/sd-journal/catalog.c b/src/libsystemd/sd-journal/catalog.c
-index 8fc87b131a..36a6efdbd8 100644
---- a/src/libsystemd/sd-journal/catalog.c
-+++ b/src/libsystemd/sd-journal/catalog.c
-@@ -28,6 +28,7 @@
- #include "string-util.h"
- #include "strv.h"
- #include "tmpfile-util.h"
-+#include "missing_type.h"
-
- const char * const catalog_file_dirs[] = {
- "/usr/local/lib/systemd/catalog/",
diff --git a/meta/recipes-core/systemd/systemd/0003-src-basic-missing.h-check-for-missing-strndupa.patch b/meta/recipes-core/systemd/systemd/0003-src-basic-missing.h-check-for-missing-strndupa.patch
new file mode 100644
index 0000000000..5595b5bc23
--- /dev/null
+++ b/meta/recipes-core/systemd/systemd/0003-src-basic-missing.h-check-for-missing-strndupa.patch
@@ -0,0 +1,699 @@
+From 87f1d38f40c5fe9cadf2b2de442473e4e5605788 Mon Sep 17 00:00:00 2001
+From: Chen Qi <Qi.Chen@windriver.com>
+Date: Mon, 25 Feb 2019 14:18:21 +0800
+Subject: [PATCH 03/22] src/basic/missing.h: check for missing strndupa
+
+include missing.h for definition of strndupa
+
+Upstream-Status: Inappropriate [musl specific]
+
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+Signed-off-by: Chen Qi <Qi.Chen@windriver.com>
+[Rebased for v242]
+Signed-off-by: Andrej Valek <andrej.valek@siemens.com>
+[rebased for systemd 243]
+Signed-off-by: Scott Murray <scott.murray@konsulko.com>
+Signed-off-by: Alex Kiernan <alex.kiernan@gmail.com>
+[rebased for systemd 244]
+[Rebased for v247]
+Signed-off-by: Luca Boccassi <luca.boccassi@microsoft.com>
+[Rebased for v254]
+Signed-off-by: Chen Qi <Qi.Chen@windriver.com>
+[Rebased for v255.1]
+---
+ meson.build | 1 +
+ src/backlight/backlight.c | 1 +
+ src/basic/cgroup-util.c | 1 +
+ src/basic/env-util.c | 1 +
+ src/basic/log.c | 1 +
+ src/basic/missing_stdlib.h | 12 ++++++++++++
+ src/basic/mkdir.c | 1 +
+ src/basic/mountpoint-util.c | 1 +
+ src/basic/parse-util.c | 1 +
+ src/basic/path-lookup.c | 1 +
+ src/basic/percent-util.c | 1 +
+ src/basic/proc-cmdline.c | 1 +
+ src/basic/procfs-util.c | 1 +
+ src/basic/time-util.c | 1 +
+ src/boot/bless-boot.c | 1 +
+ src/core/dbus-cgroup.c | 1 +
+ src/core/dbus-execute.c | 1 +
+ src/core/dbus-util.c | 1 +
+ src/core/execute.c | 1 +
+ src/core/kmod-setup.c | 1 +
+ src/core/service.c | 1 +
+ src/coredump/coredump-vacuum.c | 1 +
+ src/fstab-generator/fstab-generator.c | 1 +
+ src/journal-remote/journal-remote-main.c | 1 +
+ src/journal/journalctl.c | 1 +
+ src/libsystemd/sd-bus/bus-message.c | 1 +
+ src/libsystemd/sd-bus/bus-objects.c | 1 +
+ src/libsystemd/sd-bus/bus-socket.c | 1 +
+ src/libsystemd/sd-bus/sd-bus.c | 1 +
+ src/libsystemd/sd-bus/test-bus-benchmark.c | 1 +
+ src/libsystemd/sd-journal/sd-journal.c | 1 +
+ src/login/pam_systemd.c | 1 +
+ src/network/generator/network-generator.c | 1 +
+ src/nspawn/nspawn-settings.c | 1 +
+ src/nss-mymachines/nss-mymachines.c | 1 +
+ src/portable/portable.c | 1 +
+ src/resolve/resolvectl.c | 1 +
+ src/shared/bus-get-properties.c | 1 +
+ src/shared/bus-unit-procs.c | 1 +
+ src/shared/bus-unit-util.c | 1 +
+ src/shared/bus-util.c | 1 +
+ src/shared/dns-domain.c | 1 +
+ src/shared/journal-importer.c | 1 +
+ src/shared/logs-show.c | 1 +
+ src/shared/pager.c | 1 +
+ src/socket-proxy/socket-proxyd.c | 1 +
+ src/test/test-hexdecoct.c | 1 +
+ src/udev/udev-builtin-net_id.c | 1 +
+ src/udev/udev-builtin-path_id.c | 1 +
+ src/udev/udev-event.c | 1 +
+ src/udev/udev-rules.c | 1 +
+ 51 files changed, 62 insertions(+)
+
+diff --git a/meson.build b/meson.build
+index 01fd3ffc19..61a872b753 100644
+--- a/meson.build
++++ b/meson.build
+@@ -567,6 +567,7 @@ foreach ident : ['secure_getenv', '__secure_getenv']
+ endforeach
+
+ foreach ident : [
++ ['strndupa' , '''#include <string.h>'''],
+ ['memfd_create', '''#include <sys/mman.h>'''],
+ ['gettid', '''#include <sys/types.h>
+ #include <unistd.h>'''],
+diff --git a/src/backlight/backlight.c b/src/backlight/backlight.c
+index 5ac9f904a9..99d5122dd7 100644
+--- a/src/backlight/backlight.c
++++ b/src/backlight/backlight.c
+@@ -20,6 +20,7 @@
+ #include "string-util.h"
+ #include "strv.h"
+ #include "terminal-util.h"
++#include "missing_stdlib.h"
+
+ #define PCI_CLASS_GRAPHICS_CARD 0x30000
+
+diff --git a/src/basic/cgroup-util.c b/src/basic/cgroup-util.c
+index 18b16ecc0e..d2be79622f 100644
+--- a/src/basic/cgroup-util.c
++++ b/src/basic/cgroup-util.c
+@@ -38,6 +38,7 @@
+ #include "unit-name.h"
+ #include "user-util.h"
+ #include "xattr-util.h"
++#include "missing_stdlib.h"
+
+ static int cg_enumerate_items(const char *controller, const char *path, FILE **ret, const char *item) {
+ _cleanup_free_ char *fs = NULL;
+diff --git a/src/basic/env-util.c b/src/basic/env-util.c
+index d3bf73385f..16b17358ca 100644
+--- a/src/basic/env-util.c
++++ b/src/basic/env-util.c
+@@ -19,6 +19,7 @@
+ #include "string-util.h"
+ #include "strv.h"
+ #include "utf8.h"
++#include "missing_stdlib.h"
+
+ /* We follow bash for the character set. Different shells have different rules. */
+ #define VALID_BASH_ENV_NAME_CHARS \
+diff --git a/src/basic/log.c b/src/basic/log.c
+index 1470611a75..9924ec2b9a 100644
+--- a/src/basic/log.c
++++ b/src/basic/log.c
+@@ -40,6 +40,7 @@
+ #include "terminal-util.h"
+ #include "time-util.h"
+ #include "utf8.h"
++#include "missing_stdlib.h"
+
+ #define SNDBUF_SIZE (8*1024*1024)
+ #define IOVEC_MAX 256U
+diff --git a/src/basic/missing_stdlib.h b/src/basic/missing_stdlib.h
+index 8c76f93eb2..9068bfb4f0 100644
+--- a/src/basic/missing_stdlib.h
++++ b/src/basic/missing_stdlib.h
+@@ -11,3 +11,15 @@
+ # error "neither secure_getenv nor __secure_getenv are available"
+ # endif
+ #endif
++
++/* string.h */
++#if ! HAVE_STRNDUPA
++#define strndupa(s, n) \
++ ({ \
++ const char *__old = (s); \
++ size_t __len = strnlen(__old, (n)); \
++ char *__new = (char *)alloca(__len + 1); \
++ __new[__len] = '\0'; \
++ (char *)memcpy(__new, __old, __len); \
++ })
++#endif
+diff --git a/src/basic/mkdir.c b/src/basic/mkdir.c
+index c770e5ed32..1fd8816cd0 100644
+--- a/src/basic/mkdir.c
++++ b/src/basic/mkdir.c
+@@ -16,6 +16,7 @@
+ #include "stat-util.h"
+ #include "stdio-util.h"
+ #include "user-util.h"
++#include "missing_stdlib.h"
+
+ int mkdirat_safe_internal(
+ int dir_fd,
+diff --git a/src/basic/mountpoint-util.c b/src/basic/mountpoint-util.c
+index bf67f7e01a..409f8d8a73 100644
+--- a/src/basic/mountpoint-util.c
++++ b/src/basic/mountpoint-util.c
+@@ -18,6 +18,7 @@
+ #include "missing_stat.h"
+ #include "missing_syscall.h"
+ #include "mkdir.h"
++#include "missing_stdlib.h"
+ #include "mountpoint-util.h"
+ #include "nulstr-util.h"
+ #include "parse-util.h"
+diff --git a/src/basic/parse-util.c b/src/basic/parse-util.c
+index 0430e33e40..f3728de026 100644
+--- a/src/basic/parse-util.c
++++ b/src/basic/parse-util.c
+@@ -18,6 +18,7 @@
+ #include "stat-util.h"
+ #include "string-util.h"
+ #include "strv.h"
++#include "missing_stdlib.h"
+
+ int parse_boolean(const char *v) {
+ if (!v)
+diff --git a/src/basic/path-lookup.c b/src/basic/path-lookup.c
+index 4e3d59fc56..726e240df0 100644
+--- a/src/basic/path-lookup.c
++++ b/src/basic/path-lookup.c
+@@ -16,6 +16,7 @@
+ #include "strv.h"
+ #include "tmpfile-util.h"
+ #include "user-util.h"
++#include "missing_stdlib.h"
+
+ int xdg_user_runtime_dir(char **ret, const char *suffix) {
+ const char *e;
+diff --git a/src/basic/percent-util.c b/src/basic/percent-util.c
+index cab9d0eaea..5f6ca258e9 100644
+--- a/src/basic/percent-util.c
++++ b/src/basic/percent-util.c
+@@ -3,6 +3,7 @@
+ #include "percent-util.h"
+ #include "string-util.h"
+ #include "parse-util.h"
++#include "missing_stdlib.h"
+
+ static int parse_parts_value_whole(const char *p, const char *symbol) {
+ const char *pc, *n;
+diff --git a/src/basic/proc-cmdline.c b/src/basic/proc-cmdline.c
+index 522d8de1f4..7c129dc0fc 100644
+--- a/src/basic/proc-cmdline.c
++++ b/src/basic/proc-cmdline.c
+@@ -16,6 +16,7 @@
+ #include "string-util.h"
+ #include "strv.h"
+ #include "virt.h"
++#include "missing_stdlib.h"
+
+ int proc_cmdline_filter_pid1_args(char **argv, char ***ret) {
+ enum {
+diff --git a/src/basic/procfs-util.c b/src/basic/procfs-util.c
+index d7cfcd9105..6cb0ddf575 100644
+--- a/src/basic/procfs-util.c
++++ b/src/basic/procfs-util.c
+@@ -12,6 +12,7 @@
+ #include "procfs-util.h"
+ #include "stdio-util.h"
+ #include "string-util.h"
++#include "missing_stdlib.h"
+
+ int procfs_get_pid_max(uint64_t *ret) {
+ _cleanup_free_ char *value = NULL;
+diff --git a/src/basic/time-util.c b/src/basic/time-util.c
+index f9014dc560..1d7840a5b5 100644
+--- a/src/basic/time-util.c
++++ b/src/basic/time-util.c
+@@ -27,6 +27,7 @@
+ #include "string-util.h"
+ #include "strv.h"
+ #include "time-util.h"
++#include "missing_stdlib.h"
+
+ static clockid_t map_clock_id(clockid_t c) {
+
+diff --git a/src/boot/bless-boot.c b/src/boot/bless-boot.c
+index 0c0b4f23c7..68fe5ca509 100644
+--- a/src/boot/bless-boot.c
++++ b/src/boot/bless-boot.c
+@@ -22,6 +22,7 @@
+ #include "terminal-util.h"
+ #include "verbs.h"
+ #include "virt.h"
++#include "missing_stdlib.h"
+
+ static char **arg_path = NULL;
+
+diff --git a/src/core/dbus-cgroup.c b/src/core/dbus-cgroup.c
+index 4237e694c0..05f9d9d9a9 100644
+--- a/src/core/dbus-cgroup.c
++++ b/src/core/dbus-cgroup.c
+@@ -25,6 +25,7 @@
+ #include "parse-util.h"
+ #include "path-util.h"
+ #include "percent-util.h"
++#include "missing_stdlib.h"
+ #include "socket-util.h"
+
+ BUS_DEFINE_PROPERTY_GET(bus_property_get_tasks_max, "t", CGroupTasksMax, cgroup_tasks_max_resolve);
+diff --git a/src/core/dbus-execute.c b/src/core/dbus-execute.c
+index 4daa1cefd3..2c77901471 100644
+--- a/src/core/dbus-execute.c
++++ b/src/core/dbus-execute.c
+@@ -42,6 +42,7 @@
+ #include "unit-printf.h"
+ #include "user-util.h"
+ #include "utf8.h"
++#include "missing_stdlib.h"
+
+ BUS_DEFINE_PROPERTY_GET_ENUM(bus_property_get_exec_output, exec_output, ExecOutput);
+ static BUS_DEFINE_PROPERTY_GET_ENUM(property_get_exec_input, exec_input, ExecInput);
+diff --git a/src/core/dbus-util.c b/src/core/dbus-util.c
+index d680a64268..e59f48103e 100644
+--- a/src/core/dbus-util.c
++++ b/src/core/dbus-util.c
+@@ -9,6 +9,7 @@
+ #include "unit-printf.h"
+ #include "user-util.h"
+ #include "unit.h"
++#include "missing_stdlib.h"
+
+ int bus_property_get_triggered_unit(
+ sd_bus *bus,
+diff --git a/src/core/execute.c b/src/core/execute.c
+index ef0bf88687..bd3da0c401 100644
+--- a/src/core/execute.c
++++ b/src/core/execute.c
+@@ -72,6 +72,7 @@
+ #include "unit-serialize.h"
+ #include "user-util.h"
+ #include "utmp-wtmp.h"
++#include "missing_stdlib.h"
+
+ static bool is_terminal_input(ExecInput i) {
+ return IN_SET(i,
+diff --git a/src/core/kmod-setup.c b/src/core/kmod-setup.c
+index b8e3f7aadd..8ce8ca68d8 100644
+--- a/src/core/kmod-setup.c
++++ b/src/core/kmod-setup.c
+@@ -13,6 +13,7 @@
+ #include "string-util.h"
+ #include "strv.h"
+ #include "virt.h"
++#include "missing_stdlib.h"
+
+ #if HAVE_KMOD
+ #include "module-util.h"
+diff --git a/src/core/service.c b/src/core/service.c
+index b9eb40c555..268fe7573b 100644
+--- a/src/core/service.c
++++ b/src/core/service.c
+@@ -45,6 +45,7 @@
+ #include "unit-name.h"
+ #include "unit.h"
+ #include "utf8.h"
++#include "missing_stdlib.h"
+
+ #define service_spawn(...) service_spawn_internal(__func__, __VA_ARGS__)
+
+diff --git a/src/coredump/coredump-vacuum.c b/src/coredump/coredump-vacuum.c
+index 7e0c98cb7d..978a7f5874 100644
+--- a/src/coredump/coredump-vacuum.c
++++ b/src/coredump/coredump-vacuum.c
+@@ -17,6 +17,7 @@
+ #include "string-util.h"
+ #include "time-util.h"
+ #include "user-util.h"
++#include "missing_stdlib.h"
+
+ #define DEFAULT_MAX_USE_LOWER (uint64_t) (1ULL*1024ULL*1024ULL) /* 1 MiB */
+ #define DEFAULT_MAX_USE_UPPER (uint64_t) (4ULL*1024ULL*1024ULL*1024ULL) /* 4 GiB */
+diff --git a/src/fstab-generator/fstab-generator.c b/src/fstab-generator/fstab-generator.c
+index 016f3baa7f..b1def81313 100644
+--- a/src/fstab-generator/fstab-generator.c
++++ b/src/fstab-generator/fstab-generator.c
+@@ -37,6 +37,7 @@
+ #include "unit-name.h"
+ #include "virt.h"
+ #include "volatile-util.h"
++#include "missing_stdlib.h"
+
+ typedef enum MountPointFlags {
+ MOUNT_NOAUTO = 1 << 0,
+diff --git a/src/journal-remote/journal-remote-main.c b/src/journal-remote/journal-remote-main.c
+index da0f20d3ce..f22ce41908 100644
+--- a/src/journal-remote/journal-remote-main.c
++++ b/src/journal-remote/journal-remote-main.c
+@@ -27,6 +27,7 @@
+ #include "stat-util.h"
+ #include "string-table.h"
+ #include "strv.h"
++#include "missing_stdlib.h"
+
+ #define PRIV_KEY_FILE CERTIFICATE_ROOT "/private/journal-remote.pem"
+ #define CERT_FILE CERTIFICATE_ROOT "/certs/journal-remote.pem"
+diff --git a/src/journal/journalctl.c b/src/journal/journalctl.c
+index 7f3dcd56a4..41b7cbaaf1 100644
+--- a/src/journal/journalctl.c
++++ b/src/journal/journalctl.c
+@@ -77,6 +77,7 @@
+ #include "unit-name.h"
+ #include "user-util.h"
+ #include "varlink.h"
++#include "missing_stdlib.h"
+
+ #define DEFAULT_FSS_INTERVAL_USEC (15*USEC_PER_MINUTE)
+ #define PROCESS_INOTIFY_INTERVAL 1024 /* Every 1,024 messages processed */
+diff --git a/src/libsystemd/sd-bus/bus-message.c b/src/libsystemd/sd-bus/bus-message.c
+index ff0228081f..9066fcb133 100644
+--- a/src/libsystemd/sd-bus/bus-message.c
++++ b/src/libsystemd/sd-bus/bus-message.c
+@@ -19,6 +19,7 @@
+ #include "strv.h"
+ #include "time-util.h"
+ #include "utf8.h"
++#include "missing_stdlib.h"
+
+ static int message_append_basic(sd_bus_message *m, char type, const void *p, const void **stored);
+ static int message_parse_fields(sd_bus_message *m);
+diff --git a/src/libsystemd/sd-bus/bus-objects.c b/src/libsystemd/sd-bus/bus-objects.c
+index c25c40ff37..57a5da704f 100644
+--- a/src/libsystemd/sd-bus/bus-objects.c
++++ b/src/libsystemd/sd-bus/bus-objects.c
+@@ -11,6 +11,7 @@
+ #include "missing_capability.h"
+ #include "string-util.h"
+ #include "strv.h"
++#include "missing_stdlib.h"
+
+ static int node_vtable_get_userdata(
+ sd_bus *bus,
+diff --git a/src/libsystemd/sd-bus/bus-socket.c b/src/libsystemd/sd-bus/bus-socket.c
+index 3c59d0d615..746922d46f 100644
+--- a/src/libsystemd/sd-bus/bus-socket.c
++++ b/src/libsystemd/sd-bus/bus-socket.c
+@@ -29,6 +29,7 @@
+ #include "string-util.h"
+ #include "user-util.h"
+ #include "utf8.h"
++#include "missing_stdlib.h"
+
+ #define SNDBUF_SIZE (8*1024*1024)
+
+diff --git a/src/libsystemd/sd-bus/sd-bus.c b/src/libsystemd/sd-bus/sd-bus.c
+index 4a0259f8bb..aaa90d2223 100644
+--- a/src/libsystemd/sd-bus/sd-bus.c
++++ b/src/libsystemd/sd-bus/sd-bus.c
+@@ -46,6 +46,7 @@
+ #include "string-util.h"
+ #include "strv.h"
+ #include "user-util.h"
++#include "missing_stdlib.h"
+
+ #define log_debug_bus_message(m) \
+ do { \
+diff --git a/src/libsystemd/sd-bus/test-bus-benchmark.c b/src/libsystemd/sd-bus/test-bus-benchmark.c
+index d988588de0..458df8df9a 100644
+--- a/src/libsystemd/sd-bus/test-bus-benchmark.c
++++ b/src/libsystemd/sd-bus/test-bus-benchmark.c
+@@ -14,6 +14,7 @@
+ #include "string-util.h"
+ #include "tests.h"
+ #include "time-util.h"
++#include "missing_stdlib.h"
+
+ #define MAX_SIZE (2*1024*1024)
+
+diff --git a/src/libsystemd/sd-journal/sd-journal.c b/src/libsystemd/sd-journal/sd-journal.c
+index 6b9ff0a4ed..4a5027ad0f 100644
+--- a/src/libsystemd/sd-journal/sd-journal.c
++++ b/src/libsystemd/sd-journal/sd-journal.c
+@@ -44,6 +44,7 @@
+ #include "strv.h"
+ #include "syslog-util.h"
+ #include "uid-alloc-range.h"
++#include "missing_stdlib.h"
+
+ #define JOURNAL_FILES_RECHECK_USEC (2 * USEC_PER_SEC)
+
+diff --git a/src/login/pam_systemd.c b/src/login/pam_systemd.c
+index b8da266e27..4bb8dd9496 100644
+--- a/src/login/pam_systemd.c
++++ b/src/login/pam_systemd.c
+@@ -35,6 +35,7 @@
+ #include "login-util.h"
+ #include "macro.h"
+ #include "missing_syscall.h"
++#include "missing_stdlib.h"
+ #include "pam-util.h"
+ #include "parse-util.h"
+ #include "path-util.h"
+diff --git a/src/network/generator/network-generator.c b/src/network/generator/network-generator.c
+index 48527a2c73..9777fe0561 100644
+--- a/src/network/generator/network-generator.c
++++ b/src/network/generator/network-generator.c
+@@ -14,6 +14,7 @@
+ #include "string-table.h"
+ #include "string-util.h"
+ #include "strv.h"
++#include "missing_stdlib.h"
+
+ /*
+ # .network
+diff --git a/src/nspawn/nspawn-settings.c b/src/nspawn/nspawn-settings.c
+index 161b1c1c70..ba1c459f78 100644
+--- a/src/nspawn/nspawn-settings.c
++++ b/src/nspawn/nspawn-settings.c
+@@ -16,6 +16,7 @@
+ #include "string-util.h"
+ #include "strv.h"
+ #include "user-util.h"
++#include "missing_stdlib.h"
+
+ Settings *settings_new(void) {
+ Settings *s;
+diff --git a/src/nss-mymachines/nss-mymachines.c b/src/nss-mymachines/nss-mymachines.c
+index c64e79bdff..eda26b0b9a 100644
+--- a/src/nss-mymachines/nss-mymachines.c
++++ b/src/nss-mymachines/nss-mymachines.c
+@@ -21,6 +21,7 @@
+ #include "nss-util.h"
+ #include "signal-util.h"
+ #include "string-util.h"
++#include "missing_stdlib.h"
+
+ static void setup_logging_once(void) {
+ static pthread_once_t once = PTHREAD_ONCE_INIT;
+diff --git a/src/portable/portable.c b/src/portable/portable.c
+index d4b448a627..bb26623565 100644
+--- a/src/portable/portable.c
++++ b/src/portable/portable.c
+@@ -40,6 +40,7 @@
+ #include "strv.h"
+ #include "tmpfile-util.h"
+ #include "user-util.h"
++#include "missing_stdlib.h"
+
+ /* Markers used in the first line of our 20-portable.conf unit file drop-in to determine, that a) the unit file was
+ * dropped there by the portable service logic and b) for which image it was dropped there. */
+diff --git a/src/resolve/resolvectl.c b/src/resolve/resolvectl.c
+index afa537f160..32ccee4ae5 100644
+--- a/src/resolve/resolvectl.c
++++ b/src/resolve/resolvectl.c
+@@ -48,6 +48,7 @@
+ #include "varlink.h"
+ #include "verb-log-control.h"
+ #include "verbs.h"
++#include "missing_stdlib.h"
+
+ static int arg_family = AF_UNSPEC;
+ static int arg_ifindex = 0;
+diff --git a/src/shared/bus-get-properties.c b/src/shared/bus-get-properties.c
+index 53e5d6b99f..851ecd5644 100644
+--- a/src/shared/bus-get-properties.c
++++ b/src/shared/bus-get-properties.c
+@@ -4,6 +4,7 @@
+ #include "rlimit-util.h"
+ #include "stdio-util.h"
+ #include "string-util.h"
++#include "missing_stdlib.h"
+
+ int bus_property_get_bool(
+ sd_bus *bus,
+diff --git a/src/shared/bus-unit-procs.c b/src/shared/bus-unit-procs.c
+index 8b462b5627..183ce1c18e 100644
+--- a/src/shared/bus-unit-procs.c
++++ b/src/shared/bus-unit-procs.c
+@@ -11,6 +11,7 @@
+ #include "sort-util.h"
+ #include "string-util.h"
+ #include "terminal-util.h"
++#include "missing_stdlib.h"
+
+ struct CGroupInfo {
+ char *cgroup_path;
+diff --git a/src/shared/bus-unit-util.c b/src/shared/bus-unit-util.c
+index 4ee9706847..30c8084847 100644
+--- a/src/shared/bus-unit-util.c
++++ b/src/shared/bus-unit-util.c
+@@ -50,6 +50,7 @@
+ #include "unit-def.h"
+ #include "user-util.h"
+ #include "utf8.h"
++#include "missing_stdlib.h"
+
+ int bus_parse_unit_info(sd_bus_message *message, UnitInfo *u) {
+ assert(message);
+diff --git a/src/shared/bus-util.c b/src/shared/bus-util.c
+index 4123152d93..74f148c8b4 100644
+--- a/src/shared/bus-util.c
++++ b/src/shared/bus-util.c
+@@ -24,6 +24,7 @@
+ #include "path-util.h"
+ #include "socket-util.h"
+ #include "stdio-util.h"
++#include "missing_stdlib.h"
+
+ static int name_owner_change_callback(sd_bus_message *m, void *userdata, sd_bus_error *ret_error) {
+ sd_event *e = ASSERT_PTR(userdata);
+diff --git a/src/shared/dns-domain.c b/src/shared/dns-domain.c
+index b41c9b06ca..e69050a507 100644
+--- a/src/shared/dns-domain.c
++++ b/src/shared/dns-domain.c
+@@ -18,6 +18,7 @@
+ #include "string-util.h"
+ #include "strv.h"
+ #include "utf8.h"
++#include "missing_stdlib.h"
+
+ int dns_label_unescape(const char **name, char *dest, size_t sz, DNSLabelFlags flags) {
+ const char *n;
+diff --git a/src/shared/journal-importer.c b/src/shared/journal-importer.c
+index 83e9834bbf..74eaae6f5e 100644
+--- a/src/shared/journal-importer.c
++++ b/src/shared/journal-importer.c
+@@ -16,6 +16,7 @@
+ #include "string-util.h"
+ #include "strv.h"
+ #include "unaligned.h"
++#include "missing_stdlib.h"
+
+ enum {
+ IMPORTER_STATE_LINE = 0, /* waiting to read, or reading line */
+diff --git a/src/shared/logs-show.c b/src/shared/logs-show.c
+index a5d04003bd..10392c132d 100644
+--- a/src/shared/logs-show.c
++++ b/src/shared/logs-show.c
+@@ -41,6 +41,7 @@
+ #include "time-util.h"
+ #include "utf8.h"
+ #include "web-util.h"
++#include "missing_stdlib.h"
+
+ /* up to three lines (each up to 100 characters) or 300 characters, whichever is less */
+ #define PRINT_LINE_THRESHOLD 3
+diff --git a/src/shared/pager.c b/src/shared/pager.c
+index 19deefab56..6b6d0af1a0 100644
+--- a/src/shared/pager.c
++++ b/src/shared/pager.c
+@@ -25,6 +25,7 @@
+ #include "string-util.h"
+ #include "strv.h"
+ #include "terminal-util.h"
++#include "missing_stdlib.h"
+
+ static pid_t pager_pid = 0;
+
+diff --git a/src/socket-proxy/socket-proxyd.c b/src/socket-proxy/socket-proxyd.c
+index 287fd6c181..8f8d5493da 100644
+--- a/src/socket-proxy/socket-proxyd.c
++++ b/src/socket-proxy/socket-proxyd.c
+@@ -27,6 +27,7 @@
+ #include "set.h"
+ #include "socket-util.h"
+ #include "string-util.h"
++#include "missing_stdlib.h"
+
+ #define BUFFER_SIZE (256 * 1024)
+
+diff --git a/src/test/test-hexdecoct.c b/src/test/test-hexdecoct.c
+index f884008660..987e180697 100644
+--- a/src/test/test-hexdecoct.c
++++ b/src/test/test-hexdecoct.c
+@@ -7,6 +7,7 @@
+ #include "macro.h"
+ #include "random-util.h"
+ #include "string-util.h"
++#include "missing_stdlib.h"
+ #include "tests.h"
+
+ TEST(hexchar) {
+diff --git a/src/udev/udev-builtin-net_id.c b/src/udev/udev-builtin-net_id.c
+index 91b40088f4..f528a46b8e 100644
+--- a/src/udev/udev-builtin-net_id.c
++++ b/src/udev/udev-builtin-net_id.c
+@@ -39,6 +39,7 @@
+ #include "strv.h"
+ #include "strxcpyx.h"
+ #include "udev-builtin.h"
++#include "missing_stdlib.h"
+
+ #define ONBOARD_14BIT_INDEX_MAX ((1U << 14) - 1)
+ #define ONBOARD_16BIT_INDEX_MAX ((1U << 16) - 1)
+diff --git a/src/udev/udev-builtin-path_id.c b/src/udev/udev-builtin-path_id.c
+index 467c9a6ad3..f74dae60af 100644
+--- a/src/udev/udev-builtin-path_id.c
++++ b/src/udev/udev-builtin-path_id.c
+@@ -24,6 +24,7 @@
+ #include "sysexits.h"
+ #include "udev-builtin.h"
+ #include "udev-util.h"
++#include "missing_stdlib.h"
+
+ _printf_(2,3)
+ static void path_prepend(char **path, const char *fmt, ...) {
+diff --git a/src/udev/udev-event.c b/src/udev/udev-event.c
+index ed22c8b679..19ebe20237 100644
+--- a/src/udev/udev-event.c
++++ b/src/udev/udev-event.c
+@@ -16,6 +16,7 @@
+ #include "udev-util.h"
+ #include "udev-watch.h"
+ #include "user-util.h"
++#include "missing_stdlib.h"
+
+ UdevEvent *udev_event_new(sd_device *dev, usec_t exec_delay_usec, sd_netlink *rtnl, int log_level) {
+ UdevEvent *event;
+diff --git a/src/udev/udev-rules.c b/src/udev/udev-rules.c
+index 5f12002394..febe345b4c 100644
+--- a/src/udev/udev-rules.c
++++ b/src/udev/udev-rules.c
+@@ -41,6 +41,7 @@
+ #include "udev-util.h"
+ #include "user-util.h"
+ #include "virt.h"
++#include "missing_stdlib.h"
+
+ #define RULES_DIRS ((const char* const*) CONF_PATHS_STRV("udev/rules.d"))
+
+--
+2.34.1
+
diff --git a/meta/recipes-core/systemd/systemd/0004-add-fallback-parse_printf_format-implementation.patch b/meta/recipes-core/systemd/systemd/0004-add-fallback-parse_printf_format-implementation.patch
deleted file mode 100644
index 1bd538b0c0..0000000000
--- a/meta/recipes-core/systemd/systemd/0004-add-fallback-parse_printf_format-implementation.patch
+++ /dev/null
@@ -1,433 +0,0 @@
-From 3d9910dcda697b1e361bba49c99050ee0d116742 Mon Sep 17 00:00:00 2001
-From: Alexander Kanavin <alex.kanavin@gmail.com>
-Date: Sat, 22 May 2021 20:26:24 +0200
-Subject: [PATCH] add fallback parse_printf_format implementation
-
-Upstream-Status: Inappropriate [musl specific]
-
-Signed-off-by: Emil Renner Berthing <systemd@esmil.dk>
-Signed-off-by: Khem Raj <raj.khem@gmail.com>
-Signed-off-by: Chen Qi <Qi.Chen@windriver.com>
-[rebased for systemd 243]
-Signed-off-by: Scott Murray <scott.murray@konsulko.com>
-
----
- meson.build | 1 +
- src/basic/meson.build | 5 +
- src/basic/parse-printf-format.c | 273 +++++++++++++++++++++++
- src/basic/parse-printf-format.h | 57 +++++
- src/basic/stdio-util.h | 2 +-
- src/libsystemd/sd-journal/journal-send.c | 2 +-
- 6 files changed, 338 insertions(+), 2 deletions(-)
- create mode 100644 src/basic/parse-printf-format.c
- create mode 100644 src/basic/parse-printf-format.h
-
-diff --git a/meson.build b/meson.build
-index cb9936ee8b..ae53345260 100644
---- a/meson.build
-+++ b/meson.build
-@@ -686,6 +686,7 @@ endif
- foreach header : ['crypt.h',
- 'linux/memfd.h',
- 'linux/vm_sockets.h',
-+ 'printf.h',
- 'sys/auxv.h',
- 'valgrind/memcheck.h',
- 'valgrind/valgrind.h',
-diff --git a/src/basic/meson.build b/src/basic/meson.build
-index 49e1e7f43e..4131494bfa 100644
---- a/src/basic/meson.build
-+++ b/src/basic/meson.build
-@@ -334,6 +334,11 @@ endforeach
-
- basic_sources += generated_gperf_headers
-
-+if conf.get('HAVE_PRINTF_H') != 1
-+ basic_sources += [files('parse-printf-format.c')]
-+endif
-+
-+
- ############################################################
-
- arch_list = [
-diff --git a/src/basic/parse-printf-format.c b/src/basic/parse-printf-format.c
-new file mode 100644
-index 0000000000..49437e5445
---- /dev/null
-+++ b/src/basic/parse-printf-format.c
-@@ -0,0 +1,273 @@
-+/*-*- Mode: C; c-basic-offset: 8; indent-tabs-mode: nil -*-*/
-+
-+/***
-+ This file is part of systemd.
-+
-+ Copyright 2014 Emil Renner Berthing <systemd@esmil.dk>
-+
-+ With parts from the musl C library
-+ Copyright 2005-2014 Rich Felker, et al.
-+
-+ systemd is free software; you can redistribute it and/or modify it
-+ under the terms of the GNU Lesser General Public License as published by
-+ the Free Software Foundation; either version 2.1 of the License, or
-+ (at your option) any later version.
-+
-+ systemd is distributed in the hope that it will be useful, but
-+ WITHOUT ANY WARRANTY; without even the implied warranty of
-+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-+ Lesser General Public License for more details.
-+
-+ You should have received a copy of the GNU Lesser General Public License
-+ along with systemd; If not, see <http://www.gnu.org/licenses/>.
-+***/
-+
-+#include <stddef.h>
-+#include <string.h>
-+
-+#include "parse-printf-format.h"
-+
-+static const char *consume_nonarg(const char *fmt)
-+{
-+ do {
-+ if (*fmt == '\0')
-+ return fmt;
-+ } while (*fmt++ != '%');
-+ return fmt;
-+}
-+
-+static const char *consume_num(const char *fmt)
-+{
-+ for (;*fmt >= '0' && *fmt <= '9'; fmt++)
-+ /* do nothing */;
-+ return fmt;
-+}
-+
-+static const char *consume_argn(const char *fmt, size_t *arg)
-+{
-+ const char *p = fmt;
-+ size_t val = 0;
-+
-+ if (*p < '1' || *p > '9')
-+ return fmt;
-+ do {
-+ val = 10*val + (*p++ - '0');
-+ } while (*p >= '0' && *p <= '9');
-+
-+ if (*p != '$')
-+ return fmt;
-+ *arg = val;
-+ return p+1;
-+}
-+
-+static const char *consume_flags(const char *fmt)
-+{
-+ while (1) {
-+ switch (*fmt) {
-+ case '#':
-+ case '0':
-+ case '-':
-+ case ' ':
-+ case '+':
-+ case '\'':
-+ case 'I':
-+ fmt++;
-+ continue;
-+ }
-+ return fmt;
-+ }
-+}
-+
-+enum state {
-+ BARE,
-+ LPRE,
-+ LLPRE,
-+ HPRE,
-+ HHPRE,
-+ BIGLPRE,
-+ ZTPRE,
-+ JPRE,
-+ STOP
-+};
-+
-+enum type {
-+ NONE,
-+ PTR,
-+ INT,
-+ UINT,
-+ ULLONG,
-+ LONG,
-+ ULONG,
-+ SHORT,
-+ USHORT,
-+ CHAR,
-+ UCHAR,
-+ LLONG,
-+ SIZET,
-+ IMAX,
-+ UMAX,
-+ PDIFF,
-+ UIPTR,
-+ DBL,
-+ LDBL,
-+ MAXTYPE
-+};
-+
-+static const short pa_types[MAXTYPE] = {
-+ [NONE] = PA_INT,
-+ [PTR] = PA_POINTER,
-+ [INT] = PA_INT,
-+ [UINT] = PA_INT,
-+ [ULLONG] = PA_INT | PA_FLAG_LONG_LONG,
-+ [LONG] = PA_INT | PA_FLAG_LONG,
-+ [ULONG] = PA_INT | PA_FLAG_LONG,
-+ [SHORT] = PA_INT | PA_FLAG_SHORT,
-+ [USHORT] = PA_INT | PA_FLAG_SHORT,
-+ [CHAR] = PA_CHAR,
-+ [UCHAR] = PA_CHAR,
-+ [LLONG] = PA_INT | PA_FLAG_LONG_LONG,
-+ [SIZET] = PA_INT | PA_FLAG_LONG,
-+ [IMAX] = PA_INT | PA_FLAG_LONG_LONG,
-+ [UMAX] = PA_INT | PA_FLAG_LONG_LONG,
-+ [PDIFF] = PA_INT | PA_FLAG_LONG_LONG,
-+ [UIPTR] = PA_INT | PA_FLAG_LONG,
-+ [DBL] = PA_DOUBLE,
-+ [LDBL] = PA_DOUBLE | PA_FLAG_LONG_DOUBLE
-+};
-+
-+#define S(x) [(x)-'A']
-+#define E(x) (STOP + (x))
-+
-+static const unsigned char states[]['z'-'A'+1] = {
-+ { /* 0: bare types */
-+ S('d') = E(INT), S('i') = E(INT),
-+ S('o') = E(UINT),S('u') = E(UINT),S('x') = E(UINT), S('X') = E(UINT),
-+ S('e') = E(DBL), S('f') = E(DBL), S('g') = E(DBL), S('a') = E(DBL),
-+ S('E') = E(DBL), S('F') = E(DBL), S('G') = E(DBL), S('A') = E(DBL),
-+ S('c') = E(CHAR),S('C') = E(INT),
-+ S('s') = E(PTR), S('S') = E(PTR), S('p') = E(UIPTR),S('n') = E(PTR),
-+ S('m') = E(NONE),
-+ S('l') = LPRE, S('h') = HPRE, S('L') = BIGLPRE,
-+ S('z') = ZTPRE, S('j') = JPRE, S('t') = ZTPRE
-+ }, { /* 1: l-prefixed */
-+ S('d') = E(LONG), S('i') = E(LONG),
-+ S('o') = E(ULONG),S('u') = E(ULONG),S('x') = E(ULONG),S('X') = E(ULONG),
-+ S('e') = E(DBL), S('f') = E(DBL), S('g') = E(DBL), S('a') = E(DBL),
-+ S('E') = E(DBL), S('F') = E(DBL), S('G') = E(DBL), S('A') = E(DBL),
-+ S('c') = E(INT), S('s') = E(PTR), S('n') = E(PTR),
-+ S('l') = LLPRE
-+ }, { /* 2: ll-prefixed */
-+ S('d') = E(LLONG), S('i') = E(LLONG),
-+ S('o') = E(ULLONG),S('u') = E(ULLONG),
-+ S('x') = E(ULLONG),S('X') = E(ULLONG),
-+ S('n') = E(PTR)
-+ }, { /* 3: h-prefixed */
-+ S('d') = E(SHORT), S('i') = E(SHORT),
-+ S('o') = E(USHORT),S('u') = E(USHORT),
-+ S('x') = E(USHORT),S('X') = E(USHORT),
-+ S('n') = E(PTR),
-+ S('h') = HHPRE
-+ }, { /* 4: hh-prefixed */
-+ S('d') = E(CHAR), S('i') = E(CHAR),
-+ S('o') = E(UCHAR),S('u') = E(UCHAR),
-+ S('x') = E(UCHAR),S('X') = E(UCHAR),
-+ S('n') = E(PTR)
-+ }, { /* 5: L-prefixed */
-+ S('e') = E(LDBL),S('f') = E(LDBL),S('g') = E(LDBL), S('a') = E(LDBL),
-+ S('E') = E(LDBL),S('F') = E(LDBL),S('G') = E(LDBL), S('A') = E(LDBL),
-+ S('n') = E(PTR)
-+ }, { /* 6: z- or t-prefixed (assumed to be same size) */
-+ S('d') = E(PDIFF),S('i') = E(PDIFF),
-+ S('o') = E(SIZET),S('u') = E(SIZET),
-+ S('x') = E(SIZET),S('X') = E(SIZET),
-+ S('n') = E(PTR)
-+ }, { /* 7: j-prefixed */
-+ S('d') = E(IMAX), S('i') = E(IMAX),
-+ S('o') = E(UMAX), S('u') = E(UMAX),
-+ S('x') = E(UMAX), S('X') = E(UMAX),
-+ S('n') = E(PTR)
-+ }
-+};
-+
-+size_t parse_printf_format(const char *fmt, size_t n, int *types)
-+{
-+ size_t i = 0;
-+ size_t last = 0;
-+
-+ memset(types, 0, n);
-+
-+ while (1) {
-+ size_t arg;
-+ unsigned int state;
-+
-+ fmt = consume_nonarg(fmt);
-+ if (*fmt == '\0')
-+ break;
-+ if (*fmt == '%') {
-+ fmt++;
-+ continue;
-+ }
-+ arg = 0;
-+ fmt = consume_argn(fmt, &arg);
-+ /* flags */
-+ fmt = consume_flags(fmt);
-+ /* width */
-+ if (*fmt == '*') {
-+ size_t warg = 0;
-+ fmt = consume_argn(fmt+1, &warg);
-+ if (warg == 0)
-+ warg = ++i;
-+ if (warg > last)
-+ last = warg;
-+ if (warg <= n && types[warg-1] == NONE)
-+ types[warg-1] = INT;
-+ } else
-+ fmt = consume_num(fmt);
-+ /* precision */
-+ if (*fmt == '.') {
-+ fmt++;
-+ if (*fmt == '*') {
-+ size_t parg = 0;
-+ fmt = consume_argn(fmt+1, &parg);
-+ if (parg == 0)
-+ parg = ++i;
-+ if (parg > last)
-+ last = parg;
-+ if (parg <= n && types[parg-1] == NONE)
-+ types[parg-1] = INT;
-+ } else {
-+ if (*fmt == '-')
-+ fmt++;
-+ fmt = consume_num(fmt);
-+ }
-+ }
-+ /* length modifier and conversion specifier */
-+ state = BARE;
-+ do {
-+ unsigned char c = *fmt++;
-+
-+ if (c < 'A' || c > 'z')
-+ continue;
-+ state = states[state]S(c);
-+ if (state == 0)
-+ continue;
-+ } while (state < STOP);
-+
-+ if (state == E(NONE))
-+ continue;
-+
-+ if (arg == 0)
-+ arg = ++i;
-+ if (arg > last)
-+ last = arg;
-+ if (arg <= n)
-+ types[arg-1] = state - STOP;
-+ }
-+
-+ if (last > n)
-+ last = n;
-+ for (i = 0; i < last; i++)
-+ types[i] = pa_types[types[i]];
-+
-+ return last;
-+}
-diff --git a/src/basic/parse-printf-format.h b/src/basic/parse-printf-format.h
-new file mode 100644
-index 0000000000..47be7522d7
---- /dev/null
-+++ b/src/basic/parse-printf-format.h
-@@ -0,0 +1,57 @@
-+/*-*- Mode: C; c-basic-offset: 8; indent-tabs-mode: nil -*-*/
-+
-+/***
-+ This file is part of systemd.
-+
-+ Copyright 2014 Emil Renner Berthing <systemd@esmil.dk>
-+
-+ With parts from the GNU C Library
-+ Copyright 1991-2014 Free Software Foundation, Inc.
-+
-+ systemd is free software; you can redistribute it and/or modify it
-+ under the terms of the GNU Lesser General Public License as published by
-+ the Free Software Foundation; either version 2.1 of the License, or
-+ (at your option) any later version.
-+
-+ systemd is distributed in the hope that it will be useful, but
-+ WITHOUT ANY WARRANTY; without even the implied warranty of
-+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-+ Lesser General Public License for more details.
-+
-+ You should have received a copy of the GNU Lesser General Public License
-+ along with systemd; If not, see <http://www.gnu.org/licenses/>.
-+***/
-+
-+#pragma once
-+
-+#include "config.h"
-+
-+#if HAVE_PRINTF_H
-+#include <printf.h>
-+#else
-+
-+#include <stddef.h>
-+
-+enum { /* C type: */
-+ PA_INT, /* int */
-+ PA_CHAR, /* int, cast to char */
-+ PA_WCHAR, /* wide char */
-+ PA_STRING, /* const char *, a '\0'-terminated string */
-+ PA_WSTRING, /* const wchar_t *, wide character string */
-+ PA_POINTER, /* void * */
-+ PA_FLOAT, /* float */
-+ PA_DOUBLE, /* double */
-+ PA_LAST
-+};
-+
-+/* Flag bits that can be set in a type returned by `parse_printf_format'. */
-+#define PA_FLAG_MASK 0xff00
-+#define PA_FLAG_LONG_LONG (1 << 8)
-+#define PA_FLAG_LONG_DOUBLE PA_FLAG_LONG_LONG
-+#define PA_FLAG_LONG (1 << 9)
-+#define PA_FLAG_SHORT (1 << 10)
-+#define PA_FLAG_PTR (1 << 11)
-+
-+size_t parse_printf_format(const char *fmt, size_t n, int *types);
-+
-+#endif /* HAVE_PRINTF_H */
-diff --git a/src/basic/stdio-util.h b/src/basic/stdio-util.h
-index 69d7062ec6..f55c5aab2c 100644
---- a/src/basic/stdio-util.h
-+++ b/src/basic/stdio-util.h
-@@ -1,13 +1,13 @@
- /* SPDX-License-Identifier: LGPL-2.1-or-later */
- #pragma once
-
--#include <printf.h>
- #include <stdarg.h>
- #include <stdio.h>
- #include <sys/types.h>
-
- #include "macro.h"
- #include "memory-util.h"
-+#include "parse-printf-format.h"
-
- #define snprintf_ok(buf, len, fmt, ...) \
- ({ \
-diff --git a/src/libsystemd/sd-journal/journal-send.c b/src/libsystemd/sd-journal/journal-send.c
-index 1e10ed5524..e6ceba54f9 100644
---- a/src/libsystemd/sd-journal/journal-send.c
-+++ b/src/libsystemd/sd-journal/journal-send.c
-@@ -2,7 +2,6 @@
-
- #include <errno.h>
- #include <fcntl.h>
--#include <printf.h>
- #include <stddef.h>
- #include <sys/un.h>
- #include <unistd.h>
-@@ -25,6 +24,7 @@
- #include "stdio-util.h"
- #include "string-util.h"
- #include "tmpfile-util.h"
-+#include "parse-printf-format.h"
-
- #define SNDBUF_SIZE (8*1024*1024)
-
diff --git a/meta/recipes-core/systemd/systemd/0004-don-t-fail-if-GLOB_BRACE-and-GLOB_ALTDIRFUNC-is-not-.patch b/meta/recipes-core/systemd/systemd/0004-don-t-fail-if-GLOB_BRACE-and-GLOB_ALTDIRFUNC-is-not-.patch
new file mode 100644
index 0000000000..15877bea88
--- /dev/null
+++ b/meta/recipes-core/systemd/systemd/0004-don-t-fail-if-GLOB_BRACE-and-GLOB_ALTDIRFUNC-is-not-.patch
@@ -0,0 +1,156 @@
+From 5325ab5813617f35f03806ec420829dde7104387 Mon Sep 17 00:00:00 2001
+From: Chen Qi <Qi.Chen@windriver.com>
+Date: Mon, 25 Feb 2019 14:56:21 +0800
+Subject: [PATCH 04/22] don't fail if GLOB_BRACE and GLOB_ALTDIRFUNC is not
+ defined
+
+If the standard library doesn't provide brace
+expansion users just won't get it.
+
+Dont use GNU GLOB extentions on non-glibc systems
+
+Conditionalize use of GLOB_ALTDIRFUNC
+
+Upstream-Status: Inappropriate [musl specific]
+
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+Signed-off-by: Chen Qi <Qi.Chen@windriver.com>
+[rebased for systemd 243]
+Signed-off-by: Scott Murray <scott.murray@konsulko.com>
+---
+ src/basic/glob-util.c | 12 ++++++++++++
+ src/test/test-glob-util.c | 16 ++++++++++++++++
+ src/tmpfiles/tmpfiles.c | 10 ++++++++++
+ 3 files changed, 38 insertions(+)
+
+diff --git a/src/basic/glob-util.c b/src/basic/glob-util.c
+index 802ca8c655..23818a67c6 100644
+--- a/src/basic/glob-util.c
++++ b/src/basic/glob-util.c
+@@ -12,6 +12,12 @@
+ #include "path-util.h"
+ #include "strv.h"
+
++/* Don't fail if the standard library
++ * doesn't provide brace expansion */
++#ifndef GLOB_BRACE
++#define GLOB_BRACE 0
++#endif
++
+ static void closedir_wrapper(void* v) {
+ (void) closedir(v);
+ }
+@@ -19,6 +25,7 @@ static void closedir_wrapper(void* v) {
+ int safe_glob(const char *path, int flags, glob_t *pglob) {
+ int k;
+
++#ifdef GLOB_ALTDIRFUNC
+ /* We want to set GLOB_ALTDIRFUNC ourselves, don't allow it to be set. */
+ assert(!(flags & GLOB_ALTDIRFUNC));
+
+@@ -32,9 +39,14 @@ int safe_glob(const char *path, int flags, glob_t *pglob) {
+ pglob->gl_lstat = lstat;
+ if (!pglob->gl_stat)
+ pglob->gl_stat = stat;
++#endif
+
+ errno = 0;
++#ifdef GLOB_ALTDIRFUNC
+ k = glob(path, flags | GLOB_ALTDIRFUNC, NULL, pglob);
++#else
++ k = glob(path, flags, NULL, pglob);
++#endif
+ if (k == GLOB_NOMATCH)
+ return -ENOENT;
+ if (k == GLOB_NOSPACE)
+diff --git a/src/test/test-glob-util.c b/src/test/test-glob-util.c
+index 9b3e73cce0..3790ba3be5 100644
+--- a/src/test/test-glob-util.c
++++ b/src/test/test-glob-util.c
+@@ -34,6 +34,12 @@ TEST(glob_first) {
+ assert_se(first == NULL);
+ }
+
++/* Don't fail if the standard library
++ * doesn't provide brace expansion */
++#ifndef GLOB_BRACE
++#define GLOB_BRACE 0
++#endif
++
+ TEST(glob_exists) {
+ char name[] = "/tmp/test-glob_exists.XXXXXX";
+ int fd = -EBADF;
+@@ -61,11 +67,13 @@ TEST(glob_no_dot) {
+ const char *fn;
+
+ _cleanup_globfree_ glob_t g = {
++#ifdef GLOB_ALTDIRFUNC
+ .gl_closedir = closedir_wrapper,
+ .gl_readdir = (struct dirent *(*)(void *)) readdir_no_dot,
+ .gl_opendir = (void *(*)(const char *)) opendir,
+ .gl_lstat = lstat,
+ .gl_stat = stat,
++#endif
+ };
+
+ int r;
+@@ -73,11 +81,19 @@ TEST(glob_no_dot) {
+ assert_se(mkdtemp(template));
+
+ fn = strjoina(template, "/*");
++#ifdef GLOB_ALTDIRFUNC
+ r = glob(fn, GLOB_NOSORT|GLOB_BRACE|GLOB_ALTDIRFUNC, NULL, &g);
++#else
++ r = glob(fn, GLOB_NOSORT|GLOB_BRACE, NULL, &g);
++#endif
+ assert_se(r == GLOB_NOMATCH);
+
+ fn = strjoina(template, "/.*");
++#ifdef GLOB_ALTDIRFUNC
+ r = glob(fn, GLOB_NOSORT|GLOB_BRACE|GLOB_ALTDIRFUNC, NULL, &g);
++#else
++ r = glob(fn, GLOB_NOSORT|GLOB_BRACE, NULL, &g);
++#endif
+ assert_se(r == GLOB_NOMATCH);
+
+ (void) rm_rf(template, REMOVE_ROOT|REMOVE_PHYSICAL);
+diff --git a/src/tmpfiles/tmpfiles.c b/src/tmpfiles/tmpfiles.c
+index 230ec09b97..2cc5f391d7 100644
+--- a/src/tmpfiles/tmpfiles.c
++++ b/src/tmpfiles/tmpfiles.c
+@@ -73,6 +73,12 @@
+ #include "user-util.h"
+ #include "virt.h"
+
++/* Don't fail if the standard library
++ * doesn't provide brace expansion */
++#ifndef GLOB_BRACE
++#define GLOB_BRACE 0
++#endif
++
+ /* This reads all files listed in /etc/tmpfiles.d/?*.conf and creates
+ * them in the file system. This is intended to be used to create
+ * properly owned directories beneath /tmp, /var/tmp, /run, which are
+@@ -2434,7 +2440,9 @@ finish:
+
+ static int glob_item(Context *c, Item *i, action_t action) {
+ _cleanup_globfree_ glob_t g = {
++#ifdef GLOB_ALTDIRFUNC
+ .gl_opendir = (void *(*)(const char *)) opendir_nomod,
++#endif
+ };
+ int r = 0, k;
+
+@@ -2461,7 +2469,9 @@ static int glob_item_recursively(
+ fdaction_t action) {
+
+ _cleanup_globfree_ glob_t g = {
++#ifdef GLOB_ALTDIRFUNC
+ .gl_opendir = (void *(*)(const char *)) opendir_nomod,
++#endif
+ };
+ int r = 0, k;
+
+--
+2.34.1
+
diff --git a/meta/recipes-core/systemd/systemd/0005-add-missing-FTW_-macros-for-musl.patch b/meta/recipes-core/systemd/systemd/0005-add-missing-FTW_-macros-for-musl.patch
new file mode 100644
index 0000000000..a1dfca22cd
--- /dev/null
+++ b/meta/recipes-core/systemd/systemd/0005-add-missing-FTW_-macros-for-musl.patch
@@ -0,0 +1,44 @@
+From dad7f897c0de654fa5592fda3e90f874639849f9 Mon Sep 17 00:00:00 2001
+From: Chen Qi <Qi.Chen@windriver.com>
+Date: Mon, 25 Feb 2019 15:00:06 +0800
+Subject: [PATCH 05/22] add missing FTW_ macros for musl
+
+This is to avoid build failures like below for musl.
+
+ locale-util.c:296:24: error: 'FTW_STOP' undeclared
+
+Upstream-Status: Inappropriate [musl specific]
+
+Signed-off-by: Chen Qi <Qi.Chen@windriver.com>
+---
+ src/basic/missing_type.h | 4 ++++
+ src/test/test-recurse-dir.c | 1 +
+ 2 files changed, 5 insertions(+)
+
+diff --git a/src/basic/missing_type.h b/src/basic/missing_type.h
+index 6c0456349d..73a5b90e3c 100644
+--- a/src/basic/missing_type.h
++++ b/src/basic/missing_type.h
+@@ -14,3 +14,7 @@
+ #ifndef __GLIBC__
+ typedef int (*comparison_fn_t)(const void *, const void *);
+ #endif
++
++#ifndef FTW_CONTINUE
++#define FTW_CONTINUE 0
++#endif
+diff --git a/src/test/test-recurse-dir.c b/src/test/test-recurse-dir.c
+index 8684d064ec..70fc2b5376 100644
+--- a/src/test/test-recurse-dir.c
++++ b/src/test/test-recurse-dir.c
+@@ -8,6 +8,7 @@
+ #include "recurse-dir.h"
+ #include "strv.h"
+ #include "tests.h"
++#include "missing_type.h"
+
+ static char **list_nftw = NULL;
+
+--
+2.34.1
+
diff --git a/meta/recipes-core/systemd/systemd/0005-src-basic-missing.h-check-for-missing-strndupa.patch b/meta/recipes-core/systemd/systemd/0005-src-basic-missing.h-check-for-missing-strndupa.patch
deleted file mode 100644
index 4b1fac06b5..0000000000
--- a/meta/recipes-core/systemd/systemd/0005-src-basic-missing.h-check-for-missing-strndupa.patch
+++ /dev/null
@@ -1,707 +0,0 @@
-From 55d48dd81e57add5b2d4b5a7d697c575a0f37ef5 Mon Sep 17 00:00:00 2001
-From: Chen Qi <Qi.Chen@windriver.com>
-Date: Mon, 25 Feb 2019 14:18:21 +0800
-Subject: [PATCH] src/basic/missing.h: check for missing strndupa
-
-include missing.h for definition of strndupa
-
-Upstream-Status: Inappropriate [musl specific]
-
-Signed-off-by: Khem Raj <raj.khem@gmail.com>
-Signed-off-by: Chen Qi <Qi.Chen@windriver.com>
-[Rebased for v242]
-Signed-off-by: Andrej Valek <andrej.valek@siemens.com>
-[rebased for systemd 243]
-Signed-off-by: Scott Murray <scott.murray@konsulko.com>
-Signed-off-by: Alex Kiernan <alex.kiernan@gmail.com>
-[rebased for systemd 244]
-[Rebased for v247]
-Signed-off-by: Luca Boccassi <luca.boccassi@microsoft.com>
-
----
- meson.build | 1 +
- src/backlight/backlight.c | 1 +
- src/basic/cgroup-util.c | 1 +
- src/basic/env-util.c | 1 +
- src/basic/log.c | 1 +
- src/basic/missing_stdlib.h | 12 ++++++++++++
- src/basic/mkdir.c | 1 +
- src/basic/mountpoint-util.c | 1 +
- src/basic/parse-util.c | 1 +
- src/basic/path-lookup.c | 1 +
- src/basic/percent-util.c | 1 +
- src/basic/proc-cmdline.c | 1 +
- src/basic/procfs-util.c | 1 +
- src/basic/time-util.c | 1 +
- src/boot/bless-boot.c | 1 +
- src/core/dbus-cgroup.c | 1 +
- src/core/dbus-execute.c | 1 +
- src/core/dbus-util.c | 1 +
- src/core/execute.c | 1 +
- src/core/kmod-setup.c | 1 +
- src/core/service.c | 1 +
- src/coredump/coredump-vacuum.c | 1 +
- src/fstab-generator/fstab-generator.c | 1 +
- src/journal-remote/journal-remote-main.c | 1 +
- src/journal/journalctl.c | 1 +
- src/libsystemd/sd-bus/bus-message.c | 1 +
- src/libsystemd/sd-bus/bus-objects.c | 1 +
- src/libsystemd/sd-bus/bus-socket.c | 1 +
- src/libsystemd/sd-bus/sd-bus.c | 1 +
- src/libsystemd/sd-bus/test-bus-benchmark.c | 1 +
- src/libsystemd/sd-journal/sd-journal.c | 1 +
- src/locale/keymap-util.c | 1 +
- src/login/pam_systemd.c | 1 +
- src/network/generator/network-generator.c | 1 +
- src/nspawn/nspawn-settings.c | 1 +
- src/nss-mymachines/nss-mymachines.c | 1 +
- src/portable/portable.c | 1 +
- src/resolve/resolvectl.c | 1 +
- src/shared/bus-get-properties.c | 1 +
- src/shared/bus-unit-procs.c | 1 +
- src/shared/bus-unit-util.c | 1 +
- src/shared/bus-util.c | 1 +
- src/shared/dns-domain.c | 1 +
- src/shared/journal-importer.c | 1 +
- src/shared/logs-show.c | 1 +
- src/shared/pager.c | 1 +
- src/shared/uid-range.c | 1 +
- src/socket-proxy/socket-proxyd.c | 1 +
- src/test/test-hexdecoct.c | 1 +
- src/udev/udev-builtin-path_id.c | 1 +
- src/udev/udev-event.c | 1 +
- src/udev/udev-rules.c | 1 +
- 52 files changed, 63 insertions(+)
-
-diff --git a/meson.build b/meson.build
-index ae53345260..8c8a6c9bdf 100644
---- a/meson.build
-+++ b/meson.build
-@@ -507,6 +507,7 @@ foreach ident : ['secure_getenv', '__secure_getenv']
- endforeach
-
- foreach ident : [
-+ ['strndupa' , '''#include <string.h>'''],
- ['memfd_create', '''#include <sys/mman.h>'''],
- ['gettid', '''#include <sys/types.h>
- #include <unistd.h>'''],
-diff --git a/src/backlight/backlight.c b/src/backlight/backlight.c
-index 5a3095cbba..22cfa4d526 100644
---- a/src/backlight/backlight.c
-+++ b/src/backlight/backlight.c
-@@ -19,6 +19,7 @@
- #include "string-util.h"
- #include "strv.h"
- #include "util.h"
-+#include "missing_stdlib.h"
-
- static int help(void) {
- _cleanup_free_ char *link = NULL;
-diff --git a/src/basic/cgroup-util.c b/src/basic/cgroup-util.c
-index a626ecf2e2..f7dc6c8421 100644
---- a/src/basic/cgroup-util.c
-+++ b/src/basic/cgroup-util.c
-@@ -37,6 +37,7 @@
- #include "unit-name.h"
- #include "user-util.h"
- #include "xattr-util.h"
-+#include "missing_stdlib.h"
-
- static int cg_enumerate_items(const char *controller, const char *path, FILE **_f, const char *item) {
- _cleanup_free_ char *fs = NULL;
-diff --git a/src/basic/env-util.c b/src/basic/env-util.c
-index 885967e7f3..d0b7dc845e 100644
---- a/src/basic/env-util.c
-+++ b/src/basic/env-util.c
-@@ -19,6 +19,7 @@
- #include "string-util.h"
- #include "strv.h"
- #include "utf8.h"
-+#include "missing_stdlib.h"
-
- /* We follow bash for the character set. Different shells have different rules. */
- #define VALID_BASH_ENV_NAME_CHARS \
-diff --git a/src/basic/log.c b/src/basic/log.c
-index 12071e2ebd..15254c7bbc 100644
---- a/src/basic/log.c
-+++ b/src/basic/log.c
-@@ -36,6 +36,7 @@
- #include "terminal-util.h"
- #include "time-util.h"
- #include "utf8.h"
-+#include "missing_stdlib.h"
-
- #define SNDBUF_SIZE (8*1024*1024)
-
-diff --git a/src/basic/missing_stdlib.h b/src/basic/missing_stdlib.h
-index 8c76f93eb2..9068bfb4f0 100644
---- a/src/basic/missing_stdlib.h
-+++ b/src/basic/missing_stdlib.h
-@@ -11,3 +11,15 @@
- # error "neither secure_getenv nor __secure_getenv are available"
- # endif
- #endif
-+
-+/* string.h */
-+#if ! HAVE_STRNDUPA
-+#define strndupa(s, n) \
-+ ({ \
-+ const char *__old = (s); \
-+ size_t __len = strnlen(__old, (n)); \
-+ char *__new = (char *)alloca(__len + 1); \
-+ __new[__len] = '\0'; \
-+ (char *)memcpy(__new, __old, __len); \
-+ })
-+#endif
-diff --git a/src/basic/mkdir.c b/src/basic/mkdir.c
-index 51a0d74e87..03569f71f8 100644
---- a/src/basic/mkdir.c
-+++ b/src/basic/mkdir.c
-@@ -15,6 +15,7 @@
- #include "stat-util.h"
- #include "stdio-util.h"
- #include "user-util.h"
-+#include "missing_stdlib.h"
-
- int mkdir_safe_internal(
- const char *path,
-diff --git a/src/basic/mountpoint-util.c b/src/basic/mountpoint-util.c
-index 82a33a6829..d947774b40 100644
---- a/src/basic/mountpoint-util.c
-+++ b/src/basic/mountpoint-util.c
-@@ -13,6 +13,7 @@
- #include "missing_stat.h"
- #include "missing_syscall.h"
- #include "mkdir.h"
-+#include "missing_stdlib.h"
- #include "mountpoint-util.h"
- #include "nulstr-util.h"
- #include "parse-util.h"
-diff --git a/src/basic/parse-util.c b/src/basic/parse-util.c
-index 2888ab6523..d941afec2d 100644
---- a/src/basic/parse-util.c
-+++ b/src/basic/parse-util.c
-@@ -18,6 +18,7 @@
- #include "stat-util.h"
- #include "string-util.h"
- #include "strv.h"
-+#include "missing_stdlib.h"
-
- int parse_boolean(const char *v) {
- if (!v)
-diff --git a/src/basic/path-lookup.c b/src/basic/path-lookup.c
-index 6fb8c40e7a..c4b59e8518 100644
---- a/src/basic/path-lookup.c
-+++ b/src/basic/path-lookup.c
-@@ -16,6 +16,7 @@
- #include "strv.h"
- #include "tmpfile-util.h"
- #include "user-util.h"
-+#include "missing_stdlib.h"
-
- int xdg_user_runtime_dir(char **ret, const char *suffix) {
- const char *e;
-diff --git a/src/basic/percent-util.c b/src/basic/percent-util.c
-index cab9d0eaea..5f6ca258e9 100644
---- a/src/basic/percent-util.c
-+++ b/src/basic/percent-util.c
-@@ -3,6 +3,7 @@
- #include "percent-util.h"
- #include "string-util.h"
- #include "parse-util.h"
-+#include "missing_stdlib.h"
-
- static int parse_parts_value_whole(const char *p, const char *symbol) {
- const char *pc, *n;
-diff --git a/src/basic/proc-cmdline.c b/src/basic/proc-cmdline.c
-index 410b8a3eb5..f2c4355609 100644
---- a/src/basic/proc-cmdline.c
-+++ b/src/basic/proc-cmdline.c
-@@ -15,6 +15,7 @@
- #include "string-util.h"
- #include "util.h"
- #include "virt.h"
-+#include "missing_stdlib.h"
-
- int proc_cmdline(char **ret) {
- const char *e;
-diff --git a/src/basic/procfs-util.c b/src/basic/procfs-util.c
-index 65f96abb06..e485a0196b 100644
---- a/src/basic/procfs-util.c
-+++ b/src/basic/procfs-util.c
-@@ -12,6 +12,7 @@
- #include "procfs-util.h"
- #include "stdio-util.h"
- #include "string-util.h"
-+#include "missing_stdlib.h"
-
- int procfs_get_pid_max(uint64_t *ret) {
- _cleanup_free_ char *value = NULL;
-diff --git a/src/basic/time-util.c b/src/basic/time-util.c
-index b659d6905d..020112be24 100644
---- a/src/basic/time-util.c
-+++ b/src/basic/time-util.c
-@@ -26,6 +26,7 @@
- #include "string-util.h"
- #include "strv.h"
- #include "time-util.h"
-+#include "missing_stdlib.h"
-
- static clockid_t map_clock_id(clockid_t c) {
-
-diff --git a/src/boot/bless-boot.c b/src/boot/bless-boot.c
-index 9e4b0d1f72..d640316ff7 100644
---- a/src/boot/bless-boot.c
-+++ b/src/boot/bless-boot.c
-@@ -19,6 +19,7 @@
- #include "util.h"
- #include "verbs.h"
- #include "virt.h"
-+#include "missing_stdlib.h"
-
- static char **arg_path = NULL;
-
-diff --git a/src/core/dbus-cgroup.c b/src/core/dbus-cgroup.c
-index f0d8759e85..b4c1053e64 100644
---- a/src/core/dbus-cgroup.c
-+++ b/src/core/dbus-cgroup.c
-@@ -21,6 +21,7 @@
- #include "parse-util.h"
- #include "path-util.h"
- #include "percent-util.h"
-+#include "missing_stdlib.h"
- #include "socket-util.h"
-
- BUS_DEFINE_PROPERTY_GET(bus_property_get_tasks_max, "t", TasksMax, tasks_max_resolve);
-diff --git a/src/core/dbus-execute.c b/src/core/dbus-execute.c
-index 5c499e5d06..e7ab1bb9a5 100644
---- a/src/core/dbus-execute.c
-+++ b/src/core/dbus-execute.c
-@@ -44,6 +44,7 @@
- #include "unit-printf.h"
- #include "user-util.h"
- #include "utf8.h"
-+#include "missing_stdlib.h"
-
- BUS_DEFINE_PROPERTY_GET_ENUM(bus_property_get_exec_output, exec_output, ExecOutput);
- static BUS_DEFINE_PROPERTY_GET_ENUM(property_get_exec_input, exec_input, ExecInput);
-diff --git a/src/core/dbus-util.c b/src/core/dbus-util.c
-index 32a2ec0ff9..36be2511e4 100644
---- a/src/core/dbus-util.c
-+++ b/src/core/dbus-util.c
-@@ -9,6 +9,7 @@
- #include "unit-printf.h"
- #include "user-util.h"
- #include "unit.h"
-+#include "missing_stdlib.h"
-
- int bus_property_get_triggered_unit(
- sd_bus *bus,
-diff --git a/src/core/execute.c b/src/core/execute.c
-index 0b20d386d3..fccfb9268c 100644
---- a/src/core/execute.c
-+++ b/src/core/execute.c
-@@ -102,6 +102,7 @@
- #include "unit-serialize.h"
- #include "user-util.h"
- #include "utmp-wtmp.h"
-+#include "missing_stdlib.h"
-
- #define IDLE_TIMEOUT_USEC (5*USEC_PER_SEC)
- #define IDLE_TIMEOUT2_USEC (1*USEC_PER_SEC)
-diff --git a/src/core/kmod-setup.c b/src/core/kmod-setup.c
-index d3bf55acbe..63dd807b8a 100644
---- a/src/core/kmod-setup.c
-+++ b/src/core/kmod-setup.c
-@@ -11,6 +11,7 @@
- #include "recurse-dir.h"
- #include "string-util.h"
- #include "missing_type.h"
-+#include "missing_stdlib.h"
-
- #if HAVE_KMOD
- #include "module-util.h"
-diff --git a/src/core/service.c b/src/core/service.c
-index 87f0d34c8c..ccda3feb29 100644
---- a/src/core/service.c
-+++ b/src/core/service.c
-@@ -42,6 +42,7 @@
- #include "unit.h"
- #include "utf8.h"
- #include "util.h"
-+#include "missing_stdlib.h"
-
- static const UnitActiveState state_translation_table[_SERVICE_STATE_MAX] = {
- [SERVICE_DEAD] = UNIT_INACTIVE,
-diff --git a/src/coredump/coredump-vacuum.c b/src/coredump/coredump-vacuum.c
-index dcf9cc03cd..8f8d992ec2 100644
---- a/src/coredump/coredump-vacuum.c
-+++ b/src/coredump/coredump-vacuum.c
-@@ -16,6 +16,7 @@
- #include "string-util.h"
- #include "time-util.h"
- #include "user-util.h"
-+#include "missing_stdlib.h"
-
- #define DEFAULT_MAX_USE_LOWER (uint64_t) (1ULL*1024ULL*1024ULL) /* 1 MiB */
- #define DEFAULT_MAX_USE_UPPER (uint64_t) (4ULL*1024ULL*1024ULL*1024ULL) /* 4 GiB */
-diff --git a/src/fstab-generator/fstab-generator.c b/src/fstab-generator/fstab-generator.c
-index 9b32383a76..f8d3397a06 100644
---- a/src/fstab-generator/fstab-generator.c
-+++ b/src/fstab-generator/fstab-generator.c
-@@ -29,6 +29,7 @@
- #include "util.h"
- #include "virt.h"
- #include "volatile-util.h"
-+#include "missing_stdlib.h"
-
- typedef enum MountPointFlags {
- MOUNT_NOAUTO = 1 << 0,
-diff --git a/src/journal-remote/journal-remote-main.c b/src/journal-remote/journal-remote-main.c
-index 3e3646e45f..6a8fc60f6d 100644
---- a/src/journal-remote/journal-remote-main.c
-+++ b/src/journal-remote/journal-remote-main.c
-@@ -24,6 +24,7 @@
- #include "stat-util.h"
- #include "string-table.h"
- #include "strv.h"
-+#include "missing_stdlib.h"
-
- #define PRIV_KEY_FILE CERTIFICATE_ROOT "/private/journal-remote.pem"
- #define CERT_FILE CERTIFICATE_ROOT "/certs/journal-remote.pem"
-diff --git a/src/journal/journalctl.c b/src/journal/journalctl.c
-index 3c4a7c0a7a..6a792404f2 100644
---- a/src/journal/journalctl.c
-+++ b/src/journal/journalctl.c
-@@ -73,6 +73,7 @@
- #include "unit-name.h"
- #include "user-util.h"
- #include "varlink.h"
-+#include "missing_stdlib.h"
-
- #define DEFAULT_FSS_INTERVAL_USEC (15*USEC_PER_MINUTE)
- #define PROCESS_INOTIFY_INTERVAL 1024 /* Every 1,024 messages processed */
-diff --git a/src/libsystemd/sd-bus/bus-message.c b/src/libsystemd/sd-bus/bus-message.c
-index 96529b422b..ddb5e9c698 100644
---- a/src/libsystemd/sd-bus/bus-message.c
-+++ b/src/libsystemd/sd-bus/bus-message.c
-@@ -20,6 +20,7 @@
- #include "strv.h"
- #include "time-util.h"
- #include "utf8.h"
-+#include "missing_stdlib.h"
-
- static int message_append_basic(sd_bus_message *m, char type, const void *p, const void **stored);
-
-diff --git a/src/libsystemd/sd-bus/bus-objects.c b/src/libsystemd/sd-bus/bus-objects.c
-index 28d8336718..5d3ce88a53 100644
---- a/src/libsystemd/sd-bus/bus-objects.c
-+++ b/src/libsystemd/sd-bus/bus-objects.c
-@@ -12,6 +12,7 @@
- #include "set.h"
- #include "string-util.h"
- #include "strv.h"
-+#include "missing_stdlib.h"
-
- static int node_vtable_get_userdata(
- sd_bus *bus,
-diff --git a/src/libsystemd/sd-bus/bus-socket.c b/src/libsystemd/sd-bus/bus-socket.c
-index 14951ccb33..b7f86ca501 100644
---- a/src/libsystemd/sd-bus/bus-socket.c
-+++ b/src/libsystemd/sd-bus/bus-socket.c
-@@ -28,6 +28,7 @@
- #include "string-util.h"
- #include "user-util.h"
- #include "utf8.h"
-+#include "missing_stdlib.h"
-
- #define SNDBUF_SIZE (8*1024*1024)
-
-diff --git a/src/libsystemd/sd-bus/sd-bus.c b/src/libsystemd/sd-bus/sd-bus.c
-index 9e1d29cc1d..8c3165f0ce 100644
---- a/src/libsystemd/sd-bus/sd-bus.c
-+++ b/src/libsystemd/sd-bus/sd-bus.c
-@@ -43,6 +43,7 @@
- #include "string-util.h"
- #include "strv.h"
- #include "user-util.h"
-+#include "missing_stdlib.h"
-
- #define log_debug_bus_message(m) \
- do { \
-diff --git a/src/libsystemd/sd-bus/test-bus-benchmark.c b/src/libsystemd/sd-bus/test-bus-benchmark.c
-index 317653bedc..d028216c48 100644
---- a/src/libsystemd/sd-bus/test-bus-benchmark.c
-+++ b/src/libsystemd/sd-bus/test-bus-benchmark.c
-@@ -14,6 +14,7 @@
- #include "string-util.h"
- #include "time-util.h"
- #include "util.h"
-+#include "missing_stdlib.h"
-
- #define MAX_SIZE (2*1024*1024)
-
-diff --git a/src/libsystemd/sd-journal/sd-journal.c b/src/libsystemd/sd-journal/sd-journal.c
-index 7a6cc4aca3..b7f7cd65c5 100644
---- a/src/libsystemd/sd-journal/sd-journal.c
-+++ b/src/libsystemd/sd-journal/sd-journal.c
-@@ -41,6 +41,7 @@
- #include "string-util.h"
- #include "strv.h"
- #include "syslog-util.h"
-+#include "missing_stdlib.h"
-
- #define JOURNAL_FILES_MAX 7168
-
-diff --git a/src/locale/keymap-util.c b/src/locale/keymap-util.c
-index 10d2ed7aec..4fbe3f6b4a 100644
---- a/src/locale/keymap-util.c
-+++ b/src/locale/keymap-util.c
-@@ -24,6 +24,7 @@
- #include "string-util.h"
- #include "strv.h"
- #include "tmpfile-util.h"
-+#include "missing_stdlib.h"
-
- static bool startswith_comma(const char *s, const char *prefix) {
- s = startswith(s, prefix);
-diff --git a/src/login/pam_systemd.c b/src/login/pam_systemd.c
-index 5bd7efc3e8..282899601e 100644
---- a/src/login/pam_systemd.c
-+++ b/src/login/pam_systemd.c
-@@ -31,6 +31,7 @@
- #include "locale-util.h"
- #include "login-util.h"
- #include "macro.h"
-+#include "missing_stdlib.h"
- #include "pam-util.h"
- #include "parse-util.h"
- #include "path-util.h"
-diff --git a/src/network/generator/network-generator.c b/src/network/generator/network-generator.c
-index 063ad08d80..f9823a433b 100644
---- a/src/network/generator/network-generator.c
-+++ b/src/network/generator/network-generator.c
-@@ -13,6 +13,7 @@
- #include "string-table.h"
- #include "string-util.h"
- #include "strv.h"
-+#include "missing_stdlib.h"
-
- /*
- # .network
-diff --git a/src/nspawn/nspawn-settings.c b/src/nspawn/nspawn-settings.c
-index 1f58bf3ed4..8457a3b0e3 100644
---- a/src/nspawn/nspawn-settings.c
-+++ b/src/nspawn/nspawn-settings.c
-@@ -17,6 +17,7 @@
- #include "strv.h"
- #include "user-util.h"
- #include "util.h"
-+#include "missing_stdlib.h"
-
- Settings *settings_new(void) {
- Settings *s;
-diff --git a/src/nss-mymachines/nss-mymachines.c b/src/nss-mymachines/nss-mymachines.c
-index c64e79bdff..eda26b0b9a 100644
---- a/src/nss-mymachines/nss-mymachines.c
-+++ b/src/nss-mymachines/nss-mymachines.c
-@@ -21,6 +21,7 @@
- #include "nss-util.h"
- #include "signal-util.h"
- #include "string-util.h"
-+#include "missing_stdlib.h"
-
- static void setup_logging_once(void) {
- static pthread_once_t once = PTHREAD_ONCE_INIT;
-diff --git a/src/portable/portable.c b/src/portable/portable.c
-index 0e6461ba93..54148d5924 100644
---- a/src/portable/portable.c
-+++ b/src/portable/portable.c
-@@ -39,6 +39,7 @@
- #include "strv.h"
- #include "tmpfile-util.h"
- #include "user-util.h"
-+#include "missing_stdlib.h"
-
- /* Markers used in the first line of our 20-portable.conf unit file drop-in to determine, that a) the unit file was
- * dropped there by the portable service logic and b) for which image it was dropped there. */
-diff --git a/src/resolve/resolvectl.c b/src/resolve/resolvectl.c
-index 5b3ceeff36..d36d1d57ae 100644
---- a/src/resolve/resolvectl.c
-+++ b/src/resolve/resolvectl.c
-@@ -43,6 +43,7 @@
- #include "utf8.h"
- #include "verb-log-control.h"
- #include "verbs.h"
-+#include "missing_stdlib.h"
-
- static int arg_family = AF_UNSPEC;
- static int arg_ifindex = 0;
-diff --git a/src/shared/bus-get-properties.c b/src/shared/bus-get-properties.c
-index 8b4f66b22e..5926e4c61b 100644
---- a/src/shared/bus-get-properties.c
-+++ b/src/shared/bus-get-properties.c
-@@ -4,6 +4,7 @@
- #include "rlimit-util.h"
- #include "stdio-util.h"
- #include "string-util.h"
-+#include "missing_stdlib.h"
-
- int bus_property_get_bool(
- sd_bus *bus,
-diff --git a/src/shared/bus-unit-procs.c b/src/shared/bus-unit-procs.c
-index 87c0334fec..402ab3493b 100644
---- a/src/shared/bus-unit-procs.c
-+++ b/src/shared/bus-unit-procs.c
-@@ -10,6 +10,7 @@
- #include "sort-util.h"
- #include "string-util.h"
- #include "terminal-util.h"
-+#include "missing_stdlib.h"
-
- struct CGroupInfo {
- char *cgroup_path;
-diff --git a/src/shared/bus-unit-util.c b/src/shared/bus-unit-util.c
-index dcce530c99..faf5a5bda0 100644
---- a/src/shared/bus-unit-util.c
-+++ b/src/shared/bus-unit-util.c
-@@ -49,6 +49,7 @@
- #include "unit-def.h"
- #include "user-util.h"
- #include "utf8.h"
-+#include "missing_stdlib.h"
-
- int bus_parse_unit_info(sd_bus_message *message, UnitInfo *u) {
- assert(message);
-diff --git a/src/shared/bus-util.c b/src/shared/bus-util.c
-index 4a2b7684bc..ee6d687c58 100644
---- a/src/shared/bus-util.c
-+++ b/src/shared/bus-util.c
-@@ -21,6 +21,7 @@
- #include "path-util.h"
- #include "socket-util.h"
- #include "stdio-util.h"
-+#include "missing_stdlib.h"
-
- static int name_owner_change_callback(sd_bus_message *m, void *userdata, sd_bus_error *ret_error) {
- sd_event *e = userdata;
-diff --git a/src/shared/dns-domain.c b/src/shared/dns-domain.c
-index f54b187a1b..299758c7e4 100644
---- a/src/shared/dns-domain.c
-+++ b/src/shared/dns-domain.c
-@@ -17,6 +17,7 @@
- #include "string-util.h"
- #include "strv.h"
- #include "utf8.h"
-+#include "missing_stdlib.h"
-
- int dns_label_unescape(const char **name, char *dest, size_t sz, DNSLabelFlags flags) {
- const char *n;
-diff --git a/src/shared/journal-importer.c b/src/shared/journal-importer.c
-index c6caf9330a..ebe33bd44a 100644
---- a/src/shared/journal-importer.c
-+++ b/src/shared/journal-importer.c
-@@ -15,6 +15,7 @@
- #include "parse-util.h"
- #include "string-util.h"
- #include "unaligned.h"
-+#include "missing_stdlib.h"
-
- enum {
- IMPORTER_STATE_LINE = 0, /* waiting to read, or reading line */
-diff --git a/src/shared/logs-show.c b/src/shared/logs-show.c
-index cf83eb6bca..e672a003a3 100644
---- a/src/shared/logs-show.c
-+++ b/src/shared/logs-show.c
-@@ -42,6 +42,7 @@
- #include "utf8.h"
- #include "util.h"
- #include "web-util.h"
-+#include "missing_stdlib.h"
-
- /* up to three lines (each up to 100 characters) or 300 characters, whichever is less */
- #define PRINT_LINE_THRESHOLD 3
-diff --git a/src/shared/pager.c b/src/shared/pager.c
-index f75ef62d2d..530001a821 100644
---- a/src/shared/pager.c
-+++ b/src/shared/pager.c
-@@ -26,6 +26,7 @@
- #include "strv.h"
- #include "terminal-util.h"
- #include "util.h"
-+#include "missing_stdlib.h"
-
- static pid_t pager_pid = 0;
-
-diff --git a/src/shared/uid-range.c b/src/shared/uid-range.c
-index 2c07a1b7a8..2a9d6ef4ab 100644
---- a/src/shared/uid-range.c
-+++ b/src/shared/uid-range.c
-@@ -9,6 +9,7 @@
- #include "sort-util.h"
- #include "uid-range.h"
- #include "user-util.h"
-+#include "missing_stdlib.h"
-
- static bool uid_range_intersect(UidRange *range, uid_t start, uid_t nr) {
- assert(range);
-diff --git a/src/socket-proxy/socket-proxyd.c b/src/socket-proxy/socket-proxyd.c
-index 7e9ab19666..56f619e54e 100644
---- a/src/socket-proxy/socket-proxyd.c
-+++ b/src/socket-proxy/socket-proxyd.c
-@@ -26,6 +26,7 @@
- #include "socket-util.h"
- #include "string-util.h"
- #include "util.h"
-+#include "missing_stdlib.h"
-
- #define BUFFER_SIZE (256 * 1024)
-
-diff --git a/src/test/test-hexdecoct.c b/src/test/test-hexdecoct.c
-index cc9a7cb838..a679614a47 100644
---- a/src/test/test-hexdecoct.c
-+++ b/src/test/test-hexdecoct.c
-@@ -7,6 +7,7 @@
- #include "macro.h"
- #include "random-util.h"
- #include "string-util.h"
-+#include "missing_stdlib.h"
- #include "tests.h"
-
- TEST(hexchar) {
-diff --git a/src/udev/udev-builtin-path_id.c b/src/udev/udev-builtin-path_id.c
-index ae92e45205..1e6f3205cb 100644
---- a/src/udev/udev-builtin-path_id.c
-+++ b/src/udev/udev-builtin-path_id.c
-@@ -22,6 +22,7 @@
- #include "sysexits.h"
- #include "udev-builtin.h"
- #include "udev-util.h"
-+#include "missing_stdlib.h"
-
- _printf_(2,3)
- static void path_prepend(char **path, const char *fmt, ...) {
-diff --git a/src/udev/udev-event.c b/src/udev/udev-event.c
-index a60e4f294c..571c43765b 100644
---- a/src/udev/udev-event.c
-+++ b/src/udev/udev-event.c
-@@ -35,6 +35,7 @@
- #include "udev-util.h"
- #include "udev-watch.h"
- #include "user-util.h"
-+#include "missing_stdlib.h"
-
- typedef struct Spawn {
- sd_device *device;
-diff --git a/src/udev/udev-rules.c b/src/udev/udev-rules.c
-index 1a384d6b38..0089833e3f 100644
---- a/src/udev/udev-rules.c
-+++ b/src/udev/udev-rules.c
-@@ -34,6 +34,7 @@
- #include "udev-util.h"
- #include "user-util.h"
- #include "virt.h"
-+#include "missing_stdlib.h"
-
- #define RULES_DIRS (const char* const*) CONF_PATHS_STRV("udev/rules.d")
-
diff --git a/meta/recipes-core/systemd/systemd/0006-Use-uintmax_t-for-handling-rlim_t.patch b/meta/recipes-core/systemd/systemd/0006-Use-uintmax_t-for-handling-rlim_t.patch
new file mode 100644
index 0000000000..4be14b72ec
--- /dev/null
+++ b/meta/recipes-core/systemd/systemd/0006-Use-uintmax_t-for-handling-rlim_t.patch
@@ -0,0 +1,106 @@
+From 96e975a2412a20e5f80bd3ab144057d275eb8597 Mon Sep 17 00:00:00 2001
+From: Chen Qi <Qi.Chen@windriver.com>
+Date: Mon, 25 Feb 2019 15:12:41 +0800
+Subject: [PATCH 06/22] Use uintmax_t for handling rlim_t
+
+PRIu{32,64} is not right format to represent rlim_t type
+therefore use %ju and typecast the rlim_t variables to
+uintmax_t.
+
+Fixes portablility errors like
+
+execute.c:3446:36: error: format '%lu' expects argument of type 'long unsigned int', but argument 5 has type 'rlim_t {aka long long unsigned int}' [-Werror=format=]
+| fprintf(f, "%s%s: " RLIM_FMT "\n",
+| ^~~~~~~~
+| prefix, rlimit_to_string(i), c->rlimit[i]->rlim_max);
+| ~~~~~~~~~~~~~~~~~~~~~~
+
+Upstream-Status: Denied [https://github.com/systemd/systemd/pull/7199]
+
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+[Rebased for v241]
+Signed-off-by: Chen Qi <Qi.Chen@windriver.com>
+---
+ src/basic/format-util.h | 8 +-------
+ src/basic/rlimit-util.c | 12 ++++++------
+ src/core/execute.c | 4 ++--
+ 3 files changed, 9 insertions(+), 15 deletions(-)
+
+diff --git a/src/basic/format-util.h b/src/basic/format-util.h
+index 8719df3e29..9becc96066 100644
+--- a/src/basic/format-util.h
++++ b/src/basic/format-util.h
+@@ -34,13 +34,7 @@ assert_cc(sizeof(gid_t) == sizeof(uint32_t));
+ # error Unknown timex member size
+ #endif
+
+-#if SIZEOF_RLIM_T == 8
+-# define RLIM_FMT "%" PRIu64
+-#elif SIZEOF_RLIM_T == 4
+-# define RLIM_FMT "%" PRIu32
+-#else
+-# error Unknown rlim_t size
+-#endif
++#define RLIM_FMT "%ju"
+
+ #if SIZEOF_DEV_T == 8
+ # define DEV_FMT "%" PRIu64
+diff --git a/src/basic/rlimit-util.c b/src/basic/rlimit-util.c
+index c1f0b2b974..61c5412582 100644
+--- a/src/basic/rlimit-util.c
++++ b/src/basic/rlimit-util.c
+@@ -44,7 +44,7 @@ int setrlimit_closest(int resource, const struct rlimit *rlim) {
+ fixed.rlim_max == highest.rlim_max)
+ return 0;
+
+- log_debug("Failed at setting rlimit " RLIM_FMT " for resource RLIMIT_%s. Will attempt setting value " RLIM_FMT " instead.", rlim->rlim_max, rlimit_to_string(resource), fixed.rlim_max);
++ log_debug("Failed at setting rlimit " RLIM_FMT " for resource RLIMIT_%s. Will attempt setting value " RLIM_FMT " instead.", (uintmax_t)rlim->rlim_max, rlimit_to_string(resource), (uintmax_t)fixed.rlim_max);
+
+ return RET_NERRNO(setrlimit(resource, &fixed));
+ }
+@@ -307,13 +307,13 @@ int rlimit_format(const struct rlimit *rl, char **ret) {
+ if (rl->rlim_cur >= RLIM_INFINITY && rl->rlim_max >= RLIM_INFINITY)
+ r = free_and_strdup(&s, "infinity");
+ else if (rl->rlim_cur >= RLIM_INFINITY)
+- r = asprintf(&s, "infinity:" RLIM_FMT, rl->rlim_max);
++ r = asprintf(&s, "infinity:" RLIM_FMT, (uintmax_t)rl->rlim_max);
+ else if (rl->rlim_max >= RLIM_INFINITY)
+- r = asprintf(&s, RLIM_FMT ":infinity", rl->rlim_cur);
++ r = asprintf(&s, RLIM_FMT ":infinity", (uintmax_t)rl->rlim_cur);
+ else if (rl->rlim_cur == rl->rlim_max)
+- r = asprintf(&s, RLIM_FMT, rl->rlim_cur);
++ r = asprintf(&s, RLIM_FMT, (uintmax_t)rl->rlim_cur);
+ else
+- r = asprintf(&s, RLIM_FMT ":" RLIM_FMT, rl->rlim_cur, rl->rlim_max);
++ r = asprintf(&s, RLIM_FMT ":" RLIM_FMT, (uintmax_t)rl->rlim_cur, (uintmax_t)rl->rlim_max);
+ if (r < 0)
+ return -ENOMEM;
+
+@@ -422,7 +422,7 @@ int rlimit_nofile_safe(void) {
+ rl.rlim_max = MIN(rl.rlim_max, (rlim_t) read_nr_open());
+ rl.rlim_cur = MIN((rlim_t) FD_SETSIZE, rl.rlim_max);
+ if (setrlimit(RLIMIT_NOFILE, &rl) < 0)
+- return log_debug_errno(errno, "Failed to lower RLIMIT_NOFILE's soft limit to " RLIM_FMT ": %m", rl.rlim_cur);
++ return log_debug_errno(errno, "Failed to lower RLIMIT_NOFILE's soft limit to " RLIM_FMT ": %m", (uintmax_t)rl.rlim_cur);
+
+ return 1;
+ }
+diff --git a/src/core/execute.c b/src/core/execute.c
+index bd3da0c401..df1870fd2f 100644
+--- a/src/core/execute.c
++++ b/src/core/execute.c
+@@ -1045,9 +1045,9 @@ void exec_context_dump(const ExecContext *c, FILE* f, const char *prefix) {
+ for (unsigned i = 0; i < RLIM_NLIMITS; i++)
+ if (c->rlimit[i]) {
+ fprintf(f, "%sLimit%s: " RLIM_FMT "\n",
+- prefix, rlimit_to_string(i), c->rlimit[i]->rlim_max);
++ prefix, rlimit_to_string(i), (uintmax_t)c->rlimit[i]->rlim_max);
+ fprintf(f, "%sLimit%sSoft: " RLIM_FMT "\n",
+- prefix, rlimit_to_string(i), c->rlimit[i]->rlim_cur);
++ prefix, rlimit_to_string(i), (uintmax_t)c->rlimit[i]->rlim_cur);
+ }
+
+ if (c->ioprio_set) {
+--
+2.34.1
+
diff --git a/meta/recipes-core/systemd/systemd/0007-don-t-fail-if-GLOB_BRACE-and-GLOB_ALTDIRFUNC-is-not-.patch b/meta/recipes-core/systemd/systemd/0007-don-t-fail-if-GLOB_BRACE-and-GLOB_ALTDIRFUNC-is-not-.patch
deleted file mode 100644
index b84fbaa67e..0000000000
--- a/meta/recipes-core/systemd/systemd/0007-don-t-fail-if-GLOB_BRACE-and-GLOB_ALTDIRFUNC-is-not-.patch
+++ /dev/null
@@ -1,153 +0,0 @@
-From 74c664bcd6b9a5fcf3466310c07f608d12456f7f Mon Sep 17 00:00:00 2001
-From: Chen Qi <Qi.Chen@windriver.com>
-Date: Mon, 25 Feb 2019 14:56:21 +0800
-Subject: [PATCH] don't fail if GLOB_BRACE and GLOB_ALTDIRFUNC is not defined
-
-If the standard library doesn't provide brace
-expansion users just won't get it.
-
-Dont use GNU GLOB extentions on non-glibc systems
-
-Conditionalize use of GLOB_ALTDIRFUNC
-
-Upstream-Status: Inappropriate [musl specific]
-
-Signed-off-by: Khem Raj <raj.khem@gmail.com>
-Signed-off-by: Chen Qi <Qi.Chen@windriver.com>
-[rebased for systemd 243]
-Signed-off-by: Scott Murray <scott.murray@konsulko.com>
-
----
- src/basic/glob-util.c | 12 ++++++++++++
- src/test/test-glob-util.c | 16 ++++++++++++++++
- src/tmpfiles/tmpfiles.c | 10 ++++++++++
- 3 files changed, 38 insertions(+)
-
-diff --git a/src/basic/glob-util.c b/src/basic/glob-util.c
-index e026b29478..815e56ef68 100644
---- a/src/basic/glob-util.c
-+++ b/src/basic/glob-util.c
-@@ -12,6 +12,12 @@
- #include "path-util.h"
- #include "strv.h"
-
-+/* Don't fail if the standard library
-+ * doesn't provide brace expansion */
-+#ifndef GLOB_BRACE
-+#define GLOB_BRACE 0
-+#endif
-+
- static void closedir_wrapper(void* v) {
- (void) closedir(v);
- }
-@@ -19,6 +25,7 @@ static void closedir_wrapper(void* v) {
- int safe_glob(const char *path, int flags, glob_t *pglob) {
- int k;
-
-+#ifdef GLOB_ALTDIRFUNC
- /* We want to set GLOB_ALTDIRFUNC ourselves, don't allow it to be set. */
- assert(!(flags & GLOB_ALTDIRFUNC));
-
-@@ -32,9 +39,14 @@ int safe_glob(const char *path, int flags, glob_t *pglob) {
- pglob->gl_lstat = lstat;
- if (!pglob->gl_stat)
- pglob->gl_stat = stat;
-+#endif
-
- errno = 0;
-+#ifdef GLOB_ALTDIRFUNC
- k = glob(path, flags | GLOB_ALTDIRFUNC, NULL, pglob);
-+#else
-+ k = glob(path, flags, NULL, pglob);
-+#endif
- if (k == GLOB_NOMATCH)
- return -ENOENT;
- if (k == GLOB_NOSPACE)
-diff --git a/src/test/test-glob-util.c b/src/test/test-glob-util.c
-index ec8b74f48f..d99a6095df 100644
---- a/src/test/test-glob-util.c
-+++ b/src/test/test-glob-util.c
-@@ -13,6 +13,12 @@
- #include "tests.h"
- #include "tmpfile-util.h"
-
-+/* Don't fail if the standard library
-+ * doesn't provide brace expansion */
-+#ifndef GLOB_BRACE
-+#define GLOB_BRACE 0
-+#endif
-+
- TEST(glob_exists) {
- char name[] = "/tmp/test-glob_exists.XXXXXX";
- int fd = -1;
-@@ -40,11 +46,13 @@ TEST(glob_no_dot) {
- const char *fn;
-
- _cleanup_globfree_ glob_t g = {
-+#ifdef GLOB_ALTDIRFUNC
- .gl_closedir = closedir_wrapper,
- .gl_readdir = (struct dirent *(*)(void *)) readdir_no_dot,
- .gl_opendir = (void *(*)(const char *)) opendir,
- .gl_lstat = lstat,
- .gl_stat = stat,
-+#endif
- };
-
- int r;
-@@ -52,11 +60,19 @@ TEST(glob_no_dot) {
- assert_se(mkdtemp(template));
-
- fn = strjoina(template, "/*");
-+#ifdef GLOB_ALTDIRFUNC
- r = glob(fn, GLOB_NOSORT|GLOB_BRACE|GLOB_ALTDIRFUNC, NULL, &g);
-+#else
-+ r = glob(fn, GLOB_NOSORT|GLOB_BRACE, NULL, &g);
-+#endif
- assert_se(r == GLOB_NOMATCH);
-
- fn = strjoina(template, "/.*");
-+#ifdef GLOB_ALTDIRFUNC
- r = glob(fn, GLOB_NOSORT|GLOB_BRACE|GLOB_ALTDIRFUNC, NULL, &g);
-+#else
-+ r = glob(fn, GLOB_NOSORT|GLOB_BRACE, NULL, &g);
-+#endif
- assert_se(r == GLOB_NOMATCH);
-
- (void) rm_rf(template, REMOVE_ROOT|REMOVE_PHYSICAL);
-diff --git a/src/tmpfiles/tmpfiles.c b/src/tmpfiles/tmpfiles.c
-index fcab51c208..fdef1807ae 100644
---- a/src/tmpfiles/tmpfiles.c
-+++ b/src/tmpfiles/tmpfiles.c
-@@ -67,6 +67,12 @@
- #include "umask-util.h"
- #include "user-util.h"
-
-+/* Don't fail if the standard library
-+ * doesn't provide brace expansion */
-+#ifndef GLOB_BRACE
-+#define GLOB_BRACE 0
-+#endif
-+
- /* This reads all files listed in /etc/tmpfiles.d/?*.conf and creates
- * them in the file system. This is intended to be used to create
- * properly owned directories beneath /tmp, /var/tmp, /run, which are
-@@ -1961,7 +1967,9 @@ finish:
-
- static int glob_item(Item *i, action_t action) {
- _cleanup_globfree_ glob_t g = {
-+#ifdef GLOB_ALTDIRFUNC
- .gl_opendir = (void *(*)(const char *)) opendir_nomod,
-+#endif
- };
- int r = 0, k;
- char **fn;
-@@ -1981,7 +1989,9 @@ static int glob_item(Item *i, action_t action) {
-
- static int glob_item_recursively(Item *i, fdaction_t action) {
- _cleanup_globfree_ glob_t g = {
-+#ifdef GLOB_ALTDIRFUNC
- .gl_opendir = (void *(*)(const char *)) opendir_nomod,
-+#endif
- };
- int r = 0, k;
- char **fn;
diff --git a/meta/recipes-core/systemd/systemd/0007-don-t-pass-AT_SYMLINK_NOFOLLOW-flag-to-faccessat.patch b/meta/recipes-core/systemd/systemd/0007-don-t-pass-AT_SYMLINK_NOFOLLOW-flag-to-faccessat.patch
new file mode 100644
index 0000000000..8d6084239e
--- /dev/null
+++ b/meta/recipes-core/systemd/systemd/0007-don-t-pass-AT_SYMLINK_NOFOLLOW-flag-to-faccessat.patch
@@ -0,0 +1,99 @@
+From 4842cff4f1329f0b5034b529d56f8ad1f234ac4c Mon Sep 17 00:00:00 2001
+From: Andre McCurdy <armccurdy@gmail.com>
+Date: Tue, 10 Oct 2017 14:33:30 -0700
+Subject: [PATCH 07/22] don't pass AT_SYMLINK_NOFOLLOW flag to faccessat()
+
+Avoid using AT_SYMLINK_NOFOLLOW flag. It doesn't seem like the right
+thing to do and it's not portable (not supported by musl). See:
+
+ http://lists.landley.net/pipermail/toybox-landley.net/2014-September/003610.html
+ http://www.openwall.com/lists/musl/2015/02/05/2
+
+Note that laccess() is never passing AT_EACCESS so a lot of the
+discussion in the links above doesn't apply. Note also that
+(currently) all systemd callers of laccess() pass mode as F_OK, so
+only check for existence of a file, not access permissions.
+Therefore, in this case, the only distiction between faccessat()
+with (flag == 0) and (flag == AT_SYMLINK_NOFOLLOW) is the behaviour
+for broken symlinks; laccess() on a broken symlink will succeed with
+(flag == AT_SYMLINK_NOFOLLOW) and fail (flag == 0).
+
+The laccess() macros was added to systemd some time ago and it's not
+clear if or why it needs to return success for broken symlinks. Maybe
+just historical and not actually necessary or desired behaviour?
+
+Upstream-Status: Inappropriate [musl specific]
+
+Signed-off-by: Andre McCurdy <armccurdy@gmail.com>
+---
+ src/basic/fs-util.h | 21 ++++++++++++++++++++-
+ src/shared/base-filesystem.c | 6 +++---
+ 2 files changed, 23 insertions(+), 4 deletions(-)
+
+diff --git a/src/basic/fs-util.h b/src/basic/fs-util.h
+index 1023ab73ca..c78ff6f27f 100644
+--- a/src/basic/fs-util.h
++++ b/src/basic/fs-util.h
+@@ -49,8 +49,27 @@ int futimens_opath(int fd, const struct timespec ts[2]);
+ int fd_warn_permissions(const char *path, int fd);
+ int stat_warn_permissions(const char *path, const struct stat *st);
+
++/*
++ Avoid using AT_SYMLINK_NOFOLLOW flag. It doesn't seem like the right thing to
++ do and it's not portable (not supported by musl). See:
++
++ http://lists.landley.net/pipermail/toybox-landley.net/2014-September/003610.html
++ http://www.openwall.com/lists/musl/2015/02/05/2
++
++ Note that laccess() is never passing AT_EACCESS so a lot of the discussion in
++ the links above doesn't apply. Note also that (currently) all systemd callers
++ of laccess() pass mode as F_OK, so only check for existence of a file, not
++ access permissions. Therefore, in this case, the only distiction between
++ faccessat() with (flag == 0) and (flag == AT_SYMLINK_NOFOLLOW) is the
++ behaviour for broken symlinks; laccess() on a broken symlink will succeed
++ with (flag == AT_SYMLINK_NOFOLLOW) and fail (flag == 0).
++
++ The laccess() macros was added to systemd some time ago and it's not clear if
++ or why it needs to return success for broken symlinks. Maybe just historical
++ and not actually necessary or desired behaviour?
++*/
+ #define laccess(path, mode) \
+- RET_NERRNO(faccessat(AT_FDCWD, (path), (mode), AT_SYMLINK_NOFOLLOW))
++ RET_NERRNO(faccessat(AT_FDCWD, (path), (mode), 0))
+
+ int touch_file(const char *path, bool parents, usec_t stamp, uid_t uid, gid_t gid, mode_t mode);
+
+diff --git a/src/shared/base-filesystem.c b/src/shared/base-filesystem.c
+index 569ef466c3..7ae921a113 100644
+--- a/src/shared/base-filesystem.c
++++ b/src/shared/base-filesystem.c
+@@ -145,7 +145,7 @@ int base_filesystem_create_fd(int fd, const char *root, uid_t uid, gid_t gid) {
+ /* The "root" parameter is decoration only – it's only used as part of log messages */
+
+ for (size_t i = 0; i < ELEMENTSOF(table); i++) {
+- if (faccessat(fd, table[i].dir, F_OK, AT_SYMLINK_NOFOLLOW) >= 0)
++ if (faccessat(fd, table[i].dir, F_OK, 0) >= 0)
+ continue;
+
+ if (table[i].target) { /* Create as symlink? */
+@@ -153,7 +153,7 @@ int base_filesystem_create_fd(int fd, const char *root, uid_t uid, gid_t gid) {
+
+ /* check if one of the targets exists */
+ NULSTR_FOREACH(s, table[i].target) {
+- if (faccessat(fd, s, F_OK, AT_SYMLINK_NOFOLLOW) < 0)
++ if (faccessat(fd, s, F_OK, 0) < 0)
+ continue;
+
+ /* check if a specific file exists at the target path */
+@@ -164,7 +164,7 @@ int base_filesystem_create_fd(int fd, const char *root, uid_t uid, gid_t gid) {
+ if (!p)
+ return log_oom();
+
+- if (faccessat(fd, p, F_OK, AT_SYMLINK_NOFOLLOW) < 0)
++ if (faccessat(fd, p, F_OK, 0) < 0)
+ continue;
+ }
+
+--
+2.34.1
+
diff --git a/meta/recipes-core/systemd/systemd/0008-Define-glibc-compatible-basename-for-non-glibc-syste.patch b/meta/recipes-core/systemd/systemd/0008-Define-glibc-compatible-basename-for-non-glibc-syste.patch
new file mode 100644
index 0000000000..c1a8bb19fe
--- /dev/null
+++ b/meta/recipes-core/systemd/systemd/0008-Define-glibc-compatible-basename-for-non-glibc-syste.patch
@@ -0,0 +1,34 @@
+From bab07e779ff23d5593bb118efaaa31b60a6dce87 Mon Sep 17 00:00:00 2001
+From: Khem Raj <raj.khem@gmail.com>
+Date: Sun, 27 May 2018 08:36:44 -0700
+Subject: [PATCH 08/22] Define glibc compatible basename() for non-glibc
+ systems
+
+Fixes builds with musl, even though systemd is adamant about
+using non-posix basename implementation, we have a way out
+
+Upstream-Status: Inappropriate [musl specific]
+
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+---
+ src/basic/string-util.h | 4 ++++
+ 1 file changed, 4 insertions(+)
+
+diff --git a/src/basic/string-util.h b/src/basic/string-util.h
+index b6d8be3083..0a29036c4c 100644
+--- a/src/basic/string-util.h
++++ b/src/basic/string-util.h
+@@ -26,6 +26,10 @@
+ #define URI_UNRESERVED ALPHANUMERICAL "-._~" /* [RFC3986] */
+ #define URI_VALID URI_RESERVED URI_UNRESERVED /* [RFC3986] */
+
++#if !defined(__GLIBC__)
++#define basename(src) (strrchr(src,'/') ? strrchr(src,'/')+1 : src)
++#endif
++
+ static inline char* strstr_ptr(const char *haystack, const char *needle) {
+ if (!haystack || !needle)
+ return NULL;
+--
+2.34.1
+
diff --git a/meta/recipes-core/systemd/systemd/0008-add-missing-FTW_-macros-for-musl.patch b/meta/recipes-core/systemd/systemd/0008-add-missing-FTW_-macros-for-musl.patch
deleted file mode 100644
index 4cce9a3fe6..0000000000
--- a/meta/recipes-core/systemd/systemd/0008-add-missing-FTW_-macros-for-musl.patch
+++ /dev/null
@@ -1,74 +0,0 @@
-From a0450f7909348e7ff1d58adc0aee4119a0519c1f Mon Sep 17 00:00:00 2001
-From: Chen Qi <Qi.Chen@windriver.com>
-Date: Mon, 25 Feb 2019 15:00:06 +0800
-Subject: [PATCH] add missing FTW_ macros for musl
-
-This is to avoid build failures like below for musl.
-
- locale-util.c:296:24: error: 'FTW_STOP' undeclared
-
-Revisit this when upgrading to 251+ since most of these defines are not
-needed anymore except FTW_CONTINUE
-
-Upstream-Status: Inappropriate [musl specific]
-
-Signed-off-by: Chen Qi <Qi.Chen@windriver.com>
-
----
- src/basic/missing_type.h | 20 ++++++++++++++++++++
- src/shared/mount-setup.c | 1 +
- src/test/test-recurse-dir.c | 1 +
- 3 files changed, 22 insertions(+)
-
-diff --git a/src/basic/missing_type.h b/src/basic/missing_type.h
-index aeaf6ad5ec..3df1084ef2 100644
---- a/src/basic/missing_type.h
-+++ b/src/basic/missing_type.h
-@@ -19,3 +19,23 @@ typedef int (*comparison_fn_t)(const void *, const void *);
- #define __COMPAR_FN_T
- typedef int (*__compar_fn_t)(const void *, const void *);
- #endif
-+
-+#ifndef FTW_ACTIONRETVAL
-+#define FTW_ACTIONRETVAL 16
-+#endif
-+
-+#ifndef FTW_CONTINUE
-+#define FTW_CONTINUE 0
-+#endif
-+
-+#ifndef FTW_STOP
-+#define FTW_STOP 1
-+#endif
-+
-+#ifndef FTW_SKIP_SUBTREE
-+#define FTW_SKIP_SUBTREE 2
-+#endif
-+
-+#ifndef FTW_SKIP_SIBLINGS
-+#define FTW_SKIP_SIBLINGS 3
-+#endif
-diff --git a/src/shared/mount-setup.c b/src/shared/mount-setup.c
-index 7917968497..cc3d5baaab 100644
---- a/src/shared/mount-setup.c
-+++ b/src/shared/mount-setup.c
-@@ -32,6 +32,7 @@
- #include "strv.h"
- #include "user-util.h"
- #include "virt.h"
-+#include "missing_type.h"
-
- typedef enum MountMode {
- MNT_NONE = 0,
-diff --git a/src/test/test-recurse-dir.c b/src/test/test-recurse-dir.c
-index 2c2120b136..bc60a178a2 100644
---- a/src/test/test-recurse-dir.c
-+++ b/src/test/test-recurse-dir.c
-@@ -6,6 +6,7 @@
- #include "recurse-dir.h"
- #include "strv.h"
- #include "tests.h"
-+#include "missing_type.h"
-
- static char **list_nftw = NULL;
-
diff --git a/meta/recipes-core/systemd/systemd/0008-implment-systemd-sysv-install-for-OE.patch b/meta/recipes-core/systemd/systemd/0008-implment-systemd-sysv-install-for-OE.patch
new file mode 100644
index 0000000000..acff18dc43
--- /dev/null
+++ b/meta/recipes-core/systemd/systemd/0008-implment-systemd-sysv-install-for-OE.patch
@@ -0,0 +1,43 @@
+From 5712d56f1cd654d2e5d2e9117ff77fe4c299f76b Mon Sep 17 00:00:00 2001
+From: Khem Raj <raj.khem@gmail.com>
+Date: Sat, 5 Sep 2015 06:31:47 +0000
+Subject: [PATCH] implment systemd-sysv-install for OE
+
+Use update-rc.d for enabling/disabling and status command
+to check the status of the sysv service
+
+Upstream-Status: Inappropriate [OE-Specific]
+
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+---
+ src/systemctl/systemd-sysv-install.SKELETON | 6 +++---
+ 1 file changed, 3 insertions(+), 3 deletions(-)
+
+diff --git a/src/systemctl/systemd-sysv-install.SKELETON b/src/systemctl/systemd-sysv-install.SKELETON
+index cb58d8243b..000bdf6165 100755
+--- a/src/systemctl/systemd-sysv-install.SKELETON
++++ b/src/systemctl/systemd-sysv-install.SKELETON
+@@ -34,17 +34,17 @@ case "$1" in
+ enable)
+ # call the command to enable SysV init script $NAME here
+ # (consider optional $ROOT)
+- echo "IMPLEMENT ME: enabling SysV init.d script $NAME"
++ update-rc.d -f $NAME defaults
+ ;;
+ disable)
+ # call the command to disable SysV init script $NAME here
+ # (consider optional $ROOT)
+- echo "IMPLEMENT ME: disabling SysV init.d script $NAME"
++ update-rc.d -f $NAME remove
+ ;;
+ is-enabled)
+ # exit with 0 if $NAME is enabled, non-zero if it is disabled
+ # (consider optional $ROOT)
+- echo "IMPLEMENT ME: checking SysV init.d script $NAME"
++ /etc/init.d/$NAME status
+ ;;
+ *)
+ usage ;;
+--
+2.39.2
+
diff --git a/meta/recipes-core/systemd/systemd/0009-Do-not-disable-buffering-when-writing-to-oom_score_a.patch b/meta/recipes-core/systemd/systemd/0009-Do-not-disable-buffering-when-writing-to-oom_score_a.patch
new file mode 100644
index 0000000000..3ff0177ae3
--- /dev/null
+++ b/meta/recipes-core/systemd/systemd/0009-Do-not-disable-buffering-when-writing-to-oom_score_a.patch
@@ -0,0 +1,41 @@
+From 25093c5017725b8577c444dfea0f42ad85b43522 Mon Sep 17 00:00:00 2001
+From: Chen Qi <Qi.Chen@windriver.com>
+Date: Wed, 4 Jul 2018 15:00:44 +0800
+Subject: [PATCH 09/22] Do not disable buffering when writing to oom_score_adj
+
+On musl, disabling buffering when writing to oom_score_adj will
+cause the following error.
+
+ Failed to adjust OOM setting: Invalid argument
+
+This error appears for systemd-udevd.service and dbus.service.
+This is because kernel receives '-' instead of the whole '-900'
+if buffering is disabled.
+
+This is libc implementation specific, as glibc does not have this issue.
+
+Upstream-Status: Inappropriate [musl specific]
+
+Signed-off-by: Chen Qi <Qi.Chen@windriver.com>
+[rebased for systemd 243]
+Signed-off-by: Scott Murray <scott.murray@konsulko.com>
+---
+ src/basic/process-util.c | 2 +-
+ 1 file changed, 1 insertion(+), 1 deletion(-)
+
+diff --git a/src/basic/process-util.c b/src/basic/process-util.c
+index 201c5596ae..ea51595b6c 100644
+--- a/src/basic/process-util.c
++++ b/src/basic/process-util.c
+@@ -1716,7 +1716,7 @@ int set_oom_score_adjust(int value) {
+ xsprintf(t, "%i", value);
+
+ return write_string_file("/proc/self/oom_score_adj", t,
+- WRITE_STRING_FILE_VERIFY_ON_FAILURE|WRITE_STRING_FILE_DISABLE_BUFFER);
++ WRITE_STRING_FILE_VERIFY_ON_FAILURE);
+ }
+
+ int get_oom_score_adjust(int *ret) {
+--
+2.34.1
+
diff --git a/meta/recipes-core/systemd/systemd/0009-fix-missing-of-__register_atfork-for-non-glibc-build.patch b/meta/recipes-core/systemd/systemd/0009-fix-missing-of-__register_atfork-for-non-glibc-build.patch
deleted file mode 100644
index e7b7269f95..0000000000
--- a/meta/recipes-core/systemd/systemd/0009-fix-missing-of-__register_atfork-for-non-glibc-build.patch
+++ /dev/null
@@ -1,46 +0,0 @@
-From 3ca0920429f7eaf8c59f9ac8afd30a43b83d95ed Mon Sep 17 00:00:00 2001
-From: Chen Qi <Qi.Chen@windriver.com>
-Date: Mon, 25 Feb 2019 15:03:47 +0800
-Subject: [PATCH] fix missing of __register_atfork for non-glibc builds
-
-Upstream-Status: Inappropriate [musl specific]
-
-Drop this when upgrading to 251+, systemd does not use
-__register_atfork anymore
-
-Signed-off-by: Chen Qi <Qi.Chen@windriver.com>
-
----
- src/basic/process-util.c | 7 +++++++
- 1 file changed, 7 insertions(+)
-
-diff --git a/src/basic/process-util.c b/src/basic/process-util.c
-index c971852158..df6e85b1fc 100644
---- a/src/basic/process-util.c
-+++ b/src/basic/process-util.c
-@@ -18,6 +18,9 @@
- #if HAVE_VALGRIND_VALGRIND_H
- #include <valgrind/valgrind.h>
- #endif
-+#ifndef __GLIBC__
-+#include <pthread.h>
-+#endif
-
- #include "alloc-util.h"
- #include "architecture.h"
-@@ -1161,11 +1164,15 @@ void reset_cached_pid(void) {
- cached_pid = CACHED_PID_UNSET;
- }
-
-+#ifdef __GLIBC__
- /* We use glibc __register_atfork() + __dso_handle directly here, as they are not included in the glibc
- * headers. __register_atfork() is mostly equivalent to pthread_atfork(), but doesn't require us to link against
- * libpthread, as it is part of glibc anyway. */
- extern int __register_atfork(void (*prepare) (void), void (*parent) (void), void (*child) (void), void *dso_handle);
- extern void* __dso_handle _weak_;
-+#else
-+#define __register_atfork(prepare,parent,child,dso) pthread_atfork(prepare,parent,child)
-+#endif
-
- pid_t getpid_cached(void) {
- static bool installed = false;
diff --git a/meta/recipes-core/systemd/systemd/0010-Use-uintmax_t-for-handling-rlim_t.patch b/meta/recipes-core/systemd/systemd/0010-Use-uintmax_t-for-handling-rlim_t.patch
deleted file mode 100644
index 3a47d09e8a..0000000000
--- a/meta/recipes-core/systemd/systemd/0010-Use-uintmax_t-for-handling-rlim_t.patch
+++ /dev/null
@@ -1,104 +0,0 @@
-From 48a791aae7a47a2a08e9e60c18054071a43b8cda Mon Sep 17 00:00:00 2001
-From: Chen Qi <Qi.Chen@windriver.com>
-Date: Mon, 25 Feb 2019 15:12:41 +0800
-Subject: [PATCH] Use uintmax_t for handling rlim_t
-
-PRIu{32,64} is not right format to represent rlim_t type
-therefore use %ju and typecast the rlim_t variables to
-uintmax_t.
-
-Fixes portablility errors like
-
-execute.c:3446:36: error: format '%lu' expects argument of type 'long unsigned int', but argument 5 has type 'rlim_t {aka long long unsigned int}' [-Werror=format=]
-| fprintf(f, "%s%s: " RLIM_FMT "\n",
-| ^~~~~~~~
-| prefix, rlimit_to_string(i), c->rlimit[i]->rlim_max);
-| ~~~~~~~~~~~~~~~~~~~~~~
-
-Upstream-Status: Denied [https://github.com/systemd/systemd/pull/7199]
-
-Signed-off-by: Khem Raj <raj.khem@gmail.com>
-[Rebased for v241]
-Signed-off-by: Chen Qi <Qi.Chen@windriver.com>
-
----
- src/basic/format-util.h | 8 +-------
- src/basic/rlimit-util.c | 12 ++++++------
- src/core/execute.c | 4 ++--
- 3 files changed, 9 insertions(+), 15 deletions(-)
-
-diff --git a/src/basic/format-util.h b/src/basic/format-util.h
-index 8719df3e29..9becc96066 100644
---- a/src/basic/format-util.h
-+++ b/src/basic/format-util.h
-@@ -34,13 +34,7 @@ assert_cc(sizeof(gid_t) == sizeof(uint32_t));
- # error Unknown timex member size
- #endif
-
--#if SIZEOF_RLIM_T == 8
--# define RLIM_FMT "%" PRIu64
--#elif SIZEOF_RLIM_T == 4
--# define RLIM_FMT "%" PRIu32
--#else
--# error Unknown rlim_t size
--#endif
-+#define RLIM_FMT "%ju"
-
- #if SIZEOF_DEV_T == 8
- # define DEV_FMT "%" PRIu64
-diff --git a/src/basic/rlimit-util.c b/src/basic/rlimit-util.c
-index 33dfde9d6c..e018fd81fd 100644
---- a/src/basic/rlimit-util.c
-+++ b/src/basic/rlimit-util.c
-@@ -44,7 +44,7 @@ int setrlimit_closest(int resource, const struct rlimit *rlim) {
- fixed.rlim_max == highest.rlim_max)
- return 0;
-
-- log_debug("Failed at setting rlimit " RLIM_FMT " for resource RLIMIT_%s. Will attempt setting value " RLIM_FMT " instead.", rlim->rlim_max, rlimit_to_string(resource), fixed.rlim_max);
-+ log_debug("Failed at setting rlimit " RLIM_FMT " for resource RLIMIT_%s. Will attempt setting value " RLIM_FMT " instead.", (uintmax_t)rlim->rlim_max, rlimit_to_string(resource), (uintmax_t)fixed.rlim_max);
-
- return RET_NERRNO(setrlimit(resource, &fixed));
- }
-@@ -307,13 +307,13 @@ int rlimit_format(const struct rlimit *rl, char **ret) {
- if (rl->rlim_cur >= RLIM_INFINITY && rl->rlim_max >= RLIM_INFINITY)
- r = free_and_strdup(&s, "infinity");
- else if (rl->rlim_cur >= RLIM_INFINITY)
-- r = asprintf(&s, "infinity:" RLIM_FMT, rl->rlim_max);
-+ r = asprintf(&s, "infinity:" RLIM_FMT, (uintmax_t)rl->rlim_max);
- else if (rl->rlim_max >= RLIM_INFINITY)
-- r = asprintf(&s, RLIM_FMT ":infinity", rl->rlim_cur);
-+ r = asprintf(&s, RLIM_FMT ":infinity", (uintmax_t)rl->rlim_cur);
- else if (rl->rlim_cur == rl->rlim_max)
-- r = asprintf(&s, RLIM_FMT, rl->rlim_cur);
-+ r = asprintf(&s, RLIM_FMT, (uintmax_t)rl->rlim_cur);
- else
-- r = asprintf(&s, RLIM_FMT ":" RLIM_FMT, rl->rlim_cur, rl->rlim_max);
-+ r = asprintf(&s, RLIM_FMT ":" RLIM_FMT, (uintmax_t)rl->rlim_cur, (uintmax_t)rl->rlim_max);
- if (r < 0)
- return -ENOMEM;
-
-@@ -403,7 +403,7 @@ int rlimit_nofile_safe(void) {
-
- rl.rlim_cur = FD_SETSIZE;
- if (setrlimit(RLIMIT_NOFILE, &rl) < 0)
-- return log_debug_errno(errno, "Failed to lower RLIMIT_NOFILE's soft limit to " RLIM_FMT ": %m", rl.rlim_cur);
-+ return log_debug_errno(errno, "Failed to lower RLIMIT_NOFILE's soft limit to " RLIM_FMT ": %m", (uintmax_t)rl.rlim_cur);
-
- return 1;
- }
-diff --git a/src/core/execute.c b/src/core/execute.c
-index fccfb9268c..90f00e10a5 100644
---- a/src/core/execute.c
-+++ b/src/core/execute.c
-@@ -5633,9 +5633,9 @@ void exec_context_dump(const ExecContext *c, FILE* f, const char *prefix) {
- for (unsigned i = 0; i < RLIM_NLIMITS; i++)
- if (c->rlimit[i]) {
- fprintf(f, "%sLimit%s: " RLIM_FMT "\n",
-- prefix, rlimit_to_string(i), c->rlimit[i]->rlim_max);
-+ prefix, rlimit_to_string(i), (uintmax_t)c->rlimit[i]->rlim_max);
- fprintf(f, "%sLimit%sSoft: " RLIM_FMT "\n",
-- prefix, rlimit_to_string(i), c->rlimit[i]->rlim_cur);
-+ prefix, rlimit_to_string(i), (uintmax_t)c->rlimit[i]->rlim_cur);
- }
-
- if (c->ioprio_set) {
diff --git a/meta/recipes-core/systemd/systemd/0010-distinguish-XSI-compliant-strerror_r-from-GNU-specif.patch b/meta/recipes-core/systemd/systemd/0010-distinguish-XSI-compliant-strerror_r-from-GNU-specif.patch
new file mode 100644
index 0000000000..cf59ac7d06
--- /dev/null
+++ b/meta/recipes-core/systemd/systemd/0010-distinguish-XSI-compliant-strerror_r-from-GNU-specif.patch
@@ -0,0 +1,76 @@
+From 2adbe9773cd65c48eec9df96868d4a738927c8d9 Mon Sep 17 00:00:00 2001
+From: Chen Qi <Qi.Chen@windriver.com>
+Date: Tue, 10 Jul 2018 15:40:17 +0800
+Subject: [PATCH 10/22] distinguish XSI-compliant strerror_r from GNU-specifi
+ strerror_r
+
+XSI-compliant strerror_r and GNU-specifi strerror_r are different.
+
+ int strerror_r(int errnum, char *buf, size_t buflen);
+ /* XSI-compliant */
+
+ char *strerror_r(int errnum, char *buf, size_t buflen);
+ /* GNU-specific */
+
+We need to distinguish between them. Otherwise, we'll get an int value
+assigned to (char *) variable, resulting in segment fault.
+
+Upstream-Status: Inappropriate [musl specific]
+
+Signed-off-by: Chen Qi <Qi.Chen@windriver.com>
+---
+ src/libsystemd/sd-bus/bus-error.c | 11 ++++++++++-
+ src/libsystemd/sd-journal/journal-send.c | 5 +++++
+ 2 files changed, 15 insertions(+), 1 deletion(-)
+
+diff --git a/src/libsystemd/sd-bus/bus-error.c b/src/libsystemd/sd-bus/bus-error.c
+index 77b2e1a0fd..fdba0e0142 100644
+--- a/src/libsystemd/sd-bus/bus-error.c
++++ b/src/libsystemd/sd-bus/bus-error.c
+@@ -408,7 +408,12 @@ static void bus_error_strerror(sd_bus_error *e, int error) {
+ return;
+
+ errno = 0;
++#ifndef __GLIBC__
++ strerror_r(error, m, k);
++ x = m;
++#else
+ x = strerror_r(error, m, k);
++#endif
+ if (errno == ERANGE || strlen(x) >= k - 1) {
+ free(m);
+ k *= 2;
+@@ -593,8 +598,12 @@ const char* _bus_error_message(const sd_bus_error *e, int error, char buf[static
+
+ if (e && e->message)
+ return e->message;
+-
++#ifndef __GLIBC__
++ strerror_r(abs(error), buf, ERRNO_BUF_LEN);
++ return buf;
++#else
+ return strerror_r(abs(error), buf, ERRNO_BUF_LEN);
++#endif
+ }
+
+ static bool map_ok(const sd_bus_error_map *map) {
+diff --git a/src/libsystemd/sd-journal/journal-send.c b/src/libsystemd/sd-journal/journal-send.c
+index 69a2eb6404..1561859650 100644
+--- a/src/libsystemd/sd-journal/journal-send.c
++++ b/src/libsystemd/sd-journal/journal-send.c
+@@ -361,7 +361,12 @@ static int fill_iovec_perror_and_send(const char *message, int skip, struct iove
+ char* j;
+
+ errno = 0;
++#ifndef __GLIBC__
++ strerror_r(_saved_errno_, buffer + 8 + k, n - 8 - k);
++ j = buffer + 8 + k;
++#else
+ j = strerror_r(_saved_errno_, buffer + 8 + k, n - 8 - k);
++#endif
+ if (errno == 0) {
+ char error[STRLEN("ERRNO=") + DECIMAL_STR_MAX(int) + 1];
+
+--
+2.34.1
+
diff --git a/meta/recipes-core/systemd/systemd/0011-avoid-redefinition-of-prctl_mm_map-structure.patch b/meta/recipes-core/systemd/systemd/0011-avoid-redefinition-of-prctl_mm_map-structure.patch
new file mode 100644
index 0000000000..e481b2e2e4
--- /dev/null
+++ b/meta/recipes-core/systemd/systemd/0011-avoid-redefinition-of-prctl_mm_map-structure.patch
@@ -0,0 +1,32 @@
+From 49c446cfb78cf74a909bed8c3798b77a5469866a Mon Sep 17 00:00:00 2001
+From: Chen Qi <Qi.Chen@windriver.com>
+Date: Mon, 25 Feb 2019 15:44:54 +0800
+Subject: [PATCH 11/22] avoid redefinition of prctl_mm_map structure
+
+Fix the following compile failure:
+error: redefinition of 'struct prctl_mm_map'
+
+Upstream-Status: Inappropriate [musl specific]
+
+Signed-off-by: Chen Qi <Qi.Chen@windriver.com>
+---
+ src/basic/missing_prctl.h | 2 ++
+ 1 file changed, 2 insertions(+)
+
+diff --git a/src/basic/missing_prctl.h b/src/basic/missing_prctl.h
+index 7d9e395c92..88c2d7dfac 100644
+--- a/src/basic/missing_prctl.h
++++ b/src/basic/missing_prctl.h
+@@ -1,7 +1,9 @@
+ /* SPDX-License-Identifier: LGPL-2.1-or-later */
+ #pragma once
+
++#ifdef __GLIBC__
+ #include <linux/prctl.h>
++#endif
+
+ /* 58319057b7847667f0c9585b9de0e8932b0fdb08 (4.3) */
+ #ifndef PR_CAP_AMBIENT
+--
+2.34.1
+
diff --git a/meta/recipes-core/systemd/systemd/0011-test-sizeof.c-Disable-tests-for-missing-typedefs-in-.patch b/meta/recipes-core/systemd/systemd/0011-test-sizeof.c-Disable-tests-for-missing-typedefs-in-.patch
deleted file mode 100644
index 7e4587cc23..0000000000
--- a/meta/recipes-core/systemd/systemd/0011-test-sizeof.c-Disable-tests-for-missing-typedefs-in-.patch
+++ /dev/null
@@ -1,39 +0,0 @@
-From e8025c8eefdf1be4bba34c48f3430838f3859c52 Mon Sep 17 00:00:00 2001
-From: Chen Qi <Qi.Chen@windriver.com>
-Date: Wed, 28 Feb 2018 21:25:22 -0800
-Subject: [PATCH] test-sizeof.c: Disable tests for missing typedefs in musl
-
-Upstream-Status: Inappropriate [musl specific]
-
-Signed-off-by: Khem Raj <raj.khem@gmail.com>
-Signed-off-by: Chen Qi <Qi.Chen@windriver.com>
-
----
- src/test/test-sizeof.c | 4 ++++
- 1 file changed, 4 insertions(+)
-
-diff --git a/src/test/test-sizeof.c b/src/test/test-sizeof.c
-index f349852553..602772227e 100644
---- a/src/test/test-sizeof.c
-+++ b/src/test/test-sizeof.c
-@@ -55,8 +55,10 @@ int main(void) {
- info(unsigned);
- info(long unsigned);
- info(long long unsigned);
-+#ifdef __GLIBC__
- info(__syscall_ulong_t);
- info(__syscall_slong_t);
-+#endif
- info(intmax_t);
- info(uintmax_t);
-
-@@ -76,7 +78,9 @@ int main(void) {
- info(ssize_t);
- info(time_t);
- info(usec_t);
-+#ifdef __GLIBC__
- info(__time_t);
-+#endif
- info(pid_t);
- info(uid_t);
- info(gid_t);
diff --git a/meta/recipes-core/systemd/systemd/0012-do-not-disable-buffer-in-writing-files.patch b/meta/recipes-core/systemd/systemd/0012-do-not-disable-buffer-in-writing-files.patch
new file mode 100644
index 0000000000..66be79077e
--- /dev/null
+++ b/meta/recipes-core/systemd/systemd/0012-do-not-disable-buffer-in-writing-files.patch
@@ -0,0 +1,562 @@
+From e4885a8e60f883d9217e26e1db3754c2906aca31 Mon Sep 17 00:00:00 2001
+From: Chen Qi <Qi.Chen@windriver.com>
+Date: Fri, 1 Mar 2019 15:22:15 +0800
+Subject: [PATCH 12/22] do not disable buffer in writing files
+
+Do not disable buffer in writing files, otherwise we get
+failure at boot for musl like below.
+
+ [!!!!!!] Failed to allocate manager object.
+
+And there will be other failures, critical or not critical.
+This is specific to musl.
+
+Upstream-Status: Inappropriate [musl]
+
+Signed-off-by: Chen Qi <Qi.Chen@windriver.com>
+[Rebased for v242]
+Signed-off-by: Andrej Valek <andrej.valek@siemens.com>
+[rebased for systemd 243]
+Signed-off-by: Scott Murray <scott.murray@konsulko.com>
+[rebased for systemd 254]
+Signed-off-by: Chen Qi <Qi.Chen@windriver.com>
+[rebased for systemd 255.1]
+---
+ src/basic/cgroup-util.c | 12 ++++++------
+ src/basic/namespace-util.c | 4 ++--
+ src/basic/procfs-util.c | 4 ++--
+ src/basic/sysctl-util.c | 2 +-
+ src/binfmt/binfmt.c | 6 +++---
+ src/core/cgroup.c | 2 +-
+ src/core/main.c | 2 +-
+ src/core/smack-setup.c | 8 ++++----
+ src/home/homework.c | 2 +-
+ src/libsystemd/sd-device/sd-device.c | 2 +-
+ src/nspawn/nspawn-cgroup.c | 2 +-
+ src/nspawn/nspawn.c | 6 +++---
+ src/shared/binfmt-util.c | 2 +-
+ src/shared/cgroup-setup.c | 4 ++--
+ src/shared/coredump-util.c | 4 ++--
+ src/shared/hibernate-util.c | 4 ++--
+ src/shared/smack-util.c | 2 +-
+ src/shared/watchdog.c | 2 +-
+ src/sleep/sleep.c | 4 ++--
+ src/storagetm/storagetm.c | 24 ++++++++++++------------
+ src/udev/udev-rules.c | 1 -
+ src/vconsole/vconsole-setup.c | 2 +-
+ 22 files changed, 50 insertions(+), 51 deletions(-)
+
+diff --git a/src/basic/cgroup-util.c b/src/basic/cgroup-util.c
+index d2be79622f..e65fecb68d 100644
+--- a/src/basic/cgroup-util.c
++++ b/src/basic/cgroup-util.c
+@@ -417,7 +417,7 @@ int cg_kill_kernel_sigkill(const char *path) {
+ if (r < 0)
+ return r;
+
+- r = write_string_file(killfile, "1", WRITE_STRING_FILE_DISABLE_BUFFER);
++ r = write_string_file(killfile, "1", 0);
+ if (r < 0)
+ return r;
+
+@@ -843,7 +843,7 @@ int cg_install_release_agent(const char *controller, const char *agent) {
+
+ sc = strstrip(contents);
+ if (isempty(sc)) {
+- r = write_string_file(fs, agent, WRITE_STRING_FILE_DISABLE_BUFFER);
++ r = write_string_file(fs, agent, 0);
+ if (r < 0)
+ return r;
+ } else if (!path_equal(sc, agent))
+@@ -861,7 +861,7 @@ int cg_install_release_agent(const char *controller, const char *agent) {
+
+ sc = strstrip(contents);
+ if (streq(sc, "0")) {
+- r = write_string_file(fs, "1", WRITE_STRING_FILE_DISABLE_BUFFER);
++ r = write_string_file(fs, "1", 0);
+ if (r < 0)
+ return r;
+
+@@ -888,7 +888,7 @@ int cg_uninstall_release_agent(const char *controller) {
+ if (r < 0)
+ return r;
+
+- r = write_string_file(fs, "0", WRITE_STRING_FILE_DISABLE_BUFFER);
++ r = write_string_file(fs, "0", 0);
+ if (r < 0)
+ return r;
+
+@@ -898,7 +898,7 @@ int cg_uninstall_release_agent(const char *controller) {
+ if (r < 0)
+ return r;
+
+- r = write_string_file(fs, "", WRITE_STRING_FILE_DISABLE_BUFFER);
++ r = write_string_file(fs, "", 0);
+ if (r < 0)
+ return r;
+
+@@ -1814,7 +1814,7 @@ int cg_set_attribute(const char *controller, const char *path, const char *attri
+ if (r < 0)
+ return r;
+
+- return write_string_file(p, value, WRITE_STRING_FILE_DISABLE_BUFFER);
++ return write_string_file(p, value, 0);
+ }
+
+ int cg_get_attribute(const char *controller, const char *path, const char *attribute, char **ret) {
+diff --git a/src/basic/namespace-util.c b/src/basic/namespace-util.c
+index 2101f617ad..63817bae17 100644
+--- a/src/basic/namespace-util.c
++++ b/src/basic/namespace-util.c
+@@ -227,12 +227,12 @@ int userns_acquire(const char *uid_map, const char *gid_map) {
+ freeze();
+
+ xsprintf(path, "/proc/" PID_FMT "/uid_map", pid);
+- r = write_string_file(path, uid_map, WRITE_STRING_FILE_DISABLE_BUFFER);
++ r = write_string_file(path, uid_map, 0);
+ if (r < 0)
+ return log_error_errno(r, "Failed to write UID map: %m");
+
+ xsprintf(path, "/proc/" PID_FMT "/gid_map", pid);
+- r = write_string_file(path, gid_map, WRITE_STRING_FILE_DISABLE_BUFFER);
++ r = write_string_file(path, gid_map, 0);
+ if (r < 0)
+ return log_error_errno(r, "Failed to write GID map: %m");
+
+diff --git a/src/basic/procfs-util.c b/src/basic/procfs-util.c
+index 6cb0ddf575..247cf9e1d1 100644
+--- a/src/basic/procfs-util.c
++++ b/src/basic/procfs-util.c
+@@ -64,13 +64,13 @@ int procfs_tasks_set_limit(uint64_t limit) {
+ * decrease it, as threads-max is the much more relevant sysctl. */
+ if (limit > pid_max-1) {
+ sprintf(buffer, "%" PRIu64, limit+1); /* Add one, since PID 0 is not a valid PID */
+- r = write_string_file("/proc/sys/kernel/pid_max", buffer, WRITE_STRING_FILE_DISABLE_BUFFER);
++ r = write_string_file("/proc/sys/kernel/pid_max", buffer, 0);
+ if (r < 0)
+ return r;
+ }
+
+ sprintf(buffer, "%" PRIu64, limit);
+- r = write_string_file("/proc/sys/kernel/threads-max", buffer, WRITE_STRING_FILE_DISABLE_BUFFER);
++ r = write_string_file("/proc/sys/kernel/threads-max", buffer, 0);
+ if (r < 0) {
+ uint64_t threads_max;
+
+diff --git a/src/basic/sysctl-util.c b/src/basic/sysctl-util.c
+index b66a6622ae..8d1c93008a 100644
+--- a/src/basic/sysctl-util.c
++++ b/src/basic/sysctl-util.c
+@@ -58,7 +58,7 @@ int sysctl_write(const char *property, const char *value) {
+
+ log_debug("Setting '%s' to '%s'", p, value);
+
+- return write_string_file(p, value, WRITE_STRING_FILE_VERIFY_ON_FAILURE | WRITE_STRING_FILE_DISABLE_BUFFER | WRITE_STRING_FILE_SUPPRESS_REDUNDANT_VIRTUAL);
++ return write_string_file(p, value, WRITE_STRING_FILE_VERIFY_ON_FAILURE | WRITE_STRING_FILE_SUPPRESS_REDUNDANT_VIRTUAL);
+ }
+
+ int sysctl_writef(const char *property, const char *format, ...) {
+diff --git a/src/binfmt/binfmt.c b/src/binfmt/binfmt.c
+index d21f3f79ff..258607cc7e 100644
+--- a/src/binfmt/binfmt.c
++++ b/src/binfmt/binfmt.c
+@@ -30,7 +30,7 @@ static bool arg_unregister = false;
+
+ static int delete_rule(const char *rulename) {
+ const char *fn = strjoina("/proc/sys/fs/binfmt_misc/", rulename);
+- return write_string_file(fn, "-1", WRITE_STRING_FILE_DISABLE_BUFFER);
++ return write_string_file(fn, "-1", 0);
+ }
+
+ static int apply_rule(const char *filename, unsigned line, const char *rule) {
+@@ -58,7 +58,7 @@ static int apply_rule(const char *filename, unsigned line, const char *rule) {
+ if (r >= 0)
+ log_debug("%s:%u: Rule '%s' deleted.", filename, line, rulename);
+
+- r = write_string_file("/proc/sys/fs/binfmt_misc/register", rule, WRITE_STRING_FILE_DISABLE_BUFFER);
++ r = write_string_file("/proc/sys/fs/binfmt_misc/register", rule, 0);
+ if (r < 0)
+ return log_error_errno(r, "%s:%u: Failed to add binary format '%s': %m",
+ filename, line, rulename);
+@@ -248,7 +248,7 @@ static int run(int argc, char *argv[]) {
+ return r;
+
+ /* Flush out all rules */
+- r = write_string_file("/proc/sys/fs/binfmt_misc/status", "-1", WRITE_STRING_FILE_DISABLE_BUFFER);
++ r = write_string_file("/proc/sys/fs/binfmt_misc/status", "-1", 0);
+ if (r < 0)
+ log_warning_errno(r, "Failed to flush binfmt_misc rules, ignoring: %m");
+ else
+diff --git a/src/core/cgroup.c b/src/core/cgroup.c
+index 61ac4df1a6..ea18970196 100644
+--- a/src/core/cgroup.c
++++ b/src/core/cgroup.c
+@@ -4578,7 +4578,7 @@ int unit_cgroup_freezer_action(Unit *u, FreezerAction action) {
+ u->freezer_state = FREEZER_THAWING;
+ }
+
+- r = write_string_file(path, one_zero(action == FREEZER_FREEZE), WRITE_STRING_FILE_DISABLE_BUFFER);
++ r = write_string_file(path, one_zero(action == FREEZER_FREEZE), 0);
+ if (r < 0)
+ return r;
+
+diff --git a/src/core/main.c b/src/core/main.c
+index 3f71cc0947..0e5aec3e9e 100644
+--- a/src/core/main.c
++++ b/src/core/main.c
+@@ -1678,7 +1678,7 @@ static void initialize_core_pattern(bool skip_setup) {
+ if (getpid_cached() != 1)
+ return;
+
+- r = write_string_file("/proc/sys/kernel/core_pattern", arg_early_core_pattern, WRITE_STRING_FILE_DISABLE_BUFFER);
++ r = write_string_file("/proc/sys/kernel/core_pattern", arg_early_core_pattern, 0);
+ if (r < 0)
+ log_warning_errno(r, "Failed to write '%s' to /proc/sys/kernel/core_pattern, ignoring: %m",
+ arg_early_core_pattern);
+diff --git a/src/core/smack-setup.c b/src/core/smack-setup.c
+index 7ea902b6f9..1aef2988d0 100644
+--- a/src/core/smack-setup.c
++++ b/src/core/smack-setup.c
+@@ -321,17 +321,17 @@ int mac_smack_setup(bool *loaded_policy) {
+ }
+
+ #if HAVE_SMACK_RUN_LABEL
+- r = write_string_file("/proc/self/attr/current", SMACK_RUN_LABEL, WRITE_STRING_FILE_DISABLE_BUFFER);
++ r = write_string_file("/proc/self/attr/current", SMACK_RUN_LABEL, 0);
+ if (r < 0)
+ log_warning_errno(r, "Failed to set SMACK label \"" SMACK_RUN_LABEL "\" on self: %m");
+- r = write_string_file("/sys/fs/smackfs/ambient", SMACK_RUN_LABEL, WRITE_STRING_FILE_DISABLE_BUFFER);
++ r = write_string_file("/sys/fs/smackfs/ambient", SMACK_RUN_LABEL, 0);
+ if (r < 0)
+ log_warning_errno(r, "Failed to set SMACK ambient label \"" SMACK_RUN_LABEL "\": %m");
+ r = write_string_file("/sys/fs/smackfs/netlabel",
+- "0.0.0.0/0 " SMACK_RUN_LABEL, WRITE_STRING_FILE_DISABLE_BUFFER);
++ "0.0.0.0/0 " SMACK_RUN_LABEL, 0);
+ if (r < 0)
+ log_warning_errno(r, "Failed to set SMACK netlabel rule \"0.0.0.0/0 " SMACK_RUN_LABEL "\": %m");
+- r = write_string_file("/sys/fs/smackfs/netlabel", "127.0.0.1 -CIPSO", WRITE_STRING_FILE_DISABLE_BUFFER);
++ r = write_string_file("/sys/fs/smackfs/netlabel", "127.0.0.1 -CIPSO", 0);
+ if (r < 0)
+ log_warning_errno(r, "Failed to set SMACK netlabel rule \"127.0.0.1 -CIPSO\": %m");
+ #endif
+diff --git a/src/home/homework.c b/src/home/homework.c
+index 066483e342..5f92dd7064 100644
+--- a/src/home/homework.c
++++ b/src/home/homework.c
+@@ -278,7 +278,7 @@ static void drop_caches_now(void) {
+ * for details. We write "2" into /proc/sys/vm/drop_caches to ensure dentries/inodes are flushed, but
+ * not more. */
+
+- r = write_string_file("/proc/sys/vm/drop_caches", "2\n", WRITE_STRING_FILE_DISABLE_BUFFER);
++ r = write_string_file("/proc/sys/vm/drop_caches", "2\n", 0);
+ if (r < 0)
+ log_warning_errno(r, "Failed to drop caches, ignoring: %m");
+ else
+diff --git a/src/libsystemd/sd-device/sd-device.c b/src/libsystemd/sd-device/sd-device.c
+index 2fbc619a34..09d9591e37 100644
+--- a/src/libsystemd/sd-device/sd-device.c
++++ b/src/libsystemd/sd-device/sd-device.c
+@@ -2516,7 +2516,7 @@ _public_ int sd_device_set_sysattr_value(sd_device *device, const char *sysattr,
+ if (!value)
+ return -ENOMEM;
+
+- r = write_string_file(path, value, WRITE_STRING_FILE_DISABLE_BUFFER | WRITE_STRING_FILE_NOFOLLOW);
++ r = write_string_file(path, value, 0 | WRITE_STRING_FILE_NOFOLLOW);
+ if (r < 0) {
+ /* On failure, clear cache entry, as we do not know how it fails. */
+ device_remove_cached_sysattr_value(device, sysattr);
+diff --git a/src/nspawn/nspawn-cgroup.c b/src/nspawn/nspawn-cgroup.c
+index a5002437c6..b12e6cd9c9 100644
+--- a/src/nspawn/nspawn-cgroup.c
++++ b/src/nspawn/nspawn-cgroup.c
+@@ -124,7 +124,7 @@ int sync_cgroup(pid_t pid, CGroupUnified unified_requested, uid_t uid_shift) {
+ fn = strjoina(tree, cgroup, "/cgroup.procs");
+
+ sprintf(pid_string, PID_FMT, pid);
+- r = write_string_file(fn, pid_string, WRITE_STRING_FILE_DISABLE_BUFFER|WRITE_STRING_FILE_MKDIR_0755);
++ r = write_string_file(fn, pid_string, WRITE_STRING_FILE_MKDIR_0755);
+ if (r < 0) {
+ log_error_errno(r, "Failed to move process: %m");
+ goto finish;
+diff --git a/src/nspawn/nspawn.c b/src/nspawn/nspawn.c
+index 6ab604d3dc..bbec6b686c 100644
+--- a/src/nspawn/nspawn.c
++++ b/src/nspawn/nspawn.c
+@@ -2688,7 +2688,7 @@ static int reset_audit_loginuid(void) {
+ if (streq(p, "4294967295"))
+ return 0;
+
+- r = write_string_file("/proc/self/loginuid", "4294967295", WRITE_STRING_FILE_DISABLE_BUFFER);
++ r = write_string_file("/proc/self/loginuid", "4294967295", 0);
+ if (r < 0) {
+ log_error_errno(r,
+ "Failed to reset audit login UID. This probably means that your kernel is too\n"
+@@ -4141,7 +4141,7 @@ static int setup_uid_map(
+ return log_oom();
+
+ xsprintf(uid_map, "/proc/" PID_FMT "/uid_map", pid);
+- r = write_string_file(uid_map, s, WRITE_STRING_FILE_DISABLE_BUFFER);
++ r = write_string_file(uid_map, s, 0);
+ if (r < 0)
+ return log_error_errno(r, "Failed to write UID map: %m");
+
+@@ -4151,7 +4151,7 @@ static int setup_uid_map(
+ return log_oom();
+
+ xsprintf(uid_map, "/proc/" PID_FMT "/gid_map", pid);
+- r = write_string_file(uid_map, s, WRITE_STRING_FILE_DISABLE_BUFFER);
++ r = write_string_file(uid_map, s, 0);
+ if (r < 0)
+ return log_error_errno(r, "Failed to write GID map: %m");
+
+diff --git a/src/shared/binfmt-util.c b/src/shared/binfmt-util.c
+index a26175474b..1413a9c72c 100644
+--- a/src/shared/binfmt-util.c
++++ b/src/shared/binfmt-util.c
+@@ -46,7 +46,7 @@ int disable_binfmt(void) {
+ return 0;
+ }
+
+- r = write_string_file("/proc/sys/fs/binfmt_misc/status", "-1", WRITE_STRING_FILE_DISABLE_BUFFER);
++ r = write_string_file("/proc/sys/fs/binfmt_misc/status", "-1", 0);
+ if (r < 0)
+ return log_warning_errno(r, "Failed to unregister binfmt_misc entries: %m");
+
+diff --git a/src/shared/cgroup-setup.c b/src/shared/cgroup-setup.c
+index 934a16eaf3..c921ced861 100644
+--- a/src/shared/cgroup-setup.c
++++ b/src/shared/cgroup-setup.c
+@@ -351,7 +351,7 @@ int cg_attach(const char *controller, const char *path, pid_t pid) {
+
+ xsprintf(c, PID_FMT "\n", pid);
+
+- r = write_string_file(fs, c, WRITE_STRING_FILE_DISABLE_BUFFER);
++ r = write_string_file(fs, c, 0);
+ if (r == -EOPNOTSUPP && cg_is_threaded(path) > 0)
+ /* When the threaded mode is used, we cannot read/write the file. Let's return recognizable error. */
+ return -EUCLEAN;
+@@ -966,7 +966,7 @@ int cg_enable_everywhere(
+ return log_debug_errno(errno, "Failed to open cgroup.subtree_control file of %s: %m", p);
+ }
+
+- r = write_string_stream(f, s, WRITE_STRING_FILE_DISABLE_BUFFER);
++ r = write_string_stream(f, s, 0);
+ if (r < 0) {
+ log_debug_errno(r, "Failed to %s controller %s for %s (%s): %m",
+ FLAGS_SET(mask, bit) ? "enable" : "disable", n, p, fs);
+diff --git a/src/shared/coredump-util.c b/src/shared/coredump-util.c
+index 805503f366..01a7ccb291 100644
+--- a/src/shared/coredump-util.c
++++ b/src/shared/coredump-util.c
+@@ -163,7 +163,7 @@ int set_coredump_filter(uint64_t value) {
+ xsprintf(t, "0x%"PRIx64, value);
+
+ return write_string_file("/proc/self/coredump_filter", t,
+- WRITE_STRING_FILE_VERIFY_ON_FAILURE|WRITE_STRING_FILE_DISABLE_BUFFER);
++ 0);
+ }
+
+ /* Turn off core dumps but only if we're running outside of a container. */
+@@ -173,7 +173,7 @@ void disable_coredumps(void) {
+ if (detect_container() > 0)
+ return;
+
+- r = write_string_file("/proc/sys/kernel/core_pattern", "|/bin/false", WRITE_STRING_FILE_DISABLE_BUFFER);
++ r = write_string_file("/proc/sys/kernel/core_pattern", "|/bin/false", 0);
+ if (r < 0)
+ log_debug_errno(r, "Failed to turn off coredumps, ignoring: %m");
+ }
+diff --git a/src/shared/hibernate-util.c b/src/shared/hibernate-util.c
+index 3eb13d48f6..d09b901be1 100644
+--- a/src/shared/hibernate-util.c
++++ b/src/shared/hibernate-util.c
+@@ -481,7 +481,7 @@ int write_resume_config(dev_t devno, uint64_t offset, const char *device) {
+
+ /* We write the offset first since it's safer. Note that this file is only available in 4.17+, so
+ * fail gracefully if it doesn't exist and we're only overwriting it with 0. */
+- r = write_string_file("/sys/power/resume_offset", offset_str, WRITE_STRING_FILE_DISABLE_BUFFER);
++ r = write_string_file("/sys/power/resume_offset", offset_str, 0);
+ if (r == -ENOENT) {
+ if (offset != 0)
+ return log_error_errno(SYNTHETIC_ERRNO(EOPNOTSUPP),
+@@ -497,7 +497,7 @@ int write_resume_config(dev_t devno, uint64_t offset, const char *device) {
+ log_debug("Wrote resume_offset=%s for device '%s' to /sys/power/resume_offset.",
+ offset_str, device);
+
+- r = write_string_file("/sys/power/resume", devno_str, WRITE_STRING_FILE_DISABLE_BUFFER);
++ r = write_string_file("/sys/power/resume", devno_str, 0);
+ if (r < 0)
+ return log_error_errno(r,
+ "Failed to write device '%s' (%s) to /sys/power/resume: %m",
+diff --git a/src/shared/smack-util.c b/src/shared/smack-util.c
+index 1f88e724d0..feb18b320a 100644
+--- a/src/shared/smack-util.c
++++ b/src/shared/smack-util.c
+@@ -113,7 +113,7 @@ int mac_smack_apply_pid(pid_t pid, const char *label) {
+ return 0;
+
+ p = procfs_file_alloca(pid, "attr/current");
+- r = write_string_file(p, label, WRITE_STRING_FILE_DISABLE_BUFFER);
++ r = write_string_file(p, label, 0);
+ if (r < 0)
+ return r;
+
+diff --git a/src/shared/watchdog.c b/src/shared/watchdog.c
+index 4c1a968718..6faf6806a5 100644
+--- a/src/shared/watchdog.c
++++ b/src/shared/watchdog.c
+@@ -93,7 +93,7 @@ static int set_pretimeout_governor(const char *governor) {
+
+ r = write_string_file(sys_fn,
+ governor,
+- WRITE_STRING_FILE_DISABLE_BUFFER | WRITE_STRING_FILE_VERIFY_ON_FAILURE | WRITE_STRING_FILE_VERIFY_IGNORE_NEWLINE);
++ WRITE_STRING_FILE_VERIFY_ON_FAILURE | WRITE_STRING_FILE_VERIFY_IGNORE_NEWLINE);
+ if (r < 0)
+ return log_error_errno(r, "Failed to set pretimeout_governor to '%s': %m", governor);
+
+diff --git a/src/sleep/sleep.c b/src/sleep/sleep.c
+index 21af3e9e52..6d4b84b5d5 100644
+--- a/src/sleep/sleep.c
++++ b/src/sleep/sleep.c
+@@ -137,7 +137,7 @@ static int write_state(int fd, char * const *states) {
+ if (k < 0)
+ return RET_GATHER(r, k);
+
+- k = write_string_stream(f, *state, WRITE_STRING_FILE_DISABLE_BUFFER);
++ k = write_string_stream(f, *state, 0);
+ if (k >= 0) {
+ log_debug("Using sleep state '%s'.", *state);
+ return 0;
+@@ -155,7 +155,7 @@ static int write_mode(char * const *modes) {
+ STRV_FOREACH(mode, modes) {
+ int k;
+
+- k = write_string_file("/sys/power/disk", *mode, WRITE_STRING_FILE_DISABLE_BUFFER);
++ k = write_string_file("/sys/power/disk", *mode, 0);
+ if (k >= 0) {
+ log_debug("Using sleep disk mode '%s'.", *mode);
+ return 0;
+diff --git a/src/storagetm/storagetm.c b/src/storagetm/storagetm.c
+index ae63baaf79..82eeca479a 100644
+--- a/src/storagetm/storagetm.c
++++ b/src/storagetm/storagetm.c
+@@ -186,7 +186,7 @@ static int nvme_subsystem_unlink(NvmeSubsystem *s) {
+ if (!enable_fn)
+ return log_oom();
+
+- r = write_string_file_at(namespaces_fd, enable_fn, "0", WRITE_STRING_FILE_DISABLE_BUFFER);
++ r = write_string_file_at(namespaces_fd, enable_fn, "0", 0);
+ if (r < 0)
+ log_warning_errno(r, "Failed to disable namespace '%s' of NVME subsystem '%s', ignoring: %m", e->d_name, s->name);
+
+@@ -254,7 +254,7 @@ static int nvme_subsystem_write_metadata(int subsystem_fd, sd_device *device) {
+ _cleanup_free_ char *truncated = strndup(w, 40); /* kernel refuses more than 40 chars (as per nvme spec) */
+
+ /* The default string stored in 'attr_model' is "Linux" btw. */
+- r = write_string_file_at(subsystem_fd, "attr_model", truncated, WRITE_STRING_FILE_DISABLE_BUFFER);
++ r = write_string_file_at(subsystem_fd, "attr_model", truncated, 0);
+ if (r < 0)
+ log_warning_errno(r, "Failed to set model of subsystem to '%s', ignoring: %m", w);
+ }
+@@ -268,7 +268,7 @@ static int nvme_subsystem_write_metadata(int subsystem_fd, sd_device *device) {
+ return log_oom();
+
+ /* The default string stored in 'attr_firmware' is `uname -r` btw, but truncated to 8 chars. */
+- r = write_string_file_at(subsystem_fd, "attr_firmware", truncated, WRITE_STRING_FILE_DISABLE_BUFFER);
++ r = write_string_file_at(subsystem_fd, "attr_firmware", truncated, 0);
+ if (r < 0)
+ log_warning_errno(r, "Failed to set model of subsystem to '%s', ignoring: %m", truncated);
+ }
+@@ -295,7 +295,7 @@ static int nvme_subsystem_write_metadata(int subsystem_fd, sd_device *device) {
+ if (!truncated)
+ return log_oom();
+
+- r = write_string_file_at(subsystem_fd, "attr_serial", truncated, WRITE_STRING_FILE_DISABLE_BUFFER);
++ r = write_string_file_at(subsystem_fd, "attr_serial", truncated, 0);
+ if (r < 0)
+ log_warning_errno(r, "Failed to set serial of subsystem to '%s', ignoring: %m", truncated);
+ }
+@@ -345,7 +345,7 @@ static int nvme_namespace_write_metadata(int namespace_fd, sd_device *device, co
+ id = id128_digest(j, l);
+ }
+
+- r = write_string_file_at(namespace_fd, "device_uuid", SD_ID128_TO_UUID_STRING(id), WRITE_STRING_FILE_DISABLE_BUFFER);
++ r = write_string_file_at(namespace_fd, "device_uuid", SD_ID128_TO_UUID_STRING(id), 0);
+ if (r < 0)
+ log_warning_errno(r, "Failed to set uuid of namespace to '%s', ignoring: %m", SD_ID128_TO_UUID_STRING(id));
+
+@@ -408,7 +408,7 @@ static int nvme_subsystem_add(const char *node, int consumed_fd, sd_device *devi
+ if (subsystem_fd < 0)
+ return log_error_errno(subsystem_fd, "Failed to create NVME subsystem '%s': %m", j);
+
+- r = write_string_file_at(subsystem_fd, "attr_allow_any_host", "1", WRITE_STRING_FILE_DISABLE_BUFFER);
++ r = write_string_file_at(subsystem_fd, "attr_allow_any_host", "1", 0);
+ if (r < 0)
+ return log_error_errno(r, "Failed to set 'attr_allow_any_host' flag: %m");
+
+@@ -423,11 +423,11 @@ static int nvme_subsystem_add(const char *node, int consumed_fd, sd_device *devi
+
+ /* We use /proc/$PID/fd/$FD rather than /proc/self/fd/$FD, because this string is visible to others
+ * via configfs, and by including the PID it's clear to who the stuff belongs. */
+- r = write_string_file_at(namespace_fd, "device_path", FORMAT_PROC_PID_FD_PATH(0, fd), WRITE_STRING_FILE_DISABLE_BUFFER);
++ r = write_string_file_at(namespace_fd, "device_path", FORMAT_PROC_PID_FD_PATH(0, fd), 0);
+ if (r < 0)
+ return log_error_errno(r, "Failed to write 'device_path' attribute: %m");
+
+- r = write_string_file_at(namespace_fd, "enable", "1", WRITE_STRING_FILE_DISABLE_BUFFER);
++ r = write_string_file_at(namespace_fd, "enable", "1", 0);
+ if (r < 0)
+ return log_error_errno(r, "Failed to write 'enable' attribute: %m");
+
+@@ -557,19 +557,19 @@ static int nvme_port_add_portnr(
+ return 0;
+ }
+
+- r = write_string_file_at(port_fd, "addr_adrfam", af_to_ipv4_ipv6(ip_family), WRITE_STRING_FILE_DISABLE_BUFFER);
++ r = write_string_file_at(port_fd, "addr_adrfam", af_to_ipv4_ipv6(ip_family), 0);
+ if (r < 0)
+ return log_error_errno(r, "Failed to set address family on NVME port %" PRIu16 ": %m", portnr);
+
+- r = write_string_file_at(port_fd, "addr_trtype", "tcp", WRITE_STRING_FILE_DISABLE_BUFFER);
++ r = write_string_file_at(port_fd, "addr_trtype", "tcp", 0);
+ if (r < 0)
+ return log_error_errno(r, "Failed to set transport type on NVME port %" PRIu16 ": %m", portnr);
+
+- r = write_string_file_at(port_fd, "addr_trsvcid", fname, WRITE_STRING_FILE_DISABLE_BUFFER);
++ r = write_string_file_at(port_fd, "addr_trsvcid", fname, 0);
+ if (r < 0)
+ return log_error_errno(r, "Failed to set IP port on NVME port %" PRIu16 ": %m", portnr);
+
+- r = write_string_file_at(port_fd, "addr_traddr", ip_family == AF_INET6 ? "::" : "0.0.0.0", WRITE_STRING_FILE_DISABLE_BUFFER);
++ r = write_string_file_at(port_fd, "addr_traddr", ip_family == AF_INET6 ? "::" : "0.0.0.0", 0);
+ if (r < 0)
+ return log_error_errno(r, "Failed to set IP address on NVME port %" PRIu16 ": %m", portnr);
+
+diff --git a/src/udev/udev-rules.c b/src/udev/udev-rules.c
+index febe345b4c..a90b610ba1 100644
+--- a/src/udev/udev-rules.c
++++ b/src/udev/udev-rules.c
+@@ -2711,7 +2711,6 @@ static int udev_rule_apply_token_to_event(
+ log_event_debug(dev, token, "ATTR '%s' writing '%s'", buf, value);
+ r = write_string_file(buf, value,
+ WRITE_STRING_FILE_VERIFY_ON_FAILURE |
+- WRITE_STRING_FILE_DISABLE_BUFFER |
+ WRITE_STRING_FILE_AVOID_NEWLINE |
+ WRITE_STRING_FILE_VERIFY_IGNORE_NEWLINE);
+ if (r < 0)
+diff --git a/src/vconsole/vconsole-setup.c b/src/vconsole/vconsole-setup.c
+index 4d82c65f0a..3a3d861b83 100644
+--- a/src/vconsole/vconsole-setup.c
++++ b/src/vconsole/vconsole-setup.c
+@@ -261,7 +261,7 @@ static int toggle_utf8_vc(const char *name, int fd, bool utf8) {
+ static int toggle_utf8_sysfs(bool utf8) {
+ int r;
+
+- r = write_string_file("/sys/module/vt/parameters/default_utf8", one_zero(utf8), WRITE_STRING_FILE_DISABLE_BUFFER);
++ r = write_string_file("/sys/module/vt/parameters/default_utf8", one_zero(utf8), 0);
+ if (r < 0)
+ return log_warning_errno(r, "Failed to %s sysfs UTF-8 flag: %m", enable_disable(utf8));
+
+--
+2.34.1
+
diff --git a/meta/recipes-core/systemd/systemd/0012-don-t-pass-AT_SYMLINK_NOFOLLOW-flag-to-faccessat.patch b/meta/recipes-core/systemd/systemd/0012-don-t-pass-AT_SYMLINK_NOFOLLOW-flag-to-faccessat.patch
deleted file mode 100644
index 6eecd3197c..0000000000
--- a/meta/recipes-core/systemd/systemd/0012-don-t-pass-AT_SYMLINK_NOFOLLOW-flag-to-faccessat.patch
+++ /dev/null
@@ -1,97 +0,0 @@
-From 46fdc959257d60d9b32953cae0152ae118f8564b Mon Sep 17 00:00:00 2001
-From: Andre McCurdy <armccurdy@gmail.com>
-Date: Tue, 10 Oct 2017 14:33:30 -0700
-Subject: [PATCH] don't pass AT_SYMLINK_NOFOLLOW flag to faccessat()
-
-Avoid using AT_SYMLINK_NOFOLLOW flag. It doesn't seem like the right
-thing to do and it's not portable (not supported by musl). See:
-
- http://lists.landley.net/pipermail/toybox-landley.net/2014-September/003610.html
- http://www.openwall.com/lists/musl/2015/02/05/2
-
-Note that laccess() is never passing AT_EACCESS so a lot of the
-discussion in the links above doesn't apply. Note also that
-(currently) all systemd callers of laccess() pass mode as F_OK, so
-only check for existence of a file, not access permissions.
-Therefore, in this case, the only distiction between faccessat()
-with (flag == 0) and (flag == AT_SYMLINK_NOFOLLOW) is the behaviour
-for broken symlinks; laccess() on a broken symlink will succeed with
-(flag == AT_SYMLINK_NOFOLLOW) and fail (flag == 0).
-
-The laccess() macros was added to systemd some time ago and it's not
-clear if or why it needs to return success for broken symlinks. Maybe
-just historical and not actually necessary or desired behaviour?
-
-Upstream-Status: Inappropriate [musl specific]
-
-Signed-off-by: Andre McCurdy <armccurdy@gmail.com>
-
----
- src/basic/fs-util.h | 21 ++++++++++++++++++++-
- src/shared/base-filesystem.c | 6 +++---
- 2 files changed, 23 insertions(+), 4 deletions(-)
-
-diff --git a/src/basic/fs-util.h b/src/basic/fs-util.h
-index 0bbb3f6298..3dc494dbfb 100644
---- a/src/basic/fs-util.h
-+++ b/src/basic/fs-util.h
-@@ -46,8 +46,27 @@ int futimens_opath(int fd, const struct timespec ts[2]);
- int fd_warn_permissions(const char *path, int fd);
- int stat_warn_permissions(const char *path, const struct stat *st);
-
-+/*
-+ Avoid using AT_SYMLINK_NOFOLLOW flag. It doesn't seem like the right thing to
-+ do and it's not portable (not supported by musl). See:
-+
-+ http://lists.landley.net/pipermail/toybox-landley.net/2014-September/003610.html
-+ http://www.openwall.com/lists/musl/2015/02/05/2
-+
-+ Note that laccess() is never passing AT_EACCESS so a lot of the discussion in
-+ the links above doesn't apply. Note also that (currently) all systemd callers
-+ of laccess() pass mode as F_OK, so only check for existence of a file, not
-+ access permissions. Therefore, in this case, the only distiction between
-+ faccessat() with (flag == 0) and (flag == AT_SYMLINK_NOFOLLOW) is the
-+ behaviour for broken symlinks; laccess() on a broken symlink will succeed
-+ with (flag == AT_SYMLINK_NOFOLLOW) and fail (flag == 0).
-+
-+ The laccess() macros was added to systemd some time ago and it's not clear if
-+ or why it needs to return success for broken symlinks. Maybe just historical
-+ and not actually necessary or desired behaviour?
-+*/
- #define laccess(path, mode) \
-- RET_NERRNO(faccessat(AT_FDCWD, (path), (mode), AT_SYMLINK_NOFOLLOW))
-+ RET_NERRNO(faccessat(AT_FDCWD, (path), (mode), 0))
-
- int touch_file(const char *path, bool parents, usec_t stamp, uid_t uid, gid_t gid, mode_t mode);
- int touch(const char *path);
-diff --git a/src/shared/base-filesystem.c b/src/shared/base-filesystem.c
-index 5f5328c8cf..d396bc99fe 100644
---- a/src/shared/base-filesystem.c
-+++ b/src/shared/base-filesystem.c
-@@ -117,7 +117,7 @@ int base_filesystem_create(const char *root, uid_t uid, gid_t gid) {
- return log_error_errno(errno, "Failed to open root file system: %m");
-
- for (size_t i = 0; i < ELEMENTSOF(table); i++) {
-- if (faccessat(fd, table[i].dir, F_OK, AT_SYMLINK_NOFOLLOW) >= 0)
-+ if (faccessat(fd, table[i].dir, F_OK, 0) >= 0)
- continue;
-
- if (table[i].target) {
-@@ -125,7 +125,7 @@ int base_filesystem_create(const char *root, uid_t uid, gid_t gid) {
-
- /* check if one of the targets exists */
- NULSTR_FOREACH(s, table[i].target) {
-- if (faccessat(fd, s, F_OK, AT_SYMLINK_NOFOLLOW) < 0)
-+ if (faccessat(fd, s, F_OK, 0) < 0)
- continue;
-
- /* check if a specific file exists at the target path */
-@@ -136,7 +136,7 @@ int base_filesystem_create(const char *root, uid_t uid, gid_t gid) {
- if (!p)
- return log_oom();
-
-- if (faccessat(fd, p, F_OK, AT_SYMLINK_NOFOLLOW) < 0)
-+ if (faccessat(fd, p, F_OK, 0) < 0)
- continue;
- }
-
diff --git a/meta/recipes-core/systemd/systemd/0013-Define-glibc-compatible-basename-for-non-glibc-syste.patch b/meta/recipes-core/systemd/systemd/0013-Define-glibc-compatible-basename-for-non-glibc-syste.patch
deleted file mode 100644
index 7b22d6214f..0000000000
--- a/meta/recipes-core/systemd/systemd/0013-Define-glibc-compatible-basename-for-non-glibc-syste.patch
+++ /dev/null
@@ -1,32 +0,0 @@
-From d0bdce977b7acc5e45e82cf84256c4bedc0e74c4 Mon Sep 17 00:00:00 2001
-From: Khem Raj <raj.khem@gmail.com>
-Date: Sun, 27 May 2018 08:36:44 -0700
-Subject: [PATCH] Define glibc compatible basename() for non-glibc systems
-
-Fixes builds with musl, even though systemd is adamant about
-using non-posix basename implementation, we have a way out
-
-Upstream-Status: Inappropriate [musl specific]
-
-Signed-off-by: Khem Raj <raj.khem@gmail.com>
-
----
- src/machine/machine-dbus.c | 5 +++++
- 1 file changed, 5 insertions(+)
-
-diff --git a/src/machine/machine-dbus.c b/src/machine/machine-dbus.c
-index 8f11afd65b..a2b57deb7a 100644
---- a/src/machine/machine-dbus.c
-+++ b/src/machine/machine-dbus.c
-@@ -10,6 +10,11 @@
- #include <libgen.h>
- #undef basename
-
-+#if !defined(__GLIBC__)
-+#include <string.h>
-+#define basename(src) (strrchr(src,'/') ? strrchr(src,'/')+1 : src)
-+#endif
-+
- #include "alloc-util.h"
- #include "bus-common-errors.h"
- #include "bus-get-properties.h"
diff --git a/meta/recipes-core/systemd/systemd/0013-Handle-__cpu_mask-usage.patch b/meta/recipes-core/systemd/systemd/0013-Handle-__cpu_mask-usage.patch
new file mode 100644
index 0000000000..43f75373a6
--- /dev/null
+++ b/meta/recipes-core/systemd/systemd/0013-Handle-__cpu_mask-usage.patch
@@ -0,0 +1,60 @@
+From 2f90f8463423cfbb7e83fcef42f1071018c3b56e Mon Sep 17 00:00:00 2001
+From: Scott Murray <scott.murray@konsulko.com>
+Date: Fri, 13 Sep 2019 19:26:27 -0400
+Subject: [PATCH 13/22] Handle __cpu_mask usage
+
+Fixes errors:
+
+src/test/test-cpu-set-util.c:18:54: error: '__cpu_mask' undeclared (first use in this function)
+src/test/test-sizeof.c:73:14: error: '__cpu_mask' undeclared (first use in this function)
+
+__cpu_mask is an internal type of glibc's cpu_set implementation, not
+part of the POSIX definition, which is problematic when building with
+musl, which does not define a matching type. From inspection of musl's
+sched.h, however, it is clear that the corresponding type would be
+unsigned long, which does match glibc's actual __CPU_MASK_TYPE. So,
+add a typedef to cpu-set-util.h defining __cpu_mask appropriately.
+
+Upstream-Status: Inappropriate [musl specific]
+
+Signed-off-by: Scott Murray <scott.murray@konsulko.com>
+---
+ src/shared/cpu-set-util.h | 2 ++
+ src/test/test-sizeof.c | 2 +-
+ 2 files changed, 3 insertions(+), 1 deletion(-)
+
+diff --git a/src/shared/cpu-set-util.h b/src/shared/cpu-set-util.h
+index 3c63a58826..4c2d4347fc 100644
+--- a/src/shared/cpu-set-util.h
++++ b/src/shared/cpu-set-util.h
+@@ -6,6 +6,8 @@
+ #include "macro.h"
+ #include "missing_syscall.h"
+
++typedef unsigned long __cpu_mask;
++
+ /* This wraps the libc interface with a variable to keep the allocated size. */
+ typedef struct CPUSet {
+ cpu_set_t *set;
+diff --git a/src/test/test-sizeof.c b/src/test/test-sizeof.c
+index ea0c58770e..b65c0bd370 100644
+--- a/src/test/test-sizeof.c
++++ b/src/test/test-sizeof.c
+@@ -1,6 +1,5 @@
+ /* SPDX-License-Identifier: LGPL-2.1-or-later */
+
+-#include <sched.h>
+ #include <stdio.h>
+ #include <string.h>
+ #include <sys/resource.h>
+@@ -12,6 +11,7 @@
+ #include <float.h>
+
+ #include "time-util.h"
++#include "cpu-set-util.h"
+
+ /* Print information about various types. Useful when diagnosing
+ * gcc diagnostics on an unfamiliar architecture. */
+--
+2.34.1
+
diff --git a/meta/recipes-core/systemd/systemd/0014-Do-not-disable-buffering-when-writing-to-oom_score_a.patch b/meta/recipes-core/systemd/systemd/0014-Do-not-disable-buffering-when-writing-to-oom_score_a.patch
deleted file mode 100644
index 015347cb6a..0000000000
--- a/meta/recipes-core/systemd/systemd/0014-Do-not-disable-buffering-when-writing-to-oom_score_a.patch
+++ /dev/null
@@ -1,39 +0,0 @@
-From e480d28305907c3874f4e58b722b8aa43c3ac7a2 Mon Sep 17 00:00:00 2001
-From: Chen Qi <Qi.Chen@windriver.com>
-Date: Wed, 4 Jul 2018 15:00:44 +0800
-Subject: [PATCH] Do not disable buffering when writing to oom_score_adj
-
-On musl, disabling buffering when writing to oom_score_adj will
-cause the following error.
-
- Failed to adjust OOM setting: Invalid argument
-
-This error appears for systemd-udevd.service and dbus.service.
-This is because kernel receives '-' instead of the whole '-900'
-if buffering is disabled.
-
-This is libc implementation specific, as glibc does not have this issue.
-
-Upstream-Status: Inappropriate [musl specific]
-
-Signed-off-by: Chen Qi <Qi.Chen@windriver.com>
-[rebased for systemd 243]
-Signed-off-by: Scott Murray <scott.murray@konsulko.com>
-
----
- src/basic/process-util.c | 2 +-
- 1 file changed, 1 insertion(+), 1 deletion(-)
-
-diff --git a/src/basic/process-util.c b/src/basic/process-util.c
-index df6e85b1fc..635dbb5d26 100644
---- a/src/basic/process-util.c
-+++ b/src/basic/process-util.c
-@@ -1489,7 +1489,7 @@ int set_oom_score_adjust(int value) {
- xsprintf(t, "%i", value);
-
- return write_string_file("/proc/self/oom_score_adj", t,
-- WRITE_STRING_FILE_VERIFY_ON_FAILURE|WRITE_STRING_FILE_DISABLE_BUFFER);
-+ WRITE_STRING_FILE_VERIFY_ON_FAILURE);
- }
-
- int get_oom_score_adjust(int *ret) {
diff --git a/meta/recipes-core/systemd/systemd/0014-Handle-missing-gshadow.patch b/meta/recipes-core/systemd/systemd/0014-Handle-missing-gshadow.patch
new file mode 100644
index 0000000000..a751e1ba6f
--- /dev/null
+++ b/meta/recipes-core/systemd/systemd/0014-Handle-missing-gshadow.patch
@@ -0,0 +1,173 @@
+From b7c827bb44edbb6251c9fcdb80aa03982c0e7bf3 Mon Sep 17 00:00:00 2001
+From: Alex Kiernan <alex.kiernan@gmail.com>
+Date: Tue, 10 Mar 2020 11:05:20 +0000
+Subject: [PATCH 14/22] Handle missing gshadow
+
+gshadow usage is now present in the userdb code. Mask all uses of it to
+allow compilation on musl
+
+Upstream-Status: Inappropriate [musl specific]
+Signed-off-by: Alex Kiernan <alex.kiernan@gmail.com>
+[Rebased for v247]
+Signed-off-by: Luca Boccassi <luca.boccassi@microsoft.com>
+---
+ src/shared/user-record-nss.c | 20 ++++++++++++++++++++
+ src/shared/user-record-nss.h | 4 ++++
+ src/shared/userdb.c | 7 ++++++-
+ 3 files changed, 30 insertions(+), 1 deletion(-)
+
+diff --git a/src/shared/user-record-nss.c b/src/shared/user-record-nss.c
+index 414a49331b..1a4e1b628c 100644
+--- a/src/shared/user-record-nss.c
++++ b/src/shared/user-record-nss.c
+@@ -329,8 +329,10 @@ int nss_group_to_group_record(
+ if (isempty(grp->gr_name))
+ return -EINVAL;
+
++#if ENABLE_GSHADOW
+ if (sgrp && !streq_ptr(sgrp->sg_namp, grp->gr_name))
+ return -EINVAL;
++#endif
+
+ g = group_record_new();
+ if (!g)
+@@ -346,6 +348,7 @@ int nss_group_to_group_record(
+
+ g->gid = grp->gr_gid;
+
++#if ENABLE_GSHADOW
+ if (sgrp) {
+ if (looks_like_hashed_password(utf8_only(sgrp->sg_passwd))) {
+ g->hashed_password = strv_new(sgrp->sg_passwd);
+@@ -361,6 +364,7 @@ int nss_group_to_group_record(
+ if (r < 0)
+ return r;
+ }
++#endif
+
+ r = json_build(&g->json, JSON_BUILD_OBJECT(
+ JSON_BUILD_PAIR("groupName", JSON_BUILD_STRING(g->group_name)),
+@@ -387,6 +391,7 @@ int nss_sgrp_for_group(const struct group *grp, struct sgrp *ret_sgrp, char **re
+ assert(ret_sgrp);
+ assert(ret_buffer);
+
++#if ENABLE_GSHADOW
+ for (;;) {
+ _cleanup_free_ char *buf = NULL;
+ struct sgrp sgrp, *result;
+@@ -415,6 +420,9 @@ int nss_sgrp_for_group(const struct group *grp, struct sgrp *ret_sgrp, char **re
+ buflen *= 2;
+ buf = mfree(buf);
+ }
++#else
++ return -ESRCH;
++#endif
+ }
+
+ int nss_group_record_by_name(
+@@ -426,7 +434,9 @@ int nss_group_record_by_name(
+ struct group grp, *result;
+ bool incomplete = false;
+ size_t buflen = 4096;
++#if ENABLE_GSHADOW
+ struct sgrp sgrp, *sresult = NULL;
++#endif
+ int r;
+
+ assert(name);
+@@ -455,6 +465,7 @@ int nss_group_record_by_name(
+ buf = mfree(buf);
+ }
+
++#if ENABLE_GSHADOW
+ if (with_shadow) {
+ r = nss_sgrp_for_group(result, &sgrp, &sbuf);
+ if (r < 0) {
+@@ -466,6 +477,9 @@ int nss_group_record_by_name(
+ incomplete = true;
+
+ r = nss_group_to_group_record(result, sresult, ret);
++#else
++ r = nss_group_to_group_record(result, NULL, ret);
++#endif
+ if (r < 0)
+ return r;
+
+@@ -483,7 +497,9 @@ int nss_group_record_by_gid(
+ struct group grp, *result;
+ bool incomplete = false;
+ size_t buflen = 4096;
++#if ENABLE_GSHADOW
+ struct sgrp sgrp, *sresult = NULL;
++#endif
+ int r;
+
+ for (;;) {
+@@ -509,6 +525,7 @@ int nss_group_record_by_gid(
+ buf = mfree(buf);
+ }
+
++#if ENABLE_GSHADOW
+ if (with_shadow) {
+ r = nss_sgrp_for_group(result, &sgrp, &sbuf);
+ if (r < 0) {
+@@ -520,6 +537,9 @@ int nss_group_record_by_gid(
+ incomplete = true;
+
+ r = nss_group_to_group_record(result, sresult, ret);
++#else
++ r = nss_group_to_group_record(result, NULL, ret);
++#endif
+ if (r < 0)
+ return r;
+
+diff --git a/src/shared/user-record-nss.h b/src/shared/user-record-nss.h
+index 22ab04d6ee..4e52e7a911 100644
+--- a/src/shared/user-record-nss.h
++++ b/src/shared/user-record-nss.h
+@@ -2,7 +2,11 @@
+ #pragma once
+
+ #include <grp.h>
++#if ENABLE_GSHADOW
+ #include <gshadow.h>
++#else
++struct sgrp;
++#endif
+ #include <pwd.h>
+ #include <shadow.h>
+
+diff --git a/src/shared/userdb.c b/src/shared/userdb.c
+index f60d48ace4..e878199a28 100644
+--- a/src/shared/userdb.c
++++ b/src/shared/userdb.c
+@@ -1038,13 +1038,15 @@ int groupdb_iterator_get(UserDBIterator *iterator, GroupRecord **ret) {
+ if (gr) {
+ _cleanup_free_ char *buffer = NULL;
+ bool incomplete = false;
++#if ENABLE_GSHADOW
+ struct sgrp sgrp;
+-
++#endif
+ if (streq_ptr(gr->gr_name, "root"))
+ iterator->synthesize_root = false;
+ if (gr->gr_gid == GID_NOBODY)
+ iterator->synthesize_nobody = false;
+
++#if ENABLE_GSHADOW
+ if (!FLAGS_SET(iterator->flags, USERDB_SUPPRESS_SHADOW)) {
+ r = nss_sgrp_for_group(gr, &sgrp, &buffer);
+ if (r < 0) {
+@@ -1057,6 +1059,9 @@ int groupdb_iterator_get(UserDBIterator *iterator, GroupRecord **ret) {
+ }
+
+ r = nss_group_to_group_record(gr, r >= 0 ? &sgrp : NULL, ret);
++#else
++ r = nss_group_to_group_record(gr, NULL, ret);
++#endif
+ if (r < 0)
+ return r;
+
+--
+2.34.1
+
diff --git a/meta/recipes-core/systemd/systemd/0015-distinguish-XSI-compliant-strerror_r-from-GNU-specif.patch b/meta/recipes-core/systemd/systemd/0015-distinguish-XSI-compliant-strerror_r-from-GNU-specif.patch
deleted file mode 100644
index c563982607..0000000000
--- a/meta/recipes-core/systemd/systemd/0015-distinguish-XSI-compliant-strerror_r-from-GNU-specif.patch
+++ /dev/null
@@ -1,60 +0,0 @@
-From 0542d27ebbb250c09bdcfcf9f2ea3d27426fe522 Mon Sep 17 00:00:00 2001
-From: Chen Qi <Qi.Chen@windriver.com>
-Date: Tue, 10 Jul 2018 15:40:17 +0800
-Subject: [PATCH] distinguish XSI-compliant strerror_r from GNU-specifi
- strerror_r
-
-XSI-compliant strerror_r and GNU-specifi strerror_r are different.
-
- int strerror_r(int errnum, char *buf, size_t buflen);
- /* XSI-compliant */
-
- char *strerror_r(int errnum, char *buf, size_t buflen);
- /* GNU-specific */
-
-We need to distinguish between them. Otherwise, we'll get an int value
-assigned to (char *) variable, resulting in segment fault.
-
-Upstream-Status: Inappropriate [musl specific]
-
-Signed-off-by: Chen Qi <Qi.Chen@windriver.com>
-
----
- src/libsystemd/sd-bus/bus-error.c | 5 +++++
- src/libsystemd/sd-journal/journal-send.c | 5 +++++
- 2 files changed, 10 insertions(+)
-
-diff --git a/src/libsystemd/sd-bus/bus-error.c b/src/libsystemd/sd-bus/bus-error.c
-index bdfa145ab7..61928f4bf3 100644
---- a/src/libsystemd/sd-bus/bus-error.c
-+++ b/src/libsystemd/sd-bus/bus-error.c
-@@ -409,7 +409,12 @@ static void bus_error_strerror(sd_bus_error *e, int error) {
- return;
-
- errno = 0;
-+#ifndef __GLIBC__
-+ strerror_r(error, m, k);
-+ x = m;
-+#else
- x = strerror_r(error, m, k);
-+#endif
- if (errno == ERANGE || strlen(x) >= k - 1) {
- free(m);
- k *= 2;
-diff --git a/src/libsystemd/sd-journal/journal-send.c b/src/libsystemd/sd-journal/journal-send.c
-index e6ceba54f9..285ebbc9ef 100644
---- a/src/libsystemd/sd-journal/journal-send.c
-+++ b/src/libsystemd/sd-journal/journal-send.c
-@@ -370,7 +370,12 @@ static int fill_iovec_perror_and_send(const char *message, int skip, struct iove
- char* j;
-
- errno = 0;
-+#ifndef __GLIBC__
-+ strerror_r(_saved_errno_, buffer + 8 + k, n - 8 - k);
-+ j = buffer + 8 + k;
-+#else
- j = strerror_r(_saved_errno_, buffer + 8 + k, n - 8 - k);
-+#endif
- if (errno == 0) {
- char error[STRLEN("ERRNO=") + DECIMAL_STR_MAX(int) + 1];
-
diff --git a/meta/recipes-core/systemd/systemd/0015-missing_syscall.h-Define-MIPS-ABI-defines-for-musl.patch b/meta/recipes-core/systemd/systemd/0015-missing_syscall.h-Define-MIPS-ABI-defines-for-musl.patch
new file mode 100644
index 0000000000..e112766a9b
--- /dev/null
+++ b/meta/recipes-core/systemd/systemd/0015-missing_syscall.h-Define-MIPS-ABI-defines-for-musl.patch
@@ -0,0 +1,49 @@
+From 3dc9d9d410bcce54fddfd94f43f7f77f3aa8e281 Mon Sep 17 00:00:00 2001
+From: Khem Raj <raj.khem@gmail.com>
+Date: Mon, 12 Apr 2021 23:44:53 -0700
+Subject: [PATCH 15/22] missing_syscall.h: Define MIPS ABI defines for musl
+
+musl does not define _MIPS_SIM_ABI32, _MIPS_SIM_NABI32, _MIPS_SIM_ABI64
+unlike glibc where these are provided by libc headers, therefore define
+them here in case they are undefined
+
+Upstream-Status: Pending
+
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+---
+ src/basic/missing_syscall.h | 6 ++++++
+ src/shared/base-filesystem.c | 1 +
+ 2 files changed, 7 insertions(+)
+
+diff --git a/src/basic/missing_syscall.h b/src/basic/missing_syscall.h
+index d795efd8f2..d6729d3c1d 100644
+--- a/src/basic/missing_syscall.h
++++ b/src/basic/missing_syscall.h
+@@ -20,6 +20,12 @@
+ #include <asm/sgidefs.h>
+ #endif
+
++#ifndef _MIPS_SIM_ABI32
++#define _MIPS_SIM_ABI32 1
++#define _MIPS_SIM_NABI32 2
++#define _MIPS_SIM_ABI64 3
++#endif
++
+ #include "macro.h"
+ #include "missing_keyctl.h"
+ #include "missing_stat.h"
+diff --git a/src/shared/base-filesystem.c b/src/shared/base-filesystem.c
+index 7ae921a113..0ef9d1fd39 100644
+--- a/src/shared/base-filesystem.c
++++ b/src/shared/base-filesystem.c
+@@ -20,6 +20,7 @@
+ #include "string-util.h"
+ #include "umask-util.h"
+ #include "user-util.h"
++#include "missing_syscall.h"
+
+ typedef struct BaseFilesystem {
+ const char *dir; /* directory or symlink to create */
+--
+2.34.1
+
diff --git a/meta/recipes-core/systemd/systemd/0016-pass-correct-parameters-to-getdents64.patch b/meta/recipes-core/systemd/systemd/0016-pass-correct-parameters-to-getdents64.patch
new file mode 100644
index 0000000000..0be817e62d
--- /dev/null
+++ b/meta/recipes-core/systemd/systemd/0016-pass-correct-parameters-to-getdents64.patch
@@ -0,0 +1,37 @@
+From 0994b59dba9f248ad31cb7087046dc00b72cb4ea Mon Sep 17 00:00:00 2001
+From: Khem Raj <raj.khem@gmail.com>
+Date: Fri, 21 Jan 2022 15:15:11 -0800
+Subject: [PATCH 16/22] pass correct parameters to getdents64
+
+Fixes
+../git/src/basic/recurse-dir.c:57:40: error: incompatible pointer types passing 'uint8_t *' (aka 'unsigned char *') to parameter of type 'struct dirent *' [-Werror,-Wincompatible-pointer-types]
+ n = getdents64(dir_fd, (uint8_t*) de->buffer + de->buffer_size, bs - de->buffer_size);
+ ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+../git/src/basic/stat-util.c:102:28: error: incompatible pointer types passing 'union (unnamed union at ../git/src/basic/stat-util.c:78:9) *' to parameter of type 'struct dirent *' [-Werror,-Wincompatible-pointer-types]
+ n = getdents64(fd, &buffer, sizeof(buffer));
+ ^~~~~~~
+
+Upstream-Status: Inappropriate [musl specific]
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+Signed-off-by: Jiaqing Zhao <jiaqing.zhao@linux.intel.com>
+---
+ src/basic/recurse-dir.c | 2 +-
+ 1 file changed, 1 insertion(+), 1 deletion(-)
+
+diff --git a/src/basic/recurse-dir.c b/src/basic/recurse-dir.c
+index 5e98b7a5d8..aef065047b 100644
+--- a/src/basic/recurse-dir.c
++++ b/src/basic/recurse-dir.c
+@@ -55,7 +55,7 @@ int readdir_all(int dir_fd,
+ bs = MIN(MALLOC_SIZEOF_SAFE(de) - offsetof(DirectoryEntries, buffer), (size_t) SSIZE_MAX);
+ assert(bs > de->buffer_size);
+
+- n = getdents64(dir_fd, (uint8_t*) de->buffer + de->buffer_size, bs - de->buffer_size);
++ n = getdents64(dir_fd, (struct dirent*)((uint8_t*) de->buffer + de->buffer_size), bs - de->buffer_size);
+ if (n < 0)
+ return -errno;
+ if (n == 0)
+--
+2.34.1
+
diff --git a/meta/recipes-core/systemd/systemd/0017-Adjust-for-musl-headers.patch b/meta/recipes-core/systemd/systemd/0017-Adjust-for-musl-headers.patch
new file mode 100644
index 0000000000..4176522a1c
--- /dev/null
+++ b/meta/recipes-core/systemd/systemd/0017-Adjust-for-musl-headers.patch
@@ -0,0 +1,572 @@
+From 3c094d443ca30f19114392fd8ef274af6eabc12d Mon Sep 17 00:00:00 2001
+From: Khem Raj <raj.khem@gmail.com>
+Date: Fri, 21 Jan 2022 22:19:37 -0800
+Subject: [PATCH 17/22] Adjust for musl headers
+
+Upstream-Status: Inappropriate [musl specific]
+
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+
+[Rebased for v255.1]
+Signed-off-by: Chen Qi <Qi.Chen@windriver.com>
+---
+ src/libsystemd-network/sd-dhcp6-client.c | 2 +-
+ src/network/netdev/bareudp.c | 2 +-
+ src/network/netdev/batadv.c | 2 +-
+ src/network/netdev/bond.c | 2 +-
+ src/network/netdev/bridge.c | 2 +-
+ src/network/netdev/dummy.c | 2 +-
+ src/network/netdev/geneve.c | 2 +-
+ src/network/netdev/ifb.c | 2 +-
+ src/network/netdev/ipoib.c | 2 +-
+ src/network/netdev/ipvlan.c | 2 +-
+ src/network/netdev/macsec.c | 2 +-
+ src/network/netdev/macvlan.c | 2 +-
+ src/network/netdev/netdev.c | 2 +-
+ src/network/netdev/netdevsim.c | 2 +-
+ src/network/netdev/nlmon.c | 2 +-
+ src/network/netdev/tunnel.c | 2 +-
+ src/network/netdev/vcan.c | 2 +-
+ src/network/netdev/veth.c | 2 +-
+ src/network/netdev/vlan.c | 2 +-
+ src/network/netdev/vrf.c | 2 +-
+ src/network/netdev/vxcan.c | 2 +-
+ src/network/netdev/vxlan.c | 2 +-
+ src/network/netdev/wireguard.c | 2 +-
+ src/network/netdev/xfrm.c | 2 +-
+ src/network/networkd-bridge-mdb.c | 4 ++--
+ src/network/networkd-dhcp-common.c | 3 ++-
+ src/network/networkd-dhcp-prefix-delegation.c | 3 ++-
+ src/network/networkd-dhcp-server.c | 2 +-
+ src/network/networkd-dhcp4.c | 2 +-
+ src/network/networkd-ipv6ll.c | 2 +-
+ src/network/networkd-link.c | 2 +-
+ src/network/networkd-ndisc.c | 2 +-
+ src/network/networkd-route.c | 8 ++++----
+ src/network/networkd-setlink.c | 2 +-
+ src/network/networkd-sysctl.c | 2 +-
+ src/shared/linux/ethtool.h | 3 ++-
+ src/shared/netif-util.c | 2 +-
+ src/udev/udev-builtin-net_id.c | 2 +-
+ 38 files changed, 45 insertions(+), 42 deletions(-)
+
+diff --git a/src/libsystemd-network/sd-dhcp6-client.c b/src/libsystemd-network/sd-dhcp6-client.c
+index c20367dfc9..b8d4cd8c2a 100644
+--- a/src/libsystemd-network/sd-dhcp6-client.c
++++ b/src/libsystemd-network/sd-dhcp6-client.c
+@@ -5,7 +5,7 @@
+
+ #include <errno.h>
+ #include <sys/ioctl.h>
+-#include <linux/if_arp.h>
++//#include <linux/if_arp.h>
+ #include <linux/if_infiniband.h>
+
+ #include "sd-dhcp6-client.h"
+diff --git a/src/network/netdev/bareudp.c b/src/network/netdev/bareudp.c
+index 1df886573b..c8b6714726 100644
+--- a/src/network/netdev/bareudp.c
++++ b/src/network/netdev/bareudp.c
+@@ -2,7 +2,7 @@
+ * Copyright © 2020 VMware, Inc. */
+
+ #include <netinet/in.h>
+-#include <linux/if_arp.h>
++//#include <linux/if_arp.h>
+
+ #include "bareudp.h"
+ #include "netlink-util.h"
+diff --git a/src/network/netdev/batadv.c b/src/network/netdev/batadv.c
+index 26da0231d4..2e8002af8c 100644
+--- a/src/network/netdev/batadv.c
++++ b/src/network/netdev/batadv.c
+@@ -3,7 +3,7 @@
+ #include <inttypes.h>
+ #include <netinet/in.h>
+ #include <linux/genetlink.h>
+-#include <linux/if_arp.h>
++//#include <linux/if_arp.h>
+
+ #include "batadv.h"
+ #include "fileio.h"
+diff --git a/src/network/netdev/bond.c b/src/network/netdev/bond.c
+index 4d75a0d6bf..985b3197e0 100644
+--- a/src/network/netdev/bond.c
++++ b/src/network/netdev/bond.c
+@@ -1,7 +1,7 @@
+ /* SPDX-License-Identifier: LGPL-2.1-or-later */
+
+ #include <netinet/in.h>
+-#include <linux/if_arp.h>
++//#include <linux/if_arp.h>
+
+ #include "alloc-util.h"
+ #include "bond.h"
+diff --git a/src/network/netdev/bridge.c b/src/network/netdev/bridge.c
+index 3e394edadf..f12f667687 100644
+--- a/src/network/netdev/bridge.c
++++ b/src/network/netdev/bridge.c
+@@ -2,7 +2,7 @@
+
+ #include <net/if.h>
+ #include <netinet/in.h>
+-#include <linux/if_arp.h>
++//#include <linux/if_arp.h>
+ #include <linux/if_bridge.h>
+
+ #include "bridge.h"
+diff --git a/src/network/netdev/dummy.c b/src/network/netdev/dummy.c
+index 00df1d2787..77b506b422 100644
+--- a/src/network/netdev/dummy.c
++++ b/src/network/netdev/dummy.c
+@@ -1,6 +1,6 @@
+ /* SPDX-License-Identifier: LGPL-2.1-or-later */
+
+-#include <linux/if_arp.h>
++//#include <linux/if_arp.h>
+
+ #include "dummy.h"
+
+diff --git a/src/network/netdev/geneve.c b/src/network/netdev/geneve.c
+index bc655ec7ff..a77e8e17e4 100644
+--- a/src/network/netdev/geneve.c
++++ b/src/network/netdev/geneve.c
+@@ -2,7 +2,7 @@
+
+ #include <net/if.h>
+ #include <netinet/in.h>
+-#include <linux/if_arp.h>
++//#include <linux/if_arp.h>
+
+ #include "alloc-util.h"
+ #include "conf-parser.h"
+diff --git a/src/network/netdev/ifb.c b/src/network/netdev/ifb.c
+index d7ff44cb9e..e037629ae4 100644
+--- a/src/network/netdev/ifb.c
++++ b/src/network/netdev/ifb.c
+@@ -1,7 +1,7 @@
+ /* SPDX-License-Identifier: LGPL-2.1-or-later
+ * Copyright © 2019 VMware, Inc. */
+
+-#include <linux/if_arp.h>
++//#include <linux/if_arp.h>
+
+ #include "ifb.h"
+
+diff --git a/src/network/netdev/ipoib.c b/src/network/netdev/ipoib.c
+index d5fe299b7b..c9c8002eac 100644
+--- a/src/network/netdev/ipoib.c
++++ b/src/network/netdev/ipoib.c
+@@ -1,6 +1,6 @@
+ /* SPDX-License-Identifier: LGPL-2.1-or-later */
+
+-#include <linux/if_arp.h>
++//#include <linux/if_arp.h>
+ #include <linux/if_link.h>
+
+ #include "ipoib.h"
+diff --git a/src/network/netdev/ipvlan.c b/src/network/netdev/ipvlan.c
+index 05d5d010f6..d440f49537 100644
+--- a/src/network/netdev/ipvlan.c
++++ b/src/network/netdev/ipvlan.c
+@@ -2,7 +2,7 @@
+
+ #include <net/if.h>
+ #include <netinet/in.h>
+-#include <linux/if_arp.h>
++//#include <linux/if_arp.h>
+
+ #include "conf-parser.h"
+ #include "ipvlan.h"
+diff --git a/src/network/netdev/macsec.c b/src/network/netdev/macsec.c
+index 17d6acefb6..679d0984f9 100644
+--- a/src/network/netdev/macsec.c
++++ b/src/network/netdev/macsec.c
+@@ -1,7 +1,7 @@
+ /* SPDX-License-Identifier: LGPL-2.1-or-later */
+
+ #include <netinet/in.h>
+-#include <linux/if_arp.h>
++//#include <linux/if_arp.h>
+ #include <linux/if_ether.h>
+ #include <linux/if_macsec.h>
+ #include <linux/genetlink.h>
+diff --git a/src/network/netdev/macvlan.c b/src/network/netdev/macvlan.c
+index 203807e3a5..8ab09a387e 100644
+--- a/src/network/netdev/macvlan.c
++++ b/src/network/netdev/macvlan.c
+@@ -2,7 +2,7 @@
+
+ #include <net/if.h>
+ #include <netinet/in.h>
+-#include <linux/if_arp.h>
++//#include <linux/if_arp.h>
+
+ #include "conf-parser.h"
+ #include "macvlan.h"
+diff --git a/src/network/netdev/netdev.c b/src/network/netdev/netdev.c
+index 57127a861a..7f787d0b9f 100644
+--- a/src/network/netdev/netdev.c
++++ b/src/network/netdev/netdev.c
+@@ -2,7 +2,7 @@
+
+ #include <net/if.h>
+ #include <netinet/in.h>
+-#include <linux/if_arp.h>
++//#include <linux/if_arp.h>
+ #include <unistd.h>
+
+ #include "alloc-util.h"
+diff --git a/src/network/netdev/netdevsim.c b/src/network/netdev/netdevsim.c
+index 15d5c132f9..a3ffa48b15 100644
+--- a/src/network/netdev/netdevsim.c
++++ b/src/network/netdev/netdevsim.c
+@@ -1,6 +1,6 @@
+ /* SPDX-License-Identifier: LGPL-2.1-or-later */
+
+-#include <linux/if_arp.h>
++//#include <linux/if_arp.h>
+
+ #include "netdevsim.h"
+
+diff --git a/src/network/netdev/nlmon.c b/src/network/netdev/nlmon.c
+index ff372092e6..eef66811f4 100644
+--- a/src/network/netdev/nlmon.c
++++ b/src/network/netdev/nlmon.c
+@@ -1,6 +1,6 @@
+ /* SPDX-License-Identifier: LGPL-2.1-or-later */
+
+-#include <linux/if_arp.h>
++//#include <linux/if_arp.h>
+
+ #include "nlmon.h"
+
+diff --git a/src/network/netdev/tunnel.c b/src/network/netdev/tunnel.c
+index db84e7cf6e..93d5642962 100644
+--- a/src/network/netdev/tunnel.c
++++ b/src/network/netdev/tunnel.c
+@@ -2,7 +2,7 @@
+
+ #include <netinet/in.h>
+ #include <linux/fou.h>
+-#include <linux/if_arp.h>
++//#include <linux/if_arp.h>
+ #include <linux/if_tunnel.h>
+ #include <linux/ip.h>
+ #include <linux/ip6_tunnel.h>
+diff --git a/src/network/netdev/vcan.c b/src/network/netdev/vcan.c
+index 380547ee1e..137c1adf8a 100644
+--- a/src/network/netdev/vcan.c
++++ b/src/network/netdev/vcan.c
+@@ -1,6 +1,6 @@
+ /* SPDX-License-Identifier: LGPL-2.1-or-later */
+
+-#include <linux/if_arp.h>
++//#include <linux/if_arp.h>
+
+ #include "vcan.h"
+
+diff --git a/src/network/netdev/veth.c b/src/network/netdev/veth.c
+index e0f5b4ebb1..8a424ed03d 100644
+--- a/src/network/netdev/veth.c
++++ b/src/network/netdev/veth.c
+@@ -3,7 +3,7 @@
+ #include <errno.h>
+ #include <net/if.h>
+ #include <netinet/in.h>
+-#include <linux/if_arp.h>
++//#include <linux/if_arp.h>
+ #include <linux/veth.h>
+
+ #include "netlink-util.h"
+diff --git a/src/network/netdev/vlan.c b/src/network/netdev/vlan.c
+index 2390206993..efec630e30 100644
+--- a/src/network/netdev/vlan.c
++++ b/src/network/netdev/vlan.c
+@@ -2,7 +2,7 @@
+
+ #include <errno.h>
+ #include <net/if.h>
+-#include <linux/if_arp.h>
++//#include <linux/if_arp.h>
+ #include <linux/if_vlan.h>
+
+ #include "parse-util.h"
+diff --git a/src/network/netdev/vrf.c b/src/network/netdev/vrf.c
+index b75ec2bcc6..6aeeea640b 100644
+--- a/src/network/netdev/vrf.c
++++ b/src/network/netdev/vrf.c
+@@ -2,7 +2,7 @@
+
+ #include <net/if.h>
+ #include <netinet/in.h>
+-#include <linux/if_arp.h>
++//#include <linux/if_arp.h>
+
+ #include "vrf.h"
+
+diff --git a/src/network/netdev/vxcan.c b/src/network/netdev/vxcan.c
+index c0343f45b6..f9e718f40b 100644
+--- a/src/network/netdev/vxcan.c
++++ b/src/network/netdev/vxcan.c
+@@ -1,7 +1,7 @@
+ /* SPDX-License-Identifier: LGPL-2.1-or-later */
+
+ #include <linux/can/vxcan.h>
+-#include <linux/if_arp.h>
++//#include <linux/if_arp.h>
+
+ #include "vxcan.h"
+
+diff --git a/src/network/netdev/vxlan.c b/src/network/netdev/vxlan.c
+index b11fdbbd0d..a971a917f0 100644
+--- a/src/network/netdev/vxlan.c
++++ b/src/network/netdev/vxlan.c
+@@ -2,7 +2,7 @@
+
+ #include <net/if.h>
+ #include <netinet/in.h>
+-#include <linux/if_arp.h>
++//#include <linux/if_arp.h>
+
+ #include "conf-parser.h"
+ #include "alloc-util.h"
+diff --git a/src/network/netdev/wireguard.c b/src/network/netdev/wireguard.c
+index 4c7d837c41..6df6dfb816 100644
+--- a/src/network/netdev/wireguard.c
++++ b/src/network/netdev/wireguard.c
+@@ -6,7 +6,7 @@
+ #include <sys/ioctl.h>
+ #include <net/if.h>
+ #include <netinet/in.h>
+-#include <linux/if_arp.h>
++//#include <linux/if_arp.h>
+ #include <linux/ipv6_route.h>
+
+ #include "sd-resolve.h"
+diff --git a/src/network/netdev/xfrm.c b/src/network/netdev/xfrm.c
+index 905bfc0bdf..39e34dbb3b 100644
+--- a/src/network/netdev/xfrm.c
++++ b/src/network/netdev/xfrm.c
+@@ -1,6 +1,6 @@
+ /* SPDX-License-Identifier: LGPL-2.1-or-later */
+
+-#include <linux/if_arp.h>
++//#include <linux/if_arp.h>
+
+ #include "missing_network.h"
+ #include "xfrm.h"
+diff --git a/src/network/networkd-bridge-mdb.c b/src/network/networkd-bridge-mdb.c
+index bd1a9745dc..949d3da029 100644
+--- a/src/network/networkd-bridge-mdb.c
++++ b/src/network/networkd-bridge-mdb.c
+@@ -1,7 +1,5 @@
+ /* SPDX-License-Identifier: LGPL-2.1-or-later */
+
+-#include <net/if.h>
+-#include <linux/if_bridge.h>
+
+ #include "netlink-util.h"
+ #include "networkd-bridge-mdb.h"
+@@ -11,6 +9,8 @@
+ #include "networkd-queue.h"
+ #include "string-util.h"
+ #include "vlan-util.h"
++#include <net/if.h>
++#include <linux/if_bridge.h>
+
+ #define STATIC_BRIDGE_MDB_ENTRIES_PER_NETWORK_MAX 1024U
+
+diff --git a/src/network/networkd-dhcp-common.c b/src/network/networkd-dhcp-common.c
+index 080b15387c..efe8283957 100644
+--- a/src/network/networkd-dhcp-common.c
++++ b/src/network/networkd-dhcp-common.c
+@@ -1,7 +1,8 @@
+ /* SPDX-License-Identifier: LGPL-2.1-or-later */
+
+ #include <netinet/in.h>
+-#include <linux/if_arp.h>
++//#include <linux/if_arp.h>
++#include <net/if.h>
+
+ #include "bus-error.h"
+ #include "bus-locator.h"
+diff --git a/src/network/networkd-dhcp-prefix-delegation.c b/src/network/networkd-dhcp-prefix-delegation.c
+index af2fe9efcd..511565700f 100644
+--- a/src/network/networkd-dhcp-prefix-delegation.c
++++ b/src/network/networkd-dhcp-prefix-delegation.c
+@@ -1,6 +1,5 @@
+ /* SPDX-License-Identifier: LGPL-2.1-or-later */
+
+-#include <linux/ipv6_route.h>
+
+ #include "dhcp6-lease-internal.h"
+ #include "hashmap.h"
+@@ -20,6 +19,8 @@
+ #include "strv.h"
+ #include "tunnel.h"
+
++#include <linux/ipv6_route.h>
++
+ bool link_dhcp_pd_is_enabled(Link *link) {
+ assert(link);
+
+diff --git a/src/network/networkd-dhcp-server.c b/src/network/networkd-dhcp-server.c
+index 607fe0053c..9ce4005874 100644
+--- a/src/network/networkd-dhcp-server.c
++++ b/src/network/networkd-dhcp-server.c
+@@ -1,7 +1,7 @@
+ /* SPDX-License-Identifier: LGPL-2.1-or-later */
+
+ #include <netinet/in.h>
+-#include <linux/if_arp.h>
++//#include <linux/if_arp.h>
+ #include <linux/if.h>
+
+ #include "sd-dhcp-server.h"
+diff --git a/src/network/networkd-dhcp4.c b/src/network/networkd-dhcp4.c
+index efbae6d868..1ea2151d50 100644
+--- a/src/network/networkd-dhcp4.c
++++ b/src/network/networkd-dhcp4.c
+@@ -3,7 +3,7 @@
+ #include <netinet/in.h>
+ #include <netinet/ip.h>
+ #include <linux/if.h>
+-#include <linux/if_arp.h>
++//#include <linux/if_arp.h>
+
+ #include "alloc-util.h"
+ #include "dhcp-client-internal.h"
+diff --git a/src/network/networkd-ipv6ll.c b/src/network/networkd-ipv6ll.c
+index 32229a3fc7..662a345d6e 100644
+--- a/src/network/networkd-ipv6ll.c
++++ b/src/network/networkd-ipv6ll.c
+@@ -1,7 +1,7 @@
+ /* SPDX-License-Identifier: LGPL-2.1-or-later */
+
+ #include <linux/if.h>
+-#include <linux/if_arp.h>
++//#include <linux/if_arp.h>
+
+ #include "in-addr-util.h"
+ #include "networkd-address.h"
+diff --git a/src/network/networkd-link.c b/src/network/networkd-link.c
+index ee5f0f2c0a..ea5269a2de 100644
+--- a/src/network/networkd-link.c
++++ b/src/network/networkd-link.c
+@@ -3,7 +3,7 @@
+ #include <net/if.h>
+ #include <netinet/in.h>
+ #include <linux/if.h>
+-#include <linux/if_arp.h>
++//#include <linux/if_arp.h>
+ #include <linux/if_link.h>
+ #include <linux/netdevice.h>
+ #include <sys/socket.h>
+diff --git a/src/network/networkd-ndisc.c b/src/network/networkd-ndisc.c
+index ab9eeb13a5..dd96fe7483 100644
+--- a/src/network/networkd-ndisc.c
++++ b/src/network/networkd-ndisc.c
+@@ -6,7 +6,7 @@
+ #include <arpa/inet.h>
+ #include <netinet/icmp6.h>
+ #include <linux/if.h>
+-#include <linux/if_arp.h>
++//#include <linux/if_arp.h>
+
+ #include "sd-ndisc.h"
+
+diff --git a/src/network/networkd-route.c b/src/network/networkd-route.c
+index 7218d799fc..30d5574eae 100644
+--- a/src/network/networkd-route.c
++++ b/src/network/networkd-route.c
+@@ -1,9 +1,5 @@
+ /* SPDX-License-Identifier: LGPL-2.1-or-later */
+
+-#include <linux/icmpv6.h>
+-#include <linux/ipv6_route.h>
+-#include <linux/nexthop.h>
+-
+ #include "alloc-util.h"
+ #include "event-util.h"
+ #include "netlink-util.h"
+@@ -21,6 +17,10 @@
+ #include "vrf.h"
+ #include "wireguard.h"
+
++#include <linux/icmpv6.h>
++#include <linux/ipv6_route.h>
++#include <linux/nexthop.h>
++
+ int route_new(Route **ret) {
+ _cleanup_(route_freep) Route *route = NULL;
+
+diff --git a/src/network/networkd-setlink.c b/src/network/networkd-setlink.c
+index 2298f9ea3a..7d5f87de53 100644
+--- a/src/network/networkd-setlink.c
++++ b/src/network/networkd-setlink.c
+@@ -2,7 +2,7 @@
+
+ #include <netinet/in.h>
+ #include <linux/if.h>
+-#include <linux/if_arp.h>
++//#include <linux/if_arp.h>
+ #include <linux/if_bridge.h>
+
+ #include "missing_network.h"
+diff --git a/src/network/networkd-sysctl.c b/src/network/networkd-sysctl.c
+index 2b226b2e2a..f12a474e2f 100644
+--- a/src/network/networkd-sysctl.c
++++ b/src/network/networkd-sysctl.c
+@@ -2,7 +2,7 @@
+
+ #include <netinet/in.h>
+ #include <linux/if.h>
+-#include <linux/if_arp.h>
++//#include <linux/if_arp.h>
+
+ #include "missing_network.h"
+ #include "networkd-link.h"
+diff --git a/src/shared/linux/ethtool.h b/src/shared/linux/ethtool.h
+index 3d1da515c0..3fca9a4faf 100644
+--- a/src/shared/linux/ethtool.h
++++ b/src/shared/linux/ethtool.h
+@@ -16,7 +16,8 @@
+
+ #include <linux/const.h>
+ #include <linux/types.h>
+-#include <linux/if_ether.h>
++#include <netinet/if_ether.h>
++//#include <linux/if_ether.h>
+
+ #include <limits.h> /* for INT_MAX */
+
+diff --git a/src/shared/netif-util.c b/src/shared/netif-util.c
+index f56c5646c1..5af28ff119 100644
+--- a/src/shared/netif-util.c
++++ b/src/shared/netif-util.c
+@@ -1,7 +1,7 @@
+ /* SPDX-License-Identifier: LGPL-2.1-or-later */
+
+ #include <linux/if.h>
+-#include <linux/if_arp.h>
++//#include <linux/if_arp.h>
+
+ #include "arphrd-util.h"
+ #include "device-util.h"
+diff --git a/src/udev/udev-builtin-net_id.c b/src/udev/udev-builtin-net_id.c
+index f528a46b8e..830318cda5 100644
+--- a/src/udev/udev-builtin-net_id.c
++++ b/src/udev/udev-builtin-net_id.c
+@@ -18,7 +18,7 @@
+ #include <stdarg.h>
+ #include <unistd.h>
+ #include <linux/if.h>
+-#include <linux/if_arp.h>
++//#include <linux/if_arp.h>
+ #include <linux/netdevice.h>
+ #include <linux/pci_regs.h>
+
+--
+2.34.1
+
diff --git a/meta/recipes-core/systemd/systemd/0018-avoid-redefinition-of-prctl_mm_map-structure.patch b/meta/recipes-core/systemd/systemd/0018-avoid-redefinition-of-prctl_mm_map-structure.patch
deleted file mode 100644
index 1fcba7af08..0000000000
--- a/meta/recipes-core/systemd/systemd/0018-avoid-redefinition-of-prctl_mm_map-structure.patch
+++ /dev/null
@@ -1,30 +0,0 @@
-From e1d0210b47906dd121f936f3181092835df6a95c Mon Sep 17 00:00:00 2001
-From: Chen Qi <Qi.Chen@windriver.com>
-Date: Mon, 25 Feb 2019 15:44:54 +0800
-Subject: [PATCH] avoid redefinition of prctl_mm_map structure
-
-Fix the following compile failure:
-error: redefinition of 'struct prctl_mm_map'
-
-Upstream-Status: Inappropriate [musl specific]
-
-Signed-off-by: Chen Qi <Qi.Chen@windriver.com>
-
----
- src/basic/missing_prctl.h | 2 ++
- 1 file changed, 2 insertions(+)
-
-diff --git a/src/basic/missing_prctl.h b/src/basic/missing_prctl.h
-index ab851306ba..5547cad875 100644
---- a/src/basic/missing_prctl.h
-+++ b/src/basic/missing_prctl.h
-@@ -1,7 +1,9 @@
- /* SPDX-License-Identifier: LGPL-2.1-or-later */
- #pragma once
-
-+#ifdef __GLIBC__
- #include <linux/prctl.h>
-+#endif
-
- /* 58319057b7847667f0c9585b9de0e8932b0fdb08 (4.3) */
- #ifndef PR_CAP_AMBIENT
diff --git a/meta/recipes-core/systemd/systemd/0018-test-bus-error-strerror-is-assumed-to-be-GNU-specifi.patch b/meta/recipes-core/systemd/systemd/0018-test-bus-error-strerror-is-assumed-to-be-GNU-specifi.patch
new file mode 100644
index 0000000000..75f6b9094a
--- /dev/null
+++ b/meta/recipes-core/systemd/systemd/0018-test-bus-error-strerror-is-assumed-to-be-GNU-specifi.patch
@@ -0,0 +1,52 @@
+From be02bd0876a061728661535a709d313e39fe1ac3 Mon Sep 17 00:00:00 2001
+From: Khem Raj <raj.khem@gmail.com>
+Date: Tue, 8 Nov 2022 13:31:34 -0800
+Subject: [PATCH 18/22] test-bus-error: strerror() is assumed to be GNU
+ specific version mark it so
+
+Upstream-Status: Inappropriate [Upstream systemd only supports glibc]
+
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+---
+ src/libsystemd/sd-bus/test-bus-error.c | 2 ++
+ src/test/test-errno-util.c | 3 ++-
+ 2 files changed, 4 insertions(+), 1 deletion(-)
+
+diff --git a/src/libsystemd/sd-bus/test-bus-error.c b/src/libsystemd/sd-bus/test-bus-error.c
+index a55f3f9856..4123bf3da0 100644
+--- a/src/libsystemd/sd-bus/test-bus-error.c
++++ b/src/libsystemd/sd-bus/test-bus-error.c
+@@ -99,7 +99,9 @@ TEST(error) {
+ assert_se(!sd_bus_error_is_set(&error));
+ assert_se(sd_bus_error_set_errno(&error, EBUSY) == -EBUSY);
+ assert_se(streq(error.name, "System.Error.EBUSY"));
++#ifdef __GLIBC__
+ assert_se(streq(error.message, STRERROR(EBUSY)));
++#endif
+ assert_se(sd_bus_error_has_name(&error, "System.Error.EBUSY"));
+ assert_se(sd_bus_error_get_errno(&error) == EBUSY);
+ assert_se(sd_bus_error_is_set(&error));
+diff --git a/src/test/test-errno-util.c b/src/test/test-errno-util.c
+index 376d532281..967cfd4d67 100644
+--- a/src/test/test-errno-util.c
++++ b/src/test/test-errno-util.c
+@@ -4,7 +4,7 @@
+ #include "stdio-util.h"
+ #include "string-util.h"
+ #include "tests.h"
+-
++#ifdef __GLIBC__
+ TEST(strerror_not_threadsafe) {
+ /* Just check that strerror really is not thread-safe. */
+ log_info("strerror(%d) → %s", 200, strerror(200));
+@@ -46,6 +46,7 @@ TEST(STRERROR_OR_ELSE) {
+ log_info("STRERROR_OR_ELSE(EPERM, \"EOF\") → %s", STRERROR_OR_EOF(EPERM));
+ log_info("STRERROR_OR_ELSE(-EPERM, \"EOF\") → %s", STRERROR_OR_EOF(-EPERM));
+ }
++#endif /* __GLIBC__ */
+
+ TEST(PROTECT_ERRNO) {
+ errno = 12;
+--
+2.34.1
+
diff --git a/meta/recipes-core/systemd/systemd/0019-errno-util-Make-STRERROR-portable-for-musl.patch b/meta/recipes-core/systemd/systemd/0019-errno-util-Make-STRERROR-portable-for-musl.patch
new file mode 100644
index 0000000000..e038b73678
--- /dev/null
+++ b/meta/recipes-core/systemd/systemd/0019-errno-util-Make-STRERROR-portable-for-musl.patch
@@ -0,0 +1,42 @@
+From 46d80840bfe37e67d4f18c37a77751ea1fe63a07 Mon Sep 17 00:00:00 2001
+From: Khem Raj <raj.khem@gmail.com>
+Date: Mon, 23 Jan 2023 23:39:46 -0800
+Subject: [PATCH 19/22] errno-util: Make STRERROR portable for musl
+
+Sadly, systemd has decided to use yet another GNU extention in a macro
+lets make this such that we can use XSI compliant strerror_r() for
+non-glibc hosts
+
+Upstream-Status: Inappropriate [musl specific]
+
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+---
+ src/basic/errno-util.h | 12 ++++++++++--
+ 1 file changed, 10 insertions(+), 2 deletions(-)
+
+diff --git a/src/basic/errno-util.h b/src/basic/errno-util.h
+index 27804e6382..274c1c6ef1 100644
+--- a/src/basic/errno-util.h
++++ b/src/basic/errno-util.h
+@@ -15,8 +15,16 @@
+ * https://stackoverflow.com/questions/34880638/compound-literal-lifetime-and-if-blocks
+ *
+ * Note that we use the GNU variant of strerror_r() here. */
+-#define STRERROR(errnum) strerror_r(abs(errnum), (char[ERRNO_BUF_LEN]){}, ERRNO_BUF_LEN)
+-
++static inline const char * STRERROR(int errnum);
++
++static inline const char * STRERROR(int errnum) {
++#ifdef __GLIBC__
++ return strerror_r(abs(errnum), (char[ERRNO_BUF_LEN]){}, ERRNO_BUF_LEN);
++#else
++ static __thread char buf[ERRNO_BUF_LEN];
++ return strerror_r(abs(errnum), buf, ERRNO_BUF_LEN) ? "unknown error" : buf;
++#endif
++}
+ /* A helper to print an error message or message for functions that return 0 on EOF.
+ * Note that we can't use ({ … }) to define a temporary variable, so errnum is
+ * evaluated twice. */
+--
+2.34.1
+
diff --git a/meta/recipes-core/systemd/systemd/0020-sd-event-Make-malloc_trim-conditional-on-glibc.patch b/meta/recipes-core/systemd/systemd/0020-sd-event-Make-malloc_trim-conditional-on-glibc.patch
new file mode 100644
index 0000000000..b83fffe793
--- /dev/null
+++ b/meta/recipes-core/systemd/systemd/0020-sd-event-Make-malloc_trim-conditional-on-glibc.patch
@@ -0,0 +1,39 @@
+From 9eb4867b4e2dbdb2484ae854022aff97e2f0feb3 Mon Sep 17 00:00:00 2001
+From: Khem Raj <raj.khem@gmail.com>
+Date: Wed, 2 Aug 2023 12:06:27 -0700
+Subject: [PATCH 20/22] sd-event: Make malloc_trim() conditional on glibc
+
+musl does not have this API
+
+Upstream-Status: Inappropriate [musl-specific]
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+---
+ src/libsystemd/sd-event/sd-event.c | 4 +++-
+ 1 file changed, 3 insertions(+), 1 deletion(-)
+
+diff --git a/src/libsystemd/sd-event/sd-event.c b/src/libsystemd/sd-event/sd-event.c
+index 288798a0dc..6419a7f216 100644
+--- a/src/libsystemd/sd-event/sd-event.c
++++ b/src/libsystemd/sd-event/sd-event.c
+@@ -1874,7 +1874,7 @@ _public_ int sd_event_add_exit(
+ }
+
+ _public_ int sd_event_trim_memory(void) {
+- int r;
++ int r = 0;
+
+ /* A default implementation of a memory pressure callback. Simply releases our own allocation caches
+ * and glibc's. This is automatically used when people call sd_event_add_memory_pressure() with a
+@@ -1888,7 +1888,9 @@ _public_ int sd_event_trim_memory(void) {
+
+ usec_t before_timestamp = now(CLOCK_MONOTONIC);
+ hashmap_trim_pools();
++#ifdef __GLIBC__
+ r = malloc_trim(0);
++#endif
+ usec_t after_timestamp = now(CLOCK_MONOTONIC);
+
+ if (r > 0)
+--
+2.34.1
+
diff --git a/meta/recipes-core/systemd/systemd/0021-shared-Do-not-use-malloc_info-on-musl.patch b/meta/recipes-core/systemd/systemd/0021-shared-Do-not-use-malloc_info-on-musl.patch
new file mode 100644
index 0000000000..7eff069bb7
--- /dev/null
+++ b/meta/recipes-core/systemd/systemd/0021-shared-Do-not-use-malloc_info-on-musl.patch
@@ -0,0 +1,57 @@
+From 502597b9ddd6b145541b23fadca0b1d3ca9f6367 Mon Sep 17 00:00:00 2001
+From: Khem Raj <raj.khem@gmail.com>
+Date: Wed, 2 Aug 2023 12:20:40 -0700
+Subject: [PATCH 21/22] shared: Do not use malloc_info on musl
+
+Upstream-Status: Inappropriate [musl-specific]
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+---
+ src/shared/bus-util.c | 5 +++--
+ src/shared/common-signal.c | 4 ++--
+ 2 files changed, 5 insertions(+), 4 deletions(-)
+
+diff --git a/src/shared/bus-util.c b/src/shared/bus-util.c
+index 74f148c8b4..2d862a123d 100644
+--- a/src/shared/bus-util.c
++++ b/src/shared/bus-util.c
+@@ -611,15 +611,16 @@ static int method_dump_memory_state_by_fd(sd_bus_message *message, void *userdat
+ _cleanup_close_ int fd = -EBADF;
+ size_t dump_size;
+ FILE *f;
+- int r;
++ int r = 0;
+
+ assert(message);
+
+ f = memstream_init(&m);
+ if (!f)
+ return -ENOMEM;
+-
++#ifdef __GLIBC__
+ r = RET_NERRNO(malloc_info(/* options= */ 0, f));
++#endif
+ if (r < 0)
+ return r;
+
+diff --git a/src/shared/common-signal.c b/src/shared/common-signal.c
+index 8e70e365dd..9e782caec9 100644
+--- a/src/shared/common-signal.c
++++ b/src/shared/common-signal.c
+@@ -65,12 +65,12 @@ int sigrtmin18_handler(sd_event_source *s, const struct signalfd_siginfo *si, vo
+ log_oom();
+ break;
+ }
+-
++#ifdef __GLIBC__
+ if (malloc_info(0, f) < 0) {
+ log_error_errno(errno, "Failed to invoke malloc_info(): %m");
+ break;
+ }
+-
++#endif
+ (void) memstream_dump(LOG_INFO, &m);
+ break;
+ }
+--
+2.34.1
+
diff --git a/meta/recipes-core/systemd/systemd/0021-test-json.c-define-M_PIl.patch b/meta/recipes-core/systemd/systemd/0021-test-json.c-define-M_PIl.patch
deleted file mode 100644
index 82a01f732e..0000000000
--- a/meta/recipes-core/systemd/systemd/0021-test-json.c-define-M_PIl.patch
+++ /dev/null
@@ -1,35 +0,0 @@
-From e10a73de254b570bbc29b26423dbb86b4265bb05 Mon Sep 17 00:00:00 2001
-From: Chen Qi <Qi.Chen@windriver.com>
-Date: Mon, 25 Feb 2019 16:53:06 +0800
-Subject: [PATCH] test-json.c: define M_PIl
-
-Fix the following compile failure:
-src/test/test-json.c:305:50: error: 'M_PIl' undeclared (first use in this function); did you mean 'M_PI'?
-
-Upstream-Status: Inappropriate [musl specific]
-
-This is fixed upstream where systemd no longer needs M_PIl so it could
-be dropped when upgrading to 251+ see
-https://github.com/systemd/systemd/commit/f9a1fd2a3b2d8212ba84ef1c3b55657ced34475e
-
-Signed-off-by: Chen Qi <Qi.Chen@windriver.com>
-
----
- src/test/test-json.c | 4 ++++
- 1 file changed, 4 insertions(+)
-
-diff --git a/src/test/test-json.c b/src/test/test-json.c
-index b385edc269..5e5830238c 100644
---- a/src/test/test-json.c
-+++ b/src/test/test-json.c
-@@ -14,6 +14,10 @@
- #include "tests.h"
- #include "util.h"
-
-+#ifndef M_PIl
-+#define M_PIl 3.141592653589793238462643383279502884L
-+#endif
-+
- static void test_tokenizer_one(const char *data, ...) {
- unsigned line = 0, column = 0;
- void *state = NULL;
diff --git a/meta/recipes-core/systemd/systemd/0022-avoid-missing-LOCK_EX-declaration.patch b/meta/recipes-core/systemd/systemd/0022-avoid-missing-LOCK_EX-declaration.patch
new file mode 100644
index 0000000000..24f3bf74a0
--- /dev/null
+++ b/meta/recipes-core/systemd/systemd/0022-avoid-missing-LOCK_EX-declaration.patch
@@ -0,0 +1,43 @@
+From fd52f1764647e03a35e8f0ed0ef952049073ccbd Mon Sep 17 00:00:00 2001
+From: Chen Qi <Qi.Chen@windriver.com>
+Date: Tue, 2 Jan 2024 11:03:27 +0800
+Subject: [PATCH 22/22] avoid missing LOCK_EX declaration
+
+This only happens on MUSL. Include sys/file.h to avoid compilation
+error about missing LOCK_EX declaration.
+
+Upstream-Status: Inappropriate [musl specific]
+
+Signed-off-by: Chen Qi <Qi.Chen@windriver.com>
+---
+ src/core/exec-invoke.c | 1 +
+ src/shared/dev-setup.h | 1 +
+ 2 files changed, 2 insertions(+)
+
+diff --git a/src/core/exec-invoke.c b/src/core/exec-invoke.c
+index 70d963e269..7084811439 100644
+--- a/src/core/exec-invoke.c
++++ b/src/core/exec-invoke.c
+@@ -4,6 +4,7 @@
+ #include <sys/ioctl.h>
+ #include <sys/mount.h>
+ #include <sys/prctl.h>
++#include <sys/file.h>
+
+ #if HAVE_PAM
+ #include <security/pam_appl.h>
+diff --git a/src/shared/dev-setup.h b/src/shared/dev-setup.h
+index 5339bc4e5e..0697495f23 100644
+--- a/src/shared/dev-setup.h
++++ b/src/shared/dev-setup.h
+@@ -2,6 +2,7 @@
+ #pragma once
+
+ #include <sys/types.h>
++#include <sys/file.h>
+
+ int lock_dev_console(void);
+
+--
+2.34.1
+
diff --git a/meta/recipes-core/systemd/systemd/0022-do-not-disable-buffer-in-writing-files.patch b/meta/recipes-core/systemd/systemd/0022-do-not-disable-buffer-in-writing-files.patch
deleted file mode 100644
index 4dd6ff6e2e..0000000000
--- a/meta/recipes-core/systemd/systemd/0022-do-not-disable-buffer-in-writing-files.patch
+++ /dev/null
@@ -1,467 +0,0 @@
-From 414e2f97008a1f3c26a260a6dc4d51a8c1fa6900 Mon Sep 17 00:00:00 2001
-From: Chen Qi <Qi.Chen@windriver.com>
-Date: Fri, 1 Mar 2019 15:22:15 +0800
-Subject: [PATCH] do not disable buffer in writing files
-
-Do not disable buffer in writing files, otherwise we get
-failure at boot for musl like below.
-
- [!!!!!!] Failed to allocate manager object.
-
-And there will be other failures, critical or not critical.
-This is specific to musl.
-
-Upstream-Status: Inappropriate [musl]
-
-Signed-off-by: Chen Qi <Qi.Chen@windriver.com>
-[Rebased for v242]
-Signed-off-by: Andrej Valek <andrej.valek@siemens.com>
-[rebased for systemd 243]
-Signed-off-by: Scott Murray <scott.murray@konsulko.com>
-
----
- src/basic/cgroup-util.c | 12 ++++++------
- src/basic/namespace-util.c | 4 ++--
- src/basic/procfs-util.c | 4 ++--
- src/basic/sysctl-util.c | 2 +-
- src/basic/util.c | 2 +-
- src/binfmt/binfmt.c | 6 +++---
- src/core/cgroup.c | 2 +-
- src/core/main.c | 4 ++--
- src/core/smack-setup.c | 8 ++++----
- src/hibernate-resume/hibernate-resume.c | 2 +-
- src/home/homework.c | 2 +-
- src/libsystemd/sd-device/sd-device.c | 2 +-
- src/nspawn/nspawn-cgroup.c | 2 +-
- src/nspawn/nspawn.c | 6 +++---
- src/shared/binfmt-util.c | 2 +-
- src/shared/cgroup-setup.c | 4 ++--
- src/shared/coredump-util.c | 2 +-
- src/shared/smack-util.c | 2 +-
- src/sleep/sleep.c | 8 ++++----
- src/udev/udev-rules.c | 1 -
- src/vconsole/vconsole-setup.c | 2 +-
- 21 files changed, 39 insertions(+), 40 deletions(-)
-
-diff --git a/src/basic/cgroup-util.c b/src/basic/cgroup-util.c
-index f7dc6c8421..5f7a27c2c4 100644
---- a/src/basic/cgroup-util.c
-+++ b/src/basic/cgroup-util.c
-@@ -390,7 +390,7 @@ int cg_kill_kernel_sigkill(const char *controller, const char *path) {
- if (r < 0)
- return r;
-
-- r = write_string_file(killfile, "1", WRITE_STRING_FILE_DISABLE_BUFFER);
-+ r = write_string_file(killfile, "1", 0);
- if (r < 0)
- return r;
-
-@@ -803,7 +803,7 @@ int cg_install_release_agent(const char *controller, const char *agent) {
-
- sc = strstrip(contents);
- if (isempty(sc)) {
-- r = write_string_file(fs, agent, WRITE_STRING_FILE_DISABLE_BUFFER);
-+ r = write_string_file(fs, agent, 0);
- if (r < 0)
- return r;
- } else if (!path_equal(sc, agent))
-@@ -821,7 +821,7 @@ int cg_install_release_agent(const char *controller, const char *agent) {
-
- sc = strstrip(contents);
- if (streq(sc, "0")) {
-- r = write_string_file(fs, "1", WRITE_STRING_FILE_DISABLE_BUFFER);
-+ r = write_string_file(fs, "1", 0);
- if (r < 0)
- return r;
-
-@@ -848,7 +848,7 @@ int cg_uninstall_release_agent(const char *controller) {
- if (r < 0)
- return r;
-
-- r = write_string_file(fs, "0", WRITE_STRING_FILE_DISABLE_BUFFER);
-+ r = write_string_file(fs, "0", 0);
- if (r < 0)
- return r;
-
-@@ -858,7 +858,7 @@ int cg_uninstall_release_agent(const char *controller) {
- if (r < 0)
- return r;
-
-- r = write_string_file(fs, "", WRITE_STRING_FILE_DISABLE_BUFFER);
-+ r = write_string_file(fs, "", 0);
- if (r < 0)
- return r;
-
-@@ -1704,7 +1704,7 @@ int cg_set_attribute(const char *controller, const char *path, const char *attri
- if (r < 0)
- return r;
-
-- return write_string_file(p, value, WRITE_STRING_FILE_DISABLE_BUFFER);
-+ return write_string_file(p, value, 0);
- }
-
- int cg_get_attribute(const char *controller, const char *path, const char *attribute, char **ret) {
-diff --git a/src/basic/namespace-util.c b/src/basic/namespace-util.c
-index b9120a5ed0..78e460b75f 100644
---- a/src/basic/namespace-util.c
-+++ b/src/basic/namespace-util.c
-@@ -202,12 +202,12 @@ int userns_acquire(const char *uid_map, const char *gid_map) {
- freeze();
-
- xsprintf(path, "/proc/" PID_FMT "/uid_map", pid);
-- r = write_string_file(path, uid_map, WRITE_STRING_FILE_DISABLE_BUFFER);
-+ r = write_string_file(path, uid_map, 0);
- if (r < 0)
- return log_error_errno(r, "Failed to write UID map: %m");
-
- xsprintf(path, "/proc/" PID_FMT "/gid_map", pid);
-- r = write_string_file(path, gid_map, WRITE_STRING_FILE_DISABLE_BUFFER);
-+ r = write_string_file(path, gid_map, 0);
- if (r < 0)
- return log_error_errno(r, "Failed to write GID map: %m");
-
-diff --git a/src/basic/procfs-util.c b/src/basic/procfs-util.c
-index e485a0196b..8bff210356 100644
---- a/src/basic/procfs-util.c
-+++ b/src/basic/procfs-util.c
-@@ -64,13 +64,13 @@ int procfs_tasks_set_limit(uint64_t limit) {
- * decrease it, as threads-max is the much more relevant sysctl. */
- if (limit > pid_max-1) {
- sprintf(buffer, "%" PRIu64, limit+1); /* Add one, since PID 0 is not a valid PID */
-- r = write_string_file("/proc/sys/kernel/pid_max", buffer, WRITE_STRING_FILE_DISABLE_BUFFER);
-+ r = write_string_file("/proc/sys/kernel/pid_max", buffer, 0);
- if (r < 0)
- return r;
- }
-
- sprintf(buffer, "%" PRIu64, limit);
-- r = write_string_file("/proc/sys/kernel/threads-max", buffer, WRITE_STRING_FILE_DISABLE_BUFFER);
-+ r = write_string_file("/proc/sys/kernel/threads-max", buffer, 0);
- if (r < 0) {
- uint64_t threads_max;
-
-diff --git a/src/basic/sysctl-util.c b/src/basic/sysctl-util.c
-index b66a6622ae..8d1c93008a 100644
---- a/src/basic/sysctl-util.c
-+++ b/src/basic/sysctl-util.c
-@@ -58,7 +58,7 @@ int sysctl_write(const char *property, const char *value) {
-
- log_debug("Setting '%s' to '%s'", p, value);
-
-- return write_string_file(p, value, WRITE_STRING_FILE_VERIFY_ON_FAILURE | WRITE_STRING_FILE_DISABLE_BUFFER | WRITE_STRING_FILE_SUPPRESS_REDUNDANT_VIRTUAL);
-+ return write_string_file(p, value, WRITE_STRING_FILE_VERIFY_ON_FAILURE | WRITE_STRING_FILE_SUPPRESS_REDUNDANT_VIRTUAL);
- }
-
- int sysctl_writef(const char *property, const char *format, ...) {
-diff --git a/src/basic/util.c b/src/basic/util.c
-index d7ef382737..31c35118d1 100644
---- a/src/basic/util.c
-+++ b/src/basic/util.c
-@@ -168,7 +168,7 @@ void disable_coredumps(void) {
- if (detect_container() > 0)
- return;
-
-- r = write_string_file("/proc/sys/kernel/core_pattern", "|/bin/false", WRITE_STRING_FILE_DISABLE_BUFFER);
-+ r = write_string_file("/proc/sys/kernel/core_pattern", "|/bin/false", 0);
- if (r < 0)
- log_debug_errno(r, "Failed to turn off coredumps, ignoring: %m");
- }
-diff --git a/src/binfmt/binfmt.c b/src/binfmt/binfmt.c
-index 18231c2618..6c598d55c8 100644
---- a/src/binfmt/binfmt.c
-+++ b/src/binfmt/binfmt.c
-@@ -29,7 +29,7 @@ static bool arg_unregister = false;
-
- static int delete_rule(const char *rulename) {
- const char *fn = strjoina("/proc/sys/fs/binfmt_misc/", rulename);
-- return write_string_file(fn, "-1", WRITE_STRING_FILE_DISABLE_BUFFER);
-+ return write_string_file(fn, "-1", 0);
- }
-
- static int apply_rule(const char *filename, unsigned line, const char *rule) {
-@@ -59,7 +59,7 @@ static int apply_rule(const char *filename, unsigned line, const char *rule) {
- if (r >= 0)
- log_debug("%s:%u: Rule '%s' deleted.", filename, line, rulename);
-
-- r = write_string_file("/proc/sys/fs/binfmt_misc/register", rule, WRITE_STRING_FILE_DISABLE_BUFFER);
-+ r = write_string_file("/proc/sys/fs/binfmt_misc/register", rule, 0);
- if (r < 0)
- return log_error_errno(r, "%s:%u: Failed to add binary format '%s': %m",
- filename, line, rulename);
-@@ -226,7 +226,7 @@ static int run(int argc, char *argv[]) {
- }
-
- /* Flush out all rules */
-- r = write_string_file("/proc/sys/fs/binfmt_misc/status", "-1", WRITE_STRING_FILE_DISABLE_BUFFER);
-+ r = write_string_file("/proc/sys/fs/binfmt_misc/status", "-1", 0);
- if (r < 0)
- log_warning_errno(r, "Failed to flush binfmt_misc rules, ignoring: %m");
- else
-diff --git a/src/core/cgroup.c b/src/core/cgroup.c
-index f58de95a49..7a97ab6f99 100644
---- a/src/core/cgroup.c
-+++ b/src/core/cgroup.c
-@@ -4140,7 +4140,7 @@ int unit_cgroup_freezer_action(Unit *u, FreezerAction action) {
- else
- u->freezer_state = FREEZER_THAWING;
-
-- r = write_string_file(path, one_zero(action == FREEZER_FREEZE), WRITE_STRING_FILE_DISABLE_BUFFER);
-+ r = write_string_file(path, one_zero(action == FREEZER_FREEZE), 0);
- if (r < 0)
- return r;
-
-diff --git a/src/core/main.c b/src/core/main.c
-index 57aedb9b93..7ef36d22f5 100644
---- a/src/core/main.c
-+++ b/src/core/main.c
-@@ -1466,7 +1466,7 @@ static int bump_unix_max_dgram_qlen(void) {
- if (v >= DEFAULT_UNIX_MAX_DGRAM_QLEN)
- return 0;
-
-- r = write_string_filef("/proc/sys/net/unix/max_dgram_qlen", WRITE_STRING_FILE_DISABLE_BUFFER,
-+ r = write_string_filef("/proc/sys/net/unix/max_dgram_qlen", 0,
- "%lu", DEFAULT_UNIX_MAX_DGRAM_QLEN);
- if (r < 0)
- return log_full_errno(IN_SET(r, -EROFS, -EPERM, -EACCES) ? LOG_DEBUG : LOG_WARNING, r,
-@@ -1737,7 +1737,7 @@ static void initialize_core_pattern(bool skip_setup) {
- if (getpid_cached() != 1)
- return;
-
-- r = write_string_file("/proc/sys/kernel/core_pattern", arg_early_core_pattern, WRITE_STRING_FILE_DISABLE_BUFFER);
-+ r = write_string_file("/proc/sys/kernel/core_pattern", arg_early_core_pattern, 0);
- if (r < 0)
- log_warning_errno(r, "Failed to write '%s' to /proc/sys/kernel/core_pattern, ignoring: %m",
- arg_early_core_pattern);
-diff --git a/src/core/smack-setup.c b/src/core/smack-setup.c
-index f88cb80834..68055fb64a 100644
---- a/src/core/smack-setup.c
-+++ b/src/core/smack-setup.c
-@@ -320,17 +320,17 @@ int mac_smack_setup(bool *loaded_policy) {
- }
-
- #if HAVE_SMACK_RUN_LABEL
-- r = write_string_file("/proc/self/attr/current", SMACK_RUN_LABEL, WRITE_STRING_FILE_DISABLE_BUFFER);
-+ r = write_string_file("/proc/self/attr/current", SMACK_RUN_LABEL, 0);
- if (r < 0)
- log_warning_errno(r, "Failed to set SMACK label \"" SMACK_RUN_LABEL "\" on self: %m");
-- r = write_string_file("/sys/fs/smackfs/ambient", SMACK_RUN_LABEL, WRITE_STRING_FILE_DISABLE_BUFFER);
-+ r = write_string_file("/sys/fs/smackfs/ambient", SMACK_RUN_LABEL, 0);
- if (r < 0)
- log_warning_errno(r, "Failed to set SMACK ambient label \"" SMACK_RUN_LABEL "\": %m");
- r = write_string_file("/sys/fs/smackfs/netlabel",
-- "0.0.0.0/0 " SMACK_RUN_LABEL, WRITE_STRING_FILE_DISABLE_BUFFER);
-+ "0.0.0.0/0 " SMACK_RUN_LABEL, 0);
- if (r < 0)
- log_warning_errno(r, "Failed to set SMACK netlabel rule \"0.0.0.0/0 " SMACK_RUN_LABEL "\": %m");
-- r = write_string_file("/sys/fs/smackfs/netlabel", "127.0.0.1 -CIPSO", WRITE_STRING_FILE_DISABLE_BUFFER);
-+ r = write_string_file("/sys/fs/smackfs/netlabel", "127.0.0.1 -CIPSO", 0);
- if (r < 0)
- log_warning_errno(r, "Failed to set SMACK netlabel rule \"127.0.0.1 -CIPSO\": %m");
- #endif
-diff --git a/src/hibernate-resume/hibernate-resume.c b/src/hibernate-resume/hibernate-resume.c
-index 58e35e403e..1d0beb4008 100644
---- a/src/hibernate-resume/hibernate-resume.c
-+++ b/src/hibernate-resume/hibernate-resume.c
-@@ -45,7 +45,7 @@ int main(int argc, char *argv[]) {
- return EXIT_FAILURE;
- }
-
-- r = write_string_file("/sys/power/resume", major_minor, WRITE_STRING_FILE_DISABLE_BUFFER);
-+ r = write_string_file("/sys/power/resume", major_minor, 0);
- if (r < 0) {
- log_error_errno(r, "Failed to write '%s' to /sys/power/resume: %m", major_minor);
- return EXIT_FAILURE;
-diff --git a/src/home/homework.c b/src/home/homework.c
-index 9fdc74b775..9858a2b415 100644
---- a/src/home/homework.c
-+++ b/src/home/homework.c
-@@ -284,7 +284,7 @@ static void drop_caches_now(void) {
- * details. We write "2" into /proc/sys/vm/drop_caches to ensure dentries/inodes are flushed, but not
- * more. */
-
-- r = write_string_file("/proc/sys/vm/drop_caches", "2\n", WRITE_STRING_FILE_DISABLE_BUFFER);
-+ r = write_string_file("/proc/sys/vm/drop_caches", "2\n", 0);
- if (r < 0)
- log_warning_errno(r, "Failed to drop caches, ignoring: %m");
- else
-diff --git a/src/libsystemd/sd-device/sd-device.c b/src/libsystemd/sd-device/sd-device.c
-index b163a0fb6b..fd6c5301d6 100644
---- a/src/libsystemd/sd-device/sd-device.c
-+++ b/src/libsystemd/sd-device/sd-device.c
-@@ -2108,7 +2108,7 @@ _public_ int sd_device_set_sysattr_value(sd_device *device, const char *sysattr,
- if (!value)
- return -ENOMEM;
-
-- r = write_string_file(path, value, WRITE_STRING_FILE_DISABLE_BUFFER | WRITE_STRING_FILE_NOFOLLOW);
-+ r = write_string_file(path, value, 0 | WRITE_STRING_FILE_NOFOLLOW);
- if (r < 0) {
- /* On failure, clear cache entry, as we do not know how it fails. */
- device_remove_cached_sysattr_value(device, sysattr);
-diff --git a/src/nspawn/nspawn-cgroup.c b/src/nspawn/nspawn-cgroup.c
-index d472e80c03..c7780c7fc6 100644
---- a/src/nspawn/nspawn-cgroup.c
-+++ b/src/nspawn/nspawn-cgroup.c
-@@ -124,7 +124,7 @@ int sync_cgroup(pid_t pid, CGroupUnified unified_requested, uid_t uid_shift) {
- fn = strjoina(tree, cgroup, "/cgroup.procs");
-
- sprintf(pid_string, PID_FMT, pid);
-- r = write_string_file(fn, pid_string, WRITE_STRING_FILE_DISABLE_BUFFER|WRITE_STRING_FILE_MKDIR_0755);
-+ r = write_string_file(fn, pid_string, WRITE_STRING_FILE_MKDIR_0755);
- if (r < 0) {
- log_error_errno(r, "Failed to move process: %m");
- goto finish;
-diff --git a/src/nspawn/nspawn.c b/src/nspawn/nspawn.c
-index fb6af295b5..0d83f1e4d2 100644
---- a/src/nspawn/nspawn.c
-+++ b/src/nspawn/nspawn.c
-@@ -2759,7 +2759,7 @@ static int reset_audit_loginuid(void) {
- if (streq(p, "4294967295"))
- return 0;
-
-- r = write_string_file("/proc/self/loginuid", "4294967295", WRITE_STRING_FILE_DISABLE_BUFFER);
-+ r = write_string_file("/proc/self/loginuid", "4294967295", 0);
- if (r < 0) {
- log_error_errno(r,
- "Failed to reset audit login UID. This probably means that your kernel is too\n"
-@@ -4175,7 +4175,7 @@ static int setup_uid_map(
- return log_oom();
-
- xsprintf(uid_map, "/proc/" PID_FMT "/uid_map", pid);
-- r = write_string_file(uid_map, s, WRITE_STRING_FILE_DISABLE_BUFFER);
-+ r = write_string_file(uid_map, s, 0);
- if (r < 0)
- return log_error_errno(r, "Failed to write UID map: %m");
-
-@@ -4185,7 +4185,7 @@ static int setup_uid_map(
- return log_oom();
-
- xsprintf(uid_map, "/proc/" PID_FMT "/gid_map", pid);
-- r = write_string_file(uid_map, s, WRITE_STRING_FILE_DISABLE_BUFFER);
-+ r = write_string_file(uid_map, s, 0);
- if (r < 0)
- return log_error_errno(r, "Failed to write GID map: %m");
-
-diff --git a/src/shared/binfmt-util.c b/src/shared/binfmt-util.c
-index 724d7f27d9..dd725cff92 100644
---- a/src/shared/binfmt-util.c
-+++ b/src/shared/binfmt-util.c
-@@ -26,7 +26,7 @@ int disable_binfmt(void) {
- if (r < 0)
- return log_warning_errno(r, "Failed to determine whether binfmt_misc is mounted: %m");
-
-- r = write_string_file("/proc/sys/fs/binfmt_misc/status", "-1", WRITE_STRING_FILE_DISABLE_BUFFER);
-+ r = write_string_file("/proc/sys/fs/binfmt_misc/status", "-1", 0);
- if (r < 0)
- return log_warning_errno(r, "Failed to unregister binfmt_misc entries: %m");
-
-diff --git a/src/shared/cgroup-setup.c b/src/shared/cgroup-setup.c
-index 8bda66ca36..4e1e9943ee 100644
---- a/src/shared/cgroup-setup.c
-+++ b/src/shared/cgroup-setup.c
-@@ -345,7 +345,7 @@ int cg_attach(const char *controller, const char *path, pid_t pid) {
-
- xsprintf(c, PID_FMT "\n", pid);
-
-- r = write_string_file(fs, c, WRITE_STRING_FILE_DISABLE_BUFFER);
-+ r = write_string_file(fs, c, 0);
- if (r < 0)
- return r;
-
-@@ -877,7 +877,7 @@ int cg_enable_everywhere(
- return log_debug_errno(errno, "Failed to open cgroup.subtree_control file of %s: %m", p);
- }
-
-- r = write_string_stream(f, s, WRITE_STRING_FILE_DISABLE_BUFFER);
-+ r = write_string_stream(f, s, 0);
- if (r < 0) {
- log_debug_errno(r, "Failed to %s controller %s for %s (%s): %m",
- FLAGS_SET(mask, bit) ? "enable" : "disable", n, p, fs);
-diff --git a/src/shared/coredump-util.c b/src/shared/coredump-util.c
-index a0b648bf79..13f921390d 100644
---- a/src/shared/coredump-util.c
-+++ b/src/shared/coredump-util.c
-@@ -70,5 +70,5 @@ int set_coredump_filter(uint64_t value) {
- sprintf(t, "0x%"PRIx64, value);
-
- return write_string_file("/proc/self/coredump_filter", t,
-- WRITE_STRING_FILE_VERIFY_ON_FAILURE|WRITE_STRING_FILE_DISABLE_BUFFER);
-+ WRITE_STRING_FILE_VERIFY_ON_FAILURE);
- }
-diff --git a/src/shared/smack-util.c b/src/shared/smack-util.c
-index 0df1778cb2..3b9a0c934e 100644
---- a/src/shared/smack-util.c
-+++ b/src/shared/smack-util.c
-@@ -114,7 +114,7 @@ int mac_smack_apply_pid(pid_t pid, const char *label) {
- return 0;
-
- p = procfs_file_alloca(pid, "attr/current");
-- r = write_string_file(p, label, WRITE_STRING_FILE_DISABLE_BUFFER);
-+ r = write_string_file(p, label, 0);
- if (r < 0)
- return r;
-
-diff --git a/src/sleep/sleep.c b/src/sleep/sleep.c
-index 7064f3a905..8f2a7d9da2 100644
---- a/src/sleep/sleep.c
-+++ b/src/sleep/sleep.c
-@@ -46,7 +46,7 @@ static int write_hibernate_location_info(const HibernateLocation *hibernate_loca
- assert(hibernate_location->swap);
-
- xsprintf(resume_str, "%u:%u", major(hibernate_location->devno), minor(hibernate_location->devno));
-- r = write_string_file("/sys/power/resume", resume_str, WRITE_STRING_FILE_DISABLE_BUFFER);
-+ r = write_string_file("/sys/power/resume", resume_str, 0);
- if (r < 0)
- return log_debug_errno(r, "Failed to write partition device to /sys/power/resume for '%s': '%s': %m",
- hibernate_location->swap->device, resume_str);
-@@ -73,7 +73,7 @@ static int write_hibernate_location_info(const HibernateLocation *hibernate_loca
- }
-
- xsprintf(offset_str, "%" PRIu64, hibernate_location->offset);
-- r = write_string_file("/sys/power/resume_offset", offset_str, WRITE_STRING_FILE_DISABLE_BUFFER);
-+ r = write_string_file("/sys/power/resume_offset", offset_str, 0);
- if (r < 0)
- return log_debug_errno(r, "Failed to write swap file offset to /sys/power/resume_offset for '%s': '%s': %m",
- hibernate_location->swap->device, offset_str);
-@@ -90,7 +90,7 @@ static int write_mode(char **modes) {
- STRV_FOREACH(mode, modes) {
- int k;
-
-- k = write_string_file("/sys/power/disk", *mode, WRITE_STRING_FILE_DISABLE_BUFFER);
-+ k = write_string_file("/sys/power/disk", *mode, 0);
- if (k >= 0)
- return 0;
-
-@@ -112,7 +112,7 @@ static int write_state(FILE **f, char **states) {
- STRV_FOREACH(state, states) {
- int k;
-
-- k = write_string_stream(*f, *state, WRITE_STRING_FILE_DISABLE_BUFFER);
-+ k = write_string_stream(*f, *state, 0);
- if (k >= 0)
- return 0;
- log_debug_errno(k, "Failed to write '%s' to /sys/power/state: %m", *state);
-diff --git a/src/udev/udev-rules.c b/src/udev/udev-rules.c
-index 0089833e3f..0a6a3abbb4 100644
---- a/src/udev/udev-rules.c
-+++ b/src/udev/udev-rules.c
-@@ -2181,7 +2181,6 @@ static int udev_rule_apply_token_to_event(
- log_rule_debug(dev, rules, "ATTR '%s' writing '%s'", buf, value);
- r = write_string_file(buf, value,
- WRITE_STRING_FILE_VERIFY_ON_FAILURE |
-- WRITE_STRING_FILE_DISABLE_BUFFER |
- WRITE_STRING_FILE_AVOID_NEWLINE |
- WRITE_STRING_FILE_VERIFY_IGNORE_NEWLINE);
- if (r < 0)
-diff --git a/src/vconsole/vconsole-setup.c b/src/vconsole/vconsole-setup.c
-index 50930d4af3..5efd5d3728 100644
---- a/src/vconsole/vconsole-setup.c
-+++ b/src/vconsole/vconsole-setup.c
-@@ -108,7 +108,7 @@ static int toggle_utf8_vc(const char *name, int fd, bool utf8) {
- static int toggle_utf8_sysfs(bool utf8) {
- int r;
-
-- r = write_string_file("/sys/module/vt/parameters/default_utf8", one_zero(utf8), WRITE_STRING_FILE_DISABLE_BUFFER);
-+ r = write_string_file("/sys/module/vt/parameters/default_utf8", one_zero(utf8), 0);
- if (r < 0)
- return log_warning_errno(r, "Failed to %s sysfs UTF-8 flag: %m", enable_disable(utf8));
-
diff --git a/meta/recipes-core/systemd/systemd/0025-Handle-__cpu_mask-usage.patch b/meta/recipes-core/systemd/systemd/0025-Handle-__cpu_mask-usage.patch
deleted file mode 100644
index 6981d70af0..0000000000
--- a/meta/recipes-core/systemd/systemd/0025-Handle-__cpu_mask-usage.patch
+++ /dev/null
@@ -1,58 +0,0 @@
-From 8871f78c559f37169c0cfaf20b0af1dbec0399af Mon Sep 17 00:00:00 2001
-From: Scott Murray <scott.murray@konsulko.com>
-Date: Fri, 13 Sep 2019 19:26:27 -0400
-Subject: [PATCH] Handle __cpu_mask usage
-
-Fixes errors:
-
-src/test/test-cpu-set-util.c:18:54: error: '__cpu_mask' undeclared (first use in this function)
-src/test/test-sizeof.c:73:14: error: '__cpu_mask' undeclared (first use in this function)
-
-__cpu_mask is an internal type of glibc's cpu_set implementation, not
-part of the POSIX definition, which is problematic when building with
-musl, which does not define a matching type. From inspection of musl's
-sched.h, however, it is clear that the corresponding type would be
-unsigned long, which does match glibc's actual __CPU_MASK_TYPE. So,
-add a typedef to cpu-set-util.h defining __cpu_mask appropriately.
-
-Upstream-Status: Inappropriate [musl specific]
-
-Signed-off-by: Scott Murray <scott.murray@konsulko.com>
-
----
- src/shared/cpu-set-util.h | 2 ++
- src/test/test-sizeof.c | 2 +-
- 2 files changed, 3 insertions(+), 1 deletion(-)
-
-diff --git a/src/shared/cpu-set-util.h b/src/shared/cpu-set-util.h
-index 3c63a58826..4c2d4347fc 100644
---- a/src/shared/cpu-set-util.h
-+++ b/src/shared/cpu-set-util.h
-@@ -6,6 +6,8 @@
- #include "macro.h"
- #include "missing_syscall.h"
-
-+typedef unsigned long __cpu_mask;
-+
- /* This wraps the libc interface with a variable to keep the allocated size. */
- typedef struct CPUSet {
- cpu_set_t *set;
-diff --git a/src/test/test-sizeof.c b/src/test/test-sizeof.c
-index 602772227e..7f1ed19d77 100644
---- a/src/test/test-sizeof.c
-+++ b/src/test/test-sizeof.c
-@@ -1,6 +1,5 @@
- /* SPDX-License-Identifier: LGPL-2.1-or-later */
-
--#include <sched.h>
- #include <stdio.h>
- #include <string.h>
- #include <sys/types.h>
-@@ -10,6 +9,7 @@
- #include <float.h>
-
- #include "time-util.h"
-+#include "cpu-set-util.h"
-
- /* Print information about various types. Useful when diagnosing
- * gcc diagnostics on an unfamiliar architecture. */
diff --git a/meta/recipes-core/systemd/systemd/0026-Handle-missing-gshadow.patch b/meta/recipes-core/systemd/systemd/0026-Handle-missing-gshadow.patch
deleted file mode 100644
index 2c56838644..0000000000
--- a/meta/recipes-core/systemd/systemd/0026-Handle-missing-gshadow.patch
+++ /dev/null
@@ -1,171 +0,0 @@
-From ec519727bb1ceda6e7787ccf86237a6aad07137c Mon Sep 17 00:00:00 2001
-From: Alex Kiernan <alex.kiernan@gmail.com>
-Date: Tue, 10 Mar 2020 11:05:20 +0000
-Subject: [PATCH] Handle missing gshadow
-
-gshadow usage is now present in the userdb code. Mask all uses of it to
-allow compilation on musl
-
-Upstream-Status: Inappropriate [musl specific]
-Signed-off-by: Alex Kiernan <alex.kiernan@gmail.com>
-[Rebased for v247]
-Signed-off-by: Luca Boccassi <luca.boccassi@microsoft.com>
-
----
- src/shared/user-record-nss.c | 20 ++++++++++++++++++++
- src/shared/user-record-nss.h | 4 ++++
- src/shared/userdb.c | 7 ++++++-
- 3 files changed, 30 insertions(+), 1 deletion(-)
-
-diff --git a/src/shared/user-record-nss.c b/src/shared/user-record-nss.c
-index 88b8fc2f8f..a819d41bac 100644
---- a/src/shared/user-record-nss.c
-+++ b/src/shared/user-record-nss.c
-@@ -331,8 +331,10 @@ int nss_group_to_group_record(
- if (isempty(grp->gr_name))
- return -EINVAL;
-
-+#if ENABLE_GSHADOW
- if (sgrp && !streq_ptr(sgrp->sg_namp, grp->gr_name))
- return -EINVAL;
-+#endif
-
- g = group_record_new();
- if (!g)
-@@ -348,6 +350,7 @@ int nss_group_to_group_record(
-
- g->gid = grp->gr_gid;
-
-+#if ENABLE_GSHADOW
- if (sgrp) {
- if (looks_like_hashed_password(utf8_only(sgrp->sg_passwd))) {
- g->hashed_password = strv_new(sgrp->sg_passwd);
-@@ -363,6 +366,7 @@ int nss_group_to_group_record(
- if (r < 0)
- return r;
- }
-+#endif
-
- r = json_build(&g->json, JSON_BUILD_OBJECT(
- JSON_BUILD_PAIR("groupName", JSON_BUILD_STRING(g->group_name)),
-@@ -388,6 +392,7 @@ int nss_sgrp_for_group(const struct group *grp, struct sgrp *ret_sgrp, char **re
- assert(ret_sgrp);
- assert(ret_buffer);
-
-+#if ENABLE_GSHADOW
- for (;;) {
- _cleanup_free_ char *buf = NULL;
- struct sgrp sgrp, *result;
-@@ -416,6 +421,9 @@ int nss_sgrp_for_group(const struct group *grp, struct sgrp *ret_sgrp, char **re
- buflen *= 2;
- buf = mfree(buf);
- }
-+#else
-+ return -ESRCH;
-+#endif
- }
-
- int nss_group_record_by_name(
-@@ -427,7 +435,9 @@ int nss_group_record_by_name(
- struct group grp, *result;
- bool incomplete = false;
- size_t buflen = 4096;
-+#if ENABLE_GSHADOW
- struct sgrp sgrp, *sresult = NULL;
-+#endif
- int r;
-
- assert(name);
-@@ -457,6 +467,7 @@ int nss_group_record_by_name(
- buf = mfree(buf);
- }
-
-+#if ENABLE_GSHADOW
- if (with_shadow) {
- r = nss_sgrp_for_group(result, &sgrp, &sbuf);
- if (r < 0) {
-@@ -468,6 +479,9 @@ int nss_group_record_by_name(
- incomplete = true;
-
- r = nss_group_to_group_record(result, sresult, ret);
-+#else
-+ r = nss_group_to_group_record(result, NULL, ret);
-+#endif
- if (r < 0)
- return r;
-
-@@ -484,7 +498,9 @@ int nss_group_record_by_gid(
- struct group grp, *result;
- bool incomplete = false;
- size_t buflen = 4096;
-+#if ENABLE_GSHADOW
- struct sgrp sgrp, *sresult = NULL;
-+#endif
- int r;
-
- assert(ret);
-@@ -512,6 +528,7 @@ int nss_group_record_by_gid(
- buf = mfree(buf);
- }
-
-+#if ENABLE_GSHADOW
- if (with_shadow) {
- r = nss_sgrp_for_group(result, &sgrp, &sbuf);
- if (r < 0) {
-@@ -523,6 +540,9 @@ int nss_group_record_by_gid(
- incomplete = true;
-
- r = nss_group_to_group_record(result, sresult, ret);
-+#else
-+ r = nss_group_to_group_record(result, NULL, ret);
-+#endif
- if (r < 0)
- return r;
-
-diff --git a/src/shared/user-record-nss.h b/src/shared/user-record-nss.h
-index 22ab04d6ee..4e52e7a911 100644
---- a/src/shared/user-record-nss.h
-+++ b/src/shared/user-record-nss.h
-@@ -2,7 +2,11 @@
- #pragma once
-
- #include <grp.h>
-+#if ENABLE_GSHADOW
- #include <gshadow.h>
-+#else
-+struct sgrp;
-+#endif
- #include <pwd.h>
- #include <shadow.h>
-
-diff --git a/src/shared/userdb.c b/src/shared/userdb.c
-index 0eddd382e6..d506b8e263 100644
---- a/src/shared/userdb.c
-+++ b/src/shared/userdb.c
-@@ -1046,13 +1046,15 @@ int groupdb_iterator_get(UserDBIterator *iterator, GroupRecord **ret) {
- if (gr) {
- _cleanup_free_ char *buffer = NULL;
- bool incomplete = false;
-+#if ENABLE_GSHADOW
- struct sgrp sgrp;
--
-+#endif
- if (streq_ptr(gr->gr_name, "root"))
- iterator->synthesize_root = false;
- if (gr->gr_gid == GID_NOBODY)
- iterator->synthesize_nobody = false;
-
-+#if ENABLE_GSHADOW
- if (!FLAGS_SET(iterator->flags, USERDB_SUPPRESS_SHADOW)) {
- r = nss_sgrp_for_group(gr, &sgrp, &buffer);
- if (r < 0) {
-@@ -1065,6 +1067,9 @@ int groupdb_iterator_get(UserDBIterator *iterator, GroupRecord **ret) {
- }
-
- r = nss_group_to_group_record(gr, r >= 0 ? &sgrp : NULL, ret);
-+#else
-+ r = nss_group_to_group_record(gr, NULL, ret);
-+#endif
- if (r < 0)
- return r;
-
diff --git a/meta/recipes-core/systemd/systemd/0028-missing_syscall.h-Define-MIPS-ABI-defines-for-musl.patch b/meta/recipes-core/systemd/systemd/0028-missing_syscall.h-Define-MIPS-ABI-defines-for-musl.patch
deleted file mode 100644
index 6c97a272e2..0000000000
--- a/meta/recipes-core/systemd/systemd/0028-missing_syscall.h-Define-MIPS-ABI-defines-for-musl.patch
+++ /dev/null
@@ -1,47 +0,0 @@
-From 754a16eeb255c06dbdd4655632276573f0f075ec Mon Sep 17 00:00:00 2001
-From: Khem Raj <raj.khem@gmail.com>
-Date: Mon, 12 Apr 2021 23:44:53 -0700
-Subject: [PATCH] missing_syscall.h: Define MIPS ABI defines for musl
-
-musl does not define _MIPS_SIM_ABI32, _MIPS_SIM_NABI32, _MIPS_SIM_ABI64
-unlike glibc where these are provided by libc headers, therefore define
-them here in case they are undefined
-
-Upstream-Status: Pending
-
-Signed-off-by: Khem Raj <raj.khem@gmail.com>
-
----
- src/basic/missing_syscall.h | 6 ++++++
- src/shared/base-filesystem.c | 1 +
- 2 files changed, 7 insertions(+)
-
-diff --git a/src/basic/missing_syscall.h b/src/basic/missing_syscall.h
-index 793d111c55..9665848b88 100644
---- a/src/basic/missing_syscall.h
-+++ b/src/basic/missing_syscall.h
-@@ -20,6 +20,12 @@
- #include <asm/sgidefs.h>
- #endif
-
-+#ifndef _MIPS_SIM_ABI32
-+#define _MIPS_SIM_ABI32 1
-+#define _MIPS_SIM_NABI32 2
-+#define _MIPS_SIM_ABI64 3
-+#endif
-+
- #include "macro.h"
- #include "missing_keyctl.h"
- #include "missing_stat.h"
-diff --git a/src/shared/base-filesystem.c b/src/shared/base-filesystem.c
-index d396bc99fe..7e9c0c3412 100644
---- a/src/shared/base-filesystem.c
-+++ b/src/shared/base-filesystem.c
-@@ -19,6 +19,7 @@
- #include "string-util.h"
- #include "umask-util.h"
- #include "user-util.h"
-+#include "missing_syscall.h"
-
- typedef struct BaseFilesystem {
- const char *dir; /* directory or symlink to create */
diff --git a/meta/recipes-core/systemd/systemd_250.5.bb b/meta/recipes-core/systemd/systemd_250.5.bb
deleted file mode 100644
index c121fb763e..0000000000
--- a/meta/recipes-core/systemd/systemd_250.5.bb
+++ /dev/null
@@ -1,792 +0,0 @@
-require systemd.inc
-
-PROVIDES = "udev"
-
-PE = "1"
-
-DEPENDS = "intltool-native gperf-native libcap util-linux python3-jinja2-native"
-
-SECTION = "base/shell"
-
-inherit useradd pkgconfig meson perlnative update-rc.d update-alternatives qemu systemd gettext bash-completion manpages features_check
-
-# As this recipe builds udev, respect systemd being in DISTRO_FEATURES so
-# that we don't build both udev and systemd in world builds.
-REQUIRED_DISTRO_FEATURES = "systemd"
-
-SRC_URI += "file://touchscreen.rules \
- file://00-create-volatile.conf \
- ${@bb.utils.contains('PACKAGECONFIG', 'polkit_hostnamed_fallback', 'file://org.freedesktop.hostname1_no_polkit.conf', '', d)} \
- ${@bb.utils.contains('PACKAGECONFIG', 'polkit_hostnamed_fallback', 'file://00-hostnamed-network-user.conf', '', d)} \
- file://init \
- file://99-default.preset \
- file://systemd-pager.sh \
- file://0001-binfmt-Don-t-install-dependency-links-at-install-tim.patch \
- file://0003-implment-systemd-sysv-install-for-OE.patch \
- file://0001-systemd.pc.in-use-ROOTPREFIX-without-suffixed-slash.patch \
- file://0001-test-parse-argument-Include-signal.h.patch \
- file://0001-resolve-Use-sockaddr-pointer-type-for-bind.patch \
- "
-
-# patches needed by musl
-SRC_URI:append:libc-musl = " ${SRC_URI_MUSL}"
-SRC_URI_MUSL = "\
- file://0002-don-t-use-glibc-specific-qsort_r.patch \
- file://0003-missing_type.h-add-__compare_fn_t-and-comparison_fn_.patch \
- file://0004-add-fallback-parse_printf_format-implementation.patch \
- file://0005-src-basic-missing.h-check-for-missing-strndupa.patch \
- file://0007-don-t-fail-if-GLOB_BRACE-and-GLOB_ALTDIRFUNC-is-not-.patch \
- file://0008-add-missing-FTW_-macros-for-musl.patch \
- file://0009-fix-missing-of-__register_atfork-for-non-glibc-build.patch \
- file://0010-Use-uintmax_t-for-handling-rlim_t.patch \
- file://0011-test-sizeof.c-Disable-tests-for-missing-typedefs-in-.patch \
- file://0012-don-t-pass-AT_SYMLINK_NOFOLLOW-flag-to-faccessat.patch \
- file://0013-Define-glibc-compatible-basename-for-non-glibc-syste.patch \
- file://0014-Do-not-disable-buffering-when-writing-to-oom_score_a.patch \
- file://0015-distinguish-XSI-compliant-strerror_r-from-GNU-specif.patch \
- file://0018-avoid-redefinition-of-prctl_mm_map-structure.patch \
- file://0021-test-json.c-define-M_PIl.patch \
- file://0022-do-not-disable-buffer-in-writing-files.patch \
- file://0025-Handle-__cpu_mask-usage.patch \
- file://0026-Handle-missing-gshadow.patch \
- file://0028-missing_syscall.h-Define-MIPS-ABI-defines-for-musl.patch \
- file://0001-pass-correct-parameters-to-getdents64.patch \
- file://0002-Add-sys-stat.h-for-S_IFDIR.patch \
- file://0001-Adjust-for-musl-headers.patch \
- "
-
-PAM_PLUGINS = " \
- pam-plugin-unix \
- pam-plugin-loginuid \
- pam-plugin-keyinit \
-"
-
-PACKAGECONFIG ??= " \
- ${@bb.utils.filter('DISTRO_FEATURES', 'acl audit efi ldconfig pam selinux smack usrmerge polkit seccomp', d)} \
- ${@bb.utils.contains('DISTRO_FEATURES', 'wifi', 'rfkill', '', d)} \
- ${@bb.utils.contains('DISTRO_FEATURES', 'x11', 'xkbcommon', '', d)} \
- ${@bb.utils.contains('DISTRO_FEATURES', 'sysvinit', '', 'link-udev-shared', d)} \
- backlight \
- binfmt \
- gshadow \
- hibernate \
- hostnamed \
- idn \
- ima \
- kmod \
- localed \
- logind \
- machined \
- myhostname \
- networkd \
- nss \
- nss-mymachines \
- nss-resolve \
- quotacheck \
- randomseed \
- resolved \
- set-time-epoch \
- sysusers \
- sysvinit \
- timedated \
- timesyncd \
- userdb \
- utmp \
- vconsole \
- wheel-group \
- zstd \
-"
-
-PACKAGECONFIG:remove:libc-musl = " \
- gshadow \
- idn \
- localed \
- myhostname \
- nss \
- nss-mymachines \
- nss-resolve \
- sysusers \
- userdb \
- utmp \
-"
-
-# https://github.com/seccomp/libseccomp/issues/347
-PACKAGECONFIG:remove:mipsarch = "seccomp"
-
-CFLAGS:append:libc-musl = " -D__UAPI_DEF_ETHHDR=0 "
-
-# Some of the dependencies are weak-style recommends - if not available at runtime,
-# systemd won't fail but the library-related feature will be skipped with a warning.
-
-# Use the upstream systemd serial-getty@.service and rely on
-# systemd-getty-generator instead of using the OE-core specific
-# systemd-serialgetty.bb - not enabled by default.
-PACKAGECONFIG[serial-getty-generator] = ""
-
-PACKAGECONFIG[acl] = "-Dacl=true,-Dacl=false,acl"
-PACKAGECONFIG[audit] = "-Daudit=true,-Daudit=false,audit"
-PACKAGECONFIG[backlight] = "-Dbacklight=true,-Dbacklight=false"
-PACKAGECONFIG[binfmt] = "-Dbinfmt=true,-Dbinfmt=false"
-PACKAGECONFIG[bzip2] = "-Dbzip2=true,-Dbzip2=false,bzip2"
-PACKAGECONFIG[cgroupv2] = "-Ddefault-hierarchy=unified,-Ddefault-hierarchy=hybrid"
-PACKAGECONFIG[coredump] = "-Dcoredump=true,-Dcoredump=false"
-PACKAGECONFIG[cryptsetup] = "-Dlibcryptsetup=true,-Dlibcryptsetup=false,cryptsetup,,cryptsetup"
-PACKAGECONFIG[tpm2] = "-Dtpm2=true,-Dtpm2=false,tpm2-tss,tpm2-tss libtss2 libtss2-tcti-device"
-PACKAGECONFIG[dbus] = "-Ddbus=true,-Ddbus=false,dbus"
-PACKAGECONFIG[efi] = "-Defi=true,-Defi=false"
-PACKAGECONFIG[gnu-efi] = "-Dgnu-efi=true -Defi-libdir=${STAGING_LIBDIR} -Defi-includedir=${STAGING_INCDIR}/efi,-Dgnu-efi=false,gnu-efi"
-PACKAGECONFIG[elfutils] = "-Delfutils=true,-Delfutils=false,elfutils"
-PACKAGECONFIG[firstboot] = "-Dfirstboot=true,-Dfirstboot=false"
-PACKAGECONFIG[repart] = "-Drepart=true,-Drepart=false"
-PACKAGECONFIG[homed] = "-Dhomed=true,-Dhomed=false"
-# Sign the journal for anti-tampering
-PACKAGECONFIG[gcrypt] = "-Dgcrypt=true,-Dgcrypt=false,libgcrypt"
-PACKAGECONFIG[gnutls] = "-Dgnutls=true,-Dgnutls=false,gnutls"
-PACKAGECONFIG[gshadow] = "-Dgshadow=true,-Dgshadow=false"
-PACKAGECONFIG[hibernate] = "-Dhibernate=true,-Dhibernate=false"
-PACKAGECONFIG[hostnamed] = "-Dhostnamed=true,-Dhostnamed=false"
-PACKAGECONFIG[idn] = "-Didn=true,-Didn=false"
-PACKAGECONFIG[ima] = "-Dima=true,-Dima=false"
-# importd requires journal-upload/xz/zlib/bzip2/gcrypt
-PACKAGECONFIG[importd] = "-Dimportd=true,-Dimportd=false"
-# Update NAT firewall rules
-PACKAGECONFIG[iptc] = "-Dlibiptc=true,-Dlibiptc=false,iptables"
-PACKAGECONFIG[journal-upload] = "-Dlibcurl=true,-Dlibcurl=false,curl"
-PACKAGECONFIG[kmod] = "-Dkmod=true,-Dkmod=false,kmod"
-PACKAGECONFIG[ldconfig] = "-Dldconfig=true,-Dldconfig=false,,ldconfig"
-PACKAGECONFIG[libidn] = "-Dlibidn=true,-Dlibidn=false,libidn,,libidn"
-PACKAGECONFIG[libidn2] = "-Dlibidn2=true,-Dlibidn2=false,libidn2,,libidn2"
-# Link udev shared with systemd helper library.
-# If enabled the udev package depends on the systemd package (which has the needed shared library).
-PACKAGECONFIG[link-udev-shared] = "-Dlink-udev-shared=true,-Dlink-udev-shared=false"
-PACKAGECONFIG[localed] = "-Dlocaled=true,-Dlocaled=false"
-PACKAGECONFIG[logind] = "-Dlogind=true,-Dlogind=false"
-PACKAGECONFIG[lz4] = "-Dlz4=true,-Dlz4=false,lz4"
-PACKAGECONFIG[machined] = "-Dmachined=true,-Dmachined=false"
-PACKAGECONFIG[manpages] = "-Dman=true,-Dman=false,libxslt-native xmlto-native docbook-xml-dtd4-native docbook-xsl-stylesheets-native"
-PACKAGECONFIG[microhttpd] = "-Dmicrohttpd=true,-Dmicrohttpd=false,libmicrohttpd"
-PACKAGECONFIG[myhostname] = "-Dnss-myhostname=true,-Dnss-myhostname=false,,libnss-myhostname"
-PACKAGECONFIG[networkd] = "-Dnetworkd=true,-Dnetworkd=false"
-PACKAGECONFIG[nss] = "-Dnss-systemd=true,-Dnss-systemd=false"
-PACKAGECONFIG[nss-mymachines] = "-Dnss-mymachines=true,-Dnss-mymachines=false"
-PACKAGECONFIG[nss-resolve] = "-Dnss-resolve=true,-Dnss-resolve=false"
-PACKAGECONFIG[oomd] = "-Doomd=true,-Doomd=false"
-PACKAGECONFIG[openssl] = "-Dopenssl=true,-Dopenssl=false,openssl"
-PACKAGECONFIG[pam] = "-Dpam=true,-Dpam=false,libpam,${PAM_PLUGINS}"
-PACKAGECONFIG[pcre2] = "-Dpcre2=true,-Dpcre2=false,libpcre2"
-PACKAGECONFIG[polkit] = "-Dpolkit=true,-Dpolkit=false"
-# If polkit is disabled and networkd+hostnamed are in use, enabling this option and
-# using dbus-broker will allow networkd to be authorized to change the
-# hostname without acquiring additional privileges
-PACKAGECONFIG[polkit_hostnamed_fallback] = ",,,,dbus-broker,polkit"
-PACKAGECONFIG[portabled] = "-Dportabled=true,-Dportabled=false"
-PACKAGECONFIG[qrencode] = "-Dqrencode=true,-Dqrencode=false,qrencode,,qrencode"
-PACKAGECONFIG[quotacheck] = "-Dquotacheck=true,-Dquotacheck=false"
-PACKAGECONFIG[randomseed] = "-Drandomseed=true,-Drandomseed=false"
-PACKAGECONFIG[resolved] = "-Dresolve=true,-Dresolve=false"
-PACKAGECONFIG[rfkill] = "-Drfkill=true,-Drfkill=false"
-PACKAGECONFIG[seccomp] = "-Dseccomp=true,-Dseccomp=false,libseccomp"
-PACKAGECONFIG[selinux] = "-Dselinux=true,-Dselinux=false,libselinux,initscripts-sushell"
-PACKAGECONFIG[smack] = "-Dsmack=true,-Dsmack=false"
-PACKAGECONFIG[sysusers] = "-Dsysusers=true,-Dsysusers=false"
-PACKAGECONFIG[sysvinit] = "-Dsysvinit-path=${sysconfdir}/init.d -Dsysvrcnd-path=${sysconfdir},-Dsysvinit-path= -Dsysvrcnd-path=,,systemd-compat-units update-rc.d"
-# When enabled use reproducble build timestamp if set as time epoch,
-# or build time if not. When disabled, time epoch is unset.
-def build_epoch(d):
- epoch = d.getVar('SOURCE_DATE_EPOCH') or "-1"
- return '-Dtime-epoch=%d' % int(epoch)
-PACKAGECONFIG[set-time-epoch] = "${@build_epoch(d)},-Dtime-epoch=0"
-PACKAGECONFIG[timedated] = "-Dtimedated=true,-Dtimedated=false"
-PACKAGECONFIG[timesyncd] = "-Dtimesyncd=true,-Dtimesyncd=false"
-PACKAGECONFIG[usrmerge] = "-Dsplit-usr=false,-Dsplit-usr=true"
-PACKAGECONFIG[sbinmerge] = "-Dsplit-bin=false,-Dsplit-bin=true"
-PACKAGECONFIG[userdb] = "-Duserdb=true,-Duserdb=false"
-PACKAGECONFIG[utmp] = "-Dutmp=true,-Dutmp=false"
-PACKAGECONFIG[valgrind] = "-DVALGRIND=1,,valgrind"
-PACKAGECONFIG[vconsole] = "-Dvconsole=true,-Dvconsole=false,,${PN}-vconsole-setup"
-PACKAGECONFIG[wheel-group] = "-Dwheel-group=true, -Dwheel-group=false"
-PACKAGECONFIG[xdg-autostart] = "-Dxdg-autostart=true,-Dxdg-autostart=false"
-# Verify keymaps on locale change
-PACKAGECONFIG[xkbcommon] = "-Dxkbcommon=true,-Dxkbcommon=false,libxkbcommon"
-PACKAGECONFIG[xz] = "-Dxz=true,-Dxz=false,xz"
-PACKAGECONFIG[zlib] = "-Dzlib=true,-Dzlib=false,zlib"
-PACKAGECONFIG[zstd] = "-Dzstd=true,-Dzstd=false,zstd"
-
-# Helper variables to clarify locations. This mirrors the logic in systemd's
-# build system.
-rootprefix ?= "${root_prefix}"
-rootlibdir ?= "${base_libdir}"
-rootlibexecdir = "${rootprefix}/lib"
-
-EXTRA_OEMESON += "-Dnobody-user=nobody \
- -Dnobody-group=nobody \
- -Drootlibdir=${rootlibdir} \
- -Drootprefix=${rootprefix} \
- -Ddefault-locale=C \
- -Dmode=release \
- -Dsystem-alloc-uid-min=101 \
- -Dsystem-uid-max=999 \
- -Dsystem-alloc-gid-min=101 \
- -Dsystem-gid-max=999 \
- "
-
-# Hardcode target binary paths to avoid using paths from sysroot
-EXTRA_OEMESON += "-Dkexec-path=${sbindir}/kexec \
- -Dkmod-path=${base_bindir}/kmod \
- -Dmount-path=${base_bindir}/mount \
- -Dquotacheck-path=${sbindir}/quotacheck \
- -Dquotaon-path=${sbindir}/quotaon \
- -Dsulogin-path=${base_sbindir}/sulogin \
- -Dnologin-path=${base_sbindir}/nologin \
- -Dumount-path=${base_bindir}/umount"
-
-do_install() {
- meson_do_install
- install -d ${D}/${base_sbindir}
- if ${@bb.utils.contains('PACKAGECONFIG', 'serial-getty-generator', 'false', 'true', d)}; then
- # Provided by a separate recipe
- rm ${D}${systemd_system_unitdir}/serial-getty* -f
- fi
-
- # Provide support for initramfs
- [ ! -e ${D}/init ] && ln -s ${rootlibexecdir}/systemd/systemd ${D}/init
- [ ! -e ${D}/${base_sbindir}/udevd ] && ln -s ${rootlibexecdir}/systemd/systemd-udevd ${D}/${base_sbindir}/udevd
-
- install -d ${D}${sysconfdir}/udev/rules.d/
- install -d ${D}${sysconfdir}/tmpfiles.d
- for rule in $(find ${WORKDIR} -maxdepth 1 -type f -name "*.rules"); do
- install -m 0644 $rule ${D}${sysconfdir}/udev/rules.d/
- done
-
- install -m 0644 ${WORKDIR}/00-create-volatile.conf ${D}${sysconfdir}/tmpfiles.d/
-
- if ${@bb.utils.contains('DISTRO_FEATURES','sysvinit','true','false',d)}; then
- install -d ${D}${sysconfdir}/init.d
- install -m 0755 ${WORKDIR}/init ${D}${sysconfdir}/init.d/systemd-udevd
- sed -i s%@UDEVD@%${rootlibexecdir}/systemd/systemd-udevd% ${D}${sysconfdir}/init.d/systemd-udevd
- install -Dm 0755 ${S}/src/systemctl/systemd-sysv-install.SKELETON ${D}${systemd_unitdir}/systemd-sysv-install
- fi
-
- if "${@'true' if oe.types.boolean(d.getVar('VOLATILE_LOG_DIR')) else 'false'}"; then
- # /var/log is typically a symbolic link to inside /var/volatile,
- # which is expected to be empty.
- rm -rf ${D}${localstatedir}/log
- else
- chown root:systemd-journal ${D}${localstatedir}/log/journal
-
- # journal-remote creates this at start
- rm -rf ${D}${localstatedir}/log/journal/remote
- fi
-
- install -d ${D}${systemd_system_unitdir}/graphical.target.wants
- install -d ${D}${systemd_system_unitdir}/multi-user.target.wants
- install -d ${D}${systemd_system_unitdir}/poweroff.target.wants
- install -d ${D}${systemd_system_unitdir}/reboot.target.wants
- install -d ${D}${systemd_system_unitdir}/rescue.target.wants
-
- # Create symlinks for systemd-update-utmp-runlevel.service
- if ${@bb.utils.contains('PACKAGECONFIG', 'utmp', 'true', 'false', d)}; then
- ln -sf ../systemd-update-utmp-runlevel.service ${D}${systemd_system_unitdir}/graphical.target.wants/systemd-update-utmp-runlevel.service
- ln -sf ../systemd-update-utmp-runlevel.service ${D}${systemd_system_unitdir}/multi-user.target.wants/systemd-update-utmp-runlevel.service
- ln -sf ../systemd-update-utmp-runlevel.service ${D}${systemd_system_unitdir}/poweroff.target.wants/systemd-update-utmp-runlevel.service
- ln -sf ../systemd-update-utmp-runlevel.service ${D}${systemd_system_unitdir}/reboot.target.wants/systemd-update-utmp-runlevel.service
- ln -sf ../systemd-update-utmp-runlevel.service ${D}${systemd_system_unitdir}/rescue.target.wants/systemd-update-utmp-runlevel.service
- fi
-
- # this file is needed to exist if networkd is disabled but timesyncd is still in use since timesyncd checks it
- # for existence else it fails
- if [ -s ${D}${exec_prefix}/lib/tmpfiles.d/systemd.conf ] &&
- ! ${@bb.utils.contains('PACKAGECONFIG', 'networkd', 'true', 'false', d)}; then
- echo 'd /run/systemd/netif/links 0755 root root -' >>${D}${exec_prefix}/lib/tmpfiles.d/systemd.conf
- fi
- if ! ${@bb.utils.contains('PACKAGECONFIG', 'resolved', 'true', 'false', d)}; then
- echo 'L! ${sysconfdir}/resolv.conf - - - - ../run/systemd/resolve/resolv.conf' >>${D}${exec_prefix}/lib/tmpfiles.d/etc.conf
- echo 'd /run/systemd/resolve 0755 root root -' >>${D}${exec_prefix}/lib/tmpfiles.d/systemd.conf
- echo 'f /run/systemd/resolve/resolv.conf 0644 root root' >>${D}${exec_prefix}/lib/tmpfiles.d/systemd.conf
- ln -s ../run/systemd/resolve/resolv.conf ${D}${sysconfdir}/resolv-conf.systemd
- else
- sed -i -e "s%^L! /etc/resolv.conf.*$%L! /etc/resolv.conf - - - - ../run/systemd/resolve/resolv.conf%g" ${D}${exec_prefix}/lib/tmpfiles.d/etc.conf
- ln -s ../run/systemd/resolve/resolv.conf ${D}${sysconfdir}/resolv-conf.systemd
- fi
- if ${@bb.utils.contains('DISTRO_FEATURES', 'x11', 'false', 'true', d)}; then
- rm ${D}${exec_prefix}/lib/tmpfiles.d/x11.conf
- rm -r ${D}${sysconfdir}/X11
- fi
-
- # If polkit is setup fixup permissions and ownership
- if ${@bb.utils.contains('PACKAGECONFIG', 'polkit', 'true', 'false', d)}; then
- if [ -d ${D}${datadir}/polkit-1/rules.d ]; then
- chmod 700 ${D}${datadir}/polkit-1/rules.d
- chown polkitd:root ${D}${datadir}/polkit-1/rules.d
- fi
- fi
-
- # If polkit is not available and a fallback was requested, install a drop-in that allows networkd to
- # request hostname changes via DBUS without elevating its privileges
- if ${@bb.utils.contains('PACKAGECONFIG', 'polkit_hostnamed_fallback', 'true', 'false', d)}; then
- install -d ${D}${systemd_system_unitdir}/systemd-hostnamed.service.d/
- install -m 0644 ${WORKDIR}/00-hostnamed-network-user.conf ${D}${systemd_system_unitdir}/systemd-hostnamed.service.d/
- install -d ${D}${datadir}/dbus-1/system.d/
- install -m 0644 ${WORKDIR}/org.freedesktop.hostname1_no_polkit.conf ${D}${datadir}/dbus-1/system.d/
- fi
-
- # create link for existing udev rules
- ln -s ${base_bindir}/udevadm ${D}${base_sbindir}/udevadm
-
- # install default policy for presets
- # https://www.freedesktop.org/wiki/Software/systemd/Preset/#howto
- install -Dm 0644 ${WORKDIR}/99-default.preset ${D}${systemd_unitdir}/system-preset/99-default.preset
-
- # add a profile fragment to disable systemd pager with busybox less
- install -Dm 0644 ${WORKDIR}/systemd-pager.sh ${D}${sysconfdir}/profile.d/systemd-pager.sh
-}
-
-python populate_packages:prepend (){
- systemdlibdir = d.getVar("rootlibdir")
- do_split_packages(d, systemdlibdir, r'^lib(.*)\.so\.*', 'lib%s', 'Systemd %s library', extra_depends='', allow_links=True)
-}
-PACKAGES_DYNAMIC += "^lib(udev|systemd|nss).*"
-
-PACKAGE_BEFORE_PN = "\
- ${PN}-gui \
- ${PN}-vconsole-setup \
- ${PN}-initramfs \
- ${PN}-analyze \
- ${PN}-kernel-install \
- ${PN}-rpm-macros \
- ${PN}-binfmt \
- ${PN}-zsh-completion \
- ${PN}-container \
- ${PN}-journal-gatewayd \
- ${PN}-journal-upload \
- ${PN}-journal-remote \
- ${PN}-extra-utils \
- ${PN}-udev-rules \
- libsystemd-shared \
- udev \
- udev-hwdb \
-"
-
-SUMMARY:${PN}-container = "Tools for containers and VMs"
-DESCRIPTION:${PN}-container = "Systemd tools to spawn and manage containers and virtual machines."
-
-SUMMARY:${PN}-journal-gatewayd = "HTTP server for journal events"
-DESCRIPTION:${PN}-journal-gatewayd = "systemd-journal-gatewayd serves journal events over the network. Clients must connect using HTTP. The server listens on port 19531 by default."
-
-SUMMARY:${PN}-journal-upload = "Send journal messages over the network"
-DESCRIPTION:${PN}-journal-upload = "systemd-journal-upload uploads journal entries to a specified URL."
-
-SUMMARY:${PN}-journal-remote = "Receive journal messages over the network"
-DESCRIPTION:${PN}-journal-remote = "systemd-journal-remote is a command to receive serialized journal events and store them to journal files."
-
-SUMMARY:libsystemd-shared = "Systemd shared library"
-
-SYSTEMD_PACKAGES = "${@bb.utils.contains('PACKAGECONFIG', 'binfmt', '${PN}-binfmt', '', d)} \
- ${@bb.utils.contains('PACKAGECONFIG', 'microhttpd', '${PN}-journal-gatewayd', '', d)} \
- ${@bb.utils.contains('PACKAGECONFIG', 'microhttpd', '${PN}-journal-remote', '', d)} \
- ${@bb.utils.contains('PACKAGECONFIG', 'journal-upload', '${PN}-journal-upload', '', d)} \
-"
-SYSTEMD_SERVICE:${PN}-binfmt = "systemd-binfmt.service"
-
-USERADD_PACKAGES = "${PN} ${PN}-extra-utils \
- ${@bb.utils.contains('PACKAGECONFIG', 'microhttpd', '${PN}-journal-gatewayd', '', d)} \
- ${@bb.utils.contains('PACKAGECONFIG', 'microhttpd', '${PN}-journal-remote', '', d)} \
- ${@bb.utils.contains('PACKAGECONFIG', 'journal-upload', '${PN}-journal-upload', '', d)} \
-"
-GROUPADD_PARAM:${PN} = "-r systemd-journal;"
-GROUPADD_PARAM:${PN} += "${@bb.utils.contains('PACKAGECONFIG', 'polkit_hostnamed_fallback', '-r systemd-hostname;', '', d)}"
-USERADD_PARAM:${PN} += "${@bb.utils.contains('PACKAGECONFIG', 'coredump', '--system -d / -M --shell /sbin/nologin systemd-coredump;', '', d)}"
-USERADD_PARAM:${PN} += "${@bb.utils.contains('PACKAGECONFIG', 'networkd', '--system -d / -M --shell /sbin/nologin systemd-network;', '', d)}"
-USERADD_PARAM:${PN} += "${@bb.utils.contains('PACKAGECONFIG', 'polkit', '--system --no-create-home --user-group --home-dir ${sysconfdir}/polkit-1 polkitd;', '', d)}"
-USERADD_PARAM:${PN} += "${@bb.utils.contains('PACKAGECONFIG', 'resolved', '--system -d / -M --shell /sbin/nologin systemd-resolve;', '', d)}"
-USERADD_PARAM:${PN} += "${@bb.utils.contains('PACKAGECONFIG', 'timesyncd', '--system -d / -M --shell /sbin/nologin systemd-timesync;', '', d)}"
-USERADD_PARAM:${PN}-extra-utils = "--system -d / -M --shell /sbin/nologin systemd-bus-proxy"
-USERADD_PARAM:${PN}-journal-gatewayd = "--system -d / -M --shell /sbin/nologin systemd-journal-gateway"
-USERADD_PARAM:${PN}-journal-remote = "--system -d / -M --shell /sbin/nologin systemd-journal-remote"
-USERADD_PARAM:${PN}-journal-upload = "--system -d / -M --shell /sbin/nologin systemd-journal-upload"
-
-FILES:${PN}-analyze = "${bindir}/systemd-analyze"
-
-FILES:${PN}-initramfs = "/init"
-RDEPENDS:${PN}-initramfs = "${PN}"
-
-FILES:${PN}-gui = "${bindir}/systemadm"
-
-FILES:${PN}-vconsole-setup = "${rootlibexecdir}/systemd/systemd-vconsole-setup \
- ${systemd_system_unitdir}/systemd-vconsole-setup.service \
- ${systemd_system_unitdir}/sysinit.target.wants/systemd-vconsole-setup.service"
-
-RDEPENDS:${PN}-kernel-install += "bash"
-FILES:${PN}-kernel-install = "${bindir}/kernel-install \
- ${sysconfdir}/kernel/ \
- ${exec_prefix}/lib/kernel \
- "
-FILES:${PN}-rpm-macros = "${exec_prefix}/lib/rpm \
- "
-
-FILES:${PN}-zsh-completion = "${datadir}/zsh/site-functions"
-
-FILES:${PN}-binfmt = "${sysconfdir}/binfmt.d/ \
- ${exec_prefix}/lib/binfmt.d \
- ${rootlibexecdir}/systemd/systemd-binfmt \
- ${systemd_system_unitdir}/proc-sys-fs-binfmt_misc.* \
- ${systemd_system_unitdir}/systemd-binfmt.service"
-RRECOMMENDS:${PN}-binfmt = "kernel-module-binfmt-misc"
-
-RRECOMMENDS:${PN}-vconsole-setup = "kbd kbd-consolefonts kbd-keymaps"
-
-
-FILES:${PN}-journal-gatewayd = "${rootlibexecdir}/systemd/systemd-journal-gatewayd \
- ${systemd_system_unitdir}/systemd-journal-gatewayd.service \
- ${systemd_system_unitdir}/systemd-journal-gatewayd.socket \
- ${systemd_system_unitdir}/sockets.target.wants/systemd-journal-gatewayd.socket \
- ${datadir}/systemd/gatewayd/browse.html \
- "
-SYSTEMD_SERVICE:${PN}-journal-gatewayd = "systemd-journal-gatewayd.socket"
-
-FILES:${PN}-journal-upload = "${rootlibexecdir}/systemd/systemd-journal-upload \
- ${systemd_system_unitdir}/systemd-journal-upload.service \
- ${sysconfdir}/systemd/journal-upload.conf \
- "
-SYSTEMD_SERVICE:${PN}-journal-upload = "systemd-journal-upload.service"
-
-FILES:${PN}-journal-remote = "${rootlibexecdir}/systemd/systemd-journal-remote \
- ${sysconfdir}/systemd/journal-remote.conf \
- ${systemd_system_unitdir}/systemd-journal-remote.service \
- ${systemd_system_unitdir}/systemd-journal-remote.socket \
- "
-SYSTEMD_SERVICE:${PN}-journal-remote = "systemd-journal-remote.socket"
-
-
-FILES:${PN}-container = "${sysconfdir}/dbus-1/system.d/org.freedesktop.import1.conf \
- ${sysconfdir}/dbus-1/system.d/org.freedesktop.machine1.conf \
- ${sysconfdir}/systemd/system/multi-user.target.wants/machines.target \
- ${base_bindir}/machinectl \
- ${bindir}/systemd-nspawn \
- ${nonarch_libdir}/systemd/import-pubring.gpg \
- ${systemd_system_unitdir}/busnames.target.wants/org.freedesktop.import1.busname \
- ${systemd_system_unitdir}/busnames.target.wants/org.freedesktop.machine1.busname \
- ${systemd_system_unitdir}/local-fs.target.wants/var-lib-machines.mount \
- ${systemd_system_unitdir}/machines.target.wants/var-lib-machines.mount \
- ${systemd_system_unitdir}/remote-fs.target.wants/var-lib-machines.mount \
- ${systemd_system_unitdir}/machine.slice \
- ${systemd_system_unitdir}/machines.target \
- ${systemd_system_unitdir}/org.freedesktop.import1.busname \
- ${systemd_system_unitdir}/org.freedesktop.machine1.busname \
- ${systemd_system_unitdir}/systemd-importd.service \
- ${systemd_system_unitdir}/systemd-machined.service \
- ${systemd_system_unitdir}/dbus-org.freedesktop.machine1.service \
- ${systemd_system_unitdir}/var-lib-machines.mount \
- ${rootlibexecdir}/systemd/systemd-import \
- ${rootlibexecdir}/systemd/systemd-importd \
- ${rootlibexecdir}/systemd/systemd-machined \
- ${rootlibexecdir}/systemd/systemd-pull \
- ${exec_prefix}/lib/tmpfiles.d/systemd-nspawn.conf \
- ${exec_prefix}/lib/tmpfiles.d/README \
- ${systemd_system_unitdir}/systemd-nspawn@.service \
- ${libdir}/libnss_mymachines.so.2 \
- ${datadir}/dbus-1/system-services/org.freedesktop.import1.service \
- ${datadir}/dbus-1/system-services/org.freedesktop.machine1.service \
- ${datadir}/dbus-1/system.d/org.freedesktop.import1.conf \
- ${datadir}/dbus-1/system.d/org.freedesktop.machine1.conf \
- ${datadir}/polkit-1/actions/org.freedesktop.import1.policy \
- ${datadir}/polkit-1/actions/org.freedesktop.machine1.policy \
- "
-
-# "machinectl import-tar" uses "tar --numeric-owner", not supported by busybox.
-RRECOMMENDS:${PN}-container += "\
- ${PN}-journal-gatewayd \
- ${PN}-journal-remote \
- ${PN}-journal-upload \
- kernel-module-dm-mod \
- kernel-module-loop \
- kernel-module-tun \
- tar \
- "
-
-FILES:${PN}-extra-utils = "\
- ${base_bindir}/systemd-escape \
- ${base_bindir}/systemd-inhibit \
- ${bindir}/systemd-detect-virt \
- ${bindir}/systemd-dissect \
- ${bindir}/systemd-path \
- ${bindir}/systemd-run \
- ${bindir}/systemd-cat \
- ${bindir}/systemd-delta \
- ${bindir}/systemd-cgls \
- ${bindir}/systemd-cgtop \
- ${bindir}/systemd-stdio-bridge \
- ${bindir}/systemd-sysext \
- ${base_bindir}/systemd-ask-password \
- ${base_bindir}/systemd-tty-ask-password-agent \
- ${systemd_system_unitdir}/systemd-ask-password-console.path \
- ${systemd_system_unitdir}/systemd-ask-password-console.service \
- ${systemd_system_unitdir}/systemd-ask-password-wall.path \
- ${systemd_system_unitdir}/systemd-ask-password-wall.service \
- ${systemd_system_unitdir}/sysinit.target.wants/systemd-ask-password-console.path \
- ${systemd_system_unitdir}/sysinit.target.wants/systemd-ask-password-wall.path \
- ${systemd_system_unitdir}/multi-user.target.wants/systemd-ask-password-wall.path \
- ${rootlibexecdir}/systemd/systemd-resolve-host \
- ${rootlibexecdir}/systemd/systemd-ac-power \
- ${rootlibexecdir}/systemd/systemd-activate \
- ${rootlibexecdir}/systemd/systemd-bus-proxyd \
- ${systemd_system_unitdir}/systemd-bus-proxyd.service \
- ${systemd_system_unitdir}/systemd-bus-proxyd.socket \
- ${rootlibexecdir}/systemd/systemd-socket-proxyd \
- ${rootlibexecdir}/systemd/systemd-reply-password \
- ${rootlibexecdir}/systemd/systemd-sleep \
- ${rootlibexecdir}/systemd/system-sleep \
- ${systemd_system_unitdir}/systemd-hibernate.service \
- ${systemd_system_unitdir}/systemd-hybrid-sleep.service \
- ${systemd_system_unitdir}/systemd-suspend.service \
- ${systemd_system_unitdir}/sleep.target \
- ${rootlibexecdir}/systemd/systemd-initctl \
- ${systemd_system_unitdir}/systemd-initctl.service \
- ${systemd_system_unitdir}/systemd-initctl.socket \
- ${systemd_system_unitdir}/sockets.target.wants/systemd-initctl.socket \
- ${rootlibexecdir}/systemd/system-generators/systemd-gpt-auto-generator \
- ${rootlibexecdir}/systemd/systemd-cgroups-agent \
-"
-
-FILES:${PN}-udev-rules = "\
- ${rootlibexecdir}/udev/rules.d/70-uaccess.rules \
- ${rootlibexecdir}/udev/rules.d/71-seat.rules \
- ${rootlibexecdir}/udev/rules.d/73-seat-late.rules \
- ${rootlibexecdir}/udev/rules.d/99-systemd.rules \
-"
-
-CONFFILES:${PN} = "${sysconfdir}/systemd/coredump.conf \
- ${sysconfdir}/systemd/journald.conf \
- ${sysconfdir}/systemd/logind.conf \
- ${sysconfdir}/systemd/networkd.conf \
- ${sysconfdir}/systemd/pstore.conf \
- ${sysconfdir}/systemd/resolved.conf \
- ${sysconfdir}/systemd/sleep.conf \
- ${sysconfdir}/systemd/system.conf \
- ${sysconfdir}/systemd/timesyncd.conf \
- ${sysconfdir}/systemd/user.conf \
-"
-
-FILES:${PN} = " ${base_bindir}/* \
- ${base_sbindir}/shutdown \
- ${base_sbindir}/halt \
- ${base_sbindir}/poweroff \
- ${base_sbindir}/runlevel \
- ${base_sbindir}/telinit \
- ${base_sbindir}/resolvconf \
- ${base_sbindir}/reboot \
- ${base_sbindir}/init \
- ${datadir}/dbus-1/services \
- ${datadir}/dbus-1/system-services \
- ${datadir}/polkit-1 \
- ${datadir}/${BPN} \
- ${datadir}/factory \
- ${sysconfdir}/dbus-1/ \
- ${sysconfdir}/modules-load.d/ \
- ${sysconfdir}/pam.d/ \
- ${sysconfdir}/profile.d/ \
- ${sysconfdir}/sysctl.d/ \
- ${sysconfdir}/systemd/ \
- ${sysconfdir}/tmpfiles.d/ \
- ${sysconfdir}/xdg/ \
- ${sysconfdir}/init.d/README \
- ${sysconfdir}/resolv-conf.systemd \
- ${sysconfdir}/X11/xinit/xinitrc.d/* \
- ${rootlibexecdir}/systemd/* \
- ${libdir}/pam.d \
- ${nonarch_libdir}/pam.d \
- ${systemd_unitdir}/* \
- ${base_libdir}/security/*.so \
- /cgroup \
- ${bindir}/systemd* \
- ${bindir}/busctl \
- ${bindir}/coredumpctl \
- ${bindir}/localectl \
- ${bindir}/hostnamectl \
- ${bindir}/resolvectl \
- ${bindir}/timedatectl \
- ${bindir}/bootctl \
- ${bindir}/oomctl \
- ${exec_prefix}/lib/tmpfiles.d/*.conf \
- ${exec_prefix}/lib/systemd \
- ${exec_prefix}/lib/modules-load.d \
- ${exec_prefix}/lib/sysctl.d \
- ${exec_prefix}/lib/sysusers.d \
- ${exec_prefix}/lib/environment.d \
- ${localstatedir} \
- ${rootlibexecdir}/modprobe.d/systemd.conf \
- ${rootlibexecdir}/modprobe.d/README \
- ${datadir}/dbus-1/system.d/org.freedesktop.timedate1.conf \
- ${datadir}/dbus-1/system.d/org.freedesktop.locale1.conf \
- ${datadir}/dbus-1/system.d/org.freedesktop.network1.conf \
- ${datadir}/dbus-1/system.d/org.freedesktop.resolve1.conf \
- ${datadir}/dbus-1/system.d/org.freedesktop.systemd1.conf \
- ${@bb.utils.contains('PACKAGECONFIG', 'polkit_hostnamed_fallback', '${datadir}/dbus-1/system.d/org.freedesktop.hostname1_no_polkit.conf', '', d)} \
- ${datadir}/dbus-1/system.d/org.freedesktop.hostname1.conf \
- ${datadir}/dbus-1/system.d/org.freedesktop.login1.conf \
- ${datadir}/dbus-1/system.d/org.freedesktop.timesync1.conf \
- ${datadir}/dbus-1/system.d/org.freedesktop.portable1.conf \
- ${datadir}/dbus-1/system.d/org.freedesktop.oom1.conf \
- ${datadir}/dbus-1/system.d/org.freedesktop.home1.conf \
- "
-
-FILES:${PN}-dev += "${base_libdir}/security/*.la ${datadir}/dbus-1/interfaces/ ${sysconfdir}/rpm/macros.systemd"
-
-RDEPENDS:${PN} += "kmod dbus util-linux-mount util-linux-umount udev (= ${EXTENDPKGV}) systemd-udev-rules util-linux-agetty util-linux-fsck"
-RDEPENDS:${PN} += "${@bb.utils.contains('PACKAGECONFIG', 'serial-getty-generator', '', 'systemd-serialgetty', d)}"
-RDEPENDS:${PN} += "volatile-binds"
-
-RRECOMMENDS:${PN} += "systemd-extra-utils \
- udev-hwdb \
- e2fsprogs-e2fsck \
- kernel-module-autofs4 kernel-module-unix kernel-module-ipv6 kernel-module-sch-fq-codel \
- os-release \
- systemd-conf \
-"
-
-INSANE_SKIP:${PN} += "dev-so libdir"
-INSANE_SKIP:${PN}-dbg += "libdir"
-INSANE_SKIP:${PN}-doc += " libdir"
-INSANE_SKIP:libsystemd-shared += "libdir"
-
-FILES:libsystemd-shared = "${rootlibexecdir}/systemd/libsystemd-shared*.so"
-
-RPROVIDES:udev = "hotplug"
-
-RDEPENDS:udev-hwdb += "udev"
-
-FILES:udev += "${base_sbindir}/udevd \
- ${rootlibexecdir}/systemd/network/99-default.link \
- ${rootlibexecdir}/systemd/systemd-udevd \
- ${rootlibexecdir}/udev/accelerometer \
- ${rootlibexecdir}/udev/ata_id \
- ${rootlibexecdir}/udev/cdrom_id \
- ${rootlibexecdir}/udev/collect \
- ${rootlibexecdir}/udev/dmi_memory_id \
- ${rootlibexecdir}/udev/fido_id \
- ${rootlibexecdir}/udev/findkeyboards \
- ${rootlibexecdir}/udev/keyboard-force-release.sh \
- ${rootlibexecdir}/udev/keymap \
- ${rootlibexecdir}/udev/mtd_probe \
- ${rootlibexecdir}/udev/scsi_id \
- ${rootlibexecdir}/udev/v4l_id \
- ${rootlibexecdir}/udev/keymaps \
- ${rootlibexecdir}/udev/rules.d/50-udev-default.rules \
- ${rootlibexecdir}/udev/rules.d/60-autosuspend.rules \
- ${rootlibexecdir}/udev/rules.d/60-autosuspend-chromiumos.rules \
- ${rootlibexecdir}/udev/rules.d/60-block.rules \
- ${rootlibexecdir}/udev/rules.d/60-cdrom_id.rules \
- ${rootlibexecdir}/udev/rules.d/60-drm.rules \
- ${rootlibexecdir}/udev/rules.d/60-evdev.rules \
- ${rootlibexecdir}/udev/rules.d/60-fido-id.rules \
- ${rootlibexecdir}/udev/rules.d/60-input-id.rules \
- ${rootlibexecdir}/udev/rules.d/60-persistent-alsa.rules \
- ${rootlibexecdir}/udev/rules.d/60-persistent-input.rules \
- ${rootlibexecdir}/udev/rules.d/60-persistent-storage.rules \
- ${rootlibexecdir}/udev/rules.d/60-persistent-storage-tape.rules \
- ${rootlibexecdir}/udev/rules.d/60-persistent-v4l.rules \
- ${rootlibexecdir}/udev/rules.d/60-sensor.rules \
- ${rootlibexecdir}/udev/rules.d/60-serial.rules \
- ${rootlibexecdir}/udev/rules.d/61-autosuspend-manual.rules \
- ${rootlibexecdir}/udev/rules.d/64-btrfs.rules \
- ${rootlibexecdir}/udev/rules.d/70-camera.rules \
- ${rootlibexecdir}/udev/rules.d/70-joystick.rules \
- ${rootlibexecdir}/udev/rules.d/70-memory.rules \
- ${rootlibexecdir}/udev/rules.d/70-mouse.rules \
- ${rootlibexecdir}/udev/rules.d/70-power-switch.rules \
- ${rootlibexecdir}/udev/rules.d/70-touchpad.rules \
- ${rootlibexecdir}/udev/rules.d/75-net-description.rules \
- ${rootlibexecdir}/udev/rules.d/75-probe_mtd.rules \
- ${rootlibexecdir}/udev/rules.d/78-sound-card.rules \
- ${rootlibexecdir}/udev/rules.d/80-drivers.rules \
- ${rootlibexecdir}/udev/rules.d/80-net-setup-link.rules \
- ${rootlibexecdir}/udev/rules.d/81-net-dhcp.rules \
- ${rootlibexecdir}/udev/rules.d/90-vconsole.rules \
- ${rootlibexecdir}/udev/rules.d/README \
- ${sysconfdir}/udev \
- ${sysconfdir}/init.d/systemd-udevd \
- ${systemd_system_unitdir}/*udev* \
- ${systemd_system_unitdir}/*.wants/*udev* \
- ${base_bindir}/systemd-hwdb \
- ${base_bindir}/udevadm \
- ${base_sbindir}/udevadm \
- ${datadir}/bash-completion/completions/udevadm \
- ${systemd_system_unitdir}/systemd-hwdb-update.service \
- "
-
-FILES:udev-hwdb = "${rootlibexecdir}/udev/hwdb.d \
- "
-
-RCONFLICTS:${PN} = "tiny-init ${@bb.utils.contains('PACKAGECONFIG', 'resolved', 'resolvconf', '', d)}"
-
-INITSCRIPT_PACKAGES = "udev"
-INITSCRIPT_NAME:udev = "systemd-udevd"
-INITSCRIPT_PARAMS:udev = "start 03 S ."
-
-python __anonymous() {
- if not bb.utils.contains('DISTRO_FEATURES', 'sysvinit', True, False, d):
- d.setVar("INHIBIT_UPDATERCD_BBCLASS", "1")
-
- if bb.utils.contains('PACKAGECONFIG', 'repart', True, False, d) and not bb.utils.contains('PACKAGECONFIG', 'openssl', True, False, d):
- bb.error("PACKAGECONFIG[repart] requires PACKAGECONFIG[openssl]")
-
- if bb.utils.contains('PACKAGECONFIG', 'homed', True, False, d) and not bb.utils.contains('PACKAGECONFIG', 'userdb openssl cryptsetup', True, False, d):
- bb.error("PACKAGECONFIG[homed] requires PACKAGECONFIG[userdb], PACKAGECONFIG[openssl] and PACKAGECONFIG[cryptsetup]")
-}
-
-python do_warn_musl() {
- if d.getVar('TCLIBC') == "musl":
- bb.warn("Using systemd with musl is not recommended since it is not supported upstream and some patches are known to be problematic.")
-}
-addtask warn_musl before do_configure
-
-ALTERNATIVE:${PN} = "halt reboot shutdown poweroff runlevel ${@bb.utils.contains('PACKAGECONFIG', 'resolved', 'resolv-conf', '', d)}"
-
-ALTERNATIVE_TARGET[resolv-conf] = "${sysconfdir}/resolv-conf.systemd"
-ALTERNATIVE_LINK_NAME[resolv-conf] = "${sysconfdir}/resolv.conf"
-ALTERNATIVE_PRIORITY[resolv-conf] ?= "50"
-
-ALTERNATIVE_TARGET[halt] = "${base_bindir}/systemctl"
-ALTERNATIVE_LINK_NAME[halt] = "${base_sbindir}/halt"
-ALTERNATIVE_PRIORITY[halt] ?= "300"
-
-ALTERNATIVE_TARGET[reboot] = "${base_bindir}/systemctl"
-ALTERNATIVE_LINK_NAME[reboot] = "${base_sbindir}/reboot"
-ALTERNATIVE_PRIORITY[reboot] ?= "300"
-
-ALTERNATIVE_TARGET[shutdown] = "${base_bindir}/systemctl"
-ALTERNATIVE_LINK_NAME[shutdown] = "${base_sbindir}/shutdown"
-ALTERNATIVE_PRIORITY[shutdown] ?= "300"
-
-ALTERNATIVE_TARGET[poweroff] = "${base_bindir}/systemctl"
-ALTERNATIVE_LINK_NAME[poweroff] = "${base_sbindir}/poweroff"
-ALTERNATIVE_PRIORITY[poweroff] ?= "300"
-
-ALTERNATIVE_TARGET[runlevel] = "${base_bindir}/systemctl"
-ALTERNATIVE_LINK_NAME[runlevel] = "${base_sbindir}/runlevel"
-ALTERNATIVE_PRIORITY[runlevel] ?= "300"
-
-pkg_postinst:${PN}:libc-glibc () {
- sed -e '/^hosts:/s/\s*\<myhostname\>//' \
- -e 's/\(^hosts:.*\)\(\<files\>\)\(.*\)\(\<dns\>\)\(.*\)/\1\2 myhostname \3\4\5/' \
- -i $D${sysconfdir}/nsswitch.conf
-}
-
-pkg_prerm:${PN}:libc-glibc () {
- sed -e '/^hosts:/s/\s*\<myhostname\>//' \
- -e '/^hosts:/s/\s*myhostname//' \
- -i $D${sysconfdir}/nsswitch.conf
-}
-
-PACKAGE_WRITE_DEPS += "qemu-native"
-pkg_postinst:udev-hwdb () {
- if test -n "$D"; then
- $INTERCEPT_DIR/postinst_intercept update_udev_hwdb ${PKG} mlprefix=${MLPREFIX} binprefix=${MLPREFIX} rootlibexecdir="${rootlibexecdir}" PREFERRED_PROVIDER_udev="${PREFERRED_PROVIDER_udev}"
- else
- udevadm hwdb --update
- fi
-}
-
-pkg_prerm:udev-hwdb () {
- rm -f $D${sysconfdir}/udev/hwdb.bin
-}
diff --git a/meta/recipes-core/systemd/systemd_255.4.bb b/meta/recipes-core/systemd/systemd_255.4.bb
new file mode 100644
index 0000000000..e7498c802d
--- /dev/null
+++ b/meta/recipes-core/systemd/systemd_255.4.bb
@@ -0,0 +1,899 @@
+require systemd.inc
+
+PROVIDES = "udev"
+
+PE = "1"
+
+DEPENDS = "intltool-native gperf-native libcap util-linux python3-jinja2-native"
+
+SECTION = "base/shell"
+
+inherit useradd pkgconfig meson perlnative update-rc.d update-alternatives qemu systemd gettext bash-completion manpages features_check
+
+# unmerged-usr support is deprecated upstream, taints the system and will be
+# removed in the near future. Fail the build if it is not enabled.
+REQUIRED_DISTRO_FEATURES += "usrmerge"
+
+# As this recipe builds udev, respect systemd being in DISTRO_FEATURES so
+# that we don't build both udev and systemd in world builds.
+REQUIRED_DISTRO_FEATURES += "systemd"
+
+SRC_URI += " \
+ file://touchscreen.rules \
+ file://00-create-volatile.conf \
+ ${@bb.utils.contains('PACKAGECONFIG', 'polkit_hostnamed_fallback', 'file://org.freedesktop.hostname1_no_polkit.conf', '', d)} \
+ ${@bb.utils.contains('PACKAGECONFIG', 'polkit_hostnamed_fallback', 'file://00-hostnamed-network-user.conf', '', d)} \
+ file://init \
+ file://99-default.preset \
+ file://systemd-pager.sh \
+ file://0002-binfmt-Don-t-install-dependency-links-at-install-tim.patch \
+ file://0008-implment-systemd-sysv-install-for-OE.patch \
+ "
+
+# patches needed by musl
+SRC_URI:append:libc-musl = " ${SRC_URI_MUSL}"
+SRC_URI_MUSL = "\
+ file://0001-missing_type.h-add-comparison_fn_t.patch \
+ file://0002-add-fallback-parse_printf_format-implementation.patch \
+ file://0003-src-basic-missing.h-check-for-missing-strndupa.patch \
+ file://0004-don-t-fail-if-GLOB_BRACE-and-GLOB_ALTDIRFUNC-is-not-.patch \
+ file://0005-add-missing-FTW_-macros-for-musl.patch \
+ file://0006-Use-uintmax_t-for-handling-rlim_t.patch \
+ file://0007-don-t-pass-AT_SYMLINK_NOFOLLOW-flag-to-faccessat.patch \
+ file://0008-Define-glibc-compatible-basename-for-non-glibc-syste.patch \
+ file://0009-Do-not-disable-buffering-when-writing-to-oom_score_a.patch \
+ file://0010-distinguish-XSI-compliant-strerror_r-from-GNU-specif.patch \
+ file://0011-avoid-redefinition-of-prctl_mm_map-structure.patch \
+ file://0012-do-not-disable-buffer-in-writing-files.patch \
+ file://0013-Handle-__cpu_mask-usage.patch \
+ file://0014-Handle-missing-gshadow.patch \
+ file://0015-missing_syscall.h-Define-MIPS-ABI-defines-for-musl.patch \
+ file://0016-pass-correct-parameters-to-getdents64.patch \
+ file://0017-Adjust-for-musl-headers.patch \
+ file://0018-test-bus-error-strerror-is-assumed-to-be-GNU-specifi.patch \
+ file://0019-errno-util-Make-STRERROR-portable-for-musl.patch \
+ file://0020-sd-event-Make-malloc_trim-conditional-on-glibc.patch \
+ file://0021-shared-Do-not-use-malloc_info-on-musl.patch \
+ file://0022-avoid-missing-LOCK_EX-declaration.patch \
+ "
+
+PAM_PLUGINS = " \
+ pam-plugin-unix \
+ pam-plugin-loginuid \
+ pam-plugin-keyinit \
+ pam-plugin-namespace \
+"
+
+PACKAGECONFIG ??= " \
+ ${@bb.utils.filter('DISTRO_FEATURES', 'acl audit efi ldconfig pam pni-names selinux smack usrmerge polkit seccomp', d)} \
+ ${@bb.utils.contains('DISTRO_FEATURES', 'minidebuginfo', 'coredump elfutils', '', d)} \
+ ${@bb.utils.contains('DISTRO_FEATURES', 'wifi', 'rfkill', '', d)} \
+ ${@bb.utils.contains('DISTRO_FEATURES', 'x11', 'xkbcommon', '', d)} \
+ ${@bb.utils.contains('DISTRO_FEATURES', 'sysvinit', '', 'link-udev-shared', d)} \
+ backlight \
+ binfmt \
+ cgroupv2 \
+ gshadow \
+ hibernate \
+ hostnamed \
+ idn \
+ ima \
+ kmod \
+ localed \
+ logind \
+ machined \
+ myhostname \
+ networkd \
+ nss \
+ nss-mymachines \
+ nss-resolve \
+ quotacheck \
+ randomseed \
+ resolved \
+ set-time-epoch \
+ sysusers \
+ sysvinit \
+ timedated \
+ timesyncd \
+ userdb \
+ utmp \
+ vconsole \
+ wheel-group \
+ zstd \
+"
+
+PACKAGECONFIG:remove:libc-musl = " \
+ gshadow \
+ idn \
+ localed \
+ myhostname \
+ nss \
+ nss-mymachines \
+ nss-resolve \
+ sysusers \
+ userdb \
+ utmp \
+"
+
+# https://github.com/seccomp/libseccomp/issues/347
+PACKAGECONFIG:remove:mipsarch = "seccomp"
+
+TARGET_CC_ARCH:append:libc-musl = " -D__UAPI_DEF_ETHHDR=0 -D_LARGEFILE64_SOURCE"
+
+# Some of the dependencies are weak-style recommends - if not available at runtime,
+# systemd won't fail but the library-related feature will be skipped with a warning.
+
+# Use the upstream systemd serial-getty@.service and rely on
+# systemd-getty-generator instead of using the OE-core specific
+# systemd-serialgetty.bb - not enabled by default.
+PACKAGECONFIG[serial-getty-generator] = ""
+
+PACKAGECONFIG[acl] = "-Dacl=true,-Dacl=false,acl"
+PACKAGECONFIG[audit] = "-Daudit=true,-Daudit=false,audit"
+PACKAGECONFIG[backlight] = "-Dbacklight=true,-Dbacklight=false"
+PACKAGECONFIG[binfmt] = "-Dbinfmt=true,-Dbinfmt=false"
+PACKAGECONFIG[bzip2] = "-Dbzip2=true,-Dbzip2=false,bzip2"
+PACKAGECONFIG[cgroupv2] = "-Ddefault-hierarchy=unified,-Ddefault-hierarchy=hybrid"
+PACKAGECONFIG[coredump] = "-Dcoredump=true,-Dcoredump=false"
+PACKAGECONFIG[cryptsetup] = "-Dlibcryptsetup=true,-Dlibcryptsetup=false,cryptsetup,,cryptsetup"
+PACKAGECONFIG[cryptsetup-plugins] = "-Dlibcryptsetup-plugins=true,-Dlibcryptsetup-plugins=false,cryptsetup,,cryptsetup"
+PACKAGECONFIG[tpm2] = "-Dtpm2=true,-Dtpm2=false,tpm2-tss,tpm2-tss libtss2 libtss2-tcti-device"
+# If multiple compression libraries are enabled, the format to use for compression is chosen implicitly,
+# so if you want to compress with e.g. lz4 you cannot enable zstd, so you cannot read zstd-compressed journal files.
+# This option allows to enable all compression formats for reading, but choosing a specific one for writing.
+PACKAGECONFIG[default-compression-lz4] = "-Dlz4=true -Ddefault-compression=lz4,,lz4"
+PACKAGECONFIG[default-compression-xz] = "-Dxz=true -Ddefault-compression=xz,,xz"
+PACKAGECONFIG[default-compression-zstd] = "-Dzstd=true -Ddefault-compression=zstd,,zstd"
+PACKAGECONFIG[dbus] = "-Ddbus=true,-Ddbus=false,dbus"
+PACKAGECONFIG[efi] = "-Defi=true -Dbootloader=true,-Defi=false -Dbootloader=false,python3-pyelftools-native"
+PACKAGECONFIG[elfutils] = "-Delfutils=true,-Delfutils=false,elfutils,,libelf libdw"
+PACKAGECONFIG[firstboot] = "-Dfirstboot=true,-Dfirstboot=false"
+PACKAGECONFIG[repart] = "-Drepart=true,-Drepart=false"
+PACKAGECONFIG[homed] = "-Dhomed=true,-Dhomed=false"
+# Sign the journal for anti-tampering
+PACKAGECONFIG[gcrypt] = "-Dgcrypt=true,-Dgcrypt=false,libgcrypt"
+PACKAGECONFIG[gnutls] = "-Dgnutls=true,-Dgnutls=false,gnutls"
+PACKAGECONFIG[gshadow] = "-Dgshadow=true,-Dgshadow=false"
+PACKAGECONFIG[hibernate] = "-Dhibernate=true,-Dhibernate=false"
+PACKAGECONFIG[hostnamed] = "-Dhostnamed=true,-Dhostnamed=false"
+PACKAGECONFIG[idn] = "-Didn=true,-Didn=false"
+PACKAGECONFIG[ima] = "-Dima=true,-Dima=false"
+# importd requires journal-upload/xz/zlib/bzip2/gcrypt
+PACKAGECONFIG[importd] = "-Dimportd=true,-Dimportd=false,glib-2.0"
+# Update NAT firewall rules
+PACKAGECONFIG[iptc] = "-Dlibiptc=true,-Dlibiptc=false,iptables"
+PACKAGECONFIG[journal-color] = ",,,less"
+PACKAGECONFIG[journal-upload] = "-Dlibcurl=true,-Dlibcurl=false,curl"
+PACKAGECONFIG[kmod] = "-Dkmod=true,-Dkmod=false,kmod"
+PACKAGECONFIG[ldconfig] = "-Dldconfig=true,-Dldconfig=false,,ldconfig"
+PACKAGECONFIG[libidn] = "-Dlibidn=true,-Dlibidn=false,libidn,,libidn"
+PACKAGECONFIG[libidn2] = "-Dlibidn2=true,-Dlibidn2=false,libidn2,,libidn2"
+# Link udev shared with systemd helper library.
+# If enabled the udev package depends on the systemd package (which has the needed shared library).
+PACKAGECONFIG[link-udev-shared] = "-Dlink-udev-shared=true,-Dlink-udev-shared=false"
+PACKAGECONFIG[localed] = "-Dlocaled=true,-Dlocaled=false"
+PACKAGECONFIG[logind] = "-Dlogind=true,-Dlogind=false"
+PACKAGECONFIG[lz4] = "-Dlz4=true,-Dlz4=false,lz4"
+PACKAGECONFIG[machined] = "-Dmachined=true,-Dmachined=false"
+PACKAGECONFIG[manpages] = "-Dman=true,-Dman=false,libxslt-native xmlto-native docbook-xml-dtd4-native docbook-xsl-stylesheets-native"
+PACKAGECONFIG[microhttpd] = "-Dmicrohttpd=true,-Dmicrohttpd=false,libmicrohttpd"
+PACKAGECONFIG[myhostname] = "-Dnss-myhostname=true,-Dnss-myhostname=false,,libnss-myhostname"
+PACKAGECONFIG[networkd] = "-Dnetworkd=true,-Dnetworkd=false"
+PACKAGECONFIG[no-dns-fallback] = "-Ddns-servers="
+PACKAGECONFIG[no-ntp-fallback] = "-Dntp-servers="
+PACKAGECONFIG[nss] = "-Dnss-systemd=true,-Dnss-systemd=false,,libnss-systemd"
+PACKAGECONFIG[nss-mymachines] = "-Dnss-mymachines=true,-Dnss-mymachines=false"
+PACKAGECONFIG[nss-resolve] = "-Dnss-resolve=true,-Dnss-resolve=false"
+PACKAGECONFIG[oomd] = "-Doomd=true,-Doomd=false"
+PACKAGECONFIG[openssl] = "-Dopenssl=true,-Dopenssl=false,openssl"
+PACKAGECONFIG[p11kit] = "-Dp11kit=true,-Dp11kit=false,p11-kit"
+PACKAGECONFIG[pam] = "-Dpam=true,-Dpam=false,libpam,${PAM_PLUGINS}"
+PACKAGECONFIG[pcre2] = "-Dpcre2=true,-Dpcre2=false,libpcre2"
+PACKAGECONFIG[polkit] = "-Dpolkit=true,-Dpolkit=false"
+# If polkit is disabled and networkd+hostnamed are in use, enabling this option and
+# using dbus-broker will allow networkd to be authorized to change the
+# hostname without acquiring additional privileges
+PACKAGECONFIG[polkit_hostnamed_fallback] = ",,,,dbus-broker,polkit"
+PACKAGECONFIG[portabled] = "-Dportabled=true,-Dportabled=false"
+PACKAGECONFIG[pstore] = "-Dpstore=true,-Dpstore=false"
+PACKAGECONFIG[pni-names] = ",,,"
+PACKAGECONFIG[qrencode] = "-Dqrencode=true,-Dqrencode=false,qrencode,,qrencode"
+PACKAGECONFIG[quotacheck] = "-Dquotacheck=true,-Dquotacheck=false"
+PACKAGECONFIG[randomseed] = "-Drandomseed=true,-Drandomseed=false"
+PACKAGECONFIG[resolved] = "-Dresolve=true,-Dresolve=false"
+PACKAGECONFIG[rfkill] = "-Drfkill=true,-Drfkill=false"
+PACKAGECONFIG[seccomp] = "-Dseccomp=true,-Dseccomp=false,libseccomp"
+PACKAGECONFIG[selinux] = "-Dselinux=true,-Dselinux=false,libselinux,initscripts-sushell"
+PACKAGECONFIG[smack] = "-Dsmack=true,-Dsmack=false"
+PACKAGECONFIG[sysext] = "-Dsysext=true, -Dsysext=false"
+PACKAGECONFIG[sysusers] = "-Dsysusers=true,-Dsysusers=false"
+PACKAGECONFIG[sysvinit] = "-Dsysvinit-path=${sysconfdir}/init.d -Dsysvrcnd-path=${sysconfdir},-Dsysvinit-path= -Dsysvrcnd-path=,,systemd-compat-units update-rc.d"
+# When enabled use reproducible build timestamp if set as time epoch,
+# or build time if not. When disabled, time epoch is unset.
+def build_epoch(d):
+ epoch = d.getVar('SOURCE_DATE_EPOCH') or "-1"
+ return '-Dtime-epoch=%d' % int(epoch)
+PACKAGECONFIG[set-time-epoch] = "${@build_epoch(d)},-Dtime-epoch=0"
+PACKAGECONFIG[timedated] = "-Dtimedated=true,-Dtimedated=false"
+PACKAGECONFIG[timesyncd] = "-Dtimesyncd=true,-Dtimesyncd=false"
+PACKAGECONFIG[usrmerge] = "-Dsplit-usr=false,-Dsplit-usr=true"
+PACKAGECONFIG[sbinmerge] = "-Dsplit-bin=false,-Dsplit-bin=true"
+PACKAGECONFIG[userdb] = "-Duserdb=true,-Duserdb=false"
+PACKAGECONFIG[utmp] = "-Dutmp=true,-Dutmp=false"
+PACKAGECONFIG[valgrind] = "-DVALGRIND=1,,valgrind"
+PACKAGECONFIG[vconsole] = "-Dvconsole=true,-Dvconsole=false,,${PN}-vconsole-setup"
+PACKAGECONFIG[wheel-group] = "-Dwheel-group=true, -Dwheel-group=false"
+PACKAGECONFIG[xdg-autostart] = "-Dxdg-autostart=true,-Dxdg-autostart=false"
+# Verify keymaps on locale change
+PACKAGECONFIG[xkbcommon] = "-Dxkbcommon=true,-Dxkbcommon=false,libxkbcommon"
+PACKAGECONFIG[xz] = "-Dxz=true,-Dxz=false,xz"
+PACKAGECONFIG[zlib] = "-Dzlib=true,-Dzlib=false,zlib"
+PACKAGECONFIG[zstd] = "-Dzstd=true,-Dzstd=false,zstd"
+
+RESOLV_CONF ??= ""
+
+# Helper variables to clarify locations. This mirrors the logic in systemd's
+# build system.
+rootprefix ?= "${root_prefix}"
+rootlibdir ?= "${base_libdir}"
+rootlibexecdir = "${rootprefix}/lib"
+
+EXTRA_OEMESON += "-Dnobody-user=nobody \
+ -Dnobody-group=nogroup \
+ -Drootlibdir=${rootlibdir} \
+ -Drootprefix=${rootprefix} \
+ -Ddefault-locale=C \
+ -Dmode=release \
+ -Dsystem-alloc-uid-min=101 \
+ -Dsystem-uid-max=999 \
+ -Dsystem-alloc-gid-min=101 \
+ -Dsystem-gid-max=999 \
+ -Dcreate-log-dirs=false \
+ ${@bb.utils.contains('DISTRO_FEATURES', 'zeroconf', '-Ddefault-mdns=no -Ddefault-llmnr=no', '', d)} \
+ "
+
+# Hardcode target binary paths to avoid using paths from sysroot or worse
+# it pokes for these binaries on build host and encodes that distro assumption
+# into target
+EXTRA_OEMESON += "-Dkexec-path=${sbindir}/kexec \
+ -Dkmod-path=${base_bindir}/kmod \
+ -Dmount-path=${base_bindir}/mount \
+ -Dquotacheck-path=${sbindir}/quotacheck \
+ -Dquotaon-path=${sbindir}/quotaon \
+ -Dsulogin-path=${base_sbindir}/sulogin \
+ -Dnologin-path=${base_sbindir}/nologin \
+ -Dumount-path=${base_bindir}/umount \
+ -Dloadkeys-path=${bindir}/loadkeys \
+ -Dsetfont-path=${bindir}/setfont"
+
+# The 60 seconds is watchdog's default vaule.
+WATCHDOG_TIMEOUT ??= "60"
+
+do_install() {
+ meson_do_install
+ # Change the root user's home directory in /lib/sysusers.d/basic.conf.
+ # This is done merely for backward compatibility with previous systemd recipes.
+ # systemd hardcodes root user's HOME to be "/root". Changing to use other values
+ # may have unexpected runtime behaviors.
+ if [ "${ROOT_HOME}" != "/root" ]; then
+ bbwarn "Using ${ROOT_HOME} as root user's home directory is not fully supported by systemd"
+ sed -i -e 's#/root#${ROOT_HOME}#g' ${D}${exec_prefix}/lib/sysusers.d/basic.conf
+ fi
+ install -d ${D}/${base_sbindir}
+ if ${@bb.utils.contains('PACKAGECONFIG', 'serial-getty-generator', 'false', 'true', d)}; then
+ # Provided by a separate recipe
+ rm ${D}${systemd_system_unitdir}/serial-getty* -f
+ fi
+
+ # Provide support for initramfs
+ [ ! -e ${D}/init ] && ln -s ${rootlibexecdir}/systemd/systemd ${D}/init
+ [ ! -e ${D}/${base_sbindir}/udevd ] && ln -s ${rootlibexecdir}/systemd/systemd-udevd ${D}/${base_sbindir}/udevd
+
+ install -d ${D}${sysconfdir}/udev/rules.d/
+ install -d ${D}${nonarch_libdir}/tmpfiles.d
+ for rule in $(find ${WORKDIR} -maxdepth 1 -type f -name "*.rules"); do
+ install -m 0644 $rule ${D}${sysconfdir}/udev/rules.d/
+ done
+
+ install -m 0644 ${WORKDIR}/00-create-volatile.conf ${D}${nonarch_libdir}/tmpfiles.d/
+
+ if ${@bb.utils.contains('DISTRO_FEATURES','sysvinit','true','false',d)}; then
+ install -d ${D}${sysconfdir}/init.d
+ install -m 0755 ${WORKDIR}/init ${D}${sysconfdir}/init.d/systemd-udevd
+ sed -i s%@UDEVD@%${rootlibexecdir}/systemd/systemd-udevd% ${D}${sysconfdir}/init.d/systemd-udevd
+ install -Dm 0755 ${S}/src/systemctl/systemd-sysv-install.SKELETON ${D}${systemd_unitdir}/systemd-sysv-install
+ fi
+
+ if "${@'true' if oe.types.boolean(d.getVar('VOLATILE_LOG_DIR')) else 'false'}"; then
+ # /var/log is typically a symbolic link to inside /var/volatile,
+ # which is expected to be empty.
+ rm -rf ${D}${localstatedir}/log
+ elif [ -e ${D}${localstatedir}/log/journal ]; then
+ chown root:systemd-journal ${D}${localstatedir}/log/journal
+
+ # journal-remote creates this at start
+ rm -rf ${D}${localstatedir}/log/journal/remote
+ fi
+
+ # if the user requests /tmp be on persistent storage (i.e. not volatile)
+ # then don't use a tmpfs for /tmp
+ if [ "${VOLATILE_TMP_DIR}" != "yes" ]; then
+ rm -f ${D}${rootlibdir}/systemd/system/tmp.mount
+ rm -f ${D}${rootlibdir}/systemd/system/local-fs.target.wants/tmp.mount
+ fi
+
+ install -d ${D}${systemd_system_unitdir}/graphical.target.wants
+ install -d ${D}${systemd_system_unitdir}/multi-user.target.wants
+ install -d ${D}${systemd_system_unitdir}/poweroff.target.wants
+ install -d ${D}${systemd_system_unitdir}/reboot.target.wants
+ install -d ${D}${systemd_system_unitdir}/rescue.target.wants
+
+ # Create symlinks for systemd-update-utmp-runlevel.service
+ if ${@bb.utils.contains('PACKAGECONFIG', 'utmp', 'true', 'false', d)}; then
+ ln -sf ../systemd-update-utmp-runlevel.service ${D}${systemd_system_unitdir}/graphical.target.wants/systemd-update-utmp-runlevel.service
+ ln -sf ../systemd-update-utmp-runlevel.service ${D}${systemd_system_unitdir}/multi-user.target.wants/systemd-update-utmp-runlevel.service
+ ln -sf ../systemd-update-utmp-runlevel.service ${D}${systemd_system_unitdir}/poweroff.target.wants/systemd-update-utmp-runlevel.service
+ ln -sf ../systemd-update-utmp-runlevel.service ${D}${systemd_system_unitdir}/reboot.target.wants/systemd-update-utmp-runlevel.service
+ ln -sf ../systemd-update-utmp-runlevel.service ${D}${systemd_system_unitdir}/rescue.target.wants/systemd-update-utmp-runlevel.service
+ fi
+
+ # this file is needed to exist if networkd is disabled but timesyncd is still in use since timesyncd checks it
+ # for existence else it fails
+ if [ -s ${D}${exec_prefix}/lib/tmpfiles.d/systemd.conf ] &&
+ ! ${@bb.utils.contains('PACKAGECONFIG', 'networkd', 'true', 'false', d)}; then
+ echo 'd /run/systemd/netif/links 0755 root root -' >>${D}${exec_prefix}/lib/tmpfiles.d/systemd.conf
+ fi
+ if ! ${@bb.utils.contains('PACKAGECONFIG', 'resolved', 'true', 'false', d)}; then
+ echo 'L! ${sysconfdir}/resolv.conf - - - - ../run/systemd/resolve/resolv.conf' >>${D}${exec_prefix}/lib/tmpfiles.d/etc.conf
+ echo 'd /run/systemd/resolve 0755 root root -' >>${D}${exec_prefix}/lib/tmpfiles.d/systemd.conf
+ echo 'f /run/systemd/resolve/resolv.conf 0644 root root' >>${D}${exec_prefix}/lib/tmpfiles.d/systemd.conf
+ ln -s ../run/systemd/resolve/resolv.conf ${D}${sysconfdir}/resolv-conf.systemd
+ else
+ resolv_conf="${@bb.utils.contains('RESOLV_CONF', 'stub-resolv', 'run/systemd/resolve/stub-resolv.conf', 'run/systemd/resolve/resolv.conf', d)}"
+ sed -i -e "s%^L! /etc/resolv.conf.*$%L! /etc/resolv.conf - - - - ../${resolv_conf}%g" ${D}${exec_prefix}/lib/tmpfiles.d/etc.conf
+ ln -s ../${resolv_conf} ${D}${sysconfdir}/resolv-conf.systemd
+ fi
+ if ${@bb.utils.contains('DISTRO_FEATURES', 'x11', 'false', 'true', d)}; then
+ rm ${D}${exec_prefix}/lib/tmpfiles.d/x11.conf
+ rm -r ${D}${sysconfdir}/X11
+ fi
+
+ # If polkit is setup fixup permissions and ownership
+ if ${@bb.utils.contains('PACKAGECONFIG', 'polkit', 'true', 'false', d)}; then
+ if [ -d ${D}${datadir}/polkit-1/rules.d ]; then
+ chmod 700 ${D}${datadir}/polkit-1/rules.d
+ chown polkitd:root ${D}${datadir}/polkit-1/rules.d
+ fi
+ fi
+
+ # If polkit is not available and a fallback was requested, install a drop-in that allows networkd to
+ # request hostname changes via DBUS without elevating its privileges
+ if ${@bb.utils.contains('PACKAGECONFIG', 'polkit_hostnamed_fallback', 'true', 'false', d)}; then
+ install -d ${D}${systemd_system_unitdir}/systemd-hostnamed.service.d/
+ install -m 0644 ${WORKDIR}/00-hostnamed-network-user.conf ${D}${systemd_system_unitdir}/systemd-hostnamed.service.d/
+ install -d ${D}${datadir}/dbus-1/system.d/
+ install -m 0644 ${WORKDIR}/org.freedesktop.hostname1_no_polkit.conf ${D}${datadir}/dbus-1/system.d/
+ fi
+
+ # create link for existing udev rules
+ ln -s ${base_bindir}/udevadm ${D}${base_sbindir}/udevadm
+
+ # install default policy for presets
+ # https://www.freedesktop.org/wiki/Software/systemd/Preset/#howto
+ install -Dm 0644 ${WORKDIR}/99-default.preset ${D}${systemd_unitdir}/system-preset/99-default.preset
+
+ # add a profile fragment to disable systemd pager with busybox less
+ install -Dm 0644 ${WORKDIR}/systemd-pager.sh ${D}${sysconfdir}/profile.d/systemd-pager.sh
+
+ if [ -n "${WATCHDOG_TIMEOUT}" ]; then
+ sed -i -e 's/#RebootWatchdogSec=10min/RebootWatchdogSec=${WATCHDOG_TIMEOUT}/' \
+ ${D}/${sysconfdir}/systemd/system.conf
+ fi
+
+ if ${@bb.utils.contains('PACKAGECONFIG', 'pni-names', 'true', 'false', d)}; then
+ if ! grep -q '^NamePolicy=.*mac' ${D}${rootlibexecdir}/systemd/network/99-default.link; then
+ sed -i '/^NamePolicy=/s/$/ mac/' ${D}${rootlibexecdir}/systemd/network/99-default.link
+ fi
+ if ! grep -q 'AlternativeNamesPolicy=.*mac' ${D}${rootlibexecdir}/systemd/network/99-default.link; then
+ sed -i '/AlternativeNamesPolicy=/s/$/ mac/' ${D}${rootlibexecdir}/systemd/network/99-default.link
+ fi
+ fi
+}
+
+python populate_packages:prepend (){
+ systemdlibdir = d.getVar("rootlibdir")
+ do_split_packages(d, systemdlibdir, r'^lib(.*)\.so\.*', 'lib%s', 'Systemd %s library', extra_depends='', allow_links=True)
+}
+PACKAGES_DYNAMIC += "^lib(udev|systemd|nss).*"
+
+PACKAGE_BEFORE_PN = "\
+ ${PN}-analyze \
+ ${PN}-binfmt \
+ ${PN}-container \
+ ${PN}-crypt \
+ ${PN}-extra-utils \
+ ${PN}-gui \
+ ${PN}-initramfs \
+ ${PN}-journal-gatewayd \
+ ${PN}-journal-upload \
+ ${PN}-journal-remote \
+ ${PN}-kernel-install \
+ ${PN}-rpm-macros \
+ ${PN}-udev-rules \
+ ${PN}-vconsole-setup \
+ ${PN}-zsh-completion \
+ libsystemd-shared \
+ udev \
+ udev-bash-completion \
+ udev-hwdb \
+"
+
+SUMMARY:${PN}-container = "Tools for containers and VMs"
+DESCRIPTION:${PN}-container = "Systemd tools to spawn and manage containers and virtual machines."
+
+SUMMARY:${PN}-journal-gatewayd = "HTTP server for journal events"
+DESCRIPTION:${PN}-journal-gatewayd = "systemd-journal-gatewayd serves journal events over the network. Clients must connect using HTTP. The server listens on port 19531 by default."
+
+SUMMARY:${PN}-journal-upload = "Send journal messages over the network"
+DESCRIPTION:${PN}-journal-upload = "systemd-journal-upload uploads journal entries to a specified URL."
+
+SUMMARY:${PN}-journal-remote = "Receive journal messages over the network"
+DESCRIPTION:${PN}-journal-remote = "systemd-journal-remote is a command to receive serialized journal events and store them to journal files."
+
+SUMMARY:libsystemd-shared = "Systemd shared library"
+
+SYSTEMD_PACKAGES = "${@bb.utils.contains('PACKAGECONFIG', 'binfmt', '${PN}-binfmt', '', d)} \
+ ${@bb.utils.contains('PACKAGECONFIG', 'microhttpd', '${PN}-journal-gatewayd', '', d)} \
+ ${@bb.utils.contains('PACKAGECONFIG', 'microhttpd', '${PN}-journal-remote', '', d)} \
+ ${@bb.utils.contains('PACKAGECONFIG', 'journal-upload', '${PN}-journal-upload', '', d)} \
+"
+SYSTEMD_SERVICE:${PN}-binfmt = "systemd-binfmt.service"
+
+USERADD_PACKAGES = "${PN} \
+ udev \
+ ${@bb.utils.contains('PACKAGECONFIG', 'microhttpd', '${PN}-journal-gatewayd', '', d)} \
+ ${@bb.utils.contains('PACKAGECONFIG', 'microhttpd', '${PN}-journal-remote', '', d)} \
+ ${@bb.utils.contains('PACKAGECONFIG', 'journal-upload', '${PN}-journal-upload', '', d)} \
+"
+GROUPADD_PARAM:${PN} = "-r systemd-journal;"
+GROUPADD_PARAM:udev = "-r render"
+GROUPADD_PARAM:${PN} += "${@bb.utils.contains('PACKAGECONFIG', 'polkit_hostnamed_fallback', '-r systemd-hostname;', '', d)}"
+USERADD_PARAM:${PN} += "${@bb.utils.contains('PACKAGECONFIG', 'coredump', '--system -d / -M --shell /sbin/nologin systemd-coredump;', '', d)}"
+USERADD_PARAM:${PN} += "${@bb.utils.contains('PACKAGECONFIG', 'networkd', '--system -d / -M --shell /sbin/nologin systemd-network;', '', d)}"
+USERADD_PARAM:${PN} += "${@bb.utils.contains('PACKAGECONFIG', 'polkit', '--system --no-create-home --user-group --home-dir ${sysconfdir}/polkit-1 polkitd;', '', d)}"
+USERADD_PARAM:${PN} += "${@bb.utils.contains('PACKAGECONFIG', 'resolved', '--system -d / -M --shell /sbin/nologin systemd-resolve;', '', d)}"
+USERADD_PARAM:${PN} += "${@bb.utils.contains('PACKAGECONFIG', 'timesyncd', '--system -d / -M --shell /sbin/nologin systemd-timesync;', '', d)}"
+USERADD_PARAM:${PN} += "${@bb.utils.contains('PACKAGECONFIG', 'oomd', '--system -d / -M --shell /sbin/nologin systemd-oom;', '', d)}"
+USERADD_PARAM:${PN}-journal-gatewayd = "--system -d / -M --shell /sbin/nologin systemd-journal-gateway"
+USERADD_PARAM:${PN}-journal-remote = "--system -d / -M --shell /sbin/nologin systemd-journal-remote"
+USERADD_PARAM:${PN}-journal-upload = "--system -d / -M --shell /sbin/nologin systemd-journal-upload"
+
+FILES:${PN}-analyze = "${bindir}/systemd-analyze"
+
+FILES:${PN}-crypt = "${bindir}/systemd-cryptenroll \
+ ${libdir}/cryptsetup \
+ "
+RRECOMMENDS:${PN} += "${PN}-crypt"
+
+FILES:${PN}-initramfs = "/init"
+RDEPENDS:${PN}-initramfs = "${PN}"
+
+FILES:${PN}-gui = "${bindir}/systemadm"
+
+FILES:${PN}-vconsole-setup = "${rootlibexecdir}/systemd/systemd-vconsole-setup \
+ ${systemd_system_unitdir}/systemd-vconsole-setup.service \
+ ${systemd_system_unitdir}/sysinit.target.wants/systemd-vconsole-setup.service"
+
+RDEPENDS:${PN}-kernel-install += "bash"
+FILES:${PN}-kernel-install = "${bindir}/kernel-install \
+ ${sysconfdir}/kernel/ \
+ ${exec_prefix}/lib/kernel \
+ "
+FILES:${PN}-rpm-macros = "${exec_prefix}/lib/rpm \
+ "
+
+FILES:${PN}-zsh-completion = "${datadir}/zsh/site-functions"
+
+FILES:${PN}-binfmt = "${sysconfdir}/binfmt.d/ \
+ ${exec_prefix}/lib/binfmt.d \
+ ${rootlibexecdir}/systemd/systemd-binfmt \
+ ${systemd_system_unitdir}/proc-sys-fs-binfmt_misc.* \
+ ${systemd_system_unitdir}/systemd-binfmt.service"
+RRECOMMENDS:${PN}-binfmt = "${@bb.utils.contains('PACKAGECONFIG', 'binfmt', 'kernel-module-binfmt-misc', '', d)}"
+
+RDEPENDS:${PN}-vconsole-setup = "${@bb.utils.contains('PACKAGECONFIG', 'vconsole', 'kbd kbd-consolefonts kbd-keymaps', '', d)}"
+
+
+FILES:${PN}-journal-gatewayd = "${rootlibexecdir}/systemd/systemd-journal-gatewayd \
+ ${systemd_system_unitdir}/systemd-journal-gatewayd.service \
+ ${systemd_system_unitdir}/systemd-journal-gatewayd.socket \
+ ${systemd_system_unitdir}/sockets.target.wants/systemd-journal-gatewayd.socket \
+ ${datadir}/systemd/gatewayd/browse.html \
+ "
+SYSTEMD_SERVICE:${PN}-journal-gatewayd = "systemd-journal-gatewayd.socket"
+
+FILES:${PN}-journal-upload = "${rootlibexecdir}/systemd/systemd-journal-upload \
+ ${systemd_system_unitdir}/systemd-journal-upload.service \
+ ${sysconfdir}/systemd/journal-upload.conf \
+ "
+SYSTEMD_SERVICE:${PN}-journal-upload = "systemd-journal-upload.service"
+
+FILES:${PN}-journal-remote = "${rootlibexecdir}/systemd/systemd-journal-remote \
+ ${sysconfdir}/systemd/journal-remote.conf \
+ ${systemd_system_unitdir}/systemd-journal-remote.service \
+ ${systemd_system_unitdir}/systemd-journal-remote.socket \
+ "
+SYSTEMD_SERVICE:${PN}-journal-remote = "systemd-journal-remote.socket"
+
+
+FILES:${PN}-container = "${sysconfdir}/dbus-1/system.d/org.freedesktop.import1.conf \
+ ${sysconfdir}/dbus-1/system.d/org.freedesktop.machine1.conf \
+ ${sysconfdir}/systemd/system/multi-user.target.wants/machines.target \
+ ${base_bindir}/machinectl \
+ ${bindir}/systemd-nspawn \
+ ${nonarch_libdir}/systemd/import-pubring.gpg \
+ ${systemd_system_unitdir}/busnames.target.wants/org.freedesktop.import1.busname \
+ ${systemd_system_unitdir}/busnames.target.wants/org.freedesktop.machine1.busname \
+ ${systemd_system_unitdir}/local-fs.target.wants/var-lib-machines.mount \
+ ${systemd_system_unitdir}/machines.target.wants/var-lib-machines.mount \
+ ${systemd_system_unitdir}/remote-fs.target.wants/var-lib-machines.mount \
+ ${systemd_system_unitdir}/machine.slice \
+ ${systemd_system_unitdir}/machines.target \
+ ${systemd_system_unitdir}/org.freedesktop.import1.busname \
+ ${systemd_system_unitdir}/org.freedesktop.machine1.busname \
+ ${systemd_system_unitdir}/systemd-importd.service \
+ ${systemd_system_unitdir}/systemd-machined.service \
+ ${systemd_system_unitdir}/dbus-org.freedesktop.machine1.service \
+ ${systemd_system_unitdir}/var-lib-machines.mount \
+ ${rootlibexecdir}/systemd/systemd-import \
+ ${rootlibexecdir}/systemd/systemd-importd \
+ ${rootlibexecdir}/systemd/systemd-machined \
+ ${rootlibexecdir}/systemd/systemd-pull \
+ ${exec_prefix}/lib/tmpfiles.d/systemd-nspawn.conf \
+ ${exec_prefix}/lib/tmpfiles.d/README \
+ ${systemd_system_unitdir}/systemd-nspawn@.service \
+ ${datadir}/dbus-1/system-services/org.freedesktop.import1.service \
+ ${datadir}/dbus-1/system-services/org.freedesktop.machine1.service \
+ ${datadir}/dbus-1/system.d/org.freedesktop.import1.conf \
+ ${datadir}/dbus-1/system.d/org.freedesktop.machine1.conf \
+ ${datadir}/polkit-1/actions/org.freedesktop.import1.policy \
+ ${datadir}/polkit-1/actions/org.freedesktop.machine1.policy \
+ "
+
+RDEPENDS:${PN}-container = "${@bb.utils.contains('PACKAGECONFIG', 'nss-mymachines', 'libnss-mymachines', '', d)}"
+
+# "machinectl import-tar" uses "tar --numeric-owner", not supported by busybox.
+RRECOMMENDS:${PN}-container += "\
+ ${PN}-journal-gatewayd \
+ ${PN}-journal-remote \
+ ${PN}-journal-upload \
+ kernel-module-dm-mod \
+ kernel-module-loop \
+ kernel-module-tun \
+ tar \
+ "
+
+FILES:${PN}-extra-utils = "\
+ ${base_bindir}/systemd-escape \
+ ${base_bindir}/systemd-inhibit \
+ ${bindir}/systemd-detect-virt \
+ ${bindir}/systemd-dissect \
+ ${bindir}/systemd-path \
+ ${bindir}/systemd-run \
+ ${bindir}/systemd-cat \
+ ${bindir}/systemd-creds \
+ ${bindir}/systemd-delta \
+ ${bindir}/systemd-cgls \
+ ${bindir}/systemd-cgtop \
+ ${bindir}/systemd-stdio-bridge \
+ ${base_bindir}/systemd-ask-password \
+ ${base_bindir}/systemd-tty-ask-password-agent \
+ ${base_sbindir}/mount.ddi \
+ ${systemd_system_unitdir}/initrd.target.wants/systemd-pcrphase-initrd.path \
+ ${systemd_system_unitdir}/systemd-ask-password-console.path \
+ ${systemd_system_unitdir}/systemd-ask-password-console.service \
+ ${systemd_system_unitdir}/systemd-ask-password-wall.path \
+ ${systemd_system_unitdir}/systemd-ask-password-wall.service \
+ ${systemd_system_unitdir}/sysinit.target.wants/systemd-ask-password-console.path \
+ ${systemd_system_unitdir}/sysinit.target.wants/systemd-ask-password-wall.path \
+ ${systemd_system_unitdir}/sysinit.target.wants/systemd-pcrphase.path \
+ ${systemd_system_unitdir}/sysinit.target.wants/systemd-pcrphase-sysinit.path \
+ ${systemd_system_unitdir}/multi-user.target.wants/systemd-ask-password-wall.path \
+ ${rootlibexecdir}/systemd/systemd-resolve-host \
+ ${rootlibexecdir}/systemd/systemd-ac-power \
+ ${rootlibexecdir}/systemd/systemd-activate \
+ ${rootlibexecdir}/systemd/systemd-measure \
+ ${rootlibexecdir}/systemd/systemd-pcrphase \
+ ${rootlibexecdir}/systemd/systemd-socket-proxyd \
+ ${rootlibexecdir}/systemd/systemd-reply-password \
+ ${rootlibexecdir}/systemd/systemd-sleep \
+ ${rootlibexecdir}/systemd/system-sleep \
+ ${systemd_system_unitdir}/systemd-hibernate.service \
+ ${systemd_system_unitdir}/systemd-hybrid-sleep.service \
+ ${systemd_system_unitdir}/systemd-pcrphase-initrd.service \
+ ${systemd_system_unitdir}/systemd-pcrphase.service \
+ ${systemd_system_unitdir}/systemd-pcrphase-sysinit.service \
+ ${systemd_system_unitdir}/systemd-suspend.service \
+ ${systemd_system_unitdir}/sleep.target \
+ ${rootlibexecdir}/systemd/systemd-initctl \
+ ${systemd_system_unitdir}/systemd-initctl.service \
+ ${systemd_system_unitdir}/systemd-initctl.socket \
+ ${systemd_system_unitdir}/sockets.target.wants/systemd-initctl.socket \
+ ${rootlibexecdir}/systemd/system-generators/systemd-gpt-auto-generator \
+ ${rootlibexecdir}/systemd/systemd-cgroups-agent \
+"
+
+FILES:${PN}-udev-rules = "\
+ ${rootlibexecdir}/udev/rules.d/70-uaccess.rules \
+ ${rootlibexecdir}/udev/rules.d/71-seat.rules \
+ ${rootlibexecdir}/udev/rules.d/73-seat-late.rules \
+ ${rootlibexecdir}/udev/rules.d/99-systemd.rules \
+"
+
+CONFFILES:${PN} = "${sysconfdir}/systemd/coredump.conf \
+ ${sysconfdir}/systemd/journald.conf \
+ ${sysconfdir}/systemd/logind.conf \
+ ${sysconfdir}/systemd/networkd.conf \
+ ${sysconfdir}/systemd/pstore.conf \
+ ${sysconfdir}/systemd/resolved.conf \
+ ${sysconfdir}/systemd/sleep.conf \
+ ${sysconfdir}/systemd/system.conf \
+ ${sysconfdir}/systemd/timesyncd.conf \
+ ${sysconfdir}/systemd/user.conf \
+"
+
+FILES:${PN} = " ${base_bindir}/* \
+ ${base_sbindir}/shutdown \
+ ${base_sbindir}/halt \
+ ${base_sbindir}/poweroff \
+ ${base_sbindir}/runlevel \
+ ${base_sbindir}/telinit \
+ ${base_sbindir}/resolvconf \
+ ${base_sbindir}/reboot \
+ ${base_sbindir}/init \
+ ${datadir}/dbus-1/services \
+ ${datadir}/dbus-1/system-services \
+ ${datadir}/polkit-1 \
+ ${datadir}/${BPN} \
+ ${datadir}/factory \
+ ${sysconfdir}/credstore/ \
+ ${sysconfdir}/credstore.encrypted/ \
+ ${sysconfdir}/dbus-1/ \
+ ${sysconfdir}/modules-load.d/ \
+ ${sysconfdir}/pam.d/ \
+ ${sysconfdir}/profile.d/ \
+ ${sysconfdir}/sysctl.d/ \
+ ${sysconfdir}/systemd/ \
+ ${sysconfdir}/tmpfiles.d/ \
+ ${sysconfdir}/xdg/ \
+ ${sysconfdir}/init.d/README \
+ ${sysconfdir}/resolv-conf.systemd \
+ ${sysconfdir}/X11/xinit/xinitrc.d/* \
+ ${rootlibexecdir}/systemd/* \
+ ${rootlibdir}/systemd/libsystemd-core* \
+ ${libdir}/pam.d \
+ ${nonarch_libdir}/pam.d \
+ ${systemd_unitdir}/* \
+ ${base_libdir}/security/*.so \
+ /cgroup \
+ ${bindir}/systemd* \
+ ${bindir}/busctl \
+ ${bindir}/coredumpctl \
+ ${bindir}/localectl \
+ ${bindir}/hostnamectl \
+ ${bindir}/resolvectl \
+ ${bindir}/timedatectl \
+ ${bindir}/bootctl \
+ ${bindir}/oomctl \
+ ${bindir}/userdbctl \
+ ${exec_prefix}/lib/credstore \
+ ${exec_prefix}/lib/tmpfiles.d/*.conf \
+ ${exec_prefix}/lib/systemd \
+ ${exec_prefix}/lib/modules-load.d \
+ ${exec_prefix}/lib/sysctl.d \
+ ${exec_prefix}/lib/sysusers.d \
+ ${exec_prefix}/lib/environment.d \
+ ${exec_prefix}/lib/pcrlock.d \
+ ${localstatedir} \
+ ${rootlibexecdir}/modprobe.d/systemd.conf \
+ ${rootlibexecdir}/modprobe.d/README \
+ ${datadir}/dbus-1/system.d/org.freedesktop.timedate1.conf \
+ ${datadir}/dbus-1/system.d/org.freedesktop.locale1.conf \
+ ${datadir}/dbus-1/system.d/org.freedesktop.network1.conf \
+ ${datadir}/dbus-1/system.d/org.freedesktop.resolve1.conf \
+ ${datadir}/dbus-1/system.d/org.freedesktop.systemd1.conf \
+ ${@bb.utils.contains('PACKAGECONFIG', 'polkit_hostnamed_fallback', '${datadir}/dbus-1/system.d/org.freedesktop.hostname1_no_polkit.conf', '', d)} \
+ ${datadir}/dbus-1/system.d/org.freedesktop.hostname1.conf \
+ ${datadir}/dbus-1/system.d/org.freedesktop.login1.conf \
+ ${datadir}/dbus-1/system.d/org.freedesktop.timesync1.conf \
+ ${datadir}/dbus-1/system.d/org.freedesktop.portable1.conf \
+ ${datadir}/dbus-1/system.d/org.freedesktop.oom1.conf \
+ ${datadir}/dbus-1/system.d/org.freedesktop.home1.conf \
+ "
+
+FILES:${PN}-dev += "${base_libdir}/security/*.la ${datadir}/dbus-1/interfaces/ ${sysconfdir}/rpm/macros.systemd"
+
+RDEPENDS:${PN} += "kmod dbus util-linux-mount util-linux-umount udev (= ${EXTENDPKGV}) systemd-udev-rules util-linux-agetty util-linux-fsck util-linux-swaponoff"
+RDEPENDS:${PN} += "${@bb.utils.contains('PACKAGECONFIG', 'serial-getty-generator', '', 'systemd-serialgetty', d)}"
+RDEPENDS:${PN} += "volatile-binds"
+
+RRECOMMENDS:${PN} += "systemd-extra-utils \
+ udev-hwdb \
+ e2fsprogs-e2fsck \
+ kernel-module-autofs4 kernel-module-unix kernel-module-ipv6 kernel-module-sch-fq-codel \
+ os-release \
+ systemd-conf \
+ ${@bb.utils.contains('PACKAGECONFIG', 'logind', 'pam-plugin-umask', '', d)} \
+"
+
+INSANE_SKIP:${PN} += "dev-so libdir"
+INSANE_SKIP:${PN}-dbg += "libdir"
+INSANE_SKIP:${PN}-doc += " libdir"
+INSANE_SKIP:libsystemd-shared += "libdir"
+
+FILES:libsystemd-shared = "${rootlibdir}/systemd/libsystemd-shared*.so"
+
+RPROVIDES:udev = "hotplug"
+
+RDEPENDS:udev-bash-completion += "bash-completion"
+RDEPENDS:udev-hwdb += "udev"
+
+FILES:udev += "${base_sbindir}/udevd \
+ ${rootlibexecdir}/systemd/network/99-default.link \
+ ${rootlibexecdir}/systemd/systemd-udevd \
+ ${rootlibexecdir}/udev/accelerometer \
+ ${rootlibexecdir}/udev/ata_id \
+ ${rootlibexecdir}/udev/cdrom_id \
+ ${rootlibexecdir}/udev/collect \
+ ${rootlibexecdir}/udev/dmi_memory_id \
+ ${rootlibexecdir}/udev/fido_id \
+ ${rootlibexecdir}/udev/findkeyboards \
+ ${rootlibexecdir}/udev/iocost \
+ ${rootlibexecdir}/udev/keyboard-force-release.sh \
+ ${rootlibexecdir}/udev/keymap \
+ ${rootlibexecdir}/udev/mtd_probe \
+ ${rootlibexecdir}/udev/scsi_id \
+ ${rootlibexecdir}/udev/v4l_id \
+ ${rootlibexecdir}/udev/keymaps \
+ ${rootlibexecdir}/udev/rules.d/50-udev-default.rules \
+ ${rootlibexecdir}/udev/rules.d/60-autosuspend.rules \
+ ${rootlibexecdir}/udev/rules.d/60-autosuspend-chromiumos.rules \
+ ${rootlibexecdir}/udev/rules.d/60-block.rules \
+ ${rootlibexecdir}/udev/rules.d/60-cdrom_id.rules \
+ ${rootlibexecdir}/udev/rules.d/60-dmi-id.rules \
+ ${rootlibexecdir}/udev/rules.d/60-drm.rules \
+ ${rootlibexecdir}/udev/rules.d/60-evdev.rules \
+ ${rootlibexecdir}/udev/rules.d/60-fido-id.rules \
+ ${rootlibexecdir}/udev/rules.d/60-infiniband.rules \
+ ${rootlibexecdir}/udev/rules.d/60-input-id.rules \
+ ${rootlibexecdir}/udev/rules.d/60-persistent-alsa.rules \
+ ${rootlibexecdir}/udev/rules.d/60-persistent-input.rules \
+ ${rootlibexecdir}/udev/rules.d/60-persistent-storage.rules \
+ ${rootlibexecdir}/udev/rules.d/60-persistent-storage-mtd.rules \
+ ${rootlibexecdir}/udev/rules.d/60-persistent-storage-tape.rules \
+ ${rootlibexecdir}/udev/rules.d/60-persistent-v4l.rules \
+ ${rootlibexecdir}/udev/rules.d/60-sensor.rules \
+ ${rootlibexecdir}/udev/rules.d/60-serial.rules \
+ ${rootlibexecdir}/udev/rules.d/61-autosuspend-manual.rules \
+ ${rootlibexecdir}/udev/rules.d/64-btrfs.rules \
+ ${rootlibexecdir}/udev/rules.d/70-camera.rules \
+ ${rootlibexecdir}/udev/rules.d/70-joystick.rules \
+ ${rootlibexecdir}/udev/rules.d/70-memory.rules \
+ ${rootlibexecdir}/udev/rules.d/70-mouse.rules \
+ ${rootlibexecdir}/udev/rules.d/70-power-switch.rules \
+ ${rootlibexecdir}/udev/rules.d/70-touchpad.rules \
+ ${rootlibexecdir}/udev/rules.d/75-net-description.rules \
+ ${rootlibexecdir}/udev/rules.d/75-probe_mtd.rules \
+ ${rootlibexecdir}/udev/rules.d/78-sound-card.rules \
+ ${rootlibexecdir}/udev/rules.d/80-drivers.rules \
+ ${rootlibexecdir}/udev/rules.d/80-net-setup-link.rules \
+ ${rootlibexecdir}/udev/rules.d/81-net-dhcp.rules \
+ ${rootlibexecdir}/udev/rules.d/90-vconsole.rules \
+ ${rootlibexecdir}/udev/rules.d/90-iocost.rules \
+ ${rootlibexecdir}/udev/rules.d/README \
+ ${sysconfdir}/udev \
+ ${sysconfdir}/init.d/systemd-udevd \
+ ${systemd_system_unitdir}/*udev* \
+ ${systemd_system_unitdir}/*.wants/*udev* \
+ ${base_bindir}/systemd-hwdb \
+ ${base_bindir}/udevadm \
+ ${base_sbindir}/udevadm \
+ ${systemd_system_unitdir}/systemd-hwdb-update.service \
+ "
+
+FILES:udev-bash-completion = "${datadir}/bash-completion/completions/udevadm"
+FILES:udev-hwdb = "${rootlibexecdir}/udev/hwdb.d \
+ "
+
+RCONFLICTS:${PN} = "tiny-init ${@bb.utils.contains('PACKAGECONFIG', 'resolved', 'resolvconf', '', d)}"
+
+INITSCRIPT_PACKAGES = "udev"
+INITSCRIPT_NAME:udev = "systemd-udevd"
+INITSCRIPT_PARAMS:udev = "start 03 S ."
+
+python __anonymous() {
+ if not bb.utils.contains('DISTRO_FEATURES', 'sysvinit', True, False, d):
+ d.setVar("INHIBIT_UPDATERCD_BBCLASS", "1")
+
+ if bb.utils.contains('DISTRO_FEATURES', 'systemd-resolved', True, False, d) and not bb.utils.contains('PACKAGECONFIG', 'nss-resolve resolved', True, False, d):
+ bb.error("DISTRO_FEATURES[systemd-resolved] requires PACKAGECONFIG[nss-resolve, resolved]")
+
+ if bb.utils.contains('PACKAGECONFIG', 'repart', True, False, d) and not bb.utils.contains('PACKAGECONFIG', 'openssl', True, False, d):
+ bb.error("PACKAGECONFIG[repart] requires PACKAGECONFIG[openssl]")
+
+ if bb.utils.contains('PACKAGECONFIG', 'homed', True, False, d) and not bb.utils.contains('PACKAGECONFIG', 'userdb openssl cryptsetup', True, False, d):
+ bb.error("PACKAGECONFIG[homed] requires PACKAGECONFIG[userdb], PACKAGECONFIG[openssl] and PACKAGECONFIG[cryptsetup]")
+}
+
+python do_warn_musl() {
+ if d.getVar('TCLIBC') == "musl":
+ bb.warn("Using systemd with musl is not recommended since it is not supported upstream and some patches are known to be problematic.")
+}
+addtask warn_musl before do_configure
+
+ALTERNATIVE:${PN} = "halt reboot shutdown poweroff runlevel ${@bb.utils.contains('PACKAGECONFIG', 'resolved', 'resolv-conf', '', d)}"
+
+ALTERNATIVE_TARGET[resolv-conf] = "${sysconfdir}/resolv-conf.systemd"
+ALTERNATIVE_LINK_NAME[resolv-conf] = "${sysconfdir}/resolv.conf"
+ALTERNATIVE_PRIORITY[resolv-conf] ?= "50"
+
+ALTERNATIVE_TARGET[halt] = "${base_bindir}/systemctl"
+ALTERNATIVE_LINK_NAME[halt] = "${base_sbindir}/halt"
+ALTERNATIVE_PRIORITY[halt] ?= "300"
+
+ALTERNATIVE_TARGET[reboot] = "${base_bindir}/systemctl"
+ALTERNATIVE_LINK_NAME[reboot] = "${base_sbindir}/reboot"
+ALTERNATIVE_PRIORITY[reboot] ?= "300"
+
+ALTERNATIVE_TARGET[shutdown] = "${base_bindir}/systemctl"
+ALTERNATIVE_LINK_NAME[shutdown] = "${base_sbindir}/shutdown"
+ALTERNATIVE_PRIORITY[shutdown] ?= "300"
+
+ALTERNATIVE_TARGET[poweroff] = "${base_bindir}/systemctl"
+ALTERNATIVE_LINK_NAME[poweroff] = "${base_sbindir}/poweroff"
+ALTERNATIVE_PRIORITY[poweroff] ?= "300"
+
+ALTERNATIVE_TARGET[runlevel] = "${base_bindir}/systemctl"
+ALTERNATIVE_LINK_NAME[runlevel] = "${base_sbindir}/runlevel"
+ALTERNATIVE_PRIORITY[runlevel] ?= "300"
+
+pkg_postinst:${PN}:libc-glibc () {
+ if ${@bb.utils.contains('PACKAGECONFIG', 'myhostname', 'true', 'false', d)}; then
+ sed -e '/^hosts:/s/\s*\<myhostname\>//' \
+ -e 's/\(^hosts:.*\)\(\<files\>\)\(.*\)\(\<dns\>\)\(.*\)/\1\2 myhostname \3\4\5/' \
+ -i $D${sysconfdir}/nsswitch.conf
+ fi
+ if ${@bb.utils.contains('PACKAGECONFIG', 'nss', 'true', 'false', d)}; then
+ sed -e 's#\(^passwd:.*\)#\1 systemd#' \
+ -e 's#\(^group:.*\)#\1 systemd#' \
+ -e 's#\(^shadow:.*\)#\1 systemd#' \
+ -i $D${sysconfdir}/nsswitch.conf
+ fi
+}
+
+pkg_prerm:${PN}:libc-glibc () {
+ if ${@bb.utils.contains('PACKAGECONFIG', 'myhostname', 'true', 'false', d)}; then
+ sed -e '/^hosts:/s/\s*\<myhostname\>//' \
+ -e '/^hosts:/s/\s*myhostname//' \
+ -i $D${sysconfdir}/nsswitch.conf
+ fi
+ if ${@bb.utils.contains('PACKAGECONFIG', 'nss', 'true', 'false', d)}; then
+ sed -e '/^passwd:/s#\s*systemd##' \
+ -e '/^group:/s#\s*systemd##' \
+ -e '/^shadow:/s#\s*systemd##' \
+ -i $D${sysconfdir}/nsswitch.conf
+ fi
+}
+
+PACKAGE_WRITE_DEPS += "qemu-native"
+pkg_postinst:udev-hwdb () {
+ if test -n "$D"; then
+ $INTERCEPT_DIR/postinst_intercept update_udev_hwdb ${PKG} mlprefix=${MLPREFIX} binprefix=${MLPREFIX} rootlibexecdir="${rootlibexecdir}" PREFERRED_PROVIDER_udev="${PREFERRED_PROVIDER_udev}" base_bindir="${base_bindir}"
+ else
+ udevadm hwdb --update
+ fi
+}
+
+pkg_prerm:udev-hwdb () {
+ rm -f $D${sysconfdir}/udev/hwdb.bin
+}
diff --git a/meta/recipes-core/sysvinit/sysvinit-inittab/start_getty b/meta/recipes-core/sysvinit/sysvinit-inittab/start_getty
index 699a1ead1a..f5671ee53d 100644
--- a/meta/recipes-core/sysvinit/sysvinit-inittab/start_getty
+++ b/meta/recipes-core/sysvinit/sysvinit-inittab/start_getty
@@ -1,5 +1,9 @@
#!/bin/sh
+SPEED=$1
+DEVICE=$2
+TERM=$3
+
# busybox' getty does this itself, util-linux' agetty needs extra help
getty="/sbin/getty"
case $(readlink -f "${getty}") in
@@ -9,9 +13,8 @@ case $(readlink -f "${getty}") in
if [ -x "/usr/bin/setsid" ] ; then
setsid="/usr/bin/setsid"
fi
+ options=""
;;
esac
-if [ -e /sys/class/tty/$2 -a -c /dev/$2 ]; then
- ${setsid:-} ${getty} -L $1 $2 $3
-fi
+${setsid:-} ${getty} ${options:-} -L $SPEED $DEVICE $TERM
diff --git a/meta/recipes-core/sysvinit/sysvinit-inittab_2.88dsf.bb b/meta/recipes-core/sysvinit/sysvinit-inittab_2.88dsf.bb
index 7aad2e2bf2..6bbe517df1 100644
--- a/meta/recipes-core/sysvinit/sysvinit-inittab_2.88dsf.bb
+++ b/meta/recipes-core/sysvinit/sysvinit-inittab_2.88dsf.bb
@@ -2,7 +2,6 @@ SUMMARY = "Inittab configuration for SysVinit"
LICENSE = "GPL-2.0-only"
LIC_FILES_CHKSUM = "file://${COREBASE}/meta/files/common-licenses/GPL-2.0-only;md5=801f80980d171dd6425610833a22dbe6"
-PR = "r10"
SRC_URI = "file://inittab \
file://start_getty"
@@ -22,14 +21,14 @@ do_install() {
install -m 0755 ${WORKDIR}/start_getty ${D}${base_bindir}/start_getty
sed -e 's,/usr/bin,${bindir},g' -i ${D}${base_bindir}/start_getty
- set -x
- tmp="${SERIAL_CONSOLES}"
- for i in $tmp
+ CONSOLES="${SERIAL_CONSOLES}"
+ for s in $CONSOLES
do
- j=`echo ${i} | sed s/\;/\ /g`
- l=`echo ${i} | sed -e 's/tty//' -e 's/^.*;//' -e 's/;.*//'`
- label=`echo $l | sed 's/.*\(....\)/\1/'`
- echo "$label:12345:respawn:${base_bindir}/start_getty ${j} vt102" >> ${D}${sysconfdir}/inittab
+ speed=$(echo $s | cut -d\; -f 1)
+ device=$(echo $s | cut -d\; -f 2)
+ label=$(echo $device | sed -e 's/tty//' | tail --bytes=5)
+
+ echo "$label:12345:respawn:${sbindir}/ttyrun $device ${base_bindir}/start_getty $speed $device vt102" >> ${D}${sysconfdir}/inittab
done
if [ "${USE_VT}" = "1" ]; then
@@ -53,33 +52,6 @@ EOF
fi
}
-pkg_postinst:${PN} () {
-# run this on host and on target
-if [ "${SERIAL_CONSOLES_CHECK}" = "" ]; then
- exit 0
-fi
-}
-
-pkg_postinst_ontarget:${PN} () {
-# run this on the target
-if [ -e /proc/consoles ]; then
- tmp="${SERIAL_CONSOLES_CHECK}"
- for i in $tmp
- do
- j=`echo ${i} | sed -e s/^.*\;//g -e s/\:.*//g`
- k=`echo ${i} | sed s/^.*\://g`
- if [ -z "`grep ${j} /proc/consoles`" ]; then
- if [ -z "${k}" ] || [ -z "`grep ${k} /proc/consoles`" ] || [ ! -e /dev/${j} ]; then
- sed -i -e /^.*${j}\ /d -e /^.*${j}$/d /etc/inittab
- fi
- fi
- done
- kill -HUP 1
-else
- exit 1
-fi
-}
-
# USE_VT and SERIAL_CONSOLES are generally defined by the MACHINE .conf.
# Set PACKAGE_ARCH appropriately.
PACKAGE_ARCH = "${MACHINE_ARCH}"
@@ -90,4 +62,5 @@ CONFFILES:${PN} = "${sysconfdir}/inittab"
USE_VT ?= "1"
SYSVINIT_ENABLED_GETTYS ?= "1"
+RDEPENDS:${PN} = "ttyrun"
RCONFLICTS:${PN} = "busybox-inittab"
diff --git a/meta/recipes-core/sysvinit/sysvinit/0001-hddown-include-libgen.h-for-basename-API.patch b/meta/recipes-core/sysvinit/sysvinit/0001-hddown-include-libgen.h-for-basename-API.patch
new file mode 100644
index 0000000000..5e4053bad1
--- /dev/null
+++ b/meta/recipes-core/sysvinit/sysvinit/0001-hddown-include-libgen.h-for-basename-API.patch
@@ -0,0 +1,38 @@
+From a07c1d94e79840c59563741b45e690e77d4f3dfa Mon Sep 17 00:00:00 2001
+From: Khem Raj <raj.khem@gmail.com>
+Date: Sun, 3 Dec 2023 20:09:30 -0800
+Subject: [PATCH] hddown: include libgen.h for basename API
+
+musl has removed the non-prototype declaration of basename from string.h [1] which now results in build errors with clang-17+ compiler
+
+include libgen.h for using the posix declaration of the funciton.
+
+Fixes
+
+hddown.c:135:8: error: incompatible integer to pointer conversion assigning to 'char *' from 'int' [-Wint-conversion]
+ 135 | ptr = basename(lnk);
+ | ^ ~~~~~~~~~~~~~
+
+[1] https://git.musl-libc.org/cgit/musl/commit/?id=725e17ed6dff4d0cd22487bb64470881e86a92e7
+
+Upstream-Status: Submitted [https://github.com/slicer69/sysvinit/pull/21]
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+---
+ src/hddown.c | 1 +
+ 1 file changed, 1 insertion(+)
+
+diff --git a/src/hddown.c b/src/hddown.c
+index 7a2cf28..3b31bc0 100644
+--- a/src/hddown.c
++++ b/src/hddown.c
+@@ -24,6 +24,7 @@ char *v_hddown = "@(#)hddown.c 1.02 22-Apr-2003 miquels@cistron.nl";
+ #ifndef _GNU_SOURCE
+ #define _GNU_SOURCE
+ #endif
++#include <libgen.h>
+ #include <stdio.h>
+ #include <stdlib.h>
+ #include <unistd.h>
+--
+2.43.0
+
diff --git a/meta/recipes-core/sysvinit/sysvinit/install.patch b/meta/recipes-core/sysvinit/sysvinit/install.patch
index 90563a6294..bc6d493c2b 100644
--- a/meta/recipes-core/sysvinit/sysvinit/install.patch
+++ b/meta/recipes-core/sysvinit/sysvinit/install.patch
@@ -3,7 +3,7 @@ From: Qing He <qing.he@intel.com>
Date: Fri, 18 Jun 2010 09:40:30 +0800
Subject: [PATCH] sysvinit: upgrade to version 2.88dsf
-Upstream-Status: Pending
+Upstream-Status: Submitted [https://github.com/slicer69/sysvinit/pull/13]
---
src/Makefile | 53 +++++++++++++++++++++++++++++-----------------------
diff --git a/meta/recipes-core/sysvinit/sysvinit/sysvinit_remove_linux_fs.patch b/meta/recipes-core/sysvinit/sysvinit/sysvinit_remove_linux_fs.patch
new file mode 100644
index 0000000000..89d65c23b7
--- /dev/null
+++ b/meta/recipes-core/sysvinit/sysvinit/sysvinit_remove_linux_fs.patch
@@ -0,0 +1,17 @@
+# From glibc 2.36, <linux/mount.h> (included from <linux/fs.h>) and
+# <sys/mount.h> (included from glibc) are no longer compatible:
+# https://sourceware.org/glibc/wiki/Release/2.36#Usage_of_.3Clinux.2Fmount.h.3E_and_.3Csys.2Fmount.h.3E
+
+Upstream-Status: Pending
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+
+--- sysvinit-3.04/src/sulogin.c.orig 2022-08-07 23:07:42.952576274 +0200
++++ sysvinit-3.04/src/sulogin.c 2022-08-07 23:08:26.511470983 +0200
+@@ -51,7 +51,6 @@
+ #ifdef __linux__
+ # include <sys/statfs.h>
+ # include <sys/mount.h>
+-# include <linux/fs.h>
+ # include <linux/magic.h>
+ # include <linux/major.h>
+ # ifndef TMPFS_MAGIC
diff --git a/meta/recipes-core/sysvinit/sysvinit_3.04.bb b/meta/recipes-core/sysvinit/sysvinit_3.04.bb
index f678f65c1e..6a612468f3 100644
--- a/meta/recipes-core/sysvinit/sysvinit_3.04.bb
+++ b/meta/recipes-core/sysvinit/sysvinit_3.04.bb
@@ -15,11 +15,13 @@ SRC_URI = "${SAVANNAH_GNU_MIRROR}/sysvinit/sysvinit-${PV}.tar.xz \
file://pidof-add-m-option.patch \
file://realpath.patch \
file://0001-include-sys-sysmacros.h-for-major-minor-defines-in-g.patch \
+ file://sysvinit_remove_linux_fs.patch \
file://rcS-default \
file://rc \
file://rcS \
file://bootlogd.init \
file://01_bootlogd \
+ file://0001-hddown-include-libgen.h-for-basename-API.patch \
"
SRC_URI[sha256sum] = "2a621fe6e4528bc91308b74867ddaaebbdf7753f02395c0c5bae817bd2b7e3a5"
diff --git a/meta/recipes-core/ttyrun/ttyrun_2.32.0.bb b/meta/recipes-core/ttyrun/ttyrun_2.32.0.bb
new file mode 100644
index 0000000000..9a8be15dab
--- /dev/null
+++ b/meta/recipes-core/ttyrun/ttyrun_2.32.0.bb
@@ -0,0 +1,33 @@
+SUMMARY = "Start the program if the specified terminal device is available."
+DESCRIPTION = "ttyrun is typically used to prevent a respawn through the \
+init(8) program when a terminal is not available."
+HOMEPAGE = "https://github.com/ibm-s390-linux/s390-tools"
+
+LICENSE = "MIT"
+LIC_FILES_CHKSUM = "file://LICENSE;md5=f5118f167b055bfd7c3450803f1847af"
+
+SRC_URI = "git://github.com/ibm-s390-linux/s390-tools;protocol=https;branch=master"
+SRCREV = "9eea78b3ad8ab3710fb3b2d80b9cd058d7c8aba7"
+
+S = "${WORKDIR}/git"
+
+EXTRA_OEMAKE = "\
+ V=1 \
+ CC="${CC}" \
+ DISTRELEASE=${PR} \
+ "
+
+# We just want ttyrun and not the rest of s390-utils
+
+do_configure() {
+ oe_runmake -C ${S}/iucvterm/src clean
+}
+
+do_compile() {
+ oe_runmake -C ${S}/iucvterm/src ttyrun
+}
+
+do_install() {
+ install -d ${D}${sbindir}
+ install ${S}/iucvterm/src/ttyrun ${D}${sbindir}
+}
diff --git a/meta/recipes-core/udev/eudev/0001-build-Remove-dead-g-i-r-configuration.patch b/meta/recipes-core/udev/eudev/0001-build-Remove-dead-g-i-r-configuration.patch
deleted file mode 100644
index 2836f30c3a..0000000000
--- a/meta/recipes-core/udev/eudev/0001-build-Remove-dead-g-i-r-configuration.patch
+++ /dev/null
@@ -1,155 +0,0 @@
-From 520c4d451efc488573746f169d8e47d5a131afc2 Mon Sep 17 00:00:00 2001
-From: Alex Kiernan <alex.kiernan@gmail.com>
-Date: Fri, 20 May 2022 09:35:08 +0100
-Subject: [PATCH] build: Remove dead g-i-r configuration
-
-g-i-r support was removed in 2015 as part of removal of Gobject libudev
-support, but the autoconf support wasn't removed but is dead.
-
-Fixes: 252150e181c5 ("src/gudev: remove Gobject libudev support.")
-Upstream-Status: Submitted [https://github.com/eudev-project/eudev/pull/231]
-Signed-off-by: Alex Kiernan <alex.kiernan@gmail.com>
-Signed-off-by: Alex Kiernan <alex.kiernan@zuma.ai>
----
- configure.ac | 12 ------
- m4/introspection.m4 | 96 ---------------------------------------------
- 2 files changed, 108 deletions(-)
- delete mode 100644 m4/introspection.m4
-
-diff --git a/configure.ac b/configure.ac
-index 85a524a618ae..987d5037ae77 100644
---- a/configure.ac
-+++ b/configure.ac
-@@ -183,17 +183,8 @@ AC_SUBST([udevrulesdir],[${udevlibexecdir}/rules.d])
- AC_SUBST([pkgconfiglibdir], [${libdir}/pkgconfig])
- AC_SUBST([sharepkgconfigdir],[${datadir}/pkgconfig])
-
--# introspection paths
--AC_SUBST([girdir], [${datadir}/gir-1.0])
--AC_SUBST([typelibsdir], [${libdir}/girepository-1.0])
--
- AC_SUBST([rootrundir],[${with_rootrundir}])
-
--# ------------------------------------------------------------------------------
--
--GOBJECT_INTROSPECTION_CHECK([1.31.1])
--AM_CONDITIONAL([HAVE_INTROSPECTION], [test "$enable_introspection" = "yes"])
--
- # ------------------------------------------------------------------------------
- AC_ARG_ENABLE([programs],
- AS_HELP_STRING([--disable-programs], [disable programs (udevd, udevadm and helpers)]),
-@@ -349,9 +340,6 @@ AC_MSG_RESULT([
-
- pkgconfiglibdir: ${libdir}/pkgconfig
- sharepkgconfigdir ${datadir}/pkgconfig
--
-- girdir ${datadir}/gir-1.0
-- typelibsdir ${libdir}/girepository-1.0
- ])
-
- # ------------------------------------------------------------------------------
-diff --git a/m4/introspection.m4 b/m4/introspection.m4
-deleted file mode 100644
-index d89c3d907d9c..000000000000
---- a/m4/introspection.m4
-+++ /dev/null
-@@ -1,96 +0,0 @@
--dnl -*- mode: autoconf -*-
--dnl Copyright 2009 Johan Dahlin
--dnl
--dnl This file is free software; the author(s) gives unlimited
--dnl permission to copy and/or distribute it, with or without
--dnl modifications, as long as this notice is preserved.
--dnl
--
--# serial 1
--
--m4_define([_GOBJECT_INTROSPECTION_CHECK_INTERNAL],
--[
-- AC_BEFORE([AC_PROG_LIBTOOL],[$0])dnl setup libtool first
-- AC_BEFORE([AM_PROG_LIBTOOL],[$0])dnl setup libtool first
-- AC_BEFORE([LT_INIT],[$0])dnl setup libtool first
--
-- dnl enable/disable introspection
-- m4_if([$2], [require],
-- [dnl
-- enable_introspection=yes
-- ],[dnl
-- AC_ARG_ENABLE(introspection,
-- AS_HELP_STRING([--enable-introspection[=@<:@no/auto/yes@:>@]],
-- [Enable introspection for this build]),,
-- [enable_introspection=auto])
-- ])dnl
--
-- AC_MSG_CHECKING([for gobject-introspection])
--
-- dnl presence/version checking
-- AS_CASE([$enable_introspection],
-- [no], [dnl
-- found_introspection="no (disabled, use --enable-introspection to enable)"
-- ],dnl
-- [yes],[dnl
-- PKG_CHECK_EXISTS([gobject-introspection-1.0],,
-- AC_MSG_ERROR([gobject-introspection-1.0 is not installed]))
-- PKG_CHECK_EXISTS([gobject-introspection-1.0 >= $1],
-- found_introspection=yes,
-- AC_MSG_ERROR([You need to have gobject-introspection >= $1 installed to build AC_PACKAGE_NAME]))
-- ],dnl
-- [auto],[dnl
-- PKG_CHECK_EXISTS([gobject-introspection-1.0 >= $1], found_introspection=yes, found_introspection=no)
-- dnl Canonicalize enable_introspection
-- enable_introspection=$found_introspection
-- ],dnl
-- [dnl
-- AC_MSG_ERROR([invalid argument passed to --enable-introspection, should be one of @<:@no/auto/yes@:>@])
-- ])dnl
--
-- AC_MSG_RESULT([$found_introspection])
--
-- INTROSPECTION_SCANNER=
-- INTROSPECTION_COMPILER=
-- INTROSPECTION_GENERATE=
-- INTROSPECTION_GIRDIR=
-- INTROSPECTION_TYPELIBDIR=
-- if test "x$found_introspection" = "xyes"; then
-- INTROSPECTION_SCANNER=`$PKG_CONFIG --variable=g_ir_scanner gobject-introspection-1.0`
-- INTROSPECTION_COMPILER=`$PKG_CONFIG --variable=g_ir_compiler gobject-introspection-1.0`
-- INTROSPECTION_GENERATE=`$PKG_CONFIG --variable=g_ir_generate gobject-introspection-1.0`
-- INTROSPECTION_GIRDIR=`$PKG_CONFIG --variable=girdir gobject-introspection-1.0`
-- INTROSPECTION_TYPELIBDIR="$($PKG_CONFIG --variable=typelibdir gobject-introspection-1.0)"
-- INTROSPECTION_CFLAGS=`$PKG_CONFIG --cflags gobject-introspection-1.0`
-- INTROSPECTION_LIBS=`$PKG_CONFIG --libs gobject-introspection-1.0`
-- INTROSPECTION_MAKEFILE=`$PKG_CONFIG --variable=datadir gobject-introspection-1.0`/gobject-introspection-1.0/Makefile.introspection
-- fi
-- AC_SUBST(INTROSPECTION_SCANNER)
-- AC_SUBST(INTROSPECTION_COMPILER)
-- AC_SUBST(INTROSPECTION_GENERATE)
-- AC_SUBST(INTROSPECTION_GIRDIR)
-- AC_SUBST(INTROSPECTION_TYPELIBDIR)
-- AC_SUBST(INTROSPECTION_CFLAGS)
-- AC_SUBST(INTROSPECTION_LIBS)
-- AC_SUBST(INTROSPECTION_MAKEFILE)
--
-- AM_CONDITIONAL(HAVE_INTROSPECTION, test "x$found_introspection" = "xyes")
--])
--
--
--dnl Usage:
--dnl GOBJECT_INTROSPECTION_CHECK([minimum-g-i-version])
--
--AC_DEFUN([GOBJECT_INTROSPECTION_CHECK],
--[
-- _GOBJECT_INTROSPECTION_CHECK_INTERNAL([$1])
--])
--
--dnl Usage:
--dnl GOBJECT_INTROSPECTION_REQUIRE([minimum-g-i-version])
--
--
--AC_DEFUN([GOBJECT_INTROSPECTION_REQUIRE],
--[
-- _GOBJECT_INTROSPECTION_CHECK_INTERNAL([$1], [require])
--])
---
-2.35.1
-
diff --git a/meta/recipes-core/udev/eudev/netifnames.patch b/meta/recipes-core/udev/eudev/netifnames.patch
new file mode 100644
index 0000000000..4f8e54d12d
--- /dev/null
+++ b/meta/recipes-core/udev/eudev/netifnames.patch
@@ -0,0 +1,17 @@
+eudev: consider ID_NET_NAME_MAC as an interface name
+
+eudev might not create names based on slot or path.
+
+Upstream-Status: Backport [github.com/eudev-project/eudev/pull/274]
+
+Signed-off-by: Joe Slater <joe.slater@windriver.com>
+
+--- a/rules/80-net-name-slot.rules
++++ b/rules/80-net-name-slot.rules
+@@ -10,5 +10,6 @@ ENV{net.ifnames}=="0", GOTO="net_name_sl
+ NAME=="", ENV{ID_NET_NAME_ONBOARD}!="", NAME="$env{ID_NET_NAME_ONBOARD}"
+ NAME=="", ENV{ID_NET_NAME_SLOT}!="", NAME="$env{ID_NET_NAME_SLOT}"
+ NAME=="", ENV{ID_NET_NAME_PATH}!="", NAME="$env{ID_NET_NAME_PATH}"
++NAME=="", ENV{ID_NET_NAME_MAC}!="", NAME="$env{ID_NET_NAME_MAC}"
+
+ LABEL="net_name_slot_end"
diff --git a/meta/recipes-core/udev/eudev_3.2.11.bb b/meta/recipes-core/udev/eudev_3.2.11.bb
deleted file mode 100644
index bc2c77de89..0000000000
--- a/meta/recipes-core/udev/eudev_3.2.11.bb
+++ /dev/null
@@ -1,89 +0,0 @@
-SUMMARY = "eudev is a fork of systemd's udev"
-HOMEPAGE = "https://github.com/eudev-project/eudev"
-DESCRIPTION = "eudev is Gentoo's fork of udev, systemd's device file manager for the Linux kernel. It manages device nodes in /dev and handles all user space actions when adding or removing devices."
-LICENSE = "GPL-2.0-or-later & LGPL-2.1-or-later"
-LICENSE:libudev = "LGPL-2.1-or-later"
-LIC_FILES_CHKSUM = "file://COPYING;md5=751419260aa954499f7abaabaa882bbe"
-
-DEPENDS = "gperf-native"
-
-PROVIDES = "udev"
-
-SRC_URI = "https://github.com/eudev-project/${BPN}/releases/download/v${PV}/${BP}.tar.gz \
- file://init \
- file://local.rules \
- file://0001-build-Remove-dead-g-i-r-configuration.patch \
-"
-
-SRC_URI[sha256sum] = "19847cafec67897da855fde56f9dc7d92e21c50e450aa79068a7e704ed44558b"
-
-UPSTREAM_CHECK_URI = "https://github.com/eudev-project/eudev/releases"
-UPSTREAM_CHECK_REGEX = "eudev-(?P<pver>\d+(\.\d+)+)\.tar"
-
-inherit autotools update-rc.d qemu pkgconfig features_check manpages
-
-CONFLICT_DISTRO_FEATURES = "systemd"
-
-EXTRA_OECONF = " \
- --sbindir=${base_sbindir} \
- --with-rootlibdir=${base_libdir} \
- --with-rootlibexecdir=${nonarch_base_libdir}/udev \
- --with-rootprefix= \
-"
-
-PACKAGECONFIG ?= "blkid hwdb kmod \
- ${@bb.utils.filter('DISTRO_FEATURES', 'selinux', d)} \
-"
-PACKAGECONFIG[blkid] = "--enable-blkid,--disable-blkid,util-linux"
-PACKAGECONFIG[hwdb] = "--enable-hwdb,--disable-hwdb"
-PACKAGECONFIG[kmod] = "--enable-kmod,--disable-kmod,kmod"
-PACKAGECONFIG[manpages] = "--enable-manpages,--disable-manpages"
-PACKAGECONFIG[rule-generator] = "--enable-rule-generator,--disable-rule-generator"
-PACKAGECONFIG[selinux] = "--enable-selinux,--disable-selinux,libselinux"
-
-do_install:append() {
- install -d ${D}${sysconfdir}/init.d
- install -m 0755 ${WORKDIR}/init ${D}${sysconfdir}/init.d/udev
- sed -i s%@UDEVD@%${base_sbindir}/udevd% ${D}${sysconfdir}/init.d/udev
- sed -i s%@KMOD@%${base_bindir}/kmod% ${D}${sysconfdir}/init.d/udev
-
- install -d ${D}${sysconfdir}/udev/rules.d
- install -m 0644 ${WORKDIR}/local.rules ${D}${sysconfdir}/udev/rules.d/local.rules
-
- # Use classic network interface naming scheme
- touch ${D}${sysconfdir}/udev/rules.d/80-net-name-slot.rules
-}
-
-do_install:prepend:class-target () {
- # Remove references to buildmachine
- sed -i -e 's:${RECIPE_SYSROOT_NATIVE}::g' \
- ${B}/src/udev/keyboard-keys-from-name.h
-}
-
-INITSCRIPT_NAME = "udev"
-INITSCRIPT_PARAMS = "start 04 S ."
-
-PACKAGE_BEFORE_PN = "libudev ${PN}-hwdb"
-
-FILES:${PN} += "${nonarch_base_libdir}/udev"
-FILES:libudev = "${base_libdir}/libudev.so.*"
-FILES:${PN}-hwdb = "${sysconfdir}/udev/hwdb.d"
-
-RDEPENDS:${PN}-hwdb += "eudev"
-RDEPENDS:${PN} += "kmod"
-
-RPROVIDES:${PN} = "hotplug udev"
-RPROVIDES:${PN}-hwdb += "udev-hwdb"
-
-PACKAGE_WRITE_DEPS += "qemu-native"
-pkg_postinst:${PN}-hwdb () {
- if test -n "$D"; then
- $INTERCEPT_DIR/postinst_intercept update_udev_hwdb ${PKG} mlprefix=${MLPREFIX} binprefix=${MLPREFIX}
- else
- udevadm hwdb --update
- fi
-}
-
-pkg_prerm:${PN}-hwdb () {
- rm -f $D${sysconfdir}/udev/hwdb.bin
-}
diff --git a/meta/recipes-core/udev/eudev_3.2.14.bb b/meta/recipes-core/udev/eudev_3.2.14.bb
new file mode 100644
index 0000000000..18696679c8
--- /dev/null
+++ b/meta/recipes-core/udev/eudev_3.2.14.bb
@@ -0,0 +1,90 @@
+SUMMARY = "eudev is a fork of systemd's udev"
+HOMEPAGE = "https://github.com/eudev-project/eudev"
+DESCRIPTION = "eudev is Gentoo's fork of udev, systemd's device file manager for the Linux kernel. It manages device nodes in /dev and handles all user space actions when adding or removing devices."
+LICENSE = "GPL-2.0-or-later & LGPL-2.1-or-later"
+LICENSE:libudev = "LGPL-2.1-or-later"
+LIC_FILES_CHKSUM = "file://COPYING;md5=751419260aa954499f7abaabaa882bbe"
+
+DEPENDS = "gperf-native"
+
+PROVIDES = "udev"
+
+SRC_URI = "${GITHUB_BASE_URI}/download/v${PV}/${BP}.tar.gz \
+ file://netifnames.patch \
+ file://init \
+ file://local.rules \
+ "
+
+SRC_URI[sha256sum] = "8da4319102f24abbf7fff5ce9c416af848df163b29590e666d334cc1927f006f"
+
+GITHUB_BASE_URI = "https://github.com/eudev-project/eudev/releases"
+
+inherit autotools update-rc.d qemu pkgconfig features_check manpages github-releases
+
+CONFLICT_DISTRO_FEATURES = "systemd"
+
+EXTRA_OECONF = " \
+ --sbindir=${base_sbindir} \
+ --with-rootlibdir=${base_libdir} \
+ --with-rootlibexecdir=${nonarch_base_libdir}/udev \
+ --with-rootprefix= \
+"
+
+PACKAGECONFIG ?= "blkid hwdb kmod \
+ ${@bb.utils.filter('DISTRO_FEATURES', 'selinux', d)} \
+"
+PACKAGECONFIG[blkid] = "--enable-blkid,--disable-blkid,util-linux"
+PACKAGECONFIG[hwdb] = "--enable-hwdb,--disable-hwdb"
+PACKAGECONFIG[kmod] = "--enable-kmod,--disable-kmod,kmod"
+PACKAGECONFIG[manpages] = "--enable-manpages,--disable-manpages"
+PACKAGECONFIG[rule-generator] = "--enable-rule-generator,--disable-rule-generator"
+PACKAGECONFIG[selinux] = "--enable-selinux,--disable-selinux,libselinux"
+
+do_install:append() {
+ install -d ${D}${sysconfdir}/init.d
+ install -m 0755 ${WORKDIR}/init ${D}${sysconfdir}/init.d/udev
+ sed -i s%@UDEVD@%${base_sbindir}/udevd% ${D}${sysconfdir}/init.d/udev
+ sed -i s%@KMOD@%${base_bindir}/kmod% ${D}${sysconfdir}/init.d/udev
+
+ install -d ${D}${sysconfdir}/udev/rules.d
+ install -m 0644 ${WORKDIR}/local.rules ${D}${sysconfdir}/udev/rules.d/local.rules
+
+ # Use classic network interface naming scheme if no 'pni-names' distro feature
+ if ${@bb.utils.contains('DISTRO_FEATURES', 'pni-names', 'false', 'true', d)}; then
+ touch ${D}${sysconfdir}/udev/rules.d/80-net-name-slot.rules
+ fi
+}
+
+do_install:prepend:class-target () {
+ # Remove references to buildmachine
+ sed -i -e 's:${RECIPE_SYSROOT_NATIVE}::g' \
+ ${B}/src/udev/keyboard-keys-from-name.h
+}
+
+INITSCRIPT_NAME = "udev"
+INITSCRIPT_PARAMS = "start 04 S ."
+
+PACKAGE_BEFORE_PN = "libudev ${PN}-hwdb"
+
+FILES:${PN} += "${nonarch_base_libdir}/udev"
+FILES:libudev = "${base_libdir}/libudev.so.*"
+FILES:${PN}-hwdb = "${sysconfdir}/udev/hwdb.d"
+
+RDEPENDS:${PN}-hwdb += "eudev"
+RDEPENDS:${PN} += "kmod"
+
+RPROVIDES:${PN} = "hotplug udev"
+RPROVIDES:${PN}-hwdb += "udev-hwdb"
+
+PACKAGE_WRITE_DEPS += "qemu-native"
+pkg_postinst:${PN}-hwdb () {
+ if test -n "$D"; then
+ $INTERCEPT_DIR/postinst_intercept update_udev_hwdb ${PKG} mlprefix=${MLPREFIX} binprefix=${MLPREFIX}
+ else
+ udevadm hwdb --update
+ fi
+}
+
+pkg_prerm:${PN}-hwdb () {
+ rm -f $D${sysconfdir}/udev/hwdb.bin
+}
diff --git a/meta/recipes-core/udev/udev-extraconf/mount.sh b/meta/recipes-core/udev/udev-extraconf/mount.sh
index 5ba66e98e2..c19e2aa68a 100644
--- a/meta/recipes-core/udev/udev-extraconf/mount.sh
+++ b/meta/recipes-core/udev/udev-extraconf/mount.sh
@@ -6,6 +6,7 @@
BASE_INIT="`readlink -f "@base_sbindir@/init"`"
INIT_SYSTEMD="@systemd_unitdir@/systemd"
+MOUNT_BASE="@MOUNT_BASE@"
if [ "x$BASE_INIT" = "x$INIT_SYSTEMD" ];then
# systemd as init uses systemd-mount to mount block devices
@@ -35,15 +36,35 @@ do
fi
done
+is_filesystem_supported() {
+ while read -r fs; do
+ if [ "${fs#nodev}" = "$1" ];
+ then
+ return 0
+ fi
+ done < "/proc/filesystems"
+ return 1
+}
+
automount_systemd() {
name="`basename "$DEVNAME"`"
# Skip already mounted partitions
- if [ -f /run/systemd/transient/run-media-$name.mount ]; then
- logger "mount.sh/automount" "/run/media/$name already mounted"
+ if [ -f /run/systemd/transient/$(echo $MOUNT_BASE | cut -d '/' -f 2- | sed 's#/#-#g')-*$name.mount ]; then
+ logger "mount.sh/automount" "$MOUNT_BASE/$name already mounted"
return
fi
+ # Get the unique name for mount point
+ get_label_name "${DEVNAME}"
+
+ # Only go for auto-mounting when the device has been cleaned up in remove
+ # or has not been identified yet
+ if [ -e "/tmp/.automount-$name" ]; then
+ logger "mount.sh/automount" "[$MOUNT_BASE/$name] is already cached"
+ return
+ fi
+
# Skip the partition which are already in /etc/fstab
grep "^[[:space:]]*$DEVNAME" /etc/fstab && return
for n in LABEL PARTLABEL UUID PARTUUID; do
@@ -53,7 +74,12 @@ automount_systemd() {
grep "^[[:space:]]*$tmp" /etc/fstab && return
done
- [ -d "/run/media/$name" ] || mkdir -p "/run/media/$name"
+ if ! is_filesystem_supported $ID_FS_TYPE; then
+ logger "mount.sh/automount" "Filesystem '$ID_FS_TYPE' on '${DEVNAME}' is unsupported"
+ return
+ fi
+
+ [ -d "$MOUNT_BASE/$name" ] || mkdir -p "$MOUNT_BASE/$name"
MOUNT="$MOUNT -o silent"
@@ -65,18 +91,20 @@ automount_systemd() {
;;
swap)
return ;;
+ lvm*|LVM*)
+ return ;;
# TODO
*)
;;
esac
- if ! $MOUNT --no-block -t auto $DEVNAME "/run/media/$name"
+ if ! $MOUNT --no-block -t auto $DEVNAME "$MOUNT_BASE/$name"
then
- #logger "mount.sh/automount" "$MOUNT -t auto $DEVNAME \"/run/media/$name\" failed!"
- rm_dir "/run/media/$name"
+ #logger "mount.sh/automount" "$MOUNT -t auto $DEVNAME \"$MOUNT_BASE/$name\" failed!"
+ rm_dir "$MOUNT_BASE/$name"
else
- logger "mount.sh/automount" "Auto-mount of [/run/media/$name] successful"
- touch "/tmp/.automount-$name"
+ logger "mount.sh/automount" "Auto-mount of [$MOUNT_BASE/$name] successful"
+ echo "$name" > "/tmp/.automount-$name"
fi
}
@@ -93,7 +121,17 @@ automount() {
# configured in fstab
grep -q "^$DEVNAME " /proc/mounts && return
- ! test -d "/run/media/$name" && mkdir -p "/run/media/$name"
+ # Get the unique name for mount point
+ get_label_name "${DEVNAME}"
+
+ # Only go for auto-mounting when the device has been cleaned up in remove
+ # or has not been identified yet
+ if [ -e "/tmp/.automount-$name" ]; then
+ logger "mount.sh/automount" "[$MOUNT_BASE/$name] is already cached"
+ return
+ fi
+
+ ! test -d "$MOUNT_BASE/$name" && mkdir -p "$MOUNT_BASE/$name"
# Silent util-linux's version of mounting auto
if [ "x`readlink $MOUNT`" = "x/bin/mount.util-linux" ] ;
then
@@ -108,18 +146,23 @@ automount() {
;;
swap)
return ;;
+ lvm*|LVM*)
+ return ;;
# TODO
*)
;;
esac
- if ! $MOUNT -t auto $DEVNAME "/run/media/$name"
+ if ! $MOUNT -t auto $DEVNAME "$MOUNT_BASE/$name"
then
- #logger "mount.sh/automount" "$MOUNT -t auto $DEVNAME \"/run/media/$name\" failed!"
- rm_dir "/run/media/$name"
+ #logger "mount.sh/automount" "$MOUNT -t auto $DEVNAME \"$MOUNT_BASE/$name\" failed!"
+ rm_dir "$MOUNT_BASE/$name"
else
- logger "mount.sh/automount" "Auto-mount of [/run/media/$name] successful"
- touch "/tmp/.automount-$name"
+ logger "mount.sh/automount" "Auto-mount of [$MOUNT_BASE/$name] successful"
+ # The actual device might not be present in the remove event so blkid cannot
+ # be used to calculate what name was generated here. Simply save the mount
+ # name in our tmp file.
+ echo "$name" > "/tmp/.automount-$name"
fi
}
@@ -133,6 +176,18 @@ rm_dir() {
fi
}
+get_label_name() {
+ # Get the LABEL or PARTLABEL
+ LABEL=`/sbin/blkid | grep "$1:" | grep -o 'LABEL=".*"' | cut -d '"' -f2`
+ # If the $DEVNAME has a LABEL or a PARTLABEL
+ if [ -n "$LABEL" ]; then
+ # Set the mount location dir name to LABEL appended
+ # with $name e.g. label-sda. That would avoid overlapping
+ # mounts in case two devices have same LABEL
+ name="${LABEL}-${name}"
+ fi
+}
+
# No ID_FS_TYPE for cdrom device, yet it should be mounted
name="`basename "$DEVNAME"`"
[ -e /sys/block/$name/device/media ] && media_type=`cat /sys/block/$name/device/media`
@@ -150,12 +205,18 @@ if [ "$ACTION" = "add" ] && [ -n "$DEVNAME" ] && [ -n "$ID_FS_TYPE" -o "$media_t
fi
if [ "$ACTION" = "remove" ] || [ "$ACTION" = "change" ] && [ -x "$UMOUNT" ] && [ -n "$DEVNAME" ]; then
- for mnt in `cat /proc/mounts | grep "$DEVNAME" | cut -f 2 -d " " `
- do
- $UMOUNT $mnt
- done
-
- # Remove empty directories from auto-mounter
name="`basename "$DEVNAME"`"
- test -e "/tmp/.automount-$name" && rm_dir "/run/media/$name"
+ tmpfile=`find /tmp | grep "\.automount-.*${name}$"`
+ if [ ! -e "/sys/$DEVPATH" -a -e "$tmpfile" ]; then
+ logger "mount.sh/remove" "cleaning up $DEVNAME, was mounted by the auto-mounter"
+ for mnt in `cat /proc/mounts | grep "$DEVNAME" | cut -f 2 -d " " `
+ do
+ $UMOUNT "`printf $mnt`"
+ done
+ # Remove mount directory created by the auto-mounter
+ # and clean up our tmp cache file
+ mntdir=`cat "$tmpfile"`
+ rm_dir "$MOUNT_BASE/$mntdir"
+ rm "$tmpfile"
+ fi
fi
diff --git a/meta/recipes-core/udev/udev-extraconf_1.1.bb b/meta/recipes-core/udev/udev-extraconf_1.1.bb
index 7da04379c0..30f1fe76d0 100644
--- a/meta/recipes-core/udev/udev-extraconf_1.1.bb
+++ b/meta/recipes-core/udev/udev-extraconf_1.1.bb
@@ -15,6 +15,7 @@ SRC_URI = " \
S = "${WORKDIR}"
+MOUNT_BASE = "/run/media"
do_install() {
install -d ${D}${sysconfdir}/udev/rules.d
@@ -31,12 +32,24 @@ do_install() {
install -m 0755 ${WORKDIR}/mount.sh ${D}${sysconfdir}/udev/scripts/mount.sh
sed -i 's|@systemd_unitdir@|${systemd_unitdir}|g' ${D}${sysconfdir}/udev/scripts/mount.sh
sed -i 's|@base_sbindir@|${base_sbindir}|g' ${D}${sysconfdir}/udev/scripts/mount.sh
+ sed -i 's|@MOUNT_BASE@|${MOUNT_BASE}|g' ${D}${sysconfdir}/udev/scripts/mount.sh
install -m 0755 ${WORKDIR}/network.sh ${D}${sysconfdir}/udev/scripts
}
-FILES:${PN} = "${sysconfdir}/udev"
-RDEPENDS:${PN} = "udev"
+pkg_postinst:${PN} () {
+ if [ -e $D${systemd_unitdir}/system/systemd-udevd.service ]; then
+ sed -i "/\[Service\]/aMountFlags=shared" $D${systemd_unitdir}/system/systemd-udevd.service
+ fi
+}
+
+pkg_postrm:${PN} () {
+ if [ -e $D${systemd_unitdir}/system/systemd-udevd.service ]; then
+ sed -i "/MountFlags=shared/d" $D${systemd_unitdir}/system/systemd-udevd.service
+ fi
+}
+
+RDEPENDS:${PN} = "udev util-linux-blkid ${@bb.utils.contains('DISTRO_FEATURES', 'systemd', 'util-linux-lsblk', '', d)}"
CONFFILES:${PN} = "${sysconfdir}/udev/mount.ignorelist"
# to replace udev-extra-rules from meta-oe
diff --git a/meta/recipes-core/update-rc.d/update-rc.d_0.8.bb b/meta/recipes-core/update-rc.d/update-rc.d_0.8.bb
index ee49198429..ba622fe716 100644
--- a/meta/recipes-core/update-rc.d/update-rc.d_0.8.bb
+++ b/meta/recipes-core/update-rc.d/update-rc.d_0.8.bb
@@ -6,8 +6,8 @@ SECTION = "base"
LICENSE = "GPL-2.0-or-later"
LIC_FILES_CHKSUM = "file://update-rc.d;beginline=5;endline=15;md5=d40a07c27f535425934bb5001f2037d9"
-SRC_URI = "git://git.yoctoproject.org/update-rc.d;branch=master"
-SRCREV = "8636cf478d426b568c1be11dbd9346f67e03adac"
+SRC_URI = "git://git.yoctoproject.org/update-rc.d;branch=master;protocol=https"
+SRCREV = "b8f950105010270a768aa12245d6abf166346015"
UPSTREAM_CHECK_COMMITS = "1"
diff --git a/meta/recipes-core/util-linux/util-linux-libuuid_2.38.bb b/meta/recipes-core/util-linux/util-linux-libuuid_2.38.bb
deleted file mode 100644
index 5d759aed94..0000000000
--- a/meta/recipes-core/util-linux/util-linux-libuuid_2.38.bb
+++ /dev/null
@@ -1,16 +0,0 @@
-# To allow util-linux to optionally build-depend on cryptsetup, libuuid is
-# split out of the main recipe, as it's needed by cryptsetup
-
-require util-linux.inc
-
-inherit autotools gettext pkgconfig
-
-S = "${WORKDIR}/util-linux-${PV}"
-EXTRA_OECONF += "--disable-all-programs --enable-libuuid"
-LICENSE = "BSD-3-Clause"
-
-do_install:append() {
- rm -rf ${D}${datadir} ${D}${bindir} ${D}${base_bindir} ${D}${sbindir} ${D}${base_sbindir} ${D}${exec_prefix}/sbin
-}
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-core/util-linux/util-linux-libuuid_2.39.3.bb b/meta/recipes-core/util-linux/util-linux-libuuid_2.39.3.bb
new file mode 100644
index 0000000000..ec04c1d384
--- /dev/null
+++ b/meta/recipes-core/util-linux/util-linux-libuuid_2.39.3.bb
@@ -0,0 +1,19 @@
+# To allow util-linux to optionally build-depend on cryptsetup, libuuid is
+# split out of the main recipe, as it's needed by cryptsetup
+
+require util-linux.inc
+
+inherit autotools gettext pkgconfig
+
+S = "${WORKDIR}/util-linux-${PV}"
+
+EXTRA_AUTORECONF += "--exclude=gtkdocize"
+EXTRA_OECONF += "--disable-all-programs --enable-libuuid"
+
+LICENSE = "BSD-3-Clause"
+
+do_install:append() {
+ rm -rf ${D}${datadir} ${D}${bindir} ${D}${base_bindir} ${D}${sbindir} ${D}${base_sbindir} ${D}${exec_prefix}/sbin
+}
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-core/util-linux/util-linux.inc b/meta/recipes-core/util-linux/util-linux.inc
index c9bddfb7a6..d506783f9a 100644
--- a/meta/recipes-core/util-linux/util-linux.inc
+++ b/meta/recipes-core/util-linux/util-linux.inc
@@ -6,13 +6,15 @@ disk partitioning, kernel message management, filesystem creation, and system lo
SECTION = "base"
-LICENSE = "GPL-2.0-or-later & LGPL-2.1-or-later & BSD-3-Clause & BSD-4-Clause"
+LICENSE = "GPL-1.0-or-later & GPL-2.0-or-later & LGPL-2.1-or-later & BSD-2-Clause & BSD-3-Clause & BSD-4-Clause & MIT"
+LICENSE:${PN}-fcntl-lock = "MIT"
+LICENSE:${PN}-fdisk = "GPL-1.0-or-later"
LICENSE:${PN}-libblkid = "LGPL-2.1-or-later"
LICENSE:${PN}-libfdisk = "LGPL-2.1-or-later"
LICENSE:${PN}-libmount = "LGPL-2.1-or-later"
LICENSE:${PN}-libsmartcols = "LGPL-2.1-or-later"
-LIC_FILES_CHKSUM = "file://README.licensing;md5=0fd5c050c6187d2bf0a4492b7f4e33da \
+LIC_FILES_CHKSUM = "file://README.licensing;md5=cc80239f106687ab39ef0271ff5cf4ba \
file://COPYING;md5=b234ee4d69f5fce4486a80fdaf4a4263 \
file://Documentation/licenses/COPYING.GPL-2.0-or-later;md5=b234ee4d69f5fce4486a80fdaf4a4263 \
file://Documentation/licenses/COPYING.LGPL-2.1-or-later;md5=4fbd65380cdd255951079008b364516c \
@@ -23,11 +25,12 @@ LIC_FILES_CHKSUM = "file://README.licensing;md5=0fd5c050c6187d2bf0a4492b7f4e33da
file://libblkid/COPYING;md5=693bcbbe16d3a4a4b37bc906bc01cc04 \
file://libfdisk/COPYING;md5=693bcbbe16d3a4a4b37bc906bc01cc04 \
file://libsmartcols/COPYING;md5=693bcbbe16d3a4a4b37bc906bc01cc04 \
-"
+ "
FILESEXTRAPATHS:prepend := "${THISDIR}/util-linux:"
MAJOR_VERSION = "${@'.'.join(d.getVar('PV').split('.')[0:2])}"
SRC_URI = "${KERNELORG_MIRROR}/linux/utils/util-linux/v${MAJOR_VERSION}/util-linux-${PV}.tar.xz \
+ file://mit-license.patch \
file://configure-sbindir.patch \
file://runuser.pamd \
file://runuser-l.pamd \
@@ -35,6 +38,8 @@ SRC_URI = "${KERNELORG_MIRROR}/linux/utils/util-linux/v${MAJOR_VERSION}/util-lin
file://run-ptest \
file://display_testname_for_subtest.patch \
file://avoid_parallel_tests.patch \
+ file://0001-login-utils-include-libgen.h-for-basename-API.patch \
+ file://fcntl-lock.c \
"
-SRC_URI[sha256sum] = "6d111cbe4d55b336db2f1fbeffbc65b89908704c01136371d32aa9bec373eb64"
+SRC_URI[sha256sum] = "7b6605e48d1a49f43cc4b4cfc59f313d0dd5402fa40b96810bd572e167dfed0f"
diff --git a/meta/recipes-core/util-linux/util-linux/0001-login-utils-include-libgen.h-for-basename-API.patch b/meta/recipes-core/util-linux/util-linux/0001-login-utils-include-libgen.h-for-basename-API.patch
new file mode 100644
index 0000000000..6258710e1e
--- /dev/null
+++ b/meta/recipes-core/util-linux/util-linux/0001-login-utils-include-libgen.h-for-basename-API.patch
@@ -0,0 +1,60 @@
+From d44e3ad1f6f8b5c1b3098bb7d537943a4c21d22f Mon Sep 17 00:00:00 2001
+From: Khem Raj <raj.khem@gmail.com>
+Date: Sun, 3 Dec 2023 19:59:46 -0800
+Subject: [PATCH] login-utils: include libgen.h for basename API
+
+musl has removed the non-prototype declaration of basename from string.h [1] which now results in build errors with clang-17+ compiler
+
+include libgen.h for using the posix declaration of the funciton.
+
+Fixes
+
+../util-linux-2.39.2/login-utils/su-common.c:847:20: error: call to undeclared function 'basename'; ISO C99 and later do not support implicit function declarations [-Wimplicit-function-declaration]
+ 847 | shell_basename = basename(shell);
+ | ^
+
+[1] https://git.musl-libc.org/cgit/musl/commit/?id=725e17ed6dff4d0cd22487bb64470881e86a92e7
+
+Upstream-Status: Submitted [https://github.com/util-linux/util-linux/pull/2615]
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+
+---
+ login-utils/su-common.c | 10 +++++++---
+ 1 file changed, 7 insertions(+), 3 deletions(-)
+
+diff --git a/login-utils/su-common.c b/login-utils/su-common.c
+index b674920..3297c78 100644
+--- a/login-utils/su-common.c
++++ b/login-utils/su-common.c
+@@ -26,6 +26,7 @@
+ #include <sys/types.h>
+ #include <pwd.h>
+ #include <grp.h>
++#include <libgen.h>
+ #include <security/pam_appl.h>
+ #ifdef HAVE_SECURITY_PAM_MISC_H
+ # include <security/pam_misc.h>
+@@ -840,17 +841,20 @@ static void run_shell(
+ su->simulate_login ? " login" : "",
+ su->fast_startup ? " fast-start" : ""));
+
++ char* tmp = xstrdup(shell);
+ if (su->simulate_login) {
+ char *arg0;
+ char *shell_basename;
+
+- shell_basename = basename(shell);
++ shell_basename = basename(tmp);
+ arg0 = xmalloc(strlen(shell_basename) + 2);
+ arg0[0] = '-';
+ strcpy(arg0 + 1, shell_basename);
+ args[0] = arg0;
+- } else
+- args[0] = basename(shell);
++ } else {
++ args[0] = basename(tmp);
++ }
++ free(tmp);
+
+ if (su->fast_startup)
+ args[argno++] = "-f";
diff --git a/meta/recipes-core/util-linux/util-linux/avoid_parallel_tests.patch b/meta/recipes-core/util-linux/util-linux/avoid_parallel_tests.patch
index f1cbdb3beb..85ad7a5575 100644
--- a/meta/recipes-core/util-linux/util-linux/avoid_parallel_tests.patch
+++ b/meta/recipes-core/util-linux/util-linux/avoid_parallel_tests.patch
@@ -1,4 +1,4 @@
-From ee3c7812e1efa6719af68b994804f0e6caceabd8 Mon Sep 17 00:00:00 2001
+From 0b05e4695a0616badef71dfa459a00ef6ff1b521 Mon Sep 17 00:00:00 2001
From: Tudor Florea <tudor.florea@enea.com>
Date: Mon, 14 Jun 2021 14:00:31 +0200
Subject: [PATCH] util-linux: Add ptest
@@ -15,7 +15,7 @@ Upstream-Status: Inappropriate
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/configure.ac b/configure.ac
-index 5664f9f..075ef27 100644
+index 890212f..870e817 100644
--- a/configure.ac
+++ b/configure.ac
@@ -11,7 +11,7 @@ AC_CONFIG_MACRO_DIR([m4])
diff --git a/meta/recipes-core/util-linux/util-linux/configure-sbindir.patch b/meta/recipes-core/util-linux/util-linux/configure-sbindir.patch
index e475289f65..75adeca188 100644
--- a/meta/recipes-core/util-linux/util-linux/configure-sbindir.patch
+++ b/meta/recipes-core/util-linux/util-linux/configure-sbindir.patch
@@ -1,3 +1,8 @@
+From c79222a9a5e3425c55e150edc0b7ac59c573aa2f Mon Sep 17 00:00:00 2001
+From: Phil Blundell <pb@pbcl.net>
+Date: Mon, 24 Sep 2012 07:24:51 +0100
+Subject: [PATCH] util-linux: Ensure that ${sbindir} is respected
+
util-linux: take ${sbindir} from the environment if it is set there
fix the test, the [ ] syntax was getting eaten by autoconf
@@ -5,11 +10,15 @@ Signed-off-by: Phil Blundell <pb@pbcl.net>
Signed-off-by: Saul Wold <sgw@linux.intel.com
Upstream-Status: Inappropriate [configuration]
-Index: util-linux-2.31/configure.ac
-===================================================================
---- util-linux-2.31.orig/configure.ac
-+++ util-linux-2.31/configure.ac
-@@ -89,7 +89,10 @@ AC_SUBST([runstatedir])
+---
+ configure.ac | 5 ++++-
+ 1 file changed, 4 insertions(+), 1 deletion(-)
+
+diff --git a/configure.ac b/configure.ac
+index 36c24b4..890212f 100644
+--- a/configure.ac
++++ b/configure.ac
+@@ -102,7 +102,10 @@ AC_SUBST([runstatedir])
usrbin_execdir='${exec_prefix}/bin'
AC_SUBST([usrbin_execdir])
diff --git a/meta/recipes-core/util-linux/util-linux/display_testname_for_subtest.patch b/meta/recipes-core/util-linux/util-linux/display_testname_for_subtest.patch
index 417ca1d98f..815ae9c915 100644
--- a/meta/recipes-core/util-linux/util-linux/display_testname_for_subtest.patch
+++ b/meta/recipes-core/util-linux/util-linux/display_testname_for_subtest.patch
@@ -1,4 +1,7 @@
-Display testname for subtest
+From fc5de1de898fd1a372a2fd2fa493dc57323a029d Mon Sep 17 00:00:00 2001
+From: Tudor Florea <tudor.florea@enea.com>
+Date: Thu, 3 Dec 2015 04:08:00 +0100
+Subject: [PATCH] Display testname for subtest
Signed-off-by: Tudor Florea <tudor.florea@enea.com>
Upstream-Status: Pending
@@ -8,10 +11,10 @@ Upstream-Status: Pending
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/tests/functions.sh b/tests/functions.sh
-index 5246605..b24dc15 100644
+index 5a562a3..098145e 100644
--- a/tests/functions.sh
+++ b/tests/functions.sh
-@@ -320,7 +320,7 @@ function ts_init_subtest {
+@@ -437,7 +437,7 @@ function ts_init_subtest {
if [ "$TS_PARSABLE" != "yes" ]; then
[ $TS_NSUBTESTS -eq 1 ] && echo
@@ -20,6 +23,3 @@ index 5246605..b24dc15 100644
fi
}
---
-2.8.3
-
diff --git a/meta/recipes-core/util-linux/util-linux/fcntl-lock.c b/meta/recipes-core/util-linux/util-linux/fcntl-lock.c
new file mode 100644
index 0000000000..966d8c5ecb
--- /dev/null
+++ b/meta/recipes-core/util-linux/util-linux/fcntl-lock.c
@@ -0,0 +1,332 @@
+// From https://github.com/magnumripper/fcntl-lock
+// SPDX-License-Identifier: MIT
+/* ----------------------------------------------------------------------- *
+ *
+ * Copyright 2003-2005 H. Peter Anvin - All Rights Reserved
+ * Copyright 2015 magnum (fcntl version)
+ *
+ * Permission is hereby granted, free of charge, to any person
+ * obtaining a copy of this software and associated documentation
+ * files (the "Software"), to deal in the Software without
+ * restriction, including without limitation the rights to use,
+ * copy, modify, merge, publish, distribute, sublicense, and/or
+ * sell copies of the Software, and to permit persons to whom
+ * the Software is furnished to do so, subject to the following
+ * conditions:
+ *
+ * The above copyright notice and this permission notice shall
+ * be included in all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+ * OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+ * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+ * HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+ * WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+ * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+ * OTHER DEALINGS IN THE SOFTWARE.
+ *
+ * ----------------------------------------------------------------------- */
+
+#include <errno.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include <fcntl.h>
+#include <unistd.h>
+#include <getopt.h>
+#include <signal.h>
+#include <ctype.h>
+#include <string.h>
+#include <paths.h>
+#include <sysexits.h>
+#include <sys/types.h>
+#include <sys/file.h>
+#include <sys/time.h>
+#include <sys/wait.h>
+
+#define PACKAGE_STRING "magnum"
+#define _(x) (x)
+
+static const struct option long_options[] = {
+ { "shared", 0, NULL, 's' },
+ { "exclusive", 0, NULL, 'x' },
+ { "unlock", 0, NULL, 'u' },
+ { "nonblocking", 0, NULL, 'n' },
+ { "nb", 0, NULL, 'n' },
+ { "timeout", 1, NULL, 'w' },
+ { "wait", 1, NULL, 'w' },
+ { "close", 0, NULL, 'o' },
+ { "help", 0, NULL, 'h' },
+ { "version", 0, NULL, 'V' },
+ { 0, 0, 0, 0 }
+};
+
+const char *program;
+
+static void usage(int ex)
+{
+ fputs("fcntl-lock (" PACKAGE_STRING ")\n", stderr);
+ fprintf(stderr,
+ _("Usage: %1$s [-sxun][-w #] fd#\n"
+ " %1$s [-sxon][-w #] file [-c] command...\n"
+ " %1$s [-sxon][-w #] directory [-c] command...\n"
+ " -s --shared Get a shared lock\n"
+ " -x --exclusive Get an exclusive lock\n"
+ " -u --unlock Remove a lock\n"
+ " -n --nonblock Fail rather than wait\n"
+ " -w --timeout Wait for a limited amount of time\n"
+ " -o --close Close file descriptor before running command\n"
+ " -c --command Run a single command string through the shell\n"
+ " -h --help Display this text\n"
+ " -V --version Display version\n"),
+ program);
+ exit(ex);
+}
+
+
+static sig_atomic_t timeout_expired = 0;
+
+static void timeout_handler(int sig)
+{
+ (void)sig;
+
+ timeout_expired = 1;
+}
+
+
+static char * strtotimeval(const char *str, struct timeval *tv)
+{
+ char *s;
+ long fs; /* Fractional seconds */
+ int i;
+
+ tv->tv_sec = strtol(str, &s, 10);
+ fs = 0;
+
+ if ( *s == '.' ) {
+ s++;
+
+ for ( i = 0 ; i < 6 ; i++ ) {
+ if ( !isdigit(*s) )
+ break;
+
+ fs *= 10;
+ fs += *s++ - '0';
+ }
+
+ for ( ; i < 6; i++ )
+ fs *= 10;
+
+ while ( isdigit(*s) )
+ s++;
+ }
+
+ tv->tv_usec = fs;
+ return s;
+}
+
+int main(int argc, char *argv[])
+{
+ struct itimerval timeout, old_timer;
+ int have_timeout = 0;
+ int type = F_WRLCK;
+ int block = F_SETLKW;
+ int fd = -1;
+ int opt, ix;
+ int do_close = 0;
+ int err;
+ int status;
+ char *eon;
+ char **cmd_argv = NULL, *sh_c_argv[4];
+ struct flock lock;
+ const char *filename = NULL;
+ struct sigaction sa, old_sa;
+
+ program = argv[0];
+
+ if ( argc < 2 )
+ usage(EX_USAGE);
+
+ memset(&timeout, 0, sizeof timeout);
+
+ optopt = 0;
+ while ( (opt = getopt_long(argc, argv, "+sexnouw:hV?", long_options, &ix)) != EOF ) {
+ switch(opt) {
+ case 's':
+ type = F_RDLCK;
+ break;
+ case 'e':
+ case 'x':
+ type = F_WRLCK;
+ break;
+ case 'u':
+ type = F_UNLCK;
+ break;
+ case 'o':
+ do_close = 1;
+ break;
+ case 'n':
+ block = F_SETLK;
+ break;
+ case 'w':
+ have_timeout = 1;
+ eon = strtotimeval(optarg, &timeout.it_value);
+ if ( *eon )
+ usage(EX_USAGE);
+ break;
+ case 'V':
+ printf("fcntl-lock (%s)\n", PACKAGE_STRING);
+ exit(0);
+ default:
+ /* optopt will be set if this was an unrecognized option, i.e. *not* 'h' or '?' */
+ usage(optopt ? EX_USAGE : 0);
+ break;
+ }
+ }
+
+ if ( argc > optind+1 ) {
+ /* Run command */
+
+ if ( !strcmp(argv[optind+1], "-c") ||
+ !strcmp(argv[optind+1], "--command") ) {
+
+ if ( argc != optind+3 ) {
+ fprintf(stderr, _("%s: %s requires exactly one command argument\n"),
+ program, argv[optind+1]);
+ exit(EX_USAGE);
+ }
+
+ cmd_argv = sh_c_argv;
+
+ cmd_argv[0] = getenv("SHELL");
+ if ( !cmd_argv[0] || !*cmd_argv[0] )
+ cmd_argv[0] = _PATH_BSHELL;
+
+ cmd_argv[1] = "-c";
+ cmd_argv[2] = argv[optind+2];
+ cmd_argv[3] = 0;
+ } else {
+ cmd_argv = &argv[optind+1];
+ }
+
+ filename = argv[optind];
+ fd = open(filename, O_RDWR|O_NOCTTY|O_CREAT, 0666);
+ /* Linux doesn't like O_CREAT on a directory, even though it should be a
+ no-op */
+ if (fd < 0 && errno == EISDIR)
+ fd = open(filename, O_RDONLY|O_NOCTTY);
+
+ if ( fd < 0 ) {
+ err = errno;
+ fprintf(stderr, _("%s: cannot open lock file %s: %s\n"),
+ program, argv[optind], strerror(err));
+ exit((err == ENOMEM||err == EMFILE||err == ENFILE) ? EX_OSERR :
+ (err == EROFS||err == ENOSPC) ? EX_CANTCREAT :
+ EX_NOINPUT);
+ }
+
+ } else if (optind < argc) {
+ /* Use provided file descriptor */
+
+ fd = (int)strtol(argv[optind], &eon, 10);
+ if ( *eon || !argv[optind] ) {
+ fprintf(stderr, _("%s: bad number: %s\n"), program, argv[optind]);
+ exit(EX_USAGE);
+ }
+
+ } else {
+ /* Bad options */
+
+ fprintf(stderr, _("%s: requires file descriptor, file or directory\n"),
+ program);
+ exit(EX_USAGE);
+ }
+
+
+ if ( have_timeout ) {
+ if ( timeout.it_value.tv_sec == 0 &&
+ timeout.it_value.tv_usec == 0 ) {
+ /* -w 0 is equivalent to -n; this has to be special-cased
+ because setting an itimer to zero means disabled! */
+
+ have_timeout = 0;
+ block = F_SETLK;
+ } else {
+ memset(&sa, 0, sizeof sa);
+
+ sa.sa_handler = timeout_handler;
+ sa.sa_flags = SA_RESETHAND;
+ sigaction(SIGALRM, &sa, &old_sa);
+
+ setitimer(ITIMER_REAL, &timeout, &old_timer);
+ }
+ }
+
+ memset(&lock, 0, sizeof(lock));
+ lock.l_type = type;
+ while ( fcntl(fd, block, &lock) ) {
+ switch( (err = errno) ) {
+ case EAGAIN: /* -n option set and failed to lock */
+ case EACCES: /* -n option set and failed to lock */
+ exit(1);
+ case EINTR: /* Signal received */
+ if ( timeout_expired )
+ exit(1); /* -w option set and failed to lock */
+ continue; /* otherwise try again */
+ default: /* Other errors */
+ if ( filename )
+ fprintf(stderr, "%s: %s: %s\n", program, filename, strerror(err));
+ else
+ fprintf(stderr, "%s: %d: %s\n", program, fd, strerror(err));
+ exit((err == ENOLCK||err == ENOMEM) ? EX_OSERR : EX_DATAERR);
+ }
+ }
+
+ if ( have_timeout ) {
+ setitimer(ITIMER_REAL, &old_timer, NULL); /* Cancel itimer */
+ sigaction(SIGALRM, &old_sa, NULL); /* Cancel signal handler */
+ }
+
+ status = 0;
+
+ if ( cmd_argv ) {
+ pid_t w, f;
+
+ /* Clear any inherited settings */
+ signal(SIGCHLD, SIG_DFL);
+ f = fork();
+
+ if ( f < 0 ) {
+ err = errno;
+ fprintf(stderr, _("%s: fork failed: %s\n"), program, strerror(err));
+ exit(EX_OSERR);
+ } else if ( f == 0 ) {
+ if ( do_close )
+ close(fd);
+ err = errno;
+ execvp(cmd_argv[0], cmd_argv);
+ /* execvp() failed */
+ fprintf(stderr, "%s: %s: %s\n", program, cmd_argv[0], strerror(err));
+ _exit((err == ENOMEM) ? EX_OSERR: EX_UNAVAILABLE);
+ } else {
+ do {
+ w = waitpid(f, &status, 0);
+ if (w == -1 && errno != EINTR)
+ break;
+ } while ( w != f );
+
+ if (w == -1) {
+ err = errno;
+ status = EXIT_FAILURE;
+ fprintf(stderr, "%s: waitpid failed: %s\n", program, strerror(err));
+ } else if ( WIFEXITED(status) )
+ status = WEXITSTATUS(status);
+ else if ( WIFSIGNALED(status) )
+ status = WTERMSIG(status) + 128;
+ else
+ status = EX_OSERR; /* WTF? */
+ }
+ }
+
+ return status;
+}
diff --git a/meta/recipes-core/util-linux/util-linux/mit-license.patch b/meta/recipes-core/util-linux/util-linux/mit-license.patch
new file mode 100644
index 0000000000..afbec98f18
--- /dev/null
+++ b/meta/recipes-core/util-linux/util-linux/mit-license.patch
@@ -0,0 +1,45 @@
+From 5b8fab1584017d9d9be008c23b90128bba41a7b5 Mon Sep 17 00:00:00 2001
+From: Richard Purdie <richard.purdie@linuxfoundation.org>
+Date: Thu, 28 Mar 2024 12:16:57 +0000
+Subject: [PATCH] README.licensing/flock: Add MIT license mention
+
+Looking at the license text, flock.c is under the MIT license (see
+https://spdx.org/licenses/MIT).
+
+Add an SPDX license identifier header and add to the list of licenses the
+source so everything is correctly listed/identified.
+
+Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
+
+Upstream-Status: Submitted [https://github.com/util-linux/util-linux/pull/2870]
+
+---
+ README.licensing | 2 ++
+ sys-utils/flock.c | 4 +++-
+ 2 files changed, 5 insertions(+), 1 deletion(-)
+
+diff --git a/README.licensing b/README.licensing
+index 4454f8392a..535ad34813 100644
+--- a/README.licensing
++++ b/README.licensing
+@@ -12,6 +12,8 @@ There is code under:
+
+ * LGPL-2.1-or-later - GNU Lesser General Public License 2.1 or any later version
+
++ * MIT - MIT License
++
+ * BSD-2-Clause - Simplified BSD License
+
+ * BSD-3-Clause - BSD 3-Clause "New" or "Revised" License
+diff --git a/sys-utils/flock.c b/sys-utils/flock.c
+index fed29d7270..7d878ff810 100644
+--- a/sys-utils/flock.c
++++ b/sys-utils/flock.c
+@@ -1,4 +1,6 @@
+-/* Copyright 2003-2005 H. Peter Anvin - All Rights Reserved
++/* SPDX-License-Identifier: MIT
++ *
++ * Copyright 2003-2005 H. Peter Anvin - All Rights Reserved
+ *
+ * Permission is hereby granted, free of charge, to any person
+ * obtaining a copy of this software and associated documentation
diff --git a/meta/recipes-core/util-linux/util-linux/ptest.patch b/meta/recipes-core/util-linux/util-linux/ptest.patch
index ba2bd3f6ac..6221de7182 100644
--- a/meta/recipes-core/util-linux/util-linux/ptest.patch
+++ b/meta/recipes-core/util-linux/util-linux/ptest.patch
@@ -1,4 +1,4 @@
-From af073c13ef184ca75811df688e0a0a25827b36c3 Mon Sep 17 00:00:00 2001
+From d0a69ce80c579cbb7627a2f20e8b92e006a8d8ad Mon Sep 17 00:00:00 2001
From: Tudor Florea <tudor.florea@enea.com>
Date: Thu, 3 Dec 2015 04:08:00 +0100
Subject: [PATCH] Define TESTS variable
@@ -11,7 +11,7 @@ Upstream-Status: Pending
1 file changed, 1 insertion(+)
diff --git a/Makefile.am b/Makefile.am
-index 886598d..1cf4346 100644
+index effbb02..7d2bd1e 100644
--- a/Makefile.am
+++ b/Makefile.am
@@ -57,6 +57,7 @@ systemdsystemunit_DATA =
diff --git a/meta/recipes-core/util-linux/util-linux/run-ptest b/meta/recipes-core/util-linux/util-linux/run-ptest
index 097107cd09..7b6b1d1dc2 100644
--- a/meta/recipes-core/util-linux/util-linux/run-ptest
+++ b/meta/recipes-core/util-linux/util-linux/run-ptest
@@ -16,7 +16,15 @@ export PATH=$bindir/bin:$PATH
# losetup tests will be skipped and/or fail otherwise
modprobe loop
-./tests/run.sh --use-system-commands --parsable --show-diff | sed -u '{
+# required for mount/fallback test to pass
+# systemd does this by default, but ptest images do not use it
+# see https://man7.org/linux/man-pages/man7/mount_namespaces.7.html
+# for a long description of mount namespaces in Linux
+mount --make-shared /
+
+# lsfd/option-inet has races in the test script:
+# https://github.com/util-linux/util-linux/issues/2399
+./tests/run.sh --use-system-commands --parsable --show-diff --exclude=lsfd/option-inet | sed -u '{
s/^\(.*\):\(.*\) \.\.\. OK$/PASS: \1:\2/
s/^\(.*\):\(.*\) \.\.\. FAILED \(.*\)$/FAIL: \1:\2 \3/
s/^\(.*\):\(.*\) \.\.\. SKIPPED \(.*\)$/SKIP: \1:\2 \3/
diff --git a/meta/recipes-core/util-linux/util-linux_2.38.bb b/meta/recipes-core/util-linux/util-linux_2.38.bb
deleted file mode 100644
index 8a7b47a0c6..0000000000
--- a/meta/recipes-core/util-linux/util-linux_2.38.bb
+++ /dev/null
@@ -1,321 +0,0 @@
-require util-linux.inc
-
-#gtk-doc is not enabled as it requires xmlto which requires util-linux
-inherit autotools gettext manpages pkgconfig systemd update-alternatives python3-dir bash-completion ptest
-DEPENDS = "libcap-ng ncurses virtual/crypt zlib util-linux-libuuid"
-
-PACKAGES =+ "${PN}-swaponoff"
-PACKAGES += "${@bb.utils.contains('PACKAGECONFIG', 'pylibmount', '${PN}-pylibmount', '', d)}"
-
-python util_linux_binpackages () {
- def pkg_hook(f, pkg, file_regex, output_pattern, modulename):
- pn = d.getVar('PN')
- d.appendVar('RRECOMMENDS:%s' % pn, ' %s' % pkg)
-
- if d.getVar('ALTERNATIVE:' + pkg):
- return
- if d.getVarFlag('ALTERNATIVE_LINK_NAME', modulename):
- d.setVar('ALTERNATIVE:' + pkg, modulename)
-
- bindirs = sorted(list(set(d.expand("${base_sbindir} ${base_bindir} ${sbindir} ${bindir}").split())))
- for dir in bindirs:
- do_split_packages(d, root=dir,
- file_regex=r'(.*)', output_pattern='${PN}-%s',
- description='${PN} %s',
- hook=pkg_hook, extra_depends='')
-
- # There are some symlinks for some binaries which we have ignored
- # above. Add them to the package owning the binary they are
- # pointing to
- extras = {}
- dvar = d.getVar('PKGD')
- for root in bindirs:
- for walkroot, dirs, files in os.walk(dvar + root):
- for f in files:
- file = os.path.join(walkroot, f)
- if not os.path.islink(file):
- continue
-
- pkg = os.path.basename(os.readlink(file))
- extras.setdefault(pkg, [])
- extras[pkg].append(file.replace(dvar, '', 1))
-
- pn = d.getVar('PN')
- for pkg, links in extras.items():
- of = d.getVar('FILES:' + pn + '-' + pkg)
- links = of + " " + " ".join(sorted(links))
- d.setVar('FILES:' + pn + '-' + pkg, links)
-}
-
-# we must execute before update-alternatives PACKAGE_PREPROCESS_FUNCS
-PACKAGE_PREPROCESS_FUNCS =+ "util_linux_binpackages "
-
-# skip libuuid as it will be packaged by the util-linux-libuuid recipe
-python util_linux_libpackages() {
- do_split_packages(d, root=d.getVar('UTIL_LINUX_LIBDIR'), file_regex=r'^lib(?!uuid)(.*)\.so\..*$',
- output_pattern='${PN}-lib%s',
- description='${PN} lib%s',
- extra_depends='', prepend=True, allow_links=True)
-}
-
-PACKAGESPLITFUNCS =+ "util_linux_libpackages"
-
-PACKAGES_DYNAMIC = "^${PN}-.*"
-
-CACHED_CONFIGUREVARS += "scanf_cv_alloc_modifier=ms"
-UTIL_LINUX_LIBDIR = "${libdir}"
-UTIL_LINUX_LIBDIR:class-target = "${base_libdir}"
-EXTRA_OECONF = "\
- --enable-libuuid --enable-libblkid \
- \
- --enable-fsck --enable-kill --enable-last --enable-mesg \
- --enable-mount --enable-partx --enable-raw --enable-rfkill \
- --enable-unshare --enable-write \
- \
- --disable-bfs --disable-login \
- --disable-makeinstall-chown --disable-minix --disable-newgrp \
- --disable-use-tty-group --disable-vipw --disable-raw \
- \
- --without-udev \
- \
- usrsbin_execdir='${sbindir}' \
- --libdir='${UTIL_LINUX_LIBDIR}' \
-"
-
-EXTRA_OECONF:append:class-target = " --enable-setpriv"
-EXTRA_OECONF:append:class-native = " --without-cap-ng --disable-setpriv"
-EXTRA_OECONF:append:class-nativesdk = " --without-cap-ng --disable-setpriv"
-EXTRA_OECONF:append = " --disable-hwclock-gplv3"
-
-# enable pcre2 for native/nativesdk to match host distros
-# this helps to keep same expectations when using the SDK or
-# build host versions during development
-#
-PACKAGECONFIG ?= "pcre2"
-PACKAGECONFIG:class-target ?= "${@bb.utils.contains('DISTRO_FEATURES', 'pam', 'chfn-chsh pam', '', d)}"
-# inherit manpages requires this to be present, however util-linux does not have
-# configuration options, and installs manpages always
-PACKAGECONFIG[manpages] = ""
-PACKAGECONFIG[pam] = "--enable-su --enable-runuser,--disable-su --disable-runuser, libpam,"
-# Respect the systemd feature for uuidd
-PACKAGECONFIG[systemd] = "--with-systemd --with-systemdsystemunitdir=${systemd_system_unitdir}, --without-systemd --without-systemdsystemunitdir,systemd"
-# Build python bindings for libmount
-PACKAGECONFIG[pylibmount] = "--with-python=3 --enable-pylibmount,--without-python --disable-pylibmount,python3"
-# Readline support
-PACKAGECONFIG[readline] = "--with-readline,--without-readline,readline"
-# PCRE support in hardlink
-PACKAGECONFIG[pcre2] = ",,libpcre2"
-PACKAGECONFIG[cryptsetup] = "--with-cryptsetup,--without-cryptsetup,cryptsetup"
-PACKAGECONFIG[chfn-chsh] = "--enable-chfn-chsh,--disable-chfn-chsh,"
-
-EXTRA_OEMAKE = "ARCH=${TARGET_ARCH} CPU= CPUOPT= 'OPT=${CFLAGS}'"
-
-ALLOW_EMPTY:${PN} = "1"
-FILES:${PN} = ""
-FILES:${PN}-doc += "${datadir}/getopt/getopt-*.*"
-FILES:${PN}-dev += "${PYTHON_SITEPACKAGES_DIR}/libmount/pylibmount.la"
-FILES:${PN}-mount = "${sysconfdir}/default/mountall"
-FILES:${PN}-runuser = "${sysconfdir}/pam.d/runuser*"
-FILES:${PN}-su = "${sysconfdir}/pam.d/su-l"
-CONFFILES:${PN}-su = "${sysconfdir}/pam.d/su-l"
-FILES:${PN}-pylibmount = "${PYTHON_SITEPACKAGES_DIR}/libmount/pylibmount.so \
- ${PYTHON_SITEPACKAGES_DIR}/libmount/__init__.* \
- ${PYTHON_SITEPACKAGES_DIR}/libmount/__pycache__/*"
-
-# Util-linux' blkid replaces the e2fsprogs one
-RCONFLICTS:${PN}-blkid = "${MLPREFIX}e2fsprogs-blkid"
-RREPLACES:${PN}-blkid = "${MLPREFIX}e2fsprogs-blkid"
-
-RRECOMMENDS:${PN}:class-native = ""
-RRECOMMENDS:${PN}:class-nativesdk = ""
-RDEPENDS:${PN}:class-native = ""
-RDEPENDS:${PN}:class-nativesdk = ""
-
-RDEPENDS:${PN} += " util-linux-libuuid"
-RDEPENDS:${PN}-dev += " util-linux-libuuid-dev"
-
-RPROVIDES:${PN}-dev = "${PN}-libblkid-dev ${PN}-libmount-dev"
-
-RDEPENDS:${PN}-bash-completion += "${PN}-lsblk"
-RDEPENDS:${PN}-ptest += "bash bc btrfs-tools coreutils e2fsprogs findutils grep iproute2 kmod mdadm procps sed socat which xz"
-RRECOMMENDS:${PN}-ptest += "kernel-module-scsi-debug kernel-module-sd-mod kernel-module-loop kernel-module-algif-hash"
-RDEPENDS:${PN}-swaponoff = "${PN}-swapon ${PN}-swapoff"
-ALLOW_EMPTY:${PN}-swaponoff = "1"
-
-#SYSTEMD_PACKAGES = "${PN}-uuidd ${PN}-fstrim"
-SYSTEMD_SERVICE:${PN}-uuidd = "uuidd.socket uuidd.service"
-SYSTEMD_AUTO_ENABLE:${PN}-uuidd = "disable"
-SYSTEMD_SERVICE:${PN}-fstrim = "fstrim.timer fstrim.service"
-SYSTEMD_AUTO_ENABLE:${PN}-fstrim = "disable"
-
-do_install () {
- # with ccache the timestamps on compiled files may
- # end up earlier than on their inputs, this allows
- # for the resultant compilation in the install step.
- oe_runmake 'CC=${CC}' 'LD=${LD}' \
- 'LDFLAGS=${LDFLAGS}' 'DESTDIR=${D}' install
-
- mkdir -p ${D}${base_bindir}
-
- sbinprogs="agetty ctrlaltdel cfdisk vipw vigr"
- sbinprogs_a="pivot_root hwclock mkswap losetup swapon swapoff fdisk fsck blkid blockdev fstrim sulogin switch_root nologin"
- binprogs_a="dmesg getopt kill more umount mount login su mountpoint"
-
- if [ "${base_sbindir}" != "${sbindir}" ]; then
- mkdir -p ${D}${base_sbindir}
- for p in $sbinprogs $sbinprogs_a; do
- if [ -f "${D}${sbindir}/$p" ]; then
- mv "${D}${sbindir}/$p" "${D}${base_sbindir}/$p"
- fi
- done
- fi
-
- if [ "${base_bindir}" != "${bindir}" ]; then
- mkdir -p ${D}${base_bindir}
- for p in $binprogs_a; do
- if [ -f "${D}${bindir}/$p" ]; then
- mv "${D}${bindir}/$p" "${D}${base_bindir}/$p"
- fi
- done
- fi
-
- install -d ${D}${sysconfdir}/default/
- echo 'MOUNTALL="-t nonfs,nosmbfs,noncpfs"' > ${D}${sysconfdir}/default/mountall
-
- rm -f ${D}${bindir}/chkdupexe
-}
-
-do_install:append:class-target () {
- if [ "${@bb.utils.filter('PACKAGECONFIG', 'pam', d)}" ]; then
- install -d ${D}${sysconfdir}/pam.d
- install -m 0644 ${WORKDIR}/runuser.pamd ${D}${sysconfdir}/pam.d/runuser
- install -m 0644 ${WORKDIR}/runuser-l.pamd ${D}${sysconfdir}/pam.d/runuser-l
- # Required for "su -" aka "su --login" because
- # otherwise it uses "other", which has "auth pam_deny.so"
- # and thus prevents the operation.
- ln -s su ${D}${sysconfdir}/pam.d/su-l
- fi
-}
-# nologin causes a conflict with shadow-native
-# kill causes a conflict with coreutils-native (if ${bindir}==${base_bindir})
-do_install:append:class-native () {
- rm -f ${D}${base_sbindir}/nologin
- rm -f ${D}${base_bindir}/kill
-}
-
-# dm-verity support introduces a circular build dependency, so util-linux-libuuid is split out for target builds
-# Need to build libuuid for uuidgen, but then delete it and let the other recipe ship it
-do_install:append () {
- rm -rf ${D}${includedir}/uuid ${D}${libdir}/pkgconfig/uuid.pc ${D}${libdir}/libuuid* ${D}${base_libdir}/libuuid*
-}
-
-ALTERNATIVE_PRIORITY = "80"
-
-ALTERNATIVE_LINK_NAME[blkid] = "${base_sbindir}/blkid"
-ALTERNATIVE_LINK_NAME[blockdev] = "${base_sbindir}/blockdev"
-ALTERNATIVE_LINK_NAME[cal] = "${bindir}/cal"
-ALTERNATIVE_LINK_NAME[chfn] = "${bindir}/chfn"
-ALTERNATIVE_LINK_NAME[chsh] = "${bindir}/chsh"
-ALTERNATIVE_LINK_NAME[chrt] = "${bindir}/chrt"
-ALTERNATIVE_LINK_NAME[dmesg] = "${base_bindir}/dmesg"
-ALTERNATIVE_LINK_NAME[eject] = "${bindir}/eject"
-ALTERNATIVE_LINK_NAME[fallocate] = "${bindir}/fallocate"
-ALTERNATIVE_LINK_NAME[fdisk] = "${base_sbindir}/fdisk"
-ALTERNATIVE_LINK_NAME[findfs] = "${sbindir}/findfs"
-ALTERNATIVE_LINK_NAME[flock] = "${bindir}/flock"
-ALTERNATIVE_LINK_NAME[fsck] = "${base_sbindir}/fsck"
-ALTERNATIVE_LINK_NAME[fsfreeze] = "${sbindir}/fsfreeze"
-ALTERNATIVE_LINK_NAME[fstrim] = "${base_sbindir}/fstrim"
-ALTERNATIVE_LINK_NAME[getopt] = "${base_bindir}/getopt"
-ALTERNATIVE:${PN}-agetty = "getty"
-ALTERNATIVE_LINK_NAME[getty] = "${base_sbindir}/getty"
-ALTERNATIVE_TARGET[getty] = "${base_sbindir}/agetty"
-ALTERNATIVE_LINK_NAME[hexdump] = "${bindir}/hexdump"
-ALTERNATIVE_LINK_NAME[hwclock] = "${base_sbindir}/hwclock"
-ALTERNATIVE_LINK_NAME[ionice] = "${bindir}/ionice"
-ALTERNATIVE_LINK_NAME[kill] = "${base_bindir}/kill"
-ALTERNATIVE:${PN}-last = "last lastb"
-ALTERNATIVE_LINK_NAME[last] = "${bindir}/last"
-ALTERNATIVE_LINK_NAME[lastb] = "${bindir}/lastb"
-ALTERNATIVE_LINK_NAME[logger] = "${bindir}/logger"
-ALTERNATIVE_LINK_NAME[losetup] = "${base_sbindir}/losetup"
-ALTERNATIVE_LINK_NAME[mesg] = "${bindir}/mesg"
-ALTERNATIVE_LINK_NAME[mkswap] = "${base_sbindir}/mkswap"
-ALTERNATIVE_LINK_NAME[mcookie] = "${bindir}/mcookie"
-ALTERNATIVE_LINK_NAME[more] = "${base_bindir}/more"
-ALTERNATIVE_LINK_NAME[mount] = "${base_bindir}/mount"
-ALTERNATIVE_LINK_NAME[mountpoint] = "${base_bindir}/mountpoint"
-ALTERNATIVE_LINK_NAME[nologin] = "${base_sbindir}/nologin"
-ALTERNATIVE_LINK_NAME[nsenter] = "${bindir}/nsenter"
-ALTERNATIVE_LINK_NAME[pivot_root] = "${base_sbindir}/pivot_root"
-ALTERNATIVE_LINK_NAME[prlimit] = "${bindir}/prlimit"
-ALTERNATIVE_LINK_NAME[readprofile] = "${sbindir}/readprofile"
-ALTERNATIVE_LINK_NAME[renice] = "${bindir}/renice"
-ALTERNATIVE_LINK_NAME[rev] = "${bindir}/rev"
-ALTERNATIVE_LINK_NAME[rfkill] = "${sbindir}/rfkill"
-ALTERNATIVE_LINK_NAME[rtcwake] = "${sbindir}/rtcwake"
-ALTERNATIVE_LINK_NAME[setpriv] = "${bindir}/setpriv"
-ALTERNATIVE_LINK_NAME[setsid] = "${bindir}/setsid"
-ALTERNATIVE_LINK_NAME[su] = "${base_bindir}/su"
-ALTERNATIVE_LINK_NAME[sulogin] = "${base_sbindir}/sulogin"
-ALTERNATIVE_LINK_NAME[swapoff] = "${base_sbindir}/swapoff"
-ALTERNATIVE_LINK_NAME[swapon] = "${base_sbindir}/swapon"
-ALTERNATIVE_LINK_NAME[switch_root] = "${base_sbindir}/switch_root"
-ALTERNATIVE_LINK_NAME[taskset] = "${bindir}/taskset"
-ALTERNATIVE_LINK_NAME[umount] = "${base_bindir}/umount"
-ALTERNATIVE_LINK_NAME[unshare] = "${bindir}/unshare"
-ALTERNATIVE_LINK_NAME[utmpdump] = "${bindir}/utmpdump"
-ALTERNATIVE_LINK_NAME[uuidgen] = "${bindir}/uuidgen"
-ALTERNATIVE_LINK_NAME[wall] = "${bindir}/wall"
-
-ALTERNATIVE:${PN}-doc = "\
-blkid.8 eject.1 findfs.8 fsck.8 kill.1 last.1 lastb.1 libblkid.3 logger.1 mesg.1 \
-mountpoint.1 nologin.8 rfkill.8 sulogin.8 utmpdump.1 uuid.3 wall.1\
-"
-ALTERNATIVE:${PN}-doc += "${@bb.utils.contains('PACKAGECONFIG', 'pam', 'su.1', '', d)}"
-
-ALTERNATIVE_LINK_NAME[blkid.8] = "${mandir}/man8/blkid.8"
-ALTERNATIVE_LINK_NAME[eject.1] = "${mandir}/man1/eject.1"
-ALTERNATIVE_LINK_NAME[findfs.8] = "${mandir}/man8/findfs.8"
-ALTERNATIVE_LINK_NAME[fsck.8] = "${mandir}/man8/fsck.8"
-ALTERNATIVE_LINK_NAME[kill.1] = "${mandir}/man1/kill.1"
-ALTERNATIVE_LINK_NAME[last.1] = "${mandir}/man1/last.1"
-ALTERNATIVE_LINK_NAME[lastb.1] = "${mandir}/man1/lastb.1"
-ALTERNATIVE_LINK_NAME[libblkid.3] = "${mandir}/man3/libblkid.3"
-ALTERNATIVE_LINK_NAME[logger.1] = "${mandir}/man1/logger.1"
-ALTERNATIVE_LINK_NAME[mesg.1] = "${mandir}/man1/mesg.1"
-ALTERNATIVE_LINK_NAME[mountpoint.1] = "${mandir}/man1/mountpoint.1"
-ALTERNATIVE_LINK_NAME[nologin.8] = "${mandir}/man8/nologin.8"
-ALTERNATIVE_LINK_NAME[rfkill.8] = "${mandir}/man8/rfkill.8"
-ALTERNATIVE_LINK_NAME[setpriv.1] = "${mandir}/man1/setpriv.1"
-ALTERNATIVE_LINK_NAME[su.1] = "${mandir}/man1/su.1"
-ALTERNATIVE_LINK_NAME[sulogin.8] = "${mandir}/man8/sulogin.8"
-ALTERNATIVE_LINK_NAME[utmpdump.1] = "${mandir}/man1/utmpdump.1"
-ALTERNATIVE_LINK_NAME[uuid.3] = "${mandir}/man3/uuid.3"
-ALTERNATIVE_LINK_NAME[wall.1] = "${mandir}/man1/wall.1"
-
-BBCLASSEXTEND = "native nativesdk"
-
-PTEST_BINDIR = "1"
-do_compile_ptest() {
- oe_runmake buildtest-TESTS
-}
-
-do_install_ptest() {
- mkdir -p ${D}${PTEST_PATH}/tests/ts
- find . -name 'test*' -maxdepth 1 -type f -perm -111 -exec cp {} ${D}${PTEST_PATH} \;
- find ./.libs -name 'sample*' -maxdepth 1 -type f -perm -111 -exec cp {} ${D}${PTEST_PATH} \;
- find ./.libs -name 'test*' -maxdepth 1 -type f -perm -111 -exec cp {} ${D}${PTEST_PATH} \;
-
- cp ${S}/tests/*.sh ${D}${PTEST_PATH}/tests/
- cp -pR ${S}/tests/expected ${D}${PTEST_PATH}/tests/expected
- cp -pR ${S}/tests/ts ${D}${PTEST_PATH}/tests/
- cp ${WORKDIR}/build/config.h ${D}${PTEST_PATH}
-
- sed -i 's|@base_sbindir@|${base_sbindir}|g' ${D}${PTEST_PATH}/run-ptest
-
- # chfn needs PAM
- if ! ${@bb.utils.contains('PACKAGECONFIG', 'pam', 'true', 'false', d)}; then
- rm -rf ${D}${PTEST_PATH}/tests/ts/chfn
- fi
-}
diff --git a/meta/recipes-core/util-linux/util-linux_2.39.3.bb b/meta/recipes-core/util-linux/util-linux_2.39.3.bb
new file mode 100644
index 0000000000..83b3f4e05b
--- /dev/null
+++ b/meta/recipes-core/util-linux/util-linux_2.39.3.bb
@@ -0,0 +1,345 @@
+require util-linux.inc
+
+inherit autotools gettext manpages pkgconfig systemd update-alternatives python3-dir bash-completion ptest gtk-doc
+DEPENDS = "libcap-ng ncurses virtual/crypt zlib util-linux-libuuid"
+
+PACKAGES =+ "${PN}-swaponoff"
+PACKAGES += "${@bb.utils.contains('PACKAGECONFIG', 'pylibmount', '${PN}-pylibmount', '', d)}"
+
+python util_linux_binpackages () {
+ def pkg_hook(f, pkg, file_regex, output_pattern, modulename):
+ pn = d.getVar('PN')
+ d.appendVar('RRECOMMENDS:%s' % pn, ' %s' % pkg)
+
+ if d.getVar('ALTERNATIVE:' + pkg):
+ return
+ if d.getVarFlag('ALTERNATIVE_LINK_NAME', modulename):
+ d.setVar('ALTERNATIVE:' + pkg, modulename)
+
+ bindirs = sorted(list(set(d.expand("${base_sbindir} ${base_bindir} ${sbindir} ${bindir}").split())))
+ for dir in bindirs:
+ do_split_packages(d, root=dir,
+ file_regex=r'(.*)', output_pattern='${PN}-%s',
+ description='${PN} %s',
+ hook=pkg_hook, extra_depends='')
+
+ # There are some symlinks for some binaries which we have ignored
+ # above. Add them to the package owning the binary they are
+ # pointing to
+ extras = {}
+ dvar = d.getVar('PKGD')
+ for root in bindirs:
+ for walkroot, dirs, files in os.walk(dvar + root):
+ for f in files:
+ file = os.path.join(walkroot, f)
+ if not os.path.islink(file):
+ continue
+
+ pkg = os.path.basename(os.readlink(file))
+ extras.setdefault(pkg, [])
+ extras[pkg].append(file.replace(dvar, '', 1))
+
+ pn = d.getVar('PN')
+ for pkg, links in extras.items():
+ of = d.getVar('FILES:' + pn + '-' + pkg)
+ links = of + " " + " ".join(sorted(links))
+ d.setVar('FILES:' + pn + '-' + pkg, links)
+}
+
+# we must execute before update-alternatives PACKAGE_PREPROCESS_FUNCS
+PACKAGE_PREPROCESS_FUNCS =+ "util_linux_binpackages "
+
+# skip libuuid as it will be packaged by the util-linux-libuuid recipe
+python util_linux_libpackages() {
+ do_split_packages(d, root=d.getVar('UTIL_LINUX_LIBDIR'), file_regex=r'^lib(?!uuid)(.*)\.so\..*$',
+ output_pattern='${PN}-lib%s',
+ description='${PN} lib%s',
+ extra_depends='', prepend=True, allow_links=True)
+}
+
+PACKAGESPLITFUNCS =+ "util_linux_libpackages"
+
+PACKAGES_DYNAMIC = "^${PN}-.*"
+
+UTIL_LINUX_LIBDIR = "${libdir}"
+UTIL_LINUX_LIBDIR:class-target = "${base_libdir}"
+EXTRA_OECONF = "\
+ --enable-libuuid --enable-libblkid \
+ \
+ --enable-fsck --enable-kill --enable-last --enable-mesg \
+ --enable-mount --enable-partx --enable-rfkill \
+ --enable-unshare --enable-write \
+ \
+ --disable-bfs --disable-login \
+ --disable-makeinstall-chown --disable-minix --disable-newgrp \
+ --disable-use-tty-group --disable-vipw --disable-raw \
+ \
+ --without-udev \
+ \
+ usrsbin_execdir='${sbindir}' \
+ --libdir='${UTIL_LINUX_LIBDIR}' \
+"
+
+EXTRA_OECONF:append:class-target = " --enable-setpriv"
+EXTRA_OECONF:append:class-native = " --without-cap-ng --disable-setpriv"
+EXTRA_OECONF:append:class-nativesdk = " --without-cap-ng --disable-setpriv"
+EXTRA_OECONF:append = " --disable-hwclock-gplv3"
+
+# enable pcre2 for native/nativesdk to match host distros
+# this helps to keep same expectations when using the SDK or
+# build host versions during development
+#
+PACKAGECONFIG ?= "pcre2"
+PACKAGECONFIG:class-target ?= "${@bb.utils.contains('DISTRO_FEATURES', 'pam', 'chfn-chsh pam', '', d)}"
+# inherit manpages requires this to be present, however util-linux does not have
+# configuration options, and installs manpages always
+PACKAGECONFIG[manpages] = ""
+PACKAGECONFIG[pam] = "--enable-su --enable-runuser,--disable-su --disable-runuser, libpam,"
+# Respect the systemd feature for uuidd
+PACKAGECONFIG[systemd] = "--with-systemd --with-systemdsystemunitdir=${systemd_system_unitdir}, --without-systemd --without-systemdsystemunitdir,systemd"
+# Build python bindings for libmount
+PACKAGECONFIG[pylibmount] = "--with-python=3 --enable-pylibmount,--without-python --disable-pylibmount,python3"
+# Readline support
+PACKAGECONFIG[readline] = "--with-readline,--without-readline,readline"
+# PCRE support in hardlink
+PACKAGECONFIG[pcre2] = ",,libpcre2"
+PACKAGECONFIG[cryptsetup] = "--with-cryptsetup,--without-cryptsetup,cryptsetup"
+PACKAGECONFIG[chfn-chsh] = "--enable-chfn-chsh,--disable-chfn-chsh,"
+PACKAGECONFIG[selinux] = "--with-selinux,--without-selinux,libselinux"
+
+EXTRA_OEMAKE = "ARCH=${TARGET_ARCH} CPU= CPUOPT= 'OPT=${CFLAGS}'"
+
+ALLOW_EMPTY:${PN} = "1"
+FILES:${PN} = ""
+FILES:${PN}-doc += "${datadir}/getopt/getopt-*.*"
+FILES:${PN}-dev += "${PYTHON_SITEPACKAGES_DIR}/libmount/pylibmount.la"
+FILES:${PN}-mount = "${sysconfdir}/default/mountall"
+FILES:${PN}-runuser = "${sysconfdir}/pam.d/runuser*"
+FILES:${PN}-su = "${sysconfdir}/pam.d/su-l"
+CONFFILES:${PN}-su = "${sysconfdir}/pam.d/su-l"
+FILES:${PN}-pylibmount = "${PYTHON_SITEPACKAGES_DIR}/libmount/pylibmount.so \
+ ${PYTHON_SITEPACKAGES_DIR}/libmount/__init__.* \
+ ${PYTHON_SITEPACKAGES_DIR}/libmount/__pycache__/*"
+
+# Util-linux' blkid replaces the e2fsprogs one
+RCONFLICTS:${PN}-blkid = "${MLPREFIX}e2fsprogs-blkid"
+RREPLACES:${PN}-blkid = "${MLPREFIX}e2fsprogs-blkid"
+
+RRECOMMENDS:${PN}:class-native = ""
+RRECOMMENDS:${PN}:class-nativesdk = ""
+RDEPENDS:${PN}:class-native = ""
+RDEPENDS:${PN}:class-nativesdk = ""
+
+RDEPENDS:${PN} += " util-linux-libuuid"
+RDEPENDS:${PN}-dev += " util-linux-libuuid-dev"
+
+RPROVIDES:${PN}-dev = "${PN}-libblkid-dev ${PN}-libmount-dev"
+
+RDEPENDS:${PN}-bash-completion += "${PN}-lsblk"
+RDEPENDS:${PN}-ptest += "bash bc btrfs-tools coreutils e2fsprogs findutils grep iproute2 kmod procps sed socat which xz"
+RRECOMMENDS:${PN}-ptest += "kernel-module-scsi-debug kernel-module-sd-mod kernel-module-loop kernel-module-algif-hash"
+RDEPENDS:${PN}-swaponoff = "${PN}-swapon ${PN}-swapoff"
+ALLOW_EMPTY:${PN}-swaponoff = "1"
+
+#SYSTEMD_PACKAGES = "${PN}-uuidd ${PN}-fstrim"
+SYSTEMD_SERVICE:${PN}-uuidd = "uuidd.socket uuidd.service"
+SYSTEMD_AUTO_ENABLE:${PN}-uuidd = "disable"
+SYSTEMD_SERVICE:${PN}-fstrim = "fstrim.timer fstrim.service"
+SYSTEMD_AUTO_ENABLE:${PN}-fstrim = "disable"
+
+do_compile:append () {
+ cp ${WORKDIR}/fcntl-lock.c ${S}/fcntl-lock.c
+ ${CC} ${CFLAGS} ${LDFLAGS} ${S}/fcntl-lock.c -o ${B}/fcntl-lock
+}
+
+do_install () {
+ # with ccache the timestamps on compiled files may
+ # end up earlier than on their inputs, this allows
+ # for the resultant compilation in the install step.
+ oe_runmake 'CC=${CC}' 'LD=${LD}' \
+ 'LDFLAGS=${LDFLAGS}' 'DESTDIR=${D}' install
+
+ mkdir -p ${D}${base_bindir}
+
+ sbinprogs="agetty ctrlaltdel cfdisk vipw vigr"
+ sbinprogs_a="pivot_root hwclock mkswap losetup swapon swapoff fdisk fsck blkid blockdev fstrim sulogin switch_root nologin"
+ binprogs_a="dmesg getopt kill more umount mount login su mountpoint"
+
+ if [ "${base_sbindir}" != "${sbindir}" ]; then
+ mkdir -p ${D}${base_sbindir}
+ for p in $sbinprogs $sbinprogs_a; do
+ if [ -f "${D}${sbindir}/$p" ]; then
+ mv "${D}${sbindir}/$p" "${D}${base_sbindir}/$p"
+ fi
+ done
+ fi
+
+ if [ "${base_bindir}" != "${bindir}" ]; then
+ mkdir -p ${D}${base_bindir}
+ for p in $binprogs_a; do
+ if [ -f "${D}${bindir}/$p" ]; then
+ mv "${D}${bindir}/$p" "${D}${base_bindir}/$p"
+ fi
+ done
+ fi
+
+ install -d ${D}${sysconfdir}/default/
+ echo 'MOUNTALL="-t nonfs,nosmbfs,noncpfs"' > ${D}${sysconfdir}/default/mountall
+
+ rm -f ${D}${bindir}/chkdupexe
+
+ install -m 0755 ${B}/fcntl-lock ${D}${bindir}
+}
+
+do_install:append:class-target () {
+ if [ "${@bb.utils.filter('PACKAGECONFIG', 'pam', d)}" ]; then
+ install -d ${D}${sysconfdir}/pam.d
+ install -m 0644 ${WORKDIR}/runuser.pamd ${D}${sysconfdir}/pam.d/runuser
+ install -m 0644 ${WORKDIR}/runuser-l.pamd ${D}${sysconfdir}/pam.d/runuser-l
+ # Required for "su -" aka "su --login" because
+ # otherwise it uses "other", which has "auth pam_deny.so"
+ # and thus prevents the operation.
+ ln -s su ${D}${sysconfdir}/pam.d/su-l
+ fi
+}
+# nologin causes a conflict with shadow-native
+# kill causes a conflict with coreutils-native (if ${bindir}==${base_bindir})
+do_install:append:class-native () {
+ rm -f ${D}${base_sbindir}/nologin
+ rm -f ${D}${base_bindir}/kill
+}
+
+# dm-verity support introduces a circular build dependency, so util-linux-libuuid is split out for target builds
+# Need to build libuuid for uuidgen, but then delete it and let the other recipe ship it
+do_install:append () {
+ rm -rf ${D}${includedir}/uuid ${D}${libdir}/pkgconfig/uuid.pc ${D}${libdir}/libuuid* ${D}${base_libdir}/libuuid*
+}
+
+ALTERNATIVE_PRIORITY = "80"
+
+ALTERNATIVE_LINK_NAME[blkid] = "${base_sbindir}/blkid"
+ALTERNATIVE_LINK_NAME[blockdev] = "${base_sbindir}/blockdev"
+ALTERNATIVE_LINK_NAME[cal] = "${bindir}/cal"
+ALTERNATIVE_LINK_NAME[chfn] = "${bindir}/chfn"
+ALTERNATIVE_LINK_NAME[chsh] = "${bindir}/chsh"
+ALTERNATIVE_LINK_NAME[chrt] = "${bindir}/chrt"
+ALTERNATIVE_LINK_NAME[dmesg] = "${base_bindir}/dmesg"
+ALTERNATIVE_LINK_NAME[eject] = "${bindir}/eject"
+ALTERNATIVE_LINK_NAME[fallocate] = "${bindir}/fallocate"
+ALTERNATIVE_LINK_NAME[fdisk] = "${base_sbindir}/fdisk"
+ALTERNATIVE_LINK_NAME[findfs] = "${sbindir}/findfs"
+ALTERNATIVE_LINK_NAME[flock] = "${bindir}/flock"
+ALTERNATIVE_LINK_NAME[fsck] = "${base_sbindir}/fsck"
+ALTERNATIVE_LINK_NAME[fsfreeze] = "${sbindir}/fsfreeze"
+ALTERNATIVE_LINK_NAME[fstrim] = "${base_sbindir}/fstrim"
+ALTERNATIVE_LINK_NAME[getopt] = "${base_bindir}/getopt"
+ALTERNATIVE:${PN}-agetty = "getty"
+ALTERNATIVE_LINK_NAME[getty] = "${base_sbindir}/getty"
+ALTERNATIVE_TARGET[getty] = "${base_sbindir}/agetty"
+ALTERNATIVE_LINK_NAME[hexdump] = "${bindir}/hexdump"
+ALTERNATIVE_LINK_NAME[hwclock] = "${base_sbindir}/hwclock"
+ALTERNATIVE_LINK_NAME[ionice] = "${bindir}/ionice"
+ALTERNATIVE_LINK_NAME[ipcrm] = "${bindir}/ipcrm"
+ALTERNATIVE_LINK_NAME[ipcs] = "${bindir}/ipcs"
+ALTERNATIVE_LINK_NAME[kill] = "${base_bindir}/kill"
+ALTERNATIVE:${PN}-last = "last lastb"
+ALTERNATIVE_LINK_NAME[last] = "${bindir}/last"
+ALTERNATIVE_LINK_NAME[lastb] = "${bindir}/lastb"
+ALTERNATIVE_LINK_NAME[logger] = "${bindir}/logger"
+ALTERNATIVE_LINK_NAME[losetup] = "${base_sbindir}/losetup"
+ALTERNATIVE_LINK_NAME[mesg] = "${bindir}/mesg"
+ALTERNATIVE_LINK_NAME[mkswap] = "${base_sbindir}/mkswap"
+ALTERNATIVE_LINK_NAME[mcookie] = "${bindir}/mcookie"
+ALTERNATIVE_LINK_NAME[more] = "${base_bindir}/more"
+ALTERNATIVE_LINK_NAME[mount] = "${base_bindir}/mount"
+ALTERNATIVE_LINK_NAME[mountpoint] = "${base_bindir}/mountpoint"
+ALTERNATIVE_LINK_NAME[nologin] = "${base_sbindir}/nologin"
+ALTERNATIVE_LINK_NAME[nsenter] = "${bindir}/nsenter"
+ALTERNATIVE_LINK_NAME[pivot_root] = "${base_sbindir}/pivot_root"
+ALTERNATIVE_LINK_NAME[prlimit] = "${bindir}/prlimit"
+ALTERNATIVE_LINK_NAME[readprofile] = "${sbindir}/readprofile"
+ALTERNATIVE_LINK_NAME[renice] = "${bindir}/renice"
+ALTERNATIVE_LINK_NAME[rev] = "${bindir}/rev"
+ALTERNATIVE_LINK_NAME[rfkill] = "${sbindir}/rfkill"
+ALTERNATIVE_LINK_NAME[rtcwake] = "${sbindir}/rtcwake"
+ALTERNATIVE_LINK_NAME[scriptreplay] = "${bindir}/scriptreplay"
+ALTERNATIVE_LINK_NAME[setpriv] = "${bindir}/setpriv"
+ALTERNATIVE_LINK_NAME[setsid] = "${bindir}/setsid"
+ALTERNATIVE_LINK_NAME[su] = "${base_bindir}/su"
+ALTERNATIVE_LINK_NAME[sulogin] = "${base_sbindir}/sulogin"
+ALTERNATIVE_LINK_NAME[swapoff] = "${base_sbindir}/swapoff"
+ALTERNATIVE_LINK_NAME[swapon] = "${base_sbindir}/swapon"
+ALTERNATIVE_LINK_NAME[switch_root] = "${base_sbindir}/switch_root"
+ALTERNATIVE_LINK_NAME[taskset] = "${bindir}/taskset"
+ALTERNATIVE_LINK_NAME[umount] = "${base_bindir}/umount"
+ALTERNATIVE_LINK_NAME[unshare] = "${bindir}/unshare"
+ALTERNATIVE_LINK_NAME[utmpdump] = "${bindir}/utmpdump"
+ALTERNATIVE_LINK_NAME[uuidgen] = "${bindir}/uuidgen"
+ALTERNATIVE_LINK_NAME[wall] = "${bindir}/wall"
+
+ALTERNATIVE:${PN}-doc = "\
+blkid.8 eject.1 findfs.8 fsck.8 kill.1 last.1 lastb.1 libblkid.3 logger.1 mesg.1 \
+mountpoint.1 nologin.8 rfkill.8 sulogin.8 utmpdump.1 uuid.3 wall.1\
+"
+ALTERNATIVE:${PN}-doc += "${@bb.utils.contains('PACKAGECONFIG', 'pam', 'su.1', '', d)}"
+
+ALTERNATIVE_LINK_NAME[blkid.8] = "${mandir}/man8/blkid.8"
+ALTERNATIVE_LINK_NAME[eject.1] = "${mandir}/man1/eject.1"
+ALTERNATIVE_LINK_NAME[findfs.8] = "${mandir}/man8/findfs.8"
+ALTERNATIVE_LINK_NAME[fsck.8] = "${mandir}/man8/fsck.8"
+ALTERNATIVE_LINK_NAME[kill.1] = "${mandir}/man1/kill.1"
+ALTERNATIVE_LINK_NAME[last.1] = "${mandir}/man1/last.1"
+ALTERNATIVE_LINK_NAME[lastb.1] = "${mandir}/man1/lastb.1"
+ALTERNATIVE_LINK_NAME[libblkid.3] = "${mandir}/man3/libblkid.3"
+ALTERNATIVE_LINK_NAME[logger.1] = "${mandir}/man1/logger.1"
+ALTERNATIVE_LINK_NAME[mesg.1] = "${mandir}/man1/mesg.1"
+ALTERNATIVE_LINK_NAME[mountpoint.1] = "${mandir}/man1/mountpoint.1"
+ALTERNATIVE_LINK_NAME[nologin.8] = "${mandir}/man8/nologin.8"
+ALTERNATIVE_LINK_NAME[rfkill.8] = "${mandir}/man8/rfkill.8"
+ALTERNATIVE_LINK_NAME[setpriv.1] = "${mandir}/man1/setpriv.1"
+ALTERNATIVE_LINK_NAME[su.1] = "${mandir}/man1/su.1"
+ALTERNATIVE_LINK_NAME[sulogin.8] = "${mandir}/man8/sulogin.8"
+ALTERNATIVE_LINK_NAME[utmpdump.1] = "${mandir}/man1/utmpdump.1"
+ALTERNATIVE_LINK_NAME[uuid.3] = "${mandir}/man3/uuid.3"
+ALTERNATIVE_LINK_NAME[wall.1] = "${mandir}/man1/wall.1"
+
+BBCLASSEXTEND = "native nativesdk"
+
+PTEST_BINDIR = "1"
+do_compile_ptest() {
+ oe_runmake buildtest-TESTS
+}
+
+do_install_ptest() {
+ mkdir -p ${D}${PTEST_PATH}/tests/ts
+ find . -name 'test*' -maxdepth 1 -type f -perm -111 -exec cp {} ${D}${PTEST_PATH} \;
+ find ./.libs -name 'sample*' -maxdepth 1 -type f -perm -111 -exec cp {} ${D}${PTEST_PATH} \;
+ find ./.libs -name 'test*' -maxdepth 1 -type f -perm -111 -exec cp {} ${D}${PTEST_PATH} \;
+
+ cp ${S}/tests/*.sh ${D}${PTEST_PATH}/tests/
+ cp -pR ${S}/tests/expected ${D}${PTEST_PATH}/tests/expected
+ cp -pR ${S}/tests/ts ${D}${PTEST_PATH}/tests/
+ cp ${WORKDIR}/build/config.h ${D}${PTEST_PATH}
+
+ sed -i 's|@base_sbindir@|${base_sbindir}|g' ${D}${PTEST_PATH}/run-ptest
+
+ # chfn needs PAM
+ if ! ${@bb.utils.contains('PACKAGECONFIG', 'pam', 'true', 'false', d)}; then
+ rm -rf ${D}${PTEST_PATH}/tests/ts/chfn
+ fi
+ # remove raid tests, known failures and avoid dependency on mdadm therefore
+ # See https://github.com/util-linux/util-linux/commit/7519c3edab120b14623931d5ddb16fdc6e7cad5d
+ rm -rf ${D}${PTEST_PATH}/tests/ts/blkid/md-raid0-whole
+ rm -rf ${D}${PTEST_PATH}/tests/ts/blkid/md-raid1-part
+ rm -rf ${D}${PTEST_PATH}/tests/ts/blkid/md-raid1-whole
+ rm -rf ${D}${PTEST_PATH}/tests/ts/fdisk/align-512-4K-md
+}
+
+# Delete tests not working on musl
+do_install_ptest:append:libc-musl() {
+ for t in tests/ts/col/multibyte \
+ tests/ts/lib/timeutils \
+ tests/ts/dmesg/limit; do
+ rm -rf ${D}${PTEST_PATH}/$t
+ done
+}
diff --git a/meta/recipes-core/volatile-binds/files/volatile-binds.service.in b/meta/recipes-core/volatile-binds/files/volatile-binds.service.in
index 52384c8264..5a0055bec3 100644
--- a/meta/recipes-core/volatile-binds/files/volatile-binds.service.in
+++ b/meta/recipes-core/volatile-binds/files/volatile-binds.service.in
@@ -3,7 +3,8 @@ Description=Bind mount volatile @where@
DefaultDependencies=no
Before=local-fs.target
RequiresMountsFor=@whatparent@ @whereparent@
-ConditionPathIsReadWrite=@whatparent@
+ConditionPathIsReadWrite=|@whatparent@
+ConditionPathExists=|!@whatparent@
ConditionPathExists=@where@
ConditionPathIsReadWrite=!@where@
diff --git a/meta/recipes-core/volatile-binds/volatile-binds.bb b/meta/recipes-core/volatile-binds/volatile-binds.bb
index 3fefa9abde..cca8a65fb4 100644
--- a/meta/recipes-core/volatile-binds/volatile-binds.bb
+++ b/meta/recipes-core/volatile-binds/volatile-binds.bb
@@ -16,10 +16,10 @@ inherit allarch systemd features_check
REQUIRED_DISTRO_FEATURES = "systemd"
VOLATILE_BINDS ?= "\
- /var/volatile/lib /var/lib\n\
- /var/volatile/cache /var/cache\n\
- /var/volatile/spool /var/spool\n\
- /var/volatile/srv /srv\n\
+ ${localstatedir}/volatile/lib ${localstatedir}/lib\n\
+ ${localstatedir}/volatile/cache ${localstatedir}/cache\n\
+ ${localstatedir}/volatile/spool ${localstatedir}/spool\n\
+ ${localstatedir}/volatile/srv /srv\n\
"
VOLATILE_BINDS[type] = "list"
VOLATILE_BINDS[separator] = "\n"
@@ -46,8 +46,8 @@ do_compile () {
continue
fi
- servicefile="${spec#/}"
- servicefile="$(echo "$servicefile" | tr / -).service"
+ servicefile="$(echo "${spec#/}" | tr / -).service"
+ [ "$mountpoint" != ${localstatedir}/lib ] || var_lib_servicefile=$servicefile
sed -e "s#@what@#$spec#g; s#@where@#$mountpoint#g" \
-e "s#@whatparent@#${spec%/*}#g; s#@whereparent@#${mountpoint%/*}#g" \
-e "s#@avoid_overlayfs@#${@d.getVar('AVOID_OVERLAYFS')}#g" \
@@ -56,12 +56,12 @@ do_compile () {
${@d.getVar('VOLATILE_BINDS').replace("\\n", "\n")}
END
- if [ -e var-volatile-lib.service ]; then
+ if [ -e "$var_lib_servicefile" ]; then
# As the seed is stored under /var/lib, ensure that this service runs
# after the volatile /var/lib is mounted.
sed -i -e "/^Before=/s/\$/ systemd-random-seed.service/" \
-e "/^WantedBy=/s/\$/ systemd-random-seed.service/" \
- var-volatile-lib.service
+ "$var_lib_servicefile"
fi
}
do_compile[dirs] = "${WORKDIR}"
@@ -78,7 +78,7 @@ do_install () {
# Suppress attempts to process some tmpfiles that are not temporary.
#
- install -d ${D}${sysconfdir}/tmpfiles.d ${D}/var/cache
+ install -d ${D}${sysconfdir}/tmpfiles.d ${D}${localstatedir}/cache
ln -s /dev/null ${D}${sysconfdir}/tmpfiles.d/etc.conf
ln -s /dev/null ${D}${sysconfdir}/tmpfiles.d/home.conf
}
diff --git a/meta/recipes-core/zlib/zlib/0001-Correct-incorrect-inputs-provided-to-the-CRC-functio.patch b/meta/recipes-core/zlib/zlib/0001-Correct-incorrect-inputs-provided-to-the-CRC-functio.patch
deleted file mode 100644
index ad5e59de04..0000000000
--- a/meta/recipes-core/zlib/zlib/0001-Correct-incorrect-inputs-provided-to-the-CRC-functio.patch
+++ /dev/null
@@ -1,54 +0,0 @@
-From ec3df00224d4b396e2ac6586ab5d25f673caa4c2 Mon Sep 17 00:00:00 2001
-From: Mark Adler <madler@alumni.caltech.edu>
-Date: Wed, 30 Mar 2022 11:14:53 -0700
-Subject: [PATCH] Correct incorrect inputs provided to the CRC functions.
-
-The previous releases of zlib were not sensitive to incorrect CRC
-inputs with bits set above the low 32. This commit restores that
-behavior, so that applications with such bugs will continue to
-operate as before.
-
-Upstream-Status: Backport [https://github.com/madler/zlib/commit/ec3df00224d4b396e2ac6586ab5d25f673caa4c2]
-Signed-off-by: Jacob Kroon <jacob.kroon@gmail.com>
----
- crc32.c | 8 ++++----
- 1 file changed, 4 insertions(+), 4 deletions(-)
-
-diff --git a/crc32.c b/crc32.c
-index a1bdce5..451887b 100644
---- a/crc32.c
-+++ b/crc32.c
-@@ -630,7 +630,7 @@ unsigned long ZEXPORT crc32_z(crc, buf, len)
- #endif /* DYNAMIC_CRC_TABLE */
-
- /* Pre-condition the CRC */
-- crc ^= 0xffffffff;
-+ crc = (~crc) & 0xffffffff;
-
- /* Compute the CRC up to a word boundary. */
- while (len && ((z_size_t)buf & 7) != 0) {
-@@ -749,7 +749,7 @@ unsigned long ZEXPORT crc32_z(crc, buf, len)
- #endif /* DYNAMIC_CRC_TABLE */
-
- /* Pre-condition the CRC */
-- crc ^= 0xffffffff;
-+ crc = (~crc) & 0xffffffff;
-
- #ifdef W
-
-@@ -1077,7 +1077,7 @@ uLong ZEXPORT crc32_combine64(crc1, crc2, len2)
- #ifdef DYNAMIC_CRC_TABLE
- once(&made, make_crc_table);
- #endif /* DYNAMIC_CRC_TABLE */
-- return multmodp(x2nmodp(len2, 3), crc1) ^ crc2;
-+ return multmodp(x2nmodp(len2, 3), crc1) ^ (crc2 & 0xffffffff);
- }
-
- /* ========================================================================= */
-@@ -1112,5 +1112,5 @@ uLong crc32_combine_op(crc1, crc2, op)
- uLong crc2;
- uLong op;
- {
-- return multmodp(op, crc1) ^ crc2;
-+ return multmodp(op, crc1) ^ (crc2 & 0xffffffff);
- }
diff --git a/meta/recipes-core/zlib/zlib/0001-configure-Pass-LDFLAGS-to-link-tests.patch b/meta/recipes-core/zlib/zlib/0001-configure-Pass-LDFLAGS-to-link-tests.patch
index e6cc915ba5..07b2cd3879 100644
--- a/meta/recipes-core/zlib/zlib/0001-configure-Pass-LDFLAGS-to-link-tests.patch
+++ b/meta/recipes-core/zlib/zlib/0001-configure-Pass-LDFLAGS-to-link-tests.patch
@@ -1,7 +1,7 @@
Upstream-Status: Submitted [https://github.com/madler/zlib/pull/599]
Signed-off-by: Ross Burton <ross.burton@arm.com>
-From f15584918a7fbbe3cc794ad59100e5e8153ea9f6 Mon Sep 17 00:00:00 2001
+From ea77f1f003a4d18b23cca703f3c824942863a1b4 Mon Sep 17 00:00:00 2001
From: Khem Raj <raj.khem@gmail.com>
Date: Tue, 8 Mar 2022 22:38:47 -0800
Subject: [PATCH] configure: Pass LDFLAGS to link tests
@@ -13,24 +13,25 @@ tests perform correctly. Without this some tests may fail resulting in
wrong confgure result, ending in miscompiling the package
Signed-off-by: Khem Raj <raj.khem@gmail.com>
+
---
configure | 12 ++++++------
1 file changed, 6 insertions(+), 6 deletions(-)
diff --git a/configure b/configure
-index 52ff4a0..d04ee59 100755
+index c55098a..a7c6d72 100755
--- a/configure
+++ b/configure
-@@ -427,7 +427,7 @@ if test $shared -eq 1; then
+@@ -443,7 +443,7 @@ if test $shared -eq 1; then
echo Checking for shared library support... | tee -a configure.log
# we must test in two steps (cc then ld), required at least on SunOS 4.x
- if try $CC -w -c $SFLAGS $test.c &&
+ if try $CC -c $SFLAGS $test.c &&
- try $LDSHARED $SFLAGS -o $test$shared_ext $test.o; then
+ try $LDSHARED $SFLAGS $LDFLAGS -o $test$shared_ext $test.o; then
echo Building shared library $SHAREDLIBV with $CC. | tee -a configure.log
elif test -z "$old_cc" -a -z "$old_cflags"; then
echo No shared library support. | tee -a configure.log
-@@ -503,7 +503,7 @@ int main(void) {
+@@ -505,7 +505,7 @@ int main(void) {
}
EOF
fi
@@ -39,7 +40,7 @@ index 52ff4a0..d04ee59 100755
sizet=`./$test`
echo "Checking for a pointer-size integer type..." $sizet"." | tee -a configure.log
CFLAGS="${CFLAGS} -DNO_SIZE_T=${sizet}"
-@@ -537,7 +537,7 @@ int main(void) {
+@@ -539,7 +539,7 @@ int main(void) {
return 0;
}
EOF
@@ -48,7 +49,7 @@ index 52ff4a0..d04ee59 100755
echo "Checking for fseeko... Yes." | tee -a configure.log
else
CFLAGS="${CFLAGS} -DNO_FSEEKO"
-@@ -554,7 +554,7 @@ cat > $test.c <<EOF
+@@ -556,7 +556,7 @@ cat > $test.c <<EOF
#include <errno.h>
int main() { return strlen(strerror(errno)); }
EOF
@@ -57,7 +58,7 @@ index 52ff4a0..d04ee59 100755
echo "Checking for strerror... Yes." | tee -a configure.log
else
CFLAGS="${CFLAGS} -DNO_STRERROR"
-@@ -661,7 +661,7 @@ int main()
+@@ -663,7 +663,7 @@ int main()
return (mytest("Hello%d\n", 1));
}
EOF
@@ -66,7 +67,7 @@ index 52ff4a0..d04ee59 100755
echo "Checking for vsnprintf() in stdio.h... Yes." | tee -a configure.log
echo >> configure.log
-@@ -751,7 +751,7 @@ int main()
+@@ -753,7 +753,7 @@ int main()
}
EOF
@@ -75,6 +76,3 @@ index 52ff4a0..d04ee59 100755
echo "Checking for snprintf() in stdio.h... Yes." | tee -a configure.log
echo >> configure.log
---
-2.25.1
-
diff --git a/meta/recipes-core/zlib/zlib/cc.patch b/meta/recipes-core/zlib/zlib/cc.patch
deleted file mode 100644
index 8fb974ded4..0000000000
--- a/meta/recipes-core/zlib/zlib/cc.patch
+++ /dev/null
@@ -1,27 +0,0 @@
-Upstream-Status: Backport
-Signed-off-by: Ross Burton <ross.burton@arm.com>
-
-From 05796d3d8d5546cf1b4dfe2cd72ab746afae505d Mon Sep 17 00:00:00 2001
-From: Mark Adler <madler@alumni.caltech.edu>
-Date: Mon, 28 Mar 2022 18:34:10 -0700
-Subject: [PATCH] Fix configure issue that discarded provided CC definition.
-
----
- configure | 3 +++
- 1 file changed, 3 insertions(+)
-
-diff --git a/configure b/configure
-index 52ff4a04e..3fa3e8618 100755
---- a/configure
-+++ b/configure
-@@ -174,7 +174,10 @@ if test -z "$CC"; then
- else
- cc=${CROSS_PREFIX}cc
- fi
-+else
-+ cc=${CC}
- fi
-+
- cflags=${CFLAGS-"-O3"}
- # to force the asm version use: CFLAGS="-O3 -DASMV" ./configure
- case "$cc" in
diff --git a/meta/recipes-core/zlib/zlib/ldflags-tests.patch b/meta/recipes-core/zlib/zlib/ldflags-tests.patch
deleted file mode 100644
index 286390665f..0000000000
--- a/meta/recipes-core/zlib/zlib/ldflags-tests.patch
+++ /dev/null
@@ -1,45 +0,0 @@
-Obey LDFLAGS for tests
-
-Upstream-Status: Submitted [https://github.com/madler/zlib/pull/409]
-Signed-off-by: Ross Burton <ross.burton@intel.com>
-
---- zlib-1.2.8.orig/Makefile.in
-+++ zlib-1.2.8/Makefile.in
-@@ -26,7 +26,7 @@ CFLAGS=-O
-
- SFLAGS=-O
- LDFLAGS=
--TEST_LDFLAGS=-L. libz.a
-+TEST_LDFLAGS=-L. $(LDFLAGS)
- LDSHARED=$(CC)
- CPP=$(CC) -E
-
-@@ -176,22 +176,22 @@ placebo $(SHAREDLIBV): $(PIC_OBJS) libz.
- -@rmdir objs
-
- example$(EXE): example.o $(STATICLIB)
-- $(CC) $(CFLAGS) -o $@ example.o $(TEST_LDFLAGS)
-+ $(CC) $(CFLAGS) -o $@ example.o $(TEST_LDFLAGS) $(STATICLIB)
-
- minigzip$(EXE): minigzip.o $(STATICLIB)
-- $(CC) $(CFLAGS) -o $@ minigzip.o $(TEST_LDFLAGS)
-+ $(CC) $(CFLAGS) -o $@ minigzip.o $(TEST_LDFLAGS) $(STATICLIB)
-
- examplesh$(EXE): example.o $(SHAREDLIBV)
-- $(CC) $(CFLAGS) -o $@ example.o -L. $(SHAREDLIBV)
-+ $(CC) $(CFLAGS) -o $@ example.o $(TEST_LDFLAGS) $(SHAREDLIBV)
-
- minigzipsh$(EXE): minigzip.o $(SHAREDLIBV)
-- $(CC) $(CFLAGS) -o $@ minigzip.o -L. $(SHAREDLIBV)
-+ $(CC) $(CFLAGS) -o $@ minigzip.o $(TEST_LDFLAGS) $(SHAREDLIBV)
-
- example64$(EXE): example64.o $(STATICLIB)
-- $(CC) $(CFLAGS) -o $@ example64.o $(TEST_LDFLAGS)
-+ $(CC) $(CFLAGS) -o $@ example64.o $(TEST_LDFLAGS) $(STATICLIB)
-
- minigzip64$(EXE): minigzip64.o $(STATICLIB)
-- $(CC) $(CFLAGS) -o $@ minigzip64.o $(TEST_LDFLAGS)
-+ $(CC) $(CFLAGS) -o $@ minigzip64.o $(TEST_LDFLAGS) $(STATICLIB)
-
- install-libs: $(LIBS)
- -@if [ ! -d $(DESTDIR)$(exec_prefix) ]; then mkdir -p $(DESTDIR)$(exec_prefix); fi
diff --git a/meta/recipes-core/zlib/zlib_1.2.12.bb b/meta/recipes-core/zlib/zlib_1.2.12.bb
deleted file mode 100644
index e921703137..0000000000
--- a/meta/recipes-core/zlib/zlib_1.2.12.bb
+++ /dev/null
@@ -1,54 +0,0 @@
-SUMMARY = "Zlib Compression Library"
-DESCRIPTION = "Zlib is a general-purpose, patent-free, lossless data compression \
-library which is used by many different programs."
-HOMEPAGE = "http://zlib.net/"
-SECTION = "libs"
-LICENSE = "Zlib"
-LIC_FILES_CHKSUM = "file://zlib.h;beginline=6;endline=23;md5=5377232268e952e9ef63bc555f7aa6c0"
-
-SRC_URI = "https://zlib.net/${BP}.tar.xz \
- file://cc.patch \
- file://ldflags-tests.patch \
- file://0001-configure-Pass-LDFLAGS-to-link-tests.patch \
- file://run-ptest \
- file://0001-Correct-incorrect-inputs-provided-to-the-CRC-functio.patch \
- "
-UPSTREAM_CHECK_URI = "http://zlib.net/"
-
-SRC_URI[sha256sum] = "7db46b8d7726232a621befaab4a1c870f00a90805511c0e0090441dac57def18"
-
-CFLAGS += "-D_REENTRANT"
-
-RDEPENDS:${PN}-ptest += "make"
-
-inherit ptest
-
-do_configure() {
- LDCONFIG=true ./configure --prefix=${prefix} --shared --libdir=${libdir} --uname=GNU
-}
-
-do_compile() {
- oe_runmake shared
-}
-
-do_install() {
- oe_runmake DESTDIR=${D} install
-}
-
-do_install_ptest() {
- install ${B}/examplesh ${D}${PTEST_PATH}
-}
-
-# Move zlib shared libraries for target builds to $base_libdir so the library
-# can be used in early boot before $prefix is mounted.
-do_install:append:class-target() {
- if [ ${base_libdir} != ${libdir} ]
- then
- mkdir -p ${D}/${base_libdir}
- mv ${D}/${libdir}/libz.so.* ${D}/${base_libdir}
- libname=`readlink ${D}/${libdir}/libz.so`
- ln -sf ${@oe.path.relative("${libdir}", "${base_libdir}")}/$libname ${D}${libdir}/libz.so
- fi
-}
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-core/zlib/zlib_1.3.1.bb b/meta/recipes-core/zlib/zlib_1.3.1.bb
new file mode 100644
index 0000000000..e6a81ef789
--- /dev/null
+++ b/meta/recipes-core/zlib/zlib_1.3.1.bb
@@ -0,0 +1,50 @@
+SUMMARY = "Zlib Compression Library"
+DESCRIPTION = "Zlib is a general-purpose, patent-free, lossless data compression \
+library which is used by many different programs."
+HOMEPAGE = "http://zlib.net/"
+SECTION = "libs"
+LICENSE = "Zlib"
+LIC_FILES_CHKSUM = "file://zlib.h;beginline=6;endline=23;md5=5377232268e952e9ef63bc555f7aa6c0"
+
+# The source tarball needs to be .gz as only the .gz ends up in fossils/
+SRC_URI = "https://zlib.net/${BP}.tar.gz \
+ file://0001-configure-Pass-LDFLAGS-to-link-tests.patch \
+ file://run-ptest \
+ "
+UPSTREAM_CHECK_URI = "http://zlib.net/"
+
+SRC_URI[sha256sum] = "9a93b2b7dfdac77ceba5a558a580e74667dd6fede4585b91eefb60f03b72df23"
+
+# When a new release is made the previous release is moved to fossils/, so add this
+# to PREMIRRORS so it is also searched automatically.
+PREMIRRORS:append = " https://zlib.net/ https://zlib.net/fossils/"
+
+CFLAGS += "-D_REENTRANT"
+
+RDEPENDS:${PN}-ptest += "make"
+
+inherit ptest
+
+B = "${WORKDIR}/build"
+
+do_configure() {
+ LDCONFIG=true ${S}/configure --prefix=${prefix} --shared --libdir=${libdir} --uname=GNU
+}
+do_configure[cleandirs] += "${B}"
+
+do_compile() {
+ oe_runmake shared
+}
+
+do_install() {
+ oe_runmake DESTDIR=${D} install
+}
+
+do_install_ptest() {
+ install ${B}/examplesh ${D}${PTEST_PATH}
+}
+
+BBCLASSEXTEND = "native nativesdk"
+
+CVE_STATUS[CVE-2023-45853] = "not-applicable-config: we don't build minizip"
+CVE_STATUS[CVE-2023-6992] = "cpe-incorrect: this CVE is for cloudflare zlib"
diff --git a/meta/recipes-devtools/apt/apt/0001-Disable-documentation-directory-altogether.patch b/meta/recipes-devtools/apt/apt/0001-Disable-documentation-directory-altogether.patch
index 8b28ede8a8..5443ff6caa 100644
--- a/meta/recipes-devtools/apt/apt/0001-Disable-documentation-directory-altogether.patch
+++ b/meta/recipes-devtools/apt/apt/0001-Disable-documentation-directory-altogether.patch
@@ -13,11 +13,11 @@ Signed-off-by: Alexander Kanavin <alex.kanavin@gmail.com>
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/CMakeLists.txt b/CMakeLists.txt
-index 9745c13..7cfc9ee 100644
+index 668e2d762..62f441bfa 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
-@@ -239,7 +239,7 @@ add_subdirectory(apt-pkg)
- add_subdirectory(apt-private)
+@@ -246,7 +246,7 @@ add_subdirectory(apt-private)
+ endif()
add_subdirectory(cmdline)
add_subdirectory(completions)
-add_subdirectory(doc)
diff --git a/meta/recipes-devtools/apt/apt/0001-Do-not-init-tables-from-dpkg-configuration.patch b/meta/recipes-devtools/apt/apt/0001-Do-not-init-tables-from-dpkg-configuration.patch
index 59b9cd190f..37a3133010 100644
--- a/meta/recipes-devtools/apt/apt/0001-Do-not-init-tables-from-dpkg-configuration.patch
+++ b/meta/recipes-devtools/apt/apt/0001-Do-not-init-tables-from-dpkg-configuration.patch
@@ -1,4 +1,4 @@
-From 11ba49594ae9d11f0070198c146b5e437fa83022 Mon Sep 17 00:00:00 2001
+From b84280fec4e1d0d33eca78e76556023f8f8fe5b7 Mon Sep 17 00:00:00 2001
From: Alexander Kanavin <alex.kanavin@gmail.com>
Date: Fri, 10 May 2019 16:47:38 +0200
Subject: [PATCH] Do not init tables from dpkg configuration
@@ -13,7 +13,7 @@ Signed-off-by: Alexander Kanavin <alex.kanavin@gmail.com>
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/apt-pkg/init.cc b/apt-pkg/init.cc
-index b9d9b15..1725c59 100644
+index b9d9b15d2..1725c5966 100644
--- a/apt-pkg/init.cc
+++ b/apt-pkg/init.cc
@@ -281,8 +281,8 @@ bool pkgInitSystem(Configuration &Cnf,pkgSystem *&Sys)
diff --git a/meta/recipes-devtools/apt/apt/0001-Remove-using-std-binary_function.patch b/meta/recipes-devtools/apt/apt/0001-Remove-using-std-binary_function.patch
new file mode 100644
index 0000000000..15b036b90d
--- /dev/null
+++ b/meta/recipes-devtools/apt/apt/0001-Remove-using-std-binary_function.patch
@@ -0,0 +1,87 @@
+From e91fb0618ce0a5d42f239d0fca602544858f0819 Mon Sep 17 00:00:00 2001
+From: Khem Raj <raj.khem@gmail.com>
+Date: Tue, 16 Aug 2022 08:44:18 -0700
+Subject: [PATCH] Remove using std::binary_function
+
+std::binary_function and std::unary_function are deprecated since c++11
+and removed in c++17, therefore remove it and use lambda functions to get same
+functionality implemented.
+
+Upstream-Status: Submitted [https://salsa.debian.org/apt-team/apt/-/merge_requests/253]
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+---
+ ftparchive/apt-ftparchive.cc | 33 ++++++++++-----------------------
+ 1 file changed, 10 insertions(+), 23 deletions(-)
+
+diff --git a/ftparchive/apt-ftparchive.cc b/ftparchive/apt-ftparchive.cc
+index 0f6587281..0a253b12b 100644
+--- a/ftparchive/apt-ftparchive.cc
++++ b/ftparchive/apt-ftparchive.cc
+@@ -48,6 +48,11 @@
+ using namespace std;
+ unsigned Quiet = 0;
+
++auto ContentsCompare = [](const auto &a, const auto &b) { return a.ContentsMTime < b.ContentsMTime; };
++auto DBCompare = [](const auto &a, const auto &b) { return a.BinCacheDB < b.BinCacheDB; };
++auto SrcDBCompare = [](const auto &a, const auto &b) { return a.SrcCacheDB < b.SrcCacheDB; };
++
++
+ static struct timeval GetTimevalFromSteadyClock() /*{{{*/
+ {
+ auto const Time = std::chrono::steady_clock::now().time_since_epoch();
+@@ -116,24 +121,6 @@ struct PackageMap
+ bool SrcDone;
+ time_t ContentsMTime;
+
+- struct ContentsCompare
+- {
+- inline bool operator() (const PackageMap &x,const PackageMap &y)
+- {return x.ContentsMTime < y.ContentsMTime;};
+- };
+-
+- struct DBCompare
+- {
+- inline bool operator() (const PackageMap &x,const PackageMap &y)
+- {return x.BinCacheDB < y.BinCacheDB;};
+- };
+-
+- struct SrcDBCompare
+- {
+- inline bool operator() (const PackageMap &x,const PackageMap &y)
+- {return x.SrcCacheDB < y.SrcCacheDB;};
+- };
+-
+ void GetGeneral(Configuration &Setup,Configuration &Block);
+ bool GenPackages(Configuration &Setup,struct CacheDB::Stats &Stats);
+ bool GenSources(Configuration &Setup,struct CacheDB::Stats &Stats);
+@@ -869,7 +856,7 @@ static bool DoGenerateContents(Configuration &Setup,
+ else
+ I->ContentsMTime = A.st_mtime;
+ }
+- stable_sort(PkgList.begin(),PkgList.end(),PackageMap::ContentsCompare());
++ stable_sort(PkgList.begin(),PkgList.end(),ContentsCompare);
+
+ /* Now for Contents.. The process here is to do a make-like dependency
+ check. Each contents file is verified to be newer than the package files
+@@ -941,8 +928,8 @@ static bool Generate(CommandLine &CmdL)
+ LoadBinDir(PkgList,Setup);
+
+ // Sort by cache DB to improve IO locality.
+- stable_sort(PkgList.begin(),PkgList.end(),PackageMap::DBCompare());
+- stable_sort(PkgList.begin(),PkgList.end(),PackageMap::SrcDBCompare());
++ stable_sort(PkgList.begin(),PkgList.end(),DBCompare);
++ stable_sort(PkgList.begin(),PkgList.end(),SrcDBCompare);
+
+ // Generate packages
+ if (_config->FindB("APT::FTPArchive::ContentsOnly", false) == false)
+@@ -993,8 +980,8 @@ static bool Clean(CommandLine &CmdL)
+ LoadBinDir(PkgList,Setup);
+
+ // Sort by cache DB to improve IO locality.
+- stable_sort(PkgList.begin(),PkgList.end(),PackageMap::DBCompare());
+- stable_sort(PkgList.begin(),PkgList.end(),PackageMap::SrcDBCompare());
++ stable_sort(PkgList.begin(),PkgList.end(),DBCompare);
++ stable_sort(PkgList.begin(),PkgList.end(),SrcDBCompare);
+
+ string CacheDir = Setup.FindDir("Dir::CacheDir");
+
diff --git a/meta/recipes-devtools/apt/apt/0001-Revert-always-run-dpkg-configure-a-at-the-end-of-our.patch b/meta/recipes-devtools/apt/apt/0001-Revert-always-run-dpkg-configure-a-at-the-end-of-our.patch
index 593ed7d096..6f4d5b6e72 100644
--- a/meta/recipes-devtools/apt/apt/0001-Revert-always-run-dpkg-configure-a-at-the-end-of-our.patch
+++ b/meta/recipes-devtools/apt/apt/0001-Revert-always-run-dpkg-configure-a-at-the-end-of-our.patch
@@ -1,4 +1,4 @@
-From 47c2b42af60ceefd8ed52b32a3a365facf0e05b8 Mon Sep 17 00:00:00 2001
+From a2dd661484536492b47d4c88998f2bf516749bc8 Mon Sep 17 00:00:00 2001
From: Alexander Kanavin <alex.kanavin@gmail.com>
Date: Thu, 21 May 2020 20:13:25 +0000
Subject: [PATCH] Revert "always run 'dpkg --configure -a' at the end of our
@@ -20,7 +20,7 @@ Signed-off-by: Alexander Kanavin <alex.kanavin@gmail.com>
1 file changed, 2 insertions(+), 7 deletions(-)
diff --git a/apt-pkg/deb/dpkgpm.cc b/apt-pkg/deb/dpkgpm.cc
-index 93effa9..4375781 100644
+index 93effa959..4375781d1 100644
--- a/apt-pkg/deb/dpkgpm.cc
+++ b/apt-pkg/deb/dpkgpm.cc
@@ -1199,12 +1199,6 @@ void pkgDPkgPM::BuildPackagesProgressMap()
diff --git a/meta/recipes-devtools/apt/apt/0001-cmake-Do-not-build-po-files.patch b/meta/recipes-devtools/apt/apt/0001-cmake-Do-not-build-po-files.patch
index 2837b7f1b3..036ce35963 100644
--- a/meta/recipes-devtools/apt/apt/0001-cmake-Do-not-build-po-files.patch
+++ b/meta/recipes-devtools/apt/apt/0001-cmake-Do-not-build-po-files.patch
@@ -15,16 +15,18 @@ Signed-off-by: Khem Raj <raj.khem@gmail.com>
1 file changed, 7 deletions(-)
diff --git a/CMakeLists.txt b/CMakeLists.txt
-index 3c8ec3f..821a24f 100644
+index be157a55f..54163ae6c 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
-@@ -245,13 +245,6 @@ add_subdirectory(ftparchive)
+@@ -252,15 +252,6 @@ add_subdirectory(ftparchive)
add_subdirectory(methods)
add_subdirectory(test)
-if (USE_NLS)
-add_subdirectory(po)
+-endif()
-
+-if(TARGET update-po AND TARGET update-po4a)
-# Link update-po4a into the update-po target
-add_dependencies(update-po update-po4a)
-endif()
@@ -32,6 +34,3 @@ index 3c8ec3f..821a24f 100644
# Create our directories.
install_empty_directories(
${CONF_DIR}/apt.conf.d
---
-2.31.0
-
diff --git a/meta/recipes-devtools/apt/apt_2.4.5.bb b/meta/recipes-devtools/apt/apt_2.4.5.bb
deleted file mode 100644
index 95c25e3036..0000000000
--- a/meta/recipes-devtools/apt/apt_2.4.5.bb
+++ /dev/null
@@ -1,136 +0,0 @@
-SUMMARY = "Advanced front-end for dpkg"
-DESCRIPTION = "APT is the Advanced Package Tool, an advanced interface to the Debian packaging system which provides the apt-get program."
-HOMEPAGE = "https://packages.debian.org/jessie/apt"
-LICENSE = "GPL-2.0-or-later"
-SECTION = "base"
-
-# Triehash script taken from https://github.com/julian-klode/triehash
-SRC_URI = "${DEBIAN_MIRROR}/main/a/apt/${BPN}_${PV}.tar.xz \
- file://triehash \
- file://0001-Disable-documentation-directory-altogether.patch \
- file://0001-Fix-musl-build.patch \
- file://0001-CMakeLists.txt-avoid-changing-install-paths-based-on.patch \
- file://0001-cmake-Do-not-build-po-files.patch \
- file://0001-Hide-fstatat64-and-prlimit64-defines-on-musl.patch \
- file://0001-aptwebserver.cc-Include-array.patch \
- "
-
-SRC_URI:append:class-native = " \
- file://0001-Do-not-init-tables-from-dpkg-configuration.patch \
- file://0001-Revert-always-run-dpkg-configure-a-at-the-end-of-our.patch \
- "
-
-SRC_URI:append:class-nativesdk = " \
- file://0001-Do-not-init-tables-from-dpkg-configuration.patch \
- file://0001-Revert-always-run-dpkg-configure-a-at-the-end-of-our.patch \
- "
-
-SRC_URI[sha256sum] = "5552f175c3a3924f5cda0c079b821b30f68a2521959f2c30ab164d2ec7993ecf"
-LIC_FILES_CHKSUM = "file://COPYING.GPL;md5=b234ee4d69f5fce4486a80fdaf4a4263"
-
-# the package is taken from snapshots.debian.org; that source is static and goes stale
-# so we check the latest upstream from a directory that does get updated
-UPSTREAM_CHECK_URI = "${DEBIAN_MIRROR}/main/a/apt/"
-
-inherit cmake perlnative bash-completion upstream-version-is-even useradd
-
-# User is added to allow apt to drop privs, will runtime warn without
-USERADD_PACKAGES = "${PN}"
-USERADD_PARAM:${PN} = "--system --home /nonexistent --no-create-home _apt"
-
-BBCLASSEXTEND = "native nativesdk"
-
-DEPENDS += "db gnutls lz4 zlib bzip2 xz libgcrypt xxhash"
-
-EXTRA_OECMAKE:append = " -DCURRENT_VENDOR=debian -DWITH_DOC=False \
- -DDPKG_DATADIR=${datadir}/dpkg \
- -DTRIEHASH_EXECUTABLE=${WORKDIR}/triehash \
- -DCMAKE_DISABLE_FIND_PACKAGE_ZSTD=True \
- -DCMAKE_DISABLE_FIND_PACKAGE_SECCOMP=True \
- -DWITH_TESTS=False \
-"
-
-do_configure:prepend() {
- echo "set( CMAKE_FIND_ROOT_PATH_MODE_INCLUDE BOTH )" >> ${WORKDIR}/toolchain.cmake
-}
-
-# Unfortunately apt hardcodes this all over the place
-FILES:${PN} += "${prefix}/lib/dpkg ${prefix}/lib/apt"
-RDEPENDS:${PN} += "bash perl dpkg"
-
-customize_apt_conf_sample() {
- cat > ${D}${sysconfdir}/apt/apt.conf.sample << EOF
-Dir "${STAGING_DIR_NATIVE}/"
-{
- State "var/lib/apt/"
- {
- Lists "#APTCONF#/lists/";
- status "#ROOTFS#/var/lib/dpkg/status";
- };
- Cache "var/cache/apt/"
- {
- Archives "archives/";
- pkgcache "";
- srcpkgcache "";
- };
- Bin "${STAGING_BINDIR_NATIVE}/"
- {
- methods "${STAGING_LIBDIR}/apt/methods/";
- gzip "/bin/gzip";
- dpkg "dpkg";
- dpkg-source "dpkg-source";
- dpkg-buildpackage "dpkg-buildpackage";
- apt-get "apt-get";
- apt-cache "apt-cache";
- };
- Etc "#APTCONF#"
- {
- Preferences "preferences";
- };
- Log "var/log/apt";
-};
-
-APT
-{
- Install-Recommends "true";
- Immediate-Configure "false";
- Architecture "i586";
- Get
- {
- Assume-Yes "true";
- };
-};
-
-Acquire
-{
- AllowInsecureRepositories "true";
-};
-
-DPkg::Options {"--root=#ROOTFS#";"--admindir=#ROOTFS#/var/lib/dpkg";"--force-all";"--no-debsig"};
-DPkg::Path "";
-EOF
-}
-
-do_install:append:class-native() {
- customize_apt_conf_sample
-}
-
-do_install:append:class-nativesdk() {
- customize_apt_conf_sample
-}
-
-do_install:append:class-target() {
- # Write the correct apt-architecture to apt.conf
- APT_CONF=${D}${sysconfdir}/apt/apt.conf
- echo 'APT::Architecture "${DPKG_ARCH}";' > ${APT_CONF}
-
- # Remove /var/log/apt. /var/log is normally a link to /var/volatile/log
- # and /var/volatile is a tmpfs mount. So anything created in /var/log
- # will not be available when the tmpfs is mounted.
- rm -rf ${D}${localstatedir}/log
-}
-
-do_install:append() {
- # Avoid non-reproducible -src package
- sed -i -e "s,${B},,g" ${B}/apt-pkg/tagfile-keys.cc
-}
diff --git a/meta/recipes-devtools/apt/apt_2.6.1.bb b/meta/recipes-devtools/apt/apt_2.6.1.bb
new file mode 100644
index 0000000000..fb4ff899d2
--- /dev/null
+++ b/meta/recipes-devtools/apt/apt_2.6.1.bb
@@ -0,0 +1,142 @@
+SUMMARY = "Advanced front-end for dpkg"
+DESCRIPTION = "APT is the Advanced Package Tool, an advanced interface to the Debian packaging system which provides the apt-get program."
+HOMEPAGE = "https://packages.debian.org/sid/apt"
+LICENSE = "GPL-2.0-or-later"
+SECTION = "base"
+
+# Triehash script taken from https://github.com/julian-klode/triehash
+SRC_URI = "${DEBIAN_MIRROR}/main/a/apt/${BPN}_${PV}.tar.xz \
+ file://triehash \
+ file://0001-Disable-documentation-directory-altogether.patch \
+ file://0001-Fix-musl-build.patch \
+ file://0001-CMakeLists.txt-avoid-changing-install-paths-based-on.patch \
+ file://0001-cmake-Do-not-build-po-files.patch \
+ file://0001-Hide-fstatat64-and-prlimit64-defines-on-musl.patch \
+ file://0001-aptwebserver.cc-Include-array.patch \
+ file://0001-Remove-using-std-binary_function.patch \
+ "
+
+SRC_URI:append:class-native = " \
+ file://0001-Do-not-init-tables-from-dpkg-configuration.patch \
+ file://0001-Revert-always-run-dpkg-configure-a-at-the-end-of-our.patch \
+ "
+
+SRC_URI:append:class-nativesdk = " \
+ file://0001-Do-not-init-tables-from-dpkg-configuration.patch \
+ file://0001-Revert-always-run-dpkg-configure-a-at-the-end-of-our.patch \
+ "
+
+SRC_URI[sha256sum] = "86b888c901fa2e78f1bf52a2aaa2f400ff82a472b94ff0ac6631939ee68fa6fd"
+LIC_FILES_CHKSUM = "file://COPYING.GPL;md5=b234ee4d69f5fce4486a80fdaf4a4263"
+
+# the package is taken from snapshots.debian.org; that source is static and goes stale
+# so we check the latest upstream from a directory that does get updated
+UPSTREAM_CHECK_URI = "${DEBIAN_MIRROR}/main/a/apt/"
+# apt seems to follow a peculiar version policy, where every *other* even version
+# is considered stable, e.g. 1.0, 1.4, 1.8, 2.2, 2.6, etc. As there is no way
+# to express 'divisible by 4 plus 2' in regex (that I know of), let's hardcode a few.
+UPSTREAM_CHECK_REGEX = "[^\d\.](?P<pver>((2\.2)|(2\.6)|(3\.0)|(3\.4)|(3\.8)|(4\.2))(\.\d+)+)\.tar"
+
+inherit cmake perlnative bash-completion useradd
+
+# User is added to allow apt to drop privs, will runtime warn without
+USERADD_PACKAGES = "${PN}"
+USERADD_PARAM:${PN} = "--system --home /nonexistent --no-create-home _apt"
+
+BBCLASSEXTEND = "native nativesdk"
+
+DEPENDS += "db gnutls lz4 zlib bzip2 xz libgcrypt xxhash"
+
+EXTRA_OECMAKE:append = " -DCURRENT_VENDOR=debian -DWITH_DOC=False \
+ -DDPKG_DATADIR=${datadir}/dpkg \
+ -DTRIEHASH_EXECUTABLE=${WORKDIR}/triehash \
+ -DCMAKE_DISABLE_FIND_PACKAGE_ZSTD=True \
+ -DCMAKE_DISABLE_FIND_PACKAGE_SECCOMP=True \
+ -DWITH_TESTS=False \
+"
+
+do_configure:prepend() {
+ echo "set( CMAKE_FIND_ROOT_PATH_MODE_INCLUDE BOTH )" >> ${WORKDIR}/toolchain.cmake
+}
+
+# Unfortunately apt hardcodes this all over the place
+FILES:${PN} += "${prefix}/lib/dpkg ${prefix}/lib/apt"
+RDEPENDS:${PN} += "bash perl dpkg"
+
+customize_apt_conf_sample() {
+ cat > ${D}${sysconfdir}/apt/apt.conf.sample << EOF
+Dir "${STAGING_DIR_NATIVE}/"
+{
+ State "var/lib/apt/"
+ {
+ Lists "#APTCONF#/lists/";
+ status "#ROOTFS#/var/lib/dpkg/status";
+ };
+ Cache "var/cache/apt/"
+ {
+ Archives "archives/";
+ pkgcache "";
+ srcpkgcache "";
+ };
+ Bin "${STAGING_BINDIR_NATIVE}/"
+ {
+ methods "${STAGING_LIBDIR}/apt/methods/";
+ gzip "/bin/gzip";
+ dpkg "dpkg";
+ dpkg-source "dpkg-source";
+ dpkg-buildpackage "dpkg-buildpackage";
+ apt-get "apt-get";
+ apt-cache "apt-cache";
+ };
+ Etc "#APTCONF#"
+ {
+ Preferences "preferences";
+ };
+ Log "var/log/apt";
+};
+
+APT
+{
+ Install-Recommends "true";
+ Immediate-Configure "false";
+ Architecture "i586";
+ Get
+ {
+ Assume-Yes "true";
+ };
+};
+
+Acquire
+{
+ AllowInsecureRepositories "true";
+};
+
+DPkg::Options {"--root=#ROOTFS#";"--admindir=#ROOTFS#/var/lib/dpkg";"--force-all";"--no-debsig"};
+DPkg::Path "";
+EOF
+}
+
+do_install:append:class-native() {
+ customize_apt_conf_sample
+}
+
+do_install:append:class-nativesdk() {
+ customize_apt_conf_sample
+ rm -rf ${D}${localstatedir}/log
+}
+
+do_install:append:class-target() {
+ # Write the correct apt-architecture to apt.conf
+ APT_CONF=${D}${sysconfdir}/apt/apt.conf
+ echo 'APT::Architecture "${DPKG_ARCH}";' > ${APT_CONF}
+
+ # Remove /var/log/apt. /var/log is normally a link to /var/volatile/log
+ # and /var/volatile is a tmpfs mount. So anything created in /var/log
+ # will not be available when the tmpfs is mounted.
+ rm -rf ${D}${localstatedir}/log
+}
+
+do_install:append() {
+ # Avoid non-reproducible -src package
+ sed -i -e "s,${B}/include/,,g" ${B}/apt-pkg/tagfile-keys.cc
+}
diff --git a/meta/recipes-devtools/autoconf-archive/autoconf-archive_2022.02.11.bb b/meta/recipes-devtools/autoconf-archive/autoconf-archive_2022.02.11.bb
deleted file mode 100644
index 47bf2380f0..0000000000
--- a/meta/recipes-devtools/autoconf-archive/autoconf-archive_2022.02.11.bb
+++ /dev/null
@@ -1,17 +0,0 @@
-SUMMARY = "a collection of freely re-usable Autoconf macros"
-HOMEPAGE = "http://www.gnu.org/software/autoconf-archive/"
-SECTION = "devel"
-LICENSE = "GPL-3.0-with-autoconf-exception"
-LIC_FILES_CHKSUM = "file://COPYING;md5=11cc2d3ee574f9d6b7ee797bdce4d423 \
- file://COPYING.EXCEPTION;md5=fdef168ebff3bc2f13664c365a5fb515"
-
-SRC_URI = "${GNU_MIRROR}/${BPN}/${BPN}-${PV}.tar.xz"
-SRC_URI[sha256sum] = "78a61b611e2eeb55a89e0398e0ce387bcaf57fe2dd53c6fe427130f777ad1e8c"
-
-inherit autotools allarch texinfo
-
-PACKAGES = "${PN} ${PN}-doc"
-
-FILES:${PN} += "${datadir}/aclocal"
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/autoconf-archive/autoconf-archive_2023.02.20.bb b/meta/recipes-devtools/autoconf-archive/autoconf-archive_2023.02.20.bb
new file mode 100644
index 0000000000..6ba7cdf3fe
--- /dev/null
+++ b/meta/recipes-devtools/autoconf-archive/autoconf-archive_2023.02.20.bb
@@ -0,0 +1,17 @@
+SUMMARY = "a collection of freely re-usable Autoconf macros"
+HOMEPAGE = "http://www.gnu.org/software/autoconf-archive/"
+SECTION = "devel"
+LICENSE = "GPL-3.0-with-autoconf-exception"
+LIC_FILES_CHKSUM = "file://COPYING;md5=11cc2d3ee574f9d6b7ee797bdce4d423 \
+ file://COPYING.EXCEPTION;md5=fdef168ebff3bc2f13664c365a5fb515"
+
+SRC_URI = "${GNU_MIRROR}/${BPN}/${BPN}-${PV}.tar.xz"
+SRC_URI[sha256sum] = "71d4048479ae28f1f5794619c3d72df9c01df49b1c628ef85fde37596dc31a33"
+
+inherit autotools allarch texinfo
+
+PACKAGES = "${PN} ${PN}-doc"
+
+FILES:${PN} += "${datadir}/aclocal"
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/autoconf/autoconf/autoreconf-exclude.patch b/meta/recipes-devtools/autoconf/autoconf/autoreconf-exclude.patch
index c73aca41ee..2814196200 100644
--- a/meta/recipes-devtools/autoconf/autoconf/autoreconf-exclude.patch
+++ b/meta/recipes-devtools/autoconf/autoconf/autoreconf-exclude.patch
@@ -1,25 +1,26 @@
-From 0071d28e304745a16871561f23117fdb00dd2559 Mon Sep 17 00:00:00 2001
+From 1a50157aa11da48921200a0d8d4308863716eab0 Mon Sep 17 00:00:00 2001
From: Ross Burton <ross.burton@intel.com>
Date: Thu, 12 Mar 2020 17:25:23 +0000
-Subject: [PATCH 4/7] autoreconf-exclude.patch
+Subject: [PATCH] autoreconf-exclude.patch
Upstream-Status: Inappropriate [oe specific]
+
---
bin/autoreconf.in | 26 ++++++++++++++++++++++++++
1 file changed, 26 insertions(+)
diff --git a/bin/autoreconf.in b/bin/autoreconf.in
-index bb9f316d..7da3005b 100644
+index 98ebab6..937f758 100644
--- a/bin/autoreconf.in
+++ b/bin/autoreconf.in
-@@ -82,6 +82,7 @@ Operation modes:
+@@ -83,6 +83,7 @@ Operation modes:
-i, --install copy missing standard auxiliary files
--no-recursive don't rebuild sub-packages
-s, --symlink with -i, install symbolic links instead of copies
+ -x, --exclude=STEPS steps we should not run
-m, --make when applicable, re-run ./configure && make
- -W, --warnings=CATEGORY report the warnings falling in CATEGORY [syntax]
-
+ -W, --warnings=CATEGORY report the warnings falling in CATEGORY
+ (comma-separated list accepted)
@@ -141,6 +142,10 @@ my $run_make = 0;
# Recurse into subpackages
my $recursive = 1;
@@ -60,7 +61,7 @@ index bb9f316d..7da3005b 100644
}
-@@ -687,9 +698,12 @@ sub autoreconf_current_directory ($)
+@@ -691,9 +702,12 @@ sub autoreconf_current_directory ($)
{
$libtoolize .= " --ltdl";
}
@@ -73,7 +74,7 @@ index bb9f316d..7da3005b 100644
}
else
{
-@@ -726,8 +740,11 @@ sub autoreconf_current_directory ($)
+@@ -730,8 +744,11 @@ sub autoreconf_current_directory ($)
}
elsif ($install)
{
@@ -85,7 +86,7 @@ index bb9f316d..7da3005b 100644
}
else
{
-@@ -765,7 +782,10 @@ sub autoreconf_current_directory ($)
+@@ -769,7 +786,10 @@ sub autoreconf_current_directory ($)
# latter runs the former, and (ii) autoconf is stricter than
# autoheader. So all in all, autoconf should give better error
# messages.
@@ -96,7 +97,7 @@ index bb9f316d..7da3005b 100644
# -------------------- #
-@@ -786,7 +806,10 @@ sub autoreconf_current_directory ($)
+@@ -790,7 +810,10 @@ sub autoreconf_current_directory ($)
}
else
{
@@ -107,7 +108,7 @@ index bb9f316d..7da3005b 100644
}
-@@ -803,7 +826,10 @@ sub autoreconf_current_directory ($)
+@@ -807,7 +830,10 @@ sub autoreconf_current_directory ($)
# We should always run automake, and let it decide whether it shall
# update the file or not. In fact, the effect of '$force' is already
# included in '$automake' via '--no-force'.
@@ -118,6 +119,3 @@ index bb9f316d..7da3005b 100644
}
# ---------------------------------------------------- #
---
-2.25.1
-
diff --git a/meta/recipes-devtools/autoconf/autoconf/autotest-automake-result-format.patch b/meta/recipes-devtools/autoconf/autoconf/autotest-automake-result-format.patch
index 23329f7927..38725574ba 100644
--- a/meta/recipes-devtools/autoconf/autoconf/autotest-automake-result-format.patch
+++ b/meta/recipes-devtools/autoconf/autoconf/autotest-automake-result-format.patch
@@ -1,18 +1,19 @@
-From 8c0f24404bebffdaf3132d81e2b9560d34ff1677 Mon Sep 17 00:00:00 2001
+From b28bd61e4716e744617bd681a5b0d5472f62bd67 Mon Sep 17 00:00:00 2001
From: Ross Burton <ross.burton@intel.com>
Date: Thu, 12 Mar 2020 17:25:45 +0000
-Subject: [PATCH 6/7] autotest-automake-result-format.patch
+Subject: [PATCH] autotest-automake-result-format.patch
Upstream-Status: Inappropriate [oe specific]
+
---
lib/autotest/general.m4 | 39 +++++++++++++++++++++++++++++----------
1 file changed, 29 insertions(+), 10 deletions(-)
diff --git a/lib/autotest/general.m4 b/lib/autotest/general.m4
-index 0c0e3c5b..17590e96 100644
+index bf18866..8097523 100644
--- a/lib/autotest/general.m4
+++ b/lib/autotest/general.m4
-@@ -412,6 +412,9 @@ at_recheck=
+@@ -427,6 +427,9 @@ at_recheck=
# Whether a write failure occurred
at_write_fail=0
@@ -22,7 +23,7 @@ index 0c0e3c5b..17590e96 100644
# The directory we run the suite in. Default to . if no -C option.
at_dir=`pwd`
# An absolute reference to this testsuite script.
-@@ -525,6 +528,10 @@ do
+@@ -540,6 +543,10 @@ do
at_check_filter_trace=at_fn_filter_trace
;;
@@ -33,7 +34,7 @@ index 0c0e3c5b..17590e96 100644
[[0-9] | [0-9][0-9] | [0-9][0-9][0-9] | [0-9][0-9][0-9][0-9]])
at_fn_validate_ranges at_option
AS_VAR_APPEND([at_groups], ["$at_option$as_nl"])
-@@ -713,10 +720,10 @@ m4_divert_push([HELP_MODES])dnl
+@@ -728,10 +735,10 @@ m4_divert_push([HELP_MODES])dnl
cat <<_ATEOF || at_write_fail=1
Operation modes:
@@ -48,7 +49,7 @@ index 0c0e3c5b..17590e96 100644
_ATEOF
m4_divert_pop([HELP_MODES])dnl
m4_wrap([m4_divert_push([HELP_TUNING_BEGIN])dnl
-@@ -742,6 +749,7 @@ Execution tuning:
+@@ -757,6 +764,7 @@ Execution tuning:
-d, --debug inhibit clean up and top-level logging
[ default for debugging scripts]
-x, --trace enable tests shell tracing
@@ -56,7 +57,7 @@ index 0c0e3c5b..17590e96 100644
_ATEOF
m4_divert_pop([HELP_TUNING_BEGIN])])dnl
m4_divert_push([HELP_END])dnl
-@@ -1129,7 +1137,9 @@ at_fn_group_banner ()
+@@ -1139,7 +1147,9 @@ at_fn_group_banner ()
[*]) at_desc_line="$[1]: " ;;
esac
AS_VAR_APPEND([at_desc_line], ["$[3]$[4]"])
@@ -67,7 +68,7 @@ index 0c0e3c5b..17590e96 100644
echo "# -*- compilation -*-" >> "$at_group_log"
}
-@@ -1155,42 +1165,51 @@ _ATEOF
+@@ -1165,42 +1175,51 @@ _ATEOF
case $at_xfail:$at_status in
yes:0)
at_msg="UNEXPECTED PASS"
@@ -124,6 +125,3 @@ index 0c0e3c5b..17590e96 100644
fi
at_log_msg="$at_group. $at_desc ($at_setup_line): $at_msg"
case $at_status in
---
-2.25.1
-
diff --git a/meta/recipes-devtools/autoconf/autoconf/man-host-perl.patch b/meta/recipes-devtools/autoconf/autoconf/man-host-perl.patch
index c6c135625d..0f49583a64 100644
--- a/meta/recipes-devtools/autoconf/autoconf/man-host-perl.patch
+++ b/meta/recipes-devtools/autoconf/autoconf/man-host-perl.patch
@@ -1,13 +1,20 @@
-Don't use the target perl when regenerating the man pages.
+From 1c033f2a23941c46d88b9ac279f87bf2c6e99499 Mon Sep 17 00:00:00 2001
+From: Ross Burton <ross.burton@arm.com>
+Date: Wed, 15 Jul 2020 16:03:21 +0100
+Subject: [PATCH] Don't use the target perl when regenerating the man pages.
Upstream-Status: Inappropriate
Signed-off-by: Ross Burton <ross.burton@arm.com>
+---
+ man/local.mk | 3 +--
+ 1 file changed, 1 insertion(+), 2 deletions(-)
+
diff --git a/man/local.mk b/man/local.mk
-index e69858b1..78c68ab5 100644
+index 775c131..ba94753 100644
--- a/man/local.mk
+++ b/man/local.mk
-@@ -67,13 +67,12 @@ SUFFIXES += .w .1
+@@ -77,13 +77,12 @@ SUFFIXES += .w .1
@echo "Updating man page $@"
$(MKDIR_P) $(@D)
PATH="$(top_srcdir)/man$(PATH_SEPARATOR)$$PATH"; \
diff --git a/meta/recipes-devtools/autoconf/autoconf/no-man.patch b/meta/recipes-devtools/autoconf/autoconf/no-man.patch
index 2c44375f43..3e741edfb2 100644
--- a/meta/recipes-devtools/autoconf/autoconf/no-man.patch
+++ b/meta/recipes-devtools/autoconf/autoconf/no-man.patch
@@ -1,14 +1,26 @@
+From 8bcaf677e41f1f5d3fa0a746e35958e7b303ac71 Mon Sep 17 00:00:00 2001
+From: Ross Burton <ross.burton@arm.com>
+Date: Wed, 15 Jul 2020 16:03:21 +0100
+Subject: [PATCH] autoconf: upgrade to 2.71
+
For native builds we don't care about the documentation, and this would
otherwise pull in a dependency on help2man.
Upstream-Status: Inappropriate
Signed-off-by: Ross Burton <ross.burton@arm.com>
+---
+ Makefile.in | 10 ----------
+ 1 file changed, 10 deletions(-)
+
diff --git a/Makefile.in b/Makefile.in
-index 146e8e3..a1827c1 100644
+index c8d6425..72d6d05 100644
--- a/Makefile.in
+++ b/Makefile.in
-@@ -763,10 +762,0 @@ dist_buildaux_SCRIPTS = \
+@@ -771,16 +771,6 @@ buildauxdir = $(pkgdatadir)/build-aux
+ dist_buildaux_DATA = \
+ $(AUXSCRIPTS)
+
-dist_man_MANS = \
- man/autoconf.1 \
- man/autoheader.1 \
@@ -19,3 +31,6 @@ index 146e8e3..a1827c1 100644
- man/ifnames.1
-
-
+ # Each manpage depends on:
+ # - its .w and .x files and its source script in bin/
+ # - common.x for the SEE ALSO list
diff --git a/meta/recipes-devtools/autoconf/autoconf/preferbash.patch b/meta/recipes-devtools/autoconf/autoconf/preferbash.patch
index cfb145a279..64fed1fd82 100644
--- a/meta/recipes-devtools/autoconf/autoconf/preferbash.patch
+++ b/meta/recipes-devtools/autoconf/autoconf/preferbash.patch
@@ -1,7 +1,7 @@
-From 0aac3047cd7681d610b22d79501c297fa3433148 Mon Sep 17 00:00:00 2001
+From a877ff979349d3bf6f5f0d92fe4e741be0ad98b4 Mon Sep 17 00:00:00 2001
From: Ross Burton <ross.burton@intel.com>
Date: Thu, 12 Mar 2020 17:25:41 +0000
-Subject: [PATCH 2/7] m4sh: prefer bash over sh
+Subject: [PATCH] m4sh: prefer bash over sh
_AS_DETECT_BETTER_SHELL looks for a good shell to use, and tries to look for
'sh' before 'bash'. Whilst for many systems sh is a symlink to bash,
@@ -16,15 +16,16 @@ Change the search order to bash then sh, so that a known-good shell (bash)
is used if available over something which is merely POSIX compliant.
Upstream-Status: Inappropriate [oe specific]
+
---
lib/m4sugar/m4sh.m4 | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/lib/m4sugar/m4sh.m4 b/lib/m4sugar/m4sh.m4
-index 9d543952..84ef84a9 100644
+index 368487f..cc70f51 100644
--- a/lib/m4sugar/m4sh.m4
+++ b/lib/m4sugar/m4sh.m4
-@@ -230,7 +230,7 @@ dnl Remove any tests from suggested that are also required
+@@ -233,7 +233,7 @@ dnl Remove any tests from suggested that are also required
[_AS_PATH_WALK([/bin$PATH_SEPARATOR/usr/bin$PATH_SEPARATOR$PATH],
[case $as_dir in @%:@(
/*)
@@ -33,6 +34,3 @@ index 9d543952..84ef84a9 100644
# Try only shells that exist, to save several forks.
as_shell=$as_dir$as_base
AS_IF([{ test -f "$as_shell" || test -f "$as_shell.exe"; } &&
---
-2.25.1
-
diff --git a/meta/recipes-devtools/autoconf/autoconf/program_prefix.patch b/meta/recipes-devtools/autoconf/autoconf/program_prefix.patch
index 657cbb351b..f647f2a8c0 100644
--- a/meta/recipes-devtools/autoconf/autoconf/program_prefix.patch
+++ b/meta/recipes-devtools/autoconf/autoconf/program_prefix.patch
@@ -1,19 +1,20 @@
-From f4f19a5c03e8ae3b9cc93d24b76694f4b7b2eb76 Mon Sep 17 00:00:00 2001
+From 7949496ff3834dcd98407cc3f3ea022ee2471d52 Mon Sep 17 00:00:00 2001
From: Ross Burton <ross.burton@intel.com>
Date: Thu, 12 Mar 2020 17:28:38 +0000
-Subject: [PATCH 3/7] program_prefix.patch
+Subject: [PATCH] program_prefix.patch
Upstream-Status: Inappropriate [oe specific]
Signed-off-by: Ross Burton <ross.burton@intel.com>
+
---
lib/autoconf/general.m4 | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/lib/autoconf/general.m4 b/lib/autoconf/general.m4
-index 16f0d074..4c5e0b36 100644
+index 47d896d..3deaa46 100644
--- a/lib/autoconf/general.m4
+++ b/lib/autoconf/general.m4
-@@ -2070,7 +2070,7 @@ _AC_CANONICAL_SPLIT([target])
+@@ -2071,7 +2071,7 @@ _AC_CANONICAL_SPLIT([target])
# The aliases save the names the user supplied, while $host etc.
# will get canonicalized.
@@ -22,6 +23,3 @@ index 16f0d074..4c5e0b36 100644
test "$program_prefix$program_suffix$program_transform_name" = \
NONENONEs,x,x, &&
program_prefix=${target_alias}-[]dnl
---
-2.25.1
-
diff --git a/meta/recipes-devtools/autoconf/autoconf/remove-usr-local-lib-from-m4.patch b/meta/recipes-devtools/autoconf/autoconf/remove-usr-local-lib-from-m4.patch
index f38780130a..ca1534b8f0 100644
--- a/meta/recipes-devtools/autoconf/autoconf/remove-usr-local-lib-from-m4.patch
+++ b/meta/recipes-devtools/autoconf/autoconf/remove-usr-local-lib-from-m4.patch
@@ -1,19 +1,20 @@
-From a08643ac3fef884900d6cfa161f0acec3ef104d1 Mon Sep 17 00:00:00 2001
+From 294a8d47a70db077691624615c5cb6d331a3299b Mon Sep 17 00:00:00 2001
From: Ross Burton <ross.burton@intel.com>
Date: Thu, 12 Mar 2020 17:25:37 +0000
-Subject: [PATCH 1/7] remove-usr-local-lib-from-m4.patch
+Subject: [PATCH] remove-usr-local-lib-from-m4.patch
Upstream-Status: Inappropriate [oe specific]
Signed-off-by: Ross Burton <ross.burton@intel.com>
+
---
lib/autoconf/functions.m4 | 9 ---------
1 file changed, 9 deletions(-)
diff --git a/lib/autoconf/functions.m4 b/lib/autoconf/functions.m4
-index 12f60b99..07da7941 100644
+index 9b3f3c0..1faa99b 100644
--- a/lib/autoconf/functions.m4
+++ b/lib/autoconf/functions.m4
-@@ -801,15 +801,6 @@ if test $ac_have_func = no; then
+@@ -825,15 +825,6 @@ if test $ac_have_func = no; then
[LIBS="-lutil $LIBS" ac_have_func=yes ac_cv_func_getloadavg_setgid=yes])
fi
@@ -29,6 +30,3 @@ index 12f60b99..07da7941 100644
# Make sure it is really in the library, if we think we found it,
# otherwise set up the replacement function.
AC_CHECK_FUNCS(getloadavg, [],
---
-2.25.1
-
diff --git a/meta/recipes-devtools/autoconf/autoconf_2.71.bb b/meta/recipes-devtools/autoconf/autoconf_2.71.bb
deleted file mode 100644
index 799191e2ca..0000000000
--- a/meta/recipes-devtools/autoconf/autoconf_2.71.bb
+++ /dev/null
@@ -1,81 +0,0 @@
-SUMMARY = "A GNU tool that produce shell scripts to automatically configure software"
-DESCRIPTION = "Autoconf is an extensible package of M4 macros that produce shell scripts to automatically \
-configure software source code packages. Autoconf creates a configuration script for a package from a template \
-file that lists the operating system features that the package can use, in the form of M4 macro calls."
-LICENSE = "GPL-3.0-or-later"
-HOMEPAGE = "http://www.gnu.org/software/autoconf/"
-SECTION = "devel"
-DEPENDS = "m4-native autoconf-native automake-native gnu-config-native help2man-native"
-DEPENDS:remove:class-native = "autoconf-native automake-native help2man-native"
-
-LIC_FILES_CHKSUM = "file://COPYING;md5=cc3f3a7596cb558bbd9eb7fbaa3ef16c \
- file://COPYINGv3;md5=1ebbd3e34237af26da5dc08a4e440464"
-
-SRC_URI = "${GNU_MIRROR}/autoconf/${BP}.tar.gz \
- file://program_prefix.patch \
- file://autoreconf-exclude.patch \
- file://remove-usr-local-lib-from-m4.patch \
- file://preferbash.patch \
- file://autotest-automake-result-format.patch \
- file://man-host-perl.patch \
- "
-SRC_URI:append:class-native = " file://no-man.patch"
-
-SRC_URI[sha256sum] = "431075ad0bf529ef13cb41e9042c542381103e80015686222b8a9d4abef42a1c"
-
-RDEPENDS:${PN} = "m4 gnu-config \
- perl \
- perl-module-bytes \
- perl-module-carp \
- perl-module-constant \
- perl-module-data-dumper \
- perl-module-errno \
- perl-module-exporter \
- perl-module-file-basename \
- perl-module-file-compare \
- perl-module-file-copy \
- perl-module-file-find \
- perl-module-file-glob \
- perl-module-file-path \
- perl-module-file-spec \
- perl-module-file-spec-unix \
- perl-module-file-stat \
- perl-module-file-temp \
- perl-module-getopt-long \
- perl-module-io-file \
- perl-module-list-util \
- perl-module-overloading \
- perl-module-posix \
- perl-module-scalar-util \
- perl-module-symbol \
- perl-module-thread-queue \
- perl-module-threads \
- "
-RDEPENDS:${PN}:class-native = "m4-native gnu-config-native hostperl-runtime-native"
-
-inherit autotools texinfo
-
-PERL = "${USRBINPATH}/perl"
-PERL:class-native = "/usr/bin/env perl"
-PERL:class-nativesdk = "/usr/bin/env perl"
-
-CACHED_CONFIGUREVARS += "ac_cv_path_PERL='${PERL}'"
-
-EXTRA_OECONF += "ac_cv_path_M4=m4 ac_cv_prog_TEST_EMACS=no"
-
-# As autoconf installs its own config.* files, ensure that they're always up to date.
-update_gnu_config() {
- install -m 0755 ${STAGING_DATADIR_NATIVE}/gnu-config/config.guess ${S}/build-aux
- install -m 0755 ${STAGING_DATADIR_NATIVE}/gnu-config/config.sub ${S}/build-aux
-}
-do_configure[prefuncs] += "update_gnu_config"
-
-do_configure:class-native() {
- oe_runconf
-}
-
-do_install:append() {
- rm -rf ${D}${datadir}/emacs
-}
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/autoconf/autoconf_2.72e.bb b/meta/recipes-devtools/autoconf/autoconf_2.72e.bb
new file mode 100644
index 0000000000..db374373cf
--- /dev/null
+++ b/meta/recipes-devtools/autoconf/autoconf_2.72e.bb
@@ -0,0 +1,83 @@
+SUMMARY = "A GNU tool that produce shell scripts to automatically configure software"
+DESCRIPTION = "Autoconf is an extensible package of M4 macros that produce shell scripts to automatically \
+configure software source code packages. Autoconf creates a configuration script for a package from a template \
+file that lists the operating system features that the package can use, in the form of M4 macro calls."
+LICENSE = "GPL-3.0-or-later"
+HOMEPAGE = "http://www.gnu.org/software/autoconf/"
+SECTION = "devel"
+DEPENDS = "m4-native autoconf-native automake-native gnu-config-native help2man-native"
+DEPENDS:remove:class-native = "autoconf-native automake-native help2man-native"
+
+LIC_FILES_CHKSUM = "file://COPYING;md5=cc3f3a7596cb558bbd9eb7fbaa3ef16c \
+ file://COPYINGv3;md5=1ebbd3e34237af26da5dc08a4e440464"
+
+SRC_URI = " \
+ https://alpha.gnu.org/gnu/autoconf/${BP}.tar.gz \
+ file://program_prefix.patch \
+ file://autoreconf-exclude.patch \
+ file://remove-usr-local-lib-from-m4.patch \
+ file://preferbash.patch \
+ file://autotest-automake-result-format.patch \
+ file://man-host-perl.patch \
+"
+SRC_URI:append:class-native = " file://no-man.patch"
+
+SRC_URI[sha256sum] = "f3478d3b597d51f5d61596fb2f6f6aba49cdd974b4b05ff0bac57f56b5cfdb39"
+
+RDEPENDS:${PN} = "m4 gnu-config \
+ perl \
+ perl-module-bytes \
+ perl-module-carp \
+ perl-module-constant \
+ perl-module-data-dumper \
+ perl-module-errno \
+ perl-module-exporter \
+ perl-module-file-basename \
+ perl-module-file-compare \
+ perl-module-file-copy \
+ perl-module-file-find \
+ perl-module-file-glob \
+ perl-module-file-path \
+ perl-module-file-spec \
+ perl-module-file-spec-unix \
+ perl-module-file-stat \
+ perl-module-file-temp \
+ perl-module-getopt-long \
+ perl-module-io-file \
+ perl-module-list-util \
+ perl-module-overloading \
+ perl-module-posix \
+ perl-module-scalar-util \
+ perl-module-symbol \
+ perl-module-thread-queue \
+ perl-module-threads \
+ perl-module-feature \
+ "
+RDEPENDS:${PN}:class-native = "m4-native gnu-config-native hostperl-runtime-native"
+
+inherit autotools texinfo
+
+PERL = "${USRBINPATH}/perl"
+PERL:class-native = "/usr/bin/env perl"
+PERL:class-nativesdk = "/usr/bin/env perl"
+
+CACHED_CONFIGUREVARS += "ac_cv_path_PERL='${PERL}'"
+
+EXTRA_OECONF += "ac_cv_path_M4=m4 ac_cv_prog_TEST_EMACS=no"
+
+# As autoconf installs its own config.* files, ensure that they're always up to date.
+update_gnu_config() {
+ install -m 0755 ${STAGING_DATADIR_NATIVE}/gnu-config/config.guess ${S}/build-aux
+ install -m 0755 ${STAGING_DATADIR_NATIVE}/gnu-config/config.sub ${S}/build-aux
+}
+do_configure[prefuncs] += "update_gnu_config"
+
+do_configure:class-native() {
+ oe_runconf
+}
+
+do_install:append() {
+ rm -rf ${D}${datadir}/emacs
+}
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/automake/automake/buildtest.patch b/meta/recipes-devtools/automake/automake/buildtest.patch
index b88b9e8693..c43a4ac8f3 100644
--- a/meta/recipes-devtools/automake/automake/buildtest.patch
+++ b/meta/recipes-devtools/automake/automake/buildtest.patch
@@ -36,7 +36,7 @@ index e0db651..de137fa 100644
-check-TESTS: $(TESTS)
+AM_RECURSIVE_TARGETS += buildtest runtest
+
-+buildtest-TESTS: $(TESTS)
++buildtest-TESTS: $(TESTS) $(check_PROGRAMS)
+
+check-TESTS: buildtest-TESTS
+ $(MAKE) $(AM_MAKEFLAGS) runtest-TESTS
diff --git a/meta/recipes-devtools/binutils/binutils-2.38.inc b/meta/recipes-devtools/binutils/binutils-2.38.inc
deleted file mode 100644
index dc0a2a4054..0000000000
--- a/meta/recipes-devtools/binutils/binutils-2.38.inc
+++ /dev/null
@@ -1,36 +0,0 @@
-LIC_FILES_CHKSUM="\
- file://COPYING;md5=59530bdf33659b29e73d4adb9f9f6552\
- file://COPYING.LIB;md5=9f604d8a4f8e74f4f5140845a21b6674\
- file://COPYING3;md5=d32239bcb673463ab874e80d47fae504\
- file://COPYING3.LIB;md5=6a6a8e020838b23406c81b19c1d46df6\
- file://gas/COPYING;md5=d32239bcb673463ab874e80d47fae504\
- file://include/COPYING;md5=59530bdf33659b29e73d4adb9f9f6552\
- file://include/COPYING3;md5=d32239bcb673463ab874e80d47fae504\
- file://libiberty/COPYING.LIB;md5=a916467b91076e631dd8edb7424769c7\
- file://bfd/COPYING;md5=d32239bcb673463ab874e80d47fae504\
- "
-
-# When upgrading to 2.39, please make sure there is no trailing .0, so
-# that upstream version check can work correctly.
-PV = "2.38"
-CVE_VERSION = "2.38"
-SRCBRANCH ?= "binutils-2_38-branch"
-
-UPSTREAM_CHECK_GITTAGREGEX = "binutils-(?P<pver>\d+_(\d_?)*)"
-
-SRCREV ?= "134f17ef688ba4c72a6c4e57af7382882cc1a705"
-BINUTILS_GIT_URI ?= "git://sourceware.org/git/binutils-gdb.git;branch=${SRCBRANCH};protocol=git"
-SRC_URI = "\
- ${BINUTILS_GIT_URI} \
- file://0004-Point-scripts-location-to-libdir.patch \
- file://0005-Only-generate-an-RPATH-entry-if-LD_RUN_PATH-is-not-e.patch \
- file://0006-don-t-let-the-distro-compiler-point-to-the-wrong-ins.patch \
- file://0007-warn-for-uses-of-system-directories-when-cross-linki.patch \
- file://0008-fix-the-incorrect-assembling-for-ppc-wait-mnemonic.patch \
- file://0009-Use-libtool-2.4.patch \
- file://0010-Fix-rpath-in-libtool-when-sysroot-is-enabled.patch \
- file://0011-sync-with-OE-libtool-changes.patch \
- file://0012-Check-for-clang-before-checking-gcc-version.patch \
- file://0013-Avoid-as-info-race-condition.patch \
-"
-S = "${WORKDIR}/git"
diff --git a/meta/recipes-devtools/binutils/binutils-2.42.inc b/meta/recipes-devtools/binutils/binutils-2.42.inc
new file mode 100644
index 0000000000..3b6f47d4ce
--- /dev/null
+++ b/meta/recipes-devtools/binutils/binutils-2.42.inc
@@ -0,0 +1,40 @@
+LIC_FILES_CHKSUM="\
+ file://COPYING;md5=59530bdf33659b29e73d4adb9f9f6552\
+ file://COPYING.LIB;md5=9f604d8a4f8e74f4f5140845a21b6674\
+ file://COPYING3;md5=d32239bcb673463ab874e80d47fae504\
+ file://COPYING3.LIB;md5=6a6a8e020838b23406c81b19c1d46df6\
+ file://gas/COPYING;md5=d32239bcb673463ab874e80d47fae504\
+ file://include/COPYING;md5=59530bdf33659b29e73d4adb9f9f6552\
+ file://include/COPYING3;md5=d32239bcb673463ab874e80d47fae504\
+ file://libiberty/COPYING.LIB;md5=a916467b91076e631dd8edb7424769c7\
+ file://bfd/COPYING;md5=d32239bcb673463ab874e80d47fae504\
+ "
+
+# When upgrading to 2.42, please make sure there is no trailing .0, so
+# that upstream version check can work correctly.
+PV = "2.42"
+CVE_VERSION = "2.42"
+SRCBRANCH ?= "binutils-2_42-branch"
+
+UPSTREAM_CHECK_GITTAGREGEX = "binutils-(?P<pver>\d+_(\d_?)*)"
+
+CVE_STATUS[CVE-2023-25584] = "cpe-incorrect: Applies only for version 2.40 and earlier"
+
+SRCREV ?= "553c7f61b74badf91df484450944675efd9cd485"
+BINUTILS_GIT_URI ?= "git://sourceware.org/git/binutils-gdb.git;branch=${SRCBRANCH};protocol=https"
+SRC_URI = "\
+ ${BINUTILS_GIT_URI} \
+ file://0004-Point-scripts-location-to-libdir.patch \
+ file://0005-don-t-let-the-distro-compiler-point-to-the-wrong-ins.patch \
+ file://0006-warn-for-uses-of-system-directories-when-cross-linki.patch \
+ file://0007-fix-the-incorrect-assembling-for-ppc-wait-mnemonic.patch \
+ file://0008-Use-libtool-2.4.patch \
+ file://0009-Fix-rpath-in-libtool-when-sysroot-is-enabled.patch \
+ file://0010-sync-with-OE-libtool-changes.patch \
+ file://0011-Check-for-clang-before-checking-gcc-version.patch \
+ file://0012-Only-generate-an-RPATH-entry-if-LD_RUN_PATH-is-not-e.patch \
+ file://0013-Define-alignof-using-_Alignof-when-using-C11-or-newe.patch \
+ file://0014-Remove-duplicate-pe-dll.o-entry-deom-targ_extra_ofil.patch \
+ file://0015-gprofng-change-use-of-bignum-to-bigint.patch \
+"
+S = "${WORKDIR}/git"
diff --git a/meta/recipes-devtools/binutils/binutils-cross-canadian.inc b/meta/recipes-devtools/binutils/binutils-cross-canadian.inc
index b3d591e658..62ebb3f5bc 100644
--- a/meta/recipes-devtools/binutils/binutils-cross-canadian.inc
+++ b/meta/recipes-devtools/binutils/binutils-cross-canadian.inc
@@ -4,7 +4,7 @@ SUMMARY = "GNU binary utilities (cross-canadian for ${TARGET_ARCH} target)"
PN = "binutils-cross-canadian-${TRANSLATED_TARGET_ARCH}"
BPN = "binutils"
-DEPENDS = "flex-native bison-native virtual/${HOST_PREFIX}gcc-crosssdk virtual/nativesdk-libc nativesdk-zlib nativesdk-gettext nativesdk-flex"
+DEPENDS = "flex-native bison-native virtual/${HOST_PREFIX}gcc virtual/nativesdk-libc nativesdk-zlib nativesdk-gettext nativesdk-flex"
EXTRA_OECONF += "--with-sysroot=${SDKPATH}/sysroots/${TUNE_PKGARCH}${TARGET_VENDOR}-${TARGET_OS} \
--enable-poison-system-directories \
"
@@ -23,7 +23,9 @@ do_install () {
rm -f ${D}${libdir}/libiberty*
rm -f ${D}${libdir}/libopcodes*
rm -f ${D}${includedir}/*.h
-
+ rm -f ${D}${sysconfdir}/gprofng.rc
+ rmdir ${D}${sysconfdir} || :
+
cross_canadian_bindirlinks
}
diff --git a/meta/recipes-devtools/binutils/binutils-cross-canadian_2.38.bb b/meta/recipes-devtools/binutils/binutils-cross-canadian_2.42.bb
index 5dbaa03017..5dbaa03017 100644
--- a/meta/recipes-devtools/binutils/binutils-cross-canadian_2.38.bb
+++ b/meta/recipes-devtools/binutils/binutils-cross-canadian_2.42.bb
diff --git a/meta/recipes-devtools/binutils/binutils-cross-testsuite_2.38.bb b/meta/recipes-devtools/binutils/binutils-cross-testsuite_2.38.bb
deleted file mode 100644
index ca99e91130..0000000000
--- a/meta/recipes-devtools/binutils/binutils-cross-testsuite_2.38.bb
+++ /dev/null
@@ -1,81 +0,0 @@
-require binutils.inc
-require binutils-${PV}.inc
-
-BPN = "binutils"
-
-DEPENDS += "dejagnu-native expect-native"
-DEPENDS += "binutils-native"
-
-deltask do_compile
-deltask do_install
-
-inherit nopackages
-
-do_configure[dirs] += "${B}/ld ${B}/bfd"
-do_configure() {
- # create config.h, oe enables initfini-array by default
- echo "#define HAVE_INITFINI_ARRAY" > ${B}/ld/config.h
-}
-
-# target depends
-DEPENDS += "virtual/${MLPREFIX}${TARGET_PREFIX}binutils"
-DEPENDS += "virtual/${MLPREFIX}${TARGET_PREFIX}gcc"
-DEPENDS += "virtual/${MLPREFIX}${TARGET_PREFIX}compilerlibs"
-DEPENDS += "virtual/${MLPREFIX}libc"
-
-python check_prepare() {
- def suffix_sys(sys):
- if sys.endswith("-linux"):
- return sys + "-gnu"
- return sys
-
- def generate_site_exp(d, suite):
- content = []
- content.append('set srcdir "{0}/{1}"'.format(d.getVar("S"), suite))
- content.append('set objdir "{0}/{1}"'.format(d.getVar("B"), suite))
- content.append('set build_alias "{0}"'.format(d.getVar("BUILD_SYS")))
- content.append('set build_triplet {0}'.format(d.getVar("BUILD_SYS")))
- # use BUILD here since HOST=TARGET
- content.append('set host_alias "{0}"'.format(d.getVar("BUILD_SYS")))
- content.append('set host_triplet {0}'.format(d.getVar("BUILD_SYS")))
- content.append('set target_alias "{0}"'.format(d.getVar("TARGET_SYS")))
- content.append('set target_triplet {0}'.format(suffix_sys(d.getVar("TARGET_SYS"))))
- content.append("set development true")
- content.append("set experimental false")
-
- content.append(d.expand('set CXXFILT "${TARGET_PREFIX}c++filt"'))
- content.append(d.expand('set CC "${TARGET_PREFIX}gcc --sysroot=${STAGING_DIR_TARGET} ${TUNE_CCARGS}"'))
- content.append(d.expand('set CXX "${TARGET_PREFIX}g++ --sysroot=${STAGING_DIR_TARGET} ${TUNE_CCARGS}"'))
- content.append(d.expand('set CFLAGS_FOR_TARGET "--sysroot=${STAGING_DIR_TARGET} ${TUNE_CCARGS}"'))
-
- if suite == "ld" and d.getVar("TUNE_ARCH") == "mips64":
- # oe patches binutils to have the default mips64 abi as 64bit, but
- # skips gas causing issues with the ld test suite (which uses gas)
- content.append('set ASFLAGS "-64"')
-
- return "\n".join(content)
-
- for i in ["binutils", "gas", "ld"]:
- builddir = os.path.join(d.getVar("B"), i)
- if not os.path.isdir(builddir):
- os.makedirs(builddir)
- with open(os.path.join(builddir, "site.exp"), "w") as f:
- f.write(generate_site_exp(d, i))
-}
-
-CHECK_TARGETS ??= "binutils gas ld"
-
-do_check[dirs] = "${B} ${B}/binutils ${B}/gas ${B}/ld"
-do_check[prefuncs] += "check_prepare"
-do_check[nostamp] = "1"
-do_check() {
- export LC_ALL=C
- for i in ${CHECK_TARGETS}; do
- (cd ${B}/$i; runtest \
- --tool $i \
- --srcdir ${S}/$i/testsuite \
- --ignore 'plugin.exp' \
- || true)
- done
-}
-addtask check after do_configure
diff --git a/meta/recipes-devtools/binutils/binutils-cross-testsuite_2.42.bb b/meta/recipes-devtools/binutils/binutils-cross-testsuite_2.42.bb
new file mode 100644
index 0000000000..630815c7a3
--- /dev/null
+++ b/meta/recipes-devtools/binutils/binutils-cross-testsuite_2.42.bb
@@ -0,0 +1,83 @@
+require binutils.inc
+require binutils-${PV}.inc
+
+BPN = "binutils"
+
+DEPENDS += "dejagnu-native expect-native"
+DEPENDS += "binutils-native"
+
+deltask do_compile
+deltask do_install
+
+inherit nopackages
+
+do_configure[dirs] += "${B}/ld ${B}/bfd"
+do_configure() {
+ # create config.h, oe enables initfini-array by default
+ echo "#define HAVE_INITFINI_ARRAY" > ${B}/ld/config.h
+}
+
+# target depends
+DEPENDS += "virtual/${TARGET_PREFIX}binutils"
+DEPENDS += "virtual/${TARGET_PREFIX}gcc"
+DEPENDS += "virtual/${MLPREFIX}${TARGET_PREFIX}compilerlibs"
+DEPENDS += "virtual/${MLPREFIX}libc"
+
+python check_prepare() {
+ def suffix_sys(sys):
+ if sys.endswith("-linux"):
+ return sys + "-gnu"
+ return sys
+
+ def generate_site_exp(d, suite):
+ content = []
+ content.append('set srcdir "{0}/{1}"'.format(d.getVar("S"), suite))
+ content.append('set objdir "{0}/{1}"'.format(d.getVar("B"), suite))
+ content.append('set build_alias "{0}"'.format(d.getVar("BUILD_SYS")))
+ content.append('set build_triplet {0}'.format(d.getVar("BUILD_SYS")))
+ # use BUILD here since HOST=TARGET
+ content.append('set host_alias "{0}"'.format(d.getVar("BUILD_SYS")))
+ content.append('set host_triplet {0}'.format(d.getVar("BUILD_SYS")))
+ content.append('set target_alias "{0}"'.format(d.getVar("TARGET_SYS")))
+ content.append('set target_triplet {0}'.format(suffix_sys(d.getVar("TARGET_SYS"))))
+ content.append("set development true")
+ content.append("set experimental false")
+
+ content.append(d.expand('set CXXFILT "${TARGET_PREFIX}c++filt"'))
+ content.append(d.expand('set CC "${TARGET_PREFIX}gcc --sysroot=${STAGING_DIR_TARGET} ${TUNE_CCARGS}"'))
+ content.append(d.expand('set CXX "${TARGET_PREFIX}g++ --sysroot=${STAGING_DIR_TARGET} ${TUNE_CCARGS}"'))
+ content.append(d.expand('set CFLAGS_FOR_TARGET "--sysroot=${STAGING_DIR_TARGET} ${TUNE_CCARGS}"'))
+ content.append(d.expand('set LD "${TARGET_PREFIX}ld ${TUNE_LDARGS}"'))
+ content.append(d.expand('set LDFLAGS_FOR_TARGET "${TUNE_LDARGS}"'))
+
+ if suite == "ld" and d.getVar("TUNE_ARCH") == "mips64":
+ # oe patches binutils to have the default mips64 abi as 64bit, but
+ # skips gas causing issues with the ld test suite (which uses gas)
+ content.append('set ASFLAGS "-64"')
+
+ return "\n".join(content)
+
+ for i in ["binutils", "gas", "ld"]:
+ builddir = os.path.join(d.getVar("B"), i)
+ if not os.path.isdir(builddir):
+ os.makedirs(builddir)
+ with open(os.path.join(builddir, "site.exp"), "w") as f:
+ f.write(generate_site_exp(d, i))
+}
+
+CHECK_TARGETS ??= "binutils gas ld"
+
+do_check[dirs] = "${B} ${B}/binutils ${B}/gas ${B}/ld"
+do_check[prefuncs] += "check_prepare"
+do_check[nostamp] = "1"
+do_check() {
+ export LC_ALL=C
+ for i in ${CHECK_TARGETS}; do
+ (cd ${B}/$i; runtest \
+ --tool $i \
+ --srcdir ${S}/$i/testsuite \
+ --ignore 'plugin.exp' \
+ || true)
+ done
+}
+addtask check after do_configure
diff --git a/meta/recipes-devtools/binutils/binutils-cross.inc b/meta/recipes-devtools/binutils/binutils-cross.inc
index 02ec891606..835d4fa69b 100644
--- a/meta/recipes-devtools/binutils/binutils-cross.inc
+++ b/meta/recipes-devtools/binutils/binutils-cross.inc
@@ -16,6 +16,7 @@ SRC_URI += "file://0002-binutils-cross-Do-not-generate-linker-script-directo.pat
# and mean the linker scripts have to be relocated.
EXTRA_OECONF += "--with-sysroot=${STAGING_DIR_TARGET} \
--disable-install-libbfd \
+ --disable-gprofng \
--enable-poison-system-directories \
--with-lib-path==${target_base_libdir}:=${target_libdir} \
"
diff --git a/meta/recipes-devtools/binutils/binutils-cross_2.38.bb b/meta/recipes-devtools/binutils/binutils-cross_2.42.bb
index fbd1f7d25a..fbd1f7d25a 100644
--- a/meta/recipes-devtools/binutils/binutils-cross_2.38.bb
+++ b/meta/recipes-devtools/binutils/binutils-cross_2.42.bb
diff --git a/meta/recipes-devtools/binutils/binutils-crosssdk_2.38.bb b/meta/recipes-devtools/binutils/binutils-crosssdk_2.38.bb
deleted file mode 100644
index 5bd036fd46..0000000000
--- a/meta/recipes-devtools/binutils/binutils-crosssdk_2.38.bb
+++ /dev/null
@@ -1,13 +0,0 @@
-require binutils-cross_${PV}.bb
-
-inherit crosssdk
-
-PN = "binutils-crosssdk-${SDK_SYS}"
-
-PROVIDES = "virtual/${TARGET_PREFIX}binutils-crosssdk"
-
-SRC_URI += "file://0001-binutils-crosssdk-Generate-relocatable-SDKs.patch"
-
-do_configure:prepend () {
- sed -i 's#/usr/local/lib /lib /usr/lib#${SDKPATHNATIVE}/lib ${SDKPATHNATIVE}/usr/lib /usr/local/lib /lib /usr/lib#' ${S}/ld/configure.tgt
-}
diff --git a/meta/recipes-devtools/binutils/binutils-crosssdk_2.42.bb b/meta/recipes-devtools/binutils/binutils-crosssdk_2.42.bb
new file mode 100644
index 0000000000..6752659304
--- /dev/null
+++ b/meta/recipes-devtools/binutils/binutils-crosssdk_2.42.bb
@@ -0,0 +1,11 @@
+require binutils-cross_${PV}.bb
+
+inherit crosssdk
+
+PN = "binutils-crosssdk-${SDK_SYS}"
+
+SRC_URI += "file://0001-binutils-crosssdk-Generate-relocatable-SDKs.patch"
+
+do_configure:prepend () {
+ sed -i 's#/usr/local/lib /lib /usr/lib#${SDKPATHNATIVE}/lib ${SDKPATHNATIVE}/usr/lib /usr/local/lib /lib /usr/lib#' ${S}/ld/configure.tgt
+}
diff --git a/meta/recipes-devtools/binutils/binutils.inc b/meta/recipes-devtools/binutils/binutils.inc
index 789c8bec21..e176b5cff1 100644
--- a/meta/recipes-devtools/binutils/binutils.inc
+++ b/meta/recipes-devtools/binutils/binutils.inc
@@ -33,12 +33,33 @@ FILES:${PN}-dev = " \
${libdir}/libctf-nobfd.so \
${libdir}/libopcodes.so"
+FILES:${PN}-staticdev += "${libdir}/gprofng/*.a"
+
# Rather than duplicating multiple entries for these, make one
# list and reuse it.
+GPROFNGS = " \
+ gp-archive \
+ gp-collect-app \
+ gp-display-html \
+ gp-display-src \
+ gp-display-text \
+ gprofng \
+"
+
+# it disables gprofng for clang and musl in the bb file
+GPROFNGS:toolchain-clang = ""
+GPROFNGS:libc-musl = ""
+
+GPROFNG_ALTS ?= ""
+GPROFNG_ALTS:x86 = "${GPROFNGS}"
+GPROFNG_ALTS:x86-64 = "${GPROFNGS}"
+GPROFNG_ALTS:aarch64 = "${GPROFNGS}"
+
LDGOLD_ALTS ?= "ld.gold dwp"
LDGOLD_ALTS:riscv64 = ""
LDGOLD_ALTS:riscv32 = ""
+LDGOLD_ALTS:loongarch64 = ""
LDGOLD_ALTS:libc-glibc:mipsarch = ""
USE_ALTERNATIVES_FOR = " \
@@ -48,6 +69,7 @@ USE_ALTERNATIVES_FOR = " \
c++filt \
elfedit \
gprof \
+ ${GPROFNG_ALTS} \
ld \
ld.bfd \
${LDGOLD_ALTS} \
@@ -76,6 +98,7 @@ EXTRA_OECONF = "--program-prefix=${TARGET_PREFIX} \
--disable-werror \
--enable-deterministic-archives \
--enable-plugins \
+ --enable-new-dtags \
--disable-gdb \
--disable-gdbserver \
--disable-libdecnumber \
@@ -179,6 +202,6 @@ ALTERNATIVE_PRIORITY = "100"
ALTERNATIVE:${PN}:class-target = "${USE_ALTERNATIVES_FOR}"
python () {
- if bb.utils.contains('DISTRO_FEATURES', 'ld-is-gold', True, False, d) and bb.utils.contains_any('TARGET_ARCH', 'riscv32 riscv64', True, False, d):
- bb.fatal("Gold linker does not _yet_ support RISC-V architecture please remove ld-is-gold from DISTRO_FEATURES")
+ if bb.utils.contains('DISTRO_FEATURES', 'ld-is-gold', True, False, d) and bb.utils.contains_any('TARGET_ARCH', 'riscv32 riscv64 loongarch64', True, False, d):
+ bb.fatal("Gold linker does not _yet_ support RISC-V and LoongArch architecture please remove ld-is-gold from DISTRO_FEATURES")
}
diff --git a/meta/recipes-devtools/binutils/binutils/0001-binutils-crosssdk-Generate-relocatable-SDKs.patch b/meta/recipes-devtools/binutils/binutils/0001-binutils-crosssdk-Generate-relocatable-SDKs.patch
index 719928be79..4ef11c56e7 100644
--- a/meta/recipes-devtools/binutils/binutils/0001-binutils-crosssdk-Generate-relocatable-SDKs.patch
+++ b/meta/recipes-devtools/binutils/binutils/0001-binutils-crosssdk-Generate-relocatable-SDKs.patch
@@ -1,4 +1,4 @@
-From 07bb7fbdacaf9cd6a1a252ffbc98f4e05e305d50 Mon Sep 17 00:00:00 2001
+From 089ee95b342e79af09258b45c888a13b35fadf26 Mon Sep 17 00:00:00 2001
From: Khem Raj <raj.khem@gmail.com>
Date: Mon, 2 Mar 2015 01:58:54 +0000
Subject: [PATCH] binutils-crosssdk: Generate relocatable SDKs
@@ -21,10 +21,10 @@ Signed-off-by: Khem Raj <raj.khem@gmail.com>
3 files changed, 6 insertions(+), 3 deletions(-)
diff --git a/gold/layout.cc b/gold/layout.cc
-index 3efe8d98ae9..9cc389664a6 100644
+index b43ae841a6c..6101a95195b 100644
--- a/gold/layout.cc
+++ b/gold/layout.cc
-@@ -5033,7 +5033,7 @@ Layout::create_interp(const Target* target)
+@@ -5102,7 +5102,7 @@ Layout::create_interp(const Target* target)
gold_assert(interp != NULL);
}
@@ -34,10 +34,10 @@ index 3efe8d98ae9..9cc389664a6 100644
Output_section_data* odata = new Output_data_const(interp, len, 1);
diff --git a/ld/genscripts.sh b/ld/genscripts.sh
-index 278f212bdad..0c52ebee4d0 100755
+index d6ceb3fe4f5..365c0e778cc 100755
--- a/ld/genscripts.sh
+++ b/ld/genscripts.sh
-@@ -304,6 +304,7 @@ DATA_ALIGNMENT_u="${DATA_ALIGNMENT_u-${DATA_ALIGNMENT_r}}"
+@@ -298,6 +298,7 @@ DATA_ALIGNMENT_u="${DATA_ALIGNMENT_u-${DATA_ALIGNMENT_r}}"
LD_FLAG=r
DATA_ALIGNMENT=${DATA_ALIGNMENT_r}
DEFAULT_DATA_ALIGNMENT="ALIGN(${SEGMENT_SIZE})"
@@ -45,7 +45,7 @@ index 278f212bdad..0c52ebee4d0 100755
( echo "/* Script for -r */"
source_sh ${CUSTOMIZER_SCRIPT}
source_sh ${srcdir}/scripttempl/${SCRIPT_NAME}.sc
-@@ -312,10 +313,12 @@ DEFAULT_DATA_ALIGNMENT="ALIGN(${SEGMENT_SIZE})"
+@@ -306,10 +307,12 @@ DEFAULT_DATA_ALIGNMENT="ALIGN(${SEGMENT_SIZE})"
LD_FLAG=u
DATA_ALIGNMENT=${DATA_ALIGNMENT_u}
CONSTRUCTING=" "
@@ -59,10 +59,10 @@ index 278f212bdad..0c52ebee4d0 100755
DATA_ALIGNMENT=${DATA_ALIGNMENT_}
RELOCATING=" "
diff --git a/ld/scripttempl/elf.sc b/ld/scripttempl/elf.sc
-index bf2268bb0ad..b1811ef130f 100644
+index fae7c2ad71c..7fe37eb1874 100644
--- a/ld/scripttempl/elf.sc
+++ b/ld/scripttempl/elf.sc
-@@ -148,8 +148,8 @@ if test -z "$DATA_SEGMENT_ALIGN"; then
+@@ -150,8 +150,8 @@ if test -z "$DATA_SEGMENT_ALIGN"; then
DATA_SEGMENT_RELRO_END=". = DATA_SEGMENT_RELRO_END (${SEPARATE_GOTPLT-0}, .);"
fi
fi
diff --git a/meta/recipes-devtools/binutils/binutils/0002-binutils-cross-Do-not-generate-linker-script-directo.patch b/meta/recipes-devtools/binutils/binutils/0002-binutils-cross-Do-not-generate-linker-script-directo.patch
index a3f7d62898..1652473bcb 100644
--- a/meta/recipes-devtools/binutils/binutils/0002-binutils-cross-Do-not-generate-linker-script-directo.patch
+++ b/meta/recipes-devtools/binutils/binutils/0002-binutils-cross-Do-not-generate-linker-script-directo.patch
@@ -1,4 +1,4 @@
-From f820ab7ea7e94d4df548be3388163ff2efb2ea96 Mon Sep 17 00:00:00 2001
+From 6fae7afeb713a60755e17443964e46190bb97ede Mon Sep 17 00:00:00 2001
From: Khem Raj <raj.khem@gmail.com>
Date: Mon, 6 Mar 2017 23:37:05 -0800
Subject: [PATCH] binutils-cross: Do not generate linker script directories
@@ -22,10 +22,10 @@ Signed-off-by: Khem Raj <raj.khem@gmail.com>
1 file changed, 25 deletions(-)
diff --git a/ld/genscripts.sh b/ld/genscripts.sh
-index 0c52ebee4d0..1acbe66bd2e 100755
+index 365c0e778cc..b0893fda3c5 100755
--- a/ld/genscripts.sh
+++ b/ld/genscripts.sh
-@@ -235,31 +235,6 @@ append_to_lib_path()
+@@ -229,31 +229,6 @@ append_to_lib_path()
fi
}
diff --git a/meta/recipes-devtools/binutils/binutils/0003-binutils-nativesdk-Search-for-alternative-ld.so.conf.patch b/meta/recipes-devtools/binutils/binutils/0003-binutils-nativesdk-Search-for-alternative-ld.so.conf.patch
index 59a97c13c7..f45690ed68 100644
--- a/meta/recipes-devtools/binutils/binutils/0003-binutils-nativesdk-Search-for-alternative-ld.so.conf.patch
+++ b/meta/recipes-devtools/binutils/binutils/0003-binutils-nativesdk-Search-for-alternative-ld.so.conf.patch
@@ -1,4 +1,4 @@
-From b2ccd25828b40310caeb094c0413e3a30a4dc0a5 Mon Sep 17 00:00:00 2001
+From 1fe9aa3f25e09281cb018b29081db4cc44bdc01f Mon Sep 17 00:00:00 2001
From: Richard Purdie <richard.purdie@linuxfoundation.org>
Date: Wed, 19 Feb 2020 09:51:16 -0800
Subject: [PATCH] binutils-nativesdk: Search for alternative ld.so.conf in SDK
@@ -29,51 +29,51 @@ Signed-off-by: Khem Raj <raj.khem@gmail.com>
5 files changed, 7 insertions(+), 3 deletions(-)
diff --git a/ld/Makefile.am b/ld/Makefile.am
-index b55a873d927..61db131fb0d 100644
+index f9ee05b1400..07801c2c3ac 100644
--- a/ld/Makefile.am
+++ b/ld/Makefile.am
@@ -42,7 +42,8 @@ ZLIBINC = @zlibinc@
- ELF_CLFAGS=-DELF_LIST_OPTIONS=@elf_list_options@ \
+ ELF_CFLAGS=-DELF_LIST_OPTIONS=@elf_list_options@ \
-DELF_SHLIB_LIST_OPTIONS=@elf_shlib_list_options@ \
- -DELF_PLT_UNWIND_LIST_OPTIONS=@elf_plt_unwind_list_options@
+ -DELF_PLT_UNWIND_LIST_OPTIONS=@elf_plt_unwind_list_options@ \
+ -DSYSCONFDIR="\"$(sysconfdir)\""
WARN_CFLAGS = @WARN_CFLAGS@
NO_WERROR = @NO_WERROR@
- AM_CFLAGS = $(WARN_CFLAGS) $(ELF_CLFAGS)
+ AM_CFLAGS = $(WARN_CFLAGS) $(ELF_CFLAGS) $(JANSSON_CFLAGS) $(ZSTD_CFLAGS)
diff --git a/ld/Makefile.in b/ld/Makefile.in
-index 61e93eeaf1e..860eb21a785 100644
+index abb0565718f..6329a12a0d6 100644
--- a/ld/Makefile.in
+++ b/ld/Makefile.in
-@@ -556,7 +556,8 @@ ZLIB = @zlibdir@ -lz
+@@ -576,7 +576,8 @@ ZLIB = @zlibdir@ -lz
ZLIBINC = @zlibinc@
- ELF_CLFAGS = -DELF_LIST_OPTIONS=@elf_list_options@ \
+ ELF_CFLAGS = -DELF_LIST_OPTIONS=@elf_list_options@ \
-DELF_SHLIB_LIST_OPTIONS=@elf_shlib_list_options@ \
- -DELF_PLT_UNWIND_LIST_OPTIONS=@elf_plt_unwind_list_options@
+ -DELF_PLT_UNWIND_LIST_OPTIONS=@elf_plt_unwind_list_options@ \
+ -DSYSCONFDIR="\"$(sysconfdir)\""
- AM_CFLAGS = $(WARN_CFLAGS) $(ELF_CLFAGS)
+ AM_CFLAGS = $(WARN_CFLAGS) $(ELF_CFLAGS) $(JANSSON_CFLAGS) $(ZSTD_CFLAGS)
diff --git a/ld/ldelf.c b/ld/ldelf.c
-index 121c25d948f..34cbc60e5e9 100644
+index 2852851dcd5..187b359af86 100644
--- a/ld/ldelf.c
+++ b/ld/ldelf.c
-@@ -930,7 +930,7 @@ ldelf_check_ld_so_conf (const struct bfd_link_needed_list *l, int force,
+@@ -939,7 +939,7 @@ ldelf_check_ld_so_conf (const struct bfd_link_needed_list *l, int force,
info.path = NULL;
info.len = info.alloc = 0;
- tmppath = concat (ld_sysroot, prefix, "/etc/ld.so.conf",
-+ tmppath = concat (ld_sysconfdir, "/etc/ld.so.conf",
++ tmppath = concat (ld_sysconfdir, "/ld.so.conf",
(const char *) NULL);
if (!ldelf_parse_ld_so_conf (&info, tmppath))
{
diff --git a/ld/ldmain.c b/ld/ldmain.c
-index ea72b14a301..1ae90a77749 100644
+index e90c2021b33..01b306e58a1 100644
--- a/ld/ldmain.c
+++ b/ld/ldmain.c
-@@ -70,6 +70,7 @@ char *program_name;
+@@ -69,6 +69,7 @@ char *program_name;
/* The prefix for system library directories. */
const char *ld_sysroot;
@@ -82,7 +82,7 @@ index ea72b14a301..1ae90a77749 100644
/* The canonical representation of ld_sysroot. */
char *ld_canon_sysroot;
diff --git a/ld/ldmain.h b/ld/ldmain.h
-index f6d05a02f59..e60292cd522 100644
+index 0238aed0ebd..2dd00db13b3 100644
--- a/ld/ldmain.h
+++ b/ld/ldmain.h
@@ -23,6 +23,7 @@
diff --git a/meta/recipes-devtools/binutils/binutils/0004-Point-scripts-location-to-libdir.patch b/meta/recipes-devtools/binutils/binutils/0004-Point-scripts-location-to-libdir.patch
index 8f323eb0c5..ab12c145ea 100644
--- a/meta/recipes-devtools/binutils/binutils/0004-Point-scripts-location-to-libdir.patch
+++ b/meta/recipes-devtools/binutils/binutils/0004-Point-scripts-location-to-libdir.patch
@@ -1,4 +1,4 @@
-From 7a7b777cdfded080aab1021fa6bcdb20345f5cfd Mon Sep 17 00:00:00 2001
+From 780109902b00652dfdc080607c614f210e96ae9b Mon Sep 17 00:00:00 2001
From: Khem Raj <raj.khem@gmail.com>
Date: Mon, 2 Mar 2015 01:09:58 +0000
Subject: [PATCH] Point scripts location to libdir
@@ -12,10 +12,10 @@ Signed-off-by: Khem Raj <raj.khem@gmail.com>
2 files changed, 2 insertions(+), 2 deletions(-)
diff --git a/ld/Makefile.am b/ld/Makefile.am
-index 61db131fb0d..5b5ee64d121 100644
+index 07801c2c3ac..a2bcffecf73 100644
--- a/ld/Makefile.am
+++ b/ld/Makefile.am
-@@ -51,7 +51,7 @@ AM_CFLAGS = $(WARN_CFLAGS) $(ELF_CLFAGS)
+@@ -51,7 +51,7 @@ AM_CFLAGS = $(WARN_CFLAGS) $(ELF_CFLAGS) $(JANSSON_CFLAGS) $(ZSTD_CFLAGS)
# We put the scripts in the directory $(scriptdir)/ldscripts.
# We can't put the scripts in $(datadir) because the SEARCH_DIR
# directives need to be different for native and cross linkers.
@@ -25,10 +25,10 @@ index 61db131fb0d..5b5ee64d121 100644
EMUL = @EMUL@
EMULATION_OFILES = @EMULATION_OFILES@
diff --git a/ld/Makefile.in b/ld/Makefile.in
-index 860eb21a785..d719747919c 100644
+index 6329a12a0d6..15311048855 100644
--- a/ld/Makefile.in
+++ b/ld/Makefile.in
-@@ -564,7 +564,7 @@ AM_CFLAGS = $(WARN_CFLAGS) $(ELF_CLFAGS)
+@@ -584,7 +584,7 @@ AM_CFLAGS = $(WARN_CFLAGS) $(ELF_CFLAGS) $(JANSSON_CFLAGS) $(ZSTD_CFLAGS)
# We put the scripts in the directory $(scriptdir)/ldscripts.
# We can't put the scripts in $(datadir) because the SEARCH_DIR
# directives need to be different for native and cross linkers.
diff --git a/meta/recipes-devtools/binutils/binutils/0005-Only-generate-an-RPATH-entry-if-LD_RUN_PATH-is-not-e.patch b/meta/recipes-devtools/binutils/binutils/0005-Only-generate-an-RPATH-entry-if-LD_RUN_PATH-is-not-e.patch
deleted file mode 100644
index 997774020e..0000000000
--- a/meta/recipes-devtools/binutils/binutils/0005-Only-generate-an-RPATH-entry-if-LD_RUN_PATH-is-not-e.patch
+++ /dev/null
@@ -1,37 +0,0 @@
-From edddb1f294d667eac94649ba0665fe464990ed18 Mon Sep 17 00:00:00 2001
-From: Khem Raj <raj.khem@gmail.com>
-Date: Mon, 2 Mar 2015 01:27:17 +0000
-Subject: [PATCH] Only generate an RPATH entry if LD_RUN_PATH is not empty
-
-for cases where -rpath isn't specified. debian (#151024)
-
-Upstream-Status: Pending
-
-Signed-off-by: Chris Chimelis <chris@debian.org>
-Signed-off-by: Khem Raj <raj.khem@gmail.com>
----
- ld/ldelf.c | 4 ++++
- 1 file changed, 4 insertions(+)
-
-diff --git a/ld/ldelf.c b/ld/ldelf.c
-index 34cbc60e5e9..b1965a9e96f 100644
---- a/ld/ldelf.c
-+++ b/ld/ldelf.c
-@@ -1277,6 +1277,8 @@ ldelf_after_open (int use_libpath, int native, int is_linux, int is_freebsd,
- && command_line.rpath == NULL)
- {
- path = (const char *) getenv ("LD_RUN_PATH");
-+ if ((path) && (strlen (path) == 0))
-+ path = NULL;
- if (path
- && ldelf_search_needed (path, &n, force,
- is_linux, elfsize))
-@@ -1636,6 +1638,8 @@ ldelf_before_allocation (char *audit, char *depaudit,
- rpath = command_line.rpath;
- if (rpath == NULL)
- rpath = (const char *) getenv ("LD_RUN_PATH");
-+ if ((rpath) && (strlen (rpath) == 0))
-+ rpath = NULL;
-
- for (abfd = link_info.input_bfds; abfd; abfd = abfd->link.next)
- if (bfd_get_flavour (abfd) == bfd_target_elf_flavour)
diff --git a/meta/recipes-devtools/binutils/binutils/0005-don-t-let-the-distro-compiler-point-to-the-wrong-ins.patch b/meta/recipes-devtools/binutils/binutils/0005-don-t-let-the-distro-compiler-point-to-the-wrong-ins.patch
new file mode 100644
index 0000000000..f7c3987542
--- /dev/null
+++ b/meta/recipes-devtools/binutils/binutils/0005-don-t-let-the-distro-compiler-point-to-the-wrong-ins.patch
@@ -0,0 +1,32 @@
+From c8cca8d2364c9f233d6e771136a4d4ff7f405c21 Mon Sep 17 00:00:00 2001
+From: Khem Raj <raj.khem@gmail.com>
+Date: Mon, 2 Mar 2015 01:39:01 +0000
+Subject: [PATCH] don't let the distro compiler point to the wrong installation
+ location
+
+Thanks to RP for helping find the source code causing the issue.
+
+2010/08/13
+Nitin A Kamble <nitin.a.kamble@intel.com>
+
+Upstream-Status: Inappropriate [embedded specific]
+
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+---
+ libiberty/Makefile.in | 3 ++-
+ 1 file changed, 2 insertions(+), 1 deletion(-)
+
+diff --git a/libiberty/Makefile.in b/libiberty/Makefile.in
+index 85c4b6b6ef8..cb6e91c8f9b 100644
+--- a/libiberty/Makefile.in
++++ b/libiberty/Makefile.in
+@@ -385,7 +385,8 @@ install-strip: install
+ # multilib-specific flags, it's overridden by FLAGS_TO_PASS from the
+ # default multilib, so we have to take CFLAGS into account as well,
+ # since it will be passed the multilib flags.
+-MULTIOSDIR = `$(CC) $(CFLAGS) -print-multi-os-directory`
++#MULTIOSDIR = `$(CC) $(CFLAGS) -print-multi-os-directory`
++MULTIOSDIR = ""
+ install_to_libdir: all
+ if test -n "${target_header_dir}"; then \
+ ${mkinstalldirs} $(DESTDIR)$(libdir)/$(MULTIOSDIR); \
diff --git a/meta/recipes-devtools/binutils/binutils/0006-don-t-let-the-distro-compiler-point-to-the-wrong-ins.patch b/meta/recipes-devtools/binutils/binutils/0006-don-t-let-the-distro-compiler-point-to-the-wrong-ins.patch
deleted file mode 100644
index 507d0b1b2d..0000000000
--- a/meta/recipes-devtools/binutils/binutils/0006-don-t-let-the-distro-compiler-point-to-the-wrong-ins.patch
+++ /dev/null
@@ -1,32 +0,0 @@
-From fc9e8b99969bb32a4b009eab763bade6c554ef73 Mon Sep 17 00:00:00 2001
-From: Khem Raj <raj.khem@gmail.com>
-Date: Mon, 2 Mar 2015 01:39:01 +0000
-Subject: [PATCH] don't let the distro compiler point to the wrong installation
- location
-
-Thanks to RP for helping find the source code causing the issue.
-
-2010/08/13
-Nitin A Kamble <nitin.a.kamble@intel.com>
-
-Upstream-Status: Inappropriate [embedded specific]
-
-Signed-off-by: Khem Raj <raj.khem@gmail.com>
----
- libiberty/Makefile.in | 3 ++-
- 1 file changed, 2 insertions(+), 1 deletion(-)
-
-diff --git a/libiberty/Makefile.in b/libiberty/Makefile.in
-index abef3c4601b..880c8826482 100644
---- a/libiberty/Makefile.in
-+++ b/libiberty/Makefile.in
-@@ -385,7 +385,8 @@ install-strip: install
- # multilib-specific flags, it's overridden by FLAGS_TO_PASS from the
- # default multilib, so we have to take CFLAGS into account as well,
- # since it will be passed the multilib flags.
--MULTIOSDIR = `$(CC) $(CFLAGS) -print-multi-os-directory`
-+#MULTIOSDIR = `$(CC) $(CFLAGS) -print-multi-os-directory`
-+MULTIOSDIR = ""
- install_to_libdir: all
- if test -n "${target_header_dir}"; then \
- ${mkinstalldirs} $(DESTDIR)$(libdir)/$(MULTIOSDIR); \
diff --git a/meta/recipes-devtools/binutils/binutils/0006-warn-for-uses-of-system-directories-when-cross-linki.patch b/meta/recipes-devtools/binutils/binutils/0006-warn-for-uses-of-system-directories-when-cross-linki.patch
new file mode 100644
index 0000000000..bf6a1a3961
--- /dev/null
+++ b/meta/recipes-devtools/binutils/binutils/0006-warn-for-uses-of-system-directories-when-cross-linki.patch
@@ -0,0 +1,287 @@
+From a92c46e3703d038d7d810bdc91e8540ad9d9150a Mon Sep 17 00:00:00 2001
+From: Khem Raj <raj.khem@gmail.com>
+Date: Fri, 15 Jan 2016 06:31:09 +0000
+Subject: [PATCH] warn for uses of system directories when cross linking
+
+2008-07-02 Joseph Myers <joseph@codesourcery.com>
+
+ ld/
+ * ld.h (args_type): Add error_poison_system_directories.
+ * ld.texinfo (--error-poison-system-directories): Document.
+ * ldfile.c (ldfile_add_library_path): Check
+ command_line.error_poison_system_directories.
+ * ldmain.c (main): Initialize
+ command_line.error_poison_system_directories.
+ * lexsup.c (enum option_values): Add
+ OPTION_ERROR_POISON_SYSTEM_DIRECTORIES.
+ (ld_options): Add --error-poison-system-directories.
+ (parse_args): Handle new option.
+
+2007-06-13 Joseph Myers <joseph@codesourcery.com>
+
+ ld/
+ * config.in: Regenerate.
+ * ld.h (args_type): Add poison_system_directories.
+ * ld.texinfo (--no-poison-system-directories): Document.
+ * ldfile.c (ldfile_add_library_path): Check
+ command_line.poison_system_directories.
+ * ldmain.c (main): Initialize
+ command_line.poison_system_directories.
+ * lexsup.c (enum option_values): Add
+ OPTION_NO_POISON_SYSTEM_DIRECTORIES.
+ (ld_options): Add --no-poison-system-directories.
+ (parse_args): Handle new option.
+
+2007-04-20 Joseph Myers <joseph@codesourcery.com>
+
+ Merge from Sourcery G++ binutils 2.17:
+
+ 2007-03-20 Joseph Myers <joseph@codesourcery.com>
+ Based on patch by Mark Hatle <mark.hatle@windriver.com>.
+ ld/
+ * configure.in (--enable-poison-system-directories): New option.
+ * configure, config.in: Regenerate.
+ * ldfile.c (ldfile_add_library_path): If
+ ENABLE_POISON_SYSTEM_DIRECTORIES defined, warn for use of /lib,
+ /usr/lib, /usr/local/lib or /usr/X11R6/lib.
+
+Upstream-Status: Pending
+
+Signed-off-by: Mark Hatle <mark.hatle@windriver.com>
+Signed-off-by: Scott Garman <scott.a.garman@intel.com>
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+---
+ ld/config.in | 3 +++
+ ld/configure | 16 ++++++++++++++++
+ ld/configure.ac | 10 ++++++++++
+ ld/ld.h | 8 ++++++++
+ ld/ld.texi | 12 ++++++++++++
+ ld/ldfile.c | 17 +++++++++++++++++
+ ld/ldlex.h | 2 ++
+ ld/ldmain.c | 6 ++++--
+ ld/lexsup.c | 15 +++++++++++++++
+ 9 files changed, 87 insertions(+), 2 deletions(-)
+
+diff --git a/ld/config.in b/ld/config.in
+index 52d62f06ff0..b5944e7ba55 100644
+--- a/ld/config.in
++++ b/ld/config.in
+@@ -70,6 +70,9 @@
+ language is requested. */
+ #undef ENABLE_NLS
+
++/* Define to warn for use of native system library directories */
++#undef ENABLE_POISON_SYSTEM_DIRECTORIES
++
+ /* Additional extension a shared object might have. */
+ #undef EXTRA_SHLIB_EXTENSION
+
+diff --git a/ld/configure b/ld/configure
+index 6f8a05c3b6c..98a76d6da7f 100755
+--- a/ld/configure
++++ b/ld/configure
+@@ -844,6 +844,7 @@ with_lib_path
+ enable_targets
+ enable_64_bit_bfd
+ with_sysroot
++enable_poison_system_directories
+ enable_gold
+ enable_got
+ enable_compressed_debug_sections
+@@ -1534,6 +1535,8 @@ Optional Features:
+ --enable-checking enable run-time checks
+ --enable-targets alternative target configurations
+ --enable-64-bit-bfd 64-bit support (on hosts with narrower word sizes)
++ --enable-poison-system-directories
++ warn for use of native system library directories
+ --enable-gold[=ARG] build gold [ARG={default,yes,no}]
+ --enable-got=<type> GOT handling scheme (target, single, negative,
+ multigot)
+@@ -15566,6 +15569,19 @@ fi
+
+
+
++# Check whether --enable-poison-system-directories was given.
++if test "${enable_poison_system_directories+set}" = set; then :
++ enableval=$enable_poison_system_directories;
++else
++ enable_poison_system_directories=no
++fi
++
++if test "x${enable_poison_system_directories}" = "xyes"; then
++
++$as_echo "#define ENABLE_POISON_SYSTEM_DIRECTORIES 1" >>confdefs.h
++
++fi
++
+ # Check whether --enable-gold was given.
+ if test "${enable_gold+set}" = set; then :
+ enableval=$enable_gold; case "${enableval}" in
+diff --git a/ld/configure.ac b/ld/configure.ac
+index 4a11787ae71..104a531fb0f 100644
+--- a/ld/configure.ac
++++ b/ld/configure.ac
+@@ -103,6 +103,16 @@ AC_SUBST(use_sysroot)
+ AC_SUBST(TARGET_SYSTEM_ROOT)
+ AC_SUBST(TARGET_SYSTEM_ROOT_DEFINE)
+
++AC_ARG_ENABLE([poison-system-directories],
++ AS_HELP_STRING([--enable-poison-system-directories],
++ [warn for use of native system library directories]),,
++ [enable_poison_system_directories=no])
++if test "x${enable_poison_system_directories}" = "xyes"; then
++ AC_DEFINE([ENABLE_POISON_SYSTEM_DIRECTORIES],
++ [1],
++ [Define to warn for use of native system library directories])
++fi
++
+ dnl Use --enable-gold to decide if this linker should be the default.
+ dnl "install_as_default" is set to false if gold is the default linker.
+ dnl "installed_linker" is the installed BFD linker name.
+diff --git a/ld/ld.h b/ld/ld.h
+index 54d9079678c..9f0163b2911 100644
+--- a/ld/ld.h
++++ b/ld/ld.h
+@@ -166,6 +166,14 @@ typedef struct
+ in the linker script. */
+ bool force_group_allocation;
+
++ /* If TRUE (the default) warn for uses of system directories when
++ cross linking. */
++ bool poison_system_directories;
++
++ /* If TRUE (default FALSE) give an error for uses of system
++ directories when cross linking instead of a warning. */
++ bool error_poison_system_directories;
++
+ /* Big or little endian as set on command line. */
+ enum endian_enum endian;
+
+diff --git a/ld/ld.texi b/ld/ld.texi
+index 4fda259a552..22685f5f1dd 100644
+--- a/ld/ld.texi
++++ b/ld/ld.texi
+@@ -3119,6 +3119,18 @@ creation of the metadata note, if one had been enabled by an earlier
+ occurrence of the --package-metadata option.
+ If the linker has been built with libjansson, then the JSON string
+ will be validated.
++
++@kindex --no-poison-system-directories
++@item --no-poison-system-directories
++Do not warn for @option{-L} options using system directories such as
++@file{/usr/lib} when cross linking. This option is intended for use
++in chroot environments when such directories contain the correct
++libraries for the target system rather than the host.
++
++@kindex --error-poison-system-directories
++@item --error-poison-system-directories
++Give an error instead of a warning for @option{-L} options using
++system directories when cross linking.
+ @end table
+
+ @c man end
+diff --git a/ld/ldfile.c b/ld/ldfile.c
+index dc9875d8813..49d899ee49d 100644
+--- a/ld/ldfile.c
++++ b/ld/ldfile.c
+@@ -327,6 +327,23 @@ ldfile_add_library_path (const char *name, bool cmdline)
+ new_dirs->name = concat (ld_sysroot, name + strlen ("$SYSROOT"), (const char *) NULL);
+ else
+ new_dirs->name = xstrdup (name);
++
++#ifdef ENABLE_POISON_SYSTEM_DIRECTORIES
++ if (command_line.poison_system_directories
++ && ((!strncmp (name, "/lib", 4))
++ || (!strncmp (name, "/usr/lib", 8))
++ || (!strncmp (name, "/usr/local/lib", 14))
++ || (!strncmp (name, "/usr/X11R6/lib", 14))))
++ {
++ if (command_line.error_poison_system_directories)
++ einfo (_("%X%P: error: library search path \"%s\" is unsafe for "
++ "cross-compilation\n"), name);
++ else
++ einfo (_("%P: warning: library search path \"%s\" is unsafe for "
++ "cross-compilation\n"), name);
++ }
++#endif
++
+ }
+
+ /* Try to open a BFD for a lang_input_statement. */
+diff --git a/ld/ldlex.h b/ld/ldlex.h
+index 161a9d4d8dc..757fe2f7650 100644
+--- a/ld/ldlex.h
++++ b/ld/ldlex.h
+@@ -147,6 +147,8 @@ enum option_values
+ OPTION_PLUGIN,
+ OPTION_PLUGIN_OPT,
+ #endif /* BFD_SUPPORTS_PLUGINS */
++ OPTION_NO_POISON_SYSTEM_DIRECTORIES,
++ OPTION_ERROR_POISON_SYSTEM_DIRECTORIES,
+ OPTION_DEFAULT_SCRIPT,
+ OPTION_PRINT_OUTPUT_FORMAT,
+ OPTION_PRINT_SYSROOT,
+diff --git a/ld/ldmain.c b/ld/ldmain.c
+index 01b306e58a1..eddefc24332 100644
+--- a/ld/ldmain.c
++++ b/ld/ldmain.c
+@@ -326,6 +326,8 @@ main (int argc, char **argv)
+ command_line.warn_mismatch = true;
+ command_line.warn_search_mismatch = true;
+ command_line.check_section_addresses = -1;
++ command_line.poison_system_directories = true;
++ command_line.error_poison_system_directories = false;
+
+ /* We initialize DEMANGLING based on the environment variable
+ COLLECT_NO_DEMANGLE. The gcc collect2 program will demangle the
+@@ -1458,7 +1460,7 @@ undefined_symbol (struct bfd_link_info *info,
+ argv[1] = "undefined-symbol";
+ argv[2] = (char *) name;
+ argv[3] = NULL;
+-
++
+ if (verbose)
+ einfo (_("%P: About to run error handling script '%s' with arguments: '%s' '%s'\n"),
+ argv[0], argv[1], argv[2]);
+@@ -1479,7 +1481,7 @@ undefined_symbol (struct bfd_link_info *info,
+ carry on to issue the normal error message. */
+ }
+ #endif /* SUPPORT_ERROR_HANDLING_SCRIPT */
+-
++
+ if (section != NULL)
+ {
+ if (error_count < MAX_ERRORS_IN_A_ROW)
+diff --git a/ld/lexsup.c b/ld/lexsup.c
+index 099dff8ecde..e9939000b2e 100644
+--- a/ld/lexsup.c
++++ b/ld/lexsup.c
+@@ -642,6 +642,14 @@ static const struct ld_option ld_options[] =
+ " <method> is: share-unconflicted (default),\n"
+ " share-duplicated"),
+ TWO_DASHES },
++ { {"no-poison-system-directories", no_argument, NULL,
++ OPTION_NO_POISON_SYSTEM_DIRECTORIES},
++ '\0', NULL, N_("Do not warn for -L options using system directories"),
++ TWO_DASHES },
++ { {"error-poison-system-directories", no_argument, NULL,
++ + OPTION_ERROR_POISON_SYSTEM_DIRECTORIES},
++ '\0', NULL, N_("Give an error for -L options using system directories"),
++ TWO_DASHES },
+ };
+
+ #define OPTION_COUNT ARRAY_SIZE (ld_options)
+@@ -1818,6 +1826,13 @@ parse_args (unsigned argc, char **argv)
+
+ case OPTION_PRINT_MAP_LOCALS:
+ config.print_map_locals = true;
++
++ case OPTION_NO_POISON_SYSTEM_DIRECTORIES:
++ command_line.poison_system_directories = false;
++ break;
++
++ case OPTION_ERROR_POISON_SYSTEM_DIRECTORIES:
++ command_line.error_poison_system_directories = true;
+ break;
+
+ case OPTION_DEPENDENCY_FILE:
diff --git a/meta/recipes-devtools/binutils/binutils/0007-fix-the-incorrect-assembling-for-ppc-wait-mnemonic.patch b/meta/recipes-devtools/binutils/binutils/0007-fix-the-incorrect-assembling-for-ppc-wait-mnemonic.patch
new file mode 100644
index 0000000000..f9fa0d4f78
--- /dev/null
+++ b/meta/recipes-devtools/binutils/binutils/0007-fix-the-incorrect-assembling-for-ppc-wait-mnemonic.patch
@@ -0,0 +1,37 @@
+From d35fd95eee9930ab41d9ad637eacb35aa9fe6f44 Mon Sep 17 00:00:00 2001
+From: Zhenhua Luo <zhenhua.luo@nxp.com>
+Date: Sat, 11 Jun 2016 22:08:29 -0500
+Subject: [PATCH] fix the incorrect assembling for ppc wait mnemonic
+
+The wait mnemonic for ppc targets is incorrectly assembled into 0x7c00003c due
+to duplicated address definition with waitasec instruction. The issue causes
+kernel boot calltrace for ppc targets when wait instruction is executed.
+
+Upstream-Status: Pending
+Signed-off-by: Zhenhua Luo <zhenhua.luo@nxp.com>
+---
+ opcodes/ppc-opc.c | 4 +---
+ 1 file changed, 1 insertion(+), 3 deletions(-)
+
+diff --git a/opcodes/ppc-opc.c b/opcodes/ppc-opc.c
+index e55bfe846cd..66b37e36e6f 100644
+--- a/opcodes/ppc-opc.c
++++ b/opcodes/ppc-opc.c
+@@ -7138,8 +7138,6 @@ const struct powerpc_opcode powerpc_opcodes[] = {
+ {"waitasec", X(31,30), XRTRARB_MASK, POWER8, POWER9, {0}},
+ {"waitrsv", XWCPL(31,30,1,0),0xffffffff, POWER10, EXT, {0}},
+ {"pause_short", XWCPL(31,30,2,0),0xffffffff, POWER10, EXT, {0}},
+-{"wait", X(31,30), XWCPL_MASK, POWER10, 0, {WC, PL}},
+-{"wait", X(31,30), XWC_MASK, POWER9, POWER10, {WC}},
+
+ {"lwepx", X(31,31), X_MASK, E500MC|PPCA2, 0, {RT, RA0, RB}},
+
+@@ -7193,7 +7191,7 @@ const struct powerpc_opcode powerpc_opcodes[] = {
+
+ {"waitrsv", X(31,62)|(1<<21), 0xffffffff, E500MC|PPCA2, EXT, {0}},
+ {"waitimpl", X(31,62)|(2<<21), 0xffffffff, E500MC|PPCA2, EXT, {0}},
+-{"wait", X(31,62), XWC_MASK, E500MC|PPCA2, 0, {WC}},
++{"wait", X(31,62), XWC_MASK, E500MC|PPCA2|POWER9|POWER10, 0, {WC}},
+
+ {"dcbstep", XRT(31,63,0), XRT_MASK, E500MC|PPCA2, 0, {RA0, RB}},
+
diff --git a/meta/recipes-devtools/binutils/binutils/0007-warn-for-uses-of-system-directories-when-cross-linki.patch b/meta/recipes-devtools/binutils/binutils/0007-warn-for-uses-of-system-directories-when-cross-linki.patch
deleted file mode 100644
index 547bfcac68..0000000000
--- a/meta/recipes-devtools/binutils/binutils/0007-warn-for-uses-of-system-directories-when-cross-linki.patch
+++ /dev/null
@@ -1,288 +0,0 @@
-From 9fb1bafb20371d82b674778d2a8b5c9444fed417 Mon Sep 17 00:00:00 2001
-From: Khem Raj <raj.khem@gmail.com>
-Date: Fri, 15 Jan 2016 06:31:09 +0000
-Subject: [PATCH] warn for uses of system directories when cross linking
-
-2008-07-02 Joseph Myers <joseph@codesourcery.com>
-
- ld/
- * ld.h (args_type): Add error_poison_system_directories.
- * ld.texinfo (--error-poison-system-directories): Document.
- * ldfile.c (ldfile_add_library_path): Check
- command_line.error_poison_system_directories.
- * ldmain.c (main): Initialize
- command_line.error_poison_system_directories.
- * lexsup.c (enum option_values): Add
- OPTION_ERROR_POISON_SYSTEM_DIRECTORIES.
- (ld_options): Add --error-poison-system-directories.
- (parse_args): Handle new option.
-
-2007-06-13 Joseph Myers <joseph@codesourcery.com>
-
- ld/
- * config.in: Regenerate.
- * ld.h (args_type): Add poison_system_directories.
- * ld.texinfo (--no-poison-system-directories): Document.
- * ldfile.c (ldfile_add_library_path): Check
- command_line.poison_system_directories.
- * ldmain.c (main): Initialize
- command_line.poison_system_directories.
- * lexsup.c (enum option_values): Add
- OPTION_NO_POISON_SYSTEM_DIRECTORIES.
- (ld_options): Add --no-poison-system-directories.
- (parse_args): Handle new option.
-
-2007-04-20 Joseph Myers <joseph@codesourcery.com>
-
- Merge from Sourcery G++ binutils 2.17:
-
- 2007-03-20 Joseph Myers <joseph@codesourcery.com>
- Based on patch by Mark Hatle <mark.hatle@windriver.com>.
- ld/
- * configure.in (--enable-poison-system-directories): New option.
- * configure, config.in: Regenerate.
- * ldfile.c (ldfile_add_library_path): If
- ENABLE_POISON_SYSTEM_DIRECTORIES defined, warn for use of /lib,
- /usr/lib, /usr/local/lib or /usr/X11R6/lib.
-
-Upstream-Status: Pending
-
-Signed-off-by: Mark Hatle <mark.hatle@windriver.com>
-Signed-off-by: Scott Garman <scott.a.garman@intel.com>
-Signed-off-by: Khem Raj <raj.khem@gmail.com>
----
- ld/config.in | 3 +++
- ld/configure | 16 ++++++++++++++++
- ld/configure.ac | 10 ++++++++++
- ld/ld.h | 8 ++++++++
- ld/ld.texi | 12 ++++++++++++
- ld/ldfile.c | 17 +++++++++++++++++
- ld/ldlex.h | 2 ++
- ld/ldmain.c | 6 ++++--
- ld/lexsup.c | 16 ++++++++++++++++
- 9 files changed, 88 insertions(+), 2 deletions(-)
-
-diff --git a/ld/config.in b/ld/config.in
-index 26d55a00d47..ffad464783c 100644
---- a/ld/config.in
-+++ b/ld/config.in
-@@ -43,6 +43,9 @@
- language is requested. */
- #undef ENABLE_NLS
-
-+/* Define to warn for use of native system library directories */
-+#undef ENABLE_POISON_SYSTEM_DIRECTORIES
-+
- /* Additional extension a shared object might have. */
- #undef EXTRA_SHLIB_EXTENSION
-
-diff --git a/ld/configure b/ld/configure
-index 26150d62898..1f9ec8ec580 100755
---- a/ld/configure
-+++ b/ld/configure
-@@ -831,6 +831,7 @@ with_lib_path
- enable_targets
- enable_64_bit_bfd
- with_sysroot
-+enable_poison_system_directories
- enable_gold
- enable_got
- enable_compressed_debug_sections
-@@ -1500,6 +1501,8 @@ Optional Features:
- --enable-checking enable run-time checks
- --enable-targets alternative target configurations
- --enable-64-bit-bfd 64-bit support (on hosts with narrower word sizes)
-+ --enable-poison-system-directories
-+ warn for use of native system library directories
- --enable-gold[=ARG] build gold [ARG={default,yes,no}]
- --enable-got=<type> GOT handling scheme (target, single, negative,
- multigot)
-@@ -15312,6 +15315,19 @@ fi
-
-
-
-+# Check whether --enable-poison-system-directories was given.
-+if test "${enable_poison_system_directories+set}" = set; then :
-+ enableval=$enable_poison_system_directories;
-+else
-+ enable_poison_system_directories=no
-+fi
-+
-+if test "x${enable_poison_system_directories}" = "xyes"; then
-+
-+$as_echo "#define ENABLE_POISON_SYSTEM_DIRECTORIES 1" >>confdefs.h
-+
-+fi
-+
- # Check whether --enable-gold was given.
- if test "${enable_gold+set}" = set; then :
- enableval=$enable_gold; case "${enableval}" in
-diff --git a/ld/configure.ac b/ld/configure.ac
-index 7f4cff079b7..57d1abff870 100644
---- a/ld/configure.ac
-+++ b/ld/configure.ac
-@@ -102,6 +102,16 @@ AC_SUBST(use_sysroot)
- AC_SUBST(TARGET_SYSTEM_ROOT)
- AC_SUBST(TARGET_SYSTEM_ROOT_DEFINE)
-
-+AC_ARG_ENABLE([poison-system-directories],
-+ AS_HELP_STRING([--enable-poison-system-directories],
-+ [warn for use of native system library directories]),,
-+ [enable_poison_system_directories=no])
-+if test "x${enable_poison_system_directories}" = "xyes"; then
-+ AC_DEFINE([ENABLE_POISON_SYSTEM_DIRECTORIES],
-+ [1],
-+ [Define to warn for use of native system library directories])
-+fi
-+
- dnl Use --enable-gold to decide if this linker should be the default.
- dnl "install_as_default" is set to false if gold is the default linker.
- dnl "installed_linker" is the installed BFD linker name.
-diff --git a/ld/ld.h b/ld/ld.h
-index f3086bf30de..db5064243c7 100644
---- a/ld/ld.h
-+++ b/ld/ld.h
-@@ -162,6 +162,14 @@ typedef struct
- in the linker script. */
- bool force_group_allocation;
-
-+ /* If TRUE (the default) warn for uses of system directories when
-+ cross linking. */
-+ bool poison_system_directories;
-+
-+ /* If TRUE (default FALSE) give an error for uses of system
-+ directories when cross linking instead of a warning. */
-+ bool error_poison_system_directories;
-+
- /* Big or little endian as set on command line. */
- enum endian_enum endian;
-
-diff --git a/ld/ld.texi b/ld/ld.texi
-index fc75e9b3625..dca697d626e 100644
---- a/ld/ld.texi
-+++ b/ld/ld.texi
-@@ -2892,6 +2892,18 @@ string identifying the original linked file does not change.
-
- Passing @code{none} for @var{style} disables the setting from any
- @code{--build-id} options earlier on the command line.
-+
-+@kindex --no-poison-system-directories
-+@item --no-poison-system-directories
-+Do not warn for @option{-L} options using system directories such as
-+@file{/usr/lib} when cross linking. This option is intended for use
-+in chroot environments when such directories contain the correct
-+libraries for the target system rather than the host.
-+
-+@kindex --error-poison-system-directories
-+@item --error-poison-system-directories
-+Give an error instead of a warning for @option{-L} options using
-+system directories when cross linking.
- @end table
-
- @c man end
-diff --git a/ld/ldfile.c b/ld/ldfile.c
-index 731ae5f7aed..dd8f03fd960 100644
---- a/ld/ldfile.c
-+++ b/ld/ldfile.c
-@@ -117,6 +117,23 @@ ldfile_add_library_path (const char *name, bool cmdline)
- new_dirs->name = concat (ld_sysroot, name + strlen ("$SYSROOT"), (const char *) NULL);
- else
- new_dirs->name = xstrdup (name);
-+
-+#ifdef ENABLE_POISON_SYSTEM_DIRECTORIES
-+ if (command_line.poison_system_directories
-+ && ((!strncmp (name, "/lib", 4))
-+ || (!strncmp (name, "/usr/lib", 8))
-+ || (!strncmp (name, "/usr/local/lib", 14))
-+ || (!strncmp (name, "/usr/X11R6/lib", 14))))
-+ {
-+ if (command_line.error_poison_system_directories)
-+ einfo (_("%X%P: error: library search path \"%s\" is unsafe for "
-+ "cross-compilation\n"), name);
-+ else
-+ einfo (_("%P: warning: library search path \"%s\" is unsafe for "
-+ "cross-compilation\n"), name);
-+ }
-+#endif
-+
- }
-
- /* Try to open a BFD for a lang_input_statement. */
-diff --git a/ld/ldlex.h b/ld/ldlex.h
-index bc58fea73cc..a1595589197 100644
---- a/ld/ldlex.h
-+++ b/ld/ldlex.h
-@@ -164,6 +164,8 @@ enum option_values
- OPTION_CTF_VARIABLES,
- OPTION_NO_CTF_VARIABLES,
- OPTION_CTF_SHARE_TYPES,
-+ OPTION_NO_POISON_SYSTEM_DIRECTORIES,
-+ OPTION_ERROR_POISON_SYSTEM_DIRECTORIES,
- };
-
- /* The initial parser states. */
-diff --git a/ld/ldmain.c b/ld/ldmain.c
-index 1ae90a77749..f40750fd816 100644
---- a/ld/ldmain.c
-+++ b/ld/ldmain.c
-@@ -322,6 +322,8 @@ main (int argc, char **argv)
- command_line.warn_mismatch = true;
- command_line.warn_search_mismatch = true;
- command_line.check_section_addresses = -1;
-+ command_line.poison_system_directories = true;
-+ command_line.error_poison_system_directories = false;
-
- /* We initialize DEMANGLING based on the environment variable
- COLLECT_NO_DEMANGLE. The gcc collect2 program will demangle the
-@@ -1447,7 +1449,7 @@ undefined_symbol (struct bfd_link_info *info,
- argv[1] = "undefined-symbol";
- argv[2] = (char *) name;
- argv[3] = NULL;
--
-+
- if (verbose)
- einfo (_("%P: About to run error handling script '%s' with arguments: '%s' '%s'\n"),
- argv[0], argv[1], argv[2]);
-@@ -1468,7 +1470,7 @@ undefined_symbol (struct bfd_link_info *info,
- carry on to issue the normal error message. */
- }
- #endif /* SUPPORT_ERROR_HANDLING_SCRIPT */
--
-+
- if (section != NULL)
- {
- if (error_count < MAX_ERRORS_IN_A_ROW)
-diff --git a/ld/lexsup.c b/ld/lexsup.c
-index 5acc47ed5a0..d03c6136ccf 100644
---- a/ld/lexsup.c
-+++ b/ld/lexsup.c
-@@ -600,6 +600,14 @@ static const struct ld_option ld_options[] =
- " <method> is: share-unconflicted (default),\n"
- " share-duplicated"),
- TWO_DASHES },
-+ { {"no-poison-system-directories", no_argument, NULL,
-+ OPTION_NO_POISON_SYSTEM_DIRECTORIES},
-+ '\0', NULL, N_("Do not warn for -L options using system directories"),
-+ TWO_DASHES },
-+ { {"error-poison-system-directories", no_argument, NULL,
-+ + OPTION_ERROR_POISON_SYSTEM_DIRECTORIES},
-+ '\0', NULL, N_("Give an error for -L options using system directories"),
-+ TWO_DASHES },
- };
-
- #define OPTION_COUNT ARRAY_SIZE (ld_options)
-@@ -1702,6 +1710,14 @@ parse_args (unsigned argc, char **argv)
- config.print_map_discarded = true;
- break;
-
-+ case OPTION_NO_POISON_SYSTEM_DIRECTORIES:
-+ command_line.poison_system_directories = false;
-+ break;
-+
-+ case OPTION_ERROR_POISON_SYSTEM_DIRECTORIES:
-+ command_line.error_poison_system_directories = true;
-+ break;
-+
- case OPTION_DEPENDENCY_FILE:
- config.dependency_file = optarg;
- break;
diff --git a/meta/recipes-devtools/binutils/binutils/0008-Use-libtool-2.4.patch b/meta/recipes-devtools/binutils/binutils/0008-Use-libtool-2.4.patch
new file mode 100644
index 0000000000..8f74c639d5
--- /dev/null
+++ b/meta/recipes-devtools/binutils/binutils/0008-Use-libtool-2.4.patch
@@ -0,0 +1,34406 @@
+From da089ddacb6506f406acd8939389183a4356ebf0 Mon Sep 17 00:00:00 2001
+From: Khem Raj <raj.khem@gmail.com>
+Date: Sun, 14 Feb 2016 17:04:07 +0000
+Subject: [PATCH] Use libtool 2.4
+
+get libtool sysroot support
+
+Upstream-Status: Pending
+
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+---
+ bfd/Makefile.in | 3 +
+ bfd/configure | 1333 +++++++++---
+ bfd/configure.ac | 2 +-
+ binutils/Makefile.in | 3 +
+ binutils/configure | 1331 +++++++++---
+ gas/Makefile.in | 3 +
+ gas/configure | 1331 +++++++++---
+ gprof/Makefile.in | 3 +
+ gprof/configure | 1331 +++++++++---
+ gprofng/Makefile.in | 2 +
+ gprofng/configure | 1693 ++++++++++++----
+ gprofng/doc/Makefile.in | 2 +
+ gprofng/gp-display-html/Makefile.in | 2 +
+ gprofng/libcollector/Makefile.in | 2 +
+ gprofng/libcollector/configure | 1693 ++++++++++++----
+ gprofng/src/Makefile.in | 2 +
+ ld/Makefile.in | 3 +
+ ld/configure | 1694 ++++++++++++----
+ libbacktrace/Makefile.in | 108 +-
+ libbacktrace/aclocal.m4 | 73 +-
+ libbacktrace/configure | 1441 +++++++++----
+ libctf/Makefile.in | 2 +
+ libctf/configure | 1334 +++++++++---
+ libsframe/Makefile.in | 344 ++--
+ libsframe/aclocal.m4 | 220 +-
+ libsframe/configure | 1553 ++++++++++----
+ libtool.m4 | 1085 ++++++----
+ ltmain.sh | 2921 ++++++++++++++++++---------
+ ltoptions.m4 | 2 +-
+ ltversion.m4 | 12 +-
+ lt~obsolete.m4 | 2 +-
+ opcodes/Makefile.in | 3 +
+ opcodes/configure | 1331 +++++++++---
+ zlib/Makefile.in | 3 +
+ zlib/configure | 1331 +++++++++---
+ 35 files changed, 16094 insertions(+), 6104 deletions(-)
+
+diff --git a/bfd/Makefile.in b/bfd/Makefile.in
+index faaa0c424b8..5371e093163 100644
+--- a/bfd/Makefile.in
++++ b/bfd/Makefile.in
+@@ -340,6 +340,7 @@ DATADIRNAME = @DATADIRNAME@
+ DEBUGDIR = @DEBUGDIR@
+ DEFS = @DEFS@
+ DEPDIR = @DEPDIR@
++DLLTOOL = @DLLTOOL@
+ DSYMUTIL = @DSYMUTIL@
+ DUMPBIN = @DUMPBIN@
+ ECHO_C = @ECHO_C@
+@@ -379,6 +380,7 @@ LTLIBINTL = @LTLIBINTL@
+ LTLIBOBJS = @LTLIBOBJS@
+ MAINT = @MAINT@
+ MAKEINFO = @MAKEINFO@
++MANIFEST_TOOL = @MANIFEST_TOOL@
+ MKDIR_P = @MKDIR_P@
+ MKINSTALLDIRS = @MKINSTALLDIRS@
+ MSGFMT = @MSGFMT@
+@@ -425,6 +427,7 @@ abs_builddir = @abs_builddir@
+ abs_srcdir = @abs_srcdir@
+ abs_top_builddir = @abs_top_builddir@
+ abs_top_srcdir = @abs_top_srcdir@
++ac_ct_AR = @ac_ct_AR@
+ ac_ct_CC = @ac_ct_CC@
+ ac_ct_DUMPBIN = @ac_ct_DUMPBIN@
+ all_backends = @all_backends@
+diff --git a/bfd/configure b/bfd/configure
+index acbac2f364a..a53a9d52719 100755
+--- a/bfd/configure
++++ b/bfd/configure
+@@ -712,6 +712,9 @@ OTOOL
+ LIPO
+ NMEDIT
+ DSYMUTIL
++MANIFEST_TOOL
++ac_ct_AR
++DLLTOOL
+ OBJDUMP
+ LN_S
+ NM
+@@ -830,6 +833,7 @@ enable_static
+ with_pic
+ enable_fast_install
+ with_gnu_ld
++with_libtool_sysroot
+ enable_libtool_lock
+ enable_plugins
+ enable_largefile
+@@ -1526,6 +1530,8 @@ Optional Packages:
+ --with-pic try to use only PIC/non-PIC objects [default=use
+ both]
+ --with-gnu-ld assume the C compiler uses GNU ld [default=no]
++ --with-libtool-sysroot=DIR Search for dependent libraries within DIR
++ (or the compiler's sysroot if not specified).
+ --with-mmap try using mmap for BFD input files if available
+ --with-separate-debug-dir=DIR
+ Look for global separate debug info in DIR
+@@ -5063,8 +5069,8 @@ esac
+
+
+
+-macro_version='2.2.7a'
+-macro_revision='1.3134'
++macro_version='2.4'
++macro_revision='1.3293'
+
+
+
+@@ -5104,7 +5110,7 @@ ECHO=$ECHO$ECHO$ECHO$ECHO$ECHO$ECHO
+ { $as_echo "$as_me:${as_lineno-$LINENO}: checking how to print strings" >&5
+ $as_echo_n "checking how to print strings... " >&6; }
+ # Test print first, because it will be a builtin if present.
+-if test "X`print -r -- -n 2>/dev/null`" = X-n && \
++if test "X`( print -r -- -n ) 2>/dev/null`" = X-n && \
+ test "X`print -r -- $ECHO 2>/dev/null`" = "X$ECHO"; then
+ ECHO='print -r --'
+ elif test "X`printf %s $ECHO 2>/dev/null`" = "X$ECHO"; then
+@@ -5797,8 +5803,8 @@ $as_echo_n "checking whether the shell understands some XSI constructs... " >&6;
+ # Try some XSI features
+ xsi_shell=no
+ ( _lt_dummy="a/b/c"
+- test "${_lt_dummy##*/},${_lt_dummy%/*},"${_lt_dummy%"$_lt_dummy"}, \
+- = c,a/b,, \
++ test "${_lt_dummy##*/},${_lt_dummy%/*},${_lt_dummy#??}"${_lt_dummy%"$_lt_dummy"}, \
++ = c,a/b,b/c, \
+ && eval 'test $(( 1 + 1 )) -eq 2 \
+ && test "${#_lt_dummy}" -eq 5' ) >/dev/null 2>&1 \
+ && xsi_shell=yes
+@@ -5847,6 +5853,80 @@ esac
+
+
+
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to convert $build file names to $host format" >&5
++$as_echo_n "checking how to convert $build file names to $host format... " >&6; }
++if ${lt_cv_to_host_file_cmd+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ case $host in
++ *-*-mingw* )
++ case $build in
++ *-*-mingw* ) # actually msys
++ lt_cv_to_host_file_cmd=func_convert_file_msys_to_w32
++ ;;
++ *-*-cygwin* )
++ lt_cv_to_host_file_cmd=func_convert_file_cygwin_to_w32
++ ;;
++ * ) # otherwise, assume *nix
++ lt_cv_to_host_file_cmd=func_convert_file_nix_to_w32
++ ;;
++ esac
++ ;;
++ *-*-cygwin* )
++ case $build in
++ *-*-mingw* ) # actually msys
++ lt_cv_to_host_file_cmd=func_convert_file_msys_to_cygwin
++ ;;
++ *-*-cygwin* )
++ lt_cv_to_host_file_cmd=func_convert_file_noop
++ ;;
++ * ) # otherwise, assume *nix
++ lt_cv_to_host_file_cmd=func_convert_file_nix_to_cygwin
++ ;;
++ esac
++ ;;
++ * ) # unhandled hosts (and "normal" native builds)
++ lt_cv_to_host_file_cmd=func_convert_file_noop
++ ;;
++esac
++
++fi
++
++to_host_file_cmd=$lt_cv_to_host_file_cmd
++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_to_host_file_cmd" >&5
++$as_echo "$lt_cv_to_host_file_cmd" >&6; }
++
++
++
++
++
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to convert $build file names to toolchain format" >&5
++$as_echo_n "checking how to convert $build file names to toolchain format... " >&6; }
++if ${lt_cv_to_tool_file_cmd+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ #assume ordinary cross tools, or native build.
++lt_cv_to_tool_file_cmd=func_convert_file_noop
++case $host in
++ *-*-mingw* )
++ case $build in
++ *-*-mingw* ) # actually msys
++ lt_cv_to_tool_file_cmd=func_convert_file_msys_to_w32
++ ;;
++ esac
++ ;;
++esac
++
++fi
++
++to_tool_file_cmd=$lt_cv_to_tool_file_cmd
++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_to_tool_file_cmd" >&5
++$as_echo "$lt_cv_to_tool_file_cmd" >&6; }
++
++
++
++
++
+ { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $LD option to reload object files" >&5
+ $as_echo_n "checking for $LD option to reload object files... " >&6; }
+ if ${lt_cv_ld_reload_flag+:} false; then :
+@@ -5863,6 +5943,11 @@ case $reload_flag in
+ esac
+ reload_cmds='$LD$reload_flag -o $output$reload_objs'
+ case $host_os in
++ cygwin* | mingw* | pw32* | cegcc*)
++ if test "$GCC" != yes; then
++ reload_cmds=false
++ fi
++ ;;
+ darwin*)
+ if test "$GCC" = yes; then
+ reload_cmds='$LTCC $LTCFLAGS -nostdlib ${wl}-r -o $output$reload_objs'
+@@ -6031,7 +6116,8 @@ mingw* | pw32*)
+ lt_cv_deplibs_check_method='file_magic ^x86 archive import|^x86 DLL'
+ lt_cv_file_magic_cmd='func_win32_libid'
+ else
+- lt_cv_deplibs_check_method='file_magic file format pei*-i386(.*architecture: i386)?'
++ # Keep this pattern in sync with the one in func_win32_libid.
++ lt_cv_deplibs_check_method='file_magic file format (pei*-i386(.*architecture: i386)?|pe-arm-wince|pe-x86-64)'
+ lt_cv_file_magic_cmd='$OBJDUMP -f'
+ fi
+ ;;
+@@ -6190,6 +6276,21 @@ esac
+ fi
+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_deplibs_check_method" >&5
+ $as_echo "$lt_cv_deplibs_check_method" >&6; }
++
++file_magic_glob=
++want_nocaseglob=no
++if test "$build" = "$host"; then
++ case $host_os in
++ mingw* | pw32*)
++ if ( shopt | grep nocaseglob ) >/dev/null 2>&1; then
++ want_nocaseglob=yes
++ else
++ file_magic_glob=`echo aAbBcCdDeEfFgGhHiIjJkKlLmMnNoOpPqQrRsStTuUvVwWxXyYzZ | $SED -e "s/\(..\)/s\/[\1]\/[\1]\/g;/g"`
++ fi
++ ;;
++ esac
++fi
++
+ file_magic_cmd=$lt_cv_file_magic_cmd
+ deplibs_check_method=$lt_cv_deplibs_check_method
+ test -z "$deplibs_check_method" && deplibs_check_method=unknown
+@@ -6205,6 +6306,157 @@ test -z "$deplibs_check_method" && deplibs_check_method=unknown
+
+
+
++
++
++
++
++
++
++
++
++
++
++if test -n "$ac_tool_prefix"; then
++ # Extract the first word of "${ac_tool_prefix}dlltool", so it can be a program name with args.
++set dummy ${ac_tool_prefix}dlltool; ac_word=$2
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
++$as_echo_n "checking for $ac_word... " >&6; }
++if ${ac_cv_prog_DLLTOOL+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ if test -n "$DLLTOOL"; then
++ ac_cv_prog_DLLTOOL="$DLLTOOL" # Let the user override the test.
++else
++as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
++for as_dir in $PATH
++do
++ IFS=$as_save_IFS
++ test -z "$as_dir" && as_dir=.
++ for ac_exec_ext in '' $ac_executable_extensions; do
++ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
++ ac_cv_prog_DLLTOOL="${ac_tool_prefix}dlltool"
++ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
++ break 2
++ fi
++done
++ done
++IFS=$as_save_IFS
++
++fi
++fi
++DLLTOOL=$ac_cv_prog_DLLTOOL
++if test -n "$DLLTOOL"; then
++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $DLLTOOL" >&5
++$as_echo "$DLLTOOL" >&6; }
++else
++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
++$as_echo "no" >&6; }
++fi
++
++
++fi
++if test -z "$ac_cv_prog_DLLTOOL"; then
++ ac_ct_DLLTOOL=$DLLTOOL
++ # Extract the first word of "dlltool", so it can be a program name with args.
++set dummy dlltool; ac_word=$2
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
++$as_echo_n "checking for $ac_word... " >&6; }
++if ${ac_cv_prog_ac_ct_DLLTOOL+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ if test -n "$ac_ct_DLLTOOL"; then
++ ac_cv_prog_ac_ct_DLLTOOL="$ac_ct_DLLTOOL" # Let the user override the test.
++else
++as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
++for as_dir in $PATH
++do
++ IFS=$as_save_IFS
++ test -z "$as_dir" && as_dir=.
++ for ac_exec_ext in '' $ac_executable_extensions; do
++ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
++ ac_cv_prog_ac_ct_DLLTOOL="dlltool"
++ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
++ break 2
++ fi
++done
++ done
++IFS=$as_save_IFS
++
++fi
++fi
++ac_ct_DLLTOOL=$ac_cv_prog_ac_ct_DLLTOOL
++if test -n "$ac_ct_DLLTOOL"; then
++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_DLLTOOL" >&5
++$as_echo "$ac_ct_DLLTOOL" >&6; }
++else
++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
++$as_echo "no" >&6; }
++fi
++
++ if test "x$ac_ct_DLLTOOL" = x; then
++ DLLTOOL="false"
++ else
++ case $cross_compiling:$ac_tool_warned in
++yes:)
++{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
++$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
++ac_tool_warned=yes ;;
++esac
++ DLLTOOL=$ac_ct_DLLTOOL
++ fi
++else
++ DLLTOOL="$ac_cv_prog_DLLTOOL"
++fi
++
++test -z "$DLLTOOL" && DLLTOOL=dlltool
++
++
++
++
++
++
++
++
++
++
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to associate runtime and link libraries" >&5
++$as_echo_n "checking how to associate runtime and link libraries... " >&6; }
++if ${lt_cv_sharedlib_from_linklib_cmd+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ lt_cv_sharedlib_from_linklib_cmd='unknown'
++
++case $host_os in
++cygwin* | mingw* | pw32* | cegcc*)
++ # two different shell functions defined in ltmain.sh
++ # decide which to use based on capabilities of $DLLTOOL
++ case `$DLLTOOL --help 2>&1` in
++ *--identify-strict*)
++ lt_cv_sharedlib_from_linklib_cmd=func_cygming_dll_for_implib
++ ;;
++ *)
++ lt_cv_sharedlib_from_linklib_cmd=func_cygming_dll_for_implib_fallback
++ ;;
++ esac
++ ;;
++*)
++ # fallback: assume linklib IS sharedlib
++ lt_cv_sharedlib_from_linklib_cmd="$ECHO"
++ ;;
++esac
++
++fi
++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_sharedlib_from_linklib_cmd" >&5
++$as_echo "$lt_cv_sharedlib_from_linklib_cmd" >&6; }
++sharedlib_from_linklib_cmd=$lt_cv_sharedlib_from_linklib_cmd
++test -z "$sharedlib_from_linklib_cmd" && sharedlib_from_linklib_cmd=$ECHO
++
++
++
++
++
++
++
+ plugin_option=
+ plugin_names="liblto_plugin.so liblto_plugin-0.dll cyglto_plugin-0.dll"
+ for plugin in $plugin_names; do
+@@ -6219,8 +6471,10 @@ for plugin in $plugin_names; do
+ done
+
+ if test -n "$ac_tool_prefix"; then
+- # Extract the first word of "${ac_tool_prefix}ar", so it can be a program name with args.
+-set dummy ${ac_tool_prefix}ar; ac_word=$2
++ for ac_prog in ar
++ do
++ # Extract the first word of "$ac_tool_prefix$ac_prog", so it can be a program name with args.
++set dummy $ac_tool_prefix$ac_prog; ac_word=$2
+ { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+ $as_echo_n "checking for $ac_word... " >&6; }
+ if ${ac_cv_prog_AR+:} false; then :
+@@ -6236,7 +6490,7 @@ do
+ test -z "$as_dir" && as_dir=.
+ for ac_exec_ext in '' $ac_executable_extensions; do
+ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
+- ac_cv_prog_AR="${ac_tool_prefix}ar"
++ ac_cv_prog_AR="$ac_tool_prefix$ac_prog"
+ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+ break 2
+ fi
+@@ -6256,11 +6510,15 @@ $as_echo "no" >&6; }
+ fi
+
+
++ test -n "$AR" && break
++ done
+ fi
+-if test -z "$ac_cv_prog_AR"; then
++if test -z "$AR"; then
+ ac_ct_AR=$AR
+- # Extract the first word of "ar", so it can be a program name with args.
+-set dummy ar; ac_word=$2
++ for ac_prog in ar
++do
++ # Extract the first word of "$ac_prog", so it can be a program name with args.
++set dummy $ac_prog; ac_word=$2
+ { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+ $as_echo_n "checking for $ac_word... " >&6; }
+ if ${ac_cv_prog_ac_ct_AR+:} false; then :
+@@ -6276,7 +6534,7 @@ do
+ test -z "$as_dir" && as_dir=.
+ for ac_exec_ext in '' $ac_executable_extensions; do
+ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
+- ac_cv_prog_ac_ct_AR="ar"
++ ac_cv_prog_ac_ct_AR="$ac_prog"
+ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+ break 2
+ fi
+@@ -6295,6 +6553,10 @@ else
+ $as_echo "no" >&6; }
+ fi
+
++
++ test -n "$ac_ct_AR" && break
++done
++
+ if test "x$ac_ct_AR" = x; then
+ AR="false"
+ else
+@@ -6306,25 +6568,20 @@ ac_tool_warned=yes ;;
+ esac
+ AR=$ac_ct_AR
+ fi
+-else
+- AR="$ac_cv_prog_AR"
+ fi
+
+-test -z "$AR" && AR=ar
+-if test -n "$plugin_option"; then
+- if $AR --help 2>&1 | grep -q "\--plugin"; then
+- touch conftest.c
+- $AR $plugin_option rc conftest.a conftest.c
+- if test "$?" != 0; then
+- { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: Failed: $AR $plugin_option rc" >&5
++ touch conftest.c
++ $AR $plugin_option rc conftest.a conftest.c
++ if test "$?" != 0; then
++ { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: Failed: $AR $plugin_option rc" >&5
+ $as_echo "$as_me: WARNING: Failed: $AR $plugin_option rc" >&2;}
+- else
+- AR="$AR $plugin_option"
+- fi
+- rm -f conftest.*
++ else
++ AR="$AR $plugin_option"
+ fi
+-fi
+-test -z "$AR_FLAGS" && AR_FLAGS=cru
++ rm -f conftest.*
++: ${AR=ar}
++: ${AR_FLAGS=cru}
++
+
+
+
+@@ -6335,6 +6592,63 @@ test -z "$AR_FLAGS" && AR_FLAGS=cru
+
+
+
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for archiver @FILE support" >&5
++$as_echo_n "checking for archiver @FILE support... " >&6; }
++if ${lt_cv_ar_at_file+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ lt_cv_ar_at_file=no
++ cat confdefs.h - <<_ACEOF >conftest.$ac_ext
++/* end confdefs.h. */
++
++int
++main ()
++{
++
++ ;
++ return 0;
++}
++_ACEOF
++if ac_fn_c_try_compile "$LINENO"; then :
++ echo conftest.$ac_objext > conftest.lst
++ lt_ar_try='$AR $AR_FLAGS libconftest.a @conftest.lst >&5'
++ { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$lt_ar_try\""; } >&5
++ (eval $lt_ar_try) 2>&5
++ ac_status=$?
++ $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
++ test $ac_status = 0; }
++ if test "$ac_status" -eq 0; then
++ # Ensure the archiver fails upon bogus file names.
++ rm -f conftest.$ac_objext libconftest.a
++ { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$lt_ar_try\""; } >&5
++ (eval $lt_ar_try) 2>&5
++ ac_status=$?
++ $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
++ test $ac_status = 0; }
++ if test "$ac_status" -ne 0; then
++ lt_cv_ar_at_file=@
++ fi
++ fi
++ rm -f conftest.* libconftest.a
++
++fi
++rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
++
++fi
++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_ar_at_file" >&5
++$as_echo "$lt_cv_ar_at_file" >&6; }
++
++if test "x$lt_cv_ar_at_file" = xno; then
++ archiver_list_spec=
++else
++ archiver_list_spec=$lt_cv_ar_at_file
++fi
++
++
++
++
++
++
+
+ if test -n "$ac_tool_prefix"; then
+ # Extract the first word of "${ac_tool_prefix}strip", so it can be a program name with args.
+@@ -6675,8 +6989,8 @@ esac
+ lt_cv_sys_global_symbol_to_cdecl="sed -n -e 's/^T .* \(.*\)$/extern int \1();/p' -e 's/^$symcode* .* \(.*\)$/extern char \1;/p'"
+
+ # Transform an extracted symbol line into symbol name and symbol address
+-lt_cv_sys_global_symbol_to_c_name_address="sed -n -e 's/^: \([^ ]*\) $/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"\2\", (void *) \&\2},/p'"
+-lt_cv_sys_global_symbol_to_c_name_address_lib_prefix="sed -n -e 's/^: \([^ ]*\) $/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \(lib[^ ]*\)$/ {\"\2\", (void *) \&\2},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"lib\2\", (void *) \&\2},/p'"
++lt_cv_sys_global_symbol_to_c_name_address="sed -n -e 's/^: \([^ ]*\)[ ]*$/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"\2\", (void *) \&\2},/p'"
++lt_cv_sys_global_symbol_to_c_name_address_lib_prefix="sed -n -e 's/^: \([^ ]*\)[ ]*$/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \(lib[^ ]*\)$/ {\"\2\", (void *) \&\2},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"lib\2\", (void *) \&\2},/p'"
+
+ # Handle CRLF in mingw tool chain
+ opt_cr=
+@@ -6712,6 +7026,7 @@ for ac_symprfx in "" "_"; do
+ else
+ lt_cv_sys_global_symbol_pipe="sed -n -e 's/^.*[ ]\($symcode$symcode*\)[ ][ ]*$ac_symprfx$sympat$opt_cr$/$symxfrm/p'"
+ fi
++ lt_cv_sys_global_symbol_pipe="$lt_cv_sys_global_symbol_pipe | sed '/ __gnu_lto/d'"
+
+ # Check to see that the pipe works correctly.
+ pipe_works=no
+@@ -6753,6 +7068,18 @@ _LT_EOF
+ if $GREP ' nm_test_var$' "$nlist" >/dev/null; then
+ if $GREP ' nm_test_func$' "$nlist" >/dev/null; then
+ cat <<_LT_EOF > conftest.$ac_ext
++/* Keep this code in sync between libtool.m4, ltmain, lt_system.h, and tests. */
++#if defined(_WIN32) || defined(__CYGWIN__) || defined(_WIN32_WCE)
++/* DATA imports from DLLs on WIN32 con't be const, because runtime
++ relocations are performed -- see ld's documentation on pseudo-relocs. */
++# define LT_DLSYM_CONST
++#elif defined(__osf__)
++/* This system does not cope well with relocations in const data. */
++# define LT_DLSYM_CONST
++#else
++# define LT_DLSYM_CONST const
++#endif
++
+ #ifdef __cplusplus
+ extern "C" {
+ #endif
+@@ -6764,7 +7091,7 @@ _LT_EOF
+ cat <<_LT_EOF >> conftest.$ac_ext
+
+ /* The mapping between symbol names and symbols. */
+-const struct {
++LT_DLSYM_CONST struct {
+ const char *name;
+ void *address;
+ }
+@@ -6790,8 +7117,8 @@ static const void *lt_preloaded_setup() {
+ _LT_EOF
+ # Now try linking the two files.
+ mv conftest.$ac_objext conftstm.$ac_objext
+- lt_save_LIBS="$LIBS"
+- lt_save_CFLAGS="$CFLAGS"
++ lt_globsym_save_LIBS=$LIBS
++ lt_globsym_save_CFLAGS=$CFLAGS
+ LIBS="conftstm.$ac_objext"
+ CFLAGS="$CFLAGS$lt_prog_compiler_no_builtin_flag"
+ if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_link\""; } >&5
+@@ -6801,8 +7128,8 @@ _LT_EOF
+ test $ac_status = 0; } && test -s conftest${ac_exeext}; then
+ pipe_works=yes
+ fi
+- LIBS="$lt_save_LIBS"
+- CFLAGS="$lt_save_CFLAGS"
++ LIBS=$lt_globsym_save_LIBS
++ CFLAGS=$lt_globsym_save_CFLAGS
+ else
+ echo "cannot find nm_test_func in $nlist" >&5
+ fi
+@@ -6839,6 +7166,14 @@ else
+ $as_echo "ok" >&6; }
+ fi
+
++# Response file support.
++if test "$lt_cv_nm_interface" = "MS dumpbin"; then
++ nm_file_list_spec='@'
++elif $NM --help 2>/dev/null | grep '[@]FILE' >/dev/null; then
++ nm_file_list_spec='@'
++fi
++
++
+
+
+
+@@ -6857,6 +7192,47 @@ fi
+
+
+
++
++
++
++
++
++
++
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for sysroot" >&5
++$as_echo_n "checking for sysroot... " >&6; }
++
++# Check whether --with-libtool-sysroot was given.
++if test "${with_libtool_sysroot+set}" = set; then :
++ withval=$with_libtool_sysroot;
++else
++ with_libtool_sysroot=no
++fi
++
++
++lt_sysroot=
++case ${with_libtool_sysroot} in #(
++ yes)
++ if test "$GCC" = yes; then
++ lt_sysroot=`$CC --print-sysroot 2>/dev/null`
++ fi
++ ;; #(
++ /*)
++ lt_sysroot=`echo "$with_libtool_sysroot" | sed -e "$sed_quote_subst"`
++ ;; #(
++ no|'')
++ ;; #(
++ *)
++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: ${with_libtool_sysroot}" >&5
++$as_echo "${with_libtool_sysroot}" >&6; }
++ as_fn_error $? "The sysroot must be an absolute path." "$LINENO" 5
++ ;;
++esac
++
++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: ${lt_sysroot:-no}" >&5
++$as_echo "${lt_sysroot:-no}" >&6; }
++
++
+
+
+
+@@ -7066,6 +7442,123 @@ esac
+
+ need_locks="$enable_libtool_lock"
+
++if test -n "$ac_tool_prefix"; then
++ # Extract the first word of "${ac_tool_prefix}mt", so it can be a program name with args.
++set dummy ${ac_tool_prefix}mt; ac_word=$2
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
++$as_echo_n "checking for $ac_word... " >&6; }
++if ${ac_cv_prog_MANIFEST_TOOL+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ if test -n "$MANIFEST_TOOL"; then
++ ac_cv_prog_MANIFEST_TOOL="$MANIFEST_TOOL" # Let the user override the test.
++else
++as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
++for as_dir in $PATH
++do
++ IFS=$as_save_IFS
++ test -z "$as_dir" && as_dir=.
++ for ac_exec_ext in '' $ac_executable_extensions; do
++ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
++ ac_cv_prog_MANIFEST_TOOL="${ac_tool_prefix}mt"
++ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
++ break 2
++ fi
++done
++ done
++IFS=$as_save_IFS
++
++fi
++fi
++MANIFEST_TOOL=$ac_cv_prog_MANIFEST_TOOL
++if test -n "$MANIFEST_TOOL"; then
++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $MANIFEST_TOOL" >&5
++$as_echo "$MANIFEST_TOOL" >&6; }
++else
++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
++$as_echo "no" >&6; }
++fi
++
++
++fi
++if test -z "$ac_cv_prog_MANIFEST_TOOL"; then
++ ac_ct_MANIFEST_TOOL=$MANIFEST_TOOL
++ # Extract the first word of "mt", so it can be a program name with args.
++set dummy mt; ac_word=$2
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
++$as_echo_n "checking for $ac_word... " >&6; }
++if ${ac_cv_prog_ac_ct_MANIFEST_TOOL+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ if test -n "$ac_ct_MANIFEST_TOOL"; then
++ ac_cv_prog_ac_ct_MANIFEST_TOOL="$ac_ct_MANIFEST_TOOL" # Let the user override the test.
++else
++as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
++for as_dir in $PATH
++do
++ IFS=$as_save_IFS
++ test -z "$as_dir" && as_dir=.
++ for ac_exec_ext in '' $ac_executable_extensions; do
++ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
++ ac_cv_prog_ac_ct_MANIFEST_TOOL="mt"
++ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
++ break 2
++ fi
++done
++ done
++IFS=$as_save_IFS
++
++fi
++fi
++ac_ct_MANIFEST_TOOL=$ac_cv_prog_ac_ct_MANIFEST_TOOL
++if test -n "$ac_ct_MANIFEST_TOOL"; then
++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_MANIFEST_TOOL" >&5
++$as_echo "$ac_ct_MANIFEST_TOOL" >&6; }
++else
++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
++$as_echo "no" >&6; }
++fi
++
++ if test "x$ac_ct_MANIFEST_TOOL" = x; then
++ MANIFEST_TOOL=":"
++ else
++ case $cross_compiling:$ac_tool_warned in
++yes:)
++{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
++$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
++ac_tool_warned=yes ;;
++esac
++ MANIFEST_TOOL=$ac_ct_MANIFEST_TOOL
++ fi
++else
++ MANIFEST_TOOL="$ac_cv_prog_MANIFEST_TOOL"
++fi
++
++test -z "$MANIFEST_TOOL" && MANIFEST_TOOL=mt
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking if $MANIFEST_TOOL is a manifest tool" >&5
++$as_echo_n "checking if $MANIFEST_TOOL is a manifest tool... " >&6; }
++if ${lt_cv_path_mainfest_tool+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ lt_cv_path_mainfest_tool=no
++ echo "$as_me:$LINENO: $MANIFEST_TOOL '-?'" >&5
++ $MANIFEST_TOOL '-?' 2>conftest.err > conftest.out
++ cat conftest.err >&5
++ if $GREP 'Manifest Tool' conftest.out > /dev/null; then
++ lt_cv_path_mainfest_tool=yes
++ fi
++ rm -f conftest*
++fi
++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_path_mainfest_tool" >&5
++$as_echo "$lt_cv_path_mainfest_tool" >&6; }
++if test "x$lt_cv_path_mainfest_tool" != xyes; then
++ MANIFEST_TOOL=:
++fi
++
++
++
++
++
+
+ case $host_os in
+ rhapsody* | darwin*)
+@@ -7629,6 +8122,8 @@ _LT_EOF
+ $LTCC $LTCFLAGS -c -o conftest.o conftest.c 2>&5
+ echo "$AR cru libconftest.a conftest.o" >&5
+ $AR cru libconftest.a conftest.o 2>&5
++ echo "$RANLIB libconftest.a" >&5
++ $RANLIB libconftest.a 2>&5
+ cat > conftest.c << _LT_EOF
+ int main() { return 0;}
+ _LT_EOF
+@@ -8182,8 +8677,6 @@ fi
+ lt_prog_compiler_pic=
+ lt_prog_compiler_static=
+
+-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $compiler option to produce PIC" >&5
+-$as_echo_n "checking for $compiler option to produce PIC... " >&6; }
+
+ if test "$GCC" = yes; then
+ lt_prog_compiler_wl='-Wl,'
+@@ -8349,6 +8842,12 @@ $as_echo_n "checking for $compiler option to produce PIC... " >&6; }
+ lt_prog_compiler_pic='--shared'
+ lt_prog_compiler_static='--static'
+ ;;
++ nagfor*)
++ # NAG Fortran compiler
++ lt_prog_compiler_wl='-Wl,-Wl,,'
++ lt_prog_compiler_pic='-PIC'
++ lt_prog_compiler_static='-Bstatic'
++ ;;
+ pgcc* | pgf77* | pgf90* | pgf95* | pgfortran*)
+ # Portland Group compilers (*not* the Pentium gcc compiler,
+ # which looks to be a dead project)
+@@ -8411,7 +8910,7 @@ $as_echo_n "checking for $compiler option to produce PIC... " >&6; }
+ lt_prog_compiler_pic='-KPIC'
+ lt_prog_compiler_static='-Bstatic'
+ case $cc_basename in
+- f77* | f90* | f95*)
++ f77* | f90* | f95* | sunf77* | sunf90* | sunf95*)
+ lt_prog_compiler_wl='-Qoption ld ';;
+ *)
+ lt_prog_compiler_wl='-Wl,';;
+@@ -8468,13 +8967,17 @@ case $host_os in
+ lt_prog_compiler_pic="$lt_prog_compiler_pic -DPIC"
+ ;;
+ esac
+-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_prog_compiler_pic" >&5
+-$as_echo "$lt_prog_compiler_pic" >&6; }
+-
+-
+-
+-
+
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $compiler option to produce PIC" >&5
++$as_echo_n "checking for $compiler option to produce PIC... " >&6; }
++if ${lt_cv_prog_compiler_pic+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ lt_cv_prog_compiler_pic=$lt_prog_compiler_pic
++fi
++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_pic" >&5
++$as_echo "$lt_cv_prog_compiler_pic" >&6; }
++lt_prog_compiler_pic=$lt_cv_prog_compiler_pic
+
+ #
+ # Check to make sure the PIC flag actually works.
+@@ -8535,6 +9038,11 @@ fi
+
+
+
++
++
++
++
++
+ #
+ # Check to make sure the static flag actually works.
+ #
+@@ -8885,7 +9393,8 @@ _LT_EOF
+ allow_undefined_flag=unsupported
+ always_export_symbols=no
+ enable_shared_with_static_runtimes=yes
+- export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1 DATA/'\'' | $SED -e '\''/^[AITW][ ]/s/.*[ ]//'\'' | sort | uniq > $export_symbols'
++ export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1 DATA/;s/^.*[ ]__nm__\([^ ]*\)[ ][^ ]*/\1 DATA/;/^I[ ]/d;/^[AITW][ ]/s/.* //'\'' | sort | uniq > $export_symbols'
++ exclude_expsyms='[_]+GLOBAL_OFFSET_TABLE_|[_]+GLOBAL__[FID]_.*|[_]+head_[A-Za-z0-9_]+_dll|[A-Za-z0-9_]+_dll_iname'
+
+ if $LD --help 2>&1 | $GREP 'auto-import' > /dev/null; then
+ archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib'
+@@ -8984,12 +9493,12 @@ _LT_EOF
+ whole_archive_flag_spec='--whole-archive$convenience --no-whole-archive'
+ hardcode_libdir_flag_spec=
+ hardcode_libdir_flag_spec_ld='-rpath $libdir'
+- archive_cmds='$LD -shared $libobjs $deplibs $compiler_flags -soname $soname -o $lib'
++ archive_cmds='$LD -shared $libobjs $deplibs $linker_flags -soname $soname -o $lib'
+ if test "x$supports_anon_versioning" = xyes; then
+ archive_expsym_cmds='echo "{ global:" > $output_objdir/$libname.ver~
+ cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~
+ echo "local: *; };" >> $output_objdir/$libname.ver~
+- $LD -shared $libobjs $deplibs $compiler_flags -soname $soname -version-script $output_objdir/$libname.ver -o $lib'
++ $LD -shared $libobjs $deplibs $linker_flags -soname $soname -version-script $output_objdir/$libname.ver -o $lib'
+ fi
+ ;;
+ esac
+@@ -9003,8 +9512,8 @@ _LT_EOF
+ archive_cmds='$LD -Bshareable $libobjs $deplibs $linker_flags -o $lib'
+ wlarc=
+ else
+- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
+- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
++ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
++ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
+ fi
+ ;;
+
+@@ -9022,8 +9531,8 @@ _LT_EOF
+
+ _LT_EOF
+ elif $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then
+- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
+- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
++ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
++ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
+ else
+ ld_shlibs=no
+ fi
+@@ -9069,8 +9578,8 @@ _LT_EOF
+
+ *)
+ if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then
+- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
+- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
++ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
++ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
+ else
+ ld_shlibs=no
+ fi
+@@ -9200,7 +9709,13 @@ _LT_EOF
+ allow_undefined_flag='-berok'
+ # Determine the default libpath from the value encoded in an
+ # empty executable.
+- cat confdefs.h - <<_ACEOF >conftest.$ac_ext
++ if test "${lt_cv_aix_libpath+set}" = set; then
++ aix_libpath=$lt_cv_aix_libpath
++else
++ if ${lt_cv_aix_libpath_+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+ /* end confdefs.h. */
+
+ int
+@@ -9213,22 +9728,29 @@ main ()
+ _ACEOF
+ if ac_fn_c_try_link "$LINENO"; then :
+
+-lt_aix_libpath_sed='
+- /Import File Strings/,/^$/ {
+- /^0/ {
+- s/^0 *\(.*\)$/\1/
+- p
+- }
+- }'
+-aix_libpath=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
+-# Check for a 64-bit object if we didn't find anything.
+-if test -z "$aix_libpath"; then
+- aix_libpath=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
+-fi
++ lt_aix_libpath_sed='
++ /Import File Strings/,/^$/ {
++ /^0/ {
++ s/^0 *\([^ ]*\) *$/\1/
++ p
++ }
++ }'
++ lt_cv_aix_libpath_=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
++ # Check for a 64-bit object if we didn't find anything.
++ if test -z "$lt_cv_aix_libpath_"; then
++ lt_cv_aix_libpath_=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
++ fi
+ fi
+ rm -f core conftest.err conftest.$ac_objext \
+ conftest$ac_exeext conftest.$ac_ext
+-if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
++ if test -z "$lt_cv_aix_libpath_"; then
++ lt_cv_aix_libpath_="/usr/lib:/lib"
++ fi
++
++fi
++
++ aix_libpath=$lt_cv_aix_libpath_
++fi
+
+ hardcode_libdir_flag_spec='${wl}-blibpath:$libdir:'"$aix_libpath"
+ archive_expsym_cmds='$CC -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags `if test "x${allow_undefined_flag}" != "x"; then func_echo_all "${wl}${allow_undefined_flag}"; else :; fi` '"\${wl}$exp_sym_flag:\$export_symbols $shared_flag"
+@@ -9240,7 +9762,13 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
+ else
+ # Determine the default libpath from the value encoded in an
+ # empty executable.
+- cat confdefs.h - <<_ACEOF >conftest.$ac_ext
++ if test "${lt_cv_aix_libpath+set}" = set; then
++ aix_libpath=$lt_cv_aix_libpath
++else
++ if ${lt_cv_aix_libpath_+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+ /* end confdefs.h. */
+
+ int
+@@ -9253,22 +9781,29 @@ main ()
+ _ACEOF
+ if ac_fn_c_try_link "$LINENO"; then :
+
+-lt_aix_libpath_sed='
+- /Import File Strings/,/^$/ {
+- /^0/ {
+- s/^0 *\(.*\)$/\1/
+- p
+- }
+- }'
+-aix_libpath=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
+-# Check for a 64-bit object if we didn't find anything.
+-if test -z "$aix_libpath"; then
+- aix_libpath=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
+-fi
++ lt_aix_libpath_sed='
++ /Import File Strings/,/^$/ {
++ /^0/ {
++ s/^0 *\([^ ]*\) *$/\1/
++ p
++ }
++ }'
++ lt_cv_aix_libpath_=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
++ # Check for a 64-bit object if we didn't find anything.
++ if test -z "$lt_cv_aix_libpath_"; then
++ lt_cv_aix_libpath_=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
++ fi
+ fi
+ rm -f core conftest.err conftest.$ac_objext \
+ conftest$ac_exeext conftest.$ac_ext
+-if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
++ if test -z "$lt_cv_aix_libpath_"; then
++ lt_cv_aix_libpath_="/usr/lib:/lib"
++ fi
++
++fi
++
++ aix_libpath=$lt_cv_aix_libpath_
++fi
+
+ hardcode_libdir_flag_spec='${wl}-blibpath:$libdir:'"$aix_libpath"
+ # Warning - without using the other run time loading flags,
+@@ -9313,20 +9848,63 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
+ # Microsoft Visual C++.
+ # hardcode_libdir_flag_spec is actually meaningless, as there is
+ # no search path for DLLs.
+- hardcode_libdir_flag_spec=' '
+- allow_undefined_flag=unsupported
+- # Tell ltmain to make .lib files, not .a files.
+- libext=lib
+- # Tell ltmain to make .dll files, not .so files.
+- shrext_cmds=".dll"
+- # FIXME: Setting linknames here is a bad hack.
+- archive_cmds='$CC -o $lib $libobjs $compiler_flags `func_echo_all "$deplibs" | $SED '\''s/ -lc$//'\''` -link -dll~linknames='
+- # The linker will automatically build a .lib file if we build a DLL.
+- old_archive_from_new_cmds='true'
+- # FIXME: Should let the user specify the lib program.
+- old_archive_cmds='lib -OUT:$oldlib$oldobjs$old_deplibs'
+- fix_srcfile_path='`cygpath -w "$srcfile"`'
+- enable_shared_with_static_runtimes=yes
++ case $cc_basename in
++ cl*)
++ # Native MSVC
++ hardcode_libdir_flag_spec=' '
++ allow_undefined_flag=unsupported
++ always_export_symbols=yes
++ file_list_spec='@'
++ # Tell ltmain to make .lib files, not .a files.
++ libext=lib
++ # Tell ltmain to make .dll files, not .so files.
++ shrext_cmds=".dll"
++ # FIXME: Setting linknames here is a bad hack.
++ archive_cmds='$CC -o $output_objdir/$soname $libobjs $compiler_flags $deplibs -Wl,-dll~linknames='
++ archive_expsym_cmds='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then
++ sed -n -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' -e '1\\\!p' < $export_symbols > $output_objdir/$soname.exp;
++ else
++ sed -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' < $export_symbols > $output_objdir/$soname.exp;
++ fi~
++ $CC -o $tool_output_objdir$soname $libobjs $compiler_flags $deplibs "@$tool_output_objdir$soname.exp" -Wl,-DLL,-IMPLIB:"$tool_output_objdir$libname.dll.lib"~
++ linknames='
++ # The linker will not automatically build a static lib if we build a DLL.
++ # _LT_TAGVAR(old_archive_from_new_cmds, )='true'
++ enable_shared_with_static_runtimes=yes
++ export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1,DATA/'\'' | $SED -e '\''/^[AITW][ ]/s/.*[ ]//'\'' | sort | uniq > $export_symbols'
++ # Don't use ranlib
++ old_postinstall_cmds='chmod 644 $oldlib'
++ postlink_cmds='lt_outputfile="@OUTPUT@"~
++ lt_tool_outputfile="@TOOL_OUTPUT@"~
++ case $lt_outputfile in
++ *.exe|*.EXE) ;;
++ *)
++ lt_outputfile="$lt_outputfile.exe"
++ lt_tool_outputfile="$lt_tool_outputfile.exe"
++ ;;
++ esac~
++ if test "$MANIFEST_TOOL" != ":" && test -f "$lt_outputfile.manifest"; then
++ $MANIFEST_TOOL -manifest "$lt_tool_outputfile.manifest" -outputresource:"$lt_tool_outputfile" || exit 1;
++ $RM "$lt_outputfile.manifest";
++ fi'
++ ;;
++ *)
++ # Assume MSVC wrapper
++ hardcode_libdir_flag_spec=' '
++ allow_undefined_flag=unsupported
++ # Tell ltmain to make .lib files, not .a files.
++ libext=lib
++ # Tell ltmain to make .dll files, not .so files.
++ shrext_cmds=".dll"
++ # FIXME: Setting linknames here is a bad hack.
++ archive_cmds='$CC -o $lib $libobjs $compiler_flags `func_echo_all "$deplibs" | $SED '\''s/ -lc$//'\''` -link -dll~linknames='
++ # The linker will automatically build a .lib file if we build a DLL.
++ old_archive_from_new_cmds='true'
++ # FIXME: Should let the user specify the lib program.
++ old_archive_cmds='lib -OUT:$oldlib$oldobjs$old_deplibs'
++ enable_shared_with_static_runtimes=yes
++ ;;
++ esac
+ ;;
+
+ darwin* | rhapsody*)
+@@ -9387,7 +9965,7 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
+
+ # FreeBSD 3 and greater uses gcc -shared to do shared libraries.
+ freebsd* | dragonfly*)
+- archive_cmds='$CC -shared -o $lib $libobjs $deplibs $compiler_flags'
++ archive_cmds='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags'
+ hardcode_libdir_flag_spec='-R$libdir'
+ hardcode_direct=yes
+ hardcode_shlibpath_var=no
+@@ -9395,7 +9973,7 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
+
+ hpux9*)
+ if test "$GCC" = yes; then
+- archive_cmds='$RM $output_objdir/$soname~$CC -shared -fPIC ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $libobjs $deplibs $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib'
++ archive_cmds='$RM $output_objdir/$soname~$CC -shared $pic_flag ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $libobjs $deplibs $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib'
+ else
+ archive_cmds='$RM $output_objdir/$soname~$LD -b +b $install_libdir -o $output_objdir/$soname $libobjs $deplibs $linker_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib'
+ fi
+@@ -9411,7 +9989,7 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
+
+ hpux10*)
+ if test "$GCC" = yes && test "$with_gnu_ld" = no; then
+- archive_cmds='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'
++ archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'
+ else
+ archive_cmds='$LD -b +h $soname +b $install_libdir -o $lib $libobjs $deplibs $linker_flags'
+ fi
+@@ -9435,10 +10013,10 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
+ archive_cmds='$CC -shared ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags'
+ ;;
+ ia64*)
+- archive_cmds='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags'
++ archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags'
+ ;;
+ *)
+- archive_cmds='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'
++ archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'
+ ;;
+ esac
+ else
+@@ -9517,23 +10095,36 @@ fi
+
+ irix5* | irix6* | nonstopux*)
+ if test "$GCC" = yes; then
+- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
++ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
+ # Try to use the -exported_symbol ld option, if it does not
+ # work, assume that -exports_file does not work either and
+ # implicitly export all symbols.
+- save_LDFLAGS="$LDFLAGS"
+- LDFLAGS="$LDFLAGS -shared ${wl}-exported_symbol ${wl}foo ${wl}-update_registry ${wl}/dev/null"
+- cat confdefs.h - <<_ACEOF >conftest.$ac_ext
++ # This should be the same for all languages, so no per-tag cache variable.
++ { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the $host_os linker accepts -exported_symbol" >&5
++$as_echo_n "checking whether the $host_os linker accepts -exported_symbol... " >&6; }
++if ${lt_cv_irix_exported_symbol+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ save_LDFLAGS="$LDFLAGS"
++ LDFLAGS="$LDFLAGS -shared ${wl}-exported_symbol ${wl}foo ${wl}-update_registry ${wl}/dev/null"
++ cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+ /* end confdefs.h. */
+-int foo(void) {}
++int foo (void) { return 0; }
+ _ACEOF
+ if ac_fn_c_try_link "$LINENO"; then :
+- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations ${wl}-exports_file ${wl}$export_symbols -o $lib'
+-
++ lt_cv_irix_exported_symbol=yes
++else
++ lt_cv_irix_exported_symbol=no
+ fi
+ rm -f core conftest.err conftest.$ac_objext \
+ conftest$ac_exeext conftest.$ac_ext
+- LDFLAGS="$save_LDFLAGS"
++ LDFLAGS="$save_LDFLAGS"
++fi
++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_irix_exported_symbol" >&5
++$as_echo "$lt_cv_irix_exported_symbol" >&6; }
++ if test "$lt_cv_irix_exported_symbol" = yes; then
++ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations ${wl}-exports_file ${wl}$export_symbols -o $lib'
++ fi
+ else
+ archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib'
+ archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -exports_file $export_symbols -o $lib'
+@@ -9618,7 +10209,7 @@ rm -f core conftest.err conftest.$ac_objext \
+ osf4* | osf5*) # as osf3* with the addition of -msym flag
+ if test "$GCC" = yes; then
+ allow_undefined_flag=' ${wl}-expect_unresolved ${wl}\*'
+- archive_cmds='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
++ archive_cmds='$CC -shared${allow_undefined_flag} $pic_flag $libobjs $deplibs $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
+ hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir'
+ else
+ allow_undefined_flag=' -expect_unresolved \*'
+@@ -9637,9 +10228,9 @@ rm -f core conftest.err conftest.$ac_objext \
+ no_undefined_flag=' -z defs'
+ if test "$GCC" = yes; then
+ wlarc='${wl}'
+- archive_cmds='$CC -shared ${wl}-z ${wl}text ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags'
++ archive_cmds='$CC -shared $pic_flag ${wl}-z ${wl}text ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags'
+ archive_expsym_cmds='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~
+- $CC -shared ${wl}-z ${wl}text ${wl}-M ${wl}$lib.exp ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp'
++ $CC -shared $pic_flag ${wl}-z ${wl}text ${wl}-M ${wl}$lib.exp ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp'
+ else
+ case `$CC -V 2>&1` in
+ *"Compilers 5.0"*)
+@@ -10215,8 +10806,9 @@ cygwin* | mingw* | pw32* | cegcc*)
+ need_version=no
+ need_lib_prefix=no
+
+- case $GCC,$host_os in
+- yes,cygwin* | yes,mingw* | yes,pw32* | yes,cegcc*)
++ case $GCC,$cc_basename in
++ yes,*)
++ # gcc
+ library_names_spec='$libname.dll.a'
+ # DLL is installed to $(libdir)/../bin by postinstall_cmds
+ postinstall_cmds='base_file=`basename \${file}`~
+@@ -10249,13 +10841,71 @@ cygwin* | mingw* | pw32* | cegcc*)
+ library_names_spec='`echo ${libname} | sed -e 's/^lib/pw/'``echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}'
+ ;;
+ esac
++ dynamic_linker='Win32 ld.exe'
++ ;;
++
++ *,cl*)
++ # Native MSVC
++ libname_spec='$name'
++ soname_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}'
++ library_names_spec='${libname}.dll.lib'
++
++ case $build_os in
++ mingw*)
++ sys_lib_search_path_spec=
++ lt_save_ifs=$IFS
++ IFS=';'
++ for lt_path in $LIB
++ do
++ IFS=$lt_save_ifs
++ # Let DOS variable expansion print the short 8.3 style file name.
++ lt_path=`cd "$lt_path" 2>/dev/null && cmd //C "for %i in (".") do @echo %~si"`
++ sys_lib_search_path_spec="$sys_lib_search_path_spec $lt_path"
++ done
++ IFS=$lt_save_ifs
++ # Convert to MSYS style.
++ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | sed -e 's|\\\\|/|g' -e 's| \\([a-zA-Z]\\):| /\\1|g' -e 's|^ ||'`
++ ;;
++ cygwin*)
++ # Convert to unix form, then to dos form, then back to unix form
++ # but this time dos style (no spaces!) so that the unix form looks
++ # like /cygdrive/c/PROGRA~1:/cygdr...
++ sys_lib_search_path_spec=`cygpath --path --unix "$LIB"`
++ sys_lib_search_path_spec=`cygpath --path --dos "$sys_lib_search_path_spec" 2>/dev/null`
++ sys_lib_search_path_spec=`cygpath --path --unix "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"`
++ ;;
++ *)
++ sys_lib_search_path_spec="$LIB"
++ if $ECHO "$sys_lib_search_path_spec" | $GREP ';[c-zC-Z]:/' >/dev/null; then
++ # It is most probably a Windows format PATH.
++ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e 's/;/ /g'`
++ else
++ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"`
++ fi
++ # FIXME: find the short name or the path components, as spaces are
++ # common. (e.g. "Program Files" -> "PROGRA~1")
++ ;;
++ esac
++
++ # DLL is installed to $(libdir)/../bin by postinstall_cmds
++ postinstall_cmds='base_file=`basename \${file}`~
++ dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\${base_file}'\''i; echo \$dlname'\''`~
++ dldir=$destdir/`dirname \$dlpath`~
++ test -d \$dldir || mkdir -p \$dldir~
++ $install_prog $dir/$dlname \$dldir/$dlname'
++ postuninstall_cmds='dldll=`$SHELL 2>&1 -c '\''. $file; echo \$dlname'\''`~
++ dlpath=$dir/\$dldll~
++ $RM \$dlpath'
++ shlibpath_overrides_runpath=yes
++ dynamic_linker='Win32 link.exe'
+ ;;
+
+ *)
++ # Assume MSVC wrapper
+ library_names_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext} $libname.lib'
++ dynamic_linker='Win32 ld.exe'
+ ;;
+ esac
+- dynamic_linker='Win32 ld.exe'
+ # FIXME: first we should search . and the directory the executable is in
+ shlibpath_var=PATH
+ ;;
+@@ -11157,7 +11807,7 @@ else
+ lt_dlunknown=0; lt_dlno_uscore=1; lt_dlneed_uscore=2
+ lt_status=$lt_dlunknown
+ cat > conftest.$ac_ext <<_LT_EOF
+-#line 11160 "configure"
++#line $LINENO "configure"
+ #include "confdefs.h"
+
+ #if HAVE_DLFCN_H
+@@ -11201,10 +11851,10 @@ else
+ /* When -fvisbility=hidden is used, assume the code has been annotated
+ correspondingly for the symbols needed. */
+ #if defined(__GNUC__) && (((__GNUC__ == 3) && (__GNUC_MINOR__ >= 3)) || (__GNUC__ > 3))
+-void fnord () __attribute__((visibility("default")));
++int fnord () __attribute__((visibility("default")));
+ #endif
+
+-void fnord () { int i=42; }
++int fnord () { return 42; }
+ int main ()
+ {
+ void *self = dlopen (0, LT_DLGLOBAL|LT_DLLAZY_OR_NOW);
+@@ -11263,7 +11913,7 @@ else
+ lt_dlunknown=0; lt_dlno_uscore=1; lt_dlneed_uscore=2
+ lt_status=$lt_dlunknown
+ cat > conftest.$ac_ext <<_LT_EOF
+-#line 11266 "configure"
++#line $LINENO "configure"
+ #include "confdefs.h"
+
+ #if HAVE_DLFCN_H
+@@ -11307,10 +11957,10 @@ else
+ /* When -fvisbility=hidden is used, assume the code has been annotated
+ correspondingly for the symbols needed. */
+ #if defined(__GNUC__) && (((__GNUC__ == 3) && (__GNUC_MINOR__ >= 3)) || (__GNUC__ > 3))
+-void fnord () __attribute__((visibility("default")));
++int fnord () __attribute__((visibility("default")));
+ #endif
+
+-void fnord () { int i=42; }
++int fnord () { return 42; }
+ int main ()
+ {
+ void *self = dlopen (0, LT_DLGLOBAL|LT_DLLAZY_OR_NOW);
+@@ -15691,7 +16341,7 @@ SHARED_LDFLAGS=
+ if test "$enable_shared" = "yes"; then
+ x=`sed -n -e 's/^[ ]*PICFLAG[ ]*=[ ]*//p' < ../libiberty/Makefile | sed -n '$p'`
+ if test -n "$x"; then
+- SHARED_LIBADD="-L`pwd`/../libiberty/pic -liberty"
++ SHARED_LIBADD="`pwd`/../libiberty/pic/libiberty.a"
+ fi
+ fi
+
+@@ -18313,13 +18963,20 @@ exeext='`$ECHO "$exeext" | $SED "$delay_single_quote_subst"`'
+ lt_unset='`$ECHO "$lt_unset" | $SED "$delay_single_quote_subst"`'
+ lt_SP2NL='`$ECHO "$lt_SP2NL" | $SED "$delay_single_quote_subst"`'
+ lt_NL2SP='`$ECHO "$lt_NL2SP" | $SED "$delay_single_quote_subst"`'
++lt_cv_to_host_file_cmd='`$ECHO "$lt_cv_to_host_file_cmd" | $SED "$delay_single_quote_subst"`'
++lt_cv_to_tool_file_cmd='`$ECHO "$lt_cv_to_tool_file_cmd" | $SED "$delay_single_quote_subst"`'
+ reload_flag='`$ECHO "$reload_flag" | $SED "$delay_single_quote_subst"`'
+ reload_cmds='`$ECHO "$reload_cmds" | $SED "$delay_single_quote_subst"`'
+ OBJDUMP='`$ECHO "$OBJDUMP" | $SED "$delay_single_quote_subst"`'
+ deplibs_check_method='`$ECHO "$deplibs_check_method" | $SED "$delay_single_quote_subst"`'
+ file_magic_cmd='`$ECHO "$file_magic_cmd" | $SED "$delay_single_quote_subst"`'
++file_magic_glob='`$ECHO "$file_magic_glob" | $SED "$delay_single_quote_subst"`'
++want_nocaseglob='`$ECHO "$want_nocaseglob" | $SED "$delay_single_quote_subst"`'
++DLLTOOL='`$ECHO "$DLLTOOL" | $SED "$delay_single_quote_subst"`'
++sharedlib_from_linklib_cmd='`$ECHO "$sharedlib_from_linklib_cmd" | $SED "$delay_single_quote_subst"`'
+ AR='`$ECHO "$AR" | $SED "$delay_single_quote_subst"`'
+ AR_FLAGS='`$ECHO "$AR_FLAGS" | $SED "$delay_single_quote_subst"`'
++archiver_list_spec='`$ECHO "$archiver_list_spec" | $SED "$delay_single_quote_subst"`'
+ STRIP='`$ECHO "$STRIP" | $SED "$delay_single_quote_subst"`'
+ RANLIB='`$ECHO "$RANLIB" | $SED "$delay_single_quote_subst"`'
+ old_postinstall_cmds='`$ECHO "$old_postinstall_cmds" | $SED "$delay_single_quote_subst"`'
+@@ -18334,14 +18991,17 @@ lt_cv_sys_global_symbol_pipe='`$ECHO "$lt_cv_sys_global_symbol_pipe" | $SED "$de
+ lt_cv_sys_global_symbol_to_cdecl='`$ECHO "$lt_cv_sys_global_symbol_to_cdecl" | $SED "$delay_single_quote_subst"`'
+ lt_cv_sys_global_symbol_to_c_name_address='`$ECHO "$lt_cv_sys_global_symbol_to_c_name_address" | $SED "$delay_single_quote_subst"`'
+ lt_cv_sys_global_symbol_to_c_name_address_lib_prefix='`$ECHO "$lt_cv_sys_global_symbol_to_c_name_address_lib_prefix" | $SED "$delay_single_quote_subst"`'
++nm_file_list_spec='`$ECHO "$nm_file_list_spec" | $SED "$delay_single_quote_subst"`'
++lt_sysroot='`$ECHO "$lt_sysroot" | $SED "$delay_single_quote_subst"`'
+ objdir='`$ECHO "$objdir" | $SED "$delay_single_quote_subst"`'
+ MAGIC_CMD='`$ECHO "$MAGIC_CMD" | $SED "$delay_single_quote_subst"`'
+ lt_prog_compiler_no_builtin_flag='`$ECHO "$lt_prog_compiler_no_builtin_flag" | $SED "$delay_single_quote_subst"`'
+-lt_prog_compiler_wl='`$ECHO "$lt_prog_compiler_wl" | $SED "$delay_single_quote_subst"`'
+ lt_prog_compiler_pic='`$ECHO "$lt_prog_compiler_pic" | $SED "$delay_single_quote_subst"`'
++lt_prog_compiler_wl='`$ECHO "$lt_prog_compiler_wl" | $SED "$delay_single_quote_subst"`'
+ lt_prog_compiler_static='`$ECHO "$lt_prog_compiler_static" | $SED "$delay_single_quote_subst"`'
+ lt_cv_prog_compiler_c_o='`$ECHO "$lt_cv_prog_compiler_c_o" | $SED "$delay_single_quote_subst"`'
+ need_locks='`$ECHO "$need_locks" | $SED "$delay_single_quote_subst"`'
++MANIFEST_TOOL='`$ECHO "$MANIFEST_TOOL" | $SED "$delay_single_quote_subst"`'
+ DSYMUTIL='`$ECHO "$DSYMUTIL" | $SED "$delay_single_quote_subst"`'
+ NMEDIT='`$ECHO "$NMEDIT" | $SED "$delay_single_quote_subst"`'
+ LIPO='`$ECHO "$LIPO" | $SED "$delay_single_quote_subst"`'
+@@ -18374,12 +19034,12 @@ hardcode_shlibpath_var='`$ECHO "$hardcode_shlibpath_var" | $SED "$delay_single_q
+ hardcode_automatic='`$ECHO "$hardcode_automatic" | $SED "$delay_single_quote_subst"`'
+ inherit_rpath='`$ECHO "$inherit_rpath" | $SED "$delay_single_quote_subst"`'
+ link_all_deplibs='`$ECHO "$link_all_deplibs" | $SED "$delay_single_quote_subst"`'
+-fix_srcfile_path='`$ECHO "$fix_srcfile_path" | $SED "$delay_single_quote_subst"`'
+ always_export_symbols='`$ECHO "$always_export_symbols" | $SED "$delay_single_quote_subst"`'
+ export_symbols_cmds='`$ECHO "$export_symbols_cmds" | $SED "$delay_single_quote_subst"`'
+ exclude_expsyms='`$ECHO "$exclude_expsyms" | $SED "$delay_single_quote_subst"`'
+ include_expsyms='`$ECHO "$include_expsyms" | $SED "$delay_single_quote_subst"`'
+ prelink_cmds='`$ECHO "$prelink_cmds" | $SED "$delay_single_quote_subst"`'
++postlink_cmds='`$ECHO "$postlink_cmds" | $SED "$delay_single_quote_subst"`'
+ file_list_spec='`$ECHO "$file_list_spec" | $SED "$delay_single_quote_subst"`'
+ variables_saved_for_relink='`$ECHO "$variables_saved_for_relink" | $SED "$delay_single_quote_subst"`'
+ need_lib_prefix='`$ECHO "$need_lib_prefix" | $SED "$delay_single_quote_subst"`'
+@@ -18434,8 +19094,13 @@ reload_flag \
+ OBJDUMP \
+ deplibs_check_method \
+ file_magic_cmd \
++file_magic_glob \
++want_nocaseglob \
++DLLTOOL \
++sharedlib_from_linklib_cmd \
+ AR \
+ AR_FLAGS \
++archiver_list_spec \
+ STRIP \
+ RANLIB \
+ CC \
+@@ -18445,12 +19110,14 @@ lt_cv_sys_global_symbol_pipe \
+ lt_cv_sys_global_symbol_to_cdecl \
+ lt_cv_sys_global_symbol_to_c_name_address \
+ lt_cv_sys_global_symbol_to_c_name_address_lib_prefix \
++nm_file_list_spec \
+ lt_prog_compiler_no_builtin_flag \
+-lt_prog_compiler_wl \
+ lt_prog_compiler_pic \
++lt_prog_compiler_wl \
+ lt_prog_compiler_static \
+ lt_cv_prog_compiler_c_o \
+ need_locks \
++MANIFEST_TOOL \
+ DSYMUTIL \
+ NMEDIT \
+ LIPO \
+@@ -18466,7 +19133,6 @@ no_undefined_flag \
+ hardcode_libdir_flag_spec \
+ hardcode_libdir_flag_spec_ld \
+ hardcode_libdir_separator \
+-fix_srcfile_path \
+ exclude_expsyms \
+ include_expsyms \
+ file_list_spec \
+@@ -18502,6 +19168,7 @@ module_cmds \
+ module_expsym_cmds \
+ export_symbols_cmds \
+ prelink_cmds \
++postlink_cmds \
+ postinstall_cmds \
+ postuninstall_cmds \
+ finish_cmds \
+@@ -19278,7 +19945,8 @@ $as_echo X"$file" |
+ # NOTE: Changes made to this file will be lost: look at ltmain.sh.
+ #
+ # Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005,
+-# 2006, 2007, 2008, 2009 Free Software Foundation, Inc.
++# 2006, 2007, 2008, 2009, 2010 Free Software Foundation,
++# Inc.
+ # Written by Gordon Matzigkeit, 1996
+ #
+ # This file is part of GNU Libtool.
+@@ -19381,19 +20049,42 @@ SP2NL=$lt_lt_SP2NL
+ # turn newlines into spaces.
+ NL2SP=$lt_lt_NL2SP
+
++# convert \$build file names to \$host format.
++to_host_file_cmd=$lt_cv_to_host_file_cmd
++
++# convert \$build files to toolchain format.
++to_tool_file_cmd=$lt_cv_to_tool_file_cmd
++
+ # An object symbol dumper.
+ OBJDUMP=$lt_OBJDUMP
+
+ # Method to check whether dependent libraries are shared objects.
+ deplibs_check_method=$lt_deplibs_check_method
+
+-# Command to use when deplibs_check_method == "file_magic".
++# Command to use when deplibs_check_method = "file_magic".
+ file_magic_cmd=$lt_file_magic_cmd
+
++# How to find potential files when deplibs_check_method = "file_magic".
++file_magic_glob=$lt_file_magic_glob
++
++# Find potential files using nocaseglob when deplibs_check_method = "file_magic".
++want_nocaseglob=$lt_want_nocaseglob
++
++# DLL creation program.
++DLLTOOL=$lt_DLLTOOL
++
++# Command to associate shared and link libraries.
++sharedlib_from_linklib_cmd=$lt_sharedlib_from_linklib_cmd
++
+ # The archiver.
+ AR=$lt_AR
++
++# Flags to create an archive.
+ AR_FLAGS=$lt_AR_FLAGS
+
++# How to feed a file listing to the archiver.
++archiver_list_spec=$lt_archiver_list_spec
++
+ # A symbol stripping program.
+ STRIP=$lt_STRIP
+
+@@ -19423,6 +20114,12 @@ global_symbol_to_c_name_address=$lt_lt_cv_sys_global_symbol_to_c_name_address
+ # Transform the output of nm in a C name address pair when lib prefix is needed.
+ global_symbol_to_c_name_address_lib_prefix=$lt_lt_cv_sys_global_symbol_to_c_name_address_lib_prefix
+
++# Specify filename containing input files for \$NM.
++nm_file_list_spec=$lt_nm_file_list_spec
++
++# The root where to search for dependent libraries,and in which our libraries should be installed.
++lt_sysroot=$lt_sysroot
++
+ # The name of the directory that contains temporary libtool files.
+ objdir=$objdir
+
+@@ -19432,6 +20129,9 @@ MAGIC_CMD=$MAGIC_CMD
+ # Must we lock files when doing compilation?
+ need_locks=$lt_need_locks
+
++# Manifest tool.
++MANIFEST_TOOL=$lt_MANIFEST_TOOL
++
+ # Tool to manipulate archived DWARF debug symbol files on Mac OS X.
+ DSYMUTIL=$lt_DSYMUTIL
+
+@@ -19546,12 +20246,12 @@ with_gcc=$GCC
+ # Compiler flag to turn off builtin functions.
+ no_builtin_flag=$lt_lt_prog_compiler_no_builtin_flag
+
+-# How to pass a linker flag through the compiler.
+-wl=$lt_lt_prog_compiler_wl
+-
+ # Additional compiler flags for building library objects.
+ pic_flag=$lt_lt_prog_compiler_pic
+
++# How to pass a linker flag through the compiler.
++wl=$lt_lt_prog_compiler_wl
++
+ # Compiler flag to prevent dynamic linking.
+ link_static_flag=$lt_lt_prog_compiler_static
+
+@@ -19638,9 +20338,6 @@ inherit_rpath=$inherit_rpath
+ # Whether libtool must link a program against all its dependency libraries.
+ link_all_deplibs=$link_all_deplibs
+
+-# Fix the shell variable \$srcfile for the compiler.
+-fix_srcfile_path=$lt_fix_srcfile_path
+-
+ # Set to "yes" if exported symbols are required.
+ always_export_symbols=$always_export_symbols
+
+@@ -19656,6 +20353,9 @@ include_expsyms=$lt_include_expsyms
+ # Commands necessary for linking programs (against libraries) with templates.
+ prelink_cmds=$lt_prelink_cmds
+
++# Commands necessary for finishing linking programs.
++postlink_cmds=$lt_postlink_cmds
++
+ # Specify filename containing input files.
+ file_list_spec=$lt_file_list_spec
+
+@@ -19688,210 +20388,169 @@ ltmain="$ac_aux_dir/ltmain.sh"
+ # if finds mixed CR/LF and LF-only lines. Since sed operates in
+ # text mode, it properly converts lines to CR/LF. This bash problem
+ # is reportedly fixed, but why not run on old versions too?
+- sed '/^# Generated shell functions inserted here/q' "$ltmain" >> "$cfgfile" \
+- || (rm -f "$cfgfile"; exit 1)
+-
+- case $xsi_shell in
+- yes)
+- cat << \_LT_EOF >> "$cfgfile"
+-
+-# func_dirname file append nondir_replacement
+-# Compute the dirname of FILE. If nonempty, add APPEND to the result,
+-# otherwise set result to NONDIR_REPLACEMENT.
+-func_dirname ()
+-{
+- case ${1} in
+- */*) func_dirname_result="${1%/*}${2}" ;;
+- * ) func_dirname_result="${3}" ;;
+- esac
+-}
+-
+-# func_basename file
+-func_basename ()
+-{
+- func_basename_result="${1##*/}"
+-}
+-
+-# func_dirname_and_basename file append nondir_replacement
+-# perform func_basename and func_dirname in a single function
+-# call:
+-# dirname: Compute the dirname of FILE. If nonempty,
+-# add APPEND to the result, otherwise set result
+-# to NONDIR_REPLACEMENT.
+-# value returned in "$func_dirname_result"
+-# basename: Compute filename of FILE.
+-# value retuned in "$func_basename_result"
+-# Implementation must be kept synchronized with func_dirname
+-# and func_basename. For efficiency, we do not delegate to
+-# those functions but instead duplicate the functionality here.
+-func_dirname_and_basename ()
+-{
+- case ${1} in
+- */*) func_dirname_result="${1%/*}${2}" ;;
+- * ) func_dirname_result="${3}" ;;
+- esac
+- func_basename_result="${1##*/}"
+-}
+-
+-# func_stripname prefix suffix name
+-# strip PREFIX and SUFFIX off of NAME.
+-# PREFIX and SUFFIX must not contain globbing or regex special
+-# characters, hashes, percent signs, but SUFFIX may contain a leading
+-# dot (in which case that matches only a dot).
+-func_stripname ()
+-{
+- # pdksh 5.2.14 does not do ${X%$Y} correctly if both X and Y are
+- # positional parameters, so assign one to ordinary parameter first.
+- func_stripname_result=${3}
+- func_stripname_result=${func_stripname_result#"${1}"}
+- func_stripname_result=${func_stripname_result%"${2}"}
+-}
+-
+-# func_opt_split
+-func_opt_split ()
+-{
+- func_opt_split_opt=${1%%=*}
+- func_opt_split_arg=${1#*=}
+-}
+-
+-# func_lo2o object
+-func_lo2o ()
+-{
+- case ${1} in
+- *.lo) func_lo2o_result=${1%.lo}.${objext} ;;
+- *) func_lo2o_result=${1} ;;
+- esac
+-}
+-
+-# func_xform libobj-or-source
+-func_xform ()
+-{
+- func_xform_result=${1%.*}.lo
+-}
+-
+-# func_arith arithmetic-term...
+-func_arith ()
+-{
+- func_arith_result=$(( $* ))
+-}
+-
+-# func_len string
+-# STRING may not start with a hyphen.
+-func_len ()
+-{
+- func_len_result=${#1}
+-}
+-
+-_LT_EOF
+- ;;
+- *) # Bourne compatible functions.
+- cat << \_LT_EOF >> "$cfgfile"
+-
+-# func_dirname file append nondir_replacement
+-# Compute the dirname of FILE. If nonempty, add APPEND to the result,
+-# otherwise set result to NONDIR_REPLACEMENT.
+-func_dirname ()
+-{
+- # Extract subdirectory from the argument.
+- func_dirname_result=`$ECHO "${1}" | $SED "$dirname"`
+- if test "X$func_dirname_result" = "X${1}"; then
+- func_dirname_result="${3}"
+- else
+- func_dirname_result="$func_dirname_result${2}"
+- fi
+-}
+-
+-# func_basename file
+-func_basename ()
+-{
+- func_basename_result=`$ECHO "${1}" | $SED "$basename"`
+-}
+-
+-
+-# func_stripname prefix suffix name
+-# strip PREFIX and SUFFIX off of NAME.
+-# PREFIX and SUFFIX must not contain globbing or regex special
+-# characters, hashes, percent signs, but SUFFIX may contain a leading
+-# dot (in which case that matches only a dot).
+-# func_strip_suffix prefix name
+-func_stripname ()
+-{
+- case ${2} in
+- .*) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%\\\\${2}\$%%"`;;
+- *) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%${2}\$%%"`;;
+- esac
+-}
+-
+-# sed scripts:
+-my_sed_long_opt='1s/^\(-[^=]*\)=.*/\1/;q'
+-my_sed_long_arg='1s/^-[^=]*=//'
+-
+-# func_opt_split
+-func_opt_split ()
+-{
+- func_opt_split_opt=`$ECHO "${1}" | $SED "$my_sed_long_opt"`
+- func_opt_split_arg=`$ECHO "${1}" | $SED "$my_sed_long_arg"`
+-}
+-
+-# func_lo2o object
+-func_lo2o ()
+-{
+- func_lo2o_result=`$ECHO "${1}" | $SED "$lo2o"`
+-}
+-
+-# func_xform libobj-or-source
+-func_xform ()
+-{
+- func_xform_result=`$ECHO "${1}" | $SED 's/\.[^.]*$/.lo/'`
+-}
+-
+-# func_arith arithmetic-term...
+-func_arith ()
+-{
+- func_arith_result=`expr "$@"`
+-}
+-
+-# func_len string
+-# STRING may not start with a hyphen.
+-func_len ()
+-{
+- func_len_result=`expr "$1" : ".*" 2>/dev/null || echo $max_cmd_len`
+-}
+-
+-_LT_EOF
+-esac
+-
+-case $lt_shell_append in
+- yes)
+- cat << \_LT_EOF >> "$cfgfile"
+-
+-# func_append var value
+-# Append VALUE to the end of shell variable VAR.
+-func_append ()
+-{
+- eval "$1+=\$2"
+-}
+-_LT_EOF
+- ;;
+- *)
+- cat << \_LT_EOF >> "$cfgfile"
+-
+-# func_append var value
+-# Append VALUE to the end of shell variable VAR.
+-func_append ()
+-{
+- eval "$1=\$$1\$2"
+-}
+-
+-_LT_EOF
+- ;;
+- esac
+-
+-
+- sed -n '/^# Generated shell functions inserted here/,$p' "$ltmain" >> "$cfgfile" \
+- || (rm -f "$cfgfile"; exit 1)
+-
+- mv -f "$cfgfile" "$ofile" ||
++ sed '$q' "$ltmain" >> "$cfgfile" \
++ || (rm -f "$cfgfile"; exit 1)
++
++ if test x"$xsi_shell" = xyes; then
++ sed -e '/^func_dirname ()$/,/^} # func_dirname /c\
++func_dirname ()\
++{\
++\ case ${1} in\
++\ */*) func_dirname_result="${1%/*}${2}" ;;\
++\ * ) func_dirname_result="${3}" ;;\
++\ esac\
++} # Extended-shell func_dirname implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++
++ sed -e '/^func_basename ()$/,/^} # func_basename /c\
++func_basename ()\
++{\
++\ func_basename_result="${1##*/}"\
++} # Extended-shell func_basename implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++
++ sed -e '/^func_dirname_and_basename ()$/,/^} # func_dirname_and_basename /c\
++func_dirname_and_basename ()\
++{\
++\ case ${1} in\
++\ */*) func_dirname_result="${1%/*}${2}" ;;\
++\ * ) func_dirname_result="${3}" ;;\
++\ esac\
++\ func_basename_result="${1##*/}"\
++} # Extended-shell func_dirname_and_basename implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++
++ sed -e '/^func_stripname ()$/,/^} # func_stripname /c\
++func_stripname ()\
++{\
++\ # pdksh 5.2.14 does not do ${X%$Y} correctly if both X and Y are\
++\ # positional parameters, so assign one to ordinary parameter first.\
++\ func_stripname_result=${3}\
++\ func_stripname_result=${func_stripname_result#"${1}"}\
++\ func_stripname_result=${func_stripname_result%"${2}"}\
++} # Extended-shell func_stripname implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++
++ sed -e '/^func_split_long_opt ()$/,/^} # func_split_long_opt /c\
++func_split_long_opt ()\
++{\
++\ func_split_long_opt_name=${1%%=*}\
++\ func_split_long_opt_arg=${1#*=}\
++} # Extended-shell func_split_long_opt implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++
++ sed -e '/^func_split_short_opt ()$/,/^} # func_split_short_opt /c\
++func_split_short_opt ()\
++{\
++\ func_split_short_opt_arg=${1#??}\
++\ func_split_short_opt_name=${1%"$func_split_short_opt_arg"}\
++} # Extended-shell func_split_short_opt implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++
++ sed -e '/^func_lo2o ()$/,/^} # func_lo2o /c\
++func_lo2o ()\
++{\
++\ case ${1} in\
++\ *.lo) func_lo2o_result=${1%.lo}.${objext} ;;\
++\ *) func_lo2o_result=${1} ;;\
++\ esac\
++} # Extended-shell func_lo2o implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++
++ sed -e '/^func_xform ()$/,/^} # func_xform /c\
++func_xform ()\
++{\
++ func_xform_result=${1%.*}.lo\
++} # Extended-shell func_xform implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++
++ sed -e '/^func_arith ()$/,/^} # func_arith /c\
++func_arith ()\
++{\
++ func_arith_result=$(( $* ))\
++} # Extended-shell func_arith implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++
++ sed -e '/^func_len ()$/,/^} # func_len /c\
++func_len ()\
++{\
++ func_len_result=${#1}\
++} # Extended-shell func_len implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++fi
++
++if test x"$lt_shell_append" = xyes; then
++ sed -e '/^func_append ()$/,/^} # func_append /c\
++func_append ()\
++{\
++ eval "${1}+=\\${2}"\
++} # Extended-shell func_append implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++
++ sed -e '/^func_append_quoted ()$/,/^} # func_append_quoted /c\
++func_append_quoted ()\
++{\
++\ func_quote_for_eval "${2}"\
++\ eval "${1}+=\\\\ \\$func_quote_for_eval_result"\
++} # Extended-shell func_append_quoted implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++
++ # Save a `func_append' function call where possible by direct use of '+='
++ sed -e 's%func_append \([a-zA-Z_]\{1,\}\) "%\1+="%g' $cfgfile > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++ test 0 -eq $? || _lt_function_replace_fail=:
++else
++ # Save a `func_append' function call even when '+=' is not available
++ sed -e 's%func_append \([a-zA-Z_]\{1,\}\) "%\1="$\1%g' $cfgfile > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++ test 0 -eq $? || _lt_function_replace_fail=:
++fi
++
++if test x"$_lt_function_replace_fail" = x":"; then
++ { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: Unable to substitute extended shell functions in $ofile" >&5
++$as_echo "$as_me: WARNING: Unable to substitute extended shell functions in $ofile" >&2;}
++fi
++
++
++ mv -f "$cfgfile" "$ofile" ||
+ (rm -f "$ofile" && cp "$cfgfile" "$ofile" && rm -f "$cfgfile")
+ chmod +x "$ofile"
+
+diff --git a/bfd/configure.ac b/bfd/configure.ac
+index 7fcc5d4a947..f7191d3f30f 100644
+--- a/bfd/configure.ac
++++ b/bfd/configure.ac
+@@ -276,7 +276,7 @@ changequote(,)dnl
+ x=`sed -n -e 's/^[ ]*PICFLAG[ ]*=[ ]*//p' < ../libiberty/Makefile | sed -n '$p'`
+ changequote([,])dnl
+ if test -n "$x"; then
+- SHARED_LIBADD="-L`pwd`/../libiberty/pic -liberty"
++ SHARED_LIBADD="`pwd`/../libiberty/pic/libiberty.a"
+ fi
+ fi
+
+diff --git a/binutils/Makefile.in b/binutils/Makefile.in
+index 842a6d99b54..037cccde038 100644
+--- a/binutils/Makefile.in
++++ b/binutils/Makefile.in
+@@ -495,6 +495,7 @@ DEBUGINFOD_LIBS = @DEBUGINFOD_LIBS@
+ DEFS = @DEFS@
+ DEMANGLER_NAME = @DEMANGLER_NAME@
+ DEPDIR = @DEPDIR@
++DLLTOOL = @DLLTOOL@
+ DLLTOOL_DEFS = @DLLTOOL_DEFS@
+ DSYMUTIL = @DSYMUTIL@
+ DUMPBIN = @DUMPBIN@
+@@ -539,6 +540,7 @@ LTLIBINTL = @LTLIBINTL@
+ LTLIBOBJS = @LTLIBOBJS@
+ MAINT = @MAINT@
+ MAKEINFO = @MAKEINFO@
++MANIFEST_TOOL = @MANIFEST_TOOL@
+ MKDIR_P = @MKDIR_P@
+ MKINSTALLDIRS = @MKINSTALLDIRS@
+ MSGFMT = @MSGFMT@
+@@ -587,6 +589,7 @@ abs_builddir = @abs_builddir@
+ abs_srcdir = @abs_srcdir@
+ abs_top_builddir = @abs_top_builddir@
+ abs_top_srcdir = @abs_top_srcdir@
++ac_ct_AR = @ac_ct_AR@
+ ac_ct_CC = @ac_ct_CC@
+ ac_ct_DUMPBIN = @ac_ct_DUMPBIN@
+ am__include = @am__include@
+diff --git a/binutils/configure b/binutils/configure
+index a1092735311..f3bdee98f3f 100755
+--- a/binutils/configure
++++ b/binutils/configure
+@@ -704,8 +704,11 @@ OTOOL
+ LIPO
+ NMEDIT
+ DSYMUTIL
++MANIFEST_TOOL
+ RANLIB
++ac_ct_AR
+ AR
++DLLTOOL
+ OBJDUMP
+ LN_S
+ NM
+@@ -822,6 +825,7 @@ enable_static
+ with_pic
+ enable_fast_install
+ with_gnu_ld
++with_libtool_sysroot
+ enable_libtool_lock
+ enable_plugins
+ enable_largefile
+@@ -1529,6 +1533,8 @@ Optional Packages:
+ --with-pic try to use only PIC/non-PIC objects [default=use
+ both]
+ --with-gnu-ld assume the C compiler uses GNU ld [default=no]
++ --with-libtool-sysroot=DIR Search for dependent libraries within DIR
++ (or the compiler's sysroot if not specified).
+ --with-debuginfod Enable debuginfo lookups with debuginfod
+ (auto/yes/no)
+ --with-gnu-ld assume the C compiler uses GNU ld default=no
+@@ -4958,8 +4964,8 @@ esac
+
+
+
+-macro_version='2.2.7a'
+-macro_revision='1.3134'
++macro_version='2.4'
++macro_revision='1.3293'
+
+
+
+@@ -4999,7 +5005,7 @@ ECHO=$ECHO$ECHO$ECHO$ECHO$ECHO$ECHO
+ { $as_echo "$as_me:${as_lineno-$LINENO}: checking how to print strings" >&5
+ $as_echo_n "checking how to print strings... " >&6; }
+ # Test print first, because it will be a builtin if present.
+-if test "X`print -r -- -n 2>/dev/null`" = X-n && \
++if test "X`( print -r -- -n ) 2>/dev/null`" = X-n && \
+ test "X`print -r -- $ECHO 2>/dev/null`" = "X$ECHO"; then
+ ECHO='print -r --'
+ elif test "X`printf %s $ECHO 2>/dev/null`" = "X$ECHO"; then
+@@ -5692,8 +5698,8 @@ $as_echo_n "checking whether the shell understands some XSI constructs... " >&6;
+ # Try some XSI features
+ xsi_shell=no
+ ( _lt_dummy="a/b/c"
+- test "${_lt_dummy##*/},${_lt_dummy%/*},"${_lt_dummy%"$_lt_dummy"}, \
+- = c,a/b,, \
++ test "${_lt_dummy##*/},${_lt_dummy%/*},${_lt_dummy#??}"${_lt_dummy%"$_lt_dummy"}, \
++ = c,a/b,b/c, \
+ && eval 'test $(( 1 + 1 )) -eq 2 \
+ && test "${#_lt_dummy}" -eq 5' ) >/dev/null 2>&1 \
+ && xsi_shell=yes
+@@ -5742,6 +5748,80 @@ esac
+
+
+
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to convert $build file names to $host format" >&5
++$as_echo_n "checking how to convert $build file names to $host format... " >&6; }
++if ${lt_cv_to_host_file_cmd+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ case $host in
++ *-*-mingw* )
++ case $build in
++ *-*-mingw* ) # actually msys
++ lt_cv_to_host_file_cmd=func_convert_file_msys_to_w32
++ ;;
++ *-*-cygwin* )
++ lt_cv_to_host_file_cmd=func_convert_file_cygwin_to_w32
++ ;;
++ * ) # otherwise, assume *nix
++ lt_cv_to_host_file_cmd=func_convert_file_nix_to_w32
++ ;;
++ esac
++ ;;
++ *-*-cygwin* )
++ case $build in
++ *-*-mingw* ) # actually msys
++ lt_cv_to_host_file_cmd=func_convert_file_msys_to_cygwin
++ ;;
++ *-*-cygwin* )
++ lt_cv_to_host_file_cmd=func_convert_file_noop
++ ;;
++ * ) # otherwise, assume *nix
++ lt_cv_to_host_file_cmd=func_convert_file_nix_to_cygwin
++ ;;
++ esac
++ ;;
++ * ) # unhandled hosts (and "normal" native builds)
++ lt_cv_to_host_file_cmd=func_convert_file_noop
++ ;;
++esac
++
++fi
++
++to_host_file_cmd=$lt_cv_to_host_file_cmd
++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_to_host_file_cmd" >&5
++$as_echo "$lt_cv_to_host_file_cmd" >&6; }
++
++
++
++
++
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to convert $build file names to toolchain format" >&5
++$as_echo_n "checking how to convert $build file names to toolchain format... " >&6; }
++if ${lt_cv_to_tool_file_cmd+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ #assume ordinary cross tools, or native build.
++lt_cv_to_tool_file_cmd=func_convert_file_noop
++case $host in
++ *-*-mingw* )
++ case $build in
++ *-*-mingw* ) # actually msys
++ lt_cv_to_tool_file_cmd=func_convert_file_msys_to_w32
++ ;;
++ esac
++ ;;
++esac
++
++fi
++
++to_tool_file_cmd=$lt_cv_to_tool_file_cmd
++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_to_tool_file_cmd" >&5
++$as_echo "$lt_cv_to_tool_file_cmd" >&6; }
++
++
++
++
++
+ { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $LD option to reload object files" >&5
+ $as_echo_n "checking for $LD option to reload object files... " >&6; }
+ if ${lt_cv_ld_reload_flag+:} false; then :
+@@ -5758,6 +5838,11 @@ case $reload_flag in
+ esac
+ reload_cmds='$LD$reload_flag -o $output$reload_objs'
+ case $host_os in
++ cygwin* | mingw* | pw32* | cegcc*)
++ if test "$GCC" != yes; then
++ reload_cmds=false
++ fi
++ ;;
+ darwin*)
+ if test "$GCC" = yes; then
+ reload_cmds='$LTCC $LTCFLAGS -nostdlib ${wl}-r -o $output$reload_objs'
+@@ -5926,7 +6011,8 @@ mingw* | pw32*)
+ lt_cv_deplibs_check_method='file_magic ^x86 archive import|^x86 DLL'
+ lt_cv_file_magic_cmd='func_win32_libid'
+ else
+- lt_cv_deplibs_check_method='file_magic file format pei*-i386(.*architecture: i386)?'
++ # Keep this pattern in sync with the one in func_win32_libid.
++ lt_cv_deplibs_check_method='file_magic file format (pei*-i386(.*architecture: i386)?|pe-arm-wince|pe-x86-64)'
+ lt_cv_file_magic_cmd='$OBJDUMP -f'
+ fi
+ ;;
+@@ -6085,6 +6171,21 @@ esac
+ fi
+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_deplibs_check_method" >&5
+ $as_echo "$lt_cv_deplibs_check_method" >&6; }
++
++file_magic_glob=
++want_nocaseglob=no
++if test "$build" = "$host"; then
++ case $host_os in
++ mingw* | pw32*)
++ if ( shopt | grep nocaseglob ) >/dev/null 2>&1; then
++ want_nocaseglob=yes
++ else
++ file_magic_glob=`echo aAbBcCdDeEfFgGhHiIjJkKlLmMnNoOpPqQrRsStTuUvVwWxXyYzZ | $SED -e "s/\(..\)/s\/[\1]\/[\1]\/g;/g"`
++ fi
++ ;;
++ esac
++fi
++
+ file_magic_cmd=$lt_cv_file_magic_cmd
+ deplibs_check_method=$lt_cv_deplibs_check_method
+ test -z "$deplibs_check_method" && deplibs_check_method=unknown
+@@ -6100,6 +6201,157 @@ test -z "$deplibs_check_method" && deplibs_check_method=unknown
+
+
+
++
++
++
++
++
++
++
++
++
++
++if test -n "$ac_tool_prefix"; then
++ # Extract the first word of "${ac_tool_prefix}dlltool", so it can be a program name with args.
++set dummy ${ac_tool_prefix}dlltool; ac_word=$2
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
++$as_echo_n "checking for $ac_word... " >&6; }
++if ${ac_cv_prog_DLLTOOL+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ if test -n "$DLLTOOL"; then
++ ac_cv_prog_DLLTOOL="$DLLTOOL" # Let the user override the test.
++else
++as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
++for as_dir in $PATH
++do
++ IFS=$as_save_IFS
++ test -z "$as_dir" && as_dir=.
++ for ac_exec_ext in '' $ac_executable_extensions; do
++ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
++ ac_cv_prog_DLLTOOL="${ac_tool_prefix}dlltool"
++ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
++ break 2
++ fi
++done
++ done
++IFS=$as_save_IFS
++
++fi
++fi
++DLLTOOL=$ac_cv_prog_DLLTOOL
++if test -n "$DLLTOOL"; then
++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $DLLTOOL" >&5
++$as_echo "$DLLTOOL" >&6; }
++else
++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
++$as_echo "no" >&6; }
++fi
++
++
++fi
++if test -z "$ac_cv_prog_DLLTOOL"; then
++ ac_ct_DLLTOOL=$DLLTOOL
++ # Extract the first word of "dlltool", so it can be a program name with args.
++set dummy dlltool; ac_word=$2
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
++$as_echo_n "checking for $ac_word... " >&6; }
++if ${ac_cv_prog_ac_ct_DLLTOOL+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ if test -n "$ac_ct_DLLTOOL"; then
++ ac_cv_prog_ac_ct_DLLTOOL="$ac_ct_DLLTOOL" # Let the user override the test.
++else
++as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
++for as_dir in $PATH
++do
++ IFS=$as_save_IFS
++ test -z "$as_dir" && as_dir=.
++ for ac_exec_ext in '' $ac_executable_extensions; do
++ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
++ ac_cv_prog_ac_ct_DLLTOOL="dlltool"
++ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
++ break 2
++ fi
++done
++ done
++IFS=$as_save_IFS
++
++fi
++fi
++ac_ct_DLLTOOL=$ac_cv_prog_ac_ct_DLLTOOL
++if test -n "$ac_ct_DLLTOOL"; then
++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_DLLTOOL" >&5
++$as_echo "$ac_ct_DLLTOOL" >&6; }
++else
++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
++$as_echo "no" >&6; }
++fi
++
++ if test "x$ac_ct_DLLTOOL" = x; then
++ DLLTOOL="false"
++ else
++ case $cross_compiling:$ac_tool_warned in
++yes:)
++{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
++$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
++ac_tool_warned=yes ;;
++esac
++ DLLTOOL=$ac_ct_DLLTOOL
++ fi
++else
++ DLLTOOL="$ac_cv_prog_DLLTOOL"
++fi
++
++test -z "$DLLTOOL" && DLLTOOL=dlltool
++
++
++
++
++
++
++
++
++
++
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to associate runtime and link libraries" >&5
++$as_echo_n "checking how to associate runtime and link libraries... " >&6; }
++if ${lt_cv_sharedlib_from_linklib_cmd+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ lt_cv_sharedlib_from_linklib_cmd='unknown'
++
++case $host_os in
++cygwin* | mingw* | pw32* | cegcc*)
++ # two different shell functions defined in ltmain.sh
++ # decide which to use based on capabilities of $DLLTOOL
++ case `$DLLTOOL --help 2>&1` in
++ *--identify-strict*)
++ lt_cv_sharedlib_from_linklib_cmd=func_cygming_dll_for_implib
++ ;;
++ *)
++ lt_cv_sharedlib_from_linklib_cmd=func_cygming_dll_for_implib_fallback
++ ;;
++ esac
++ ;;
++*)
++ # fallback: assume linklib IS sharedlib
++ lt_cv_sharedlib_from_linklib_cmd="$ECHO"
++ ;;
++esac
++
++fi
++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_sharedlib_from_linklib_cmd" >&5
++$as_echo "$lt_cv_sharedlib_from_linklib_cmd" >&6; }
++sharedlib_from_linklib_cmd=$lt_cv_sharedlib_from_linklib_cmd
++test -z "$sharedlib_from_linklib_cmd" && sharedlib_from_linklib_cmd=$ECHO
++
++
++
++
++
++
++
+ plugin_option=
+ plugin_names="liblto_plugin.so liblto_plugin-0.dll cyglto_plugin-0.dll"
+ for plugin in $plugin_names; do
+@@ -6114,8 +6366,10 @@ for plugin in $plugin_names; do
+ done
+
+ if test -n "$ac_tool_prefix"; then
+- # Extract the first word of "${ac_tool_prefix}ar", so it can be a program name with args.
+-set dummy ${ac_tool_prefix}ar; ac_word=$2
++ for ac_prog in ar
++ do
++ # Extract the first word of "$ac_tool_prefix$ac_prog", so it can be a program name with args.
++set dummy $ac_tool_prefix$ac_prog; ac_word=$2
+ { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+ $as_echo_n "checking for $ac_word... " >&6; }
+ if ${ac_cv_prog_AR+:} false; then :
+@@ -6131,7 +6385,7 @@ do
+ test -z "$as_dir" && as_dir=.
+ for ac_exec_ext in '' $ac_executable_extensions; do
+ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
+- ac_cv_prog_AR="${ac_tool_prefix}ar"
++ ac_cv_prog_AR="$ac_tool_prefix$ac_prog"
+ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+ break 2
+ fi
+@@ -6151,11 +6405,15 @@ $as_echo "no" >&6; }
+ fi
+
+
++ test -n "$AR" && break
++ done
+ fi
+-if test -z "$ac_cv_prog_AR"; then
++if test -z "$AR"; then
+ ac_ct_AR=$AR
+- # Extract the first word of "ar", so it can be a program name with args.
+-set dummy ar; ac_word=$2
++ for ac_prog in ar
++do
++ # Extract the first word of "$ac_prog", so it can be a program name with args.
++set dummy $ac_prog; ac_word=$2
+ { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+ $as_echo_n "checking for $ac_word... " >&6; }
+ if ${ac_cv_prog_ac_ct_AR+:} false; then :
+@@ -6171,7 +6429,7 @@ do
+ test -z "$as_dir" && as_dir=.
+ for ac_exec_ext in '' $ac_executable_extensions; do
+ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
+- ac_cv_prog_ac_ct_AR="ar"
++ ac_cv_prog_ac_ct_AR="$ac_prog"
+ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+ break 2
+ fi
+@@ -6190,6 +6448,10 @@ else
+ $as_echo "no" >&6; }
+ fi
+
++
++ test -n "$ac_ct_AR" && break
++done
++
+ if test "x$ac_ct_AR" = x; then
+ AR="false"
+ else
+@@ -6201,25 +6463,19 @@ ac_tool_warned=yes ;;
+ esac
+ AR=$ac_ct_AR
+ fi
+-else
+- AR="$ac_cv_prog_AR"
+ fi
+
+-test -z "$AR" && AR=ar
+-if test -n "$plugin_option"; then
+- if $AR --help 2>&1 | grep -q "\--plugin"; then
+- touch conftest.c
+- $AR $plugin_option rc conftest.a conftest.c
+- if test "$?" != 0; then
+- { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: Failed: $AR $plugin_option rc" >&5
++ touch conftest.c
++ $AR $plugin_option rc conftest.a conftest.c
++ if test "$?" != 0; then
++ { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: Failed: $AR $plugin_option rc" >&5
+ $as_echo "$as_me: WARNING: Failed: $AR $plugin_option rc" >&2;}
+- else
+- AR="$AR $plugin_option"
+- fi
+- rm -f conftest.*
++ else
++ AR="$AR $plugin_option"
+ fi
+-fi
+-test -z "$AR_FLAGS" && AR_FLAGS=cru
++ rm -f conftest.*
++: ${AR=ar}
++: ${AR_FLAGS=cru}
+
+
+
+@@ -6231,6 +6487,64 @@ test -z "$AR_FLAGS" && AR_FLAGS=cru
+
+
+
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for archiver @FILE support" >&5
++$as_echo_n "checking for archiver @FILE support... " >&6; }
++if ${lt_cv_ar_at_file+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ lt_cv_ar_at_file=no
++ cat confdefs.h - <<_ACEOF >conftest.$ac_ext
++/* end confdefs.h. */
++
++int
++main ()
++{
++
++ ;
++ return 0;
++}
++_ACEOF
++if ac_fn_c_try_compile "$LINENO"; then :
++ echo conftest.$ac_objext > conftest.lst
++ lt_ar_try='$AR $AR_FLAGS libconftest.a @conftest.lst >&5'
++ { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$lt_ar_try\""; } >&5
++ (eval $lt_ar_try) 2>&5
++ ac_status=$?
++ $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
++ test $ac_status = 0; }
++ if test "$ac_status" -eq 0; then
++ # Ensure the archiver fails upon bogus file names.
++ rm -f conftest.$ac_objext libconftest.a
++ { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$lt_ar_try\""; } >&5
++ (eval $lt_ar_try) 2>&5
++ ac_status=$?
++ $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
++ test $ac_status = 0; }
++ if test "$ac_status" -ne 0; then
++ lt_cv_ar_at_file=@
++ fi
++ fi
++ rm -f conftest.* libconftest.a
++
++fi
++rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
++
++fi
++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_ar_at_file" >&5
++$as_echo "$lt_cv_ar_at_file" >&6; }
++
++if test "x$lt_cv_ar_at_file" = xno; then
++ archiver_list_spec=
++else
++ archiver_list_spec=$lt_cv_ar_at_file
++fi
++
++
++
++
++
++
++
+ if test -n "$ac_tool_prefix"; then
+ # Extract the first word of "${ac_tool_prefix}strip", so it can be a program name with args.
+ set dummy ${ac_tool_prefix}strip; ac_word=$2
+@@ -6570,8 +6884,8 @@ esac
+ lt_cv_sys_global_symbol_to_cdecl="sed -n -e 's/^T .* \(.*\)$/extern int \1();/p' -e 's/^$symcode* .* \(.*\)$/extern char \1;/p'"
+
+ # Transform an extracted symbol line into symbol name and symbol address
+-lt_cv_sys_global_symbol_to_c_name_address="sed -n -e 's/^: \([^ ]*\) $/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"\2\", (void *) \&\2},/p'"
+-lt_cv_sys_global_symbol_to_c_name_address_lib_prefix="sed -n -e 's/^: \([^ ]*\) $/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \(lib[^ ]*\)$/ {\"\2\", (void *) \&\2},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"lib\2\", (void *) \&\2},/p'"
++lt_cv_sys_global_symbol_to_c_name_address="sed -n -e 's/^: \([^ ]*\)[ ]*$/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"\2\", (void *) \&\2},/p'"
++lt_cv_sys_global_symbol_to_c_name_address_lib_prefix="sed -n -e 's/^: \([^ ]*\)[ ]*$/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \(lib[^ ]*\)$/ {\"\2\", (void *) \&\2},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"lib\2\", (void *) \&\2},/p'"
+
+ # Handle CRLF in mingw tool chain
+ opt_cr=
+@@ -6607,6 +6921,7 @@ for ac_symprfx in "" "_"; do
+ else
+ lt_cv_sys_global_symbol_pipe="sed -n -e 's/^.*[ ]\($symcode$symcode*\)[ ][ ]*$ac_symprfx$sympat$opt_cr$/$symxfrm/p'"
+ fi
++ lt_cv_sys_global_symbol_pipe="$lt_cv_sys_global_symbol_pipe | sed '/ __gnu_lto/d'"
+
+ # Check to see that the pipe works correctly.
+ pipe_works=no
+@@ -6648,6 +6963,18 @@ _LT_EOF
+ if $GREP ' nm_test_var$' "$nlist" >/dev/null; then
+ if $GREP ' nm_test_func$' "$nlist" >/dev/null; then
+ cat <<_LT_EOF > conftest.$ac_ext
++/* Keep this code in sync between libtool.m4, ltmain, lt_system.h, and tests. */
++#if defined(_WIN32) || defined(__CYGWIN__) || defined(_WIN32_WCE)
++/* DATA imports from DLLs on WIN32 con't be const, because runtime
++ relocations are performed -- see ld's documentation on pseudo-relocs. */
++# define LT_DLSYM_CONST
++#elif defined(__osf__)
++/* This system does not cope well with relocations in const data. */
++# define LT_DLSYM_CONST
++#else
++# define LT_DLSYM_CONST const
++#endif
++
+ #ifdef __cplusplus
+ extern "C" {
+ #endif
+@@ -6659,7 +6986,7 @@ _LT_EOF
+ cat <<_LT_EOF >> conftest.$ac_ext
+
+ /* The mapping between symbol names and symbols. */
+-const struct {
++LT_DLSYM_CONST struct {
+ const char *name;
+ void *address;
+ }
+@@ -6685,8 +7012,8 @@ static const void *lt_preloaded_setup() {
+ _LT_EOF
+ # Now try linking the two files.
+ mv conftest.$ac_objext conftstm.$ac_objext
+- lt_save_LIBS="$LIBS"
+- lt_save_CFLAGS="$CFLAGS"
++ lt_globsym_save_LIBS=$LIBS
++ lt_globsym_save_CFLAGS=$CFLAGS
+ LIBS="conftstm.$ac_objext"
+ CFLAGS="$CFLAGS$lt_prog_compiler_no_builtin_flag"
+ if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_link\""; } >&5
+@@ -6696,8 +7023,8 @@ _LT_EOF
+ test $ac_status = 0; } && test -s conftest${ac_exeext}; then
+ pipe_works=yes
+ fi
+- LIBS="$lt_save_LIBS"
+- CFLAGS="$lt_save_CFLAGS"
++ LIBS=$lt_globsym_save_LIBS
++ CFLAGS=$lt_globsym_save_CFLAGS
+ else
+ echo "cannot find nm_test_func in $nlist" >&5
+ fi
+@@ -6734,6 +7061,18 @@ else
+ $as_echo "ok" >&6; }
+ fi
+
++# Response file support.
++if test "$lt_cv_nm_interface" = "MS dumpbin"; then
++ nm_file_list_spec='@'
++elif $NM --help 2>/dev/null | grep '[@]FILE' >/dev/null; then
++ nm_file_list_spec='@'
++fi
++
++
++
++
++
++
+
+
+
+@@ -6750,6 +7089,43 @@ fi
+
+
+
++
++
++
++
++
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for sysroot" >&5
++$as_echo_n "checking for sysroot... " >&6; }
++
++# Check whether --with-libtool-sysroot was given.
++if test "${with_libtool_sysroot+set}" = set; then :
++ withval=$with_libtool_sysroot;
++else
++ with_libtool_sysroot=no
++fi
++
++
++lt_sysroot=
++case ${with_libtool_sysroot} in #(
++ yes)
++ if test "$GCC" = yes; then
++ lt_sysroot=`$CC --print-sysroot 2>/dev/null`
++ fi
++ ;; #(
++ /*)
++ lt_sysroot=`echo "$with_libtool_sysroot" | sed -e "$sed_quote_subst"`
++ ;; #(
++ no|'')
++ ;; #(
++ *)
++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: ${with_libtool_sysroot}" >&5
++$as_echo "${with_libtool_sysroot}" >&6; }
++ as_fn_error $? "The sysroot must be an absolute path." "$LINENO" 5
++ ;;
++esac
++
++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: ${lt_sysroot:-no}" >&5
++$as_echo "${lt_sysroot:-no}" >&6; }
+
+
+
+@@ -6961,6 +7337,123 @@ esac
+
+ need_locks="$enable_libtool_lock"
+
++if test -n "$ac_tool_prefix"; then
++ # Extract the first word of "${ac_tool_prefix}mt", so it can be a program name with args.
++set dummy ${ac_tool_prefix}mt; ac_word=$2
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
++$as_echo_n "checking for $ac_word... " >&6; }
++if ${ac_cv_prog_MANIFEST_TOOL+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ if test -n "$MANIFEST_TOOL"; then
++ ac_cv_prog_MANIFEST_TOOL="$MANIFEST_TOOL" # Let the user override the test.
++else
++as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
++for as_dir in $PATH
++do
++ IFS=$as_save_IFS
++ test -z "$as_dir" && as_dir=.
++ for ac_exec_ext in '' $ac_executable_extensions; do
++ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
++ ac_cv_prog_MANIFEST_TOOL="${ac_tool_prefix}mt"
++ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
++ break 2
++ fi
++done
++ done
++IFS=$as_save_IFS
++
++fi
++fi
++MANIFEST_TOOL=$ac_cv_prog_MANIFEST_TOOL
++if test -n "$MANIFEST_TOOL"; then
++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $MANIFEST_TOOL" >&5
++$as_echo "$MANIFEST_TOOL" >&6; }
++else
++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
++$as_echo "no" >&6; }
++fi
++
++
++fi
++if test -z "$ac_cv_prog_MANIFEST_TOOL"; then
++ ac_ct_MANIFEST_TOOL=$MANIFEST_TOOL
++ # Extract the first word of "mt", so it can be a program name with args.
++set dummy mt; ac_word=$2
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
++$as_echo_n "checking for $ac_word... " >&6; }
++if ${ac_cv_prog_ac_ct_MANIFEST_TOOL+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ if test -n "$ac_ct_MANIFEST_TOOL"; then
++ ac_cv_prog_ac_ct_MANIFEST_TOOL="$ac_ct_MANIFEST_TOOL" # Let the user override the test.
++else
++as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
++for as_dir in $PATH
++do
++ IFS=$as_save_IFS
++ test -z "$as_dir" && as_dir=.
++ for ac_exec_ext in '' $ac_executable_extensions; do
++ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
++ ac_cv_prog_ac_ct_MANIFEST_TOOL="mt"
++ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
++ break 2
++ fi
++done
++ done
++IFS=$as_save_IFS
++
++fi
++fi
++ac_ct_MANIFEST_TOOL=$ac_cv_prog_ac_ct_MANIFEST_TOOL
++if test -n "$ac_ct_MANIFEST_TOOL"; then
++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_MANIFEST_TOOL" >&5
++$as_echo "$ac_ct_MANIFEST_TOOL" >&6; }
++else
++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
++$as_echo "no" >&6; }
++fi
++
++ if test "x$ac_ct_MANIFEST_TOOL" = x; then
++ MANIFEST_TOOL=":"
++ else
++ case $cross_compiling:$ac_tool_warned in
++yes:)
++{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
++$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
++ac_tool_warned=yes ;;
++esac
++ MANIFEST_TOOL=$ac_ct_MANIFEST_TOOL
++ fi
++else
++ MANIFEST_TOOL="$ac_cv_prog_MANIFEST_TOOL"
++fi
++
++test -z "$MANIFEST_TOOL" && MANIFEST_TOOL=mt
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking if $MANIFEST_TOOL is a manifest tool" >&5
++$as_echo_n "checking if $MANIFEST_TOOL is a manifest tool... " >&6; }
++if ${lt_cv_path_mainfest_tool+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ lt_cv_path_mainfest_tool=no
++ echo "$as_me:$LINENO: $MANIFEST_TOOL '-?'" >&5
++ $MANIFEST_TOOL '-?' 2>conftest.err > conftest.out
++ cat conftest.err >&5
++ if $GREP 'Manifest Tool' conftest.out > /dev/null; then
++ lt_cv_path_mainfest_tool=yes
++ fi
++ rm -f conftest*
++fi
++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_path_mainfest_tool" >&5
++$as_echo "$lt_cv_path_mainfest_tool" >&6; }
++if test "x$lt_cv_path_mainfest_tool" != xyes; then
++ MANIFEST_TOOL=:
++fi
++
++
++
++
++
+
+ case $host_os in
+ rhapsody* | darwin*)
+@@ -7524,6 +8017,8 @@ _LT_EOF
+ $LTCC $LTCFLAGS -c -o conftest.o conftest.c 2>&5
+ echo "$AR cru libconftest.a conftest.o" >&5
+ $AR cru libconftest.a conftest.o 2>&5
++ echo "$RANLIB libconftest.a" >&5
++ $RANLIB libconftest.a 2>&5
+ cat > conftest.c << _LT_EOF
+ int main() { return 0;}
+ _LT_EOF
+@@ -8108,8 +8603,6 @@ fi
+ lt_prog_compiler_pic=
+ lt_prog_compiler_static=
+
+-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $compiler option to produce PIC" >&5
+-$as_echo_n "checking for $compiler option to produce PIC... " >&6; }
+
+ if test "$GCC" = yes; then
+ lt_prog_compiler_wl='-Wl,'
+@@ -8275,6 +8768,12 @@ $as_echo_n "checking for $compiler option to produce PIC... " >&6; }
+ lt_prog_compiler_pic='--shared'
+ lt_prog_compiler_static='--static'
+ ;;
++ nagfor*)
++ # NAG Fortran compiler
++ lt_prog_compiler_wl='-Wl,-Wl,,'
++ lt_prog_compiler_pic='-PIC'
++ lt_prog_compiler_static='-Bstatic'
++ ;;
+ pgcc* | pgf77* | pgf90* | pgf95* | pgfortran*)
+ # Portland Group compilers (*not* the Pentium gcc compiler,
+ # which looks to be a dead project)
+@@ -8337,7 +8836,7 @@ $as_echo_n "checking for $compiler option to produce PIC... " >&6; }
+ lt_prog_compiler_pic='-KPIC'
+ lt_prog_compiler_static='-Bstatic'
+ case $cc_basename in
+- f77* | f90* | f95*)
++ f77* | f90* | f95* | sunf77* | sunf90* | sunf95*)
+ lt_prog_compiler_wl='-Qoption ld ';;
+ *)
+ lt_prog_compiler_wl='-Wl,';;
+@@ -8394,13 +8893,17 @@ case $host_os in
+ lt_prog_compiler_pic="$lt_prog_compiler_pic -DPIC"
+ ;;
+ esac
+-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_prog_compiler_pic" >&5
+-$as_echo "$lt_prog_compiler_pic" >&6; }
+-
+-
+-
+-
+
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $compiler option to produce PIC" >&5
++$as_echo_n "checking for $compiler option to produce PIC... " >&6; }
++if ${lt_cv_prog_compiler_pic+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ lt_cv_prog_compiler_pic=$lt_prog_compiler_pic
++fi
++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_pic" >&5
++$as_echo "$lt_cv_prog_compiler_pic" >&6; }
++lt_prog_compiler_pic=$lt_cv_prog_compiler_pic
+
+ #
+ # Check to make sure the PIC flag actually works.
+@@ -8461,6 +8964,11 @@ fi
+
+
+
++
++
++
++
++
+ #
+ # Check to make sure the static flag actually works.
+ #
+@@ -8811,7 +9319,8 @@ _LT_EOF
+ allow_undefined_flag=unsupported
+ always_export_symbols=no
+ enable_shared_with_static_runtimes=yes
+- export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1 DATA/'\'' | $SED -e '\''/^[AITW][ ]/s/.*[ ]//'\'' | sort | uniq > $export_symbols'
++ export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1 DATA/;s/^.*[ ]__nm__\([^ ]*\)[ ][^ ]*/\1 DATA/;/^I[ ]/d;/^[AITW][ ]/s/.* //'\'' | sort | uniq > $export_symbols'
++ exclude_expsyms='[_]+GLOBAL_OFFSET_TABLE_|[_]+GLOBAL__[FID]_.*|[_]+head_[A-Za-z0-9_]+_dll|[A-Za-z0-9_]+_dll_iname'
+
+ if $LD --help 2>&1 | $GREP 'auto-import' > /dev/null; then
+ archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib'
+@@ -8910,12 +9419,12 @@ _LT_EOF
+ whole_archive_flag_spec='--whole-archive$convenience --no-whole-archive'
+ hardcode_libdir_flag_spec=
+ hardcode_libdir_flag_spec_ld='-rpath $libdir'
+- archive_cmds='$LD -shared $libobjs $deplibs $compiler_flags -soname $soname -o $lib'
++ archive_cmds='$LD -shared $libobjs $deplibs $linker_flags -soname $soname -o $lib'
+ if test "x$supports_anon_versioning" = xyes; then
+ archive_expsym_cmds='echo "{ global:" > $output_objdir/$libname.ver~
+ cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~
+ echo "local: *; };" >> $output_objdir/$libname.ver~
+- $LD -shared $libobjs $deplibs $compiler_flags -soname $soname -version-script $output_objdir/$libname.ver -o $lib'
++ $LD -shared $libobjs $deplibs $linker_flags -soname $soname -version-script $output_objdir/$libname.ver -o $lib'
+ fi
+ ;;
+ esac
+@@ -8929,8 +9438,8 @@ _LT_EOF
+ archive_cmds='$LD -Bshareable $libobjs $deplibs $linker_flags -o $lib'
+ wlarc=
+ else
+- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
+- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
++ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
++ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
+ fi
+ ;;
+
+@@ -8948,8 +9457,8 @@ _LT_EOF
+
+ _LT_EOF
+ elif $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then
+- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
+- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
++ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
++ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
+ else
+ ld_shlibs=no
+ fi
+@@ -8995,8 +9504,8 @@ _LT_EOF
+
+ *)
+ if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then
+- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
+- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
++ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
++ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
+ else
+ ld_shlibs=no
+ fi
+@@ -9126,7 +9635,13 @@ _LT_EOF
+ allow_undefined_flag='-berok'
+ # Determine the default libpath from the value encoded in an
+ # empty executable.
+- cat confdefs.h - <<_ACEOF >conftest.$ac_ext
++ if test "${lt_cv_aix_libpath+set}" = set; then
++ aix_libpath=$lt_cv_aix_libpath
++else
++ if ${lt_cv_aix_libpath_+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+ /* end confdefs.h. */
+
+ int
+@@ -9139,22 +9654,29 @@ main ()
+ _ACEOF
+ if ac_fn_c_try_link "$LINENO"; then :
+
+-lt_aix_libpath_sed='
+- /Import File Strings/,/^$/ {
+- /^0/ {
+- s/^0 *\(.*\)$/\1/
+- p
+- }
+- }'
+-aix_libpath=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
+-# Check for a 64-bit object if we didn't find anything.
+-if test -z "$aix_libpath"; then
+- aix_libpath=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
+-fi
++ lt_aix_libpath_sed='
++ /Import File Strings/,/^$/ {
++ /^0/ {
++ s/^0 *\([^ ]*\) *$/\1/
++ p
++ }
++ }'
++ lt_cv_aix_libpath_=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
++ # Check for a 64-bit object if we didn't find anything.
++ if test -z "$lt_cv_aix_libpath_"; then
++ lt_cv_aix_libpath_=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
++ fi
+ fi
+ rm -f core conftest.err conftest.$ac_objext \
+ conftest$ac_exeext conftest.$ac_ext
+-if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
++ if test -z "$lt_cv_aix_libpath_"; then
++ lt_cv_aix_libpath_="/usr/lib:/lib"
++ fi
++
++fi
++
++ aix_libpath=$lt_cv_aix_libpath_
++fi
+
+ hardcode_libdir_flag_spec='${wl}-blibpath:$libdir:'"$aix_libpath"
+ archive_expsym_cmds='$CC -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags `if test "x${allow_undefined_flag}" != "x"; then func_echo_all "${wl}${allow_undefined_flag}"; else :; fi` '"\${wl}$exp_sym_flag:\$export_symbols $shared_flag"
+@@ -9166,7 +9688,13 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
+ else
+ # Determine the default libpath from the value encoded in an
+ # empty executable.
+- cat confdefs.h - <<_ACEOF >conftest.$ac_ext
++ if test "${lt_cv_aix_libpath+set}" = set; then
++ aix_libpath=$lt_cv_aix_libpath
++else
++ if ${lt_cv_aix_libpath_+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+ /* end confdefs.h. */
+
+ int
+@@ -9179,22 +9707,29 @@ main ()
+ _ACEOF
+ if ac_fn_c_try_link "$LINENO"; then :
+
+-lt_aix_libpath_sed='
+- /Import File Strings/,/^$/ {
+- /^0/ {
+- s/^0 *\(.*\)$/\1/
+- p
+- }
+- }'
+-aix_libpath=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
+-# Check for a 64-bit object if we didn't find anything.
+-if test -z "$aix_libpath"; then
+- aix_libpath=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
+-fi
++ lt_aix_libpath_sed='
++ /Import File Strings/,/^$/ {
++ /^0/ {
++ s/^0 *\([^ ]*\) *$/\1/
++ p
++ }
++ }'
++ lt_cv_aix_libpath_=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
++ # Check for a 64-bit object if we didn't find anything.
++ if test -z "$lt_cv_aix_libpath_"; then
++ lt_cv_aix_libpath_=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
++ fi
+ fi
+ rm -f core conftest.err conftest.$ac_objext \
+ conftest$ac_exeext conftest.$ac_ext
+-if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
++ if test -z "$lt_cv_aix_libpath_"; then
++ lt_cv_aix_libpath_="/usr/lib:/lib"
++ fi
++
++fi
++
++ aix_libpath=$lt_cv_aix_libpath_
++fi
+
+ hardcode_libdir_flag_spec='${wl}-blibpath:$libdir:'"$aix_libpath"
+ # Warning - without using the other run time loading flags,
+@@ -9239,20 +9774,63 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
+ # Microsoft Visual C++.
+ # hardcode_libdir_flag_spec is actually meaningless, as there is
+ # no search path for DLLs.
+- hardcode_libdir_flag_spec=' '
+- allow_undefined_flag=unsupported
+- # Tell ltmain to make .lib files, not .a files.
+- libext=lib
+- # Tell ltmain to make .dll files, not .so files.
+- shrext_cmds=".dll"
+- # FIXME: Setting linknames here is a bad hack.
+- archive_cmds='$CC -o $lib $libobjs $compiler_flags `func_echo_all "$deplibs" | $SED '\''s/ -lc$//'\''` -link -dll~linknames='
+- # The linker will automatically build a .lib file if we build a DLL.
+- old_archive_from_new_cmds='true'
+- # FIXME: Should let the user specify the lib program.
+- old_archive_cmds='lib -OUT:$oldlib$oldobjs$old_deplibs'
+- fix_srcfile_path='`cygpath -w "$srcfile"`'
+- enable_shared_with_static_runtimes=yes
++ case $cc_basename in
++ cl*)
++ # Native MSVC
++ hardcode_libdir_flag_spec=' '
++ allow_undefined_flag=unsupported
++ always_export_symbols=yes
++ file_list_spec='@'
++ # Tell ltmain to make .lib files, not .a files.
++ libext=lib
++ # Tell ltmain to make .dll files, not .so files.
++ shrext_cmds=".dll"
++ # FIXME: Setting linknames here is a bad hack.
++ archive_cmds='$CC -o $output_objdir/$soname $libobjs $compiler_flags $deplibs -Wl,-dll~linknames='
++ archive_expsym_cmds='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then
++ sed -n -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' -e '1\\\!p' < $export_symbols > $output_objdir/$soname.exp;
++ else
++ sed -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' < $export_symbols > $output_objdir/$soname.exp;
++ fi~
++ $CC -o $tool_output_objdir$soname $libobjs $compiler_flags $deplibs "@$tool_output_objdir$soname.exp" -Wl,-DLL,-IMPLIB:"$tool_output_objdir$libname.dll.lib"~
++ linknames='
++ # The linker will not automatically build a static lib if we build a DLL.
++ # _LT_TAGVAR(old_archive_from_new_cmds, )='true'
++ enable_shared_with_static_runtimes=yes
++ export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1,DATA/'\'' | $SED -e '\''/^[AITW][ ]/s/.*[ ]//'\'' | sort | uniq > $export_symbols'
++ # Don't use ranlib
++ old_postinstall_cmds='chmod 644 $oldlib'
++ postlink_cmds='lt_outputfile="@OUTPUT@"~
++ lt_tool_outputfile="@TOOL_OUTPUT@"~
++ case $lt_outputfile in
++ *.exe|*.EXE) ;;
++ *)
++ lt_outputfile="$lt_outputfile.exe"
++ lt_tool_outputfile="$lt_tool_outputfile.exe"
++ ;;
++ esac~
++ if test "$MANIFEST_TOOL" != ":" && test -f "$lt_outputfile.manifest"; then
++ $MANIFEST_TOOL -manifest "$lt_tool_outputfile.manifest" -outputresource:"$lt_tool_outputfile" || exit 1;
++ $RM "$lt_outputfile.manifest";
++ fi'
++ ;;
++ *)
++ # Assume MSVC wrapper
++ hardcode_libdir_flag_spec=' '
++ allow_undefined_flag=unsupported
++ # Tell ltmain to make .lib files, not .a files.
++ libext=lib
++ # Tell ltmain to make .dll files, not .so files.
++ shrext_cmds=".dll"
++ # FIXME: Setting linknames here is a bad hack.
++ archive_cmds='$CC -o $lib $libobjs $compiler_flags `func_echo_all "$deplibs" | $SED '\''s/ -lc$//'\''` -link -dll~linknames='
++ # The linker will automatically build a .lib file if we build a DLL.
++ old_archive_from_new_cmds='true'
++ # FIXME: Should let the user specify the lib program.
++ old_archive_cmds='lib -OUT:$oldlib$oldobjs$old_deplibs'
++ enable_shared_with_static_runtimes=yes
++ ;;
++ esac
+ ;;
+
+ darwin* | rhapsody*)
+@@ -9313,7 +9891,7 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
+
+ # FreeBSD 3 and greater uses gcc -shared to do shared libraries.
+ freebsd* | dragonfly*)
+- archive_cmds='$CC -shared -o $lib $libobjs $deplibs $compiler_flags'
++ archive_cmds='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags'
+ hardcode_libdir_flag_spec='-R$libdir'
+ hardcode_direct=yes
+ hardcode_shlibpath_var=no
+@@ -9321,7 +9899,7 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
+
+ hpux9*)
+ if test "$GCC" = yes; then
+- archive_cmds='$RM $output_objdir/$soname~$CC -shared -fPIC ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $libobjs $deplibs $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib'
++ archive_cmds='$RM $output_objdir/$soname~$CC -shared $pic_flag ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $libobjs $deplibs $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib'
+ else
+ archive_cmds='$RM $output_objdir/$soname~$LD -b +b $install_libdir -o $output_objdir/$soname $libobjs $deplibs $linker_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib'
+ fi
+@@ -9337,7 +9915,7 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
+
+ hpux10*)
+ if test "$GCC" = yes && test "$with_gnu_ld" = no; then
+- archive_cmds='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'
++ archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'
+ else
+ archive_cmds='$LD -b +h $soname +b $install_libdir -o $lib $libobjs $deplibs $linker_flags'
+ fi
+@@ -9361,10 +9939,10 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
+ archive_cmds='$CC -shared ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags'
+ ;;
+ ia64*)
+- archive_cmds='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags'
++ archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags'
+ ;;
+ *)
+- archive_cmds='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'
++ archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'
+ ;;
+ esac
+ else
+@@ -9443,23 +10021,36 @@ fi
+
+ irix5* | irix6* | nonstopux*)
+ if test "$GCC" = yes; then
+- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
++ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
+ # Try to use the -exported_symbol ld option, if it does not
+ # work, assume that -exports_file does not work either and
+ # implicitly export all symbols.
+- save_LDFLAGS="$LDFLAGS"
+- LDFLAGS="$LDFLAGS -shared ${wl}-exported_symbol ${wl}foo ${wl}-update_registry ${wl}/dev/null"
+- cat confdefs.h - <<_ACEOF >conftest.$ac_ext
++ # This should be the same for all languages, so no per-tag cache variable.
++ { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the $host_os linker accepts -exported_symbol" >&5
++$as_echo_n "checking whether the $host_os linker accepts -exported_symbol... " >&6; }
++if ${lt_cv_irix_exported_symbol+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ save_LDFLAGS="$LDFLAGS"
++ LDFLAGS="$LDFLAGS -shared ${wl}-exported_symbol ${wl}foo ${wl}-update_registry ${wl}/dev/null"
++ cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+ /* end confdefs.h. */
+-int foo(void) {}
++int foo (void) { return 0; }
+ _ACEOF
+ if ac_fn_c_try_link "$LINENO"; then :
+- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations ${wl}-exports_file ${wl}$export_symbols -o $lib'
+-
++ lt_cv_irix_exported_symbol=yes
++else
++ lt_cv_irix_exported_symbol=no
+ fi
+ rm -f core conftest.err conftest.$ac_objext \
+ conftest$ac_exeext conftest.$ac_ext
+- LDFLAGS="$save_LDFLAGS"
++ LDFLAGS="$save_LDFLAGS"
++fi
++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_irix_exported_symbol" >&5
++$as_echo "$lt_cv_irix_exported_symbol" >&6; }
++ if test "$lt_cv_irix_exported_symbol" = yes; then
++ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations ${wl}-exports_file ${wl}$export_symbols -o $lib'
++ fi
+ else
+ archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib'
+ archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -exports_file $export_symbols -o $lib'
+@@ -9544,7 +10135,7 @@ rm -f core conftest.err conftest.$ac_objext \
+ osf4* | osf5*) # as osf3* with the addition of -msym flag
+ if test "$GCC" = yes; then
+ allow_undefined_flag=' ${wl}-expect_unresolved ${wl}\*'
+- archive_cmds='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
++ archive_cmds='$CC -shared${allow_undefined_flag} $pic_flag $libobjs $deplibs $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
+ hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir'
+ else
+ allow_undefined_flag=' -expect_unresolved \*'
+@@ -9563,9 +10154,9 @@ rm -f core conftest.err conftest.$ac_objext \
+ no_undefined_flag=' -z defs'
+ if test "$GCC" = yes; then
+ wlarc='${wl}'
+- archive_cmds='$CC -shared ${wl}-z ${wl}text ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags'
++ archive_cmds='$CC -shared $pic_flag ${wl}-z ${wl}text ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags'
+ archive_expsym_cmds='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~
+- $CC -shared ${wl}-z ${wl}text ${wl}-M ${wl}$lib.exp ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp'
++ $CC -shared $pic_flag ${wl}-z ${wl}text ${wl}-M ${wl}$lib.exp ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp'
+ else
+ case `$CC -V 2>&1` in
+ *"Compilers 5.0"*)
+@@ -10141,8 +10732,9 @@ cygwin* | mingw* | pw32* | cegcc*)
+ need_version=no
+ need_lib_prefix=no
+
+- case $GCC,$host_os in
+- yes,cygwin* | yes,mingw* | yes,pw32* | yes,cegcc*)
++ case $GCC,$cc_basename in
++ yes,*)
++ # gcc
+ library_names_spec='$libname.dll.a'
+ # DLL is installed to $(libdir)/../bin by postinstall_cmds
+ postinstall_cmds='base_file=`basename \${file}`~
+@@ -10175,13 +10767,71 @@ cygwin* | mingw* | pw32* | cegcc*)
+ library_names_spec='`echo ${libname} | sed -e 's/^lib/pw/'``echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}'
+ ;;
+ esac
++ dynamic_linker='Win32 ld.exe'
++ ;;
++
++ *,cl*)
++ # Native MSVC
++ libname_spec='$name'
++ soname_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}'
++ library_names_spec='${libname}.dll.lib'
++
++ case $build_os in
++ mingw*)
++ sys_lib_search_path_spec=
++ lt_save_ifs=$IFS
++ IFS=';'
++ for lt_path in $LIB
++ do
++ IFS=$lt_save_ifs
++ # Let DOS variable expansion print the short 8.3 style file name.
++ lt_path=`cd "$lt_path" 2>/dev/null && cmd //C "for %i in (".") do @echo %~si"`
++ sys_lib_search_path_spec="$sys_lib_search_path_spec $lt_path"
++ done
++ IFS=$lt_save_ifs
++ # Convert to MSYS style.
++ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | sed -e 's|\\\\|/|g' -e 's| \\([a-zA-Z]\\):| /\\1|g' -e 's|^ ||'`
++ ;;
++ cygwin*)
++ # Convert to unix form, then to dos form, then back to unix form
++ # but this time dos style (no spaces!) so that the unix form looks
++ # like /cygdrive/c/PROGRA~1:/cygdr...
++ sys_lib_search_path_spec=`cygpath --path --unix "$LIB"`
++ sys_lib_search_path_spec=`cygpath --path --dos "$sys_lib_search_path_spec" 2>/dev/null`
++ sys_lib_search_path_spec=`cygpath --path --unix "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"`
++ ;;
++ *)
++ sys_lib_search_path_spec="$LIB"
++ if $ECHO "$sys_lib_search_path_spec" | $GREP ';[c-zC-Z]:/' >/dev/null; then
++ # It is most probably a Windows format PATH.
++ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e 's/;/ /g'`
++ else
++ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"`
++ fi
++ # FIXME: find the short name or the path components, as spaces are
++ # common. (e.g. "Program Files" -> "PROGRA~1")
++ ;;
++ esac
++
++ # DLL is installed to $(libdir)/../bin by postinstall_cmds
++ postinstall_cmds='base_file=`basename \${file}`~
++ dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\${base_file}'\''i; echo \$dlname'\''`~
++ dldir=$destdir/`dirname \$dlpath`~
++ test -d \$dldir || mkdir -p \$dldir~
++ $install_prog $dir/$dlname \$dldir/$dlname'
++ postuninstall_cmds='dldll=`$SHELL 2>&1 -c '\''. $file; echo \$dlname'\''`~
++ dlpath=$dir/\$dldll~
++ $RM \$dlpath'
++ shlibpath_overrides_runpath=yes
++ dynamic_linker='Win32 link.exe'
+ ;;
+
+ *)
++ # Assume MSVC wrapper
+ library_names_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext} $libname.lib'
++ dynamic_linker='Win32 ld.exe'
+ ;;
+ esac
+- dynamic_linker='Win32 ld.exe'
+ # FIXME: first we should search . and the directory the executable is in
+ shlibpath_var=PATH
+ ;;
+@@ -11083,7 +11733,7 @@ else
+ lt_dlunknown=0; lt_dlno_uscore=1; lt_dlneed_uscore=2
+ lt_status=$lt_dlunknown
+ cat > conftest.$ac_ext <<_LT_EOF
+-#line 11086 "configure"
++#line $LINENO "configure"
+ #include "confdefs.h"
+
+ #if HAVE_DLFCN_H
+@@ -11127,10 +11777,10 @@ else
+ /* When -fvisbility=hidden is used, assume the code has been annotated
+ correspondingly for the symbols needed. */
+ #if defined(__GNUC__) && (((__GNUC__ == 3) && (__GNUC_MINOR__ >= 3)) || (__GNUC__ > 3))
+-void fnord () __attribute__((visibility("default")));
++int fnord () __attribute__((visibility("default")));
+ #endif
+
+-void fnord () { int i=42; }
++int fnord () { return 42; }
+ int main ()
+ {
+ void *self = dlopen (0, LT_DLGLOBAL|LT_DLLAZY_OR_NOW);
+@@ -11189,7 +11839,7 @@ else
+ lt_dlunknown=0; lt_dlno_uscore=1; lt_dlneed_uscore=2
+ lt_status=$lt_dlunknown
+ cat > conftest.$ac_ext <<_LT_EOF
+-#line 11192 "configure"
++#line $LINENO "configure"
+ #include "confdefs.h"
+
+ #if HAVE_DLFCN_H
+@@ -11233,10 +11883,10 @@ else
+ /* When -fvisbility=hidden is used, assume the code has been annotated
+ correspondingly for the symbols needed. */
+ #if defined(__GNUC__) && (((__GNUC__ == 3) && (__GNUC_MINOR__ >= 3)) || (__GNUC__ > 3))
+-void fnord () __attribute__((visibility("default")));
++int fnord () __attribute__((visibility("default")));
+ #endif
+
+-void fnord () { int i=42; }
++int fnord () { return 42; }
+ int main ()
+ {
+ void *self = dlopen (0, LT_DLGLOBAL|LT_DLLAZY_OR_NOW);
+@@ -17390,13 +18040,20 @@ exeext='`$ECHO "$exeext" | $SED "$delay_single_quote_subst"`'
+ lt_unset='`$ECHO "$lt_unset" | $SED "$delay_single_quote_subst"`'
+ lt_SP2NL='`$ECHO "$lt_SP2NL" | $SED "$delay_single_quote_subst"`'
+ lt_NL2SP='`$ECHO "$lt_NL2SP" | $SED "$delay_single_quote_subst"`'
++lt_cv_to_host_file_cmd='`$ECHO "$lt_cv_to_host_file_cmd" | $SED "$delay_single_quote_subst"`'
++lt_cv_to_tool_file_cmd='`$ECHO "$lt_cv_to_tool_file_cmd" | $SED "$delay_single_quote_subst"`'
+ reload_flag='`$ECHO "$reload_flag" | $SED "$delay_single_quote_subst"`'
+ reload_cmds='`$ECHO "$reload_cmds" | $SED "$delay_single_quote_subst"`'
+ OBJDUMP='`$ECHO "$OBJDUMP" | $SED "$delay_single_quote_subst"`'
+ deplibs_check_method='`$ECHO "$deplibs_check_method" | $SED "$delay_single_quote_subst"`'
+ file_magic_cmd='`$ECHO "$file_magic_cmd" | $SED "$delay_single_quote_subst"`'
++file_magic_glob='`$ECHO "$file_magic_glob" | $SED "$delay_single_quote_subst"`'
++want_nocaseglob='`$ECHO "$want_nocaseglob" | $SED "$delay_single_quote_subst"`'
++DLLTOOL='`$ECHO "$DLLTOOL" | $SED "$delay_single_quote_subst"`'
++sharedlib_from_linklib_cmd='`$ECHO "$sharedlib_from_linklib_cmd" | $SED "$delay_single_quote_subst"`'
+ AR='`$ECHO "$AR" | $SED "$delay_single_quote_subst"`'
+ AR_FLAGS='`$ECHO "$AR_FLAGS" | $SED "$delay_single_quote_subst"`'
++archiver_list_spec='`$ECHO "$archiver_list_spec" | $SED "$delay_single_quote_subst"`'
+ STRIP='`$ECHO "$STRIP" | $SED "$delay_single_quote_subst"`'
+ RANLIB='`$ECHO "$RANLIB" | $SED "$delay_single_quote_subst"`'
+ old_postinstall_cmds='`$ECHO "$old_postinstall_cmds" | $SED "$delay_single_quote_subst"`'
+@@ -17411,14 +18068,17 @@ lt_cv_sys_global_symbol_pipe='`$ECHO "$lt_cv_sys_global_symbol_pipe" | $SED "$de
+ lt_cv_sys_global_symbol_to_cdecl='`$ECHO "$lt_cv_sys_global_symbol_to_cdecl" | $SED "$delay_single_quote_subst"`'
+ lt_cv_sys_global_symbol_to_c_name_address='`$ECHO "$lt_cv_sys_global_symbol_to_c_name_address" | $SED "$delay_single_quote_subst"`'
+ lt_cv_sys_global_symbol_to_c_name_address_lib_prefix='`$ECHO "$lt_cv_sys_global_symbol_to_c_name_address_lib_prefix" | $SED "$delay_single_quote_subst"`'
++nm_file_list_spec='`$ECHO "$nm_file_list_spec" | $SED "$delay_single_quote_subst"`'
++lt_sysroot='`$ECHO "$lt_sysroot" | $SED "$delay_single_quote_subst"`'
+ objdir='`$ECHO "$objdir" | $SED "$delay_single_quote_subst"`'
+ MAGIC_CMD='`$ECHO "$MAGIC_CMD" | $SED "$delay_single_quote_subst"`'
+ lt_prog_compiler_no_builtin_flag='`$ECHO "$lt_prog_compiler_no_builtin_flag" | $SED "$delay_single_quote_subst"`'
+-lt_prog_compiler_wl='`$ECHO "$lt_prog_compiler_wl" | $SED "$delay_single_quote_subst"`'
+ lt_prog_compiler_pic='`$ECHO "$lt_prog_compiler_pic" | $SED "$delay_single_quote_subst"`'
++lt_prog_compiler_wl='`$ECHO "$lt_prog_compiler_wl" | $SED "$delay_single_quote_subst"`'
+ lt_prog_compiler_static='`$ECHO "$lt_prog_compiler_static" | $SED "$delay_single_quote_subst"`'
+ lt_cv_prog_compiler_c_o='`$ECHO "$lt_cv_prog_compiler_c_o" | $SED "$delay_single_quote_subst"`'
+ need_locks='`$ECHO "$need_locks" | $SED "$delay_single_quote_subst"`'
++MANIFEST_TOOL='`$ECHO "$MANIFEST_TOOL" | $SED "$delay_single_quote_subst"`'
+ DSYMUTIL='`$ECHO "$DSYMUTIL" | $SED "$delay_single_quote_subst"`'
+ NMEDIT='`$ECHO "$NMEDIT" | $SED "$delay_single_quote_subst"`'
+ LIPO='`$ECHO "$LIPO" | $SED "$delay_single_quote_subst"`'
+@@ -17451,12 +18111,12 @@ hardcode_shlibpath_var='`$ECHO "$hardcode_shlibpath_var" | $SED "$delay_single_q
+ hardcode_automatic='`$ECHO "$hardcode_automatic" | $SED "$delay_single_quote_subst"`'
+ inherit_rpath='`$ECHO "$inherit_rpath" | $SED "$delay_single_quote_subst"`'
+ link_all_deplibs='`$ECHO "$link_all_deplibs" | $SED "$delay_single_quote_subst"`'
+-fix_srcfile_path='`$ECHO "$fix_srcfile_path" | $SED "$delay_single_quote_subst"`'
+ always_export_symbols='`$ECHO "$always_export_symbols" | $SED "$delay_single_quote_subst"`'
+ export_symbols_cmds='`$ECHO "$export_symbols_cmds" | $SED "$delay_single_quote_subst"`'
+ exclude_expsyms='`$ECHO "$exclude_expsyms" | $SED "$delay_single_quote_subst"`'
+ include_expsyms='`$ECHO "$include_expsyms" | $SED "$delay_single_quote_subst"`'
+ prelink_cmds='`$ECHO "$prelink_cmds" | $SED "$delay_single_quote_subst"`'
++postlink_cmds='`$ECHO "$postlink_cmds" | $SED "$delay_single_quote_subst"`'
+ file_list_spec='`$ECHO "$file_list_spec" | $SED "$delay_single_quote_subst"`'
+ variables_saved_for_relink='`$ECHO "$variables_saved_for_relink" | $SED "$delay_single_quote_subst"`'
+ need_lib_prefix='`$ECHO "$need_lib_prefix" | $SED "$delay_single_quote_subst"`'
+@@ -17511,8 +18171,13 @@ reload_flag \
+ OBJDUMP \
+ deplibs_check_method \
+ file_magic_cmd \
++file_magic_glob \
++want_nocaseglob \
++DLLTOOL \
++sharedlib_from_linklib_cmd \
+ AR \
+ AR_FLAGS \
++archiver_list_spec \
+ STRIP \
+ RANLIB \
+ CC \
+@@ -17522,12 +18187,14 @@ lt_cv_sys_global_symbol_pipe \
+ lt_cv_sys_global_symbol_to_cdecl \
+ lt_cv_sys_global_symbol_to_c_name_address \
+ lt_cv_sys_global_symbol_to_c_name_address_lib_prefix \
++nm_file_list_spec \
+ lt_prog_compiler_no_builtin_flag \
+-lt_prog_compiler_wl \
+ lt_prog_compiler_pic \
++lt_prog_compiler_wl \
+ lt_prog_compiler_static \
+ lt_cv_prog_compiler_c_o \
+ need_locks \
++MANIFEST_TOOL \
+ DSYMUTIL \
+ NMEDIT \
+ LIPO \
+@@ -17543,7 +18210,6 @@ no_undefined_flag \
+ hardcode_libdir_flag_spec \
+ hardcode_libdir_flag_spec_ld \
+ hardcode_libdir_separator \
+-fix_srcfile_path \
+ exclude_expsyms \
+ include_expsyms \
+ file_list_spec \
+@@ -17579,6 +18245,7 @@ module_cmds \
+ module_expsym_cmds \
+ export_symbols_cmds \
+ prelink_cmds \
++postlink_cmds \
+ postinstall_cmds \
+ postuninstall_cmds \
+ finish_cmds \
+@@ -18352,7 +19019,8 @@ $as_echo X"$file" |
+ # NOTE: Changes made to this file will be lost: look at ltmain.sh.
+ #
+ # Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005,
+-# 2006, 2007, 2008, 2009 Free Software Foundation, Inc.
++# 2006, 2007, 2008, 2009, 2010 Free Software Foundation,
++# Inc.
+ # Written by Gordon Matzigkeit, 1996
+ #
+ # This file is part of GNU Libtool.
+@@ -18455,19 +19123,42 @@ SP2NL=$lt_lt_SP2NL
+ # turn newlines into spaces.
+ NL2SP=$lt_lt_NL2SP
+
++# convert \$build file names to \$host format.
++to_host_file_cmd=$lt_cv_to_host_file_cmd
++
++# convert \$build files to toolchain format.
++to_tool_file_cmd=$lt_cv_to_tool_file_cmd
++
+ # An object symbol dumper.
+ OBJDUMP=$lt_OBJDUMP
+
+ # Method to check whether dependent libraries are shared objects.
+ deplibs_check_method=$lt_deplibs_check_method
+
+-# Command to use when deplibs_check_method == "file_magic".
++# Command to use when deplibs_check_method = "file_magic".
+ file_magic_cmd=$lt_file_magic_cmd
+
++# How to find potential files when deplibs_check_method = "file_magic".
++file_magic_glob=$lt_file_magic_glob
++
++# Find potential files using nocaseglob when deplibs_check_method = "file_magic".
++want_nocaseglob=$lt_want_nocaseglob
++
++# DLL creation program.
++DLLTOOL=$lt_DLLTOOL
++
++# Command to associate shared and link libraries.
++sharedlib_from_linklib_cmd=$lt_sharedlib_from_linklib_cmd
++
+ # The archiver.
+ AR=$lt_AR
++
++# Flags to create an archive.
+ AR_FLAGS=$lt_AR_FLAGS
+
++# How to feed a file listing to the archiver.
++archiver_list_spec=$lt_archiver_list_spec
++
+ # A symbol stripping program.
+ STRIP=$lt_STRIP
+
+@@ -18497,6 +19188,12 @@ global_symbol_to_c_name_address=$lt_lt_cv_sys_global_symbol_to_c_name_address
+ # Transform the output of nm in a C name address pair when lib prefix is needed.
+ global_symbol_to_c_name_address_lib_prefix=$lt_lt_cv_sys_global_symbol_to_c_name_address_lib_prefix
+
++# Specify filename containing input files for \$NM.
++nm_file_list_spec=$lt_nm_file_list_spec
++
++# The root where to search for dependent libraries,and in which our libraries should be installed.
++lt_sysroot=$lt_sysroot
++
+ # The name of the directory that contains temporary libtool files.
+ objdir=$objdir
+
+@@ -18506,6 +19203,9 @@ MAGIC_CMD=$MAGIC_CMD
+ # Must we lock files when doing compilation?
+ need_locks=$lt_need_locks
+
++# Manifest tool.
++MANIFEST_TOOL=$lt_MANIFEST_TOOL
++
+ # Tool to manipulate archived DWARF debug symbol files on Mac OS X.
+ DSYMUTIL=$lt_DSYMUTIL
+
+@@ -18620,12 +19320,12 @@ with_gcc=$GCC
+ # Compiler flag to turn off builtin functions.
+ no_builtin_flag=$lt_lt_prog_compiler_no_builtin_flag
+
+-# How to pass a linker flag through the compiler.
+-wl=$lt_lt_prog_compiler_wl
+-
+ # Additional compiler flags for building library objects.
+ pic_flag=$lt_lt_prog_compiler_pic
+
++# How to pass a linker flag through the compiler.
++wl=$lt_lt_prog_compiler_wl
++
+ # Compiler flag to prevent dynamic linking.
+ link_static_flag=$lt_lt_prog_compiler_static
+
+@@ -18712,9 +19412,6 @@ inherit_rpath=$inherit_rpath
+ # Whether libtool must link a program against all its dependency libraries.
+ link_all_deplibs=$link_all_deplibs
+
+-# Fix the shell variable \$srcfile for the compiler.
+-fix_srcfile_path=$lt_fix_srcfile_path
+-
+ # Set to "yes" if exported symbols are required.
+ always_export_symbols=$always_export_symbols
+
+@@ -18730,6 +19427,9 @@ include_expsyms=$lt_include_expsyms
+ # Commands necessary for linking programs (against libraries) with templates.
+ prelink_cmds=$lt_prelink_cmds
+
++# Commands necessary for finishing linking programs.
++postlink_cmds=$lt_postlink_cmds
++
+ # Specify filename containing input files.
+ file_list_spec=$lt_file_list_spec
+
+@@ -18762,210 +19462,169 @@ ltmain="$ac_aux_dir/ltmain.sh"
+ # if finds mixed CR/LF and LF-only lines. Since sed operates in
+ # text mode, it properly converts lines to CR/LF. This bash problem
+ # is reportedly fixed, but why not run on old versions too?
+- sed '/^# Generated shell functions inserted here/q' "$ltmain" >> "$cfgfile" \
+- || (rm -f "$cfgfile"; exit 1)
+-
+- case $xsi_shell in
+- yes)
+- cat << \_LT_EOF >> "$cfgfile"
+-
+-# func_dirname file append nondir_replacement
+-# Compute the dirname of FILE. If nonempty, add APPEND to the result,
+-# otherwise set result to NONDIR_REPLACEMENT.
+-func_dirname ()
+-{
+- case ${1} in
+- */*) func_dirname_result="${1%/*}${2}" ;;
+- * ) func_dirname_result="${3}" ;;
+- esac
+-}
+-
+-# func_basename file
+-func_basename ()
+-{
+- func_basename_result="${1##*/}"
+-}
+-
+-# func_dirname_and_basename file append nondir_replacement
+-# perform func_basename and func_dirname in a single function
+-# call:
+-# dirname: Compute the dirname of FILE. If nonempty,
+-# add APPEND to the result, otherwise set result
+-# to NONDIR_REPLACEMENT.
+-# value returned in "$func_dirname_result"
+-# basename: Compute filename of FILE.
+-# value retuned in "$func_basename_result"
+-# Implementation must be kept synchronized with func_dirname
+-# and func_basename. For efficiency, we do not delegate to
+-# those functions but instead duplicate the functionality here.
+-func_dirname_and_basename ()
+-{
+- case ${1} in
+- */*) func_dirname_result="${1%/*}${2}" ;;
+- * ) func_dirname_result="${3}" ;;
+- esac
+- func_basename_result="${1##*/}"
+-}
+-
+-# func_stripname prefix suffix name
+-# strip PREFIX and SUFFIX off of NAME.
+-# PREFIX and SUFFIX must not contain globbing or regex special
+-# characters, hashes, percent signs, but SUFFIX may contain a leading
+-# dot (in which case that matches only a dot).
+-func_stripname ()
+-{
+- # pdksh 5.2.14 does not do ${X%$Y} correctly if both X and Y are
+- # positional parameters, so assign one to ordinary parameter first.
+- func_stripname_result=${3}
+- func_stripname_result=${func_stripname_result#"${1}"}
+- func_stripname_result=${func_stripname_result%"${2}"}
+-}
+-
+-# func_opt_split
+-func_opt_split ()
+-{
+- func_opt_split_opt=${1%%=*}
+- func_opt_split_arg=${1#*=}
+-}
+-
+-# func_lo2o object
+-func_lo2o ()
+-{
+- case ${1} in
+- *.lo) func_lo2o_result=${1%.lo}.${objext} ;;
+- *) func_lo2o_result=${1} ;;
+- esac
+-}
+-
+-# func_xform libobj-or-source
+-func_xform ()
+-{
+- func_xform_result=${1%.*}.lo
+-}
+-
+-# func_arith arithmetic-term...
+-func_arith ()
+-{
+- func_arith_result=$(( $* ))
+-}
+-
+-# func_len string
+-# STRING may not start with a hyphen.
+-func_len ()
+-{
+- func_len_result=${#1}
+-}
+-
+-_LT_EOF
+- ;;
+- *) # Bourne compatible functions.
+- cat << \_LT_EOF >> "$cfgfile"
+-
+-# func_dirname file append nondir_replacement
+-# Compute the dirname of FILE. If nonempty, add APPEND to the result,
+-# otherwise set result to NONDIR_REPLACEMENT.
+-func_dirname ()
+-{
+- # Extract subdirectory from the argument.
+- func_dirname_result=`$ECHO "${1}" | $SED "$dirname"`
+- if test "X$func_dirname_result" = "X${1}"; then
+- func_dirname_result="${3}"
+- else
+- func_dirname_result="$func_dirname_result${2}"
+- fi
+-}
+-
+-# func_basename file
+-func_basename ()
+-{
+- func_basename_result=`$ECHO "${1}" | $SED "$basename"`
+-}
+-
+-
+-# func_stripname prefix suffix name
+-# strip PREFIX and SUFFIX off of NAME.
+-# PREFIX and SUFFIX must not contain globbing or regex special
+-# characters, hashes, percent signs, but SUFFIX may contain a leading
+-# dot (in which case that matches only a dot).
+-# func_strip_suffix prefix name
+-func_stripname ()
+-{
+- case ${2} in
+- .*) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%\\\\${2}\$%%"`;;
+- *) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%${2}\$%%"`;;
+- esac
+-}
+-
+-# sed scripts:
+-my_sed_long_opt='1s/^\(-[^=]*\)=.*/\1/;q'
+-my_sed_long_arg='1s/^-[^=]*=//'
+-
+-# func_opt_split
+-func_opt_split ()
+-{
+- func_opt_split_opt=`$ECHO "${1}" | $SED "$my_sed_long_opt"`
+- func_opt_split_arg=`$ECHO "${1}" | $SED "$my_sed_long_arg"`
+-}
+-
+-# func_lo2o object
+-func_lo2o ()
+-{
+- func_lo2o_result=`$ECHO "${1}" | $SED "$lo2o"`
+-}
+-
+-# func_xform libobj-or-source
+-func_xform ()
+-{
+- func_xform_result=`$ECHO "${1}" | $SED 's/\.[^.]*$/.lo/'`
+-}
+-
+-# func_arith arithmetic-term...
+-func_arith ()
+-{
+- func_arith_result=`expr "$@"`
+-}
+-
+-# func_len string
+-# STRING may not start with a hyphen.
+-func_len ()
+-{
+- func_len_result=`expr "$1" : ".*" 2>/dev/null || echo $max_cmd_len`
+-}
+-
+-_LT_EOF
+-esac
+-
+-case $lt_shell_append in
+- yes)
+- cat << \_LT_EOF >> "$cfgfile"
+-
+-# func_append var value
+-# Append VALUE to the end of shell variable VAR.
+-func_append ()
+-{
+- eval "$1+=\$2"
+-}
+-_LT_EOF
+- ;;
+- *)
+- cat << \_LT_EOF >> "$cfgfile"
+-
+-# func_append var value
+-# Append VALUE to the end of shell variable VAR.
+-func_append ()
+-{
+- eval "$1=\$$1\$2"
+-}
+-
+-_LT_EOF
+- ;;
+- esac
+-
+-
+- sed -n '/^# Generated shell functions inserted here/,$p' "$ltmain" >> "$cfgfile" \
+- || (rm -f "$cfgfile"; exit 1)
+-
+- mv -f "$cfgfile" "$ofile" ||
++ sed '$q' "$ltmain" >> "$cfgfile" \
++ || (rm -f "$cfgfile"; exit 1)
++
++ if test x"$xsi_shell" = xyes; then
++ sed -e '/^func_dirname ()$/,/^} # func_dirname /c\
++func_dirname ()\
++{\
++\ case ${1} in\
++\ */*) func_dirname_result="${1%/*}${2}" ;;\
++\ * ) func_dirname_result="${3}" ;;\
++\ esac\
++} # Extended-shell func_dirname implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++
++ sed -e '/^func_basename ()$/,/^} # func_basename /c\
++func_basename ()\
++{\
++\ func_basename_result="${1##*/}"\
++} # Extended-shell func_basename implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++
++ sed -e '/^func_dirname_and_basename ()$/,/^} # func_dirname_and_basename /c\
++func_dirname_and_basename ()\
++{\
++\ case ${1} in\
++\ */*) func_dirname_result="${1%/*}${2}" ;;\
++\ * ) func_dirname_result="${3}" ;;\
++\ esac\
++\ func_basename_result="${1##*/}"\
++} # Extended-shell func_dirname_and_basename implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++
++ sed -e '/^func_stripname ()$/,/^} # func_stripname /c\
++func_stripname ()\
++{\
++\ # pdksh 5.2.14 does not do ${X%$Y} correctly if both X and Y are\
++\ # positional parameters, so assign one to ordinary parameter first.\
++\ func_stripname_result=${3}\
++\ func_stripname_result=${func_stripname_result#"${1}"}\
++\ func_stripname_result=${func_stripname_result%"${2}"}\
++} # Extended-shell func_stripname implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++
++ sed -e '/^func_split_long_opt ()$/,/^} # func_split_long_opt /c\
++func_split_long_opt ()\
++{\
++\ func_split_long_opt_name=${1%%=*}\
++\ func_split_long_opt_arg=${1#*=}\
++} # Extended-shell func_split_long_opt implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++
++ sed -e '/^func_split_short_opt ()$/,/^} # func_split_short_opt /c\
++func_split_short_opt ()\
++{\
++\ func_split_short_opt_arg=${1#??}\
++\ func_split_short_opt_name=${1%"$func_split_short_opt_arg"}\
++} # Extended-shell func_split_short_opt implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++
++ sed -e '/^func_lo2o ()$/,/^} # func_lo2o /c\
++func_lo2o ()\
++{\
++\ case ${1} in\
++\ *.lo) func_lo2o_result=${1%.lo}.${objext} ;;\
++\ *) func_lo2o_result=${1} ;;\
++\ esac\
++} # Extended-shell func_lo2o implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++
++ sed -e '/^func_xform ()$/,/^} # func_xform /c\
++func_xform ()\
++{\
++ func_xform_result=${1%.*}.lo\
++} # Extended-shell func_xform implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++
++ sed -e '/^func_arith ()$/,/^} # func_arith /c\
++func_arith ()\
++{\
++ func_arith_result=$(( $* ))\
++} # Extended-shell func_arith implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++
++ sed -e '/^func_len ()$/,/^} # func_len /c\
++func_len ()\
++{\
++ func_len_result=${#1}\
++} # Extended-shell func_len implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++fi
++
++if test x"$lt_shell_append" = xyes; then
++ sed -e '/^func_append ()$/,/^} # func_append /c\
++func_append ()\
++{\
++ eval "${1}+=\\${2}"\
++} # Extended-shell func_append implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++
++ sed -e '/^func_append_quoted ()$/,/^} # func_append_quoted /c\
++func_append_quoted ()\
++{\
++\ func_quote_for_eval "${2}"\
++\ eval "${1}+=\\\\ \\$func_quote_for_eval_result"\
++} # Extended-shell func_append_quoted implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++
++ # Save a `func_append' function call where possible by direct use of '+='
++ sed -e 's%func_append \([a-zA-Z_]\{1,\}\) "%\1+="%g' $cfgfile > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++ test 0 -eq $? || _lt_function_replace_fail=:
++else
++ # Save a `func_append' function call even when '+=' is not available
++ sed -e 's%func_append \([a-zA-Z_]\{1,\}\) "%\1="$\1%g' $cfgfile > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++ test 0 -eq $? || _lt_function_replace_fail=:
++fi
++
++if test x"$_lt_function_replace_fail" = x":"; then
++ { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: Unable to substitute extended shell functions in $ofile" >&5
++$as_echo "$as_me: WARNING: Unable to substitute extended shell functions in $ofile" >&2;}
++fi
++
++
++ mv -f "$cfgfile" "$ofile" ||
+ (rm -f "$ofile" && cp "$cfgfile" "$ofile" && rm -f "$cfgfile")
+ chmod +x "$ofile"
+
+diff --git a/gas/Makefile.in b/gas/Makefile.in
+index bc25765cb5b..c481d96a05a 100644
+--- a/gas/Makefile.in
++++ b/gas/Makefile.in
+@@ -384,6 +384,7 @@ CYGPATH_W = @CYGPATH_W@
+ DATADIRNAME = @DATADIRNAME@
+ DEFS = @DEFS@
+ DEPDIR = @DEPDIR@
++DLLTOOL = @DLLTOOL@
+ DSYMUTIL = @DSYMUTIL@
+ DUMPBIN = @DUMPBIN@
+ ECHO_C = @ECHO_C@
+@@ -425,6 +426,7 @@ LTLIBINTL = @LTLIBINTL@
+ LTLIBOBJS = @LTLIBOBJS@
+ MAINT = @MAINT@
+ MAKEINFO = @MAKEINFO@
++MANIFEST_TOOL = @MANIFEST_TOOL@
+ MKDIR_P = @MKDIR_P@
+ MKINSTALLDIRS = @MKINSTALLDIRS@
+ MSGFMT = @MSGFMT@
+@@ -468,6 +470,7 @@ abs_builddir = @abs_builddir@
+ abs_srcdir = @abs_srcdir@
+ abs_top_builddir = @abs_top_builddir@
+ abs_top_srcdir = @abs_top_srcdir@
++ac_ct_AR = @ac_ct_AR@
+ ac_ct_CC = @ac_ct_CC@
+ ac_ct_DUMPBIN = @ac_ct_DUMPBIN@
+ am__include = @am__include@
+diff --git a/gas/configure b/gas/configure
+index 5f8c8493589..998727b6108 100755
+--- a/gas/configure
++++ b/gas/configure
+@@ -692,8 +692,11 @@ OTOOL
+ LIPO
+ NMEDIT
+ DSYMUTIL
++MANIFEST_TOOL
+ RANLIB
++ac_ct_AR
+ AR
++DLLTOOL
+ OBJDUMP
+ LN_S
+ NM
+@@ -810,6 +813,7 @@ enable_static
+ with_pic
+ enable_fast_install
+ with_gnu_ld
++with_libtool_sysroot
+ enable_libtool_lock
+ enable_plugins
+ enable_largefile
+@@ -1517,6 +1521,8 @@ Optional Packages:
+ --with-pic try to use only PIC/non-PIC objects [default=use
+ both]
+ --with-gnu-ld assume the C compiler uses GNU ld [default=no]
++ --with-libtool-sysroot=DIR Search for dependent libraries within DIR
++ (or the compiler's sysroot if not specified).
+ --with-cpu=CPU default cpu variant is CPU (currently only supported
+ on ARC)
+ --with-gnu-ld assume the C compiler uses GNU ld default=no
+@@ -4652,8 +4658,8 @@ esac
+
+
+
+-macro_version='2.2.7a'
+-macro_revision='1.3134'
++macro_version='2.4'
++macro_revision='1.3293'
+
+
+
+@@ -4693,7 +4699,7 @@ ECHO=$ECHO$ECHO$ECHO$ECHO$ECHO$ECHO
+ { $as_echo "$as_me:${as_lineno-$LINENO}: checking how to print strings" >&5
+ $as_echo_n "checking how to print strings... " >&6; }
+ # Test print first, because it will be a builtin if present.
+-if test "X`print -r -- -n 2>/dev/null`" = X-n && \
++if test "X`( print -r -- -n ) 2>/dev/null`" = X-n && \
+ test "X`print -r -- $ECHO 2>/dev/null`" = "X$ECHO"; then
+ ECHO='print -r --'
+ elif test "X`printf %s $ECHO 2>/dev/null`" = "X$ECHO"; then
+@@ -5386,8 +5392,8 @@ $as_echo_n "checking whether the shell understands some XSI constructs... " >&6;
+ # Try some XSI features
+ xsi_shell=no
+ ( _lt_dummy="a/b/c"
+- test "${_lt_dummy##*/},${_lt_dummy%/*},"${_lt_dummy%"$_lt_dummy"}, \
+- = c,a/b,, \
++ test "${_lt_dummy##*/},${_lt_dummy%/*},${_lt_dummy#??}"${_lt_dummy%"$_lt_dummy"}, \
++ = c,a/b,b/c, \
+ && eval 'test $(( 1 + 1 )) -eq 2 \
+ && test "${#_lt_dummy}" -eq 5' ) >/dev/null 2>&1 \
+ && xsi_shell=yes
+@@ -5436,6 +5442,80 @@ esac
+
+
+
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to convert $build file names to $host format" >&5
++$as_echo_n "checking how to convert $build file names to $host format... " >&6; }
++if ${lt_cv_to_host_file_cmd+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ case $host in
++ *-*-mingw* )
++ case $build in
++ *-*-mingw* ) # actually msys
++ lt_cv_to_host_file_cmd=func_convert_file_msys_to_w32
++ ;;
++ *-*-cygwin* )
++ lt_cv_to_host_file_cmd=func_convert_file_cygwin_to_w32
++ ;;
++ * ) # otherwise, assume *nix
++ lt_cv_to_host_file_cmd=func_convert_file_nix_to_w32
++ ;;
++ esac
++ ;;
++ *-*-cygwin* )
++ case $build in
++ *-*-mingw* ) # actually msys
++ lt_cv_to_host_file_cmd=func_convert_file_msys_to_cygwin
++ ;;
++ *-*-cygwin* )
++ lt_cv_to_host_file_cmd=func_convert_file_noop
++ ;;
++ * ) # otherwise, assume *nix
++ lt_cv_to_host_file_cmd=func_convert_file_nix_to_cygwin
++ ;;
++ esac
++ ;;
++ * ) # unhandled hosts (and "normal" native builds)
++ lt_cv_to_host_file_cmd=func_convert_file_noop
++ ;;
++esac
++
++fi
++
++to_host_file_cmd=$lt_cv_to_host_file_cmd
++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_to_host_file_cmd" >&5
++$as_echo "$lt_cv_to_host_file_cmd" >&6; }
++
++
++
++
++
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to convert $build file names to toolchain format" >&5
++$as_echo_n "checking how to convert $build file names to toolchain format... " >&6; }
++if ${lt_cv_to_tool_file_cmd+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ #assume ordinary cross tools, or native build.
++lt_cv_to_tool_file_cmd=func_convert_file_noop
++case $host in
++ *-*-mingw* )
++ case $build in
++ *-*-mingw* ) # actually msys
++ lt_cv_to_tool_file_cmd=func_convert_file_msys_to_w32
++ ;;
++ esac
++ ;;
++esac
++
++fi
++
++to_tool_file_cmd=$lt_cv_to_tool_file_cmd
++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_to_tool_file_cmd" >&5
++$as_echo "$lt_cv_to_tool_file_cmd" >&6; }
++
++
++
++
++
+ { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $LD option to reload object files" >&5
+ $as_echo_n "checking for $LD option to reload object files... " >&6; }
+ if ${lt_cv_ld_reload_flag+:} false; then :
+@@ -5452,6 +5532,11 @@ case $reload_flag in
+ esac
+ reload_cmds='$LD$reload_flag -o $output$reload_objs'
+ case $host_os in
++ cygwin* | mingw* | pw32* | cegcc*)
++ if test "$GCC" != yes; then
++ reload_cmds=false
++ fi
++ ;;
+ darwin*)
+ if test "$GCC" = yes; then
+ reload_cmds='$LTCC $LTCFLAGS -nostdlib ${wl}-r -o $output$reload_objs'
+@@ -5620,7 +5705,8 @@ mingw* | pw32*)
+ lt_cv_deplibs_check_method='file_magic ^x86 archive import|^x86 DLL'
+ lt_cv_file_magic_cmd='func_win32_libid'
+ else
+- lt_cv_deplibs_check_method='file_magic file format pei*-i386(.*architecture: i386)?'
++ # Keep this pattern in sync with the one in func_win32_libid.
++ lt_cv_deplibs_check_method='file_magic file format (pei*-i386(.*architecture: i386)?|pe-arm-wince|pe-x86-64)'
+ lt_cv_file_magic_cmd='$OBJDUMP -f'
+ fi
+ ;;
+@@ -5779,6 +5865,21 @@ esac
+ fi
+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_deplibs_check_method" >&5
+ $as_echo "$lt_cv_deplibs_check_method" >&6; }
++
++file_magic_glob=
++want_nocaseglob=no
++if test "$build" = "$host"; then
++ case $host_os in
++ mingw* | pw32*)
++ if ( shopt | grep nocaseglob ) >/dev/null 2>&1; then
++ want_nocaseglob=yes
++ else
++ file_magic_glob=`echo aAbBcCdDeEfFgGhHiIjJkKlLmMnNoOpPqQrRsStTuUvVwWxXyYzZ | $SED -e "s/\(..\)/s\/[\1]\/[\1]\/g;/g"`
++ fi
++ ;;
++ esac
++fi
++
+ file_magic_cmd=$lt_cv_file_magic_cmd
+ deplibs_check_method=$lt_cv_deplibs_check_method
+ test -z "$deplibs_check_method" && deplibs_check_method=unknown
+@@ -5794,6 +5895,157 @@ test -z "$deplibs_check_method" && deplibs_check_method=unknown
+
+
+
++
++
++
++
++
++
++
++
++
++
++if test -n "$ac_tool_prefix"; then
++ # Extract the first word of "${ac_tool_prefix}dlltool", so it can be a program name with args.
++set dummy ${ac_tool_prefix}dlltool; ac_word=$2
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
++$as_echo_n "checking for $ac_word... " >&6; }
++if ${ac_cv_prog_DLLTOOL+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ if test -n "$DLLTOOL"; then
++ ac_cv_prog_DLLTOOL="$DLLTOOL" # Let the user override the test.
++else
++as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
++for as_dir in $PATH
++do
++ IFS=$as_save_IFS
++ test -z "$as_dir" && as_dir=.
++ for ac_exec_ext in '' $ac_executable_extensions; do
++ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
++ ac_cv_prog_DLLTOOL="${ac_tool_prefix}dlltool"
++ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
++ break 2
++ fi
++done
++ done
++IFS=$as_save_IFS
++
++fi
++fi
++DLLTOOL=$ac_cv_prog_DLLTOOL
++if test -n "$DLLTOOL"; then
++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $DLLTOOL" >&5
++$as_echo "$DLLTOOL" >&6; }
++else
++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
++$as_echo "no" >&6; }
++fi
++
++
++fi
++if test -z "$ac_cv_prog_DLLTOOL"; then
++ ac_ct_DLLTOOL=$DLLTOOL
++ # Extract the first word of "dlltool", so it can be a program name with args.
++set dummy dlltool; ac_word=$2
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
++$as_echo_n "checking for $ac_word... " >&6; }
++if ${ac_cv_prog_ac_ct_DLLTOOL+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ if test -n "$ac_ct_DLLTOOL"; then
++ ac_cv_prog_ac_ct_DLLTOOL="$ac_ct_DLLTOOL" # Let the user override the test.
++else
++as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
++for as_dir in $PATH
++do
++ IFS=$as_save_IFS
++ test -z "$as_dir" && as_dir=.
++ for ac_exec_ext in '' $ac_executable_extensions; do
++ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
++ ac_cv_prog_ac_ct_DLLTOOL="dlltool"
++ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
++ break 2
++ fi
++done
++ done
++IFS=$as_save_IFS
++
++fi
++fi
++ac_ct_DLLTOOL=$ac_cv_prog_ac_ct_DLLTOOL
++if test -n "$ac_ct_DLLTOOL"; then
++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_DLLTOOL" >&5
++$as_echo "$ac_ct_DLLTOOL" >&6; }
++else
++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
++$as_echo "no" >&6; }
++fi
++
++ if test "x$ac_ct_DLLTOOL" = x; then
++ DLLTOOL="false"
++ else
++ case $cross_compiling:$ac_tool_warned in
++yes:)
++{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
++$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
++ac_tool_warned=yes ;;
++esac
++ DLLTOOL=$ac_ct_DLLTOOL
++ fi
++else
++ DLLTOOL="$ac_cv_prog_DLLTOOL"
++fi
++
++test -z "$DLLTOOL" && DLLTOOL=dlltool
++
++
++
++
++
++
++
++
++
++
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to associate runtime and link libraries" >&5
++$as_echo_n "checking how to associate runtime and link libraries... " >&6; }
++if ${lt_cv_sharedlib_from_linklib_cmd+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ lt_cv_sharedlib_from_linklib_cmd='unknown'
++
++case $host_os in
++cygwin* | mingw* | pw32* | cegcc*)
++ # two different shell functions defined in ltmain.sh
++ # decide which to use based on capabilities of $DLLTOOL
++ case `$DLLTOOL --help 2>&1` in
++ *--identify-strict*)
++ lt_cv_sharedlib_from_linklib_cmd=func_cygming_dll_for_implib
++ ;;
++ *)
++ lt_cv_sharedlib_from_linklib_cmd=func_cygming_dll_for_implib_fallback
++ ;;
++ esac
++ ;;
++*)
++ # fallback: assume linklib IS sharedlib
++ lt_cv_sharedlib_from_linklib_cmd="$ECHO"
++ ;;
++esac
++
++fi
++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_sharedlib_from_linklib_cmd" >&5
++$as_echo "$lt_cv_sharedlib_from_linklib_cmd" >&6; }
++sharedlib_from_linklib_cmd=$lt_cv_sharedlib_from_linklib_cmd
++test -z "$sharedlib_from_linklib_cmd" && sharedlib_from_linklib_cmd=$ECHO
++
++
++
++
++
++
++
+ plugin_option=
+ plugin_names="liblto_plugin.so liblto_plugin-0.dll cyglto_plugin-0.dll"
+ for plugin in $plugin_names; do
+@@ -5808,8 +6060,10 @@ for plugin in $plugin_names; do
+ done
+
+ if test -n "$ac_tool_prefix"; then
+- # Extract the first word of "${ac_tool_prefix}ar", so it can be a program name with args.
+-set dummy ${ac_tool_prefix}ar; ac_word=$2
++ for ac_prog in ar
++ do
++ # Extract the first word of "$ac_tool_prefix$ac_prog", so it can be a program name with args.
++set dummy $ac_tool_prefix$ac_prog; ac_word=$2
+ { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+ $as_echo_n "checking for $ac_word... " >&6; }
+ if ${ac_cv_prog_AR+:} false; then :
+@@ -5825,7 +6079,7 @@ do
+ test -z "$as_dir" && as_dir=.
+ for ac_exec_ext in '' $ac_executable_extensions; do
+ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
+- ac_cv_prog_AR="${ac_tool_prefix}ar"
++ ac_cv_prog_AR="$ac_tool_prefix$ac_prog"
+ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+ break 2
+ fi
+@@ -5845,11 +6099,15 @@ $as_echo "no" >&6; }
+ fi
+
+
++ test -n "$AR" && break
++ done
+ fi
+-if test -z "$ac_cv_prog_AR"; then
++if test -z "$AR"; then
+ ac_ct_AR=$AR
+- # Extract the first word of "ar", so it can be a program name with args.
+-set dummy ar; ac_word=$2
++ for ac_prog in ar
++do
++ # Extract the first word of "$ac_prog", so it can be a program name with args.
++set dummy $ac_prog; ac_word=$2
+ { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+ $as_echo_n "checking for $ac_word... " >&6; }
+ if ${ac_cv_prog_ac_ct_AR+:} false; then :
+@@ -5865,7 +6123,7 @@ do
+ test -z "$as_dir" && as_dir=.
+ for ac_exec_ext in '' $ac_executable_extensions; do
+ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
+- ac_cv_prog_ac_ct_AR="ar"
++ ac_cv_prog_ac_ct_AR="$ac_prog"
+ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+ break 2
+ fi
+@@ -5884,6 +6142,10 @@ else
+ $as_echo "no" >&6; }
+ fi
+
++
++ test -n "$ac_ct_AR" && break
++done
++
+ if test "x$ac_ct_AR" = x; then
+ AR="false"
+ else
+@@ -5895,29 +6157,81 @@ ac_tool_warned=yes ;;
+ esac
+ AR=$ac_ct_AR
+ fi
+-else
+- AR="$ac_cv_prog_AR"
+ fi
+
+-test -z "$AR" && AR=ar
+-if test -n "$plugin_option"; then
+- if $AR --help 2>&1 | grep -q "\--plugin"; then
+- touch conftest.c
+- $AR $plugin_option rc conftest.a conftest.c
+- if test "$?" != 0; then
+- { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: Failed: $AR $plugin_option rc" >&5
++ touch conftest.c
++ $AR $plugin_option rc conftest.a conftest.c
++ if test "$?" != 0; then
++ { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: Failed: $AR $plugin_option rc" >&5
+ $as_echo "$as_me: WARNING: Failed: $AR $plugin_option rc" >&2;}
+- else
+- AR="$AR $plugin_option"
+- fi
+- rm -f conftest.*
++ else
++ AR="$AR $plugin_option"
+ fi
+-fi
+-test -z "$AR_FLAGS" && AR_FLAGS=cru
++ rm -f conftest.*
++: ${AR=ar}
++: ${AR_FLAGS=cru}
++
++
++
++
++
++
++
++
++
++
++
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for archiver @FILE support" >&5
++$as_echo_n "checking for archiver @FILE support... " >&6; }
++if ${lt_cv_ar_at_file+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ lt_cv_ar_at_file=no
++ cat confdefs.h - <<_ACEOF >conftest.$ac_ext
++/* end confdefs.h. */
++
++int
++main ()
++{
+
++ ;
++ return 0;
++}
++_ACEOF
++if ac_fn_c_try_compile "$LINENO"; then :
++ echo conftest.$ac_objext > conftest.lst
++ lt_ar_try='$AR $AR_FLAGS libconftest.a @conftest.lst >&5'
++ { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$lt_ar_try\""; } >&5
++ (eval $lt_ar_try) 2>&5
++ ac_status=$?
++ $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
++ test $ac_status = 0; }
++ if test "$ac_status" -eq 0; then
++ # Ensure the archiver fails upon bogus file names.
++ rm -f conftest.$ac_objext libconftest.a
++ { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$lt_ar_try\""; } >&5
++ (eval $lt_ar_try) 2>&5
++ ac_status=$?
++ $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
++ test $ac_status = 0; }
++ if test "$ac_status" -ne 0; then
++ lt_cv_ar_at_file=@
++ fi
++ fi
++ rm -f conftest.* libconftest.a
+
++fi
++rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
+
++fi
++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_ar_at_file" >&5
++$as_echo "$lt_cv_ar_at_file" >&6; }
+
++if test "x$lt_cv_ar_at_file" = xno; then
++ archiver_list_spec=
++else
++ archiver_list_spec=$lt_cv_ar_at_file
++fi
+
+
+
+@@ -6264,8 +6578,8 @@ esac
+ lt_cv_sys_global_symbol_to_cdecl="sed -n -e 's/^T .* \(.*\)$/extern int \1();/p' -e 's/^$symcode* .* \(.*\)$/extern char \1;/p'"
+
+ # Transform an extracted symbol line into symbol name and symbol address
+-lt_cv_sys_global_symbol_to_c_name_address="sed -n -e 's/^: \([^ ]*\) $/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"\2\", (void *) \&\2},/p'"
+-lt_cv_sys_global_symbol_to_c_name_address_lib_prefix="sed -n -e 's/^: \([^ ]*\) $/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \(lib[^ ]*\)$/ {\"\2\", (void *) \&\2},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"lib\2\", (void *) \&\2},/p'"
++lt_cv_sys_global_symbol_to_c_name_address="sed -n -e 's/^: \([^ ]*\)[ ]*$/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"\2\", (void *) \&\2},/p'"
++lt_cv_sys_global_symbol_to_c_name_address_lib_prefix="sed -n -e 's/^: \([^ ]*\)[ ]*$/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \(lib[^ ]*\)$/ {\"\2\", (void *) \&\2},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"lib\2\", (void *) \&\2},/p'"
+
+ # Handle CRLF in mingw tool chain
+ opt_cr=
+@@ -6301,6 +6615,7 @@ for ac_symprfx in "" "_"; do
+ else
+ lt_cv_sys_global_symbol_pipe="sed -n -e 's/^.*[ ]\($symcode$symcode*\)[ ][ ]*$ac_symprfx$sympat$opt_cr$/$symxfrm/p'"
+ fi
++ lt_cv_sys_global_symbol_pipe="$lt_cv_sys_global_symbol_pipe | sed '/ __gnu_lto/d'"
+
+ # Check to see that the pipe works correctly.
+ pipe_works=no
+@@ -6342,6 +6657,18 @@ _LT_EOF
+ if $GREP ' nm_test_var$' "$nlist" >/dev/null; then
+ if $GREP ' nm_test_func$' "$nlist" >/dev/null; then
+ cat <<_LT_EOF > conftest.$ac_ext
++/* Keep this code in sync between libtool.m4, ltmain, lt_system.h, and tests. */
++#if defined(_WIN32) || defined(__CYGWIN__) || defined(_WIN32_WCE)
++/* DATA imports from DLLs on WIN32 con't be const, because runtime
++ relocations are performed -- see ld's documentation on pseudo-relocs. */
++# define LT_DLSYM_CONST
++#elif defined(__osf__)
++/* This system does not cope well with relocations in const data. */
++# define LT_DLSYM_CONST
++#else
++# define LT_DLSYM_CONST const
++#endif
++
+ #ifdef __cplusplus
+ extern "C" {
+ #endif
+@@ -6353,7 +6680,7 @@ _LT_EOF
+ cat <<_LT_EOF >> conftest.$ac_ext
+
+ /* The mapping between symbol names and symbols. */
+-const struct {
++LT_DLSYM_CONST struct {
+ const char *name;
+ void *address;
+ }
+@@ -6379,8 +6706,8 @@ static const void *lt_preloaded_setup() {
+ _LT_EOF
+ # Now try linking the two files.
+ mv conftest.$ac_objext conftstm.$ac_objext
+- lt_save_LIBS="$LIBS"
+- lt_save_CFLAGS="$CFLAGS"
++ lt_globsym_save_LIBS=$LIBS
++ lt_globsym_save_CFLAGS=$CFLAGS
+ LIBS="conftstm.$ac_objext"
+ CFLAGS="$CFLAGS$lt_prog_compiler_no_builtin_flag"
+ if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_link\""; } >&5
+@@ -6390,8 +6717,8 @@ _LT_EOF
+ test $ac_status = 0; } && test -s conftest${ac_exeext}; then
+ pipe_works=yes
+ fi
+- LIBS="$lt_save_LIBS"
+- CFLAGS="$lt_save_CFLAGS"
++ LIBS=$lt_globsym_save_LIBS
++ CFLAGS=$lt_globsym_save_CFLAGS
+ else
+ echo "cannot find nm_test_func in $nlist" >&5
+ fi
+@@ -6428,6 +6755,19 @@ else
+ $as_echo "ok" >&6; }
+ fi
+
++# Response file support.
++if test "$lt_cv_nm_interface" = "MS dumpbin"; then
++ nm_file_list_spec='@'
++elif $NM --help 2>/dev/null | grep '[@]FILE' >/dev/null; then
++ nm_file_list_spec='@'
++fi
++
++
++
++
++
++
++
+
+
+
+@@ -6444,6 +6784,42 @@ fi
+
+
+
++
++
++
++
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for sysroot" >&5
++$as_echo_n "checking for sysroot... " >&6; }
++
++# Check whether --with-libtool-sysroot was given.
++if test "${with_libtool_sysroot+set}" = set; then :
++ withval=$with_libtool_sysroot;
++else
++ with_libtool_sysroot=no
++fi
++
++
++lt_sysroot=
++case ${with_libtool_sysroot} in #(
++ yes)
++ if test "$GCC" = yes; then
++ lt_sysroot=`$CC --print-sysroot 2>/dev/null`
++ fi
++ ;; #(
++ /*)
++ lt_sysroot=`echo "$with_libtool_sysroot" | sed -e "$sed_quote_subst"`
++ ;; #(
++ no|'')
++ ;; #(
++ *)
++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: ${with_libtool_sysroot}" >&5
++$as_echo "${with_libtool_sysroot}" >&6; }
++ as_fn_error $? "The sysroot must be an absolute path." "$LINENO" 5
++ ;;
++esac
++
++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: ${lt_sysroot:-no}" >&5
++$as_echo "${lt_sysroot:-no}" >&6; }
+
+
+
+@@ -6655,6 +7031,123 @@ esac
+
+ need_locks="$enable_libtool_lock"
+
++if test -n "$ac_tool_prefix"; then
++ # Extract the first word of "${ac_tool_prefix}mt", so it can be a program name with args.
++set dummy ${ac_tool_prefix}mt; ac_word=$2
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
++$as_echo_n "checking for $ac_word... " >&6; }
++if ${ac_cv_prog_MANIFEST_TOOL+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ if test -n "$MANIFEST_TOOL"; then
++ ac_cv_prog_MANIFEST_TOOL="$MANIFEST_TOOL" # Let the user override the test.
++else
++as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
++for as_dir in $PATH
++do
++ IFS=$as_save_IFS
++ test -z "$as_dir" && as_dir=.
++ for ac_exec_ext in '' $ac_executable_extensions; do
++ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
++ ac_cv_prog_MANIFEST_TOOL="${ac_tool_prefix}mt"
++ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
++ break 2
++ fi
++done
++ done
++IFS=$as_save_IFS
++
++fi
++fi
++MANIFEST_TOOL=$ac_cv_prog_MANIFEST_TOOL
++if test -n "$MANIFEST_TOOL"; then
++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $MANIFEST_TOOL" >&5
++$as_echo "$MANIFEST_TOOL" >&6; }
++else
++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
++$as_echo "no" >&6; }
++fi
++
++
++fi
++if test -z "$ac_cv_prog_MANIFEST_TOOL"; then
++ ac_ct_MANIFEST_TOOL=$MANIFEST_TOOL
++ # Extract the first word of "mt", so it can be a program name with args.
++set dummy mt; ac_word=$2
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
++$as_echo_n "checking for $ac_word... " >&6; }
++if ${ac_cv_prog_ac_ct_MANIFEST_TOOL+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ if test -n "$ac_ct_MANIFEST_TOOL"; then
++ ac_cv_prog_ac_ct_MANIFEST_TOOL="$ac_ct_MANIFEST_TOOL" # Let the user override the test.
++else
++as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
++for as_dir in $PATH
++do
++ IFS=$as_save_IFS
++ test -z "$as_dir" && as_dir=.
++ for ac_exec_ext in '' $ac_executable_extensions; do
++ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
++ ac_cv_prog_ac_ct_MANIFEST_TOOL="mt"
++ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
++ break 2
++ fi
++done
++ done
++IFS=$as_save_IFS
++
++fi
++fi
++ac_ct_MANIFEST_TOOL=$ac_cv_prog_ac_ct_MANIFEST_TOOL
++if test -n "$ac_ct_MANIFEST_TOOL"; then
++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_MANIFEST_TOOL" >&5
++$as_echo "$ac_ct_MANIFEST_TOOL" >&6; }
++else
++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
++$as_echo "no" >&6; }
++fi
++
++ if test "x$ac_ct_MANIFEST_TOOL" = x; then
++ MANIFEST_TOOL=":"
++ else
++ case $cross_compiling:$ac_tool_warned in
++yes:)
++{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
++$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
++ac_tool_warned=yes ;;
++esac
++ MANIFEST_TOOL=$ac_ct_MANIFEST_TOOL
++ fi
++else
++ MANIFEST_TOOL="$ac_cv_prog_MANIFEST_TOOL"
++fi
++
++test -z "$MANIFEST_TOOL" && MANIFEST_TOOL=mt
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking if $MANIFEST_TOOL is a manifest tool" >&5
++$as_echo_n "checking if $MANIFEST_TOOL is a manifest tool... " >&6; }
++if ${lt_cv_path_mainfest_tool+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ lt_cv_path_mainfest_tool=no
++ echo "$as_me:$LINENO: $MANIFEST_TOOL '-?'" >&5
++ $MANIFEST_TOOL '-?' 2>conftest.err > conftest.out
++ cat conftest.err >&5
++ if $GREP 'Manifest Tool' conftest.out > /dev/null; then
++ lt_cv_path_mainfest_tool=yes
++ fi
++ rm -f conftest*
++fi
++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_path_mainfest_tool" >&5
++$as_echo "$lt_cv_path_mainfest_tool" >&6; }
++if test "x$lt_cv_path_mainfest_tool" != xyes; then
++ MANIFEST_TOOL=:
++fi
++
++
++
++
++
+
+ case $host_os in
+ rhapsody* | darwin*)
+@@ -7218,6 +7711,8 @@ _LT_EOF
+ $LTCC $LTCFLAGS -c -o conftest.o conftest.c 2>&5
+ echo "$AR cru libconftest.a conftest.o" >&5
+ $AR cru libconftest.a conftest.o 2>&5
++ echo "$RANLIB libconftest.a" >&5
++ $RANLIB libconftest.a 2>&5
+ cat > conftest.c << _LT_EOF
+ int main() { return 0;}
+ _LT_EOF
+@@ -7802,8 +8297,6 @@ fi
+ lt_prog_compiler_pic=
+ lt_prog_compiler_static=
+
+-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $compiler option to produce PIC" >&5
+-$as_echo_n "checking for $compiler option to produce PIC... " >&6; }
+
+ if test "$GCC" = yes; then
+ lt_prog_compiler_wl='-Wl,'
+@@ -7969,6 +8462,12 @@ $as_echo_n "checking for $compiler option to produce PIC... " >&6; }
+ lt_prog_compiler_pic='--shared'
+ lt_prog_compiler_static='--static'
+ ;;
++ nagfor*)
++ # NAG Fortran compiler
++ lt_prog_compiler_wl='-Wl,-Wl,,'
++ lt_prog_compiler_pic='-PIC'
++ lt_prog_compiler_static='-Bstatic'
++ ;;
+ pgcc* | pgf77* | pgf90* | pgf95* | pgfortran*)
+ # Portland Group compilers (*not* the Pentium gcc compiler,
+ # which looks to be a dead project)
+@@ -8031,7 +8530,7 @@ $as_echo_n "checking for $compiler option to produce PIC... " >&6; }
+ lt_prog_compiler_pic='-KPIC'
+ lt_prog_compiler_static='-Bstatic'
+ case $cc_basename in
+- f77* | f90* | f95*)
++ f77* | f90* | f95* | sunf77* | sunf90* | sunf95*)
+ lt_prog_compiler_wl='-Qoption ld ';;
+ *)
+ lt_prog_compiler_wl='-Wl,';;
+@@ -8088,13 +8587,17 @@ case $host_os in
+ lt_prog_compiler_pic="$lt_prog_compiler_pic -DPIC"
+ ;;
+ esac
+-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_prog_compiler_pic" >&5
+-$as_echo "$lt_prog_compiler_pic" >&6; }
+-
+-
+-
+-
+
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $compiler option to produce PIC" >&5
++$as_echo_n "checking for $compiler option to produce PIC... " >&6; }
++if ${lt_cv_prog_compiler_pic+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ lt_cv_prog_compiler_pic=$lt_prog_compiler_pic
++fi
++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_pic" >&5
++$as_echo "$lt_cv_prog_compiler_pic" >&6; }
++lt_prog_compiler_pic=$lt_cv_prog_compiler_pic
+
+ #
+ # Check to make sure the PIC flag actually works.
+@@ -8155,6 +8658,11 @@ fi
+
+
+
++
++
++
++
++
+ #
+ # Check to make sure the static flag actually works.
+ #
+@@ -8505,7 +9013,8 @@ _LT_EOF
+ allow_undefined_flag=unsupported
+ always_export_symbols=no
+ enable_shared_with_static_runtimes=yes
+- export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1 DATA/'\'' | $SED -e '\''/^[AITW][ ]/s/.*[ ]//'\'' | sort | uniq > $export_symbols'
++ export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1 DATA/;s/^.*[ ]__nm__\([^ ]*\)[ ][^ ]*/\1 DATA/;/^I[ ]/d;/^[AITW][ ]/s/.* //'\'' | sort | uniq > $export_symbols'
++ exclude_expsyms='[_]+GLOBAL_OFFSET_TABLE_|[_]+GLOBAL__[FID]_.*|[_]+head_[A-Za-z0-9_]+_dll|[A-Za-z0-9_]+_dll_iname'
+
+ if $LD --help 2>&1 | $GREP 'auto-import' > /dev/null; then
+ archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib'
+@@ -8604,12 +9113,12 @@ _LT_EOF
+ whole_archive_flag_spec='--whole-archive$convenience --no-whole-archive'
+ hardcode_libdir_flag_spec=
+ hardcode_libdir_flag_spec_ld='-rpath $libdir'
+- archive_cmds='$LD -shared $libobjs $deplibs $compiler_flags -soname $soname -o $lib'
++ archive_cmds='$LD -shared $libobjs $deplibs $linker_flags -soname $soname -o $lib'
+ if test "x$supports_anon_versioning" = xyes; then
+ archive_expsym_cmds='echo "{ global:" > $output_objdir/$libname.ver~
+ cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~
+ echo "local: *; };" >> $output_objdir/$libname.ver~
+- $LD -shared $libobjs $deplibs $compiler_flags -soname $soname -version-script $output_objdir/$libname.ver -o $lib'
++ $LD -shared $libobjs $deplibs $linker_flags -soname $soname -version-script $output_objdir/$libname.ver -o $lib'
+ fi
+ ;;
+ esac
+@@ -8623,8 +9132,8 @@ _LT_EOF
+ archive_cmds='$LD -Bshareable $libobjs $deplibs $linker_flags -o $lib'
+ wlarc=
+ else
+- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
+- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
++ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
++ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
+ fi
+ ;;
+
+@@ -8642,8 +9151,8 @@ _LT_EOF
+
+ _LT_EOF
+ elif $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then
+- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
+- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
++ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
++ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
+ else
+ ld_shlibs=no
+ fi
+@@ -8689,8 +9198,8 @@ _LT_EOF
+
+ *)
+ if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then
+- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
+- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
++ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
++ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
+ else
+ ld_shlibs=no
+ fi
+@@ -8820,7 +9329,13 @@ _LT_EOF
+ allow_undefined_flag='-berok'
+ # Determine the default libpath from the value encoded in an
+ # empty executable.
+- cat confdefs.h - <<_ACEOF >conftest.$ac_ext
++ if test "${lt_cv_aix_libpath+set}" = set; then
++ aix_libpath=$lt_cv_aix_libpath
++else
++ if ${lt_cv_aix_libpath_+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+ /* end confdefs.h. */
+
+ int
+@@ -8833,22 +9348,29 @@ main ()
+ _ACEOF
+ if ac_fn_c_try_link "$LINENO"; then :
+
+-lt_aix_libpath_sed='
+- /Import File Strings/,/^$/ {
+- /^0/ {
+- s/^0 *\(.*\)$/\1/
+- p
+- }
+- }'
+-aix_libpath=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
+-# Check for a 64-bit object if we didn't find anything.
+-if test -z "$aix_libpath"; then
+- aix_libpath=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
+-fi
++ lt_aix_libpath_sed='
++ /Import File Strings/,/^$/ {
++ /^0/ {
++ s/^0 *\([^ ]*\) *$/\1/
++ p
++ }
++ }'
++ lt_cv_aix_libpath_=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
++ # Check for a 64-bit object if we didn't find anything.
++ if test -z "$lt_cv_aix_libpath_"; then
++ lt_cv_aix_libpath_=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
++ fi
+ fi
+ rm -f core conftest.err conftest.$ac_objext \
+ conftest$ac_exeext conftest.$ac_ext
+-if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
++ if test -z "$lt_cv_aix_libpath_"; then
++ lt_cv_aix_libpath_="/usr/lib:/lib"
++ fi
++
++fi
++
++ aix_libpath=$lt_cv_aix_libpath_
++fi
+
+ hardcode_libdir_flag_spec='${wl}-blibpath:$libdir:'"$aix_libpath"
+ archive_expsym_cmds='$CC -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags `if test "x${allow_undefined_flag}" != "x"; then func_echo_all "${wl}${allow_undefined_flag}"; else :; fi` '"\${wl}$exp_sym_flag:\$export_symbols $shared_flag"
+@@ -8860,7 +9382,13 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
+ else
+ # Determine the default libpath from the value encoded in an
+ # empty executable.
+- cat confdefs.h - <<_ACEOF >conftest.$ac_ext
++ if test "${lt_cv_aix_libpath+set}" = set; then
++ aix_libpath=$lt_cv_aix_libpath
++else
++ if ${lt_cv_aix_libpath_+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+ /* end confdefs.h. */
+
+ int
+@@ -8873,22 +9401,29 @@ main ()
+ _ACEOF
+ if ac_fn_c_try_link "$LINENO"; then :
+
+-lt_aix_libpath_sed='
+- /Import File Strings/,/^$/ {
+- /^0/ {
+- s/^0 *\(.*\)$/\1/
+- p
+- }
+- }'
+-aix_libpath=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
+-# Check for a 64-bit object if we didn't find anything.
+-if test -z "$aix_libpath"; then
+- aix_libpath=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
+-fi
++ lt_aix_libpath_sed='
++ /Import File Strings/,/^$/ {
++ /^0/ {
++ s/^0 *\([^ ]*\) *$/\1/
++ p
++ }
++ }'
++ lt_cv_aix_libpath_=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
++ # Check for a 64-bit object if we didn't find anything.
++ if test -z "$lt_cv_aix_libpath_"; then
++ lt_cv_aix_libpath_=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
++ fi
+ fi
+ rm -f core conftest.err conftest.$ac_objext \
+ conftest$ac_exeext conftest.$ac_ext
+-if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
++ if test -z "$lt_cv_aix_libpath_"; then
++ lt_cv_aix_libpath_="/usr/lib:/lib"
++ fi
++
++fi
++
++ aix_libpath=$lt_cv_aix_libpath_
++fi
+
+ hardcode_libdir_flag_spec='${wl}-blibpath:$libdir:'"$aix_libpath"
+ # Warning - without using the other run time loading flags,
+@@ -8933,20 +9468,63 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
+ # Microsoft Visual C++.
+ # hardcode_libdir_flag_spec is actually meaningless, as there is
+ # no search path for DLLs.
+- hardcode_libdir_flag_spec=' '
+- allow_undefined_flag=unsupported
+- # Tell ltmain to make .lib files, not .a files.
+- libext=lib
+- # Tell ltmain to make .dll files, not .so files.
+- shrext_cmds=".dll"
+- # FIXME: Setting linknames here is a bad hack.
+- archive_cmds='$CC -o $lib $libobjs $compiler_flags `func_echo_all "$deplibs" | $SED '\''s/ -lc$//'\''` -link -dll~linknames='
+- # The linker will automatically build a .lib file if we build a DLL.
+- old_archive_from_new_cmds='true'
+- # FIXME: Should let the user specify the lib program.
+- old_archive_cmds='lib -OUT:$oldlib$oldobjs$old_deplibs'
+- fix_srcfile_path='`cygpath -w "$srcfile"`'
+- enable_shared_with_static_runtimes=yes
++ case $cc_basename in
++ cl*)
++ # Native MSVC
++ hardcode_libdir_flag_spec=' '
++ allow_undefined_flag=unsupported
++ always_export_symbols=yes
++ file_list_spec='@'
++ # Tell ltmain to make .lib files, not .a files.
++ libext=lib
++ # Tell ltmain to make .dll files, not .so files.
++ shrext_cmds=".dll"
++ # FIXME: Setting linknames here is a bad hack.
++ archive_cmds='$CC -o $output_objdir/$soname $libobjs $compiler_flags $deplibs -Wl,-dll~linknames='
++ archive_expsym_cmds='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then
++ sed -n -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' -e '1\\\!p' < $export_symbols > $output_objdir/$soname.exp;
++ else
++ sed -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' < $export_symbols > $output_objdir/$soname.exp;
++ fi~
++ $CC -o $tool_output_objdir$soname $libobjs $compiler_flags $deplibs "@$tool_output_objdir$soname.exp" -Wl,-DLL,-IMPLIB:"$tool_output_objdir$libname.dll.lib"~
++ linknames='
++ # The linker will not automatically build a static lib if we build a DLL.
++ # _LT_TAGVAR(old_archive_from_new_cmds, )='true'
++ enable_shared_with_static_runtimes=yes
++ export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1,DATA/'\'' | $SED -e '\''/^[AITW][ ]/s/.*[ ]//'\'' | sort | uniq > $export_symbols'
++ # Don't use ranlib
++ old_postinstall_cmds='chmod 644 $oldlib'
++ postlink_cmds='lt_outputfile="@OUTPUT@"~
++ lt_tool_outputfile="@TOOL_OUTPUT@"~
++ case $lt_outputfile in
++ *.exe|*.EXE) ;;
++ *)
++ lt_outputfile="$lt_outputfile.exe"
++ lt_tool_outputfile="$lt_tool_outputfile.exe"
++ ;;
++ esac~
++ if test "$MANIFEST_TOOL" != ":" && test -f "$lt_outputfile.manifest"; then
++ $MANIFEST_TOOL -manifest "$lt_tool_outputfile.manifest" -outputresource:"$lt_tool_outputfile" || exit 1;
++ $RM "$lt_outputfile.manifest";
++ fi'
++ ;;
++ *)
++ # Assume MSVC wrapper
++ hardcode_libdir_flag_spec=' '
++ allow_undefined_flag=unsupported
++ # Tell ltmain to make .lib files, not .a files.
++ libext=lib
++ # Tell ltmain to make .dll files, not .so files.
++ shrext_cmds=".dll"
++ # FIXME: Setting linknames here is a bad hack.
++ archive_cmds='$CC -o $lib $libobjs $compiler_flags `func_echo_all "$deplibs" | $SED '\''s/ -lc$//'\''` -link -dll~linknames='
++ # The linker will automatically build a .lib file if we build a DLL.
++ old_archive_from_new_cmds='true'
++ # FIXME: Should let the user specify the lib program.
++ old_archive_cmds='lib -OUT:$oldlib$oldobjs$old_deplibs'
++ enable_shared_with_static_runtimes=yes
++ ;;
++ esac
+ ;;
+
+ darwin* | rhapsody*)
+@@ -9007,7 +9585,7 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
+
+ # FreeBSD 3 and greater uses gcc -shared to do shared libraries.
+ freebsd* | dragonfly*)
+- archive_cmds='$CC -shared -o $lib $libobjs $deplibs $compiler_flags'
++ archive_cmds='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags'
+ hardcode_libdir_flag_spec='-R$libdir'
+ hardcode_direct=yes
+ hardcode_shlibpath_var=no
+@@ -9015,7 +9593,7 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
+
+ hpux9*)
+ if test "$GCC" = yes; then
+- archive_cmds='$RM $output_objdir/$soname~$CC -shared -fPIC ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $libobjs $deplibs $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib'
++ archive_cmds='$RM $output_objdir/$soname~$CC -shared $pic_flag ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $libobjs $deplibs $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib'
+ else
+ archive_cmds='$RM $output_objdir/$soname~$LD -b +b $install_libdir -o $output_objdir/$soname $libobjs $deplibs $linker_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib'
+ fi
+@@ -9031,7 +9609,7 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
+
+ hpux10*)
+ if test "$GCC" = yes && test "$with_gnu_ld" = no; then
+- archive_cmds='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'
++ archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'
+ else
+ archive_cmds='$LD -b +h $soname +b $install_libdir -o $lib $libobjs $deplibs $linker_flags'
+ fi
+@@ -9055,10 +9633,10 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
+ archive_cmds='$CC -shared ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags'
+ ;;
+ ia64*)
+- archive_cmds='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags'
++ archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags'
+ ;;
+ *)
+- archive_cmds='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'
++ archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'
+ ;;
+ esac
+ else
+@@ -9137,23 +9715,36 @@ fi
+
+ irix5* | irix6* | nonstopux*)
+ if test "$GCC" = yes; then
+- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
++ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
+ # Try to use the -exported_symbol ld option, if it does not
+ # work, assume that -exports_file does not work either and
+ # implicitly export all symbols.
+- save_LDFLAGS="$LDFLAGS"
+- LDFLAGS="$LDFLAGS -shared ${wl}-exported_symbol ${wl}foo ${wl}-update_registry ${wl}/dev/null"
+- cat confdefs.h - <<_ACEOF >conftest.$ac_ext
++ # This should be the same for all languages, so no per-tag cache variable.
++ { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the $host_os linker accepts -exported_symbol" >&5
++$as_echo_n "checking whether the $host_os linker accepts -exported_symbol... " >&6; }
++if ${lt_cv_irix_exported_symbol+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ save_LDFLAGS="$LDFLAGS"
++ LDFLAGS="$LDFLAGS -shared ${wl}-exported_symbol ${wl}foo ${wl}-update_registry ${wl}/dev/null"
++ cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+ /* end confdefs.h. */
+-int foo(void) {}
++int foo (void) { return 0; }
+ _ACEOF
+ if ac_fn_c_try_link "$LINENO"; then :
+- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations ${wl}-exports_file ${wl}$export_symbols -o $lib'
+-
++ lt_cv_irix_exported_symbol=yes
++else
++ lt_cv_irix_exported_symbol=no
+ fi
+ rm -f core conftest.err conftest.$ac_objext \
+ conftest$ac_exeext conftest.$ac_ext
+- LDFLAGS="$save_LDFLAGS"
++ LDFLAGS="$save_LDFLAGS"
++fi
++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_irix_exported_symbol" >&5
++$as_echo "$lt_cv_irix_exported_symbol" >&6; }
++ if test "$lt_cv_irix_exported_symbol" = yes; then
++ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations ${wl}-exports_file ${wl}$export_symbols -o $lib'
++ fi
+ else
+ archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib'
+ archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -exports_file $export_symbols -o $lib'
+@@ -9238,7 +9829,7 @@ rm -f core conftest.err conftest.$ac_objext \
+ osf4* | osf5*) # as osf3* with the addition of -msym flag
+ if test "$GCC" = yes; then
+ allow_undefined_flag=' ${wl}-expect_unresolved ${wl}\*'
+- archive_cmds='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
++ archive_cmds='$CC -shared${allow_undefined_flag} $pic_flag $libobjs $deplibs $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
+ hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir'
+ else
+ allow_undefined_flag=' -expect_unresolved \*'
+@@ -9257,9 +9848,9 @@ rm -f core conftest.err conftest.$ac_objext \
+ no_undefined_flag=' -z defs'
+ if test "$GCC" = yes; then
+ wlarc='${wl}'
+- archive_cmds='$CC -shared ${wl}-z ${wl}text ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags'
++ archive_cmds='$CC -shared $pic_flag ${wl}-z ${wl}text ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags'
+ archive_expsym_cmds='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~
+- $CC -shared ${wl}-z ${wl}text ${wl}-M ${wl}$lib.exp ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp'
++ $CC -shared $pic_flag ${wl}-z ${wl}text ${wl}-M ${wl}$lib.exp ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp'
+ else
+ case `$CC -V 2>&1` in
+ *"Compilers 5.0"*)
+@@ -9835,8 +10426,9 @@ cygwin* | mingw* | pw32* | cegcc*)
+ need_version=no
+ need_lib_prefix=no
+
+- case $GCC,$host_os in
+- yes,cygwin* | yes,mingw* | yes,pw32* | yes,cegcc*)
++ case $GCC,$cc_basename in
++ yes,*)
++ # gcc
+ library_names_spec='$libname.dll.a'
+ # DLL is installed to $(libdir)/../bin by postinstall_cmds
+ postinstall_cmds='base_file=`basename \${file}`~
+@@ -9869,13 +10461,71 @@ cygwin* | mingw* | pw32* | cegcc*)
+ library_names_spec='`echo ${libname} | sed -e 's/^lib/pw/'``echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}'
+ ;;
+ esac
++ dynamic_linker='Win32 ld.exe'
++ ;;
++
++ *,cl*)
++ # Native MSVC
++ libname_spec='$name'
++ soname_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}'
++ library_names_spec='${libname}.dll.lib'
++
++ case $build_os in
++ mingw*)
++ sys_lib_search_path_spec=
++ lt_save_ifs=$IFS
++ IFS=';'
++ for lt_path in $LIB
++ do
++ IFS=$lt_save_ifs
++ # Let DOS variable expansion print the short 8.3 style file name.
++ lt_path=`cd "$lt_path" 2>/dev/null && cmd //C "for %i in (".") do @echo %~si"`
++ sys_lib_search_path_spec="$sys_lib_search_path_spec $lt_path"
++ done
++ IFS=$lt_save_ifs
++ # Convert to MSYS style.
++ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | sed -e 's|\\\\|/|g' -e 's| \\([a-zA-Z]\\):| /\\1|g' -e 's|^ ||'`
++ ;;
++ cygwin*)
++ # Convert to unix form, then to dos form, then back to unix form
++ # but this time dos style (no spaces!) so that the unix form looks
++ # like /cygdrive/c/PROGRA~1:/cygdr...
++ sys_lib_search_path_spec=`cygpath --path --unix "$LIB"`
++ sys_lib_search_path_spec=`cygpath --path --dos "$sys_lib_search_path_spec" 2>/dev/null`
++ sys_lib_search_path_spec=`cygpath --path --unix "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"`
++ ;;
++ *)
++ sys_lib_search_path_spec="$LIB"
++ if $ECHO "$sys_lib_search_path_spec" | $GREP ';[c-zC-Z]:/' >/dev/null; then
++ # It is most probably a Windows format PATH.
++ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e 's/;/ /g'`
++ else
++ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"`
++ fi
++ # FIXME: find the short name or the path components, as spaces are
++ # common. (e.g. "Program Files" -> "PROGRA~1")
++ ;;
++ esac
++
++ # DLL is installed to $(libdir)/../bin by postinstall_cmds
++ postinstall_cmds='base_file=`basename \${file}`~
++ dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\${base_file}'\''i; echo \$dlname'\''`~
++ dldir=$destdir/`dirname \$dlpath`~
++ test -d \$dldir || mkdir -p \$dldir~
++ $install_prog $dir/$dlname \$dldir/$dlname'
++ postuninstall_cmds='dldll=`$SHELL 2>&1 -c '\''. $file; echo \$dlname'\''`~
++ dlpath=$dir/\$dldll~
++ $RM \$dlpath'
++ shlibpath_overrides_runpath=yes
++ dynamic_linker='Win32 link.exe'
+ ;;
+
+ *)
++ # Assume MSVC wrapper
+ library_names_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext} $libname.lib'
++ dynamic_linker='Win32 ld.exe'
+ ;;
+ esac
+- dynamic_linker='Win32 ld.exe'
+ # FIXME: first we should search . and the directory the executable is in
+ shlibpath_var=PATH
+ ;;
+@@ -10777,7 +11427,7 @@ else
+ lt_dlunknown=0; lt_dlno_uscore=1; lt_dlneed_uscore=2
+ lt_status=$lt_dlunknown
+ cat > conftest.$ac_ext <<_LT_EOF
+-#line 10780 "configure"
++#line $LINENO "configure"
+ #include "confdefs.h"
+
+ #if HAVE_DLFCN_H
+@@ -10821,10 +11471,10 @@ else
+ /* When -fvisbility=hidden is used, assume the code has been annotated
+ correspondingly for the symbols needed. */
+ #if defined(__GNUC__) && (((__GNUC__ == 3) && (__GNUC_MINOR__ >= 3)) || (__GNUC__ > 3))
+-void fnord () __attribute__((visibility("default")));
++int fnord () __attribute__((visibility("default")));
+ #endif
+
+-void fnord () { int i=42; }
++int fnord () { return 42; }
+ int main ()
+ {
+ void *self = dlopen (0, LT_DLGLOBAL|LT_DLLAZY_OR_NOW);
+@@ -10883,7 +11533,7 @@ else
+ lt_dlunknown=0; lt_dlno_uscore=1; lt_dlneed_uscore=2
+ lt_status=$lt_dlunknown
+ cat > conftest.$ac_ext <<_LT_EOF
+-#line 10886 "configure"
++#line $LINENO "configure"
+ #include "confdefs.h"
+
+ #if HAVE_DLFCN_H
+@@ -10927,10 +11577,10 @@ else
+ /* When -fvisbility=hidden is used, assume the code has been annotated
+ correspondingly for the symbols needed. */
+ #if defined(__GNUC__) && (((__GNUC__ == 3) && (__GNUC_MINOR__ >= 3)) || (__GNUC__ > 3))
+-void fnord () __attribute__((visibility("default")));
++int fnord () __attribute__((visibility("default")));
+ #endif
+
+-void fnord () { int i=42; }
++int fnord () { return 42; }
+ int main ()
+ {
+ void *self = dlopen (0, LT_DLGLOBAL|LT_DLLAZY_OR_NOW);
+@@ -17080,13 +17730,20 @@ exeext='`$ECHO "$exeext" | $SED "$delay_single_quote_subst"`'
+ lt_unset='`$ECHO "$lt_unset" | $SED "$delay_single_quote_subst"`'
+ lt_SP2NL='`$ECHO "$lt_SP2NL" | $SED "$delay_single_quote_subst"`'
+ lt_NL2SP='`$ECHO "$lt_NL2SP" | $SED "$delay_single_quote_subst"`'
++lt_cv_to_host_file_cmd='`$ECHO "$lt_cv_to_host_file_cmd" | $SED "$delay_single_quote_subst"`'
++lt_cv_to_tool_file_cmd='`$ECHO "$lt_cv_to_tool_file_cmd" | $SED "$delay_single_quote_subst"`'
+ reload_flag='`$ECHO "$reload_flag" | $SED "$delay_single_quote_subst"`'
+ reload_cmds='`$ECHO "$reload_cmds" | $SED "$delay_single_quote_subst"`'
+ OBJDUMP='`$ECHO "$OBJDUMP" | $SED "$delay_single_quote_subst"`'
+ deplibs_check_method='`$ECHO "$deplibs_check_method" | $SED "$delay_single_quote_subst"`'
+ file_magic_cmd='`$ECHO "$file_magic_cmd" | $SED "$delay_single_quote_subst"`'
++file_magic_glob='`$ECHO "$file_magic_glob" | $SED "$delay_single_quote_subst"`'
++want_nocaseglob='`$ECHO "$want_nocaseglob" | $SED "$delay_single_quote_subst"`'
++DLLTOOL='`$ECHO "$DLLTOOL" | $SED "$delay_single_quote_subst"`'
++sharedlib_from_linklib_cmd='`$ECHO "$sharedlib_from_linklib_cmd" | $SED "$delay_single_quote_subst"`'
+ AR='`$ECHO "$AR" | $SED "$delay_single_quote_subst"`'
+ AR_FLAGS='`$ECHO "$AR_FLAGS" | $SED "$delay_single_quote_subst"`'
++archiver_list_spec='`$ECHO "$archiver_list_spec" | $SED "$delay_single_quote_subst"`'
+ STRIP='`$ECHO "$STRIP" | $SED "$delay_single_quote_subst"`'
+ RANLIB='`$ECHO "$RANLIB" | $SED "$delay_single_quote_subst"`'
+ old_postinstall_cmds='`$ECHO "$old_postinstall_cmds" | $SED "$delay_single_quote_subst"`'
+@@ -17101,14 +17758,17 @@ lt_cv_sys_global_symbol_pipe='`$ECHO "$lt_cv_sys_global_symbol_pipe" | $SED "$de
+ lt_cv_sys_global_symbol_to_cdecl='`$ECHO "$lt_cv_sys_global_symbol_to_cdecl" | $SED "$delay_single_quote_subst"`'
+ lt_cv_sys_global_symbol_to_c_name_address='`$ECHO "$lt_cv_sys_global_symbol_to_c_name_address" | $SED "$delay_single_quote_subst"`'
+ lt_cv_sys_global_symbol_to_c_name_address_lib_prefix='`$ECHO "$lt_cv_sys_global_symbol_to_c_name_address_lib_prefix" | $SED "$delay_single_quote_subst"`'
++nm_file_list_spec='`$ECHO "$nm_file_list_spec" | $SED "$delay_single_quote_subst"`'
++lt_sysroot='`$ECHO "$lt_sysroot" | $SED "$delay_single_quote_subst"`'
+ objdir='`$ECHO "$objdir" | $SED "$delay_single_quote_subst"`'
+ MAGIC_CMD='`$ECHO "$MAGIC_CMD" | $SED "$delay_single_quote_subst"`'
+ lt_prog_compiler_no_builtin_flag='`$ECHO "$lt_prog_compiler_no_builtin_flag" | $SED "$delay_single_quote_subst"`'
+-lt_prog_compiler_wl='`$ECHO "$lt_prog_compiler_wl" | $SED "$delay_single_quote_subst"`'
+ lt_prog_compiler_pic='`$ECHO "$lt_prog_compiler_pic" | $SED "$delay_single_quote_subst"`'
++lt_prog_compiler_wl='`$ECHO "$lt_prog_compiler_wl" | $SED "$delay_single_quote_subst"`'
+ lt_prog_compiler_static='`$ECHO "$lt_prog_compiler_static" | $SED "$delay_single_quote_subst"`'
+ lt_cv_prog_compiler_c_o='`$ECHO "$lt_cv_prog_compiler_c_o" | $SED "$delay_single_quote_subst"`'
+ need_locks='`$ECHO "$need_locks" | $SED "$delay_single_quote_subst"`'
++MANIFEST_TOOL='`$ECHO "$MANIFEST_TOOL" | $SED "$delay_single_quote_subst"`'
+ DSYMUTIL='`$ECHO "$DSYMUTIL" | $SED "$delay_single_quote_subst"`'
+ NMEDIT='`$ECHO "$NMEDIT" | $SED "$delay_single_quote_subst"`'
+ LIPO='`$ECHO "$LIPO" | $SED "$delay_single_quote_subst"`'
+@@ -17141,12 +17801,12 @@ hardcode_shlibpath_var='`$ECHO "$hardcode_shlibpath_var" | $SED "$delay_single_q
+ hardcode_automatic='`$ECHO "$hardcode_automatic" | $SED "$delay_single_quote_subst"`'
+ inherit_rpath='`$ECHO "$inherit_rpath" | $SED "$delay_single_quote_subst"`'
+ link_all_deplibs='`$ECHO "$link_all_deplibs" | $SED "$delay_single_quote_subst"`'
+-fix_srcfile_path='`$ECHO "$fix_srcfile_path" | $SED "$delay_single_quote_subst"`'
+ always_export_symbols='`$ECHO "$always_export_symbols" | $SED "$delay_single_quote_subst"`'
+ export_symbols_cmds='`$ECHO "$export_symbols_cmds" | $SED "$delay_single_quote_subst"`'
+ exclude_expsyms='`$ECHO "$exclude_expsyms" | $SED "$delay_single_quote_subst"`'
+ include_expsyms='`$ECHO "$include_expsyms" | $SED "$delay_single_quote_subst"`'
+ prelink_cmds='`$ECHO "$prelink_cmds" | $SED "$delay_single_quote_subst"`'
++postlink_cmds='`$ECHO "$postlink_cmds" | $SED "$delay_single_quote_subst"`'
+ file_list_spec='`$ECHO "$file_list_spec" | $SED "$delay_single_quote_subst"`'
+ variables_saved_for_relink='`$ECHO "$variables_saved_for_relink" | $SED "$delay_single_quote_subst"`'
+ need_lib_prefix='`$ECHO "$need_lib_prefix" | $SED "$delay_single_quote_subst"`'
+@@ -17201,8 +17861,13 @@ reload_flag \
+ OBJDUMP \
+ deplibs_check_method \
+ file_magic_cmd \
++file_magic_glob \
++want_nocaseglob \
++DLLTOOL \
++sharedlib_from_linklib_cmd \
+ AR \
+ AR_FLAGS \
++archiver_list_spec \
+ STRIP \
+ RANLIB \
+ CC \
+@@ -17212,12 +17877,14 @@ lt_cv_sys_global_symbol_pipe \
+ lt_cv_sys_global_symbol_to_cdecl \
+ lt_cv_sys_global_symbol_to_c_name_address \
+ lt_cv_sys_global_symbol_to_c_name_address_lib_prefix \
++nm_file_list_spec \
+ lt_prog_compiler_no_builtin_flag \
+-lt_prog_compiler_wl \
+ lt_prog_compiler_pic \
++lt_prog_compiler_wl \
+ lt_prog_compiler_static \
+ lt_cv_prog_compiler_c_o \
+ need_locks \
++MANIFEST_TOOL \
+ DSYMUTIL \
+ NMEDIT \
+ LIPO \
+@@ -17233,7 +17900,6 @@ no_undefined_flag \
+ hardcode_libdir_flag_spec \
+ hardcode_libdir_flag_spec_ld \
+ hardcode_libdir_separator \
+-fix_srcfile_path \
+ exclude_expsyms \
+ include_expsyms \
+ file_list_spec \
+@@ -17269,6 +17935,7 @@ module_cmds \
+ module_expsym_cmds \
+ export_symbols_cmds \
+ prelink_cmds \
++postlink_cmds \
+ postinstall_cmds \
+ postuninstall_cmds \
+ finish_cmds \
+@@ -18049,7 +18716,8 @@ $as_echo X"$file" |
+ # NOTE: Changes made to this file will be lost: look at ltmain.sh.
+ #
+ # Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005,
+-# 2006, 2007, 2008, 2009 Free Software Foundation, Inc.
++# 2006, 2007, 2008, 2009, 2010 Free Software Foundation,
++# Inc.
+ # Written by Gordon Matzigkeit, 1996
+ #
+ # This file is part of GNU Libtool.
+@@ -18152,19 +18820,42 @@ SP2NL=$lt_lt_SP2NL
+ # turn newlines into spaces.
+ NL2SP=$lt_lt_NL2SP
+
++# convert \$build file names to \$host format.
++to_host_file_cmd=$lt_cv_to_host_file_cmd
++
++# convert \$build files to toolchain format.
++to_tool_file_cmd=$lt_cv_to_tool_file_cmd
++
+ # An object symbol dumper.
+ OBJDUMP=$lt_OBJDUMP
+
+ # Method to check whether dependent libraries are shared objects.
+ deplibs_check_method=$lt_deplibs_check_method
+
+-# Command to use when deplibs_check_method == "file_magic".
++# Command to use when deplibs_check_method = "file_magic".
+ file_magic_cmd=$lt_file_magic_cmd
+
++# How to find potential files when deplibs_check_method = "file_magic".
++file_magic_glob=$lt_file_magic_glob
++
++# Find potential files using nocaseglob when deplibs_check_method = "file_magic".
++want_nocaseglob=$lt_want_nocaseglob
++
++# DLL creation program.
++DLLTOOL=$lt_DLLTOOL
++
++# Command to associate shared and link libraries.
++sharedlib_from_linklib_cmd=$lt_sharedlib_from_linklib_cmd
++
+ # The archiver.
+ AR=$lt_AR
++
++# Flags to create an archive.
+ AR_FLAGS=$lt_AR_FLAGS
+
++# How to feed a file listing to the archiver.
++archiver_list_spec=$lt_archiver_list_spec
++
+ # A symbol stripping program.
+ STRIP=$lt_STRIP
+
+@@ -18194,6 +18885,12 @@ global_symbol_to_c_name_address=$lt_lt_cv_sys_global_symbol_to_c_name_address
+ # Transform the output of nm in a C name address pair when lib prefix is needed.
+ global_symbol_to_c_name_address_lib_prefix=$lt_lt_cv_sys_global_symbol_to_c_name_address_lib_prefix
+
++# Specify filename containing input files for \$NM.
++nm_file_list_spec=$lt_nm_file_list_spec
++
++# The root where to search for dependent libraries,and in which our libraries should be installed.
++lt_sysroot=$lt_sysroot
++
+ # The name of the directory that contains temporary libtool files.
+ objdir=$objdir
+
+@@ -18203,6 +18900,9 @@ MAGIC_CMD=$MAGIC_CMD
+ # Must we lock files when doing compilation?
+ need_locks=$lt_need_locks
+
++# Manifest tool.
++MANIFEST_TOOL=$lt_MANIFEST_TOOL
++
+ # Tool to manipulate archived DWARF debug symbol files on Mac OS X.
+ DSYMUTIL=$lt_DSYMUTIL
+
+@@ -18317,12 +19017,12 @@ with_gcc=$GCC
+ # Compiler flag to turn off builtin functions.
+ no_builtin_flag=$lt_lt_prog_compiler_no_builtin_flag
+
+-# How to pass a linker flag through the compiler.
+-wl=$lt_lt_prog_compiler_wl
+-
+ # Additional compiler flags for building library objects.
+ pic_flag=$lt_lt_prog_compiler_pic
+
++# How to pass a linker flag through the compiler.
++wl=$lt_lt_prog_compiler_wl
++
+ # Compiler flag to prevent dynamic linking.
+ link_static_flag=$lt_lt_prog_compiler_static
+
+@@ -18409,9 +19109,6 @@ inherit_rpath=$inherit_rpath
+ # Whether libtool must link a program against all its dependency libraries.
+ link_all_deplibs=$link_all_deplibs
+
+-# Fix the shell variable \$srcfile for the compiler.
+-fix_srcfile_path=$lt_fix_srcfile_path
+-
+ # Set to "yes" if exported symbols are required.
+ always_export_symbols=$always_export_symbols
+
+@@ -18427,6 +19124,9 @@ include_expsyms=$lt_include_expsyms
+ # Commands necessary for linking programs (against libraries) with templates.
+ prelink_cmds=$lt_prelink_cmds
+
++# Commands necessary for finishing linking programs.
++postlink_cmds=$lt_postlink_cmds
++
+ # Specify filename containing input files.
+ file_list_spec=$lt_file_list_spec
+
+@@ -18459,210 +19159,169 @@ ltmain="$ac_aux_dir/ltmain.sh"
+ # if finds mixed CR/LF and LF-only lines. Since sed operates in
+ # text mode, it properly converts lines to CR/LF. This bash problem
+ # is reportedly fixed, but why not run on old versions too?
+- sed '/^# Generated shell functions inserted here/q' "$ltmain" >> "$cfgfile" \
+- || (rm -f "$cfgfile"; exit 1)
+-
+- case $xsi_shell in
+- yes)
+- cat << \_LT_EOF >> "$cfgfile"
+-
+-# func_dirname file append nondir_replacement
+-# Compute the dirname of FILE. If nonempty, add APPEND to the result,
+-# otherwise set result to NONDIR_REPLACEMENT.
+-func_dirname ()
+-{
+- case ${1} in
+- */*) func_dirname_result="${1%/*}${2}" ;;
+- * ) func_dirname_result="${3}" ;;
+- esac
+-}
+-
+-# func_basename file
+-func_basename ()
+-{
+- func_basename_result="${1##*/}"
+-}
+-
+-# func_dirname_and_basename file append nondir_replacement
+-# perform func_basename and func_dirname in a single function
+-# call:
+-# dirname: Compute the dirname of FILE. If nonempty,
+-# add APPEND to the result, otherwise set result
+-# to NONDIR_REPLACEMENT.
+-# value returned in "$func_dirname_result"
+-# basename: Compute filename of FILE.
+-# value retuned in "$func_basename_result"
+-# Implementation must be kept synchronized with func_dirname
+-# and func_basename. For efficiency, we do not delegate to
+-# those functions but instead duplicate the functionality here.
+-func_dirname_and_basename ()
+-{
+- case ${1} in
+- */*) func_dirname_result="${1%/*}${2}" ;;
+- * ) func_dirname_result="${3}" ;;
+- esac
+- func_basename_result="${1##*/}"
+-}
+-
+-# func_stripname prefix suffix name
+-# strip PREFIX and SUFFIX off of NAME.
+-# PREFIX and SUFFIX must not contain globbing or regex special
+-# characters, hashes, percent signs, but SUFFIX may contain a leading
+-# dot (in which case that matches only a dot).
+-func_stripname ()
+-{
+- # pdksh 5.2.14 does not do ${X%$Y} correctly if both X and Y are
+- # positional parameters, so assign one to ordinary parameter first.
+- func_stripname_result=${3}
+- func_stripname_result=${func_stripname_result#"${1}"}
+- func_stripname_result=${func_stripname_result%"${2}"}
+-}
+-
+-# func_opt_split
+-func_opt_split ()
+-{
+- func_opt_split_opt=${1%%=*}
+- func_opt_split_arg=${1#*=}
+-}
+-
+-# func_lo2o object
+-func_lo2o ()
+-{
+- case ${1} in
+- *.lo) func_lo2o_result=${1%.lo}.${objext} ;;
+- *) func_lo2o_result=${1} ;;
+- esac
+-}
+-
+-# func_xform libobj-or-source
+-func_xform ()
+-{
+- func_xform_result=${1%.*}.lo
+-}
+-
+-# func_arith arithmetic-term...
+-func_arith ()
+-{
+- func_arith_result=$(( $* ))
+-}
+-
+-# func_len string
+-# STRING may not start with a hyphen.
+-func_len ()
+-{
+- func_len_result=${#1}
+-}
+-
+-_LT_EOF
+- ;;
+- *) # Bourne compatible functions.
+- cat << \_LT_EOF >> "$cfgfile"
+-
+-# func_dirname file append nondir_replacement
+-# Compute the dirname of FILE. If nonempty, add APPEND to the result,
+-# otherwise set result to NONDIR_REPLACEMENT.
+-func_dirname ()
+-{
+- # Extract subdirectory from the argument.
+- func_dirname_result=`$ECHO "${1}" | $SED "$dirname"`
+- if test "X$func_dirname_result" = "X${1}"; then
+- func_dirname_result="${3}"
+- else
+- func_dirname_result="$func_dirname_result${2}"
+- fi
+-}
+-
+-# func_basename file
+-func_basename ()
+-{
+- func_basename_result=`$ECHO "${1}" | $SED "$basename"`
+-}
+-
+-
+-# func_stripname prefix suffix name
+-# strip PREFIX and SUFFIX off of NAME.
+-# PREFIX and SUFFIX must not contain globbing or regex special
+-# characters, hashes, percent signs, but SUFFIX may contain a leading
+-# dot (in which case that matches only a dot).
+-# func_strip_suffix prefix name
+-func_stripname ()
+-{
+- case ${2} in
+- .*) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%\\\\${2}\$%%"`;;
+- *) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%${2}\$%%"`;;
+- esac
+-}
+-
+-# sed scripts:
+-my_sed_long_opt='1s/^\(-[^=]*\)=.*/\1/;q'
+-my_sed_long_arg='1s/^-[^=]*=//'
+-
+-# func_opt_split
+-func_opt_split ()
+-{
+- func_opt_split_opt=`$ECHO "${1}" | $SED "$my_sed_long_opt"`
+- func_opt_split_arg=`$ECHO "${1}" | $SED "$my_sed_long_arg"`
+-}
+-
+-# func_lo2o object
+-func_lo2o ()
+-{
+- func_lo2o_result=`$ECHO "${1}" | $SED "$lo2o"`
+-}
+-
+-# func_xform libobj-or-source
+-func_xform ()
+-{
+- func_xform_result=`$ECHO "${1}" | $SED 's/\.[^.]*$/.lo/'`
+-}
+-
+-# func_arith arithmetic-term...
+-func_arith ()
+-{
+- func_arith_result=`expr "$@"`
+-}
+-
+-# func_len string
+-# STRING may not start with a hyphen.
+-func_len ()
+-{
+- func_len_result=`expr "$1" : ".*" 2>/dev/null || echo $max_cmd_len`
+-}
+-
+-_LT_EOF
+-esac
+-
+-case $lt_shell_append in
+- yes)
+- cat << \_LT_EOF >> "$cfgfile"
+-
+-# func_append var value
+-# Append VALUE to the end of shell variable VAR.
+-func_append ()
+-{
+- eval "$1+=\$2"
+-}
+-_LT_EOF
+- ;;
+- *)
+- cat << \_LT_EOF >> "$cfgfile"
+-
+-# func_append var value
+-# Append VALUE to the end of shell variable VAR.
+-func_append ()
+-{
+- eval "$1=\$$1\$2"
+-}
+-
+-_LT_EOF
+- ;;
+- esac
+-
+-
+- sed -n '/^# Generated shell functions inserted here/,$p' "$ltmain" >> "$cfgfile" \
+- || (rm -f "$cfgfile"; exit 1)
+-
+- mv -f "$cfgfile" "$ofile" ||
++ sed '$q' "$ltmain" >> "$cfgfile" \
++ || (rm -f "$cfgfile"; exit 1)
++
++ if test x"$xsi_shell" = xyes; then
++ sed -e '/^func_dirname ()$/,/^} # func_dirname /c\
++func_dirname ()\
++{\
++\ case ${1} in\
++\ */*) func_dirname_result="${1%/*}${2}" ;;\
++\ * ) func_dirname_result="${3}" ;;\
++\ esac\
++} # Extended-shell func_dirname implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++
++ sed -e '/^func_basename ()$/,/^} # func_basename /c\
++func_basename ()\
++{\
++\ func_basename_result="${1##*/}"\
++} # Extended-shell func_basename implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++
++ sed -e '/^func_dirname_and_basename ()$/,/^} # func_dirname_and_basename /c\
++func_dirname_and_basename ()\
++{\
++\ case ${1} in\
++\ */*) func_dirname_result="${1%/*}${2}" ;;\
++\ * ) func_dirname_result="${3}" ;;\
++\ esac\
++\ func_basename_result="${1##*/}"\
++} # Extended-shell func_dirname_and_basename implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++
++ sed -e '/^func_stripname ()$/,/^} # func_stripname /c\
++func_stripname ()\
++{\
++\ # pdksh 5.2.14 does not do ${X%$Y} correctly if both X and Y are\
++\ # positional parameters, so assign one to ordinary parameter first.\
++\ func_stripname_result=${3}\
++\ func_stripname_result=${func_stripname_result#"${1}"}\
++\ func_stripname_result=${func_stripname_result%"${2}"}\
++} # Extended-shell func_stripname implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++
++ sed -e '/^func_split_long_opt ()$/,/^} # func_split_long_opt /c\
++func_split_long_opt ()\
++{\
++\ func_split_long_opt_name=${1%%=*}\
++\ func_split_long_opt_arg=${1#*=}\
++} # Extended-shell func_split_long_opt implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++
++ sed -e '/^func_split_short_opt ()$/,/^} # func_split_short_opt /c\
++func_split_short_opt ()\
++{\
++\ func_split_short_opt_arg=${1#??}\
++\ func_split_short_opt_name=${1%"$func_split_short_opt_arg"}\
++} # Extended-shell func_split_short_opt implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++
++ sed -e '/^func_lo2o ()$/,/^} # func_lo2o /c\
++func_lo2o ()\
++{\
++\ case ${1} in\
++\ *.lo) func_lo2o_result=${1%.lo}.${objext} ;;\
++\ *) func_lo2o_result=${1} ;;\
++\ esac\
++} # Extended-shell func_lo2o implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++
++ sed -e '/^func_xform ()$/,/^} # func_xform /c\
++func_xform ()\
++{\
++ func_xform_result=${1%.*}.lo\
++} # Extended-shell func_xform implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++
++ sed -e '/^func_arith ()$/,/^} # func_arith /c\
++func_arith ()\
++{\
++ func_arith_result=$(( $* ))\
++} # Extended-shell func_arith implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++
++ sed -e '/^func_len ()$/,/^} # func_len /c\
++func_len ()\
++{\
++ func_len_result=${#1}\
++} # Extended-shell func_len implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++fi
++
++if test x"$lt_shell_append" = xyes; then
++ sed -e '/^func_append ()$/,/^} # func_append /c\
++func_append ()\
++{\
++ eval "${1}+=\\${2}"\
++} # Extended-shell func_append implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++
++ sed -e '/^func_append_quoted ()$/,/^} # func_append_quoted /c\
++func_append_quoted ()\
++{\
++\ func_quote_for_eval "${2}"\
++\ eval "${1}+=\\\\ \\$func_quote_for_eval_result"\
++} # Extended-shell func_append_quoted implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++
++ # Save a `func_append' function call where possible by direct use of '+='
++ sed -e 's%func_append \([a-zA-Z_]\{1,\}\) "%\1+="%g' $cfgfile > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++ test 0 -eq $? || _lt_function_replace_fail=:
++else
++ # Save a `func_append' function call even when '+=' is not available
++ sed -e 's%func_append \([a-zA-Z_]\{1,\}\) "%\1="$\1%g' $cfgfile > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++ test 0 -eq $? || _lt_function_replace_fail=:
++fi
++
++if test x"$_lt_function_replace_fail" = x":"; then
++ { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: Unable to substitute extended shell functions in $ofile" >&5
++$as_echo "$as_me: WARNING: Unable to substitute extended shell functions in $ofile" >&2;}
++fi
++
++
++ mv -f "$cfgfile" "$ofile" ||
+ (rm -f "$ofile" && cp "$cfgfile" "$ofile" && rm -f "$cfgfile")
+ chmod +x "$ofile"
+
+diff --git a/gprof/Makefile.in b/gprof/Makefile.in
+index a7398231030..fc7a3d78db0 100644
+--- a/gprof/Makefile.in
++++ b/gprof/Makefile.in
+@@ -327,6 +327,7 @@ CYGPATH_W = @CYGPATH_W@
+ DATADIRNAME = @DATADIRNAME@
+ DEFS = @DEFS@
+ DEPDIR = @DEPDIR@
++DLLTOOL = @DLLTOOL@
+ DSYMUTIL = @DSYMUTIL@
+ DUMPBIN = @DUMPBIN@
+ ECHO_C = @ECHO_C@
+@@ -363,6 +364,7 @@ LTLIBINTL = @LTLIBINTL@
+ LTLIBOBJS = @LTLIBOBJS@
+ MAINT = @MAINT@
+ MAKEINFO = @MAKEINFO@
++MANIFEST_TOOL = @MANIFEST_TOOL@
+ MKDIR_P = @MKDIR_P@
+ MKINSTALLDIRS = @MKINSTALLDIRS@
+ MSGFMT = @MSGFMT@
+@@ -398,6 +400,7 @@ abs_builddir = @abs_builddir@
+ abs_srcdir = @abs_srcdir@
+ abs_top_builddir = @abs_top_builddir@
+ abs_top_srcdir = @abs_top_srcdir@
++ac_ct_AR = @ac_ct_AR@
+ ac_ct_CC = @ac_ct_CC@
+ ac_ct_DUMPBIN = @ac_ct_DUMPBIN@
+ am__include = @am__include@
+diff --git a/gprof/configure b/gprof/configure
+index 2d62b6fbb07..0cafee249eb 100755
+--- a/gprof/configure
++++ b/gprof/configure
+@@ -669,8 +669,11 @@ OTOOL
+ LIPO
+ NMEDIT
+ DSYMUTIL
++MANIFEST_TOOL
+ RANLIB
++ac_ct_AR
+ AR
++DLLTOOL
+ OBJDUMP
+ LN_S
+ NM
+@@ -787,6 +790,7 @@ enable_static
+ with_pic
+ enable_fast_install
+ with_gnu_ld
++with_libtool_sysroot
+ enable_libtool_lock
+ enable_plugins
+ enable_largefile
+@@ -1462,6 +1466,8 @@ Optional Packages:
+ --with-libintl-prefix[=DIR] search for libintl in DIR/include and DIR/lib
+ --without-libintl-prefix don't search for libintl in includedir and libdir
+ --with-libintl-type=TYPE type of library to search for (auto/static/shared)
++ --with-libtool-sysroot=DIR Search for dependent libraries within DIR
++ (or the compiler's sysroot if not specified).
+
+ Some influential environment variables:
+ CC C compiler command
+@@ -4530,8 +4536,8 @@ esac
+
+
+
+-macro_version='2.2.7a'
+-macro_revision='1.3134'
++macro_version='2.4'
++macro_revision='1.3293'
+
+
+
+@@ -4571,7 +4577,7 @@ ECHO=$ECHO$ECHO$ECHO$ECHO$ECHO$ECHO
+ { $as_echo "$as_me:${as_lineno-$LINENO}: checking how to print strings" >&5
+ $as_echo_n "checking how to print strings... " >&6; }
+ # Test print first, because it will be a builtin if present.
+-if test "X`print -r -- -n 2>/dev/null`" = X-n && \
++if test "X`( print -r -- -n ) 2>/dev/null`" = X-n && \
+ test "X`print -r -- $ECHO 2>/dev/null`" = "X$ECHO"; then
+ ECHO='print -r --'
+ elif test "X`printf %s $ECHO 2>/dev/null`" = "X$ECHO"; then
+@@ -5264,8 +5270,8 @@ $as_echo_n "checking whether the shell understands some XSI constructs... " >&6;
+ # Try some XSI features
+ xsi_shell=no
+ ( _lt_dummy="a/b/c"
+- test "${_lt_dummy##*/},${_lt_dummy%/*},"${_lt_dummy%"$_lt_dummy"}, \
+- = c,a/b,, \
++ test "${_lt_dummy##*/},${_lt_dummy%/*},${_lt_dummy#??}"${_lt_dummy%"$_lt_dummy"}, \
++ = c,a/b,b/c, \
+ && eval 'test $(( 1 + 1 )) -eq 2 \
+ && test "${#_lt_dummy}" -eq 5' ) >/dev/null 2>&1 \
+ && xsi_shell=yes
+@@ -5314,6 +5320,80 @@ esac
+
+
+
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to convert $build file names to $host format" >&5
++$as_echo_n "checking how to convert $build file names to $host format... " >&6; }
++if ${lt_cv_to_host_file_cmd+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ case $host in
++ *-*-mingw* )
++ case $build in
++ *-*-mingw* ) # actually msys
++ lt_cv_to_host_file_cmd=func_convert_file_msys_to_w32
++ ;;
++ *-*-cygwin* )
++ lt_cv_to_host_file_cmd=func_convert_file_cygwin_to_w32
++ ;;
++ * ) # otherwise, assume *nix
++ lt_cv_to_host_file_cmd=func_convert_file_nix_to_w32
++ ;;
++ esac
++ ;;
++ *-*-cygwin* )
++ case $build in
++ *-*-mingw* ) # actually msys
++ lt_cv_to_host_file_cmd=func_convert_file_msys_to_cygwin
++ ;;
++ *-*-cygwin* )
++ lt_cv_to_host_file_cmd=func_convert_file_noop
++ ;;
++ * ) # otherwise, assume *nix
++ lt_cv_to_host_file_cmd=func_convert_file_nix_to_cygwin
++ ;;
++ esac
++ ;;
++ * ) # unhandled hosts (and "normal" native builds)
++ lt_cv_to_host_file_cmd=func_convert_file_noop
++ ;;
++esac
++
++fi
++
++to_host_file_cmd=$lt_cv_to_host_file_cmd
++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_to_host_file_cmd" >&5
++$as_echo "$lt_cv_to_host_file_cmd" >&6; }
++
++
++
++
++
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to convert $build file names to toolchain format" >&5
++$as_echo_n "checking how to convert $build file names to toolchain format... " >&6; }
++if ${lt_cv_to_tool_file_cmd+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ #assume ordinary cross tools, or native build.
++lt_cv_to_tool_file_cmd=func_convert_file_noop
++case $host in
++ *-*-mingw* )
++ case $build in
++ *-*-mingw* ) # actually msys
++ lt_cv_to_tool_file_cmd=func_convert_file_msys_to_w32
++ ;;
++ esac
++ ;;
++esac
++
++fi
++
++to_tool_file_cmd=$lt_cv_to_tool_file_cmd
++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_to_tool_file_cmd" >&5
++$as_echo "$lt_cv_to_tool_file_cmd" >&6; }
++
++
++
++
++
+ { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $LD option to reload object files" >&5
+ $as_echo_n "checking for $LD option to reload object files... " >&6; }
+ if ${lt_cv_ld_reload_flag+:} false; then :
+@@ -5330,6 +5410,11 @@ case $reload_flag in
+ esac
+ reload_cmds='$LD$reload_flag -o $output$reload_objs'
+ case $host_os in
++ cygwin* | mingw* | pw32* | cegcc*)
++ if test "$GCC" != yes; then
++ reload_cmds=false
++ fi
++ ;;
+ darwin*)
+ if test "$GCC" = yes; then
+ reload_cmds='$LTCC $LTCFLAGS -nostdlib ${wl}-r -o $output$reload_objs'
+@@ -5498,7 +5583,8 @@ mingw* | pw32*)
+ lt_cv_deplibs_check_method='file_magic ^x86 archive import|^x86 DLL'
+ lt_cv_file_magic_cmd='func_win32_libid'
+ else
+- lt_cv_deplibs_check_method='file_magic file format pei*-i386(.*architecture: i386)?'
++ # Keep this pattern in sync with the one in func_win32_libid.
++ lt_cv_deplibs_check_method='file_magic file format (pei*-i386(.*architecture: i386)?|pe-arm-wince|pe-x86-64)'
+ lt_cv_file_magic_cmd='$OBJDUMP -f'
+ fi
+ ;;
+@@ -5657,6 +5743,21 @@ esac
+ fi
+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_deplibs_check_method" >&5
+ $as_echo "$lt_cv_deplibs_check_method" >&6; }
++
++file_magic_glob=
++want_nocaseglob=no
++if test "$build" = "$host"; then
++ case $host_os in
++ mingw* | pw32*)
++ if ( shopt | grep nocaseglob ) >/dev/null 2>&1; then
++ want_nocaseglob=yes
++ else
++ file_magic_glob=`echo aAbBcCdDeEfFgGhHiIjJkKlLmMnNoOpPqQrRsStTuUvVwWxXyYzZ | $SED -e "s/\(..\)/s\/[\1]\/[\1]\/g;/g"`
++ fi
++ ;;
++ esac
++fi
++
+ file_magic_cmd=$lt_cv_file_magic_cmd
+ deplibs_check_method=$lt_cv_deplibs_check_method
+ test -z "$deplibs_check_method" && deplibs_check_method=unknown
+@@ -5672,6 +5773,157 @@ test -z "$deplibs_check_method" && deplibs_check_method=unknown
+
+
+
++
++
++
++
++
++
++
++
++
++
++if test -n "$ac_tool_prefix"; then
++ # Extract the first word of "${ac_tool_prefix}dlltool", so it can be a program name with args.
++set dummy ${ac_tool_prefix}dlltool; ac_word=$2
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
++$as_echo_n "checking for $ac_word... " >&6; }
++if ${ac_cv_prog_DLLTOOL+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ if test -n "$DLLTOOL"; then
++ ac_cv_prog_DLLTOOL="$DLLTOOL" # Let the user override the test.
++else
++as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
++for as_dir in $PATH
++do
++ IFS=$as_save_IFS
++ test -z "$as_dir" && as_dir=.
++ for ac_exec_ext in '' $ac_executable_extensions; do
++ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
++ ac_cv_prog_DLLTOOL="${ac_tool_prefix}dlltool"
++ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
++ break 2
++ fi
++done
++ done
++IFS=$as_save_IFS
++
++fi
++fi
++DLLTOOL=$ac_cv_prog_DLLTOOL
++if test -n "$DLLTOOL"; then
++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $DLLTOOL" >&5
++$as_echo "$DLLTOOL" >&6; }
++else
++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
++$as_echo "no" >&6; }
++fi
++
++
++fi
++if test -z "$ac_cv_prog_DLLTOOL"; then
++ ac_ct_DLLTOOL=$DLLTOOL
++ # Extract the first word of "dlltool", so it can be a program name with args.
++set dummy dlltool; ac_word=$2
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
++$as_echo_n "checking for $ac_word... " >&6; }
++if ${ac_cv_prog_ac_ct_DLLTOOL+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ if test -n "$ac_ct_DLLTOOL"; then
++ ac_cv_prog_ac_ct_DLLTOOL="$ac_ct_DLLTOOL" # Let the user override the test.
++else
++as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
++for as_dir in $PATH
++do
++ IFS=$as_save_IFS
++ test -z "$as_dir" && as_dir=.
++ for ac_exec_ext in '' $ac_executable_extensions; do
++ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
++ ac_cv_prog_ac_ct_DLLTOOL="dlltool"
++ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
++ break 2
++ fi
++done
++ done
++IFS=$as_save_IFS
++
++fi
++fi
++ac_ct_DLLTOOL=$ac_cv_prog_ac_ct_DLLTOOL
++if test -n "$ac_ct_DLLTOOL"; then
++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_DLLTOOL" >&5
++$as_echo "$ac_ct_DLLTOOL" >&6; }
++else
++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
++$as_echo "no" >&6; }
++fi
++
++ if test "x$ac_ct_DLLTOOL" = x; then
++ DLLTOOL="false"
++ else
++ case $cross_compiling:$ac_tool_warned in
++yes:)
++{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
++$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
++ac_tool_warned=yes ;;
++esac
++ DLLTOOL=$ac_ct_DLLTOOL
++ fi
++else
++ DLLTOOL="$ac_cv_prog_DLLTOOL"
++fi
++
++test -z "$DLLTOOL" && DLLTOOL=dlltool
++
++
++
++
++
++
++
++
++
++
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to associate runtime and link libraries" >&5
++$as_echo_n "checking how to associate runtime and link libraries... " >&6; }
++if ${lt_cv_sharedlib_from_linklib_cmd+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ lt_cv_sharedlib_from_linklib_cmd='unknown'
++
++case $host_os in
++cygwin* | mingw* | pw32* | cegcc*)
++ # two different shell functions defined in ltmain.sh
++ # decide which to use based on capabilities of $DLLTOOL
++ case `$DLLTOOL --help 2>&1` in
++ *--identify-strict*)
++ lt_cv_sharedlib_from_linklib_cmd=func_cygming_dll_for_implib
++ ;;
++ *)
++ lt_cv_sharedlib_from_linklib_cmd=func_cygming_dll_for_implib_fallback
++ ;;
++ esac
++ ;;
++*)
++ # fallback: assume linklib IS sharedlib
++ lt_cv_sharedlib_from_linklib_cmd="$ECHO"
++ ;;
++esac
++
++fi
++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_sharedlib_from_linklib_cmd" >&5
++$as_echo "$lt_cv_sharedlib_from_linklib_cmd" >&6; }
++sharedlib_from_linklib_cmd=$lt_cv_sharedlib_from_linklib_cmd
++test -z "$sharedlib_from_linklib_cmd" && sharedlib_from_linklib_cmd=$ECHO
++
++
++
++
++
++
++
+ plugin_option=
+ plugin_names="liblto_plugin.so liblto_plugin-0.dll cyglto_plugin-0.dll"
+ for plugin in $plugin_names; do
+@@ -5686,8 +5938,10 @@ for plugin in $plugin_names; do
+ done
+
+ if test -n "$ac_tool_prefix"; then
+- # Extract the first word of "${ac_tool_prefix}ar", so it can be a program name with args.
+-set dummy ${ac_tool_prefix}ar; ac_word=$2
++ for ac_prog in ar
++ do
++ # Extract the first word of "$ac_tool_prefix$ac_prog", so it can be a program name with args.
++set dummy $ac_tool_prefix$ac_prog; ac_word=$2
+ { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+ $as_echo_n "checking for $ac_word... " >&6; }
+ if ${ac_cv_prog_AR+:} false; then :
+@@ -5703,7 +5957,7 @@ do
+ test -z "$as_dir" && as_dir=.
+ for ac_exec_ext in '' $ac_executable_extensions; do
+ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
+- ac_cv_prog_AR="${ac_tool_prefix}ar"
++ ac_cv_prog_AR="$ac_tool_prefix$ac_prog"
+ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+ break 2
+ fi
+@@ -5723,11 +5977,15 @@ $as_echo "no" >&6; }
+ fi
+
+
++ test -n "$AR" && break
++ done
+ fi
+-if test -z "$ac_cv_prog_AR"; then
++if test -z "$AR"; then
+ ac_ct_AR=$AR
+- # Extract the first word of "ar", so it can be a program name with args.
+-set dummy ar; ac_word=$2
++ for ac_prog in ar
++do
++ # Extract the first word of "$ac_prog", so it can be a program name with args.
++set dummy $ac_prog; ac_word=$2
+ { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+ $as_echo_n "checking for $ac_word... " >&6; }
+ if ${ac_cv_prog_ac_ct_AR+:} false; then :
+@@ -5743,7 +6001,7 @@ do
+ test -z "$as_dir" && as_dir=.
+ for ac_exec_ext in '' $ac_executable_extensions; do
+ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
+- ac_cv_prog_ac_ct_AR="ar"
++ ac_cv_prog_ac_ct_AR="$ac_prog"
+ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+ break 2
+ fi
+@@ -5762,6 +6020,10 @@ else
+ $as_echo "no" >&6; }
+ fi
+
++
++ test -n "$ac_ct_AR" && break
++done
++
+ if test "x$ac_ct_AR" = x; then
+ AR="false"
+ else
+@@ -5773,25 +6035,19 @@ ac_tool_warned=yes ;;
+ esac
+ AR=$ac_ct_AR
+ fi
+-else
+- AR="$ac_cv_prog_AR"
+ fi
+
+-test -z "$AR" && AR=ar
+-if test -n "$plugin_option"; then
+- if $AR --help 2>&1 | grep -q "\--plugin"; then
+- touch conftest.c
+- $AR $plugin_option rc conftest.a conftest.c
+- if test "$?" != 0; then
+- { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: Failed: $AR $plugin_option rc" >&5
++ touch conftest.c
++ $AR $plugin_option rc conftest.a conftest.c
++ if test "$?" != 0; then
++ { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: Failed: $AR $plugin_option rc" >&5
+ $as_echo "$as_me: WARNING: Failed: $AR $plugin_option rc" >&2;}
+- else
+- AR="$AR $plugin_option"
+- fi
+- rm -f conftest.*
++ else
++ AR="$AR $plugin_option"
+ fi
+-fi
+-test -z "$AR_FLAGS" && AR_FLAGS=cru
++ rm -f conftest.*
++: ${AR=ar}
++: ${AR_FLAGS=cru}
+
+
+
+@@ -5803,6 +6059,64 @@ test -z "$AR_FLAGS" && AR_FLAGS=cru
+
+
+
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for archiver @FILE support" >&5
++$as_echo_n "checking for archiver @FILE support... " >&6; }
++if ${lt_cv_ar_at_file+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ lt_cv_ar_at_file=no
++ cat confdefs.h - <<_ACEOF >conftest.$ac_ext
++/* end confdefs.h. */
++
++int
++main ()
++{
++
++ ;
++ return 0;
++}
++_ACEOF
++if ac_fn_c_try_compile "$LINENO"; then :
++ echo conftest.$ac_objext > conftest.lst
++ lt_ar_try='$AR $AR_FLAGS libconftest.a @conftest.lst >&5'
++ { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$lt_ar_try\""; } >&5
++ (eval $lt_ar_try) 2>&5
++ ac_status=$?
++ $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
++ test $ac_status = 0; }
++ if test "$ac_status" -eq 0; then
++ # Ensure the archiver fails upon bogus file names.
++ rm -f conftest.$ac_objext libconftest.a
++ { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$lt_ar_try\""; } >&5
++ (eval $lt_ar_try) 2>&5
++ ac_status=$?
++ $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
++ test $ac_status = 0; }
++ if test "$ac_status" -ne 0; then
++ lt_cv_ar_at_file=@
++ fi
++ fi
++ rm -f conftest.* libconftest.a
++
++fi
++rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
++
++fi
++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_ar_at_file" >&5
++$as_echo "$lt_cv_ar_at_file" >&6; }
++
++if test "x$lt_cv_ar_at_file" = xno; then
++ archiver_list_spec=
++else
++ archiver_list_spec=$lt_cv_ar_at_file
++fi
++
++
++
++
++
++
++
+ if test -n "$ac_tool_prefix"; then
+ # Extract the first word of "${ac_tool_prefix}strip", so it can be a program name with args.
+ set dummy ${ac_tool_prefix}strip; ac_word=$2
+@@ -6142,8 +6456,8 @@ esac
+ lt_cv_sys_global_symbol_to_cdecl="sed -n -e 's/^T .* \(.*\)$/extern int \1();/p' -e 's/^$symcode* .* \(.*\)$/extern char \1;/p'"
+
+ # Transform an extracted symbol line into symbol name and symbol address
+-lt_cv_sys_global_symbol_to_c_name_address="sed -n -e 's/^: \([^ ]*\) $/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"\2\", (void *) \&\2},/p'"
+-lt_cv_sys_global_symbol_to_c_name_address_lib_prefix="sed -n -e 's/^: \([^ ]*\) $/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \(lib[^ ]*\)$/ {\"\2\", (void *) \&\2},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"lib\2\", (void *) \&\2},/p'"
++lt_cv_sys_global_symbol_to_c_name_address="sed -n -e 's/^: \([^ ]*\)[ ]*$/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"\2\", (void *) \&\2},/p'"
++lt_cv_sys_global_symbol_to_c_name_address_lib_prefix="sed -n -e 's/^: \([^ ]*\)[ ]*$/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \(lib[^ ]*\)$/ {\"\2\", (void *) \&\2},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"lib\2\", (void *) \&\2},/p'"
+
+ # Handle CRLF in mingw tool chain
+ opt_cr=
+@@ -6179,6 +6493,7 @@ for ac_symprfx in "" "_"; do
+ else
+ lt_cv_sys_global_symbol_pipe="sed -n -e 's/^.*[ ]\($symcode$symcode*\)[ ][ ]*$ac_symprfx$sympat$opt_cr$/$symxfrm/p'"
+ fi
++ lt_cv_sys_global_symbol_pipe="$lt_cv_sys_global_symbol_pipe | sed '/ __gnu_lto/d'"
+
+ # Check to see that the pipe works correctly.
+ pipe_works=no
+@@ -6220,6 +6535,18 @@ _LT_EOF
+ if $GREP ' nm_test_var$' "$nlist" >/dev/null; then
+ if $GREP ' nm_test_func$' "$nlist" >/dev/null; then
+ cat <<_LT_EOF > conftest.$ac_ext
++/* Keep this code in sync between libtool.m4, ltmain, lt_system.h, and tests. */
++#if defined(_WIN32) || defined(__CYGWIN__) || defined(_WIN32_WCE)
++/* DATA imports from DLLs on WIN32 con't be const, because runtime
++ relocations are performed -- see ld's documentation on pseudo-relocs. */
++# define LT_DLSYM_CONST
++#elif defined(__osf__)
++/* This system does not cope well with relocations in const data. */
++# define LT_DLSYM_CONST
++#else
++# define LT_DLSYM_CONST const
++#endif
++
+ #ifdef __cplusplus
+ extern "C" {
+ #endif
+@@ -6231,7 +6558,7 @@ _LT_EOF
+ cat <<_LT_EOF >> conftest.$ac_ext
+
+ /* The mapping between symbol names and symbols. */
+-const struct {
++LT_DLSYM_CONST struct {
+ const char *name;
+ void *address;
+ }
+@@ -6257,8 +6584,8 @@ static const void *lt_preloaded_setup() {
+ _LT_EOF
+ # Now try linking the two files.
+ mv conftest.$ac_objext conftstm.$ac_objext
+- lt_save_LIBS="$LIBS"
+- lt_save_CFLAGS="$CFLAGS"
++ lt_globsym_save_LIBS=$LIBS
++ lt_globsym_save_CFLAGS=$CFLAGS
+ LIBS="conftstm.$ac_objext"
+ CFLAGS="$CFLAGS$lt_prog_compiler_no_builtin_flag"
+ if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_link\""; } >&5
+@@ -6268,8 +6595,8 @@ _LT_EOF
+ test $ac_status = 0; } && test -s conftest${ac_exeext}; then
+ pipe_works=yes
+ fi
+- LIBS="$lt_save_LIBS"
+- CFLAGS="$lt_save_CFLAGS"
++ LIBS=$lt_globsym_save_LIBS
++ CFLAGS=$lt_globsym_save_CFLAGS
+ else
+ echo "cannot find nm_test_func in $nlist" >&5
+ fi
+@@ -6306,6 +6633,18 @@ else
+ $as_echo "ok" >&6; }
+ fi
+
++# Response file support.
++if test "$lt_cv_nm_interface" = "MS dumpbin"; then
++ nm_file_list_spec='@'
++elif $NM --help 2>/dev/null | grep '[@]FILE' >/dev/null; then
++ nm_file_list_spec='@'
++fi
++
++
++
++
++
++
+
+
+
+@@ -6322,6 +6661,43 @@ fi
+
+
+
++
++
++
++
++
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for sysroot" >&5
++$as_echo_n "checking for sysroot... " >&6; }
++
++# Check whether --with-libtool-sysroot was given.
++if test "${with_libtool_sysroot+set}" = set; then :
++ withval=$with_libtool_sysroot;
++else
++ with_libtool_sysroot=no
++fi
++
++
++lt_sysroot=
++case ${with_libtool_sysroot} in #(
++ yes)
++ if test "$GCC" = yes; then
++ lt_sysroot=`$CC --print-sysroot 2>/dev/null`
++ fi
++ ;; #(
++ /*)
++ lt_sysroot=`echo "$with_libtool_sysroot" | sed -e "$sed_quote_subst"`
++ ;; #(
++ no|'')
++ ;; #(
++ *)
++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: ${with_libtool_sysroot}" >&5
++$as_echo "${with_libtool_sysroot}" >&6; }
++ as_fn_error $? "The sysroot must be an absolute path." "$LINENO" 5
++ ;;
++esac
++
++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: ${lt_sysroot:-no}" >&5
++$as_echo "${lt_sysroot:-no}" >&6; }
+
+
+
+@@ -6533,6 +6909,123 @@ esac
+
+ need_locks="$enable_libtool_lock"
+
++if test -n "$ac_tool_prefix"; then
++ # Extract the first word of "${ac_tool_prefix}mt", so it can be a program name with args.
++set dummy ${ac_tool_prefix}mt; ac_word=$2
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
++$as_echo_n "checking for $ac_word... " >&6; }
++if ${ac_cv_prog_MANIFEST_TOOL+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ if test -n "$MANIFEST_TOOL"; then
++ ac_cv_prog_MANIFEST_TOOL="$MANIFEST_TOOL" # Let the user override the test.
++else
++as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
++for as_dir in $PATH
++do
++ IFS=$as_save_IFS
++ test -z "$as_dir" && as_dir=.
++ for ac_exec_ext in '' $ac_executable_extensions; do
++ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
++ ac_cv_prog_MANIFEST_TOOL="${ac_tool_prefix}mt"
++ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
++ break 2
++ fi
++done
++ done
++IFS=$as_save_IFS
++
++fi
++fi
++MANIFEST_TOOL=$ac_cv_prog_MANIFEST_TOOL
++if test -n "$MANIFEST_TOOL"; then
++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $MANIFEST_TOOL" >&5
++$as_echo "$MANIFEST_TOOL" >&6; }
++else
++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
++$as_echo "no" >&6; }
++fi
++
++
++fi
++if test -z "$ac_cv_prog_MANIFEST_TOOL"; then
++ ac_ct_MANIFEST_TOOL=$MANIFEST_TOOL
++ # Extract the first word of "mt", so it can be a program name with args.
++set dummy mt; ac_word=$2
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
++$as_echo_n "checking for $ac_word... " >&6; }
++if ${ac_cv_prog_ac_ct_MANIFEST_TOOL+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ if test -n "$ac_ct_MANIFEST_TOOL"; then
++ ac_cv_prog_ac_ct_MANIFEST_TOOL="$ac_ct_MANIFEST_TOOL" # Let the user override the test.
++else
++as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
++for as_dir in $PATH
++do
++ IFS=$as_save_IFS
++ test -z "$as_dir" && as_dir=.
++ for ac_exec_ext in '' $ac_executable_extensions; do
++ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
++ ac_cv_prog_ac_ct_MANIFEST_TOOL="mt"
++ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
++ break 2
++ fi
++done
++ done
++IFS=$as_save_IFS
++
++fi
++fi
++ac_ct_MANIFEST_TOOL=$ac_cv_prog_ac_ct_MANIFEST_TOOL
++if test -n "$ac_ct_MANIFEST_TOOL"; then
++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_MANIFEST_TOOL" >&5
++$as_echo "$ac_ct_MANIFEST_TOOL" >&6; }
++else
++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
++$as_echo "no" >&6; }
++fi
++
++ if test "x$ac_ct_MANIFEST_TOOL" = x; then
++ MANIFEST_TOOL=":"
++ else
++ case $cross_compiling:$ac_tool_warned in
++yes:)
++{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
++$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
++ac_tool_warned=yes ;;
++esac
++ MANIFEST_TOOL=$ac_ct_MANIFEST_TOOL
++ fi
++else
++ MANIFEST_TOOL="$ac_cv_prog_MANIFEST_TOOL"
++fi
++
++test -z "$MANIFEST_TOOL" && MANIFEST_TOOL=mt
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking if $MANIFEST_TOOL is a manifest tool" >&5
++$as_echo_n "checking if $MANIFEST_TOOL is a manifest tool... " >&6; }
++if ${lt_cv_path_mainfest_tool+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ lt_cv_path_mainfest_tool=no
++ echo "$as_me:$LINENO: $MANIFEST_TOOL '-?'" >&5
++ $MANIFEST_TOOL '-?' 2>conftest.err > conftest.out
++ cat conftest.err >&5
++ if $GREP 'Manifest Tool' conftest.out > /dev/null; then
++ lt_cv_path_mainfest_tool=yes
++ fi
++ rm -f conftest*
++fi
++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_path_mainfest_tool" >&5
++$as_echo "$lt_cv_path_mainfest_tool" >&6; }
++if test "x$lt_cv_path_mainfest_tool" != xyes; then
++ MANIFEST_TOOL=:
++fi
++
++
++
++
++
+
+ case $host_os in
+ rhapsody* | darwin*)
+@@ -7096,6 +7589,8 @@ _LT_EOF
+ $LTCC $LTCFLAGS -c -o conftest.o conftest.c 2>&5
+ echo "$AR cru libconftest.a conftest.o" >&5
+ $AR cru libconftest.a conftest.o 2>&5
++ echo "$RANLIB libconftest.a" >&5
++ $RANLIB libconftest.a 2>&5
+ cat > conftest.c << _LT_EOF
+ int main() { return 0;}
+ _LT_EOF
+@@ -7680,8 +8175,6 @@ fi
+ lt_prog_compiler_pic=
+ lt_prog_compiler_static=
+
+-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $compiler option to produce PIC" >&5
+-$as_echo_n "checking for $compiler option to produce PIC... " >&6; }
+
+ if test "$GCC" = yes; then
+ lt_prog_compiler_wl='-Wl,'
+@@ -7847,6 +8340,12 @@ $as_echo_n "checking for $compiler option to produce PIC... " >&6; }
+ lt_prog_compiler_pic='--shared'
+ lt_prog_compiler_static='--static'
+ ;;
++ nagfor*)
++ # NAG Fortran compiler
++ lt_prog_compiler_wl='-Wl,-Wl,,'
++ lt_prog_compiler_pic='-PIC'
++ lt_prog_compiler_static='-Bstatic'
++ ;;
+ pgcc* | pgf77* | pgf90* | pgf95* | pgfortran*)
+ # Portland Group compilers (*not* the Pentium gcc compiler,
+ # which looks to be a dead project)
+@@ -7909,7 +8408,7 @@ $as_echo_n "checking for $compiler option to produce PIC... " >&6; }
+ lt_prog_compiler_pic='-KPIC'
+ lt_prog_compiler_static='-Bstatic'
+ case $cc_basename in
+- f77* | f90* | f95*)
++ f77* | f90* | f95* | sunf77* | sunf90* | sunf95*)
+ lt_prog_compiler_wl='-Qoption ld ';;
+ *)
+ lt_prog_compiler_wl='-Wl,';;
+@@ -7966,13 +8465,17 @@ case $host_os in
+ lt_prog_compiler_pic="$lt_prog_compiler_pic -DPIC"
+ ;;
+ esac
+-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_prog_compiler_pic" >&5
+-$as_echo "$lt_prog_compiler_pic" >&6; }
+-
+-
+-
+-
+
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $compiler option to produce PIC" >&5
++$as_echo_n "checking for $compiler option to produce PIC... " >&6; }
++if ${lt_cv_prog_compiler_pic+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ lt_cv_prog_compiler_pic=$lt_prog_compiler_pic
++fi
++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_pic" >&5
++$as_echo "$lt_cv_prog_compiler_pic" >&6; }
++lt_prog_compiler_pic=$lt_cv_prog_compiler_pic
+
+ #
+ # Check to make sure the PIC flag actually works.
+@@ -8033,6 +8536,11 @@ fi
+
+
+
++
++
++
++
++
+ #
+ # Check to make sure the static flag actually works.
+ #
+@@ -8383,7 +8891,8 @@ _LT_EOF
+ allow_undefined_flag=unsupported
+ always_export_symbols=no
+ enable_shared_with_static_runtimes=yes
+- export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1 DATA/'\'' | $SED -e '\''/^[AITW][ ]/s/.*[ ]//'\'' | sort | uniq > $export_symbols'
++ export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1 DATA/;s/^.*[ ]__nm__\([^ ]*\)[ ][^ ]*/\1 DATA/;/^I[ ]/d;/^[AITW][ ]/s/.* //'\'' | sort | uniq > $export_symbols'
++ exclude_expsyms='[_]+GLOBAL_OFFSET_TABLE_|[_]+GLOBAL__[FID]_.*|[_]+head_[A-Za-z0-9_]+_dll|[A-Za-z0-9_]+_dll_iname'
+
+ if $LD --help 2>&1 | $GREP 'auto-import' > /dev/null; then
+ archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib'
+@@ -8482,12 +8991,12 @@ _LT_EOF
+ whole_archive_flag_spec='--whole-archive$convenience --no-whole-archive'
+ hardcode_libdir_flag_spec=
+ hardcode_libdir_flag_spec_ld='-rpath $libdir'
+- archive_cmds='$LD -shared $libobjs $deplibs $compiler_flags -soname $soname -o $lib'
++ archive_cmds='$LD -shared $libobjs $deplibs $linker_flags -soname $soname -o $lib'
+ if test "x$supports_anon_versioning" = xyes; then
+ archive_expsym_cmds='echo "{ global:" > $output_objdir/$libname.ver~
+ cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~
+ echo "local: *; };" >> $output_objdir/$libname.ver~
+- $LD -shared $libobjs $deplibs $compiler_flags -soname $soname -version-script $output_objdir/$libname.ver -o $lib'
++ $LD -shared $libobjs $deplibs $linker_flags -soname $soname -version-script $output_objdir/$libname.ver -o $lib'
+ fi
+ ;;
+ esac
+@@ -8501,8 +9010,8 @@ _LT_EOF
+ archive_cmds='$LD -Bshareable $libobjs $deplibs $linker_flags -o $lib'
+ wlarc=
+ else
+- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
+- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
++ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
++ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
+ fi
+ ;;
+
+@@ -8520,8 +9029,8 @@ _LT_EOF
+
+ _LT_EOF
+ elif $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then
+- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
+- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
++ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
++ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
+ else
+ ld_shlibs=no
+ fi
+@@ -8567,8 +9076,8 @@ _LT_EOF
+
+ *)
+ if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then
+- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
+- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
++ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
++ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
+ else
+ ld_shlibs=no
+ fi
+@@ -8698,7 +9207,13 @@ _LT_EOF
+ allow_undefined_flag='-berok'
+ # Determine the default libpath from the value encoded in an
+ # empty executable.
+- cat confdefs.h - <<_ACEOF >conftest.$ac_ext
++ if test "${lt_cv_aix_libpath+set}" = set; then
++ aix_libpath=$lt_cv_aix_libpath
++else
++ if ${lt_cv_aix_libpath_+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+ /* end confdefs.h. */
+
+ int
+@@ -8711,22 +9226,29 @@ main ()
+ _ACEOF
+ if ac_fn_c_try_link "$LINENO"; then :
+
+-lt_aix_libpath_sed='
+- /Import File Strings/,/^$/ {
+- /^0/ {
+- s/^0 *\(.*\)$/\1/
+- p
+- }
+- }'
+-aix_libpath=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
+-# Check for a 64-bit object if we didn't find anything.
+-if test -z "$aix_libpath"; then
+- aix_libpath=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
+-fi
++ lt_aix_libpath_sed='
++ /Import File Strings/,/^$/ {
++ /^0/ {
++ s/^0 *\([^ ]*\) *$/\1/
++ p
++ }
++ }'
++ lt_cv_aix_libpath_=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
++ # Check for a 64-bit object if we didn't find anything.
++ if test -z "$lt_cv_aix_libpath_"; then
++ lt_cv_aix_libpath_=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
++ fi
+ fi
+ rm -f core conftest.err conftest.$ac_objext \
+ conftest$ac_exeext conftest.$ac_ext
+-if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
++ if test -z "$lt_cv_aix_libpath_"; then
++ lt_cv_aix_libpath_="/usr/lib:/lib"
++ fi
++
++fi
++
++ aix_libpath=$lt_cv_aix_libpath_
++fi
+
+ hardcode_libdir_flag_spec='${wl}-blibpath:$libdir:'"$aix_libpath"
+ archive_expsym_cmds='$CC -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags `if test "x${allow_undefined_flag}" != "x"; then func_echo_all "${wl}${allow_undefined_flag}"; else :; fi` '"\${wl}$exp_sym_flag:\$export_symbols $shared_flag"
+@@ -8738,7 +9260,13 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
+ else
+ # Determine the default libpath from the value encoded in an
+ # empty executable.
+- cat confdefs.h - <<_ACEOF >conftest.$ac_ext
++ if test "${lt_cv_aix_libpath+set}" = set; then
++ aix_libpath=$lt_cv_aix_libpath
++else
++ if ${lt_cv_aix_libpath_+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+ /* end confdefs.h. */
+
+ int
+@@ -8751,22 +9279,29 @@ main ()
+ _ACEOF
+ if ac_fn_c_try_link "$LINENO"; then :
+
+-lt_aix_libpath_sed='
+- /Import File Strings/,/^$/ {
+- /^0/ {
+- s/^0 *\(.*\)$/\1/
+- p
+- }
+- }'
+-aix_libpath=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
+-# Check for a 64-bit object if we didn't find anything.
+-if test -z "$aix_libpath"; then
+- aix_libpath=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
+-fi
++ lt_aix_libpath_sed='
++ /Import File Strings/,/^$/ {
++ /^0/ {
++ s/^0 *\([^ ]*\) *$/\1/
++ p
++ }
++ }'
++ lt_cv_aix_libpath_=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
++ # Check for a 64-bit object if we didn't find anything.
++ if test -z "$lt_cv_aix_libpath_"; then
++ lt_cv_aix_libpath_=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
++ fi
+ fi
+ rm -f core conftest.err conftest.$ac_objext \
+ conftest$ac_exeext conftest.$ac_ext
+-if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
++ if test -z "$lt_cv_aix_libpath_"; then
++ lt_cv_aix_libpath_="/usr/lib:/lib"
++ fi
++
++fi
++
++ aix_libpath=$lt_cv_aix_libpath_
++fi
+
+ hardcode_libdir_flag_spec='${wl}-blibpath:$libdir:'"$aix_libpath"
+ # Warning - without using the other run time loading flags,
+@@ -8811,20 +9346,63 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
+ # Microsoft Visual C++.
+ # hardcode_libdir_flag_spec is actually meaningless, as there is
+ # no search path for DLLs.
+- hardcode_libdir_flag_spec=' '
+- allow_undefined_flag=unsupported
+- # Tell ltmain to make .lib files, not .a files.
+- libext=lib
+- # Tell ltmain to make .dll files, not .so files.
+- shrext_cmds=".dll"
+- # FIXME: Setting linknames here is a bad hack.
+- archive_cmds='$CC -o $lib $libobjs $compiler_flags `func_echo_all "$deplibs" | $SED '\''s/ -lc$//'\''` -link -dll~linknames='
+- # The linker will automatically build a .lib file if we build a DLL.
+- old_archive_from_new_cmds='true'
+- # FIXME: Should let the user specify the lib program.
+- old_archive_cmds='lib -OUT:$oldlib$oldobjs$old_deplibs'
+- fix_srcfile_path='`cygpath -w "$srcfile"`'
+- enable_shared_with_static_runtimes=yes
++ case $cc_basename in
++ cl*)
++ # Native MSVC
++ hardcode_libdir_flag_spec=' '
++ allow_undefined_flag=unsupported
++ always_export_symbols=yes
++ file_list_spec='@'
++ # Tell ltmain to make .lib files, not .a files.
++ libext=lib
++ # Tell ltmain to make .dll files, not .so files.
++ shrext_cmds=".dll"
++ # FIXME: Setting linknames here is a bad hack.
++ archive_cmds='$CC -o $output_objdir/$soname $libobjs $compiler_flags $deplibs -Wl,-dll~linknames='
++ archive_expsym_cmds='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then
++ sed -n -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' -e '1\\\!p' < $export_symbols > $output_objdir/$soname.exp;
++ else
++ sed -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' < $export_symbols > $output_objdir/$soname.exp;
++ fi~
++ $CC -o $tool_output_objdir$soname $libobjs $compiler_flags $deplibs "@$tool_output_objdir$soname.exp" -Wl,-DLL,-IMPLIB:"$tool_output_objdir$libname.dll.lib"~
++ linknames='
++ # The linker will not automatically build a static lib if we build a DLL.
++ # _LT_TAGVAR(old_archive_from_new_cmds, )='true'
++ enable_shared_with_static_runtimes=yes
++ export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1,DATA/'\'' | $SED -e '\''/^[AITW][ ]/s/.*[ ]//'\'' | sort | uniq > $export_symbols'
++ # Don't use ranlib
++ old_postinstall_cmds='chmod 644 $oldlib'
++ postlink_cmds='lt_outputfile="@OUTPUT@"~
++ lt_tool_outputfile="@TOOL_OUTPUT@"~
++ case $lt_outputfile in
++ *.exe|*.EXE) ;;
++ *)
++ lt_outputfile="$lt_outputfile.exe"
++ lt_tool_outputfile="$lt_tool_outputfile.exe"
++ ;;
++ esac~
++ if test "$MANIFEST_TOOL" != ":" && test -f "$lt_outputfile.manifest"; then
++ $MANIFEST_TOOL -manifest "$lt_tool_outputfile.manifest" -outputresource:"$lt_tool_outputfile" || exit 1;
++ $RM "$lt_outputfile.manifest";
++ fi'
++ ;;
++ *)
++ # Assume MSVC wrapper
++ hardcode_libdir_flag_spec=' '
++ allow_undefined_flag=unsupported
++ # Tell ltmain to make .lib files, not .a files.
++ libext=lib
++ # Tell ltmain to make .dll files, not .so files.
++ shrext_cmds=".dll"
++ # FIXME: Setting linknames here is a bad hack.
++ archive_cmds='$CC -o $lib $libobjs $compiler_flags `func_echo_all "$deplibs" | $SED '\''s/ -lc$//'\''` -link -dll~linknames='
++ # The linker will automatically build a .lib file if we build a DLL.
++ old_archive_from_new_cmds='true'
++ # FIXME: Should let the user specify the lib program.
++ old_archive_cmds='lib -OUT:$oldlib$oldobjs$old_deplibs'
++ enable_shared_with_static_runtimes=yes
++ ;;
++ esac
+ ;;
+
+ darwin* | rhapsody*)
+@@ -8885,7 +9463,7 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
+
+ # FreeBSD 3 and greater uses gcc -shared to do shared libraries.
+ freebsd* | dragonfly*)
+- archive_cmds='$CC -shared -o $lib $libobjs $deplibs $compiler_flags'
++ archive_cmds='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags'
+ hardcode_libdir_flag_spec='-R$libdir'
+ hardcode_direct=yes
+ hardcode_shlibpath_var=no
+@@ -8893,7 +9471,7 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
+
+ hpux9*)
+ if test "$GCC" = yes; then
+- archive_cmds='$RM $output_objdir/$soname~$CC -shared -fPIC ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $libobjs $deplibs $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib'
++ archive_cmds='$RM $output_objdir/$soname~$CC -shared $pic_flag ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $libobjs $deplibs $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib'
+ else
+ archive_cmds='$RM $output_objdir/$soname~$LD -b +b $install_libdir -o $output_objdir/$soname $libobjs $deplibs $linker_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib'
+ fi
+@@ -8909,7 +9487,7 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
+
+ hpux10*)
+ if test "$GCC" = yes && test "$with_gnu_ld" = no; then
+- archive_cmds='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'
++ archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'
+ else
+ archive_cmds='$LD -b +h $soname +b $install_libdir -o $lib $libobjs $deplibs $linker_flags'
+ fi
+@@ -8933,10 +9511,10 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
+ archive_cmds='$CC -shared ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags'
+ ;;
+ ia64*)
+- archive_cmds='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags'
++ archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags'
+ ;;
+ *)
+- archive_cmds='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'
++ archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'
+ ;;
+ esac
+ else
+@@ -9015,23 +9593,36 @@ fi
+
+ irix5* | irix6* | nonstopux*)
+ if test "$GCC" = yes; then
+- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
++ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
+ # Try to use the -exported_symbol ld option, if it does not
+ # work, assume that -exports_file does not work either and
+ # implicitly export all symbols.
+- save_LDFLAGS="$LDFLAGS"
+- LDFLAGS="$LDFLAGS -shared ${wl}-exported_symbol ${wl}foo ${wl}-update_registry ${wl}/dev/null"
+- cat confdefs.h - <<_ACEOF >conftest.$ac_ext
++ # This should be the same for all languages, so no per-tag cache variable.
++ { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the $host_os linker accepts -exported_symbol" >&5
++$as_echo_n "checking whether the $host_os linker accepts -exported_symbol... " >&6; }
++if ${lt_cv_irix_exported_symbol+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ save_LDFLAGS="$LDFLAGS"
++ LDFLAGS="$LDFLAGS -shared ${wl}-exported_symbol ${wl}foo ${wl}-update_registry ${wl}/dev/null"
++ cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+ /* end confdefs.h. */
+-int foo(void) {}
++int foo (void) { return 0; }
+ _ACEOF
+ if ac_fn_c_try_link "$LINENO"; then :
+- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations ${wl}-exports_file ${wl}$export_symbols -o $lib'
+-
++ lt_cv_irix_exported_symbol=yes
++else
++ lt_cv_irix_exported_symbol=no
+ fi
+ rm -f core conftest.err conftest.$ac_objext \
+ conftest$ac_exeext conftest.$ac_ext
+- LDFLAGS="$save_LDFLAGS"
++ LDFLAGS="$save_LDFLAGS"
++fi
++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_irix_exported_symbol" >&5
++$as_echo "$lt_cv_irix_exported_symbol" >&6; }
++ if test "$lt_cv_irix_exported_symbol" = yes; then
++ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations ${wl}-exports_file ${wl}$export_symbols -o $lib'
++ fi
+ else
+ archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib'
+ archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -exports_file $export_symbols -o $lib'
+@@ -9116,7 +9707,7 @@ rm -f core conftest.err conftest.$ac_objext \
+ osf4* | osf5*) # as osf3* with the addition of -msym flag
+ if test "$GCC" = yes; then
+ allow_undefined_flag=' ${wl}-expect_unresolved ${wl}\*'
+- archive_cmds='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
++ archive_cmds='$CC -shared${allow_undefined_flag} $pic_flag $libobjs $deplibs $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
+ hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir'
+ else
+ allow_undefined_flag=' -expect_unresolved \*'
+@@ -9135,9 +9726,9 @@ rm -f core conftest.err conftest.$ac_objext \
+ no_undefined_flag=' -z defs'
+ if test "$GCC" = yes; then
+ wlarc='${wl}'
+- archive_cmds='$CC -shared ${wl}-z ${wl}text ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags'
++ archive_cmds='$CC -shared $pic_flag ${wl}-z ${wl}text ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags'
+ archive_expsym_cmds='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~
+- $CC -shared ${wl}-z ${wl}text ${wl}-M ${wl}$lib.exp ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp'
++ $CC -shared $pic_flag ${wl}-z ${wl}text ${wl}-M ${wl}$lib.exp ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp'
+ else
+ case `$CC -V 2>&1` in
+ *"Compilers 5.0"*)
+@@ -9713,8 +10304,9 @@ cygwin* | mingw* | pw32* | cegcc*)
+ need_version=no
+ need_lib_prefix=no
+
+- case $GCC,$host_os in
+- yes,cygwin* | yes,mingw* | yes,pw32* | yes,cegcc*)
++ case $GCC,$cc_basename in
++ yes,*)
++ # gcc
+ library_names_spec='$libname.dll.a'
+ # DLL is installed to $(libdir)/../bin by postinstall_cmds
+ postinstall_cmds='base_file=`basename \${file}`~
+@@ -9747,13 +10339,71 @@ cygwin* | mingw* | pw32* | cegcc*)
+ library_names_spec='`echo ${libname} | sed -e 's/^lib/pw/'``echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}'
+ ;;
+ esac
++ dynamic_linker='Win32 ld.exe'
++ ;;
++
++ *,cl*)
++ # Native MSVC
++ libname_spec='$name'
++ soname_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}'
++ library_names_spec='${libname}.dll.lib'
++
++ case $build_os in
++ mingw*)
++ sys_lib_search_path_spec=
++ lt_save_ifs=$IFS
++ IFS=';'
++ for lt_path in $LIB
++ do
++ IFS=$lt_save_ifs
++ # Let DOS variable expansion print the short 8.3 style file name.
++ lt_path=`cd "$lt_path" 2>/dev/null && cmd //C "for %i in (".") do @echo %~si"`
++ sys_lib_search_path_spec="$sys_lib_search_path_spec $lt_path"
++ done
++ IFS=$lt_save_ifs
++ # Convert to MSYS style.
++ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | sed -e 's|\\\\|/|g' -e 's| \\([a-zA-Z]\\):| /\\1|g' -e 's|^ ||'`
++ ;;
++ cygwin*)
++ # Convert to unix form, then to dos form, then back to unix form
++ # but this time dos style (no spaces!) so that the unix form looks
++ # like /cygdrive/c/PROGRA~1:/cygdr...
++ sys_lib_search_path_spec=`cygpath --path --unix "$LIB"`
++ sys_lib_search_path_spec=`cygpath --path --dos "$sys_lib_search_path_spec" 2>/dev/null`
++ sys_lib_search_path_spec=`cygpath --path --unix "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"`
++ ;;
++ *)
++ sys_lib_search_path_spec="$LIB"
++ if $ECHO "$sys_lib_search_path_spec" | $GREP ';[c-zC-Z]:/' >/dev/null; then
++ # It is most probably a Windows format PATH.
++ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e 's/;/ /g'`
++ else
++ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"`
++ fi
++ # FIXME: find the short name or the path components, as spaces are
++ # common. (e.g. "Program Files" -> "PROGRA~1")
++ ;;
++ esac
++
++ # DLL is installed to $(libdir)/../bin by postinstall_cmds
++ postinstall_cmds='base_file=`basename \${file}`~
++ dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\${base_file}'\''i; echo \$dlname'\''`~
++ dldir=$destdir/`dirname \$dlpath`~
++ test -d \$dldir || mkdir -p \$dldir~
++ $install_prog $dir/$dlname \$dldir/$dlname'
++ postuninstall_cmds='dldll=`$SHELL 2>&1 -c '\''. $file; echo \$dlname'\''`~
++ dlpath=$dir/\$dldll~
++ $RM \$dlpath'
++ shlibpath_overrides_runpath=yes
++ dynamic_linker='Win32 link.exe'
+ ;;
+
+ *)
++ # Assume MSVC wrapper
+ library_names_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext} $libname.lib'
++ dynamic_linker='Win32 ld.exe'
+ ;;
+ esac
+- dynamic_linker='Win32 ld.exe'
+ # FIXME: first we should search . and the directory the executable is in
+ shlibpath_var=PATH
+ ;;
+@@ -10655,7 +11305,7 @@ else
+ lt_dlunknown=0; lt_dlno_uscore=1; lt_dlneed_uscore=2
+ lt_status=$lt_dlunknown
+ cat > conftest.$ac_ext <<_LT_EOF
+-#line 10658 "configure"
++#line $LINENO "configure"
+ #include "confdefs.h"
+
+ #if HAVE_DLFCN_H
+@@ -10699,10 +11349,10 @@ else
+ /* When -fvisbility=hidden is used, assume the code has been annotated
+ correspondingly for the symbols needed. */
+ #if defined(__GNUC__) && (((__GNUC__ == 3) && (__GNUC_MINOR__ >= 3)) || (__GNUC__ > 3))
+-void fnord () __attribute__((visibility("default")));
++int fnord () __attribute__((visibility("default")));
+ #endif
+
+-void fnord () { int i=42; }
++int fnord () { return 42; }
+ int main ()
+ {
+ void *self = dlopen (0, LT_DLGLOBAL|LT_DLLAZY_OR_NOW);
+@@ -10761,7 +11411,7 @@ else
+ lt_dlunknown=0; lt_dlno_uscore=1; lt_dlneed_uscore=2
+ lt_status=$lt_dlunknown
+ cat > conftest.$ac_ext <<_LT_EOF
+-#line 10764 "configure"
++#line $LINENO "configure"
+ #include "confdefs.h"
+
+ #if HAVE_DLFCN_H
+@@ -10805,10 +11455,10 @@ else
+ /* When -fvisbility=hidden is used, assume the code has been annotated
+ correspondingly for the symbols needed. */
+ #if defined(__GNUC__) && (((__GNUC__ == 3) && (__GNUC_MINOR__ >= 3)) || (__GNUC__ > 3))
+-void fnord () __attribute__((visibility("default")));
++int fnord () __attribute__((visibility("default")));
+ #endif
+
+-void fnord () { int i=42; }
++int fnord () { return 42; }
+ int main ()
+ {
+ void *self = dlopen (0, LT_DLGLOBAL|LT_DLLAZY_OR_NOW);
+@@ -14728,13 +15378,20 @@ exeext='`$ECHO "$exeext" | $SED "$delay_single_quote_subst"`'
+ lt_unset='`$ECHO "$lt_unset" | $SED "$delay_single_quote_subst"`'
+ lt_SP2NL='`$ECHO "$lt_SP2NL" | $SED "$delay_single_quote_subst"`'
+ lt_NL2SP='`$ECHO "$lt_NL2SP" | $SED "$delay_single_quote_subst"`'
++lt_cv_to_host_file_cmd='`$ECHO "$lt_cv_to_host_file_cmd" | $SED "$delay_single_quote_subst"`'
++lt_cv_to_tool_file_cmd='`$ECHO "$lt_cv_to_tool_file_cmd" | $SED "$delay_single_quote_subst"`'
+ reload_flag='`$ECHO "$reload_flag" | $SED "$delay_single_quote_subst"`'
+ reload_cmds='`$ECHO "$reload_cmds" | $SED "$delay_single_quote_subst"`'
+ OBJDUMP='`$ECHO "$OBJDUMP" | $SED "$delay_single_quote_subst"`'
+ deplibs_check_method='`$ECHO "$deplibs_check_method" | $SED "$delay_single_quote_subst"`'
+ file_magic_cmd='`$ECHO "$file_magic_cmd" | $SED "$delay_single_quote_subst"`'
++file_magic_glob='`$ECHO "$file_magic_glob" | $SED "$delay_single_quote_subst"`'
++want_nocaseglob='`$ECHO "$want_nocaseglob" | $SED "$delay_single_quote_subst"`'
++DLLTOOL='`$ECHO "$DLLTOOL" | $SED "$delay_single_quote_subst"`'
++sharedlib_from_linklib_cmd='`$ECHO "$sharedlib_from_linklib_cmd" | $SED "$delay_single_quote_subst"`'
+ AR='`$ECHO "$AR" | $SED "$delay_single_quote_subst"`'
+ AR_FLAGS='`$ECHO "$AR_FLAGS" | $SED "$delay_single_quote_subst"`'
++archiver_list_spec='`$ECHO "$archiver_list_spec" | $SED "$delay_single_quote_subst"`'
+ STRIP='`$ECHO "$STRIP" | $SED "$delay_single_quote_subst"`'
+ RANLIB='`$ECHO "$RANLIB" | $SED "$delay_single_quote_subst"`'
+ old_postinstall_cmds='`$ECHO "$old_postinstall_cmds" | $SED "$delay_single_quote_subst"`'
+@@ -14749,14 +15406,17 @@ lt_cv_sys_global_symbol_pipe='`$ECHO "$lt_cv_sys_global_symbol_pipe" | $SED "$de
+ lt_cv_sys_global_symbol_to_cdecl='`$ECHO "$lt_cv_sys_global_symbol_to_cdecl" | $SED "$delay_single_quote_subst"`'
+ lt_cv_sys_global_symbol_to_c_name_address='`$ECHO "$lt_cv_sys_global_symbol_to_c_name_address" | $SED "$delay_single_quote_subst"`'
+ lt_cv_sys_global_symbol_to_c_name_address_lib_prefix='`$ECHO "$lt_cv_sys_global_symbol_to_c_name_address_lib_prefix" | $SED "$delay_single_quote_subst"`'
++nm_file_list_spec='`$ECHO "$nm_file_list_spec" | $SED "$delay_single_quote_subst"`'
++lt_sysroot='`$ECHO "$lt_sysroot" | $SED "$delay_single_quote_subst"`'
+ objdir='`$ECHO "$objdir" | $SED "$delay_single_quote_subst"`'
+ MAGIC_CMD='`$ECHO "$MAGIC_CMD" | $SED "$delay_single_quote_subst"`'
+ lt_prog_compiler_no_builtin_flag='`$ECHO "$lt_prog_compiler_no_builtin_flag" | $SED "$delay_single_quote_subst"`'
+-lt_prog_compiler_wl='`$ECHO "$lt_prog_compiler_wl" | $SED "$delay_single_quote_subst"`'
+ lt_prog_compiler_pic='`$ECHO "$lt_prog_compiler_pic" | $SED "$delay_single_quote_subst"`'
++lt_prog_compiler_wl='`$ECHO "$lt_prog_compiler_wl" | $SED "$delay_single_quote_subst"`'
+ lt_prog_compiler_static='`$ECHO "$lt_prog_compiler_static" | $SED "$delay_single_quote_subst"`'
+ lt_cv_prog_compiler_c_o='`$ECHO "$lt_cv_prog_compiler_c_o" | $SED "$delay_single_quote_subst"`'
+ need_locks='`$ECHO "$need_locks" | $SED "$delay_single_quote_subst"`'
++MANIFEST_TOOL='`$ECHO "$MANIFEST_TOOL" | $SED "$delay_single_quote_subst"`'
+ DSYMUTIL='`$ECHO "$DSYMUTIL" | $SED "$delay_single_quote_subst"`'
+ NMEDIT='`$ECHO "$NMEDIT" | $SED "$delay_single_quote_subst"`'
+ LIPO='`$ECHO "$LIPO" | $SED "$delay_single_quote_subst"`'
+@@ -14789,12 +15449,12 @@ hardcode_shlibpath_var='`$ECHO "$hardcode_shlibpath_var" | $SED "$delay_single_q
+ hardcode_automatic='`$ECHO "$hardcode_automatic" | $SED "$delay_single_quote_subst"`'
+ inherit_rpath='`$ECHO "$inherit_rpath" | $SED "$delay_single_quote_subst"`'
+ link_all_deplibs='`$ECHO "$link_all_deplibs" | $SED "$delay_single_quote_subst"`'
+-fix_srcfile_path='`$ECHO "$fix_srcfile_path" | $SED "$delay_single_quote_subst"`'
+ always_export_symbols='`$ECHO "$always_export_symbols" | $SED "$delay_single_quote_subst"`'
+ export_symbols_cmds='`$ECHO "$export_symbols_cmds" | $SED "$delay_single_quote_subst"`'
+ exclude_expsyms='`$ECHO "$exclude_expsyms" | $SED "$delay_single_quote_subst"`'
+ include_expsyms='`$ECHO "$include_expsyms" | $SED "$delay_single_quote_subst"`'
+ prelink_cmds='`$ECHO "$prelink_cmds" | $SED "$delay_single_quote_subst"`'
++postlink_cmds='`$ECHO "$postlink_cmds" | $SED "$delay_single_quote_subst"`'
+ file_list_spec='`$ECHO "$file_list_spec" | $SED "$delay_single_quote_subst"`'
+ variables_saved_for_relink='`$ECHO "$variables_saved_for_relink" | $SED "$delay_single_quote_subst"`'
+ need_lib_prefix='`$ECHO "$need_lib_prefix" | $SED "$delay_single_quote_subst"`'
+@@ -14849,8 +15509,13 @@ reload_flag \
+ OBJDUMP \
+ deplibs_check_method \
+ file_magic_cmd \
++file_magic_glob \
++want_nocaseglob \
++DLLTOOL \
++sharedlib_from_linklib_cmd \
+ AR \
+ AR_FLAGS \
++archiver_list_spec \
+ STRIP \
+ RANLIB \
+ CC \
+@@ -14860,12 +15525,14 @@ lt_cv_sys_global_symbol_pipe \
+ lt_cv_sys_global_symbol_to_cdecl \
+ lt_cv_sys_global_symbol_to_c_name_address \
+ lt_cv_sys_global_symbol_to_c_name_address_lib_prefix \
++nm_file_list_spec \
+ lt_prog_compiler_no_builtin_flag \
+-lt_prog_compiler_wl \
+ lt_prog_compiler_pic \
++lt_prog_compiler_wl \
+ lt_prog_compiler_static \
+ lt_cv_prog_compiler_c_o \
+ need_locks \
++MANIFEST_TOOL \
+ DSYMUTIL \
+ NMEDIT \
+ LIPO \
+@@ -14881,7 +15548,6 @@ no_undefined_flag \
+ hardcode_libdir_flag_spec \
+ hardcode_libdir_flag_spec_ld \
+ hardcode_libdir_separator \
+-fix_srcfile_path \
+ exclude_expsyms \
+ include_expsyms \
+ file_list_spec \
+@@ -14917,6 +15583,7 @@ module_cmds \
+ module_expsym_cmds \
+ export_symbols_cmds \
+ prelink_cmds \
++postlink_cmds \
+ postinstall_cmds \
+ postuninstall_cmds \
+ finish_cmds \
+@@ -15690,7 +16357,8 @@ $as_echo X"$file" |
+ # NOTE: Changes made to this file will be lost: look at ltmain.sh.
+ #
+ # Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005,
+-# 2006, 2007, 2008, 2009 Free Software Foundation, Inc.
++# 2006, 2007, 2008, 2009, 2010 Free Software Foundation,
++# Inc.
+ # Written by Gordon Matzigkeit, 1996
+ #
+ # This file is part of GNU Libtool.
+@@ -15793,19 +16461,42 @@ SP2NL=$lt_lt_SP2NL
+ # turn newlines into spaces.
+ NL2SP=$lt_lt_NL2SP
+
++# convert \$build file names to \$host format.
++to_host_file_cmd=$lt_cv_to_host_file_cmd
++
++# convert \$build files to toolchain format.
++to_tool_file_cmd=$lt_cv_to_tool_file_cmd
++
+ # An object symbol dumper.
+ OBJDUMP=$lt_OBJDUMP
+
+ # Method to check whether dependent libraries are shared objects.
+ deplibs_check_method=$lt_deplibs_check_method
+
+-# Command to use when deplibs_check_method == "file_magic".
++# Command to use when deplibs_check_method = "file_magic".
+ file_magic_cmd=$lt_file_magic_cmd
+
++# How to find potential files when deplibs_check_method = "file_magic".
++file_magic_glob=$lt_file_magic_glob
++
++# Find potential files using nocaseglob when deplibs_check_method = "file_magic".
++want_nocaseglob=$lt_want_nocaseglob
++
++# DLL creation program.
++DLLTOOL=$lt_DLLTOOL
++
++# Command to associate shared and link libraries.
++sharedlib_from_linklib_cmd=$lt_sharedlib_from_linklib_cmd
++
+ # The archiver.
+ AR=$lt_AR
++
++# Flags to create an archive.
+ AR_FLAGS=$lt_AR_FLAGS
+
++# How to feed a file listing to the archiver.
++archiver_list_spec=$lt_archiver_list_spec
++
+ # A symbol stripping program.
+ STRIP=$lt_STRIP
+
+@@ -15835,6 +16526,12 @@ global_symbol_to_c_name_address=$lt_lt_cv_sys_global_symbol_to_c_name_address
+ # Transform the output of nm in a C name address pair when lib prefix is needed.
+ global_symbol_to_c_name_address_lib_prefix=$lt_lt_cv_sys_global_symbol_to_c_name_address_lib_prefix
+
++# Specify filename containing input files for \$NM.
++nm_file_list_spec=$lt_nm_file_list_spec
++
++# The root where to search for dependent libraries,and in which our libraries should be installed.
++lt_sysroot=$lt_sysroot
++
+ # The name of the directory that contains temporary libtool files.
+ objdir=$objdir
+
+@@ -15844,6 +16541,9 @@ MAGIC_CMD=$MAGIC_CMD
+ # Must we lock files when doing compilation?
+ need_locks=$lt_need_locks
+
++# Manifest tool.
++MANIFEST_TOOL=$lt_MANIFEST_TOOL
++
+ # Tool to manipulate archived DWARF debug symbol files on Mac OS X.
+ DSYMUTIL=$lt_DSYMUTIL
+
+@@ -15958,12 +16658,12 @@ with_gcc=$GCC
+ # Compiler flag to turn off builtin functions.
+ no_builtin_flag=$lt_lt_prog_compiler_no_builtin_flag
+
+-# How to pass a linker flag through the compiler.
+-wl=$lt_lt_prog_compiler_wl
+-
+ # Additional compiler flags for building library objects.
+ pic_flag=$lt_lt_prog_compiler_pic
+
++# How to pass a linker flag through the compiler.
++wl=$lt_lt_prog_compiler_wl
++
+ # Compiler flag to prevent dynamic linking.
+ link_static_flag=$lt_lt_prog_compiler_static
+
+@@ -16050,9 +16750,6 @@ inherit_rpath=$inherit_rpath
+ # Whether libtool must link a program against all its dependency libraries.
+ link_all_deplibs=$link_all_deplibs
+
+-# Fix the shell variable \$srcfile for the compiler.
+-fix_srcfile_path=$lt_fix_srcfile_path
+-
+ # Set to "yes" if exported symbols are required.
+ always_export_symbols=$always_export_symbols
+
+@@ -16068,6 +16765,9 @@ include_expsyms=$lt_include_expsyms
+ # Commands necessary for linking programs (against libraries) with templates.
+ prelink_cmds=$lt_prelink_cmds
+
++# Commands necessary for finishing linking programs.
++postlink_cmds=$lt_postlink_cmds
++
+ # Specify filename containing input files.
+ file_list_spec=$lt_file_list_spec
+
+@@ -16100,210 +16800,169 @@ ltmain="$ac_aux_dir/ltmain.sh"
+ # if finds mixed CR/LF and LF-only lines. Since sed operates in
+ # text mode, it properly converts lines to CR/LF. This bash problem
+ # is reportedly fixed, but why not run on old versions too?
+- sed '/^# Generated shell functions inserted here/q' "$ltmain" >> "$cfgfile" \
+- || (rm -f "$cfgfile"; exit 1)
+-
+- case $xsi_shell in
+- yes)
+- cat << \_LT_EOF >> "$cfgfile"
+-
+-# func_dirname file append nondir_replacement
+-# Compute the dirname of FILE. If nonempty, add APPEND to the result,
+-# otherwise set result to NONDIR_REPLACEMENT.
+-func_dirname ()
+-{
+- case ${1} in
+- */*) func_dirname_result="${1%/*}${2}" ;;
+- * ) func_dirname_result="${3}" ;;
+- esac
+-}
+-
+-# func_basename file
+-func_basename ()
+-{
+- func_basename_result="${1##*/}"
+-}
+-
+-# func_dirname_and_basename file append nondir_replacement
+-# perform func_basename and func_dirname in a single function
+-# call:
+-# dirname: Compute the dirname of FILE. If nonempty,
+-# add APPEND to the result, otherwise set result
+-# to NONDIR_REPLACEMENT.
+-# value returned in "$func_dirname_result"
+-# basename: Compute filename of FILE.
+-# value retuned in "$func_basename_result"
+-# Implementation must be kept synchronized with func_dirname
+-# and func_basename. For efficiency, we do not delegate to
+-# those functions but instead duplicate the functionality here.
+-func_dirname_and_basename ()
+-{
+- case ${1} in
+- */*) func_dirname_result="${1%/*}${2}" ;;
+- * ) func_dirname_result="${3}" ;;
+- esac
+- func_basename_result="${1##*/}"
+-}
+-
+-# func_stripname prefix suffix name
+-# strip PREFIX and SUFFIX off of NAME.
+-# PREFIX and SUFFIX must not contain globbing or regex special
+-# characters, hashes, percent signs, but SUFFIX may contain a leading
+-# dot (in which case that matches only a dot).
+-func_stripname ()
+-{
+- # pdksh 5.2.14 does not do ${X%$Y} correctly if both X and Y are
+- # positional parameters, so assign one to ordinary parameter first.
+- func_stripname_result=${3}
+- func_stripname_result=${func_stripname_result#"${1}"}
+- func_stripname_result=${func_stripname_result%"${2}"}
+-}
+-
+-# func_opt_split
+-func_opt_split ()
+-{
+- func_opt_split_opt=${1%%=*}
+- func_opt_split_arg=${1#*=}
+-}
+-
+-# func_lo2o object
+-func_lo2o ()
+-{
+- case ${1} in
+- *.lo) func_lo2o_result=${1%.lo}.${objext} ;;
+- *) func_lo2o_result=${1} ;;
+- esac
+-}
+-
+-# func_xform libobj-or-source
+-func_xform ()
+-{
+- func_xform_result=${1%.*}.lo
+-}
+-
+-# func_arith arithmetic-term...
+-func_arith ()
+-{
+- func_arith_result=$(( $* ))
+-}
+-
+-# func_len string
+-# STRING may not start with a hyphen.
+-func_len ()
+-{
+- func_len_result=${#1}
+-}
+-
+-_LT_EOF
+- ;;
+- *) # Bourne compatible functions.
+- cat << \_LT_EOF >> "$cfgfile"
+-
+-# func_dirname file append nondir_replacement
+-# Compute the dirname of FILE. If nonempty, add APPEND to the result,
+-# otherwise set result to NONDIR_REPLACEMENT.
+-func_dirname ()
+-{
+- # Extract subdirectory from the argument.
+- func_dirname_result=`$ECHO "${1}" | $SED "$dirname"`
+- if test "X$func_dirname_result" = "X${1}"; then
+- func_dirname_result="${3}"
+- else
+- func_dirname_result="$func_dirname_result${2}"
+- fi
+-}
+-
+-# func_basename file
+-func_basename ()
+-{
+- func_basename_result=`$ECHO "${1}" | $SED "$basename"`
+-}
+-
+-
+-# func_stripname prefix suffix name
+-# strip PREFIX and SUFFIX off of NAME.
+-# PREFIX and SUFFIX must not contain globbing or regex special
+-# characters, hashes, percent signs, but SUFFIX may contain a leading
+-# dot (in which case that matches only a dot).
+-# func_strip_suffix prefix name
+-func_stripname ()
+-{
+- case ${2} in
+- .*) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%\\\\${2}\$%%"`;;
+- *) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%${2}\$%%"`;;
+- esac
+-}
+-
+-# sed scripts:
+-my_sed_long_opt='1s/^\(-[^=]*\)=.*/\1/;q'
+-my_sed_long_arg='1s/^-[^=]*=//'
+-
+-# func_opt_split
+-func_opt_split ()
+-{
+- func_opt_split_opt=`$ECHO "${1}" | $SED "$my_sed_long_opt"`
+- func_opt_split_arg=`$ECHO "${1}" | $SED "$my_sed_long_arg"`
+-}
+-
+-# func_lo2o object
+-func_lo2o ()
+-{
+- func_lo2o_result=`$ECHO "${1}" | $SED "$lo2o"`
+-}
+-
+-# func_xform libobj-or-source
+-func_xform ()
+-{
+- func_xform_result=`$ECHO "${1}" | $SED 's/\.[^.]*$/.lo/'`
+-}
+-
+-# func_arith arithmetic-term...
+-func_arith ()
+-{
+- func_arith_result=`expr "$@"`
+-}
+-
+-# func_len string
+-# STRING may not start with a hyphen.
+-func_len ()
+-{
+- func_len_result=`expr "$1" : ".*" 2>/dev/null || echo $max_cmd_len`
+-}
+-
+-_LT_EOF
+-esac
+-
+-case $lt_shell_append in
+- yes)
+- cat << \_LT_EOF >> "$cfgfile"
+-
+-# func_append var value
+-# Append VALUE to the end of shell variable VAR.
+-func_append ()
+-{
+- eval "$1+=\$2"
+-}
+-_LT_EOF
+- ;;
+- *)
+- cat << \_LT_EOF >> "$cfgfile"
+-
+-# func_append var value
+-# Append VALUE to the end of shell variable VAR.
+-func_append ()
+-{
+- eval "$1=\$$1\$2"
+-}
+-
+-_LT_EOF
+- ;;
+- esac
+-
+-
+- sed -n '/^# Generated shell functions inserted here/,$p' "$ltmain" >> "$cfgfile" \
+- || (rm -f "$cfgfile"; exit 1)
+-
+- mv -f "$cfgfile" "$ofile" ||
++ sed '$q' "$ltmain" >> "$cfgfile" \
++ || (rm -f "$cfgfile"; exit 1)
++
++ if test x"$xsi_shell" = xyes; then
++ sed -e '/^func_dirname ()$/,/^} # func_dirname /c\
++func_dirname ()\
++{\
++\ case ${1} in\
++\ */*) func_dirname_result="${1%/*}${2}" ;;\
++\ * ) func_dirname_result="${3}" ;;\
++\ esac\
++} # Extended-shell func_dirname implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++
++ sed -e '/^func_basename ()$/,/^} # func_basename /c\
++func_basename ()\
++{\
++\ func_basename_result="${1##*/}"\
++} # Extended-shell func_basename implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++
++ sed -e '/^func_dirname_and_basename ()$/,/^} # func_dirname_and_basename /c\
++func_dirname_and_basename ()\
++{\
++\ case ${1} in\
++\ */*) func_dirname_result="${1%/*}${2}" ;;\
++\ * ) func_dirname_result="${3}" ;;\
++\ esac\
++\ func_basename_result="${1##*/}"\
++} # Extended-shell func_dirname_and_basename implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++
++ sed -e '/^func_stripname ()$/,/^} # func_stripname /c\
++func_stripname ()\
++{\
++\ # pdksh 5.2.14 does not do ${X%$Y} correctly if both X and Y are\
++\ # positional parameters, so assign one to ordinary parameter first.\
++\ func_stripname_result=${3}\
++\ func_stripname_result=${func_stripname_result#"${1}"}\
++\ func_stripname_result=${func_stripname_result%"${2}"}\
++} # Extended-shell func_stripname implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++
++ sed -e '/^func_split_long_opt ()$/,/^} # func_split_long_opt /c\
++func_split_long_opt ()\
++{\
++\ func_split_long_opt_name=${1%%=*}\
++\ func_split_long_opt_arg=${1#*=}\
++} # Extended-shell func_split_long_opt implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++
++ sed -e '/^func_split_short_opt ()$/,/^} # func_split_short_opt /c\
++func_split_short_opt ()\
++{\
++\ func_split_short_opt_arg=${1#??}\
++\ func_split_short_opt_name=${1%"$func_split_short_opt_arg"}\
++} # Extended-shell func_split_short_opt implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++
++ sed -e '/^func_lo2o ()$/,/^} # func_lo2o /c\
++func_lo2o ()\
++{\
++\ case ${1} in\
++\ *.lo) func_lo2o_result=${1%.lo}.${objext} ;;\
++\ *) func_lo2o_result=${1} ;;\
++\ esac\
++} # Extended-shell func_lo2o implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++
++ sed -e '/^func_xform ()$/,/^} # func_xform /c\
++func_xform ()\
++{\
++ func_xform_result=${1%.*}.lo\
++} # Extended-shell func_xform implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++
++ sed -e '/^func_arith ()$/,/^} # func_arith /c\
++func_arith ()\
++{\
++ func_arith_result=$(( $* ))\
++} # Extended-shell func_arith implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++
++ sed -e '/^func_len ()$/,/^} # func_len /c\
++func_len ()\
++{\
++ func_len_result=${#1}\
++} # Extended-shell func_len implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++fi
++
++if test x"$lt_shell_append" = xyes; then
++ sed -e '/^func_append ()$/,/^} # func_append /c\
++func_append ()\
++{\
++ eval "${1}+=\\${2}"\
++} # Extended-shell func_append implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++
++ sed -e '/^func_append_quoted ()$/,/^} # func_append_quoted /c\
++func_append_quoted ()\
++{\
++\ func_quote_for_eval "${2}"\
++\ eval "${1}+=\\\\ \\$func_quote_for_eval_result"\
++} # Extended-shell func_append_quoted implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++
++ # Save a `func_append' function call where possible by direct use of '+='
++ sed -e 's%func_append \([a-zA-Z_]\{1,\}\) "%\1+="%g' $cfgfile > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++ test 0 -eq $? || _lt_function_replace_fail=:
++else
++ # Save a `func_append' function call even when '+=' is not available
++ sed -e 's%func_append \([a-zA-Z_]\{1,\}\) "%\1="$\1%g' $cfgfile > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++ test 0 -eq $? || _lt_function_replace_fail=:
++fi
++
++if test x"$_lt_function_replace_fail" = x":"; then
++ { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: Unable to substitute extended shell functions in $ofile" >&5
++$as_echo "$as_me: WARNING: Unable to substitute extended shell functions in $ofile" >&2;}
++fi
++
++
++ mv -f "$cfgfile" "$ofile" ||
+ (rm -f "$ofile" && cp "$cfgfile" "$ofile" && rm -f "$cfgfile")
+ chmod +x "$ofile"
+
+diff --git a/gprofng/Makefile.in b/gprofng/Makefile.in
+index dbc868191b7..1ca6dd01f58 100644
+--- a/gprofng/Makefile.in
++++ b/gprofng/Makefile.in
+@@ -253,6 +253,7 @@ CXXFLAGS = @CXXFLAGS@
+ CYGPATH_W = @CYGPATH_W@
+ DEFS = @DEFS@
+ DEPDIR = @DEPDIR@
++DLLTOOL = @DLLTOOL@
+ DSYMUTIL = @DSYMUTIL@
+ DUMPBIN = @DUMPBIN@
+ ECHO_C = @ECHO_C@
+@@ -289,6 +290,7 @@ LN_S = @LN_S@
+ LTLIBOBJS = @LTLIBOBJS@
+ MAINT = @MAINT@
+ MAKEINFO = @MAKEINFO@
++MANIFEST_TOOL = @MANIFEST_TOOL@
+ MKDIR_P = @MKDIR_P@
+ NM = @NM@
+ NMEDIT = @NMEDIT@
+diff --git a/gprofng/configure b/gprofng/configure
+index bdfa2281f2c..b6376eb3f77 100755
+--- a/gprofng/configure
++++ b/gprofng/configure
+@@ -671,6 +671,8 @@ OTOOL
+ LIPO
+ NMEDIT
+ DSYMUTIL
++MANIFEST_TOOL
++DLLTOOL
+ OBJDUMP
+ LN_S
+ NM
+@@ -801,6 +803,7 @@ enable_static
+ with_pic
+ enable_fast_install
+ with_gnu_ld
++with_libtool_sysroot
+ enable_libtool_lock
+ enable_werror_always
+ enable_gprofng_tools
+@@ -1464,6 +1467,8 @@ Optional Packages:
+ --with-pic try to use only PIC/non-PIC objects [default=use
+ both]
+ --with-gnu-ld assume the C compiler uses GNU ld [default=no]
++ --with-libtool-sysroot=DIR Search for dependent libraries within DIR
++ (or the compiler's sysroot if not specified).
+ --with-jdk=PATH specify prefix directory for installed JDK.
+ --with-system-zlib use installed libz
+
+@@ -6152,8 +6157,8 @@ esac
+
+
+
+-macro_version='2.2.7a'
+-macro_revision='1.3134'
++macro_version='2.4'
++macro_revision='1.3293'
+
+
+
+@@ -6193,7 +6198,7 @@ ECHO=$ECHO$ECHO$ECHO$ECHO$ECHO$ECHO
+ { $as_echo "$as_me:${as_lineno-$LINENO}: checking how to print strings" >&5
+ $as_echo_n "checking how to print strings... " >&6; }
+ # Test print first, because it will be a builtin if present.
+-if test "X`print -r -- -n 2>/dev/null`" = X-n && \
++if test "X`( print -r -- -n ) 2>/dev/null`" = X-n && \
+ test "X`print -r -- $ECHO 2>/dev/null`" = "X$ECHO"; then
+ ECHO='print -r --'
+ elif test "X`printf %s $ECHO 2>/dev/null`" = "X$ECHO"; then
+@@ -6886,8 +6891,8 @@ $as_echo_n "checking whether the shell understands some XSI constructs... " >&6;
+ # Try some XSI features
+ xsi_shell=no
+ ( _lt_dummy="a/b/c"
+- test "${_lt_dummy##*/},${_lt_dummy%/*},"${_lt_dummy%"$_lt_dummy"}, \
+- = c,a/b,, \
++ test "${_lt_dummy##*/},${_lt_dummy%/*},${_lt_dummy#??}"${_lt_dummy%"$_lt_dummy"}, \
++ = c,a/b,b/c, \
+ && eval 'test $(( 1 + 1 )) -eq 2 \
+ && test "${#_lt_dummy}" -eq 5' ) >/dev/null 2>&1 \
+ && xsi_shell=yes
+@@ -6936,6 +6941,80 @@ esac
+
+
+
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to convert $build file names to $host format" >&5
++$as_echo_n "checking how to convert $build file names to $host format... " >&6; }
++if ${lt_cv_to_host_file_cmd+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ case $host in
++ *-*-mingw* )
++ case $build in
++ *-*-mingw* ) # actually msys
++ lt_cv_to_host_file_cmd=func_convert_file_msys_to_w32
++ ;;
++ *-*-cygwin* )
++ lt_cv_to_host_file_cmd=func_convert_file_cygwin_to_w32
++ ;;
++ * ) # otherwise, assume *nix
++ lt_cv_to_host_file_cmd=func_convert_file_nix_to_w32
++ ;;
++ esac
++ ;;
++ *-*-cygwin* )
++ case $build in
++ *-*-mingw* ) # actually msys
++ lt_cv_to_host_file_cmd=func_convert_file_msys_to_cygwin
++ ;;
++ *-*-cygwin* )
++ lt_cv_to_host_file_cmd=func_convert_file_noop
++ ;;
++ * ) # otherwise, assume *nix
++ lt_cv_to_host_file_cmd=func_convert_file_nix_to_cygwin
++ ;;
++ esac
++ ;;
++ * ) # unhandled hosts (and "normal" native builds)
++ lt_cv_to_host_file_cmd=func_convert_file_noop
++ ;;
++esac
++
++fi
++
++to_host_file_cmd=$lt_cv_to_host_file_cmd
++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_to_host_file_cmd" >&5
++$as_echo "$lt_cv_to_host_file_cmd" >&6; }
++
++
++
++
++
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to convert $build file names to toolchain format" >&5
++$as_echo_n "checking how to convert $build file names to toolchain format... " >&6; }
++if ${lt_cv_to_tool_file_cmd+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ #assume ordinary cross tools, or native build.
++lt_cv_to_tool_file_cmd=func_convert_file_noop
++case $host in
++ *-*-mingw* )
++ case $build in
++ *-*-mingw* ) # actually msys
++ lt_cv_to_tool_file_cmd=func_convert_file_msys_to_w32
++ ;;
++ esac
++ ;;
++esac
++
++fi
++
++to_tool_file_cmd=$lt_cv_to_tool_file_cmd
++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_to_tool_file_cmd" >&5
++$as_echo "$lt_cv_to_tool_file_cmd" >&6; }
++
++
++
++
++
+ { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $LD option to reload object files" >&5
+ $as_echo_n "checking for $LD option to reload object files... " >&6; }
+ if ${lt_cv_ld_reload_flag+:} false; then :
+@@ -6952,6 +7031,11 @@ case $reload_flag in
+ esac
+ reload_cmds='$LD$reload_flag -o $output$reload_objs'
+ case $host_os in
++ cygwin* | mingw* | pw32* | cegcc*)
++ if test "$GCC" != yes; then
++ reload_cmds=false
++ fi
++ ;;
+ darwin*)
+ if test "$GCC" = yes; then
+ reload_cmds='$LTCC $LTCFLAGS -nostdlib ${wl}-r -o $output$reload_objs'
+@@ -7120,7 +7204,8 @@ mingw* | pw32*)
+ lt_cv_deplibs_check_method='file_magic ^x86 archive import|^x86 DLL'
+ lt_cv_file_magic_cmd='func_win32_libid'
+ else
+- lt_cv_deplibs_check_method='file_magic file format pei*-i386(.*architecture: i386)?'
++ # Keep this pattern in sync with the one in func_win32_libid.
++ lt_cv_deplibs_check_method='file_magic file format (pei*-i386(.*architecture: i386)?|pe-arm-wince|pe-x86-64)'
+ lt_cv_file_magic_cmd='$OBJDUMP -f'
+ fi
+ ;;
+@@ -7279,6 +7364,21 @@ esac
+ fi
+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_deplibs_check_method" >&5
+ $as_echo "$lt_cv_deplibs_check_method" >&6; }
++
++file_magic_glob=
++want_nocaseglob=no
++if test "$build" = "$host"; then
++ case $host_os in
++ mingw* | pw32*)
++ if ( shopt | grep nocaseglob ) >/dev/null 2>&1; then
++ want_nocaseglob=yes
++ else
++ file_magic_glob=`echo aAbBcCdDeEfFgGhHiIjJkKlLmMnNoOpPqQrRsStTuUvVwWxXyYzZ | $SED -e "s/\(..\)/s\/[\1]\/[\1]\/g;/g"`
++ fi
++ ;;
++ esac
++fi
++
+ file_magic_cmd=$lt_cv_file_magic_cmd
+ deplibs_check_method=$lt_cv_deplibs_check_method
+ test -z "$deplibs_check_method" && deplibs_check_method=unknown
+@@ -7294,6 +7394,157 @@ test -z "$deplibs_check_method" && deplibs_check_method=unknown
+
+
+
++
++
++
++
++
++
++
++
++
++
++if test -n "$ac_tool_prefix"; then
++ # Extract the first word of "${ac_tool_prefix}dlltool", so it can be a program name with args.
++set dummy ${ac_tool_prefix}dlltool; ac_word=$2
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
++$as_echo_n "checking for $ac_word... " >&6; }
++if ${ac_cv_prog_DLLTOOL+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ if test -n "$DLLTOOL"; then
++ ac_cv_prog_DLLTOOL="$DLLTOOL" # Let the user override the test.
++else
++as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
++for as_dir in $PATH
++do
++ IFS=$as_save_IFS
++ test -z "$as_dir" && as_dir=.
++ for ac_exec_ext in '' $ac_executable_extensions; do
++ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
++ ac_cv_prog_DLLTOOL="${ac_tool_prefix}dlltool"
++ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
++ break 2
++ fi
++done
++ done
++IFS=$as_save_IFS
++
++fi
++fi
++DLLTOOL=$ac_cv_prog_DLLTOOL
++if test -n "$DLLTOOL"; then
++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $DLLTOOL" >&5
++$as_echo "$DLLTOOL" >&6; }
++else
++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
++$as_echo "no" >&6; }
++fi
++
++
++fi
++if test -z "$ac_cv_prog_DLLTOOL"; then
++ ac_ct_DLLTOOL=$DLLTOOL
++ # Extract the first word of "dlltool", so it can be a program name with args.
++set dummy dlltool; ac_word=$2
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
++$as_echo_n "checking for $ac_word... " >&6; }
++if ${ac_cv_prog_ac_ct_DLLTOOL+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ if test -n "$ac_ct_DLLTOOL"; then
++ ac_cv_prog_ac_ct_DLLTOOL="$ac_ct_DLLTOOL" # Let the user override the test.
++else
++as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
++for as_dir in $PATH
++do
++ IFS=$as_save_IFS
++ test -z "$as_dir" && as_dir=.
++ for ac_exec_ext in '' $ac_executable_extensions; do
++ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
++ ac_cv_prog_ac_ct_DLLTOOL="dlltool"
++ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
++ break 2
++ fi
++done
++ done
++IFS=$as_save_IFS
++
++fi
++fi
++ac_ct_DLLTOOL=$ac_cv_prog_ac_ct_DLLTOOL
++if test -n "$ac_ct_DLLTOOL"; then
++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_DLLTOOL" >&5
++$as_echo "$ac_ct_DLLTOOL" >&6; }
++else
++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
++$as_echo "no" >&6; }
++fi
++
++ if test "x$ac_ct_DLLTOOL" = x; then
++ DLLTOOL="false"
++ else
++ case $cross_compiling:$ac_tool_warned in
++yes:)
++{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
++$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
++ac_tool_warned=yes ;;
++esac
++ DLLTOOL=$ac_ct_DLLTOOL
++ fi
++else
++ DLLTOOL="$ac_cv_prog_DLLTOOL"
++fi
++
++test -z "$DLLTOOL" && DLLTOOL=dlltool
++
++
++
++
++
++
++
++
++
++
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to associate runtime and link libraries" >&5
++$as_echo_n "checking how to associate runtime and link libraries... " >&6; }
++if ${lt_cv_sharedlib_from_linklib_cmd+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ lt_cv_sharedlib_from_linklib_cmd='unknown'
++
++case $host_os in
++cygwin* | mingw* | pw32* | cegcc*)
++ # two different shell functions defined in ltmain.sh
++ # decide which to use based on capabilities of $DLLTOOL
++ case `$DLLTOOL --help 2>&1` in
++ *--identify-strict*)
++ lt_cv_sharedlib_from_linklib_cmd=func_cygming_dll_for_implib
++ ;;
++ *)
++ lt_cv_sharedlib_from_linklib_cmd=func_cygming_dll_for_implib_fallback
++ ;;
++ esac
++ ;;
++*)
++ # fallback: assume linklib IS sharedlib
++ lt_cv_sharedlib_from_linklib_cmd="$ECHO"
++ ;;
++esac
++
++fi
++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_sharedlib_from_linklib_cmd" >&5
++$as_echo "$lt_cv_sharedlib_from_linklib_cmd" >&6; }
++sharedlib_from_linklib_cmd=$lt_cv_sharedlib_from_linklib_cmd
++test -z "$sharedlib_from_linklib_cmd" && sharedlib_from_linklib_cmd=$ECHO
++
++
++
++
++
++
++
+ plugin_option=
+ plugin_names="liblto_plugin.so liblto_plugin-0.dll cyglto_plugin-0.dll"
+ for plugin in $plugin_names; do
+@@ -7308,8 +7559,10 @@ for plugin in $plugin_names; do
+ done
+
+ if test -n "$ac_tool_prefix"; then
+- # Extract the first word of "${ac_tool_prefix}ar", so it can be a program name with args.
+-set dummy ${ac_tool_prefix}ar; ac_word=$2
++ for ac_prog in ar
++ do
++ # Extract the first word of "$ac_tool_prefix$ac_prog", so it can be a program name with args.
++set dummy $ac_tool_prefix$ac_prog; ac_word=$2
+ { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+ $as_echo_n "checking for $ac_word... " >&6; }
+ if ${ac_cv_prog_AR+:} false; then :
+@@ -7325,7 +7578,7 @@ do
+ test -z "$as_dir" && as_dir=.
+ for ac_exec_ext in '' $ac_executable_extensions; do
+ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
+- ac_cv_prog_AR="${ac_tool_prefix}ar"
++ ac_cv_prog_AR="$ac_tool_prefix$ac_prog"
+ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+ break 2
+ fi
+@@ -7345,11 +7598,15 @@ $as_echo "no" >&6; }
+ fi
+
+
++ test -n "$AR" && break
++ done
+ fi
+-if test -z "$ac_cv_prog_AR"; then
++if test -z "$AR"; then
+ ac_ct_AR=$AR
+- # Extract the first word of "ar", so it can be a program name with args.
+-set dummy ar; ac_word=$2
++ for ac_prog in ar
++do
++ # Extract the first word of "$ac_prog", so it can be a program name with args.
++set dummy $ac_prog; ac_word=$2
+ { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+ $as_echo_n "checking for $ac_word... " >&6; }
+ if ${ac_cv_prog_ac_ct_AR+:} false; then :
+@@ -7365,7 +7622,7 @@ do
+ test -z "$as_dir" && as_dir=.
+ for ac_exec_ext in '' $ac_executable_extensions; do
+ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
+- ac_cv_prog_ac_ct_AR="ar"
++ ac_cv_prog_ac_ct_AR="$ac_prog"
+ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+ break 2
+ fi
+@@ -7384,6 +7641,10 @@ else
+ $as_echo "no" >&6; }
+ fi
+
++
++ test -n "$ac_ct_AR" && break
++done
++
+ if test "x$ac_ct_AR" = x; then
+ AR="false"
+ else
+@@ -7395,25 +7656,19 @@ ac_tool_warned=yes ;;
+ esac
+ AR=$ac_ct_AR
+ fi
+-else
+- AR="$ac_cv_prog_AR"
+ fi
+
+-test -z "$AR" && AR=ar
+-if test -n "$plugin_option"; then
+- if $AR --help 2>&1 | grep -q "\--plugin"; then
+- touch conftest.c
+- $AR $plugin_option rc conftest.a conftest.c
+- if test "$?" != 0; then
+- { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: Failed: $AR $plugin_option rc" >&5
++ touch conftest.c
++ $AR $plugin_option rc conftest.a conftest.c
++ if test "$?" != 0; then
++ { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: Failed: $AR $plugin_option rc" >&5
+ $as_echo "$as_me: WARNING: Failed: $AR $plugin_option rc" >&2;}
+- else
+- AR="$AR $plugin_option"
+- fi
+- rm -f conftest.*
++ else
++ AR="$AR $plugin_option"
+ fi
+-fi
+-test -z "$AR_FLAGS" && AR_FLAGS=cru
++ rm -f conftest.*
++: ${AR=ar}
++: ${AR_FLAGS=cru}
+
+
+
+@@ -7425,6 +7680,64 @@ test -z "$AR_FLAGS" && AR_FLAGS=cru
+
+
+
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for archiver @FILE support" >&5
++$as_echo_n "checking for archiver @FILE support... " >&6; }
++if ${lt_cv_ar_at_file+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ lt_cv_ar_at_file=no
++ cat confdefs.h - <<_ACEOF >conftest.$ac_ext
++/* end confdefs.h. */
++
++int
++main ()
++{
++
++ ;
++ return 0;
++}
++_ACEOF
++if ac_fn_c_try_compile "$LINENO"; then :
++ echo conftest.$ac_objext > conftest.lst
++ lt_ar_try='$AR $AR_FLAGS libconftest.a @conftest.lst >&5'
++ { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$lt_ar_try\""; } >&5
++ (eval $lt_ar_try) 2>&5
++ ac_status=$?
++ $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
++ test $ac_status = 0; }
++ if test "$ac_status" -eq 0; then
++ # Ensure the archiver fails upon bogus file names.
++ rm -f conftest.$ac_objext libconftest.a
++ { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$lt_ar_try\""; } >&5
++ (eval $lt_ar_try) 2>&5
++ ac_status=$?
++ $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
++ test $ac_status = 0; }
++ if test "$ac_status" -ne 0; then
++ lt_cv_ar_at_file=@
++ fi
++ fi
++ rm -f conftest.* libconftest.a
++
++fi
++rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
++
++fi
++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_ar_at_file" >&5
++$as_echo "$lt_cv_ar_at_file" >&6; }
++
++if test "x$lt_cv_ar_at_file" = xno; then
++ archiver_list_spec=
++else
++ archiver_list_spec=$lt_cv_ar_at_file
++fi
++
++
++
++
++
++
++
+ if test -n "$ac_tool_prefix"; then
+ # Extract the first word of "${ac_tool_prefix}strip", so it can be a program name with args.
+ set dummy ${ac_tool_prefix}strip; ac_word=$2
+@@ -7764,8 +8077,8 @@ esac
+ lt_cv_sys_global_symbol_to_cdecl="sed -n -e 's/^T .* \(.*\)$/extern int \1();/p' -e 's/^$symcode* .* \(.*\)$/extern char \1;/p'"
+
+ # Transform an extracted symbol line into symbol name and symbol address
+-lt_cv_sys_global_symbol_to_c_name_address="sed -n -e 's/^: \([^ ]*\) $/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"\2\", (void *) \&\2},/p'"
+-lt_cv_sys_global_symbol_to_c_name_address_lib_prefix="sed -n -e 's/^: \([^ ]*\) $/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \(lib[^ ]*\)$/ {\"\2\", (void *) \&\2},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"lib\2\", (void *) \&\2},/p'"
++lt_cv_sys_global_symbol_to_c_name_address="sed -n -e 's/^: \([^ ]*\)[ ]*$/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"\2\", (void *) \&\2},/p'"
++lt_cv_sys_global_symbol_to_c_name_address_lib_prefix="sed -n -e 's/^: \([^ ]*\)[ ]*$/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \(lib[^ ]*\)$/ {\"\2\", (void *) \&\2},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"lib\2\", (void *) \&\2},/p'"
+
+ # Handle CRLF in mingw tool chain
+ opt_cr=
+@@ -7801,6 +8114,7 @@ for ac_symprfx in "" "_"; do
+ else
+ lt_cv_sys_global_symbol_pipe="sed -n -e 's/^.*[ ]\($symcode$symcode*\)[ ][ ]*$ac_symprfx$sympat$opt_cr$/$symxfrm/p'"
+ fi
++ lt_cv_sys_global_symbol_pipe="$lt_cv_sys_global_symbol_pipe | sed '/ __gnu_lto/d'"
+
+ # Check to see that the pipe works correctly.
+ pipe_works=no
+@@ -7842,6 +8156,18 @@ _LT_EOF
+ if $GREP ' nm_test_var$' "$nlist" >/dev/null; then
+ if $GREP ' nm_test_func$' "$nlist" >/dev/null; then
+ cat <<_LT_EOF > conftest.$ac_ext
++/* Keep this code in sync between libtool.m4, ltmain, lt_system.h, and tests. */
++#if defined(_WIN32) || defined(__CYGWIN__) || defined(_WIN32_WCE)
++/* DATA imports from DLLs on WIN32 con't be const, because runtime
++ relocations are performed -- see ld's documentation on pseudo-relocs. */
++# define LT_DLSYM_CONST
++#elif defined(__osf__)
++/* This system does not cope well with relocations in const data. */
++# define LT_DLSYM_CONST
++#else
++# define LT_DLSYM_CONST const
++#endif
++
+ #ifdef __cplusplus
+ extern "C" {
+ #endif
+@@ -7853,7 +8179,7 @@ _LT_EOF
+ cat <<_LT_EOF >> conftest.$ac_ext
+
+ /* The mapping between symbol names and symbols. */
+-const struct {
++LT_DLSYM_CONST struct {
+ const char *name;
+ void *address;
+ }
+@@ -7879,8 +8205,8 @@ static const void *lt_preloaded_setup() {
+ _LT_EOF
+ # Now try linking the two files.
+ mv conftest.$ac_objext conftstm.$ac_objext
+- lt_save_LIBS="$LIBS"
+- lt_save_CFLAGS="$CFLAGS"
++ lt_globsym_save_LIBS=$LIBS
++ lt_globsym_save_CFLAGS=$CFLAGS
+ LIBS="conftstm.$ac_objext"
+ CFLAGS="$CFLAGS$lt_prog_compiler_no_builtin_flag"
+ if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_link\""; } >&5
+@@ -7890,8 +8216,8 @@ _LT_EOF
+ test $ac_status = 0; } && test -s conftest${ac_exeext}; then
+ pipe_works=yes
+ fi
+- LIBS="$lt_save_LIBS"
+- CFLAGS="$lt_save_CFLAGS"
++ LIBS=$lt_globsym_save_LIBS
++ CFLAGS=$lt_globsym_save_CFLAGS
+ else
+ echo "cannot find nm_test_func in $nlist" >&5
+ fi
+@@ -7928,6 +8254,14 @@ else
+ $as_echo "ok" >&6; }
+ fi
+
++# Response file support.
++if test "$lt_cv_nm_interface" = "MS dumpbin"; then
++ nm_file_list_spec='@'
++elif $NM --help 2>/dev/null | grep '[@]FILE' >/dev/null; then
++ nm_file_list_spec='@'
++fi
++
++
+
+
+
+@@ -7946,6 +8280,47 @@ fi
+
+
+
++
++
++
++
++
++
++
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for sysroot" >&5
++$as_echo_n "checking for sysroot... " >&6; }
++
++# Check whether --with-libtool-sysroot was given.
++if test "${with_libtool_sysroot+set}" = set; then :
++ withval=$with_libtool_sysroot;
++else
++ with_libtool_sysroot=no
++fi
++
++
++lt_sysroot=
++case ${with_libtool_sysroot} in #(
++ yes)
++ if test "$GCC" = yes; then
++ lt_sysroot=`$CC --print-sysroot 2>/dev/null`
++ fi
++ ;; #(
++ /*)
++ lt_sysroot=`echo "$with_libtool_sysroot" | sed -e "$sed_quote_subst"`
++ ;; #(
++ no|'')
++ ;; #(
++ *)
++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: ${with_libtool_sysroot}" >&5
++$as_echo "${with_libtool_sysroot}" >&6; }
++ as_fn_error $? "The sysroot must be an absolute path." "$LINENO" 5
++ ;;
++esac
++
++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: ${lt_sysroot:-no}" >&5
++$as_echo "${lt_sysroot:-no}" >&6; }
++
++
+
+
+
+@@ -8155,6 +8530,123 @@ esac
+
+ need_locks="$enable_libtool_lock"
+
++if test -n "$ac_tool_prefix"; then
++ # Extract the first word of "${ac_tool_prefix}mt", so it can be a program name with args.
++set dummy ${ac_tool_prefix}mt; ac_word=$2
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
++$as_echo_n "checking for $ac_word... " >&6; }
++if ${ac_cv_prog_MANIFEST_TOOL+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ if test -n "$MANIFEST_TOOL"; then
++ ac_cv_prog_MANIFEST_TOOL="$MANIFEST_TOOL" # Let the user override the test.
++else
++as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
++for as_dir in $PATH
++do
++ IFS=$as_save_IFS
++ test -z "$as_dir" && as_dir=.
++ for ac_exec_ext in '' $ac_executable_extensions; do
++ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
++ ac_cv_prog_MANIFEST_TOOL="${ac_tool_prefix}mt"
++ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
++ break 2
++ fi
++done
++ done
++IFS=$as_save_IFS
++
++fi
++fi
++MANIFEST_TOOL=$ac_cv_prog_MANIFEST_TOOL
++if test -n "$MANIFEST_TOOL"; then
++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $MANIFEST_TOOL" >&5
++$as_echo "$MANIFEST_TOOL" >&6; }
++else
++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
++$as_echo "no" >&6; }
++fi
++
++
++fi
++if test -z "$ac_cv_prog_MANIFEST_TOOL"; then
++ ac_ct_MANIFEST_TOOL=$MANIFEST_TOOL
++ # Extract the first word of "mt", so it can be a program name with args.
++set dummy mt; ac_word=$2
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
++$as_echo_n "checking for $ac_word... " >&6; }
++if ${ac_cv_prog_ac_ct_MANIFEST_TOOL+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ if test -n "$ac_ct_MANIFEST_TOOL"; then
++ ac_cv_prog_ac_ct_MANIFEST_TOOL="$ac_ct_MANIFEST_TOOL" # Let the user override the test.
++else
++as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
++for as_dir in $PATH
++do
++ IFS=$as_save_IFS
++ test -z "$as_dir" && as_dir=.
++ for ac_exec_ext in '' $ac_executable_extensions; do
++ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
++ ac_cv_prog_ac_ct_MANIFEST_TOOL="mt"
++ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
++ break 2
++ fi
++done
++ done
++IFS=$as_save_IFS
++
++fi
++fi
++ac_ct_MANIFEST_TOOL=$ac_cv_prog_ac_ct_MANIFEST_TOOL
++if test -n "$ac_ct_MANIFEST_TOOL"; then
++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_MANIFEST_TOOL" >&5
++$as_echo "$ac_ct_MANIFEST_TOOL" >&6; }
++else
++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
++$as_echo "no" >&6; }
++fi
++
++ if test "x$ac_ct_MANIFEST_TOOL" = x; then
++ MANIFEST_TOOL=":"
++ else
++ case $cross_compiling:$ac_tool_warned in
++yes:)
++{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
++$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
++ac_tool_warned=yes ;;
++esac
++ MANIFEST_TOOL=$ac_ct_MANIFEST_TOOL
++ fi
++else
++ MANIFEST_TOOL="$ac_cv_prog_MANIFEST_TOOL"
++fi
++
++test -z "$MANIFEST_TOOL" && MANIFEST_TOOL=mt
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking if $MANIFEST_TOOL is a manifest tool" >&5
++$as_echo_n "checking if $MANIFEST_TOOL is a manifest tool... " >&6; }
++if ${lt_cv_path_mainfest_tool+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ lt_cv_path_mainfest_tool=no
++ echo "$as_me:$LINENO: $MANIFEST_TOOL '-?'" >&5
++ $MANIFEST_TOOL '-?' 2>conftest.err > conftest.out
++ cat conftest.err >&5
++ if $GREP 'Manifest Tool' conftest.out > /dev/null; then
++ lt_cv_path_mainfest_tool=yes
++ fi
++ rm -f conftest*
++fi
++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_path_mainfest_tool" >&5
++$as_echo "$lt_cv_path_mainfest_tool" >&6; }
++if test "x$lt_cv_path_mainfest_tool" != xyes; then
++ MANIFEST_TOOL=:
++fi
++
++
++
++
++
+
+ case $host_os in
+ rhapsody* | darwin*)
+@@ -8718,6 +9210,8 @@ _LT_EOF
+ $LTCC $LTCFLAGS -c -o conftest.o conftest.c 2>&5
+ echo "$AR cru libconftest.a conftest.o" >&5
+ $AR cru libconftest.a conftest.o 2>&5
++ echo "$RANLIB libconftest.a" >&5
++ $RANLIB libconftest.a 2>&5
+ cat > conftest.c << _LT_EOF
+ int main() { return 0;}
+ _LT_EOF
+@@ -8788,6 +9282,16 @@ done
+
+
+
++func_stripname_cnf ()
++{
++ case ${2} in
++ .*) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%\\\\${2}\$%%"`;;
++ *) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%${2}\$%%"`;;
++ esac
++} # func_stripname_cnf
++
++
++
+
+
+ # Set options
+@@ -9273,8 +9777,6 @@ fi
+ lt_prog_compiler_pic=
+ lt_prog_compiler_static=
+
+-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $compiler option to produce PIC" >&5
+-$as_echo_n "checking for $compiler option to produce PIC... " >&6; }
+
+ if test "$GCC" = yes; then
+ lt_prog_compiler_wl='-Wl,'
+@@ -9440,6 +9942,12 @@ $as_echo_n "checking for $compiler option to produce PIC... " >&6; }
+ lt_prog_compiler_pic='--shared'
+ lt_prog_compiler_static='--static'
+ ;;
++ nagfor*)
++ # NAG Fortran compiler
++ lt_prog_compiler_wl='-Wl,-Wl,,'
++ lt_prog_compiler_pic='-PIC'
++ lt_prog_compiler_static='-Bstatic'
++ ;;
+ pgcc* | pgf77* | pgf90* | pgf95* | pgfortran*)
+ # Portland Group compilers (*not* the Pentium gcc compiler,
+ # which looks to be a dead project)
+@@ -9502,7 +10010,7 @@ $as_echo_n "checking for $compiler option to produce PIC... " >&6; }
+ lt_prog_compiler_pic='-KPIC'
+ lt_prog_compiler_static='-Bstatic'
+ case $cc_basename in
+- f77* | f90* | f95*)
++ f77* | f90* | f95* | sunf77* | sunf90* | sunf95*)
+ lt_prog_compiler_wl='-Qoption ld ';;
+ *)
+ lt_prog_compiler_wl='-Wl,';;
+@@ -9559,13 +10067,17 @@ case $host_os in
+ lt_prog_compiler_pic="$lt_prog_compiler_pic -DPIC"
+ ;;
+ esac
+-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_prog_compiler_pic" >&5
+-$as_echo "$lt_prog_compiler_pic" >&6; }
+-
+-
+-
+-
+
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $compiler option to produce PIC" >&5
++$as_echo_n "checking for $compiler option to produce PIC... " >&6; }
++if ${lt_cv_prog_compiler_pic+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ lt_cv_prog_compiler_pic=$lt_prog_compiler_pic
++fi
++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_pic" >&5
++$as_echo "$lt_cv_prog_compiler_pic" >&6; }
++lt_prog_compiler_pic=$lt_cv_prog_compiler_pic
+
+ #
+ # Check to make sure the PIC flag actually works.
+@@ -9626,6 +10138,11 @@ fi
+
+
+
++
++
++
++
++
+ #
+ # Check to make sure the static flag actually works.
+ #
+@@ -9976,7 +10493,8 @@ _LT_EOF
+ allow_undefined_flag=unsupported
+ always_export_symbols=no
+ enable_shared_with_static_runtimes=yes
+- export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1 DATA/'\'' | $SED -e '\''/^[AITW][ ]/s/.*[ ]//'\'' | sort | uniq > $export_symbols'
++ export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1 DATA/;s/^.*[ ]__nm__\([^ ]*\)[ ][^ ]*/\1 DATA/;/^I[ ]/d;/^[AITW][ ]/s/.* //'\'' | sort | uniq > $export_symbols'
++ exclude_expsyms='[_]+GLOBAL_OFFSET_TABLE_|[_]+GLOBAL__[FID]_.*|[_]+head_[A-Za-z0-9_]+_dll|[A-Za-z0-9_]+_dll_iname'
+
+ if $LD --help 2>&1 | $GREP 'auto-import' > /dev/null; then
+ archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib'
+@@ -10075,12 +10593,12 @@ _LT_EOF
+ whole_archive_flag_spec='--whole-archive$convenience --no-whole-archive'
+ hardcode_libdir_flag_spec=
+ hardcode_libdir_flag_spec_ld='-rpath $libdir'
+- archive_cmds='$LD -shared $libobjs $deplibs $compiler_flags -soname $soname -o $lib'
++ archive_cmds='$LD -shared $libobjs $deplibs $linker_flags -soname $soname -o $lib'
+ if test "x$supports_anon_versioning" = xyes; then
+ archive_expsym_cmds='echo "{ global:" > $output_objdir/$libname.ver~
+ cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~
+ echo "local: *; };" >> $output_objdir/$libname.ver~
+- $LD -shared $libobjs $deplibs $compiler_flags -soname $soname -version-script $output_objdir/$libname.ver -o $lib'
++ $LD -shared $libobjs $deplibs $linker_flags -soname $soname -version-script $output_objdir/$libname.ver -o $lib'
+ fi
+ ;;
+ esac
+@@ -10094,8 +10612,8 @@ _LT_EOF
+ archive_cmds='$LD -Bshareable $libobjs $deplibs $linker_flags -o $lib'
+ wlarc=
+ else
+- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
+- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
++ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
++ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
+ fi
+ ;;
+
+@@ -10113,8 +10631,8 @@ _LT_EOF
+
+ _LT_EOF
+ elif $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then
+- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
+- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
++ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
++ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
+ else
+ ld_shlibs=no
+ fi
+@@ -10160,8 +10678,8 @@ _LT_EOF
+
+ *)
+ if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then
+- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
+- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
++ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
++ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
+ else
+ ld_shlibs=no
+ fi
+@@ -10291,7 +10809,13 @@ _LT_EOF
+ allow_undefined_flag='-berok'
+ # Determine the default libpath from the value encoded in an
+ # empty executable.
+- cat confdefs.h - <<_ACEOF >conftest.$ac_ext
++ if test "${lt_cv_aix_libpath+set}" = set; then
++ aix_libpath=$lt_cv_aix_libpath
++else
++ if ${lt_cv_aix_libpath_+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+ /* end confdefs.h. */
+
+ int
+@@ -10304,22 +10828,29 @@ main ()
+ _ACEOF
+ if ac_fn_c_try_link "$LINENO"; then :
+
+-lt_aix_libpath_sed='
+- /Import File Strings/,/^$/ {
+- /^0/ {
+- s/^0 *\(.*\)$/\1/
+- p
+- }
+- }'
+-aix_libpath=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
+-# Check for a 64-bit object if we didn't find anything.
+-if test -z "$aix_libpath"; then
+- aix_libpath=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
+-fi
++ lt_aix_libpath_sed='
++ /Import File Strings/,/^$/ {
++ /^0/ {
++ s/^0 *\([^ ]*\) *$/\1/
++ p
++ }
++ }'
++ lt_cv_aix_libpath_=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
++ # Check for a 64-bit object if we didn't find anything.
++ if test -z "$lt_cv_aix_libpath_"; then
++ lt_cv_aix_libpath_=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
++ fi
+ fi
+ rm -f core conftest.err conftest.$ac_objext \
+ conftest$ac_exeext conftest.$ac_ext
+-if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
++ if test -z "$lt_cv_aix_libpath_"; then
++ lt_cv_aix_libpath_="/usr/lib:/lib"
++ fi
++
++fi
++
++ aix_libpath=$lt_cv_aix_libpath_
++fi
+
+ hardcode_libdir_flag_spec='${wl}-blibpath:$libdir:'"$aix_libpath"
+ archive_expsym_cmds='$CC -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags `if test "x${allow_undefined_flag}" != "x"; then func_echo_all "${wl}${allow_undefined_flag}"; else :; fi` '"\${wl}$exp_sym_flag:\$export_symbols $shared_flag"
+@@ -10331,7 +10862,13 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
+ else
+ # Determine the default libpath from the value encoded in an
+ # empty executable.
+- cat confdefs.h - <<_ACEOF >conftest.$ac_ext
++ if test "${lt_cv_aix_libpath+set}" = set; then
++ aix_libpath=$lt_cv_aix_libpath
++else
++ if ${lt_cv_aix_libpath_+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+ /* end confdefs.h. */
+
+ int
+@@ -10344,22 +10881,29 @@ main ()
+ _ACEOF
+ if ac_fn_c_try_link "$LINENO"; then :
+
+-lt_aix_libpath_sed='
+- /Import File Strings/,/^$/ {
+- /^0/ {
+- s/^0 *\(.*\)$/\1/
+- p
+- }
+- }'
+-aix_libpath=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
+-# Check for a 64-bit object if we didn't find anything.
+-if test -z "$aix_libpath"; then
+- aix_libpath=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
+-fi
++ lt_aix_libpath_sed='
++ /Import File Strings/,/^$/ {
++ /^0/ {
++ s/^0 *\([^ ]*\) *$/\1/
++ p
++ }
++ }'
++ lt_cv_aix_libpath_=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
++ # Check for a 64-bit object if we didn't find anything.
++ if test -z "$lt_cv_aix_libpath_"; then
++ lt_cv_aix_libpath_=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
++ fi
+ fi
+ rm -f core conftest.err conftest.$ac_objext \
+ conftest$ac_exeext conftest.$ac_ext
+-if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
++ if test -z "$lt_cv_aix_libpath_"; then
++ lt_cv_aix_libpath_="/usr/lib:/lib"
++ fi
++
++fi
++
++ aix_libpath=$lt_cv_aix_libpath_
++fi
+
+ hardcode_libdir_flag_spec='${wl}-blibpath:$libdir:'"$aix_libpath"
+ # Warning - without using the other run time loading flags,
+@@ -10404,20 +10948,63 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
+ # Microsoft Visual C++.
+ # hardcode_libdir_flag_spec is actually meaningless, as there is
+ # no search path for DLLs.
+- hardcode_libdir_flag_spec=' '
+- allow_undefined_flag=unsupported
+- # Tell ltmain to make .lib files, not .a files.
+- libext=lib
+- # Tell ltmain to make .dll files, not .so files.
+- shrext_cmds=".dll"
+- # FIXME: Setting linknames here is a bad hack.
+- archive_cmds='$CC -o $lib $libobjs $compiler_flags `func_echo_all "$deplibs" | $SED '\''s/ -lc$//'\''` -link -dll~linknames='
+- # The linker will automatically build a .lib file if we build a DLL.
+- old_archive_from_new_cmds='true'
+- # FIXME: Should let the user specify the lib program.
+- old_archive_cmds='lib -OUT:$oldlib$oldobjs$old_deplibs'
+- fix_srcfile_path='`cygpath -w "$srcfile"`'
+- enable_shared_with_static_runtimes=yes
++ case $cc_basename in
++ cl*)
++ # Native MSVC
++ hardcode_libdir_flag_spec=' '
++ allow_undefined_flag=unsupported
++ always_export_symbols=yes
++ file_list_spec='@'
++ # Tell ltmain to make .lib files, not .a files.
++ libext=lib
++ # Tell ltmain to make .dll files, not .so files.
++ shrext_cmds=".dll"
++ # FIXME: Setting linknames here is a bad hack.
++ archive_cmds='$CC -o $output_objdir/$soname $libobjs $compiler_flags $deplibs -Wl,-dll~linknames='
++ archive_expsym_cmds='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then
++ sed -n -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' -e '1\\\!p' < $export_symbols > $output_objdir/$soname.exp;
++ else
++ sed -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' < $export_symbols > $output_objdir/$soname.exp;
++ fi~
++ $CC -o $tool_output_objdir$soname $libobjs $compiler_flags $deplibs "@$tool_output_objdir$soname.exp" -Wl,-DLL,-IMPLIB:"$tool_output_objdir$libname.dll.lib"~
++ linknames='
++ # The linker will not automatically build a static lib if we build a DLL.
++ # _LT_TAGVAR(old_archive_from_new_cmds, )='true'
++ enable_shared_with_static_runtimes=yes
++ export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1,DATA/'\'' | $SED -e '\''/^[AITW][ ]/s/.*[ ]//'\'' | sort | uniq > $export_symbols'
++ # Don't use ranlib
++ old_postinstall_cmds='chmod 644 $oldlib'
++ postlink_cmds='lt_outputfile="@OUTPUT@"~
++ lt_tool_outputfile="@TOOL_OUTPUT@"~
++ case $lt_outputfile in
++ *.exe|*.EXE) ;;
++ *)
++ lt_outputfile="$lt_outputfile.exe"
++ lt_tool_outputfile="$lt_tool_outputfile.exe"
++ ;;
++ esac~
++ if test "$MANIFEST_TOOL" != ":" && test -f "$lt_outputfile.manifest"; then
++ $MANIFEST_TOOL -manifest "$lt_tool_outputfile.manifest" -outputresource:"$lt_tool_outputfile" || exit 1;
++ $RM "$lt_outputfile.manifest";
++ fi'
++ ;;
++ *)
++ # Assume MSVC wrapper
++ hardcode_libdir_flag_spec=' '
++ allow_undefined_flag=unsupported
++ # Tell ltmain to make .lib files, not .a files.
++ libext=lib
++ # Tell ltmain to make .dll files, not .so files.
++ shrext_cmds=".dll"
++ # FIXME: Setting linknames here is a bad hack.
++ archive_cmds='$CC -o $lib $libobjs $compiler_flags `func_echo_all "$deplibs" | $SED '\''s/ -lc$//'\''` -link -dll~linknames='
++ # The linker will automatically build a .lib file if we build a DLL.
++ old_archive_from_new_cmds='true'
++ # FIXME: Should let the user specify the lib program.
++ old_archive_cmds='lib -OUT:$oldlib$oldobjs$old_deplibs'
++ enable_shared_with_static_runtimes=yes
++ ;;
++ esac
+ ;;
+
+ darwin* | rhapsody*)
+@@ -10478,7 +11065,7 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
+
+ # FreeBSD 3 and greater uses gcc -shared to do shared libraries.
+ freebsd* | dragonfly*)
+- archive_cmds='$CC -shared -o $lib $libobjs $deplibs $compiler_flags'
++ archive_cmds='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags'
+ hardcode_libdir_flag_spec='-R$libdir'
+ hardcode_direct=yes
+ hardcode_shlibpath_var=no
+@@ -10486,7 +11073,7 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
+
+ hpux9*)
+ if test "$GCC" = yes; then
+- archive_cmds='$RM $output_objdir/$soname~$CC -shared -fPIC ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $libobjs $deplibs $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib'
++ archive_cmds='$RM $output_objdir/$soname~$CC -shared $pic_flag ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $libobjs $deplibs $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib'
+ else
+ archive_cmds='$RM $output_objdir/$soname~$LD -b +b $install_libdir -o $output_objdir/$soname $libobjs $deplibs $linker_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib'
+ fi
+@@ -10502,7 +11089,7 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
+
+ hpux10*)
+ if test "$GCC" = yes && test "$with_gnu_ld" = no; then
+- archive_cmds='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'
++ archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'
+ else
+ archive_cmds='$LD -b +h $soname +b $install_libdir -o $lib $libobjs $deplibs $linker_flags'
+ fi
+@@ -10526,10 +11113,10 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
+ archive_cmds='$CC -shared ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags'
+ ;;
+ ia64*)
+- archive_cmds='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags'
++ archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags'
+ ;;
+ *)
+- archive_cmds='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'
++ archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'
+ ;;
+ esac
+ else
+@@ -10608,23 +11195,36 @@ fi
+
+ irix5* | irix6* | nonstopux*)
+ if test "$GCC" = yes; then
+- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
++ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
+ # Try to use the -exported_symbol ld option, if it does not
+ # work, assume that -exports_file does not work either and
+ # implicitly export all symbols.
+- save_LDFLAGS="$LDFLAGS"
+- LDFLAGS="$LDFLAGS -shared ${wl}-exported_symbol ${wl}foo ${wl}-update_registry ${wl}/dev/null"
+- cat confdefs.h - <<_ACEOF >conftest.$ac_ext
++ # This should be the same for all languages, so no per-tag cache variable.
++ { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the $host_os linker accepts -exported_symbol" >&5
++$as_echo_n "checking whether the $host_os linker accepts -exported_symbol... " >&6; }
++if ${lt_cv_irix_exported_symbol+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ save_LDFLAGS="$LDFLAGS"
++ LDFLAGS="$LDFLAGS -shared ${wl}-exported_symbol ${wl}foo ${wl}-update_registry ${wl}/dev/null"
++ cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+ /* end confdefs.h. */
+-int foo(void) {}
++int foo (void) { return 0; }
+ _ACEOF
+ if ac_fn_c_try_link "$LINENO"; then :
+- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations ${wl}-exports_file ${wl}$export_symbols -o $lib'
+-
++ lt_cv_irix_exported_symbol=yes
++else
++ lt_cv_irix_exported_symbol=no
+ fi
+ rm -f core conftest.err conftest.$ac_objext \
+ conftest$ac_exeext conftest.$ac_ext
+- LDFLAGS="$save_LDFLAGS"
++ LDFLAGS="$save_LDFLAGS"
++fi
++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_irix_exported_symbol" >&5
++$as_echo "$lt_cv_irix_exported_symbol" >&6; }
++ if test "$lt_cv_irix_exported_symbol" = yes; then
++ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations ${wl}-exports_file ${wl}$export_symbols -o $lib'
++ fi
+ else
+ archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib'
+ archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -exports_file $export_symbols -o $lib'
+@@ -10709,7 +11309,7 @@ rm -f core conftest.err conftest.$ac_objext \
+ osf4* | osf5*) # as osf3* with the addition of -msym flag
+ if test "$GCC" = yes; then
+ allow_undefined_flag=' ${wl}-expect_unresolved ${wl}\*'
+- archive_cmds='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
++ archive_cmds='$CC -shared${allow_undefined_flag} $pic_flag $libobjs $deplibs $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
+ hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir'
+ else
+ allow_undefined_flag=' -expect_unresolved \*'
+@@ -10728,9 +11328,9 @@ rm -f core conftest.err conftest.$ac_objext \
+ no_undefined_flag=' -z defs'
+ if test "$GCC" = yes; then
+ wlarc='${wl}'
+- archive_cmds='$CC -shared ${wl}-z ${wl}text ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags'
++ archive_cmds='$CC -shared $pic_flag ${wl}-z ${wl}text ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags'
+ archive_expsym_cmds='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~
+- $CC -shared ${wl}-z ${wl}text ${wl}-M ${wl}$lib.exp ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp'
++ $CC -shared $pic_flag ${wl}-z ${wl}text ${wl}-M ${wl}$lib.exp ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp'
+ else
+ case `$CC -V 2>&1` in
+ *"Compilers 5.0"*)
+@@ -11306,8 +11906,9 @@ cygwin* | mingw* | pw32* | cegcc*)
+ need_version=no
+ need_lib_prefix=no
+
+- case $GCC,$host_os in
+- yes,cygwin* | yes,mingw* | yes,pw32* | yes,cegcc*)
++ case $GCC,$cc_basename in
++ yes,*)
++ # gcc
+ library_names_spec='$libname.dll.a'
+ # DLL is installed to $(libdir)/../bin by postinstall_cmds
+ postinstall_cmds='base_file=`basename \${file}`~
+@@ -11340,13 +11941,71 @@ cygwin* | mingw* | pw32* | cegcc*)
+ library_names_spec='`echo ${libname} | sed -e 's/^lib/pw/'``echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}'
+ ;;
+ esac
++ dynamic_linker='Win32 ld.exe'
++ ;;
++
++ *,cl*)
++ # Native MSVC
++ libname_spec='$name'
++ soname_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}'
++ library_names_spec='${libname}.dll.lib'
++
++ case $build_os in
++ mingw*)
++ sys_lib_search_path_spec=
++ lt_save_ifs=$IFS
++ IFS=';'
++ for lt_path in $LIB
++ do
++ IFS=$lt_save_ifs
++ # Let DOS variable expansion print the short 8.3 style file name.
++ lt_path=`cd "$lt_path" 2>/dev/null && cmd //C "for %i in (".") do @echo %~si"`
++ sys_lib_search_path_spec="$sys_lib_search_path_spec $lt_path"
++ done
++ IFS=$lt_save_ifs
++ # Convert to MSYS style.
++ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | sed -e 's|\\\\|/|g' -e 's| \\([a-zA-Z]\\):| /\\1|g' -e 's|^ ||'`
++ ;;
++ cygwin*)
++ # Convert to unix form, then to dos form, then back to unix form
++ # but this time dos style (no spaces!) so that the unix form looks
++ # like /cygdrive/c/PROGRA~1:/cygdr...
++ sys_lib_search_path_spec=`cygpath --path --unix "$LIB"`
++ sys_lib_search_path_spec=`cygpath --path --dos "$sys_lib_search_path_spec" 2>/dev/null`
++ sys_lib_search_path_spec=`cygpath --path --unix "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"`
++ ;;
++ *)
++ sys_lib_search_path_spec="$LIB"
++ if $ECHO "$sys_lib_search_path_spec" | $GREP ';[c-zC-Z]:/' >/dev/null; then
++ # It is most probably a Windows format PATH.
++ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e 's/;/ /g'`
++ else
++ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"`
++ fi
++ # FIXME: find the short name or the path components, as spaces are
++ # common. (e.g. "Program Files" -> "PROGRA~1")
++ ;;
++ esac
++
++ # DLL is installed to $(libdir)/../bin by postinstall_cmds
++ postinstall_cmds='base_file=`basename \${file}`~
++ dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\${base_file}'\''i; echo \$dlname'\''`~
++ dldir=$destdir/`dirname \$dlpath`~
++ test -d \$dldir || mkdir -p \$dldir~
++ $install_prog $dir/$dlname \$dldir/$dlname'
++ postuninstall_cmds='dldll=`$SHELL 2>&1 -c '\''. $file; echo \$dlname'\''`~
++ dlpath=$dir/\$dldll~
++ $RM \$dlpath'
++ shlibpath_overrides_runpath=yes
++ dynamic_linker='Win32 link.exe'
+ ;;
+
+ *)
++ # Assume MSVC wrapper
+ library_names_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext} $libname.lib'
++ dynamic_linker='Win32 ld.exe'
+ ;;
+ esac
+- dynamic_linker='Win32 ld.exe'
+ # FIXME: first we should search . and the directory the executable is in
+ shlibpath_var=PATH
+ ;;
+@@ -12248,7 +12907,7 @@ else
+ lt_dlunknown=0; lt_dlno_uscore=1; lt_dlneed_uscore=2
+ lt_status=$lt_dlunknown
+ cat > conftest.$ac_ext <<_LT_EOF
+-#line 12251 "configure"
++#line $LINENO "configure"
+ #include "confdefs.h"
+
+ #if HAVE_DLFCN_H
+@@ -12292,10 +12951,10 @@ else
+ /* When -fvisbility=hidden is used, assume the code has been annotated
+ correspondingly for the symbols needed. */
+ #if defined(__GNUC__) && (((__GNUC__ == 3) && (__GNUC_MINOR__ >= 3)) || (__GNUC__ > 3))
+-void fnord () __attribute__((visibility("default")));
++int fnord () __attribute__((visibility("default")));
+ #endif
+
+-void fnord () { int i=42; }
++int fnord () { return 42; }
+ int main ()
+ {
+ void *self = dlopen (0, LT_DLGLOBAL|LT_DLLAZY_OR_NOW);
+@@ -12354,7 +13013,7 @@ else
+ lt_dlunknown=0; lt_dlno_uscore=1; lt_dlneed_uscore=2
+ lt_status=$lt_dlunknown
+ cat > conftest.$ac_ext <<_LT_EOF
+-#line 12357 "configure"
++#line $LINENO "configure"
+ #include "confdefs.h"
+
+ #if HAVE_DLFCN_H
+@@ -12398,10 +13057,10 @@ else
+ /* When -fvisbility=hidden is used, assume the code has been annotated
+ correspondingly for the symbols needed. */
+ #if defined(__GNUC__) && (((__GNUC__ == 3) && (__GNUC_MINOR__ >= 3)) || (__GNUC__ > 3))
+-void fnord () __attribute__((visibility("default")));
++int fnord () __attribute__((visibility("default")));
+ #endif
+
+-void fnord () { int i=42; }
++int fnord () { return 42; }
+ int main ()
+ {
+ void *self = dlopen (0, LT_DLGLOBAL|LT_DLLAZY_OR_NOW);
+@@ -12793,6 +13452,7 @@ $RM -r conftest*
+
+ # Allow CC to be a program name with arguments.
+ lt_save_CC=$CC
++ lt_save_CFLAGS=$CFLAGS
+ lt_save_LD=$LD
+ lt_save_GCC=$GCC
+ GCC=$GXX
+@@ -12810,6 +13470,7 @@ $RM -r conftest*
+ fi
+ test -z "${LDCXX+set}" || LD=$LDCXX
+ CC=${CXX-"c++"}
++ CFLAGS=$CXXFLAGS
+ compiler=$CC
+ compiler_CXX=$CC
+ for cc_temp in $compiler""; do
+@@ -13092,7 +13753,13 @@ $as_echo_n "checking whether the $compiler linker ($LD) supports shared librarie
+ allow_undefined_flag_CXX='-berok'
+ # Determine the default libpath from the value encoded in an empty
+ # executable.
+- cat confdefs.h - <<_ACEOF >conftest.$ac_ext
++ if test "${lt_cv_aix_libpath+set}" = set; then
++ aix_libpath=$lt_cv_aix_libpath
++else
++ if ${lt_cv_aix_libpath__CXX+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+ /* end confdefs.h. */
+
+ int
+@@ -13105,22 +13772,29 @@ main ()
+ _ACEOF
+ if ac_fn_cxx_try_link "$LINENO"; then :
+
+-lt_aix_libpath_sed='
+- /Import File Strings/,/^$/ {
+- /^0/ {
+- s/^0 *\(.*\)$/\1/
+- p
+- }
+- }'
+-aix_libpath=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
+-# Check for a 64-bit object if we didn't find anything.
+-if test -z "$aix_libpath"; then
+- aix_libpath=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
+-fi
++ lt_aix_libpath_sed='
++ /Import File Strings/,/^$/ {
++ /^0/ {
++ s/^0 *\([^ ]*\) *$/\1/
++ p
++ }
++ }'
++ lt_cv_aix_libpath__CXX=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
++ # Check for a 64-bit object if we didn't find anything.
++ if test -z "$lt_cv_aix_libpath__CXX"; then
++ lt_cv_aix_libpath__CXX=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
++ fi
+ fi
+ rm -f core conftest.err conftest.$ac_objext \
+ conftest$ac_exeext conftest.$ac_ext
+-if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
++ if test -z "$lt_cv_aix_libpath__CXX"; then
++ lt_cv_aix_libpath__CXX="/usr/lib:/lib"
++ fi
++
++fi
++
++ aix_libpath=$lt_cv_aix_libpath__CXX
++fi
+
+ hardcode_libdir_flag_spec_CXX='${wl}-blibpath:$libdir:'"$aix_libpath"
+
+@@ -13133,7 +13807,13 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
+ else
+ # Determine the default libpath from the value encoded in an
+ # empty executable.
+- cat confdefs.h - <<_ACEOF >conftest.$ac_ext
++ if test "${lt_cv_aix_libpath+set}" = set; then
++ aix_libpath=$lt_cv_aix_libpath
++else
++ if ${lt_cv_aix_libpath__CXX+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+ /* end confdefs.h. */
+
+ int
+@@ -13146,22 +13826,29 @@ main ()
+ _ACEOF
+ if ac_fn_cxx_try_link "$LINENO"; then :
+
+-lt_aix_libpath_sed='
+- /Import File Strings/,/^$/ {
+- /^0/ {
+- s/^0 *\(.*\)$/\1/
+- p
+- }
+- }'
+-aix_libpath=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
+-# Check for a 64-bit object if we didn't find anything.
+-if test -z "$aix_libpath"; then
+- aix_libpath=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
+-fi
++ lt_aix_libpath_sed='
++ /Import File Strings/,/^$/ {
++ /^0/ {
++ s/^0 *\([^ ]*\) *$/\1/
++ p
++ }
++ }'
++ lt_cv_aix_libpath__CXX=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
++ # Check for a 64-bit object if we didn't find anything.
++ if test -z "$lt_cv_aix_libpath__CXX"; then
++ lt_cv_aix_libpath__CXX=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
++ fi
+ fi
+ rm -f core conftest.err conftest.$ac_objext \
+ conftest$ac_exeext conftest.$ac_ext
+-if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
++ if test -z "$lt_cv_aix_libpath__CXX"; then
++ lt_cv_aix_libpath__CXX="/usr/lib:/lib"
++ fi
++
++fi
++
++ aix_libpath=$lt_cv_aix_libpath__CXX
++fi
+
+ hardcode_libdir_flag_spec_CXX='${wl}-blibpath:$libdir:'"$aix_libpath"
+ # Warning - without using the other run time loading flags,
+@@ -13204,29 +13891,75 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
+ ;;
+
+ cygwin* | mingw* | pw32* | cegcc*)
+- # _LT_TAGVAR(hardcode_libdir_flag_spec, CXX) is actually meaningless,
+- # as there is no search path for DLLs.
+- hardcode_libdir_flag_spec_CXX='-L$libdir'
+- export_dynamic_flag_spec_CXX='${wl}--export-all-symbols'
+- allow_undefined_flag_CXX=unsupported
+- always_export_symbols_CXX=no
+- enable_shared_with_static_runtimes_CXX=yes
+-
+- if $LD --help 2>&1 | $GREP 'auto-import' > /dev/null; then
+- archive_cmds_CXX='$CC -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib'
+- # If the export-symbols file already is a .def file (1st line
+- # is EXPORTS), use it as is; otherwise, prepend...
+- archive_expsym_cmds_CXX='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then
+- cp $export_symbols $output_objdir/$soname.def;
+- else
+- echo EXPORTS > $output_objdir/$soname.def;
+- cat $export_symbols >> $output_objdir/$soname.def;
+- fi~
+- $CC -shared -nostdlib $output_objdir/$soname.def $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib'
+- else
+- ld_shlibs_CXX=no
+- fi
+- ;;
++ case $GXX,$cc_basename in
++ ,cl* | no,cl*)
++ # Native MSVC
++ # hardcode_libdir_flag_spec is actually meaningless, as there is
++ # no search path for DLLs.
++ hardcode_libdir_flag_spec_CXX=' '
++ allow_undefined_flag_CXX=unsupported
++ always_export_symbols_CXX=yes
++ file_list_spec_CXX='@'
++ # Tell ltmain to make .lib files, not .a files.
++ libext=lib
++ # Tell ltmain to make .dll files, not .so files.
++ shrext_cmds=".dll"
++ # FIXME: Setting linknames here is a bad hack.
++ archive_cmds_CXX='$CC -o $output_objdir/$soname $libobjs $compiler_flags $deplibs -Wl,-dll~linknames='
++ archive_expsym_cmds_CXX='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then
++ $SED -n -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' -e '1\\\!p' < $export_symbols > $output_objdir/$soname.exp;
++ else
++ $SED -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' < $export_symbols > $output_objdir/$soname.exp;
++ fi~
++ $CC -o $tool_output_objdir$soname $libobjs $compiler_flags $deplibs "@$tool_output_objdir$soname.exp" -Wl,-DLL,-IMPLIB:"$tool_output_objdir$libname.dll.lib"~
++ linknames='
++ # The linker will not automatically build a static lib if we build a DLL.
++ # _LT_TAGVAR(old_archive_from_new_cmds, CXX)='true'
++ enable_shared_with_static_runtimes_CXX=yes
++ # Don't use ranlib
++ old_postinstall_cmds_CXX='chmod 644 $oldlib'
++ postlink_cmds_CXX='lt_outputfile="@OUTPUT@"~
++ lt_tool_outputfile="@TOOL_OUTPUT@"~
++ case $lt_outputfile in
++ *.exe|*.EXE) ;;
++ *)
++ lt_outputfile="$lt_outputfile.exe"
++ lt_tool_outputfile="$lt_tool_outputfile.exe"
++ ;;
++ esac~
++ func_to_tool_file "$lt_outputfile"~
++ if test "$MANIFEST_TOOL" != ":" && test -f "$lt_outputfile.manifest"; then
++ $MANIFEST_TOOL -manifest "$lt_tool_outputfile.manifest" -outputresource:"$lt_tool_outputfile" || exit 1;
++ $RM "$lt_outputfile.manifest";
++ fi'
++ ;;
++ *)
++ # g++
++ # _LT_TAGVAR(hardcode_libdir_flag_spec, CXX) is actually meaningless,
++ # as there is no search path for DLLs.
++ hardcode_libdir_flag_spec_CXX='-L$libdir'
++ export_dynamic_flag_spec_CXX='${wl}--export-all-symbols'
++ allow_undefined_flag_CXX=unsupported
++ always_export_symbols_CXX=no
++ enable_shared_with_static_runtimes_CXX=yes
++
++ if $LD --help 2>&1 | $GREP 'auto-import' > /dev/null; then
++ archive_cmds_CXX='$CC -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib'
++ # If the export-symbols file already is a .def file (1st line
++ # is EXPORTS), use it as is; otherwise, prepend...
++ archive_expsym_cmds_CXX='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then
++ cp $export_symbols $output_objdir/$soname.def;
++ else
++ echo EXPORTS > $output_objdir/$soname.def;
++ cat $export_symbols >> $output_objdir/$soname.def;
++ fi~
++ $CC -shared -nostdlib $output_objdir/$soname.def $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib'
++ else
++ ld_shlibs_CXX=no
++ fi
++ ;;
++ esac
++ ;;
+ darwin* | rhapsody*)
+
+
+@@ -13332,7 +14065,7 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
+ ;;
+ *)
+ if test "$GXX" = yes; then
+- archive_cmds_CXX='$RM $output_objdir/$soname~$CC -shared -nostdlib -fPIC ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib'
++ archive_cmds_CXX='$RM $output_objdir/$soname~$CC -shared -nostdlib $pic_flag ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib'
+ else
+ # FIXME: insert proper C++ library support
+ ld_shlibs_CXX=no
+@@ -13403,10 +14136,10 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
+ archive_cmds_CXX='$CC -shared -nostdlib -fPIC ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags'
+ ;;
+ ia64*)
+- archive_cmds_CXX='$CC -shared -nostdlib -fPIC ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags'
++ archive_cmds_CXX='$CC -shared -nostdlib $pic_flag ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags'
+ ;;
+ *)
+- archive_cmds_CXX='$CC -shared -nostdlib -fPIC ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags'
++ archive_cmds_CXX='$CC -shared -nostdlib $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags'
+ ;;
+ esac
+ fi
+@@ -13447,9 +14180,9 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
+ *)
+ if test "$GXX" = yes; then
+ if test "$with_gnu_ld" = no; then
+- archive_cmds_CXX='$CC -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
++ archive_cmds_CXX='$CC -shared $pic_flag -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
+ else
+- archive_cmds_CXX='$CC -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` -o $lib'
++ archive_cmds_CXX='$CC -shared $pic_flag -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` -o $lib'
+ fi
+ fi
+ link_all_deplibs_CXX=yes
+@@ -13727,7 +14460,7 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
+ archive_cmds_CXX='$CC -shared -nostdlib ${allow_undefined_flag} $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
+ ;;
+ *)
+- archive_cmds_CXX='$CC -shared -nostdlib ${allow_undefined_flag} $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
++ archive_cmds_CXX='$CC -shared $pic_flag -nostdlib ${allow_undefined_flag} $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
+ ;;
+ esac
+
+@@ -13773,7 +14506,7 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
+
+ solaris*)
+ case $cc_basename in
+- CC*)
++ CC* | sunCC*)
+ # Sun C++ 4.2, 5.x and Centerline C++
+ archive_cmds_need_lc_CXX=yes
+ no_undefined_flag_CXX=' -zdefs'
+@@ -13814,9 +14547,9 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
+ if test "$GXX" = yes && test "$with_gnu_ld" = no; then
+ no_undefined_flag_CXX=' ${wl}-z ${wl}defs'
+ if $CC --version | $GREP -v '^2\.7' > /dev/null; then
+- archive_cmds_CXX='$CC -shared -nostdlib $LDFLAGS $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-h $wl$soname -o $lib'
++ archive_cmds_CXX='$CC -shared $pic_flag -nostdlib $LDFLAGS $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-h $wl$soname -o $lib'
+ archive_expsym_cmds_CXX='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~
+- $CC -shared -nostdlib ${wl}-M $wl$lib.exp -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~$RM $lib.exp'
++ $CC -shared $pic_flag -nostdlib ${wl}-M $wl$lib.exp -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~$RM $lib.exp'
+
+ # Commands to make compiler produce verbose output that lists
+ # what "hidden" libraries, object files and flags are used when
+@@ -13952,6 +14685,13 @@ private:
+ };
+ _LT_EOF
+
++
++_lt_libdeps_save_CFLAGS=$CFLAGS
++case "$CC $CFLAGS " in #(
++*\ -flto*\ *) CFLAGS="$CFLAGS -fno-lto" ;;
++*\ -fwhopr*\ *) CFLAGS="$CFLAGS -fno-whopr" ;;
++esac
++
+ if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5
+ (eval $ac_compile) 2>&5
+ ac_status=$?
+@@ -13965,7 +14705,7 @@ if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5
+ pre_test_object_deps_done=no
+
+ for p in `eval "$output_verbose_link_cmd"`; do
+- case $p in
++ case ${prev}${p} in
+
+ -L* | -R* | -l*)
+ # Some compilers place space between "-{L,R}" and the path.
+@@ -13974,13 +14714,22 @@ if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5
+ test $p = "-R"; then
+ prev=$p
+ continue
+- else
+- prev=
+ fi
+
++ # Expand the sysroot to ease extracting the directories later.
++ if test -z "$prev"; then
++ case $p in
++ -L*) func_stripname_cnf '-L' '' "$p"; prev=-L; p=$func_stripname_result ;;
++ -R*) func_stripname_cnf '-R' '' "$p"; prev=-R; p=$func_stripname_result ;;
++ -l*) func_stripname_cnf '-l' '' "$p"; prev=-l; p=$func_stripname_result ;;
++ esac
++ fi
++ case $p in
++ =*) func_stripname_cnf '=' '' "$p"; p=$lt_sysroot$func_stripname_result ;;
++ esac
+ if test "$pre_test_object_deps_done" = no; then
+- case $p in
+- -L* | -R*)
++ case ${prev} in
++ -L | -R)
+ # Internal compiler library paths should come after those
+ # provided the user. The postdeps already come after the
+ # user supplied libs so there is no need to process them.
+@@ -14000,8 +14749,10 @@ if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5
+ postdeps_CXX="${postdeps_CXX} ${prev}${p}"
+ fi
+ fi
++ prev=
+ ;;
+
++ *.lto.$objext) ;; # Ignore GCC LTO objects
+ *.$objext)
+ # This assumes that the test object file only shows up
+ # once in the compiler output.
+@@ -14037,6 +14788,7 @@ else
+ fi
+
+ $RM -f confest.$objext
++CFLAGS=$_lt_libdeps_save_CFLAGS
+
+ # PORTME: override above test on systems where it is broken
+ case $host_os in
+@@ -14072,7 +14824,7 @@ linux*)
+
+ solaris*)
+ case $cc_basename in
+- CC*)
++ CC* | sunCC*)
+ # The more standards-conforming stlport4 library is
+ # incompatible with the Cstd library. Avoid specifying
+ # it if it's in CXXFLAGS. Ignore libCrun as
+@@ -14137,8 +14889,6 @@ fi
+ lt_prog_compiler_pic_CXX=
+ lt_prog_compiler_static_CXX=
+
+-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $compiler option to produce PIC" >&5
+-$as_echo_n "checking for $compiler option to produce PIC... " >&6; }
+
+ # C++ specific cases for pic, static, wl, etc.
+ if test "$GXX" = yes; then
+@@ -14243,6 +14993,11 @@ $as_echo_n "checking for $compiler option to produce PIC... " >&6; }
+ ;;
+ esac
+ ;;
++ mingw* | cygwin* | os2* | pw32* | cegcc*)
++ # This hack is so that the source file can tell whether it is being
++ # built for inclusion in a dll (and should export symbols for example).
++ lt_prog_compiler_pic_CXX='-DDLL_EXPORT'
++ ;;
+ dgux*)
+ case $cc_basename in
+ ec++*)
+@@ -14395,7 +15150,7 @@ $as_echo_n "checking for $compiler option to produce PIC... " >&6; }
+ ;;
+ solaris*)
+ case $cc_basename in
+- CC*)
++ CC* | sunCC*)
+ # Sun C++ 4.2, 5.x and Centerline C++
+ lt_prog_compiler_pic_CXX='-KPIC'
+ lt_prog_compiler_static_CXX='-Bstatic'
+@@ -14460,10 +15215,17 @@ case $host_os in
+ lt_prog_compiler_pic_CXX="$lt_prog_compiler_pic_CXX -DPIC"
+ ;;
+ esac
+-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_prog_compiler_pic_CXX" >&5
+-$as_echo "$lt_prog_compiler_pic_CXX" >&6; }
+-
+
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $compiler option to produce PIC" >&5
++$as_echo_n "checking for $compiler option to produce PIC... " >&6; }
++if ${lt_cv_prog_compiler_pic_CXX+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ lt_cv_prog_compiler_pic_CXX=$lt_prog_compiler_pic_CXX
++fi
++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_pic_CXX" >&5
++$as_echo "$lt_cv_prog_compiler_pic_CXX" >&6; }
++lt_prog_compiler_pic_CXX=$lt_cv_prog_compiler_pic_CXX
+
+ #
+ # Check to make sure the PIC flag actually works.
+@@ -14521,6 +15283,8 @@ fi
+
+
+
++
++
+ #
+ # Check to make sure the static flag actually works.
+ #
+@@ -14698,6 +15462,7 @@ fi
+ $as_echo_n "checking whether the $compiler linker ($LD) supports shared libraries... " >&6; }
+
+ export_symbols_cmds_CXX='$NM $libobjs $convenience | $global_symbol_pipe | $SED '\''s/.* //'\'' | sort | uniq > $export_symbols'
++ exclude_expsyms_CXX='_GLOBAL_OFFSET_TABLE_|_GLOBAL__F[ID]_.*'
+ case $host_os in
+ aix[4-9]*)
+ # If we're using GNU nm, then we don't want the "-C" option.
+@@ -14712,15 +15477,20 @@ $as_echo_n "checking whether the $compiler linker ($LD) supports shared librarie
+ ;;
+ pw32*)
+ export_symbols_cmds_CXX="$ltdll_cmds"
+- ;;
++ ;;
+ cygwin* | mingw* | cegcc*)
+- export_symbols_cmds_CXX='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1 DATA/;/^.*[ ]__nm__/s/^.*[ ]__nm__\([^ ]*\)[ ][^ ]*/\1 DATA/;/^I[ ]/d;/^[AITW][ ]/s/.* //'\'' | sort | uniq > $export_symbols'
+- ;;
++ case $cc_basename in
++ cl*) ;;
++ *)
++ export_symbols_cmds_CXX='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1 DATA/;s/^.*[ ]__nm__\([^ ]*\)[ ][^ ]*/\1 DATA/;/^I[ ]/d;/^[AITW][ ]/s/.* //'\'' | sort | uniq > $export_symbols'
++ exclude_expsyms_CXX='[_]+GLOBAL_OFFSET_TABLE_|[_]+GLOBAL__[FID]_.*|[_]+head_[A-Za-z0-9_]+_dll|[A-Za-z0-9_]+_dll_iname'
++ ;;
++ esac
++ ;;
+ *)
+ export_symbols_cmds_CXX='$NM $libobjs $convenience | $global_symbol_pipe | $SED '\''s/.* //'\'' | sort | uniq > $export_symbols'
+- ;;
++ ;;
+ esac
+- exclude_expsyms_CXX='_GLOBAL_OFFSET_TABLE_|_GLOBAL__F[ID]_.*'
+
+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ld_shlibs_CXX" >&5
+ $as_echo "$ld_shlibs_CXX" >&6; }
+@@ -14983,8 +15753,9 @@ cygwin* | mingw* | pw32* | cegcc*)
+ need_version=no
+ need_lib_prefix=no
+
+- case $GCC,$host_os in
+- yes,cygwin* | yes,mingw* | yes,pw32* | yes,cegcc*)
++ case $GCC,$cc_basename in
++ yes,*)
++ # gcc
+ library_names_spec='$libname.dll.a'
+ # DLL is installed to $(libdir)/../bin by postinstall_cmds
+ postinstall_cmds='base_file=`basename \${file}`~
+@@ -15016,13 +15787,71 @@ cygwin* | mingw* | pw32* | cegcc*)
+ library_names_spec='`echo ${libname} | sed -e 's/^lib/pw/'``echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}'
+ ;;
+ esac
++ dynamic_linker='Win32 ld.exe'
++ ;;
++
++ *,cl*)
++ # Native MSVC
++ libname_spec='$name'
++ soname_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}'
++ library_names_spec='${libname}.dll.lib'
++
++ case $build_os in
++ mingw*)
++ sys_lib_search_path_spec=
++ lt_save_ifs=$IFS
++ IFS=';'
++ for lt_path in $LIB
++ do
++ IFS=$lt_save_ifs
++ # Let DOS variable expansion print the short 8.3 style file name.
++ lt_path=`cd "$lt_path" 2>/dev/null && cmd //C "for %i in (".") do @echo %~si"`
++ sys_lib_search_path_spec="$sys_lib_search_path_spec $lt_path"
++ done
++ IFS=$lt_save_ifs
++ # Convert to MSYS style.
++ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | sed -e 's|\\\\|/|g' -e 's| \\([a-zA-Z]\\):| /\\1|g' -e 's|^ ||'`
++ ;;
++ cygwin*)
++ # Convert to unix form, then to dos form, then back to unix form
++ # but this time dos style (no spaces!) so that the unix form looks
++ # like /cygdrive/c/PROGRA~1:/cygdr...
++ sys_lib_search_path_spec=`cygpath --path --unix "$LIB"`
++ sys_lib_search_path_spec=`cygpath --path --dos "$sys_lib_search_path_spec" 2>/dev/null`
++ sys_lib_search_path_spec=`cygpath --path --unix "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"`
++ ;;
++ *)
++ sys_lib_search_path_spec="$LIB"
++ if $ECHO "$sys_lib_search_path_spec" | $GREP ';[c-zC-Z]:/' >/dev/null; then
++ # It is most probably a Windows format PATH.
++ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e 's/;/ /g'`
++ else
++ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"`
++ fi
++ # FIXME: find the short name or the path components, as spaces are
++ # common. (e.g. "Program Files" -> "PROGRA~1")
++ ;;
++ esac
++
++ # DLL is installed to $(libdir)/../bin by postinstall_cmds
++ postinstall_cmds='base_file=`basename \${file}`~
++ dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\${base_file}'\''i; echo \$dlname'\''`~
++ dldir=$destdir/`dirname \$dlpath`~
++ test -d \$dldir || mkdir -p \$dldir~
++ $install_prog $dir/$dlname \$dldir/$dlname'
++ postuninstall_cmds='dldll=`$SHELL 2>&1 -c '\''. $file; echo \$dlname'\''`~
++ dlpath=$dir/\$dldll~
++ $RM \$dlpath'
++ shlibpath_overrides_runpath=yes
++ dynamic_linker='Win32 link.exe'
+ ;;
+
+ *)
++ # Assume MSVC wrapper
+ library_names_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext} $libname.lib'
++ dynamic_linker='Win32 ld.exe'
+ ;;
+ esac
+- dynamic_linker='Win32 ld.exe'
+ # FIXME: first we should search . and the directory the executable is in
+ shlibpath_var=PATH
+ ;;
+@@ -15586,6 +16415,7 @@ fi
+ fi # test -n "$compiler"
+
+ CC=$lt_save_CC
++ CFLAGS=$lt_save_CFLAGS
+ LDCXX=$LD
+ LD=$lt_save_LD
+ GCC=$lt_save_GCC
+@@ -17738,13 +18568,20 @@ exeext='`$ECHO "$exeext" | $SED "$delay_single_quote_subst"`'
+ lt_unset='`$ECHO "$lt_unset" | $SED "$delay_single_quote_subst"`'
+ lt_SP2NL='`$ECHO "$lt_SP2NL" | $SED "$delay_single_quote_subst"`'
+ lt_NL2SP='`$ECHO "$lt_NL2SP" | $SED "$delay_single_quote_subst"`'
++lt_cv_to_host_file_cmd='`$ECHO "$lt_cv_to_host_file_cmd" | $SED "$delay_single_quote_subst"`'
++lt_cv_to_tool_file_cmd='`$ECHO "$lt_cv_to_tool_file_cmd" | $SED "$delay_single_quote_subst"`'
+ reload_flag='`$ECHO "$reload_flag" | $SED "$delay_single_quote_subst"`'
+ reload_cmds='`$ECHO "$reload_cmds" | $SED "$delay_single_quote_subst"`'
+ OBJDUMP='`$ECHO "$OBJDUMP" | $SED "$delay_single_quote_subst"`'
+ deplibs_check_method='`$ECHO "$deplibs_check_method" | $SED "$delay_single_quote_subst"`'
+ file_magic_cmd='`$ECHO "$file_magic_cmd" | $SED "$delay_single_quote_subst"`'
++file_magic_glob='`$ECHO "$file_magic_glob" | $SED "$delay_single_quote_subst"`'
++want_nocaseglob='`$ECHO "$want_nocaseglob" | $SED "$delay_single_quote_subst"`'
++DLLTOOL='`$ECHO "$DLLTOOL" | $SED "$delay_single_quote_subst"`'
++sharedlib_from_linklib_cmd='`$ECHO "$sharedlib_from_linklib_cmd" | $SED "$delay_single_quote_subst"`'
+ AR='`$ECHO "$AR" | $SED "$delay_single_quote_subst"`'
+ AR_FLAGS='`$ECHO "$AR_FLAGS" | $SED "$delay_single_quote_subst"`'
++archiver_list_spec='`$ECHO "$archiver_list_spec" | $SED "$delay_single_quote_subst"`'
+ STRIP='`$ECHO "$STRIP" | $SED "$delay_single_quote_subst"`'
+ RANLIB='`$ECHO "$RANLIB" | $SED "$delay_single_quote_subst"`'
+ old_postinstall_cmds='`$ECHO "$old_postinstall_cmds" | $SED "$delay_single_quote_subst"`'
+@@ -17759,14 +18596,17 @@ lt_cv_sys_global_symbol_pipe='`$ECHO "$lt_cv_sys_global_symbol_pipe" | $SED "$de
+ lt_cv_sys_global_symbol_to_cdecl='`$ECHO "$lt_cv_sys_global_symbol_to_cdecl" | $SED "$delay_single_quote_subst"`'
+ lt_cv_sys_global_symbol_to_c_name_address='`$ECHO "$lt_cv_sys_global_symbol_to_c_name_address" | $SED "$delay_single_quote_subst"`'
+ lt_cv_sys_global_symbol_to_c_name_address_lib_prefix='`$ECHO "$lt_cv_sys_global_symbol_to_c_name_address_lib_prefix" | $SED "$delay_single_quote_subst"`'
++nm_file_list_spec='`$ECHO "$nm_file_list_spec" | $SED "$delay_single_quote_subst"`'
++lt_sysroot='`$ECHO "$lt_sysroot" | $SED "$delay_single_quote_subst"`'
+ objdir='`$ECHO "$objdir" | $SED "$delay_single_quote_subst"`'
+ MAGIC_CMD='`$ECHO "$MAGIC_CMD" | $SED "$delay_single_quote_subst"`'
+ lt_prog_compiler_no_builtin_flag='`$ECHO "$lt_prog_compiler_no_builtin_flag" | $SED "$delay_single_quote_subst"`'
+-lt_prog_compiler_wl='`$ECHO "$lt_prog_compiler_wl" | $SED "$delay_single_quote_subst"`'
+ lt_prog_compiler_pic='`$ECHO "$lt_prog_compiler_pic" | $SED "$delay_single_quote_subst"`'
++lt_prog_compiler_wl='`$ECHO "$lt_prog_compiler_wl" | $SED "$delay_single_quote_subst"`'
+ lt_prog_compiler_static='`$ECHO "$lt_prog_compiler_static" | $SED "$delay_single_quote_subst"`'
+ lt_cv_prog_compiler_c_o='`$ECHO "$lt_cv_prog_compiler_c_o" | $SED "$delay_single_quote_subst"`'
+ need_locks='`$ECHO "$need_locks" | $SED "$delay_single_quote_subst"`'
++MANIFEST_TOOL='`$ECHO "$MANIFEST_TOOL" | $SED "$delay_single_quote_subst"`'
+ DSYMUTIL='`$ECHO "$DSYMUTIL" | $SED "$delay_single_quote_subst"`'
+ NMEDIT='`$ECHO "$NMEDIT" | $SED "$delay_single_quote_subst"`'
+ LIPO='`$ECHO "$LIPO" | $SED "$delay_single_quote_subst"`'
+@@ -17799,12 +18639,12 @@ hardcode_shlibpath_var='`$ECHO "$hardcode_shlibpath_var" | $SED "$delay_single_q
+ hardcode_automatic='`$ECHO "$hardcode_automatic" | $SED "$delay_single_quote_subst"`'
+ inherit_rpath='`$ECHO "$inherit_rpath" | $SED "$delay_single_quote_subst"`'
+ link_all_deplibs='`$ECHO "$link_all_deplibs" | $SED "$delay_single_quote_subst"`'
+-fix_srcfile_path='`$ECHO "$fix_srcfile_path" | $SED "$delay_single_quote_subst"`'
+ always_export_symbols='`$ECHO "$always_export_symbols" | $SED "$delay_single_quote_subst"`'
+ export_symbols_cmds='`$ECHO "$export_symbols_cmds" | $SED "$delay_single_quote_subst"`'
+ exclude_expsyms='`$ECHO "$exclude_expsyms" | $SED "$delay_single_quote_subst"`'
+ include_expsyms='`$ECHO "$include_expsyms" | $SED "$delay_single_quote_subst"`'
+ prelink_cmds='`$ECHO "$prelink_cmds" | $SED "$delay_single_quote_subst"`'
++postlink_cmds='`$ECHO "$postlink_cmds" | $SED "$delay_single_quote_subst"`'
+ file_list_spec='`$ECHO "$file_list_spec" | $SED "$delay_single_quote_subst"`'
+ variables_saved_for_relink='`$ECHO "$variables_saved_for_relink" | $SED "$delay_single_quote_subst"`'
+ need_lib_prefix='`$ECHO "$need_lib_prefix" | $SED "$delay_single_quote_subst"`'
+@@ -17843,8 +18683,8 @@ old_archive_cmds_CXX='`$ECHO "$old_archive_cmds_CXX" | $SED "$delay_single_quote
+ compiler_CXX='`$ECHO "$compiler_CXX" | $SED "$delay_single_quote_subst"`'
+ GCC_CXX='`$ECHO "$GCC_CXX" | $SED "$delay_single_quote_subst"`'
+ lt_prog_compiler_no_builtin_flag_CXX='`$ECHO "$lt_prog_compiler_no_builtin_flag_CXX" | $SED "$delay_single_quote_subst"`'
+-lt_prog_compiler_wl_CXX='`$ECHO "$lt_prog_compiler_wl_CXX" | $SED "$delay_single_quote_subst"`'
+ lt_prog_compiler_pic_CXX='`$ECHO "$lt_prog_compiler_pic_CXX" | $SED "$delay_single_quote_subst"`'
++lt_prog_compiler_wl_CXX='`$ECHO "$lt_prog_compiler_wl_CXX" | $SED "$delay_single_quote_subst"`'
+ lt_prog_compiler_static_CXX='`$ECHO "$lt_prog_compiler_static_CXX" | $SED "$delay_single_quote_subst"`'
+ lt_cv_prog_compiler_c_o_CXX='`$ECHO "$lt_cv_prog_compiler_c_o_CXX" | $SED "$delay_single_quote_subst"`'
+ archive_cmds_need_lc_CXX='`$ECHO "$archive_cmds_need_lc_CXX" | $SED "$delay_single_quote_subst"`'
+@@ -17871,12 +18711,12 @@ hardcode_shlibpath_var_CXX='`$ECHO "$hardcode_shlibpath_var_CXX" | $SED "$delay_
+ hardcode_automatic_CXX='`$ECHO "$hardcode_automatic_CXX" | $SED "$delay_single_quote_subst"`'
+ inherit_rpath_CXX='`$ECHO "$inherit_rpath_CXX" | $SED "$delay_single_quote_subst"`'
+ link_all_deplibs_CXX='`$ECHO "$link_all_deplibs_CXX" | $SED "$delay_single_quote_subst"`'
+-fix_srcfile_path_CXX='`$ECHO "$fix_srcfile_path_CXX" | $SED "$delay_single_quote_subst"`'
+ always_export_symbols_CXX='`$ECHO "$always_export_symbols_CXX" | $SED "$delay_single_quote_subst"`'
+ export_symbols_cmds_CXX='`$ECHO "$export_symbols_cmds_CXX" | $SED "$delay_single_quote_subst"`'
+ exclude_expsyms_CXX='`$ECHO "$exclude_expsyms_CXX" | $SED "$delay_single_quote_subst"`'
+ include_expsyms_CXX='`$ECHO "$include_expsyms_CXX" | $SED "$delay_single_quote_subst"`'
+ prelink_cmds_CXX='`$ECHO "$prelink_cmds_CXX" | $SED "$delay_single_quote_subst"`'
++postlink_cmds_CXX='`$ECHO "$postlink_cmds_CXX" | $SED "$delay_single_quote_subst"`'
+ file_list_spec_CXX='`$ECHO "$file_list_spec_CXX" | $SED "$delay_single_quote_subst"`'
+ hardcode_action_CXX='`$ECHO "$hardcode_action_CXX" | $SED "$delay_single_quote_subst"`'
+ compiler_lib_search_dirs_CXX='`$ECHO "$compiler_lib_search_dirs_CXX" | $SED "$delay_single_quote_subst"`'
+@@ -17914,8 +18754,13 @@ reload_flag \
+ OBJDUMP \
+ deplibs_check_method \
+ file_magic_cmd \
++file_magic_glob \
++want_nocaseglob \
++DLLTOOL \
++sharedlib_from_linklib_cmd \
+ AR \
+ AR_FLAGS \
++archiver_list_spec \
+ STRIP \
+ RANLIB \
+ CC \
+@@ -17925,12 +18770,14 @@ lt_cv_sys_global_symbol_pipe \
+ lt_cv_sys_global_symbol_to_cdecl \
+ lt_cv_sys_global_symbol_to_c_name_address \
+ lt_cv_sys_global_symbol_to_c_name_address_lib_prefix \
++nm_file_list_spec \
+ lt_prog_compiler_no_builtin_flag \
+-lt_prog_compiler_wl \
+ lt_prog_compiler_pic \
++lt_prog_compiler_wl \
+ lt_prog_compiler_static \
+ lt_cv_prog_compiler_c_o \
+ need_locks \
++MANIFEST_TOOL \
+ DSYMUTIL \
+ NMEDIT \
+ LIPO \
+@@ -17946,7 +18793,6 @@ no_undefined_flag \
+ hardcode_libdir_flag_spec \
+ hardcode_libdir_flag_spec_ld \
+ hardcode_libdir_separator \
+-fix_srcfile_path \
+ exclude_expsyms \
+ include_expsyms \
+ file_list_spec \
+@@ -17968,8 +18814,8 @@ LD_CXX \
+ reload_flag_CXX \
+ compiler_CXX \
+ lt_prog_compiler_no_builtin_flag_CXX \
+-lt_prog_compiler_wl_CXX \
+ lt_prog_compiler_pic_CXX \
++lt_prog_compiler_wl_CXX \
+ lt_prog_compiler_static_CXX \
+ lt_cv_prog_compiler_c_o_CXX \
+ export_dynamic_flag_spec_CXX \
+@@ -17981,7 +18827,6 @@ no_undefined_flag_CXX \
+ hardcode_libdir_flag_spec_CXX \
+ hardcode_libdir_flag_spec_ld_CXX \
+ hardcode_libdir_separator_CXX \
+-fix_srcfile_path_CXX \
+ exclude_expsyms_CXX \
+ include_expsyms_CXX \
+ file_list_spec_CXX \
+@@ -18015,6 +18860,7 @@ module_cmds \
+ module_expsym_cmds \
+ export_symbols_cmds \
+ prelink_cmds \
++postlink_cmds \
+ postinstall_cmds \
+ postuninstall_cmds \
+ finish_cmds \
+@@ -18029,7 +18875,8 @@ archive_expsym_cmds_CXX \
+ module_cmds_CXX \
+ module_expsym_cmds_CXX \
+ export_symbols_cmds_CXX \
+-prelink_cmds_CXX; do
++prelink_cmds_CXX \
++postlink_cmds_CXX; do
+ case \`eval \\\\\$ECHO \\\\""\\\\\$\$var"\\\\"\` in
+ *[\\\\\\\`\\"\\\$]*)
+ eval "lt_\$var=\\\\\\"\\\`\\\$ECHO \\"\\\$\$var\\" | \\\$SED -e \\"\\\$double_quote_subst\\" -e \\"\\\$sed_quote_subst\\" -e \\"\\\$delay_variable_subst\\"\\\`\\\\\\""
+@@ -18786,7 +19633,8 @@ $as_echo X"$file" |
+ # NOTE: Changes made to this file will be lost: look at ltmain.sh.
+ #
+ # Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005,
+-# 2006, 2007, 2008, 2009 Free Software Foundation, Inc.
++# 2006, 2007, 2008, 2009, 2010 Free Software Foundation,
++# Inc.
+ # Written by Gordon Matzigkeit, 1996
+ #
+ # This file is part of GNU Libtool.
+@@ -18889,19 +19737,42 @@ SP2NL=$lt_lt_SP2NL
+ # turn newlines into spaces.
+ NL2SP=$lt_lt_NL2SP
+
++# convert \$build file names to \$host format.
++to_host_file_cmd=$lt_cv_to_host_file_cmd
++
++# convert \$build files to toolchain format.
++to_tool_file_cmd=$lt_cv_to_tool_file_cmd
++
+ # An object symbol dumper.
+ OBJDUMP=$lt_OBJDUMP
+
+ # Method to check whether dependent libraries are shared objects.
+ deplibs_check_method=$lt_deplibs_check_method
+
+-# Command to use when deplibs_check_method == "file_magic".
++# Command to use when deplibs_check_method = "file_magic".
+ file_magic_cmd=$lt_file_magic_cmd
+
++# How to find potential files when deplibs_check_method = "file_magic".
++file_magic_glob=$lt_file_magic_glob
++
++# Find potential files using nocaseglob when deplibs_check_method = "file_magic".
++want_nocaseglob=$lt_want_nocaseglob
++
++# DLL creation program.
++DLLTOOL=$lt_DLLTOOL
++
++# Command to associate shared and link libraries.
++sharedlib_from_linklib_cmd=$lt_sharedlib_from_linklib_cmd
++
+ # The archiver.
+ AR=$lt_AR
++
++# Flags to create an archive.
+ AR_FLAGS=$lt_AR_FLAGS
+
++# How to feed a file listing to the archiver.
++archiver_list_spec=$lt_archiver_list_spec
++
+ # A symbol stripping program.
+ STRIP=$lt_STRIP
+
+@@ -18931,6 +19802,12 @@ global_symbol_to_c_name_address=$lt_lt_cv_sys_global_symbol_to_c_name_address
+ # Transform the output of nm in a C name address pair when lib prefix is needed.
+ global_symbol_to_c_name_address_lib_prefix=$lt_lt_cv_sys_global_symbol_to_c_name_address_lib_prefix
+
++# Specify filename containing input files for \$NM.
++nm_file_list_spec=$lt_nm_file_list_spec
++
++# The root where to search for dependent libraries,and in which our libraries should be installed.
++lt_sysroot=$lt_sysroot
++
+ # The name of the directory that contains temporary libtool files.
+ objdir=$objdir
+
+@@ -18940,6 +19817,9 @@ MAGIC_CMD=$MAGIC_CMD
+ # Must we lock files when doing compilation?
+ need_locks=$lt_need_locks
+
++# Manifest tool.
++MANIFEST_TOOL=$lt_MANIFEST_TOOL
++
+ # Tool to manipulate archived DWARF debug symbol files on Mac OS X.
+ DSYMUTIL=$lt_DSYMUTIL
+
+@@ -19054,12 +19934,12 @@ with_gcc=$GCC
+ # Compiler flag to turn off builtin functions.
+ no_builtin_flag=$lt_lt_prog_compiler_no_builtin_flag
+
+-# How to pass a linker flag through the compiler.
+-wl=$lt_lt_prog_compiler_wl
+-
+ # Additional compiler flags for building library objects.
+ pic_flag=$lt_lt_prog_compiler_pic
+
++# How to pass a linker flag through the compiler.
++wl=$lt_lt_prog_compiler_wl
++
+ # Compiler flag to prevent dynamic linking.
+ link_static_flag=$lt_lt_prog_compiler_static
+
+@@ -19146,9 +20026,6 @@ inherit_rpath=$inherit_rpath
+ # Whether libtool must link a program against all its dependency libraries.
+ link_all_deplibs=$link_all_deplibs
+
+-# Fix the shell variable \$srcfile for the compiler.
+-fix_srcfile_path=$lt_fix_srcfile_path
+-
+ # Set to "yes" if exported symbols are required.
+ always_export_symbols=$always_export_symbols
+
+@@ -19164,6 +20041,9 @@ include_expsyms=$lt_include_expsyms
+ # Commands necessary for linking programs (against libraries) with templates.
+ prelink_cmds=$lt_prelink_cmds
+
++# Commands necessary for finishing linking programs.
++postlink_cmds=$lt_postlink_cmds
++
+ # Specify filename containing input files.
+ file_list_spec=$lt_file_list_spec
+
+@@ -19210,210 +20090,169 @@ ltmain="$ac_aux_dir/ltmain.sh"
+ # if finds mixed CR/LF and LF-only lines. Since sed operates in
+ # text mode, it properly converts lines to CR/LF. This bash problem
+ # is reportedly fixed, but why not run on old versions too?
+- sed '/^# Generated shell functions inserted here/q' "$ltmain" >> "$cfgfile" \
+- || (rm -f "$cfgfile"; exit 1)
+-
+- case $xsi_shell in
+- yes)
+- cat << \_LT_EOF >> "$cfgfile"
+-
+-# func_dirname file append nondir_replacement
+-# Compute the dirname of FILE. If nonempty, add APPEND to the result,
+-# otherwise set result to NONDIR_REPLACEMENT.
+-func_dirname ()
+-{
+- case ${1} in
+- */*) func_dirname_result="${1%/*}${2}" ;;
+- * ) func_dirname_result="${3}" ;;
+- esac
+-}
+-
+-# func_basename file
+-func_basename ()
+-{
+- func_basename_result="${1##*/}"
+-}
+-
+-# func_dirname_and_basename file append nondir_replacement
+-# perform func_basename and func_dirname in a single function
+-# call:
+-# dirname: Compute the dirname of FILE. If nonempty,
+-# add APPEND to the result, otherwise set result
+-# to NONDIR_REPLACEMENT.
+-# value returned in "$func_dirname_result"
+-# basename: Compute filename of FILE.
+-# value retuned in "$func_basename_result"
+-# Implementation must be kept synchronized with func_dirname
+-# and func_basename. For efficiency, we do not delegate to
+-# those functions but instead duplicate the functionality here.
+-func_dirname_and_basename ()
+-{
+- case ${1} in
+- */*) func_dirname_result="${1%/*}${2}" ;;
+- * ) func_dirname_result="${3}" ;;
+- esac
+- func_basename_result="${1##*/}"
+-}
+-
+-# func_stripname prefix suffix name
+-# strip PREFIX and SUFFIX off of NAME.
+-# PREFIX and SUFFIX must not contain globbing or regex special
+-# characters, hashes, percent signs, but SUFFIX may contain a leading
+-# dot (in which case that matches only a dot).
+-func_stripname ()
+-{
+- # pdksh 5.2.14 does not do ${X%$Y} correctly if both X and Y are
+- # positional parameters, so assign one to ordinary parameter first.
+- func_stripname_result=${3}
+- func_stripname_result=${func_stripname_result#"${1}"}
+- func_stripname_result=${func_stripname_result%"${2}"}
+-}
+-
+-# func_opt_split
+-func_opt_split ()
+-{
+- func_opt_split_opt=${1%%=*}
+- func_opt_split_arg=${1#*=}
+-}
+-
+-# func_lo2o object
+-func_lo2o ()
+-{
+- case ${1} in
+- *.lo) func_lo2o_result=${1%.lo}.${objext} ;;
+- *) func_lo2o_result=${1} ;;
+- esac
+-}
+-
+-# func_xform libobj-or-source
+-func_xform ()
+-{
+- func_xform_result=${1%.*}.lo
+-}
+-
+-# func_arith arithmetic-term...
+-func_arith ()
+-{
+- func_arith_result=$(( $* ))
+-}
+-
+-# func_len string
+-# STRING may not start with a hyphen.
+-func_len ()
+-{
+- func_len_result=${#1}
+-}
+-
+-_LT_EOF
+- ;;
+- *) # Bourne compatible functions.
+- cat << \_LT_EOF >> "$cfgfile"
+-
+-# func_dirname file append nondir_replacement
+-# Compute the dirname of FILE. If nonempty, add APPEND to the result,
+-# otherwise set result to NONDIR_REPLACEMENT.
+-func_dirname ()
+-{
+- # Extract subdirectory from the argument.
+- func_dirname_result=`$ECHO "${1}" | $SED "$dirname"`
+- if test "X$func_dirname_result" = "X${1}"; then
+- func_dirname_result="${3}"
+- else
+- func_dirname_result="$func_dirname_result${2}"
+- fi
+-}
+-
+-# func_basename file
+-func_basename ()
+-{
+- func_basename_result=`$ECHO "${1}" | $SED "$basename"`
+-}
+-
+-
+-# func_stripname prefix suffix name
+-# strip PREFIX and SUFFIX off of NAME.
+-# PREFIX and SUFFIX must not contain globbing or regex special
+-# characters, hashes, percent signs, but SUFFIX may contain a leading
+-# dot (in which case that matches only a dot).
+-# func_strip_suffix prefix name
+-func_stripname ()
+-{
+- case ${2} in
+- .*) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%\\\\${2}\$%%"`;;
+- *) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%${2}\$%%"`;;
+- esac
+-}
+-
+-# sed scripts:
+-my_sed_long_opt='1s/^\(-[^=]*\)=.*/\1/;q'
+-my_sed_long_arg='1s/^-[^=]*=//'
+-
+-# func_opt_split
+-func_opt_split ()
+-{
+- func_opt_split_opt=`$ECHO "${1}" | $SED "$my_sed_long_opt"`
+- func_opt_split_arg=`$ECHO "${1}" | $SED "$my_sed_long_arg"`
+-}
+-
+-# func_lo2o object
+-func_lo2o ()
+-{
+- func_lo2o_result=`$ECHO "${1}" | $SED "$lo2o"`
+-}
+-
+-# func_xform libobj-or-source
+-func_xform ()
+-{
+- func_xform_result=`$ECHO "${1}" | $SED 's/\.[^.]*$/.lo/'`
+-}
+-
+-# func_arith arithmetic-term...
+-func_arith ()
+-{
+- func_arith_result=`expr "$@"`
+-}
+-
+-# func_len string
+-# STRING may not start with a hyphen.
+-func_len ()
+-{
+- func_len_result=`expr "$1" : ".*" 2>/dev/null || echo $max_cmd_len`
+-}
+-
+-_LT_EOF
+-esac
+-
+-case $lt_shell_append in
+- yes)
+- cat << \_LT_EOF >> "$cfgfile"
+-
+-# func_append var value
+-# Append VALUE to the end of shell variable VAR.
+-func_append ()
+-{
+- eval "$1+=\$2"
+-}
+-_LT_EOF
+- ;;
+- *)
+- cat << \_LT_EOF >> "$cfgfile"
+-
+-# func_append var value
+-# Append VALUE to the end of shell variable VAR.
+-func_append ()
+-{
+- eval "$1=\$$1\$2"
+-}
+-
+-_LT_EOF
+- ;;
+- esac
+-
+-
+- sed -n '/^# Generated shell functions inserted here/,$p' "$ltmain" >> "$cfgfile" \
+- || (rm -f "$cfgfile"; exit 1)
+-
+- mv -f "$cfgfile" "$ofile" ||
++ sed '$q' "$ltmain" >> "$cfgfile" \
++ || (rm -f "$cfgfile"; exit 1)
++
++ if test x"$xsi_shell" = xyes; then
++ sed -e '/^func_dirname ()$/,/^} # func_dirname /c\
++func_dirname ()\
++{\
++\ case ${1} in\
++\ */*) func_dirname_result="${1%/*}${2}" ;;\
++\ * ) func_dirname_result="${3}" ;;\
++\ esac\
++} # Extended-shell func_dirname implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++
++ sed -e '/^func_basename ()$/,/^} # func_basename /c\
++func_basename ()\
++{\
++\ func_basename_result="${1##*/}"\
++} # Extended-shell func_basename implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++
++ sed -e '/^func_dirname_and_basename ()$/,/^} # func_dirname_and_basename /c\
++func_dirname_and_basename ()\
++{\
++\ case ${1} in\
++\ */*) func_dirname_result="${1%/*}${2}" ;;\
++\ * ) func_dirname_result="${3}" ;;\
++\ esac\
++\ func_basename_result="${1##*/}"\
++} # Extended-shell func_dirname_and_basename implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++
++ sed -e '/^func_stripname ()$/,/^} # func_stripname /c\
++func_stripname ()\
++{\
++\ # pdksh 5.2.14 does not do ${X%$Y} correctly if both X and Y are\
++\ # positional parameters, so assign one to ordinary parameter first.\
++\ func_stripname_result=${3}\
++\ func_stripname_result=${func_stripname_result#"${1}"}\
++\ func_stripname_result=${func_stripname_result%"${2}"}\
++} # Extended-shell func_stripname implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++
++ sed -e '/^func_split_long_opt ()$/,/^} # func_split_long_opt /c\
++func_split_long_opt ()\
++{\
++\ func_split_long_opt_name=${1%%=*}\
++\ func_split_long_opt_arg=${1#*=}\
++} # Extended-shell func_split_long_opt implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++
++ sed -e '/^func_split_short_opt ()$/,/^} # func_split_short_opt /c\
++func_split_short_opt ()\
++{\
++\ func_split_short_opt_arg=${1#??}\
++\ func_split_short_opt_name=${1%"$func_split_short_opt_arg"}\
++} # Extended-shell func_split_short_opt implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++
++ sed -e '/^func_lo2o ()$/,/^} # func_lo2o /c\
++func_lo2o ()\
++{\
++\ case ${1} in\
++\ *.lo) func_lo2o_result=${1%.lo}.${objext} ;;\
++\ *) func_lo2o_result=${1} ;;\
++\ esac\
++} # Extended-shell func_lo2o implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++
++ sed -e '/^func_xform ()$/,/^} # func_xform /c\
++func_xform ()\
++{\
++ func_xform_result=${1%.*}.lo\
++} # Extended-shell func_xform implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++
++ sed -e '/^func_arith ()$/,/^} # func_arith /c\
++func_arith ()\
++{\
++ func_arith_result=$(( $* ))\
++} # Extended-shell func_arith implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++
++ sed -e '/^func_len ()$/,/^} # func_len /c\
++func_len ()\
++{\
++ func_len_result=${#1}\
++} # Extended-shell func_len implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++fi
++
++if test x"$lt_shell_append" = xyes; then
++ sed -e '/^func_append ()$/,/^} # func_append /c\
++func_append ()\
++{\
++ eval "${1}+=\\${2}"\
++} # Extended-shell func_append implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++
++ sed -e '/^func_append_quoted ()$/,/^} # func_append_quoted /c\
++func_append_quoted ()\
++{\
++\ func_quote_for_eval "${2}"\
++\ eval "${1}+=\\\\ \\$func_quote_for_eval_result"\
++} # Extended-shell func_append_quoted implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++
++ # Save a `func_append' function call where possible by direct use of '+='
++ sed -e 's%func_append \([a-zA-Z_]\{1,\}\) "%\1+="%g' $cfgfile > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++ test 0 -eq $? || _lt_function_replace_fail=:
++else
++ # Save a `func_append' function call even when '+=' is not available
++ sed -e 's%func_append \([a-zA-Z_]\{1,\}\) "%\1="$\1%g' $cfgfile > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++ test 0 -eq $? || _lt_function_replace_fail=:
++fi
++
++if test x"$_lt_function_replace_fail" = x":"; then
++ { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: Unable to substitute extended shell functions in $ofile" >&5
++$as_echo "$as_me: WARNING: Unable to substitute extended shell functions in $ofile" >&2;}
++fi
++
++
++ mv -f "$cfgfile" "$ofile" ||
+ (rm -f "$ofile" && cp "$cfgfile" "$ofile" && rm -f "$cfgfile")
+ chmod +x "$ofile"
+
+@@ -19441,12 +20280,12 @@ with_gcc=$GCC_CXX
+ # Compiler flag to turn off builtin functions.
+ no_builtin_flag=$lt_lt_prog_compiler_no_builtin_flag_CXX
+
+-# How to pass a linker flag through the compiler.
+-wl=$lt_lt_prog_compiler_wl_CXX
+-
+ # Additional compiler flags for building library objects.
+ pic_flag=$lt_lt_prog_compiler_pic_CXX
+
++# How to pass a linker flag through the compiler.
++wl=$lt_lt_prog_compiler_wl_CXX
++
+ # Compiler flag to prevent dynamic linking.
+ link_static_flag=$lt_lt_prog_compiler_static_CXX
+
+@@ -19533,9 +20372,6 @@ inherit_rpath=$inherit_rpath_CXX
+ # Whether libtool must link a program against all its dependency libraries.
+ link_all_deplibs=$link_all_deplibs_CXX
+
+-# Fix the shell variable \$srcfile for the compiler.
+-fix_srcfile_path=$lt_fix_srcfile_path_CXX
+-
+ # Set to "yes" if exported symbols are required.
+ always_export_symbols=$always_export_symbols_CXX
+
+@@ -19551,6 +20387,9 @@ include_expsyms=$lt_include_expsyms_CXX
+ # Commands necessary for linking programs (against libraries) with templates.
+ prelink_cmds=$lt_prelink_cmds_CXX
+
++# Commands necessary for finishing linking programs.
++postlink_cmds=$lt_postlink_cmds_CXX
++
+ # Specify filename containing input files.
+ file_list_spec=$lt_file_list_spec_CXX
+
+diff --git a/gprofng/doc/Makefile.in b/gprofng/doc/Makefile.in
+index 3306c51b31c..6ab3d9636c8 100644
+--- a/gprofng/doc/Makefile.in
++++ b/gprofng/doc/Makefile.in
+@@ -238,6 +238,7 @@ CXXFLAGS = @CXXFLAGS@
+ CYGPATH_W = @CYGPATH_W@
+ DEFS = @DEFS@
+ DEPDIR = @DEPDIR@
++DLLTOOL = @DLLTOOL@
+ DSYMUTIL = @DSYMUTIL@
+ DUMPBIN = @DUMPBIN@
+ ECHO_C = @ECHO_C@
+@@ -272,6 +273,7 @@ LN_S = @LN_S@
+ LTLIBOBJS = @LTLIBOBJS@
+ MAINT = @MAINT@
+ MAKEINFO = @MAKEINFO@
++MANIFEST_TOOL = @MANIFEST_TOOL@
+ MKDIR_P = @MKDIR_P@
+ NM = @NM@
+ NMEDIT = @NMEDIT@
+diff --git a/gprofng/gp-display-html/Makefile.in b/gprofng/gp-display-html/Makefile.in
+index 61c00cd6e46..edce3cb94c5 100644
+--- a/gprofng/gp-display-html/Makefile.in
++++ b/gprofng/gp-display-html/Makefile.in
+@@ -197,6 +197,7 @@ CXXFLAGS = @CXXFLAGS@
+ CYGPATH_W = @CYGPATH_W@
+ DEFS = @DEFS@
+ DEPDIR = @DEPDIR@
++DLLTOOL = @DLLTOOL@
+ DSYMUTIL = @DSYMUTIL@
+ DUMPBIN = @DUMPBIN@
+ ECHO_C = @ECHO_C@
+@@ -231,6 +232,7 @@ LN_S = @LN_S@
+ LTLIBOBJS = @LTLIBOBJS@
+ MAINT = @MAINT@
+ MAKEINFO = @MAKEINFO@
++MANIFEST_TOOL = @MANIFEST_TOOL@
+ MKDIR_P = @MKDIR_P@
+ NM = @NM@
+ NMEDIT = @NMEDIT@
+diff --git a/gprofng/libcollector/Makefile.in b/gprofng/libcollector/Makefile.in
+index ce8755c80c9..91e29c29ab8 100644
+--- a/gprofng/libcollector/Makefile.in
++++ b/gprofng/libcollector/Makefile.in
+@@ -316,6 +316,7 @@ CXXFLAGS = @CXXFLAGS@
+ CYGPATH_W = @CYGPATH_W@
+ DEFS = @DEFS@
+ DEPDIR = @DEPDIR@
++DLLTOOL = @DLLTOOL@
+ DSYMUTIL = @DSYMUTIL@
+ DUMPBIN = @DUMPBIN@
+ ECHO_C = @ECHO_C@
+@@ -341,6 +342,7 @@ LN_S = @LN_S@
+ LTLIBOBJS = @LTLIBOBJS@
+ MAINT = @MAINT@
+ MAKEINFO = @MAKEINFO@
++MANIFEST_TOOL = @MANIFEST_TOOL@
+ MKDIR_P = @MKDIR_P@
+ NM = @NM@
+ NMEDIT = @NMEDIT@
+diff --git a/gprofng/libcollector/configure b/gprofng/libcollector/configure
+index 8b0092433fe..a363ab1047c 100755
+--- a/gprofng/libcollector/configure
++++ b/gprofng/libcollector/configure
+@@ -640,6 +640,8 @@ OTOOL
+ LIPO
+ NMEDIT
+ DSYMUTIL
++MANIFEST_TOOL
++DLLTOOL
+ OBJDUMP
+ LN_S
+ NM
+@@ -769,6 +771,7 @@ enable_static
+ with_pic
+ enable_fast_install
+ with_gnu_ld
++with_libtool_sysroot
+ enable_libtool_lock
+ '
+ ac_precious_vars='build_alias
+@@ -1424,6 +1427,8 @@ Optional Packages:
+ --with-pic try to use only PIC/non-PIC objects [default=use
+ both]
+ --with-gnu-ld assume the C compiler uses GNU ld [default=no]
++ --with-libtool-sysroot=DIR Search for dependent libraries within DIR
++ (or the compiler's sysroot if not specified).
+
+ Some influential environment variables:
+ CC C compiler command
+@@ -5968,8 +5973,8 @@ esac
+
+
+
+-macro_version='2.2.7a'
+-macro_revision='1.3134'
++macro_version='2.4'
++macro_revision='1.3293'
+
+
+
+@@ -6009,7 +6014,7 @@ ECHO=$ECHO$ECHO$ECHO$ECHO$ECHO$ECHO
+ { $as_echo "$as_me:${as_lineno-$LINENO}: checking how to print strings" >&5
+ $as_echo_n "checking how to print strings... " >&6; }
+ # Test print first, because it will be a builtin if present.
+-if test "X`print -r -- -n 2>/dev/null`" = X-n && \
++if test "X`( print -r -- -n ) 2>/dev/null`" = X-n && \
+ test "X`print -r -- $ECHO 2>/dev/null`" = "X$ECHO"; then
+ ECHO='print -r --'
+ elif test "X`printf %s $ECHO 2>/dev/null`" = "X$ECHO"; then
+@@ -6702,8 +6707,8 @@ $as_echo_n "checking whether the shell understands some XSI constructs... " >&6;
+ # Try some XSI features
+ xsi_shell=no
+ ( _lt_dummy="a/b/c"
+- test "${_lt_dummy##*/},${_lt_dummy%/*},"${_lt_dummy%"$_lt_dummy"}, \
+- = c,a/b,, \
++ test "${_lt_dummy##*/},${_lt_dummy%/*},${_lt_dummy#??}"${_lt_dummy%"$_lt_dummy"}, \
++ = c,a/b,b/c, \
+ && eval 'test $(( 1 + 1 )) -eq 2 \
+ && test "${#_lt_dummy}" -eq 5' ) >/dev/null 2>&1 \
+ && xsi_shell=yes
+@@ -6752,6 +6757,80 @@ esac
+
+
+
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to convert $build file names to $host format" >&5
++$as_echo_n "checking how to convert $build file names to $host format... " >&6; }
++if ${lt_cv_to_host_file_cmd+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ case $host in
++ *-*-mingw* )
++ case $build in
++ *-*-mingw* ) # actually msys
++ lt_cv_to_host_file_cmd=func_convert_file_msys_to_w32
++ ;;
++ *-*-cygwin* )
++ lt_cv_to_host_file_cmd=func_convert_file_cygwin_to_w32
++ ;;
++ * ) # otherwise, assume *nix
++ lt_cv_to_host_file_cmd=func_convert_file_nix_to_w32
++ ;;
++ esac
++ ;;
++ *-*-cygwin* )
++ case $build in
++ *-*-mingw* ) # actually msys
++ lt_cv_to_host_file_cmd=func_convert_file_msys_to_cygwin
++ ;;
++ *-*-cygwin* )
++ lt_cv_to_host_file_cmd=func_convert_file_noop
++ ;;
++ * ) # otherwise, assume *nix
++ lt_cv_to_host_file_cmd=func_convert_file_nix_to_cygwin
++ ;;
++ esac
++ ;;
++ * ) # unhandled hosts (and "normal" native builds)
++ lt_cv_to_host_file_cmd=func_convert_file_noop
++ ;;
++esac
++
++fi
++
++to_host_file_cmd=$lt_cv_to_host_file_cmd
++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_to_host_file_cmd" >&5
++$as_echo "$lt_cv_to_host_file_cmd" >&6; }
++
++
++
++
++
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to convert $build file names to toolchain format" >&5
++$as_echo_n "checking how to convert $build file names to toolchain format... " >&6; }
++if ${lt_cv_to_tool_file_cmd+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ #assume ordinary cross tools, or native build.
++lt_cv_to_tool_file_cmd=func_convert_file_noop
++case $host in
++ *-*-mingw* )
++ case $build in
++ *-*-mingw* ) # actually msys
++ lt_cv_to_tool_file_cmd=func_convert_file_msys_to_w32
++ ;;
++ esac
++ ;;
++esac
++
++fi
++
++to_tool_file_cmd=$lt_cv_to_tool_file_cmd
++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_to_tool_file_cmd" >&5
++$as_echo "$lt_cv_to_tool_file_cmd" >&6; }
++
++
++
++
++
+ { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $LD option to reload object files" >&5
+ $as_echo_n "checking for $LD option to reload object files... " >&6; }
+ if ${lt_cv_ld_reload_flag+:} false; then :
+@@ -6768,6 +6847,11 @@ case $reload_flag in
+ esac
+ reload_cmds='$LD$reload_flag -o $output$reload_objs'
+ case $host_os in
++ cygwin* | mingw* | pw32* | cegcc*)
++ if test "$GCC" != yes; then
++ reload_cmds=false
++ fi
++ ;;
+ darwin*)
+ if test "$GCC" = yes; then
+ reload_cmds='$LTCC $LTCFLAGS -nostdlib ${wl}-r -o $output$reload_objs'
+@@ -6936,7 +7020,8 @@ mingw* | pw32*)
+ lt_cv_deplibs_check_method='file_magic ^x86 archive import|^x86 DLL'
+ lt_cv_file_magic_cmd='func_win32_libid'
+ else
+- lt_cv_deplibs_check_method='file_magic file format pei*-i386(.*architecture: i386)?'
++ # Keep this pattern in sync with the one in func_win32_libid.
++ lt_cv_deplibs_check_method='file_magic file format (pei*-i386(.*architecture: i386)?|pe-arm-wince|pe-x86-64)'
+ lt_cv_file_magic_cmd='$OBJDUMP -f'
+ fi
+ ;;
+@@ -7095,6 +7180,21 @@ esac
+ fi
+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_deplibs_check_method" >&5
+ $as_echo "$lt_cv_deplibs_check_method" >&6; }
++
++file_magic_glob=
++want_nocaseglob=no
++if test "$build" = "$host"; then
++ case $host_os in
++ mingw* | pw32*)
++ if ( shopt | grep nocaseglob ) >/dev/null 2>&1; then
++ want_nocaseglob=yes
++ else
++ file_magic_glob=`echo aAbBcCdDeEfFgGhHiIjJkKlLmMnNoOpPqQrRsStTuUvVwWxXyYzZ | $SED -e "s/\(..\)/s\/[\1]\/[\1]\/g;/g"`
++ fi
++ ;;
++ esac
++fi
++
+ file_magic_cmd=$lt_cv_file_magic_cmd
+ deplibs_check_method=$lt_cv_deplibs_check_method
+ test -z "$deplibs_check_method" && deplibs_check_method=unknown
+@@ -7110,6 +7210,157 @@ test -z "$deplibs_check_method" && deplibs_check_method=unknown
+
+
+
++
++
++
++
++
++
++
++
++
++
++if test -n "$ac_tool_prefix"; then
++ # Extract the first word of "${ac_tool_prefix}dlltool", so it can be a program name with args.
++set dummy ${ac_tool_prefix}dlltool; ac_word=$2
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
++$as_echo_n "checking for $ac_word... " >&6; }
++if ${ac_cv_prog_DLLTOOL+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ if test -n "$DLLTOOL"; then
++ ac_cv_prog_DLLTOOL="$DLLTOOL" # Let the user override the test.
++else
++as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
++for as_dir in $PATH
++do
++ IFS=$as_save_IFS
++ test -z "$as_dir" && as_dir=.
++ for ac_exec_ext in '' $ac_executable_extensions; do
++ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
++ ac_cv_prog_DLLTOOL="${ac_tool_prefix}dlltool"
++ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
++ break 2
++ fi
++done
++ done
++IFS=$as_save_IFS
++
++fi
++fi
++DLLTOOL=$ac_cv_prog_DLLTOOL
++if test -n "$DLLTOOL"; then
++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $DLLTOOL" >&5
++$as_echo "$DLLTOOL" >&6; }
++else
++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
++$as_echo "no" >&6; }
++fi
++
++
++fi
++if test -z "$ac_cv_prog_DLLTOOL"; then
++ ac_ct_DLLTOOL=$DLLTOOL
++ # Extract the first word of "dlltool", so it can be a program name with args.
++set dummy dlltool; ac_word=$2
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
++$as_echo_n "checking for $ac_word... " >&6; }
++if ${ac_cv_prog_ac_ct_DLLTOOL+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ if test -n "$ac_ct_DLLTOOL"; then
++ ac_cv_prog_ac_ct_DLLTOOL="$ac_ct_DLLTOOL" # Let the user override the test.
++else
++as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
++for as_dir in $PATH
++do
++ IFS=$as_save_IFS
++ test -z "$as_dir" && as_dir=.
++ for ac_exec_ext in '' $ac_executable_extensions; do
++ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
++ ac_cv_prog_ac_ct_DLLTOOL="dlltool"
++ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
++ break 2
++ fi
++done
++ done
++IFS=$as_save_IFS
++
++fi
++fi
++ac_ct_DLLTOOL=$ac_cv_prog_ac_ct_DLLTOOL
++if test -n "$ac_ct_DLLTOOL"; then
++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_DLLTOOL" >&5
++$as_echo "$ac_ct_DLLTOOL" >&6; }
++else
++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
++$as_echo "no" >&6; }
++fi
++
++ if test "x$ac_ct_DLLTOOL" = x; then
++ DLLTOOL="false"
++ else
++ case $cross_compiling:$ac_tool_warned in
++yes:)
++{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
++$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
++ac_tool_warned=yes ;;
++esac
++ DLLTOOL=$ac_ct_DLLTOOL
++ fi
++else
++ DLLTOOL="$ac_cv_prog_DLLTOOL"
++fi
++
++test -z "$DLLTOOL" && DLLTOOL=dlltool
++
++
++
++
++
++
++
++
++
++
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to associate runtime and link libraries" >&5
++$as_echo_n "checking how to associate runtime and link libraries... " >&6; }
++if ${lt_cv_sharedlib_from_linklib_cmd+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ lt_cv_sharedlib_from_linklib_cmd='unknown'
++
++case $host_os in
++cygwin* | mingw* | pw32* | cegcc*)
++ # two different shell functions defined in ltmain.sh
++ # decide which to use based on capabilities of $DLLTOOL
++ case `$DLLTOOL --help 2>&1` in
++ *--identify-strict*)
++ lt_cv_sharedlib_from_linklib_cmd=func_cygming_dll_for_implib
++ ;;
++ *)
++ lt_cv_sharedlib_from_linklib_cmd=func_cygming_dll_for_implib_fallback
++ ;;
++ esac
++ ;;
++*)
++ # fallback: assume linklib IS sharedlib
++ lt_cv_sharedlib_from_linklib_cmd="$ECHO"
++ ;;
++esac
++
++fi
++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_sharedlib_from_linklib_cmd" >&5
++$as_echo "$lt_cv_sharedlib_from_linklib_cmd" >&6; }
++sharedlib_from_linklib_cmd=$lt_cv_sharedlib_from_linklib_cmd
++test -z "$sharedlib_from_linklib_cmd" && sharedlib_from_linklib_cmd=$ECHO
++
++
++
++
++
++
++
+ plugin_option=
+ plugin_names="liblto_plugin.so liblto_plugin-0.dll cyglto_plugin-0.dll"
+ for plugin in $plugin_names; do
+@@ -7124,8 +7375,10 @@ for plugin in $plugin_names; do
+ done
+
+ if test -n "$ac_tool_prefix"; then
+- # Extract the first word of "${ac_tool_prefix}ar", so it can be a program name with args.
+-set dummy ${ac_tool_prefix}ar; ac_word=$2
++ for ac_prog in ar
++ do
++ # Extract the first word of "$ac_tool_prefix$ac_prog", so it can be a program name with args.
++set dummy $ac_tool_prefix$ac_prog; ac_word=$2
+ { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+ $as_echo_n "checking for $ac_word... " >&6; }
+ if ${ac_cv_prog_AR+:} false; then :
+@@ -7141,7 +7394,7 @@ do
+ test -z "$as_dir" && as_dir=.
+ for ac_exec_ext in '' $ac_executable_extensions; do
+ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
+- ac_cv_prog_AR="${ac_tool_prefix}ar"
++ ac_cv_prog_AR="$ac_tool_prefix$ac_prog"
+ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+ break 2
+ fi
+@@ -7161,11 +7414,15 @@ $as_echo "no" >&6; }
+ fi
+
+
++ test -n "$AR" && break
++ done
+ fi
+-if test -z "$ac_cv_prog_AR"; then
++if test -z "$AR"; then
+ ac_ct_AR=$AR
+- # Extract the first word of "ar", so it can be a program name with args.
+-set dummy ar; ac_word=$2
++ for ac_prog in ar
++do
++ # Extract the first word of "$ac_prog", so it can be a program name with args.
++set dummy $ac_prog; ac_word=$2
+ { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+ $as_echo_n "checking for $ac_word... " >&6; }
+ if ${ac_cv_prog_ac_ct_AR+:} false; then :
+@@ -7181,7 +7438,7 @@ do
+ test -z "$as_dir" && as_dir=.
+ for ac_exec_ext in '' $ac_executable_extensions; do
+ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
+- ac_cv_prog_ac_ct_AR="ar"
++ ac_cv_prog_ac_ct_AR="$ac_prog"
+ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+ break 2
+ fi
+@@ -7200,6 +7457,10 @@ else
+ $as_echo "no" >&6; }
+ fi
+
++
++ test -n "$ac_ct_AR" && break
++done
++
+ if test "x$ac_ct_AR" = x; then
+ AR="false"
+ else
+@@ -7211,25 +7472,19 @@ ac_tool_warned=yes ;;
+ esac
+ AR=$ac_ct_AR
+ fi
+-else
+- AR="$ac_cv_prog_AR"
+ fi
+
+-test -z "$AR" && AR=ar
+-if test -n "$plugin_option"; then
+- if $AR --help 2>&1 | grep -q "\--plugin"; then
+- touch conftest.c
+- $AR $plugin_option rc conftest.a conftest.c
+- if test "$?" != 0; then
+- { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: Failed: $AR $plugin_option rc" >&5
++ touch conftest.c
++ $AR $plugin_option rc conftest.a conftest.c
++ if test "$?" != 0; then
++ { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: Failed: $AR $plugin_option rc" >&5
+ $as_echo "$as_me: WARNING: Failed: $AR $plugin_option rc" >&2;}
+- else
+- AR="$AR $plugin_option"
+- fi
+- rm -f conftest.*
++ else
++ AR="$AR $plugin_option"
+ fi
+-fi
+-test -z "$AR_FLAGS" && AR_FLAGS=cru
++ rm -f conftest.*
++: ${AR=ar}
++: ${AR_FLAGS=cru}
+
+
+
+@@ -7241,6 +7496,64 @@ test -z "$AR_FLAGS" && AR_FLAGS=cru
+
+
+
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for archiver @FILE support" >&5
++$as_echo_n "checking for archiver @FILE support... " >&6; }
++if ${lt_cv_ar_at_file+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ lt_cv_ar_at_file=no
++ cat confdefs.h - <<_ACEOF >conftest.$ac_ext
++/* end confdefs.h. */
++
++int
++main ()
++{
++
++ ;
++ return 0;
++}
++_ACEOF
++if ac_fn_c_try_compile "$LINENO"; then :
++ echo conftest.$ac_objext > conftest.lst
++ lt_ar_try='$AR $AR_FLAGS libconftest.a @conftest.lst >&5'
++ { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$lt_ar_try\""; } >&5
++ (eval $lt_ar_try) 2>&5
++ ac_status=$?
++ $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
++ test $ac_status = 0; }
++ if test "$ac_status" -eq 0; then
++ # Ensure the archiver fails upon bogus file names.
++ rm -f conftest.$ac_objext libconftest.a
++ { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$lt_ar_try\""; } >&5
++ (eval $lt_ar_try) 2>&5
++ ac_status=$?
++ $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
++ test $ac_status = 0; }
++ if test "$ac_status" -ne 0; then
++ lt_cv_ar_at_file=@
++ fi
++ fi
++ rm -f conftest.* libconftest.a
++
++fi
++rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
++
++fi
++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_ar_at_file" >&5
++$as_echo "$lt_cv_ar_at_file" >&6; }
++
++if test "x$lt_cv_ar_at_file" = xno; then
++ archiver_list_spec=
++else
++ archiver_list_spec=$lt_cv_ar_at_file
++fi
++
++
++
++
++
++
++
+ if test -n "$ac_tool_prefix"; then
+ # Extract the first word of "${ac_tool_prefix}strip", so it can be a program name with args.
+ set dummy ${ac_tool_prefix}strip; ac_word=$2
+@@ -7580,8 +7893,8 @@ esac
+ lt_cv_sys_global_symbol_to_cdecl="sed -n -e 's/^T .* \(.*\)$/extern int \1();/p' -e 's/^$symcode* .* \(.*\)$/extern char \1;/p'"
+
+ # Transform an extracted symbol line into symbol name and symbol address
+-lt_cv_sys_global_symbol_to_c_name_address="sed -n -e 's/^: \([^ ]*\) $/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"\2\", (void *) \&\2},/p'"
+-lt_cv_sys_global_symbol_to_c_name_address_lib_prefix="sed -n -e 's/^: \([^ ]*\) $/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \(lib[^ ]*\)$/ {\"\2\", (void *) \&\2},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"lib\2\", (void *) \&\2},/p'"
++lt_cv_sys_global_symbol_to_c_name_address="sed -n -e 's/^: \([^ ]*\)[ ]*$/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"\2\", (void *) \&\2},/p'"
++lt_cv_sys_global_symbol_to_c_name_address_lib_prefix="sed -n -e 's/^: \([^ ]*\)[ ]*$/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \(lib[^ ]*\)$/ {\"\2\", (void *) \&\2},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"lib\2\", (void *) \&\2},/p'"
+
+ # Handle CRLF in mingw tool chain
+ opt_cr=
+@@ -7617,6 +7930,7 @@ for ac_symprfx in "" "_"; do
+ else
+ lt_cv_sys_global_symbol_pipe="sed -n -e 's/^.*[ ]\($symcode$symcode*\)[ ][ ]*$ac_symprfx$sympat$opt_cr$/$symxfrm/p'"
+ fi
++ lt_cv_sys_global_symbol_pipe="$lt_cv_sys_global_symbol_pipe | sed '/ __gnu_lto/d'"
+
+ # Check to see that the pipe works correctly.
+ pipe_works=no
+@@ -7658,6 +7972,18 @@ _LT_EOF
+ if $GREP ' nm_test_var$' "$nlist" >/dev/null; then
+ if $GREP ' nm_test_func$' "$nlist" >/dev/null; then
+ cat <<_LT_EOF > conftest.$ac_ext
++/* Keep this code in sync between libtool.m4, ltmain, lt_system.h, and tests. */
++#if defined(_WIN32) || defined(__CYGWIN__) || defined(_WIN32_WCE)
++/* DATA imports from DLLs on WIN32 con't be const, because runtime
++ relocations are performed -- see ld's documentation on pseudo-relocs. */
++# define LT_DLSYM_CONST
++#elif defined(__osf__)
++/* This system does not cope well with relocations in const data. */
++# define LT_DLSYM_CONST
++#else
++# define LT_DLSYM_CONST const
++#endif
++
+ #ifdef __cplusplus
+ extern "C" {
+ #endif
+@@ -7669,7 +7995,7 @@ _LT_EOF
+ cat <<_LT_EOF >> conftest.$ac_ext
+
+ /* The mapping between symbol names and symbols. */
+-const struct {
++LT_DLSYM_CONST struct {
+ const char *name;
+ void *address;
+ }
+@@ -7695,8 +8021,8 @@ static const void *lt_preloaded_setup() {
+ _LT_EOF
+ # Now try linking the two files.
+ mv conftest.$ac_objext conftstm.$ac_objext
+- lt_save_LIBS="$LIBS"
+- lt_save_CFLAGS="$CFLAGS"
++ lt_globsym_save_LIBS=$LIBS
++ lt_globsym_save_CFLAGS=$CFLAGS
+ LIBS="conftstm.$ac_objext"
+ CFLAGS="$CFLAGS$lt_prog_compiler_no_builtin_flag"
+ if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_link\""; } >&5
+@@ -7706,8 +8032,8 @@ _LT_EOF
+ test $ac_status = 0; } && test -s conftest${ac_exeext}; then
+ pipe_works=yes
+ fi
+- LIBS="$lt_save_LIBS"
+- CFLAGS="$lt_save_CFLAGS"
++ LIBS=$lt_globsym_save_LIBS
++ CFLAGS=$lt_globsym_save_CFLAGS
+ else
+ echo "cannot find nm_test_func in $nlist" >&5
+ fi
+@@ -7744,6 +8070,13 @@ else
+ $as_echo "ok" >&6; }
+ fi
+
++# Response file support.
++if test "$lt_cv_nm_interface" = "MS dumpbin"; then
++ nm_file_list_spec='@'
++elif $NM --help 2>/dev/null | grep '[@]FILE' >/dev/null; then
++ nm_file_list_spec='@'
++fi
++
+
+
+
+@@ -7763,6 +8096,48 @@ fi
+
+
+
++
++
++
++
++
++
++
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for sysroot" >&5
++$as_echo_n "checking for sysroot... " >&6; }
++
++# Check whether --with-libtool-sysroot was given.
++if test "${with_libtool_sysroot+set}" = set; then :
++ withval=$with_libtool_sysroot;
++else
++ with_libtool_sysroot=no
++fi
++
++
++lt_sysroot=
++case ${with_libtool_sysroot} in #(
++ yes)
++ if test "$GCC" = yes; then
++ lt_sysroot=`$CC --print-sysroot 2>/dev/null`
++ fi
++ ;; #(
++ /*)
++ lt_sysroot=`echo "$with_libtool_sysroot" | sed -e "$sed_quote_subst"`
++ ;; #(
++ no|'')
++ ;; #(
++ *)
++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: ${with_libtool_sysroot}" >&5
++$as_echo "${with_libtool_sysroot}" >&6; }
++ as_fn_error $? "The sysroot must be an absolute path." "$LINENO" 5
++ ;;
++esac
++
++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: ${lt_sysroot:-no}" >&5
++$as_echo "${lt_sysroot:-no}" >&6; }
++
++
++
+
+
+ # Check whether --enable-libtool-lock was given.
+@@ -7971,6 +8346,123 @@ esac
+
+ need_locks="$enable_libtool_lock"
+
++if test -n "$ac_tool_prefix"; then
++ # Extract the first word of "${ac_tool_prefix}mt", so it can be a program name with args.
++set dummy ${ac_tool_prefix}mt; ac_word=$2
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
++$as_echo_n "checking for $ac_word... " >&6; }
++if ${ac_cv_prog_MANIFEST_TOOL+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ if test -n "$MANIFEST_TOOL"; then
++ ac_cv_prog_MANIFEST_TOOL="$MANIFEST_TOOL" # Let the user override the test.
++else
++as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
++for as_dir in $PATH
++do
++ IFS=$as_save_IFS
++ test -z "$as_dir" && as_dir=.
++ for ac_exec_ext in '' $ac_executable_extensions; do
++ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
++ ac_cv_prog_MANIFEST_TOOL="${ac_tool_prefix}mt"
++ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
++ break 2
++ fi
++done
++ done
++IFS=$as_save_IFS
++
++fi
++fi
++MANIFEST_TOOL=$ac_cv_prog_MANIFEST_TOOL
++if test -n "$MANIFEST_TOOL"; then
++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $MANIFEST_TOOL" >&5
++$as_echo "$MANIFEST_TOOL" >&6; }
++else
++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
++$as_echo "no" >&6; }
++fi
++
++
++fi
++if test -z "$ac_cv_prog_MANIFEST_TOOL"; then
++ ac_ct_MANIFEST_TOOL=$MANIFEST_TOOL
++ # Extract the first word of "mt", so it can be a program name with args.
++set dummy mt; ac_word=$2
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
++$as_echo_n "checking for $ac_word... " >&6; }
++if ${ac_cv_prog_ac_ct_MANIFEST_TOOL+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ if test -n "$ac_ct_MANIFEST_TOOL"; then
++ ac_cv_prog_ac_ct_MANIFEST_TOOL="$ac_ct_MANIFEST_TOOL" # Let the user override the test.
++else
++as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
++for as_dir in $PATH
++do
++ IFS=$as_save_IFS
++ test -z "$as_dir" && as_dir=.
++ for ac_exec_ext in '' $ac_executable_extensions; do
++ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
++ ac_cv_prog_ac_ct_MANIFEST_TOOL="mt"
++ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
++ break 2
++ fi
++done
++ done
++IFS=$as_save_IFS
++
++fi
++fi
++ac_ct_MANIFEST_TOOL=$ac_cv_prog_ac_ct_MANIFEST_TOOL
++if test -n "$ac_ct_MANIFEST_TOOL"; then
++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_MANIFEST_TOOL" >&5
++$as_echo "$ac_ct_MANIFEST_TOOL" >&6; }
++else
++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
++$as_echo "no" >&6; }
++fi
++
++ if test "x$ac_ct_MANIFEST_TOOL" = x; then
++ MANIFEST_TOOL=":"
++ else
++ case $cross_compiling:$ac_tool_warned in
++yes:)
++{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
++$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
++ac_tool_warned=yes ;;
++esac
++ MANIFEST_TOOL=$ac_ct_MANIFEST_TOOL
++ fi
++else
++ MANIFEST_TOOL="$ac_cv_prog_MANIFEST_TOOL"
++fi
++
++test -z "$MANIFEST_TOOL" && MANIFEST_TOOL=mt
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking if $MANIFEST_TOOL is a manifest tool" >&5
++$as_echo_n "checking if $MANIFEST_TOOL is a manifest tool... " >&6; }
++if ${lt_cv_path_mainfest_tool+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ lt_cv_path_mainfest_tool=no
++ echo "$as_me:$LINENO: $MANIFEST_TOOL '-?'" >&5
++ $MANIFEST_TOOL '-?' 2>conftest.err > conftest.out
++ cat conftest.err >&5
++ if $GREP 'Manifest Tool' conftest.out > /dev/null; then
++ lt_cv_path_mainfest_tool=yes
++ fi
++ rm -f conftest*
++fi
++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_path_mainfest_tool" >&5
++$as_echo "$lt_cv_path_mainfest_tool" >&6; }
++if test "x$lt_cv_path_mainfest_tool" != xyes; then
++ MANIFEST_TOOL=:
++fi
++
++
++
++
++
+
+ case $host_os in
+ rhapsody* | darwin*)
+@@ -8534,6 +9026,8 @@ _LT_EOF
+ $LTCC $LTCFLAGS -c -o conftest.o conftest.c 2>&5
+ echo "$AR cru libconftest.a conftest.o" >&5
+ $AR cru libconftest.a conftest.o 2>&5
++ echo "$RANLIB libconftest.a" >&5
++ $RANLIB libconftest.a 2>&5
+ cat > conftest.c << _LT_EOF
+ int main() { return 0;}
+ _LT_EOF
+@@ -8604,6 +9098,16 @@ done
+
+
+
++func_stripname_cnf ()
++{
++ case ${2} in
++ .*) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%\\\\${2}\$%%"`;;
++ *) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%${2}\$%%"`;;
++ esac
++} # func_stripname_cnf
++
++
++
+
+
+ # Set options
+@@ -9119,8 +9623,6 @@ fi
+ lt_prog_compiler_pic=
+ lt_prog_compiler_static=
+
+-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $compiler option to produce PIC" >&5
+-$as_echo_n "checking for $compiler option to produce PIC... " >&6; }
+
+ if test "$GCC" = yes; then
+ lt_prog_compiler_wl='-Wl,'
+@@ -9286,6 +9788,12 @@ $as_echo_n "checking for $compiler option to produce PIC... " >&6; }
+ lt_prog_compiler_pic='--shared'
+ lt_prog_compiler_static='--static'
+ ;;
++ nagfor*)
++ # NAG Fortran compiler
++ lt_prog_compiler_wl='-Wl,-Wl,,'
++ lt_prog_compiler_pic='-PIC'
++ lt_prog_compiler_static='-Bstatic'
++ ;;
+ pgcc* | pgf77* | pgf90* | pgf95* | pgfortran*)
+ # Portland Group compilers (*not* the Pentium gcc compiler,
+ # which looks to be a dead project)
+@@ -9348,7 +9856,7 @@ $as_echo_n "checking for $compiler option to produce PIC... " >&6; }
+ lt_prog_compiler_pic='-KPIC'
+ lt_prog_compiler_static='-Bstatic'
+ case $cc_basename in
+- f77* | f90* | f95*)
++ f77* | f90* | f95* | sunf77* | sunf90* | sunf95*)
+ lt_prog_compiler_wl='-Qoption ld ';;
+ *)
+ lt_prog_compiler_wl='-Wl,';;
+@@ -9405,13 +9913,17 @@ case $host_os in
+ lt_prog_compiler_pic="$lt_prog_compiler_pic -DPIC"
+ ;;
+ esac
+-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_prog_compiler_pic" >&5
+-$as_echo "$lt_prog_compiler_pic" >&6; }
+-
+-
+-
+-
+
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $compiler option to produce PIC" >&5
++$as_echo_n "checking for $compiler option to produce PIC... " >&6; }
++if ${lt_cv_prog_compiler_pic+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ lt_cv_prog_compiler_pic=$lt_prog_compiler_pic
++fi
++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_pic" >&5
++$as_echo "$lt_cv_prog_compiler_pic" >&6; }
++lt_prog_compiler_pic=$lt_cv_prog_compiler_pic
+
+ #
+ # Check to make sure the PIC flag actually works.
+@@ -9472,6 +9984,11 @@ fi
+
+
+
++
++
++
++
++
+ #
+ # Check to make sure the static flag actually works.
+ #
+@@ -9822,7 +10339,8 @@ _LT_EOF
+ allow_undefined_flag=unsupported
+ always_export_symbols=no
+ enable_shared_with_static_runtimes=yes
+- export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1 DATA/'\'' | $SED -e '\''/^[AITW][ ]/s/.*[ ]//'\'' | sort | uniq > $export_symbols'
++ export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1 DATA/;s/^.*[ ]__nm__\([^ ]*\)[ ][^ ]*/\1 DATA/;/^I[ ]/d;/^[AITW][ ]/s/.* //'\'' | sort | uniq > $export_symbols'
++ exclude_expsyms='[_]+GLOBAL_OFFSET_TABLE_|[_]+GLOBAL__[FID]_.*|[_]+head_[A-Za-z0-9_]+_dll|[A-Za-z0-9_]+_dll_iname'
+
+ if $LD --help 2>&1 | $GREP 'auto-import' > /dev/null; then
+ archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib'
+@@ -9921,12 +10439,12 @@ _LT_EOF
+ whole_archive_flag_spec='--whole-archive$convenience --no-whole-archive'
+ hardcode_libdir_flag_spec=
+ hardcode_libdir_flag_spec_ld='-rpath $libdir'
+- archive_cmds='$LD -shared $libobjs $deplibs $compiler_flags -soname $soname -o $lib'
++ archive_cmds='$LD -shared $libobjs $deplibs $linker_flags -soname $soname -o $lib'
+ if test "x$supports_anon_versioning" = xyes; then
+ archive_expsym_cmds='echo "{ global:" > $output_objdir/$libname.ver~
+ cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~
+ echo "local: *; };" >> $output_objdir/$libname.ver~
+- $LD -shared $libobjs $deplibs $compiler_flags -soname $soname -version-script $output_objdir/$libname.ver -o $lib'
++ $LD -shared $libobjs $deplibs $linker_flags -soname $soname -version-script $output_objdir/$libname.ver -o $lib'
+ fi
+ ;;
+ esac
+@@ -9940,8 +10458,8 @@ _LT_EOF
+ archive_cmds='$LD -Bshareable $libobjs $deplibs $linker_flags -o $lib'
+ wlarc=
+ else
+- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
+- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
++ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
++ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
+ fi
+ ;;
+
+@@ -9959,8 +10477,8 @@ _LT_EOF
+
+ _LT_EOF
+ elif $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then
+- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
+- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
++ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
++ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
+ else
+ ld_shlibs=no
+ fi
+@@ -10006,8 +10524,8 @@ _LT_EOF
+
+ *)
+ if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then
+- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
+- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
++ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
++ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
+ else
+ ld_shlibs=no
+ fi
+@@ -10137,7 +10655,13 @@ _LT_EOF
+ allow_undefined_flag='-berok'
+ # Determine the default libpath from the value encoded in an
+ # empty executable.
+- cat confdefs.h - <<_ACEOF >conftest.$ac_ext
++ if test "${lt_cv_aix_libpath+set}" = set; then
++ aix_libpath=$lt_cv_aix_libpath
++else
++ if ${lt_cv_aix_libpath_+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+ /* end confdefs.h. */
+
+ int
+@@ -10150,22 +10674,29 @@ main ()
+ _ACEOF
+ if ac_fn_c_try_link "$LINENO"; then :
+
+-lt_aix_libpath_sed='
+- /Import File Strings/,/^$/ {
+- /^0/ {
+- s/^0 *\(.*\)$/\1/
+- p
+- }
+- }'
+-aix_libpath=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
+-# Check for a 64-bit object if we didn't find anything.
+-if test -z "$aix_libpath"; then
+- aix_libpath=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
+-fi
++ lt_aix_libpath_sed='
++ /Import File Strings/,/^$/ {
++ /^0/ {
++ s/^0 *\([^ ]*\) *$/\1/
++ p
++ }
++ }'
++ lt_cv_aix_libpath_=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
++ # Check for a 64-bit object if we didn't find anything.
++ if test -z "$lt_cv_aix_libpath_"; then
++ lt_cv_aix_libpath_=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
++ fi
+ fi
+ rm -f core conftest.err conftest.$ac_objext \
+ conftest$ac_exeext conftest.$ac_ext
+-if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
++ if test -z "$lt_cv_aix_libpath_"; then
++ lt_cv_aix_libpath_="/usr/lib:/lib"
++ fi
++
++fi
++
++ aix_libpath=$lt_cv_aix_libpath_
++fi
+
+ hardcode_libdir_flag_spec='${wl}-blibpath:$libdir:'"$aix_libpath"
+ archive_expsym_cmds='$CC -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags `if test "x${allow_undefined_flag}" != "x"; then func_echo_all "${wl}${allow_undefined_flag}"; else :; fi` '"\${wl}$exp_sym_flag:\$export_symbols $shared_flag"
+@@ -10177,7 +10708,13 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
+ else
+ # Determine the default libpath from the value encoded in an
+ # empty executable.
+- cat confdefs.h - <<_ACEOF >conftest.$ac_ext
++ if test "${lt_cv_aix_libpath+set}" = set; then
++ aix_libpath=$lt_cv_aix_libpath
++else
++ if ${lt_cv_aix_libpath_+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+ /* end confdefs.h. */
+
+ int
+@@ -10190,22 +10727,29 @@ main ()
+ _ACEOF
+ if ac_fn_c_try_link "$LINENO"; then :
+
+-lt_aix_libpath_sed='
+- /Import File Strings/,/^$/ {
+- /^0/ {
+- s/^0 *\(.*\)$/\1/
+- p
+- }
+- }'
+-aix_libpath=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
+-# Check for a 64-bit object if we didn't find anything.
+-if test -z "$aix_libpath"; then
+- aix_libpath=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
+-fi
++ lt_aix_libpath_sed='
++ /Import File Strings/,/^$/ {
++ /^0/ {
++ s/^0 *\([^ ]*\) *$/\1/
++ p
++ }
++ }'
++ lt_cv_aix_libpath_=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
++ # Check for a 64-bit object if we didn't find anything.
++ if test -z "$lt_cv_aix_libpath_"; then
++ lt_cv_aix_libpath_=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
++ fi
+ fi
+ rm -f core conftest.err conftest.$ac_objext \
+ conftest$ac_exeext conftest.$ac_ext
+-if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
++ if test -z "$lt_cv_aix_libpath_"; then
++ lt_cv_aix_libpath_="/usr/lib:/lib"
++ fi
++
++fi
++
++ aix_libpath=$lt_cv_aix_libpath_
++fi
+
+ hardcode_libdir_flag_spec='${wl}-blibpath:$libdir:'"$aix_libpath"
+ # Warning - without using the other run time loading flags,
+@@ -10250,20 +10794,63 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
+ # Microsoft Visual C++.
+ # hardcode_libdir_flag_spec is actually meaningless, as there is
+ # no search path for DLLs.
+- hardcode_libdir_flag_spec=' '
+- allow_undefined_flag=unsupported
+- # Tell ltmain to make .lib files, not .a files.
+- libext=lib
+- # Tell ltmain to make .dll files, not .so files.
+- shrext_cmds=".dll"
+- # FIXME: Setting linknames here is a bad hack.
+- archive_cmds='$CC -o $lib $libobjs $compiler_flags `func_echo_all "$deplibs" | $SED '\''s/ -lc$//'\''` -link -dll~linknames='
+- # The linker will automatically build a .lib file if we build a DLL.
+- old_archive_from_new_cmds='true'
+- # FIXME: Should let the user specify the lib program.
+- old_archive_cmds='lib -OUT:$oldlib$oldobjs$old_deplibs'
+- fix_srcfile_path='`cygpath -w "$srcfile"`'
+- enable_shared_with_static_runtimes=yes
++ case $cc_basename in
++ cl*)
++ # Native MSVC
++ hardcode_libdir_flag_spec=' '
++ allow_undefined_flag=unsupported
++ always_export_symbols=yes
++ file_list_spec='@'
++ # Tell ltmain to make .lib files, not .a files.
++ libext=lib
++ # Tell ltmain to make .dll files, not .so files.
++ shrext_cmds=".dll"
++ # FIXME: Setting linknames here is a bad hack.
++ archive_cmds='$CC -o $output_objdir/$soname $libobjs $compiler_flags $deplibs -Wl,-dll~linknames='
++ archive_expsym_cmds='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then
++ sed -n -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' -e '1\\\!p' < $export_symbols > $output_objdir/$soname.exp;
++ else
++ sed -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' < $export_symbols > $output_objdir/$soname.exp;
++ fi~
++ $CC -o $tool_output_objdir$soname $libobjs $compiler_flags $deplibs "@$tool_output_objdir$soname.exp" -Wl,-DLL,-IMPLIB:"$tool_output_objdir$libname.dll.lib"~
++ linknames='
++ # The linker will not automatically build a static lib if we build a DLL.
++ # _LT_TAGVAR(old_archive_from_new_cmds, )='true'
++ enable_shared_with_static_runtimes=yes
++ export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1,DATA/'\'' | $SED -e '\''/^[AITW][ ]/s/.*[ ]//'\'' | sort | uniq > $export_symbols'
++ # Don't use ranlib
++ old_postinstall_cmds='chmod 644 $oldlib'
++ postlink_cmds='lt_outputfile="@OUTPUT@"~
++ lt_tool_outputfile="@TOOL_OUTPUT@"~
++ case $lt_outputfile in
++ *.exe|*.EXE) ;;
++ *)
++ lt_outputfile="$lt_outputfile.exe"
++ lt_tool_outputfile="$lt_tool_outputfile.exe"
++ ;;
++ esac~
++ if test "$MANIFEST_TOOL" != ":" && test -f "$lt_outputfile.manifest"; then
++ $MANIFEST_TOOL -manifest "$lt_tool_outputfile.manifest" -outputresource:"$lt_tool_outputfile" || exit 1;
++ $RM "$lt_outputfile.manifest";
++ fi'
++ ;;
++ *)
++ # Assume MSVC wrapper
++ hardcode_libdir_flag_spec=' '
++ allow_undefined_flag=unsupported
++ # Tell ltmain to make .lib files, not .a files.
++ libext=lib
++ # Tell ltmain to make .dll files, not .so files.
++ shrext_cmds=".dll"
++ # FIXME: Setting linknames here is a bad hack.
++ archive_cmds='$CC -o $lib $libobjs $compiler_flags `func_echo_all "$deplibs" | $SED '\''s/ -lc$//'\''` -link -dll~linknames='
++ # The linker will automatically build a .lib file if we build a DLL.
++ old_archive_from_new_cmds='true'
++ # FIXME: Should let the user specify the lib program.
++ old_archive_cmds='lib -OUT:$oldlib$oldobjs$old_deplibs'
++ enable_shared_with_static_runtimes=yes
++ ;;
++ esac
+ ;;
+
+ darwin* | rhapsody*)
+@@ -10324,7 +10911,7 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
+
+ # FreeBSD 3 and greater uses gcc -shared to do shared libraries.
+ freebsd* | dragonfly*)
+- archive_cmds='$CC -shared -o $lib $libobjs $deplibs $compiler_flags'
++ archive_cmds='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags'
+ hardcode_libdir_flag_spec='-R$libdir'
+ hardcode_direct=yes
+ hardcode_shlibpath_var=no
+@@ -10332,7 +10919,7 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
+
+ hpux9*)
+ if test "$GCC" = yes; then
+- archive_cmds='$RM $output_objdir/$soname~$CC -shared -fPIC ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $libobjs $deplibs $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib'
++ archive_cmds='$RM $output_objdir/$soname~$CC -shared $pic_flag ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $libobjs $deplibs $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib'
+ else
+ archive_cmds='$RM $output_objdir/$soname~$LD -b +b $install_libdir -o $output_objdir/$soname $libobjs $deplibs $linker_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib'
+ fi
+@@ -10348,7 +10935,7 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
+
+ hpux10*)
+ if test "$GCC" = yes && test "$with_gnu_ld" = no; then
+- archive_cmds='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'
++ archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'
+ else
+ archive_cmds='$LD -b +h $soname +b $install_libdir -o $lib $libobjs $deplibs $linker_flags'
+ fi
+@@ -10372,10 +10959,10 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
+ archive_cmds='$CC -shared ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags'
+ ;;
+ ia64*)
+- archive_cmds='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags'
++ archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags'
+ ;;
+ *)
+- archive_cmds='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'
++ archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'
+ ;;
+ esac
+ else
+@@ -10454,23 +11041,36 @@ fi
+
+ irix5* | irix6* | nonstopux*)
+ if test "$GCC" = yes; then
+- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
++ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
+ # Try to use the -exported_symbol ld option, if it does not
+ # work, assume that -exports_file does not work either and
+ # implicitly export all symbols.
+- save_LDFLAGS="$LDFLAGS"
+- LDFLAGS="$LDFLAGS -shared ${wl}-exported_symbol ${wl}foo ${wl}-update_registry ${wl}/dev/null"
+- cat confdefs.h - <<_ACEOF >conftest.$ac_ext
++ # This should be the same for all languages, so no per-tag cache variable.
++ { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the $host_os linker accepts -exported_symbol" >&5
++$as_echo_n "checking whether the $host_os linker accepts -exported_symbol... " >&6; }
++if ${lt_cv_irix_exported_symbol+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ save_LDFLAGS="$LDFLAGS"
++ LDFLAGS="$LDFLAGS -shared ${wl}-exported_symbol ${wl}foo ${wl}-update_registry ${wl}/dev/null"
++ cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+ /* end confdefs.h. */
+-int foo(void) {}
++int foo (void) { return 0; }
+ _ACEOF
+ if ac_fn_c_try_link "$LINENO"; then :
+- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations ${wl}-exports_file ${wl}$export_symbols -o $lib'
+-
++ lt_cv_irix_exported_symbol=yes
++else
++ lt_cv_irix_exported_symbol=no
+ fi
+ rm -f core conftest.err conftest.$ac_objext \
+ conftest$ac_exeext conftest.$ac_ext
+- LDFLAGS="$save_LDFLAGS"
++ LDFLAGS="$save_LDFLAGS"
++fi
++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_irix_exported_symbol" >&5
++$as_echo "$lt_cv_irix_exported_symbol" >&6; }
++ if test "$lt_cv_irix_exported_symbol" = yes; then
++ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations ${wl}-exports_file ${wl}$export_symbols -o $lib'
++ fi
+ else
+ archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib'
+ archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -exports_file $export_symbols -o $lib'
+@@ -10555,7 +11155,7 @@ rm -f core conftest.err conftest.$ac_objext \
+ osf4* | osf5*) # as osf3* with the addition of -msym flag
+ if test "$GCC" = yes; then
+ allow_undefined_flag=' ${wl}-expect_unresolved ${wl}\*'
+- archive_cmds='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
++ archive_cmds='$CC -shared${allow_undefined_flag} $pic_flag $libobjs $deplibs $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
+ hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir'
+ else
+ allow_undefined_flag=' -expect_unresolved \*'
+@@ -10574,9 +11174,9 @@ rm -f core conftest.err conftest.$ac_objext \
+ no_undefined_flag=' -z defs'
+ if test "$GCC" = yes; then
+ wlarc='${wl}'
+- archive_cmds='$CC -shared ${wl}-z ${wl}text ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags'
++ archive_cmds='$CC -shared $pic_flag ${wl}-z ${wl}text ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags'
+ archive_expsym_cmds='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~
+- $CC -shared ${wl}-z ${wl}text ${wl}-M ${wl}$lib.exp ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp'
++ $CC -shared $pic_flag ${wl}-z ${wl}text ${wl}-M ${wl}$lib.exp ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp'
+ else
+ case `$CC -V 2>&1` in
+ *"Compilers 5.0"*)
+@@ -11152,8 +11752,9 @@ cygwin* | mingw* | pw32* | cegcc*)
+ need_version=no
+ need_lib_prefix=no
+
+- case $GCC,$host_os in
+- yes,cygwin* | yes,mingw* | yes,pw32* | yes,cegcc*)
++ case $GCC,$cc_basename in
++ yes,*)
++ # gcc
+ library_names_spec='$libname.dll.a'
+ # DLL is installed to $(libdir)/../bin by postinstall_cmds
+ postinstall_cmds='base_file=`basename \${file}`~
+@@ -11186,13 +11787,71 @@ cygwin* | mingw* | pw32* | cegcc*)
+ library_names_spec='`echo ${libname} | sed -e 's/^lib/pw/'``echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}'
+ ;;
+ esac
++ dynamic_linker='Win32 ld.exe'
++ ;;
++
++ *,cl*)
++ # Native MSVC
++ libname_spec='$name'
++ soname_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}'
++ library_names_spec='${libname}.dll.lib'
++
++ case $build_os in
++ mingw*)
++ sys_lib_search_path_spec=
++ lt_save_ifs=$IFS
++ IFS=';'
++ for lt_path in $LIB
++ do
++ IFS=$lt_save_ifs
++ # Let DOS variable expansion print the short 8.3 style file name.
++ lt_path=`cd "$lt_path" 2>/dev/null && cmd //C "for %i in (".") do @echo %~si"`
++ sys_lib_search_path_spec="$sys_lib_search_path_spec $lt_path"
++ done
++ IFS=$lt_save_ifs
++ # Convert to MSYS style.
++ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | sed -e 's|\\\\|/|g' -e 's| \\([a-zA-Z]\\):| /\\1|g' -e 's|^ ||'`
++ ;;
++ cygwin*)
++ # Convert to unix form, then to dos form, then back to unix form
++ # but this time dos style (no spaces!) so that the unix form looks
++ # like /cygdrive/c/PROGRA~1:/cygdr...
++ sys_lib_search_path_spec=`cygpath --path --unix "$LIB"`
++ sys_lib_search_path_spec=`cygpath --path --dos "$sys_lib_search_path_spec" 2>/dev/null`
++ sys_lib_search_path_spec=`cygpath --path --unix "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"`
++ ;;
++ *)
++ sys_lib_search_path_spec="$LIB"
++ if $ECHO "$sys_lib_search_path_spec" | $GREP ';[c-zC-Z]:/' >/dev/null; then
++ # It is most probably a Windows format PATH.
++ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e 's/;/ /g'`
++ else
++ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"`
++ fi
++ # FIXME: find the short name or the path components, as spaces are
++ # common. (e.g. "Program Files" -> "PROGRA~1")
++ ;;
++ esac
++
++ # DLL is installed to $(libdir)/../bin by postinstall_cmds
++ postinstall_cmds='base_file=`basename \${file}`~
++ dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\${base_file}'\''i; echo \$dlname'\''`~
++ dldir=$destdir/`dirname \$dlpath`~
++ test -d \$dldir || mkdir -p \$dldir~
++ $install_prog $dir/$dlname \$dldir/$dlname'
++ postuninstall_cmds='dldll=`$SHELL 2>&1 -c '\''. $file; echo \$dlname'\''`~
++ dlpath=$dir/\$dldll~
++ $RM \$dlpath'
++ shlibpath_overrides_runpath=yes
++ dynamic_linker='Win32 link.exe'
+ ;;
+
+ *)
++ # Assume MSVC wrapper
+ library_names_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext} $libname.lib'
++ dynamic_linker='Win32 ld.exe'
+ ;;
+ esac
+- dynamic_linker='Win32 ld.exe'
+ # FIXME: first we should search . and the directory the executable is in
+ shlibpath_var=PATH
+ ;;
+@@ -12094,7 +12753,7 @@ else
+ lt_dlunknown=0; lt_dlno_uscore=1; lt_dlneed_uscore=2
+ lt_status=$lt_dlunknown
+ cat > conftest.$ac_ext <<_LT_EOF
+-#line 12097 "configure"
++#line $LINENO "configure"
+ #include "confdefs.h"
+
+ #if HAVE_DLFCN_H
+@@ -12138,10 +12797,10 @@ else
+ /* When -fvisbility=hidden is used, assume the code has been annotated
+ correspondingly for the symbols needed. */
+ #if defined(__GNUC__) && (((__GNUC__ == 3) && (__GNUC_MINOR__ >= 3)) || (__GNUC__ > 3))
+-void fnord () __attribute__((visibility("default")));
++int fnord () __attribute__((visibility("default")));
+ #endif
+
+-void fnord () { int i=42; }
++int fnord () { return 42; }
+ int main ()
+ {
+ void *self = dlopen (0, LT_DLGLOBAL|LT_DLLAZY_OR_NOW);
+@@ -12200,7 +12859,7 @@ else
+ lt_dlunknown=0; lt_dlno_uscore=1; lt_dlneed_uscore=2
+ lt_status=$lt_dlunknown
+ cat > conftest.$ac_ext <<_LT_EOF
+-#line 12203 "configure"
++#line $LINENO "configure"
+ #include "confdefs.h"
+
+ #if HAVE_DLFCN_H
+@@ -12244,10 +12903,10 @@ else
+ /* When -fvisbility=hidden is used, assume the code has been annotated
+ correspondingly for the symbols needed. */
+ #if defined(__GNUC__) && (((__GNUC__ == 3) && (__GNUC_MINOR__ >= 3)) || (__GNUC__ > 3))
+-void fnord () __attribute__((visibility("default")));
++int fnord () __attribute__((visibility("default")));
+ #endif
+
+-void fnord () { int i=42; }
++int fnord () { return 42; }
+ int main ()
+ {
+ void *self = dlopen (0, LT_DLGLOBAL|LT_DLLAZY_OR_NOW);
+@@ -12639,6 +13298,7 @@ $RM -r conftest*
+
+ # Allow CC to be a program name with arguments.
+ lt_save_CC=$CC
++ lt_save_CFLAGS=$CFLAGS
+ lt_save_LD=$LD
+ lt_save_GCC=$GCC
+ GCC=$GXX
+@@ -12656,6 +13316,7 @@ $RM -r conftest*
+ fi
+ test -z "${LDCXX+set}" || LD=$LDCXX
+ CC=${CXX-"c++"}
++ CFLAGS=$CXXFLAGS
+ compiler=$CC
+ compiler_CXX=$CC
+ for cc_temp in $compiler""; do
+@@ -12938,7 +13599,13 @@ $as_echo_n "checking whether the $compiler linker ($LD) supports shared librarie
+ allow_undefined_flag_CXX='-berok'
+ # Determine the default libpath from the value encoded in an empty
+ # executable.
+- cat confdefs.h - <<_ACEOF >conftest.$ac_ext
++ if test "${lt_cv_aix_libpath+set}" = set; then
++ aix_libpath=$lt_cv_aix_libpath
++else
++ if ${lt_cv_aix_libpath__CXX+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+ /* end confdefs.h. */
+
+ int
+@@ -12951,22 +13618,29 @@ main ()
+ _ACEOF
+ if ac_fn_cxx_try_link "$LINENO"; then :
+
+-lt_aix_libpath_sed='
+- /Import File Strings/,/^$/ {
+- /^0/ {
+- s/^0 *\(.*\)$/\1/
+- p
+- }
+- }'
+-aix_libpath=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
+-# Check for a 64-bit object if we didn't find anything.
+-if test -z "$aix_libpath"; then
+- aix_libpath=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
+-fi
++ lt_aix_libpath_sed='
++ /Import File Strings/,/^$/ {
++ /^0/ {
++ s/^0 *\([^ ]*\) *$/\1/
++ p
++ }
++ }'
++ lt_cv_aix_libpath__CXX=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
++ # Check for a 64-bit object if we didn't find anything.
++ if test -z "$lt_cv_aix_libpath__CXX"; then
++ lt_cv_aix_libpath__CXX=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
++ fi
+ fi
+ rm -f core conftest.err conftest.$ac_objext \
+ conftest$ac_exeext conftest.$ac_ext
+-if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
++ if test -z "$lt_cv_aix_libpath__CXX"; then
++ lt_cv_aix_libpath__CXX="/usr/lib:/lib"
++ fi
++
++fi
++
++ aix_libpath=$lt_cv_aix_libpath__CXX
++fi
+
+ hardcode_libdir_flag_spec_CXX='${wl}-blibpath:$libdir:'"$aix_libpath"
+
+@@ -12979,7 +13653,13 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
+ else
+ # Determine the default libpath from the value encoded in an
+ # empty executable.
+- cat confdefs.h - <<_ACEOF >conftest.$ac_ext
++ if test "${lt_cv_aix_libpath+set}" = set; then
++ aix_libpath=$lt_cv_aix_libpath
++else
++ if ${lt_cv_aix_libpath__CXX+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+ /* end confdefs.h. */
+
+ int
+@@ -12992,22 +13672,29 @@ main ()
+ _ACEOF
+ if ac_fn_cxx_try_link "$LINENO"; then :
+
+-lt_aix_libpath_sed='
+- /Import File Strings/,/^$/ {
+- /^0/ {
+- s/^0 *\(.*\)$/\1/
+- p
+- }
+- }'
+-aix_libpath=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
+-# Check for a 64-bit object if we didn't find anything.
+-if test -z "$aix_libpath"; then
+- aix_libpath=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
+-fi
++ lt_aix_libpath_sed='
++ /Import File Strings/,/^$/ {
++ /^0/ {
++ s/^0 *\([^ ]*\) *$/\1/
++ p
++ }
++ }'
++ lt_cv_aix_libpath__CXX=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
++ # Check for a 64-bit object if we didn't find anything.
++ if test -z "$lt_cv_aix_libpath__CXX"; then
++ lt_cv_aix_libpath__CXX=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
++ fi
+ fi
+ rm -f core conftest.err conftest.$ac_objext \
+ conftest$ac_exeext conftest.$ac_ext
+-if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
++ if test -z "$lt_cv_aix_libpath__CXX"; then
++ lt_cv_aix_libpath__CXX="/usr/lib:/lib"
++ fi
++
++fi
++
++ aix_libpath=$lt_cv_aix_libpath__CXX
++fi
+
+ hardcode_libdir_flag_spec_CXX='${wl}-blibpath:$libdir:'"$aix_libpath"
+ # Warning - without using the other run time loading flags,
+@@ -13050,29 +13737,75 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
+ ;;
+
+ cygwin* | mingw* | pw32* | cegcc*)
+- # _LT_TAGVAR(hardcode_libdir_flag_spec, CXX) is actually meaningless,
+- # as there is no search path for DLLs.
+- hardcode_libdir_flag_spec_CXX='-L$libdir'
+- export_dynamic_flag_spec_CXX='${wl}--export-all-symbols'
+- allow_undefined_flag_CXX=unsupported
+- always_export_symbols_CXX=no
+- enable_shared_with_static_runtimes_CXX=yes
+-
+- if $LD --help 2>&1 | $GREP 'auto-import' > /dev/null; then
+- archive_cmds_CXX='$CC -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib'
+- # If the export-symbols file already is a .def file (1st line
+- # is EXPORTS), use it as is; otherwise, prepend...
+- archive_expsym_cmds_CXX='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then
+- cp $export_symbols $output_objdir/$soname.def;
+- else
+- echo EXPORTS > $output_objdir/$soname.def;
+- cat $export_symbols >> $output_objdir/$soname.def;
+- fi~
+- $CC -shared -nostdlib $output_objdir/$soname.def $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib'
+- else
+- ld_shlibs_CXX=no
+- fi
+- ;;
++ case $GXX,$cc_basename in
++ ,cl* | no,cl*)
++ # Native MSVC
++ # hardcode_libdir_flag_spec is actually meaningless, as there is
++ # no search path for DLLs.
++ hardcode_libdir_flag_spec_CXX=' '
++ allow_undefined_flag_CXX=unsupported
++ always_export_symbols_CXX=yes
++ file_list_spec_CXX='@'
++ # Tell ltmain to make .lib files, not .a files.
++ libext=lib
++ # Tell ltmain to make .dll files, not .so files.
++ shrext_cmds=".dll"
++ # FIXME: Setting linknames here is a bad hack.
++ archive_cmds_CXX='$CC -o $output_objdir/$soname $libobjs $compiler_flags $deplibs -Wl,-dll~linknames='
++ archive_expsym_cmds_CXX='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then
++ $SED -n -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' -e '1\\\!p' < $export_symbols > $output_objdir/$soname.exp;
++ else
++ $SED -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' < $export_symbols > $output_objdir/$soname.exp;
++ fi~
++ $CC -o $tool_output_objdir$soname $libobjs $compiler_flags $deplibs "@$tool_output_objdir$soname.exp" -Wl,-DLL,-IMPLIB:"$tool_output_objdir$libname.dll.lib"~
++ linknames='
++ # The linker will not automatically build a static lib if we build a DLL.
++ # _LT_TAGVAR(old_archive_from_new_cmds, CXX)='true'
++ enable_shared_with_static_runtimes_CXX=yes
++ # Don't use ranlib
++ old_postinstall_cmds_CXX='chmod 644 $oldlib'
++ postlink_cmds_CXX='lt_outputfile="@OUTPUT@"~
++ lt_tool_outputfile="@TOOL_OUTPUT@"~
++ case $lt_outputfile in
++ *.exe|*.EXE) ;;
++ *)
++ lt_outputfile="$lt_outputfile.exe"
++ lt_tool_outputfile="$lt_tool_outputfile.exe"
++ ;;
++ esac~
++ func_to_tool_file "$lt_outputfile"~
++ if test "$MANIFEST_TOOL" != ":" && test -f "$lt_outputfile.manifest"; then
++ $MANIFEST_TOOL -manifest "$lt_tool_outputfile.manifest" -outputresource:"$lt_tool_outputfile" || exit 1;
++ $RM "$lt_outputfile.manifest";
++ fi'
++ ;;
++ *)
++ # g++
++ # _LT_TAGVAR(hardcode_libdir_flag_spec, CXX) is actually meaningless,
++ # as there is no search path for DLLs.
++ hardcode_libdir_flag_spec_CXX='-L$libdir'
++ export_dynamic_flag_spec_CXX='${wl}--export-all-symbols'
++ allow_undefined_flag_CXX=unsupported
++ always_export_symbols_CXX=no
++ enable_shared_with_static_runtimes_CXX=yes
++
++ if $LD --help 2>&1 | $GREP 'auto-import' > /dev/null; then
++ archive_cmds_CXX='$CC -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib'
++ # If the export-symbols file already is a .def file (1st line
++ # is EXPORTS), use it as is; otherwise, prepend...
++ archive_expsym_cmds_CXX='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then
++ cp $export_symbols $output_objdir/$soname.def;
++ else
++ echo EXPORTS > $output_objdir/$soname.def;
++ cat $export_symbols >> $output_objdir/$soname.def;
++ fi~
++ $CC -shared -nostdlib $output_objdir/$soname.def $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib'
++ else
++ ld_shlibs_CXX=no
++ fi
++ ;;
++ esac
++ ;;
+ darwin* | rhapsody*)
+
+
+@@ -13178,7 +13911,7 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
+ ;;
+ *)
+ if test "$GXX" = yes; then
+- archive_cmds_CXX='$RM $output_objdir/$soname~$CC -shared -nostdlib -fPIC ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib'
++ archive_cmds_CXX='$RM $output_objdir/$soname~$CC -shared -nostdlib $pic_flag ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib'
+ else
+ # FIXME: insert proper C++ library support
+ ld_shlibs_CXX=no
+@@ -13249,10 +13982,10 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
+ archive_cmds_CXX='$CC -shared -nostdlib -fPIC ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags'
+ ;;
+ ia64*)
+- archive_cmds_CXX='$CC -shared -nostdlib -fPIC ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags'
++ archive_cmds_CXX='$CC -shared -nostdlib $pic_flag ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags'
+ ;;
+ *)
+- archive_cmds_CXX='$CC -shared -nostdlib -fPIC ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags'
++ archive_cmds_CXX='$CC -shared -nostdlib $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags'
+ ;;
+ esac
+ fi
+@@ -13293,9 +14026,9 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
+ *)
+ if test "$GXX" = yes; then
+ if test "$with_gnu_ld" = no; then
+- archive_cmds_CXX='$CC -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
++ archive_cmds_CXX='$CC -shared $pic_flag -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
+ else
+- archive_cmds_CXX='$CC -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` -o $lib'
++ archive_cmds_CXX='$CC -shared $pic_flag -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` -o $lib'
+ fi
+ fi
+ link_all_deplibs_CXX=yes
+@@ -13573,7 +14306,7 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
+ archive_cmds_CXX='$CC -shared -nostdlib ${allow_undefined_flag} $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
+ ;;
+ *)
+- archive_cmds_CXX='$CC -shared -nostdlib ${allow_undefined_flag} $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
++ archive_cmds_CXX='$CC -shared $pic_flag -nostdlib ${allow_undefined_flag} $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
+ ;;
+ esac
+
+@@ -13619,7 +14352,7 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
+
+ solaris*)
+ case $cc_basename in
+- CC*)
++ CC* | sunCC*)
+ # Sun C++ 4.2, 5.x and Centerline C++
+ archive_cmds_need_lc_CXX=yes
+ no_undefined_flag_CXX=' -zdefs'
+@@ -13660,9 +14393,9 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
+ if test "$GXX" = yes && test "$with_gnu_ld" = no; then
+ no_undefined_flag_CXX=' ${wl}-z ${wl}defs'
+ if $CC --version | $GREP -v '^2\.7' > /dev/null; then
+- archive_cmds_CXX='$CC -shared -nostdlib $LDFLAGS $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-h $wl$soname -o $lib'
++ archive_cmds_CXX='$CC -shared $pic_flag -nostdlib $LDFLAGS $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-h $wl$soname -o $lib'
+ archive_expsym_cmds_CXX='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~
+- $CC -shared -nostdlib ${wl}-M $wl$lib.exp -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~$RM $lib.exp'
++ $CC -shared $pic_flag -nostdlib ${wl}-M $wl$lib.exp -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~$RM $lib.exp'
+
+ # Commands to make compiler produce verbose output that lists
+ # what "hidden" libraries, object files and flags are used when
+@@ -13798,6 +14531,13 @@ private:
+ };
+ _LT_EOF
+
++
++_lt_libdeps_save_CFLAGS=$CFLAGS
++case "$CC $CFLAGS " in #(
++*\ -flto*\ *) CFLAGS="$CFLAGS -fno-lto" ;;
++*\ -fwhopr*\ *) CFLAGS="$CFLAGS -fno-whopr" ;;
++esac
++
+ if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5
+ (eval $ac_compile) 2>&5
+ ac_status=$?
+@@ -13811,7 +14551,7 @@ if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5
+ pre_test_object_deps_done=no
+
+ for p in `eval "$output_verbose_link_cmd"`; do
+- case $p in
++ case ${prev}${p} in
+
+ -L* | -R* | -l*)
+ # Some compilers place space between "-{L,R}" and the path.
+@@ -13820,13 +14560,22 @@ if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5
+ test $p = "-R"; then
+ prev=$p
+ continue
+- else
+- prev=
+ fi
+
++ # Expand the sysroot to ease extracting the directories later.
++ if test -z "$prev"; then
++ case $p in
++ -L*) func_stripname_cnf '-L' '' "$p"; prev=-L; p=$func_stripname_result ;;
++ -R*) func_stripname_cnf '-R' '' "$p"; prev=-R; p=$func_stripname_result ;;
++ -l*) func_stripname_cnf '-l' '' "$p"; prev=-l; p=$func_stripname_result ;;
++ esac
++ fi
++ case $p in
++ =*) func_stripname_cnf '=' '' "$p"; p=$lt_sysroot$func_stripname_result ;;
++ esac
+ if test "$pre_test_object_deps_done" = no; then
+- case $p in
+- -L* | -R*)
++ case ${prev} in
++ -L | -R)
+ # Internal compiler library paths should come after those
+ # provided the user. The postdeps already come after the
+ # user supplied libs so there is no need to process them.
+@@ -13846,8 +14595,10 @@ if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5
+ postdeps_CXX="${postdeps_CXX} ${prev}${p}"
+ fi
+ fi
++ prev=
+ ;;
+
++ *.lto.$objext) ;; # Ignore GCC LTO objects
+ *.$objext)
+ # This assumes that the test object file only shows up
+ # once in the compiler output.
+@@ -13883,6 +14634,7 @@ else
+ fi
+
+ $RM -f confest.$objext
++CFLAGS=$_lt_libdeps_save_CFLAGS
+
+ # PORTME: override above test on systems where it is broken
+ case $host_os in
+@@ -13918,7 +14670,7 @@ linux*)
+
+ solaris*)
+ case $cc_basename in
+- CC*)
++ CC* | sunCC*)
+ # The more standards-conforming stlport4 library is
+ # incompatible with the Cstd library. Avoid specifying
+ # it if it's in CXXFLAGS. Ignore libCrun as
+@@ -13983,8 +14735,6 @@ fi
+ lt_prog_compiler_pic_CXX=
+ lt_prog_compiler_static_CXX=
+
+-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $compiler option to produce PIC" >&5
+-$as_echo_n "checking for $compiler option to produce PIC... " >&6; }
+
+ # C++ specific cases for pic, static, wl, etc.
+ if test "$GXX" = yes; then
+@@ -14089,6 +14839,11 @@ $as_echo_n "checking for $compiler option to produce PIC... " >&6; }
+ ;;
+ esac
+ ;;
++ mingw* | cygwin* | os2* | pw32* | cegcc*)
++ # This hack is so that the source file can tell whether it is being
++ # built for inclusion in a dll (and should export symbols for example).
++ lt_prog_compiler_pic_CXX='-DDLL_EXPORT'
++ ;;
+ dgux*)
+ case $cc_basename in
+ ec++*)
+@@ -14241,7 +14996,7 @@ $as_echo_n "checking for $compiler option to produce PIC... " >&6; }
+ ;;
+ solaris*)
+ case $cc_basename in
+- CC*)
++ CC* | sunCC*)
+ # Sun C++ 4.2, 5.x and Centerline C++
+ lt_prog_compiler_pic_CXX='-KPIC'
+ lt_prog_compiler_static_CXX='-Bstatic'
+@@ -14306,10 +15061,17 @@ case $host_os in
+ lt_prog_compiler_pic_CXX="$lt_prog_compiler_pic_CXX -DPIC"
+ ;;
+ esac
+-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_prog_compiler_pic_CXX" >&5
+-$as_echo "$lt_prog_compiler_pic_CXX" >&6; }
+-
+
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $compiler option to produce PIC" >&5
++$as_echo_n "checking for $compiler option to produce PIC... " >&6; }
++if ${lt_cv_prog_compiler_pic_CXX+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ lt_cv_prog_compiler_pic_CXX=$lt_prog_compiler_pic_CXX
++fi
++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_pic_CXX" >&5
++$as_echo "$lt_cv_prog_compiler_pic_CXX" >&6; }
++lt_prog_compiler_pic_CXX=$lt_cv_prog_compiler_pic_CXX
+
+ #
+ # Check to make sure the PIC flag actually works.
+@@ -14367,6 +15129,8 @@ fi
+
+
+
++
++
+ #
+ # Check to make sure the static flag actually works.
+ #
+@@ -14544,6 +15308,7 @@ fi
+ $as_echo_n "checking whether the $compiler linker ($LD) supports shared libraries... " >&6; }
+
+ export_symbols_cmds_CXX='$NM $libobjs $convenience | $global_symbol_pipe | $SED '\''s/.* //'\'' | sort | uniq > $export_symbols'
++ exclude_expsyms_CXX='_GLOBAL_OFFSET_TABLE_|_GLOBAL__F[ID]_.*'
+ case $host_os in
+ aix[4-9]*)
+ # If we're using GNU nm, then we don't want the "-C" option.
+@@ -14558,15 +15323,20 @@ $as_echo_n "checking whether the $compiler linker ($LD) supports shared librarie
+ ;;
+ pw32*)
+ export_symbols_cmds_CXX="$ltdll_cmds"
+- ;;
++ ;;
+ cygwin* | mingw* | cegcc*)
+- export_symbols_cmds_CXX='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1 DATA/;/^.*[ ]__nm__/s/^.*[ ]__nm__\([^ ]*\)[ ][^ ]*/\1 DATA/;/^I[ ]/d;/^[AITW][ ]/s/.* //'\'' | sort | uniq > $export_symbols'
+- ;;
++ case $cc_basename in
++ cl*) ;;
++ *)
++ export_symbols_cmds_CXX='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1 DATA/;s/^.*[ ]__nm__\([^ ]*\)[ ][^ ]*/\1 DATA/;/^I[ ]/d;/^[AITW][ ]/s/.* //'\'' | sort | uniq > $export_symbols'
++ exclude_expsyms_CXX='[_]+GLOBAL_OFFSET_TABLE_|[_]+GLOBAL__[FID]_.*|[_]+head_[A-Za-z0-9_]+_dll|[A-Za-z0-9_]+_dll_iname'
++ ;;
++ esac
++ ;;
+ *)
+ export_symbols_cmds_CXX='$NM $libobjs $convenience | $global_symbol_pipe | $SED '\''s/.* //'\'' | sort | uniq > $export_symbols'
+- ;;
++ ;;
+ esac
+- exclude_expsyms_CXX='_GLOBAL_OFFSET_TABLE_|_GLOBAL__F[ID]_.*'
+
+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ld_shlibs_CXX" >&5
+ $as_echo "$ld_shlibs_CXX" >&6; }
+@@ -14829,8 +15599,9 @@ cygwin* | mingw* | pw32* | cegcc*)
+ need_version=no
+ need_lib_prefix=no
+
+- case $GCC,$host_os in
+- yes,cygwin* | yes,mingw* | yes,pw32* | yes,cegcc*)
++ case $GCC,$cc_basename in
++ yes,*)
++ # gcc
+ library_names_spec='$libname.dll.a'
+ # DLL is installed to $(libdir)/../bin by postinstall_cmds
+ postinstall_cmds='base_file=`basename \${file}`~
+@@ -14862,13 +15633,71 @@ cygwin* | mingw* | pw32* | cegcc*)
+ library_names_spec='`echo ${libname} | sed -e 's/^lib/pw/'``echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}'
+ ;;
+ esac
++ dynamic_linker='Win32 ld.exe'
++ ;;
++
++ *,cl*)
++ # Native MSVC
++ libname_spec='$name'
++ soname_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}'
++ library_names_spec='${libname}.dll.lib'
++
++ case $build_os in
++ mingw*)
++ sys_lib_search_path_spec=
++ lt_save_ifs=$IFS
++ IFS=';'
++ for lt_path in $LIB
++ do
++ IFS=$lt_save_ifs
++ # Let DOS variable expansion print the short 8.3 style file name.
++ lt_path=`cd "$lt_path" 2>/dev/null && cmd //C "for %i in (".") do @echo %~si"`
++ sys_lib_search_path_spec="$sys_lib_search_path_spec $lt_path"
++ done
++ IFS=$lt_save_ifs
++ # Convert to MSYS style.
++ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | sed -e 's|\\\\|/|g' -e 's| \\([a-zA-Z]\\):| /\\1|g' -e 's|^ ||'`
++ ;;
++ cygwin*)
++ # Convert to unix form, then to dos form, then back to unix form
++ # but this time dos style (no spaces!) so that the unix form looks
++ # like /cygdrive/c/PROGRA~1:/cygdr...
++ sys_lib_search_path_spec=`cygpath --path --unix "$LIB"`
++ sys_lib_search_path_spec=`cygpath --path --dos "$sys_lib_search_path_spec" 2>/dev/null`
++ sys_lib_search_path_spec=`cygpath --path --unix "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"`
++ ;;
++ *)
++ sys_lib_search_path_spec="$LIB"
++ if $ECHO "$sys_lib_search_path_spec" | $GREP ';[c-zC-Z]:/' >/dev/null; then
++ # It is most probably a Windows format PATH.
++ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e 's/;/ /g'`
++ else
++ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"`
++ fi
++ # FIXME: find the short name or the path components, as spaces are
++ # common. (e.g. "Program Files" -> "PROGRA~1")
++ ;;
++ esac
++
++ # DLL is installed to $(libdir)/../bin by postinstall_cmds
++ postinstall_cmds='base_file=`basename \${file}`~
++ dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\${base_file}'\''i; echo \$dlname'\''`~
++ dldir=$destdir/`dirname \$dlpath`~
++ test -d \$dldir || mkdir -p \$dldir~
++ $install_prog $dir/$dlname \$dldir/$dlname'
++ postuninstall_cmds='dldll=`$SHELL 2>&1 -c '\''. $file; echo \$dlname'\''`~
++ dlpath=$dir/\$dldll~
++ $RM \$dlpath'
++ shlibpath_overrides_runpath=yes
++ dynamic_linker='Win32 link.exe'
+ ;;
+
+ *)
++ # Assume MSVC wrapper
+ library_names_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext} $libname.lib'
++ dynamic_linker='Win32 ld.exe'
+ ;;
+ esac
+- dynamic_linker='Win32 ld.exe'
+ # FIXME: first we should search . and the directory the executable is in
+ shlibpath_var=PATH
+ ;;
+@@ -15432,6 +16261,7 @@ fi
+ fi # test -n "$compiler"
+
+ CC=$lt_save_CC
++ CFLAGS=$lt_save_CFLAGS
+ LDCXX=$LD
+ LD=$lt_save_LD
+ GCC=$lt_save_GCC
+@@ -16359,13 +17189,20 @@ exeext='`$ECHO "$exeext" | $SED "$delay_single_quote_subst"`'
+ lt_unset='`$ECHO "$lt_unset" | $SED "$delay_single_quote_subst"`'
+ lt_SP2NL='`$ECHO "$lt_SP2NL" | $SED "$delay_single_quote_subst"`'
+ lt_NL2SP='`$ECHO "$lt_NL2SP" | $SED "$delay_single_quote_subst"`'
++lt_cv_to_host_file_cmd='`$ECHO "$lt_cv_to_host_file_cmd" | $SED "$delay_single_quote_subst"`'
++lt_cv_to_tool_file_cmd='`$ECHO "$lt_cv_to_tool_file_cmd" | $SED "$delay_single_quote_subst"`'
+ reload_flag='`$ECHO "$reload_flag" | $SED "$delay_single_quote_subst"`'
+ reload_cmds='`$ECHO "$reload_cmds" | $SED "$delay_single_quote_subst"`'
+ OBJDUMP='`$ECHO "$OBJDUMP" | $SED "$delay_single_quote_subst"`'
+ deplibs_check_method='`$ECHO "$deplibs_check_method" | $SED "$delay_single_quote_subst"`'
+ file_magic_cmd='`$ECHO "$file_magic_cmd" | $SED "$delay_single_quote_subst"`'
++file_magic_glob='`$ECHO "$file_magic_glob" | $SED "$delay_single_quote_subst"`'
++want_nocaseglob='`$ECHO "$want_nocaseglob" | $SED "$delay_single_quote_subst"`'
++DLLTOOL='`$ECHO "$DLLTOOL" | $SED "$delay_single_quote_subst"`'
++sharedlib_from_linklib_cmd='`$ECHO "$sharedlib_from_linklib_cmd" | $SED "$delay_single_quote_subst"`'
+ AR='`$ECHO "$AR" | $SED "$delay_single_quote_subst"`'
+ AR_FLAGS='`$ECHO "$AR_FLAGS" | $SED "$delay_single_quote_subst"`'
++archiver_list_spec='`$ECHO "$archiver_list_spec" | $SED "$delay_single_quote_subst"`'
+ STRIP='`$ECHO "$STRIP" | $SED "$delay_single_quote_subst"`'
+ RANLIB='`$ECHO "$RANLIB" | $SED "$delay_single_quote_subst"`'
+ old_postinstall_cmds='`$ECHO "$old_postinstall_cmds" | $SED "$delay_single_quote_subst"`'
+@@ -16380,14 +17217,17 @@ lt_cv_sys_global_symbol_pipe='`$ECHO "$lt_cv_sys_global_symbol_pipe" | $SED "$de
+ lt_cv_sys_global_symbol_to_cdecl='`$ECHO "$lt_cv_sys_global_symbol_to_cdecl" | $SED "$delay_single_quote_subst"`'
+ lt_cv_sys_global_symbol_to_c_name_address='`$ECHO "$lt_cv_sys_global_symbol_to_c_name_address" | $SED "$delay_single_quote_subst"`'
+ lt_cv_sys_global_symbol_to_c_name_address_lib_prefix='`$ECHO "$lt_cv_sys_global_symbol_to_c_name_address_lib_prefix" | $SED "$delay_single_quote_subst"`'
++nm_file_list_spec='`$ECHO "$nm_file_list_spec" | $SED "$delay_single_quote_subst"`'
++lt_sysroot='`$ECHO "$lt_sysroot" | $SED "$delay_single_quote_subst"`'
+ objdir='`$ECHO "$objdir" | $SED "$delay_single_quote_subst"`'
+ MAGIC_CMD='`$ECHO "$MAGIC_CMD" | $SED "$delay_single_quote_subst"`'
+ lt_prog_compiler_no_builtin_flag='`$ECHO "$lt_prog_compiler_no_builtin_flag" | $SED "$delay_single_quote_subst"`'
+-lt_prog_compiler_wl='`$ECHO "$lt_prog_compiler_wl" | $SED "$delay_single_quote_subst"`'
+ lt_prog_compiler_pic='`$ECHO "$lt_prog_compiler_pic" | $SED "$delay_single_quote_subst"`'
++lt_prog_compiler_wl='`$ECHO "$lt_prog_compiler_wl" | $SED "$delay_single_quote_subst"`'
+ lt_prog_compiler_static='`$ECHO "$lt_prog_compiler_static" | $SED "$delay_single_quote_subst"`'
+ lt_cv_prog_compiler_c_o='`$ECHO "$lt_cv_prog_compiler_c_o" | $SED "$delay_single_quote_subst"`'
+ need_locks='`$ECHO "$need_locks" | $SED "$delay_single_quote_subst"`'
++MANIFEST_TOOL='`$ECHO "$MANIFEST_TOOL" | $SED "$delay_single_quote_subst"`'
+ DSYMUTIL='`$ECHO "$DSYMUTIL" | $SED "$delay_single_quote_subst"`'
+ NMEDIT='`$ECHO "$NMEDIT" | $SED "$delay_single_quote_subst"`'
+ LIPO='`$ECHO "$LIPO" | $SED "$delay_single_quote_subst"`'
+@@ -16420,12 +17260,12 @@ hardcode_shlibpath_var='`$ECHO "$hardcode_shlibpath_var" | $SED "$delay_single_q
+ hardcode_automatic='`$ECHO "$hardcode_automatic" | $SED "$delay_single_quote_subst"`'
+ inherit_rpath='`$ECHO "$inherit_rpath" | $SED "$delay_single_quote_subst"`'
+ link_all_deplibs='`$ECHO "$link_all_deplibs" | $SED "$delay_single_quote_subst"`'
+-fix_srcfile_path='`$ECHO "$fix_srcfile_path" | $SED "$delay_single_quote_subst"`'
+ always_export_symbols='`$ECHO "$always_export_symbols" | $SED "$delay_single_quote_subst"`'
+ export_symbols_cmds='`$ECHO "$export_symbols_cmds" | $SED "$delay_single_quote_subst"`'
+ exclude_expsyms='`$ECHO "$exclude_expsyms" | $SED "$delay_single_quote_subst"`'
+ include_expsyms='`$ECHO "$include_expsyms" | $SED "$delay_single_quote_subst"`'
+ prelink_cmds='`$ECHO "$prelink_cmds" | $SED "$delay_single_quote_subst"`'
++postlink_cmds='`$ECHO "$postlink_cmds" | $SED "$delay_single_quote_subst"`'
+ file_list_spec='`$ECHO "$file_list_spec" | $SED "$delay_single_quote_subst"`'
+ variables_saved_for_relink='`$ECHO "$variables_saved_for_relink" | $SED "$delay_single_quote_subst"`'
+ need_lib_prefix='`$ECHO "$need_lib_prefix" | $SED "$delay_single_quote_subst"`'
+@@ -16464,8 +17304,8 @@ old_archive_cmds_CXX='`$ECHO "$old_archive_cmds_CXX" | $SED "$delay_single_quote
+ compiler_CXX='`$ECHO "$compiler_CXX" | $SED "$delay_single_quote_subst"`'
+ GCC_CXX='`$ECHO "$GCC_CXX" | $SED "$delay_single_quote_subst"`'
+ lt_prog_compiler_no_builtin_flag_CXX='`$ECHO "$lt_prog_compiler_no_builtin_flag_CXX" | $SED "$delay_single_quote_subst"`'
+-lt_prog_compiler_wl_CXX='`$ECHO "$lt_prog_compiler_wl_CXX" | $SED "$delay_single_quote_subst"`'
+ lt_prog_compiler_pic_CXX='`$ECHO "$lt_prog_compiler_pic_CXX" | $SED "$delay_single_quote_subst"`'
++lt_prog_compiler_wl_CXX='`$ECHO "$lt_prog_compiler_wl_CXX" | $SED "$delay_single_quote_subst"`'
+ lt_prog_compiler_static_CXX='`$ECHO "$lt_prog_compiler_static_CXX" | $SED "$delay_single_quote_subst"`'
+ lt_cv_prog_compiler_c_o_CXX='`$ECHO "$lt_cv_prog_compiler_c_o_CXX" | $SED "$delay_single_quote_subst"`'
+ archive_cmds_need_lc_CXX='`$ECHO "$archive_cmds_need_lc_CXX" | $SED "$delay_single_quote_subst"`'
+@@ -16492,12 +17332,12 @@ hardcode_shlibpath_var_CXX='`$ECHO "$hardcode_shlibpath_var_CXX" | $SED "$delay_
+ hardcode_automatic_CXX='`$ECHO "$hardcode_automatic_CXX" | $SED "$delay_single_quote_subst"`'
+ inherit_rpath_CXX='`$ECHO "$inherit_rpath_CXX" | $SED "$delay_single_quote_subst"`'
+ link_all_deplibs_CXX='`$ECHO "$link_all_deplibs_CXX" | $SED "$delay_single_quote_subst"`'
+-fix_srcfile_path_CXX='`$ECHO "$fix_srcfile_path_CXX" | $SED "$delay_single_quote_subst"`'
+ always_export_symbols_CXX='`$ECHO "$always_export_symbols_CXX" | $SED "$delay_single_quote_subst"`'
+ export_symbols_cmds_CXX='`$ECHO "$export_symbols_cmds_CXX" | $SED "$delay_single_quote_subst"`'
+ exclude_expsyms_CXX='`$ECHO "$exclude_expsyms_CXX" | $SED "$delay_single_quote_subst"`'
+ include_expsyms_CXX='`$ECHO "$include_expsyms_CXX" | $SED "$delay_single_quote_subst"`'
+ prelink_cmds_CXX='`$ECHO "$prelink_cmds_CXX" | $SED "$delay_single_quote_subst"`'
++postlink_cmds_CXX='`$ECHO "$postlink_cmds_CXX" | $SED "$delay_single_quote_subst"`'
+ file_list_spec_CXX='`$ECHO "$file_list_spec_CXX" | $SED "$delay_single_quote_subst"`'
+ hardcode_action_CXX='`$ECHO "$hardcode_action_CXX" | $SED "$delay_single_quote_subst"`'
+ compiler_lib_search_dirs_CXX='`$ECHO "$compiler_lib_search_dirs_CXX" | $SED "$delay_single_quote_subst"`'
+@@ -16535,8 +17375,13 @@ reload_flag \
+ OBJDUMP \
+ deplibs_check_method \
+ file_magic_cmd \
++file_magic_glob \
++want_nocaseglob \
++DLLTOOL \
++sharedlib_from_linklib_cmd \
+ AR \
+ AR_FLAGS \
++archiver_list_spec \
+ STRIP \
+ RANLIB \
+ CC \
+@@ -16546,12 +17391,14 @@ lt_cv_sys_global_symbol_pipe \
+ lt_cv_sys_global_symbol_to_cdecl \
+ lt_cv_sys_global_symbol_to_c_name_address \
+ lt_cv_sys_global_symbol_to_c_name_address_lib_prefix \
++nm_file_list_spec \
+ lt_prog_compiler_no_builtin_flag \
+-lt_prog_compiler_wl \
+ lt_prog_compiler_pic \
++lt_prog_compiler_wl \
+ lt_prog_compiler_static \
+ lt_cv_prog_compiler_c_o \
+ need_locks \
++MANIFEST_TOOL \
+ DSYMUTIL \
+ NMEDIT \
+ LIPO \
+@@ -16567,7 +17414,6 @@ no_undefined_flag \
+ hardcode_libdir_flag_spec \
+ hardcode_libdir_flag_spec_ld \
+ hardcode_libdir_separator \
+-fix_srcfile_path \
+ exclude_expsyms \
+ include_expsyms \
+ file_list_spec \
+@@ -16589,8 +17435,8 @@ LD_CXX \
+ reload_flag_CXX \
+ compiler_CXX \
+ lt_prog_compiler_no_builtin_flag_CXX \
+-lt_prog_compiler_wl_CXX \
+ lt_prog_compiler_pic_CXX \
++lt_prog_compiler_wl_CXX \
+ lt_prog_compiler_static_CXX \
+ lt_cv_prog_compiler_c_o_CXX \
+ export_dynamic_flag_spec_CXX \
+@@ -16602,7 +17448,6 @@ no_undefined_flag_CXX \
+ hardcode_libdir_flag_spec_CXX \
+ hardcode_libdir_flag_spec_ld_CXX \
+ hardcode_libdir_separator_CXX \
+-fix_srcfile_path_CXX \
+ exclude_expsyms_CXX \
+ include_expsyms_CXX \
+ file_list_spec_CXX \
+@@ -16636,6 +17481,7 @@ module_cmds \
+ module_expsym_cmds \
+ export_symbols_cmds \
+ prelink_cmds \
++postlink_cmds \
+ postinstall_cmds \
+ postuninstall_cmds \
+ finish_cmds \
+@@ -16650,7 +17496,8 @@ archive_expsym_cmds_CXX \
+ module_cmds_CXX \
+ module_expsym_cmds_CXX \
+ export_symbols_cmds_CXX \
+-prelink_cmds_CXX; do
++prelink_cmds_CXX \
++postlink_cmds_CXX; do
+ case \`eval \\\\\$ECHO \\\\""\\\\\$\$var"\\\\"\` in
+ *[\\\\\\\`\\"\\\$]*)
+ eval "lt_\$var=\\\\\\"\\\`\\\$ECHO \\"\\\$\$var\\" | \\\$SED -e \\"\\\$double_quote_subst\\" -e \\"\\\$sed_quote_subst\\" -e \\"\\\$delay_variable_subst\\"\\\`\\\\\\""
+@@ -17404,7 +18251,8 @@ $as_echo X"$file" |
+ # NOTE: Changes made to this file will be lost: look at ltmain.sh.
+ #
+ # Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005,
+-# 2006, 2007, 2008, 2009 Free Software Foundation, Inc.
++# 2006, 2007, 2008, 2009, 2010 Free Software Foundation,
++# Inc.
+ # Written by Gordon Matzigkeit, 1996
+ #
+ # This file is part of GNU Libtool.
+@@ -17507,19 +18355,42 @@ SP2NL=$lt_lt_SP2NL
+ # turn newlines into spaces.
+ NL2SP=$lt_lt_NL2SP
+
++# convert \$build file names to \$host format.
++to_host_file_cmd=$lt_cv_to_host_file_cmd
++
++# convert \$build files to toolchain format.
++to_tool_file_cmd=$lt_cv_to_tool_file_cmd
++
+ # An object symbol dumper.
+ OBJDUMP=$lt_OBJDUMP
+
+ # Method to check whether dependent libraries are shared objects.
+ deplibs_check_method=$lt_deplibs_check_method
+
+-# Command to use when deplibs_check_method == "file_magic".
++# Command to use when deplibs_check_method = "file_magic".
+ file_magic_cmd=$lt_file_magic_cmd
+
++# How to find potential files when deplibs_check_method = "file_magic".
++file_magic_glob=$lt_file_magic_glob
++
++# Find potential files using nocaseglob when deplibs_check_method = "file_magic".
++want_nocaseglob=$lt_want_nocaseglob
++
++# DLL creation program.
++DLLTOOL=$lt_DLLTOOL
++
++# Command to associate shared and link libraries.
++sharedlib_from_linklib_cmd=$lt_sharedlib_from_linklib_cmd
++
+ # The archiver.
+ AR=$lt_AR
++
++# Flags to create an archive.
+ AR_FLAGS=$lt_AR_FLAGS
+
++# How to feed a file listing to the archiver.
++archiver_list_spec=$lt_archiver_list_spec
++
+ # A symbol stripping program.
+ STRIP=$lt_STRIP
+
+@@ -17549,6 +18420,12 @@ global_symbol_to_c_name_address=$lt_lt_cv_sys_global_symbol_to_c_name_address
+ # Transform the output of nm in a C name address pair when lib prefix is needed.
+ global_symbol_to_c_name_address_lib_prefix=$lt_lt_cv_sys_global_symbol_to_c_name_address_lib_prefix
+
++# Specify filename containing input files for \$NM.
++nm_file_list_spec=$lt_nm_file_list_spec
++
++# The root where to search for dependent libraries,and in which our libraries should be installed.
++lt_sysroot=$lt_sysroot
++
+ # The name of the directory that contains temporary libtool files.
+ objdir=$objdir
+
+@@ -17558,6 +18435,9 @@ MAGIC_CMD=$MAGIC_CMD
+ # Must we lock files when doing compilation?
+ need_locks=$lt_need_locks
+
++# Manifest tool.
++MANIFEST_TOOL=$lt_MANIFEST_TOOL
++
+ # Tool to manipulate archived DWARF debug symbol files on Mac OS X.
+ DSYMUTIL=$lt_DSYMUTIL
+
+@@ -17672,12 +18552,12 @@ with_gcc=$GCC
+ # Compiler flag to turn off builtin functions.
+ no_builtin_flag=$lt_lt_prog_compiler_no_builtin_flag
+
+-# How to pass a linker flag through the compiler.
+-wl=$lt_lt_prog_compiler_wl
+-
+ # Additional compiler flags for building library objects.
+ pic_flag=$lt_lt_prog_compiler_pic
+
++# How to pass a linker flag through the compiler.
++wl=$lt_lt_prog_compiler_wl
++
+ # Compiler flag to prevent dynamic linking.
+ link_static_flag=$lt_lt_prog_compiler_static
+
+@@ -17764,9 +18644,6 @@ inherit_rpath=$inherit_rpath
+ # Whether libtool must link a program against all its dependency libraries.
+ link_all_deplibs=$link_all_deplibs
+
+-# Fix the shell variable \$srcfile for the compiler.
+-fix_srcfile_path=$lt_fix_srcfile_path
+-
+ # Set to "yes" if exported symbols are required.
+ always_export_symbols=$always_export_symbols
+
+@@ -17782,6 +18659,9 @@ include_expsyms=$lt_include_expsyms
+ # Commands necessary for linking programs (against libraries) with templates.
+ prelink_cmds=$lt_prelink_cmds
+
++# Commands necessary for finishing linking programs.
++postlink_cmds=$lt_postlink_cmds
++
+ # Specify filename containing input files.
+ file_list_spec=$lt_file_list_spec
+
+@@ -17828,210 +18708,169 @@ ltmain="$ac_aux_dir/ltmain.sh"
+ # if finds mixed CR/LF and LF-only lines. Since sed operates in
+ # text mode, it properly converts lines to CR/LF. This bash problem
+ # is reportedly fixed, but why not run on old versions too?
+- sed '/^# Generated shell functions inserted here/q' "$ltmain" >> "$cfgfile" \
+- || (rm -f "$cfgfile"; exit 1)
+-
+- case $xsi_shell in
+- yes)
+- cat << \_LT_EOF >> "$cfgfile"
+-
+-# func_dirname file append nondir_replacement
+-# Compute the dirname of FILE. If nonempty, add APPEND to the result,
+-# otherwise set result to NONDIR_REPLACEMENT.
+-func_dirname ()
+-{
+- case ${1} in
+- */*) func_dirname_result="${1%/*}${2}" ;;
+- * ) func_dirname_result="${3}" ;;
+- esac
+-}
+-
+-# func_basename file
+-func_basename ()
+-{
+- func_basename_result="${1##*/}"
+-}
+-
+-# func_dirname_and_basename file append nondir_replacement
+-# perform func_basename and func_dirname in a single function
+-# call:
+-# dirname: Compute the dirname of FILE. If nonempty,
+-# add APPEND to the result, otherwise set result
+-# to NONDIR_REPLACEMENT.
+-# value returned in "$func_dirname_result"
+-# basename: Compute filename of FILE.
+-# value retuned in "$func_basename_result"
+-# Implementation must be kept synchronized with func_dirname
+-# and func_basename. For efficiency, we do not delegate to
+-# those functions but instead duplicate the functionality here.
+-func_dirname_and_basename ()
+-{
+- case ${1} in
+- */*) func_dirname_result="${1%/*}${2}" ;;
+- * ) func_dirname_result="${3}" ;;
+- esac
+- func_basename_result="${1##*/}"
+-}
+-
+-# func_stripname prefix suffix name
+-# strip PREFIX and SUFFIX off of NAME.
+-# PREFIX and SUFFIX must not contain globbing or regex special
+-# characters, hashes, percent signs, but SUFFIX may contain a leading
+-# dot (in which case that matches only a dot).
+-func_stripname ()
+-{
+- # pdksh 5.2.14 does not do ${X%$Y} correctly if both X and Y are
+- # positional parameters, so assign one to ordinary parameter first.
+- func_stripname_result=${3}
+- func_stripname_result=${func_stripname_result#"${1}"}
+- func_stripname_result=${func_stripname_result%"${2}"}
+-}
+-
+-# func_opt_split
+-func_opt_split ()
+-{
+- func_opt_split_opt=${1%%=*}
+- func_opt_split_arg=${1#*=}
+-}
+-
+-# func_lo2o object
+-func_lo2o ()
+-{
+- case ${1} in
+- *.lo) func_lo2o_result=${1%.lo}.${objext} ;;
+- *) func_lo2o_result=${1} ;;
+- esac
+-}
+-
+-# func_xform libobj-or-source
+-func_xform ()
+-{
+- func_xform_result=${1%.*}.lo
+-}
+-
+-# func_arith arithmetic-term...
+-func_arith ()
+-{
+- func_arith_result=$(( $* ))
+-}
+-
+-# func_len string
+-# STRING may not start with a hyphen.
+-func_len ()
+-{
+- func_len_result=${#1}
+-}
+-
+-_LT_EOF
+- ;;
+- *) # Bourne compatible functions.
+- cat << \_LT_EOF >> "$cfgfile"
+-
+-# func_dirname file append nondir_replacement
+-# Compute the dirname of FILE. If nonempty, add APPEND to the result,
+-# otherwise set result to NONDIR_REPLACEMENT.
+-func_dirname ()
+-{
+- # Extract subdirectory from the argument.
+- func_dirname_result=`$ECHO "${1}" | $SED "$dirname"`
+- if test "X$func_dirname_result" = "X${1}"; then
+- func_dirname_result="${3}"
+- else
+- func_dirname_result="$func_dirname_result${2}"
+- fi
+-}
+-
+-# func_basename file
+-func_basename ()
+-{
+- func_basename_result=`$ECHO "${1}" | $SED "$basename"`
+-}
+-
+-
+-# func_stripname prefix suffix name
+-# strip PREFIX and SUFFIX off of NAME.
+-# PREFIX and SUFFIX must not contain globbing or regex special
+-# characters, hashes, percent signs, but SUFFIX may contain a leading
+-# dot (in which case that matches only a dot).
+-# func_strip_suffix prefix name
+-func_stripname ()
+-{
+- case ${2} in
+- .*) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%\\\\${2}\$%%"`;;
+- *) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%${2}\$%%"`;;
+- esac
+-}
+-
+-# sed scripts:
+-my_sed_long_opt='1s/^\(-[^=]*\)=.*/\1/;q'
+-my_sed_long_arg='1s/^-[^=]*=//'
+-
+-# func_opt_split
+-func_opt_split ()
+-{
+- func_opt_split_opt=`$ECHO "${1}" | $SED "$my_sed_long_opt"`
+- func_opt_split_arg=`$ECHO "${1}" | $SED "$my_sed_long_arg"`
+-}
+-
+-# func_lo2o object
+-func_lo2o ()
+-{
+- func_lo2o_result=`$ECHO "${1}" | $SED "$lo2o"`
+-}
+-
+-# func_xform libobj-or-source
+-func_xform ()
+-{
+- func_xform_result=`$ECHO "${1}" | $SED 's/\.[^.]*$/.lo/'`
+-}
+-
+-# func_arith arithmetic-term...
+-func_arith ()
+-{
+- func_arith_result=`expr "$@"`
+-}
+-
+-# func_len string
+-# STRING may not start with a hyphen.
+-func_len ()
+-{
+- func_len_result=`expr "$1" : ".*" 2>/dev/null || echo $max_cmd_len`
+-}
+-
+-_LT_EOF
+-esac
+-
+-case $lt_shell_append in
+- yes)
+- cat << \_LT_EOF >> "$cfgfile"
+-
+-# func_append var value
+-# Append VALUE to the end of shell variable VAR.
+-func_append ()
+-{
+- eval "$1+=\$2"
+-}
+-_LT_EOF
+- ;;
+- *)
+- cat << \_LT_EOF >> "$cfgfile"
+-
+-# func_append var value
+-# Append VALUE to the end of shell variable VAR.
+-func_append ()
+-{
+- eval "$1=\$$1\$2"
+-}
+-
+-_LT_EOF
+- ;;
+- esac
+-
+-
+- sed -n '/^# Generated shell functions inserted here/,$p' "$ltmain" >> "$cfgfile" \
+- || (rm -f "$cfgfile"; exit 1)
+-
+- mv -f "$cfgfile" "$ofile" ||
++ sed '$q' "$ltmain" >> "$cfgfile" \
++ || (rm -f "$cfgfile"; exit 1)
++
++ if test x"$xsi_shell" = xyes; then
++ sed -e '/^func_dirname ()$/,/^} # func_dirname /c\
++func_dirname ()\
++{\
++\ case ${1} in\
++\ */*) func_dirname_result="${1%/*}${2}" ;;\
++\ * ) func_dirname_result="${3}" ;;\
++\ esac\
++} # Extended-shell func_dirname implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++
++ sed -e '/^func_basename ()$/,/^} # func_basename /c\
++func_basename ()\
++{\
++\ func_basename_result="${1##*/}"\
++} # Extended-shell func_basename implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++
++ sed -e '/^func_dirname_and_basename ()$/,/^} # func_dirname_and_basename /c\
++func_dirname_and_basename ()\
++{\
++\ case ${1} in\
++\ */*) func_dirname_result="${1%/*}${2}" ;;\
++\ * ) func_dirname_result="${3}" ;;\
++\ esac\
++\ func_basename_result="${1##*/}"\
++} # Extended-shell func_dirname_and_basename implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++
++ sed -e '/^func_stripname ()$/,/^} # func_stripname /c\
++func_stripname ()\
++{\
++\ # pdksh 5.2.14 does not do ${X%$Y} correctly if both X and Y are\
++\ # positional parameters, so assign one to ordinary parameter first.\
++\ func_stripname_result=${3}\
++\ func_stripname_result=${func_stripname_result#"${1}"}\
++\ func_stripname_result=${func_stripname_result%"${2}"}\
++} # Extended-shell func_stripname implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++
++ sed -e '/^func_split_long_opt ()$/,/^} # func_split_long_opt /c\
++func_split_long_opt ()\
++{\
++\ func_split_long_opt_name=${1%%=*}\
++\ func_split_long_opt_arg=${1#*=}\
++} # Extended-shell func_split_long_opt implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++
++ sed -e '/^func_split_short_opt ()$/,/^} # func_split_short_opt /c\
++func_split_short_opt ()\
++{\
++\ func_split_short_opt_arg=${1#??}\
++\ func_split_short_opt_name=${1%"$func_split_short_opt_arg"}\
++} # Extended-shell func_split_short_opt implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++
++ sed -e '/^func_lo2o ()$/,/^} # func_lo2o /c\
++func_lo2o ()\
++{\
++\ case ${1} in\
++\ *.lo) func_lo2o_result=${1%.lo}.${objext} ;;\
++\ *) func_lo2o_result=${1} ;;\
++\ esac\
++} # Extended-shell func_lo2o implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++
++ sed -e '/^func_xform ()$/,/^} # func_xform /c\
++func_xform ()\
++{\
++ func_xform_result=${1%.*}.lo\
++} # Extended-shell func_xform implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++
++ sed -e '/^func_arith ()$/,/^} # func_arith /c\
++func_arith ()\
++{\
++ func_arith_result=$(( $* ))\
++} # Extended-shell func_arith implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++
++ sed -e '/^func_len ()$/,/^} # func_len /c\
++func_len ()\
++{\
++ func_len_result=${#1}\
++} # Extended-shell func_len implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++fi
++
++if test x"$lt_shell_append" = xyes; then
++ sed -e '/^func_append ()$/,/^} # func_append /c\
++func_append ()\
++{\
++ eval "${1}+=\\${2}"\
++} # Extended-shell func_append implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++
++ sed -e '/^func_append_quoted ()$/,/^} # func_append_quoted /c\
++func_append_quoted ()\
++{\
++\ func_quote_for_eval "${2}"\
++\ eval "${1}+=\\\\ \\$func_quote_for_eval_result"\
++} # Extended-shell func_append_quoted implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++
++ # Save a `func_append' function call where possible by direct use of '+='
++ sed -e 's%func_append \([a-zA-Z_]\{1,\}\) "%\1+="%g' $cfgfile > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++ test 0 -eq $? || _lt_function_replace_fail=:
++else
++ # Save a `func_append' function call even when '+=' is not available
++ sed -e 's%func_append \([a-zA-Z_]\{1,\}\) "%\1="$\1%g' $cfgfile > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++ test 0 -eq $? || _lt_function_replace_fail=:
++fi
++
++if test x"$_lt_function_replace_fail" = x":"; then
++ { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: Unable to substitute extended shell functions in $ofile" >&5
++$as_echo "$as_me: WARNING: Unable to substitute extended shell functions in $ofile" >&2;}
++fi
++
++
++ mv -f "$cfgfile" "$ofile" ||
+ (rm -f "$ofile" && cp "$cfgfile" "$ofile" && rm -f "$cfgfile")
+ chmod +x "$ofile"
+
+@@ -18059,12 +18898,12 @@ with_gcc=$GCC_CXX
+ # Compiler flag to turn off builtin functions.
+ no_builtin_flag=$lt_lt_prog_compiler_no_builtin_flag_CXX
+
+-# How to pass a linker flag through the compiler.
+-wl=$lt_lt_prog_compiler_wl_CXX
+-
+ # Additional compiler flags for building library objects.
+ pic_flag=$lt_lt_prog_compiler_pic_CXX
+
++# How to pass a linker flag through the compiler.
++wl=$lt_lt_prog_compiler_wl_CXX
++
+ # Compiler flag to prevent dynamic linking.
+ link_static_flag=$lt_lt_prog_compiler_static_CXX
+
+@@ -18151,9 +18990,6 @@ inherit_rpath=$inherit_rpath_CXX
+ # Whether libtool must link a program against all its dependency libraries.
+ link_all_deplibs=$link_all_deplibs_CXX
+
+-# Fix the shell variable \$srcfile for the compiler.
+-fix_srcfile_path=$lt_fix_srcfile_path_CXX
+-
+ # Set to "yes" if exported symbols are required.
+ always_export_symbols=$always_export_symbols_CXX
+
+@@ -18169,6 +19005,9 @@ include_expsyms=$lt_include_expsyms_CXX
+ # Commands necessary for linking programs (against libraries) with templates.
+ prelink_cmds=$lt_prelink_cmds_CXX
+
++# Commands necessary for finishing linking programs.
++postlink_cmds=$lt_postlink_cmds_CXX
++
+ # Specify filename containing input files.
+ file_list_spec=$lt_file_list_spec_CXX
+
+diff --git a/gprofng/src/Makefile.in b/gprofng/src/Makefile.in
+index db763ef71b2..726dc69bf85 100644
+--- a/gprofng/src/Makefile.in
++++ b/gprofng/src/Makefile.in
+@@ -321,6 +321,7 @@ CXXFLAGS = @CXXFLAGS@
+ CYGPATH_W = @CYGPATH_W@
+ DEFS = @DEFS@
+ DEPDIR = @DEPDIR@
++DLLTOOL = @DLLTOOL@
+ DSYMUTIL = @DSYMUTIL@
+ DUMPBIN = @DUMPBIN@
+ ECHO_C = @ECHO_C@
+@@ -355,6 +356,7 @@ LN_S = @LN_S@
+ LTLIBOBJS = @LTLIBOBJS@
+ MAINT = @MAINT@
+ MAKEINFO = @MAKEINFO@
++MANIFEST_TOOL = @MANIFEST_TOOL@
+ MKDIR_P = @MKDIR_P@
+ NM = @NM@
+ NMEDIT = @NMEDIT@
+diff --git a/ld/Makefile.in b/ld/Makefile.in
+index 15311048855..71b39ee9311 100644
+--- a/ld/Makefile.in
++++ b/ld/Makefile.in
+@@ -390,6 +390,7 @@ CYGPATH_W = @CYGPATH_W@
+ DATADIRNAME = @DATADIRNAME@
+ DEFS = @DEFS@
+ DEPDIR = @DEPDIR@
++DLLTOOL = @DLLTOOL@
+ DSYMUTIL = @DSYMUTIL@
+ DUMPBIN = @DUMPBIN@
+ ECHO_C = @ECHO_C@
+@@ -445,6 +446,7 @@ LTLIBINTL = @LTLIBINTL@
+ LTLIBOBJS = @LTLIBOBJS@
+ MAINT = @MAINT@
+ MAKEINFO = @MAKEINFO@
++MANIFEST_TOOL = @MANIFEST_TOOL@
+ MKDIR_P = @MKDIR_P@
+ MKINSTALLDIRS = @MKINSTALLDIRS@
+ MSGFMT = @MSGFMT@
+@@ -495,6 +497,7 @@ abs_builddir = @abs_builddir@
+ abs_srcdir = @abs_srcdir@
+ abs_top_builddir = @abs_top_builddir@
+ abs_top_srcdir = @abs_top_srcdir@
++ac_ct_AR = @ac_ct_AR@
+ ac_ct_CC = @ac_ct_CC@
+ ac_ct_CXX = @ac_ct_CXX@
+ ac_ct_DUMPBIN = @ac_ct_DUMPBIN@
+diff --git a/ld/configure b/ld/configure
+index 98a76d6da7f..a4266996c44 100755
+--- a/ld/configure
++++ b/ld/configure
+@@ -705,8 +705,11 @@ OTOOL
+ LIPO
+ NMEDIT
+ DSYMUTIL
++MANIFEST_TOOL
+ RANLIB
++ac_ct_AR
+ AR
++DLLTOOL
+ OBJDUMP
+ LN_S
+ NM
+@@ -836,6 +839,7 @@ enable_static
+ with_pic
+ enable_fast_install
+ with_gnu_ld
++with_libtool_sysroot
+ enable_libtool_lock
+ enable_plugins
+ enable_largefile
+@@ -1581,6 +1585,8 @@ Optional Packages:
+ --with-pic try to use only PIC/non-PIC objects [default=use
+ both]
+ --with-gnu-ld assume the C compiler uses GNU ld [default=no]
++ --with-libtool-sysroot=DIR Search for dependent libraries within DIR
++ (or the compiler's sysroot if not specified).
+ --with-lib-path=dir1:dir2... set default LIB_PATH
+ --with-sysroot=DIR Search for usr/lib et al within DIR.
+ --with-gnu-ld assume the C compiler uses GNU ld default=no
+@@ -5560,8 +5566,8 @@ esac
+
+
+
+-macro_version='2.2.7a'
+-macro_revision='1.3134'
++macro_version='2.4'
++macro_revision='1.3293'
+
+
+
+@@ -5601,7 +5607,7 @@ ECHO=$ECHO$ECHO$ECHO$ECHO$ECHO$ECHO
+ { $as_echo "$as_me:${as_lineno-$LINENO}: checking how to print strings" >&5
+ $as_echo_n "checking how to print strings... " >&6; }
+ # Test print first, because it will be a builtin if present.
+-if test "X`print -r -- -n 2>/dev/null`" = X-n && \
++if test "X`( print -r -- -n ) 2>/dev/null`" = X-n && \
+ test "X`print -r -- $ECHO 2>/dev/null`" = "X$ECHO"; then
+ ECHO='print -r --'
+ elif test "X`printf %s $ECHO 2>/dev/null`" = "X$ECHO"; then
+@@ -6294,8 +6300,8 @@ $as_echo_n "checking whether the shell understands some XSI constructs... " >&6;
+ # Try some XSI features
+ xsi_shell=no
+ ( _lt_dummy="a/b/c"
+- test "${_lt_dummy##*/},${_lt_dummy%/*},"${_lt_dummy%"$_lt_dummy"}, \
+- = c,a/b,, \
++ test "${_lt_dummy##*/},${_lt_dummy%/*},${_lt_dummy#??}"${_lt_dummy%"$_lt_dummy"}, \
++ = c,a/b,b/c, \
+ && eval 'test $(( 1 + 1 )) -eq 2 \
+ && test "${#_lt_dummy}" -eq 5' ) >/dev/null 2>&1 \
+ && xsi_shell=yes
+@@ -6344,6 +6350,80 @@ esac
+
+
+
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to convert $build file names to $host format" >&5
++$as_echo_n "checking how to convert $build file names to $host format... " >&6; }
++if ${lt_cv_to_host_file_cmd+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ case $host in
++ *-*-mingw* )
++ case $build in
++ *-*-mingw* ) # actually msys
++ lt_cv_to_host_file_cmd=func_convert_file_msys_to_w32
++ ;;
++ *-*-cygwin* )
++ lt_cv_to_host_file_cmd=func_convert_file_cygwin_to_w32
++ ;;
++ * ) # otherwise, assume *nix
++ lt_cv_to_host_file_cmd=func_convert_file_nix_to_w32
++ ;;
++ esac
++ ;;
++ *-*-cygwin* )
++ case $build in
++ *-*-mingw* ) # actually msys
++ lt_cv_to_host_file_cmd=func_convert_file_msys_to_cygwin
++ ;;
++ *-*-cygwin* )
++ lt_cv_to_host_file_cmd=func_convert_file_noop
++ ;;
++ * ) # otherwise, assume *nix
++ lt_cv_to_host_file_cmd=func_convert_file_nix_to_cygwin
++ ;;
++ esac
++ ;;
++ * ) # unhandled hosts (and "normal" native builds)
++ lt_cv_to_host_file_cmd=func_convert_file_noop
++ ;;
++esac
++
++fi
++
++to_host_file_cmd=$lt_cv_to_host_file_cmd
++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_to_host_file_cmd" >&5
++$as_echo "$lt_cv_to_host_file_cmd" >&6; }
++
++
++
++
++
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to convert $build file names to toolchain format" >&5
++$as_echo_n "checking how to convert $build file names to toolchain format... " >&6; }
++if ${lt_cv_to_tool_file_cmd+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ #assume ordinary cross tools, or native build.
++lt_cv_to_tool_file_cmd=func_convert_file_noop
++case $host in
++ *-*-mingw* )
++ case $build in
++ *-*-mingw* ) # actually msys
++ lt_cv_to_tool_file_cmd=func_convert_file_msys_to_w32
++ ;;
++ esac
++ ;;
++esac
++
++fi
++
++to_tool_file_cmd=$lt_cv_to_tool_file_cmd
++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_to_tool_file_cmd" >&5
++$as_echo "$lt_cv_to_tool_file_cmd" >&6; }
++
++
++
++
++
+ { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $LD option to reload object files" >&5
+ $as_echo_n "checking for $LD option to reload object files... " >&6; }
+ if ${lt_cv_ld_reload_flag+:} false; then :
+@@ -6360,6 +6440,11 @@ case $reload_flag in
+ esac
+ reload_cmds='$LD$reload_flag -o $output$reload_objs'
+ case $host_os in
++ cygwin* | mingw* | pw32* | cegcc*)
++ if test "$GCC" != yes; then
++ reload_cmds=false
++ fi
++ ;;
+ darwin*)
+ if test "$GCC" = yes; then
+ reload_cmds='$LTCC $LTCFLAGS -nostdlib ${wl}-r -o $output$reload_objs'
+@@ -6528,7 +6613,8 @@ mingw* | pw32*)
+ lt_cv_deplibs_check_method='file_magic ^x86 archive import|^x86 DLL'
+ lt_cv_file_magic_cmd='func_win32_libid'
+ else
+- lt_cv_deplibs_check_method='file_magic file format pei*-i386(.*architecture: i386)?'
++ # Keep this pattern in sync with the one in func_win32_libid.
++ lt_cv_deplibs_check_method='file_magic file format (pei*-i386(.*architecture: i386)?|pe-arm-wince|pe-x86-64)'
+ lt_cv_file_magic_cmd='$OBJDUMP -f'
+ fi
+ ;;
+@@ -6687,6 +6773,21 @@ esac
+ fi
+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_deplibs_check_method" >&5
+ $as_echo "$lt_cv_deplibs_check_method" >&6; }
++
++file_magic_glob=
++want_nocaseglob=no
++if test "$build" = "$host"; then
++ case $host_os in
++ mingw* | pw32*)
++ if ( shopt | grep nocaseglob ) >/dev/null 2>&1; then
++ want_nocaseglob=yes
++ else
++ file_magic_glob=`echo aAbBcCdDeEfFgGhHiIjJkKlLmMnNoOpPqQrRsStTuUvVwWxXyYzZ | $SED -e "s/\(..\)/s\/[\1]\/[\1]\/g;/g"`
++ fi
++ ;;
++ esac
++fi
++
+ file_magic_cmd=$lt_cv_file_magic_cmd
+ deplibs_check_method=$lt_cv_deplibs_check_method
+ test -z "$deplibs_check_method" && deplibs_check_method=unknown
+@@ -6702,6 +6803,157 @@ test -z "$deplibs_check_method" && deplibs_check_method=unknown
+
+
+
++
++
++
++
++
++
++
++
++
++
++if test -n "$ac_tool_prefix"; then
++ # Extract the first word of "${ac_tool_prefix}dlltool", so it can be a program name with args.
++set dummy ${ac_tool_prefix}dlltool; ac_word=$2
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
++$as_echo_n "checking for $ac_word... " >&6; }
++if ${ac_cv_prog_DLLTOOL+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ if test -n "$DLLTOOL"; then
++ ac_cv_prog_DLLTOOL="$DLLTOOL" # Let the user override the test.
++else
++as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
++for as_dir in $PATH
++do
++ IFS=$as_save_IFS
++ test -z "$as_dir" && as_dir=.
++ for ac_exec_ext in '' $ac_executable_extensions; do
++ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
++ ac_cv_prog_DLLTOOL="${ac_tool_prefix}dlltool"
++ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
++ break 2
++ fi
++done
++ done
++IFS=$as_save_IFS
++
++fi
++fi
++DLLTOOL=$ac_cv_prog_DLLTOOL
++if test -n "$DLLTOOL"; then
++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $DLLTOOL" >&5
++$as_echo "$DLLTOOL" >&6; }
++else
++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
++$as_echo "no" >&6; }
++fi
++
++
++fi
++if test -z "$ac_cv_prog_DLLTOOL"; then
++ ac_ct_DLLTOOL=$DLLTOOL
++ # Extract the first word of "dlltool", so it can be a program name with args.
++set dummy dlltool; ac_word=$2
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
++$as_echo_n "checking for $ac_word... " >&6; }
++if ${ac_cv_prog_ac_ct_DLLTOOL+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ if test -n "$ac_ct_DLLTOOL"; then
++ ac_cv_prog_ac_ct_DLLTOOL="$ac_ct_DLLTOOL" # Let the user override the test.
++else
++as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
++for as_dir in $PATH
++do
++ IFS=$as_save_IFS
++ test -z "$as_dir" && as_dir=.
++ for ac_exec_ext in '' $ac_executable_extensions; do
++ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
++ ac_cv_prog_ac_ct_DLLTOOL="dlltool"
++ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
++ break 2
++ fi
++done
++ done
++IFS=$as_save_IFS
++
++fi
++fi
++ac_ct_DLLTOOL=$ac_cv_prog_ac_ct_DLLTOOL
++if test -n "$ac_ct_DLLTOOL"; then
++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_DLLTOOL" >&5
++$as_echo "$ac_ct_DLLTOOL" >&6; }
++else
++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
++$as_echo "no" >&6; }
++fi
++
++ if test "x$ac_ct_DLLTOOL" = x; then
++ DLLTOOL="false"
++ else
++ case $cross_compiling:$ac_tool_warned in
++yes:)
++{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
++$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
++ac_tool_warned=yes ;;
++esac
++ DLLTOOL=$ac_ct_DLLTOOL
++ fi
++else
++ DLLTOOL="$ac_cv_prog_DLLTOOL"
++fi
++
++test -z "$DLLTOOL" && DLLTOOL=dlltool
++
++
++
++
++
++
++
++
++
++
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to associate runtime and link libraries" >&5
++$as_echo_n "checking how to associate runtime and link libraries... " >&6; }
++if ${lt_cv_sharedlib_from_linklib_cmd+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ lt_cv_sharedlib_from_linklib_cmd='unknown'
++
++case $host_os in
++cygwin* | mingw* | pw32* | cegcc*)
++ # two different shell functions defined in ltmain.sh
++ # decide which to use based on capabilities of $DLLTOOL
++ case `$DLLTOOL --help 2>&1` in
++ *--identify-strict*)
++ lt_cv_sharedlib_from_linklib_cmd=func_cygming_dll_for_implib
++ ;;
++ *)
++ lt_cv_sharedlib_from_linklib_cmd=func_cygming_dll_for_implib_fallback
++ ;;
++ esac
++ ;;
++*)
++ # fallback: assume linklib IS sharedlib
++ lt_cv_sharedlib_from_linklib_cmd="$ECHO"
++ ;;
++esac
++
++fi
++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_sharedlib_from_linklib_cmd" >&5
++$as_echo "$lt_cv_sharedlib_from_linklib_cmd" >&6; }
++sharedlib_from_linklib_cmd=$lt_cv_sharedlib_from_linklib_cmd
++test -z "$sharedlib_from_linklib_cmd" && sharedlib_from_linklib_cmd=$ECHO
++
++
++
++
++
++
++
+ plugin_option=
+ plugin_names="liblto_plugin.so liblto_plugin-0.dll cyglto_plugin-0.dll"
+ for plugin in $plugin_names; do
+@@ -6716,8 +6968,10 @@ for plugin in $plugin_names; do
+ done
+
+ if test -n "$ac_tool_prefix"; then
+- # Extract the first word of "${ac_tool_prefix}ar", so it can be a program name with args.
+-set dummy ${ac_tool_prefix}ar; ac_word=$2
++ for ac_prog in ar
++ do
++ # Extract the first word of "$ac_tool_prefix$ac_prog", so it can be a program name with args.
++set dummy $ac_tool_prefix$ac_prog; ac_word=$2
+ { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+ $as_echo_n "checking for $ac_word... " >&6; }
+ if ${ac_cv_prog_AR+:} false; then :
+@@ -6733,7 +6987,7 @@ do
+ test -z "$as_dir" && as_dir=.
+ for ac_exec_ext in '' $ac_executable_extensions; do
+ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
+- ac_cv_prog_AR="${ac_tool_prefix}ar"
++ ac_cv_prog_AR="$ac_tool_prefix$ac_prog"
+ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+ break 2
+ fi
+@@ -6753,11 +7007,15 @@ $as_echo "no" >&6; }
+ fi
+
+
++ test -n "$AR" && break
++ done
+ fi
+-if test -z "$ac_cv_prog_AR"; then
++if test -z "$AR"; then
+ ac_ct_AR=$AR
+- # Extract the first word of "ar", so it can be a program name with args.
+-set dummy ar; ac_word=$2
++ for ac_prog in ar
++do
++ # Extract the first word of "$ac_prog", so it can be a program name with args.
++set dummy $ac_prog; ac_word=$2
+ { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+ $as_echo_n "checking for $ac_word... " >&6; }
+ if ${ac_cv_prog_ac_ct_AR+:} false; then :
+@@ -6773,7 +7031,7 @@ do
+ test -z "$as_dir" && as_dir=.
+ for ac_exec_ext in '' $ac_executable_extensions; do
+ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
+- ac_cv_prog_ac_ct_AR="ar"
++ ac_cv_prog_ac_ct_AR="$ac_prog"
+ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+ break 2
+ fi
+@@ -6792,6 +7050,10 @@ else
+ $as_echo "no" >&6; }
+ fi
+
++
++ test -n "$ac_ct_AR" && break
++done
++
+ if test "x$ac_ct_AR" = x; then
+ AR="false"
+ else
+@@ -6803,29 +7065,81 @@ ac_tool_warned=yes ;;
+ esac
+ AR=$ac_ct_AR
+ fi
+-else
+- AR="$ac_cv_prog_AR"
+ fi
+
+-test -z "$AR" && AR=ar
+-if test -n "$plugin_option"; then
+- if $AR --help 2>&1 | grep -q "\--plugin"; then
+- touch conftest.c
+- $AR $plugin_option rc conftest.a conftest.c
+- if test "$?" != 0; then
+- { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: Failed: $AR $plugin_option rc" >&5
++ touch conftest.c
++ $AR $plugin_option rc conftest.a conftest.c
++ if test "$?" != 0; then
++ { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: Failed: $AR $plugin_option rc" >&5
+ $as_echo "$as_me: WARNING: Failed: $AR $plugin_option rc" >&2;}
+- else
+- AR="$AR $plugin_option"
+- fi
+- rm -f conftest.*
++ else
++ AR="$AR $plugin_option"
+ fi
+-fi
+-test -z "$AR_FLAGS" && AR_FLAGS=cru
++ rm -f conftest.*
++: ${AR=ar}
++: ${AR_FLAGS=cru}
++
++
++
++
++
++
++
++
++
++
+
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for archiver @FILE support" >&5
++$as_echo_n "checking for archiver @FILE support... " >&6; }
++if ${lt_cv_ar_at_file+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ lt_cv_ar_at_file=no
++ cat confdefs.h - <<_ACEOF >conftest.$ac_ext
++/* end confdefs.h. */
++
++int
++main ()
++{
+
++ ;
++ return 0;
++}
++_ACEOF
++if ac_fn_c_try_compile "$LINENO"; then :
++ echo conftest.$ac_objext > conftest.lst
++ lt_ar_try='$AR $AR_FLAGS libconftest.a @conftest.lst >&5'
++ { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$lt_ar_try\""; } >&5
++ (eval $lt_ar_try) 2>&5
++ ac_status=$?
++ $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
++ test $ac_status = 0; }
++ if test "$ac_status" -eq 0; then
++ # Ensure the archiver fails upon bogus file names.
++ rm -f conftest.$ac_objext libconftest.a
++ { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$lt_ar_try\""; } >&5
++ (eval $lt_ar_try) 2>&5
++ ac_status=$?
++ $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
++ test $ac_status = 0; }
++ if test "$ac_status" -ne 0; then
++ lt_cv_ar_at_file=@
++ fi
++ fi
++ rm -f conftest.* libconftest.a
++
++fi
++rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
+
++fi
++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_ar_at_file" >&5
++$as_echo "$lt_cv_ar_at_file" >&6; }
+
++if test "x$lt_cv_ar_at_file" = xno; then
++ archiver_list_spec=
++else
++ archiver_list_spec=$lt_cv_ar_at_file
++fi
+
+
+
+@@ -7172,8 +7486,8 @@ esac
+ lt_cv_sys_global_symbol_to_cdecl="sed -n -e 's/^T .* \(.*\)$/extern int \1();/p' -e 's/^$symcode* .* \(.*\)$/extern char \1;/p'"
+
+ # Transform an extracted symbol line into symbol name and symbol address
+-lt_cv_sys_global_symbol_to_c_name_address="sed -n -e 's/^: \([^ ]*\) $/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"\2\", (void *) \&\2},/p'"
+-lt_cv_sys_global_symbol_to_c_name_address_lib_prefix="sed -n -e 's/^: \([^ ]*\) $/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \(lib[^ ]*\)$/ {\"\2\", (void *) \&\2},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"lib\2\", (void *) \&\2},/p'"
++lt_cv_sys_global_symbol_to_c_name_address="sed -n -e 's/^: \([^ ]*\)[ ]*$/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"\2\", (void *) \&\2},/p'"
++lt_cv_sys_global_symbol_to_c_name_address_lib_prefix="sed -n -e 's/^: \([^ ]*\)[ ]*$/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \(lib[^ ]*\)$/ {\"\2\", (void *) \&\2},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"lib\2\", (void *) \&\2},/p'"
+
+ # Handle CRLF in mingw tool chain
+ opt_cr=
+@@ -7209,6 +7523,7 @@ for ac_symprfx in "" "_"; do
+ else
+ lt_cv_sys_global_symbol_pipe="sed -n -e 's/^.*[ ]\($symcode$symcode*\)[ ][ ]*$ac_symprfx$sympat$opt_cr$/$symxfrm/p'"
+ fi
++ lt_cv_sys_global_symbol_pipe="$lt_cv_sys_global_symbol_pipe | sed '/ __gnu_lto/d'"
+
+ # Check to see that the pipe works correctly.
+ pipe_works=no
+@@ -7250,6 +7565,18 @@ _LT_EOF
+ if $GREP ' nm_test_var$' "$nlist" >/dev/null; then
+ if $GREP ' nm_test_func$' "$nlist" >/dev/null; then
+ cat <<_LT_EOF > conftest.$ac_ext
++/* Keep this code in sync between libtool.m4, ltmain, lt_system.h, and tests. */
++#if defined(_WIN32) || defined(__CYGWIN__) || defined(_WIN32_WCE)
++/* DATA imports from DLLs on WIN32 con't be const, because runtime
++ relocations are performed -- see ld's documentation on pseudo-relocs. */
++# define LT_DLSYM_CONST
++#elif defined(__osf__)
++/* This system does not cope well with relocations in const data. */
++# define LT_DLSYM_CONST
++#else
++# define LT_DLSYM_CONST const
++#endif
++
+ #ifdef __cplusplus
+ extern "C" {
+ #endif
+@@ -7261,7 +7588,7 @@ _LT_EOF
+ cat <<_LT_EOF >> conftest.$ac_ext
+
+ /* The mapping between symbol names and symbols. */
+-const struct {
++LT_DLSYM_CONST struct {
+ const char *name;
+ void *address;
+ }
+@@ -7287,8 +7614,8 @@ static const void *lt_preloaded_setup() {
+ _LT_EOF
+ # Now try linking the two files.
+ mv conftest.$ac_objext conftstm.$ac_objext
+- lt_save_LIBS="$LIBS"
+- lt_save_CFLAGS="$CFLAGS"
++ lt_globsym_save_LIBS=$LIBS
++ lt_globsym_save_CFLAGS=$CFLAGS
+ LIBS="conftstm.$ac_objext"
+ CFLAGS="$CFLAGS$lt_prog_compiler_no_builtin_flag"
+ if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_link\""; } >&5
+@@ -7298,8 +7625,8 @@ _LT_EOF
+ test $ac_status = 0; } && test -s conftest${ac_exeext}; then
+ pipe_works=yes
+ fi
+- LIBS="$lt_save_LIBS"
+- CFLAGS="$lt_save_CFLAGS"
++ LIBS=$lt_globsym_save_LIBS
++ CFLAGS=$lt_globsym_save_CFLAGS
+ else
+ echo "cannot find nm_test_func in $nlist" >&5
+ fi
+@@ -7336,6 +7663,27 @@ else
+ $as_echo "ok" >&6; }
+ fi
+
++# Response file support.
++if test "$lt_cv_nm_interface" = "MS dumpbin"; then
++ nm_file_list_spec='@'
++elif $NM --help 2>/dev/null | grep '[@]FILE' >/dev/null; then
++ nm_file_list_spec='@'
++fi
++
++
++
++
++
++
++
++
++
++
++
++
++
++
++
+
+
+
+@@ -7348,10 +7696,38 @@ fi
+
+
+
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for sysroot" >&5
++$as_echo_n "checking for sysroot... " >&6; }
+
++# Check whether --with-libtool-sysroot was given.
++if test "${with_libtool_sysroot+set}" = set; then :
++ withval=$with_libtool_sysroot;
++else
++ with_libtool_sysroot=no
++fi
+
+
++lt_sysroot=
++case ${with_libtool_sysroot} in #(
++ yes)
++ if test "$GCC" = yes; then
++ lt_sysroot=`$CC --print-sysroot 2>/dev/null`
++ fi
++ ;; #(
++ /*)
++ lt_sysroot=`echo "$with_libtool_sysroot" | sed -e "$sed_quote_subst"`
++ ;; #(
++ no|'')
++ ;; #(
++ *)
++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: ${with_libtool_sysroot}" >&5
++$as_echo "${with_libtool_sysroot}" >&6; }
++ as_fn_error $? "The sysroot must be an absolute path." "$LINENO" 5
++ ;;
++esac
+
++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: ${lt_sysroot:-no}" >&5
++$as_echo "${lt_sysroot:-no}" >&6; }
+
+
+
+@@ -7563,6 +7939,123 @@ esac
+
+ need_locks="$enable_libtool_lock"
+
++if test -n "$ac_tool_prefix"; then
++ # Extract the first word of "${ac_tool_prefix}mt", so it can be a program name with args.
++set dummy ${ac_tool_prefix}mt; ac_word=$2
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
++$as_echo_n "checking for $ac_word... " >&6; }
++if ${ac_cv_prog_MANIFEST_TOOL+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ if test -n "$MANIFEST_TOOL"; then
++ ac_cv_prog_MANIFEST_TOOL="$MANIFEST_TOOL" # Let the user override the test.
++else
++as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
++for as_dir in $PATH
++do
++ IFS=$as_save_IFS
++ test -z "$as_dir" && as_dir=.
++ for ac_exec_ext in '' $ac_executable_extensions; do
++ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
++ ac_cv_prog_MANIFEST_TOOL="${ac_tool_prefix}mt"
++ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
++ break 2
++ fi
++done
++ done
++IFS=$as_save_IFS
++
++fi
++fi
++MANIFEST_TOOL=$ac_cv_prog_MANIFEST_TOOL
++if test -n "$MANIFEST_TOOL"; then
++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $MANIFEST_TOOL" >&5
++$as_echo "$MANIFEST_TOOL" >&6; }
++else
++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
++$as_echo "no" >&6; }
++fi
++
++
++fi
++if test -z "$ac_cv_prog_MANIFEST_TOOL"; then
++ ac_ct_MANIFEST_TOOL=$MANIFEST_TOOL
++ # Extract the first word of "mt", so it can be a program name with args.
++set dummy mt; ac_word=$2
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
++$as_echo_n "checking for $ac_word... " >&6; }
++if ${ac_cv_prog_ac_ct_MANIFEST_TOOL+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ if test -n "$ac_ct_MANIFEST_TOOL"; then
++ ac_cv_prog_ac_ct_MANIFEST_TOOL="$ac_ct_MANIFEST_TOOL" # Let the user override the test.
++else
++as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
++for as_dir in $PATH
++do
++ IFS=$as_save_IFS
++ test -z "$as_dir" && as_dir=.
++ for ac_exec_ext in '' $ac_executable_extensions; do
++ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
++ ac_cv_prog_ac_ct_MANIFEST_TOOL="mt"
++ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
++ break 2
++ fi
++done
++ done
++IFS=$as_save_IFS
++
++fi
++fi
++ac_ct_MANIFEST_TOOL=$ac_cv_prog_ac_ct_MANIFEST_TOOL
++if test -n "$ac_ct_MANIFEST_TOOL"; then
++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_MANIFEST_TOOL" >&5
++$as_echo "$ac_ct_MANIFEST_TOOL" >&6; }
++else
++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
++$as_echo "no" >&6; }
++fi
++
++ if test "x$ac_ct_MANIFEST_TOOL" = x; then
++ MANIFEST_TOOL=":"
++ else
++ case $cross_compiling:$ac_tool_warned in
++yes:)
++{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
++$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
++ac_tool_warned=yes ;;
++esac
++ MANIFEST_TOOL=$ac_ct_MANIFEST_TOOL
++ fi
++else
++ MANIFEST_TOOL="$ac_cv_prog_MANIFEST_TOOL"
++fi
++
++test -z "$MANIFEST_TOOL" && MANIFEST_TOOL=mt
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking if $MANIFEST_TOOL is a manifest tool" >&5
++$as_echo_n "checking if $MANIFEST_TOOL is a manifest tool... " >&6; }
++if ${lt_cv_path_mainfest_tool+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ lt_cv_path_mainfest_tool=no
++ echo "$as_me:$LINENO: $MANIFEST_TOOL '-?'" >&5
++ $MANIFEST_TOOL '-?' 2>conftest.err > conftest.out
++ cat conftest.err >&5
++ if $GREP 'Manifest Tool' conftest.out > /dev/null; then
++ lt_cv_path_mainfest_tool=yes
++ fi
++ rm -f conftest*
++fi
++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_path_mainfest_tool" >&5
++$as_echo "$lt_cv_path_mainfest_tool" >&6; }
++if test "x$lt_cv_path_mainfest_tool" != xyes; then
++ MANIFEST_TOOL=:
++fi
++
++
++
++
++
+
+ case $host_os in
+ rhapsody* | darwin*)
+@@ -8126,6 +8619,8 @@ _LT_EOF
+ $LTCC $LTCFLAGS -c -o conftest.o conftest.c 2>&5
+ echo "$AR cru libconftest.a conftest.o" >&5
+ $AR cru libconftest.a conftest.o 2>&5
++ echo "$RANLIB libconftest.a" >&5
++ $RANLIB libconftest.a 2>&5
+ cat > conftest.c << _LT_EOF
+ int main() { return 0;}
+ _LT_EOF
+@@ -8196,6 +8691,16 @@ done
+
+
+
++func_stripname_cnf ()
++{
++ case ${2} in
++ .*) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%\\\\${2}\$%%"`;;
++ *) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%${2}\$%%"`;;
++ esac
++} # func_stripname_cnf
++
++
++
+
+
+ # Set options
+@@ -8711,8 +9216,6 @@ fi
+ lt_prog_compiler_pic=
+ lt_prog_compiler_static=
+
+-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $compiler option to produce PIC" >&5
+-$as_echo_n "checking for $compiler option to produce PIC... " >&6; }
+
+ if test "$GCC" = yes; then
+ lt_prog_compiler_wl='-Wl,'
+@@ -8878,6 +9381,12 @@ $as_echo_n "checking for $compiler option to produce PIC... " >&6; }
+ lt_prog_compiler_pic='--shared'
+ lt_prog_compiler_static='--static'
+ ;;
++ nagfor*)
++ # NAG Fortran compiler
++ lt_prog_compiler_wl='-Wl,-Wl,,'
++ lt_prog_compiler_pic='-PIC'
++ lt_prog_compiler_static='-Bstatic'
++ ;;
+ pgcc* | pgf77* | pgf90* | pgf95* | pgfortran*)
+ # Portland Group compilers (*not* the Pentium gcc compiler,
+ # which looks to be a dead project)
+@@ -8940,7 +9449,7 @@ $as_echo_n "checking for $compiler option to produce PIC... " >&6; }
+ lt_prog_compiler_pic='-KPIC'
+ lt_prog_compiler_static='-Bstatic'
+ case $cc_basename in
+- f77* | f90* | f95*)
++ f77* | f90* | f95* | sunf77* | sunf90* | sunf95*)
+ lt_prog_compiler_wl='-Qoption ld ';;
+ *)
+ lt_prog_compiler_wl='-Wl,';;
+@@ -8997,13 +9506,17 @@ case $host_os in
+ lt_prog_compiler_pic="$lt_prog_compiler_pic -DPIC"
+ ;;
+ esac
+-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_prog_compiler_pic" >&5
+-$as_echo "$lt_prog_compiler_pic" >&6; }
+-
+-
+-
+-
+
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $compiler option to produce PIC" >&5
++$as_echo_n "checking for $compiler option to produce PIC... " >&6; }
++if ${lt_cv_prog_compiler_pic+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ lt_cv_prog_compiler_pic=$lt_prog_compiler_pic
++fi
++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_pic" >&5
++$as_echo "$lt_cv_prog_compiler_pic" >&6; }
++lt_prog_compiler_pic=$lt_cv_prog_compiler_pic
+
+ #
+ # Check to make sure the PIC flag actually works.
+@@ -9064,6 +9577,11 @@ fi
+
+
+
++
++
++
++
++
+ #
+ # Check to make sure the static flag actually works.
+ #
+@@ -9414,7 +9932,8 @@ _LT_EOF
+ allow_undefined_flag=unsupported
+ always_export_symbols=no
+ enable_shared_with_static_runtimes=yes
+- export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1 DATA/'\'' | $SED -e '\''/^[AITW][ ]/s/.*[ ]//'\'' | sort | uniq > $export_symbols'
++ export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1 DATA/;s/^.*[ ]__nm__\([^ ]*\)[ ][^ ]*/\1 DATA/;/^I[ ]/d;/^[AITW][ ]/s/.* //'\'' | sort | uniq > $export_symbols'
++ exclude_expsyms='[_]+GLOBAL_OFFSET_TABLE_|[_]+GLOBAL__[FID]_.*|[_]+head_[A-Za-z0-9_]+_dll|[A-Za-z0-9_]+_dll_iname'
+
+ if $LD --help 2>&1 | $GREP 'auto-import' > /dev/null; then
+ archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib'
+@@ -9513,12 +10032,12 @@ _LT_EOF
+ whole_archive_flag_spec='--whole-archive$convenience --no-whole-archive'
+ hardcode_libdir_flag_spec=
+ hardcode_libdir_flag_spec_ld='-rpath $libdir'
+- archive_cmds='$LD -shared $libobjs $deplibs $compiler_flags -soname $soname -o $lib'
++ archive_cmds='$LD -shared $libobjs $deplibs $linker_flags -soname $soname -o $lib'
+ if test "x$supports_anon_versioning" = xyes; then
+ archive_expsym_cmds='echo "{ global:" > $output_objdir/$libname.ver~
+ cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~
+ echo "local: *; };" >> $output_objdir/$libname.ver~
+- $LD -shared $libobjs $deplibs $compiler_flags -soname $soname -version-script $output_objdir/$libname.ver -o $lib'
++ $LD -shared $libobjs $deplibs $linker_flags -soname $soname -version-script $output_objdir/$libname.ver -o $lib'
+ fi
+ ;;
+ esac
+@@ -9532,8 +10051,8 @@ _LT_EOF
+ archive_cmds='$LD -Bshareable $libobjs $deplibs $linker_flags -o $lib'
+ wlarc=
+ else
+- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
+- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
++ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
++ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
+ fi
+ ;;
+
+@@ -9551,8 +10070,8 @@ _LT_EOF
+
+ _LT_EOF
+ elif $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then
+- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
+- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
++ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
++ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
+ else
+ ld_shlibs=no
+ fi
+@@ -9598,8 +10117,8 @@ _LT_EOF
+
+ *)
+ if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then
+- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
+- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
++ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
++ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
+ else
+ ld_shlibs=no
+ fi
+@@ -9729,7 +10248,13 @@ _LT_EOF
+ allow_undefined_flag='-berok'
+ # Determine the default libpath from the value encoded in an
+ # empty executable.
+- cat confdefs.h - <<_ACEOF >conftest.$ac_ext
++ if test "${lt_cv_aix_libpath+set}" = set; then
++ aix_libpath=$lt_cv_aix_libpath
++else
++ if ${lt_cv_aix_libpath_+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+ /* end confdefs.h. */
+
+ int
+@@ -9742,22 +10267,29 @@ main ()
+ _ACEOF
+ if ac_fn_c_try_link "$LINENO"; then :
+
+-lt_aix_libpath_sed='
+- /Import File Strings/,/^$/ {
+- /^0/ {
+- s/^0 *\(.*\)$/\1/
+- p
+- }
+- }'
+-aix_libpath=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
+-# Check for a 64-bit object if we didn't find anything.
+-if test -z "$aix_libpath"; then
+- aix_libpath=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
+-fi
++ lt_aix_libpath_sed='
++ /Import File Strings/,/^$/ {
++ /^0/ {
++ s/^0 *\([^ ]*\) *$/\1/
++ p
++ }
++ }'
++ lt_cv_aix_libpath_=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
++ # Check for a 64-bit object if we didn't find anything.
++ if test -z "$lt_cv_aix_libpath_"; then
++ lt_cv_aix_libpath_=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
++ fi
+ fi
+ rm -f core conftest.err conftest.$ac_objext \
+ conftest$ac_exeext conftest.$ac_ext
+-if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
++ if test -z "$lt_cv_aix_libpath_"; then
++ lt_cv_aix_libpath_="/usr/lib:/lib"
++ fi
++
++fi
++
++ aix_libpath=$lt_cv_aix_libpath_
++fi
+
+ hardcode_libdir_flag_spec='${wl}-blibpath:$libdir:'"$aix_libpath"
+ archive_expsym_cmds='$CC -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags `if test "x${allow_undefined_flag}" != "x"; then func_echo_all "${wl}${allow_undefined_flag}"; else :; fi` '"\${wl}$exp_sym_flag:\$export_symbols $shared_flag"
+@@ -9769,7 +10301,13 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
+ else
+ # Determine the default libpath from the value encoded in an
+ # empty executable.
+- cat confdefs.h - <<_ACEOF >conftest.$ac_ext
++ if test "${lt_cv_aix_libpath+set}" = set; then
++ aix_libpath=$lt_cv_aix_libpath
++else
++ if ${lt_cv_aix_libpath_+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+ /* end confdefs.h. */
+
+ int
+@@ -9782,22 +10320,29 @@ main ()
+ _ACEOF
+ if ac_fn_c_try_link "$LINENO"; then :
+
+-lt_aix_libpath_sed='
+- /Import File Strings/,/^$/ {
+- /^0/ {
+- s/^0 *\(.*\)$/\1/
+- p
+- }
+- }'
+-aix_libpath=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
+-# Check for a 64-bit object if we didn't find anything.
+-if test -z "$aix_libpath"; then
+- aix_libpath=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
+-fi
++ lt_aix_libpath_sed='
++ /Import File Strings/,/^$/ {
++ /^0/ {
++ s/^0 *\([^ ]*\) *$/\1/
++ p
++ }
++ }'
++ lt_cv_aix_libpath_=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
++ # Check for a 64-bit object if we didn't find anything.
++ if test -z "$lt_cv_aix_libpath_"; then
++ lt_cv_aix_libpath_=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
++ fi
+ fi
+ rm -f core conftest.err conftest.$ac_objext \
+ conftest$ac_exeext conftest.$ac_ext
+-if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
++ if test -z "$lt_cv_aix_libpath_"; then
++ lt_cv_aix_libpath_="/usr/lib:/lib"
++ fi
++
++fi
++
++ aix_libpath=$lt_cv_aix_libpath_
++fi
+
+ hardcode_libdir_flag_spec='${wl}-blibpath:$libdir:'"$aix_libpath"
+ # Warning - without using the other run time loading flags,
+@@ -9842,20 +10387,63 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
+ # Microsoft Visual C++.
+ # hardcode_libdir_flag_spec is actually meaningless, as there is
+ # no search path for DLLs.
+- hardcode_libdir_flag_spec=' '
+- allow_undefined_flag=unsupported
+- # Tell ltmain to make .lib files, not .a files.
+- libext=lib
+- # Tell ltmain to make .dll files, not .so files.
+- shrext_cmds=".dll"
+- # FIXME: Setting linknames here is a bad hack.
+- archive_cmds='$CC -o $lib $libobjs $compiler_flags `func_echo_all "$deplibs" | $SED '\''s/ -lc$//'\''` -link -dll~linknames='
+- # The linker will automatically build a .lib file if we build a DLL.
+- old_archive_from_new_cmds='true'
+- # FIXME: Should let the user specify the lib program.
+- old_archive_cmds='lib -OUT:$oldlib$oldobjs$old_deplibs'
+- fix_srcfile_path='`cygpath -w "$srcfile"`'
+- enable_shared_with_static_runtimes=yes
++ case $cc_basename in
++ cl*)
++ # Native MSVC
++ hardcode_libdir_flag_spec=' '
++ allow_undefined_flag=unsupported
++ always_export_symbols=yes
++ file_list_spec='@'
++ # Tell ltmain to make .lib files, not .a files.
++ libext=lib
++ # Tell ltmain to make .dll files, not .so files.
++ shrext_cmds=".dll"
++ # FIXME: Setting linknames here is a bad hack.
++ archive_cmds='$CC -o $output_objdir/$soname $libobjs $compiler_flags $deplibs -Wl,-dll~linknames='
++ archive_expsym_cmds='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then
++ sed -n -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' -e '1\\\!p' < $export_symbols > $output_objdir/$soname.exp;
++ else
++ sed -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' < $export_symbols > $output_objdir/$soname.exp;
++ fi~
++ $CC -o $tool_output_objdir$soname $libobjs $compiler_flags $deplibs "@$tool_output_objdir$soname.exp" -Wl,-DLL,-IMPLIB:"$tool_output_objdir$libname.dll.lib"~
++ linknames='
++ # The linker will not automatically build a static lib if we build a DLL.
++ # _LT_TAGVAR(old_archive_from_new_cmds, )='true'
++ enable_shared_with_static_runtimes=yes
++ export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1,DATA/'\'' | $SED -e '\''/^[AITW][ ]/s/.*[ ]//'\'' | sort | uniq > $export_symbols'
++ # Don't use ranlib
++ old_postinstall_cmds='chmod 644 $oldlib'
++ postlink_cmds='lt_outputfile="@OUTPUT@"~
++ lt_tool_outputfile="@TOOL_OUTPUT@"~
++ case $lt_outputfile in
++ *.exe|*.EXE) ;;
++ *)
++ lt_outputfile="$lt_outputfile.exe"
++ lt_tool_outputfile="$lt_tool_outputfile.exe"
++ ;;
++ esac~
++ if test "$MANIFEST_TOOL" != ":" && test -f "$lt_outputfile.manifest"; then
++ $MANIFEST_TOOL -manifest "$lt_tool_outputfile.manifest" -outputresource:"$lt_tool_outputfile" || exit 1;
++ $RM "$lt_outputfile.manifest";
++ fi'
++ ;;
++ *)
++ # Assume MSVC wrapper
++ hardcode_libdir_flag_spec=' '
++ allow_undefined_flag=unsupported
++ # Tell ltmain to make .lib files, not .a files.
++ libext=lib
++ # Tell ltmain to make .dll files, not .so files.
++ shrext_cmds=".dll"
++ # FIXME: Setting linknames here is a bad hack.
++ archive_cmds='$CC -o $lib $libobjs $compiler_flags `func_echo_all "$deplibs" | $SED '\''s/ -lc$//'\''` -link -dll~linknames='
++ # The linker will automatically build a .lib file if we build a DLL.
++ old_archive_from_new_cmds='true'
++ # FIXME: Should let the user specify the lib program.
++ old_archive_cmds='lib -OUT:$oldlib$oldobjs$old_deplibs'
++ enable_shared_with_static_runtimes=yes
++ ;;
++ esac
+ ;;
+
+ darwin* | rhapsody*)
+@@ -9916,7 +10504,7 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
+
+ # FreeBSD 3 and greater uses gcc -shared to do shared libraries.
+ freebsd* | dragonfly*)
+- archive_cmds='$CC -shared -o $lib $libobjs $deplibs $compiler_flags'
++ archive_cmds='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags'
+ hardcode_libdir_flag_spec='-R$libdir'
+ hardcode_direct=yes
+ hardcode_shlibpath_var=no
+@@ -9924,7 +10512,7 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
+
+ hpux9*)
+ if test "$GCC" = yes; then
+- archive_cmds='$RM $output_objdir/$soname~$CC -shared -fPIC ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $libobjs $deplibs $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib'
++ archive_cmds='$RM $output_objdir/$soname~$CC -shared $pic_flag ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $libobjs $deplibs $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib'
+ else
+ archive_cmds='$RM $output_objdir/$soname~$LD -b +b $install_libdir -o $output_objdir/$soname $libobjs $deplibs $linker_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib'
+ fi
+@@ -9940,7 +10528,7 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
+
+ hpux10*)
+ if test "$GCC" = yes && test "$with_gnu_ld" = no; then
+- archive_cmds='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'
++ archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'
+ else
+ archive_cmds='$LD -b +h $soname +b $install_libdir -o $lib $libobjs $deplibs $linker_flags'
+ fi
+@@ -9964,10 +10552,10 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
+ archive_cmds='$CC -shared ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags'
+ ;;
+ ia64*)
+- archive_cmds='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags'
++ archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags'
+ ;;
+ *)
+- archive_cmds='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'
++ archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'
+ ;;
+ esac
+ else
+@@ -10046,23 +10634,36 @@ fi
+
+ irix5* | irix6* | nonstopux*)
+ if test "$GCC" = yes; then
+- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
++ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
+ # Try to use the -exported_symbol ld option, if it does not
+ # work, assume that -exports_file does not work either and
+ # implicitly export all symbols.
+- save_LDFLAGS="$LDFLAGS"
+- LDFLAGS="$LDFLAGS -shared ${wl}-exported_symbol ${wl}foo ${wl}-update_registry ${wl}/dev/null"
+- cat confdefs.h - <<_ACEOF >conftest.$ac_ext
++ # This should be the same for all languages, so no per-tag cache variable.
++ { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the $host_os linker accepts -exported_symbol" >&5
++$as_echo_n "checking whether the $host_os linker accepts -exported_symbol... " >&6; }
++if ${lt_cv_irix_exported_symbol+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ save_LDFLAGS="$LDFLAGS"
++ LDFLAGS="$LDFLAGS -shared ${wl}-exported_symbol ${wl}foo ${wl}-update_registry ${wl}/dev/null"
++ cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+ /* end confdefs.h. */
+-int foo(void) {}
++int foo (void) { return 0; }
+ _ACEOF
+ if ac_fn_c_try_link "$LINENO"; then :
+- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations ${wl}-exports_file ${wl}$export_symbols -o $lib'
+-
++ lt_cv_irix_exported_symbol=yes
++else
++ lt_cv_irix_exported_symbol=no
+ fi
+ rm -f core conftest.err conftest.$ac_objext \
+ conftest$ac_exeext conftest.$ac_ext
+- LDFLAGS="$save_LDFLAGS"
++ LDFLAGS="$save_LDFLAGS"
++fi
++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_irix_exported_symbol" >&5
++$as_echo "$lt_cv_irix_exported_symbol" >&6; }
++ if test "$lt_cv_irix_exported_symbol" = yes; then
++ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations ${wl}-exports_file ${wl}$export_symbols -o $lib'
++ fi
+ else
+ archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib'
+ archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -exports_file $export_symbols -o $lib'
+@@ -10147,7 +10748,7 @@ rm -f core conftest.err conftest.$ac_objext \
+ osf4* | osf5*) # as osf3* with the addition of -msym flag
+ if test "$GCC" = yes; then
+ allow_undefined_flag=' ${wl}-expect_unresolved ${wl}\*'
+- archive_cmds='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
++ archive_cmds='$CC -shared${allow_undefined_flag} $pic_flag $libobjs $deplibs $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
+ hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir'
+ else
+ allow_undefined_flag=' -expect_unresolved \*'
+@@ -10166,9 +10767,9 @@ rm -f core conftest.err conftest.$ac_objext \
+ no_undefined_flag=' -z defs'
+ if test "$GCC" = yes; then
+ wlarc='${wl}'
+- archive_cmds='$CC -shared ${wl}-z ${wl}text ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags'
++ archive_cmds='$CC -shared $pic_flag ${wl}-z ${wl}text ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags'
+ archive_expsym_cmds='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~
+- $CC -shared ${wl}-z ${wl}text ${wl}-M ${wl}$lib.exp ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp'
++ $CC -shared $pic_flag ${wl}-z ${wl}text ${wl}-M ${wl}$lib.exp ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp'
+ else
+ case `$CC -V 2>&1` in
+ *"Compilers 5.0"*)
+@@ -10744,8 +11345,9 @@ cygwin* | mingw* | pw32* | cegcc*)
+ need_version=no
+ need_lib_prefix=no
+
+- case $GCC,$host_os in
+- yes,cygwin* | yes,mingw* | yes,pw32* | yes,cegcc*)
++ case $GCC,$cc_basename in
++ yes,*)
++ # gcc
+ library_names_spec='$libname.dll.a'
+ # DLL is installed to $(libdir)/../bin by postinstall_cmds
+ postinstall_cmds='base_file=`basename \${file}`~
+@@ -10778,13 +11380,71 @@ cygwin* | mingw* | pw32* | cegcc*)
+ library_names_spec='`echo ${libname} | sed -e 's/^lib/pw/'``echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}'
+ ;;
+ esac
++ dynamic_linker='Win32 ld.exe'
++ ;;
++
++ *,cl*)
++ # Native MSVC
++ libname_spec='$name'
++ soname_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}'
++ library_names_spec='${libname}.dll.lib'
++
++ case $build_os in
++ mingw*)
++ sys_lib_search_path_spec=
++ lt_save_ifs=$IFS
++ IFS=';'
++ for lt_path in $LIB
++ do
++ IFS=$lt_save_ifs
++ # Let DOS variable expansion print the short 8.3 style file name.
++ lt_path=`cd "$lt_path" 2>/dev/null && cmd //C "for %i in (".") do @echo %~si"`
++ sys_lib_search_path_spec="$sys_lib_search_path_spec $lt_path"
++ done
++ IFS=$lt_save_ifs
++ # Convert to MSYS style.
++ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | sed -e 's|\\\\|/|g' -e 's| \\([a-zA-Z]\\):| /\\1|g' -e 's|^ ||'`
++ ;;
++ cygwin*)
++ # Convert to unix form, then to dos form, then back to unix form
++ # but this time dos style (no spaces!) so that the unix form looks
++ # like /cygdrive/c/PROGRA~1:/cygdr...
++ sys_lib_search_path_spec=`cygpath --path --unix "$LIB"`
++ sys_lib_search_path_spec=`cygpath --path --dos "$sys_lib_search_path_spec" 2>/dev/null`
++ sys_lib_search_path_spec=`cygpath --path --unix "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"`
++ ;;
++ *)
++ sys_lib_search_path_spec="$LIB"
++ if $ECHO "$sys_lib_search_path_spec" | $GREP ';[c-zC-Z]:/' >/dev/null; then
++ # It is most probably a Windows format PATH.
++ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e 's/;/ /g'`
++ else
++ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"`
++ fi
++ # FIXME: find the short name or the path components, as spaces are
++ # common. (e.g. "Program Files" -> "PROGRA~1")
++ ;;
++ esac
++
++ # DLL is installed to $(libdir)/../bin by postinstall_cmds
++ postinstall_cmds='base_file=`basename \${file}`~
++ dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\${base_file}'\''i; echo \$dlname'\''`~
++ dldir=$destdir/`dirname \$dlpath`~
++ test -d \$dldir || mkdir -p \$dldir~
++ $install_prog $dir/$dlname \$dldir/$dlname'
++ postuninstall_cmds='dldll=`$SHELL 2>&1 -c '\''. $file; echo \$dlname'\''`~
++ dlpath=$dir/\$dldll~
++ $RM \$dlpath'
++ shlibpath_overrides_runpath=yes
++ dynamic_linker='Win32 link.exe'
+ ;;
+
+ *)
++ # Assume MSVC wrapper
+ library_names_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext} $libname.lib'
++ dynamic_linker='Win32 ld.exe'
+ ;;
+ esac
+- dynamic_linker='Win32 ld.exe'
+ # FIXME: first we should search . and the directory the executable is in
+ shlibpath_var=PATH
+ ;;
+@@ -11686,7 +12346,7 @@ else
+ lt_dlunknown=0; lt_dlno_uscore=1; lt_dlneed_uscore=2
+ lt_status=$lt_dlunknown
+ cat > conftest.$ac_ext <<_LT_EOF
+-#line 11686 "configure"
++#line $LINENO "configure"
+ #include "confdefs.h"
+
+ #if HAVE_DLFCN_H
+@@ -11730,10 +12390,10 @@ else
+ /* When -fvisbility=hidden is used, assume the code has been annotated
+ correspondingly for the symbols needed. */
+ #if defined(__GNUC__) && (((__GNUC__ == 3) && (__GNUC_MINOR__ >= 3)) || (__GNUC__ > 3))
+-void fnord () __attribute__((visibility("default")));
++int fnord () __attribute__((visibility("default")));
+ #endif
+
+-void fnord () { int i=42; }
++int fnord () { return 42; }
+ int main ()
+ {
+ void *self = dlopen (0, LT_DLGLOBAL|LT_DLLAZY_OR_NOW);
+@@ -11792,7 +12452,7 @@ else
+ lt_dlunknown=0; lt_dlno_uscore=1; lt_dlneed_uscore=2
+ lt_status=$lt_dlunknown
+ cat > conftest.$ac_ext <<_LT_EOF
+-#line 11792 "configure"
++#line $LINENO "configure"
+ #include "confdefs.h"
+
+ #if HAVE_DLFCN_H
+@@ -11836,10 +12496,10 @@ else
+ /* When -fvisbility=hidden is used, assume the code has been annotated
+ correspondingly for the symbols needed. */
+ #if defined(__GNUC__) && (((__GNUC__ == 3) && (__GNUC_MINOR__ >= 3)) || (__GNUC__ > 3))
+-void fnord () __attribute__((visibility("default")));
++int fnord () __attribute__((visibility("default")));
+ #endif
+
+-void fnord () { int i=42; }
++int fnord () { return 42; }
+ int main ()
+ {
+ void *self = dlopen (0, LT_DLGLOBAL|LT_DLLAZY_OR_NOW);
+@@ -12231,6 +12891,7 @@ $RM -r conftest*
+
+ # Allow CC to be a program name with arguments.
+ lt_save_CC=$CC
++ lt_save_CFLAGS=$CFLAGS
+ lt_save_LD=$LD
+ lt_save_GCC=$GCC
+ GCC=$GXX
+@@ -12248,6 +12909,7 @@ $RM -r conftest*
+ fi
+ test -z "${LDCXX+set}" || LD=$LDCXX
+ CC=${CXX-"c++"}
++ CFLAGS=$CXXFLAGS
+ compiler=$CC
+ compiler_CXX=$CC
+ for cc_temp in $compiler""; do
+@@ -12530,7 +13192,13 @@ $as_echo_n "checking whether the $compiler linker ($LD) supports shared librarie
+ allow_undefined_flag_CXX='-berok'
+ # Determine the default libpath from the value encoded in an empty
+ # executable.
+- cat confdefs.h - <<_ACEOF >conftest.$ac_ext
++ if test "${lt_cv_aix_libpath+set}" = set; then
++ aix_libpath=$lt_cv_aix_libpath
++else
++ if ${lt_cv_aix_libpath__CXX+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+ /* end confdefs.h. */
+
+ int
+@@ -12543,22 +13211,29 @@ main ()
+ _ACEOF
+ if ac_fn_cxx_try_link "$LINENO"; then :
+
+-lt_aix_libpath_sed='
+- /Import File Strings/,/^$/ {
+- /^0/ {
+- s/^0 *\(.*\)$/\1/
+- p
+- }
+- }'
+-aix_libpath=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
+-# Check for a 64-bit object if we didn't find anything.
+-if test -z "$aix_libpath"; then
+- aix_libpath=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
+-fi
++ lt_aix_libpath_sed='
++ /Import File Strings/,/^$/ {
++ /^0/ {
++ s/^0 *\([^ ]*\) *$/\1/
++ p
++ }
++ }'
++ lt_cv_aix_libpath__CXX=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
++ # Check for a 64-bit object if we didn't find anything.
++ if test -z "$lt_cv_aix_libpath__CXX"; then
++ lt_cv_aix_libpath__CXX=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
++ fi
+ fi
+ rm -f core conftest.err conftest.$ac_objext \
+ conftest$ac_exeext conftest.$ac_ext
+-if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
++ if test -z "$lt_cv_aix_libpath__CXX"; then
++ lt_cv_aix_libpath__CXX="/usr/lib:/lib"
++ fi
++
++fi
++
++ aix_libpath=$lt_cv_aix_libpath__CXX
++fi
+
+ hardcode_libdir_flag_spec_CXX='${wl}-blibpath:$libdir:'"$aix_libpath"
+
+@@ -12571,7 +13246,13 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
+ else
+ # Determine the default libpath from the value encoded in an
+ # empty executable.
+- cat confdefs.h - <<_ACEOF >conftest.$ac_ext
++ if test "${lt_cv_aix_libpath+set}" = set; then
++ aix_libpath=$lt_cv_aix_libpath
++else
++ if ${lt_cv_aix_libpath__CXX+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+ /* end confdefs.h. */
+
+ int
+@@ -12584,22 +13265,29 @@ main ()
+ _ACEOF
+ if ac_fn_cxx_try_link "$LINENO"; then :
+
+-lt_aix_libpath_sed='
+- /Import File Strings/,/^$/ {
+- /^0/ {
+- s/^0 *\(.*\)$/\1/
+- p
+- }
+- }'
+-aix_libpath=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
+-# Check for a 64-bit object if we didn't find anything.
+-if test -z "$aix_libpath"; then
+- aix_libpath=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
+-fi
++ lt_aix_libpath_sed='
++ /Import File Strings/,/^$/ {
++ /^0/ {
++ s/^0 *\([^ ]*\) *$/\1/
++ p
++ }
++ }'
++ lt_cv_aix_libpath__CXX=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
++ # Check for a 64-bit object if we didn't find anything.
++ if test -z "$lt_cv_aix_libpath__CXX"; then
++ lt_cv_aix_libpath__CXX=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
++ fi
+ fi
+ rm -f core conftest.err conftest.$ac_objext \
+ conftest$ac_exeext conftest.$ac_ext
+-if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
++ if test -z "$lt_cv_aix_libpath__CXX"; then
++ lt_cv_aix_libpath__CXX="/usr/lib:/lib"
++ fi
++
++fi
++
++ aix_libpath=$lt_cv_aix_libpath__CXX
++fi
+
+ hardcode_libdir_flag_spec_CXX='${wl}-blibpath:$libdir:'"$aix_libpath"
+ # Warning - without using the other run time loading flags,
+@@ -12642,29 +13330,75 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
+ ;;
+
+ cygwin* | mingw* | pw32* | cegcc*)
+- # _LT_TAGVAR(hardcode_libdir_flag_spec, CXX) is actually meaningless,
+- # as there is no search path for DLLs.
+- hardcode_libdir_flag_spec_CXX='-L$libdir'
+- export_dynamic_flag_spec_CXX='${wl}--export-all-symbols'
+- allow_undefined_flag_CXX=unsupported
+- always_export_symbols_CXX=no
+- enable_shared_with_static_runtimes_CXX=yes
+-
+- if $LD --help 2>&1 | $GREP 'auto-import' > /dev/null; then
+- archive_cmds_CXX='$CC -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib'
+- # If the export-symbols file already is a .def file (1st line
+- # is EXPORTS), use it as is; otherwise, prepend...
+- archive_expsym_cmds_CXX='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then
+- cp $export_symbols $output_objdir/$soname.def;
+- else
+- echo EXPORTS > $output_objdir/$soname.def;
+- cat $export_symbols >> $output_objdir/$soname.def;
+- fi~
+- $CC -shared -nostdlib $output_objdir/$soname.def $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib'
+- else
+- ld_shlibs_CXX=no
+- fi
+- ;;
++ case $GXX,$cc_basename in
++ ,cl* | no,cl*)
++ # Native MSVC
++ # hardcode_libdir_flag_spec is actually meaningless, as there is
++ # no search path for DLLs.
++ hardcode_libdir_flag_spec_CXX=' '
++ allow_undefined_flag_CXX=unsupported
++ always_export_symbols_CXX=yes
++ file_list_spec_CXX='@'
++ # Tell ltmain to make .lib files, not .a files.
++ libext=lib
++ # Tell ltmain to make .dll files, not .so files.
++ shrext_cmds=".dll"
++ # FIXME: Setting linknames here is a bad hack.
++ archive_cmds_CXX='$CC -o $output_objdir/$soname $libobjs $compiler_flags $deplibs -Wl,-dll~linknames='
++ archive_expsym_cmds_CXX='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then
++ $SED -n -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' -e '1\\\!p' < $export_symbols > $output_objdir/$soname.exp;
++ else
++ $SED -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' < $export_symbols > $output_objdir/$soname.exp;
++ fi~
++ $CC -o $tool_output_objdir$soname $libobjs $compiler_flags $deplibs "@$tool_output_objdir$soname.exp" -Wl,-DLL,-IMPLIB:"$tool_output_objdir$libname.dll.lib"~
++ linknames='
++ # The linker will not automatically build a static lib if we build a DLL.
++ # _LT_TAGVAR(old_archive_from_new_cmds, CXX)='true'
++ enable_shared_with_static_runtimes_CXX=yes
++ # Don't use ranlib
++ old_postinstall_cmds_CXX='chmod 644 $oldlib'
++ postlink_cmds_CXX='lt_outputfile="@OUTPUT@"~
++ lt_tool_outputfile="@TOOL_OUTPUT@"~
++ case $lt_outputfile in
++ *.exe|*.EXE) ;;
++ *)
++ lt_outputfile="$lt_outputfile.exe"
++ lt_tool_outputfile="$lt_tool_outputfile.exe"
++ ;;
++ esac~
++ func_to_tool_file "$lt_outputfile"~
++ if test "$MANIFEST_TOOL" != ":" && test -f "$lt_outputfile.manifest"; then
++ $MANIFEST_TOOL -manifest "$lt_tool_outputfile.manifest" -outputresource:"$lt_tool_outputfile" || exit 1;
++ $RM "$lt_outputfile.manifest";
++ fi'
++ ;;
++ *)
++ # g++
++ # _LT_TAGVAR(hardcode_libdir_flag_spec, CXX) is actually meaningless,
++ # as there is no search path for DLLs.
++ hardcode_libdir_flag_spec_CXX='-L$libdir'
++ export_dynamic_flag_spec_CXX='${wl}--export-all-symbols'
++ allow_undefined_flag_CXX=unsupported
++ always_export_symbols_CXX=no
++ enable_shared_with_static_runtimes_CXX=yes
++
++ if $LD --help 2>&1 | $GREP 'auto-import' > /dev/null; then
++ archive_cmds_CXX='$CC -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib'
++ # If the export-symbols file already is a .def file (1st line
++ # is EXPORTS), use it as is; otherwise, prepend...
++ archive_expsym_cmds_CXX='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then
++ cp $export_symbols $output_objdir/$soname.def;
++ else
++ echo EXPORTS > $output_objdir/$soname.def;
++ cat $export_symbols >> $output_objdir/$soname.def;
++ fi~
++ $CC -shared -nostdlib $output_objdir/$soname.def $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib'
++ else
++ ld_shlibs_CXX=no
++ fi
++ ;;
++ esac
++ ;;
+ darwin* | rhapsody*)
+
+
+@@ -12770,7 +13504,7 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
+ ;;
+ *)
+ if test "$GXX" = yes; then
+- archive_cmds_CXX='$RM $output_objdir/$soname~$CC -shared -nostdlib -fPIC ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib'
++ archive_cmds_CXX='$RM $output_objdir/$soname~$CC -shared -nostdlib $pic_flag ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib'
+ else
+ # FIXME: insert proper C++ library support
+ ld_shlibs_CXX=no
+@@ -12841,10 +13575,10 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
+ archive_cmds_CXX='$CC -shared -nostdlib -fPIC ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags'
+ ;;
+ ia64*)
+- archive_cmds_CXX='$CC -shared -nostdlib -fPIC ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags'
++ archive_cmds_CXX='$CC -shared -nostdlib $pic_flag ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags'
+ ;;
+ *)
+- archive_cmds_CXX='$CC -shared -nostdlib -fPIC ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags'
++ archive_cmds_CXX='$CC -shared -nostdlib $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags'
+ ;;
+ esac
+ fi
+@@ -12885,9 +13619,9 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
+ *)
+ if test "$GXX" = yes; then
+ if test "$with_gnu_ld" = no; then
+- archive_cmds_CXX='$CC -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
++ archive_cmds_CXX='$CC -shared $pic_flag -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
+ else
+- archive_cmds_CXX='$CC -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` -o $lib'
++ archive_cmds_CXX='$CC -shared $pic_flag -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` -o $lib'
+ fi
+ fi
+ link_all_deplibs_CXX=yes
+@@ -13165,7 +13899,7 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
+ archive_cmds_CXX='$CC -shared -nostdlib ${allow_undefined_flag} $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
+ ;;
+ *)
+- archive_cmds_CXX='$CC -shared -nostdlib ${allow_undefined_flag} $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
++ archive_cmds_CXX='$CC -shared $pic_flag -nostdlib ${allow_undefined_flag} $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
+ ;;
+ esac
+
+@@ -13211,7 +13945,7 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
+
+ solaris*)
+ case $cc_basename in
+- CC*)
++ CC* | sunCC*)
+ # Sun C++ 4.2, 5.x and Centerline C++
+ archive_cmds_need_lc_CXX=yes
+ no_undefined_flag_CXX=' -zdefs'
+@@ -13252,9 +13986,9 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
+ if test "$GXX" = yes && test "$with_gnu_ld" = no; then
+ no_undefined_flag_CXX=' ${wl}-z ${wl}defs'
+ if $CC --version | $GREP -v '^2\.7' > /dev/null; then
+- archive_cmds_CXX='$CC -shared -nostdlib $LDFLAGS $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-h $wl$soname -o $lib'
++ archive_cmds_CXX='$CC -shared $pic_flag -nostdlib $LDFLAGS $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-h $wl$soname -o $lib'
+ archive_expsym_cmds_CXX='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~
+- $CC -shared -nostdlib ${wl}-M $wl$lib.exp -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~$RM $lib.exp'
++ $CC -shared $pic_flag -nostdlib ${wl}-M $wl$lib.exp -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~$RM $lib.exp'
+
+ # Commands to make compiler produce verbose output that lists
+ # what "hidden" libraries, object files and flags are used when
+@@ -13390,6 +14124,13 @@ private:
+ };
+ _LT_EOF
+
++
++_lt_libdeps_save_CFLAGS=$CFLAGS
++case "$CC $CFLAGS " in #(
++*\ -flto*\ *) CFLAGS="$CFLAGS -fno-lto" ;;
++*\ -fwhopr*\ *) CFLAGS="$CFLAGS -fno-whopr" ;;
++esac
++
+ if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5
+ (eval $ac_compile) 2>&5
+ ac_status=$?
+@@ -13403,7 +14144,7 @@ if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5
+ pre_test_object_deps_done=no
+
+ for p in `eval "$output_verbose_link_cmd"`; do
+- case $p in
++ case ${prev}${p} in
+
+ -L* | -R* | -l*)
+ # Some compilers place space between "-{L,R}" and the path.
+@@ -13412,13 +14153,22 @@ if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5
+ test $p = "-R"; then
+ prev=$p
+ continue
+- else
+- prev=
+ fi
+
++ # Expand the sysroot to ease extracting the directories later.
++ if test -z "$prev"; then
++ case $p in
++ -L*) func_stripname_cnf '-L' '' "$p"; prev=-L; p=$func_stripname_result ;;
++ -R*) func_stripname_cnf '-R' '' "$p"; prev=-R; p=$func_stripname_result ;;
++ -l*) func_stripname_cnf '-l' '' "$p"; prev=-l; p=$func_stripname_result ;;
++ esac
++ fi
++ case $p in
++ =*) func_stripname_cnf '=' '' "$p"; p=$lt_sysroot$func_stripname_result ;;
++ esac
+ if test "$pre_test_object_deps_done" = no; then
+- case $p in
+- -L* | -R*)
++ case ${prev} in
++ -L | -R)
+ # Internal compiler library paths should come after those
+ # provided the user. The postdeps already come after the
+ # user supplied libs so there is no need to process them.
+@@ -13438,8 +14188,10 @@ if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5
+ postdeps_CXX="${postdeps_CXX} ${prev}${p}"
+ fi
+ fi
++ prev=
+ ;;
+
++ *.lto.$objext) ;; # Ignore GCC LTO objects
+ *.$objext)
+ # This assumes that the test object file only shows up
+ # once in the compiler output.
+@@ -13475,6 +14227,7 @@ else
+ fi
+
+ $RM -f confest.$objext
++CFLAGS=$_lt_libdeps_save_CFLAGS
+
+ # PORTME: override above test on systems where it is broken
+ case $host_os in
+@@ -13510,7 +14263,7 @@ linux*)
+
+ solaris*)
+ case $cc_basename in
+- CC*)
++ CC* | sunCC*)
+ # The more standards-conforming stlport4 library is
+ # incompatible with the Cstd library. Avoid specifying
+ # it if it's in CXXFLAGS. Ignore libCrun as
+@@ -13575,8 +14328,6 @@ fi
+ lt_prog_compiler_pic_CXX=
+ lt_prog_compiler_static_CXX=
+
+-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $compiler option to produce PIC" >&5
+-$as_echo_n "checking for $compiler option to produce PIC... " >&6; }
+
+ # C++ specific cases for pic, static, wl, etc.
+ if test "$GXX" = yes; then
+@@ -13681,6 +14432,11 @@ $as_echo_n "checking for $compiler option to produce PIC... " >&6; }
+ ;;
+ esac
+ ;;
++ mingw* | cygwin* | os2* | pw32* | cegcc*)
++ # This hack is so that the source file can tell whether it is being
++ # built for inclusion in a dll (and should export symbols for example).
++ lt_prog_compiler_pic_CXX='-DDLL_EXPORT'
++ ;;
+ dgux*)
+ case $cc_basename in
+ ec++*)
+@@ -13833,7 +14589,7 @@ $as_echo_n "checking for $compiler option to produce PIC... " >&6; }
+ ;;
+ solaris*)
+ case $cc_basename in
+- CC*)
++ CC* | sunCC*)
+ # Sun C++ 4.2, 5.x and Centerline C++
+ lt_prog_compiler_pic_CXX='-KPIC'
+ lt_prog_compiler_static_CXX='-Bstatic'
+@@ -13898,10 +14654,17 @@ case $host_os in
+ lt_prog_compiler_pic_CXX="$lt_prog_compiler_pic_CXX -DPIC"
+ ;;
+ esac
+-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_prog_compiler_pic_CXX" >&5
+-$as_echo "$lt_prog_compiler_pic_CXX" >&6; }
+-
+
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $compiler option to produce PIC" >&5
++$as_echo_n "checking for $compiler option to produce PIC... " >&6; }
++if ${lt_cv_prog_compiler_pic_CXX+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ lt_cv_prog_compiler_pic_CXX=$lt_prog_compiler_pic_CXX
++fi
++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_pic_CXX" >&5
++$as_echo "$lt_cv_prog_compiler_pic_CXX" >&6; }
++lt_prog_compiler_pic_CXX=$lt_cv_prog_compiler_pic_CXX
+
+ #
+ # Check to make sure the PIC flag actually works.
+@@ -13959,6 +14722,8 @@ fi
+
+
+
++
++
+ #
+ # Check to make sure the static flag actually works.
+ #
+@@ -14136,6 +14901,7 @@ fi
+ $as_echo_n "checking whether the $compiler linker ($LD) supports shared libraries... " >&6; }
+
+ export_symbols_cmds_CXX='$NM $libobjs $convenience | $global_symbol_pipe | $SED '\''s/.* //'\'' | sort | uniq > $export_symbols'
++ exclude_expsyms_CXX='_GLOBAL_OFFSET_TABLE_|_GLOBAL__F[ID]_.*'
+ case $host_os in
+ aix[4-9]*)
+ # If we're using GNU nm, then we don't want the "-C" option.
+@@ -14150,15 +14916,20 @@ $as_echo_n "checking whether the $compiler linker ($LD) supports shared librarie
+ ;;
+ pw32*)
+ export_symbols_cmds_CXX="$ltdll_cmds"
+- ;;
++ ;;
+ cygwin* | mingw* | cegcc*)
+- export_symbols_cmds_CXX='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1 DATA/;/^.*[ ]__nm__/s/^.*[ ]__nm__\([^ ]*\)[ ][^ ]*/\1 DATA/;/^I[ ]/d;/^[AITW][ ]/s/.* //'\'' | sort | uniq > $export_symbols'
+- ;;
++ case $cc_basename in
++ cl*) ;;
++ *)
++ export_symbols_cmds_CXX='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1 DATA/;s/^.*[ ]__nm__\([^ ]*\)[ ][^ ]*/\1 DATA/;/^I[ ]/d;/^[AITW][ ]/s/.* //'\'' | sort | uniq > $export_symbols'
++ exclude_expsyms_CXX='[_]+GLOBAL_OFFSET_TABLE_|[_]+GLOBAL__[FID]_.*|[_]+head_[A-Za-z0-9_]+_dll|[A-Za-z0-9_]+_dll_iname'
++ ;;
++ esac
++ ;;
+ *)
+ export_symbols_cmds_CXX='$NM $libobjs $convenience | $global_symbol_pipe | $SED '\''s/.* //'\'' | sort | uniq > $export_symbols'
+- ;;
++ ;;
+ esac
+- exclude_expsyms_CXX='_GLOBAL_OFFSET_TABLE_|_GLOBAL__F[ID]_.*'
+
+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ld_shlibs_CXX" >&5
+ $as_echo "$ld_shlibs_CXX" >&6; }
+@@ -14421,8 +15192,9 @@ cygwin* | mingw* | pw32* | cegcc*)
+ need_version=no
+ need_lib_prefix=no
+
+- case $GCC,$host_os in
+- yes,cygwin* | yes,mingw* | yes,pw32* | yes,cegcc*)
++ case $GCC,$cc_basename in
++ yes,*)
++ # gcc
+ library_names_spec='$libname.dll.a'
+ # DLL is installed to $(libdir)/../bin by postinstall_cmds
+ postinstall_cmds='base_file=`basename \${file}`~
+@@ -14454,13 +15226,71 @@ cygwin* | mingw* | pw32* | cegcc*)
+ library_names_spec='`echo ${libname} | sed -e 's/^lib/pw/'``echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}'
+ ;;
+ esac
++ dynamic_linker='Win32 ld.exe'
++ ;;
++
++ *,cl*)
++ # Native MSVC
++ libname_spec='$name'
++ soname_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}'
++ library_names_spec='${libname}.dll.lib'
++
++ case $build_os in
++ mingw*)
++ sys_lib_search_path_spec=
++ lt_save_ifs=$IFS
++ IFS=';'
++ for lt_path in $LIB
++ do
++ IFS=$lt_save_ifs
++ # Let DOS variable expansion print the short 8.3 style file name.
++ lt_path=`cd "$lt_path" 2>/dev/null && cmd //C "for %i in (".") do @echo %~si"`
++ sys_lib_search_path_spec="$sys_lib_search_path_spec $lt_path"
++ done
++ IFS=$lt_save_ifs
++ # Convert to MSYS style.
++ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | sed -e 's|\\\\|/|g' -e 's| \\([a-zA-Z]\\):| /\\1|g' -e 's|^ ||'`
++ ;;
++ cygwin*)
++ # Convert to unix form, then to dos form, then back to unix form
++ # but this time dos style (no spaces!) so that the unix form looks
++ # like /cygdrive/c/PROGRA~1:/cygdr...
++ sys_lib_search_path_spec=`cygpath --path --unix "$LIB"`
++ sys_lib_search_path_spec=`cygpath --path --dos "$sys_lib_search_path_spec" 2>/dev/null`
++ sys_lib_search_path_spec=`cygpath --path --unix "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"`
++ ;;
++ *)
++ sys_lib_search_path_spec="$LIB"
++ if $ECHO "$sys_lib_search_path_spec" | $GREP ';[c-zC-Z]:/' >/dev/null; then
++ # It is most probably a Windows format PATH.
++ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e 's/;/ /g'`
++ else
++ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"`
++ fi
++ # FIXME: find the short name or the path components, as spaces are
++ # common. (e.g. "Program Files" -> "PROGRA~1")
++ ;;
++ esac
++
++ # DLL is installed to $(libdir)/../bin by postinstall_cmds
++ postinstall_cmds='base_file=`basename \${file}`~
++ dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\${base_file}'\''i; echo \$dlname'\''`~
++ dldir=$destdir/`dirname \$dlpath`~
++ test -d \$dldir || mkdir -p \$dldir~
++ $install_prog $dir/$dlname \$dldir/$dlname'
++ postuninstall_cmds='dldll=`$SHELL 2>&1 -c '\''. $file; echo \$dlname'\''`~
++ dlpath=$dir/\$dldll~
++ $RM \$dlpath'
++ shlibpath_overrides_runpath=yes
++ dynamic_linker='Win32 link.exe'
+ ;;
+
+ *)
++ # Assume MSVC wrapper
+ library_names_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext} $libname.lib'
++ dynamic_linker='Win32 ld.exe'
+ ;;
+ esac
+- dynamic_linker='Win32 ld.exe'
+ # FIXME: first we should search . and the directory the executable is in
+ shlibpath_var=PATH
+ ;;
+@@ -15024,6 +15854,7 @@ fi
+ fi # test -n "$compiler"
+
+ CC=$lt_save_CC
++ CFLAGS=$lt_save_CFLAGS
+ LDCXX=$LD
+ LD=$lt_save_LD
+ GCC=$lt_save_GCC
+@@ -20310,13 +21141,20 @@ exeext='`$ECHO "$exeext" | $SED "$delay_single_quote_subst"`'
+ lt_unset='`$ECHO "$lt_unset" | $SED "$delay_single_quote_subst"`'
+ lt_SP2NL='`$ECHO "$lt_SP2NL" | $SED "$delay_single_quote_subst"`'
+ lt_NL2SP='`$ECHO "$lt_NL2SP" | $SED "$delay_single_quote_subst"`'
++lt_cv_to_host_file_cmd='`$ECHO "$lt_cv_to_host_file_cmd" | $SED "$delay_single_quote_subst"`'
++lt_cv_to_tool_file_cmd='`$ECHO "$lt_cv_to_tool_file_cmd" | $SED "$delay_single_quote_subst"`'
+ reload_flag='`$ECHO "$reload_flag" | $SED "$delay_single_quote_subst"`'
+ reload_cmds='`$ECHO "$reload_cmds" | $SED "$delay_single_quote_subst"`'
+ OBJDUMP='`$ECHO "$OBJDUMP" | $SED "$delay_single_quote_subst"`'
+ deplibs_check_method='`$ECHO "$deplibs_check_method" | $SED "$delay_single_quote_subst"`'
+ file_magic_cmd='`$ECHO "$file_magic_cmd" | $SED "$delay_single_quote_subst"`'
++file_magic_glob='`$ECHO "$file_magic_glob" | $SED "$delay_single_quote_subst"`'
++want_nocaseglob='`$ECHO "$want_nocaseglob" | $SED "$delay_single_quote_subst"`'
++DLLTOOL='`$ECHO "$DLLTOOL" | $SED "$delay_single_quote_subst"`'
++sharedlib_from_linklib_cmd='`$ECHO "$sharedlib_from_linklib_cmd" | $SED "$delay_single_quote_subst"`'
+ AR='`$ECHO "$AR" | $SED "$delay_single_quote_subst"`'
+ AR_FLAGS='`$ECHO "$AR_FLAGS" | $SED "$delay_single_quote_subst"`'
++archiver_list_spec='`$ECHO "$archiver_list_spec" | $SED "$delay_single_quote_subst"`'
+ STRIP='`$ECHO "$STRIP" | $SED "$delay_single_quote_subst"`'
+ RANLIB='`$ECHO "$RANLIB" | $SED "$delay_single_quote_subst"`'
+ old_postinstall_cmds='`$ECHO "$old_postinstall_cmds" | $SED "$delay_single_quote_subst"`'
+@@ -20331,14 +21169,17 @@ lt_cv_sys_global_symbol_pipe='`$ECHO "$lt_cv_sys_global_symbol_pipe" | $SED "$de
+ lt_cv_sys_global_symbol_to_cdecl='`$ECHO "$lt_cv_sys_global_symbol_to_cdecl" | $SED "$delay_single_quote_subst"`'
+ lt_cv_sys_global_symbol_to_c_name_address='`$ECHO "$lt_cv_sys_global_symbol_to_c_name_address" | $SED "$delay_single_quote_subst"`'
+ lt_cv_sys_global_symbol_to_c_name_address_lib_prefix='`$ECHO "$lt_cv_sys_global_symbol_to_c_name_address_lib_prefix" | $SED "$delay_single_quote_subst"`'
++nm_file_list_spec='`$ECHO "$nm_file_list_spec" | $SED "$delay_single_quote_subst"`'
++lt_sysroot='`$ECHO "$lt_sysroot" | $SED "$delay_single_quote_subst"`'
+ objdir='`$ECHO "$objdir" | $SED "$delay_single_quote_subst"`'
+ MAGIC_CMD='`$ECHO "$MAGIC_CMD" | $SED "$delay_single_quote_subst"`'
+ lt_prog_compiler_no_builtin_flag='`$ECHO "$lt_prog_compiler_no_builtin_flag" | $SED "$delay_single_quote_subst"`'
+-lt_prog_compiler_wl='`$ECHO "$lt_prog_compiler_wl" | $SED "$delay_single_quote_subst"`'
+ lt_prog_compiler_pic='`$ECHO "$lt_prog_compiler_pic" | $SED "$delay_single_quote_subst"`'
++lt_prog_compiler_wl='`$ECHO "$lt_prog_compiler_wl" | $SED "$delay_single_quote_subst"`'
+ lt_prog_compiler_static='`$ECHO "$lt_prog_compiler_static" | $SED "$delay_single_quote_subst"`'
+ lt_cv_prog_compiler_c_o='`$ECHO "$lt_cv_prog_compiler_c_o" | $SED "$delay_single_quote_subst"`'
+ need_locks='`$ECHO "$need_locks" | $SED "$delay_single_quote_subst"`'
++MANIFEST_TOOL='`$ECHO "$MANIFEST_TOOL" | $SED "$delay_single_quote_subst"`'
+ DSYMUTIL='`$ECHO "$DSYMUTIL" | $SED "$delay_single_quote_subst"`'
+ NMEDIT='`$ECHO "$NMEDIT" | $SED "$delay_single_quote_subst"`'
+ LIPO='`$ECHO "$LIPO" | $SED "$delay_single_quote_subst"`'
+@@ -20371,12 +21212,12 @@ hardcode_shlibpath_var='`$ECHO "$hardcode_shlibpath_var" | $SED "$delay_single_q
+ hardcode_automatic='`$ECHO "$hardcode_automatic" | $SED "$delay_single_quote_subst"`'
+ inherit_rpath='`$ECHO "$inherit_rpath" | $SED "$delay_single_quote_subst"`'
+ link_all_deplibs='`$ECHO "$link_all_deplibs" | $SED "$delay_single_quote_subst"`'
+-fix_srcfile_path='`$ECHO "$fix_srcfile_path" | $SED "$delay_single_quote_subst"`'
+ always_export_symbols='`$ECHO "$always_export_symbols" | $SED "$delay_single_quote_subst"`'
+ export_symbols_cmds='`$ECHO "$export_symbols_cmds" | $SED "$delay_single_quote_subst"`'
+ exclude_expsyms='`$ECHO "$exclude_expsyms" | $SED "$delay_single_quote_subst"`'
+ include_expsyms='`$ECHO "$include_expsyms" | $SED "$delay_single_quote_subst"`'
+ prelink_cmds='`$ECHO "$prelink_cmds" | $SED "$delay_single_quote_subst"`'
++postlink_cmds='`$ECHO "$postlink_cmds" | $SED "$delay_single_quote_subst"`'
+ file_list_spec='`$ECHO "$file_list_spec" | $SED "$delay_single_quote_subst"`'
+ variables_saved_for_relink='`$ECHO "$variables_saved_for_relink" | $SED "$delay_single_quote_subst"`'
+ need_lib_prefix='`$ECHO "$need_lib_prefix" | $SED "$delay_single_quote_subst"`'
+@@ -20415,8 +21256,8 @@ old_archive_cmds_CXX='`$ECHO "$old_archive_cmds_CXX" | $SED "$delay_single_quote
+ compiler_CXX='`$ECHO "$compiler_CXX" | $SED "$delay_single_quote_subst"`'
+ GCC_CXX='`$ECHO "$GCC_CXX" | $SED "$delay_single_quote_subst"`'
+ lt_prog_compiler_no_builtin_flag_CXX='`$ECHO "$lt_prog_compiler_no_builtin_flag_CXX" | $SED "$delay_single_quote_subst"`'
+-lt_prog_compiler_wl_CXX='`$ECHO "$lt_prog_compiler_wl_CXX" | $SED "$delay_single_quote_subst"`'
+ lt_prog_compiler_pic_CXX='`$ECHO "$lt_prog_compiler_pic_CXX" | $SED "$delay_single_quote_subst"`'
++lt_prog_compiler_wl_CXX='`$ECHO "$lt_prog_compiler_wl_CXX" | $SED "$delay_single_quote_subst"`'
+ lt_prog_compiler_static_CXX='`$ECHO "$lt_prog_compiler_static_CXX" | $SED "$delay_single_quote_subst"`'
+ lt_cv_prog_compiler_c_o_CXX='`$ECHO "$lt_cv_prog_compiler_c_o_CXX" | $SED "$delay_single_quote_subst"`'
+ archive_cmds_need_lc_CXX='`$ECHO "$archive_cmds_need_lc_CXX" | $SED "$delay_single_quote_subst"`'
+@@ -20443,12 +21284,12 @@ hardcode_shlibpath_var_CXX='`$ECHO "$hardcode_shlibpath_var_CXX" | $SED "$delay_
+ hardcode_automatic_CXX='`$ECHO "$hardcode_automatic_CXX" | $SED "$delay_single_quote_subst"`'
+ inherit_rpath_CXX='`$ECHO "$inherit_rpath_CXX" | $SED "$delay_single_quote_subst"`'
+ link_all_deplibs_CXX='`$ECHO "$link_all_deplibs_CXX" | $SED "$delay_single_quote_subst"`'
+-fix_srcfile_path_CXX='`$ECHO "$fix_srcfile_path_CXX" | $SED "$delay_single_quote_subst"`'
+ always_export_symbols_CXX='`$ECHO "$always_export_symbols_CXX" | $SED "$delay_single_quote_subst"`'
+ export_symbols_cmds_CXX='`$ECHO "$export_symbols_cmds_CXX" | $SED "$delay_single_quote_subst"`'
+ exclude_expsyms_CXX='`$ECHO "$exclude_expsyms_CXX" | $SED "$delay_single_quote_subst"`'
+ include_expsyms_CXX='`$ECHO "$include_expsyms_CXX" | $SED "$delay_single_quote_subst"`'
+ prelink_cmds_CXX='`$ECHO "$prelink_cmds_CXX" | $SED "$delay_single_quote_subst"`'
++postlink_cmds_CXX='`$ECHO "$postlink_cmds_CXX" | $SED "$delay_single_quote_subst"`'
+ file_list_spec_CXX='`$ECHO "$file_list_spec_CXX" | $SED "$delay_single_quote_subst"`'
+ hardcode_action_CXX='`$ECHO "$hardcode_action_CXX" | $SED "$delay_single_quote_subst"`'
+ compiler_lib_search_dirs_CXX='`$ECHO "$compiler_lib_search_dirs_CXX" | $SED "$delay_single_quote_subst"`'
+@@ -20486,8 +21327,13 @@ reload_flag \
+ OBJDUMP \
+ deplibs_check_method \
+ file_magic_cmd \
++file_magic_glob \
++want_nocaseglob \
++DLLTOOL \
++sharedlib_from_linklib_cmd \
+ AR \
+ AR_FLAGS \
++archiver_list_spec \
+ STRIP \
+ RANLIB \
+ CC \
+@@ -20497,12 +21343,14 @@ lt_cv_sys_global_symbol_pipe \
+ lt_cv_sys_global_symbol_to_cdecl \
+ lt_cv_sys_global_symbol_to_c_name_address \
+ lt_cv_sys_global_symbol_to_c_name_address_lib_prefix \
++nm_file_list_spec \
+ lt_prog_compiler_no_builtin_flag \
+-lt_prog_compiler_wl \
+ lt_prog_compiler_pic \
++lt_prog_compiler_wl \
+ lt_prog_compiler_static \
+ lt_cv_prog_compiler_c_o \
+ need_locks \
++MANIFEST_TOOL \
+ DSYMUTIL \
+ NMEDIT \
+ LIPO \
+@@ -20518,7 +21366,6 @@ no_undefined_flag \
+ hardcode_libdir_flag_spec \
+ hardcode_libdir_flag_spec_ld \
+ hardcode_libdir_separator \
+-fix_srcfile_path \
+ exclude_expsyms \
+ include_expsyms \
+ file_list_spec \
+@@ -20540,8 +21387,8 @@ LD_CXX \
+ reload_flag_CXX \
+ compiler_CXX \
+ lt_prog_compiler_no_builtin_flag_CXX \
+-lt_prog_compiler_wl_CXX \
+ lt_prog_compiler_pic_CXX \
++lt_prog_compiler_wl_CXX \
+ lt_prog_compiler_static_CXX \
+ lt_cv_prog_compiler_c_o_CXX \
+ export_dynamic_flag_spec_CXX \
+@@ -20553,7 +21400,6 @@ no_undefined_flag_CXX \
+ hardcode_libdir_flag_spec_CXX \
+ hardcode_libdir_flag_spec_ld_CXX \
+ hardcode_libdir_separator_CXX \
+-fix_srcfile_path_CXX \
+ exclude_expsyms_CXX \
+ include_expsyms_CXX \
+ file_list_spec_CXX \
+@@ -20587,6 +21433,7 @@ module_cmds \
+ module_expsym_cmds \
+ export_symbols_cmds \
+ prelink_cmds \
++postlink_cmds \
+ postinstall_cmds \
+ postuninstall_cmds \
+ finish_cmds \
+@@ -20601,7 +21448,8 @@ archive_expsym_cmds_CXX \
+ module_cmds_CXX \
+ module_expsym_cmds_CXX \
+ export_symbols_cmds_CXX \
+-prelink_cmds_CXX; do
++prelink_cmds_CXX \
++postlink_cmds_CXX; do
+ case \`eval \\\\\$ECHO \\\\""\\\\\$\$var"\\\\"\` in
+ *[\\\\\\\`\\"\\\$]*)
+ eval "lt_\$var=\\\\\\"\\\`\\\$ECHO \\"\\\$\$var\\" | \\\$SED -e \\"\\\$double_quote_subst\\" -e \\"\\\$sed_quote_subst\\" -e \\"\\\$delay_variable_subst\\"\\\`\\\\\\""
+@@ -21374,7 +22222,8 @@ $as_echo X"$file" |
+ # NOTE: Changes made to this file will be lost: look at ltmain.sh.
+ #
+ # Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005,
+-# 2006, 2007, 2008, 2009 Free Software Foundation, Inc.
++# 2006, 2007, 2008, 2009, 2010 Free Software Foundation,
++# Inc.
+ # Written by Gordon Matzigkeit, 1996
+ #
+ # This file is part of GNU Libtool.
+@@ -21477,19 +22326,42 @@ SP2NL=$lt_lt_SP2NL
+ # turn newlines into spaces.
+ NL2SP=$lt_lt_NL2SP
+
++# convert \$build file names to \$host format.
++to_host_file_cmd=$lt_cv_to_host_file_cmd
++
++# convert \$build files to toolchain format.
++to_tool_file_cmd=$lt_cv_to_tool_file_cmd
++
+ # An object symbol dumper.
+ OBJDUMP=$lt_OBJDUMP
+
+ # Method to check whether dependent libraries are shared objects.
+ deplibs_check_method=$lt_deplibs_check_method
+
+-# Command to use when deplibs_check_method == "file_magic".
++# Command to use when deplibs_check_method = "file_magic".
+ file_magic_cmd=$lt_file_magic_cmd
+
++# How to find potential files when deplibs_check_method = "file_magic".
++file_magic_glob=$lt_file_magic_glob
++
++# Find potential files using nocaseglob when deplibs_check_method = "file_magic".
++want_nocaseglob=$lt_want_nocaseglob
++
++# DLL creation program.
++DLLTOOL=$lt_DLLTOOL
++
++# Command to associate shared and link libraries.
++sharedlib_from_linklib_cmd=$lt_sharedlib_from_linklib_cmd
++
+ # The archiver.
+ AR=$lt_AR
++
++# Flags to create an archive.
+ AR_FLAGS=$lt_AR_FLAGS
+
++# How to feed a file listing to the archiver.
++archiver_list_spec=$lt_archiver_list_spec
++
+ # A symbol stripping program.
+ STRIP=$lt_STRIP
+
+@@ -21519,6 +22391,12 @@ global_symbol_to_c_name_address=$lt_lt_cv_sys_global_symbol_to_c_name_address
+ # Transform the output of nm in a C name address pair when lib prefix is needed.
+ global_symbol_to_c_name_address_lib_prefix=$lt_lt_cv_sys_global_symbol_to_c_name_address_lib_prefix
+
++# Specify filename containing input files for \$NM.
++nm_file_list_spec=$lt_nm_file_list_spec
++
++# The root where to search for dependent libraries,and in which our libraries should be installed.
++lt_sysroot=$lt_sysroot
++
+ # The name of the directory that contains temporary libtool files.
+ objdir=$objdir
+
+@@ -21528,6 +22406,9 @@ MAGIC_CMD=$MAGIC_CMD
+ # Must we lock files when doing compilation?
+ need_locks=$lt_need_locks
+
++# Manifest tool.
++MANIFEST_TOOL=$lt_MANIFEST_TOOL
++
+ # Tool to manipulate archived DWARF debug symbol files on Mac OS X.
+ DSYMUTIL=$lt_DSYMUTIL
+
+@@ -21642,12 +22523,12 @@ with_gcc=$GCC
+ # Compiler flag to turn off builtin functions.
+ no_builtin_flag=$lt_lt_prog_compiler_no_builtin_flag
+
+-# How to pass a linker flag through the compiler.
+-wl=$lt_lt_prog_compiler_wl
+-
+ # Additional compiler flags for building library objects.
+ pic_flag=$lt_lt_prog_compiler_pic
+
++# How to pass a linker flag through the compiler.
++wl=$lt_lt_prog_compiler_wl
++
+ # Compiler flag to prevent dynamic linking.
+ link_static_flag=$lt_lt_prog_compiler_static
+
+@@ -21734,9 +22615,6 @@ inherit_rpath=$inherit_rpath
+ # Whether libtool must link a program against all its dependency libraries.
+ link_all_deplibs=$link_all_deplibs
+
+-# Fix the shell variable \$srcfile for the compiler.
+-fix_srcfile_path=$lt_fix_srcfile_path
+-
+ # Set to "yes" if exported symbols are required.
+ always_export_symbols=$always_export_symbols
+
+@@ -21752,6 +22630,9 @@ include_expsyms=$lt_include_expsyms
+ # Commands necessary for linking programs (against libraries) with templates.
+ prelink_cmds=$lt_prelink_cmds
+
++# Commands necessary for finishing linking programs.
++postlink_cmds=$lt_postlink_cmds
++
+ # Specify filename containing input files.
+ file_list_spec=$lt_file_list_spec
+
+@@ -21798,210 +22679,169 @@ ltmain="$ac_aux_dir/ltmain.sh"
+ # if finds mixed CR/LF and LF-only lines. Since sed operates in
+ # text mode, it properly converts lines to CR/LF. This bash problem
+ # is reportedly fixed, but why not run on old versions too?
+- sed '/^# Generated shell functions inserted here/q' "$ltmain" >> "$cfgfile" \
+- || (rm -f "$cfgfile"; exit 1)
+-
+- case $xsi_shell in
+- yes)
+- cat << \_LT_EOF >> "$cfgfile"
+-
+-# func_dirname file append nondir_replacement
+-# Compute the dirname of FILE. If nonempty, add APPEND to the result,
+-# otherwise set result to NONDIR_REPLACEMENT.
+-func_dirname ()
+-{
+- case ${1} in
+- */*) func_dirname_result="${1%/*}${2}" ;;
+- * ) func_dirname_result="${3}" ;;
+- esac
+-}
+-
+-# func_basename file
+-func_basename ()
+-{
+- func_basename_result="${1##*/}"
+-}
+-
+-# func_dirname_and_basename file append nondir_replacement
+-# perform func_basename and func_dirname in a single function
+-# call:
+-# dirname: Compute the dirname of FILE. If nonempty,
+-# add APPEND to the result, otherwise set result
+-# to NONDIR_REPLACEMENT.
+-# value returned in "$func_dirname_result"
+-# basename: Compute filename of FILE.
+-# value retuned in "$func_basename_result"
+-# Implementation must be kept synchronized with func_dirname
+-# and func_basename. For efficiency, we do not delegate to
+-# those functions but instead duplicate the functionality here.
+-func_dirname_and_basename ()
+-{
+- case ${1} in
+- */*) func_dirname_result="${1%/*}${2}" ;;
+- * ) func_dirname_result="${3}" ;;
+- esac
+- func_basename_result="${1##*/}"
+-}
+-
+-# func_stripname prefix suffix name
+-# strip PREFIX and SUFFIX off of NAME.
+-# PREFIX and SUFFIX must not contain globbing or regex special
+-# characters, hashes, percent signs, but SUFFIX may contain a leading
+-# dot (in which case that matches only a dot).
+-func_stripname ()
+-{
+- # pdksh 5.2.14 does not do ${X%$Y} correctly if both X and Y are
+- # positional parameters, so assign one to ordinary parameter first.
+- func_stripname_result=${3}
+- func_stripname_result=${func_stripname_result#"${1}"}
+- func_stripname_result=${func_stripname_result%"${2}"}
+-}
+-
+-# func_opt_split
+-func_opt_split ()
+-{
+- func_opt_split_opt=${1%%=*}
+- func_opt_split_arg=${1#*=}
+-}
+-
+-# func_lo2o object
+-func_lo2o ()
+-{
+- case ${1} in
+- *.lo) func_lo2o_result=${1%.lo}.${objext} ;;
+- *) func_lo2o_result=${1} ;;
+- esac
+-}
+-
+-# func_xform libobj-or-source
+-func_xform ()
+-{
+- func_xform_result=${1%.*}.lo
+-}
+-
+-# func_arith arithmetic-term...
+-func_arith ()
+-{
+- func_arith_result=$(( $* ))
+-}
+-
+-# func_len string
+-# STRING may not start with a hyphen.
+-func_len ()
+-{
+- func_len_result=${#1}
+-}
+-
+-_LT_EOF
+- ;;
+- *) # Bourne compatible functions.
+- cat << \_LT_EOF >> "$cfgfile"
+-
+-# func_dirname file append nondir_replacement
+-# Compute the dirname of FILE. If nonempty, add APPEND to the result,
+-# otherwise set result to NONDIR_REPLACEMENT.
+-func_dirname ()
+-{
+- # Extract subdirectory from the argument.
+- func_dirname_result=`$ECHO "${1}" | $SED "$dirname"`
+- if test "X$func_dirname_result" = "X${1}"; then
+- func_dirname_result="${3}"
+- else
+- func_dirname_result="$func_dirname_result${2}"
+- fi
+-}
+-
+-# func_basename file
+-func_basename ()
+-{
+- func_basename_result=`$ECHO "${1}" | $SED "$basename"`
+-}
+-
+-
+-# func_stripname prefix suffix name
+-# strip PREFIX and SUFFIX off of NAME.
+-# PREFIX and SUFFIX must not contain globbing or regex special
+-# characters, hashes, percent signs, but SUFFIX may contain a leading
+-# dot (in which case that matches only a dot).
+-# func_strip_suffix prefix name
+-func_stripname ()
+-{
+- case ${2} in
+- .*) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%\\\\${2}\$%%"`;;
+- *) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%${2}\$%%"`;;
+- esac
+-}
+-
+-# sed scripts:
+-my_sed_long_opt='1s/^\(-[^=]*\)=.*/\1/;q'
+-my_sed_long_arg='1s/^-[^=]*=//'
+-
+-# func_opt_split
+-func_opt_split ()
+-{
+- func_opt_split_opt=`$ECHO "${1}" | $SED "$my_sed_long_opt"`
+- func_opt_split_arg=`$ECHO "${1}" | $SED "$my_sed_long_arg"`
+-}
+-
+-# func_lo2o object
+-func_lo2o ()
+-{
+- func_lo2o_result=`$ECHO "${1}" | $SED "$lo2o"`
+-}
+-
+-# func_xform libobj-or-source
+-func_xform ()
+-{
+- func_xform_result=`$ECHO "${1}" | $SED 's/\.[^.]*$/.lo/'`
+-}
+-
+-# func_arith arithmetic-term...
+-func_arith ()
+-{
+- func_arith_result=`expr "$@"`
+-}
+-
+-# func_len string
+-# STRING may not start with a hyphen.
+-func_len ()
+-{
+- func_len_result=`expr "$1" : ".*" 2>/dev/null || echo $max_cmd_len`
+-}
+-
+-_LT_EOF
+-esac
+-
+-case $lt_shell_append in
+- yes)
+- cat << \_LT_EOF >> "$cfgfile"
+-
+-# func_append var value
+-# Append VALUE to the end of shell variable VAR.
+-func_append ()
+-{
+- eval "$1+=\$2"
+-}
+-_LT_EOF
+- ;;
+- *)
+- cat << \_LT_EOF >> "$cfgfile"
+-
+-# func_append var value
+-# Append VALUE to the end of shell variable VAR.
+-func_append ()
+-{
+- eval "$1=\$$1\$2"
+-}
+-
+-_LT_EOF
+- ;;
+- esac
+-
+-
+- sed -n '/^# Generated shell functions inserted here/,$p' "$ltmain" >> "$cfgfile" \
+- || (rm -f "$cfgfile"; exit 1)
+-
+- mv -f "$cfgfile" "$ofile" ||
++ sed '$q' "$ltmain" >> "$cfgfile" \
++ || (rm -f "$cfgfile"; exit 1)
++
++ if test x"$xsi_shell" = xyes; then
++ sed -e '/^func_dirname ()$/,/^} # func_dirname /c\
++func_dirname ()\
++{\
++\ case ${1} in\
++\ */*) func_dirname_result="${1%/*}${2}" ;;\
++\ * ) func_dirname_result="${3}" ;;\
++\ esac\
++} # Extended-shell func_dirname implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++
++ sed -e '/^func_basename ()$/,/^} # func_basename /c\
++func_basename ()\
++{\
++\ func_basename_result="${1##*/}"\
++} # Extended-shell func_basename implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++
++ sed -e '/^func_dirname_and_basename ()$/,/^} # func_dirname_and_basename /c\
++func_dirname_and_basename ()\
++{\
++\ case ${1} in\
++\ */*) func_dirname_result="${1%/*}${2}" ;;\
++\ * ) func_dirname_result="${3}" ;;\
++\ esac\
++\ func_basename_result="${1##*/}"\
++} # Extended-shell func_dirname_and_basename implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++
++ sed -e '/^func_stripname ()$/,/^} # func_stripname /c\
++func_stripname ()\
++{\
++\ # pdksh 5.2.14 does not do ${X%$Y} correctly if both X and Y are\
++\ # positional parameters, so assign one to ordinary parameter first.\
++\ func_stripname_result=${3}\
++\ func_stripname_result=${func_stripname_result#"${1}"}\
++\ func_stripname_result=${func_stripname_result%"${2}"}\
++} # Extended-shell func_stripname implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++
++ sed -e '/^func_split_long_opt ()$/,/^} # func_split_long_opt /c\
++func_split_long_opt ()\
++{\
++\ func_split_long_opt_name=${1%%=*}\
++\ func_split_long_opt_arg=${1#*=}\
++} # Extended-shell func_split_long_opt implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++
++ sed -e '/^func_split_short_opt ()$/,/^} # func_split_short_opt /c\
++func_split_short_opt ()\
++{\
++\ func_split_short_opt_arg=${1#??}\
++\ func_split_short_opt_name=${1%"$func_split_short_opt_arg"}\
++} # Extended-shell func_split_short_opt implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++
++ sed -e '/^func_lo2o ()$/,/^} # func_lo2o /c\
++func_lo2o ()\
++{\
++\ case ${1} in\
++\ *.lo) func_lo2o_result=${1%.lo}.${objext} ;;\
++\ *) func_lo2o_result=${1} ;;\
++\ esac\
++} # Extended-shell func_lo2o implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++
++ sed -e '/^func_xform ()$/,/^} # func_xform /c\
++func_xform ()\
++{\
++ func_xform_result=${1%.*}.lo\
++} # Extended-shell func_xform implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++
++ sed -e '/^func_arith ()$/,/^} # func_arith /c\
++func_arith ()\
++{\
++ func_arith_result=$(( $* ))\
++} # Extended-shell func_arith implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++
++ sed -e '/^func_len ()$/,/^} # func_len /c\
++func_len ()\
++{\
++ func_len_result=${#1}\
++} # Extended-shell func_len implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++fi
++
++if test x"$lt_shell_append" = xyes; then
++ sed -e '/^func_append ()$/,/^} # func_append /c\
++func_append ()\
++{\
++ eval "${1}+=\\${2}"\
++} # Extended-shell func_append implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++
++ sed -e '/^func_append_quoted ()$/,/^} # func_append_quoted /c\
++func_append_quoted ()\
++{\
++\ func_quote_for_eval "${2}"\
++\ eval "${1}+=\\\\ \\$func_quote_for_eval_result"\
++} # Extended-shell func_append_quoted implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++
++ # Save a `func_append' function call where possible by direct use of '+='
++ sed -e 's%func_append \([a-zA-Z_]\{1,\}\) "%\1+="%g' $cfgfile > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++ test 0 -eq $? || _lt_function_replace_fail=:
++else
++ # Save a `func_append' function call even when '+=' is not available
++ sed -e 's%func_append \([a-zA-Z_]\{1,\}\) "%\1="$\1%g' $cfgfile > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++ test 0 -eq $? || _lt_function_replace_fail=:
++fi
++
++if test x"$_lt_function_replace_fail" = x":"; then
++ { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: Unable to substitute extended shell functions in $ofile" >&5
++$as_echo "$as_me: WARNING: Unable to substitute extended shell functions in $ofile" >&2;}
++fi
++
++
++ mv -f "$cfgfile" "$ofile" ||
+ (rm -f "$ofile" && cp "$cfgfile" "$ofile" && rm -f "$cfgfile")
+ chmod +x "$ofile"
+
+@@ -22029,12 +22869,12 @@ with_gcc=$GCC_CXX
+ # Compiler flag to turn off builtin functions.
+ no_builtin_flag=$lt_lt_prog_compiler_no_builtin_flag_CXX
+
+-# How to pass a linker flag through the compiler.
+-wl=$lt_lt_prog_compiler_wl_CXX
+-
+ # Additional compiler flags for building library objects.
+ pic_flag=$lt_lt_prog_compiler_pic_CXX
+
++# How to pass a linker flag through the compiler.
++wl=$lt_lt_prog_compiler_wl_CXX
++
+ # Compiler flag to prevent dynamic linking.
+ link_static_flag=$lt_lt_prog_compiler_static_CXX
+
+@@ -22121,9 +22961,6 @@ inherit_rpath=$inherit_rpath_CXX
+ # Whether libtool must link a program against all its dependency libraries.
+ link_all_deplibs=$link_all_deplibs_CXX
+
+-# Fix the shell variable \$srcfile for the compiler.
+-fix_srcfile_path=$lt_fix_srcfile_path_CXX
+-
+ # Set to "yes" if exported symbols are required.
+ always_export_symbols=$always_export_symbols_CXX
+
+@@ -22139,6 +22976,9 @@ include_expsyms=$lt_include_expsyms_CXX
+ # Commands necessary for linking programs (against libraries) with templates.
+ prelink_cmds=$lt_prelink_cmds_CXX
+
++# Commands necessary for finishing linking programs.
++postlink_cmds=$lt_postlink_cmds_CXX
++
+ # Specify filename containing input files.
+ file_list_spec=$lt_file_list_spec_CXX
+
+diff --git a/libbacktrace/Makefile.in b/libbacktrace/Makefile.in
+index 0a61880ba67..500756cd023 100644
+--- a/libbacktrace/Makefile.in
++++ b/libbacktrace/Makefile.in
+@@ -1,7 +1,7 @@
+-# Makefile.in generated by automake 1.15.1 from Makefile.am.
++# Makefile.in generated by automake 1.16.5 from Makefile.am.
+ # @configure_input@
+
+-# Copyright (C) 1994-2017 Free Software Foundation, Inc.
++# Copyright (C) 1994-2021 Free Software Foundation, Inc.
+
+ # This Makefile.in is free software; the Free Software Foundation
+ # gives unlimited permission to copy and/or distribute it,
+@@ -189,6 +189,35 @@ CONFIG_HEADER = config.h
+ CONFIG_CLEAN_FILES = backtrace-supported.h \
+ install-debuginfo-for-buildid.sh
+ CONFIG_CLEAN_VPATH_FILES =
++@NATIVE_TRUE@am__EXEEXT_1 = allocfail$(EXEEXT)
++@HAVE_ELF_TRUE@@HAVE_OBJCOPY_DEBUGLINK_TRUE@@NATIVE_TRUE@am__EXEEXT_2 = b2test$(EXEEXT)
++@HAVE_DWZ_TRUE@@HAVE_ELF_TRUE@@HAVE_OBJCOPY_DEBUGLINK_TRUE@@NATIVE_TRUE@am__EXEEXT_3 = b3test$(EXEEXT)
++@NATIVE_TRUE@am__EXEEXT_4 = test_elf_32$(EXEEXT) test_elf_64$(EXEEXT) \
++@NATIVE_TRUE@ test_macho$(EXEEXT) test_xcoff_32$(EXEEXT) \
++@NATIVE_TRUE@ test_xcoff_64$(EXEEXT) test_pecoff$(EXEEXT) \
++@NATIVE_TRUE@ test_unknown$(EXEEXT) unittest$(EXEEXT) \
++@NATIVE_TRUE@ unittest_alloc$(EXEEXT) btest$(EXEEXT)
++@HAVE_ELF_TRUE@@NATIVE_TRUE@am__EXEEXT_5 = btest_lto$(EXEEXT)
++@NATIVE_TRUE@am__EXEEXT_6 = btest_alloc$(EXEEXT) stest$(EXEEXT) \
++@NATIVE_TRUE@ stest_alloc$(EXEEXT)
++@HAVE_ELF_TRUE@@NATIVE_TRUE@am__EXEEXT_7 = ztest$(EXEEXT) \
++@HAVE_ELF_TRUE@@NATIVE_TRUE@ ztest_alloc$(EXEEXT)
++@NATIVE_TRUE@am__EXEEXT_8 = edtest$(EXEEXT) edtest_alloc$(EXEEXT)
++@HAVE_PTHREAD_TRUE@@NATIVE_TRUE@am__EXEEXT_9 = ttest$(EXEEXT) \
++@HAVE_PTHREAD_TRUE@@NATIVE_TRUE@ ttest_alloc$(EXEEXT)
++@HAVE_COMPRESSED_DEBUG_TRUE@@NATIVE_TRUE@am__EXEEXT_10 = \
++@HAVE_COMPRESSED_DEBUG_TRUE@@NATIVE_TRUE@ ctestg$(EXEEXT) \
++@HAVE_COMPRESSED_DEBUG_TRUE@@NATIVE_TRUE@ ctesta$(EXEEXT) \
++@HAVE_COMPRESSED_DEBUG_TRUE@@NATIVE_TRUE@ ctestg_alloc$(EXEEXT) \
++@HAVE_COMPRESSED_DEBUG_TRUE@@NATIVE_TRUE@ ctesta_alloc$(EXEEXT)
++@HAVE_DWARF5_TRUE@@NATIVE_TRUE@am__EXEEXT_11 = dwarf5$(EXEEXT) \
++@HAVE_DWARF5_TRUE@@NATIVE_TRUE@ dwarf5_alloc$(EXEEXT)
++@NATIVE_TRUE@am__EXEEXT_12 = mtest$(EXEEXT)
++@HAVE_ELF_TRUE@am__EXEEXT_13 = xztest$(EXEEXT) xztest_alloc$(EXEEXT)
++am__EXEEXT_14 = $(am__EXEEXT_4) $(am__EXEEXT_5) $(am__EXEEXT_6) \
++ $(am__EXEEXT_7) $(am__EXEEXT_8) $(am__EXEEXT_9) \
++ $(am__EXEEXT_10) $(am__EXEEXT_11) $(am__EXEEXT_12) \
++ $(am__EXEEXT_13)
+ LTLIBRARIES = $(noinst_LTLIBRARIES)
+ am__DEPENDENCIES_1 =
+ am_libbacktrace_la_OBJECTS = atomic.lo dwarf.lo fileline.lo posix.lo \
+@@ -220,35 +249,6 @@ libbacktrace_instrumented_alloc_la_OBJECTS = \
+ libbacktrace_noformat_la_OBJECTS = \
+ $(am_libbacktrace_noformat_la_OBJECTS)
+ @NATIVE_TRUE@am_libbacktrace_noformat_la_rpath =
+-@NATIVE_TRUE@am__EXEEXT_1 = allocfail$(EXEEXT)
+-@HAVE_ELF_TRUE@@HAVE_OBJCOPY_DEBUGLINK_TRUE@@NATIVE_TRUE@am__EXEEXT_2 = b2test$(EXEEXT)
+-@HAVE_DWZ_TRUE@@HAVE_ELF_TRUE@@HAVE_OBJCOPY_DEBUGLINK_TRUE@@NATIVE_TRUE@am__EXEEXT_3 = b3test$(EXEEXT)
+-@NATIVE_TRUE@am__EXEEXT_4 = test_elf_32$(EXEEXT) test_elf_64$(EXEEXT) \
+-@NATIVE_TRUE@ test_macho$(EXEEXT) test_xcoff_32$(EXEEXT) \
+-@NATIVE_TRUE@ test_xcoff_64$(EXEEXT) test_pecoff$(EXEEXT) \
+-@NATIVE_TRUE@ test_unknown$(EXEEXT) unittest$(EXEEXT) \
+-@NATIVE_TRUE@ unittest_alloc$(EXEEXT) btest$(EXEEXT)
+-@HAVE_ELF_TRUE@@NATIVE_TRUE@am__EXEEXT_5 = btest_lto$(EXEEXT)
+-@NATIVE_TRUE@am__EXEEXT_6 = btest_alloc$(EXEEXT) stest$(EXEEXT) \
+-@NATIVE_TRUE@ stest_alloc$(EXEEXT)
+-@HAVE_ELF_TRUE@@NATIVE_TRUE@am__EXEEXT_7 = ztest$(EXEEXT) \
+-@HAVE_ELF_TRUE@@NATIVE_TRUE@ ztest_alloc$(EXEEXT)
+-@NATIVE_TRUE@am__EXEEXT_8 = edtest$(EXEEXT) edtest_alloc$(EXEEXT)
+-@HAVE_PTHREAD_TRUE@@NATIVE_TRUE@am__EXEEXT_9 = ttest$(EXEEXT) \
+-@HAVE_PTHREAD_TRUE@@NATIVE_TRUE@ ttest_alloc$(EXEEXT)
+-@HAVE_COMPRESSED_DEBUG_TRUE@@NATIVE_TRUE@am__EXEEXT_10 = \
+-@HAVE_COMPRESSED_DEBUG_TRUE@@NATIVE_TRUE@ ctestg$(EXEEXT) \
+-@HAVE_COMPRESSED_DEBUG_TRUE@@NATIVE_TRUE@ ctesta$(EXEEXT) \
+-@HAVE_COMPRESSED_DEBUG_TRUE@@NATIVE_TRUE@ ctestg_alloc$(EXEEXT) \
+-@HAVE_COMPRESSED_DEBUG_TRUE@@NATIVE_TRUE@ ctesta_alloc$(EXEEXT)
+-@HAVE_DWARF5_TRUE@@NATIVE_TRUE@am__EXEEXT_11 = dwarf5$(EXEEXT) \
+-@HAVE_DWARF5_TRUE@@NATIVE_TRUE@ dwarf5_alloc$(EXEEXT)
+-@NATIVE_TRUE@am__EXEEXT_12 = mtest$(EXEEXT)
+-@HAVE_ELF_TRUE@am__EXEEXT_13 = xztest$(EXEEXT) xztest_alloc$(EXEEXT)
+-am__EXEEXT_14 = $(am__EXEEXT_4) $(am__EXEEXT_5) $(am__EXEEXT_6) \
+- $(am__EXEEXT_7) $(am__EXEEXT_8) $(am__EXEEXT_9) \
+- $(am__EXEEXT_10) $(am__EXEEXT_11) $(am__EXEEXT_12) \
+- $(am__EXEEXT_13)
+ @NATIVE_TRUE@am_allocfail_OBJECTS = allocfail-allocfail.$(OBJEXT) \
+ @NATIVE_TRUE@ allocfail-testlib.$(OBJEXT)
+ allocfail_OBJECTS = $(am_allocfail_OBJECTS)
+@@ -536,7 +536,7 @@ am__v_at_0 = @
+ am__v_at_1 =
+ DEFAULT_INCLUDES = -I.@am__isrc@
+ depcomp =
+-am__depfiles_maybe =
++am__maybe_remake_depfiles =
+ COMPILE = $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) \
+ $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS)
+ LTCOMPILE = $(LIBTOOL) $(AM_V_lt) --tag=CC $(AM_LIBTOOLFLAGS) \
+@@ -579,8 +579,8 @@ am__can_run_installinfo = \
+ n|no|NO) false;; \
+ *) (install-info --version) >/dev/null 2>&1;; \
+ esac
+-am__tagged_files = $(HEADERS) $(SOURCES) $(TAGS_FILES) \
+- $(LISP)config.h.in
++am__tagged_files = $(HEADERS) $(SOURCES) $(TAGS_FILES) $(LISP) \
++ config.h.in
+ # Read a list of newline-separated strings from the standard input,
+ # and print each of them once, without duplicates. Input order is
+ # *not* preserved.
+@@ -597,9 +597,6 @@ am__define_uniq_tagged_files = \
+ unique=`for i in $$list; do \
+ if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \
+ done | $(am__uniquify_input)`
+-ETAGS = etags
+-CTAGS = ctags
+-CSCOPE = cscope
+ AM_RECURSIVE_TARGETS = cscope check recheck
+ am__tty_colors_dummy = \
+ mgn= red= grn= lgn= blu= brg= std=; \
+@@ -783,6 +780,7 @@ am__set_TESTS_bases = \
+ bases='$(TEST_LOGS)'; \
+ bases=`for i in $$bases; do echo $$i; done | sed 's/\.log$$//'`; \
+ bases=`echo $$bases`
++AM_TESTSUITE_SUMMARY_HEADER = ' for $(PACKAGE_STRING)'
+ RECHECK_LOGS = $(TEST_LOGS)
+ TEST_SUITE_LOG = test-suite.log
+ TEST_EXTENSIONS = @EXEEXT@ .test
+@@ -825,8 +823,11 @@ CLOCK_GETTIME_LINK = @CLOCK_GETTIME_LINK@
+ COMM = @COMM@
+ CPP = @CPP@
+ CPPFLAGS = @CPPFLAGS@
++CSCOPE = @CSCOPE@
++CTAGS = @CTAGS@
+ CYGPATH_W = @CYGPATH_W@
+ DEFS = @DEFS@
++DLLTOOL = @DLLTOOL@
+ DSYMUTIL = @DSYMUTIL@
+ DUMPBIN = @DUMPBIN@
+ DWZ = @DWZ@
+@@ -834,6 +835,7 @@ ECHO_C = @ECHO_C@
+ ECHO_N = @ECHO_N@
+ ECHO_T = @ECHO_T@
+ EGREP = @EGREP@
++ETAGS = @ETAGS@
+ EXEEXT = @EXEEXT@
+ EXTRA_FLAGS = @EXTRA_FLAGS@
+ FGREP = @FGREP@
+@@ -854,6 +856,7 @@ LN_S = @LN_S@
+ LTLIBOBJS = @LTLIBOBJS@
+ MAINT = @MAINT@
+ MAKEINFO = @MAKEINFO@
++MANIFEST_TOOL = @MANIFEST_TOOL@
+ MKDIR_P = @MKDIR_P@
+ NM = @NM@
+ NMEDIT = @NMEDIT@
+@@ -886,6 +889,7 @@ abs_builddir = @abs_builddir@
+ abs_srcdir = @abs_srcdir@
+ abs_top_builddir = @abs_top_builddir@
+ abs_top_srcdir = @abs_top_srcdir@
++ac_ct_AR = @ac_ct_AR@
+ ac_ct_CC = @ac_ct_CC@
+ ac_ct_DUMPBIN = @ac_ct_DUMPBIN@
+ am__leading_dot = @am__leading_dot@
+@@ -1178,8 +1182,8 @@ Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status
+ echo ' $(SHELL) ./config.status'; \
+ $(SHELL) ./config.status;; \
+ *) \
+- echo ' cd $(top_builddir) && $(SHELL) ./config.status $@ $(am__depfiles_maybe)'; \
+- cd $(top_builddir) && $(SHELL) ./config.status $@ $(am__depfiles_maybe);; \
++ echo ' cd $(top_builddir) && $(SHELL) ./config.status $@ $(am__maybe_remake_depfiles)'; \
++ cd $(top_builddir) && $(SHELL) ./config.status $@ $(am__maybe_remake_depfiles);; \
+ esac;
+ $(top_srcdir)/../multilib.am $(am__empty):
+
+@@ -1211,6 +1215,15 @@ backtrace-supported.h: $(top_builddir)/config.status $(srcdir)/backtrace-support
+ install-debuginfo-for-buildid.sh: $(top_builddir)/config.status $(srcdir)/install-debuginfo-for-buildid.sh.in
+ cd $(top_builddir) && $(SHELL) ./config.status $@
+
++clean-checkPROGRAMS:
++ @list='$(check_PROGRAMS)'; test -n "$$list" || exit 0; \
++ echo " rm -f" $$list; \
++ rm -f $$list || exit $$?; \
++ test -n "$(EXEEXT)" || exit 0; \
++ list=`for p in $$list; do echo "$$p"; done | sed 's/$(EXEEXT)$$//'`; \
++ echo " rm -f" $$list; \
++ rm -f $$list
++
+ clean-checkLTLIBRARIES:
+ -test -z "$(check_LTLIBRARIES)" || rm -f $(check_LTLIBRARIES)
+ @list='$(check_LTLIBRARIES)'; \
+@@ -1248,15 +1261,6 @@ libbacktrace_instrumented_alloc.la: $(libbacktrace_instrumented_alloc_la_OBJECTS
+ libbacktrace_noformat.la: $(libbacktrace_noformat_la_OBJECTS) $(libbacktrace_noformat_la_DEPENDENCIES) $(EXTRA_libbacktrace_noformat_la_DEPENDENCIES)
+ $(AM_V_CCLD)$(LINK) $(am_libbacktrace_noformat_la_rpath) $(libbacktrace_noformat_la_OBJECTS) $(libbacktrace_noformat_la_LIBADD) $(LIBS)
+
+-clean-checkPROGRAMS:
+- @list='$(check_PROGRAMS)'; test -n "$$list" || exit 0; \
+- echo " rm -f" $$list; \
+- rm -f $$list || exit $$?; \
+- test -n "$(EXEEXT)" || exit 0; \
+- list=`for p in $$list; do echo "$$p"; done | sed 's/$(EXEEXT)$$//'`; \
+- echo " rm -f" $$list; \
+- rm -f $$list
+-
+ allocfail$(EXEEXT): $(allocfail_OBJECTS) $(allocfail_DEPENDENCIES) $(EXTRA_allocfail_DEPENDENCIES)
+ @rm -f allocfail$(EXEEXT)
+ $(AM_V_CCLD)$(allocfail_LINK) $(allocfail_OBJECTS) $(allocfail_LDADD) $(LIBS)
+@@ -1959,7 +1963,7 @@ $(TEST_SUITE_LOG): $(TEST_LOGS)
+ test x"$$VERBOSE" = x || cat $(TEST_SUITE_LOG); \
+ fi; \
+ echo "$${col}$$br$${std}"; \
+- echo "$${col}Testsuite summary for $(PACKAGE_STRING)$${std}"; \
++ echo "$${col}Testsuite summary"$(AM_TESTSUITE_SUMMARY_HEADER)"$${std}"; \
+ echo "$${col}$$br$${std}"; \
+ create_testsuite_report --maybe-color; \
+ echo "$$col$$br$$std"; \
+@@ -1972,7 +1976,7 @@ $(TEST_SUITE_LOG): $(TEST_LOGS)
+ fi; \
+ $$success || exit 1
+
+-check-TESTS:
++check-TESTS: $(check_PROGRAMS) $(check_LTLIBRARIES) $(check_DATA)
+ @list='$(RECHECK_LOGS)'; test -z "$$list" || rm -f $$list
+ @list='$(RECHECK_LOGS:.log=.trs)'; test -z "$$list" || rm -f $$list
+ @test -z "$(TEST_SUITE_LOG)" || rm -f $(TEST_SUITE_LOG)
+@@ -1982,7 +1986,7 @@ check-TESTS:
+ log_list=`echo $$log_list`; trs_list=`echo $$trs_list`; \
+ $(MAKE) $(AM_MAKEFLAGS) $(TEST_SUITE_LOG) TEST_LOGS="$$log_list"; \
+ exit $$?;
+-recheck: all $(check_LTLIBRARIES) $(check_PROGRAMS) $(check_DATA)
++recheck: all $(check_PROGRAMS) $(check_LTLIBRARIES) $(check_DATA)
+ @test -z "$(TEST_SUITE_LOG)" || rm -f $(TEST_SUITE_LOG)
+ @set +e; $(am__set_TESTS_bases); \
+ bases=`for i in $$bases; do echo $$i; done \
+@@ -2260,7 +2264,7 @@ xztest_alloc.log: xztest_alloc$(EXEEXT)
+ @am__EXEEXT_TRUE@ $(am__common_driver_flags) $(AM_TEST_LOG_DRIVER_FLAGS) $(TEST_LOG_DRIVER_FLAGS) -- $(TEST_LOG_COMPILE) \
+ @am__EXEEXT_TRUE@ "$$tst" $(AM_TESTS_FD_REDIRECT)
+ check-am: all-am
+- $(MAKE) $(AM_MAKEFLAGS) $(check_LTLIBRARIES) $(check_PROGRAMS) \
++ $(MAKE) $(AM_MAKEFLAGS) $(check_PROGRAMS) $(check_LTLIBRARIES) \
+ $(check_DATA)
+ $(MAKE) $(AM_MAKEFLAGS) check-TESTS
+ check: check-am
+diff --git a/libbacktrace/aclocal.m4 b/libbacktrace/aclocal.m4
+index 528e6173930..d88a5ec84f2 100644
+--- a/libbacktrace/aclocal.m4
++++ b/libbacktrace/aclocal.m4
+@@ -1,6 +1,6 @@
+-# generated automatically by aclocal 1.15.1 -*- Autoconf -*-
++# generated automatically by aclocal 1.16.5 -*- Autoconf -*-
+
+-# Copyright (C) 1996-2017 Free Software Foundation, Inc.
++# Copyright (C) 1996-2021 Free Software Foundation, Inc.
+
+ # This file is free software; the Free Software Foundation
+ # gives unlimited permission to copy and/or distribute it,
+@@ -20,7 +20,7 @@ You have another version of autoconf. It may work, but is not guaranteed to.
+ If you have problems, you may need to regenerate the build system entirely.
+ To do so, use the procedure documented by the package, typically 'autoreconf'.])])
+
+-# Copyright (C) 2002-2017 Free Software Foundation, Inc.
++# Copyright (C) 2002-2021 Free Software Foundation, Inc.
+ #
+ # This file is free software; the Free Software Foundation
+ # gives unlimited permission to copy and/or distribute it,
+@@ -32,10 +32,10 @@ To do so, use the procedure documented by the package, typically 'autoreconf'.])
+ # generated from the m4 files accompanying Automake X.Y.
+ # (This private macro should not be called outside this file.)
+ AC_DEFUN([AM_AUTOMAKE_VERSION],
+-[am__api_version='1.15'
++[am__api_version='1.16'
+ dnl Some users find AM_AUTOMAKE_VERSION and mistake it for a way to
+ dnl require some minimum version. Point them to the right macro.
+-m4_if([$1], [1.15.1], [],
++m4_if([$1], [1.16.5], [],
+ [AC_FATAL([Do not call $0, use AM_INIT_AUTOMAKE([$1]).])])dnl
+ ])
+
+@@ -51,14 +51,14 @@ m4_define([_AM_AUTOCONF_VERSION], [])
+ # Call AM_AUTOMAKE_VERSION and AM_AUTOMAKE_VERSION so they can be traced.
+ # This function is AC_REQUIREd by AM_INIT_AUTOMAKE.
+ AC_DEFUN([AM_SET_CURRENT_AUTOMAKE_VERSION],
+-[AM_AUTOMAKE_VERSION([1.15.1])dnl
++[AM_AUTOMAKE_VERSION([1.16.5])dnl
+ m4_ifndef([AC_AUTOCONF_VERSION],
+ [m4_copy([m4_PACKAGE_VERSION], [AC_AUTOCONF_VERSION])])dnl
+ _AM_AUTOCONF_VERSION(m4_defn([AC_AUTOCONF_VERSION]))])
+
+ # AM_AUX_DIR_EXPAND -*- Autoconf -*-
+
+-# Copyright (C) 2001-2017 Free Software Foundation, Inc.
++# Copyright (C) 2001-2021 Free Software Foundation, Inc.
+ #
+ # This file is free software; the Free Software Foundation
+ # gives unlimited permission to copy and/or distribute it,
+@@ -110,7 +110,7 @@ am_aux_dir=`cd "$ac_aux_dir" && pwd`
+
+ # AM_CONDITIONAL -*- Autoconf -*-
+
+-# Copyright (C) 1997-2017 Free Software Foundation, Inc.
++# Copyright (C) 1997-2021 Free Software Foundation, Inc.
+ #
+ # This file is free software; the Free Software Foundation
+ # gives unlimited permission to copy and/or distribute it,
+@@ -143,7 +143,7 @@ fi])])
+
+ # Do all the work for Automake. -*- Autoconf -*-
+
+-# Copyright (C) 1996-2017 Free Software Foundation, Inc.
++# Copyright (C) 1996-2021 Free Software Foundation, Inc.
+ #
+ # This file is free software; the Free Software Foundation
+ # gives unlimited permission to copy and/or distribute it,
+@@ -171,6 +171,10 @@ m4_defn([AC_PROG_CC])
+ # release and drop the old call support.
+ AC_DEFUN([AM_INIT_AUTOMAKE],
+ [AC_PREREQ([2.65])dnl
++m4_ifdef([_$0_ALREADY_INIT],
++ [m4_fatal([$0 expanded multiple times
++]m4_defn([_$0_ALREADY_INIT]))],
++ [m4_define([_$0_ALREADY_INIT], m4_expansion_stack)])dnl
+ dnl Autoconf wants to disallow AM_ names. We explicitly allow
+ dnl the ones we care about.
+ m4_pattern_allow([^AM_[A-Z]+FLAGS$])dnl
+@@ -207,7 +211,7 @@ m4_ifval([$3], [_AM_SET_OPTION([no-define])])dnl
+ [_AM_SET_OPTIONS([$1])dnl
+ dnl Diagnose old-style AC_INIT with new-style AM_AUTOMAKE_INIT.
+ m4_if(
+- m4_ifdef([AC_PACKAGE_NAME], [ok]):m4_ifdef([AC_PACKAGE_VERSION], [ok]),
++ m4_ifset([AC_PACKAGE_NAME], [ok]):m4_ifset([AC_PACKAGE_VERSION], [ok]),
+ [ok:ok],,
+ [m4_fatal([AC_INIT should be called with package and version arguments])])dnl
+ AC_SUBST([PACKAGE], ['AC_PACKAGE_TARNAME'])dnl
+@@ -230,8 +234,8 @@ AC_REQUIRE([AM_PROG_INSTALL_STRIP])dnl
+ AC_REQUIRE([AC_PROG_MKDIR_P])dnl
+ # For better backward compatibility. To be removed once Automake 1.9.x
+ # dies out for good. For more background, see:
+-# <http://lists.gnu.org/archive/html/automake/2012-07/msg00001.html>
+-# <http://lists.gnu.org/archive/html/automake/2012-07/msg00014.html>
++# <https://lists.gnu.org/archive/html/automake/2012-07/msg00001.html>
++# <https://lists.gnu.org/archive/html/automake/2012-07/msg00014.html>
+ AC_SUBST([mkdir_p], ['$(MKDIR_P)'])
+ # We need awk for the "check" target (and possibly the TAP driver). The
+ # system "awk" is bad on some platforms.
+@@ -259,6 +263,20 @@ AC_PROVIDE_IFELSE([AC_PROG_OBJCXX],
+ [m4_define([AC_PROG_OBJCXX],
+ m4_defn([AC_PROG_OBJCXX])[_AM_DEPENDENCIES([OBJCXX])])])dnl
+ ])
++# Variables for tags utilities; see am/tags.am
++if test -z "$CTAGS"; then
++ CTAGS=ctags
++fi
++AC_SUBST([CTAGS])
++if test -z "$ETAGS"; then
++ ETAGS=etags
++fi
++AC_SUBST([ETAGS])
++if test -z "$CSCOPE"; then
++ CSCOPE=cscope
++fi
++AC_SUBST([CSCOPE])
++
+ AC_REQUIRE([AM_SILENT_RULES])dnl
+ dnl The testsuite driver may need to know about EXEEXT, so add the
+ dnl 'am__EXEEXT' conditional if _AM_COMPILER_EXEEXT was seen. This
+@@ -298,7 +316,7 @@ END
+ Aborting the configuration process, to ensure you take notice of the issue.
+
+ You can download and install GNU coreutils to get an 'rm' implementation
+-that behaves properly: <http://www.gnu.org/software/coreutils/>.
++that behaves properly: <https://www.gnu.org/software/coreutils/>.
+
+ If you want to complete the configuration process using your problematic
+ 'rm' anyway, export the environment variable ACCEPT_INFERIOR_RM_PROGRAM
+@@ -340,7 +358,7 @@ for _am_header in $config_headers :; do
+ done
+ echo "timestamp for $_am_arg" >`AS_DIRNAME(["$_am_arg"])`/stamp-h[]$_am_stamp_count])
+
+-# Copyright (C) 2001-2017 Free Software Foundation, Inc.
++# Copyright (C) 2001-2021 Free Software Foundation, Inc.
+ #
+ # This file is free software; the Free Software Foundation
+ # gives unlimited permission to copy and/or distribute it,
+@@ -364,7 +382,7 @@ AC_SUBST([install_sh])])
+ # Add --enable-maintainer-mode option to configure. -*- Autoconf -*-
+ # From Jim Meyering
+
+-# Copyright (C) 1996-2017 Free Software Foundation, Inc.
++# Copyright (C) 1996-2021 Free Software Foundation, Inc.
+ #
+ # This file is free software; the Free Software Foundation
+ # gives unlimited permission to copy and/or distribute it,
+@@ -399,7 +417,7 @@ AC_MSG_CHECKING([whether to enable maintainer-specific portions of Makefiles])
+
+ # Fake the existence of programs that GNU maintainers use. -*- Autoconf -*-
+
+-# Copyright (C) 1997-2017 Free Software Foundation, Inc.
++# Copyright (C) 1997-2021 Free Software Foundation, Inc.
+ #
+ # This file is free software; the Free Software Foundation
+ # gives unlimited permission to copy and/or distribute it,
+@@ -420,12 +438,7 @@ AC_DEFUN([AM_MISSING_HAS_RUN],
+ [AC_REQUIRE([AM_AUX_DIR_EXPAND])dnl
+ AC_REQUIRE_AUX_FILE([missing])dnl
+ if test x"${MISSING+set}" != xset; then
+- case $am_aux_dir in
+- *\ * | *\ *)
+- MISSING="\${SHELL} \"$am_aux_dir/missing\"" ;;
+- *)
+- MISSING="\${SHELL} $am_aux_dir/missing" ;;
+- esac
++ MISSING="\${SHELL} '$am_aux_dir/missing'"
+ fi
+ # Use eval to expand $SHELL
+ if eval "$MISSING --is-lightweight"; then
+@@ -438,7 +451,7 @@ fi
+
+ # Helper functions for option handling. -*- Autoconf -*-
+
+-# Copyright (C) 2001-2017 Free Software Foundation, Inc.
++# Copyright (C) 2001-2021 Free Software Foundation, Inc.
+ #
+ # This file is free software; the Free Software Foundation
+ # gives unlimited permission to copy and/or distribute it,
+@@ -467,7 +480,7 @@ AC_DEFUN([_AM_SET_OPTIONS],
+ AC_DEFUN([_AM_IF_OPTION],
+ [m4_ifset(_AM_MANGLE_OPTION([$1]), [$2], [$3])])
+
+-# Copyright (C) 1999-2017 Free Software Foundation, Inc.
++# Copyright (C) 1999-2021 Free Software Foundation, Inc.
+ #
+ # This file is free software; the Free Software Foundation
+ # gives unlimited permission to copy and/or distribute it,
+@@ -514,7 +527,7 @@ AC_LANG_POP([C])])
+ # For backward compatibility.
+ AC_DEFUN_ONCE([AM_PROG_CC_C_O], [AC_REQUIRE([AC_PROG_CC])])
+
+-# Copyright (C) 2001-2017 Free Software Foundation, Inc.
++# Copyright (C) 2001-2021 Free Software Foundation, Inc.
+ #
+ # This file is free software; the Free Software Foundation
+ # gives unlimited permission to copy and/or distribute it,
+@@ -533,7 +546,7 @@ AC_DEFUN([AM_RUN_LOG],
+
+ # Check to make sure that the build environment is sane. -*- Autoconf -*-
+
+-# Copyright (C) 1996-2017 Free Software Foundation, Inc.
++# Copyright (C) 1996-2021 Free Software Foundation, Inc.
+ #
+ # This file is free software; the Free Software Foundation
+ # gives unlimited permission to copy and/or distribute it,
+@@ -614,7 +627,7 @@ AC_CONFIG_COMMANDS_PRE(
+ rm -f conftest.file
+ ])
+
+-# Copyright (C) 2009-2017 Free Software Foundation, Inc.
++# Copyright (C) 2009-2021 Free Software Foundation, Inc.
+ #
+ # This file is free software; the Free Software Foundation
+ # gives unlimited permission to copy and/or distribute it,
+@@ -674,7 +687,7 @@ AC_SUBST([AM_BACKSLASH])dnl
+ _AM_SUBST_NOTMAKE([AM_BACKSLASH])dnl
+ ])
+
+-# Copyright (C) 2001-2017 Free Software Foundation, Inc.
++# Copyright (C) 2001-2021 Free Software Foundation, Inc.
+ #
+ # This file is free software; the Free Software Foundation
+ # gives unlimited permission to copy and/or distribute it,
+@@ -702,7 +715,7 @@ fi
+ INSTALL_STRIP_PROGRAM="\$(install_sh) -c -s"
+ AC_SUBST([INSTALL_STRIP_PROGRAM])])
+
+-# Copyright (C) 2006-2017 Free Software Foundation, Inc.
++# Copyright (C) 2006-2021 Free Software Foundation, Inc.
+ #
+ # This file is free software; the Free Software Foundation
+ # gives unlimited permission to copy and/or distribute it,
+@@ -721,7 +734,7 @@ AC_DEFUN([AM_SUBST_NOTMAKE], [_AM_SUBST_NOTMAKE($@)])
+
+ # Check how to create a tarball. -*- Autoconf -*-
+
+-# Copyright (C) 2004-2017 Free Software Foundation, Inc.
++# Copyright (C) 2004-2021 Free Software Foundation, Inc.
+ #
+ # This file is free software; the Free Software Foundation
+ # gives unlimited permission to copy and/or distribute it,
+diff --git a/libbacktrace/configure b/libbacktrace/configure
+index e5ca8ad9379..b7c4c4be78d 100755
+--- a/libbacktrace/configure
++++ b/libbacktrace/configure
+@@ -680,7 +680,10 @@ OTOOL
+ LIPO
+ NMEDIT
+ DSYMUTIL
++MANIFEST_TOOL
++ac_ct_AR
+ AR
++DLLTOOL
+ OBJDUMP
+ LN_S
+ NM
+@@ -701,6 +704,9 @@ AM_BACKSLASH
+ AM_DEFAULT_VERBOSITY
+ AM_DEFAULT_V
+ AM_V
++CSCOPE
++ETAGS
++CTAGS
+ am__untar
+ am__tar
+ AMTAR
+@@ -798,6 +804,7 @@ enable_static
+ with_pic
+ enable_fast_install
+ with_gnu_ld
++with_libtool_sysroot
+ enable_libtool_lock
+ enable_largefile
+ enable_cet
+@@ -1458,6 +1465,8 @@ Optional Packages:
+ --with-pic try to use only PIC/non-PIC objects [default=use
+ both]
+ --with-gnu-ld assume the C compiler uses GNU ld [default=no]
++ --with-libtool-sysroot=DIR Search for dependent libraries within DIR
++ (or the compiler's sysroot if not specified).
+ --with-system-libunwind use installed libunwind
+
+ Some influential environment variables:
+@@ -4047,7 +4056,7 @@ libtool_VERSION=1:0:0
+ # -Wall: Issue all automake warnings.
+ # -Wno-portability: Don't warn about constructs supported by GNU make.
+ # (because GCC requires GNU make anyhow).
+-am__api_version='1.15'
++am__api_version='1.16'
+
+ # Find a good install program. We prefer a C program (faster),
+ # so one script is as good as another. But avoid the broken or
+@@ -4220,12 +4229,7 @@ ac_script='s/[\\$]/&&/g;s/;s,x,x,$//'
+ program_transform_name=`$as_echo "$program_transform_name" | sed "$ac_script"`
+
+ if test x"${MISSING+set}" != xset; then
+- case $am_aux_dir in
+- *\ * | *\ *)
+- MISSING="\${SHELL} \"$am_aux_dir/missing\"" ;;
+- *)
+- MISSING="\${SHELL} $am_aux_dir/missing" ;;
+- esac
++ MISSING="\${SHELL} '$am_aux_dir/missing'"
+ fi
+ # Use eval to expand $SHELL
+ if eval "$MISSING --is-lightweight"; then
+@@ -4551,8 +4555,8 @@ MAKEINFO=${MAKEINFO-"${am_missing_run}makeinfo"}
+
+ # For better backward compatibility. To be removed once Automake 1.9.x
+ # dies out for good. For more background, see:
+-# <http://lists.gnu.org/archive/html/automake/2012-07/msg00001.html>
+-# <http://lists.gnu.org/archive/html/automake/2012-07/msg00014.html>
++# <https://lists.gnu.org/archive/html/automake/2012-07/msg00001.html>
++# <https://lists.gnu.org/archive/html/automake/2012-07/msg00014.html>
+ mkdir_p='$(MKDIR_P)'
+
+ # We need awk for the "check" target (and possibly the TAP driver). The
+@@ -4571,6 +4575,20 @@ am__tar='$${TAR-tar} chof - "$$tardir"' am__untar='$${TAR-tar} xf -'
+
+
+
++# Variables for tags utilities; see am/tags.am
++if test -z "$CTAGS"; then
++ CTAGS=ctags
++fi
++
++if test -z "$ETAGS"; then
++ ETAGS=etags
++fi
++
++if test -z "$CSCOPE"; then
++ CSCOPE=cscope
++fi
++
++
+
+ # POSIX will say in a future version that running "rm -f" with no argument
+ # is OK; and we want to be able to make that assumption in our Makefile
+@@ -4603,7 +4621,7 @@ END
+ Aborting the configuration process, to ensure you take notice of the issue.
+
+ You can download and install GNU coreutils to get an 'rm' implementation
+-that behaves properly: <http://www.gnu.org/software/coreutils/>.
++that behaves properly: <https://www.gnu.org/software/coreutils/>.
+
+ If you want to complete the configuration process using your problematic
+ 'rm' anyway, export the environment variable ACCEPT_INFERIOR_RM_PROGRAM
+@@ -5446,8 +5464,8 @@ esac
+
+
+
+-macro_version='2.2.7a'
+-macro_revision='1.3134'
++macro_version='2.4'
++macro_revision='1.3293'
+
+
+
+@@ -5487,7 +5505,7 @@ ECHO=$ECHO$ECHO$ECHO$ECHO$ECHO$ECHO
+ { $as_echo "$as_me:${as_lineno-$LINENO}: checking how to print strings" >&5
+ $as_echo_n "checking how to print strings... " >&6; }
+ # Test print first, because it will be a builtin if present.
+-if test "X`print -r -- -n 2>/dev/null`" = X-n && \
++if test "X`( print -r -- -n ) 2>/dev/null`" = X-n && \
+ test "X`print -r -- $ECHO 2>/dev/null`" = "X$ECHO"; then
+ ECHO='print -r --'
+ elif test "X`printf %s $ECHO 2>/dev/null`" = "X$ECHO"; then
+@@ -6180,8 +6198,8 @@ $as_echo_n "checking whether the shell understands some XSI constructs... " >&6;
+ # Try some XSI features
+ xsi_shell=no
+ ( _lt_dummy="a/b/c"
+- test "${_lt_dummy##*/},${_lt_dummy%/*},"${_lt_dummy%"$_lt_dummy"}, \
+- = c,a/b,, \
++ test "${_lt_dummy##*/},${_lt_dummy%/*},${_lt_dummy#??}"${_lt_dummy%"$_lt_dummy"}, \
++ = c,a/b,b/c, \
+ && eval 'test $(( 1 + 1 )) -eq 2 \
+ && test "${#_lt_dummy}" -eq 5' ) >/dev/null 2>&1 \
+ && xsi_shell=yes
+@@ -6230,6 +6248,80 @@ esac
+
+
+
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to convert $build file names to $host format" >&5
++$as_echo_n "checking how to convert $build file names to $host format... " >&6; }
++if ${lt_cv_to_host_file_cmd+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ case $host in
++ *-*-mingw* )
++ case $build in
++ *-*-mingw* ) # actually msys
++ lt_cv_to_host_file_cmd=func_convert_file_msys_to_w32
++ ;;
++ *-*-cygwin* )
++ lt_cv_to_host_file_cmd=func_convert_file_cygwin_to_w32
++ ;;
++ * ) # otherwise, assume *nix
++ lt_cv_to_host_file_cmd=func_convert_file_nix_to_w32
++ ;;
++ esac
++ ;;
++ *-*-cygwin* )
++ case $build in
++ *-*-mingw* ) # actually msys
++ lt_cv_to_host_file_cmd=func_convert_file_msys_to_cygwin
++ ;;
++ *-*-cygwin* )
++ lt_cv_to_host_file_cmd=func_convert_file_noop
++ ;;
++ * ) # otherwise, assume *nix
++ lt_cv_to_host_file_cmd=func_convert_file_nix_to_cygwin
++ ;;
++ esac
++ ;;
++ * ) # unhandled hosts (and "normal" native builds)
++ lt_cv_to_host_file_cmd=func_convert_file_noop
++ ;;
++esac
++
++fi
++
++to_host_file_cmd=$lt_cv_to_host_file_cmd
++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_to_host_file_cmd" >&5
++$as_echo "$lt_cv_to_host_file_cmd" >&6; }
++
++
++
++
++
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to convert $build file names to toolchain format" >&5
++$as_echo_n "checking how to convert $build file names to toolchain format... " >&6; }
++if ${lt_cv_to_tool_file_cmd+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ #assume ordinary cross tools, or native build.
++lt_cv_to_tool_file_cmd=func_convert_file_noop
++case $host in
++ *-*-mingw* )
++ case $build in
++ *-*-mingw* ) # actually msys
++ lt_cv_to_tool_file_cmd=func_convert_file_msys_to_w32
++ ;;
++ esac
++ ;;
++esac
++
++fi
++
++to_tool_file_cmd=$lt_cv_to_tool_file_cmd
++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_to_tool_file_cmd" >&5
++$as_echo "$lt_cv_to_tool_file_cmd" >&6; }
++
++
++
++
++
+ { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $LD option to reload object files" >&5
+ $as_echo_n "checking for $LD option to reload object files... " >&6; }
+ if ${lt_cv_ld_reload_flag+:} false; then :
+@@ -6246,6 +6338,11 @@ case $reload_flag in
+ esac
+ reload_cmds='$LD$reload_flag -o $output$reload_objs'
+ case $host_os in
++ cygwin* | mingw* | pw32* | cegcc*)
++ if test "$GCC" != yes; then
++ reload_cmds=false
++ fi
++ ;;
+ darwin*)
+ if test "$GCC" = yes; then
+ reload_cmds='$LTCC $LTCFLAGS -nostdlib ${wl}-r -o $output$reload_objs'
+@@ -6414,7 +6511,8 @@ mingw* | pw32*)
+ lt_cv_deplibs_check_method='file_magic ^x86 archive import|^x86 DLL'
+ lt_cv_file_magic_cmd='func_win32_libid'
+ else
+- lt_cv_deplibs_check_method='file_magic file format pei*-i386(.*architecture: i386)?'
++ # Keep this pattern in sync with the one in func_win32_libid.
++ lt_cv_deplibs_check_method='file_magic file format (pei*-i386(.*architecture: i386)?|pe-arm-wince|pe-x86-64)'
+ lt_cv_file_magic_cmd='$OBJDUMP -f'
+ fi
+ ;;
+@@ -6573,6 +6671,21 @@ esac
+ fi
+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_deplibs_check_method" >&5
+ $as_echo "$lt_cv_deplibs_check_method" >&6; }
++
++file_magic_glob=
++want_nocaseglob=no
++if test "$build" = "$host"; then
++ case $host_os in
++ mingw* | pw32*)
++ if ( shopt | grep nocaseglob ) >/dev/null 2>&1; then
++ want_nocaseglob=yes
++ else
++ file_magic_glob=`echo aAbBcCdDeEfFgGhHiIjJkKlLmMnNoOpPqQrRsStTuUvVwWxXyYzZ | $SED -e "s/\(..\)/s\/[\1]\/[\1]\/g;/g"`
++ fi
++ ;;
++ esac
++fi
++
+ file_magic_cmd=$lt_cv_file_magic_cmd
+ deplibs_check_method=$lt_cv_deplibs_check_method
+ test -z "$deplibs_check_method" && deplibs_check_method=unknown
+@@ -6588,6 +6701,157 @@ test -z "$deplibs_check_method" && deplibs_check_method=unknown
+
+
+
++
++
++
++
++
++
++
++
++
++
++if test -n "$ac_tool_prefix"; then
++ # Extract the first word of "${ac_tool_prefix}dlltool", so it can be a program name with args.
++set dummy ${ac_tool_prefix}dlltool; ac_word=$2
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
++$as_echo_n "checking for $ac_word... " >&6; }
++if ${ac_cv_prog_DLLTOOL+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ if test -n "$DLLTOOL"; then
++ ac_cv_prog_DLLTOOL="$DLLTOOL" # Let the user override the test.
++else
++as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
++for as_dir in $PATH
++do
++ IFS=$as_save_IFS
++ test -z "$as_dir" && as_dir=.
++ for ac_exec_ext in '' $ac_executable_extensions; do
++ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
++ ac_cv_prog_DLLTOOL="${ac_tool_prefix}dlltool"
++ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
++ break 2
++ fi
++done
++ done
++IFS=$as_save_IFS
++
++fi
++fi
++DLLTOOL=$ac_cv_prog_DLLTOOL
++if test -n "$DLLTOOL"; then
++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $DLLTOOL" >&5
++$as_echo "$DLLTOOL" >&6; }
++else
++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
++$as_echo "no" >&6; }
++fi
++
++
++fi
++if test -z "$ac_cv_prog_DLLTOOL"; then
++ ac_ct_DLLTOOL=$DLLTOOL
++ # Extract the first word of "dlltool", so it can be a program name with args.
++set dummy dlltool; ac_word=$2
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
++$as_echo_n "checking for $ac_word... " >&6; }
++if ${ac_cv_prog_ac_ct_DLLTOOL+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ if test -n "$ac_ct_DLLTOOL"; then
++ ac_cv_prog_ac_ct_DLLTOOL="$ac_ct_DLLTOOL" # Let the user override the test.
++else
++as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
++for as_dir in $PATH
++do
++ IFS=$as_save_IFS
++ test -z "$as_dir" && as_dir=.
++ for ac_exec_ext in '' $ac_executable_extensions; do
++ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
++ ac_cv_prog_ac_ct_DLLTOOL="dlltool"
++ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
++ break 2
++ fi
++done
++ done
++IFS=$as_save_IFS
++
++fi
++fi
++ac_ct_DLLTOOL=$ac_cv_prog_ac_ct_DLLTOOL
++if test -n "$ac_ct_DLLTOOL"; then
++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_DLLTOOL" >&5
++$as_echo "$ac_ct_DLLTOOL" >&6; }
++else
++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
++$as_echo "no" >&6; }
++fi
++
++ if test "x$ac_ct_DLLTOOL" = x; then
++ DLLTOOL="false"
++ else
++ case $cross_compiling:$ac_tool_warned in
++yes:)
++{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
++$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
++ac_tool_warned=yes ;;
++esac
++ DLLTOOL=$ac_ct_DLLTOOL
++ fi
++else
++ DLLTOOL="$ac_cv_prog_DLLTOOL"
++fi
++
++test -z "$DLLTOOL" && DLLTOOL=dlltool
++
++
++
++
++
++
++
++
++
++
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to associate runtime and link libraries" >&5
++$as_echo_n "checking how to associate runtime and link libraries... " >&6; }
++if ${lt_cv_sharedlib_from_linklib_cmd+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ lt_cv_sharedlib_from_linklib_cmd='unknown'
++
++case $host_os in
++cygwin* | mingw* | pw32* | cegcc*)
++ # two different shell functions defined in ltmain.sh
++ # decide which to use based on capabilities of $DLLTOOL
++ case `$DLLTOOL --help 2>&1` in
++ *--identify-strict*)
++ lt_cv_sharedlib_from_linklib_cmd=func_cygming_dll_for_implib
++ ;;
++ *)
++ lt_cv_sharedlib_from_linklib_cmd=func_cygming_dll_for_implib_fallback
++ ;;
++ esac
++ ;;
++*)
++ # fallback: assume linklib IS sharedlib
++ lt_cv_sharedlib_from_linklib_cmd="$ECHO"
++ ;;
++esac
++
++fi
++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_sharedlib_from_linklib_cmd" >&5
++$as_echo "$lt_cv_sharedlib_from_linklib_cmd" >&6; }
++sharedlib_from_linklib_cmd=$lt_cv_sharedlib_from_linklib_cmd
++test -z "$sharedlib_from_linklib_cmd" && sharedlib_from_linklib_cmd=$ECHO
++
++
++
++
++
++
++
+ plugin_option=
+ plugin_names="liblto_plugin.so liblto_plugin-0.dll cyglto_plugin-0.dll"
+ for plugin in $plugin_names; do
+@@ -6602,8 +6866,10 @@ for plugin in $plugin_names; do
+ done
+
+ if test -n "$ac_tool_prefix"; then
+- # Extract the first word of "${ac_tool_prefix}ar", so it can be a program name with args.
+-set dummy ${ac_tool_prefix}ar; ac_word=$2
++ for ac_prog in ar
++ do
++ # Extract the first word of "$ac_tool_prefix$ac_prog", so it can be a program name with args.
++set dummy $ac_tool_prefix$ac_prog; ac_word=$2
+ { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+ $as_echo_n "checking for $ac_word... " >&6; }
+ if ${ac_cv_prog_AR+:} false; then :
+@@ -6619,7 +6885,7 @@ do
+ test -z "$as_dir" && as_dir=.
+ for ac_exec_ext in '' $ac_executable_extensions; do
+ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
+- ac_cv_prog_AR="${ac_tool_prefix}ar"
++ ac_cv_prog_AR="$ac_tool_prefix$ac_prog"
+ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+ break 2
+ fi
+@@ -6639,11 +6905,15 @@ $as_echo "no" >&6; }
+ fi
+
+
++ test -n "$AR" && break
++ done
+ fi
+-if test -z "$ac_cv_prog_AR"; then
++if test -z "$AR"; then
+ ac_ct_AR=$AR
+- # Extract the first word of "ar", so it can be a program name with args.
+-set dummy ar; ac_word=$2
++ for ac_prog in ar
++do
++ # Extract the first word of "$ac_prog", so it can be a program name with args.
++set dummy $ac_prog; ac_word=$2
+ { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+ $as_echo_n "checking for $ac_word... " >&6; }
+ if ${ac_cv_prog_ac_ct_AR+:} false; then :
+@@ -6659,7 +6929,7 @@ do
+ test -z "$as_dir" && as_dir=.
+ for ac_exec_ext in '' $ac_executable_extensions; do
+ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
+- ac_cv_prog_ac_ct_AR="ar"
++ ac_cv_prog_ac_ct_AR="$ac_prog"
+ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+ break 2
+ fi
+@@ -6678,6 +6948,10 @@ else
+ $as_echo "no" >&6; }
+ fi
+
++
++ test -n "$ac_ct_AR" && break
++done
++
+ if test "x$ac_ct_AR" = x; then
+ AR="false"
+ else
+@@ -6689,31 +6963,83 @@ ac_tool_warned=yes ;;
+ esac
+ AR=$ac_ct_AR
+ fi
+-else
+- AR="$ac_cv_prog_AR"
+ fi
+
+-test -z "$AR" && AR=ar
+-if test -n "$plugin_option"; then
+- if $AR --help 2>&1 | grep -q "\--plugin"; then
+- touch conftest.c
+- $AR $plugin_option rc conftest.a conftest.c
+- if test "$?" != 0; then
+- { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: Failed: $AR $plugin_option rc" >&5
++ touch conftest.c
++ $AR $plugin_option rc conftest.a conftest.c
++ if test "$?" != 0; then
++ { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: Failed: $AR $plugin_option rc" >&5
+ $as_echo "$as_me: WARNING: Failed: $AR $plugin_option rc" >&2;}
+- else
+- AR="$AR $plugin_option"
+- fi
+- rm -f conftest.*
++ else
++ AR="$AR $plugin_option"
+ fi
+-fi
+-test -z "$AR_FLAGS" && AR_FLAGS=cru
++ rm -f conftest.*
++: ${AR=ar}
++: ${AR_FLAGS=cru}
++
++
++
++
++
+
+
+
+
+
+
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for archiver @FILE support" >&5
++$as_echo_n "checking for archiver @FILE support... " >&6; }
++if ${lt_cv_ar_at_file+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ lt_cv_ar_at_file=no
++ cat confdefs.h - <<_ACEOF >conftest.$ac_ext
++/* end confdefs.h. */
++
++int
++main ()
++{
++
++ ;
++ return 0;
++}
++_ACEOF
++if ac_fn_c_try_compile "$LINENO"; then :
++ echo conftest.$ac_objext > conftest.lst
++ lt_ar_try='$AR $AR_FLAGS libconftest.a @conftest.lst >&5'
++ { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$lt_ar_try\""; } >&5
++ (eval $lt_ar_try) 2>&5
++ ac_status=$?
++ $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
++ test $ac_status = 0; }
++ if test "$ac_status" -eq 0; then
++ # Ensure the archiver fails upon bogus file names.
++ rm -f conftest.$ac_objext libconftest.a
++ { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$lt_ar_try\""; } >&5
++ (eval $lt_ar_try) 2>&5
++ ac_status=$?
++ $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
++ test $ac_status = 0; }
++ if test "$ac_status" -ne 0; then
++ lt_cv_ar_at_file=@
++ fi
++ fi
++ rm -f conftest.* libconftest.a
++
++fi
++rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
++
++fi
++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_ar_at_file" >&5
++$as_echo "$lt_cv_ar_at_file" >&6; }
++
++if test "x$lt_cv_ar_at_file" = xno; then
++ archiver_list_spec=
++else
++ archiver_list_spec=$lt_cv_ar_at_file
++fi
++
++
+
+
+
+@@ -7058,8 +7384,8 @@ esac
+ lt_cv_sys_global_symbol_to_cdecl="sed -n -e 's/^T .* \(.*\)$/extern int \1();/p' -e 's/^$symcode* .* \(.*\)$/extern char \1;/p'"
+
+ # Transform an extracted symbol line into symbol name and symbol address
+-lt_cv_sys_global_symbol_to_c_name_address="sed -n -e 's/^: \([^ ]*\) $/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"\2\", (void *) \&\2},/p'"
+-lt_cv_sys_global_symbol_to_c_name_address_lib_prefix="sed -n -e 's/^: \([^ ]*\) $/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \(lib[^ ]*\)$/ {\"\2\", (void *) \&\2},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"lib\2\", (void *) \&\2},/p'"
++lt_cv_sys_global_symbol_to_c_name_address="sed -n -e 's/^: \([^ ]*\)[ ]*$/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"\2\", (void *) \&\2},/p'"
++lt_cv_sys_global_symbol_to_c_name_address_lib_prefix="sed -n -e 's/^: \([^ ]*\)[ ]*$/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \(lib[^ ]*\)$/ {\"\2\", (void *) \&\2},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"lib\2\", (void *) \&\2},/p'"
+
+ # Handle CRLF in mingw tool chain
+ opt_cr=
+@@ -7095,6 +7421,7 @@ for ac_symprfx in "" "_"; do
+ else
+ lt_cv_sys_global_symbol_pipe="sed -n -e 's/^.*[ ]\($symcode$symcode*\)[ ][ ]*$ac_symprfx$sympat$opt_cr$/$symxfrm/p'"
+ fi
++ lt_cv_sys_global_symbol_pipe="$lt_cv_sys_global_symbol_pipe | sed '/ __gnu_lto/d'"
+
+ # Check to see that the pipe works correctly.
+ pipe_works=no
+@@ -7136,6 +7463,18 @@ _LT_EOF
+ if $GREP ' nm_test_var$' "$nlist" >/dev/null; then
+ if $GREP ' nm_test_func$' "$nlist" >/dev/null; then
+ cat <<_LT_EOF > conftest.$ac_ext
++/* Keep this code in sync between libtool.m4, ltmain, lt_system.h, and tests. */
++#if defined(_WIN32) || defined(__CYGWIN__) || defined(_WIN32_WCE)
++/* DATA imports from DLLs on WIN32 con't be const, because runtime
++ relocations are performed -- see ld's documentation on pseudo-relocs. */
++# define LT_DLSYM_CONST
++#elif defined(__osf__)
++/* This system does not cope well with relocations in const data. */
++# define LT_DLSYM_CONST
++#else
++# define LT_DLSYM_CONST const
++#endif
++
+ #ifdef __cplusplus
+ extern "C" {
+ #endif
+@@ -7147,7 +7486,7 @@ _LT_EOF
+ cat <<_LT_EOF >> conftest.$ac_ext
+
+ /* The mapping between symbol names and symbols. */
+-const struct {
++LT_DLSYM_CONST struct {
+ const char *name;
+ void *address;
+ }
+@@ -7173,8 +7512,8 @@ static const void *lt_preloaded_setup() {
+ _LT_EOF
+ # Now try linking the two files.
+ mv conftest.$ac_objext conftstm.$ac_objext
+- lt_save_LIBS="$LIBS"
+- lt_save_CFLAGS="$CFLAGS"
++ lt_globsym_save_LIBS=$LIBS
++ lt_globsym_save_CFLAGS=$CFLAGS
+ LIBS="conftstm.$ac_objext"
+ CFLAGS="$CFLAGS$lt_prog_compiler_no_builtin_flag"
+ if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_link\""; } >&5
+@@ -7184,8 +7523,8 @@ _LT_EOF
+ test $ac_status = 0; } && test -s conftest${ac_exeext}; then
+ pipe_works=yes
+ fi
+- LIBS="$lt_save_LIBS"
+- CFLAGS="$lt_save_CFLAGS"
++ LIBS=$lt_globsym_save_LIBS
++ CFLAGS=$lt_globsym_save_CFLAGS
+ else
+ echo "cannot find nm_test_func in $nlist" >&5
+ fi
+@@ -7222,6 +7561,18 @@ else
+ $as_echo "ok" >&6; }
+ fi
+
++# Response file support.
++if test "$lt_cv_nm_interface" = "MS dumpbin"; then
++ nm_file_list_spec='@'
++elif $NM --help 2>/dev/null | grep '[@]FILE' >/dev/null; then
++ nm_file_list_spec='@'
++fi
++
++
++
++
++
++
+
+
+
+@@ -7238,6 +7589,43 @@ fi
+
+
+
++
++
++
++
++
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for sysroot" >&5
++$as_echo_n "checking for sysroot... " >&6; }
++
++# Check whether --with-libtool-sysroot was given.
++if test "${with_libtool_sysroot+set}" = set; then :
++ withval=$with_libtool_sysroot;
++else
++ with_libtool_sysroot=no
++fi
++
++
++lt_sysroot=
++case ${with_libtool_sysroot} in #(
++ yes)
++ if test "$GCC" = yes; then
++ lt_sysroot=`$CC --print-sysroot 2>/dev/null`
++ fi
++ ;; #(
++ /*)
++ lt_sysroot=`echo "$with_libtool_sysroot" | sed -e "$sed_quote_subst"`
++ ;; #(
++ no|'')
++ ;; #(
++ *)
++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: ${with_libtool_sysroot}" >&5
++$as_echo "${with_libtool_sysroot}" >&6; }
++ as_fn_error $? "The sysroot must be an absolute path." "$LINENO" 5
++ ;;
++esac
++
++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: ${lt_sysroot:-no}" >&5
++$as_echo "${lt_sysroot:-no}" >&6; }
+
+
+
+@@ -7404,50 +7792,167 @@ _ACEOF
+ if ac_fn_c_try_link "$LINENO"; then :
+ lt_cv_cc_needs_belf=yes
+ else
+- lt_cv_cc_needs_belf=no
++ lt_cv_cc_needs_belf=no
++fi
++rm -f core conftest.err conftest.$ac_objext \
++ conftest$ac_exeext conftest.$ac_ext
++ ac_ext=c
++ac_cpp='$CPP $CPPFLAGS'
++ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5'
++ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5'
++ac_compiler_gnu=$ac_cv_c_compiler_gnu
++
++fi
++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_cc_needs_belf" >&5
++$as_echo "$lt_cv_cc_needs_belf" >&6; }
++ if test x"$lt_cv_cc_needs_belf" != x"yes"; then
++ # this is probably gcc 2.8.0, egcs 1.0 or newer; no need for -belf
++ CFLAGS="$SAVE_CFLAGS"
++ fi
++ ;;
++sparc*-*solaris*)
++ # Find out which ABI we are using.
++ echo 'int i;' > conftest.$ac_ext
++ if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5
++ (eval $ac_compile) 2>&5
++ ac_status=$?
++ $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
++ test $ac_status = 0; }; then
++ case `/usr/bin/file conftest.o` in
++ *64-bit*)
++ case $lt_cv_prog_gnu_ld in
++ yes*) LD="${LD-ld} -m elf64_sparc" ;;
++ *)
++ if ${LD-ld} -64 -r -o conftest2.o conftest.o >/dev/null 2>&1; then
++ LD="${LD-ld} -64"
++ fi
++ ;;
++ esac
++ ;;
++ esac
++ fi
++ rm -rf conftest*
++ ;;
++esac
++
++need_locks="$enable_libtool_lock"
++
++if test -n "$ac_tool_prefix"; then
++ # Extract the first word of "${ac_tool_prefix}mt", so it can be a program name with args.
++set dummy ${ac_tool_prefix}mt; ac_word=$2
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
++$as_echo_n "checking for $ac_word... " >&6; }
++if ${ac_cv_prog_MANIFEST_TOOL+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ if test -n "$MANIFEST_TOOL"; then
++ ac_cv_prog_MANIFEST_TOOL="$MANIFEST_TOOL" # Let the user override the test.
++else
++as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
++for as_dir in $PATH
++do
++ IFS=$as_save_IFS
++ test -z "$as_dir" && as_dir=.
++ for ac_exec_ext in '' $ac_executable_extensions; do
++ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
++ ac_cv_prog_MANIFEST_TOOL="${ac_tool_prefix}mt"
++ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
++ break 2
++ fi
++done
++ done
++IFS=$as_save_IFS
++
++fi
++fi
++MANIFEST_TOOL=$ac_cv_prog_MANIFEST_TOOL
++if test -n "$MANIFEST_TOOL"; then
++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $MANIFEST_TOOL" >&5
++$as_echo "$MANIFEST_TOOL" >&6; }
++else
++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
++$as_echo "no" >&6; }
++fi
++
++
++fi
++if test -z "$ac_cv_prog_MANIFEST_TOOL"; then
++ ac_ct_MANIFEST_TOOL=$MANIFEST_TOOL
++ # Extract the first word of "mt", so it can be a program name with args.
++set dummy mt; ac_word=$2
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
++$as_echo_n "checking for $ac_word... " >&6; }
++if ${ac_cv_prog_ac_ct_MANIFEST_TOOL+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ if test -n "$ac_ct_MANIFEST_TOOL"; then
++ ac_cv_prog_ac_ct_MANIFEST_TOOL="$ac_ct_MANIFEST_TOOL" # Let the user override the test.
++else
++as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
++for as_dir in $PATH
++do
++ IFS=$as_save_IFS
++ test -z "$as_dir" && as_dir=.
++ for ac_exec_ext in '' $ac_executable_extensions; do
++ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
++ ac_cv_prog_ac_ct_MANIFEST_TOOL="mt"
++ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
++ break 2
++ fi
++done
++ done
++IFS=$as_save_IFS
++
++fi
++fi
++ac_ct_MANIFEST_TOOL=$ac_cv_prog_ac_ct_MANIFEST_TOOL
++if test -n "$ac_ct_MANIFEST_TOOL"; then
++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_MANIFEST_TOOL" >&5
++$as_echo "$ac_ct_MANIFEST_TOOL" >&6; }
++else
++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
++$as_echo "no" >&6; }
+ fi
+-rm -f core conftest.err conftest.$ac_objext \
+- conftest$ac_exeext conftest.$ac_ext
+- ac_ext=c
+-ac_cpp='$CPP $CPPFLAGS'
+-ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5'
+-ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5'
+-ac_compiler_gnu=$ac_cv_c_compiler_gnu
+
+-fi
+-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_cc_needs_belf" >&5
+-$as_echo "$lt_cv_cc_needs_belf" >&6; }
+- if test x"$lt_cv_cc_needs_belf" != x"yes"; then
+- # this is probably gcc 2.8.0, egcs 1.0 or newer; no need for -belf
+- CFLAGS="$SAVE_CFLAGS"
++ if test "x$ac_ct_MANIFEST_TOOL" = x; then
++ MANIFEST_TOOL=":"
++ else
++ case $cross_compiling:$ac_tool_warned in
++yes:)
++{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
++$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
++ac_tool_warned=yes ;;
++esac
++ MANIFEST_TOOL=$ac_ct_MANIFEST_TOOL
+ fi
+- ;;
+-sparc*-*solaris*)
+- # Find out which ABI we are using.
+- echo 'int i;' > conftest.$ac_ext
+- if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5
+- (eval $ac_compile) 2>&5
+- ac_status=$?
+- $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
+- test $ac_status = 0; }; then
+- case `/usr/bin/file conftest.o` in
+- *64-bit*)
+- case $lt_cv_prog_gnu_ld in
+- yes*) LD="${LD-ld} -m elf64_sparc" ;;
+- *)
+- if ${LD-ld} -64 -r -o conftest2.o conftest.o >/dev/null 2>&1; then
+- LD="${LD-ld} -64"
+- fi
+- ;;
+- esac
+- ;;
+- esac
++else
++ MANIFEST_TOOL="$ac_cv_prog_MANIFEST_TOOL"
++fi
++
++test -z "$MANIFEST_TOOL" && MANIFEST_TOOL=mt
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking if $MANIFEST_TOOL is a manifest tool" >&5
++$as_echo_n "checking if $MANIFEST_TOOL is a manifest tool... " >&6; }
++if ${lt_cv_path_mainfest_tool+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ lt_cv_path_mainfest_tool=no
++ echo "$as_me:$LINENO: $MANIFEST_TOOL '-?'" >&5
++ $MANIFEST_TOOL '-?' 2>conftest.err > conftest.out
++ cat conftest.err >&5
++ if $GREP 'Manifest Tool' conftest.out > /dev/null; then
++ lt_cv_path_mainfest_tool=yes
+ fi
+- rm -rf conftest*
+- ;;
+-esac
++ rm -f conftest*
++fi
++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_path_mainfest_tool" >&5
++$as_echo "$lt_cv_path_mainfest_tool" >&6; }
++if test "x$lt_cv_path_mainfest_tool" != xyes; then
++ MANIFEST_TOOL=:
++fi
++
++
++
+
+-need_locks="$enable_libtool_lock"
+
+
+ case $host_os in
+@@ -8012,6 +8517,8 @@ _LT_EOF
+ $LTCC $LTCFLAGS -c -o conftest.o conftest.c 2>&5
+ echo "$AR cru libconftest.a conftest.o" >&5
+ $AR cru libconftest.a conftest.o 2>&5
++ echo "$RANLIB libconftest.a" >&5
++ $RANLIB libconftest.a 2>&5
+ cat > conftest.c << _LT_EOF
+ int main() { return 0;}
+ _LT_EOF
+@@ -8596,8 +9103,6 @@ fi
+ lt_prog_compiler_pic=
+ lt_prog_compiler_static=
+
+-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $compiler option to produce PIC" >&5
+-$as_echo_n "checking for $compiler option to produce PIC... " >&6; }
+
+ if test "$GCC" = yes; then
+ lt_prog_compiler_wl='-Wl,'
+@@ -8763,6 +9268,12 @@ $as_echo_n "checking for $compiler option to produce PIC... " >&6; }
+ lt_prog_compiler_pic='--shared'
+ lt_prog_compiler_static='--static'
+ ;;
++ nagfor*)
++ # NAG Fortran compiler
++ lt_prog_compiler_wl='-Wl,-Wl,,'
++ lt_prog_compiler_pic='-PIC'
++ lt_prog_compiler_static='-Bstatic'
++ ;;
+ pgcc* | pgf77* | pgf90* | pgf95* | pgfortran*)
+ # Portland Group compilers (*not* the Pentium gcc compiler,
+ # which looks to be a dead project)
+@@ -8825,7 +9336,7 @@ $as_echo_n "checking for $compiler option to produce PIC... " >&6; }
+ lt_prog_compiler_pic='-KPIC'
+ lt_prog_compiler_static='-Bstatic'
+ case $cc_basename in
+- f77* | f90* | f95*)
++ f77* | f90* | f95* | sunf77* | sunf90* | sunf95*)
+ lt_prog_compiler_wl='-Qoption ld ';;
+ *)
+ lt_prog_compiler_wl='-Wl,';;
+@@ -8882,13 +9393,17 @@ case $host_os in
+ lt_prog_compiler_pic="$lt_prog_compiler_pic -DPIC"
+ ;;
+ esac
+-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_prog_compiler_pic" >&5
+-$as_echo "$lt_prog_compiler_pic" >&6; }
+-
+-
+-
+-
+
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $compiler option to produce PIC" >&5
++$as_echo_n "checking for $compiler option to produce PIC... " >&6; }
++if ${lt_cv_prog_compiler_pic+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ lt_cv_prog_compiler_pic=$lt_prog_compiler_pic
++fi
++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_pic" >&5
++$as_echo "$lt_cv_prog_compiler_pic" >&6; }
++lt_prog_compiler_pic=$lt_cv_prog_compiler_pic
+
+ #
+ # Check to make sure the PIC flag actually works.
+@@ -8949,6 +9464,11 @@ fi
+
+
+
++
++
++
++
++
+ #
+ # Check to make sure the static flag actually works.
+ #
+@@ -9299,7 +9819,8 @@ _LT_EOF
+ allow_undefined_flag=unsupported
+ always_export_symbols=no
+ enable_shared_with_static_runtimes=yes
+- export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1 DATA/'\'' | $SED -e '\''/^[AITW][ ]/s/.*[ ]//'\'' | sort | uniq > $export_symbols'
++ export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1 DATA/;s/^.*[ ]__nm__\([^ ]*\)[ ][^ ]*/\1 DATA/;/^I[ ]/d;/^[AITW][ ]/s/.* //'\'' | sort | uniq > $export_symbols'
++ exclude_expsyms='[_]+GLOBAL_OFFSET_TABLE_|[_]+GLOBAL__[FID]_.*|[_]+head_[A-Za-z0-9_]+_dll|[A-Za-z0-9_]+_dll_iname'
+
+ if $LD --help 2>&1 | $GREP 'auto-import' > /dev/null; then
+ archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib'
+@@ -9398,12 +9919,12 @@ _LT_EOF
+ whole_archive_flag_spec='--whole-archive$convenience --no-whole-archive'
+ hardcode_libdir_flag_spec=
+ hardcode_libdir_flag_spec_ld='-rpath $libdir'
+- archive_cmds='$LD -shared $libobjs $deplibs $compiler_flags -soname $soname -o $lib'
++ archive_cmds='$LD -shared $libobjs $deplibs $linker_flags -soname $soname -o $lib'
+ if test "x$supports_anon_versioning" = xyes; then
+ archive_expsym_cmds='echo "{ global:" > $output_objdir/$libname.ver~
+ cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~
+ echo "local: *; };" >> $output_objdir/$libname.ver~
+- $LD -shared $libobjs $deplibs $compiler_flags -soname $soname -version-script $output_objdir/$libname.ver -o $lib'
++ $LD -shared $libobjs $deplibs $linker_flags -soname $soname -version-script $output_objdir/$libname.ver -o $lib'
+ fi
+ ;;
+ esac
+@@ -9417,8 +9938,8 @@ _LT_EOF
+ archive_cmds='$LD -Bshareable $libobjs $deplibs $linker_flags -o $lib'
+ wlarc=
+ else
+- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
+- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
++ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
++ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
+ fi
+ ;;
+
+@@ -9436,8 +9957,8 @@ _LT_EOF
+
+ _LT_EOF
+ elif $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then
+- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
+- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
++ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
++ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
+ else
+ ld_shlibs=no
+ fi
+@@ -9483,8 +10004,8 @@ _LT_EOF
+
+ *)
+ if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then
+- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
+- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
++ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
++ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
+ else
+ ld_shlibs=no
+ fi
+@@ -9614,7 +10135,13 @@ _LT_EOF
+ allow_undefined_flag='-berok'
+ # Determine the default libpath from the value encoded in an
+ # empty executable.
+- cat confdefs.h - <<_ACEOF >conftest.$ac_ext
++ if test "${lt_cv_aix_libpath+set}" = set; then
++ aix_libpath=$lt_cv_aix_libpath
++else
++ if ${lt_cv_aix_libpath_+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+ /* end confdefs.h. */
+
+ int
+@@ -9627,22 +10154,29 @@ main ()
+ _ACEOF
+ if ac_fn_c_try_link "$LINENO"; then :
+
+-lt_aix_libpath_sed='
+- /Import File Strings/,/^$/ {
+- /^0/ {
+- s/^0 *\(.*\)$/\1/
+- p
+- }
+- }'
+-aix_libpath=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
+-# Check for a 64-bit object if we didn't find anything.
+-if test -z "$aix_libpath"; then
+- aix_libpath=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
+-fi
++ lt_aix_libpath_sed='
++ /Import File Strings/,/^$/ {
++ /^0/ {
++ s/^0 *\([^ ]*\) *$/\1/
++ p
++ }
++ }'
++ lt_cv_aix_libpath_=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
++ # Check for a 64-bit object if we didn't find anything.
++ if test -z "$lt_cv_aix_libpath_"; then
++ lt_cv_aix_libpath_=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
++ fi
+ fi
+ rm -f core conftest.err conftest.$ac_objext \
+ conftest$ac_exeext conftest.$ac_ext
+-if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
++ if test -z "$lt_cv_aix_libpath_"; then
++ lt_cv_aix_libpath_="/usr/lib:/lib"
++ fi
++
++fi
++
++ aix_libpath=$lt_cv_aix_libpath_
++fi
+
+ hardcode_libdir_flag_spec='${wl}-blibpath:$libdir:'"$aix_libpath"
+ archive_expsym_cmds='$CC -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags `if test "x${allow_undefined_flag}" != "x"; then func_echo_all "${wl}${allow_undefined_flag}"; else :; fi` '"\${wl}$exp_sym_flag:\$export_symbols $shared_flag"
+@@ -9654,7 +10188,13 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
+ else
+ # Determine the default libpath from the value encoded in an
+ # empty executable.
+- cat confdefs.h - <<_ACEOF >conftest.$ac_ext
++ if test "${lt_cv_aix_libpath+set}" = set; then
++ aix_libpath=$lt_cv_aix_libpath
++else
++ if ${lt_cv_aix_libpath_+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+ /* end confdefs.h. */
+
+ int
+@@ -9667,22 +10207,29 @@ main ()
+ _ACEOF
+ if ac_fn_c_try_link "$LINENO"; then :
+
+-lt_aix_libpath_sed='
+- /Import File Strings/,/^$/ {
+- /^0/ {
+- s/^0 *\(.*\)$/\1/
+- p
+- }
+- }'
+-aix_libpath=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
+-# Check for a 64-bit object if we didn't find anything.
+-if test -z "$aix_libpath"; then
+- aix_libpath=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
+-fi
++ lt_aix_libpath_sed='
++ /Import File Strings/,/^$/ {
++ /^0/ {
++ s/^0 *\([^ ]*\) *$/\1/
++ p
++ }
++ }'
++ lt_cv_aix_libpath_=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
++ # Check for a 64-bit object if we didn't find anything.
++ if test -z "$lt_cv_aix_libpath_"; then
++ lt_cv_aix_libpath_=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
++ fi
+ fi
+ rm -f core conftest.err conftest.$ac_objext \
+ conftest$ac_exeext conftest.$ac_ext
+-if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
++ if test -z "$lt_cv_aix_libpath_"; then
++ lt_cv_aix_libpath_="/usr/lib:/lib"
++ fi
++
++fi
++
++ aix_libpath=$lt_cv_aix_libpath_
++fi
+
+ hardcode_libdir_flag_spec='${wl}-blibpath:$libdir:'"$aix_libpath"
+ # Warning - without using the other run time loading flags,
+@@ -9727,20 +10274,63 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
+ # Microsoft Visual C++.
+ # hardcode_libdir_flag_spec is actually meaningless, as there is
+ # no search path for DLLs.
+- hardcode_libdir_flag_spec=' '
+- allow_undefined_flag=unsupported
+- # Tell ltmain to make .lib files, not .a files.
+- libext=lib
+- # Tell ltmain to make .dll files, not .so files.
+- shrext_cmds=".dll"
+- # FIXME: Setting linknames here is a bad hack.
+- archive_cmds='$CC -o $lib $libobjs $compiler_flags `func_echo_all "$deplibs" | $SED '\''s/ -lc$//'\''` -link -dll~linknames='
+- # The linker will automatically build a .lib file if we build a DLL.
+- old_archive_from_new_cmds='true'
+- # FIXME: Should let the user specify the lib program.
+- old_archive_cmds='lib -OUT:$oldlib$oldobjs$old_deplibs'
+- fix_srcfile_path='`cygpath -w "$srcfile"`'
+- enable_shared_with_static_runtimes=yes
++ case $cc_basename in
++ cl*)
++ # Native MSVC
++ hardcode_libdir_flag_spec=' '
++ allow_undefined_flag=unsupported
++ always_export_symbols=yes
++ file_list_spec='@'
++ # Tell ltmain to make .lib files, not .a files.
++ libext=lib
++ # Tell ltmain to make .dll files, not .so files.
++ shrext_cmds=".dll"
++ # FIXME: Setting linknames here is a bad hack.
++ archive_cmds='$CC -o $output_objdir/$soname $libobjs $compiler_flags $deplibs -Wl,-dll~linknames='
++ archive_expsym_cmds='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then
++ sed -n -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' -e '1\\\!p' < $export_symbols > $output_objdir/$soname.exp;
++ else
++ sed -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' < $export_symbols > $output_objdir/$soname.exp;
++ fi~
++ $CC -o $tool_output_objdir$soname $libobjs $compiler_flags $deplibs "@$tool_output_objdir$soname.exp" -Wl,-DLL,-IMPLIB:"$tool_output_objdir$libname.dll.lib"~
++ linknames='
++ # The linker will not automatically build a static lib if we build a DLL.
++ # _LT_TAGVAR(old_archive_from_new_cmds, )='true'
++ enable_shared_with_static_runtimes=yes
++ export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1,DATA/'\'' | $SED -e '\''/^[AITW][ ]/s/.*[ ]//'\'' | sort | uniq > $export_symbols'
++ # Don't use ranlib
++ old_postinstall_cmds='chmod 644 $oldlib'
++ postlink_cmds='lt_outputfile="@OUTPUT@"~
++ lt_tool_outputfile="@TOOL_OUTPUT@"~
++ case $lt_outputfile in
++ *.exe|*.EXE) ;;
++ *)
++ lt_outputfile="$lt_outputfile.exe"
++ lt_tool_outputfile="$lt_tool_outputfile.exe"
++ ;;
++ esac~
++ if test "$MANIFEST_TOOL" != ":" && test -f "$lt_outputfile.manifest"; then
++ $MANIFEST_TOOL -manifest "$lt_tool_outputfile.manifest" -outputresource:"$lt_tool_outputfile" || exit 1;
++ $RM "$lt_outputfile.manifest";
++ fi'
++ ;;
++ *)
++ # Assume MSVC wrapper
++ hardcode_libdir_flag_spec=' '
++ allow_undefined_flag=unsupported
++ # Tell ltmain to make .lib files, not .a files.
++ libext=lib
++ # Tell ltmain to make .dll files, not .so files.
++ shrext_cmds=".dll"
++ # FIXME: Setting linknames here is a bad hack.
++ archive_cmds='$CC -o $lib $libobjs $compiler_flags `func_echo_all "$deplibs" | $SED '\''s/ -lc$//'\''` -link -dll~linknames='
++ # The linker will automatically build a .lib file if we build a DLL.
++ old_archive_from_new_cmds='true'
++ # FIXME: Should let the user specify the lib program.
++ old_archive_cmds='lib -OUT:$oldlib$oldobjs$old_deplibs'
++ enable_shared_with_static_runtimes=yes
++ ;;
++ esac
+ ;;
+
+ darwin* | rhapsody*)
+@@ -9801,7 +10391,7 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
+
+ # FreeBSD 3 and greater uses gcc -shared to do shared libraries.
+ freebsd* | dragonfly*)
+- archive_cmds='$CC -shared -o $lib $libobjs $deplibs $compiler_flags'
++ archive_cmds='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags'
+ hardcode_libdir_flag_spec='-R$libdir'
+ hardcode_direct=yes
+ hardcode_shlibpath_var=no
+@@ -9809,7 +10399,7 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
+
+ hpux9*)
+ if test "$GCC" = yes; then
+- archive_cmds='$RM $output_objdir/$soname~$CC -shared -fPIC ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $libobjs $deplibs $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib'
++ archive_cmds='$RM $output_objdir/$soname~$CC -shared $pic_flag ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $libobjs $deplibs $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib'
+ else
+ archive_cmds='$RM $output_objdir/$soname~$LD -b +b $install_libdir -o $output_objdir/$soname $libobjs $deplibs $linker_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib'
+ fi
+@@ -9825,7 +10415,7 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
+
+ hpux10*)
+ if test "$GCC" = yes && test "$with_gnu_ld" = no; then
+- archive_cmds='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'
++ archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'
+ else
+ archive_cmds='$LD -b +h $soname +b $install_libdir -o $lib $libobjs $deplibs $linker_flags'
+ fi
+@@ -9849,10 +10439,10 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
+ archive_cmds='$CC -shared ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags'
+ ;;
+ ia64*)
+- archive_cmds='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags'
++ archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags'
+ ;;
+ *)
+- archive_cmds='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'
++ archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'
+ ;;
+ esac
+ else
+@@ -9931,23 +10521,36 @@ fi
+
+ irix5* | irix6* | nonstopux*)
+ if test "$GCC" = yes; then
+- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
++ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
+ # Try to use the -exported_symbol ld option, if it does not
+ # work, assume that -exports_file does not work either and
+ # implicitly export all symbols.
+- save_LDFLAGS="$LDFLAGS"
+- LDFLAGS="$LDFLAGS -shared ${wl}-exported_symbol ${wl}foo ${wl}-update_registry ${wl}/dev/null"
+- cat confdefs.h - <<_ACEOF >conftest.$ac_ext
++ # This should be the same for all languages, so no per-tag cache variable.
++ { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the $host_os linker accepts -exported_symbol" >&5
++$as_echo_n "checking whether the $host_os linker accepts -exported_symbol... " >&6; }
++if ${lt_cv_irix_exported_symbol+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ save_LDFLAGS="$LDFLAGS"
++ LDFLAGS="$LDFLAGS -shared ${wl}-exported_symbol ${wl}foo ${wl}-update_registry ${wl}/dev/null"
++ cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+ /* end confdefs.h. */
+-int foo(void) {}
++int foo (void) { return 0; }
+ _ACEOF
+ if ac_fn_c_try_link "$LINENO"; then :
+- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations ${wl}-exports_file ${wl}$export_symbols -o $lib'
+-
++ lt_cv_irix_exported_symbol=yes
++else
++ lt_cv_irix_exported_symbol=no
+ fi
+ rm -f core conftest.err conftest.$ac_objext \
+ conftest$ac_exeext conftest.$ac_ext
+- LDFLAGS="$save_LDFLAGS"
++ LDFLAGS="$save_LDFLAGS"
++fi
++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_irix_exported_symbol" >&5
++$as_echo "$lt_cv_irix_exported_symbol" >&6; }
++ if test "$lt_cv_irix_exported_symbol" = yes; then
++ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations ${wl}-exports_file ${wl}$export_symbols -o $lib'
++ fi
+ else
+ archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib'
+ archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -exports_file $export_symbols -o $lib'
+@@ -10032,7 +10635,7 @@ rm -f core conftest.err conftest.$ac_objext \
+ osf4* | osf5*) # as osf3* with the addition of -msym flag
+ if test "$GCC" = yes; then
+ allow_undefined_flag=' ${wl}-expect_unresolved ${wl}\*'
+- archive_cmds='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
++ archive_cmds='$CC -shared${allow_undefined_flag} $pic_flag $libobjs $deplibs $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
+ hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir'
+ else
+ allow_undefined_flag=' -expect_unresolved \*'
+@@ -10051,9 +10654,9 @@ rm -f core conftest.err conftest.$ac_objext \
+ no_undefined_flag=' -z defs'
+ if test "$GCC" = yes; then
+ wlarc='${wl}'
+- archive_cmds='$CC -shared ${wl}-z ${wl}text ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags'
++ archive_cmds='$CC -shared $pic_flag ${wl}-z ${wl}text ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags'
+ archive_expsym_cmds='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~
+- $CC -shared ${wl}-z ${wl}text ${wl}-M ${wl}$lib.exp ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp'
++ $CC -shared $pic_flag ${wl}-z ${wl}text ${wl}-M ${wl}$lib.exp ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp'
+ else
+ case `$CC -V 2>&1` in
+ *"Compilers 5.0"*)
+@@ -10629,8 +11232,9 @@ cygwin* | mingw* | pw32* | cegcc*)
+ need_version=no
+ need_lib_prefix=no
+
+- case $GCC,$host_os in
+- yes,cygwin* | yes,mingw* | yes,pw32* | yes,cegcc*)
++ case $GCC,$cc_basename in
++ yes,*)
++ # gcc
+ library_names_spec='$libname.dll.a'
+ # DLL is installed to $(libdir)/../bin by postinstall_cmds
+ postinstall_cmds='base_file=`basename \${file}`~
+@@ -10663,13 +11267,71 @@ cygwin* | mingw* | pw32* | cegcc*)
+ library_names_spec='`echo ${libname} | sed -e 's/^lib/pw/'``echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}'
+ ;;
+ esac
++ dynamic_linker='Win32 ld.exe'
++ ;;
++
++ *,cl*)
++ # Native MSVC
++ libname_spec='$name'
++ soname_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}'
++ library_names_spec='${libname}.dll.lib'
++
++ case $build_os in
++ mingw*)
++ sys_lib_search_path_spec=
++ lt_save_ifs=$IFS
++ IFS=';'
++ for lt_path in $LIB
++ do
++ IFS=$lt_save_ifs
++ # Let DOS variable expansion print the short 8.3 style file name.
++ lt_path=`cd "$lt_path" 2>/dev/null && cmd //C "for %i in (".") do @echo %~si"`
++ sys_lib_search_path_spec="$sys_lib_search_path_spec $lt_path"
++ done
++ IFS=$lt_save_ifs
++ # Convert to MSYS style.
++ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | sed -e 's|\\\\|/|g' -e 's| \\([a-zA-Z]\\):| /\\1|g' -e 's|^ ||'`
++ ;;
++ cygwin*)
++ # Convert to unix form, then to dos form, then back to unix form
++ # but this time dos style (no spaces!) so that the unix form looks
++ # like /cygdrive/c/PROGRA~1:/cygdr...
++ sys_lib_search_path_spec=`cygpath --path --unix "$LIB"`
++ sys_lib_search_path_spec=`cygpath --path --dos "$sys_lib_search_path_spec" 2>/dev/null`
++ sys_lib_search_path_spec=`cygpath --path --unix "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"`
++ ;;
++ *)
++ sys_lib_search_path_spec="$LIB"
++ if $ECHO "$sys_lib_search_path_spec" | $GREP ';[c-zC-Z]:/' >/dev/null; then
++ # It is most probably a Windows format PATH.
++ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e 's/;/ /g'`
++ else
++ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"`
++ fi
++ # FIXME: find the short name or the path components, as spaces are
++ # common. (e.g. "Program Files" -> "PROGRA~1")
++ ;;
++ esac
++
++ # DLL is installed to $(libdir)/../bin by postinstall_cmds
++ postinstall_cmds='base_file=`basename \${file}`~
++ dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\${base_file}'\''i; echo \$dlname'\''`~
++ dldir=$destdir/`dirname \$dlpath`~
++ test -d \$dldir || mkdir -p \$dldir~
++ $install_prog $dir/$dlname \$dldir/$dlname'
++ postuninstall_cmds='dldll=`$SHELL 2>&1 -c '\''. $file; echo \$dlname'\''`~
++ dlpath=$dir/\$dldll~
++ $RM \$dlpath'
++ shlibpath_overrides_runpath=yes
++ dynamic_linker='Win32 link.exe'
+ ;;
+
+ *)
++ # Assume MSVC wrapper
+ library_names_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext} $libname.lib'
++ dynamic_linker='Win32 ld.exe'
+ ;;
+ esac
+- dynamic_linker='Win32 ld.exe'
+ # FIXME: first we should search . and the directory the executable is in
+ shlibpath_var=PATH
+ ;;
+@@ -11571,7 +12233,7 @@ else
+ lt_dlunknown=0; lt_dlno_uscore=1; lt_dlneed_uscore=2
+ lt_status=$lt_dlunknown
+ cat > conftest.$ac_ext <<_LT_EOF
+-#line 11574 "configure"
++#line $LINENO "configure"
+ #include "confdefs.h"
+
+ #if HAVE_DLFCN_H
+@@ -11615,10 +12277,10 @@ else
+ /* When -fvisbility=hidden is used, assume the code has been annotated
+ correspondingly for the symbols needed. */
+ #if defined(__GNUC__) && (((__GNUC__ == 3) && (__GNUC_MINOR__ >= 3)) || (__GNUC__ > 3))
+-void fnord () __attribute__((visibility("default")));
++int fnord () __attribute__((visibility("default")));
+ #endif
+
+-void fnord () { int i=42; }
++int fnord () { return 42; }
+ int main ()
+ {
+ void *self = dlopen (0, LT_DLGLOBAL|LT_DLLAZY_OR_NOW);
+@@ -11677,7 +12339,7 @@ else
+ lt_dlunknown=0; lt_dlno_uscore=1; lt_dlneed_uscore=2
+ lt_status=$lt_dlunknown
+ cat > conftest.$ac_ext <<_LT_EOF
+-#line 11680 "configure"
++#line $LINENO "configure"
+ #include "confdefs.h"
+
+ #if HAVE_DLFCN_H
+@@ -11721,10 +12383,10 @@ else
+ /* When -fvisbility=hidden is used, assume the code has been annotated
+ correspondingly for the symbols needed. */
+ #if defined(__GNUC__) && (((__GNUC__ == 3) && (__GNUC_MINOR__ >= 3)) || (__GNUC__ > 3))
+-void fnord () __attribute__((visibility("default")));
++int fnord () __attribute__((visibility("default")));
+ #endif
+
+-void fnord () { int i=42; }
++int fnord () { return 42; }
+ int main ()
+ {
+ void *self = dlopen (0, LT_DLGLOBAL|LT_DLLAZY_OR_NOW);
+@@ -15010,13 +15672,20 @@ exeext='`$ECHO "$exeext" | $SED "$delay_single_quote_subst"`'
+ lt_unset='`$ECHO "$lt_unset" | $SED "$delay_single_quote_subst"`'
+ lt_SP2NL='`$ECHO "$lt_SP2NL" | $SED "$delay_single_quote_subst"`'
+ lt_NL2SP='`$ECHO "$lt_NL2SP" | $SED "$delay_single_quote_subst"`'
++lt_cv_to_host_file_cmd='`$ECHO "$lt_cv_to_host_file_cmd" | $SED "$delay_single_quote_subst"`'
++lt_cv_to_tool_file_cmd='`$ECHO "$lt_cv_to_tool_file_cmd" | $SED "$delay_single_quote_subst"`'
+ reload_flag='`$ECHO "$reload_flag" | $SED "$delay_single_quote_subst"`'
+ reload_cmds='`$ECHO "$reload_cmds" | $SED "$delay_single_quote_subst"`'
+ OBJDUMP='`$ECHO "$OBJDUMP" | $SED "$delay_single_quote_subst"`'
+ deplibs_check_method='`$ECHO "$deplibs_check_method" | $SED "$delay_single_quote_subst"`'
+ file_magic_cmd='`$ECHO "$file_magic_cmd" | $SED "$delay_single_quote_subst"`'
++file_magic_glob='`$ECHO "$file_magic_glob" | $SED "$delay_single_quote_subst"`'
++want_nocaseglob='`$ECHO "$want_nocaseglob" | $SED "$delay_single_quote_subst"`'
++DLLTOOL='`$ECHO "$DLLTOOL" | $SED "$delay_single_quote_subst"`'
++sharedlib_from_linklib_cmd='`$ECHO "$sharedlib_from_linklib_cmd" | $SED "$delay_single_quote_subst"`'
+ AR='`$ECHO "$AR" | $SED "$delay_single_quote_subst"`'
+ AR_FLAGS='`$ECHO "$AR_FLAGS" | $SED "$delay_single_quote_subst"`'
++archiver_list_spec='`$ECHO "$archiver_list_spec" | $SED "$delay_single_quote_subst"`'
+ STRIP='`$ECHO "$STRIP" | $SED "$delay_single_quote_subst"`'
+ RANLIB='`$ECHO "$RANLIB" | $SED "$delay_single_quote_subst"`'
+ old_postinstall_cmds='`$ECHO "$old_postinstall_cmds" | $SED "$delay_single_quote_subst"`'
+@@ -15031,14 +15700,17 @@ lt_cv_sys_global_symbol_pipe='`$ECHO "$lt_cv_sys_global_symbol_pipe" | $SED "$de
+ lt_cv_sys_global_symbol_to_cdecl='`$ECHO "$lt_cv_sys_global_symbol_to_cdecl" | $SED "$delay_single_quote_subst"`'
+ lt_cv_sys_global_symbol_to_c_name_address='`$ECHO "$lt_cv_sys_global_symbol_to_c_name_address" | $SED "$delay_single_quote_subst"`'
+ lt_cv_sys_global_symbol_to_c_name_address_lib_prefix='`$ECHO "$lt_cv_sys_global_symbol_to_c_name_address_lib_prefix" | $SED "$delay_single_quote_subst"`'
++nm_file_list_spec='`$ECHO "$nm_file_list_spec" | $SED "$delay_single_quote_subst"`'
++lt_sysroot='`$ECHO "$lt_sysroot" | $SED "$delay_single_quote_subst"`'
+ objdir='`$ECHO "$objdir" | $SED "$delay_single_quote_subst"`'
+ MAGIC_CMD='`$ECHO "$MAGIC_CMD" | $SED "$delay_single_quote_subst"`'
+ lt_prog_compiler_no_builtin_flag='`$ECHO "$lt_prog_compiler_no_builtin_flag" | $SED "$delay_single_quote_subst"`'
+-lt_prog_compiler_wl='`$ECHO "$lt_prog_compiler_wl" | $SED "$delay_single_quote_subst"`'
+ lt_prog_compiler_pic='`$ECHO "$lt_prog_compiler_pic" | $SED "$delay_single_quote_subst"`'
++lt_prog_compiler_wl='`$ECHO "$lt_prog_compiler_wl" | $SED "$delay_single_quote_subst"`'
+ lt_prog_compiler_static='`$ECHO "$lt_prog_compiler_static" | $SED "$delay_single_quote_subst"`'
+ lt_cv_prog_compiler_c_o='`$ECHO "$lt_cv_prog_compiler_c_o" | $SED "$delay_single_quote_subst"`'
+ need_locks='`$ECHO "$need_locks" | $SED "$delay_single_quote_subst"`'
++MANIFEST_TOOL='`$ECHO "$MANIFEST_TOOL" | $SED "$delay_single_quote_subst"`'
+ DSYMUTIL='`$ECHO "$DSYMUTIL" | $SED "$delay_single_quote_subst"`'
+ NMEDIT='`$ECHO "$NMEDIT" | $SED "$delay_single_quote_subst"`'
+ LIPO='`$ECHO "$LIPO" | $SED "$delay_single_quote_subst"`'
+@@ -15071,12 +15743,12 @@ hardcode_shlibpath_var='`$ECHO "$hardcode_shlibpath_var" | $SED "$delay_single_q
+ hardcode_automatic='`$ECHO "$hardcode_automatic" | $SED "$delay_single_quote_subst"`'
+ inherit_rpath='`$ECHO "$inherit_rpath" | $SED "$delay_single_quote_subst"`'
+ link_all_deplibs='`$ECHO "$link_all_deplibs" | $SED "$delay_single_quote_subst"`'
+-fix_srcfile_path='`$ECHO "$fix_srcfile_path" | $SED "$delay_single_quote_subst"`'
+ always_export_symbols='`$ECHO "$always_export_symbols" | $SED "$delay_single_quote_subst"`'
+ export_symbols_cmds='`$ECHO "$export_symbols_cmds" | $SED "$delay_single_quote_subst"`'
+ exclude_expsyms='`$ECHO "$exclude_expsyms" | $SED "$delay_single_quote_subst"`'
+ include_expsyms='`$ECHO "$include_expsyms" | $SED "$delay_single_quote_subst"`'
+ prelink_cmds='`$ECHO "$prelink_cmds" | $SED "$delay_single_quote_subst"`'
++postlink_cmds='`$ECHO "$postlink_cmds" | $SED "$delay_single_quote_subst"`'
+ file_list_spec='`$ECHO "$file_list_spec" | $SED "$delay_single_quote_subst"`'
+ variables_saved_for_relink='`$ECHO "$variables_saved_for_relink" | $SED "$delay_single_quote_subst"`'
+ need_lib_prefix='`$ECHO "$need_lib_prefix" | $SED "$delay_single_quote_subst"`'
+@@ -15131,8 +15803,13 @@ reload_flag \
+ OBJDUMP \
+ deplibs_check_method \
+ file_magic_cmd \
++file_magic_glob \
++want_nocaseglob \
++DLLTOOL \
++sharedlib_from_linklib_cmd \
+ AR \
+ AR_FLAGS \
++archiver_list_spec \
+ STRIP \
+ RANLIB \
+ CC \
+@@ -15142,12 +15819,14 @@ lt_cv_sys_global_symbol_pipe \
+ lt_cv_sys_global_symbol_to_cdecl \
+ lt_cv_sys_global_symbol_to_c_name_address \
+ lt_cv_sys_global_symbol_to_c_name_address_lib_prefix \
++nm_file_list_spec \
+ lt_prog_compiler_no_builtin_flag \
+-lt_prog_compiler_wl \
+ lt_prog_compiler_pic \
++lt_prog_compiler_wl \
+ lt_prog_compiler_static \
+ lt_cv_prog_compiler_c_o \
+ need_locks \
++MANIFEST_TOOL \
+ DSYMUTIL \
+ NMEDIT \
+ LIPO \
+@@ -15163,7 +15842,6 @@ no_undefined_flag \
+ hardcode_libdir_flag_spec \
+ hardcode_libdir_flag_spec_ld \
+ hardcode_libdir_separator \
+-fix_srcfile_path \
+ exclude_expsyms \
+ include_expsyms \
+ file_list_spec \
+@@ -15199,6 +15877,7 @@ module_cmds \
+ module_expsym_cmds \
+ export_symbols_cmds \
+ prelink_cmds \
++postlink_cmds \
+ postinstall_cmds \
+ postuninstall_cmds \
+ finish_cmds \
+@@ -15897,7 +16576,8 @@ esac ;;
+ # NOTE: Changes made to this file will be lost: look at ltmain.sh.
+ #
+ # Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005,
+-# 2006, 2007, 2008, 2009 Free Software Foundation, Inc.
++# 2006, 2007, 2008, 2009, 2010 Free Software Foundation,
++# Inc.
+ # Written by Gordon Matzigkeit, 1996
+ #
+ # This file is part of GNU Libtool.
+@@ -16000,19 +16680,42 @@ SP2NL=$lt_lt_SP2NL
+ # turn newlines into spaces.
+ NL2SP=$lt_lt_NL2SP
+
++# convert \$build file names to \$host format.
++to_host_file_cmd=$lt_cv_to_host_file_cmd
++
++# convert \$build files to toolchain format.
++to_tool_file_cmd=$lt_cv_to_tool_file_cmd
++
+ # An object symbol dumper.
+ OBJDUMP=$lt_OBJDUMP
+
+ # Method to check whether dependent libraries are shared objects.
+ deplibs_check_method=$lt_deplibs_check_method
+
+-# Command to use when deplibs_check_method == "file_magic".
++# Command to use when deplibs_check_method = "file_magic".
+ file_magic_cmd=$lt_file_magic_cmd
+
++# How to find potential files when deplibs_check_method = "file_magic".
++file_magic_glob=$lt_file_magic_glob
++
++# Find potential files using nocaseglob when deplibs_check_method = "file_magic".
++want_nocaseglob=$lt_want_nocaseglob
++
++# DLL creation program.
++DLLTOOL=$lt_DLLTOOL
++
++# Command to associate shared and link libraries.
++sharedlib_from_linklib_cmd=$lt_sharedlib_from_linklib_cmd
++
+ # The archiver.
+ AR=$lt_AR
++
++# Flags to create an archive.
+ AR_FLAGS=$lt_AR_FLAGS
+
++# How to feed a file listing to the archiver.
++archiver_list_spec=$lt_archiver_list_spec
++
+ # A symbol stripping program.
+ STRIP=$lt_STRIP
+
+@@ -16042,6 +16745,12 @@ global_symbol_to_c_name_address=$lt_lt_cv_sys_global_symbol_to_c_name_address
+ # Transform the output of nm in a C name address pair when lib prefix is needed.
+ global_symbol_to_c_name_address_lib_prefix=$lt_lt_cv_sys_global_symbol_to_c_name_address_lib_prefix
+
++# Specify filename containing input files for \$NM.
++nm_file_list_spec=$lt_nm_file_list_spec
++
++# The root where to search for dependent libraries,and in which our libraries should be installed.
++lt_sysroot=$lt_sysroot
++
+ # The name of the directory that contains temporary libtool files.
+ objdir=$objdir
+
+@@ -16051,6 +16760,9 @@ MAGIC_CMD=$MAGIC_CMD
+ # Must we lock files when doing compilation?
+ need_locks=$lt_need_locks
+
++# Manifest tool.
++MANIFEST_TOOL=$lt_MANIFEST_TOOL
++
+ # Tool to manipulate archived DWARF debug symbol files on Mac OS X.
+ DSYMUTIL=$lt_DSYMUTIL
+
+@@ -16165,12 +16877,12 @@ with_gcc=$GCC
+ # Compiler flag to turn off builtin functions.
+ no_builtin_flag=$lt_lt_prog_compiler_no_builtin_flag
+
+-# How to pass a linker flag through the compiler.
+-wl=$lt_lt_prog_compiler_wl
+-
+ # Additional compiler flags for building library objects.
+ pic_flag=$lt_lt_prog_compiler_pic
+
++# How to pass a linker flag through the compiler.
++wl=$lt_lt_prog_compiler_wl
++
+ # Compiler flag to prevent dynamic linking.
+ link_static_flag=$lt_lt_prog_compiler_static
+
+@@ -16257,9 +16969,6 @@ inherit_rpath=$inherit_rpath
+ # Whether libtool must link a program against all its dependency libraries.
+ link_all_deplibs=$link_all_deplibs
+
+-# Fix the shell variable \$srcfile for the compiler.
+-fix_srcfile_path=$lt_fix_srcfile_path
+-
+ # Set to "yes" if exported symbols are required.
+ always_export_symbols=$always_export_symbols
+
+@@ -16275,6 +16984,9 @@ include_expsyms=$lt_include_expsyms
+ # Commands necessary for linking programs (against libraries) with templates.
+ prelink_cmds=$lt_prelink_cmds
+
++# Commands necessary for finishing linking programs.
++postlink_cmds=$lt_postlink_cmds
++
+ # Specify filename containing input files.
+ file_list_spec=$lt_file_list_spec
+
+@@ -16307,210 +17019,169 @@ ltmain="$ac_aux_dir/ltmain.sh"
+ # if finds mixed CR/LF and LF-only lines. Since sed operates in
+ # text mode, it properly converts lines to CR/LF. This bash problem
+ # is reportedly fixed, but why not run on old versions too?
+- sed '/^# Generated shell functions inserted here/q' "$ltmain" >> "$cfgfile" \
+- || (rm -f "$cfgfile"; exit 1)
+-
+- case $xsi_shell in
+- yes)
+- cat << \_LT_EOF >> "$cfgfile"
+-
+-# func_dirname file append nondir_replacement
+-# Compute the dirname of FILE. If nonempty, add APPEND to the result,
+-# otherwise set result to NONDIR_REPLACEMENT.
+-func_dirname ()
+-{
+- case ${1} in
+- */*) func_dirname_result="${1%/*}${2}" ;;
+- * ) func_dirname_result="${3}" ;;
+- esac
+-}
+-
+-# func_basename file
+-func_basename ()
+-{
+- func_basename_result="${1##*/}"
+-}
+-
+-# func_dirname_and_basename file append nondir_replacement
+-# perform func_basename and func_dirname in a single function
+-# call:
+-# dirname: Compute the dirname of FILE. If nonempty,
+-# add APPEND to the result, otherwise set result
+-# to NONDIR_REPLACEMENT.
+-# value returned in "$func_dirname_result"
+-# basename: Compute filename of FILE.
+-# value retuned in "$func_basename_result"
+-# Implementation must be kept synchronized with func_dirname
+-# and func_basename. For efficiency, we do not delegate to
+-# those functions but instead duplicate the functionality here.
+-func_dirname_and_basename ()
+-{
+- case ${1} in
+- */*) func_dirname_result="${1%/*}${2}" ;;
+- * ) func_dirname_result="${3}" ;;
+- esac
+- func_basename_result="${1##*/}"
+-}
+-
+-# func_stripname prefix suffix name
+-# strip PREFIX and SUFFIX off of NAME.
+-# PREFIX and SUFFIX must not contain globbing or regex special
+-# characters, hashes, percent signs, but SUFFIX may contain a leading
+-# dot (in which case that matches only a dot).
+-func_stripname ()
+-{
+- # pdksh 5.2.14 does not do ${X%$Y} correctly if both X and Y are
+- # positional parameters, so assign one to ordinary parameter first.
+- func_stripname_result=${3}
+- func_stripname_result=${func_stripname_result#"${1}"}
+- func_stripname_result=${func_stripname_result%"${2}"}
+-}
+-
+-# func_opt_split
+-func_opt_split ()
+-{
+- func_opt_split_opt=${1%%=*}
+- func_opt_split_arg=${1#*=}
+-}
+-
+-# func_lo2o object
+-func_lo2o ()
+-{
+- case ${1} in
+- *.lo) func_lo2o_result=${1%.lo}.${objext} ;;
+- *) func_lo2o_result=${1} ;;
+- esac
+-}
+-
+-# func_xform libobj-or-source
+-func_xform ()
+-{
+- func_xform_result=${1%.*}.lo
+-}
+-
+-# func_arith arithmetic-term...
+-func_arith ()
+-{
+- func_arith_result=$(( $* ))
+-}
+-
+-# func_len string
+-# STRING may not start with a hyphen.
+-func_len ()
+-{
+- func_len_result=${#1}
+-}
+-
+-_LT_EOF
+- ;;
+- *) # Bourne compatible functions.
+- cat << \_LT_EOF >> "$cfgfile"
+-
+-# func_dirname file append nondir_replacement
+-# Compute the dirname of FILE. If nonempty, add APPEND to the result,
+-# otherwise set result to NONDIR_REPLACEMENT.
+-func_dirname ()
+-{
+- # Extract subdirectory from the argument.
+- func_dirname_result=`$ECHO "${1}" | $SED "$dirname"`
+- if test "X$func_dirname_result" = "X${1}"; then
+- func_dirname_result="${3}"
+- else
+- func_dirname_result="$func_dirname_result${2}"
+- fi
+-}
+-
+-# func_basename file
+-func_basename ()
+-{
+- func_basename_result=`$ECHO "${1}" | $SED "$basename"`
+-}
+-
+-
+-# func_stripname prefix suffix name
+-# strip PREFIX and SUFFIX off of NAME.
+-# PREFIX and SUFFIX must not contain globbing or regex special
+-# characters, hashes, percent signs, but SUFFIX may contain a leading
+-# dot (in which case that matches only a dot).
+-# func_strip_suffix prefix name
+-func_stripname ()
+-{
+- case ${2} in
+- .*) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%\\\\${2}\$%%"`;;
+- *) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%${2}\$%%"`;;
+- esac
+-}
+-
+-# sed scripts:
+-my_sed_long_opt='1s/^\(-[^=]*\)=.*/\1/;q'
+-my_sed_long_arg='1s/^-[^=]*=//'
+-
+-# func_opt_split
+-func_opt_split ()
+-{
+- func_opt_split_opt=`$ECHO "${1}" | $SED "$my_sed_long_opt"`
+- func_opt_split_arg=`$ECHO "${1}" | $SED "$my_sed_long_arg"`
+-}
+-
+-# func_lo2o object
+-func_lo2o ()
+-{
+- func_lo2o_result=`$ECHO "${1}" | $SED "$lo2o"`
+-}
+-
+-# func_xform libobj-or-source
+-func_xform ()
+-{
+- func_xform_result=`$ECHO "${1}" | $SED 's/\.[^.]*$/.lo/'`
+-}
+-
+-# func_arith arithmetic-term...
+-func_arith ()
+-{
+- func_arith_result=`expr "$@"`
+-}
+-
+-# func_len string
+-# STRING may not start with a hyphen.
+-func_len ()
+-{
+- func_len_result=`expr "$1" : ".*" 2>/dev/null || echo $max_cmd_len`
+-}
+-
+-_LT_EOF
+-esac
+-
+-case $lt_shell_append in
+- yes)
+- cat << \_LT_EOF >> "$cfgfile"
+-
+-# func_append var value
+-# Append VALUE to the end of shell variable VAR.
+-func_append ()
+-{
+- eval "$1+=\$2"
+-}
+-_LT_EOF
+- ;;
+- *)
+- cat << \_LT_EOF >> "$cfgfile"
+-
+-# func_append var value
+-# Append VALUE to the end of shell variable VAR.
+-func_append ()
+-{
+- eval "$1=\$$1\$2"
+-}
+-
+-_LT_EOF
+- ;;
+- esac
+-
+-
+- sed -n '/^# Generated shell functions inserted here/,$p' "$ltmain" >> "$cfgfile" \
+- || (rm -f "$cfgfile"; exit 1)
+-
+- mv -f "$cfgfile" "$ofile" ||
++ sed '$q' "$ltmain" >> "$cfgfile" \
++ || (rm -f "$cfgfile"; exit 1)
++
++ if test x"$xsi_shell" = xyes; then
++ sed -e '/^func_dirname ()$/,/^} # func_dirname /c\
++func_dirname ()\
++{\
++\ case ${1} in\
++\ */*) func_dirname_result="${1%/*}${2}" ;;\
++\ * ) func_dirname_result="${3}" ;;\
++\ esac\
++} # Extended-shell func_dirname implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++
++ sed -e '/^func_basename ()$/,/^} # func_basename /c\
++func_basename ()\
++{\
++\ func_basename_result="${1##*/}"\
++} # Extended-shell func_basename implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++
++ sed -e '/^func_dirname_and_basename ()$/,/^} # func_dirname_and_basename /c\
++func_dirname_and_basename ()\
++{\
++\ case ${1} in\
++\ */*) func_dirname_result="${1%/*}${2}" ;;\
++\ * ) func_dirname_result="${3}" ;;\
++\ esac\
++\ func_basename_result="${1##*/}"\
++} # Extended-shell func_dirname_and_basename implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++
++ sed -e '/^func_stripname ()$/,/^} # func_stripname /c\
++func_stripname ()\
++{\
++\ # pdksh 5.2.14 does not do ${X%$Y} correctly if both X and Y are\
++\ # positional parameters, so assign one to ordinary parameter first.\
++\ func_stripname_result=${3}\
++\ func_stripname_result=${func_stripname_result#"${1}"}\
++\ func_stripname_result=${func_stripname_result%"${2}"}\
++} # Extended-shell func_stripname implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++
++ sed -e '/^func_split_long_opt ()$/,/^} # func_split_long_opt /c\
++func_split_long_opt ()\
++{\
++\ func_split_long_opt_name=${1%%=*}\
++\ func_split_long_opt_arg=${1#*=}\
++} # Extended-shell func_split_long_opt implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++
++ sed -e '/^func_split_short_opt ()$/,/^} # func_split_short_opt /c\
++func_split_short_opt ()\
++{\
++\ func_split_short_opt_arg=${1#??}\
++\ func_split_short_opt_name=${1%"$func_split_short_opt_arg"}\
++} # Extended-shell func_split_short_opt implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++
++ sed -e '/^func_lo2o ()$/,/^} # func_lo2o /c\
++func_lo2o ()\
++{\
++\ case ${1} in\
++\ *.lo) func_lo2o_result=${1%.lo}.${objext} ;;\
++\ *) func_lo2o_result=${1} ;;\
++\ esac\
++} # Extended-shell func_lo2o implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++
++ sed -e '/^func_xform ()$/,/^} # func_xform /c\
++func_xform ()\
++{\
++ func_xform_result=${1%.*}.lo\
++} # Extended-shell func_xform implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++
++ sed -e '/^func_arith ()$/,/^} # func_arith /c\
++func_arith ()\
++{\
++ func_arith_result=$(( $* ))\
++} # Extended-shell func_arith implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++
++ sed -e '/^func_len ()$/,/^} # func_len /c\
++func_len ()\
++{\
++ func_len_result=${#1}\
++} # Extended-shell func_len implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++fi
++
++if test x"$lt_shell_append" = xyes; then
++ sed -e '/^func_append ()$/,/^} # func_append /c\
++func_append ()\
++{\
++ eval "${1}+=\\${2}"\
++} # Extended-shell func_append implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++
++ sed -e '/^func_append_quoted ()$/,/^} # func_append_quoted /c\
++func_append_quoted ()\
++{\
++\ func_quote_for_eval "${2}"\
++\ eval "${1}+=\\\\ \\$func_quote_for_eval_result"\
++} # Extended-shell func_append_quoted implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++
++ # Save a `func_append' function call where possible by direct use of '+='
++ sed -e 's%func_append \([a-zA-Z_]\{1,\}\) "%\1+="%g' $cfgfile > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++ test 0 -eq $? || _lt_function_replace_fail=:
++else
++ # Save a `func_append' function call even when '+=' is not available
++ sed -e 's%func_append \([a-zA-Z_]\{1,\}\) "%\1="$\1%g' $cfgfile > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++ test 0 -eq $? || _lt_function_replace_fail=:
++fi
++
++if test x"$_lt_function_replace_fail" = x":"; then
++ { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: Unable to substitute extended shell functions in $ofile" >&5
++$as_echo "$as_me: WARNING: Unable to substitute extended shell functions in $ofile" >&2;}
++fi
++
++
++ mv -f "$cfgfile" "$ofile" ||
+ (rm -f "$ofile" && cp "$cfgfile" "$ofile" && rm -f "$cfgfile")
+ chmod +x "$ofile"
+
+diff --git a/libctf/Makefile.in b/libctf/Makefile.in
+index 1cdf105a323..b69d6472e86 100644
+--- a/libctf/Makefile.in
++++ b/libctf/Makefile.in
+@@ -442,6 +442,7 @@ CYGPATH_W = @CYGPATH_W@
+ DATADIRNAME = @DATADIRNAME@
+ DEFS = @DEFS@
+ DEPDIR = @DEPDIR@
++DLLTOOL = @DLLTOOL@
+ DSYMUTIL = @DSYMUTIL@
+ DUMPBIN = @DUMPBIN@
+ ECHO_C = @ECHO_C@
+@@ -481,6 +482,7 @@ LTLIBINTL = @LTLIBINTL@
+ LTLIBOBJS = @LTLIBOBJS@
+ MAINT = @MAINT@
+ MAKEINFO = @MAKEINFO@
++MANIFEST_TOOL = @MANIFEST_TOOL@
+ MKDIR_P = @MKDIR_P@
+ MKINSTALLDIRS = @MKINSTALLDIRS@
+ MSGFMT = @MSGFMT@
+diff --git a/libctf/configure b/libctf/configure
+index 3701bd8e796..9e36ae7c51c 100755
+--- a/libctf/configure
++++ b/libctf/configure
+@@ -669,6 +669,8 @@ OTOOL
+ LIPO
+ NMEDIT
+ DSYMUTIL
++MANIFEST_TOOL
++DLLTOOL
+ OBJDUMP
+ LN_S
+ NM
+@@ -814,6 +816,7 @@ enable_shared
+ enable_static
+ with_pic
+ enable_fast_install
++with_libtool_sysroot
+ enable_libtool_lock
+ enable_largefile
+ enable_werror_always
+@@ -1486,6 +1489,8 @@ Optional Packages:
+ --with-pic try to use only PIC/non-PIC objects [default=use
+ both]
+ --with-gnu-ld assume the C compiler uses GNU ld [default=no]
++ --with-libtool-sysroot=DIR Search for dependent libraries within DIR
++ (or the compiler's sysroot if not specified).
+ --with-system-zlib use installed libz
+
+ Some influential environment variables:
+@@ -7501,8 +7506,8 @@ esac
+
+
+
+-macro_version='2.2.7a'
+-macro_revision='1.3134'
++macro_version='2.4'
++macro_revision='1.3293'
+
+
+
+@@ -7542,7 +7547,7 @@ ECHO=$ECHO$ECHO$ECHO$ECHO$ECHO$ECHO
+ { $as_echo "$as_me:${as_lineno-$LINENO}: checking how to print strings" >&5
+ $as_echo_n "checking how to print strings... " >&6; }
+ # Test print first, because it will be a builtin if present.
+-if test "X`print -r -- -n 2>/dev/null`" = X-n && \
++if test "X`( print -r -- -n ) 2>/dev/null`" = X-n && \
+ test "X`print -r -- $ECHO 2>/dev/null`" = "X$ECHO"; then
+ ECHO='print -r --'
+ elif test "X`printf %s $ECHO 2>/dev/null`" = "X$ECHO"; then
+@@ -8235,8 +8240,8 @@ $as_echo_n "checking whether the shell understands some XSI constructs... " >&6;
+ # Try some XSI features
+ xsi_shell=no
+ ( _lt_dummy="a/b/c"
+- test "${_lt_dummy##*/},${_lt_dummy%/*},"${_lt_dummy%"$_lt_dummy"}, \
+- = c,a/b,, \
++ test "${_lt_dummy##*/},${_lt_dummy%/*},${_lt_dummy#??}"${_lt_dummy%"$_lt_dummy"}, \
++ = c,a/b,b/c, \
+ && eval 'test $(( 1 + 1 )) -eq 2 \
+ && test "${#_lt_dummy}" -eq 5' ) >/dev/null 2>&1 \
+ && xsi_shell=yes
+@@ -8285,6 +8290,80 @@ esac
+
+
+
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to convert $build file names to $host format" >&5
++$as_echo_n "checking how to convert $build file names to $host format... " >&6; }
++if ${lt_cv_to_host_file_cmd+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ case $host in
++ *-*-mingw* )
++ case $build in
++ *-*-mingw* ) # actually msys
++ lt_cv_to_host_file_cmd=func_convert_file_msys_to_w32
++ ;;
++ *-*-cygwin* )
++ lt_cv_to_host_file_cmd=func_convert_file_cygwin_to_w32
++ ;;
++ * ) # otherwise, assume *nix
++ lt_cv_to_host_file_cmd=func_convert_file_nix_to_w32
++ ;;
++ esac
++ ;;
++ *-*-cygwin* )
++ case $build in
++ *-*-mingw* ) # actually msys
++ lt_cv_to_host_file_cmd=func_convert_file_msys_to_cygwin
++ ;;
++ *-*-cygwin* )
++ lt_cv_to_host_file_cmd=func_convert_file_noop
++ ;;
++ * ) # otherwise, assume *nix
++ lt_cv_to_host_file_cmd=func_convert_file_nix_to_cygwin
++ ;;
++ esac
++ ;;
++ * ) # unhandled hosts (and "normal" native builds)
++ lt_cv_to_host_file_cmd=func_convert_file_noop
++ ;;
++esac
++
++fi
++
++to_host_file_cmd=$lt_cv_to_host_file_cmd
++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_to_host_file_cmd" >&5
++$as_echo "$lt_cv_to_host_file_cmd" >&6; }
++
++
++
++
++
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to convert $build file names to toolchain format" >&5
++$as_echo_n "checking how to convert $build file names to toolchain format... " >&6; }
++if ${lt_cv_to_tool_file_cmd+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ #assume ordinary cross tools, or native build.
++lt_cv_to_tool_file_cmd=func_convert_file_noop
++case $host in
++ *-*-mingw* )
++ case $build in
++ *-*-mingw* ) # actually msys
++ lt_cv_to_tool_file_cmd=func_convert_file_msys_to_w32
++ ;;
++ esac
++ ;;
++esac
++
++fi
++
++to_tool_file_cmd=$lt_cv_to_tool_file_cmd
++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_to_tool_file_cmd" >&5
++$as_echo "$lt_cv_to_tool_file_cmd" >&6; }
++
++
++
++
++
+ { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $LD option to reload object files" >&5
+ $as_echo_n "checking for $LD option to reload object files... " >&6; }
+ if ${lt_cv_ld_reload_flag+:} false; then :
+@@ -8301,6 +8380,11 @@ case $reload_flag in
+ esac
+ reload_cmds='$LD$reload_flag -o $output$reload_objs'
+ case $host_os in
++ cygwin* | mingw* | pw32* | cegcc*)
++ if test "$GCC" != yes; then
++ reload_cmds=false
++ fi
++ ;;
+ darwin*)
+ if test "$GCC" = yes; then
+ reload_cmds='$LTCC $LTCFLAGS -nostdlib ${wl}-r -o $output$reload_objs'
+@@ -8469,7 +8553,8 @@ mingw* | pw32*)
+ lt_cv_deplibs_check_method='file_magic ^x86 archive import|^x86 DLL'
+ lt_cv_file_magic_cmd='func_win32_libid'
+ else
+- lt_cv_deplibs_check_method='file_magic file format pei*-i386(.*architecture: i386)?'
++ # Keep this pattern in sync with the one in func_win32_libid.
++ lt_cv_deplibs_check_method='file_magic file format (pei*-i386(.*architecture: i386)?|pe-arm-wince|pe-x86-64)'
+ lt_cv_file_magic_cmd='$OBJDUMP -f'
+ fi
+ ;;
+@@ -8628,6 +8713,21 @@ esac
+ fi
+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_deplibs_check_method" >&5
+ $as_echo "$lt_cv_deplibs_check_method" >&6; }
++
++file_magic_glob=
++want_nocaseglob=no
++if test "$build" = "$host"; then
++ case $host_os in
++ mingw* | pw32*)
++ if ( shopt | grep nocaseglob ) >/dev/null 2>&1; then
++ want_nocaseglob=yes
++ else
++ file_magic_glob=`echo aAbBcCdDeEfFgGhHiIjJkKlLmMnNoOpPqQrRsStTuUvVwWxXyYzZ | $SED -e "s/\(..\)/s\/[\1]\/[\1]\/g;/g"`
++ fi
++ ;;
++ esac
++fi
++
+ file_magic_cmd=$lt_cv_file_magic_cmd
+ deplibs_check_method=$lt_cv_deplibs_check_method
+ test -z "$deplibs_check_method" && deplibs_check_method=unknown
+@@ -8643,6 +8743,157 @@ test -z "$deplibs_check_method" && deplibs_check_method=unknown
+
+
+
++
++
++
++
++
++
++
++
++
++
++if test -n "$ac_tool_prefix"; then
++ # Extract the first word of "${ac_tool_prefix}dlltool", so it can be a program name with args.
++set dummy ${ac_tool_prefix}dlltool; ac_word=$2
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
++$as_echo_n "checking for $ac_word... " >&6; }
++if ${ac_cv_prog_DLLTOOL+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ if test -n "$DLLTOOL"; then
++ ac_cv_prog_DLLTOOL="$DLLTOOL" # Let the user override the test.
++else
++as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
++for as_dir in $PATH
++do
++ IFS=$as_save_IFS
++ test -z "$as_dir" && as_dir=.
++ for ac_exec_ext in '' $ac_executable_extensions; do
++ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
++ ac_cv_prog_DLLTOOL="${ac_tool_prefix}dlltool"
++ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
++ break 2
++ fi
++done
++ done
++IFS=$as_save_IFS
++
++fi
++fi
++DLLTOOL=$ac_cv_prog_DLLTOOL
++if test -n "$DLLTOOL"; then
++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $DLLTOOL" >&5
++$as_echo "$DLLTOOL" >&6; }
++else
++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
++$as_echo "no" >&6; }
++fi
++
++
++fi
++if test -z "$ac_cv_prog_DLLTOOL"; then
++ ac_ct_DLLTOOL=$DLLTOOL
++ # Extract the first word of "dlltool", so it can be a program name with args.
++set dummy dlltool; ac_word=$2
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
++$as_echo_n "checking for $ac_word... " >&6; }
++if ${ac_cv_prog_ac_ct_DLLTOOL+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ if test -n "$ac_ct_DLLTOOL"; then
++ ac_cv_prog_ac_ct_DLLTOOL="$ac_ct_DLLTOOL" # Let the user override the test.
++else
++as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
++for as_dir in $PATH
++do
++ IFS=$as_save_IFS
++ test -z "$as_dir" && as_dir=.
++ for ac_exec_ext in '' $ac_executable_extensions; do
++ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
++ ac_cv_prog_ac_ct_DLLTOOL="dlltool"
++ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
++ break 2
++ fi
++done
++ done
++IFS=$as_save_IFS
++
++fi
++fi
++ac_ct_DLLTOOL=$ac_cv_prog_ac_ct_DLLTOOL
++if test -n "$ac_ct_DLLTOOL"; then
++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_DLLTOOL" >&5
++$as_echo "$ac_ct_DLLTOOL" >&6; }
++else
++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
++$as_echo "no" >&6; }
++fi
++
++ if test "x$ac_ct_DLLTOOL" = x; then
++ DLLTOOL="false"
++ else
++ case $cross_compiling:$ac_tool_warned in
++yes:)
++{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
++$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
++ac_tool_warned=yes ;;
++esac
++ DLLTOOL=$ac_ct_DLLTOOL
++ fi
++else
++ DLLTOOL="$ac_cv_prog_DLLTOOL"
++fi
++
++test -z "$DLLTOOL" && DLLTOOL=dlltool
++
++
++
++
++
++
++
++
++
++
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to associate runtime and link libraries" >&5
++$as_echo_n "checking how to associate runtime and link libraries... " >&6; }
++if ${lt_cv_sharedlib_from_linklib_cmd+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ lt_cv_sharedlib_from_linklib_cmd='unknown'
++
++case $host_os in
++cygwin* | mingw* | pw32* | cegcc*)
++ # two different shell functions defined in ltmain.sh
++ # decide which to use based on capabilities of $DLLTOOL
++ case `$DLLTOOL --help 2>&1` in
++ *--identify-strict*)
++ lt_cv_sharedlib_from_linklib_cmd=func_cygming_dll_for_implib
++ ;;
++ *)
++ lt_cv_sharedlib_from_linklib_cmd=func_cygming_dll_for_implib_fallback
++ ;;
++ esac
++ ;;
++*)
++ # fallback: assume linklib IS sharedlib
++ lt_cv_sharedlib_from_linklib_cmd="$ECHO"
++ ;;
++esac
++
++fi
++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_sharedlib_from_linklib_cmd" >&5
++$as_echo "$lt_cv_sharedlib_from_linklib_cmd" >&6; }
++sharedlib_from_linklib_cmd=$lt_cv_sharedlib_from_linklib_cmd
++test -z "$sharedlib_from_linklib_cmd" && sharedlib_from_linklib_cmd=$ECHO
++
++
++
++
++
++
++
+ plugin_option=
+ plugin_names="liblto_plugin.so liblto_plugin-0.dll cyglto_plugin-0.dll"
+ for plugin in $plugin_names; do
+@@ -8657,8 +8908,10 @@ for plugin in $plugin_names; do
+ done
+
+ if test -n "$ac_tool_prefix"; then
+- # Extract the first word of "${ac_tool_prefix}ar", so it can be a program name with args.
+-set dummy ${ac_tool_prefix}ar; ac_word=$2
++ for ac_prog in ar
++ do
++ # Extract the first word of "$ac_tool_prefix$ac_prog", so it can be a program name with args.
++set dummy $ac_tool_prefix$ac_prog; ac_word=$2
+ { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+ $as_echo_n "checking for $ac_word... " >&6; }
+ if ${ac_cv_prog_AR+:} false; then :
+@@ -8674,7 +8927,7 @@ do
+ test -z "$as_dir" && as_dir=.
+ for ac_exec_ext in '' $ac_executable_extensions; do
+ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
+- ac_cv_prog_AR="${ac_tool_prefix}ar"
++ ac_cv_prog_AR="$ac_tool_prefix$ac_prog"
+ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+ break 2
+ fi
+@@ -8694,11 +8947,15 @@ $as_echo "no" >&6; }
+ fi
+
+
++ test -n "$AR" && break
++ done
+ fi
+-if test -z "$ac_cv_prog_AR"; then
++if test -z "$AR"; then
+ ac_ct_AR=$AR
+- # Extract the first word of "ar", so it can be a program name with args.
+-set dummy ar; ac_word=$2
++ for ac_prog in ar
++do
++ # Extract the first word of "$ac_prog", so it can be a program name with args.
++set dummy $ac_prog; ac_word=$2
+ { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+ $as_echo_n "checking for $ac_word... " >&6; }
+ if ${ac_cv_prog_ac_ct_AR+:} false; then :
+@@ -8714,7 +8971,7 @@ do
+ test -z "$as_dir" && as_dir=.
+ for ac_exec_ext in '' $ac_executable_extensions; do
+ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
+- ac_cv_prog_ac_ct_AR="ar"
++ ac_cv_prog_ac_ct_AR="$ac_prog"
+ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+ break 2
+ fi
+@@ -8733,6 +8990,10 @@ else
+ $as_echo "no" >&6; }
+ fi
+
++
++ test -n "$ac_ct_AR" && break
++done
++
+ if test "x$ac_ct_AR" = x; then
+ AR="false"
+ else
+@@ -8744,25 +9005,19 @@ ac_tool_warned=yes ;;
+ esac
+ AR=$ac_ct_AR
+ fi
+-else
+- AR="$ac_cv_prog_AR"
+ fi
+
+-test -z "$AR" && AR=ar
+-if test -n "$plugin_option"; then
+- if $AR --help 2>&1 | grep -q "\--plugin"; then
+- touch conftest.c
+- $AR $plugin_option rc conftest.a conftest.c
+- if test "$?" != 0; then
+- { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: Failed: $AR $plugin_option rc" >&5
++ touch conftest.c
++ $AR $plugin_option rc conftest.a conftest.c
++ if test "$?" != 0; then
++ { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: Failed: $AR $plugin_option rc" >&5
+ $as_echo "$as_me: WARNING: Failed: $AR $plugin_option rc" >&2;}
+- else
+- AR="$AR $plugin_option"
+- fi
+- rm -f conftest.*
++ else
++ AR="$AR $plugin_option"
+ fi
+-fi
+-test -z "$AR_FLAGS" && AR_FLAGS=cru
++ rm -f conftest.*
++: ${AR=ar}
++: ${AR_FLAGS=cru}
+
+
+
+@@ -8774,6 +9029,64 @@ test -z "$AR_FLAGS" && AR_FLAGS=cru
+
+
+
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for archiver @FILE support" >&5
++$as_echo_n "checking for archiver @FILE support... " >&6; }
++if ${lt_cv_ar_at_file+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ lt_cv_ar_at_file=no
++ cat confdefs.h - <<_ACEOF >conftest.$ac_ext
++/* end confdefs.h. */
++
++int
++main ()
++{
++
++ ;
++ return 0;
++}
++_ACEOF
++if ac_fn_c_try_compile "$LINENO"; then :
++ echo conftest.$ac_objext > conftest.lst
++ lt_ar_try='$AR $AR_FLAGS libconftest.a @conftest.lst >&5'
++ { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$lt_ar_try\""; } >&5
++ (eval $lt_ar_try) 2>&5
++ ac_status=$?
++ $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
++ test $ac_status = 0; }
++ if test "$ac_status" -eq 0; then
++ # Ensure the archiver fails upon bogus file names.
++ rm -f conftest.$ac_objext libconftest.a
++ { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$lt_ar_try\""; } >&5
++ (eval $lt_ar_try) 2>&5
++ ac_status=$?
++ $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
++ test $ac_status = 0; }
++ if test "$ac_status" -ne 0; then
++ lt_cv_ar_at_file=@
++ fi
++ fi
++ rm -f conftest.* libconftest.a
++
++fi
++rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
++
++fi
++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_ar_at_file" >&5
++$as_echo "$lt_cv_ar_at_file" >&6; }
++
++if test "x$lt_cv_ar_at_file" = xno; then
++ archiver_list_spec=
++else
++ archiver_list_spec=$lt_cv_ar_at_file
++fi
++
++
++
++
++
++
++
+ if test -n "$ac_tool_prefix"; then
+ # Extract the first word of "${ac_tool_prefix}strip", so it can be a program name with args.
+ set dummy ${ac_tool_prefix}strip; ac_word=$2
+@@ -9113,8 +9426,8 @@ esac
+ lt_cv_sys_global_symbol_to_cdecl="sed -n -e 's/^T .* \(.*\)$/extern int \1();/p' -e 's/^$symcode* .* \(.*\)$/extern char \1;/p'"
+
+ # Transform an extracted symbol line into symbol name and symbol address
+-lt_cv_sys_global_symbol_to_c_name_address="sed -n -e 's/^: \([^ ]*\) $/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"\2\", (void *) \&\2},/p'"
+-lt_cv_sys_global_symbol_to_c_name_address_lib_prefix="sed -n -e 's/^: \([^ ]*\) $/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \(lib[^ ]*\)$/ {\"\2\", (void *) \&\2},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"lib\2\", (void *) \&\2},/p'"
++lt_cv_sys_global_symbol_to_c_name_address="sed -n -e 's/^: \([^ ]*\)[ ]*$/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"\2\", (void *) \&\2},/p'"
++lt_cv_sys_global_symbol_to_c_name_address_lib_prefix="sed -n -e 's/^: \([^ ]*\)[ ]*$/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \(lib[^ ]*\)$/ {\"\2\", (void *) \&\2},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"lib\2\", (void *) \&\2},/p'"
+
+ # Handle CRLF in mingw tool chain
+ opt_cr=
+@@ -9150,6 +9463,7 @@ for ac_symprfx in "" "_"; do
+ else
+ lt_cv_sys_global_symbol_pipe="sed -n -e 's/^.*[ ]\($symcode$symcode*\)[ ][ ]*$ac_symprfx$sympat$opt_cr$/$symxfrm/p'"
+ fi
++ lt_cv_sys_global_symbol_pipe="$lt_cv_sys_global_symbol_pipe | sed '/ __gnu_lto/d'"
+
+ # Check to see that the pipe works correctly.
+ pipe_works=no
+@@ -9191,6 +9505,18 @@ _LT_EOF
+ if $GREP ' nm_test_var$' "$nlist" >/dev/null; then
+ if $GREP ' nm_test_func$' "$nlist" >/dev/null; then
+ cat <<_LT_EOF > conftest.$ac_ext
++/* Keep this code in sync between libtool.m4, ltmain, lt_system.h, and tests. */
++#if defined(_WIN32) || defined(__CYGWIN__) || defined(_WIN32_WCE)
++/* DATA imports from DLLs on WIN32 con't be const, because runtime
++ relocations are performed -- see ld's documentation on pseudo-relocs. */
++# define LT_DLSYM_CONST
++#elif defined(__osf__)
++/* This system does not cope well with relocations in const data. */
++# define LT_DLSYM_CONST
++#else
++# define LT_DLSYM_CONST const
++#endif
++
+ #ifdef __cplusplus
+ extern "C" {
+ #endif
+@@ -9202,7 +9528,7 @@ _LT_EOF
+ cat <<_LT_EOF >> conftest.$ac_ext
+
+ /* The mapping between symbol names and symbols. */
+-const struct {
++LT_DLSYM_CONST struct {
+ const char *name;
+ void *address;
+ }
+@@ -9228,8 +9554,8 @@ static const void *lt_preloaded_setup() {
+ _LT_EOF
+ # Now try linking the two files.
+ mv conftest.$ac_objext conftstm.$ac_objext
+- lt_save_LIBS="$LIBS"
+- lt_save_CFLAGS="$CFLAGS"
++ lt_globsym_save_LIBS=$LIBS
++ lt_globsym_save_CFLAGS=$CFLAGS
+ LIBS="conftstm.$ac_objext"
+ CFLAGS="$CFLAGS$lt_prog_compiler_no_builtin_flag"
+ if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_link\""; } >&5
+@@ -9239,8 +9565,8 @@ _LT_EOF
+ test $ac_status = 0; } && test -s conftest${ac_exeext}; then
+ pipe_works=yes
+ fi
+- LIBS="$lt_save_LIBS"
+- CFLAGS="$lt_save_CFLAGS"
++ LIBS=$lt_globsym_save_LIBS
++ CFLAGS=$lt_globsym_save_CFLAGS
+ else
+ echo "cannot find nm_test_func in $nlist" >&5
+ fi
+@@ -9277,6 +9603,17 @@ else
+ $as_echo "ok" >&6; }
+ fi
+
++# Response file support.
++if test "$lt_cv_nm_interface" = "MS dumpbin"; then
++ nm_file_list_spec='@'
++elif $NM --help 2>/dev/null | grep '[@]FILE' >/dev/null; then
++ nm_file_list_spec='@'
++fi
++
++
++
++
++
+
+
+
+@@ -9293,6 +9630,44 @@ fi
+
+
+
++
++
++
++
++
++
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for sysroot" >&5
++$as_echo_n "checking for sysroot... " >&6; }
++
++# Check whether --with-libtool-sysroot was given.
++if test "${with_libtool_sysroot+set}" = set; then :
++ withval=$with_libtool_sysroot;
++else
++ with_libtool_sysroot=no
++fi
++
++
++lt_sysroot=
++case ${with_libtool_sysroot} in #(
++ yes)
++ if test "$GCC" = yes; then
++ lt_sysroot=`$CC --print-sysroot 2>/dev/null`
++ fi
++ ;; #(
++ /*)
++ lt_sysroot=`echo "$with_libtool_sysroot" | sed -e "$sed_quote_subst"`
++ ;; #(
++ no|'')
++ ;; #(
++ *)
++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: ${with_libtool_sysroot}" >&5
++$as_echo "${with_libtool_sysroot}" >&6; }
++ as_fn_error $? "The sysroot must be an absolute path." "$LINENO" 5
++ ;;
++esac
++
++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: ${lt_sysroot:-no}" >&5
++$as_echo "${lt_sysroot:-no}" >&6; }
+
+
+
+@@ -9504,6 +9879,123 @@ esac
+
+ need_locks="$enable_libtool_lock"
+
++if test -n "$ac_tool_prefix"; then
++ # Extract the first word of "${ac_tool_prefix}mt", so it can be a program name with args.
++set dummy ${ac_tool_prefix}mt; ac_word=$2
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
++$as_echo_n "checking for $ac_word... " >&6; }
++if ${ac_cv_prog_MANIFEST_TOOL+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ if test -n "$MANIFEST_TOOL"; then
++ ac_cv_prog_MANIFEST_TOOL="$MANIFEST_TOOL" # Let the user override the test.
++else
++as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
++for as_dir in $PATH
++do
++ IFS=$as_save_IFS
++ test -z "$as_dir" && as_dir=.
++ for ac_exec_ext in '' $ac_executable_extensions; do
++ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
++ ac_cv_prog_MANIFEST_TOOL="${ac_tool_prefix}mt"
++ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
++ break 2
++ fi
++done
++ done
++IFS=$as_save_IFS
++
++fi
++fi
++MANIFEST_TOOL=$ac_cv_prog_MANIFEST_TOOL
++if test -n "$MANIFEST_TOOL"; then
++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $MANIFEST_TOOL" >&5
++$as_echo "$MANIFEST_TOOL" >&6; }
++else
++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
++$as_echo "no" >&6; }
++fi
++
++
++fi
++if test -z "$ac_cv_prog_MANIFEST_TOOL"; then
++ ac_ct_MANIFEST_TOOL=$MANIFEST_TOOL
++ # Extract the first word of "mt", so it can be a program name with args.
++set dummy mt; ac_word=$2
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
++$as_echo_n "checking for $ac_word... " >&6; }
++if ${ac_cv_prog_ac_ct_MANIFEST_TOOL+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ if test -n "$ac_ct_MANIFEST_TOOL"; then
++ ac_cv_prog_ac_ct_MANIFEST_TOOL="$ac_ct_MANIFEST_TOOL" # Let the user override the test.
++else
++as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
++for as_dir in $PATH
++do
++ IFS=$as_save_IFS
++ test -z "$as_dir" && as_dir=.
++ for ac_exec_ext in '' $ac_executable_extensions; do
++ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
++ ac_cv_prog_ac_ct_MANIFEST_TOOL="mt"
++ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
++ break 2
++ fi
++done
++ done
++IFS=$as_save_IFS
++
++fi
++fi
++ac_ct_MANIFEST_TOOL=$ac_cv_prog_ac_ct_MANIFEST_TOOL
++if test -n "$ac_ct_MANIFEST_TOOL"; then
++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_MANIFEST_TOOL" >&5
++$as_echo "$ac_ct_MANIFEST_TOOL" >&6; }
++else
++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
++$as_echo "no" >&6; }
++fi
++
++ if test "x$ac_ct_MANIFEST_TOOL" = x; then
++ MANIFEST_TOOL=":"
++ else
++ case $cross_compiling:$ac_tool_warned in
++yes:)
++{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
++$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
++ac_tool_warned=yes ;;
++esac
++ MANIFEST_TOOL=$ac_ct_MANIFEST_TOOL
++ fi
++else
++ MANIFEST_TOOL="$ac_cv_prog_MANIFEST_TOOL"
++fi
++
++test -z "$MANIFEST_TOOL" && MANIFEST_TOOL=mt
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking if $MANIFEST_TOOL is a manifest tool" >&5
++$as_echo_n "checking if $MANIFEST_TOOL is a manifest tool... " >&6; }
++if ${lt_cv_path_mainfest_tool+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ lt_cv_path_mainfest_tool=no
++ echo "$as_me:$LINENO: $MANIFEST_TOOL '-?'" >&5
++ $MANIFEST_TOOL '-?' 2>conftest.err > conftest.out
++ cat conftest.err >&5
++ if $GREP 'Manifest Tool' conftest.out > /dev/null; then
++ lt_cv_path_mainfest_tool=yes
++ fi
++ rm -f conftest*
++fi
++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_path_mainfest_tool" >&5
++$as_echo "$lt_cv_path_mainfest_tool" >&6; }
++if test "x$lt_cv_path_mainfest_tool" != xyes; then
++ MANIFEST_TOOL=:
++fi
++
++
++
++
++
+
+ case $host_os in
+ rhapsody* | darwin*)
+@@ -10067,6 +10559,8 @@ _LT_EOF
+ $LTCC $LTCFLAGS -c -o conftest.o conftest.c 2>&5
+ echo "$AR cru libconftest.a conftest.o" >&5
+ $AR cru libconftest.a conftest.o 2>&5
++ echo "$RANLIB libconftest.a" >&5
++ $RANLIB libconftest.a 2>&5
+ cat > conftest.c << _LT_EOF
+ int main() { return 0;}
+ _LT_EOF
+@@ -10621,8 +11115,6 @@ fi
+ lt_prog_compiler_pic=
+ lt_prog_compiler_static=
+
+-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $compiler option to produce PIC" >&5
+-$as_echo_n "checking for $compiler option to produce PIC... " >&6; }
+
+ if test "$GCC" = yes; then
+ lt_prog_compiler_wl='-Wl,'
+@@ -10788,6 +11280,12 @@ $as_echo_n "checking for $compiler option to produce PIC... " >&6; }
+ lt_prog_compiler_pic='--shared'
+ lt_prog_compiler_static='--static'
+ ;;
++ nagfor*)
++ # NAG Fortran compiler
++ lt_prog_compiler_wl='-Wl,-Wl,,'
++ lt_prog_compiler_pic='-PIC'
++ lt_prog_compiler_static='-Bstatic'
++ ;;
+ pgcc* | pgf77* | pgf90* | pgf95* | pgfortran*)
+ # Portland Group compilers (*not* the Pentium gcc compiler,
+ # which looks to be a dead project)
+@@ -10850,7 +11348,7 @@ $as_echo_n "checking for $compiler option to produce PIC... " >&6; }
+ lt_prog_compiler_pic='-KPIC'
+ lt_prog_compiler_static='-Bstatic'
+ case $cc_basename in
+- f77* | f90* | f95*)
++ f77* | f90* | f95* | sunf77* | sunf90* | sunf95*)
+ lt_prog_compiler_wl='-Qoption ld ';;
+ *)
+ lt_prog_compiler_wl='-Wl,';;
+@@ -10907,13 +11405,17 @@ case $host_os in
+ lt_prog_compiler_pic="$lt_prog_compiler_pic -DPIC"
+ ;;
+ esac
+-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_prog_compiler_pic" >&5
+-$as_echo "$lt_prog_compiler_pic" >&6; }
+-
+-
+-
+-
+
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $compiler option to produce PIC" >&5
++$as_echo_n "checking for $compiler option to produce PIC... " >&6; }
++if ${lt_cv_prog_compiler_pic+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ lt_cv_prog_compiler_pic=$lt_prog_compiler_pic
++fi
++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_pic" >&5
++$as_echo "$lt_cv_prog_compiler_pic" >&6; }
++lt_prog_compiler_pic=$lt_cv_prog_compiler_pic
+
+ #
+ # Check to make sure the PIC flag actually works.
+@@ -10974,6 +11476,11 @@ fi
+
+
+
++
++
++
++
++
+ #
+ # Check to make sure the static flag actually works.
+ #
+@@ -11324,7 +11831,8 @@ _LT_EOF
+ allow_undefined_flag=unsupported
+ always_export_symbols=no
+ enable_shared_with_static_runtimes=yes
+- export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1 DATA/'\'' | $SED -e '\''/^[AITW][ ]/s/.*[ ]//'\'' | sort | uniq > $export_symbols'
++ export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1 DATA/;s/^.*[ ]__nm__\([^ ]*\)[ ][^ ]*/\1 DATA/;/^I[ ]/d;/^[AITW][ ]/s/.* //'\'' | sort | uniq > $export_symbols'
++ exclude_expsyms='[_]+GLOBAL_OFFSET_TABLE_|[_]+GLOBAL__[FID]_.*|[_]+head_[A-Za-z0-9_]+_dll|[A-Za-z0-9_]+_dll_iname'
+
+ if $LD --help 2>&1 | $GREP 'auto-import' > /dev/null; then
+ archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib'
+@@ -11423,12 +11931,12 @@ _LT_EOF
+ whole_archive_flag_spec='--whole-archive$convenience --no-whole-archive'
+ hardcode_libdir_flag_spec=
+ hardcode_libdir_flag_spec_ld='-rpath $libdir'
+- archive_cmds='$LD -shared $libobjs $deplibs $compiler_flags -soname $soname -o $lib'
++ archive_cmds='$LD -shared $libobjs $deplibs $linker_flags -soname $soname -o $lib'
+ if test "x$supports_anon_versioning" = xyes; then
+ archive_expsym_cmds='echo "{ global:" > $output_objdir/$libname.ver~
+ cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~
+ echo "local: *; };" >> $output_objdir/$libname.ver~
+- $LD -shared $libobjs $deplibs $compiler_flags -soname $soname -version-script $output_objdir/$libname.ver -o $lib'
++ $LD -shared $libobjs $deplibs $linker_flags -soname $soname -version-script $output_objdir/$libname.ver -o $lib'
+ fi
+ ;;
+ esac
+@@ -11442,8 +11950,8 @@ _LT_EOF
+ archive_cmds='$LD -Bshareable $libobjs $deplibs $linker_flags -o $lib'
+ wlarc=
+ else
+- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
+- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
++ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
++ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
+ fi
+ ;;
+
+@@ -11461,8 +11969,8 @@ _LT_EOF
+
+ _LT_EOF
+ elif $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then
+- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
+- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
++ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
++ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
+ else
+ ld_shlibs=no
+ fi
+@@ -11508,8 +12016,8 @@ _LT_EOF
+
+ *)
+ if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then
+- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
+- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
++ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
++ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
+ else
+ ld_shlibs=no
+ fi
+@@ -11639,7 +12147,13 @@ _LT_EOF
+ allow_undefined_flag='-berok'
+ # Determine the default libpath from the value encoded in an
+ # empty executable.
+- cat confdefs.h - <<_ACEOF >conftest.$ac_ext
++ if test "${lt_cv_aix_libpath+set}" = set; then
++ aix_libpath=$lt_cv_aix_libpath
++else
++ if ${lt_cv_aix_libpath_+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+ /* end confdefs.h. */
+
+ int
+@@ -11652,22 +12166,29 @@ main ()
+ _ACEOF
+ if ac_fn_c_try_link "$LINENO"; then :
+
+-lt_aix_libpath_sed='
+- /Import File Strings/,/^$/ {
+- /^0/ {
+- s/^0 *\(.*\)$/\1/
+- p
+- }
+- }'
+-aix_libpath=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
+-# Check for a 64-bit object if we didn't find anything.
+-if test -z "$aix_libpath"; then
+- aix_libpath=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
+-fi
++ lt_aix_libpath_sed='
++ /Import File Strings/,/^$/ {
++ /^0/ {
++ s/^0 *\([^ ]*\) *$/\1/
++ p
++ }
++ }'
++ lt_cv_aix_libpath_=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
++ # Check for a 64-bit object if we didn't find anything.
++ if test -z "$lt_cv_aix_libpath_"; then
++ lt_cv_aix_libpath_=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
++ fi
+ fi
+ rm -f core conftest.err conftest.$ac_objext \
+ conftest$ac_exeext conftest.$ac_ext
+-if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
++ if test -z "$lt_cv_aix_libpath_"; then
++ lt_cv_aix_libpath_="/usr/lib:/lib"
++ fi
++
++fi
++
++ aix_libpath=$lt_cv_aix_libpath_
++fi
+
+ hardcode_libdir_flag_spec='${wl}-blibpath:$libdir:'"$aix_libpath"
+ archive_expsym_cmds='$CC -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags `if test "x${allow_undefined_flag}" != "x"; then func_echo_all "${wl}${allow_undefined_flag}"; else :; fi` '"\${wl}$exp_sym_flag:\$export_symbols $shared_flag"
+@@ -11679,7 +12200,13 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
+ else
+ # Determine the default libpath from the value encoded in an
+ # empty executable.
+- cat confdefs.h - <<_ACEOF >conftest.$ac_ext
++ if test "${lt_cv_aix_libpath+set}" = set; then
++ aix_libpath=$lt_cv_aix_libpath
++else
++ if ${lt_cv_aix_libpath_+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+ /* end confdefs.h. */
+
+ int
+@@ -11692,22 +12219,29 @@ main ()
+ _ACEOF
+ if ac_fn_c_try_link "$LINENO"; then :
+
+-lt_aix_libpath_sed='
+- /Import File Strings/,/^$/ {
+- /^0/ {
+- s/^0 *\(.*\)$/\1/
+- p
+- }
+- }'
+-aix_libpath=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
+-# Check for a 64-bit object if we didn't find anything.
+-if test -z "$aix_libpath"; then
+- aix_libpath=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
+-fi
++ lt_aix_libpath_sed='
++ /Import File Strings/,/^$/ {
++ /^0/ {
++ s/^0 *\([^ ]*\) *$/\1/
++ p
++ }
++ }'
++ lt_cv_aix_libpath_=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
++ # Check for a 64-bit object if we didn't find anything.
++ if test -z "$lt_cv_aix_libpath_"; then
++ lt_cv_aix_libpath_=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
++ fi
+ fi
+ rm -f core conftest.err conftest.$ac_objext \
+ conftest$ac_exeext conftest.$ac_ext
+-if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
++ if test -z "$lt_cv_aix_libpath_"; then
++ lt_cv_aix_libpath_="/usr/lib:/lib"
++ fi
++
++fi
++
++ aix_libpath=$lt_cv_aix_libpath_
++fi
+
+ hardcode_libdir_flag_spec='${wl}-blibpath:$libdir:'"$aix_libpath"
+ # Warning - without using the other run time loading flags,
+@@ -11752,20 +12286,63 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
+ # Microsoft Visual C++.
+ # hardcode_libdir_flag_spec is actually meaningless, as there is
+ # no search path for DLLs.
+- hardcode_libdir_flag_spec=' '
+- allow_undefined_flag=unsupported
+- # Tell ltmain to make .lib files, not .a files.
+- libext=lib
+- # Tell ltmain to make .dll files, not .so files.
+- shrext_cmds=".dll"
+- # FIXME: Setting linknames here is a bad hack.
+- archive_cmds='$CC -o $lib $libobjs $compiler_flags `func_echo_all "$deplibs" | $SED '\''s/ -lc$//'\''` -link -dll~linknames='
+- # The linker will automatically build a .lib file if we build a DLL.
+- old_archive_from_new_cmds='true'
+- # FIXME: Should let the user specify the lib program.
+- old_archive_cmds='lib -OUT:$oldlib$oldobjs$old_deplibs'
+- fix_srcfile_path='`cygpath -w "$srcfile"`'
+- enable_shared_with_static_runtimes=yes
++ case $cc_basename in
++ cl*)
++ # Native MSVC
++ hardcode_libdir_flag_spec=' '
++ allow_undefined_flag=unsupported
++ always_export_symbols=yes
++ file_list_spec='@'
++ # Tell ltmain to make .lib files, not .a files.
++ libext=lib
++ # Tell ltmain to make .dll files, not .so files.
++ shrext_cmds=".dll"
++ # FIXME: Setting linknames here is a bad hack.
++ archive_cmds='$CC -o $output_objdir/$soname $libobjs $compiler_flags $deplibs -Wl,-dll~linknames='
++ archive_expsym_cmds='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then
++ sed -n -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' -e '1\\\!p' < $export_symbols > $output_objdir/$soname.exp;
++ else
++ sed -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' < $export_symbols > $output_objdir/$soname.exp;
++ fi~
++ $CC -o $tool_output_objdir$soname $libobjs $compiler_flags $deplibs "@$tool_output_objdir$soname.exp" -Wl,-DLL,-IMPLIB:"$tool_output_objdir$libname.dll.lib"~
++ linknames='
++ # The linker will not automatically build a static lib if we build a DLL.
++ # _LT_TAGVAR(old_archive_from_new_cmds, )='true'
++ enable_shared_with_static_runtimes=yes
++ export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1,DATA/'\'' | $SED -e '\''/^[AITW][ ]/s/.*[ ]//'\'' | sort | uniq > $export_symbols'
++ # Don't use ranlib
++ old_postinstall_cmds='chmod 644 $oldlib'
++ postlink_cmds='lt_outputfile="@OUTPUT@"~
++ lt_tool_outputfile="@TOOL_OUTPUT@"~
++ case $lt_outputfile in
++ *.exe|*.EXE) ;;
++ *)
++ lt_outputfile="$lt_outputfile.exe"
++ lt_tool_outputfile="$lt_tool_outputfile.exe"
++ ;;
++ esac~
++ if test "$MANIFEST_TOOL" != ":" && test -f "$lt_outputfile.manifest"; then
++ $MANIFEST_TOOL -manifest "$lt_tool_outputfile.manifest" -outputresource:"$lt_tool_outputfile" || exit 1;
++ $RM "$lt_outputfile.manifest";
++ fi'
++ ;;
++ *)
++ # Assume MSVC wrapper
++ hardcode_libdir_flag_spec=' '
++ allow_undefined_flag=unsupported
++ # Tell ltmain to make .lib files, not .a files.
++ libext=lib
++ # Tell ltmain to make .dll files, not .so files.
++ shrext_cmds=".dll"
++ # FIXME: Setting linknames here is a bad hack.
++ archive_cmds='$CC -o $lib $libobjs $compiler_flags `func_echo_all "$deplibs" | $SED '\''s/ -lc$//'\''` -link -dll~linknames='
++ # The linker will automatically build a .lib file if we build a DLL.
++ old_archive_from_new_cmds='true'
++ # FIXME: Should let the user specify the lib program.
++ old_archive_cmds='lib -OUT:$oldlib$oldobjs$old_deplibs'
++ enable_shared_with_static_runtimes=yes
++ ;;
++ esac
+ ;;
+
+ darwin* | rhapsody*)
+@@ -11826,7 +12403,7 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
+
+ # FreeBSD 3 and greater uses gcc -shared to do shared libraries.
+ freebsd* | dragonfly*)
+- archive_cmds='$CC -shared -o $lib $libobjs $deplibs $compiler_flags'
++ archive_cmds='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags'
+ hardcode_libdir_flag_spec='-R$libdir'
+ hardcode_direct=yes
+ hardcode_shlibpath_var=no
+@@ -11834,7 +12411,7 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
+
+ hpux9*)
+ if test "$GCC" = yes; then
+- archive_cmds='$RM $output_objdir/$soname~$CC -shared -fPIC ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $libobjs $deplibs $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib'
++ archive_cmds='$RM $output_objdir/$soname~$CC -shared $pic_flag ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $libobjs $deplibs $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib'
+ else
+ archive_cmds='$RM $output_objdir/$soname~$LD -b +b $install_libdir -o $output_objdir/$soname $libobjs $deplibs $linker_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib'
+ fi
+@@ -11850,7 +12427,7 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
+
+ hpux10*)
+ if test "$GCC" = yes && test "$with_gnu_ld" = no; then
+- archive_cmds='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'
++ archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'
+ else
+ archive_cmds='$LD -b +h $soname +b $install_libdir -o $lib $libobjs $deplibs $linker_flags'
+ fi
+@@ -11874,10 +12451,10 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
+ archive_cmds='$CC -shared ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags'
+ ;;
+ ia64*)
+- archive_cmds='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags'
++ archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags'
+ ;;
+ *)
+- archive_cmds='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'
++ archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'
+ ;;
+ esac
+ else
+@@ -11956,23 +12533,36 @@ fi
+
+ irix5* | irix6* | nonstopux*)
+ if test "$GCC" = yes; then
+- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
++ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
+ # Try to use the -exported_symbol ld option, if it does not
+ # work, assume that -exports_file does not work either and
+ # implicitly export all symbols.
+- save_LDFLAGS="$LDFLAGS"
+- LDFLAGS="$LDFLAGS -shared ${wl}-exported_symbol ${wl}foo ${wl}-update_registry ${wl}/dev/null"
+- cat confdefs.h - <<_ACEOF >conftest.$ac_ext
++ # This should be the same for all languages, so no per-tag cache variable.
++ { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the $host_os linker accepts -exported_symbol" >&5
++$as_echo_n "checking whether the $host_os linker accepts -exported_symbol... " >&6; }
++if ${lt_cv_irix_exported_symbol+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ save_LDFLAGS="$LDFLAGS"
++ LDFLAGS="$LDFLAGS -shared ${wl}-exported_symbol ${wl}foo ${wl}-update_registry ${wl}/dev/null"
++ cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+ /* end confdefs.h. */
+-int foo(void) {}
++int foo (void) { return 0; }
+ _ACEOF
+ if ac_fn_c_try_link "$LINENO"; then :
+- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations ${wl}-exports_file ${wl}$export_symbols -o $lib'
+-
++ lt_cv_irix_exported_symbol=yes
++else
++ lt_cv_irix_exported_symbol=no
+ fi
+ rm -f core conftest.err conftest.$ac_objext \
+ conftest$ac_exeext conftest.$ac_ext
+- LDFLAGS="$save_LDFLAGS"
++ LDFLAGS="$save_LDFLAGS"
++fi
++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_irix_exported_symbol" >&5
++$as_echo "$lt_cv_irix_exported_symbol" >&6; }
++ if test "$lt_cv_irix_exported_symbol" = yes; then
++ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations ${wl}-exports_file ${wl}$export_symbols -o $lib'
++ fi
+ else
+ archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib'
+ archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -exports_file $export_symbols -o $lib'
+@@ -12057,7 +12647,7 @@ rm -f core conftest.err conftest.$ac_objext \
+ osf4* | osf5*) # as osf3* with the addition of -msym flag
+ if test "$GCC" = yes; then
+ allow_undefined_flag=' ${wl}-expect_unresolved ${wl}\*'
+- archive_cmds='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
++ archive_cmds='$CC -shared${allow_undefined_flag} $pic_flag $libobjs $deplibs $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
+ hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir'
+ else
+ allow_undefined_flag=' -expect_unresolved \*'
+@@ -12076,9 +12666,9 @@ rm -f core conftest.err conftest.$ac_objext \
+ no_undefined_flag=' -z defs'
+ if test "$GCC" = yes; then
+ wlarc='${wl}'
+- archive_cmds='$CC -shared ${wl}-z ${wl}text ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags'
++ archive_cmds='$CC -shared $pic_flag ${wl}-z ${wl}text ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags'
+ archive_expsym_cmds='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~
+- $CC -shared ${wl}-z ${wl}text ${wl}-M ${wl}$lib.exp ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp'
++ $CC -shared $pic_flag ${wl}-z ${wl}text ${wl}-M ${wl}$lib.exp ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp'
+ else
+ case `$CC -V 2>&1` in
+ *"Compilers 5.0"*)
+@@ -12654,8 +13244,9 @@ cygwin* | mingw* | pw32* | cegcc*)
+ need_version=no
+ need_lib_prefix=no
+
+- case $GCC,$host_os in
+- yes,cygwin* | yes,mingw* | yes,pw32* | yes,cegcc*)
++ case $GCC,$cc_basename in
++ yes,*)
++ # gcc
+ library_names_spec='$libname.dll.a'
+ # DLL is installed to $(libdir)/../bin by postinstall_cmds
+ postinstall_cmds='base_file=`basename \${file}`~
+@@ -12688,13 +13279,71 @@ cygwin* | mingw* | pw32* | cegcc*)
+ library_names_spec='`echo ${libname} | sed -e 's/^lib/pw/'``echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}'
+ ;;
+ esac
++ dynamic_linker='Win32 ld.exe'
++ ;;
++
++ *,cl*)
++ # Native MSVC
++ libname_spec='$name'
++ soname_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}'
++ library_names_spec='${libname}.dll.lib'
++
++ case $build_os in
++ mingw*)
++ sys_lib_search_path_spec=
++ lt_save_ifs=$IFS
++ IFS=';'
++ for lt_path in $LIB
++ do
++ IFS=$lt_save_ifs
++ # Let DOS variable expansion print the short 8.3 style file name.
++ lt_path=`cd "$lt_path" 2>/dev/null && cmd //C "for %i in (".") do @echo %~si"`
++ sys_lib_search_path_spec="$sys_lib_search_path_spec $lt_path"
++ done
++ IFS=$lt_save_ifs
++ # Convert to MSYS style.
++ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | sed -e 's|\\\\|/|g' -e 's| \\([a-zA-Z]\\):| /\\1|g' -e 's|^ ||'`
++ ;;
++ cygwin*)
++ # Convert to unix form, then to dos form, then back to unix form
++ # but this time dos style (no spaces!) so that the unix form looks
++ # like /cygdrive/c/PROGRA~1:/cygdr...
++ sys_lib_search_path_spec=`cygpath --path --unix "$LIB"`
++ sys_lib_search_path_spec=`cygpath --path --dos "$sys_lib_search_path_spec" 2>/dev/null`
++ sys_lib_search_path_spec=`cygpath --path --unix "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"`
++ ;;
++ *)
++ sys_lib_search_path_spec="$LIB"
++ if $ECHO "$sys_lib_search_path_spec" | $GREP ';[c-zC-Z]:/' >/dev/null; then
++ # It is most probably a Windows format PATH.
++ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e 's/;/ /g'`
++ else
++ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"`
++ fi
++ # FIXME: find the short name or the path components, as spaces are
++ # common. (e.g. "Program Files" -> "PROGRA~1")
++ ;;
++ esac
++
++ # DLL is installed to $(libdir)/../bin by postinstall_cmds
++ postinstall_cmds='base_file=`basename \${file}`~
++ dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\${base_file}'\''i; echo \$dlname'\''`~
++ dldir=$destdir/`dirname \$dlpath`~
++ test -d \$dldir || mkdir -p \$dldir~
++ $install_prog $dir/$dlname \$dldir/$dlname'
++ postuninstall_cmds='dldll=`$SHELL 2>&1 -c '\''. $file; echo \$dlname'\''`~
++ dlpath=$dir/\$dldll~
++ $RM \$dlpath'
++ shlibpath_overrides_runpath=yes
++ dynamic_linker='Win32 link.exe'
+ ;;
+
+ *)
++ # Assume MSVC wrapper
+ library_names_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext} $libname.lib'
++ dynamic_linker='Win32 ld.exe'
+ ;;
+ esac
+- dynamic_linker='Win32 ld.exe'
+ # FIXME: first we should search . and the directory the executable is in
+ shlibpath_var=PATH
+ ;;
+@@ -13596,7 +14245,11 @@ else
+ lt_dlunknown=0; lt_dlno_uscore=1; lt_dlneed_uscore=2
+ lt_status=$lt_dlunknown
+ cat > conftest.$ac_ext <<_LT_EOF
++<<<<<<< HEAD
+ #line 13599 "configure"
++=======
++#line $LINENO "configure"
++>>>>>>> b932158cf4c (Use libtool 2.4)
+ #include "confdefs.h"
+
+ #if HAVE_DLFCN_H
+@@ -13640,10 +14293,10 @@ else
+ /* When -fvisbility=hidden is used, assume the code has been annotated
+ correspondingly for the symbols needed. */
+ #if defined(__GNUC__) && (((__GNUC__ == 3) && (__GNUC_MINOR__ >= 3)) || (__GNUC__ > 3))
+-void fnord () __attribute__((visibility("default")));
++int fnord () __attribute__((visibility("default")));
+ #endif
+
+-void fnord () { int i=42; }
++int fnord () { return 42; }
+ int main ()
+ {
+ void *self = dlopen (0, LT_DLGLOBAL|LT_DLLAZY_OR_NOW);
+@@ -13702,7 +14355,11 @@ else
+ lt_dlunknown=0; lt_dlno_uscore=1; lt_dlneed_uscore=2
+ lt_status=$lt_dlunknown
+ cat > conftest.$ac_ext <<_LT_EOF
++<<<<<<< HEAD
+ #line 13705 "configure"
++=======
++#line $LINENO "configure"
++>>>>>>> b932158cf4c (Use libtool 2.4)
+ #include "confdefs.h"
+
+ #if HAVE_DLFCN_H
+@@ -13746,10 +14403,10 @@ else
+ /* When -fvisbility=hidden is used, assume the code has been annotated
+ correspondingly for the symbols needed. */
+ #if defined(__GNUC__) && (((__GNUC__ == 3) && (__GNUC_MINOR__ >= 3)) || (__GNUC__ > 3))
+-void fnord () __attribute__((visibility("default")));
++int fnord () __attribute__((visibility("default")));
+ #endif
+
+-void fnord () { int i=42; }
++int fnord () { return 42; }
+ int main ()
+ {
+ void *self = dlopen (0, LT_DLGLOBAL|LT_DLLAZY_OR_NOW);
+@@ -17797,13 +18454,20 @@ exeext='`$ECHO "$exeext" | $SED "$delay_single_quote_subst"`'
+ lt_unset='`$ECHO "$lt_unset" | $SED "$delay_single_quote_subst"`'
+ lt_SP2NL='`$ECHO "$lt_SP2NL" | $SED "$delay_single_quote_subst"`'
+ lt_NL2SP='`$ECHO "$lt_NL2SP" | $SED "$delay_single_quote_subst"`'
++lt_cv_to_host_file_cmd='`$ECHO "$lt_cv_to_host_file_cmd" | $SED "$delay_single_quote_subst"`'
++lt_cv_to_tool_file_cmd='`$ECHO "$lt_cv_to_tool_file_cmd" | $SED "$delay_single_quote_subst"`'
+ reload_flag='`$ECHO "$reload_flag" | $SED "$delay_single_quote_subst"`'
+ reload_cmds='`$ECHO "$reload_cmds" | $SED "$delay_single_quote_subst"`'
+ OBJDUMP='`$ECHO "$OBJDUMP" | $SED "$delay_single_quote_subst"`'
+ deplibs_check_method='`$ECHO "$deplibs_check_method" | $SED "$delay_single_quote_subst"`'
+ file_magic_cmd='`$ECHO "$file_magic_cmd" | $SED "$delay_single_quote_subst"`'
++file_magic_glob='`$ECHO "$file_magic_glob" | $SED "$delay_single_quote_subst"`'
++want_nocaseglob='`$ECHO "$want_nocaseglob" | $SED "$delay_single_quote_subst"`'
++DLLTOOL='`$ECHO "$DLLTOOL" | $SED "$delay_single_quote_subst"`'
++sharedlib_from_linklib_cmd='`$ECHO "$sharedlib_from_linklib_cmd" | $SED "$delay_single_quote_subst"`'
+ AR='`$ECHO "$AR" | $SED "$delay_single_quote_subst"`'
+ AR_FLAGS='`$ECHO "$AR_FLAGS" | $SED "$delay_single_quote_subst"`'
++archiver_list_spec='`$ECHO "$archiver_list_spec" | $SED "$delay_single_quote_subst"`'
+ STRIP='`$ECHO "$STRIP" | $SED "$delay_single_quote_subst"`'
+ RANLIB='`$ECHO "$RANLIB" | $SED "$delay_single_quote_subst"`'
+ old_postinstall_cmds='`$ECHO "$old_postinstall_cmds" | $SED "$delay_single_quote_subst"`'
+@@ -17818,14 +18482,17 @@ lt_cv_sys_global_symbol_pipe='`$ECHO "$lt_cv_sys_global_symbol_pipe" | $SED "$de
+ lt_cv_sys_global_symbol_to_cdecl='`$ECHO "$lt_cv_sys_global_symbol_to_cdecl" | $SED "$delay_single_quote_subst"`'
+ lt_cv_sys_global_symbol_to_c_name_address='`$ECHO "$lt_cv_sys_global_symbol_to_c_name_address" | $SED "$delay_single_quote_subst"`'
+ lt_cv_sys_global_symbol_to_c_name_address_lib_prefix='`$ECHO "$lt_cv_sys_global_symbol_to_c_name_address_lib_prefix" | $SED "$delay_single_quote_subst"`'
++nm_file_list_spec='`$ECHO "$nm_file_list_spec" | $SED "$delay_single_quote_subst"`'
++lt_sysroot='`$ECHO "$lt_sysroot" | $SED "$delay_single_quote_subst"`'
+ objdir='`$ECHO "$objdir" | $SED "$delay_single_quote_subst"`'
+ MAGIC_CMD='`$ECHO "$MAGIC_CMD" | $SED "$delay_single_quote_subst"`'
+ lt_prog_compiler_no_builtin_flag='`$ECHO "$lt_prog_compiler_no_builtin_flag" | $SED "$delay_single_quote_subst"`'
+-lt_prog_compiler_wl='`$ECHO "$lt_prog_compiler_wl" | $SED "$delay_single_quote_subst"`'
+ lt_prog_compiler_pic='`$ECHO "$lt_prog_compiler_pic" | $SED "$delay_single_quote_subst"`'
++lt_prog_compiler_wl='`$ECHO "$lt_prog_compiler_wl" | $SED "$delay_single_quote_subst"`'
+ lt_prog_compiler_static='`$ECHO "$lt_prog_compiler_static" | $SED "$delay_single_quote_subst"`'
+ lt_cv_prog_compiler_c_o='`$ECHO "$lt_cv_prog_compiler_c_o" | $SED "$delay_single_quote_subst"`'
+ need_locks='`$ECHO "$need_locks" | $SED "$delay_single_quote_subst"`'
++MANIFEST_TOOL='`$ECHO "$MANIFEST_TOOL" | $SED "$delay_single_quote_subst"`'
+ DSYMUTIL='`$ECHO "$DSYMUTIL" | $SED "$delay_single_quote_subst"`'
+ NMEDIT='`$ECHO "$NMEDIT" | $SED "$delay_single_quote_subst"`'
+ LIPO='`$ECHO "$LIPO" | $SED "$delay_single_quote_subst"`'
+@@ -17858,12 +18525,12 @@ hardcode_shlibpath_var='`$ECHO "$hardcode_shlibpath_var" | $SED "$delay_single_q
+ hardcode_automatic='`$ECHO "$hardcode_automatic" | $SED "$delay_single_quote_subst"`'
+ inherit_rpath='`$ECHO "$inherit_rpath" | $SED "$delay_single_quote_subst"`'
+ link_all_deplibs='`$ECHO "$link_all_deplibs" | $SED "$delay_single_quote_subst"`'
+-fix_srcfile_path='`$ECHO "$fix_srcfile_path" | $SED "$delay_single_quote_subst"`'
+ always_export_symbols='`$ECHO "$always_export_symbols" | $SED "$delay_single_quote_subst"`'
+ export_symbols_cmds='`$ECHO "$export_symbols_cmds" | $SED "$delay_single_quote_subst"`'
+ exclude_expsyms='`$ECHO "$exclude_expsyms" | $SED "$delay_single_quote_subst"`'
+ include_expsyms='`$ECHO "$include_expsyms" | $SED "$delay_single_quote_subst"`'
+ prelink_cmds='`$ECHO "$prelink_cmds" | $SED "$delay_single_quote_subst"`'
++postlink_cmds='`$ECHO "$postlink_cmds" | $SED "$delay_single_quote_subst"`'
+ file_list_spec='`$ECHO "$file_list_spec" | $SED "$delay_single_quote_subst"`'
+ variables_saved_for_relink='`$ECHO "$variables_saved_for_relink" | $SED "$delay_single_quote_subst"`'
+ need_lib_prefix='`$ECHO "$need_lib_prefix" | $SED "$delay_single_quote_subst"`'
+@@ -17918,8 +18585,13 @@ reload_flag \
+ OBJDUMP \
+ deplibs_check_method \
+ file_magic_cmd \
++file_magic_glob \
++want_nocaseglob \
++DLLTOOL \
++sharedlib_from_linklib_cmd \
+ AR \
+ AR_FLAGS \
++archiver_list_spec \
+ STRIP \
+ RANLIB \
+ CC \
+@@ -17929,12 +18601,14 @@ lt_cv_sys_global_symbol_pipe \
+ lt_cv_sys_global_symbol_to_cdecl \
+ lt_cv_sys_global_symbol_to_c_name_address \
+ lt_cv_sys_global_symbol_to_c_name_address_lib_prefix \
++nm_file_list_spec \
+ lt_prog_compiler_no_builtin_flag \
+-lt_prog_compiler_wl \
+ lt_prog_compiler_pic \
++lt_prog_compiler_wl \
+ lt_prog_compiler_static \
+ lt_cv_prog_compiler_c_o \
+ need_locks \
++MANIFEST_TOOL \
+ DSYMUTIL \
+ NMEDIT \
+ LIPO \
+@@ -17950,7 +18624,6 @@ no_undefined_flag \
+ hardcode_libdir_flag_spec \
+ hardcode_libdir_flag_spec_ld \
+ hardcode_libdir_separator \
+-fix_srcfile_path \
+ exclude_expsyms \
+ include_expsyms \
+ file_list_spec \
+@@ -17986,6 +18659,7 @@ module_cmds \
+ module_expsym_cmds \
+ export_symbols_cmds \
+ prelink_cmds \
++postlink_cmds \
+ postinstall_cmds \
+ postuninstall_cmds \
+ finish_cmds \
+@@ -18845,7 +19519,8 @@ $as_echo X"$file" |
+ # NOTE: Changes made to this file will be lost: look at ltmain.sh.
+ #
+ # Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005,
+-# 2006, 2007, 2008, 2009 Free Software Foundation, Inc.
++# 2006, 2007, 2008, 2009, 2010 Free Software Foundation,
++# Inc.
+ # Written by Gordon Matzigkeit, 1996
+ #
+ # This file is part of GNU Libtool.
+@@ -18948,19 +19623,42 @@ SP2NL=$lt_lt_SP2NL
+ # turn newlines into spaces.
+ NL2SP=$lt_lt_NL2SP
+
++# convert \$build file names to \$host format.
++to_host_file_cmd=$lt_cv_to_host_file_cmd
++
++# convert \$build files to toolchain format.
++to_tool_file_cmd=$lt_cv_to_tool_file_cmd
++
+ # An object symbol dumper.
+ OBJDUMP=$lt_OBJDUMP
+
+ # Method to check whether dependent libraries are shared objects.
+ deplibs_check_method=$lt_deplibs_check_method
+
+-# Command to use when deplibs_check_method == "file_magic".
++# Command to use when deplibs_check_method = "file_magic".
+ file_magic_cmd=$lt_file_magic_cmd
+
++# How to find potential files when deplibs_check_method = "file_magic".
++file_magic_glob=$lt_file_magic_glob
++
++# Find potential files using nocaseglob when deplibs_check_method = "file_magic".
++want_nocaseglob=$lt_want_nocaseglob
++
++# DLL creation program.
++DLLTOOL=$lt_DLLTOOL
++
++# Command to associate shared and link libraries.
++sharedlib_from_linklib_cmd=$lt_sharedlib_from_linklib_cmd
++
+ # The archiver.
+ AR=$lt_AR
++
++# Flags to create an archive.
+ AR_FLAGS=$lt_AR_FLAGS
+
++# How to feed a file listing to the archiver.
++archiver_list_spec=$lt_archiver_list_spec
++
+ # A symbol stripping program.
+ STRIP=$lt_STRIP
+
+@@ -18990,6 +19688,12 @@ global_symbol_to_c_name_address=$lt_lt_cv_sys_global_symbol_to_c_name_address
+ # Transform the output of nm in a C name address pair when lib prefix is needed.
+ global_symbol_to_c_name_address_lib_prefix=$lt_lt_cv_sys_global_symbol_to_c_name_address_lib_prefix
+
++# Specify filename containing input files for \$NM.
++nm_file_list_spec=$lt_nm_file_list_spec
++
++# The root where to search for dependent libraries,and in which our libraries should be installed.
++lt_sysroot=$lt_sysroot
++
+ # The name of the directory that contains temporary libtool files.
+ objdir=$objdir
+
+@@ -18999,6 +19703,9 @@ MAGIC_CMD=$MAGIC_CMD
+ # Must we lock files when doing compilation?
+ need_locks=$lt_need_locks
+
++# Manifest tool.
++MANIFEST_TOOL=$lt_MANIFEST_TOOL
++
+ # Tool to manipulate archived DWARF debug symbol files on Mac OS X.
+ DSYMUTIL=$lt_DSYMUTIL
+
+@@ -19113,12 +19820,12 @@ with_gcc=$GCC
+ # Compiler flag to turn off builtin functions.
+ no_builtin_flag=$lt_lt_prog_compiler_no_builtin_flag
+
+-# How to pass a linker flag through the compiler.
+-wl=$lt_lt_prog_compiler_wl
+-
+ # Additional compiler flags for building library objects.
+ pic_flag=$lt_lt_prog_compiler_pic
+
++# How to pass a linker flag through the compiler.
++wl=$lt_lt_prog_compiler_wl
++
+ # Compiler flag to prevent dynamic linking.
+ link_static_flag=$lt_lt_prog_compiler_static
+
+@@ -19205,9 +19912,6 @@ inherit_rpath=$inherit_rpath
+ # Whether libtool must link a program against all its dependency libraries.
+ link_all_deplibs=$link_all_deplibs
+
+-# Fix the shell variable \$srcfile for the compiler.
+-fix_srcfile_path=$lt_fix_srcfile_path
+-
+ # Set to "yes" if exported symbols are required.
+ always_export_symbols=$always_export_symbols
+
+@@ -19223,6 +19927,9 @@ include_expsyms=$lt_include_expsyms
+ # Commands necessary for linking programs (against libraries) with templates.
+ prelink_cmds=$lt_prelink_cmds
+
++# Commands necessary for finishing linking programs.
++postlink_cmds=$lt_postlink_cmds
++
+ # Specify filename containing input files.
+ file_list_spec=$lt_file_list_spec
+
+@@ -19255,210 +19962,169 @@ ltmain="$ac_aux_dir/ltmain.sh"
+ # if finds mixed CR/LF and LF-only lines. Since sed operates in
+ # text mode, it properly converts lines to CR/LF. This bash problem
+ # is reportedly fixed, but why not run on old versions too?
+- sed '/^# Generated shell functions inserted here/q' "$ltmain" >> "$cfgfile" \
+- || (rm -f "$cfgfile"; exit 1)
+-
+- case $xsi_shell in
+- yes)
+- cat << \_LT_EOF >> "$cfgfile"
+-
+-# func_dirname file append nondir_replacement
+-# Compute the dirname of FILE. If nonempty, add APPEND to the result,
+-# otherwise set result to NONDIR_REPLACEMENT.
+-func_dirname ()
+-{
+- case ${1} in
+- */*) func_dirname_result="${1%/*}${2}" ;;
+- * ) func_dirname_result="${3}" ;;
+- esac
+-}
+-
+-# func_basename file
+-func_basename ()
+-{
+- func_basename_result="${1##*/}"
+-}
+-
+-# func_dirname_and_basename file append nondir_replacement
+-# perform func_basename and func_dirname in a single function
+-# call:
+-# dirname: Compute the dirname of FILE. If nonempty,
+-# add APPEND to the result, otherwise set result
+-# to NONDIR_REPLACEMENT.
+-# value returned in "$func_dirname_result"
+-# basename: Compute filename of FILE.
+-# value retuned in "$func_basename_result"
+-# Implementation must be kept synchronized with func_dirname
+-# and func_basename. For efficiency, we do not delegate to
+-# those functions but instead duplicate the functionality here.
+-func_dirname_and_basename ()
+-{
+- case ${1} in
+- */*) func_dirname_result="${1%/*}${2}" ;;
+- * ) func_dirname_result="${3}" ;;
+- esac
+- func_basename_result="${1##*/}"
+-}
+-
+-# func_stripname prefix suffix name
+-# strip PREFIX and SUFFIX off of NAME.
+-# PREFIX and SUFFIX must not contain globbing or regex special
+-# characters, hashes, percent signs, but SUFFIX may contain a leading
+-# dot (in which case that matches only a dot).
+-func_stripname ()
+-{
+- # pdksh 5.2.14 does not do ${X%$Y} correctly if both X and Y are
+- # positional parameters, so assign one to ordinary parameter first.
+- func_stripname_result=${3}
+- func_stripname_result=${func_stripname_result#"${1}"}
+- func_stripname_result=${func_stripname_result%"${2}"}
+-}
+-
+-# func_opt_split
+-func_opt_split ()
+-{
+- func_opt_split_opt=${1%%=*}
+- func_opt_split_arg=${1#*=}
+-}
+-
+-# func_lo2o object
+-func_lo2o ()
+-{
+- case ${1} in
+- *.lo) func_lo2o_result=${1%.lo}.${objext} ;;
+- *) func_lo2o_result=${1} ;;
+- esac
+-}
+-
+-# func_xform libobj-or-source
+-func_xform ()
+-{
+- func_xform_result=${1%.*}.lo
+-}
+-
+-# func_arith arithmetic-term...
+-func_arith ()
+-{
+- func_arith_result=$(( $* ))
+-}
+-
+-# func_len string
+-# STRING may not start with a hyphen.
+-func_len ()
+-{
+- func_len_result=${#1}
+-}
+-
+-_LT_EOF
+- ;;
+- *) # Bourne compatible functions.
+- cat << \_LT_EOF >> "$cfgfile"
+-
+-# func_dirname file append nondir_replacement
+-# Compute the dirname of FILE. If nonempty, add APPEND to the result,
+-# otherwise set result to NONDIR_REPLACEMENT.
+-func_dirname ()
+-{
+- # Extract subdirectory from the argument.
+- func_dirname_result=`$ECHO "${1}" | $SED "$dirname"`
+- if test "X$func_dirname_result" = "X${1}"; then
+- func_dirname_result="${3}"
+- else
+- func_dirname_result="$func_dirname_result${2}"
+- fi
+-}
+-
+-# func_basename file
+-func_basename ()
+-{
+- func_basename_result=`$ECHO "${1}" | $SED "$basename"`
+-}
+-
+-
+-# func_stripname prefix suffix name
+-# strip PREFIX and SUFFIX off of NAME.
+-# PREFIX and SUFFIX must not contain globbing or regex special
+-# characters, hashes, percent signs, but SUFFIX may contain a leading
+-# dot (in which case that matches only a dot).
+-# func_strip_suffix prefix name
+-func_stripname ()
+-{
+- case ${2} in
+- .*) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%\\\\${2}\$%%"`;;
+- *) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%${2}\$%%"`;;
+- esac
+-}
+-
+-# sed scripts:
+-my_sed_long_opt='1s/^\(-[^=]*\)=.*/\1/;q'
+-my_sed_long_arg='1s/^-[^=]*=//'
+-
+-# func_opt_split
+-func_opt_split ()
+-{
+- func_opt_split_opt=`$ECHO "${1}" | $SED "$my_sed_long_opt"`
+- func_opt_split_arg=`$ECHO "${1}" | $SED "$my_sed_long_arg"`
+-}
+-
+-# func_lo2o object
+-func_lo2o ()
+-{
+- func_lo2o_result=`$ECHO "${1}" | $SED "$lo2o"`
+-}
+-
+-# func_xform libobj-or-source
+-func_xform ()
+-{
+- func_xform_result=`$ECHO "${1}" | $SED 's/\.[^.]*$/.lo/'`
+-}
+-
+-# func_arith arithmetic-term...
+-func_arith ()
+-{
+- func_arith_result=`expr "$@"`
+-}
+-
+-# func_len string
+-# STRING may not start with a hyphen.
+-func_len ()
+-{
+- func_len_result=`expr "$1" : ".*" 2>/dev/null || echo $max_cmd_len`
+-}
+-
+-_LT_EOF
+-esac
+-
+-case $lt_shell_append in
+- yes)
+- cat << \_LT_EOF >> "$cfgfile"
+-
+-# func_append var value
+-# Append VALUE to the end of shell variable VAR.
+-func_append ()
+-{
+- eval "$1+=\$2"
+-}
+-_LT_EOF
+- ;;
+- *)
+- cat << \_LT_EOF >> "$cfgfile"
+-
+-# func_append var value
+-# Append VALUE to the end of shell variable VAR.
+-func_append ()
+-{
+- eval "$1=\$$1\$2"
+-}
+-
+-_LT_EOF
+- ;;
+- esac
+-
+-
+- sed -n '/^# Generated shell functions inserted here/,$p' "$ltmain" >> "$cfgfile" \
+- || (rm -f "$cfgfile"; exit 1)
+-
+- mv -f "$cfgfile" "$ofile" ||
++ sed '$q' "$ltmain" >> "$cfgfile" \
++ || (rm -f "$cfgfile"; exit 1)
++
++ if test x"$xsi_shell" = xyes; then
++ sed -e '/^func_dirname ()$/,/^} # func_dirname /c\
++func_dirname ()\
++{\
++\ case ${1} in\
++\ */*) func_dirname_result="${1%/*}${2}" ;;\
++\ * ) func_dirname_result="${3}" ;;\
++\ esac\
++} # Extended-shell func_dirname implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++
++ sed -e '/^func_basename ()$/,/^} # func_basename /c\
++func_basename ()\
++{\
++\ func_basename_result="${1##*/}"\
++} # Extended-shell func_basename implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++
++ sed -e '/^func_dirname_and_basename ()$/,/^} # func_dirname_and_basename /c\
++func_dirname_and_basename ()\
++{\
++\ case ${1} in\
++\ */*) func_dirname_result="${1%/*}${2}" ;;\
++\ * ) func_dirname_result="${3}" ;;\
++\ esac\
++\ func_basename_result="${1##*/}"\
++} # Extended-shell func_dirname_and_basename implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++
++ sed -e '/^func_stripname ()$/,/^} # func_stripname /c\
++func_stripname ()\
++{\
++\ # pdksh 5.2.14 does not do ${X%$Y} correctly if both X and Y are\
++\ # positional parameters, so assign one to ordinary parameter first.\
++\ func_stripname_result=${3}\
++\ func_stripname_result=${func_stripname_result#"${1}"}\
++\ func_stripname_result=${func_stripname_result%"${2}"}\
++} # Extended-shell func_stripname implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++
++ sed -e '/^func_split_long_opt ()$/,/^} # func_split_long_opt /c\
++func_split_long_opt ()\
++{\
++\ func_split_long_opt_name=${1%%=*}\
++\ func_split_long_opt_arg=${1#*=}\
++} # Extended-shell func_split_long_opt implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++
++ sed -e '/^func_split_short_opt ()$/,/^} # func_split_short_opt /c\
++func_split_short_opt ()\
++{\
++\ func_split_short_opt_arg=${1#??}\
++\ func_split_short_opt_name=${1%"$func_split_short_opt_arg"}\
++} # Extended-shell func_split_short_opt implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++
++ sed -e '/^func_lo2o ()$/,/^} # func_lo2o /c\
++func_lo2o ()\
++{\
++\ case ${1} in\
++\ *.lo) func_lo2o_result=${1%.lo}.${objext} ;;\
++\ *) func_lo2o_result=${1} ;;\
++\ esac\
++} # Extended-shell func_lo2o implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++
++ sed -e '/^func_xform ()$/,/^} # func_xform /c\
++func_xform ()\
++{\
++ func_xform_result=${1%.*}.lo\
++} # Extended-shell func_xform implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++
++ sed -e '/^func_arith ()$/,/^} # func_arith /c\
++func_arith ()\
++{\
++ func_arith_result=$(( $* ))\
++} # Extended-shell func_arith implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++
++ sed -e '/^func_len ()$/,/^} # func_len /c\
++func_len ()\
++{\
++ func_len_result=${#1}\
++} # Extended-shell func_len implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++fi
++
++if test x"$lt_shell_append" = xyes; then
++ sed -e '/^func_append ()$/,/^} # func_append /c\
++func_append ()\
++{\
++ eval "${1}+=\\${2}"\
++} # Extended-shell func_append implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++
++ sed -e '/^func_append_quoted ()$/,/^} # func_append_quoted /c\
++func_append_quoted ()\
++{\
++\ func_quote_for_eval "${2}"\
++\ eval "${1}+=\\\\ \\$func_quote_for_eval_result"\
++} # Extended-shell func_append_quoted implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++
++ # Save a `func_append' function call where possible by direct use of '+='
++ sed -e 's%func_append \([a-zA-Z_]\{1,\}\) "%\1+="%g' $cfgfile > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++ test 0 -eq $? || _lt_function_replace_fail=:
++else
++ # Save a `func_append' function call even when '+=' is not available
++ sed -e 's%func_append \([a-zA-Z_]\{1,\}\) "%\1="$\1%g' $cfgfile > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++ test 0 -eq $? || _lt_function_replace_fail=:
++fi
++
++if test x"$_lt_function_replace_fail" = x":"; then
++ { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: Unable to substitute extended shell functions in $ofile" >&5
++$as_echo "$as_me: WARNING: Unable to substitute extended shell functions in $ofile" >&2;}
++fi
++
++
++ mv -f "$cfgfile" "$ofile" ||
+ (rm -f "$ofile" && cp "$cfgfile" "$ofile" && rm -f "$cfgfile")
+ chmod +x "$ofile"
+
+diff --git a/libsframe/Makefile.in b/libsframe/Makefile.in
+index 9e3d268902b..c22ed33460a 100644
+--- a/libsframe/Makefile.in
++++ b/libsframe/Makefile.in
+@@ -1,7 +1,7 @@
+-# Makefile.in generated by automake 1.15.1 from Makefile.am.
++# Makefile.in generated by automake 1.16.5 from Makefile.am.
+ # @configure_input@
+
+-# Copyright (C) 1994-2017 Free Software Foundation, Inc.
++# Copyright (C) 1994-2021 Free Software Foundation, Inc.
+
+ # This Makefile.in is free software; the Free Software Foundation
+ # gives unlimited permission to copy and/or distribute it,
+@@ -142,6 +142,13 @@ mkinstalldirs = $(SHELL) $(top_srcdir)/../mkinstalldirs
+ CONFIG_HEADER = config.h
+ CONFIG_CLEAN_FILES =
+ CONFIG_CLEAN_VPATH_FILES =
++@HAVE_COMPAT_DEJAGNU_TRUE@am__EXEEXT_1 = testsuite/libsframe.decode/be-flipping$(EXEEXT) \
++@HAVE_COMPAT_DEJAGNU_TRUE@ testsuite/libsframe.decode/frecnt-1$(EXEEXT) \
++@HAVE_COMPAT_DEJAGNU_TRUE@ testsuite/libsframe.decode/frecnt-2$(EXEEXT) \
++@HAVE_COMPAT_DEJAGNU_TRUE@ testsuite/libsframe.encode/encode-1$(EXEEXT) \
++@HAVE_COMPAT_DEJAGNU_TRUE@ testsuite/libsframe.find/findfre-1$(EXEEXT) \
++@HAVE_COMPAT_DEJAGNU_TRUE@ testsuite/libsframe.find/findfunc-1$(EXEEXT) \
++@HAVE_COMPAT_DEJAGNU_TRUE@ testsuite/libsframe.find/plt-findfre-1$(EXEEXT)
+ am__vpath_adj_setup = srcdirstrip=`echo "$(srcdir)" | sed 's|.|.|g'`;
+ am__vpath_adj = case $$p in \
+ $(srcdir)/*) f=`echo "$$p" | sed "s|^$$srcdirstrip/||"`;; \
+@@ -185,45 +192,44 @@ libsframe_la_LINK = $(LIBTOOL) $(AM_V_lt) --tag=CC $(AM_LIBTOOLFLAGS) \
+ $(libsframe_la_LDFLAGS) $(LDFLAGS) -o $@
+ @INSTALL_LIBBFD_FALSE@am_libsframe_la_rpath =
+ @INSTALL_LIBBFD_TRUE@am_libsframe_la_rpath = -rpath $(libdir)
+-@HAVE_COMPAT_DEJAGNU_TRUE@am__EXEEXT_1 = testsuite/libsframe.decode/be-flipping$(EXEEXT) \
+-@HAVE_COMPAT_DEJAGNU_TRUE@ testsuite/libsframe.decode/frecnt-1$(EXEEXT) \
+-@HAVE_COMPAT_DEJAGNU_TRUE@ testsuite/libsframe.decode/frecnt-2$(EXEEXT) \
+-@HAVE_COMPAT_DEJAGNU_TRUE@ testsuite/libsframe.encode/encode-1$(EXEEXT) \
+-@HAVE_COMPAT_DEJAGNU_TRUE@ testsuite/libsframe.find/findfre-1$(EXEEXT) \
+-@HAVE_COMPAT_DEJAGNU_TRUE@ testsuite/libsframe.find/findfunc-1$(EXEEXT) \
+-@HAVE_COMPAT_DEJAGNU_TRUE@ testsuite/libsframe.find/plt-findfre-1$(EXEEXT)
+ am__dirstamp = $(am__leading_dot)dirstamp
+-am_testsuite_libsframe_decode_be_flipping_OBJECTS = testsuite/libsframe.decode/testsuite_libsframe_decode_be_flipping-be-flipping.$(OBJEXT)
++am_testsuite_libsframe_decode_be_flipping_OBJECTS = \
++ testsuite/libsframe.decode/be_flipping-be-flipping.$(OBJEXT)
+ testsuite_libsframe_decode_be_flipping_OBJECTS = \
+ $(am_testsuite_libsframe_decode_be_flipping_OBJECTS)
+ testsuite_libsframe_decode_be_flipping_DEPENDENCIES = \
+ ${top_builddir}/libsframe.la
+-am_testsuite_libsframe_decode_frecnt_1_OBJECTS = testsuite/libsframe.decode/testsuite_libsframe_decode_frecnt_1-frecnt-1.$(OBJEXT)
++am_testsuite_libsframe_decode_frecnt_1_OBJECTS = \
++ testsuite/libsframe.decode/frecnt_1-frecnt-1.$(OBJEXT)
+ testsuite_libsframe_decode_frecnt_1_OBJECTS = \
+ $(am_testsuite_libsframe_decode_frecnt_1_OBJECTS)
+ testsuite_libsframe_decode_frecnt_1_DEPENDENCIES = \
+ ${top_builddir}/libsframe.la
+-am_testsuite_libsframe_decode_frecnt_2_OBJECTS = testsuite/libsframe.decode/testsuite_libsframe_decode_frecnt_2-frecnt-2.$(OBJEXT)
++am_testsuite_libsframe_decode_frecnt_2_OBJECTS = \
++ testsuite/libsframe.decode/frecnt_2-frecnt-2.$(OBJEXT)
+ testsuite_libsframe_decode_frecnt_2_OBJECTS = \
+ $(am_testsuite_libsframe_decode_frecnt_2_OBJECTS)
+ testsuite_libsframe_decode_frecnt_2_DEPENDENCIES = \
+ ${top_builddir}/libsframe.la
+-am_testsuite_libsframe_encode_encode_1_OBJECTS = testsuite/libsframe.encode/testsuite_libsframe_encode_encode_1-encode-1.$(OBJEXT)
++am_testsuite_libsframe_encode_encode_1_OBJECTS = \
++ testsuite/libsframe.encode/encode_1-encode-1.$(OBJEXT)
+ testsuite_libsframe_encode_encode_1_OBJECTS = \
+ $(am_testsuite_libsframe_encode_encode_1_OBJECTS)
+ testsuite_libsframe_encode_encode_1_DEPENDENCIES = \
+ ${top_builddir}/libsframe.la
+-am_testsuite_libsframe_find_findfre_1_OBJECTS = testsuite/libsframe.find/testsuite_libsframe_find_findfre_1-findfre-1.$(OBJEXT)
++am_testsuite_libsframe_find_findfre_1_OBJECTS = \
++ testsuite/libsframe.find/findfre_1-findfre-1.$(OBJEXT)
+ testsuite_libsframe_find_findfre_1_OBJECTS = \
+ $(am_testsuite_libsframe_find_findfre_1_OBJECTS)
+ testsuite_libsframe_find_findfre_1_DEPENDENCIES = \
+ ${top_builddir}/libsframe.la
+-am_testsuite_libsframe_find_findfunc_1_OBJECTS = testsuite/libsframe.find/testsuite_libsframe_find_findfunc_1-findfunc-1.$(OBJEXT)
++am_testsuite_libsframe_find_findfunc_1_OBJECTS = \
++ testsuite/libsframe.find/findfunc_1-findfunc-1.$(OBJEXT)
+ testsuite_libsframe_find_findfunc_1_OBJECTS = \
+ $(am_testsuite_libsframe_find_findfunc_1_OBJECTS)
+ testsuite_libsframe_find_findfunc_1_DEPENDENCIES = \
+ ${top_builddir}/libsframe.la
+-am_testsuite_libsframe_find_plt_findfre_1_OBJECTS = testsuite/libsframe.find/testsuite_libsframe_find_plt_findfre_1-plt-findfre-1.$(OBJEXT)
++am_testsuite_libsframe_find_plt_findfre_1_OBJECTS = testsuite/libsframe.find/plt_findfre_1-plt-findfre-1.$(OBJEXT)
+ testsuite_libsframe_find_plt_findfre_1_OBJECTS = \
+ $(am_testsuite_libsframe_find_plt_findfre_1_OBJECTS)
+ testsuite_libsframe_find_plt_findfre_1_DEPENDENCIES = \
+@@ -242,7 +248,17 @@ am__v_at_0 = @
+ am__v_at_1 =
+ DEFAULT_INCLUDES = -I.@am__isrc@
+ depcomp = $(SHELL) $(top_srcdir)/../depcomp
+-am__depfiles_maybe = depfiles
++am__maybe_remake_depfiles = depfiles
++am__depfiles_remade = ./$(DEPDIR)/libsframe_la-sframe-dump.Plo \
++ ./$(DEPDIR)/libsframe_la-sframe-error.Plo \
++ ./$(DEPDIR)/libsframe_la-sframe.Plo \
++ testsuite/libsframe.decode/$(DEPDIR)/be_flipping-be-flipping.Po \
++ testsuite/libsframe.decode/$(DEPDIR)/frecnt_1-frecnt-1.Po \
++ testsuite/libsframe.decode/$(DEPDIR)/frecnt_2-frecnt-2.Po \
++ testsuite/libsframe.encode/$(DEPDIR)/encode_1-encode-1.Po \
++ testsuite/libsframe.find/$(DEPDIR)/findfre_1-findfre-1.Po \
++ testsuite/libsframe.find/$(DEPDIR)/findfunc_1-findfunc-1.Po \
++ testsuite/libsframe.find/$(DEPDIR)/plt_findfre_1-plt-findfre-1.Po
+ am__mv = mv -f
+ COMPILE = $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) \
+ $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS)
+@@ -325,8 +341,8 @@ am__can_run_installinfo = \
+ esac
+ am__include_HEADERS_DIST = $(INCDIR)/sframe.h $(INCDIR)/sframe-api.h
+ HEADERS = $(include_HEADERS)
+-am__tagged_files = $(HEADERS) $(SOURCES) $(TAGS_FILES) \
+- $(LISP)config.h.in
++am__tagged_files = $(HEADERS) $(SOURCES) $(TAGS_FILES) $(LISP) \
++ config.h.in
+ # Read a list of newline-separated strings from the standard input,
+ # and print each of them once, without duplicates. Input order is
+ # *not* preserved.
+@@ -343,9 +359,6 @@ am__define_uniq_tagged_files = \
+ unique=`for i in $$list; do \
+ if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \
+ done | $(am__uniquify_input)`
+-ETAGS = etags
+-CTAGS = ctags
+-CSCOPE = cscope
+ AM_RECURSIVE_TARGETS = cscope
+ DEJATOOL = $(PACKAGE)
+ RUNTESTDEFAULTFLAGS = --tool $$tool --srcdir $$srcdir
+@@ -372,6 +385,8 @@ am__post_remove_distdir = $(am__remove_distdir)
+ DIST_ARCHIVES = $(distdir).tar.gz
+ GZIP_ENV = --best
+ DIST_TARGETS = dist-gzip
++# Exists only to be overridden by the user if desired.
++AM_DISTCHECK_DVI_TARGET = dvi
+ distuninstallcheck_listfiles = find . -type f -print
+ am__distuninstallcheck_listfiles = $(distuninstallcheck_listfiles) \
+ | sed 's|^\./|$(prefix)/|' | grep -v '$(infodir)/dir$$'
+@@ -390,15 +405,19 @@ CFLAGS = @CFLAGS@
+ COMPAT_DEJAGNU = @COMPAT_DEJAGNU@
+ CPP = @CPP@
+ CPPFLAGS = @CPPFLAGS@
++CSCOPE = @CSCOPE@
++CTAGS = @CTAGS@
+ CYGPATH_W = @CYGPATH_W@
+ DEFS = @DEFS@
+ DEPDIR = @DEPDIR@
++DLLTOOL = @DLLTOOL@
+ DSYMUTIL = @DSYMUTIL@
+ DUMPBIN = @DUMPBIN@
+ ECHO_C = @ECHO_C@
+ ECHO_N = @ECHO_N@
+ ECHO_T = @ECHO_T@
+ EGREP = @EGREP@
++ETAGS = @ETAGS@
+ EXEEXT = @EXEEXT@
+ FGREP = @FGREP@
+ GREP = @GREP@
+@@ -417,6 +436,7 @@ LN_S = @LN_S@
+ LTLIBOBJS = @LTLIBOBJS@
+ MAINT = @MAINT@
+ MAKEINFO = @MAKEINFO@
++MANIFEST_TOOL = @MANIFEST_TOOL@
+ MKDIR_P = @MKDIR_P@
+ NM = @NM@
+ NMEDIT = @NMEDIT@
+@@ -589,8 +609,8 @@ Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status
+ echo ' $(SHELL) ./config.status'; \
+ $(SHELL) ./config.status;; \
+ *) \
+- echo ' cd $(top_builddir) && $(SHELL) ./config.status $@ $(am__depfiles_maybe)'; \
+- cd $(top_builddir) && $(SHELL) ./config.status $@ $(am__depfiles_maybe);; \
++ echo ' cd $(top_builddir) && $(SHELL) ./config.status $@ $(am__maybe_remake_depfiles)'; \
++ cd $(top_builddir) && $(SHELL) ./config.status $@ $(am__maybe_remake_depfiles);; \
+ esac;
+ $(srcdir)/doc/local.mk $(srcdir)/testsuite/local.mk $(srcdir)/testsuite/libsframe.decode/local.mk $(srcdir)/testsuite/libsframe.encode/local.mk $(srcdir)/testsuite/libsframe.find/local.mk $(am__empty):
+
+@@ -618,6 +638,15 @@ $(srcdir)/config.h.in: @MAINTAINER_MODE_TRUE@ $(am__configure_deps)
+ distclean-hdr:
+ -rm -f config.h stamp-h1
+
++clean-checkPROGRAMS:
++ @list='$(check_PROGRAMS)'; test -n "$$list" || exit 0; \
++ echo " rm -f" $$list; \
++ rm -f $$list || exit $$?; \
++ test -n "$(EXEEXT)" || exit 0; \
++ list=`for p in $$list; do echo "$$p"; done | sed 's/$(EXEEXT)$$//'`; \
++ echo " rm -f" $$list; \
++ rm -f $$list
++
+ install-libLTLIBRARIES: $(lib_LTLIBRARIES)
+ @$(NORMAL_INSTALL)
+ @list='$(lib_LTLIBRARIES)'; test -n "$(libdir)" || list=; \
+@@ -666,36 +695,27 @@ clean-noinstLTLIBRARIES:
+
+ libsframe.la: $(libsframe_la_OBJECTS) $(libsframe_la_DEPENDENCIES) $(EXTRA_libsframe_la_DEPENDENCIES)
+ $(AM_V_CCLD)$(libsframe_la_LINK) $(am_libsframe_la_rpath) $(libsframe_la_OBJECTS) $(libsframe_la_LIBADD) $(LIBS)
+-
+-clean-checkPROGRAMS:
+- @list='$(check_PROGRAMS)'; test -n "$$list" || exit 0; \
+- echo " rm -f" $$list; \
+- rm -f $$list || exit $$?; \
+- test -n "$(EXEEXT)" || exit 0; \
+- list=`for p in $$list; do echo "$$p"; done | sed 's/$(EXEEXT)$$//'`; \
+- echo " rm -f" $$list; \
+- rm -f $$list
+ testsuite/libsframe.decode/$(am__dirstamp):
+ @$(MKDIR_P) testsuite/libsframe.decode
+ @: > testsuite/libsframe.decode/$(am__dirstamp)
+ testsuite/libsframe.decode/$(DEPDIR)/$(am__dirstamp):
+ @$(MKDIR_P) testsuite/libsframe.decode/$(DEPDIR)
+ @: > testsuite/libsframe.decode/$(DEPDIR)/$(am__dirstamp)
+-testsuite/libsframe.decode/testsuite_libsframe_decode_be_flipping-be-flipping.$(OBJEXT): \
++testsuite/libsframe.decode/be_flipping-be-flipping.$(OBJEXT): \
+ testsuite/libsframe.decode/$(am__dirstamp) \
+ testsuite/libsframe.decode/$(DEPDIR)/$(am__dirstamp)
+
+ testsuite/libsframe.decode/be-flipping$(EXEEXT): $(testsuite_libsframe_decode_be_flipping_OBJECTS) $(testsuite_libsframe_decode_be_flipping_DEPENDENCIES) $(EXTRA_testsuite_libsframe_decode_be_flipping_DEPENDENCIES) testsuite/libsframe.decode/$(am__dirstamp)
+ @rm -f testsuite/libsframe.decode/be-flipping$(EXEEXT)
+ $(AM_V_CCLD)$(LINK) $(testsuite_libsframe_decode_be_flipping_OBJECTS) $(testsuite_libsframe_decode_be_flipping_LDADD) $(LIBS)
+-testsuite/libsframe.decode/testsuite_libsframe_decode_frecnt_1-frecnt-1.$(OBJEXT): \
++testsuite/libsframe.decode/frecnt_1-frecnt-1.$(OBJEXT): \
+ testsuite/libsframe.decode/$(am__dirstamp) \
+ testsuite/libsframe.decode/$(DEPDIR)/$(am__dirstamp)
+
+ testsuite/libsframe.decode/frecnt-1$(EXEEXT): $(testsuite_libsframe_decode_frecnt_1_OBJECTS) $(testsuite_libsframe_decode_frecnt_1_DEPENDENCIES) $(EXTRA_testsuite_libsframe_decode_frecnt_1_DEPENDENCIES) testsuite/libsframe.decode/$(am__dirstamp)
+ @rm -f testsuite/libsframe.decode/frecnt-1$(EXEEXT)
+ $(AM_V_CCLD)$(LINK) $(testsuite_libsframe_decode_frecnt_1_OBJECTS) $(testsuite_libsframe_decode_frecnt_1_LDADD) $(LIBS)
+-testsuite/libsframe.decode/testsuite_libsframe_decode_frecnt_2-frecnt-2.$(OBJEXT): \
++testsuite/libsframe.decode/frecnt_2-frecnt-2.$(OBJEXT): \
+ testsuite/libsframe.decode/$(am__dirstamp) \
+ testsuite/libsframe.decode/$(DEPDIR)/$(am__dirstamp)
+
+@@ -708,7 +728,7 @@ testsuite/libsframe.encode/$(am__dirstamp):
+ testsuite/libsframe.encode/$(DEPDIR)/$(am__dirstamp):
+ @$(MKDIR_P) testsuite/libsframe.encode/$(DEPDIR)
+ @: > testsuite/libsframe.encode/$(DEPDIR)/$(am__dirstamp)
+-testsuite/libsframe.encode/testsuite_libsframe_encode_encode_1-encode-1.$(OBJEXT): \
++testsuite/libsframe.encode/encode_1-encode-1.$(OBJEXT): \
+ testsuite/libsframe.encode/$(am__dirstamp) \
+ testsuite/libsframe.encode/$(DEPDIR)/$(am__dirstamp)
+
+@@ -721,21 +741,21 @@ testsuite/libsframe.find/$(am__dirstamp):
+ testsuite/libsframe.find/$(DEPDIR)/$(am__dirstamp):
+ @$(MKDIR_P) testsuite/libsframe.find/$(DEPDIR)
+ @: > testsuite/libsframe.find/$(DEPDIR)/$(am__dirstamp)
+-testsuite/libsframe.find/testsuite_libsframe_find_findfre_1-findfre-1.$(OBJEXT): \
++testsuite/libsframe.find/findfre_1-findfre-1.$(OBJEXT): \
+ testsuite/libsframe.find/$(am__dirstamp) \
+ testsuite/libsframe.find/$(DEPDIR)/$(am__dirstamp)
+
+ testsuite/libsframe.find/findfre-1$(EXEEXT): $(testsuite_libsframe_find_findfre_1_OBJECTS) $(testsuite_libsframe_find_findfre_1_DEPENDENCIES) $(EXTRA_testsuite_libsframe_find_findfre_1_DEPENDENCIES) testsuite/libsframe.find/$(am__dirstamp)
+ @rm -f testsuite/libsframe.find/findfre-1$(EXEEXT)
+ $(AM_V_CCLD)$(LINK) $(testsuite_libsframe_find_findfre_1_OBJECTS) $(testsuite_libsframe_find_findfre_1_LDADD) $(LIBS)
+-testsuite/libsframe.find/testsuite_libsframe_find_findfunc_1-findfunc-1.$(OBJEXT): \
++testsuite/libsframe.find/findfunc_1-findfunc-1.$(OBJEXT): \
+ testsuite/libsframe.find/$(am__dirstamp) \
+ testsuite/libsframe.find/$(DEPDIR)/$(am__dirstamp)
+
+ testsuite/libsframe.find/findfunc-1$(EXEEXT): $(testsuite_libsframe_find_findfunc_1_OBJECTS) $(testsuite_libsframe_find_findfunc_1_DEPENDENCIES) $(EXTRA_testsuite_libsframe_find_findfunc_1_DEPENDENCIES) testsuite/libsframe.find/$(am__dirstamp)
+ @rm -f testsuite/libsframe.find/findfunc-1$(EXEEXT)
+ $(AM_V_CCLD)$(LINK) $(testsuite_libsframe_find_findfunc_1_OBJECTS) $(testsuite_libsframe_find_findfunc_1_LDADD) $(LIBS)
+-testsuite/libsframe.find/testsuite_libsframe_find_plt_findfre_1-plt-findfre-1.$(OBJEXT): \
++testsuite/libsframe.find/plt_findfre_1-plt-findfre-1.$(OBJEXT): \
+ testsuite/libsframe.find/$(am__dirstamp) \
+ testsuite/libsframe.find/$(DEPDIR)/$(am__dirstamp)
+
+@@ -752,16 +772,22 @@ mostlyclean-compile:
+ distclean-compile:
+ -rm -f *.tab.c
+
+-@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libsframe_la-sframe-dump.Plo@am__quote@
+-@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libsframe_la-sframe-error.Plo@am__quote@
+-@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libsframe_la-sframe.Plo@am__quote@
+-@AMDEP_TRUE@@am__include@ @am__quote@testsuite/libsframe.decode/$(DEPDIR)/testsuite_libsframe_decode_be_flipping-be-flipping.Po@am__quote@
+-@AMDEP_TRUE@@am__include@ @am__quote@testsuite/libsframe.decode/$(DEPDIR)/testsuite_libsframe_decode_frecnt_1-frecnt-1.Po@am__quote@
+-@AMDEP_TRUE@@am__include@ @am__quote@testsuite/libsframe.decode/$(DEPDIR)/testsuite_libsframe_decode_frecnt_2-frecnt-2.Po@am__quote@
+-@AMDEP_TRUE@@am__include@ @am__quote@testsuite/libsframe.encode/$(DEPDIR)/testsuite_libsframe_encode_encode_1-encode-1.Po@am__quote@
+-@AMDEP_TRUE@@am__include@ @am__quote@testsuite/libsframe.find/$(DEPDIR)/testsuite_libsframe_find_findfre_1-findfre-1.Po@am__quote@
+-@AMDEP_TRUE@@am__include@ @am__quote@testsuite/libsframe.find/$(DEPDIR)/testsuite_libsframe_find_findfunc_1-findfunc-1.Po@am__quote@
+-@AMDEP_TRUE@@am__include@ @am__quote@testsuite/libsframe.find/$(DEPDIR)/testsuite_libsframe_find_plt_findfre_1-plt-findfre-1.Po@am__quote@
++@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libsframe_la-sframe-dump.Plo@am__quote@ # am--include-marker
++@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libsframe_la-sframe-error.Plo@am__quote@ # am--include-marker
++@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libsframe_la-sframe.Plo@am__quote@ # am--include-marker
++@AMDEP_TRUE@@am__include@ @am__quote@testsuite/libsframe.decode/$(DEPDIR)/be_flipping-be-flipping.Po@am__quote@ # am--include-marker
++@AMDEP_TRUE@@am__include@ @am__quote@testsuite/libsframe.decode/$(DEPDIR)/frecnt_1-frecnt-1.Po@am__quote@ # am--include-marker
++@AMDEP_TRUE@@am__include@ @am__quote@testsuite/libsframe.decode/$(DEPDIR)/frecnt_2-frecnt-2.Po@am__quote@ # am--include-marker
++@AMDEP_TRUE@@am__include@ @am__quote@testsuite/libsframe.encode/$(DEPDIR)/encode_1-encode-1.Po@am__quote@ # am--include-marker
++@AMDEP_TRUE@@am__include@ @am__quote@testsuite/libsframe.find/$(DEPDIR)/findfre_1-findfre-1.Po@am__quote@ # am--include-marker
++@AMDEP_TRUE@@am__include@ @am__quote@testsuite/libsframe.find/$(DEPDIR)/findfunc_1-findfunc-1.Po@am__quote@ # am--include-marker
++@AMDEP_TRUE@@am__include@ @am__quote@testsuite/libsframe.find/$(DEPDIR)/plt_findfre_1-plt-findfre-1.Po@am__quote@ # am--include-marker
++
++$(am__depfiles_remade):
++ @$(MKDIR_P) $(@D)
++ @echo '# dummy' >$@-t && $(am__mv) $@-t $@
++
++am--depfiles: $(am__depfiles_remade)
+
+ .c.o:
+ @am__fastdepCC_TRUE@ $(AM_V_CC)depbase=`echo $@ | sed 's|[^/]*$$|$(DEPDIR)/&|;s|\.o$$||'`;\
+@@ -808,103 +834,103 @@ libsframe_la-sframe-error.lo: sframe-error.c
+ @AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@
+ @am__fastdepCC_FALSE@ $(AM_V_CC@am__nodep@)$(LIBTOOL) $(AM_V_lt) --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(libsframe_la_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o libsframe_la-sframe-error.lo `test -f 'sframe-error.c' || echo '$(srcdir)/'`sframe-error.c
+
+-testsuite/libsframe.decode/testsuite_libsframe_decode_be_flipping-be-flipping.o: testsuite/libsframe.decode/be-flipping.c
+-@am__fastdepCC_TRUE@ $(AM_V_CC)$(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(testsuite_libsframe_decode_be_flipping_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT testsuite/libsframe.decode/testsuite_libsframe_decode_be_flipping-be-flipping.o -MD -MP -MF testsuite/libsframe.decode/$(DEPDIR)/testsuite_libsframe_decode_be_flipping-be-flipping.Tpo -c -o testsuite/libsframe.decode/testsuite_libsframe_decode_be_flipping-be-flipping.o `test -f 'testsuite/libsframe.decode/be-flipping.c' || echo '$(srcdir)/'`testsuite/libsframe.decode/be-flipping.c
+-@am__fastdepCC_TRUE@ $(AM_V_at)$(am__mv) testsuite/libsframe.decode/$(DEPDIR)/testsuite_libsframe_decode_be_flipping-be-flipping.Tpo testsuite/libsframe.decode/$(DEPDIR)/testsuite_libsframe_decode_be_flipping-be-flipping.Po
+-@AMDEP_TRUE@@am__fastdepCC_FALSE@ $(AM_V_CC)source='testsuite/libsframe.decode/be-flipping.c' object='testsuite/libsframe.decode/testsuite_libsframe_decode_be_flipping-be-flipping.o' libtool=no @AMDEPBACKSLASH@
++testsuite/libsframe.decode/be_flipping-be-flipping.o: testsuite/libsframe.decode/be-flipping.c
++@am__fastdepCC_TRUE@ $(AM_V_CC)$(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(testsuite_libsframe_decode_be_flipping_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT testsuite/libsframe.decode/be_flipping-be-flipping.o -MD -MP -MF testsuite/libsframe.decode/$(DEPDIR)/be_flipping-be-flipping.Tpo -c -o testsuite/libsframe.decode/be_flipping-be-flipping.o `test -f 'testsuite/libsframe.decode/be-flipping.c' || echo '$(srcdir)/'`testsuite/libsframe.decode/be-flipping.c
++@am__fastdepCC_TRUE@ $(AM_V_at)$(am__mv) testsuite/libsframe.decode/$(DEPDIR)/be_flipping-be-flipping.Tpo testsuite/libsframe.decode/$(DEPDIR)/be_flipping-be-flipping.Po
++@AMDEP_TRUE@@am__fastdepCC_FALSE@ $(AM_V_CC)source='testsuite/libsframe.decode/be-flipping.c' object='testsuite/libsframe.decode/be_flipping-be-flipping.o' libtool=no @AMDEPBACKSLASH@
+ @AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@
+-@am__fastdepCC_FALSE@ $(AM_V_CC@am__nodep@)$(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(testsuite_libsframe_decode_be_flipping_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o testsuite/libsframe.decode/testsuite_libsframe_decode_be_flipping-be-flipping.o `test -f 'testsuite/libsframe.decode/be-flipping.c' || echo '$(srcdir)/'`testsuite/libsframe.decode/be-flipping.c
++@am__fastdepCC_FALSE@ $(AM_V_CC@am__nodep@)$(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(testsuite_libsframe_decode_be_flipping_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o testsuite/libsframe.decode/be_flipping-be-flipping.o `test -f 'testsuite/libsframe.decode/be-flipping.c' || echo '$(srcdir)/'`testsuite/libsframe.decode/be-flipping.c
+
+-testsuite/libsframe.decode/testsuite_libsframe_decode_be_flipping-be-flipping.obj: testsuite/libsframe.decode/be-flipping.c
+-@am__fastdepCC_TRUE@ $(AM_V_CC)$(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(testsuite_libsframe_decode_be_flipping_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT testsuite/libsframe.decode/testsuite_libsframe_decode_be_flipping-be-flipping.obj -MD -MP -MF testsuite/libsframe.decode/$(DEPDIR)/testsuite_libsframe_decode_be_flipping-be-flipping.Tpo -c -o testsuite/libsframe.decode/testsuite_libsframe_decode_be_flipping-be-flipping.obj `if test -f 'testsuite/libsframe.decode/be-flipping.c'; then $(CYGPATH_W) 'testsuite/libsframe.decode/be-flipping.c'; else $(CYGPATH_W) '$(srcdir)/testsuite/libsframe.decode/be-flipping.c'; fi`
+-@am__fastdepCC_TRUE@ $(AM_V_at)$(am__mv) testsuite/libsframe.decode/$(DEPDIR)/testsuite_libsframe_decode_be_flipping-be-flipping.Tpo testsuite/libsframe.decode/$(DEPDIR)/testsuite_libsframe_decode_be_flipping-be-flipping.Po
+-@AMDEP_TRUE@@am__fastdepCC_FALSE@ $(AM_V_CC)source='testsuite/libsframe.decode/be-flipping.c' object='testsuite/libsframe.decode/testsuite_libsframe_decode_be_flipping-be-flipping.obj' libtool=no @AMDEPBACKSLASH@
++testsuite/libsframe.decode/be_flipping-be-flipping.obj: testsuite/libsframe.decode/be-flipping.c
++@am__fastdepCC_TRUE@ $(AM_V_CC)$(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(testsuite_libsframe_decode_be_flipping_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT testsuite/libsframe.decode/be_flipping-be-flipping.obj -MD -MP -MF testsuite/libsframe.decode/$(DEPDIR)/be_flipping-be-flipping.Tpo -c -o testsuite/libsframe.decode/be_flipping-be-flipping.obj `if test -f 'testsuite/libsframe.decode/be-flipping.c'; then $(CYGPATH_W) 'testsuite/libsframe.decode/be-flipping.c'; else $(CYGPATH_W) '$(srcdir)/testsuite/libsframe.decode/be-flipping.c'; fi`
++@am__fastdepCC_TRUE@ $(AM_V_at)$(am__mv) testsuite/libsframe.decode/$(DEPDIR)/be_flipping-be-flipping.Tpo testsuite/libsframe.decode/$(DEPDIR)/be_flipping-be-flipping.Po
++@AMDEP_TRUE@@am__fastdepCC_FALSE@ $(AM_V_CC)source='testsuite/libsframe.decode/be-flipping.c' object='testsuite/libsframe.decode/be_flipping-be-flipping.obj' libtool=no @AMDEPBACKSLASH@
+ @AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@
+-@am__fastdepCC_FALSE@ $(AM_V_CC@am__nodep@)$(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(testsuite_libsframe_decode_be_flipping_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o testsuite/libsframe.decode/testsuite_libsframe_decode_be_flipping-be-flipping.obj `if test -f 'testsuite/libsframe.decode/be-flipping.c'; then $(CYGPATH_W) 'testsuite/libsframe.decode/be-flipping.c'; else $(CYGPATH_W) '$(srcdir)/testsuite/libsframe.decode/be-flipping.c'; fi`
++@am__fastdepCC_FALSE@ $(AM_V_CC@am__nodep@)$(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(testsuite_libsframe_decode_be_flipping_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o testsuite/libsframe.decode/be_flipping-be-flipping.obj `if test -f 'testsuite/libsframe.decode/be-flipping.c'; then $(CYGPATH_W) 'testsuite/libsframe.decode/be-flipping.c'; else $(CYGPATH_W) '$(srcdir)/testsuite/libsframe.decode/be-flipping.c'; fi`
+
+-testsuite/libsframe.decode/testsuite_libsframe_decode_frecnt_1-frecnt-1.o: testsuite/libsframe.decode/frecnt-1.c
+-@am__fastdepCC_TRUE@ $(AM_V_CC)$(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(testsuite_libsframe_decode_frecnt_1_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT testsuite/libsframe.decode/testsuite_libsframe_decode_frecnt_1-frecnt-1.o -MD -MP -MF testsuite/libsframe.decode/$(DEPDIR)/testsuite_libsframe_decode_frecnt_1-frecnt-1.Tpo -c -o testsuite/libsframe.decode/testsuite_libsframe_decode_frecnt_1-frecnt-1.o `test -f 'testsuite/libsframe.decode/frecnt-1.c' || echo '$(srcdir)/'`testsuite/libsframe.decode/frecnt-1.c
+-@am__fastdepCC_TRUE@ $(AM_V_at)$(am__mv) testsuite/libsframe.decode/$(DEPDIR)/testsuite_libsframe_decode_frecnt_1-frecnt-1.Tpo testsuite/libsframe.decode/$(DEPDIR)/testsuite_libsframe_decode_frecnt_1-frecnt-1.Po
+-@AMDEP_TRUE@@am__fastdepCC_FALSE@ $(AM_V_CC)source='testsuite/libsframe.decode/frecnt-1.c' object='testsuite/libsframe.decode/testsuite_libsframe_decode_frecnt_1-frecnt-1.o' libtool=no @AMDEPBACKSLASH@
++testsuite/libsframe.decode/frecnt_1-frecnt-1.o: testsuite/libsframe.decode/frecnt-1.c
++@am__fastdepCC_TRUE@ $(AM_V_CC)$(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(testsuite_libsframe_decode_frecnt_1_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT testsuite/libsframe.decode/frecnt_1-frecnt-1.o -MD -MP -MF testsuite/libsframe.decode/$(DEPDIR)/frecnt_1-frecnt-1.Tpo -c -o testsuite/libsframe.decode/frecnt_1-frecnt-1.o `test -f 'testsuite/libsframe.decode/frecnt-1.c' || echo '$(srcdir)/'`testsuite/libsframe.decode/frecnt-1.c
++@am__fastdepCC_TRUE@ $(AM_V_at)$(am__mv) testsuite/libsframe.decode/$(DEPDIR)/frecnt_1-frecnt-1.Tpo testsuite/libsframe.decode/$(DEPDIR)/frecnt_1-frecnt-1.Po
++@AMDEP_TRUE@@am__fastdepCC_FALSE@ $(AM_V_CC)source='testsuite/libsframe.decode/frecnt-1.c' object='testsuite/libsframe.decode/frecnt_1-frecnt-1.o' libtool=no @AMDEPBACKSLASH@
+ @AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@
+-@am__fastdepCC_FALSE@ $(AM_V_CC@am__nodep@)$(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(testsuite_libsframe_decode_frecnt_1_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o testsuite/libsframe.decode/testsuite_libsframe_decode_frecnt_1-frecnt-1.o `test -f 'testsuite/libsframe.decode/frecnt-1.c' || echo '$(srcdir)/'`testsuite/libsframe.decode/frecnt-1.c
++@am__fastdepCC_FALSE@ $(AM_V_CC@am__nodep@)$(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(testsuite_libsframe_decode_frecnt_1_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o testsuite/libsframe.decode/frecnt_1-frecnt-1.o `test -f 'testsuite/libsframe.decode/frecnt-1.c' || echo '$(srcdir)/'`testsuite/libsframe.decode/frecnt-1.c
+
+-testsuite/libsframe.decode/testsuite_libsframe_decode_frecnt_1-frecnt-1.obj: testsuite/libsframe.decode/frecnt-1.c
+-@am__fastdepCC_TRUE@ $(AM_V_CC)$(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(testsuite_libsframe_decode_frecnt_1_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT testsuite/libsframe.decode/testsuite_libsframe_decode_frecnt_1-frecnt-1.obj -MD -MP -MF testsuite/libsframe.decode/$(DEPDIR)/testsuite_libsframe_decode_frecnt_1-frecnt-1.Tpo -c -o testsuite/libsframe.decode/testsuite_libsframe_decode_frecnt_1-frecnt-1.obj `if test -f 'testsuite/libsframe.decode/frecnt-1.c'; then $(CYGPATH_W) 'testsuite/libsframe.decode/frecnt-1.c'; else $(CYGPATH_W) '$(srcdir)/testsuite/libsframe.decode/frecnt-1.c'; fi`
+-@am__fastdepCC_TRUE@ $(AM_V_at)$(am__mv) testsuite/libsframe.decode/$(DEPDIR)/testsuite_libsframe_decode_frecnt_1-frecnt-1.Tpo testsuite/libsframe.decode/$(DEPDIR)/testsuite_libsframe_decode_frecnt_1-frecnt-1.Po
+-@AMDEP_TRUE@@am__fastdepCC_FALSE@ $(AM_V_CC)source='testsuite/libsframe.decode/frecnt-1.c' object='testsuite/libsframe.decode/testsuite_libsframe_decode_frecnt_1-frecnt-1.obj' libtool=no @AMDEPBACKSLASH@
++testsuite/libsframe.decode/frecnt_1-frecnt-1.obj: testsuite/libsframe.decode/frecnt-1.c
++@am__fastdepCC_TRUE@ $(AM_V_CC)$(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(testsuite_libsframe_decode_frecnt_1_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT testsuite/libsframe.decode/frecnt_1-frecnt-1.obj -MD -MP -MF testsuite/libsframe.decode/$(DEPDIR)/frecnt_1-frecnt-1.Tpo -c -o testsuite/libsframe.decode/frecnt_1-frecnt-1.obj `if test -f 'testsuite/libsframe.decode/frecnt-1.c'; then $(CYGPATH_W) 'testsuite/libsframe.decode/frecnt-1.c'; else $(CYGPATH_W) '$(srcdir)/testsuite/libsframe.decode/frecnt-1.c'; fi`
++@am__fastdepCC_TRUE@ $(AM_V_at)$(am__mv) testsuite/libsframe.decode/$(DEPDIR)/frecnt_1-frecnt-1.Tpo testsuite/libsframe.decode/$(DEPDIR)/frecnt_1-frecnt-1.Po
++@AMDEP_TRUE@@am__fastdepCC_FALSE@ $(AM_V_CC)source='testsuite/libsframe.decode/frecnt-1.c' object='testsuite/libsframe.decode/frecnt_1-frecnt-1.obj' libtool=no @AMDEPBACKSLASH@
+ @AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@
+-@am__fastdepCC_FALSE@ $(AM_V_CC@am__nodep@)$(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(testsuite_libsframe_decode_frecnt_1_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o testsuite/libsframe.decode/testsuite_libsframe_decode_frecnt_1-frecnt-1.obj `if test -f 'testsuite/libsframe.decode/frecnt-1.c'; then $(CYGPATH_W) 'testsuite/libsframe.decode/frecnt-1.c'; else $(CYGPATH_W) '$(srcdir)/testsuite/libsframe.decode/frecnt-1.c'; fi`
++@am__fastdepCC_FALSE@ $(AM_V_CC@am__nodep@)$(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(testsuite_libsframe_decode_frecnt_1_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o testsuite/libsframe.decode/frecnt_1-frecnt-1.obj `if test -f 'testsuite/libsframe.decode/frecnt-1.c'; then $(CYGPATH_W) 'testsuite/libsframe.decode/frecnt-1.c'; else $(CYGPATH_W) '$(srcdir)/testsuite/libsframe.decode/frecnt-1.c'; fi`
+
+-testsuite/libsframe.decode/testsuite_libsframe_decode_frecnt_2-frecnt-2.o: testsuite/libsframe.decode/frecnt-2.c
+-@am__fastdepCC_TRUE@ $(AM_V_CC)$(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(testsuite_libsframe_decode_frecnt_2_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT testsuite/libsframe.decode/testsuite_libsframe_decode_frecnt_2-frecnt-2.o -MD -MP -MF testsuite/libsframe.decode/$(DEPDIR)/testsuite_libsframe_decode_frecnt_2-frecnt-2.Tpo -c -o testsuite/libsframe.decode/testsuite_libsframe_decode_frecnt_2-frecnt-2.o `test -f 'testsuite/libsframe.decode/frecnt-2.c' || echo '$(srcdir)/'`testsuite/libsframe.decode/frecnt-2.c
+-@am__fastdepCC_TRUE@ $(AM_V_at)$(am__mv) testsuite/libsframe.decode/$(DEPDIR)/testsuite_libsframe_decode_frecnt_2-frecnt-2.Tpo testsuite/libsframe.decode/$(DEPDIR)/testsuite_libsframe_decode_frecnt_2-frecnt-2.Po
+-@AMDEP_TRUE@@am__fastdepCC_FALSE@ $(AM_V_CC)source='testsuite/libsframe.decode/frecnt-2.c' object='testsuite/libsframe.decode/testsuite_libsframe_decode_frecnt_2-frecnt-2.o' libtool=no @AMDEPBACKSLASH@
++testsuite/libsframe.decode/frecnt_2-frecnt-2.o: testsuite/libsframe.decode/frecnt-2.c
++@am__fastdepCC_TRUE@ $(AM_V_CC)$(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(testsuite_libsframe_decode_frecnt_2_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT testsuite/libsframe.decode/frecnt_2-frecnt-2.o -MD -MP -MF testsuite/libsframe.decode/$(DEPDIR)/frecnt_2-frecnt-2.Tpo -c -o testsuite/libsframe.decode/frecnt_2-frecnt-2.o `test -f 'testsuite/libsframe.decode/frecnt-2.c' || echo '$(srcdir)/'`testsuite/libsframe.decode/frecnt-2.c
++@am__fastdepCC_TRUE@ $(AM_V_at)$(am__mv) testsuite/libsframe.decode/$(DEPDIR)/frecnt_2-frecnt-2.Tpo testsuite/libsframe.decode/$(DEPDIR)/frecnt_2-frecnt-2.Po
++@AMDEP_TRUE@@am__fastdepCC_FALSE@ $(AM_V_CC)source='testsuite/libsframe.decode/frecnt-2.c' object='testsuite/libsframe.decode/frecnt_2-frecnt-2.o' libtool=no @AMDEPBACKSLASH@
+ @AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@
+-@am__fastdepCC_FALSE@ $(AM_V_CC@am__nodep@)$(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(testsuite_libsframe_decode_frecnt_2_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o testsuite/libsframe.decode/testsuite_libsframe_decode_frecnt_2-frecnt-2.o `test -f 'testsuite/libsframe.decode/frecnt-2.c' || echo '$(srcdir)/'`testsuite/libsframe.decode/frecnt-2.c
++@am__fastdepCC_FALSE@ $(AM_V_CC@am__nodep@)$(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(testsuite_libsframe_decode_frecnt_2_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o testsuite/libsframe.decode/frecnt_2-frecnt-2.o `test -f 'testsuite/libsframe.decode/frecnt-2.c' || echo '$(srcdir)/'`testsuite/libsframe.decode/frecnt-2.c
+
+-testsuite/libsframe.decode/testsuite_libsframe_decode_frecnt_2-frecnt-2.obj: testsuite/libsframe.decode/frecnt-2.c
+-@am__fastdepCC_TRUE@ $(AM_V_CC)$(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(testsuite_libsframe_decode_frecnt_2_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT testsuite/libsframe.decode/testsuite_libsframe_decode_frecnt_2-frecnt-2.obj -MD -MP -MF testsuite/libsframe.decode/$(DEPDIR)/testsuite_libsframe_decode_frecnt_2-frecnt-2.Tpo -c -o testsuite/libsframe.decode/testsuite_libsframe_decode_frecnt_2-frecnt-2.obj `if test -f 'testsuite/libsframe.decode/frecnt-2.c'; then $(CYGPATH_W) 'testsuite/libsframe.decode/frecnt-2.c'; else $(CYGPATH_W) '$(srcdir)/testsuite/libsframe.decode/frecnt-2.c'; fi`
+-@am__fastdepCC_TRUE@ $(AM_V_at)$(am__mv) testsuite/libsframe.decode/$(DEPDIR)/testsuite_libsframe_decode_frecnt_2-frecnt-2.Tpo testsuite/libsframe.decode/$(DEPDIR)/testsuite_libsframe_decode_frecnt_2-frecnt-2.Po
+-@AMDEP_TRUE@@am__fastdepCC_FALSE@ $(AM_V_CC)source='testsuite/libsframe.decode/frecnt-2.c' object='testsuite/libsframe.decode/testsuite_libsframe_decode_frecnt_2-frecnt-2.obj' libtool=no @AMDEPBACKSLASH@
++testsuite/libsframe.decode/frecnt_2-frecnt-2.obj: testsuite/libsframe.decode/frecnt-2.c
++@am__fastdepCC_TRUE@ $(AM_V_CC)$(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(testsuite_libsframe_decode_frecnt_2_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT testsuite/libsframe.decode/frecnt_2-frecnt-2.obj -MD -MP -MF testsuite/libsframe.decode/$(DEPDIR)/frecnt_2-frecnt-2.Tpo -c -o testsuite/libsframe.decode/frecnt_2-frecnt-2.obj `if test -f 'testsuite/libsframe.decode/frecnt-2.c'; then $(CYGPATH_W) 'testsuite/libsframe.decode/frecnt-2.c'; else $(CYGPATH_W) '$(srcdir)/testsuite/libsframe.decode/frecnt-2.c'; fi`
++@am__fastdepCC_TRUE@ $(AM_V_at)$(am__mv) testsuite/libsframe.decode/$(DEPDIR)/frecnt_2-frecnt-2.Tpo testsuite/libsframe.decode/$(DEPDIR)/frecnt_2-frecnt-2.Po
++@AMDEP_TRUE@@am__fastdepCC_FALSE@ $(AM_V_CC)source='testsuite/libsframe.decode/frecnt-2.c' object='testsuite/libsframe.decode/frecnt_2-frecnt-2.obj' libtool=no @AMDEPBACKSLASH@
+ @AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@
+-@am__fastdepCC_FALSE@ $(AM_V_CC@am__nodep@)$(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(testsuite_libsframe_decode_frecnt_2_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o testsuite/libsframe.decode/testsuite_libsframe_decode_frecnt_2-frecnt-2.obj `if test -f 'testsuite/libsframe.decode/frecnt-2.c'; then $(CYGPATH_W) 'testsuite/libsframe.decode/frecnt-2.c'; else $(CYGPATH_W) '$(srcdir)/testsuite/libsframe.decode/frecnt-2.c'; fi`
++@am__fastdepCC_FALSE@ $(AM_V_CC@am__nodep@)$(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(testsuite_libsframe_decode_frecnt_2_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o testsuite/libsframe.decode/frecnt_2-frecnt-2.obj `if test -f 'testsuite/libsframe.decode/frecnt-2.c'; then $(CYGPATH_W) 'testsuite/libsframe.decode/frecnt-2.c'; else $(CYGPATH_W) '$(srcdir)/testsuite/libsframe.decode/frecnt-2.c'; fi`
+
+-testsuite/libsframe.encode/testsuite_libsframe_encode_encode_1-encode-1.o: testsuite/libsframe.encode/encode-1.c
+-@am__fastdepCC_TRUE@ $(AM_V_CC)$(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(testsuite_libsframe_encode_encode_1_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT testsuite/libsframe.encode/testsuite_libsframe_encode_encode_1-encode-1.o -MD -MP -MF testsuite/libsframe.encode/$(DEPDIR)/testsuite_libsframe_encode_encode_1-encode-1.Tpo -c -o testsuite/libsframe.encode/testsuite_libsframe_encode_encode_1-encode-1.o `test -f 'testsuite/libsframe.encode/encode-1.c' || echo '$(srcdir)/'`testsuite/libsframe.encode/encode-1.c
+-@am__fastdepCC_TRUE@ $(AM_V_at)$(am__mv) testsuite/libsframe.encode/$(DEPDIR)/testsuite_libsframe_encode_encode_1-encode-1.Tpo testsuite/libsframe.encode/$(DEPDIR)/testsuite_libsframe_encode_encode_1-encode-1.Po
+-@AMDEP_TRUE@@am__fastdepCC_FALSE@ $(AM_V_CC)source='testsuite/libsframe.encode/encode-1.c' object='testsuite/libsframe.encode/testsuite_libsframe_encode_encode_1-encode-1.o' libtool=no @AMDEPBACKSLASH@
++testsuite/libsframe.encode/encode_1-encode-1.o: testsuite/libsframe.encode/encode-1.c
++@am__fastdepCC_TRUE@ $(AM_V_CC)$(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(testsuite_libsframe_encode_encode_1_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT testsuite/libsframe.encode/encode_1-encode-1.o -MD -MP -MF testsuite/libsframe.encode/$(DEPDIR)/encode_1-encode-1.Tpo -c -o testsuite/libsframe.encode/encode_1-encode-1.o `test -f 'testsuite/libsframe.encode/encode-1.c' || echo '$(srcdir)/'`testsuite/libsframe.encode/encode-1.c
++@am__fastdepCC_TRUE@ $(AM_V_at)$(am__mv) testsuite/libsframe.encode/$(DEPDIR)/encode_1-encode-1.Tpo testsuite/libsframe.encode/$(DEPDIR)/encode_1-encode-1.Po
++@AMDEP_TRUE@@am__fastdepCC_FALSE@ $(AM_V_CC)source='testsuite/libsframe.encode/encode-1.c' object='testsuite/libsframe.encode/encode_1-encode-1.o' libtool=no @AMDEPBACKSLASH@
+ @AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@
+-@am__fastdepCC_FALSE@ $(AM_V_CC@am__nodep@)$(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(testsuite_libsframe_encode_encode_1_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o testsuite/libsframe.encode/testsuite_libsframe_encode_encode_1-encode-1.o `test -f 'testsuite/libsframe.encode/encode-1.c' || echo '$(srcdir)/'`testsuite/libsframe.encode/encode-1.c
++@am__fastdepCC_FALSE@ $(AM_V_CC@am__nodep@)$(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(testsuite_libsframe_encode_encode_1_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o testsuite/libsframe.encode/encode_1-encode-1.o `test -f 'testsuite/libsframe.encode/encode-1.c' || echo '$(srcdir)/'`testsuite/libsframe.encode/encode-1.c
+
+-testsuite/libsframe.encode/testsuite_libsframe_encode_encode_1-encode-1.obj: testsuite/libsframe.encode/encode-1.c
+-@am__fastdepCC_TRUE@ $(AM_V_CC)$(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(testsuite_libsframe_encode_encode_1_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT testsuite/libsframe.encode/testsuite_libsframe_encode_encode_1-encode-1.obj -MD -MP -MF testsuite/libsframe.encode/$(DEPDIR)/testsuite_libsframe_encode_encode_1-encode-1.Tpo -c -o testsuite/libsframe.encode/testsuite_libsframe_encode_encode_1-encode-1.obj `if test -f 'testsuite/libsframe.encode/encode-1.c'; then $(CYGPATH_W) 'testsuite/libsframe.encode/encode-1.c'; else $(CYGPATH_W) '$(srcdir)/testsuite/libsframe.encode/encode-1.c'; fi`
+-@am__fastdepCC_TRUE@ $(AM_V_at)$(am__mv) testsuite/libsframe.encode/$(DEPDIR)/testsuite_libsframe_encode_encode_1-encode-1.Tpo testsuite/libsframe.encode/$(DEPDIR)/testsuite_libsframe_encode_encode_1-encode-1.Po
+-@AMDEP_TRUE@@am__fastdepCC_FALSE@ $(AM_V_CC)source='testsuite/libsframe.encode/encode-1.c' object='testsuite/libsframe.encode/testsuite_libsframe_encode_encode_1-encode-1.obj' libtool=no @AMDEPBACKSLASH@
++testsuite/libsframe.encode/encode_1-encode-1.obj: testsuite/libsframe.encode/encode-1.c
++@am__fastdepCC_TRUE@ $(AM_V_CC)$(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(testsuite_libsframe_encode_encode_1_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT testsuite/libsframe.encode/encode_1-encode-1.obj -MD -MP -MF testsuite/libsframe.encode/$(DEPDIR)/encode_1-encode-1.Tpo -c -o testsuite/libsframe.encode/encode_1-encode-1.obj `if test -f 'testsuite/libsframe.encode/encode-1.c'; then $(CYGPATH_W) 'testsuite/libsframe.encode/encode-1.c'; else $(CYGPATH_W) '$(srcdir)/testsuite/libsframe.encode/encode-1.c'; fi`
++@am__fastdepCC_TRUE@ $(AM_V_at)$(am__mv) testsuite/libsframe.encode/$(DEPDIR)/encode_1-encode-1.Tpo testsuite/libsframe.encode/$(DEPDIR)/encode_1-encode-1.Po
++@AMDEP_TRUE@@am__fastdepCC_FALSE@ $(AM_V_CC)source='testsuite/libsframe.encode/encode-1.c' object='testsuite/libsframe.encode/encode_1-encode-1.obj' libtool=no @AMDEPBACKSLASH@
+ @AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@
+-@am__fastdepCC_FALSE@ $(AM_V_CC@am__nodep@)$(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(testsuite_libsframe_encode_encode_1_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o testsuite/libsframe.encode/testsuite_libsframe_encode_encode_1-encode-1.obj `if test -f 'testsuite/libsframe.encode/encode-1.c'; then $(CYGPATH_W) 'testsuite/libsframe.encode/encode-1.c'; else $(CYGPATH_W) '$(srcdir)/testsuite/libsframe.encode/encode-1.c'; fi`
++@am__fastdepCC_FALSE@ $(AM_V_CC@am__nodep@)$(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(testsuite_libsframe_encode_encode_1_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o testsuite/libsframe.encode/encode_1-encode-1.obj `if test -f 'testsuite/libsframe.encode/encode-1.c'; then $(CYGPATH_W) 'testsuite/libsframe.encode/encode-1.c'; else $(CYGPATH_W) '$(srcdir)/testsuite/libsframe.encode/encode-1.c'; fi`
+
+-testsuite/libsframe.find/testsuite_libsframe_find_findfre_1-findfre-1.o: testsuite/libsframe.find/findfre-1.c
+-@am__fastdepCC_TRUE@ $(AM_V_CC)$(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(testsuite_libsframe_find_findfre_1_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT testsuite/libsframe.find/testsuite_libsframe_find_findfre_1-findfre-1.o -MD -MP -MF testsuite/libsframe.find/$(DEPDIR)/testsuite_libsframe_find_findfre_1-findfre-1.Tpo -c -o testsuite/libsframe.find/testsuite_libsframe_find_findfre_1-findfre-1.o `test -f 'testsuite/libsframe.find/findfre-1.c' || echo '$(srcdir)/'`testsuite/libsframe.find/findfre-1.c
+-@am__fastdepCC_TRUE@ $(AM_V_at)$(am__mv) testsuite/libsframe.find/$(DEPDIR)/testsuite_libsframe_find_findfre_1-findfre-1.Tpo testsuite/libsframe.find/$(DEPDIR)/testsuite_libsframe_find_findfre_1-findfre-1.Po
+-@AMDEP_TRUE@@am__fastdepCC_FALSE@ $(AM_V_CC)source='testsuite/libsframe.find/findfre-1.c' object='testsuite/libsframe.find/testsuite_libsframe_find_findfre_1-findfre-1.o' libtool=no @AMDEPBACKSLASH@
++testsuite/libsframe.find/findfre_1-findfre-1.o: testsuite/libsframe.find/findfre-1.c
++@am__fastdepCC_TRUE@ $(AM_V_CC)$(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(testsuite_libsframe_find_findfre_1_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT testsuite/libsframe.find/findfre_1-findfre-1.o -MD -MP -MF testsuite/libsframe.find/$(DEPDIR)/findfre_1-findfre-1.Tpo -c -o testsuite/libsframe.find/findfre_1-findfre-1.o `test -f 'testsuite/libsframe.find/findfre-1.c' || echo '$(srcdir)/'`testsuite/libsframe.find/findfre-1.c
++@am__fastdepCC_TRUE@ $(AM_V_at)$(am__mv) testsuite/libsframe.find/$(DEPDIR)/findfre_1-findfre-1.Tpo testsuite/libsframe.find/$(DEPDIR)/findfre_1-findfre-1.Po
++@AMDEP_TRUE@@am__fastdepCC_FALSE@ $(AM_V_CC)source='testsuite/libsframe.find/findfre-1.c' object='testsuite/libsframe.find/findfre_1-findfre-1.o' libtool=no @AMDEPBACKSLASH@
+ @AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@
+-@am__fastdepCC_FALSE@ $(AM_V_CC@am__nodep@)$(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(testsuite_libsframe_find_findfre_1_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o testsuite/libsframe.find/testsuite_libsframe_find_findfre_1-findfre-1.o `test -f 'testsuite/libsframe.find/findfre-1.c' || echo '$(srcdir)/'`testsuite/libsframe.find/findfre-1.c
++@am__fastdepCC_FALSE@ $(AM_V_CC@am__nodep@)$(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(testsuite_libsframe_find_findfre_1_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o testsuite/libsframe.find/findfre_1-findfre-1.o `test -f 'testsuite/libsframe.find/findfre-1.c' || echo '$(srcdir)/'`testsuite/libsframe.find/findfre-1.c
+
+-testsuite/libsframe.find/testsuite_libsframe_find_findfre_1-findfre-1.obj: testsuite/libsframe.find/findfre-1.c
+-@am__fastdepCC_TRUE@ $(AM_V_CC)$(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(testsuite_libsframe_find_findfre_1_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT testsuite/libsframe.find/testsuite_libsframe_find_findfre_1-findfre-1.obj -MD -MP -MF testsuite/libsframe.find/$(DEPDIR)/testsuite_libsframe_find_findfre_1-findfre-1.Tpo -c -o testsuite/libsframe.find/testsuite_libsframe_find_findfre_1-findfre-1.obj `if test -f 'testsuite/libsframe.find/findfre-1.c'; then $(CYGPATH_W) 'testsuite/libsframe.find/findfre-1.c'; else $(CYGPATH_W) '$(srcdir)/testsuite/libsframe.find/findfre-1.c'; fi`
+-@am__fastdepCC_TRUE@ $(AM_V_at)$(am__mv) testsuite/libsframe.find/$(DEPDIR)/testsuite_libsframe_find_findfre_1-findfre-1.Tpo testsuite/libsframe.find/$(DEPDIR)/testsuite_libsframe_find_findfre_1-findfre-1.Po
+-@AMDEP_TRUE@@am__fastdepCC_FALSE@ $(AM_V_CC)source='testsuite/libsframe.find/findfre-1.c' object='testsuite/libsframe.find/testsuite_libsframe_find_findfre_1-findfre-1.obj' libtool=no @AMDEPBACKSLASH@
++testsuite/libsframe.find/findfre_1-findfre-1.obj: testsuite/libsframe.find/findfre-1.c
++@am__fastdepCC_TRUE@ $(AM_V_CC)$(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(testsuite_libsframe_find_findfre_1_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT testsuite/libsframe.find/findfre_1-findfre-1.obj -MD -MP -MF testsuite/libsframe.find/$(DEPDIR)/findfre_1-findfre-1.Tpo -c -o testsuite/libsframe.find/findfre_1-findfre-1.obj `if test -f 'testsuite/libsframe.find/findfre-1.c'; then $(CYGPATH_W) 'testsuite/libsframe.find/findfre-1.c'; else $(CYGPATH_W) '$(srcdir)/testsuite/libsframe.find/findfre-1.c'; fi`
++@am__fastdepCC_TRUE@ $(AM_V_at)$(am__mv) testsuite/libsframe.find/$(DEPDIR)/findfre_1-findfre-1.Tpo testsuite/libsframe.find/$(DEPDIR)/findfre_1-findfre-1.Po
++@AMDEP_TRUE@@am__fastdepCC_FALSE@ $(AM_V_CC)source='testsuite/libsframe.find/findfre-1.c' object='testsuite/libsframe.find/findfre_1-findfre-1.obj' libtool=no @AMDEPBACKSLASH@
+ @AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@
+-@am__fastdepCC_FALSE@ $(AM_V_CC@am__nodep@)$(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(testsuite_libsframe_find_findfre_1_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o testsuite/libsframe.find/testsuite_libsframe_find_findfre_1-findfre-1.obj `if test -f 'testsuite/libsframe.find/findfre-1.c'; then $(CYGPATH_W) 'testsuite/libsframe.find/findfre-1.c'; else $(CYGPATH_W) '$(srcdir)/testsuite/libsframe.find/findfre-1.c'; fi`
++@am__fastdepCC_FALSE@ $(AM_V_CC@am__nodep@)$(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(testsuite_libsframe_find_findfre_1_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o testsuite/libsframe.find/findfre_1-findfre-1.obj `if test -f 'testsuite/libsframe.find/findfre-1.c'; then $(CYGPATH_W) 'testsuite/libsframe.find/findfre-1.c'; else $(CYGPATH_W) '$(srcdir)/testsuite/libsframe.find/findfre-1.c'; fi`
+
+-testsuite/libsframe.find/testsuite_libsframe_find_findfunc_1-findfunc-1.o: testsuite/libsframe.find/findfunc-1.c
+-@am__fastdepCC_TRUE@ $(AM_V_CC)$(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(testsuite_libsframe_find_findfunc_1_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT testsuite/libsframe.find/testsuite_libsframe_find_findfunc_1-findfunc-1.o -MD -MP -MF testsuite/libsframe.find/$(DEPDIR)/testsuite_libsframe_find_findfunc_1-findfunc-1.Tpo -c -o testsuite/libsframe.find/testsuite_libsframe_find_findfunc_1-findfunc-1.o `test -f 'testsuite/libsframe.find/findfunc-1.c' || echo '$(srcdir)/'`testsuite/libsframe.find/findfunc-1.c
+-@am__fastdepCC_TRUE@ $(AM_V_at)$(am__mv) testsuite/libsframe.find/$(DEPDIR)/testsuite_libsframe_find_findfunc_1-findfunc-1.Tpo testsuite/libsframe.find/$(DEPDIR)/testsuite_libsframe_find_findfunc_1-findfunc-1.Po
+-@AMDEP_TRUE@@am__fastdepCC_FALSE@ $(AM_V_CC)source='testsuite/libsframe.find/findfunc-1.c' object='testsuite/libsframe.find/testsuite_libsframe_find_findfunc_1-findfunc-1.o' libtool=no @AMDEPBACKSLASH@
++testsuite/libsframe.find/findfunc_1-findfunc-1.o: testsuite/libsframe.find/findfunc-1.c
++@am__fastdepCC_TRUE@ $(AM_V_CC)$(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(testsuite_libsframe_find_findfunc_1_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT testsuite/libsframe.find/findfunc_1-findfunc-1.o -MD -MP -MF testsuite/libsframe.find/$(DEPDIR)/findfunc_1-findfunc-1.Tpo -c -o testsuite/libsframe.find/findfunc_1-findfunc-1.o `test -f 'testsuite/libsframe.find/findfunc-1.c' || echo '$(srcdir)/'`testsuite/libsframe.find/findfunc-1.c
++@am__fastdepCC_TRUE@ $(AM_V_at)$(am__mv) testsuite/libsframe.find/$(DEPDIR)/findfunc_1-findfunc-1.Tpo testsuite/libsframe.find/$(DEPDIR)/findfunc_1-findfunc-1.Po
++@AMDEP_TRUE@@am__fastdepCC_FALSE@ $(AM_V_CC)source='testsuite/libsframe.find/findfunc-1.c' object='testsuite/libsframe.find/findfunc_1-findfunc-1.o' libtool=no @AMDEPBACKSLASH@
+ @AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@
+-@am__fastdepCC_FALSE@ $(AM_V_CC@am__nodep@)$(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(testsuite_libsframe_find_findfunc_1_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o testsuite/libsframe.find/testsuite_libsframe_find_findfunc_1-findfunc-1.o `test -f 'testsuite/libsframe.find/findfunc-1.c' || echo '$(srcdir)/'`testsuite/libsframe.find/findfunc-1.c
++@am__fastdepCC_FALSE@ $(AM_V_CC@am__nodep@)$(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(testsuite_libsframe_find_findfunc_1_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o testsuite/libsframe.find/findfunc_1-findfunc-1.o `test -f 'testsuite/libsframe.find/findfunc-1.c' || echo '$(srcdir)/'`testsuite/libsframe.find/findfunc-1.c
+
+-testsuite/libsframe.find/testsuite_libsframe_find_findfunc_1-findfunc-1.obj: testsuite/libsframe.find/findfunc-1.c
+-@am__fastdepCC_TRUE@ $(AM_V_CC)$(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(testsuite_libsframe_find_findfunc_1_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT testsuite/libsframe.find/testsuite_libsframe_find_findfunc_1-findfunc-1.obj -MD -MP -MF testsuite/libsframe.find/$(DEPDIR)/testsuite_libsframe_find_findfunc_1-findfunc-1.Tpo -c -o testsuite/libsframe.find/testsuite_libsframe_find_findfunc_1-findfunc-1.obj `if test -f 'testsuite/libsframe.find/findfunc-1.c'; then $(CYGPATH_W) 'testsuite/libsframe.find/findfunc-1.c'; else $(CYGPATH_W) '$(srcdir)/testsuite/libsframe.find/findfunc-1.c'; fi`
+-@am__fastdepCC_TRUE@ $(AM_V_at)$(am__mv) testsuite/libsframe.find/$(DEPDIR)/testsuite_libsframe_find_findfunc_1-findfunc-1.Tpo testsuite/libsframe.find/$(DEPDIR)/testsuite_libsframe_find_findfunc_1-findfunc-1.Po
+-@AMDEP_TRUE@@am__fastdepCC_FALSE@ $(AM_V_CC)source='testsuite/libsframe.find/findfunc-1.c' object='testsuite/libsframe.find/testsuite_libsframe_find_findfunc_1-findfunc-1.obj' libtool=no @AMDEPBACKSLASH@
++testsuite/libsframe.find/findfunc_1-findfunc-1.obj: testsuite/libsframe.find/findfunc-1.c
++@am__fastdepCC_TRUE@ $(AM_V_CC)$(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(testsuite_libsframe_find_findfunc_1_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT testsuite/libsframe.find/findfunc_1-findfunc-1.obj -MD -MP -MF testsuite/libsframe.find/$(DEPDIR)/findfunc_1-findfunc-1.Tpo -c -o testsuite/libsframe.find/findfunc_1-findfunc-1.obj `if test -f 'testsuite/libsframe.find/findfunc-1.c'; then $(CYGPATH_W) 'testsuite/libsframe.find/findfunc-1.c'; else $(CYGPATH_W) '$(srcdir)/testsuite/libsframe.find/findfunc-1.c'; fi`
++@am__fastdepCC_TRUE@ $(AM_V_at)$(am__mv) testsuite/libsframe.find/$(DEPDIR)/findfunc_1-findfunc-1.Tpo testsuite/libsframe.find/$(DEPDIR)/findfunc_1-findfunc-1.Po
++@AMDEP_TRUE@@am__fastdepCC_FALSE@ $(AM_V_CC)source='testsuite/libsframe.find/findfunc-1.c' object='testsuite/libsframe.find/findfunc_1-findfunc-1.obj' libtool=no @AMDEPBACKSLASH@
+ @AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@
+-@am__fastdepCC_FALSE@ $(AM_V_CC@am__nodep@)$(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(testsuite_libsframe_find_findfunc_1_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o testsuite/libsframe.find/testsuite_libsframe_find_findfunc_1-findfunc-1.obj `if test -f 'testsuite/libsframe.find/findfunc-1.c'; then $(CYGPATH_W) 'testsuite/libsframe.find/findfunc-1.c'; else $(CYGPATH_W) '$(srcdir)/testsuite/libsframe.find/findfunc-1.c'; fi`
++@am__fastdepCC_FALSE@ $(AM_V_CC@am__nodep@)$(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(testsuite_libsframe_find_findfunc_1_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o testsuite/libsframe.find/findfunc_1-findfunc-1.obj `if test -f 'testsuite/libsframe.find/findfunc-1.c'; then $(CYGPATH_W) 'testsuite/libsframe.find/findfunc-1.c'; else $(CYGPATH_W) '$(srcdir)/testsuite/libsframe.find/findfunc-1.c'; fi`
+
+-testsuite/libsframe.find/testsuite_libsframe_find_plt_findfre_1-plt-findfre-1.o: testsuite/libsframe.find/plt-findfre-1.c
+-@am__fastdepCC_TRUE@ $(AM_V_CC)$(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(testsuite_libsframe_find_plt_findfre_1_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT testsuite/libsframe.find/testsuite_libsframe_find_plt_findfre_1-plt-findfre-1.o -MD -MP -MF testsuite/libsframe.find/$(DEPDIR)/testsuite_libsframe_find_plt_findfre_1-plt-findfre-1.Tpo -c -o testsuite/libsframe.find/testsuite_libsframe_find_plt_findfre_1-plt-findfre-1.o `test -f 'testsuite/libsframe.find/plt-findfre-1.c' || echo '$(srcdir)/'`testsuite/libsframe.find/plt-findfre-1.c
+-@am__fastdepCC_TRUE@ $(AM_V_at)$(am__mv) testsuite/libsframe.find/$(DEPDIR)/testsuite_libsframe_find_plt_findfre_1-plt-findfre-1.Tpo testsuite/libsframe.find/$(DEPDIR)/testsuite_libsframe_find_plt_findfre_1-plt-findfre-1.Po
+-@AMDEP_TRUE@@am__fastdepCC_FALSE@ $(AM_V_CC)source='testsuite/libsframe.find/plt-findfre-1.c' object='testsuite/libsframe.find/testsuite_libsframe_find_plt_findfre_1-plt-findfre-1.o' libtool=no @AMDEPBACKSLASH@
++testsuite/libsframe.find/plt_findfre_1-plt-findfre-1.o: testsuite/libsframe.find/plt-findfre-1.c
++@am__fastdepCC_TRUE@ $(AM_V_CC)$(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(testsuite_libsframe_find_plt_findfre_1_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT testsuite/libsframe.find/plt_findfre_1-plt-findfre-1.o -MD -MP -MF testsuite/libsframe.find/$(DEPDIR)/plt_findfre_1-plt-findfre-1.Tpo -c -o testsuite/libsframe.find/plt_findfre_1-plt-findfre-1.o `test -f 'testsuite/libsframe.find/plt-findfre-1.c' || echo '$(srcdir)/'`testsuite/libsframe.find/plt-findfre-1.c
++@am__fastdepCC_TRUE@ $(AM_V_at)$(am__mv) testsuite/libsframe.find/$(DEPDIR)/plt_findfre_1-plt-findfre-1.Tpo testsuite/libsframe.find/$(DEPDIR)/plt_findfre_1-plt-findfre-1.Po
++@AMDEP_TRUE@@am__fastdepCC_FALSE@ $(AM_V_CC)source='testsuite/libsframe.find/plt-findfre-1.c' object='testsuite/libsframe.find/plt_findfre_1-plt-findfre-1.o' libtool=no @AMDEPBACKSLASH@
+ @AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@
+-@am__fastdepCC_FALSE@ $(AM_V_CC@am__nodep@)$(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(testsuite_libsframe_find_plt_findfre_1_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o testsuite/libsframe.find/testsuite_libsframe_find_plt_findfre_1-plt-findfre-1.o `test -f 'testsuite/libsframe.find/plt-findfre-1.c' || echo '$(srcdir)/'`testsuite/libsframe.find/plt-findfre-1.c
++@am__fastdepCC_FALSE@ $(AM_V_CC@am__nodep@)$(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(testsuite_libsframe_find_plt_findfre_1_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o testsuite/libsframe.find/plt_findfre_1-plt-findfre-1.o `test -f 'testsuite/libsframe.find/plt-findfre-1.c' || echo '$(srcdir)/'`testsuite/libsframe.find/plt-findfre-1.c
+
+-testsuite/libsframe.find/testsuite_libsframe_find_plt_findfre_1-plt-findfre-1.obj: testsuite/libsframe.find/plt-findfre-1.c
+-@am__fastdepCC_TRUE@ $(AM_V_CC)$(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(testsuite_libsframe_find_plt_findfre_1_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT testsuite/libsframe.find/testsuite_libsframe_find_plt_findfre_1-plt-findfre-1.obj -MD -MP -MF testsuite/libsframe.find/$(DEPDIR)/testsuite_libsframe_find_plt_findfre_1-plt-findfre-1.Tpo -c -o testsuite/libsframe.find/testsuite_libsframe_find_plt_findfre_1-plt-findfre-1.obj `if test -f 'testsuite/libsframe.find/plt-findfre-1.c'; then $(CYGPATH_W) 'testsuite/libsframe.find/plt-findfre-1.c'; else $(CYGPATH_W) '$(srcdir)/testsuite/libsframe.find/plt-findfre-1.c'; fi`
+-@am__fastdepCC_TRUE@ $(AM_V_at)$(am__mv) testsuite/libsframe.find/$(DEPDIR)/testsuite_libsframe_find_plt_findfre_1-plt-findfre-1.Tpo testsuite/libsframe.find/$(DEPDIR)/testsuite_libsframe_find_plt_findfre_1-plt-findfre-1.Po
+-@AMDEP_TRUE@@am__fastdepCC_FALSE@ $(AM_V_CC)source='testsuite/libsframe.find/plt-findfre-1.c' object='testsuite/libsframe.find/testsuite_libsframe_find_plt_findfre_1-plt-findfre-1.obj' libtool=no @AMDEPBACKSLASH@
++testsuite/libsframe.find/plt_findfre_1-plt-findfre-1.obj: testsuite/libsframe.find/plt-findfre-1.c
++@am__fastdepCC_TRUE@ $(AM_V_CC)$(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(testsuite_libsframe_find_plt_findfre_1_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT testsuite/libsframe.find/plt_findfre_1-plt-findfre-1.obj -MD -MP -MF testsuite/libsframe.find/$(DEPDIR)/plt_findfre_1-plt-findfre-1.Tpo -c -o testsuite/libsframe.find/plt_findfre_1-plt-findfre-1.obj `if test -f 'testsuite/libsframe.find/plt-findfre-1.c'; then $(CYGPATH_W) 'testsuite/libsframe.find/plt-findfre-1.c'; else $(CYGPATH_W) '$(srcdir)/testsuite/libsframe.find/plt-findfre-1.c'; fi`
++@am__fastdepCC_TRUE@ $(AM_V_at)$(am__mv) testsuite/libsframe.find/$(DEPDIR)/plt_findfre_1-plt-findfre-1.Tpo testsuite/libsframe.find/$(DEPDIR)/plt_findfre_1-plt-findfre-1.Po
++@AMDEP_TRUE@@am__fastdepCC_FALSE@ $(AM_V_CC)source='testsuite/libsframe.find/plt-findfre-1.c' object='testsuite/libsframe.find/plt_findfre_1-plt-findfre-1.obj' libtool=no @AMDEPBACKSLASH@
+ @AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@
+-@am__fastdepCC_FALSE@ $(AM_V_CC@am__nodep@)$(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(testsuite_libsframe_find_plt_findfre_1_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o testsuite/libsframe.find/testsuite_libsframe_find_plt_findfre_1-plt-findfre-1.obj `if test -f 'testsuite/libsframe.find/plt-findfre-1.c'; then $(CYGPATH_W) 'testsuite/libsframe.find/plt-findfre-1.c'; else $(CYGPATH_W) '$(srcdir)/testsuite/libsframe.find/plt-findfre-1.c'; fi`
++@am__fastdepCC_FALSE@ $(AM_V_CC@am__nodep@)$(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(testsuite_libsframe_find_plt_findfre_1_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o testsuite/libsframe.find/plt_findfre_1-plt-findfre-1.obj `if test -f 'testsuite/libsframe.find/plt-findfre-1.c'; then $(CYGPATH_W) 'testsuite/libsframe.find/plt-findfre-1.c'; else $(CYGPATH_W) '$(srcdir)/testsuite/libsframe.find/plt-findfre-1.c'; fi`
+
+ mostlyclean-libtool:
+ -rm -f *.lo
+@@ -1142,7 +1168,7 @@ site.exp: Makefile $(EXTRA_DEJAGNU_SITE_CONFIG)
+ @echo '# Do not edit here. If you wish to override these values' >>site.tmp
+ @echo '# edit the last section' >>site.tmp
+ @echo 'set srcdir "$(srcdir)"' >>site.tmp
+- @echo "set objdir `pwd`" >>site.tmp
++ @echo "set objdir \"`pwd`\"" >>site.tmp
+ @echo 'set build_alias "$(build_alias)"' >>site.tmp
+ @echo 'set build_triplet $(build_triplet)' >>site.tmp
+ @echo 'set host_alias "$(host_alias)"' >>site.tmp
+@@ -1166,8 +1192,10 @@ distclean-DEJAGNU:
+ -l='$(DEJATOOL)'; for tool in $$l; do \
+ rm -f $$tool.sum $$tool.log; \
+ done
++distdir: $(BUILT_SOURCES)
++ $(MAKE) $(AM_MAKEFLAGS) distdir-am
+
+-distdir: $(DISTFILES)
++distdir-am: $(DISTFILES)
+ $(am__remove_distdir)
+ test -d "$(distdir)" || mkdir "$(distdir)"
+ @srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \
+@@ -1225,6 +1253,10 @@ dist-xz: distdir
+ tardir=$(distdir) && $(am__tar) | XZ_OPT=$${XZ_OPT--e} xz -c >$(distdir).tar.xz
+ $(am__post_remove_distdir)
+
++dist-zstd: distdir
++ tardir=$(distdir) && $(am__tar) | zstd -c $${ZSTD_CLEVEL-$${ZSTD_OPT--19}} >$(distdir).tar.zst
++ $(am__post_remove_distdir)
++
+ dist-tarZ: distdir
+ @echo WARNING: "Support for distribution archives compressed with" \
+ "legacy program 'compress' is deprecated." >&2
+@@ -1267,6 +1299,8 @@ distcheck: dist
+ eval GZIP= gzip $(GZIP_ENV) -dc $(distdir).shar.gz | unshar ;;\
+ *.zip*) \
+ unzip $(distdir).zip ;;\
++ *.tar.zst*) \
++ zstd -dc $(distdir).tar.zst | $(am__untar) ;;\
+ esac
+ chmod -R a-w $(distdir)
+ chmod u+w $(distdir)
+@@ -1282,7 +1316,7 @@ distcheck: dist
+ $(DISTCHECK_CONFIGURE_FLAGS) \
+ --srcdir=../.. --prefix="$$dc_install_base" \
+ && $(MAKE) $(AM_MAKEFLAGS) \
+- && $(MAKE) $(AM_MAKEFLAGS) dvi \
++ && $(MAKE) $(AM_MAKEFLAGS) $(AM_DISTCHECK_DVI_TARGET) \
+ && $(MAKE) $(AM_MAKEFLAGS) check \
+ && $(MAKE) $(AM_MAKEFLAGS) install \
+ && $(MAKE) $(AM_MAKEFLAGS) installcheck \
+@@ -1338,6 +1372,8 @@ check-am: all-am
+ $(MAKE) $(AM_MAKEFLAGS) check-DEJAGNU
+ check: check-am
+ all-am: Makefile $(INFO_DEPS) $(LTLIBRARIES) $(HEADERS) config.h
++install-checkPROGRAMS: install-libLTLIBRARIES
++
+ installdirs:
+ for dir in "$(DESTDIR)$(libdir)" "$(DESTDIR)$(infodir)" "$(DESTDIR)$(includedir)"; do \
+ test -z "$$dir" || $(MKDIR_P) "$$dir"; \
+@@ -1390,7 +1426,16 @@ clean-am: clean-aminfo clean-checkPROGRAMS clean-generic \
+
+ distclean: distclean-am
+ -rm -f $(am__CONFIG_DISTCLEAN_FILES)
+- -rm -rf ./$(DEPDIR) testsuite/libsframe.decode/$(DEPDIR) testsuite/libsframe.encode/$(DEPDIR) testsuite/libsframe.find/$(DEPDIR)
++ -rm -f ./$(DEPDIR)/libsframe_la-sframe-dump.Plo
++ -rm -f ./$(DEPDIR)/libsframe_la-sframe-error.Plo
++ -rm -f ./$(DEPDIR)/libsframe_la-sframe.Plo
++ -rm -f testsuite/libsframe.decode/$(DEPDIR)/be_flipping-be-flipping.Po
++ -rm -f testsuite/libsframe.decode/$(DEPDIR)/frecnt_1-frecnt-1.Po
++ -rm -f testsuite/libsframe.decode/$(DEPDIR)/frecnt_2-frecnt-2.Po
++ -rm -f testsuite/libsframe.encode/$(DEPDIR)/encode_1-encode-1.Po
++ -rm -f testsuite/libsframe.find/$(DEPDIR)/findfre_1-findfre-1.Po
++ -rm -f testsuite/libsframe.find/$(DEPDIR)/findfunc_1-findfunc-1.Po
++ -rm -f testsuite/libsframe.find/$(DEPDIR)/plt_findfre_1-plt-findfre-1.Po
+ -rm -f Makefile
+ distclean-am: clean-am distclean-DEJAGNU distclean-compile \
+ distclean-generic distclean-hdr distclean-libtool \
+@@ -1530,7 +1575,16 @@ installcheck-am:
+ maintainer-clean: maintainer-clean-am
+ -rm -f $(am__CONFIG_DISTCLEAN_FILES)
+ -rm -rf $(top_srcdir)/autom4te.cache
+- -rm -rf ./$(DEPDIR) testsuite/libsframe.decode/$(DEPDIR) testsuite/libsframe.encode/$(DEPDIR) testsuite/libsframe.find/$(DEPDIR)
++ -rm -f ./$(DEPDIR)/libsframe_la-sframe-dump.Plo
++ -rm -f ./$(DEPDIR)/libsframe_la-sframe-error.Plo
++ -rm -f ./$(DEPDIR)/libsframe_la-sframe.Plo
++ -rm -f testsuite/libsframe.decode/$(DEPDIR)/be_flipping-be-flipping.Po
++ -rm -f testsuite/libsframe.decode/$(DEPDIR)/frecnt_1-frecnt-1.Po
++ -rm -f testsuite/libsframe.decode/$(DEPDIR)/frecnt_2-frecnt-2.Po
++ -rm -f testsuite/libsframe.encode/$(DEPDIR)/encode_1-encode-1.Po
++ -rm -f testsuite/libsframe.find/$(DEPDIR)/findfre_1-findfre-1.Po
++ -rm -f testsuite/libsframe.find/$(DEPDIR)/findfunc_1-findfunc-1.Po
++ -rm -f testsuite/libsframe.find/$(DEPDIR)/plt_findfre_1-plt-findfre-1.Po
+ -rm -f Makefile
+ maintainer-clean-am: distclean-am maintainer-clean-aminfo \
+ maintainer-clean-generic
+@@ -1554,28 +1608,28 @@ uninstall-am: uninstall-dvi-am uninstall-html-am \
+
+ .MAKE: all check-am install-am install-strip
+
+-.PHONY: CTAGS GTAGS TAGS all all-am am--refresh check check-DEJAGNU \
+- check-am clean clean-aminfo clean-checkPROGRAMS clean-cscope \
+- clean-generic clean-libLTLIBRARIES clean-libtool \
++.PHONY: CTAGS GTAGS TAGS all all-am am--depfiles am--refresh check \
++ check-DEJAGNU check-am clean clean-aminfo clean-checkPROGRAMS \
++ clean-cscope clean-generic clean-libLTLIBRARIES clean-libtool \
+ clean-noinstLTLIBRARIES cscope cscopelist-am ctags ctags-am \
+ dist dist-all dist-bzip2 dist-gzip dist-info dist-lzip \
+- dist-shar dist-tarZ dist-xz dist-zip distcheck distclean \
+- distclean-DEJAGNU distclean-compile distclean-generic \
+- distclean-hdr distclean-libtool distclean-tags distcleancheck \
+- distdir distuninstallcheck dvi dvi-am html html-am html-local \
+- info info-am install install-am install-data install-data-am \
+- install-dvi install-dvi-am install-exec install-exec-am \
+- install-html install-html-am install-includeHEADERS \
+- install-info install-info-am install-libLTLIBRARIES \
+- install-man install-pdf install-pdf-am install-ps \
+- install-ps-am install-strip installcheck installcheck-am \
+- installdirs maintainer-clean maintainer-clean-aminfo \
+- maintainer-clean-generic mostlyclean mostlyclean-aminfo \
+- mostlyclean-compile mostlyclean-generic mostlyclean-libtool \
+- pdf pdf-am ps ps-am tags tags-am uninstall uninstall-am \
+- uninstall-dvi-am uninstall-html-am uninstall-includeHEADERS \
+- uninstall-info-am uninstall-libLTLIBRARIES uninstall-pdf-am \
+- uninstall-ps-am
++ dist-shar dist-tarZ dist-xz dist-zip dist-zstd distcheck \
++ distclean distclean-DEJAGNU distclean-compile \
++ distclean-generic distclean-hdr distclean-libtool \
++ distclean-tags distcleancheck distdir distuninstallcheck dvi \
++ dvi-am html html-am html-local info info-am install install-am \
++ install-data install-data-am install-dvi install-dvi-am \
++ install-exec install-exec-am install-html install-html-am \
++ install-includeHEADERS install-info install-info-am \
++ install-libLTLIBRARIES install-man install-pdf install-pdf-am \
++ install-ps install-ps-am install-strip installcheck \
++ installcheck-am installdirs maintainer-clean \
++ maintainer-clean-aminfo maintainer-clean-generic mostlyclean \
++ mostlyclean-aminfo mostlyclean-compile mostlyclean-generic \
++ mostlyclean-libtool pdf pdf-am ps ps-am tags tags-am uninstall \
++ uninstall-am uninstall-dvi-am uninstall-html-am \
++ uninstall-includeHEADERS uninstall-info-am \
++ uninstall-libLTLIBRARIES uninstall-pdf-am uninstall-ps-am
+
+ .PRECIOUS: Makefile
+
+diff --git a/libsframe/aclocal.m4 b/libsframe/aclocal.m4
+index b0cdd6b184d..c83d4f62ece 100644
+--- a/libsframe/aclocal.m4
++++ b/libsframe/aclocal.m4
+@@ -1,6 +1,6 @@
+-# generated automatically by aclocal 1.15.1 -*- Autoconf -*-
++# generated automatically by aclocal 1.16.5 -*- Autoconf -*-
+
+-# Copyright (C) 1996-2017 Free Software Foundation, Inc.
++# Copyright (C) 1996-2021 Free Software Foundation, Inc.
+
+ # This file is free software; the Free Software Foundation
+ # gives unlimited permission to copy and/or distribute it,
+@@ -20,7 +20,7 @@ You have another version of autoconf. It may work, but is not guaranteed to.
+ If you have problems, you may need to regenerate the build system entirely.
+ To do so, use the procedure documented by the package, typically 'autoreconf'.])])
+
+-# Copyright (C) 2002-2017 Free Software Foundation, Inc.
++# Copyright (C) 2002-2021 Free Software Foundation, Inc.
+ #
+ # This file is free software; the Free Software Foundation
+ # gives unlimited permission to copy and/or distribute it,
+@@ -32,10 +32,10 @@ To do so, use the procedure documented by the package, typically 'autoreconf'.])
+ # generated from the m4 files accompanying Automake X.Y.
+ # (This private macro should not be called outside this file.)
+ AC_DEFUN([AM_AUTOMAKE_VERSION],
+-[am__api_version='1.15'
++[am__api_version='1.16'
+ dnl Some users find AM_AUTOMAKE_VERSION and mistake it for a way to
+ dnl require some minimum version. Point them to the right macro.
+-m4_if([$1], [1.15.1], [],
++m4_if([$1], [1.16.5], [],
+ [AC_FATAL([Do not call $0, use AM_INIT_AUTOMAKE([$1]).])])dnl
+ ])
+
+@@ -51,12 +51,12 @@ m4_define([_AM_AUTOCONF_VERSION], [])
+ # Call AM_AUTOMAKE_VERSION and AM_AUTOMAKE_VERSION so they can be traced.
+ # This function is AC_REQUIREd by AM_INIT_AUTOMAKE.
+ AC_DEFUN([AM_SET_CURRENT_AUTOMAKE_VERSION],
+-[AM_AUTOMAKE_VERSION([1.15.1])dnl
++[AM_AUTOMAKE_VERSION([1.16.5])dnl
+ m4_ifndef([AC_AUTOCONF_VERSION],
+ [m4_copy([m4_PACKAGE_VERSION], [AC_AUTOCONF_VERSION])])dnl
+ _AM_AUTOCONF_VERSION(m4_defn([AC_AUTOCONF_VERSION]))])
+
+-# Copyright (C) 2011-2017 Free Software Foundation, Inc.
++# Copyright (C) 2011-2021 Free Software Foundation, Inc.
+ #
+ # This file is free software; the Free Software Foundation
+ # gives unlimited permission to copy and/or distribute it,
+@@ -118,7 +118,7 @@ AC_SUBST([AR])dnl
+
+ # AM_AUX_DIR_EXPAND -*- Autoconf -*-
+
+-# Copyright (C) 2001-2017 Free Software Foundation, Inc.
++# Copyright (C) 2001-2021 Free Software Foundation, Inc.
+ #
+ # This file is free software; the Free Software Foundation
+ # gives unlimited permission to copy and/or distribute it,
+@@ -170,7 +170,7 @@ am_aux_dir=`cd "$ac_aux_dir" && pwd`
+
+ # AM_CONDITIONAL -*- Autoconf -*-
+
+-# Copyright (C) 1997-2017 Free Software Foundation, Inc.
++# Copyright (C) 1997-2021 Free Software Foundation, Inc.
+ #
+ # This file is free software; the Free Software Foundation
+ # gives unlimited permission to copy and/or distribute it,
+@@ -201,7 +201,7 @@ AC_CONFIG_COMMANDS_PRE(
+ Usually this means the macro was only invoked conditionally.]])
+ fi])])
+
+-# Copyright (C) 1999-2017 Free Software Foundation, Inc.
++# Copyright (C) 1999-2021 Free Software Foundation, Inc.
+ #
+ # This file is free software; the Free Software Foundation
+ # gives unlimited permission to copy and/or distribute it,
+@@ -392,13 +392,12 @@ _AM_SUBST_NOTMAKE([am__nodep])dnl
+
+ # Generate code to set up dependency tracking. -*- Autoconf -*-
+
+-# Copyright (C) 1999-2017 Free Software Foundation, Inc.
++# Copyright (C) 1999-2021 Free Software Foundation, Inc.
+ #
+ # This file is free software; the Free Software Foundation
+ # gives unlimited permission to copy and/or distribute it,
+ # with or without modifications, as long as this notice is preserved.
+
+-
+ # _AM_OUTPUT_DEPENDENCY_COMMANDS
+ # ------------------------------
+ AC_DEFUN([_AM_OUTPUT_DEPENDENCY_COMMANDS],
+@@ -406,49 +405,43 @@ AC_DEFUN([_AM_OUTPUT_DEPENDENCY_COMMANDS],
+ # Older Autoconf quotes --file arguments for eval, but not when files
+ # are listed without --file. Let's play safe and only enable the eval
+ # if we detect the quoting.
+- case $CONFIG_FILES in
+- *\'*) eval set x "$CONFIG_FILES" ;;
+- *) set x $CONFIG_FILES ;;
+- esac
++ # TODO: see whether this extra hack can be removed once we start
++ # requiring Autoconf 2.70 or later.
++ AS_CASE([$CONFIG_FILES],
++ [*\'*], [eval set x "$CONFIG_FILES"],
++ [*], [set x $CONFIG_FILES])
+ shift
+- for mf
++ # Used to flag and report bootstrapping failures.
++ am_rc=0
++ for am_mf
+ do
+ # Strip MF so we end up with the name of the file.
+- mf=`echo "$mf" | sed -e 's/:.*$//'`
+- # Check whether this is an Automake generated Makefile or not.
+- # We used to match only the files named 'Makefile.in', but
+- # some people rename them; so instead we look at the file content.
+- # Grep'ing the first line is not enough: some people post-process
+- # each Makefile.in and add a new line on top of each file to say so.
+- # Grep'ing the whole file is not good either: AIX grep has a line
++ am_mf=`AS_ECHO(["$am_mf"]) | sed -e 's/:.*$//'`
++ # Check whether this is an Automake generated Makefile which includes
++ # dependency-tracking related rules and includes.
++ # Grep'ing the whole file directly is not great: AIX grep has a line
+ # limit of 2048, but all sed's we know have understand at least 4000.
+- if sed -n 's,^#.*generated by automake.*,X,p' "$mf" | grep X >/dev/null 2>&1; then
+- dirpart=`AS_DIRNAME("$mf")`
+- else
+- continue
+- fi
+- # Extract the definition of DEPDIR, am__include, and am__quote
+- # from the Makefile without running 'make'.
+- DEPDIR=`sed -n 's/^DEPDIR = //p' < "$mf"`
+- test -z "$DEPDIR" && continue
+- am__include=`sed -n 's/^am__include = //p' < "$mf"`
+- test -z "$am__include" && continue
+- am__quote=`sed -n 's/^am__quote = //p' < "$mf"`
+- # Find all dependency output files, they are included files with
+- # $(DEPDIR) in their names. We invoke sed twice because it is the
+- # simplest approach to changing $(DEPDIR) to its actual value in the
+- # expansion.
+- for file in `sed -n "
+- s/^$am__include $am__quote\(.*(DEPDIR).*\)$am__quote"'$/\1/p' <"$mf" | \
+- sed -e 's/\$(DEPDIR)/'"$DEPDIR"'/g'`; do
+- # Make sure the directory exists.
+- test -f "$dirpart/$file" && continue
+- fdir=`AS_DIRNAME(["$file"])`
+- AS_MKDIR_P([$dirpart/$fdir])
+- # echo "creating $dirpart/$file"
+- echo '# dummy' > "$dirpart/$file"
+- done
++ sed -n 's,^am--depfiles:.*,X,p' "$am_mf" | grep X >/dev/null 2>&1 \
++ || continue
++ am_dirpart=`AS_DIRNAME(["$am_mf"])`
++ am_filepart=`AS_BASENAME(["$am_mf"])`
++ AM_RUN_LOG([cd "$am_dirpart" \
++ && sed -e '/# am--include-marker/d' "$am_filepart" \
++ | $MAKE -f - am--depfiles]) || am_rc=$?
+ done
++ if test $am_rc -ne 0; then
++ AC_MSG_FAILURE([Something went wrong bootstrapping makefile fragments
++ for automatic dependency tracking. If GNU make was not used, consider
++ re-running the configure script with MAKE="gmake" (or whatever is
++ necessary). You can also try re-running configure with the
++ '--disable-dependency-tracking' option to at least be able to build
++ the package (albeit without support for automatic dependency tracking).])
++ fi
++ AS_UNSET([am_dirpart])
++ AS_UNSET([am_filepart])
++ AS_UNSET([am_mf])
++ AS_UNSET([am_rc])
++ rm -f conftest-deps.mk
+ }
+ ])# _AM_OUTPUT_DEPENDENCY_COMMANDS
+
+@@ -457,18 +450,17 @@ AC_DEFUN([_AM_OUTPUT_DEPENDENCY_COMMANDS],
+ # -----------------------------
+ # This macro should only be invoked once -- use via AC_REQUIRE.
+ #
+-# This code is only required when automatic dependency tracking
+-# is enabled. FIXME. This creates each '.P' file that we will
+-# need in order to bootstrap the dependency handling code.
++# This code is only required when automatic dependency tracking is enabled.
++# This creates each '.Po' and '.Plo' makefile fragment that we'll need in
++# order to bootstrap the dependency handling code.
+ AC_DEFUN([AM_OUTPUT_DEPENDENCY_COMMANDS],
+ [AC_CONFIG_COMMANDS([depfiles],
+ [test x"$AMDEP_TRUE" != x"" || _AM_OUTPUT_DEPENDENCY_COMMANDS],
+- [AMDEP_TRUE="$AMDEP_TRUE" ac_aux_dir="$ac_aux_dir"])
+-])
++ [AMDEP_TRUE="$AMDEP_TRUE" MAKE="${MAKE-make}"])])
+
+ # Do all the work for Automake. -*- Autoconf -*-
+
+-# Copyright (C) 1996-2017 Free Software Foundation, Inc.
++# Copyright (C) 1996-2021 Free Software Foundation, Inc.
+ #
+ # This file is free software; the Free Software Foundation
+ # gives unlimited permission to copy and/or distribute it,
+@@ -496,6 +488,10 @@ m4_defn([AC_PROG_CC])
+ # release and drop the old call support.
+ AC_DEFUN([AM_INIT_AUTOMAKE],
+ [AC_PREREQ([2.65])dnl
++m4_ifdef([_$0_ALREADY_INIT],
++ [m4_fatal([$0 expanded multiple times
++]m4_defn([_$0_ALREADY_INIT]))],
++ [m4_define([_$0_ALREADY_INIT], m4_expansion_stack)])dnl
+ dnl Autoconf wants to disallow AM_ names. We explicitly allow
+ dnl the ones we care about.
+ m4_pattern_allow([^AM_[A-Z]+FLAGS$])dnl
+@@ -532,7 +528,7 @@ m4_ifval([$3], [_AM_SET_OPTION([no-define])])dnl
+ [_AM_SET_OPTIONS([$1])dnl
+ dnl Diagnose old-style AC_INIT with new-style AM_AUTOMAKE_INIT.
+ m4_if(
+- m4_ifdef([AC_PACKAGE_NAME], [ok]):m4_ifdef([AC_PACKAGE_VERSION], [ok]),
++ m4_ifset([AC_PACKAGE_NAME], [ok]):m4_ifset([AC_PACKAGE_VERSION], [ok]),
+ [ok:ok],,
+ [m4_fatal([AC_INIT should be called with package and version arguments])])dnl
+ AC_SUBST([PACKAGE], ['AC_PACKAGE_TARNAME'])dnl
+@@ -555,8 +551,8 @@ AC_REQUIRE([AM_PROG_INSTALL_STRIP])dnl
+ AC_REQUIRE([AC_PROG_MKDIR_P])dnl
+ # For better backward compatibility. To be removed once Automake 1.9.x
+ # dies out for good. For more background, see:
+-# <http://lists.gnu.org/archive/html/automake/2012-07/msg00001.html>
+-# <http://lists.gnu.org/archive/html/automake/2012-07/msg00014.html>
++# <https://lists.gnu.org/archive/html/automake/2012-07/msg00001.html>
++# <https://lists.gnu.org/archive/html/automake/2012-07/msg00014.html>
+ AC_SUBST([mkdir_p], ['$(MKDIR_P)'])
+ # We need awk for the "check" target (and possibly the TAP driver). The
+ # system "awk" is bad on some platforms.
+@@ -584,6 +580,20 @@ AC_PROVIDE_IFELSE([AC_PROG_OBJCXX],
+ [m4_define([AC_PROG_OBJCXX],
+ m4_defn([AC_PROG_OBJCXX])[_AM_DEPENDENCIES([OBJCXX])])])dnl
+ ])
++# Variables for tags utilities; see am/tags.am
++if test -z "$CTAGS"; then
++ CTAGS=ctags
++fi
++AC_SUBST([CTAGS])
++if test -z "$ETAGS"; then
++ ETAGS=etags
++fi
++AC_SUBST([ETAGS])
++if test -z "$CSCOPE"; then
++ CSCOPE=cscope
++fi
++AC_SUBST([CSCOPE])
++
+ AC_REQUIRE([AM_SILENT_RULES])dnl
+ dnl The testsuite driver may need to know about EXEEXT, so add the
+ dnl 'am__EXEEXT' conditional if _AM_COMPILER_EXEEXT was seen. This
+@@ -623,7 +633,7 @@ END
+ Aborting the configuration process, to ensure you take notice of the issue.
+
+ You can download and install GNU coreutils to get an 'rm' implementation
+-that behaves properly: <http://www.gnu.org/software/coreutils/>.
++that behaves properly: <https://www.gnu.org/software/coreutils/>.
+
+ If you want to complete the configuration process using your problematic
+ 'rm' anyway, export the environment variable ACCEPT_INFERIOR_RM_PROGRAM
+@@ -665,7 +675,7 @@ for _am_header in $config_headers :; do
+ done
+ echo "timestamp for $_am_arg" >`AS_DIRNAME(["$_am_arg"])`/stamp-h[]$_am_stamp_count])
+
+-# Copyright (C) 2001-2017 Free Software Foundation, Inc.
++# Copyright (C) 2001-2021 Free Software Foundation, Inc.
+ #
+ # This file is free software; the Free Software Foundation
+ # gives unlimited permission to copy and/or distribute it,
+@@ -689,7 +699,7 @@ AC_SUBST([install_sh])])
+ # Add --enable-maintainer-mode option to configure. -*- Autoconf -*-
+ # From Jim Meyering
+
+-# Copyright (C) 1996-2017 Free Software Foundation, Inc.
++# Copyright (C) 1996-2021 Free Software Foundation, Inc.
+ #
+ # This file is free software; the Free Software Foundation
+ # gives unlimited permission to copy and/or distribute it,
+@@ -724,7 +734,7 @@ AC_MSG_CHECKING([whether to enable maintainer-specific portions of Makefiles])
+
+ # Check to see how 'make' treats includes. -*- Autoconf -*-
+
+-# Copyright (C) 2001-2017 Free Software Foundation, Inc.
++# Copyright (C) 2001-2021 Free Software Foundation, Inc.
+ #
+ # This file is free software; the Free Software Foundation
+ # gives unlimited permission to copy and/or distribute it,
+@@ -732,49 +742,42 @@ AC_MSG_CHECKING([whether to enable maintainer-specific portions of Makefiles])
+
+ # AM_MAKE_INCLUDE()
+ # -----------------
+-# Check to see how make treats includes.
++# Check whether make has an 'include' directive that can support all
++# the idioms we need for our automatic dependency tracking code.
+ AC_DEFUN([AM_MAKE_INCLUDE],
+-[am_make=${MAKE-make}
+-cat > confinc << 'END'
++[AC_MSG_CHECKING([whether ${MAKE-make} supports the include directive])
++cat > confinc.mk << 'END'
+ am__doit:
+- @echo this is the am__doit target
++ @echo this is the am__doit target >confinc.out
+ .PHONY: am__doit
+ END
+-# If we don't find an include directive, just comment out the code.
+-AC_MSG_CHECKING([for style of include used by $am_make])
+ am__include="#"
+ am__quote=
+-_am_result=none
+-# First try GNU make style include.
+-echo "include confinc" > confmf
+-# Ignore all kinds of additional output from 'make'.
+-case `$am_make -s -f confmf 2> /dev/null` in #(
+-*the\ am__doit\ target*)
+- am__include=include
+- am__quote=
+- _am_result=GNU
+- ;;
+-esac
+-# Now try BSD make style include.
+-if test "$am__include" = "#"; then
+- echo '.include "confinc"' > confmf
+- case `$am_make -s -f confmf 2> /dev/null` in #(
+- *the\ am__doit\ target*)
+- am__include=.include
+- am__quote="\""
+- _am_result=BSD
+- ;;
+- esac
+-fi
+-AC_SUBST([am__include])
+-AC_SUBST([am__quote])
+-AC_MSG_RESULT([$_am_result])
+-rm -f confinc confmf
+-])
++# BSD make does it like this.
++echo '.include "confinc.mk" # ignored' > confmf.BSD
++# Other make implementations (GNU, Solaris 10, AIX) do it like this.
++echo 'include confinc.mk # ignored' > confmf.GNU
++_am_result=no
++for s in GNU BSD; do
++ AM_RUN_LOG([${MAKE-make} -f confmf.$s && cat confinc.out])
++ AS_CASE([$?:`cat confinc.out 2>/dev/null`],
++ ['0:this is the am__doit target'],
++ [AS_CASE([$s],
++ [BSD], [am__include='.include' am__quote='"'],
++ [am__include='include' am__quote=''])])
++ if test "$am__include" != "#"; then
++ _am_result="yes ($s style)"
++ break
++ fi
++done
++rm -f confinc.* confmf.*
++AC_MSG_RESULT([${_am_result}])
++AC_SUBST([am__include])])
++AC_SUBST([am__quote])])
+
+ # Fake the existence of programs that GNU maintainers use. -*- Autoconf -*-
+
+-# Copyright (C) 1997-2017 Free Software Foundation, Inc.
++# Copyright (C) 1997-2021 Free Software Foundation, Inc.
+ #
+ # This file is free software; the Free Software Foundation
+ # gives unlimited permission to copy and/or distribute it,
+@@ -795,12 +798,7 @@ AC_DEFUN([AM_MISSING_HAS_RUN],
+ [AC_REQUIRE([AM_AUX_DIR_EXPAND])dnl
+ AC_REQUIRE_AUX_FILE([missing])dnl
+ if test x"${MISSING+set}" != xset; then
+- case $am_aux_dir in
+- *\ * | *\ *)
+- MISSING="\${SHELL} \"$am_aux_dir/missing\"" ;;
+- *)
+- MISSING="\${SHELL} $am_aux_dir/missing" ;;
+- esac
++ MISSING="\${SHELL} '$am_aux_dir/missing'"
+ fi
+ # Use eval to expand $SHELL
+ if eval "$MISSING --is-lightweight"; then
+@@ -813,7 +811,7 @@ fi
+
+ # Helper functions for option handling. -*- Autoconf -*-
+
+-# Copyright (C) 2001-2017 Free Software Foundation, Inc.
++# Copyright (C) 2001-2021 Free Software Foundation, Inc.
+ #
+ # This file is free software; the Free Software Foundation
+ # gives unlimited permission to copy and/or distribute it,
+@@ -842,7 +840,7 @@ AC_DEFUN([_AM_SET_OPTIONS],
+ AC_DEFUN([_AM_IF_OPTION],
+ [m4_ifset(_AM_MANGLE_OPTION([$1]), [$2], [$3])])
+
+-# Copyright (C) 1999-2017 Free Software Foundation, Inc.
++# Copyright (C) 1999-2021 Free Software Foundation, Inc.
+ #
+ # This file is free software; the Free Software Foundation
+ # gives unlimited permission to copy and/or distribute it,
+@@ -889,7 +887,7 @@ AC_LANG_POP([C])])
+ # For backward compatibility.
+ AC_DEFUN_ONCE([AM_PROG_CC_C_O], [AC_REQUIRE([AC_PROG_CC])])
+
+-# Copyright (C) 2001-2017 Free Software Foundation, Inc.
++# Copyright (C) 2001-2021 Free Software Foundation, Inc.
+ #
+ # This file is free software; the Free Software Foundation
+ # gives unlimited permission to copy and/or distribute it,
+@@ -908,7 +906,7 @@ AC_DEFUN([AM_RUN_LOG],
+
+ # Check to make sure that the build environment is sane. -*- Autoconf -*-
+
+-# Copyright (C) 1996-2017 Free Software Foundation, Inc.
++# Copyright (C) 1996-2021 Free Software Foundation, Inc.
+ #
+ # This file is free software; the Free Software Foundation
+ # gives unlimited permission to copy and/or distribute it,
+@@ -989,7 +987,7 @@ AC_CONFIG_COMMANDS_PRE(
+ rm -f conftest.file
+ ])
+
+-# Copyright (C) 2009-2017 Free Software Foundation, Inc.
++# Copyright (C) 2009-2021 Free Software Foundation, Inc.
+ #
+ # This file is free software; the Free Software Foundation
+ # gives unlimited permission to copy and/or distribute it,
+@@ -1049,7 +1047,7 @@ AC_SUBST([AM_BACKSLASH])dnl
+ _AM_SUBST_NOTMAKE([AM_BACKSLASH])dnl
+ ])
+
+-# Copyright (C) 2001-2017 Free Software Foundation, Inc.
++# Copyright (C) 2001-2021 Free Software Foundation, Inc.
+ #
+ # This file is free software; the Free Software Foundation
+ # gives unlimited permission to copy and/or distribute it,
+@@ -1077,7 +1075,7 @@ fi
+ INSTALL_STRIP_PROGRAM="\$(install_sh) -c -s"
+ AC_SUBST([INSTALL_STRIP_PROGRAM])])
+
+-# Copyright (C) 2006-2017 Free Software Foundation, Inc.
++# Copyright (C) 2006-2021 Free Software Foundation, Inc.
+ #
+ # This file is free software; the Free Software Foundation
+ # gives unlimited permission to copy and/or distribute it,
+@@ -1096,7 +1094,7 @@ AC_DEFUN([AM_SUBST_NOTMAKE], [_AM_SUBST_NOTMAKE($@)])
+
+ # Check how to create a tarball. -*- Autoconf -*-
+
+-# Copyright (C) 2004-2017 Free Software Foundation, Inc.
++# Copyright (C) 2004-2021 Free Software Foundation, Inc.
+ #
+ # This file is free software; the Free Software Foundation
+ # gives unlimited permission to copy and/or distribute it,
+diff --git a/libsframe/configure b/libsframe/configure
+index 1d9e5e10695..a234c4815b1 100755
+--- a/libsframe/configure
++++ b/libsframe/configure
+@@ -658,6 +658,8 @@ OTOOL
+ LIPO
+ NMEDIT
+ DSYMUTIL
++MANIFEST_TOOL
++DLLTOOL
+ OBJDUMP
+ LN_S
+ NM
+@@ -682,6 +684,9 @@ AM_BACKSLASH
+ AM_DEFAULT_VERBOSITY
+ AM_DEFAULT_V
+ AM_V
++CSCOPE
++ETAGS
++CTAGS
+ am__fastdepCC_FALSE
+ am__fastdepCC_TRUE
+ CCDEPMODE
+@@ -689,7 +694,6 @@ am__nodep
+ AMDEPBACKSLASH
+ AMDEP_FALSE
+ AMDEP_TRUE
+-am__quote
+ am__include
+ DEPDIR
+ am__untar
+@@ -762,7 +766,8 @@ PACKAGE_VERSION
+ PACKAGE_TARNAME
+ PACKAGE_NAME
+ PATH_SEPARATOR
+-SHELL'
++SHELL
++am__quote'
+ ac_subst_files=''
+ ac_user_opts='
+ enable_option_checking
+@@ -773,6 +778,7 @@ enable_static
+ with_pic
+ enable_fast_install
+ with_gnu_ld
++with_libtool_sysroot
+ enable_libtool_lock
+ enable_largefile
+ enable_maintainer_mode
+@@ -1428,6 +1434,8 @@ Optional Packages:
+ --with-pic try to use only PIC/non-PIC objects [default=use
+ both]
+ --with-gnu-ld assume the C compiler uses GNU ld [default=no]
++ --with-libtool-sysroot=DIR Search for dependent libraries within DIR
++ (or the compiler's sysroot if not specified).
+
+ Some influential environment variables:
+ CC C compiler command
+@@ -3610,7 +3618,7 @@ $as_echo "$ac_cv_safe_to_define___extensions__" >&6; }
+ $as_echo "#define _TANDEM_SOURCE 1" >>confdefs.h
+
+
+-am__api_version='1.15'
++am__api_version='1.16'
+
+ # Find a good install program. We prefer a C program (faster),
+ # so one script is as good as another. But avoid the broken or
+@@ -3783,12 +3791,7 @@ ac_script='s/[\\$]/&&/g;s/;s,x,x,$//'
+ program_transform_name=`$as_echo "$program_transform_name" | sed "$ac_script"`
+
+ if test x"${MISSING+set}" != xset; then
+- case $am_aux_dir in
+- *\ * | *\ *)
+- MISSING="\${SHELL} \"$am_aux_dir/missing\"" ;;
+- *)
+- MISSING="\${SHELL} $am_aux_dir/missing" ;;
+- esac
++ MISSING="\${SHELL} '$am_aux_dir/missing'"
+ fi
+ # Use eval to expand $SHELL
+ if eval "$MISSING --is-lightweight"; then
+@@ -4036,45 +4039,45 @@ DEPDIR="${am__leading_dot}deps"
+
+ ac_config_commands="$ac_config_commands depfiles"
+
+-
+-am_make=${MAKE-make}
+-cat > confinc << 'END'
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether ${MAKE-make} supports the include directive" >&5
++$as_echo_n "checking whether ${MAKE-make} supports the include directive... " >&6; }
++cat > confinc.mk << 'END'
+ am__doit:
+- @echo this is the am__doit target
++ @echo this is the am__doit target >confinc.out
+ .PHONY: am__doit
+ END
+-# If we don't find an include directive, just comment out the code.
+-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for style of include used by $am_make" >&5
+-$as_echo_n "checking for style of include used by $am_make... " >&6; }
+ am__include="#"
+ am__quote=
+-_am_result=none
+-# First try GNU make style include.
+-echo "include confinc" > confmf
+-# Ignore all kinds of additional output from 'make'.
+-case `$am_make -s -f confmf 2> /dev/null` in #(
+-*the\ am__doit\ target*)
+- am__include=include
+- am__quote=
+- _am_result=GNU
+- ;;
+-esac
+-# Now try BSD make style include.
+-if test "$am__include" = "#"; then
+- echo '.include "confinc"' > confmf
+- case `$am_make -s -f confmf 2> /dev/null` in #(
+- *the\ am__doit\ target*)
+- am__include=.include
+- am__quote="\""
+- _am_result=BSD
++# BSD make does it like this.
++echo '.include "confinc.mk" # ignored' > confmf.BSD
++# Other make implementations (GNU, Solaris 10, AIX) do it like this.
++echo 'include confinc.mk # ignored' > confmf.GNU
++_am_result=no
++for s in GNU BSD; do
++ { echo "$as_me:$LINENO: ${MAKE-make} -f confmf.$s && cat confinc.out" >&5
++ (${MAKE-make} -f confmf.$s && cat confinc.out) >&5 2>&5
++ ac_status=$?
++ echo "$as_me:$LINENO: \$? = $ac_status" >&5
++ (exit $ac_status); }
++ case $?:`cat confinc.out 2>/dev/null` in #(
++ '0:this is the am__doit target') :
++ case $s in #(
++ BSD) :
++ am__include='.include' am__quote='"' ;; #(
++ *) :
++ am__include='include' am__quote='' ;;
++esac ;; #(
++ *) :
+ ;;
+- esac
+-fi
+-
+-
+-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $_am_result" >&5
+-$as_echo "$_am_result" >&6; }
+-rm -f confinc confmf
++esac
++ if test "$am__include" != "#"; then
++ _am_result="yes ($s style)"
++ break
++ fi
++done
++rm -f confinc.* confmf.*
++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: ${_am_result}" >&5
++$as_echo "${_am_result}" >&6; }
+
+ # Check whether --enable-dependency-tracking was given.
+ if test "${enable_dependency_tracking+set}" = set; then :
+@@ -4186,8 +4189,8 @@ MAKEINFO=${MAKEINFO-"${am_missing_run}makeinfo"}
+
+ # For better backward compatibility. To be removed once Automake 1.9.x
+ # dies out for good. For more background, see:
+-# <http://lists.gnu.org/archive/html/automake/2012-07/msg00001.html>
+-# <http://lists.gnu.org/archive/html/automake/2012-07/msg00014.html>
++# <https://lists.gnu.org/archive/html/automake/2012-07/msg00001.html>
++# <https://lists.gnu.org/archive/html/automake/2012-07/msg00014.html>
+ mkdir_p='$(MKDIR_P)'
+
+ # We need awk for the "check" target (and possibly the TAP driver). The
+@@ -4334,6 +4337,20 @@ else
+ fi
+
+
++# Variables for tags utilities; see am/tags.am
++if test -z "$CTAGS"; then
++ CTAGS=ctags
++fi
++
++if test -z "$ETAGS"; then
++ ETAGS=etags
++fi
++
++if test -z "$CSCOPE"; then
++ CSCOPE=cscope
++fi
++
++
+
+ # POSIX will say in a future version that running "rm -f" with no argument
+ # is OK; and we want to be able to make that assumption in our Makefile
+@@ -4366,7 +4383,7 @@ END
+ Aborting the configuration process, to ensure you take notice of the issue.
+
+ You can download and install GNU coreutils to get an 'rm' implementation
+-that behaves properly: <http://www.gnu.org/software/coreutils/>.
++that behaves properly: <https://www.gnu.org/software/coreutils/>.
+
+ If you want to complete the configuration process using your problematic
+ 'rm' anyway, export the environment variable ACCEPT_INFERIOR_RM_PROGRAM
+@@ -5351,8 +5368,8 @@ esac
+
+
+
+-macro_version='2.2.7a'
+-macro_revision='1.3134'
++macro_version='2.4'
++macro_revision='1.3293'
+
+
+
+@@ -5463,7 +5480,7 @@ ECHO=$ECHO$ECHO$ECHO$ECHO$ECHO$ECHO
+ { $as_echo "$as_me:${as_lineno-$LINENO}: checking how to print strings" >&5
+ $as_echo_n "checking how to print strings... " >&6; }
+ # Test print first, because it will be a builtin if present.
+-if test "X`print -r -- -n 2>/dev/null`" = X-n && \
++if test "X`( print -r -- -n ) 2>/dev/null`" = X-n && \
+ test "X`print -r -- $ECHO 2>/dev/null`" = "X$ECHO"; then
+ ECHO='print -r --'
+ elif test "X`printf %s $ECHO 2>/dev/null`" = "X$ECHO"; then
+@@ -6156,8 +6173,8 @@ $as_echo_n "checking whether the shell understands some XSI constructs... " >&6;
+ # Try some XSI features
+ xsi_shell=no
+ ( _lt_dummy="a/b/c"
+- test "${_lt_dummy##*/},${_lt_dummy%/*},"${_lt_dummy%"$_lt_dummy"}, \
+- = c,a/b,, \
++ test "${_lt_dummy##*/},${_lt_dummy%/*},${_lt_dummy#??}"${_lt_dummy%"$_lt_dummy"}, \
++ = c,a/b,b/c, \
+ && eval 'test $(( 1 + 1 )) -eq 2 \
+ && test "${#_lt_dummy}" -eq 5' ) >/dev/null 2>&1 \
+ && xsi_shell=yes
+@@ -6206,6 +6223,80 @@ esac
+
+
+
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to convert $build file names to $host format" >&5
++$as_echo_n "checking how to convert $build file names to $host format... " >&6; }
++if ${lt_cv_to_host_file_cmd+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ case $host in
++ *-*-mingw* )
++ case $build in
++ *-*-mingw* ) # actually msys
++ lt_cv_to_host_file_cmd=func_convert_file_msys_to_w32
++ ;;
++ *-*-cygwin* )
++ lt_cv_to_host_file_cmd=func_convert_file_cygwin_to_w32
++ ;;
++ * ) # otherwise, assume *nix
++ lt_cv_to_host_file_cmd=func_convert_file_nix_to_w32
++ ;;
++ esac
++ ;;
++ *-*-cygwin* )
++ case $build in
++ *-*-mingw* ) # actually msys
++ lt_cv_to_host_file_cmd=func_convert_file_msys_to_cygwin
++ ;;
++ *-*-cygwin* )
++ lt_cv_to_host_file_cmd=func_convert_file_noop
++ ;;
++ * ) # otherwise, assume *nix
++ lt_cv_to_host_file_cmd=func_convert_file_nix_to_cygwin
++ ;;
++ esac
++ ;;
++ * ) # unhandled hosts (and "normal" native builds)
++ lt_cv_to_host_file_cmd=func_convert_file_noop
++ ;;
++esac
++
++fi
++
++to_host_file_cmd=$lt_cv_to_host_file_cmd
++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_to_host_file_cmd" >&5
++$as_echo "$lt_cv_to_host_file_cmd" >&6; }
++
++
++
++
++
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to convert $build file names to toolchain format" >&5
++$as_echo_n "checking how to convert $build file names to toolchain format... " >&6; }
++if ${lt_cv_to_tool_file_cmd+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ #assume ordinary cross tools, or native build.
++lt_cv_to_tool_file_cmd=func_convert_file_noop
++case $host in
++ *-*-mingw* )
++ case $build in
++ *-*-mingw* ) # actually msys
++ lt_cv_to_tool_file_cmd=func_convert_file_msys_to_w32
++ ;;
++ esac
++ ;;
++esac
++
++fi
++
++to_tool_file_cmd=$lt_cv_to_tool_file_cmd
++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_to_tool_file_cmd" >&5
++$as_echo "$lt_cv_to_tool_file_cmd" >&6; }
++
++
++
++
++
+ { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $LD option to reload object files" >&5
+ $as_echo_n "checking for $LD option to reload object files... " >&6; }
+ if ${lt_cv_ld_reload_flag+:} false; then :
+@@ -6222,6 +6313,11 @@ case $reload_flag in
+ esac
+ reload_cmds='$LD$reload_flag -o $output$reload_objs'
+ case $host_os in
++ cygwin* | mingw* | pw32* | cegcc*)
++ if test "$GCC" != yes; then
++ reload_cmds=false
++ fi
++ ;;
+ darwin*)
+ if test "$GCC" = yes; then
+ reload_cmds='$LTCC $LTCFLAGS -nostdlib ${wl}-r -o $output$reload_objs'
+@@ -6390,7 +6486,8 @@ mingw* | pw32*)
+ lt_cv_deplibs_check_method='file_magic ^x86 archive import|^x86 DLL'
+ lt_cv_file_magic_cmd='func_win32_libid'
+ else
+- lt_cv_deplibs_check_method='file_magic file format pei*-i386(.*architecture: i386)?'
++ # Keep this pattern in sync with the one in func_win32_libid.
++ lt_cv_deplibs_check_method='file_magic file format (pei*-i386(.*architecture: i386)?|pe-arm-wince|pe-x86-64)'
+ lt_cv_file_magic_cmd='$OBJDUMP -f'
+ fi
+ ;;
+@@ -6549,6 +6646,21 @@ esac
+ fi
+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_deplibs_check_method" >&5
+ $as_echo "$lt_cv_deplibs_check_method" >&6; }
++
++file_magic_glob=
++want_nocaseglob=no
++if test "$build" = "$host"; then
++ case $host_os in
++ mingw* | pw32*)
++ if ( shopt | grep nocaseglob ) >/dev/null 2>&1; then
++ want_nocaseglob=yes
++ else
++ file_magic_glob=`echo aAbBcCdDeEfFgGhHiIjJkKlLmMnNoOpPqQrRsStTuUvVwWxXyYzZ | $SED -e "s/\(..\)/s\/[\1]\/[\1]\/g;/g"`
++ fi
++ ;;
++ esac
++fi
++
+ file_magic_cmd=$lt_cv_file_magic_cmd
+ deplibs_check_method=$lt_cv_deplibs_check_method
+ test -z "$deplibs_check_method" && deplibs_check_method=unknown
+@@ -6564,6 +6676,157 @@ test -z "$deplibs_check_method" && deplibs_check_method=unknown
+
+
+
++
++
++
++
++
++
++
++
++
++
++if test -n "$ac_tool_prefix"; then
++ # Extract the first word of "${ac_tool_prefix}dlltool", so it can be a program name with args.
++set dummy ${ac_tool_prefix}dlltool; ac_word=$2
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
++$as_echo_n "checking for $ac_word... " >&6; }
++if ${ac_cv_prog_DLLTOOL+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ if test -n "$DLLTOOL"; then
++ ac_cv_prog_DLLTOOL="$DLLTOOL" # Let the user override the test.
++else
++as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
++for as_dir in $PATH
++do
++ IFS=$as_save_IFS
++ test -z "$as_dir" && as_dir=.
++ for ac_exec_ext in '' $ac_executable_extensions; do
++ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
++ ac_cv_prog_DLLTOOL="${ac_tool_prefix}dlltool"
++ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
++ break 2
++ fi
++done
++ done
++IFS=$as_save_IFS
++
++fi
++fi
++DLLTOOL=$ac_cv_prog_DLLTOOL
++if test -n "$DLLTOOL"; then
++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $DLLTOOL" >&5
++$as_echo "$DLLTOOL" >&6; }
++else
++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
++$as_echo "no" >&6; }
++fi
++
++
++fi
++if test -z "$ac_cv_prog_DLLTOOL"; then
++ ac_ct_DLLTOOL=$DLLTOOL
++ # Extract the first word of "dlltool", so it can be a program name with args.
++set dummy dlltool; ac_word=$2
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
++$as_echo_n "checking for $ac_word... " >&6; }
++if ${ac_cv_prog_ac_ct_DLLTOOL+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ if test -n "$ac_ct_DLLTOOL"; then
++ ac_cv_prog_ac_ct_DLLTOOL="$ac_ct_DLLTOOL" # Let the user override the test.
++else
++as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
++for as_dir in $PATH
++do
++ IFS=$as_save_IFS
++ test -z "$as_dir" && as_dir=.
++ for ac_exec_ext in '' $ac_executable_extensions; do
++ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
++ ac_cv_prog_ac_ct_DLLTOOL="dlltool"
++ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
++ break 2
++ fi
++done
++ done
++IFS=$as_save_IFS
++
++fi
++fi
++ac_ct_DLLTOOL=$ac_cv_prog_ac_ct_DLLTOOL
++if test -n "$ac_ct_DLLTOOL"; then
++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_DLLTOOL" >&5
++$as_echo "$ac_ct_DLLTOOL" >&6; }
++else
++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
++$as_echo "no" >&6; }
++fi
++
++ if test "x$ac_ct_DLLTOOL" = x; then
++ DLLTOOL="false"
++ else
++ case $cross_compiling:$ac_tool_warned in
++yes:)
++{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
++$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
++ac_tool_warned=yes ;;
++esac
++ DLLTOOL=$ac_ct_DLLTOOL
++ fi
++else
++ DLLTOOL="$ac_cv_prog_DLLTOOL"
++fi
++
++test -z "$DLLTOOL" && DLLTOOL=dlltool
++
++
++
++
++
++
++
++
++
++
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to associate runtime and link libraries" >&5
++$as_echo_n "checking how to associate runtime and link libraries... " >&6; }
++if ${lt_cv_sharedlib_from_linklib_cmd+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ lt_cv_sharedlib_from_linklib_cmd='unknown'
++
++case $host_os in
++cygwin* | mingw* | pw32* | cegcc*)
++ # two different shell functions defined in ltmain.sh
++ # decide which to use based on capabilities of $DLLTOOL
++ case `$DLLTOOL --help 2>&1` in
++ *--identify-strict*)
++ lt_cv_sharedlib_from_linklib_cmd=func_cygming_dll_for_implib
++ ;;
++ *)
++ lt_cv_sharedlib_from_linklib_cmd=func_cygming_dll_for_implib_fallback
++ ;;
++ esac
++ ;;
++*)
++ # fallback: assume linklib IS sharedlib
++ lt_cv_sharedlib_from_linklib_cmd="$ECHO"
++ ;;
++esac
++
++fi
++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_sharedlib_from_linklib_cmd" >&5
++$as_echo "$lt_cv_sharedlib_from_linklib_cmd" >&6; }
++sharedlib_from_linklib_cmd=$lt_cv_sharedlib_from_linklib_cmd
++test -z "$sharedlib_from_linklib_cmd" && sharedlib_from_linklib_cmd=$ECHO
++
++
++
++
++
++
++
+ plugin_option=
+ plugin_names="liblto_plugin.so liblto_plugin-0.dll cyglto_plugin-0.dll"
+ for plugin in $plugin_names; do
+@@ -6578,8 +6841,10 @@ for plugin in $plugin_names; do
+ done
+
+ if test -n "$ac_tool_prefix"; then
+- # Extract the first word of "${ac_tool_prefix}ar", so it can be a program name with args.
+-set dummy ${ac_tool_prefix}ar; ac_word=$2
++ for ac_prog in ar
++ do
++ # Extract the first word of "$ac_tool_prefix$ac_prog", so it can be a program name with args.
++set dummy $ac_tool_prefix$ac_prog; ac_word=$2
+ { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+ $as_echo_n "checking for $ac_word... " >&6; }
+ if ${ac_cv_prog_AR+:} false; then :
+@@ -6595,7 +6860,7 @@ do
+ test -z "$as_dir" && as_dir=.
+ for ac_exec_ext in '' $ac_executable_extensions; do
+ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
+- ac_cv_prog_AR="${ac_tool_prefix}ar"
++ ac_cv_prog_AR="$ac_tool_prefix$ac_prog"
+ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+ break 2
+ fi
+@@ -6615,11 +6880,15 @@ $as_echo "no" >&6; }
+ fi
+
+
++ test -n "$AR" && break
++ done
+ fi
+-if test -z "$ac_cv_prog_AR"; then
++if test -z "$AR"; then
+ ac_ct_AR=$AR
+- # Extract the first word of "ar", so it can be a program name with args.
+-set dummy ar; ac_word=$2
++ for ac_prog in ar
++do
++ # Extract the first word of "$ac_prog", so it can be a program name with args.
++set dummy $ac_prog; ac_word=$2
+ { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+ $as_echo_n "checking for $ac_word... " >&6; }
+ if ${ac_cv_prog_ac_ct_AR+:} false; then :
+@@ -6635,7 +6904,7 @@ do
+ test -z "$as_dir" && as_dir=.
+ for ac_exec_ext in '' $ac_executable_extensions; do
+ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
+- ac_cv_prog_ac_ct_AR="ar"
++ ac_cv_prog_ac_ct_AR="$ac_prog"
+ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+ break 2
+ fi
+@@ -6654,6 +6923,10 @@ else
+ $as_echo "no" >&6; }
+ fi
+
++
++ test -n "$ac_ct_AR" && break
++done
++
+ if test "x$ac_ct_AR" = x; then
+ AR="false"
+ else
+@@ -6665,29 +6938,81 @@ ac_tool_warned=yes ;;
+ esac
+ AR=$ac_ct_AR
+ fi
+-else
+- AR="$ac_cv_prog_AR"
+ fi
+
+-test -z "$AR" && AR=ar
+-if test -n "$plugin_option"; then
+- if $AR --help 2>&1 | grep -q "\--plugin"; then
+- touch conftest.c
+- $AR $plugin_option rc conftest.a conftest.c
+- if test "$?" != 0; then
+- { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: Failed: $AR $plugin_option rc" >&5
++ touch conftest.c
++ $AR $plugin_option rc conftest.a conftest.c
++ if test "$?" != 0; then
++ { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: Failed: $AR $plugin_option rc" >&5
+ $as_echo "$as_me: WARNING: Failed: $AR $plugin_option rc" >&2;}
+- else
+- AR="$AR $plugin_option"
+- fi
+- rm -f conftest.*
++ else
++ AR="$AR $plugin_option"
+ fi
+-fi
+-test -z "$AR_FLAGS" && AR_FLAGS=cru
++ rm -f conftest.*
++: ${AR=ar}
++: ${AR_FLAGS=cru}
++
++
++
++
++
++
++
+
+
+
+
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for archiver @FILE support" >&5
++$as_echo_n "checking for archiver @FILE support... " >&6; }
++if ${lt_cv_ar_at_file+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ lt_cv_ar_at_file=no
++ cat confdefs.h - <<_ACEOF >conftest.$ac_ext
++/* end confdefs.h. */
++
++int
++main ()
++{
++
++ ;
++ return 0;
++}
++_ACEOF
++if ac_fn_c_try_compile "$LINENO"; then :
++ echo conftest.$ac_objext > conftest.lst
++ lt_ar_try='$AR $AR_FLAGS libconftest.a @conftest.lst >&5'
++ { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$lt_ar_try\""; } >&5
++ (eval $lt_ar_try) 2>&5
++ ac_status=$?
++ $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
++ test $ac_status = 0; }
++ if test "$ac_status" -eq 0; then
++ # Ensure the archiver fails upon bogus file names.
++ rm -f conftest.$ac_objext libconftest.a
++ { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$lt_ar_try\""; } >&5
++ (eval $lt_ar_try) 2>&5
++ ac_status=$?
++ $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
++ test $ac_status = 0; }
++ if test "$ac_status" -ne 0; then
++ lt_cv_ar_at_file=@
++ fi
++ fi
++ rm -f conftest.* libconftest.a
++
++fi
++rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
++
++fi
++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_ar_at_file" >&5
++$as_echo "$lt_cv_ar_at_file" >&6; }
++
++if test "x$lt_cv_ar_at_file" = xno; then
++ archiver_list_spec=
++else
++ archiver_list_spec=$lt_cv_ar_at_file
++fi
+
+
+
+@@ -7034,8 +7359,8 @@ esac
+ lt_cv_sys_global_symbol_to_cdecl="sed -n -e 's/^T .* \(.*\)$/extern int \1();/p' -e 's/^$symcode* .* \(.*\)$/extern char \1;/p'"
+
+ # Transform an extracted symbol line into symbol name and symbol address
+-lt_cv_sys_global_symbol_to_c_name_address="sed -n -e 's/^: \([^ ]*\) $/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"\2\", (void *) \&\2},/p'"
+-lt_cv_sys_global_symbol_to_c_name_address_lib_prefix="sed -n -e 's/^: \([^ ]*\) $/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \(lib[^ ]*\)$/ {\"\2\", (void *) \&\2},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"lib\2\", (void *) \&\2},/p'"
++lt_cv_sys_global_symbol_to_c_name_address="sed -n -e 's/^: \([^ ]*\)[ ]*$/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"\2\", (void *) \&\2},/p'"
++lt_cv_sys_global_symbol_to_c_name_address_lib_prefix="sed -n -e 's/^: \([^ ]*\)[ ]*$/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \(lib[^ ]*\)$/ {\"\2\", (void *) \&\2},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"lib\2\", (void *) \&\2},/p'"
+
+ # Handle CRLF in mingw tool chain
+ opt_cr=
+@@ -7071,6 +7396,7 @@ for ac_symprfx in "" "_"; do
+ else
+ lt_cv_sys_global_symbol_pipe="sed -n -e 's/^.*[ ]\($symcode$symcode*\)[ ][ ]*$ac_symprfx$sympat$opt_cr$/$symxfrm/p'"
+ fi
++ lt_cv_sys_global_symbol_pipe="$lt_cv_sys_global_symbol_pipe | sed '/ __gnu_lto/d'"
+
+ # Check to see that the pipe works correctly.
+ pipe_works=no
+@@ -7112,6 +7438,18 @@ _LT_EOF
+ if $GREP ' nm_test_var$' "$nlist" >/dev/null; then
+ if $GREP ' nm_test_func$' "$nlist" >/dev/null; then
+ cat <<_LT_EOF > conftest.$ac_ext
++/* Keep this code in sync between libtool.m4, ltmain, lt_system.h, and tests. */
++#if defined(_WIN32) || defined(__CYGWIN__) || defined(_WIN32_WCE)
++/* DATA imports from DLLs on WIN32 con't be const, because runtime
++ relocations are performed -- see ld's documentation on pseudo-relocs. */
++# define LT_DLSYM_CONST
++#elif defined(__osf__)
++/* This system does not cope well with relocations in const data. */
++# define LT_DLSYM_CONST
++#else
++# define LT_DLSYM_CONST const
++#endif
++
+ #ifdef __cplusplus
+ extern "C" {
+ #endif
+@@ -7123,7 +7461,7 @@ _LT_EOF
+ cat <<_LT_EOF >> conftest.$ac_ext
+
+ /* The mapping between symbol names and symbols. */
+-const struct {
++LT_DLSYM_CONST struct {
+ const char *name;
+ void *address;
+ }
+@@ -7149,8 +7487,8 @@ static const void *lt_preloaded_setup() {
+ _LT_EOF
+ # Now try linking the two files.
+ mv conftest.$ac_objext conftstm.$ac_objext
+- lt_save_LIBS="$LIBS"
+- lt_save_CFLAGS="$CFLAGS"
++ lt_globsym_save_LIBS=$LIBS
++ lt_globsym_save_CFLAGS=$CFLAGS
+ LIBS="conftstm.$ac_objext"
+ CFLAGS="$CFLAGS$lt_prog_compiler_no_builtin_flag"
+ if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_link\""; } >&5
+@@ -7160,8 +7498,8 @@ _LT_EOF
+ test $ac_status = 0; } && test -s conftest${ac_exeext}; then
+ pipe_works=yes
+ fi
+- LIBS="$lt_save_LIBS"
+- CFLAGS="$lt_save_CFLAGS"
++ LIBS=$lt_globsym_save_LIBS
++ CFLAGS=$lt_globsym_save_CFLAGS
+ else
+ echo "cannot find nm_test_func in $nlist" >&5
+ fi
+@@ -7198,6 +7536,16 @@ else
+ $as_echo "ok" >&6; }
+ fi
+
++# Response file support.
++if test "$lt_cv_nm_interface" = "MS dumpbin"; then
++ nm_file_list_spec='@'
++elif $NM --help 2>/dev/null | grep '[@]FILE' >/dev/null; then
++ nm_file_list_spec='@'
++fi
++
++
++
++
+
+
+
+@@ -7214,6 +7562,45 @@ fi
+
+
+
++
++
++
++
++
++
++
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for sysroot" >&5
++$as_echo_n "checking for sysroot... " >&6; }
++
++# Check whether --with-libtool-sysroot was given.
++if test "${with_libtool_sysroot+set}" = set; then :
++ withval=$with_libtool_sysroot;
++else
++ with_libtool_sysroot=no
++fi
++
++
++lt_sysroot=
++case ${with_libtool_sysroot} in #(
++ yes)
++ if test "$GCC" = yes; then
++ lt_sysroot=`$CC --print-sysroot 2>/dev/null`
++ fi
++ ;; #(
++ /*)
++ lt_sysroot=`echo "$with_libtool_sysroot" | sed -e "$sed_quote_subst"`
++ ;; #(
++ no|'')
++ ;; #(
++ *)
++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: ${with_libtool_sysroot}" >&5
++$as_echo "${with_libtool_sysroot}" >&6; }
++ as_fn_error $? "The sysroot must be an absolute path." "$LINENO" 5
++ ;;
++esac
++
++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: ${lt_sysroot:-no}" >&5
++$as_echo "${lt_sysroot:-no}" >&6; }
+
+
+
+@@ -7419,11 +7806,128 @@ sparc*-*solaris*)
+ ;;
+ esac
+ fi
+- rm -rf conftest*
+- ;;
++ rm -rf conftest*
++ ;;
++esac
++
++need_locks="$enable_libtool_lock"
++
++if test -n "$ac_tool_prefix"; then
++ # Extract the first word of "${ac_tool_prefix}mt", so it can be a program name with args.
++set dummy ${ac_tool_prefix}mt; ac_word=$2
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
++$as_echo_n "checking for $ac_word... " >&6; }
++if ${ac_cv_prog_MANIFEST_TOOL+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ if test -n "$MANIFEST_TOOL"; then
++ ac_cv_prog_MANIFEST_TOOL="$MANIFEST_TOOL" # Let the user override the test.
++else
++as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
++for as_dir in $PATH
++do
++ IFS=$as_save_IFS
++ test -z "$as_dir" && as_dir=.
++ for ac_exec_ext in '' $ac_executable_extensions; do
++ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
++ ac_cv_prog_MANIFEST_TOOL="${ac_tool_prefix}mt"
++ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
++ break 2
++ fi
++done
++ done
++IFS=$as_save_IFS
++
++fi
++fi
++MANIFEST_TOOL=$ac_cv_prog_MANIFEST_TOOL
++if test -n "$MANIFEST_TOOL"; then
++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $MANIFEST_TOOL" >&5
++$as_echo "$MANIFEST_TOOL" >&6; }
++else
++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
++$as_echo "no" >&6; }
++fi
++
++
++fi
++if test -z "$ac_cv_prog_MANIFEST_TOOL"; then
++ ac_ct_MANIFEST_TOOL=$MANIFEST_TOOL
++ # Extract the first word of "mt", so it can be a program name with args.
++set dummy mt; ac_word=$2
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
++$as_echo_n "checking for $ac_word... " >&6; }
++if ${ac_cv_prog_ac_ct_MANIFEST_TOOL+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ if test -n "$ac_ct_MANIFEST_TOOL"; then
++ ac_cv_prog_ac_ct_MANIFEST_TOOL="$ac_ct_MANIFEST_TOOL" # Let the user override the test.
++else
++as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
++for as_dir in $PATH
++do
++ IFS=$as_save_IFS
++ test -z "$as_dir" && as_dir=.
++ for ac_exec_ext in '' $ac_executable_extensions; do
++ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
++ ac_cv_prog_ac_ct_MANIFEST_TOOL="mt"
++ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
++ break 2
++ fi
++done
++ done
++IFS=$as_save_IFS
++
++fi
++fi
++ac_ct_MANIFEST_TOOL=$ac_cv_prog_ac_ct_MANIFEST_TOOL
++if test -n "$ac_ct_MANIFEST_TOOL"; then
++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_MANIFEST_TOOL" >&5
++$as_echo "$ac_ct_MANIFEST_TOOL" >&6; }
++else
++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
++$as_echo "no" >&6; }
++fi
++
++ if test "x$ac_ct_MANIFEST_TOOL" = x; then
++ MANIFEST_TOOL=":"
++ else
++ case $cross_compiling:$ac_tool_warned in
++yes:)
++{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
++$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
++ac_tool_warned=yes ;;
+ esac
++ MANIFEST_TOOL=$ac_ct_MANIFEST_TOOL
++ fi
++else
++ MANIFEST_TOOL="$ac_cv_prog_MANIFEST_TOOL"
++fi
++
++test -z "$MANIFEST_TOOL" && MANIFEST_TOOL=mt
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking if $MANIFEST_TOOL is a manifest tool" >&5
++$as_echo_n "checking if $MANIFEST_TOOL is a manifest tool... " >&6; }
++if ${lt_cv_path_mainfest_tool+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ lt_cv_path_mainfest_tool=no
++ echo "$as_me:$LINENO: $MANIFEST_TOOL '-?'" >&5
++ $MANIFEST_TOOL '-?' 2>conftest.err > conftest.out
++ cat conftest.err >&5
++ if $GREP 'Manifest Tool' conftest.out > /dev/null; then
++ lt_cv_path_mainfest_tool=yes
++ fi
++ rm -f conftest*
++fi
++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_path_mainfest_tool" >&5
++$as_echo "$lt_cv_path_mainfest_tool" >&6; }
++if test "x$lt_cv_path_mainfest_tool" != xyes; then
++ MANIFEST_TOOL=:
++fi
++
++
++
+
+-need_locks="$enable_libtool_lock"
+
+
+ case $host_os in
+@@ -7988,6 +8492,8 @@ _LT_EOF
+ $LTCC $LTCFLAGS -c -o conftest.o conftest.c 2>&5
+ echo "$AR cru libconftest.a conftest.o" >&5
+ $AR cru libconftest.a conftest.o 2>&5
++ echo "$RANLIB libconftest.a" >&5
++ $RANLIB libconftest.a 2>&5
+ cat > conftest.c << _LT_EOF
+ int main() { return 0;}
+ _LT_EOF
+@@ -8542,8 +9048,6 @@ fi
+ lt_prog_compiler_pic=
+ lt_prog_compiler_static=
+
+-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $compiler option to produce PIC" >&5
+-$as_echo_n "checking for $compiler option to produce PIC... " >&6; }
+
+ if test "$GCC" = yes; then
+ lt_prog_compiler_wl='-Wl,'
+@@ -8709,6 +9213,12 @@ $as_echo_n "checking for $compiler option to produce PIC... " >&6; }
+ lt_prog_compiler_pic='--shared'
+ lt_prog_compiler_static='--static'
+ ;;
++ nagfor*)
++ # NAG Fortran compiler
++ lt_prog_compiler_wl='-Wl,-Wl,,'
++ lt_prog_compiler_pic='-PIC'
++ lt_prog_compiler_static='-Bstatic'
++ ;;
+ pgcc* | pgf77* | pgf90* | pgf95* | pgfortran*)
+ # Portland Group compilers (*not* the Pentium gcc compiler,
+ # which looks to be a dead project)
+@@ -8771,7 +9281,7 @@ $as_echo_n "checking for $compiler option to produce PIC... " >&6; }
+ lt_prog_compiler_pic='-KPIC'
+ lt_prog_compiler_static='-Bstatic'
+ case $cc_basename in
+- f77* | f90* | f95*)
++ f77* | f90* | f95* | sunf77* | sunf90* | sunf95*)
+ lt_prog_compiler_wl='-Qoption ld ';;
+ *)
+ lt_prog_compiler_wl='-Wl,';;
+@@ -8828,13 +9338,17 @@ case $host_os in
+ lt_prog_compiler_pic="$lt_prog_compiler_pic -DPIC"
+ ;;
+ esac
+-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_prog_compiler_pic" >&5
+-$as_echo "$lt_prog_compiler_pic" >&6; }
+-
+-
+-
+-
+
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $compiler option to produce PIC" >&5
++$as_echo_n "checking for $compiler option to produce PIC... " >&6; }
++if ${lt_cv_prog_compiler_pic+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ lt_cv_prog_compiler_pic=$lt_prog_compiler_pic
++fi
++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_pic" >&5
++$as_echo "$lt_cv_prog_compiler_pic" >&6; }
++lt_prog_compiler_pic=$lt_cv_prog_compiler_pic
+
+ #
+ # Check to make sure the PIC flag actually works.
+@@ -8895,6 +9409,11 @@ fi
+
+
+
++
++
++
++
++
+ #
+ # Check to make sure the static flag actually works.
+ #
+@@ -9245,7 +9764,8 @@ _LT_EOF
+ allow_undefined_flag=unsupported
+ always_export_symbols=no
+ enable_shared_with_static_runtimes=yes
+- export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1 DATA/'\'' | $SED -e '\''/^[AITW][ ]/s/.*[ ]//'\'' | sort | uniq > $export_symbols'
++ export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1 DATA/;s/^.*[ ]__nm__\([^ ]*\)[ ][^ ]*/\1 DATA/;/^I[ ]/d;/^[AITW][ ]/s/.* //'\'' | sort | uniq > $export_symbols'
++ exclude_expsyms='[_]+GLOBAL_OFFSET_TABLE_|[_]+GLOBAL__[FID]_.*|[_]+head_[A-Za-z0-9_]+_dll|[A-Za-z0-9_]+_dll_iname'
+
+ if $LD --help 2>&1 | $GREP 'auto-import' > /dev/null; then
+ archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib'
+@@ -9344,12 +9864,12 @@ _LT_EOF
+ whole_archive_flag_spec='--whole-archive$convenience --no-whole-archive'
+ hardcode_libdir_flag_spec=
+ hardcode_libdir_flag_spec_ld='-rpath $libdir'
+- archive_cmds='$LD -shared $libobjs $deplibs $compiler_flags -soname $soname -o $lib'
++ archive_cmds='$LD -shared $libobjs $deplibs $linker_flags -soname $soname -o $lib'
+ if test "x$supports_anon_versioning" = xyes; then
+ archive_expsym_cmds='echo "{ global:" > $output_objdir/$libname.ver~
+ cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~
+ echo "local: *; };" >> $output_objdir/$libname.ver~
+- $LD -shared $libobjs $deplibs $compiler_flags -soname $soname -version-script $output_objdir/$libname.ver -o $lib'
++ $LD -shared $libobjs $deplibs $linker_flags -soname $soname -version-script $output_objdir/$libname.ver -o $lib'
+ fi
+ ;;
+ esac
+@@ -9363,8 +9883,8 @@ _LT_EOF
+ archive_cmds='$LD -Bshareable $libobjs $deplibs $linker_flags -o $lib'
+ wlarc=
+ else
+- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
+- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
++ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
++ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
+ fi
+ ;;
+
+@@ -9382,8 +9902,8 @@ _LT_EOF
+
+ _LT_EOF
+ elif $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then
+- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
+- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
++ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
++ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
+ else
+ ld_shlibs=no
+ fi
+@@ -9429,8 +9949,8 @@ _LT_EOF
+
+ *)
+ if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then
+- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
+- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
++ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
++ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
+ else
+ ld_shlibs=no
+ fi
+@@ -9560,7 +10080,13 @@ _LT_EOF
+ allow_undefined_flag='-berok'
+ # Determine the default libpath from the value encoded in an
+ # empty executable.
+- cat confdefs.h - <<_ACEOF >conftest.$ac_ext
++ if test "${lt_cv_aix_libpath+set}" = set; then
++ aix_libpath=$lt_cv_aix_libpath
++else
++ if ${lt_cv_aix_libpath_+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+ /* end confdefs.h. */
+
+ int
+@@ -9573,22 +10099,29 @@ main ()
+ _ACEOF
+ if ac_fn_c_try_link "$LINENO"; then :
+
+-lt_aix_libpath_sed='
+- /Import File Strings/,/^$/ {
+- /^0/ {
+- s/^0 *\(.*\)$/\1/
+- p
+- }
+- }'
+-aix_libpath=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
+-# Check for a 64-bit object if we didn't find anything.
+-if test -z "$aix_libpath"; then
+- aix_libpath=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
+-fi
++ lt_aix_libpath_sed='
++ /Import File Strings/,/^$/ {
++ /^0/ {
++ s/^0 *\([^ ]*\) *$/\1/
++ p
++ }
++ }'
++ lt_cv_aix_libpath_=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
++ # Check for a 64-bit object if we didn't find anything.
++ if test -z "$lt_cv_aix_libpath_"; then
++ lt_cv_aix_libpath_=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
++ fi
+ fi
+ rm -f core conftest.err conftest.$ac_objext \
+ conftest$ac_exeext conftest.$ac_ext
+-if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
++ if test -z "$lt_cv_aix_libpath_"; then
++ lt_cv_aix_libpath_="/usr/lib:/lib"
++ fi
++
++fi
++
++ aix_libpath=$lt_cv_aix_libpath_
++fi
+
+ hardcode_libdir_flag_spec='${wl}-blibpath:$libdir:'"$aix_libpath"
+ archive_expsym_cmds='$CC -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags `if test "x${allow_undefined_flag}" != "x"; then func_echo_all "${wl}${allow_undefined_flag}"; else :; fi` '"\${wl}$exp_sym_flag:\$export_symbols $shared_flag"
+@@ -9600,7 +10133,13 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
+ else
+ # Determine the default libpath from the value encoded in an
+ # empty executable.
+- cat confdefs.h - <<_ACEOF >conftest.$ac_ext
++ if test "${lt_cv_aix_libpath+set}" = set; then
++ aix_libpath=$lt_cv_aix_libpath
++else
++ if ${lt_cv_aix_libpath_+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+ /* end confdefs.h. */
+
+ int
+@@ -9613,22 +10152,29 @@ main ()
+ _ACEOF
+ if ac_fn_c_try_link "$LINENO"; then :
+
+-lt_aix_libpath_sed='
+- /Import File Strings/,/^$/ {
+- /^0/ {
+- s/^0 *\(.*\)$/\1/
+- p
+- }
+- }'
+-aix_libpath=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
+-# Check for a 64-bit object if we didn't find anything.
+-if test -z "$aix_libpath"; then
+- aix_libpath=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
+-fi
++ lt_aix_libpath_sed='
++ /Import File Strings/,/^$/ {
++ /^0/ {
++ s/^0 *\([^ ]*\) *$/\1/
++ p
++ }
++ }'
++ lt_cv_aix_libpath_=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
++ # Check for a 64-bit object if we didn't find anything.
++ if test -z "$lt_cv_aix_libpath_"; then
++ lt_cv_aix_libpath_=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
++ fi
+ fi
+ rm -f core conftest.err conftest.$ac_objext \
+ conftest$ac_exeext conftest.$ac_ext
+-if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
++ if test -z "$lt_cv_aix_libpath_"; then
++ lt_cv_aix_libpath_="/usr/lib:/lib"
++ fi
++
++fi
++
++ aix_libpath=$lt_cv_aix_libpath_
++fi
+
+ hardcode_libdir_flag_spec='${wl}-blibpath:$libdir:'"$aix_libpath"
+ # Warning - without using the other run time loading flags,
+@@ -9673,20 +10219,63 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
+ # Microsoft Visual C++.
+ # hardcode_libdir_flag_spec is actually meaningless, as there is
+ # no search path for DLLs.
+- hardcode_libdir_flag_spec=' '
+- allow_undefined_flag=unsupported
+- # Tell ltmain to make .lib files, not .a files.
+- libext=lib
+- # Tell ltmain to make .dll files, not .so files.
+- shrext_cmds=".dll"
+- # FIXME: Setting linknames here is a bad hack.
+- archive_cmds='$CC -o $lib $libobjs $compiler_flags `func_echo_all "$deplibs" | $SED '\''s/ -lc$//'\''` -link -dll~linknames='
+- # The linker will automatically build a .lib file if we build a DLL.
+- old_archive_from_new_cmds='true'
+- # FIXME: Should let the user specify the lib program.
+- old_archive_cmds='lib -OUT:$oldlib$oldobjs$old_deplibs'
+- fix_srcfile_path='`cygpath -w "$srcfile"`'
+- enable_shared_with_static_runtimes=yes
++ case $cc_basename in
++ cl*)
++ # Native MSVC
++ hardcode_libdir_flag_spec=' '
++ allow_undefined_flag=unsupported
++ always_export_symbols=yes
++ file_list_spec='@'
++ # Tell ltmain to make .lib files, not .a files.
++ libext=lib
++ # Tell ltmain to make .dll files, not .so files.
++ shrext_cmds=".dll"
++ # FIXME: Setting linknames here is a bad hack.
++ archive_cmds='$CC -o $output_objdir/$soname $libobjs $compiler_flags $deplibs -Wl,-dll~linknames='
++ archive_expsym_cmds='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then
++ sed -n -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' -e '1\\\!p' < $export_symbols > $output_objdir/$soname.exp;
++ else
++ sed -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' < $export_symbols > $output_objdir/$soname.exp;
++ fi~
++ $CC -o $tool_output_objdir$soname $libobjs $compiler_flags $deplibs "@$tool_output_objdir$soname.exp" -Wl,-DLL,-IMPLIB:"$tool_output_objdir$libname.dll.lib"~
++ linknames='
++ # The linker will not automatically build a static lib if we build a DLL.
++ # _LT_TAGVAR(old_archive_from_new_cmds, )='true'
++ enable_shared_with_static_runtimes=yes
++ export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1,DATA/'\'' | $SED -e '\''/^[AITW][ ]/s/.*[ ]//'\'' | sort | uniq > $export_symbols'
++ # Don't use ranlib
++ old_postinstall_cmds='chmod 644 $oldlib'
++ postlink_cmds='lt_outputfile="@OUTPUT@"~
++ lt_tool_outputfile="@TOOL_OUTPUT@"~
++ case $lt_outputfile in
++ *.exe|*.EXE) ;;
++ *)
++ lt_outputfile="$lt_outputfile.exe"
++ lt_tool_outputfile="$lt_tool_outputfile.exe"
++ ;;
++ esac~
++ if test "$MANIFEST_TOOL" != ":" && test -f "$lt_outputfile.manifest"; then
++ $MANIFEST_TOOL -manifest "$lt_tool_outputfile.manifest" -outputresource:"$lt_tool_outputfile" || exit 1;
++ $RM "$lt_outputfile.manifest";
++ fi'
++ ;;
++ *)
++ # Assume MSVC wrapper
++ hardcode_libdir_flag_spec=' '
++ allow_undefined_flag=unsupported
++ # Tell ltmain to make .lib files, not .a files.
++ libext=lib
++ # Tell ltmain to make .dll files, not .so files.
++ shrext_cmds=".dll"
++ # FIXME: Setting linknames here is a bad hack.
++ archive_cmds='$CC -o $lib $libobjs $compiler_flags `func_echo_all "$deplibs" | $SED '\''s/ -lc$//'\''` -link -dll~linknames='
++ # The linker will automatically build a .lib file if we build a DLL.
++ old_archive_from_new_cmds='true'
++ # FIXME: Should let the user specify the lib program.
++ old_archive_cmds='lib -OUT:$oldlib$oldobjs$old_deplibs'
++ enable_shared_with_static_runtimes=yes
++ ;;
++ esac
+ ;;
+
+ darwin* | rhapsody*)
+@@ -9747,7 +10336,7 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
+
+ # FreeBSD 3 and greater uses gcc -shared to do shared libraries.
+ freebsd* | dragonfly*)
+- archive_cmds='$CC -shared -o $lib $libobjs $deplibs $compiler_flags'
++ archive_cmds='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags'
+ hardcode_libdir_flag_spec='-R$libdir'
+ hardcode_direct=yes
+ hardcode_shlibpath_var=no
+@@ -9755,7 +10344,7 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
+
+ hpux9*)
+ if test "$GCC" = yes; then
+- archive_cmds='$RM $output_objdir/$soname~$CC -shared -fPIC ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $libobjs $deplibs $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib'
++ archive_cmds='$RM $output_objdir/$soname~$CC -shared $pic_flag ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $libobjs $deplibs $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib'
+ else
+ archive_cmds='$RM $output_objdir/$soname~$LD -b +b $install_libdir -o $output_objdir/$soname $libobjs $deplibs $linker_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib'
+ fi
+@@ -9771,7 +10360,7 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
+
+ hpux10*)
+ if test "$GCC" = yes && test "$with_gnu_ld" = no; then
+- archive_cmds='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'
++ archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'
+ else
+ archive_cmds='$LD -b +h $soname +b $install_libdir -o $lib $libobjs $deplibs $linker_flags'
+ fi
+@@ -9795,10 +10384,10 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
+ archive_cmds='$CC -shared ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags'
+ ;;
+ ia64*)
+- archive_cmds='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags'
++ archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags'
+ ;;
+ *)
+- archive_cmds='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'
++ archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'
+ ;;
+ esac
+ else
+@@ -9877,23 +10466,36 @@ fi
+
+ irix5* | irix6* | nonstopux*)
+ if test "$GCC" = yes; then
+- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
++ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
+ # Try to use the -exported_symbol ld option, if it does not
+ # work, assume that -exports_file does not work either and
+ # implicitly export all symbols.
+- save_LDFLAGS="$LDFLAGS"
+- LDFLAGS="$LDFLAGS -shared ${wl}-exported_symbol ${wl}foo ${wl}-update_registry ${wl}/dev/null"
+- cat confdefs.h - <<_ACEOF >conftest.$ac_ext
++ # This should be the same for all languages, so no per-tag cache variable.
++ { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the $host_os linker accepts -exported_symbol" >&5
++$as_echo_n "checking whether the $host_os linker accepts -exported_symbol... " >&6; }
++if ${lt_cv_irix_exported_symbol+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ save_LDFLAGS="$LDFLAGS"
++ LDFLAGS="$LDFLAGS -shared ${wl}-exported_symbol ${wl}foo ${wl}-update_registry ${wl}/dev/null"
++ cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+ /* end confdefs.h. */
+-int foo(void) {}
++int foo (void) { return 0; }
+ _ACEOF
+ if ac_fn_c_try_link "$LINENO"; then :
+- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations ${wl}-exports_file ${wl}$export_symbols -o $lib'
+-
++ lt_cv_irix_exported_symbol=yes
++else
++ lt_cv_irix_exported_symbol=no
+ fi
+ rm -f core conftest.err conftest.$ac_objext \
+ conftest$ac_exeext conftest.$ac_ext
+- LDFLAGS="$save_LDFLAGS"
++ LDFLAGS="$save_LDFLAGS"
++fi
++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_irix_exported_symbol" >&5
++$as_echo "$lt_cv_irix_exported_symbol" >&6; }
++ if test "$lt_cv_irix_exported_symbol" = yes; then
++ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations ${wl}-exports_file ${wl}$export_symbols -o $lib'
++ fi
+ else
+ archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib'
+ archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -exports_file $export_symbols -o $lib'
+@@ -9978,7 +10580,7 @@ rm -f core conftest.err conftest.$ac_objext \
+ osf4* | osf5*) # as osf3* with the addition of -msym flag
+ if test "$GCC" = yes; then
+ allow_undefined_flag=' ${wl}-expect_unresolved ${wl}\*'
+- archive_cmds='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
++ archive_cmds='$CC -shared${allow_undefined_flag} $pic_flag $libobjs $deplibs $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
+ hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir'
+ else
+ allow_undefined_flag=' -expect_unresolved \*'
+@@ -9997,9 +10599,9 @@ rm -f core conftest.err conftest.$ac_objext \
+ no_undefined_flag=' -z defs'
+ if test "$GCC" = yes; then
+ wlarc='${wl}'
+- archive_cmds='$CC -shared ${wl}-z ${wl}text ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags'
++ archive_cmds='$CC -shared $pic_flag ${wl}-z ${wl}text ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags'
+ archive_expsym_cmds='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~
+- $CC -shared ${wl}-z ${wl}text ${wl}-M ${wl}$lib.exp ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp'
++ $CC -shared $pic_flag ${wl}-z ${wl}text ${wl}-M ${wl}$lib.exp ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp'
+ else
+ case `$CC -V 2>&1` in
+ *"Compilers 5.0"*)
+@@ -10575,8 +11177,9 @@ cygwin* | mingw* | pw32* | cegcc*)
+ need_version=no
+ need_lib_prefix=no
+
+- case $GCC,$host_os in
+- yes,cygwin* | yes,mingw* | yes,pw32* | yes,cegcc*)
++ case $GCC,$cc_basename in
++ yes,*)
++ # gcc
+ library_names_spec='$libname.dll.a'
+ # DLL is installed to $(libdir)/../bin by postinstall_cmds
+ postinstall_cmds='base_file=`basename \${file}`~
+@@ -10609,13 +11212,71 @@ cygwin* | mingw* | pw32* | cegcc*)
+ library_names_spec='`echo ${libname} | sed -e 's/^lib/pw/'``echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}'
+ ;;
+ esac
++ dynamic_linker='Win32 ld.exe'
++ ;;
++
++ *,cl*)
++ # Native MSVC
++ libname_spec='$name'
++ soname_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}'
++ library_names_spec='${libname}.dll.lib'
++
++ case $build_os in
++ mingw*)
++ sys_lib_search_path_spec=
++ lt_save_ifs=$IFS
++ IFS=';'
++ for lt_path in $LIB
++ do
++ IFS=$lt_save_ifs
++ # Let DOS variable expansion print the short 8.3 style file name.
++ lt_path=`cd "$lt_path" 2>/dev/null && cmd //C "for %i in (".") do @echo %~si"`
++ sys_lib_search_path_spec="$sys_lib_search_path_spec $lt_path"
++ done
++ IFS=$lt_save_ifs
++ # Convert to MSYS style.
++ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | sed -e 's|\\\\|/|g' -e 's| \\([a-zA-Z]\\):| /\\1|g' -e 's|^ ||'`
++ ;;
++ cygwin*)
++ # Convert to unix form, then to dos form, then back to unix form
++ # but this time dos style (no spaces!) so that the unix form looks
++ # like /cygdrive/c/PROGRA~1:/cygdr...
++ sys_lib_search_path_spec=`cygpath --path --unix "$LIB"`
++ sys_lib_search_path_spec=`cygpath --path --dos "$sys_lib_search_path_spec" 2>/dev/null`
++ sys_lib_search_path_spec=`cygpath --path --unix "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"`
++ ;;
++ *)
++ sys_lib_search_path_spec="$LIB"
++ if $ECHO "$sys_lib_search_path_spec" | $GREP ';[c-zC-Z]:/' >/dev/null; then
++ # It is most probably a Windows format PATH.
++ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e 's/;/ /g'`
++ else
++ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"`
++ fi
++ # FIXME: find the short name or the path components, as spaces are
++ # common. (e.g. "Program Files" -> "PROGRA~1")
++ ;;
++ esac
++
++ # DLL is installed to $(libdir)/../bin by postinstall_cmds
++ postinstall_cmds='base_file=`basename \${file}`~
++ dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\${base_file}'\''i; echo \$dlname'\''`~
++ dldir=$destdir/`dirname \$dlpath`~
++ test -d \$dldir || mkdir -p \$dldir~
++ $install_prog $dir/$dlname \$dldir/$dlname'
++ postuninstall_cmds='dldll=`$SHELL 2>&1 -c '\''. $file; echo \$dlname'\''`~
++ dlpath=$dir/\$dldll~
++ $RM \$dlpath'
++ shlibpath_overrides_runpath=yes
++ dynamic_linker='Win32 link.exe'
+ ;;
+
+ *)
++ # Assume MSVC wrapper
+ library_names_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext} $libname.lib'
++ dynamic_linker='Win32 ld.exe'
+ ;;
+ esac
+- dynamic_linker='Win32 ld.exe'
+ # FIXME: first we should search . and the directory the executable is in
+ shlibpath_var=PATH
+ ;;
+@@ -11517,7 +12178,7 @@ else
+ lt_dlunknown=0; lt_dlno_uscore=1; lt_dlneed_uscore=2
+ lt_status=$lt_dlunknown
+ cat > conftest.$ac_ext <<_LT_EOF
+-#line 11520 "configure"
++#line $LINENO "configure"
+ #include "confdefs.h"
+
+ #if HAVE_DLFCN_H
+@@ -11561,10 +12222,10 @@ else
+ /* When -fvisbility=hidden is used, assume the code has been annotated
+ correspondingly for the symbols needed. */
+ #if defined(__GNUC__) && (((__GNUC__ == 3) && (__GNUC_MINOR__ >= 3)) || (__GNUC__ > 3))
+-void fnord () __attribute__((visibility("default")));
++int fnord () __attribute__((visibility("default")));
+ #endif
+
+-void fnord () { int i=42; }
++int fnord () { return 42; }
+ int main ()
+ {
+ void *self = dlopen (0, LT_DLGLOBAL|LT_DLLAZY_OR_NOW);
+@@ -11623,7 +12284,7 @@ else
+ lt_dlunknown=0; lt_dlno_uscore=1; lt_dlneed_uscore=2
+ lt_status=$lt_dlunknown
+ cat > conftest.$ac_ext <<_LT_EOF
+-#line 11626 "configure"
++#line $LINENO "configure"
+ #include "confdefs.h"
+
+ #if HAVE_DLFCN_H
+@@ -11667,10 +12328,10 @@ else
+ /* When -fvisbility=hidden is used, assume the code has been annotated
+ correspondingly for the symbols needed. */
+ #if defined(__GNUC__) && (((__GNUC__ == 3) && (__GNUC_MINOR__ >= 3)) || (__GNUC__ > 3))
+-void fnord () __attribute__((visibility("default")));
++int fnord () __attribute__((visibility("default")));
+ #endif
+
+-void fnord () { int i=42; }
++int fnord () { return 42; }
+ int main ()
+ {
+ void *self = dlopen (0, LT_DLGLOBAL|LT_DLLAZY_OR_NOW);
+@@ -13490,7 +14151,7 @@ cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1
+ #
+ # INIT-COMMANDS
+ #
+-AMDEP_TRUE="$AMDEP_TRUE" ac_aux_dir="$ac_aux_dir"
++AMDEP_TRUE="$AMDEP_TRUE" MAKE="${MAKE-make}"
+
+
+ # The HP-UX ksh and POSIX shell print the target directory to stdout
+@@ -13528,13 +14189,20 @@ exeext='`$ECHO "$exeext" | $SED "$delay_single_quote_subst"`'
+ lt_unset='`$ECHO "$lt_unset" | $SED "$delay_single_quote_subst"`'
+ lt_SP2NL='`$ECHO "$lt_SP2NL" | $SED "$delay_single_quote_subst"`'
+ lt_NL2SP='`$ECHO "$lt_NL2SP" | $SED "$delay_single_quote_subst"`'
++lt_cv_to_host_file_cmd='`$ECHO "$lt_cv_to_host_file_cmd" | $SED "$delay_single_quote_subst"`'
++lt_cv_to_tool_file_cmd='`$ECHO "$lt_cv_to_tool_file_cmd" | $SED "$delay_single_quote_subst"`'
+ reload_flag='`$ECHO "$reload_flag" | $SED "$delay_single_quote_subst"`'
+ reload_cmds='`$ECHO "$reload_cmds" | $SED "$delay_single_quote_subst"`'
+ OBJDUMP='`$ECHO "$OBJDUMP" | $SED "$delay_single_quote_subst"`'
+ deplibs_check_method='`$ECHO "$deplibs_check_method" | $SED "$delay_single_quote_subst"`'
+ file_magic_cmd='`$ECHO "$file_magic_cmd" | $SED "$delay_single_quote_subst"`'
++file_magic_glob='`$ECHO "$file_magic_glob" | $SED "$delay_single_quote_subst"`'
++want_nocaseglob='`$ECHO "$want_nocaseglob" | $SED "$delay_single_quote_subst"`'
++DLLTOOL='`$ECHO "$DLLTOOL" | $SED "$delay_single_quote_subst"`'
++sharedlib_from_linklib_cmd='`$ECHO "$sharedlib_from_linklib_cmd" | $SED "$delay_single_quote_subst"`'
+ AR='`$ECHO "$AR" | $SED "$delay_single_quote_subst"`'
+ AR_FLAGS='`$ECHO "$AR_FLAGS" | $SED "$delay_single_quote_subst"`'
++archiver_list_spec='`$ECHO "$archiver_list_spec" | $SED "$delay_single_quote_subst"`'
+ STRIP='`$ECHO "$STRIP" | $SED "$delay_single_quote_subst"`'
+ RANLIB='`$ECHO "$RANLIB" | $SED "$delay_single_quote_subst"`'
+ old_postinstall_cmds='`$ECHO "$old_postinstall_cmds" | $SED "$delay_single_quote_subst"`'
+@@ -13549,14 +14217,17 @@ lt_cv_sys_global_symbol_pipe='`$ECHO "$lt_cv_sys_global_symbol_pipe" | $SED "$de
+ lt_cv_sys_global_symbol_to_cdecl='`$ECHO "$lt_cv_sys_global_symbol_to_cdecl" | $SED "$delay_single_quote_subst"`'
+ lt_cv_sys_global_symbol_to_c_name_address='`$ECHO "$lt_cv_sys_global_symbol_to_c_name_address" | $SED "$delay_single_quote_subst"`'
+ lt_cv_sys_global_symbol_to_c_name_address_lib_prefix='`$ECHO "$lt_cv_sys_global_symbol_to_c_name_address_lib_prefix" | $SED "$delay_single_quote_subst"`'
++nm_file_list_spec='`$ECHO "$nm_file_list_spec" | $SED "$delay_single_quote_subst"`'
++lt_sysroot='`$ECHO "$lt_sysroot" | $SED "$delay_single_quote_subst"`'
+ objdir='`$ECHO "$objdir" | $SED "$delay_single_quote_subst"`'
+ MAGIC_CMD='`$ECHO "$MAGIC_CMD" | $SED "$delay_single_quote_subst"`'
+ lt_prog_compiler_no_builtin_flag='`$ECHO "$lt_prog_compiler_no_builtin_flag" | $SED "$delay_single_quote_subst"`'
+-lt_prog_compiler_wl='`$ECHO "$lt_prog_compiler_wl" | $SED "$delay_single_quote_subst"`'
+ lt_prog_compiler_pic='`$ECHO "$lt_prog_compiler_pic" | $SED "$delay_single_quote_subst"`'
++lt_prog_compiler_wl='`$ECHO "$lt_prog_compiler_wl" | $SED "$delay_single_quote_subst"`'
+ lt_prog_compiler_static='`$ECHO "$lt_prog_compiler_static" | $SED "$delay_single_quote_subst"`'
+ lt_cv_prog_compiler_c_o='`$ECHO "$lt_cv_prog_compiler_c_o" | $SED "$delay_single_quote_subst"`'
+ need_locks='`$ECHO "$need_locks" | $SED "$delay_single_quote_subst"`'
++MANIFEST_TOOL='`$ECHO "$MANIFEST_TOOL" | $SED "$delay_single_quote_subst"`'
+ DSYMUTIL='`$ECHO "$DSYMUTIL" | $SED "$delay_single_quote_subst"`'
+ NMEDIT='`$ECHO "$NMEDIT" | $SED "$delay_single_quote_subst"`'
+ LIPO='`$ECHO "$LIPO" | $SED "$delay_single_quote_subst"`'
+@@ -13589,12 +14260,12 @@ hardcode_shlibpath_var='`$ECHO "$hardcode_shlibpath_var" | $SED "$delay_single_q
+ hardcode_automatic='`$ECHO "$hardcode_automatic" | $SED "$delay_single_quote_subst"`'
+ inherit_rpath='`$ECHO "$inherit_rpath" | $SED "$delay_single_quote_subst"`'
+ link_all_deplibs='`$ECHO "$link_all_deplibs" | $SED "$delay_single_quote_subst"`'
+-fix_srcfile_path='`$ECHO "$fix_srcfile_path" | $SED "$delay_single_quote_subst"`'
+ always_export_symbols='`$ECHO "$always_export_symbols" | $SED "$delay_single_quote_subst"`'
+ export_symbols_cmds='`$ECHO "$export_symbols_cmds" | $SED "$delay_single_quote_subst"`'
+ exclude_expsyms='`$ECHO "$exclude_expsyms" | $SED "$delay_single_quote_subst"`'
+ include_expsyms='`$ECHO "$include_expsyms" | $SED "$delay_single_quote_subst"`'
+ prelink_cmds='`$ECHO "$prelink_cmds" | $SED "$delay_single_quote_subst"`'
++postlink_cmds='`$ECHO "$postlink_cmds" | $SED "$delay_single_quote_subst"`'
+ file_list_spec='`$ECHO "$file_list_spec" | $SED "$delay_single_quote_subst"`'
+ variables_saved_for_relink='`$ECHO "$variables_saved_for_relink" | $SED "$delay_single_quote_subst"`'
+ need_lib_prefix='`$ECHO "$need_lib_prefix" | $SED "$delay_single_quote_subst"`'
+@@ -13649,8 +14320,13 @@ reload_flag \
+ OBJDUMP \
+ deplibs_check_method \
+ file_magic_cmd \
++file_magic_glob \
++want_nocaseglob \
++DLLTOOL \
++sharedlib_from_linklib_cmd \
+ AR \
+ AR_FLAGS \
++archiver_list_spec \
+ STRIP \
+ RANLIB \
+ CC \
+@@ -13660,12 +14336,14 @@ lt_cv_sys_global_symbol_pipe \
+ lt_cv_sys_global_symbol_to_cdecl \
+ lt_cv_sys_global_symbol_to_c_name_address \
+ lt_cv_sys_global_symbol_to_c_name_address_lib_prefix \
++nm_file_list_spec \
+ lt_prog_compiler_no_builtin_flag \
+-lt_prog_compiler_wl \
+ lt_prog_compiler_pic \
++lt_prog_compiler_wl \
+ lt_prog_compiler_static \
+ lt_cv_prog_compiler_c_o \
+ need_locks \
++MANIFEST_TOOL \
+ DSYMUTIL \
+ NMEDIT \
+ LIPO \
+@@ -13681,7 +14359,6 @@ no_undefined_flag \
+ hardcode_libdir_flag_spec \
+ hardcode_libdir_flag_spec_ld \
+ hardcode_libdir_separator \
+-fix_srcfile_path \
+ exclude_expsyms \
+ include_expsyms \
+ file_list_spec \
+@@ -13717,6 +14394,7 @@ module_cmds \
+ module_expsym_cmds \
+ export_symbols_cmds \
+ prelink_cmds \
++postlink_cmds \
+ postinstall_cmds \
+ postuninstall_cmds \
+ finish_cmds \
+@@ -14363,29 +15041,35 @@ $as_echo "$as_me: executing $ac_file commands" >&6;}
+ # Older Autoconf quotes --file arguments for eval, but not when files
+ # are listed without --file. Let's play safe and only enable the eval
+ # if we detect the quoting.
+- case $CONFIG_FILES in
+- *\'*) eval set x "$CONFIG_FILES" ;;
+- *) set x $CONFIG_FILES ;;
+- esac
++ # TODO: see whether this extra hack can be removed once we start
++ # requiring Autoconf 2.70 or later.
++ case $CONFIG_FILES in #(
++ *\'*) :
++ eval set x "$CONFIG_FILES" ;; #(
++ *) :
++ set x $CONFIG_FILES ;; #(
++ *) :
++ ;;
++esac
+ shift
+- for mf
++ # Used to flag and report bootstrapping failures.
++ am_rc=0
++ for am_mf
+ do
+ # Strip MF so we end up with the name of the file.
+- mf=`echo "$mf" | sed -e 's/:.*$//'`
+- # Check whether this is an Automake generated Makefile or not.
+- # We used to match only the files named 'Makefile.in', but
+- # some people rename them; so instead we look at the file content.
+- # Grep'ing the first line is not enough: some people post-process
+- # each Makefile.in and add a new line on top of each file to say so.
+- # Grep'ing the whole file is not good either: AIX grep has a line
++ am_mf=`$as_echo "$am_mf" | sed -e 's/:.*$//'`
++ # Check whether this is an Automake generated Makefile which includes
++ # dependency-tracking related rules and includes.
++ # Grep'ing the whole file directly is not great: AIX grep has a line
+ # limit of 2048, but all sed's we know have understand at least 4000.
+- if sed -n 's,^#.*generated by automake.*,X,p' "$mf" | grep X >/dev/null 2>&1; then
+- dirpart=`$as_dirname -- "$mf" ||
+-$as_expr X"$mf" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \
+- X"$mf" : 'X\(//\)[^/]' \| \
+- X"$mf" : 'X\(//\)$' \| \
+- X"$mf" : 'X\(/\)' \| . 2>/dev/null ||
+-$as_echo X"$mf" |
++ sed -n 's,^am--depfiles:.*,X,p' "$am_mf" | grep X >/dev/null 2>&1 \
++ || continue
++ am_dirpart=`$as_dirname -- "$am_mf" ||
++$as_expr X"$am_mf" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \
++ X"$am_mf" : 'X\(//\)[^/]' \| \
++ X"$am_mf" : 'X\(//\)$' \| \
++ X"$am_mf" : 'X\(/\)' \| . 2>/dev/null ||
++$as_echo X"$am_mf" |
+ sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{
+ s//\1/
+ q
+@@ -14403,53 +15087,50 @@ $as_echo X"$mf" |
+ q
+ }
+ s/.*/./; q'`
+- else
+- continue
+- fi
+- # Extract the definition of DEPDIR, am__include, and am__quote
+- # from the Makefile without running 'make'.
+- DEPDIR=`sed -n 's/^DEPDIR = //p' < "$mf"`
+- test -z "$DEPDIR" && continue
+- am__include=`sed -n 's/^am__include = //p' < "$mf"`
+- test -z "$am__include" && continue
+- am__quote=`sed -n 's/^am__quote = //p' < "$mf"`
+- # Find all dependency output files, they are included files with
+- # $(DEPDIR) in their names. We invoke sed twice because it is the
+- # simplest approach to changing $(DEPDIR) to its actual value in the
+- # expansion.
+- for file in `sed -n "
+- s/^$am__include $am__quote\(.*(DEPDIR).*\)$am__quote"'$/\1/p' <"$mf" | \
+- sed -e 's/\$(DEPDIR)/'"$DEPDIR"'/g'`; do
+- # Make sure the directory exists.
+- test -f "$dirpart/$file" && continue
+- fdir=`$as_dirname -- "$file" ||
+-$as_expr X"$file" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \
+- X"$file" : 'X\(//\)[^/]' \| \
+- X"$file" : 'X\(//\)$' \| \
+- X"$file" : 'X\(/\)' \| . 2>/dev/null ||
+-$as_echo X"$file" |
+- sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{
+- s//\1/
+- q
+- }
+- /^X\(\/\/\)[^/].*/{
++ am_filepart=`$as_basename -- "$am_mf" ||
++$as_expr X/"$am_mf" : '.*/\([^/][^/]*\)/*$' \| \
++ X"$am_mf" : 'X\(//\)$' \| \
++ X"$am_mf" : 'X\(/\)' \| . 2>/dev/null ||
++$as_echo X/"$am_mf" |
++ sed '/^.*\/\([^/][^/]*\)\/*$/{
+ s//\1/
+ q
+ }
+- /^X\(\/\/\)$/{
++ /^X\/\(\/\/\)$/{
+ s//\1/
+ q
+ }
+- /^X\(\/\).*/{
++ /^X\/\(\/\).*/{
+ s//\1/
+ q
+ }
+ s/.*/./; q'`
+- as_dir=$dirpart/$fdir; as_fn_mkdir_p
+- # echo "creating $dirpart/$file"
+- echo '# dummy' > "$dirpart/$file"
+- done
++ { echo "$as_me:$LINENO: cd "$am_dirpart" \
++ && sed -e '/# am--include-marker/d' "$am_filepart" \
++ | $MAKE -f - am--depfiles" >&5
++ (cd "$am_dirpart" \
++ && sed -e '/# am--include-marker/d' "$am_filepart" \
++ | $MAKE -f - am--depfiles) >&5 2>&5
++ ac_status=$?
++ echo "$as_me:$LINENO: \$? = $ac_status" >&5
++ (exit $ac_status); } || am_rc=$?
+ done
++ if test $am_rc -ne 0; then
++ { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
++$as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
++as_fn_error $? "Something went wrong bootstrapping makefile fragments
++ for automatic dependency tracking. If GNU make was not used, consider
++ re-running the configure script with MAKE=\"gmake\" (or whatever is
++ necessary). You can also try re-running configure with the
++ '--disable-dependency-tracking' option to at least be able to build
++ the package (albeit without support for automatic dependency tracking).
++See \`config.log' for more details" "$LINENO" 5; }
++ fi
++ { am_dirpart=; unset am_dirpart;}
++ { am_filepart=; unset am_filepart;}
++ { am_mf=; unset am_mf;}
++ { am_rc=; unset am_rc;}
++ rm -f conftest-deps.mk
+ }
+ ;;
+ "libtool":C)
+@@ -14473,7 +15154,8 @@ $as_echo X"$file" |
+ # NOTE: Changes made to this file will be lost: look at ltmain.sh.
+ #
+ # Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005,
+-# 2006, 2007, 2008, 2009 Free Software Foundation, Inc.
++# 2006, 2007, 2008, 2009, 2010 Free Software Foundation,
++# Inc.
+ # Written by Gordon Matzigkeit, 1996
+ #
+ # This file is part of GNU Libtool.
+@@ -14576,19 +15258,42 @@ SP2NL=$lt_lt_SP2NL
+ # turn newlines into spaces.
+ NL2SP=$lt_lt_NL2SP
+
++# convert \$build file names to \$host format.
++to_host_file_cmd=$lt_cv_to_host_file_cmd
++
++# convert \$build files to toolchain format.
++to_tool_file_cmd=$lt_cv_to_tool_file_cmd
++
+ # An object symbol dumper.
+ OBJDUMP=$lt_OBJDUMP
+
+ # Method to check whether dependent libraries are shared objects.
+ deplibs_check_method=$lt_deplibs_check_method
+
+-# Command to use when deplibs_check_method == "file_magic".
++# Command to use when deplibs_check_method = "file_magic".
+ file_magic_cmd=$lt_file_magic_cmd
+
++# How to find potential files when deplibs_check_method = "file_magic".
++file_magic_glob=$lt_file_magic_glob
++
++# Find potential files using nocaseglob when deplibs_check_method = "file_magic".
++want_nocaseglob=$lt_want_nocaseglob
++
++# DLL creation program.
++DLLTOOL=$lt_DLLTOOL
++
++# Command to associate shared and link libraries.
++sharedlib_from_linklib_cmd=$lt_sharedlib_from_linklib_cmd
++
+ # The archiver.
+ AR=$lt_AR
++
++# Flags to create an archive.
+ AR_FLAGS=$lt_AR_FLAGS
+
++# How to feed a file listing to the archiver.
++archiver_list_spec=$lt_archiver_list_spec
++
+ # A symbol stripping program.
+ STRIP=$lt_STRIP
+
+@@ -14618,6 +15323,12 @@ global_symbol_to_c_name_address=$lt_lt_cv_sys_global_symbol_to_c_name_address
+ # Transform the output of nm in a C name address pair when lib prefix is needed.
+ global_symbol_to_c_name_address_lib_prefix=$lt_lt_cv_sys_global_symbol_to_c_name_address_lib_prefix
+
++# Specify filename containing input files for \$NM.
++nm_file_list_spec=$lt_nm_file_list_spec
++
++# The root where to search for dependent libraries,and in which our libraries should be installed.
++lt_sysroot=$lt_sysroot
++
+ # The name of the directory that contains temporary libtool files.
+ objdir=$objdir
+
+@@ -14627,6 +15338,9 @@ MAGIC_CMD=$MAGIC_CMD
+ # Must we lock files when doing compilation?
+ need_locks=$lt_need_locks
+
++# Manifest tool.
++MANIFEST_TOOL=$lt_MANIFEST_TOOL
++
+ # Tool to manipulate archived DWARF debug symbol files on Mac OS X.
+ DSYMUTIL=$lt_DSYMUTIL
+
+@@ -14741,12 +15455,12 @@ with_gcc=$GCC
+ # Compiler flag to turn off builtin functions.
+ no_builtin_flag=$lt_lt_prog_compiler_no_builtin_flag
+
+-# How to pass a linker flag through the compiler.
+-wl=$lt_lt_prog_compiler_wl
+-
+ # Additional compiler flags for building library objects.
+ pic_flag=$lt_lt_prog_compiler_pic
+
++# How to pass a linker flag through the compiler.
++wl=$lt_lt_prog_compiler_wl
++
+ # Compiler flag to prevent dynamic linking.
+ link_static_flag=$lt_lt_prog_compiler_static
+
+@@ -14833,9 +15547,6 @@ inherit_rpath=$inherit_rpath
+ # Whether libtool must link a program against all its dependency libraries.
+ link_all_deplibs=$link_all_deplibs
+
+-# Fix the shell variable \$srcfile for the compiler.
+-fix_srcfile_path=$lt_fix_srcfile_path
+-
+ # Set to "yes" if exported symbols are required.
+ always_export_symbols=$always_export_symbols
+
+@@ -14851,6 +15562,9 @@ include_expsyms=$lt_include_expsyms
+ # Commands necessary for linking programs (against libraries) with templates.
+ prelink_cmds=$lt_prelink_cmds
+
++# Commands necessary for finishing linking programs.
++postlink_cmds=$lt_postlink_cmds
++
+ # Specify filename containing input files.
+ file_list_spec=$lt_file_list_spec
+
+@@ -14883,210 +15597,169 @@ ltmain="$ac_aux_dir/ltmain.sh"
+ # if finds mixed CR/LF and LF-only lines. Since sed operates in
+ # text mode, it properly converts lines to CR/LF. This bash problem
+ # is reportedly fixed, but why not run on old versions too?
+- sed '/^# Generated shell functions inserted here/q' "$ltmain" >> "$cfgfile" \
+- || (rm -f "$cfgfile"; exit 1)
+-
+- case $xsi_shell in
+- yes)
+- cat << \_LT_EOF >> "$cfgfile"
+-
+-# func_dirname file append nondir_replacement
+-# Compute the dirname of FILE. If nonempty, add APPEND to the result,
+-# otherwise set result to NONDIR_REPLACEMENT.
+-func_dirname ()
+-{
+- case ${1} in
+- */*) func_dirname_result="${1%/*}${2}" ;;
+- * ) func_dirname_result="${3}" ;;
+- esac
+-}
+-
+-# func_basename file
+-func_basename ()
+-{
+- func_basename_result="${1##*/}"
+-}
+-
+-# func_dirname_and_basename file append nondir_replacement
+-# perform func_basename and func_dirname in a single function
+-# call:
+-# dirname: Compute the dirname of FILE. If nonempty,
+-# add APPEND to the result, otherwise set result
+-# to NONDIR_REPLACEMENT.
+-# value returned in "$func_dirname_result"
+-# basename: Compute filename of FILE.
+-# value retuned in "$func_basename_result"
+-# Implementation must be kept synchronized with func_dirname
+-# and func_basename. For efficiency, we do not delegate to
+-# those functions but instead duplicate the functionality here.
+-func_dirname_and_basename ()
+-{
+- case ${1} in
+- */*) func_dirname_result="${1%/*}${2}" ;;
+- * ) func_dirname_result="${3}" ;;
+- esac
+- func_basename_result="${1##*/}"
+-}
+-
+-# func_stripname prefix suffix name
+-# strip PREFIX and SUFFIX off of NAME.
+-# PREFIX and SUFFIX must not contain globbing or regex special
+-# characters, hashes, percent signs, but SUFFIX may contain a leading
+-# dot (in which case that matches only a dot).
+-func_stripname ()
+-{
+- # pdksh 5.2.14 does not do ${X%$Y} correctly if both X and Y are
+- # positional parameters, so assign one to ordinary parameter first.
+- func_stripname_result=${3}
+- func_stripname_result=${func_stripname_result#"${1}"}
+- func_stripname_result=${func_stripname_result%"${2}"}
+-}
+-
+-# func_opt_split
+-func_opt_split ()
+-{
+- func_opt_split_opt=${1%%=*}
+- func_opt_split_arg=${1#*=}
+-}
+-
+-# func_lo2o object
+-func_lo2o ()
+-{
+- case ${1} in
+- *.lo) func_lo2o_result=${1%.lo}.${objext} ;;
+- *) func_lo2o_result=${1} ;;
+- esac
+-}
+-
+-# func_xform libobj-or-source
+-func_xform ()
+-{
+- func_xform_result=${1%.*}.lo
+-}
+-
+-# func_arith arithmetic-term...
+-func_arith ()
+-{
+- func_arith_result=$(( $* ))
+-}
+-
+-# func_len string
+-# STRING may not start with a hyphen.
+-func_len ()
+-{
+- func_len_result=${#1}
+-}
+-
+-_LT_EOF
+- ;;
+- *) # Bourne compatible functions.
+- cat << \_LT_EOF >> "$cfgfile"
+-
+-# func_dirname file append nondir_replacement
+-# Compute the dirname of FILE. If nonempty, add APPEND to the result,
+-# otherwise set result to NONDIR_REPLACEMENT.
+-func_dirname ()
+-{
+- # Extract subdirectory from the argument.
+- func_dirname_result=`$ECHO "${1}" | $SED "$dirname"`
+- if test "X$func_dirname_result" = "X${1}"; then
+- func_dirname_result="${3}"
+- else
+- func_dirname_result="$func_dirname_result${2}"
+- fi
+-}
+-
+-# func_basename file
+-func_basename ()
+-{
+- func_basename_result=`$ECHO "${1}" | $SED "$basename"`
+-}
+-
+-
+-# func_stripname prefix suffix name
+-# strip PREFIX and SUFFIX off of NAME.
+-# PREFIX and SUFFIX must not contain globbing or regex special
+-# characters, hashes, percent signs, but SUFFIX may contain a leading
+-# dot (in which case that matches only a dot).
+-# func_strip_suffix prefix name
+-func_stripname ()
+-{
+- case ${2} in
+- .*) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%\\\\${2}\$%%"`;;
+- *) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%${2}\$%%"`;;
+- esac
+-}
+-
+-# sed scripts:
+-my_sed_long_opt='1s/^\(-[^=]*\)=.*/\1/;q'
+-my_sed_long_arg='1s/^-[^=]*=//'
+-
+-# func_opt_split
+-func_opt_split ()
+-{
+- func_opt_split_opt=`$ECHO "${1}" | $SED "$my_sed_long_opt"`
+- func_opt_split_arg=`$ECHO "${1}" | $SED "$my_sed_long_arg"`
+-}
+-
+-# func_lo2o object
+-func_lo2o ()
+-{
+- func_lo2o_result=`$ECHO "${1}" | $SED "$lo2o"`
+-}
+-
+-# func_xform libobj-or-source
+-func_xform ()
+-{
+- func_xform_result=`$ECHO "${1}" | $SED 's/\.[^.]*$/.lo/'`
+-}
+-
+-# func_arith arithmetic-term...
+-func_arith ()
+-{
+- func_arith_result=`expr "$@"`
+-}
+-
+-# func_len string
+-# STRING may not start with a hyphen.
+-func_len ()
+-{
+- func_len_result=`expr "$1" : ".*" 2>/dev/null || echo $max_cmd_len`
+-}
+-
+-_LT_EOF
+-esac
+-
+-case $lt_shell_append in
+- yes)
+- cat << \_LT_EOF >> "$cfgfile"
+-
+-# func_append var value
+-# Append VALUE to the end of shell variable VAR.
+-func_append ()
+-{
+- eval "$1+=\$2"
+-}
+-_LT_EOF
+- ;;
+- *)
+- cat << \_LT_EOF >> "$cfgfile"
+-
+-# func_append var value
+-# Append VALUE to the end of shell variable VAR.
+-func_append ()
+-{
+- eval "$1=\$$1\$2"
+-}
+-
+-_LT_EOF
+- ;;
+- esac
+-
+-
+- sed -n '/^# Generated shell functions inserted here/,$p' "$ltmain" >> "$cfgfile" \
+- || (rm -f "$cfgfile"; exit 1)
+-
+- mv -f "$cfgfile" "$ofile" ||
++ sed '$q' "$ltmain" >> "$cfgfile" \
++ || (rm -f "$cfgfile"; exit 1)
++
++ if test x"$xsi_shell" = xyes; then
++ sed -e '/^func_dirname ()$/,/^} # func_dirname /c\
++func_dirname ()\
++{\
++\ case ${1} in\
++\ */*) func_dirname_result="${1%/*}${2}" ;;\
++\ * ) func_dirname_result="${3}" ;;\
++\ esac\
++} # Extended-shell func_dirname implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++
++ sed -e '/^func_basename ()$/,/^} # func_basename /c\
++func_basename ()\
++{\
++\ func_basename_result="${1##*/}"\
++} # Extended-shell func_basename implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++
++ sed -e '/^func_dirname_and_basename ()$/,/^} # func_dirname_and_basename /c\
++func_dirname_and_basename ()\
++{\
++\ case ${1} in\
++\ */*) func_dirname_result="${1%/*}${2}" ;;\
++\ * ) func_dirname_result="${3}" ;;\
++\ esac\
++\ func_basename_result="${1##*/}"\
++} # Extended-shell func_dirname_and_basename implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++
++ sed -e '/^func_stripname ()$/,/^} # func_stripname /c\
++func_stripname ()\
++{\
++\ # pdksh 5.2.14 does not do ${X%$Y} correctly if both X and Y are\
++\ # positional parameters, so assign one to ordinary parameter first.\
++\ func_stripname_result=${3}\
++\ func_stripname_result=${func_stripname_result#"${1}"}\
++\ func_stripname_result=${func_stripname_result%"${2}"}\
++} # Extended-shell func_stripname implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++
++ sed -e '/^func_split_long_opt ()$/,/^} # func_split_long_opt /c\
++func_split_long_opt ()\
++{\
++\ func_split_long_opt_name=${1%%=*}\
++\ func_split_long_opt_arg=${1#*=}\
++} # Extended-shell func_split_long_opt implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++
++ sed -e '/^func_split_short_opt ()$/,/^} # func_split_short_opt /c\
++func_split_short_opt ()\
++{\
++\ func_split_short_opt_arg=${1#??}\
++\ func_split_short_opt_name=${1%"$func_split_short_opt_arg"}\
++} # Extended-shell func_split_short_opt implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++
++ sed -e '/^func_lo2o ()$/,/^} # func_lo2o /c\
++func_lo2o ()\
++{\
++\ case ${1} in\
++\ *.lo) func_lo2o_result=${1%.lo}.${objext} ;;\
++\ *) func_lo2o_result=${1} ;;\
++\ esac\
++} # Extended-shell func_lo2o implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++
++ sed -e '/^func_xform ()$/,/^} # func_xform /c\
++func_xform ()\
++{\
++ func_xform_result=${1%.*}.lo\
++} # Extended-shell func_xform implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++
++ sed -e '/^func_arith ()$/,/^} # func_arith /c\
++func_arith ()\
++{\
++ func_arith_result=$(( $* ))\
++} # Extended-shell func_arith implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++
++ sed -e '/^func_len ()$/,/^} # func_len /c\
++func_len ()\
++{\
++ func_len_result=${#1}\
++} # Extended-shell func_len implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++fi
++
++if test x"$lt_shell_append" = xyes; then
++ sed -e '/^func_append ()$/,/^} # func_append /c\
++func_append ()\
++{\
++ eval "${1}+=\\${2}"\
++} # Extended-shell func_append implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++
++ sed -e '/^func_append_quoted ()$/,/^} # func_append_quoted /c\
++func_append_quoted ()\
++{\
++\ func_quote_for_eval "${2}"\
++\ eval "${1}+=\\\\ \\$func_quote_for_eval_result"\
++} # Extended-shell func_append_quoted implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++
++ # Save a `func_append' function call where possible by direct use of '+='
++ sed -e 's%func_append \([a-zA-Z_]\{1,\}\) "%\1+="%g' $cfgfile > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++ test 0 -eq $? || _lt_function_replace_fail=:
++else
++ # Save a `func_append' function call even when '+=' is not available
++ sed -e 's%func_append \([a-zA-Z_]\{1,\}\) "%\1="$\1%g' $cfgfile > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++ test 0 -eq $? || _lt_function_replace_fail=:
++fi
++
++if test x"$_lt_function_replace_fail" = x":"; then
++ { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: Unable to substitute extended shell functions in $ofile" >&5
++$as_echo "$as_me: WARNING: Unable to substitute extended shell functions in $ofile" >&2;}
++fi
++
++
++ mv -f "$cfgfile" "$ofile" ||
+ (rm -f "$ofile" && cp "$cfgfile" "$ofile" && rm -f "$cfgfile")
+ chmod +x "$ofile"
+
+diff --git a/libtool.m4 b/libtool.m4
+index e36fdd3c0e2..4acf9d67956 100644
+--- a/libtool.m4
++++ b/libtool.m4
+@@ -1,7 +1,8 @@
+ # libtool.m4 - Configure libtool for the host system. -*-Autoconf-*-
+ #
+ # Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005,
+-# 2006, 2007, 2008, 2009 Free Software Foundation, Inc.
++# 2006, 2007, 2008, 2009, 2010 Free Software Foundation,
++# Inc.
+ # Written by Gordon Matzigkeit, 1996
+ #
+ # This file is free software; the Free Software Foundation gives
+@@ -10,7 +11,8 @@
+
+ m4_define([_LT_COPYING], [dnl
+ # Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005,
+-# 2006, 2007, 2008, 2009 Free Software Foundation, Inc.
++# 2006, 2007, 2008, 2009, 2010 Free Software Foundation,
++# Inc.
+ # Written by Gordon Matzigkeit, 1996
+ #
+ # This file is part of GNU Libtool.
+@@ -37,7 +39,7 @@ m4_define([_LT_COPYING], [dnl
+ # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+ ])
+
+-# serial 56 LT_INIT
++# serial 57 LT_INIT
+
+
+ # LT_PREREQ(VERSION)
+@@ -166,10 +168,13 @@ _LT_DECL([], [exeext], [0], [Executable file suffix (normally "")])dnl
+ dnl
+ m4_require([_LT_FILEUTILS_DEFAULTS])dnl
+ m4_require([_LT_CHECK_SHELL_FEATURES])dnl
++m4_require([_LT_PATH_CONVERSION_FUNCTIONS])dnl
+ m4_require([_LT_CMD_RELOAD])dnl
+ m4_require([_LT_CHECK_MAGIC_METHOD])dnl
++m4_require([_LT_CHECK_SHAREDLIB_FROM_LINKLIB])dnl
+ m4_require([_LT_CMD_OLD_ARCHIVE])dnl
+ m4_require([_LT_CMD_GLOBAL_SYMBOLS])dnl
++m4_require([_LT_WITH_SYSROOT])dnl
+
+ _LT_CONFIG_LIBTOOL_INIT([
+ # See if we are running on zsh, and set the options which allow our
+@@ -632,7 +637,7 @@ m4_ifset([AC_PACKAGE_NAME], [AC_PACKAGE_NAME ])config.lt[]dnl
+ m4_ifset([AC_PACKAGE_VERSION], [ AC_PACKAGE_VERSION])
+ configured by $[0], generated by m4_PACKAGE_STRING.
+
+-Copyright (C) 2009 Free Software Foundation, Inc.
++Copyright (C) 2010 Free Software Foundation, Inc.
+ This config.lt script is free software; the Free Software Foundation
+ gives unlimited permision to copy, distribute and modify it."
+
+@@ -746,15 +751,12 @@ _LT_EOF
+ # if finds mixed CR/LF and LF-only lines. Since sed operates in
+ # text mode, it properly converts lines to CR/LF. This bash problem
+ # is reportedly fixed, but why not run on old versions too?
+- sed '/^# Generated shell functions inserted here/q' "$ltmain" >> "$cfgfile" \
+- || (rm -f "$cfgfile"; exit 1)
++ sed '$q' "$ltmain" >> "$cfgfile" \
++ || (rm -f "$cfgfile"; exit 1)
+
+- _LT_PROG_XSI_SHELLFNS
++ _LT_PROG_REPLACE_SHELLFNS
+
+- sed -n '/^# Generated shell functions inserted here/,$p' "$ltmain" >> "$cfgfile" \
+- || (rm -f "$cfgfile"; exit 1)
+-
+- mv -f "$cfgfile" "$ofile" ||
++ mv -f "$cfgfile" "$ofile" ||
+ (rm -f "$ofile" && cp "$cfgfile" "$ofile" && rm -f "$cfgfile")
+ chmod +x "$ofile"
+ ],
+@@ -980,6 +982,8 @@ _LT_EOF
+ $LTCC $LTCFLAGS -c -o conftest.o conftest.c 2>&AS_MESSAGE_LOG_FD
+ echo "$AR cru libconftest.a conftest.o" >&AS_MESSAGE_LOG_FD
+ $AR cru libconftest.a conftest.o 2>&AS_MESSAGE_LOG_FD
++ echo "$RANLIB libconftest.a" >&AS_MESSAGE_LOG_FD
++ $RANLIB libconftest.a 2>&AS_MESSAGE_LOG_FD
+ cat > conftest.c << _LT_EOF
+ int main() { return 0;}
+ _LT_EOF
+@@ -1071,30 +1075,41 @@ m4_defun([_LT_DARWIN_LINKER_FEATURES],
+ fi
+ ])
+
+-# _LT_SYS_MODULE_PATH_AIX
+-# -----------------------
++# _LT_SYS_MODULE_PATH_AIX([TAGNAME])
++# ----------------------------------
+ # Links a minimal program and checks the executable
+ # for the system default hardcoded library path. In most cases,
+ # this is /usr/lib:/lib, but when the MPI compilers are used
+ # the location of the communication and MPI libs are included too.
+ # If we don't find anything, use the default library path according
+ # to the aix ld manual.
++# Store the results from the different compilers for each TAGNAME.
++# Allow to override them for all tags through lt_cv_aix_libpath.
+ m4_defun([_LT_SYS_MODULE_PATH_AIX],
+ [m4_require([_LT_DECL_SED])dnl
+-AC_LINK_IFELSE([AC_LANG_SOURCE([AC_LANG_PROGRAM])],[
+-lt_aix_libpath_sed='
+- /Import File Strings/,/^$/ {
+- /^0/ {
+- s/^0 *\(.*\)$/\1/
+- p
+- }
+- }'
+-aix_libpath=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
+-# Check for a 64-bit object if we didn't find anything.
+-if test -z "$aix_libpath"; then
+- aix_libpath=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
+-fi],[])
+-if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
++if test "${lt_cv_aix_libpath+set}" = set; then
++ aix_libpath=$lt_cv_aix_libpath
++else
++ AC_CACHE_VAL([_LT_TAGVAR([lt_cv_aix_libpath_], [$1])],
++ [AC_LINK_IFELSE([AC_LANG_PROGRAM],[
++ lt_aix_libpath_sed='[
++ /Import File Strings/,/^$/ {
++ /^0/ {
++ s/^0 *\([^ ]*\) *$/\1/
++ p
++ }
++ }]'
++ _LT_TAGVAR([lt_cv_aix_libpath_], [$1])=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
++ # Check for a 64-bit object if we didn't find anything.
++ if test -z "$_LT_TAGVAR([lt_cv_aix_libpath_], [$1])"; then
++ _LT_TAGVAR([lt_cv_aix_libpath_], [$1])=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
++ fi],[])
++ if test -z "$_LT_TAGVAR([lt_cv_aix_libpath_], [$1])"; then
++ _LT_TAGVAR([lt_cv_aix_libpath_], [$1])="/usr/lib:/lib"
++ fi
++ ])
++ aix_libpath=$_LT_TAGVAR([lt_cv_aix_libpath_], [$1])
++fi
+ ])# _LT_SYS_MODULE_PATH_AIX
+
+
+@@ -1119,7 +1134,7 @@ ECHO=$ECHO$ECHO$ECHO$ECHO$ECHO$ECHO
+
+ AC_MSG_CHECKING([how to print strings])
+ # Test print first, because it will be a builtin if present.
+-if test "X`print -r -- -n 2>/dev/null`" = X-n && \
++if test "X`( print -r -- -n ) 2>/dev/null`" = X-n && \
+ test "X`print -r -- $ECHO 2>/dev/null`" = "X$ECHO"; then
+ ECHO='print -r --'
+ elif test "X`printf %s $ECHO 2>/dev/null`" = "X$ECHO"; then
+@@ -1163,6 +1178,39 @@ _LT_DECL([], [ECHO], [1], [An echo program that protects backslashes])
+ ])# _LT_PROG_ECHO_BACKSLASH
+
+
++# _LT_WITH_SYSROOT
++# ----------------
++AC_DEFUN([_LT_WITH_SYSROOT],
++[AC_MSG_CHECKING([for sysroot])
++AC_ARG_WITH([libtool-sysroot],
++[ --with-libtool-sysroot[=DIR] Search for dependent libraries within DIR
++ (or the compiler's sysroot if not specified).],
++[], [with_libtool_sysroot=no])
++
++dnl lt_sysroot will always be passed unquoted. We quote it here
++dnl in case the user passed a directory name.
++lt_sysroot=
++case ${with_libtool_sysroot} in #(
++ yes)
++ if test "$GCC" = yes; then
++ lt_sysroot=`$CC --print-sysroot 2>/dev/null`
++ fi
++ ;; #(
++ /*)
++ lt_sysroot=`echo "$with_libtool_sysroot" | sed -e "$sed_quote_subst"`
++ ;; #(
++ no|'')
++ ;; #(
++ *)
++ AC_MSG_RESULT([${with_libtool_sysroot}])
++ AC_MSG_ERROR([The sysroot must be an absolute path.])
++ ;;
++esac
++
++ AC_MSG_RESULT([${lt_sysroot:-no}])
++_LT_DECL([], [lt_sysroot], [0], [The root where to search for ]dnl
++[dependent libraries, and in which our libraries should be installed.])])
++
+ # _LT_ENABLE_LOCK
+ # ---------------
+ m4_defun([_LT_ENABLE_LOCK],
+@@ -1322,6 +1370,51 @@ need_locks="$enable_libtool_lock"
+ ])# _LT_ENABLE_LOCK
+
+
++# _LT_PROG_AR
++# -----------
++m4_defun([_LT_PROG_AR],
++[AC_CHECK_TOOLS(AR, [ar], false)
++ touch conftest.c
++ $AR $plugin_option rc conftest.a conftest.c
++ if test "$?" != 0; then
++ AC_MSG_WARN([Failed: $AR $plugin_option rc])
++ else
++ AR="$AR $plugin_option"
++ fi
++ rm -f conftest.*
++: ${AR=ar}
++: ${AR_FLAGS=cru}
++_LT_DECL([], [AR], [1], [The archiver])
++_LT_DECL([], [AR_FLAGS], [1], [Flags to create an archive])
++
++AC_CACHE_CHECK([for archiver @FILE support], [lt_cv_ar_at_file],
++ [lt_cv_ar_at_file=no
++ AC_COMPILE_IFELSE([AC_LANG_PROGRAM],
++ [echo conftest.$ac_objext > conftest.lst
++ lt_ar_try='$AR $AR_FLAGS libconftest.a @conftest.lst >&AS_MESSAGE_LOG_FD'
++ AC_TRY_EVAL([lt_ar_try])
++ if test "$ac_status" -eq 0; then
++ # Ensure the archiver fails upon bogus file names.
++ rm -f conftest.$ac_objext libconftest.a
++ AC_TRY_EVAL([lt_ar_try])
++ if test "$ac_status" -ne 0; then
++ lt_cv_ar_at_file=@
++ fi
++ fi
++ rm -f conftest.* libconftest.a
++ ])
++ ])
++
++if test "x$lt_cv_ar_at_file" = xno; then
++ archiver_list_spec=
++else
++ archiver_list_spec=$lt_cv_ar_at_file
++fi
++_LT_DECL([], [archiver_list_spec], [1],
++ [How to feed a file listing to the archiver])
++])# _LT_PROG_AR
++
++
+ # _LT_CMD_OLD_ARCHIVE
+ # -------------------
+ m4_defun([_LT_CMD_OLD_ARCHIVE],
+@@ -1338,23 +1431,7 @@ for plugin in $plugin_names; do
+ fi
+ done
+
+-AC_CHECK_TOOL(AR, ar, false)
+-test -z "$AR" && AR=ar
+-if test -n "$plugin_option"; then
+- if $AR --help 2>&1 | grep -q "\--plugin"; then
+- touch conftest.c
+- $AR $plugin_option rc conftest.a conftest.c
+- if test "$?" != 0; then
+- AC_MSG_WARN([Failed: $AR $plugin_option rc])
+- else
+- AR="$AR $plugin_option"
+- fi
+- rm -f conftest.*
+- fi
+-fi
+-test -z "$AR_FLAGS" && AR_FLAGS=cru
+-_LT_DECL([], [AR], [1], [The archiver])
+-_LT_DECL([], [AR_FLAGS], [1])
++_LT_PROG_AR
+
+ AC_CHECK_TOOL(STRIP, strip, :)
+ test -z "$STRIP" && STRIP=:
+@@ -1655,7 +1732,7 @@ else
+ lt_dlunknown=0; lt_dlno_uscore=1; lt_dlneed_uscore=2
+ lt_status=$lt_dlunknown
+ cat > conftest.$ac_ext <<_LT_EOF
+-[#line __oline__ "configure"
++[#line $LINENO "configure"
+ #include "confdefs.h"
+
+ #if HAVE_DLFCN_H
+@@ -1699,10 +1776,10 @@ else
+ /* When -fvisbility=hidden is used, assume the code has been annotated
+ correspondingly for the symbols needed. */
+ #if defined(__GNUC__) && (((__GNUC__ == 3) && (__GNUC_MINOR__ >= 3)) || (__GNUC__ > 3))
+-void fnord () __attribute__((visibility("default")));
++int fnord () __attribute__((visibility("default")));
+ #endif
+
+-void fnord () { int i=42; }
++int fnord () { return 42; }
+ int main ()
+ {
+ void *self = dlopen (0, LT_DLGLOBAL|LT_DLLAZY_OR_NOW);
+@@ -2242,8 +2319,9 @@ cygwin* | mingw* | pw32* | cegcc*)
+ need_version=no
+ need_lib_prefix=no
+
+- case $GCC,$host_os in
+- yes,cygwin* | yes,mingw* | yes,pw32* | yes,cegcc*)
++ case $GCC,$cc_basename in
++ yes,*)
++ # gcc
+ library_names_spec='$libname.dll.a'
+ # DLL is installed to $(libdir)/../bin by postinstall_cmds
+ postinstall_cmds='base_file=`basename \${file}`~
+@@ -2276,13 +2354,71 @@ m4_if([$1], [],[
+ library_names_spec='`echo ${libname} | sed -e 's/^lib/pw/'``echo ${release} | $SED -e 's/[[.]]/-/g'`${versuffix}${shared_ext}'
+ ;;
+ esac
++ dynamic_linker='Win32 ld.exe'
++ ;;
++
++ *,cl*)
++ # Native MSVC
++ libname_spec='$name'
++ soname_spec='${libname}`echo ${release} | $SED -e 's/[[.]]/-/g'`${versuffix}${shared_ext}'
++ library_names_spec='${libname}.dll.lib'
++
++ case $build_os in
++ mingw*)
++ sys_lib_search_path_spec=
++ lt_save_ifs=$IFS
++ IFS=';'
++ for lt_path in $LIB
++ do
++ IFS=$lt_save_ifs
++ # Let DOS variable expansion print the short 8.3 style file name.
++ lt_path=`cd "$lt_path" 2>/dev/null && cmd //C "for %i in (".") do @echo %~si"`
++ sys_lib_search_path_spec="$sys_lib_search_path_spec $lt_path"
++ done
++ IFS=$lt_save_ifs
++ # Convert to MSYS style.
++ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | sed -e 's|\\\\|/|g' -e 's| \\([[a-zA-Z]]\\):| /\\1|g' -e 's|^ ||'`
++ ;;
++ cygwin*)
++ # Convert to unix form, then to dos form, then back to unix form
++ # but this time dos style (no spaces!) so that the unix form looks
++ # like /cygdrive/c/PROGRA~1:/cygdr...
++ sys_lib_search_path_spec=`cygpath --path --unix "$LIB"`
++ sys_lib_search_path_spec=`cygpath --path --dos "$sys_lib_search_path_spec" 2>/dev/null`
++ sys_lib_search_path_spec=`cygpath --path --unix "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"`
++ ;;
++ *)
++ sys_lib_search_path_spec="$LIB"
++ if $ECHO "$sys_lib_search_path_spec" | [$GREP ';[c-zC-Z]:/' >/dev/null]; then
++ # It is most probably a Windows format PATH.
++ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e 's/;/ /g'`
++ else
++ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"`
++ fi
++ # FIXME: find the short name or the path components, as spaces are
++ # common. (e.g. "Program Files" -> "PROGRA~1")
++ ;;
++ esac
++
++ # DLL is installed to $(libdir)/../bin by postinstall_cmds
++ postinstall_cmds='base_file=`basename \${file}`~
++ dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\${base_file}'\''i; echo \$dlname'\''`~
++ dldir=$destdir/`dirname \$dlpath`~
++ test -d \$dldir || mkdir -p \$dldir~
++ $install_prog $dir/$dlname \$dldir/$dlname'
++ postuninstall_cmds='dldll=`$SHELL 2>&1 -c '\''. $file; echo \$dlname'\''`~
++ dlpath=$dir/\$dldll~
++ $RM \$dlpath'
++ shlibpath_overrides_runpath=yes
++ dynamic_linker='Win32 link.exe'
+ ;;
+
+ *)
++ # Assume MSVC wrapper
+ library_names_spec='${libname}`echo ${release} | $SED -e 's/[[.]]/-/g'`${versuffix}${shared_ext} $libname.lib'
++ dynamic_linker='Win32 ld.exe'
+ ;;
+ esac
+- dynamic_linker='Win32 ld.exe'
+ # FIXME: first we should search . and the directory the executable is in
+ shlibpath_var=PATH
+ ;;
+@@ -2996,6 +3132,11 @@ case $reload_flag in
+ esac
+ reload_cmds='$LD$reload_flag -o $output$reload_objs'
+ case $host_os in
++ cygwin* | mingw* | pw32* | cegcc*)
++ if test "$GCC" != yes; then
++ reload_cmds=false
++ fi
++ ;;
+ darwin*)
+ if test "$GCC" = yes; then
+ reload_cmds='$LTCC $LTCFLAGS -nostdlib ${wl}-r -o $output$reload_objs'
+@@ -3062,7 +3203,8 @@ mingw* | pw32*)
+ lt_cv_deplibs_check_method='file_magic ^x86 archive import|^x86 DLL'
+ lt_cv_file_magic_cmd='func_win32_libid'
+ else
+- lt_cv_deplibs_check_method='file_magic file format pei*-i386(.*architecture: i386)?'
++ # Keep this pattern in sync with the one in func_win32_libid.
++ lt_cv_deplibs_check_method='file_magic file format (pei*-i386(.*architecture: i386)?|pe-arm-wince|pe-x86-64)'
+ lt_cv_file_magic_cmd='$OBJDUMP -f'
+ fi
+ ;;
+@@ -3218,6 +3360,21 @@ vxworks*)
+ ;;
+ esac
+ ])
++
++file_magic_glob=
++want_nocaseglob=no
++if test "$build" = "$host"; then
++ case $host_os in
++ mingw* | pw32*)
++ if ( shopt | grep nocaseglob ) >/dev/null 2>&1; then
++ want_nocaseglob=yes
++ else
++ file_magic_glob=`echo aAbBcCdDeEfFgGhHiIjJkKlLmMnNoOpPqQrRsStTuUvVwWxXyYzZ | $SED -e "s/\(..\)/s\/[[\1]]\/[[\1]]\/g;/g"`
++ fi
++ ;;
++ esac
++fi
++
+ file_magic_cmd=$lt_cv_file_magic_cmd
+ deplibs_check_method=$lt_cv_deplibs_check_method
+ test -z "$deplibs_check_method" && deplibs_check_method=unknown
+@@ -3225,7 +3382,11 @@ test -z "$deplibs_check_method" && deplibs_check_method=unknown
+ _LT_DECL([], [deplibs_check_method], [1],
+ [Method to check whether dependent libraries are shared objects])
+ _LT_DECL([], [file_magic_cmd], [1],
+- [Command to use when deplibs_check_method == "file_magic"])
++ [Command to use when deplibs_check_method = "file_magic"])
++_LT_DECL([], [file_magic_glob], [1],
++ [How to find potential files when deplibs_check_method = "file_magic"])
++_LT_DECL([], [want_nocaseglob], [1],
++ [Find potential files using nocaseglob when deplibs_check_method = "file_magic"])
+ ])# _LT_CHECK_MAGIC_METHOD
+
+
+@@ -3336,6 +3497,67 @@ dnl aclocal-1.4 backwards compatibility:
+ dnl AC_DEFUN([AM_PROG_NM], [])
+ dnl AC_DEFUN([AC_PROG_NM], [])
+
++# _LT_CHECK_SHAREDLIB_FROM_LINKLIB
++# --------------------------------
++# how to determine the name of the shared library
++# associated with a specific link library.
++# -- PORTME fill in with the dynamic library characteristics
++m4_defun([_LT_CHECK_SHAREDLIB_FROM_LINKLIB],
++[m4_require([_LT_DECL_EGREP])
++m4_require([_LT_DECL_OBJDUMP])
++m4_require([_LT_DECL_DLLTOOL])
++AC_CACHE_CHECK([how to associate runtime and link libraries],
++lt_cv_sharedlib_from_linklib_cmd,
++[lt_cv_sharedlib_from_linklib_cmd='unknown'
++
++case $host_os in
++cygwin* | mingw* | pw32* | cegcc*)
++ # two different shell functions defined in ltmain.sh
++ # decide which to use based on capabilities of $DLLTOOL
++ case `$DLLTOOL --help 2>&1` in
++ *--identify-strict*)
++ lt_cv_sharedlib_from_linklib_cmd=func_cygming_dll_for_implib
++ ;;
++ *)
++ lt_cv_sharedlib_from_linklib_cmd=func_cygming_dll_for_implib_fallback
++ ;;
++ esac
++ ;;
++*)
++ # fallback: assume linklib IS sharedlib
++ lt_cv_sharedlib_from_linklib_cmd="$ECHO"
++ ;;
++esac
++])
++sharedlib_from_linklib_cmd=$lt_cv_sharedlib_from_linklib_cmd
++test -z "$sharedlib_from_linklib_cmd" && sharedlib_from_linklib_cmd=$ECHO
++
++_LT_DECL([], [sharedlib_from_linklib_cmd], [1],
++ [Command to associate shared and link libraries])
++])# _LT_CHECK_SHAREDLIB_FROM_LINKLIB
++
++
++# _LT_PATH_MANIFEST_TOOL
++# ----------------------
++# locate the manifest tool
++m4_defun([_LT_PATH_MANIFEST_TOOL],
++[AC_CHECK_TOOL(MANIFEST_TOOL, mt, :)
++test -z "$MANIFEST_TOOL" && MANIFEST_TOOL=mt
++AC_CACHE_CHECK([if $MANIFEST_TOOL is a manifest tool], [lt_cv_path_mainfest_tool],
++ [lt_cv_path_mainfest_tool=no
++ echo "$as_me:$LINENO: $MANIFEST_TOOL '-?'" >&AS_MESSAGE_LOG_FD
++ $MANIFEST_TOOL '-?' 2>conftest.err > conftest.out
++ cat conftest.err >&AS_MESSAGE_LOG_FD
++ if $GREP 'Manifest Tool' conftest.out > /dev/null; then
++ lt_cv_path_mainfest_tool=yes
++ fi
++ rm -f conftest*])
++if test "x$lt_cv_path_mainfest_tool" != xyes; then
++ MANIFEST_TOOL=:
++fi
++_LT_DECL([], [MANIFEST_TOOL], [1], [Manifest tool])dnl
++])# _LT_PATH_MANIFEST_TOOL
++
+
+ # LT_LIB_M
+ # --------
+@@ -3462,8 +3684,8 @@ esac
+ lt_cv_sys_global_symbol_to_cdecl="sed -n -e 's/^T .* \(.*\)$/extern int \1();/p' -e 's/^$symcode* .* \(.*\)$/extern char \1;/p'"
+
+ # Transform an extracted symbol line into symbol name and symbol address
+-lt_cv_sys_global_symbol_to_c_name_address="sed -n -e 's/^: \([[^ ]]*\) $/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([[^ ]]*\) \([[^ ]]*\)$/ {\"\2\", (void *) \&\2},/p'"
+-lt_cv_sys_global_symbol_to_c_name_address_lib_prefix="sed -n -e 's/^: \([[^ ]]*\) $/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([[^ ]]*\) \(lib[[^ ]]*\)$/ {\"\2\", (void *) \&\2},/p' -e 's/^$symcode* \([[^ ]]*\) \([[^ ]]*\)$/ {\"lib\2\", (void *) \&\2},/p'"
++lt_cv_sys_global_symbol_to_c_name_address="sed -n -e 's/^: \([[^ ]]*\)[[ ]]*$/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([[^ ]]*\) \([[^ ]]*\)$/ {\"\2\", (void *) \&\2},/p'"
++lt_cv_sys_global_symbol_to_c_name_address_lib_prefix="sed -n -e 's/^: \([[^ ]]*\)[[ ]]*$/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([[^ ]]*\) \(lib[[^ ]]*\)$/ {\"\2\", (void *) \&\2},/p' -e 's/^$symcode* \([[^ ]]*\) \([[^ ]]*\)$/ {\"lib\2\", (void *) \&\2},/p'"
+
+ # Handle CRLF in mingw tool chain
+ opt_cr=
+@@ -3499,6 +3721,7 @@ for ac_symprfx in "" "_"; do
+ else
+ lt_cv_sys_global_symbol_pipe="sed -n -e 's/^.*[[ ]]\($symcode$symcode*\)[[ ]][[ ]]*$ac_symprfx$sympat$opt_cr$/$symxfrm/p'"
+ fi
++ lt_cv_sys_global_symbol_pipe="$lt_cv_sys_global_symbol_pipe | sed '/ __gnu_lto/d'"
+
+ # Check to see that the pipe works correctly.
+ pipe_works=no
+@@ -3532,6 +3755,18 @@ _LT_EOF
+ if $GREP ' nm_test_var$' "$nlist" >/dev/null; then
+ if $GREP ' nm_test_func$' "$nlist" >/dev/null; then
+ cat <<_LT_EOF > conftest.$ac_ext
++/* Keep this code in sync between libtool.m4, ltmain, lt_system.h, and tests. */
++#if defined(_WIN32) || defined(__CYGWIN__) || defined(_WIN32_WCE)
++/* DATA imports from DLLs on WIN32 con't be const, because runtime
++ relocations are performed -- see ld's documentation on pseudo-relocs. */
++# define LT@&t@_DLSYM_CONST
++#elif defined(__osf__)
++/* This system does not cope well with relocations in const data. */
++# define LT@&t@_DLSYM_CONST
++#else
++# define LT@&t@_DLSYM_CONST const
++#endif
++
+ #ifdef __cplusplus
+ extern "C" {
+ #endif
+@@ -3543,7 +3778,7 @@ _LT_EOF
+ cat <<_LT_EOF >> conftest.$ac_ext
+
+ /* The mapping between symbol names and symbols. */
+-const struct {
++LT@&t@_DLSYM_CONST struct {
+ const char *name;
+ void *address;
+ }
+@@ -3569,15 +3804,15 @@ static const void *lt_preloaded_setup() {
+ _LT_EOF
+ # Now try linking the two files.
+ mv conftest.$ac_objext conftstm.$ac_objext
+- lt_save_LIBS="$LIBS"
+- lt_save_CFLAGS="$CFLAGS"
++ lt_globsym_save_LIBS=$LIBS
++ lt_globsym_save_CFLAGS=$CFLAGS
+ LIBS="conftstm.$ac_objext"
+ CFLAGS="$CFLAGS$_LT_TAGVAR(lt_prog_compiler_no_builtin_flag, $1)"
+ if AC_TRY_EVAL(ac_link) && test -s conftest${ac_exeext}; then
+ pipe_works=yes
+ fi
+- LIBS="$lt_save_LIBS"
+- CFLAGS="$lt_save_CFLAGS"
++ LIBS=$lt_globsym_save_LIBS
++ CFLAGS=$lt_globsym_save_CFLAGS
+ else
+ echo "cannot find nm_test_func in $nlist" >&AS_MESSAGE_LOG_FD
+ fi
+@@ -3610,6 +3845,13 @@ else
+ AC_MSG_RESULT(ok)
+ fi
+
++# Response file support.
++if test "$lt_cv_nm_interface" = "MS dumpbin"; then
++ nm_file_list_spec='@'
++elif $NM --help 2>/dev/null | grep '[[@]]FILE' >/dev/null; then
++ nm_file_list_spec='@'
++fi
++
+ _LT_DECL([global_symbol_pipe], [lt_cv_sys_global_symbol_pipe], [1],
+ [Take the output of nm and produce a listing of raw symbols and C names])
+ _LT_DECL([global_symbol_to_cdecl], [lt_cv_sys_global_symbol_to_cdecl], [1],
+@@ -3620,6 +3862,8 @@ _LT_DECL([global_symbol_to_c_name_address],
+ _LT_DECL([global_symbol_to_c_name_address_lib_prefix],
+ [lt_cv_sys_global_symbol_to_c_name_address_lib_prefix], [1],
+ [Transform the output of nm in a C name address pair when lib prefix is needed])
++_LT_DECL([], [nm_file_list_spec], [1],
++ [Specify filename containing input files for $NM])
+ ]) # _LT_CMD_GLOBAL_SYMBOLS
+
+
+@@ -3631,7 +3875,6 @@ _LT_TAGVAR(lt_prog_compiler_wl, $1)=
+ _LT_TAGVAR(lt_prog_compiler_pic, $1)=
+ _LT_TAGVAR(lt_prog_compiler_static, $1)=
+
+-AC_MSG_CHECKING([for $compiler option to produce PIC])
+ m4_if([$1], [CXX], [
+ # C++ specific cases for pic, static, wl, etc.
+ if test "$GXX" = yes; then
+@@ -3737,6 +3980,12 @@ m4_if([$1], [CXX], [
+ ;;
+ esac
+ ;;
++ mingw* | cygwin* | os2* | pw32* | cegcc*)
++ # This hack is so that the source file can tell whether it is being
++ # built for inclusion in a dll (and should export symbols for example).
++ m4_if([$1], [GCJ], [],
++ [_LT_TAGVAR(lt_prog_compiler_pic, $1)='-DDLL_EXPORT'])
++ ;;
+ dgux*)
+ case $cc_basename in
+ ec++*)
+@@ -3889,7 +4138,7 @@ m4_if([$1], [CXX], [
+ ;;
+ solaris*)
+ case $cc_basename in
+- CC*)
++ CC* | sunCC*)
+ # Sun C++ 4.2, 5.x and Centerline C++
+ _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC'
+ _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+@@ -4112,6 +4361,12 @@ m4_if([$1], [CXX], [
+ _LT_TAGVAR(lt_prog_compiler_pic, $1)='--shared'
+ _LT_TAGVAR(lt_prog_compiler_static, $1)='--static'
+ ;;
++ nagfor*)
++ # NAG Fortran compiler
++ _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,-Wl,,'
++ _LT_TAGVAR(lt_prog_compiler_pic, $1)='-PIC'
++ _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
++ ;;
+ pgcc* | pgf77* | pgf90* | pgf95* | pgfortran*)
+ # Portland Group compilers (*not* the Pentium gcc compiler,
+ # which looks to be a dead project)
+@@ -4174,7 +4429,7 @@ m4_if([$1], [CXX], [
+ _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC'
+ _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+ case $cc_basename in
+- f77* | f90* | f95*)
++ f77* | f90* | f95* | sunf77* | sunf90* | sunf95*)
+ _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Qoption ld ';;
+ *)
+ _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,';;
+@@ -4231,9 +4486,11 @@ case $host_os in
+ _LT_TAGVAR(lt_prog_compiler_pic, $1)="$_LT_TAGVAR(lt_prog_compiler_pic, $1)@&t@m4_if([$1],[],[ -DPIC],[m4_if([$1],[CXX],[ -DPIC],[])])"
+ ;;
+ esac
+-AC_MSG_RESULT([$_LT_TAGVAR(lt_prog_compiler_pic, $1)])
+-_LT_TAGDECL([wl], [lt_prog_compiler_wl], [1],
+- [How to pass a linker flag through the compiler])
++
++AC_CACHE_CHECK([for $compiler option to produce PIC],
++ [_LT_TAGVAR(lt_cv_prog_compiler_pic, $1)],
++ [_LT_TAGVAR(lt_cv_prog_compiler_pic, $1)=$_LT_TAGVAR(lt_prog_compiler_pic, $1)])
++_LT_TAGVAR(lt_prog_compiler_pic, $1)=$_LT_TAGVAR(lt_cv_prog_compiler_pic, $1)
+
+ #
+ # Check to make sure the PIC flag actually works.
+@@ -4252,6 +4509,8 @@ fi
+ _LT_TAGDECL([pic_flag], [lt_prog_compiler_pic], [1],
+ [Additional compiler flags for building library objects])
+
++_LT_TAGDECL([wl], [lt_prog_compiler_wl], [1],
++ [How to pass a linker flag through the compiler])
+ #
+ # Check to make sure the static flag actually works.
+ #
+@@ -4272,6 +4531,7 @@ _LT_TAGDECL([link_static_flag], [lt_prog_compiler_static], [1],
+ m4_defun([_LT_LINKER_SHLIBS],
+ [AC_REQUIRE([LT_PATH_LD])dnl
+ AC_REQUIRE([LT_PATH_NM])dnl
++m4_require([_LT_PATH_MANIFEST_TOOL])dnl
+ m4_require([_LT_FILEUTILS_DEFAULTS])dnl
+ m4_require([_LT_DECL_EGREP])dnl
+ m4_require([_LT_DECL_SED])dnl
+@@ -4280,6 +4540,7 @@ m4_require([_LT_TAG_COMPILER])dnl
+ AC_MSG_CHECKING([whether the $compiler linker ($LD) supports shared libraries])
+ m4_if([$1], [CXX], [
+ _LT_TAGVAR(export_symbols_cmds, $1)='$NM $libobjs $convenience | $global_symbol_pipe | $SED '\''s/.* //'\'' | sort | uniq > $export_symbols'
++ _LT_TAGVAR(exclude_expsyms, $1)=['_GLOBAL_OFFSET_TABLE_|_GLOBAL__F[ID]_.*']
+ case $host_os in
+ aix[[4-9]]*)
+ # If we're using GNU nm, then we don't want the "-C" option.
+@@ -4294,15 +4555,20 @@ m4_if([$1], [CXX], [
+ ;;
+ pw32*)
+ _LT_TAGVAR(export_symbols_cmds, $1)="$ltdll_cmds"
+- ;;
++ ;;
+ cygwin* | mingw* | cegcc*)
+- _LT_TAGVAR(export_symbols_cmds, $1)='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[[BCDGRS]][[ ]]/s/.*[[ ]]\([[^ ]]*\)/\1 DATA/;/^.*[[ ]]__nm__/s/^.*[[ ]]__nm__\([[^ ]]*\)[[ ]][[^ ]]*/\1 DATA/;/^I[[ ]]/d;/^[[AITW]][[ ]]/s/.* //'\'' | sort | uniq > $export_symbols'
+- ;;
++ case $cc_basename in
++ cl*) ;;
++ *)
++ _LT_TAGVAR(export_symbols_cmds, $1)='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[[BCDGRS]][[ ]]/s/.*[[ ]]\([[^ ]]*\)/\1 DATA/;s/^.*[[ ]]__nm__\([[^ ]]*\)[[ ]][[^ ]]*/\1 DATA/;/^I[[ ]]/d;/^[[AITW]][[ ]]/s/.* //'\'' | sort | uniq > $export_symbols'
++ _LT_TAGVAR(exclude_expsyms, $1)=['[_]+GLOBAL_OFFSET_TABLE_|[_]+GLOBAL__[FID]_.*|[_]+head_[A-Za-z0-9_]+_dll|[A-Za-z0-9_]+_dll_iname']
++ ;;
++ esac
++ ;;
+ *)
+ _LT_TAGVAR(export_symbols_cmds, $1)='$NM $libobjs $convenience | $global_symbol_pipe | $SED '\''s/.* //'\'' | sort | uniq > $export_symbols'
+- ;;
++ ;;
+ esac
+- _LT_TAGVAR(exclude_expsyms, $1)=['_GLOBAL_OFFSET_TABLE_|_GLOBAL__F[ID]_.*']
+ ], [
+ runpath_var=
+ _LT_TAGVAR(allow_undefined_flag, $1)=
+@@ -4470,7 +4736,8 @@ _LT_EOF
+ _LT_TAGVAR(allow_undefined_flag, $1)=unsupported
+ _LT_TAGVAR(always_export_symbols, $1)=no
+ _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=yes
+- _LT_TAGVAR(export_symbols_cmds, $1)='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[[BCDGRS]][[ ]]/s/.*[[ ]]\([[^ ]]*\)/\1 DATA/'\'' | $SED -e '\''/^[[AITW]][[ ]]/s/.*[[ ]]//'\'' | sort | uniq > $export_symbols'
++ _LT_TAGVAR(export_symbols_cmds, $1)='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[[BCDGRS]][[ ]]/s/.*[[ ]]\([[^ ]]*\)/\1 DATA/;s/^.*[[ ]]__nm__\([[^ ]]*\)[[ ]][[^ ]]*/\1 DATA/;/^I[[ ]]/d;/^[[AITW]][[ ]]/s/.* //'\'' | sort | uniq > $export_symbols'
++ _LT_TAGVAR(exclude_expsyms, $1)=['[_]+GLOBAL_OFFSET_TABLE_|[_]+GLOBAL__[FID]_.*|[_]+head_[A-Za-z0-9_]+_dll|[A-Za-z0-9_]+_dll_iname']
+
+ if $LD --help 2>&1 | $GREP 'auto-import' > /dev/null; then
+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib'
+@@ -4569,12 +4836,12 @@ _LT_EOF
+ _LT_TAGVAR(whole_archive_flag_spec, $1)='--whole-archive$convenience --no-whole-archive'
+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)=
+ _LT_TAGVAR(hardcode_libdir_flag_spec_ld, $1)='-rpath $libdir'
+- _LT_TAGVAR(archive_cmds, $1)='$LD -shared $libobjs $deplibs $compiler_flags -soname $soname -o $lib'
++ _LT_TAGVAR(archive_cmds, $1)='$LD -shared $libobjs $deplibs $linker_flags -soname $soname -o $lib'
+ if test "x$supports_anon_versioning" = xyes; then
+ _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $output_objdir/$libname.ver~
+ cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~
+ echo "local: *; };" >> $output_objdir/$libname.ver~
+- $LD -shared $libobjs $deplibs $compiler_flags -soname $soname -version-script $output_objdir/$libname.ver -o $lib'
++ $LD -shared $libobjs $deplibs $linker_flags -soname $soname -version-script $output_objdir/$libname.ver -o $lib'
+ fi
+ ;;
+ esac
+@@ -4588,8 +4855,8 @@ _LT_EOF
+ _LT_TAGVAR(archive_cmds, $1)='$LD -Bshareable $libobjs $deplibs $linker_flags -o $lib'
+ wlarc=
+ else
+- _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
+- _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
++ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
++ _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
+ fi
+ ;;
+
+@@ -4607,8 +4874,8 @@ _LT_EOF
+
+ _LT_EOF
+ elif $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then
+- _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
+- _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
++ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
++ _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
+ else
+ _LT_TAGVAR(ld_shlibs, $1)=no
+ fi
+@@ -4654,8 +4921,8 @@ _LT_EOF
+
+ *)
+ if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then
+- _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
+- _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
++ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
++ _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
+ else
+ _LT_TAGVAR(ld_shlibs, $1)=no
+ fi
+@@ -4785,7 +5052,7 @@ _LT_EOF
+ _LT_TAGVAR(allow_undefined_flag, $1)='-berok'
+ # Determine the default libpath from the value encoded in an
+ # empty executable.
+- _LT_SYS_MODULE_PATH_AIX
++ _LT_SYS_MODULE_PATH_AIX([$1])
+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-blibpath:$libdir:'"$aix_libpath"
+ _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags `if test "x${allow_undefined_flag}" != "x"; then func_echo_all "${wl}${allow_undefined_flag}"; else :; fi` '"\${wl}$exp_sym_flag:\$export_symbols $shared_flag"
+ else
+@@ -4796,7 +5063,7 @@ _LT_EOF
+ else
+ # Determine the default libpath from the value encoded in an
+ # empty executable.
+- _LT_SYS_MODULE_PATH_AIX
++ _LT_SYS_MODULE_PATH_AIX([$1])
+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-blibpath:$libdir:'"$aix_libpath"
+ # Warning - without using the other run time loading flags,
+ # -berok will link without error, but may produce a broken library.
+@@ -4840,20 +5107,63 @@ _LT_EOF
+ # Microsoft Visual C++.
+ # hardcode_libdir_flag_spec is actually meaningless, as there is
+ # no search path for DLLs.
+- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)=' '
+- _LT_TAGVAR(allow_undefined_flag, $1)=unsupported
+- # Tell ltmain to make .lib files, not .a files.
+- libext=lib
+- # Tell ltmain to make .dll files, not .so files.
+- shrext_cmds=".dll"
+- # FIXME: Setting linknames here is a bad hack.
+- _LT_TAGVAR(archive_cmds, $1)='$CC -o $lib $libobjs $compiler_flags `func_echo_all "$deplibs" | $SED '\''s/ -lc$//'\''` -link -dll~linknames='
+- # The linker will automatically build a .lib file if we build a DLL.
+- _LT_TAGVAR(old_archive_from_new_cmds, $1)='true'
+- # FIXME: Should let the user specify the lib program.
+- _LT_TAGVAR(old_archive_cmds, $1)='lib -OUT:$oldlib$oldobjs$old_deplibs'
+- _LT_TAGVAR(fix_srcfile_path, $1)='`cygpath -w "$srcfile"`'
+- _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=yes
++ case $cc_basename in
++ cl*)
++ # Native MSVC
++ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)=' '
++ _LT_TAGVAR(allow_undefined_flag, $1)=unsupported
++ _LT_TAGVAR(always_export_symbols, $1)=yes
++ _LT_TAGVAR(file_list_spec, $1)='@'
++ # Tell ltmain to make .lib files, not .a files.
++ libext=lib
++ # Tell ltmain to make .dll files, not .so files.
++ shrext_cmds=".dll"
++ # FIXME: Setting linknames here is a bad hack.
++ _LT_TAGVAR(archive_cmds, $1)='$CC -o $output_objdir/$soname $libobjs $compiler_flags $deplibs -Wl,-dll~linknames='
++ _LT_TAGVAR(archive_expsym_cmds, $1)='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then
++ sed -n -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' -e '1\\\!p' < $export_symbols > $output_objdir/$soname.exp;
++ else
++ sed -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' < $export_symbols > $output_objdir/$soname.exp;
++ fi~
++ $CC -o $tool_output_objdir$soname $libobjs $compiler_flags $deplibs "@$tool_output_objdir$soname.exp" -Wl,-DLL,-IMPLIB:"$tool_output_objdir$libname.dll.lib"~
++ linknames='
++ # The linker will not automatically build a static lib if we build a DLL.
++ # _LT_TAGVAR(old_archive_from_new_cmds, $1)='true'
++ _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=yes
++ _LT_TAGVAR(export_symbols_cmds, $1)='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[[BCDGRS]][[ ]]/s/.*[[ ]]\([[^ ]]*\)/\1,DATA/'\'' | $SED -e '\''/^[[AITW]][[ ]]/s/.*[[ ]]//'\'' | sort | uniq > $export_symbols'
++ # Don't use ranlib
++ _LT_TAGVAR(old_postinstall_cmds, $1)='chmod 644 $oldlib'
++ _LT_TAGVAR(postlink_cmds, $1)='lt_outputfile="@OUTPUT@"~
++ lt_tool_outputfile="@TOOL_OUTPUT@"~
++ case $lt_outputfile in
++ *.exe|*.EXE) ;;
++ *)
++ lt_outputfile="$lt_outputfile.exe"
++ lt_tool_outputfile="$lt_tool_outputfile.exe"
++ ;;
++ esac~
++ if test "$MANIFEST_TOOL" != ":" && test -f "$lt_outputfile.manifest"; then
++ $MANIFEST_TOOL -manifest "$lt_tool_outputfile.manifest" -outputresource:"$lt_tool_outputfile" || exit 1;
++ $RM "$lt_outputfile.manifest";
++ fi'
++ ;;
++ *)
++ # Assume MSVC wrapper
++ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)=' '
++ _LT_TAGVAR(allow_undefined_flag, $1)=unsupported
++ # Tell ltmain to make .lib files, not .a files.
++ libext=lib
++ # Tell ltmain to make .dll files, not .so files.
++ shrext_cmds=".dll"
++ # FIXME: Setting linknames here is a bad hack.
++ _LT_TAGVAR(archive_cmds, $1)='$CC -o $lib $libobjs $compiler_flags `func_echo_all "$deplibs" | $SED '\''s/ -lc$//'\''` -link -dll~linknames='
++ # The linker will automatically build a .lib file if we build a DLL.
++ _LT_TAGVAR(old_archive_from_new_cmds, $1)='true'
++ # FIXME: Should let the user specify the lib program.
++ _LT_TAGVAR(old_archive_cmds, $1)='lib -OUT:$oldlib$oldobjs$old_deplibs'
++ _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=yes
++ ;;
++ esac
+ ;;
+
+ darwin* | rhapsody*)
+@@ -4887,7 +5197,7 @@ _LT_EOF
+
+ # FreeBSD 3 and greater uses gcc -shared to do shared libraries.
+ freebsd* | dragonfly*)
+- _LT_TAGVAR(archive_cmds, $1)='$CC -shared -o $lib $libobjs $deplibs $compiler_flags'
++ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags'
+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-R$libdir'
+ _LT_TAGVAR(hardcode_direct, $1)=yes
+ _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+@@ -4895,7 +5205,7 @@ _LT_EOF
+
+ hpux9*)
+ if test "$GCC" = yes; then
+- _LT_TAGVAR(archive_cmds, $1)='$RM $output_objdir/$soname~$CC -shared -fPIC ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $libobjs $deplibs $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib'
++ _LT_TAGVAR(archive_cmds, $1)='$RM $output_objdir/$soname~$CC -shared $pic_flag ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $libobjs $deplibs $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib'
+ else
+ _LT_TAGVAR(archive_cmds, $1)='$RM $output_objdir/$soname~$LD -b +b $install_libdir -o $output_objdir/$soname $libobjs $deplibs $linker_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib'
+ fi
+@@ -4911,7 +5221,7 @@ _LT_EOF
+
+ hpux10*)
+ if test "$GCC" = yes && test "$with_gnu_ld" = no; then
+- _LT_TAGVAR(archive_cmds, $1)='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'
++ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'
+ else
+ _LT_TAGVAR(archive_cmds, $1)='$LD -b +h $soname +b $install_libdir -o $lib $libobjs $deplibs $linker_flags'
+ fi
+@@ -4935,10 +5245,10 @@ _LT_EOF
+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags'
+ ;;
+ ia64*)
+- _LT_TAGVAR(archive_cmds, $1)='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags'
++ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags'
+ ;;
+ *)
+- _LT_TAGVAR(archive_cmds, $1)='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'
++ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'
+ ;;
+ esac
+ else
+@@ -4985,16 +5295,31 @@ _LT_EOF
+
+ irix5* | irix6* | nonstopux*)
+ if test "$GCC" = yes; then
+- _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
++ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
+ # Try to use the -exported_symbol ld option, if it does not
+ # work, assume that -exports_file does not work either and
+ # implicitly export all symbols.
+- save_LDFLAGS="$LDFLAGS"
+- LDFLAGS="$LDFLAGS -shared ${wl}-exported_symbol ${wl}foo ${wl}-update_registry ${wl}/dev/null"
+- AC_LINK_IFELSE([AC_LANG_SOURCE([int foo(void) {}])],
+- _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations ${wl}-exports_file ${wl}$export_symbols -o $lib'
+- )
+- LDFLAGS="$save_LDFLAGS"
++ # This should be the same for all languages, so no per-tag cache variable.
++ AC_CACHE_CHECK([whether the $host_os linker accepts -exported_symbol],
++ [lt_cv_irix_exported_symbol],
++ [save_LDFLAGS="$LDFLAGS"
++ LDFLAGS="$LDFLAGS -shared ${wl}-exported_symbol ${wl}foo ${wl}-update_registry ${wl}/dev/null"
++ AC_LINK_IFELSE(
++ [AC_LANG_SOURCE(
++ [AC_LANG_CASE([C], [[int foo (void) { return 0; }]],
++ [C++], [[int foo (void) { return 0; }]],
++ [Fortran 77], [[
++ subroutine foo
++ end]],
++ [Fortran], [[
++ subroutine foo
++ end]])])],
++ [lt_cv_irix_exported_symbol=yes],
++ [lt_cv_irix_exported_symbol=no])
++ LDFLAGS="$save_LDFLAGS"])
++ if test "$lt_cv_irix_exported_symbol" = yes; then
++ _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations ${wl}-exports_file ${wl}$export_symbols -o $lib'
++ fi
+ else
+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib'
+ _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -exports_file $export_symbols -o $lib'
+@@ -5079,7 +5404,7 @@ _LT_EOF
+ osf4* | osf5*) # as osf3* with the addition of -msym flag
+ if test "$GCC" = yes; then
+ _LT_TAGVAR(allow_undefined_flag, $1)=' ${wl}-expect_unresolved ${wl}\*'
+- _LT_TAGVAR(archive_cmds, $1)='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
++ _LT_TAGVAR(archive_cmds, $1)='$CC -shared${allow_undefined_flag} $pic_flag $libobjs $deplibs $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir'
+ else
+ _LT_TAGVAR(allow_undefined_flag, $1)=' -expect_unresolved \*'
+@@ -5098,9 +5423,9 @@ _LT_EOF
+ _LT_TAGVAR(no_undefined_flag, $1)=' -z defs'
+ if test "$GCC" = yes; then
+ wlarc='${wl}'
+- _LT_TAGVAR(archive_cmds, $1)='$CC -shared ${wl}-z ${wl}text ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags'
++ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag ${wl}-z ${wl}text ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags'
+ _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~
+- $CC -shared ${wl}-z ${wl}text ${wl}-M ${wl}$lib.exp ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp'
++ $CC -shared $pic_flag ${wl}-z ${wl}text ${wl}-M ${wl}$lib.exp ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp'
+ else
+ case `$CC -V 2>&1` in
+ *"Compilers 5.0"*)
+@@ -5372,8 +5697,6 @@ _LT_TAGDECL([], [inherit_rpath], [0],
+ to runtime path list])
+ _LT_TAGDECL([], [link_all_deplibs], [0],
+ [Whether libtool must link a program against all its dependency libraries])
+-_LT_TAGDECL([], [fix_srcfile_path], [1],
+- [Fix the shell variable $srcfile for the compiler])
+ _LT_TAGDECL([], [always_export_symbols], [0],
+ [Set to "yes" if exported symbols are required])
+ _LT_TAGDECL([], [export_symbols_cmds], [2],
+@@ -5384,6 +5707,8 @@ _LT_TAGDECL([], [include_expsyms], [1],
+ [Symbols that must always be exported])
+ _LT_TAGDECL([], [prelink_cmds], [2],
+ [Commands necessary for linking programs (against libraries) with templates])
++_LT_TAGDECL([], [postlink_cmds], [2],
++ [Commands necessary for finishing linking programs])
+ _LT_TAGDECL([], [file_list_spec], [1],
+ [Specify filename containing input files])
+ dnl FIXME: Not yet implemented
+@@ -5485,6 +5810,7 @@ CC="$lt_save_CC"
+ m4_defun([_LT_LANG_CXX_CONFIG],
+ [m4_require([_LT_FILEUTILS_DEFAULTS])dnl
+ m4_require([_LT_DECL_EGREP])dnl
++m4_require([_LT_PATH_MANIFEST_TOOL])dnl
+ if test -n "$CXX" && ( test "X$CXX" != "Xno" &&
+ ( (test "X$CXX" = "Xg++" && `g++ -v >/dev/null 2>&1` ) ||
+ (test "X$CXX" != "Xg++"))) ; then
+@@ -5546,6 +5872,7 @@ if test "$_lt_caught_CXX_error" != yes; then
+
+ # Allow CC to be a program name with arguments.
+ lt_save_CC=$CC
++ lt_save_CFLAGS=$CFLAGS
+ lt_save_LD=$LD
+ lt_save_GCC=$GCC
+ GCC=$GXX
+@@ -5563,6 +5890,7 @@ if test "$_lt_caught_CXX_error" != yes; then
+ fi
+ test -z "${LDCXX+set}" || LD=$LDCXX
+ CC=${CXX-"c++"}
++ CFLAGS=$CXXFLAGS
+ compiler=$CC
+ _LT_TAGVAR(compiler, $1)=$CC
+ _LT_CC_BASENAME([$compiler])
+@@ -5726,7 +6054,7 @@ if test "$_lt_caught_CXX_error" != yes; then
+ _LT_TAGVAR(allow_undefined_flag, $1)='-berok'
+ # Determine the default libpath from the value encoded in an empty
+ # executable.
+- _LT_SYS_MODULE_PATH_AIX
++ _LT_SYS_MODULE_PATH_AIX([$1])
+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-blibpath:$libdir:'"$aix_libpath"
+
+ _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags `if test "x${allow_undefined_flag}" != "x"; then func_echo_all "${wl}${allow_undefined_flag}"; else :; fi` '"\${wl}$exp_sym_flag:\$export_symbols $shared_flag"
+@@ -5738,7 +6066,7 @@ if test "$_lt_caught_CXX_error" != yes; then
+ else
+ # Determine the default libpath from the value encoded in an
+ # empty executable.
+- _LT_SYS_MODULE_PATH_AIX
++ _LT_SYS_MODULE_PATH_AIX([$1])
+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-blibpath:$libdir:'"$aix_libpath"
+ # Warning - without using the other run time loading flags,
+ # -berok will link without error, but may produce a broken library.
+@@ -5780,29 +6108,75 @@ if test "$_lt_caught_CXX_error" != yes; then
+ ;;
+
+ cygwin* | mingw* | pw32* | cegcc*)
+- # _LT_TAGVAR(hardcode_libdir_flag_spec, $1) is actually meaningless,
+- # as there is no search path for DLLs.
+- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir'
+- _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}--export-all-symbols'
+- _LT_TAGVAR(allow_undefined_flag, $1)=unsupported
+- _LT_TAGVAR(always_export_symbols, $1)=no
+- _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=yes
+-
+- if $LD --help 2>&1 | $GREP 'auto-import' > /dev/null; then
+- _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib'
+- # If the export-symbols file already is a .def file (1st line
+- # is EXPORTS), use it as is; otherwise, prepend...
+- _LT_TAGVAR(archive_expsym_cmds, $1)='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then
+- cp $export_symbols $output_objdir/$soname.def;
+- else
+- echo EXPORTS > $output_objdir/$soname.def;
+- cat $export_symbols >> $output_objdir/$soname.def;
+- fi~
+- $CC -shared -nostdlib $output_objdir/$soname.def $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib'
+- else
+- _LT_TAGVAR(ld_shlibs, $1)=no
+- fi
+- ;;
++ case $GXX,$cc_basename in
++ ,cl* | no,cl*)
++ # Native MSVC
++ # hardcode_libdir_flag_spec is actually meaningless, as there is
++ # no search path for DLLs.
++ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)=' '
++ _LT_TAGVAR(allow_undefined_flag, $1)=unsupported
++ _LT_TAGVAR(always_export_symbols, $1)=yes
++ _LT_TAGVAR(file_list_spec, $1)='@'
++ # Tell ltmain to make .lib files, not .a files.
++ libext=lib
++ # Tell ltmain to make .dll files, not .so files.
++ shrext_cmds=".dll"
++ # FIXME: Setting linknames here is a bad hack.
++ _LT_TAGVAR(archive_cmds, $1)='$CC -o $output_objdir/$soname $libobjs $compiler_flags $deplibs -Wl,-dll~linknames='
++ _LT_TAGVAR(archive_expsym_cmds, $1)='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then
++ $SED -n -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' -e '1\\\!p' < $export_symbols > $output_objdir/$soname.exp;
++ else
++ $SED -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' < $export_symbols > $output_objdir/$soname.exp;
++ fi~
++ $CC -o $tool_output_objdir$soname $libobjs $compiler_flags $deplibs "@$tool_output_objdir$soname.exp" -Wl,-DLL,-IMPLIB:"$tool_output_objdir$libname.dll.lib"~
++ linknames='
++ # The linker will not automatically build a static lib if we build a DLL.
++ # _LT_TAGVAR(old_archive_from_new_cmds, $1)='true'
++ _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=yes
++ # Don't use ranlib
++ _LT_TAGVAR(old_postinstall_cmds, $1)='chmod 644 $oldlib'
++ _LT_TAGVAR(postlink_cmds, $1)='lt_outputfile="@OUTPUT@"~
++ lt_tool_outputfile="@TOOL_OUTPUT@"~
++ case $lt_outputfile in
++ *.exe|*.EXE) ;;
++ *)
++ lt_outputfile="$lt_outputfile.exe"
++ lt_tool_outputfile="$lt_tool_outputfile.exe"
++ ;;
++ esac~
++ func_to_tool_file "$lt_outputfile"~
++ if test "$MANIFEST_TOOL" != ":" && test -f "$lt_outputfile.manifest"; then
++ $MANIFEST_TOOL -manifest "$lt_tool_outputfile.manifest" -outputresource:"$lt_tool_outputfile" || exit 1;
++ $RM "$lt_outputfile.manifest";
++ fi'
++ ;;
++ *)
++ # g++
++ # _LT_TAGVAR(hardcode_libdir_flag_spec, $1) is actually meaningless,
++ # as there is no search path for DLLs.
++ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir'
++ _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}--export-all-symbols'
++ _LT_TAGVAR(allow_undefined_flag, $1)=unsupported
++ _LT_TAGVAR(always_export_symbols, $1)=no
++ _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=yes
++
++ if $LD --help 2>&1 | $GREP 'auto-import' > /dev/null; then
++ _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib'
++ # If the export-symbols file already is a .def file (1st line
++ # is EXPORTS), use it as is; otherwise, prepend...
++ _LT_TAGVAR(archive_expsym_cmds, $1)='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then
++ cp $export_symbols $output_objdir/$soname.def;
++ else
++ echo EXPORTS > $output_objdir/$soname.def;
++ cat $export_symbols >> $output_objdir/$soname.def;
++ fi~
++ $CC -shared -nostdlib $output_objdir/$soname.def $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib'
++ else
++ _LT_TAGVAR(ld_shlibs, $1)=no
++ fi
++ ;;
++ esac
++ ;;
+ darwin* | rhapsody*)
+ _LT_DARWIN_LINKER_FEATURES($1)
+ ;;
+@@ -5877,7 +6251,7 @@ if test "$_lt_caught_CXX_error" != yes; then
+ ;;
+ *)
+ if test "$GXX" = yes; then
+- _LT_TAGVAR(archive_cmds, $1)='$RM $output_objdir/$soname~$CC -shared -nostdlib -fPIC ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib'
++ _LT_TAGVAR(archive_cmds, $1)='$RM $output_objdir/$soname~$CC -shared -nostdlib $pic_flag ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib'
+ else
+ # FIXME: insert proper C++ library support
+ _LT_TAGVAR(ld_shlibs, $1)=no
+@@ -5948,10 +6322,10 @@ if test "$_lt_caught_CXX_error" != yes; then
+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib -fPIC ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags'
+ ;;
+ ia64*)
+- _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib -fPIC ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags'
++ _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib $pic_flag ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags'
+ ;;
+ *)
+- _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib -fPIC ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags'
++ _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags'
+ ;;
+ esac
+ fi
+@@ -5992,9 +6366,9 @@ if test "$_lt_caught_CXX_error" != yes; then
+ *)
+ if test "$GXX" = yes; then
+ if test "$with_gnu_ld" = no; then
+- _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
++ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
+ else
+- _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` -o $lib'
++ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` -o $lib'
+ fi
+ fi
+ _LT_TAGVAR(link_all_deplibs, $1)=yes
+@@ -6272,7 +6646,7 @@ if test "$_lt_caught_CXX_error" != yes; then
+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib ${allow_undefined_flag} $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
+ ;;
+ *)
+- _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib ${allow_undefined_flag} $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
++ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag -nostdlib ${allow_undefined_flag} $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
+ ;;
+ esac
+
+@@ -6318,7 +6692,7 @@ if test "$_lt_caught_CXX_error" != yes; then
+
+ solaris*)
+ case $cc_basename in
+- CC*)
++ CC* | sunCC*)
+ # Sun C++ 4.2, 5.x and Centerline C++
+ _LT_TAGVAR(archive_cmds_need_lc,$1)=yes
+ _LT_TAGVAR(no_undefined_flag, $1)=' -zdefs'
+@@ -6359,9 +6733,9 @@ if test "$_lt_caught_CXX_error" != yes; then
+ if test "$GXX" = yes && test "$with_gnu_ld" = no; then
+ _LT_TAGVAR(no_undefined_flag, $1)=' ${wl}-z ${wl}defs'
+ if $CC --version | $GREP -v '^2\.7' > /dev/null; then
+- _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib $LDFLAGS $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-h $wl$soname -o $lib'
++ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag -nostdlib $LDFLAGS $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-h $wl$soname -o $lib'
+ _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~
+- $CC -shared -nostdlib ${wl}-M $wl$lib.exp -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~$RM $lib.exp'
++ $CC -shared $pic_flag -nostdlib ${wl}-M $wl$lib.exp -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~$RM $lib.exp'
+
+ # Commands to make compiler produce verbose output that lists
+ # what "hidden" libraries, object files and flags are used when
+@@ -6491,6 +6865,7 @@ if test "$_lt_caught_CXX_error" != yes; then
+ fi # test -n "$compiler"
+
+ CC=$lt_save_CC
++ CFLAGS=$lt_save_CFLAGS
+ LDCXX=$LD
+ LD=$lt_save_LD
+ GCC=$lt_save_GCC
+@@ -6505,6 +6880,29 @@ AC_LANG_POP
+ ])# _LT_LANG_CXX_CONFIG
+
+
++# _LT_FUNC_STRIPNAME_CNF
++# ----------------------
++# func_stripname_cnf prefix suffix name
++# strip PREFIX and SUFFIX off of NAME.
++# PREFIX and SUFFIX must not contain globbing or regex special
++# characters, hashes, percent signs, but SUFFIX may contain a leading
++# dot (in which case that matches only a dot).
++#
++# This function is identical to the (non-XSI) version of func_stripname,
++# except this one can be used by m4 code that may be executed by configure,
++# rather than the libtool script.
++m4_defun([_LT_FUNC_STRIPNAME_CNF],[dnl
++AC_REQUIRE([_LT_DECL_SED])
++AC_REQUIRE([_LT_PROG_ECHO_BACKSLASH])
++func_stripname_cnf ()
++{
++ case ${2} in
++ .*) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%\\\\${2}\$%%"`;;
++ *) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%${2}\$%%"`;;
++ esac
++} # func_stripname_cnf
++])# _LT_FUNC_STRIPNAME_CNF
++
+ # _LT_SYS_HIDDEN_LIBDEPS([TAGNAME])
+ # ---------------------------------
+ # Figure out "hidden" library dependencies from verbose
+@@ -6513,6 +6911,7 @@ AC_LANG_POP
+ # objects, libraries and library flags.
+ m4_defun([_LT_SYS_HIDDEN_LIBDEPS],
+ [m4_require([_LT_FILEUTILS_DEFAULTS])dnl
++AC_REQUIRE([_LT_FUNC_STRIPNAME_CNF])dnl
+ # Dependencies to place before and after the object being linked:
+ _LT_TAGVAR(predep_objects, $1)=
+ _LT_TAGVAR(postdep_objects, $1)=
+@@ -6563,6 +6962,13 @@ public class foo {
+ };
+ _LT_EOF
+ ])
++
++_lt_libdeps_save_CFLAGS=$CFLAGS
++case "$CC $CFLAGS " in #(
++*\ -flto*\ *) CFLAGS="$CFLAGS -fno-lto" ;;
++*\ -fwhopr*\ *) CFLAGS="$CFLAGS -fno-whopr" ;;
++esac
++
+ dnl Parse the compiler output and extract the necessary
+ dnl objects, libraries and library flags.
+ if AC_TRY_EVAL(ac_compile); then
+@@ -6574,7 +6980,7 @@ if AC_TRY_EVAL(ac_compile); then
+ pre_test_object_deps_done=no
+
+ for p in `eval "$output_verbose_link_cmd"`; do
+- case $p in
++ case ${prev}${p} in
+
+ -L* | -R* | -l*)
+ # Some compilers place space between "-{L,R}" and the path.
+@@ -6583,13 +6989,22 @@ if AC_TRY_EVAL(ac_compile); then
+ test $p = "-R"; then
+ prev=$p
+ continue
+- else
+- prev=
+ fi
+
++ # Expand the sysroot to ease extracting the directories later.
++ if test -z "$prev"; then
++ case $p in
++ -L*) func_stripname_cnf '-L' '' "$p"; prev=-L; p=$func_stripname_result ;;
++ -R*) func_stripname_cnf '-R' '' "$p"; prev=-R; p=$func_stripname_result ;;
++ -l*) func_stripname_cnf '-l' '' "$p"; prev=-l; p=$func_stripname_result ;;
++ esac
++ fi
++ case $p in
++ =*) func_stripname_cnf '=' '' "$p"; p=$lt_sysroot$func_stripname_result ;;
++ esac
+ if test "$pre_test_object_deps_done" = no; then
+- case $p in
+- -L* | -R*)
++ case ${prev} in
++ -L | -R)
+ # Internal compiler library paths should come after those
+ # provided the user. The postdeps already come after the
+ # user supplied libs so there is no need to process them.
+@@ -6609,8 +7024,10 @@ if AC_TRY_EVAL(ac_compile); then
+ _LT_TAGVAR(postdeps, $1)="${_LT_TAGVAR(postdeps, $1)} ${prev}${p}"
+ fi
+ fi
++ prev=
+ ;;
+
++ *.lto.$objext) ;; # Ignore GCC LTO objects
+ *.$objext)
+ # This assumes that the test object file only shows up
+ # once in the compiler output.
+@@ -6646,6 +7063,7 @@ else
+ fi
+
+ $RM -f confest.$objext
++CFLAGS=$_lt_libdeps_save_CFLAGS
+
+ # PORTME: override above test on systems where it is broken
+ m4_if([$1], [CXX],
+@@ -6682,7 +7100,7 @@ linux*)
+
+ solaris*)
+ case $cc_basename in
+- CC*)
++ CC* | sunCC*)
+ # The more standards-conforming stlport4 library is
+ # incompatible with the Cstd library. Avoid specifying
+ # it if it's in CXXFLAGS. Ignore libCrun as
+@@ -6795,7 +7213,9 @@ if test "$_lt_disable_F77" != yes; then
+ # Allow CC to be a program name with arguments.
+ lt_save_CC="$CC"
+ lt_save_GCC=$GCC
++ lt_save_CFLAGS=$CFLAGS
+ CC=${F77-"f77"}
++ CFLAGS=$FFLAGS
+ compiler=$CC
+ _LT_TAGVAR(compiler, $1)=$CC
+ _LT_CC_BASENAME([$compiler])
+@@ -6849,6 +7269,7 @@ if test "$_lt_disable_F77" != yes; then
+
+ GCC=$lt_save_GCC
+ CC="$lt_save_CC"
++ CFLAGS="$lt_save_CFLAGS"
+ fi # test "$_lt_disable_F77" != yes
+
+ AC_LANG_POP
+@@ -6925,7 +7346,9 @@ if test "$_lt_disable_FC" != yes; then
+ # Allow CC to be a program name with arguments.
+ lt_save_CC="$CC"
+ lt_save_GCC=$GCC
++ lt_save_CFLAGS=$CFLAGS
+ CC=${FC-"f95"}
++ CFLAGS=$FCFLAGS
+ compiler=$CC
+ GCC=$ac_cv_fc_compiler_gnu
+
+@@ -6981,7 +7404,8 @@ if test "$_lt_disable_FC" != yes; then
+ fi # test -n "$compiler"
+
+ GCC=$lt_save_GCC
+- CC="$lt_save_CC"
++ CC=$lt_save_CC
++ CFLAGS=$lt_save_CFLAGS
+ fi # test "$_lt_disable_FC" != yes
+
+ AC_LANG_POP
+@@ -7018,10 +7442,12 @@ _LT_COMPILER_BOILERPLATE
+ _LT_LINKER_BOILERPLATE
+
+ # Allow CC to be a program name with arguments.
+-lt_save_CC="$CC"
++lt_save_CC=$CC
++lt_save_CFLAGS=$CFLAGS
+ lt_save_GCC=$GCC
+ GCC=yes
+ CC=${GCJ-"gcj"}
++CFLAGS=$GCJFLAGS
+ compiler=$CC
+ _LT_TAGVAR(compiler, $1)=$CC
+ _LT_TAGVAR(LD, $1)="$LD"
+@@ -7052,7 +7478,8 @@ fi
+ AC_LANG_RESTORE
+
+ GCC=$lt_save_GCC
+-CC="$lt_save_CC"
++CC=$lt_save_CC
++CFLAGS=$lt_save_CFLAGS
+ ])# _LT_LANG_GCJ_CONFIG
+
+
+@@ -7087,9 +7514,11 @@ _LT_LINKER_BOILERPLATE
+
+ # Allow CC to be a program name with arguments.
+ lt_save_CC="$CC"
++lt_save_CFLAGS=$CFLAGS
+ lt_save_GCC=$GCC
+ GCC=
+ CC=${RC-"windres"}
++CFLAGS=
+ compiler=$CC
+ _LT_TAGVAR(compiler, $1)=$CC
+ _LT_CC_BASENAME([$compiler])
+@@ -7102,7 +7531,8 @@ fi
+
+ GCC=$lt_save_GCC
+ AC_LANG_RESTORE
+-CC="$lt_save_CC"
++CC=$lt_save_CC
++CFLAGS=$lt_save_CFLAGS
+ ])# _LT_LANG_RC_CONFIG
+
+
+@@ -7161,6 +7591,15 @@ _LT_DECL([], [OBJDUMP], [1], [An object symbol dumper])
+ AC_SUBST([OBJDUMP])
+ ])
+
++# _LT_DECL_DLLTOOL
++# ----------------
++# Ensure DLLTOOL variable is set.
++m4_defun([_LT_DECL_DLLTOOL],
++[AC_CHECK_TOOL(DLLTOOL, dlltool, false)
++test -z "$DLLTOOL" && DLLTOOL=dlltool
++_LT_DECL([], [DLLTOOL], [1], [DLL creation program])
++AC_SUBST([DLLTOOL])
++])
+
+ # _LT_DECL_SED
+ # ------------
+@@ -7254,8 +7693,8 @@ m4_defun([_LT_CHECK_SHELL_FEATURES],
+ # Try some XSI features
+ xsi_shell=no
+ ( _lt_dummy="a/b/c"
+- test "${_lt_dummy##*/},${_lt_dummy%/*},"${_lt_dummy%"$_lt_dummy"}, \
+- = c,a/b,, \
++ test "${_lt_dummy##*/},${_lt_dummy%/*},${_lt_dummy#??}"${_lt_dummy%"$_lt_dummy"}, \
++ = c,a/b,b/c, \
+ && eval 'test $(( 1 + 1 )) -eq 2 \
+ && test "${#_lt_dummy}" -eq 5' ) >/dev/null 2>&1 \
+ && xsi_shell=yes
+@@ -7294,206 +7733,162 @@ _LT_DECL([NL2SP], [lt_NL2SP], [1], [turn newlines into spaces])dnl
+ ])# _LT_CHECK_SHELL_FEATURES
+
+
+-# _LT_PROG_XSI_SHELLFNS
+-# ---------------------
+-# Bourne and XSI compatible variants of some useful shell functions.
+-m4_defun([_LT_PROG_XSI_SHELLFNS],
+-[case $xsi_shell in
+- yes)
+- cat << \_LT_EOF >> "$cfgfile"
+-
+-# func_dirname file append nondir_replacement
+-# Compute the dirname of FILE. If nonempty, add APPEND to the result,
+-# otherwise set result to NONDIR_REPLACEMENT.
+-func_dirname ()
+-{
+- case ${1} in
+- */*) func_dirname_result="${1%/*}${2}" ;;
+- * ) func_dirname_result="${3}" ;;
+- esac
+-}
+-
+-# func_basename file
+-func_basename ()
+-{
+- func_basename_result="${1##*/}"
+-}
+-
+-# func_dirname_and_basename file append nondir_replacement
+-# perform func_basename and func_dirname in a single function
+-# call:
+-# dirname: Compute the dirname of FILE. If nonempty,
+-# add APPEND to the result, otherwise set result
+-# to NONDIR_REPLACEMENT.
+-# value returned in "$func_dirname_result"
+-# basename: Compute filename of FILE.
+-# value retuned in "$func_basename_result"
+-# Implementation must be kept synchronized with func_dirname
+-# and func_basename. For efficiency, we do not delegate to
+-# those functions but instead duplicate the functionality here.
+-func_dirname_and_basename ()
+-{
+- case ${1} in
+- */*) func_dirname_result="${1%/*}${2}" ;;
+- * ) func_dirname_result="${3}" ;;
+- esac
+- func_basename_result="${1##*/}"
+-}
+-
+-# func_stripname prefix suffix name
+-# strip PREFIX and SUFFIX off of NAME.
+-# PREFIX and SUFFIX must not contain globbing or regex special
+-# characters, hashes, percent signs, but SUFFIX may contain a leading
+-# dot (in which case that matches only a dot).
+-func_stripname ()
+-{
+- # pdksh 5.2.14 does not do ${X%$Y} correctly if both X and Y are
+- # positional parameters, so assign one to ordinary parameter first.
+- func_stripname_result=${3}
+- func_stripname_result=${func_stripname_result#"${1}"}
+- func_stripname_result=${func_stripname_result%"${2}"}
+-}
+-
+-# func_opt_split
+-func_opt_split ()
+-{
+- func_opt_split_opt=${1%%=*}
+- func_opt_split_arg=${1#*=}
+-}
+-
+-# func_lo2o object
+-func_lo2o ()
+-{
+- case ${1} in
+- *.lo) func_lo2o_result=${1%.lo}.${objext} ;;
+- *) func_lo2o_result=${1} ;;
+- esac
+-}
+-
+-# func_xform libobj-or-source
+-func_xform ()
+-{
+- func_xform_result=${1%.*}.lo
+-}
+-
+-# func_arith arithmetic-term...
+-func_arith ()
+-{
+- func_arith_result=$(( $[*] ))
+-}
+-
+-# func_len string
+-# STRING may not start with a hyphen.
+-func_len ()
+-{
+- func_len_result=${#1}
+-}
++# _LT_PROG_FUNCTION_REPLACE (FUNCNAME, REPLACEMENT-BODY)
++# ------------------------------------------------------
++# In `$cfgfile', look for function FUNCNAME delimited by `^FUNCNAME ()$' and
++# '^} FUNCNAME ', and replace its body with REPLACEMENT-BODY.
++m4_defun([_LT_PROG_FUNCTION_REPLACE],
++[dnl {
++sed -e '/^$1 ()$/,/^} # $1 /c\
++$1 ()\
++{\
++m4_bpatsubsts([$2], [$], [\\], [^\([ ]\)], [\\\1])
++} # Extended-shell $1 implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++])
+
+-_LT_EOF
+- ;;
+- *) # Bourne compatible functions.
+- cat << \_LT_EOF >> "$cfgfile"
+
+-# func_dirname file append nondir_replacement
+-# Compute the dirname of FILE. If nonempty, add APPEND to the result,
+-# otherwise set result to NONDIR_REPLACEMENT.
+-func_dirname ()
+-{
+- # Extract subdirectory from the argument.
+- func_dirname_result=`$ECHO "${1}" | $SED "$dirname"`
+- if test "X$func_dirname_result" = "X${1}"; then
+- func_dirname_result="${3}"
+- else
+- func_dirname_result="$func_dirname_result${2}"
+- fi
+-}
++# _LT_PROG_REPLACE_SHELLFNS
++# -------------------------
++# Replace existing portable implementations of several shell functions with
++# equivalent extended shell implementations where those features are available..
++m4_defun([_LT_PROG_REPLACE_SHELLFNS],
++[if test x"$xsi_shell" = xyes; then
++ _LT_PROG_FUNCTION_REPLACE([func_dirname], [dnl
++ case ${1} in
++ */*) func_dirname_result="${1%/*}${2}" ;;
++ * ) func_dirname_result="${3}" ;;
++ esac])
++
++ _LT_PROG_FUNCTION_REPLACE([func_basename], [dnl
++ func_basename_result="${1##*/}"])
++
++ _LT_PROG_FUNCTION_REPLACE([func_dirname_and_basename], [dnl
++ case ${1} in
++ */*) func_dirname_result="${1%/*}${2}" ;;
++ * ) func_dirname_result="${3}" ;;
++ esac
++ func_basename_result="${1##*/}"])
+
+-# func_basename file
+-func_basename ()
+-{
+- func_basename_result=`$ECHO "${1}" | $SED "$basename"`
+-}
++ _LT_PROG_FUNCTION_REPLACE([func_stripname], [dnl
++ # pdksh 5.2.14 does not do ${X%$Y} correctly if both X and Y are
++ # positional parameters, so assign one to ordinary parameter first.
++ func_stripname_result=${3}
++ func_stripname_result=${func_stripname_result#"${1}"}
++ func_stripname_result=${func_stripname_result%"${2}"}])
+
+-dnl func_dirname_and_basename
+-dnl A portable version of this function is already defined in general.m4sh
+-dnl so there is no need for it here.
++ _LT_PROG_FUNCTION_REPLACE([func_split_long_opt], [dnl
++ func_split_long_opt_name=${1%%=*}
++ func_split_long_opt_arg=${1#*=}])
+
+-# func_stripname prefix suffix name
+-# strip PREFIX and SUFFIX off of NAME.
+-# PREFIX and SUFFIX must not contain globbing or regex special
+-# characters, hashes, percent signs, but SUFFIX may contain a leading
+-# dot (in which case that matches only a dot).
+-# func_strip_suffix prefix name
+-func_stripname ()
+-{
+- case ${2} in
+- .*) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%\\\\${2}\$%%"`;;
+- *) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%${2}\$%%"`;;
+- esac
+-}
++ _LT_PROG_FUNCTION_REPLACE([func_split_short_opt], [dnl
++ func_split_short_opt_arg=${1#??}
++ func_split_short_opt_name=${1%"$func_split_short_opt_arg"}])
+
+-# sed scripts:
+-my_sed_long_opt='1s/^\(-[[^=]]*\)=.*/\1/;q'
+-my_sed_long_arg='1s/^-[[^=]]*=//'
++ _LT_PROG_FUNCTION_REPLACE([func_lo2o], [dnl
++ case ${1} in
++ *.lo) func_lo2o_result=${1%.lo}.${objext} ;;
++ *) func_lo2o_result=${1} ;;
++ esac])
+
+-# func_opt_split
+-func_opt_split ()
+-{
+- func_opt_split_opt=`$ECHO "${1}" | $SED "$my_sed_long_opt"`
+- func_opt_split_arg=`$ECHO "${1}" | $SED "$my_sed_long_arg"`
+-}
++ _LT_PROG_FUNCTION_REPLACE([func_xform], [ func_xform_result=${1%.*}.lo])
+
+-# func_lo2o object
+-func_lo2o ()
+-{
+- func_lo2o_result=`$ECHO "${1}" | $SED "$lo2o"`
+-}
++ _LT_PROG_FUNCTION_REPLACE([func_arith], [ func_arith_result=$(( $[*] ))])
+
+-# func_xform libobj-or-source
+-func_xform ()
+-{
+- func_xform_result=`$ECHO "${1}" | $SED 's/\.[[^.]]*$/.lo/'`
+-}
++ _LT_PROG_FUNCTION_REPLACE([func_len], [ func_len_result=${#1}])
++fi
+
+-# func_arith arithmetic-term...
+-func_arith ()
+-{
+- func_arith_result=`expr "$[@]"`
+-}
++if test x"$lt_shell_append" = xyes; then
++ _LT_PROG_FUNCTION_REPLACE([func_append], [ eval "${1}+=\\${2}"])
+
+-# func_len string
+-# STRING may not start with a hyphen.
+-func_len ()
+-{
+- func_len_result=`expr "$[1]" : ".*" 2>/dev/null || echo $max_cmd_len`
+-}
++ _LT_PROG_FUNCTION_REPLACE([func_append_quoted], [dnl
++ func_quote_for_eval "${2}"
++dnl m4 expansion turns \\\\ into \\, and then the shell eval turns that into \
++ eval "${1}+=\\\\ \\$func_quote_for_eval_result"])
+
+-_LT_EOF
+-esac
++ # Save a `func_append' function call where possible by direct use of '+='
++ sed -e 's%func_append \([[a-zA-Z_]]\{1,\}\) "%\1+="%g' $cfgfile > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++ test 0 -eq $? || _lt_function_replace_fail=:
++else
++ # Save a `func_append' function call even when '+=' is not available
++ sed -e 's%func_append \([[a-zA-Z_]]\{1,\}\) "%\1="$\1%g' $cfgfile > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++ test 0 -eq $? || _lt_function_replace_fail=:
++fi
+
+-case $lt_shell_append in
+- yes)
+- cat << \_LT_EOF >> "$cfgfile"
++if test x"$_lt_function_replace_fail" = x":"; then
++ AC_MSG_WARN([Unable to substitute extended shell functions in $ofile])
++fi
++])
+
+-# func_append var value
+-# Append VALUE to the end of shell variable VAR.
+-func_append ()
+-{
+- eval "$[1]+=\$[2]"
+-}
+-_LT_EOF
++# _LT_PATH_CONVERSION_FUNCTIONS
++# -----------------------------
++# Determine which file name conversion functions should be used by
++# func_to_host_file (and, implicitly, by func_to_host_path). These are needed
++# for certain cross-compile configurations and native mingw.
++m4_defun([_LT_PATH_CONVERSION_FUNCTIONS],
++[AC_REQUIRE([AC_CANONICAL_HOST])dnl
++AC_REQUIRE([AC_CANONICAL_BUILD])dnl
++AC_MSG_CHECKING([how to convert $build file names to $host format])
++AC_CACHE_VAL(lt_cv_to_host_file_cmd,
++[case $host in
++ *-*-mingw* )
++ case $build in
++ *-*-mingw* ) # actually msys
++ lt_cv_to_host_file_cmd=func_convert_file_msys_to_w32
++ ;;
++ *-*-cygwin* )
++ lt_cv_to_host_file_cmd=func_convert_file_cygwin_to_w32
++ ;;
++ * ) # otherwise, assume *nix
++ lt_cv_to_host_file_cmd=func_convert_file_nix_to_w32
++ ;;
++ esac
+ ;;
+- *)
+- cat << \_LT_EOF >> "$cfgfile"
+-
+-# func_append var value
+-# Append VALUE to the end of shell variable VAR.
+-func_append ()
+-{
+- eval "$[1]=\$$[1]\$[2]"
+-}
+-
+-_LT_EOF
++ *-*-cygwin* )
++ case $build in
++ *-*-mingw* ) # actually msys
++ lt_cv_to_host_file_cmd=func_convert_file_msys_to_cygwin
++ ;;
++ *-*-cygwin* )
++ lt_cv_to_host_file_cmd=func_convert_file_noop
++ ;;
++ * ) # otherwise, assume *nix
++ lt_cv_to_host_file_cmd=func_convert_file_nix_to_cygwin
++ ;;
++ esac
+ ;;
+- esac
++ * ) # unhandled hosts (and "normal" native builds)
++ lt_cv_to_host_file_cmd=func_convert_file_noop
++ ;;
++esac
++])
++to_host_file_cmd=$lt_cv_to_host_file_cmd
++AC_MSG_RESULT([$lt_cv_to_host_file_cmd])
++_LT_DECL([to_host_file_cmd], [lt_cv_to_host_file_cmd],
++ [0], [convert $build file names to $host format])dnl
++
++AC_MSG_CHECKING([how to convert $build file names to toolchain format])
++AC_CACHE_VAL(lt_cv_to_tool_file_cmd,
++[#assume ordinary cross tools, or native build.
++lt_cv_to_tool_file_cmd=func_convert_file_noop
++case $host in
++ *-*-mingw* )
++ case $build in
++ *-*-mingw* ) # actually msys
++ lt_cv_to_tool_file_cmd=func_convert_file_msys_to_w32
++ ;;
++ esac
++ ;;
++esac
+ ])
++to_tool_file_cmd=$lt_cv_to_tool_file_cmd
++AC_MSG_RESULT([$lt_cv_to_tool_file_cmd])
++_LT_DECL([to_tool_file_cmd], [lt_cv_to_tool_file_cmd],
++ [0], [convert $build files to toolchain format])dnl
++])# _LT_PATH_CONVERSION_FUNCTIONS
+diff --git a/ltmain.sh b/ltmain.sh
+index 70990740b6c..7f7104ffc82 100644
+--- a/ltmain.sh
++++ b/ltmain.sh
+@@ -1,10 +1,9 @@
+-# Generated from ltmain.m4sh.
+
+-# libtool (GNU libtool 1.3134 2009-11-29) 2.2.7a
++# libtool (GNU libtool) 2.4
+ # Written by Gordon Matzigkeit <gord@gnu.ai.mit.edu>, 1996
+
+ # Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005, 2006,
+-# 2007, 2008, 2009 Free Software Foundation, Inc.
++# 2007, 2008, 2009, 2010 Free Software Foundation, Inc.
+ # This is free software; see the source for copying conditions. There is NO
+ # warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+
+@@ -38,7 +37,6 @@
+ # -n, --dry-run display commands without modifying any files
+ # --features display basic configuration information and exit
+ # --mode=MODE use operation mode MODE
+-# --no-finish let install mode avoid finish commands
+ # --preserve-dup-deps don't remove duplicate dependency libraries
+ # --quiet, --silent don't print informational messages
+ # --no-quiet, --no-silent
+@@ -71,17 +69,19 @@
+ # compiler: $LTCC
+ # compiler flags: $LTCFLAGS
+ # linker: $LD (gnu? $with_gnu_ld)
+-# $progname: (GNU libtool 1.3134 2009-11-29) 2.2.7a
++# $progname: (GNU libtool) 2.4
+ # automake: $automake_version
+ # autoconf: $autoconf_version
+ #
+ # Report bugs to <bug-libtool@gnu.org>.
++# GNU libtool home page: <http://www.gnu.org/software/libtool/>.
++# General help using GNU software: <http://www.gnu.org/gethelp/>.
+
+ PROGRAM=libtool
+ PACKAGE=libtool
+-VERSION=2.2.7a
+-TIMESTAMP=" 1.3134 2009-11-29"
+-package_revision=1.3134
++VERSION=2.4
++TIMESTAMP=""
++package_revision=1.3293
+
+ # Be Bourne compatible
+ if test -n "${ZSH_VERSION+set}" && (emulate sh) >/dev/null 2>&1; then
+@@ -106,9 +106,6 @@ _LTECHO_EOF'
+ }
+
+ # NLS nuisances: We save the old values to restore during execute mode.
+-# Only set LANG and LC_ALL to C if already set.
+-# These must not be set unconditionally because not all systems understand
+-# e.g. LANG=C (notably SCO).
+ lt_user_locale=
+ lt_safe_locale=
+ for lt_var in LANG LANGUAGE LC_ALL LC_CTYPE LC_COLLATE LC_MESSAGES
+@@ -121,15 +118,13 @@ do
+ lt_safe_locale=\"$lt_var=C; \$lt_safe_locale\"
+ fi"
+ done
++LC_ALL=C
++LANGUAGE=C
++export LANGUAGE LC_ALL
+
+ $lt_unset CDPATH
+
+
+-
+-
+-
+-
+-
+ # Work around backward compatibility issue on IRIX 6.5. On IRIX 6.4+, sh
+ # is ksh but when the shell is invoked as "sh" and the current value of
+ # the _XPG environment variable is not equal to 1 (one), the special
+@@ -140,7 +135,7 @@ progpath="$0"
+
+
+ : ${CP="cp -f"}
+-: ${ECHO=$as_echo}
++test "${ECHO+set}" = set || ECHO=${as_echo-'printf %s\n'}
+ : ${EGREP="/bin/grep -E"}
+ : ${FGREP="/bin/grep -F"}
+ : ${GREP="/bin/grep"}
+@@ -149,7 +144,7 @@ progpath="$0"
+ : ${MKDIR="mkdir"}
+ : ${MV="mv -f"}
+ : ${RM="rm -f"}
+-: ${SED="/mount/endor/wildenhu/local-x86_64/bin/sed"}
++: ${SED="/bin/sed"}
+ : ${SHELL="${CONFIG_SHELL-/bin/sh}"}
+ : ${Xsed="$SED -e 1s/^X//"}
+
+@@ -169,6 +164,27 @@ IFS=" $lt_nl"
+ dirname="s,/[^/]*$,,"
+ basename="s,^.*/,,"
+
++# func_dirname file append nondir_replacement
++# Compute the dirname of FILE. If nonempty, add APPEND to the result,
++# otherwise set result to NONDIR_REPLACEMENT.
++func_dirname ()
++{
++ func_dirname_result=`$ECHO "${1}" | $SED "$dirname"`
++ if test "X$func_dirname_result" = "X${1}"; then
++ func_dirname_result="${3}"
++ else
++ func_dirname_result="$func_dirname_result${2}"
++ fi
++} # func_dirname may be replaced by extended shell implementation
++
++
++# func_basename file
++func_basename ()
++{
++ func_basename_result=`$ECHO "${1}" | $SED "$basename"`
++} # func_basename may be replaced by extended shell implementation
++
++
+ # func_dirname_and_basename file append nondir_replacement
+ # perform func_basename and func_dirname in a single function
+ # call:
+@@ -183,17 +199,31 @@ basename="s,^.*/,,"
+ # those functions but instead duplicate the functionality here.
+ func_dirname_and_basename ()
+ {
+- # Extract subdirectory from the argument.
+- func_dirname_result=`$ECHO "${1}" | $SED -e "$dirname"`
+- if test "X$func_dirname_result" = "X${1}"; then
+- func_dirname_result="${3}"
+- else
+- func_dirname_result="$func_dirname_result${2}"
+- fi
+- func_basename_result=`$ECHO "${1}" | $SED -e "$basename"`
+-}
++ # Extract subdirectory from the argument.
++ func_dirname_result=`$ECHO "${1}" | $SED -e "$dirname"`
++ if test "X$func_dirname_result" = "X${1}"; then
++ func_dirname_result="${3}"
++ else
++ func_dirname_result="$func_dirname_result${2}"
++ fi
++ func_basename_result=`$ECHO "${1}" | $SED -e "$basename"`
++} # func_dirname_and_basename may be replaced by extended shell implementation
++
++
++# func_stripname prefix suffix name
++# strip PREFIX and SUFFIX off of NAME.
++# PREFIX and SUFFIX must not contain globbing or regex special
++# characters, hashes, percent signs, but SUFFIX may contain a leading
++# dot (in which case that matches only a dot).
++# func_strip_suffix prefix name
++func_stripname ()
++{
++ case ${2} in
++ .*) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%\\\\${2}\$%%"`;;
++ *) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%${2}\$%%"`;;
++ esac
++} # func_stripname may be replaced by extended shell implementation
+
+-# Generated shell functions inserted here.
+
+ # These SED scripts presuppose an absolute path with a trailing slash.
+ pathcar='s,^/\([^/]*\).*$,\1,'
+@@ -376,6 +406,15 @@ sed_quote_subst='s/\([`"$\\]\)/\\\1/g'
+ # Same as above, but do not quote variable references.
+ double_quote_subst='s/\(["`\\]\)/\\\1/g'
+
++# Sed substitution that turns a string into a regex matching for the
++# string literally.
++sed_make_literal_regex='s,[].[^$\\*\/],\\&,g'
++
++# Sed substitution that converts a w32 file name or path
++# which contains forward slashes, into one that contains
++# (escaped) backslashes. A very naive implementation.
++lt_sed_naive_backslashify='s|\\\\*|\\|g;s|/|\\|g;s|\\|\\\\|g'
++
+ # Re-`\' parameter expansions in output of double_quote_subst that were
+ # `\'-ed in input to the same. If an odd number of `\' preceded a '$'
+ # in input to double_quote_subst, that '$' was protected from expansion.
+@@ -404,7 +443,7 @@ opt_warning=:
+ # name if it has been set yet.
+ func_echo ()
+ {
+- $ECHO "$progname${mode+: }$mode: $*"
++ $ECHO "$progname: ${opt_mode+$opt_mode: }$*"
+ }
+
+ # func_verbose arg...
+@@ -430,14 +469,14 @@ func_echo_all ()
+ # Echo program name prefixed message to standard error.
+ func_error ()
+ {
+- $ECHO "$progname${mode+: }$mode: "${1+"$@"} 1>&2
++ $ECHO "$progname: ${opt_mode+$opt_mode: }"${1+"$@"} 1>&2
+ }
+
+ # func_warning arg...
+ # Echo program name prefixed warning message to standard error.
+ func_warning ()
+ {
+- $opt_warning && $ECHO "$progname${mode+: }$mode: warning: "${1+"$@"} 1>&2
++ $opt_warning && $ECHO "$progname: ${opt_mode+$opt_mode: }warning: "${1+"$@"} 1>&2
+
+ # bash bug again:
+ :
+@@ -656,19 +695,35 @@ func_show_eval_locale ()
+ fi
+ }
+
+-
+-
++# func_tr_sh
++# Turn $1 into a string suitable for a shell variable name.
++# Result is stored in $func_tr_sh_result. All characters
++# not in the set a-zA-Z0-9_ are replaced with '_'. Further,
++# if $1 begins with a digit, a '_' is prepended as well.
++func_tr_sh ()
++{
++ case $1 in
++ [0-9]* | *[!a-zA-Z0-9_]*)
++ func_tr_sh_result=`$ECHO "$1" | $SED 's/^\([0-9]\)/_\1/; s/[^a-zA-Z0-9_]/_/g'`
++ ;;
++ * )
++ func_tr_sh_result=$1
++ ;;
++ esac
++}
+
+
+ # func_version
+ # Echo version message to standard output and exit.
+ func_version ()
+ {
++ $opt_debug
++
+ $SED -n '/(C)/!b go
+ :more
+ /\./!{
+ N
+- s/\n# //
++ s/\n# / /
+ b more
+ }
+ :go
+@@ -685,7 +740,9 @@ func_version ()
+ # Echo short help message to standard output and exit.
+ func_usage ()
+ {
+- $SED -n '/^# Usage:/,/^# *-h/ {
++ $opt_debug
++
++ $SED -n '/^# Usage:/,/^# *.*--help/ {
+ s/^# //
+ s/^# *$//
+ s/\$progname/'$progname'/
+@@ -701,7 +758,10 @@ func_usage ()
+ # unless 'noexit' is passed as argument.
+ func_help ()
+ {
++ $opt_debug
++
+ $SED -n '/^# Usage:/,/# Report bugs to/ {
++ :print
+ s/^# //
+ s/^# *$//
+ s*\$progname*'$progname'*
+@@ -714,7 +774,11 @@ func_help ()
+ s/\$automake_version/'"`(automake --version) 2>/dev/null |$SED 1q`"'/
+ s/\$autoconf_version/'"`(autoconf --version) 2>/dev/null |$SED 1q`"'/
+ p
+- }' < "$progpath"
++ d
++ }
++ /^# .* home page:/b print
++ /^# General help using/b print
++ ' < "$progpath"
+ ret=$?
+ if test -z "$1"; then
+ exit $ret
+@@ -726,12 +790,39 @@ func_help ()
+ # exit_cmd.
+ func_missing_arg ()
+ {
+- func_error "missing argument for $1"
++ $opt_debug
++
++ func_error "missing argument for $1."
+ exit_cmd=exit
+ }
+
+-exit_cmd=:
+
++# func_split_short_opt shortopt
++# Set func_split_short_opt_name and func_split_short_opt_arg shell
++# variables after splitting SHORTOPT after the 2nd character.
++func_split_short_opt ()
++{
++ my_sed_short_opt='1s/^\(..\).*$/\1/;q'
++ my_sed_short_rest='1s/^..\(.*\)$/\1/;q'
++
++ func_split_short_opt_name=`$ECHO "$1" | $SED "$my_sed_short_opt"`
++ func_split_short_opt_arg=`$ECHO "$1" | $SED "$my_sed_short_rest"`
++} # func_split_short_opt may be replaced by extended shell implementation
++
++
++# func_split_long_opt longopt
++# Set func_split_long_opt_name and func_split_long_opt_arg shell
++# variables after splitting LONGOPT at the `=' sign.
++func_split_long_opt ()
++{
++ my_sed_long_opt='1s/^\(--[^=]*\)=.*/\1/;q'
++ my_sed_long_arg='1s/^--[^=]*=//'
++
++ func_split_long_opt_name=`$ECHO "$1" | $SED "$my_sed_long_opt"`
++ func_split_long_opt_arg=`$ECHO "$1" | $SED "$my_sed_long_arg"`
++} # func_split_long_opt may be replaced by extended shell implementation
++
++exit_cmd=:
+
+
+
+@@ -741,26 +832,64 @@ magic="%%%MAGIC variable%%%"
+ magic_exe="%%%MAGIC EXE variable%%%"
+
+ # Global variables.
+-# $mode is unset
+ nonopt=
+-execute_dlfiles=
+ preserve_args=
+ lo2o="s/\\.lo\$/.${objext}/"
+ o2lo="s/\\.${objext}\$/.lo/"
+ extracted_archives=
+ extracted_serial=0
+
+-opt_dry_run=false
+-opt_finish=:
+-opt_duplicate_deps=false
+-opt_silent=false
+-opt_debug=:
+-
+ # If this variable is set in any of the actions, the command in it
+ # will be execed at the end. This prevents here-documents from being
+ # left over by shells.
+ exec_cmd=
+
++# func_append var value
++# Append VALUE to the end of shell variable VAR.
++func_append ()
++{
++ eval "${1}=\$${1}\${2}"
++} # func_append may be replaced by extended shell implementation
++
++# func_append_quoted var value
++# Quote VALUE and append to the end of shell variable VAR, separated
++# by a space.
++func_append_quoted ()
++{
++ func_quote_for_eval "${2}"
++ eval "${1}=\$${1}\\ \$func_quote_for_eval_result"
++} # func_append_quoted may be replaced by extended shell implementation
++
++
++# func_arith arithmetic-term...
++func_arith ()
++{
++ func_arith_result=`expr "${@}"`
++} # func_arith may be replaced by extended shell implementation
++
++
++# func_len string
++# STRING may not start with a hyphen.
++func_len ()
++{
++ func_len_result=`expr "${1}" : ".*" 2>/dev/null || echo $max_cmd_len`
++} # func_len may be replaced by extended shell implementation
++
++
++# func_lo2o object
++func_lo2o ()
++{
++ func_lo2o_result=`$ECHO "${1}" | $SED "$lo2o"`
++} # func_lo2o may be replaced by extended shell implementation
++
++
++# func_xform libobj-or-source
++func_xform ()
++{
++ func_xform_result=`$ECHO "${1}" | $SED 's/\.[^.]*$/.lo/'`
++} # func_xform may be replaced by extended shell implementation
++
++
+ # func_fatal_configuration arg...
+ # Echo program name prefixed message to standard error, followed by
+ # a configuration failure hint, and exit.
+@@ -850,130 +979,204 @@ func_enable_tag ()
+ esac
+ }
+
+-# Parse options once, thoroughly. This comes as soon as possible in
+-# the script to make things like `libtool --version' happen quickly.
++# func_check_version_match
++# Ensure that we are using m4 macros, and libtool script from the same
++# release of libtool.
++func_check_version_match ()
+ {
++ if test "$package_revision" != "$macro_revision"; then
++ if test "$VERSION" != "$macro_version"; then
++ if test -z "$macro_version"; then
++ cat >&2 <<_LT_EOF
++$progname: Version mismatch error. This is $PACKAGE $VERSION, but the
++$progname: definition of this LT_INIT comes from an older release.
++$progname: You should recreate aclocal.m4 with macros from $PACKAGE $VERSION
++$progname: and run autoconf again.
++_LT_EOF
++ else
++ cat >&2 <<_LT_EOF
++$progname: Version mismatch error. This is $PACKAGE $VERSION, but the
++$progname: definition of this LT_INIT comes from $PACKAGE $macro_version.
++$progname: You should recreate aclocal.m4 with macros from $PACKAGE $VERSION
++$progname: and run autoconf again.
++_LT_EOF
++ fi
++ else
++ cat >&2 <<_LT_EOF
++$progname: Version mismatch error. This is $PACKAGE $VERSION, revision $package_revision,
++$progname: but the definition of this LT_INIT comes from revision $macro_revision.
++$progname: You should recreate aclocal.m4 with macros from revision $package_revision
++$progname: of $PACKAGE $VERSION and run autoconf again.
++_LT_EOF
++ fi
+
+- # Shorthand for --mode=foo, only valid as the first argument
+- case $1 in
+- clean|clea|cle|cl)
+- shift; set dummy --mode clean ${1+"$@"}; shift
+- ;;
+- compile|compil|compi|comp|com|co|c)
+- shift; set dummy --mode compile ${1+"$@"}; shift
+- ;;
+- execute|execut|execu|exec|exe|ex|e)
+- shift; set dummy --mode execute ${1+"$@"}; shift
+- ;;
+- finish|finis|fini|fin|fi|f)
+- shift; set dummy --mode finish ${1+"$@"}; shift
+- ;;
+- install|instal|insta|inst|ins|in|i)
+- shift; set dummy --mode install ${1+"$@"}; shift
+- ;;
+- link|lin|li|l)
+- shift; set dummy --mode link ${1+"$@"}; shift
+- ;;
+- uninstall|uninstal|uninsta|uninst|unins|unin|uni|un|u)
+- shift; set dummy --mode uninstall ${1+"$@"}; shift
+- ;;
+- esac
++ exit $EXIT_MISMATCH
++ fi
++}
++
++
++# Shorthand for --mode=foo, only valid as the first argument
++case $1 in
++clean|clea|cle|cl)
++ shift; set dummy --mode clean ${1+"$@"}; shift
++ ;;
++compile|compil|compi|comp|com|co|c)
++ shift; set dummy --mode compile ${1+"$@"}; shift
++ ;;
++execute|execut|execu|exec|exe|ex|e)
++ shift; set dummy --mode execute ${1+"$@"}; shift
++ ;;
++finish|finis|fini|fin|fi|f)
++ shift; set dummy --mode finish ${1+"$@"}; shift
++ ;;
++install|instal|insta|inst|ins|in|i)
++ shift; set dummy --mode install ${1+"$@"}; shift
++ ;;
++link|lin|li|l)
++ shift; set dummy --mode link ${1+"$@"}; shift
++ ;;
++uninstall|uninstal|uninsta|uninst|unins|unin|uni|un|u)
++ shift; set dummy --mode uninstall ${1+"$@"}; shift
++ ;;
++esac
+
+- # Parse non-mode specific arguments:
+- while test "$#" -gt 0; do
++
++
++# Option defaults:
++opt_debug=:
++opt_dry_run=false
++opt_config=false
++opt_preserve_dup_deps=false
++opt_features=false
++opt_finish=false
++opt_help=false
++opt_help_all=false
++opt_silent=:
++opt_verbose=:
++opt_silent=false
++opt_verbose=false
++
++
++# Parse options once, thoroughly. This comes as soon as possible in the
++# script to make things like `--version' happen as quickly as we can.
++{
++ # this just eases exit handling
++ while test $# -gt 0; do
+ opt="$1"
+ shift
+-
+ case $opt in
+- --config) func_config ;;
+-
+- --debug) preserve_args="$preserve_args $opt"
++ --debug|-x) opt_debug='set -x'
+ func_echo "enabling shell trace mode"
+- opt_debug='set -x'
+ $opt_debug
+ ;;
+-
+- -dlopen) test "$#" -eq 0 && func_missing_arg "$opt" && break
+- execute_dlfiles="$execute_dlfiles $1"
+- shift
++ --dry-run|--dryrun|-n)
++ opt_dry_run=:
+ ;;
+-
+- --dry-run | -n) opt_dry_run=: ;;
+- --features) func_features ;;
+- --finish) mode="finish" ;;
+- --no-finish) opt_finish=false ;;
+-
+- --mode) test "$#" -eq 0 && func_missing_arg "$opt" && break
+- case $1 in
+- # Valid mode arguments:
+- clean) ;;
+- compile) ;;
+- execute) ;;
+- finish) ;;
+- install) ;;
+- link) ;;
+- relink) ;;
+- uninstall) ;;
+-
+- # Catch anything else as an error
+- *) func_error "invalid argument for $opt"
+- exit_cmd=exit
+- break
+- ;;
+- esac
+-
+- mode="$1"
++ --config)
++ opt_config=:
++func_config
++ ;;
++ --dlopen|-dlopen)
++ optarg="$1"
++ opt_dlopen="${opt_dlopen+$opt_dlopen
++}$optarg"
+ shift
+ ;;
+-
+ --preserve-dup-deps)
+- opt_duplicate_deps=: ;;
+-
+- --quiet|--silent) preserve_args="$preserve_args $opt"
+- opt_silent=:
+- opt_verbose=false
++ opt_preserve_dup_deps=:
+ ;;
+-
+- --no-quiet|--no-silent)
+- preserve_args="$preserve_args $opt"
+- opt_silent=false
++ --features)
++ opt_features=:
++func_features
+ ;;
+-
+- --verbose| -v) preserve_args="$preserve_args $opt"
++ --finish)
++ opt_finish=:
++set dummy --mode finish ${1+"$@"}; shift
++ ;;
++ --help)
++ opt_help=:
++ ;;
++ --help-all)
++ opt_help_all=:
++opt_help=': help-all'
++ ;;
++ --mode)
++ test $# = 0 && func_missing_arg $opt && break
++ optarg="$1"
++ opt_mode="$optarg"
++case $optarg in
++ # Valid mode arguments:
++ clean|compile|execute|finish|install|link|relink|uninstall) ;;
++
++ # Catch anything else as an error
++ *) func_error "invalid argument for $opt"
++ exit_cmd=exit
++ break
++ ;;
++esac
++ shift
++ ;;
++ --no-silent|--no-quiet)
+ opt_silent=false
+- opt_verbose=:
++func_append preserve_args " $opt"
+ ;;
+-
+- --no-verbose) preserve_args="$preserve_args $opt"
++ --no-verbose)
+ opt_verbose=false
++func_append preserve_args " $opt"
+ ;;
+-
+- --tag) test "$#" -eq 0 && func_missing_arg "$opt" && break
+- preserve_args="$preserve_args $opt $1"
+- func_enable_tag "$1" # tagname is set here
++ --silent|--quiet)
++ opt_silent=:
++func_append preserve_args " $opt"
++ opt_verbose=false
++ ;;
++ --verbose|-v)
++ opt_verbose=:
++func_append preserve_args " $opt"
++opt_silent=false
++ ;;
++ --tag)
++ test $# = 0 && func_missing_arg $opt && break
++ optarg="$1"
++ opt_tag="$optarg"
++func_append preserve_args " $opt $optarg"
++func_enable_tag "$optarg"
+ shift
+ ;;
+
++ -\?|-h) func_usage ;;
++ --help) func_help ;;
++ --version) func_version ;;
++
+ # Separate optargs to long options:
+- -dlopen=*|--mode=*|--tag=*)
+- func_opt_split "$opt"
+- set dummy "$func_opt_split_opt" "$func_opt_split_arg" ${1+"$@"}
++ --*=*)
++ func_split_long_opt "$opt"
++ set dummy "$func_split_long_opt_name" "$func_split_long_opt_arg" ${1+"$@"}
+ shift
+ ;;
+
+- -\?|-h) func_usage ;;
+- --help) opt_help=: ;;
+- --help-all) opt_help=': help-all' ;;
+- --version) func_version ;;
+-
+- -*) func_fatal_help "unrecognized option \`$opt'" ;;
+-
+- *) nonopt="$opt"
+- break
++ # Separate non-argument short options:
++ -\?*|-h*|-n*|-v*)
++ func_split_short_opt "$opt"
++ set dummy "$func_split_short_opt_name" "-$func_split_short_opt_arg" ${1+"$@"}
++ shift
+ ;;
++
++ --) break ;;
++ -*) func_fatal_help "unrecognized option \`$opt'" ;;
++ *) set dummy "$opt" ${1+"$@"}; shift; break ;;
+ esac
+ done
+
++ # Validate options:
++
++ # save first non-option argument
++ if test "$#" -gt 0; then
++ nonopt="$opt"
++ shift
++ fi
++
++ # preserve --debug
++ test "$opt_debug" = : || func_append preserve_args " --debug"
+
+ case $host in
+ *cygwin* | *mingw* | *pw32* | *cegcc* | *solaris2* )
+@@ -981,82 +1184,44 @@ func_enable_tag ()
+ opt_duplicate_compiler_generated_deps=:
+ ;;
+ *)
+- opt_duplicate_compiler_generated_deps=$opt_duplicate_deps
++ opt_duplicate_compiler_generated_deps=$opt_preserve_dup_deps
+ ;;
+ esac
+
+- # Having warned about all mis-specified options, bail out if
+- # anything was wrong.
+- $exit_cmd $EXIT_FAILURE
+-}
++ $opt_help || {
++ # Sanity checks first:
++ func_check_version_match
+
+-# func_check_version_match
+-# Ensure that we are using m4 macros, and libtool script from the same
+-# release of libtool.
+-func_check_version_match ()
+-{
+- if test "$package_revision" != "$macro_revision"; then
+- if test "$VERSION" != "$macro_version"; then
+- if test -z "$macro_version"; then
+- cat >&2 <<_LT_EOF
+-$progname: Version mismatch error. This is $PACKAGE $VERSION, but the
+-$progname: definition of this LT_INIT comes from an older release.
+-$progname: You should recreate aclocal.m4 with macros from $PACKAGE $VERSION
+-$progname: and run autoconf again.
+-_LT_EOF
+- else
+- cat >&2 <<_LT_EOF
+-$progname: Version mismatch error. This is $PACKAGE $VERSION, but the
+-$progname: definition of this LT_INIT comes from $PACKAGE $macro_version.
+-$progname: You should recreate aclocal.m4 with macros from $PACKAGE $VERSION
+-$progname: and run autoconf again.
+-_LT_EOF
+- fi
+- else
+- cat >&2 <<_LT_EOF
+-$progname: Version mismatch error. This is $PACKAGE $VERSION, revision $package_revision,
+-$progname: but the definition of this LT_INIT comes from revision $macro_revision.
+-$progname: You should recreate aclocal.m4 with macros from revision $package_revision
+-$progname: of $PACKAGE $VERSION and run autoconf again.
+-_LT_EOF
++ if test "$build_libtool_libs" != yes && test "$build_old_libs" != yes; then
++ func_fatal_configuration "not configured to build any kind of library"
+ fi
+
+- exit $EXIT_MISMATCH
+- fi
+-}
+-
++ # Darwin sucks
++ eval std_shrext=\"$shrext_cmds\"
+
+-## ----------- ##
+-## Main. ##
+-## ----------- ##
+-
+-$opt_help || {
+- # Sanity checks first:
+- func_check_version_match
+-
+- if test "$build_libtool_libs" != yes && test "$build_old_libs" != yes; then
+- func_fatal_configuration "not configured to build any kind of library"
+- fi
++ # Only execute mode is allowed to have -dlopen flags.
++ if test -n "$opt_dlopen" && test "$opt_mode" != execute; then
++ func_error "unrecognized option \`-dlopen'"
++ $ECHO "$help" 1>&2
++ exit $EXIT_FAILURE
++ fi
+
+- test -z "$mode" && func_fatal_error "error: you must specify a MODE."
++ # Change the help message to a mode-specific one.
++ generic_help="$help"
++ help="Try \`$progname --help --mode=$opt_mode' for more information."
++ }
+
+
+- # Darwin sucks
+- eval "std_shrext=\"$shrext_cmds\""
++ # Bail if the options were screwed
++ $exit_cmd $EXIT_FAILURE
++}
+
+
+- # Only execute mode is allowed to have -dlopen flags.
+- if test -n "$execute_dlfiles" && test "$mode" != execute; then
+- func_error "unrecognized option \`-dlopen'"
+- $ECHO "$help" 1>&2
+- exit $EXIT_FAILURE
+- fi
+
+- # Change the help message to a mode-specific one.
+- generic_help="$help"
+- help="Try \`$progname --help --mode=$mode' for more information."
+-}
+
++## ----------- ##
++## Main. ##
++## ----------- ##
+
+ # func_lalib_p file
+ # True iff FILE is a libtool `.la' library or `.lo' object file.
+@@ -1121,12 +1286,9 @@ func_ltwrapper_executable_p ()
+ # temporary ltwrapper_script.
+ func_ltwrapper_scriptname ()
+ {
+- func_ltwrapper_scriptname_result=""
+- if func_ltwrapper_executable_p "$1"; then
+- func_dirname_and_basename "$1" "" "."
+- func_stripname '' '.exe' "$func_basename_result"
+- func_ltwrapper_scriptname_result="$func_dirname_result/$objdir/${func_stripname_result}_ltshwrapper"
+- fi
++ func_dirname_and_basename "$1" "" "."
++ func_stripname '' '.exe' "$func_basename_result"
++ func_ltwrapper_scriptname_result="$func_dirname_result/$objdir/${func_stripname_result}_ltshwrapper"
+ }
+
+ # func_ltwrapper_p file
+@@ -1149,7 +1311,7 @@ func_execute_cmds ()
+ save_ifs=$IFS; IFS='~'
+ for cmd in $1; do
+ IFS=$save_ifs
+- eval "cmd=\"$cmd\""
++ eval cmd=\"$cmd\"
+ func_show_eval "$cmd" "${2-:}"
+ done
+ IFS=$save_ifs
+@@ -1172,6 +1334,37 @@ func_source ()
+ }
+
+
++# func_resolve_sysroot PATH
++# Replace a leading = in PATH with a sysroot. Store the result into
++# func_resolve_sysroot_result
++func_resolve_sysroot ()
++{
++ func_resolve_sysroot_result=$1
++ case $func_resolve_sysroot_result in
++ =*)
++ func_stripname '=' '' "$func_resolve_sysroot_result"
++ func_resolve_sysroot_result=$lt_sysroot$func_stripname_result
++ ;;
++ esac
++}
++
++# func_replace_sysroot PATH
++# If PATH begins with the sysroot, replace it with = and
++# store the result into func_replace_sysroot_result.
++func_replace_sysroot ()
++{
++ case "$lt_sysroot:$1" in
++ ?*:"$lt_sysroot"*)
++ func_stripname "$lt_sysroot" '' "$1"
++ func_replace_sysroot_result="=$func_stripname_result"
++ ;;
++ *)
++ # Including no sysroot.
++ func_replace_sysroot_result=$1
++ ;;
++ esac
++}
++
+ # func_infer_tag arg
+ # Infer tagged configuration to use if any are available and
+ # if one wasn't chosen via the "--tag" command line option.
+@@ -1184,8 +1377,7 @@ func_infer_tag ()
+ if test -n "$available_tags" && test -z "$tagname"; then
+ CC_quoted=
+ for arg in $CC; do
+- func_quote_for_eval "$arg"
+- CC_quoted="$CC_quoted $func_quote_for_eval_result"
++ func_append_quoted CC_quoted "$arg"
+ done
+ CC_expanded=`func_echo_all $CC`
+ CC_quoted_expanded=`func_echo_all $CC_quoted`
+@@ -1204,8 +1396,7 @@ func_infer_tag ()
+ CC_quoted=
+ for arg in $CC; do
+ # Double-quote args containing other shell metacharacters.
+- func_quote_for_eval "$arg"
+- CC_quoted="$CC_quoted $func_quote_for_eval_result"
++ func_append_quoted CC_quoted "$arg"
+ done
+ CC_expanded=`func_echo_all $CC`
+ CC_quoted_expanded=`func_echo_all $CC_quoted`
+@@ -1274,6 +1465,486 @@ EOF
+ }
+ }
+
++
++##################################################
++# FILE NAME AND PATH CONVERSION HELPER FUNCTIONS #
++##################################################
++
++# func_convert_core_file_wine_to_w32 ARG
++# Helper function used by file name conversion functions when $build is *nix,
++# and $host is mingw, cygwin, or some other w32 environment. Relies on a
++# correctly configured wine environment available, with the winepath program
++# in $build's $PATH.
++#
++# ARG is the $build file name to be converted to w32 format.
++# Result is available in $func_convert_core_file_wine_to_w32_result, and will
++# be empty on error (or when ARG is empty)
++func_convert_core_file_wine_to_w32 ()
++{
++ $opt_debug
++ func_convert_core_file_wine_to_w32_result="$1"
++ if test -n "$1"; then
++ # Unfortunately, winepath does not exit with a non-zero error code, so we
++ # are forced to check the contents of stdout. On the other hand, if the
++ # command is not found, the shell will set an exit code of 127 and print
++ # *an error message* to stdout. So we must check for both error code of
++ # zero AND non-empty stdout, which explains the odd construction:
++ func_convert_core_file_wine_to_w32_tmp=`winepath -w "$1" 2>/dev/null`
++ if test "$?" -eq 0 && test -n "${func_convert_core_file_wine_to_w32_tmp}"; then
++ func_convert_core_file_wine_to_w32_result=`$ECHO "$func_convert_core_file_wine_to_w32_tmp" |
++ $SED -e "$lt_sed_naive_backslashify"`
++ else
++ func_convert_core_file_wine_to_w32_result=
++ fi
++ fi
++}
++# end: func_convert_core_file_wine_to_w32
++
++
++# func_convert_core_path_wine_to_w32 ARG
++# Helper function used by path conversion functions when $build is *nix, and
++# $host is mingw, cygwin, or some other w32 environment. Relies on a correctly
++# configured wine environment available, with the winepath program in $build's
++# $PATH. Assumes ARG has no leading or trailing path separator characters.
++#
++# ARG is path to be converted from $build format to win32.
++# Result is available in $func_convert_core_path_wine_to_w32_result.
++# Unconvertible file (directory) names in ARG are skipped; if no directory names
++# are convertible, then the result may be empty.
++func_convert_core_path_wine_to_w32 ()
++{
++ $opt_debug
++ # unfortunately, winepath doesn't convert paths, only file names
++ func_convert_core_path_wine_to_w32_result=""
++ if test -n "$1"; then
++ oldIFS=$IFS
++ IFS=:
++ for func_convert_core_path_wine_to_w32_f in $1; do
++ IFS=$oldIFS
++ func_convert_core_file_wine_to_w32 "$func_convert_core_path_wine_to_w32_f"
++ if test -n "$func_convert_core_file_wine_to_w32_result" ; then
++ if test -z "$func_convert_core_path_wine_to_w32_result"; then
++ func_convert_core_path_wine_to_w32_result="$func_convert_core_file_wine_to_w32_result"
++ else
++ func_append func_convert_core_path_wine_to_w32_result ";$func_convert_core_file_wine_to_w32_result"
++ fi
++ fi
++ done
++ IFS=$oldIFS
++ fi
++}
++# end: func_convert_core_path_wine_to_w32
++
++
++# func_cygpath ARGS...
++# Wrapper around calling the cygpath program via LT_CYGPATH. This is used when
++# when (1) $build is *nix and Cygwin is hosted via a wine environment; or (2)
++# $build is MSYS and $host is Cygwin, or (3) $build is Cygwin. In case (1) or
++# (2), returns the Cygwin file name or path in func_cygpath_result (input
++# file name or path is assumed to be in w32 format, as previously converted
++# from $build's *nix or MSYS format). In case (3), returns the w32 file name
++# or path in func_cygpath_result (input file name or path is assumed to be in
++# Cygwin format). Returns an empty string on error.
++#
++# ARGS are passed to cygpath, with the last one being the file name or path to
++# be converted.
++#
++# Specify the absolute *nix (or w32) name to cygpath in the LT_CYGPATH
++# environment variable; do not put it in $PATH.
++func_cygpath ()
++{
++ $opt_debug
++ if test -n "$LT_CYGPATH" && test -f "$LT_CYGPATH"; then
++ func_cygpath_result=`$LT_CYGPATH "$@" 2>/dev/null`
++ if test "$?" -ne 0; then
++ # on failure, ensure result is empty
++ func_cygpath_result=
++ fi
++ else
++ func_cygpath_result=
++ func_error "LT_CYGPATH is empty or specifies non-existent file: \`$LT_CYGPATH'"
++ fi
++}
++#end: func_cygpath
++
++
++# func_convert_core_msys_to_w32 ARG
++# Convert file name or path ARG from MSYS format to w32 format. Return
++# result in func_convert_core_msys_to_w32_result.
++func_convert_core_msys_to_w32 ()
++{
++ $opt_debug
++ # awkward: cmd appends spaces to result
++ func_convert_core_msys_to_w32_result=`( cmd //c echo "$1" ) 2>/dev/null |
++ $SED -e 's/[ ]*$//' -e "$lt_sed_naive_backslashify"`
++}
++#end: func_convert_core_msys_to_w32
++
++
++# func_convert_file_check ARG1 ARG2
++# Verify that ARG1 (a file name in $build format) was converted to $host
++# format in ARG2. Otherwise, emit an error message, but continue (resetting
++# func_to_host_file_result to ARG1).
++func_convert_file_check ()
++{
++ $opt_debug
++ if test -z "$2" && test -n "$1" ; then
++ func_error "Could not determine host file name corresponding to"
++ func_error " \`$1'"
++ func_error "Continuing, but uninstalled executables may not work."
++ # Fallback:
++ func_to_host_file_result="$1"
++ fi
++}
++# end func_convert_file_check
++
++
++# func_convert_path_check FROM_PATHSEP TO_PATHSEP FROM_PATH TO_PATH
++# Verify that FROM_PATH (a path in $build format) was converted to $host
++# format in TO_PATH. Otherwise, emit an error message, but continue, resetting
++# func_to_host_file_result to a simplistic fallback value (see below).
++func_convert_path_check ()
++{
++ $opt_debug
++ if test -z "$4" && test -n "$3"; then
++ func_error "Could not determine the host path corresponding to"
++ func_error " \`$3'"
++ func_error "Continuing, but uninstalled executables may not work."
++ # Fallback. This is a deliberately simplistic "conversion" and
++ # should not be "improved". See libtool.info.
++ if test "x$1" != "x$2"; then
++ lt_replace_pathsep_chars="s|$1|$2|g"
++ func_to_host_path_result=`echo "$3" |
++ $SED -e "$lt_replace_pathsep_chars"`
++ else
++ func_to_host_path_result="$3"
++ fi
++ fi
++}
++# end func_convert_path_check
++
++
++# func_convert_path_front_back_pathsep FRONTPAT BACKPAT REPL ORIG
++# Modifies func_to_host_path_result by prepending REPL if ORIG matches FRONTPAT
++# and appending REPL if ORIG matches BACKPAT.
++func_convert_path_front_back_pathsep ()
++{
++ $opt_debug
++ case $4 in
++ $1 ) func_to_host_path_result="$3$func_to_host_path_result"
++ ;;
++ esac
++ case $4 in
++ $2 ) func_append func_to_host_path_result "$3"
++ ;;
++ esac
++}
++# end func_convert_path_front_back_pathsep
++
++
++##################################################
++# $build to $host FILE NAME CONVERSION FUNCTIONS #
++##################################################
++# invoked via `$to_host_file_cmd ARG'
++#
++# In each case, ARG is the path to be converted from $build to $host format.
++# Result will be available in $func_to_host_file_result.
++
++
++# func_to_host_file ARG
++# Converts the file name ARG from $build format to $host format. Return result
++# in func_to_host_file_result.
++func_to_host_file ()
++{
++ $opt_debug
++ $to_host_file_cmd "$1"
++}
++# end func_to_host_file
++
++
++# func_to_tool_file ARG LAZY
++# converts the file name ARG from $build format to toolchain format. Return
++# result in func_to_tool_file_result. If the conversion in use is listed
++# in (the comma separated) LAZY, no conversion takes place.
++func_to_tool_file ()
++{
++ $opt_debug
++ case ,$2, in
++ *,"$to_tool_file_cmd",*)
++ func_to_tool_file_result=$1
++ ;;
++ *)
++ $to_tool_file_cmd "$1"
++ func_to_tool_file_result=$func_to_host_file_result
++ ;;
++ esac
++}
++# end func_to_tool_file
++
++
++# func_convert_file_noop ARG
++# Copy ARG to func_to_host_file_result.
++func_convert_file_noop ()
++{
++ func_to_host_file_result="$1"
++}
++# end func_convert_file_noop
++
++
++# func_convert_file_msys_to_w32 ARG
++# Convert file name ARG from (mingw) MSYS to (mingw) w32 format; automatic
++# conversion to w32 is not available inside the cwrapper. Returns result in
++# func_to_host_file_result.
++func_convert_file_msys_to_w32 ()
++{
++ $opt_debug
++ func_to_host_file_result="$1"
++ if test -n "$1"; then
++ func_convert_core_msys_to_w32 "$1"
++ func_to_host_file_result="$func_convert_core_msys_to_w32_result"
++ fi
++ func_convert_file_check "$1" "$func_to_host_file_result"
++}
++# end func_convert_file_msys_to_w32
++
++
++# func_convert_file_cygwin_to_w32 ARG
++# Convert file name ARG from Cygwin to w32 format. Returns result in
++# func_to_host_file_result.
++func_convert_file_cygwin_to_w32 ()
++{
++ $opt_debug
++ func_to_host_file_result="$1"
++ if test -n "$1"; then
++ # because $build is cygwin, we call "the" cygpath in $PATH; no need to use
++ # LT_CYGPATH in this case.
++ func_to_host_file_result=`cygpath -m "$1"`
++ fi
++ func_convert_file_check "$1" "$func_to_host_file_result"
++}
++# end func_convert_file_cygwin_to_w32
++
++
++# func_convert_file_nix_to_w32 ARG
++# Convert file name ARG from *nix to w32 format. Requires a wine environment
++# and a working winepath. Returns result in func_to_host_file_result.
++func_convert_file_nix_to_w32 ()
++{
++ $opt_debug
++ func_to_host_file_result="$1"
++ if test -n "$1"; then
++ func_convert_core_file_wine_to_w32 "$1"
++ func_to_host_file_result="$func_convert_core_file_wine_to_w32_result"
++ fi
++ func_convert_file_check "$1" "$func_to_host_file_result"
++}
++# end func_convert_file_nix_to_w32
++
++
++# func_convert_file_msys_to_cygwin ARG
++# Convert file name ARG from MSYS to Cygwin format. Requires LT_CYGPATH set.
++# Returns result in func_to_host_file_result.
++func_convert_file_msys_to_cygwin ()
++{
++ $opt_debug
++ func_to_host_file_result="$1"
++ if test -n "$1"; then
++ func_convert_core_msys_to_w32 "$1"
++ func_cygpath -u "$func_convert_core_msys_to_w32_result"
++ func_to_host_file_result="$func_cygpath_result"
++ fi
++ func_convert_file_check "$1" "$func_to_host_file_result"
++}
++# end func_convert_file_msys_to_cygwin
++
++
++# func_convert_file_nix_to_cygwin ARG
++# Convert file name ARG from *nix to Cygwin format. Requires Cygwin installed
++# in a wine environment, working winepath, and LT_CYGPATH set. Returns result
++# in func_to_host_file_result.
++func_convert_file_nix_to_cygwin ()
++{
++ $opt_debug
++ func_to_host_file_result="$1"
++ if test -n "$1"; then
++ # convert from *nix to w32, then use cygpath to convert from w32 to cygwin.
++ func_convert_core_file_wine_to_w32 "$1"
++ func_cygpath -u "$func_convert_core_file_wine_to_w32_result"
++ func_to_host_file_result="$func_cygpath_result"
++ fi
++ func_convert_file_check "$1" "$func_to_host_file_result"
++}
++# end func_convert_file_nix_to_cygwin
++
++
++#############################################
++# $build to $host PATH CONVERSION FUNCTIONS #
++#############################################
++# invoked via `$to_host_path_cmd ARG'
++#
++# In each case, ARG is the path to be converted from $build to $host format.
++# The result will be available in $func_to_host_path_result.
++#
++# Path separators are also converted from $build format to $host format. If
++# ARG begins or ends with a path separator character, it is preserved (but
++# converted to $host format) on output.
++#
++# All path conversion functions are named using the following convention:
++# file name conversion function : func_convert_file_X_to_Y ()
++# path conversion function : func_convert_path_X_to_Y ()
++# where, for any given $build/$host combination the 'X_to_Y' value is the
++# same. If conversion functions are added for new $build/$host combinations,
++# the two new functions must follow this pattern, or func_init_to_host_path_cmd
++# will break.
++
++
++# func_init_to_host_path_cmd
++# Ensures that function "pointer" variable $to_host_path_cmd is set to the
++# appropriate value, based on the value of $to_host_file_cmd.
++to_host_path_cmd=
++func_init_to_host_path_cmd ()
++{
++ $opt_debug
++ if test -z "$to_host_path_cmd"; then
++ func_stripname 'func_convert_file_' '' "$to_host_file_cmd"
++ to_host_path_cmd="func_convert_path_${func_stripname_result}"
++ fi
++}
++
++
++# func_to_host_path ARG
++# Converts the path ARG from $build format to $host format. Return result
++# in func_to_host_path_result.
++func_to_host_path ()
++{
++ $opt_debug
++ func_init_to_host_path_cmd
++ $to_host_path_cmd "$1"
++}
++# end func_to_host_path
++
++
++# func_convert_path_noop ARG
++# Copy ARG to func_to_host_path_result.
++func_convert_path_noop ()
++{
++ func_to_host_path_result="$1"
++}
++# end func_convert_path_noop
++
++
++# func_convert_path_msys_to_w32 ARG
++# Convert path ARG from (mingw) MSYS to (mingw) w32 format; automatic
++# conversion to w32 is not available inside the cwrapper. Returns result in
++# func_to_host_path_result.
++func_convert_path_msys_to_w32 ()
++{
++ $opt_debug
++ func_to_host_path_result="$1"
++ if test -n "$1"; then
++ # Remove leading and trailing path separator characters from ARG. MSYS
++ # behavior is inconsistent here; cygpath turns them into '.;' and ';.';
++ # and winepath ignores them completely.
++ func_stripname : : "$1"
++ func_to_host_path_tmp1=$func_stripname_result
++ func_convert_core_msys_to_w32 "$func_to_host_path_tmp1"
++ func_to_host_path_result="$func_convert_core_msys_to_w32_result"
++ func_convert_path_check : ";" \
++ "$func_to_host_path_tmp1" "$func_to_host_path_result"
++ func_convert_path_front_back_pathsep ":*" "*:" ";" "$1"
++ fi
++}
++# end func_convert_path_msys_to_w32
++
++
++# func_convert_path_cygwin_to_w32 ARG
++# Convert path ARG from Cygwin to w32 format. Returns result in
++# func_to_host_file_result.
++func_convert_path_cygwin_to_w32 ()
++{
++ $opt_debug
++ func_to_host_path_result="$1"
++ if test -n "$1"; then
++ # See func_convert_path_msys_to_w32:
++ func_stripname : : "$1"
++ func_to_host_path_tmp1=$func_stripname_result
++ func_to_host_path_result=`cygpath -m -p "$func_to_host_path_tmp1"`
++ func_convert_path_check : ";" \
++ "$func_to_host_path_tmp1" "$func_to_host_path_result"
++ func_convert_path_front_back_pathsep ":*" "*:" ";" "$1"
++ fi
++}
++# end func_convert_path_cygwin_to_w32
++
++
++# func_convert_path_nix_to_w32 ARG
++# Convert path ARG from *nix to w32 format. Requires a wine environment and
++# a working winepath. Returns result in func_to_host_file_result.
++func_convert_path_nix_to_w32 ()
++{
++ $opt_debug
++ func_to_host_path_result="$1"
++ if test -n "$1"; then
++ # See func_convert_path_msys_to_w32:
++ func_stripname : : "$1"
++ func_to_host_path_tmp1=$func_stripname_result
++ func_convert_core_path_wine_to_w32 "$func_to_host_path_tmp1"
++ func_to_host_path_result="$func_convert_core_path_wine_to_w32_result"
++ func_convert_path_check : ";" \
++ "$func_to_host_path_tmp1" "$func_to_host_path_result"
++ func_convert_path_front_back_pathsep ":*" "*:" ";" "$1"
++ fi
++}
++# end func_convert_path_nix_to_w32
++
++
++# func_convert_path_msys_to_cygwin ARG
++# Convert path ARG from MSYS to Cygwin format. Requires LT_CYGPATH set.
++# Returns result in func_to_host_file_result.
++func_convert_path_msys_to_cygwin ()
++{
++ $opt_debug
++ func_to_host_path_result="$1"
++ if test -n "$1"; then
++ # See func_convert_path_msys_to_w32:
++ func_stripname : : "$1"
++ func_to_host_path_tmp1=$func_stripname_result
++ func_convert_core_msys_to_w32 "$func_to_host_path_tmp1"
++ func_cygpath -u -p "$func_convert_core_msys_to_w32_result"
++ func_to_host_path_result="$func_cygpath_result"
++ func_convert_path_check : : \
++ "$func_to_host_path_tmp1" "$func_to_host_path_result"
++ func_convert_path_front_back_pathsep ":*" "*:" : "$1"
++ fi
++}
++# end func_convert_path_msys_to_cygwin
++
++
++# func_convert_path_nix_to_cygwin ARG
++# Convert path ARG from *nix to Cygwin format. Requires Cygwin installed in a
++# a wine environment, working winepath, and LT_CYGPATH set. Returns result in
++# func_to_host_file_result.
++func_convert_path_nix_to_cygwin ()
++{
++ $opt_debug
++ func_to_host_path_result="$1"
++ if test -n "$1"; then
++ # Remove leading and trailing path separator characters from
++ # ARG. msys behavior is inconsistent here, cygpath turns them
++ # into '.;' and ';.', and winepath ignores them completely.
++ func_stripname : : "$1"
++ func_to_host_path_tmp1=$func_stripname_result
++ func_convert_core_path_wine_to_w32 "$func_to_host_path_tmp1"
++ func_cygpath -u -p "$func_convert_core_path_wine_to_w32_result"
++ func_to_host_path_result="$func_cygpath_result"
++ func_convert_path_check : : \
++ "$func_to_host_path_tmp1" "$func_to_host_path_result"
++ func_convert_path_front_back_pathsep ":*" "*:" : "$1"
++ fi
++}
++# end func_convert_path_nix_to_cygwin
++
++
+ # func_mode_compile arg...
+ func_mode_compile ()
+ {
+@@ -1314,12 +1985,12 @@ func_mode_compile ()
+ ;;
+
+ -pie | -fpie | -fPIE)
+- pie_flag="$pie_flag $arg"
++ func_append pie_flag " $arg"
+ continue
+ ;;
+
+ -shared | -static | -prefer-pic | -prefer-non-pic)
+- later="$later $arg"
++ func_append later " $arg"
+ continue
+ ;;
+
+@@ -1340,15 +2011,14 @@ func_mode_compile ()
+ save_ifs="$IFS"; IFS=','
+ for arg in $args; do
+ IFS="$save_ifs"
+- func_quote_for_eval "$arg"
+- lastarg="$lastarg $func_quote_for_eval_result"
++ func_append_quoted lastarg "$arg"
+ done
+ IFS="$save_ifs"
+ func_stripname ' ' '' "$lastarg"
+ lastarg=$func_stripname_result
+
+ # Add the arguments to base_compile.
+- base_compile="$base_compile $lastarg"
++ func_append base_compile " $lastarg"
+ continue
+ ;;
+
+@@ -1364,8 +2034,7 @@ func_mode_compile ()
+ esac # case $arg_mode
+
+ # Aesthetically quote the previous argument.
+- func_quote_for_eval "$lastarg"
+- base_compile="$base_compile $func_quote_for_eval_result"
++ func_append_quoted base_compile "$lastarg"
+ done # for arg
+
+ case $arg_mode in
+@@ -1496,17 +2165,16 @@ compiler."
+ $opt_dry_run || $RM $removelist
+ exit $EXIT_FAILURE
+ fi
+- removelist="$removelist $output_obj"
++ func_append removelist " $output_obj"
+ $ECHO "$srcfile" > "$lockfile"
+ fi
+
+ $opt_dry_run || $RM $removelist
+- removelist="$removelist $lockfile"
++ func_append removelist " $lockfile"
+ trap '$opt_dry_run || $RM $removelist; exit $EXIT_FAILURE' 1 2 15
+
+- if test -n "$fix_srcfile_path"; then
+- eval "srcfile=\"$fix_srcfile_path\""
+- fi
++ func_to_tool_file "$srcfile" func_convert_file_msys_to_w32
++ srcfile=$func_to_tool_file_result
+ func_quote_for_eval "$srcfile"
+ qsrcfile=$func_quote_for_eval_result
+
+@@ -1526,7 +2194,7 @@ compiler."
+
+ if test -z "$output_obj"; then
+ # Place PIC objects in $objdir
+- command="$command -o $lobj"
++ func_append command " -o $lobj"
+ fi
+
+ func_show_eval_locale "$command" \
+@@ -1573,11 +2241,11 @@ compiler."
+ command="$base_compile $qsrcfile $pic_flag"
+ fi
+ if test "$compiler_c_o" = yes; then
+- command="$command -o $obj"
++ func_append command " -o $obj"
+ fi
+
+ # Suppress compiler output if we already did a PIC compilation.
+- command="$command$suppress_output"
++ func_append command "$suppress_output"
+ func_show_eval_locale "$command" \
+ '$opt_dry_run || $RM $removelist; exit $EXIT_FAILURE'
+
+@@ -1622,13 +2290,13 @@ compiler."
+ }
+
+ $opt_help || {
+- test "$mode" = compile && func_mode_compile ${1+"$@"}
++ test "$opt_mode" = compile && func_mode_compile ${1+"$@"}
+ }
+
+ func_mode_help ()
+ {
+ # We need to display help for each of the modes.
+- case $mode in
++ case $opt_mode in
+ "")
+ # Generic help is extracted from the usage comments
+ # at the start of this file.
+@@ -1659,8 +2327,8 @@ This mode accepts the following additional options:
+
+ -o OUTPUT-FILE set the output file name to OUTPUT-FILE
+ -no-suppress do not suppress compiler output for multiple passes
+- -prefer-pic try to building PIC objects only
+- -prefer-non-pic try to building non-PIC objects only
++ -prefer-pic try to build PIC objects only
++ -prefer-non-pic try to build non-PIC objects only
+ -shared do not build a \`.o' file suitable for static linking
+ -static only build a \`.o' file suitable for static linking
+ -Wc,FLAG pass FLAG directly to the compiler
+@@ -1804,7 +2472,7 @@ Otherwise, only FILE itself is deleted using RM."
+ ;;
+
+ *)
+- func_fatal_help "invalid operation mode \`$mode'"
++ func_fatal_help "invalid operation mode \`$opt_mode'"
+ ;;
+ esac
+
+@@ -1819,13 +2487,13 @@ if $opt_help; then
+ else
+ {
+ func_help noexit
+- for mode in compile link execute install finish uninstall clean; do
++ for opt_mode in compile link execute install finish uninstall clean; do
+ func_mode_help
+ done
+ } | sed -n '1p; 2,$s/^Usage:/ or: /p'
+ {
+ func_help noexit
+- for mode in compile link execute install finish uninstall clean; do
++ for opt_mode in compile link execute install finish uninstall clean; do
+ echo
+ func_mode_help
+ done
+@@ -1854,13 +2522,16 @@ func_mode_execute ()
+ func_fatal_help "you must specify a COMMAND"
+
+ # Handle -dlopen flags immediately.
+- for file in $execute_dlfiles; do
++ for file in $opt_dlopen; do
+ test -f "$file" \
+ || func_fatal_help "\`$file' is not a file"
+
+ dir=
+ case $file in
+ *.la)
++ func_resolve_sysroot "$file"
++ file=$func_resolve_sysroot_result
++
+ # Check to see that this really is a libtool archive.
+ func_lalib_unsafe_p "$file" \
+ || func_fatal_help "\`$lib' is not a valid libtool archive"
+@@ -1882,7 +2553,7 @@ func_mode_execute ()
+ dir="$func_dirname_result"
+
+ if test -f "$dir/$objdir/$dlname"; then
+- dir="$dir/$objdir"
++ func_append dir "/$objdir"
+ else
+ if test ! -f "$dir/$dlname"; then
+ func_fatal_error "cannot find \`$dlname' in \`$dir' or \`$dir/$objdir'"
+@@ -1907,10 +2578,10 @@ func_mode_execute ()
+ test -n "$absdir" && dir="$absdir"
+
+ # Now add the directory to shlibpath_var.
+- if eval test -z \"\$$shlibpath_var\"; then
+- eval $shlibpath_var=\$dir
++ if eval "test -z \"\$$shlibpath_var\""; then
++ eval "$shlibpath_var=\"\$dir\""
+ else
+- eval $shlibpath_var=\$dir:\$$shlibpath_var
++ eval "$shlibpath_var=\"\$dir:\$$shlibpath_var\""
+ fi
+ done
+
+@@ -1939,8 +2610,7 @@ func_mode_execute ()
+ ;;
+ esac
+ # Quote arguments (to preserve shell metacharacters).
+- func_quote_for_eval "$file"
+- args="$args $func_quote_for_eval_result"
++ func_append_quoted args "$file"
+ done
+
+ if test "X$opt_dry_run" = Xfalse; then
+@@ -1972,22 +2642,59 @@ func_mode_execute ()
+ fi
+ }
+
+-test "$mode" = execute && func_mode_execute ${1+"$@"}
++test "$opt_mode" = execute && func_mode_execute ${1+"$@"}
+
+
+ # func_mode_finish arg...
+ func_mode_finish ()
+ {
+ $opt_debug
+- libdirs="$nonopt"
++ libs=
++ libdirs=
+ admincmds=
+
+- if test -n "$finish_cmds$finish_eval" && test -n "$libdirs"; then
+- for dir
+- do
+- libdirs="$libdirs $dir"
+- done
++ for opt in "$nonopt" ${1+"$@"}
++ do
++ if test -d "$opt"; then
++ func_append libdirs " $opt"
+
++ elif test -f "$opt"; then
++ if func_lalib_unsafe_p "$opt"; then
++ func_append libs " $opt"
++ else
++ func_warning "\`$opt' is not a valid libtool archive"
++ fi
++
++ else
++ func_fatal_error "invalid argument \`$opt'"
++ fi
++ done
++
++ if test -n "$libs"; then
++ if test -n "$lt_sysroot"; then
++ sysroot_regex=`$ECHO "$lt_sysroot" | $SED "$sed_make_literal_regex"`
++ sysroot_cmd="s/\([ ']\)$sysroot_regex/\1/g;"
++ else
++ sysroot_cmd=
++ fi
++
++ # Remove sysroot references
++ if $opt_dry_run; then
++ for lib in $libs; do
++ echo "removing references to $lt_sysroot and \`=' prefixes from $lib"
++ done
++ else
++ tmpdir=`func_mktempdir`
++ for lib in $libs; do
++ sed -e "${sysroot_cmd} s/\([ ']-[LR]\)=/\1/g; s/\([ ']\)=/\1/g" $lib \
++ > $tmpdir/tmp-la
++ mv -f $tmpdir/tmp-la $lib
++ done
++ ${RM}r "$tmpdir"
++ fi
++ fi
++
++ if test -n "$finish_cmds$finish_eval" && test -n "$libdirs"; then
+ for libdir in $libdirs; do
+ if test -n "$finish_cmds"; then
+ # Do each command in the finish commands.
+@@ -1997,7 +2704,7 @@ func_mode_finish ()
+ if test -n "$finish_eval"; then
+ # Do the single finish_eval.
+ eval cmds=\"$finish_eval\"
+- $opt_dry_run || eval "$cmds" || admincmds="$admincmds
++ $opt_dry_run || eval "$cmds" || func_append admincmds "
+ $cmds"
+ fi
+ done
+@@ -2006,53 +2713,55 @@ func_mode_finish ()
+ # Exit here if they wanted silent mode.
+ $opt_silent && exit $EXIT_SUCCESS
+
+- echo "----------------------------------------------------------------------"
+- echo "Libraries have been installed in:"
+- for libdir in $libdirs; do
+- $ECHO " $libdir"
+- done
+- echo
+- echo "If you ever happen to want to link against installed libraries"
+- echo "in a given directory, LIBDIR, you must either use libtool, and"
+- echo "specify the full pathname of the library, or use the \`-LLIBDIR'"
+- echo "flag during linking and do at least one of the following:"
+- if test -n "$shlibpath_var"; then
+- echo " - add LIBDIR to the \`$shlibpath_var' environment variable"
+- echo " during execution"
+- fi
+- if test -n "$runpath_var"; then
+- echo " - add LIBDIR to the \`$runpath_var' environment variable"
+- echo " during linking"
+- fi
+- if test -n "$hardcode_libdir_flag_spec"; then
+- libdir=LIBDIR
+- eval "flag=\"$hardcode_libdir_flag_spec\""
++ if test -n "$finish_cmds$finish_eval" && test -n "$libdirs"; then
++ echo "----------------------------------------------------------------------"
++ echo "Libraries have been installed in:"
++ for libdir in $libdirs; do
++ $ECHO " $libdir"
++ done
++ echo
++ echo "If you ever happen to want to link against installed libraries"
++ echo "in a given directory, LIBDIR, you must either use libtool, and"
++ echo "specify the full pathname of the library, or use the \`-LLIBDIR'"
++ echo "flag during linking and do at least one of the following:"
++ if test -n "$shlibpath_var"; then
++ echo " - add LIBDIR to the \`$shlibpath_var' environment variable"
++ echo " during execution"
++ fi
++ if test -n "$runpath_var"; then
++ echo " - add LIBDIR to the \`$runpath_var' environment variable"
++ echo " during linking"
++ fi
++ if test -n "$hardcode_libdir_flag_spec"; then
++ libdir=LIBDIR
++ eval flag=\"$hardcode_libdir_flag_spec\"
+
+- $ECHO " - use the \`$flag' linker flag"
+- fi
+- if test -n "$admincmds"; then
+- $ECHO " - have your system administrator run these commands:$admincmds"
+- fi
+- if test -f /etc/ld.so.conf; then
+- echo " - have your system administrator add LIBDIR to \`/etc/ld.so.conf'"
+- fi
+- echo
++ $ECHO " - use the \`$flag' linker flag"
++ fi
++ if test -n "$admincmds"; then
++ $ECHO " - have your system administrator run these commands:$admincmds"
++ fi
++ if test -f /etc/ld.so.conf; then
++ echo " - have your system administrator add LIBDIR to \`/etc/ld.so.conf'"
++ fi
++ echo
+
+- echo "See any operating system documentation about shared libraries for"
+- case $host in
+- solaris2.[6789]|solaris2.1[0-9])
+- echo "more information, such as the ld(1), crle(1) and ld.so(8) manual"
+- echo "pages."
+- ;;
+- *)
+- echo "more information, such as the ld(1) and ld.so(8) manual pages."
+- ;;
+- esac
+- echo "----------------------------------------------------------------------"
++ echo "See any operating system documentation about shared libraries for"
++ case $host in
++ solaris2.[6789]|solaris2.1[0-9])
++ echo "more information, such as the ld(1), crle(1) and ld.so(8) manual"
++ echo "pages."
++ ;;
++ *)
++ echo "more information, such as the ld(1) and ld.so(8) manual pages."
++ ;;
++ esac
++ echo "----------------------------------------------------------------------"
++ fi
+ exit $EXIT_SUCCESS
+ }
+
+-test "$mode" = finish && func_mode_finish ${1+"$@"}
++test "$opt_mode" = finish && func_mode_finish ${1+"$@"}
+
+
+ # func_mode_install arg...
+@@ -2077,7 +2786,7 @@ func_mode_install ()
+ # The real first argument should be the name of the installation program.
+ # Aesthetically quote it.
+ func_quote_for_eval "$arg"
+- install_prog="$install_prog$func_quote_for_eval_result"
++ func_append install_prog "$func_quote_for_eval_result"
+ install_shared_prog=$install_prog
+ case " $install_prog " in
+ *[\\\ /]cp\ *) install_cp=: ;;
+@@ -2097,7 +2806,7 @@ func_mode_install ()
+ do
+ arg2=
+ if test -n "$dest"; then
+- files="$files $dest"
++ func_append files " $dest"
+ dest=$arg
+ continue
+ fi
+@@ -2135,11 +2844,11 @@ func_mode_install ()
+
+ # Aesthetically quote the argument.
+ func_quote_for_eval "$arg"
+- install_prog="$install_prog $func_quote_for_eval_result"
++ func_append install_prog " $func_quote_for_eval_result"
+ if test -n "$arg2"; then
+ func_quote_for_eval "$arg2"
+ fi
+- install_shared_prog="$install_shared_prog $func_quote_for_eval_result"
++ func_append install_shared_prog " $func_quote_for_eval_result"
+ done
+
+ test -z "$install_prog" && \
+@@ -2151,7 +2860,7 @@ func_mode_install ()
+ if test -n "$install_override_mode" && $no_mode; then
+ if $install_cp; then :; else
+ func_quote_for_eval "$install_override_mode"
+- install_shared_prog="$install_shared_prog -m $func_quote_for_eval_result"
++ func_append install_shared_prog " -m $func_quote_for_eval_result"
+ fi
+ fi
+
+@@ -2209,10 +2918,13 @@ func_mode_install ()
+ case $file in
+ *.$libext)
+ # Do the static libraries later.
+- staticlibs="$staticlibs $file"
++ func_append staticlibs " $file"
+ ;;
+
+ *.la)
++ func_resolve_sysroot "$file"
++ file=$func_resolve_sysroot_result
++
+ # Check to see that this really is a libtool archive.
+ func_lalib_unsafe_p "$file" \
+ || func_fatal_help "\`$file' is not a valid libtool archive"
+@@ -2226,23 +2938,30 @@ func_mode_install ()
+ if test "X$destdir" = "X$libdir"; then
+ case "$current_libdirs " in
+ *" $libdir "*) ;;
+- *) current_libdirs="$current_libdirs $libdir" ;;
++ *) func_append current_libdirs " $libdir" ;;
+ esac
+ else
+ # Note the libdir as a future libdir.
+ case "$future_libdirs " in
+ *" $libdir "*) ;;
+- *) future_libdirs="$future_libdirs $libdir" ;;
++ *) func_append future_libdirs " $libdir" ;;
+ esac
+ fi
+
+ func_dirname "$file" "/" ""
+ dir="$func_dirname_result"
+- dir="$dir$objdir"
++ func_append dir "$objdir"
+
+ if test -n "$relink_command"; then
++ # Strip any trailing slash from the destination.
++ func_stripname '' '/' "$libdir"
++ destlibdir=$func_stripname_result
++
++ func_stripname '' '/' "$destdir"
++ s_destdir=$func_stripname_result
++
+ # Determine the prefix the user has applied to our future dir.
+- inst_prefix_dir=`$ECHO "$destdir" | $SED -e "s%$libdir\$%%"`
++ inst_prefix_dir=`$ECHO "X$s_destdir" | $Xsed -e "s%$destlibdir\$%%"`
+
+ # Don't allow the user to place us outside of our expected
+ # location b/c this prevents finding dependent libraries that
+@@ -2315,7 +3034,7 @@ func_mode_install ()
+ func_show_eval "$install_prog $instname $destdir/$name" 'exit $?'
+
+ # Maybe install the static library, too.
+- test -n "$old_library" && staticlibs="$staticlibs $dir/$old_library"
++ test -n "$old_library" && func_append staticlibs " $dir/$old_library"
+ ;;
+
+ *.lo)
+@@ -2503,7 +3222,7 @@ func_mode_install ()
+ test -n "$future_libdirs" && \
+ func_warning "remember to run \`$progname --finish$future_libdirs'"
+
+- if test -n "$current_libdirs" && $opt_finish; then
++ if test -n "$current_libdirs"; then
+ # Maybe just do a dry run.
+ $opt_dry_run && current_libdirs=" -n$current_libdirs"
+ exec_cmd='$SHELL $progpath $preserve_args --finish$current_libdirs'
+@@ -2512,7 +3231,7 @@ func_mode_install ()
+ fi
+ }
+
+-test "$mode" = install && func_mode_install ${1+"$@"}
++test "$opt_mode" = install && func_mode_install ${1+"$@"}
+
+
+ # func_generate_dlsyms outputname originator pic_p
+@@ -2559,6 +3278,18 @@ extern \"C\" {
+ #pragma GCC diagnostic ignored \"-Wstrict-prototypes\"
+ #endif
+
++/* Keep this code in sync between libtool.m4, ltmain, lt_system.h, and tests. */
++#if defined(_WIN32) || defined(__CYGWIN__) || defined(_WIN32_WCE)
++/* DATA imports from DLLs on WIN32 con't be const, because runtime
++ relocations are performed -- see ld's documentation on pseudo-relocs. */
++# define LT_DLSYM_CONST
++#elif defined(__osf__)
++/* This system does not cope well with relocations in const data. */
++# define LT_DLSYM_CONST
++#else
++# define LT_DLSYM_CONST const
++#endif
++
+ /* External symbol declarations for the compiler. */\
+ "
+
+@@ -2570,21 +3301,22 @@ extern \"C\" {
+ # Add our own program objects to the symbol list.
+ progfiles=`$ECHO "$objs$old_deplibs" | $SP2NL | $SED "$lo2o" | $NL2SP`
+ for progfile in $progfiles; do
+- func_verbose "extracting global C symbols from \`$progfile'"
+- $opt_dry_run || eval "$NM $progfile | $global_symbol_pipe >> '$nlist'"
++ func_to_tool_file "$progfile" func_convert_file_msys_to_w32
++ func_verbose "extracting global C symbols from \`$func_to_tool_file_result'"
++ $opt_dry_run || eval "$NM $func_to_tool_file_result | $global_symbol_pipe >> '$nlist'"
+ done
+
+ if test -n "$exclude_expsyms"; then
+ $opt_dry_run || {
+- $EGREP -v " ($exclude_expsyms)$" "$nlist" > "$nlist"T
+- $MV "$nlist"T "$nlist"
++ eval '$EGREP -v " ($exclude_expsyms)$" "$nlist" > "$nlist"T'
++ eval '$MV "$nlist"T "$nlist"'
+ }
+ fi
+
+ if test -n "$export_symbols_regex"; then
+ $opt_dry_run || {
+- $EGREP -e "$export_symbols_regex" "$nlist" > "$nlist"T
+- $MV "$nlist"T "$nlist"
++ eval '$EGREP -e "$export_symbols_regex" "$nlist" > "$nlist"T'
++ eval '$MV "$nlist"T "$nlist"'
+ }
+ fi
+
+@@ -2593,23 +3325,23 @@ extern \"C\" {
+ export_symbols="$output_objdir/$outputname.exp"
+ $opt_dry_run || {
+ $RM $export_symbols
+- ${SED} -n -e '/^: @PROGRAM@ $/d' -e 's/^.* \(.*\)$/\1/p' < "$nlist" > "$export_symbols"
++ eval "${SED} -n -e '/^: @PROGRAM@ $/d' -e 's/^.* \(.*\)$/\1/p' "'< "$nlist" > "$export_symbols"'
+ case $host in
+ *cygwin* | *mingw* | *cegcc* )
+- echo EXPORTS > "$output_objdir/$outputname.def"
+- cat "$export_symbols" >> "$output_objdir/$outputname.def"
++ eval "echo EXPORTS "'> "$output_objdir/$outputname.def"'
++ eval 'cat "$export_symbols" >> "$output_objdir/$outputname.def"'
+ ;;
+ esac
+ }
+ else
+ $opt_dry_run || {
+- ${SED} -e 's/\([].[*^$]\)/\\\1/g' -e 's/^/ /' -e 's/$/$/' < "$export_symbols" > "$output_objdir/$outputname.exp"
+- $GREP -f "$output_objdir/$outputname.exp" < "$nlist" > "$nlist"T
+- $MV "$nlist"T "$nlist"
++ eval "${SED} -e 's/\([].[*^$]\)/\\\\\1/g' -e 's/^/ /' -e 's/$/$/'"' < "$export_symbols" > "$output_objdir/$outputname.exp"'
++ eval '$GREP -f "$output_objdir/$outputname.exp" < "$nlist" > "$nlist"T'
++ eval '$MV "$nlist"T "$nlist"'
+ case $host in
+ *cygwin* | *mingw* | *cegcc* )
+- echo EXPORTS > "$output_objdir/$outputname.def"
+- cat "$nlist" >> "$output_objdir/$outputname.def"
++ eval "echo EXPORTS "'> "$output_objdir/$outputname.def"'
++ eval 'cat "$nlist" >> "$output_objdir/$outputname.def"'
+ ;;
+ esac
+ }
+@@ -2620,10 +3352,52 @@ extern \"C\" {
+ func_verbose "extracting global C symbols from \`$dlprefile'"
+ func_basename "$dlprefile"
+ name="$func_basename_result"
+- $opt_dry_run || {
+- $ECHO ": $name " >> "$nlist"
+- eval "$NM $dlprefile 2>/dev/null | $global_symbol_pipe >> '$nlist'"
+- }
++ case $host in
++ *cygwin* | *mingw* | *cegcc* )
++ # if an import library, we need to obtain dlname
++ if func_win32_import_lib_p "$dlprefile"; then
++ func_tr_sh "$dlprefile"
++ eval "curr_lafile=\$libfile_$func_tr_sh_result"
++ dlprefile_dlbasename=""
++ if test -n "$curr_lafile" && func_lalib_p "$curr_lafile"; then
++ # Use subshell, to avoid clobbering current variable values
++ dlprefile_dlname=`source "$curr_lafile" && echo "$dlname"`
++ if test -n "$dlprefile_dlname" ; then
++ func_basename "$dlprefile_dlname"
++ dlprefile_dlbasename="$func_basename_result"
++ else
++ # no lafile. user explicitly requested -dlpreopen <import library>.
++ $sharedlib_from_linklib_cmd "$dlprefile"
++ dlprefile_dlbasename=$sharedlib_from_linklib_result
++ fi
++ fi
++ $opt_dry_run || {
++ if test -n "$dlprefile_dlbasename" ; then
++ eval '$ECHO ": $dlprefile_dlbasename" >> "$nlist"'
++ else
++ func_warning "Could not compute DLL name from $name"
++ eval '$ECHO ": $name " >> "$nlist"'
++ fi
++ func_to_tool_file "$dlprefile" func_convert_file_msys_to_w32
++ eval "$NM \"$func_to_tool_file_result\" 2>/dev/null | $global_symbol_pipe |
++ $SED -e '/I __imp/d' -e 's/I __nm_/D /;s/_nm__//' >> '$nlist'"
++ }
++ else # not an import lib
++ $opt_dry_run || {
++ eval '$ECHO ": $name " >> "$nlist"'
++ func_to_tool_file "$dlprefile" func_convert_file_msys_to_w32
++ eval "$NM \"$func_to_tool_file_result\" 2>/dev/null | $global_symbol_pipe >> '$nlist'"
++ }
++ fi
++ ;;
++ *)
++ $opt_dry_run || {
++ eval '$ECHO ": $name " >> "$nlist"'
++ func_to_tool_file "$dlprefile" func_convert_file_msys_to_w32
++ eval "$NM \"$func_to_tool_file_result\" 2>/dev/null | $global_symbol_pipe >> '$nlist'"
++ }
++ ;;
++ esac
+ done
+
+ $opt_dry_run || {
+@@ -2661,26 +3435,9 @@ typedef struct {
+ const char *name;
+ void *address;
+ } lt_dlsymlist;
+-"
+- case $host in
+- *cygwin* | *mingw* | *cegcc* )
+- echo >> "$output_objdir/$my_dlsyms" "\
+-/* DATA imports from DLLs on WIN32 con't be const, because
+- runtime relocations are performed -- see ld's documentation
+- on pseudo-relocs. */"
+- lt_dlsym_const= ;;
+- *osf5*)
+- echo >> "$output_objdir/$my_dlsyms" "\
+-/* This system does not cope well with relocations in const data */"
+- lt_dlsym_const= ;;
+- *)
+- lt_dlsym_const=const ;;
+- esac
+-
+- echo >> "$output_objdir/$my_dlsyms" "\
+-extern $lt_dlsym_const lt_dlsymlist
++extern LT_DLSYM_CONST lt_dlsymlist
+ lt_${my_prefix}_LTX_preloaded_symbols[];
+-$lt_dlsym_const lt_dlsymlist
++LT_DLSYM_CONST lt_dlsymlist
+ lt_${my_prefix}_LTX_preloaded_symbols[] =
+ {\
+ { \"$my_originator\", (void *) 0 },"
+@@ -2736,7 +3493,7 @@ static const void *lt_preloaded_setup() {
+ for arg in $LTCFLAGS; do
+ case $arg in
+ -pie | -fpie | -fPIE) ;;
+- *) symtab_cflags="$symtab_cflags $arg" ;;
++ *) func_append symtab_cflags " $arg" ;;
+ esac
+ done
+
+@@ -2796,9 +3553,11 @@ func_win32_libid ()
+ win32_libid_type="x86 archive import"
+ ;;
+ *ar\ archive*) # could be an import, or static
+- if $OBJDUMP -f "$1" | $SED -e '10q' 2>/dev/null |
+- $EGREP 'file format (pe-i386(.*architecture: i386)?|pe-arm-wince|pe-x86-64)' >/dev/null; then
+- win32_nmres=`$NM -f posix -A "$1" |
++ # Keep the egrep pattern in sync with the one in _LT_CHECK_MAGIC_METHOD.
++ if eval $OBJDUMP -f $1 | $SED -e '10q' 2>/dev/null |
++ $EGREP 'file format (pei*-i386(.*architecture: i386)?|pe-arm-wince|pe-x86-64)' >/dev/null; then
++ func_to_tool_file "$1" func_convert_file_msys_to_w32
++ win32_nmres=`eval $NM -f posix -A \"$func_to_tool_file_result\" |
+ $SED -n -e '
+ 1,100{
+ / I /{
+@@ -2827,6 +3586,131 @@ func_win32_libid ()
+ $ECHO "$win32_libid_type"
+ }
+
++# func_cygming_dll_for_implib ARG
++#
++# Platform-specific function to extract the
++# name of the DLL associated with the specified
++# import library ARG.
++# Invoked by eval'ing the libtool variable
++# $sharedlib_from_linklib_cmd
++# Result is available in the variable
++# $sharedlib_from_linklib_result
++func_cygming_dll_for_implib ()
++{
++ $opt_debug
++ sharedlib_from_linklib_result=`$DLLTOOL --identify-strict --identify "$1"`
++}
++
++# func_cygming_dll_for_implib_fallback_core SECTION_NAME LIBNAMEs
++#
++# The is the core of a fallback implementation of a
++# platform-specific function to extract the name of the
++# DLL associated with the specified import library LIBNAME.
++#
++# SECTION_NAME is either .idata$6 or .idata$7, depending
++# on the platform and compiler that created the implib.
++#
++# Echos the name of the DLL associated with the
++# specified import library.
++func_cygming_dll_for_implib_fallback_core ()
++{
++ $opt_debug
++ match_literal=`$ECHO "$1" | $SED "$sed_make_literal_regex"`
++ $OBJDUMP -s --section "$1" "$2" 2>/dev/null |
++ $SED '/^Contents of section '"$match_literal"':/{
++ # Place marker at beginning of archive member dllname section
++ s/.*/====MARK====/
++ p
++ d
++ }
++ # These lines can sometimes be longer than 43 characters, but
++ # are always uninteresting
++ /:[ ]*file format pe[i]\{,1\}-/d
++ /^In archive [^:]*:/d
++ # Ensure marker is printed
++ /^====MARK====/p
++ # Remove all lines with less than 43 characters
++ /^.\{43\}/!d
++ # From remaining lines, remove first 43 characters
++ s/^.\{43\}//' |
++ $SED -n '
++ # Join marker and all lines until next marker into a single line
++ /^====MARK====/ b para
++ H
++ $ b para
++ b
++ :para
++ x
++ s/\n//g
++ # Remove the marker
++ s/^====MARK====//
++ # Remove trailing dots and whitespace
++ s/[\. \t]*$//
++ # Print
++ /./p' |
++ # we now have a list, one entry per line, of the stringified
++ # contents of the appropriate section of all members of the
++ # archive which possess that section. Heuristic: eliminate
++ # all those which have a first or second character that is
++ # a '.' (that is, objdump's representation of an unprintable
++ # character.) This should work for all archives with less than
++ # 0x302f exports -- but will fail for DLLs whose name actually
++ # begins with a literal '.' or a single character followed by
++ # a '.'.
++ #
++ # Of those that remain, print the first one.
++ $SED -e '/^\./d;/^.\./d;q'
++}
++
++# func_cygming_gnu_implib_p ARG
++# This predicate returns with zero status (TRUE) if
++# ARG is a GNU/binutils-style import library. Returns
++# with nonzero status (FALSE) otherwise.
++func_cygming_gnu_implib_p ()
++{
++ $opt_debug
++ func_to_tool_file "$1" func_convert_file_msys_to_w32
++ func_cygming_gnu_implib_tmp=`$NM "$func_to_tool_file_result" | eval "$global_symbol_pipe" | $EGREP ' (_head_[A-Za-z0-9_]+_[ad]l*|[A-Za-z0-9_]+_[ad]l*_iname)$'`
++ test -n "$func_cygming_gnu_implib_tmp"
++}
++
++# func_cygming_ms_implib_p ARG
++# This predicate returns with zero status (TRUE) if
++# ARG is an MS-style import library. Returns
++# with nonzero status (FALSE) otherwise.
++func_cygming_ms_implib_p ()
++{
++ $opt_debug
++ func_to_tool_file "$1" func_convert_file_msys_to_w32
++ func_cygming_ms_implib_tmp=`$NM "$func_to_tool_file_result" | eval "$global_symbol_pipe" | $GREP '_NULL_IMPORT_DESCRIPTOR'`
++ test -n "$func_cygming_ms_implib_tmp"
++}
++
++# func_cygming_dll_for_implib_fallback ARG
++# Platform-specific function to extract the
++# name of the DLL associated with the specified
++# import library ARG.
++#
++# This fallback implementation is for use when $DLLTOOL
++# does not support the --identify-strict option.
++# Invoked by eval'ing the libtool variable
++# $sharedlib_from_linklib_cmd
++# Result is available in the variable
++# $sharedlib_from_linklib_result
++func_cygming_dll_for_implib_fallback ()
++{
++ $opt_debug
++ if func_cygming_gnu_implib_p "$1" ; then
++ # binutils import library
++ sharedlib_from_linklib_result=`func_cygming_dll_for_implib_fallback_core '.idata$7' "$1"`
++ elif func_cygming_ms_implib_p "$1" ; then
++ # ms-generated import library
++ sharedlib_from_linklib_result=`func_cygming_dll_for_implib_fallback_core '.idata$6' "$1"`
++ else
++ # unknown
++ sharedlib_from_linklib_result=""
++ fi
++}
+
+
+ # func_extract_an_archive dir oldlib
+@@ -3014,7 +3898,110 @@ func_fallback_echo ()
+ _LTECHO_EOF'
+ }
+ ECHO=\"$qECHO\"
+- fi\
++ fi
++
++# Very basic option parsing. These options are (a) specific to
++# the libtool wrapper, (b) are identical between the wrapper
++# /script/ and the wrapper /executable/ which is used only on
++# windows platforms, and (c) all begin with the string "--lt-"
++# (application programs are unlikely to have options which match
++# this pattern).
++#
++# There are only two supported options: --lt-debug and
++# --lt-dump-script. There is, deliberately, no --lt-help.
++#
++# The first argument to this parsing function should be the
++# script's $0 value, followed by "$@".
++lt_option_debug=
++func_parse_lt_options ()
++{
++ lt_script_arg0=\$0
++ shift
++ for lt_opt
++ do
++ case \"\$lt_opt\" in
++ --lt-debug) lt_option_debug=1 ;;
++ --lt-dump-script)
++ lt_dump_D=\`\$ECHO \"X\$lt_script_arg0\" | $SED -e 's/^X//' -e 's%/[^/]*$%%'\`
++ test \"X\$lt_dump_D\" = \"X\$lt_script_arg0\" && lt_dump_D=.
++ lt_dump_F=\`\$ECHO \"X\$lt_script_arg0\" | $SED -e 's/^X//' -e 's%^.*/%%'\`
++ cat \"\$lt_dump_D/\$lt_dump_F\"
++ exit 0
++ ;;
++ --lt-*)
++ \$ECHO \"Unrecognized --lt- option: '\$lt_opt'\" 1>&2
++ exit 1
++ ;;
++ esac
++ done
++
++ # Print the debug banner immediately:
++ if test -n \"\$lt_option_debug\"; then
++ echo \"${outputname}:${output}:\${LINENO}: libtool wrapper (GNU $PACKAGE$TIMESTAMP) $VERSION\" 1>&2
++ fi
++}
++
++# Used when --lt-debug. Prints its arguments to stdout
++# (redirection is the responsibility of the caller)
++func_lt_dump_args ()
++{
++ lt_dump_args_N=1;
++ for lt_arg
++ do
++ \$ECHO \"${outputname}:${output}:\${LINENO}: newargv[\$lt_dump_args_N]: \$lt_arg\"
++ lt_dump_args_N=\`expr \$lt_dump_args_N + 1\`
++ done
++}
++
++# Core function for launching the target application
++func_exec_program_core ()
++{
++"
++ case $host in
++ # Backslashes separate directories on plain windows
++ *-*-mingw | *-*-os2* | *-cegcc*)
++ $ECHO "\
++ if test -n \"\$lt_option_debug\"; then
++ \$ECHO \"${outputname}:${output}:\${LINENO}: newargv[0]: \$progdir\\\\\$program\" 1>&2
++ func_lt_dump_args \${1+\"\$@\"} 1>&2
++ fi
++ exec \"\$progdir\\\\\$program\" \${1+\"\$@\"}
++"
++ ;;
++
++ *)
++ $ECHO "\
++ if test -n \"\$lt_option_debug\"; then
++ \$ECHO \"${outputname}:${output}:\${LINENO}: newargv[0]: \$progdir/\$program\" 1>&2
++ func_lt_dump_args \${1+\"\$@\"} 1>&2
++ fi
++ exec \"\$progdir/\$program\" \${1+\"\$@\"}
++"
++ ;;
++ esac
++ $ECHO "\
++ \$ECHO \"\$0: cannot exec \$program \$*\" 1>&2
++ exit 1
++}
++
++# A function to encapsulate launching the target application
++# Strips options in the --lt-* namespace from \$@ and
++# launches target application with the remaining arguments.
++func_exec_program ()
++{
++ for lt_wr_arg
++ do
++ case \$lt_wr_arg in
++ --lt-*) ;;
++ *) set x \"\$@\" \"\$lt_wr_arg\"; shift;;
++ esac
++ shift
++ done
++ func_exec_program_core \${1+\"\$@\"}
++}
++
++ # Parse options
++ func_parse_lt_options \"\$0\" \${1+\"\$@\"}
+
+ # Find the directory that this script lives in.
+ thisdir=\`\$ECHO \"\$file\" | $SED 's%/[^/]*$%%'\`
+@@ -3078,7 +4065,7 @@ _LTECHO_EOF'
+
+ # relink executable if necessary
+ if test -n \"\$relink_command\"; then
+- if relink_command_output=\`eval \"\$relink_command\" 2>&1\`; then :
++ if relink_command_output=\`eval \$relink_command 2>&1\`; then :
+ else
+ $ECHO \"\$relink_command_output\" >&2
+ $RM \"\$progdir/\$file\"
+@@ -3102,6 +4089,18 @@ _LTECHO_EOF'
+
+ if test -f \"\$progdir/\$program\"; then"
+
++ # fixup the dll searchpath if we need to.
++ #
++ # Fix the DLL searchpath if we need to. Do this before prepending
++ # to shlibpath, because on Windows, both are PATH and uninstalled
++ # libraries must come first.
++ if test -n "$dllsearchpath"; then
++ $ECHO "\
++ # Add the dll search path components to the executable PATH
++ PATH=$dllsearchpath:\$PATH
++"
++ fi
++
+ # Export our shlibpath_var if we have one.
+ if test "$shlibpath_overrides_runpath" = yes && test -n "$shlibpath_var" && test -n "$temp_rpath"; then
+ $ECHO "\
+@@ -3116,35 +4115,10 @@ _LTECHO_EOF'
+ "
+ fi
+
+- # fixup the dll searchpath if we need to.
+- if test -n "$dllsearchpath"; then
+- $ECHO "\
+- # Add the dll search path components to the executable PATH
+- PATH=$dllsearchpath:\$PATH
+-"
+- fi
+-
+ $ECHO "\
+ if test \"\$libtool_execute_magic\" != \"$magic\"; then
+ # Run the actual program with our arguments.
+-"
+- case $host in
+- # Backslashes separate directories on plain windows
+- *-*-mingw | *-*-os2* | *-cegcc*)
+- $ECHO "\
+- exec \"\$progdir\\\\\$program\" \${1+\"\$@\"}
+-"
+- ;;
+-
+- *)
+- $ECHO "\
+- exec \"\$progdir/\$program\" \${1+\"\$@\"}
+-"
+- ;;
+- esac
+- $ECHO "\
+- \$ECHO \"\$0: cannot exec \$program \$*\" 1>&2
+- exit 1
++ func_exec_program \${1+\"\$@\"}
+ fi
+ else
+ # The program doesn't exist.
+@@ -3158,166 +4132,6 @@ fi\
+ }
+
+
+-# func_to_host_path arg
+-#
+-# Convert paths to host format when used with build tools.
+-# Intended for use with "native" mingw (where libtool itself
+-# is running under the msys shell), or in the following cross-
+-# build environments:
+-# $build $host
+-# mingw (msys) mingw [e.g. native]
+-# cygwin mingw
+-# *nix + wine mingw
+-# where wine is equipped with the `winepath' executable.
+-# In the native mingw case, the (msys) shell automatically
+-# converts paths for any non-msys applications it launches,
+-# but that facility isn't available from inside the cwrapper.
+-# Similar accommodations are necessary for $host mingw and
+-# $build cygwin. Calling this function does no harm for other
+-# $host/$build combinations not listed above.
+-#
+-# ARG is the path (on $build) that should be converted to
+-# the proper representation for $host. The result is stored
+-# in $func_to_host_path_result.
+-func_to_host_path ()
+-{
+- func_to_host_path_result="$1"
+- if test -n "$1"; then
+- case $host in
+- *mingw* )
+- lt_sed_naive_backslashify='s|\\\\*|\\|g;s|/|\\|g;s|\\|\\\\|g'
+- case $build in
+- *mingw* ) # actually, msys
+- # awkward: cmd appends spaces to result
+- func_to_host_path_result=`( cmd //c echo "$1" ) 2>/dev/null |
+- $SED -e 's/[ ]*$//' -e "$lt_sed_naive_backslashify"`
+- ;;
+- *cygwin* )
+- func_to_host_path_result=`cygpath -w "$1" |
+- $SED -e "$lt_sed_naive_backslashify"`
+- ;;
+- * )
+- # Unfortunately, winepath does not exit with a non-zero
+- # error code, so we are forced to check the contents of
+- # stdout. On the other hand, if the command is not
+- # found, the shell will set an exit code of 127 and print
+- # *an error message* to stdout. So we must check for both
+- # error code of zero AND non-empty stdout, which explains
+- # the odd construction:
+- func_to_host_path_tmp1=`winepath -w "$1" 2>/dev/null`
+- if test "$?" -eq 0 && test -n "${func_to_host_path_tmp1}"; then
+- func_to_host_path_result=`$ECHO "$func_to_host_path_tmp1" |
+- $SED -e "$lt_sed_naive_backslashify"`
+- else
+- # Allow warning below.
+- func_to_host_path_result=
+- fi
+- ;;
+- esac
+- if test -z "$func_to_host_path_result" ; then
+- func_error "Could not determine host path corresponding to"
+- func_error " \`$1'"
+- func_error "Continuing, but uninstalled executables may not work."
+- # Fallback:
+- func_to_host_path_result="$1"
+- fi
+- ;;
+- esac
+- fi
+-}
+-# end: func_to_host_path
+-
+-# func_to_host_pathlist arg
+-#
+-# Convert pathlists to host format when used with build tools.
+-# See func_to_host_path(), above. This function supports the
+-# following $build/$host combinations (but does no harm for
+-# combinations not listed here):
+-# $build $host
+-# mingw (msys) mingw [e.g. native]
+-# cygwin mingw
+-# *nix + wine mingw
+-#
+-# Path separators are also converted from $build format to
+-# $host format. If ARG begins or ends with a path separator
+-# character, it is preserved (but converted to $host format)
+-# on output.
+-#
+-# ARG is a pathlist (on $build) that should be converted to
+-# the proper representation on $host. The result is stored
+-# in $func_to_host_pathlist_result.
+-func_to_host_pathlist ()
+-{
+- func_to_host_pathlist_result="$1"
+- if test -n "$1"; then
+- case $host in
+- *mingw* )
+- lt_sed_naive_backslashify='s|\\\\*|\\|g;s|/|\\|g;s|\\|\\\\|g'
+- # Remove leading and trailing path separator characters from
+- # ARG. msys behavior is inconsistent here, cygpath turns them
+- # into '.;' and ';.', and winepath ignores them completely.
+- func_stripname : : "$1"
+- func_to_host_pathlist_tmp1=$func_stripname_result
+- case $build in
+- *mingw* ) # Actually, msys.
+- # Awkward: cmd appends spaces to result.
+- func_to_host_pathlist_result=`
+- ( cmd //c echo "$func_to_host_pathlist_tmp1" ) 2>/dev/null |
+- $SED -e 's/[ ]*$//' -e "$lt_sed_naive_backslashify"`
+- ;;
+- *cygwin* )
+- func_to_host_pathlist_result=`cygpath -w -p "$func_to_host_pathlist_tmp1" |
+- $SED -e "$lt_sed_naive_backslashify"`
+- ;;
+- * )
+- # unfortunately, winepath doesn't convert pathlists
+- func_to_host_pathlist_result=""
+- func_to_host_pathlist_oldIFS=$IFS
+- IFS=:
+- for func_to_host_pathlist_f in $func_to_host_pathlist_tmp1 ; do
+- IFS=$func_to_host_pathlist_oldIFS
+- if test -n "$func_to_host_pathlist_f" ; then
+- func_to_host_path "$func_to_host_pathlist_f"
+- if test -n "$func_to_host_path_result" ; then
+- if test -z "$func_to_host_pathlist_result" ; then
+- func_to_host_pathlist_result="$func_to_host_path_result"
+- else
+- func_append func_to_host_pathlist_result ";$func_to_host_path_result"
+- fi
+- fi
+- fi
+- done
+- IFS=$func_to_host_pathlist_oldIFS
+- ;;
+- esac
+- if test -z "$func_to_host_pathlist_result"; then
+- func_error "Could not determine the host path(s) corresponding to"
+- func_error " \`$1'"
+- func_error "Continuing, but uninstalled executables may not work."
+- # Fallback. This may break if $1 contains DOS-style drive
+- # specifications. The fix is not to complicate the expression
+- # below, but for the user to provide a working wine installation
+- # with winepath so that path translation in the cross-to-mingw
+- # case works properly.
+- lt_replace_pathsep_nix_to_dos="s|:|;|g"
+- func_to_host_pathlist_result=`echo "$func_to_host_pathlist_tmp1" |\
+- $SED -e "$lt_replace_pathsep_nix_to_dos"`
+- fi
+- # Now, add the leading and trailing path separators back
+- case "$1" in
+- :* ) func_to_host_pathlist_result=";$func_to_host_pathlist_result"
+- ;;
+- esac
+- case "$1" in
+- *: ) func_append func_to_host_pathlist_result ";"
+- ;;
+- esac
+- ;;
+- esac
+- fi
+-}
+-# end: func_to_host_pathlist
+-
+ # func_emit_cwrapperexe_src
+ # emit the source code for a wrapper executable on stdout
+ # Must ONLY be called from within func_mode_link because
+@@ -3334,10 +4148,6 @@ func_emit_cwrapperexe_src ()
+
+ This wrapper executable should never be moved out of the build directory.
+ If it is, it will not operate correctly.
+-
+- Currently, it simply execs the wrapper *script* "$SHELL $output",
+- but could eventually absorb all of the scripts functionality and
+- exec $objdir/$outputname directly.
+ */
+ EOF
+ cat <<"EOF"
+@@ -3462,22 +4272,13 @@ int setenv (const char *, const char *, int);
+ if (stale) { free ((void *) stale); stale = 0; } \
+ } while (0)
+
+-#undef LTWRAPPER_DEBUGPRINTF
+-#if defined LT_DEBUGWRAPPER
+-# define LTWRAPPER_DEBUGPRINTF(args) ltwrapper_debugprintf args
+-static void
+-ltwrapper_debugprintf (const char *fmt, ...)
+-{
+- va_list args;
+- va_start (args, fmt);
+- (void) vfprintf (stderr, fmt, args);
+- va_end (args);
+-}
++#if defined(LT_DEBUGWRAPPER)
++static int lt_debug = 1;
+ #else
+-# define LTWRAPPER_DEBUGPRINTF(args)
++static int lt_debug = 0;
+ #endif
+
+-const char *program_name = NULL;
++const char *program_name = "libtool-wrapper"; /* in case xstrdup fails */
+
+ void *xmalloc (size_t num);
+ char *xstrdup (const char *string);
+@@ -3487,7 +4288,10 @@ char *chase_symlinks (const char *pathspec);
+ int make_executable (const char *path);
+ int check_executable (const char *path);
+ char *strendzap (char *str, const char *pat);
+-void lt_fatal (const char *message, ...);
++void lt_debugprintf (const char *file, int line, const char *fmt, ...);
++void lt_fatal (const char *file, int line, const char *message, ...);
++static const char *nonnull (const char *s);
++static const char *nonempty (const char *s);
+ void lt_setenv (const char *name, const char *value);
+ char *lt_extend_str (const char *orig_value, const char *add, int to_end);
+ void lt_update_exe_path (const char *name, const char *value);
+@@ -3497,14 +4301,14 @@ void lt_dump_script (FILE *f);
+ EOF
+
+ cat <<EOF
+-const char * MAGIC_EXE = "$magic_exe";
++volatile const char * MAGIC_EXE = "$magic_exe";
+ const char * LIB_PATH_VARNAME = "$shlibpath_var";
+ EOF
+
+ if test "$shlibpath_overrides_runpath" = yes && test -n "$shlibpath_var" && test -n "$temp_rpath"; then
+- func_to_host_pathlist "$temp_rpath"
++ func_to_host_path "$temp_rpath"
+ cat <<EOF
+-const char * LIB_PATH_VALUE = "$func_to_host_pathlist_result";
++const char * LIB_PATH_VALUE = "$func_to_host_path_result";
+ EOF
+ else
+ cat <<"EOF"
+@@ -3513,10 +4317,10 @@ EOF
+ fi
+
+ if test -n "$dllsearchpath"; then
+- func_to_host_pathlist "$dllsearchpath:"
++ func_to_host_path "$dllsearchpath:"
+ cat <<EOF
+ const char * EXE_PATH_VARNAME = "PATH";
+-const char * EXE_PATH_VALUE = "$func_to_host_pathlist_result";
++const char * EXE_PATH_VALUE = "$func_to_host_path_result";
+ EOF
+ else
+ cat <<"EOF"
+@@ -3539,12 +4343,10 @@ EOF
+ cat <<"EOF"
+
+ #define LTWRAPPER_OPTION_PREFIX "--lt-"
+-#define LTWRAPPER_OPTION_PREFIX_LENGTH 5
+
+-static const size_t opt_prefix_len = LTWRAPPER_OPTION_PREFIX_LENGTH;
+ static const char *ltwrapper_option_prefix = LTWRAPPER_OPTION_PREFIX;
+-
+ static const char *dumpscript_opt = LTWRAPPER_OPTION_PREFIX "dump-script";
++static const char *debug_opt = LTWRAPPER_OPTION_PREFIX "debug";
+
+ int
+ main (int argc, char *argv[])
+@@ -3561,10 +4363,13 @@ main (int argc, char *argv[])
+ int i;
+
+ program_name = (char *) xstrdup (base_name (argv[0]));
+- LTWRAPPER_DEBUGPRINTF (("(main) argv[0] : %s\n", argv[0]));
+- LTWRAPPER_DEBUGPRINTF (("(main) program_name : %s\n", program_name));
++ newargz = XMALLOC (char *, argc + 1);
+
+- /* very simple arg parsing; don't want to rely on getopt */
++ /* very simple arg parsing; don't want to rely on getopt
++ * also, copy all non cwrapper options to newargz, except
++ * argz[0], which is handled differently
++ */
++ newargc=0;
+ for (i = 1; i < argc; i++)
+ {
+ if (strcmp (argv[i], dumpscript_opt) == 0)
+@@ -3581,21 +4386,54 @@ EOF
+ lt_dump_script (stdout);
+ return 0;
+ }
++ if (strcmp (argv[i], debug_opt) == 0)
++ {
++ lt_debug = 1;
++ continue;
++ }
++ if (strcmp (argv[i], ltwrapper_option_prefix) == 0)
++ {
++ /* however, if there is an option in the LTWRAPPER_OPTION_PREFIX
++ namespace, but it is not one of the ones we know about and
++ have already dealt with, above (inluding dump-script), then
++ report an error. Otherwise, targets might begin to believe
++ they are allowed to use options in the LTWRAPPER_OPTION_PREFIX
++ namespace. The first time any user complains about this, we'll
++ need to make LTWRAPPER_OPTION_PREFIX a configure-time option
++ or a configure.ac-settable value.
++ */
++ lt_fatal (__FILE__, __LINE__,
++ "unrecognized %s option: '%s'",
++ ltwrapper_option_prefix, argv[i]);
++ }
++ /* otherwise ... */
++ newargz[++newargc] = xstrdup (argv[i]);
+ }
++ newargz[++newargc] = NULL;
++
++EOF
++ cat <<EOF
++ /* The GNU banner must be the first non-error debug message */
++ lt_debugprintf (__FILE__, __LINE__, "libtool wrapper (GNU $PACKAGE$TIMESTAMP) $VERSION\n");
++EOF
++ cat <<"EOF"
++ lt_debugprintf (__FILE__, __LINE__, "(main) argv[0]: %s\n", argv[0]);
++ lt_debugprintf (__FILE__, __LINE__, "(main) program_name: %s\n", program_name);
+
+- newargz = XMALLOC (char *, argc + 1);
+ tmp_pathspec = find_executable (argv[0]);
+ if (tmp_pathspec == NULL)
+- lt_fatal ("Couldn't find %s", argv[0]);
+- LTWRAPPER_DEBUGPRINTF (("(main) found exe (before symlink chase) at : %s\n",
+- tmp_pathspec));
++ lt_fatal (__FILE__, __LINE__, "couldn't find %s", argv[0]);
++ lt_debugprintf (__FILE__, __LINE__,
++ "(main) found exe (before symlink chase) at: %s\n",
++ tmp_pathspec);
+
+ actual_cwrapper_path = chase_symlinks (tmp_pathspec);
+- LTWRAPPER_DEBUGPRINTF (("(main) found exe (after symlink chase) at : %s\n",
+- actual_cwrapper_path));
++ lt_debugprintf (__FILE__, __LINE__,
++ "(main) found exe (after symlink chase) at: %s\n",
++ actual_cwrapper_path);
+ XFREE (tmp_pathspec);
+
+- actual_cwrapper_name = xstrdup( base_name (actual_cwrapper_path));
++ actual_cwrapper_name = xstrdup (base_name (actual_cwrapper_path));
+ strendzap (actual_cwrapper_path, actual_cwrapper_name);
+
+ /* wrapper name transforms */
+@@ -3613,8 +4451,9 @@ EOF
+ target_name = tmp_pathspec;
+ tmp_pathspec = 0;
+
+- LTWRAPPER_DEBUGPRINTF (("(main) libtool target name: %s\n",
+- target_name));
++ lt_debugprintf (__FILE__, __LINE__,
++ "(main) libtool target name: %s\n",
++ target_name);
+ EOF
+
+ cat <<EOF
+@@ -3664,35 +4503,19 @@ EOF
+
+ lt_setenv ("BIN_SH", "xpg4"); /* for Tru64 */
+ lt_setenv ("DUALCASE", "1"); /* for MSK sh */
+- lt_update_lib_path (LIB_PATH_VARNAME, LIB_PATH_VALUE);
++ /* Update the DLL searchpath. EXE_PATH_VALUE ($dllsearchpath) must
++ be prepended before (that is, appear after) LIB_PATH_VALUE ($temp_rpath)
++ because on Windows, both *_VARNAMEs are PATH but uninstalled
++ libraries must come first. */
+ lt_update_exe_path (EXE_PATH_VARNAME, EXE_PATH_VALUE);
++ lt_update_lib_path (LIB_PATH_VARNAME, LIB_PATH_VALUE);
+
+- newargc=0;
+- for (i = 1; i < argc; i++)
+- {
+- if (strncmp (argv[i], ltwrapper_option_prefix, opt_prefix_len) == 0)
+- {
+- /* however, if there is an option in the LTWRAPPER_OPTION_PREFIX
+- namespace, but it is not one of the ones we know about and
+- have already dealt with, above (inluding dump-script), then
+- report an error. Otherwise, targets might begin to believe
+- they are allowed to use options in the LTWRAPPER_OPTION_PREFIX
+- namespace. The first time any user complains about this, we'll
+- need to make LTWRAPPER_OPTION_PREFIX a configure-time option
+- or a configure.ac-settable value.
+- */
+- lt_fatal ("Unrecognized option in %s namespace: '%s'",
+- ltwrapper_option_prefix, argv[i]);
+- }
+- /* otherwise ... */
+- newargz[++newargc] = xstrdup (argv[i]);
+- }
+- newargz[++newargc] = NULL;
+-
+- LTWRAPPER_DEBUGPRINTF (("(main) lt_argv_zero : %s\n", (lt_argv_zero ? lt_argv_zero : "<NULL>")));
++ lt_debugprintf (__FILE__, __LINE__, "(main) lt_argv_zero: %s\n",
++ nonnull (lt_argv_zero));
+ for (i = 0; i < newargc; i++)
+ {
+- LTWRAPPER_DEBUGPRINTF (("(main) newargz[%d] : %s\n", i, (newargz[i] ? newargz[i] : "<NULL>")));
++ lt_debugprintf (__FILE__, __LINE__, "(main) newargz[%d]: %s\n",
++ i, nonnull (newargz[i]));
+ }
+
+ EOF
+@@ -3706,7 +4529,9 @@ EOF
+ if (rval == -1)
+ {
+ /* failed to start process */
+- LTWRAPPER_DEBUGPRINTF (("(main) failed to launch target \"%s\": errno = %d\n", lt_argv_zero, errno));
++ lt_debugprintf (__FILE__, __LINE__,
++ "(main) failed to launch target \"%s\": %s\n",
++ lt_argv_zero, nonnull (strerror (errno)));
+ return 127;
+ }
+ return rval;
+@@ -3728,7 +4553,7 @@ xmalloc (size_t num)
+ {
+ void *p = (void *) malloc (num);
+ if (!p)
+- lt_fatal ("Memory exhausted");
++ lt_fatal (__FILE__, __LINE__, "memory exhausted");
+
+ return p;
+ }
+@@ -3762,8 +4587,8 @@ check_executable (const char *path)
+ {
+ struct stat st;
+
+- LTWRAPPER_DEBUGPRINTF (("(check_executable) : %s\n",
+- path ? (*path ? path : "EMPTY!") : "NULL!"));
++ lt_debugprintf (__FILE__, __LINE__, "(check_executable): %s\n",
++ nonempty (path));
+ if ((!path) || (!*path))
+ return 0;
+
+@@ -3780,8 +4605,8 @@ make_executable (const char *path)
+ int rval = 0;
+ struct stat st;
+
+- LTWRAPPER_DEBUGPRINTF (("(make_executable) : %s\n",
+- path ? (*path ? path : "EMPTY!") : "NULL!"));
++ lt_debugprintf (__FILE__, __LINE__, "(make_executable): %s\n",
++ nonempty (path));
+ if ((!path) || (!*path))
+ return 0;
+
+@@ -3807,8 +4632,8 @@ find_executable (const char *wrapper)
+ int tmp_len;
+ char *concat_name;
+
+- LTWRAPPER_DEBUGPRINTF (("(find_executable) : %s\n",
+- wrapper ? (*wrapper ? wrapper : "EMPTY!") : "NULL!"));
++ lt_debugprintf (__FILE__, __LINE__, "(find_executable): %s\n",
++ nonempty (wrapper));
+
+ if ((wrapper == NULL) || (*wrapper == '\0'))
+ return NULL;
+@@ -3861,7 +4686,8 @@ find_executable (const char *wrapper)
+ {
+ /* empty path: current directory */
+ if (getcwd (tmp, LT_PATHMAX) == NULL)
+- lt_fatal ("getcwd failed");
++ lt_fatal (__FILE__, __LINE__, "getcwd failed: %s",
++ nonnull (strerror (errno)));
+ tmp_len = strlen (tmp);
+ concat_name =
+ XMALLOC (char, tmp_len + 1 + strlen (wrapper) + 1);
+@@ -3886,7 +4712,8 @@ find_executable (const char *wrapper)
+ }
+ /* Relative path | not found in path: prepend cwd */
+ if (getcwd (tmp, LT_PATHMAX) == NULL)
+- lt_fatal ("getcwd failed");
++ lt_fatal (__FILE__, __LINE__, "getcwd failed: %s",
++ nonnull (strerror (errno)));
+ tmp_len = strlen (tmp);
+ concat_name = XMALLOC (char, tmp_len + 1 + strlen (wrapper) + 1);
+ memcpy (concat_name, tmp, tmp_len);
+@@ -3912,8 +4739,9 @@ chase_symlinks (const char *pathspec)
+ int has_symlinks = 0;
+ while (strlen (tmp_pathspec) && !has_symlinks)
+ {
+- LTWRAPPER_DEBUGPRINTF (("checking path component for symlinks: %s\n",
+- tmp_pathspec));
++ lt_debugprintf (__FILE__, __LINE__,
++ "checking path component for symlinks: %s\n",
++ tmp_pathspec);
+ if (lstat (tmp_pathspec, &s) == 0)
+ {
+ if (S_ISLNK (s.st_mode) != 0)
+@@ -3935,8 +4763,9 @@ chase_symlinks (const char *pathspec)
+ }
+ else
+ {
+- char *errstr = strerror (errno);
+- lt_fatal ("Error accessing file %s (%s)", tmp_pathspec, errstr);
++ lt_fatal (__FILE__, __LINE__,
++ "error accessing file \"%s\": %s",
++ tmp_pathspec, nonnull (strerror (errno)));
+ }
+ }
+ XFREE (tmp_pathspec);
+@@ -3949,7 +4778,8 @@ chase_symlinks (const char *pathspec)
+ tmp_pathspec = realpath (pathspec, buf);
+ if (tmp_pathspec == 0)
+ {
+- lt_fatal ("Could not follow symlinks for %s", pathspec);
++ lt_fatal (__FILE__, __LINE__,
++ "could not follow symlinks for %s", pathspec);
+ }
+ return xstrdup (tmp_pathspec);
+ #endif
+@@ -3975,11 +4805,25 @@ strendzap (char *str, const char *pat)
+ return str;
+ }
+
++void
++lt_debugprintf (const char *file, int line, const char *fmt, ...)
++{
++ va_list args;
++ if (lt_debug)
++ {
++ (void) fprintf (stderr, "%s:%s:%d: ", program_name, file, line);
++ va_start (args, fmt);
++ (void) vfprintf (stderr, fmt, args);
++ va_end (args);
++ }
++}
++
+ static void
+-lt_error_core (int exit_status, const char *mode,
++lt_error_core (int exit_status, const char *file,
++ int line, const char *mode,
+ const char *message, va_list ap)
+ {
+- fprintf (stderr, "%s: %s: ", program_name, mode);
++ fprintf (stderr, "%s:%s:%d: %s: ", program_name, file, line, mode);
+ vfprintf (stderr, message, ap);
+ fprintf (stderr, ".\n");
+
+@@ -3988,20 +4832,32 @@ lt_error_core (int exit_status, const char *mode,
+ }
+
+ void
+-lt_fatal (const char *message, ...)
++lt_fatal (const char *file, int line, const char *message, ...)
+ {
+ va_list ap;
+ va_start (ap, message);
+- lt_error_core (EXIT_FAILURE, "FATAL", message, ap);
++ lt_error_core (EXIT_FAILURE, file, line, "FATAL", message, ap);
+ va_end (ap);
+ }
+
++static const char *
++nonnull (const char *s)
++{
++ return s ? s : "(null)";
++}
++
++static const char *
++nonempty (const char *s)
++{
++ return (s && !*s) ? "(empty)" : nonnull (s);
++}
++
+ void
+ lt_setenv (const char *name, const char *value)
+ {
+- LTWRAPPER_DEBUGPRINTF (("(lt_setenv) setting '%s' to '%s'\n",
+- (name ? name : "<NULL>"),
+- (value ? value : "<NULL>")));
++ lt_debugprintf (__FILE__, __LINE__,
++ "(lt_setenv) setting '%s' to '%s'\n",
++ nonnull (name), nonnull (value));
+ {
+ #ifdef HAVE_SETENV
+ /* always make a copy, for consistency with !HAVE_SETENV */
+@@ -4049,9 +4905,9 @@ lt_extend_str (const char *orig_value, const char *add, int to_end)
+ void
+ lt_update_exe_path (const char *name, const char *value)
+ {
+- LTWRAPPER_DEBUGPRINTF (("(lt_update_exe_path) modifying '%s' by prepending '%s'\n",
+- (name ? name : "<NULL>"),
+- (value ? value : "<NULL>")));
++ lt_debugprintf (__FILE__, __LINE__,
++ "(lt_update_exe_path) modifying '%s' by prepending '%s'\n",
++ nonnull (name), nonnull (value));
+
+ if (name && *name && value && *value)
+ {
+@@ -4070,9 +4926,9 @@ lt_update_exe_path (const char *name, const char *value)
+ void
+ lt_update_lib_path (const char *name, const char *value)
+ {
+- LTWRAPPER_DEBUGPRINTF (("(lt_update_lib_path) modifying '%s' by prepending '%s'\n",
+- (name ? name : "<NULL>"),
+- (value ? value : "<NULL>")));
++ lt_debugprintf (__FILE__, __LINE__,
++ "(lt_update_lib_path) modifying '%s' by prepending '%s'\n",
++ nonnull (name), nonnull (value));
+
+ if (name && *name && value && *value)
+ {
+@@ -4222,7 +5078,7 @@ EOF
+ func_win32_import_lib_p ()
+ {
+ $opt_debug
+- case `eval "$file_magic_cmd \"\$1\" 2>/dev/null" | $SED -e 10q` in
++ case `eval $file_magic_cmd \"\$1\" 2>/dev/null | $SED -e 10q` in
+ *import*) : ;;
+ *) false ;;
+ esac
+@@ -4401,9 +5257,9 @@ func_mode_link ()
+ ;;
+ *)
+ if test "$prev" = dlfiles; then
+- dlfiles="$dlfiles $arg"
++ func_append dlfiles " $arg"
+ else
+- dlprefiles="$dlprefiles $arg"
++ func_append dlprefiles " $arg"
+ fi
+ prev=
+ continue
+@@ -4427,7 +5283,7 @@ func_mode_link ()
+ *-*-darwin*)
+ case "$deplibs " in
+ *" $qarg.ltframework "*) ;;
+- *) deplibs="$deplibs $qarg.ltframework" # this is fixed later
++ *) func_append deplibs " $qarg.ltframework" # this is fixed later
+ ;;
+ esac
+ ;;
+@@ -4446,7 +5302,7 @@ func_mode_link ()
+ moreargs=
+ for fil in `cat "$save_arg"`
+ do
+-# moreargs="$moreargs $fil"
++# func_append moreargs " $fil"
+ arg=$fil
+ # A libtool-controlled object.
+
+@@ -4475,7 +5331,7 @@ func_mode_link ()
+
+ if test "$prev" = dlfiles; then
+ if test "$build_libtool_libs" = yes && test "$dlopen_support" = yes; then
+- dlfiles="$dlfiles $pic_object"
++ func_append dlfiles " $pic_object"
+ prev=
+ continue
+ else
+@@ -4487,7 +5343,7 @@ func_mode_link ()
+ # CHECK ME: I think I busted this. -Ossama
+ if test "$prev" = dlprefiles; then
+ # Preload the old-style object.
+- dlprefiles="$dlprefiles $pic_object"
++ func_append dlprefiles " $pic_object"
+ prev=
+ fi
+
+@@ -4557,12 +5413,12 @@ func_mode_link ()
+ if test "$prev" = rpath; then
+ case "$rpath " in
+ *" $arg "*) ;;
+- *) rpath="$rpath $arg" ;;
++ *) func_append rpath " $arg" ;;
+ esac
+ else
+ case "$xrpath " in
+ *" $arg "*) ;;
+- *) xrpath="$xrpath $arg" ;;
++ *) func_append xrpath " $arg" ;;
+ esac
+ fi
+ prev=
+@@ -4574,28 +5430,28 @@ func_mode_link ()
+ continue
+ ;;
+ weak)
+- weak_libs="$weak_libs $arg"
++ func_append weak_libs " $arg"
+ prev=
+ continue
+ ;;
+ xcclinker)
+- linker_flags="$linker_flags $qarg"
+- compiler_flags="$compiler_flags $qarg"
++ func_append linker_flags " $qarg"
++ func_append compiler_flags " $qarg"
+ prev=
+ func_append compile_command " $qarg"
+ func_append finalize_command " $qarg"
+ continue
+ ;;
+ xcompiler)
+- compiler_flags="$compiler_flags $qarg"
++ func_append compiler_flags " $qarg"
+ prev=
+ func_append compile_command " $qarg"
+ func_append finalize_command " $qarg"
+ continue
+ ;;
+ xlinker)
+- linker_flags="$linker_flags $qarg"
+- compiler_flags="$compiler_flags $wl$qarg"
++ func_append linker_flags " $qarg"
++ func_append compiler_flags " $wl$qarg"
+ prev=
+ func_append compile_command " $wl$qarg"
+ func_append finalize_command " $wl$qarg"
+@@ -4686,15 +5542,16 @@ func_mode_link ()
+ ;;
+
+ -L*)
+- func_stripname '-L' '' "$arg"
+- dir=$func_stripname_result
+- if test -z "$dir"; then
++ func_stripname "-L" '' "$arg"
++ if test -z "$func_stripname_result"; then
+ if test "$#" -gt 0; then
+ func_fatal_error "require no space between \`-L' and \`$1'"
+ else
+ func_fatal_error "need path for \`-L' option"
+ fi
+ fi
++ func_resolve_sysroot "$func_stripname_result"
++ dir=$func_resolve_sysroot_result
+ # We need an absolute path.
+ case $dir in
+ [\\/]* | [A-Za-z]:[\\/]*) ;;
+@@ -4706,10 +5563,16 @@ func_mode_link ()
+ ;;
+ esac
+ case "$deplibs " in
+- *" -L$dir "*) ;;
++ *" -L$dir "* | *" $arg "*)
++ # Will only happen for absolute or sysroot arguments
++ ;;
+ *)
+- deplibs="$deplibs -L$dir"
+- lib_search_path="$lib_search_path $dir"
++ # Preserve sysroot, but never include relative directories
++ case $dir in
++ [\\/]* | [A-Za-z]:[\\/]* | =*) func_append deplibs " $arg" ;;
++ *) func_append deplibs " -L$dir" ;;
++ esac
++ func_append lib_search_path " $dir"
+ ;;
+ esac
+ case $host in
+@@ -4718,12 +5581,12 @@ func_mode_link ()
+ case :$dllsearchpath: in
+ *":$dir:"*) ;;
+ ::) dllsearchpath=$dir;;
+- *) dllsearchpath="$dllsearchpath:$dir";;
++ *) func_append dllsearchpath ":$dir";;
+ esac
+ case :$dllsearchpath: in
+ *":$testbindir:"*) ;;
+ ::) dllsearchpath=$testbindir;;
+- *) dllsearchpath="$dllsearchpath:$testbindir";;
++ *) func_append dllsearchpath ":$testbindir";;
+ esac
+ ;;
+ esac
+@@ -4747,7 +5610,7 @@ func_mode_link ()
+ ;;
+ *-*-rhapsody* | *-*-darwin1.[012])
+ # Rhapsody C and math libraries are in the System framework
+- deplibs="$deplibs System.ltframework"
++ func_append deplibs " System.ltframework"
+ continue
+ ;;
+ *-*-sco3.2v5* | *-*-sco5v6*)
+@@ -4758,9 +5621,6 @@ func_mode_link ()
+ # Compiler inserts libc in the correct place for threads to work
+ test "X$arg" = "X-lc" && continue
+ ;;
+- *-*-linux*)
+- test "X$arg" = "X-lc" && continue
+- ;;
+ esac
+ elif test "X$arg" = "X-lc_r"; then
+ case $host in
+@@ -4770,7 +5630,7 @@ func_mode_link ()
+ ;;
+ esac
+ fi
+- deplibs="$deplibs $arg"
++ func_append deplibs " $arg"
+ continue
+ ;;
+
+@@ -4782,8 +5642,8 @@ func_mode_link ()
+ # Tru64 UNIX uses -model [arg] to determine the layout of C++
+ # classes, name mangling, and exception handling.
+ # Darwin uses the -arch flag to determine output architecture.
+- -model|-arch|-isysroot)
+- compiler_flags="$compiler_flags $arg"
++ -model|-arch|-isysroot|--sysroot)
++ func_append compiler_flags " $arg"
+ func_append compile_command " $arg"
+ func_append finalize_command " $arg"
+ prev=xcompiler
+@@ -4791,12 +5651,12 @@ func_mode_link ()
+ ;;
+
+ -mt|-mthreads|-kthread|-Kthread|-pthread|-pthreads|--thread-safe|-threads)
+- compiler_flags="$compiler_flags $arg"
++ func_append compiler_flags " $arg"
+ func_append compile_command " $arg"
+ func_append finalize_command " $arg"
+ case "$new_inherited_linker_flags " in
+ *" $arg "*) ;;
+- * ) new_inherited_linker_flags="$new_inherited_linker_flags $arg" ;;
++ * ) func_append new_inherited_linker_flags " $arg" ;;
+ esac
+ continue
+ ;;
+@@ -4863,13 +5723,17 @@ func_mode_link ()
+ # We need an absolute path.
+ case $dir in
+ [\\/]* | [A-Za-z]:[\\/]*) ;;
++ =*)
++ func_stripname '=' '' "$dir"
++ dir=$lt_sysroot$func_stripname_result
++ ;;
+ *)
+ func_fatal_error "only absolute run-paths are allowed"
+ ;;
+ esac
+ case "$xrpath " in
+ *" $dir "*) ;;
+- *) xrpath="$xrpath $dir" ;;
++ *) func_append xrpath " $dir" ;;
+ esac
+ continue
+ ;;
+@@ -4922,8 +5786,8 @@ func_mode_link ()
+ for flag in $args; do
+ IFS="$save_ifs"
+ func_quote_for_eval "$flag"
+- arg="$arg $func_quote_for_eval_result"
+- compiler_flags="$compiler_flags $func_quote_for_eval_result"
++ func_append arg " $func_quote_for_eval_result"
++ func_append compiler_flags " $func_quote_for_eval_result"
+ done
+ IFS="$save_ifs"
+ func_stripname ' ' '' "$arg"
+@@ -4938,9 +5802,9 @@ func_mode_link ()
+ for flag in $args; do
+ IFS="$save_ifs"
+ func_quote_for_eval "$flag"
+- arg="$arg $wl$func_quote_for_eval_result"
+- compiler_flags="$compiler_flags $wl$func_quote_for_eval_result"
+- linker_flags="$linker_flags $func_quote_for_eval_result"
++ func_append arg " $wl$func_quote_for_eval_result"
++ func_append compiler_flags " $wl$func_quote_for_eval_result"
++ func_append linker_flags " $func_quote_for_eval_result"
+ done
+ IFS="$save_ifs"
+ func_stripname ' ' '' "$arg"
+@@ -4968,24 +5832,27 @@ func_mode_link ()
+ arg="$func_quote_for_eval_result"
+ ;;
+
+- # -64, -mips[0-9] enable 64-bit mode on the SGI compiler
+- # -r[0-9][0-9]* specifies the processor on the SGI compiler
+- # -xarch=*, -xtarget=* enable 64-bit mode on the Sun compiler
+- # +DA*, +DD* enable 64-bit mode on the HP compiler
+- # -q* pass through compiler args for the IBM compiler
+- # -m*, -t[45]*, -txscale* pass through architecture-specific
+- # compiler args for GCC
+- # -F/path gives path to uninstalled frameworks, gcc on darwin
+- # -p, -pg, --coverage, -fprofile-* pass through profiling flag for GCC
+- # @file GCC response files
+- # -tp=* Portland pgcc target processor selection
++ # Flags to be passed through unchanged, with rationale:
++ # -64, -mips[0-9] enable 64-bit mode for the SGI compiler
++ # -r[0-9][0-9]* specify processor for the SGI compiler
++ # -xarch=*, -xtarget=* enable 64-bit mode for the Sun compiler
++ # +DA*, +DD* enable 64-bit mode for the HP compiler
++ # -q* compiler args for the IBM compiler
++ # -m*, -t[45]*, -txscale* architecture-specific flags for GCC
++ # -F/path path to uninstalled frameworks, gcc on darwin
++ # -p, -pg, --coverage, -fprofile-* profiling flags for GCC
++ # @file GCC response files
++ # -tp=* Portland pgcc target processor selection
++ # --sysroot=* for sysroot support
++ # -O*, -flto*, -fwhopr*, -fuse-linker-plugin GCC link-time optimization
+ -64|-mips[0-9]|-r[0-9][0-9]*|-xarch=*|-xtarget=*|+DA*|+DD*|-q*|-m*| \
+- -t[45]*|-txscale*|-p|-pg|--coverage|-fprofile-*|-F*|@*|-tp=*)
++ -t[45]*|-txscale*|-p|-pg|--coverage|-fprofile-*|-F*|@*|-tp=*|--sysroot=*| \
++ -O*|-flto*|-fwhopr*|-fuse-linker-plugin)
+ func_quote_for_eval "$arg"
+ arg="$func_quote_for_eval_result"
+ func_append compile_command " $arg"
+ func_append finalize_command " $arg"
+- compiler_flags="$compiler_flags $arg"
++ func_append compiler_flags " $arg"
+ continue
+ ;;
+
+@@ -4997,7 +5864,7 @@ func_mode_link ()
+
+ *.$objext)
+ # A standard object.
+- objs="$objs $arg"
++ func_append objs " $arg"
+ ;;
+
+ *.lo)
+@@ -5028,7 +5895,7 @@ func_mode_link ()
+
+ if test "$prev" = dlfiles; then
+ if test "$build_libtool_libs" = yes && test "$dlopen_support" = yes; then
+- dlfiles="$dlfiles $pic_object"
++ func_append dlfiles " $pic_object"
+ prev=
+ continue
+ else
+@@ -5040,7 +5907,7 @@ func_mode_link ()
+ # CHECK ME: I think I busted this. -Ossama
+ if test "$prev" = dlprefiles; then
+ # Preload the old-style object.
+- dlprefiles="$dlprefiles $pic_object"
++ func_append dlprefiles " $pic_object"
+ prev=
+ fi
+
+@@ -5085,24 +5952,25 @@ func_mode_link ()
+
+ *.$libext)
+ # An archive.
+- deplibs="$deplibs $arg"
+- old_deplibs="$old_deplibs $arg"
++ func_append deplibs " $arg"
++ func_append old_deplibs " $arg"
+ continue
+ ;;
+
+ *.la)
+ # A libtool-controlled library.
+
++ func_resolve_sysroot "$arg"
+ if test "$prev" = dlfiles; then
+ # This library was specified with -dlopen.
+- dlfiles="$dlfiles $arg"
++ func_append dlfiles " $func_resolve_sysroot_result"
+ prev=
+ elif test "$prev" = dlprefiles; then
+ # The library was specified with -dlpreopen.
+- dlprefiles="$dlprefiles $arg"
++ func_append dlprefiles " $func_resolve_sysroot_result"
+ prev=
+ else
+- deplibs="$deplibs $arg"
++ func_append deplibs " $func_resolve_sysroot_result"
+ fi
+ continue
+ ;;
+@@ -5127,7 +5995,7 @@ func_mode_link ()
+ func_fatal_help "the \`$prevarg' option requires an argument"
+
+ if test "$export_dynamic" = yes && test -n "$export_dynamic_flag_spec"; then
+- eval "arg=\"$export_dynamic_flag_spec\""
++ eval arg=\"$export_dynamic_flag_spec\"
+ func_append compile_command " $arg"
+ func_append finalize_command " $arg"
+ fi
+@@ -5144,11 +6012,13 @@ func_mode_link ()
+ else
+ shlib_search_path=
+ fi
+- eval "sys_lib_search_path=\"$sys_lib_search_path_spec\""
+- eval "sys_lib_dlsearch_path=\"$sys_lib_dlsearch_path_spec\""
++ eval sys_lib_search_path=\"$sys_lib_search_path_spec\"
++ eval sys_lib_dlsearch_path=\"$sys_lib_dlsearch_path_spec\"
+
+ func_dirname "$output" "/" ""
+ output_objdir="$func_dirname_result$objdir"
++ func_to_tool_file "$output_objdir/"
++ tool_output_objdir=$func_to_tool_file_result
+ # Create the object directory.
+ func_mkdir_p "$output_objdir"
+
+@@ -5169,12 +6039,12 @@ func_mode_link ()
+ # Find all interdependent deplibs by searching for libraries
+ # that are linked more than once (e.g. -la -lb -la)
+ for deplib in $deplibs; do
+- if $opt_duplicate_deps ; then
++ if $opt_preserve_dup_deps ; then
+ case "$libs " in
+- *" $deplib "*) specialdeplibs="$specialdeplibs $deplib" ;;
++ *" $deplib "*) func_append specialdeplibs " $deplib" ;;
+ esac
+ fi
+- libs="$libs $deplib"
++ func_append libs " $deplib"
+ done
+
+ if test "$linkmode" = lib; then
+@@ -5187,9 +6057,9 @@ func_mode_link ()
+ if $opt_duplicate_compiler_generated_deps; then
+ for pre_post_dep in $predeps $postdeps; do
+ case "$pre_post_deps " in
+- *" $pre_post_dep "*) specialdeplibs="$specialdeplibs $pre_post_deps" ;;
++ *" $pre_post_dep "*) func_append specialdeplibs " $pre_post_deps" ;;
+ esac
+- pre_post_deps="$pre_post_deps $pre_post_dep"
++ func_append pre_post_deps " $pre_post_dep"
+ done
+ fi
+ pre_post_deps=
+@@ -5256,8 +6126,9 @@ func_mode_link ()
+ for lib in $dlprefiles; do
+ # Ignore non-libtool-libs
+ dependency_libs=
++ func_resolve_sysroot "$lib"
+ case $lib in
+- *.la) func_source "$lib" ;;
++ *.la) func_source "$func_resolve_sysroot_result" ;;
+ esac
+
+ # Collect preopened libtool deplibs, except any this library
+@@ -5267,7 +6138,7 @@ func_mode_link ()
+ deplib_base=$func_basename_result
+ case " $weak_libs " in
+ *" $deplib_base "*) ;;
+- *) deplibs="$deplibs $deplib" ;;
++ *) func_append deplibs " $deplib" ;;
+ esac
+ done
+ done
+@@ -5288,11 +6159,11 @@ func_mode_link ()
+ compile_deplibs="$deplib $compile_deplibs"
+ finalize_deplibs="$deplib $finalize_deplibs"
+ else
+- compiler_flags="$compiler_flags $deplib"
++ func_append compiler_flags " $deplib"
+ if test "$linkmode" = lib ; then
+ case "$new_inherited_linker_flags " in
+ *" $deplib "*) ;;
+- * ) new_inherited_linker_flags="$new_inherited_linker_flags $deplib" ;;
++ * ) func_append new_inherited_linker_flags " $deplib" ;;
+ esac
+ fi
+ fi
+@@ -5377,7 +6248,7 @@ func_mode_link ()
+ if test "$linkmode" = lib ; then
+ case "$new_inherited_linker_flags " in
+ *" $deplib "*) ;;
+- * ) new_inherited_linker_flags="$new_inherited_linker_flags $deplib" ;;
++ * ) func_append new_inherited_linker_flags " $deplib" ;;
+ esac
+ fi
+ fi
+@@ -5390,7 +6261,8 @@ func_mode_link ()
+ test "$pass" = conv && continue
+ newdependency_libs="$deplib $newdependency_libs"
+ func_stripname '-L' '' "$deplib"
+- newlib_search_path="$newlib_search_path $func_stripname_result"
++ func_resolve_sysroot "$func_stripname_result"
++ func_append newlib_search_path " $func_resolve_sysroot_result"
+ ;;
+ prog)
+ if test "$pass" = conv; then
+@@ -5404,7 +6276,8 @@ func_mode_link ()
+ finalize_deplibs="$deplib $finalize_deplibs"
+ fi
+ func_stripname '-L' '' "$deplib"
+- newlib_search_path="$newlib_search_path $func_stripname_result"
++ func_resolve_sysroot "$func_stripname_result"
++ func_append newlib_search_path " $func_resolve_sysroot_result"
+ ;;
+ *)
+ func_warning "\`-L' is ignored for archives/objects"
+@@ -5415,17 +6288,21 @@ func_mode_link ()
+ -R*)
+ if test "$pass" = link; then
+ func_stripname '-R' '' "$deplib"
+- dir=$func_stripname_result
++ func_resolve_sysroot "$func_stripname_result"
++ dir=$func_resolve_sysroot_result
+ # Make sure the xrpath contains only unique directories.
+ case "$xrpath " in
+ *" $dir "*) ;;
+- *) xrpath="$xrpath $dir" ;;
++ *) func_append xrpath " $dir" ;;
+ esac
+ fi
+ deplibs="$deplib $deplibs"
+ continue
+ ;;
+- *.la) lib="$deplib" ;;
++ *.la)
++ func_resolve_sysroot "$deplib"
++ lib=$func_resolve_sysroot_result
++ ;;
+ *.$libext)
+ if test "$pass" = conv; then
+ deplibs="$deplib $deplibs"
+@@ -5488,11 +6365,11 @@ func_mode_link ()
+ if test "$pass" = dlpreopen || test "$dlopen_support" != yes || test "$build_libtool_libs" = no; then
+ # If there is no dlopen support or we're linking statically,
+ # we need to preload.
+- newdlprefiles="$newdlprefiles $deplib"
++ func_append newdlprefiles " $deplib"
+ compile_deplibs="$deplib $compile_deplibs"
+ finalize_deplibs="$deplib $finalize_deplibs"
+ else
+- newdlfiles="$newdlfiles $deplib"
++ func_append newdlfiles " $deplib"
+ fi
+ fi
+ continue
+@@ -5538,7 +6415,7 @@ func_mode_link ()
+ for tmp_inherited_linker_flag in $tmp_inherited_linker_flags; do
+ case " $new_inherited_linker_flags " in
+ *" $tmp_inherited_linker_flag "*) ;;
+- *) new_inherited_linker_flags="$new_inherited_linker_flags $tmp_inherited_linker_flag";;
++ *) func_append new_inherited_linker_flags " $tmp_inherited_linker_flag";;
+ esac
+ done
+ fi
+@@ -5546,8 +6423,8 @@ func_mode_link ()
+ if test "$linkmode,$pass" = "lib,link" ||
+ test "$linkmode,$pass" = "prog,scan" ||
+ { test "$linkmode" != prog && test "$linkmode" != lib; }; then
+- test -n "$dlopen" && dlfiles="$dlfiles $dlopen"
+- test -n "$dlpreopen" && dlprefiles="$dlprefiles $dlpreopen"
++ test -n "$dlopen" && func_append dlfiles " $dlopen"
++ test -n "$dlpreopen" && func_append dlprefiles " $dlpreopen"
+ fi
+
+ if test "$pass" = conv; then
+@@ -5558,20 +6435,20 @@ func_mode_link ()
+ func_fatal_error "cannot find name of link library for \`$lib'"
+ fi
+ # It is a libtool convenience library, so add in its objects.
+- convenience="$convenience $ladir/$objdir/$old_library"
+- old_convenience="$old_convenience $ladir/$objdir/$old_library"
++ func_append convenience " $ladir/$objdir/$old_library"
++ func_append old_convenience " $ladir/$objdir/$old_library"
+ elif test "$linkmode" != prog && test "$linkmode" != lib; then
+ func_fatal_error "\`$lib' is not a convenience library"
+ fi
+ tmp_libs=
+ for deplib in $dependency_libs; do
+ deplibs="$deplib $deplibs"
+- if $opt_duplicate_deps ; then
++ if $opt_preserve_dup_deps ; then
+ case "$tmp_libs " in
+- *" $deplib "*) specialdeplibs="$specialdeplibs $deplib" ;;
++ *" $deplib "*) func_append specialdeplibs " $deplib" ;;
+ esac
+ fi
+- tmp_libs="$tmp_libs $deplib"
++ func_append tmp_libs " $deplib"
+ done
+ continue
+ fi # $pass = conv
+@@ -5579,9 +6456,15 @@ func_mode_link ()
+
+ # Get the name of the library we link against.
+ linklib=
+- for l in $old_library $library_names; do
+- linklib="$l"
+- done
++ if test -n "$old_library" &&
++ { test "$prefer_static_libs" = yes ||
++ test "$prefer_static_libs,$installed" = "built,no"; }; then
++ linklib=$old_library
++ else
++ for l in $old_library $library_names; do
++ linklib="$l"
++ done
++ fi
+ if test -z "$linklib"; then
+ func_fatal_error "cannot find name of link library for \`$lib'"
+ fi
+@@ -5598,9 +6481,9 @@ func_mode_link ()
+ # statically, we need to preload. We also need to preload any
+ # dependent libraries so libltdl's deplib preloader doesn't
+ # bomb out in the load deplibs phase.
+- dlprefiles="$dlprefiles $lib $dependency_libs"
++ func_append dlprefiles " $lib $dependency_libs"
+ else
+- newdlfiles="$newdlfiles $lib"
++ func_append newdlfiles " $lib"
+ fi
+ continue
+ fi # $pass = dlopen
+@@ -5622,14 +6505,14 @@ func_mode_link ()
+
+ # Find the relevant object directory and library name.
+ if test "X$installed" = Xyes; then
+- if test ! -f "$libdir/$linklib" && test -f "$abs_ladir/$linklib"; then
++ if test ! -f "$lt_sysroot$libdir/$linklib" && test -f "$abs_ladir/$linklib"; then
+ func_warning "library \`$lib' was moved."
+ dir="$ladir"
+ absdir="$abs_ladir"
+ libdir="$abs_ladir"
+ else
+- dir="$libdir"
+- absdir="$libdir"
++ dir="$lt_sysroot$libdir"
++ absdir="$lt_sysroot$libdir"
+ fi
+ test "X$hardcode_automatic" = Xyes && avoidtemprpath=yes
+ else
+@@ -5637,12 +6520,12 @@ func_mode_link ()
+ dir="$ladir"
+ absdir="$abs_ladir"
+ # Remove this search path later
+- notinst_path="$notinst_path $abs_ladir"
++ func_append notinst_path " $abs_ladir"
+ else
+ dir="$ladir/$objdir"
+ absdir="$abs_ladir/$objdir"
+ # Remove this search path later
+- notinst_path="$notinst_path $abs_ladir"
++ func_append notinst_path " $abs_ladir"
+ fi
+ fi # $installed = yes
+ func_stripname 'lib' '.la' "$laname"
+@@ -5653,20 +6536,46 @@ func_mode_link ()
+ if test -z "$libdir" && test "$linkmode" = prog; then
+ func_fatal_error "only libraries may -dlpreopen a convenience library: \`$lib'"
+ fi
+- # Prefer using a static library (so that no silly _DYNAMIC symbols
+- # are required to link).
+- if test -n "$old_library"; then
+- newdlprefiles="$newdlprefiles $dir/$old_library"
+- # Keep a list of preopened convenience libraries to check
+- # that they are being used correctly in the link pass.
+- test -z "$libdir" && \
+- dlpreconveniencelibs="$dlpreconveniencelibs $dir/$old_library"
+- # Otherwise, use the dlname, so that lt_dlopen finds it.
+- elif test -n "$dlname"; then
+- newdlprefiles="$newdlprefiles $dir/$dlname"
+- else
+- newdlprefiles="$newdlprefiles $dir/$linklib"
+- fi
++ case "$host" in
++ # special handling for platforms with PE-DLLs.
++ *cygwin* | *mingw* | *cegcc* )
++ # Linker will automatically link against shared library if both
++ # static and shared are present. Therefore, ensure we extract
++ # symbols from the import library if a shared library is present
++ # (otherwise, the dlopen module name will be incorrect). We do
++ # this by putting the import library name into $newdlprefiles.
++ # We recover the dlopen module name by 'saving' the la file
++ # name in a special purpose variable, and (later) extracting the
++ # dlname from the la file.
++ if test -n "$dlname"; then
++ func_tr_sh "$dir/$linklib"
++ eval "libfile_$func_tr_sh_result=\$abs_ladir/\$laname"
++ func_append newdlprefiles " $dir/$linklib"
++ else
++ func_append newdlprefiles " $dir/$old_library"
++ # Keep a list of preopened convenience libraries to check
++ # that they are being used correctly in the link pass.
++ test -z "$libdir" && \
++ func_append dlpreconveniencelibs " $dir/$old_library"
++ fi
++ ;;
++ * )
++ # Prefer using a static library (so that no silly _DYNAMIC symbols
++ # are required to link).
++ if test -n "$old_library"; then
++ func_append newdlprefiles " $dir/$old_library"
++ # Keep a list of preopened convenience libraries to check
++ # that they are being used correctly in the link pass.
++ test -z "$libdir" && \
++ func_append dlpreconveniencelibs " $dir/$old_library"
++ # Otherwise, use the dlname, so that lt_dlopen finds it.
++ elif test -n "$dlname"; then
++ func_append newdlprefiles " $dir/$dlname"
++ else
++ func_append newdlprefiles " $dir/$linklib"
++ fi
++ ;;
++ esac
+ fi # $pass = dlpreopen
+
+ if test -z "$libdir"; then
+@@ -5684,7 +6593,7 @@ func_mode_link ()
+
+
+ if test "$linkmode" = prog && test "$pass" != link; then
+- newlib_search_path="$newlib_search_path $ladir"
++ func_append newlib_search_path " $ladir"
+ deplibs="$lib $deplibs"
+
+ linkalldeplibs=no
+@@ -5697,7 +6606,8 @@ func_mode_link ()
+ for deplib in $dependency_libs; do
+ case $deplib in
+ -L*) func_stripname '-L' '' "$deplib"
+- newlib_search_path="$newlib_search_path $func_stripname_result"
++ func_resolve_sysroot "$func_stripname_result"
++ func_append newlib_search_path " $func_resolve_sysroot_result"
+ ;;
+ esac
+ # Need to link against all dependency_libs?
+@@ -5708,12 +6618,12 @@ func_mode_link ()
+ # or/and link against static libraries
+ newdependency_libs="$deplib $newdependency_libs"
+ fi
+- if $opt_duplicate_deps ; then
++ if $opt_preserve_dup_deps ; then
+ case "$tmp_libs " in
+- *" $deplib "*) specialdeplibs="$specialdeplibs $deplib" ;;
++ *" $deplib "*) func_append specialdeplibs " $deplib" ;;
+ esac
+ fi
+- tmp_libs="$tmp_libs $deplib"
++ func_append tmp_libs " $deplib"
+ done # for deplib
+ continue
+ fi # $linkmode = prog...
+@@ -5728,7 +6638,7 @@ func_mode_link ()
+ # Make sure the rpath contains only unique directories.
+ case "$temp_rpath:" in
+ *"$absdir:"*) ;;
+- *) temp_rpath="$temp_rpath$absdir:" ;;
++ *) func_append temp_rpath "$absdir:" ;;
+ esac
+ fi
+
+@@ -5740,7 +6650,7 @@ func_mode_link ()
+ *)
+ case "$compile_rpath " in
+ *" $absdir "*) ;;
+- *) compile_rpath="$compile_rpath $absdir"
++ *) func_append compile_rpath " $absdir" ;;
+ esac
+ ;;
+ esac
+@@ -5749,7 +6659,7 @@ func_mode_link ()
+ *)
+ case "$finalize_rpath " in
+ *" $libdir "*) ;;
+- *) finalize_rpath="$finalize_rpath $libdir"
++ *) func_append finalize_rpath " $libdir" ;;
+ esac
+ ;;
+ esac
+@@ -5774,12 +6684,12 @@ func_mode_link ()
+ case $host in
+ *cygwin* | *mingw* | *cegcc*)
+ # No point in relinking DLLs because paths are not encoded
+- notinst_deplibs="$notinst_deplibs $lib"
++ func_append notinst_deplibs " $lib"
+ need_relink=no
+ ;;
+ *)
+ if test "$installed" = no; then
+- notinst_deplibs="$notinst_deplibs $lib"
++ func_append notinst_deplibs " $lib"
+ need_relink=yes
+ fi
+ ;;
+@@ -5814,7 +6724,7 @@ func_mode_link ()
+ *)
+ case "$compile_rpath " in
+ *" $absdir "*) ;;
+- *) compile_rpath="$compile_rpath $absdir"
++ *) func_append compile_rpath " $absdir" ;;
+ esac
+ ;;
+ esac
+@@ -5823,7 +6733,7 @@ func_mode_link ()
+ *)
+ case "$finalize_rpath " in
+ *" $libdir "*) ;;
+- *) finalize_rpath="$finalize_rpath $libdir"
++ *) func_append finalize_rpath " $libdir" ;;
+ esac
+ ;;
+ esac
+@@ -5835,7 +6745,7 @@ func_mode_link ()
+ shift
+ realname="$1"
+ shift
+- eval "libname=\"$libname_spec\""
++ libname=`eval "\\$ECHO \"$libname_spec\""`
+ # use dlname if we got it. it's perfectly good, no?
+ if test -n "$dlname"; then
+ soname="$dlname"
+@@ -5848,7 +6758,7 @@ func_mode_link ()
+ versuffix="-$major"
+ ;;
+ esac
+- eval "soname=\"$soname_spec\""
++ eval soname=\"$soname_spec\"
+ else
+ soname="$realname"
+ fi
+@@ -5877,7 +6787,7 @@ func_mode_link ()
+ linklib=$newlib
+ fi # test -n "$old_archive_from_expsyms_cmds"
+
+- if test "$linkmode" = prog || test "$mode" != relink; then
++ if test "$linkmode" = prog || test "$opt_mode" != relink; then
+ add_shlibpath=
+ add_dir=
+ add=
+@@ -5933,7 +6843,7 @@ func_mode_link ()
+ if test -n "$inst_prefix_dir"; then
+ case $libdir in
+ [\\/]*)
+- add_dir="$add_dir -L$inst_prefix_dir$libdir"
++ func_append add_dir " -L$inst_prefix_dir$libdir"
+ ;;
+ esac
+ fi
+@@ -5955,7 +6865,7 @@ func_mode_link ()
+ if test -n "$add_shlibpath"; then
+ case :$compile_shlibpath: in
+ *":$add_shlibpath:"*) ;;
+- *) compile_shlibpath="$compile_shlibpath$add_shlibpath:" ;;
++ *) func_append compile_shlibpath "$add_shlibpath:" ;;
+ esac
+ fi
+ if test "$linkmode" = prog; then
+@@ -5969,13 +6879,13 @@ func_mode_link ()
+ test "$hardcode_shlibpath_var" = yes; then
+ case :$finalize_shlibpath: in
+ *":$libdir:"*) ;;
+- *) finalize_shlibpath="$finalize_shlibpath$libdir:" ;;
++ *) func_append finalize_shlibpath "$libdir:" ;;
+ esac
+ fi
+ fi
+ fi
+
+- if test "$linkmode" = prog || test "$mode" = relink; then
++ if test "$linkmode" = prog || test "$opt_mode" = relink; then
+ add_shlibpath=
+ add_dir=
+ add=
+@@ -5989,7 +6899,7 @@ func_mode_link ()
+ elif test "$hardcode_shlibpath_var" = yes; then
+ case :$finalize_shlibpath: in
+ *":$libdir:"*) ;;
+- *) finalize_shlibpath="$finalize_shlibpath$libdir:" ;;
++ *) func_append finalize_shlibpath "$libdir:" ;;
+ esac
+ add="-l$name"
+ elif test "$hardcode_automatic" = yes; then
+@@ -6001,12 +6911,12 @@ func_mode_link ()
+ fi
+ else
+ # We cannot seem to hardcode it, guess we'll fake it.
+- add_dir="-L$libdir"
++ add_dir="-L$lt_sysroot$libdir"
+ # Try looking first in the location we're being installed to.
+ if test -n "$inst_prefix_dir"; then
+ case $libdir in
+ [\\/]*)
+- add_dir="$add_dir -L$inst_prefix_dir$libdir"
++ func_append add_dir " -L$inst_prefix_dir$libdir"
+ ;;
+ esac
+ fi
+@@ -6083,27 +6993,33 @@ func_mode_link ()
+ temp_xrpath=$func_stripname_result
+ case " $xrpath " in
+ *" $temp_xrpath "*) ;;
+- *) xrpath="$xrpath $temp_xrpath";;
++ *) func_append xrpath " $temp_xrpath";;
+ esac;;
+- *) temp_deplibs="$temp_deplibs $libdir";;
++ *) func_append temp_deplibs " $libdir";;
+ esac
+ done
+ dependency_libs="$temp_deplibs"
+ fi
+
+- newlib_search_path="$newlib_search_path $absdir"
++ func_append newlib_search_path " $absdir"
+ # Link against this library
+ test "$link_static" = no && newdependency_libs="$abs_ladir/$laname $newdependency_libs"
+ # ... and its dependency_libs
+ tmp_libs=
+ for deplib in $dependency_libs; do
+ newdependency_libs="$deplib $newdependency_libs"
+- if $opt_duplicate_deps ; then
++ case $deplib in
++ -L*) func_stripname '-L' '' "$deplib"
++ func_resolve_sysroot "$func_stripname_result";;
++ *) func_resolve_sysroot "$deplib" ;;
++ esac
++ if $opt_preserve_dup_deps ; then
+ case "$tmp_libs " in
+- *" $deplib "*) specialdeplibs="$specialdeplibs $deplib" ;;
++ *" $func_resolve_sysroot_result "*)
++ func_append specialdeplibs " $func_resolve_sysroot_result" ;;
+ esac
+ fi
+- tmp_libs="$tmp_libs $deplib"
++ func_append tmp_libs " $func_resolve_sysroot_result"
+ done
+
+ if test "$link_all_deplibs" != no; then
+@@ -6113,8 +7029,10 @@ func_mode_link ()
+ case $deplib in
+ -L*) path="$deplib" ;;
+ *.la)
++ func_resolve_sysroot "$deplib"
++ deplib=$func_resolve_sysroot_result
+ func_dirname "$deplib" "" "."
+- dir="$func_dirname_result"
++ dir=$func_dirname_result
+ # We need an absolute path.
+ case $dir in
+ [\\/]* | [A-Za-z]:[\\/]*) absdir="$dir" ;;
+@@ -6130,7 +7048,7 @@ func_mode_link ()
+ case $host in
+ *-*-darwin*)
+ depdepl=
+- deplibrary_names=`${SED} -n -e 's/^library_names=\(.*\)$/\1/p' $deplib`
++ eval deplibrary_names=`${SED} -n -e 's/^library_names=\(.*\)$/\1/p' $deplib`
+ if test -n "$deplibrary_names" ; then
+ for tmp in $deplibrary_names ; do
+ depdepl=$tmp
+@@ -6141,8 +7059,8 @@ func_mode_link ()
+ if test -z "$darwin_install_name"; then
+ darwin_install_name=`${OTOOL64} -L $depdepl | awk '{if (NR == 2) {print $1;exit}}'`
+ fi
+- compiler_flags="$compiler_flags ${wl}-dylib_file ${wl}${darwin_install_name}:${depdepl}"
+- linker_flags="$linker_flags -dylib_file ${darwin_install_name}:${depdepl}"
++ func_append compiler_flags " ${wl}-dylib_file ${wl}${darwin_install_name}:${depdepl}"
++ func_append linker_flags " -dylib_file ${darwin_install_name}:${depdepl}"
+ path=
+ fi
+ fi
+@@ -6152,7 +7070,7 @@ func_mode_link ()
+ ;;
+ esac
+ else
+- libdir=`${SED} -n -e 's/^libdir=\(.*\)$/\1/p' $deplib`
++ eval libdir=`${SED} -n -e 's/^libdir=\(.*\)$/\1/p' $deplib`
+ test -z "$libdir" && \
+ func_fatal_error "\`$deplib' is not a valid libtool archive"
+ test "$absdir" != "$libdir" && \
+@@ -6192,7 +7110,7 @@ func_mode_link ()
+ for dir in $newlib_search_path; do
+ case "$lib_search_path " in
+ *" $dir "*) ;;
+- *) lib_search_path="$lib_search_path $dir" ;;
++ *) func_append lib_search_path " $dir" ;;
+ esac
+ done
+ newlib_search_path=
+@@ -6205,7 +7123,7 @@ func_mode_link ()
+ fi
+ for var in $vars dependency_libs; do
+ # Add libraries to $var in reverse order
+- eval tmp_libs=\$$var
++ eval tmp_libs=\"\$$var\"
+ new_libs=
+ for deplib in $tmp_libs; do
+ # FIXME: Pedantically, this is the right thing to do, so
+@@ -6250,13 +7168,13 @@ func_mode_link ()
+ -L*)
+ case " $tmp_libs " in
+ *" $deplib "*) ;;
+- *) tmp_libs="$tmp_libs $deplib" ;;
++ *) func_append tmp_libs " $deplib" ;;
+ esac
+ ;;
+- *) tmp_libs="$tmp_libs $deplib" ;;
++ *) func_append tmp_libs " $deplib" ;;
+ esac
+ done
+- eval $var=\$tmp_libs
++ eval $var=\"$tmp_libs\"
+ done # for var
+ fi
+ # Last step: remove runtime libs from dependency_libs
+@@ -6269,7 +7187,7 @@ func_mode_link ()
+ ;;
+ esac
+ if test -n "$i" ; then
+- tmp_libs="$tmp_libs $i"
++ func_append tmp_libs " $i"
+ fi
+ done
+ dependency_libs=$tmp_libs
+@@ -6310,7 +7228,7 @@ func_mode_link ()
+ # Now set the variables for building old libraries.
+ build_libtool_libs=no
+ oldlibs="$output"
+- objs="$objs$old_deplibs"
++ func_append objs "$old_deplibs"
+ ;;
+
+ lib)
+@@ -6319,8 +7237,8 @@ func_mode_link ()
+ lib*)
+ func_stripname 'lib' '.la' "$outputname"
+ name=$func_stripname_result
+- eval "shared_ext=\"$shrext_cmds\""
+- eval "libname=\"$libname_spec\""
++ eval shared_ext=\"$shrext_cmds\"
++ eval libname=\"$libname_spec\"
+ ;;
+ *)
+ test "$module" = no && \
+@@ -6330,8 +7248,8 @@ func_mode_link ()
+ # Add the "lib" prefix for modules if required
+ func_stripname '' '.la' "$outputname"
+ name=$func_stripname_result
+- eval "shared_ext=\"$shrext_cmds\""
+- eval "libname=\"$libname_spec\""
++ eval shared_ext=\"$shrext_cmds\"
++ eval libname=\"$libname_spec\"
+ else
+ func_stripname '' '.la' "$outputname"
+ libname=$func_stripname_result
+@@ -6346,7 +7264,7 @@ func_mode_link ()
+ echo
+ $ECHO "*** Warning: Linking the shared library $output against the non-libtool"
+ $ECHO "*** objects $objs is not portable!"
+- libobjs="$libobjs $objs"
++ func_append libobjs " $objs"
+ fi
+ fi
+
+@@ -6544,7 +7462,7 @@ func_mode_link ()
+ done
+
+ # Make executables depend on our current version.
+- verstring="$verstring:${current}.0"
++ func_append verstring ":${current}.0"
+ ;;
+
+ qnx)
+@@ -6612,10 +7530,10 @@ func_mode_link ()
+ fi
+
+ func_generate_dlsyms "$libname" "$libname" "yes"
+- libobjs="$libobjs $symfileobj"
++ func_append libobjs " $symfileobj"
+ test "X$libobjs" = "X " && libobjs=
+
+- if test "$mode" != relink; then
++ if test "$opt_mode" != relink; then
+ # Remove our outputs, but don't remove object files since they
+ # may have been created when compiling PIC objects.
+ removelist=
+@@ -6631,7 +7549,7 @@ func_mode_link ()
+ continue
+ fi
+ fi
+- removelist="$removelist $p"
++ func_append removelist " $p"
+ ;;
+ *) ;;
+ esac
+@@ -6642,7 +7560,7 @@ func_mode_link ()
+
+ # Now set the variables for building old libraries.
+ if test "$build_old_libs" = yes && test "$build_libtool_libs" != convenience ; then
+- oldlibs="$oldlibs $output_objdir/$libname.$libext"
++ func_append oldlibs " $output_objdir/$libname.$libext"
+
+ # Transform .lo files to .o files.
+ oldobjs="$objs "`$ECHO "$libobjs" | $SP2NL | $SED "/\.${libext}$/d; $lo2o" | $NL2SP`
+@@ -6659,10 +7577,11 @@ func_mode_link ()
+ # If the user specified any rpath flags, then add them.
+ temp_xrpath=
+ for libdir in $xrpath; do
+- temp_xrpath="$temp_xrpath -R$libdir"
++ func_replace_sysroot "$libdir"
++ func_append temp_xrpath " -R$func_replace_sysroot_result"
+ case "$finalize_rpath " in
+ *" $libdir "*) ;;
+- *) finalize_rpath="$finalize_rpath $libdir" ;;
++ *) func_append finalize_rpath " $libdir" ;;
+ esac
+ done
+ if test "$hardcode_into_libs" != yes || test "$build_old_libs" = yes; then
+@@ -6676,7 +7595,7 @@ func_mode_link ()
+ for lib in $old_dlfiles; do
+ case " $dlprefiles $dlfiles " in
+ *" $lib "*) ;;
+- *) dlfiles="$dlfiles $lib" ;;
++ *) func_append dlfiles " $lib" ;;
+ esac
+ done
+
+@@ -6686,7 +7605,7 @@ func_mode_link ()
+ for lib in $old_dlprefiles; do
+ case "$dlprefiles " in
+ *" $lib "*) ;;
+- *) dlprefiles="$dlprefiles $lib" ;;
++ *) func_append dlprefiles " $lib" ;;
+ esac
+ done
+
+@@ -6698,7 +7617,7 @@ func_mode_link ()
+ ;;
+ *-*-rhapsody* | *-*-darwin1.[012])
+ # Rhapsody C library is in the System framework
+- deplibs="$deplibs System.ltframework"
++ func_append deplibs " System.ltframework"
+ ;;
+ *-*-netbsd*)
+ # Don't link with libc until the a.out ld.so is fixed.
+@@ -6715,7 +7634,7 @@ func_mode_link ()
+ *)
+ # Add libc to deplibs on all other systems if necessary.
+ if test "$build_libtool_need_lc" = "yes"; then
+- deplibs="$deplibs -lc"
++ func_append deplibs " -lc"
+ fi
+ ;;
+ esac
+@@ -6764,18 +7683,18 @@ EOF
+ if test "X$allow_libtool_libs_with_static_runtimes" = "Xyes" ; then
+ case " $predeps $postdeps " in
+ *" $i "*)
+- newdeplibs="$newdeplibs $i"
++ func_append newdeplibs " $i"
+ i=""
+ ;;
+ esac
+ fi
+ if test -n "$i" ; then
+- eval "libname=\"$libname_spec\""
+- eval "deplib_matches=\"$library_names_spec\""
++ libname=`eval "\\$ECHO \"$libname_spec\""`
++ deplib_matches=`eval "\\$ECHO \"$library_names_spec\""`
+ set dummy $deplib_matches; shift
+ deplib_match=$1
+ if test `expr "$ldd_output" : ".*$deplib_match"` -ne 0 ; then
+- newdeplibs="$newdeplibs $i"
++ func_append newdeplibs " $i"
+ else
+ droppeddeps=yes
+ echo
+@@ -6789,7 +7708,7 @@ EOF
+ fi
+ ;;
+ *)
+- newdeplibs="$newdeplibs $i"
++ func_append newdeplibs " $i"
+ ;;
+ esac
+ done
+@@ -6807,18 +7726,18 @@ EOF
+ if test "X$allow_libtool_libs_with_static_runtimes" = "Xyes" ; then
+ case " $predeps $postdeps " in
+ *" $i "*)
+- newdeplibs="$newdeplibs $i"
++ func_append newdeplibs " $i"
+ i=""
+ ;;
+ esac
+ fi
+ if test -n "$i" ; then
+- eval "libname=\"$libname_spec\""
+- eval "deplib_matches=\"$library_names_spec\""
++ libname=`eval "\\$ECHO \"$libname_spec\""`
++ deplib_matches=`eval "\\$ECHO \"$library_names_spec\""`
+ set dummy $deplib_matches; shift
+ deplib_match=$1
+ if test `expr "$ldd_output" : ".*$deplib_match"` -ne 0 ; then
+- newdeplibs="$newdeplibs $i"
++ func_append newdeplibs " $i"
+ else
+ droppeddeps=yes
+ echo
+@@ -6840,7 +7759,7 @@ EOF
+ fi
+ ;;
+ *)
+- newdeplibs="$newdeplibs $i"
++ func_append newdeplibs " $i"
+ ;;
+ esac
+ done
+@@ -6857,15 +7776,27 @@ EOF
+ if test "X$allow_libtool_libs_with_static_runtimes" = "Xyes" ; then
+ case " $predeps $postdeps " in
+ *" $a_deplib "*)
+- newdeplibs="$newdeplibs $a_deplib"
++ func_append newdeplibs " $a_deplib"
+ a_deplib=""
+ ;;
+ esac
+ fi
+ if test -n "$a_deplib" ; then
+- eval "libname=\"$libname_spec\""
++ libname=`eval "\\$ECHO \"$libname_spec\""`
++ if test -n "$file_magic_glob"; then
++ libnameglob=`func_echo_all "$libname" | $SED -e $file_magic_glob`
++ else
++ libnameglob=$libname
++ fi
++ test "$want_nocaseglob" = yes && nocaseglob=`shopt -p nocaseglob`
+ for i in $lib_search_path $sys_lib_search_path $shlib_search_path; do
+- potential_libs=`ls $i/$libname[.-]* 2>/dev/null`
++ if test "$want_nocaseglob" = yes; then
++ shopt -s nocaseglob
++ potential_libs=`ls $i/$libnameglob[.-]* 2>/dev/null`
++ $nocaseglob
++ else
++ potential_libs=`ls $i/$libnameglob[.-]* 2>/dev/null`
++ fi
+ for potent_lib in $potential_libs; do
+ # Follow soft links.
+ if ls -lLd "$potent_lib" 2>/dev/null |
+@@ -6885,10 +7816,10 @@ EOF
+ *) potlib=`$ECHO "$potlib" | $SED 's,[^/]*$,,'`"$potliblink";;
+ esac
+ done
+- if eval "$file_magic_cmd \"\$potlib\"" 2>/dev/null |
++ if eval $file_magic_cmd \"\$potlib\" 2>/dev/null |
+ $SED -e 10q |
+ $EGREP "$file_magic_regex" > /dev/null; then
+- newdeplibs="$newdeplibs $a_deplib"
++ func_append newdeplibs " $a_deplib"
+ a_deplib=""
+ break 2
+ fi
+@@ -6913,7 +7844,7 @@ EOF
+ ;;
+ *)
+ # Add a -L argument.
+- newdeplibs="$newdeplibs $a_deplib"
++ func_append newdeplibs " $a_deplib"
+ ;;
+ esac
+ done # Gone through all deplibs.
+@@ -6929,20 +7860,20 @@ EOF
+ if test "X$allow_libtool_libs_with_static_runtimes" = "Xyes" ; then
+ case " $predeps $postdeps " in
+ *" $a_deplib "*)
+- newdeplibs="$newdeplibs $a_deplib"
++ func_append newdeplibs " $a_deplib"
+ a_deplib=""
+ ;;
+ esac
+ fi
+ if test -n "$a_deplib" ; then
+- eval "libname=\"$libname_spec\""
++ libname=`eval "\\$ECHO \"$libname_spec\""`
+ for i in $lib_search_path $sys_lib_search_path $shlib_search_path; do
+ potential_libs=`ls $i/$libname[.-]* 2>/dev/null`
+ for potent_lib in $potential_libs; do
+ potlib="$potent_lib" # see symlink-check above in file_magic test
+ if eval "\$ECHO \"$potent_lib\"" 2>/dev/null | $SED 10q | \
+ $EGREP "$match_pattern_regex" > /dev/null; then
+- newdeplibs="$newdeplibs $a_deplib"
++ func_append newdeplibs " $a_deplib"
+ a_deplib=""
+ break 2
+ fi
+@@ -6967,7 +7898,7 @@ EOF
+ ;;
+ *)
+ # Add a -L argument.
+- newdeplibs="$newdeplibs $a_deplib"
++ func_append newdeplibs " $a_deplib"
+ ;;
+ esac
+ done # Gone through all deplibs.
+@@ -7071,7 +8002,7 @@ EOF
+ *)
+ case " $deplibs " in
+ *" -L$path/$objdir "*)
+- new_libs="$new_libs -L$path/$objdir" ;;
++ func_append new_libs " -L$path/$objdir" ;;
+ esac
+ ;;
+ esac
+@@ -7081,10 +8012,10 @@ EOF
+ -L*)
+ case " $new_libs " in
+ *" $deplib "*) ;;
+- *) new_libs="$new_libs $deplib" ;;
++ *) func_append new_libs " $deplib" ;;
+ esac
+ ;;
+- *) new_libs="$new_libs $deplib" ;;
++ *) func_append new_libs " $deplib" ;;
+ esac
+ done
+ deplibs="$new_libs"
+@@ -7101,10 +8032,12 @@ EOF
+ hardcode_libdirs=
+ dep_rpath=
+ rpath="$finalize_rpath"
+- test "$mode" != relink && rpath="$compile_rpath$rpath"
++ test "$opt_mode" != relink && rpath="$compile_rpath$rpath"
+ for libdir in $rpath; do
+ if test -n "$hardcode_libdir_flag_spec"; then
+ if test -n "$hardcode_libdir_separator"; then
++ func_replace_sysroot "$libdir"
++ libdir=$func_replace_sysroot_result
+ if test -z "$hardcode_libdirs"; then
+ hardcode_libdirs="$libdir"
+ else
+@@ -7113,18 +8046,18 @@ EOF
+ *"$hardcode_libdir_separator$libdir$hardcode_libdir_separator"*)
+ ;;
+ *)
+- hardcode_libdirs="$hardcode_libdirs$hardcode_libdir_separator$libdir"
++ func_append hardcode_libdirs "$hardcode_libdir_separator$libdir"
+ ;;
+ esac
+ fi
+ else
+- eval "flag=\"$hardcode_libdir_flag_spec\""
+- dep_rpath="$dep_rpath $flag"
++ eval flag=\"$hardcode_libdir_flag_spec\"
++ func_append dep_rpath " $flag"
+ fi
+ elif test -n "$runpath_var"; then
+ case "$perm_rpath " in
+ *" $libdir "*) ;;
+- *) perm_rpath="$perm_rpath $libdir" ;;
++ *) func_apped perm_rpath " $libdir" ;;
+ esac
+ fi
+ done
+@@ -7133,40 +8066,38 @@ EOF
+ test -n "$hardcode_libdirs"; then
+ libdir="$hardcode_libdirs"
+ if test -n "$hardcode_libdir_flag_spec_ld"; then
+- eval "dep_rpath=\"$hardcode_libdir_flag_spec_ld\""
++ eval dep_rpath=\"$hardcode_libdir_flag_spec_ld\"
+ else
+- eval "dep_rpath=\"$hardcode_libdir_flag_spec\""
++ eval dep_rpath=\"$hardcode_libdir_flag_spec\"
+ fi
+ fi
+ if test -n "$runpath_var" && test -n "$perm_rpath"; then
+ # We should set the runpath_var.
+ rpath=
+ for dir in $perm_rpath; do
+- rpath="$rpath$dir:"
++ func_append rpath "$dir:"
+ done
+- eval $runpath_var=\$rpath\$$runpath_var
+- export $runpath_var
++ eval "$runpath_var='$rpath\$$runpath_var'; export $runpath_var"
+ fi
+ test -n "$dep_rpath" && deplibs="$dep_rpath $deplibs"
+ fi
+
+ shlibpath="$finalize_shlibpath"
+- test "$mode" != relink && shlibpath="$compile_shlibpath$shlibpath"
++ test "$opt_mode" != relink && shlibpath="$compile_shlibpath$shlibpath"
+ if test -n "$shlibpath"; then
+- eval $shlibpath_var=\$shlibpath\$$shlibpath_var
+- export $shlibpath_var
++ eval "$shlibpath_var='$shlibpath\$$shlibpath_var'; export $shlibpath_var"
+ fi
+
+ # Get the real and link names of the library.
+- eval "shared_ext=\"$shrext_cmds\""
+- eval "library_names=\"$library_names_spec\""
++ eval shared_ext=\"$shrext_cmds\"
++ eval library_names=\"$library_names_spec\"
+ set dummy $library_names
+ shift
+ realname="$1"
+ shift
+
+ if test -n "$soname_spec"; then
+- eval "soname=\"$soname_spec\""
++ eval soname=\"$soname_spec\"
+ else
+ soname="$realname"
+ fi
+@@ -7178,7 +8109,7 @@ EOF
+ linknames=
+ for link
+ do
+- linknames="$linknames $link"
++ func_append linknames " $link"
+ done
+
+ # Use standard objects if they are pic
+@@ -7189,7 +8120,7 @@ EOF
+ if test -n "$export_symbols" && test -n "$include_expsyms"; then
+ $opt_dry_run || cp "$export_symbols" "$output_objdir/$libname.uexp"
+ export_symbols="$output_objdir/$libname.uexp"
+- delfiles="$delfiles $export_symbols"
++ func_append delfiles " $export_symbols"
+ fi
+
+ orig_export_symbols=
+@@ -7220,13 +8151,45 @@ EOF
+ $opt_dry_run || $RM $export_symbols
+ cmds=$export_symbols_cmds
+ save_ifs="$IFS"; IFS='~'
+- for cmd in $cmds; do
++ for cmd1 in $cmds; do
+ IFS="$save_ifs"
+- eval "cmd=\"$cmd\""
+- func_len " $cmd"
+- len=$func_len_result
+- if test "$len" -lt "$max_cmd_len" || test "$max_cmd_len" -le -1; then
++ # Take the normal branch if the nm_file_list_spec branch
++ # doesn't work or if tool conversion is not needed.
++ case $nm_file_list_spec~$to_tool_file_cmd in
++ *~func_convert_file_noop | *~func_convert_file_msys_to_w32 | ~*)
++ try_normal_branch=yes
++ eval cmd=\"$cmd1\"
++ func_len " $cmd"
++ len=$func_len_result
++ ;;
++ *)
++ try_normal_branch=no
++ ;;
++ esac
++ if test "$try_normal_branch" = yes \
++ && { test "$len" -lt "$max_cmd_len" \
++ || test "$max_cmd_len" -le -1; }
++ then
++ func_show_eval "$cmd" 'exit $?'
++ skipped_export=false
++ elif test -n "$nm_file_list_spec"; then
++ func_basename "$output"
++ output_la=$func_basename_result
++ save_libobjs=$libobjs
++ save_output=$output
++ output=${output_objdir}/${output_la}.nm
++ func_to_tool_file "$output"
++ libobjs=$nm_file_list_spec$func_to_tool_file_result
++ func_append delfiles " $output"
++ func_verbose "creating $NM input file list: $output"
++ for obj in $save_libobjs; do
++ func_to_tool_file "$obj"
++ $ECHO "$func_to_tool_file_result"
++ done > "$output"
++ eval cmd=\"$cmd1\"
+ func_show_eval "$cmd" 'exit $?'
++ output=$save_output
++ libobjs=$save_libobjs
+ skipped_export=false
+ else
+ # The command line is too long to execute in one step.
+@@ -7248,7 +8211,7 @@ EOF
+ if test -n "$export_symbols" && test -n "$include_expsyms"; then
+ tmp_export_symbols="$export_symbols"
+ test -n "$orig_export_symbols" && tmp_export_symbols="$orig_export_symbols"
+- $opt_dry_run || $ECHO "$include_expsyms" | $SP2NL >> "$tmp_export_symbols"
++ $opt_dry_run || eval '$ECHO "$include_expsyms" | $SP2NL >> "$tmp_export_symbols"'
+ fi
+
+ if test "X$skipped_export" != "X:" && test -n "$orig_export_symbols"; then
+@@ -7260,7 +8223,7 @@ EOF
+ # global variables. join(1) would be nice here, but unfortunately
+ # isn't a blessed tool.
+ $opt_dry_run || $SED -e '/[ ,]DATA/!d;s,\(.*\)\([ \,].*\),s|^\1$|\1\2|,' < $export_symbols > $output_objdir/$libname.filter
+- delfiles="$delfiles $export_symbols $output_objdir/$libname.filter"
++ func_append delfiles " $export_symbols $output_objdir/$libname.filter"
+ export_symbols=$output_objdir/$libname.def
+ $opt_dry_run || $SED -f $output_objdir/$libname.filter < $orig_export_symbols > $export_symbols
+ fi
+@@ -7270,7 +8233,7 @@ EOF
+ case " $convenience " in
+ *" $test_deplib "*) ;;
+ *)
+- tmp_deplibs="$tmp_deplibs $test_deplib"
++ func_append tmp_deplibs " $test_deplib"
+ ;;
+ esac
+ done
+@@ -7286,43 +8249,43 @@ EOF
+ fi
+ if test -n "$whole_archive_flag_spec"; then
+ save_libobjs=$libobjs
+- eval "libobjs=\"\$libobjs $whole_archive_flag_spec\""
++ eval libobjs=\"\$libobjs $whole_archive_flag_spec\"
+ test "X$libobjs" = "X " && libobjs=
+ else
+ gentop="$output_objdir/${outputname}x"
+- generated="$generated $gentop"
++ func_append generated " $gentop"
+
+ func_extract_archives $gentop $convenience
+- libobjs="$libobjs $func_extract_archives_result"
++ func_append libobjs " $func_extract_archives_result"
+ test "X$libobjs" = "X " && libobjs=
+ fi
+ fi
+
+ if test "$thread_safe" = yes && test -n "$thread_safe_flag_spec"; then
+- eval "flag=\"$thread_safe_flag_spec\""
+- linker_flags="$linker_flags $flag"
++ eval flag=\"$thread_safe_flag_spec\"
++ func_append linker_flags " $flag"
+ fi
+
+ # Make a backup of the uninstalled library when relinking
+- if test "$mode" = relink; then
+- $opt_dry_run || (cd $output_objdir && $RM ${realname}U && $MV $realname ${realname}U) || exit $?
++ if test "$opt_mode" = relink; then
++ $opt_dry_run || eval '(cd $output_objdir && $RM ${realname}U && $MV $realname ${realname}U)' || exit $?
+ fi
+
+ # Do each of the archive commands.
+ if test "$module" = yes && test -n "$module_cmds" ; then
+ if test -n "$export_symbols" && test -n "$module_expsym_cmds"; then
+- eval "test_cmds=\"$module_expsym_cmds\""
++ eval test_cmds=\"$module_expsym_cmds\"
+ cmds=$module_expsym_cmds
+ else
+- eval "test_cmds=\"$module_cmds\""
++ eval test_cmds=\"$module_cmds\"
+ cmds=$module_cmds
+ fi
+ else
+ if test -n "$export_symbols" && test -n "$archive_expsym_cmds"; then
+- eval "test_cmds=\"$archive_expsym_cmds\""
++ eval test_cmds=\"$archive_expsym_cmds\"
+ cmds=$archive_expsym_cmds
+ else
+- eval "test_cmds=\"$archive_cmds\""
++ eval test_cmds=\"$archive_cmds\"
+ cmds=$archive_cmds
+ fi
+ fi
+@@ -7366,10 +8329,13 @@ EOF
+ echo 'INPUT (' > $output
+ for obj in $save_libobjs
+ do
+- $ECHO "$obj" >> $output
++ func_to_tool_file "$obj"
++ $ECHO "$func_to_tool_file_result" >> $output
+ done
+ echo ')' >> $output
+- delfiles="$delfiles $output"
++ func_append delfiles " $output"
++ func_to_tool_file "$output"
++ output=$func_to_tool_file_result
+ elif test -n "$save_libobjs" && test "X$skipped_export" != "X:" && test "X$file_list_spec" != X; then
+ output=${output_objdir}/${output_la}.lnk
+ func_verbose "creating linker input file list: $output"
+@@ -7383,15 +8349,17 @@ EOF
+ fi
+ for obj
+ do
+- $ECHO "$obj" >> $output
++ func_to_tool_file "$obj"
++ $ECHO "$func_to_tool_file_result" >> $output
+ done
+- delfiles="$delfiles $output"
+- output=$firstobj\"$file_list_spec$output\"
++ func_append delfiles " $output"
++ func_to_tool_file "$output"
++ output=$firstobj\"$file_list_spec$func_to_tool_file_result\"
+ else
+ if test -n "$save_libobjs"; then
+ func_verbose "creating reloadable object files..."
+ output=$output_objdir/$output_la-${k}.$objext
+- eval "test_cmds=\"$reload_cmds\""
++ eval test_cmds=\"$reload_cmds\"
+ func_len " $test_cmds"
+ len0=$func_len_result
+ len=$len0
+@@ -7411,12 +8379,12 @@ EOF
+ if test "$k" -eq 1 ; then
+ # The first file doesn't have a previous command to add.
+ reload_objs=$objlist
+- eval "concat_cmds=\"$reload_cmds\""
++ eval concat_cmds=\"$reload_cmds\"
+ else
+ # All subsequent reloadable object files will link in
+ # the last one created.
+ reload_objs="$objlist $last_robj"
+- eval "concat_cmds=\"\$concat_cmds~$reload_cmds~\$RM $last_robj\""
++ eval concat_cmds=\"\$concat_cmds~$reload_cmds~\$RM $last_robj\"
+ fi
+ last_robj=$output_objdir/$output_la-${k}.$objext
+ func_arith $k + 1
+@@ -7433,11 +8401,11 @@ EOF
+ # files will link in the last one created.
+ test -z "$concat_cmds" || concat_cmds=$concat_cmds~
+ reload_objs="$objlist $last_robj"
+- eval "concat_cmds=\"\${concat_cmds}$reload_cmds\""
++ eval concat_cmds=\"\${concat_cmds}$reload_cmds\"
+ if test -n "$last_robj"; then
+- eval "concat_cmds=\"\${concat_cmds}~\$RM $last_robj\""
++ eval concat_cmds=\"\${concat_cmds}~\$RM $last_robj\"
+ fi
+- delfiles="$delfiles $output"
++ func_append delfiles " $output"
+
+ else
+ output=
+@@ -7450,9 +8418,9 @@ EOF
+ libobjs=$output
+ # Append the command to create the export file.
+ test -z "$concat_cmds" || concat_cmds=$concat_cmds~
+- eval "concat_cmds=\"\$concat_cmds$export_symbols_cmds\""
++ eval concat_cmds=\"\$concat_cmds$export_symbols_cmds\"
+ if test -n "$last_robj"; then
+- eval "concat_cmds=\"\$concat_cmds~\$RM $last_robj\""
++ eval concat_cmds=\"\$concat_cmds~\$RM $last_robj\"
+ fi
+ fi
+
+@@ -7471,7 +8439,7 @@ EOF
+ lt_exit=$?
+
+ # Restore the uninstalled library and exit
+- if test "$mode" = relink; then
++ if test "$opt_mode" = relink; then
+ ( cd "$output_objdir" && \
+ $RM "${realname}T" && \
+ $MV "${realname}U" "$realname" )
+@@ -7492,7 +8460,7 @@ EOF
+ if test -n "$export_symbols" && test -n "$include_expsyms"; then
+ tmp_export_symbols="$export_symbols"
+ test -n "$orig_export_symbols" && tmp_export_symbols="$orig_export_symbols"
+- $opt_dry_run || $ECHO "$include_expsyms" | $SP2NL >> "$tmp_export_symbols"
++ $opt_dry_run || eval '$ECHO "$include_expsyms" | $SP2NL >> "$tmp_export_symbols"'
+ fi
+
+ if test -n "$orig_export_symbols"; then
+@@ -7504,7 +8472,7 @@ EOF
+ # global variables. join(1) would be nice here, but unfortunately
+ # isn't a blessed tool.
+ $opt_dry_run || $SED -e '/[ ,]DATA/!d;s,\(.*\)\([ \,].*\),s|^\1$|\1\2|,' < $export_symbols > $output_objdir/$libname.filter
+- delfiles="$delfiles $export_symbols $output_objdir/$libname.filter"
++ func_append delfiles " $export_symbols $output_objdir/$libname.filter"
+ export_symbols=$output_objdir/$libname.def
+ $opt_dry_run || $SED -f $output_objdir/$libname.filter < $orig_export_symbols > $export_symbols
+ fi
+@@ -7515,7 +8483,7 @@ EOF
+ output=$save_output
+
+ if test -n "$convenience" && test -n "$whole_archive_flag_spec"; then
+- eval "libobjs=\"\$libobjs $whole_archive_flag_spec\""
++ eval libobjs=\"\$libobjs $whole_archive_flag_spec\"
+ test "X$libobjs" = "X " && libobjs=
+ fi
+ # Expand the library linking commands again to reset the
+@@ -7539,23 +8507,23 @@ EOF
+
+ if test -n "$delfiles"; then
+ # Append the command to remove temporary files to $cmds.
+- eval "cmds=\"\$cmds~\$RM $delfiles\""
++ eval cmds=\"\$cmds~\$RM $delfiles\"
+ fi
+
+ # Add any objects from preloaded convenience libraries
+ if test -n "$dlprefiles"; then
+ gentop="$output_objdir/${outputname}x"
+- generated="$generated $gentop"
++ func_append generated " $gentop"
+
+ func_extract_archives $gentop $dlprefiles
+- libobjs="$libobjs $func_extract_archives_result"
++ func_append libobjs " $func_extract_archives_result"
+ test "X$libobjs" = "X " && libobjs=
+ fi
+
+ save_ifs="$IFS"; IFS='~'
+ for cmd in $cmds; do
+ IFS="$save_ifs"
+- eval "cmd=\"$cmd\""
++ eval cmd=\"$cmd\"
+ $opt_silent || {
+ func_quote_for_expand "$cmd"
+ eval "func_echo $func_quote_for_expand_result"
+@@ -7564,7 +8532,7 @@ EOF
+ lt_exit=$?
+
+ # Restore the uninstalled library and exit
+- if test "$mode" = relink; then
++ if test "$opt_mode" = relink; then
+ ( cd "$output_objdir" && \
+ $RM "${realname}T" && \
+ $MV "${realname}U" "$realname" )
+@@ -7576,8 +8544,8 @@ EOF
+ IFS="$save_ifs"
+
+ # Restore the uninstalled library and exit
+- if test "$mode" = relink; then
+- $opt_dry_run || (cd $output_objdir && $RM ${realname}T && $MV $realname ${realname}T && $MV ${realname}U $realname) || exit $?
++ if test "$opt_mode" = relink; then
++ $opt_dry_run || eval '(cd $output_objdir && $RM ${realname}T && $MV $realname ${realname}T && $MV ${realname}U $realname)' || exit $?
+
+ if test -n "$convenience"; then
+ if test -z "$whole_archive_flag_spec"; then
+@@ -7656,17 +8624,20 @@ EOF
+
+ if test -n "$convenience"; then
+ if test -n "$whole_archive_flag_spec"; then
+- eval "tmp_whole_archive_flags=\"$whole_archive_flag_spec\""
++ eval tmp_whole_archive_flags=\"$whole_archive_flag_spec\"
+ reload_conv_objs=$reload_objs\ `$ECHO "$tmp_whole_archive_flags" | $SED 's|,| |g'`
+ else
+ gentop="$output_objdir/${obj}x"
+- generated="$generated $gentop"
++ func_append generated " $gentop"
+
+ func_extract_archives $gentop $convenience
+ reload_conv_objs="$reload_objs $func_extract_archives_result"
+ fi
+ fi
+
++ # If we're not building shared, we need to use non_pic_objs
++ test "$build_libtool_libs" != yes && libobjs="$non_pic_objects"
++
+ # Create the old-style object.
+ reload_objs="$objs$old_deplibs "`$ECHO "$libobjs" | $SP2NL | $SED "/\.${libext}$/d; /\.lib$/d; $lo2o" | $NL2SP`" $reload_conv_objs" ### testsuite: skip nested quoting test
+
+@@ -7690,7 +8661,7 @@ EOF
+ # Create an invalid libtool object if no PIC, so that we don't
+ # accidentally link it into a program.
+ # $show "echo timestamp > $libobj"
+- # $opt_dry_run || echo timestamp > $libobj || exit $?
++ # $opt_dry_run || eval "echo timestamp > $libobj" || exit $?
+ exit $EXIT_SUCCESS
+ fi
+
+@@ -7740,8 +8711,8 @@ EOF
+ if test "$tagname" = CXX ; then
+ case ${MACOSX_DEPLOYMENT_TARGET-10.0} in
+ 10.[0123])
+- compile_command="$compile_command ${wl}-bind_at_load"
+- finalize_command="$finalize_command ${wl}-bind_at_load"
++ func_append compile_command " ${wl}-bind_at_load"
++ func_append finalize_command " ${wl}-bind_at_load"
+ ;;
+ esac
+ fi
+@@ -7761,7 +8732,7 @@ EOF
+ *)
+ case " $compile_deplibs " in
+ *" -L$path/$objdir "*)
+- new_libs="$new_libs -L$path/$objdir" ;;
++ func_append new_libs " -L$path/$objdir" ;;
+ esac
+ ;;
+ esac
+@@ -7771,17 +8742,17 @@ EOF
+ -L*)
+ case " $new_libs " in
+ *" $deplib "*) ;;
+- *) new_libs="$new_libs $deplib" ;;
++ *) func_append new_libs " $deplib" ;;
+ esac
+ ;;
+- *) new_libs="$new_libs $deplib" ;;
++ *) func_append new_libs " $deplib" ;;
+ esac
+ done
+ compile_deplibs="$new_libs"
+
+
+- compile_command="$compile_command $compile_deplibs"
+- finalize_command="$finalize_command $finalize_deplibs"
++ func_append compile_command " $compile_deplibs"
++ func_append finalize_command " $finalize_deplibs"
+
+ if test -n "$rpath$xrpath"; then
+ # If the user specified any rpath flags, then add them.
+@@ -7789,7 +8760,7 @@ EOF
+ # This is the magic to use -rpath.
+ case "$finalize_rpath " in
+ *" $libdir "*) ;;
+- *) finalize_rpath="$finalize_rpath $libdir" ;;
++ *) func_append finalize_rpath " $libdir" ;;
+ esac
+ done
+ fi
+@@ -7808,18 +8779,18 @@ EOF
+ *"$hardcode_libdir_separator$libdir$hardcode_libdir_separator"*)
+ ;;
+ *)
+- hardcode_libdirs="$hardcode_libdirs$hardcode_libdir_separator$libdir"
++ func_append hardcode_libdirs "$hardcode_libdir_separator$libdir"
+ ;;
+ esac
+ fi
+ else
+- eval "flag=\"$hardcode_libdir_flag_spec\""
+- rpath="$rpath $flag"
++ eval flag=\"$hardcode_libdir_flag_spec\"
++ func_append rpath " $flag"
+ fi
+ elif test -n "$runpath_var"; then
+ case "$perm_rpath " in
+ *" $libdir "*) ;;
+- *) perm_rpath="$perm_rpath $libdir" ;;
++ *) func_append perm_rpath " $libdir" ;;
+ esac
+ fi
+ case $host in
+@@ -7828,12 +8799,12 @@ EOF
+ case :$dllsearchpath: in
+ *":$libdir:"*) ;;
+ ::) dllsearchpath=$libdir;;
+- *) dllsearchpath="$dllsearchpath:$libdir";;
++ *) func_append dllsearchpath ":$libdir";;
+ esac
+ case :$dllsearchpath: in
+ *":$testbindir:"*) ;;
+ ::) dllsearchpath=$testbindir;;
+- *) dllsearchpath="$dllsearchpath:$testbindir";;
++ *) func_append dllsearchpath ":$testbindir";;
+ esac
+ ;;
+ esac
+@@ -7842,7 +8813,7 @@ EOF
+ if test -n "$hardcode_libdir_separator" &&
+ test -n "$hardcode_libdirs"; then
+ libdir="$hardcode_libdirs"
+- eval "rpath=\" $hardcode_libdir_flag_spec\""
++ eval rpath=\" $hardcode_libdir_flag_spec\"
+ fi
+ compile_rpath="$rpath"
+
+@@ -7859,18 +8830,18 @@ EOF
+ *"$hardcode_libdir_separator$libdir$hardcode_libdir_separator"*)
+ ;;
+ *)
+- hardcode_libdirs="$hardcode_libdirs$hardcode_libdir_separator$libdir"
++ func_append hardcode_libdirs "$hardcode_libdir_separator$libdir"
+ ;;
+ esac
+ fi
+ else
+- eval "flag=\"$hardcode_libdir_flag_spec\""
+- rpath="$rpath $flag"
++ eval flag=\"$hardcode_libdir_flag_spec\"
++ func_append rpath " $flag"
+ fi
+ elif test -n "$runpath_var"; then
+ case "$finalize_perm_rpath " in
+ *" $libdir "*) ;;
+- *) finalize_perm_rpath="$finalize_perm_rpath $libdir" ;;
++ *) func_append finalize_perm_rpath " $libdir" ;;
+ esac
+ fi
+ done
+@@ -7878,7 +8849,7 @@ EOF
+ if test -n "$hardcode_libdir_separator" &&
+ test -n "$hardcode_libdirs"; then
+ libdir="$hardcode_libdirs"
+- eval "rpath=\" $hardcode_libdir_flag_spec\""
++ eval rpath=\" $hardcode_libdir_flag_spec\"
+ fi
+ finalize_rpath="$rpath"
+
+@@ -7921,6 +8892,12 @@ EOF
+ exit_status=0
+ func_show_eval "$link_command" 'exit_status=$?'
+
++ if test -n "$postlink_cmds"; then
++ func_to_tool_file "$output"
++ postlink_cmds=`func_echo_all "$postlink_cmds" | $SED -e 's%@OUTPUT@%'"$output"'%g' -e 's%@TOOL_OUTPUT@%'"$func_to_tool_file_result"'%g'`
++ func_execute_cmds "$postlink_cmds" 'exit $?'
++ fi
++
+ # Delete the generated files.
+ if test -f "$output_objdir/${outputname}S.${objext}"; then
+ func_show_eval '$RM "$output_objdir/${outputname}S.${objext}"'
+@@ -7943,7 +8920,7 @@ EOF
+ # We should set the runpath_var.
+ rpath=
+ for dir in $perm_rpath; do
+- rpath="$rpath$dir:"
++ func_append rpath "$dir:"
+ done
+ compile_var="$runpath_var=\"$rpath\$$runpath_var\" "
+ fi
+@@ -7951,7 +8928,7 @@ EOF
+ # We should set the runpath_var.
+ rpath=
+ for dir in $finalize_perm_rpath; do
+- rpath="$rpath$dir:"
++ func_append rpath "$dir:"
+ done
+ finalize_var="$runpath_var=\"$rpath\$$runpath_var\" "
+ fi
+@@ -7966,6 +8943,13 @@ EOF
+ $opt_dry_run || $RM $output
+ # Link the executable and exit
+ func_show_eval "$link_command" 'exit $?'
++
++ if test -n "$postlink_cmds"; then
++ func_to_tool_file "$output"
++ postlink_cmds=`func_echo_all "$postlink_cmds" | $SED -e 's%@OUTPUT@%'"$output"'%g' -e 's%@TOOL_OUTPUT@%'"$func_to_tool_file_result"'%g'`
++ func_execute_cmds "$postlink_cmds" 'exit $?'
++ fi
++
+ exit $EXIT_SUCCESS
+ fi
+
+@@ -7999,6 +8983,12 @@ EOF
+
+ func_show_eval "$link_command" 'exit $?'
+
++ if test -n "$postlink_cmds"; then
++ func_to_tool_file "$output_objdir/$outputname"
++ postlink_cmds=`func_echo_all "$postlink_cmds" | $SED -e 's%@OUTPUT@%'"$output_objdir/$outputname"'%g' -e 's%@TOOL_OUTPUT@%'"$func_to_tool_file_result"'%g'`
++ func_execute_cmds "$postlink_cmds" 'exit $?'
++ fi
++
+ # Now create the wrapper script.
+ func_verbose "creating $output"
+
+@@ -8096,7 +9086,7 @@ EOF
+ else
+ oldobjs="$old_deplibs $non_pic_objects"
+ if test "$preload" = yes && test -f "$symfileobj"; then
+- oldobjs="$oldobjs $symfileobj"
++ func_append oldobjs " $symfileobj"
+ fi
+ fi
+ addlibs="$old_convenience"
+@@ -8104,10 +9094,10 @@ EOF
+
+ if test -n "$addlibs"; then
+ gentop="$output_objdir/${outputname}x"
+- generated="$generated $gentop"
++ func_append generated " $gentop"
+
+ func_extract_archives $gentop $addlibs
+- oldobjs="$oldobjs $func_extract_archives_result"
++ func_append oldobjs " $func_extract_archives_result"
+ fi
+
+ # Do each command in the archive commands.
+@@ -8118,10 +9108,10 @@ EOF
+ # Add any objects from preloaded convenience libraries
+ if test -n "$dlprefiles"; then
+ gentop="$output_objdir/${outputname}x"
+- generated="$generated $gentop"
++ func_append generated " $gentop"
+
+ func_extract_archives $gentop $dlprefiles
+- oldobjs="$oldobjs $func_extract_archives_result"
++ func_append oldobjs " $func_extract_archives_result"
+ fi
+
+ # POSIX demands no paths to be encoded in archives. We have
+@@ -8139,7 +9129,7 @@ EOF
+ else
+ echo "copying selected object files to avoid basename conflicts..."
+ gentop="$output_objdir/${outputname}x"
+- generated="$generated $gentop"
++ func_append generated " $gentop"
+ func_mkdir_p "$gentop"
+ save_oldobjs=$oldobjs
+ oldobjs=
+@@ -8163,18 +9153,28 @@ EOF
+ esac
+ done
+ func_show_eval "ln $obj $gentop/$newobj || cp $obj $gentop/$newobj"
+- oldobjs="$oldobjs $gentop/$newobj"
++ func_append oldobjs " $gentop/$newobj"
+ ;;
+- *) oldobjs="$oldobjs $obj" ;;
++ *) func_append oldobjs " $obj" ;;
+ esac
+ done
+ fi
+- eval "cmds=\"$old_archive_cmds\""
++ eval cmds=\"$old_archive_cmds\"
+
+ func_len " $cmds"
+ len=$func_len_result
+ if test "$len" -lt "$max_cmd_len" || test "$max_cmd_len" -le -1; then
+ cmds=$old_archive_cmds
++ elif test -n "$archiver_list_spec"; then
++ func_verbose "using command file archive linking..."
++ for obj in $oldobjs
++ do
++ func_to_tool_file "$obj"
++ $ECHO "$func_to_tool_file_result"
++ done > $output_objdir/$libname.libcmd
++ func_to_tool_file "$output_objdir/$libname.libcmd"
++ oldobjs=" $archiver_list_spec$func_to_tool_file_result"
++ cmds=$old_archive_cmds
+ else
+ # the command line is too long to link in one step, link in parts
+ func_verbose "using piecewise archive linking..."
+@@ -8189,7 +9189,7 @@ EOF
+ do
+ last_oldobj=$obj
+ done
+- eval "test_cmds=\"$old_archive_cmds\""
++ eval test_cmds=\"$old_archive_cmds\"
+ func_len " $test_cmds"
+ len0=$func_len_result
+ len=$len0
+@@ -8208,7 +9208,7 @@ EOF
+ RANLIB=$save_RANLIB
+ fi
+ test -z "$concat_cmds" || concat_cmds=$concat_cmds~
+- eval "concat_cmds=\"\${concat_cmds}$old_archive_cmds\""
++ eval concat_cmds=\"\${concat_cmds}$old_archive_cmds\"
+ objlist=
+ len=$len0
+ fi
+@@ -8216,9 +9216,9 @@ EOF
+ RANLIB=$save_RANLIB
+ oldobjs=$objlist
+ if test "X$oldobjs" = "X" ; then
+- eval "cmds=\"\$concat_cmds\""
++ eval cmds=\"\$concat_cmds\"
+ else
+- eval "cmds=\"\$concat_cmds~\$old_archive_cmds\""
++ eval cmds=\"\$concat_cmds~\$old_archive_cmds\"
+ fi
+ fi
+ fi
+@@ -8268,12 +9268,23 @@ EOF
+ *.la)
+ func_basename "$deplib"
+ name="$func_basename_result"
+- libdir=`${SED} -n -e 's/^libdir=\(.*\)$/\1/p' $deplib`
++ func_resolve_sysroot "$deplib"
++ eval libdir=`${SED} -n -e 's/^libdir=\(.*\)$/\1/p' $func_resolve_sysroot_result`
+ test -z "$libdir" && \
+ func_fatal_error "\`$deplib' is not a valid libtool archive"
+- newdependency_libs="$newdependency_libs $libdir/$name"
++ func_append newdependency_libs " ${lt_sysroot:+=}$libdir/$name"
++ ;;
++ -L*)
++ func_stripname -L '' "$deplib"
++ func_replace_sysroot "$func_stripname_result"
++ func_append newdependency_libs " -L$func_replace_sysroot_result"
+ ;;
+- *) newdependency_libs="$newdependency_libs $deplib" ;;
++ -R*)
++ func_stripname -R '' "$deplib"
++ func_replace_sysroot "$func_stripname_result"
++ func_append newdependency_libs " -R$func_replace_sysroot_result"
++ ;;
++ *) func_append newdependency_libs " $deplib" ;;
+ esac
+ done
+ dependency_libs="$newdependency_libs"
+@@ -8284,12 +9295,14 @@ EOF
+ *.la)
+ func_basename "$lib"
+ name="$func_basename_result"
+- libdir=`${SED} -n -e 's/^libdir=\(.*\)$/\1/p' $lib`
++ func_resolve_sysroot "$lib"
++ eval libdir=`${SED} -n -e 's/^libdir=\(.*\)$/\1/p' $func_resolve_sysroot_result`
++
+ test -z "$libdir" && \
+ func_fatal_error "\`$lib' is not a valid libtool archive"
+- newdlfiles="$newdlfiles $libdir/$name"
++ func_append newdlfiles " ${lt_sysroot:+=}$libdir/$name"
+ ;;
+- *) newdlfiles="$newdlfiles $lib" ;;
++ *) func_append newdlfiles " $lib" ;;
+ esac
+ done
+ dlfiles="$newdlfiles"
+@@ -8303,10 +9316,11 @@ EOF
+ # the library:
+ func_basename "$lib"
+ name="$func_basename_result"
+- libdir=`${SED} -n -e 's/^libdir=\(.*\)$/\1/p' $lib`
++ func_resolve_sysroot "$lib"
++ eval libdir=`${SED} -n -e 's/^libdir=\(.*\)$/\1/p' $func_resolve_sysroot_result`
+ test -z "$libdir" && \
+ func_fatal_error "\`$lib' is not a valid libtool archive"
+- newdlprefiles="$newdlprefiles $libdir/$name"
++ func_append newdlprefiles " ${lt_sysroot:+=}$libdir/$name"
+ ;;
+ esac
+ done
+@@ -8318,7 +9332,7 @@ EOF
+ [\\/]* | [A-Za-z]:[\\/]*) abs="$lib" ;;
+ *) abs=`pwd`"/$lib" ;;
+ esac
+- newdlfiles="$newdlfiles $abs"
++ func_append newdlfiles " $abs"
+ done
+ dlfiles="$newdlfiles"
+ newdlprefiles=
+@@ -8327,7 +9341,7 @@ EOF
+ [\\/]* | [A-Za-z]:[\\/]*) abs="$lib" ;;
+ *) abs=`pwd`"/$lib" ;;
+ esac
+- newdlprefiles="$newdlprefiles $abs"
++ func_append newdlprefiles " $abs"
+ done
+ dlprefiles="$newdlprefiles"
+ fi
+@@ -8412,7 +9426,7 @@ relink_command=\"$relink_command\""
+ exit $EXIT_SUCCESS
+ }
+
+-{ test "$mode" = link || test "$mode" = relink; } &&
++{ test "$opt_mode" = link || test "$opt_mode" = relink; } &&
+ func_mode_link ${1+"$@"}
+
+
+@@ -8432,9 +9446,9 @@ func_mode_uninstall ()
+ for arg
+ do
+ case $arg in
+- -f) RM="$RM $arg"; rmforce=yes ;;
+- -*) RM="$RM $arg" ;;
+- *) files="$files $arg" ;;
++ -f) func_append RM " $arg"; rmforce=yes ;;
++ -*) func_append RM " $arg" ;;
++ *) func_append files " $arg" ;;
+ esac
+ done
+
+@@ -8443,24 +9457,23 @@ func_mode_uninstall ()
+
+ rmdirs=
+
+- origobjdir="$objdir"
+ for file in $files; do
+ func_dirname "$file" "" "."
+ dir="$func_dirname_result"
+ if test "X$dir" = X.; then
+- objdir="$origobjdir"
++ odir="$objdir"
+ else
+- objdir="$dir/$origobjdir"
++ odir="$dir/$objdir"
+ fi
+ func_basename "$file"
+ name="$func_basename_result"
+- test "$mode" = uninstall && objdir="$dir"
++ test "$opt_mode" = uninstall && odir="$dir"
+
+- # Remember objdir for removal later, being careful to avoid duplicates
+- if test "$mode" = clean; then
++ # Remember odir for removal later, being careful to avoid duplicates
++ if test "$opt_mode" = clean; then
+ case " $rmdirs " in
+- *" $objdir "*) ;;
+- *) rmdirs="$rmdirs $objdir" ;;
++ *" $odir "*) ;;
++ *) func_append rmdirs " $odir" ;;
+ esac
+ fi
+
+@@ -8486,18 +9499,17 @@ func_mode_uninstall ()
+
+ # Delete the libtool libraries and symlinks.
+ for n in $library_names; do
+- rmfiles="$rmfiles $objdir/$n"
++ func_append rmfiles " $odir/$n"
+ done
+- test -n "$old_library" && rmfiles="$rmfiles $objdir/$old_library"
++ test -n "$old_library" && func_append rmfiles " $odir/$old_library"
+
+- case "$mode" in
++ case "$opt_mode" in
+ clean)
+- case " $library_names " in
+- # " " in the beginning catches empty $dlname
++ case " $library_names " in
+ *" $dlname "*) ;;
+- *) rmfiles="$rmfiles $objdir/$dlname" ;;
++ *) test -n "$dlname" && func_append rmfiles " $odir/$dlname" ;;
+ esac
+- test -n "$libdir" && rmfiles="$rmfiles $objdir/$name $objdir/${name}i"
++ test -n "$libdir" && func_append rmfiles " $odir/$name $odir/${name}i"
+ ;;
+ uninstall)
+ if test -n "$library_names"; then
+@@ -8525,19 +9537,19 @@ func_mode_uninstall ()
+ # Add PIC object to the list of files to remove.
+ if test -n "$pic_object" &&
+ test "$pic_object" != none; then
+- rmfiles="$rmfiles $dir/$pic_object"
++ func_append rmfiles " $dir/$pic_object"
+ fi
+
+ # Add non-PIC object to the list of files to remove.
+ if test -n "$non_pic_object" &&
+ test "$non_pic_object" != none; then
+- rmfiles="$rmfiles $dir/$non_pic_object"
++ func_append rmfiles " $dir/$non_pic_object"
+ fi
+ fi
+ ;;
+
+ *)
+- if test "$mode" = clean ; then
++ if test "$opt_mode" = clean ; then
+ noexename=$name
+ case $file in
+ *.exe)
+@@ -8547,7 +9559,7 @@ func_mode_uninstall ()
+ noexename=$func_stripname_result
+ # $file with .exe has already been added to rmfiles,
+ # add $file without .exe
+- rmfiles="$rmfiles $file"
++ func_append rmfiles " $file"
+ ;;
+ esac
+ # Do a test to see if this is a libtool program.
+@@ -8556,7 +9568,7 @@ func_mode_uninstall ()
+ func_ltwrapper_scriptname "$file"
+ relink_command=
+ func_source $func_ltwrapper_scriptname_result
+- rmfiles="$rmfiles $func_ltwrapper_scriptname_result"
++ func_append rmfiles " $func_ltwrapper_scriptname_result"
+ else
+ relink_command=
+ func_source $dir/$noexename
+@@ -8564,12 +9576,12 @@ func_mode_uninstall ()
+
+ # note $name still contains .exe if it was in $file originally
+ # as does the version of $file that was added into $rmfiles
+- rmfiles="$rmfiles $objdir/$name $objdir/${name}S.${objext}"
++ func_append rmfiles " $odir/$name $odir/${name}S.${objext}"
+ if test "$fast_install" = yes && test -n "$relink_command"; then
+- rmfiles="$rmfiles $objdir/lt-$name"
++ func_append rmfiles " $odir/lt-$name"
+ fi
+ if test "X$noexename" != "X$name" ; then
+- rmfiles="$rmfiles $objdir/lt-${noexename}.c"
++ func_append rmfiles " $odir/lt-${noexename}.c"
+ fi
+ fi
+ fi
+@@ -8577,7 +9589,6 @@ func_mode_uninstall ()
+ esac
+ func_show_eval "$RM $rmfiles" 'exit_status=1'
+ done
+- objdir="$origobjdir"
+
+ # Try to remove the ${objdir}s in the directories where we deleted files
+ for dir in $rmdirs; do
+@@ -8589,16 +9600,16 @@ func_mode_uninstall ()
+ exit $exit_status
+ }
+
+-{ test "$mode" = uninstall || test "$mode" = clean; } &&
++{ test "$opt_mode" = uninstall || test "$opt_mode" = clean; } &&
+ func_mode_uninstall ${1+"$@"}
+
+-test -z "$mode" && {
++test -z "$opt_mode" && {
+ help="$generic_help"
+ func_fatal_help "you must specify a MODE"
+ }
+
+ test -z "$exec_cmd" && \
+- func_fatal_help "invalid operation mode \`$mode'"
++ func_fatal_help "invalid operation mode \`$opt_mode'"
+
+ if test -n "$exec_cmd"; then
+ eval exec "$exec_cmd"
+diff --git a/ltoptions.m4 b/ltoptions.m4
+index 5ef12ced2a8..17cfd51c0b3 100644
+--- a/ltoptions.m4
++++ b/ltoptions.m4
+@@ -8,7 +8,7 @@
+ # unlimited permission to copy and/or distribute it, with or without
+ # modifications, as long as this notice is preserved.
+
+-# serial 6 ltoptions.m4
++# serial 7 ltoptions.m4
+
+ # This is to help aclocal find these macros, as it can't see m4_define.
+ AC_DEFUN([LTOPTIONS_VERSION], [m4_if([1])])
+diff --git a/ltversion.m4 b/ltversion.m4
+index bf87f77132d..9c7b5d41185 100644
+--- a/ltversion.m4
++++ b/ltversion.m4
+@@ -7,17 +7,17 @@
+ # unlimited permission to copy and/or distribute it, with or without
+ # modifications, as long as this notice is preserved.
+
+-# Generated from ltversion.in.
++# @configure_input@
+
+-# serial 3134 ltversion.m4
++# serial 3293 ltversion.m4
+ # This file is part of GNU Libtool
+
+-m4_define([LT_PACKAGE_VERSION], [2.2.7a])
+-m4_define([LT_PACKAGE_REVISION], [1.3134])
++m4_define([LT_PACKAGE_VERSION], [2.4])
++m4_define([LT_PACKAGE_REVISION], [1.3293])
+
+ AC_DEFUN([LTVERSION_VERSION],
+-[macro_version='2.2.7a'
+-macro_revision='1.3134'
++[macro_version='2.4'
++macro_revision='1.3293'
+ _LT_DECL(, macro_version, 0, [Which release of libtool.m4 was used?])
+ _LT_DECL(, macro_revision, 0)
+ ])
+diff --git a/lt~obsolete.m4 b/lt~obsolete.m4
+index bf92b5e0790..c573da90c5c 100644
+--- a/lt~obsolete.m4
++++ b/lt~obsolete.m4
+@@ -7,7 +7,7 @@
+ # unlimited permission to copy and/or distribute it, with or without
+ # modifications, as long as this notice is preserved.
+
+-# serial 4 lt~obsolete.m4
++# serial 5 lt~obsolete.m4
+
+ # These exist entirely to fool aclocal when bootstrapping libtool.
+ #
+diff --git a/opcodes/Makefile.in b/opcodes/Makefile.in
+index 245479140c9..bfed90449bf 100644
+--- a/opcodes/Makefile.in
++++ b/opcodes/Makefile.in
+@@ -298,6 +298,7 @@ CYGPATH_W = @CYGPATH_W@
+ DATADIRNAME = @DATADIRNAME@
+ DEFS = @DEFS@
+ DEPDIR = @DEPDIR@
++DLLTOOL = @DLLTOOL@
+ DSYMUTIL = @DSYMUTIL@
+ DUMPBIN = @DUMPBIN@
+ ECHO_C = @ECHO_C@
+@@ -336,6 +337,7 @@ LTLIBINTL = @LTLIBINTL@
+ LTLIBOBJS = @LTLIBOBJS@
+ MAINT = @MAINT@
+ MAKEINFO = @MAKEINFO@
++MANIFEST_TOOL = @MANIFEST_TOOL@
+ MKDIR_P = @MKDIR_P@
+ MKINSTALLDIRS = @MKINSTALLDIRS@
+ MSGFMT = @MSGFMT@
+@@ -374,6 +376,7 @@ abs_builddir = @abs_builddir@
+ abs_srcdir = @abs_srcdir@
+ abs_top_builddir = @abs_top_builddir@
+ abs_top_srcdir = @abs_top_srcdir@
++ac_ct_AR = @ac_ct_AR@
+ ac_ct_CC = @ac_ct_CC@
+ ac_ct_DUMPBIN = @ac_ct_DUMPBIN@
+ am__include = @am__include@
+diff --git a/opcodes/configure b/opcodes/configure
+index 1660c3266a3..a1b84ed81f9 100755
+--- a/opcodes/configure
++++ b/opcodes/configure
+@@ -688,6 +688,9 @@ OTOOL
+ LIPO
+ NMEDIT
+ DSYMUTIL
++MANIFEST_TOOL
++ac_ct_AR
++DLLTOOL
+ OBJDUMP
+ LN_S
+ NM
+@@ -806,6 +809,7 @@ enable_static
+ with_pic
+ enable_fast_install
+ with_gnu_ld
++with_libtool_sysroot
+ enable_libtool_lock
+ enable_checking
+ enable_targets
+@@ -1487,6 +1491,8 @@ Optional Packages:
+ --with-libintl-prefix[=DIR] search for libintl in DIR/include and DIR/lib
+ --without-libintl-prefix don't search for libintl in includedir and libdir
+ --with-libintl-type=TYPE type of library to search for (auto/static/shared)
++ --with-libtool-sysroot=DIR Search for dependent libraries within DIR
++ (or the compiler's sysroot if not specified).
+
+ Some influential environment variables:
+ CC C compiler command
+@@ -4997,8 +5003,8 @@ esac
+
+
+
+-macro_version='2.2.7a'
+-macro_revision='1.3134'
++macro_version='2.4'
++macro_revision='1.3293'
+
+
+
+@@ -5038,7 +5044,7 @@ ECHO=$ECHO$ECHO$ECHO$ECHO$ECHO$ECHO
+ { $as_echo "$as_me:${as_lineno-$LINENO}: checking how to print strings" >&5
+ $as_echo_n "checking how to print strings... " >&6; }
+ # Test print first, because it will be a builtin if present.
+-if test "X`print -r -- -n 2>/dev/null`" = X-n && \
++if test "X`( print -r -- -n ) 2>/dev/null`" = X-n && \
+ test "X`print -r -- $ECHO 2>/dev/null`" = "X$ECHO"; then
+ ECHO='print -r --'
+ elif test "X`printf %s $ECHO 2>/dev/null`" = "X$ECHO"; then
+@@ -5731,8 +5737,8 @@ $as_echo_n "checking whether the shell understands some XSI constructs... " >&6;
+ # Try some XSI features
+ xsi_shell=no
+ ( _lt_dummy="a/b/c"
+- test "${_lt_dummy##*/},${_lt_dummy%/*},"${_lt_dummy%"$_lt_dummy"}, \
+- = c,a/b,, \
++ test "${_lt_dummy##*/},${_lt_dummy%/*},${_lt_dummy#??}"${_lt_dummy%"$_lt_dummy"}, \
++ = c,a/b,b/c, \
+ && eval 'test $(( 1 + 1 )) -eq 2 \
+ && test "${#_lt_dummy}" -eq 5' ) >/dev/null 2>&1 \
+ && xsi_shell=yes
+@@ -5781,6 +5787,80 @@ esac
+
+
+
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to convert $build file names to $host format" >&5
++$as_echo_n "checking how to convert $build file names to $host format... " >&6; }
++if ${lt_cv_to_host_file_cmd+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ case $host in
++ *-*-mingw* )
++ case $build in
++ *-*-mingw* ) # actually msys
++ lt_cv_to_host_file_cmd=func_convert_file_msys_to_w32
++ ;;
++ *-*-cygwin* )
++ lt_cv_to_host_file_cmd=func_convert_file_cygwin_to_w32
++ ;;
++ * ) # otherwise, assume *nix
++ lt_cv_to_host_file_cmd=func_convert_file_nix_to_w32
++ ;;
++ esac
++ ;;
++ *-*-cygwin* )
++ case $build in
++ *-*-mingw* ) # actually msys
++ lt_cv_to_host_file_cmd=func_convert_file_msys_to_cygwin
++ ;;
++ *-*-cygwin* )
++ lt_cv_to_host_file_cmd=func_convert_file_noop
++ ;;
++ * ) # otherwise, assume *nix
++ lt_cv_to_host_file_cmd=func_convert_file_nix_to_cygwin
++ ;;
++ esac
++ ;;
++ * ) # unhandled hosts (and "normal" native builds)
++ lt_cv_to_host_file_cmd=func_convert_file_noop
++ ;;
++esac
++
++fi
++
++to_host_file_cmd=$lt_cv_to_host_file_cmd
++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_to_host_file_cmd" >&5
++$as_echo "$lt_cv_to_host_file_cmd" >&6; }
++
++
++
++
++
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to convert $build file names to toolchain format" >&5
++$as_echo_n "checking how to convert $build file names to toolchain format... " >&6; }
++if ${lt_cv_to_tool_file_cmd+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ #assume ordinary cross tools, or native build.
++lt_cv_to_tool_file_cmd=func_convert_file_noop
++case $host in
++ *-*-mingw* )
++ case $build in
++ *-*-mingw* ) # actually msys
++ lt_cv_to_tool_file_cmd=func_convert_file_msys_to_w32
++ ;;
++ esac
++ ;;
++esac
++
++fi
++
++to_tool_file_cmd=$lt_cv_to_tool_file_cmd
++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_to_tool_file_cmd" >&5
++$as_echo "$lt_cv_to_tool_file_cmd" >&6; }
++
++
++
++
++
+ { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $LD option to reload object files" >&5
+ $as_echo_n "checking for $LD option to reload object files... " >&6; }
+ if ${lt_cv_ld_reload_flag+:} false; then :
+@@ -5797,6 +5877,11 @@ case $reload_flag in
+ esac
+ reload_cmds='$LD$reload_flag -o $output$reload_objs'
+ case $host_os in
++ cygwin* | mingw* | pw32* | cegcc*)
++ if test "$GCC" != yes; then
++ reload_cmds=false
++ fi
++ ;;
+ darwin*)
+ if test "$GCC" = yes; then
+ reload_cmds='$LTCC $LTCFLAGS -nostdlib ${wl}-r -o $output$reload_objs'
+@@ -5965,7 +6050,8 @@ mingw* | pw32*)
+ lt_cv_deplibs_check_method='file_magic ^x86 archive import|^x86 DLL'
+ lt_cv_file_magic_cmd='func_win32_libid'
+ else
+- lt_cv_deplibs_check_method='file_magic file format pei*-i386(.*architecture: i386)?'
++ # Keep this pattern in sync with the one in func_win32_libid.
++ lt_cv_deplibs_check_method='file_magic file format (pei*-i386(.*architecture: i386)?|pe-arm-wince|pe-x86-64)'
+ lt_cv_file_magic_cmd='$OBJDUMP -f'
+ fi
+ ;;
+@@ -6124,6 +6210,21 @@ esac
+ fi
+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_deplibs_check_method" >&5
+ $as_echo "$lt_cv_deplibs_check_method" >&6; }
++
++file_magic_glob=
++want_nocaseglob=no
++if test "$build" = "$host"; then
++ case $host_os in
++ mingw* | pw32*)
++ if ( shopt | grep nocaseglob ) >/dev/null 2>&1; then
++ want_nocaseglob=yes
++ else
++ file_magic_glob=`echo aAbBcCdDeEfFgGhHiIjJkKlLmMnNoOpPqQrRsStTuUvVwWxXyYzZ | $SED -e "s/\(..\)/s\/[\1]\/[\1]\/g;/g"`
++ fi
++ ;;
++ esac
++fi
++
+ file_magic_cmd=$lt_cv_file_magic_cmd
+ deplibs_check_method=$lt_cv_deplibs_check_method
+ test -z "$deplibs_check_method" && deplibs_check_method=unknown
+@@ -6139,6 +6240,157 @@ test -z "$deplibs_check_method" && deplibs_check_method=unknown
+
+
+
++
++
++
++
++
++
++
++
++
++
++if test -n "$ac_tool_prefix"; then
++ # Extract the first word of "${ac_tool_prefix}dlltool", so it can be a program name with args.
++set dummy ${ac_tool_prefix}dlltool; ac_word=$2
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
++$as_echo_n "checking for $ac_word... " >&6; }
++if ${ac_cv_prog_DLLTOOL+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ if test -n "$DLLTOOL"; then
++ ac_cv_prog_DLLTOOL="$DLLTOOL" # Let the user override the test.
++else
++as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
++for as_dir in $PATH
++do
++ IFS=$as_save_IFS
++ test -z "$as_dir" && as_dir=.
++ for ac_exec_ext in '' $ac_executable_extensions; do
++ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
++ ac_cv_prog_DLLTOOL="${ac_tool_prefix}dlltool"
++ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
++ break 2
++ fi
++done
++ done
++IFS=$as_save_IFS
++
++fi
++fi
++DLLTOOL=$ac_cv_prog_DLLTOOL
++if test -n "$DLLTOOL"; then
++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $DLLTOOL" >&5
++$as_echo "$DLLTOOL" >&6; }
++else
++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
++$as_echo "no" >&6; }
++fi
++
++
++fi
++if test -z "$ac_cv_prog_DLLTOOL"; then
++ ac_ct_DLLTOOL=$DLLTOOL
++ # Extract the first word of "dlltool", so it can be a program name with args.
++set dummy dlltool; ac_word=$2
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
++$as_echo_n "checking for $ac_word... " >&6; }
++if ${ac_cv_prog_ac_ct_DLLTOOL+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ if test -n "$ac_ct_DLLTOOL"; then
++ ac_cv_prog_ac_ct_DLLTOOL="$ac_ct_DLLTOOL" # Let the user override the test.
++else
++as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
++for as_dir in $PATH
++do
++ IFS=$as_save_IFS
++ test -z "$as_dir" && as_dir=.
++ for ac_exec_ext in '' $ac_executable_extensions; do
++ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
++ ac_cv_prog_ac_ct_DLLTOOL="dlltool"
++ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
++ break 2
++ fi
++done
++ done
++IFS=$as_save_IFS
++
++fi
++fi
++ac_ct_DLLTOOL=$ac_cv_prog_ac_ct_DLLTOOL
++if test -n "$ac_ct_DLLTOOL"; then
++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_DLLTOOL" >&5
++$as_echo "$ac_ct_DLLTOOL" >&6; }
++else
++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
++$as_echo "no" >&6; }
++fi
++
++ if test "x$ac_ct_DLLTOOL" = x; then
++ DLLTOOL="false"
++ else
++ case $cross_compiling:$ac_tool_warned in
++yes:)
++{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
++$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
++ac_tool_warned=yes ;;
++esac
++ DLLTOOL=$ac_ct_DLLTOOL
++ fi
++else
++ DLLTOOL="$ac_cv_prog_DLLTOOL"
++fi
++
++test -z "$DLLTOOL" && DLLTOOL=dlltool
++
++
++
++
++
++
++
++
++
++
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to associate runtime and link libraries" >&5
++$as_echo_n "checking how to associate runtime and link libraries... " >&6; }
++if ${lt_cv_sharedlib_from_linklib_cmd+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ lt_cv_sharedlib_from_linklib_cmd='unknown'
++
++case $host_os in
++cygwin* | mingw* | pw32* | cegcc*)
++ # two different shell functions defined in ltmain.sh
++ # decide which to use based on capabilities of $DLLTOOL
++ case `$DLLTOOL --help 2>&1` in
++ *--identify-strict*)
++ lt_cv_sharedlib_from_linklib_cmd=func_cygming_dll_for_implib
++ ;;
++ *)
++ lt_cv_sharedlib_from_linklib_cmd=func_cygming_dll_for_implib_fallback
++ ;;
++ esac
++ ;;
++*)
++ # fallback: assume linklib IS sharedlib
++ lt_cv_sharedlib_from_linklib_cmd="$ECHO"
++ ;;
++esac
++
++fi
++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_sharedlib_from_linklib_cmd" >&5
++$as_echo "$lt_cv_sharedlib_from_linklib_cmd" >&6; }
++sharedlib_from_linklib_cmd=$lt_cv_sharedlib_from_linklib_cmd
++test -z "$sharedlib_from_linklib_cmd" && sharedlib_from_linklib_cmd=$ECHO
++
++
++
++
++
++
++
+ plugin_option=
+ plugin_names="liblto_plugin.so liblto_plugin-0.dll cyglto_plugin-0.dll"
+ for plugin in $plugin_names; do
+@@ -6153,8 +6405,10 @@ for plugin in $plugin_names; do
+ done
+
+ if test -n "$ac_tool_prefix"; then
+- # Extract the first word of "${ac_tool_prefix}ar", so it can be a program name with args.
+-set dummy ${ac_tool_prefix}ar; ac_word=$2
++ for ac_prog in ar
++ do
++ # Extract the first word of "$ac_tool_prefix$ac_prog", so it can be a program name with args.
++set dummy $ac_tool_prefix$ac_prog; ac_word=$2
+ { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+ $as_echo_n "checking for $ac_word... " >&6; }
+ if ${ac_cv_prog_AR+:} false; then :
+@@ -6170,7 +6424,7 @@ do
+ test -z "$as_dir" && as_dir=.
+ for ac_exec_ext in '' $ac_executable_extensions; do
+ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
+- ac_cv_prog_AR="${ac_tool_prefix}ar"
++ ac_cv_prog_AR="$ac_tool_prefix$ac_prog"
+ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+ break 2
+ fi
+@@ -6190,11 +6444,15 @@ $as_echo "no" >&6; }
+ fi
+
+
++ test -n "$AR" && break
++ done
+ fi
+-if test -z "$ac_cv_prog_AR"; then
++if test -z "$AR"; then
+ ac_ct_AR=$AR
+- # Extract the first word of "ar", so it can be a program name with args.
+-set dummy ar; ac_word=$2
++ for ac_prog in ar
++do
++ # Extract the first word of "$ac_prog", so it can be a program name with args.
++set dummy $ac_prog; ac_word=$2
+ { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+ $as_echo_n "checking for $ac_word... " >&6; }
+ if ${ac_cv_prog_ac_ct_AR+:} false; then :
+@@ -6210,7 +6468,7 @@ do
+ test -z "$as_dir" && as_dir=.
+ for ac_exec_ext in '' $ac_executable_extensions; do
+ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
+- ac_cv_prog_ac_ct_AR="ar"
++ ac_cv_prog_ac_ct_AR="$ac_prog"
+ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+ break 2
+ fi
+@@ -6229,6 +6487,10 @@ else
+ $as_echo "no" >&6; }
+ fi
+
++
++ test -n "$ac_ct_AR" && break
++done
++
+ if test "x$ac_ct_AR" = x; then
+ AR="false"
+ else
+@@ -6240,25 +6502,20 @@ ac_tool_warned=yes ;;
+ esac
+ AR=$ac_ct_AR
+ fi
+-else
+- AR="$ac_cv_prog_AR"
+ fi
+
+-test -z "$AR" && AR=ar
+-if test -n "$plugin_option"; then
+- if $AR --help 2>&1 | grep -q "\--plugin"; then
+- touch conftest.c
+- $AR $plugin_option rc conftest.a conftest.c
+- if test "$?" != 0; then
+- { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: Failed: $AR $plugin_option rc" >&5
++ touch conftest.c
++ $AR $plugin_option rc conftest.a conftest.c
++ if test "$?" != 0; then
++ { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: Failed: $AR $plugin_option rc" >&5
+ $as_echo "$as_me: WARNING: Failed: $AR $plugin_option rc" >&2;}
+- else
+- AR="$AR $plugin_option"
+- fi
+- rm -f conftest.*
++ else
++ AR="$AR $plugin_option"
+ fi
+-fi
+-test -z "$AR_FLAGS" && AR_FLAGS=cru
++ rm -f conftest.*
++: ${AR=ar}
++: ${AR_FLAGS=cru}
++
+
+
+
+@@ -6269,6 +6526,63 @@ test -z "$AR_FLAGS" && AR_FLAGS=cru
+
+
+
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for archiver @FILE support" >&5
++$as_echo_n "checking for archiver @FILE support... " >&6; }
++if ${lt_cv_ar_at_file+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ lt_cv_ar_at_file=no
++ cat confdefs.h - <<_ACEOF >conftest.$ac_ext
++/* end confdefs.h. */
++
++int
++main ()
++{
++
++ ;
++ return 0;
++}
++_ACEOF
++if ac_fn_c_try_compile "$LINENO"; then :
++ echo conftest.$ac_objext > conftest.lst
++ lt_ar_try='$AR $AR_FLAGS libconftest.a @conftest.lst >&5'
++ { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$lt_ar_try\""; } >&5
++ (eval $lt_ar_try) 2>&5
++ ac_status=$?
++ $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
++ test $ac_status = 0; }
++ if test "$ac_status" -eq 0; then
++ # Ensure the archiver fails upon bogus file names.
++ rm -f conftest.$ac_objext libconftest.a
++ { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$lt_ar_try\""; } >&5
++ (eval $lt_ar_try) 2>&5
++ ac_status=$?
++ $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
++ test $ac_status = 0; }
++ if test "$ac_status" -ne 0; then
++ lt_cv_ar_at_file=@
++ fi
++ fi
++ rm -f conftest.* libconftest.a
++
++fi
++rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
++
++fi
++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_ar_at_file" >&5
++$as_echo "$lt_cv_ar_at_file" >&6; }
++
++if test "x$lt_cv_ar_at_file" = xno; then
++ archiver_list_spec=
++else
++ archiver_list_spec=$lt_cv_ar_at_file
++fi
++
++
++
++
++
++
+
+ if test -n "$ac_tool_prefix"; then
+ # Extract the first word of "${ac_tool_prefix}strip", so it can be a program name with args.
+@@ -6609,8 +6923,8 @@ esac
+ lt_cv_sys_global_symbol_to_cdecl="sed -n -e 's/^T .* \(.*\)$/extern int \1();/p' -e 's/^$symcode* .* \(.*\)$/extern char \1;/p'"
+
+ # Transform an extracted symbol line into symbol name and symbol address
+-lt_cv_sys_global_symbol_to_c_name_address="sed -n -e 's/^: \([^ ]*\) $/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"\2\", (void *) \&\2},/p'"
+-lt_cv_sys_global_symbol_to_c_name_address_lib_prefix="sed -n -e 's/^: \([^ ]*\) $/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \(lib[^ ]*\)$/ {\"\2\", (void *) \&\2},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"lib\2\", (void *) \&\2},/p'"
++lt_cv_sys_global_symbol_to_c_name_address="sed -n -e 's/^: \([^ ]*\)[ ]*$/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"\2\", (void *) \&\2},/p'"
++lt_cv_sys_global_symbol_to_c_name_address_lib_prefix="sed -n -e 's/^: \([^ ]*\)[ ]*$/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \(lib[^ ]*\)$/ {\"\2\", (void *) \&\2},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"lib\2\", (void *) \&\2},/p'"
+
+ # Handle CRLF in mingw tool chain
+ opt_cr=
+@@ -6646,6 +6960,7 @@ for ac_symprfx in "" "_"; do
+ else
+ lt_cv_sys_global_symbol_pipe="sed -n -e 's/^.*[ ]\($symcode$symcode*\)[ ][ ]*$ac_symprfx$sympat$opt_cr$/$symxfrm/p'"
+ fi
++ lt_cv_sys_global_symbol_pipe="$lt_cv_sys_global_symbol_pipe | sed '/ __gnu_lto/d'"
+
+ # Check to see that the pipe works correctly.
+ pipe_works=no
+@@ -6687,6 +7002,18 @@ _LT_EOF
+ if $GREP ' nm_test_var$' "$nlist" >/dev/null; then
+ if $GREP ' nm_test_func$' "$nlist" >/dev/null; then
+ cat <<_LT_EOF > conftest.$ac_ext
++/* Keep this code in sync between libtool.m4, ltmain, lt_system.h, and tests. */
++#if defined(_WIN32) || defined(__CYGWIN__) || defined(_WIN32_WCE)
++/* DATA imports from DLLs on WIN32 con't be const, because runtime
++ relocations are performed -- see ld's documentation on pseudo-relocs. */
++# define LT_DLSYM_CONST
++#elif defined(__osf__)
++/* This system does not cope well with relocations in const data. */
++# define LT_DLSYM_CONST
++#else
++# define LT_DLSYM_CONST const
++#endif
++
+ #ifdef __cplusplus
+ extern "C" {
+ #endif
+@@ -6698,7 +7025,7 @@ _LT_EOF
+ cat <<_LT_EOF >> conftest.$ac_ext
+
+ /* The mapping between symbol names and symbols. */
+-const struct {
++LT_DLSYM_CONST struct {
+ const char *name;
+ void *address;
+ }
+@@ -6724,8 +7051,8 @@ static const void *lt_preloaded_setup() {
+ _LT_EOF
+ # Now try linking the two files.
+ mv conftest.$ac_objext conftstm.$ac_objext
+- lt_save_LIBS="$LIBS"
+- lt_save_CFLAGS="$CFLAGS"
++ lt_globsym_save_LIBS=$LIBS
++ lt_globsym_save_CFLAGS=$CFLAGS
+ LIBS="conftstm.$ac_objext"
+ CFLAGS="$CFLAGS$lt_prog_compiler_no_builtin_flag"
+ if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_link\""; } >&5
+@@ -6735,8 +7062,8 @@ _LT_EOF
+ test $ac_status = 0; } && test -s conftest${ac_exeext}; then
+ pipe_works=yes
+ fi
+- LIBS="$lt_save_LIBS"
+- CFLAGS="$lt_save_CFLAGS"
++ LIBS=$lt_globsym_save_LIBS
++ CFLAGS=$lt_globsym_save_CFLAGS
+ else
+ echo "cannot find nm_test_func in $nlist" >&5
+ fi
+@@ -6773,6 +7100,14 @@ else
+ $as_echo "ok" >&6; }
+ fi
+
++# Response file support.
++if test "$lt_cv_nm_interface" = "MS dumpbin"; then
++ nm_file_list_spec='@'
++elif $NM --help 2>/dev/null | grep '[@]FILE' >/dev/null; then
++ nm_file_list_spec='@'
++fi
++
++
+
+
+
+@@ -6791,6 +7126,47 @@ fi
+
+
+
++
++
++
++
++
++
++
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for sysroot" >&5
++$as_echo_n "checking for sysroot... " >&6; }
++
++# Check whether --with-libtool-sysroot was given.
++if test "${with_libtool_sysroot+set}" = set; then :
++ withval=$with_libtool_sysroot;
++else
++ with_libtool_sysroot=no
++fi
++
++
++lt_sysroot=
++case ${with_libtool_sysroot} in #(
++ yes)
++ if test "$GCC" = yes; then
++ lt_sysroot=`$CC --print-sysroot 2>/dev/null`
++ fi
++ ;; #(
++ /*)
++ lt_sysroot=`echo "$with_libtool_sysroot" | sed -e "$sed_quote_subst"`
++ ;; #(
++ no|'')
++ ;; #(
++ *)
++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: ${with_libtool_sysroot}" >&5
++$as_echo "${with_libtool_sysroot}" >&6; }
++ as_fn_error $? "The sysroot must be an absolute path." "$LINENO" 5
++ ;;
++esac
++
++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: ${lt_sysroot:-no}" >&5
++$as_echo "${lt_sysroot:-no}" >&6; }
++
++
+
+
+
+@@ -7000,6 +7376,123 @@ esac
+
+ need_locks="$enable_libtool_lock"
+
++if test -n "$ac_tool_prefix"; then
++ # Extract the first word of "${ac_tool_prefix}mt", so it can be a program name with args.
++set dummy ${ac_tool_prefix}mt; ac_word=$2
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
++$as_echo_n "checking for $ac_word... " >&6; }
++if ${ac_cv_prog_MANIFEST_TOOL+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ if test -n "$MANIFEST_TOOL"; then
++ ac_cv_prog_MANIFEST_TOOL="$MANIFEST_TOOL" # Let the user override the test.
++else
++as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
++for as_dir in $PATH
++do
++ IFS=$as_save_IFS
++ test -z "$as_dir" && as_dir=.
++ for ac_exec_ext in '' $ac_executable_extensions; do
++ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
++ ac_cv_prog_MANIFEST_TOOL="${ac_tool_prefix}mt"
++ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
++ break 2
++ fi
++done
++ done
++IFS=$as_save_IFS
++
++fi
++fi
++MANIFEST_TOOL=$ac_cv_prog_MANIFEST_TOOL
++if test -n "$MANIFEST_TOOL"; then
++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $MANIFEST_TOOL" >&5
++$as_echo "$MANIFEST_TOOL" >&6; }
++else
++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
++$as_echo "no" >&6; }
++fi
++
++
++fi
++if test -z "$ac_cv_prog_MANIFEST_TOOL"; then
++ ac_ct_MANIFEST_TOOL=$MANIFEST_TOOL
++ # Extract the first word of "mt", so it can be a program name with args.
++set dummy mt; ac_word=$2
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
++$as_echo_n "checking for $ac_word... " >&6; }
++if ${ac_cv_prog_ac_ct_MANIFEST_TOOL+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ if test -n "$ac_ct_MANIFEST_TOOL"; then
++ ac_cv_prog_ac_ct_MANIFEST_TOOL="$ac_ct_MANIFEST_TOOL" # Let the user override the test.
++else
++as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
++for as_dir in $PATH
++do
++ IFS=$as_save_IFS
++ test -z "$as_dir" && as_dir=.
++ for ac_exec_ext in '' $ac_executable_extensions; do
++ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
++ ac_cv_prog_ac_ct_MANIFEST_TOOL="mt"
++ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
++ break 2
++ fi
++done
++ done
++IFS=$as_save_IFS
++
++fi
++fi
++ac_ct_MANIFEST_TOOL=$ac_cv_prog_ac_ct_MANIFEST_TOOL
++if test -n "$ac_ct_MANIFEST_TOOL"; then
++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_MANIFEST_TOOL" >&5
++$as_echo "$ac_ct_MANIFEST_TOOL" >&6; }
++else
++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
++$as_echo "no" >&6; }
++fi
++
++ if test "x$ac_ct_MANIFEST_TOOL" = x; then
++ MANIFEST_TOOL=":"
++ else
++ case $cross_compiling:$ac_tool_warned in
++yes:)
++{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
++$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
++ac_tool_warned=yes ;;
++esac
++ MANIFEST_TOOL=$ac_ct_MANIFEST_TOOL
++ fi
++else
++ MANIFEST_TOOL="$ac_cv_prog_MANIFEST_TOOL"
++fi
++
++test -z "$MANIFEST_TOOL" && MANIFEST_TOOL=mt
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking if $MANIFEST_TOOL is a manifest tool" >&5
++$as_echo_n "checking if $MANIFEST_TOOL is a manifest tool... " >&6; }
++if ${lt_cv_path_mainfest_tool+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ lt_cv_path_mainfest_tool=no
++ echo "$as_me:$LINENO: $MANIFEST_TOOL '-?'" >&5
++ $MANIFEST_TOOL '-?' 2>conftest.err > conftest.out
++ cat conftest.err >&5
++ if $GREP 'Manifest Tool' conftest.out > /dev/null; then
++ lt_cv_path_mainfest_tool=yes
++ fi
++ rm -f conftest*
++fi
++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_path_mainfest_tool" >&5
++$as_echo "$lt_cv_path_mainfest_tool" >&6; }
++if test "x$lt_cv_path_mainfest_tool" != xyes; then
++ MANIFEST_TOOL=:
++fi
++
++
++
++
++
+
+ case $host_os in
+ rhapsody* | darwin*)
+@@ -7563,6 +8056,8 @@ _LT_EOF
+ $LTCC $LTCFLAGS -c -o conftest.o conftest.c 2>&5
+ echo "$AR cru libconftest.a conftest.o" >&5
+ $AR cru libconftest.a conftest.o 2>&5
++ echo "$RANLIB libconftest.a" >&5
++ $RANLIB libconftest.a 2>&5
+ cat > conftest.c << _LT_EOF
+ int main() { return 0;}
+ _LT_EOF
+@@ -8117,8 +8612,6 @@ fi
+ lt_prog_compiler_pic=
+ lt_prog_compiler_static=
+
+-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $compiler option to produce PIC" >&5
+-$as_echo_n "checking for $compiler option to produce PIC... " >&6; }
+
+ if test "$GCC" = yes; then
+ lt_prog_compiler_wl='-Wl,'
+@@ -8284,6 +8777,12 @@ $as_echo_n "checking for $compiler option to produce PIC... " >&6; }
+ lt_prog_compiler_pic='--shared'
+ lt_prog_compiler_static='--static'
+ ;;
++ nagfor*)
++ # NAG Fortran compiler
++ lt_prog_compiler_wl='-Wl,-Wl,,'
++ lt_prog_compiler_pic='-PIC'
++ lt_prog_compiler_static='-Bstatic'
++ ;;
+ pgcc* | pgf77* | pgf90* | pgf95* | pgfortran*)
+ # Portland Group compilers (*not* the Pentium gcc compiler,
+ # which looks to be a dead project)
+@@ -8346,7 +8845,7 @@ $as_echo_n "checking for $compiler option to produce PIC... " >&6; }
+ lt_prog_compiler_pic='-KPIC'
+ lt_prog_compiler_static='-Bstatic'
+ case $cc_basename in
+- f77* | f90* | f95*)
++ f77* | f90* | f95* | sunf77* | sunf90* | sunf95*)
+ lt_prog_compiler_wl='-Qoption ld ';;
+ *)
+ lt_prog_compiler_wl='-Wl,';;
+@@ -8403,13 +8902,17 @@ case $host_os in
+ lt_prog_compiler_pic="$lt_prog_compiler_pic -DPIC"
+ ;;
+ esac
+-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_prog_compiler_pic" >&5
+-$as_echo "$lt_prog_compiler_pic" >&6; }
+-
+-
+-
+-
+
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $compiler option to produce PIC" >&5
++$as_echo_n "checking for $compiler option to produce PIC... " >&6; }
++if ${lt_cv_prog_compiler_pic+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ lt_cv_prog_compiler_pic=$lt_prog_compiler_pic
++fi
++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_pic" >&5
++$as_echo "$lt_cv_prog_compiler_pic" >&6; }
++lt_prog_compiler_pic=$lt_cv_prog_compiler_pic
+
+ #
+ # Check to make sure the PIC flag actually works.
+@@ -8470,6 +8973,11 @@ fi
+
+
+
++
++
++
++
++
+ #
+ # Check to make sure the static flag actually works.
+ #
+@@ -8820,7 +9328,8 @@ _LT_EOF
+ allow_undefined_flag=unsupported
+ always_export_symbols=no
+ enable_shared_with_static_runtimes=yes
+- export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1 DATA/'\'' | $SED -e '\''/^[AITW][ ]/s/.*[ ]//'\'' | sort | uniq > $export_symbols'
++ export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1 DATA/;s/^.*[ ]__nm__\([^ ]*\)[ ][^ ]*/\1 DATA/;/^I[ ]/d;/^[AITW][ ]/s/.* //'\'' | sort | uniq > $export_symbols'
++ exclude_expsyms='[_]+GLOBAL_OFFSET_TABLE_|[_]+GLOBAL__[FID]_.*|[_]+head_[A-Za-z0-9_]+_dll|[A-Za-z0-9_]+_dll_iname'
+
+ if $LD --help 2>&1 | $GREP 'auto-import' > /dev/null; then
+ archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib'
+@@ -8919,12 +9428,12 @@ _LT_EOF
+ whole_archive_flag_spec='--whole-archive$convenience --no-whole-archive'
+ hardcode_libdir_flag_spec=
+ hardcode_libdir_flag_spec_ld='-rpath $libdir'
+- archive_cmds='$LD -shared $libobjs $deplibs $compiler_flags -soname $soname -o $lib'
++ archive_cmds='$LD -shared $libobjs $deplibs $linker_flags -soname $soname -o $lib'
+ if test "x$supports_anon_versioning" = xyes; then
+ archive_expsym_cmds='echo "{ global:" > $output_objdir/$libname.ver~
+ cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~
+ echo "local: *; };" >> $output_objdir/$libname.ver~
+- $LD -shared $libobjs $deplibs $compiler_flags -soname $soname -version-script $output_objdir/$libname.ver -o $lib'
++ $LD -shared $libobjs $deplibs $linker_flags -soname $soname -version-script $output_objdir/$libname.ver -o $lib'
+ fi
+ ;;
+ esac
+@@ -8938,8 +9447,8 @@ _LT_EOF
+ archive_cmds='$LD -Bshareable $libobjs $deplibs $linker_flags -o $lib'
+ wlarc=
+ else
+- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
+- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
++ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
++ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
+ fi
+ ;;
+
+@@ -8957,8 +9466,8 @@ _LT_EOF
+
+ _LT_EOF
+ elif $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then
+- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
+- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
++ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
++ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
+ else
+ ld_shlibs=no
+ fi
+@@ -9004,8 +9513,8 @@ _LT_EOF
+
+ *)
+ if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then
+- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
+- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
++ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
++ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
+ else
+ ld_shlibs=no
+ fi
+@@ -9135,7 +9644,13 @@ _LT_EOF
+ allow_undefined_flag='-berok'
+ # Determine the default libpath from the value encoded in an
+ # empty executable.
+- cat confdefs.h - <<_ACEOF >conftest.$ac_ext
++ if test "${lt_cv_aix_libpath+set}" = set; then
++ aix_libpath=$lt_cv_aix_libpath
++else
++ if ${lt_cv_aix_libpath_+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+ /* end confdefs.h. */
+
+ int
+@@ -9148,22 +9663,29 @@ main ()
+ _ACEOF
+ if ac_fn_c_try_link "$LINENO"; then :
+
+-lt_aix_libpath_sed='
+- /Import File Strings/,/^$/ {
+- /^0/ {
+- s/^0 *\(.*\)$/\1/
+- p
+- }
+- }'
+-aix_libpath=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
+-# Check for a 64-bit object if we didn't find anything.
+-if test -z "$aix_libpath"; then
+- aix_libpath=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
+-fi
++ lt_aix_libpath_sed='
++ /Import File Strings/,/^$/ {
++ /^0/ {
++ s/^0 *\([^ ]*\) *$/\1/
++ p
++ }
++ }'
++ lt_cv_aix_libpath_=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
++ # Check for a 64-bit object if we didn't find anything.
++ if test -z "$lt_cv_aix_libpath_"; then
++ lt_cv_aix_libpath_=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
++ fi
+ fi
+ rm -f core conftest.err conftest.$ac_objext \
+ conftest$ac_exeext conftest.$ac_ext
+-if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
++ if test -z "$lt_cv_aix_libpath_"; then
++ lt_cv_aix_libpath_="/usr/lib:/lib"
++ fi
++
++fi
++
++ aix_libpath=$lt_cv_aix_libpath_
++fi
+
+ hardcode_libdir_flag_spec='${wl}-blibpath:$libdir:'"$aix_libpath"
+ archive_expsym_cmds='$CC -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags `if test "x${allow_undefined_flag}" != "x"; then func_echo_all "${wl}${allow_undefined_flag}"; else :; fi` '"\${wl}$exp_sym_flag:\$export_symbols $shared_flag"
+@@ -9175,7 +9697,13 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
+ else
+ # Determine the default libpath from the value encoded in an
+ # empty executable.
+- cat confdefs.h - <<_ACEOF >conftest.$ac_ext
++ if test "${lt_cv_aix_libpath+set}" = set; then
++ aix_libpath=$lt_cv_aix_libpath
++else
++ if ${lt_cv_aix_libpath_+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+ /* end confdefs.h. */
+
+ int
+@@ -9188,22 +9716,29 @@ main ()
+ _ACEOF
+ if ac_fn_c_try_link "$LINENO"; then :
+
+-lt_aix_libpath_sed='
+- /Import File Strings/,/^$/ {
+- /^0/ {
+- s/^0 *\(.*\)$/\1/
+- p
+- }
+- }'
+-aix_libpath=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
+-# Check for a 64-bit object if we didn't find anything.
+-if test -z "$aix_libpath"; then
+- aix_libpath=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
+-fi
++ lt_aix_libpath_sed='
++ /Import File Strings/,/^$/ {
++ /^0/ {
++ s/^0 *\([^ ]*\) *$/\1/
++ p
++ }
++ }'
++ lt_cv_aix_libpath_=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
++ # Check for a 64-bit object if we didn't find anything.
++ if test -z "$lt_cv_aix_libpath_"; then
++ lt_cv_aix_libpath_=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
++ fi
+ fi
+ rm -f core conftest.err conftest.$ac_objext \
+ conftest$ac_exeext conftest.$ac_ext
+-if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
++ if test -z "$lt_cv_aix_libpath_"; then
++ lt_cv_aix_libpath_="/usr/lib:/lib"
++ fi
++
++fi
++
++ aix_libpath=$lt_cv_aix_libpath_
++fi
+
+ hardcode_libdir_flag_spec='${wl}-blibpath:$libdir:'"$aix_libpath"
+ # Warning - without using the other run time loading flags,
+@@ -9248,20 +9783,63 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
+ # Microsoft Visual C++.
+ # hardcode_libdir_flag_spec is actually meaningless, as there is
+ # no search path for DLLs.
+- hardcode_libdir_flag_spec=' '
+- allow_undefined_flag=unsupported
+- # Tell ltmain to make .lib files, not .a files.
+- libext=lib
+- # Tell ltmain to make .dll files, not .so files.
+- shrext_cmds=".dll"
+- # FIXME: Setting linknames here is a bad hack.
+- archive_cmds='$CC -o $lib $libobjs $compiler_flags `func_echo_all "$deplibs" | $SED '\''s/ -lc$//'\''` -link -dll~linknames='
+- # The linker will automatically build a .lib file if we build a DLL.
+- old_archive_from_new_cmds='true'
+- # FIXME: Should let the user specify the lib program.
+- old_archive_cmds='lib -OUT:$oldlib$oldobjs$old_deplibs'
+- fix_srcfile_path='`cygpath -w "$srcfile"`'
+- enable_shared_with_static_runtimes=yes
++ case $cc_basename in
++ cl*)
++ # Native MSVC
++ hardcode_libdir_flag_spec=' '
++ allow_undefined_flag=unsupported
++ always_export_symbols=yes
++ file_list_spec='@'
++ # Tell ltmain to make .lib files, not .a files.
++ libext=lib
++ # Tell ltmain to make .dll files, not .so files.
++ shrext_cmds=".dll"
++ # FIXME: Setting linknames here is a bad hack.
++ archive_cmds='$CC -o $output_objdir/$soname $libobjs $compiler_flags $deplibs -Wl,-dll~linknames='
++ archive_expsym_cmds='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then
++ sed -n -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' -e '1\\\!p' < $export_symbols > $output_objdir/$soname.exp;
++ else
++ sed -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' < $export_symbols > $output_objdir/$soname.exp;
++ fi~
++ $CC -o $tool_output_objdir$soname $libobjs $compiler_flags $deplibs "@$tool_output_objdir$soname.exp" -Wl,-DLL,-IMPLIB:"$tool_output_objdir$libname.dll.lib"~
++ linknames='
++ # The linker will not automatically build a static lib if we build a DLL.
++ # _LT_TAGVAR(old_archive_from_new_cmds, )='true'
++ enable_shared_with_static_runtimes=yes
++ export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1,DATA/'\'' | $SED -e '\''/^[AITW][ ]/s/.*[ ]//'\'' | sort | uniq > $export_symbols'
++ # Don't use ranlib
++ old_postinstall_cmds='chmod 644 $oldlib'
++ postlink_cmds='lt_outputfile="@OUTPUT@"~
++ lt_tool_outputfile="@TOOL_OUTPUT@"~
++ case $lt_outputfile in
++ *.exe|*.EXE) ;;
++ *)
++ lt_outputfile="$lt_outputfile.exe"
++ lt_tool_outputfile="$lt_tool_outputfile.exe"
++ ;;
++ esac~
++ if test "$MANIFEST_TOOL" != ":" && test -f "$lt_outputfile.manifest"; then
++ $MANIFEST_TOOL -manifest "$lt_tool_outputfile.manifest" -outputresource:"$lt_tool_outputfile" || exit 1;
++ $RM "$lt_outputfile.manifest";
++ fi'
++ ;;
++ *)
++ # Assume MSVC wrapper
++ hardcode_libdir_flag_spec=' '
++ allow_undefined_flag=unsupported
++ # Tell ltmain to make .lib files, not .a files.
++ libext=lib
++ # Tell ltmain to make .dll files, not .so files.
++ shrext_cmds=".dll"
++ # FIXME: Setting linknames here is a bad hack.
++ archive_cmds='$CC -o $lib $libobjs $compiler_flags `func_echo_all "$deplibs" | $SED '\''s/ -lc$//'\''` -link -dll~linknames='
++ # The linker will automatically build a .lib file if we build a DLL.
++ old_archive_from_new_cmds='true'
++ # FIXME: Should let the user specify the lib program.
++ old_archive_cmds='lib -OUT:$oldlib$oldobjs$old_deplibs'
++ enable_shared_with_static_runtimes=yes
++ ;;
++ esac
+ ;;
+
+ darwin* | rhapsody*)
+@@ -9322,7 +9900,7 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
+
+ # FreeBSD 3 and greater uses gcc -shared to do shared libraries.
+ freebsd* | dragonfly*)
+- archive_cmds='$CC -shared -o $lib $libobjs $deplibs $compiler_flags'
++ archive_cmds='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags'
+ hardcode_libdir_flag_spec='-R$libdir'
+ hardcode_direct=yes
+ hardcode_shlibpath_var=no
+@@ -9330,7 +9908,7 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
+
+ hpux9*)
+ if test "$GCC" = yes; then
+- archive_cmds='$RM $output_objdir/$soname~$CC -shared -fPIC ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $libobjs $deplibs $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib'
++ archive_cmds='$RM $output_objdir/$soname~$CC -shared $pic_flag ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $libobjs $deplibs $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib'
+ else
+ archive_cmds='$RM $output_objdir/$soname~$LD -b +b $install_libdir -o $output_objdir/$soname $libobjs $deplibs $linker_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib'
+ fi
+@@ -9346,7 +9924,7 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
+
+ hpux10*)
+ if test "$GCC" = yes && test "$with_gnu_ld" = no; then
+- archive_cmds='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'
++ archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'
+ else
+ archive_cmds='$LD -b +h $soname +b $install_libdir -o $lib $libobjs $deplibs $linker_flags'
+ fi
+@@ -9370,10 +9948,10 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
+ archive_cmds='$CC -shared ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags'
+ ;;
+ ia64*)
+- archive_cmds='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags'
++ archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags'
+ ;;
+ *)
+- archive_cmds='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'
++ archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'
+ ;;
+ esac
+ else
+@@ -9452,23 +10030,36 @@ fi
+
+ irix5* | irix6* | nonstopux*)
+ if test "$GCC" = yes; then
+- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
++ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
+ # Try to use the -exported_symbol ld option, if it does not
+ # work, assume that -exports_file does not work either and
+ # implicitly export all symbols.
+- save_LDFLAGS="$LDFLAGS"
+- LDFLAGS="$LDFLAGS -shared ${wl}-exported_symbol ${wl}foo ${wl}-update_registry ${wl}/dev/null"
+- cat confdefs.h - <<_ACEOF >conftest.$ac_ext
++ # This should be the same for all languages, so no per-tag cache variable.
++ { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the $host_os linker accepts -exported_symbol" >&5
++$as_echo_n "checking whether the $host_os linker accepts -exported_symbol... " >&6; }
++if ${lt_cv_irix_exported_symbol+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ save_LDFLAGS="$LDFLAGS"
++ LDFLAGS="$LDFLAGS -shared ${wl}-exported_symbol ${wl}foo ${wl}-update_registry ${wl}/dev/null"
++ cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+ /* end confdefs.h. */
+-int foo(void) {}
++int foo (void) { return 0; }
+ _ACEOF
+ if ac_fn_c_try_link "$LINENO"; then :
+- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations ${wl}-exports_file ${wl}$export_symbols -o $lib'
+-
++ lt_cv_irix_exported_symbol=yes
++else
++ lt_cv_irix_exported_symbol=no
+ fi
+ rm -f core conftest.err conftest.$ac_objext \
+ conftest$ac_exeext conftest.$ac_ext
+- LDFLAGS="$save_LDFLAGS"
++ LDFLAGS="$save_LDFLAGS"
++fi
++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_irix_exported_symbol" >&5
++$as_echo "$lt_cv_irix_exported_symbol" >&6; }
++ if test "$lt_cv_irix_exported_symbol" = yes; then
++ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations ${wl}-exports_file ${wl}$export_symbols -o $lib'
++ fi
+ else
+ archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib'
+ archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -exports_file $export_symbols -o $lib'
+@@ -9553,7 +10144,7 @@ rm -f core conftest.err conftest.$ac_objext \
+ osf4* | osf5*) # as osf3* with the addition of -msym flag
+ if test "$GCC" = yes; then
+ allow_undefined_flag=' ${wl}-expect_unresolved ${wl}\*'
+- archive_cmds='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
++ archive_cmds='$CC -shared${allow_undefined_flag} $pic_flag $libobjs $deplibs $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
+ hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir'
+ else
+ allow_undefined_flag=' -expect_unresolved \*'
+@@ -9572,9 +10163,9 @@ rm -f core conftest.err conftest.$ac_objext \
+ no_undefined_flag=' -z defs'
+ if test "$GCC" = yes; then
+ wlarc='${wl}'
+- archive_cmds='$CC -shared ${wl}-z ${wl}text ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags'
++ archive_cmds='$CC -shared $pic_flag ${wl}-z ${wl}text ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags'
+ archive_expsym_cmds='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~
+- $CC -shared ${wl}-z ${wl}text ${wl}-M ${wl}$lib.exp ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp'
++ $CC -shared $pic_flag ${wl}-z ${wl}text ${wl}-M ${wl}$lib.exp ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp'
+ else
+ case `$CC -V 2>&1` in
+ *"Compilers 5.0"*)
+@@ -10150,8 +10741,9 @@ cygwin* | mingw* | pw32* | cegcc*)
+ need_version=no
+ need_lib_prefix=no
+
+- case $GCC,$host_os in
+- yes,cygwin* | yes,mingw* | yes,pw32* | yes,cegcc*)
++ case $GCC,$cc_basename in
++ yes,*)
++ # gcc
+ library_names_spec='$libname.dll.a'
+ # DLL is installed to $(libdir)/../bin by postinstall_cmds
+ postinstall_cmds='base_file=`basename \${file}`~
+@@ -10184,13 +10776,71 @@ cygwin* | mingw* | pw32* | cegcc*)
+ library_names_spec='`echo ${libname} | sed -e 's/^lib/pw/'``echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}'
+ ;;
+ esac
++ dynamic_linker='Win32 ld.exe'
++ ;;
++
++ *,cl*)
++ # Native MSVC
++ libname_spec='$name'
++ soname_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}'
++ library_names_spec='${libname}.dll.lib'
++
++ case $build_os in
++ mingw*)
++ sys_lib_search_path_spec=
++ lt_save_ifs=$IFS
++ IFS=';'
++ for lt_path in $LIB
++ do
++ IFS=$lt_save_ifs
++ # Let DOS variable expansion print the short 8.3 style file name.
++ lt_path=`cd "$lt_path" 2>/dev/null && cmd //C "for %i in (".") do @echo %~si"`
++ sys_lib_search_path_spec="$sys_lib_search_path_spec $lt_path"
++ done
++ IFS=$lt_save_ifs
++ # Convert to MSYS style.
++ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | sed -e 's|\\\\|/|g' -e 's| \\([a-zA-Z]\\):| /\\1|g' -e 's|^ ||'`
++ ;;
++ cygwin*)
++ # Convert to unix form, then to dos form, then back to unix form
++ # but this time dos style (no spaces!) so that the unix form looks
++ # like /cygdrive/c/PROGRA~1:/cygdr...
++ sys_lib_search_path_spec=`cygpath --path --unix "$LIB"`
++ sys_lib_search_path_spec=`cygpath --path --dos "$sys_lib_search_path_spec" 2>/dev/null`
++ sys_lib_search_path_spec=`cygpath --path --unix "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"`
++ ;;
++ *)
++ sys_lib_search_path_spec="$LIB"
++ if $ECHO "$sys_lib_search_path_spec" | $GREP ';[c-zC-Z]:/' >/dev/null; then
++ # It is most probably a Windows format PATH.
++ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e 's/;/ /g'`
++ else
++ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"`
++ fi
++ # FIXME: find the short name or the path components, as spaces are
++ # common. (e.g. "Program Files" -> "PROGRA~1")
++ ;;
++ esac
++
++ # DLL is installed to $(libdir)/../bin by postinstall_cmds
++ postinstall_cmds='base_file=`basename \${file}`~
++ dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\${base_file}'\''i; echo \$dlname'\''`~
++ dldir=$destdir/`dirname \$dlpath`~
++ test -d \$dldir || mkdir -p \$dldir~
++ $install_prog $dir/$dlname \$dldir/$dlname'
++ postuninstall_cmds='dldll=`$SHELL 2>&1 -c '\''. $file; echo \$dlname'\''`~
++ dlpath=$dir/\$dldll~
++ $RM \$dlpath'
++ shlibpath_overrides_runpath=yes
++ dynamic_linker='Win32 link.exe'
+ ;;
+
+ *)
++ # Assume MSVC wrapper
+ library_names_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext} $libname.lib'
++ dynamic_linker='Win32 ld.exe'
+ ;;
+ esac
+- dynamic_linker='Win32 ld.exe'
+ # FIXME: first we should search . and the directory the executable is in
+ shlibpath_var=PATH
+ ;;
+@@ -11092,7 +11742,7 @@ else
+ lt_dlunknown=0; lt_dlno_uscore=1; lt_dlneed_uscore=2
+ lt_status=$lt_dlunknown
+ cat > conftest.$ac_ext <<_LT_EOF
+-#line 11095 "configure"
++#line $LINENO "configure"
+ #include "confdefs.h"
+
+ #if HAVE_DLFCN_H
+@@ -11136,10 +11786,10 @@ else
+ /* When -fvisbility=hidden is used, assume the code has been annotated
+ correspondingly for the symbols needed. */
+ #if defined(__GNUC__) && (((__GNUC__ == 3) && (__GNUC_MINOR__ >= 3)) || (__GNUC__ > 3))
+-void fnord () __attribute__((visibility("default")));
++int fnord () __attribute__((visibility("default")));
+ #endif
+
+-void fnord () { int i=42; }
++int fnord () { return 42; }
+ int main ()
+ {
+ void *self = dlopen (0, LT_DLGLOBAL|LT_DLLAZY_OR_NOW);
+@@ -11198,7 +11848,7 @@ else
+ lt_dlunknown=0; lt_dlno_uscore=1; lt_dlneed_uscore=2
+ lt_status=$lt_dlunknown
+ cat > conftest.$ac_ext <<_LT_EOF
+-#line 11201 "configure"
++#line $LINENO "configure"
+ #include "confdefs.h"
+
+ #if HAVE_DLFCN_H
+@@ -11242,10 +11892,10 @@ else
+ /* When -fvisbility=hidden is used, assume the code has been annotated
+ correspondingly for the symbols needed. */
+ #if defined(__GNUC__) && (((__GNUC__ == 3) && (__GNUC_MINOR__ >= 3)) || (__GNUC__ > 3))
+-void fnord () __attribute__((visibility("default")));
++int fnord () __attribute__((visibility("default")));
+ #endif
+
+-void fnord () { int i=42; }
++int fnord () { return 42; }
+ int main ()
+ {
+ void *self = dlopen (0, LT_DLGLOBAL|LT_DLLAZY_OR_NOW);
+@@ -15351,13 +16001,20 @@ exeext='`$ECHO "$exeext" | $SED "$delay_single_quote_subst"`'
+ lt_unset='`$ECHO "$lt_unset" | $SED "$delay_single_quote_subst"`'
+ lt_SP2NL='`$ECHO "$lt_SP2NL" | $SED "$delay_single_quote_subst"`'
+ lt_NL2SP='`$ECHO "$lt_NL2SP" | $SED "$delay_single_quote_subst"`'
++lt_cv_to_host_file_cmd='`$ECHO "$lt_cv_to_host_file_cmd" | $SED "$delay_single_quote_subst"`'
++lt_cv_to_tool_file_cmd='`$ECHO "$lt_cv_to_tool_file_cmd" | $SED "$delay_single_quote_subst"`'
+ reload_flag='`$ECHO "$reload_flag" | $SED "$delay_single_quote_subst"`'
+ reload_cmds='`$ECHO "$reload_cmds" | $SED "$delay_single_quote_subst"`'
+ OBJDUMP='`$ECHO "$OBJDUMP" | $SED "$delay_single_quote_subst"`'
+ deplibs_check_method='`$ECHO "$deplibs_check_method" | $SED "$delay_single_quote_subst"`'
+ file_magic_cmd='`$ECHO "$file_magic_cmd" | $SED "$delay_single_quote_subst"`'
++file_magic_glob='`$ECHO "$file_magic_glob" | $SED "$delay_single_quote_subst"`'
++want_nocaseglob='`$ECHO "$want_nocaseglob" | $SED "$delay_single_quote_subst"`'
++DLLTOOL='`$ECHO "$DLLTOOL" | $SED "$delay_single_quote_subst"`'
++sharedlib_from_linklib_cmd='`$ECHO "$sharedlib_from_linklib_cmd" | $SED "$delay_single_quote_subst"`'
+ AR='`$ECHO "$AR" | $SED "$delay_single_quote_subst"`'
+ AR_FLAGS='`$ECHO "$AR_FLAGS" | $SED "$delay_single_quote_subst"`'
++archiver_list_spec='`$ECHO "$archiver_list_spec" | $SED "$delay_single_quote_subst"`'
+ STRIP='`$ECHO "$STRIP" | $SED "$delay_single_quote_subst"`'
+ RANLIB='`$ECHO "$RANLIB" | $SED "$delay_single_quote_subst"`'
+ old_postinstall_cmds='`$ECHO "$old_postinstall_cmds" | $SED "$delay_single_quote_subst"`'
+@@ -15372,14 +16029,17 @@ lt_cv_sys_global_symbol_pipe='`$ECHO "$lt_cv_sys_global_symbol_pipe" | $SED "$de
+ lt_cv_sys_global_symbol_to_cdecl='`$ECHO "$lt_cv_sys_global_symbol_to_cdecl" | $SED "$delay_single_quote_subst"`'
+ lt_cv_sys_global_symbol_to_c_name_address='`$ECHO "$lt_cv_sys_global_symbol_to_c_name_address" | $SED "$delay_single_quote_subst"`'
+ lt_cv_sys_global_symbol_to_c_name_address_lib_prefix='`$ECHO "$lt_cv_sys_global_symbol_to_c_name_address_lib_prefix" | $SED "$delay_single_quote_subst"`'
++nm_file_list_spec='`$ECHO "$nm_file_list_spec" | $SED "$delay_single_quote_subst"`'
++lt_sysroot='`$ECHO "$lt_sysroot" | $SED "$delay_single_quote_subst"`'
+ objdir='`$ECHO "$objdir" | $SED "$delay_single_quote_subst"`'
+ MAGIC_CMD='`$ECHO "$MAGIC_CMD" | $SED "$delay_single_quote_subst"`'
+ lt_prog_compiler_no_builtin_flag='`$ECHO "$lt_prog_compiler_no_builtin_flag" | $SED "$delay_single_quote_subst"`'
+-lt_prog_compiler_wl='`$ECHO "$lt_prog_compiler_wl" | $SED "$delay_single_quote_subst"`'
+ lt_prog_compiler_pic='`$ECHO "$lt_prog_compiler_pic" | $SED "$delay_single_quote_subst"`'
++lt_prog_compiler_wl='`$ECHO "$lt_prog_compiler_wl" | $SED "$delay_single_quote_subst"`'
+ lt_prog_compiler_static='`$ECHO "$lt_prog_compiler_static" | $SED "$delay_single_quote_subst"`'
+ lt_cv_prog_compiler_c_o='`$ECHO "$lt_cv_prog_compiler_c_o" | $SED "$delay_single_quote_subst"`'
+ need_locks='`$ECHO "$need_locks" | $SED "$delay_single_quote_subst"`'
++MANIFEST_TOOL='`$ECHO "$MANIFEST_TOOL" | $SED "$delay_single_quote_subst"`'
+ DSYMUTIL='`$ECHO "$DSYMUTIL" | $SED "$delay_single_quote_subst"`'
+ NMEDIT='`$ECHO "$NMEDIT" | $SED "$delay_single_quote_subst"`'
+ LIPO='`$ECHO "$LIPO" | $SED "$delay_single_quote_subst"`'
+@@ -15412,12 +16072,12 @@ hardcode_shlibpath_var='`$ECHO "$hardcode_shlibpath_var" | $SED "$delay_single_q
+ hardcode_automatic='`$ECHO "$hardcode_automatic" | $SED "$delay_single_quote_subst"`'
+ inherit_rpath='`$ECHO "$inherit_rpath" | $SED "$delay_single_quote_subst"`'
+ link_all_deplibs='`$ECHO "$link_all_deplibs" | $SED "$delay_single_quote_subst"`'
+-fix_srcfile_path='`$ECHO "$fix_srcfile_path" | $SED "$delay_single_quote_subst"`'
+ always_export_symbols='`$ECHO "$always_export_symbols" | $SED "$delay_single_quote_subst"`'
+ export_symbols_cmds='`$ECHO "$export_symbols_cmds" | $SED "$delay_single_quote_subst"`'
+ exclude_expsyms='`$ECHO "$exclude_expsyms" | $SED "$delay_single_quote_subst"`'
+ include_expsyms='`$ECHO "$include_expsyms" | $SED "$delay_single_quote_subst"`'
+ prelink_cmds='`$ECHO "$prelink_cmds" | $SED "$delay_single_quote_subst"`'
++postlink_cmds='`$ECHO "$postlink_cmds" | $SED "$delay_single_quote_subst"`'
+ file_list_spec='`$ECHO "$file_list_spec" | $SED "$delay_single_quote_subst"`'
+ variables_saved_for_relink='`$ECHO "$variables_saved_for_relink" | $SED "$delay_single_quote_subst"`'
+ need_lib_prefix='`$ECHO "$need_lib_prefix" | $SED "$delay_single_quote_subst"`'
+@@ -15472,8 +16132,13 @@ reload_flag \
+ OBJDUMP \
+ deplibs_check_method \
+ file_magic_cmd \
++file_magic_glob \
++want_nocaseglob \
++DLLTOOL \
++sharedlib_from_linklib_cmd \
+ AR \
+ AR_FLAGS \
++archiver_list_spec \
+ STRIP \
+ RANLIB \
+ CC \
+@@ -15483,12 +16148,14 @@ lt_cv_sys_global_symbol_pipe \
+ lt_cv_sys_global_symbol_to_cdecl \
+ lt_cv_sys_global_symbol_to_c_name_address \
+ lt_cv_sys_global_symbol_to_c_name_address_lib_prefix \
++nm_file_list_spec \
+ lt_prog_compiler_no_builtin_flag \
+-lt_prog_compiler_wl \
+ lt_prog_compiler_pic \
++lt_prog_compiler_wl \
+ lt_prog_compiler_static \
+ lt_cv_prog_compiler_c_o \
+ need_locks \
++MANIFEST_TOOL \
+ DSYMUTIL \
+ NMEDIT \
+ LIPO \
+@@ -15504,7 +16171,6 @@ no_undefined_flag \
+ hardcode_libdir_flag_spec \
+ hardcode_libdir_flag_spec_ld \
+ hardcode_libdir_separator \
+-fix_srcfile_path \
+ exclude_expsyms \
+ include_expsyms \
+ file_list_spec \
+@@ -15540,6 +16206,7 @@ module_cmds \
+ module_expsym_cmds \
+ export_symbols_cmds \
+ prelink_cmds \
++postlink_cmds \
+ postinstall_cmds \
+ postuninstall_cmds \
+ finish_cmds \
+@@ -16313,7 +16980,8 @@ $as_echo X"$file" |
+ # NOTE: Changes made to this file will be lost: look at ltmain.sh.
+ #
+ # Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005,
+-# 2006, 2007, 2008, 2009 Free Software Foundation, Inc.
++# 2006, 2007, 2008, 2009, 2010 Free Software Foundation,
++# Inc.
+ # Written by Gordon Matzigkeit, 1996
+ #
+ # This file is part of GNU Libtool.
+@@ -16416,19 +17084,42 @@ SP2NL=$lt_lt_SP2NL
+ # turn newlines into spaces.
+ NL2SP=$lt_lt_NL2SP
+
++# convert \$build file names to \$host format.
++to_host_file_cmd=$lt_cv_to_host_file_cmd
++
++# convert \$build files to toolchain format.
++to_tool_file_cmd=$lt_cv_to_tool_file_cmd
++
+ # An object symbol dumper.
+ OBJDUMP=$lt_OBJDUMP
+
+ # Method to check whether dependent libraries are shared objects.
+ deplibs_check_method=$lt_deplibs_check_method
+
+-# Command to use when deplibs_check_method == "file_magic".
++# Command to use when deplibs_check_method = "file_magic".
+ file_magic_cmd=$lt_file_magic_cmd
+
++# How to find potential files when deplibs_check_method = "file_magic".
++file_magic_glob=$lt_file_magic_glob
++
++# Find potential files using nocaseglob when deplibs_check_method = "file_magic".
++want_nocaseglob=$lt_want_nocaseglob
++
++# DLL creation program.
++DLLTOOL=$lt_DLLTOOL
++
++# Command to associate shared and link libraries.
++sharedlib_from_linklib_cmd=$lt_sharedlib_from_linklib_cmd
++
+ # The archiver.
+ AR=$lt_AR
++
++# Flags to create an archive.
+ AR_FLAGS=$lt_AR_FLAGS
+
++# How to feed a file listing to the archiver.
++archiver_list_spec=$lt_archiver_list_spec
++
+ # A symbol stripping program.
+ STRIP=$lt_STRIP
+
+@@ -16458,6 +17149,12 @@ global_symbol_to_c_name_address=$lt_lt_cv_sys_global_symbol_to_c_name_address
+ # Transform the output of nm in a C name address pair when lib prefix is needed.
+ global_symbol_to_c_name_address_lib_prefix=$lt_lt_cv_sys_global_symbol_to_c_name_address_lib_prefix
+
++# Specify filename containing input files for \$NM.
++nm_file_list_spec=$lt_nm_file_list_spec
++
++# The root where to search for dependent libraries,and in which our libraries should be installed.
++lt_sysroot=$lt_sysroot
++
+ # The name of the directory that contains temporary libtool files.
+ objdir=$objdir
+
+@@ -16467,6 +17164,9 @@ MAGIC_CMD=$MAGIC_CMD
+ # Must we lock files when doing compilation?
+ need_locks=$lt_need_locks
+
++# Manifest tool.
++MANIFEST_TOOL=$lt_MANIFEST_TOOL
++
+ # Tool to manipulate archived DWARF debug symbol files on Mac OS X.
+ DSYMUTIL=$lt_DSYMUTIL
+
+@@ -16581,12 +17281,12 @@ with_gcc=$GCC
+ # Compiler flag to turn off builtin functions.
+ no_builtin_flag=$lt_lt_prog_compiler_no_builtin_flag
+
+-# How to pass a linker flag through the compiler.
+-wl=$lt_lt_prog_compiler_wl
+-
+ # Additional compiler flags for building library objects.
+ pic_flag=$lt_lt_prog_compiler_pic
+
++# How to pass a linker flag through the compiler.
++wl=$lt_lt_prog_compiler_wl
++
+ # Compiler flag to prevent dynamic linking.
+ link_static_flag=$lt_lt_prog_compiler_static
+
+@@ -16673,9 +17373,6 @@ inherit_rpath=$inherit_rpath
+ # Whether libtool must link a program against all its dependency libraries.
+ link_all_deplibs=$link_all_deplibs
+
+-# Fix the shell variable \$srcfile for the compiler.
+-fix_srcfile_path=$lt_fix_srcfile_path
+-
+ # Set to "yes" if exported symbols are required.
+ always_export_symbols=$always_export_symbols
+
+@@ -16691,6 +17388,9 @@ include_expsyms=$lt_include_expsyms
+ # Commands necessary for linking programs (against libraries) with templates.
+ prelink_cmds=$lt_prelink_cmds
+
++# Commands necessary for finishing linking programs.
++postlink_cmds=$lt_postlink_cmds
++
+ # Specify filename containing input files.
+ file_list_spec=$lt_file_list_spec
+
+@@ -16723,210 +17423,169 @@ ltmain="$ac_aux_dir/ltmain.sh"
+ # if finds mixed CR/LF and LF-only lines. Since sed operates in
+ # text mode, it properly converts lines to CR/LF. This bash problem
+ # is reportedly fixed, but why not run on old versions too?
+- sed '/^# Generated shell functions inserted here/q' "$ltmain" >> "$cfgfile" \
+- || (rm -f "$cfgfile"; exit 1)
+-
+- case $xsi_shell in
+- yes)
+- cat << \_LT_EOF >> "$cfgfile"
+-
+-# func_dirname file append nondir_replacement
+-# Compute the dirname of FILE. If nonempty, add APPEND to the result,
+-# otherwise set result to NONDIR_REPLACEMENT.
+-func_dirname ()
+-{
+- case ${1} in
+- */*) func_dirname_result="${1%/*}${2}" ;;
+- * ) func_dirname_result="${3}" ;;
+- esac
+-}
+-
+-# func_basename file
+-func_basename ()
+-{
+- func_basename_result="${1##*/}"
+-}
+-
+-# func_dirname_and_basename file append nondir_replacement
+-# perform func_basename and func_dirname in a single function
+-# call:
+-# dirname: Compute the dirname of FILE. If nonempty,
+-# add APPEND to the result, otherwise set result
+-# to NONDIR_REPLACEMENT.
+-# value returned in "$func_dirname_result"
+-# basename: Compute filename of FILE.
+-# value retuned in "$func_basename_result"
+-# Implementation must be kept synchronized with func_dirname
+-# and func_basename. For efficiency, we do not delegate to
+-# those functions but instead duplicate the functionality here.
+-func_dirname_and_basename ()
+-{
+- case ${1} in
+- */*) func_dirname_result="${1%/*}${2}" ;;
+- * ) func_dirname_result="${3}" ;;
+- esac
+- func_basename_result="${1##*/}"
+-}
+-
+-# func_stripname prefix suffix name
+-# strip PREFIX and SUFFIX off of NAME.
+-# PREFIX and SUFFIX must not contain globbing or regex special
+-# characters, hashes, percent signs, but SUFFIX may contain a leading
+-# dot (in which case that matches only a dot).
+-func_stripname ()
+-{
+- # pdksh 5.2.14 does not do ${X%$Y} correctly if both X and Y are
+- # positional parameters, so assign one to ordinary parameter first.
+- func_stripname_result=${3}
+- func_stripname_result=${func_stripname_result#"${1}"}
+- func_stripname_result=${func_stripname_result%"${2}"}
+-}
+-
+-# func_opt_split
+-func_opt_split ()
+-{
+- func_opt_split_opt=${1%%=*}
+- func_opt_split_arg=${1#*=}
+-}
+-
+-# func_lo2o object
+-func_lo2o ()
+-{
+- case ${1} in
+- *.lo) func_lo2o_result=${1%.lo}.${objext} ;;
+- *) func_lo2o_result=${1} ;;
+- esac
+-}
+-
+-# func_xform libobj-or-source
+-func_xform ()
+-{
+- func_xform_result=${1%.*}.lo
+-}
+-
+-# func_arith arithmetic-term...
+-func_arith ()
+-{
+- func_arith_result=$(( $* ))
+-}
+-
+-# func_len string
+-# STRING may not start with a hyphen.
+-func_len ()
+-{
+- func_len_result=${#1}
+-}
+-
+-_LT_EOF
+- ;;
+- *) # Bourne compatible functions.
+- cat << \_LT_EOF >> "$cfgfile"
+-
+-# func_dirname file append nondir_replacement
+-# Compute the dirname of FILE. If nonempty, add APPEND to the result,
+-# otherwise set result to NONDIR_REPLACEMENT.
+-func_dirname ()
+-{
+- # Extract subdirectory from the argument.
+- func_dirname_result=`$ECHO "${1}" | $SED "$dirname"`
+- if test "X$func_dirname_result" = "X${1}"; then
+- func_dirname_result="${3}"
+- else
+- func_dirname_result="$func_dirname_result${2}"
+- fi
+-}
+-
+-# func_basename file
+-func_basename ()
+-{
+- func_basename_result=`$ECHO "${1}" | $SED "$basename"`
+-}
+-
+-
+-# func_stripname prefix suffix name
+-# strip PREFIX and SUFFIX off of NAME.
+-# PREFIX and SUFFIX must not contain globbing or regex special
+-# characters, hashes, percent signs, but SUFFIX may contain a leading
+-# dot (in which case that matches only a dot).
+-# func_strip_suffix prefix name
+-func_stripname ()
+-{
+- case ${2} in
+- .*) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%\\\\${2}\$%%"`;;
+- *) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%${2}\$%%"`;;
+- esac
+-}
+-
+-# sed scripts:
+-my_sed_long_opt='1s/^\(-[^=]*\)=.*/\1/;q'
+-my_sed_long_arg='1s/^-[^=]*=//'
+-
+-# func_opt_split
+-func_opt_split ()
+-{
+- func_opt_split_opt=`$ECHO "${1}" | $SED "$my_sed_long_opt"`
+- func_opt_split_arg=`$ECHO "${1}" | $SED "$my_sed_long_arg"`
+-}
+-
+-# func_lo2o object
+-func_lo2o ()
+-{
+- func_lo2o_result=`$ECHO "${1}" | $SED "$lo2o"`
+-}
+-
+-# func_xform libobj-or-source
+-func_xform ()
+-{
+- func_xform_result=`$ECHO "${1}" | $SED 's/\.[^.]*$/.lo/'`
+-}
+-
+-# func_arith arithmetic-term...
+-func_arith ()
+-{
+- func_arith_result=`expr "$@"`
+-}
+-
+-# func_len string
+-# STRING may not start with a hyphen.
+-func_len ()
+-{
+- func_len_result=`expr "$1" : ".*" 2>/dev/null || echo $max_cmd_len`
+-}
+-
+-_LT_EOF
+-esac
+-
+-case $lt_shell_append in
+- yes)
+- cat << \_LT_EOF >> "$cfgfile"
+-
+-# func_append var value
+-# Append VALUE to the end of shell variable VAR.
+-func_append ()
+-{
+- eval "$1+=\$2"
+-}
+-_LT_EOF
+- ;;
+- *)
+- cat << \_LT_EOF >> "$cfgfile"
+-
+-# func_append var value
+-# Append VALUE to the end of shell variable VAR.
+-func_append ()
+-{
+- eval "$1=\$$1\$2"
+-}
+-
+-_LT_EOF
+- ;;
+- esac
+-
+-
+- sed -n '/^# Generated shell functions inserted here/,$p' "$ltmain" >> "$cfgfile" \
+- || (rm -f "$cfgfile"; exit 1)
+-
+- mv -f "$cfgfile" "$ofile" ||
++ sed '$q' "$ltmain" >> "$cfgfile" \
++ || (rm -f "$cfgfile"; exit 1)
++
++ if test x"$xsi_shell" = xyes; then
++ sed -e '/^func_dirname ()$/,/^} # func_dirname /c\
++func_dirname ()\
++{\
++\ case ${1} in\
++\ */*) func_dirname_result="${1%/*}${2}" ;;\
++\ * ) func_dirname_result="${3}" ;;\
++\ esac\
++} # Extended-shell func_dirname implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++
++ sed -e '/^func_basename ()$/,/^} # func_basename /c\
++func_basename ()\
++{\
++\ func_basename_result="${1##*/}"\
++} # Extended-shell func_basename implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++
++ sed -e '/^func_dirname_and_basename ()$/,/^} # func_dirname_and_basename /c\
++func_dirname_and_basename ()\
++{\
++\ case ${1} in\
++\ */*) func_dirname_result="${1%/*}${2}" ;;\
++\ * ) func_dirname_result="${3}" ;;\
++\ esac\
++\ func_basename_result="${1##*/}"\
++} # Extended-shell func_dirname_and_basename implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++
++ sed -e '/^func_stripname ()$/,/^} # func_stripname /c\
++func_stripname ()\
++{\
++\ # pdksh 5.2.14 does not do ${X%$Y} correctly if both X and Y are\
++\ # positional parameters, so assign one to ordinary parameter first.\
++\ func_stripname_result=${3}\
++\ func_stripname_result=${func_stripname_result#"${1}"}\
++\ func_stripname_result=${func_stripname_result%"${2}"}\
++} # Extended-shell func_stripname implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++
++ sed -e '/^func_split_long_opt ()$/,/^} # func_split_long_opt /c\
++func_split_long_opt ()\
++{\
++\ func_split_long_opt_name=${1%%=*}\
++\ func_split_long_opt_arg=${1#*=}\
++} # Extended-shell func_split_long_opt implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++
++ sed -e '/^func_split_short_opt ()$/,/^} # func_split_short_opt /c\
++func_split_short_opt ()\
++{\
++\ func_split_short_opt_arg=${1#??}\
++\ func_split_short_opt_name=${1%"$func_split_short_opt_arg"}\
++} # Extended-shell func_split_short_opt implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++
++ sed -e '/^func_lo2o ()$/,/^} # func_lo2o /c\
++func_lo2o ()\
++{\
++\ case ${1} in\
++\ *.lo) func_lo2o_result=${1%.lo}.${objext} ;;\
++\ *) func_lo2o_result=${1} ;;\
++\ esac\
++} # Extended-shell func_lo2o implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++
++ sed -e '/^func_xform ()$/,/^} # func_xform /c\
++func_xform ()\
++{\
++ func_xform_result=${1%.*}.lo\
++} # Extended-shell func_xform implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++
++ sed -e '/^func_arith ()$/,/^} # func_arith /c\
++func_arith ()\
++{\
++ func_arith_result=$(( $* ))\
++} # Extended-shell func_arith implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++
++ sed -e '/^func_len ()$/,/^} # func_len /c\
++func_len ()\
++{\
++ func_len_result=${#1}\
++} # Extended-shell func_len implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++fi
++
++if test x"$lt_shell_append" = xyes; then
++ sed -e '/^func_append ()$/,/^} # func_append /c\
++func_append ()\
++{\
++ eval "${1}+=\\${2}"\
++} # Extended-shell func_append implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++
++ sed -e '/^func_append_quoted ()$/,/^} # func_append_quoted /c\
++func_append_quoted ()\
++{\
++\ func_quote_for_eval "${2}"\
++\ eval "${1}+=\\\\ \\$func_quote_for_eval_result"\
++} # Extended-shell func_append_quoted implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++
++ # Save a `func_append' function call where possible by direct use of '+='
++ sed -e 's%func_append \([a-zA-Z_]\{1,\}\) "%\1+="%g' $cfgfile > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++ test 0 -eq $? || _lt_function_replace_fail=:
++else
++ # Save a `func_append' function call even when '+=' is not available
++ sed -e 's%func_append \([a-zA-Z_]\{1,\}\) "%\1="$\1%g' $cfgfile > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++ test 0 -eq $? || _lt_function_replace_fail=:
++fi
++
++if test x"$_lt_function_replace_fail" = x":"; then
++ { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: Unable to substitute extended shell functions in $ofile" >&5
++$as_echo "$as_me: WARNING: Unable to substitute extended shell functions in $ofile" >&2;}
++fi
++
++
++ mv -f "$cfgfile" "$ofile" ||
+ (rm -f "$ofile" && cp "$cfgfile" "$ofile" && rm -f "$cfgfile")
+ chmod +x "$ofile"
+
+diff --git a/zlib/Makefile.in b/zlib/Makefile.in
+index dd8a21deba1..28628282c56 100644
+--- a/zlib/Makefile.in
++++ b/zlib/Makefile.in
+@@ -281,6 +281,7 @@ CPPFLAGS = @CPPFLAGS@
+ CYGPATH_W = @CYGPATH_W@
+ DEFS = @DEFS@
+ DEPDIR = @DEPDIR@
++DLLTOOL = @DLLTOOL@
+ DSYMUTIL = @DSYMUTIL@
+ DUMPBIN = @DUMPBIN@
+ ECHO_C = @ECHO_C@
+@@ -305,6 +306,7 @@ LN_S = @LN_S@
+ LTLIBOBJS = @LTLIBOBJS@
+ MAINT = @MAINT@
+ MAKEINFO = @MAKEINFO@
++MANIFEST_TOOL = @MANIFEST_TOOL@
+ MKDIR_P = @MKDIR_P@
+ NM = @NM@
+ NMEDIT = @NMEDIT@
+@@ -331,6 +333,7 @@ abs_builddir = @abs_builddir@
+ abs_srcdir = @abs_srcdir@
+ abs_top_builddir = @abs_top_builddir@
+ abs_top_srcdir = @abs_top_srcdir@
++ac_ct_AR = @ac_ct_AR@
+ ac_ct_CC = @ac_ct_CC@
+ ac_ct_DUMPBIN = @ac_ct_DUMPBIN@
+ am__include = @am__include@
+diff --git a/zlib/configure b/zlib/configure
+index 2d551360683..e01d0a70438 100755
+--- a/zlib/configure
++++ b/zlib/configure
+@@ -648,8 +648,11 @@ OTOOL
+ LIPO
+ NMEDIT
+ DSYMUTIL
++MANIFEST_TOOL
+ RANLIB
++ac_ct_AR
+ AR
++DLLTOOL
+ OBJDUMP
+ LN_S
+ NM
+@@ -776,6 +779,7 @@ enable_static
+ with_pic
+ enable_fast_install
+ with_gnu_ld
++with_libtool_sysroot
+ enable_libtool_lock
+ enable_host_shared
+ enable_host_pie
+@@ -1432,6 +1436,8 @@ Optional Packages:
+ --with-pic try to use only PIC/non-PIC objects [default=use
+ both]
+ --with-gnu-ld assume the C compiler uses GNU ld [default=no]
++ --with-libtool-sysroot=DIR Search for dependent libraries within DIR
++ (or the compiler's sysroot if not specified).
+
+ Some influential environment variables:
+ CC C compiler command
+@@ -4190,8 +4196,8 @@ esac
+
+
+
+-macro_version='2.2.7a'
+-macro_revision='1.3134'
++macro_version='2.4'
++macro_revision='1.3293'
+
+
+
+@@ -4231,7 +4237,7 @@ ECHO=$ECHO$ECHO$ECHO$ECHO$ECHO$ECHO
+ { $as_echo "$as_me:${as_lineno-$LINENO}: checking how to print strings" >&5
+ $as_echo_n "checking how to print strings... " >&6; }
+ # Test print first, because it will be a builtin if present.
+-if test "X`print -r -- -n 2>/dev/null`" = X-n && \
++if test "X`( print -r -- -n ) 2>/dev/null`" = X-n && \
+ test "X`print -r -- $ECHO 2>/dev/null`" = "X$ECHO"; then
+ ECHO='print -r --'
+ elif test "X`printf %s $ECHO 2>/dev/null`" = "X$ECHO"; then
+@@ -5054,8 +5060,8 @@ $as_echo_n "checking whether the shell understands some XSI constructs... " >&6;
+ # Try some XSI features
+ xsi_shell=no
+ ( _lt_dummy="a/b/c"
+- test "${_lt_dummy##*/},${_lt_dummy%/*},"${_lt_dummy%"$_lt_dummy"}, \
+- = c,a/b,, \
++ test "${_lt_dummy##*/},${_lt_dummy%/*},${_lt_dummy#??}"${_lt_dummy%"$_lt_dummy"}, \
++ = c,a/b,b/c, \
+ && eval 'test $(( 1 + 1 )) -eq 2 \
+ && test "${#_lt_dummy}" -eq 5' ) >/dev/null 2>&1 \
+ && xsi_shell=yes
+@@ -5104,6 +5110,80 @@ esac
+
+
+
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to convert $build file names to $host format" >&5
++$as_echo_n "checking how to convert $build file names to $host format... " >&6; }
++if ${lt_cv_to_host_file_cmd+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ case $host in
++ *-*-mingw* )
++ case $build in
++ *-*-mingw* ) # actually msys
++ lt_cv_to_host_file_cmd=func_convert_file_msys_to_w32
++ ;;
++ *-*-cygwin* )
++ lt_cv_to_host_file_cmd=func_convert_file_cygwin_to_w32
++ ;;
++ * ) # otherwise, assume *nix
++ lt_cv_to_host_file_cmd=func_convert_file_nix_to_w32
++ ;;
++ esac
++ ;;
++ *-*-cygwin* )
++ case $build in
++ *-*-mingw* ) # actually msys
++ lt_cv_to_host_file_cmd=func_convert_file_msys_to_cygwin
++ ;;
++ *-*-cygwin* )
++ lt_cv_to_host_file_cmd=func_convert_file_noop
++ ;;
++ * ) # otherwise, assume *nix
++ lt_cv_to_host_file_cmd=func_convert_file_nix_to_cygwin
++ ;;
++ esac
++ ;;
++ * ) # unhandled hosts (and "normal" native builds)
++ lt_cv_to_host_file_cmd=func_convert_file_noop
++ ;;
++esac
++
++fi
++
++to_host_file_cmd=$lt_cv_to_host_file_cmd
++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_to_host_file_cmd" >&5
++$as_echo "$lt_cv_to_host_file_cmd" >&6; }
++
++
++
++
++
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to convert $build file names to toolchain format" >&5
++$as_echo_n "checking how to convert $build file names to toolchain format... " >&6; }
++if ${lt_cv_to_tool_file_cmd+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ #assume ordinary cross tools, or native build.
++lt_cv_to_tool_file_cmd=func_convert_file_noop
++case $host in
++ *-*-mingw* )
++ case $build in
++ *-*-mingw* ) # actually msys
++ lt_cv_to_tool_file_cmd=func_convert_file_msys_to_w32
++ ;;
++ esac
++ ;;
++esac
++
++fi
++
++to_tool_file_cmd=$lt_cv_to_tool_file_cmd
++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_to_tool_file_cmd" >&5
++$as_echo "$lt_cv_to_tool_file_cmd" >&6; }
++
++
++
++
++
+ { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $LD option to reload object files" >&5
+ $as_echo_n "checking for $LD option to reload object files... " >&6; }
+ if ${lt_cv_ld_reload_flag+:} false; then :
+@@ -5120,6 +5200,11 @@ case $reload_flag in
+ esac
+ reload_cmds='$LD$reload_flag -o $output$reload_objs'
+ case $host_os in
++ cygwin* | mingw* | pw32* | cegcc*)
++ if test "$GCC" != yes; then
++ reload_cmds=false
++ fi
++ ;;
+ darwin*)
+ if test "$GCC" = yes; then
+ reload_cmds='$LTCC $LTCFLAGS -nostdlib ${wl}-r -o $output$reload_objs'
+@@ -5288,7 +5373,8 @@ mingw* | pw32*)
+ lt_cv_deplibs_check_method='file_magic ^x86 archive import|^x86 DLL'
+ lt_cv_file_magic_cmd='func_win32_libid'
+ else
+- lt_cv_deplibs_check_method='file_magic file format pei*-i386(.*architecture: i386)?'
++ # Keep this pattern in sync with the one in func_win32_libid.
++ lt_cv_deplibs_check_method='file_magic file format (pei*-i386(.*architecture: i386)?|pe-arm-wince|pe-x86-64)'
+ lt_cv_file_magic_cmd='$OBJDUMP -f'
+ fi
+ ;;
+@@ -5447,6 +5533,21 @@ esac
+ fi
+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_deplibs_check_method" >&5
+ $as_echo "$lt_cv_deplibs_check_method" >&6; }
++
++file_magic_glob=
++want_nocaseglob=no
++if test "$build" = "$host"; then
++ case $host_os in
++ mingw* | pw32*)
++ if ( shopt | grep nocaseglob ) >/dev/null 2>&1; then
++ want_nocaseglob=yes
++ else
++ file_magic_glob=`echo aAbBcCdDeEfFgGhHiIjJkKlLmMnNoOpPqQrRsStTuUvVwWxXyYzZ | $SED -e "s/\(..\)/s\/[\1]\/[\1]\/g;/g"`
++ fi
++ ;;
++ esac
++fi
++
+ file_magic_cmd=$lt_cv_file_magic_cmd
+ deplibs_check_method=$lt_cv_deplibs_check_method
+ test -z "$deplibs_check_method" && deplibs_check_method=unknown
+@@ -5462,6 +5563,158 @@ test -z "$deplibs_check_method" && deplibs_check_method=unknown
+
+
+
++
++
++
++
++
++
++
++
++
++
++if test -n "$ac_tool_prefix"; then
++ # Extract the first word of "${ac_tool_prefix}dlltool", so it can be a program name with args.
++set dummy ${ac_tool_prefix}dlltool; ac_word=$2
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
++$as_echo_n "checking for $ac_word... " >&6; }
++if ${ac_cv_prog_DLLTOOL+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ if test -n "$DLLTOOL"; then
++ ac_cv_prog_DLLTOOL="$DLLTOOL" # Let the user override the test.
++else
++as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
++for as_dir in $PATH
++do
++ IFS=$as_save_IFS
++ test -z "$as_dir" && as_dir=.
++ for ac_exec_ext in '' $ac_executable_extensions; do
++ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
++ ac_cv_prog_DLLTOOL="${ac_tool_prefix}dlltool"
++ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
++ break 2
++ fi
++done
++ done
++IFS=$as_save_IFS
++
++fi
++fi
++DLLTOOL=$ac_cv_prog_DLLTOOL
++if test -n "$DLLTOOL"; then
++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $DLLTOOL" >&5
++$as_echo "$DLLTOOL" >&6; }
++else
++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
++$as_echo "no" >&6; }
++fi
++
++
++fi
++if test -z "$ac_cv_prog_DLLTOOL"; then
++ ac_ct_DLLTOOL=$DLLTOOL
++ # Extract the first word of "dlltool", so it can be a program name with args.
++set dummy dlltool; ac_word=$2
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
++$as_echo_n "checking for $ac_word... " >&6; }
++if ${ac_cv_prog_ac_ct_DLLTOOL+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ if test -n "$ac_ct_DLLTOOL"; then
++ ac_cv_prog_ac_ct_DLLTOOL="$ac_ct_DLLTOOL" # Let the user override the test.
++else
++as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
++for as_dir in $PATH
++do
++ IFS=$as_save_IFS
++ test -z "$as_dir" && as_dir=.
++ for ac_exec_ext in '' $ac_executable_extensions; do
++ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
++ ac_cv_prog_ac_ct_DLLTOOL="dlltool"
++ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
++ break 2
++ fi
++done
++ done
++IFS=$as_save_IFS
++
++fi
++fi
++ac_ct_DLLTOOL=$ac_cv_prog_ac_ct_DLLTOOL
++if test -n "$ac_ct_DLLTOOL"; then
++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_DLLTOOL" >&5
++$as_echo "$ac_ct_DLLTOOL" >&6; }
++else
++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
++$as_echo "no" >&6; }
++fi
++
++ if test "x$ac_ct_DLLTOOL" = x; then
++ DLLTOOL="false"
++ else
++ case $cross_compiling:$ac_tool_warned in
++yes:)
++{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
++$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
++ac_tool_warned=yes ;;
++esac
++ DLLTOOL=$ac_ct_DLLTOOL
++ fi
++else
++ DLLTOOL="$ac_cv_prog_DLLTOOL"
++fi
++
++test -z "$DLLTOOL" && DLLTOOL=dlltool
++
++
++
++
++
++
++
++
++
++
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to associate runtime and link libraries" >&5
++$as_echo_n "checking how to associate runtime and link libraries... " >&6; }
++if ${lt_cv_sharedlib_from_linklib_cmd+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ lt_cv_sharedlib_from_linklib_cmd='unknown'
++
++case $host_os in
++cygwin* | mingw* | pw32* | cegcc*)
++ # two different shell functions defined in ltmain.sh
++ # decide which to use based on capabilities of $DLLTOOL
++ case `$DLLTOOL --help 2>&1` in
++ *--identify-strict*)
++ lt_cv_sharedlib_from_linklib_cmd=func_cygming_dll_for_implib
++ ;;
++ *)
++ lt_cv_sharedlib_from_linklib_cmd=func_cygming_dll_for_implib_fallback
++ ;;
++ esac
++ ;;
++*)
++ # fallback: assume linklib IS sharedlib
++ lt_cv_sharedlib_from_linklib_cmd="$ECHO"
++ ;;
++esac
++
++fi
++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_sharedlib_from_linklib_cmd" >&5
++$as_echo "$lt_cv_sharedlib_from_linklib_cmd" >&6; }
++sharedlib_from_linklib_cmd=$lt_cv_sharedlib_from_linklib_cmd
++test -z "$sharedlib_from_linklib_cmd" && sharedlib_from_linklib_cmd=$ECHO
++
++
++
++
++
++
++
++
+ plugin_option=
+ plugin_names="liblto_plugin.so liblto_plugin-0.dll cyglto_plugin-0.dll"
+ for plugin in $plugin_names; do
+@@ -5476,8 +5729,10 @@ for plugin in $plugin_names; do
+ done
+
+ if test -n "$ac_tool_prefix"; then
+- # Extract the first word of "${ac_tool_prefix}ar", so it can be a program name with args.
+-set dummy ${ac_tool_prefix}ar; ac_word=$2
++ for ac_prog in ar
++ do
++ # Extract the first word of "$ac_tool_prefix$ac_prog", so it can be a program name with args.
++set dummy $ac_tool_prefix$ac_prog; ac_word=$2
+ { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+ $as_echo_n "checking for $ac_word... " >&6; }
+ if ${ac_cv_prog_AR+:} false; then :
+@@ -5493,7 +5748,7 @@ do
+ test -z "$as_dir" && as_dir=.
+ for ac_exec_ext in '' $ac_executable_extensions; do
+ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
+- ac_cv_prog_AR="${ac_tool_prefix}ar"
++ ac_cv_prog_AR="$ac_tool_prefix$ac_prog"
+ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+ break 2
+ fi
+@@ -5513,11 +5768,15 @@ $as_echo "no" >&6; }
+ fi
+
+
++ test -n "$AR" && break
++ done
+ fi
+-if test -z "$ac_cv_prog_AR"; then
++if test -z "$AR"; then
+ ac_ct_AR=$AR
+- # Extract the first word of "ar", so it can be a program name with args.
+-set dummy ar; ac_word=$2
++ for ac_prog in ar
++do
++ # Extract the first word of "$ac_prog", so it can be a program name with args.
++set dummy $ac_prog; ac_word=$2
+ { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+ $as_echo_n "checking for $ac_word... " >&6; }
+ if ${ac_cv_prog_ac_ct_AR+:} false; then :
+@@ -5533,7 +5792,7 @@ do
+ test -z "$as_dir" && as_dir=.
+ for ac_exec_ext in '' $ac_executable_extensions; do
+ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
+- ac_cv_prog_ac_ct_AR="ar"
++ ac_cv_prog_ac_ct_AR="$ac_prog"
+ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+ break 2
+ fi
+@@ -5552,6 +5811,10 @@ else
+ $as_echo "no" >&6; }
+ fi
+
++
++ test -n "$ac_ct_AR" && break
++done
++
+ if test "x$ac_ct_AR" = x; then
+ AR="false"
+ else
+@@ -5563,25 +5826,19 @@ ac_tool_warned=yes ;;
+ esac
+ AR=$ac_ct_AR
+ fi
+-else
+- AR="$ac_cv_prog_AR"
+ fi
+
+-test -z "$AR" && AR=ar
+-if test -n "$plugin_option"; then
+- if $AR --help 2>&1 | grep -q "\--plugin"; then
+- touch conftest.c
+- $AR $plugin_option rc conftest.a conftest.c
+- if test "$?" != 0; then
+- { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: Failed: $AR $plugin_option rc" >&5
++ touch conftest.c
++ $AR $plugin_option rc conftest.a conftest.c
++ if test "$?" != 0; then
++ { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: Failed: $AR $plugin_option rc" >&5
+ $as_echo "$as_me: WARNING: Failed: $AR $plugin_option rc" >&2;}
+- else
+- AR="$AR $plugin_option"
+- fi
+- rm -f conftest.*
++ else
++ AR="$AR $plugin_option"
+ fi
+-fi
+-test -z "$AR_FLAGS" && AR_FLAGS=cru
++ rm -f conftest.*
++: ${AR=ar}
++: ${AR_FLAGS=cru}
+
+
+
+@@ -5593,6 +5850,64 @@ test -z "$AR_FLAGS" && AR_FLAGS=cru
+
+
+
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for archiver @FILE support" >&5
++$as_echo_n "checking for archiver @FILE support... " >&6; }
++if ${lt_cv_ar_at_file+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ lt_cv_ar_at_file=no
++ cat confdefs.h - <<_ACEOF >conftest.$ac_ext
++/* end confdefs.h. */
++
++int
++main ()
++{
++
++ ;
++ return 0;
++}
++_ACEOF
++if ac_fn_c_try_compile "$LINENO"; then :
++ echo conftest.$ac_objext > conftest.lst
++ lt_ar_try='$AR $AR_FLAGS libconftest.a @conftest.lst >&5'
++ { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$lt_ar_try\""; } >&5
++ (eval $lt_ar_try) 2>&5
++ ac_status=$?
++ $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
++ test $ac_status = 0; }
++ if test "$ac_status" -eq 0; then
++ # Ensure the archiver fails upon bogus file names.
++ rm -f conftest.$ac_objext libconftest.a
++ { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$lt_ar_try\""; } >&5
++ (eval $lt_ar_try) 2>&5
++ ac_status=$?
++ $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
++ test $ac_status = 0; }
++ if test "$ac_status" -ne 0; then
++ lt_cv_ar_at_file=@
++ fi
++ fi
++ rm -f conftest.* libconftest.a
++
++fi
++rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
++
++fi
++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_ar_at_file" >&5
++$as_echo "$lt_cv_ar_at_file" >&6; }
++
++if test "x$lt_cv_ar_at_file" = xno; then
++ archiver_list_spec=
++else
++ archiver_list_spec=$lt_cv_ar_at_file
++fi
++
++
++
++
++
++
++
+ if test -n "$ac_tool_prefix"; then
+ # Extract the first word of "${ac_tool_prefix}strip", so it can be a program name with args.
+ set dummy ${ac_tool_prefix}strip; ac_word=$2
+@@ -5932,8 +6247,8 @@ esac
+ lt_cv_sys_global_symbol_to_cdecl="sed -n -e 's/^T .* \(.*\)$/extern int \1();/p' -e 's/^$symcode* .* \(.*\)$/extern char \1;/p'"
+
+ # Transform an extracted symbol line into symbol name and symbol address
+-lt_cv_sys_global_symbol_to_c_name_address="sed -n -e 's/^: \([^ ]*\) $/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"\2\", (void *) \&\2},/p'"
+-lt_cv_sys_global_symbol_to_c_name_address_lib_prefix="sed -n -e 's/^: \([^ ]*\) $/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \(lib[^ ]*\)$/ {\"\2\", (void *) \&\2},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"lib\2\", (void *) \&\2},/p'"
++lt_cv_sys_global_symbol_to_c_name_address="sed -n -e 's/^: \([^ ]*\)[ ]*$/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"\2\", (void *) \&\2},/p'"
++lt_cv_sys_global_symbol_to_c_name_address_lib_prefix="sed -n -e 's/^: \([^ ]*\)[ ]*$/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \(lib[^ ]*\)$/ {\"\2\", (void *) \&\2},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"lib\2\", (void *) \&\2},/p'"
+
+ # Handle CRLF in mingw tool chain
+ opt_cr=
+@@ -5969,6 +6284,7 @@ for ac_symprfx in "" "_"; do
+ else
+ lt_cv_sys_global_symbol_pipe="sed -n -e 's/^.*[ ]\($symcode$symcode*\)[ ][ ]*$ac_symprfx$sympat$opt_cr$/$symxfrm/p'"
+ fi
++ lt_cv_sys_global_symbol_pipe="$lt_cv_sys_global_symbol_pipe | sed '/ __gnu_lto/d'"
+
+ # Check to see that the pipe works correctly.
+ pipe_works=no
+@@ -6010,6 +6326,18 @@ _LT_EOF
+ if $GREP ' nm_test_var$' "$nlist" >/dev/null; then
+ if $GREP ' nm_test_func$' "$nlist" >/dev/null; then
+ cat <<_LT_EOF > conftest.$ac_ext
++/* Keep this code in sync between libtool.m4, ltmain, lt_system.h, and tests. */
++#if defined(_WIN32) || defined(__CYGWIN__) || defined(_WIN32_WCE)
++/* DATA imports from DLLs on WIN32 con't be const, because runtime
++ relocations are performed -- see ld's documentation on pseudo-relocs. */
++# define LT_DLSYM_CONST
++#elif defined(__osf__)
++/* This system does not cope well with relocations in const data. */
++# define LT_DLSYM_CONST
++#else
++# define LT_DLSYM_CONST const
++#endif
++
+ #ifdef __cplusplus
+ extern "C" {
+ #endif
+@@ -6021,7 +6349,7 @@ _LT_EOF
+ cat <<_LT_EOF >> conftest.$ac_ext
+
+ /* The mapping between symbol names and symbols. */
+-const struct {
++LT_DLSYM_CONST struct {
+ const char *name;
+ void *address;
+ }
+@@ -6047,8 +6375,8 @@ static const void *lt_preloaded_setup() {
+ _LT_EOF
+ # Now try linking the two files.
+ mv conftest.$ac_objext conftstm.$ac_objext
+- lt_save_LIBS="$LIBS"
+- lt_save_CFLAGS="$CFLAGS"
++ lt_globsym_save_LIBS=$LIBS
++ lt_globsym_save_CFLAGS=$CFLAGS
+ LIBS="conftstm.$ac_objext"
+ CFLAGS="$CFLAGS$lt_prog_compiler_no_builtin_flag"
+ if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_link\""; } >&5
+@@ -6058,8 +6386,8 @@ _LT_EOF
+ test $ac_status = 0; } && test -s conftest${ac_exeext}; then
+ pipe_works=yes
+ fi
+- LIBS="$lt_save_LIBS"
+- CFLAGS="$lt_save_CFLAGS"
++ LIBS=$lt_globsym_save_LIBS
++ CFLAGS=$lt_globsym_save_CFLAGS
+ else
+ echo "cannot find nm_test_func in $nlist" >&5
+ fi
+@@ -6096,6 +6424,17 @@ else
+ $as_echo "ok" >&6; }
+ fi
+
++# Response file support.
++if test "$lt_cv_nm_interface" = "MS dumpbin"; then
++ nm_file_list_spec='@'
++elif $NM --help 2>/dev/null | grep '[@]FILE' >/dev/null; then
++ nm_file_list_spec='@'
++fi
++
++
++
++
++
+
+
+
+@@ -6113,6 +6452,43 @@ fi
+
+
+
++
++
++
++
++
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for sysroot" >&5
++$as_echo_n "checking for sysroot... " >&6; }
++
++# Check whether --with-libtool-sysroot was given.
++if test "${with_libtool_sysroot+set}" = set; then :
++ withval=$with_libtool_sysroot;
++else
++ with_libtool_sysroot=no
++fi
++
++
++lt_sysroot=
++case ${with_libtool_sysroot} in #(
++ yes)
++ if test "$GCC" = yes; then
++ lt_sysroot=`$CC --print-sysroot 2>/dev/null`
++ fi
++ ;; #(
++ /*)
++ lt_sysroot=`echo "$with_libtool_sysroot" | sed -e "$sed_quote_subst"`
++ ;; #(
++ no|'')
++ ;; #(
++ *)
++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: ${with_libtool_sysroot}" >&5
++$as_echo "${with_libtool_sysroot}" >&6; }
++ as_fn_error $? "The sysroot must be an absolute path." "$LINENO" 5
++ ;;
++esac
++
++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: ${lt_sysroot:-no}" >&5
++$as_echo "${lt_sysroot:-no}" >&6; }
+
+
+
+@@ -6327,6 +6703,123 @@ esac
+
+ need_locks="$enable_libtool_lock"
+
++if test -n "$ac_tool_prefix"; then
++ # Extract the first word of "${ac_tool_prefix}mt", so it can be a program name with args.
++set dummy ${ac_tool_prefix}mt; ac_word=$2
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
++$as_echo_n "checking for $ac_word... " >&6; }
++if ${ac_cv_prog_MANIFEST_TOOL+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ if test -n "$MANIFEST_TOOL"; then
++ ac_cv_prog_MANIFEST_TOOL="$MANIFEST_TOOL" # Let the user override the test.
++else
++as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
++for as_dir in $PATH
++do
++ IFS=$as_save_IFS
++ test -z "$as_dir" && as_dir=.
++ for ac_exec_ext in '' $ac_executable_extensions; do
++ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
++ ac_cv_prog_MANIFEST_TOOL="${ac_tool_prefix}mt"
++ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
++ break 2
++ fi
++done
++ done
++IFS=$as_save_IFS
++
++fi
++fi
++MANIFEST_TOOL=$ac_cv_prog_MANIFEST_TOOL
++if test -n "$MANIFEST_TOOL"; then
++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $MANIFEST_TOOL" >&5
++$as_echo "$MANIFEST_TOOL" >&6; }
++else
++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
++$as_echo "no" >&6; }
++fi
++
++
++fi
++if test -z "$ac_cv_prog_MANIFEST_TOOL"; then
++ ac_ct_MANIFEST_TOOL=$MANIFEST_TOOL
++ # Extract the first word of "mt", so it can be a program name with args.
++set dummy mt; ac_word=$2
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
++$as_echo_n "checking for $ac_word... " >&6; }
++if ${ac_cv_prog_ac_ct_MANIFEST_TOOL+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ if test -n "$ac_ct_MANIFEST_TOOL"; then
++ ac_cv_prog_ac_ct_MANIFEST_TOOL="$ac_ct_MANIFEST_TOOL" # Let the user override the test.
++else
++as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
++for as_dir in $PATH
++do
++ IFS=$as_save_IFS
++ test -z "$as_dir" && as_dir=.
++ for ac_exec_ext in '' $ac_executable_extensions; do
++ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
++ ac_cv_prog_ac_ct_MANIFEST_TOOL="mt"
++ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
++ break 2
++ fi
++done
++ done
++IFS=$as_save_IFS
++
++fi
++fi
++ac_ct_MANIFEST_TOOL=$ac_cv_prog_ac_ct_MANIFEST_TOOL
++if test -n "$ac_ct_MANIFEST_TOOL"; then
++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_MANIFEST_TOOL" >&5
++$as_echo "$ac_ct_MANIFEST_TOOL" >&6; }
++else
++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
++$as_echo "no" >&6; }
++fi
++
++ if test "x$ac_ct_MANIFEST_TOOL" = x; then
++ MANIFEST_TOOL=":"
++ else
++ case $cross_compiling:$ac_tool_warned in
++yes:)
++{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
++$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
++ac_tool_warned=yes ;;
++esac
++ MANIFEST_TOOL=$ac_ct_MANIFEST_TOOL
++ fi
++else
++ MANIFEST_TOOL="$ac_cv_prog_MANIFEST_TOOL"
++fi
++
++test -z "$MANIFEST_TOOL" && MANIFEST_TOOL=mt
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking if $MANIFEST_TOOL is a manifest tool" >&5
++$as_echo_n "checking if $MANIFEST_TOOL is a manifest tool... " >&6; }
++if ${lt_cv_path_mainfest_tool+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ lt_cv_path_mainfest_tool=no
++ echo "$as_me:$LINENO: $MANIFEST_TOOL '-?'" >&5
++ $MANIFEST_TOOL '-?' 2>conftest.err > conftest.out
++ cat conftest.err >&5
++ if $GREP 'Manifest Tool' conftest.out > /dev/null; then
++ lt_cv_path_mainfest_tool=yes
++ fi
++ rm -f conftest*
++fi
++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_path_mainfest_tool" >&5
++$as_echo "$lt_cv_path_mainfest_tool" >&6; }
++if test "x$lt_cv_path_mainfest_tool" != xyes; then
++ MANIFEST_TOOL=:
++fi
++
++
++
++
++
+
+ case $host_os in
+ rhapsody* | darwin*)
+@@ -6893,6 +7386,8 @@ _LT_EOF
+ $LTCC $LTCFLAGS -c -o conftest.o conftest.c 2>&5
+ echo "$AR cru libconftest.a conftest.o" >&5
+ $AR cru libconftest.a conftest.o 2>&5
++ echo "$RANLIB libconftest.a" >&5
++ $RANLIB libconftest.a 2>&5
+ cat > conftest.c << _LT_EOF
+ int main() { return 0;}
+ _LT_EOF
+@@ -7744,8 +8239,6 @@ fi
+ lt_prog_compiler_pic=
+ lt_prog_compiler_static=
+
+-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $compiler option to produce PIC" >&5
+-$as_echo_n "checking for $compiler option to produce PIC... " >&6; }
+
+ if test "$GCC" = yes; then
+ lt_prog_compiler_wl='-Wl,'
+@@ -7911,6 +8404,12 @@ $as_echo_n "checking for $compiler option to produce PIC... " >&6; }
+ lt_prog_compiler_pic='--shared'
+ lt_prog_compiler_static='--static'
+ ;;
++ nagfor*)
++ # NAG Fortran compiler
++ lt_prog_compiler_wl='-Wl,-Wl,,'
++ lt_prog_compiler_pic='-PIC'
++ lt_prog_compiler_static='-Bstatic'
++ ;;
+ pgcc* | pgf77* | pgf90* | pgf95* | pgfortran*)
+ # Portland Group compilers (*not* the Pentium gcc compiler,
+ # which looks to be a dead project)
+@@ -7973,7 +8472,7 @@ $as_echo_n "checking for $compiler option to produce PIC... " >&6; }
+ lt_prog_compiler_pic='-KPIC'
+ lt_prog_compiler_static='-Bstatic'
+ case $cc_basename in
+- f77* | f90* | f95*)
++ f77* | f90* | f95* | sunf77* | sunf90* | sunf95*)
+ lt_prog_compiler_wl='-Qoption ld ';;
+ *)
+ lt_prog_compiler_wl='-Wl,';;
+@@ -8030,13 +8529,17 @@ case $host_os in
+ lt_prog_compiler_pic="$lt_prog_compiler_pic -DPIC"
+ ;;
+ esac
+-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_prog_compiler_pic" >&5
+-$as_echo "$lt_prog_compiler_pic" >&6; }
+-
+-
+-
+-
+
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $compiler option to produce PIC" >&5
++$as_echo_n "checking for $compiler option to produce PIC... " >&6; }
++if ${lt_cv_prog_compiler_pic+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ lt_cv_prog_compiler_pic=$lt_prog_compiler_pic
++fi
++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_pic" >&5
++$as_echo "$lt_cv_prog_compiler_pic" >&6; }
++lt_prog_compiler_pic=$lt_cv_prog_compiler_pic
+
+ #
+ # Check to make sure the PIC flag actually works.
+@@ -8097,6 +8600,11 @@ fi
+
+
+
++
++
++
++
++
+ #
+ # Check to make sure the static flag actually works.
+ #
+@@ -8447,7 +8955,8 @@ _LT_EOF
+ allow_undefined_flag=unsupported
+ always_export_symbols=no
+ enable_shared_with_static_runtimes=yes
+- export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1 DATA/'\'' | $SED -e '\''/^[AITW][ ]/s/.*[ ]//'\'' | sort | uniq > $export_symbols'
++ export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1 DATA/;s/^.*[ ]__nm__\([^ ]*\)[ ][^ ]*/\1 DATA/;/^I[ ]/d;/^[AITW][ ]/s/.* //'\'' | sort | uniq > $export_symbols'
++ exclude_expsyms='[_]+GLOBAL_OFFSET_TABLE_|[_]+GLOBAL__[FID]_.*|[_]+head_[A-Za-z0-9_]+_dll|[A-Za-z0-9_]+_dll_iname'
+
+ if $LD --help 2>&1 | $GREP 'auto-import' > /dev/null; then
+ archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib'
+@@ -8546,12 +9055,12 @@ _LT_EOF
+ whole_archive_flag_spec='--whole-archive$convenience --no-whole-archive'
+ hardcode_libdir_flag_spec=
+ hardcode_libdir_flag_spec_ld='-rpath $libdir'
+- archive_cmds='$LD -shared $libobjs $deplibs $compiler_flags -soname $soname -o $lib'
++ archive_cmds='$LD -shared $libobjs $deplibs $linker_flags -soname $soname -o $lib'
+ if test "x$supports_anon_versioning" = xyes; then
+ archive_expsym_cmds='echo "{ global:" > $output_objdir/$libname.ver~
+ cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~
+ echo "local: *; };" >> $output_objdir/$libname.ver~
+- $LD -shared $libobjs $deplibs $compiler_flags -soname $soname -version-script $output_objdir/$libname.ver -o $lib'
++ $LD -shared $libobjs $deplibs $linker_flags -soname $soname -version-script $output_objdir/$libname.ver -o $lib'
+ fi
+ ;;
+ esac
+@@ -8565,8 +9074,8 @@ _LT_EOF
+ archive_cmds='$LD -Bshareable $libobjs $deplibs $linker_flags -o $lib'
+ wlarc=
+ else
+- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
+- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
++ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
++ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
+ fi
+ ;;
+
+@@ -8584,8 +9093,8 @@ _LT_EOF
+
+ _LT_EOF
+ elif $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then
+- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
+- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
++ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
++ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
+ else
+ ld_shlibs=no
+ fi
+@@ -8631,8 +9140,8 @@ _LT_EOF
+
+ *)
+ if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then
+- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
+- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
++ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
++ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
+ else
+ ld_shlibs=no
+ fi
+@@ -8762,7 +9271,13 @@ _LT_EOF
+ allow_undefined_flag='-berok'
+ # Determine the default libpath from the value encoded in an
+ # empty executable.
+- if test x$gcc_no_link = xyes; then
++ if test "${lt_cv_aix_libpath+set}" = set; then
++ aix_libpath=$lt_cv_aix_libpath
++else
++ if ${lt_cv_aix_libpath_+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ if test x$gcc_no_link = xyes; then
+ as_fn_error $? "Link tests are not allowed after GCC_NO_EXECUTABLES." "$LINENO" 5
+ fi
+ cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+@@ -8778,22 +9293,29 @@ main ()
+ _ACEOF
+ if ac_fn_c_try_link "$LINENO"; then :
+
+-lt_aix_libpath_sed='
+- /Import File Strings/,/^$/ {
+- /^0/ {
+- s/^0 *\(.*\)$/\1/
+- p
+- }
+- }'
+-aix_libpath=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
+-# Check for a 64-bit object if we didn't find anything.
+-if test -z "$aix_libpath"; then
+- aix_libpath=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
+-fi
++ lt_aix_libpath_sed='
++ /Import File Strings/,/^$/ {
++ /^0/ {
++ s/^0 *\([^ ]*\) *$/\1/
++ p
++ }
++ }'
++ lt_cv_aix_libpath_=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
++ # Check for a 64-bit object if we didn't find anything.
++ if test -z "$lt_cv_aix_libpath_"; then
++ lt_cv_aix_libpath_=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
++ fi
+ fi
+ rm -f core conftest.err conftest.$ac_objext \
+ conftest$ac_exeext conftest.$ac_ext
+-if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
++ if test -z "$lt_cv_aix_libpath_"; then
++ lt_cv_aix_libpath_="/usr/lib:/lib"
++ fi
++
++fi
++
++ aix_libpath=$lt_cv_aix_libpath_
++fi
+
+ hardcode_libdir_flag_spec='${wl}-blibpath:$libdir:'"$aix_libpath"
+ archive_expsym_cmds='$CC -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags `if test "x${allow_undefined_flag}" != "x"; then func_echo_all "${wl}${allow_undefined_flag}"; else :; fi` '"\${wl}$exp_sym_flag:\$export_symbols $shared_flag"
+@@ -8805,7 +9327,13 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
+ else
+ # Determine the default libpath from the value encoded in an
+ # empty executable.
+- if test x$gcc_no_link = xyes; then
++ if test "${lt_cv_aix_libpath+set}" = set; then
++ aix_libpath=$lt_cv_aix_libpath
++else
++ if ${lt_cv_aix_libpath_+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ if test x$gcc_no_link = xyes; then
+ as_fn_error $? "Link tests are not allowed after GCC_NO_EXECUTABLES." "$LINENO" 5
+ fi
+ cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+@@ -8821,22 +9349,29 @@ main ()
+ _ACEOF
+ if ac_fn_c_try_link "$LINENO"; then :
+
+-lt_aix_libpath_sed='
+- /Import File Strings/,/^$/ {
+- /^0/ {
+- s/^0 *\(.*\)$/\1/
+- p
+- }
+- }'
+-aix_libpath=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
+-# Check for a 64-bit object if we didn't find anything.
+-if test -z "$aix_libpath"; then
+- aix_libpath=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
+-fi
++ lt_aix_libpath_sed='
++ /Import File Strings/,/^$/ {
++ /^0/ {
++ s/^0 *\([^ ]*\) *$/\1/
++ p
++ }
++ }'
++ lt_cv_aix_libpath_=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
++ # Check for a 64-bit object if we didn't find anything.
++ if test -z "$lt_cv_aix_libpath_"; then
++ lt_cv_aix_libpath_=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
++ fi
+ fi
+ rm -f core conftest.err conftest.$ac_objext \
+ conftest$ac_exeext conftest.$ac_ext
+-if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
++ if test -z "$lt_cv_aix_libpath_"; then
++ lt_cv_aix_libpath_="/usr/lib:/lib"
++ fi
++
++fi
++
++ aix_libpath=$lt_cv_aix_libpath_
++fi
+
+ hardcode_libdir_flag_spec='${wl}-blibpath:$libdir:'"$aix_libpath"
+ # Warning - without using the other run time loading flags,
+@@ -8881,20 +9416,63 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
+ # Microsoft Visual C++.
+ # hardcode_libdir_flag_spec is actually meaningless, as there is
+ # no search path for DLLs.
+- hardcode_libdir_flag_spec=' '
+- allow_undefined_flag=unsupported
+- # Tell ltmain to make .lib files, not .a files.
+- libext=lib
+- # Tell ltmain to make .dll files, not .so files.
+- shrext_cmds=".dll"
+- # FIXME: Setting linknames here is a bad hack.
+- archive_cmds='$CC -o $lib $libobjs $compiler_flags `func_echo_all "$deplibs" | $SED '\''s/ -lc$//'\''` -link -dll~linknames='
+- # The linker will automatically build a .lib file if we build a DLL.
+- old_archive_from_new_cmds='true'
+- # FIXME: Should let the user specify the lib program.
+- old_archive_cmds='lib -OUT:$oldlib$oldobjs$old_deplibs'
+- fix_srcfile_path='`cygpath -w "$srcfile"`'
+- enable_shared_with_static_runtimes=yes
++ case $cc_basename in
++ cl*)
++ # Native MSVC
++ hardcode_libdir_flag_spec=' '
++ allow_undefined_flag=unsupported
++ always_export_symbols=yes
++ file_list_spec='@'
++ # Tell ltmain to make .lib files, not .a files.
++ libext=lib
++ # Tell ltmain to make .dll files, not .so files.
++ shrext_cmds=".dll"
++ # FIXME: Setting linknames here is a bad hack.
++ archive_cmds='$CC -o $output_objdir/$soname $libobjs $compiler_flags $deplibs -Wl,-dll~linknames='
++ archive_expsym_cmds='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then
++ sed -n -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' -e '1\\\!p' < $export_symbols > $output_objdir/$soname.exp;
++ else
++ sed -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' < $export_symbols > $output_objdir/$soname.exp;
++ fi~
++ $CC -o $tool_output_objdir$soname $libobjs $compiler_flags $deplibs "@$tool_output_objdir$soname.exp" -Wl,-DLL,-IMPLIB:"$tool_output_objdir$libname.dll.lib"~
++ linknames='
++ # The linker will not automatically build a static lib if we build a DLL.
++ # _LT_TAGVAR(old_archive_from_new_cmds, )='true'
++ enable_shared_with_static_runtimes=yes
++ export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1,DATA/'\'' | $SED -e '\''/^[AITW][ ]/s/.*[ ]//'\'' | sort | uniq > $export_symbols'
++ # Don't use ranlib
++ old_postinstall_cmds='chmod 644 $oldlib'
++ postlink_cmds='lt_outputfile="@OUTPUT@"~
++ lt_tool_outputfile="@TOOL_OUTPUT@"~
++ case $lt_outputfile in
++ *.exe|*.EXE) ;;
++ *)
++ lt_outputfile="$lt_outputfile.exe"
++ lt_tool_outputfile="$lt_tool_outputfile.exe"
++ ;;
++ esac~
++ if test "$MANIFEST_TOOL" != ":" && test -f "$lt_outputfile.manifest"; then
++ $MANIFEST_TOOL -manifest "$lt_tool_outputfile.manifest" -outputresource:"$lt_tool_outputfile" || exit 1;
++ $RM "$lt_outputfile.manifest";
++ fi'
++ ;;
++ *)
++ # Assume MSVC wrapper
++ hardcode_libdir_flag_spec=' '
++ allow_undefined_flag=unsupported
++ # Tell ltmain to make .lib files, not .a files.
++ libext=lib
++ # Tell ltmain to make .dll files, not .so files.
++ shrext_cmds=".dll"
++ # FIXME: Setting linknames here is a bad hack.
++ archive_cmds='$CC -o $lib $libobjs $compiler_flags `func_echo_all "$deplibs" | $SED '\''s/ -lc$//'\''` -link -dll~linknames='
++ # The linker will automatically build a .lib file if we build a DLL.
++ old_archive_from_new_cmds='true'
++ # FIXME: Should let the user specify the lib program.
++ old_archive_cmds='lib -OUT:$oldlib$oldobjs$old_deplibs'
++ enable_shared_with_static_runtimes=yes
++ ;;
++ esac
+ ;;
+
+ darwin* | rhapsody*)
+@@ -8955,7 +9533,7 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
+
+ # FreeBSD 3 and greater uses gcc -shared to do shared libraries.
+ freebsd* | dragonfly*)
+- archive_cmds='$CC -shared -o $lib $libobjs $deplibs $compiler_flags'
++ archive_cmds='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags'
+ hardcode_libdir_flag_spec='-R$libdir'
+ hardcode_direct=yes
+ hardcode_shlibpath_var=no
+@@ -8963,7 +9541,7 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
+
+ hpux9*)
+ if test "$GCC" = yes; then
+- archive_cmds='$RM $output_objdir/$soname~$CC -shared -fPIC ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $libobjs $deplibs $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib'
++ archive_cmds='$RM $output_objdir/$soname~$CC -shared $pic_flag ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $libobjs $deplibs $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib'
+ else
+ archive_cmds='$RM $output_objdir/$soname~$LD -b +b $install_libdir -o $output_objdir/$soname $libobjs $deplibs $linker_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib'
+ fi
+@@ -8979,7 +9557,7 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
+
+ hpux10*)
+ if test "$GCC" = yes && test "$with_gnu_ld" = no; then
+- archive_cmds='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'
++ archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'
+ else
+ archive_cmds='$LD -b +h $soname +b $install_libdir -o $lib $libobjs $deplibs $linker_flags'
+ fi
+@@ -9003,10 +9581,10 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
+ archive_cmds='$CC -shared ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags'
+ ;;
+ ia64*)
+- archive_cmds='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags'
++ archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags'
+ ;;
+ *)
+- archive_cmds='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'
++ archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'
+ ;;
+ esac
+ else
+@@ -9085,26 +9663,39 @@ fi
+
+ irix5* | irix6* | nonstopux*)
+ if test "$GCC" = yes; then
+- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
++ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
+ # Try to use the -exported_symbol ld option, if it does not
+ # work, assume that -exports_file does not work either and
+ # implicitly export all symbols.
+- save_LDFLAGS="$LDFLAGS"
+- LDFLAGS="$LDFLAGS -shared ${wl}-exported_symbol ${wl}foo ${wl}-update_registry ${wl}/dev/null"
+- if test x$gcc_no_link = xyes; then
++ # This should be the same for all languages, so no per-tag cache variable.
++ { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the $host_os linker accepts -exported_symbol" >&5
++$as_echo_n "checking whether the $host_os linker accepts -exported_symbol... " >&6; }
++if ${lt_cv_irix_exported_symbol+:} false; then :
++ $as_echo_n "(cached) " >&6
++else
++ save_LDFLAGS="$LDFLAGS"
++ LDFLAGS="$LDFLAGS -shared ${wl}-exported_symbol ${wl}foo ${wl}-update_registry ${wl}/dev/null"
++ if test x$gcc_no_link = xyes; then
+ as_fn_error $? "Link tests are not allowed after GCC_NO_EXECUTABLES." "$LINENO" 5
+ fi
+ cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+ /* end confdefs.h. */
+-int foo(void) {}
++int foo (void) { return 0; }
+ _ACEOF
+ if ac_fn_c_try_link "$LINENO"; then :
+- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations ${wl}-exports_file ${wl}$export_symbols -o $lib'
+-
++ lt_cv_irix_exported_symbol=yes
++else
++ lt_cv_irix_exported_symbol=no
+ fi
+ rm -f core conftest.err conftest.$ac_objext \
+ conftest$ac_exeext conftest.$ac_ext
+- LDFLAGS="$save_LDFLAGS"
++ LDFLAGS="$save_LDFLAGS"
++fi
++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_irix_exported_symbol" >&5
++$as_echo "$lt_cv_irix_exported_symbol" >&6; }
++ if test "$lt_cv_irix_exported_symbol" = yes; then
++ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations ${wl}-exports_file ${wl}$export_symbols -o $lib'
++ fi
+ else
+ archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib'
+ archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -exports_file $export_symbols -o $lib'
+@@ -9189,7 +9780,7 @@ rm -f core conftest.err conftest.$ac_objext \
+ osf4* | osf5*) # as osf3* with the addition of -msym flag
+ if test "$GCC" = yes; then
+ allow_undefined_flag=' ${wl}-expect_unresolved ${wl}\*'
+- archive_cmds='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
++ archive_cmds='$CC -shared${allow_undefined_flag} $pic_flag $libobjs $deplibs $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
+ hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir'
+ else
+ allow_undefined_flag=' -expect_unresolved \*'
+@@ -9208,9 +9799,9 @@ rm -f core conftest.err conftest.$ac_objext \
+ no_undefined_flag=' -z defs'
+ if test "$GCC" = yes; then
+ wlarc='${wl}'
+- archive_cmds='$CC -shared ${wl}-z ${wl}text ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags'
++ archive_cmds='$CC -shared $pic_flag ${wl}-z ${wl}text ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags'
+ archive_expsym_cmds='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~
+- $CC -shared ${wl}-z ${wl}text ${wl}-M ${wl}$lib.exp ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp'
++ $CC -shared $pic_flag ${wl}-z ${wl}text ${wl}-M ${wl}$lib.exp ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp'
+ else
+ case `$CC -V 2>&1` in
+ *"Compilers 5.0"*)
+@@ -9786,8 +10377,9 @@ cygwin* | mingw* | pw32* | cegcc*)
+ need_version=no
+ need_lib_prefix=no
+
+- case $GCC,$host_os in
+- yes,cygwin* | yes,mingw* | yes,pw32* | yes,cegcc*)
++ case $GCC,$cc_basename in
++ yes,*)
++ # gcc
+ library_names_spec='$libname.dll.a'
+ # DLL is installed to $(libdir)/../bin by postinstall_cmds
+ postinstall_cmds='base_file=`basename \${file}`~
+@@ -9820,13 +10412,71 @@ cygwin* | mingw* | pw32* | cegcc*)
+ library_names_spec='`echo ${libname} | sed -e 's/^lib/pw/'``echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}'
+ ;;
+ esac
++ dynamic_linker='Win32 ld.exe'
++ ;;
++
++ *,cl*)
++ # Native MSVC
++ libname_spec='$name'
++ soname_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}'
++ library_names_spec='${libname}.dll.lib'
++
++ case $build_os in
++ mingw*)
++ sys_lib_search_path_spec=
++ lt_save_ifs=$IFS
++ IFS=';'
++ for lt_path in $LIB
++ do
++ IFS=$lt_save_ifs
++ # Let DOS variable expansion print the short 8.3 style file name.
++ lt_path=`cd "$lt_path" 2>/dev/null && cmd //C "for %i in (".") do @echo %~si"`
++ sys_lib_search_path_spec="$sys_lib_search_path_spec $lt_path"
++ done
++ IFS=$lt_save_ifs
++ # Convert to MSYS style.
++ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | sed -e 's|\\\\|/|g' -e 's| \\([a-zA-Z]\\):| /\\1|g' -e 's|^ ||'`
++ ;;
++ cygwin*)
++ # Convert to unix form, then to dos form, then back to unix form
++ # but this time dos style (no spaces!) so that the unix form looks
++ # like /cygdrive/c/PROGRA~1:/cygdr...
++ sys_lib_search_path_spec=`cygpath --path --unix "$LIB"`
++ sys_lib_search_path_spec=`cygpath --path --dos "$sys_lib_search_path_spec" 2>/dev/null`
++ sys_lib_search_path_spec=`cygpath --path --unix "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"`
++ ;;
++ *)
++ sys_lib_search_path_spec="$LIB"
++ if $ECHO "$sys_lib_search_path_spec" | $GREP ';[c-zC-Z]:/' >/dev/null; then
++ # It is most probably a Windows format PATH.
++ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e 's/;/ /g'`
++ else
++ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"`
++ fi
++ # FIXME: find the short name or the path components, as spaces are
++ # common. (e.g. "Program Files" -> "PROGRA~1")
++ ;;
++ esac
++
++ # DLL is installed to $(libdir)/../bin by postinstall_cmds
++ postinstall_cmds='base_file=`basename \${file}`~
++ dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\${base_file}'\''i; echo \$dlname'\''`~
++ dldir=$destdir/`dirname \$dlpath`~
++ test -d \$dldir || mkdir -p \$dldir~
++ $install_prog $dir/$dlname \$dldir/$dlname'
++ postuninstall_cmds='dldll=`$SHELL 2>&1 -c '\''. $file; echo \$dlname'\''`~
++ dlpath=$dir/\$dldll~
++ $RM \$dlpath'
++ shlibpath_overrides_runpath=yes
++ dynamic_linker='Win32 link.exe'
+ ;;
+
+ *)
++ # Assume MSVC wrapper
+ library_names_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext} $libname.lib'
++ dynamic_linker='Win32 ld.exe'
+ ;;
+ esac
+- dynamic_linker='Win32 ld.exe'
+ # FIXME: first we should search . and the directory the executable is in
+ shlibpath_var=PATH
+ ;;
+@@ -10746,7 +11396,7 @@ else
+ lt_dlunknown=0; lt_dlno_uscore=1; lt_dlneed_uscore=2
+ lt_status=$lt_dlunknown
+ cat > conftest.$ac_ext <<_LT_EOF
+-#line 10749 "configure"
++#line $LINENO "configure"
+ #include "confdefs.h"
+
+ #if HAVE_DLFCN_H
+@@ -10790,10 +11440,10 @@ else
+ /* When -fvisbility=hidden is used, assume the code has been annotated
+ correspondingly for the symbols needed. */
+ #if defined(__GNUC__) && (((__GNUC__ == 3) && (__GNUC_MINOR__ >= 3)) || (__GNUC__ > 3))
+-void fnord () __attribute__((visibility("default")));
++int fnord () __attribute__((visibility("default")));
+ #endif
+
+-void fnord () { int i=42; }
++int fnord () { return 42; }
+ int main ()
+ {
+ void *self = dlopen (0, LT_DLGLOBAL|LT_DLLAZY_OR_NOW);
+@@ -10852,7 +11502,7 @@ else
+ lt_dlunknown=0; lt_dlno_uscore=1; lt_dlneed_uscore=2
+ lt_status=$lt_dlunknown
+ cat > conftest.$ac_ext <<_LT_EOF
+-#line 10855 "configure"
++#line $LINENO "configure"
+ #include "confdefs.h"
+
+ #if HAVE_DLFCN_H
+@@ -10896,10 +11546,10 @@ else
+ /* When -fvisbility=hidden is used, assume the code has been annotated
+ correspondingly for the symbols needed. */
+ #if defined(__GNUC__) && (((__GNUC__ == 3) && (__GNUC_MINOR__ >= 3)) || (__GNUC__ > 3))
+-void fnord () __attribute__((visibility("default")));
++int fnord () __attribute__((visibility("default")));
+ #endif
+
+-void fnord () { int i=42; }
++int fnord () { return 42; }
+ int main ()
+ {
+ void *self = dlopen (0, LT_DLGLOBAL|LT_DLLAZY_OR_NOW);
+@@ -12385,13 +13035,20 @@ exeext='`$ECHO "$exeext" | $SED "$delay_single_quote_subst"`'
+ lt_unset='`$ECHO "$lt_unset" | $SED "$delay_single_quote_subst"`'
+ lt_SP2NL='`$ECHO "$lt_SP2NL" | $SED "$delay_single_quote_subst"`'
+ lt_NL2SP='`$ECHO "$lt_NL2SP" | $SED "$delay_single_quote_subst"`'
++lt_cv_to_host_file_cmd='`$ECHO "$lt_cv_to_host_file_cmd" | $SED "$delay_single_quote_subst"`'
++lt_cv_to_tool_file_cmd='`$ECHO "$lt_cv_to_tool_file_cmd" | $SED "$delay_single_quote_subst"`'
+ reload_flag='`$ECHO "$reload_flag" | $SED "$delay_single_quote_subst"`'
+ reload_cmds='`$ECHO "$reload_cmds" | $SED "$delay_single_quote_subst"`'
+ OBJDUMP='`$ECHO "$OBJDUMP" | $SED "$delay_single_quote_subst"`'
+ deplibs_check_method='`$ECHO "$deplibs_check_method" | $SED "$delay_single_quote_subst"`'
+ file_magic_cmd='`$ECHO "$file_magic_cmd" | $SED "$delay_single_quote_subst"`'
++file_magic_glob='`$ECHO "$file_magic_glob" | $SED "$delay_single_quote_subst"`'
++want_nocaseglob='`$ECHO "$want_nocaseglob" | $SED "$delay_single_quote_subst"`'
++DLLTOOL='`$ECHO "$DLLTOOL" | $SED "$delay_single_quote_subst"`'
++sharedlib_from_linklib_cmd='`$ECHO "$sharedlib_from_linklib_cmd" | $SED "$delay_single_quote_subst"`'
+ AR='`$ECHO "$AR" | $SED "$delay_single_quote_subst"`'
+ AR_FLAGS='`$ECHO "$AR_FLAGS" | $SED "$delay_single_quote_subst"`'
++archiver_list_spec='`$ECHO "$archiver_list_spec" | $SED "$delay_single_quote_subst"`'
+ STRIP='`$ECHO "$STRIP" | $SED "$delay_single_quote_subst"`'
+ RANLIB='`$ECHO "$RANLIB" | $SED "$delay_single_quote_subst"`'
+ old_postinstall_cmds='`$ECHO "$old_postinstall_cmds" | $SED "$delay_single_quote_subst"`'
+@@ -12406,14 +13063,17 @@ lt_cv_sys_global_symbol_pipe='`$ECHO "$lt_cv_sys_global_symbol_pipe" | $SED "$de
+ lt_cv_sys_global_symbol_to_cdecl='`$ECHO "$lt_cv_sys_global_symbol_to_cdecl" | $SED "$delay_single_quote_subst"`'
+ lt_cv_sys_global_symbol_to_c_name_address='`$ECHO "$lt_cv_sys_global_symbol_to_c_name_address" | $SED "$delay_single_quote_subst"`'
+ lt_cv_sys_global_symbol_to_c_name_address_lib_prefix='`$ECHO "$lt_cv_sys_global_symbol_to_c_name_address_lib_prefix" | $SED "$delay_single_quote_subst"`'
++nm_file_list_spec='`$ECHO "$nm_file_list_spec" | $SED "$delay_single_quote_subst"`'
++lt_sysroot='`$ECHO "$lt_sysroot" | $SED "$delay_single_quote_subst"`'
+ objdir='`$ECHO "$objdir" | $SED "$delay_single_quote_subst"`'
+ MAGIC_CMD='`$ECHO "$MAGIC_CMD" | $SED "$delay_single_quote_subst"`'
+ lt_prog_compiler_no_builtin_flag='`$ECHO "$lt_prog_compiler_no_builtin_flag" | $SED "$delay_single_quote_subst"`'
+-lt_prog_compiler_wl='`$ECHO "$lt_prog_compiler_wl" | $SED "$delay_single_quote_subst"`'
+ lt_prog_compiler_pic='`$ECHO "$lt_prog_compiler_pic" | $SED "$delay_single_quote_subst"`'
++lt_prog_compiler_wl='`$ECHO "$lt_prog_compiler_wl" | $SED "$delay_single_quote_subst"`'
+ lt_prog_compiler_static='`$ECHO "$lt_prog_compiler_static" | $SED "$delay_single_quote_subst"`'
+ lt_cv_prog_compiler_c_o='`$ECHO "$lt_cv_prog_compiler_c_o" | $SED "$delay_single_quote_subst"`'
+ need_locks='`$ECHO "$need_locks" | $SED "$delay_single_quote_subst"`'
++MANIFEST_TOOL='`$ECHO "$MANIFEST_TOOL" | $SED "$delay_single_quote_subst"`'
+ DSYMUTIL='`$ECHO "$DSYMUTIL" | $SED "$delay_single_quote_subst"`'
+ NMEDIT='`$ECHO "$NMEDIT" | $SED "$delay_single_quote_subst"`'
+ LIPO='`$ECHO "$LIPO" | $SED "$delay_single_quote_subst"`'
+@@ -12446,12 +13106,12 @@ hardcode_shlibpath_var='`$ECHO "$hardcode_shlibpath_var" | $SED "$delay_single_q
+ hardcode_automatic='`$ECHO "$hardcode_automatic" | $SED "$delay_single_quote_subst"`'
+ inherit_rpath='`$ECHO "$inherit_rpath" | $SED "$delay_single_quote_subst"`'
+ link_all_deplibs='`$ECHO "$link_all_deplibs" | $SED "$delay_single_quote_subst"`'
+-fix_srcfile_path='`$ECHO "$fix_srcfile_path" | $SED "$delay_single_quote_subst"`'
+ always_export_symbols='`$ECHO "$always_export_symbols" | $SED "$delay_single_quote_subst"`'
+ export_symbols_cmds='`$ECHO "$export_symbols_cmds" | $SED "$delay_single_quote_subst"`'
+ exclude_expsyms='`$ECHO "$exclude_expsyms" | $SED "$delay_single_quote_subst"`'
+ include_expsyms='`$ECHO "$include_expsyms" | $SED "$delay_single_quote_subst"`'
+ prelink_cmds='`$ECHO "$prelink_cmds" | $SED "$delay_single_quote_subst"`'
++postlink_cmds='`$ECHO "$postlink_cmds" | $SED "$delay_single_quote_subst"`'
+ file_list_spec='`$ECHO "$file_list_spec" | $SED "$delay_single_quote_subst"`'
+ variables_saved_for_relink='`$ECHO "$variables_saved_for_relink" | $SED "$delay_single_quote_subst"`'
+ need_lib_prefix='`$ECHO "$need_lib_prefix" | $SED "$delay_single_quote_subst"`'
+@@ -12506,8 +13166,13 @@ reload_flag \
+ OBJDUMP \
+ deplibs_check_method \
+ file_magic_cmd \
++file_magic_glob \
++want_nocaseglob \
++DLLTOOL \
++sharedlib_from_linklib_cmd \
+ AR \
+ AR_FLAGS \
++archiver_list_spec \
+ STRIP \
+ RANLIB \
+ CC \
+@@ -12517,12 +13182,14 @@ lt_cv_sys_global_symbol_pipe \
+ lt_cv_sys_global_symbol_to_cdecl \
+ lt_cv_sys_global_symbol_to_c_name_address \
+ lt_cv_sys_global_symbol_to_c_name_address_lib_prefix \
++nm_file_list_spec \
+ lt_prog_compiler_no_builtin_flag \
+-lt_prog_compiler_wl \
+ lt_prog_compiler_pic \
++lt_prog_compiler_wl \
+ lt_prog_compiler_static \
+ lt_cv_prog_compiler_c_o \
+ need_locks \
++MANIFEST_TOOL \
+ DSYMUTIL \
+ NMEDIT \
+ LIPO \
+@@ -12538,7 +13205,6 @@ no_undefined_flag \
+ hardcode_libdir_flag_spec \
+ hardcode_libdir_flag_spec_ld \
+ hardcode_libdir_separator \
+-fix_srcfile_path \
+ exclude_expsyms \
+ include_expsyms \
+ file_list_spec \
+@@ -12574,6 +13240,7 @@ module_cmds \
+ module_expsym_cmds \
+ export_symbols_cmds \
+ prelink_cmds \
++postlink_cmds \
+ postinstall_cmds \
+ postuninstall_cmds \
+ finish_cmds \
+@@ -13172,7 +13839,8 @@ $as_echo X"$file" |
+ # NOTE: Changes made to this file will be lost: look at ltmain.sh.
+ #
+ # Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005,
+-# 2006, 2007, 2008, 2009 Free Software Foundation, Inc.
++# 2006, 2007, 2008, 2009, 2010 Free Software Foundation,
++# Inc.
+ # Written by Gordon Matzigkeit, 1996
+ #
+ # This file is part of GNU Libtool.
+@@ -13275,19 +13943,42 @@ SP2NL=$lt_lt_SP2NL
+ # turn newlines into spaces.
+ NL2SP=$lt_lt_NL2SP
+
++# convert \$build file names to \$host format.
++to_host_file_cmd=$lt_cv_to_host_file_cmd
++
++# convert \$build files to toolchain format.
++to_tool_file_cmd=$lt_cv_to_tool_file_cmd
++
+ # An object symbol dumper.
+ OBJDUMP=$lt_OBJDUMP
+
+ # Method to check whether dependent libraries are shared objects.
+ deplibs_check_method=$lt_deplibs_check_method
+
+-# Command to use when deplibs_check_method == "file_magic".
++# Command to use when deplibs_check_method = "file_magic".
+ file_magic_cmd=$lt_file_magic_cmd
+
++# How to find potential files when deplibs_check_method = "file_magic".
++file_magic_glob=$lt_file_magic_glob
++
++# Find potential files using nocaseglob when deplibs_check_method = "file_magic".
++want_nocaseglob=$lt_want_nocaseglob
++
++# DLL creation program.
++DLLTOOL=$lt_DLLTOOL
++
++# Command to associate shared and link libraries.
++sharedlib_from_linklib_cmd=$lt_sharedlib_from_linklib_cmd
++
+ # The archiver.
+ AR=$lt_AR
++
++# Flags to create an archive.
+ AR_FLAGS=$lt_AR_FLAGS
+
++# How to feed a file listing to the archiver.
++archiver_list_spec=$lt_archiver_list_spec
++
+ # A symbol stripping program.
+ STRIP=$lt_STRIP
+
+@@ -13317,6 +14008,12 @@ global_symbol_to_c_name_address=$lt_lt_cv_sys_global_symbol_to_c_name_address
+ # Transform the output of nm in a C name address pair when lib prefix is needed.
+ global_symbol_to_c_name_address_lib_prefix=$lt_lt_cv_sys_global_symbol_to_c_name_address_lib_prefix
+
++# Specify filename containing input files for \$NM.
++nm_file_list_spec=$lt_nm_file_list_spec
++
++# The root where to search for dependent libraries,and in which our libraries should be installed.
++lt_sysroot=$lt_sysroot
++
+ # The name of the directory that contains temporary libtool files.
+ objdir=$objdir
+
+@@ -13326,6 +14023,9 @@ MAGIC_CMD=$MAGIC_CMD
+ # Must we lock files when doing compilation?
+ need_locks=$lt_need_locks
+
++# Manifest tool.
++MANIFEST_TOOL=$lt_MANIFEST_TOOL
++
+ # Tool to manipulate archived DWARF debug symbol files on Mac OS X.
+ DSYMUTIL=$lt_DSYMUTIL
+
+@@ -13440,12 +14140,12 @@ with_gcc=$GCC
+ # Compiler flag to turn off builtin functions.
+ no_builtin_flag=$lt_lt_prog_compiler_no_builtin_flag
+
+-# How to pass a linker flag through the compiler.
+-wl=$lt_lt_prog_compiler_wl
+-
+ # Additional compiler flags for building library objects.
+ pic_flag=$lt_lt_prog_compiler_pic
+
++# How to pass a linker flag through the compiler.
++wl=$lt_lt_prog_compiler_wl
++
+ # Compiler flag to prevent dynamic linking.
+ link_static_flag=$lt_lt_prog_compiler_static
+
+@@ -13532,9 +14232,6 @@ inherit_rpath=$inherit_rpath
+ # Whether libtool must link a program against all its dependency libraries.
+ link_all_deplibs=$link_all_deplibs
+
+-# Fix the shell variable \$srcfile for the compiler.
+-fix_srcfile_path=$lt_fix_srcfile_path
+-
+ # Set to "yes" if exported symbols are required.
+ always_export_symbols=$always_export_symbols
+
+@@ -13550,6 +14247,9 @@ include_expsyms=$lt_include_expsyms
+ # Commands necessary for linking programs (against libraries) with templates.
+ prelink_cmds=$lt_prelink_cmds
+
++# Commands necessary for finishing linking programs.
++postlink_cmds=$lt_postlink_cmds
++
+ # Specify filename containing input files.
+ file_list_spec=$lt_file_list_spec
+
+@@ -13582,210 +14282,169 @@ ltmain="$ac_aux_dir/ltmain.sh"
+ # if finds mixed CR/LF and LF-only lines. Since sed operates in
+ # text mode, it properly converts lines to CR/LF. This bash problem
+ # is reportedly fixed, but why not run on old versions too?
+- sed '/^# Generated shell functions inserted here/q' "$ltmain" >> "$cfgfile" \
+- || (rm -f "$cfgfile"; exit 1)
+-
+- case $xsi_shell in
+- yes)
+- cat << \_LT_EOF >> "$cfgfile"
+-
+-# func_dirname file append nondir_replacement
+-# Compute the dirname of FILE. If nonempty, add APPEND to the result,
+-# otherwise set result to NONDIR_REPLACEMENT.
+-func_dirname ()
+-{
+- case ${1} in
+- */*) func_dirname_result="${1%/*}${2}" ;;
+- * ) func_dirname_result="${3}" ;;
+- esac
+-}
+-
+-# func_basename file
+-func_basename ()
+-{
+- func_basename_result="${1##*/}"
+-}
+-
+-# func_dirname_and_basename file append nondir_replacement
+-# perform func_basename and func_dirname in a single function
+-# call:
+-# dirname: Compute the dirname of FILE. If nonempty,
+-# add APPEND to the result, otherwise set result
+-# to NONDIR_REPLACEMENT.
+-# value returned in "$func_dirname_result"
+-# basename: Compute filename of FILE.
+-# value retuned in "$func_basename_result"
+-# Implementation must be kept synchronized with func_dirname
+-# and func_basename. For efficiency, we do not delegate to
+-# those functions but instead duplicate the functionality here.
+-func_dirname_and_basename ()
+-{
+- case ${1} in
+- */*) func_dirname_result="${1%/*}${2}" ;;
+- * ) func_dirname_result="${3}" ;;
+- esac
+- func_basename_result="${1##*/}"
+-}
+-
+-# func_stripname prefix suffix name
+-# strip PREFIX and SUFFIX off of NAME.
+-# PREFIX and SUFFIX must not contain globbing or regex special
+-# characters, hashes, percent signs, but SUFFIX may contain a leading
+-# dot (in which case that matches only a dot).
+-func_stripname ()
+-{
+- # pdksh 5.2.14 does not do ${X%$Y} correctly if both X and Y are
+- # positional parameters, so assign one to ordinary parameter first.
+- func_stripname_result=${3}
+- func_stripname_result=${func_stripname_result#"${1}"}
+- func_stripname_result=${func_stripname_result%"${2}"}
+-}
+-
+-# func_opt_split
+-func_opt_split ()
+-{
+- func_opt_split_opt=${1%%=*}
+- func_opt_split_arg=${1#*=}
+-}
+-
+-# func_lo2o object
+-func_lo2o ()
+-{
+- case ${1} in
+- *.lo) func_lo2o_result=${1%.lo}.${objext} ;;
+- *) func_lo2o_result=${1} ;;
+- esac
+-}
+-
+-# func_xform libobj-or-source
+-func_xform ()
+-{
+- func_xform_result=${1%.*}.lo
+-}
+-
+-# func_arith arithmetic-term...
+-func_arith ()
+-{
+- func_arith_result=$(( $* ))
+-}
+-
+-# func_len string
+-# STRING may not start with a hyphen.
+-func_len ()
+-{
+- func_len_result=${#1}
+-}
+-
+-_LT_EOF
+- ;;
+- *) # Bourne compatible functions.
+- cat << \_LT_EOF >> "$cfgfile"
+-
+-# func_dirname file append nondir_replacement
+-# Compute the dirname of FILE. If nonempty, add APPEND to the result,
+-# otherwise set result to NONDIR_REPLACEMENT.
+-func_dirname ()
+-{
+- # Extract subdirectory from the argument.
+- func_dirname_result=`$ECHO "${1}" | $SED "$dirname"`
+- if test "X$func_dirname_result" = "X${1}"; then
+- func_dirname_result="${3}"
+- else
+- func_dirname_result="$func_dirname_result${2}"
+- fi
+-}
+-
+-# func_basename file
+-func_basename ()
+-{
+- func_basename_result=`$ECHO "${1}" | $SED "$basename"`
+-}
+-
+-
+-# func_stripname prefix suffix name
+-# strip PREFIX and SUFFIX off of NAME.
+-# PREFIX and SUFFIX must not contain globbing or regex special
+-# characters, hashes, percent signs, but SUFFIX may contain a leading
+-# dot (in which case that matches only a dot).
+-# func_strip_suffix prefix name
+-func_stripname ()
+-{
+- case ${2} in
+- .*) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%\\\\${2}\$%%"`;;
+- *) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%${2}\$%%"`;;
+- esac
+-}
+-
+-# sed scripts:
+-my_sed_long_opt='1s/^\(-[^=]*\)=.*/\1/;q'
+-my_sed_long_arg='1s/^-[^=]*=//'
+-
+-# func_opt_split
+-func_opt_split ()
+-{
+- func_opt_split_opt=`$ECHO "${1}" | $SED "$my_sed_long_opt"`
+- func_opt_split_arg=`$ECHO "${1}" | $SED "$my_sed_long_arg"`
+-}
+-
+-# func_lo2o object
+-func_lo2o ()
+-{
+- func_lo2o_result=`$ECHO "${1}" | $SED "$lo2o"`
+-}
+-
+-# func_xform libobj-or-source
+-func_xform ()
+-{
+- func_xform_result=`$ECHO "${1}" | $SED 's/\.[^.]*$/.lo/'`
+-}
+-
+-# func_arith arithmetic-term...
+-func_arith ()
+-{
+- func_arith_result=`expr "$@"`
+-}
+-
+-# func_len string
+-# STRING may not start with a hyphen.
+-func_len ()
+-{
+- func_len_result=`expr "$1" : ".*" 2>/dev/null || echo $max_cmd_len`
+-}
+-
+-_LT_EOF
+-esac
+-
+-case $lt_shell_append in
+- yes)
+- cat << \_LT_EOF >> "$cfgfile"
+-
+-# func_append var value
+-# Append VALUE to the end of shell variable VAR.
+-func_append ()
+-{
+- eval "$1+=\$2"
+-}
+-_LT_EOF
+- ;;
+- *)
+- cat << \_LT_EOF >> "$cfgfile"
+-
+-# func_append var value
+-# Append VALUE to the end of shell variable VAR.
+-func_append ()
+-{
+- eval "$1=\$$1\$2"
+-}
+-
+-_LT_EOF
+- ;;
+- esac
+-
+-
+- sed -n '/^# Generated shell functions inserted here/,$p' "$ltmain" >> "$cfgfile" \
+- || (rm -f "$cfgfile"; exit 1)
+-
+- mv -f "$cfgfile" "$ofile" ||
++ sed '$q' "$ltmain" >> "$cfgfile" \
++ || (rm -f "$cfgfile"; exit 1)
++
++ if test x"$xsi_shell" = xyes; then
++ sed -e '/^func_dirname ()$/,/^} # func_dirname /c\
++func_dirname ()\
++{\
++\ case ${1} in\
++\ */*) func_dirname_result="${1%/*}${2}" ;;\
++\ * ) func_dirname_result="${3}" ;;\
++\ esac\
++} # Extended-shell func_dirname implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++
++ sed -e '/^func_basename ()$/,/^} # func_basename /c\
++func_basename ()\
++{\
++\ func_basename_result="${1##*/}"\
++} # Extended-shell func_basename implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++
++ sed -e '/^func_dirname_and_basename ()$/,/^} # func_dirname_and_basename /c\
++func_dirname_and_basename ()\
++{\
++\ case ${1} in\
++\ */*) func_dirname_result="${1%/*}${2}" ;;\
++\ * ) func_dirname_result="${3}" ;;\
++\ esac\
++\ func_basename_result="${1##*/}"\
++} # Extended-shell func_dirname_and_basename implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++
++ sed -e '/^func_stripname ()$/,/^} # func_stripname /c\
++func_stripname ()\
++{\
++\ # pdksh 5.2.14 does not do ${X%$Y} correctly if both X and Y are\
++\ # positional parameters, so assign one to ordinary parameter first.\
++\ func_stripname_result=${3}\
++\ func_stripname_result=${func_stripname_result#"${1}"}\
++\ func_stripname_result=${func_stripname_result%"${2}"}\
++} # Extended-shell func_stripname implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++
++ sed -e '/^func_split_long_opt ()$/,/^} # func_split_long_opt /c\
++func_split_long_opt ()\
++{\
++\ func_split_long_opt_name=${1%%=*}\
++\ func_split_long_opt_arg=${1#*=}\
++} # Extended-shell func_split_long_opt implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++
++ sed -e '/^func_split_short_opt ()$/,/^} # func_split_short_opt /c\
++func_split_short_opt ()\
++{\
++\ func_split_short_opt_arg=${1#??}\
++\ func_split_short_opt_name=${1%"$func_split_short_opt_arg"}\
++} # Extended-shell func_split_short_opt implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++
++ sed -e '/^func_lo2o ()$/,/^} # func_lo2o /c\
++func_lo2o ()\
++{\
++\ case ${1} in\
++\ *.lo) func_lo2o_result=${1%.lo}.${objext} ;;\
++\ *) func_lo2o_result=${1} ;;\
++\ esac\
++} # Extended-shell func_lo2o implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++
++ sed -e '/^func_xform ()$/,/^} # func_xform /c\
++func_xform ()\
++{\
++ func_xform_result=${1%.*}.lo\
++} # Extended-shell func_xform implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++
++ sed -e '/^func_arith ()$/,/^} # func_arith /c\
++func_arith ()\
++{\
++ func_arith_result=$(( $* ))\
++} # Extended-shell func_arith implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++
++ sed -e '/^func_len ()$/,/^} # func_len /c\
++func_len ()\
++{\
++ func_len_result=${#1}\
++} # Extended-shell func_len implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++fi
++
++if test x"$lt_shell_append" = xyes; then
++ sed -e '/^func_append ()$/,/^} # func_append /c\
++func_append ()\
++{\
++ eval "${1}+=\\${2}"\
++} # Extended-shell func_append implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++
++ sed -e '/^func_append_quoted ()$/,/^} # func_append_quoted /c\
++func_append_quoted ()\
++{\
++\ func_quote_for_eval "${2}"\
++\ eval "${1}+=\\\\ \\$func_quote_for_eval_result"\
++} # Extended-shell func_append_quoted implementation' "$cfgfile" > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++test 0 -eq $? || _lt_function_replace_fail=:
++
++
++ # Save a `func_append' function call where possible by direct use of '+='
++ sed -e 's%func_append \([a-zA-Z_]\{1,\}\) "%\1+="%g' $cfgfile > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++ test 0 -eq $? || _lt_function_replace_fail=:
++else
++ # Save a `func_append' function call even when '+=' is not available
++ sed -e 's%func_append \([a-zA-Z_]\{1,\}\) "%\1="$\1%g' $cfgfile > $cfgfile.tmp \
++ && mv -f "$cfgfile.tmp" "$cfgfile" \
++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
++ test 0 -eq $? || _lt_function_replace_fail=:
++fi
++
++if test x"$_lt_function_replace_fail" = x":"; then
++ { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: Unable to substitute extended shell functions in $ofile" >&5
++$as_echo "$as_me: WARNING: Unable to substitute extended shell functions in $ofile" >&2;}
++fi
++
++
++ mv -f "$cfgfile" "$ofile" ||
+ (rm -f "$ofile" && cp "$cfgfile" "$ofile" && rm -f "$cfgfile")
+ chmod +x "$ofile"
+
diff --git a/meta/recipes-devtools/binutils/binutils/0008-fix-the-incorrect-assembling-for-ppc-wait-mnemonic.patch b/meta/recipes-devtools/binutils/binutils/0008-fix-the-incorrect-assembling-for-ppc-wait-mnemonic.patch
deleted file mode 100644
index 648bdc13d2..0000000000
--- a/meta/recipes-devtools/binutils/binutils/0008-fix-the-incorrect-assembling-for-ppc-wait-mnemonic.patch
+++ /dev/null
@@ -1,37 +0,0 @@
-From 00ae1ee97ad3ad0624798b28c6bab94a19b3ef39 Mon Sep 17 00:00:00 2001
-From: Zhenhua Luo <zhenhua.luo@nxp.com>
-Date: Sat, 11 Jun 2016 22:08:29 -0500
-Subject: [PATCH] fix the incorrect assembling for ppc wait mnemonic
-
-The wait mnemonic for ppc targets is incorrectly assembled into 0x7c00003c due
-to duplicated address definition with waitasec instruction. The issue causes
-kernel boot calltrace for ppc targets when wait instruction is executed.
-
-Upstream-Status: Pending
-Signed-off-by: Zhenhua Luo <zhenhua.luo@nxp.com>
----
- opcodes/ppc-opc.c | 4 +---
- 1 file changed, 1 insertion(+), 3 deletions(-)
-
-diff --git a/opcodes/ppc-opc.c b/opcodes/ppc-opc.c
-index a424dd924de..406d5b60917 100644
---- a/opcodes/ppc-opc.c
-+++ b/opcodes/ppc-opc.c
-@@ -6378,8 +6378,6 @@ const struct powerpc_opcode powerpc_opcodes[] = {
- {"waitasec", X(31,30), XRTRARB_MASK, POWER8, POWER9, {0}},
- {"waitrsv", XWCPL(31,30,1,0),0xffffffff, POWER10, EXT, {0}},
- {"pause_short", XWCPL(31,30,2,0),0xffffffff, POWER10, EXT, {0}},
--{"wait", X(31,30), XWCPL_MASK, POWER10, 0, {WC, PL}},
--{"wait", X(31,30), XWC_MASK, POWER9, POWER10, {WC}},
-
- {"lwepx", X(31,31), X_MASK, E500MC|PPCA2, 0, {RT, RA0, RB}},
-
-@@ -6433,7 +6431,7 @@ const struct powerpc_opcode powerpc_opcodes[] = {
-
- {"waitrsv", X(31,62)|(1<<21), 0xffffffff, E500MC|PPCA2, EXT, {0}},
- {"waitimpl", X(31,62)|(2<<21), 0xffffffff, E500MC|PPCA2, EXT, {0}},
--{"wait", X(31,62), XWC_MASK, E500MC|PPCA2, 0, {WC}},
-+{"wait", X(31,62), XWC_MASK, E500MC|PPCA2|POWER9|POWER10, 0, {WC}},
-
- {"dcbstep", XRT(31,63,0), XRT_MASK, E500MC|PPCA2, 0, {RA0, RB}},
-
diff --git a/meta/recipes-devtools/binutils/binutils/0009-Fix-rpath-in-libtool-when-sysroot-is-enabled.patch b/meta/recipes-devtools/binutils/binutils/0009-Fix-rpath-in-libtool-when-sysroot-is-enabled.patch
new file mode 100644
index 0000000000..50201fb6b6
--- /dev/null
+++ b/meta/recipes-devtools/binutils/binutils/0009-Fix-rpath-in-libtool-when-sysroot-is-enabled.patch
@@ -0,0 +1,49 @@
+From c9e810840d95554046b7132e3e2c98bf99cb2d89 Mon Sep 17 00:00:00 2001
+From: Khem Raj <raj.khem@gmail.com>
+Date: Mon, 2 Mar 2015 01:42:38 +0000
+Subject: [PATCH] Fix rpath in libtool when sysroot is enabled
+
+Enabling sysroot support in libtool exposed a bug where the final
+library had an RPATH encoded into it which still pointed to the
+sysroot. This works around the issue until it gets sorted out
+upstream.
+
+Fix suggested by Richard Purdie <richard.purdie@linuxfoundation.org>
+
+Upstream-Status: Inappropriate [embedded specific]
+
+Signed-off-by: Scott Garman <scott.a.garman@intel.com>
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+---
+ ltmain.sh | 10 ++++++++--
+ 1 file changed, 8 insertions(+), 2 deletions(-)
+
+diff --git a/ltmain.sh b/ltmain.sh
+index 7f7104ffc82..f59eb4aa631 100644
+--- a/ltmain.sh
++++ b/ltmain.sh
+@@ -8035,9 +8035,11 @@ EOF
+ test "$opt_mode" != relink && rpath="$compile_rpath$rpath"
+ for libdir in $rpath; do
+ if test -n "$hardcode_libdir_flag_spec"; then
++ func_replace_sysroot "$libdir"
++ libdir=$func_replace_sysroot_result
++ func_stripname '=' '' "$libdir"
++ libdir=$func_stripname_result
+ if test -n "$hardcode_libdir_separator"; then
+- func_replace_sysroot "$libdir"
+- libdir=$func_replace_sysroot_result
+ if test -z "$hardcode_libdirs"; then
+ hardcode_libdirs="$libdir"
+ else
+@@ -8770,6 +8772,10 @@ EOF
+ hardcode_libdirs=
+ for libdir in $compile_rpath $finalize_rpath; do
+ if test -n "$hardcode_libdir_flag_spec"; then
++ func_replace_sysroot "$libdir"
++ libdir=$func_replace_sysroot_result
++ func_stripname '=' '' "$libdir"
++ libdir=$func_stripname_result
+ if test -n "$hardcode_libdir_separator"; then
+ if test -z "$hardcode_libdirs"; then
+ hardcode_libdirs="$libdir"
diff --git a/meta/recipes-devtools/binutils/binutils/0009-Use-libtool-2.4.patch b/meta/recipes-devtools/binutils/binutils/0009-Use-libtool-2.4.patch
deleted file mode 100644
index 9f0209e274..0000000000
--- a/meta/recipes-devtools/binutils/binutils/0009-Use-libtool-2.4.patch
+++ /dev/null
@@ -1,25302 +0,0 @@
-From 9a0dea4d2f1f0f2c71f519e6195ef9cfacd9fda9 Mon Sep 17 00:00:00 2001
-From: Khem Raj <raj.khem@gmail.com>
-Date: Sun, 14 Feb 2016 17:04:07 +0000
-Subject: [PATCH] Use libtool 2.4
-
-get libtool sysroot support
-
-Upstream-Status: Pending
-
-Signed-off-by: Khem Raj <raj.khem@gmail.com>
----
- bfd/configure | 1333 +++++++++++++-----
- bfd/configure.ac | 2 +-
- binutils/configure | 1331 +++++++++++++-----
- gas/configure | 1331 +++++++++++++-----
- gprof/configure | 1331 +++++++++++++-----
- ld/configure | 1704 +++++++++++++++++------
- libbacktrace/configure | 1534 +++++++++++++++------
- libctf/configure | 1330 +++++++++++++-----
- libtool.m4 | 1093 ++++++++++-----
- ltmain.sh | 2925 +++++++++++++++++++++++++++-------------
- ltoptions.m4 | 2 +-
- ltversion.m4 | 12 +-
- lt~obsolete.m4 | 2 +-
- opcodes/configure | 1331 +++++++++++++-----
- zlib/configure | 1331 +++++++++++++-----
- 15 files changed, 12067 insertions(+), 4525 deletions(-)
-
-diff --git a/bfd/configure b/bfd/configure
-index b23c9eebfd7..fb25d046cd2 100755
---- a/bfd/configure
-+++ b/bfd/configure
-@@ -707,6 +707,9 @@ OTOOL
- LIPO
- NMEDIT
- DSYMUTIL
-+MANIFEST_TOOL
-+ac_ct_AR
-+DLLTOOL
- OBJDUMP
- LN_S
- NM
-@@ -825,6 +828,7 @@ enable_static
- with_pic
- enable_fast_install
- with_gnu_ld
-+with_libtool_sysroot
- enable_libtool_lock
- enable_plugins
- enable_largefile
-@@ -1509,6 +1513,8 @@ Optional Packages:
- --with-pic try to use only PIC/non-PIC objects [default=use
- both]
- --with-gnu-ld assume the C compiler uses GNU ld [default=no]
-+ --with-libtool-sysroot=DIR Search for dependent libraries within DIR
-+ (or the compiler's sysroot if not specified).
- --with-mmap try using mmap for BFD input files if available
- --with-separate-debug-dir=DIR
- Look for global separate debug info in DIR
-@@ -5029,8 +5035,8 @@ esac
-
-
-
--macro_version='2.2.7a'
--macro_revision='1.3134'
-+macro_version='2.4'
-+macro_revision='1.3293'
-
-
-
-@@ -5070,7 +5076,7 @@ ECHO=$ECHO$ECHO$ECHO$ECHO$ECHO$ECHO
- { $as_echo "$as_me:${as_lineno-$LINENO}: checking how to print strings" >&5
- $as_echo_n "checking how to print strings... " >&6; }
- # Test print first, because it will be a builtin if present.
--if test "X`print -r -- -n 2>/dev/null`" = X-n && \
-+if test "X`( print -r -- -n ) 2>/dev/null`" = X-n && \
- test "X`print -r -- $ECHO 2>/dev/null`" = "X$ECHO"; then
- ECHO='print -r --'
- elif test "X`printf %s $ECHO 2>/dev/null`" = "X$ECHO"; then
-@@ -5757,8 +5763,8 @@ $as_echo_n "checking whether the shell understands some XSI constructs... " >&6;
- # Try some XSI features
- xsi_shell=no
- ( _lt_dummy="a/b/c"
-- test "${_lt_dummy##*/},${_lt_dummy%/*},"${_lt_dummy%"$_lt_dummy"}, \
-- = c,a/b,, \
-+ test "${_lt_dummy##*/},${_lt_dummy%/*},${_lt_dummy#??}"${_lt_dummy%"$_lt_dummy"}, \
-+ = c,a/b,b/c, \
- && eval 'test $(( 1 + 1 )) -eq 2 \
- && test "${#_lt_dummy}" -eq 5' ) >/dev/null 2>&1 \
- && xsi_shell=yes
-@@ -5807,6 +5813,80 @@ esac
-
-
-
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to convert $build file names to $host format" >&5
-+$as_echo_n "checking how to convert $build file names to $host format... " >&6; }
-+if ${lt_cv_to_host_file_cmd+:} false; then :
-+ $as_echo_n "(cached) " >&6
-+else
-+ case $host in
-+ *-*-mingw* )
-+ case $build in
-+ *-*-mingw* ) # actually msys
-+ lt_cv_to_host_file_cmd=func_convert_file_msys_to_w32
-+ ;;
-+ *-*-cygwin* )
-+ lt_cv_to_host_file_cmd=func_convert_file_cygwin_to_w32
-+ ;;
-+ * ) # otherwise, assume *nix
-+ lt_cv_to_host_file_cmd=func_convert_file_nix_to_w32
-+ ;;
-+ esac
-+ ;;
-+ *-*-cygwin* )
-+ case $build in
-+ *-*-mingw* ) # actually msys
-+ lt_cv_to_host_file_cmd=func_convert_file_msys_to_cygwin
-+ ;;
-+ *-*-cygwin* )
-+ lt_cv_to_host_file_cmd=func_convert_file_noop
-+ ;;
-+ * ) # otherwise, assume *nix
-+ lt_cv_to_host_file_cmd=func_convert_file_nix_to_cygwin
-+ ;;
-+ esac
-+ ;;
-+ * ) # unhandled hosts (and "normal" native builds)
-+ lt_cv_to_host_file_cmd=func_convert_file_noop
-+ ;;
-+esac
-+
-+fi
-+
-+to_host_file_cmd=$lt_cv_to_host_file_cmd
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_to_host_file_cmd" >&5
-+$as_echo "$lt_cv_to_host_file_cmd" >&6; }
-+
-+
-+
-+
-+
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to convert $build file names to toolchain format" >&5
-+$as_echo_n "checking how to convert $build file names to toolchain format... " >&6; }
-+if ${lt_cv_to_tool_file_cmd+:} false; then :
-+ $as_echo_n "(cached) " >&6
-+else
-+ #assume ordinary cross tools, or native build.
-+lt_cv_to_tool_file_cmd=func_convert_file_noop
-+case $host in
-+ *-*-mingw* )
-+ case $build in
-+ *-*-mingw* ) # actually msys
-+ lt_cv_to_tool_file_cmd=func_convert_file_msys_to_w32
-+ ;;
-+ esac
-+ ;;
-+esac
-+
-+fi
-+
-+to_tool_file_cmd=$lt_cv_to_tool_file_cmd
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_to_tool_file_cmd" >&5
-+$as_echo "$lt_cv_to_tool_file_cmd" >&6; }
-+
-+
-+
-+
-+
- { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $LD option to reload object files" >&5
- $as_echo_n "checking for $LD option to reload object files... " >&6; }
- if ${lt_cv_ld_reload_flag+:} false; then :
-@@ -5823,6 +5903,11 @@ case $reload_flag in
- esac
- reload_cmds='$LD$reload_flag -o $output$reload_objs'
- case $host_os in
-+ cygwin* | mingw* | pw32* | cegcc*)
-+ if test "$GCC" != yes; then
-+ reload_cmds=false
-+ fi
-+ ;;
- darwin*)
- if test "$GCC" = yes; then
- reload_cmds='$LTCC $LTCFLAGS -nostdlib ${wl}-r -o $output$reload_objs'
-@@ -5991,7 +6076,8 @@ mingw* | pw32*)
- lt_cv_deplibs_check_method='file_magic ^x86 archive import|^x86 DLL'
- lt_cv_file_magic_cmd='func_win32_libid'
- else
-- lt_cv_deplibs_check_method='file_magic file format pei*-i386(.*architecture: i386)?'
-+ # Keep this pattern in sync with the one in func_win32_libid.
-+ lt_cv_deplibs_check_method='file_magic file format (pei*-i386(.*architecture: i386)?|pe-arm-wince|pe-x86-64)'
- lt_cv_file_magic_cmd='$OBJDUMP -f'
- fi
- ;;
-@@ -6145,6 +6231,21 @@ esac
- fi
- { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_deplibs_check_method" >&5
- $as_echo "$lt_cv_deplibs_check_method" >&6; }
-+
-+file_magic_glob=
-+want_nocaseglob=no
-+if test "$build" = "$host"; then
-+ case $host_os in
-+ mingw* | pw32*)
-+ if ( shopt | grep nocaseglob ) >/dev/null 2>&1; then
-+ want_nocaseglob=yes
-+ else
-+ file_magic_glob=`echo aAbBcCdDeEfFgGhHiIjJkKlLmMnNoOpPqQrRsStTuUvVwWxXyYzZ | $SED -e "s/\(..\)/s\/[\1]\/[\1]\/g;/g"`
-+ fi
-+ ;;
-+ esac
-+fi
-+
- file_magic_cmd=$lt_cv_file_magic_cmd
- deplibs_check_method=$lt_cv_deplibs_check_method
- test -z "$deplibs_check_method" && deplibs_check_method=unknown
-@@ -6160,6 +6261,157 @@ test -z "$deplibs_check_method" && deplibs_check_method=unknown
-
-
-
-+
-+
-+
-+
-+
-+
-+
-+
-+
-+
-+if test -n "$ac_tool_prefix"; then
-+ # Extract the first word of "${ac_tool_prefix}dlltool", so it can be a program name with args.
-+set dummy ${ac_tool_prefix}dlltool; ac_word=$2
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-+$as_echo_n "checking for $ac_word... " >&6; }
-+if ${ac_cv_prog_DLLTOOL+:} false; then :
-+ $as_echo_n "(cached) " >&6
-+else
-+ if test -n "$DLLTOOL"; then
-+ ac_cv_prog_DLLTOOL="$DLLTOOL" # Let the user override the test.
-+else
-+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-+for as_dir in $PATH
-+do
-+ IFS=$as_save_IFS
-+ test -z "$as_dir" && as_dir=.
-+ for ac_exec_ext in '' $ac_executable_extensions; do
-+ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
-+ ac_cv_prog_DLLTOOL="${ac_tool_prefix}dlltool"
-+ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
-+ break 2
-+ fi
-+done
-+ done
-+IFS=$as_save_IFS
-+
-+fi
-+fi
-+DLLTOOL=$ac_cv_prog_DLLTOOL
-+if test -n "$DLLTOOL"; then
-+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $DLLTOOL" >&5
-+$as_echo "$DLLTOOL" >&6; }
-+else
-+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-+$as_echo "no" >&6; }
-+fi
-+
-+
-+fi
-+if test -z "$ac_cv_prog_DLLTOOL"; then
-+ ac_ct_DLLTOOL=$DLLTOOL
-+ # Extract the first word of "dlltool", so it can be a program name with args.
-+set dummy dlltool; ac_word=$2
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-+$as_echo_n "checking for $ac_word... " >&6; }
-+if ${ac_cv_prog_ac_ct_DLLTOOL+:} false; then :
-+ $as_echo_n "(cached) " >&6
-+else
-+ if test -n "$ac_ct_DLLTOOL"; then
-+ ac_cv_prog_ac_ct_DLLTOOL="$ac_ct_DLLTOOL" # Let the user override the test.
-+else
-+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-+for as_dir in $PATH
-+do
-+ IFS=$as_save_IFS
-+ test -z "$as_dir" && as_dir=.
-+ for ac_exec_ext in '' $ac_executable_extensions; do
-+ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
-+ ac_cv_prog_ac_ct_DLLTOOL="dlltool"
-+ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
-+ break 2
-+ fi
-+done
-+ done
-+IFS=$as_save_IFS
-+
-+fi
-+fi
-+ac_ct_DLLTOOL=$ac_cv_prog_ac_ct_DLLTOOL
-+if test -n "$ac_ct_DLLTOOL"; then
-+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_DLLTOOL" >&5
-+$as_echo "$ac_ct_DLLTOOL" >&6; }
-+else
-+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-+$as_echo "no" >&6; }
-+fi
-+
-+ if test "x$ac_ct_DLLTOOL" = x; then
-+ DLLTOOL="false"
-+ else
-+ case $cross_compiling:$ac_tool_warned in
-+yes:)
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
-+$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
-+ac_tool_warned=yes ;;
-+esac
-+ DLLTOOL=$ac_ct_DLLTOOL
-+ fi
-+else
-+ DLLTOOL="$ac_cv_prog_DLLTOOL"
-+fi
-+
-+test -z "$DLLTOOL" && DLLTOOL=dlltool
-+
-+
-+
-+
-+
-+
-+
-+
-+
-+
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to associate runtime and link libraries" >&5
-+$as_echo_n "checking how to associate runtime and link libraries... " >&6; }
-+if ${lt_cv_sharedlib_from_linklib_cmd+:} false; then :
-+ $as_echo_n "(cached) " >&6
-+else
-+ lt_cv_sharedlib_from_linklib_cmd='unknown'
-+
-+case $host_os in
-+cygwin* | mingw* | pw32* | cegcc*)
-+ # two different shell functions defined in ltmain.sh
-+ # decide which to use based on capabilities of $DLLTOOL
-+ case `$DLLTOOL --help 2>&1` in
-+ *--identify-strict*)
-+ lt_cv_sharedlib_from_linklib_cmd=func_cygming_dll_for_implib
-+ ;;
-+ *)
-+ lt_cv_sharedlib_from_linklib_cmd=func_cygming_dll_for_implib_fallback
-+ ;;
-+ esac
-+ ;;
-+*)
-+ # fallback: assume linklib IS sharedlib
-+ lt_cv_sharedlib_from_linklib_cmd="$ECHO"
-+ ;;
-+esac
-+
-+fi
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_sharedlib_from_linklib_cmd" >&5
-+$as_echo "$lt_cv_sharedlib_from_linklib_cmd" >&6; }
-+sharedlib_from_linklib_cmd=$lt_cv_sharedlib_from_linklib_cmd
-+test -z "$sharedlib_from_linklib_cmd" && sharedlib_from_linklib_cmd=$ECHO
-+
-+
-+
-+
-+
-+
-+
- plugin_option=
- plugin_names="liblto_plugin.so liblto_plugin-0.dll cyglto_plugin-0.dll"
- for plugin in $plugin_names; do
-@@ -6174,8 +6426,10 @@ for plugin in $plugin_names; do
- done
-
- if test -n "$ac_tool_prefix"; then
-- # Extract the first word of "${ac_tool_prefix}ar", so it can be a program name with args.
--set dummy ${ac_tool_prefix}ar; ac_word=$2
-+ for ac_prog in ar
-+ do
-+ # Extract the first word of "$ac_tool_prefix$ac_prog", so it can be a program name with args.
-+set dummy $ac_tool_prefix$ac_prog; ac_word=$2
- { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
- $as_echo_n "checking for $ac_word... " >&6; }
- if ${ac_cv_prog_AR+:} false; then :
-@@ -6191,7 +6445,7 @@ do
- test -z "$as_dir" && as_dir=.
- for ac_exec_ext in '' $ac_executable_extensions; do
- if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
-- ac_cv_prog_AR="${ac_tool_prefix}ar"
-+ ac_cv_prog_AR="$ac_tool_prefix$ac_prog"
- $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
- break 2
- fi
-@@ -6211,11 +6465,15 @@ $as_echo "no" >&6; }
- fi
-
-
-+ test -n "$AR" && break
-+ done
- fi
--if test -z "$ac_cv_prog_AR"; then
-+if test -z "$AR"; then
- ac_ct_AR=$AR
-- # Extract the first word of "ar", so it can be a program name with args.
--set dummy ar; ac_word=$2
-+ for ac_prog in ar
-+do
-+ # Extract the first word of "$ac_prog", so it can be a program name with args.
-+set dummy $ac_prog; ac_word=$2
- { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
- $as_echo_n "checking for $ac_word... " >&6; }
- if ${ac_cv_prog_ac_ct_AR+:} false; then :
-@@ -6231,7 +6489,7 @@ do
- test -z "$as_dir" && as_dir=.
- for ac_exec_ext in '' $ac_executable_extensions; do
- if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
-- ac_cv_prog_ac_ct_AR="ar"
-+ ac_cv_prog_ac_ct_AR="$ac_prog"
- $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
- break 2
- fi
-@@ -6250,6 +6508,10 @@ else
- $as_echo "no" >&6; }
- fi
-
-+
-+ test -n "$ac_ct_AR" && break
-+done
-+
- if test "x$ac_ct_AR" = x; then
- AR="false"
- else
-@@ -6261,25 +6523,20 @@ ac_tool_warned=yes ;;
- esac
- AR=$ac_ct_AR
- fi
--else
-- AR="$ac_cv_prog_AR"
- fi
-
--test -z "$AR" && AR=ar
--if test -n "$plugin_option"; then
-- if $AR --help 2>&1 | grep -q "\--plugin"; then
-- touch conftest.c
-- $AR $plugin_option rc conftest.a conftest.c
-- if test "$?" != 0; then
-- { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: Failed: $AR $plugin_option rc" >&5
-+ touch conftest.c
-+ $AR $plugin_option rc conftest.a conftest.c
-+ if test "$?" != 0; then
-+ { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: Failed: $AR $plugin_option rc" >&5
- $as_echo "$as_me: WARNING: Failed: $AR $plugin_option rc" >&2;}
-- else
-- AR="$AR $plugin_option"
-- fi
-- rm -f conftest.*
-+ else
-+ AR="$AR $plugin_option"
- fi
--fi
--test -z "$AR_FLAGS" && AR_FLAGS=cru
-+ rm -f conftest.*
-+: ${AR=ar}
-+: ${AR_FLAGS=cru}
-+
-
-
-
-@@ -6290,6 +6547,63 @@ test -z "$AR_FLAGS" && AR_FLAGS=cru
-
-
-
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for archiver @FILE support" >&5
-+$as_echo_n "checking for archiver @FILE support... " >&6; }
-+if ${lt_cv_ar_at_file+:} false; then :
-+ $as_echo_n "(cached) " >&6
-+else
-+ lt_cv_ar_at_file=no
-+ cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-+/* end confdefs.h. */
-+
-+int
-+main ()
-+{
-+
-+ ;
-+ return 0;
-+}
-+_ACEOF
-+if ac_fn_c_try_compile "$LINENO"; then :
-+ echo conftest.$ac_objext > conftest.lst
-+ lt_ar_try='$AR $AR_FLAGS libconftest.a @conftest.lst >&5'
-+ { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$lt_ar_try\""; } >&5
-+ (eval $lt_ar_try) 2>&5
-+ ac_status=$?
-+ $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
-+ test $ac_status = 0; }
-+ if test "$ac_status" -eq 0; then
-+ # Ensure the archiver fails upon bogus file names.
-+ rm -f conftest.$ac_objext libconftest.a
-+ { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$lt_ar_try\""; } >&5
-+ (eval $lt_ar_try) 2>&5
-+ ac_status=$?
-+ $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
-+ test $ac_status = 0; }
-+ if test "$ac_status" -ne 0; then
-+ lt_cv_ar_at_file=@
-+ fi
-+ fi
-+ rm -f conftest.* libconftest.a
-+
-+fi
-+rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
-+
-+fi
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_ar_at_file" >&5
-+$as_echo "$lt_cv_ar_at_file" >&6; }
-+
-+if test "x$lt_cv_ar_at_file" = xno; then
-+ archiver_list_spec=
-+else
-+ archiver_list_spec=$lt_cv_ar_at_file
-+fi
-+
-+
-+
-+
-+
-+
-
- if test -n "$ac_tool_prefix"; then
- # Extract the first word of "${ac_tool_prefix}strip", so it can be a program name with args.
-@@ -6630,8 +6944,8 @@ esac
- lt_cv_sys_global_symbol_to_cdecl="sed -n -e 's/^T .* \(.*\)$/extern int \1();/p' -e 's/^$symcode* .* \(.*\)$/extern char \1;/p'"
-
- # Transform an extracted symbol line into symbol name and symbol address
--lt_cv_sys_global_symbol_to_c_name_address="sed -n -e 's/^: \([^ ]*\) $/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"\2\", (void *) \&\2},/p'"
--lt_cv_sys_global_symbol_to_c_name_address_lib_prefix="sed -n -e 's/^: \([^ ]*\) $/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \(lib[^ ]*\)$/ {\"\2\", (void *) \&\2},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"lib\2\", (void *) \&\2},/p'"
-+lt_cv_sys_global_symbol_to_c_name_address="sed -n -e 's/^: \([^ ]*\)[ ]*$/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"\2\", (void *) \&\2},/p'"
-+lt_cv_sys_global_symbol_to_c_name_address_lib_prefix="sed -n -e 's/^: \([^ ]*\)[ ]*$/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \(lib[^ ]*\)$/ {\"\2\", (void *) \&\2},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"lib\2\", (void *) \&\2},/p'"
-
- # Handle CRLF in mingw tool chain
- opt_cr=
-@@ -6667,6 +6981,7 @@ for ac_symprfx in "" "_"; do
- else
- lt_cv_sys_global_symbol_pipe="sed -n -e 's/^.*[ ]\($symcode$symcode*\)[ ][ ]*$ac_symprfx$sympat$opt_cr$/$symxfrm/p'"
- fi
-+ lt_cv_sys_global_symbol_pipe="$lt_cv_sys_global_symbol_pipe | sed '/ __gnu_lto/d'"
-
- # Check to see that the pipe works correctly.
- pipe_works=no
-@@ -6708,6 +7023,18 @@ _LT_EOF
- if $GREP ' nm_test_var$' "$nlist" >/dev/null; then
- if $GREP ' nm_test_func$' "$nlist" >/dev/null; then
- cat <<_LT_EOF > conftest.$ac_ext
-+/* Keep this code in sync between libtool.m4, ltmain, lt_system.h, and tests. */
-+#if defined(_WIN32) || defined(__CYGWIN__) || defined(_WIN32_WCE)
-+/* DATA imports from DLLs on WIN32 con't be const, because runtime
-+ relocations are performed -- see ld's documentation on pseudo-relocs. */
-+# define LT_DLSYM_CONST
-+#elif defined(__osf__)
-+/* This system does not cope well with relocations in const data. */
-+# define LT_DLSYM_CONST
-+#else
-+# define LT_DLSYM_CONST const
-+#endif
-+
- #ifdef __cplusplus
- extern "C" {
- #endif
-@@ -6719,7 +7046,7 @@ _LT_EOF
- cat <<_LT_EOF >> conftest.$ac_ext
-
- /* The mapping between symbol names and symbols. */
--const struct {
-+LT_DLSYM_CONST struct {
- const char *name;
- void *address;
- }
-@@ -6745,8 +7072,8 @@ static const void *lt_preloaded_setup() {
- _LT_EOF
- # Now try linking the two files.
- mv conftest.$ac_objext conftstm.$ac_objext
-- lt_save_LIBS="$LIBS"
-- lt_save_CFLAGS="$CFLAGS"
-+ lt_globsym_save_LIBS=$LIBS
-+ lt_globsym_save_CFLAGS=$CFLAGS
- LIBS="conftstm.$ac_objext"
- CFLAGS="$CFLAGS$lt_prog_compiler_no_builtin_flag"
- if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_link\""; } >&5
-@@ -6756,8 +7083,8 @@ _LT_EOF
- test $ac_status = 0; } && test -s conftest${ac_exeext}; then
- pipe_works=yes
- fi
-- LIBS="$lt_save_LIBS"
-- CFLAGS="$lt_save_CFLAGS"
-+ LIBS=$lt_globsym_save_LIBS
-+ CFLAGS=$lt_globsym_save_CFLAGS
- else
- echo "cannot find nm_test_func in $nlist" >&5
- fi
-@@ -6794,6 +7121,14 @@ else
- $as_echo "ok" >&6; }
- fi
-
-+# Response file support.
-+if test "$lt_cv_nm_interface" = "MS dumpbin"; then
-+ nm_file_list_spec='@'
-+elif $NM --help 2>/dev/null | grep '[@]FILE' >/dev/null; then
-+ nm_file_list_spec='@'
-+fi
-+
-+
-
-
-
-@@ -6812,6 +7147,47 @@ fi
-
-
-
-+
-+
-+
-+
-+
-+
-+
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for sysroot" >&5
-+$as_echo_n "checking for sysroot... " >&6; }
-+
-+# Check whether --with-libtool-sysroot was given.
-+if test "${with_libtool_sysroot+set}" = set; then :
-+ withval=$with_libtool_sysroot;
-+else
-+ with_libtool_sysroot=no
-+fi
-+
-+
-+lt_sysroot=
-+case ${with_libtool_sysroot} in #(
-+ yes)
-+ if test "$GCC" = yes; then
-+ lt_sysroot=`$CC --print-sysroot 2>/dev/null`
-+ fi
-+ ;; #(
-+ /*)
-+ lt_sysroot=`echo "$with_libtool_sysroot" | sed -e "$sed_quote_subst"`
-+ ;; #(
-+ no|'')
-+ ;; #(
-+ *)
-+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: ${with_libtool_sysroot}" >&5
-+$as_echo "${with_libtool_sysroot}" >&6; }
-+ as_fn_error $? "The sysroot must be an absolute path." "$LINENO" 5
-+ ;;
-+esac
-+
-+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: ${lt_sysroot:-no}" >&5
-+$as_echo "${lt_sysroot:-no}" >&6; }
-+
-+
-
-
-
-@@ -7021,6 +7397,123 @@ esac
-
- need_locks="$enable_libtool_lock"
-
-+if test -n "$ac_tool_prefix"; then
-+ # Extract the first word of "${ac_tool_prefix}mt", so it can be a program name with args.
-+set dummy ${ac_tool_prefix}mt; ac_word=$2
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-+$as_echo_n "checking for $ac_word... " >&6; }
-+if ${ac_cv_prog_MANIFEST_TOOL+:} false; then :
-+ $as_echo_n "(cached) " >&6
-+else
-+ if test -n "$MANIFEST_TOOL"; then
-+ ac_cv_prog_MANIFEST_TOOL="$MANIFEST_TOOL" # Let the user override the test.
-+else
-+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-+for as_dir in $PATH
-+do
-+ IFS=$as_save_IFS
-+ test -z "$as_dir" && as_dir=.
-+ for ac_exec_ext in '' $ac_executable_extensions; do
-+ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
-+ ac_cv_prog_MANIFEST_TOOL="${ac_tool_prefix}mt"
-+ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
-+ break 2
-+ fi
-+done
-+ done
-+IFS=$as_save_IFS
-+
-+fi
-+fi
-+MANIFEST_TOOL=$ac_cv_prog_MANIFEST_TOOL
-+if test -n "$MANIFEST_TOOL"; then
-+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $MANIFEST_TOOL" >&5
-+$as_echo "$MANIFEST_TOOL" >&6; }
-+else
-+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-+$as_echo "no" >&6; }
-+fi
-+
-+
-+fi
-+if test -z "$ac_cv_prog_MANIFEST_TOOL"; then
-+ ac_ct_MANIFEST_TOOL=$MANIFEST_TOOL
-+ # Extract the first word of "mt", so it can be a program name with args.
-+set dummy mt; ac_word=$2
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-+$as_echo_n "checking for $ac_word... " >&6; }
-+if ${ac_cv_prog_ac_ct_MANIFEST_TOOL+:} false; then :
-+ $as_echo_n "(cached) " >&6
-+else
-+ if test -n "$ac_ct_MANIFEST_TOOL"; then
-+ ac_cv_prog_ac_ct_MANIFEST_TOOL="$ac_ct_MANIFEST_TOOL" # Let the user override the test.
-+else
-+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-+for as_dir in $PATH
-+do
-+ IFS=$as_save_IFS
-+ test -z "$as_dir" && as_dir=.
-+ for ac_exec_ext in '' $ac_executable_extensions; do
-+ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
-+ ac_cv_prog_ac_ct_MANIFEST_TOOL="mt"
-+ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
-+ break 2
-+ fi
-+done
-+ done
-+IFS=$as_save_IFS
-+
-+fi
-+fi
-+ac_ct_MANIFEST_TOOL=$ac_cv_prog_ac_ct_MANIFEST_TOOL
-+if test -n "$ac_ct_MANIFEST_TOOL"; then
-+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_MANIFEST_TOOL" >&5
-+$as_echo "$ac_ct_MANIFEST_TOOL" >&6; }
-+else
-+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-+$as_echo "no" >&6; }
-+fi
-+
-+ if test "x$ac_ct_MANIFEST_TOOL" = x; then
-+ MANIFEST_TOOL=":"
-+ else
-+ case $cross_compiling:$ac_tool_warned in
-+yes:)
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
-+$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
-+ac_tool_warned=yes ;;
-+esac
-+ MANIFEST_TOOL=$ac_ct_MANIFEST_TOOL
-+ fi
-+else
-+ MANIFEST_TOOL="$ac_cv_prog_MANIFEST_TOOL"
-+fi
-+
-+test -z "$MANIFEST_TOOL" && MANIFEST_TOOL=mt
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking if $MANIFEST_TOOL is a manifest tool" >&5
-+$as_echo_n "checking if $MANIFEST_TOOL is a manifest tool... " >&6; }
-+if ${lt_cv_path_mainfest_tool+:} false; then :
-+ $as_echo_n "(cached) " >&6
-+else
-+ lt_cv_path_mainfest_tool=no
-+ echo "$as_me:$LINENO: $MANIFEST_TOOL '-?'" >&5
-+ $MANIFEST_TOOL '-?' 2>conftest.err > conftest.out
-+ cat conftest.err >&5
-+ if $GREP 'Manifest Tool' conftest.out > /dev/null; then
-+ lt_cv_path_mainfest_tool=yes
-+ fi
-+ rm -f conftest*
-+fi
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_path_mainfest_tool" >&5
-+$as_echo "$lt_cv_path_mainfest_tool" >&6; }
-+if test "x$lt_cv_path_mainfest_tool" != xyes; then
-+ MANIFEST_TOOL=:
-+fi
-+
-+
-+
-+
-+
-
- case $host_os in
- rhapsody* | darwin*)
-@@ -7584,6 +8077,8 @@ _LT_EOF
- $LTCC $LTCFLAGS -c -o conftest.o conftest.c 2>&5
- echo "$AR cru libconftest.a conftest.o" >&5
- $AR cru libconftest.a conftest.o 2>&5
-+ echo "$RANLIB libconftest.a" >&5
-+ $RANLIB libconftest.a 2>&5
- cat > conftest.c << _LT_EOF
- int main() { return 0;}
- _LT_EOF
-@@ -8135,8 +8630,6 @@ fi
- lt_prog_compiler_pic=
- lt_prog_compiler_static=
-
--{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $compiler option to produce PIC" >&5
--$as_echo_n "checking for $compiler option to produce PIC... " >&6; }
-
- if test "$GCC" = yes; then
- lt_prog_compiler_wl='-Wl,'
-@@ -8302,6 +8795,12 @@ $as_echo_n "checking for $compiler option to produce PIC... " >&6; }
- lt_prog_compiler_pic='--shared'
- lt_prog_compiler_static='--static'
- ;;
-+ nagfor*)
-+ # NAG Fortran compiler
-+ lt_prog_compiler_wl='-Wl,-Wl,,'
-+ lt_prog_compiler_pic='-PIC'
-+ lt_prog_compiler_static='-Bstatic'
-+ ;;
- pgcc* | pgf77* | pgf90* | pgf95* | pgfortran*)
- # Portland Group compilers (*not* the Pentium gcc compiler,
- # which looks to be a dead project)
-@@ -8364,7 +8863,7 @@ $as_echo_n "checking for $compiler option to produce PIC... " >&6; }
- lt_prog_compiler_pic='-KPIC'
- lt_prog_compiler_static='-Bstatic'
- case $cc_basename in
-- f77* | f90* | f95*)
-+ f77* | f90* | f95* | sunf77* | sunf90* | sunf95*)
- lt_prog_compiler_wl='-Qoption ld ';;
- *)
- lt_prog_compiler_wl='-Wl,';;
-@@ -8421,13 +8920,17 @@ case $host_os in
- lt_prog_compiler_pic="$lt_prog_compiler_pic -DPIC"
- ;;
- esac
--{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_prog_compiler_pic" >&5
--$as_echo "$lt_prog_compiler_pic" >&6; }
--
--
--
--
-
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $compiler option to produce PIC" >&5
-+$as_echo_n "checking for $compiler option to produce PIC... " >&6; }
-+if ${lt_cv_prog_compiler_pic+:} false; then :
-+ $as_echo_n "(cached) " >&6
-+else
-+ lt_cv_prog_compiler_pic=$lt_prog_compiler_pic
-+fi
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_pic" >&5
-+$as_echo "$lt_cv_prog_compiler_pic" >&6; }
-+lt_prog_compiler_pic=$lt_cv_prog_compiler_pic
-
- #
- # Check to make sure the PIC flag actually works.
-@@ -8488,6 +8991,11 @@ fi
-
-
-
-+
-+
-+
-+
-+
- #
- # Check to make sure the static flag actually works.
- #
-@@ -8838,7 +9346,8 @@ _LT_EOF
- allow_undefined_flag=unsupported
- always_export_symbols=no
- enable_shared_with_static_runtimes=yes
-- export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1 DATA/'\'' | $SED -e '\''/^[AITW][ ]/s/.*[ ]//'\'' | sort | uniq > $export_symbols'
-+ export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1 DATA/;s/^.*[ ]__nm__\([^ ]*\)[ ][^ ]*/\1 DATA/;/^I[ ]/d;/^[AITW][ ]/s/.* //'\'' | sort | uniq > $export_symbols'
-+ exclude_expsyms='[_]+GLOBAL_OFFSET_TABLE_|[_]+GLOBAL__[FID]_.*|[_]+head_[A-Za-z0-9_]+_dll|[A-Za-z0-9_]+_dll_iname'
-
- if $LD --help 2>&1 | $GREP 'auto-import' > /dev/null; then
- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib'
-@@ -8937,12 +9446,12 @@ _LT_EOF
- whole_archive_flag_spec='--whole-archive$convenience --no-whole-archive'
- hardcode_libdir_flag_spec=
- hardcode_libdir_flag_spec_ld='-rpath $libdir'
-- archive_cmds='$LD -shared $libobjs $deplibs $compiler_flags -soname $soname -o $lib'
-+ archive_cmds='$LD -shared $libobjs $deplibs $linker_flags -soname $soname -o $lib'
- if test "x$supports_anon_versioning" = xyes; then
- archive_expsym_cmds='echo "{ global:" > $output_objdir/$libname.ver~
- cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~
- echo "local: *; };" >> $output_objdir/$libname.ver~
-- $LD -shared $libobjs $deplibs $compiler_flags -soname $soname -version-script $output_objdir/$libname.ver -o $lib'
-+ $LD -shared $libobjs $deplibs $linker_flags -soname $soname -version-script $output_objdir/$libname.ver -o $lib'
- fi
- ;;
- esac
-@@ -8956,8 +9465,8 @@ _LT_EOF
- archive_cmds='$LD -Bshareable $libobjs $deplibs $linker_flags -o $lib'
- wlarc=
- else
-- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
-- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
-+ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
-+ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
- fi
- ;;
-
-@@ -8975,8 +9484,8 @@ _LT_EOF
-
- _LT_EOF
- elif $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then
-- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
-- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
-+ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
-+ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
- else
- ld_shlibs=no
- fi
-@@ -9022,8 +9531,8 @@ _LT_EOF
-
- *)
- if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then
-- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
-- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
-+ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
-+ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
- else
- ld_shlibs=no
- fi
-@@ -9153,7 +9662,13 @@ _LT_EOF
- allow_undefined_flag='-berok'
- # Determine the default libpath from the value encoded in an
- # empty executable.
-- cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-+ if test "${lt_cv_aix_libpath+set}" = set; then
-+ aix_libpath=$lt_cv_aix_libpath
-+else
-+ if ${lt_cv_aix_libpath_+:} false; then :
-+ $as_echo_n "(cached) " >&6
-+else
-+ cat confdefs.h - <<_ACEOF >conftest.$ac_ext
- /* end confdefs.h. */
-
- int
-@@ -9166,22 +9681,29 @@ main ()
- _ACEOF
- if ac_fn_c_try_link "$LINENO"; then :
-
--lt_aix_libpath_sed='
-- /Import File Strings/,/^$/ {
-- /^0/ {
-- s/^0 *\(.*\)$/\1/
-- p
-- }
-- }'
--aix_libpath=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
--# Check for a 64-bit object if we didn't find anything.
--if test -z "$aix_libpath"; then
-- aix_libpath=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
--fi
-+ lt_aix_libpath_sed='
-+ /Import File Strings/,/^$/ {
-+ /^0/ {
-+ s/^0 *\([^ ]*\) *$/\1/
-+ p
-+ }
-+ }'
-+ lt_cv_aix_libpath_=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
-+ # Check for a 64-bit object if we didn't find anything.
-+ if test -z "$lt_cv_aix_libpath_"; then
-+ lt_cv_aix_libpath_=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
-+ fi
- fi
- rm -f core conftest.err conftest.$ac_objext \
- conftest$ac_exeext conftest.$ac_ext
--if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
-+ if test -z "$lt_cv_aix_libpath_"; then
-+ lt_cv_aix_libpath_="/usr/lib:/lib"
-+ fi
-+
-+fi
-+
-+ aix_libpath=$lt_cv_aix_libpath_
-+fi
-
- hardcode_libdir_flag_spec='${wl}-blibpath:$libdir:'"$aix_libpath"
- archive_expsym_cmds='$CC -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags `if test "x${allow_undefined_flag}" != "x"; then func_echo_all "${wl}${allow_undefined_flag}"; else :; fi` '"\${wl}$exp_sym_flag:\$export_symbols $shared_flag"
-@@ -9193,7 +9715,13 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
- else
- # Determine the default libpath from the value encoded in an
- # empty executable.
-- cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-+ if test "${lt_cv_aix_libpath+set}" = set; then
-+ aix_libpath=$lt_cv_aix_libpath
-+else
-+ if ${lt_cv_aix_libpath_+:} false; then :
-+ $as_echo_n "(cached) " >&6
-+else
-+ cat confdefs.h - <<_ACEOF >conftest.$ac_ext
- /* end confdefs.h. */
-
- int
-@@ -9206,22 +9734,29 @@ main ()
- _ACEOF
- if ac_fn_c_try_link "$LINENO"; then :
-
--lt_aix_libpath_sed='
-- /Import File Strings/,/^$/ {
-- /^0/ {
-- s/^0 *\(.*\)$/\1/
-- p
-- }
-- }'
--aix_libpath=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
--# Check for a 64-bit object if we didn't find anything.
--if test -z "$aix_libpath"; then
-- aix_libpath=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
--fi
-+ lt_aix_libpath_sed='
-+ /Import File Strings/,/^$/ {
-+ /^0/ {
-+ s/^0 *\([^ ]*\) *$/\1/
-+ p
-+ }
-+ }'
-+ lt_cv_aix_libpath_=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
-+ # Check for a 64-bit object if we didn't find anything.
-+ if test -z "$lt_cv_aix_libpath_"; then
-+ lt_cv_aix_libpath_=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
-+ fi
- fi
- rm -f core conftest.err conftest.$ac_objext \
- conftest$ac_exeext conftest.$ac_ext
--if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
-+ if test -z "$lt_cv_aix_libpath_"; then
-+ lt_cv_aix_libpath_="/usr/lib:/lib"
-+ fi
-+
-+fi
-+
-+ aix_libpath=$lt_cv_aix_libpath_
-+fi
-
- hardcode_libdir_flag_spec='${wl}-blibpath:$libdir:'"$aix_libpath"
- # Warning - without using the other run time loading flags,
-@@ -9266,20 +9801,63 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
- # Microsoft Visual C++.
- # hardcode_libdir_flag_spec is actually meaningless, as there is
- # no search path for DLLs.
-- hardcode_libdir_flag_spec=' '
-- allow_undefined_flag=unsupported
-- # Tell ltmain to make .lib files, not .a files.
-- libext=lib
-- # Tell ltmain to make .dll files, not .so files.
-- shrext_cmds=".dll"
-- # FIXME: Setting linknames here is a bad hack.
-- archive_cmds='$CC -o $lib $libobjs $compiler_flags `func_echo_all "$deplibs" | $SED '\''s/ -lc$//'\''` -link -dll~linknames='
-- # The linker will automatically build a .lib file if we build a DLL.
-- old_archive_from_new_cmds='true'
-- # FIXME: Should let the user specify the lib program.
-- old_archive_cmds='lib -OUT:$oldlib$oldobjs$old_deplibs'
-- fix_srcfile_path='`cygpath -w "$srcfile"`'
-- enable_shared_with_static_runtimes=yes
-+ case $cc_basename in
-+ cl*)
-+ # Native MSVC
-+ hardcode_libdir_flag_spec=' '
-+ allow_undefined_flag=unsupported
-+ always_export_symbols=yes
-+ file_list_spec='@'
-+ # Tell ltmain to make .lib files, not .a files.
-+ libext=lib
-+ # Tell ltmain to make .dll files, not .so files.
-+ shrext_cmds=".dll"
-+ # FIXME: Setting linknames here is a bad hack.
-+ archive_cmds='$CC -o $output_objdir/$soname $libobjs $compiler_flags $deplibs -Wl,-dll~linknames='
-+ archive_expsym_cmds='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then
-+ sed -n -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' -e '1\\\!p' < $export_symbols > $output_objdir/$soname.exp;
-+ else
-+ sed -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' < $export_symbols > $output_objdir/$soname.exp;
-+ fi~
-+ $CC -o $tool_output_objdir$soname $libobjs $compiler_flags $deplibs "@$tool_output_objdir$soname.exp" -Wl,-DLL,-IMPLIB:"$tool_output_objdir$libname.dll.lib"~
-+ linknames='
-+ # The linker will not automatically build a static lib if we build a DLL.
-+ # _LT_TAGVAR(old_archive_from_new_cmds, )='true'
-+ enable_shared_with_static_runtimes=yes
-+ export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1,DATA/'\'' | $SED -e '\''/^[AITW][ ]/s/.*[ ]//'\'' | sort | uniq > $export_symbols'
-+ # Don't use ranlib
-+ old_postinstall_cmds='chmod 644 $oldlib'
-+ postlink_cmds='lt_outputfile="@OUTPUT@"~
-+ lt_tool_outputfile="@TOOL_OUTPUT@"~
-+ case $lt_outputfile in
-+ *.exe|*.EXE) ;;
-+ *)
-+ lt_outputfile="$lt_outputfile.exe"
-+ lt_tool_outputfile="$lt_tool_outputfile.exe"
-+ ;;
-+ esac~
-+ if test "$MANIFEST_TOOL" != ":" && test -f "$lt_outputfile.manifest"; then
-+ $MANIFEST_TOOL -manifest "$lt_tool_outputfile.manifest" -outputresource:"$lt_tool_outputfile" || exit 1;
-+ $RM "$lt_outputfile.manifest";
-+ fi'
-+ ;;
-+ *)
-+ # Assume MSVC wrapper
-+ hardcode_libdir_flag_spec=' '
-+ allow_undefined_flag=unsupported
-+ # Tell ltmain to make .lib files, not .a files.
-+ libext=lib
-+ # Tell ltmain to make .dll files, not .so files.
-+ shrext_cmds=".dll"
-+ # FIXME: Setting linknames here is a bad hack.
-+ archive_cmds='$CC -o $lib $libobjs $compiler_flags `func_echo_all "$deplibs" | $SED '\''s/ -lc$//'\''` -link -dll~linknames='
-+ # The linker will automatically build a .lib file if we build a DLL.
-+ old_archive_from_new_cmds='true'
-+ # FIXME: Should let the user specify the lib program.
-+ old_archive_cmds='lib -OUT:$oldlib$oldobjs$old_deplibs'
-+ enable_shared_with_static_runtimes=yes
-+ ;;
-+ esac
- ;;
-
- darwin* | rhapsody*)
-@@ -9340,7 +9918,7 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
-
- # FreeBSD 3 and greater uses gcc -shared to do shared libraries.
- freebsd* | dragonfly*)
-- archive_cmds='$CC -shared -o $lib $libobjs $deplibs $compiler_flags'
-+ archive_cmds='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags'
- hardcode_libdir_flag_spec='-R$libdir'
- hardcode_direct=yes
- hardcode_shlibpath_var=no
-@@ -9348,7 +9926,7 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
-
- hpux9*)
- if test "$GCC" = yes; then
-- archive_cmds='$RM $output_objdir/$soname~$CC -shared -fPIC ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $libobjs $deplibs $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib'
-+ archive_cmds='$RM $output_objdir/$soname~$CC -shared $pic_flag ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $libobjs $deplibs $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib'
- else
- archive_cmds='$RM $output_objdir/$soname~$LD -b +b $install_libdir -o $output_objdir/$soname $libobjs $deplibs $linker_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib'
- fi
-@@ -9364,7 +9942,7 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
-
- hpux10*)
- if test "$GCC" = yes && test "$with_gnu_ld" = no; then
-- archive_cmds='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'
-+ archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'
- else
- archive_cmds='$LD -b +h $soname +b $install_libdir -o $lib $libobjs $deplibs $linker_flags'
- fi
-@@ -9388,10 +9966,10 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
- archive_cmds='$CC -shared ${wl}+h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags'
- ;;
- ia64*)
-- archive_cmds='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags'
-+ archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags'
- ;;
- *)
-- archive_cmds='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'
-+ archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'
- ;;
- esac
- else
-@@ -9470,23 +10048,36 @@ fi
-
- irix5* | irix6* | nonstopux*)
- if test "$GCC" = yes; then
-- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
-+ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
- # Try to use the -exported_symbol ld option, if it does not
- # work, assume that -exports_file does not work either and
- # implicitly export all symbols.
-- save_LDFLAGS="$LDFLAGS"
-- LDFLAGS="$LDFLAGS -shared ${wl}-exported_symbol ${wl}foo ${wl}-update_registry ${wl}/dev/null"
-- cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-+ # This should be the same for all languages, so no per-tag cache variable.
-+ { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the $host_os linker accepts -exported_symbol" >&5
-+$as_echo_n "checking whether the $host_os linker accepts -exported_symbol... " >&6; }
-+if ${lt_cv_irix_exported_symbol+:} false; then :
-+ $as_echo_n "(cached) " >&6
-+else
-+ save_LDFLAGS="$LDFLAGS"
-+ LDFLAGS="$LDFLAGS -shared ${wl}-exported_symbol ${wl}foo ${wl}-update_registry ${wl}/dev/null"
-+ cat confdefs.h - <<_ACEOF >conftest.$ac_ext
- /* end confdefs.h. */
--int foo(void) {}
-+int foo (void) { return 0; }
- _ACEOF
- if ac_fn_c_try_link "$LINENO"; then :
-- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations ${wl}-exports_file ${wl}$export_symbols -o $lib'
--
-+ lt_cv_irix_exported_symbol=yes
-+else
-+ lt_cv_irix_exported_symbol=no
- fi
- rm -f core conftest.err conftest.$ac_objext \
- conftest$ac_exeext conftest.$ac_ext
-- LDFLAGS="$save_LDFLAGS"
-+ LDFLAGS="$save_LDFLAGS"
-+fi
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_irix_exported_symbol" >&5
-+$as_echo "$lt_cv_irix_exported_symbol" >&6; }
-+ if test "$lt_cv_irix_exported_symbol" = yes; then
-+ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations ${wl}-exports_file ${wl}$export_symbols -o $lib'
-+ fi
- else
- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib'
- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -exports_file $export_symbols -o $lib'
-@@ -9571,7 +10162,7 @@ rm -f core conftest.err conftest.$ac_objext \
- osf4* | osf5*) # as osf3* with the addition of -msym flag
- if test "$GCC" = yes; then
- allow_undefined_flag=' ${wl}-expect_unresolved ${wl}\*'
-- archive_cmds='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
-+ archive_cmds='$CC -shared${allow_undefined_flag} $pic_flag $libobjs $deplibs $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
- hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir'
- else
- allow_undefined_flag=' -expect_unresolved \*'
-@@ -9590,9 +10181,9 @@ rm -f core conftest.err conftest.$ac_objext \
- no_undefined_flag=' -z defs'
- if test "$GCC" = yes; then
- wlarc='${wl}'
-- archive_cmds='$CC -shared ${wl}-z ${wl}text ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags'
-+ archive_cmds='$CC -shared $pic_flag ${wl}-z ${wl}text ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags'
- archive_expsym_cmds='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~
-- $CC -shared ${wl}-z ${wl}text ${wl}-M ${wl}$lib.exp ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp'
-+ $CC -shared $pic_flag ${wl}-z ${wl}text ${wl}-M ${wl}$lib.exp ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp'
- else
- case `$CC -V 2>&1` in
- *"Compilers 5.0"*)
-@@ -10168,8 +10759,9 @@ cygwin* | mingw* | pw32* | cegcc*)
- need_version=no
- need_lib_prefix=no
-
-- case $GCC,$host_os in
-- yes,cygwin* | yes,mingw* | yes,pw32* | yes,cegcc*)
-+ case $GCC,$cc_basename in
-+ yes,*)
-+ # gcc
- library_names_spec='$libname.dll.a'
- # DLL is installed to $(libdir)/../bin by postinstall_cmds
- postinstall_cmds='base_file=`basename \${file}`~
-@@ -10202,13 +10794,71 @@ cygwin* | mingw* | pw32* | cegcc*)
- library_names_spec='`echo ${libname} | sed -e 's/^lib/pw/'``echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}'
- ;;
- esac
-+ dynamic_linker='Win32 ld.exe'
-+ ;;
-+
-+ *,cl*)
-+ # Native MSVC
-+ libname_spec='$name'
-+ soname_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}'
-+ library_names_spec='${libname}.dll.lib'
-+
-+ case $build_os in
-+ mingw*)
-+ sys_lib_search_path_spec=
-+ lt_save_ifs=$IFS
-+ IFS=';'
-+ for lt_path in $LIB
-+ do
-+ IFS=$lt_save_ifs
-+ # Let DOS variable expansion print the short 8.3 style file name.
-+ lt_path=`cd "$lt_path" 2>/dev/null && cmd //C "for %i in (".") do @echo %~si"`
-+ sys_lib_search_path_spec="$sys_lib_search_path_spec $lt_path"
-+ done
-+ IFS=$lt_save_ifs
-+ # Convert to MSYS style.
-+ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | sed -e 's|\\\\|/|g' -e 's| \\([a-zA-Z]\\):| /\\1|g' -e 's|^ ||'`
-+ ;;
-+ cygwin*)
-+ # Convert to unix form, then to dos form, then back to unix form
-+ # but this time dos style (no spaces!) so that the unix form looks
-+ # like /cygdrive/c/PROGRA~1:/cygdr...
-+ sys_lib_search_path_spec=`cygpath --path --unix "$LIB"`
-+ sys_lib_search_path_spec=`cygpath --path --dos "$sys_lib_search_path_spec" 2>/dev/null`
-+ sys_lib_search_path_spec=`cygpath --path --unix "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"`
-+ ;;
-+ *)
-+ sys_lib_search_path_spec="$LIB"
-+ if $ECHO "$sys_lib_search_path_spec" | $GREP ';[c-zC-Z]:/' >/dev/null; then
-+ # It is most probably a Windows format PATH.
-+ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e 's/;/ /g'`
-+ else
-+ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"`
-+ fi
-+ # FIXME: find the short name or the path components, as spaces are
-+ # common. (e.g. "Program Files" -> "PROGRA~1")
-+ ;;
-+ esac
-+
-+ # DLL is installed to $(libdir)/../bin by postinstall_cmds
-+ postinstall_cmds='base_file=`basename \${file}`~
-+ dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\${base_file}'\''i; echo \$dlname'\''`~
-+ dldir=$destdir/`dirname \$dlpath`~
-+ test -d \$dldir || mkdir -p \$dldir~
-+ $install_prog $dir/$dlname \$dldir/$dlname'
-+ postuninstall_cmds='dldll=`$SHELL 2>&1 -c '\''. $file; echo \$dlname'\''`~
-+ dlpath=$dir/\$dldll~
-+ $RM \$dlpath'
-+ shlibpath_overrides_runpath=yes
-+ dynamic_linker='Win32 link.exe'
- ;;
-
- *)
-+ # Assume MSVC wrapper
- library_names_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext} $libname.lib'
-+ dynamic_linker='Win32 ld.exe'
- ;;
- esac
-- dynamic_linker='Win32 ld.exe'
- # FIXME: first we should search . and the directory the executable is in
- shlibpath_var=PATH
- ;;
-@@ -11086,7 +11736,7 @@ else
- lt_dlunknown=0; lt_dlno_uscore=1; lt_dlneed_uscore=2
- lt_status=$lt_dlunknown
- cat > conftest.$ac_ext <<_LT_EOF
--#line 11089 "configure"
-+#line $LINENO "configure"
- #include "confdefs.h"
-
- #if HAVE_DLFCN_H
-@@ -11130,10 +11780,10 @@ else
- /* When -fvisbility=hidden is used, assume the code has been annotated
- correspondingly for the symbols needed. */
- #if defined(__GNUC__) && (((__GNUC__ == 3) && (__GNUC_MINOR__ >= 3)) || (__GNUC__ > 3))
--void fnord () __attribute__((visibility("default")));
-+int fnord () __attribute__((visibility("default")));
- #endif
-
--void fnord () { int i=42; }
-+int fnord () { return 42; }
- int main ()
- {
- void *self = dlopen (0, LT_DLGLOBAL|LT_DLLAZY_OR_NOW);
-@@ -11192,7 +11842,7 @@ else
- lt_dlunknown=0; lt_dlno_uscore=1; lt_dlneed_uscore=2
- lt_status=$lt_dlunknown
- cat > conftest.$ac_ext <<_LT_EOF
--#line 11195 "configure"
-+#line $LINENO "configure"
- #include "confdefs.h"
-
- #if HAVE_DLFCN_H
-@@ -11236,10 +11886,10 @@ else
- /* When -fvisbility=hidden is used, assume the code has been annotated
- correspondingly for the symbols needed. */
- #if defined(__GNUC__) && (((__GNUC__ == 3) && (__GNUC_MINOR__ >= 3)) || (__GNUC__ > 3))
--void fnord () __attribute__((visibility("default")));
-+int fnord () __attribute__((visibility("default")));
- #endif
-
--void fnord () { int i=42; }
-+int fnord () { return 42; }
- int main ()
- {
- void *self = dlopen (0, LT_DLGLOBAL|LT_DLLAZY_OR_NOW);
-@@ -13224,7 +13874,7 @@ SHARED_LDFLAGS=
- if test "$enable_shared" = "yes"; then
- x=`sed -n -e 's/^[ ]*PICFLAG[ ]*=[ ]*//p' < ../libiberty/Makefile | sed -n '$p'`
- if test -n "$x"; then
-- SHARED_LIBADD="-L`pwd`/../libiberty/pic -liberty"
-+ SHARED_LIBADD="`pwd`/../libiberty/pic/libiberty.a"
- fi
- fi
-
-@@ -15879,13 +16529,20 @@ exeext='`$ECHO "$exeext" | $SED "$delay_single_quote_subst"`'
- lt_unset='`$ECHO "$lt_unset" | $SED "$delay_single_quote_subst"`'
- lt_SP2NL='`$ECHO "$lt_SP2NL" | $SED "$delay_single_quote_subst"`'
- lt_NL2SP='`$ECHO "$lt_NL2SP" | $SED "$delay_single_quote_subst"`'
-+lt_cv_to_host_file_cmd='`$ECHO "$lt_cv_to_host_file_cmd" | $SED "$delay_single_quote_subst"`'
-+lt_cv_to_tool_file_cmd='`$ECHO "$lt_cv_to_tool_file_cmd" | $SED "$delay_single_quote_subst"`'
- reload_flag='`$ECHO "$reload_flag" | $SED "$delay_single_quote_subst"`'
- reload_cmds='`$ECHO "$reload_cmds" | $SED "$delay_single_quote_subst"`'
- OBJDUMP='`$ECHO "$OBJDUMP" | $SED "$delay_single_quote_subst"`'
- deplibs_check_method='`$ECHO "$deplibs_check_method" | $SED "$delay_single_quote_subst"`'
- file_magic_cmd='`$ECHO "$file_magic_cmd" | $SED "$delay_single_quote_subst"`'
-+file_magic_glob='`$ECHO "$file_magic_glob" | $SED "$delay_single_quote_subst"`'
-+want_nocaseglob='`$ECHO "$want_nocaseglob" | $SED "$delay_single_quote_subst"`'
-+DLLTOOL='`$ECHO "$DLLTOOL" | $SED "$delay_single_quote_subst"`'
-+sharedlib_from_linklib_cmd='`$ECHO "$sharedlib_from_linklib_cmd" | $SED "$delay_single_quote_subst"`'
- AR='`$ECHO "$AR" | $SED "$delay_single_quote_subst"`'
- AR_FLAGS='`$ECHO "$AR_FLAGS" | $SED "$delay_single_quote_subst"`'
-+archiver_list_spec='`$ECHO "$archiver_list_spec" | $SED "$delay_single_quote_subst"`'
- STRIP='`$ECHO "$STRIP" | $SED "$delay_single_quote_subst"`'
- RANLIB='`$ECHO "$RANLIB" | $SED "$delay_single_quote_subst"`'
- old_postinstall_cmds='`$ECHO "$old_postinstall_cmds" | $SED "$delay_single_quote_subst"`'
-@@ -15900,14 +16557,17 @@ lt_cv_sys_global_symbol_pipe='`$ECHO "$lt_cv_sys_global_symbol_pipe" | $SED "$de
- lt_cv_sys_global_symbol_to_cdecl='`$ECHO "$lt_cv_sys_global_symbol_to_cdecl" | $SED "$delay_single_quote_subst"`'
- lt_cv_sys_global_symbol_to_c_name_address='`$ECHO "$lt_cv_sys_global_symbol_to_c_name_address" | $SED "$delay_single_quote_subst"`'
- lt_cv_sys_global_symbol_to_c_name_address_lib_prefix='`$ECHO "$lt_cv_sys_global_symbol_to_c_name_address_lib_prefix" | $SED "$delay_single_quote_subst"`'
-+nm_file_list_spec='`$ECHO "$nm_file_list_spec" | $SED "$delay_single_quote_subst"`'
-+lt_sysroot='`$ECHO "$lt_sysroot" | $SED "$delay_single_quote_subst"`'
- objdir='`$ECHO "$objdir" | $SED "$delay_single_quote_subst"`'
- MAGIC_CMD='`$ECHO "$MAGIC_CMD" | $SED "$delay_single_quote_subst"`'
- lt_prog_compiler_no_builtin_flag='`$ECHO "$lt_prog_compiler_no_builtin_flag" | $SED "$delay_single_quote_subst"`'
--lt_prog_compiler_wl='`$ECHO "$lt_prog_compiler_wl" | $SED "$delay_single_quote_subst"`'
- lt_prog_compiler_pic='`$ECHO "$lt_prog_compiler_pic" | $SED "$delay_single_quote_subst"`'
-+lt_prog_compiler_wl='`$ECHO "$lt_prog_compiler_wl" | $SED "$delay_single_quote_subst"`'
- lt_prog_compiler_static='`$ECHO "$lt_prog_compiler_static" | $SED "$delay_single_quote_subst"`'
- lt_cv_prog_compiler_c_o='`$ECHO "$lt_cv_prog_compiler_c_o" | $SED "$delay_single_quote_subst"`'
- need_locks='`$ECHO "$need_locks" | $SED "$delay_single_quote_subst"`'
-+MANIFEST_TOOL='`$ECHO "$MANIFEST_TOOL" | $SED "$delay_single_quote_subst"`'
- DSYMUTIL='`$ECHO "$DSYMUTIL" | $SED "$delay_single_quote_subst"`'
- NMEDIT='`$ECHO "$NMEDIT" | $SED "$delay_single_quote_subst"`'
- LIPO='`$ECHO "$LIPO" | $SED "$delay_single_quote_subst"`'
-@@ -15940,12 +16600,12 @@ hardcode_shlibpath_var='`$ECHO "$hardcode_shlibpath_var" | $SED "$delay_single_q
- hardcode_automatic='`$ECHO "$hardcode_automatic" | $SED "$delay_single_quote_subst"`'
- inherit_rpath='`$ECHO "$inherit_rpath" | $SED "$delay_single_quote_subst"`'
- link_all_deplibs='`$ECHO "$link_all_deplibs" | $SED "$delay_single_quote_subst"`'
--fix_srcfile_path='`$ECHO "$fix_srcfile_path" | $SED "$delay_single_quote_subst"`'
- always_export_symbols='`$ECHO "$always_export_symbols" | $SED "$delay_single_quote_subst"`'
- export_symbols_cmds='`$ECHO "$export_symbols_cmds" | $SED "$delay_single_quote_subst"`'
- exclude_expsyms='`$ECHO "$exclude_expsyms" | $SED "$delay_single_quote_subst"`'
- include_expsyms='`$ECHO "$include_expsyms" | $SED "$delay_single_quote_subst"`'
- prelink_cmds='`$ECHO "$prelink_cmds" | $SED "$delay_single_quote_subst"`'
-+postlink_cmds='`$ECHO "$postlink_cmds" | $SED "$delay_single_quote_subst"`'
- file_list_spec='`$ECHO "$file_list_spec" | $SED "$delay_single_quote_subst"`'
- variables_saved_for_relink='`$ECHO "$variables_saved_for_relink" | $SED "$delay_single_quote_subst"`'
- need_lib_prefix='`$ECHO "$need_lib_prefix" | $SED "$delay_single_quote_subst"`'
-@@ -16000,8 +16660,13 @@ reload_flag \
- OBJDUMP \
- deplibs_check_method \
- file_magic_cmd \
-+file_magic_glob \
-+want_nocaseglob \
-+DLLTOOL \
-+sharedlib_from_linklib_cmd \
- AR \
- AR_FLAGS \
-+archiver_list_spec \
- STRIP \
- RANLIB \
- CC \
-@@ -16011,12 +16676,14 @@ lt_cv_sys_global_symbol_pipe \
- lt_cv_sys_global_symbol_to_cdecl \
- lt_cv_sys_global_symbol_to_c_name_address \
- lt_cv_sys_global_symbol_to_c_name_address_lib_prefix \
-+nm_file_list_spec \
- lt_prog_compiler_no_builtin_flag \
--lt_prog_compiler_wl \
- lt_prog_compiler_pic \
-+lt_prog_compiler_wl \
- lt_prog_compiler_static \
- lt_cv_prog_compiler_c_o \
- need_locks \
-+MANIFEST_TOOL \
- DSYMUTIL \
- NMEDIT \
- LIPO \
-@@ -16032,7 +16699,6 @@ no_undefined_flag \
- hardcode_libdir_flag_spec \
- hardcode_libdir_flag_spec_ld \
- hardcode_libdir_separator \
--fix_srcfile_path \
- exclude_expsyms \
- include_expsyms \
- file_list_spec \
-@@ -16068,6 +16734,7 @@ module_cmds \
- module_expsym_cmds \
- export_symbols_cmds \
- prelink_cmds \
-+postlink_cmds \
- postinstall_cmds \
- postuninstall_cmds \
- finish_cmds \
-@@ -16837,7 +17504,8 @@ $as_echo X"$file" |
- # NOTE: Changes made to this file will be lost: look at ltmain.sh.
- #
- # Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005,
--# 2006, 2007, 2008, 2009 Free Software Foundation, Inc.
-+# 2006, 2007, 2008, 2009, 2010 Free Software Foundation,
-+# Inc.
- # Written by Gordon Matzigkeit, 1996
- #
- # This file is part of GNU Libtool.
-@@ -16940,19 +17608,42 @@ SP2NL=$lt_lt_SP2NL
- # turn newlines into spaces.
- NL2SP=$lt_lt_NL2SP
-
-+# convert \$build file names to \$host format.
-+to_host_file_cmd=$lt_cv_to_host_file_cmd
-+
-+# convert \$build files to toolchain format.
-+to_tool_file_cmd=$lt_cv_to_tool_file_cmd
-+
- # An object symbol dumper.
- OBJDUMP=$lt_OBJDUMP
-
- # Method to check whether dependent libraries are shared objects.
- deplibs_check_method=$lt_deplibs_check_method
-
--# Command to use when deplibs_check_method == "file_magic".
-+# Command to use when deplibs_check_method = "file_magic".
- file_magic_cmd=$lt_file_magic_cmd
-
-+# How to find potential files when deplibs_check_method = "file_magic".
-+file_magic_glob=$lt_file_magic_glob
-+
-+# Find potential files using nocaseglob when deplibs_check_method = "file_magic".
-+want_nocaseglob=$lt_want_nocaseglob
-+
-+# DLL creation program.
-+DLLTOOL=$lt_DLLTOOL
-+
-+# Command to associate shared and link libraries.
-+sharedlib_from_linklib_cmd=$lt_sharedlib_from_linklib_cmd
-+
- # The archiver.
- AR=$lt_AR
-+
-+# Flags to create an archive.
- AR_FLAGS=$lt_AR_FLAGS
-
-+# How to feed a file listing to the archiver.
-+archiver_list_spec=$lt_archiver_list_spec
-+
- # A symbol stripping program.
- STRIP=$lt_STRIP
-
-@@ -16982,6 +17673,12 @@ global_symbol_to_c_name_address=$lt_lt_cv_sys_global_symbol_to_c_name_address
- # Transform the output of nm in a C name address pair when lib prefix is needed.
- global_symbol_to_c_name_address_lib_prefix=$lt_lt_cv_sys_global_symbol_to_c_name_address_lib_prefix
-
-+# Specify filename containing input files for \$NM.
-+nm_file_list_spec=$lt_nm_file_list_spec
-+
-+# The root where to search for dependent libraries,and in which our libraries should be installed.
-+lt_sysroot=$lt_sysroot
-+
- # The name of the directory that contains temporary libtool files.
- objdir=$objdir
-
-@@ -16991,6 +17688,9 @@ MAGIC_CMD=$MAGIC_CMD
- # Must we lock files when doing compilation?
- need_locks=$lt_need_locks
-
-+# Manifest tool.
-+MANIFEST_TOOL=$lt_MANIFEST_TOOL
-+
- # Tool to manipulate archived DWARF debug symbol files on Mac OS X.
- DSYMUTIL=$lt_DSYMUTIL
-
-@@ -17105,12 +17805,12 @@ with_gcc=$GCC
- # Compiler flag to turn off builtin functions.
- no_builtin_flag=$lt_lt_prog_compiler_no_builtin_flag
-
--# How to pass a linker flag through the compiler.
--wl=$lt_lt_prog_compiler_wl
--
- # Additional compiler flags for building library objects.
- pic_flag=$lt_lt_prog_compiler_pic
-
-+# How to pass a linker flag through the compiler.
-+wl=$lt_lt_prog_compiler_wl
-+
- # Compiler flag to prevent dynamic linking.
- link_static_flag=$lt_lt_prog_compiler_static
-
-@@ -17197,9 +17897,6 @@ inherit_rpath=$inherit_rpath
- # Whether libtool must link a program against all its dependency libraries.
- link_all_deplibs=$link_all_deplibs
-
--# Fix the shell variable \$srcfile for the compiler.
--fix_srcfile_path=$lt_fix_srcfile_path
--
- # Set to "yes" if exported symbols are required.
- always_export_symbols=$always_export_symbols
-
-@@ -17215,6 +17912,9 @@ include_expsyms=$lt_include_expsyms
- # Commands necessary for linking programs (against libraries) with templates.
- prelink_cmds=$lt_prelink_cmds
-
-+# Commands necessary for finishing linking programs.
-+postlink_cmds=$lt_postlink_cmds
-+
- # Specify filename containing input files.
- file_list_spec=$lt_file_list_spec
-
-@@ -17247,210 +17947,169 @@ ltmain="$ac_aux_dir/ltmain.sh"
- # if finds mixed CR/LF and LF-only lines. Since sed operates in
- # text mode, it properly converts lines to CR/LF. This bash problem
- # is reportedly fixed, but why not run on old versions too?
-- sed '/^# Generated shell functions inserted here/q' "$ltmain" >> "$cfgfile" \
-- || (rm -f "$cfgfile"; exit 1)
--
-- case $xsi_shell in
-- yes)
-- cat << \_LT_EOF >> "$cfgfile"
--
--# func_dirname file append nondir_replacement
--# Compute the dirname of FILE. If nonempty, add APPEND to the result,
--# otherwise set result to NONDIR_REPLACEMENT.
--func_dirname ()
--{
-- case ${1} in
-- */*) func_dirname_result="${1%/*}${2}" ;;
-- * ) func_dirname_result="${3}" ;;
-- esac
--}
--
--# func_basename file
--func_basename ()
--{
-- func_basename_result="${1##*/}"
--}
--
--# func_dirname_and_basename file append nondir_replacement
--# perform func_basename and func_dirname in a single function
--# call:
--# dirname: Compute the dirname of FILE. If nonempty,
--# add APPEND to the result, otherwise set result
--# to NONDIR_REPLACEMENT.
--# value returned in "$func_dirname_result"
--# basename: Compute filename of FILE.
--# value retuned in "$func_basename_result"
--# Implementation must be kept synchronized with func_dirname
--# and func_basename. For efficiency, we do not delegate to
--# those functions but instead duplicate the functionality here.
--func_dirname_and_basename ()
--{
-- case ${1} in
-- */*) func_dirname_result="${1%/*}${2}" ;;
-- * ) func_dirname_result="${3}" ;;
-- esac
-- func_basename_result="${1##*/}"
--}
--
--# func_stripname prefix suffix name
--# strip PREFIX and SUFFIX off of NAME.
--# PREFIX and SUFFIX must not contain globbing or regex special
--# characters, hashes, percent signs, but SUFFIX may contain a leading
--# dot (in which case that matches only a dot).
--func_stripname ()
--{
-- # pdksh 5.2.14 does not do ${X%$Y} correctly if both X and Y are
-- # positional parameters, so assign one to ordinary parameter first.
-- func_stripname_result=${3}
-- func_stripname_result=${func_stripname_result#"${1}"}
-- func_stripname_result=${func_stripname_result%"${2}"}
--}
--
--# func_opt_split
--func_opt_split ()
--{
-- func_opt_split_opt=${1%%=*}
-- func_opt_split_arg=${1#*=}
--}
--
--# func_lo2o object
--func_lo2o ()
--{
-- case ${1} in
-- *.lo) func_lo2o_result=${1%.lo}.${objext} ;;
-- *) func_lo2o_result=${1} ;;
-- esac
--}
--
--# func_xform libobj-or-source
--func_xform ()
--{
-- func_xform_result=${1%.*}.lo
--}
--
--# func_arith arithmetic-term...
--func_arith ()
--{
-- func_arith_result=$(( $* ))
--}
--
--# func_len string
--# STRING may not start with a hyphen.
--func_len ()
--{
-- func_len_result=${#1}
--}
--
--_LT_EOF
-- ;;
-- *) # Bourne compatible functions.
-- cat << \_LT_EOF >> "$cfgfile"
--
--# func_dirname file append nondir_replacement
--# Compute the dirname of FILE. If nonempty, add APPEND to the result,
--# otherwise set result to NONDIR_REPLACEMENT.
--func_dirname ()
--{
-- # Extract subdirectory from the argument.
-- func_dirname_result=`$ECHO "${1}" | $SED "$dirname"`
-- if test "X$func_dirname_result" = "X${1}"; then
-- func_dirname_result="${3}"
-- else
-- func_dirname_result="$func_dirname_result${2}"
-- fi
--}
--
--# func_basename file
--func_basename ()
--{
-- func_basename_result=`$ECHO "${1}" | $SED "$basename"`
--}
--
--
--# func_stripname prefix suffix name
--# strip PREFIX and SUFFIX off of NAME.
--# PREFIX and SUFFIX must not contain globbing or regex special
--# characters, hashes, percent signs, but SUFFIX may contain a leading
--# dot (in which case that matches only a dot).
--# func_strip_suffix prefix name
--func_stripname ()
--{
-- case ${2} in
-- .*) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%\\\\${2}\$%%"`;;
-- *) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%${2}\$%%"`;;
-- esac
--}
--
--# sed scripts:
--my_sed_long_opt='1s/^\(-[^=]*\)=.*/\1/;q'
--my_sed_long_arg='1s/^-[^=]*=//'
--
--# func_opt_split
--func_opt_split ()
--{
-- func_opt_split_opt=`$ECHO "${1}" | $SED "$my_sed_long_opt"`
-- func_opt_split_arg=`$ECHO "${1}" | $SED "$my_sed_long_arg"`
--}
--
--# func_lo2o object
--func_lo2o ()
--{
-- func_lo2o_result=`$ECHO "${1}" | $SED "$lo2o"`
--}
--
--# func_xform libobj-or-source
--func_xform ()
--{
-- func_xform_result=`$ECHO "${1}" | $SED 's/\.[^.]*$/.lo/'`
--}
--
--# func_arith arithmetic-term...
--func_arith ()
--{
-- func_arith_result=`expr "$@"`
--}
--
--# func_len string
--# STRING may not start with a hyphen.
--func_len ()
--{
-- func_len_result=`expr "$1" : ".*" 2>/dev/null || echo $max_cmd_len`
--}
--
--_LT_EOF
--esac
--
--case $lt_shell_append in
-- yes)
-- cat << \_LT_EOF >> "$cfgfile"
--
--# func_append var value
--# Append VALUE to the end of shell variable VAR.
--func_append ()
--{
-- eval "$1+=\$2"
--}
--_LT_EOF
-- ;;
-- *)
-- cat << \_LT_EOF >> "$cfgfile"
--
--# func_append var value
--# Append VALUE to the end of shell variable VAR.
--func_append ()
--{
-- eval "$1=\$$1\$2"
--}
--
--_LT_EOF
-- ;;
-- esac
--
--
-- sed -n '/^# Generated shell functions inserted here/,$p' "$ltmain" >> "$cfgfile" \
-- || (rm -f "$cfgfile"; exit 1)
--
-- mv -f "$cfgfile" "$ofile" ||
-+ sed '$q' "$ltmain" >> "$cfgfile" \
-+ || (rm -f "$cfgfile"; exit 1)
-+
-+ if test x"$xsi_shell" = xyes; then
-+ sed -e '/^func_dirname ()$/,/^} # func_dirname /c\
-+func_dirname ()\
-+{\
-+\ case ${1} in\
-+\ */*) func_dirname_result="${1%/*}${2}" ;;\
-+\ * ) func_dirname_result="${3}" ;;\
-+\ esac\
-+} # Extended-shell func_dirname implementation' "$cfgfile" > $cfgfile.tmp \
-+ && mv -f "$cfgfile.tmp" "$cfgfile" \
-+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-+test 0 -eq $? || _lt_function_replace_fail=:
-+
-+
-+ sed -e '/^func_basename ()$/,/^} # func_basename /c\
-+func_basename ()\
-+{\
-+\ func_basename_result="${1##*/}"\
-+} # Extended-shell func_basename implementation' "$cfgfile" > $cfgfile.tmp \
-+ && mv -f "$cfgfile.tmp" "$cfgfile" \
-+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-+test 0 -eq $? || _lt_function_replace_fail=:
-+
-+
-+ sed -e '/^func_dirname_and_basename ()$/,/^} # func_dirname_and_basename /c\
-+func_dirname_and_basename ()\
-+{\
-+\ case ${1} in\
-+\ */*) func_dirname_result="${1%/*}${2}" ;;\
-+\ * ) func_dirname_result="${3}" ;;\
-+\ esac\
-+\ func_basename_result="${1##*/}"\
-+} # Extended-shell func_dirname_and_basename implementation' "$cfgfile" > $cfgfile.tmp \
-+ && mv -f "$cfgfile.tmp" "$cfgfile" \
-+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-+test 0 -eq $? || _lt_function_replace_fail=:
-+
-+
-+ sed -e '/^func_stripname ()$/,/^} # func_stripname /c\
-+func_stripname ()\
-+{\
-+\ # pdksh 5.2.14 does not do ${X%$Y} correctly if both X and Y are\
-+\ # positional parameters, so assign one to ordinary parameter first.\
-+\ func_stripname_result=${3}\
-+\ func_stripname_result=${func_stripname_result#"${1}"}\
-+\ func_stripname_result=${func_stripname_result%"${2}"}\
-+} # Extended-shell func_stripname implementation' "$cfgfile" > $cfgfile.tmp \
-+ && mv -f "$cfgfile.tmp" "$cfgfile" \
-+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-+test 0 -eq $? || _lt_function_replace_fail=:
-+
-+
-+ sed -e '/^func_split_long_opt ()$/,/^} # func_split_long_opt /c\
-+func_split_long_opt ()\
-+{\
-+\ func_split_long_opt_name=${1%%=*}\
-+\ func_split_long_opt_arg=${1#*=}\
-+} # Extended-shell func_split_long_opt implementation' "$cfgfile" > $cfgfile.tmp \
-+ && mv -f "$cfgfile.tmp" "$cfgfile" \
-+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-+test 0 -eq $? || _lt_function_replace_fail=:
-+
-+
-+ sed -e '/^func_split_short_opt ()$/,/^} # func_split_short_opt /c\
-+func_split_short_opt ()\
-+{\
-+\ func_split_short_opt_arg=${1#??}\
-+\ func_split_short_opt_name=${1%"$func_split_short_opt_arg"}\
-+} # Extended-shell func_split_short_opt implementation' "$cfgfile" > $cfgfile.tmp \
-+ && mv -f "$cfgfile.tmp" "$cfgfile" \
-+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-+test 0 -eq $? || _lt_function_replace_fail=:
-+
-+
-+ sed -e '/^func_lo2o ()$/,/^} # func_lo2o /c\
-+func_lo2o ()\
-+{\
-+\ case ${1} in\
-+\ *.lo) func_lo2o_result=${1%.lo}.${objext} ;;\
-+\ *) func_lo2o_result=${1} ;;\
-+\ esac\
-+} # Extended-shell func_lo2o implementation' "$cfgfile" > $cfgfile.tmp \
-+ && mv -f "$cfgfile.tmp" "$cfgfile" \
-+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-+test 0 -eq $? || _lt_function_replace_fail=:
-+
-+
-+ sed -e '/^func_xform ()$/,/^} # func_xform /c\
-+func_xform ()\
-+{\
-+ func_xform_result=${1%.*}.lo\
-+} # Extended-shell func_xform implementation' "$cfgfile" > $cfgfile.tmp \
-+ && mv -f "$cfgfile.tmp" "$cfgfile" \
-+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-+test 0 -eq $? || _lt_function_replace_fail=:
-+
-+
-+ sed -e '/^func_arith ()$/,/^} # func_arith /c\
-+func_arith ()\
-+{\
-+ func_arith_result=$(( $* ))\
-+} # Extended-shell func_arith implementation' "$cfgfile" > $cfgfile.tmp \
-+ && mv -f "$cfgfile.tmp" "$cfgfile" \
-+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-+test 0 -eq $? || _lt_function_replace_fail=:
-+
-+
-+ sed -e '/^func_len ()$/,/^} # func_len /c\
-+func_len ()\
-+{\
-+ func_len_result=${#1}\
-+} # Extended-shell func_len implementation' "$cfgfile" > $cfgfile.tmp \
-+ && mv -f "$cfgfile.tmp" "$cfgfile" \
-+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-+test 0 -eq $? || _lt_function_replace_fail=:
-+
-+fi
-+
-+if test x"$lt_shell_append" = xyes; then
-+ sed -e '/^func_append ()$/,/^} # func_append /c\
-+func_append ()\
-+{\
-+ eval "${1}+=\\${2}"\
-+} # Extended-shell func_append implementation' "$cfgfile" > $cfgfile.tmp \
-+ && mv -f "$cfgfile.tmp" "$cfgfile" \
-+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-+test 0 -eq $? || _lt_function_replace_fail=:
-+
-+
-+ sed -e '/^func_append_quoted ()$/,/^} # func_append_quoted /c\
-+func_append_quoted ()\
-+{\
-+\ func_quote_for_eval "${2}"\
-+\ eval "${1}+=\\\\ \\$func_quote_for_eval_result"\
-+} # Extended-shell func_append_quoted implementation' "$cfgfile" > $cfgfile.tmp \
-+ && mv -f "$cfgfile.tmp" "$cfgfile" \
-+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-+test 0 -eq $? || _lt_function_replace_fail=:
-+
-+
-+ # Save a `func_append' function call where possible by direct use of '+='
-+ sed -e 's%func_append \([a-zA-Z_]\{1,\}\) "%\1+="%g' $cfgfile > $cfgfile.tmp \
-+ && mv -f "$cfgfile.tmp" "$cfgfile" \
-+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-+ test 0 -eq $? || _lt_function_replace_fail=:
-+else
-+ # Save a `func_append' function call even when '+=' is not available
-+ sed -e 's%func_append \([a-zA-Z_]\{1,\}\) "%\1="$\1%g' $cfgfile > $cfgfile.tmp \
-+ && mv -f "$cfgfile.tmp" "$cfgfile" \
-+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-+ test 0 -eq $? || _lt_function_replace_fail=:
-+fi
-+
-+if test x"$_lt_function_replace_fail" = x":"; then
-+ { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: Unable to substitute extended shell functions in $ofile" >&5
-+$as_echo "$as_me: WARNING: Unable to substitute extended shell functions in $ofile" >&2;}
-+fi
-+
-+
-+ mv -f "$cfgfile" "$ofile" ||
- (rm -f "$ofile" && cp "$cfgfile" "$ofile" && rm -f "$cfgfile")
- chmod +x "$ofile"
-
-diff --git a/bfd/configure.ac b/bfd/configure.ac
-index a9078965c40..22b5b7ea567 100644
---- a/bfd/configure.ac
-+++ b/bfd/configure.ac
-@@ -303,7 +303,7 @@ changequote(,)dnl
- x=`sed -n -e 's/^[ ]*PICFLAG[ ]*=[ ]*//p' < ../libiberty/Makefile | sed -n '$p'`
- changequote([,])dnl
- if test -n "$x"; then
-- SHARED_LIBADD="-L`pwd`/../libiberty/pic -liberty"
-+ SHARED_LIBADD="`pwd`/../libiberty/pic/libiberty.a"
- fi
- fi
-
-diff --git a/binutils/configure b/binutils/configure
-index 8cde216cb1f..15f3f4eb874 100755
---- a/binutils/configure
-+++ b/binutils/configure
-@@ -696,8 +696,11 @@ OTOOL
- LIPO
- NMEDIT
- DSYMUTIL
-+MANIFEST_TOOL
- RANLIB
-+ac_ct_AR
- AR
-+DLLTOOL
- OBJDUMP
- LN_S
- NM
-@@ -814,6 +817,7 @@ enable_static
- with_pic
- enable_fast_install
- with_gnu_ld
-+with_libtool_sysroot
- enable_libtool_lock
- enable_plugins
- enable_largefile
-@@ -1509,6 +1513,8 @@ Optional Packages:
- --with-pic try to use only PIC/non-PIC objects [default=use
- both]
- --with-gnu-ld assume the C compiler uses GNU ld [default=no]
-+ --with-libtool-sysroot=DIR Search for dependent libraries within DIR
-+ (or the compiler's sysroot if not specified).
- --with-debuginfod Enable debuginfo lookups with debuginfod
- (auto/yes/no)
- --with-system-zlib use installed libz
-@@ -4883,8 +4889,8 @@ esac
-
-
-
--macro_version='2.2.7a'
--macro_revision='1.3134'
-+macro_version='2.4'
-+macro_revision='1.3293'
-
-
-
-@@ -4924,7 +4930,7 @@ ECHO=$ECHO$ECHO$ECHO$ECHO$ECHO$ECHO
- { $as_echo "$as_me:${as_lineno-$LINENO}: checking how to print strings" >&5
- $as_echo_n "checking how to print strings... " >&6; }
- # Test print first, because it will be a builtin if present.
--if test "X`print -r -- -n 2>/dev/null`" = X-n && \
-+if test "X`( print -r -- -n ) 2>/dev/null`" = X-n && \
- test "X`print -r -- $ECHO 2>/dev/null`" = "X$ECHO"; then
- ECHO='print -r --'
- elif test "X`printf %s $ECHO 2>/dev/null`" = "X$ECHO"; then
-@@ -5611,8 +5617,8 @@ $as_echo_n "checking whether the shell understands some XSI constructs... " >&6;
- # Try some XSI features
- xsi_shell=no
- ( _lt_dummy="a/b/c"
-- test "${_lt_dummy##*/},${_lt_dummy%/*},"${_lt_dummy%"$_lt_dummy"}, \
-- = c,a/b,, \
-+ test "${_lt_dummy##*/},${_lt_dummy%/*},${_lt_dummy#??}"${_lt_dummy%"$_lt_dummy"}, \
-+ = c,a/b,b/c, \
- && eval 'test $(( 1 + 1 )) -eq 2 \
- && test "${#_lt_dummy}" -eq 5' ) >/dev/null 2>&1 \
- && xsi_shell=yes
-@@ -5661,6 +5667,80 @@ esac
-
-
-
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to convert $build file names to $host format" >&5
-+$as_echo_n "checking how to convert $build file names to $host format... " >&6; }
-+if ${lt_cv_to_host_file_cmd+:} false; then :
-+ $as_echo_n "(cached) " >&6
-+else
-+ case $host in
-+ *-*-mingw* )
-+ case $build in
-+ *-*-mingw* ) # actually msys
-+ lt_cv_to_host_file_cmd=func_convert_file_msys_to_w32
-+ ;;
-+ *-*-cygwin* )
-+ lt_cv_to_host_file_cmd=func_convert_file_cygwin_to_w32
-+ ;;
-+ * ) # otherwise, assume *nix
-+ lt_cv_to_host_file_cmd=func_convert_file_nix_to_w32
-+ ;;
-+ esac
-+ ;;
-+ *-*-cygwin* )
-+ case $build in
-+ *-*-mingw* ) # actually msys
-+ lt_cv_to_host_file_cmd=func_convert_file_msys_to_cygwin
-+ ;;
-+ *-*-cygwin* )
-+ lt_cv_to_host_file_cmd=func_convert_file_noop
-+ ;;
-+ * ) # otherwise, assume *nix
-+ lt_cv_to_host_file_cmd=func_convert_file_nix_to_cygwin
-+ ;;
-+ esac
-+ ;;
-+ * ) # unhandled hosts (and "normal" native builds)
-+ lt_cv_to_host_file_cmd=func_convert_file_noop
-+ ;;
-+esac
-+
-+fi
-+
-+to_host_file_cmd=$lt_cv_to_host_file_cmd
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_to_host_file_cmd" >&5
-+$as_echo "$lt_cv_to_host_file_cmd" >&6; }
-+
-+
-+
-+
-+
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to convert $build file names to toolchain format" >&5
-+$as_echo_n "checking how to convert $build file names to toolchain format... " >&6; }
-+if ${lt_cv_to_tool_file_cmd+:} false; then :
-+ $as_echo_n "(cached) " >&6
-+else
-+ #assume ordinary cross tools, or native build.
-+lt_cv_to_tool_file_cmd=func_convert_file_noop
-+case $host in
-+ *-*-mingw* )
-+ case $build in
-+ *-*-mingw* ) # actually msys
-+ lt_cv_to_tool_file_cmd=func_convert_file_msys_to_w32
-+ ;;
-+ esac
-+ ;;
-+esac
-+
-+fi
-+
-+to_tool_file_cmd=$lt_cv_to_tool_file_cmd
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_to_tool_file_cmd" >&5
-+$as_echo "$lt_cv_to_tool_file_cmd" >&6; }
-+
-+
-+
-+
-+
- { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $LD option to reload object files" >&5
- $as_echo_n "checking for $LD option to reload object files... " >&6; }
- if ${lt_cv_ld_reload_flag+:} false; then :
-@@ -5677,6 +5757,11 @@ case $reload_flag in
- esac
- reload_cmds='$LD$reload_flag -o $output$reload_objs'
- case $host_os in
-+ cygwin* | mingw* | pw32* | cegcc*)
-+ if test "$GCC" != yes; then
-+ reload_cmds=false
-+ fi
-+ ;;
- darwin*)
- if test "$GCC" = yes; then
- reload_cmds='$LTCC $LTCFLAGS -nostdlib ${wl}-r -o $output$reload_objs'
-@@ -5845,7 +5930,8 @@ mingw* | pw32*)
- lt_cv_deplibs_check_method='file_magic ^x86 archive import|^x86 DLL'
- lt_cv_file_magic_cmd='func_win32_libid'
- else
-- lt_cv_deplibs_check_method='file_magic file format pei*-i386(.*architecture: i386)?'
-+ # Keep this pattern in sync with the one in func_win32_libid.
-+ lt_cv_deplibs_check_method='file_magic file format (pei*-i386(.*architecture: i386)?|pe-arm-wince|pe-x86-64)'
- lt_cv_file_magic_cmd='$OBJDUMP -f'
- fi
- ;;
-@@ -5999,6 +6085,21 @@ esac
- fi
- { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_deplibs_check_method" >&5
- $as_echo "$lt_cv_deplibs_check_method" >&6; }
-+
-+file_magic_glob=
-+want_nocaseglob=no
-+if test "$build" = "$host"; then
-+ case $host_os in
-+ mingw* | pw32*)
-+ if ( shopt | grep nocaseglob ) >/dev/null 2>&1; then
-+ want_nocaseglob=yes
-+ else
-+ file_magic_glob=`echo aAbBcCdDeEfFgGhHiIjJkKlLmMnNoOpPqQrRsStTuUvVwWxXyYzZ | $SED -e "s/\(..\)/s\/[\1]\/[\1]\/g;/g"`
-+ fi
-+ ;;
-+ esac
-+fi
-+
- file_magic_cmd=$lt_cv_file_magic_cmd
- deplibs_check_method=$lt_cv_deplibs_check_method
- test -z "$deplibs_check_method" && deplibs_check_method=unknown
-@@ -6014,6 +6115,157 @@ test -z "$deplibs_check_method" && deplibs_check_method=unknown
-
-
-
-+
-+
-+
-+
-+
-+
-+
-+
-+
-+
-+if test -n "$ac_tool_prefix"; then
-+ # Extract the first word of "${ac_tool_prefix}dlltool", so it can be a program name with args.
-+set dummy ${ac_tool_prefix}dlltool; ac_word=$2
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-+$as_echo_n "checking for $ac_word... " >&6; }
-+if ${ac_cv_prog_DLLTOOL+:} false; then :
-+ $as_echo_n "(cached) " >&6
-+else
-+ if test -n "$DLLTOOL"; then
-+ ac_cv_prog_DLLTOOL="$DLLTOOL" # Let the user override the test.
-+else
-+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-+for as_dir in $PATH
-+do
-+ IFS=$as_save_IFS
-+ test -z "$as_dir" && as_dir=.
-+ for ac_exec_ext in '' $ac_executable_extensions; do
-+ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
-+ ac_cv_prog_DLLTOOL="${ac_tool_prefix}dlltool"
-+ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
-+ break 2
-+ fi
-+done
-+ done
-+IFS=$as_save_IFS
-+
-+fi
-+fi
-+DLLTOOL=$ac_cv_prog_DLLTOOL
-+if test -n "$DLLTOOL"; then
-+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $DLLTOOL" >&5
-+$as_echo "$DLLTOOL" >&6; }
-+else
-+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-+$as_echo "no" >&6; }
-+fi
-+
-+
-+fi
-+if test -z "$ac_cv_prog_DLLTOOL"; then
-+ ac_ct_DLLTOOL=$DLLTOOL
-+ # Extract the first word of "dlltool", so it can be a program name with args.
-+set dummy dlltool; ac_word=$2
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-+$as_echo_n "checking for $ac_word... " >&6; }
-+if ${ac_cv_prog_ac_ct_DLLTOOL+:} false; then :
-+ $as_echo_n "(cached) " >&6
-+else
-+ if test -n "$ac_ct_DLLTOOL"; then
-+ ac_cv_prog_ac_ct_DLLTOOL="$ac_ct_DLLTOOL" # Let the user override the test.
-+else
-+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-+for as_dir in $PATH
-+do
-+ IFS=$as_save_IFS
-+ test -z "$as_dir" && as_dir=.
-+ for ac_exec_ext in '' $ac_executable_extensions; do
-+ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
-+ ac_cv_prog_ac_ct_DLLTOOL="dlltool"
-+ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
-+ break 2
-+ fi
-+done
-+ done
-+IFS=$as_save_IFS
-+
-+fi
-+fi
-+ac_ct_DLLTOOL=$ac_cv_prog_ac_ct_DLLTOOL
-+if test -n "$ac_ct_DLLTOOL"; then
-+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_DLLTOOL" >&5
-+$as_echo "$ac_ct_DLLTOOL" >&6; }
-+else
-+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-+$as_echo "no" >&6; }
-+fi
-+
-+ if test "x$ac_ct_DLLTOOL" = x; then
-+ DLLTOOL="false"
-+ else
-+ case $cross_compiling:$ac_tool_warned in
-+yes:)
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
-+$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
-+ac_tool_warned=yes ;;
-+esac
-+ DLLTOOL=$ac_ct_DLLTOOL
-+ fi
-+else
-+ DLLTOOL="$ac_cv_prog_DLLTOOL"
-+fi
-+
-+test -z "$DLLTOOL" && DLLTOOL=dlltool
-+
-+
-+
-+
-+
-+
-+
-+
-+
-+
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to associate runtime and link libraries" >&5
-+$as_echo_n "checking how to associate runtime and link libraries... " >&6; }
-+if ${lt_cv_sharedlib_from_linklib_cmd+:} false; then :
-+ $as_echo_n "(cached) " >&6
-+else
-+ lt_cv_sharedlib_from_linklib_cmd='unknown'
-+
-+case $host_os in
-+cygwin* | mingw* | pw32* | cegcc*)
-+ # two different shell functions defined in ltmain.sh
-+ # decide which to use based on capabilities of $DLLTOOL
-+ case `$DLLTOOL --help 2>&1` in
-+ *--identify-strict*)
-+ lt_cv_sharedlib_from_linklib_cmd=func_cygming_dll_for_implib
-+ ;;
-+ *)
-+ lt_cv_sharedlib_from_linklib_cmd=func_cygming_dll_for_implib_fallback
-+ ;;
-+ esac
-+ ;;
-+*)
-+ # fallback: assume linklib IS sharedlib
-+ lt_cv_sharedlib_from_linklib_cmd="$ECHO"
-+ ;;
-+esac
-+
-+fi
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_sharedlib_from_linklib_cmd" >&5
-+$as_echo "$lt_cv_sharedlib_from_linklib_cmd" >&6; }
-+sharedlib_from_linklib_cmd=$lt_cv_sharedlib_from_linklib_cmd
-+test -z "$sharedlib_from_linklib_cmd" && sharedlib_from_linklib_cmd=$ECHO
-+
-+
-+
-+
-+
-+
-+
- plugin_option=
- plugin_names="liblto_plugin.so liblto_plugin-0.dll cyglto_plugin-0.dll"
- for plugin in $plugin_names; do
-@@ -6028,8 +6280,10 @@ for plugin in $plugin_names; do
- done
-
- if test -n "$ac_tool_prefix"; then
-- # Extract the first word of "${ac_tool_prefix}ar", so it can be a program name with args.
--set dummy ${ac_tool_prefix}ar; ac_word=$2
-+ for ac_prog in ar
-+ do
-+ # Extract the first word of "$ac_tool_prefix$ac_prog", so it can be a program name with args.
-+set dummy $ac_tool_prefix$ac_prog; ac_word=$2
- { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
- $as_echo_n "checking for $ac_word... " >&6; }
- if ${ac_cv_prog_AR+:} false; then :
-@@ -6045,7 +6299,7 @@ do
- test -z "$as_dir" && as_dir=.
- for ac_exec_ext in '' $ac_executable_extensions; do
- if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
-- ac_cv_prog_AR="${ac_tool_prefix}ar"
-+ ac_cv_prog_AR="$ac_tool_prefix$ac_prog"
- $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
- break 2
- fi
-@@ -6065,11 +6319,15 @@ $as_echo "no" >&6; }
- fi
-
-
-+ test -n "$AR" && break
-+ done
- fi
--if test -z "$ac_cv_prog_AR"; then
-+if test -z "$AR"; then
- ac_ct_AR=$AR
-- # Extract the first word of "ar", so it can be a program name with args.
--set dummy ar; ac_word=$2
-+ for ac_prog in ar
-+do
-+ # Extract the first word of "$ac_prog", so it can be a program name with args.
-+set dummy $ac_prog; ac_word=$2
- { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
- $as_echo_n "checking for $ac_word... " >&6; }
- if ${ac_cv_prog_ac_ct_AR+:} false; then :
-@@ -6085,7 +6343,7 @@ do
- test -z "$as_dir" && as_dir=.
- for ac_exec_ext in '' $ac_executable_extensions; do
- if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
-- ac_cv_prog_ac_ct_AR="ar"
-+ ac_cv_prog_ac_ct_AR="$ac_prog"
- $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
- break 2
- fi
-@@ -6104,6 +6362,10 @@ else
- $as_echo "no" >&6; }
- fi
-
-+
-+ test -n "$ac_ct_AR" && break
-+done
-+
- if test "x$ac_ct_AR" = x; then
- AR="false"
- else
-@@ -6115,29 +6377,81 @@ ac_tool_warned=yes ;;
- esac
- AR=$ac_ct_AR
- fi
--else
-- AR="$ac_cv_prog_AR"
- fi
-
--test -z "$AR" && AR=ar
--if test -n "$plugin_option"; then
-- if $AR --help 2>&1 | grep -q "\--plugin"; then
-- touch conftest.c
-- $AR $plugin_option rc conftest.a conftest.c
-- if test "$?" != 0; then
-- { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: Failed: $AR $plugin_option rc" >&5
-+ touch conftest.c
-+ $AR $plugin_option rc conftest.a conftest.c
-+ if test "$?" != 0; then
-+ { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: Failed: $AR $plugin_option rc" >&5
- $as_echo "$as_me: WARNING: Failed: $AR $plugin_option rc" >&2;}
-- else
-- AR="$AR $plugin_option"
-- fi
-- rm -f conftest.*
-+ else
-+ AR="$AR $plugin_option"
- fi
--fi
--test -z "$AR_FLAGS" && AR_FLAGS=cru
-+ rm -f conftest.*
-+: ${AR=ar}
-+: ${AR_FLAGS=cru}
-+
-+
-+
-+
-+
-+
-+
-+
-+
-+
-+
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for archiver @FILE support" >&5
-+$as_echo_n "checking for archiver @FILE support... " >&6; }
-+if ${lt_cv_ar_at_file+:} false; then :
-+ $as_echo_n "(cached) " >&6
-+else
-+ lt_cv_ar_at_file=no
-+ cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-+/* end confdefs.h. */
-+
-+int
-+main ()
-+{
-
-+ ;
-+ return 0;
-+}
-+_ACEOF
-+if ac_fn_c_try_compile "$LINENO"; then :
-+ echo conftest.$ac_objext > conftest.lst
-+ lt_ar_try='$AR $AR_FLAGS libconftest.a @conftest.lst >&5'
-+ { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$lt_ar_try\""; } >&5
-+ (eval $lt_ar_try) 2>&5
-+ ac_status=$?
-+ $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
-+ test $ac_status = 0; }
-+ if test "$ac_status" -eq 0; then
-+ # Ensure the archiver fails upon bogus file names.
-+ rm -f conftest.$ac_objext libconftest.a
-+ { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$lt_ar_try\""; } >&5
-+ (eval $lt_ar_try) 2>&5
-+ ac_status=$?
-+ $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
-+ test $ac_status = 0; }
-+ if test "$ac_status" -ne 0; then
-+ lt_cv_ar_at_file=@
-+ fi
-+ fi
-+ rm -f conftest.* libconftest.a
-
-+fi
-+rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
-
-+fi
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_ar_at_file" >&5
-+$as_echo "$lt_cv_ar_at_file" >&6; }
-
-+if test "x$lt_cv_ar_at_file" = xno; then
-+ archiver_list_spec=
-+else
-+ archiver_list_spec=$lt_cv_ar_at_file
-+fi
-
-
-
-@@ -6484,8 +6798,8 @@ esac
- lt_cv_sys_global_symbol_to_cdecl="sed -n -e 's/^T .* \(.*\)$/extern int \1();/p' -e 's/^$symcode* .* \(.*\)$/extern char \1;/p'"
-
- # Transform an extracted symbol line into symbol name and symbol address
--lt_cv_sys_global_symbol_to_c_name_address="sed -n -e 's/^: \([^ ]*\) $/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"\2\", (void *) \&\2},/p'"
--lt_cv_sys_global_symbol_to_c_name_address_lib_prefix="sed -n -e 's/^: \([^ ]*\) $/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \(lib[^ ]*\)$/ {\"\2\", (void *) \&\2},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"lib\2\", (void *) \&\2},/p'"
-+lt_cv_sys_global_symbol_to_c_name_address="sed -n -e 's/^: \([^ ]*\)[ ]*$/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"\2\", (void *) \&\2},/p'"
-+lt_cv_sys_global_symbol_to_c_name_address_lib_prefix="sed -n -e 's/^: \([^ ]*\)[ ]*$/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \(lib[^ ]*\)$/ {\"\2\", (void *) \&\2},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"lib\2\", (void *) \&\2},/p'"
-
- # Handle CRLF in mingw tool chain
- opt_cr=
-@@ -6521,6 +6835,7 @@ for ac_symprfx in "" "_"; do
- else
- lt_cv_sys_global_symbol_pipe="sed -n -e 's/^.*[ ]\($symcode$symcode*\)[ ][ ]*$ac_symprfx$sympat$opt_cr$/$symxfrm/p'"
- fi
-+ lt_cv_sys_global_symbol_pipe="$lt_cv_sys_global_symbol_pipe | sed '/ __gnu_lto/d'"
-
- # Check to see that the pipe works correctly.
- pipe_works=no
-@@ -6562,6 +6877,18 @@ _LT_EOF
- if $GREP ' nm_test_var$' "$nlist" >/dev/null; then
- if $GREP ' nm_test_func$' "$nlist" >/dev/null; then
- cat <<_LT_EOF > conftest.$ac_ext
-+/* Keep this code in sync between libtool.m4, ltmain, lt_system.h, and tests. */
-+#if defined(_WIN32) || defined(__CYGWIN__) || defined(_WIN32_WCE)
-+/* DATA imports from DLLs on WIN32 con't be const, because runtime
-+ relocations are performed -- see ld's documentation on pseudo-relocs. */
-+# define LT_DLSYM_CONST
-+#elif defined(__osf__)
-+/* This system does not cope well with relocations in const data. */
-+# define LT_DLSYM_CONST
-+#else
-+# define LT_DLSYM_CONST const
-+#endif
-+
- #ifdef __cplusplus
- extern "C" {
- #endif
-@@ -6573,7 +6900,7 @@ _LT_EOF
- cat <<_LT_EOF >> conftest.$ac_ext
-
- /* The mapping between symbol names and symbols. */
--const struct {
-+LT_DLSYM_CONST struct {
- const char *name;
- void *address;
- }
-@@ -6599,8 +6926,8 @@ static const void *lt_preloaded_setup() {
- _LT_EOF
- # Now try linking the two files.
- mv conftest.$ac_objext conftstm.$ac_objext
-- lt_save_LIBS="$LIBS"
-- lt_save_CFLAGS="$CFLAGS"
-+ lt_globsym_save_LIBS=$LIBS
-+ lt_globsym_save_CFLAGS=$CFLAGS
- LIBS="conftstm.$ac_objext"
- CFLAGS="$CFLAGS$lt_prog_compiler_no_builtin_flag"
- if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_link\""; } >&5
-@@ -6610,8 +6937,8 @@ _LT_EOF
- test $ac_status = 0; } && test -s conftest${ac_exeext}; then
- pipe_works=yes
- fi
-- LIBS="$lt_save_LIBS"
-- CFLAGS="$lt_save_CFLAGS"
-+ LIBS=$lt_globsym_save_LIBS
-+ CFLAGS=$lt_globsym_save_CFLAGS
- else
- echo "cannot find nm_test_func in $nlist" >&5
- fi
-@@ -6648,6 +6975,19 @@ else
- $as_echo "ok" >&6; }
- fi
-
-+# Response file support.
-+if test "$lt_cv_nm_interface" = "MS dumpbin"; then
-+ nm_file_list_spec='@'
-+elif $NM --help 2>/dev/null | grep '[@]FILE' >/dev/null; then
-+ nm_file_list_spec='@'
-+fi
-+
-+
-+
-+
-+
-+
-+
-
-
-
-@@ -6664,6 +7004,42 @@ fi
-
-
-
-+
-+
-+
-+
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for sysroot" >&5
-+$as_echo_n "checking for sysroot... " >&6; }
-+
-+# Check whether --with-libtool-sysroot was given.
-+if test "${with_libtool_sysroot+set}" = set; then :
-+ withval=$with_libtool_sysroot;
-+else
-+ with_libtool_sysroot=no
-+fi
-+
-+
-+lt_sysroot=
-+case ${with_libtool_sysroot} in #(
-+ yes)
-+ if test "$GCC" = yes; then
-+ lt_sysroot=`$CC --print-sysroot 2>/dev/null`
-+ fi
-+ ;; #(
-+ /*)
-+ lt_sysroot=`echo "$with_libtool_sysroot" | sed -e "$sed_quote_subst"`
-+ ;; #(
-+ no|'')
-+ ;; #(
-+ *)
-+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: ${with_libtool_sysroot}" >&5
-+$as_echo "${with_libtool_sysroot}" >&6; }
-+ as_fn_error $? "The sysroot must be an absolute path." "$LINENO" 5
-+ ;;
-+esac
-+
-+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: ${lt_sysroot:-no}" >&5
-+$as_echo "${lt_sysroot:-no}" >&6; }
-
-
-
-@@ -6875,6 +7251,123 @@ esac
-
- need_locks="$enable_libtool_lock"
-
-+if test -n "$ac_tool_prefix"; then
-+ # Extract the first word of "${ac_tool_prefix}mt", so it can be a program name with args.
-+set dummy ${ac_tool_prefix}mt; ac_word=$2
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-+$as_echo_n "checking for $ac_word... " >&6; }
-+if ${ac_cv_prog_MANIFEST_TOOL+:} false; then :
-+ $as_echo_n "(cached) " >&6
-+else
-+ if test -n "$MANIFEST_TOOL"; then
-+ ac_cv_prog_MANIFEST_TOOL="$MANIFEST_TOOL" # Let the user override the test.
-+else
-+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-+for as_dir in $PATH
-+do
-+ IFS=$as_save_IFS
-+ test -z "$as_dir" && as_dir=.
-+ for ac_exec_ext in '' $ac_executable_extensions; do
-+ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
-+ ac_cv_prog_MANIFEST_TOOL="${ac_tool_prefix}mt"
-+ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
-+ break 2
-+ fi
-+done
-+ done
-+IFS=$as_save_IFS
-+
-+fi
-+fi
-+MANIFEST_TOOL=$ac_cv_prog_MANIFEST_TOOL
-+if test -n "$MANIFEST_TOOL"; then
-+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $MANIFEST_TOOL" >&5
-+$as_echo "$MANIFEST_TOOL" >&6; }
-+else
-+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-+$as_echo "no" >&6; }
-+fi
-+
-+
-+fi
-+if test -z "$ac_cv_prog_MANIFEST_TOOL"; then
-+ ac_ct_MANIFEST_TOOL=$MANIFEST_TOOL
-+ # Extract the first word of "mt", so it can be a program name with args.
-+set dummy mt; ac_word=$2
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-+$as_echo_n "checking for $ac_word... " >&6; }
-+if ${ac_cv_prog_ac_ct_MANIFEST_TOOL+:} false; then :
-+ $as_echo_n "(cached) " >&6
-+else
-+ if test -n "$ac_ct_MANIFEST_TOOL"; then
-+ ac_cv_prog_ac_ct_MANIFEST_TOOL="$ac_ct_MANIFEST_TOOL" # Let the user override the test.
-+else
-+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-+for as_dir in $PATH
-+do
-+ IFS=$as_save_IFS
-+ test -z "$as_dir" && as_dir=.
-+ for ac_exec_ext in '' $ac_executable_extensions; do
-+ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
-+ ac_cv_prog_ac_ct_MANIFEST_TOOL="mt"
-+ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
-+ break 2
-+ fi
-+done
-+ done
-+IFS=$as_save_IFS
-+
-+fi
-+fi
-+ac_ct_MANIFEST_TOOL=$ac_cv_prog_ac_ct_MANIFEST_TOOL
-+if test -n "$ac_ct_MANIFEST_TOOL"; then
-+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_MANIFEST_TOOL" >&5
-+$as_echo "$ac_ct_MANIFEST_TOOL" >&6; }
-+else
-+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-+$as_echo "no" >&6; }
-+fi
-+
-+ if test "x$ac_ct_MANIFEST_TOOL" = x; then
-+ MANIFEST_TOOL=":"
-+ else
-+ case $cross_compiling:$ac_tool_warned in
-+yes:)
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
-+$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
-+ac_tool_warned=yes ;;
-+esac
-+ MANIFEST_TOOL=$ac_ct_MANIFEST_TOOL
-+ fi
-+else
-+ MANIFEST_TOOL="$ac_cv_prog_MANIFEST_TOOL"
-+fi
-+
-+test -z "$MANIFEST_TOOL" && MANIFEST_TOOL=mt
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking if $MANIFEST_TOOL is a manifest tool" >&5
-+$as_echo_n "checking if $MANIFEST_TOOL is a manifest tool... " >&6; }
-+if ${lt_cv_path_mainfest_tool+:} false; then :
-+ $as_echo_n "(cached) " >&6
-+else
-+ lt_cv_path_mainfest_tool=no
-+ echo "$as_me:$LINENO: $MANIFEST_TOOL '-?'" >&5
-+ $MANIFEST_TOOL '-?' 2>conftest.err > conftest.out
-+ cat conftest.err >&5
-+ if $GREP 'Manifest Tool' conftest.out > /dev/null; then
-+ lt_cv_path_mainfest_tool=yes
-+ fi
-+ rm -f conftest*
-+fi
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_path_mainfest_tool" >&5
-+$as_echo "$lt_cv_path_mainfest_tool" >&6; }
-+if test "x$lt_cv_path_mainfest_tool" != xyes; then
-+ MANIFEST_TOOL=:
-+fi
-+
-+
-+
-+
-+
-
- case $host_os in
- rhapsody* | darwin*)
-@@ -7438,6 +7931,8 @@ _LT_EOF
- $LTCC $LTCFLAGS -c -o conftest.o conftest.c 2>&5
- echo "$AR cru libconftest.a conftest.o" >&5
- $AR cru libconftest.a conftest.o 2>&5
-+ echo "$RANLIB libconftest.a" >&5
-+ $RANLIB libconftest.a 2>&5
- cat > conftest.c << _LT_EOF
- int main() { return 0;}
- _LT_EOF
-@@ -8020,8 +8515,6 @@ fi
- lt_prog_compiler_pic=
- lt_prog_compiler_static=
-
--{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $compiler option to produce PIC" >&5
--$as_echo_n "checking for $compiler option to produce PIC... " >&6; }
-
- if test "$GCC" = yes; then
- lt_prog_compiler_wl='-Wl,'
-@@ -8187,6 +8680,12 @@ $as_echo_n "checking for $compiler option to produce PIC... " >&6; }
- lt_prog_compiler_pic='--shared'
- lt_prog_compiler_static='--static'
- ;;
-+ nagfor*)
-+ # NAG Fortran compiler
-+ lt_prog_compiler_wl='-Wl,-Wl,,'
-+ lt_prog_compiler_pic='-PIC'
-+ lt_prog_compiler_static='-Bstatic'
-+ ;;
- pgcc* | pgf77* | pgf90* | pgf95* | pgfortran*)
- # Portland Group compilers (*not* the Pentium gcc compiler,
- # which looks to be a dead project)
-@@ -8249,7 +8748,7 @@ $as_echo_n "checking for $compiler option to produce PIC... " >&6; }
- lt_prog_compiler_pic='-KPIC'
- lt_prog_compiler_static='-Bstatic'
- case $cc_basename in
-- f77* | f90* | f95*)
-+ f77* | f90* | f95* | sunf77* | sunf90* | sunf95*)
- lt_prog_compiler_wl='-Qoption ld ';;
- *)
- lt_prog_compiler_wl='-Wl,';;
-@@ -8306,13 +8805,17 @@ case $host_os in
- lt_prog_compiler_pic="$lt_prog_compiler_pic -DPIC"
- ;;
- esac
--{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_prog_compiler_pic" >&5
--$as_echo "$lt_prog_compiler_pic" >&6; }
--
--
--
--
-
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $compiler option to produce PIC" >&5
-+$as_echo_n "checking for $compiler option to produce PIC... " >&6; }
-+if ${lt_cv_prog_compiler_pic+:} false; then :
-+ $as_echo_n "(cached) " >&6
-+else
-+ lt_cv_prog_compiler_pic=$lt_prog_compiler_pic
-+fi
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_pic" >&5
-+$as_echo "$lt_cv_prog_compiler_pic" >&6; }
-+lt_prog_compiler_pic=$lt_cv_prog_compiler_pic
-
- #
- # Check to make sure the PIC flag actually works.
-@@ -8373,6 +8876,11 @@ fi
-
-
-
-+
-+
-+
-+
-+
- #
- # Check to make sure the static flag actually works.
- #
-@@ -8723,7 +9231,8 @@ _LT_EOF
- allow_undefined_flag=unsupported
- always_export_symbols=no
- enable_shared_with_static_runtimes=yes
-- export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1 DATA/'\'' | $SED -e '\''/^[AITW][ ]/s/.*[ ]//'\'' | sort | uniq > $export_symbols'
-+ export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1 DATA/;s/^.*[ ]__nm__\([^ ]*\)[ ][^ ]*/\1 DATA/;/^I[ ]/d;/^[AITW][ ]/s/.* //'\'' | sort | uniq > $export_symbols'
-+ exclude_expsyms='[_]+GLOBAL_OFFSET_TABLE_|[_]+GLOBAL__[FID]_.*|[_]+head_[A-Za-z0-9_]+_dll|[A-Za-z0-9_]+_dll_iname'
-
- if $LD --help 2>&1 | $GREP 'auto-import' > /dev/null; then
- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib'
-@@ -8822,12 +9331,12 @@ _LT_EOF
- whole_archive_flag_spec='--whole-archive$convenience --no-whole-archive'
- hardcode_libdir_flag_spec=
- hardcode_libdir_flag_spec_ld='-rpath $libdir'
-- archive_cmds='$LD -shared $libobjs $deplibs $compiler_flags -soname $soname -o $lib'
-+ archive_cmds='$LD -shared $libobjs $deplibs $linker_flags -soname $soname -o $lib'
- if test "x$supports_anon_versioning" = xyes; then
- archive_expsym_cmds='echo "{ global:" > $output_objdir/$libname.ver~
- cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~
- echo "local: *; };" >> $output_objdir/$libname.ver~
-- $LD -shared $libobjs $deplibs $compiler_flags -soname $soname -version-script $output_objdir/$libname.ver -o $lib'
-+ $LD -shared $libobjs $deplibs $linker_flags -soname $soname -version-script $output_objdir/$libname.ver -o $lib'
- fi
- ;;
- esac
-@@ -8841,8 +9350,8 @@ _LT_EOF
- archive_cmds='$LD -Bshareable $libobjs $deplibs $linker_flags -o $lib'
- wlarc=
- else
-- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
-- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
-+ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
-+ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
- fi
- ;;
-
-@@ -8860,8 +9369,8 @@ _LT_EOF
-
- _LT_EOF
- elif $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then
-- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
-- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
-+ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
-+ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
- else
- ld_shlibs=no
- fi
-@@ -8907,8 +9416,8 @@ _LT_EOF
-
- *)
- if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then
-- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
-- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
-+ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
-+ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
- else
- ld_shlibs=no
- fi
-@@ -9038,7 +9547,13 @@ _LT_EOF
- allow_undefined_flag='-berok'
- # Determine the default libpath from the value encoded in an
- # empty executable.
-- cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-+ if test "${lt_cv_aix_libpath+set}" = set; then
-+ aix_libpath=$lt_cv_aix_libpath
-+else
-+ if ${lt_cv_aix_libpath_+:} false; then :
-+ $as_echo_n "(cached) " >&6
-+else
-+ cat confdefs.h - <<_ACEOF >conftest.$ac_ext
- /* end confdefs.h. */
-
- int
-@@ -9051,22 +9566,29 @@ main ()
- _ACEOF
- if ac_fn_c_try_link "$LINENO"; then :
-
--lt_aix_libpath_sed='
-- /Import File Strings/,/^$/ {
-- /^0/ {
-- s/^0 *\(.*\)$/\1/
-- p
-- }
-- }'
--aix_libpath=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
--# Check for a 64-bit object if we didn't find anything.
--if test -z "$aix_libpath"; then
-- aix_libpath=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
--fi
-+ lt_aix_libpath_sed='
-+ /Import File Strings/,/^$/ {
-+ /^0/ {
-+ s/^0 *\([^ ]*\) *$/\1/
-+ p
-+ }
-+ }'
-+ lt_cv_aix_libpath_=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
-+ # Check for a 64-bit object if we didn't find anything.
-+ if test -z "$lt_cv_aix_libpath_"; then
-+ lt_cv_aix_libpath_=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
-+ fi
- fi
- rm -f core conftest.err conftest.$ac_objext \
- conftest$ac_exeext conftest.$ac_ext
--if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
-+ if test -z "$lt_cv_aix_libpath_"; then
-+ lt_cv_aix_libpath_="/usr/lib:/lib"
-+ fi
-+
-+fi
-+
-+ aix_libpath=$lt_cv_aix_libpath_
-+fi
-
- hardcode_libdir_flag_spec='${wl}-blibpath:$libdir:'"$aix_libpath"
- archive_expsym_cmds='$CC -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags `if test "x${allow_undefined_flag}" != "x"; then func_echo_all "${wl}${allow_undefined_flag}"; else :; fi` '"\${wl}$exp_sym_flag:\$export_symbols $shared_flag"
-@@ -9078,7 +9600,13 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
- else
- # Determine the default libpath from the value encoded in an
- # empty executable.
-- cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-+ if test "${lt_cv_aix_libpath+set}" = set; then
-+ aix_libpath=$lt_cv_aix_libpath
-+else
-+ if ${lt_cv_aix_libpath_+:} false; then :
-+ $as_echo_n "(cached) " >&6
-+else
-+ cat confdefs.h - <<_ACEOF >conftest.$ac_ext
- /* end confdefs.h. */
-
- int
-@@ -9091,22 +9619,29 @@ main ()
- _ACEOF
- if ac_fn_c_try_link "$LINENO"; then :
-
--lt_aix_libpath_sed='
-- /Import File Strings/,/^$/ {
-- /^0/ {
-- s/^0 *\(.*\)$/\1/
-- p
-- }
-- }'
--aix_libpath=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
--# Check for a 64-bit object if we didn't find anything.
--if test -z "$aix_libpath"; then
-- aix_libpath=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
--fi
-+ lt_aix_libpath_sed='
-+ /Import File Strings/,/^$/ {
-+ /^0/ {
-+ s/^0 *\([^ ]*\) *$/\1/
-+ p
-+ }
-+ }'
-+ lt_cv_aix_libpath_=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
-+ # Check for a 64-bit object if we didn't find anything.
-+ if test -z "$lt_cv_aix_libpath_"; then
-+ lt_cv_aix_libpath_=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
-+ fi
- fi
- rm -f core conftest.err conftest.$ac_objext \
- conftest$ac_exeext conftest.$ac_ext
--if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
-+ if test -z "$lt_cv_aix_libpath_"; then
-+ lt_cv_aix_libpath_="/usr/lib:/lib"
-+ fi
-+
-+fi
-+
-+ aix_libpath=$lt_cv_aix_libpath_
-+fi
-
- hardcode_libdir_flag_spec='${wl}-blibpath:$libdir:'"$aix_libpath"
- # Warning - without using the other run time loading flags,
-@@ -9151,20 +9686,63 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
- # Microsoft Visual C++.
- # hardcode_libdir_flag_spec is actually meaningless, as there is
- # no search path for DLLs.
-- hardcode_libdir_flag_spec=' '
-- allow_undefined_flag=unsupported
-- # Tell ltmain to make .lib files, not .a files.
-- libext=lib
-- # Tell ltmain to make .dll files, not .so files.
-- shrext_cmds=".dll"
-- # FIXME: Setting linknames here is a bad hack.
-- archive_cmds='$CC -o $lib $libobjs $compiler_flags `func_echo_all "$deplibs" | $SED '\''s/ -lc$//'\''` -link -dll~linknames='
-- # The linker will automatically build a .lib file if we build a DLL.
-- old_archive_from_new_cmds='true'
-- # FIXME: Should let the user specify the lib program.
-- old_archive_cmds='lib -OUT:$oldlib$oldobjs$old_deplibs'
-- fix_srcfile_path='`cygpath -w "$srcfile"`'
-- enable_shared_with_static_runtimes=yes
-+ case $cc_basename in
-+ cl*)
-+ # Native MSVC
-+ hardcode_libdir_flag_spec=' '
-+ allow_undefined_flag=unsupported
-+ always_export_symbols=yes
-+ file_list_spec='@'
-+ # Tell ltmain to make .lib files, not .a files.
-+ libext=lib
-+ # Tell ltmain to make .dll files, not .so files.
-+ shrext_cmds=".dll"
-+ # FIXME: Setting linknames here is a bad hack.
-+ archive_cmds='$CC -o $output_objdir/$soname $libobjs $compiler_flags $deplibs -Wl,-dll~linknames='
-+ archive_expsym_cmds='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then
-+ sed -n -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' -e '1\\\!p' < $export_symbols > $output_objdir/$soname.exp;
-+ else
-+ sed -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' < $export_symbols > $output_objdir/$soname.exp;
-+ fi~
-+ $CC -o $tool_output_objdir$soname $libobjs $compiler_flags $deplibs "@$tool_output_objdir$soname.exp" -Wl,-DLL,-IMPLIB:"$tool_output_objdir$libname.dll.lib"~
-+ linknames='
-+ # The linker will not automatically build a static lib if we build a DLL.
-+ # _LT_TAGVAR(old_archive_from_new_cmds, )='true'
-+ enable_shared_with_static_runtimes=yes
-+ export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1,DATA/'\'' | $SED -e '\''/^[AITW][ ]/s/.*[ ]//'\'' | sort | uniq > $export_symbols'
-+ # Don't use ranlib
-+ old_postinstall_cmds='chmod 644 $oldlib'
-+ postlink_cmds='lt_outputfile="@OUTPUT@"~
-+ lt_tool_outputfile="@TOOL_OUTPUT@"~
-+ case $lt_outputfile in
-+ *.exe|*.EXE) ;;
-+ *)
-+ lt_outputfile="$lt_outputfile.exe"
-+ lt_tool_outputfile="$lt_tool_outputfile.exe"
-+ ;;
-+ esac~
-+ if test "$MANIFEST_TOOL" != ":" && test -f "$lt_outputfile.manifest"; then
-+ $MANIFEST_TOOL -manifest "$lt_tool_outputfile.manifest" -outputresource:"$lt_tool_outputfile" || exit 1;
-+ $RM "$lt_outputfile.manifest";
-+ fi'
-+ ;;
-+ *)
-+ # Assume MSVC wrapper
-+ hardcode_libdir_flag_spec=' '
-+ allow_undefined_flag=unsupported
-+ # Tell ltmain to make .lib files, not .a files.
-+ libext=lib
-+ # Tell ltmain to make .dll files, not .so files.
-+ shrext_cmds=".dll"
-+ # FIXME: Setting linknames here is a bad hack.
-+ archive_cmds='$CC -o $lib $libobjs $compiler_flags `func_echo_all "$deplibs" | $SED '\''s/ -lc$//'\''` -link -dll~linknames='
-+ # The linker will automatically build a .lib file if we build a DLL.
-+ old_archive_from_new_cmds='true'
-+ # FIXME: Should let the user specify the lib program.
-+ old_archive_cmds='lib -OUT:$oldlib$oldobjs$old_deplibs'
-+ enable_shared_with_static_runtimes=yes
-+ ;;
-+ esac
- ;;
-
- darwin* | rhapsody*)
-@@ -9225,7 +9803,7 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
-
- # FreeBSD 3 and greater uses gcc -shared to do shared libraries.
- freebsd* | dragonfly*)
-- archive_cmds='$CC -shared -o $lib $libobjs $deplibs $compiler_flags'
-+ archive_cmds='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags'
- hardcode_libdir_flag_spec='-R$libdir'
- hardcode_direct=yes
- hardcode_shlibpath_var=no
-@@ -9233,7 +9811,7 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
-
- hpux9*)
- if test "$GCC" = yes; then
-- archive_cmds='$RM $output_objdir/$soname~$CC -shared -fPIC ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $libobjs $deplibs $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib'
-+ archive_cmds='$RM $output_objdir/$soname~$CC -shared $pic_flag ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $libobjs $deplibs $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib'
- else
- archive_cmds='$RM $output_objdir/$soname~$LD -b +b $install_libdir -o $output_objdir/$soname $libobjs $deplibs $linker_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib'
- fi
-@@ -9249,7 +9827,7 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
-
- hpux10*)
- if test "$GCC" = yes && test "$with_gnu_ld" = no; then
-- archive_cmds='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'
-+ archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'
- else
- archive_cmds='$LD -b +h $soname +b $install_libdir -o $lib $libobjs $deplibs $linker_flags'
- fi
-@@ -9273,10 +9851,10 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
- archive_cmds='$CC -shared ${wl}+h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags'
- ;;
- ia64*)
-- archive_cmds='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags'
-+ archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags'
- ;;
- *)
-- archive_cmds='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'
-+ archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'
- ;;
- esac
- else
-@@ -9355,23 +9933,36 @@ fi
-
- irix5* | irix6* | nonstopux*)
- if test "$GCC" = yes; then
-- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
-+ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
- # Try to use the -exported_symbol ld option, if it does not
- # work, assume that -exports_file does not work either and
- # implicitly export all symbols.
-- save_LDFLAGS="$LDFLAGS"
-- LDFLAGS="$LDFLAGS -shared ${wl}-exported_symbol ${wl}foo ${wl}-update_registry ${wl}/dev/null"
-- cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-+ # This should be the same for all languages, so no per-tag cache variable.
-+ { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the $host_os linker accepts -exported_symbol" >&5
-+$as_echo_n "checking whether the $host_os linker accepts -exported_symbol... " >&6; }
-+if ${lt_cv_irix_exported_symbol+:} false; then :
-+ $as_echo_n "(cached) " >&6
-+else
-+ save_LDFLAGS="$LDFLAGS"
-+ LDFLAGS="$LDFLAGS -shared ${wl}-exported_symbol ${wl}foo ${wl}-update_registry ${wl}/dev/null"
-+ cat confdefs.h - <<_ACEOF >conftest.$ac_ext
- /* end confdefs.h. */
--int foo(void) {}
-+int foo (void) { return 0; }
- _ACEOF
- if ac_fn_c_try_link "$LINENO"; then :
-- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations ${wl}-exports_file ${wl}$export_symbols -o $lib'
--
-+ lt_cv_irix_exported_symbol=yes
-+else
-+ lt_cv_irix_exported_symbol=no
- fi
- rm -f core conftest.err conftest.$ac_objext \
- conftest$ac_exeext conftest.$ac_ext
-- LDFLAGS="$save_LDFLAGS"
-+ LDFLAGS="$save_LDFLAGS"
-+fi
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_irix_exported_symbol" >&5
-+$as_echo "$lt_cv_irix_exported_symbol" >&6; }
-+ if test "$lt_cv_irix_exported_symbol" = yes; then
-+ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations ${wl}-exports_file ${wl}$export_symbols -o $lib'
-+ fi
- else
- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib'
- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -exports_file $export_symbols -o $lib'
-@@ -9456,7 +10047,7 @@ rm -f core conftest.err conftest.$ac_objext \
- osf4* | osf5*) # as osf3* with the addition of -msym flag
- if test "$GCC" = yes; then
- allow_undefined_flag=' ${wl}-expect_unresolved ${wl}\*'
-- archive_cmds='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
-+ archive_cmds='$CC -shared${allow_undefined_flag} $pic_flag $libobjs $deplibs $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
- hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir'
- else
- allow_undefined_flag=' -expect_unresolved \*'
-@@ -9475,9 +10066,9 @@ rm -f core conftest.err conftest.$ac_objext \
- no_undefined_flag=' -z defs'
- if test "$GCC" = yes; then
- wlarc='${wl}'
-- archive_cmds='$CC -shared ${wl}-z ${wl}text ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags'
-+ archive_cmds='$CC -shared $pic_flag ${wl}-z ${wl}text ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags'
- archive_expsym_cmds='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~
-- $CC -shared ${wl}-z ${wl}text ${wl}-M ${wl}$lib.exp ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp'
-+ $CC -shared $pic_flag ${wl}-z ${wl}text ${wl}-M ${wl}$lib.exp ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp'
- else
- case `$CC -V 2>&1` in
- *"Compilers 5.0"*)
-@@ -10053,8 +10644,9 @@ cygwin* | mingw* | pw32* | cegcc*)
- need_version=no
- need_lib_prefix=no
-
-- case $GCC,$host_os in
-- yes,cygwin* | yes,mingw* | yes,pw32* | yes,cegcc*)
-+ case $GCC,$cc_basename in
-+ yes,*)
-+ # gcc
- library_names_spec='$libname.dll.a'
- # DLL is installed to $(libdir)/../bin by postinstall_cmds
- postinstall_cmds='base_file=`basename \${file}`~
-@@ -10087,13 +10679,71 @@ cygwin* | mingw* | pw32* | cegcc*)
- library_names_spec='`echo ${libname} | sed -e 's/^lib/pw/'``echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}'
- ;;
- esac
-+ dynamic_linker='Win32 ld.exe'
-+ ;;
-+
-+ *,cl*)
-+ # Native MSVC
-+ libname_spec='$name'
-+ soname_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}'
-+ library_names_spec='${libname}.dll.lib'
-+
-+ case $build_os in
-+ mingw*)
-+ sys_lib_search_path_spec=
-+ lt_save_ifs=$IFS
-+ IFS=';'
-+ for lt_path in $LIB
-+ do
-+ IFS=$lt_save_ifs
-+ # Let DOS variable expansion print the short 8.3 style file name.
-+ lt_path=`cd "$lt_path" 2>/dev/null && cmd //C "for %i in (".") do @echo %~si"`
-+ sys_lib_search_path_spec="$sys_lib_search_path_spec $lt_path"
-+ done
-+ IFS=$lt_save_ifs
-+ # Convert to MSYS style.
-+ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | sed -e 's|\\\\|/|g' -e 's| \\([a-zA-Z]\\):| /\\1|g' -e 's|^ ||'`
-+ ;;
-+ cygwin*)
-+ # Convert to unix form, then to dos form, then back to unix form
-+ # but this time dos style (no spaces!) so that the unix form looks
-+ # like /cygdrive/c/PROGRA~1:/cygdr...
-+ sys_lib_search_path_spec=`cygpath --path --unix "$LIB"`
-+ sys_lib_search_path_spec=`cygpath --path --dos "$sys_lib_search_path_spec" 2>/dev/null`
-+ sys_lib_search_path_spec=`cygpath --path --unix "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"`
-+ ;;
-+ *)
-+ sys_lib_search_path_spec="$LIB"
-+ if $ECHO "$sys_lib_search_path_spec" | $GREP ';[c-zC-Z]:/' >/dev/null; then
-+ # It is most probably a Windows format PATH.
-+ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e 's/;/ /g'`
-+ else
-+ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"`
-+ fi
-+ # FIXME: find the short name or the path components, as spaces are
-+ # common. (e.g. "Program Files" -> "PROGRA~1")
-+ ;;
-+ esac
-+
-+ # DLL is installed to $(libdir)/../bin by postinstall_cmds
-+ postinstall_cmds='base_file=`basename \${file}`~
-+ dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\${base_file}'\''i; echo \$dlname'\''`~
-+ dldir=$destdir/`dirname \$dlpath`~
-+ test -d \$dldir || mkdir -p \$dldir~
-+ $install_prog $dir/$dlname \$dldir/$dlname'
-+ postuninstall_cmds='dldll=`$SHELL 2>&1 -c '\''. $file; echo \$dlname'\''`~
-+ dlpath=$dir/\$dldll~
-+ $RM \$dlpath'
-+ shlibpath_overrides_runpath=yes
-+ dynamic_linker='Win32 link.exe'
- ;;
-
- *)
-+ # Assume MSVC wrapper
- library_names_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext} $libname.lib'
-+ dynamic_linker='Win32 ld.exe'
- ;;
- esac
-- dynamic_linker='Win32 ld.exe'
- # FIXME: first we should search . and the directory the executable is in
- shlibpath_var=PATH
- ;;
-@@ -10971,7 +11621,7 @@ else
- lt_dlunknown=0; lt_dlno_uscore=1; lt_dlneed_uscore=2
- lt_status=$lt_dlunknown
- cat > conftest.$ac_ext <<_LT_EOF
--#line 10974 "configure"
-+#line $LINENO "configure"
- #include "confdefs.h"
-
- #if HAVE_DLFCN_H
-@@ -11015,10 +11665,10 @@ else
- /* When -fvisbility=hidden is used, assume the code has been annotated
- correspondingly for the symbols needed. */
- #if defined(__GNUC__) && (((__GNUC__ == 3) && (__GNUC_MINOR__ >= 3)) || (__GNUC__ > 3))
--void fnord () __attribute__((visibility("default")));
-+int fnord () __attribute__((visibility("default")));
- #endif
-
--void fnord () { int i=42; }
-+int fnord () { return 42; }
- int main ()
- {
- void *self = dlopen (0, LT_DLGLOBAL|LT_DLLAZY_OR_NOW);
-@@ -11077,7 +11727,7 @@ else
- lt_dlunknown=0; lt_dlno_uscore=1; lt_dlneed_uscore=2
- lt_status=$lt_dlunknown
- cat > conftest.$ac_ext <<_LT_EOF
--#line 11080 "configure"
-+#line $LINENO "configure"
- #include "confdefs.h"
-
- #if HAVE_DLFCN_H
-@@ -11121,10 +11771,10 @@ else
- /* When -fvisbility=hidden is used, assume the code has been annotated
- correspondingly for the symbols needed. */
- #if defined(__GNUC__) && (((__GNUC__ == 3) && (__GNUC_MINOR__ >= 3)) || (__GNUC__ > 3))
--void fnord () __attribute__((visibility("default")));
-+int fnord () __attribute__((visibility("default")));
- #endif
-
--void fnord () { int i=42; }
-+int fnord () { return 42; }
- int main ()
- {
- void *self = dlopen (0, LT_DLGLOBAL|LT_DLLAZY_OR_NOW);
-@@ -15505,13 +16155,20 @@ exeext='`$ECHO "$exeext" | $SED "$delay_single_quote_subst"`'
- lt_unset='`$ECHO "$lt_unset" | $SED "$delay_single_quote_subst"`'
- lt_SP2NL='`$ECHO "$lt_SP2NL" | $SED "$delay_single_quote_subst"`'
- lt_NL2SP='`$ECHO "$lt_NL2SP" | $SED "$delay_single_quote_subst"`'
-+lt_cv_to_host_file_cmd='`$ECHO "$lt_cv_to_host_file_cmd" | $SED "$delay_single_quote_subst"`'
-+lt_cv_to_tool_file_cmd='`$ECHO "$lt_cv_to_tool_file_cmd" | $SED "$delay_single_quote_subst"`'
- reload_flag='`$ECHO "$reload_flag" | $SED "$delay_single_quote_subst"`'
- reload_cmds='`$ECHO "$reload_cmds" | $SED "$delay_single_quote_subst"`'
- OBJDUMP='`$ECHO "$OBJDUMP" | $SED "$delay_single_quote_subst"`'
- deplibs_check_method='`$ECHO "$deplibs_check_method" | $SED "$delay_single_quote_subst"`'
- file_magic_cmd='`$ECHO "$file_magic_cmd" | $SED "$delay_single_quote_subst"`'
-+file_magic_glob='`$ECHO "$file_magic_glob" | $SED "$delay_single_quote_subst"`'
-+want_nocaseglob='`$ECHO "$want_nocaseglob" | $SED "$delay_single_quote_subst"`'
-+DLLTOOL='`$ECHO "$DLLTOOL" | $SED "$delay_single_quote_subst"`'
-+sharedlib_from_linklib_cmd='`$ECHO "$sharedlib_from_linklib_cmd" | $SED "$delay_single_quote_subst"`'
- AR='`$ECHO "$AR" | $SED "$delay_single_quote_subst"`'
- AR_FLAGS='`$ECHO "$AR_FLAGS" | $SED "$delay_single_quote_subst"`'
-+archiver_list_spec='`$ECHO "$archiver_list_spec" | $SED "$delay_single_quote_subst"`'
- STRIP='`$ECHO "$STRIP" | $SED "$delay_single_quote_subst"`'
- RANLIB='`$ECHO "$RANLIB" | $SED "$delay_single_quote_subst"`'
- old_postinstall_cmds='`$ECHO "$old_postinstall_cmds" | $SED "$delay_single_quote_subst"`'
-@@ -15526,14 +16183,17 @@ lt_cv_sys_global_symbol_pipe='`$ECHO "$lt_cv_sys_global_symbol_pipe" | $SED "$de
- lt_cv_sys_global_symbol_to_cdecl='`$ECHO "$lt_cv_sys_global_symbol_to_cdecl" | $SED "$delay_single_quote_subst"`'
- lt_cv_sys_global_symbol_to_c_name_address='`$ECHO "$lt_cv_sys_global_symbol_to_c_name_address" | $SED "$delay_single_quote_subst"`'
- lt_cv_sys_global_symbol_to_c_name_address_lib_prefix='`$ECHO "$lt_cv_sys_global_symbol_to_c_name_address_lib_prefix" | $SED "$delay_single_quote_subst"`'
-+nm_file_list_spec='`$ECHO "$nm_file_list_spec" | $SED "$delay_single_quote_subst"`'
-+lt_sysroot='`$ECHO "$lt_sysroot" | $SED "$delay_single_quote_subst"`'
- objdir='`$ECHO "$objdir" | $SED "$delay_single_quote_subst"`'
- MAGIC_CMD='`$ECHO "$MAGIC_CMD" | $SED "$delay_single_quote_subst"`'
- lt_prog_compiler_no_builtin_flag='`$ECHO "$lt_prog_compiler_no_builtin_flag" | $SED "$delay_single_quote_subst"`'
--lt_prog_compiler_wl='`$ECHO "$lt_prog_compiler_wl" | $SED "$delay_single_quote_subst"`'
- lt_prog_compiler_pic='`$ECHO "$lt_prog_compiler_pic" | $SED "$delay_single_quote_subst"`'
-+lt_prog_compiler_wl='`$ECHO "$lt_prog_compiler_wl" | $SED "$delay_single_quote_subst"`'
- lt_prog_compiler_static='`$ECHO "$lt_prog_compiler_static" | $SED "$delay_single_quote_subst"`'
- lt_cv_prog_compiler_c_o='`$ECHO "$lt_cv_prog_compiler_c_o" | $SED "$delay_single_quote_subst"`'
- need_locks='`$ECHO "$need_locks" | $SED "$delay_single_quote_subst"`'
-+MANIFEST_TOOL='`$ECHO "$MANIFEST_TOOL" | $SED "$delay_single_quote_subst"`'
- DSYMUTIL='`$ECHO "$DSYMUTIL" | $SED "$delay_single_quote_subst"`'
- NMEDIT='`$ECHO "$NMEDIT" | $SED "$delay_single_quote_subst"`'
- LIPO='`$ECHO "$LIPO" | $SED "$delay_single_quote_subst"`'
-@@ -15566,12 +16226,12 @@ hardcode_shlibpath_var='`$ECHO "$hardcode_shlibpath_var" | $SED "$delay_single_q
- hardcode_automatic='`$ECHO "$hardcode_automatic" | $SED "$delay_single_quote_subst"`'
- inherit_rpath='`$ECHO "$inherit_rpath" | $SED "$delay_single_quote_subst"`'
- link_all_deplibs='`$ECHO "$link_all_deplibs" | $SED "$delay_single_quote_subst"`'
--fix_srcfile_path='`$ECHO "$fix_srcfile_path" | $SED "$delay_single_quote_subst"`'
- always_export_symbols='`$ECHO "$always_export_symbols" | $SED "$delay_single_quote_subst"`'
- export_symbols_cmds='`$ECHO "$export_symbols_cmds" | $SED "$delay_single_quote_subst"`'
- exclude_expsyms='`$ECHO "$exclude_expsyms" | $SED "$delay_single_quote_subst"`'
- include_expsyms='`$ECHO "$include_expsyms" | $SED "$delay_single_quote_subst"`'
- prelink_cmds='`$ECHO "$prelink_cmds" | $SED "$delay_single_quote_subst"`'
-+postlink_cmds='`$ECHO "$postlink_cmds" | $SED "$delay_single_quote_subst"`'
- file_list_spec='`$ECHO "$file_list_spec" | $SED "$delay_single_quote_subst"`'
- variables_saved_for_relink='`$ECHO "$variables_saved_for_relink" | $SED "$delay_single_quote_subst"`'
- need_lib_prefix='`$ECHO "$need_lib_prefix" | $SED "$delay_single_quote_subst"`'
-@@ -15626,8 +16286,13 @@ reload_flag \
- OBJDUMP \
- deplibs_check_method \
- file_magic_cmd \
-+file_magic_glob \
-+want_nocaseglob \
-+DLLTOOL \
-+sharedlib_from_linklib_cmd \
- AR \
- AR_FLAGS \
-+archiver_list_spec \
- STRIP \
- RANLIB \
- CC \
-@@ -15637,12 +16302,14 @@ lt_cv_sys_global_symbol_pipe \
- lt_cv_sys_global_symbol_to_cdecl \
- lt_cv_sys_global_symbol_to_c_name_address \
- lt_cv_sys_global_symbol_to_c_name_address_lib_prefix \
-+nm_file_list_spec \
- lt_prog_compiler_no_builtin_flag \
--lt_prog_compiler_wl \
- lt_prog_compiler_pic \
-+lt_prog_compiler_wl \
- lt_prog_compiler_static \
- lt_cv_prog_compiler_c_o \
- need_locks \
-+MANIFEST_TOOL \
- DSYMUTIL \
- NMEDIT \
- LIPO \
-@@ -15658,7 +16325,6 @@ no_undefined_flag \
- hardcode_libdir_flag_spec \
- hardcode_libdir_flag_spec_ld \
- hardcode_libdir_separator \
--fix_srcfile_path \
- exclude_expsyms \
- include_expsyms \
- file_list_spec \
-@@ -15694,6 +16360,7 @@ module_cmds \
- module_expsym_cmds \
- export_symbols_cmds \
- prelink_cmds \
-+postlink_cmds \
- postinstall_cmds \
- postuninstall_cmds \
- finish_cmds \
-@@ -16459,7 +17126,8 @@ $as_echo X"$file" |
- # NOTE: Changes made to this file will be lost: look at ltmain.sh.
- #
- # Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005,
--# 2006, 2007, 2008, 2009 Free Software Foundation, Inc.
-+# 2006, 2007, 2008, 2009, 2010 Free Software Foundation,
-+# Inc.
- # Written by Gordon Matzigkeit, 1996
- #
- # This file is part of GNU Libtool.
-@@ -16562,19 +17230,42 @@ SP2NL=$lt_lt_SP2NL
- # turn newlines into spaces.
- NL2SP=$lt_lt_NL2SP
-
-+# convert \$build file names to \$host format.
-+to_host_file_cmd=$lt_cv_to_host_file_cmd
-+
-+# convert \$build files to toolchain format.
-+to_tool_file_cmd=$lt_cv_to_tool_file_cmd
-+
- # An object symbol dumper.
- OBJDUMP=$lt_OBJDUMP
-
- # Method to check whether dependent libraries are shared objects.
- deplibs_check_method=$lt_deplibs_check_method
-
--# Command to use when deplibs_check_method == "file_magic".
-+# Command to use when deplibs_check_method = "file_magic".
- file_magic_cmd=$lt_file_magic_cmd
-
-+# How to find potential files when deplibs_check_method = "file_magic".
-+file_magic_glob=$lt_file_magic_glob
-+
-+# Find potential files using nocaseglob when deplibs_check_method = "file_magic".
-+want_nocaseglob=$lt_want_nocaseglob
-+
-+# DLL creation program.
-+DLLTOOL=$lt_DLLTOOL
-+
-+# Command to associate shared and link libraries.
-+sharedlib_from_linklib_cmd=$lt_sharedlib_from_linklib_cmd
-+
- # The archiver.
- AR=$lt_AR
-+
-+# Flags to create an archive.
- AR_FLAGS=$lt_AR_FLAGS
-
-+# How to feed a file listing to the archiver.
-+archiver_list_spec=$lt_archiver_list_spec
-+
- # A symbol stripping program.
- STRIP=$lt_STRIP
-
-@@ -16604,6 +17295,12 @@ global_symbol_to_c_name_address=$lt_lt_cv_sys_global_symbol_to_c_name_address
- # Transform the output of nm in a C name address pair when lib prefix is needed.
- global_symbol_to_c_name_address_lib_prefix=$lt_lt_cv_sys_global_symbol_to_c_name_address_lib_prefix
-
-+# Specify filename containing input files for \$NM.
-+nm_file_list_spec=$lt_nm_file_list_spec
-+
-+# The root where to search for dependent libraries,and in which our libraries should be installed.
-+lt_sysroot=$lt_sysroot
-+
- # The name of the directory that contains temporary libtool files.
- objdir=$objdir
-
-@@ -16613,6 +17310,9 @@ MAGIC_CMD=$MAGIC_CMD
- # Must we lock files when doing compilation?
- need_locks=$lt_need_locks
-
-+# Manifest tool.
-+MANIFEST_TOOL=$lt_MANIFEST_TOOL
-+
- # Tool to manipulate archived DWARF debug symbol files on Mac OS X.
- DSYMUTIL=$lt_DSYMUTIL
-
-@@ -16727,12 +17427,12 @@ with_gcc=$GCC
- # Compiler flag to turn off builtin functions.
- no_builtin_flag=$lt_lt_prog_compiler_no_builtin_flag
-
--# How to pass a linker flag through the compiler.
--wl=$lt_lt_prog_compiler_wl
--
- # Additional compiler flags for building library objects.
- pic_flag=$lt_lt_prog_compiler_pic
-
-+# How to pass a linker flag through the compiler.
-+wl=$lt_lt_prog_compiler_wl
-+
- # Compiler flag to prevent dynamic linking.
- link_static_flag=$lt_lt_prog_compiler_static
-
-@@ -16819,9 +17519,6 @@ inherit_rpath=$inherit_rpath
- # Whether libtool must link a program against all its dependency libraries.
- link_all_deplibs=$link_all_deplibs
-
--# Fix the shell variable \$srcfile for the compiler.
--fix_srcfile_path=$lt_fix_srcfile_path
--
- # Set to "yes" if exported symbols are required.
- always_export_symbols=$always_export_symbols
-
-@@ -16837,6 +17534,9 @@ include_expsyms=$lt_include_expsyms
- # Commands necessary for linking programs (against libraries) with templates.
- prelink_cmds=$lt_prelink_cmds
-
-+# Commands necessary for finishing linking programs.
-+postlink_cmds=$lt_postlink_cmds
-+
- # Specify filename containing input files.
- file_list_spec=$lt_file_list_spec
-
-@@ -16869,210 +17569,169 @@ ltmain="$ac_aux_dir/ltmain.sh"
- # if finds mixed CR/LF and LF-only lines. Since sed operates in
- # text mode, it properly converts lines to CR/LF. This bash problem
- # is reportedly fixed, but why not run on old versions too?
-- sed '/^# Generated shell functions inserted here/q' "$ltmain" >> "$cfgfile" \
-- || (rm -f "$cfgfile"; exit 1)
--
-- case $xsi_shell in
-- yes)
-- cat << \_LT_EOF >> "$cfgfile"
--
--# func_dirname file append nondir_replacement
--# Compute the dirname of FILE. If nonempty, add APPEND to the result,
--# otherwise set result to NONDIR_REPLACEMENT.
--func_dirname ()
--{
-- case ${1} in
-- */*) func_dirname_result="${1%/*}${2}" ;;
-- * ) func_dirname_result="${3}" ;;
-- esac
--}
--
--# func_basename file
--func_basename ()
--{
-- func_basename_result="${1##*/}"
--}
--
--# func_dirname_and_basename file append nondir_replacement
--# perform func_basename and func_dirname in a single function
--# call:
--# dirname: Compute the dirname of FILE. If nonempty,
--# add APPEND to the result, otherwise set result
--# to NONDIR_REPLACEMENT.
--# value returned in "$func_dirname_result"
--# basename: Compute filename of FILE.
--# value retuned in "$func_basename_result"
--# Implementation must be kept synchronized with func_dirname
--# and func_basename. For efficiency, we do not delegate to
--# those functions but instead duplicate the functionality here.
--func_dirname_and_basename ()
--{
-- case ${1} in
-- */*) func_dirname_result="${1%/*}${2}" ;;
-- * ) func_dirname_result="${3}" ;;
-- esac
-- func_basename_result="${1##*/}"
--}
--
--# func_stripname prefix suffix name
--# strip PREFIX and SUFFIX off of NAME.
--# PREFIX and SUFFIX must not contain globbing or regex special
--# characters, hashes, percent signs, but SUFFIX may contain a leading
--# dot (in which case that matches only a dot).
--func_stripname ()
--{
-- # pdksh 5.2.14 does not do ${X%$Y} correctly if both X and Y are
-- # positional parameters, so assign one to ordinary parameter first.
-- func_stripname_result=${3}
-- func_stripname_result=${func_stripname_result#"${1}"}
-- func_stripname_result=${func_stripname_result%"${2}"}
--}
--
--# func_opt_split
--func_opt_split ()
--{
-- func_opt_split_opt=${1%%=*}
-- func_opt_split_arg=${1#*=}
--}
--
--# func_lo2o object
--func_lo2o ()
--{
-- case ${1} in
-- *.lo) func_lo2o_result=${1%.lo}.${objext} ;;
-- *) func_lo2o_result=${1} ;;
-- esac
--}
--
--# func_xform libobj-or-source
--func_xform ()
--{
-- func_xform_result=${1%.*}.lo
--}
--
--# func_arith arithmetic-term...
--func_arith ()
--{
-- func_arith_result=$(( $* ))
--}
--
--# func_len string
--# STRING may not start with a hyphen.
--func_len ()
--{
-- func_len_result=${#1}
--}
--
--_LT_EOF
-- ;;
-- *) # Bourne compatible functions.
-- cat << \_LT_EOF >> "$cfgfile"
--
--# func_dirname file append nondir_replacement
--# Compute the dirname of FILE. If nonempty, add APPEND to the result,
--# otherwise set result to NONDIR_REPLACEMENT.
--func_dirname ()
--{
-- # Extract subdirectory from the argument.
-- func_dirname_result=`$ECHO "${1}" | $SED "$dirname"`
-- if test "X$func_dirname_result" = "X${1}"; then
-- func_dirname_result="${3}"
-- else
-- func_dirname_result="$func_dirname_result${2}"
-- fi
--}
--
--# func_basename file
--func_basename ()
--{
-- func_basename_result=`$ECHO "${1}" | $SED "$basename"`
--}
--
--
--# func_stripname prefix suffix name
--# strip PREFIX and SUFFIX off of NAME.
--# PREFIX and SUFFIX must not contain globbing or regex special
--# characters, hashes, percent signs, but SUFFIX may contain a leading
--# dot (in which case that matches only a dot).
--# func_strip_suffix prefix name
--func_stripname ()
--{
-- case ${2} in
-- .*) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%\\\\${2}\$%%"`;;
-- *) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%${2}\$%%"`;;
-- esac
--}
--
--# sed scripts:
--my_sed_long_opt='1s/^\(-[^=]*\)=.*/\1/;q'
--my_sed_long_arg='1s/^-[^=]*=//'
--
--# func_opt_split
--func_opt_split ()
--{
-- func_opt_split_opt=`$ECHO "${1}" | $SED "$my_sed_long_opt"`
-- func_opt_split_arg=`$ECHO "${1}" | $SED "$my_sed_long_arg"`
--}
--
--# func_lo2o object
--func_lo2o ()
--{
-- func_lo2o_result=`$ECHO "${1}" | $SED "$lo2o"`
--}
--
--# func_xform libobj-or-source
--func_xform ()
--{
-- func_xform_result=`$ECHO "${1}" | $SED 's/\.[^.]*$/.lo/'`
--}
--
--# func_arith arithmetic-term...
--func_arith ()
--{
-- func_arith_result=`expr "$@"`
--}
--
--# func_len string
--# STRING may not start with a hyphen.
--func_len ()
--{
-- func_len_result=`expr "$1" : ".*" 2>/dev/null || echo $max_cmd_len`
--}
--
--_LT_EOF
--esac
--
--case $lt_shell_append in
-- yes)
-- cat << \_LT_EOF >> "$cfgfile"
--
--# func_append var value
--# Append VALUE to the end of shell variable VAR.
--func_append ()
--{
-- eval "$1+=\$2"
--}
--_LT_EOF
-- ;;
-- *)
-- cat << \_LT_EOF >> "$cfgfile"
--
--# func_append var value
--# Append VALUE to the end of shell variable VAR.
--func_append ()
--{
-- eval "$1=\$$1\$2"
--}
--
--_LT_EOF
-- ;;
-- esac
--
--
-- sed -n '/^# Generated shell functions inserted here/,$p' "$ltmain" >> "$cfgfile" \
-- || (rm -f "$cfgfile"; exit 1)
--
-- mv -f "$cfgfile" "$ofile" ||
-+ sed '$q' "$ltmain" >> "$cfgfile" \
-+ || (rm -f "$cfgfile"; exit 1)
-+
-+ if test x"$xsi_shell" = xyes; then
-+ sed -e '/^func_dirname ()$/,/^} # func_dirname /c\
-+func_dirname ()\
-+{\
-+\ case ${1} in\
-+\ */*) func_dirname_result="${1%/*}${2}" ;;\
-+\ * ) func_dirname_result="${3}" ;;\
-+\ esac\
-+} # Extended-shell func_dirname implementation' "$cfgfile" > $cfgfile.tmp \
-+ && mv -f "$cfgfile.tmp" "$cfgfile" \
-+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-+test 0 -eq $? || _lt_function_replace_fail=:
-+
-+
-+ sed -e '/^func_basename ()$/,/^} # func_basename /c\
-+func_basename ()\
-+{\
-+\ func_basename_result="${1##*/}"\
-+} # Extended-shell func_basename implementation' "$cfgfile" > $cfgfile.tmp \
-+ && mv -f "$cfgfile.tmp" "$cfgfile" \
-+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-+test 0 -eq $? || _lt_function_replace_fail=:
-+
-+
-+ sed -e '/^func_dirname_and_basename ()$/,/^} # func_dirname_and_basename /c\
-+func_dirname_and_basename ()\
-+{\
-+\ case ${1} in\
-+\ */*) func_dirname_result="${1%/*}${2}" ;;\
-+\ * ) func_dirname_result="${3}" ;;\
-+\ esac\
-+\ func_basename_result="${1##*/}"\
-+} # Extended-shell func_dirname_and_basename implementation' "$cfgfile" > $cfgfile.tmp \
-+ && mv -f "$cfgfile.tmp" "$cfgfile" \
-+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-+test 0 -eq $? || _lt_function_replace_fail=:
-+
-+
-+ sed -e '/^func_stripname ()$/,/^} # func_stripname /c\
-+func_stripname ()\
-+{\
-+\ # pdksh 5.2.14 does not do ${X%$Y} correctly if both X and Y are\
-+\ # positional parameters, so assign one to ordinary parameter first.\
-+\ func_stripname_result=${3}\
-+\ func_stripname_result=${func_stripname_result#"${1}"}\
-+\ func_stripname_result=${func_stripname_result%"${2}"}\
-+} # Extended-shell func_stripname implementation' "$cfgfile" > $cfgfile.tmp \
-+ && mv -f "$cfgfile.tmp" "$cfgfile" \
-+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-+test 0 -eq $? || _lt_function_replace_fail=:
-+
-+
-+ sed -e '/^func_split_long_opt ()$/,/^} # func_split_long_opt /c\
-+func_split_long_opt ()\
-+{\
-+\ func_split_long_opt_name=${1%%=*}\
-+\ func_split_long_opt_arg=${1#*=}\
-+} # Extended-shell func_split_long_opt implementation' "$cfgfile" > $cfgfile.tmp \
-+ && mv -f "$cfgfile.tmp" "$cfgfile" \
-+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-+test 0 -eq $? || _lt_function_replace_fail=:
-+
-+
-+ sed -e '/^func_split_short_opt ()$/,/^} # func_split_short_opt /c\
-+func_split_short_opt ()\
-+{\
-+\ func_split_short_opt_arg=${1#??}\
-+\ func_split_short_opt_name=${1%"$func_split_short_opt_arg"}\
-+} # Extended-shell func_split_short_opt implementation' "$cfgfile" > $cfgfile.tmp \
-+ && mv -f "$cfgfile.tmp" "$cfgfile" \
-+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-+test 0 -eq $? || _lt_function_replace_fail=:
-+
-+
-+ sed -e '/^func_lo2o ()$/,/^} # func_lo2o /c\
-+func_lo2o ()\
-+{\
-+\ case ${1} in\
-+\ *.lo) func_lo2o_result=${1%.lo}.${objext} ;;\
-+\ *) func_lo2o_result=${1} ;;\
-+\ esac\
-+} # Extended-shell func_lo2o implementation' "$cfgfile" > $cfgfile.tmp \
-+ && mv -f "$cfgfile.tmp" "$cfgfile" \
-+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-+test 0 -eq $? || _lt_function_replace_fail=:
-+
-+
-+ sed -e '/^func_xform ()$/,/^} # func_xform /c\
-+func_xform ()\
-+{\
-+ func_xform_result=${1%.*}.lo\
-+} # Extended-shell func_xform implementation' "$cfgfile" > $cfgfile.tmp \
-+ && mv -f "$cfgfile.tmp" "$cfgfile" \
-+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-+test 0 -eq $? || _lt_function_replace_fail=:
-+
-+
-+ sed -e '/^func_arith ()$/,/^} # func_arith /c\
-+func_arith ()\
-+{\
-+ func_arith_result=$(( $* ))\
-+} # Extended-shell func_arith implementation' "$cfgfile" > $cfgfile.tmp \
-+ && mv -f "$cfgfile.tmp" "$cfgfile" \
-+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-+test 0 -eq $? || _lt_function_replace_fail=:
-+
-+
-+ sed -e '/^func_len ()$/,/^} # func_len /c\
-+func_len ()\
-+{\
-+ func_len_result=${#1}\
-+} # Extended-shell func_len implementation' "$cfgfile" > $cfgfile.tmp \
-+ && mv -f "$cfgfile.tmp" "$cfgfile" \
-+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-+test 0 -eq $? || _lt_function_replace_fail=:
-+
-+fi
-+
-+if test x"$lt_shell_append" = xyes; then
-+ sed -e '/^func_append ()$/,/^} # func_append /c\
-+func_append ()\
-+{\
-+ eval "${1}+=\\${2}"\
-+} # Extended-shell func_append implementation' "$cfgfile" > $cfgfile.tmp \
-+ && mv -f "$cfgfile.tmp" "$cfgfile" \
-+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-+test 0 -eq $? || _lt_function_replace_fail=:
-+
-+
-+ sed -e '/^func_append_quoted ()$/,/^} # func_append_quoted /c\
-+func_append_quoted ()\
-+{\
-+\ func_quote_for_eval "${2}"\
-+\ eval "${1}+=\\\\ \\$func_quote_for_eval_result"\
-+} # Extended-shell func_append_quoted implementation' "$cfgfile" > $cfgfile.tmp \
-+ && mv -f "$cfgfile.tmp" "$cfgfile" \
-+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-+test 0 -eq $? || _lt_function_replace_fail=:
-+
-+
-+ # Save a `func_append' function call where possible by direct use of '+='
-+ sed -e 's%func_append \([a-zA-Z_]\{1,\}\) "%\1+="%g' $cfgfile > $cfgfile.tmp \
-+ && mv -f "$cfgfile.tmp" "$cfgfile" \
-+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-+ test 0 -eq $? || _lt_function_replace_fail=:
-+else
-+ # Save a `func_append' function call even when '+=' is not available
-+ sed -e 's%func_append \([a-zA-Z_]\{1,\}\) "%\1="$\1%g' $cfgfile > $cfgfile.tmp \
-+ && mv -f "$cfgfile.tmp" "$cfgfile" \
-+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-+ test 0 -eq $? || _lt_function_replace_fail=:
-+fi
-+
-+if test x"$_lt_function_replace_fail" = x":"; then
-+ { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: Unable to substitute extended shell functions in $ofile" >&5
-+$as_echo "$as_me: WARNING: Unable to substitute extended shell functions in $ofile" >&2;}
-+fi
-+
-+
-+ mv -f "$cfgfile" "$ofile" ||
- (rm -f "$ofile" && cp "$cfgfile" "$ofile" && rm -f "$cfgfile")
- chmod +x "$ofile"
-
-diff --git a/gas/configure b/gas/configure
-index dc6a6682aa4..10364bd81da 100755
---- a/gas/configure
-+++ b/gas/configure
-@@ -681,8 +681,11 @@ OTOOL
- LIPO
- NMEDIT
- DSYMUTIL
-+MANIFEST_TOOL
- RANLIB
-+ac_ct_AR
- AR
-+DLLTOOL
- OBJDUMP
- LN_S
- NM
-@@ -799,6 +802,7 @@ enable_static
- with_pic
- enable_fast_install
- with_gnu_ld
-+with_libtool_sysroot
- enable_libtool_lock
- enable_plugins
- enable_largefile
-@@ -1490,6 +1494,8 @@ Optional Packages:
- --with-pic try to use only PIC/non-PIC objects [default=use
- both]
- --with-gnu-ld assume the C compiler uses GNU ld [default=no]
-+ --with-libtool-sysroot=DIR Search for dependent libraries within DIR
-+ (or the compiler's sysroot if not specified).
- --with-cpu=CPU default cpu variant is CPU (currently only supported
- on ARC)
- --with-system-zlib use installed libz
-@@ -4608,8 +4614,8 @@ esac
-
-
-
--macro_version='2.2.7a'
--macro_revision='1.3134'
-+macro_version='2.4'
-+macro_revision='1.3293'
-
-
-
-@@ -4649,7 +4655,7 @@ ECHO=$ECHO$ECHO$ECHO$ECHO$ECHO$ECHO
- { $as_echo "$as_me:${as_lineno-$LINENO}: checking how to print strings" >&5
- $as_echo_n "checking how to print strings... " >&6; }
- # Test print first, because it will be a builtin if present.
--if test "X`print -r -- -n 2>/dev/null`" = X-n && \
-+if test "X`( print -r -- -n ) 2>/dev/null`" = X-n && \
- test "X`print -r -- $ECHO 2>/dev/null`" = "X$ECHO"; then
- ECHO='print -r --'
- elif test "X`printf %s $ECHO 2>/dev/null`" = "X$ECHO"; then
-@@ -5336,8 +5342,8 @@ $as_echo_n "checking whether the shell understands some XSI constructs... " >&6;
- # Try some XSI features
- xsi_shell=no
- ( _lt_dummy="a/b/c"
-- test "${_lt_dummy##*/},${_lt_dummy%/*},"${_lt_dummy%"$_lt_dummy"}, \
-- = c,a/b,, \
-+ test "${_lt_dummy##*/},${_lt_dummy%/*},${_lt_dummy#??}"${_lt_dummy%"$_lt_dummy"}, \
-+ = c,a/b,b/c, \
- && eval 'test $(( 1 + 1 )) -eq 2 \
- && test "${#_lt_dummy}" -eq 5' ) >/dev/null 2>&1 \
- && xsi_shell=yes
-@@ -5386,6 +5392,80 @@ esac
-
-
-
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to convert $build file names to $host format" >&5
-+$as_echo_n "checking how to convert $build file names to $host format... " >&6; }
-+if ${lt_cv_to_host_file_cmd+:} false; then :
-+ $as_echo_n "(cached) " >&6
-+else
-+ case $host in
-+ *-*-mingw* )
-+ case $build in
-+ *-*-mingw* ) # actually msys
-+ lt_cv_to_host_file_cmd=func_convert_file_msys_to_w32
-+ ;;
-+ *-*-cygwin* )
-+ lt_cv_to_host_file_cmd=func_convert_file_cygwin_to_w32
-+ ;;
-+ * ) # otherwise, assume *nix
-+ lt_cv_to_host_file_cmd=func_convert_file_nix_to_w32
-+ ;;
-+ esac
-+ ;;
-+ *-*-cygwin* )
-+ case $build in
-+ *-*-mingw* ) # actually msys
-+ lt_cv_to_host_file_cmd=func_convert_file_msys_to_cygwin
-+ ;;
-+ *-*-cygwin* )
-+ lt_cv_to_host_file_cmd=func_convert_file_noop
-+ ;;
-+ * ) # otherwise, assume *nix
-+ lt_cv_to_host_file_cmd=func_convert_file_nix_to_cygwin
-+ ;;
-+ esac
-+ ;;
-+ * ) # unhandled hosts (and "normal" native builds)
-+ lt_cv_to_host_file_cmd=func_convert_file_noop
-+ ;;
-+esac
-+
-+fi
-+
-+to_host_file_cmd=$lt_cv_to_host_file_cmd
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_to_host_file_cmd" >&5
-+$as_echo "$lt_cv_to_host_file_cmd" >&6; }
-+
-+
-+
-+
-+
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to convert $build file names to toolchain format" >&5
-+$as_echo_n "checking how to convert $build file names to toolchain format... " >&6; }
-+if ${lt_cv_to_tool_file_cmd+:} false; then :
-+ $as_echo_n "(cached) " >&6
-+else
-+ #assume ordinary cross tools, or native build.
-+lt_cv_to_tool_file_cmd=func_convert_file_noop
-+case $host in
-+ *-*-mingw* )
-+ case $build in
-+ *-*-mingw* ) # actually msys
-+ lt_cv_to_tool_file_cmd=func_convert_file_msys_to_w32
-+ ;;
-+ esac
-+ ;;
-+esac
-+
-+fi
-+
-+to_tool_file_cmd=$lt_cv_to_tool_file_cmd
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_to_tool_file_cmd" >&5
-+$as_echo "$lt_cv_to_tool_file_cmd" >&6; }
-+
-+
-+
-+
-+
- { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $LD option to reload object files" >&5
- $as_echo_n "checking for $LD option to reload object files... " >&6; }
- if ${lt_cv_ld_reload_flag+:} false; then :
-@@ -5402,6 +5482,11 @@ case $reload_flag in
- esac
- reload_cmds='$LD$reload_flag -o $output$reload_objs'
- case $host_os in
-+ cygwin* | mingw* | pw32* | cegcc*)
-+ if test "$GCC" != yes; then
-+ reload_cmds=false
-+ fi
-+ ;;
- darwin*)
- if test "$GCC" = yes; then
- reload_cmds='$LTCC $LTCFLAGS -nostdlib ${wl}-r -o $output$reload_objs'
-@@ -5570,7 +5655,8 @@ mingw* | pw32*)
- lt_cv_deplibs_check_method='file_magic ^x86 archive import|^x86 DLL'
- lt_cv_file_magic_cmd='func_win32_libid'
- else
-- lt_cv_deplibs_check_method='file_magic file format pei*-i386(.*architecture: i386)?'
-+ # Keep this pattern in sync with the one in func_win32_libid.
-+ lt_cv_deplibs_check_method='file_magic file format (pei*-i386(.*architecture: i386)?|pe-arm-wince|pe-x86-64)'
- lt_cv_file_magic_cmd='$OBJDUMP -f'
- fi
- ;;
-@@ -5724,6 +5810,21 @@ esac
- fi
- { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_deplibs_check_method" >&5
- $as_echo "$lt_cv_deplibs_check_method" >&6; }
-+
-+file_magic_glob=
-+want_nocaseglob=no
-+if test "$build" = "$host"; then
-+ case $host_os in
-+ mingw* | pw32*)
-+ if ( shopt | grep nocaseglob ) >/dev/null 2>&1; then
-+ want_nocaseglob=yes
-+ else
-+ file_magic_glob=`echo aAbBcCdDeEfFgGhHiIjJkKlLmMnNoOpPqQrRsStTuUvVwWxXyYzZ | $SED -e "s/\(..\)/s\/[\1]\/[\1]\/g;/g"`
-+ fi
-+ ;;
-+ esac
-+fi
-+
- file_magic_cmd=$lt_cv_file_magic_cmd
- deplibs_check_method=$lt_cv_deplibs_check_method
- test -z "$deplibs_check_method" && deplibs_check_method=unknown
-@@ -5739,6 +5840,157 @@ test -z "$deplibs_check_method" && deplibs_check_method=unknown
-
-
-
-+
-+
-+
-+
-+
-+
-+
-+
-+
-+
-+if test -n "$ac_tool_prefix"; then
-+ # Extract the first word of "${ac_tool_prefix}dlltool", so it can be a program name with args.
-+set dummy ${ac_tool_prefix}dlltool; ac_word=$2
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-+$as_echo_n "checking for $ac_word... " >&6; }
-+if ${ac_cv_prog_DLLTOOL+:} false; then :
-+ $as_echo_n "(cached) " >&6
-+else
-+ if test -n "$DLLTOOL"; then
-+ ac_cv_prog_DLLTOOL="$DLLTOOL" # Let the user override the test.
-+else
-+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-+for as_dir in $PATH
-+do
-+ IFS=$as_save_IFS
-+ test -z "$as_dir" && as_dir=.
-+ for ac_exec_ext in '' $ac_executable_extensions; do
-+ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
-+ ac_cv_prog_DLLTOOL="${ac_tool_prefix}dlltool"
-+ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
-+ break 2
-+ fi
-+done
-+ done
-+IFS=$as_save_IFS
-+
-+fi
-+fi
-+DLLTOOL=$ac_cv_prog_DLLTOOL
-+if test -n "$DLLTOOL"; then
-+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $DLLTOOL" >&5
-+$as_echo "$DLLTOOL" >&6; }
-+else
-+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-+$as_echo "no" >&6; }
-+fi
-+
-+
-+fi
-+if test -z "$ac_cv_prog_DLLTOOL"; then
-+ ac_ct_DLLTOOL=$DLLTOOL
-+ # Extract the first word of "dlltool", so it can be a program name with args.
-+set dummy dlltool; ac_word=$2
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-+$as_echo_n "checking for $ac_word... " >&6; }
-+if ${ac_cv_prog_ac_ct_DLLTOOL+:} false; then :
-+ $as_echo_n "(cached) " >&6
-+else
-+ if test -n "$ac_ct_DLLTOOL"; then
-+ ac_cv_prog_ac_ct_DLLTOOL="$ac_ct_DLLTOOL" # Let the user override the test.
-+else
-+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-+for as_dir in $PATH
-+do
-+ IFS=$as_save_IFS
-+ test -z "$as_dir" && as_dir=.
-+ for ac_exec_ext in '' $ac_executable_extensions; do
-+ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
-+ ac_cv_prog_ac_ct_DLLTOOL="dlltool"
-+ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
-+ break 2
-+ fi
-+done
-+ done
-+IFS=$as_save_IFS
-+
-+fi
-+fi
-+ac_ct_DLLTOOL=$ac_cv_prog_ac_ct_DLLTOOL
-+if test -n "$ac_ct_DLLTOOL"; then
-+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_DLLTOOL" >&5
-+$as_echo "$ac_ct_DLLTOOL" >&6; }
-+else
-+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-+$as_echo "no" >&6; }
-+fi
-+
-+ if test "x$ac_ct_DLLTOOL" = x; then
-+ DLLTOOL="false"
-+ else
-+ case $cross_compiling:$ac_tool_warned in
-+yes:)
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
-+$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
-+ac_tool_warned=yes ;;
-+esac
-+ DLLTOOL=$ac_ct_DLLTOOL
-+ fi
-+else
-+ DLLTOOL="$ac_cv_prog_DLLTOOL"
-+fi
-+
-+test -z "$DLLTOOL" && DLLTOOL=dlltool
-+
-+
-+
-+
-+
-+
-+
-+
-+
-+
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to associate runtime and link libraries" >&5
-+$as_echo_n "checking how to associate runtime and link libraries... " >&6; }
-+if ${lt_cv_sharedlib_from_linklib_cmd+:} false; then :
-+ $as_echo_n "(cached) " >&6
-+else
-+ lt_cv_sharedlib_from_linklib_cmd='unknown'
-+
-+case $host_os in
-+cygwin* | mingw* | pw32* | cegcc*)
-+ # two different shell functions defined in ltmain.sh
-+ # decide which to use based on capabilities of $DLLTOOL
-+ case `$DLLTOOL --help 2>&1` in
-+ *--identify-strict*)
-+ lt_cv_sharedlib_from_linklib_cmd=func_cygming_dll_for_implib
-+ ;;
-+ *)
-+ lt_cv_sharedlib_from_linklib_cmd=func_cygming_dll_for_implib_fallback
-+ ;;
-+ esac
-+ ;;
-+*)
-+ # fallback: assume linklib IS sharedlib
-+ lt_cv_sharedlib_from_linklib_cmd="$ECHO"
-+ ;;
-+esac
-+
-+fi
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_sharedlib_from_linklib_cmd" >&5
-+$as_echo "$lt_cv_sharedlib_from_linklib_cmd" >&6; }
-+sharedlib_from_linklib_cmd=$lt_cv_sharedlib_from_linklib_cmd
-+test -z "$sharedlib_from_linklib_cmd" && sharedlib_from_linklib_cmd=$ECHO
-+
-+
-+
-+
-+
-+
-+
- plugin_option=
- plugin_names="liblto_plugin.so liblto_plugin-0.dll cyglto_plugin-0.dll"
- for plugin in $plugin_names; do
-@@ -5753,8 +6005,10 @@ for plugin in $plugin_names; do
- done
-
- if test -n "$ac_tool_prefix"; then
-- # Extract the first word of "${ac_tool_prefix}ar", so it can be a program name with args.
--set dummy ${ac_tool_prefix}ar; ac_word=$2
-+ for ac_prog in ar
-+ do
-+ # Extract the first word of "$ac_tool_prefix$ac_prog", so it can be a program name with args.
-+set dummy $ac_tool_prefix$ac_prog; ac_word=$2
- { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
- $as_echo_n "checking for $ac_word... " >&6; }
- if ${ac_cv_prog_AR+:} false; then :
-@@ -5770,7 +6024,7 @@ do
- test -z "$as_dir" && as_dir=.
- for ac_exec_ext in '' $ac_executable_extensions; do
- if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
-- ac_cv_prog_AR="${ac_tool_prefix}ar"
-+ ac_cv_prog_AR="$ac_tool_prefix$ac_prog"
- $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
- break 2
- fi
-@@ -5790,11 +6044,15 @@ $as_echo "no" >&6; }
- fi
-
-
-+ test -n "$AR" && break
-+ done
- fi
--if test -z "$ac_cv_prog_AR"; then
-+if test -z "$AR"; then
- ac_ct_AR=$AR
-- # Extract the first word of "ar", so it can be a program name with args.
--set dummy ar; ac_word=$2
-+ for ac_prog in ar
-+do
-+ # Extract the first word of "$ac_prog", so it can be a program name with args.
-+set dummy $ac_prog; ac_word=$2
- { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
- $as_echo_n "checking for $ac_word... " >&6; }
- if ${ac_cv_prog_ac_ct_AR+:} false; then :
-@@ -5810,7 +6068,7 @@ do
- test -z "$as_dir" && as_dir=.
- for ac_exec_ext in '' $ac_executable_extensions; do
- if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
-- ac_cv_prog_ac_ct_AR="ar"
-+ ac_cv_prog_ac_ct_AR="$ac_prog"
- $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
- break 2
- fi
-@@ -5829,6 +6087,10 @@ else
- $as_echo "no" >&6; }
- fi
-
-+
-+ test -n "$ac_ct_AR" && break
-+done
-+
- if test "x$ac_ct_AR" = x; then
- AR="false"
- else
-@@ -5840,29 +6102,81 @@ ac_tool_warned=yes ;;
- esac
- AR=$ac_ct_AR
- fi
--else
-- AR="$ac_cv_prog_AR"
- fi
-
--test -z "$AR" && AR=ar
--if test -n "$plugin_option"; then
-- if $AR --help 2>&1 | grep -q "\--plugin"; then
-- touch conftest.c
-- $AR $plugin_option rc conftest.a conftest.c
-- if test "$?" != 0; then
-- { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: Failed: $AR $plugin_option rc" >&5
-+ touch conftest.c
-+ $AR $plugin_option rc conftest.a conftest.c
-+ if test "$?" != 0; then
-+ { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: Failed: $AR $plugin_option rc" >&5
- $as_echo "$as_me: WARNING: Failed: $AR $plugin_option rc" >&2;}
-- else
-- AR="$AR $plugin_option"
-- fi
-- rm -f conftest.*
-+ else
-+ AR="$AR $plugin_option"
- fi
--fi
--test -z "$AR_FLAGS" && AR_FLAGS=cru
-+ rm -f conftest.*
-+: ${AR=ar}
-+: ${AR_FLAGS=cru}
-+
-+
-+
-+
-+
-+
-+
-+
-+
-+
-+
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for archiver @FILE support" >&5
-+$as_echo_n "checking for archiver @FILE support... " >&6; }
-+if ${lt_cv_ar_at_file+:} false; then :
-+ $as_echo_n "(cached) " >&6
-+else
-+ lt_cv_ar_at_file=no
-+ cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-+/* end confdefs.h. */
-+
-+int
-+main ()
-+{
-
-+ ;
-+ return 0;
-+}
-+_ACEOF
-+if ac_fn_c_try_compile "$LINENO"; then :
-+ echo conftest.$ac_objext > conftest.lst
-+ lt_ar_try='$AR $AR_FLAGS libconftest.a @conftest.lst >&5'
-+ { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$lt_ar_try\""; } >&5
-+ (eval $lt_ar_try) 2>&5
-+ ac_status=$?
-+ $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
-+ test $ac_status = 0; }
-+ if test "$ac_status" -eq 0; then
-+ # Ensure the archiver fails upon bogus file names.
-+ rm -f conftest.$ac_objext libconftest.a
-+ { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$lt_ar_try\""; } >&5
-+ (eval $lt_ar_try) 2>&5
-+ ac_status=$?
-+ $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
-+ test $ac_status = 0; }
-+ if test "$ac_status" -ne 0; then
-+ lt_cv_ar_at_file=@
-+ fi
-+ fi
-+ rm -f conftest.* libconftest.a
-
-+fi
-+rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
-
-+fi
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_ar_at_file" >&5
-+$as_echo "$lt_cv_ar_at_file" >&6; }
-
-+if test "x$lt_cv_ar_at_file" = xno; then
-+ archiver_list_spec=
-+else
-+ archiver_list_spec=$lt_cv_ar_at_file
-+fi
-
-
-
-@@ -6209,8 +6523,8 @@ esac
- lt_cv_sys_global_symbol_to_cdecl="sed -n -e 's/^T .* \(.*\)$/extern int \1();/p' -e 's/^$symcode* .* \(.*\)$/extern char \1;/p'"
-
- # Transform an extracted symbol line into symbol name and symbol address
--lt_cv_sys_global_symbol_to_c_name_address="sed -n -e 's/^: \([^ ]*\) $/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"\2\", (void *) \&\2},/p'"
--lt_cv_sys_global_symbol_to_c_name_address_lib_prefix="sed -n -e 's/^: \([^ ]*\) $/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \(lib[^ ]*\)$/ {\"\2\", (void *) \&\2},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"lib\2\", (void *) \&\2},/p'"
-+lt_cv_sys_global_symbol_to_c_name_address="sed -n -e 's/^: \([^ ]*\)[ ]*$/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"\2\", (void *) \&\2},/p'"
-+lt_cv_sys_global_symbol_to_c_name_address_lib_prefix="sed -n -e 's/^: \([^ ]*\)[ ]*$/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \(lib[^ ]*\)$/ {\"\2\", (void *) \&\2},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"lib\2\", (void *) \&\2},/p'"
-
- # Handle CRLF in mingw tool chain
- opt_cr=
-@@ -6246,6 +6560,7 @@ for ac_symprfx in "" "_"; do
- else
- lt_cv_sys_global_symbol_pipe="sed -n -e 's/^.*[ ]\($symcode$symcode*\)[ ][ ]*$ac_symprfx$sympat$opt_cr$/$symxfrm/p'"
- fi
-+ lt_cv_sys_global_symbol_pipe="$lt_cv_sys_global_symbol_pipe | sed '/ __gnu_lto/d'"
-
- # Check to see that the pipe works correctly.
- pipe_works=no
-@@ -6287,6 +6602,18 @@ _LT_EOF
- if $GREP ' nm_test_var$' "$nlist" >/dev/null; then
- if $GREP ' nm_test_func$' "$nlist" >/dev/null; then
- cat <<_LT_EOF > conftest.$ac_ext
-+/* Keep this code in sync between libtool.m4, ltmain, lt_system.h, and tests. */
-+#if defined(_WIN32) || defined(__CYGWIN__) || defined(_WIN32_WCE)
-+/* DATA imports from DLLs on WIN32 con't be const, because runtime
-+ relocations are performed -- see ld's documentation on pseudo-relocs. */
-+# define LT_DLSYM_CONST
-+#elif defined(__osf__)
-+/* This system does not cope well with relocations in const data. */
-+# define LT_DLSYM_CONST
-+#else
-+# define LT_DLSYM_CONST const
-+#endif
-+
- #ifdef __cplusplus
- extern "C" {
- #endif
-@@ -6298,7 +6625,7 @@ _LT_EOF
- cat <<_LT_EOF >> conftest.$ac_ext
-
- /* The mapping between symbol names and symbols. */
--const struct {
-+LT_DLSYM_CONST struct {
- const char *name;
- void *address;
- }
-@@ -6324,8 +6651,8 @@ static const void *lt_preloaded_setup() {
- _LT_EOF
- # Now try linking the two files.
- mv conftest.$ac_objext conftstm.$ac_objext
-- lt_save_LIBS="$LIBS"
-- lt_save_CFLAGS="$CFLAGS"
-+ lt_globsym_save_LIBS=$LIBS
-+ lt_globsym_save_CFLAGS=$CFLAGS
- LIBS="conftstm.$ac_objext"
- CFLAGS="$CFLAGS$lt_prog_compiler_no_builtin_flag"
- if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_link\""; } >&5
-@@ -6335,8 +6662,8 @@ _LT_EOF
- test $ac_status = 0; } && test -s conftest${ac_exeext}; then
- pipe_works=yes
- fi
-- LIBS="$lt_save_LIBS"
-- CFLAGS="$lt_save_CFLAGS"
-+ LIBS=$lt_globsym_save_LIBS
-+ CFLAGS=$lt_globsym_save_CFLAGS
- else
- echo "cannot find nm_test_func in $nlist" >&5
- fi
-@@ -6373,6 +6700,19 @@ else
- $as_echo "ok" >&6; }
- fi
-
-+# Response file support.
-+if test "$lt_cv_nm_interface" = "MS dumpbin"; then
-+ nm_file_list_spec='@'
-+elif $NM --help 2>/dev/null | grep '[@]FILE' >/dev/null; then
-+ nm_file_list_spec='@'
-+fi
-+
-+
-+
-+
-+
-+
-+
-
-
-
-@@ -6389,6 +6729,42 @@ fi
-
-
-
-+
-+
-+
-+
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for sysroot" >&5
-+$as_echo_n "checking for sysroot... " >&6; }
-+
-+# Check whether --with-libtool-sysroot was given.
-+if test "${with_libtool_sysroot+set}" = set; then :
-+ withval=$with_libtool_sysroot;
-+else
-+ with_libtool_sysroot=no
-+fi
-+
-+
-+lt_sysroot=
-+case ${with_libtool_sysroot} in #(
-+ yes)
-+ if test "$GCC" = yes; then
-+ lt_sysroot=`$CC --print-sysroot 2>/dev/null`
-+ fi
-+ ;; #(
-+ /*)
-+ lt_sysroot=`echo "$with_libtool_sysroot" | sed -e "$sed_quote_subst"`
-+ ;; #(
-+ no|'')
-+ ;; #(
-+ *)
-+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: ${with_libtool_sysroot}" >&5
-+$as_echo "${with_libtool_sysroot}" >&6; }
-+ as_fn_error $? "The sysroot must be an absolute path." "$LINENO" 5
-+ ;;
-+esac
-+
-+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: ${lt_sysroot:-no}" >&5
-+$as_echo "${lt_sysroot:-no}" >&6; }
-
-
-
-@@ -6600,6 +6976,123 @@ esac
-
- need_locks="$enable_libtool_lock"
-
-+if test -n "$ac_tool_prefix"; then
-+ # Extract the first word of "${ac_tool_prefix}mt", so it can be a program name with args.
-+set dummy ${ac_tool_prefix}mt; ac_word=$2
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-+$as_echo_n "checking for $ac_word... " >&6; }
-+if ${ac_cv_prog_MANIFEST_TOOL+:} false; then :
-+ $as_echo_n "(cached) " >&6
-+else
-+ if test -n "$MANIFEST_TOOL"; then
-+ ac_cv_prog_MANIFEST_TOOL="$MANIFEST_TOOL" # Let the user override the test.
-+else
-+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-+for as_dir in $PATH
-+do
-+ IFS=$as_save_IFS
-+ test -z "$as_dir" && as_dir=.
-+ for ac_exec_ext in '' $ac_executable_extensions; do
-+ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
-+ ac_cv_prog_MANIFEST_TOOL="${ac_tool_prefix}mt"
-+ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
-+ break 2
-+ fi
-+done
-+ done
-+IFS=$as_save_IFS
-+
-+fi
-+fi
-+MANIFEST_TOOL=$ac_cv_prog_MANIFEST_TOOL
-+if test -n "$MANIFEST_TOOL"; then
-+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $MANIFEST_TOOL" >&5
-+$as_echo "$MANIFEST_TOOL" >&6; }
-+else
-+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-+$as_echo "no" >&6; }
-+fi
-+
-+
-+fi
-+if test -z "$ac_cv_prog_MANIFEST_TOOL"; then
-+ ac_ct_MANIFEST_TOOL=$MANIFEST_TOOL
-+ # Extract the first word of "mt", so it can be a program name with args.
-+set dummy mt; ac_word=$2
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-+$as_echo_n "checking for $ac_word... " >&6; }
-+if ${ac_cv_prog_ac_ct_MANIFEST_TOOL+:} false; then :
-+ $as_echo_n "(cached) " >&6
-+else
-+ if test -n "$ac_ct_MANIFEST_TOOL"; then
-+ ac_cv_prog_ac_ct_MANIFEST_TOOL="$ac_ct_MANIFEST_TOOL" # Let the user override the test.
-+else
-+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-+for as_dir in $PATH
-+do
-+ IFS=$as_save_IFS
-+ test -z "$as_dir" && as_dir=.
-+ for ac_exec_ext in '' $ac_executable_extensions; do
-+ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
-+ ac_cv_prog_ac_ct_MANIFEST_TOOL="mt"
-+ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
-+ break 2
-+ fi
-+done
-+ done
-+IFS=$as_save_IFS
-+
-+fi
-+fi
-+ac_ct_MANIFEST_TOOL=$ac_cv_prog_ac_ct_MANIFEST_TOOL
-+if test -n "$ac_ct_MANIFEST_TOOL"; then
-+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_MANIFEST_TOOL" >&5
-+$as_echo "$ac_ct_MANIFEST_TOOL" >&6; }
-+else
-+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-+$as_echo "no" >&6; }
-+fi
-+
-+ if test "x$ac_ct_MANIFEST_TOOL" = x; then
-+ MANIFEST_TOOL=":"
-+ else
-+ case $cross_compiling:$ac_tool_warned in
-+yes:)
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
-+$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
-+ac_tool_warned=yes ;;
-+esac
-+ MANIFEST_TOOL=$ac_ct_MANIFEST_TOOL
-+ fi
-+else
-+ MANIFEST_TOOL="$ac_cv_prog_MANIFEST_TOOL"
-+fi
-+
-+test -z "$MANIFEST_TOOL" && MANIFEST_TOOL=mt
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking if $MANIFEST_TOOL is a manifest tool" >&5
-+$as_echo_n "checking if $MANIFEST_TOOL is a manifest tool... " >&6; }
-+if ${lt_cv_path_mainfest_tool+:} false; then :
-+ $as_echo_n "(cached) " >&6
-+else
-+ lt_cv_path_mainfest_tool=no
-+ echo "$as_me:$LINENO: $MANIFEST_TOOL '-?'" >&5
-+ $MANIFEST_TOOL '-?' 2>conftest.err > conftest.out
-+ cat conftest.err >&5
-+ if $GREP 'Manifest Tool' conftest.out > /dev/null; then
-+ lt_cv_path_mainfest_tool=yes
-+ fi
-+ rm -f conftest*
-+fi
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_path_mainfest_tool" >&5
-+$as_echo "$lt_cv_path_mainfest_tool" >&6; }
-+if test "x$lt_cv_path_mainfest_tool" != xyes; then
-+ MANIFEST_TOOL=:
-+fi
-+
-+
-+
-+
-+
-
- case $host_os in
- rhapsody* | darwin*)
-@@ -7163,6 +7656,8 @@ _LT_EOF
- $LTCC $LTCFLAGS -c -o conftest.o conftest.c 2>&5
- echo "$AR cru libconftest.a conftest.o" >&5
- $AR cru libconftest.a conftest.o 2>&5
-+ echo "$RANLIB libconftest.a" >&5
-+ $RANLIB libconftest.a 2>&5
- cat > conftest.c << _LT_EOF
- int main() { return 0;}
- _LT_EOF
-@@ -7745,8 +8240,6 @@ fi
- lt_prog_compiler_pic=
- lt_prog_compiler_static=
-
--{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $compiler option to produce PIC" >&5
--$as_echo_n "checking for $compiler option to produce PIC... " >&6; }
-
- if test "$GCC" = yes; then
- lt_prog_compiler_wl='-Wl,'
-@@ -7912,6 +8405,12 @@ $as_echo_n "checking for $compiler option to produce PIC... " >&6; }
- lt_prog_compiler_pic='--shared'
- lt_prog_compiler_static='--static'
- ;;
-+ nagfor*)
-+ # NAG Fortran compiler
-+ lt_prog_compiler_wl='-Wl,-Wl,,'
-+ lt_prog_compiler_pic='-PIC'
-+ lt_prog_compiler_static='-Bstatic'
-+ ;;
- pgcc* | pgf77* | pgf90* | pgf95* | pgfortran*)
- # Portland Group compilers (*not* the Pentium gcc compiler,
- # which looks to be a dead project)
-@@ -7974,7 +8473,7 @@ $as_echo_n "checking for $compiler option to produce PIC... " >&6; }
- lt_prog_compiler_pic='-KPIC'
- lt_prog_compiler_static='-Bstatic'
- case $cc_basename in
-- f77* | f90* | f95*)
-+ f77* | f90* | f95* | sunf77* | sunf90* | sunf95*)
- lt_prog_compiler_wl='-Qoption ld ';;
- *)
- lt_prog_compiler_wl='-Wl,';;
-@@ -8031,13 +8530,17 @@ case $host_os in
- lt_prog_compiler_pic="$lt_prog_compiler_pic -DPIC"
- ;;
- esac
--{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_prog_compiler_pic" >&5
--$as_echo "$lt_prog_compiler_pic" >&6; }
--
--
--
--
-
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $compiler option to produce PIC" >&5
-+$as_echo_n "checking for $compiler option to produce PIC... " >&6; }
-+if ${lt_cv_prog_compiler_pic+:} false; then :
-+ $as_echo_n "(cached) " >&6
-+else
-+ lt_cv_prog_compiler_pic=$lt_prog_compiler_pic
-+fi
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_pic" >&5
-+$as_echo "$lt_cv_prog_compiler_pic" >&6; }
-+lt_prog_compiler_pic=$lt_cv_prog_compiler_pic
-
- #
- # Check to make sure the PIC flag actually works.
-@@ -8098,6 +8601,11 @@ fi
-
-
-
-+
-+
-+
-+
-+
- #
- # Check to make sure the static flag actually works.
- #
-@@ -8448,7 +8956,8 @@ _LT_EOF
- allow_undefined_flag=unsupported
- always_export_symbols=no
- enable_shared_with_static_runtimes=yes
-- export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1 DATA/'\'' | $SED -e '\''/^[AITW][ ]/s/.*[ ]//'\'' | sort | uniq > $export_symbols'
-+ export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1 DATA/;s/^.*[ ]__nm__\([^ ]*\)[ ][^ ]*/\1 DATA/;/^I[ ]/d;/^[AITW][ ]/s/.* //'\'' | sort | uniq > $export_symbols'
-+ exclude_expsyms='[_]+GLOBAL_OFFSET_TABLE_|[_]+GLOBAL__[FID]_.*|[_]+head_[A-Za-z0-9_]+_dll|[A-Za-z0-9_]+_dll_iname'
-
- if $LD --help 2>&1 | $GREP 'auto-import' > /dev/null; then
- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib'
-@@ -8547,12 +9056,12 @@ _LT_EOF
- whole_archive_flag_spec='--whole-archive$convenience --no-whole-archive'
- hardcode_libdir_flag_spec=
- hardcode_libdir_flag_spec_ld='-rpath $libdir'
-- archive_cmds='$LD -shared $libobjs $deplibs $compiler_flags -soname $soname -o $lib'
-+ archive_cmds='$LD -shared $libobjs $deplibs $linker_flags -soname $soname -o $lib'
- if test "x$supports_anon_versioning" = xyes; then
- archive_expsym_cmds='echo "{ global:" > $output_objdir/$libname.ver~
- cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~
- echo "local: *; };" >> $output_objdir/$libname.ver~
-- $LD -shared $libobjs $deplibs $compiler_flags -soname $soname -version-script $output_objdir/$libname.ver -o $lib'
-+ $LD -shared $libobjs $deplibs $linker_flags -soname $soname -version-script $output_objdir/$libname.ver -o $lib'
- fi
- ;;
- esac
-@@ -8566,8 +9075,8 @@ _LT_EOF
- archive_cmds='$LD -Bshareable $libobjs $deplibs $linker_flags -o $lib'
- wlarc=
- else
-- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
-- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
-+ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
-+ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
- fi
- ;;
-
-@@ -8585,8 +9094,8 @@ _LT_EOF
-
- _LT_EOF
- elif $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then
-- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
-- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
-+ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
-+ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
- else
- ld_shlibs=no
- fi
-@@ -8632,8 +9141,8 @@ _LT_EOF
-
- *)
- if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then
-- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
-- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
-+ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
-+ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
- else
- ld_shlibs=no
- fi
-@@ -8763,7 +9272,13 @@ _LT_EOF
- allow_undefined_flag='-berok'
- # Determine the default libpath from the value encoded in an
- # empty executable.
-- cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-+ if test "${lt_cv_aix_libpath+set}" = set; then
-+ aix_libpath=$lt_cv_aix_libpath
-+else
-+ if ${lt_cv_aix_libpath_+:} false; then :
-+ $as_echo_n "(cached) " >&6
-+else
-+ cat confdefs.h - <<_ACEOF >conftest.$ac_ext
- /* end confdefs.h. */
-
- int
-@@ -8776,22 +9291,29 @@ main ()
- _ACEOF
- if ac_fn_c_try_link "$LINENO"; then :
-
--lt_aix_libpath_sed='
-- /Import File Strings/,/^$/ {
-- /^0/ {
-- s/^0 *\(.*\)$/\1/
-- p
-- }
-- }'
--aix_libpath=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
--# Check for a 64-bit object if we didn't find anything.
--if test -z "$aix_libpath"; then
-- aix_libpath=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
--fi
-+ lt_aix_libpath_sed='
-+ /Import File Strings/,/^$/ {
-+ /^0/ {
-+ s/^0 *\([^ ]*\) *$/\1/
-+ p
-+ }
-+ }'
-+ lt_cv_aix_libpath_=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
-+ # Check for a 64-bit object if we didn't find anything.
-+ if test -z "$lt_cv_aix_libpath_"; then
-+ lt_cv_aix_libpath_=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
-+ fi
- fi
- rm -f core conftest.err conftest.$ac_objext \
- conftest$ac_exeext conftest.$ac_ext
--if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
-+ if test -z "$lt_cv_aix_libpath_"; then
-+ lt_cv_aix_libpath_="/usr/lib:/lib"
-+ fi
-+
-+fi
-+
-+ aix_libpath=$lt_cv_aix_libpath_
-+fi
-
- hardcode_libdir_flag_spec='${wl}-blibpath:$libdir:'"$aix_libpath"
- archive_expsym_cmds='$CC -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags `if test "x${allow_undefined_flag}" != "x"; then func_echo_all "${wl}${allow_undefined_flag}"; else :; fi` '"\${wl}$exp_sym_flag:\$export_symbols $shared_flag"
-@@ -8803,7 +9325,13 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
- else
- # Determine the default libpath from the value encoded in an
- # empty executable.
-- cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-+ if test "${lt_cv_aix_libpath+set}" = set; then
-+ aix_libpath=$lt_cv_aix_libpath
-+else
-+ if ${lt_cv_aix_libpath_+:} false; then :
-+ $as_echo_n "(cached) " >&6
-+else
-+ cat confdefs.h - <<_ACEOF >conftest.$ac_ext
- /* end confdefs.h. */
-
- int
-@@ -8816,22 +9344,29 @@ main ()
- _ACEOF
- if ac_fn_c_try_link "$LINENO"; then :
-
--lt_aix_libpath_sed='
-- /Import File Strings/,/^$/ {
-- /^0/ {
-- s/^0 *\(.*\)$/\1/
-- p
-- }
-- }'
--aix_libpath=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
--# Check for a 64-bit object if we didn't find anything.
--if test -z "$aix_libpath"; then
-- aix_libpath=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
--fi
-+ lt_aix_libpath_sed='
-+ /Import File Strings/,/^$/ {
-+ /^0/ {
-+ s/^0 *\([^ ]*\) *$/\1/
-+ p
-+ }
-+ }'
-+ lt_cv_aix_libpath_=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
-+ # Check for a 64-bit object if we didn't find anything.
-+ if test -z "$lt_cv_aix_libpath_"; then
-+ lt_cv_aix_libpath_=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
-+ fi
- fi
- rm -f core conftest.err conftest.$ac_objext \
- conftest$ac_exeext conftest.$ac_ext
--if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
-+ if test -z "$lt_cv_aix_libpath_"; then
-+ lt_cv_aix_libpath_="/usr/lib:/lib"
-+ fi
-+
-+fi
-+
-+ aix_libpath=$lt_cv_aix_libpath_
-+fi
-
- hardcode_libdir_flag_spec='${wl}-blibpath:$libdir:'"$aix_libpath"
- # Warning - without using the other run time loading flags,
-@@ -8876,20 +9411,63 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
- # Microsoft Visual C++.
- # hardcode_libdir_flag_spec is actually meaningless, as there is
- # no search path for DLLs.
-- hardcode_libdir_flag_spec=' '
-- allow_undefined_flag=unsupported
-- # Tell ltmain to make .lib files, not .a files.
-- libext=lib
-- # Tell ltmain to make .dll files, not .so files.
-- shrext_cmds=".dll"
-- # FIXME: Setting linknames here is a bad hack.
-- archive_cmds='$CC -o $lib $libobjs $compiler_flags `func_echo_all "$deplibs" | $SED '\''s/ -lc$//'\''` -link -dll~linknames='
-- # The linker will automatically build a .lib file if we build a DLL.
-- old_archive_from_new_cmds='true'
-- # FIXME: Should let the user specify the lib program.
-- old_archive_cmds='lib -OUT:$oldlib$oldobjs$old_deplibs'
-- fix_srcfile_path='`cygpath -w "$srcfile"`'
-- enable_shared_with_static_runtimes=yes
-+ case $cc_basename in
-+ cl*)
-+ # Native MSVC
-+ hardcode_libdir_flag_spec=' '
-+ allow_undefined_flag=unsupported
-+ always_export_symbols=yes
-+ file_list_spec='@'
-+ # Tell ltmain to make .lib files, not .a files.
-+ libext=lib
-+ # Tell ltmain to make .dll files, not .so files.
-+ shrext_cmds=".dll"
-+ # FIXME: Setting linknames here is a bad hack.
-+ archive_cmds='$CC -o $output_objdir/$soname $libobjs $compiler_flags $deplibs -Wl,-dll~linknames='
-+ archive_expsym_cmds='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then
-+ sed -n -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' -e '1\\\!p' < $export_symbols > $output_objdir/$soname.exp;
-+ else
-+ sed -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' < $export_symbols > $output_objdir/$soname.exp;
-+ fi~
-+ $CC -o $tool_output_objdir$soname $libobjs $compiler_flags $deplibs "@$tool_output_objdir$soname.exp" -Wl,-DLL,-IMPLIB:"$tool_output_objdir$libname.dll.lib"~
-+ linknames='
-+ # The linker will not automatically build a static lib if we build a DLL.
-+ # _LT_TAGVAR(old_archive_from_new_cmds, )='true'
-+ enable_shared_with_static_runtimes=yes
-+ export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1,DATA/'\'' | $SED -e '\''/^[AITW][ ]/s/.*[ ]//'\'' | sort | uniq > $export_symbols'
-+ # Don't use ranlib
-+ old_postinstall_cmds='chmod 644 $oldlib'
-+ postlink_cmds='lt_outputfile="@OUTPUT@"~
-+ lt_tool_outputfile="@TOOL_OUTPUT@"~
-+ case $lt_outputfile in
-+ *.exe|*.EXE) ;;
-+ *)
-+ lt_outputfile="$lt_outputfile.exe"
-+ lt_tool_outputfile="$lt_tool_outputfile.exe"
-+ ;;
-+ esac~
-+ if test "$MANIFEST_TOOL" != ":" && test -f "$lt_outputfile.manifest"; then
-+ $MANIFEST_TOOL -manifest "$lt_tool_outputfile.manifest" -outputresource:"$lt_tool_outputfile" || exit 1;
-+ $RM "$lt_outputfile.manifest";
-+ fi'
-+ ;;
-+ *)
-+ # Assume MSVC wrapper
-+ hardcode_libdir_flag_spec=' '
-+ allow_undefined_flag=unsupported
-+ # Tell ltmain to make .lib files, not .a files.
-+ libext=lib
-+ # Tell ltmain to make .dll files, not .so files.
-+ shrext_cmds=".dll"
-+ # FIXME: Setting linknames here is a bad hack.
-+ archive_cmds='$CC -o $lib $libobjs $compiler_flags `func_echo_all "$deplibs" | $SED '\''s/ -lc$//'\''` -link -dll~linknames='
-+ # The linker will automatically build a .lib file if we build a DLL.
-+ old_archive_from_new_cmds='true'
-+ # FIXME: Should let the user specify the lib program.
-+ old_archive_cmds='lib -OUT:$oldlib$oldobjs$old_deplibs'
-+ enable_shared_with_static_runtimes=yes
-+ ;;
-+ esac
- ;;
-
- darwin* | rhapsody*)
-@@ -8950,7 +9528,7 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
-
- # FreeBSD 3 and greater uses gcc -shared to do shared libraries.
- freebsd* | dragonfly*)
-- archive_cmds='$CC -shared -o $lib $libobjs $deplibs $compiler_flags'
-+ archive_cmds='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags'
- hardcode_libdir_flag_spec='-R$libdir'
- hardcode_direct=yes
- hardcode_shlibpath_var=no
-@@ -8958,7 +9536,7 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
-
- hpux9*)
- if test "$GCC" = yes; then
-- archive_cmds='$RM $output_objdir/$soname~$CC -shared -fPIC ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $libobjs $deplibs $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib'
-+ archive_cmds='$RM $output_objdir/$soname~$CC -shared $pic_flag ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $libobjs $deplibs $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib'
- else
- archive_cmds='$RM $output_objdir/$soname~$LD -b +b $install_libdir -o $output_objdir/$soname $libobjs $deplibs $linker_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib'
- fi
-@@ -8974,7 +9552,7 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
-
- hpux10*)
- if test "$GCC" = yes && test "$with_gnu_ld" = no; then
-- archive_cmds='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'
-+ archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'
- else
- archive_cmds='$LD -b +h $soname +b $install_libdir -o $lib $libobjs $deplibs $linker_flags'
- fi
-@@ -8998,10 +9576,10 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
- archive_cmds='$CC -shared ${wl}+h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags'
- ;;
- ia64*)
-- archive_cmds='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags'
-+ archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags'
- ;;
- *)
-- archive_cmds='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'
-+ archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'
- ;;
- esac
- else
-@@ -9080,23 +9658,36 @@ fi
-
- irix5* | irix6* | nonstopux*)
- if test "$GCC" = yes; then
-- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
-+ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
- # Try to use the -exported_symbol ld option, if it does not
- # work, assume that -exports_file does not work either and
- # implicitly export all symbols.
-- save_LDFLAGS="$LDFLAGS"
-- LDFLAGS="$LDFLAGS -shared ${wl}-exported_symbol ${wl}foo ${wl}-update_registry ${wl}/dev/null"
-- cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-+ # This should be the same for all languages, so no per-tag cache variable.
-+ { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the $host_os linker accepts -exported_symbol" >&5
-+$as_echo_n "checking whether the $host_os linker accepts -exported_symbol... " >&6; }
-+if ${lt_cv_irix_exported_symbol+:} false; then :
-+ $as_echo_n "(cached) " >&6
-+else
-+ save_LDFLAGS="$LDFLAGS"
-+ LDFLAGS="$LDFLAGS -shared ${wl}-exported_symbol ${wl}foo ${wl}-update_registry ${wl}/dev/null"
-+ cat confdefs.h - <<_ACEOF >conftest.$ac_ext
- /* end confdefs.h. */
--int foo(void) {}
-+int foo (void) { return 0; }
- _ACEOF
- if ac_fn_c_try_link "$LINENO"; then :
-- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations ${wl}-exports_file ${wl}$export_symbols -o $lib'
--
-+ lt_cv_irix_exported_symbol=yes
-+else
-+ lt_cv_irix_exported_symbol=no
- fi
- rm -f core conftest.err conftest.$ac_objext \
- conftest$ac_exeext conftest.$ac_ext
-- LDFLAGS="$save_LDFLAGS"
-+ LDFLAGS="$save_LDFLAGS"
-+fi
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_irix_exported_symbol" >&5
-+$as_echo "$lt_cv_irix_exported_symbol" >&6; }
-+ if test "$lt_cv_irix_exported_symbol" = yes; then
-+ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations ${wl}-exports_file ${wl}$export_symbols -o $lib'
-+ fi
- else
- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib'
- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -exports_file $export_symbols -o $lib'
-@@ -9181,7 +9772,7 @@ rm -f core conftest.err conftest.$ac_objext \
- osf4* | osf5*) # as osf3* with the addition of -msym flag
- if test "$GCC" = yes; then
- allow_undefined_flag=' ${wl}-expect_unresolved ${wl}\*'
-- archive_cmds='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
-+ archive_cmds='$CC -shared${allow_undefined_flag} $pic_flag $libobjs $deplibs $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
- hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir'
- else
- allow_undefined_flag=' -expect_unresolved \*'
-@@ -9200,9 +9791,9 @@ rm -f core conftest.err conftest.$ac_objext \
- no_undefined_flag=' -z defs'
- if test "$GCC" = yes; then
- wlarc='${wl}'
-- archive_cmds='$CC -shared ${wl}-z ${wl}text ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags'
-+ archive_cmds='$CC -shared $pic_flag ${wl}-z ${wl}text ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags'
- archive_expsym_cmds='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~
-- $CC -shared ${wl}-z ${wl}text ${wl}-M ${wl}$lib.exp ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp'
-+ $CC -shared $pic_flag ${wl}-z ${wl}text ${wl}-M ${wl}$lib.exp ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp'
- else
- case `$CC -V 2>&1` in
- *"Compilers 5.0"*)
-@@ -9778,8 +10369,9 @@ cygwin* | mingw* | pw32* | cegcc*)
- need_version=no
- need_lib_prefix=no
-
-- case $GCC,$host_os in
-- yes,cygwin* | yes,mingw* | yes,pw32* | yes,cegcc*)
-+ case $GCC,$cc_basename in
-+ yes,*)
-+ # gcc
- library_names_spec='$libname.dll.a'
- # DLL is installed to $(libdir)/../bin by postinstall_cmds
- postinstall_cmds='base_file=`basename \${file}`~
-@@ -9812,13 +10404,71 @@ cygwin* | mingw* | pw32* | cegcc*)
- library_names_spec='`echo ${libname} | sed -e 's/^lib/pw/'``echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}'
- ;;
- esac
-+ dynamic_linker='Win32 ld.exe'
-+ ;;
-+
-+ *,cl*)
-+ # Native MSVC
-+ libname_spec='$name'
-+ soname_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}'
-+ library_names_spec='${libname}.dll.lib'
-+
-+ case $build_os in
-+ mingw*)
-+ sys_lib_search_path_spec=
-+ lt_save_ifs=$IFS
-+ IFS=';'
-+ for lt_path in $LIB
-+ do
-+ IFS=$lt_save_ifs
-+ # Let DOS variable expansion print the short 8.3 style file name.
-+ lt_path=`cd "$lt_path" 2>/dev/null && cmd //C "for %i in (".") do @echo %~si"`
-+ sys_lib_search_path_spec="$sys_lib_search_path_spec $lt_path"
-+ done
-+ IFS=$lt_save_ifs
-+ # Convert to MSYS style.
-+ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | sed -e 's|\\\\|/|g' -e 's| \\([a-zA-Z]\\):| /\\1|g' -e 's|^ ||'`
-+ ;;
-+ cygwin*)
-+ # Convert to unix form, then to dos form, then back to unix form
-+ # but this time dos style (no spaces!) so that the unix form looks
-+ # like /cygdrive/c/PROGRA~1:/cygdr...
-+ sys_lib_search_path_spec=`cygpath --path --unix "$LIB"`
-+ sys_lib_search_path_spec=`cygpath --path --dos "$sys_lib_search_path_spec" 2>/dev/null`
-+ sys_lib_search_path_spec=`cygpath --path --unix "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"`
-+ ;;
-+ *)
-+ sys_lib_search_path_spec="$LIB"
-+ if $ECHO "$sys_lib_search_path_spec" | $GREP ';[c-zC-Z]:/' >/dev/null; then
-+ # It is most probably a Windows format PATH.
-+ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e 's/;/ /g'`
-+ else
-+ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"`
-+ fi
-+ # FIXME: find the short name or the path components, as spaces are
-+ # common. (e.g. "Program Files" -> "PROGRA~1")
-+ ;;
-+ esac
-+
-+ # DLL is installed to $(libdir)/../bin by postinstall_cmds
-+ postinstall_cmds='base_file=`basename \${file}`~
-+ dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\${base_file}'\''i; echo \$dlname'\''`~
-+ dldir=$destdir/`dirname \$dlpath`~
-+ test -d \$dldir || mkdir -p \$dldir~
-+ $install_prog $dir/$dlname \$dldir/$dlname'
-+ postuninstall_cmds='dldll=`$SHELL 2>&1 -c '\''. $file; echo \$dlname'\''`~
-+ dlpath=$dir/\$dldll~
-+ $RM \$dlpath'
-+ shlibpath_overrides_runpath=yes
-+ dynamic_linker='Win32 link.exe'
- ;;
-
- *)
-+ # Assume MSVC wrapper
- library_names_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext} $libname.lib'
-+ dynamic_linker='Win32 ld.exe'
- ;;
- esac
-- dynamic_linker='Win32 ld.exe'
- # FIXME: first we should search . and the directory the executable is in
- shlibpath_var=PATH
- ;;
-@@ -10696,7 +11346,7 @@ else
- lt_dlunknown=0; lt_dlno_uscore=1; lt_dlneed_uscore=2
- lt_status=$lt_dlunknown
- cat > conftest.$ac_ext <<_LT_EOF
--#line 10699 "configure"
-+#line $LINENO "configure"
- #include "confdefs.h"
-
- #if HAVE_DLFCN_H
-@@ -10740,10 +11390,10 @@ else
- /* When -fvisbility=hidden is used, assume the code has been annotated
- correspondingly for the symbols needed. */
- #if defined(__GNUC__) && (((__GNUC__ == 3) && (__GNUC_MINOR__ >= 3)) || (__GNUC__ > 3))
--void fnord () __attribute__((visibility("default")));
-+int fnord () __attribute__((visibility("default")));
- #endif
-
--void fnord () { int i=42; }
-+int fnord () { return 42; }
- int main ()
- {
- void *self = dlopen (0, LT_DLGLOBAL|LT_DLLAZY_OR_NOW);
-@@ -10802,7 +11452,7 @@ else
- lt_dlunknown=0; lt_dlno_uscore=1; lt_dlneed_uscore=2
- lt_status=$lt_dlunknown
- cat > conftest.$ac_ext <<_LT_EOF
--#line 10805 "configure"
-+#line $LINENO "configure"
- #include "confdefs.h"
-
- #if HAVE_DLFCN_H
-@@ -10846,10 +11496,10 @@ else
- /* When -fvisbility=hidden is used, assume the code has been annotated
- correspondingly for the symbols needed. */
- #if defined(__GNUC__) && (((__GNUC__ == 3) && (__GNUC_MINOR__ >= 3)) || (__GNUC__ > 3))
--void fnord () __attribute__((visibility("default")));
-+int fnord () __attribute__((visibility("default")));
- #endif
-
--void fnord () { int i=42; }
-+int fnord () { return 42; }
- int main ()
- {
- void *self = dlopen (0, LT_DLGLOBAL|LT_DLLAZY_OR_NOW);
-@@ -14832,13 +15482,20 @@ exeext='`$ECHO "$exeext" | $SED "$delay_single_quote_subst"`'
- lt_unset='`$ECHO "$lt_unset" | $SED "$delay_single_quote_subst"`'
- lt_SP2NL='`$ECHO "$lt_SP2NL" | $SED "$delay_single_quote_subst"`'
- lt_NL2SP='`$ECHO "$lt_NL2SP" | $SED "$delay_single_quote_subst"`'
-+lt_cv_to_host_file_cmd='`$ECHO "$lt_cv_to_host_file_cmd" | $SED "$delay_single_quote_subst"`'
-+lt_cv_to_tool_file_cmd='`$ECHO "$lt_cv_to_tool_file_cmd" | $SED "$delay_single_quote_subst"`'
- reload_flag='`$ECHO "$reload_flag" | $SED "$delay_single_quote_subst"`'
- reload_cmds='`$ECHO "$reload_cmds" | $SED "$delay_single_quote_subst"`'
- OBJDUMP='`$ECHO "$OBJDUMP" | $SED "$delay_single_quote_subst"`'
- deplibs_check_method='`$ECHO "$deplibs_check_method" | $SED "$delay_single_quote_subst"`'
- file_magic_cmd='`$ECHO "$file_magic_cmd" | $SED "$delay_single_quote_subst"`'
-+file_magic_glob='`$ECHO "$file_magic_glob" | $SED "$delay_single_quote_subst"`'
-+want_nocaseglob='`$ECHO "$want_nocaseglob" | $SED "$delay_single_quote_subst"`'
-+DLLTOOL='`$ECHO "$DLLTOOL" | $SED "$delay_single_quote_subst"`'
-+sharedlib_from_linklib_cmd='`$ECHO "$sharedlib_from_linklib_cmd" | $SED "$delay_single_quote_subst"`'
- AR='`$ECHO "$AR" | $SED "$delay_single_quote_subst"`'
- AR_FLAGS='`$ECHO "$AR_FLAGS" | $SED "$delay_single_quote_subst"`'
-+archiver_list_spec='`$ECHO "$archiver_list_spec" | $SED "$delay_single_quote_subst"`'
- STRIP='`$ECHO "$STRIP" | $SED "$delay_single_quote_subst"`'
- RANLIB='`$ECHO "$RANLIB" | $SED "$delay_single_quote_subst"`'
- old_postinstall_cmds='`$ECHO "$old_postinstall_cmds" | $SED "$delay_single_quote_subst"`'
-@@ -14853,14 +15510,17 @@ lt_cv_sys_global_symbol_pipe='`$ECHO "$lt_cv_sys_global_symbol_pipe" | $SED "$de
- lt_cv_sys_global_symbol_to_cdecl='`$ECHO "$lt_cv_sys_global_symbol_to_cdecl" | $SED "$delay_single_quote_subst"`'
- lt_cv_sys_global_symbol_to_c_name_address='`$ECHO "$lt_cv_sys_global_symbol_to_c_name_address" | $SED "$delay_single_quote_subst"`'
- lt_cv_sys_global_symbol_to_c_name_address_lib_prefix='`$ECHO "$lt_cv_sys_global_symbol_to_c_name_address_lib_prefix" | $SED "$delay_single_quote_subst"`'
-+nm_file_list_spec='`$ECHO "$nm_file_list_spec" | $SED "$delay_single_quote_subst"`'
-+lt_sysroot='`$ECHO "$lt_sysroot" | $SED "$delay_single_quote_subst"`'
- objdir='`$ECHO "$objdir" | $SED "$delay_single_quote_subst"`'
- MAGIC_CMD='`$ECHO "$MAGIC_CMD" | $SED "$delay_single_quote_subst"`'
- lt_prog_compiler_no_builtin_flag='`$ECHO "$lt_prog_compiler_no_builtin_flag" | $SED "$delay_single_quote_subst"`'
--lt_prog_compiler_wl='`$ECHO "$lt_prog_compiler_wl" | $SED "$delay_single_quote_subst"`'
- lt_prog_compiler_pic='`$ECHO "$lt_prog_compiler_pic" | $SED "$delay_single_quote_subst"`'
-+lt_prog_compiler_wl='`$ECHO "$lt_prog_compiler_wl" | $SED "$delay_single_quote_subst"`'
- lt_prog_compiler_static='`$ECHO "$lt_prog_compiler_static" | $SED "$delay_single_quote_subst"`'
- lt_cv_prog_compiler_c_o='`$ECHO "$lt_cv_prog_compiler_c_o" | $SED "$delay_single_quote_subst"`'
- need_locks='`$ECHO "$need_locks" | $SED "$delay_single_quote_subst"`'
-+MANIFEST_TOOL='`$ECHO "$MANIFEST_TOOL" | $SED "$delay_single_quote_subst"`'
- DSYMUTIL='`$ECHO "$DSYMUTIL" | $SED "$delay_single_quote_subst"`'
- NMEDIT='`$ECHO "$NMEDIT" | $SED "$delay_single_quote_subst"`'
- LIPO='`$ECHO "$LIPO" | $SED "$delay_single_quote_subst"`'
-@@ -14893,12 +15553,12 @@ hardcode_shlibpath_var='`$ECHO "$hardcode_shlibpath_var" | $SED "$delay_single_q
- hardcode_automatic='`$ECHO "$hardcode_automatic" | $SED "$delay_single_quote_subst"`'
- inherit_rpath='`$ECHO "$inherit_rpath" | $SED "$delay_single_quote_subst"`'
- link_all_deplibs='`$ECHO "$link_all_deplibs" | $SED "$delay_single_quote_subst"`'
--fix_srcfile_path='`$ECHO "$fix_srcfile_path" | $SED "$delay_single_quote_subst"`'
- always_export_symbols='`$ECHO "$always_export_symbols" | $SED "$delay_single_quote_subst"`'
- export_symbols_cmds='`$ECHO "$export_symbols_cmds" | $SED "$delay_single_quote_subst"`'
- exclude_expsyms='`$ECHO "$exclude_expsyms" | $SED "$delay_single_quote_subst"`'
- include_expsyms='`$ECHO "$include_expsyms" | $SED "$delay_single_quote_subst"`'
- prelink_cmds='`$ECHO "$prelink_cmds" | $SED "$delay_single_quote_subst"`'
-+postlink_cmds='`$ECHO "$postlink_cmds" | $SED "$delay_single_quote_subst"`'
- file_list_spec='`$ECHO "$file_list_spec" | $SED "$delay_single_quote_subst"`'
- variables_saved_for_relink='`$ECHO "$variables_saved_for_relink" | $SED "$delay_single_quote_subst"`'
- need_lib_prefix='`$ECHO "$need_lib_prefix" | $SED "$delay_single_quote_subst"`'
-@@ -14953,8 +15613,13 @@ reload_flag \
- OBJDUMP \
- deplibs_check_method \
- file_magic_cmd \
-+file_magic_glob \
-+want_nocaseglob \
-+DLLTOOL \
-+sharedlib_from_linklib_cmd \
- AR \
- AR_FLAGS \
-+archiver_list_spec \
- STRIP \
- RANLIB \
- CC \
-@@ -14964,12 +15629,14 @@ lt_cv_sys_global_symbol_pipe \
- lt_cv_sys_global_symbol_to_cdecl \
- lt_cv_sys_global_symbol_to_c_name_address \
- lt_cv_sys_global_symbol_to_c_name_address_lib_prefix \
-+nm_file_list_spec \
- lt_prog_compiler_no_builtin_flag \
--lt_prog_compiler_wl \
- lt_prog_compiler_pic \
-+lt_prog_compiler_wl \
- lt_prog_compiler_static \
- lt_cv_prog_compiler_c_o \
- need_locks \
-+MANIFEST_TOOL \
- DSYMUTIL \
- NMEDIT \
- LIPO \
-@@ -14985,7 +15652,6 @@ no_undefined_flag \
- hardcode_libdir_flag_spec \
- hardcode_libdir_flag_spec_ld \
- hardcode_libdir_separator \
--fix_srcfile_path \
- exclude_expsyms \
- include_expsyms \
- file_list_spec \
-@@ -15021,6 +15687,7 @@ module_cmds \
- module_expsym_cmds \
- export_symbols_cmds \
- prelink_cmds \
-+postlink_cmds \
- postinstall_cmds \
- postuninstall_cmds \
- finish_cmds \
-@@ -15793,7 +16460,8 @@ $as_echo X"$file" |
- # NOTE: Changes made to this file will be lost: look at ltmain.sh.
- #
- # Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005,
--# 2006, 2007, 2008, 2009 Free Software Foundation, Inc.
-+# 2006, 2007, 2008, 2009, 2010 Free Software Foundation,
-+# Inc.
- # Written by Gordon Matzigkeit, 1996
- #
- # This file is part of GNU Libtool.
-@@ -15896,19 +16564,42 @@ SP2NL=$lt_lt_SP2NL
- # turn newlines into spaces.
- NL2SP=$lt_lt_NL2SP
-
-+# convert \$build file names to \$host format.
-+to_host_file_cmd=$lt_cv_to_host_file_cmd
-+
-+# convert \$build files to toolchain format.
-+to_tool_file_cmd=$lt_cv_to_tool_file_cmd
-+
- # An object symbol dumper.
- OBJDUMP=$lt_OBJDUMP
-
- # Method to check whether dependent libraries are shared objects.
- deplibs_check_method=$lt_deplibs_check_method
-
--# Command to use when deplibs_check_method == "file_magic".
-+# Command to use when deplibs_check_method = "file_magic".
- file_magic_cmd=$lt_file_magic_cmd
-
-+# How to find potential files when deplibs_check_method = "file_magic".
-+file_magic_glob=$lt_file_magic_glob
-+
-+# Find potential files using nocaseglob when deplibs_check_method = "file_magic".
-+want_nocaseglob=$lt_want_nocaseglob
-+
-+# DLL creation program.
-+DLLTOOL=$lt_DLLTOOL
-+
-+# Command to associate shared and link libraries.
-+sharedlib_from_linklib_cmd=$lt_sharedlib_from_linklib_cmd
-+
- # The archiver.
- AR=$lt_AR
-+
-+# Flags to create an archive.
- AR_FLAGS=$lt_AR_FLAGS
-
-+# How to feed a file listing to the archiver.
-+archiver_list_spec=$lt_archiver_list_spec
-+
- # A symbol stripping program.
- STRIP=$lt_STRIP
-
-@@ -15938,6 +16629,12 @@ global_symbol_to_c_name_address=$lt_lt_cv_sys_global_symbol_to_c_name_address
- # Transform the output of nm in a C name address pair when lib prefix is needed.
- global_symbol_to_c_name_address_lib_prefix=$lt_lt_cv_sys_global_symbol_to_c_name_address_lib_prefix
-
-+# Specify filename containing input files for \$NM.
-+nm_file_list_spec=$lt_nm_file_list_spec
-+
-+# The root where to search for dependent libraries,and in which our libraries should be installed.
-+lt_sysroot=$lt_sysroot
-+
- # The name of the directory that contains temporary libtool files.
- objdir=$objdir
-
-@@ -15947,6 +16644,9 @@ MAGIC_CMD=$MAGIC_CMD
- # Must we lock files when doing compilation?
- need_locks=$lt_need_locks
-
-+# Manifest tool.
-+MANIFEST_TOOL=$lt_MANIFEST_TOOL
-+
- # Tool to manipulate archived DWARF debug symbol files on Mac OS X.
- DSYMUTIL=$lt_DSYMUTIL
-
-@@ -16061,12 +16761,12 @@ with_gcc=$GCC
- # Compiler flag to turn off builtin functions.
- no_builtin_flag=$lt_lt_prog_compiler_no_builtin_flag
-
--# How to pass a linker flag through the compiler.
--wl=$lt_lt_prog_compiler_wl
--
- # Additional compiler flags for building library objects.
- pic_flag=$lt_lt_prog_compiler_pic
-
-+# How to pass a linker flag through the compiler.
-+wl=$lt_lt_prog_compiler_wl
-+
- # Compiler flag to prevent dynamic linking.
- link_static_flag=$lt_lt_prog_compiler_static
-
-@@ -16153,9 +16853,6 @@ inherit_rpath=$inherit_rpath
- # Whether libtool must link a program against all its dependency libraries.
- link_all_deplibs=$link_all_deplibs
-
--# Fix the shell variable \$srcfile for the compiler.
--fix_srcfile_path=$lt_fix_srcfile_path
--
- # Set to "yes" if exported symbols are required.
- always_export_symbols=$always_export_symbols
-
-@@ -16171,6 +16868,9 @@ include_expsyms=$lt_include_expsyms
- # Commands necessary for linking programs (against libraries) with templates.
- prelink_cmds=$lt_prelink_cmds
-
-+# Commands necessary for finishing linking programs.
-+postlink_cmds=$lt_postlink_cmds
-+
- # Specify filename containing input files.
- file_list_spec=$lt_file_list_spec
-
-@@ -16203,210 +16903,169 @@ ltmain="$ac_aux_dir/ltmain.sh"
- # if finds mixed CR/LF and LF-only lines. Since sed operates in
- # text mode, it properly converts lines to CR/LF. This bash problem
- # is reportedly fixed, but why not run on old versions too?
-- sed '/^# Generated shell functions inserted here/q' "$ltmain" >> "$cfgfile" \
-- || (rm -f "$cfgfile"; exit 1)
--
-- case $xsi_shell in
-- yes)
-- cat << \_LT_EOF >> "$cfgfile"
--
--# func_dirname file append nondir_replacement
--# Compute the dirname of FILE. If nonempty, add APPEND to the result,
--# otherwise set result to NONDIR_REPLACEMENT.
--func_dirname ()
--{
-- case ${1} in
-- */*) func_dirname_result="${1%/*}${2}" ;;
-- * ) func_dirname_result="${3}" ;;
-- esac
--}
--
--# func_basename file
--func_basename ()
--{
-- func_basename_result="${1##*/}"
--}
--
--# func_dirname_and_basename file append nondir_replacement
--# perform func_basename and func_dirname in a single function
--# call:
--# dirname: Compute the dirname of FILE. If nonempty,
--# add APPEND to the result, otherwise set result
--# to NONDIR_REPLACEMENT.
--# value returned in "$func_dirname_result"
--# basename: Compute filename of FILE.
--# value retuned in "$func_basename_result"
--# Implementation must be kept synchronized with func_dirname
--# and func_basename. For efficiency, we do not delegate to
--# those functions but instead duplicate the functionality here.
--func_dirname_and_basename ()
--{
-- case ${1} in
-- */*) func_dirname_result="${1%/*}${2}" ;;
-- * ) func_dirname_result="${3}" ;;
-- esac
-- func_basename_result="${1##*/}"
--}
--
--# func_stripname prefix suffix name
--# strip PREFIX and SUFFIX off of NAME.
--# PREFIX and SUFFIX must not contain globbing or regex special
--# characters, hashes, percent signs, but SUFFIX may contain a leading
--# dot (in which case that matches only a dot).
--func_stripname ()
--{
-- # pdksh 5.2.14 does not do ${X%$Y} correctly if both X and Y are
-- # positional parameters, so assign one to ordinary parameter first.
-- func_stripname_result=${3}
-- func_stripname_result=${func_stripname_result#"${1}"}
-- func_stripname_result=${func_stripname_result%"${2}"}
--}
--
--# func_opt_split
--func_opt_split ()
--{
-- func_opt_split_opt=${1%%=*}
-- func_opt_split_arg=${1#*=}
--}
--
--# func_lo2o object
--func_lo2o ()
--{
-- case ${1} in
-- *.lo) func_lo2o_result=${1%.lo}.${objext} ;;
-- *) func_lo2o_result=${1} ;;
-- esac
--}
--
--# func_xform libobj-or-source
--func_xform ()
--{
-- func_xform_result=${1%.*}.lo
--}
--
--# func_arith arithmetic-term...
--func_arith ()
--{
-- func_arith_result=$(( $* ))
--}
--
--# func_len string
--# STRING may not start with a hyphen.
--func_len ()
--{
-- func_len_result=${#1}
--}
--
--_LT_EOF
-- ;;
-- *) # Bourne compatible functions.
-- cat << \_LT_EOF >> "$cfgfile"
--
--# func_dirname file append nondir_replacement
--# Compute the dirname of FILE. If nonempty, add APPEND to the result,
--# otherwise set result to NONDIR_REPLACEMENT.
--func_dirname ()
--{
-- # Extract subdirectory from the argument.
-- func_dirname_result=`$ECHO "${1}" | $SED "$dirname"`
-- if test "X$func_dirname_result" = "X${1}"; then
-- func_dirname_result="${3}"
-- else
-- func_dirname_result="$func_dirname_result${2}"
-- fi
--}
--
--# func_basename file
--func_basename ()
--{
-- func_basename_result=`$ECHO "${1}" | $SED "$basename"`
--}
--
--
--# func_stripname prefix suffix name
--# strip PREFIX and SUFFIX off of NAME.
--# PREFIX and SUFFIX must not contain globbing or regex special
--# characters, hashes, percent signs, but SUFFIX may contain a leading
--# dot (in which case that matches only a dot).
--# func_strip_suffix prefix name
--func_stripname ()
--{
-- case ${2} in
-- .*) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%\\\\${2}\$%%"`;;
-- *) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%${2}\$%%"`;;
-- esac
--}
--
--# sed scripts:
--my_sed_long_opt='1s/^\(-[^=]*\)=.*/\1/;q'
--my_sed_long_arg='1s/^-[^=]*=//'
--
--# func_opt_split
--func_opt_split ()
--{
-- func_opt_split_opt=`$ECHO "${1}" | $SED "$my_sed_long_opt"`
-- func_opt_split_arg=`$ECHO "${1}" | $SED "$my_sed_long_arg"`
--}
--
--# func_lo2o object
--func_lo2o ()
--{
-- func_lo2o_result=`$ECHO "${1}" | $SED "$lo2o"`
--}
--
--# func_xform libobj-or-source
--func_xform ()
--{
-- func_xform_result=`$ECHO "${1}" | $SED 's/\.[^.]*$/.lo/'`
--}
--
--# func_arith arithmetic-term...
--func_arith ()
--{
-- func_arith_result=`expr "$@"`
--}
--
--# func_len string
--# STRING may not start with a hyphen.
--func_len ()
--{
-- func_len_result=`expr "$1" : ".*" 2>/dev/null || echo $max_cmd_len`
--}
--
--_LT_EOF
--esac
--
--case $lt_shell_append in
-- yes)
-- cat << \_LT_EOF >> "$cfgfile"
--
--# func_append var value
--# Append VALUE to the end of shell variable VAR.
--func_append ()
--{
-- eval "$1+=\$2"
--}
--_LT_EOF
-- ;;
-- *)
-- cat << \_LT_EOF >> "$cfgfile"
--
--# func_append var value
--# Append VALUE to the end of shell variable VAR.
--func_append ()
--{
-- eval "$1=\$$1\$2"
--}
--
--_LT_EOF
-- ;;
-- esac
--
--
-- sed -n '/^# Generated shell functions inserted here/,$p' "$ltmain" >> "$cfgfile" \
-- || (rm -f "$cfgfile"; exit 1)
--
-- mv -f "$cfgfile" "$ofile" ||
-+ sed '$q' "$ltmain" >> "$cfgfile" \
-+ || (rm -f "$cfgfile"; exit 1)
-+
-+ if test x"$xsi_shell" = xyes; then
-+ sed -e '/^func_dirname ()$/,/^} # func_dirname /c\
-+func_dirname ()\
-+{\
-+\ case ${1} in\
-+\ */*) func_dirname_result="${1%/*}${2}" ;;\
-+\ * ) func_dirname_result="${3}" ;;\
-+\ esac\
-+} # Extended-shell func_dirname implementation' "$cfgfile" > $cfgfile.tmp \
-+ && mv -f "$cfgfile.tmp" "$cfgfile" \
-+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-+test 0 -eq $? || _lt_function_replace_fail=:
-+
-+
-+ sed -e '/^func_basename ()$/,/^} # func_basename /c\
-+func_basename ()\
-+{\
-+\ func_basename_result="${1##*/}"\
-+} # Extended-shell func_basename implementation' "$cfgfile" > $cfgfile.tmp \
-+ && mv -f "$cfgfile.tmp" "$cfgfile" \
-+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-+test 0 -eq $? || _lt_function_replace_fail=:
-+
-+
-+ sed -e '/^func_dirname_and_basename ()$/,/^} # func_dirname_and_basename /c\
-+func_dirname_and_basename ()\
-+{\
-+\ case ${1} in\
-+\ */*) func_dirname_result="${1%/*}${2}" ;;\
-+\ * ) func_dirname_result="${3}" ;;\
-+\ esac\
-+\ func_basename_result="${1##*/}"\
-+} # Extended-shell func_dirname_and_basename implementation' "$cfgfile" > $cfgfile.tmp \
-+ && mv -f "$cfgfile.tmp" "$cfgfile" \
-+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-+test 0 -eq $? || _lt_function_replace_fail=:
-+
-+
-+ sed -e '/^func_stripname ()$/,/^} # func_stripname /c\
-+func_stripname ()\
-+{\
-+\ # pdksh 5.2.14 does not do ${X%$Y} correctly if both X and Y are\
-+\ # positional parameters, so assign one to ordinary parameter first.\
-+\ func_stripname_result=${3}\
-+\ func_stripname_result=${func_stripname_result#"${1}"}\
-+\ func_stripname_result=${func_stripname_result%"${2}"}\
-+} # Extended-shell func_stripname implementation' "$cfgfile" > $cfgfile.tmp \
-+ && mv -f "$cfgfile.tmp" "$cfgfile" \
-+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-+test 0 -eq $? || _lt_function_replace_fail=:
-+
-+
-+ sed -e '/^func_split_long_opt ()$/,/^} # func_split_long_opt /c\
-+func_split_long_opt ()\
-+{\
-+\ func_split_long_opt_name=${1%%=*}\
-+\ func_split_long_opt_arg=${1#*=}\
-+} # Extended-shell func_split_long_opt implementation' "$cfgfile" > $cfgfile.tmp \
-+ && mv -f "$cfgfile.tmp" "$cfgfile" \
-+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-+test 0 -eq $? || _lt_function_replace_fail=:
-+
-+
-+ sed -e '/^func_split_short_opt ()$/,/^} # func_split_short_opt /c\
-+func_split_short_opt ()\
-+{\
-+\ func_split_short_opt_arg=${1#??}\
-+\ func_split_short_opt_name=${1%"$func_split_short_opt_arg"}\
-+} # Extended-shell func_split_short_opt implementation' "$cfgfile" > $cfgfile.tmp \
-+ && mv -f "$cfgfile.tmp" "$cfgfile" \
-+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-+test 0 -eq $? || _lt_function_replace_fail=:
-+
-+
-+ sed -e '/^func_lo2o ()$/,/^} # func_lo2o /c\
-+func_lo2o ()\
-+{\
-+\ case ${1} in\
-+\ *.lo) func_lo2o_result=${1%.lo}.${objext} ;;\
-+\ *) func_lo2o_result=${1} ;;\
-+\ esac\
-+} # Extended-shell func_lo2o implementation' "$cfgfile" > $cfgfile.tmp \
-+ && mv -f "$cfgfile.tmp" "$cfgfile" \
-+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-+test 0 -eq $? || _lt_function_replace_fail=:
-+
-+
-+ sed -e '/^func_xform ()$/,/^} # func_xform /c\
-+func_xform ()\
-+{\
-+ func_xform_result=${1%.*}.lo\
-+} # Extended-shell func_xform implementation' "$cfgfile" > $cfgfile.tmp \
-+ && mv -f "$cfgfile.tmp" "$cfgfile" \
-+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-+test 0 -eq $? || _lt_function_replace_fail=:
-+
-+
-+ sed -e '/^func_arith ()$/,/^} # func_arith /c\
-+func_arith ()\
-+{\
-+ func_arith_result=$(( $* ))\
-+} # Extended-shell func_arith implementation' "$cfgfile" > $cfgfile.tmp \
-+ && mv -f "$cfgfile.tmp" "$cfgfile" \
-+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-+test 0 -eq $? || _lt_function_replace_fail=:
-+
-+
-+ sed -e '/^func_len ()$/,/^} # func_len /c\
-+func_len ()\
-+{\
-+ func_len_result=${#1}\
-+} # Extended-shell func_len implementation' "$cfgfile" > $cfgfile.tmp \
-+ && mv -f "$cfgfile.tmp" "$cfgfile" \
-+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-+test 0 -eq $? || _lt_function_replace_fail=:
-+
-+fi
-+
-+if test x"$lt_shell_append" = xyes; then
-+ sed -e '/^func_append ()$/,/^} # func_append /c\
-+func_append ()\
-+{\
-+ eval "${1}+=\\${2}"\
-+} # Extended-shell func_append implementation' "$cfgfile" > $cfgfile.tmp \
-+ && mv -f "$cfgfile.tmp" "$cfgfile" \
-+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-+test 0 -eq $? || _lt_function_replace_fail=:
-+
-+
-+ sed -e '/^func_append_quoted ()$/,/^} # func_append_quoted /c\
-+func_append_quoted ()\
-+{\
-+\ func_quote_for_eval "${2}"\
-+\ eval "${1}+=\\\\ \\$func_quote_for_eval_result"\
-+} # Extended-shell func_append_quoted implementation' "$cfgfile" > $cfgfile.tmp \
-+ && mv -f "$cfgfile.tmp" "$cfgfile" \
-+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-+test 0 -eq $? || _lt_function_replace_fail=:
-+
-+
-+ # Save a `func_append' function call where possible by direct use of '+='
-+ sed -e 's%func_append \([a-zA-Z_]\{1,\}\) "%\1+="%g' $cfgfile > $cfgfile.tmp \
-+ && mv -f "$cfgfile.tmp" "$cfgfile" \
-+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-+ test 0 -eq $? || _lt_function_replace_fail=:
-+else
-+ # Save a `func_append' function call even when '+=' is not available
-+ sed -e 's%func_append \([a-zA-Z_]\{1,\}\) "%\1="$\1%g' $cfgfile > $cfgfile.tmp \
-+ && mv -f "$cfgfile.tmp" "$cfgfile" \
-+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-+ test 0 -eq $? || _lt_function_replace_fail=:
-+fi
-+
-+if test x"$_lt_function_replace_fail" = x":"; then
-+ { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: Unable to substitute extended shell functions in $ofile" >&5
-+$as_echo "$as_me: WARNING: Unable to substitute extended shell functions in $ofile" >&2;}
-+fi
-+
-+
-+ mv -f "$cfgfile" "$ofile" ||
- (rm -f "$ofile" && cp "$cfgfile" "$ofile" && rm -f "$cfgfile")
- chmod +x "$ofile"
-
-diff --git a/gprof/configure b/gprof/configure
-index a7f788f0411..e7703613024 100755
---- a/gprof/configure
-+++ b/gprof/configure
-@@ -663,8 +663,11 @@ OTOOL
- LIPO
- NMEDIT
- DSYMUTIL
-+MANIFEST_TOOL
- RANLIB
-+ac_ct_AR
- AR
-+DLLTOOL
- OBJDUMP
- LN_S
- NM
-@@ -781,6 +784,7 @@ enable_static
- with_pic
- enable_fast_install
- with_gnu_ld
-+with_libtool_sysroot
- enable_libtool_lock
- enable_plugins
- enable_largefile
-@@ -1443,6 +1447,8 @@ Optional Packages:
- --with-pic try to use only PIC/non-PIC objects [default=use
- both]
- --with-gnu-ld assume the C compiler uses GNU ld [default=no]
-+ --with-libtool-sysroot=DIR Search for dependent libraries within DIR
-+ (or the compiler's sysroot if not specified).
-
- Some influential environment variables:
- CC C compiler command
-@@ -4510,8 +4516,8 @@ esac
-
-
-
--macro_version='2.2.7a'
--macro_revision='1.3134'
-+macro_version='2.4'
-+macro_revision='1.3293'
-
-
-
-@@ -4551,7 +4557,7 @@ ECHO=$ECHO$ECHO$ECHO$ECHO$ECHO$ECHO
- { $as_echo "$as_me:${as_lineno-$LINENO}: checking how to print strings" >&5
- $as_echo_n "checking how to print strings... " >&6; }
- # Test print first, because it will be a builtin if present.
--if test "X`print -r -- -n 2>/dev/null`" = X-n && \
-+if test "X`( print -r -- -n ) 2>/dev/null`" = X-n && \
- test "X`print -r -- $ECHO 2>/dev/null`" = "X$ECHO"; then
- ECHO='print -r --'
- elif test "X`printf %s $ECHO 2>/dev/null`" = "X$ECHO"; then
-@@ -5238,8 +5244,8 @@ $as_echo_n "checking whether the shell understands some XSI constructs... " >&6;
- # Try some XSI features
- xsi_shell=no
- ( _lt_dummy="a/b/c"
-- test "${_lt_dummy##*/},${_lt_dummy%/*},"${_lt_dummy%"$_lt_dummy"}, \
-- = c,a/b,, \
-+ test "${_lt_dummy##*/},${_lt_dummy%/*},${_lt_dummy#??}"${_lt_dummy%"$_lt_dummy"}, \
-+ = c,a/b,b/c, \
- && eval 'test $(( 1 + 1 )) -eq 2 \
- && test "${#_lt_dummy}" -eq 5' ) >/dev/null 2>&1 \
- && xsi_shell=yes
-@@ -5288,6 +5294,80 @@ esac
-
-
-
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to convert $build file names to $host format" >&5
-+$as_echo_n "checking how to convert $build file names to $host format... " >&6; }
-+if ${lt_cv_to_host_file_cmd+:} false; then :
-+ $as_echo_n "(cached) " >&6
-+else
-+ case $host in
-+ *-*-mingw* )
-+ case $build in
-+ *-*-mingw* ) # actually msys
-+ lt_cv_to_host_file_cmd=func_convert_file_msys_to_w32
-+ ;;
-+ *-*-cygwin* )
-+ lt_cv_to_host_file_cmd=func_convert_file_cygwin_to_w32
-+ ;;
-+ * ) # otherwise, assume *nix
-+ lt_cv_to_host_file_cmd=func_convert_file_nix_to_w32
-+ ;;
-+ esac
-+ ;;
-+ *-*-cygwin* )
-+ case $build in
-+ *-*-mingw* ) # actually msys
-+ lt_cv_to_host_file_cmd=func_convert_file_msys_to_cygwin
-+ ;;
-+ *-*-cygwin* )
-+ lt_cv_to_host_file_cmd=func_convert_file_noop
-+ ;;
-+ * ) # otherwise, assume *nix
-+ lt_cv_to_host_file_cmd=func_convert_file_nix_to_cygwin
-+ ;;
-+ esac
-+ ;;
-+ * ) # unhandled hosts (and "normal" native builds)
-+ lt_cv_to_host_file_cmd=func_convert_file_noop
-+ ;;
-+esac
-+
-+fi
-+
-+to_host_file_cmd=$lt_cv_to_host_file_cmd
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_to_host_file_cmd" >&5
-+$as_echo "$lt_cv_to_host_file_cmd" >&6; }
-+
-+
-+
-+
-+
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to convert $build file names to toolchain format" >&5
-+$as_echo_n "checking how to convert $build file names to toolchain format... " >&6; }
-+if ${lt_cv_to_tool_file_cmd+:} false; then :
-+ $as_echo_n "(cached) " >&6
-+else
-+ #assume ordinary cross tools, or native build.
-+lt_cv_to_tool_file_cmd=func_convert_file_noop
-+case $host in
-+ *-*-mingw* )
-+ case $build in
-+ *-*-mingw* ) # actually msys
-+ lt_cv_to_tool_file_cmd=func_convert_file_msys_to_w32
-+ ;;
-+ esac
-+ ;;
-+esac
-+
-+fi
-+
-+to_tool_file_cmd=$lt_cv_to_tool_file_cmd
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_to_tool_file_cmd" >&5
-+$as_echo "$lt_cv_to_tool_file_cmd" >&6; }
-+
-+
-+
-+
-+
- { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $LD option to reload object files" >&5
- $as_echo_n "checking for $LD option to reload object files... " >&6; }
- if ${lt_cv_ld_reload_flag+:} false; then :
-@@ -5304,6 +5384,11 @@ case $reload_flag in
- esac
- reload_cmds='$LD$reload_flag -o $output$reload_objs'
- case $host_os in
-+ cygwin* | mingw* | pw32* | cegcc*)
-+ if test "$GCC" != yes; then
-+ reload_cmds=false
-+ fi
-+ ;;
- darwin*)
- if test "$GCC" = yes; then
- reload_cmds='$LTCC $LTCFLAGS -nostdlib ${wl}-r -o $output$reload_objs'
-@@ -5472,7 +5557,8 @@ mingw* | pw32*)
- lt_cv_deplibs_check_method='file_magic ^x86 archive import|^x86 DLL'
- lt_cv_file_magic_cmd='func_win32_libid'
- else
-- lt_cv_deplibs_check_method='file_magic file format pei*-i386(.*architecture: i386)?'
-+ # Keep this pattern in sync with the one in func_win32_libid.
-+ lt_cv_deplibs_check_method='file_magic file format (pei*-i386(.*architecture: i386)?|pe-arm-wince|pe-x86-64)'
- lt_cv_file_magic_cmd='$OBJDUMP -f'
- fi
- ;;
-@@ -5626,6 +5712,21 @@ esac
- fi
- { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_deplibs_check_method" >&5
- $as_echo "$lt_cv_deplibs_check_method" >&6; }
-+
-+file_magic_glob=
-+want_nocaseglob=no
-+if test "$build" = "$host"; then
-+ case $host_os in
-+ mingw* | pw32*)
-+ if ( shopt | grep nocaseglob ) >/dev/null 2>&1; then
-+ want_nocaseglob=yes
-+ else
-+ file_magic_glob=`echo aAbBcCdDeEfFgGhHiIjJkKlLmMnNoOpPqQrRsStTuUvVwWxXyYzZ | $SED -e "s/\(..\)/s\/[\1]\/[\1]\/g;/g"`
-+ fi
-+ ;;
-+ esac
-+fi
-+
- file_magic_cmd=$lt_cv_file_magic_cmd
- deplibs_check_method=$lt_cv_deplibs_check_method
- test -z "$deplibs_check_method" && deplibs_check_method=unknown
-@@ -5641,6 +5742,157 @@ test -z "$deplibs_check_method" && deplibs_check_method=unknown
-
-
-
-+
-+
-+
-+
-+
-+
-+
-+
-+
-+
-+if test -n "$ac_tool_prefix"; then
-+ # Extract the first word of "${ac_tool_prefix}dlltool", so it can be a program name with args.
-+set dummy ${ac_tool_prefix}dlltool; ac_word=$2
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-+$as_echo_n "checking for $ac_word... " >&6; }
-+if ${ac_cv_prog_DLLTOOL+:} false; then :
-+ $as_echo_n "(cached) " >&6
-+else
-+ if test -n "$DLLTOOL"; then
-+ ac_cv_prog_DLLTOOL="$DLLTOOL" # Let the user override the test.
-+else
-+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-+for as_dir in $PATH
-+do
-+ IFS=$as_save_IFS
-+ test -z "$as_dir" && as_dir=.
-+ for ac_exec_ext in '' $ac_executable_extensions; do
-+ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
-+ ac_cv_prog_DLLTOOL="${ac_tool_prefix}dlltool"
-+ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
-+ break 2
-+ fi
-+done
-+ done
-+IFS=$as_save_IFS
-+
-+fi
-+fi
-+DLLTOOL=$ac_cv_prog_DLLTOOL
-+if test -n "$DLLTOOL"; then
-+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $DLLTOOL" >&5
-+$as_echo "$DLLTOOL" >&6; }
-+else
-+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-+$as_echo "no" >&6; }
-+fi
-+
-+
-+fi
-+if test -z "$ac_cv_prog_DLLTOOL"; then
-+ ac_ct_DLLTOOL=$DLLTOOL
-+ # Extract the first word of "dlltool", so it can be a program name with args.
-+set dummy dlltool; ac_word=$2
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-+$as_echo_n "checking for $ac_word... " >&6; }
-+if ${ac_cv_prog_ac_ct_DLLTOOL+:} false; then :
-+ $as_echo_n "(cached) " >&6
-+else
-+ if test -n "$ac_ct_DLLTOOL"; then
-+ ac_cv_prog_ac_ct_DLLTOOL="$ac_ct_DLLTOOL" # Let the user override the test.
-+else
-+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-+for as_dir in $PATH
-+do
-+ IFS=$as_save_IFS
-+ test -z "$as_dir" && as_dir=.
-+ for ac_exec_ext in '' $ac_executable_extensions; do
-+ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
-+ ac_cv_prog_ac_ct_DLLTOOL="dlltool"
-+ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
-+ break 2
-+ fi
-+done
-+ done
-+IFS=$as_save_IFS
-+
-+fi
-+fi
-+ac_ct_DLLTOOL=$ac_cv_prog_ac_ct_DLLTOOL
-+if test -n "$ac_ct_DLLTOOL"; then
-+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_DLLTOOL" >&5
-+$as_echo "$ac_ct_DLLTOOL" >&6; }
-+else
-+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-+$as_echo "no" >&6; }
-+fi
-+
-+ if test "x$ac_ct_DLLTOOL" = x; then
-+ DLLTOOL="false"
-+ else
-+ case $cross_compiling:$ac_tool_warned in
-+yes:)
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
-+$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
-+ac_tool_warned=yes ;;
-+esac
-+ DLLTOOL=$ac_ct_DLLTOOL
-+ fi
-+else
-+ DLLTOOL="$ac_cv_prog_DLLTOOL"
-+fi
-+
-+test -z "$DLLTOOL" && DLLTOOL=dlltool
-+
-+
-+
-+
-+
-+
-+
-+
-+
-+
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to associate runtime and link libraries" >&5
-+$as_echo_n "checking how to associate runtime and link libraries... " >&6; }
-+if ${lt_cv_sharedlib_from_linklib_cmd+:} false; then :
-+ $as_echo_n "(cached) " >&6
-+else
-+ lt_cv_sharedlib_from_linklib_cmd='unknown'
-+
-+case $host_os in
-+cygwin* | mingw* | pw32* | cegcc*)
-+ # two different shell functions defined in ltmain.sh
-+ # decide which to use based on capabilities of $DLLTOOL
-+ case `$DLLTOOL --help 2>&1` in
-+ *--identify-strict*)
-+ lt_cv_sharedlib_from_linklib_cmd=func_cygming_dll_for_implib
-+ ;;
-+ *)
-+ lt_cv_sharedlib_from_linklib_cmd=func_cygming_dll_for_implib_fallback
-+ ;;
-+ esac
-+ ;;
-+*)
-+ # fallback: assume linklib IS sharedlib
-+ lt_cv_sharedlib_from_linklib_cmd="$ECHO"
-+ ;;
-+esac
-+
-+fi
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_sharedlib_from_linklib_cmd" >&5
-+$as_echo "$lt_cv_sharedlib_from_linklib_cmd" >&6; }
-+sharedlib_from_linklib_cmd=$lt_cv_sharedlib_from_linklib_cmd
-+test -z "$sharedlib_from_linklib_cmd" && sharedlib_from_linklib_cmd=$ECHO
-+
-+
-+
-+
-+
-+
-+
- plugin_option=
- plugin_names="liblto_plugin.so liblto_plugin-0.dll cyglto_plugin-0.dll"
- for plugin in $plugin_names; do
-@@ -5655,8 +5907,10 @@ for plugin in $plugin_names; do
- done
-
- if test -n "$ac_tool_prefix"; then
-- # Extract the first word of "${ac_tool_prefix}ar", so it can be a program name with args.
--set dummy ${ac_tool_prefix}ar; ac_word=$2
-+ for ac_prog in ar
-+ do
-+ # Extract the first word of "$ac_tool_prefix$ac_prog", so it can be a program name with args.
-+set dummy $ac_tool_prefix$ac_prog; ac_word=$2
- { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
- $as_echo_n "checking for $ac_word... " >&6; }
- if ${ac_cv_prog_AR+:} false; then :
-@@ -5672,7 +5926,7 @@ do
- test -z "$as_dir" && as_dir=.
- for ac_exec_ext in '' $ac_executable_extensions; do
- if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
-- ac_cv_prog_AR="${ac_tool_prefix}ar"
-+ ac_cv_prog_AR="$ac_tool_prefix$ac_prog"
- $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
- break 2
- fi
-@@ -5692,11 +5946,15 @@ $as_echo "no" >&6; }
- fi
-
-
-+ test -n "$AR" && break
-+ done
- fi
--if test -z "$ac_cv_prog_AR"; then
-+if test -z "$AR"; then
- ac_ct_AR=$AR
-- # Extract the first word of "ar", so it can be a program name with args.
--set dummy ar; ac_word=$2
-+ for ac_prog in ar
-+do
-+ # Extract the first word of "$ac_prog", so it can be a program name with args.
-+set dummy $ac_prog; ac_word=$2
- { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
- $as_echo_n "checking for $ac_word... " >&6; }
- if ${ac_cv_prog_ac_ct_AR+:} false; then :
-@@ -5712,7 +5970,7 @@ do
- test -z "$as_dir" && as_dir=.
- for ac_exec_ext in '' $ac_executable_extensions; do
- if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
-- ac_cv_prog_ac_ct_AR="ar"
-+ ac_cv_prog_ac_ct_AR="$ac_prog"
- $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
- break 2
- fi
-@@ -5731,6 +5989,10 @@ else
- $as_echo "no" >&6; }
- fi
-
-+
-+ test -n "$ac_ct_AR" && break
-+done
-+
- if test "x$ac_ct_AR" = x; then
- AR="false"
- else
-@@ -5742,25 +6004,19 @@ ac_tool_warned=yes ;;
- esac
- AR=$ac_ct_AR
- fi
--else
-- AR="$ac_cv_prog_AR"
- fi
-
--test -z "$AR" && AR=ar
--if test -n "$plugin_option"; then
-- if $AR --help 2>&1 | grep -q "\--plugin"; then
-- touch conftest.c
-- $AR $plugin_option rc conftest.a conftest.c
-- if test "$?" != 0; then
-- { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: Failed: $AR $plugin_option rc" >&5
-+ touch conftest.c
-+ $AR $plugin_option rc conftest.a conftest.c
-+ if test "$?" != 0; then
-+ { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: Failed: $AR $plugin_option rc" >&5
- $as_echo "$as_me: WARNING: Failed: $AR $plugin_option rc" >&2;}
-- else
-- AR="$AR $plugin_option"
-- fi
-- rm -f conftest.*
-+ else
-+ AR="$AR $plugin_option"
- fi
--fi
--test -z "$AR_FLAGS" && AR_FLAGS=cru
-+ rm -f conftest.*
-+: ${AR=ar}
-+: ${AR_FLAGS=cru}
-
-
-
-@@ -5772,6 +6028,64 @@ test -z "$AR_FLAGS" && AR_FLAGS=cru
-
-
-
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for archiver @FILE support" >&5
-+$as_echo_n "checking for archiver @FILE support... " >&6; }
-+if ${lt_cv_ar_at_file+:} false; then :
-+ $as_echo_n "(cached) " >&6
-+else
-+ lt_cv_ar_at_file=no
-+ cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-+/* end confdefs.h. */
-+
-+int
-+main ()
-+{
-+
-+ ;
-+ return 0;
-+}
-+_ACEOF
-+if ac_fn_c_try_compile "$LINENO"; then :
-+ echo conftest.$ac_objext > conftest.lst
-+ lt_ar_try='$AR $AR_FLAGS libconftest.a @conftest.lst >&5'
-+ { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$lt_ar_try\""; } >&5
-+ (eval $lt_ar_try) 2>&5
-+ ac_status=$?
-+ $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
-+ test $ac_status = 0; }
-+ if test "$ac_status" -eq 0; then
-+ # Ensure the archiver fails upon bogus file names.
-+ rm -f conftest.$ac_objext libconftest.a
-+ { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$lt_ar_try\""; } >&5
-+ (eval $lt_ar_try) 2>&5
-+ ac_status=$?
-+ $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
-+ test $ac_status = 0; }
-+ if test "$ac_status" -ne 0; then
-+ lt_cv_ar_at_file=@
-+ fi
-+ fi
-+ rm -f conftest.* libconftest.a
-+
-+fi
-+rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
-+
-+fi
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_ar_at_file" >&5
-+$as_echo "$lt_cv_ar_at_file" >&6; }
-+
-+if test "x$lt_cv_ar_at_file" = xno; then
-+ archiver_list_spec=
-+else
-+ archiver_list_spec=$lt_cv_ar_at_file
-+fi
-+
-+
-+
-+
-+
-+
-+
- if test -n "$ac_tool_prefix"; then
- # Extract the first word of "${ac_tool_prefix}strip", so it can be a program name with args.
- set dummy ${ac_tool_prefix}strip; ac_word=$2
-@@ -6111,8 +6425,8 @@ esac
- lt_cv_sys_global_symbol_to_cdecl="sed -n -e 's/^T .* \(.*\)$/extern int \1();/p' -e 's/^$symcode* .* \(.*\)$/extern char \1;/p'"
-
- # Transform an extracted symbol line into symbol name and symbol address
--lt_cv_sys_global_symbol_to_c_name_address="sed -n -e 's/^: \([^ ]*\) $/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"\2\", (void *) \&\2},/p'"
--lt_cv_sys_global_symbol_to_c_name_address_lib_prefix="sed -n -e 's/^: \([^ ]*\) $/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \(lib[^ ]*\)$/ {\"\2\", (void *) \&\2},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"lib\2\", (void *) \&\2},/p'"
-+lt_cv_sys_global_symbol_to_c_name_address="sed -n -e 's/^: \([^ ]*\)[ ]*$/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"\2\", (void *) \&\2},/p'"
-+lt_cv_sys_global_symbol_to_c_name_address_lib_prefix="sed -n -e 's/^: \([^ ]*\)[ ]*$/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \(lib[^ ]*\)$/ {\"\2\", (void *) \&\2},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"lib\2\", (void *) \&\2},/p'"
-
- # Handle CRLF in mingw tool chain
- opt_cr=
-@@ -6148,6 +6462,7 @@ for ac_symprfx in "" "_"; do
- else
- lt_cv_sys_global_symbol_pipe="sed -n -e 's/^.*[ ]\($symcode$symcode*\)[ ][ ]*$ac_symprfx$sympat$opt_cr$/$symxfrm/p'"
- fi
-+ lt_cv_sys_global_symbol_pipe="$lt_cv_sys_global_symbol_pipe | sed '/ __gnu_lto/d'"
-
- # Check to see that the pipe works correctly.
- pipe_works=no
-@@ -6189,6 +6504,18 @@ _LT_EOF
- if $GREP ' nm_test_var$' "$nlist" >/dev/null; then
- if $GREP ' nm_test_func$' "$nlist" >/dev/null; then
- cat <<_LT_EOF > conftest.$ac_ext
-+/* Keep this code in sync between libtool.m4, ltmain, lt_system.h, and tests. */
-+#if defined(_WIN32) || defined(__CYGWIN__) || defined(_WIN32_WCE)
-+/* DATA imports from DLLs on WIN32 con't be const, because runtime
-+ relocations are performed -- see ld's documentation on pseudo-relocs. */
-+# define LT_DLSYM_CONST
-+#elif defined(__osf__)
-+/* This system does not cope well with relocations in const data. */
-+# define LT_DLSYM_CONST
-+#else
-+# define LT_DLSYM_CONST const
-+#endif
-+
- #ifdef __cplusplus
- extern "C" {
- #endif
-@@ -6200,7 +6527,7 @@ _LT_EOF
- cat <<_LT_EOF >> conftest.$ac_ext
-
- /* The mapping between symbol names and symbols. */
--const struct {
-+LT_DLSYM_CONST struct {
- const char *name;
- void *address;
- }
-@@ -6226,8 +6553,8 @@ static const void *lt_preloaded_setup() {
- _LT_EOF
- # Now try linking the two files.
- mv conftest.$ac_objext conftstm.$ac_objext
-- lt_save_LIBS="$LIBS"
-- lt_save_CFLAGS="$CFLAGS"
-+ lt_globsym_save_LIBS=$LIBS
-+ lt_globsym_save_CFLAGS=$CFLAGS
- LIBS="conftstm.$ac_objext"
- CFLAGS="$CFLAGS$lt_prog_compiler_no_builtin_flag"
- if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_link\""; } >&5
-@@ -6237,8 +6564,8 @@ _LT_EOF
- test $ac_status = 0; } && test -s conftest${ac_exeext}; then
- pipe_works=yes
- fi
-- LIBS="$lt_save_LIBS"
-- CFLAGS="$lt_save_CFLAGS"
-+ LIBS=$lt_globsym_save_LIBS
-+ CFLAGS=$lt_globsym_save_CFLAGS
- else
- echo "cannot find nm_test_func in $nlist" >&5
- fi
-@@ -6275,6 +6602,18 @@ else
- $as_echo "ok" >&6; }
- fi
-
-+# Response file support.
-+if test "$lt_cv_nm_interface" = "MS dumpbin"; then
-+ nm_file_list_spec='@'
-+elif $NM --help 2>/dev/null | grep '[@]FILE' >/dev/null; then
-+ nm_file_list_spec='@'
-+fi
-+
-+
-+
-+
-+
-+
-
-
-
-@@ -6291,6 +6630,43 @@ fi
-
-
-
-+
-+
-+
-+
-+
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for sysroot" >&5
-+$as_echo_n "checking for sysroot... " >&6; }
-+
-+# Check whether --with-libtool-sysroot was given.
-+if test "${with_libtool_sysroot+set}" = set; then :
-+ withval=$with_libtool_sysroot;
-+else
-+ with_libtool_sysroot=no
-+fi
-+
-+
-+lt_sysroot=
-+case ${with_libtool_sysroot} in #(
-+ yes)
-+ if test "$GCC" = yes; then
-+ lt_sysroot=`$CC --print-sysroot 2>/dev/null`
-+ fi
-+ ;; #(
-+ /*)
-+ lt_sysroot=`echo "$with_libtool_sysroot" | sed -e "$sed_quote_subst"`
-+ ;; #(
-+ no|'')
-+ ;; #(
-+ *)
-+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: ${with_libtool_sysroot}" >&5
-+$as_echo "${with_libtool_sysroot}" >&6; }
-+ as_fn_error $? "The sysroot must be an absolute path." "$LINENO" 5
-+ ;;
-+esac
-+
-+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: ${lt_sysroot:-no}" >&5
-+$as_echo "${lt_sysroot:-no}" >&6; }
-
-
-
-@@ -6502,6 +6878,123 @@ esac
-
- need_locks="$enable_libtool_lock"
-
-+if test -n "$ac_tool_prefix"; then
-+ # Extract the first word of "${ac_tool_prefix}mt", so it can be a program name with args.
-+set dummy ${ac_tool_prefix}mt; ac_word=$2
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-+$as_echo_n "checking for $ac_word... " >&6; }
-+if ${ac_cv_prog_MANIFEST_TOOL+:} false; then :
-+ $as_echo_n "(cached) " >&6
-+else
-+ if test -n "$MANIFEST_TOOL"; then
-+ ac_cv_prog_MANIFEST_TOOL="$MANIFEST_TOOL" # Let the user override the test.
-+else
-+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-+for as_dir in $PATH
-+do
-+ IFS=$as_save_IFS
-+ test -z "$as_dir" && as_dir=.
-+ for ac_exec_ext in '' $ac_executable_extensions; do
-+ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
-+ ac_cv_prog_MANIFEST_TOOL="${ac_tool_prefix}mt"
-+ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
-+ break 2
-+ fi
-+done
-+ done
-+IFS=$as_save_IFS
-+
-+fi
-+fi
-+MANIFEST_TOOL=$ac_cv_prog_MANIFEST_TOOL
-+if test -n "$MANIFEST_TOOL"; then
-+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $MANIFEST_TOOL" >&5
-+$as_echo "$MANIFEST_TOOL" >&6; }
-+else
-+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-+$as_echo "no" >&6; }
-+fi
-+
-+
-+fi
-+if test -z "$ac_cv_prog_MANIFEST_TOOL"; then
-+ ac_ct_MANIFEST_TOOL=$MANIFEST_TOOL
-+ # Extract the first word of "mt", so it can be a program name with args.
-+set dummy mt; ac_word=$2
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-+$as_echo_n "checking for $ac_word... " >&6; }
-+if ${ac_cv_prog_ac_ct_MANIFEST_TOOL+:} false; then :
-+ $as_echo_n "(cached) " >&6
-+else
-+ if test -n "$ac_ct_MANIFEST_TOOL"; then
-+ ac_cv_prog_ac_ct_MANIFEST_TOOL="$ac_ct_MANIFEST_TOOL" # Let the user override the test.
-+else
-+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-+for as_dir in $PATH
-+do
-+ IFS=$as_save_IFS
-+ test -z "$as_dir" && as_dir=.
-+ for ac_exec_ext in '' $ac_executable_extensions; do
-+ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
-+ ac_cv_prog_ac_ct_MANIFEST_TOOL="mt"
-+ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
-+ break 2
-+ fi
-+done
-+ done
-+IFS=$as_save_IFS
-+
-+fi
-+fi
-+ac_ct_MANIFEST_TOOL=$ac_cv_prog_ac_ct_MANIFEST_TOOL
-+if test -n "$ac_ct_MANIFEST_TOOL"; then
-+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_MANIFEST_TOOL" >&5
-+$as_echo "$ac_ct_MANIFEST_TOOL" >&6; }
-+else
-+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-+$as_echo "no" >&6; }
-+fi
-+
-+ if test "x$ac_ct_MANIFEST_TOOL" = x; then
-+ MANIFEST_TOOL=":"
-+ else
-+ case $cross_compiling:$ac_tool_warned in
-+yes:)
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
-+$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
-+ac_tool_warned=yes ;;
-+esac
-+ MANIFEST_TOOL=$ac_ct_MANIFEST_TOOL
-+ fi
-+else
-+ MANIFEST_TOOL="$ac_cv_prog_MANIFEST_TOOL"
-+fi
-+
-+test -z "$MANIFEST_TOOL" && MANIFEST_TOOL=mt
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking if $MANIFEST_TOOL is a manifest tool" >&5
-+$as_echo_n "checking if $MANIFEST_TOOL is a manifest tool... " >&6; }
-+if ${lt_cv_path_mainfest_tool+:} false; then :
-+ $as_echo_n "(cached) " >&6
-+else
-+ lt_cv_path_mainfest_tool=no
-+ echo "$as_me:$LINENO: $MANIFEST_TOOL '-?'" >&5
-+ $MANIFEST_TOOL '-?' 2>conftest.err > conftest.out
-+ cat conftest.err >&5
-+ if $GREP 'Manifest Tool' conftest.out > /dev/null; then
-+ lt_cv_path_mainfest_tool=yes
-+ fi
-+ rm -f conftest*
-+fi
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_path_mainfest_tool" >&5
-+$as_echo "$lt_cv_path_mainfest_tool" >&6; }
-+if test "x$lt_cv_path_mainfest_tool" != xyes; then
-+ MANIFEST_TOOL=:
-+fi
-+
-+
-+
-+
-+
-
- case $host_os in
- rhapsody* | darwin*)
-@@ -7065,6 +7558,8 @@ _LT_EOF
- $LTCC $LTCFLAGS -c -o conftest.o conftest.c 2>&5
- echo "$AR cru libconftest.a conftest.o" >&5
- $AR cru libconftest.a conftest.o 2>&5
-+ echo "$RANLIB libconftest.a" >&5
-+ $RANLIB libconftest.a 2>&5
- cat > conftest.c << _LT_EOF
- int main() { return 0;}
- _LT_EOF
-@@ -7647,8 +8142,6 @@ fi
- lt_prog_compiler_pic=
- lt_prog_compiler_static=
-
--{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $compiler option to produce PIC" >&5
--$as_echo_n "checking for $compiler option to produce PIC... " >&6; }
-
- if test "$GCC" = yes; then
- lt_prog_compiler_wl='-Wl,'
-@@ -7814,6 +8307,12 @@ $as_echo_n "checking for $compiler option to produce PIC... " >&6; }
- lt_prog_compiler_pic='--shared'
- lt_prog_compiler_static='--static'
- ;;
-+ nagfor*)
-+ # NAG Fortran compiler
-+ lt_prog_compiler_wl='-Wl,-Wl,,'
-+ lt_prog_compiler_pic='-PIC'
-+ lt_prog_compiler_static='-Bstatic'
-+ ;;
- pgcc* | pgf77* | pgf90* | pgf95* | pgfortran*)
- # Portland Group compilers (*not* the Pentium gcc compiler,
- # which looks to be a dead project)
-@@ -7876,7 +8375,7 @@ $as_echo_n "checking for $compiler option to produce PIC... " >&6; }
- lt_prog_compiler_pic='-KPIC'
- lt_prog_compiler_static='-Bstatic'
- case $cc_basename in
-- f77* | f90* | f95*)
-+ f77* | f90* | f95* | sunf77* | sunf90* | sunf95*)
- lt_prog_compiler_wl='-Qoption ld ';;
- *)
- lt_prog_compiler_wl='-Wl,';;
-@@ -7933,13 +8432,17 @@ case $host_os in
- lt_prog_compiler_pic="$lt_prog_compiler_pic -DPIC"
- ;;
- esac
--{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_prog_compiler_pic" >&5
--$as_echo "$lt_prog_compiler_pic" >&6; }
--
--
--
--
-
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $compiler option to produce PIC" >&5
-+$as_echo_n "checking for $compiler option to produce PIC... " >&6; }
-+if ${lt_cv_prog_compiler_pic+:} false; then :
-+ $as_echo_n "(cached) " >&6
-+else
-+ lt_cv_prog_compiler_pic=$lt_prog_compiler_pic
-+fi
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_pic" >&5
-+$as_echo "$lt_cv_prog_compiler_pic" >&6; }
-+lt_prog_compiler_pic=$lt_cv_prog_compiler_pic
-
- #
- # Check to make sure the PIC flag actually works.
-@@ -8000,6 +8503,11 @@ fi
-
-
-
-+
-+
-+
-+
-+
- #
- # Check to make sure the static flag actually works.
- #
-@@ -8350,7 +8858,8 @@ _LT_EOF
- allow_undefined_flag=unsupported
- always_export_symbols=no
- enable_shared_with_static_runtimes=yes
-- export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1 DATA/'\'' | $SED -e '\''/^[AITW][ ]/s/.*[ ]//'\'' | sort | uniq > $export_symbols'
-+ export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1 DATA/;s/^.*[ ]__nm__\([^ ]*\)[ ][^ ]*/\1 DATA/;/^I[ ]/d;/^[AITW][ ]/s/.* //'\'' | sort | uniq > $export_symbols'
-+ exclude_expsyms='[_]+GLOBAL_OFFSET_TABLE_|[_]+GLOBAL__[FID]_.*|[_]+head_[A-Za-z0-9_]+_dll|[A-Za-z0-9_]+_dll_iname'
-
- if $LD --help 2>&1 | $GREP 'auto-import' > /dev/null; then
- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib'
-@@ -8449,12 +8958,12 @@ _LT_EOF
- whole_archive_flag_spec='--whole-archive$convenience --no-whole-archive'
- hardcode_libdir_flag_spec=
- hardcode_libdir_flag_spec_ld='-rpath $libdir'
-- archive_cmds='$LD -shared $libobjs $deplibs $compiler_flags -soname $soname -o $lib'
-+ archive_cmds='$LD -shared $libobjs $deplibs $linker_flags -soname $soname -o $lib'
- if test "x$supports_anon_versioning" = xyes; then
- archive_expsym_cmds='echo "{ global:" > $output_objdir/$libname.ver~
- cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~
- echo "local: *; };" >> $output_objdir/$libname.ver~
-- $LD -shared $libobjs $deplibs $compiler_flags -soname $soname -version-script $output_objdir/$libname.ver -o $lib'
-+ $LD -shared $libobjs $deplibs $linker_flags -soname $soname -version-script $output_objdir/$libname.ver -o $lib'
- fi
- ;;
- esac
-@@ -8468,8 +8977,8 @@ _LT_EOF
- archive_cmds='$LD -Bshareable $libobjs $deplibs $linker_flags -o $lib'
- wlarc=
- else
-- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
-- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
-+ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
-+ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
- fi
- ;;
-
-@@ -8487,8 +8996,8 @@ _LT_EOF
-
- _LT_EOF
- elif $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then
-- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
-- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
-+ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
-+ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
- else
- ld_shlibs=no
- fi
-@@ -8534,8 +9043,8 @@ _LT_EOF
-
- *)
- if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then
-- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
-- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
-+ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
-+ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
- else
- ld_shlibs=no
- fi
-@@ -8665,7 +9174,13 @@ _LT_EOF
- allow_undefined_flag='-berok'
- # Determine the default libpath from the value encoded in an
- # empty executable.
-- cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-+ if test "${lt_cv_aix_libpath+set}" = set; then
-+ aix_libpath=$lt_cv_aix_libpath
-+else
-+ if ${lt_cv_aix_libpath_+:} false; then :
-+ $as_echo_n "(cached) " >&6
-+else
-+ cat confdefs.h - <<_ACEOF >conftest.$ac_ext
- /* end confdefs.h. */
-
- int
-@@ -8678,22 +9193,29 @@ main ()
- _ACEOF
- if ac_fn_c_try_link "$LINENO"; then :
-
--lt_aix_libpath_sed='
-- /Import File Strings/,/^$/ {
-- /^0/ {
-- s/^0 *\(.*\)$/\1/
-- p
-- }
-- }'
--aix_libpath=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
--# Check for a 64-bit object if we didn't find anything.
--if test -z "$aix_libpath"; then
-- aix_libpath=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
--fi
-+ lt_aix_libpath_sed='
-+ /Import File Strings/,/^$/ {
-+ /^0/ {
-+ s/^0 *\([^ ]*\) *$/\1/
-+ p
-+ }
-+ }'
-+ lt_cv_aix_libpath_=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
-+ # Check for a 64-bit object if we didn't find anything.
-+ if test -z "$lt_cv_aix_libpath_"; then
-+ lt_cv_aix_libpath_=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
-+ fi
- fi
- rm -f core conftest.err conftest.$ac_objext \
- conftest$ac_exeext conftest.$ac_ext
--if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
-+ if test -z "$lt_cv_aix_libpath_"; then
-+ lt_cv_aix_libpath_="/usr/lib:/lib"
-+ fi
-+
-+fi
-+
-+ aix_libpath=$lt_cv_aix_libpath_
-+fi
-
- hardcode_libdir_flag_spec='${wl}-blibpath:$libdir:'"$aix_libpath"
- archive_expsym_cmds='$CC -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags `if test "x${allow_undefined_flag}" != "x"; then func_echo_all "${wl}${allow_undefined_flag}"; else :; fi` '"\${wl}$exp_sym_flag:\$export_symbols $shared_flag"
-@@ -8705,7 +9227,13 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
- else
- # Determine the default libpath from the value encoded in an
- # empty executable.
-- cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-+ if test "${lt_cv_aix_libpath+set}" = set; then
-+ aix_libpath=$lt_cv_aix_libpath
-+else
-+ if ${lt_cv_aix_libpath_+:} false; then :
-+ $as_echo_n "(cached) " >&6
-+else
-+ cat confdefs.h - <<_ACEOF >conftest.$ac_ext
- /* end confdefs.h. */
-
- int
-@@ -8718,22 +9246,29 @@ main ()
- _ACEOF
- if ac_fn_c_try_link "$LINENO"; then :
-
--lt_aix_libpath_sed='
-- /Import File Strings/,/^$/ {
-- /^0/ {
-- s/^0 *\(.*\)$/\1/
-- p
-- }
-- }'
--aix_libpath=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
--# Check for a 64-bit object if we didn't find anything.
--if test -z "$aix_libpath"; then
-- aix_libpath=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
--fi
-+ lt_aix_libpath_sed='
-+ /Import File Strings/,/^$/ {
-+ /^0/ {
-+ s/^0 *\([^ ]*\) *$/\1/
-+ p
-+ }
-+ }'
-+ lt_cv_aix_libpath_=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
-+ # Check for a 64-bit object if we didn't find anything.
-+ if test -z "$lt_cv_aix_libpath_"; then
-+ lt_cv_aix_libpath_=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
-+ fi
- fi
- rm -f core conftest.err conftest.$ac_objext \
- conftest$ac_exeext conftest.$ac_ext
--if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
-+ if test -z "$lt_cv_aix_libpath_"; then
-+ lt_cv_aix_libpath_="/usr/lib:/lib"
-+ fi
-+
-+fi
-+
-+ aix_libpath=$lt_cv_aix_libpath_
-+fi
-
- hardcode_libdir_flag_spec='${wl}-blibpath:$libdir:'"$aix_libpath"
- # Warning - without using the other run time loading flags,
-@@ -8778,20 +9313,63 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
- # Microsoft Visual C++.
- # hardcode_libdir_flag_spec is actually meaningless, as there is
- # no search path for DLLs.
-- hardcode_libdir_flag_spec=' '
-- allow_undefined_flag=unsupported
-- # Tell ltmain to make .lib files, not .a files.
-- libext=lib
-- # Tell ltmain to make .dll files, not .so files.
-- shrext_cmds=".dll"
-- # FIXME: Setting linknames here is a bad hack.
-- archive_cmds='$CC -o $lib $libobjs $compiler_flags `func_echo_all "$deplibs" | $SED '\''s/ -lc$//'\''` -link -dll~linknames='
-- # The linker will automatically build a .lib file if we build a DLL.
-- old_archive_from_new_cmds='true'
-- # FIXME: Should let the user specify the lib program.
-- old_archive_cmds='lib -OUT:$oldlib$oldobjs$old_deplibs'
-- fix_srcfile_path='`cygpath -w "$srcfile"`'
-- enable_shared_with_static_runtimes=yes
-+ case $cc_basename in
-+ cl*)
-+ # Native MSVC
-+ hardcode_libdir_flag_spec=' '
-+ allow_undefined_flag=unsupported
-+ always_export_symbols=yes
-+ file_list_spec='@'
-+ # Tell ltmain to make .lib files, not .a files.
-+ libext=lib
-+ # Tell ltmain to make .dll files, not .so files.
-+ shrext_cmds=".dll"
-+ # FIXME: Setting linknames here is a bad hack.
-+ archive_cmds='$CC -o $output_objdir/$soname $libobjs $compiler_flags $deplibs -Wl,-dll~linknames='
-+ archive_expsym_cmds='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then
-+ sed -n -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' -e '1\\\!p' < $export_symbols > $output_objdir/$soname.exp;
-+ else
-+ sed -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' < $export_symbols > $output_objdir/$soname.exp;
-+ fi~
-+ $CC -o $tool_output_objdir$soname $libobjs $compiler_flags $deplibs "@$tool_output_objdir$soname.exp" -Wl,-DLL,-IMPLIB:"$tool_output_objdir$libname.dll.lib"~
-+ linknames='
-+ # The linker will not automatically build a static lib if we build a DLL.
-+ # _LT_TAGVAR(old_archive_from_new_cmds, )='true'
-+ enable_shared_with_static_runtimes=yes
-+ export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1,DATA/'\'' | $SED -e '\''/^[AITW][ ]/s/.*[ ]//'\'' | sort | uniq > $export_symbols'
-+ # Don't use ranlib
-+ old_postinstall_cmds='chmod 644 $oldlib'
-+ postlink_cmds='lt_outputfile="@OUTPUT@"~
-+ lt_tool_outputfile="@TOOL_OUTPUT@"~
-+ case $lt_outputfile in
-+ *.exe|*.EXE) ;;
-+ *)
-+ lt_outputfile="$lt_outputfile.exe"
-+ lt_tool_outputfile="$lt_tool_outputfile.exe"
-+ ;;
-+ esac~
-+ if test "$MANIFEST_TOOL" != ":" && test -f "$lt_outputfile.manifest"; then
-+ $MANIFEST_TOOL -manifest "$lt_tool_outputfile.manifest" -outputresource:"$lt_tool_outputfile" || exit 1;
-+ $RM "$lt_outputfile.manifest";
-+ fi'
-+ ;;
-+ *)
-+ # Assume MSVC wrapper
-+ hardcode_libdir_flag_spec=' '
-+ allow_undefined_flag=unsupported
-+ # Tell ltmain to make .lib files, not .a files.
-+ libext=lib
-+ # Tell ltmain to make .dll files, not .so files.
-+ shrext_cmds=".dll"
-+ # FIXME: Setting linknames here is a bad hack.
-+ archive_cmds='$CC -o $lib $libobjs $compiler_flags `func_echo_all "$deplibs" | $SED '\''s/ -lc$//'\''` -link -dll~linknames='
-+ # The linker will automatically build a .lib file if we build a DLL.
-+ old_archive_from_new_cmds='true'
-+ # FIXME: Should let the user specify the lib program.
-+ old_archive_cmds='lib -OUT:$oldlib$oldobjs$old_deplibs'
-+ enable_shared_with_static_runtimes=yes
-+ ;;
-+ esac
- ;;
-
- darwin* | rhapsody*)
-@@ -8852,7 +9430,7 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
-
- # FreeBSD 3 and greater uses gcc -shared to do shared libraries.
- freebsd* | dragonfly*)
-- archive_cmds='$CC -shared -o $lib $libobjs $deplibs $compiler_flags'
-+ archive_cmds='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags'
- hardcode_libdir_flag_spec='-R$libdir'
- hardcode_direct=yes
- hardcode_shlibpath_var=no
-@@ -8860,7 +9438,7 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
-
- hpux9*)
- if test "$GCC" = yes; then
-- archive_cmds='$RM $output_objdir/$soname~$CC -shared -fPIC ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $libobjs $deplibs $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib'
-+ archive_cmds='$RM $output_objdir/$soname~$CC -shared $pic_flag ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $libobjs $deplibs $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib'
- else
- archive_cmds='$RM $output_objdir/$soname~$LD -b +b $install_libdir -o $output_objdir/$soname $libobjs $deplibs $linker_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib'
- fi
-@@ -8876,7 +9454,7 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
-
- hpux10*)
- if test "$GCC" = yes && test "$with_gnu_ld" = no; then
-- archive_cmds='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'
-+ archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'
- else
- archive_cmds='$LD -b +h $soname +b $install_libdir -o $lib $libobjs $deplibs $linker_flags'
- fi
-@@ -8900,10 +9478,10 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
- archive_cmds='$CC -shared ${wl}+h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags'
- ;;
- ia64*)
-- archive_cmds='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags'
-+ archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags'
- ;;
- *)
-- archive_cmds='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'
-+ archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'
- ;;
- esac
- else
-@@ -8982,23 +9560,36 @@ fi
-
- irix5* | irix6* | nonstopux*)
- if test "$GCC" = yes; then
-- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
-+ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
- # Try to use the -exported_symbol ld option, if it does not
- # work, assume that -exports_file does not work either and
- # implicitly export all symbols.
-- save_LDFLAGS="$LDFLAGS"
-- LDFLAGS="$LDFLAGS -shared ${wl}-exported_symbol ${wl}foo ${wl}-update_registry ${wl}/dev/null"
-- cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-+ # This should be the same for all languages, so no per-tag cache variable.
-+ { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the $host_os linker accepts -exported_symbol" >&5
-+$as_echo_n "checking whether the $host_os linker accepts -exported_symbol... " >&6; }
-+if ${lt_cv_irix_exported_symbol+:} false; then :
-+ $as_echo_n "(cached) " >&6
-+else
-+ save_LDFLAGS="$LDFLAGS"
-+ LDFLAGS="$LDFLAGS -shared ${wl}-exported_symbol ${wl}foo ${wl}-update_registry ${wl}/dev/null"
-+ cat confdefs.h - <<_ACEOF >conftest.$ac_ext
- /* end confdefs.h. */
--int foo(void) {}
-+int foo (void) { return 0; }
- _ACEOF
- if ac_fn_c_try_link "$LINENO"; then :
-- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations ${wl}-exports_file ${wl}$export_symbols -o $lib'
--
-+ lt_cv_irix_exported_symbol=yes
-+else
-+ lt_cv_irix_exported_symbol=no
- fi
- rm -f core conftest.err conftest.$ac_objext \
- conftest$ac_exeext conftest.$ac_ext
-- LDFLAGS="$save_LDFLAGS"
-+ LDFLAGS="$save_LDFLAGS"
-+fi
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_irix_exported_symbol" >&5
-+$as_echo "$lt_cv_irix_exported_symbol" >&6; }
-+ if test "$lt_cv_irix_exported_symbol" = yes; then
-+ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations ${wl}-exports_file ${wl}$export_symbols -o $lib'
-+ fi
- else
- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib'
- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -exports_file $export_symbols -o $lib'
-@@ -9083,7 +9674,7 @@ rm -f core conftest.err conftest.$ac_objext \
- osf4* | osf5*) # as osf3* with the addition of -msym flag
- if test "$GCC" = yes; then
- allow_undefined_flag=' ${wl}-expect_unresolved ${wl}\*'
-- archive_cmds='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
-+ archive_cmds='$CC -shared${allow_undefined_flag} $pic_flag $libobjs $deplibs $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
- hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir'
- else
- allow_undefined_flag=' -expect_unresolved \*'
-@@ -9102,9 +9693,9 @@ rm -f core conftest.err conftest.$ac_objext \
- no_undefined_flag=' -z defs'
- if test "$GCC" = yes; then
- wlarc='${wl}'
-- archive_cmds='$CC -shared ${wl}-z ${wl}text ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags'
-+ archive_cmds='$CC -shared $pic_flag ${wl}-z ${wl}text ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags'
- archive_expsym_cmds='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~
-- $CC -shared ${wl}-z ${wl}text ${wl}-M ${wl}$lib.exp ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp'
-+ $CC -shared $pic_flag ${wl}-z ${wl}text ${wl}-M ${wl}$lib.exp ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp'
- else
- case `$CC -V 2>&1` in
- *"Compilers 5.0"*)
-@@ -9680,8 +10271,9 @@ cygwin* | mingw* | pw32* | cegcc*)
- need_version=no
- need_lib_prefix=no
-
-- case $GCC,$host_os in
-- yes,cygwin* | yes,mingw* | yes,pw32* | yes,cegcc*)
-+ case $GCC,$cc_basename in
-+ yes,*)
-+ # gcc
- library_names_spec='$libname.dll.a'
- # DLL is installed to $(libdir)/../bin by postinstall_cmds
- postinstall_cmds='base_file=`basename \${file}`~
-@@ -9714,13 +10306,71 @@ cygwin* | mingw* | pw32* | cegcc*)
- library_names_spec='`echo ${libname} | sed -e 's/^lib/pw/'``echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}'
- ;;
- esac
-+ dynamic_linker='Win32 ld.exe'
-+ ;;
-+
-+ *,cl*)
-+ # Native MSVC
-+ libname_spec='$name'
-+ soname_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}'
-+ library_names_spec='${libname}.dll.lib'
-+
-+ case $build_os in
-+ mingw*)
-+ sys_lib_search_path_spec=
-+ lt_save_ifs=$IFS
-+ IFS=';'
-+ for lt_path in $LIB
-+ do
-+ IFS=$lt_save_ifs
-+ # Let DOS variable expansion print the short 8.3 style file name.
-+ lt_path=`cd "$lt_path" 2>/dev/null && cmd //C "for %i in (".") do @echo %~si"`
-+ sys_lib_search_path_spec="$sys_lib_search_path_spec $lt_path"
-+ done
-+ IFS=$lt_save_ifs
-+ # Convert to MSYS style.
-+ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | sed -e 's|\\\\|/|g' -e 's| \\([a-zA-Z]\\):| /\\1|g' -e 's|^ ||'`
-+ ;;
-+ cygwin*)
-+ # Convert to unix form, then to dos form, then back to unix form
-+ # but this time dos style (no spaces!) so that the unix form looks
-+ # like /cygdrive/c/PROGRA~1:/cygdr...
-+ sys_lib_search_path_spec=`cygpath --path --unix "$LIB"`
-+ sys_lib_search_path_spec=`cygpath --path --dos "$sys_lib_search_path_spec" 2>/dev/null`
-+ sys_lib_search_path_spec=`cygpath --path --unix "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"`
-+ ;;
-+ *)
-+ sys_lib_search_path_spec="$LIB"
-+ if $ECHO "$sys_lib_search_path_spec" | $GREP ';[c-zC-Z]:/' >/dev/null; then
-+ # It is most probably a Windows format PATH.
-+ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e 's/;/ /g'`
-+ else
-+ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"`
-+ fi
-+ # FIXME: find the short name or the path components, as spaces are
-+ # common. (e.g. "Program Files" -> "PROGRA~1")
-+ ;;
-+ esac
-+
-+ # DLL is installed to $(libdir)/../bin by postinstall_cmds
-+ postinstall_cmds='base_file=`basename \${file}`~
-+ dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\${base_file}'\''i; echo \$dlname'\''`~
-+ dldir=$destdir/`dirname \$dlpath`~
-+ test -d \$dldir || mkdir -p \$dldir~
-+ $install_prog $dir/$dlname \$dldir/$dlname'
-+ postuninstall_cmds='dldll=`$SHELL 2>&1 -c '\''. $file; echo \$dlname'\''`~
-+ dlpath=$dir/\$dldll~
-+ $RM \$dlpath'
-+ shlibpath_overrides_runpath=yes
-+ dynamic_linker='Win32 link.exe'
- ;;
-
- *)
-+ # Assume MSVC wrapper
- library_names_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext} $libname.lib'
-+ dynamic_linker='Win32 ld.exe'
- ;;
- esac
-- dynamic_linker='Win32 ld.exe'
- # FIXME: first we should search . and the directory the executable is in
- shlibpath_var=PATH
- ;;
-@@ -10598,7 +11248,7 @@ else
- lt_dlunknown=0; lt_dlno_uscore=1; lt_dlneed_uscore=2
- lt_status=$lt_dlunknown
- cat > conftest.$ac_ext <<_LT_EOF
--#line 10601 "configure"
-+#line $LINENO "configure"
- #include "confdefs.h"
-
- #if HAVE_DLFCN_H
-@@ -10642,10 +11292,10 @@ else
- /* When -fvisbility=hidden is used, assume the code has been annotated
- correspondingly for the symbols needed. */
- #if defined(__GNUC__) && (((__GNUC__ == 3) && (__GNUC_MINOR__ >= 3)) || (__GNUC__ > 3))
--void fnord () __attribute__((visibility("default")));
-+int fnord () __attribute__((visibility("default")));
- #endif
-
--void fnord () { int i=42; }
-+int fnord () { return 42; }
- int main ()
- {
- void *self = dlopen (0, LT_DLGLOBAL|LT_DLLAZY_OR_NOW);
-@@ -10704,7 +11354,7 @@ else
- lt_dlunknown=0; lt_dlno_uscore=1; lt_dlneed_uscore=2
- lt_status=$lt_dlunknown
- cat > conftest.$ac_ext <<_LT_EOF
--#line 10707 "configure"
-+#line $LINENO "configure"
- #include "confdefs.h"
-
- #if HAVE_DLFCN_H
-@@ -10748,10 +11398,10 @@ else
- /* When -fvisbility=hidden is used, assume the code has been annotated
- correspondingly for the symbols needed. */
- #if defined(__GNUC__) && (((__GNUC__ == 3) && (__GNUC_MINOR__ >= 3)) || (__GNUC__ > 3))
--void fnord () __attribute__((visibility("default")));
-+int fnord () __attribute__((visibility("default")));
- #endif
-
--void fnord () { int i=42; }
-+int fnord () { return 42; }
- int main ()
- {
- void *self = dlopen (0, LT_DLGLOBAL|LT_DLLAZY_OR_NOW);
-@@ -12771,13 +13421,20 @@ exeext='`$ECHO "$exeext" | $SED "$delay_single_quote_subst"`'
- lt_unset='`$ECHO "$lt_unset" | $SED "$delay_single_quote_subst"`'
- lt_SP2NL='`$ECHO "$lt_SP2NL" | $SED "$delay_single_quote_subst"`'
- lt_NL2SP='`$ECHO "$lt_NL2SP" | $SED "$delay_single_quote_subst"`'
-+lt_cv_to_host_file_cmd='`$ECHO "$lt_cv_to_host_file_cmd" | $SED "$delay_single_quote_subst"`'
-+lt_cv_to_tool_file_cmd='`$ECHO "$lt_cv_to_tool_file_cmd" | $SED "$delay_single_quote_subst"`'
- reload_flag='`$ECHO "$reload_flag" | $SED "$delay_single_quote_subst"`'
- reload_cmds='`$ECHO "$reload_cmds" | $SED "$delay_single_quote_subst"`'
- OBJDUMP='`$ECHO "$OBJDUMP" | $SED "$delay_single_quote_subst"`'
- deplibs_check_method='`$ECHO "$deplibs_check_method" | $SED "$delay_single_quote_subst"`'
- file_magic_cmd='`$ECHO "$file_magic_cmd" | $SED "$delay_single_quote_subst"`'
-+file_magic_glob='`$ECHO "$file_magic_glob" | $SED "$delay_single_quote_subst"`'
-+want_nocaseglob='`$ECHO "$want_nocaseglob" | $SED "$delay_single_quote_subst"`'
-+DLLTOOL='`$ECHO "$DLLTOOL" | $SED "$delay_single_quote_subst"`'
-+sharedlib_from_linklib_cmd='`$ECHO "$sharedlib_from_linklib_cmd" | $SED "$delay_single_quote_subst"`'
- AR='`$ECHO "$AR" | $SED "$delay_single_quote_subst"`'
- AR_FLAGS='`$ECHO "$AR_FLAGS" | $SED "$delay_single_quote_subst"`'
-+archiver_list_spec='`$ECHO "$archiver_list_spec" | $SED "$delay_single_quote_subst"`'
- STRIP='`$ECHO "$STRIP" | $SED "$delay_single_quote_subst"`'
- RANLIB='`$ECHO "$RANLIB" | $SED "$delay_single_quote_subst"`'
- old_postinstall_cmds='`$ECHO "$old_postinstall_cmds" | $SED "$delay_single_quote_subst"`'
-@@ -12792,14 +13449,17 @@ lt_cv_sys_global_symbol_pipe='`$ECHO "$lt_cv_sys_global_symbol_pipe" | $SED "$de
- lt_cv_sys_global_symbol_to_cdecl='`$ECHO "$lt_cv_sys_global_symbol_to_cdecl" | $SED "$delay_single_quote_subst"`'
- lt_cv_sys_global_symbol_to_c_name_address='`$ECHO "$lt_cv_sys_global_symbol_to_c_name_address" | $SED "$delay_single_quote_subst"`'
- lt_cv_sys_global_symbol_to_c_name_address_lib_prefix='`$ECHO "$lt_cv_sys_global_symbol_to_c_name_address_lib_prefix" | $SED "$delay_single_quote_subst"`'
-+nm_file_list_spec='`$ECHO "$nm_file_list_spec" | $SED "$delay_single_quote_subst"`'
-+lt_sysroot='`$ECHO "$lt_sysroot" | $SED "$delay_single_quote_subst"`'
- objdir='`$ECHO "$objdir" | $SED "$delay_single_quote_subst"`'
- MAGIC_CMD='`$ECHO "$MAGIC_CMD" | $SED "$delay_single_quote_subst"`'
- lt_prog_compiler_no_builtin_flag='`$ECHO "$lt_prog_compiler_no_builtin_flag" | $SED "$delay_single_quote_subst"`'
--lt_prog_compiler_wl='`$ECHO "$lt_prog_compiler_wl" | $SED "$delay_single_quote_subst"`'
- lt_prog_compiler_pic='`$ECHO "$lt_prog_compiler_pic" | $SED "$delay_single_quote_subst"`'
-+lt_prog_compiler_wl='`$ECHO "$lt_prog_compiler_wl" | $SED "$delay_single_quote_subst"`'
- lt_prog_compiler_static='`$ECHO "$lt_prog_compiler_static" | $SED "$delay_single_quote_subst"`'
- lt_cv_prog_compiler_c_o='`$ECHO "$lt_cv_prog_compiler_c_o" | $SED "$delay_single_quote_subst"`'
- need_locks='`$ECHO "$need_locks" | $SED "$delay_single_quote_subst"`'
-+MANIFEST_TOOL='`$ECHO "$MANIFEST_TOOL" | $SED "$delay_single_quote_subst"`'
- DSYMUTIL='`$ECHO "$DSYMUTIL" | $SED "$delay_single_quote_subst"`'
- NMEDIT='`$ECHO "$NMEDIT" | $SED "$delay_single_quote_subst"`'
- LIPO='`$ECHO "$LIPO" | $SED "$delay_single_quote_subst"`'
-@@ -12832,12 +13492,12 @@ hardcode_shlibpath_var='`$ECHO "$hardcode_shlibpath_var" | $SED "$delay_single_q
- hardcode_automatic='`$ECHO "$hardcode_automatic" | $SED "$delay_single_quote_subst"`'
- inherit_rpath='`$ECHO "$inherit_rpath" | $SED "$delay_single_quote_subst"`'
- link_all_deplibs='`$ECHO "$link_all_deplibs" | $SED "$delay_single_quote_subst"`'
--fix_srcfile_path='`$ECHO "$fix_srcfile_path" | $SED "$delay_single_quote_subst"`'
- always_export_symbols='`$ECHO "$always_export_symbols" | $SED "$delay_single_quote_subst"`'
- export_symbols_cmds='`$ECHO "$export_symbols_cmds" | $SED "$delay_single_quote_subst"`'
- exclude_expsyms='`$ECHO "$exclude_expsyms" | $SED "$delay_single_quote_subst"`'
- include_expsyms='`$ECHO "$include_expsyms" | $SED "$delay_single_quote_subst"`'
- prelink_cmds='`$ECHO "$prelink_cmds" | $SED "$delay_single_quote_subst"`'
-+postlink_cmds='`$ECHO "$postlink_cmds" | $SED "$delay_single_quote_subst"`'
- file_list_spec='`$ECHO "$file_list_spec" | $SED "$delay_single_quote_subst"`'
- variables_saved_for_relink='`$ECHO "$variables_saved_for_relink" | $SED "$delay_single_quote_subst"`'
- need_lib_prefix='`$ECHO "$need_lib_prefix" | $SED "$delay_single_quote_subst"`'
-@@ -12892,8 +13552,13 @@ reload_flag \
- OBJDUMP \
- deplibs_check_method \
- file_magic_cmd \
-+file_magic_glob \
-+want_nocaseglob \
-+DLLTOOL \
-+sharedlib_from_linklib_cmd \
- AR \
- AR_FLAGS \
-+archiver_list_spec \
- STRIP \
- RANLIB \
- CC \
-@@ -12903,12 +13568,14 @@ lt_cv_sys_global_symbol_pipe \
- lt_cv_sys_global_symbol_to_cdecl \
- lt_cv_sys_global_symbol_to_c_name_address \
- lt_cv_sys_global_symbol_to_c_name_address_lib_prefix \
-+nm_file_list_spec \
- lt_prog_compiler_no_builtin_flag \
--lt_prog_compiler_wl \
- lt_prog_compiler_pic \
-+lt_prog_compiler_wl \
- lt_prog_compiler_static \
- lt_cv_prog_compiler_c_o \
- need_locks \
-+MANIFEST_TOOL \
- DSYMUTIL \
- NMEDIT \
- LIPO \
-@@ -12924,7 +13591,6 @@ no_undefined_flag \
- hardcode_libdir_flag_spec \
- hardcode_libdir_flag_spec_ld \
- hardcode_libdir_separator \
--fix_srcfile_path \
- exclude_expsyms \
- include_expsyms \
- file_list_spec \
-@@ -12960,6 +13626,7 @@ module_cmds \
- module_expsym_cmds \
- export_symbols_cmds \
- prelink_cmds \
-+postlink_cmds \
- postinstall_cmds \
- postuninstall_cmds \
- finish_cmds \
-@@ -13725,7 +14392,8 @@ $as_echo X"$file" |
- # NOTE: Changes made to this file will be lost: look at ltmain.sh.
- #
- # Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005,
--# 2006, 2007, 2008, 2009 Free Software Foundation, Inc.
-+# 2006, 2007, 2008, 2009, 2010 Free Software Foundation,
-+# Inc.
- # Written by Gordon Matzigkeit, 1996
- #
- # This file is part of GNU Libtool.
-@@ -13828,19 +14496,42 @@ SP2NL=$lt_lt_SP2NL
- # turn newlines into spaces.
- NL2SP=$lt_lt_NL2SP
-
-+# convert \$build file names to \$host format.
-+to_host_file_cmd=$lt_cv_to_host_file_cmd
-+
-+# convert \$build files to toolchain format.
-+to_tool_file_cmd=$lt_cv_to_tool_file_cmd
-+
- # An object symbol dumper.
- OBJDUMP=$lt_OBJDUMP
-
- # Method to check whether dependent libraries are shared objects.
- deplibs_check_method=$lt_deplibs_check_method
-
--# Command to use when deplibs_check_method == "file_magic".
-+# Command to use when deplibs_check_method = "file_magic".
- file_magic_cmd=$lt_file_magic_cmd
-
-+# How to find potential files when deplibs_check_method = "file_magic".
-+file_magic_glob=$lt_file_magic_glob
-+
-+# Find potential files using nocaseglob when deplibs_check_method = "file_magic".
-+want_nocaseglob=$lt_want_nocaseglob
-+
-+# DLL creation program.
-+DLLTOOL=$lt_DLLTOOL
-+
-+# Command to associate shared and link libraries.
-+sharedlib_from_linklib_cmd=$lt_sharedlib_from_linklib_cmd
-+
- # The archiver.
- AR=$lt_AR
-+
-+# Flags to create an archive.
- AR_FLAGS=$lt_AR_FLAGS
-
-+# How to feed a file listing to the archiver.
-+archiver_list_spec=$lt_archiver_list_spec
-+
- # A symbol stripping program.
- STRIP=$lt_STRIP
-
-@@ -13870,6 +14561,12 @@ global_symbol_to_c_name_address=$lt_lt_cv_sys_global_symbol_to_c_name_address
- # Transform the output of nm in a C name address pair when lib prefix is needed.
- global_symbol_to_c_name_address_lib_prefix=$lt_lt_cv_sys_global_symbol_to_c_name_address_lib_prefix
-
-+# Specify filename containing input files for \$NM.
-+nm_file_list_spec=$lt_nm_file_list_spec
-+
-+# The root where to search for dependent libraries,and in which our libraries should be installed.
-+lt_sysroot=$lt_sysroot
-+
- # The name of the directory that contains temporary libtool files.
- objdir=$objdir
-
-@@ -13879,6 +14576,9 @@ MAGIC_CMD=$MAGIC_CMD
- # Must we lock files when doing compilation?
- need_locks=$lt_need_locks
-
-+# Manifest tool.
-+MANIFEST_TOOL=$lt_MANIFEST_TOOL
-+
- # Tool to manipulate archived DWARF debug symbol files on Mac OS X.
- DSYMUTIL=$lt_DSYMUTIL
-
-@@ -13993,12 +14693,12 @@ with_gcc=$GCC
- # Compiler flag to turn off builtin functions.
- no_builtin_flag=$lt_lt_prog_compiler_no_builtin_flag
-
--# How to pass a linker flag through the compiler.
--wl=$lt_lt_prog_compiler_wl
--
- # Additional compiler flags for building library objects.
- pic_flag=$lt_lt_prog_compiler_pic
-
-+# How to pass a linker flag through the compiler.
-+wl=$lt_lt_prog_compiler_wl
-+
- # Compiler flag to prevent dynamic linking.
- link_static_flag=$lt_lt_prog_compiler_static
-
-@@ -14085,9 +14785,6 @@ inherit_rpath=$inherit_rpath
- # Whether libtool must link a program against all its dependency libraries.
- link_all_deplibs=$link_all_deplibs
-
--# Fix the shell variable \$srcfile for the compiler.
--fix_srcfile_path=$lt_fix_srcfile_path
--
- # Set to "yes" if exported symbols are required.
- always_export_symbols=$always_export_symbols
-
-@@ -14103,6 +14800,9 @@ include_expsyms=$lt_include_expsyms
- # Commands necessary for linking programs (against libraries) with templates.
- prelink_cmds=$lt_prelink_cmds
-
-+# Commands necessary for finishing linking programs.
-+postlink_cmds=$lt_postlink_cmds
-+
- # Specify filename containing input files.
- file_list_spec=$lt_file_list_spec
-
-@@ -14135,210 +14835,169 @@ ltmain="$ac_aux_dir/ltmain.sh"
- # if finds mixed CR/LF and LF-only lines. Since sed operates in
- # text mode, it properly converts lines to CR/LF. This bash problem
- # is reportedly fixed, but why not run on old versions too?
-- sed '/^# Generated shell functions inserted here/q' "$ltmain" >> "$cfgfile" \
-- || (rm -f "$cfgfile"; exit 1)
--
-- case $xsi_shell in
-- yes)
-- cat << \_LT_EOF >> "$cfgfile"
--
--# func_dirname file append nondir_replacement
--# Compute the dirname of FILE. If nonempty, add APPEND to the result,
--# otherwise set result to NONDIR_REPLACEMENT.
--func_dirname ()
--{
-- case ${1} in
-- */*) func_dirname_result="${1%/*}${2}" ;;
-- * ) func_dirname_result="${3}" ;;
-- esac
--}
--
--# func_basename file
--func_basename ()
--{
-- func_basename_result="${1##*/}"
--}
--
--# func_dirname_and_basename file append nondir_replacement
--# perform func_basename and func_dirname in a single function
--# call:
--# dirname: Compute the dirname of FILE. If nonempty,
--# add APPEND to the result, otherwise set result
--# to NONDIR_REPLACEMENT.
--# value returned in "$func_dirname_result"
--# basename: Compute filename of FILE.
--# value retuned in "$func_basename_result"
--# Implementation must be kept synchronized with func_dirname
--# and func_basename. For efficiency, we do not delegate to
--# those functions but instead duplicate the functionality here.
--func_dirname_and_basename ()
--{
-- case ${1} in
-- */*) func_dirname_result="${1%/*}${2}" ;;
-- * ) func_dirname_result="${3}" ;;
-- esac
-- func_basename_result="${1##*/}"
--}
--
--# func_stripname prefix suffix name
--# strip PREFIX and SUFFIX off of NAME.
--# PREFIX and SUFFIX must not contain globbing or regex special
--# characters, hashes, percent signs, but SUFFIX may contain a leading
--# dot (in which case that matches only a dot).
--func_stripname ()
--{
-- # pdksh 5.2.14 does not do ${X%$Y} correctly if both X and Y are
-- # positional parameters, so assign one to ordinary parameter first.
-- func_stripname_result=${3}
-- func_stripname_result=${func_stripname_result#"${1}"}
-- func_stripname_result=${func_stripname_result%"${2}"}
--}
--
--# func_opt_split
--func_opt_split ()
--{
-- func_opt_split_opt=${1%%=*}
-- func_opt_split_arg=${1#*=}
--}
--
--# func_lo2o object
--func_lo2o ()
--{
-- case ${1} in
-- *.lo) func_lo2o_result=${1%.lo}.${objext} ;;
-- *) func_lo2o_result=${1} ;;
-- esac
--}
--
--# func_xform libobj-or-source
--func_xform ()
--{
-- func_xform_result=${1%.*}.lo
--}
--
--# func_arith arithmetic-term...
--func_arith ()
--{
-- func_arith_result=$(( $* ))
--}
--
--# func_len string
--# STRING may not start with a hyphen.
--func_len ()
--{
-- func_len_result=${#1}
--}
--
--_LT_EOF
-- ;;
-- *) # Bourne compatible functions.
-- cat << \_LT_EOF >> "$cfgfile"
--
--# func_dirname file append nondir_replacement
--# Compute the dirname of FILE. If nonempty, add APPEND to the result,
--# otherwise set result to NONDIR_REPLACEMENT.
--func_dirname ()
--{
-- # Extract subdirectory from the argument.
-- func_dirname_result=`$ECHO "${1}" | $SED "$dirname"`
-- if test "X$func_dirname_result" = "X${1}"; then
-- func_dirname_result="${3}"
-- else
-- func_dirname_result="$func_dirname_result${2}"
-- fi
--}
--
--# func_basename file
--func_basename ()
--{
-- func_basename_result=`$ECHO "${1}" | $SED "$basename"`
--}
--
--
--# func_stripname prefix suffix name
--# strip PREFIX and SUFFIX off of NAME.
--# PREFIX and SUFFIX must not contain globbing or regex special
--# characters, hashes, percent signs, but SUFFIX may contain a leading
--# dot (in which case that matches only a dot).
--# func_strip_suffix prefix name
--func_stripname ()
--{
-- case ${2} in
-- .*) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%\\\\${2}\$%%"`;;
-- *) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%${2}\$%%"`;;
-- esac
--}
--
--# sed scripts:
--my_sed_long_opt='1s/^\(-[^=]*\)=.*/\1/;q'
--my_sed_long_arg='1s/^-[^=]*=//'
--
--# func_opt_split
--func_opt_split ()
--{
-- func_opt_split_opt=`$ECHO "${1}" | $SED "$my_sed_long_opt"`
-- func_opt_split_arg=`$ECHO "${1}" | $SED "$my_sed_long_arg"`
--}
--
--# func_lo2o object
--func_lo2o ()
--{
-- func_lo2o_result=`$ECHO "${1}" | $SED "$lo2o"`
--}
--
--# func_xform libobj-or-source
--func_xform ()
--{
-- func_xform_result=`$ECHO "${1}" | $SED 's/\.[^.]*$/.lo/'`
--}
--
--# func_arith arithmetic-term...
--func_arith ()
--{
-- func_arith_result=`expr "$@"`
--}
--
--# func_len string
--# STRING may not start with a hyphen.
--func_len ()
--{
-- func_len_result=`expr "$1" : ".*" 2>/dev/null || echo $max_cmd_len`
--}
--
--_LT_EOF
--esac
--
--case $lt_shell_append in
-- yes)
-- cat << \_LT_EOF >> "$cfgfile"
--
--# func_append var value
--# Append VALUE to the end of shell variable VAR.
--func_append ()
--{
-- eval "$1+=\$2"
--}
--_LT_EOF
-- ;;
-- *)
-- cat << \_LT_EOF >> "$cfgfile"
--
--# func_append var value
--# Append VALUE to the end of shell variable VAR.
--func_append ()
--{
-- eval "$1=\$$1\$2"
--}
--
--_LT_EOF
-- ;;
-- esac
--
--
-- sed -n '/^# Generated shell functions inserted here/,$p' "$ltmain" >> "$cfgfile" \
-- || (rm -f "$cfgfile"; exit 1)
--
-- mv -f "$cfgfile" "$ofile" ||
-+ sed '$q' "$ltmain" >> "$cfgfile" \
-+ || (rm -f "$cfgfile"; exit 1)
-+
-+ if test x"$xsi_shell" = xyes; then
-+ sed -e '/^func_dirname ()$/,/^} # func_dirname /c\
-+func_dirname ()\
-+{\
-+\ case ${1} in\
-+\ */*) func_dirname_result="${1%/*}${2}" ;;\
-+\ * ) func_dirname_result="${3}" ;;\
-+\ esac\
-+} # Extended-shell func_dirname implementation' "$cfgfile" > $cfgfile.tmp \
-+ && mv -f "$cfgfile.tmp" "$cfgfile" \
-+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-+test 0 -eq $? || _lt_function_replace_fail=:
-+
-+
-+ sed -e '/^func_basename ()$/,/^} # func_basename /c\
-+func_basename ()\
-+{\
-+\ func_basename_result="${1##*/}"\
-+} # Extended-shell func_basename implementation' "$cfgfile" > $cfgfile.tmp \
-+ && mv -f "$cfgfile.tmp" "$cfgfile" \
-+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-+test 0 -eq $? || _lt_function_replace_fail=:
-+
-+
-+ sed -e '/^func_dirname_and_basename ()$/,/^} # func_dirname_and_basename /c\
-+func_dirname_and_basename ()\
-+{\
-+\ case ${1} in\
-+\ */*) func_dirname_result="${1%/*}${2}" ;;\
-+\ * ) func_dirname_result="${3}" ;;\
-+\ esac\
-+\ func_basename_result="${1##*/}"\
-+} # Extended-shell func_dirname_and_basename implementation' "$cfgfile" > $cfgfile.tmp \
-+ && mv -f "$cfgfile.tmp" "$cfgfile" \
-+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-+test 0 -eq $? || _lt_function_replace_fail=:
-+
-+
-+ sed -e '/^func_stripname ()$/,/^} # func_stripname /c\
-+func_stripname ()\
-+{\
-+\ # pdksh 5.2.14 does not do ${X%$Y} correctly if both X and Y are\
-+\ # positional parameters, so assign one to ordinary parameter first.\
-+\ func_stripname_result=${3}\
-+\ func_stripname_result=${func_stripname_result#"${1}"}\
-+\ func_stripname_result=${func_stripname_result%"${2}"}\
-+} # Extended-shell func_stripname implementation' "$cfgfile" > $cfgfile.tmp \
-+ && mv -f "$cfgfile.tmp" "$cfgfile" \
-+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-+test 0 -eq $? || _lt_function_replace_fail=:
-+
-+
-+ sed -e '/^func_split_long_opt ()$/,/^} # func_split_long_opt /c\
-+func_split_long_opt ()\
-+{\
-+\ func_split_long_opt_name=${1%%=*}\
-+\ func_split_long_opt_arg=${1#*=}\
-+} # Extended-shell func_split_long_opt implementation' "$cfgfile" > $cfgfile.tmp \
-+ && mv -f "$cfgfile.tmp" "$cfgfile" \
-+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-+test 0 -eq $? || _lt_function_replace_fail=:
-+
-+
-+ sed -e '/^func_split_short_opt ()$/,/^} # func_split_short_opt /c\
-+func_split_short_opt ()\
-+{\
-+\ func_split_short_opt_arg=${1#??}\
-+\ func_split_short_opt_name=${1%"$func_split_short_opt_arg"}\
-+} # Extended-shell func_split_short_opt implementation' "$cfgfile" > $cfgfile.tmp \
-+ && mv -f "$cfgfile.tmp" "$cfgfile" \
-+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-+test 0 -eq $? || _lt_function_replace_fail=:
-+
-+
-+ sed -e '/^func_lo2o ()$/,/^} # func_lo2o /c\
-+func_lo2o ()\
-+{\
-+\ case ${1} in\
-+\ *.lo) func_lo2o_result=${1%.lo}.${objext} ;;\
-+\ *) func_lo2o_result=${1} ;;\
-+\ esac\
-+} # Extended-shell func_lo2o implementation' "$cfgfile" > $cfgfile.tmp \
-+ && mv -f "$cfgfile.tmp" "$cfgfile" \
-+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-+test 0 -eq $? || _lt_function_replace_fail=:
-+
-+
-+ sed -e '/^func_xform ()$/,/^} # func_xform /c\
-+func_xform ()\
-+{\
-+ func_xform_result=${1%.*}.lo\
-+} # Extended-shell func_xform implementation' "$cfgfile" > $cfgfile.tmp \
-+ && mv -f "$cfgfile.tmp" "$cfgfile" \
-+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-+test 0 -eq $? || _lt_function_replace_fail=:
-+
-+
-+ sed -e '/^func_arith ()$/,/^} # func_arith /c\
-+func_arith ()\
-+{\
-+ func_arith_result=$(( $* ))\
-+} # Extended-shell func_arith implementation' "$cfgfile" > $cfgfile.tmp \
-+ && mv -f "$cfgfile.tmp" "$cfgfile" \
-+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-+test 0 -eq $? || _lt_function_replace_fail=:
-+
-+
-+ sed -e '/^func_len ()$/,/^} # func_len /c\
-+func_len ()\
-+{\
-+ func_len_result=${#1}\
-+} # Extended-shell func_len implementation' "$cfgfile" > $cfgfile.tmp \
-+ && mv -f "$cfgfile.tmp" "$cfgfile" \
-+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-+test 0 -eq $? || _lt_function_replace_fail=:
-+
-+fi
-+
-+if test x"$lt_shell_append" = xyes; then
-+ sed -e '/^func_append ()$/,/^} # func_append /c\
-+func_append ()\
-+{\
-+ eval "${1}+=\\${2}"\
-+} # Extended-shell func_append implementation' "$cfgfile" > $cfgfile.tmp \
-+ && mv -f "$cfgfile.tmp" "$cfgfile" \
-+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-+test 0 -eq $? || _lt_function_replace_fail=:
-+
-+
-+ sed -e '/^func_append_quoted ()$/,/^} # func_append_quoted /c\
-+func_append_quoted ()\
-+{\
-+\ func_quote_for_eval "${2}"\
-+\ eval "${1}+=\\\\ \\$func_quote_for_eval_result"\
-+} # Extended-shell func_append_quoted implementation' "$cfgfile" > $cfgfile.tmp \
-+ && mv -f "$cfgfile.tmp" "$cfgfile" \
-+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-+test 0 -eq $? || _lt_function_replace_fail=:
-+
-+
-+ # Save a `func_append' function call where possible by direct use of '+='
-+ sed -e 's%func_append \([a-zA-Z_]\{1,\}\) "%\1+="%g' $cfgfile > $cfgfile.tmp \
-+ && mv -f "$cfgfile.tmp" "$cfgfile" \
-+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-+ test 0 -eq $? || _lt_function_replace_fail=:
-+else
-+ # Save a `func_append' function call even when '+=' is not available
-+ sed -e 's%func_append \([a-zA-Z_]\{1,\}\) "%\1="$\1%g' $cfgfile > $cfgfile.tmp \
-+ && mv -f "$cfgfile.tmp" "$cfgfile" \
-+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-+ test 0 -eq $? || _lt_function_replace_fail=:
-+fi
-+
-+if test x"$_lt_function_replace_fail" = x":"; then
-+ { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: Unable to substitute extended shell functions in $ofile" >&5
-+$as_echo "$as_me: WARNING: Unable to substitute extended shell functions in $ofile" >&2;}
-+fi
-+
-+
-+ mv -f "$cfgfile" "$ofile" ||
- (rm -f "$ofile" && cp "$cfgfile" "$ofile" && rm -f "$cfgfile")
- chmod +x "$ofile"
-
-diff --git a/ld/configure b/ld/configure
-index 1f9ec8ec580..4a35108ce7c 100755
---- a/ld/configure
-+++ b/ld/configure
-@@ -695,8 +695,11 @@ OTOOL
- LIPO
- NMEDIT
- DSYMUTIL
-+MANIFEST_TOOL
- RANLIB
-+ac_ct_AR
- AR
-+DLLTOOL
- OBJDUMP
- LN_S
- NM
-@@ -823,6 +826,7 @@ enable_static
- with_pic
- enable_fast_install
- with_gnu_ld
-+with_libtool_sysroot
- enable_libtool_lock
- enable_plugins
- enable_largefile
-@@ -1530,6 +1534,8 @@ Optional Packages:
- --with-pic try to use only PIC/non-PIC objects [default=use
- both]
- --with-gnu-ld assume the C compiler uses GNU ld [default=no]
-+ --with-libtool-sysroot=DIR Search for dependent libraries within DIR
-+ (or the compiler's sysroot if not specified).
- --with-lib-path=dir1:dir2... set default LIB_PATH
- --with-sysroot=DIR Search for usr/lib et al within DIR.
- --with-system-zlib use installed libz
-@@ -5368,8 +5374,8 @@ esac
-
-
-
--macro_version='2.2.7a'
--macro_revision='1.3134'
-+macro_version='2.4'
-+macro_revision='1.3293'
-
-
-
-@@ -5409,7 +5415,7 @@ ECHO=$ECHO$ECHO$ECHO$ECHO$ECHO$ECHO
- { $as_echo "$as_me:${as_lineno-$LINENO}: checking how to print strings" >&5
- $as_echo_n "checking how to print strings... " >&6; }
- # Test print first, because it will be a builtin if present.
--if test "X`print -r -- -n 2>/dev/null`" = X-n && \
-+if test "X`( print -r -- -n ) 2>/dev/null`" = X-n && \
- test "X`print -r -- $ECHO 2>/dev/null`" = "X$ECHO"; then
- ECHO='print -r --'
- elif test "X`printf %s $ECHO 2>/dev/null`" = "X$ECHO"; then
-@@ -6096,8 +6102,8 @@ $as_echo_n "checking whether the shell understands some XSI constructs... " >&6;
- # Try some XSI features
- xsi_shell=no
- ( _lt_dummy="a/b/c"
-- test "${_lt_dummy##*/},${_lt_dummy%/*},"${_lt_dummy%"$_lt_dummy"}, \
-- = c,a/b,, \
-+ test "${_lt_dummy##*/},${_lt_dummy%/*},${_lt_dummy#??}"${_lt_dummy%"$_lt_dummy"}, \
-+ = c,a/b,b/c, \
- && eval 'test $(( 1 + 1 )) -eq 2 \
- && test "${#_lt_dummy}" -eq 5' ) >/dev/null 2>&1 \
- && xsi_shell=yes
-@@ -6146,6 +6152,80 @@ esac
-
-
-
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to convert $build file names to $host format" >&5
-+$as_echo_n "checking how to convert $build file names to $host format... " >&6; }
-+if ${lt_cv_to_host_file_cmd+:} false; then :
-+ $as_echo_n "(cached) " >&6
-+else
-+ case $host in
-+ *-*-mingw* )
-+ case $build in
-+ *-*-mingw* ) # actually msys
-+ lt_cv_to_host_file_cmd=func_convert_file_msys_to_w32
-+ ;;
-+ *-*-cygwin* )
-+ lt_cv_to_host_file_cmd=func_convert_file_cygwin_to_w32
-+ ;;
-+ * ) # otherwise, assume *nix
-+ lt_cv_to_host_file_cmd=func_convert_file_nix_to_w32
-+ ;;
-+ esac
-+ ;;
-+ *-*-cygwin* )
-+ case $build in
-+ *-*-mingw* ) # actually msys
-+ lt_cv_to_host_file_cmd=func_convert_file_msys_to_cygwin
-+ ;;
-+ *-*-cygwin* )
-+ lt_cv_to_host_file_cmd=func_convert_file_noop
-+ ;;
-+ * ) # otherwise, assume *nix
-+ lt_cv_to_host_file_cmd=func_convert_file_nix_to_cygwin
-+ ;;
-+ esac
-+ ;;
-+ * ) # unhandled hosts (and "normal" native builds)
-+ lt_cv_to_host_file_cmd=func_convert_file_noop
-+ ;;
-+esac
-+
-+fi
-+
-+to_host_file_cmd=$lt_cv_to_host_file_cmd
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_to_host_file_cmd" >&5
-+$as_echo "$lt_cv_to_host_file_cmd" >&6; }
-+
-+
-+
-+
-+
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to convert $build file names to toolchain format" >&5
-+$as_echo_n "checking how to convert $build file names to toolchain format... " >&6; }
-+if ${lt_cv_to_tool_file_cmd+:} false; then :
-+ $as_echo_n "(cached) " >&6
-+else
-+ #assume ordinary cross tools, or native build.
-+lt_cv_to_tool_file_cmd=func_convert_file_noop
-+case $host in
-+ *-*-mingw* )
-+ case $build in
-+ *-*-mingw* ) # actually msys
-+ lt_cv_to_tool_file_cmd=func_convert_file_msys_to_w32
-+ ;;
-+ esac
-+ ;;
-+esac
-+
-+fi
-+
-+to_tool_file_cmd=$lt_cv_to_tool_file_cmd
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_to_tool_file_cmd" >&5
-+$as_echo "$lt_cv_to_tool_file_cmd" >&6; }
-+
-+
-+
-+
-+
- { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $LD option to reload object files" >&5
- $as_echo_n "checking for $LD option to reload object files... " >&6; }
- if ${lt_cv_ld_reload_flag+:} false; then :
-@@ -6162,6 +6242,11 @@ case $reload_flag in
- esac
- reload_cmds='$LD$reload_flag -o $output$reload_objs'
- case $host_os in
-+ cygwin* | mingw* | pw32* | cegcc*)
-+ if test "$GCC" != yes; then
-+ reload_cmds=false
-+ fi
-+ ;;
- darwin*)
- if test "$GCC" = yes; then
- reload_cmds='$LTCC $LTCFLAGS -nostdlib ${wl}-r -o $output$reload_objs'
-@@ -6330,7 +6415,8 @@ mingw* | pw32*)
- lt_cv_deplibs_check_method='file_magic ^x86 archive import|^x86 DLL'
- lt_cv_file_magic_cmd='func_win32_libid'
- else
-- lt_cv_deplibs_check_method='file_magic file format pei*-i386(.*architecture: i386)?'
-+ # Keep this pattern in sync with the one in func_win32_libid.
-+ lt_cv_deplibs_check_method='file_magic file format (pei*-i386(.*architecture: i386)?|pe-arm-wince|pe-x86-64)'
- lt_cv_file_magic_cmd='$OBJDUMP -f'
- fi
- ;;
-@@ -6484,6 +6570,21 @@ esac
- fi
- { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_deplibs_check_method" >&5
- $as_echo "$lt_cv_deplibs_check_method" >&6; }
-+
-+file_magic_glob=
-+want_nocaseglob=no
-+if test "$build" = "$host"; then
-+ case $host_os in
-+ mingw* | pw32*)
-+ if ( shopt | grep nocaseglob ) >/dev/null 2>&1; then
-+ want_nocaseglob=yes
-+ else
-+ file_magic_glob=`echo aAbBcCdDeEfFgGhHiIjJkKlLmMnNoOpPqQrRsStTuUvVwWxXyYzZ | $SED -e "s/\(..\)/s\/[\1]\/[\1]\/g;/g"`
-+ fi
-+ ;;
-+ esac
-+fi
-+
- file_magic_cmd=$lt_cv_file_magic_cmd
- deplibs_check_method=$lt_cv_deplibs_check_method
- test -z "$deplibs_check_method" && deplibs_check_method=unknown
-@@ -6499,6 +6600,157 @@ test -z "$deplibs_check_method" && deplibs_check_method=unknown
-
-
-
-+
-+
-+
-+
-+
-+
-+
-+
-+
-+
-+if test -n "$ac_tool_prefix"; then
-+ # Extract the first word of "${ac_tool_prefix}dlltool", so it can be a program name with args.
-+set dummy ${ac_tool_prefix}dlltool; ac_word=$2
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-+$as_echo_n "checking for $ac_word... " >&6; }
-+if ${ac_cv_prog_DLLTOOL+:} false; then :
-+ $as_echo_n "(cached) " >&6
-+else
-+ if test -n "$DLLTOOL"; then
-+ ac_cv_prog_DLLTOOL="$DLLTOOL" # Let the user override the test.
-+else
-+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-+for as_dir in $PATH
-+do
-+ IFS=$as_save_IFS
-+ test -z "$as_dir" && as_dir=.
-+ for ac_exec_ext in '' $ac_executable_extensions; do
-+ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
-+ ac_cv_prog_DLLTOOL="${ac_tool_prefix}dlltool"
-+ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
-+ break 2
-+ fi
-+done
-+ done
-+IFS=$as_save_IFS
-+
-+fi
-+fi
-+DLLTOOL=$ac_cv_prog_DLLTOOL
-+if test -n "$DLLTOOL"; then
-+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $DLLTOOL" >&5
-+$as_echo "$DLLTOOL" >&6; }
-+else
-+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-+$as_echo "no" >&6; }
-+fi
-+
-+
-+fi
-+if test -z "$ac_cv_prog_DLLTOOL"; then
-+ ac_ct_DLLTOOL=$DLLTOOL
-+ # Extract the first word of "dlltool", so it can be a program name with args.
-+set dummy dlltool; ac_word=$2
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-+$as_echo_n "checking for $ac_word... " >&6; }
-+if ${ac_cv_prog_ac_ct_DLLTOOL+:} false; then :
-+ $as_echo_n "(cached) " >&6
-+else
-+ if test -n "$ac_ct_DLLTOOL"; then
-+ ac_cv_prog_ac_ct_DLLTOOL="$ac_ct_DLLTOOL" # Let the user override the test.
-+else
-+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-+for as_dir in $PATH
-+do
-+ IFS=$as_save_IFS
-+ test -z "$as_dir" && as_dir=.
-+ for ac_exec_ext in '' $ac_executable_extensions; do
-+ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
-+ ac_cv_prog_ac_ct_DLLTOOL="dlltool"
-+ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
-+ break 2
-+ fi
-+done
-+ done
-+IFS=$as_save_IFS
-+
-+fi
-+fi
-+ac_ct_DLLTOOL=$ac_cv_prog_ac_ct_DLLTOOL
-+if test -n "$ac_ct_DLLTOOL"; then
-+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_DLLTOOL" >&5
-+$as_echo "$ac_ct_DLLTOOL" >&6; }
-+else
-+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-+$as_echo "no" >&6; }
-+fi
-+
-+ if test "x$ac_ct_DLLTOOL" = x; then
-+ DLLTOOL="false"
-+ else
-+ case $cross_compiling:$ac_tool_warned in
-+yes:)
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
-+$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
-+ac_tool_warned=yes ;;
-+esac
-+ DLLTOOL=$ac_ct_DLLTOOL
-+ fi
-+else
-+ DLLTOOL="$ac_cv_prog_DLLTOOL"
-+fi
-+
-+test -z "$DLLTOOL" && DLLTOOL=dlltool
-+
-+
-+
-+
-+
-+
-+
-+
-+
-+
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to associate runtime and link libraries" >&5
-+$as_echo_n "checking how to associate runtime and link libraries... " >&6; }
-+if ${lt_cv_sharedlib_from_linklib_cmd+:} false; then :
-+ $as_echo_n "(cached) " >&6
-+else
-+ lt_cv_sharedlib_from_linklib_cmd='unknown'
-+
-+case $host_os in
-+cygwin* | mingw* | pw32* | cegcc*)
-+ # two different shell functions defined in ltmain.sh
-+ # decide which to use based on capabilities of $DLLTOOL
-+ case `$DLLTOOL --help 2>&1` in
-+ *--identify-strict*)
-+ lt_cv_sharedlib_from_linklib_cmd=func_cygming_dll_for_implib
-+ ;;
-+ *)
-+ lt_cv_sharedlib_from_linklib_cmd=func_cygming_dll_for_implib_fallback
-+ ;;
-+ esac
-+ ;;
-+*)
-+ # fallback: assume linklib IS sharedlib
-+ lt_cv_sharedlib_from_linklib_cmd="$ECHO"
-+ ;;
-+esac
-+
-+fi
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_sharedlib_from_linklib_cmd" >&5
-+$as_echo "$lt_cv_sharedlib_from_linklib_cmd" >&6; }
-+sharedlib_from_linklib_cmd=$lt_cv_sharedlib_from_linklib_cmd
-+test -z "$sharedlib_from_linklib_cmd" && sharedlib_from_linklib_cmd=$ECHO
-+
-+
-+
-+
-+
-+
-+
- plugin_option=
- plugin_names="liblto_plugin.so liblto_plugin-0.dll cyglto_plugin-0.dll"
- for plugin in $plugin_names; do
-@@ -6513,8 +6765,10 @@ for plugin in $plugin_names; do
- done
-
- if test -n "$ac_tool_prefix"; then
-- # Extract the first word of "${ac_tool_prefix}ar", so it can be a program name with args.
--set dummy ${ac_tool_prefix}ar; ac_word=$2
-+ for ac_prog in ar
-+ do
-+ # Extract the first word of "$ac_tool_prefix$ac_prog", so it can be a program name with args.
-+set dummy $ac_tool_prefix$ac_prog; ac_word=$2
- { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
- $as_echo_n "checking for $ac_word... " >&6; }
- if ${ac_cv_prog_AR+:} false; then :
-@@ -6530,7 +6784,7 @@ do
- test -z "$as_dir" && as_dir=.
- for ac_exec_ext in '' $ac_executable_extensions; do
- if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
-- ac_cv_prog_AR="${ac_tool_prefix}ar"
-+ ac_cv_prog_AR="$ac_tool_prefix$ac_prog"
- $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
- break 2
- fi
-@@ -6550,11 +6804,15 @@ $as_echo "no" >&6; }
- fi
-
-
-+ test -n "$AR" && break
-+ done
- fi
--if test -z "$ac_cv_prog_AR"; then
-+if test -z "$AR"; then
- ac_ct_AR=$AR
-- # Extract the first word of "ar", so it can be a program name with args.
--set dummy ar; ac_word=$2
-+ for ac_prog in ar
-+do
-+ # Extract the first word of "$ac_prog", so it can be a program name with args.
-+set dummy $ac_prog; ac_word=$2
- { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
- $as_echo_n "checking for $ac_word... " >&6; }
- if ${ac_cv_prog_ac_ct_AR+:} false; then :
-@@ -6570,7 +6828,7 @@ do
- test -z "$as_dir" && as_dir=.
- for ac_exec_ext in '' $ac_executable_extensions; do
- if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
-- ac_cv_prog_ac_ct_AR="ar"
-+ ac_cv_prog_ac_ct_AR="$ac_prog"
- $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
- break 2
- fi
-@@ -6589,6 +6847,10 @@ else
- $as_echo "no" >&6; }
- fi
-
-+
-+ test -n "$ac_ct_AR" && break
-+done
-+
- if test "x$ac_ct_AR" = x; then
- AR="false"
- else
-@@ -6600,25 +6862,19 @@ ac_tool_warned=yes ;;
- esac
- AR=$ac_ct_AR
- fi
--else
-- AR="$ac_cv_prog_AR"
- fi
-
--test -z "$AR" && AR=ar
--if test -n "$plugin_option"; then
-- if $AR --help 2>&1 | grep -q "\--plugin"; then
-- touch conftest.c
-- $AR $plugin_option rc conftest.a conftest.c
-- if test "$?" != 0; then
-- { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: Failed: $AR $plugin_option rc" >&5
-+ touch conftest.c
-+ $AR $plugin_option rc conftest.a conftest.c
-+ if test "$?" != 0; then
-+ { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: Failed: $AR $plugin_option rc" >&5
- $as_echo "$as_me: WARNING: Failed: $AR $plugin_option rc" >&2;}
-- else
-- AR="$AR $plugin_option"
-- fi
-- rm -f conftest.*
-+ else
-+ AR="$AR $plugin_option"
- fi
--fi
--test -z "$AR_FLAGS" && AR_FLAGS=cru
-+ rm -f conftest.*
-+: ${AR=ar}
-+: ${AR_FLAGS=cru}
-
-
-
-@@ -6630,6 +6886,64 @@ test -z "$AR_FLAGS" && AR_FLAGS=cru
-
-
-
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for archiver @FILE support" >&5
-+$as_echo_n "checking for archiver @FILE support... " >&6; }
-+if ${lt_cv_ar_at_file+:} false; then :
-+ $as_echo_n "(cached) " >&6
-+else
-+ lt_cv_ar_at_file=no
-+ cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-+/* end confdefs.h. */
-+
-+int
-+main ()
-+{
-+
-+ ;
-+ return 0;
-+}
-+_ACEOF
-+if ac_fn_c_try_compile "$LINENO"; then :
-+ echo conftest.$ac_objext > conftest.lst
-+ lt_ar_try='$AR $AR_FLAGS libconftest.a @conftest.lst >&5'
-+ { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$lt_ar_try\""; } >&5
-+ (eval $lt_ar_try) 2>&5
-+ ac_status=$?
-+ $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
-+ test $ac_status = 0; }
-+ if test "$ac_status" -eq 0; then
-+ # Ensure the archiver fails upon bogus file names.
-+ rm -f conftest.$ac_objext libconftest.a
-+ { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$lt_ar_try\""; } >&5
-+ (eval $lt_ar_try) 2>&5
-+ ac_status=$?
-+ $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
-+ test $ac_status = 0; }
-+ if test "$ac_status" -ne 0; then
-+ lt_cv_ar_at_file=@
-+ fi
-+ fi
-+ rm -f conftest.* libconftest.a
-+
-+fi
-+rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
-+
-+fi
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_ar_at_file" >&5
-+$as_echo "$lt_cv_ar_at_file" >&6; }
-+
-+if test "x$lt_cv_ar_at_file" = xno; then
-+ archiver_list_spec=
-+else
-+ archiver_list_spec=$lt_cv_ar_at_file
-+fi
-+
-+
-+
-+
-+
-+
-+
- if test -n "$ac_tool_prefix"; then
- # Extract the first word of "${ac_tool_prefix}strip", so it can be a program name with args.
- set dummy ${ac_tool_prefix}strip; ac_word=$2
-@@ -6969,8 +7283,8 @@ esac
- lt_cv_sys_global_symbol_to_cdecl="sed -n -e 's/^T .* \(.*\)$/extern int \1();/p' -e 's/^$symcode* .* \(.*\)$/extern char \1;/p'"
-
- # Transform an extracted symbol line into symbol name and symbol address
--lt_cv_sys_global_symbol_to_c_name_address="sed -n -e 's/^: \([^ ]*\) $/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"\2\", (void *) \&\2},/p'"
--lt_cv_sys_global_symbol_to_c_name_address_lib_prefix="sed -n -e 's/^: \([^ ]*\) $/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \(lib[^ ]*\)$/ {\"\2\", (void *) \&\2},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"lib\2\", (void *) \&\2},/p'"
-+lt_cv_sys_global_symbol_to_c_name_address="sed -n -e 's/^: \([^ ]*\)[ ]*$/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"\2\", (void *) \&\2},/p'"
-+lt_cv_sys_global_symbol_to_c_name_address_lib_prefix="sed -n -e 's/^: \([^ ]*\)[ ]*$/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \(lib[^ ]*\)$/ {\"\2\", (void *) \&\2},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"lib\2\", (void *) \&\2},/p'"
-
- # Handle CRLF in mingw tool chain
- opt_cr=
-@@ -7006,6 +7320,7 @@ for ac_symprfx in "" "_"; do
- else
- lt_cv_sys_global_symbol_pipe="sed -n -e 's/^.*[ ]\($symcode$symcode*\)[ ][ ]*$ac_symprfx$sympat$opt_cr$/$symxfrm/p'"
- fi
-+ lt_cv_sys_global_symbol_pipe="$lt_cv_sys_global_symbol_pipe | sed '/ __gnu_lto/d'"
-
- # Check to see that the pipe works correctly.
- pipe_works=no
-@@ -7047,6 +7362,18 @@ _LT_EOF
- if $GREP ' nm_test_var$' "$nlist" >/dev/null; then
- if $GREP ' nm_test_func$' "$nlist" >/dev/null; then
- cat <<_LT_EOF > conftest.$ac_ext
-+/* Keep this code in sync between libtool.m4, ltmain, lt_system.h, and tests. */
-+#if defined(_WIN32) || defined(__CYGWIN__) || defined(_WIN32_WCE)
-+/* DATA imports from DLLs on WIN32 con't be const, because runtime
-+ relocations are performed -- see ld's documentation on pseudo-relocs. */
-+# define LT_DLSYM_CONST
-+#elif defined(__osf__)
-+/* This system does not cope well with relocations in const data. */
-+# define LT_DLSYM_CONST
-+#else
-+# define LT_DLSYM_CONST const
-+#endif
-+
- #ifdef __cplusplus
- extern "C" {
- #endif
-@@ -7058,7 +7385,7 @@ _LT_EOF
- cat <<_LT_EOF >> conftest.$ac_ext
-
- /* The mapping between symbol names and symbols. */
--const struct {
-+LT_DLSYM_CONST struct {
- const char *name;
- void *address;
- }
-@@ -7084,8 +7411,8 @@ static const void *lt_preloaded_setup() {
- _LT_EOF
- # Now try linking the two files.
- mv conftest.$ac_objext conftstm.$ac_objext
-- lt_save_LIBS="$LIBS"
-- lt_save_CFLAGS="$CFLAGS"
-+ lt_globsym_save_LIBS=$LIBS
-+ lt_globsym_save_CFLAGS=$CFLAGS
- LIBS="conftstm.$ac_objext"
- CFLAGS="$CFLAGS$lt_prog_compiler_no_builtin_flag"
- if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_link\""; } >&5
-@@ -7095,8 +7422,8 @@ _LT_EOF
- test $ac_status = 0; } && test -s conftest${ac_exeext}; then
- pipe_works=yes
- fi
-- LIBS="$lt_save_LIBS"
-- CFLAGS="$lt_save_CFLAGS"
-+ LIBS=$lt_globsym_save_LIBS
-+ CFLAGS=$lt_globsym_save_CFLAGS
- else
- echo "cannot find nm_test_func in $nlist" >&5
- fi
-@@ -7133,6 +7460,17 @@ else
- $as_echo "ok" >&6; }
- fi
-
-+# Response file support.
-+if test "$lt_cv_nm_interface" = "MS dumpbin"; then
-+ nm_file_list_spec='@'
-+elif $NM --help 2>/dev/null | grep '[@]FILE' >/dev/null; then
-+ nm_file_list_spec='@'
-+fi
-+
-+
-+
-+
-+
-
-
-
-@@ -7149,6 +7487,44 @@ fi
-
-
-
-+
-+
-+
-+
-+
-+
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for sysroot" >&5
-+$as_echo_n "checking for sysroot... " >&6; }
-+
-+# Check whether --with-libtool-sysroot was given.
-+if test "${with_libtool_sysroot+set}" = set; then :
-+ withval=$with_libtool_sysroot;
-+else
-+ with_libtool_sysroot=no
-+fi
-+
-+
-+lt_sysroot=
-+case ${with_libtool_sysroot} in #(
-+ yes)
-+ if test "$GCC" = yes; then
-+ lt_sysroot=`$CC --print-sysroot 2>/dev/null`
-+ fi
-+ ;; #(
-+ /*)
-+ lt_sysroot=`echo "$with_libtool_sysroot" | sed -e "$sed_quote_subst"`
-+ ;; #(
-+ no|'')
-+ ;; #(
-+ *)
-+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: ${with_libtool_sysroot}" >&5
-+$as_echo "${with_libtool_sysroot}" >&6; }
-+ as_fn_error $? "The sysroot must be an absolute path." "$LINENO" 5
-+ ;;
-+esac
-+
-+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: ${lt_sysroot:-no}" >&5
-+$as_echo "${lt_sysroot:-no}" >&6; }
-
-
-
-@@ -7360,6 +7736,123 @@ esac
-
- need_locks="$enable_libtool_lock"
-
-+if test -n "$ac_tool_prefix"; then
-+ # Extract the first word of "${ac_tool_prefix}mt", so it can be a program name with args.
-+set dummy ${ac_tool_prefix}mt; ac_word=$2
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-+$as_echo_n "checking for $ac_word... " >&6; }
-+if ${ac_cv_prog_MANIFEST_TOOL+:} false; then :
-+ $as_echo_n "(cached) " >&6
-+else
-+ if test -n "$MANIFEST_TOOL"; then
-+ ac_cv_prog_MANIFEST_TOOL="$MANIFEST_TOOL" # Let the user override the test.
-+else
-+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-+for as_dir in $PATH
-+do
-+ IFS=$as_save_IFS
-+ test -z "$as_dir" && as_dir=.
-+ for ac_exec_ext in '' $ac_executable_extensions; do
-+ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
-+ ac_cv_prog_MANIFEST_TOOL="${ac_tool_prefix}mt"
-+ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
-+ break 2
-+ fi
-+done
-+ done
-+IFS=$as_save_IFS
-+
-+fi
-+fi
-+MANIFEST_TOOL=$ac_cv_prog_MANIFEST_TOOL
-+if test -n "$MANIFEST_TOOL"; then
-+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $MANIFEST_TOOL" >&5
-+$as_echo "$MANIFEST_TOOL" >&6; }
-+else
-+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-+$as_echo "no" >&6; }
-+fi
-+
-+
-+fi
-+if test -z "$ac_cv_prog_MANIFEST_TOOL"; then
-+ ac_ct_MANIFEST_TOOL=$MANIFEST_TOOL
-+ # Extract the first word of "mt", so it can be a program name with args.
-+set dummy mt; ac_word=$2
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-+$as_echo_n "checking for $ac_word... " >&6; }
-+if ${ac_cv_prog_ac_ct_MANIFEST_TOOL+:} false; then :
-+ $as_echo_n "(cached) " >&6
-+else
-+ if test -n "$ac_ct_MANIFEST_TOOL"; then
-+ ac_cv_prog_ac_ct_MANIFEST_TOOL="$ac_ct_MANIFEST_TOOL" # Let the user override the test.
-+else
-+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-+for as_dir in $PATH
-+do
-+ IFS=$as_save_IFS
-+ test -z "$as_dir" && as_dir=.
-+ for ac_exec_ext in '' $ac_executable_extensions; do
-+ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
-+ ac_cv_prog_ac_ct_MANIFEST_TOOL="mt"
-+ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
-+ break 2
-+ fi
-+done
-+ done
-+IFS=$as_save_IFS
-+
-+fi
-+fi
-+ac_ct_MANIFEST_TOOL=$ac_cv_prog_ac_ct_MANIFEST_TOOL
-+if test -n "$ac_ct_MANIFEST_TOOL"; then
-+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_MANIFEST_TOOL" >&5
-+$as_echo "$ac_ct_MANIFEST_TOOL" >&6; }
-+else
-+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-+$as_echo "no" >&6; }
-+fi
-+
-+ if test "x$ac_ct_MANIFEST_TOOL" = x; then
-+ MANIFEST_TOOL=":"
-+ else
-+ case $cross_compiling:$ac_tool_warned in
-+yes:)
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
-+$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
-+ac_tool_warned=yes ;;
-+esac
-+ MANIFEST_TOOL=$ac_ct_MANIFEST_TOOL
-+ fi
-+else
-+ MANIFEST_TOOL="$ac_cv_prog_MANIFEST_TOOL"
-+fi
-+
-+test -z "$MANIFEST_TOOL" && MANIFEST_TOOL=mt
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking if $MANIFEST_TOOL is a manifest tool" >&5
-+$as_echo_n "checking if $MANIFEST_TOOL is a manifest tool... " >&6; }
-+if ${lt_cv_path_mainfest_tool+:} false; then :
-+ $as_echo_n "(cached) " >&6
-+else
-+ lt_cv_path_mainfest_tool=no
-+ echo "$as_me:$LINENO: $MANIFEST_TOOL '-?'" >&5
-+ $MANIFEST_TOOL '-?' 2>conftest.err > conftest.out
-+ cat conftest.err >&5
-+ if $GREP 'Manifest Tool' conftest.out > /dev/null; then
-+ lt_cv_path_mainfest_tool=yes
-+ fi
-+ rm -f conftest*
-+fi
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_path_mainfest_tool" >&5
-+$as_echo "$lt_cv_path_mainfest_tool" >&6; }
-+if test "x$lt_cv_path_mainfest_tool" != xyes; then
-+ MANIFEST_TOOL=:
-+fi
-+
-+
-+
-+
-+
-
- case $host_os in
- rhapsody* | darwin*)
-@@ -7923,6 +8416,8 @@ _LT_EOF
- $LTCC $LTCFLAGS -c -o conftest.o conftest.c 2>&5
- echo "$AR cru libconftest.a conftest.o" >&5
- $AR cru libconftest.a conftest.o 2>&5
-+ echo "$RANLIB libconftest.a" >&5
-+ $RANLIB libconftest.a 2>&5
- cat > conftest.c << _LT_EOF
- int main() { return 0;}
- _LT_EOF
-@@ -7991,6 +8486,16 @@ done
-
-
-
-+func_stripname_cnf ()
-+{
-+ case ${2} in
-+ .*) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%\\\\${2}\$%%"`;;
-+ *) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%${2}\$%%"`;;
-+ esac
-+} # func_stripname_cnf
-+
-+
-+
-
-
- # Set options
-@@ -8506,8 +9011,6 @@ fi
- lt_prog_compiler_pic=
- lt_prog_compiler_static=
-
--{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $compiler option to produce PIC" >&5
--$as_echo_n "checking for $compiler option to produce PIC... " >&6; }
-
- if test "$GCC" = yes; then
- lt_prog_compiler_wl='-Wl,'
-@@ -8673,6 +9176,12 @@ $as_echo_n "checking for $compiler option to produce PIC... " >&6; }
- lt_prog_compiler_pic='--shared'
- lt_prog_compiler_static='--static'
- ;;
-+ nagfor*)
-+ # NAG Fortran compiler
-+ lt_prog_compiler_wl='-Wl,-Wl,,'
-+ lt_prog_compiler_pic='-PIC'
-+ lt_prog_compiler_static='-Bstatic'
-+ ;;
- pgcc* | pgf77* | pgf90* | pgf95* | pgfortran*)
- # Portland Group compilers (*not* the Pentium gcc compiler,
- # which looks to be a dead project)
-@@ -8735,7 +9244,7 @@ $as_echo_n "checking for $compiler option to produce PIC... " >&6; }
- lt_prog_compiler_pic='-KPIC'
- lt_prog_compiler_static='-Bstatic'
- case $cc_basename in
-- f77* | f90* | f95*)
-+ f77* | f90* | f95* | sunf77* | sunf90* | sunf95*)
- lt_prog_compiler_wl='-Qoption ld ';;
- *)
- lt_prog_compiler_wl='-Wl,';;
-@@ -8792,13 +9301,17 @@ case $host_os in
- lt_prog_compiler_pic="$lt_prog_compiler_pic -DPIC"
- ;;
- esac
--{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_prog_compiler_pic" >&5
--$as_echo "$lt_prog_compiler_pic" >&6; }
--
--
--
--
-
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $compiler option to produce PIC" >&5
-+$as_echo_n "checking for $compiler option to produce PIC... " >&6; }
-+if ${lt_cv_prog_compiler_pic+:} false; then :
-+ $as_echo_n "(cached) " >&6
-+else
-+ lt_cv_prog_compiler_pic=$lt_prog_compiler_pic
-+fi
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_pic" >&5
-+$as_echo "$lt_cv_prog_compiler_pic" >&6; }
-+lt_prog_compiler_pic=$lt_cv_prog_compiler_pic
-
- #
- # Check to make sure the PIC flag actually works.
-@@ -8859,6 +9372,11 @@ fi
-
-
-
-+
-+
-+
-+
-+
- #
- # Check to make sure the static flag actually works.
- #
-@@ -9209,7 +9727,8 @@ _LT_EOF
- allow_undefined_flag=unsupported
- always_export_symbols=no
- enable_shared_with_static_runtimes=yes
-- export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1 DATA/'\'' | $SED -e '\''/^[AITW][ ]/s/.*[ ]//'\'' | sort | uniq > $export_symbols'
-+ export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1 DATA/;s/^.*[ ]__nm__\([^ ]*\)[ ][^ ]*/\1 DATA/;/^I[ ]/d;/^[AITW][ ]/s/.* //'\'' | sort | uniq > $export_symbols'
-+ exclude_expsyms='[_]+GLOBAL_OFFSET_TABLE_|[_]+GLOBAL__[FID]_.*|[_]+head_[A-Za-z0-9_]+_dll|[A-Za-z0-9_]+_dll_iname'
-
- if $LD --help 2>&1 | $GREP 'auto-import' > /dev/null; then
- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib'
-@@ -9308,12 +9827,12 @@ _LT_EOF
- whole_archive_flag_spec='--whole-archive$convenience --no-whole-archive'
- hardcode_libdir_flag_spec=
- hardcode_libdir_flag_spec_ld='-rpath $libdir'
-- archive_cmds='$LD -shared $libobjs $deplibs $compiler_flags -soname $soname -o $lib'
-+ archive_cmds='$LD -shared $libobjs $deplibs $linker_flags -soname $soname -o $lib'
- if test "x$supports_anon_versioning" = xyes; then
- archive_expsym_cmds='echo "{ global:" > $output_objdir/$libname.ver~
- cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~
- echo "local: *; };" >> $output_objdir/$libname.ver~
-- $LD -shared $libobjs $deplibs $compiler_flags -soname $soname -version-script $output_objdir/$libname.ver -o $lib'
-+ $LD -shared $libobjs $deplibs $linker_flags -soname $soname -version-script $output_objdir/$libname.ver -o $lib'
- fi
- ;;
- esac
-@@ -9327,8 +9846,8 @@ _LT_EOF
- archive_cmds='$LD -Bshareable $libobjs $deplibs $linker_flags -o $lib'
- wlarc=
- else
-- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
-- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
-+ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
-+ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
- fi
- ;;
-
-@@ -9346,8 +9865,8 @@ _LT_EOF
-
- _LT_EOF
- elif $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then
-- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
-- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
-+ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
-+ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
- else
- ld_shlibs=no
- fi
-@@ -9393,8 +9912,8 @@ _LT_EOF
-
- *)
- if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then
-- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
-- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
-+ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
-+ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
- else
- ld_shlibs=no
- fi
-@@ -9524,7 +10043,13 @@ _LT_EOF
- allow_undefined_flag='-berok'
- # Determine the default libpath from the value encoded in an
- # empty executable.
-- cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-+ if test "${lt_cv_aix_libpath+set}" = set; then
-+ aix_libpath=$lt_cv_aix_libpath
-+else
-+ if ${lt_cv_aix_libpath_+:} false; then :
-+ $as_echo_n "(cached) " >&6
-+else
-+ cat confdefs.h - <<_ACEOF >conftest.$ac_ext
- /* end confdefs.h. */
-
- int
-@@ -9537,22 +10062,29 @@ main ()
- _ACEOF
- if ac_fn_c_try_link "$LINENO"; then :
-
--lt_aix_libpath_sed='
-- /Import File Strings/,/^$/ {
-- /^0/ {
-- s/^0 *\(.*\)$/\1/
-- p
-- }
-- }'
--aix_libpath=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
--# Check for a 64-bit object if we didn't find anything.
--if test -z "$aix_libpath"; then
-- aix_libpath=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
--fi
-+ lt_aix_libpath_sed='
-+ /Import File Strings/,/^$/ {
-+ /^0/ {
-+ s/^0 *\([^ ]*\) *$/\1/
-+ p
-+ }
-+ }'
-+ lt_cv_aix_libpath_=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
-+ # Check for a 64-bit object if we didn't find anything.
-+ if test -z "$lt_cv_aix_libpath_"; then
-+ lt_cv_aix_libpath_=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
-+ fi
- fi
- rm -f core conftest.err conftest.$ac_objext \
- conftest$ac_exeext conftest.$ac_ext
--if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
-+ if test -z "$lt_cv_aix_libpath_"; then
-+ lt_cv_aix_libpath_="/usr/lib:/lib"
-+ fi
-+
-+fi
-+
-+ aix_libpath=$lt_cv_aix_libpath_
-+fi
-
- hardcode_libdir_flag_spec='${wl}-blibpath:$libdir:'"$aix_libpath"
- archive_expsym_cmds='$CC -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags `if test "x${allow_undefined_flag}" != "x"; then func_echo_all "${wl}${allow_undefined_flag}"; else :; fi` '"\${wl}$exp_sym_flag:\$export_symbols $shared_flag"
-@@ -9564,7 +10096,13 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
- else
- # Determine the default libpath from the value encoded in an
- # empty executable.
-- cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-+ if test "${lt_cv_aix_libpath+set}" = set; then
-+ aix_libpath=$lt_cv_aix_libpath
-+else
-+ if ${lt_cv_aix_libpath_+:} false; then :
-+ $as_echo_n "(cached) " >&6
-+else
-+ cat confdefs.h - <<_ACEOF >conftest.$ac_ext
- /* end confdefs.h. */
-
- int
-@@ -9577,22 +10115,29 @@ main ()
- _ACEOF
- if ac_fn_c_try_link "$LINENO"; then :
-
--lt_aix_libpath_sed='
-- /Import File Strings/,/^$/ {
-- /^0/ {
-- s/^0 *\(.*\)$/\1/
-- p
-- }
-- }'
--aix_libpath=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
--# Check for a 64-bit object if we didn't find anything.
--if test -z "$aix_libpath"; then
-- aix_libpath=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
--fi
-+ lt_aix_libpath_sed='
-+ /Import File Strings/,/^$/ {
-+ /^0/ {
-+ s/^0 *\([^ ]*\) *$/\1/
-+ p
-+ }
-+ }'
-+ lt_cv_aix_libpath_=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
-+ # Check for a 64-bit object if we didn't find anything.
-+ if test -z "$lt_cv_aix_libpath_"; then
-+ lt_cv_aix_libpath_=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
-+ fi
- fi
- rm -f core conftest.err conftest.$ac_objext \
- conftest$ac_exeext conftest.$ac_ext
--if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
-+ if test -z "$lt_cv_aix_libpath_"; then
-+ lt_cv_aix_libpath_="/usr/lib:/lib"
-+ fi
-+
-+fi
-+
-+ aix_libpath=$lt_cv_aix_libpath_
-+fi
-
- hardcode_libdir_flag_spec='${wl}-blibpath:$libdir:'"$aix_libpath"
- # Warning - without using the other run time loading flags,
-@@ -9636,21 +10181,64 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
- # When not using gcc, we currently assume that we are using
- # Microsoft Visual C++.
- # hardcode_libdir_flag_spec is actually meaningless, as there is
-- # no search path for DLLs.
-- hardcode_libdir_flag_spec=' '
-- allow_undefined_flag=unsupported
-- # Tell ltmain to make .lib files, not .a files.
-- libext=lib
-- # Tell ltmain to make .dll files, not .so files.
-- shrext_cmds=".dll"
-- # FIXME: Setting linknames here is a bad hack.
-- archive_cmds='$CC -o $lib $libobjs $compiler_flags `func_echo_all "$deplibs" | $SED '\''s/ -lc$//'\''` -link -dll~linknames='
-- # The linker will automatically build a .lib file if we build a DLL.
-- old_archive_from_new_cmds='true'
-- # FIXME: Should let the user specify the lib program.
-- old_archive_cmds='lib -OUT:$oldlib$oldobjs$old_deplibs'
-- fix_srcfile_path='`cygpath -w "$srcfile"`'
-- enable_shared_with_static_runtimes=yes
-+ # no search path for DLLs.
-+ case $cc_basename in
-+ cl*)
-+ # Native MSVC
-+ hardcode_libdir_flag_spec=' '
-+ allow_undefined_flag=unsupported
-+ always_export_symbols=yes
-+ file_list_spec='@'
-+ # Tell ltmain to make .lib files, not .a files.
-+ libext=lib
-+ # Tell ltmain to make .dll files, not .so files.
-+ shrext_cmds=".dll"
-+ # FIXME: Setting linknames here is a bad hack.
-+ archive_cmds='$CC -o $output_objdir/$soname $libobjs $compiler_flags $deplibs -Wl,-dll~linknames='
-+ archive_expsym_cmds='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then
-+ sed -n -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' -e '1\\\!p' < $export_symbols > $output_objdir/$soname.exp;
-+ else
-+ sed -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' < $export_symbols > $output_objdir/$soname.exp;
-+ fi~
-+ $CC -o $tool_output_objdir$soname $libobjs $compiler_flags $deplibs "@$tool_output_objdir$soname.exp" -Wl,-DLL,-IMPLIB:"$tool_output_objdir$libname.dll.lib"~
-+ linknames='
-+ # The linker will not automatically build a static lib if we build a DLL.
-+ # _LT_TAGVAR(old_archive_from_new_cmds, )='true'
-+ enable_shared_with_static_runtimes=yes
-+ export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1,DATA/'\'' | $SED -e '\''/^[AITW][ ]/s/.*[ ]//'\'' | sort | uniq > $export_symbols'
-+ # Don't use ranlib
-+ old_postinstall_cmds='chmod 644 $oldlib'
-+ postlink_cmds='lt_outputfile="@OUTPUT@"~
-+ lt_tool_outputfile="@TOOL_OUTPUT@"~
-+ case $lt_outputfile in
-+ *.exe|*.EXE) ;;
-+ *)
-+ lt_outputfile="$lt_outputfile.exe"
-+ lt_tool_outputfile="$lt_tool_outputfile.exe"
-+ ;;
-+ esac~
-+ if test "$MANIFEST_TOOL" != ":" && test -f "$lt_outputfile.manifest"; then
-+ $MANIFEST_TOOL -manifest "$lt_tool_outputfile.manifest" -outputresource:"$lt_tool_outputfile" || exit 1;
-+ $RM "$lt_outputfile.manifest";
-+ fi'
-+ ;;
-+ *)
-+ # Assume MSVC wrapper
-+ hardcode_libdir_flag_spec=' '
-+ allow_undefined_flag=unsupported
-+ # Tell ltmain to make .lib files, not .a files.
-+ libext=lib
-+ # Tell ltmain to make .dll files, not .so files.
-+ shrext_cmds=".dll"
-+ # FIXME: Setting linknames here is a bad hack.
-+ archive_cmds='$CC -o $lib $libobjs $compiler_flags `func_echo_all "$deplibs" | $SED '\''s/ -lc$//'\''` -link -dll~linknames='
-+ # The linker will automatically build a .lib file if we build a DLL.
-+ old_archive_from_new_cmds='true'
-+ # FIXME: Should let the user specify the lib program.
-+ old_archive_cmds='lib -OUT:$oldlib$oldobjs$old_deplibs'
-+ enable_shared_with_static_runtimes=yes
-+ ;;
-+ esac
- ;;
-
- darwin* | rhapsody*)
-@@ -9711,7 +10299,7 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
-
- # FreeBSD 3 and greater uses gcc -shared to do shared libraries.
- freebsd* | dragonfly*)
-- archive_cmds='$CC -shared -o $lib $libobjs $deplibs $compiler_flags'
-+ archive_cmds='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags'
- hardcode_libdir_flag_spec='-R$libdir'
- hardcode_direct=yes
- hardcode_shlibpath_var=no
-@@ -9719,7 +10307,7 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
-
- hpux9*)
- if test "$GCC" = yes; then
-- archive_cmds='$RM $output_objdir/$soname~$CC -shared -fPIC ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $libobjs $deplibs $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib'
-+ archive_cmds='$RM $output_objdir/$soname~$CC -shared $pic_flag ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $libobjs $deplibs $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib'
- else
- archive_cmds='$RM $output_objdir/$soname~$LD -b +b $install_libdir -o $output_objdir/$soname $libobjs $deplibs $linker_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib'
- fi
-@@ -9735,7 +10323,7 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
-
- hpux10*)
- if test "$GCC" = yes && test "$with_gnu_ld" = no; then
-- archive_cmds='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'
-+ archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'
- else
- archive_cmds='$LD -b +h $soname +b $install_libdir -o $lib $libobjs $deplibs $linker_flags'
- fi
-@@ -9759,10 +10347,10 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
- archive_cmds='$CC -shared ${wl}+h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags'
- ;;
- ia64*)
-- archive_cmds='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags'
-+ archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags'
- ;;
- *)
-- archive_cmds='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'
-+ archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'
- ;;
- esac
- else
-@@ -9841,23 +10429,36 @@ fi
-
- irix5* | irix6* | nonstopux*)
- if test "$GCC" = yes; then
-- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
-+ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
- # Try to use the -exported_symbol ld option, if it does not
- # work, assume that -exports_file does not work either and
- # implicitly export all symbols.
-- save_LDFLAGS="$LDFLAGS"
-- LDFLAGS="$LDFLAGS -shared ${wl}-exported_symbol ${wl}foo ${wl}-update_registry ${wl}/dev/null"
-- cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-+ # This should be the same for all languages, so no per-tag cache variable.
-+ { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the $host_os linker accepts -exported_symbol" >&5
-+$as_echo_n "checking whether the $host_os linker accepts -exported_symbol... " >&6; }
-+if ${lt_cv_irix_exported_symbol+:} false; then :
-+ $as_echo_n "(cached) " >&6
-+else
-+ save_LDFLAGS="$LDFLAGS"
-+ LDFLAGS="$LDFLAGS -shared ${wl}-exported_symbol ${wl}foo ${wl}-update_registry ${wl}/dev/null"
-+ cat confdefs.h - <<_ACEOF >conftest.$ac_ext
- /* end confdefs.h. */
--int foo(void) {}
-+int foo (void) { return 0; }
- _ACEOF
- if ac_fn_c_try_link "$LINENO"; then :
-- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations ${wl}-exports_file ${wl}$export_symbols -o $lib'
--
-+ lt_cv_irix_exported_symbol=yes
-+else
-+ lt_cv_irix_exported_symbol=no
- fi
- rm -f core conftest.err conftest.$ac_objext \
- conftest$ac_exeext conftest.$ac_ext
-- LDFLAGS="$save_LDFLAGS"
-+ LDFLAGS="$save_LDFLAGS"
-+fi
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_irix_exported_symbol" >&5
-+$as_echo "$lt_cv_irix_exported_symbol" >&6; }
-+ if test "$lt_cv_irix_exported_symbol" = yes; then
-+ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations ${wl}-exports_file ${wl}$export_symbols -o $lib'
-+ fi
- else
- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib'
- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -exports_file $export_symbols -o $lib'
-@@ -9942,7 +10543,7 @@ rm -f core conftest.err conftest.$ac_objext \
- osf4* | osf5*) # as osf3* with the addition of -msym flag
- if test "$GCC" = yes; then
- allow_undefined_flag=' ${wl}-expect_unresolved ${wl}\*'
-- archive_cmds='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
-+ archive_cmds='$CC -shared${allow_undefined_flag} $pic_flag $libobjs $deplibs $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
- hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir'
- else
- allow_undefined_flag=' -expect_unresolved \*'
-@@ -9961,9 +10562,9 @@ rm -f core conftest.err conftest.$ac_objext \
- no_undefined_flag=' -z defs'
- if test "$GCC" = yes; then
- wlarc='${wl}'
-- archive_cmds='$CC -shared ${wl}-z ${wl}text ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags'
-+ archive_cmds='$CC -shared $pic_flag ${wl}-z ${wl}text ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags'
- archive_expsym_cmds='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~
-- $CC -shared ${wl}-z ${wl}text ${wl}-M ${wl}$lib.exp ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp'
-+ $CC -shared $pic_flag ${wl}-z ${wl}text ${wl}-M ${wl}$lib.exp ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp'
- else
- case `$CC -V 2>&1` in
- *"Compilers 5.0"*)
-@@ -10539,8 +11140,9 @@ cygwin* | mingw* | pw32* | cegcc*)
- need_version=no
- need_lib_prefix=no
-
-- case $GCC,$host_os in
-- yes,cygwin* | yes,mingw* | yes,pw32* | yes,cegcc*)
-+ case $GCC,$cc_basename in
-+ yes,*)
-+ # gcc
- library_names_spec='$libname.dll.a'
- # DLL is installed to $(libdir)/../bin by postinstall_cmds
- postinstall_cmds='base_file=`basename \${file}`~
-@@ -10573,13 +11175,71 @@ cygwin* | mingw* | pw32* | cegcc*)
- library_names_spec='`echo ${libname} | sed -e 's/^lib/pw/'``echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}'
- ;;
- esac
-+ dynamic_linker='Win32 ld.exe'
-+ ;;
-+
-+ *,cl*)
-+ # Native MSVC
-+ libname_spec='$name'
-+ soname_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}'
-+ library_names_spec='${libname}.dll.lib'
-+
-+ case $build_os in
-+ mingw*)
-+ sys_lib_search_path_spec=
-+ lt_save_ifs=$IFS
-+ IFS=';'
-+ for lt_path in $LIB
-+ do
-+ IFS=$lt_save_ifs
-+ # Let DOS variable expansion print the short 8.3 style file name.
-+ lt_path=`cd "$lt_path" 2>/dev/null && cmd //C "for %i in (".") do @echo %~si"`
-+ sys_lib_search_path_spec="$sys_lib_search_path_spec $lt_path"
-+ done
-+ IFS=$lt_save_ifs
-+ # Convert to MSYS style.
-+ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | sed -e 's|\\\\|/|g' -e 's| \\([a-zA-Z]\\):| /\\1|g' -e 's|^ ||'`
-+ ;;
-+ cygwin*)
-+ # Convert to unix form, then to dos form, then back to unix form
-+ # but this time dos style (no spaces!) so that the unix form looks
-+ # like /cygdrive/c/PROGRA~1:/cygdr...
-+ sys_lib_search_path_spec=`cygpath --path --unix "$LIB"`
-+ sys_lib_search_path_spec=`cygpath --path --dos "$sys_lib_search_path_spec" 2>/dev/null`
-+ sys_lib_search_path_spec=`cygpath --path --unix "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"`
-+ ;;
-+ *)
-+ sys_lib_search_path_spec="$LIB"
-+ if $ECHO "$sys_lib_search_path_spec" | $GREP ';[c-zC-Z]:/' >/dev/null; then
-+ # It is most probably a Windows format PATH.
-+ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e 's/;/ /g'`
-+ else
-+ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"`
-+ fi
-+ # FIXME: find the short name or the path components, as spaces are
-+ # common. (e.g. "Program Files" -> "PROGRA~1")
-+ ;;
-+ esac
-+
-+ # DLL is installed to $(libdir)/../bin by postinstall_cmds
-+ postinstall_cmds='base_file=`basename \${file}`~
-+ dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\${base_file}'\''i; echo \$dlname'\''`~
-+ dldir=$destdir/`dirname \$dlpath`~
-+ test -d \$dldir || mkdir -p \$dldir~
-+ $install_prog $dir/$dlname \$dldir/$dlname'
-+ postuninstall_cmds='dldll=`$SHELL 2>&1 -c '\''. $file; echo \$dlname'\''`~
-+ dlpath=$dir/\$dldll~
-+ $RM \$dlpath'
-+ shlibpath_overrides_runpath=yes
-+ dynamic_linker='Win32 link.exe'
- ;;
-
- *)
-+ # Assume MSVC wrapper
- library_names_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext} $libname.lib'
-+ dynamic_linker='Win32 ld.exe'
- ;;
- esac
-- dynamic_linker='Win32 ld.exe'
- # FIXME: first we should search . and the directory the executable is in
- shlibpath_var=PATH
- ;;
-@@ -11457,7 +12117,7 @@ else
- lt_dlunknown=0; lt_dlno_uscore=1; lt_dlneed_uscore=2
- lt_status=$lt_dlunknown
- cat > conftest.$ac_ext <<_LT_EOF
--#line 11457 "configure"
-+#line $LINENO "configure"
- #include "confdefs.h"
-
- #if HAVE_DLFCN_H
-@@ -11501,10 +12161,10 @@ else
- /* When -fvisbility=hidden is used, assume the code has been annotated
- correspondingly for the symbols needed. */
- #if defined(__GNUC__) && (((__GNUC__ == 3) && (__GNUC_MINOR__ >= 3)) || (__GNUC__ > 3))
--void fnord () __attribute__((visibility("default")));
-+int fnord () __attribute__((visibility("default")));
- #endif
-
--void fnord () { int i=42; }
-+int fnord () { return 42; }
- int main ()
- {
- void *self = dlopen (0, LT_DLGLOBAL|LT_DLLAZY_OR_NOW);
-@@ -11563,7 +12223,7 @@ else
- lt_dlunknown=0; lt_dlno_uscore=1; lt_dlneed_uscore=2
- lt_status=$lt_dlunknown
- cat > conftest.$ac_ext <<_LT_EOF
--#line 11563 "configure"
-+#line $LINENO "configure"
- #include "confdefs.h"
-
- #if HAVE_DLFCN_H
-@@ -11607,10 +12267,10 @@ else
- /* When -fvisbility=hidden is used, assume the code has been annotated
- correspondingly for the symbols needed. */
- #if defined(__GNUC__) && (((__GNUC__ == 3) && (__GNUC_MINOR__ >= 3)) || (__GNUC__ > 3))
--void fnord () __attribute__((visibility("default")));
-+int fnord () __attribute__((visibility("default")));
- #endif
-
--void fnord () { int i=42; }
-+int fnord () { return 42; }
- int main ()
- {
- void *self = dlopen (0, LT_DLGLOBAL|LT_DLLAZY_OR_NOW);
-@@ -12002,6 +12662,7 @@ $RM -r conftest*
-
- # Allow CC to be a program name with arguments.
- lt_save_CC=$CC
-+ lt_save_CFLAGS=$CFLAGS
- lt_save_LD=$LD
- lt_save_GCC=$GCC
- GCC=$GXX
-@@ -12019,6 +12680,7 @@ $RM -r conftest*
- fi
- test -z "${LDCXX+set}" || LD=$LDCXX
- CC=${CXX-"c++"}
-+ CFLAGS=$CXXFLAGS
- compiler=$CC
- compiler_CXX=$CC
- for cc_temp in $compiler""; do
-@@ -12301,7 +12963,13 @@ $as_echo_n "checking whether the $compiler linker ($LD) supports shared librarie
- allow_undefined_flag_CXX='-berok'
- # Determine the default libpath from the value encoded in an empty
- # executable.
-- cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-+ if test "${lt_cv_aix_libpath+set}" = set; then
-+ aix_libpath=$lt_cv_aix_libpath
-+else
-+ if ${lt_cv_aix_libpath__CXX+:} false; then :
-+ $as_echo_n "(cached) " >&6
-+else
-+ cat confdefs.h - <<_ACEOF >conftest.$ac_ext
- /* end confdefs.h. */
-
- int
-@@ -12314,22 +12982,29 @@ main ()
- _ACEOF
- if ac_fn_cxx_try_link "$LINENO"; then :
-
--lt_aix_libpath_sed='
-- /Import File Strings/,/^$/ {
-- /^0/ {
-- s/^0 *\(.*\)$/\1/
-- p
-- }
-- }'
--aix_libpath=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
--# Check for a 64-bit object if we didn't find anything.
--if test -z "$aix_libpath"; then
-- aix_libpath=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
--fi
-+ lt_aix_libpath_sed='
-+ /Import File Strings/,/^$/ {
-+ /^0/ {
-+ s/^0 *\([^ ]*\) *$/\1/
-+ p
-+ }
-+ }'
-+ lt_cv_aix_libpath__CXX=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
-+ # Check for a 64-bit object if we didn't find anything.
-+ if test -z "$lt_cv_aix_libpath__CXX"; then
-+ lt_cv_aix_libpath__CXX=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
-+ fi
- fi
- rm -f core conftest.err conftest.$ac_objext \
- conftest$ac_exeext conftest.$ac_ext
--if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
-+ if test -z "$lt_cv_aix_libpath__CXX"; then
-+ lt_cv_aix_libpath__CXX="/usr/lib:/lib"
-+ fi
-+
-+fi
-+
-+ aix_libpath=$lt_cv_aix_libpath__CXX
-+fi
-
- hardcode_libdir_flag_spec_CXX='${wl}-blibpath:$libdir:'"$aix_libpath"
-
-@@ -12342,7 +13017,13 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
- else
- # Determine the default libpath from the value encoded in an
- # empty executable.
-- cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-+ if test "${lt_cv_aix_libpath+set}" = set; then
-+ aix_libpath=$lt_cv_aix_libpath
-+else
-+ if ${lt_cv_aix_libpath__CXX+:} false; then :
-+ $as_echo_n "(cached) " >&6
-+else
-+ cat confdefs.h - <<_ACEOF >conftest.$ac_ext
- /* end confdefs.h. */
-
- int
-@@ -12355,22 +13036,29 @@ main ()
- _ACEOF
- if ac_fn_cxx_try_link "$LINENO"; then :
-
--lt_aix_libpath_sed='
-- /Import File Strings/,/^$/ {
-- /^0/ {
-- s/^0 *\(.*\)$/\1/
-- p
-- }
-- }'
--aix_libpath=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
--# Check for a 64-bit object if we didn't find anything.
--if test -z "$aix_libpath"; then
-- aix_libpath=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
--fi
-+ lt_aix_libpath_sed='
-+ /Import File Strings/,/^$/ {
-+ /^0/ {
-+ s/^0 *\([^ ]*\) *$/\1/
-+ p
-+ }
-+ }'
-+ lt_cv_aix_libpath__CXX=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
-+ # Check for a 64-bit object if we didn't find anything.
-+ if test -z "$lt_cv_aix_libpath__CXX"; then
-+ lt_cv_aix_libpath__CXX=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
-+ fi
- fi
- rm -f core conftest.err conftest.$ac_objext \
- conftest$ac_exeext conftest.$ac_ext
--if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
-+ if test -z "$lt_cv_aix_libpath__CXX"; then
-+ lt_cv_aix_libpath__CXX="/usr/lib:/lib"
-+ fi
-+
-+fi
-+
-+ aix_libpath=$lt_cv_aix_libpath__CXX
-+fi
-
- hardcode_libdir_flag_spec_CXX='${wl}-blibpath:$libdir:'"$aix_libpath"
- # Warning - without using the other run time loading flags,
-@@ -12413,29 +13101,75 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
- ;;
-
- cygwin* | mingw* | pw32* | cegcc*)
-- # _LT_TAGVAR(hardcode_libdir_flag_spec, CXX) is actually meaningless,
-- # as there is no search path for DLLs.
-- hardcode_libdir_flag_spec_CXX='-L$libdir'
-- export_dynamic_flag_spec_CXX='${wl}--export-all-symbols'
-- allow_undefined_flag_CXX=unsupported
-- always_export_symbols_CXX=no
-- enable_shared_with_static_runtimes_CXX=yes
--
-- if $LD --help 2>&1 | $GREP 'auto-import' > /dev/null; then
-- archive_cmds_CXX='$CC -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib'
-- # If the export-symbols file already is a .def file (1st line
-- # is EXPORTS), use it as is; otherwise, prepend...
-- archive_expsym_cmds_CXX='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then
-- cp $export_symbols $output_objdir/$soname.def;
-- else
-- echo EXPORTS > $output_objdir/$soname.def;
-- cat $export_symbols >> $output_objdir/$soname.def;
-- fi~
-- $CC -shared -nostdlib $output_objdir/$soname.def $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib'
-- else
-- ld_shlibs_CXX=no
-- fi
-- ;;
-+ case $GXX,$cc_basename in
-+ ,cl* | no,cl*)
-+ # Native MSVC
-+ # hardcode_libdir_flag_spec is actually meaningless, as there is
-+ # no search path for DLLs.
-+ hardcode_libdir_flag_spec_CXX=' '
-+ allow_undefined_flag_CXX=unsupported
-+ always_export_symbols_CXX=yes
-+ file_list_spec_CXX='@'
-+ # Tell ltmain to make .lib files, not .a files.
-+ libext=lib
-+ # Tell ltmain to make .dll files, not .so files.
-+ shrext_cmds=".dll"
-+ # FIXME: Setting linknames here is a bad hack.
-+ archive_cmds_CXX='$CC -o $output_objdir/$soname $libobjs $compiler_flags $deplibs -Wl,-dll~linknames='
-+ archive_expsym_cmds_CXX='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then
-+ $SED -n -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' -e '1\\\!p' < $export_symbols > $output_objdir/$soname.exp;
-+ else
-+ $SED -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' < $export_symbols > $output_objdir/$soname.exp;
-+ fi~
-+ $CC -o $tool_output_objdir$soname $libobjs $compiler_flags $deplibs "@$tool_output_objdir$soname.exp" -Wl,-DLL,-IMPLIB:"$tool_output_objdir$libname.dll.lib"~
-+ linknames='
-+ # The linker will not automatically build a static lib if we build a DLL.
-+ # _LT_TAGVAR(old_archive_from_new_cmds, CXX)='true'
-+ enable_shared_with_static_runtimes_CXX=yes
-+ # Don't use ranlib
-+ old_postinstall_cmds_CXX='chmod 644 $oldlib'
-+ postlink_cmds_CXX='lt_outputfile="@OUTPUT@"~
-+ lt_tool_outputfile="@TOOL_OUTPUT@"~
-+ case $lt_outputfile in
-+ *.exe|*.EXE) ;;
-+ *)
-+ lt_outputfile="$lt_outputfile.exe"
-+ lt_tool_outputfile="$lt_tool_outputfile.exe"
-+ ;;
-+ esac~
-+ func_to_tool_file "$lt_outputfile"~
-+ if test "$MANIFEST_TOOL" != ":" && test -f "$lt_outputfile.manifest"; then
-+ $MANIFEST_TOOL -manifest "$lt_tool_outputfile.manifest" -outputresource:"$lt_tool_outputfile" || exit 1;
-+ $RM "$lt_outputfile.manifest";
-+ fi'
-+ ;;
-+ *)
-+ # g++
-+ # _LT_TAGVAR(hardcode_libdir_flag_spec, CXX) is actually meaningless,
-+ # as there is no search path for DLLs.
-+ hardcode_libdir_flag_spec_CXX='-L$libdir'
-+ export_dynamic_flag_spec_CXX='${wl}--export-all-symbols'
-+ allow_undefined_flag_CXX=unsupported
-+ always_export_symbols_CXX=no
-+ enable_shared_with_static_runtimes_CXX=yes
-+
-+ if $LD --help 2>&1 | $GREP 'auto-import' > /dev/null; then
-+ archive_cmds_CXX='$CC -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib'
-+ # If the export-symbols file already is a .def file (1st line
-+ # is EXPORTS), use it as is; otherwise, prepend...
-+ archive_expsym_cmds_CXX='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then
-+ cp $export_symbols $output_objdir/$soname.def;
-+ else
-+ echo EXPORTS > $output_objdir/$soname.def;
-+ cat $export_symbols >> $output_objdir/$soname.def;
-+ fi~
-+ $CC -shared -nostdlib $output_objdir/$soname.def $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib'
-+ else
-+ ld_shlibs_CXX=no
-+ fi
-+ ;;
-+ esac
-+ ;;
- darwin* | rhapsody*)
-
-
-@@ -12541,7 +13275,7 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
- ;;
- *)
- if test "$GXX" = yes; then
-- archive_cmds_CXX='$RM $output_objdir/$soname~$CC -shared -nostdlib -fPIC ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib'
-+ archive_cmds_CXX='$RM $output_objdir/$soname~$CC -shared -nostdlib $pic_flag ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib'
- else
- # FIXME: insert proper C++ library support
- ld_shlibs_CXX=no
-@@ -12612,10 +13346,10 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
- archive_cmds_CXX='$CC -shared -nostdlib -fPIC ${wl}+h ${wl}$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags'
- ;;
- ia64*)
-- archive_cmds_CXX='$CC -shared -nostdlib -fPIC ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags'
-+ archive_cmds_CXX='$CC -shared -nostdlib $pic_flag ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags'
- ;;
- *)
-- archive_cmds_CXX='$CC -shared -nostdlib -fPIC ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags'
-+ archive_cmds_CXX='$CC -shared -nostdlib $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags'
- ;;
- esac
- fi
-@@ -12656,9 +13390,9 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
- *)
- if test "$GXX" = yes; then
- if test "$with_gnu_ld" = no; then
-- archive_cmds_CXX='$CC -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
-+ archive_cmds_CXX='$CC -shared $pic_flag -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
- else
-- archive_cmds_CXX='$CC -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` -o $lib'
-+ archive_cmds_CXX='$CC -shared $pic_flag -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` -o $lib'
- fi
- fi
- link_all_deplibs_CXX=yes
-@@ -12728,20 +13462,20 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
- prelink_cmds_CXX='tpldir=Template.dir~
- rm -rf $tpldir~
- $CC --prelink_objects --instantiation_dir $tpldir $objs $libobjs $compile_deplibs~
-- compile_command="$compile_command `find $tpldir -name \*.o | $NL2SP`"'
-+ compile_command="$compile_command `find $tpldir -name \*.o | sort | $NL2SP`"'
- old_archive_cmds_CXX='tpldir=Template.dir~
- rm -rf $tpldir~
- $CC --prelink_objects --instantiation_dir $tpldir $oldobjs$old_deplibs~
-- $AR $AR_FLAGS $oldlib$oldobjs$old_deplibs `find $tpldir -name \*.o | $NL2SP`~
-+ $AR $AR_FLAGS $oldlib$oldobjs$old_deplibs `find $tpldir -name \*.o | sort | $NL2SP`~
- $RANLIB $oldlib'
- archive_cmds_CXX='tpldir=Template.dir~
- rm -rf $tpldir~
- $CC --prelink_objects --instantiation_dir $tpldir $predep_objects $libobjs $deplibs $convenience $postdep_objects~
-- $CC -shared $pic_flag $predep_objects $libobjs $deplibs `find $tpldir -name \*.o | $NL2SP` $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname -o $lib'
-+ $CC -shared $pic_flag $predep_objects $libobjs $deplibs `find $tpldir -name \*.o | sort | $NL2SP` $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname -o $lib'
- archive_expsym_cmds_CXX='tpldir=Template.dir~
- rm -rf $tpldir~
- $CC --prelink_objects --instantiation_dir $tpldir $predep_objects $libobjs $deplibs $convenience $postdep_objects~
-- $CC -shared $pic_flag $predep_objects $libobjs $deplibs `find $tpldir -name \*.o | $NL2SP` $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname ${wl}-retain-symbols-file ${wl}$export_symbols -o $lib'
-+ $CC -shared $pic_flag $predep_objects $libobjs $deplibs `find $tpldir -name \*.o | sort | $NL2SP` $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname ${wl}-retain-symbols-file ${wl}$export_symbols -o $lib'
- ;;
- *) # Version 6 and above use weak symbols
- archive_cmds_CXX='$CC -shared $pic_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname -o $lib'
-@@ -12936,7 +13670,7 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
- archive_cmds_CXX='$CC -shared -nostdlib ${allow_undefined_flag} $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
- ;;
- *)
-- archive_cmds_CXX='$CC -shared -nostdlib ${allow_undefined_flag} $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
-+ archive_cmds_CXX='$CC -shared $pic_flag -nostdlib ${allow_undefined_flag} $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
- ;;
- esac
-
-@@ -12982,7 +13716,7 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
-
- solaris*)
- case $cc_basename in
-- CC*)
-+ CC* | sunCC*)
- # Sun C++ 4.2, 5.x and Centerline C++
- archive_cmds_need_lc_CXX=yes
- no_undefined_flag_CXX=' -zdefs'
-@@ -13023,9 +13757,9 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
- if test "$GXX" = yes && test "$with_gnu_ld" = no; then
- no_undefined_flag_CXX=' ${wl}-z ${wl}defs'
- if $CC --version | $GREP -v '^2\.7' > /dev/null; then
-- archive_cmds_CXX='$CC -shared -nostdlib $LDFLAGS $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-h $wl$soname -o $lib'
-+ archive_cmds_CXX='$CC -shared $pic_flag -nostdlib $LDFLAGS $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-h $wl$soname -o $lib'
- archive_expsym_cmds_CXX='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~
-- $CC -shared -nostdlib ${wl}-M $wl$lib.exp -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~$RM $lib.exp'
-+ $CC -shared $pic_flag -nostdlib ${wl}-M $wl$lib.exp -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~$RM $lib.exp'
-
- # Commands to make compiler produce verbose output that lists
- # what "hidden" libraries, object files and flags are used when
-@@ -13160,6 +13894,13 @@ private:
- };
- _LT_EOF
-
-+
-+_lt_libdeps_save_CFLAGS=$CFLAGS
-+case "$CC $CFLAGS " in #(
-+*\ -flto*\ *) CFLAGS="$CFLAGS -fno-lto" ;;
-+*\ -fwhopr*\ *) CFLAGS="$CFLAGS -fno-whopr" ;;
-+esac
-+
- if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5
- (eval $ac_compile) 2>&5
- ac_status=$?
-@@ -13173,7 +13914,7 @@ if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5
- pre_test_object_deps_done=no
-
- for p in `eval "$output_verbose_link_cmd"`; do
-- case $p in
-+ case ${prev}${p} in
-
- -L* | -R* | -l*)
- # Some compilers place space between "-{L,R}" and the path.
-@@ -13182,13 +13923,22 @@ if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5
- test $p = "-R"; then
- prev=$p
- continue
-- else
-- prev=
- fi
-
-+ # Expand the sysroot to ease extracting the directories later.
-+ if test -z "$prev"; then
-+ case $p in
-+ -L*) func_stripname_cnf '-L' '' "$p"; prev=-L; p=$func_stripname_result ;;
-+ -R*) func_stripname_cnf '-R' '' "$p"; prev=-R; p=$func_stripname_result ;;
-+ -l*) func_stripname_cnf '-l' '' "$p"; prev=-l; p=$func_stripname_result ;;
-+ esac
-+ fi
-+ case $p in
-+ =*) func_stripname_cnf '=' '' "$p"; p=$lt_sysroot$func_stripname_result ;;
-+ esac
- if test "$pre_test_object_deps_done" = no; then
-- case $p in
-- -L* | -R*)
-+ case ${prev} in
-+ -L | -R)
- # Internal compiler library paths should come after those
- # provided the user. The postdeps already come after the
- # user supplied libs so there is no need to process them.
-@@ -13208,8 +13958,10 @@ if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5
- postdeps_CXX="${postdeps_CXX} ${prev}${p}"
- fi
- fi
-+ prev=
- ;;
-
-+ *.lto.$objext) ;; # Ignore GCC LTO objects
- *.$objext)
- # This assumes that the test object file only shows up
- # once in the compiler output.
-@@ -13245,6 +13997,7 @@ else
- fi
-
- $RM -f confest.$objext
-+CFLAGS=$_lt_libdeps_save_CFLAGS
-
- # PORTME: override above test on systems where it is broken
- case $host_os in
-@@ -13280,7 +14033,7 @@ linux*)
-
- solaris*)
- case $cc_basename in
-- CC*)
-+ CC* | sunCC*)
- # The more standards-conforming stlport4 library is
- # incompatible with the Cstd library. Avoid specifying
- # it if it's in CXXFLAGS. Ignore libCrun as
-@@ -13345,8 +14098,6 @@ fi
- lt_prog_compiler_pic_CXX=
- lt_prog_compiler_static_CXX=
-
--{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $compiler option to produce PIC" >&5
--$as_echo_n "checking for $compiler option to produce PIC... " >&6; }
-
- # C++ specific cases for pic, static, wl, etc.
- if test "$GXX" = yes; then
-@@ -13451,6 +14202,11 @@ $as_echo_n "checking for $compiler option to produce PIC... " >&6; }
- ;;
- esac
- ;;
-+ mingw* | cygwin* | os2* | pw32* | cegcc*)
-+ # This hack is so that the source file can tell whether it is being
-+ # built for inclusion in a dll (and should export symbols for example).
-+ lt_prog_compiler_pic_CXX='-DDLL_EXPORT'
-+ ;;
- dgux*)
- case $cc_basename in
- ec++*)
-@@ -13603,7 +14359,7 @@ $as_echo_n "checking for $compiler option to produce PIC... " >&6; }
- ;;
- solaris*)
- case $cc_basename in
-- CC*)
-+ CC* | sunCC*)
- # Sun C++ 4.2, 5.x and Centerline C++
- lt_prog_compiler_pic_CXX='-KPIC'
- lt_prog_compiler_static_CXX='-Bstatic'
-@@ -13668,10 +14424,17 @@ case $host_os in
- lt_prog_compiler_pic_CXX="$lt_prog_compiler_pic_CXX -DPIC"
- ;;
- esac
--{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_prog_compiler_pic_CXX" >&5
--$as_echo "$lt_prog_compiler_pic_CXX" >&6; }
--
-
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $compiler option to produce PIC" >&5
-+$as_echo_n "checking for $compiler option to produce PIC... " >&6; }
-+if ${lt_cv_prog_compiler_pic_CXX+:} false; then :
-+ $as_echo_n "(cached) " >&6
-+else
-+ lt_cv_prog_compiler_pic_CXX=$lt_prog_compiler_pic_CXX
-+fi
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_pic_CXX" >&5
-+$as_echo "$lt_cv_prog_compiler_pic_CXX" >&6; }
-+lt_prog_compiler_pic_CXX=$lt_cv_prog_compiler_pic_CXX
-
- #
- # Check to make sure the PIC flag actually works.
-@@ -13729,6 +14492,8 @@ fi
-
-
-
-+
-+
- #
- # Check to make sure the static flag actually works.
- #
-@@ -13906,6 +14671,7 @@ fi
- $as_echo_n "checking whether the $compiler linker ($LD) supports shared libraries... " >&6; }
-
- export_symbols_cmds_CXX='$NM $libobjs $convenience | $global_symbol_pipe | $SED '\''s/.* //'\'' | sort | uniq > $export_symbols'
-+ exclude_expsyms_CXX='_GLOBAL_OFFSET_TABLE_|_GLOBAL__F[ID]_.*'
- case $host_os in
- aix[4-9]*)
- # If we're using GNU nm, then we don't want the "-C" option.
-@@ -13920,15 +14686,20 @@ $as_echo_n "checking whether the $compiler linker ($LD) supports shared librarie
- ;;
- pw32*)
- export_symbols_cmds_CXX="$ltdll_cmds"
-- ;;
-+ ;;
- cygwin* | mingw* | cegcc*)
-- export_symbols_cmds_CXX='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1 DATA/;/^.*[ ]__nm__/s/^.*[ ]__nm__\([^ ]*\)[ ][^ ]*/\1 DATA/;/^I[ ]/d;/^[AITW][ ]/s/.* //'\'' | sort | uniq > $export_symbols'
-- ;;
-+ case $cc_basename in
-+ cl*) ;;
-+ *)
-+ export_symbols_cmds_CXX='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1 DATA/;s/^.*[ ]__nm__\([^ ]*\)[ ][^ ]*/\1 DATA/;/^I[ ]/d;/^[AITW][ ]/s/.* //'\'' | sort | uniq > $export_symbols'
-+ exclude_expsyms_CXX='[_]+GLOBAL_OFFSET_TABLE_|[_]+GLOBAL__[FID]_.*|[_]+head_[A-Za-z0-9_]+_dll|[A-Za-z0-9_]+_dll_iname'
-+ ;;
-+ esac
-+ ;;
- *)
- export_symbols_cmds_CXX='$NM $libobjs $convenience | $global_symbol_pipe | $SED '\''s/.* //'\'' | sort | uniq > $export_symbols'
-- ;;
-+ ;;
- esac
-- exclude_expsyms_CXX='_GLOBAL_OFFSET_TABLE_|_GLOBAL__F[ID]_.*'
-
- { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ld_shlibs_CXX" >&5
- $as_echo "$ld_shlibs_CXX" >&6; }
-@@ -14191,8 +14962,9 @@ cygwin* | mingw* | pw32* | cegcc*)
- need_version=no
- need_lib_prefix=no
-
-- case $GCC,$host_os in
-- yes,cygwin* | yes,mingw* | yes,pw32* | yes,cegcc*)
-+ case $GCC,$cc_basename in
-+ yes,*)
-+ # gcc
- library_names_spec='$libname.dll.a'
- # DLL is installed to $(libdir)/../bin by postinstall_cmds
- postinstall_cmds='base_file=`basename \${file}`~
-@@ -14224,13 +14996,71 @@ cygwin* | mingw* | pw32* | cegcc*)
- library_names_spec='`echo ${libname} | sed -e 's/^lib/pw/'``echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}'
- ;;
- esac
-+ dynamic_linker='Win32 ld.exe'
-+ ;;
-+
-+ *,cl*)
-+ # Native MSVC
-+ libname_spec='$name'
-+ soname_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}'
-+ library_names_spec='${libname}.dll.lib'
-+
-+ case $build_os in
-+ mingw*)
-+ sys_lib_search_path_spec=
-+ lt_save_ifs=$IFS
-+ IFS=';'
-+ for lt_path in $LIB
-+ do
-+ IFS=$lt_save_ifs
-+ # Let DOS variable expansion print the short 8.3 style file name.
-+ lt_path=`cd "$lt_path" 2>/dev/null && cmd //C "for %i in (".") do @echo %~si"`
-+ sys_lib_search_path_spec="$sys_lib_search_path_spec $lt_path"
-+ done
-+ IFS=$lt_save_ifs
-+ # Convert to MSYS style.
-+ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | sed -e 's|\\\\|/|g' -e 's| \\([a-zA-Z]\\):| /\\1|g' -e 's|^ ||'`
-+ ;;
-+ cygwin*)
-+ # Convert to unix form, then to dos form, then back to unix form
-+ # but this time dos style (no spaces!) so that the unix form looks
-+ # like /cygdrive/c/PROGRA~1:/cygdr...
-+ sys_lib_search_path_spec=`cygpath --path --unix "$LIB"`
-+ sys_lib_search_path_spec=`cygpath --path --dos "$sys_lib_search_path_spec" 2>/dev/null`
-+ sys_lib_search_path_spec=`cygpath --path --unix "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"`
-+ ;;
-+ *)
-+ sys_lib_search_path_spec="$LIB"
-+ if $ECHO "$sys_lib_search_path_spec" | $GREP ';[c-zC-Z]:/' >/dev/null; then
-+ # It is most probably a Windows format PATH.
-+ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e 's/;/ /g'`
-+ else
-+ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"`
-+ fi
-+ # FIXME: find the short name or the path components, as spaces are
-+ # common. (e.g. "Program Files" -> "PROGRA~1")
-+ ;;
-+ esac
-+
-+ # DLL is installed to $(libdir)/../bin by postinstall_cmds
-+ postinstall_cmds='base_file=`basename \${file}`~
-+ dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\${base_file}'\''i; echo \$dlname'\''`~
-+ dldir=$destdir/`dirname \$dlpath`~
-+ test -d \$dldir || mkdir -p \$dldir~
-+ $install_prog $dir/$dlname \$dldir/$dlname'
-+ postuninstall_cmds='dldll=`$SHELL 2>&1 -c '\''. $file; echo \$dlname'\''`~
-+ dlpath=$dir/\$dldll~
-+ $RM \$dlpath'
-+ shlibpath_overrides_runpath=yes
-+ dynamic_linker='Win32 link.exe'
- ;;
-
- *)
-+ # Assume MSVC wrapper
- library_names_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext} $libname.lib'
-+ dynamic_linker='Win32 ld.exe'
- ;;
- esac
-- dynamic_linker='Win32 ld.exe'
- # FIXME: first we should search . and the directory the executable is in
- shlibpath_var=PATH
- ;;
-@@ -14770,6 +15600,7 @@ fi
- fi # test -n "$compiler"
-
- CC=$lt_save_CC
-+ CFLAGS=$lt_save_CFLAGS
- LDCXX=$LD
- LD=$lt_save_LD
- GCC=$lt_save_GCC
-@@ -17830,13 +18661,20 @@ exeext='`$ECHO "$exeext" | $SED "$delay_single_quote_subst"`'
- lt_unset='`$ECHO "$lt_unset" | $SED "$delay_single_quote_subst"`'
- lt_SP2NL='`$ECHO "$lt_SP2NL" | $SED "$delay_single_quote_subst"`'
- lt_NL2SP='`$ECHO "$lt_NL2SP" | $SED "$delay_single_quote_subst"`'
-+lt_cv_to_host_file_cmd='`$ECHO "$lt_cv_to_host_file_cmd" | $SED "$delay_single_quote_subst"`'
-+lt_cv_to_tool_file_cmd='`$ECHO "$lt_cv_to_tool_file_cmd" | $SED "$delay_single_quote_subst"`'
- reload_flag='`$ECHO "$reload_flag" | $SED "$delay_single_quote_subst"`'
- reload_cmds='`$ECHO "$reload_cmds" | $SED "$delay_single_quote_subst"`'
- OBJDUMP='`$ECHO "$OBJDUMP" | $SED "$delay_single_quote_subst"`'
- deplibs_check_method='`$ECHO "$deplibs_check_method" | $SED "$delay_single_quote_subst"`'
- file_magic_cmd='`$ECHO "$file_magic_cmd" | $SED "$delay_single_quote_subst"`'
-+file_magic_glob='`$ECHO "$file_magic_glob" | $SED "$delay_single_quote_subst"`'
-+want_nocaseglob='`$ECHO "$want_nocaseglob" | $SED "$delay_single_quote_subst"`'
-+DLLTOOL='`$ECHO "$DLLTOOL" | $SED "$delay_single_quote_subst"`'
-+sharedlib_from_linklib_cmd='`$ECHO "$sharedlib_from_linklib_cmd" | $SED "$delay_single_quote_subst"`'
- AR='`$ECHO "$AR" | $SED "$delay_single_quote_subst"`'
- AR_FLAGS='`$ECHO "$AR_FLAGS" | $SED "$delay_single_quote_subst"`'
-+archiver_list_spec='`$ECHO "$archiver_list_spec" | $SED "$delay_single_quote_subst"`'
- STRIP='`$ECHO "$STRIP" | $SED "$delay_single_quote_subst"`'
- RANLIB='`$ECHO "$RANLIB" | $SED "$delay_single_quote_subst"`'
- old_postinstall_cmds='`$ECHO "$old_postinstall_cmds" | $SED "$delay_single_quote_subst"`'
-@@ -17851,14 +18689,17 @@ lt_cv_sys_global_symbol_pipe='`$ECHO "$lt_cv_sys_global_symbol_pipe" | $SED "$de
- lt_cv_sys_global_symbol_to_cdecl='`$ECHO "$lt_cv_sys_global_symbol_to_cdecl" | $SED "$delay_single_quote_subst"`'
- lt_cv_sys_global_symbol_to_c_name_address='`$ECHO "$lt_cv_sys_global_symbol_to_c_name_address" | $SED "$delay_single_quote_subst"`'
- lt_cv_sys_global_symbol_to_c_name_address_lib_prefix='`$ECHO "$lt_cv_sys_global_symbol_to_c_name_address_lib_prefix" | $SED "$delay_single_quote_subst"`'
-+nm_file_list_spec='`$ECHO "$nm_file_list_spec" | $SED "$delay_single_quote_subst"`'
-+lt_sysroot='`$ECHO "$lt_sysroot" | $SED "$delay_single_quote_subst"`'
- objdir='`$ECHO "$objdir" | $SED "$delay_single_quote_subst"`'
- MAGIC_CMD='`$ECHO "$MAGIC_CMD" | $SED "$delay_single_quote_subst"`'
- lt_prog_compiler_no_builtin_flag='`$ECHO "$lt_prog_compiler_no_builtin_flag" | $SED "$delay_single_quote_subst"`'
--lt_prog_compiler_wl='`$ECHO "$lt_prog_compiler_wl" | $SED "$delay_single_quote_subst"`'
- lt_prog_compiler_pic='`$ECHO "$lt_prog_compiler_pic" | $SED "$delay_single_quote_subst"`'
-+lt_prog_compiler_wl='`$ECHO "$lt_prog_compiler_wl" | $SED "$delay_single_quote_subst"`'
- lt_prog_compiler_static='`$ECHO "$lt_prog_compiler_static" | $SED "$delay_single_quote_subst"`'
- lt_cv_prog_compiler_c_o='`$ECHO "$lt_cv_prog_compiler_c_o" | $SED "$delay_single_quote_subst"`'
- need_locks='`$ECHO "$need_locks" | $SED "$delay_single_quote_subst"`'
-+MANIFEST_TOOL='`$ECHO "$MANIFEST_TOOL" | $SED "$delay_single_quote_subst"`'
- DSYMUTIL='`$ECHO "$DSYMUTIL" | $SED "$delay_single_quote_subst"`'
- NMEDIT='`$ECHO "$NMEDIT" | $SED "$delay_single_quote_subst"`'
- LIPO='`$ECHO "$LIPO" | $SED "$delay_single_quote_subst"`'
-@@ -17891,12 +18732,12 @@ hardcode_shlibpath_var='`$ECHO "$hardcode_shlibpath_var" | $SED "$delay_single_q
- hardcode_automatic='`$ECHO "$hardcode_automatic" | $SED "$delay_single_quote_subst"`'
- inherit_rpath='`$ECHO "$inherit_rpath" | $SED "$delay_single_quote_subst"`'
- link_all_deplibs='`$ECHO "$link_all_deplibs" | $SED "$delay_single_quote_subst"`'
--fix_srcfile_path='`$ECHO "$fix_srcfile_path" | $SED "$delay_single_quote_subst"`'
- always_export_symbols='`$ECHO "$always_export_symbols" | $SED "$delay_single_quote_subst"`'
- export_symbols_cmds='`$ECHO "$export_symbols_cmds" | $SED "$delay_single_quote_subst"`'
- exclude_expsyms='`$ECHO "$exclude_expsyms" | $SED "$delay_single_quote_subst"`'
- include_expsyms='`$ECHO "$include_expsyms" | $SED "$delay_single_quote_subst"`'
- prelink_cmds='`$ECHO "$prelink_cmds" | $SED "$delay_single_quote_subst"`'
-+postlink_cmds='`$ECHO "$postlink_cmds" | $SED "$delay_single_quote_subst"`'
- file_list_spec='`$ECHO "$file_list_spec" | $SED "$delay_single_quote_subst"`'
- variables_saved_for_relink='`$ECHO "$variables_saved_for_relink" | $SED "$delay_single_quote_subst"`'
- need_lib_prefix='`$ECHO "$need_lib_prefix" | $SED "$delay_single_quote_subst"`'
-@@ -17935,8 +18776,8 @@ old_archive_cmds_CXX='`$ECHO "$old_archive_cmds_CXX" | $SED "$delay_single_quote
- compiler_CXX='`$ECHO "$compiler_CXX" | $SED "$delay_single_quote_subst"`'
- GCC_CXX='`$ECHO "$GCC_CXX" | $SED "$delay_single_quote_subst"`'
- lt_prog_compiler_no_builtin_flag_CXX='`$ECHO "$lt_prog_compiler_no_builtin_flag_CXX" | $SED "$delay_single_quote_subst"`'
--lt_prog_compiler_wl_CXX='`$ECHO "$lt_prog_compiler_wl_CXX" | $SED "$delay_single_quote_subst"`'
- lt_prog_compiler_pic_CXX='`$ECHO "$lt_prog_compiler_pic_CXX" | $SED "$delay_single_quote_subst"`'
-+lt_prog_compiler_wl_CXX='`$ECHO "$lt_prog_compiler_wl_CXX" | $SED "$delay_single_quote_subst"`'
- lt_prog_compiler_static_CXX='`$ECHO "$lt_prog_compiler_static_CXX" | $SED "$delay_single_quote_subst"`'
- lt_cv_prog_compiler_c_o_CXX='`$ECHO "$lt_cv_prog_compiler_c_o_CXX" | $SED "$delay_single_quote_subst"`'
- archive_cmds_need_lc_CXX='`$ECHO "$archive_cmds_need_lc_CXX" | $SED "$delay_single_quote_subst"`'
-@@ -17963,12 +18804,12 @@ hardcode_shlibpath_var_CXX='`$ECHO "$hardcode_shlibpath_var_CXX" | $SED "$delay_
- hardcode_automatic_CXX='`$ECHO "$hardcode_automatic_CXX" | $SED "$delay_single_quote_subst"`'
- inherit_rpath_CXX='`$ECHO "$inherit_rpath_CXX" | $SED "$delay_single_quote_subst"`'
- link_all_deplibs_CXX='`$ECHO "$link_all_deplibs_CXX" | $SED "$delay_single_quote_subst"`'
--fix_srcfile_path_CXX='`$ECHO "$fix_srcfile_path_CXX" | $SED "$delay_single_quote_subst"`'
- always_export_symbols_CXX='`$ECHO "$always_export_symbols_CXX" | $SED "$delay_single_quote_subst"`'
- export_symbols_cmds_CXX='`$ECHO "$export_symbols_cmds_CXX" | $SED "$delay_single_quote_subst"`'
- exclude_expsyms_CXX='`$ECHO "$exclude_expsyms_CXX" | $SED "$delay_single_quote_subst"`'
- include_expsyms_CXX='`$ECHO "$include_expsyms_CXX" | $SED "$delay_single_quote_subst"`'
- prelink_cmds_CXX='`$ECHO "$prelink_cmds_CXX" | $SED "$delay_single_quote_subst"`'
-+postlink_cmds_CXX='`$ECHO "$postlink_cmds_CXX" | $SED "$delay_single_quote_subst"`'
- file_list_spec_CXX='`$ECHO "$file_list_spec_CXX" | $SED "$delay_single_quote_subst"`'
- hardcode_action_CXX='`$ECHO "$hardcode_action_CXX" | $SED "$delay_single_quote_subst"`'
- compiler_lib_search_dirs_CXX='`$ECHO "$compiler_lib_search_dirs_CXX" | $SED "$delay_single_quote_subst"`'
-@@ -18006,8 +18847,13 @@ reload_flag \
- OBJDUMP \
- deplibs_check_method \
- file_magic_cmd \
-+file_magic_glob \
-+want_nocaseglob \
-+DLLTOOL \
-+sharedlib_from_linklib_cmd \
- AR \
- AR_FLAGS \
-+archiver_list_spec \
- STRIP \
- RANLIB \
- CC \
-@@ -18017,12 +18863,14 @@ lt_cv_sys_global_symbol_pipe \
- lt_cv_sys_global_symbol_to_cdecl \
- lt_cv_sys_global_symbol_to_c_name_address \
- lt_cv_sys_global_symbol_to_c_name_address_lib_prefix \
-+nm_file_list_spec \
- lt_prog_compiler_no_builtin_flag \
--lt_prog_compiler_wl \
- lt_prog_compiler_pic \
-+lt_prog_compiler_wl \
- lt_prog_compiler_static \
- lt_cv_prog_compiler_c_o \
- need_locks \
-+MANIFEST_TOOL \
- DSYMUTIL \
- NMEDIT \
- LIPO \
-@@ -18038,7 +18886,6 @@ no_undefined_flag \
- hardcode_libdir_flag_spec \
- hardcode_libdir_flag_spec_ld \
- hardcode_libdir_separator \
--fix_srcfile_path \
- exclude_expsyms \
- include_expsyms \
- file_list_spec \
-@@ -18060,8 +18907,8 @@ LD_CXX \
- reload_flag_CXX \
- compiler_CXX \
- lt_prog_compiler_no_builtin_flag_CXX \
--lt_prog_compiler_wl_CXX \
- lt_prog_compiler_pic_CXX \
-+lt_prog_compiler_wl_CXX \
- lt_prog_compiler_static_CXX \
- lt_cv_prog_compiler_c_o_CXX \
- export_dynamic_flag_spec_CXX \
-@@ -18073,7 +18920,6 @@ no_undefined_flag_CXX \
- hardcode_libdir_flag_spec_CXX \
- hardcode_libdir_flag_spec_ld_CXX \
- hardcode_libdir_separator_CXX \
--fix_srcfile_path_CXX \
- exclude_expsyms_CXX \
- include_expsyms_CXX \
- file_list_spec_CXX \
-@@ -18107,6 +18953,7 @@ module_cmds \
- module_expsym_cmds \
- export_symbols_cmds \
- prelink_cmds \
-+postlink_cmds \
- postinstall_cmds \
- postuninstall_cmds \
- finish_cmds \
-@@ -18121,7 +18968,8 @@ archive_expsym_cmds_CXX \
- module_cmds_CXX \
- module_expsym_cmds_CXX \
- export_symbols_cmds_CXX \
--prelink_cmds_CXX; do
-+prelink_cmds_CXX \
-+postlink_cmds_CXX; do
- case \`eval \\\\\$ECHO \\\\""\\\\\$\$var"\\\\"\` in
- *[\\\\\\\`\\"\\\$]*)
- eval "lt_\$var=\\\\\\"\\\`\\\$ECHO \\"\\\$\$var\\" | \\\$SED -e \\"\\\$double_quote_subst\\" -e \\"\\\$sed_quote_subst\\" -e \\"\\\$delay_variable_subst\\"\\\`\\\\\\""
-@@ -18886,7 +19734,8 @@ $as_echo X"$file" |
- # NOTE: Changes made to this file will be lost: look at ltmain.sh.
- #
- # Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005,
--# 2006, 2007, 2008, 2009 Free Software Foundation, Inc.
-+# 2006, 2007, 2008, 2009, 2010 Free Software Foundation,
-+# Inc.
- # Written by Gordon Matzigkeit, 1996
- #
- # This file is part of GNU Libtool.
-@@ -18989,19 +19838,42 @@ SP2NL=$lt_lt_SP2NL
- # turn newlines into spaces.
- NL2SP=$lt_lt_NL2SP
-
-+# convert \$build file names to \$host format.
-+to_host_file_cmd=$lt_cv_to_host_file_cmd
-+
-+# convert \$build files to toolchain format.
-+to_tool_file_cmd=$lt_cv_to_tool_file_cmd
-+
- # An object symbol dumper.
- OBJDUMP=$lt_OBJDUMP
-
- # Method to check whether dependent libraries are shared objects.
- deplibs_check_method=$lt_deplibs_check_method
-
--# Command to use when deplibs_check_method == "file_magic".
-+# Command to use when deplibs_check_method = "file_magic".
- file_magic_cmd=$lt_file_magic_cmd
-
-+# How to find potential files when deplibs_check_method = "file_magic".
-+file_magic_glob=$lt_file_magic_glob
-+
-+# Find potential files using nocaseglob when deplibs_check_method = "file_magic".
-+want_nocaseglob=$lt_want_nocaseglob
-+
-+# DLL creation program.
-+DLLTOOL=$lt_DLLTOOL
-+
-+# Command to associate shared and link libraries.
-+sharedlib_from_linklib_cmd=$lt_sharedlib_from_linklib_cmd
-+
- # The archiver.
- AR=$lt_AR
-+
-+# Flags to create an archive.
- AR_FLAGS=$lt_AR_FLAGS
-
-+# How to feed a file listing to the archiver.
-+archiver_list_spec=$lt_archiver_list_spec
-+
- # A symbol stripping program.
- STRIP=$lt_STRIP
-
-@@ -19031,6 +19903,12 @@ global_symbol_to_c_name_address=$lt_lt_cv_sys_global_symbol_to_c_name_address
- # Transform the output of nm in a C name address pair when lib prefix is needed.
- global_symbol_to_c_name_address_lib_prefix=$lt_lt_cv_sys_global_symbol_to_c_name_address_lib_prefix
-
-+# Specify filename containing input files for \$NM.
-+nm_file_list_spec=$lt_nm_file_list_spec
-+
-+# The root where to search for dependent libraries,and in which our libraries should be installed.
-+lt_sysroot=$lt_sysroot
-+
- # The name of the directory that contains temporary libtool files.
- objdir=$objdir
-
-@@ -19040,6 +19918,9 @@ MAGIC_CMD=$MAGIC_CMD
- # Must we lock files when doing compilation?
- need_locks=$lt_need_locks
-
-+# Manifest tool.
-+MANIFEST_TOOL=$lt_MANIFEST_TOOL
-+
- # Tool to manipulate archived DWARF debug symbol files on Mac OS X.
- DSYMUTIL=$lt_DSYMUTIL
-
-@@ -19154,12 +20035,12 @@ with_gcc=$GCC
- # Compiler flag to turn off builtin functions.
- no_builtin_flag=$lt_lt_prog_compiler_no_builtin_flag
-
--# How to pass a linker flag through the compiler.
--wl=$lt_lt_prog_compiler_wl
--
- # Additional compiler flags for building library objects.
- pic_flag=$lt_lt_prog_compiler_pic
-
-+# How to pass a linker flag through the compiler.
-+wl=$lt_lt_prog_compiler_wl
-+
- # Compiler flag to prevent dynamic linking.
- link_static_flag=$lt_lt_prog_compiler_static
-
-@@ -19246,9 +20127,6 @@ inherit_rpath=$inherit_rpath
- # Whether libtool must link a program against all its dependency libraries.
- link_all_deplibs=$link_all_deplibs
-
--# Fix the shell variable \$srcfile for the compiler.
--fix_srcfile_path=$lt_fix_srcfile_path
--
- # Set to "yes" if exported symbols are required.
- always_export_symbols=$always_export_symbols
-
-@@ -19264,6 +20142,9 @@ include_expsyms=$lt_include_expsyms
- # Commands necessary for linking programs (against libraries) with templates.
- prelink_cmds=$lt_prelink_cmds
-
-+# Commands necessary for finishing linking programs.
-+postlink_cmds=$lt_postlink_cmds
-+
- # Specify filename containing input files.
- file_list_spec=$lt_file_list_spec
-
-@@ -19310,210 +20191,169 @@ ltmain="$ac_aux_dir/ltmain.sh"
- # if finds mixed CR/LF and LF-only lines. Since sed operates in
- # text mode, it properly converts lines to CR/LF. This bash problem
- # is reportedly fixed, but why not run on old versions too?
-- sed '/^# Generated shell functions inserted here/q' "$ltmain" >> "$cfgfile" \
-- || (rm -f "$cfgfile"; exit 1)
--
-- case $xsi_shell in
-- yes)
-- cat << \_LT_EOF >> "$cfgfile"
--
--# func_dirname file append nondir_replacement
--# Compute the dirname of FILE. If nonempty, add APPEND to the result,
--# otherwise set result to NONDIR_REPLACEMENT.
--func_dirname ()
--{
-- case ${1} in
-- */*) func_dirname_result="${1%/*}${2}" ;;
-- * ) func_dirname_result="${3}" ;;
-- esac
--}
--
--# func_basename file
--func_basename ()
--{
-- func_basename_result="${1##*/}"
--}
--
--# func_dirname_and_basename file append nondir_replacement
--# perform func_basename and func_dirname in a single function
--# call:
--# dirname: Compute the dirname of FILE. If nonempty,
--# add APPEND to the result, otherwise set result
--# to NONDIR_REPLACEMENT.
--# value returned in "$func_dirname_result"
--# basename: Compute filename of FILE.
--# value retuned in "$func_basename_result"
--# Implementation must be kept synchronized with func_dirname
--# and func_basename. For efficiency, we do not delegate to
--# those functions but instead duplicate the functionality here.
--func_dirname_and_basename ()
--{
-- case ${1} in
-- */*) func_dirname_result="${1%/*}${2}" ;;
-- * ) func_dirname_result="${3}" ;;
-- esac
-- func_basename_result="${1##*/}"
--}
--
--# func_stripname prefix suffix name
--# strip PREFIX and SUFFIX off of NAME.
--# PREFIX and SUFFIX must not contain globbing or regex special
--# characters, hashes, percent signs, but SUFFIX may contain a leading
--# dot (in which case that matches only a dot).
--func_stripname ()
--{
-- # pdksh 5.2.14 does not do ${X%$Y} correctly if both X and Y are
-- # positional parameters, so assign one to ordinary parameter first.
-- func_stripname_result=${3}
-- func_stripname_result=${func_stripname_result#"${1}"}
-- func_stripname_result=${func_stripname_result%"${2}"}
--}
--
--# func_opt_split
--func_opt_split ()
--{
-- func_opt_split_opt=${1%%=*}
-- func_opt_split_arg=${1#*=}
--}
--
--# func_lo2o object
--func_lo2o ()
--{
-- case ${1} in
-- *.lo) func_lo2o_result=${1%.lo}.${objext} ;;
-- *) func_lo2o_result=${1} ;;
-- esac
--}
--
--# func_xform libobj-or-source
--func_xform ()
--{
-- func_xform_result=${1%.*}.lo
--}
--
--# func_arith arithmetic-term...
--func_arith ()
--{
-- func_arith_result=$(( $* ))
--}
--
--# func_len string
--# STRING may not start with a hyphen.
--func_len ()
--{
-- func_len_result=${#1}
--}
--
--_LT_EOF
-- ;;
-- *) # Bourne compatible functions.
-- cat << \_LT_EOF >> "$cfgfile"
--
--# func_dirname file append nondir_replacement
--# Compute the dirname of FILE. If nonempty, add APPEND to the result,
--# otherwise set result to NONDIR_REPLACEMENT.
--func_dirname ()
--{
-- # Extract subdirectory from the argument.
-- func_dirname_result=`$ECHO "${1}" | $SED "$dirname"`
-- if test "X$func_dirname_result" = "X${1}"; then
-- func_dirname_result="${3}"
-- else
-- func_dirname_result="$func_dirname_result${2}"
-- fi
--}
--
--# func_basename file
--func_basename ()
--{
-- func_basename_result=`$ECHO "${1}" | $SED "$basename"`
--}
--
--
--# func_stripname prefix suffix name
--# strip PREFIX and SUFFIX off of NAME.
--# PREFIX and SUFFIX must not contain globbing or regex special
--# characters, hashes, percent signs, but SUFFIX may contain a leading
--# dot (in which case that matches only a dot).
--# func_strip_suffix prefix name
--func_stripname ()
--{
-- case ${2} in
-- .*) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%\\\\${2}\$%%"`;;
-- *) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%${2}\$%%"`;;
-- esac
--}
--
--# sed scripts:
--my_sed_long_opt='1s/^\(-[^=]*\)=.*/\1/;q'
--my_sed_long_arg='1s/^-[^=]*=//'
--
--# func_opt_split
--func_opt_split ()
--{
-- func_opt_split_opt=`$ECHO "${1}" | $SED "$my_sed_long_opt"`
-- func_opt_split_arg=`$ECHO "${1}" | $SED "$my_sed_long_arg"`
--}
--
--# func_lo2o object
--func_lo2o ()
--{
-- func_lo2o_result=`$ECHO "${1}" | $SED "$lo2o"`
--}
--
--# func_xform libobj-or-source
--func_xform ()
--{
-- func_xform_result=`$ECHO "${1}" | $SED 's/\.[^.]*$/.lo/'`
--}
--
--# func_arith arithmetic-term...
--func_arith ()
--{
-- func_arith_result=`expr "$@"`
--}
--
--# func_len string
--# STRING may not start with a hyphen.
--func_len ()
--{
-- func_len_result=`expr "$1" : ".*" 2>/dev/null || echo $max_cmd_len`
--}
--
--_LT_EOF
--esac
--
--case $lt_shell_append in
-- yes)
-- cat << \_LT_EOF >> "$cfgfile"
--
--# func_append var value
--# Append VALUE to the end of shell variable VAR.
--func_append ()
--{
-- eval "$1+=\$2"
--}
--_LT_EOF
-- ;;
-- *)
-- cat << \_LT_EOF >> "$cfgfile"
--
--# func_append var value
--# Append VALUE to the end of shell variable VAR.
--func_append ()
--{
-- eval "$1=\$$1\$2"
--}
--
--_LT_EOF
-- ;;
-- esac
--
--
-- sed -n '/^# Generated shell functions inserted here/,$p' "$ltmain" >> "$cfgfile" \
-- || (rm -f "$cfgfile"; exit 1)
--
-- mv -f "$cfgfile" "$ofile" ||
-+ sed '$q' "$ltmain" >> "$cfgfile" \
-+ || (rm -f "$cfgfile"; exit 1)
-+
-+ if test x"$xsi_shell" = xyes; then
-+ sed -e '/^func_dirname ()$/,/^} # func_dirname /c\
-+func_dirname ()\
-+{\
-+\ case ${1} in\
-+\ */*) func_dirname_result="${1%/*}${2}" ;;\
-+\ * ) func_dirname_result="${3}" ;;\
-+\ esac\
-+} # Extended-shell func_dirname implementation' "$cfgfile" > $cfgfile.tmp \
-+ && mv -f "$cfgfile.tmp" "$cfgfile" \
-+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-+test 0 -eq $? || _lt_function_replace_fail=:
-+
-+
-+ sed -e '/^func_basename ()$/,/^} # func_basename /c\
-+func_basename ()\
-+{\
-+\ func_basename_result="${1##*/}"\
-+} # Extended-shell func_basename implementation' "$cfgfile" > $cfgfile.tmp \
-+ && mv -f "$cfgfile.tmp" "$cfgfile" \
-+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-+test 0 -eq $? || _lt_function_replace_fail=:
-+
-+
-+ sed -e '/^func_dirname_and_basename ()$/,/^} # func_dirname_and_basename /c\
-+func_dirname_and_basename ()\
-+{\
-+\ case ${1} in\
-+\ */*) func_dirname_result="${1%/*}${2}" ;;\
-+\ * ) func_dirname_result="${3}" ;;\
-+\ esac\
-+\ func_basename_result="${1##*/}"\
-+} # Extended-shell func_dirname_and_basename implementation' "$cfgfile" > $cfgfile.tmp \
-+ && mv -f "$cfgfile.tmp" "$cfgfile" \
-+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-+test 0 -eq $? || _lt_function_replace_fail=:
-+
-+
-+ sed -e '/^func_stripname ()$/,/^} # func_stripname /c\
-+func_stripname ()\
-+{\
-+\ # pdksh 5.2.14 does not do ${X%$Y} correctly if both X and Y are\
-+\ # positional parameters, so assign one to ordinary parameter first.\
-+\ func_stripname_result=${3}\
-+\ func_stripname_result=${func_stripname_result#"${1}"}\
-+\ func_stripname_result=${func_stripname_result%"${2}"}\
-+} # Extended-shell func_stripname implementation' "$cfgfile" > $cfgfile.tmp \
-+ && mv -f "$cfgfile.tmp" "$cfgfile" \
-+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-+test 0 -eq $? || _lt_function_replace_fail=:
-+
-+
-+ sed -e '/^func_split_long_opt ()$/,/^} # func_split_long_opt /c\
-+func_split_long_opt ()\
-+{\
-+\ func_split_long_opt_name=${1%%=*}\
-+\ func_split_long_opt_arg=${1#*=}\
-+} # Extended-shell func_split_long_opt implementation' "$cfgfile" > $cfgfile.tmp \
-+ && mv -f "$cfgfile.tmp" "$cfgfile" \
-+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-+test 0 -eq $? || _lt_function_replace_fail=:
-+
-+
-+ sed -e '/^func_split_short_opt ()$/,/^} # func_split_short_opt /c\
-+func_split_short_opt ()\
-+{\
-+\ func_split_short_opt_arg=${1#??}\
-+\ func_split_short_opt_name=${1%"$func_split_short_opt_arg"}\
-+} # Extended-shell func_split_short_opt implementation' "$cfgfile" > $cfgfile.tmp \
-+ && mv -f "$cfgfile.tmp" "$cfgfile" \
-+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-+test 0 -eq $? || _lt_function_replace_fail=:
-+
-+
-+ sed -e '/^func_lo2o ()$/,/^} # func_lo2o /c\
-+func_lo2o ()\
-+{\
-+\ case ${1} in\
-+\ *.lo) func_lo2o_result=${1%.lo}.${objext} ;;\
-+\ *) func_lo2o_result=${1} ;;\
-+\ esac\
-+} # Extended-shell func_lo2o implementation' "$cfgfile" > $cfgfile.tmp \
-+ && mv -f "$cfgfile.tmp" "$cfgfile" \
-+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-+test 0 -eq $? || _lt_function_replace_fail=:
-+
-+
-+ sed -e '/^func_xform ()$/,/^} # func_xform /c\
-+func_xform ()\
-+{\
-+ func_xform_result=${1%.*}.lo\
-+} # Extended-shell func_xform implementation' "$cfgfile" > $cfgfile.tmp \
-+ && mv -f "$cfgfile.tmp" "$cfgfile" \
-+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-+test 0 -eq $? || _lt_function_replace_fail=:
-+
-+
-+ sed -e '/^func_arith ()$/,/^} # func_arith /c\
-+func_arith ()\
-+{\
-+ func_arith_result=$(( $* ))\
-+} # Extended-shell func_arith implementation' "$cfgfile" > $cfgfile.tmp \
-+ && mv -f "$cfgfile.tmp" "$cfgfile" \
-+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-+test 0 -eq $? || _lt_function_replace_fail=:
-+
-+
-+ sed -e '/^func_len ()$/,/^} # func_len /c\
-+func_len ()\
-+{\
-+ func_len_result=${#1}\
-+} # Extended-shell func_len implementation' "$cfgfile" > $cfgfile.tmp \
-+ && mv -f "$cfgfile.tmp" "$cfgfile" \
-+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-+test 0 -eq $? || _lt_function_replace_fail=:
-+
-+fi
-+
-+if test x"$lt_shell_append" = xyes; then
-+ sed -e '/^func_append ()$/,/^} # func_append /c\
-+func_append ()\
-+{\
-+ eval "${1}+=\\${2}"\
-+} # Extended-shell func_append implementation' "$cfgfile" > $cfgfile.tmp \
-+ && mv -f "$cfgfile.tmp" "$cfgfile" \
-+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-+test 0 -eq $? || _lt_function_replace_fail=:
-+
-+
-+ sed -e '/^func_append_quoted ()$/,/^} # func_append_quoted /c\
-+func_append_quoted ()\
-+{\
-+\ func_quote_for_eval "${2}"\
-+\ eval "${1}+=\\\\ \\$func_quote_for_eval_result"\
-+} # Extended-shell func_append_quoted implementation' "$cfgfile" > $cfgfile.tmp \
-+ && mv -f "$cfgfile.tmp" "$cfgfile" \
-+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-+test 0 -eq $? || _lt_function_replace_fail=:
-+
-+
-+ # Save a `func_append' function call where possible by direct use of '+='
-+ sed -e 's%func_append \([a-zA-Z_]\{1,\}\) "%\1+="%g' $cfgfile > $cfgfile.tmp \
-+ && mv -f "$cfgfile.tmp" "$cfgfile" \
-+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-+ test 0 -eq $? || _lt_function_replace_fail=:
-+else
-+ # Save a `func_append' function call even when '+=' is not available
-+ sed -e 's%func_append \([a-zA-Z_]\{1,\}\) "%\1="$\1%g' $cfgfile > $cfgfile.tmp \
-+ && mv -f "$cfgfile.tmp" "$cfgfile" \
-+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-+ test 0 -eq $? || _lt_function_replace_fail=:
-+fi
-+
-+if test x"$_lt_function_replace_fail" = x":"; then
-+ { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: Unable to substitute extended shell functions in $ofile" >&5
-+$as_echo "$as_me: WARNING: Unable to substitute extended shell functions in $ofile" >&2;}
-+fi
-+
-+
-+ mv -f "$cfgfile" "$ofile" ||
- (rm -f "$ofile" && cp "$cfgfile" "$ofile" && rm -f "$cfgfile")
- chmod +x "$ofile"
-
-@@ -19541,12 +20381,12 @@ with_gcc=$GCC_CXX
- # Compiler flag to turn off builtin functions.
- no_builtin_flag=$lt_lt_prog_compiler_no_builtin_flag_CXX
-
--# How to pass a linker flag through the compiler.
--wl=$lt_lt_prog_compiler_wl_CXX
--
- # Additional compiler flags for building library objects.
- pic_flag=$lt_lt_prog_compiler_pic_CXX
-
-+# How to pass a linker flag through the compiler.
-+wl=$lt_lt_prog_compiler_wl_CXX
-+
- # Compiler flag to prevent dynamic linking.
- link_static_flag=$lt_lt_prog_compiler_static_CXX
-
-@@ -19633,9 +20473,6 @@ inherit_rpath=$inherit_rpath_CXX
- # Whether libtool must link a program against all its dependency libraries.
- link_all_deplibs=$link_all_deplibs_CXX
-
--# Fix the shell variable \$srcfile for the compiler.
--fix_srcfile_path=$lt_fix_srcfile_path_CXX
--
- # Set to "yes" if exported symbols are required.
- always_export_symbols=$always_export_symbols_CXX
-
-@@ -19651,6 +20488,9 @@ include_expsyms=$lt_include_expsyms_CXX
- # Commands necessary for linking programs (against libraries) with templates.
- prelink_cmds=$lt_prelink_cmds_CXX
-
-+# Commands necessary for finishing linking programs.
-+postlink_cmds=$lt_postlink_cmds_CXX
-+
- # Specify filename containing input files.
- file_list_spec=$lt_file_list_spec_CXX
-
-diff --git a/libbacktrace/configure b/libbacktrace/configure
-index a2f33c0f35d..90667680701 100755
---- a/libbacktrace/configure
-+++ b/libbacktrace/configure
-@@ -680,7 +680,10 @@ OTOOL
- LIPO
- NMEDIT
- DSYMUTIL
-+MANIFEST_TOOL
-+ac_ct_AR
- AR
-+DLLTOOL
- OBJDUMP
- LN_S
- NM
-@@ -798,6 +801,7 @@ enable_static
- with_pic
- enable_fast_install
- with_gnu_ld
-+with_libtool_sysroot
- enable_libtool_lock
- enable_largefile
- enable_cet
-@@ -1458,6 +1462,8 @@ Optional Packages:
- --with-pic try to use only PIC/non-PIC objects [default=use
- both]
- --with-gnu-ld assume the C compiler uses GNU ld [default=no]
-+ --with-libtool-sysroot=DIR Search for dependent libraries within DIR
-+ (or the compiler's sysroot if not specified).
- --with-system-libunwind use installed libunwind
-
- Some influential environment variables:
-@@ -5446,8 +5452,8 @@ esac
-
-
-
--macro_version='2.2.7a'
--macro_revision='1.3134'
-+macro_version='2.4'
-+macro_revision='1.3293'
-
-
-
-@@ -5487,7 +5493,7 @@ ECHO=$ECHO$ECHO$ECHO$ECHO$ECHO$ECHO
- { $as_echo "$as_me:${as_lineno-$LINENO}: checking how to print strings" >&5
- $as_echo_n "checking how to print strings... " >&6; }
- # Test print first, because it will be a builtin if present.
--if test "X`print -r -- -n 2>/dev/null`" = X-n && \
-+if test "X`( print -r -- -n ) 2>/dev/null`" = X-n && \
- test "X`print -r -- $ECHO 2>/dev/null`" = "X$ECHO"; then
- ECHO='print -r --'
- elif test "X`printf %s $ECHO 2>/dev/null`" = "X$ECHO"; then
-@@ -5818,48 +5824,49 @@ if ${lt_cv_path_NM+:} false; then :
- $as_echo_n "(cached) " >&6
- else
- if test -n "$NM"; then
-- # Let the user override the test.
-- lt_cv_path_NM="$NM"
--else
-- lt_nm_to_check="${ac_tool_prefix}nm"
-- if test -n "$ac_tool_prefix" && test "$build" = "$host"; then
-- lt_nm_to_check="$lt_nm_to_check nm"
-- fi
-- for lt_tmp_nm in $lt_nm_to_check; do
-- lt_save_ifs="$IFS"; IFS=$PATH_SEPARATOR
-- for ac_dir in $PATH /usr/ccs/bin/elf /usr/ccs/bin /usr/ucb /bin; do
-- IFS="$lt_save_ifs"
-- test -z "$ac_dir" && ac_dir=.
-- tmp_nm="$ac_dir/$lt_tmp_nm"
-- if test -f "$tmp_nm" || test -f "$tmp_nm$ac_exeext" ; then
-- # Check to see if the nm accepts a BSD-compat flag.
-- # Adding the `sed 1q' prevents false positives on HP-UX, which says:
-- # nm: unknown option "B" ignored
-- # Tru64's nm complains that /dev/null is an invalid object file
-- case `"$tmp_nm" -B /dev/null 2>&1 | sed '1q'` in
-- */dev/null* | *'Invalid file or object type'*)
-- lt_cv_path_NM="$tmp_nm -B"
-- break
-- ;;
-- *)
-- case `"$tmp_nm" -p /dev/null 2>&1 | sed '1q'` in
-- */dev/null*)
-- lt_cv_path_NM="$tmp_nm -p"
-- break
-- ;;
-- *)
-- lt_cv_path_NM=${lt_cv_path_NM="$tmp_nm"} # keep the first match, but
-- continue # so that we can try to find one that supports BSD flags
-- ;;
-- esac
-- ;;
-- esac
-- fi
-- done
-- IFS="$lt_save_ifs"
-- done
-- : ${lt_cv_path_NM=no}
--fi
-+ # Let the user override the nm to test.
-+ lt_nm_to_check="$NM"
-+ else
-+ lt_nm_to_check="${ac_tool_prefix}nm"
-+ if test -n "$ac_tool_prefix" && test "$build" = "$host"; then
-+ lt_nm_to_check="$lt_nm_to_check nm"
-+ fi
-+ fi
-+ for lt_tmp_nm in $lt_nm_to_check; do
-+ lt_save_ifs="$IFS"; IFS=$PATH_SEPARATOR
-+ for ac_dir in $PATH /usr/ccs/bin/elf /usr/ccs/bin /usr/ucb /bin; do
-+ IFS="$lt_save_ifs"
-+ test -z "$ac_dir" && ac_dir=.
-+ case "$lt_tmp_nm" in
-+ */*|*\\*) tmp_nm="$lt_tmp_nm";;
-+ *) tmp_nm="$ac_dir/$lt_tmp_nm";;
-+ esac
-+ if test -f "$tmp_nm" || test -f "$tmp_nm$ac_exeext" ; then
-+ # Check to see if the nm accepts a BSD-compat flag.
-+ # Adding the `sed 1q' prevents false positives on HP-UX, which says:
-+ # nm: unknown option "B" ignored
-+ case `"$tmp_nm" -B "$tmp_nm" 2>&1 | grep -v '^ *$' | sed '1q'` in
-+ *$tmp_nm*) lt_cv_path_NM="$tmp_nm -B"
-+ break
-+ ;;
-+ *)
-+ case `"$tmp_nm" -p "$tmp_nm" 2>&1 | grep -v '^ *$' | sed '1q'` in
-+ *$tmp_nm*)
-+ lt_cv_path_NM="$tmp_nm -p"
-+ break
-+ ;;
-+ *)
-+ lt_cv_path_NM=${lt_cv_path_NM="$tmp_nm"} # keep the first match, but
-+ continue # so that we can try to find one that supports BSD flags
-+ ;;
-+ esac
-+ ;;
-+ esac
-+ fi
-+ done
-+ IFS="$lt_save_ifs"
-+ done
-+ : ${lt_cv_path_NM=no}
- fi
- { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_path_NM" >&5
- $as_echo "$lt_cv_path_NM" >&6; }
-@@ -6173,8 +6180,8 @@ $as_echo_n "checking whether the shell understands some XSI constructs... " >&6;
- # Try some XSI features
- xsi_shell=no
- ( _lt_dummy="a/b/c"
-- test "${_lt_dummy##*/},${_lt_dummy%/*},"${_lt_dummy%"$_lt_dummy"}, \
-- = c,a/b,, \
-+ test "${_lt_dummy##*/},${_lt_dummy%/*},${_lt_dummy#??}"${_lt_dummy%"$_lt_dummy"}, \
-+ = c,a/b,b/c, \
- && eval 'test $(( 1 + 1 )) -eq 2 \
- && test "${#_lt_dummy}" -eq 5' ) >/dev/null 2>&1 \
- && xsi_shell=yes
-@@ -6223,6 +6230,80 @@ esac
-
-
-
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to convert $build file names to $host format" >&5
-+$as_echo_n "checking how to convert $build file names to $host format... " >&6; }
-+if ${lt_cv_to_host_file_cmd+:} false; then :
-+ $as_echo_n "(cached) " >&6
-+else
-+ case $host in
-+ *-*-mingw* )
-+ case $build in
-+ *-*-mingw* ) # actually msys
-+ lt_cv_to_host_file_cmd=func_convert_file_msys_to_w32
-+ ;;
-+ *-*-cygwin* )
-+ lt_cv_to_host_file_cmd=func_convert_file_cygwin_to_w32
-+ ;;
-+ * ) # otherwise, assume *nix
-+ lt_cv_to_host_file_cmd=func_convert_file_nix_to_w32
-+ ;;
-+ esac
-+ ;;
-+ *-*-cygwin* )
-+ case $build in
-+ *-*-mingw* ) # actually msys
-+ lt_cv_to_host_file_cmd=func_convert_file_msys_to_cygwin
-+ ;;
-+ *-*-cygwin* )
-+ lt_cv_to_host_file_cmd=func_convert_file_noop
-+ ;;
-+ * ) # otherwise, assume *nix
-+ lt_cv_to_host_file_cmd=func_convert_file_nix_to_cygwin
-+ ;;
-+ esac
-+ ;;
-+ * ) # unhandled hosts (and "normal" native builds)
-+ lt_cv_to_host_file_cmd=func_convert_file_noop
-+ ;;
-+esac
-+
-+fi
-+
-+to_host_file_cmd=$lt_cv_to_host_file_cmd
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_to_host_file_cmd" >&5
-+$as_echo "$lt_cv_to_host_file_cmd" >&6; }
-+
-+
-+
-+
-+
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to convert $build file names to toolchain format" >&5
-+$as_echo_n "checking how to convert $build file names to toolchain format... " >&6; }
-+if ${lt_cv_to_tool_file_cmd+:} false; then :
-+ $as_echo_n "(cached) " >&6
-+else
-+ #assume ordinary cross tools, or native build.
-+lt_cv_to_tool_file_cmd=func_convert_file_noop
-+case $host in
-+ *-*-mingw* )
-+ case $build in
-+ *-*-mingw* ) # actually msys
-+ lt_cv_to_tool_file_cmd=func_convert_file_msys_to_w32
-+ ;;
-+ esac
-+ ;;
-+esac
-+
-+fi
-+
-+to_tool_file_cmd=$lt_cv_to_tool_file_cmd
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_to_tool_file_cmd" >&5
-+$as_echo "$lt_cv_to_tool_file_cmd" >&6; }
-+
-+
-+
-+
-+
- { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $LD option to reload object files" >&5
- $as_echo_n "checking for $LD option to reload object files... " >&6; }
- if ${lt_cv_ld_reload_flag+:} false; then :
-@@ -6239,6 +6320,11 @@ case $reload_flag in
- esac
- reload_cmds='$LD$reload_flag -o $output$reload_objs'
- case $host_os in
-+ cygwin* | mingw* | pw32* | cegcc*)
-+ if test "$GCC" != yes; then
-+ reload_cmds=false
-+ fi
-+ ;;
- darwin*)
- if test "$GCC" = yes; then
- reload_cmds='$LTCC $LTCFLAGS -nostdlib ${wl}-r -o $output$reload_objs'
-@@ -6407,7 +6493,8 @@ mingw* | pw32*)
- lt_cv_deplibs_check_method='file_magic ^x86 archive import|^x86 DLL'
- lt_cv_file_magic_cmd='func_win32_libid'
- else
-- lt_cv_deplibs_check_method='file_magic file format pei*-i386(.*architecture: i386)?'
-+ # Keep this pattern in sync with the one in func_win32_libid.
-+ lt_cv_deplibs_check_method='file_magic file format (pei*-i386(.*architecture: i386)?|pe-arm-wince|pe-x86-64)'
- lt_cv_file_magic_cmd='$OBJDUMP -f'
- fi
- ;;
-@@ -6480,7 +6567,7 @@ irix5* | irix6* | nonstopux*)
- ;;
-
- # This must be Linux ELF.
--linux* | k*bsd*-gnu | kopensolaris*-gnu | uclinuxfdpiceabi)
-+linux* | k*bsd*-gnu | kopensolaris*-gnu)
- lt_cv_deplibs_check_method=pass_all
- ;;
-
-@@ -6561,6 +6648,21 @@ esac
- fi
- { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_deplibs_check_method" >&5
- $as_echo "$lt_cv_deplibs_check_method" >&6; }
-+
-+file_magic_glob=
-+want_nocaseglob=no
-+if test "$build" = "$host"; then
-+ case $host_os in
-+ mingw* | pw32*)
-+ if ( shopt | grep nocaseglob ) >/dev/null 2>&1; then
-+ want_nocaseglob=yes
-+ else
-+ file_magic_glob=`echo aAbBcCdDeEfFgGhHiIjJkKlLmMnNoOpPqQrRsStTuUvVwWxXyYzZ | $SED -e "s/\(..\)/s\/[\1]\/[\1]\/g;/g"`
-+ fi
-+ ;;
-+ esac
-+fi
-+
- file_magic_cmd=$lt_cv_file_magic_cmd
- deplibs_check_method=$lt_cv_deplibs_check_method
- test -z "$deplibs_check_method" && deplibs_check_method=unknown
-@@ -6574,11 +6676,177 @@ test -z "$deplibs_check_method" && deplibs_check_method=unknown
-
-
-
-+
-+
-+
-+
-+
-+
-+
-+
-+
-+
-
-
- if test -n "$ac_tool_prefix"; then
-- # Extract the first word of "${ac_tool_prefix}ar", so it can be a program name with args.
--set dummy ${ac_tool_prefix}ar; ac_word=$2
-+ # Extract the first word of "${ac_tool_prefix}dlltool", so it can be a program name with args.
-+set dummy ${ac_tool_prefix}dlltool; ac_word=$2
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-+$as_echo_n "checking for $ac_word... " >&6; }
-+if ${ac_cv_prog_DLLTOOL+:} false; then :
-+ $as_echo_n "(cached) " >&6
-+else
-+ if test -n "$DLLTOOL"; then
-+ ac_cv_prog_DLLTOOL="$DLLTOOL" # Let the user override the test.
-+else
-+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-+for as_dir in $PATH
-+do
-+ IFS=$as_save_IFS
-+ test -z "$as_dir" && as_dir=.
-+ for ac_exec_ext in '' $ac_executable_extensions; do
-+ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
-+ ac_cv_prog_DLLTOOL="${ac_tool_prefix}dlltool"
-+ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
-+ break 2
-+ fi
-+done
-+ done
-+IFS=$as_save_IFS
-+
-+fi
-+fi
-+DLLTOOL=$ac_cv_prog_DLLTOOL
-+if test -n "$DLLTOOL"; then
-+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $DLLTOOL" >&5
-+$as_echo "$DLLTOOL" >&6; }
-+else
-+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-+$as_echo "no" >&6; }
-+fi
-+
-+
-+fi
-+if test -z "$ac_cv_prog_DLLTOOL"; then
-+ ac_ct_DLLTOOL=$DLLTOOL
-+ # Extract the first word of "dlltool", so it can be a program name with args.
-+set dummy dlltool; ac_word=$2
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-+$as_echo_n "checking for $ac_word... " >&6; }
-+if ${ac_cv_prog_ac_ct_DLLTOOL+:} false; then :
-+ $as_echo_n "(cached) " >&6
-+else
-+ if test -n "$ac_ct_DLLTOOL"; then
-+ ac_cv_prog_ac_ct_DLLTOOL="$ac_ct_DLLTOOL" # Let the user override the test.
-+else
-+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-+for as_dir in $PATH
-+do
-+ IFS=$as_save_IFS
-+ test -z "$as_dir" && as_dir=.
-+ for ac_exec_ext in '' $ac_executable_extensions; do
-+ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
-+ ac_cv_prog_ac_ct_DLLTOOL="dlltool"
-+ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
-+ break 2
-+ fi
-+done
-+ done
-+IFS=$as_save_IFS
-+
-+fi
-+fi
-+ac_ct_DLLTOOL=$ac_cv_prog_ac_ct_DLLTOOL
-+if test -n "$ac_ct_DLLTOOL"; then
-+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_DLLTOOL" >&5
-+$as_echo "$ac_ct_DLLTOOL" >&6; }
-+else
-+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-+$as_echo "no" >&6; }
-+fi
-+
-+ if test "x$ac_ct_DLLTOOL" = x; then
-+ DLLTOOL="false"
-+ else
-+ case $cross_compiling:$ac_tool_warned in
-+yes:)
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
-+$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
-+ac_tool_warned=yes ;;
-+esac
-+ DLLTOOL=$ac_ct_DLLTOOL
-+ fi
-+else
-+ DLLTOOL="$ac_cv_prog_DLLTOOL"
-+fi
-+
-+test -z "$DLLTOOL" && DLLTOOL=dlltool
-+
-+
-+
-+
-+
-+
-+
-+
-+
-+
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to associate runtime and link libraries" >&5
-+$as_echo_n "checking how to associate runtime and link libraries... " >&6; }
-+if ${lt_cv_sharedlib_from_linklib_cmd+:} false; then :
-+ $as_echo_n "(cached) " >&6
-+else
-+ lt_cv_sharedlib_from_linklib_cmd='unknown'
-+
-+case $host_os in
-+cygwin* | mingw* | pw32* | cegcc*)
-+ # two different shell functions defined in ltmain.sh
-+ # decide which to use based on capabilities of $DLLTOOL
-+ case `$DLLTOOL --help 2>&1` in
-+ *--identify-strict*)
-+ lt_cv_sharedlib_from_linklib_cmd=func_cygming_dll_for_implib
-+ ;;
-+ *)
-+ lt_cv_sharedlib_from_linklib_cmd=func_cygming_dll_for_implib_fallback
-+ ;;
-+ esac
-+ ;;
-+*)
-+ # fallback: assume linklib IS sharedlib
-+ lt_cv_sharedlib_from_linklib_cmd="$ECHO"
-+ ;;
-+esac
-+
-+fi
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_sharedlib_from_linklib_cmd" >&5
-+$as_echo "$lt_cv_sharedlib_from_linklib_cmd" >&6; }
-+sharedlib_from_linklib_cmd=$lt_cv_sharedlib_from_linklib_cmd
-+test -z "$sharedlib_from_linklib_cmd" && sharedlib_from_linklib_cmd=$ECHO
-+
-+
-+
-+
-+
-+
-+
-+plugin_option=
-+plugin_names="liblto_plugin.so liblto_plugin-0.dll cyglto_plugin-0.dll"
-+for plugin in $plugin_names; do
-+ plugin_so=`${CC} ${CFLAGS} --print-prog-name $plugin`
-+ if test x$plugin_so = x$plugin; then
-+ plugin_so=`${CC} ${CFLAGS} --print-file-name $plugin`
-+ fi
-+ if test x$plugin_so != x$plugin; then
-+ plugin_option="--plugin $plugin_so"
-+ break
-+ fi
-+done
-+
-+if test -n "$ac_tool_prefix"; then
-+ for ac_prog in ar
-+ do
-+ # Extract the first word of "$ac_tool_prefix$ac_prog", so it can be a program name with args.
-+set dummy $ac_tool_prefix$ac_prog; ac_word=$2
- { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
- $as_echo_n "checking for $ac_word... " >&6; }
- if ${ac_cv_prog_AR+:} false; then :
-@@ -6594,7 +6862,7 @@ do
- test -z "$as_dir" && as_dir=.
- for ac_exec_ext in '' $ac_executable_extensions; do
- if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
-- ac_cv_prog_AR="${ac_tool_prefix}ar"
-+ ac_cv_prog_AR="$ac_tool_prefix$ac_prog"
- $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
- break 2
- fi
-@@ -6614,11 +6882,15 @@ $as_echo "no" >&6; }
- fi
-
-
-+ test -n "$AR" && break
-+ done
- fi
--if test -z "$ac_cv_prog_AR"; then
-+if test -z "$AR"; then
- ac_ct_AR=$AR
-- # Extract the first word of "ar", so it can be a program name with args.
--set dummy ar; ac_word=$2
-+ for ac_prog in ar
-+do
-+ # Extract the first word of "$ac_prog", so it can be a program name with args.
-+set dummy $ac_prog; ac_word=$2
- { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
- $as_echo_n "checking for $ac_word... " >&6; }
- if ${ac_cv_prog_ac_ct_AR+:} false; then :
-@@ -6634,7 +6906,7 @@ do
- test -z "$as_dir" && as_dir=.
- for ac_exec_ext in '' $ac_executable_extensions; do
- if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
-- ac_cv_prog_ac_ct_AR="ar"
-+ ac_cv_prog_ac_ct_AR="$ac_prog"
- $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
- break 2
- fi
-@@ -6653,6 +6925,10 @@ else
- $as_echo "no" >&6; }
- fi
-
-+
-+ test -n "$ac_ct_AR" && break
-+done
-+
- if test "x$ac_ct_AR" = x; then
- AR="false"
- else
-@@ -6664,12 +6940,21 @@ ac_tool_warned=yes ;;
- esac
- AR=$ac_ct_AR
- fi
--else
-- AR="$ac_cv_prog_AR"
- fi
-
--test -z "$AR" && AR=ar
--test -z "$AR_FLAGS" && AR_FLAGS=cru
-+ touch conftest.c
-+ $AR $plugin_option rc conftest.a conftest.c
-+ if test "$?" != 0; then
-+ { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: Failed: $AR $plugin_option rc" >&5
-+$as_echo "$as_me: WARNING: Failed: $AR $plugin_option rc" >&2;}
-+ else
-+ AR="$AR $plugin_option"
-+ fi
-+ rm -f conftest.*
-+: ${AR=ar}
-+: ${AR_FLAGS=cru}
-+
-+
-
-
-
-@@ -6679,6 +6964,62 @@ test -z "$AR_FLAGS" && AR_FLAGS=cru
-
-
-
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for archiver @FILE support" >&5
-+$as_echo_n "checking for archiver @FILE support... " >&6; }
-+if ${lt_cv_ar_at_file+:} false; then :
-+ $as_echo_n "(cached) " >&6
-+else
-+ lt_cv_ar_at_file=no
-+ cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-+/* end confdefs.h. */
-+
-+int
-+main ()
-+{
-+
-+ ;
-+ return 0;
-+}
-+_ACEOF
-+if ac_fn_c_try_compile "$LINENO"; then :
-+ echo conftest.$ac_objext > conftest.lst
-+ lt_ar_try='$AR $AR_FLAGS libconftest.a @conftest.lst >&5'
-+ { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$lt_ar_try\""; } >&5
-+ (eval $lt_ar_try) 2>&5
-+ ac_status=$?
-+ $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
-+ test $ac_status = 0; }
-+ if test "$ac_status" -eq 0; then
-+ # Ensure the archiver fails upon bogus file names.
-+ rm -f conftest.$ac_objext libconftest.a
-+ { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$lt_ar_try\""; } >&5
-+ (eval $lt_ar_try) 2>&5
-+ ac_status=$?
-+ $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
-+ test $ac_status = 0; }
-+ if test "$ac_status" -ne 0; then
-+ lt_cv_ar_at_file=@
-+ fi
-+ fi
-+ rm -f conftest.* libconftest.a
-+
-+fi
-+rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
-+
-+fi
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_ar_at_file" >&5
-+$as_echo "$lt_cv_ar_at_file" >&6; }
-+
-+if test "x$lt_cv_ar_at_file" = xno; then
-+ archiver_list_spec=
-+else
-+ archiver_list_spec=$lt_cv_ar_at_file
-+fi
-+
-+
-+
-+
-+
-
-
- if test -n "$ac_tool_prefix"; then
-@@ -6873,6 +7214,11 @@ else
- fi
-
- test -z "$RANLIB" && RANLIB=:
-+if test -n "$plugin_option" && test "$RANLIB" != ":"; then
-+ if $RANLIB --help 2>&1 | grep -q "\--plugin"; then
-+ RANLIB="$RANLIB $plugin_option"
-+ fi
-+fi
-
-
-
-@@ -6987,7 +7333,7 @@ osf*)
- symcode='[BCDEGQRST]'
- ;;
- solaris*)
-- symcode='[BDRT]'
-+ symcode='[BCDRT]'
- ;;
- sco3.2v5*)
- symcode='[DT]'
-@@ -7015,8 +7361,8 @@ esac
- lt_cv_sys_global_symbol_to_cdecl="sed -n -e 's/^T .* \(.*\)$/extern int \1();/p' -e 's/^$symcode* .* \(.*\)$/extern char \1;/p'"
-
- # Transform an extracted symbol line into symbol name and symbol address
--lt_cv_sys_global_symbol_to_c_name_address="sed -n -e 's/^: \([^ ]*\) $/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"\2\", (void *) \&\2},/p'"
--lt_cv_sys_global_symbol_to_c_name_address_lib_prefix="sed -n -e 's/^: \([^ ]*\) $/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \(lib[^ ]*\)$/ {\"\2\", (void *) \&\2},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"lib\2\", (void *) \&\2},/p'"
-+lt_cv_sys_global_symbol_to_c_name_address="sed -n -e 's/^: \([^ ]*\)[ ]*$/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"\2\", (void *) \&\2},/p'"
-+lt_cv_sys_global_symbol_to_c_name_address_lib_prefix="sed -n -e 's/^: \([^ ]*\)[ ]*$/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \(lib[^ ]*\)$/ {\"\2\", (void *) \&\2},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"lib\2\", (void *) \&\2},/p'"
-
- # Handle CRLF in mingw tool chain
- opt_cr=
-@@ -7052,6 +7398,7 @@ for ac_symprfx in "" "_"; do
- else
- lt_cv_sys_global_symbol_pipe="sed -n -e 's/^.*[ ]\($symcode$symcode*\)[ ][ ]*$ac_symprfx$sympat$opt_cr$/$symxfrm/p'"
- fi
-+ lt_cv_sys_global_symbol_pipe="$lt_cv_sys_global_symbol_pipe | sed '/ __gnu_lto/d'"
-
- # Check to see that the pipe works correctly.
- pipe_works=no
-@@ -7093,6 +7440,18 @@ _LT_EOF
- if $GREP ' nm_test_var$' "$nlist" >/dev/null; then
- if $GREP ' nm_test_func$' "$nlist" >/dev/null; then
- cat <<_LT_EOF > conftest.$ac_ext
-+/* Keep this code in sync between libtool.m4, ltmain, lt_system.h, and tests. */
-+#if defined(_WIN32) || defined(__CYGWIN__) || defined(_WIN32_WCE)
-+/* DATA imports from DLLs on WIN32 con't be const, because runtime
-+ relocations are performed -- see ld's documentation on pseudo-relocs. */
-+# define LT_DLSYM_CONST
-+#elif defined(__osf__)
-+/* This system does not cope well with relocations in const data. */
-+# define LT_DLSYM_CONST
-+#else
-+# define LT_DLSYM_CONST const
-+#endif
-+
- #ifdef __cplusplus
- extern "C" {
- #endif
-@@ -7104,7 +7463,7 @@ _LT_EOF
- cat <<_LT_EOF >> conftest.$ac_ext
-
- /* The mapping between symbol names and symbols. */
--const struct {
-+LT_DLSYM_CONST struct {
- const char *name;
- void *address;
- }
-@@ -7130,8 +7489,8 @@ static const void *lt_preloaded_setup() {
- _LT_EOF
- # Now try linking the two files.
- mv conftest.$ac_objext conftstm.$ac_objext
-- lt_save_LIBS="$LIBS"
-- lt_save_CFLAGS="$CFLAGS"
-+ lt_globsym_save_LIBS=$LIBS
-+ lt_globsym_save_CFLAGS=$CFLAGS
- LIBS="conftstm.$ac_objext"
- CFLAGS="$CFLAGS$lt_prog_compiler_no_builtin_flag"
- if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_link\""; } >&5
-@@ -7141,8 +7500,8 @@ _LT_EOF
- test $ac_status = 0; } && test -s conftest${ac_exeext}; then
- pipe_works=yes
- fi
-- LIBS="$lt_save_LIBS"
-- CFLAGS="$lt_save_CFLAGS"
-+ LIBS=$lt_globsym_save_LIBS
-+ CFLAGS=$lt_globsym_save_CFLAGS
- else
- echo "cannot find nm_test_func in $nlist" >&5
- fi
-@@ -7179,6 +7538,17 @@ else
- $as_echo "ok" >&6; }
- fi
-
-+# Response file support.
-+if test "$lt_cv_nm_interface" = "MS dumpbin"; then
-+ nm_file_list_spec='@'
-+elif $NM --help 2>/dev/null | grep '[@]FILE' >/dev/null; then
-+ nm_file_list_spec='@'
-+fi
-+
-+
-+
-+
-+
-
-
-
-@@ -7195,6 +7565,44 @@ fi
-
-
-
-+
-+
-+
-+
-+
-+
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for sysroot" >&5
-+$as_echo_n "checking for sysroot... " >&6; }
-+
-+# Check whether --with-libtool-sysroot was given.
-+if test "${with_libtool_sysroot+set}" = set; then :
-+ withval=$with_libtool_sysroot;
-+else
-+ with_libtool_sysroot=no
-+fi
-+
-+
-+lt_sysroot=
-+case ${with_libtool_sysroot} in #(
-+ yes)
-+ if test "$GCC" = yes; then
-+ lt_sysroot=`$CC --print-sysroot 2>/dev/null`
-+ fi
-+ ;; #(
-+ /*)
-+ lt_sysroot=`echo "$with_libtool_sysroot" | sed -e "$sed_quote_subst"`
-+ ;; #(
-+ no|'')
-+ ;; #(
-+ *)
-+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: ${with_libtool_sysroot}" >&5
-+$as_echo "${with_libtool_sysroot}" >&6; }
-+ as_fn_error $? "The sysroot must be an absolute path." "$LINENO" 5
-+ ;;
-+esac
-+
-+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: ${lt_sysroot:-no}" >&5
-+$as_echo "${lt_sysroot:-no}" >&6; }
-
-
-
-@@ -7372,39 +7780,156 @@ ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $
- ac_compiler_gnu=$ac_cv_c_compiler_gnu
-
- fi
--{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_cc_needs_belf" >&5
--$as_echo "$lt_cv_cc_needs_belf" >&6; }
-- if test x"$lt_cv_cc_needs_belf" != x"yes"; then
-- # this is probably gcc 2.8.0, egcs 1.0 or newer; no need for -belf
-- CFLAGS="$SAVE_CFLAGS"
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_cc_needs_belf" >&5
-+$as_echo "$lt_cv_cc_needs_belf" >&6; }
-+ if test x"$lt_cv_cc_needs_belf" != x"yes"; then
-+ # this is probably gcc 2.8.0, egcs 1.0 or newer; no need for -belf
-+ CFLAGS="$SAVE_CFLAGS"
-+ fi
-+ ;;
-+sparc*-*solaris*)
-+ # Find out which ABI we are using.
-+ echo 'int i;' > conftest.$ac_ext
-+ if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5
-+ (eval $ac_compile) 2>&5
-+ ac_status=$?
-+ $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
-+ test $ac_status = 0; }; then
-+ case `/usr/bin/file conftest.o` in
-+ *64-bit*)
-+ case $lt_cv_prog_gnu_ld in
-+ yes*) LD="${LD-ld} -m elf64_sparc" ;;
-+ *)
-+ if ${LD-ld} -64 -r -o conftest2.o conftest.o >/dev/null 2>&1; then
-+ LD="${LD-ld} -64"
-+ fi
-+ ;;
-+ esac
-+ ;;
-+ esac
-+ fi
-+ rm -rf conftest*
-+ ;;
-+esac
-+
-+need_locks="$enable_libtool_lock"
-+
-+if test -n "$ac_tool_prefix"; then
-+ # Extract the first word of "${ac_tool_prefix}mt", so it can be a program name with args.
-+set dummy ${ac_tool_prefix}mt; ac_word=$2
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-+$as_echo_n "checking for $ac_word... " >&6; }
-+if ${ac_cv_prog_MANIFEST_TOOL+:} false; then :
-+ $as_echo_n "(cached) " >&6
-+else
-+ if test -n "$MANIFEST_TOOL"; then
-+ ac_cv_prog_MANIFEST_TOOL="$MANIFEST_TOOL" # Let the user override the test.
-+else
-+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-+for as_dir in $PATH
-+do
-+ IFS=$as_save_IFS
-+ test -z "$as_dir" && as_dir=.
-+ for ac_exec_ext in '' $ac_executable_extensions; do
-+ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
-+ ac_cv_prog_MANIFEST_TOOL="${ac_tool_prefix}mt"
-+ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
-+ break 2
-+ fi
-+done
-+ done
-+IFS=$as_save_IFS
-+
-+fi
-+fi
-+MANIFEST_TOOL=$ac_cv_prog_MANIFEST_TOOL
-+if test -n "$MANIFEST_TOOL"; then
-+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $MANIFEST_TOOL" >&5
-+$as_echo "$MANIFEST_TOOL" >&6; }
-+else
-+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-+$as_echo "no" >&6; }
-+fi
-+
-+
-+fi
-+if test -z "$ac_cv_prog_MANIFEST_TOOL"; then
-+ ac_ct_MANIFEST_TOOL=$MANIFEST_TOOL
-+ # Extract the first word of "mt", so it can be a program name with args.
-+set dummy mt; ac_word=$2
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-+$as_echo_n "checking for $ac_word... " >&6; }
-+if ${ac_cv_prog_ac_ct_MANIFEST_TOOL+:} false; then :
-+ $as_echo_n "(cached) " >&6
-+else
-+ if test -n "$ac_ct_MANIFEST_TOOL"; then
-+ ac_cv_prog_ac_ct_MANIFEST_TOOL="$ac_ct_MANIFEST_TOOL" # Let the user override the test.
-+else
-+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-+for as_dir in $PATH
-+do
-+ IFS=$as_save_IFS
-+ test -z "$as_dir" && as_dir=.
-+ for ac_exec_ext in '' $ac_executable_extensions; do
-+ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
-+ ac_cv_prog_ac_ct_MANIFEST_TOOL="mt"
-+ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
-+ break 2
-+ fi
-+done
-+ done
-+IFS=$as_save_IFS
-+
-+fi
-+fi
-+ac_ct_MANIFEST_TOOL=$ac_cv_prog_ac_ct_MANIFEST_TOOL
-+if test -n "$ac_ct_MANIFEST_TOOL"; then
-+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_MANIFEST_TOOL" >&5
-+$as_echo "$ac_ct_MANIFEST_TOOL" >&6; }
-+else
-+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-+$as_echo "no" >&6; }
-+fi
-+
-+ if test "x$ac_ct_MANIFEST_TOOL" = x; then
-+ MANIFEST_TOOL=":"
-+ else
-+ case $cross_compiling:$ac_tool_warned in
-+yes:)
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
-+$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
-+ac_tool_warned=yes ;;
-+esac
-+ MANIFEST_TOOL=$ac_ct_MANIFEST_TOOL
- fi
-- ;;
--sparc*-*solaris*)
-- # Find out which ABI we are using.
-- echo 'int i;' > conftest.$ac_ext
-- if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5
-- (eval $ac_compile) 2>&5
-- ac_status=$?
-- $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
-- test $ac_status = 0; }; then
-- case `/usr/bin/file conftest.o` in
-- *64-bit*)
-- case $lt_cv_prog_gnu_ld in
-- yes*) LD="${LD-ld} -m elf64_sparc" ;;
-- *)
-- if ${LD-ld} -64 -r -o conftest2.o conftest.o >/dev/null 2>&1; then
-- LD="${LD-ld} -64"
-- fi
-- ;;
-- esac
-- ;;
-- esac
-+else
-+ MANIFEST_TOOL="$ac_cv_prog_MANIFEST_TOOL"
-+fi
-+
-+test -z "$MANIFEST_TOOL" && MANIFEST_TOOL=mt
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking if $MANIFEST_TOOL is a manifest tool" >&5
-+$as_echo_n "checking if $MANIFEST_TOOL is a manifest tool... " >&6; }
-+if ${lt_cv_path_mainfest_tool+:} false; then :
-+ $as_echo_n "(cached) " >&6
-+else
-+ lt_cv_path_mainfest_tool=no
-+ echo "$as_me:$LINENO: $MANIFEST_TOOL '-?'" >&5
-+ $MANIFEST_TOOL '-?' 2>conftest.err > conftest.out
-+ cat conftest.err >&5
-+ if $GREP 'Manifest Tool' conftest.out > /dev/null; then
-+ lt_cv_path_mainfest_tool=yes
- fi
-- rm -rf conftest*
-- ;;
--esac
-+ rm -f conftest*
-+fi
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_path_mainfest_tool" >&5
-+$as_echo "$lt_cv_path_mainfest_tool" >&6; }
-+if test "x$lt_cv_path_mainfest_tool" != xyes; then
-+ MANIFEST_TOOL=:
-+fi
-+
-+
-+
-
--need_locks="$enable_libtool_lock"
-
-
- case $host_os in
-@@ -7969,6 +8494,8 @@ _LT_EOF
- $LTCC $LTCFLAGS -c -o conftest.o conftest.c 2>&5
- echo "$AR cru libconftest.a conftest.o" >&5
- $AR cru libconftest.a conftest.o 2>&5
-+ echo "$RANLIB libconftest.a" >&5
-+ $RANLIB libconftest.a 2>&5
- cat > conftest.c << _LT_EOF
- int main() { return 0;}
- _LT_EOF
-@@ -7986,25 +8513,23 @@ _LT_EOF
- fi
- { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_ld_force_load" >&5
- $as_echo "$lt_cv_ld_force_load" >&6; }
-- # Allow for Darwin 4-7 (macOS 10.0-10.3) although these are not expect to
-- # build without first building modern cctools / linker.
-- case $host_cpu-$host_os in
-- *-rhapsody* | *-darwin1.[012])
-+ case $host_os in
-+ rhapsody* | darwin1.[012])
- _lt_dar_allow_undefined='${wl}-undefined ${wl}suppress' ;;
-- *-darwin1.*)
-+ darwin1.*)
- _lt_dar_allow_undefined='${wl}-flat_namespace ${wl}-undefined ${wl}suppress' ;;
-- *-darwin*)
-- # darwin 5.x (macOS 10.1) onwards we only need to adjust when the
-- # deployment target is forced to an earlier version.
-- case ${MACOSX_DEPLOYMENT_TARGET-UNSET},$host in
-- UNSET,*-darwin[89]*|UNSET,*-darwin[12][0123456789]*)
-- ;;
-+ darwin*) # darwin 5.x on
-+ # if running on 10.5 or later, the deployment target defaults
-+ # to the OS version, if on x86, and 10.4, the deployment
-+ # target defaults to 10.4. Don't you love it?
-+ case ${MACOSX_DEPLOYMENT_TARGET-10.0},$host in
-+ 10.0,*86*-darwin8*|10.0,*-darwin[91]*)
-+ _lt_dar_allow_undefined='${wl}-undefined ${wl}dynamic_lookup' ;;
- 10.[012][,.]*)
-- _lt_dar_allow_undefined='${wl}-flat_namespace ${wl}-undefined ${wl}suppress'
-- ;;
-- *)
-- ;;
-- esac
-+ _lt_dar_allow_undefined='${wl}-flat_namespace ${wl}-undefined ${wl}suppress' ;;
-+ 10.*)
-+ _lt_dar_allow_undefined='${wl}-undefined ${wl}dynamic_lookup' ;;
-+ esac
- ;;
- esac
- if test "$lt_cv_apple_cc_single_mod" = "yes"; then
-@@ -8553,8 +9078,6 @@ fi
- lt_prog_compiler_pic=
- lt_prog_compiler_static=
-
--{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $compiler option to produce PIC" >&5
--$as_echo_n "checking for $compiler option to produce PIC... " >&6; }
-
- if test "$GCC" = yes; then
- lt_prog_compiler_wl='-Wl,'
-@@ -8720,6 +9243,12 @@ $as_echo_n "checking for $compiler option to produce PIC... " >&6; }
- lt_prog_compiler_pic='--shared'
- lt_prog_compiler_static='--static'
- ;;
-+ nagfor*)
-+ # NAG Fortran compiler
-+ lt_prog_compiler_wl='-Wl,-Wl,,'
-+ lt_prog_compiler_pic='-PIC'
-+ lt_prog_compiler_static='-Bstatic'
-+ ;;
- pgcc* | pgf77* | pgf90* | pgf95* | pgfortran*)
- # Portland Group compilers (*not* the Pentium gcc compiler,
- # which looks to be a dead project)
-@@ -8782,7 +9311,7 @@ $as_echo_n "checking for $compiler option to produce PIC... " >&6; }
- lt_prog_compiler_pic='-KPIC'
- lt_prog_compiler_static='-Bstatic'
- case $cc_basename in
-- f77* | f90* | f95*)
-+ f77* | f90* | f95* | sunf77* | sunf90* | sunf95*)
- lt_prog_compiler_wl='-Qoption ld ';;
- *)
- lt_prog_compiler_wl='-Wl,';;
-@@ -8839,13 +9368,17 @@ case $host_os in
- lt_prog_compiler_pic="$lt_prog_compiler_pic -DPIC"
- ;;
- esac
--{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_prog_compiler_pic" >&5
--$as_echo "$lt_prog_compiler_pic" >&6; }
--
--
--
--
-
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $compiler option to produce PIC" >&5
-+$as_echo_n "checking for $compiler option to produce PIC... " >&6; }
-+if ${lt_cv_prog_compiler_pic+:} false; then :
-+ $as_echo_n "(cached) " >&6
-+else
-+ lt_cv_prog_compiler_pic=$lt_prog_compiler_pic
-+fi
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_pic" >&5
-+$as_echo "$lt_cv_prog_compiler_pic" >&6; }
-+lt_prog_compiler_pic=$lt_cv_prog_compiler_pic
-
- #
- # Check to make sure the PIC flag actually works.
-@@ -8906,6 +9439,11 @@ fi
-
-
-
-+
-+
-+
-+
-+
- #
- # Check to make sure the static flag actually works.
- #
-@@ -9256,7 +9794,8 @@ _LT_EOF
- allow_undefined_flag=unsupported
- always_export_symbols=no
- enable_shared_with_static_runtimes=yes
-- export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1 DATA/'\'' | $SED -e '\''/^[AITW][ ]/s/.*[ ]//'\'' | sort | uniq > $export_symbols'
-+ export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1 DATA/;s/^.*[ ]__nm__\([^ ]*\)[ ][^ ]*/\1 DATA/;/^I[ ]/d;/^[AITW][ ]/s/.* //'\'' | sort | uniq > $export_symbols'
-+ exclude_expsyms='[_]+GLOBAL_OFFSET_TABLE_|[_]+GLOBAL__[FID]_.*|[_]+head_[A-Za-z0-9_]+_dll|[A-Za-z0-9_]+_dll_iname'
-
- if $LD --help 2>&1 | $GREP 'auto-import' > /dev/null; then
- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib'
-@@ -9294,7 +9833,7 @@ _LT_EOF
- archive_expsym_cmds='sed "s,^,_," $export_symbols >$output_objdir/$soname.expsym~$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-h,$soname ${wl}--retain-symbols-file,$output_objdir/$soname.expsym ${wl}--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib'
- ;;
-
-- gnu* | linux* | tpf* | k*bsd*-gnu | kopensolaris*-gnu | uclinuxfdpiceabi)
-+ gnu* | linux* | tpf* | k*bsd*-gnu | kopensolaris*-gnu)
- tmp_diet=no
- if test "$host_os" = linux-dietlibc; then
- case $cc_basename in
-@@ -9355,12 +9894,12 @@ _LT_EOF
- whole_archive_flag_spec='--whole-archive$convenience --no-whole-archive'
- hardcode_libdir_flag_spec=
- hardcode_libdir_flag_spec_ld='-rpath $libdir'
-- archive_cmds='$LD -shared $libobjs $deplibs $compiler_flags -soname $soname -o $lib'
-+ archive_cmds='$LD -shared $libobjs $deplibs $linker_flags -soname $soname -o $lib'
- if test "x$supports_anon_versioning" = xyes; then
- archive_expsym_cmds='echo "{ global:" > $output_objdir/$libname.ver~
- cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~
- echo "local: *; };" >> $output_objdir/$libname.ver~
-- $LD -shared $libobjs $deplibs $compiler_flags -soname $soname -version-script $output_objdir/$libname.ver -o $lib'
-+ $LD -shared $libobjs $deplibs $linker_flags -soname $soname -version-script $output_objdir/$libname.ver -o $lib'
- fi
- ;;
- esac
-@@ -9374,8 +9913,8 @@ _LT_EOF
- archive_cmds='$LD -Bshareable $libobjs $deplibs $linker_flags -o $lib'
- wlarc=
- else
-- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
-- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
-+ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
-+ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
- fi
- ;;
-
-@@ -9393,8 +9932,8 @@ _LT_EOF
-
- _LT_EOF
- elif $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then
-- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
-- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
-+ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
-+ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
- else
- ld_shlibs=no
- fi
-@@ -9440,8 +9979,8 @@ _LT_EOF
-
- *)
- if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then
-- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
-- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
-+ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
-+ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
- else
- ld_shlibs=no
- fi
-@@ -9571,7 +10110,13 @@ _LT_EOF
- allow_undefined_flag='-berok'
- # Determine the default libpath from the value encoded in an
- # empty executable.
-- cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-+ if test "${lt_cv_aix_libpath+set}" = set; then
-+ aix_libpath=$lt_cv_aix_libpath
-+else
-+ if ${lt_cv_aix_libpath_+:} false; then :
-+ $as_echo_n "(cached) " >&6
-+else
-+ cat confdefs.h - <<_ACEOF >conftest.$ac_ext
- /* end confdefs.h. */
-
- int
-@@ -9584,22 +10129,29 @@ main ()
- _ACEOF
- if ac_fn_c_try_link "$LINENO"; then :
-
--lt_aix_libpath_sed='
-- /Import File Strings/,/^$/ {
-- /^0/ {
-- s/^0 *\(.*\)$/\1/
-- p
-- }
-- }'
--aix_libpath=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
--# Check for a 64-bit object if we didn't find anything.
--if test -z "$aix_libpath"; then
-- aix_libpath=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
--fi
-+ lt_aix_libpath_sed='
-+ /Import File Strings/,/^$/ {
-+ /^0/ {
-+ s/^0 *\([^ ]*\) *$/\1/
-+ p
-+ }
-+ }'
-+ lt_cv_aix_libpath_=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
-+ # Check for a 64-bit object if we didn't find anything.
-+ if test -z "$lt_cv_aix_libpath_"; then
-+ lt_cv_aix_libpath_=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
-+ fi
- fi
- rm -f core conftest.err conftest.$ac_objext \
- conftest$ac_exeext conftest.$ac_ext
--if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
-+ if test -z "$lt_cv_aix_libpath_"; then
-+ lt_cv_aix_libpath_="/usr/lib:/lib"
-+ fi
-+
-+fi
-+
-+ aix_libpath=$lt_cv_aix_libpath_
-+fi
-
- hardcode_libdir_flag_spec='${wl}-blibpath:$libdir:'"$aix_libpath"
- archive_expsym_cmds='$CC -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags `if test "x${allow_undefined_flag}" != "x"; then func_echo_all "${wl}${allow_undefined_flag}"; else :; fi` '"\${wl}$exp_sym_flag:\$export_symbols $shared_flag"
-@@ -9611,7 +10163,13 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
- else
- # Determine the default libpath from the value encoded in an
- # empty executable.
-- cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-+ if test "${lt_cv_aix_libpath+set}" = set; then
-+ aix_libpath=$lt_cv_aix_libpath
-+else
-+ if ${lt_cv_aix_libpath_+:} false; then :
-+ $as_echo_n "(cached) " >&6
-+else
-+ cat confdefs.h - <<_ACEOF >conftest.$ac_ext
- /* end confdefs.h. */
-
- int
-@@ -9624,22 +10182,29 @@ main ()
- _ACEOF
- if ac_fn_c_try_link "$LINENO"; then :
-
--lt_aix_libpath_sed='
-- /Import File Strings/,/^$/ {
-- /^0/ {
-- s/^0 *\(.*\)$/\1/
-- p
-- }
-- }'
--aix_libpath=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
--# Check for a 64-bit object if we didn't find anything.
--if test -z "$aix_libpath"; then
-- aix_libpath=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
--fi
-+ lt_aix_libpath_sed='
-+ /Import File Strings/,/^$/ {
-+ /^0/ {
-+ s/^0 *\([^ ]*\) *$/\1/
-+ p
-+ }
-+ }'
-+ lt_cv_aix_libpath_=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
-+ # Check for a 64-bit object if we didn't find anything.
-+ if test -z "$lt_cv_aix_libpath_"; then
-+ lt_cv_aix_libpath_=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
-+ fi
- fi
- rm -f core conftest.err conftest.$ac_objext \
- conftest$ac_exeext conftest.$ac_ext
--if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
-+ if test -z "$lt_cv_aix_libpath_"; then
-+ lt_cv_aix_libpath_="/usr/lib:/lib"
-+ fi
-+
-+fi
-+
-+ aix_libpath=$lt_cv_aix_libpath_
-+fi
-
- hardcode_libdir_flag_spec='${wl}-blibpath:$libdir:'"$aix_libpath"
- # Warning - without using the other run time loading flags,
-@@ -9684,20 +10249,63 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
- # Microsoft Visual C++.
- # hardcode_libdir_flag_spec is actually meaningless, as there is
- # no search path for DLLs.
-- hardcode_libdir_flag_spec=' '
-- allow_undefined_flag=unsupported
-- # Tell ltmain to make .lib files, not .a files.
-- libext=lib
-- # Tell ltmain to make .dll files, not .so files.
-- shrext_cmds=".dll"
-- # FIXME: Setting linknames here is a bad hack.
-- archive_cmds='$CC -o $lib $libobjs $compiler_flags `func_echo_all "$deplibs" | $SED '\''s/ -lc$//'\''` -link -dll~linknames='
-- # The linker will automatically build a .lib file if we build a DLL.
-- old_archive_from_new_cmds='true'
-- # FIXME: Should let the user specify the lib program.
-- old_archive_cmds='lib -OUT:$oldlib$oldobjs$old_deplibs'
-- fix_srcfile_path='`cygpath -w "$srcfile"`'
-- enable_shared_with_static_runtimes=yes
-+ case $cc_basename in
-+ cl*)
-+ # Native MSVC
-+ hardcode_libdir_flag_spec=' '
-+ allow_undefined_flag=unsupported
-+ always_export_symbols=yes
-+ file_list_spec='@'
-+ # Tell ltmain to make .lib files, not .a files.
-+ libext=lib
-+ # Tell ltmain to make .dll files, not .so files.
-+ shrext_cmds=".dll"
-+ # FIXME: Setting linknames here is a bad hack.
-+ archive_cmds='$CC -o $output_objdir/$soname $libobjs $compiler_flags $deplibs -Wl,-dll~linknames='
-+ archive_expsym_cmds='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then
-+ sed -n -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' -e '1\\\!p' < $export_symbols > $output_objdir/$soname.exp;
-+ else
-+ sed -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' < $export_symbols > $output_objdir/$soname.exp;
-+ fi~
-+ $CC -o $tool_output_objdir$soname $libobjs $compiler_flags $deplibs "@$tool_output_objdir$soname.exp" -Wl,-DLL,-IMPLIB:"$tool_output_objdir$libname.dll.lib"~
-+ linknames='
-+ # The linker will not automatically build a static lib if we build a DLL.
-+ # _LT_TAGVAR(old_archive_from_new_cmds, )='true'
-+ enable_shared_with_static_runtimes=yes
-+ export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1,DATA/'\'' | $SED -e '\''/^[AITW][ ]/s/.*[ ]//'\'' | sort | uniq > $export_symbols'
-+ # Don't use ranlib
-+ old_postinstall_cmds='chmod 644 $oldlib'
-+ postlink_cmds='lt_outputfile="@OUTPUT@"~
-+ lt_tool_outputfile="@TOOL_OUTPUT@"~
-+ case $lt_outputfile in
-+ *.exe|*.EXE) ;;
-+ *)
-+ lt_outputfile="$lt_outputfile.exe"
-+ lt_tool_outputfile="$lt_tool_outputfile.exe"
-+ ;;
-+ esac~
-+ if test "$MANIFEST_TOOL" != ":" && test -f "$lt_outputfile.manifest"; then
-+ $MANIFEST_TOOL -manifest "$lt_tool_outputfile.manifest" -outputresource:"$lt_tool_outputfile" || exit 1;
-+ $RM "$lt_outputfile.manifest";
-+ fi'
-+ ;;
-+ *)
-+ # Assume MSVC wrapper
-+ hardcode_libdir_flag_spec=' '
-+ allow_undefined_flag=unsupported
-+ # Tell ltmain to make .lib files, not .a files.
-+ libext=lib
-+ # Tell ltmain to make .dll files, not .so files.
-+ shrext_cmds=".dll"
-+ # FIXME: Setting linknames here is a bad hack.
-+ archive_cmds='$CC -o $lib $libobjs $compiler_flags `func_echo_all "$deplibs" | $SED '\''s/ -lc$//'\''` -link -dll~linknames='
-+ # The linker will automatically build a .lib file if we build a DLL.
-+ old_archive_from_new_cmds='true'
-+ # FIXME: Should let the user specify the lib program.
-+ old_archive_cmds='lib -OUT:$oldlib$oldobjs$old_deplibs'
-+ enable_shared_with_static_runtimes=yes
-+ ;;
-+ esac
- ;;
-
- darwin* | rhapsody*)
-@@ -9758,7 +10366,7 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
-
- # FreeBSD 3 and greater uses gcc -shared to do shared libraries.
- freebsd* | dragonfly*)
-- archive_cmds='$CC -shared -o $lib $libobjs $deplibs $compiler_flags'
-+ archive_cmds='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags'
- hardcode_libdir_flag_spec='-R$libdir'
- hardcode_direct=yes
- hardcode_shlibpath_var=no
-@@ -9766,7 +10374,7 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
-
- hpux9*)
- if test "$GCC" = yes; then
-- archive_cmds='$RM $output_objdir/$soname~$CC -shared -fPIC ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $libobjs $deplibs $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib'
-+ archive_cmds='$RM $output_objdir/$soname~$CC -shared $pic_flag ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $libobjs $deplibs $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib'
- else
- archive_cmds='$RM $output_objdir/$soname~$LD -b +b $install_libdir -o $output_objdir/$soname $libobjs $deplibs $linker_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib'
- fi
-@@ -9782,7 +10390,7 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
-
- hpux10*)
- if test "$GCC" = yes && test "$with_gnu_ld" = no; then
-- archive_cmds='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'
-+ archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'
- else
- archive_cmds='$LD -b +h $soname +b $install_libdir -o $lib $libobjs $deplibs $linker_flags'
- fi
-@@ -9803,19 +10411,19 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
- if test "$GCC" = yes && test "$with_gnu_ld" = no; then
- case $host_cpu in
- hppa*64*)
-- archive_cmds='$CC -shared ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags'
-+ archive_cmds='$CC -shared ${wl}+h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags'
- ;;
- ia64*)
-- archive_cmds='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags'
-+ archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags'
- ;;
- *)
-- archive_cmds='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'
-+ archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'
- ;;
- esac
- else
- case $host_cpu in
- hppa*64*)
-- archive_cmds='$CC -b ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags'
-+ archive_cmds='$CC -b ${wl}+h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags'
- ;;
- ia64*)
- archive_cmds='$CC -b ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags'
-@@ -9888,23 +10496,36 @@ fi
-
- irix5* | irix6* | nonstopux*)
- if test "$GCC" = yes; then
-- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
-+ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
- # Try to use the -exported_symbol ld option, if it does not
- # work, assume that -exports_file does not work either and
- # implicitly export all symbols.
-- save_LDFLAGS="$LDFLAGS"
-- LDFLAGS="$LDFLAGS -shared ${wl}-exported_symbol ${wl}foo ${wl}-update_registry ${wl}/dev/null"
-- cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-+ # This should be the same for all languages, so no per-tag cache variable.
-+ { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the $host_os linker accepts -exported_symbol" >&5
-+$as_echo_n "checking whether the $host_os linker accepts -exported_symbol... " >&6; }
-+if ${lt_cv_irix_exported_symbol+:} false; then :
-+ $as_echo_n "(cached) " >&6
-+else
-+ save_LDFLAGS="$LDFLAGS"
-+ LDFLAGS="$LDFLAGS -shared ${wl}-exported_symbol ${wl}foo ${wl}-update_registry ${wl}/dev/null"
-+ cat confdefs.h - <<_ACEOF >conftest.$ac_ext
- /* end confdefs.h. */
--int foo(void) {}
-+int foo (void) { return 0; }
- _ACEOF
- if ac_fn_c_try_link "$LINENO"; then :
-- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations ${wl}-exports_file ${wl}$export_symbols -o $lib'
--
-+ lt_cv_irix_exported_symbol=yes
-+else
-+ lt_cv_irix_exported_symbol=no
- fi
- rm -f core conftest.err conftest.$ac_objext \
- conftest$ac_exeext conftest.$ac_ext
-- LDFLAGS="$save_LDFLAGS"
-+ LDFLAGS="$save_LDFLAGS"
-+fi
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_irix_exported_symbol" >&5
-+$as_echo "$lt_cv_irix_exported_symbol" >&6; }
-+ if test "$lt_cv_irix_exported_symbol" = yes; then
-+ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations ${wl}-exports_file ${wl}$export_symbols -o $lib'
-+ fi
- else
- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib'
- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -exports_file $export_symbols -o $lib'
-@@ -9989,7 +10610,7 @@ rm -f core conftest.err conftest.$ac_objext \
- osf4* | osf5*) # as osf3* with the addition of -msym flag
- if test "$GCC" = yes; then
- allow_undefined_flag=' ${wl}-expect_unresolved ${wl}\*'
-- archive_cmds='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
-+ archive_cmds='$CC -shared${allow_undefined_flag} $pic_flag $libobjs $deplibs $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
- hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir'
- else
- allow_undefined_flag=' -expect_unresolved \*'
-@@ -10008,9 +10629,9 @@ rm -f core conftest.err conftest.$ac_objext \
- no_undefined_flag=' -z defs'
- if test "$GCC" = yes; then
- wlarc='${wl}'
-- archive_cmds='$CC -shared ${wl}-z ${wl}text ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags'
-+ archive_cmds='$CC -shared $pic_flag ${wl}-z ${wl}text ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags'
- archive_expsym_cmds='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~
-- $CC -shared ${wl}-z ${wl}text ${wl}-M ${wl}$lib.exp ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp'
-+ $CC -shared $pic_flag ${wl}-z ${wl}text ${wl}-M ${wl}$lib.exp ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp'
- else
- case `$CC -V 2>&1` in
- *"Compilers 5.0"*)
-@@ -10586,8 +11207,9 @@ cygwin* | mingw* | pw32* | cegcc*)
- need_version=no
- need_lib_prefix=no
-
-- case $GCC,$host_os in
-- yes,cygwin* | yes,mingw* | yes,pw32* | yes,cegcc*)
-+ case $GCC,$cc_basename in
-+ yes,*)
-+ # gcc
- library_names_spec='$libname.dll.a'
- # DLL is installed to $(libdir)/../bin by postinstall_cmds
- postinstall_cmds='base_file=`basename \${file}`~
-@@ -10620,13 +11242,71 @@ cygwin* | mingw* | pw32* | cegcc*)
- library_names_spec='`echo ${libname} | sed -e 's/^lib/pw/'``echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}'
- ;;
- esac
-+ dynamic_linker='Win32 ld.exe'
-+ ;;
-+
-+ *,cl*)
-+ # Native MSVC
-+ libname_spec='$name'
-+ soname_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}'
-+ library_names_spec='${libname}.dll.lib'
-+
-+ case $build_os in
-+ mingw*)
-+ sys_lib_search_path_spec=
-+ lt_save_ifs=$IFS
-+ IFS=';'
-+ for lt_path in $LIB
-+ do
-+ IFS=$lt_save_ifs
-+ # Let DOS variable expansion print the short 8.3 style file name.
-+ lt_path=`cd "$lt_path" 2>/dev/null && cmd //C "for %i in (".") do @echo %~si"`
-+ sys_lib_search_path_spec="$sys_lib_search_path_spec $lt_path"
-+ done
-+ IFS=$lt_save_ifs
-+ # Convert to MSYS style.
-+ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | sed -e 's|\\\\|/|g' -e 's| \\([a-zA-Z]\\):| /\\1|g' -e 's|^ ||'`
-+ ;;
-+ cygwin*)
-+ # Convert to unix form, then to dos form, then back to unix form
-+ # but this time dos style (no spaces!) so that the unix form looks
-+ # like /cygdrive/c/PROGRA~1:/cygdr...
-+ sys_lib_search_path_spec=`cygpath --path --unix "$LIB"`
-+ sys_lib_search_path_spec=`cygpath --path --dos "$sys_lib_search_path_spec" 2>/dev/null`
-+ sys_lib_search_path_spec=`cygpath --path --unix "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"`
-+ ;;
-+ *)
-+ sys_lib_search_path_spec="$LIB"
-+ if $ECHO "$sys_lib_search_path_spec" | $GREP ';[c-zC-Z]:/' >/dev/null; then
-+ # It is most probably a Windows format PATH.
-+ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e 's/;/ /g'`
-+ else
-+ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"`
-+ fi
-+ # FIXME: find the short name or the path components, as spaces are
-+ # common. (e.g. "Program Files" -> "PROGRA~1")
-+ ;;
-+ esac
-+
-+ # DLL is installed to $(libdir)/../bin by postinstall_cmds
-+ postinstall_cmds='base_file=`basename \${file}`~
-+ dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\${base_file}'\''i; echo \$dlname'\''`~
-+ dldir=$destdir/`dirname \$dlpath`~
-+ test -d \$dldir || mkdir -p \$dldir~
-+ $install_prog $dir/$dlname \$dldir/$dlname'
-+ postuninstall_cmds='dldll=`$SHELL 2>&1 -c '\''. $file; echo \$dlname'\''`~
-+ dlpath=$dir/\$dldll~
-+ $RM \$dlpath'
-+ shlibpath_overrides_runpath=yes
-+ dynamic_linker='Win32 link.exe'
- ;;
-
- *)
-+ # Assume MSVC wrapper
- library_names_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext} $libname.lib'
-+ dynamic_linker='Win32 ld.exe'
- ;;
- esac
-- dynamic_linker='Win32 ld.exe'
- # FIXME: first we should search . and the directory the executable is in
- shlibpath_var=PATH
- ;;
-@@ -10708,7 +11388,7 @@ haiku*)
- soname_spec='${libname}${release}${shared_ext}$major'
- shlibpath_var=LIBRARY_PATH
- shlibpath_overrides_runpath=yes
-- sys_lib_dlsearch_path_spec='/boot/home/config/lib /boot/common/lib /boot/beos/system/lib'
-+ sys_lib_dlsearch_path_spec='/boot/home/config/lib /boot/common/lib /boot/system/lib'
- hardcode_into_libs=yes
- ;;
-
-@@ -10815,12 +11495,7 @@ linux*oldld* | linux*aout* | linux*coff*)
- ;;
-
- # This must be Linux ELF.
--
--# uclinux* changes (here and below) have been submitted to the libtool
--# project, but have not yet been accepted: they are GCC-local changes
--# for the time being. (See
--# https://lists.gnu.org/archive/html/libtool-patches/2018-05/msg00000.html)
--linux* | k*bsd*-gnu | kopensolaris*-gnu | gnu* | uclinuxfdpiceabi)
-+linux* | k*bsd*-gnu | kopensolaris*-gnu | gnu*)
- version_type=linux
- need_lib_prefix=no
- need_version=no
-@@ -11509,7 +12184,7 @@ else
- lt_dlunknown=0; lt_dlno_uscore=1; lt_dlneed_uscore=2
- lt_status=$lt_dlunknown
- cat > conftest.$ac_ext <<_LT_EOF
--#line 11512 "configure"
-+#line $LINENO "configure"
- #include "confdefs.h"
-
- #if HAVE_DLFCN_H
-@@ -11553,10 +12228,10 @@ else
- /* When -fvisbility=hidden is used, assume the code has been annotated
- correspondingly for the symbols needed. */
- #if defined(__GNUC__) && (((__GNUC__ == 3) && (__GNUC_MINOR__ >= 3)) || (__GNUC__ > 3))
--void fnord () __attribute__((visibility("default")));
-+int fnord () __attribute__((visibility("default")));
- #endif
-
--void fnord () { int i=42; }
-+int fnord () { return 42; }
- int main ()
- {
- void *self = dlopen (0, LT_DLGLOBAL|LT_DLLAZY_OR_NOW);
-@@ -11615,7 +12290,7 @@ else
- lt_dlunknown=0; lt_dlno_uscore=1; lt_dlneed_uscore=2
- lt_status=$lt_dlunknown
- cat > conftest.$ac_ext <<_LT_EOF
--#line 11618 "configure"
-+#line $LINENO "configure"
- #include "confdefs.h"
-
- #if HAVE_DLFCN_H
-@@ -11659,10 +12334,10 @@ else
- /* When -fvisbility=hidden is used, assume the code has been annotated
- correspondingly for the symbols needed. */
- #if defined(__GNUC__) && (((__GNUC__ == 3) && (__GNUC_MINOR__ >= 3)) || (__GNUC__ > 3))
--void fnord () __attribute__((visibility("default")));
-+int fnord () __attribute__((visibility("default")));
- #endif
-
--void fnord () { int i=42; }
-+int fnord () { return 42; }
- int main ()
- {
- void *self = dlopen (0, LT_DLGLOBAL|LT_DLLAZY_OR_NOW);
-@@ -14948,13 +15623,20 @@ exeext='`$ECHO "$exeext" | $SED "$delay_single_quote_subst"`'
- lt_unset='`$ECHO "$lt_unset" | $SED "$delay_single_quote_subst"`'
- lt_SP2NL='`$ECHO "$lt_SP2NL" | $SED "$delay_single_quote_subst"`'
- lt_NL2SP='`$ECHO "$lt_NL2SP" | $SED "$delay_single_quote_subst"`'
-+lt_cv_to_host_file_cmd='`$ECHO "$lt_cv_to_host_file_cmd" | $SED "$delay_single_quote_subst"`'
-+lt_cv_to_tool_file_cmd='`$ECHO "$lt_cv_to_tool_file_cmd" | $SED "$delay_single_quote_subst"`'
- reload_flag='`$ECHO "$reload_flag" | $SED "$delay_single_quote_subst"`'
- reload_cmds='`$ECHO "$reload_cmds" | $SED "$delay_single_quote_subst"`'
- OBJDUMP='`$ECHO "$OBJDUMP" | $SED "$delay_single_quote_subst"`'
- deplibs_check_method='`$ECHO "$deplibs_check_method" | $SED "$delay_single_quote_subst"`'
- file_magic_cmd='`$ECHO "$file_magic_cmd" | $SED "$delay_single_quote_subst"`'
-+file_magic_glob='`$ECHO "$file_magic_glob" | $SED "$delay_single_quote_subst"`'
-+want_nocaseglob='`$ECHO "$want_nocaseglob" | $SED "$delay_single_quote_subst"`'
-+DLLTOOL='`$ECHO "$DLLTOOL" | $SED "$delay_single_quote_subst"`'
-+sharedlib_from_linklib_cmd='`$ECHO "$sharedlib_from_linklib_cmd" | $SED "$delay_single_quote_subst"`'
- AR='`$ECHO "$AR" | $SED "$delay_single_quote_subst"`'
- AR_FLAGS='`$ECHO "$AR_FLAGS" | $SED "$delay_single_quote_subst"`'
-+archiver_list_spec='`$ECHO "$archiver_list_spec" | $SED "$delay_single_quote_subst"`'
- STRIP='`$ECHO "$STRIP" | $SED "$delay_single_quote_subst"`'
- RANLIB='`$ECHO "$RANLIB" | $SED "$delay_single_quote_subst"`'
- old_postinstall_cmds='`$ECHO "$old_postinstall_cmds" | $SED "$delay_single_quote_subst"`'
-@@ -14969,14 +15651,17 @@ lt_cv_sys_global_symbol_pipe='`$ECHO "$lt_cv_sys_global_symbol_pipe" | $SED "$de
- lt_cv_sys_global_symbol_to_cdecl='`$ECHO "$lt_cv_sys_global_symbol_to_cdecl" | $SED "$delay_single_quote_subst"`'
- lt_cv_sys_global_symbol_to_c_name_address='`$ECHO "$lt_cv_sys_global_symbol_to_c_name_address" | $SED "$delay_single_quote_subst"`'
- lt_cv_sys_global_symbol_to_c_name_address_lib_prefix='`$ECHO "$lt_cv_sys_global_symbol_to_c_name_address_lib_prefix" | $SED "$delay_single_quote_subst"`'
-+nm_file_list_spec='`$ECHO "$nm_file_list_spec" | $SED "$delay_single_quote_subst"`'
-+lt_sysroot='`$ECHO "$lt_sysroot" | $SED "$delay_single_quote_subst"`'
- objdir='`$ECHO "$objdir" | $SED "$delay_single_quote_subst"`'
- MAGIC_CMD='`$ECHO "$MAGIC_CMD" | $SED "$delay_single_quote_subst"`'
- lt_prog_compiler_no_builtin_flag='`$ECHO "$lt_prog_compiler_no_builtin_flag" | $SED "$delay_single_quote_subst"`'
--lt_prog_compiler_wl='`$ECHO "$lt_prog_compiler_wl" | $SED "$delay_single_quote_subst"`'
- lt_prog_compiler_pic='`$ECHO "$lt_prog_compiler_pic" | $SED "$delay_single_quote_subst"`'
-+lt_prog_compiler_wl='`$ECHO "$lt_prog_compiler_wl" | $SED "$delay_single_quote_subst"`'
- lt_prog_compiler_static='`$ECHO "$lt_prog_compiler_static" | $SED "$delay_single_quote_subst"`'
- lt_cv_prog_compiler_c_o='`$ECHO "$lt_cv_prog_compiler_c_o" | $SED "$delay_single_quote_subst"`'
- need_locks='`$ECHO "$need_locks" | $SED "$delay_single_quote_subst"`'
-+MANIFEST_TOOL='`$ECHO "$MANIFEST_TOOL" | $SED "$delay_single_quote_subst"`'
- DSYMUTIL='`$ECHO "$DSYMUTIL" | $SED "$delay_single_quote_subst"`'
- NMEDIT='`$ECHO "$NMEDIT" | $SED "$delay_single_quote_subst"`'
- LIPO='`$ECHO "$LIPO" | $SED "$delay_single_quote_subst"`'
-@@ -15009,12 +15694,12 @@ hardcode_shlibpath_var='`$ECHO "$hardcode_shlibpath_var" | $SED "$delay_single_q
- hardcode_automatic='`$ECHO "$hardcode_automatic" | $SED "$delay_single_quote_subst"`'
- inherit_rpath='`$ECHO "$inherit_rpath" | $SED "$delay_single_quote_subst"`'
- link_all_deplibs='`$ECHO "$link_all_deplibs" | $SED "$delay_single_quote_subst"`'
--fix_srcfile_path='`$ECHO "$fix_srcfile_path" | $SED "$delay_single_quote_subst"`'
- always_export_symbols='`$ECHO "$always_export_symbols" | $SED "$delay_single_quote_subst"`'
- export_symbols_cmds='`$ECHO "$export_symbols_cmds" | $SED "$delay_single_quote_subst"`'
- exclude_expsyms='`$ECHO "$exclude_expsyms" | $SED "$delay_single_quote_subst"`'
- include_expsyms='`$ECHO "$include_expsyms" | $SED "$delay_single_quote_subst"`'
- prelink_cmds='`$ECHO "$prelink_cmds" | $SED "$delay_single_quote_subst"`'
-+postlink_cmds='`$ECHO "$postlink_cmds" | $SED "$delay_single_quote_subst"`'
- file_list_spec='`$ECHO "$file_list_spec" | $SED "$delay_single_quote_subst"`'
- variables_saved_for_relink='`$ECHO "$variables_saved_for_relink" | $SED "$delay_single_quote_subst"`'
- need_lib_prefix='`$ECHO "$need_lib_prefix" | $SED "$delay_single_quote_subst"`'
-@@ -15069,8 +15754,13 @@ reload_flag \
- OBJDUMP \
- deplibs_check_method \
- file_magic_cmd \
-+file_magic_glob \
-+want_nocaseglob \
-+DLLTOOL \
-+sharedlib_from_linklib_cmd \
- AR \
- AR_FLAGS \
-+archiver_list_spec \
- STRIP \
- RANLIB \
- CC \
-@@ -15080,12 +15770,14 @@ lt_cv_sys_global_symbol_pipe \
- lt_cv_sys_global_symbol_to_cdecl \
- lt_cv_sys_global_symbol_to_c_name_address \
- lt_cv_sys_global_symbol_to_c_name_address_lib_prefix \
-+nm_file_list_spec \
- lt_prog_compiler_no_builtin_flag \
--lt_prog_compiler_wl \
- lt_prog_compiler_pic \
-+lt_prog_compiler_wl \
- lt_prog_compiler_static \
- lt_cv_prog_compiler_c_o \
- need_locks \
-+MANIFEST_TOOL \
- DSYMUTIL \
- NMEDIT \
- LIPO \
-@@ -15101,7 +15793,6 @@ no_undefined_flag \
- hardcode_libdir_flag_spec \
- hardcode_libdir_flag_spec_ld \
- hardcode_libdir_separator \
--fix_srcfile_path \
- exclude_expsyms \
- include_expsyms \
- file_list_spec \
-@@ -15137,6 +15828,7 @@ module_cmds \
- module_expsym_cmds \
- export_symbols_cmds \
- prelink_cmds \
-+postlink_cmds \
- postinstall_cmds \
- postuninstall_cmds \
- finish_cmds \
-@@ -15835,7 +16527,8 @@ esac ;;
- # NOTE: Changes made to this file will be lost: look at ltmain.sh.
- #
- # Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005,
--# 2006, 2007, 2008, 2009 Free Software Foundation, Inc.
-+# 2006, 2007, 2008, 2009, 2010 Free Software Foundation,
-+# Inc.
- # Written by Gordon Matzigkeit, 1996
- #
- # This file is part of GNU Libtool.
-@@ -15938,19 +16631,42 @@ SP2NL=$lt_lt_SP2NL
- # turn newlines into spaces.
- NL2SP=$lt_lt_NL2SP
-
-+# convert \$build file names to \$host format.
-+to_host_file_cmd=$lt_cv_to_host_file_cmd
-+
-+# convert \$build files to toolchain format.
-+to_tool_file_cmd=$lt_cv_to_tool_file_cmd
-+
- # An object symbol dumper.
- OBJDUMP=$lt_OBJDUMP
-
- # Method to check whether dependent libraries are shared objects.
- deplibs_check_method=$lt_deplibs_check_method
-
--# Command to use when deplibs_check_method == "file_magic".
-+# Command to use when deplibs_check_method = "file_magic".
- file_magic_cmd=$lt_file_magic_cmd
-
-+# How to find potential files when deplibs_check_method = "file_magic".
-+file_magic_glob=$lt_file_magic_glob
-+
-+# Find potential files using nocaseglob when deplibs_check_method = "file_magic".
-+want_nocaseglob=$lt_want_nocaseglob
-+
-+# DLL creation program.
-+DLLTOOL=$lt_DLLTOOL
-+
-+# Command to associate shared and link libraries.
-+sharedlib_from_linklib_cmd=$lt_sharedlib_from_linklib_cmd
-+
- # The archiver.
- AR=$lt_AR
-+
-+# Flags to create an archive.
- AR_FLAGS=$lt_AR_FLAGS
-
-+# How to feed a file listing to the archiver.
-+archiver_list_spec=$lt_archiver_list_spec
-+
- # A symbol stripping program.
- STRIP=$lt_STRIP
-
-@@ -15980,6 +16696,12 @@ global_symbol_to_c_name_address=$lt_lt_cv_sys_global_symbol_to_c_name_address
- # Transform the output of nm in a C name address pair when lib prefix is needed.
- global_symbol_to_c_name_address_lib_prefix=$lt_lt_cv_sys_global_symbol_to_c_name_address_lib_prefix
-
-+# Specify filename containing input files for \$NM.
-+nm_file_list_spec=$lt_nm_file_list_spec
-+
-+# The root where to search for dependent libraries,and in which our libraries should be installed.
-+lt_sysroot=$lt_sysroot
-+
- # The name of the directory that contains temporary libtool files.
- objdir=$objdir
-
-@@ -15989,6 +16711,9 @@ MAGIC_CMD=$MAGIC_CMD
- # Must we lock files when doing compilation?
- need_locks=$lt_need_locks
-
-+# Manifest tool.
-+MANIFEST_TOOL=$lt_MANIFEST_TOOL
-+
- # Tool to manipulate archived DWARF debug symbol files on Mac OS X.
- DSYMUTIL=$lt_DSYMUTIL
-
-@@ -16103,12 +16828,12 @@ with_gcc=$GCC
- # Compiler flag to turn off builtin functions.
- no_builtin_flag=$lt_lt_prog_compiler_no_builtin_flag
-
--# How to pass a linker flag through the compiler.
--wl=$lt_lt_prog_compiler_wl
--
- # Additional compiler flags for building library objects.
- pic_flag=$lt_lt_prog_compiler_pic
-
-+# How to pass a linker flag through the compiler.
-+wl=$lt_lt_prog_compiler_wl
-+
- # Compiler flag to prevent dynamic linking.
- link_static_flag=$lt_lt_prog_compiler_static
-
-@@ -16195,9 +16920,6 @@ inherit_rpath=$inherit_rpath
- # Whether libtool must link a program against all its dependency libraries.
- link_all_deplibs=$link_all_deplibs
-
--# Fix the shell variable \$srcfile for the compiler.
--fix_srcfile_path=$lt_fix_srcfile_path
--
- # Set to "yes" if exported symbols are required.
- always_export_symbols=$always_export_symbols
-
-@@ -16213,6 +16935,9 @@ include_expsyms=$lt_include_expsyms
- # Commands necessary for linking programs (against libraries) with templates.
- prelink_cmds=$lt_prelink_cmds
-
-+# Commands necessary for finishing linking programs.
-+postlink_cmds=$lt_postlink_cmds
-+
- # Specify filename containing input files.
- file_list_spec=$lt_file_list_spec
-
-@@ -16245,210 +16970,169 @@ ltmain="$ac_aux_dir/ltmain.sh"
- # if finds mixed CR/LF and LF-only lines. Since sed operates in
- # text mode, it properly converts lines to CR/LF. This bash problem
- # is reportedly fixed, but why not run on old versions too?
-- sed '/^# Generated shell functions inserted here/q' "$ltmain" >> "$cfgfile" \
-- || (rm -f "$cfgfile"; exit 1)
--
-- case $xsi_shell in
-- yes)
-- cat << \_LT_EOF >> "$cfgfile"
--
--# func_dirname file append nondir_replacement
--# Compute the dirname of FILE. If nonempty, add APPEND to the result,
--# otherwise set result to NONDIR_REPLACEMENT.
--func_dirname ()
--{
-- case ${1} in
-- */*) func_dirname_result="${1%/*}${2}" ;;
-- * ) func_dirname_result="${3}" ;;
-- esac
--}
--
--# func_basename file
--func_basename ()
--{
-- func_basename_result="${1##*/}"
--}
--
--# func_dirname_and_basename file append nondir_replacement
--# perform func_basename and func_dirname in a single function
--# call:
--# dirname: Compute the dirname of FILE. If nonempty,
--# add APPEND to the result, otherwise set result
--# to NONDIR_REPLACEMENT.
--# value returned in "$func_dirname_result"
--# basename: Compute filename of FILE.
--# value retuned in "$func_basename_result"
--# Implementation must be kept synchronized with func_dirname
--# and func_basename. For efficiency, we do not delegate to
--# those functions but instead duplicate the functionality here.
--func_dirname_and_basename ()
--{
-- case ${1} in
-- */*) func_dirname_result="${1%/*}${2}" ;;
-- * ) func_dirname_result="${3}" ;;
-- esac
-- func_basename_result="${1##*/}"
--}
--
--# func_stripname prefix suffix name
--# strip PREFIX and SUFFIX off of NAME.
--# PREFIX and SUFFIX must not contain globbing or regex special
--# characters, hashes, percent signs, but SUFFIX may contain a leading
--# dot (in which case that matches only a dot).
--func_stripname ()
--{
-- # pdksh 5.2.14 does not do ${X%$Y} correctly if both X and Y are
-- # positional parameters, so assign one to ordinary parameter first.
-- func_stripname_result=${3}
-- func_stripname_result=${func_stripname_result#"${1}"}
-- func_stripname_result=${func_stripname_result%"${2}"}
--}
--
--# func_opt_split
--func_opt_split ()
--{
-- func_opt_split_opt=${1%%=*}
-- func_opt_split_arg=${1#*=}
--}
--
--# func_lo2o object
--func_lo2o ()
--{
-- case ${1} in
-- *.lo) func_lo2o_result=${1%.lo}.${objext} ;;
-- *) func_lo2o_result=${1} ;;
-- esac
--}
--
--# func_xform libobj-or-source
--func_xform ()
--{
-- func_xform_result=${1%.*}.lo
--}
--
--# func_arith arithmetic-term...
--func_arith ()
--{
-- func_arith_result=$(( $* ))
--}
--
--# func_len string
--# STRING may not start with a hyphen.
--func_len ()
--{
-- func_len_result=${#1}
--}
--
--_LT_EOF
-- ;;
-- *) # Bourne compatible functions.
-- cat << \_LT_EOF >> "$cfgfile"
--
--# func_dirname file append nondir_replacement
--# Compute the dirname of FILE. If nonempty, add APPEND to the result,
--# otherwise set result to NONDIR_REPLACEMENT.
--func_dirname ()
--{
-- # Extract subdirectory from the argument.
-- func_dirname_result=`$ECHO "${1}" | $SED "$dirname"`
-- if test "X$func_dirname_result" = "X${1}"; then
-- func_dirname_result="${3}"
-- else
-- func_dirname_result="$func_dirname_result${2}"
-- fi
--}
--
--# func_basename file
--func_basename ()
--{
-- func_basename_result=`$ECHO "${1}" | $SED "$basename"`
--}
--
--
--# func_stripname prefix suffix name
--# strip PREFIX and SUFFIX off of NAME.
--# PREFIX and SUFFIX must not contain globbing or regex special
--# characters, hashes, percent signs, but SUFFIX may contain a leading
--# dot (in which case that matches only a dot).
--# func_strip_suffix prefix name
--func_stripname ()
--{
-- case ${2} in
-- .*) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%\\\\${2}\$%%"`;;
-- *) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%${2}\$%%"`;;
-- esac
--}
--
--# sed scripts:
--my_sed_long_opt='1s/^\(-[^=]*\)=.*/\1/;q'
--my_sed_long_arg='1s/^-[^=]*=//'
--
--# func_opt_split
--func_opt_split ()
--{
-- func_opt_split_opt=`$ECHO "${1}" | $SED "$my_sed_long_opt"`
-- func_opt_split_arg=`$ECHO "${1}" | $SED "$my_sed_long_arg"`
--}
--
--# func_lo2o object
--func_lo2o ()
--{
-- func_lo2o_result=`$ECHO "${1}" | $SED "$lo2o"`
--}
--
--# func_xform libobj-or-source
--func_xform ()
--{
-- func_xform_result=`$ECHO "${1}" | $SED 's/\.[^.]*$/.lo/'`
--}
--
--# func_arith arithmetic-term...
--func_arith ()
--{
-- func_arith_result=`expr "$@"`
--}
--
--# func_len string
--# STRING may not start with a hyphen.
--func_len ()
--{
-- func_len_result=`expr "$1" : ".*" 2>/dev/null || echo $max_cmd_len`
--}
--
--_LT_EOF
--esac
--
--case $lt_shell_append in
-- yes)
-- cat << \_LT_EOF >> "$cfgfile"
--
--# func_append var value
--# Append VALUE to the end of shell variable VAR.
--func_append ()
--{
-- eval "$1+=\$2"
--}
--_LT_EOF
-- ;;
-- *)
-- cat << \_LT_EOF >> "$cfgfile"
--
--# func_append var value
--# Append VALUE to the end of shell variable VAR.
--func_append ()
--{
-- eval "$1=\$$1\$2"
--}
--
--_LT_EOF
-- ;;
-- esac
--
--
-- sed -n '/^# Generated shell functions inserted here/,$p' "$ltmain" >> "$cfgfile" \
-- || (rm -f "$cfgfile"; exit 1)
--
-- mv -f "$cfgfile" "$ofile" ||
-+ sed '$q' "$ltmain" >> "$cfgfile" \
-+ || (rm -f "$cfgfile"; exit 1)
-+
-+ if test x"$xsi_shell" = xyes; then
-+ sed -e '/^func_dirname ()$/,/^} # func_dirname /c\
-+func_dirname ()\
-+{\
-+\ case ${1} in\
-+\ */*) func_dirname_result="${1%/*}${2}" ;;\
-+\ * ) func_dirname_result="${3}" ;;\
-+\ esac\
-+} # Extended-shell func_dirname implementation' "$cfgfile" > $cfgfile.tmp \
-+ && mv -f "$cfgfile.tmp" "$cfgfile" \
-+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-+test 0 -eq $? || _lt_function_replace_fail=:
-+
-+
-+ sed -e '/^func_basename ()$/,/^} # func_basename /c\
-+func_basename ()\
-+{\
-+\ func_basename_result="${1##*/}"\
-+} # Extended-shell func_basename implementation' "$cfgfile" > $cfgfile.tmp \
-+ && mv -f "$cfgfile.tmp" "$cfgfile" \
-+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-+test 0 -eq $? || _lt_function_replace_fail=:
-+
-+
-+ sed -e '/^func_dirname_and_basename ()$/,/^} # func_dirname_and_basename /c\
-+func_dirname_and_basename ()\
-+{\
-+\ case ${1} in\
-+\ */*) func_dirname_result="${1%/*}${2}" ;;\
-+\ * ) func_dirname_result="${3}" ;;\
-+\ esac\
-+\ func_basename_result="${1##*/}"\
-+} # Extended-shell func_dirname_and_basename implementation' "$cfgfile" > $cfgfile.tmp \
-+ && mv -f "$cfgfile.tmp" "$cfgfile" \
-+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-+test 0 -eq $? || _lt_function_replace_fail=:
-+
-+
-+ sed -e '/^func_stripname ()$/,/^} # func_stripname /c\
-+func_stripname ()\
-+{\
-+\ # pdksh 5.2.14 does not do ${X%$Y} correctly if both X and Y are\
-+\ # positional parameters, so assign one to ordinary parameter first.\
-+\ func_stripname_result=${3}\
-+\ func_stripname_result=${func_stripname_result#"${1}"}\
-+\ func_stripname_result=${func_stripname_result%"${2}"}\
-+} # Extended-shell func_stripname implementation' "$cfgfile" > $cfgfile.tmp \
-+ && mv -f "$cfgfile.tmp" "$cfgfile" \
-+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-+test 0 -eq $? || _lt_function_replace_fail=:
-+
-+
-+ sed -e '/^func_split_long_opt ()$/,/^} # func_split_long_opt /c\
-+func_split_long_opt ()\
-+{\
-+\ func_split_long_opt_name=${1%%=*}\
-+\ func_split_long_opt_arg=${1#*=}\
-+} # Extended-shell func_split_long_opt implementation' "$cfgfile" > $cfgfile.tmp \
-+ && mv -f "$cfgfile.tmp" "$cfgfile" \
-+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-+test 0 -eq $? || _lt_function_replace_fail=:
-+
-+
-+ sed -e '/^func_split_short_opt ()$/,/^} # func_split_short_opt /c\
-+func_split_short_opt ()\
-+{\
-+\ func_split_short_opt_arg=${1#??}\
-+\ func_split_short_opt_name=${1%"$func_split_short_opt_arg"}\
-+} # Extended-shell func_split_short_opt implementation' "$cfgfile" > $cfgfile.tmp \
-+ && mv -f "$cfgfile.tmp" "$cfgfile" \
-+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-+test 0 -eq $? || _lt_function_replace_fail=:
-+
-+
-+ sed -e '/^func_lo2o ()$/,/^} # func_lo2o /c\
-+func_lo2o ()\
-+{\
-+\ case ${1} in\
-+\ *.lo) func_lo2o_result=${1%.lo}.${objext} ;;\
-+\ *) func_lo2o_result=${1} ;;\
-+\ esac\
-+} # Extended-shell func_lo2o implementation' "$cfgfile" > $cfgfile.tmp \
-+ && mv -f "$cfgfile.tmp" "$cfgfile" \
-+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-+test 0 -eq $? || _lt_function_replace_fail=:
-+
-+
-+ sed -e '/^func_xform ()$/,/^} # func_xform /c\
-+func_xform ()\
-+{\
-+ func_xform_result=${1%.*}.lo\
-+} # Extended-shell func_xform implementation' "$cfgfile" > $cfgfile.tmp \
-+ && mv -f "$cfgfile.tmp" "$cfgfile" \
-+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-+test 0 -eq $? || _lt_function_replace_fail=:
-+
-+
-+ sed -e '/^func_arith ()$/,/^} # func_arith /c\
-+func_arith ()\
-+{\
-+ func_arith_result=$(( $* ))\
-+} # Extended-shell func_arith implementation' "$cfgfile" > $cfgfile.tmp \
-+ && mv -f "$cfgfile.tmp" "$cfgfile" \
-+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-+test 0 -eq $? || _lt_function_replace_fail=:
-+
-+
-+ sed -e '/^func_len ()$/,/^} # func_len /c\
-+func_len ()\
-+{\
-+ func_len_result=${#1}\
-+} # Extended-shell func_len implementation' "$cfgfile" > $cfgfile.tmp \
-+ && mv -f "$cfgfile.tmp" "$cfgfile" \
-+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-+test 0 -eq $? || _lt_function_replace_fail=:
-+
-+fi
-+
-+if test x"$lt_shell_append" = xyes; then
-+ sed -e '/^func_append ()$/,/^} # func_append /c\
-+func_append ()\
-+{\
-+ eval "${1}+=\\${2}"\
-+} # Extended-shell func_append implementation' "$cfgfile" > $cfgfile.tmp \
-+ && mv -f "$cfgfile.tmp" "$cfgfile" \
-+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-+test 0 -eq $? || _lt_function_replace_fail=:
-+
-+
-+ sed -e '/^func_append_quoted ()$/,/^} # func_append_quoted /c\
-+func_append_quoted ()\
-+{\
-+\ func_quote_for_eval "${2}"\
-+\ eval "${1}+=\\\\ \\$func_quote_for_eval_result"\
-+} # Extended-shell func_append_quoted implementation' "$cfgfile" > $cfgfile.tmp \
-+ && mv -f "$cfgfile.tmp" "$cfgfile" \
-+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-+test 0 -eq $? || _lt_function_replace_fail=:
-+
-+
-+ # Save a `func_append' function call where possible by direct use of '+='
-+ sed -e 's%func_append \([a-zA-Z_]\{1,\}\) "%\1+="%g' $cfgfile > $cfgfile.tmp \
-+ && mv -f "$cfgfile.tmp" "$cfgfile" \
-+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-+ test 0 -eq $? || _lt_function_replace_fail=:
-+else
-+ # Save a `func_append' function call even when '+=' is not available
-+ sed -e 's%func_append \([a-zA-Z_]\{1,\}\) "%\1="$\1%g' $cfgfile > $cfgfile.tmp \
-+ && mv -f "$cfgfile.tmp" "$cfgfile" \
-+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-+ test 0 -eq $? || _lt_function_replace_fail=:
-+fi
-+
-+if test x"$_lt_function_replace_fail" = x":"; then
-+ { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: Unable to substitute extended shell functions in $ofile" >&5
-+$as_echo "$as_me: WARNING: Unable to substitute extended shell functions in $ofile" >&2;}
-+fi
-+
-+
-+ mv -f "$cfgfile" "$ofile" ||
- (rm -f "$ofile" && cp "$cfgfile" "$ofile" && rm -f "$cfgfile")
- chmod +x "$ofile"
-
-diff --git a/libctf/configure b/libctf/configure
-index de10fef84a1..1b0ee0d32c6 100755
---- a/libctf/configure
-+++ b/libctf/configure
-@@ -669,6 +669,8 @@ OTOOL
- LIPO
- NMEDIT
- DSYMUTIL
-+MANIFEST_TOOL
-+DLLTOOL
- OBJDUMP
- LN_S
- NM
-@@ -801,6 +803,7 @@ enable_static
- with_pic
- enable_fast_install
- with_gnu_ld
-+with_libtool_sysroot
- enable_libtool_lock
- enable_largefile
- enable_werror_always
-@@ -1475,6 +1478,8 @@ Optional Packages:
- --with-pic try to use only PIC/non-PIC objects [default=use
- both]
- --with-gnu-ld assume the C compiler uses GNU ld [default=no]
-+ --with-libtool-sysroot=DIR Search for dependent libraries within DIR
-+ (or the compiler's sysroot if not specified).
- --with-system-zlib use installed libz
-
- Some influential environment variables:
-@@ -5583,8 +5588,8 @@ esac
-
-
-
--macro_version='2.2.7a'
--macro_revision='1.3134'
-+macro_version='2.4'
-+macro_revision='1.3293'
-
-
-
-@@ -5624,7 +5629,7 @@ ECHO=$ECHO$ECHO$ECHO$ECHO$ECHO$ECHO
- { $as_echo "$as_me:${as_lineno-$LINENO}: checking how to print strings" >&5
- $as_echo_n "checking how to print strings... " >&6; }
- # Test print first, because it will be a builtin if present.
--if test "X`print -r -- -n 2>/dev/null`" = X-n && \
-+if test "X`( print -r -- -n ) 2>/dev/null`" = X-n && \
- test "X`print -r -- $ECHO 2>/dev/null`" = "X$ECHO"; then
- ECHO='print -r --'
- elif test "X`printf %s $ECHO 2>/dev/null`" = "X$ECHO"; then
-@@ -6311,8 +6316,8 @@ $as_echo_n "checking whether the shell understands some XSI constructs... " >&6;
- # Try some XSI features
- xsi_shell=no
- ( _lt_dummy="a/b/c"
-- test "${_lt_dummy##*/},${_lt_dummy%/*},"${_lt_dummy%"$_lt_dummy"}, \
-- = c,a/b,, \
-+ test "${_lt_dummy##*/},${_lt_dummy%/*},${_lt_dummy#??}"${_lt_dummy%"$_lt_dummy"}, \
-+ = c,a/b,b/c, \
- && eval 'test $(( 1 + 1 )) -eq 2 \
- && test "${#_lt_dummy}" -eq 5' ) >/dev/null 2>&1 \
- && xsi_shell=yes
-@@ -6361,6 +6366,80 @@ esac
-
-
-
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to convert $build file names to $host format" >&5
-+$as_echo_n "checking how to convert $build file names to $host format... " >&6; }
-+if ${lt_cv_to_host_file_cmd+:} false; then :
-+ $as_echo_n "(cached) " >&6
-+else
-+ case $host in
-+ *-*-mingw* )
-+ case $build in
-+ *-*-mingw* ) # actually msys
-+ lt_cv_to_host_file_cmd=func_convert_file_msys_to_w32
-+ ;;
-+ *-*-cygwin* )
-+ lt_cv_to_host_file_cmd=func_convert_file_cygwin_to_w32
-+ ;;
-+ * ) # otherwise, assume *nix
-+ lt_cv_to_host_file_cmd=func_convert_file_nix_to_w32
-+ ;;
-+ esac
-+ ;;
-+ *-*-cygwin* )
-+ case $build in
-+ *-*-mingw* ) # actually msys
-+ lt_cv_to_host_file_cmd=func_convert_file_msys_to_cygwin
-+ ;;
-+ *-*-cygwin* )
-+ lt_cv_to_host_file_cmd=func_convert_file_noop
-+ ;;
-+ * ) # otherwise, assume *nix
-+ lt_cv_to_host_file_cmd=func_convert_file_nix_to_cygwin
-+ ;;
-+ esac
-+ ;;
-+ * ) # unhandled hosts (and "normal" native builds)
-+ lt_cv_to_host_file_cmd=func_convert_file_noop
-+ ;;
-+esac
-+
-+fi
-+
-+to_host_file_cmd=$lt_cv_to_host_file_cmd
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_to_host_file_cmd" >&5
-+$as_echo "$lt_cv_to_host_file_cmd" >&6; }
-+
-+
-+
-+
-+
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to convert $build file names to toolchain format" >&5
-+$as_echo_n "checking how to convert $build file names to toolchain format... " >&6; }
-+if ${lt_cv_to_tool_file_cmd+:} false; then :
-+ $as_echo_n "(cached) " >&6
-+else
-+ #assume ordinary cross tools, or native build.
-+lt_cv_to_tool_file_cmd=func_convert_file_noop
-+case $host in
-+ *-*-mingw* )
-+ case $build in
-+ *-*-mingw* ) # actually msys
-+ lt_cv_to_tool_file_cmd=func_convert_file_msys_to_w32
-+ ;;
-+ esac
-+ ;;
-+esac
-+
-+fi
-+
-+to_tool_file_cmd=$lt_cv_to_tool_file_cmd
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_to_tool_file_cmd" >&5
-+$as_echo "$lt_cv_to_tool_file_cmd" >&6; }
-+
-+
-+
-+
-+
- { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $LD option to reload object files" >&5
- $as_echo_n "checking for $LD option to reload object files... " >&6; }
- if ${lt_cv_ld_reload_flag+:} false; then :
-@@ -6377,6 +6456,11 @@ case $reload_flag in
- esac
- reload_cmds='$LD$reload_flag -o $output$reload_objs'
- case $host_os in
-+ cygwin* | mingw* | pw32* | cegcc*)
-+ if test "$GCC" != yes; then
-+ reload_cmds=false
-+ fi
-+ ;;
- darwin*)
- if test "$GCC" = yes; then
- reload_cmds='$LTCC $LTCFLAGS -nostdlib ${wl}-r -o $output$reload_objs'
-@@ -6545,7 +6629,8 @@ mingw* | pw32*)
- lt_cv_deplibs_check_method='file_magic ^x86 archive import|^x86 DLL'
- lt_cv_file_magic_cmd='func_win32_libid'
- else
-- lt_cv_deplibs_check_method='file_magic file format pei*-i386(.*architecture: i386)?'
-+ # Keep this pattern in sync with the one in func_win32_libid.
-+ lt_cv_deplibs_check_method='file_magic file format (pei*-i386(.*architecture: i386)?|pe-arm-wince|pe-x86-64)'
- lt_cv_file_magic_cmd='$OBJDUMP -f'
- fi
- ;;
-@@ -6699,6 +6784,21 @@ esac
- fi
- { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_deplibs_check_method" >&5
- $as_echo "$lt_cv_deplibs_check_method" >&6; }
-+
-+file_magic_glob=
-+want_nocaseglob=no
-+if test "$build" = "$host"; then
-+ case $host_os in
-+ mingw* | pw32*)
-+ if ( shopt | grep nocaseglob ) >/dev/null 2>&1; then
-+ want_nocaseglob=yes
-+ else
-+ file_magic_glob=`echo aAbBcCdDeEfFgGhHiIjJkKlLmMnNoOpPqQrRsStTuUvVwWxXyYzZ | $SED -e "s/\(..\)/s\/[\1]\/[\1]\/g;/g"`
-+ fi
-+ ;;
-+ esac
-+fi
-+
- file_magic_cmd=$lt_cv_file_magic_cmd
- deplibs_check_method=$lt_cv_deplibs_check_method
- test -z "$deplibs_check_method" && deplibs_check_method=unknown
-@@ -6714,6 +6814,157 @@ test -z "$deplibs_check_method" && deplibs_check_method=unknown
-
-
-
-+
-+
-+
-+
-+
-+
-+
-+
-+
-+
-+if test -n "$ac_tool_prefix"; then
-+ # Extract the first word of "${ac_tool_prefix}dlltool", so it can be a program name with args.
-+set dummy ${ac_tool_prefix}dlltool; ac_word=$2
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-+$as_echo_n "checking for $ac_word... " >&6; }
-+if ${ac_cv_prog_DLLTOOL+:} false; then :
-+ $as_echo_n "(cached) " >&6
-+else
-+ if test -n "$DLLTOOL"; then
-+ ac_cv_prog_DLLTOOL="$DLLTOOL" # Let the user override the test.
-+else
-+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-+for as_dir in $PATH
-+do
-+ IFS=$as_save_IFS
-+ test -z "$as_dir" && as_dir=.
-+ for ac_exec_ext in '' $ac_executable_extensions; do
-+ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
-+ ac_cv_prog_DLLTOOL="${ac_tool_prefix}dlltool"
-+ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
-+ break 2
-+ fi
-+done
-+ done
-+IFS=$as_save_IFS
-+
-+fi
-+fi
-+DLLTOOL=$ac_cv_prog_DLLTOOL
-+if test -n "$DLLTOOL"; then
-+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $DLLTOOL" >&5
-+$as_echo "$DLLTOOL" >&6; }
-+else
-+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-+$as_echo "no" >&6; }
-+fi
-+
-+
-+fi
-+if test -z "$ac_cv_prog_DLLTOOL"; then
-+ ac_ct_DLLTOOL=$DLLTOOL
-+ # Extract the first word of "dlltool", so it can be a program name with args.
-+set dummy dlltool; ac_word=$2
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-+$as_echo_n "checking for $ac_word... " >&6; }
-+if ${ac_cv_prog_ac_ct_DLLTOOL+:} false; then :
-+ $as_echo_n "(cached) " >&6
-+else
-+ if test -n "$ac_ct_DLLTOOL"; then
-+ ac_cv_prog_ac_ct_DLLTOOL="$ac_ct_DLLTOOL" # Let the user override the test.
-+else
-+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-+for as_dir in $PATH
-+do
-+ IFS=$as_save_IFS
-+ test -z "$as_dir" && as_dir=.
-+ for ac_exec_ext in '' $ac_executable_extensions; do
-+ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
-+ ac_cv_prog_ac_ct_DLLTOOL="dlltool"
-+ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
-+ break 2
-+ fi
-+done
-+ done
-+IFS=$as_save_IFS
-+
-+fi
-+fi
-+ac_ct_DLLTOOL=$ac_cv_prog_ac_ct_DLLTOOL
-+if test -n "$ac_ct_DLLTOOL"; then
-+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_DLLTOOL" >&5
-+$as_echo "$ac_ct_DLLTOOL" >&6; }
-+else
-+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-+$as_echo "no" >&6; }
-+fi
-+
-+ if test "x$ac_ct_DLLTOOL" = x; then
-+ DLLTOOL="false"
-+ else
-+ case $cross_compiling:$ac_tool_warned in
-+yes:)
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
-+$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
-+ac_tool_warned=yes ;;
-+esac
-+ DLLTOOL=$ac_ct_DLLTOOL
-+ fi
-+else
-+ DLLTOOL="$ac_cv_prog_DLLTOOL"
-+fi
-+
-+test -z "$DLLTOOL" && DLLTOOL=dlltool
-+
-+
-+
-+
-+
-+
-+
-+
-+
-+
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to associate runtime and link libraries" >&5
-+$as_echo_n "checking how to associate runtime and link libraries... " >&6; }
-+if ${lt_cv_sharedlib_from_linklib_cmd+:} false; then :
-+ $as_echo_n "(cached) " >&6
-+else
-+ lt_cv_sharedlib_from_linklib_cmd='unknown'
-+
-+case $host_os in
-+cygwin* | mingw* | pw32* | cegcc*)
-+ # two different shell functions defined in ltmain.sh
-+ # decide which to use based on capabilities of $DLLTOOL
-+ case `$DLLTOOL --help 2>&1` in
-+ *--identify-strict*)
-+ lt_cv_sharedlib_from_linklib_cmd=func_cygming_dll_for_implib
-+ ;;
-+ *)
-+ lt_cv_sharedlib_from_linklib_cmd=func_cygming_dll_for_implib_fallback
-+ ;;
-+ esac
-+ ;;
-+*)
-+ # fallback: assume linklib IS sharedlib
-+ lt_cv_sharedlib_from_linklib_cmd="$ECHO"
-+ ;;
-+esac
-+
-+fi
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_sharedlib_from_linklib_cmd" >&5
-+$as_echo "$lt_cv_sharedlib_from_linklib_cmd" >&6; }
-+sharedlib_from_linklib_cmd=$lt_cv_sharedlib_from_linklib_cmd
-+test -z "$sharedlib_from_linklib_cmd" && sharedlib_from_linklib_cmd=$ECHO
-+
-+
-+
-+
-+
-+
-+
- plugin_option=
- plugin_names="liblto_plugin.so liblto_plugin-0.dll cyglto_plugin-0.dll"
- for plugin in $plugin_names; do
-@@ -6728,8 +6979,10 @@ for plugin in $plugin_names; do
- done
-
- if test -n "$ac_tool_prefix"; then
-- # Extract the first word of "${ac_tool_prefix}ar", so it can be a program name with args.
--set dummy ${ac_tool_prefix}ar; ac_word=$2
-+ for ac_prog in ar
-+ do
-+ # Extract the first word of "$ac_tool_prefix$ac_prog", so it can be a program name with args.
-+set dummy $ac_tool_prefix$ac_prog; ac_word=$2
- { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
- $as_echo_n "checking for $ac_word... " >&6; }
- if ${ac_cv_prog_AR+:} false; then :
-@@ -6745,7 +6998,7 @@ do
- test -z "$as_dir" && as_dir=.
- for ac_exec_ext in '' $ac_executable_extensions; do
- if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
-- ac_cv_prog_AR="${ac_tool_prefix}ar"
-+ ac_cv_prog_AR="$ac_tool_prefix$ac_prog"
- $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
- break 2
- fi
-@@ -6765,11 +7018,15 @@ $as_echo "no" >&6; }
- fi
-
-
-+ test -n "$AR" && break
-+ done
- fi
--if test -z "$ac_cv_prog_AR"; then
-+if test -z "$AR"; then
- ac_ct_AR=$AR
-- # Extract the first word of "ar", so it can be a program name with args.
--set dummy ar; ac_word=$2
-+ for ac_prog in ar
-+do
-+ # Extract the first word of "$ac_prog", so it can be a program name with args.
-+set dummy $ac_prog; ac_word=$2
- { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
- $as_echo_n "checking for $ac_word... " >&6; }
- if ${ac_cv_prog_ac_ct_AR+:} false; then :
-@@ -6785,7 +7042,7 @@ do
- test -z "$as_dir" && as_dir=.
- for ac_exec_ext in '' $ac_executable_extensions; do
- if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
-- ac_cv_prog_ac_ct_AR="ar"
-+ ac_cv_prog_ac_ct_AR="$ac_prog"
- $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
- break 2
- fi
-@@ -6804,6 +7061,10 @@ else
- $as_echo "no" >&6; }
- fi
-
-+
-+ test -n "$ac_ct_AR" && break
-+done
-+
- if test "x$ac_ct_AR" = x; then
- AR="false"
- else
-@@ -6815,25 +7076,19 @@ ac_tool_warned=yes ;;
- esac
- AR=$ac_ct_AR
- fi
--else
-- AR="$ac_cv_prog_AR"
- fi
-
--test -z "$AR" && AR=ar
--if test -n "$plugin_option"; then
-- if $AR --help 2>&1 | grep -q "\--plugin"; then
-- touch conftest.c
-- $AR $plugin_option rc conftest.a conftest.c
-- if test "$?" != 0; then
-- { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: Failed: $AR $plugin_option rc" >&5
-+ touch conftest.c
-+ $AR $plugin_option rc conftest.a conftest.c
-+ if test "$?" != 0; then
-+ { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: Failed: $AR $plugin_option rc" >&5
- $as_echo "$as_me: WARNING: Failed: $AR $plugin_option rc" >&2;}
-- else
-- AR="$AR $plugin_option"
-- fi
-- rm -f conftest.*
-+ else
-+ AR="$AR $plugin_option"
- fi
--fi
--test -z "$AR_FLAGS" && AR_FLAGS=cru
-+ rm -f conftest.*
-+: ${AR=ar}
-+: ${AR_FLAGS=cru}
-
-
-
-@@ -6845,6 +7100,64 @@ test -z "$AR_FLAGS" && AR_FLAGS=cru
-
-
-
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for archiver @FILE support" >&5
-+$as_echo_n "checking for archiver @FILE support... " >&6; }
-+if ${lt_cv_ar_at_file+:} false; then :
-+ $as_echo_n "(cached) " >&6
-+else
-+ lt_cv_ar_at_file=no
-+ cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-+/* end confdefs.h. */
-+
-+int
-+main ()
-+{
-+
-+ ;
-+ return 0;
-+}
-+_ACEOF
-+if ac_fn_c_try_compile "$LINENO"; then :
-+ echo conftest.$ac_objext > conftest.lst
-+ lt_ar_try='$AR $AR_FLAGS libconftest.a @conftest.lst >&5'
-+ { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$lt_ar_try\""; } >&5
-+ (eval $lt_ar_try) 2>&5
-+ ac_status=$?
-+ $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
-+ test $ac_status = 0; }
-+ if test "$ac_status" -eq 0; then
-+ # Ensure the archiver fails upon bogus file names.
-+ rm -f conftest.$ac_objext libconftest.a
-+ { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$lt_ar_try\""; } >&5
-+ (eval $lt_ar_try) 2>&5
-+ ac_status=$?
-+ $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
-+ test $ac_status = 0; }
-+ if test "$ac_status" -ne 0; then
-+ lt_cv_ar_at_file=@
-+ fi
-+ fi
-+ rm -f conftest.* libconftest.a
-+
-+fi
-+rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
-+
-+fi
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_ar_at_file" >&5
-+$as_echo "$lt_cv_ar_at_file" >&6; }
-+
-+if test "x$lt_cv_ar_at_file" = xno; then
-+ archiver_list_spec=
-+else
-+ archiver_list_spec=$lt_cv_ar_at_file
-+fi
-+
-+
-+
-+
-+
-+
-+
- if test -n "$ac_tool_prefix"; then
- # Extract the first word of "${ac_tool_prefix}strip", so it can be a program name with args.
- set dummy ${ac_tool_prefix}strip; ac_word=$2
-@@ -7184,8 +7497,8 @@ esac
- lt_cv_sys_global_symbol_to_cdecl="sed -n -e 's/^T .* \(.*\)$/extern int \1();/p' -e 's/^$symcode* .* \(.*\)$/extern char \1;/p'"
-
- # Transform an extracted symbol line into symbol name and symbol address
--lt_cv_sys_global_symbol_to_c_name_address="sed -n -e 's/^: \([^ ]*\) $/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"\2\", (void *) \&\2},/p'"
--lt_cv_sys_global_symbol_to_c_name_address_lib_prefix="sed -n -e 's/^: \([^ ]*\) $/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \(lib[^ ]*\)$/ {\"\2\", (void *) \&\2},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"lib\2\", (void *) \&\2},/p'"
-+lt_cv_sys_global_symbol_to_c_name_address="sed -n -e 's/^: \([^ ]*\)[ ]*$/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"\2\", (void *) \&\2},/p'"
-+lt_cv_sys_global_symbol_to_c_name_address_lib_prefix="sed -n -e 's/^: \([^ ]*\)[ ]*$/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \(lib[^ ]*\)$/ {\"\2\", (void *) \&\2},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"lib\2\", (void *) \&\2},/p'"
-
- # Handle CRLF in mingw tool chain
- opt_cr=
-@@ -7221,6 +7534,7 @@ for ac_symprfx in "" "_"; do
- else
- lt_cv_sys_global_symbol_pipe="sed -n -e 's/^.*[ ]\($symcode$symcode*\)[ ][ ]*$ac_symprfx$sympat$opt_cr$/$symxfrm/p'"
- fi
-+ lt_cv_sys_global_symbol_pipe="$lt_cv_sys_global_symbol_pipe | sed '/ __gnu_lto/d'"
-
- # Check to see that the pipe works correctly.
- pipe_works=no
-@@ -7262,6 +7576,18 @@ _LT_EOF
- if $GREP ' nm_test_var$' "$nlist" >/dev/null; then
- if $GREP ' nm_test_func$' "$nlist" >/dev/null; then
- cat <<_LT_EOF > conftest.$ac_ext
-+/* Keep this code in sync between libtool.m4, ltmain, lt_system.h, and tests. */
-+#if defined(_WIN32) || defined(__CYGWIN__) || defined(_WIN32_WCE)
-+/* DATA imports from DLLs on WIN32 con't be const, because runtime
-+ relocations are performed -- see ld's documentation on pseudo-relocs. */
-+# define LT_DLSYM_CONST
-+#elif defined(__osf__)
-+/* This system does not cope well with relocations in const data. */
-+# define LT_DLSYM_CONST
-+#else
-+# define LT_DLSYM_CONST const
-+#endif
-+
- #ifdef __cplusplus
- extern "C" {
- #endif
-@@ -7273,7 +7599,7 @@ _LT_EOF
- cat <<_LT_EOF >> conftest.$ac_ext
-
- /* The mapping between symbol names and symbols. */
--const struct {
-+LT_DLSYM_CONST struct {
- const char *name;
- void *address;
- }
-@@ -7299,8 +7625,8 @@ static const void *lt_preloaded_setup() {
- _LT_EOF
- # Now try linking the two files.
- mv conftest.$ac_objext conftstm.$ac_objext
-- lt_save_LIBS="$LIBS"
-- lt_save_CFLAGS="$CFLAGS"
-+ lt_globsym_save_LIBS=$LIBS
-+ lt_globsym_save_CFLAGS=$CFLAGS
- LIBS="conftstm.$ac_objext"
- CFLAGS="$CFLAGS$lt_prog_compiler_no_builtin_flag"
- if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_link\""; } >&5
-@@ -7310,8 +7636,8 @@ _LT_EOF
- test $ac_status = 0; } && test -s conftest${ac_exeext}; then
- pipe_works=yes
- fi
-- LIBS="$lt_save_LIBS"
-- CFLAGS="$lt_save_CFLAGS"
-+ LIBS=$lt_globsym_save_LIBS
-+ CFLAGS=$lt_globsym_save_CFLAGS
- else
- echo "cannot find nm_test_func in $nlist" >&5
- fi
-@@ -7348,6 +7674,14 @@ else
- $as_echo "ok" >&6; }
- fi
-
-+# Response file support.
-+if test "$lt_cv_nm_interface" = "MS dumpbin"; then
-+ nm_file_list_spec='@'
-+elif $NM --help 2>/dev/null | grep '[@]FILE' >/dev/null; then
-+ nm_file_list_spec='@'
-+fi
-+
-+
-
-
-
-@@ -7366,6 +7700,47 @@ fi
-
-
-
-+
-+
-+
-+
-+
-+
-+
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for sysroot" >&5
-+$as_echo_n "checking for sysroot... " >&6; }
-+
-+# Check whether --with-libtool-sysroot was given.
-+if test "${with_libtool_sysroot+set}" = set; then :
-+ withval=$with_libtool_sysroot;
-+else
-+ with_libtool_sysroot=no
-+fi
-+
-+
-+lt_sysroot=
-+case ${with_libtool_sysroot} in #(
-+ yes)
-+ if test "$GCC" = yes; then
-+ lt_sysroot=`$CC --print-sysroot 2>/dev/null`
-+ fi
-+ ;; #(
-+ /*)
-+ lt_sysroot=`echo "$with_libtool_sysroot" | sed -e "$sed_quote_subst"`
-+ ;; #(
-+ no|'')
-+ ;; #(
-+ *)
-+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: ${with_libtool_sysroot}" >&5
-+$as_echo "${with_libtool_sysroot}" >&6; }
-+ as_fn_error $? "The sysroot must be an absolute path." "$LINENO" 5
-+ ;;
-+esac
-+
-+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: ${lt_sysroot:-no}" >&5
-+$as_echo "${lt_sysroot:-no}" >&6; }
-+
-+
-
-
-
-@@ -7575,6 +7950,123 @@ esac
-
- need_locks="$enable_libtool_lock"
-
-+if test -n "$ac_tool_prefix"; then
-+ # Extract the first word of "${ac_tool_prefix}mt", so it can be a program name with args.
-+set dummy ${ac_tool_prefix}mt; ac_word=$2
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-+$as_echo_n "checking for $ac_word... " >&6; }
-+if ${ac_cv_prog_MANIFEST_TOOL+:} false; then :
-+ $as_echo_n "(cached) " >&6
-+else
-+ if test -n "$MANIFEST_TOOL"; then
-+ ac_cv_prog_MANIFEST_TOOL="$MANIFEST_TOOL" # Let the user override the test.
-+else
-+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-+for as_dir in $PATH
-+do
-+ IFS=$as_save_IFS
-+ test -z "$as_dir" && as_dir=.
-+ for ac_exec_ext in '' $ac_executable_extensions; do
-+ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
-+ ac_cv_prog_MANIFEST_TOOL="${ac_tool_prefix}mt"
-+ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
-+ break 2
-+ fi
-+done
-+ done
-+IFS=$as_save_IFS
-+
-+fi
-+fi
-+MANIFEST_TOOL=$ac_cv_prog_MANIFEST_TOOL
-+if test -n "$MANIFEST_TOOL"; then
-+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $MANIFEST_TOOL" >&5
-+$as_echo "$MANIFEST_TOOL" >&6; }
-+else
-+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-+$as_echo "no" >&6; }
-+fi
-+
-+
-+fi
-+if test -z "$ac_cv_prog_MANIFEST_TOOL"; then
-+ ac_ct_MANIFEST_TOOL=$MANIFEST_TOOL
-+ # Extract the first word of "mt", so it can be a program name with args.
-+set dummy mt; ac_word=$2
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-+$as_echo_n "checking for $ac_word... " >&6; }
-+if ${ac_cv_prog_ac_ct_MANIFEST_TOOL+:} false; then :
-+ $as_echo_n "(cached) " >&6
-+else
-+ if test -n "$ac_ct_MANIFEST_TOOL"; then
-+ ac_cv_prog_ac_ct_MANIFEST_TOOL="$ac_ct_MANIFEST_TOOL" # Let the user override the test.
-+else
-+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-+for as_dir in $PATH
-+do
-+ IFS=$as_save_IFS
-+ test -z "$as_dir" && as_dir=.
-+ for ac_exec_ext in '' $ac_executable_extensions; do
-+ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
-+ ac_cv_prog_ac_ct_MANIFEST_TOOL="mt"
-+ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
-+ break 2
-+ fi
-+done
-+ done
-+IFS=$as_save_IFS
-+
-+fi
-+fi
-+ac_ct_MANIFEST_TOOL=$ac_cv_prog_ac_ct_MANIFEST_TOOL
-+if test -n "$ac_ct_MANIFEST_TOOL"; then
-+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_MANIFEST_TOOL" >&5
-+$as_echo "$ac_ct_MANIFEST_TOOL" >&6; }
-+else
-+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-+$as_echo "no" >&6; }
-+fi
-+
-+ if test "x$ac_ct_MANIFEST_TOOL" = x; then
-+ MANIFEST_TOOL=":"
-+ else
-+ case $cross_compiling:$ac_tool_warned in
-+yes:)
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
-+$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
-+ac_tool_warned=yes ;;
-+esac
-+ MANIFEST_TOOL=$ac_ct_MANIFEST_TOOL
-+ fi
-+else
-+ MANIFEST_TOOL="$ac_cv_prog_MANIFEST_TOOL"
-+fi
-+
-+test -z "$MANIFEST_TOOL" && MANIFEST_TOOL=mt
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking if $MANIFEST_TOOL is a manifest tool" >&5
-+$as_echo_n "checking if $MANIFEST_TOOL is a manifest tool... " >&6; }
-+if ${lt_cv_path_mainfest_tool+:} false; then :
-+ $as_echo_n "(cached) " >&6
-+else
-+ lt_cv_path_mainfest_tool=no
-+ echo "$as_me:$LINENO: $MANIFEST_TOOL '-?'" >&5
-+ $MANIFEST_TOOL '-?' 2>conftest.err > conftest.out
-+ cat conftest.err >&5
-+ if $GREP 'Manifest Tool' conftest.out > /dev/null; then
-+ lt_cv_path_mainfest_tool=yes
-+ fi
-+ rm -f conftest*
-+fi
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_path_mainfest_tool" >&5
-+$as_echo "$lt_cv_path_mainfest_tool" >&6; }
-+if test "x$lt_cv_path_mainfest_tool" != xyes; then
-+ MANIFEST_TOOL=:
-+fi
-+
-+
-+
-+
-+
-
- case $host_os in
- rhapsody* | darwin*)
-@@ -8138,6 +8630,8 @@ _LT_EOF
- $LTCC $LTCFLAGS -c -o conftest.o conftest.c 2>&5
- echo "$AR cru libconftest.a conftest.o" >&5
- $AR cru libconftest.a conftest.o 2>&5
-+ echo "$RANLIB libconftest.a" >&5
-+ $RANLIB libconftest.a 2>&5
- cat > conftest.c << _LT_EOF
- int main() { return 0;}
- _LT_EOF
-@@ -8690,8 +9184,6 @@ fi
- lt_prog_compiler_pic=
- lt_prog_compiler_static=
-
--{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $compiler option to produce PIC" >&5
--$as_echo_n "checking for $compiler option to produce PIC... " >&6; }
-
- if test "$GCC" = yes; then
- lt_prog_compiler_wl='-Wl,'
-@@ -8857,6 +9349,12 @@ $as_echo_n "checking for $compiler option to produce PIC... " >&6; }
- lt_prog_compiler_pic='--shared'
- lt_prog_compiler_static='--static'
- ;;
-+ nagfor*)
-+ # NAG Fortran compiler
-+ lt_prog_compiler_wl='-Wl,-Wl,,'
-+ lt_prog_compiler_pic='-PIC'
-+ lt_prog_compiler_static='-Bstatic'
-+ ;;
- pgcc* | pgf77* | pgf90* | pgf95* | pgfortran*)
- # Portland Group compilers (*not* the Pentium gcc compiler,
- # which looks to be a dead project)
-@@ -8919,7 +9417,7 @@ $as_echo_n "checking for $compiler option to produce PIC... " >&6; }
- lt_prog_compiler_pic='-KPIC'
- lt_prog_compiler_static='-Bstatic'
- case $cc_basename in
-- f77* | f90* | f95*)
-+ f77* | f90* | f95* | sunf77* | sunf90* | sunf95*)
- lt_prog_compiler_wl='-Qoption ld ';;
- *)
- lt_prog_compiler_wl='-Wl,';;
-@@ -8976,13 +9474,17 @@ case $host_os in
- lt_prog_compiler_pic="$lt_prog_compiler_pic -DPIC"
- ;;
- esac
--{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_prog_compiler_pic" >&5
--$as_echo "$lt_prog_compiler_pic" >&6; }
--
--
--
--
-
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $compiler option to produce PIC" >&5
-+$as_echo_n "checking for $compiler option to produce PIC... " >&6; }
-+if ${lt_cv_prog_compiler_pic+:} false; then :
-+ $as_echo_n "(cached) " >&6
-+else
-+ lt_cv_prog_compiler_pic=$lt_prog_compiler_pic
-+fi
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_pic" >&5
-+$as_echo "$lt_cv_prog_compiler_pic" >&6; }
-+lt_prog_compiler_pic=$lt_cv_prog_compiler_pic
-
- #
- # Check to make sure the PIC flag actually works.
-@@ -9043,6 +9545,11 @@ fi
-
-
-
-+
-+
-+
-+
-+
- #
- # Check to make sure the static flag actually works.
- #
-@@ -9393,7 +9900,8 @@ _LT_EOF
- allow_undefined_flag=unsupported
- always_export_symbols=no
- enable_shared_with_static_runtimes=yes
-- export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1 DATA/'\'' | $SED -e '\''/^[AITW][ ]/s/.*[ ]//'\'' | sort | uniq > $export_symbols'
-+ export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1 DATA/;s/^.*[ ]__nm__\([^ ]*\)[ ][^ ]*/\1 DATA/;/^I[ ]/d;/^[AITW][ ]/s/.* //'\'' | sort | uniq > $export_symbols'
-+ exclude_expsyms='[_]+GLOBAL_OFFSET_TABLE_|[_]+GLOBAL__[FID]_.*|[_]+head_[A-Za-z0-9_]+_dll|[A-Za-z0-9_]+_dll_iname'
-
- if $LD --help 2>&1 | $GREP 'auto-import' > /dev/null; then
- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib'
-@@ -9492,12 +10000,12 @@ _LT_EOF
- whole_archive_flag_spec='--whole-archive$convenience --no-whole-archive'
- hardcode_libdir_flag_spec=
- hardcode_libdir_flag_spec_ld='-rpath $libdir'
-- archive_cmds='$LD -shared $libobjs $deplibs $compiler_flags -soname $soname -o $lib'
-+ archive_cmds='$LD -shared $libobjs $deplibs $linker_flags -soname $soname -o $lib'
- if test "x$supports_anon_versioning" = xyes; then
- archive_expsym_cmds='echo "{ global:" > $output_objdir/$libname.ver~
- cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~
- echo "local: *; };" >> $output_objdir/$libname.ver~
-- $LD -shared $libobjs $deplibs $compiler_flags -soname $soname -version-script $output_objdir/$libname.ver -o $lib'
-+ $LD -shared $libobjs $deplibs $linker_flags -soname $soname -version-script $output_objdir/$libname.ver -o $lib'
- fi
- ;;
- esac
-@@ -9511,8 +10019,8 @@ _LT_EOF
- archive_cmds='$LD -Bshareable $libobjs $deplibs $linker_flags -o $lib'
- wlarc=
- else
-- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
-- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
-+ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
-+ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
- fi
- ;;
-
-@@ -9530,8 +10038,8 @@ _LT_EOF
-
- _LT_EOF
- elif $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then
-- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
-- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
-+ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
-+ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
- else
- ld_shlibs=no
- fi
-@@ -9577,8 +10085,8 @@ _LT_EOF
-
- *)
- if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then
-- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
-- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
-+ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
-+ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
- else
- ld_shlibs=no
- fi
-@@ -9708,7 +10216,13 @@ _LT_EOF
- allow_undefined_flag='-berok'
- # Determine the default libpath from the value encoded in an
- # empty executable.
-- cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-+ if test "${lt_cv_aix_libpath+set}" = set; then
-+ aix_libpath=$lt_cv_aix_libpath
-+else
-+ if ${lt_cv_aix_libpath_+:} false; then :
-+ $as_echo_n "(cached) " >&6
-+else
-+ cat confdefs.h - <<_ACEOF >conftest.$ac_ext
- /* end confdefs.h. */
-
- int
-@@ -9721,22 +10235,29 @@ main ()
- _ACEOF
- if ac_fn_c_try_link "$LINENO"; then :
-
--lt_aix_libpath_sed='
-- /Import File Strings/,/^$/ {
-- /^0/ {
-- s/^0 *\(.*\)$/\1/
-- p
-- }
-- }'
--aix_libpath=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
--# Check for a 64-bit object if we didn't find anything.
--if test -z "$aix_libpath"; then
-- aix_libpath=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
--fi
-+ lt_aix_libpath_sed='
-+ /Import File Strings/,/^$/ {
-+ /^0/ {
-+ s/^0 *\([^ ]*\) *$/\1/
-+ p
-+ }
-+ }'
-+ lt_cv_aix_libpath_=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
-+ # Check for a 64-bit object if we didn't find anything.
-+ if test -z "$lt_cv_aix_libpath_"; then
-+ lt_cv_aix_libpath_=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
-+ fi
- fi
- rm -f core conftest.err conftest.$ac_objext \
- conftest$ac_exeext conftest.$ac_ext
--if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
-+ if test -z "$lt_cv_aix_libpath_"; then
-+ lt_cv_aix_libpath_="/usr/lib:/lib"
-+ fi
-+
-+fi
-+
-+ aix_libpath=$lt_cv_aix_libpath_
-+fi
-
- hardcode_libdir_flag_spec='${wl}-blibpath:$libdir:'"$aix_libpath"
- archive_expsym_cmds='$CC -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags `if test "x${allow_undefined_flag}" != "x"; then func_echo_all "${wl}${allow_undefined_flag}"; else :; fi` '"\${wl}$exp_sym_flag:\$export_symbols $shared_flag"
-@@ -9748,7 +10269,13 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
- else
- # Determine the default libpath from the value encoded in an
- # empty executable.
-- cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-+ if test "${lt_cv_aix_libpath+set}" = set; then
-+ aix_libpath=$lt_cv_aix_libpath
-+else
-+ if ${lt_cv_aix_libpath_+:} false; then :
-+ $as_echo_n "(cached) " >&6
-+else
-+ cat confdefs.h - <<_ACEOF >conftest.$ac_ext
- /* end confdefs.h. */
-
- int
-@@ -9761,22 +10288,29 @@ main ()
- _ACEOF
- if ac_fn_c_try_link "$LINENO"; then :
-
--lt_aix_libpath_sed='
-- /Import File Strings/,/^$/ {
-- /^0/ {
-- s/^0 *\(.*\)$/\1/
-- p
-- }
-- }'
--aix_libpath=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
--# Check for a 64-bit object if we didn't find anything.
--if test -z "$aix_libpath"; then
-- aix_libpath=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
--fi
-+ lt_aix_libpath_sed='
-+ /Import File Strings/,/^$/ {
-+ /^0/ {
-+ s/^0 *\([^ ]*\) *$/\1/
-+ p
-+ }
-+ }'
-+ lt_cv_aix_libpath_=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
-+ # Check for a 64-bit object if we didn't find anything.
-+ if test -z "$lt_cv_aix_libpath_"; then
-+ lt_cv_aix_libpath_=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
-+ fi
- fi
- rm -f core conftest.err conftest.$ac_objext \
- conftest$ac_exeext conftest.$ac_ext
--if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
-+ if test -z "$lt_cv_aix_libpath_"; then
-+ lt_cv_aix_libpath_="/usr/lib:/lib"
-+ fi
-+
-+fi
-+
-+ aix_libpath=$lt_cv_aix_libpath_
-+fi
-
- hardcode_libdir_flag_spec='${wl}-blibpath:$libdir:'"$aix_libpath"
- # Warning - without using the other run time loading flags,
-@@ -9821,20 +10355,63 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
- # Microsoft Visual C++.
- # hardcode_libdir_flag_spec is actually meaningless, as there is
- # no search path for DLLs.
-- hardcode_libdir_flag_spec=' '
-- allow_undefined_flag=unsupported
-- # Tell ltmain to make .lib files, not .a files.
-- libext=lib
-- # Tell ltmain to make .dll files, not .so files.
-- shrext_cmds=".dll"
-- # FIXME: Setting linknames here is a bad hack.
-- archive_cmds='$CC -o $lib $libobjs $compiler_flags `func_echo_all "$deplibs" | $SED '\''s/ -lc$//'\''` -link -dll~linknames='
-- # The linker will automatically build a .lib file if we build a DLL.
-- old_archive_from_new_cmds='true'
-- # FIXME: Should let the user specify the lib program.
-- old_archive_cmds='lib -OUT:$oldlib$oldobjs$old_deplibs'
-- fix_srcfile_path='`cygpath -w "$srcfile"`'
-- enable_shared_with_static_runtimes=yes
-+ case $cc_basename in
-+ cl*)
-+ # Native MSVC
-+ hardcode_libdir_flag_spec=' '
-+ allow_undefined_flag=unsupported
-+ always_export_symbols=yes
-+ file_list_spec='@'
-+ # Tell ltmain to make .lib files, not .a files.
-+ libext=lib
-+ # Tell ltmain to make .dll files, not .so files.
-+ shrext_cmds=".dll"
-+ # FIXME: Setting linknames here is a bad hack.
-+ archive_cmds='$CC -o $output_objdir/$soname $libobjs $compiler_flags $deplibs -Wl,-dll~linknames='
-+ archive_expsym_cmds='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then
-+ sed -n -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' -e '1\\\!p' < $export_symbols > $output_objdir/$soname.exp;
-+ else
-+ sed -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' < $export_symbols > $output_objdir/$soname.exp;
-+ fi~
-+ $CC -o $tool_output_objdir$soname $libobjs $compiler_flags $deplibs "@$tool_output_objdir$soname.exp" -Wl,-DLL,-IMPLIB:"$tool_output_objdir$libname.dll.lib"~
-+ linknames='
-+ # The linker will not automatically build a static lib if we build a DLL.
-+ # _LT_TAGVAR(old_archive_from_new_cmds, )='true'
-+ enable_shared_with_static_runtimes=yes
-+ export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1,DATA/'\'' | $SED -e '\''/^[AITW][ ]/s/.*[ ]//'\'' | sort | uniq > $export_symbols'
-+ # Don't use ranlib
-+ old_postinstall_cmds='chmod 644 $oldlib'
-+ postlink_cmds='lt_outputfile="@OUTPUT@"~
-+ lt_tool_outputfile="@TOOL_OUTPUT@"~
-+ case $lt_outputfile in
-+ *.exe|*.EXE) ;;
-+ *)
-+ lt_outputfile="$lt_outputfile.exe"
-+ lt_tool_outputfile="$lt_tool_outputfile.exe"
-+ ;;
-+ esac~
-+ if test "$MANIFEST_TOOL" != ":" && test -f "$lt_outputfile.manifest"; then
-+ $MANIFEST_TOOL -manifest "$lt_tool_outputfile.manifest" -outputresource:"$lt_tool_outputfile" || exit 1;
-+ $RM "$lt_outputfile.manifest";
-+ fi'
-+ ;;
-+ *)
-+ # Assume MSVC wrapper
-+ hardcode_libdir_flag_spec=' '
-+ allow_undefined_flag=unsupported
-+ # Tell ltmain to make .lib files, not .a files.
-+ libext=lib
-+ # Tell ltmain to make .dll files, not .so files.
-+ shrext_cmds=".dll"
-+ # FIXME: Setting linknames here is a bad hack.
-+ archive_cmds='$CC -o $lib $libobjs $compiler_flags `func_echo_all "$deplibs" | $SED '\''s/ -lc$//'\''` -link -dll~linknames='
-+ # The linker will automatically build a .lib file if we build a DLL.
-+ old_archive_from_new_cmds='true'
-+ # FIXME: Should let the user specify the lib program.
-+ old_archive_cmds='lib -OUT:$oldlib$oldobjs$old_deplibs'
-+ enable_shared_with_static_runtimes=yes
-+ ;;
-+ esac
- ;;
-
- darwin* | rhapsody*)
-@@ -9895,7 +10472,7 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
-
- # FreeBSD 3 and greater uses gcc -shared to do shared libraries.
- freebsd* | dragonfly*)
-- archive_cmds='$CC -shared -o $lib $libobjs $deplibs $compiler_flags'
-+ archive_cmds='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags'
- hardcode_libdir_flag_spec='-R$libdir'
- hardcode_direct=yes
- hardcode_shlibpath_var=no
-@@ -9903,7 +10480,7 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
-
- hpux9*)
- if test "$GCC" = yes; then
-- archive_cmds='$RM $output_objdir/$soname~$CC -shared -fPIC ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $libobjs $deplibs $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib'
-+ archive_cmds='$RM $output_objdir/$soname~$CC -shared $pic_flag ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $libobjs $deplibs $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib'
- else
- archive_cmds='$RM $output_objdir/$soname~$LD -b +b $install_libdir -o $output_objdir/$soname $libobjs $deplibs $linker_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib'
- fi
-@@ -9919,7 +10496,7 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
-
- hpux10*)
- if test "$GCC" = yes && test "$with_gnu_ld" = no; then
-- archive_cmds='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'
-+ archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'
- else
- archive_cmds='$LD -b +h $soname +b $install_libdir -o $lib $libobjs $deplibs $linker_flags'
- fi
-@@ -9943,10 +10520,10 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
- archive_cmds='$CC -shared ${wl}+h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags'
- ;;
- ia64*)
-- archive_cmds='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags'
-+ archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags'
- ;;
- *)
-- archive_cmds='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'
-+ archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'
- ;;
- esac
- else
-@@ -10025,23 +10602,36 @@ fi
-
- irix5* | irix6* | nonstopux*)
- if test "$GCC" = yes; then
-- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
-+ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
- # Try to use the -exported_symbol ld option, if it does not
- # work, assume that -exports_file does not work either and
- # implicitly export all symbols.
-- save_LDFLAGS="$LDFLAGS"
-- LDFLAGS="$LDFLAGS -shared ${wl}-exported_symbol ${wl}foo ${wl}-update_registry ${wl}/dev/null"
-- cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-+ # This should be the same for all languages, so no per-tag cache variable.
-+ { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the $host_os linker accepts -exported_symbol" >&5
-+$as_echo_n "checking whether the $host_os linker accepts -exported_symbol... " >&6; }
-+if ${lt_cv_irix_exported_symbol+:} false; then :
-+ $as_echo_n "(cached) " >&6
-+else
-+ save_LDFLAGS="$LDFLAGS"
-+ LDFLAGS="$LDFLAGS -shared ${wl}-exported_symbol ${wl}foo ${wl}-update_registry ${wl}/dev/null"
-+ cat confdefs.h - <<_ACEOF >conftest.$ac_ext
- /* end confdefs.h. */
--int foo(void) {}
-+int foo (void) { return 0; }
- _ACEOF
- if ac_fn_c_try_link "$LINENO"; then :
-- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations ${wl}-exports_file ${wl}$export_symbols -o $lib'
--
-+ lt_cv_irix_exported_symbol=yes
-+else
-+ lt_cv_irix_exported_symbol=no
- fi
- rm -f core conftest.err conftest.$ac_objext \
- conftest$ac_exeext conftest.$ac_ext
-- LDFLAGS="$save_LDFLAGS"
-+ LDFLAGS="$save_LDFLAGS"
-+fi
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_irix_exported_symbol" >&5
-+$as_echo "$lt_cv_irix_exported_symbol" >&6; }
-+ if test "$lt_cv_irix_exported_symbol" = yes; then
-+ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations ${wl}-exports_file ${wl}$export_symbols -o $lib'
-+ fi
- else
- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib'
- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -exports_file $export_symbols -o $lib'
-@@ -10126,7 +10716,7 @@ rm -f core conftest.err conftest.$ac_objext \
- osf4* | osf5*) # as osf3* with the addition of -msym flag
- if test "$GCC" = yes; then
- allow_undefined_flag=' ${wl}-expect_unresolved ${wl}\*'
-- archive_cmds='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
-+ archive_cmds='$CC -shared${allow_undefined_flag} $pic_flag $libobjs $deplibs $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
- hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir'
- else
- allow_undefined_flag=' -expect_unresolved \*'
-@@ -10145,9 +10735,9 @@ rm -f core conftest.err conftest.$ac_objext \
- no_undefined_flag=' -z defs'
- if test "$GCC" = yes; then
- wlarc='${wl}'
-- archive_cmds='$CC -shared ${wl}-z ${wl}text ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags'
-+ archive_cmds='$CC -shared $pic_flag ${wl}-z ${wl}text ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags'
- archive_expsym_cmds='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~
-- $CC -shared ${wl}-z ${wl}text ${wl}-M ${wl}$lib.exp ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp'
-+ $CC -shared $pic_flag ${wl}-z ${wl}text ${wl}-M ${wl}$lib.exp ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp'
- else
- case `$CC -V 2>&1` in
- *"Compilers 5.0"*)
-@@ -10723,8 +11313,9 @@ cygwin* | mingw* | pw32* | cegcc*)
- need_version=no
- need_lib_prefix=no
-
-- case $GCC,$host_os in
-- yes,cygwin* | yes,mingw* | yes,pw32* | yes,cegcc*)
-+ case $GCC,$cc_basename in
-+ yes,*)
-+ # gcc
- library_names_spec='$libname.dll.a'
- # DLL is installed to $(libdir)/../bin by postinstall_cmds
- postinstall_cmds='base_file=`basename \${file}`~
-@@ -10757,13 +11348,71 @@ cygwin* | mingw* | pw32* | cegcc*)
- library_names_spec='`echo ${libname} | sed -e 's/^lib/pw/'``echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}'
- ;;
- esac
-+ dynamic_linker='Win32 ld.exe'
-+ ;;
-+
-+ *,cl*)
-+ # Native MSVC
-+ libname_spec='$name'
-+ soname_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}'
-+ library_names_spec='${libname}.dll.lib'
-+
-+ case $build_os in
-+ mingw*)
-+ sys_lib_search_path_spec=
-+ lt_save_ifs=$IFS
-+ IFS=';'
-+ for lt_path in $LIB
-+ do
-+ IFS=$lt_save_ifs
-+ # Let DOS variable expansion print the short 8.3 style file name.
-+ lt_path=`cd "$lt_path" 2>/dev/null && cmd //C "for %i in (".") do @echo %~si"`
-+ sys_lib_search_path_spec="$sys_lib_search_path_spec $lt_path"
-+ done
-+ IFS=$lt_save_ifs
-+ # Convert to MSYS style.
-+ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | sed -e 's|\\\\|/|g' -e 's| \\([a-zA-Z]\\):| /\\1|g' -e 's|^ ||'`
-+ ;;
-+ cygwin*)
-+ # Convert to unix form, then to dos form, then back to unix form
-+ # but this time dos style (no spaces!) so that the unix form looks
-+ # like /cygdrive/c/PROGRA~1:/cygdr...
-+ sys_lib_search_path_spec=`cygpath --path --unix "$LIB"`
-+ sys_lib_search_path_spec=`cygpath --path --dos "$sys_lib_search_path_spec" 2>/dev/null`
-+ sys_lib_search_path_spec=`cygpath --path --unix "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"`
-+ ;;
-+ *)
-+ sys_lib_search_path_spec="$LIB"
-+ if $ECHO "$sys_lib_search_path_spec" | $GREP ';[c-zC-Z]:/' >/dev/null; then
-+ # It is most probably a Windows format PATH.
-+ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e 's/;/ /g'`
-+ else
-+ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"`
-+ fi
-+ # FIXME: find the short name or the path components, as spaces are
-+ # common. (e.g. "Program Files" -> "PROGRA~1")
-+ ;;
-+ esac
-+
-+ # DLL is installed to $(libdir)/../bin by postinstall_cmds
-+ postinstall_cmds='base_file=`basename \${file}`~
-+ dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\${base_file}'\''i; echo \$dlname'\''`~
-+ dldir=$destdir/`dirname \$dlpath`~
-+ test -d \$dldir || mkdir -p \$dldir~
-+ $install_prog $dir/$dlname \$dldir/$dlname'
-+ postuninstall_cmds='dldll=`$SHELL 2>&1 -c '\''. $file; echo \$dlname'\''`~
-+ dlpath=$dir/\$dldll~
-+ $RM \$dlpath'
-+ shlibpath_overrides_runpath=yes
-+ dynamic_linker='Win32 link.exe'
- ;;
-
- *)
-+ # Assume MSVC wrapper
- library_names_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext} $libname.lib'
-+ dynamic_linker='Win32 ld.exe'
- ;;
- esac
-- dynamic_linker='Win32 ld.exe'
- # FIXME: first we should search . and the directory the executable is in
- shlibpath_var=PATH
- ;;
-@@ -11641,7 +12290,7 @@ else
- lt_dlunknown=0; lt_dlno_uscore=1; lt_dlneed_uscore=2
- lt_status=$lt_dlunknown
- cat > conftest.$ac_ext <<_LT_EOF
--#line 11644 "configure"
-+#line $LINENO "configure"
- #include "confdefs.h"
-
- #if HAVE_DLFCN_H
-@@ -11685,10 +12334,10 @@ else
- /* When -fvisbility=hidden is used, assume the code has been annotated
- correspondingly for the symbols needed. */
- #if defined(__GNUC__) && (((__GNUC__ == 3) && (__GNUC_MINOR__ >= 3)) || (__GNUC__ > 3))
--void fnord () __attribute__((visibility("default")));
-+int fnord () __attribute__((visibility("default")));
- #endif
-
--void fnord () { int i=42; }
-+int fnord () { return 42; }
- int main ()
- {
- void *self = dlopen (0, LT_DLGLOBAL|LT_DLLAZY_OR_NOW);
-@@ -11747,7 +12396,7 @@ else
- lt_dlunknown=0; lt_dlno_uscore=1; lt_dlneed_uscore=2
- lt_status=$lt_dlunknown
- cat > conftest.$ac_ext <<_LT_EOF
--#line 11750 "configure"
-+#line $LINENO "configure"
- #include "confdefs.h"
-
- #if HAVE_DLFCN_H
-@@ -11791,10 +12440,10 @@ else
- /* When -fvisbility=hidden is used, assume the code has been annotated
- correspondingly for the symbols needed. */
- #if defined(__GNUC__) && (((__GNUC__ == 3) && (__GNUC_MINOR__ >= 3)) || (__GNUC__ > 3))
--void fnord () __attribute__((visibility("default")));
-+int fnord () __attribute__((visibility("default")));
- #endif
-
--void fnord () { int i=42; }
-+int fnord () { return 42; }
- int main ()
- {
- void *self = dlopen (0, LT_DLGLOBAL|LT_DLLAZY_OR_NOW);
-@@ -14479,13 +15128,20 @@ exeext='`$ECHO "$exeext" | $SED "$delay_single_quote_subst"`'
- lt_unset='`$ECHO "$lt_unset" | $SED "$delay_single_quote_subst"`'
- lt_SP2NL='`$ECHO "$lt_SP2NL" | $SED "$delay_single_quote_subst"`'
- lt_NL2SP='`$ECHO "$lt_NL2SP" | $SED "$delay_single_quote_subst"`'
-+lt_cv_to_host_file_cmd='`$ECHO "$lt_cv_to_host_file_cmd" | $SED "$delay_single_quote_subst"`'
-+lt_cv_to_tool_file_cmd='`$ECHO "$lt_cv_to_tool_file_cmd" | $SED "$delay_single_quote_subst"`'
- reload_flag='`$ECHO "$reload_flag" | $SED "$delay_single_quote_subst"`'
- reload_cmds='`$ECHO "$reload_cmds" | $SED "$delay_single_quote_subst"`'
- OBJDUMP='`$ECHO "$OBJDUMP" | $SED "$delay_single_quote_subst"`'
- deplibs_check_method='`$ECHO "$deplibs_check_method" | $SED "$delay_single_quote_subst"`'
- file_magic_cmd='`$ECHO "$file_magic_cmd" | $SED "$delay_single_quote_subst"`'
-+file_magic_glob='`$ECHO "$file_magic_glob" | $SED "$delay_single_quote_subst"`'
-+want_nocaseglob='`$ECHO "$want_nocaseglob" | $SED "$delay_single_quote_subst"`'
-+DLLTOOL='`$ECHO "$DLLTOOL" | $SED "$delay_single_quote_subst"`'
-+sharedlib_from_linklib_cmd='`$ECHO "$sharedlib_from_linklib_cmd" | $SED "$delay_single_quote_subst"`'
- AR='`$ECHO "$AR" | $SED "$delay_single_quote_subst"`'
- AR_FLAGS='`$ECHO "$AR_FLAGS" | $SED "$delay_single_quote_subst"`'
-+archiver_list_spec='`$ECHO "$archiver_list_spec" | $SED "$delay_single_quote_subst"`'
- STRIP='`$ECHO "$STRIP" | $SED "$delay_single_quote_subst"`'
- RANLIB='`$ECHO "$RANLIB" | $SED "$delay_single_quote_subst"`'
- old_postinstall_cmds='`$ECHO "$old_postinstall_cmds" | $SED "$delay_single_quote_subst"`'
-@@ -14500,14 +15156,17 @@ lt_cv_sys_global_symbol_pipe='`$ECHO "$lt_cv_sys_global_symbol_pipe" | $SED "$de
- lt_cv_sys_global_symbol_to_cdecl='`$ECHO "$lt_cv_sys_global_symbol_to_cdecl" | $SED "$delay_single_quote_subst"`'
- lt_cv_sys_global_symbol_to_c_name_address='`$ECHO "$lt_cv_sys_global_symbol_to_c_name_address" | $SED "$delay_single_quote_subst"`'
- lt_cv_sys_global_symbol_to_c_name_address_lib_prefix='`$ECHO "$lt_cv_sys_global_symbol_to_c_name_address_lib_prefix" | $SED "$delay_single_quote_subst"`'
-+nm_file_list_spec='`$ECHO "$nm_file_list_spec" | $SED "$delay_single_quote_subst"`'
-+lt_sysroot='`$ECHO "$lt_sysroot" | $SED "$delay_single_quote_subst"`'
- objdir='`$ECHO "$objdir" | $SED "$delay_single_quote_subst"`'
- MAGIC_CMD='`$ECHO "$MAGIC_CMD" | $SED "$delay_single_quote_subst"`'
- lt_prog_compiler_no_builtin_flag='`$ECHO "$lt_prog_compiler_no_builtin_flag" | $SED "$delay_single_quote_subst"`'
--lt_prog_compiler_wl='`$ECHO "$lt_prog_compiler_wl" | $SED "$delay_single_quote_subst"`'
- lt_prog_compiler_pic='`$ECHO "$lt_prog_compiler_pic" | $SED "$delay_single_quote_subst"`'
-+lt_prog_compiler_wl='`$ECHO "$lt_prog_compiler_wl" | $SED "$delay_single_quote_subst"`'
- lt_prog_compiler_static='`$ECHO "$lt_prog_compiler_static" | $SED "$delay_single_quote_subst"`'
- lt_cv_prog_compiler_c_o='`$ECHO "$lt_cv_prog_compiler_c_o" | $SED "$delay_single_quote_subst"`'
- need_locks='`$ECHO "$need_locks" | $SED "$delay_single_quote_subst"`'
-+MANIFEST_TOOL='`$ECHO "$MANIFEST_TOOL" | $SED "$delay_single_quote_subst"`'
- DSYMUTIL='`$ECHO "$DSYMUTIL" | $SED "$delay_single_quote_subst"`'
- NMEDIT='`$ECHO "$NMEDIT" | $SED "$delay_single_quote_subst"`'
- LIPO='`$ECHO "$LIPO" | $SED "$delay_single_quote_subst"`'
-@@ -14540,12 +15199,12 @@ hardcode_shlibpath_var='`$ECHO "$hardcode_shlibpath_var" | $SED "$delay_single_q
- hardcode_automatic='`$ECHO "$hardcode_automatic" | $SED "$delay_single_quote_subst"`'
- inherit_rpath='`$ECHO "$inherit_rpath" | $SED "$delay_single_quote_subst"`'
- link_all_deplibs='`$ECHO "$link_all_deplibs" | $SED "$delay_single_quote_subst"`'
--fix_srcfile_path='`$ECHO "$fix_srcfile_path" | $SED "$delay_single_quote_subst"`'
- always_export_symbols='`$ECHO "$always_export_symbols" | $SED "$delay_single_quote_subst"`'
- export_symbols_cmds='`$ECHO "$export_symbols_cmds" | $SED "$delay_single_quote_subst"`'
- exclude_expsyms='`$ECHO "$exclude_expsyms" | $SED "$delay_single_quote_subst"`'
- include_expsyms='`$ECHO "$include_expsyms" | $SED "$delay_single_quote_subst"`'
- prelink_cmds='`$ECHO "$prelink_cmds" | $SED "$delay_single_quote_subst"`'
-+postlink_cmds='`$ECHO "$postlink_cmds" | $SED "$delay_single_quote_subst"`'
- file_list_spec='`$ECHO "$file_list_spec" | $SED "$delay_single_quote_subst"`'
- variables_saved_for_relink='`$ECHO "$variables_saved_for_relink" | $SED "$delay_single_quote_subst"`'
- need_lib_prefix='`$ECHO "$need_lib_prefix" | $SED "$delay_single_quote_subst"`'
-@@ -14600,8 +15259,13 @@ reload_flag \
- OBJDUMP \
- deplibs_check_method \
- file_magic_cmd \
-+file_magic_glob \
-+want_nocaseglob \
-+DLLTOOL \
-+sharedlib_from_linklib_cmd \
- AR \
- AR_FLAGS \
-+archiver_list_spec \
- STRIP \
- RANLIB \
- CC \
-@@ -14611,12 +15275,14 @@ lt_cv_sys_global_symbol_pipe \
- lt_cv_sys_global_symbol_to_cdecl \
- lt_cv_sys_global_symbol_to_c_name_address \
- lt_cv_sys_global_symbol_to_c_name_address_lib_prefix \
-+nm_file_list_spec \
- lt_prog_compiler_no_builtin_flag \
--lt_prog_compiler_wl \
- lt_prog_compiler_pic \
-+lt_prog_compiler_wl \
- lt_prog_compiler_static \
- lt_cv_prog_compiler_c_o \
- need_locks \
-+MANIFEST_TOOL \
- DSYMUTIL \
- NMEDIT \
- LIPO \
-@@ -14632,7 +15298,6 @@ no_undefined_flag \
- hardcode_libdir_flag_spec \
- hardcode_libdir_flag_spec_ld \
- hardcode_libdir_separator \
--fix_srcfile_path \
- exclude_expsyms \
- include_expsyms \
- file_list_spec \
-@@ -14668,6 +15333,7 @@ module_cmds \
- module_expsym_cmds \
- export_symbols_cmds \
- prelink_cmds \
-+postlink_cmds \
- postinstall_cmds \
- postuninstall_cmds \
- finish_cmds \
-@@ -15424,7 +16090,8 @@ $as_echo X"$file" |
- # NOTE: Changes made to this file will be lost: look at ltmain.sh.
- #
- # Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005,
--# 2006, 2007, 2008, 2009 Free Software Foundation, Inc.
-+# 2006, 2007, 2008, 2009, 2010 Free Software Foundation,
-+# Inc.
- # Written by Gordon Matzigkeit, 1996
- #
- # This file is part of GNU Libtool.
-@@ -15527,19 +16194,42 @@ SP2NL=$lt_lt_SP2NL
- # turn newlines into spaces.
- NL2SP=$lt_lt_NL2SP
-
-+# convert \$build file names to \$host format.
-+to_host_file_cmd=$lt_cv_to_host_file_cmd
-+
-+# convert \$build files to toolchain format.
-+to_tool_file_cmd=$lt_cv_to_tool_file_cmd
-+
- # An object symbol dumper.
- OBJDUMP=$lt_OBJDUMP
-
- # Method to check whether dependent libraries are shared objects.
- deplibs_check_method=$lt_deplibs_check_method
-
--# Command to use when deplibs_check_method == "file_magic".
-+# Command to use when deplibs_check_method = "file_magic".
- file_magic_cmd=$lt_file_magic_cmd
-
-+# How to find potential files when deplibs_check_method = "file_magic".
-+file_magic_glob=$lt_file_magic_glob
-+
-+# Find potential files using nocaseglob when deplibs_check_method = "file_magic".
-+want_nocaseglob=$lt_want_nocaseglob
-+
-+# DLL creation program.
-+DLLTOOL=$lt_DLLTOOL
-+
-+# Command to associate shared and link libraries.
-+sharedlib_from_linklib_cmd=$lt_sharedlib_from_linklib_cmd
-+
- # The archiver.
- AR=$lt_AR
-+
-+# Flags to create an archive.
- AR_FLAGS=$lt_AR_FLAGS
-
-+# How to feed a file listing to the archiver.
-+archiver_list_spec=$lt_archiver_list_spec
-+
- # A symbol stripping program.
- STRIP=$lt_STRIP
-
-@@ -15569,6 +16259,12 @@ global_symbol_to_c_name_address=$lt_lt_cv_sys_global_symbol_to_c_name_address
- # Transform the output of nm in a C name address pair when lib prefix is needed.
- global_symbol_to_c_name_address_lib_prefix=$lt_lt_cv_sys_global_symbol_to_c_name_address_lib_prefix
-
-+# Specify filename containing input files for \$NM.
-+nm_file_list_spec=$lt_nm_file_list_spec
-+
-+# The root where to search for dependent libraries,and in which our libraries should be installed.
-+lt_sysroot=$lt_sysroot
-+
- # The name of the directory that contains temporary libtool files.
- objdir=$objdir
-
-@@ -15578,6 +16274,9 @@ MAGIC_CMD=$MAGIC_CMD
- # Must we lock files when doing compilation?
- need_locks=$lt_need_locks
-
-+# Manifest tool.
-+MANIFEST_TOOL=$lt_MANIFEST_TOOL
-+
- # Tool to manipulate archived DWARF debug symbol files on Mac OS X.
- DSYMUTIL=$lt_DSYMUTIL
-
-@@ -15692,12 +16391,12 @@ with_gcc=$GCC
- # Compiler flag to turn off builtin functions.
- no_builtin_flag=$lt_lt_prog_compiler_no_builtin_flag
-
--# How to pass a linker flag through the compiler.
--wl=$lt_lt_prog_compiler_wl
--
- # Additional compiler flags for building library objects.
- pic_flag=$lt_lt_prog_compiler_pic
-
-+# How to pass a linker flag through the compiler.
-+wl=$lt_lt_prog_compiler_wl
-+
- # Compiler flag to prevent dynamic linking.
- link_static_flag=$lt_lt_prog_compiler_static
-
-@@ -15784,9 +16483,6 @@ inherit_rpath=$inherit_rpath
- # Whether libtool must link a program against all its dependency libraries.
- link_all_deplibs=$link_all_deplibs
-
--# Fix the shell variable \$srcfile for the compiler.
--fix_srcfile_path=$lt_fix_srcfile_path
--
- # Set to "yes" if exported symbols are required.
- always_export_symbols=$always_export_symbols
-
-@@ -15802,6 +16498,9 @@ include_expsyms=$lt_include_expsyms
- # Commands necessary for linking programs (against libraries) with templates.
- prelink_cmds=$lt_prelink_cmds
-
-+# Commands necessary for finishing linking programs.
-+postlink_cmds=$lt_postlink_cmds
-+
- # Specify filename containing input files.
- file_list_spec=$lt_file_list_spec
-
-@@ -15834,210 +16533,169 @@ ltmain="$ac_aux_dir/ltmain.sh"
- # if finds mixed CR/LF and LF-only lines. Since sed operates in
- # text mode, it properly converts lines to CR/LF. This bash problem
- # is reportedly fixed, but why not run on old versions too?
-- sed '/^# Generated shell functions inserted here/q' "$ltmain" >> "$cfgfile" \
-- || (rm -f "$cfgfile"; exit 1)
--
-- case $xsi_shell in
-- yes)
-- cat << \_LT_EOF >> "$cfgfile"
--
--# func_dirname file append nondir_replacement
--# Compute the dirname of FILE. If nonempty, add APPEND to the result,
--# otherwise set result to NONDIR_REPLACEMENT.
--func_dirname ()
--{
-- case ${1} in
-- */*) func_dirname_result="${1%/*}${2}" ;;
-- * ) func_dirname_result="${3}" ;;
-- esac
--}
--
--# func_basename file
--func_basename ()
--{
-- func_basename_result="${1##*/}"
--}
--
--# func_dirname_and_basename file append nondir_replacement
--# perform func_basename and func_dirname in a single function
--# call:
--# dirname: Compute the dirname of FILE. If nonempty,
--# add APPEND to the result, otherwise set result
--# to NONDIR_REPLACEMENT.
--# value returned in "$func_dirname_result"
--# basename: Compute filename of FILE.
--# value retuned in "$func_basename_result"
--# Implementation must be kept synchronized with func_dirname
--# and func_basename. For efficiency, we do not delegate to
--# those functions but instead duplicate the functionality here.
--func_dirname_and_basename ()
--{
-- case ${1} in
-- */*) func_dirname_result="${1%/*}${2}" ;;
-- * ) func_dirname_result="${3}" ;;
-- esac
-- func_basename_result="${1##*/}"
--}
--
--# func_stripname prefix suffix name
--# strip PREFIX and SUFFIX off of NAME.
--# PREFIX and SUFFIX must not contain globbing or regex special
--# characters, hashes, percent signs, but SUFFIX may contain a leading
--# dot (in which case that matches only a dot).
--func_stripname ()
--{
-- # pdksh 5.2.14 does not do ${X%$Y} correctly if both X and Y are
-- # positional parameters, so assign one to ordinary parameter first.
-- func_stripname_result=${3}
-- func_stripname_result=${func_stripname_result#"${1}"}
-- func_stripname_result=${func_stripname_result%"${2}"}
--}
--
--# func_opt_split
--func_opt_split ()
--{
-- func_opt_split_opt=${1%%=*}
-- func_opt_split_arg=${1#*=}
--}
--
--# func_lo2o object
--func_lo2o ()
--{
-- case ${1} in
-- *.lo) func_lo2o_result=${1%.lo}.${objext} ;;
-- *) func_lo2o_result=${1} ;;
-- esac
--}
--
--# func_xform libobj-or-source
--func_xform ()
--{
-- func_xform_result=${1%.*}.lo
--}
--
--# func_arith arithmetic-term...
--func_arith ()
--{
-- func_arith_result=$(( $* ))
--}
--
--# func_len string
--# STRING may not start with a hyphen.
--func_len ()
--{
-- func_len_result=${#1}
--}
--
--_LT_EOF
-- ;;
-- *) # Bourne compatible functions.
-- cat << \_LT_EOF >> "$cfgfile"
--
--# func_dirname file append nondir_replacement
--# Compute the dirname of FILE. If nonempty, add APPEND to the result,
--# otherwise set result to NONDIR_REPLACEMENT.
--func_dirname ()
--{
-- # Extract subdirectory from the argument.
-- func_dirname_result=`$ECHO "${1}" | $SED "$dirname"`
-- if test "X$func_dirname_result" = "X${1}"; then
-- func_dirname_result="${3}"
-- else
-- func_dirname_result="$func_dirname_result${2}"
-- fi
--}
--
--# func_basename file
--func_basename ()
--{
-- func_basename_result=`$ECHO "${1}" | $SED "$basename"`
--}
--
--
--# func_stripname prefix suffix name
--# strip PREFIX and SUFFIX off of NAME.
--# PREFIX and SUFFIX must not contain globbing or regex special
--# characters, hashes, percent signs, but SUFFIX may contain a leading
--# dot (in which case that matches only a dot).
--# func_strip_suffix prefix name
--func_stripname ()
--{
-- case ${2} in
-- .*) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%\\\\${2}\$%%"`;;
-- *) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%${2}\$%%"`;;
-- esac
--}
--
--# sed scripts:
--my_sed_long_opt='1s/^\(-[^=]*\)=.*/\1/;q'
--my_sed_long_arg='1s/^-[^=]*=//'
--
--# func_opt_split
--func_opt_split ()
--{
-- func_opt_split_opt=`$ECHO "${1}" | $SED "$my_sed_long_opt"`
-- func_opt_split_arg=`$ECHO "${1}" | $SED "$my_sed_long_arg"`
--}
--
--# func_lo2o object
--func_lo2o ()
--{
-- func_lo2o_result=`$ECHO "${1}" | $SED "$lo2o"`
--}
--
--# func_xform libobj-or-source
--func_xform ()
--{
-- func_xform_result=`$ECHO "${1}" | $SED 's/\.[^.]*$/.lo/'`
--}
--
--# func_arith arithmetic-term...
--func_arith ()
--{
-- func_arith_result=`expr "$@"`
--}
--
--# func_len string
--# STRING may not start with a hyphen.
--func_len ()
--{
-- func_len_result=`expr "$1" : ".*" 2>/dev/null || echo $max_cmd_len`
--}
--
--_LT_EOF
--esac
--
--case $lt_shell_append in
-- yes)
-- cat << \_LT_EOF >> "$cfgfile"
--
--# func_append var value
--# Append VALUE to the end of shell variable VAR.
--func_append ()
--{
-- eval "$1+=\$2"
--}
--_LT_EOF
-- ;;
-- *)
-- cat << \_LT_EOF >> "$cfgfile"
--
--# func_append var value
--# Append VALUE to the end of shell variable VAR.
--func_append ()
--{
-- eval "$1=\$$1\$2"
--}
--
--_LT_EOF
-- ;;
-- esac
--
--
-- sed -n '/^# Generated shell functions inserted here/,$p' "$ltmain" >> "$cfgfile" \
-- || (rm -f "$cfgfile"; exit 1)
--
-- mv -f "$cfgfile" "$ofile" ||
-+ sed '$q' "$ltmain" >> "$cfgfile" \
-+ || (rm -f "$cfgfile"; exit 1)
-+
-+ if test x"$xsi_shell" = xyes; then
-+ sed -e '/^func_dirname ()$/,/^} # func_dirname /c\
-+func_dirname ()\
-+{\
-+\ case ${1} in\
-+\ */*) func_dirname_result="${1%/*}${2}" ;;\
-+\ * ) func_dirname_result="${3}" ;;\
-+\ esac\
-+} # Extended-shell func_dirname implementation' "$cfgfile" > $cfgfile.tmp \
-+ && mv -f "$cfgfile.tmp" "$cfgfile" \
-+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-+test 0 -eq $? || _lt_function_replace_fail=:
-+
-+
-+ sed -e '/^func_basename ()$/,/^} # func_basename /c\
-+func_basename ()\
-+{\
-+\ func_basename_result="${1##*/}"\
-+} # Extended-shell func_basename implementation' "$cfgfile" > $cfgfile.tmp \
-+ && mv -f "$cfgfile.tmp" "$cfgfile" \
-+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-+test 0 -eq $? || _lt_function_replace_fail=:
-+
-+
-+ sed -e '/^func_dirname_and_basename ()$/,/^} # func_dirname_and_basename /c\
-+func_dirname_and_basename ()\
-+{\
-+\ case ${1} in\
-+\ */*) func_dirname_result="${1%/*}${2}" ;;\
-+\ * ) func_dirname_result="${3}" ;;\
-+\ esac\
-+\ func_basename_result="${1##*/}"\
-+} # Extended-shell func_dirname_and_basename implementation' "$cfgfile" > $cfgfile.tmp \
-+ && mv -f "$cfgfile.tmp" "$cfgfile" \
-+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-+test 0 -eq $? || _lt_function_replace_fail=:
-+
-+
-+ sed -e '/^func_stripname ()$/,/^} # func_stripname /c\
-+func_stripname ()\
-+{\
-+\ # pdksh 5.2.14 does not do ${X%$Y} correctly if both X and Y are\
-+\ # positional parameters, so assign one to ordinary parameter first.\
-+\ func_stripname_result=${3}\
-+\ func_stripname_result=${func_stripname_result#"${1}"}\
-+\ func_stripname_result=${func_stripname_result%"${2}"}\
-+} # Extended-shell func_stripname implementation' "$cfgfile" > $cfgfile.tmp \
-+ && mv -f "$cfgfile.tmp" "$cfgfile" \
-+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-+test 0 -eq $? || _lt_function_replace_fail=:
-+
-+
-+ sed -e '/^func_split_long_opt ()$/,/^} # func_split_long_opt /c\
-+func_split_long_opt ()\
-+{\
-+\ func_split_long_opt_name=${1%%=*}\
-+\ func_split_long_opt_arg=${1#*=}\
-+} # Extended-shell func_split_long_opt implementation' "$cfgfile" > $cfgfile.tmp \
-+ && mv -f "$cfgfile.tmp" "$cfgfile" \
-+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-+test 0 -eq $? || _lt_function_replace_fail=:
-+
-+
-+ sed -e '/^func_split_short_opt ()$/,/^} # func_split_short_opt /c\
-+func_split_short_opt ()\
-+{\
-+\ func_split_short_opt_arg=${1#??}\
-+\ func_split_short_opt_name=${1%"$func_split_short_opt_arg"}\
-+} # Extended-shell func_split_short_opt implementation' "$cfgfile" > $cfgfile.tmp \
-+ && mv -f "$cfgfile.tmp" "$cfgfile" \
-+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-+test 0 -eq $? || _lt_function_replace_fail=:
-+
-+
-+ sed -e '/^func_lo2o ()$/,/^} # func_lo2o /c\
-+func_lo2o ()\
-+{\
-+\ case ${1} in\
-+\ *.lo) func_lo2o_result=${1%.lo}.${objext} ;;\
-+\ *) func_lo2o_result=${1} ;;\
-+\ esac\
-+} # Extended-shell func_lo2o implementation' "$cfgfile" > $cfgfile.tmp \
-+ && mv -f "$cfgfile.tmp" "$cfgfile" \
-+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-+test 0 -eq $? || _lt_function_replace_fail=:
-+
-+
-+ sed -e '/^func_xform ()$/,/^} # func_xform /c\
-+func_xform ()\
-+{\
-+ func_xform_result=${1%.*}.lo\
-+} # Extended-shell func_xform implementation' "$cfgfile" > $cfgfile.tmp \
-+ && mv -f "$cfgfile.tmp" "$cfgfile" \
-+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-+test 0 -eq $? || _lt_function_replace_fail=:
-+
-+
-+ sed -e '/^func_arith ()$/,/^} # func_arith /c\
-+func_arith ()\
-+{\
-+ func_arith_result=$(( $* ))\
-+} # Extended-shell func_arith implementation' "$cfgfile" > $cfgfile.tmp \
-+ && mv -f "$cfgfile.tmp" "$cfgfile" \
-+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-+test 0 -eq $? || _lt_function_replace_fail=:
-+
-+
-+ sed -e '/^func_len ()$/,/^} # func_len /c\
-+func_len ()\
-+{\
-+ func_len_result=${#1}\
-+} # Extended-shell func_len implementation' "$cfgfile" > $cfgfile.tmp \
-+ && mv -f "$cfgfile.tmp" "$cfgfile" \
-+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-+test 0 -eq $? || _lt_function_replace_fail=:
-+
-+fi
-+
-+if test x"$lt_shell_append" = xyes; then
-+ sed -e '/^func_append ()$/,/^} # func_append /c\
-+func_append ()\
-+{\
-+ eval "${1}+=\\${2}"\
-+} # Extended-shell func_append implementation' "$cfgfile" > $cfgfile.tmp \
-+ && mv -f "$cfgfile.tmp" "$cfgfile" \
-+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-+test 0 -eq $? || _lt_function_replace_fail=:
-+
-+
-+ sed -e '/^func_append_quoted ()$/,/^} # func_append_quoted /c\
-+func_append_quoted ()\
-+{\
-+\ func_quote_for_eval "${2}"\
-+\ eval "${1}+=\\\\ \\$func_quote_for_eval_result"\
-+} # Extended-shell func_append_quoted implementation' "$cfgfile" > $cfgfile.tmp \
-+ && mv -f "$cfgfile.tmp" "$cfgfile" \
-+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-+test 0 -eq $? || _lt_function_replace_fail=:
-+
-+
-+ # Save a `func_append' function call where possible by direct use of '+='
-+ sed -e 's%func_append \([a-zA-Z_]\{1,\}\) "%\1+="%g' $cfgfile > $cfgfile.tmp \
-+ && mv -f "$cfgfile.tmp" "$cfgfile" \
-+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-+ test 0 -eq $? || _lt_function_replace_fail=:
-+else
-+ # Save a `func_append' function call even when '+=' is not available
-+ sed -e 's%func_append \([a-zA-Z_]\{1,\}\) "%\1="$\1%g' $cfgfile > $cfgfile.tmp \
-+ && mv -f "$cfgfile.tmp" "$cfgfile" \
-+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-+ test 0 -eq $? || _lt_function_replace_fail=:
-+fi
-+
-+if test x"$_lt_function_replace_fail" = x":"; then
-+ { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: Unable to substitute extended shell functions in $ofile" >&5
-+$as_echo "$as_me: WARNING: Unable to substitute extended shell functions in $ofile" >&2;}
-+fi
-+
-+
-+ mv -f "$cfgfile" "$ofile" ||
- (rm -f "$ofile" && cp "$cfgfile" "$ofile" && rm -f "$cfgfile")
- chmod +x "$ofile"
-
-diff --git a/libtool.m4 b/libtool.m4
-index a216bb14e99..e37c45ac0b1 100644
---- a/libtool.m4
-+++ b/libtool.m4
-@@ -1,7 +1,8 @@
- # libtool.m4 - Configure libtool for the host system. -*-Autoconf-*-
- #
- # Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005,
--# 2006, 2007, 2008, 2009 Free Software Foundation, Inc.
-+# 2006, 2007, 2008, 2009, 2010 Free Software Foundation,
-+# Inc.
- # Written by Gordon Matzigkeit, 1996
- #
- # This file is free software; the Free Software Foundation gives
-@@ -10,7 +11,8 @@
-
- m4_define([_LT_COPYING], [dnl
- # Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005,
--# 2006, 2007, 2008, 2009 Free Software Foundation, Inc.
-+# 2006, 2007, 2008, 2009, 2010 Free Software Foundation,
-+# Inc.
- # Written by Gordon Matzigkeit, 1996
- #
- # This file is part of GNU Libtool.
-@@ -37,7 +39,7 @@ m4_define([_LT_COPYING], [dnl
- # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
- ])
-
--# serial 56 LT_INIT
-+# serial 57 LT_INIT
-
-
- # LT_PREREQ(VERSION)
-@@ -166,10 +168,13 @@ _LT_DECL([], [exeext], [0], [Executable file suffix (normally "")])dnl
- dnl
- m4_require([_LT_FILEUTILS_DEFAULTS])dnl
- m4_require([_LT_CHECK_SHELL_FEATURES])dnl
-+m4_require([_LT_PATH_CONVERSION_FUNCTIONS])dnl
- m4_require([_LT_CMD_RELOAD])dnl
- m4_require([_LT_CHECK_MAGIC_METHOD])dnl
-+m4_require([_LT_CHECK_SHAREDLIB_FROM_LINKLIB])dnl
- m4_require([_LT_CMD_OLD_ARCHIVE])dnl
- m4_require([_LT_CMD_GLOBAL_SYMBOLS])dnl
-+m4_require([_LT_WITH_SYSROOT])dnl
-
- _LT_CONFIG_LIBTOOL_INIT([
- # See if we are running on zsh, and set the options which allow our
-@@ -632,7 +637,7 @@ m4_ifset([AC_PACKAGE_NAME], [AC_PACKAGE_NAME ])config.lt[]dnl
- m4_ifset([AC_PACKAGE_VERSION], [ AC_PACKAGE_VERSION])
- configured by $[0], generated by m4_PACKAGE_STRING.
-
--Copyright (C) 2009 Free Software Foundation, Inc.
-+Copyright (C) 2010 Free Software Foundation, Inc.
- This config.lt script is free software; the Free Software Foundation
- gives unlimited permision to copy, distribute and modify it."
-
-@@ -746,15 +751,12 @@ _LT_EOF
- # if finds mixed CR/LF and LF-only lines. Since sed operates in
- # text mode, it properly converts lines to CR/LF. This bash problem
- # is reportedly fixed, but why not run on old versions too?
-- sed '/^# Generated shell functions inserted here/q' "$ltmain" >> "$cfgfile" \
-- || (rm -f "$cfgfile"; exit 1)
-+ sed '$q' "$ltmain" >> "$cfgfile" \
-+ || (rm -f "$cfgfile"; exit 1)
-
-- _LT_PROG_XSI_SHELLFNS
-+ _LT_PROG_REPLACE_SHELLFNS
-
-- sed -n '/^# Generated shell functions inserted here/,$p' "$ltmain" >> "$cfgfile" \
-- || (rm -f "$cfgfile"; exit 1)
--
-- mv -f "$cfgfile" "$ofile" ||
-+ mv -f "$cfgfile" "$ofile" ||
- (rm -f "$ofile" && cp "$cfgfile" "$ofile" && rm -f "$cfgfile")
- chmod +x "$ofile"
- ],
-@@ -980,6 +982,8 @@ _LT_EOF
- $LTCC $LTCFLAGS -c -o conftest.o conftest.c 2>&AS_MESSAGE_LOG_FD
- echo "$AR cru libconftest.a conftest.o" >&AS_MESSAGE_LOG_FD
- $AR cru libconftest.a conftest.o 2>&AS_MESSAGE_LOG_FD
-+ echo "$RANLIB libconftest.a" >&AS_MESSAGE_LOG_FD
-+ $RANLIB libconftest.a 2>&AS_MESSAGE_LOG_FD
- cat > conftest.c << _LT_EOF
- int main() { return 0;}
- _LT_EOF
-@@ -1069,30 +1073,41 @@ m4_defun([_LT_DARWIN_LINKER_FEATURES],
- fi
- ])
-
--# _LT_SYS_MODULE_PATH_AIX
--# -----------------------
-+# _LT_SYS_MODULE_PATH_AIX([TAGNAME])
-+# ----------------------------------
- # Links a minimal program and checks the executable
- # for the system default hardcoded library path. In most cases,
- # this is /usr/lib:/lib, but when the MPI compilers are used
- # the location of the communication and MPI libs are included too.
- # If we don't find anything, use the default library path according
- # to the aix ld manual.
-+# Store the results from the different compilers for each TAGNAME.
-+# Allow to override them for all tags through lt_cv_aix_libpath.
- m4_defun([_LT_SYS_MODULE_PATH_AIX],
- [m4_require([_LT_DECL_SED])dnl
--AC_LINK_IFELSE([AC_LANG_SOURCE([AC_LANG_PROGRAM])],[
--lt_aix_libpath_sed='
-- /Import File Strings/,/^$/ {
-- /^0/ {
-- s/^0 *\(.*\)$/\1/
-- p
-- }
-- }'
--aix_libpath=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
--# Check for a 64-bit object if we didn't find anything.
--if test -z "$aix_libpath"; then
-- aix_libpath=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
--fi],[])
--if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
-+if test "${lt_cv_aix_libpath+set}" = set; then
-+ aix_libpath=$lt_cv_aix_libpath
-+else
-+ AC_CACHE_VAL([_LT_TAGVAR([lt_cv_aix_libpath_], [$1])],
-+ [AC_LINK_IFELSE([AC_LANG_PROGRAM],[
-+ lt_aix_libpath_sed='[
-+ /Import File Strings/,/^$/ {
-+ /^0/ {
-+ s/^0 *\([^ ]*\) *$/\1/
-+ p
-+ }
-+ }]'
-+ _LT_TAGVAR([lt_cv_aix_libpath_], [$1])=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
-+ # Check for a 64-bit object if we didn't find anything.
-+ if test -z "$_LT_TAGVAR([lt_cv_aix_libpath_], [$1])"; then
-+ _LT_TAGVAR([lt_cv_aix_libpath_], [$1])=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
-+ fi],[])
-+ if test -z "$_LT_TAGVAR([lt_cv_aix_libpath_], [$1])"; then
-+ _LT_TAGVAR([lt_cv_aix_libpath_], [$1])="/usr/lib:/lib"
-+ fi
-+ ])
-+ aix_libpath=$_LT_TAGVAR([lt_cv_aix_libpath_], [$1])
-+fi
- ])# _LT_SYS_MODULE_PATH_AIX
-
-
-@@ -1117,7 +1132,7 @@ ECHO=$ECHO$ECHO$ECHO$ECHO$ECHO$ECHO
-
- AC_MSG_CHECKING([how to print strings])
- # Test print first, because it will be a builtin if present.
--if test "X`print -r -- -n 2>/dev/null`" = X-n && \
-+if test "X`( print -r -- -n ) 2>/dev/null`" = X-n && \
- test "X`print -r -- $ECHO 2>/dev/null`" = "X$ECHO"; then
- ECHO='print -r --'
- elif test "X`printf %s $ECHO 2>/dev/null`" = "X$ECHO"; then
-@@ -1161,6 +1176,39 @@ _LT_DECL([], [ECHO], [1], [An echo program that protects backslashes])
- ])# _LT_PROG_ECHO_BACKSLASH
-
-
-+# _LT_WITH_SYSROOT
-+# ----------------
-+AC_DEFUN([_LT_WITH_SYSROOT],
-+[AC_MSG_CHECKING([for sysroot])
-+AC_ARG_WITH([libtool-sysroot],
-+[ --with-libtool-sysroot[=DIR] Search for dependent libraries within DIR
-+ (or the compiler's sysroot if not specified).],
-+[], [with_libtool_sysroot=no])
-+
-+dnl lt_sysroot will always be passed unquoted. We quote it here
-+dnl in case the user passed a directory name.
-+lt_sysroot=
-+case ${with_libtool_sysroot} in #(
-+ yes)
-+ if test "$GCC" = yes; then
-+ lt_sysroot=`$CC --print-sysroot 2>/dev/null`
-+ fi
-+ ;; #(
-+ /*)
-+ lt_sysroot=`echo "$with_libtool_sysroot" | sed -e "$sed_quote_subst"`
-+ ;; #(
-+ no|'')
-+ ;; #(
-+ *)
-+ AC_MSG_RESULT([${with_libtool_sysroot}])
-+ AC_MSG_ERROR([The sysroot must be an absolute path.])
-+ ;;
-+esac
-+
-+ AC_MSG_RESULT([${lt_sysroot:-no}])
-+_LT_DECL([], [lt_sysroot], [0], [The root where to search for ]dnl
-+[dependent libraries, and in which our libraries should be installed.])])
-+
- # _LT_ENABLE_LOCK
- # ---------------
- m4_defun([_LT_ENABLE_LOCK],
-@@ -1320,6 +1368,51 @@ need_locks="$enable_libtool_lock"
- ])# _LT_ENABLE_LOCK
-
-
-+# _LT_PROG_AR
-+# -----------
-+m4_defun([_LT_PROG_AR],
-+[AC_CHECK_TOOLS(AR, [ar], false)
-+ touch conftest.c
-+ $AR $plugin_option rc conftest.a conftest.c
-+ if test "$?" != 0; then
-+ AC_MSG_WARN([Failed: $AR $plugin_option rc])
-+ else
-+ AR="$AR $plugin_option"
-+ fi
-+ rm -f conftest.*
-+: ${AR=ar}
-+: ${AR_FLAGS=cru}
-+_LT_DECL([], [AR], [1], [The archiver])
-+_LT_DECL([], [AR_FLAGS], [1], [Flags to create an archive])
-+
-+AC_CACHE_CHECK([for archiver @FILE support], [lt_cv_ar_at_file],
-+ [lt_cv_ar_at_file=no
-+ AC_COMPILE_IFELSE([AC_LANG_PROGRAM],
-+ [echo conftest.$ac_objext > conftest.lst
-+ lt_ar_try='$AR $AR_FLAGS libconftest.a @conftest.lst >&AS_MESSAGE_LOG_FD'
-+ AC_TRY_EVAL([lt_ar_try])
-+ if test "$ac_status" -eq 0; then
-+ # Ensure the archiver fails upon bogus file names.
-+ rm -f conftest.$ac_objext libconftest.a
-+ AC_TRY_EVAL([lt_ar_try])
-+ if test "$ac_status" -ne 0; then
-+ lt_cv_ar_at_file=@
-+ fi
-+ fi
-+ rm -f conftest.* libconftest.a
-+ ])
-+ ])
-+
-+if test "x$lt_cv_ar_at_file" = xno; then
-+ archiver_list_spec=
-+else
-+ archiver_list_spec=$lt_cv_ar_at_file
-+fi
-+_LT_DECL([], [archiver_list_spec], [1],
-+ [How to feed a file listing to the archiver])
-+])# _LT_PROG_AR
-+
-+
- # _LT_CMD_OLD_ARCHIVE
- # -------------------
- m4_defun([_LT_CMD_OLD_ARCHIVE],
-@@ -1336,23 +1429,7 @@ for plugin in $plugin_names; do
- fi
- done
-
--AC_CHECK_TOOL(AR, ar, false)
--test -z "$AR" && AR=ar
--if test -n "$plugin_option"; then
-- if $AR --help 2>&1 | grep -q "\--plugin"; then
-- touch conftest.c
-- $AR $plugin_option rc conftest.a conftest.c
-- if test "$?" != 0; then
-- AC_MSG_WARN([Failed: $AR $plugin_option rc])
-- else
-- AR="$AR $plugin_option"
-- fi
-- rm -f conftest.*
-- fi
--fi
--test -z "$AR_FLAGS" && AR_FLAGS=cru
--_LT_DECL([], [AR], [1], [The archiver])
--_LT_DECL([], [AR_FLAGS], [1])
-+_LT_PROG_AR
-
- AC_CHECK_TOOL(STRIP, strip, :)
- test -z "$STRIP" && STRIP=:
-@@ -1653,7 +1730,7 @@ else
- lt_dlunknown=0; lt_dlno_uscore=1; lt_dlneed_uscore=2
- lt_status=$lt_dlunknown
- cat > conftest.$ac_ext <<_LT_EOF
--[#line __oline__ "configure"
-+[#line $LINENO "configure"
- #include "confdefs.h"
-
- #if HAVE_DLFCN_H
-@@ -1697,10 +1774,10 @@ else
- /* When -fvisbility=hidden is used, assume the code has been annotated
- correspondingly for the symbols needed. */
- #if defined(__GNUC__) && (((__GNUC__ == 3) && (__GNUC_MINOR__ >= 3)) || (__GNUC__ > 3))
--void fnord () __attribute__((visibility("default")));
-+int fnord () __attribute__((visibility("default")));
- #endif
-
--void fnord () { int i=42; }
-+int fnord () { return 42; }
- int main ()
- {
- void *self = dlopen (0, LT_DLGLOBAL|LT_DLLAZY_OR_NOW);
-@@ -2240,8 +2317,9 @@ cygwin* | mingw* | pw32* | cegcc*)
- need_version=no
- need_lib_prefix=no
-
-- case $GCC,$host_os in
-- yes,cygwin* | yes,mingw* | yes,pw32* | yes,cegcc*)
-+ case $GCC,$cc_basename in
-+ yes,*)
-+ # gcc
- library_names_spec='$libname.dll.a'
- # DLL is installed to $(libdir)/../bin by postinstall_cmds
- postinstall_cmds='base_file=`basename \${file}`~
-@@ -2274,13 +2352,71 @@ m4_if([$1], [],[
- library_names_spec='`echo ${libname} | sed -e 's/^lib/pw/'``echo ${release} | $SED -e 's/[[.]]/-/g'`${versuffix}${shared_ext}'
- ;;
- esac
-+ dynamic_linker='Win32 ld.exe'
-+ ;;
-+
-+ *,cl*)
-+ # Native MSVC
-+ libname_spec='$name'
-+ soname_spec='${libname}`echo ${release} | $SED -e 's/[[.]]/-/g'`${versuffix}${shared_ext}'
-+ library_names_spec='${libname}.dll.lib'
-+
-+ case $build_os in
-+ mingw*)
-+ sys_lib_search_path_spec=
-+ lt_save_ifs=$IFS
-+ IFS=';'
-+ for lt_path in $LIB
-+ do
-+ IFS=$lt_save_ifs
-+ # Let DOS variable expansion print the short 8.3 style file name.
-+ lt_path=`cd "$lt_path" 2>/dev/null && cmd //C "for %i in (".") do @echo %~si"`
-+ sys_lib_search_path_spec="$sys_lib_search_path_spec $lt_path"
-+ done
-+ IFS=$lt_save_ifs
-+ # Convert to MSYS style.
-+ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | sed -e 's|\\\\|/|g' -e 's| \\([[a-zA-Z]]\\):| /\\1|g' -e 's|^ ||'`
-+ ;;
-+ cygwin*)
-+ # Convert to unix form, then to dos form, then back to unix form
-+ # but this time dos style (no spaces!) so that the unix form looks
-+ # like /cygdrive/c/PROGRA~1:/cygdr...
-+ sys_lib_search_path_spec=`cygpath --path --unix "$LIB"`
-+ sys_lib_search_path_spec=`cygpath --path --dos "$sys_lib_search_path_spec" 2>/dev/null`
-+ sys_lib_search_path_spec=`cygpath --path --unix "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"`
-+ ;;
-+ *)
-+ sys_lib_search_path_spec="$LIB"
-+ if $ECHO "$sys_lib_search_path_spec" | [$GREP ';[c-zC-Z]:/' >/dev/null]; then
-+ # It is most probably a Windows format PATH.
-+ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e 's/;/ /g'`
-+ else
-+ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"`
-+ fi
-+ # FIXME: find the short name or the path components, as spaces are
-+ # common. (e.g. "Program Files" -> "PROGRA~1")
-+ ;;
-+ esac
-+
-+ # DLL is installed to $(libdir)/../bin by postinstall_cmds
-+ postinstall_cmds='base_file=`basename \${file}`~
-+ dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\${base_file}'\''i; echo \$dlname'\''`~
-+ dldir=$destdir/`dirname \$dlpath`~
-+ test -d \$dldir || mkdir -p \$dldir~
-+ $install_prog $dir/$dlname \$dldir/$dlname'
-+ postuninstall_cmds='dldll=`$SHELL 2>&1 -c '\''. $file; echo \$dlname'\''`~
-+ dlpath=$dir/\$dldll~
-+ $RM \$dlpath'
-+ shlibpath_overrides_runpath=yes
-+ dynamic_linker='Win32 link.exe'
- ;;
-
- *)
-+ # Assume MSVC wrapper
- library_names_spec='${libname}`echo ${release} | $SED -e 's/[[.]]/-/g'`${versuffix}${shared_ext} $libname.lib'
-+ dynamic_linker='Win32 ld.exe'
- ;;
- esac
-- dynamic_linker='Win32 ld.exe'
- # FIXME: first we should search . and the directory the executable is in
- shlibpath_var=PATH
- ;;
-@@ -2970,6 +3106,11 @@ case $reload_flag in
- esac
- reload_cmds='$LD$reload_flag -o $output$reload_objs'
- case $host_os in
-+ cygwin* | mingw* | pw32* | cegcc*)
-+ if test "$GCC" != yes; then
-+ reload_cmds=false
-+ fi
-+ ;;
- darwin*)
- if test "$GCC" = yes; then
- reload_cmds='$LTCC $LTCFLAGS -nostdlib ${wl}-r -o $output$reload_objs'
-@@ -3036,7 +3177,8 @@ mingw* | pw32*)
- lt_cv_deplibs_check_method='file_magic ^x86 archive import|^x86 DLL'
- lt_cv_file_magic_cmd='func_win32_libid'
- else
-- lt_cv_deplibs_check_method='file_magic file format pei*-i386(.*architecture: i386)?'
-+ # Keep this pattern in sync with the one in func_win32_libid.
-+ lt_cv_deplibs_check_method='file_magic file format (pei*-i386(.*architecture: i386)?|pe-arm-wince|pe-x86-64)'
- lt_cv_file_magic_cmd='$OBJDUMP -f'
- fi
- ;;
-@@ -3187,6 +3329,21 @@ tpf*)
- ;;
- esac
- ])
-+
-+file_magic_glob=
-+want_nocaseglob=no
-+if test "$build" = "$host"; then
-+ case $host_os in
-+ mingw* | pw32*)
-+ if ( shopt | grep nocaseglob ) >/dev/null 2>&1; then
-+ want_nocaseglob=yes
-+ else
-+ file_magic_glob=`echo aAbBcCdDeEfFgGhHiIjJkKlLmMnNoOpPqQrRsStTuUvVwWxXyYzZ | $SED -e "s/\(..\)/s\/[[\1]]\/[[\1]]\/g;/g"`
-+ fi
-+ ;;
-+ esac
-+fi
-+
- file_magic_cmd=$lt_cv_file_magic_cmd
- deplibs_check_method=$lt_cv_deplibs_check_method
- test -z "$deplibs_check_method" && deplibs_check_method=unknown
-@@ -3194,7 +3351,11 @@ test -z "$deplibs_check_method" && deplibs_check_method=unknown
- _LT_DECL([], [deplibs_check_method], [1],
- [Method to check whether dependent libraries are shared objects])
- _LT_DECL([], [file_magic_cmd], [1],
-- [Command to use when deplibs_check_method == "file_magic"])
-+ [Command to use when deplibs_check_method = "file_magic"])
-+_LT_DECL([], [file_magic_glob], [1],
-+ [How to find potential files when deplibs_check_method = "file_magic"])
-+_LT_DECL([], [want_nocaseglob], [1],
-+ [Find potential files using nocaseglob when deplibs_check_method = "file_magic"])
- ])# _LT_CHECK_MAGIC_METHOD
-
-
-@@ -3299,6 +3460,67 @@ dnl aclocal-1.4 backwards compatibility:
- dnl AC_DEFUN([AM_PROG_NM], [])
- dnl AC_DEFUN([AC_PROG_NM], [])
-
-+# _LT_CHECK_SHAREDLIB_FROM_LINKLIB
-+# --------------------------------
-+# how to determine the name of the shared library
-+# associated with a specific link library.
-+# -- PORTME fill in with the dynamic library characteristics
-+m4_defun([_LT_CHECK_SHAREDLIB_FROM_LINKLIB],
-+[m4_require([_LT_DECL_EGREP])
-+m4_require([_LT_DECL_OBJDUMP])
-+m4_require([_LT_DECL_DLLTOOL])
-+AC_CACHE_CHECK([how to associate runtime and link libraries],
-+lt_cv_sharedlib_from_linklib_cmd,
-+[lt_cv_sharedlib_from_linklib_cmd='unknown'
-+
-+case $host_os in
-+cygwin* | mingw* | pw32* | cegcc*)
-+ # two different shell functions defined in ltmain.sh
-+ # decide which to use based on capabilities of $DLLTOOL
-+ case `$DLLTOOL --help 2>&1` in
-+ *--identify-strict*)
-+ lt_cv_sharedlib_from_linklib_cmd=func_cygming_dll_for_implib
-+ ;;
-+ *)
-+ lt_cv_sharedlib_from_linklib_cmd=func_cygming_dll_for_implib_fallback
-+ ;;
-+ esac
-+ ;;
-+*)
-+ # fallback: assume linklib IS sharedlib
-+ lt_cv_sharedlib_from_linklib_cmd="$ECHO"
-+ ;;
-+esac
-+])
-+sharedlib_from_linklib_cmd=$lt_cv_sharedlib_from_linklib_cmd
-+test -z "$sharedlib_from_linklib_cmd" && sharedlib_from_linklib_cmd=$ECHO
-+
-+_LT_DECL([], [sharedlib_from_linklib_cmd], [1],
-+ [Command to associate shared and link libraries])
-+])# _LT_CHECK_SHAREDLIB_FROM_LINKLIB
-+
-+
-+# _LT_PATH_MANIFEST_TOOL
-+# ----------------------
-+# locate the manifest tool
-+m4_defun([_LT_PATH_MANIFEST_TOOL],
-+[AC_CHECK_TOOL(MANIFEST_TOOL, mt, :)
-+test -z "$MANIFEST_TOOL" && MANIFEST_TOOL=mt
-+AC_CACHE_CHECK([if $MANIFEST_TOOL is a manifest tool], [lt_cv_path_mainfest_tool],
-+ [lt_cv_path_mainfest_tool=no
-+ echo "$as_me:$LINENO: $MANIFEST_TOOL '-?'" >&AS_MESSAGE_LOG_FD
-+ $MANIFEST_TOOL '-?' 2>conftest.err > conftest.out
-+ cat conftest.err >&AS_MESSAGE_LOG_FD
-+ if $GREP 'Manifest Tool' conftest.out > /dev/null; then
-+ lt_cv_path_mainfest_tool=yes
-+ fi
-+ rm -f conftest*])
-+if test "x$lt_cv_path_mainfest_tool" != xyes; then
-+ MANIFEST_TOOL=:
-+fi
-+_LT_DECL([], [MANIFEST_TOOL], [1], [Manifest tool])dnl
-+])# _LT_PATH_MANIFEST_TOOL
-+
-
- # LT_LIB_M
- # --------
-@@ -3425,8 +3647,8 @@ esac
- lt_cv_sys_global_symbol_to_cdecl="sed -n -e 's/^T .* \(.*\)$/extern int \1();/p' -e 's/^$symcode* .* \(.*\)$/extern char \1;/p'"
-
- # Transform an extracted symbol line into symbol name and symbol address
--lt_cv_sys_global_symbol_to_c_name_address="sed -n -e 's/^: \([[^ ]]*\) $/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([[^ ]]*\) \([[^ ]]*\)$/ {\"\2\", (void *) \&\2},/p'"
--lt_cv_sys_global_symbol_to_c_name_address_lib_prefix="sed -n -e 's/^: \([[^ ]]*\) $/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([[^ ]]*\) \(lib[[^ ]]*\)$/ {\"\2\", (void *) \&\2},/p' -e 's/^$symcode* \([[^ ]]*\) \([[^ ]]*\)$/ {\"lib\2\", (void *) \&\2},/p'"
-+lt_cv_sys_global_symbol_to_c_name_address="sed -n -e 's/^: \([[^ ]]*\)[[ ]]*$/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([[^ ]]*\) \([[^ ]]*\)$/ {\"\2\", (void *) \&\2},/p'"
-+lt_cv_sys_global_symbol_to_c_name_address_lib_prefix="sed -n -e 's/^: \([[^ ]]*\)[[ ]]*$/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([[^ ]]*\) \(lib[[^ ]]*\)$/ {\"\2\", (void *) \&\2},/p' -e 's/^$symcode* \([[^ ]]*\) \([[^ ]]*\)$/ {\"lib\2\", (void *) \&\2},/p'"
-
- # Handle CRLF in mingw tool chain
- opt_cr=
-@@ -3462,6 +3684,7 @@ for ac_symprfx in "" "_"; do
- else
- lt_cv_sys_global_symbol_pipe="sed -n -e 's/^.*[[ ]]\($symcode$symcode*\)[[ ]][[ ]]*$ac_symprfx$sympat$opt_cr$/$symxfrm/p'"
- fi
-+ lt_cv_sys_global_symbol_pipe="$lt_cv_sys_global_symbol_pipe | sed '/ __gnu_lto/d'"
-
- # Check to see that the pipe works correctly.
- pipe_works=no
-@@ -3495,6 +3718,18 @@ _LT_EOF
- if $GREP ' nm_test_var$' "$nlist" >/dev/null; then
- if $GREP ' nm_test_func$' "$nlist" >/dev/null; then
- cat <<_LT_EOF > conftest.$ac_ext
-+/* Keep this code in sync between libtool.m4, ltmain, lt_system.h, and tests. */
-+#if defined(_WIN32) || defined(__CYGWIN__) || defined(_WIN32_WCE)
-+/* DATA imports from DLLs on WIN32 con't be const, because runtime
-+ relocations are performed -- see ld's documentation on pseudo-relocs. */
-+# define LT@&t@_DLSYM_CONST
-+#elif defined(__osf__)
-+/* This system does not cope well with relocations in const data. */
-+# define LT@&t@_DLSYM_CONST
-+#else
-+# define LT@&t@_DLSYM_CONST const
-+#endif
-+
- #ifdef __cplusplus
- extern "C" {
- #endif
-@@ -3506,7 +3741,7 @@ _LT_EOF
- cat <<_LT_EOF >> conftest.$ac_ext
-
- /* The mapping between symbol names and symbols. */
--const struct {
-+LT@&t@_DLSYM_CONST struct {
- const char *name;
- void *address;
- }
-@@ -3532,15 +3767,15 @@ static const void *lt_preloaded_setup() {
- _LT_EOF
- # Now try linking the two files.
- mv conftest.$ac_objext conftstm.$ac_objext
-- lt_save_LIBS="$LIBS"
-- lt_save_CFLAGS="$CFLAGS"
-+ lt_globsym_save_LIBS=$LIBS
-+ lt_globsym_save_CFLAGS=$CFLAGS
- LIBS="conftstm.$ac_objext"
- CFLAGS="$CFLAGS$_LT_TAGVAR(lt_prog_compiler_no_builtin_flag, $1)"
- if AC_TRY_EVAL(ac_link) && test -s conftest${ac_exeext}; then
- pipe_works=yes
- fi
-- LIBS="$lt_save_LIBS"
-- CFLAGS="$lt_save_CFLAGS"
-+ LIBS=$lt_globsym_save_LIBS
-+ CFLAGS=$lt_globsym_save_CFLAGS
- else
- echo "cannot find nm_test_func in $nlist" >&AS_MESSAGE_LOG_FD
- fi
-@@ -3573,6 +3808,13 @@ else
- AC_MSG_RESULT(ok)
- fi
-
-+# Response file support.
-+if test "$lt_cv_nm_interface" = "MS dumpbin"; then
-+ nm_file_list_spec='@'
-+elif $NM --help 2>/dev/null | grep '[[@]]FILE' >/dev/null; then
-+ nm_file_list_spec='@'
-+fi
-+
- _LT_DECL([global_symbol_pipe], [lt_cv_sys_global_symbol_pipe], [1],
- [Take the output of nm and produce a listing of raw symbols and C names])
- _LT_DECL([global_symbol_to_cdecl], [lt_cv_sys_global_symbol_to_cdecl], [1],
-@@ -3583,6 +3825,8 @@ _LT_DECL([global_symbol_to_c_name_address],
- _LT_DECL([global_symbol_to_c_name_address_lib_prefix],
- [lt_cv_sys_global_symbol_to_c_name_address_lib_prefix], [1],
- [Transform the output of nm in a C name address pair when lib prefix is needed])
-+_LT_DECL([], [nm_file_list_spec], [1],
-+ [Specify filename containing input files for $NM])
- ]) # _LT_CMD_GLOBAL_SYMBOLS
-
-
-@@ -3594,7 +3838,6 @@ _LT_TAGVAR(lt_prog_compiler_wl, $1)=
- _LT_TAGVAR(lt_prog_compiler_pic, $1)=
- _LT_TAGVAR(lt_prog_compiler_static, $1)=
-
--AC_MSG_CHECKING([for $compiler option to produce PIC])
- m4_if([$1], [CXX], [
- # C++ specific cases for pic, static, wl, etc.
- if test "$GXX" = yes; then
-@@ -3700,6 +3943,12 @@ m4_if([$1], [CXX], [
- ;;
- esac
- ;;
-+ mingw* | cygwin* | os2* | pw32* | cegcc*)
-+ # This hack is so that the source file can tell whether it is being
-+ # built for inclusion in a dll (and should export symbols for example).
-+ m4_if([$1], [GCJ], [],
-+ [_LT_TAGVAR(lt_prog_compiler_pic, $1)='-DDLL_EXPORT'])
-+ ;;
- dgux*)
- case $cc_basename in
- ec++*)
-@@ -3852,7 +4101,7 @@ m4_if([$1], [CXX], [
- ;;
- solaris*)
- case $cc_basename in
-- CC*)
-+ CC* | sunCC*)
- # Sun C++ 4.2, 5.x and Centerline C++
- _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC'
- _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
-@@ -4075,6 +4324,12 @@ m4_if([$1], [CXX], [
- _LT_TAGVAR(lt_prog_compiler_pic, $1)='--shared'
- _LT_TAGVAR(lt_prog_compiler_static, $1)='--static'
- ;;
-+ nagfor*)
-+ # NAG Fortran compiler
-+ _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,-Wl,,'
-+ _LT_TAGVAR(lt_prog_compiler_pic, $1)='-PIC'
-+ _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
-+ ;;
- pgcc* | pgf77* | pgf90* | pgf95* | pgfortran*)
- # Portland Group compilers (*not* the Pentium gcc compiler,
- # which looks to be a dead project)
-@@ -4137,7 +4392,7 @@ m4_if([$1], [CXX], [
- _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC'
- _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
- case $cc_basename in
-- f77* | f90* | f95*)
-+ f77* | f90* | f95* | sunf77* | sunf90* | sunf95*)
- _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Qoption ld ';;
- *)
- _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,';;
-@@ -4194,9 +4449,11 @@ case $host_os in
- _LT_TAGVAR(lt_prog_compiler_pic, $1)="$_LT_TAGVAR(lt_prog_compiler_pic, $1)@&t@m4_if([$1],[],[ -DPIC],[m4_if([$1],[CXX],[ -DPIC],[])])"
- ;;
- esac
--AC_MSG_RESULT([$_LT_TAGVAR(lt_prog_compiler_pic, $1)])
--_LT_TAGDECL([wl], [lt_prog_compiler_wl], [1],
-- [How to pass a linker flag through the compiler])
-+
-+AC_CACHE_CHECK([for $compiler option to produce PIC],
-+ [_LT_TAGVAR(lt_cv_prog_compiler_pic, $1)],
-+ [_LT_TAGVAR(lt_cv_prog_compiler_pic, $1)=$_LT_TAGVAR(lt_prog_compiler_pic, $1)])
-+_LT_TAGVAR(lt_prog_compiler_pic, $1)=$_LT_TAGVAR(lt_cv_prog_compiler_pic, $1)
-
- #
- # Check to make sure the PIC flag actually works.
-@@ -4215,6 +4472,8 @@ fi
- _LT_TAGDECL([pic_flag], [lt_prog_compiler_pic], [1],
- [Additional compiler flags for building library objects])
-
-+_LT_TAGDECL([wl], [lt_prog_compiler_wl], [1],
-+ [How to pass a linker flag through the compiler])
- #
- # Check to make sure the static flag actually works.
- #
-@@ -4235,6 +4494,7 @@ _LT_TAGDECL([link_static_flag], [lt_prog_compiler_static], [1],
- m4_defun([_LT_LINKER_SHLIBS],
- [AC_REQUIRE([LT_PATH_LD])dnl
- AC_REQUIRE([LT_PATH_NM])dnl
-+m4_require([_LT_PATH_MANIFEST_TOOL])dnl
- m4_require([_LT_FILEUTILS_DEFAULTS])dnl
- m4_require([_LT_DECL_EGREP])dnl
- m4_require([_LT_DECL_SED])dnl
-@@ -4243,6 +4503,7 @@ m4_require([_LT_TAG_COMPILER])dnl
- AC_MSG_CHECKING([whether the $compiler linker ($LD) supports shared libraries])
- m4_if([$1], [CXX], [
- _LT_TAGVAR(export_symbols_cmds, $1)='$NM $libobjs $convenience | $global_symbol_pipe | $SED '\''s/.* //'\'' | sort | uniq > $export_symbols'
-+ _LT_TAGVAR(exclude_expsyms, $1)=['_GLOBAL_OFFSET_TABLE_|_GLOBAL__F[ID]_.*']
- case $host_os in
- aix[[4-9]]*)
- # If we're using GNU nm, then we don't want the "-C" option.
-@@ -4257,15 +4518,20 @@ m4_if([$1], [CXX], [
- ;;
- pw32*)
- _LT_TAGVAR(export_symbols_cmds, $1)="$ltdll_cmds"
-- ;;
-+ ;;
- cygwin* | mingw* | cegcc*)
-- _LT_TAGVAR(export_symbols_cmds, $1)='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[[BCDGRS]][[ ]]/s/.*[[ ]]\([[^ ]]*\)/\1 DATA/;/^.*[[ ]]__nm__/s/^.*[[ ]]__nm__\([[^ ]]*\)[[ ]][[^ ]]*/\1 DATA/;/^I[[ ]]/d;/^[[AITW]][[ ]]/s/.* //'\'' | sort | uniq > $export_symbols'
-- ;;
-+ case $cc_basename in
-+ cl*) ;;
-+ *)
-+ _LT_TAGVAR(export_symbols_cmds, $1)='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[[BCDGRS]][[ ]]/s/.*[[ ]]\([[^ ]]*\)/\1 DATA/;s/^.*[[ ]]__nm__\([[^ ]]*\)[[ ]][[^ ]]*/\1 DATA/;/^I[[ ]]/d;/^[[AITW]][[ ]]/s/.* //'\'' | sort | uniq > $export_symbols'
-+ _LT_TAGVAR(exclude_expsyms, $1)=['[_]+GLOBAL_OFFSET_TABLE_|[_]+GLOBAL__[FID]_.*|[_]+head_[A-Za-z0-9_]+_dll|[A-Za-z0-9_]+_dll_iname']
-+ ;;
-+ esac
-+ ;;
- *)
- _LT_TAGVAR(export_symbols_cmds, $1)='$NM $libobjs $convenience | $global_symbol_pipe | $SED '\''s/.* //'\'' | sort | uniq > $export_symbols'
-- ;;
-+ ;;
- esac
-- _LT_TAGVAR(exclude_expsyms, $1)=['_GLOBAL_OFFSET_TABLE_|_GLOBAL__F[ID]_.*']
- ], [
- runpath_var=
- _LT_TAGVAR(allow_undefined_flag, $1)=
-@@ -4433,7 +4699,8 @@ _LT_EOF
- _LT_TAGVAR(allow_undefined_flag, $1)=unsupported
- _LT_TAGVAR(always_export_symbols, $1)=no
- _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=yes
-- _LT_TAGVAR(export_symbols_cmds, $1)='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[[BCDGRS]][[ ]]/s/.*[[ ]]\([[^ ]]*\)/\1 DATA/'\'' | $SED -e '\''/^[[AITW]][[ ]]/s/.*[[ ]]//'\'' | sort | uniq > $export_symbols'
-+ _LT_TAGVAR(export_symbols_cmds, $1)='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[[BCDGRS]][[ ]]/s/.*[[ ]]\([[^ ]]*\)/\1 DATA/;s/^.*[[ ]]__nm__\([[^ ]]*\)[[ ]][[^ ]]*/\1 DATA/;/^I[[ ]]/d;/^[[AITW]][[ ]]/s/.* //'\'' | sort | uniq > $export_symbols'
-+ _LT_TAGVAR(exclude_expsyms, $1)=['[_]+GLOBAL_OFFSET_TABLE_|[_]+GLOBAL__[FID]_.*|[_]+head_[A-Za-z0-9_]+_dll|[A-Za-z0-9_]+_dll_iname']
-
- if $LD --help 2>&1 | $GREP 'auto-import' > /dev/null; then
- _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib'
-@@ -4532,12 +4799,12 @@ _LT_EOF
- _LT_TAGVAR(whole_archive_flag_spec, $1)='--whole-archive$convenience --no-whole-archive'
- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)=
- _LT_TAGVAR(hardcode_libdir_flag_spec_ld, $1)='-rpath $libdir'
-- _LT_TAGVAR(archive_cmds, $1)='$LD -shared $libobjs $deplibs $compiler_flags -soname $soname -o $lib'
-+ _LT_TAGVAR(archive_cmds, $1)='$LD -shared $libobjs $deplibs $linker_flags -soname $soname -o $lib'
- if test "x$supports_anon_versioning" = xyes; then
- _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $output_objdir/$libname.ver~
- cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~
- echo "local: *; };" >> $output_objdir/$libname.ver~
-- $LD -shared $libobjs $deplibs $compiler_flags -soname $soname -version-script $output_objdir/$libname.ver -o $lib'
-+ $LD -shared $libobjs $deplibs $linker_flags -soname $soname -version-script $output_objdir/$libname.ver -o $lib'
- fi
- ;;
- esac
-@@ -4551,8 +4818,8 @@ _LT_EOF
- _LT_TAGVAR(archive_cmds, $1)='$LD -Bshareable $libobjs $deplibs $linker_flags -o $lib'
- wlarc=
- else
-- _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
-- _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
-+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
-+ _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
- fi
- ;;
-
-@@ -4570,8 +4837,8 @@ _LT_EOF
-
- _LT_EOF
- elif $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then
-- _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
-- _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
-+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
-+ _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
- else
- _LT_TAGVAR(ld_shlibs, $1)=no
- fi
-@@ -4617,8 +4884,8 @@ _LT_EOF
-
- *)
- if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then
-- _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
-- _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
-+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
-+ _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
- else
- _LT_TAGVAR(ld_shlibs, $1)=no
- fi
-@@ -4748,7 +5015,7 @@ _LT_EOF
- _LT_TAGVAR(allow_undefined_flag, $1)='-berok'
- # Determine the default libpath from the value encoded in an
- # empty executable.
-- _LT_SYS_MODULE_PATH_AIX
-+ _LT_SYS_MODULE_PATH_AIX([$1])
- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-blibpath:$libdir:'"$aix_libpath"
- _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags `if test "x${allow_undefined_flag}" != "x"; then func_echo_all "${wl}${allow_undefined_flag}"; else :; fi` '"\${wl}$exp_sym_flag:\$export_symbols $shared_flag"
- else
-@@ -4759,7 +5026,7 @@ _LT_EOF
- else
- # Determine the default libpath from the value encoded in an
- # empty executable.
-- _LT_SYS_MODULE_PATH_AIX
-+ _LT_SYS_MODULE_PATH_AIX([$1])
- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-blibpath:$libdir:'"$aix_libpath"
- # Warning - without using the other run time loading flags,
- # -berok will link without error, but may produce a broken library.
-@@ -4803,20 +5070,63 @@ _LT_EOF
- # Microsoft Visual C++.
- # hardcode_libdir_flag_spec is actually meaningless, as there is
- # no search path for DLLs.
-- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)=' '
-- _LT_TAGVAR(allow_undefined_flag, $1)=unsupported
-- # Tell ltmain to make .lib files, not .a files.
-- libext=lib
-- # Tell ltmain to make .dll files, not .so files.
-- shrext_cmds=".dll"
-- # FIXME: Setting linknames here is a bad hack.
-- _LT_TAGVAR(archive_cmds, $1)='$CC -o $lib $libobjs $compiler_flags `func_echo_all "$deplibs" | $SED '\''s/ -lc$//'\''` -link -dll~linknames='
-- # The linker will automatically build a .lib file if we build a DLL.
-- _LT_TAGVAR(old_archive_from_new_cmds, $1)='true'
-- # FIXME: Should let the user specify the lib program.
-- _LT_TAGVAR(old_archive_cmds, $1)='lib -OUT:$oldlib$oldobjs$old_deplibs'
-- _LT_TAGVAR(fix_srcfile_path, $1)='`cygpath -w "$srcfile"`'
-- _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=yes
-+ case $cc_basename in
-+ cl*)
-+ # Native MSVC
-+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)=' '
-+ _LT_TAGVAR(allow_undefined_flag, $1)=unsupported
-+ _LT_TAGVAR(always_export_symbols, $1)=yes
-+ _LT_TAGVAR(file_list_spec, $1)='@'
-+ # Tell ltmain to make .lib files, not .a files.
-+ libext=lib
-+ # Tell ltmain to make .dll files, not .so files.
-+ shrext_cmds=".dll"
-+ # FIXME: Setting linknames here is a bad hack.
-+ _LT_TAGVAR(archive_cmds, $1)='$CC -o $output_objdir/$soname $libobjs $compiler_flags $deplibs -Wl,-dll~linknames='
-+ _LT_TAGVAR(archive_expsym_cmds, $1)='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then
-+ sed -n -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' -e '1\\\!p' < $export_symbols > $output_objdir/$soname.exp;
-+ else
-+ sed -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' < $export_symbols > $output_objdir/$soname.exp;
-+ fi~
-+ $CC -o $tool_output_objdir$soname $libobjs $compiler_flags $deplibs "@$tool_output_objdir$soname.exp" -Wl,-DLL,-IMPLIB:"$tool_output_objdir$libname.dll.lib"~
-+ linknames='
-+ # The linker will not automatically build a static lib if we build a DLL.
-+ # _LT_TAGVAR(old_archive_from_new_cmds, $1)='true'
-+ _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=yes
-+ _LT_TAGVAR(export_symbols_cmds, $1)='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[[BCDGRS]][[ ]]/s/.*[[ ]]\([[^ ]]*\)/\1,DATA/'\'' | $SED -e '\''/^[[AITW]][[ ]]/s/.*[[ ]]//'\'' | sort | uniq > $export_symbols'
-+ # Don't use ranlib
-+ _LT_TAGVAR(old_postinstall_cmds, $1)='chmod 644 $oldlib'
-+ _LT_TAGVAR(postlink_cmds, $1)='lt_outputfile="@OUTPUT@"~
-+ lt_tool_outputfile="@TOOL_OUTPUT@"~
-+ case $lt_outputfile in
-+ *.exe|*.EXE) ;;
-+ *)
-+ lt_outputfile="$lt_outputfile.exe"
-+ lt_tool_outputfile="$lt_tool_outputfile.exe"
-+ ;;
-+ esac~
-+ if test "$MANIFEST_TOOL" != ":" && test -f "$lt_outputfile.manifest"; then
-+ $MANIFEST_TOOL -manifest "$lt_tool_outputfile.manifest" -outputresource:"$lt_tool_outputfile" || exit 1;
-+ $RM "$lt_outputfile.manifest";
-+ fi'
-+ ;;
-+ *)
-+ # Assume MSVC wrapper
-+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)=' '
-+ _LT_TAGVAR(allow_undefined_flag, $1)=unsupported
-+ # Tell ltmain to make .lib files, not .a files.
-+ libext=lib
-+ # Tell ltmain to make .dll files, not .so files.
-+ shrext_cmds=".dll"
-+ # FIXME: Setting linknames here is a bad hack.
-+ _LT_TAGVAR(archive_cmds, $1)='$CC -o $lib $libobjs $compiler_flags `func_echo_all "$deplibs" | $SED '\''s/ -lc$//'\''` -link -dll~linknames='
-+ # The linker will automatically build a .lib file if we build a DLL.
-+ _LT_TAGVAR(old_archive_from_new_cmds, $1)='true'
-+ # FIXME: Should let the user specify the lib program.
-+ _LT_TAGVAR(old_archive_cmds, $1)='lib -OUT:$oldlib$oldobjs$old_deplibs'
-+ _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=yes
-+ ;;
-+ esac
- ;;
-
- darwin* | rhapsody*)
-@@ -4850,7 +5160,7 @@ _LT_EOF
-
- # FreeBSD 3 and greater uses gcc -shared to do shared libraries.
- freebsd* | dragonfly*)
-- _LT_TAGVAR(archive_cmds, $1)='$CC -shared -o $lib $libobjs $deplibs $compiler_flags'
-+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags'
- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-R$libdir'
- _LT_TAGVAR(hardcode_direct, $1)=yes
- _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
-@@ -4858,7 +5168,7 @@ _LT_EOF
-
- hpux9*)
- if test "$GCC" = yes; then
-- _LT_TAGVAR(archive_cmds, $1)='$RM $output_objdir/$soname~$CC -shared -fPIC ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $libobjs $deplibs $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib'
-+ _LT_TAGVAR(archive_cmds, $1)='$RM $output_objdir/$soname~$CC -shared $pic_flag ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $libobjs $deplibs $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib'
- else
- _LT_TAGVAR(archive_cmds, $1)='$RM $output_objdir/$soname~$LD -b +b $install_libdir -o $output_objdir/$soname $libobjs $deplibs $linker_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib'
- fi
-@@ -4874,7 +5184,7 @@ _LT_EOF
-
- hpux10*)
- if test "$GCC" = yes && test "$with_gnu_ld" = no; then
-- _LT_TAGVAR(archive_cmds, $1)='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'
-+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'
- else
- _LT_TAGVAR(archive_cmds, $1)='$LD -b +h $soname +b $install_libdir -o $lib $libobjs $deplibs $linker_flags'
- fi
-@@ -4898,10 +5208,10 @@ _LT_EOF
- _LT_TAGVAR(archive_cmds, $1)='$CC -shared ${wl}+h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags'
- ;;
- ia64*)
-- _LT_TAGVAR(archive_cmds, $1)='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags'
-+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags'
- ;;
- *)
-- _LT_TAGVAR(archive_cmds, $1)='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'
-+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'
- ;;
- esac
- else
-@@ -4948,16 +5258,31 @@ _LT_EOF
-
- irix5* | irix6* | nonstopux*)
- if test "$GCC" = yes; then
-- _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
-+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
- # Try to use the -exported_symbol ld option, if it does not
- # work, assume that -exports_file does not work either and
- # implicitly export all symbols.
-- save_LDFLAGS="$LDFLAGS"
-- LDFLAGS="$LDFLAGS -shared ${wl}-exported_symbol ${wl}foo ${wl}-update_registry ${wl}/dev/null"
-- AC_LINK_IFELSE([AC_LANG_SOURCE([int foo(void) {}])],
-- _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations ${wl}-exports_file ${wl}$export_symbols -o $lib'
-- )
-- LDFLAGS="$save_LDFLAGS"
-+ # This should be the same for all languages, so no per-tag cache variable.
-+ AC_CACHE_CHECK([whether the $host_os linker accepts -exported_symbol],
-+ [lt_cv_irix_exported_symbol],
-+ [save_LDFLAGS="$LDFLAGS"
-+ LDFLAGS="$LDFLAGS -shared ${wl}-exported_symbol ${wl}foo ${wl}-update_registry ${wl}/dev/null"
-+ AC_LINK_IFELSE(
-+ [AC_LANG_SOURCE(
-+ [AC_LANG_CASE([C], [[int foo (void) { return 0; }]],
-+ [C++], [[int foo (void) { return 0; }]],
-+ [Fortran 77], [[
-+ subroutine foo
-+ end]],
-+ [Fortran], [[
-+ subroutine foo
-+ end]])])],
-+ [lt_cv_irix_exported_symbol=yes],
-+ [lt_cv_irix_exported_symbol=no])
-+ LDFLAGS="$save_LDFLAGS"])
-+ if test "$lt_cv_irix_exported_symbol" = yes; then
-+ _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations ${wl}-exports_file ${wl}$export_symbols -o $lib'
-+ fi
- else
- _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib'
- _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -exports_file $export_symbols -o $lib'
-@@ -5042,7 +5367,7 @@ _LT_EOF
- osf4* | osf5*) # as osf3* with the addition of -msym flag
- if test "$GCC" = yes; then
- _LT_TAGVAR(allow_undefined_flag, $1)=' ${wl}-expect_unresolved ${wl}\*'
-- _LT_TAGVAR(archive_cmds, $1)='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
-+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared${allow_undefined_flag} $pic_flag $libobjs $deplibs $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir'
- else
- _LT_TAGVAR(allow_undefined_flag, $1)=' -expect_unresolved \*'
-@@ -5061,9 +5386,9 @@ _LT_EOF
- _LT_TAGVAR(no_undefined_flag, $1)=' -z defs'
- if test "$GCC" = yes; then
- wlarc='${wl}'
-- _LT_TAGVAR(archive_cmds, $1)='$CC -shared ${wl}-z ${wl}text ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags'
-+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag ${wl}-z ${wl}text ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags'
- _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~
-- $CC -shared ${wl}-z ${wl}text ${wl}-M ${wl}$lib.exp ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp'
-+ $CC -shared $pic_flag ${wl}-z ${wl}text ${wl}-M ${wl}$lib.exp ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp'
- else
- case `$CC -V 2>&1` in
- *"Compilers 5.0"*)
-@@ -5335,8 +5660,6 @@ _LT_TAGDECL([], [inherit_rpath], [0],
- to runtime path list])
- _LT_TAGDECL([], [link_all_deplibs], [0],
- [Whether libtool must link a program against all its dependency libraries])
--_LT_TAGDECL([], [fix_srcfile_path], [1],
-- [Fix the shell variable $srcfile for the compiler])
- _LT_TAGDECL([], [always_export_symbols], [0],
- [Set to "yes" if exported symbols are required])
- _LT_TAGDECL([], [export_symbols_cmds], [2],
-@@ -5347,6 +5670,8 @@ _LT_TAGDECL([], [include_expsyms], [1],
- [Symbols that must always be exported])
- _LT_TAGDECL([], [prelink_cmds], [2],
- [Commands necessary for linking programs (against libraries) with templates])
-+_LT_TAGDECL([], [postlink_cmds], [2],
-+ [Commands necessary for finishing linking programs])
- _LT_TAGDECL([], [file_list_spec], [1],
- [Specify filename containing input files])
- dnl FIXME: Not yet implemented
-@@ -5448,6 +5773,7 @@ CC="$lt_save_CC"
- m4_defun([_LT_LANG_CXX_CONFIG],
- [m4_require([_LT_FILEUTILS_DEFAULTS])dnl
- m4_require([_LT_DECL_EGREP])dnl
-+m4_require([_LT_PATH_MANIFEST_TOOL])dnl
- if test -n "$CXX" && ( test "X$CXX" != "Xno" &&
- ( (test "X$CXX" = "Xg++" && `g++ -v >/dev/null 2>&1` ) ||
- (test "X$CXX" != "Xg++"))) ; then
-@@ -5509,6 +5835,7 @@ if test "$_lt_caught_CXX_error" != yes; then
-
- # Allow CC to be a program name with arguments.
- lt_save_CC=$CC
-+ lt_save_CFLAGS=$CFLAGS
- lt_save_LD=$LD
- lt_save_GCC=$GCC
- GCC=$GXX
-@@ -5526,6 +5853,7 @@ if test "$_lt_caught_CXX_error" != yes; then
- fi
- test -z "${LDCXX+set}" || LD=$LDCXX
- CC=${CXX-"c++"}
-+ CFLAGS=$CXXFLAGS
- compiler=$CC
- _LT_TAGVAR(compiler, $1)=$CC
- _LT_CC_BASENAME([$compiler])
-@@ -5689,7 +6017,7 @@ if test "$_lt_caught_CXX_error" != yes; then
- _LT_TAGVAR(allow_undefined_flag, $1)='-berok'
- # Determine the default libpath from the value encoded in an empty
- # executable.
-- _LT_SYS_MODULE_PATH_AIX
-+ _LT_SYS_MODULE_PATH_AIX([$1])
- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-blibpath:$libdir:'"$aix_libpath"
-
- _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags `if test "x${allow_undefined_flag}" != "x"; then func_echo_all "${wl}${allow_undefined_flag}"; else :; fi` '"\${wl}$exp_sym_flag:\$export_symbols $shared_flag"
-@@ -5701,7 +6029,7 @@ if test "$_lt_caught_CXX_error" != yes; then
- else
- # Determine the default libpath from the value encoded in an
- # empty executable.
-- _LT_SYS_MODULE_PATH_AIX
-+ _LT_SYS_MODULE_PATH_AIX([$1])
- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-blibpath:$libdir:'"$aix_libpath"
- # Warning - without using the other run time loading flags,
- # -berok will link without error, but may produce a broken library.
-@@ -5743,29 +6071,75 @@ if test "$_lt_caught_CXX_error" != yes; then
- ;;
-
- cygwin* | mingw* | pw32* | cegcc*)
-- # _LT_TAGVAR(hardcode_libdir_flag_spec, $1) is actually meaningless,
-- # as there is no search path for DLLs.
-- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir'
-- _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}--export-all-symbols'
-- _LT_TAGVAR(allow_undefined_flag, $1)=unsupported
-- _LT_TAGVAR(always_export_symbols, $1)=no
-- _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=yes
--
-- if $LD --help 2>&1 | $GREP 'auto-import' > /dev/null; then
-- _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib'
-- # If the export-symbols file already is a .def file (1st line
-- # is EXPORTS), use it as is; otherwise, prepend...
-- _LT_TAGVAR(archive_expsym_cmds, $1)='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then
-- cp $export_symbols $output_objdir/$soname.def;
-- else
-- echo EXPORTS > $output_objdir/$soname.def;
-- cat $export_symbols >> $output_objdir/$soname.def;
-- fi~
-- $CC -shared -nostdlib $output_objdir/$soname.def $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib'
-- else
-- _LT_TAGVAR(ld_shlibs, $1)=no
-- fi
-- ;;
-+ case $GXX,$cc_basename in
-+ ,cl* | no,cl*)
-+ # Native MSVC
-+ # hardcode_libdir_flag_spec is actually meaningless, as there is
-+ # no search path for DLLs.
-+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)=' '
-+ _LT_TAGVAR(allow_undefined_flag, $1)=unsupported
-+ _LT_TAGVAR(always_export_symbols, $1)=yes
-+ _LT_TAGVAR(file_list_spec, $1)='@'
-+ # Tell ltmain to make .lib files, not .a files.
-+ libext=lib
-+ # Tell ltmain to make .dll files, not .so files.
-+ shrext_cmds=".dll"
-+ # FIXME: Setting linknames here is a bad hack.
-+ _LT_TAGVAR(archive_cmds, $1)='$CC -o $output_objdir/$soname $libobjs $compiler_flags $deplibs -Wl,-dll~linknames='
-+ _LT_TAGVAR(archive_expsym_cmds, $1)='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then
-+ $SED -n -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' -e '1\\\!p' < $export_symbols > $output_objdir/$soname.exp;
-+ else
-+ $SED -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' < $export_symbols > $output_objdir/$soname.exp;
-+ fi~
-+ $CC -o $tool_output_objdir$soname $libobjs $compiler_flags $deplibs "@$tool_output_objdir$soname.exp" -Wl,-DLL,-IMPLIB:"$tool_output_objdir$libname.dll.lib"~
-+ linknames='
-+ # The linker will not automatically build a static lib if we build a DLL.
-+ # _LT_TAGVAR(old_archive_from_new_cmds, $1)='true'
-+ _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=yes
-+ # Don't use ranlib
-+ _LT_TAGVAR(old_postinstall_cmds, $1)='chmod 644 $oldlib'
-+ _LT_TAGVAR(postlink_cmds, $1)='lt_outputfile="@OUTPUT@"~
-+ lt_tool_outputfile="@TOOL_OUTPUT@"~
-+ case $lt_outputfile in
-+ *.exe|*.EXE) ;;
-+ *)
-+ lt_outputfile="$lt_outputfile.exe"
-+ lt_tool_outputfile="$lt_tool_outputfile.exe"
-+ ;;
-+ esac~
-+ func_to_tool_file "$lt_outputfile"~
-+ if test "$MANIFEST_TOOL" != ":" && test -f "$lt_outputfile.manifest"; then
-+ $MANIFEST_TOOL -manifest "$lt_tool_outputfile.manifest" -outputresource:"$lt_tool_outputfile" || exit 1;
-+ $RM "$lt_outputfile.manifest";
-+ fi'
-+ ;;
-+ *)
-+ # g++
-+ # _LT_TAGVAR(hardcode_libdir_flag_spec, $1) is actually meaningless,
-+ # as there is no search path for DLLs.
-+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir'
-+ _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}--export-all-symbols'
-+ _LT_TAGVAR(allow_undefined_flag, $1)=unsupported
-+ _LT_TAGVAR(always_export_symbols, $1)=no
-+ _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=yes
-+
-+ if $LD --help 2>&1 | $GREP 'auto-import' > /dev/null; then
-+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib'
-+ # If the export-symbols file already is a .def file (1st line
-+ # is EXPORTS), use it as is; otherwise, prepend...
-+ _LT_TAGVAR(archive_expsym_cmds, $1)='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then
-+ cp $export_symbols $output_objdir/$soname.def;
-+ else
-+ echo EXPORTS > $output_objdir/$soname.def;
-+ cat $export_symbols >> $output_objdir/$soname.def;
-+ fi~
-+ $CC -shared -nostdlib $output_objdir/$soname.def $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib'
-+ else
-+ _LT_TAGVAR(ld_shlibs, $1)=no
-+ fi
-+ ;;
-+ esac
-+ ;;
- darwin* | rhapsody*)
- _LT_DARWIN_LINKER_FEATURES($1)
- ;;
-@@ -5840,7 +6214,7 @@ if test "$_lt_caught_CXX_error" != yes; then
- ;;
- *)
- if test "$GXX" = yes; then
-- _LT_TAGVAR(archive_cmds, $1)='$RM $output_objdir/$soname~$CC -shared -nostdlib -fPIC ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib'
-+ _LT_TAGVAR(archive_cmds, $1)='$RM $output_objdir/$soname~$CC -shared -nostdlib $pic_flag ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib'
- else
- # FIXME: insert proper C++ library support
- _LT_TAGVAR(ld_shlibs, $1)=no
-@@ -5911,10 +6285,10 @@ if test "$_lt_caught_CXX_error" != yes; then
- _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib -fPIC ${wl}+h ${wl}$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags'
- ;;
- ia64*)
-- _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib -fPIC ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags'
-+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib $pic_flag ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags'
- ;;
- *)
-- _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib -fPIC ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags'
-+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags'
- ;;
- esac
- fi
-@@ -5955,9 +6329,9 @@ if test "$_lt_caught_CXX_error" != yes; then
- *)
- if test "$GXX" = yes; then
- if test "$with_gnu_ld" = no; then
-- _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
-+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
- else
-- _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` -o $lib'
-+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` -o $lib'
- fi
- fi
- _LT_TAGVAR(link_all_deplibs, $1)=yes
-@@ -6027,20 +6401,20 @@ if test "$_lt_caught_CXX_error" != yes; then
- _LT_TAGVAR(prelink_cmds, $1)='tpldir=Template.dir~
- rm -rf $tpldir~
- $CC --prelink_objects --instantiation_dir $tpldir $objs $libobjs $compile_deplibs~
-- compile_command="$compile_command `find $tpldir -name \*.o | $NL2SP`"'
-+ compile_command="$compile_command `find $tpldir -name \*.o | sort | $NL2SP`"'
- _LT_TAGVAR(old_archive_cmds, $1)='tpldir=Template.dir~
- rm -rf $tpldir~
- $CC --prelink_objects --instantiation_dir $tpldir $oldobjs$old_deplibs~
-- $AR $AR_FLAGS $oldlib$oldobjs$old_deplibs `find $tpldir -name \*.o | $NL2SP`~
-+ $AR $AR_FLAGS $oldlib$oldobjs$old_deplibs `find $tpldir -name \*.o | sort | $NL2SP`~
- $RANLIB $oldlib'
- _LT_TAGVAR(archive_cmds, $1)='tpldir=Template.dir~
- rm -rf $tpldir~
- $CC --prelink_objects --instantiation_dir $tpldir $predep_objects $libobjs $deplibs $convenience $postdep_objects~
-- $CC -shared $pic_flag $predep_objects $libobjs $deplibs `find $tpldir -name \*.o | $NL2SP` $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname -o $lib'
-+ $CC -shared $pic_flag $predep_objects $libobjs $deplibs `find $tpldir -name \*.o | sort | $NL2SP` $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname -o $lib'
- _LT_TAGVAR(archive_expsym_cmds, $1)='tpldir=Template.dir~
- rm -rf $tpldir~
- $CC --prelink_objects --instantiation_dir $tpldir $predep_objects $libobjs $deplibs $convenience $postdep_objects~
-- $CC -shared $pic_flag $predep_objects $libobjs $deplibs `find $tpldir -name \*.o | $NL2SP` $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname ${wl}-retain-symbols-file ${wl}$export_symbols -o $lib'
-+ $CC -shared $pic_flag $predep_objects $libobjs $deplibs `find $tpldir -name \*.o | sort | $NL2SP` $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname ${wl}-retain-symbols-file ${wl}$export_symbols -o $lib'
- ;;
- *) # Version 6 and above use weak symbols
- _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname -o $lib'
-@@ -6235,7 +6609,7 @@ if test "$_lt_caught_CXX_error" != yes; then
- _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib ${allow_undefined_flag} $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
- ;;
- *)
-- _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib ${allow_undefined_flag} $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
-+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag -nostdlib ${allow_undefined_flag} $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
- ;;
- esac
-
-@@ -6281,7 +6655,7 @@ if test "$_lt_caught_CXX_error" != yes; then
-
- solaris*)
- case $cc_basename in
-- CC*)
-+ CC* | sunCC*)
- # Sun C++ 4.2, 5.x and Centerline C++
- _LT_TAGVAR(archive_cmds_need_lc,$1)=yes
- _LT_TAGVAR(no_undefined_flag, $1)=' -zdefs'
-@@ -6322,9 +6696,9 @@ if test "$_lt_caught_CXX_error" != yes; then
- if test "$GXX" = yes && test "$with_gnu_ld" = no; then
- _LT_TAGVAR(no_undefined_flag, $1)=' ${wl}-z ${wl}defs'
- if $CC --version | $GREP -v '^2\.7' > /dev/null; then
-- _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib $LDFLAGS $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-h $wl$soname -o $lib'
-+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag -nostdlib $LDFLAGS $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-h $wl$soname -o $lib'
- _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~
-- $CC -shared -nostdlib ${wl}-M $wl$lib.exp -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~$RM $lib.exp'
-+ $CC -shared $pic_flag -nostdlib ${wl}-M $wl$lib.exp -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~$RM $lib.exp'
-
- # Commands to make compiler produce verbose output that lists
- # what "hidden" libraries, object files and flags are used when
-@@ -6453,6 +6827,7 @@ if test "$_lt_caught_CXX_error" != yes; then
- fi # test -n "$compiler"
-
- CC=$lt_save_CC
-+ CFLAGS=$lt_save_CFLAGS
- LDCXX=$LD
- LD=$lt_save_LD
- GCC=$lt_save_GCC
-@@ -6467,6 +6842,29 @@ AC_LANG_POP
- ])# _LT_LANG_CXX_CONFIG
-
-
-+# _LT_FUNC_STRIPNAME_CNF
-+# ----------------------
-+# func_stripname_cnf prefix suffix name
-+# strip PREFIX and SUFFIX off of NAME.
-+# PREFIX and SUFFIX must not contain globbing or regex special
-+# characters, hashes, percent signs, but SUFFIX may contain a leading
-+# dot (in which case that matches only a dot).
-+#
-+# This function is identical to the (non-XSI) version of func_stripname,
-+# except this one can be used by m4 code that may be executed by configure,
-+# rather than the libtool script.
-+m4_defun([_LT_FUNC_STRIPNAME_CNF],[dnl
-+AC_REQUIRE([_LT_DECL_SED])
-+AC_REQUIRE([_LT_PROG_ECHO_BACKSLASH])
-+func_stripname_cnf ()
-+{
-+ case ${2} in
-+ .*) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%\\\\${2}\$%%"`;;
-+ *) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%${2}\$%%"`;;
-+ esac
-+} # func_stripname_cnf
-+])# _LT_FUNC_STRIPNAME_CNF
-+
- # _LT_SYS_HIDDEN_LIBDEPS([TAGNAME])
- # ---------------------------------
- # Figure out "hidden" library dependencies from verbose
-@@ -6475,6 +6873,7 @@ AC_LANG_POP
- # objects, libraries and library flags.
- m4_defun([_LT_SYS_HIDDEN_LIBDEPS],
- [m4_require([_LT_FILEUTILS_DEFAULTS])dnl
-+AC_REQUIRE([_LT_FUNC_STRIPNAME_CNF])dnl
- # Dependencies to place before and after the object being linked:
- _LT_TAGVAR(predep_objects, $1)=
- _LT_TAGVAR(postdep_objects, $1)=
-@@ -6525,6 +6924,13 @@ public class foo {
- };
- _LT_EOF
- ])
-+
-+_lt_libdeps_save_CFLAGS=$CFLAGS
-+case "$CC $CFLAGS " in #(
-+*\ -flto*\ *) CFLAGS="$CFLAGS -fno-lto" ;;
-+*\ -fwhopr*\ *) CFLAGS="$CFLAGS -fno-whopr" ;;
-+esac
-+
- dnl Parse the compiler output and extract the necessary
- dnl objects, libraries and library flags.
- if AC_TRY_EVAL(ac_compile); then
-@@ -6536,7 +6942,7 @@ if AC_TRY_EVAL(ac_compile); then
- pre_test_object_deps_done=no
-
- for p in `eval "$output_verbose_link_cmd"`; do
-- case $p in
-+ case ${prev}${p} in
-
- -L* | -R* | -l*)
- # Some compilers place space between "-{L,R}" and the path.
-@@ -6545,13 +6951,22 @@ if AC_TRY_EVAL(ac_compile); then
- test $p = "-R"; then
- prev=$p
- continue
-- else
-- prev=
- fi
-
-+ # Expand the sysroot to ease extracting the directories later.
-+ if test -z "$prev"; then
-+ case $p in
-+ -L*) func_stripname_cnf '-L' '' "$p"; prev=-L; p=$func_stripname_result ;;
-+ -R*) func_stripname_cnf '-R' '' "$p"; prev=-R; p=$func_stripname_result ;;
-+ -l*) func_stripname_cnf '-l' '' "$p"; prev=-l; p=$func_stripname_result ;;
-+ esac
-+ fi
-+ case $p in
-+ =*) func_stripname_cnf '=' '' "$p"; p=$lt_sysroot$func_stripname_result ;;
-+ esac
- if test "$pre_test_object_deps_done" = no; then
-- case $p in
-- -L* | -R*)
-+ case ${prev} in
-+ -L | -R)
- # Internal compiler library paths should come after those
- # provided the user. The postdeps already come after the
- # user supplied libs so there is no need to process them.
-@@ -6571,8 +6986,10 @@ if AC_TRY_EVAL(ac_compile); then
- _LT_TAGVAR(postdeps, $1)="${_LT_TAGVAR(postdeps, $1)} ${prev}${p}"
- fi
- fi
-+ prev=
- ;;
-
-+ *.lto.$objext) ;; # Ignore GCC LTO objects
- *.$objext)
- # This assumes that the test object file only shows up
- # once in the compiler output.
-@@ -6608,6 +7025,7 @@ else
- fi
-
- $RM -f confest.$objext
-+CFLAGS=$_lt_libdeps_save_CFLAGS
-
- # PORTME: override above test on systems where it is broken
- m4_if([$1], [CXX],
-@@ -6644,7 +7062,7 @@ linux*)
-
- solaris*)
- case $cc_basename in
-- CC*)
-+ CC* | sunCC*)
- # The more standards-conforming stlport4 library is
- # incompatible with the Cstd library. Avoid specifying
- # it if it's in CXXFLAGS. Ignore libCrun as
-@@ -6757,7 +7175,9 @@ if test "$_lt_disable_F77" != yes; then
- # Allow CC to be a program name with arguments.
- lt_save_CC="$CC"
- lt_save_GCC=$GCC
-+ lt_save_CFLAGS=$CFLAGS
- CC=${F77-"f77"}
-+ CFLAGS=$FFLAGS
- compiler=$CC
- _LT_TAGVAR(compiler, $1)=$CC
- _LT_CC_BASENAME([$compiler])
-@@ -6811,6 +7231,7 @@ if test "$_lt_disable_F77" != yes; then
-
- GCC=$lt_save_GCC
- CC="$lt_save_CC"
-+ CFLAGS="$lt_save_CFLAGS"
- fi # test "$_lt_disable_F77" != yes
-
- AC_LANG_POP
-@@ -6887,7 +7308,9 @@ if test "$_lt_disable_FC" != yes; then
- # Allow CC to be a program name with arguments.
- lt_save_CC="$CC"
- lt_save_GCC=$GCC
-+ lt_save_CFLAGS=$CFLAGS
- CC=${FC-"f95"}
-+ CFLAGS=$FCFLAGS
- compiler=$CC
- GCC=$ac_cv_fc_compiler_gnu
-
-@@ -6943,7 +7366,8 @@ if test "$_lt_disable_FC" != yes; then
- fi # test -n "$compiler"
-
- GCC=$lt_save_GCC
-- CC="$lt_save_CC"
-+ CC=$lt_save_CC
-+ CFLAGS=$lt_save_CFLAGS
- fi # test "$_lt_disable_FC" != yes
-
- AC_LANG_POP
-@@ -6980,10 +7404,12 @@ _LT_COMPILER_BOILERPLATE
- _LT_LINKER_BOILERPLATE
-
- # Allow CC to be a program name with arguments.
--lt_save_CC="$CC"
-+lt_save_CC=$CC
-+lt_save_CFLAGS=$CFLAGS
- lt_save_GCC=$GCC
- GCC=yes
- CC=${GCJ-"gcj"}
-+CFLAGS=$GCJFLAGS
- compiler=$CC
- _LT_TAGVAR(compiler, $1)=$CC
- _LT_TAGVAR(LD, $1)="$LD"
-@@ -7014,7 +7440,8 @@ fi
- AC_LANG_RESTORE
-
- GCC=$lt_save_GCC
--CC="$lt_save_CC"
-+CC=$lt_save_CC
-+CFLAGS=$lt_save_CFLAGS
- ])# _LT_LANG_GCJ_CONFIG
-
-
-@@ -7049,9 +7476,11 @@ _LT_LINKER_BOILERPLATE
-
- # Allow CC to be a program name with arguments.
- lt_save_CC="$CC"
-+lt_save_CFLAGS=$CFLAGS
- lt_save_GCC=$GCC
- GCC=
- CC=${RC-"windres"}
-+CFLAGS=
- compiler=$CC
- _LT_TAGVAR(compiler, $1)=$CC
- _LT_CC_BASENAME([$compiler])
-@@ -7064,7 +7493,8 @@ fi
-
- GCC=$lt_save_GCC
- AC_LANG_RESTORE
--CC="$lt_save_CC"
-+CC=$lt_save_CC
-+CFLAGS=$lt_save_CFLAGS
- ])# _LT_LANG_RC_CONFIG
-
-
-@@ -7123,6 +7553,15 @@ _LT_DECL([], [OBJDUMP], [1], [An object symbol dumper])
- AC_SUBST([OBJDUMP])
- ])
-
-+# _LT_DECL_DLLTOOL
-+# ----------------
-+# Ensure DLLTOOL variable is set.
-+m4_defun([_LT_DECL_DLLTOOL],
-+[AC_CHECK_TOOL(DLLTOOL, dlltool, false)
-+test -z "$DLLTOOL" && DLLTOOL=dlltool
-+_LT_DECL([], [DLLTOOL], [1], [DLL creation program])
-+AC_SUBST([DLLTOOL])
-+])
-
- # _LT_DECL_SED
- # ------------
-@@ -7216,8 +7655,8 @@ m4_defun([_LT_CHECK_SHELL_FEATURES],
- # Try some XSI features
- xsi_shell=no
- ( _lt_dummy="a/b/c"
-- test "${_lt_dummy##*/},${_lt_dummy%/*},"${_lt_dummy%"$_lt_dummy"}, \
-- = c,a/b,, \
-+ test "${_lt_dummy##*/},${_lt_dummy%/*},${_lt_dummy#??}"${_lt_dummy%"$_lt_dummy"}, \
-+ = c,a/b,b/c, \
- && eval 'test $(( 1 + 1 )) -eq 2 \
- && test "${#_lt_dummy}" -eq 5' ) >/dev/null 2>&1 \
- && xsi_shell=yes
-@@ -7256,206 +7695,162 @@ _LT_DECL([NL2SP], [lt_NL2SP], [1], [turn newlines into spaces])dnl
- ])# _LT_CHECK_SHELL_FEATURES
-
-
--# _LT_PROG_XSI_SHELLFNS
--# ---------------------
--# Bourne and XSI compatible variants of some useful shell functions.
--m4_defun([_LT_PROG_XSI_SHELLFNS],
--[case $xsi_shell in
-- yes)
-- cat << \_LT_EOF >> "$cfgfile"
--
--# func_dirname file append nondir_replacement
--# Compute the dirname of FILE. If nonempty, add APPEND to the result,
--# otherwise set result to NONDIR_REPLACEMENT.
--func_dirname ()
--{
-- case ${1} in
-- */*) func_dirname_result="${1%/*}${2}" ;;
-- * ) func_dirname_result="${3}" ;;
-- esac
--}
--
--# func_basename file
--func_basename ()
--{
-- func_basename_result="${1##*/}"
--}
--
--# func_dirname_and_basename file append nondir_replacement
--# perform func_basename and func_dirname in a single function
--# call:
--# dirname: Compute the dirname of FILE. If nonempty,
--# add APPEND to the result, otherwise set result
--# to NONDIR_REPLACEMENT.
--# value returned in "$func_dirname_result"
--# basename: Compute filename of FILE.
--# value retuned in "$func_basename_result"
--# Implementation must be kept synchronized with func_dirname
--# and func_basename. For efficiency, we do not delegate to
--# those functions but instead duplicate the functionality here.
--func_dirname_and_basename ()
--{
-- case ${1} in
-- */*) func_dirname_result="${1%/*}${2}" ;;
-- * ) func_dirname_result="${3}" ;;
-- esac
-- func_basename_result="${1##*/}"
--}
--
--# func_stripname prefix suffix name
--# strip PREFIX and SUFFIX off of NAME.
--# PREFIX and SUFFIX must not contain globbing or regex special
--# characters, hashes, percent signs, but SUFFIX may contain a leading
--# dot (in which case that matches only a dot).
--func_stripname ()
--{
-- # pdksh 5.2.14 does not do ${X%$Y} correctly if both X and Y are
-- # positional parameters, so assign one to ordinary parameter first.
-- func_stripname_result=${3}
-- func_stripname_result=${func_stripname_result#"${1}"}
-- func_stripname_result=${func_stripname_result%"${2}"}
--}
--
--# func_opt_split
--func_opt_split ()
--{
-- func_opt_split_opt=${1%%=*}
-- func_opt_split_arg=${1#*=}
--}
--
--# func_lo2o object
--func_lo2o ()
--{
-- case ${1} in
-- *.lo) func_lo2o_result=${1%.lo}.${objext} ;;
-- *) func_lo2o_result=${1} ;;
-- esac
--}
--
--# func_xform libobj-or-source
--func_xform ()
--{
-- func_xform_result=${1%.*}.lo
--}
--
--# func_arith arithmetic-term...
--func_arith ()
--{
-- func_arith_result=$(( $[*] ))
--}
--
--# func_len string
--# STRING may not start with a hyphen.
--func_len ()
--{
-- func_len_result=${#1}
--}
-+# _LT_PROG_FUNCTION_REPLACE (FUNCNAME, REPLACEMENT-BODY)
-+# ------------------------------------------------------
-+# In `$cfgfile', look for function FUNCNAME delimited by `^FUNCNAME ()$' and
-+# '^} FUNCNAME ', and replace its body with REPLACEMENT-BODY.
-+m4_defun([_LT_PROG_FUNCTION_REPLACE],
-+[dnl {
-+sed -e '/^$1 ()$/,/^} # $1 /c\
-+$1 ()\
-+{\
-+m4_bpatsubsts([$2], [$], [\\], [^\([ ]\)], [\\\1])
-+} # Extended-shell $1 implementation' "$cfgfile" > $cfgfile.tmp \
-+ && mv -f "$cfgfile.tmp" "$cfgfile" \
-+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-+test 0 -eq $? || _lt_function_replace_fail=:
-+])
-
--_LT_EOF
-- ;;
-- *) # Bourne compatible functions.
-- cat << \_LT_EOF >> "$cfgfile"
-
--# func_dirname file append nondir_replacement
--# Compute the dirname of FILE. If nonempty, add APPEND to the result,
--# otherwise set result to NONDIR_REPLACEMENT.
--func_dirname ()
--{
-- # Extract subdirectory from the argument.
-- func_dirname_result=`$ECHO "${1}" | $SED "$dirname"`
-- if test "X$func_dirname_result" = "X${1}"; then
-- func_dirname_result="${3}"
-- else
-- func_dirname_result="$func_dirname_result${2}"
-- fi
--}
-+# _LT_PROG_REPLACE_SHELLFNS
-+# -------------------------
-+# Replace existing portable implementations of several shell functions with
-+# equivalent extended shell implementations where those features are available..
-+m4_defun([_LT_PROG_REPLACE_SHELLFNS],
-+[if test x"$xsi_shell" = xyes; then
-+ _LT_PROG_FUNCTION_REPLACE([func_dirname], [dnl
-+ case ${1} in
-+ */*) func_dirname_result="${1%/*}${2}" ;;
-+ * ) func_dirname_result="${3}" ;;
-+ esac])
-+
-+ _LT_PROG_FUNCTION_REPLACE([func_basename], [dnl
-+ func_basename_result="${1##*/}"])
-+
-+ _LT_PROG_FUNCTION_REPLACE([func_dirname_and_basename], [dnl
-+ case ${1} in
-+ */*) func_dirname_result="${1%/*}${2}" ;;
-+ * ) func_dirname_result="${3}" ;;
-+ esac
-+ func_basename_result="${1##*/}"])
-
--# func_basename file
--func_basename ()
--{
-- func_basename_result=`$ECHO "${1}" | $SED "$basename"`
--}
-+ _LT_PROG_FUNCTION_REPLACE([func_stripname], [dnl
-+ # pdksh 5.2.14 does not do ${X%$Y} correctly if both X and Y are
-+ # positional parameters, so assign one to ordinary parameter first.
-+ func_stripname_result=${3}
-+ func_stripname_result=${func_stripname_result#"${1}"}
-+ func_stripname_result=${func_stripname_result%"${2}"}])
-
--dnl func_dirname_and_basename
--dnl A portable version of this function is already defined in general.m4sh
--dnl so there is no need for it here.
-+ _LT_PROG_FUNCTION_REPLACE([func_split_long_opt], [dnl
-+ func_split_long_opt_name=${1%%=*}
-+ func_split_long_opt_arg=${1#*=}])
-
--# func_stripname prefix suffix name
--# strip PREFIX and SUFFIX off of NAME.
--# PREFIX and SUFFIX must not contain globbing or regex special
--# characters, hashes, percent signs, but SUFFIX may contain a leading
--# dot (in which case that matches only a dot).
--# func_strip_suffix prefix name
--func_stripname ()
--{
-- case ${2} in
-- .*) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%\\\\${2}\$%%"`;;
-- *) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%${2}\$%%"`;;
-- esac
--}
-+ _LT_PROG_FUNCTION_REPLACE([func_split_short_opt], [dnl
-+ func_split_short_opt_arg=${1#??}
-+ func_split_short_opt_name=${1%"$func_split_short_opt_arg"}])
-
--# sed scripts:
--my_sed_long_opt='1s/^\(-[[^=]]*\)=.*/\1/;q'
--my_sed_long_arg='1s/^-[[^=]]*=//'
-+ _LT_PROG_FUNCTION_REPLACE([func_lo2o], [dnl
-+ case ${1} in
-+ *.lo) func_lo2o_result=${1%.lo}.${objext} ;;
-+ *) func_lo2o_result=${1} ;;
-+ esac])
-
--# func_opt_split
--func_opt_split ()
--{
-- func_opt_split_opt=`$ECHO "${1}" | $SED "$my_sed_long_opt"`
-- func_opt_split_arg=`$ECHO "${1}" | $SED "$my_sed_long_arg"`
--}
-+ _LT_PROG_FUNCTION_REPLACE([func_xform], [ func_xform_result=${1%.*}.lo])
-
--# func_lo2o object
--func_lo2o ()
--{
-- func_lo2o_result=`$ECHO "${1}" | $SED "$lo2o"`
--}
-+ _LT_PROG_FUNCTION_REPLACE([func_arith], [ func_arith_result=$(( $[*] ))])
-
--# func_xform libobj-or-source
--func_xform ()
--{
-- func_xform_result=`$ECHO "${1}" | $SED 's/\.[[^.]]*$/.lo/'`
--}
-+ _LT_PROG_FUNCTION_REPLACE([func_len], [ func_len_result=${#1}])
-+fi
-
--# func_arith arithmetic-term...
--func_arith ()
--{
-- func_arith_result=`expr "$[@]"`
--}
-+if test x"$lt_shell_append" = xyes; then
-+ _LT_PROG_FUNCTION_REPLACE([func_append], [ eval "${1}+=\\${2}"])
-
--# func_len string
--# STRING may not start with a hyphen.
--func_len ()
--{
-- func_len_result=`expr "$[1]" : ".*" 2>/dev/null || echo $max_cmd_len`
--}
-+ _LT_PROG_FUNCTION_REPLACE([func_append_quoted], [dnl
-+ func_quote_for_eval "${2}"
-+dnl m4 expansion turns \\\\ into \\, and then the shell eval turns that into \
-+ eval "${1}+=\\\\ \\$func_quote_for_eval_result"])
-
--_LT_EOF
--esac
-+ # Save a `func_append' function call where possible by direct use of '+='
-+ sed -e 's%func_append \([[a-zA-Z_]]\{1,\}\) "%\1+="%g' $cfgfile > $cfgfile.tmp \
-+ && mv -f "$cfgfile.tmp" "$cfgfile" \
-+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-+ test 0 -eq $? || _lt_function_replace_fail=:
-+else
-+ # Save a `func_append' function call even when '+=' is not available
-+ sed -e 's%func_append \([[a-zA-Z_]]\{1,\}\) "%\1="$\1%g' $cfgfile > $cfgfile.tmp \
-+ && mv -f "$cfgfile.tmp" "$cfgfile" \
-+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-+ test 0 -eq $? || _lt_function_replace_fail=:
-+fi
-
--case $lt_shell_append in
-- yes)
-- cat << \_LT_EOF >> "$cfgfile"
-+if test x"$_lt_function_replace_fail" = x":"; then
-+ AC_MSG_WARN([Unable to substitute extended shell functions in $ofile])
-+fi
-+])
-
--# func_append var value
--# Append VALUE to the end of shell variable VAR.
--func_append ()
--{
-- eval "$[1]+=\$[2]"
--}
--_LT_EOF
-+# _LT_PATH_CONVERSION_FUNCTIONS
-+# -----------------------------
-+# Determine which file name conversion functions should be used by
-+# func_to_host_file (and, implicitly, by func_to_host_path). These are needed
-+# for certain cross-compile configurations and native mingw.
-+m4_defun([_LT_PATH_CONVERSION_FUNCTIONS],
-+[AC_REQUIRE([AC_CANONICAL_HOST])dnl
-+AC_REQUIRE([AC_CANONICAL_BUILD])dnl
-+AC_MSG_CHECKING([how to convert $build file names to $host format])
-+AC_CACHE_VAL(lt_cv_to_host_file_cmd,
-+[case $host in
-+ *-*-mingw* )
-+ case $build in
-+ *-*-mingw* ) # actually msys
-+ lt_cv_to_host_file_cmd=func_convert_file_msys_to_w32
-+ ;;
-+ *-*-cygwin* )
-+ lt_cv_to_host_file_cmd=func_convert_file_cygwin_to_w32
-+ ;;
-+ * ) # otherwise, assume *nix
-+ lt_cv_to_host_file_cmd=func_convert_file_nix_to_w32
-+ ;;
-+ esac
- ;;
-- *)
-- cat << \_LT_EOF >> "$cfgfile"
--
--# func_append var value
--# Append VALUE to the end of shell variable VAR.
--func_append ()
--{
-- eval "$[1]=\$$[1]\$[2]"
--}
--
--_LT_EOF
-+ *-*-cygwin* )
-+ case $build in
-+ *-*-mingw* ) # actually msys
-+ lt_cv_to_host_file_cmd=func_convert_file_msys_to_cygwin
-+ ;;
-+ *-*-cygwin* )
-+ lt_cv_to_host_file_cmd=func_convert_file_noop
-+ ;;
-+ * ) # otherwise, assume *nix
-+ lt_cv_to_host_file_cmd=func_convert_file_nix_to_cygwin
-+ ;;
-+ esac
- ;;
-- esac
-+ * ) # unhandled hosts (and "normal" native builds)
-+ lt_cv_to_host_file_cmd=func_convert_file_noop
-+ ;;
-+esac
-+])
-+to_host_file_cmd=$lt_cv_to_host_file_cmd
-+AC_MSG_RESULT([$lt_cv_to_host_file_cmd])
-+_LT_DECL([to_host_file_cmd], [lt_cv_to_host_file_cmd],
-+ [0], [convert $build file names to $host format])dnl
-+
-+AC_MSG_CHECKING([how to convert $build file names to toolchain format])
-+AC_CACHE_VAL(lt_cv_to_tool_file_cmd,
-+[#assume ordinary cross tools, or native build.
-+lt_cv_to_tool_file_cmd=func_convert_file_noop
-+case $host in
-+ *-*-mingw* )
-+ case $build in
-+ *-*-mingw* ) # actually msys
-+ lt_cv_to_tool_file_cmd=func_convert_file_msys_to_w32
-+ ;;
-+ esac
-+ ;;
-+esac
- ])
-+to_tool_file_cmd=$lt_cv_to_tool_file_cmd
-+AC_MSG_RESULT([$lt_cv_to_tool_file_cmd])
-+_LT_DECL([to_tool_file_cmd], [lt_cv_to_tool_file_cmd],
-+ [0], [convert $build files to toolchain format])dnl
-+])# _LT_PATH_CONVERSION_FUNCTIONS
-diff --git a/ltmain.sh b/ltmain.sh
-index 9503ec85d70..70e856e0659 100644
---- a/ltmain.sh
-+++ b/ltmain.sh
-@@ -1,10 +1,9 @@
--# Generated from ltmain.m4sh.
-
--# libtool (GNU libtool 1.3134 2009-11-29) 2.2.7a
-+# libtool (GNU libtool) 2.4
- # Written by Gordon Matzigkeit <gord@gnu.ai.mit.edu>, 1996
-
- # Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005, 2006,
--# 2007, 2008, 2009 Free Software Foundation, Inc.
-+# 2007, 2008, 2009, 2010 Free Software Foundation, Inc.
- # This is free software; see the source for copying conditions. There is NO
- # warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
-
-@@ -38,7 +37,6 @@
- # -n, --dry-run display commands without modifying any files
- # --features display basic configuration information and exit
- # --mode=MODE use operation mode MODE
--# --no-finish let install mode avoid finish commands
- # --preserve-dup-deps don't remove duplicate dependency libraries
- # --quiet, --silent don't print informational messages
- # --no-quiet, --no-silent
-@@ -71,17 +69,19 @@
- # compiler: $LTCC
- # compiler flags: $LTCFLAGS
- # linker: $LD (gnu? $with_gnu_ld)
--# $progname: (GNU libtool 1.3134 2009-11-29) 2.2.7a
-+# $progname: (GNU libtool) 2.4
- # automake: $automake_version
- # autoconf: $autoconf_version
- #
- # Report bugs to <bug-libtool@gnu.org>.
-+# GNU libtool home page: <http://www.gnu.org/software/libtool/>.
-+# General help using GNU software: <http://www.gnu.org/gethelp/>.
-
- PROGRAM=libtool
- PACKAGE=libtool
--VERSION=2.2.7a
--TIMESTAMP=" 1.3134 2009-11-29"
--package_revision=1.3134
-+VERSION=2.4
-+TIMESTAMP=""
-+package_revision=1.3293
-
- # Be Bourne compatible
- if test -n "${ZSH_VERSION+set}" && (emulate sh) >/dev/null 2>&1; then
-@@ -106,9 +106,6 @@ _LTECHO_EOF'
- }
-
- # NLS nuisances: We save the old values to restore during execute mode.
--# Only set LANG and LC_ALL to C if already set.
--# These must not be set unconditionally because not all systems understand
--# e.g. LANG=C (notably SCO).
- lt_user_locale=
- lt_safe_locale=
- for lt_var in LANG LANGUAGE LC_ALL LC_CTYPE LC_COLLATE LC_MESSAGES
-@@ -121,15 +118,13 @@ do
- lt_safe_locale=\"$lt_var=C; \$lt_safe_locale\"
- fi"
- done
-+LC_ALL=C
-+LANGUAGE=C
-+export LANGUAGE LC_ALL
-
- $lt_unset CDPATH
-
-
--
--
--
--
--
- # Work around backward compatibility issue on IRIX 6.5. On IRIX 6.4+, sh
- # is ksh but when the shell is invoked as "sh" and the current value of
- # the _XPG environment variable is not equal to 1 (one), the special
-@@ -140,7 +135,7 @@ progpath="$0"
-
-
- : ${CP="cp -f"}
--: ${ECHO=$as_echo}
-+test "${ECHO+set}" = set || ECHO=${as_echo-'printf %s\n'}
- : ${EGREP="/bin/grep -E"}
- : ${FGREP="/bin/grep -F"}
- : ${GREP="/bin/grep"}
-@@ -149,7 +144,7 @@ progpath="$0"
- : ${MKDIR="mkdir"}
- : ${MV="mv -f"}
- : ${RM="rm -f"}
--: ${SED="/mount/endor/wildenhu/local-x86_64/bin/sed"}
-+: ${SED="/bin/sed"}
- : ${SHELL="${CONFIG_SHELL-/bin/sh}"}
- : ${Xsed="$SED -e 1s/^X//"}
-
-@@ -169,6 +164,27 @@ IFS=" $lt_nl"
- dirname="s,/[^/]*$,,"
- basename="s,^.*/,,"
-
-+# func_dirname file append nondir_replacement
-+# Compute the dirname of FILE. If nonempty, add APPEND to the result,
-+# otherwise set result to NONDIR_REPLACEMENT.
-+func_dirname ()
-+{
-+ func_dirname_result=`$ECHO "${1}" | $SED "$dirname"`
-+ if test "X$func_dirname_result" = "X${1}"; then
-+ func_dirname_result="${3}"
-+ else
-+ func_dirname_result="$func_dirname_result${2}"
-+ fi
-+} # func_dirname may be replaced by extended shell implementation
-+
-+
-+# func_basename file
-+func_basename ()
-+{
-+ func_basename_result=`$ECHO "${1}" | $SED "$basename"`
-+} # func_basename may be replaced by extended shell implementation
-+
-+
- # func_dirname_and_basename file append nondir_replacement
- # perform func_basename and func_dirname in a single function
- # call:
-@@ -183,17 +199,31 @@ basename="s,^.*/,,"
- # those functions but instead duplicate the functionality here.
- func_dirname_and_basename ()
- {
-- # Extract subdirectory from the argument.
-- func_dirname_result=`$ECHO "${1}" | $SED -e "$dirname"`
-- if test "X$func_dirname_result" = "X${1}"; then
-- func_dirname_result="${3}"
-- else
-- func_dirname_result="$func_dirname_result${2}"
-- fi
-- func_basename_result=`$ECHO "${1}" | $SED -e "$basename"`
--}
-+ # Extract subdirectory from the argument.
-+ func_dirname_result=`$ECHO "${1}" | $SED -e "$dirname"`
-+ if test "X$func_dirname_result" = "X${1}"; then
-+ func_dirname_result="${3}"
-+ else
-+ func_dirname_result="$func_dirname_result${2}"
-+ fi
-+ func_basename_result=`$ECHO "${1}" | $SED -e "$basename"`
-+} # func_dirname_and_basename may be replaced by extended shell implementation
-+
-+
-+# func_stripname prefix suffix name
-+# strip PREFIX and SUFFIX off of NAME.
-+# PREFIX and SUFFIX must not contain globbing or regex special
-+# characters, hashes, percent signs, but SUFFIX may contain a leading
-+# dot (in which case that matches only a dot).
-+# func_strip_suffix prefix name
-+func_stripname ()
-+{
-+ case ${2} in
-+ .*) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%\\\\${2}\$%%"`;;
-+ *) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%${2}\$%%"`;;
-+ esac
-+} # func_stripname may be replaced by extended shell implementation
-
--# Generated shell functions inserted here.
-
- # These SED scripts presuppose an absolute path with a trailing slash.
- pathcar='s,^/\([^/]*\).*$,\1,'
-@@ -376,6 +406,15 @@ sed_quote_subst='s/\([`"$\\]\)/\\\1/g'
- # Same as above, but do not quote variable references.
- double_quote_subst='s/\(["`\\]\)/\\\1/g'
-
-+# Sed substitution that turns a string into a regex matching for the
-+# string literally.
-+sed_make_literal_regex='s,[].[^$\\*\/],\\&,g'
-+
-+# Sed substitution that converts a w32 file name or path
-+# which contains forward slashes, into one that contains
-+# (escaped) backslashes. A very naive implementation.
-+lt_sed_naive_backslashify='s|\\\\*|\\|g;s|/|\\|g;s|\\|\\\\|g'
-+
- # Re-`\' parameter expansions in output of double_quote_subst that were
- # `\'-ed in input to the same. If an odd number of `\' preceded a '$'
- # in input to double_quote_subst, that '$' was protected from expansion.
-@@ -404,7 +443,7 @@ opt_warning=:
- # name if it has been set yet.
- func_echo ()
- {
-- $ECHO "$progname${mode+: }$mode: $*"
-+ $ECHO "$progname: ${opt_mode+$opt_mode: }$*"
- }
-
- # func_verbose arg...
-@@ -430,14 +469,14 @@ func_echo_all ()
- # Echo program name prefixed message to standard error.
- func_error ()
- {
-- $ECHO "$progname${mode+: }$mode: "${1+"$@"} 1>&2
-+ $ECHO "$progname: ${opt_mode+$opt_mode: }"${1+"$@"} 1>&2
- }
-
- # func_warning arg...
- # Echo program name prefixed warning message to standard error.
- func_warning ()
- {
-- $opt_warning && $ECHO "$progname${mode+: }$mode: warning: "${1+"$@"} 1>&2
-+ $opt_warning && $ECHO "$progname: ${opt_mode+$opt_mode: }warning: "${1+"$@"} 1>&2
-
- # bash bug again:
- :
-@@ -656,19 +695,35 @@ func_show_eval_locale ()
- fi
- }
-
--
--
-+# func_tr_sh
-+# Turn $1 into a string suitable for a shell variable name.
-+# Result is stored in $func_tr_sh_result. All characters
-+# not in the set a-zA-Z0-9_ are replaced with '_'. Further,
-+# if $1 begins with a digit, a '_' is prepended as well.
-+func_tr_sh ()
-+{
-+ case $1 in
-+ [0-9]* | *[!a-zA-Z0-9_]*)
-+ func_tr_sh_result=`$ECHO "$1" | $SED 's/^\([0-9]\)/_\1/; s/[^a-zA-Z0-9_]/_/g'`
-+ ;;
-+ * )
-+ func_tr_sh_result=$1
-+ ;;
-+ esac
-+}
-
-
- # func_version
- # Echo version message to standard output and exit.
- func_version ()
- {
-+ $opt_debug
-+
- $SED -n '/(C)/!b go
- :more
- /\./!{
- N
-- s/\n# //
-+ s/\n# / /
- b more
- }
- :go
-@@ -685,7 +740,9 @@ func_version ()
- # Echo short help message to standard output and exit.
- func_usage ()
- {
-- $SED -n '/^# Usage:/,/^# *-h/ {
-+ $opt_debug
-+
-+ $SED -n '/^# Usage:/,/^# *.*--help/ {
- s/^# //
- s/^# *$//
- s/\$progname/'$progname'/
-@@ -701,7 +758,10 @@ func_usage ()
- # unless 'noexit' is passed as argument.
- func_help ()
- {
-+ $opt_debug
-+
- $SED -n '/^# Usage:/,/# Report bugs to/ {
-+ :print
- s/^# //
- s/^# *$//
- s*\$progname*'$progname'*
-@@ -714,7 +774,11 @@ func_help ()
- s/\$automake_version/'"`(automake --version) 2>/dev/null |$SED 1q`"'/
- s/\$autoconf_version/'"`(autoconf --version) 2>/dev/null |$SED 1q`"'/
- p
-- }' < "$progpath"
-+ d
-+ }
-+ /^# .* home page:/b print
-+ /^# General help using/b print
-+ ' < "$progpath"
- ret=$?
- if test -z "$1"; then
- exit $ret
-@@ -726,12 +790,39 @@ func_help ()
- # exit_cmd.
- func_missing_arg ()
- {
-- func_error "missing argument for $1"
-+ $opt_debug
-+
-+ func_error "missing argument for $1."
- exit_cmd=exit
- }
-
--exit_cmd=:
-
-+# func_split_short_opt shortopt
-+# Set func_split_short_opt_name and func_split_short_opt_arg shell
-+# variables after splitting SHORTOPT after the 2nd character.
-+func_split_short_opt ()
-+{
-+ my_sed_short_opt='1s/^\(..\).*$/\1/;q'
-+ my_sed_short_rest='1s/^..\(.*\)$/\1/;q'
-+
-+ func_split_short_opt_name=`$ECHO "$1" | $SED "$my_sed_short_opt"`
-+ func_split_short_opt_arg=`$ECHO "$1" | $SED "$my_sed_short_rest"`
-+} # func_split_short_opt may be replaced by extended shell implementation
-+
-+
-+# func_split_long_opt longopt
-+# Set func_split_long_opt_name and func_split_long_opt_arg shell
-+# variables after splitting LONGOPT at the `=' sign.
-+func_split_long_opt ()
-+{
-+ my_sed_long_opt='1s/^\(--[^=]*\)=.*/\1/;q'
-+ my_sed_long_arg='1s/^--[^=]*=//'
-+
-+ func_split_long_opt_name=`$ECHO "$1" | $SED "$my_sed_long_opt"`
-+ func_split_long_opt_arg=`$ECHO "$1" | $SED "$my_sed_long_arg"`
-+} # func_split_long_opt may be replaced by extended shell implementation
-+
-+exit_cmd=:
-
-
-
-@@ -741,26 +832,64 @@ magic="%%%MAGIC variable%%%"
- magic_exe="%%%MAGIC EXE variable%%%"
-
- # Global variables.
--# $mode is unset
- nonopt=
--execute_dlfiles=
- preserve_args=
- lo2o="s/\\.lo\$/.${objext}/"
- o2lo="s/\\.${objext}\$/.lo/"
- extracted_archives=
- extracted_serial=0
-
--opt_dry_run=false
--opt_finish=:
--opt_duplicate_deps=false
--opt_silent=false
--opt_debug=:
--
- # If this variable is set in any of the actions, the command in it
- # will be execed at the end. This prevents here-documents from being
- # left over by shells.
- exec_cmd=
-
-+# func_append var value
-+# Append VALUE to the end of shell variable VAR.
-+func_append ()
-+{
-+ eval "${1}=\$${1}\${2}"
-+} # func_append may be replaced by extended shell implementation
-+
-+# func_append_quoted var value
-+# Quote VALUE and append to the end of shell variable VAR, separated
-+# by a space.
-+func_append_quoted ()
-+{
-+ func_quote_for_eval "${2}"
-+ eval "${1}=\$${1}\\ \$func_quote_for_eval_result"
-+} # func_append_quoted may be replaced by extended shell implementation
-+
-+
-+# func_arith arithmetic-term...
-+func_arith ()
-+{
-+ func_arith_result=`expr "${@}"`
-+} # func_arith may be replaced by extended shell implementation
-+
-+
-+# func_len string
-+# STRING may not start with a hyphen.
-+func_len ()
-+{
-+ func_len_result=`expr "${1}" : ".*" 2>/dev/null || echo $max_cmd_len`
-+} # func_len may be replaced by extended shell implementation
-+
-+
-+# func_lo2o object
-+func_lo2o ()
-+{
-+ func_lo2o_result=`$ECHO "${1}" | $SED "$lo2o"`
-+} # func_lo2o may be replaced by extended shell implementation
-+
-+
-+# func_xform libobj-or-source
-+func_xform ()
-+{
-+ func_xform_result=`$ECHO "${1}" | $SED 's/\.[^.]*$/.lo/'`
-+} # func_xform may be replaced by extended shell implementation
-+
-+
- # func_fatal_configuration arg...
- # Echo program name prefixed message to standard error, followed by
- # a configuration failure hint, and exit.
-@@ -850,130 +979,204 @@ func_enable_tag ()
- esac
- }
-
--# Parse options once, thoroughly. This comes as soon as possible in
--# the script to make things like `libtool --version' happen quickly.
-+# func_check_version_match
-+# Ensure that we are using m4 macros, and libtool script from the same
-+# release of libtool.
-+func_check_version_match ()
- {
-+ if test "$package_revision" != "$macro_revision"; then
-+ if test "$VERSION" != "$macro_version"; then
-+ if test -z "$macro_version"; then
-+ cat >&2 <<_LT_EOF
-+$progname: Version mismatch error. This is $PACKAGE $VERSION, but the
-+$progname: definition of this LT_INIT comes from an older release.
-+$progname: You should recreate aclocal.m4 with macros from $PACKAGE $VERSION
-+$progname: and run autoconf again.
-+_LT_EOF
-+ else
-+ cat >&2 <<_LT_EOF
-+$progname: Version mismatch error. This is $PACKAGE $VERSION, but the
-+$progname: definition of this LT_INIT comes from $PACKAGE $macro_version.
-+$progname: You should recreate aclocal.m4 with macros from $PACKAGE $VERSION
-+$progname: and run autoconf again.
-+_LT_EOF
-+ fi
-+ else
-+ cat >&2 <<_LT_EOF
-+$progname: Version mismatch error. This is $PACKAGE $VERSION, revision $package_revision,
-+$progname: but the definition of this LT_INIT comes from revision $macro_revision.
-+$progname: You should recreate aclocal.m4 with macros from revision $package_revision
-+$progname: of $PACKAGE $VERSION and run autoconf again.
-+_LT_EOF
-+ fi
-
-- # Shorthand for --mode=foo, only valid as the first argument
-- case $1 in
-- clean|clea|cle|cl)
-- shift; set dummy --mode clean ${1+"$@"}; shift
-- ;;
-- compile|compil|compi|comp|com|co|c)
-- shift; set dummy --mode compile ${1+"$@"}; shift
-- ;;
-- execute|execut|execu|exec|exe|ex|e)
-- shift; set dummy --mode execute ${1+"$@"}; shift
-- ;;
-- finish|finis|fini|fin|fi|f)
-- shift; set dummy --mode finish ${1+"$@"}; shift
-- ;;
-- install|instal|insta|inst|ins|in|i)
-- shift; set dummy --mode install ${1+"$@"}; shift
-- ;;
-- link|lin|li|l)
-- shift; set dummy --mode link ${1+"$@"}; shift
-- ;;
-- uninstall|uninstal|uninsta|uninst|unins|unin|uni|un|u)
-- shift; set dummy --mode uninstall ${1+"$@"}; shift
-- ;;
-- esac
-+ exit $EXIT_MISMATCH
-+ fi
-+}
-+
-+
-+# Shorthand for --mode=foo, only valid as the first argument
-+case $1 in
-+clean|clea|cle|cl)
-+ shift; set dummy --mode clean ${1+"$@"}; shift
-+ ;;
-+compile|compil|compi|comp|com|co|c)
-+ shift; set dummy --mode compile ${1+"$@"}; shift
-+ ;;
-+execute|execut|execu|exec|exe|ex|e)
-+ shift; set dummy --mode execute ${1+"$@"}; shift
-+ ;;
-+finish|finis|fini|fin|fi|f)
-+ shift; set dummy --mode finish ${1+"$@"}; shift
-+ ;;
-+install|instal|insta|inst|ins|in|i)
-+ shift; set dummy --mode install ${1+"$@"}; shift
-+ ;;
-+link|lin|li|l)
-+ shift; set dummy --mode link ${1+"$@"}; shift
-+ ;;
-+uninstall|uninstal|uninsta|uninst|unins|unin|uni|un|u)
-+ shift; set dummy --mode uninstall ${1+"$@"}; shift
-+ ;;
-+esac
-
-- # Parse non-mode specific arguments:
-- while test "$#" -gt 0; do
-+
-+
-+# Option defaults:
-+opt_debug=:
-+opt_dry_run=false
-+opt_config=false
-+opt_preserve_dup_deps=false
-+opt_features=false
-+opt_finish=false
-+opt_help=false
-+opt_help_all=false
-+opt_silent=:
-+opt_verbose=:
-+opt_silent=false
-+opt_verbose=false
-+
-+
-+# Parse options once, thoroughly. This comes as soon as possible in the
-+# script to make things like `--version' happen as quickly as we can.
-+{
-+ # this just eases exit handling
-+ while test $# -gt 0; do
- opt="$1"
- shift
--
- case $opt in
-- --config) func_config ;;
--
-- --debug) preserve_args="$preserve_args $opt"
-+ --debug|-x) opt_debug='set -x'
- func_echo "enabling shell trace mode"
-- opt_debug='set -x'
- $opt_debug
- ;;
--
-- -dlopen) test "$#" -eq 0 && func_missing_arg "$opt" && break
-- execute_dlfiles="$execute_dlfiles $1"
-- shift
-+ --dry-run|--dryrun|-n)
-+ opt_dry_run=:
- ;;
--
-- --dry-run | -n) opt_dry_run=: ;;
-- --features) func_features ;;
-- --finish) mode="finish" ;;
-- --no-finish) opt_finish=false ;;
--
-- --mode) test "$#" -eq 0 && func_missing_arg "$opt" && break
-- case $1 in
-- # Valid mode arguments:
-- clean) ;;
-- compile) ;;
-- execute) ;;
-- finish) ;;
-- install) ;;
-- link) ;;
-- relink) ;;
-- uninstall) ;;
--
-- # Catch anything else as an error
-- *) func_error "invalid argument for $opt"
-- exit_cmd=exit
-- break
-- ;;
-- esac
--
-- mode="$1"
-+ --config)
-+ opt_config=:
-+func_config
-+ ;;
-+ --dlopen|-dlopen)
-+ optarg="$1"
-+ opt_dlopen="${opt_dlopen+$opt_dlopen
-+}$optarg"
- shift
- ;;
--
- --preserve-dup-deps)
-- opt_duplicate_deps=: ;;
--
-- --quiet|--silent) preserve_args="$preserve_args $opt"
-- opt_silent=:
-- opt_verbose=false
-+ opt_preserve_dup_deps=:
- ;;
--
-- --no-quiet|--no-silent)
-- preserve_args="$preserve_args $opt"
-- opt_silent=false
-+ --features)
-+ opt_features=:
-+func_features
- ;;
--
-- --verbose| -v) preserve_args="$preserve_args $opt"
-+ --finish)
-+ opt_finish=:
-+set dummy --mode finish ${1+"$@"}; shift
-+ ;;
-+ --help)
-+ opt_help=:
-+ ;;
-+ --help-all)
-+ opt_help_all=:
-+opt_help=': help-all'
-+ ;;
-+ --mode)
-+ test $# = 0 && func_missing_arg $opt && break
-+ optarg="$1"
-+ opt_mode="$optarg"
-+case $optarg in
-+ # Valid mode arguments:
-+ clean|compile|execute|finish|install|link|relink|uninstall) ;;
-+
-+ # Catch anything else as an error
-+ *) func_error "invalid argument for $opt"
-+ exit_cmd=exit
-+ break
-+ ;;
-+esac
-+ shift
-+ ;;
-+ --no-silent|--no-quiet)
- opt_silent=false
-- opt_verbose=:
-+func_append preserve_args " $opt"
- ;;
--
-- --no-verbose) preserve_args="$preserve_args $opt"
-+ --no-verbose)
- opt_verbose=false
-+func_append preserve_args " $opt"
- ;;
--
-- --tag) test "$#" -eq 0 && func_missing_arg "$opt" && break
-- preserve_args="$preserve_args $opt $1"
-- func_enable_tag "$1" # tagname is set here
-+ --silent|--quiet)
-+ opt_silent=:
-+func_append preserve_args " $opt"
-+ opt_verbose=false
-+ ;;
-+ --verbose|-v)
-+ opt_verbose=:
-+func_append preserve_args " $opt"
-+opt_silent=false
-+ ;;
-+ --tag)
-+ test $# = 0 && func_missing_arg $opt && break
-+ optarg="$1"
-+ opt_tag="$optarg"
-+func_append preserve_args " $opt $optarg"
-+func_enable_tag "$optarg"
- shift
- ;;
-
-+ -\?|-h) func_usage ;;
-+ --help) func_help ;;
-+ --version) func_version ;;
-+
- # Separate optargs to long options:
-- -dlopen=*|--mode=*|--tag=*)
-- func_opt_split "$opt"
-- set dummy "$func_opt_split_opt" "$func_opt_split_arg" ${1+"$@"}
-+ --*=*)
-+ func_split_long_opt "$opt"
-+ set dummy "$func_split_long_opt_name" "$func_split_long_opt_arg" ${1+"$@"}
- shift
- ;;
-
-- -\?|-h) func_usage ;;
-- --help) opt_help=: ;;
-- --help-all) opt_help=': help-all' ;;
-- --version) func_version ;;
--
-- -*) func_fatal_help "unrecognized option \`$opt'" ;;
--
-- *) nonopt="$opt"
-- break
-+ # Separate non-argument short options:
-+ -\?*|-h*|-n*|-v*)
-+ func_split_short_opt "$opt"
-+ set dummy "$func_split_short_opt_name" "-$func_split_short_opt_arg" ${1+"$@"}
-+ shift
- ;;
-+
-+ --) break ;;
-+ -*) func_fatal_help "unrecognized option \`$opt'" ;;
-+ *) set dummy "$opt" ${1+"$@"}; shift; break ;;
- esac
- done
-
-+ # Validate options:
-+
-+ # save first non-option argument
-+ if test "$#" -gt 0; then
-+ nonopt="$opt"
-+ shift
-+ fi
-+
-+ # preserve --debug
-+ test "$opt_debug" = : || func_append preserve_args " --debug"
-
- case $host in
- *cygwin* | *mingw* | *pw32* | *cegcc* | *solaris2* )
-@@ -981,82 +1184,44 @@ func_enable_tag ()
- opt_duplicate_compiler_generated_deps=:
- ;;
- *)
-- opt_duplicate_compiler_generated_deps=$opt_duplicate_deps
-+ opt_duplicate_compiler_generated_deps=$opt_preserve_dup_deps
- ;;
- esac
-
-- # Having warned about all mis-specified options, bail out if
-- # anything was wrong.
-- $exit_cmd $EXIT_FAILURE
--}
-+ $opt_help || {
-+ # Sanity checks first:
-+ func_check_version_match
-
--# func_check_version_match
--# Ensure that we are using m4 macros, and libtool script from the same
--# release of libtool.
--func_check_version_match ()
--{
-- if test "$package_revision" != "$macro_revision"; then
-- if test "$VERSION" != "$macro_version"; then
-- if test -z "$macro_version"; then
-- cat >&2 <<_LT_EOF
--$progname: Version mismatch error. This is $PACKAGE $VERSION, but the
--$progname: definition of this LT_INIT comes from an older release.
--$progname: You should recreate aclocal.m4 with macros from $PACKAGE $VERSION
--$progname: and run autoconf again.
--_LT_EOF
-- else
-- cat >&2 <<_LT_EOF
--$progname: Version mismatch error. This is $PACKAGE $VERSION, but the
--$progname: definition of this LT_INIT comes from $PACKAGE $macro_version.
--$progname: You should recreate aclocal.m4 with macros from $PACKAGE $VERSION
--$progname: and run autoconf again.
--_LT_EOF
-- fi
-- else
-- cat >&2 <<_LT_EOF
--$progname: Version mismatch error. This is $PACKAGE $VERSION, revision $package_revision,
--$progname: but the definition of this LT_INIT comes from revision $macro_revision.
--$progname: You should recreate aclocal.m4 with macros from revision $package_revision
--$progname: of $PACKAGE $VERSION and run autoconf again.
--_LT_EOF
-+ if test "$build_libtool_libs" != yes && test "$build_old_libs" != yes; then
-+ func_fatal_configuration "not configured to build any kind of library"
- fi
-
-- exit $EXIT_MISMATCH
-- fi
--}
--
-+ # Darwin sucks
-+ eval std_shrext=\"$shrext_cmds\"
-
--## ----------- ##
--## Main. ##
--## ----------- ##
--
--$opt_help || {
-- # Sanity checks first:
-- func_check_version_match
--
-- if test "$build_libtool_libs" != yes && test "$build_old_libs" != yes; then
-- func_fatal_configuration "not configured to build any kind of library"
-- fi
-+ # Only execute mode is allowed to have -dlopen flags.
-+ if test -n "$opt_dlopen" && test "$opt_mode" != execute; then
-+ func_error "unrecognized option \`-dlopen'"
-+ $ECHO "$help" 1>&2
-+ exit $EXIT_FAILURE
-+ fi
-
-- test -z "$mode" && func_fatal_error "error: you must specify a MODE."
-+ # Change the help message to a mode-specific one.
-+ generic_help="$help"
-+ help="Try \`$progname --help --mode=$opt_mode' for more information."
-+ }
-
-
-- # Darwin sucks
-- eval "std_shrext=\"$shrext_cmds\""
-+ # Bail if the options were screwed
-+ $exit_cmd $EXIT_FAILURE
-+}
-
-
-- # Only execute mode is allowed to have -dlopen flags.
-- if test -n "$execute_dlfiles" && test "$mode" != execute; then
-- func_error "unrecognized option \`-dlopen'"
-- $ECHO "$help" 1>&2
-- exit $EXIT_FAILURE
-- fi
-
-- # Change the help message to a mode-specific one.
-- generic_help="$help"
-- help="Try \`$progname --help --mode=$mode' for more information."
--}
-
-+## ----------- ##
-+## Main. ##
-+## ----------- ##
-
- # func_lalib_p file
- # True iff FILE is a libtool `.la' library or `.lo' object file.
-@@ -1121,12 +1286,9 @@ func_ltwrapper_executable_p ()
- # temporary ltwrapper_script.
- func_ltwrapper_scriptname ()
- {
-- func_ltwrapper_scriptname_result=""
-- if func_ltwrapper_executable_p "$1"; then
-- func_dirname_and_basename "$1" "" "."
-- func_stripname '' '.exe' "$func_basename_result"
-- func_ltwrapper_scriptname_result="$func_dirname_result/$objdir/${func_stripname_result}_ltshwrapper"
-- fi
-+ func_dirname_and_basename "$1" "" "."
-+ func_stripname '' '.exe' "$func_basename_result"
-+ func_ltwrapper_scriptname_result="$func_dirname_result/$objdir/${func_stripname_result}_ltshwrapper"
- }
-
- # func_ltwrapper_p file
-@@ -1149,7 +1311,7 @@ func_execute_cmds ()
- save_ifs=$IFS; IFS='~'
- for cmd in $1; do
- IFS=$save_ifs
-- eval "cmd=\"$cmd\""
-+ eval cmd=\"$cmd\"
- func_show_eval "$cmd" "${2-:}"
- done
- IFS=$save_ifs
-@@ -1172,6 +1334,37 @@ func_source ()
- }
-
-
-+# func_resolve_sysroot PATH
-+# Replace a leading = in PATH with a sysroot. Store the result into
-+# func_resolve_sysroot_result
-+func_resolve_sysroot ()
-+{
-+ func_resolve_sysroot_result=$1
-+ case $func_resolve_sysroot_result in
-+ =*)
-+ func_stripname '=' '' "$func_resolve_sysroot_result"
-+ func_resolve_sysroot_result=$lt_sysroot$func_stripname_result
-+ ;;
-+ esac
-+}
-+
-+# func_replace_sysroot PATH
-+# If PATH begins with the sysroot, replace it with = and
-+# store the result into func_replace_sysroot_result.
-+func_replace_sysroot ()
-+{
-+ case "$lt_sysroot:$1" in
-+ ?*:"$lt_sysroot"*)
-+ func_stripname "$lt_sysroot" '' "$1"
-+ func_replace_sysroot_result="=$func_stripname_result"
-+ ;;
-+ *)
-+ # Including no sysroot.
-+ func_replace_sysroot_result=$1
-+ ;;
-+ esac
-+}
-+
- # func_infer_tag arg
- # Infer tagged configuration to use if any are available and
- # if one wasn't chosen via the "--tag" command line option.
-@@ -1184,8 +1377,7 @@ func_infer_tag ()
- if test -n "$available_tags" && test -z "$tagname"; then
- CC_quoted=
- for arg in $CC; do
-- func_quote_for_eval "$arg"
-- CC_quoted="$CC_quoted $func_quote_for_eval_result"
-+ func_append_quoted CC_quoted "$arg"
- done
- CC_expanded=`func_echo_all $CC`
- CC_quoted_expanded=`func_echo_all $CC_quoted`
-@@ -1204,8 +1396,7 @@ func_infer_tag ()
- CC_quoted=
- for arg in $CC; do
- # Double-quote args containing other shell metacharacters.
-- func_quote_for_eval "$arg"
-- CC_quoted="$CC_quoted $func_quote_for_eval_result"
-+ func_append_quoted CC_quoted "$arg"
- done
- CC_expanded=`func_echo_all $CC`
- CC_quoted_expanded=`func_echo_all $CC_quoted`
-@@ -1274,6 +1465,486 @@ EOF
- }
- }
-
-+
-+##################################################
-+# FILE NAME AND PATH CONVERSION HELPER FUNCTIONS #
-+##################################################
-+
-+# func_convert_core_file_wine_to_w32 ARG
-+# Helper function used by file name conversion functions when $build is *nix,
-+# and $host is mingw, cygwin, or some other w32 environment. Relies on a
-+# correctly configured wine environment available, with the winepath program
-+# in $build's $PATH.
-+#
-+# ARG is the $build file name to be converted to w32 format.
-+# Result is available in $func_convert_core_file_wine_to_w32_result, and will
-+# be empty on error (or when ARG is empty)
-+func_convert_core_file_wine_to_w32 ()
-+{
-+ $opt_debug
-+ func_convert_core_file_wine_to_w32_result="$1"
-+ if test -n "$1"; then
-+ # Unfortunately, winepath does not exit with a non-zero error code, so we
-+ # are forced to check the contents of stdout. On the other hand, if the
-+ # command is not found, the shell will set an exit code of 127 and print
-+ # *an error message* to stdout. So we must check for both error code of
-+ # zero AND non-empty stdout, which explains the odd construction:
-+ func_convert_core_file_wine_to_w32_tmp=`winepath -w "$1" 2>/dev/null`
-+ if test "$?" -eq 0 && test -n "${func_convert_core_file_wine_to_w32_tmp}"; then
-+ func_convert_core_file_wine_to_w32_result=`$ECHO "$func_convert_core_file_wine_to_w32_tmp" |
-+ $SED -e "$lt_sed_naive_backslashify"`
-+ else
-+ func_convert_core_file_wine_to_w32_result=
-+ fi
-+ fi
-+}
-+# end: func_convert_core_file_wine_to_w32
-+
-+
-+# func_convert_core_path_wine_to_w32 ARG
-+# Helper function used by path conversion functions when $build is *nix, and
-+# $host is mingw, cygwin, or some other w32 environment. Relies on a correctly
-+# configured wine environment available, with the winepath program in $build's
-+# $PATH. Assumes ARG has no leading or trailing path separator characters.
-+#
-+# ARG is path to be converted from $build format to win32.
-+# Result is available in $func_convert_core_path_wine_to_w32_result.
-+# Unconvertible file (directory) names in ARG are skipped; if no directory names
-+# are convertible, then the result may be empty.
-+func_convert_core_path_wine_to_w32 ()
-+{
-+ $opt_debug
-+ # unfortunately, winepath doesn't convert paths, only file names
-+ func_convert_core_path_wine_to_w32_result=""
-+ if test -n "$1"; then
-+ oldIFS=$IFS
-+ IFS=:
-+ for func_convert_core_path_wine_to_w32_f in $1; do
-+ IFS=$oldIFS
-+ func_convert_core_file_wine_to_w32 "$func_convert_core_path_wine_to_w32_f"
-+ if test -n "$func_convert_core_file_wine_to_w32_result" ; then
-+ if test -z "$func_convert_core_path_wine_to_w32_result"; then
-+ func_convert_core_path_wine_to_w32_result="$func_convert_core_file_wine_to_w32_result"
-+ else
-+ func_append func_convert_core_path_wine_to_w32_result ";$func_convert_core_file_wine_to_w32_result"
-+ fi
-+ fi
-+ done
-+ IFS=$oldIFS
-+ fi
-+}
-+# end: func_convert_core_path_wine_to_w32
-+
-+
-+# func_cygpath ARGS...
-+# Wrapper around calling the cygpath program via LT_CYGPATH. This is used when
-+# when (1) $build is *nix and Cygwin is hosted via a wine environment; or (2)
-+# $build is MSYS and $host is Cygwin, or (3) $build is Cygwin. In case (1) or
-+# (2), returns the Cygwin file name or path in func_cygpath_result (input
-+# file name or path is assumed to be in w32 format, as previously converted
-+# from $build's *nix or MSYS format). In case (3), returns the w32 file name
-+# or path in func_cygpath_result (input file name or path is assumed to be in
-+# Cygwin format). Returns an empty string on error.
-+#
-+# ARGS are passed to cygpath, with the last one being the file name or path to
-+# be converted.
-+#
-+# Specify the absolute *nix (or w32) name to cygpath in the LT_CYGPATH
-+# environment variable; do not put it in $PATH.
-+func_cygpath ()
-+{
-+ $opt_debug
-+ if test -n "$LT_CYGPATH" && test -f "$LT_CYGPATH"; then
-+ func_cygpath_result=`$LT_CYGPATH "$@" 2>/dev/null`
-+ if test "$?" -ne 0; then
-+ # on failure, ensure result is empty
-+ func_cygpath_result=
-+ fi
-+ else
-+ func_cygpath_result=
-+ func_error "LT_CYGPATH is empty or specifies non-existent file: \`$LT_CYGPATH'"
-+ fi
-+}
-+#end: func_cygpath
-+
-+
-+# func_convert_core_msys_to_w32 ARG
-+# Convert file name or path ARG from MSYS format to w32 format. Return
-+# result in func_convert_core_msys_to_w32_result.
-+func_convert_core_msys_to_w32 ()
-+{
-+ $opt_debug
-+ # awkward: cmd appends spaces to result
-+ func_convert_core_msys_to_w32_result=`( cmd //c echo "$1" ) 2>/dev/null |
-+ $SED -e 's/[ ]*$//' -e "$lt_sed_naive_backslashify"`
-+}
-+#end: func_convert_core_msys_to_w32
-+
-+
-+# func_convert_file_check ARG1 ARG2
-+# Verify that ARG1 (a file name in $build format) was converted to $host
-+# format in ARG2. Otherwise, emit an error message, but continue (resetting
-+# func_to_host_file_result to ARG1).
-+func_convert_file_check ()
-+{
-+ $opt_debug
-+ if test -z "$2" && test -n "$1" ; then
-+ func_error "Could not determine host file name corresponding to"
-+ func_error " \`$1'"
-+ func_error "Continuing, but uninstalled executables may not work."
-+ # Fallback:
-+ func_to_host_file_result="$1"
-+ fi
-+}
-+# end func_convert_file_check
-+
-+
-+# func_convert_path_check FROM_PATHSEP TO_PATHSEP FROM_PATH TO_PATH
-+# Verify that FROM_PATH (a path in $build format) was converted to $host
-+# format in TO_PATH. Otherwise, emit an error message, but continue, resetting
-+# func_to_host_file_result to a simplistic fallback value (see below).
-+func_convert_path_check ()
-+{
-+ $opt_debug
-+ if test -z "$4" && test -n "$3"; then
-+ func_error "Could not determine the host path corresponding to"
-+ func_error " \`$3'"
-+ func_error "Continuing, but uninstalled executables may not work."
-+ # Fallback. This is a deliberately simplistic "conversion" and
-+ # should not be "improved". See libtool.info.
-+ if test "x$1" != "x$2"; then
-+ lt_replace_pathsep_chars="s|$1|$2|g"
-+ func_to_host_path_result=`echo "$3" |
-+ $SED -e "$lt_replace_pathsep_chars"`
-+ else
-+ func_to_host_path_result="$3"
-+ fi
-+ fi
-+}
-+# end func_convert_path_check
-+
-+
-+# func_convert_path_front_back_pathsep FRONTPAT BACKPAT REPL ORIG
-+# Modifies func_to_host_path_result by prepending REPL if ORIG matches FRONTPAT
-+# and appending REPL if ORIG matches BACKPAT.
-+func_convert_path_front_back_pathsep ()
-+{
-+ $opt_debug
-+ case $4 in
-+ $1 ) func_to_host_path_result="$3$func_to_host_path_result"
-+ ;;
-+ esac
-+ case $4 in
-+ $2 ) func_append func_to_host_path_result "$3"
-+ ;;
-+ esac
-+}
-+# end func_convert_path_front_back_pathsep
-+
-+
-+##################################################
-+# $build to $host FILE NAME CONVERSION FUNCTIONS #
-+##################################################
-+# invoked via `$to_host_file_cmd ARG'
-+#
-+# In each case, ARG is the path to be converted from $build to $host format.
-+# Result will be available in $func_to_host_file_result.
-+
-+
-+# func_to_host_file ARG
-+# Converts the file name ARG from $build format to $host format. Return result
-+# in func_to_host_file_result.
-+func_to_host_file ()
-+{
-+ $opt_debug
-+ $to_host_file_cmd "$1"
-+}
-+# end func_to_host_file
-+
-+
-+# func_to_tool_file ARG LAZY
-+# converts the file name ARG from $build format to toolchain format. Return
-+# result in func_to_tool_file_result. If the conversion in use is listed
-+# in (the comma separated) LAZY, no conversion takes place.
-+func_to_tool_file ()
-+{
-+ $opt_debug
-+ case ,$2, in
-+ *,"$to_tool_file_cmd",*)
-+ func_to_tool_file_result=$1
-+ ;;
-+ *)
-+ $to_tool_file_cmd "$1"
-+ func_to_tool_file_result=$func_to_host_file_result
-+ ;;
-+ esac
-+}
-+# end func_to_tool_file
-+
-+
-+# func_convert_file_noop ARG
-+# Copy ARG to func_to_host_file_result.
-+func_convert_file_noop ()
-+{
-+ func_to_host_file_result="$1"
-+}
-+# end func_convert_file_noop
-+
-+
-+# func_convert_file_msys_to_w32 ARG
-+# Convert file name ARG from (mingw) MSYS to (mingw) w32 format; automatic
-+# conversion to w32 is not available inside the cwrapper. Returns result in
-+# func_to_host_file_result.
-+func_convert_file_msys_to_w32 ()
-+{
-+ $opt_debug
-+ func_to_host_file_result="$1"
-+ if test -n "$1"; then
-+ func_convert_core_msys_to_w32 "$1"
-+ func_to_host_file_result="$func_convert_core_msys_to_w32_result"
-+ fi
-+ func_convert_file_check "$1" "$func_to_host_file_result"
-+}
-+# end func_convert_file_msys_to_w32
-+
-+
-+# func_convert_file_cygwin_to_w32 ARG
-+# Convert file name ARG from Cygwin to w32 format. Returns result in
-+# func_to_host_file_result.
-+func_convert_file_cygwin_to_w32 ()
-+{
-+ $opt_debug
-+ func_to_host_file_result="$1"
-+ if test -n "$1"; then
-+ # because $build is cygwin, we call "the" cygpath in $PATH; no need to use
-+ # LT_CYGPATH in this case.
-+ func_to_host_file_result=`cygpath -m "$1"`
-+ fi
-+ func_convert_file_check "$1" "$func_to_host_file_result"
-+}
-+# end func_convert_file_cygwin_to_w32
-+
-+
-+# func_convert_file_nix_to_w32 ARG
-+# Convert file name ARG from *nix to w32 format. Requires a wine environment
-+# and a working winepath. Returns result in func_to_host_file_result.
-+func_convert_file_nix_to_w32 ()
-+{
-+ $opt_debug
-+ func_to_host_file_result="$1"
-+ if test -n "$1"; then
-+ func_convert_core_file_wine_to_w32 "$1"
-+ func_to_host_file_result="$func_convert_core_file_wine_to_w32_result"
-+ fi
-+ func_convert_file_check "$1" "$func_to_host_file_result"
-+}
-+# end func_convert_file_nix_to_w32
-+
-+
-+# func_convert_file_msys_to_cygwin ARG
-+# Convert file name ARG from MSYS to Cygwin format. Requires LT_CYGPATH set.
-+# Returns result in func_to_host_file_result.
-+func_convert_file_msys_to_cygwin ()
-+{
-+ $opt_debug
-+ func_to_host_file_result="$1"
-+ if test -n "$1"; then
-+ func_convert_core_msys_to_w32 "$1"
-+ func_cygpath -u "$func_convert_core_msys_to_w32_result"
-+ func_to_host_file_result="$func_cygpath_result"
-+ fi
-+ func_convert_file_check "$1" "$func_to_host_file_result"
-+}
-+# end func_convert_file_msys_to_cygwin
-+
-+
-+# func_convert_file_nix_to_cygwin ARG
-+# Convert file name ARG from *nix to Cygwin format. Requires Cygwin installed
-+# in a wine environment, working winepath, and LT_CYGPATH set. Returns result
-+# in func_to_host_file_result.
-+func_convert_file_nix_to_cygwin ()
-+{
-+ $opt_debug
-+ func_to_host_file_result="$1"
-+ if test -n "$1"; then
-+ # convert from *nix to w32, then use cygpath to convert from w32 to cygwin.
-+ func_convert_core_file_wine_to_w32 "$1"
-+ func_cygpath -u "$func_convert_core_file_wine_to_w32_result"
-+ func_to_host_file_result="$func_cygpath_result"
-+ fi
-+ func_convert_file_check "$1" "$func_to_host_file_result"
-+}
-+# end func_convert_file_nix_to_cygwin
-+
-+
-+#############################################
-+# $build to $host PATH CONVERSION FUNCTIONS #
-+#############################################
-+# invoked via `$to_host_path_cmd ARG'
-+#
-+# In each case, ARG is the path to be converted from $build to $host format.
-+# The result will be available in $func_to_host_path_result.
-+#
-+# Path separators are also converted from $build format to $host format. If
-+# ARG begins or ends with a path separator character, it is preserved (but
-+# converted to $host format) on output.
-+#
-+# All path conversion functions are named using the following convention:
-+# file name conversion function : func_convert_file_X_to_Y ()
-+# path conversion function : func_convert_path_X_to_Y ()
-+# where, for any given $build/$host combination the 'X_to_Y' value is the
-+# same. If conversion functions are added for new $build/$host combinations,
-+# the two new functions must follow this pattern, or func_init_to_host_path_cmd
-+# will break.
-+
-+
-+# func_init_to_host_path_cmd
-+# Ensures that function "pointer" variable $to_host_path_cmd is set to the
-+# appropriate value, based on the value of $to_host_file_cmd.
-+to_host_path_cmd=
-+func_init_to_host_path_cmd ()
-+{
-+ $opt_debug
-+ if test -z "$to_host_path_cmd"; then
-+ func_stripname 'func_convert_file_' '' "$to_host_file_cmd"
-+ to_host_path_cmd="func_convert_path_${func_stripname_result}"
-+ fi
-+}
-+
-+
-+# func_to_host_path ARG
-+# Converts the path ARG from $build format to $host format. Return result
-+# in func_to_host_path_result.
-+func_to_host_path ()
-+{
-+ $opt_debug
-+ func_init_to_host_path_cmd
-+ $to_host_path_cmd "$1"
-+}
-+# end func_to_host_path
-+
-+
-+# func_convert_path_noop ARG
-+# Copy ARG to func_to_host_path_result.
-+func_convert_path_noop ()
-+{
-+ func_to_host_path_result="$1"
-+}
-+# end func_convert_path_noop
-+
-+
-+# func_convert_path_msys_to_w32 ARG
-+# Convert path ARG from (mingw) MSYS to (mingw) w32 format; automatic
-+# conversion to w32 is not available inside the cwrapper. Returns result in
-+# func_to_host_path_result.
-+func_convert_path_msys_to_w32 ()
-+{
-+ $opt_debug
-+ func_to_host_path_result="$1"
-+ if test -n "$1"; then
-+ # Remove leading and trailing path separator characters from ARG. MSYS
-+ # behavior is inconsistent here; cygpath turns them into '.;' and ';.';
-+ # and winepath ignores them completely.
-+ func_stripname : : "$1"
-+ func_to_host_path_tmp1=$func_stripname_result
-+ func_convert_core_msys_to_w32 "$func_to_host_path_tmp1"
-+ func_to_host_path_result="$func_convert_core_msys_to_w32_result"
-+ func_convert_path_check : ";" \
-+ "$func_to_host_path_tmp1" "$func_to_host_path_result"
-+ func_convert_path_front_back_pathsep ":*" "*:" ";" "$1"
-+ fi
-+}
-+# end func_convert_path_msys_to_w32
-+
-+
-+# func_convert_path_cygwin_to_w32 ARG
-+# Convert path ARG from Cygwin to w32 format. Returns result in
-+# func_to_host_file_result.
-+func_convert_path_cygwin_to_w32 ()
-+{
-+ $opt_debug
-+ func_to_host_path_result="$1"
-+ if test -n "$1"; then
-+ # See func_convert_path_msys_to_w32:
-+ func_stripname : : "$1"
-+ func_to_host_path_tmp1=$func_stripname_result
-+ func_to_host_path_result=`cygpath -m -p "$func_to_host_path_tmp1"`
-+ func_convert_path_check : ";" \
-+ "$func_to_host_path_tmp1" "$func_to_host_path_result"
-+ func_convert_path_front_back_pathsep ":*" "*:" ";" "$1"
-+ fi
-+}
-+# end func_convert_path_cygwin_to_w32
-+
-+
-+# func_convert_path_nix_to_w32 ARG
-+# Convert path ARG from *nix to w32 format. Requires a wine environment and
-+# a working winepath. Returns result in func_to_host_file_result.
-+func_convert_path_nix_to_w32 ()
-+{
-+ $opt_debug
-+ func_to_host_path_result="$1"
-+ if test -n "$1"; then
-+ # See func_convert_path_msys_to_w32:
-+ func_stripname : : "$1"
-+ func_to_host_path_tmp1=$func_stripname_result
-+ func_convert_core_path_wine_to_w32 "$func_to_host_path_tmp1"
-+ func_to_host_path_result="$func_convert_core_path_wine_to_w32_result"
-+ func_convert_path_check : ";" \
-+ "$func_to_host_path_tmp1" "$func_to_host_path_result"
-+ func_convert_path_front_back_pathsep ":*" "*:" ";" "$1"
-+ fi
-+}
-+# end func_convert_path_nix_to_w32
-+
-+
-+# func_convert_path_msys_to_cygwin ARG
-+# Convert path ARG from MSYS to Cygwin format. Requires LT_CYGPATH set.
-+# Returns result in func_to_host_file_result.
-+func_convert_path_msys_to_cygwin ()
-+{
-+ $opt_debug
-+ func_to_host_path_result="$1"
-+ if test -n "$1"; then
-+ # See func_convert_path_msys_to_w32:
-+ func_stripname : : "$1"
-+ func_to_host_path_tmp1=$func_stripname_result
-+ func_convert_core_msys_to_w32 "$func_to_host_path_tmp1"
-+ func_cygpath -u -p "$func_convert_core_msys_to_w32_result"
-+ func_to_host_path_result="$func_cygpath_result"
-+ func_convert_path_check : : \
-+ "$func_to_host_path_tmp1" "$func_to_host_path_result"
-+ func_convert_path_front_back_pathsep ":*" "*:" : "$1"
-+ fi
-+}
-+# end func_convert_path_msys_to_cygwin
-+
-+
-+# func_convert_path_nix_to_cygwin ARG
-+# Convert path ARG from *nix to Cygwin format. Requires Cygwin installed in a
-+# a wine environment, working winepath, and LT_CYGPATH set. Returns result in
-+# func_to_host_file_result.
-+func_convert_path_nix_to_cygwin ()
-+{
-+ $opt_debug
-+ func_to_host_path_result="$1"
-+ if test -n "$1"; then
-+ # Remove leading and trailing path separator characters from
-+ # ARG. msys behavior is inconsistent here, cygpath turns them
-+ # into '.;' and ';.', and winepath ignores them completely.
-+ func_stripname : : "$1"
-+ func_to_host_path_tmp1=$func_stripname_result
-+ func_convert_core_path_wine_to_w32 "$func_to_host_path_tmp1"
-+ func_cygpath -u -p "$func_convert_core_path_wine_to_w32_result"
-+ func_to_host_path_result="$func_cygpath_result"
-+ func_convert_path_check : : \
-+ "$func_to_host_path_tmp1" "$func_to_host_path_result"
-+ func_convert_path_front_back_pathsep ":*" "*:" : "$1"
-+ fi
-+}
-+# end func_convert_path_nix_to_cygwin
-+
-+
- # func_mode_compile arg...
- func_mode_compile ()
- {
-@@ -1314,12 +1985,12 @@ func_mode_compile ()
- ;;
-
- -pie | -fpie | -fPIE)
-- pie_flag="$pie_flag $arg"
-+ func_append pie_flag " $arg"
- continue
- ;;
-
- -shared | -static | -prefer-pic | -prefer-non-pic)
-- later="$later $arg"
-+ func_append later " $arg"
- continue
- ;;
-
-@@ -1340,15 +2011,14 @@ func_mode_compile ()
- save_ifs="$IFS"; IFS=','
- for arg in $args; do
- IFS="$save_ifs"
-- func_quote_for_eval "$arg"
-- lastarg="$lastarg $func_quote_for_eval_result"
-+ func_append_quoted lastarg "$arg"
- done
- IFS="$save_ifs"
- func_stripname ' ' '' "$lastarg"
- lastarg=$func_stripname_result
-
- # Add the arguments to base_compile.
-- base_compile="$base_compile $lastarg"
-+ func_append base_compile " $lastarg"
- continue
- ;;
-
-@@ -1364,8 +2034,7 @@ func_mode_compile ()
- esac # case $arg_mode
-
- # Aesthetically quote the previous argument.
-- func_quote_for_eval "$lastarg"
-- base_compile="$base_compile $func_quote_for_eval_result"
-+ func_append_quoted base_compile "$lastarg"
- done # for arg
-
- case $arg_mode in
-@@ -1496,17 +2165,16 @@ compiler."
- $opt_dry_run || $RM $removelist
- exit $EXIT_FAILURE
- fi
-- removelist="$removelist $output_obj"
-+ func_append removelist " $output_obj"
- $ECHO "$srcfile" > "$lockfile"
- fi
-
- $opt_dry_run || $RM $removelist
-- removelist="$removelist $lockfile"
-+ func_append removelist " $lockfile"
- trap '$opt_dry_run || $RM $removelist; exit $EXIT_FAILURE' 1 2 15
-
-- if test -n "$fix_srcfile_path"; then
-- eval "srcfile=\"$fix_srcfile_path\""
-- fi
-+ func_to_tool_file "$srcfile" func_convert_file_msys_to_w32
-+ srcfile=$func_to_tool_file_result
- func_quote_for_eval "$srcfile"
- qsrcfile=$func_quote_for_eval_result
-
-@@ -1526,7 +2194,7 @@ compiler."
-
- if test -z "$output_obj"; then
- # Place PIC objects in $objdir
-- command="$command -o $lobj"
-+ func_append command " -o $lobj"
- fi
-
- func_show_eval_locale "$command" \
-@@ -1573,11 +2241,11 @@ compiler."
- command="$base_compile $qsrcfile $pic_flag"
- fi
- if test "$compiler_c_o" = yes; then
-- command="$command -o $obj"
-+ func_append command " -o $obj"
- fi
-
- # Suppress compiler output if we already did a PIC compilation.
-- command="$command$suppress_output"
-+ func_append command "$suppress_output"
- func_show_eval_locale "$command" \
- '$opt_dry_run || $RM $removelist; exit $EXIT_FAILURE'
-
-@@ -1622,13 +2290,13 @@ compiler."
- }
-
- $opt_help || {
-- test "$mode" = compile && func_mode_compile ${1+"$@"}
-+ test "$opt_mode" = compile && func_mode_compile ${1+"$@"}
- }
-
- func_mode_help ()
- {
- # We need to display help for each of the modes.
-- case $mode in
-+ case $opt_mode in
- "")
- # Generic help is extracted from the usage comments
- # at the start of this file.
-@@ -1659,8 +2327,8 @@ This mode accepts the following additional options:
-
- -o OUTPUT-FILE set the output file name to OUTPUT-FILE
- -no-suppress do not suppress compiler output for multiple passes
-- -prefer-pic try to building PIC objects only
-- -prefer-non-pic try to building non-PIC objects only
-+ -prefer-pic try to build PIC objects only
-+ -prefer-non-pic try to build non-PIC objects only
- -shared do not build a \`.o' file suitable for static linking
- -static only build a \`.o' file suitable for static linking
- -Wc,FLAG pass FLAG directly to the compiler
-@@ -1804,7 +2472,7 @@ Otherwise, only FILE itself is deleted using RM."
- ;;
-
- *)
-- func_fatal_help "invalid operation mode \`$mode'"
-+ func_fatal_help "invalid operation mode \`$opt_mode'"
- ;;
- esac
-
-@@ -1819,13 +2487,13 @@ if $opt_help; then
- else
- {
- func_help noexit
-- for mode in compile link execute install finish uninstall clean; do
-+ for opt_mode in compile link execute install finish uninstall clean; do
- func_mode_help
- done
- } | sed -n '1p; 2,$s/^Usage:/ or: /p'
- {
- func_help noexit
-- for mode in compile link execute install finish uninstall clean; do
-+ for opt_mode in compile link execute install finish uninstall clean; do
- echo
- func_mode_help
- done
-@@ -1854,13 +2522,16 @@ func_mode_execute ()
- func_fatal_help "you must specify a COMMAND"
-
- # Handle -dlopen flags immediately.
-- for file in $execute_dlfiles; do
-+ for file in $opt_dlopen; do
- test -f "$file" \
- || func_fatal_help "\`$file' is not a file"
-
- dir=
- case $file in
- *.la)
-+ func_resolve_sysroot "$file"
-+ file=$func_resolve_sysroot_result
-+
- # Check to see that this really is a libtool archive.
- func_lalib_unsafe_p "$file" \
- || func_fatal_help "\`$lib' is not a valid libtool archive"
-@@ -1882,7 +2553,7 @@ func_mode_execute ()
- dir="$func_dirname_result"
-
- if test -f "$dir/$objdir/$dlname"; then
-- dir="$dir/$objdir"
-+ func_append dir "/$objdir"
- else
- if test ! -f "$dir/$dlname"; then
- func_fatal_error "cannot find \`$dlname' in \`$dir' or \`$dir/$objdir'"
-@@ -1907,10 +2578,10 @@ func_mode_execute ()
- test -n "$absdir" && dir="$absdir"
-
- # Now add the directory to shlibpath_var.
-- if eval test -z \"\$$shlibpath_var\"; then
-- eval $shlibpath_var=\$dir
-+ if eval "test -z \"\$$shlibpath_var\""; then
-+ eval "$shlibpath_var=\"\$dir\""
- else
-- eval $shlibpath_var=\$dir:\$$shlibpath_var
-+ eval "$shlibpath_var=\"\$dir:\$$shlibpath_var\""
- fi
- done
-
-@@ -1939,8 +2610,7 @@ func_mode_execute ()
- ;;
- esac
- # Quote arguments (to preserve shell metacharacters).
-- func_quote_for_eval "$file"
-- args="$args $func_quote_for_eval_result"
-+ func_append_quoted args "$file"
- done
-
- if test "X$opt_dry_run" = Xfalse; then
-@@ -1972,22 +2642,59 @@ func_mode_execute ()
- fi
- }
-
--test "$mode" = execute && func_mode_execute ${1+"$@"}
-+test "$opt_mode" = execute && func_mode_execute ${1+"$@"}
-
-
- # func_mode_finish arg...
- func_mode_finish ()
- {
- $opt_debug
-- libdirs="$nonopt"
-+ libs=
-+ libdirs=
- admincmds=
-
-- if test -n "$finish_cmds$finish_eval" && test -n "$libdirs"; then
-- for dir
-- do
-- libdirs="$libdirs $dir"
-- done
-+ for opt in "$nonopt" ${1+"$@"}
-+ do
-+ if test -d "$opt"; then
-+ func_append libdirs " $opt"
-
-+ elif test -f "$opt"; then
-+ if func_lalib_unsafe_p "$opt"; then
-+ func_append libs " $opt"
-+ else
-+ func_warning "\`$opt' is not a valid libtool archive"
-+ fi
-+
-+ else
-+ func_fatal_error "invalid argument \`$opt'"
-+ fi
-+ done
-+
-+ if test -n "$libs"; then
-+ if test -n "$lt_sysroot"; then
-+ sysroot_regex=`$ECHO "$lt_sysroot" | $SED "$sed_make_literal_regex"`
-+ sysroot_cmd="s/\([ ']\)$sysroot_regex/\1/g;"
-+ else
-+ sysroot_cmd=
-+ fi
-+
-+ # Remove sysroot references
-+ if $opt_dry_run; then
-+ for lib in $libs; do
-+ echo "removing references to $lt_sysroot and \`=' prefixes from $lib"
-+ done
-+ else
-+ tmpdir=`func_mktempdir`
-+ for lib in $libs; do
-+ sed -e "${sysroot_cmd} s/\([ ']-[LR]\)=/\1/g; s/\([ ']\)=/\1/g" $lib \
-+ > $tmpdir/tmp-la
-+ mv -f $tmpdir/tmp-la $lib
-+ done
-+ ${RM}r "$tmpdir"
-+ fi
-+ fi
-+
-+ if test -n "$finish_cmds$finish_eval" && test -n "$libdirs"; then
- for libdir in $libdirs; do
- if test -n "$finish_cmds"; then
- # Do each command in the finish commands.
-@@ -1997,7 +2704,7 @@ func_mode_finish ()
- if test -n "$finish_eval"; then
- # Do the single finish_eval.
- eval cmds=\"$finish_eval\"
-- $opt_dry_run || eval "$cmds" || admincmds="$admincmds
-+ $opt_dry_run || eval "$cmds" || func_append admincmds "
- $cmds"
- fi
- done
-@@ -2006,53 +2713,55 @@ func_mode_finish ()
- # Exit here if they wanted silent mode.
- $opt_silent && exit $EXIT_SUCCESS
-
-- echo "----------------------------------------------------------------------"
-- echo "Libraries have been installed in:"
-- for libdir in $libdirs; do
-- $ECHO " $libdir"
-- done
-- echo
-- echo "If you ever happen to want to link against installed libraries"
-- echo "in a given directory, LIBDIR, you must either use libtool, and"
-- echo "specify the full pathname of the library, or use the \`-LLIBDIR'"
-- echo "flag during linking and do at least one of the following:"
-- if test -n "$shlibpath_var"; then
-- echo " - add LIBDIR to the \`$shlibpath_var' environment variable"
-- echo " during execution"
-- fi
-- if test -n "$runpath_var"; then
-- echo " - add LIBDIR to the \`$runpath_var' environment variable"
-- echo " during linking"
-- fi
-- if test -n "$hardcode_libdir_flag_spec"; then
-- libdir=LIBDIR
-- eval "flag=\"$hardcode_libdir_flag_spec\""
-+ if test -n "$finish_cmds$finish_eval" && test -n "$libdirs"; then
-+ echo "----------------------------------------------------------------------"
-+ echo "Libraries have been installed in:"
-+ for libdir in $libdirs; do
-+ $ECHO " $libdir"
-+ done
-+ echo
-+ echo "If you ever happen to want to link against installed libraries"
-+ echo "in a given directory, LIBDIR, you must either use libtool, and"
-+ echo "specify the full pathname of the library, or use the \`-LLIBDIR'"
-+ echo "flag during linking and do at least one of the following:"
-+ if test -n "$shlibpath_var"; then
-+ echo " - add LIBDIR to the \`$shlibpath_var' environment variable"
-+ echo " during execution"
-+ fi
-+ if test -n "$runpath_var"; then
-+ echo " - add LIBDIR to the \`$runpath_var' environment variable"
-+ echo " during linking"
-+ fi
-+ if test -n "$hardcode_libdir_flag_spec"; then
-+ libdir=LIBDIR
-+ eval flag=\"$hardcode_libdir_flag_spec\"
-
-- $ECHO " - use the \`$flag' linker flag"
-- fi
-- if test -n "$admincmds"; then
-- $ECHO " - have your system administrator run these commands:$admincmds"
-- fi
-- if test -f /etc/ld.so.conf; then
-- echo " - have your system administrator add LIBDIR to \`/etc/ld.so.conf'"
-- fi
-- echo
-+ $ECHO " - use the \`$flag' linker flag"
-+ fi
-+ if test -n "$admincmds"; then
-+ $ECHO " - have your system administrator run these commands:$admincmds"
-+ fi
-+ if test -f /etc/ld.so.conf; then
-+ echo " - have your system administrator add LIBDIR to \`/etc/ld.so.conf'"
-+ fi
-+ echo
-
-- echo "See any operating system documentation about shared libraries for"
-- case $host in
-- solaris2.[6789]|solaris2.1[0-9])
-- echo "more information, such as the ld(1), crle(1) and ld.so(8) manual"
-- echo "pages."
-- ;;
-- *)
-- echo "more information, such as the ld(1) and ld.so(8) manual pages."
-- ;;
-- esac
-- echo "----------------------------------------------------------------------"
-+ echo "See any operating system documentation about shared libraries for"
-+ case $host in
-+ solaris2.[6789]|solaris2.1[0-9])
-+ echo "more information, such as the ld(1), crle(1) and ld.so(8) manual"
-+ echo "pages."
-+ ;;
-+ *)
-+ echo "more information, such as the ld(1) and ld.so(8) manual pages."
-+ ;;
-+ esac
-+ echo "----------------------------------------------------------------------"
-+ fi
- exit $EXIT_SUCCESS
- }
-
--test "$mode" = finish && func_mode_finish ${1+"$@"}
-+test "$opt_mode" = finish && func_mode_finish ${1+"$@"}
-
-
- # func_mode_install arg...
-@@ -2077,7 +2786,7 @@ func_mode_install ()
- # The real first argument should be the name of the installation program.
- # Aesthetically quote it.
- func_quote_for_eval "$arg"
-- install_prog="$install_prog$func_quote_for_eval_result"
-+ func_append install_prog "$func_quote_for_eval_result"
- install_shared_prog=$install_prog
- case " $install_prog " in
- *[\\\ /]cp\ *) install_cp=: ;;
-@@ -2097,7 +2806,7 @@ func_mode_install ()
- do
- arg2=
- if test -n "$dest"; then
-- files="$files $dest"
-+ func_append files " $dest"
- dest=$arg
- continue
- fi
-@@ -2135,11 +2844,11 @@ func_mode_install ()
-
- # Aesthetically quote the argument.
- func_quote_for_eval "$arg"
-- install_prog="$install_prog $func_quote_for_eval_result"
-+ func_append install_prog " $func_quote_for_eval_result"
- if test -n "$arg2"; then
- func_quote_for_eval "$arg2"
- fi
-- install_shared_prog="$install_shared_prog $func_quote_for_eval_result"
-+ func_append install_shared_prog " $func_quote_for_eval_result"
- done
-
- test -z "$install_prog" && \
-@@ -2151,7 +2860,7 @@ func_mode_install ()
- if test -n "$install_override_mode" && $no_mode; then
- if $install_cp; then :; else
- func_quote_for_eval "$install_override_mode"
-- install_shared_prog="$install_shared_prog -m $func_quote_for_eval_result"
-+ func_append install_shared_prog " -m $func_quote_for_eval_result"
- fi
- fi
-
-@@ -2209,10 +2918,13 @@ func_mode_install ()
- case $file in
- *.$libext)
- # Do the static libraries later.
-- staticlibs="$staticlibs $file"
-+ func_append staticlibs " $file"
- ;;
-
- *.la)
-+ func_resolve_sysroot "$file"
-+ file=$func_resolve_sysroot_result
-+
- # Check to see that this really is a libtool archive.
- func_lalib_unsafe_p "$file" \
- || func_fatal_help "\`$file' is not a valid libtool archive"
-@@ -2226,23 +2938,30 @@ func_mode_install ()
- if test "X$destdir" = "X$libdir"; then
- case "$current_libdirs " in
- *" $libdir "*) ;;
-- *) current_libdirs="$current_libdirs $libdir" ;;
-+ *) func_append current_libdirs " $libdir" ;;
- esac
- else
- # Note the libdir as a future libdir.
- case "$future_libdirs " in
- *" $libdir "*) ;;
-- *) future_libdirs="$future_libdirs $libdir" ;;
-+ *) func_append future_libdirs " $libdir" ;;
- esac
- fi
-
- func_dirname "$file" "/" ""
- dir="$func_dirname_result"
-- dir="$dir$objdir"
-+ func_append dir "$objdir"
-
- if test -n "$relink_command"; then
-+ # Strip any trailing slash from the destination.
-+ func_stripname '' '/' "$libdir"
-+ destlibdir=$func_stripname_result
-+
-+ func_stripname '' '/' "$destdir"
-+ s_destdir=$func_stripname_result
-+
- # Determine the prefix the user has applied to our future dir.
-- inst_prefix_dir=`$ECHO "$destdir" | $SED -e "s%$libdir\$%%"`
-+ inst_prefix_dir=`$ECHO "X$s_destdir" | $Xsed -e "s%$destlibdir\$%%"`
-
- # Don't allow the user to place us outside of our expected
- # location b/c this prevents finding dependent libraries that
-@@ -2315,7 +3034,7 @@ func_mode_install ()
- func_show_eval "$install_prog $instname $destdir/$name" 'exit $?'
-
- # Maybe install the static library, too.
-- test -n "$old_library" && staticlibs="$staticlibs $dir/$old_library"
-+ test -n "$old_library" && func_append staticlibs " $dir/$old_library"
- ;;
-
- *.lo)
-@@ -2503,7 +3222,7 @@ func_mode_install ()
- test -n "$future_libdirs" && \
- func_warning "remember to run \`$progname --finish$future_libdirs'"
-
-- if test -n "$current_libdirs" && $opt_finish; then
-+ if test -n "$current_libdirs"; then
- # Maybe just do a dry run.
- $opt_dry_run && current_libdirs=" -n$current_libdirs"
- exec_cmd='$SHELL $progpath $preserve_args --finish$current_libdirs'
-@@ -2512,7 +3231,7 @@ func_mode_install ()
- fi
- }
-
--test "$mode" = install && func_mode_install ${1+"$@"}
-+test "$opt_mode" = install && func_mode_install ${1+"$@"}
-
-
- # func_generate_dlsyms outputname originator pic_p
-@@ -2559,6 +3278,18 @@ extern \"C\" {
- #pragma GCC diagnostic ignored \"-Wstrict-prototypes\"
- #endif
-
-+/* Keep this code in sync between libtool.m4, ltmain, lt_system.h, and tests. */
-+#if defined(_WIN32) || defined(__CYGWIN__) || defined(_WIN32_WCE)
-+/* DATA imports from DLLs on WIN32 con't be const, because runtime
-+ relocations are performed -- see ld's documentation on pseudo-relocs. */
-+# define LT_DLSYM_CONST
-+#elif defined(__osf__)
-+/* This system does not cope well with relocations in const data. */
-+# define LT_DLSYM_CONST
-+#else
-+# define LT_DLSYM_CONST const
-+#endif
-+
- /* External symbol declarations for the compiler. */\
- "
-
-@@ -2570,21 +3301,22 @@ extern \"C\" {
- # Add our own program objects to the symbol list.
- progfiles=`$ECHO "$objs$old_deplibs" | $SP2NL | $SED "$lo2o" | $NL2SP`
- for progfile in $progfiles; do
-- func_verbose "extracting global C symbols from \`$progfile'"
-- $opt_dry_run || eval "$NM $progfile | $global_symbol_pipe >> '$nlist'"
-+ func_to_tool_file "$progfile" func_convert_file_msys_to_w32
-+ func_verbose "extracting global C symbols from \`$func_to_tool_file_result'"
-+ $opt_dry_run || eval "$NM $func_to_tool_file_result | $global_symbol_pipe >> '$nlist'"
- done
-
- if test -n "$exclude_expsyms"; then
- $opt_dry_run || {
-- $EGREP -v " ($exclude_expsyms)$" "$nlist" > "$nlist"T
-- $MV "$nlist"T "$nlist"
-+ eval '$EGREP -v " ($exclude_expsyms)$" "$nlist" > "$nlist"T'
-+ eval '$MV "$nlist"T "$nlist"'
- }
- fi
-
- if test -n "$export_symbols_regex"; then
- $opt_dry_run || {
-- $EGREP -e "$export_symbols_regex" "$nlist" > "$nlist"T
-- $MV "$nlist"T "$nlist"
-+ eval '$EGREP -e "$export_symbols_regex" "$nlist" > "$nlist"T'
-+ eval '$MV "$nlist"T "$nlist"'
- }
- fi
-
-@@ -2593,23 +3325,23 @@ extern \"C\" {
- export_symbols="$output_objdir/$outputname.exp"
- $opt_dry_run || {
- $RM $export_symbols
-- ${SED} -n -e '/^: @PROGRAM@ $/d' -e 's/^.* \(.*\)$/\1/p' < "$nlist" > "$export_symbols"
-+ eval "${SED} -n -e '/^: @PROGRAM@ $/d' -e 's/^.* \(.*\)$/\1/p' "'< "$nlist" > "$export_symbols"'
- case $host in
- *cygwin* | *mingw* | *cegcc* )
-- echo EXPORTS > "$output_objdir/$outputname.def"
-- cat "$export_symbols" >> "$output_objdir/$outputname.def"
-+ eval "echo EXPORTS "'> "$output_objdir/$outputname.def"'
-+ eval 'cat "$export_symbols" >> "$output_objdir/$outputname.def"'
- ;;
- esac
- }
- else
- $opt_dry_run || {
-- ${SED} -e 's/\([].[*^$]\)/\\\1/g' -e 's/^/ /' -e 's/$/$/' < "$export_symbols" > "$output_objdir/$outputname.exp"
-- $GREP -f "$output_objdir/$outputname.exp" < "$nlist" > "$nlist"T
-- $MV "$nlist"T "$nlist"
-+ eval "${SED} -e 's/\([].[*^$]\)/\\\\\1/g' -e 's/^/ /' -e 's/$/$/'"' < "$export_symbols" > "$output_objdir/$outputname.exp"'
-+ eval '$GREP -f "$output_objdir/$outputname.exp" < "$nlist" > "$nlist"T'
-+ eval '$MV "$nlist"T "$nlist"'
- case $host in
- *cygwin* | *mingw* | *cegcc* )
-- echo EXPORTS > "$output_objdir/$outputname.def"
-- cat "$nlist" >> "$output_objdir/$outputname.def"
-+ eval "echo EXPORTS "'> "$output_objdir/$outputname.def"'
-+ eval 'cat "$nlist" >> "$output_objdir/$outputname.def"'
- ;;
- esac
- }
-@@ -2620,10 +3352,52 @@ extern \"C\" {
- func_verbose "extracting global C symbols from \`$dlprefile'"
- func_basename "$dlprefile"
- name="$func_basename_result"
-- $opt_dry_run || {
-- $ECHO ": $name " >> "$nlist"
-- eval "$NM $dlprefile 2>/dev/null | $global_symbol_pipe >> '$nlist'"
-- }
-+ case $host in
-+ *cygwin* | *mingw* | *cegcc* )
-+ # if an import library, we need to obtain dlname
-+ if func_win32_import_lib_p "$dlprefile"; then
-+ func_tr_sh "$dlprefile"
-+ eval "curr_lafile=\$libfile_$func_tr_sh_result"
-+ dlprefile_dlbasename=""
-+ if test -n "$curr_lafile" && func_lalib_p "$curr_lafile"; then
-+ # Use subshell, to avoid clobbering current variable values
-+ dlprefile_dlname=`source "$curr_lafile" && echo "$dlname"`
-+ if test -n "$dlprefile_dlname" ; then
-+ func_basename "$dlprefile_dlname"
-+ dlprefile_dlbasename="$func_basename_result"
-+ else
-+ # no lafile. user explicitly requested -dlpreopen <import library>.
-+ $sharedlib_from_linklib_cmd "$dlprefile"
-+ dlprefile_dlbasename=$sharedlib_from_linklib_result
-+ fi
-+ fi
-+ $opt_dry_run || {
-+ if test -n "$dlprefile_dlbasename" ; then
-+ eval '$ECHO ": $dlprefile_dlbasename" >> "$nlist"'
-+ else
-+ func_warning "Could not compute DLL name from $name"
-+ eval '$ECHO ": $name " >> "$nlist"'
-+ fi
-+ func_to_tool_file "$dlprefile" func_convert_file_msys_to_w32
-+ eval "$NM \"$func_to_tool_file_result\" 2>/dev/null | $global_symbol_pipe |
-+ $SED -e '/I __imp/d' -e 's/I __nm_/D /;s/_nm__//' >> '$nlist'"
-+ }
-+ else # not an import lib
-+ $opt_dry_run || {
-+ eval '$ECHO ": $name " >> "$nlist"'
-+ func_to_tool_file "$dlprefile" func_convert_file_msys_to_w32
-+ eval "$NM \"$func_to_tool_file_result\" 2>/dev/null | $global_symbol_pipe >> '$nlist'"
-+ }
-+ fi
-+ ;;
-+ *)
-+ $opt_dry_run || {
-+ eval '$ECHO ": $name " >> "$nlist"'
-+ func_to_tool_file "$dlprefile" func_convert_file_msys_to_w32
-+ eval "$NM \"$func_to_tool_file_result\" 2>/dev/null | $global_symbol_pipe >> '$nlist'"
-+ }
-+ ;;
-+ esac
- done
-
- $opt_dry_run || {
-@@ -2661,26 +3435,9 @@ typedef struct {
- const char *name;
- void *address;
- } lt_dlsymlist;
--"
-- case $host in
-- *cygwin* | *mingw* | *cegcc* )
-- echo >> "$output_objdir/$my_dlsyms" "\
--/* DATA imports from DLLs on WIN32 con't be const, because
-- runtime relocations are performed -- see ld's documentation
-- on pseudo-relocs. */"
-- lt_dlsym_const= ;;
-- *osf5*)
-- echo >> "$output_objdir/$my_dlsyms" "\
--/* This system does not cope well with relocations in const data */"
-- lt_dlsym_const= ;;
-- *)
-- lt_dlsym_const=const ;;
-- esac
--
-- echo >> "$output_objdir/$my_dlsyms" "\
--extern $lt_dlsym_const lt_dlsymlist
-+extern LT_DLSYM_CONST lt_dlsymlist
- lt_${my_prefix}_LTX_preloaded_symbols[];
--$lt_dlsym_const lt_dlsymlist
-+LT_DLSYM_CONST lt_dlsymlist
- lt_${my_prefix}_LTX_preloaded_symbols[] =
- {\
- { \"$my_originator\", (void *) 0 },"
-@@ -2736,7 +3493,7 @@ static const void *lt_preloaded_setup() {
- for arg in $LTCFLAGS; do
- case $arg in
- -pie | -fpie | -fPIE) ;;
-- *) symtab_cflags="$symtab_cflags $arg" ;;
-+ *) func_append symtab_cflags " $arg" ;;
- esac
- done
-
-@@ -2796,9 +3553,11 @@ func_win32_libid ()
- win32_libid_type="x86 archive import"
- ;;
- *ar\ archive*) # could be an import, or static
-- if $OBJDUMP -f "$1" | $SED -e '10q' 2>/dev/null |
-- $EGREP 'file format (pe-i386(.*architecture: i386)?|pe-arm-wince|pe-x86-64)' >/dev/null; then
-- win32_nmres=`$NM -f posix -A "$1" |
-+ # Keep the egrep pattern in sync with the one in _LT_CHECK_MAGIC_METHOD.
-+ if eval $OBJDUMP -f $1 | $SED -e '10q' 2>/dev/null |
-+ $EGREP 'file format (pei*-i386(.*architecture: i386)?|pe-arm-wince|pe-x86-64)' >/dev/null; then
-+ func_to_tool_file "$1" func_convert_file_msys_to_w32
-+ win32_nmres=`eval $NM -f posix -A \"$func_to_tool_file_result\" |
- $SED -n -e '
- 1,100{
- / I /{
-@@ -2827,6 +3586,131 @@ func_win32_libid ()
- $ECHO "$win32_libid_type"
- }
-
-+# func_cygming_dll_for_implib ARG
-+#
-+# Platform-specific function to extract the
-+# name of the DLL associated with the specified
-+# import library ARG.
-+# Invoked by eval'ing the libtool variable
-+# $sharedlib_from_linklib_cmd
-+# Result is available in the variable
-+# $sharedlib_from_linklib_result
-+func_cygming_dll_for_implib ()
-+{
-+ $opt_debug
-+ sharedlib_from_linklib_result=`$DLLTOOL --identify-strict --identify "$1"`
-+}
-+
-+# func_cygming_dll_for_implib_fallback_core SECTION_NAME LIBNAMEs
-+#
-+# The is the core of a fallback implementation of a
-+# platform-specific function to extract the name of the
-+# DLL associated with the specified import library LIBNAME.
-+#
-+# SECTION_NAME is either .idata$6 or .idata$7, depending
-+# on the platform and compiler that created the implib.
-+#
-+# Echos the name of the DLL associated with the
-+# specified import library.
-+func_cygming_dll_for_implib_fallback_core ()
-+{
-+ $opt_debug
-+ match_literal=`$ECHO "$1" | $SED "$sed_make_literal_regex"`
-+ $OBJDUMP -s --section "$1" "$2" 2>/dev/null |
-+ $SED '/^Contents of section '"$match_literal"':/{
-+ # Place marker at beginning of archive member dllname section
-+ s/.*/====MARK====/
-+ p
-+ d
-+ }
-+ # These lines can sometimes be longer than 43 characters, but
-+ # are always uninteresting
-+ /:[ ]*file format pe[i]\{,1\}-/d
-+ /^In archive [^:]*:/d
-+ # Ensure marker is printed
-+ /^====MARK====/p
-+ # Remove all lines with less than 43 characters
-+ /^.\{43\}/!d
-+ # From remaining lines, remove first 43 characters
-+ s/^.\{43\}//' |
-+ $SED -n '
-+ # Join marker and all lines until next marker into a single line
-+ /^====MARK====/ b para
-+ H
-+ $ b para
-+ b
-+ :para
-+ x
-+ s/\n//g
-+ # Remove the marker
-+ s/^====MARK====//
-+ # Remove trailing dots and whitespace
-+ s/[\. \t]*$//
-+ # Print
-+ /./p' |
-+ # we now have a list, one entry per line, of the stringified
-+ # contents of the appropriate section of all members of the
-+ # archive which possess that section. Heuristic: eliminate
-+ # all those which have a first or second character that is
-+ # a '.' (that is, objdump's representation of an unprintable
-+ # character.) This should work for all archives with less than
-+ # 0x302f exports -- but will fail for DLLs whose name actually
-+ # begins with a literal '.' or a single character followed by
-+ # a '.'.
-+ #
-+ # Of those that remain, print the first one.
-+ $SED -e '/^\./d;/^.\./d;q'
-+}
-+
-+# func_cygming_gnu_implib_p ARG
-+# This predicate returns with zero status (TRUE) if
-+# ARG is a GNU/binutils-style import library. Returns
-+# with nonzero status (FALSE) otherwise.
-+func_cygming_gnu_implib_p ()
-+{
-+ $opt_debug
-+ func_to_tool_file "$1" func_convert_file_msys_to_w32
-+ func_cygming_gnu_implib_tmp=`$NM "$func_to_tool_file_result" | eval "$global_symbol_pipe" | $EGREP ' (_head_[A-Za-z0-9_]+_[ad]l*|[A-Za-z0-9_]+_[ad]l*_iname)$'`
-+ test -n "$func_cygming_gnu_implib_tmp"
-+}
-+
-+# func_cygming_ms_implib_p ARG
-+# This predicate returns with zero status (TRUE) if
-+# ARG is an MS-style import library. Returns
-+# with nonzero status (FALSE) otherwise.
-+func_cygming_ms_implib_p ()
-+{
-+ $opt_debug
-+ func_to_tool_file "$1" func_convert_file_msys_to_w32
-+ func_cygming_ms_implib_tmp=`$NM "$func_to_tool_file_result" | eval "$global_symbol_pipe" | $GREP '_NULL_IMPORT_DESCRIPTOR'`
-+ test -n "$func_cygming_ms_implib_tmp"
-+}
-+
-+# func_cygming_dll_for_implib_fallback ARG
-+# Platform-specific function to extract the
-+# name of the DLL associated with the specified
-+# import library ARG.
-+#
-+# This fallback implementation is for use when $DLLTOOL
-+# does not support the --identify-strict option.
-+# Invoked by eval'ing the libtool variable
-+# $sharedlib_from_linklib_cmd
-+# Result is available in the variable
-+# $sharedlib_from_linklib_result
-+func_cygming_dll_for_implib_fallback ()
-+{
-+ $opt_debug
-+ if func_cygming_gnu_implib_p "$1" ; then
-+ # binutils import library
-+ sharedlib_from_linklib_result=`func_cygming_dll_for_implib_fallback_core '.idata$7' "$1"`
-+ elif func_cygming_ms_implib_p "$1" ; then
-+ # ms-generated import library
-+ sharedlib_from_linklib_result=`func_cygming_dll_for_implib_fallback_core '.idata$6' "$1"`
-+ else
-+ # unknown
-+ sharedlib_from_linklib_result=""
-+ fi
-+}
-
-
- # func_extract_an_archive dir oldlib
-@@ -2917,7 +3801,7 @@ func_extract_archives ()
- darwin_file=
- darwin_files=
- for darwin_file in $darwin_filelist; do
-- darwin_files=`find unfat-$$ -name $darwin_file -print | $NL2SP`
-+ darwin_files=`find unfat-$$ -name $darwin_file -print | sort | $NL2SP`
- $LIPO -create -output "$darwin_file" $darwin_files
- done # $darwin_filelist
- $RM -rf unfat-$$
-@@ -2932,7 +3816,7 @@ func_extract_archives ()
- func_extract_an_archive "$my_xdir" "$my_xabs"
- ;;
- esac
-- my_oldobjs="$my_oldobjs "`find $my_xdir -name \*.$objext -print -o -name \*.lo -print | $NL2SP`
-+ my_oldobjs="$my_oldobjs "`find $my_xdir -name \*.$objext -print -o -name \*.lo -print | sort | $NL2SP`
- done
-
- func_extract_archives_result="$my_oldobjs"
-@@ -3014,7 +3898,110 @@ func_fallback_echo ()
- _LTECHO_EOF'
- }
- ECHO=\"$qECHO\"
-- fi\
-+ fi
-+
-+# Very basic option parsing. These options are (a) specific to
-+# the libtool wrapper, (b) are identical between the wrapper
-+# /script/ and the wrapper /executable/ which is used only on
-+# windows platforms, and (c) all begin with the string "--lt-"
-+# (application programs are unlikely to have options which match
-+# this pattern).
-+#
-+# There are only two supported options: --lt-debug and
-+# --lt-dump-script. There is, deliberately, no --lt-help.
-+#
-+# The first argument to this parsing function should be the
-+# script's $0 value, followed by "$@".
-+lt_option_debug=
-+func_parse_lt_options ()
-+{
-+ lt_script_arg0=\$0
-+ shift
-+ for lt_opt
-+ do
-+ case \"\$lt_opt\" in
-+ --lt-debug) lt_option_debug=1 ;;
-+ --lt-dump-script)
-+ lt_dump_D=\`\$ECHO \"X\$lt_script_arg0\" | $SED -e 's/^X//' -e 's%/[^/]*$%%'\`
-+ test \"X\$lt_dump_D\" = \"X\$lt_script_arg0\" && lt_dump_D=.
-+ lt_dump_F=\`\$ECHO \"X\$lt_script_arg0\" | $SED -e 's/^X//' -e 's%^.*/%%'\`
-+ cat \"\$lt_dump_D/\$lt_dump_F\"
-+ exit 0
-+ ;;
-+ --lt-*)
-+ \$ECHO \"Unrecognized --lt- option: '\$lt_opt'\" 1>&2
-+ exit 1
-+ ;;
-+ esac
-+ done
-+
-+ # Print the debug banner immediately:
-+ if test -n \"\$lt_option_debug\"; then
-+ echo \"${outputname}:${output}:\${LINENO}: libtool wrapper (GNU $PACKAGE$TIMESTAMP) $VERSION\" 1>&2
-+ fi
-+}
-+
-+# Used when --lt-debug. Prints its arguments to stdout
-+# (redirection is the responsibility of the caller)
-+func_lt_dump_args ()
-+{
-+ lt_dump_args_N=1;
-+ for lt_arg
-+ do
-+ \$ECHO \"${outputname}:${output}:\${LINENO}: newargv[\$lt_dump_args_N]: \$lt_arg\"
-+ lt_dump_args_N=\`expr \$lt_dump_args_N + 1\`
-+ done
-+}
-+
-+# Core function for launching the target application
-+func_exec_program_core ()
-+{
-+"
-+ case $host in
-+ # Backslashes separate directories on plain windows
-+ *-*-mingw | *-*-os2* | *-cegcc*)
-+ $ECHO "\
-+ if test -n \"\$lt_option_debug\"; then
-+ \$ECHO \"${outputname}:${output}:\${LINENO}: newargv[0]: \$progdir\\\\\$program\" 1>&2
-+ func_lt_dump_args \${1+\"\$@\"} 1>&2
-+ fi
-+ exec \"\$progdir\\\\\$program\" \${1+\"\$@\"}
-+"
-+ ;;
-+
-+ *)
-+ $ECHO "\
-+ if test -n \"\$lt_option_debug\"; then
-+ \$ECHO \"${outputname}:${output}:\${LINENO}: newargv[0]: \$progdir/\$program\" 1>&2
-+ func_lt_dump_args \${1+\"\$@\"} 1>&2
-+ fi
-+ exec \"\$progdir/\$program\" \${1+\"\$@\"}
-+"
-+ ;;
-+ esac
-+ $ECHO "\
-+ \$ECHO \"\$0: cannot exec \$program \$*\" 1>&2
-+ exit 1
-+}
-+
-+# A function to encapsulate launching the target application
-+# Strips options in the --lt-* namespace from \$@ and
-+# launches target application with the remaining arguments.
-+func_exec_program ()
-+{
-+ for lt_wr_arg
-+ do
-+ case \$lt_wr_arg in
-+ --lt-*) ;;
-+ *) set x \"\$@\" \"\$lt_wr_arg\"; shift;;
-+ esac
-+ shift
-+ done
-+ func_exec_program_core \${1+\"\$@\"}
-+}
-+
-+ # Parse options
-+ func_parse_lt_options \"\$0\" \${1+\"\$@\"}
-
- # Find the directory that this script lives in.
- thisdir=\`\$ECHO \"\$file\" | $SED 's%/[^/]*$%%'\`
-@@ -3078,7 +4065,7 @@ _LTECHO_EOF'
-
- # relink executable if necessary
- if test -n \"\$relink_command\"; then
-- if relink_command_output=\`eval \"\$relink_command\" 2>&1\`; then :
-+ if relink_command_output=\`eval \$relink_command 2>&1\`; then :
- else
- $ECHO \"\$relink_command_output\" >&2
- $RM \"\$progdir/\$file\"
-@@ -3102,6 +4089,18 @@ _LTECHO_EOF'
-
- if test -f \"\$progdir/\$program\"; then"
-
-+ # fixup the dll searchpath if we need to.
-+ #
-+ # Fix the DLL searchpath if we need to. Do this before prepending
-+ # to shlibpath, because on Windows, both are PATH and uninstalled
-+ # libraries must come first.
-+ if test -n "$dllsearchpath"; then
-+ $ECHO "\
-+ # Add the dll search path components to the executable PATH
-+ PATH=$dllsearchpath:\$PATH
-+"
-+ fi
-+
- # Export our shlibpath_var if we have one.
- if test "$shlibpath_overrides_runpath" = yes && test -n "$shlibpath_var" && test -n "$temp_rpath"; then
- $ECHO "\
-@@ -3116,35 +4115,10 @@ _LTECHO_EOF'
- "
- fi
-
-- # fixup the dll searchpath if we need to.
-- if test -n "$dllsearchpath"; then
-- $ECHO "\
-- # Add the dll search path components to the executable PATH
-- PATH=$dllsearchpath:\$PATH
--"
-- fi
--
- $ECHO "\
- if test \"\$libtool_execute_magic\" != \"$magic\"; then
- # Run the actual program with our arguments.
--"
-- case $host in
-- # Backslashes separate directories on plain windows
-- *-*-mingw | *-*-os2* | *-cegcc*)
-- $ECHO "\
-- exec \"\$progdir\\\\\$program\" \${1+\"\$@\"}
--"
-- ;;
--
-- *)
-- $ECHO "\
-- exec \"\$progdir/\$program\" \${1+\"\$@\"}
--"
-- ;;
-- esac
-- $ECHO "\
-- \$ECHO \"\$0: cannot exec \$program \$*\" 1>&2
-- exit 1
-+ func_exec_program \${1+\"\$@\"}
- fi
- else
- # The program doesn't exist.
-@@ -3158,166 +4132,6 @@ fi\
- }
-
-
--# func_to_host_path arg
--#
--# Convert paths to host format when used with build tools.
--# Intended for use with "native" mingw (where libtool itself
--# is running under the msys shell), or in the following cross-
--# build environments:
--# $build $host
--# mingw (msys) mingw [e.g. native]
--# cygwin mingw
--# *nix + wine mingw
--# where wine is equipped with the `winepath' executable.
--# In the native mingw case, the (msys) shell automatically
--# converts paths for any non-msys applications it launches,
--# but that facility isn't available from inside the cwrapper.
--# Similar accommodations are necessary for $host mingw and
--# $build cygwin. Calling this function does no harm for other
--# $host/$build combinations not listed above.
--#
--# ARG is the path (on $build) that should be converted to
--# the proper representation for $host. The result is stored
--# in $func_to_host_path_result.
--func_to_host_path ()
--{
-- func_to_host_path_result="$1"
-- if test -n "$1"; then
-- case $host in
-- *mingw* )
-- lt_sed_naive_backslashify='s|\\\\*|\\|g;s|/|\\|g;s|\\|\\\\|g'
-- case $build in
-- *mingw* ) # actually, msys
-- # awkward: cmd appends spaces to result
-- func_to_host_path_result=`( cmd //c echo "$1" ) 2>/dev/null |
-- $SED -e 's/[ ]*$//' -e "$lt_sed_naive_backslashify"`
-- ;;
-- *cygwin* )
-- func_to_host_path_result=`cygpath -w "$1" |
-- $SED -e "$lt_sed_naive_backslashify"`
-- ;;
-- * )
-- # Unfortunately, winepath does not exit with a non-zero
-- # error code, so we are forced to check the contents of
-- # stdout. On the other hand, if the command is not
-- # found, the shell will set an exit code of 127 and print
-- # *an error message* to stdout. So we must check for both
-- # error code of zero AND non-empty stdout, which explains
-- # the odd construction:
-- func_to_host_path_tmp1=`winepath -w "$1" 2>/dev/null`
-- if test "$?" -eq 0 && test -n "${func_to_host_path_tmp1}"; then
-- func_to_host_path_result=`$ECHO "$func_to_host_path_tmp1" |
-- $SED -e "$lt_sed_naive_backslashify"`
-- else
-- # Allow warning below.
-- func_to_host_path_result=
-- fi
-- ;;
-- esac
-- if test -z "$func_to_host_path_result" ; then
-- func_error "Could not determine host path corresponding to"
-- func_error " \`$1'"
-- func_error "Continuing, but uninstalled executables may not work."
-- # Fallback:
-- func_to_host_path_result="$1"
-- fi
-- ;;
-- esac
-- fi
--}
--# end: func_to_host_path
--
--# func_to_host_pathlist arg
--#
--# Convert pathlists to host format when used with build tools.
--# See func_to_host_path(), above. This function supports the
--# following $build/$host combinations (but does no harm for
--# combinations not listed here):
--# $build $host
--# mingw (msys) mingw [e.g. native]
--# cygwin mingw
--# *nix + wine mingw
--#
--# Path separators are also converted from $build format to
--# $host format. If ARG begins or ends with a path separator
--# character, it is preserved (but converted to $host format)
--# on output.
--#
--# ARG is a pathlist (on $build) that should be converted to
--# the proper representation on $host. The result is stored
--# in $func_to_host_pathlist_result.
--func_to_host_pathlist ()
--{
-- func_to_host_pathlist_result="$1"
-- if test -n "$1"; then
-- case $host in
-- *mingw* )
-- lt_sed_naive_backslashify='s|\\\\*|\\|g;s|/|\\|g;s|\\|\\\\|g'
-- # Remove leading and trailing path separator characters from
-- # ARG. msys behavior is inconsistent here, cygpath turns them
-- # into '.;' and ';.', and winepath ignores them completely.
-- func_stripname : : "$1"
-- func_to_host_pathlist_tmp1=$func_stripname_result
-- case $build in
-- *mingw* ) # Actually, msys.
-- # Awkward: cmd appends spaces to result.
-- func_to_host_pathlist_result=`
-- ( cmd //c echo "$func_to_host_pathlist_tmp1" ) 2>/dev/null |
-- $SED -e 's/[ ]*$//' -e "$lt_sed_naive_backslashify"`
-- ;;
-- *cygwin* )
-- func_to_host_pathlist_result=`cygpath -w -p "$func_to_host_pathlist_tmp1" |
-- $SED -e "$lt_sed_naive_backslashify"`
-- ;;
-- * )
-- # unfortunately, winepath doesn't convert pathlists
-- func_to_host_pathlist_result=""
-- func_to_host_pathlist_oldIFS=$IFS
-- IFS=:
-- for func_to_host_pathlist_f in $func_to_host_pathlist_tmp1 ; do
-- IFS=$func_to_host_pathlist_oldIFS
-- if test -n "$func_to_host_pathlist_f" ; then
-- func_to_host_path "$func_to_host_pathlist_f"
-- if test -n "$func_to_host_path_result" ; then
-- if test -z "$func_to_host_pathlist_result" ; then
-- func_to_host_pathlist_result="$func_to_host_path_result"
-- else
-- func_append func_to_host_pathlist_result ";$func_to_host_path_result"
-- fi
-- fi
-- fi
-- done
-- IFS=$func_to_host_pathlist_oldIFS
-- ;;
-- esac
-- if test -z "$func_to_host_pathlist_result"; then
-- func_error "Could not determine the host path(s) corresponding to"
-- func_error " \`$1'"
-- func_error "Continuing, but uninstalled executables may not work."
-- # Fallback. This may break if $1 contains DOS-style drive
-- # specifications. The fix is not to complicate the expression
-- # below, but for the user to provide a working wine installation
-- # with winepath so that path translation in the cross-to-mingw
-- # case works properly.
-- lt_replace_pathsep_nix_to_dos="s|:|;|g"
-- func_to_host_pathlist_result=`echo "$func_to_host_pathlist_tmp1" |\
-- $SED -e "$lt_replace_pathsep_nix_to_dos"`
-- fi
-- # Now, add the leading and trailing path separators back
-- case "$1" in
-- :* ) func_to_host_pathlist_result=";$func_to_host_pathlist_result"
-- ;;
-- esac
-- case "$1" in
-- *: ) func_append func_to_host_pathlist_result ";"
-- ;;
-- esac
-- ;;
-- esac
-- fi
--}
--# end: func_to_host_pathlist
--
- # func_emit_cwrapperexe_src
- # emit the source code for a wrapper executable on stdout
- # Must ONLY be called from within func_mode_link because
-@@ -3334,10 +4148,6 @@ func_emit_cwrapperexe_src ()
-
- This wrapper executable should never be moved out of the build directory.
- If it is, it will not operate correctly.
--
-- Currently, it simply execs the wrapper *script* "$SHELL $output",
-- but could eventually absorb all of the scripts functionality and
-- exec $objdir/$outputname directly.
- */
- EOF
- cat <<"EOF"
-@@ -3462,22 +4272,13 @@ int setenv (const char *, const char *, int);
- if (stale) { free ((void *) stale); stale = 0; } \
- } while (0)
-
--#undef LTWRAPPER_DEBUGPRINTF
--#if defined LT_DEBUGWRAPPER
--# define LTWRAPPER_DEBUGPRINTF(args) ltwrapper_debugprintf args
--static void
--ltwrapper_debugprintf (const char *fmt, ...)
--{
-- va_list args;
-- va_start (args, fmt);
-- (void) vfprintf (stderr, fmt, args);
-- va_end (args);
--}
-+#if defined(LT_DEBUGWRAPPER)
-+static int lt_debug = 1;
- #else
--# define LTWRAPPER_DEBUGPRINTF(args)
-+static int lt_debug = 0;
- #endif
-
--const char *program_name = NULL;
-+const char *program_name = "libtool-wrapper"; /* in case xstrdup fails */
-
- void *xmalloc (size_t num);
- char *xstrdup (const char *string);
-@@ -3487,7 +4288,10 @@ char *chase_symlinks (const char *pathspec);
- int make_executable (const char *path);
- int check_executable (const char *path);
- char *strendzap (char *str, const char *pat);
--void lt_fatal (const char *message, ...);
-+void lt_debugprintf (const char *file, int line, const char *fmt, ...);
-+void lt_fatal (const char *file, int line, const char *message, ...);
-+static const char *nonnull (const char *s);
-+static const char *nonempty (const char *s);
- void lt_setenv (const char *name, const char *value);
- char *lt_extend_str (const char *orig_value, const char *add, int to_end);
- void lt_update_exe_path (const char *name, const char *value);
-@@ -3497,14 +4301,14 @@ void lt_dump_script (FILE *f);
- EOF
-
- cat <<EOF
--const char * MAGIC_EXE = "$magic_exe";
-+volatile const char * MAGIC_EXE = "$magic_exe";
- const char * LIB_PATH_VARNAME = "$shlibpath_var";
- EOF
-
- if test "$shlibpath_overrides_runpath" = yes && test -n "$shlibpath_var" && test -n "$temp_rpath"; then
-- func_to_host_pathlist "$temp_rpath"
-+ func_to_host_path "$temp_rpath"
- cat <<EOF
--const char * LIB_PATH_VALUE = "$func_to_host_pathlist_result";
-+const char * LIB_PATH_VALUE = "$func_to_host_path_result";
- EOF
- else
- cat <<"EOF"
-@@ -3513,10 +4317,10 @@ EOF
- fi
-
- if test -n "$dllsearchpath"; then
-- func_to_host_pathlist "$dllsearchpath:"
-+ func_to_host_path "$dllsearchpath:"
- cat <<EOF
- const char * EXE_PATH_VARNAME = "PATH";
--const char * EXE_PATH_VALUE = "$func_to_host_pathlist_result";
-+const char * EXE_PATH_VALUE = "$func_to_host_path_result";
- EOF
- else
- cat <<"EOF"
-@@ -3539,12 +4343,10 @@ EOF
- cat <<"EOF"
-
- #define LTWRAPPER_OPTION_PREFIX "--lt-"
--#define LTWRAPPER_OPTION_PREFIX_LENGTH 5
-
--static const size_t opt_prefix_len = LTWRAPPER_OPTION_PREFIX_LENGTH;
- static const char *ltwrapper_option_prefix = LTWRAPPER_OPTION_PREFIX;
--
- static const char *dumpscript_opt = LTWRAPPER_OPTION_PREFIX "dump-script";
-+static const char *debug_opt = LTWRAPPER_OPTION_PREFIX "debug";
-
- int
- main (int argc, char *argv[])
-@@ -3561,10 +4363,13 @@ main (int argc, char *argv[])
- int i;
-
- program_name = (char *) xstrdup (base_name (argv[0]));
-- LTWRAPPER_DEBUGPRINTF (("(main) argv[0] : %s\n", argv[0]));
-- LTWRAPPER_DEBUGPRINTF (("(main) program_name : %s\n", program_name));
-+ newargz = XMALLOC (char *, argc + 1);
-
-- /* very simple arg parsing; don't want to rely on getopt */
-+ /* very simple arg parsing; don't want to rely on getopt
-+ * also, copy all non cwrapper options to newargz, except
-+ * argz[0], which is handled differently
-+ */
-+ newargc=0;
- for (i = 1; i < argc; i++)
- {
- if (strcmp (argv[i], dumpscript_opt) == 0)
-@@ -3581,21 +4386,54 @@ EOF
- lt_dump_script (stdout);
- return 0;
- }
-+ if (strcmp (argv[i], debug_opt) == 0)
-+ {
-+ lt_debug = 1;
-+ continue;
-+ }
-+ if (strcmp (argv[i], ltwrapper_option_prefix) == 0)
-+ {
-+ /* however, if there is an option in the LTWRAPPER_OPTION_PREFIX
-+ namespace, but it is not one of the ones we know about and
-+ have already dealt with, above (inluding dump-script), then
-+ report an error. Otherwise, targets might begin to believe
-+ they are allowed to use options in the LTWRAPPER_OPTION_PREFIX
-+ namespace. The first time any user complains about this, we'll
-+ need to make LTWRAPPER_OPTION_PREFIX a configure-time option
-+ or a configure.ac-settable value.
-+ */
-+ lt_fatal (__FILE__, __LINE__,
-+ "unrecognized %s option: '%s'",
-+ ltwrapper_option_prefix, argv[i]);
-+ }
-+ /* otherwise ... */
-+ newargz[++newargc] = xstrdup (argv[i]);
- }
-+ newargz[++newargc] = NULL;
-+
-+EOF
-+ cat <<EOF
-+ /* The GNU banner must be the first non-error debug message */
-+ lt_debugprintf (__FILE__, __LINE__, "libtool wrapper (GNU $PACKAGE$TIMESTAMP) $VERSION\n");
-+EOF
-+ cat <<"EOF"
-+ lt_debugprintf (__FILE__, __LINE__, "(main) argv[0]: %s\n", argv[0]);
-+ lt_debugprintf (__FILE__, __LINE__, "(main) program_name: %s\n", program_name);
-
-- newargz = XMALLOC (char *, argc + 1);
- tmp_pathspec = find_executable (argv[0]);
- if (tmp_pathspec == NULL)
-- lt_fatal ("Couldn't find %s", argv[0]);
-- LTWRAPPER_DEBUGPRINTF (("(main) found exe (before symlink chase) at : %s\n",
-- tmp_pathspec));
-+ lt_fatal (__FILE__, __LINE__, "couldn't find %s", argv[0]);
-+ lt_debugprintf (__FILE__, __LINE__,
-+ "(main) found exe (before symlink chase) at: %s\n",
-+ tmp_pathspec);
-
- actual_cwrapper_path = chase_symlinks (tmp_pathspec);
-- LTWRAPPER_DEBUGPRINTF (("(main) found exe (after symlink chase) at : %s\n",
-- actual_cwrapper_path));
-+ lt_debugprintf (__FILE__, __LINE__,
-+ "(main) found exe (after symlink chase) at: %s\n",
-+ actual_cwrapper_path);
- XFREE (tmp_pathspec);
-
-- actual_cwrapper_name = xstrdup( base_name (actual_cwrapper_path));
-+ actual_cwrapper_name = xstrdup (base_name (actual_cwrapper_path));
- strendzap (actual_cwrapper_path, actual_cwrapper_name);
-
- /* wrapper name transforms */
-@@ -3613,8 +4451,9 @@ EOF
- target_name = tmp_pathspec;
- tmp_pathspec = 0;
-
-- LTWRAPPER_DEBUGPRINTF (("(main) libtool target name: %s\n",
-- target_name));
-+ lt_debugprintf (__FILE__, __LINE__,
-+ "(main) libtool target name: %s\n",
-+ target_name);
- EOF
-
- cat <<EOF
-@@ -3664,35 +4503,19 @@ EOF
-
- lt_setenv ("BIN_SH", "xpg4"); /* for Tru64 */
- lt_setenv ("DUALCASE", "1"); /* for MSK sh */
-- lt_update_lib_path (LIB_PATH_VARNAME, LIB_PATH_VALUE);
-+ /* Update the DLL searchpath. EXE_PATH_VALUE ($dllsearchpath) must
-+ be prepended before (that is, appear after) LIB_PATH_VALUE ($temp_rpath)
-+ because on Windows, both *_VARNAMEs are PATH but uninstalled
-+ libraries must come first. */
- lt_update_exe_path (EXE_PATH_VARNAME, EXE_PATH_VALUE);
-+ lt_update_lib_path (LIB_PATH_VARNAME, LIB_PATH_VALUE);
-
-- newargc=0;
-- for (i = 1; i < argc; i++)
-- {
-- if (strncmp (argv[i], ltwrapper_option_prefix, opt_prefix_len) == 0)
-- {
-- /* however, if there is an option in the LTWRAPPER_OPTION_PREFIX
-- namespace, but it is not one of the ones we know about and
-- have already dealt with, above (inluding dump-script), then
-- report an error. Otherwise, targets might begin to believe
-- they are allowed to use options in the LTWRAPPER_OPTION_PREFIX
-- namespace. The first time any user complains about this, we'll
-- need to make LTWRAPPER_OPTION_PREFIX a configure-time option
-- or a configure.ac-settable value.
-- */
-- lt_fatal ("Unrecognized option in %s namespace: '%s'",
-- ltwrapper_option_prefix, argv[i]);
-- }
-- /* otherwise ... */
-- newargz[++newargc] = xstrdup (argv[i]);
-- }
-- newargz[++newargc] = NULL;
--
-- LTWRAPPER_DEBUGPRINTF (("(main) lt_argv_zero : %s\n", (lt_argv_zero ? lt_argv_zero : "<NULL>")));
-+ lt_debugprintf (__FILE__, __LINE__, "(main) lt_argv_zero: %s\n",
-+ nonnull (lt_argv_zero));
- for (i = 0; i < newargc; i++)
- {
-- LTWRAPPER_DEBUGPRINTF (("(main) newargz[%d] : %s\n", i, (newargz[i] ? newargz[i] : "<NULL>")));
-+ lt_debugprintf (__FILE__, __LINE__, "(main) newargz[%d]: %s\n",
-+ i, nonnull (newargz[i]));
- }
-
- EOF
-@@ -3706,7 +4529,9 @@ EOF
- if (rval == -1)
- {
- /* failed to start process */
-- LTWRAPPER_DEBUGPRINTF (("(main) failed to launch target \"%s\": errno = %d\n", lt_argv_zero, errno));
-+ lt_debugprintf (__FILE__, __LINE__,
-+ "(main) failed to launch target \"%s\": %s\n",
-+ lt_argv_zero, nonnull (strerror (errno)));
- return 127;
- }
- return rval;
-@@ -3728,7 +4553,7 @@ xmalloc (size_t num)
- {
- void *p = (void *) malloc (num);
- if (!p)
-- lt_fatal ("Memory exhausted");
-+ lt_fatal (__FILE__, __LINE__, "memory exhausted");
-
- return p;
- }
-@@ -3762,8 +4587,8 @@ check_executable (const char *path)
- {
- struct stat st;
-
-- LTWRAPPER_DEBUGPRINTF (("(check_executable) : %s\n",
-- path ? (*path ? path : "EMPTY!") : "NULL!"));
-+ lt_debugprintf (__FILE__, __LINE__, "(check_executable): %s\n",
-+ nonempty (path));
- if ((!path) || (!*path))
- return 0;
-
-@@ -3780,8 +4605,8 @@ make_executable (const char *path)
- int rval = 0;
- struct stat st;
-
-- LTWRAPPER_DEBUGPRINTF (("(make_executable) : %s\n",
-- path ? (*path ? path : "EMPTY!") : "NULL!"));
-+ lt_debugprintf (__FILE__, __LINE__, "(make_executable): %s\n",
-+ nonempty (path));
- if ((!path) || (!*path))
- return 0;
-
-@@ -3807,8 +4632,8 @@ find_executable (const char *wrapper)
- int tmp_len;
- char *concat_name;
-
-- LTWRAPPER_DEBUGPRINTF (("(find_executable) : %s\n",
-- wrapper ? (*wrapper ? wrapper : "EMPTY!") : "NULL!"));
-+ lt_debugprintf (__FILE__, __LINE__, "(find_executable): %s\n",
-+ nonempty (wrapper));
-
- if ((wrapper == NULL) || (*wrapper == '\0'))
- return NULL;
-@@ -3861,7 +4686,8 @@ find_executable (const char *wrapper)
- {
- /* empty path: current directory */
- if (getcwd (tmp, LT_PATHMAX) == NULL)
-- lt_fatal ("getcwd failed");
-+ lt_fatal (__FILE__, __LINE__, "getcwd failed: %s",
-+ nonnull (strerror (errno)));
- tmp_len = strlen (tmp);
- concat_name =
- XMALLOC (char, tmp_len + 1 + strlen (wrapper) + 1);
-@@ -3886,7 +4712,8 @@ find_executable (const char *wrapper)
- }
- /* Relative path | not found in path: prepend cwd */
- if (getcwd (tmp, LT_PATHMAX) == NULL)
-- lt_fatal ("getcwd failed");
-+ lt_fatal (__FILE__, __LINE__, "getcwd failed: %s",
-+ nonnull (strerror (errno)));
- tmp_len = strlen (tmp);
- concat_name = XMALLOC (char, tmp_len + 1 + strlen (wrapper) + 1);
- memcpy (concat_name, tmp, tmp_len);
-@@ -3912,8 +4739,9 @@ chase_symlinks (const char *pathspec)
- int has_symlinks = 0;
- while (strlen (tmp_pathspec) && !has_symlinks)
- {
-- LTWRAPPER_DEBUGPRINTF (("checking path component for symlinks: %s\n",
-- tmp_pathspec));
-+ lt_debugprintf (__FILE__, __LINE__,
-+ "checking path component for symlinks: %s\n",
-+ tmp_pathspec);
- if (lstat (tmp_pathspec, &s) == 0)
- {
- if (S_ISLNK (s.st_mode) != 0)
-@@ -3935,8 +4763,9 @@ chase_symlinks (const char *pathspec)
- }
- else
- {
-- char *errstr = strerror (errno);
-- lt_fatal ("Error accessing file %s (%s)", tmp_pathspec, errstr);
-+ lt_fatal (__FILE__, __LINE__,
-+ "error accessing file \"%s\": %s",
-+ tmp_pathspec, nonnull (strerror (errno)));
- }
- }
- XFREE (tmp_pathspec);
-@@ -3949,7 +4778,8 @@ chase_symlinks (const char *pathspec)
- tmp_pathspec = realpath (pathspec, buf);
- if (tmp_pathspec == 0)
- {
-- lt_fatal ("Could not follow symlinks for %s", pathspec);
-+ lt_fatal (__FILE__, __LINE__,
-+ "could not follow symlinks for %s", pathspec);
- }
- return xstrdup (tmp_pathspec);
- #endif
-@@ -3975,11 +4805,25 @@ strendzap (char *str, const char *pat)
- return str;
- }
-
-+void
-+lt_debugprintf (const char *file, int line, const char *fmt, ...)
-+{
-+ va_list args;
-+ if (lt_debug)
-+ {
-+ (void) fprintf (stderr, "%s:%s:%d: ", program_name, file, line);
-+ va_start (args, fmt);
-+ (void) vfprintf (stderr, fmt, args);
-+ va_end (args);
-+ }
-+}
-+
- static void
--lt_error_core (int exit_status, const char *mode,
-+lt_error_core (int exit_status, const char *file,
-+ int line, const char *mode,
- const char *message, va_list ap)
- {
-- fprintf (stderr, "%s: %s: ", program_name, mode);
-+ fprintf (stderr, "%s:%s:%d: %s: ", program_name, file, line, mode);
- vfprintf (stderr, message, ap);
- fprintf (stderr, ".\n");
-
-@@ -3988,20 +4832,32 @@ lt_error_core (int exit_status, const char *mode,
- }
-
- void
--lt_fatal (const char *message, ...)
-+lt_fatal (const char *file, int line, const char *message, ...)
- {
- va_list ap;
- va_start (ap, message);
-- lt_error_core (EXIT_FAILURE, "FATAL", message, ap);
-+ lt_error_core (EXIT_FAILURE, file, line, "FATAL", message, ap);
- va_end (ap);
- }
-
-+static const char *
-+nonnull (const char *s)
-+{
-+ return s ? s : "(null)";
-+}
-+
-+static const char *
-+nonempty (const char *s)
-+{
-+ return (s && !*s) ? "(empty)" : nonnull (s);
-+}
-+
- void
- lt_setenv (const char *name, const char *value)
- {
-- LTWRAPPER_DEBUGPRINTF (("(lt_setenv) setting '%s' to '%s'\n",
-- (name ? name : "<NULL>"),
-- (value ? value : "<NULL>")));
-+ lt_debugprintf (__FILE__, __LINE__,
-+ "(lt_setenv) setting '%s' to '%s'\n",
-+ nonnull (name), nonnull (value));
- {
- #ifdef HAVE_SETENV
- /* always make a copy, for consistency with !HAVE_SETENV */
-@@ -4049,9 +4905,9 @@ lt_extend_str (const char *orig_value, const char *add, int to_end)
- void
- lt_update_exe_path (const char *name, const char *value)
- {
-- LTWRAPPER_DEBUGPRINTF (("(lt_update_exe_path) modifying '%s' by prepending '%s'\n",
-- (name ? name : "<NULL>"),
-- (value ? value : "<NULL>")));
-+ lt_debugprintf (__FILE__, __LINE__,
-+ "(lt_update_exe_path) modifying '%s' by prepending '%s'\n",
-+ nonnull (name), nonnull (value));
-
- if (name && *name && value && *value)
- {
-@@ -4070,9 +4926,9 @@ lt_update_exe_path (const char *name, const char *value)
- void
- lt_update_lib_path (const char *name, const char *value)
- {
-- LTWRAPPER_DEBUGPRINTF (("(lt_update_lib_path) modifying '%s' by prepending '%s'\n",
-- (name ? name : "<NULL>"),
-- (value ? value : "<NULL>")));
-+ lt_debugprintf (__FILE__, __LINE__,
-+ "(lt_update_lib_path) modifying '%s' by prepending '%s'\n",
-+ nonnull (name), nonnull (value));
-
- if (name && *name && value && *value)
- {
-@@ -4222,7 +5078,7 @@ EOF
- func_win32_import_lib_p ()
- {
- $opt_debug
-- case `eval "$file_magic_cmd \"\$1\" 2>/dev/null" | $SED -e 10q` in
-+ case `eval $file_magic_cmd \"\$1\" 2>/dev/null | $SED -e 10q` in
- *import*) : ;;
- *) false ;;
- esac
-@@ -4401,9 +5257,9 @@ func_mode_link ()
- ;;
- *)
- if test "$prev" = dlfiles; then
-- dlfiles="$dlfiles $arg"
-+ func_append dlfiles " $arg"
- else
-- dlprefiles="$dlprefiles $arg"
-+ func_append dlprefiles " $arg"
- fi
- prev=
- continue
-@@ -4427,7 +5283,7 @@ func_mode_link ()
- *-*-darwin*)
- case "$deplibs " in
- *" $qarg.ltframework "*) ;;
-- *) deplibs="$deplibs $qarg.ltframework" # this is fixed later
-+ *) func_append deplibs " $qarg.ltframework" # this is fixed later
- ;;
- esac
- ;;
-@@ -4446,7 +5302,7 @@ func_mode_link ()
- moreargs=
- for fil in `cat "$save_arg"`
- do
--# moreargs="$moreargs $fil"
-+# func_append moreargs " $fil"
- arg=$fil
- # A libtool-controlled object.
-
-@@ -4475,7 +5331,7 @@ func_mode_link ()
-
- if test "$prev" = dlfiles; then
- if test "$build_libtool_libs" = yes && test "$dlopen_support" = yes; then
-- dlfiles="$dlfiles $pic_object"
-+ func_append dlfiles " $pic_object"
- prev=
- continue
- else
-@@ -4487,7 +5343,7 @@ func_mode_link ()
- # CHECK ME: I think I busted this. -Ossama
- if test "$prev" = dlprefiles; then
- # Preload the old-style object.
-- dlprefiles="$dlprefiles $pic_object"
-+ func_append dlprefiles " $pic_object"
- prev=
- fi
-
-@@ -4557,12 +5413,12 @@ func_mode_link ()
- if test "$prev" = rpath; then
- case "$rpath " in
- *" $arg "*) ;;
-- *) rpath="$rpath $arg" ;;
-+ *) func_append rpath " $arg" ;;
- esac
- else
- case "$xrpath " in
- *" $arg "*) ;;
-- *) xrpath="$xrpath $arg" ;;
-+ *) func_append xrpath " $arg" ;;
- esac
- fi
- prev=
-@@ -4574,28 +5430,28 @@ func_mode_link ()
- continue
- ;;
- weak)
-- weak_libs="$weak_libs $arg"
-+ func_append weak_libs " $arg"
- prev=
- continue
- ;;
- xcclinker)
-- linker_flags="$linker_flags $qarg"
-- compiler_flags="$compiler_flags $qarg"
-+ func_append linker_flags " $qarg"
-+ func_append compiler_flags " $qarg"
- prev=
- func_append compile_command " $qarg"
- func_append finalize_command " $qarg"
- continue
- ;;
- xcompiler)
-- compiler_flags="$compiler_flags $qarg"
-+ func_append compiler_flags " $qarg"
- prev=
- func_append compile_command " $qarg"
- func_append finalize_command " $qarg"
- continue
- ;;
- xlinker)
-- linker_flags="$linker_flags $qarg"
-- compiler_flags="$compiler_flags $wl$qarg"
-+ func_append linker_flags " $qarg"
-+ func_append compiler_flags " $wl$qarg"
- prev=
- func_append compile_command " $wl$qarg"
- func_append finalize_command " $wl$qarg"
-@@ -4686,15 +5542,16 @@ func_mode_link ()
- ;;
-
- -L*)
-- func_stripname '-L' '' "$arg"
-- dir=$func_stripname_result
-- if test -z "$dir"; then
-+ func_stripname "-L" '' "$arg"
-+ if test -z "$func_stripname_result"; then
- if test "$#" -gt 0; then
- func_fatal_error "require no space between \`-L' and \`$1'"
- else
- func_fatal_error "need path for \`-L' option"
- fi
- fi
-+ func_resolve_sysroot "$func_stripname_result"
-+ dir=$func_resolve_sysroot_result
- # We need an absolute path.
- case $dir in
- [\\/]* | [A-Za-z]:[\\/]*) ;;
-@@ -4706,10 +5563,16 @@ func_mode_link ()
- ;;
- esac
- case "$deplibs " in
-- *" -L$dir "*) ;;
-+ *" -L$dir "* | *" $arg "*)
-+ # Will only happen for absolute or sysroot arguments
-+ ;;
- *)
-- deplibs="$deplibs -L$dir"
-- lib_search_path="$lib_search_path $dir"
-+ # Preserve sysroot, but never include relative directories
-+ case $dir in
-+ [\\/]* | [A-Za-z]:[\\/]* | =*) func_append deplibs " $arg" ;;
-+ *) func_append deplibs " -L$dir" ;;
-+ esac
-+ func_append lib_search_path " $dir"
- ;;
- esac
- case $host in
-@@ -4718,12 +5581,12 @@ func_mode_link ()
- case :$dllsearchpath: in
- *":$dir:"*) ;;
- ::) dllsearchpath=$dir;;
-- *) dllsearchpath="$dllsearchpath:$dir";;
-+ *) func_append dllsearchpath ":$dir";;
- esac
- case :$dllsearchpath: in
- *":$testbindir:"*) ;;
- ::) dllsearchpath=$testbindir;;
-- *) dllsearchpath="$dllsearchpath:$testbindir";;
-+ *) func_append dllsearchpath ":$testbindir";;
- esac
- ;;
- esac
-@@ -4747,7 +5610,7 @@ func_mode_link ()
- ;;
- *-*-rhapsody* | *-*-darwin1.[012])
- # Rhapsody C and math libraries are in the System framework
-- deplibs="$deplibs System.ltframework"
-+ func_append deplibs " System.ltframework"
- continue
- ;;
- *-*-sco3.2v5* | *-*-sco5v6*)
-@@ -4758,9 +5621,6 @@ func_mode_link ()
- # Compiler inserts libc in the correct place for threads to work
- test "X$arg" = "X-lc" && continue
- ;;
-- *-*-linux*)
-- test "X$arg" = "X-lc" && continue
-- ;;
- esac
- elif test "X$arg" = "X-lc_r"; then
- case $host in
-@@ -4770,7 +5630,7 @@ func_mode_link ()
- ;;
- esac
- fi
-- deplibs="$deplibs $arg"
-+ func_append deplibs " $arg"
- continue
- ;;
-
-@@ -4782,8 +5642,8 @@ func_mode_link ()
- # Tru64 UNIX uses -model [arg] to determine the layout of C++
- # classes, name mangling, and exception handling.
- # Darwin uses the -arch flag to determine output architecture.
-- -model|-arch|-isysroot)
-- compiler_flags="$compiler_flags $arg"
-+ -model|-arch|-isysroot|--sysroot)
-+ func_append compiler_flags " $arg"
- func_append compile_command " $arg"
- func_append finalize_command " $arg"
- prev=xcompiler
-@@ -4791,12 +5651,12 @@ func_mode_link ()
- ;;
-
- -mt|-mthreads|-kthread|-Kthread|-pthread|-pthreads|--thread-safe|-threads)
-- compiler_flags="$compiler_flags $arg"
-+ func_append compiler_flags " $arg"
- func_append compile_command " $arg"
- func_append finalize_command " $arg"
- case "$new_inherited_linker_flags " in
- *" $arg "*) ;;
-- * ) new_inherited_linker_flags="$new_inherited_linker_flags $arg" ;;
-+ * ) func_append new_inherited_linker_flags " $arg" ;;
- esac
- continue
- ;;
-@@ -4863,13 +5723,17 @@ func_mode_link ()
- # We need an absolute path.
- case $dir in
- [\\/]* | [A-Za-z]:[\\/]*) ;;
-+ =*)
-+ func_stripname '=' '' "$dir"
-+ dir=$lt_sysroot$func_stripname_result
-+ ;;
- *)
- func_fatal_error "only absolute run-paths are allowed"
- ;;
- esac
- case "$xrpath " in
- *" $dir "*) ;;
-- *) xrpath="$xrpath $dir" ;;
-+ *) func_append xrpath " $dir" ;;
- esac
- continue
- ;;
-@@ -4922,8 +5786,8 @@ func_mode_link ()
- for flag in $args; do
- IFS="$save_ifs"
- func_quote_for_eval "$flag"
-- arg="$arg $func_quote_for_eval_result"
-- compiler_flags="$compiler_flags $func_quote_for_eval_result"
-+ func_append arg " $func_quote_for_eval_result"
-+ func_append compiler_flags " $func_quote_for_eval_result"
- done
- IFS="$save_ifs"
- func_stripname ' ' '' "$arg"
-@@ -4938,9 +5802,9 @@ func_mode_link ()
- for flag in $args; do
- IFS="$save_ifs"
- func_quote_for_eval "$flag"
-- arg="$arg $wl$func_quote_for_eval_result"
-- compiler_flags="$compiler_flags $wl$func_quote_for_eval_result"
-- linker_flags="$linker_flags $func_quote_for_eval_result"
-+ func_append arg " $wl$func_quote_for_eval_result"
-+ func_append compiler_flags " $wl$func_quote_for_eval_result"
-+ func_append linker_flags " $func_quote_for_eval_result"
- done
- IFS="$save_ifs"
- func_stripname ' ' '' "$arg"
-@@ -4968,24 +5832,27 @@ func_mode_link ()
- arg="$func_quote_for_eval_result"
- ;;
-
-- # -64, -mips[0-9] enable 64-bit mode on the SGI compiler
-- # -r[0-9][0-9]* specifies the processor on the SGI compiler
-- # -xarch=*, -xtarget=* enable 64-bit mode on the Sun compiler
-- # +DA*, +DD* enable 64-bit mode on the HP compiler
-- # -q* pass through compiler args for the IBM compiler
-- # -m*, -t[45]*, -txscale* pass through architecture-specific
-- # compiler args for GCC
-- # -F/path gives path to uninstalled frameworks, gcc on darwin
-- # -p, -pg, --coverage, -fprofile-* pass through profiling flag for GCC
-- # @file GCC response files
-- # -tp=* Portland pgcc target processor selection
-+ # Flags to be passed through unchanged, with rationale:
-+ # -64, -mips[0-9] enable 64-bit mode for the SGI compiler
-+ # -r[0-9][0-9]* specify processor for the SGI compiler
-+ # -xarch=*, -xtarget=* enable 64-bit mode for the Sun compiler
-+ # +DA*, +DD* enable 64-bit mode for the HP compiler
-+ # -q* compiler args for the IBM compiler
-+ # -m*, -t[45]*, -txscale* architecture-specific flags for GCC
-+ # -F/path path to uninstalled frameworks, gcc on darwin
-+ # -p, -pg, --coverage, -fprofile-* profiling flags for GCC
-+ # @file GCC response files
-+ # -tp=* Portland pgcc target processor selection
-+ # --sysroot=* for sysroot support
-+ # -O*, -flto*, -fwhopr*, -fuse-linker-plugin GCC link-time optimization
- -64|-mips[0-9]|-r[0-9][0-9]*|-xarch=*|-xtarget=*|+DA*|+DD*|-q*|-m*| \
-- -t[45]*|-txscale*|-p|-pg|--coverage|-fprofile-*|-F*|@*|-tp=*)
-+ -t[45]*|-txscale*|-p|-pg|--coverage|-fprofile-*|-F*|@*|-tp=*|--sysroot=*| \
-+ -O*|-flto*|-fwhopr*|-fuse-linker-plugin)
- func_quote_for_eval "$arg"
- arg="$func_quote_for_eval_result"
- func_append compile_command " $arg"
- func_append finalize_command " $arg"
-- compiler_flags="$compiler_flags $arg"
-+ func_append compiler_flags " $arg"
- continue
- ;;
-
-@@ -4997,7 +5864,7 @@ func_mode_link ()
-
- *.$objext)
- # A standard object.
-- objs="$objs $arg"
-+ func_append objs " $arg"
- ;;
-
- *.lo)
-@@ -5028,7 +5895,7 @@ func_mode_link ()
-
- if test "$prev" = dlfiles; then
- if test "$build_libtool_libs" = yes && test "$dlopen_support" = yes; then
-- dlfiles="$dlfiles $pic_object"
-+ func_append dlfiles " $pic_object"
- prev=
- continue
- else
-@@ -5040,7 +5907,7 @@ func_mode_link ()
- # CHECK ME: I think I busted this. -Ossama
- if test "$prev" = dlprefiles; then
- # Preload the old-style object.
-- dlprefiles="$dlprefiles $pic_object"
-+ func_append dlprefiles " $pic_object"
- prev=
- fi
-
-@@ -5085,24 +5952,25 @@ func_mode_link ()
-
- *.$libext)
- # An archive.
-- deplibs="$deplibs $arg"
-- old_deplibs="$old_deplibs $arg"
-+ func_append deplibs " $arg"
-+ func_append old_deplibs " $arg"
- continue
- ;;
-
- *.la)
- # A libtool-controlled library.
-
-+ func_resolve_sysroot "$arg"
- if test "$prev" = dlfiles; then
- # This library was specified with -dlopen.
-- dlfiles="$dlfiles $arg"
-+ func_append dlfiles " $func_resolve_sysroot_result"
- prev=
- elif test "$prev" = dlprefiles; then
- # The library was specified with -dlpreopen.
-- dlprefiles="$dlprefiles $arg"
-+ func_append dlprefiles " $func_resolve_sysroot_result"
- prev=
- else
-- deplibs="$deplibs $arg"
-+ func_append deplibs " $func_resolve_sysroot_result"
- fi
- continue
- ;;
-@@ -5127,7 +5995,7 @@ func_mode_link ()
- func_fatal_help "the \`$prevarg' option requires an argument"
-
- if test "$export_dynamic" = yes && test -n "$export_dynamic_flag_spec"; then
-- eval "arg=\"$export_dynamic_flag_spec\""
-+ eval arg=\"$export_dynamic_flag_spec\"
- func_append compile_command " $arg"
- func_append finalize_command " $arg"
- fi
-@@ -5144,11 +6012,13 @@ func_mode_link ()
- else
- shlib_search_path=
- fi
-- eval "sys_lib_search_path=\"$sys_lib_search_path_spec\""
-- eval "sys_lib_dlsearch_path=\"$sys_lib_dlsearch_path_spec\""
-+ eval sys_lib_search_path=\"$sys_lib_search_path_spec\"
-+ eval sys_lib_dlsearch_path=\"$sys_lib_dlsearch_path_spec\"
-
- func_dirname "$output" "/" ""
- output_objdir="$func_dirname_result$objdir"
-+ func_to_tool_file "$output_objdir/"
-+ tool_output_objdir=$func_to_tool_file_result
- # Create the object directory.
- func_mkdir_p "$output_objdir"
-
-@@ -5169,12 +6039,12 @@ func_mode_link ()
- # Find all interdependent deplibs by searching for libraries
- # that are linked more than once (e.g. -la -lb -la)
- for deplib in $deplibs; do
-- if $opt_duplicate_deps ; then
-+ if $opt_preserve_dup_deps ; then
- case "$libs " in
-- *" $deplib "*) specialdeplibs="$specialdeplibs $deplib" ;;
-+ *" $deplib "*) func_append specialdeplibs " $deplib" ;;
- esac
- fi
-- libs="$libs $deplib"
-+ func_append libs " $deplib"
- done
-
- if test "$linkmode" = lib; then
-@@ -5187,9 +6057,9 @@ func_mode_link ()
- if $opt_duplicate_compiler_generated_deps; then
- for pre_post_dep in $predeps $postdeps; do
- case "$pre_post_deps " in
-- *" $pre_post_dep "*) specialdeplibs="$specialdeplibs $pre_post_deps" ;;
-+ *" $pre_post_dep "*) func_append specialdeplibs " $pre_post_deps" ;;
- esac
-- pre_post_deps="$pre_post_deps $pre_post_dep"
-+ func_append pre_post_deps " $pre_post_dep"
- done
- fi
- pre_post_deps=
-@@ -5256,8 +6126,9 @@ func_mode_link ()
- for lib in $dlprefiles; do
- # Ignore non-libtool-libs
- dependency_libs=
-+ func_resolve_sysroot "$lib"
- case $lib in
-- *.la) func_source "$lib" ;;
-+ *.la) func_source "$func_resolve_sysroot_result" ;;
- esac
-
- # Collect preopened libtool deplibs, except any this library
-@@ -5267,7 +6138,7 @@ func_mode_link ()
- deplib_base=$func_basename_result
- case " $weak_libs " in
- *" $deplib_base "*) ;;
-- *) deplibs="$deplibs $deplib" ;;
-+ *) func_append deplibs " $deplib" ;;
- esac
- done
- done
-@@ -5288,11 +6159,11 @@ func_mode_link ()
- compile_deplibs="$deplib $compile_deplibs"
- finalize_deplibs="$deplib $finalize_deplibs"
- else
-- compiler_flags="$compiler_flags $deplib"
-+ func_append compiler_flags " $deplib"
- if test "$linkmode" = lib ; then
- case "$new_inherited_linker_flags " in
- *" $deplib "*) ;;
-- * ) new_inherited_linker_flags="$new_inherited_linker_flags $deplib" ;;
-+ * ) func_append new_inherited_linker_flags " $deplib" ;;
- esac
- fi
- fi
-@@ -5377,7 +6248,7 @@ func_mode_link ()
- if test "$linkmode" = lib ; then
- case "$new_inherited_linker_flags " in
- *" $deplib "*) ;;
-- * ) new_inherited_linker_flags="$new_inherited_linker_flags $deplib" ;;
-+ * ) func_append new_inherited_linker_flags " $deplib" ;;
- esac
- fi
- fi
-@@ -5390,7 +6261,8 @@ func_mode_link ()
- test "$pass" = conv && continue
- newdependency_libs="$deplib $newdependency_libs"
- func_stripname '-L' '' "$deplib"
-- newlib_search_path="$newlib_search_path $func_stripname_result"
-+ func_resolve_sysroot "$func_stripname_result"
-+ func_append newlib_search_path " $func_resolve_sysroot_result"
- ;;
- prog)
- if test "$pass" = conv; then
-@@ -5404,7 +6276,8 @@ func_mode_link ()
- finalize_deplibs="$deplib $finalize_deplibs"
- fi
- func_stripname '-L' '' "$deplib"
-- newlib_search_path="$newlib_search_path $func_stripname_result"
-+ func_resolve_sysroot "$func_stripname_result"
-+ func_append newlib_search_path " $func_resolve_sysroot_result"
- ;;
- *)
- func_warning "\`-L' is ignored for archives/objects"
-@@ -5415,17 +6288,21 @@ func_mode_link ()
- -R*)
- if test "$pass" = link; then
- func_stripname '-R' '' "$deplib"
-- dir=$func_stripname_result
-+ func_resolve_sysroot "$func_stripname_result"
-+ dir=$func_resolve_sysroot_result
- # Make sure the xrpath contains only unique directories.
- case "$xrpath " in
- *" $dir "*) ;;
-- *) xrpath="$xrpath $dir" ;;
-+ *) func_append xrpath " $dir" ;;
- esac
- fi
- deplibs="$deplib $deplibs"
- continue
- ;;
-- *.la) lib="$deplib" ;;
-+ *.la)
-+ func_resolve_sysroot "$deplib"
-+ lib=$func_resolve_sysroot_result
-+ ;;
- *.$libext)
- if test "$pass" = conv; then
- deplibs="$deplib $deplibs"
-@@ -5488,11 +6365,11 @@ func_mode_link ()
- if test "$pass" = dlpreopen || test "$dlopen_support" != yes || test "$build_libtool_libs" = no; then
- # If there is no dlopen support or we're linking statically,
- # we need to preload.
-- newdlprefiles="$newdlprefiles $deplib"
-+ func_append newdlprefiles " $deplib"
- compile_deplibs="$deplib $compile_deplibs"
- finalize_deplibs="$deplib $finalize_deplibs"
- else
-- newdlfiles="$newdlfiles $deplib"
-+ func_append newdlfiles " $deplib"
- fi
- fi
- continue
-@@ -5538,7 +6415,7 @@ func_mode_link ()
- for tmp_inherited_linker_flag in $tmp_inherited_linker_flags; do
- case " $new_inherited_linker_flags " in
- *" $tmp_inherited_linker_flag "*) ;;
-- *) new_inherited_linker_flags="$new_inherited_linker_flags $tmp_inherited_linker_flag";;
-+ *) func_append new_inherited_linker_flags " $tmp_inherited_linker_flag";;
- esac
- done
- fi
-@@ -5546,8 +6423,8 @@ func_mode_link ()
- if test "$linkmode,$pass" = "lib,link" ||
- test "$linkmode,$pass" = "prog,scan" ||
- { test "$linkmode" != prog && test "$linkmode" != lib; }; then
-- test -n "$dlopen" && dlfiles="$dlfiles $dlopen"
-- test -n "$dlpreopen" && dlprefiles="$dlprefiles $dlpreopen"
-+ test -n "$dlopen" && func_append dlfiles " $dlopen"
-+ test -n "$dlpreopen" && func_append dlprefiles " $dlpreopen"
- fi
-
- if test "$pass" = conv; then
-@@ -5558,20 +6435,20 @@ func_mode_link ()
- func_fatal_error "cannot find name of link library for \`$lib'"
- fi
- # It is a libtool convenience library, so add in its objects.
-- convenience="$convenience $ladir/$objdir/$old_library"
-- old_convenience="$old_convenience $ladir/$objdir/$old_library"
-+ func_append convenience " $ladir/$objdir/$old_library"
-+ func_append old_convenience " $ladir/$objdir/$old_library"
- elif test "$linkmode" != prog && test "$linkmode" != lib; then
- func_fatal_error "\`$lib' is not a convenience library"
- fi
- tmp_libs=
- for deplib in $dependency_libs; do
- deplibs="$deplib $deplibs"
-- if $opt_duplicate_deps ; then
-+ if $opt_preserve_dup_deps ; then
- case "$tmp_libs " in
-- *" $deplib "*) specialdeplibs="$specialdeplibs $deplib" ;;
-+ *" $deplib "*) func_append specialdeplibs " $deplib" ;;
- esac
- fi
-- tmp_libs="$tmp_libs $deplib"
-+ func_append tmp_libs " $deplib"
- done
- continue
- fi # $pass = conv
-@@ -5579,9 +6456,15 @@ func_mode_link ()
-
- # Get the name of the library we link against.
- linklib=
-- for l in $old_library $library_names; do
-- linklib="$l"
-- done
-+ if test -n "$old_library" &&
-+ { test "$prefer_static_libs" = yes ||
-+ test "$prefer_static_libs,$installed" = "built,no"; }; then
-+ linklib=$old_library
-+ else
-+ for l in $old_library $library_names; do
-+ linklib="$l"
-+ done
-+ fi
- if test -z "$linklib"; then
- func_fatal_error "cannot find name of link library for \`$lib'"
- fi
-@@ -5598,9 +6481,9 @@ func_mode_link ()
- # statically, we need to preload. We also need to preload any
- # dependent libraries so libltdl's deplib preloader doesn't
- # bomb out in the load deplibs phase.
-- dlprefiles="$dlprefiles $lib $dependency_libs"
-+ func_append dlprefiles " $lib $dependency_libs"
- else
-- newdlfiles="$newdlfiles $lib"
-+ func_append newdlfiles " $lib"
- fi
- continue
- fi # $pass = dlopen
-@@ -5622,14 +6505,14 @@ func_mode_link ()
-
- # Find the relevant object directory and library name.
- if test "X$installed" = Xyes; then
-- if test ! -f "$libdir/$linklib" && test -f "$abs_ladir/$linklib"; then
-+ if test ! -f "$lt_sysroot$libdir/$linklib" && test -f "$abs_ladir/$linklib"; then
- func_warning "library \`$lib' was moved."
- dir="$ladir"
- absdir="$abs_ladir"
- libdir="$abs_ladir"
- else
-- dir="$libdir"
-- absdir="$libdir"
-+ dir="$lt_sysroot$libdir"
-+ absdir="$lt_sysroot$libdir"
- fi
- test "X$hardcode_automatic" = Xyes && avoidtemprpath=yes
- else
-@@ -5637,12 +6520,12 @@ func_mode_link ()
- dir="$ladir"
- absdir="$abs_ladir"
- # Remove this search path later
-- notinst_path="$notinst_path $abs_ladir"
-+ func_append notinst_path " $abs_ladir"
- else
- dir="$ladir/$objdir"
- absdir="$abs_ladir/$objdir"
- # Remove this search path later
-- notinst_path="$notinst_path $abs_ladir"
-+ func_append notinst_path " $abs_ladir"
- fi
- fi # $installed = yes
- func_stripname 'lib' '.la' "$laname"
-@@ -5653,20 +6536,46 @@ func_mode_link ()
- if test -z "$libdir" && test "$linkmode" = prog; then
- func_fatal_error "only libraries may -dlpreopen a convenience library: \`$lib'"
- fi
-- # Prefer using a static library (so that no silly _DYNAMIC symbols
-- # are required to link).
-- if test -n "$old_library"; then
-- newdlprefiles="$newdlprefiles $dir/$old_library"
-- # Keep a list of preopened convenience libraries to check
-- # that they are being used correctly in the link pass.
-- test -z "$libdir" && \
-- dlpreconveniencelibs="$dlpreconveniencelibs $dir/$old_library"
-- # Otherwise, use the dlname, so that lt_dlopen finds it.
-- elif test -n "$dlname"; then
-- newdlprefiles="$newdlprefiles $dir/$dlname"
-- else
-- newdlprefiles="$newdlprefiles $dir/$linklib"
-- fi
-+ case "$host" in
-+ # special handling for platforms with PE-DLLs.
-+ *cygwin* | *mingw* | *cegcc* )
-+ # Linker will automatically link against shared library if both
-+ # static and shared are present. Therefore, ensure we extract
-+ # symbols from the import library if a shared library is present
-+ # (otherwise, the dlopen module name will be incorrect). We do
-+ # this by putting the import library name into $newdlprefiles.
-+ # We recover the dlopen module name by 'saving' the la file
-+ # name in a special purpose variable, and (later) extracting the
-+ # dlname from the la file.
-+ if test -n "$dlname"; then
-+ func_tr_sh "$dir/$linklib"
-+ eval "libfile_$func_tr_sh_result=\$abs_ladir/\$laname"
-+ func_append newdlprefiles " $dir/$linklib"
-+ else
-+ func_append newdlprefiles " $dir/$old_library"
-+ # Keep a list of preopened convenience libraries to check
-+ # that they are being used correctly in the link pass.
-+ test -z "$libdir" && \
-+ func_append dlpreconveniencelibs " $dir/$old_library"
-+ fi
-+ ;;
-+ * )
-+ # Prefer using a static library (so that no silly _DYNAMIC symbols
-+ # are required to link).
-+ if test -n "$old_library"; then
-+ func_append newdlprefiles " $dir/$old_library"
-+ # Keep a list of preopened convenience libraries to check
-+ # that they are being used correctly in the link pass.
-+ test -z "$libdir" && \
-+ func_append dlpreconveniencelibs " $dir/$old_library"
-+ # Otherwise, use the dlname, so that lt_dlopen finds it.
-+ elif test -n "$dlname"; then
-+ func_append newdlprefiles " $dir/$dlname"
-+ else
-+ func_append newdlprefiles " $dir/$linklib"
-+ fi
-+ ;;
-+ esac
- fi # $pass = dlpreopen
-
- if test -z "$libdir"; then
-@@ -5684,7 +6593,7 @@ func_mode_link ()
-
-
- if test "$linkmode" = prog && test "$pass" != link; then
-- newlib_search_path="$newlib_search_path $ladir"
-+ func_append newlib_search_path " $ladir"
- deplibs="$lib $deplibs"
-
- linkalldeplibs=no
-@@ -5697,7 +6606,8 @@ func_mode_link ()
- for deplib in $dependency_libs; do
- case $deplib in
- -L*) func_stripname '-L' '' "$deplib"
-- newlib_search_path="$newlib_search_path $func_stripname_result"
-+ func_resolve_sysroot "$func_stripname_result"
-+ func_append newlib_search_path " $func_resolve_sysroot_result"
- ;;
- esac
- # Need to link against all dependency_libs?
-@@ -5708,12 +6618,12 @@ func_mode_link ()
- # or/and link against static libraries
- newdependency_libs="$deplib $newdependency_libs"
- fi
-- if $opt_duplicate_deps ; then
-+ if $opt_preserve_dup_deps ; then
- case "$tmp_libs " in
-- *" $deplib "*) specialdeplibs="$specialdeplibs $deplib" ;;
-+ *" $deplib "*) func_append specialdeplibs " $deplib" ;;
- esac
- fi
-- tmp_libs="$tmp_libs $deplib"
-+ func_append tmp_libs " $deplib"
- done # for deplib
- continue
- fi # $linkmode = prog...
-@@ -5728,7 +6638,7 @@ func_mode_link ()
- # Make sure the rpath contains only unique directories.
- case "$temp_rpath:" in
- *"$absdir:"*) ;;
-- *) temp_rpath="$temp_rpath$absdir:" ;;
-+ *) func_append temp_rpath "$absdir:" ;;
- esac
- fi
-
-@@ -5740,7 +6650,7 @@ func_mode_link ()
- *)
- case "$compile_rpath " in
- *" $absdir "*) ;;
-- *) compile_rpath="$compile_rpath $absdir"
-+ *) func_append compile_rpath " $absdir" ;;
- esac
- ;;
- esac
-@@ -5749,7 +6659,7 @@ func_mode_link ()
- *)
- case "$finalize_rpath " in
- *" $libdir "*) ;;
-- *) finalize_rpath="$finalize_rpath $libdir"
-+ *) func_append finalize_rpath " $libdir" ;;
- esac
- ;;
- esac
-@@ -5774,12 +6684,12 @@ func_mode_link ()
- case $host in
- *cygwin* | *mingw* | *cegcc*)
- # No point in relinking DLLs because paths are not encoded
-- notinst_deplibs="$notinst_deplibs $lib"
-+ func_append notinst_deplibs " $lib"
- need_relink=no
- ;;
- *)
- if test "$installed" = no; then
-- notinst_deplibs="$notinst_deplibs $lib"
-+ func_append notinst_deplibs " $lib"
- need_relink=yes
- fi
- ;;
-@@ -5814,7 +6724,7 @@ func_mode_link ()
- *)
- case "$compile_rpath " in
- *" $absdir "*) ;;
-- *) compile_rpath="$compile_rpath $absdir"
-+ *) func_append compile_rpath " $absdir" ;;
- esac
- ;;
- esac
-@@ -5823,7 +6733,7 @@ func_mode_link ()
- *)
- case "$finalize_rpath " in
- *" $libdir "*) ;;
-- *) finalize_rpath="$finalize_rpath $libdir"
-+ *) func_append finalize_rpath " $libdir" ;;
- esac
- ;;
- esac
-@@ -5835,7 +6745,7 @@ func_mode_link ()
- shift
- realname="$1"
- shift
-- eval "libname=\"$libname_spec\""
-+ libname=`eval "\\$ECHO \"$libname_spec\""`
- # use dlname if we got it. it's perfectly good, no?
- if test -n "$dlname"; then
- soname="$dlname"
-@@ -5848,7 +6758,7 @@ func_mode_link ()
- versuffix="-$major"
- ;;
- esac
-- eval "soname=\"$soname_spec\""
-+ eval soname=\"$soname_spec\"
- else
- soname="$realname"
- fi
-@@ -5877,7 +6787,7 @@ func_mode_link ()
- linklib=$newlib
- fi # test -n "$old_archive_from_expsyms_cmds"
-
-- if test "$linkmode" = prog || test "$mode" != relink; then
-+ if test "$linkmode" = prog || test "$opt_mode" != relink; then
- add_shlibpath=
- add_dir=
- add=
-@@ -5933,7 +6843,7 @@ func_mode_link ()
- if test -n "$inst_prefix_dir"; then
- case $libdir in
- [\\/]*)
-- add_dir="$add_dir -L$inst_prefix_dir$libdir"
-+ func_append add_dir " -L$inst_prefix_dir$libdir"
- ;;
- esac
- fi
-@@ -5955,7 +6865,7 @@ func_mode_link ()
- if test -n "$add_shlibpath"; then
- case :$compile_shlibpath: in
- *":$add_shlibpath:"*) ;;
-- *) compile_shlibpath="$compile_shlibpath$add_shlibpath:" ;;
-+ *) func_append compile_shlibpath "$add_shlibpath:" ;;
- esac
- fi
- if test "$linkmode" = prog; then
-@@ -5969,13 +6879,13 @@ func_mode_link ()
- test "$hardcode_shlibpath_var" = yes; then
- case :$finalize_shlibpath: in
- *":$libdir:"*) ;;
-- *) finalize_shlibpath="$finalize_shlibpath$libdir:" ;;
-+ *) func_append finalize_shlibpath "$libdir:" ;;
- esac
- fi
- fi
- fi
-
-- if test "$linkmode" = prog || test "$mode" = relink; then
-+ if test "$linkmode" = prog || test "$opt_mode" = relink; then
- add_shlibpath=
- add_dir=
- add=
-@@ -5989,7 +6899,7 @@ func_mode_link ()
- elif test "$hardcode_shlibpath_var" = yes; then
- case :$finalize_shlibpath: in
- *":$libdir:"*) ;;
-- *) finalize_shlibpath="$finalize_shlibpath$libdir:" ;;
-+ *) func_append finalize_shlibpath "$libdir:" ;;
- esac
- add="-l$name"
- elif test "$hardcode_automatic" = yes; then
-@@ -6001,12 +6911,12 @@ func_mode_link ()
- fi
- else
- # We cannot seem to hardcode it, guess we'll fake it.
-- add_dir="-L$libdir"
-+ add_dir="-L$lt_sysroot$libdir"
- # Try looking first in the location we're being installed to.
- if test -n "$inst_prefix_dir"; then
- case $libdir in
- [\\/]*)
-- add_dir="$add_dir -L$inst_prefix_dir$libdir"
-+ func_append add_dir " -L$inst_prefix_dir$libdir"
- ;;
- esac
- fi
-@@ -6083,27 +6993,33 @@ func_mode_link ()
- temp_xrpath=$func_stripname_result
- case " $xrpath " in
- *" $temp_xrpath "*) ;;
-- *) xrpath="$xrpath $temp_xrpath";;
-+ *) func_append xrpath " $temp_xrpath";;
- esac;;
-- *) temp_deplibs="$temp_deplibs $libdir";;
-+ *) func_append temp_deplibs " $libdir";;
- esac
- done
- dependency_libs="$temp_deplibs"
- fi
-
-- newlib_search_path="$newlib_search_path $absdir"
-+ func_append newlib_search_path " $absdir"
- # Link against this library
- test "$link_static" = no && newdependency_libs="$abs_ladir/$laname $newdependency_libs"
- # ... and its dependency_libs
- tmp_libs=
- for deplib in $dependency_libs; do
- newdependency_libs="$deplib $newdependency_libs"
-- if $opt_duplicate_deps ; then
-+ case $deplib in
-+ -L*) func_stripname '-L' '' "$deplib"
-+ func_resolve_sysroot "$func_stripname_result";;
-+ *) func_resolve_sysroot "$deplib" ;;
-+ esac
-+ if $opt_preserve_dup_deps ; then
- case "$tmp_libs " in
-- *" $deplib "*) specialdeplibs="$specialdeplibs $deplib" ;;
-+ *" $func_resolve_sysroot_result "*)
-+ func_append specialdeplibs " $func_resolve_sysroot_result" ;;
- esac
- fi
-- tmp_libs="$tmp_libs $deplib"
-+ func_append tmp_libs " $func_resolve_sysroot_result"
- done
-
- if test "$link_all_deplibs" != no; then
-@@ -6113,8 +7029,10 @@ func_mode_link ()
- case $deplib in
- -L*) path="$deplib" ;;
- *.la)
-+ func_resolve_sysroot "$deplib"
-+ deplib=$func_resolve_sysroot_result
- func_dirname "$deplib" "" "."
-- dir="$func_dirname_result"
-+ dir=$func_dirname_result
- # We need an absolute path.
- case $dir in
- [\\/]* | [A-Za-z]:[\\/]*) absdir="$dir" ;;
-@@ -6130,7 +7048,7 @@ func_mode_link ()
- case $host in
- *-*-darwin*)
- depdepl=
-- deplibrary_names=`${SED} -n -e 's/^library_names=\(.*\)$/\1/p' $deplib`
-+ eval deplibrary_names=`${SED} -n -e 's/^library_names=\(.*\)$/\1/p' $deplib`
- if test -n "$deplibrary_names" ; then
- for tmp in $deplibrary_names ; do
- depdepl=$tmp
-@@ -6141,8 +7059,8 @@ func_mode_link ()
- if test -z "$darwin_install_name"; then
- darwin_install_name=`${OTOOL64} -L $depdepl | awk '{if (NR == 2) {print $1;exit}}'`
- fi
-- compiler_flags="$compiler_flags ${wl}-dylib_file ${wl}${darwin_install_name}:${depdepl}"
-- linker_flags="$linker_flags -dylib_file ${darwin_install_name}:${depdepl}"
-+ func_append compiler_flags " ${wl}-dylib_file ${wl}${darwin_install_name}:${depdepl}"
-+ func_append linker_flags " -dylib_file ${darwin_install_name}:${depdepl}"
- path=
- fi
- fi
-@@ -6152,7 +7070,7 @@ func_mode_link ()
- ;;
- esac
- else
-- libdir=`${SED} -n -e 's/^libdir=\(.*\)$/\1/p' $deplib`
-+ eval libdir=`${SED} -n -e 's/^libdir=\(.*\)$/\1/p' $deplib`
- test -z "$libdir" && \
- func_fatal_error "\`$deplib' is not a valid libtool archive"
- test "$absdir" != "$libdir" && \
-@@ -6192,7 +7110,7 @@ func_mode_link ()
- for dir in $newlib_search_path; do
- case "$lib_search_path " in
- *" $dir "*) ;;
-- *) lib_search_path="$lib_search_path $dir" ;;
-+ *) func_append lib_search_path " $dir" ;;
- esac
- done
- newlib_search_path=
-@@ -6205,7 +7123,7 @@ func_mode_link ()
- fi
- for var in $vars dependency_libs; do
- # Add libraries to $var in reverse order
-- eval tmp_libs=\$$var
-+ eval tmp_libs=\"\$$var\"
- new_libs=
- for deplib in $tmp_libs; do
- # FIXME: Pedantically, this is the right thing to do, so
-@@ -6250,13 +7168,13 @@ func_mode_link ()
- -L*)
- case " $tmp_libs " in
- *" $deplib "*) ;;
-- *) tmp_libs="$tmp_libs $deplib" ;;
-+ *) func_append tmp_libs " $deplib" ;;
- esac
- ;;
-- *) tmp_libs="$tmp_libs $deplib" ;;
-+ *) func_append tmp_libs " $deplib" ;;
- esac
- done
-- eval $var=\$tmp_libs
-+ eval $var=\"$tmp_libs\"
- done # for var
- fi
- # Last step: remove runtime libs from dependency_libs
-@@ -6269,7 +7187,7 @@ func_mode_link ()
- ;;
- esac
- if test -n "$i" ; then
-- tmp_libs="$tmp_libs $i"
-+ func_append tmp_libs " $i"
- fi
- done
- dependency_libs=$tmp_libs
-@@ -6310,7 +7228,7 @@ func_mode_link ()
- # Now set the variables for building old libraries.
- build_libtool_libs=no
- oldlibs="$output"
-- objs="$objs$old_deplibs"
-+ func_append objs "$old_deplibs"
- ;;
-
- lib)
-@@ -6319,8 +7237,8 @@ func_mode_link ()
- lib*)
- func_stripname 'lib' '.la' "$outputname"
- name=$func_stripname_result
-- eval "shared_ext=\"$shrext_cmds\""
-- eval "libname=\"$libname_spec\""
-+ eval shared_ext=\"$shrext_cmds\"
-+ eval libname=\"$libname_spec\"
- ;;
- *)
- test "$module" = no && \
-@@ -6330,8 +7248,8 @@ func_mode_link ()
- # Add the "lib" prefix for modules if required
- func_stripname '' '.la' "$outputname"
- name=$func_stripname_result
-- eval "shared_ext=\"$shrext_cmds\""
-- eval "libname=\"$libname_spec\""
-+ eval shared_ext=\"$shrext_cmds\"
-+ eval libname=\"$libname_spec\"
- else
- func_stripname '' '.la' "$outputname"
- libname=$func_stripname_result
-@@ -6346,7 +7264,7 @@ func_mode_link ()
- echo
- $ECHO "*** Warning: Linking the shared library $output against the non-libtool"
- $ECHO "*** objects $objs is not portable!"
-- libobjs="$libobjs $objs"
-+ func_append libobjs " $objs"
- fi
- fi
-
-@@ -6544,7 +7462,7 @@ func_mode_link ()
- done
-
- # Make executables depend on our current version.
-- verstring="$verstring:${current}.0"
-+ func_append verstring ":${current}.0"
- ;;
-
- qnx)
-@@ -6612,10 +7530,10 @@ func_mode_link ()
- fi
-
- func_generate_dlsyms "$libname" "$libname" "yes"
-- libobjs="$libobjs $symfileobj"
-+ func_append libobjs " $symfileobj"
- test "X$libobjs" = "X " && libobjs=
-
-- if test "$mode" != relink; then
-+ if test "$opt_mode" != relink; then
- # Remove our outputs, but don't remove object files since they
- # may have been created when compiling PIC objects.
- removelist=
-@@ -6631,7 +7549,7 @@ func_mode_link ()
- continue
- fi
- fi
-- removelist="$removelist $p"
-+ func_append removelist " $p"
- ;;
- *) ;;
- esac
-@@ -6642,7 +7560,7 @@ func_mode_link ()
-
- # Now set the variables for building old libraries.
- if test "$build_old_libs" = yes && test "$build_libtool_libs" != convenience ; then
-- oldlibs="$oldlibs $output_objdir/$libname.$libext"
-+ func_append oldlibs " $output_objdir/$libname.$libext"
-
- # Transform .lo files to .o files.
- oldobjs="$objs "`$ECHO "$libobjs" | $SP2NL | $SED "/\.${libext}$/d; $lo2o" | $NL2SP`
-@@ -6659,10 +7577,11 @@ func_mode_link ()
- # If the user specified any rpath flags, then add them.
- temp_xrpath=
- for libdir in $xrpath; do
-- temp_xrpath="$temp_xrpath -R$libdir"
-+ func_replace_sysroot "$libdir"
-+ func_append temp_xrpath " -R$func_replace_sysroot_result"
- case "$finalize_rpath " in
- *" $libdir "*) ;;
-- *) finalize_rpath="$finalize_rpath $libdir" ;;
-+ *) func_append finalize_rpath " $libdir" ;;
- esac
- done
- if test "$hardcode_into_libs" != yes || test "$build_old_libs" = yes; then
-@@ -6676,7 +7595,7 @@ func_mode_link ()
- for lib in $old_dlfiles; do
- case " $dlprefiles $dlfiles " in
- *" $lib "*) ;;
-- *) dlfiles="$dlfiles $lib" ;;
-+ *) func_append dlfiles " $lib" ;;
- esac
- done
-
-@@ -6686,7 +7605,7 @@ func_mode_link ()
- for lib in $old_dlprefiles; do
- case "$dlprefiles " in
- *" $lib "*) ;;
-- *) dlprefiles="$dlprefiles $lib" ;;
-+ *) func_append dlprefiles " $lib" ;;
- esac
- done
-
-@@ -6698,7 +7617,7 @@ func_mode_link ()
- ;;
- *-*-rhapsody* | *-*-darwin1.[012])
- # Rhapsody C library is in the System framework
-- deplibs="$deplibs System.ltframework"
-+ func_append deplibs " System.ltframework"
- ;;
- *-*-netbsd*)
- # Don't link with libc until the a.out ld.so is fixed.
-@@ -6715,7 +7634,7 @@ func_mode_link ()
- *)
- # Add libc to deplibs on all other systems if necessary.
- if test "$build_libtool_need_lc" = "yes"; then
-- deplibs="$deplibs -lc"
-+ func_append deplibs " -lc"
- fi
- ;;
- esac
-@@ -6764,18 +7683,18 @@ EOF
- if test "X$allow_libtool_libs_with_static_runtimes" = "Xyes" ; then
- case " $predeps $postdeps " in
- *" $i "*)
-- newdeplibs="$newdeplibs $i"
-+ func_append newdeplibs " $i"
- i=""
- ;;
- esac
- fi
- if test -n "$i" ; then
-- eval "libname=\"$libname_spec\""
-- eval "deplib_matches=\"$library_names_spec\""
-+ libname=`eval "\\$ECHO \"$libname_spec\""`
-+ deplib_matches=`eval "\\$ECHO \"$library_names_spec\""`
- set dummy $deplib_matches; shift
- deplib_match=$1
- if test `expr "$ldd_output" : ".*$deplib_match"` -ne 0 ; then
-- newdeplibs="$newdeplibs $i"
-+ func_append newdeplibs " $i"
- else
- droppeddeps=yes
- echo
-@@ -6789,7 +7708,7 @@ EOF
- fi
- ;;
- *)
-- newdeplibs="$newdeplibs $i"
-+ func_append newdeplibs " $i"
- ;;
- esac
- done
-@@ -6807,18 +7726,18 @@ EOF
- if test "X$allow_libtool_libs_with_static_runtimes" = "Xyes" ; then
- case " $predeps $postdeps " in
- *" $i "*)
-- newdeplibs="$newdeplibs $i"
-+ func_append newdeplibs " $i"
- i=""
- ;;
- esac
- fi
- if test -n "$i" ; then
-- eval "libname=\"$libname_spec\""
-- eval "deplib_matches=\"$library_names_spec\""
-+ libname=`eval "\\$ECHO \"$libname_spec\""`
-+ deplib_matches=`eval "\\$ECHO \"$library_names_spec\""`
- set dummy $deplib_matches; shift
- deplib_match=$1
- if test `expr "$ldd_output" : ".*$deplib_match"` -ne 0 ; then
-- newdeplibs="$newdeplibs $i"
-+ func_append newdeplibs " $i"
- else
- droppeddeps=yes
- echo
-@@ -6840,7 +7759,7 @@ EOF
- fi
- ;;
- *)
-- newdeplibs="$newdeplibs $i"
-+ func_append newdeplibs " $i"
- ;;
- esac
- done
-@@ -6857,15 +7776,27 @@ EOF
- if test "X$allow_libtool_libs_with_static_runtimes" = "Xyes" ; then
- case " $predeps $postdeps " in
- *" $a_deplib "*)
-- newdeplibs="$newdeplibs $a_deplib"
-+ func_append newdeplibs " $a_deplib"
- a_deplib=""
- ;;
- esac
- fi
- if test -n "$a_deplib" ; then
-- eval "libname=\"$libname_spec\""
-+ libname=`eval "\\$ECHO \"$libname_spec\""`
-+ if test -n "$file_magic_glob"; then
-+ libnameglob=`func_echo_all "$libname" | $SED -e $file_magic_glob`
-+ else
-+ libnameglob=$libname
-+ fi
-+ test "$want_nocaseglob" = yes && nocaseglob=`shopt -p nocaseglob`
- for i in $lib_search_path $sys_lib_search_path $shlib_search_path; do
-- potential_libs=`ls $i/$libname[.-]* 2>/dev/null`
-+ if test "$want_nocaseglob" = yes; then
-+ shopt -s nocaseglob
-+ potential_libs=`ls $i/$libnameglob[.-]* 2>/dev/null`
-+ $nocaseglob
-+ else
-+ potential_libs=`ls $i/$libnameglob[.-]* 2>/dev/null`
-+ fi
- for potent_lib in $potential_libs; do
- # Follow soft links.
- if ls -lLd "$potent_lib" 2>/dev/null |
-@@ -6885,10 +7816,10 @@ EOF
- *) potlib=`$ECHO "$potlib" | $SED 's,[^/]*$,,'`"$potliblink";;
- esac
- done
-- if eval "$file_magic_cmd \"\$potlib\"" 2>/dev/null |
-+ if eval $file_magic_cmd \"\$potlib\" 2>/dev/null |
- $SED -e 10q |
- $EGREP "$file_magic_regex" > /dev/null; then
-- newdeplibs="$newdeplibs $a_deplib"
-+ func_append newdeplibs " $a_deplib"
- a_deplib=""
- break 2
- fi
-@@ -6913,7 +7844,7 @@ EOF
- ;;
- *)
- # Add a -L argument.
-- newdeplibs="$newdeplibs $a_deplib"
-+ func_append newdeplibs " $a_deplib"
- ;;
- esac
- done # Gone through all deplibs.
-@@ -6929,20 +7860,20 @@ EOF
- if test "X$allow_libtool_libs_with_static_runtimes" = "Xyes" ; then
- case " $predeps $postdeps " in
- *" $a_deplib "*)
-- newdeplibs="$newdeplibs $a_deplib"
-+ func_append newdeplibs " $a_deplib"
- a_deplib=""
- ;;
- esac
- fi
- if test -n "$a_deplib" ; then
-- eval "libname=\"$libname_spec\""
-+ libname=`eval "\\$ECHO \"$libname_spec\""`
- for i in $lib_search_path $sys_lib_search_path $shlib_search_path; do
- potential_libs=`ls $i/$libname[.-]* 2>/dev/null`
- for potent_lib in $potential_libs; do
- potlib="$potent_lib" # see symlink-check above in file_magic test
- if eval "\$ECHO \"$potent_lib\"" 2>/dev/null | $SED 10q | \
- $EGREP "$match_pattern_regex" > /dev/null; then
-- newdeplibs="$newdeplibs $a_deplib"
-+ func_append newdeplibs " $a_deplib"
- a_deplib=""
- break 2
- fi
-@@ -6967,7 +7898,7 @@ EOF
- ;;
- *)
- # Add a -L argument.
-- newdeplibs="$newdeplibs $a_deplib"
-+ func_append newdeplibs " $a_deplib"
- ;;
- esac
- done # Gone through all deplibs.
-@@ -7071,7 +8002,7 @@ EOF
- *)
- case " $deplibs " in
- *" -L$path/$objdir "*)
-- new_libs="$new_libs -L$path/$objdir" ;;
-+ func_append new_libs " -L$path/$objdir" ;;
- esac
- ;;
- esac
-@@ -7081,10 +8012,10 @@ EOF
- -L*)
- case " $new_libs " in
- *" $deplib "*) ;;
-- *) new_libs="$new_libs $deplib" ;;
-+ *) func_append new_libs " $deplib" ;;
- esac
- ;;
-- *) new_libs="$new_libs $deplib" ;;
-+ *) func_append new_libs " $deplib" ;;
- esac
- done
- deplibs="$new_libs"
-@@ -7101,10 +8032,12 @@ EOF
- hardcode_libdirs=
- dep_rpath=
- rpath="$finalize_rpath"
-- test "$mode" != relink && rpath="$compile_rpath$rpath"
-+ test "$opt_mode" != relink && rpath="$compile_rpath$rpath"
- for libdir in $rpath; do
- if test -n "$hardcode_libdir_flag_spec"; then
- if test -n "$hardcode_libdir_separator"; then
-+ func_replace_sysroot "$libdir"
-+ libdir=$func_replace_sysroot_result
- if test -z "$hardcode_libdirs"; then
- hardcode_libdirs="$libdir"
- else
-@@ -7113,18 +8046,18 @@ EOF
- *"$hardcode_libdir_separator$libdir$hardcode_libdir_separator"*)
- ;;
- *)
-- hardcode_libdirs="$hardcode_libdirs$hardcode_libdir_separator$libdir"
-+ func_append hardcode_libdirs "$hardcode_libdir_separator$libdir"
- ;;
- esac
- fi
- else
-- eval "flag=\"$hardcode_libdir_flag_spec\""
-- dep_rpath="$dep_rpath $flag"
-+ eval flag=\"$hardcode_libdir_flag_spec\"
-+ func_append dep_rpath " $flag"
- fi
- elif test -n "$runpath_var"; then
- case "$perm_rpath " in
- *" $libdir "*) ;;
-- *) perm_rpath="$perm_rpath $libdir" ;;
-+ *) func_apped perm_rpath " $libdir" ;;
- esac
- fi
- done
-@@ -7133,40 +8066,38 @@ EOF
- test -n "$hardcode_libdirs"; then
- libdir="$hardcode_libdirs"
- if test -n "$hardcode_libdir_flag_spec_ld"; then
-- eval "dep_rpath=\"$hardcode_libdir_flag_spec_ld\""
-+ eval dep_rpath=\"$hardcode_libdir_flag_spec_ld\"
- else
-- eval "dep_rpath=\"$hardcode_libdir_flag_spec\""
-+ eval dep_rpath=\"$hardcode_libdir_flag_spec\"
- fi
- fi
- if test -n "$runpath_var" && test -n "$perm_rpath"; then
- # We should set the runpath_var.
- rpath=
- for dir in $perm_rpath; do
-- rpath="$rpath$dir:"
-+ func_append rpath "$dir:"
- done
-- eval $runpath_var=\$rpath\$$runpath_var
-- export $runpath_var
-+ eval "$runpath_var='$rpath\$$runpath_var'; export $runpath_var"
- fi
- test -n "$dep_rpath" && deplibs="$dep_rpath $deplibs"
- fi
-
- shlibpath="$finalize_shlibpath"
-- test "$mode" != relink && shlibpath="$compile_shlibpath$shlibpath"
-+ test "$opt_mode" != relink && shlibpath="$compile_shlibpath$shlibpath"
- if test -n "$shlibpath"; then
-- eval $shlibpath_var=\$shlibpath\$$shlibpath_var
-- export $shlibpath_var
-+ eval "$shlibpath_var='$shlibpath\$$shlibpath_var'; export $shlibpath_var"
- fi
-
- # Get the real and link names of the library.
-- eval "shared_ext=\"$shrext_cmds\""
-- eval "library_names=\"$library_names_spec\""
-+ eval shared_ext=\"$shrext_cmds\"
-+ eval library_names=\"$library_names_spec\"
- set dummy $library_names
- shift
- realname="$1"
- shift
-
- if test -n "$soname_spec"; then
-- eval "soname=\"$soname_spec\""
-+ eval soname=\"$soname_spec\"
- else
- soname="$realname"
- fi
-@@ -7178,7 +8109,7 @@ EOF
- linknames=
- for link
- do
-- linknames="$linknames $link"
-+ func_append linknames " $link"
- done
-
- # Use standard objects if they are pic
-@@ -7189,7 +8120,7 @@ EOF
- if test -n "$export_symbols" && test -n "$include_expsyms"; then
- $opt_dry_run || cp "$export_symbols" "$output_objdir/$libname.uexp"
- export_symbols="$output_objdir/$libname.uexp"
-- delfiles="$delfiles $export_symbols"
-+ func_append delfiles " $export_symbols"
- fi
-
- orig_export_symbols=
-@@ -7220,13 +8151,45 @@ EOF
- $opt_dry_run || $RM $export_symbols
- cmds=$export_symbols_cmds
- save_ifs="$IFS"; IFS='~'
-- for cmd in $cmds; do
-+ for cmd1 in $cmds; do
- IFS="$save_ifs"
-- eval "cmd=\"$cmd\""
-- func_len " $cmd"
-- len=$func_len_result
-- if test "$len" -lt "$max_cmd_len" || test "$max_cmd_len" -le -1; then
-+ # Take the normal branch if the nm_file_list_spec branch
-+ # doesn't work or if tool conversion is not needed.
-+ case $nm_file_list_spec~$to_tool_file_cmd in
-+ *~func_convert_file_noop | *~func_convert_file_msys_to_w32 | ~*)
-+ try_normal_branch=yes
-+ eval cmd=\"$cmd1\"
-+ func_len " $cmd"
-+ len=$func_len_result
-+ ;;
-+ *)
-+ try_normal_branch=no
-+ ;;
-+ esac
-+ if test "$try_normal_branch" = yes \
-+ && { test "$len" -lt "$max_cmd_len" \
-+ || test "$max_cmd_len" -le -1; }
-+ then
-+ func_show_eval "$cmd" 'exit $?'
-+ skipped_export=false
-+ elif test -n "$nm_file_list_spec"; then
-+ func_basename "$output"
-+ output_la=$func_basename_result
-+ save_libobjs=$libobjs
-+ save_output=$output
-+ output=${output_objdir}/${output_la}.nm
-+ func_to_tool_file "$output"
-+ libobjs=$nm_file_list_spec$func_to_tool_file_result
-+ func_append delfiles " $output"
-+ func_verbose "creating $NM input file list: $output"
-+ for obj in $save_libobjs; do
-+ func_to_tool_file "$obj"
-+ $ECHO "$func_to_tool_file_result"
-+ done > "$output"
-+ eval cmd=\"$cmd1\"
- func_show_eval "$cmd" 'exit $?'
-+ output=$save_output
-+ libobjs=$save_libobjs
- skipped_export=false
- else
- # The command line is too long to execute in one step.
-@@ -7248,7 +8211,7 @@ EOF
- if test -n "$export_symbols" && test -n "$include_expsyms"; then
- tmp_export_symbols="$export_symbols"
- test -n "$orig_export_symbols" && tmp_export_symbols="$orig_export_symbols"
-- $opt_dry_run || $ECHO "$include_expsyms" | $SP2NL >> "$tmp_export_symbols"
-+ $opt_dry_run || eval '$ECHO "$include_expsyms" | $SP2NL >> "$tmp_export_symbols"'
- fi
-
- if test "X$skipped_export" != "X:" && test -n "$orig_export_symbols"; then
-@@ -7260,7 +8223,7 @@ EOF
- # global variables. join(1) would be nice here, but unfortunately
- # isn't a blessed tool.
- $opt_dry_run || $SED -e '/[ ,]DATA/!d;s,\(.*\)\([ \,].*\),s|^\1$|\1\2|,' < $export_symbols > $output_objdir/$libname.filter
-- delfiles="$delfiles $export_symbols $output_objdir/$libname.filter"
-+ func_append delfiles " $export_symbols $output_objdir/$libname.filter"
- export_symbols=$output_objdir/$libname.def
- $opt_dry_run || $SED -f $output_objdir/$libname.filter < $orig_export_symbols > $export_symbols
- fi
-@@ -7270,7 +8233,7 @@ EOF
- case " $convenience " in
- *" $test_deplib "*) ;;
- *)
-- tmp_deplibs="$tmp_deplibs $test_deplib"
-+ func_append tmp_deplibs " $test_deplib"
- ;;
- esac
- done
-@@ -7286,43 +8249,43 @@ EOF
- fi
- if test -n "$whole_archive_flag_spec"; then
- save_libobjs=$libobjs
-- eval "libobjs=\"\$libobjs $whole_archive_flag_spec\""
-+ eval libobjs=\"\$libobjs $whole_archive_flag_spec\"
- test "X$libobjs" = "X " && libobjs=
- else
- gentop="$output_objdir/${outputname}x"
-- generated="$generated $gentop"
-+ func_append generated " $gentop"
-
- func_extract_archives $gentop $convenience
-- libobjs="$libobjs $func_extract_archives_result"
-+ func_append libobjs " $func_extract_archives_result"
- test "X$libobjs" = "X " && libobjs=
- fi
- fi
-
- if test "$thread_safe" = yes && test -n "$thread_safe_flag_spec"; then
-- eval "flag=\"$thread_safe_flag_spec\""
-- linker_flags="$linker_flags $flag"
-+ eval flag=\"$thread_safe_flag_spec\"
-+ func_append linker_flags " $flag"
- fi
-
- # Make a backup of the uninstalled library when relinking
-- if test "$mode" = relink; then
-- $opt_dry_run || (cd $output_objdir && $RM ${realname}U && $MV $realname ${realname}U) || exit $?
-+ if test "$opt_mode" = relink; then
-+ $opt_dry_run || eval '(cd $output_objdir && $RM ${realname}U && $MV $realname ${realname}U)' || exit $?
- fi
-
- # Do each of the archive commands.
- if test "$module" = yes && test -n "$module_cmds" ; then
- if test -n "$export_symbols" && test -n "$module_expsym_cmds"; then
-- eval "test_cmds=\"$module_expsym_cmds\""
-+ eval test_cmds=\"$module_expsym_cmds\"
- cmds=$module_expsym_cmds
- else
-- eval "test_cmds=\"$module_cmds\""
-+ eval test_cmds=\"$module_cmds\"
- cmds=$module_cmds
- fi
- else
- if test -n "$export_symbols" && test -n "$archive_expsym_cmds"; then
-- eval "test_cmds=\"$archive_expsym_cmds\""
-+ eval test_cmds=\"$archive_expsym_cmds\"
- cmds=$archive_expsym_cmds
- else
-- eval "test_cmds=\"$archive_cmds\""
-+ eval test_cmds=\"$archive_cmds\"
- cmds=$archive_cmds
- fi
- fi
-@@ -7366,10 +8329,13 @@ EOF
- echo 'INPUT (' > $output
- for obj in $save_libobjs
- do
-- $ECHO "$obj" >> $output
-+ func_to_tool_file "$obj"
-+ $ECHO "$func_to_tool_file_result" >> $output
- done
- echo ')' >> $output
-- delfiles="$delfiles $output"
-+ func_append delfiles " $output"
-+ func_to_tool_file "$output"
-+ output=$func_to_tool_file_result
- elif test -n "$save_libobjs" && test "X$skipped_export" != "X:" && test "X$file_list_spec" != X; then
- output=${output_objdir}/${output_la}.lnk
- func_verbose "creating linker input file list: $output"
-@@ -7383,15 +8349,17 @@ EOF
- fi
- for obj
- do
-- $ECHO "$obj" >> $output
-+ func_to_tool_file "$obj"
-+ $ECHO "$func_to_tool_file_result" >> $output
- done
-- delfiles="$delfiles $output"
-- output=$firstobj\"$file_list_spec$output\"
-+ func_append delfiles " $output"
-+ func_to_tool_file "$output"
-+ output=$firstobj\"$file_list_spec$func_to_tool_file_result\"
- else
- if test -n "$save_libobjs"; then
- func_verbose "creating reloadable object files..."
- output=$output_objdir/$output_la-${k}.$objext
-- eval "test_cmds=\"$reload_cmds\""
-+ eval test_cmds=\"$reload_cmds\"
- func_len " $test_cmds"
- len0=$func_len_result
- len=$len0
-@@ -7411,12 +8379,12 @@ EOF
- if test "$k" -eq 1 ; then
- # The first file doesn't have a previous command to add.
- reload_objs=$objlist
-- eval "concat_cmds=\"$reload_cmds\""
-+ eval concat_cmds=\"$reload_cmds\"
- else
- # All subsequent reloadable object files will link in
- # the last one created.
- reload_objs="$objlist $last_robj"
-- eval "concat_cmds=\"\$concat_cmds~$reload_cmds~\$RM $last_robj\""
-+ eval concat_cmds=\"\$concat_cmds~$reload_cmds~\$RM $last_robj\"
- fi
- last_robj=$output_objdir/$output_la-${k}.$objext
- func_arith $k + 1
-@@ -7433,11 +8401,11 @@ EOF
- # files will link in the last one created.
- test -z "$concat_cmds" || concat_cmds=$concat_cmds~
- reload_objs="$objlist $last_robj"
-- eval "concat_cmds=\"\${concat_cmds}$reload_cmds\""
-+ eval concat_cmds=\"\${concat_cmds}$reload_cmds\"
- if test -n "$last_robj"; then
-- eval "concat_cmds=\"\${concat_cmds}~\$RM $last_robj\""
-+ eval concat_cmds=\"\${concat_cmds}~\$RM $last_robj\"
- fi
-- delfiles="$delfiles $output"
-+ func_append delfiles " $output"
-
- else
- output=
-@@ -7450,9 +8418,9 @@ EOF
- libobjs=$output
- # Append the command to create the export file.
- test -z "$concat_cmds" || concat_cmds=$concat_cmds~
-- eval "concat_cmds=\"\$concat_cmds$export_symbols_cmds\""
-+ eval concat_cmds=\"\$concat_cmds$export_symbols_cmds\"
- if test -n "$last_robj"; then
-- eval "concat_cmds=\"\$concat_cmds~\$RM $last_robj\""
-+ eval concat_cmds=\"\$concat_cmds~\$RM $last_robj\"
- fi
- fi
-
-@@ -7471,7 +8439,7 @@ EOF
- lt_exit=$?
-
- # Restore the uninstalled library and exit
-- if test "$mode" = relink; then
-+ if test "$opt_mode" = relink; then
- ( cd "$output_objdir" && \
- $RM "${realname}T" && \
- $MV "${realname}U" "$realname" )
-@@ -7492,7 +8460,7 @@ EOF
- if test -n "$export_symbols" && test -n "$include_expsyms"; then
- tmp_export_symbols="$export_symbols"
- test -n "$orig_export_symbols" && tmp_export_symbols="$orig_export_symbols"
-- $opt_dry_run || $ECHO "$include_expsyms" | $SP2NL >> "$tmp_export_symbols"
-+ $opt_dry_run || eval '$ECHO "$include_expsyms" | $SP2NL >> "$tmp_export_symbols"'
- fi
-
- if test -n "$orig_export_symbols"; then
-@@ -7504,7 +8472,7 @@ EOF
- # global variables. join(1) would be nice here, but unfortunately
- # isn't a blessed tool.
- $opt_dry_run || $SED -e '/[ ,]DATA/!d;s,\(.*\)\([ \,].*\),s|^\1$|\1\2|,' < $export_symbols > $output_objdir/$libname.filter
-- delfiles="$delfiles $export_symbols $output_objdir/$libname.filter"
-+ func_append delfiles " $export_symbols $output_objdir/$libname.filter"
- export_symbols=$output_objdir/$libname.def
- $opt_dry_run || $SED -f $output_objdir/$libname.filter < $orig_export_symbols > $export_symbols
- fi
-@@ -7515,7 +8483,7 @@ EOF
- output=$save_output
-
- if test -n "$convenience" && test -n "$whole_archive_flag_spec"; then
-- eval "libobjs=\"\$libobjs $whole_archive_flag_spec\""
-+ eval libobjs=\"\$libobjs $whole_archive_flag_spec\"
- test "X$libobjs" = "X " && libobjs=
- fi
- # Expand the library linking commands again to reset the
-@@ -7539,23 +8507,23 @@ EOF
-
- if test -n "$delfiles"; then
- # Append the command to remove temporary files to $cmds.
-- eval "cmds=\"\$cmds~\$RM $delfiles\""
-+ eval cmds=\"\$cmds~\$RM $delfiles\"
- fi
-
- # Add any objects from preloaded convenience libraries
- if test -n "$dlprefiles"; then
- gentop="$output_objdir/${outputname}x"
-- generated="$generated $gentop"
-+ func_append generated " $gentop"
-
- func_extract_archives $gentop $dlprefiles
-- libobjs="$libobjs $func_extract_archives_result"
-+ func_append libobjs " $func_extract_archives_result"
- test "X$libobjs" = "X " && libobjs=
- fi
-
- save_ifs="$IFS"; IFS='~'
- for cmd in $cmds; do
- IFS="$save_ifs"
-- eval "cmd=\"$cmd\""
-+ eval cmd=\"$cmd\"
- $opt_silent || {
- func_quote_for_expand "$cmd"
- eval "func_echo $func_quote_for_expand_result"
-@@ -7564,7 +8532,7 @@ EOF
- lt_exit=$?
-
- # Restore the uninstalled library and exit
-- if test "$mode" = relink; then
-+ if test "$opt_mode" = relink; then
- ( cd "$output_objdir" && \
- $RM "${realname}T" && \
- $MV "${realname}U" "$realname" )
-@@ -7576,8 +8544,8 @@ EOF
- IFS="$save_ifs"
-
- # Restore the uninstalled library and exit
-- if test "$mode" = relink; then
-- $opt_dry_run || (cd $output_objdir && $RM ${realname}T && $MV $realname ${realname}T && $MV ${realname}U $realname) || exit $?
-+ if test "$opt_mode" = relink; then
-+ $opt_dry_run || eval '(cd $output_objdir && $RM ${realname}T && $MV $realname ${realname}T && $MV ${realname}U $realname)' || exit $?
-
- if test -n "$convenience"; then
- if test -z "$whole_archive_flag_spec"; then
-@@ -7656,17 +8624,20 @@ EOF
-
- if test -n "$convenience"; then
- if test -n "$whole_archive_flag_spec"; then
-- eval "tmp_whole_archive_flags=\"$whole_archive_flag_spec\""
-+ eval tmp_whole_archive_flags=\"$whole_archive_flag_spec\"
- reload_conv_objs=$reload_objs\ `$ECHO "$tmp_whole_archive_flags" | $SED 's|,| |g'`
- else
- gentop="$output_objdir/${obj}x"
-- generated="$generated $gentop"
-+ func_append generated " $gentop"
-
- func_extract_archives $gentop $convenience
- reload_conv_objs="$reload_objs $func_extract_archives_result"
- fi
- fi
-
-+ # If we're not building shared, we need to use non_pic_objs
-+ test "$build_libtool_libs" != yes && libobjs="$non_pic_objects"
-+
- # Create the old-style object.
- reload_objs="$objs$old_deplibs "`$ECHO "$libobjs" | $SP2NL | $SED "/\.${libext}$/d; /\.lib$/d; $lo2o" | $NL2SP`" $reload_conv_objs" ### testsuite: skip nested quoting test
-
-@@ -7690,7 +8661,7 @@ EOF
- # Create an invalid libtool object if no PIC, so that we don't
- # accidentally link it into a program.
- # $show "echo timestamp > $libobj"
-- # $opt_dry_run || echo timestamp > $libobj || exit $?
-+ # $opt_dry_run || eval "echo timestamp > $libobj" || exit $?
- exit $EXIT_SUCCESS
- fi
-
-@@ -7740,8 +8711,8 @@ EOF
- if test "$tagname" = CXX ; then
- case ${MACOSX_DEPLOYMENT_TARGET-10.0} in
- 10.[0123])
-- compile_command="$compile_command ${wl}-bind_at_load"
-- finalize_command="$finalize_command ${wl}-bind_at_load"
-+ func_append compile_command " ${wl}-bind_at_load"
-+ func_append finalize_command " ${wl}-bind_at_load"
- ;;
- esac
- fi
-@@ -7761,7 +8732,7 @@ EOF
- *)
- case " $compile_deplibs " in
- *" -L$path/$objdir "*)
-- new_libs="$new_libs -L$path/$objdir" ;;
-+ func_append new_libs " -L$path/$objdir" ;;
- esac
- ;;
- esac
-@@ -7771,17 +8742,17 @@ EOF
- -L*)
- case " $new_libs " in
- *" $deplib "*) ;;
-- *) new_libs="$new_libs $deplib" ;;
-+ *) func_append new_libs " $deplib" ;;
- esac
- ;;
-- *) new_libs="$new_libs $deplib" ;;
-+ *) func_append new_libs " $deplib" ;;
- esac
- done
- compile_deplibs="$new_libs"
-
-
-- compile_command="$compile_command $compile_deplibs"
-- finalize_command="$finalize_command $finalize_deplibs"
-+ func_append compile_command " $compile_deplibs"
-+ func_append finalize_command " $finalize_deplibs"
-
- if test -n "$rpath$xrpath"; then
- # If the user specified any rpath flags, then add them.
-@@ -7789,7 +8760,7 @@ EOF
- # This is the magic to use -rpath.
- case "$finalize_rpath " in
- *" $libdir "*) ;;
-- *) finalize_rpath="$finalize_rpath $libdir" ;;
-+ *) func_append finalize_rpath " $libdir" ;;
- esac
- done
- fi
-@@ -7808,18 +8779,18 @@ EOF
- *"$hardcode_libdir_separator$libdir$hardcode_libdir_separator"*)
- ;;
- *)
-- hardcode_libdirs="$hardcode_libdirs$hardcode_libdir_separator$libdir"
-+ func_append hardcode_libdirs "$hardcode_libdir_separator$libdir"
- ;;
- esac
- fi
- else
-- eval "flag=\"$hardcode_libdir_flag_spec\""
-- rpath="$rpath $flag"
-+ eval flag=\"$hardcode_libdir_flag_spec\"
-+ func_append rpath " $flag"
- fi
- elif test -n "$runpath_var"; then
- case "$perm_rpath " in
- *" $libdir "*) ;;
-- *) perm_rpath="$perm_rpath $libdir" ;;
-+ *) func_append perm_rpath " $libdir" ;;
- esac
- fi
- case $host in
-@@ -7828,12 +8799,12 @@ EOF
- case :$dllsearchpath: in
- *":$libdir:"*) ;;
- ::) dllsearchpath=$libdir;;
-- *) dllsearchpath="$dllsearchpath:$libdir";;
-+ *) func_append dllsearchpath ":$libdir";;
- esac
- case :$dllsearchpath: in
- *":$testbindir:"*) ;;
- ::) dllsearchpath=$testbindir;;
-- *) dllsearchpath="$dllsearchpath:$testbindir";;
-+ *) func_append dllsearchpath ":$testbindir";;
- esac
- ;;
- esac
-@@ -7842,7 +8813,7 @@ EOF
- if test -n "$hardcode_libdir_separator" &&
- test -n "$hardcode_libdirs"; then
- libdir="$hardcode_libdirs"
-- eval "rpath=\" $hardcode_libdir_flag_spec\""
-+ eval rpath=\" $hardcode_libdir_flag_spec\"
- fi
- compile_rpath="$rpath"
-
-@@ -7859,18 +8830,18 @@ EOF
- *"$hardcode_libdir_separator$libdir$hardcode_libdir_separator"*)
- ;;
- *)
-- hardcode_libdirs="$hardcode_libdirs$hardcode_libdir_separator$libdir"
-+ func_append hardcode_libdirs "$hardcode_libdir_separator$libdir"
- ;;
- esac
- fi
- else
-- eval "flag=\"$hardcode_libdir_flag_spec\""
-- rpath="$rpath $flag"
-+ eval flag=\"$hardcode_libdir_flag_spec\"
-+ func_append rpath " $flag"
- fi
- elif test -n "$runpath_var"; then
- case "$finalize_perm_rpath " in
- *" $libdir "*) ;;
-- *) finalize_perm_rpath="$finalize_perm_rpath $libdir" ;;
-+ *) func_append finalize_perm_rpath " $libdir" ;;
- esac
- fi
- done
-@@ -7878,7 +8849,7 @@ EOF
- if test -n "$hardcode_libdir_separator" &&
- test -n "$hardcode_libdirs"; then
- libdir="$hardcode_libdirs"
-- eval "rpath=\" $hardcode_libdir_flag_spec\""
-+ eval rpath=\" $hardcode_libdir_flag_spec\"
- fi
- finalize_rpath="$rpath"
-
-@@ -7921,6 +8892,12 @@ EOF
- exit_status=0
- func_show_eval "$link_command" 'exit_status=$?'
-
-+ if test -n "$postlink_cmds"; then
-+ func_to_tool_file "$output"
-+ postlink_cmds=`func_echo_all "$postlink_cmds" | $SED -e 's%@OUTPUT@%'"$output"'%g' -e 's%@TOOL_OUTPUT@%'"$func_to_tool_file_result"'%g'`
-+ func_execute_cmds "$postlink_cmds" 'exit $?'
-+ fi
-+
- # Delete the generated files.
- if test -f "$output_objdir/${outputname}S.${objext}"; then
- func_show_eval '$RM "$output_objdir/${outputname}S.${objext}"'
-@@ -7943,7 +8920,7 @@ EOF
- # We should set the runpath_var.
- rpath=
- for dir in $perm_rpath; do
-- rpath="$rpath$dir:"
-+ func_append rpath "$dir:"
- done
- compile_var="$runpath_var=\"$rpath\$$runpath_var\" "
- fi
-@@ -7951,7 +8928,7 @@ EOF
- # We should set the runpath_var.
- rpath=
- for dir in $finalize_perm_rpath; do
-- rpath="$rpath$dir:"
-+ func_append rpath "$dir:"
- done
- finalize_var="$runpath_var=\"$rpath\$$runpath_var\" "
- fi
-@@ -7966,6 +8943,13 @@ EOF
- $opt_dry_run || $RM $output
- # Link the executable and exit
- func_show_eval "$link_command" 'exit $?'
-+
-+ if test -n "$postlink_cmds"; then
-+ func_to_tool_file "$output"
-+ postlink_cmds=`func_echo_all "$postlink_cmds" | $SED -e 's%@OUTPUT@%'"$output"'%g' -e 's%@TOOL_OUTPUT@%'"$func_to_tool_file_result"'%g'`
-+ func_execute_cmds "$postlink_cmds" 'exit $?'
-+ fi
-+
- exit $EXIT_SUCCESS
- fi
-
-@@ -7999,6 +8983,12 @@ EOF
-
- func_show_eval "$link_command" 'exit $?'
-
-+ if test -n "$postlink_cmds"; then
-+ func_to_tool_file "$output_objdir/$outputname"
-+ postlink_cmds=`func_echo_all "$postlink_cmds" | $SED -e 's%@OUTPUT@%'"$output_objdir/$outputname"'%g' -e 's%@TOOL_OUTPUT@%'"$func_to_tool_file_result"'%g'`
-+ func_execute_cmds "$postlink_cmds" 'exit $?'
-+ fi
-+
- # Now create the wrapper script.
- func_verbose "creating $output"
-
-@@ -8096,7 +9086,7 @@ EOF
- else
- oldobjs="$old_deplibs $non_pic_objects"
- if test "$preload" = yes && test -f "$symfileobj"; then
-- oldobjs="$oldobjs $symfileobj"
-+ func_append oldobjs " $symfileobj"
- fi
- fi
- addlibs="$old_convenience"
-@@ -8104,10 +9094,10 @@ EOF
-
- if test -n "$addlibs"; then
- gentop="$output_objdir/${outputname}x"
-- generated="$generated $gentop"
-+ func_append generated " $gentop"
-
- func_extract_archives $gentop $addlibs
-- oldobjs="$oldobjs $func_extract_archives_result"
-+ func_append oldobjs " $func_extract_archives_result"
- fi
-
- # Do each command in the archive commands.
-@@ -8118,10 +9108,10 @@ EOF
- # Add any objects from preloaded convenience libraries
- if test -n "$dlprefiles"; then
- gentop="$output_objdir/${outputname}x"
-- generated="$generated $gentop"
-+ func_append generated " $gentop"
-
- func_extract_archives $gentop $dlprefiles
-- oldobjs="$oldobjs $func_extract_archives_result"
-+ func_append oldobjs " $func_extract_archives_result"
- fi
-
- # POSIX demands no paths to be encoded in archives. We have
-@@ -8139,7 +9129,7 @@ EOF
- else
- echo "copying selected object files to avoid basename conflicts..."
- gentop="$output_objdir/${outputname}x"
-- generated="$generated $gentop"
-+ func_append generated " $gentop"
- func_mkdir_p "$gentop"
- save_oldobjs=$oldobjs
- oldobjs=
-@@ -8163,18 +9153,28 @@ EOF
- esac
- done
- func_show_eval "ln $obj $gentop/$newobj || cp $obj $gentop/$newobj"
-- oldobjs="$oldobjs $gentop/$newobj"
-+ func_append oldobjs " $gentop/$newobj"
- ;;
-- *) oldobjs="$oldobjs $obj" ;;
-+ *) func_append oldobjs " $obj" ;;
- esac
- done
- fi
-- eval "cmds=\"$old_archive_cmds\""
-+ eval cmds=\"$old_archive_cmds\"
-
- func_len " $cmds"
- len=$func_len_result
- if test "$len" -lt "$max_cmd_len" || test "$max_cmd_len" -le -1; then
- cmds=$old_archive_cmds
-+ elif test -n "$archiver_list_spec"; then
-+ func_verbose "using command file archive linking..."
-+ for obj in $oldobjs
-+ do
-+ func_to_tool_file "$obj"
-+ $ECHO "$func_to_tool_file_result"
-+ done > $output_objdir/$libname.libcmd
-+ func_to_tool_file "$output_objdir/$libname.libcmd"
-+ oldobjs=" $archiver_list_spec$func_to_tool_file_result"
-+ cmds=$old_archive_cmds
- else
- # the command line is too long to link in one step, link in parts
- func_verbose "using piecewise archive linking..."
-@@ -8189,7 +9189,7 @@ EOF
- do
- last_oldobj=$obj
- done
-- eval "test_cmds=\"$old_archive_cmds\""
-+ eval test_cmds=\"$old_archive_cmds\"
- func_len " $test_cmds"
- len0=$func_len_result
- len=$len0
-@@ -8208,7 +9208,7 @@ EOF
- RANLIB=$save_RANLIB
- fi
- test -z "$concat_cmds" || concat_cmds=$concat_cmds~
-- eval "concat_cmds=\"\${concat_cmds}$old_archive_cmds\""
-+ eval concat_cmds=\"\${concat_cmds}$old_archive_cmds\"
- objlist=
- len=$len0
- fi
-@@ -8216,9 +9216,9 @@ EOF
- RANLIB=$save_RANLIB
- oldobjs=$objlist
- if test "X$oldobjs" = "X" ; then
-- eval "cmds=\"\$concat_cmds\""
-+ eval cmds=\"\$concat_cmds\"
- else
-- eval "cmds=\"\$concat_cmds~\$old_archive_cmds\""
-+ eval cmds=\"\$concat_cmds~\$old_archive_cmds\"
- fi
- fi
- fi
-@@ -8268,12 +9268,23 @@ EOF
- *.la)
- func_basename "$deplib"
- name="$func_basename_result"
-- libdir=`${SED} -n -e 's/^libdir=\(.*\)$/\1/p' $deplib`
-+ func_resolve_sysroot "$deplib"
-+ eval libdir=`${SED} -n -e 's/^libdir=\(.*\)$/\1/p' $func_resolve_sysroot_result`
- test -z "$libdir" && \
- func_fatal_error "\`$deplib' is not a valid libtool archive"
-- newdependency_libs="$newdependency_libs $libdir/$name"
-+ func_append newdependency_libs " ${lt_sysroot:+=}$libdir/$name"
-+ ;;
-+ -L*)
-+ func_stripname -L '' "$deplib"
-+ func_replace_sysroot "$func_stripname_result"
-+ func_append newdependency_libs " -L$func_replace_sysroot_result"
- ;;
-- *) newdependency_libs="$newdependency_libs $deplib" ;;
-+ -R*)
-+ func_stripname -R '' "$deplib"
-+ func_replace_sysroot "$func_stripname_result"
-+ func_append newdependency_libs " -R$func_replace_sysroot_result"
-+ ;;
-+ *) func_append newdependency_libs " $deplib" ;;
- esac
- done
- dependency_libs="$newdependency_libs"
-@@ -8284,12 +9295,14 @@ EOF
- *.la)
- func_basename "$lib"
- name="$func_basename_result"
-- libdir=`${SED} -n -e 's/^libdir=\(.*\)$/\1/p' $lib`
-+ func_resolve_sysroot "$lib"
-+ eval libdir=`${SED} -n -e 's/^libdir=\(.*\)$/\1/p' $func_resolve_sysroot_result`
-+
- test -z "$libdir" && \
- func_fatal_error "\`$lib' is not a valid libtool archive"
-- newdlfiles="$newdlfiles $libdir/$name"
-+ func_append newdlfiles " ${lt_sysroot:+=}$libdir/$name"
- ;;
-- *) newdlfiles="$newdlfiles $lib" ;;
-+ *) func_append newdlfiles " $lib" ;;
- esac
- done
- dlfiles="$newdlfiles"
-@@ -8303,10 +9316,11 @@ EOF
- # the library:
- func_basename "$lib"
- name="$func_basename_result"
-- libdir=`${SED} -n -e 's/^libdir=\(.*\)$/\1/p' $lib`
-+ func_resolve_sysroot "$lib"
-+ eval libdir=`${SED} -n -e 's/^libdir=\(.*\)$/\1/p' $func_resolve_sysroot_result`
- test -z "$libdir" && \
- func_fatal_error "\`$lib' is not a valid libtool archive"
-- newdlprefiles="$newdlprefiles $libdir/$name"
-+ func_append newdlprefiles " ${lt_sysroot:+=}$libdir/$name"
- ;;
- esac
- done
-@@ -8318,7 +9332,7 @@ EOF
- [\\/]* | [A-Za-z]:[\\/]*) abs="$lib" ;;
- *) abs=`pwd`"/$lib" ;;
- esac
-- newdlfiles="$newdlfiles $abs"
-+ func_append newdlfiles " $abs"
- done
- dlfiles="$newdlfiles"
- newdlprefiles=
-@@ -8327,7 +9341,7 @@ EOF
- [\\/]* | [A-Za-z]:[\\/]*) abs="$lib" ;;
- *) abs=`pwd`"/$lib" ;;
- esac
-- newdlprefiles="$newdlprefiles $abs"
-+ func_append newdlprefiles " $abs"
- done
- dlprefiles="$newdlprefiles"
- fi
-@@ -8412,7 +9426,7 @@ relink_command=\"$relink_command\""
- exit $EXIT_SUCCESS
- }
-
--{ test "$mode" = link || test "$mode" = relink; } &&
-+{ test "$opt_mode" = link || test "$opt_mode" = relink; } &&
- func_mode_link ${1+"$@"}
-
-
-@@ -8432,9 +9446,9 @@ func_mode_uninstall ()
- for arg
- do
- case $arg in
-- -f) RM="$RM $arg"; rmforce=yes ;;
-- -*) RM="$RM $arg" ;;
-- *) files="$files $arg" ;;
-+ -f) func_append RM " $arg"; rmforce=yes ;;
-+ -*) func_append RM " $arg" ;;
-+ *) func_append files " $arg" ;;
- esac
- done
-
-@@ -8443,24 +9457,23 @@ func_mode_uninstall ()
-
- rmdirs=
-
-- origobjdir="$objdir"
- for file in $files; do
- func_dirname "$file" "" "."
- dir="$func_dirname_result"
- if test "X$dir" = X.; then
-- objdir="$origobjdir"
-+ odir="$objdir"
- else
-- objdir="$dir/$origobjdir"
-+ odir="$dir/$objdir"
- fi
- func_basename "$file"
- name="$func_basename_result"
-- test "$mode" = uninstall && objdir="$dir"
-+ test "$opt_mode" = uninstall && odir="$dir"
-
-- # Remember objdir for removal later, being careful to avoid duplicates
-- if test "$mode" = clean; then
-+ # Remember odir for removal later, being careful to avoid duplicates
-+ if test "$opt_mode" = clean; then
- case " $rmdirs " in
-- *" $objdir "*) ;;
-- *) rmdirs="$rmdirs $objdir" ;;
-+ *" $odir "*) ;;
-+ *) func_append rmdirs " $odir" ;;
- esac
- fi
-
-@@ -8486,18 +9499,17 @@ func_mode_uninstall ()
-
- # Delete the libtool libraries and symlinks.
- for n in $library_names; do
-- rmfiles="$rmfiles $objdir/$n"
-+ func_append rmfiles " $odir/$n"
- done
-- test -n "$old_library" && rmfiles="$rmfiles $objdir/$old_library"
-+ test -n "$old_library" && func_append rmfiles " $odir/$old_library"
-
-- case "$mode" in
-+ case "$opt_mode" in
- clean)
-- case " $library_names " in
-- # " " in the beginning catches empty $dlname
-+ case " $library_names " in
- *" $dlname "*) ;;
-- *) rmfiles="$rmfiles $objdir/$dlname" ;;
-+ *) test -n "$dlname" && func_append rmfiles " $odir/$dlname" ;;
- esac
-- test -n "$libdir" && rmfiles="$rmfiles $objdir/$name $objdir/${name}i"
-+ test -n "$libdir" && func_append rmfiles " $odir/$name $odir/${name}i"
- ;;
- uninstall)
- if test -n "$library_names"; then
-@@ -8525,19 +9537,19 @@ func_mode_uninstall ()
- # Add PIC object to the list of files to remove.
- if test -n "$pic_object" &&
- test "$pic_object" != none; then
-- rmfiles="$rmfiles $dir/$pic_object"
-+ func_append rmfiles " $dir/$pic_object"
- fi
-
- # Add non-PIC object to the list of files to remove.
- if test -n "$non_pic_object" &&
- test "$non_pic_object" != none; then
-- rmfiles="$rmfiles $dir/$non_pic_object"
-+ func_append rmfiles " $dir/$non_pic_object"
- fi
- fi
- ;;
-
- *)
-- if test "$mode" = clean ; then
-+ if test "$opt_mode" = clean ; then
- noexename=$name
- case $file in
- *.exe)
-@@ -8547,7 +9559,7 @@ func_mode_uninstall ()
- noexename=$func_stripname_result
- # $file with .exe has already been added to rmfiles,
- # add $file without .exe
-- rmfiles="$rmfiles $file"
-+ func_append rmfiles " $file"
- ;;
- esac
- # Do a test to see if this is a libtool program.
-@@ -8556,7 +9568,7 @@ func_mode_uninstall ()
- func_ltwrapper_scriptname "$file"
- relink_command=
- func_source $func_ltwrapper_scriptname_result
-- rmfiles="$rmfiles $func_ltwrapper_scriptname_result"
-+ func_append rmfiles " $func_ltwrapper_scriptname_result"
- else
- relink_command=
- func_source $dir/$noexename
-@@ -8564,12 +9576,12 @@ func_mode_uninstall ()
-
- # note $name still contains .exe if it was in $file originally
- # as does the version of $file that was added into $rmfiles
-- rmfiles="$rmfiles $objdir/$name $objdir/${name}S.${objext}"
-+ func_append rmfiles " $odir/$name $odir/${name}S.${objext}"
- if test "$fast_install" = yes && test -n "$relink_command"; then
-- rmfiles="$rmfiles $objdir/lt-$name"
-+ func_append rmfiles " $odir/lt-$name"
- fi
- if test "X$noexename" != "X$name" ; then
-- rmfiles="$rmfiles $objdir/lt-${noexename}.c"
-+ func_append rmfiles " $odir/lt-${noexename}.c"
- fi
- fi
- fi
-@@ -8577,7 +9589,6 @@ func_mode_uninstall ()
- esac
- func_show_eval "$RM $rmfiles" 'exit_status=1'
- done
-- objdir="$origobjdir"
-
- # Try to remove the ${objdir}s in the directories where we deleted files
- for dir in $rmdirs; do
-@@ -8589,16 +9600,16 @@ func_mode_uninstall ()
- exit $exit_status
- }
-
--{ test "$mode" = uninstall || test "$mode" = clean; } &&
-+{ test "$opt_mode" = uninstall || test "$opt_mode" = clean; } &&
- func_mode_uninstall ${1+"$@"}
-
--test -z "$mode" && {
-+test -z "$opt_mode" && {
- help="$generic_help"
- func_fatal_help "you must specify a MODE"
- }
-
- test -z "$exec_cmd" && \
-- func_fatal_help "invalid operation mode \`$mode'"
-+ func_fatal_help "invalid operation mode \`$opt_mode'"
-
- if test -n "$exec_cmd"; then
- eval exec "$exec_cmd"
-diff --git a/ltoptions.m4 b/ltoptions.m4
-index 5ef12ced2a8..17cfd51c0b3 100644
---- a/ltoptions.m4
-+++ b/ltoptions.m4
-@@ -8,7 +8,7 @@
- # unlimited permission to copy and/or distribute it, with or without
- # modifications, as long as this notice is preserved.
-
--# serial 6 ltoptions.m4
-+# serial 7 ltoptions.m4
-
- # This is to help aclocal find these macros, as it can't see m4_define.
- AC_DEFUN([LTOPTIONS_VERSION], [m4_if([1])])
-diff --git a/ltversion.m4 b/ltversion.m4
-index bf87f77132d..9c7b5d41185 100644
---- a/ltversion.m4
-+++ b/ltversion.m4
-@@ -7,17 +7,17 @@
- # unlimited permission to copy and/or distribute it, with or without
- # modifications, as long as this notice is preserved.
-
--# Generated from ltversion.in.
-+# @configure_input@
-
--# serial 3134 ltversion.m4
-+# serial 3293 ltversion.m4
- # This file is part of GNU Libtool
-
--m4_define([LT_PACKAGE_VERSION], [2.2.7a])
--m4_define([LT_PACKAGE_REVISION], [1.3134])
-+m4_define([LT_PACKAGE_VERSION], [2.4])
-+m4_define([LT_PACKAGE_REVISION], [1.3293])
-
- AC_DEFUN([LTVERSION_VERSION],
--[macro_version='2.2.7a'
--macro_revision='1.3134'
-+[macro_version='2.4'
-+macro_revision='1.3293'
- _LT_DECL(, macro_version, 0, [Which release of libtool.m4 was used?])
- _LT_DECL(, macro_revision, 0)
- ])
-diff --git a/lt~obsolete.m4 b/lt~obsolete.m4
-index bf92b5e0790..c573da90c5c 100644
---- a/lt~obsolete.m4
-+++ b/lt~obsolete.m4
-@@ -7,7 +7,7 @@
- # unlimited permission to copy and/or distribute it, with or without
- # modifications, as long as this notice is preserved.
-
--# serial 4 lt~obsolete.m4
-+# serial 5 lt~obsolete.m4
-
- # These exist entirely to fool aclocal when bootstrapping libtool.
- #
-diff --git a/opcodes/configure b/opcodes/configure
-index 6690a502b2f..badcc0776df 100755
---- a/opcodes/configure
-+++ b/opcodes/configure
-@@ -682,6 +682,9 @@ OTOOL
- LIPO
- NMEDIT
- DSYMUTIL
-+MANIFEST_TOOL
-+ac_ct_AR
-+DLLTOOL
- OBJDUMP
- LN_S
- NM
-@@ -800,6 +803,7 @@ enable_static
- with_pic
- enable_fast_install
- with_gnu_ld
-+with_libtool_sysroot
- enable_libtool_lock
- enable_checking
- enable_targets
-@@ -1468,6 +1472,8 @@ Optional Packages:
- --with-pic try to use only PIC/non-PIC objects [default=use
- both]
- --with-gnu-ld assume the C compiler uses GNU ld [default=no]
-+ --with-libtool-sysroot=DIR Search for dependent libraries within DIR
-+ (or the compiler's sysroot if not specified).
-
- Some influential environment variables:
- CC C compiler command
-@@ -4977,8 +4983,8 @@ esac
-
-
-
--macro_version='2.2.7a'
--macro_revision='1.3134'
-+macro_version='2.4'
-+macro_revision='1.3293'
-
-
-
-@@ -5018,7 +5024,7 @@ ECHO=$ECHO$ECHO$ECHO$ECHO$ECHO$ECHO
- { $as_echo "$as_me:${as_lineno-$LINENO}: checking how to print strings" >&5
- $as_echo_n "checking how to print strings... " >&6; }
- # Test print first, because it will be a builtin if present.
--if test "X`print -r -- -n 2>/dev/null`" = X-n && \
-+if test "X`( print -r -- -n ) 2>/dev/null`" = X-n && \
- test "X`print -r -- $ECHO 2>/dev/null`" = "X$ECHO"; then
- ECHO='print -r --'
- elif test "X`printf %s $ECHO 2>/dev/null`" = "X$ECHO"; then
-@@ -5705,8 +5711,8 @@ $as_echo_n "checking whether the shell understands some XSI constructs... " >&6;
- # Try some XSI features
- xsi_shell=no
- ( _lt_dummy="a/b/c"
-- test "${_lt_dummy##*/},${_lt_dummy%/*},"${_lt_dummy%"$_lt_dummy"}, \
-- = c,a/b,, \
-+ test "${_lt_dummy##*/},${_lt_dummy%/*},${_lt_dummy#??}"${_lt_dummy%"$_lt_dummy"}, \
-+ = c,a/b,b/c, \
- && eval 'test $(( 1 + 1 )) -eq 2 \
- && test "${#_lt_dummy}" -eq 5' ) >/dev/null 2>&1 \
- && xsi_shell=yes
-@@ -5755,6 +5761,80 @@ esac
-
-
-
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to convert $build file names to $host format" >&5
-+$as_echo_n "checking how to convert $build file names to $host format... " >&6; }
-+if ${lt_cv_to_host_file_cmd+:} false; then :
-+ $as_echo_n "(cached) " >&6
-+else
-+ case $host in
-+ *-*-mingw* )
-+ case $build in
-+ *-*-mingw* ) # actually msys
-+ lt_cv_to_host_file_cmd=func_convert_file_msys_to_w32
-+ ;;
-+ *-*-cygwin* )
-+ lt_cv_to_host_file_cmd=func_convert_file_cygwin_to_w32
-+ ;;
-+ * ) # otherwise, assume *nix
-+ lt_cv_to_host_file_cmd=func_convert_file_nix_to_w32
-+ ;;
-+ esac
-+ ;;
-+ *-*-cygwin* )
-+ case $build in
-+ *-*-mingw* ) # actually msys
-+ lt_cv_to_host_file_cmd=func_convert_file_msys_to_cygwin
-+ ;;
-+ *-*-cygwin* )
-+ lt_cv_to_host_file_cmd=func_convert_file_noop
-+ ;;
-+ * ) # otherwise, assume *nix
-+ lt_cv_to_host_file_cmd=func_convert_file_nix_to_cygwin
-+ ;;
-+ esac
-+ ;;
-+ * ) # unhandled hosts (and "normal" native builds)
-+ lt_cv_to_host_file_cmd=func_convert_file_noop
-+ ;;
-+esac
-+
-+fi
-+
-+to_host_file_cmd=$lt_cv_to_host_file_cmd
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_to_host_file_cmd" >&5
-+$as_echo "$lt_cv_to_host_file_cmd" >&6; }
-+
-+
-+
-+
-+
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to convert $build file names to toolchain format" >&5
-+$as_echo_n "checking how to convert $build file names to toolchain format... " >&6; }
-+if ${lt_cv_to_tool_file_cmd+:} false; then :
-+ $as_echo_n "(cached) " >&6
-+else
-+ #assume ordinary cross tools, or native build.
-+lt_cv_to_tool_file_cmd=func_convert_file_noop
-+case $host in
-+ *-*-mingw* )
-+ case $build in
-+ *-*-mingw* ) # actually msys
-+ lt_cv_to_tool_file_cmd=func_convert_file_msys_to_w32
-+ ;;
-+ esac
-+ ;;
-+esac
-+
-+fi
-+
-+to_tool_file_cmd=$lt_cv_to_tool_file_cmd
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_to_tool_file_cmd" >&5
-+$as_echo "$lt_cv_to_tool_file_cmd" >&6; }
-+
-+
-+
-+
-+
- { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $LD option to reload object files" >&5
- $as_echo_n "checking for $LD option to reload object files... " >&6; }
- if ${lt_cv_ld_reload_flag+:} false; then :
-@@ -5771,6 +5851,11 @@ case $reload_flag in
- esac
- reload_cmds='$LD$reload_flag -o $output$reload_objs'
- case $host_os in
-+ cygwin* | mingw* | pw32* | cegcc*)
-+ if test "$GCC" != yes; then
-+ reload_cmds=false
-+ fi
-+ ;;
- darwin*)
- if test "$GCC" = yes; then
- reload_cmds='$LTCC $LTCFLAGS -nostdlib ${wl}-r -o $output$reload_objs'
-@@ -5939,7 +6024,8 @@ mingw* | pw32*)
- lt_cv_deplibs_check_method='file_magic ^x86 archive import|^x86 DLL'
- lt_cv_file_magic_cmd='func_win32_libid'
- else
-- lt_cv_deplibs_check_method='file_magic file format pei*-i386(.*architecture: i386)?'
-+ # Keep this pattern in sync with the one in func_win32_libid.
-+ lt_cv_deplibs_check_method='file_magic file format (pei*-i386(.*architecture: i386)?|pe-arm-wince|pe-x86-64)'
- lt_cv_file_magic_cmd='$OBJDUMP -f'
- fi
- ;;
-@@ -6093,6 +6179,21 @@ esac
- fi
- { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_deplibs_check_method" >&5
- $as_echo "$lt_cv_deplibs_check_method" >&6; }
-+
-+file_magic_glob=
-+want_nocaseglob=no
-+if test "$build" = "$host"; then
-+ case $host_os in
-+ mingw* | pw32*)
-+ if ( shopt | grep nocaseglob ) >/dev/null 2>&1; then
-+ want_nocaseglob=yes
-+ else
-+ file_magic_glob=`echo aAbBcCdDeEfFgGhHiIjJkKlLmMnNoOpPqQrRsStTuUvVwWxXyYzZ | $SED -e "s/\(..\)/s\/[\1]\/[\1]\/g;/g"`
-+ fi
-+ ;;
-+ esac
-+fi
-+
- file_magic_cmd=$lt_cv_file_magic_cmd
- deplibs_check_method=$lt_cv_deplibs_check_method
- test -z "$deplibs_check_method" && deplibs_check_method=unknown
-@@ -6108,6 +6209,157 @@ test -z "$deplibs_check_method" && deplibs_check_method=unknown
-
-
-
-+
-+
-+
-+
-+
-+
-+
-+
-+
-+
-+if test -n "$ac_tool_prefix"; then
-+ # Extract the first word of "${ac_tool_prefix}dlltool", so it can be a program name with args.
-+set dummy ${ac_tool_prefix}dlltool; ac_word=$2
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-+$as_echo_n "checking for $ac_word... " >&6; }
-+if ${ac_cv_prog_DLLTOOL+:} false; then :
-+ $as_echo_n "(cached) " >&6
-+else
-+ if test -n "$DLLTOOL"; then
-+ ac_cv_prog_DLLTOOL="$DLLTOOL" # Let the user override the test.
-+else
-+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-+for as_dir in $PATH
-+do
-+ IFS=$as_save_IFS
-+ test -z "$as_dir" && as_dir=.
-+ for ac_exec_ext in '' $ac_executable_extensions; do
-+ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
-+ ac_cv_prog_DLLTOOL="${ac_tool_prefix}dlltool"
-+ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
-+ break 2
-+ fi
-+done
-+ done
-+IFS=$as_save_IFS
-+
-+fi
-+fi
-+DLLTOOL=$ac_cv_prog_DLLTOOL
-+if test -n "$DLLTOOL"; then
-+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $DLLTOOL" >&5
-+$as_echo "$DLLTOOL" >&6; }
-+else
-+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-+$as_echo "no" >&6; }
-+fi
-+
-+
-+fi
-+if test -z "$ac_cv_prog_DLLTOOL"; then
-+ ac_ct_DLLTOOL=$DLLTOOL
-+ # Extract the first word of "dlltool", so it can be a program name with args.
-+set dummy dlltool; ac_word=$2
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-+$as_echo_n "checking for $ac_word... " >&6; }
-+if ${ac_cv_prog_ac_ct_DLLTOOL+:} false; then :
-+ $as_echo_n "(cached) " >&6
-+else
-+ if test -n "$ac_ct_DLLTOOL"; then
-+ ac_cv_prog_ac_ct_DLLTOOL="$ac_ct_DLLTOOL" # Let the user override the test.
-+else
-+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-+for as_dir in $PATH
-+do
-+ IFS=$as_save_IFS
-+ test -z "$as_dir" && as_dir=.
-+ for ac_exec_ext in '' $ac_executable_extensions; do
-+ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
-+ ac_cv_prog_ac_ct_DLLTOOL="dlltool"
-+ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
-+ break 2
-+ fi
-+done
-+ done
-+IFS=$as_save_IFS
-+
-+fi
-+fi
-+ac_ct_DLLTOOL=$ac_cv_prog_ac_ct_DLLTOOL
-+if test -n "$ac_ct_DLLTOOL"; then
-+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_DLLTOOL" >&5
-+$as_echo "$ac_ct_DLLTOOL" >&6; }
-+else
-+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-+$as_echo "no" >&6; }
-+fi
-+
-+ if test "x$ac_ct_DLLTOOL" = x; then
-+ DLLTOOL="false"
-+ else
-+ case $cross_compiling:$ac_tool_warned in
-+yes:)
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
-+$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
-+ac_tool_warned=yes ;;
-+esac
-+ DLLTOOL=$ac_ct_DLLTOOL
-+ fi
-+else
-+ DLLTOOL="$ac_cv_prog_DLLTOOL"
-+fi
-+
-+test -z "$DLLTOOL" && DLLTOOL=dlltool
-+
-+
-+
-+
-+
-+
-+
-+
-+
-+
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to associate runtime and link libraries" >&5
-+$as_echo_n "checking how to associate runtime and link libraries... " >&6; }
-+if ${lt_cv_sharedlib_from_linklib_cmd+:} false; then :
-+ $as_echo_n "(cached) " >&6
-+else
-+ lt_cv_sharedlib_from_linklib_cmd='unknown'
-+
-+case $host_os in
-+cygwin* | mingw* | pw32* | cegcc*)
-+ # two different shell functions defined in ltmain.sh
-+ # decide which to use based on capabilities of $DLLTOOL
-+ case `$DLLTOOL --help 2>&1` in
-+ *--identify-strict*)
-+ lt_cv_sharedlib_from_linklib_cmd=func_cygming_dll_for_implib
-+ ;;
-+ *)
-+ lt_cv_sharedlib_from_linklib_cmd=func_cygming_dll_for_implib_fallback
-+ ;;
-+ esac
-+ ;;
-+*)
-+ # fallback: assume linklib IS sharedlib
-+ lt_cv_sharedlib_from_linklib_cmd="$ECHO"
-+ ;;
-+esac
-+
-+fi
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_sharedlib_from_linklib_cmd" >&5
-+$as_echo "$lt_cv_sharedlib_from_linklib_cmd" >&6; }
-+sharedlib_from_linklib_cmd=$lt_cv_sharedlib_from_linklib_cmd
-+test -z "$sharedlib_from_linklib_cmd" && sharedlib_from_linklib_cmd=$ECHO
-+
-+
-+
-+
-+
-+
-+
- plugin_option=
- plugin_names="liblto_plugin.so liblto_plugin-0.dll cyglto_plugin-0.dll"
- for plugin in $plugin_names; do
-@@ -6122,8 +6374,10 @@ for plugin in $plugin_names; do
- done
-
- if test -n "$ac_tool_prefix"; then
-- # Extract the first word of "${ac_tool_prefix}ar", so it can be a program name with args.
--set dummy ${ac_tool_prefix}ar; ac_word=$2
-+ for ac_prog in ar
-+ do
-+ # Extract the first word of "$ac_tool_prefix$ac_prog", so it can be a program name with args.
-+set dummy $ac_tool_prefix$ac_prog; ac_word=$2
- { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
- $as_echo_n "checking for $ac_word... " >&6; }
- if ${ac_cv_prog_AR+:} false; then :
-@@ -6139,7 +6393,7 @@ do
- test -z "$as_dir" && as_dir=.
- for ac_exec_ext in '' $ac_executable_extensions; do
- if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
-- ac_cv_prog_AR="${ac_tool_prefix}ar"
-+ ac_cv_prog_AR="$ac_tool_prefix$ac_prog"
- $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
- break 2
- fi
-@@ -6159,11 +6413,15 @@ $as_echo "no" >&6; }
- fi
-
-
-+ test -n "$AR" && break
-+ done
- fi
--if test -z "$ac_cv_prog_AR"; then
-+if test -z "$AR"; then
- ac_ct_AR=$AR
-- # Extract the first word of "ar", so it can be a program name with args.
--set dummy ar; ac_word=$2
-+ for ac_prog in ar
-+do
-+ # Extract the first word of "$ac_prog", so it can be a program name with args.
-+set dummy $ac_prog; ac_word=$2
- { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
- $as_echo_n "checking for $ac_word... " >&6; }
- if ${ac_cv_prog_ac_ct_AR+:} false; then :
-@@ -6179,7 +6437,7 @@ do
- test -z "$as_dir" && as_dir=.
- for ac_exec_ext in '' $ac_executable_extensions; do
- if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
-- ac_cv_prog_ac_ct_AR="ar"
-+ ac_cv_prog_ac_ct_AR="$ac_prog"
- $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
- break 2
- fi
-@@ -6198,6 +6456,10 @@ else
- $as_echo "no" >&6; }
- fi
-
-+
-+ test -n "$ac_ct_AR" && break
-+done
-+
- if test "x$ac_ct_AR" = x; then
- AR="false"
- else
-@@ -6209,25 +6471,20 @@ ac_tool_warned=yes ;;
- esac
- AR=$ac_ct_AR
- fi
--else
-- AR="$ac_cv_prog_AR"
- fi
-
--test -z "$AR" && AR=ar
--if test -n "$plugin_option"; then
-- if $AR --help 2>&1 | grep -q "\--plugin"; then
-- touch conftest.c
-- $AR $plugin_option rc conftest.a conftest.c
-- if test "$?" != 0; then
-- { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: Failed: $AR $plugin_option rc" >&5
-+ touch conftest.c
-+ $AR $plugin_option rc conftest.a conftest.c
-+ if test "$?" != 0; then
-+ { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: Failed: $AR $plugin_option rc" >&5
- $as_echo "$as_me: WARNING: Failed: $AR $plugin_option rc" >&2;}
-- else
-- AR="$AR $plugin_option"
-- fi
-- rm -f conftest.*
-+ else
-+ AR="$AR $plugin_option"
- fi
--fi
--test -z "$AR_FLAGS" && AR_FLAGS=cru
-+ rm -f conftest.*
-+: ${AR=ar}
-+: ${AR_FLAGS=cru}
-+
-
-
-
-@@ -6238,6 +6495,63 @@ test -z "$AR_FLAGS" && AR_FLAGS=cru
-
-
-
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for archiver @FILE support" >&5
-+$as_echo_n "checking for archiver @FILE support... " >&6; }
-+if ${lt_cv_ar_at_file+:} false; then :
-+ $as_echo_n "(cached) " >&6
-+else
-+ lt_cv_ar_at_file=no
-+ cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-+/* end confdefs.h. */
-+
-+int
-+main ()
-+{
-+
-+ ;
-+ return 0;
-+}
-+_ACEOF
-+if ac_fn_c_try_compile "$LINENO"; then :
-+ echo conftest.$ac_objext > conftest.lst
-+ lt_ar_try='$AR $AR_FLAGS libconftest.a @conftest.lst >&5'
-+ { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$lt_ar_try\""; } >&5
-+ (eval $lt_ar_try) 2>&5
-+ ac_status=$?
-+ $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
-+ test $ac_status = 0; }
-+ if test "$ac_status" -eq 0; then
-+ # Ensure the archiver fails upon bogus file names.
-+ rm -f conftest.$ac_objext libconftest.a
-+ { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$lt_ar_try\""; } >&5
-+ (eval $lt_ar_try) 2>&5
-+ ac_status=$?
-+ $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
-+ test $ac_status = 0; }
-+ if test "$ac_status" -ne 0; then
-+ lt_cv_ar_at_file=@
-+ fi
-+ fi
-+ rm -f conftest.* libconftest.a
-+
-+fi
-+rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
-+
-+fi
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_ar_at_file" >&5
-+$as_echo "$lt_cv_ar_at_file" >&6; }
-+
-+if test "x$lt_cv_ar_at_file" = xno; then
-+ archiver_list_spec=
-+else
-+ archiver_list_spec=$lt_cv_ar_at_file
-+fi
-+
-+
-+
-+
-+
-+
-
- if test -n "$ac_tool_prefix"; then
- # Extract the first word of "${ac_tool_prefix}strip", so it can be a program name with args.
-@@ -6578,8 +6892,8 @@ esac
- lt_cv_sys_global_symbol_to_cdecl="sed -n -e 's/^T .* \(.*\)$/extern int \1();/p' -e 's/^$symcode* .* \(.*\)$/extern char \1;/p'"
-
- # Transform an extracted symbol line into symbol name and symbol address
--lt_cv_sys_global_symbol_to_c_name_address="sed -n -e 's/^: \([^ ]*\) $/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"\2\", (void *) \&\2},/p'"
--lt_cv_sys_global_symbol_to_c_name_address_lib_prefix="sed -n -e 's/^: \([^ ]*\) $/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \(lib[^ ]*\)$/ {\"\2\", (void *) \&\2},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"lib\2\", (void *) \&\2},/p'"
-+lt_cv_sys_global_symbol_to_c_name_address="sed -n -e 's/^: \([^ ]*\)[ ]*$/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"\2\", (void *) \&\2},/p'"
-+lt_cv_sys_global_symbol_to_c_name_address_lib_prefix="sed -n -e 's/^: \([^ ]*\)[ ]*$/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \(lib[^ ]*\)$/ {\"\2\", (void *) \&\2},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"lib\2\", (void *) \&\2},/p'"
-
- # Handle CRLF in mingw tool chain
- opt_cr=
-@@ -6615,6 +6929,7 @@ for ac_symprfx in "" "_"; do
- else
- lt_cv_sys_global_symbol_pipe="sed -n -e 's/^.*[ ]\($symcode$symcode*\)[ ][ ]*$ac_symprfx$sympat$opt_cr$/$symxfrm/p'"
- fi
-+ lt_cv_sys_global_symbol_pipe="$lt_cv_sys_global_symbol_pipe | sed '/ __gnu_lto/d'"
-
- # Check to see that the pipe works correctly.
- pipe_works=no
-@@ -6656,6 +6971,18 @@ _LT_EOF
- if $GREP ' nm_test_var$' "$nlist" >/dev/null; then
- if $GREP ' nm_test_func$' "$nlist" >/dev/null; then
- cat <<_LT_EOF > conftest.$ac_ext
-+/* Keep this code in sync between libtool.m4, ltmain, lt_system.h, and tests. */
-+#if defined(_WIN32) || defined(__CYGWIN__) || defined(_WIN32_WCE)
-+/* DATA imports from DLLs on WIN32 con't be const, because runtime
-+ relocations are performed -- see ld's documentation on pseudo-relocs. */
-+# define LT_DLSYM_CONST
-+#elif defined(__osf__)
-+/* This system does not cope well with relocations in const data. */
-+# define LT_DLSYM_CONST
-+#else
-+# define LT_DLSYM_CONST const
-+#endif
-+
- #ifdef __cplusplus
- extern "C" {
- #endif
-@@ -6667,7 +6994,7 @@ _LT_EOF
- cat <<_LT_EOF >> conftest.$ac_ext
-
- /* The mapping between symbol names and symbols. */
--const struct {
-+LT_DLSYM_CONST struct {
- const char *name;
- void *address;
- }
-@@ -6693,8 +7020,8 @@ static const void *lt_preloaded_setup() {
- _LT_EOF
- # Now try linking the two files.
- mv conftest.$ac_objext conftstm.$ac_objext
-- lt_save_LIBS="$LIBS"
-- lt_save_CFLAGS="$CFLAGS"
-+ lt_globsym_save_LIBS=$LIBS
-+ lt_globsym_save_CFLAGS=$CFLAGS
- LIBS="conftstm.$ac_objext"
- CFLAGS="$CFLAGS$lt_prog_compiler_no_builtin_flag"
- if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_link\""; } >&5
-@@ -6704,8 +7031,8 @@ _LT_EOF
- test $ac_status = 0; } && test -s conftest${ac_exeext}; then
- pipe_works=yes
- fi
-- LIBS="$lt_save_LIBS"
-- CFLAGS="$lt_save_CFLAGS"
-+ LIBS=$lt_globsym_save_LIBS
-+ CFLAGS=$lt_globsym_save_CFLAGS
- else
- echo "cannot find nm_test_func in $nlist" >&5
- fi
-@@ -6742,6 +7069,14 @@ else
- $as_echo "ok" >&6; }
- fi
-
-+# Response file support.
-+if test "$lt_cv_nm_interface" = "MS dumpbin"; then
-+ nm_file_list_spec='@'
-+elif $NM --help 2>/dev/null | grep '[@]FILE' >/dev/null; then
-+ nm_file_list_spec='@'
-+fi
-+
-+
-
-
-
-@@ -6760,6 +7095,47 @@ fi
-
-
-
-+
-+
-+
-+
-+
-+
-+
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for sysroot" >&5
-+$as_echo_n "checking for sysroot... " >&6; }
-+
-+# Check whether --with-libtool-sysroot was given.
-+if test "${with_libtool_sysroot+set}" = set; then :
-+ withval=$with_libtool_sysroot;
-+else
-+ with_libtool_sysroot=no
-+fi
-+
-+
-+lt_sysroot=
-+case ${with_libtool_sysroot} in #(
-+ yes)
-+ if test "$GCC" = yes; then
-+ lt_sysroot=`$CC --print-sysroot 2>/dev/null`
-+ fi
-+ ;; #(
-+ /*)
-+ lt_sysroot=`echo "$with_libtool_sysroot" | sed -e "$sed_quote_subst"`
-+ ;; #(
-+ no|'')
-+ ;; #(
-+ *)
-+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: ${with_libtool_sysroot}" >&5
-+$as_echo "${with_libtool_sysroot}" >&6; }
-+ as_fn_error $? "The sysroot must be an absolute path." "$LINENO" 5
-+ ;;
-+esac
-+
-+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: ${lt_sysroot:-no}" >&5
-+$as_echo "${lt_sysroot:-no}" >&6; }
-+
-+
-
-
-
-@@ -6969,6 +7345,123 @@ esac
-
- need_locks="$enable_libtool_lock"
-
-+if test -n "$ac_tool_prefix"; then
-+ # Extract the first word of "${ac_tool_prefix}mt", so it can be a program name with args.
-+set dummy ${ac_tool_prefix}mt; ac_word=$2
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-+$as_echo_n "checking for $ac_word... " >&6; }
-+if ${ac_cv_prog_MANIFEST_TOOL+:} false; then :
-+ $as_echo_n "(cached) " >&6
-+else
-+ if test -n "$MANIFEST_TOOL"; then
-+ ac_cv_prog_MANIFEST_TOOL="$MANIFEST_TOOL" # Let the user override the test.
-+else
-+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-+for as_dir in $PATH
-+do
-+ IFS=$as_save_IFS
-+ test -z "$as_dir" && as_dir=.
-+ for ac_exec_ext in '' $ac_executable_extensions; do
-+ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
-+ ac_cv_prog_MANIFEST_TOOL="${ac_tool_prefix}mt"
-+ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
-+ break 2
-+ fi
-+done
-+ done
-+IFS=$as_save_IFS
-+
-+fi
-+fi
-+MANIFEST_TOOL=$ac_cv_prog_MANIFEST_TOOL
-+if test -n "$MANIFEST_TOOL"; then
-+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $MANIFEST_TOOL" >&5
-+$as_echo "$MANIFEST_TOOL" >&6; }
-+else
-+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-+$as_echo "no" >&6; }
-+fi
-+
-+
-+fi
-+if test -z "$ac_cv_prog_MANIFEST_TOOL"; then
-+ ac_ct_MANIFEST_TOOL=$MANIFEST_TOOL
-+ # Extract the first word of "mt", so it can be a program name with args.
-+set dummy mt; ac_word=$2
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-+$as_echo_n "checking for $ac_word... " >&6; }
-+if ${ac_cv_prog_ac_ct_MANIFEST_TOOL+:} false; then :
-+ $as_echo_n "(cached) " >&6
-+else
-+ if test -n "$ac_ct_MANIFEST_TOOL"; then
-+ ac_cv_prog_ac_ct_MANIFEST_TOOL="$ac_ct_MANIFEST_TOOL" # Let the user override the test.
-+else
-+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-+for as_dir in $PATH
-+do
-+ IFS=$as_save_IFS
-+ test -z "$as_dir" && as_dir=.
-+ for ac_exec_ext in '' $ac_executable_extensions; do
-+ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
-+ ac_cv_prog_ac_ct_MANIFEST_TOOL="mt"
-+ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
-+ break 2
-+ fi
-+done
-+ done
-+IFS=$as_save_IFS
-+
-+fi
-+fi
-+ac_ct_MANIFEST_TOOL=$ac_cv_prog_ac_ct_MANIFEST_TOOL
-+if test -n "$ac_ct_MANIFEST_TOOL"; then
-+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_MANIFEST_TOOL" >&5
-+$as_echo "$ac_ct_MANIFEST_TOOL" >&6; }
-+else
-+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-+$as_echo "no" >&6; }
-+fi
-+
-+ if test "x$ac_ct_MANIFEST_TOOL" = x; then
-+ MANIFEST_TOOL=":"
-+ else
-+ case $cross_compiling:$ac_tool_warned in
-+yes:)
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
-+$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
-+ac_tool_warned=yes ;;
-+esac
-+ MANIFEST_TOOL=$ac_ct_MANIFEST_TOOL
-+ fi
-+else
-+ MANIFEST_TOOL="$ac_cv_prog_MANIFEST_TOOL"
-+fi
-+
-+test -z "$MANIFEST_TOOL" && MANIFEST_TOOL=mt
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking if $MANIFEST_TOOL is a manifest tool" >&5
-+$as_echo_n "checking if $MANIFEST_TOOL is a manifest tool... " >&6; }
-+if ${lt_cv_path_mainfest_tool+:} false; then :
-+ $as_echo_n "(cached) " >&6
-+else
-+ lt_cv_path_mainfest_tool=no
-+ echo "$as_me:$LINENO: $MANIFEST_TOOL '-?'" >&5
-+ $MANIFEST_TOOL '-?' 2>conftest.err > conftest.out
-+ cat conftest.err >&5
-+ if $GREP 'Manifest Tool' conftest.out > /dev/null; then
-+ lt_cv_path_mainfest_tool=yes
-+ fi
-+ rm -f conftest*
-+fi
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_path_mainfest_tool" >&5
-+$as_echo "$lt_cv_path_mainfest_tool" >&6; }
-+if test "x$lt_cv_path_mainfest_tool" != xyes; then
-+ MANIFEST_TOOL=:
-+fi
-+
-+
-+
-+
-+
-
- case $host_os in
- rhapsody* | darwin*)
-@@ -7532,6 +8025,8 @@ _LT_EOF
- $LTCC $LTCFLAGS -c -o conftest.o conftest.c 2>&5
- echo "$AR cru libconftest.a conftest.o" >&5
- $AR cru libconftest.a conftest.o 2>&5
-+ echo "$RANLIB libconftest.a" >&5
-+ $RANLIB libconftest.a 2>&5
- cat > conftest.c << _LT_EOF
- int main() { return 0;}
- _LT_EOF
-@@ -8084,8 +8579,6 @@ fi
- lt_prog_compiler_pic=
- lt_prog_compiler_static=
-
--{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $compiler option to produce PIC" >&5
--$as_echo_n "checking for $compiler option to produce PIC... " >&6; }
-
- if test "$GCC" = yes; then
- lt_prog_compiler_wl='-Wl,'
-@@ -8251,6 +8744,12 @@ $as_echo_n "checking for $compiler option to produce PIC... " >&6; }
- lt_prog_compiler_pic='--shared'
- lt_prog_compiler_static='--static'
- ;;
-+ nagfor*)
-+ # NAG Fortran compiler
-+ lt_prog_compiler_wl='-Wl,-Wl,,'
-+ lt_prog_compiler_pic='-PIC'
-+ lt_prog_compiler_static='-Bstatic'
-+ ;;
- pgcc* | pgf77* | pgf90* | pgf95* | pgfortran*)
- # Portland Group compilers (*not* the Pentium gcc compiler,
- # which looks to be a dead project)
-@@ -8313,7 +8812,7 @@ $as_echo_n "checking for $compiler option to produce PIC... " >&6; }
- lt_prog_compiler_pic='-KPIC'
- lt_prog_compiler_static='-Bstatic'
- case $cc_basename in
-- f77* | f90* | f95*)
-+ f77* | f90* | f95* | sunf77* | sunf90* | sunf95*)
- lt_prog_compiler_wl='-Qoption ld ';;
- *)
- lt_prog_compiler_wl='-Wl,';;
-@@ -8370,13 +8869,17 @@ case $host_os in
- lt_prog_compiler_pic="$lt_prog_compiler_pic -DPIC"
- ;;
- esac
--{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_prog_compiler_pic" >&5
--$as_echo "$lt_prog_compiler_pic" >&6; }
--
--
--
--
-
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $compiler option to produce PIC" >&5
-+$as_echo_n "checking for $compiler option to produce PIC... " >&6; }
-+if ${lt_cv_prog_compiler_pic+:} false; then :
-+ $as_echo_n "(cached) " >&6
-+else
-+ lt_cv_prog_compiler_pic=$lt_prog_compiler_pic
-+fi
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_pic" >&5
-+$as_echo "$lt_cv_prog_compiler_pic" >&6; }
-+lt_prog_compiler_pic=$lt_cv_prog_compiler_pic
-
- #
- # Check to make sure the PIC flag actually works.
-@@ -8437,6 +8940,11 @@ fi
-
-
-
-+
-+
-+
-+
-+
- #
- # Check to make sure the static flag actually works.
- #
-@@ -8787,7 +9295,8 @@ _LT_EOF
- allow_undefined_flag=unsupported
- always_export_symbols=no
- enable_shared_with_static_runtimes=yes
-- export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1 DATA/'\'' | $SED -e '\''/^[AITW][ ]/s/.*[ ]//'\'' | sort | uniq > $export_symbols'
-+ export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1 DATA/;s/^.*[ ]__nm__\([^ ]*\)[ ][^ ]*/\1 DATA/;/^I[ ]/d;/^[AITW][ ]/s/.* //'\'' | sort | uniq > $export_symbols'
-+ exclude_expsyms='[_]+GLOBAL_OFFSET_TABLE_|[_]+GLOBAL__[FID]_.*|[_]+head_[A-Za-z0-9_]+_dll|[A-Za-z0-9_]+_dll_iname'
-
- if $LD --help 2>&1 | $GREP 'auto-import' > /dev/null; then
- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib'
-@@ -8886,12 +9395,12 @@ _LT_EOF
- whole_archive_flag_spec='--whole-archive$convenience --no-whole-archive'
- hardcode_libdir_flag_spec=
- hardcode_libdir_flag_spec_ld='-rpath $libdir'
-- archive_cmds='$LD -shared $libobjs $deplibs $compiler_flags -soname $soname -o $lib'
-+ archive_cmds='$LD -shared $libobjs $deplibs $linker_flags -soname $soname -o $lib'
- if test "x$supports_anon_versioning" = xyes; then
- archive_expsym_cmds='echo "{ global:" > $output_objdir/$libname.ver~
- cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~
- echo "local: *; };" >> $output_objdir/$libname.ver~
-- $LD -shared $libobjs $deplibs $compiler_flags -soname $soname -version-script $output_objdir/$libname.ver -o $lib'
-+ $LD -shared $libobjs $deplibs $linker_flags -soname $soname -version-script $output_objdir/$libname.ver -o $lib'
- fi
- ;;
- esac
-@@ -8905,8 +9414,8 @@ _LT_EOF
- archive_cmds='$LD -Bshareable $libobjs $deplibs $linker_flags -o $lib'
- wlarc=
- else
-- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
-- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
-+ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
-+ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
- fi
- ;;
-
-@@ -8924,8 +9433,8 @@ _LT_EOF
-
- _LT_EOF
- elif $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then
-- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
-- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
-+ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
-+ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
- else
- ld_shlibs=no
- fi
-@@ -8971,8 +9480,8 @@ _LT_EOF
-
- *)
- if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then
-- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
-- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
-+ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
-+ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
- else
- ld_shlibs=no
- fi
-@@ -9102,7 +9611,13 @@ _LT_EOF
- allow_undefined_flag='-berok'
- # Determine the default libpath from the value encoded in an
- # empty executable.
-- cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-+ if test "${lt_cv_aix_libpath+set}" = set; then
-+ aix_libpath=$lt_cv_aix_libpath
-+else
-+ if ${lt_cv_aix_libpath_+:} false; then :
-+ $as_echo_n "(cached) " >&6
-+else
-+ cat confdefs.h - <<_ACEOF >conftest.$ac_ext
- /* end confdefs.h. */
-
- int
-@@ -9115,22 +9630,29 @@ main ()
- _ACEOF
- if ac_fn_c_try_link "$LINENO"; then :
-
--lt_aix_libpath_sed='
-- /Import File Strings/,/^$/ {
-- /^0/ {
-- s/^0 *\(.*\)$/\1/
-- p
-- }
-- }'
--aix_libpath=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
--# Check for a 64-bit object if we didn't find anything.
--if test -z "$aix_libpath"; then
-- aix_libpath=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
--fi
-+ lt_aix_libpath_sed='
-+ /Import File Strings/,/^$/ {
-+ /^0/ {
-+ s/^0 *\([^ ]*\) *$/\1/
-+ p
-+ }
-+ }'
-+ lt_cv_aix_libpath_=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
-+ # Check for a 64-bit object if we didn't find anything.
-+ if test -z "$lt_cv_aix_libpath_"; then
-+ lt_cv_aix_libpath_=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
-+ fi
- fi
- rm -f core conftest.err conftest.$ac_objext \
- conftest$ac_exeext conftest.$ac_ext
--if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
-+ if test -z "$lt_cv_aix_libpath_"; then
-+ lt_cv_aix_libpath_="/usr/lib:/lib"
-+ fi
-+
-+fi
-+
-+ aix_libpath=$lt_cv_aix_libpath_
-+fi
-
- hardcode_libdir_flag_spec='${wl}-blibpath:$libdir:'"$aix_libpath"
- archive_expsym_cmds='$CC -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags `if test "x${allow_undefined_flag}" != "x"; then func_echo_all "${wl}${allow_undefined_flag}"; else :; fi` '"\${wl}$exp_sym_flag:\$export_symbols $shared_flag"
-@@ -9142,7 +9664,13 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
- else
- # Determine the default libpath from the value encoded in an
- # empty executable.
-- cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-+ if test "${lt_cv_aix_libpath+set}" = set; then
-+ aix_libpath=$lt_cv_aix_libpath
-+else
-+ if ${lt_cv_aix_libpath_+:} false; then :
-+ $as_echo_n "(cached) " >&6
-+else
-+ cat confdefs.h - <<_ACEOF >conftest.$ac_ext
- /* end confdefs.h. */
-
- int
-@@ -9155,22 +9683,29 @@ main ()
- _ACEOF
- if ac_fn_c_try_link "$LINENO"; then :
-
--lt_aix_libpath_sed='
-- /Import File Strings/,/^$/ {
-- /^0/ {
-- s/^0 *\(.*\)$/\1/
-- p
-- }
-- }'
--aix_libpath=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
--# Check for a 64-bit object if we didn't find anything.
--if test -z "$aix_libpath"; then
-- aix_libpath=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
--fi
-+ lt_aix_libpath_sed='
-+ /Import File Strings/,/^$/ {
-+ /^0/ {
-+ s/^0 *\([^ ]*\) *$/\1/
-+ p
-+ }
-+ }'
-+ lt_cv_aix_libpath_=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
-+ # Check for a 64-bit object if we didn't find anything.
-+ if test -z "$lt_cv_aix_libpath_"; then
-+ lt_cv_aix_libpath_=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
-+ fi
- fi
- rm -f core conftest.err conftest.$ac_objext \
- conftest$ac_exeext conftest.$ac_ext
--if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
-+ if test -z "$lt_cv_aix_libpath_"; then
-+ lt_cv_aix_libpath_="/usr/lib:/lib"
-+ fi
-+
-+fi
-+
-+ aix_libpath=$lt_cv_aix_libpath_
-+fi
-
- hardcode_libdir_flag_spec='${wl}-blibpath:$libdir:'"$aix_libpath"
- # Warning - without using the other run time loading flags,
-@@ -9215,20 +9750,63 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
- # Microsoft Visual C++.
- # hardcode_libdir_flag_spec is actually meaningless, as there is
- # no search path for DLLs.
-- hardcode_libdir_flag_spec=' '
-- allow_undefined_flag=unsupported
-- # Tell ltmain to make .lib files, not .a files.
-- libext=lib
-- # Tell ltmain to make .dll files, not .so files.
-- shrext_cmds=".dll"
-- # FIXME: Setting linknames here is a bad hack.
-- archive_cmds='$CC -o $lib $libobjs $compiler_flags `func_echo_all "$deplibs" | $SED '\''s/ -lc$//'\''` -link -dll~linknames='
-- # The linker will automatically build a .lib file if we build a DLL.
-- old_archive_from_new_cmds='true'
-- # FIXME: Should let the user specify the lib program.
-- old_archive_cmds='lib -OUT:$oldlib$oldobjs$old_deplibs'
-- fix_srcfile_path='`cygpath -w "$srcfile"`'
-- enable_shared_with_static_runtimes=yes
-+ case $cc_basename in
-+ cl*)
-+ # Native MSVC
-+ hardcode_libdir_flag_spec=' '
-+ allow_undefined_flag=unsupported
-+ always_export_symbols=yes
-+ file_list_spec='@'
-+ # Tell ltmain to make .lib files, not .a files.
-+ libext=lib
-+ # Tell ltmain to make .dll files, not .so files.
-+ shrext_cmds=".dll"
-+ # FIXME: Setting linknames here is a bad hack.
-+ archive_cmds='$CC -o $output_objdir/$soname $libobjs $compiler_flags $deplibs -Wl,-dll~linknames='
-+ archive_expsym_cmds='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then
-+ sed -n -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' -e '1\\\!p' < $export_symbols > $output_objdir/$soname.exp;
-+ else
-+ sed -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' < $export_symbols > $output_objdir/$soname.exp;
-+ fi~
-+ $CC -o $tool_output_objdir$soname $libobjs $compiler_flags $deplibs "@$tool_output_objdir$soname.exp" -Wl,-DLL,-IMPLIB:"$tool_output_objdir$libname.dll.lib"~
-+ linknames='
-+ # The linker will not automatically build a static lib if we build a DLL.
-+ # _LT_TAGVAR(old_archive_from_new_cmds, )='true'
-+ enable_shared_with_static_runtimes=yes
-+ export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1,DATA/'\'' | $SED -e '\''/^[AITW][ ]/s/.*[ ]//'\'' | sort | uniq > $export_symbols'
-+ # Don't use ranlib
-+ old_postinstall_cmds='chmod 644 $oldlib'
-+ postlink_cmds='lt_outputfile="@OUTPUT@"~
-+ lt_tool_outputfile="@TOOL_OUTPUT@"~
-+ case $lt_outputfile in
-+ *.exe|*.EXE) ;;
-+ *)
-+ lt_outputfile="$lt_outputfile.exe"
-+ lt_tool_outputfile="$lt_tool_outputfile.exe"
-+ ;;
-+ esac~
-+ if test "$MANIFEST_TOOL" != ":" && test -f "$lt_outputfile.manifest"; then
-+ $MANIFEST_TOOL -manifest "$lt_tool_outputfile.manifest" -outputresource:"$lt_tool_outputfile" || exit 1;
-+ $RM "$lt_outputfile.manifest";
-+ fi'
-+ ;;
-+ *)
-+ # Assume MSVC wrapper
-+ hardcode_libdir_flag_spec=' '
-+ allow_undefined_flag=unsupported
-+ # Tell ltmain to make .lib files, not .a files.
-+ libext=lib
-+ # Tell ltmain to make .dll files, not .so files.
-+ shrext_cmds=".dll"
-+ # FIXME: Setting linknames here is a bad hack.
-+ archive_cmds='$CC -o $lib $libobjs $compiler_flags `func_echo_all "$deplibs" | $SED '\''s/ -lc$//'\''` -link -dll~linknames='
-+ # The linker will automatically build a .lib file if we build a DLL.
-+ old_archive_from_new_cmds='true'
-+ # FIXME: Should let the user specify the lib program.
-+ old_archive_cmds='lib -OUT:$oldlib$oldobjs$old_deplibs'
-+ enable_shared_with_static_runtimes=yes
-+ ;;
-+ esac
- ;;
-
- darwin* | rhapsody*)
-@@ -9289,7 +9867,7 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
-
- # FreeBSD 3 and greater uses gcc -shared to do shared libraries.
- freebsd* | dragonfly*)
-- archive_cmds='$CC -shared -o $lib $libobjs $deplibs $compiler_flags'
-+ archive_cmds='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags'
- hardcode_libdir_flag_spec='-R$libdir'
- hardcode_direct=yes
- hardcode_shlibpath_var=no
-@@ -9297,7 +9875,7 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
-
- hpux9*)
- if test "$GCC" = yes; then
-- archive_cmds='$RM $output_objdir/$soname~$CC -shared -fPIC ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $libobjs $deplibs $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib'
-+ archive_cmds='$RM $output_objdir/$soname~$CC -shared $pic_flag ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $libobjs $deplibs $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib'
- else
- archive_cmds='$RM $output_objdir/$soname~$LD -b +b $install_libdir -o $output_objdir/$soname $libobjs $deplibs $linker_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib'
- fi
-@@ -9313,7 +9891,7 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
-
- hpux10*)
- if test "$GCC" = yes && test "$with_gnu_ld" = no; then
-- archive_cmds='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'
-+ archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'
- else
- archive_cmds='$LD -b +h $soname +b $install_libdir -o $lib $libobjs $deplibs $linker_flags'
- fi
-@@ -9337,10 +9915,10 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
- archive_cmds='$CC -shared ${wl}+h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags'
- ;;
- ia64*)
-- archive_cmds='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags'
-+ archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags'
- ;;
- *)
-- archive_cmds='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'
-+ archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'
- ;;
- esac
- else
-@@ -9419,23 +9997,36 @@ fi
-
- irix5* | irix6* | nonstopux*)
- if test "$GCC" = yes; then
-- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
-+ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
- # Try to use the -exported_symbol ld option, if it does not
- # work, assume that -exports_file does not work either and
- # implicitly export all symbols.
-- save_LDFLAGS="$LDFLAGS"
-- LDFLAGS="$LDFLAGS -shared ${wl}-exported_symbol ${wl}foo ${wl}-update_registry ${wl}/dev/null"
-- cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-+ # This should be the same for all languages, so no per-tag cache variable.
-+ { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the $host_os linker accepts -exported_symbol" >&5
-+$as_echo_n "checking whether the $host_os linker accepts -exported_symbol... " >&6; }
-+if ${lt_cv_irix_exported_symbol+:} false; then :
-+ $as_echo_n "(cached) " >&6
-+else
-+ save_LDFLAGS="$LDFLAGS"
-+ LDFLAGS="$LDFLAGS -shared ${wl}-exported_symbol ${wl}foo ${wl}-update_registry ${wl}/dev/null"
-+ cat confdefs.h - <<_ACEOF >conftest.$ac_ext
- /* end confdefs.h. */
--int foo(void) {}
-+int foo (void) { return 0; }
- _ACEOF
- if ac_fn_c_try_link "$LINENO"; then :
-- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations ${wl}-exports_file ${wl}$export_symbols -o $lib'
--
-+ lt_cv_irix_exported_symbol=yes
-+else
-+ lt_cv_irix_exported_symbol=no
- fi
- rm -f core conftest.err conftest.$ac_objext \
- conftest$ac_exeext conftest.$ac_ext
-- LDFLAGS="$save_LDFLAGS"
-+ LDFLAGS="$save_LDFLAGS"
-+fi
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_irix_exported_symbol" >&5
-+$as_echo "$lt_cv_irix_exported_symbol" >&6; }
-+ if test "$lt_cv_irix_exported_symbol" = yes; then
-+ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations ${wl}-exports_file ${wl}$export_symbols -o $lib'
-+ fi
- else
- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib'
- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -exports_file $export_symbols -o $lib'
-@@ -9520,7 +10111,7 @@ rm -f core conftest.err conftest.$ac_objext \
- osf4* | osf5*) # as osf3* with the addition of -msym flag
- if test "$GCC" = yes; then
- allow_undefined_flag=' ${wl}-expect_unresolved ${wl}\*'
-- archive_cmds='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
-+ archive_cmds='$CC -shared${allow_undefined_flag} $pic_flag $libobjs $deplibs $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
- hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir'
- else
- allow_undefined_flag=' -expect_unresolved \*'
-@@ -9539,9 +10130,9 @@ rm -f core conftest.err conftest.$ac_objext \
- no_undefined_flag=' -z defs'
- if test "$GCC" = yes; then
- wlarc='${wl}'
-- archive_cmds='$CC -shared ${wl}-z ${wl}text ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags'
-+ archive_cmds='$CC -shared $pic_flag ${wl}-z ${wl}text ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags'
- archive_expsym_cmds='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~
-- $CC -shared ${wl}-z ${wl}text ${wl}-M ${wl}$lib.exp ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp'
-+ $CC -shared $pic_flag ${wl}-z ${wl}text ${wl}-M ${wl}$lib.exp ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp'
- else
- case `$CC -V 2>&1` in
- *"Compilers 5.0"*)
-@@ -10117,8 +10708,9 @@ cygwin* | mingw* | pw32* | cegcc*)
- need_version=no
- need_lib_prefix=no
-
-- case $GCC,$host_os in
-- yes,cygwin* | yes,mingw* | yes,pw32* | yes,cegcc*)
-+ case $GCC,$cc_basename in
-+ yes,*)
-+ # gcc
- library_names_spec='$libname.dll.a'
- # DLL is installed to $(libdir)/../bin by postinstall_cmds
- postinstall_cmds='base_file=`basename \${file}`~
-@@ -10151,13 +10743,71 @@ cygwin* | mingw* | pw32* | cegcc*)
- library_names_spec='`echo ${libname} | sed -e 's/^lib/pw/'``echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}'
- ;;
- esac
-+ dynamic_linker='Win32 ld.exe'
-+ ;;
-+
-+ *,cl*)
-+ # Native MSVC
-+ libname_spec='$name'
-+ soname_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}'
-+ library_names_spec='${libname}.dll.lib'
-+
-+ case $build_os in
-+ mingw*)
-+ sys_lib_search_path_spec=
-+ lt_save_ifs=$IFS
-+ IFS=';'
-+ for lt_path in $LIB
-+ do
-+ IFS=$lt_save_ifs
-+ # Let DOS variable expansion print the short 8.3 style file name.
-+ lt_path=`cd "$lt_path" 2>/dev/null && cmd //C "for %i in (".") do @echo %~si"`
-+ sys_lib_search_path_spec="$sys_lib_search_path_spec $lt_path"
-+ done
-+ IFS=$lt_save_ifs
-+ # Convert to MSYS style.
-+ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | sed -e 's|\\\\|/|g' -e 's| \\([a-zA-Z]\\):| /\\1|g' -e 's|^ ||'`
-+ ;;
-+ cygwin*)
-+ # Convert to unix form, then to dos form, then back to unix form
-+ # but this time dos style (no spaces!) so that the unix form looks
-+ # like /cygdrive/c/PROGRA~1:/cygdr...
-+ sys_lib_search_path_spec=`cygpath --path --unix "$LIB"`
-+ sys_lib_search_path_spec=`cygpath --path --dos "$sys_lib_search_path_spec" 2>/dev/null`
-+ sys_lib_search_path_spec=`cygpath --path --unix "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"`
-+ ;;
-+ *)
-+ sys_lib_search_path_spec="$LIB"
-+ if $ECHO "$sys_lib_search_path_spec" | $GREP ';[c-zC-Z]:/' >/dev/null; then
-+ # It is most probably a Windows format PATH.
-+ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e 's/;/ /g'`
-+ else
-+ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"`
-+ fi
-+ # FIXME: find the short name or the path components, as spaces are
-+ # common. (e.g. "Program Files" -> "PROGRA~1")
-+ ;;
-+ esac
-+
-+ # DLL is installed to $(libdir)/../bin by postinstall_cmds
-+ postinstall_cmds='base_file=`basename \${file}`~
-+ dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\${base_file}'\''i; echo \$dlname'\''`~
-+ dldir=$destdir/`dirname \$dlpath`~
-+ test -d \$dldir || mkdir -p \$dldir~
-+ $install_prog $dir/$dlname \$dldir/$dlname'
-+ postuninstall_cmds='dldll=`$SHELL 2>&1 -c '\''. $file; echo \$dlname'\''`~
-+ dlpath=$dir/\$dldll~
-+ $RM \$dlpath'
-+ shlibpath_overrides_runpath=yes
-+ dynamic_linker='Win32 link.exe'
- ;;
-
- *)
-+ # Assume MSVC wrapper
- library_names_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext} $libname.lib'
-+ dynamic_linker='Win32 ld.exe'
- ;;
- esac
-- dynamic_linker='Win32 ld.exe'
- # FIXME: first we should search . and the directory the executable is in
- shlibpath_var=PATH
- ;;
-@@ -11035,7 +11685,7 @@ else
- lt_dlunknown=0; lt_dlno_uscore=1; lt_dlneed_uscore=2
- lt_status=$lt_dlunknown
- cat > conftest.$ac_ext <<_LT_EOF
--#line 11038 "configure"
-+#line $LINENO "configure"
- #include "confdefs.h"
-
- #if HAVE_DLFCN_H
-@@ -11079,10 +11729,10 @@ else
- /* When -fvisbility=hidden is used, assume the code has been annotated
- correspondingly for the symbols needed. */
- #if defined(__GNUC__) && (((__GNUC__ == 3) && (__GNUC_MINOR__ >= 3)) || (__GNUC__ > 3))
--void fnord () __attribute__((visibility("default")));
-+int fnord () __attribute__((visibility("default")));
- #endif
-
--void fnord () { int i=42; }
-+int fnord () { return 42; }
- int main ()
- {
- void *self = dlopen (0, LT_DLGLOBAL|LT_DLLAZY_OR_NOW);
-@@ -11141,7 +11791,7 @@ else
- lt_dlunknown=0; lt_dlno_uscore=1; lt_dlneed_uscore=2
- lt_status=$lt_dlunknown
- cat > conftest.$ac_ext <<_LT_EOF
--#line 11144 "configure"
-+#line $LINENO "configure"
- #include "confdefs.h"
-
- #if HAVE_DLFCN_H
-@@ -11185,10 +11835,10 @@ else
- /* When -fvisbility=hidden is used, assume the code has been annotated
- correspondingly for the symbols needed. */
- #if defined(__GNUC__) && (((__GNUC__ == 3) && (__GNUC_MINOR__ >= 3)) || (__GNUC__ > 3))
--void fnord () __attribute__((visibility("default")));
-+int fnord () __attribute__((visibility("default")));
- #endif
-
--void fnord () { int i=42; }
-+int fnord () { return 42; }
- int main ()
- {
- void *self = dlopen (0, LT_DLGLOBAL|LT_DLLAZY_OR_NOW);
-@@ -13390,13 +14040,20 @@ exeext='`$ECHO "$exeext" | $SED "$delay_single_quote_subst"`'
- lt_unset='`$ECHO "$lt_unset" | $SED "$delay_single_quote_subst"`'
- lt_SP2NL='`$ECHO "$lt_SP2NL" | $SED "$delay_single_quote_subst"`'
- lt_NL2SP='`$ECHO "$lt_NL2SP" | $SED "$delay_single_quote_subst"`'
-+lt_cv_to_host_file_cmd='`$ECHO "$lt_cv_to_host_file_cmd" | $SED "$delay_single_quote_subst"`'
-+lt_cv_to_tool_file_cmd='`$ECHO "$lt_cv_to_tool_file_cmd" | $SED "$delay_single_quote_subst"`'
- reload_flag='`$ECHO "$reload_flag" | $SED "$delay_single_quote_subst"`'
- reload_cmds='`$ECHO "$reload_cmds" | $SED "$delay_single_quote_subst"`'
- OBJDUMP='`$ECHO "$OBJDUMP" | $SED "$delay_single_quote_subst"`'
- deplibs_check_method='`$ECHO "$deplibs_check_method" | $SED "$delay_single_quote_subst"`'
- file_magic_cmd='`$ECHO "$file_magic_cmd" | $SED "$delay_single_quote_subst"`'
-+file_magic_glob='`$ECHO "$file_magic_glob" | $SED "$delay_single_quote_subst"`'
-+want_nocaseglob='`$ECHO "$want_nocaseglob" | $SED "$delay_single_quote_subst"`'
-+DLLTOOL='`$ECHO "$DLLTOOL" | $SED "$delay_single_quote_subst"`'
-+sharedlib_from_linklib_cmd='`$ECHO "$sharedlib_from_linklib_cmd" | $SED "$delay_single_quote_subst"`'
- AR='`$ECHO "$AR" | $SED "$delay_single_quote_subst"`'
- AR_FLAGS='`$ECHO "$AR_FLAGS" | $SED "$delay_single_quote_subst"`'
-+archiver_list_spec='`$ECHO "$archiver_list_spec" | $SED "$delay_single_quote_subst"`'
- STRIP='`$ECHO "$STRIP" | $SED "$delay_single_quote_subst"`'
- RANLIB='`$ECHO "$RANLIB" | $SED "$delay_single_quote_subst"`'
- old_postinstall_cmds='`$ECHO "$old_postinstall_cmds" | $SED "$delay_single_quote_subst"`'
-@@ -13411,14 +14068,17 @@ lt_cv_sys_global_symbol_pipe='`$ECHO "$lt_cv_sys_global_symbol_pipe" | $SED "$de
- lt_cv_sys_global_symbol_to_cdecl='`$ECHO "$lt_cv_sys_global_symbol_to_cdecl" | $SED "$delay_single_quote_subst"`'
- lt_cv_sys_global_symbol_to_c_name_address='`$ECHO "$lt_cv_sys_global_symbol_to_c_name_address" | $SED "$delay_single_quote_subst"`'
- lt_cv_sys_global_symbol_to_c_name_address_lib_prefix='`$ECHO "$lt_cv_sys_global_symbol_to_c_name_address_lib_prefix" | $SED "$delay_single_quote_subst"`'
-+nm_file_list_spec='`$ECHO "$nm_file_list_spec" | $SED "$delay_single_quote_subst"`'
-+lt_sysroot='`$ECHO "$lt_sysroot" | $SED "$delay_single_quote_subst"`'
- objdir='`$ECHO "$objdir" | $SED "$delay_single_quote_subst"`'
- MAGIC_CMD='`$ECHO "$MAGIC_CMD" | $SED "$delay_single_quote_subst"`'
- lt_prog_compiler_no_builtin_flag='`$ECHO "$lt_prog_compiler_no_builtin_flag" | $SED "$delay_single_quote_subst"`'
--lt_prog_compiler_wl='`$ECHO "$lt_prog_compiler_wl" | $SED "$delay_single_quote_subst"`'
- lt_prog_compiler_pic='`$ECHO "$lt_prog_compiler_pic" | $SED "$delay_single_quote_subst"`'
-+lt_prog_compiler_wl='`$ECHO "$lt_prog_compiler_wl" | $SED "$delay_single_quote_subst"`'
- lt_prog_compiler_static='`$ECHO "$lt_prog_compiler_static" | $SED "$delay_single_quote_subst"`'
- lt_cv_prog_compiler_c_o='`$ECHO "$lt_cv_prog_compiler_c_o" | $SED "$delay_single_quote_subst"`'
- need_locks='`$ECHO "$need_locks" | $SED "$delay_single_quote_subst"`'
-+MANIFEST_TOOL='`$ECHO "$MANIFEST_TOOL" | $SED "$delay_single_quote_subst"`'
- DSYMUTIL='`$ECHO "$DSYMUTIL" | $SED "$delay_single_quote_subst"`'
- NMEDIT='`$ECHO "$NMEDIT" | $SED "$delay_single_quote_subst"`'
- LIPO='`$ECHO "$LIPO" | $SED "$delay_single_quote_subst"`'
-@@ -13451,12 +14111,12 @@ hardcode_shlibpath_var='`$ECHO "$hardcode_shlibpath_var" | $SED "$delay_single_q
- hardcode_automatic='`$ECHO "$hardcode_automatic" | $SED "$delay_single_quote_subst"`'
- inherit_rpath='`$ECHO "$inherit_rpath" | $SED "$delay_single_quote_subst"`'
- link_all_deplibs='`$ECHO "$link_all_deplibs" | $SED "$delay_single_quote_subst"`'
--fix_srcfile_path='`$ECHO "$fix_srcfile_path" | $SED "$delay_single_quote_subst"`'
- always_export_symbols='`$ECHO "$always_export_symbols" | $SED "$delay_single_quote_subst"`'
- export_symbols_cmds='`$ECHO "$export_symbols_cmds" | $SED "$delay_single_quote_subst"`'
- exclude_expsyms='`$ECHO "$exclude_expsyms" | $SED "$delay_single_quote_subst"`'
- include_expsyms='`$ECHO "$include_expsyms" | $SED "$delay_single_quote_subst"`'
- prelink_cmds='`$ECHO "$prelink_cmds" | $SED "$delay_single_quote_subst"`'
-+postlink_cmds='`$ECHO "$postlink_cmds" | $SED "$delay_single_quote_subst"`'
- file_list_spec='`$ECHO "$file_list_spec" | $SED "$delay_single_quote_subst"`'
- variables_saved_for_relink='`$ECHO "$variables_saved_for_relink" | $SED "$delay_single_quote_subst"`'
- need_lib_prefix='`$ECHO "$need_lib_prefix" | $SED "$delay_single_quote_subst"`'
-@@ -13511,8 +14171,13 @@ reload_flag \
- OBJDUMP \
- deplibs_check_method \
- file_magic_cmd \
-+file_magic_glob \
-+want_nocaseglob \
-+DLLTOOL \
-+sharedlib_from_linklib_cmd \
- AR \
- AR_FLAGS \
-+archiver_list_spec \
- STRIP \
- RANLIB \
- CC \
-@@ -13522,12 +14187,14 @@ lt_cv_sys_global_symbol_pipe \
- lt_cv_sys_global_symbol_to_cdecl \
- lt_cv_sys_global_symbol_to_c_name_address \
- lt_cv_sys_global_symbol_to_c_name_address_lib_prefix \
-+nm_file_list_spec \
- lt_prog_compiler_no_builtin_flag \
--lt_prog_compiler_wl \
- lt_prog_compiler_pic \
-+lt_prog_compiler_wl \
- lt_prog_compiler_static \
- lt_cv_prog_compiler_c_o \
- need_locks \
-+MANIFEST_TOOL \
- DSYMUTIL \
- NMEDIT \
- LIPO \
-@@ -13543,7 +14210,6 @@ no_undefined_flag \
- hardcode_libdir_flag_spec \
- hardcode_libdir_flag_spec_ld \
- hardcode_libdir_separator \
--fix_srcfile_path \
- exclude_expsyms \
- include_expsyms \
- file_list_spec \
-@@ -13579,6 +14245,7 @@ module_cmds \
- module_expsym_cmds \
- export_symbols_cmds \
- prelink_cmds \
-+postlink_cmds \
- postinstall_cmds \
- postuninstall_cmds \
- finish_cmds \
-@@ -14344,7 +15011,8 @@ $as_echo X"$file" |
- # NOTE: Changes made to this file will be lost: look at ltmain.sh.
- #
- # Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005,
--# 2006, 2007, 2008, 2009 Free Software Foundation, Inc.
-+# 2006, 2007, 2008, 2009, 2010 Free Software Foundation,
-+# Inc.
- # Written by Gordon Matzigkeit, 1996
- #
- # This file is part of GNU Libtool.
-@@ -14447,19 +15115,42 @@ SP2NL=$lt_lt_SP2NL
- # turn newlines into spaces.
- NL2SP=$lt_lt_NL2SP
-
-+# convert \$build file names to \$host format.
-+to_host_file_cmd=$lt_cv_to_host_file_cmd
-+
-+# convert \$build files to toolchain format.
-+to_tool_file_cmd=$lt_cv_to_tool_file_cmd
-+
- # An object symbol dumper.
- OBJDUMP=$lt_OBJDUMP
-
- # Method to check whether dependent libraries are shared objects.
- deplibs_check_method=$lt_deplibs_check_method
-
--# Command to use when deplibs_check_method == "file_magic".
-+# Command to use when deplibs_check_method = "file_magic".
- file_magic_cmd=$lt_file_magic_cmd
-
-+# How to find potential files when deplibs_check_method = "file_magic".
-+file_magic_glob=$lt_file_magic_glob
-+
-+# Find potential files using nocaseglob when deplibs_check_method = "file_magic".
-+want_nocaseglob=$lt_want_nocaseglob
-+
-+# DLL creation program.
-+DLLTOOL=$lt_DLLTOOL
-+
-+# Command to associate shared and link libraries.
-+sharedlib_from_linklib_cmd=$lt_sharedlib_from_linklib_cmd
-+
- # The archiver.
- AR=$lt_AR
-+
-+# Flags to create an archive.
- AR_FLAGS=$lt_AR_FLAGS
-
-+# How to feed a file listing to the archiver.
-+archiver_list_spec=$lt_archiver_list_spec
-+
- # A symbol stripping program.
- STRIP=$lt_STRIP
-
-@@ -14489,6 +15180,12 @@ global_symbol_to_c_name_address=$lt_lt_cv_sys_global_symbol_to_c_name_address
- # Transform the output of nm in a C name address pair when lib prefix is needed.
- global_symbol_to_c_name_address_lib_prefix=$lt_lt_cv_sys_global_symbol_to_c_name_address_lib_prefix
-
-+# Specify filename containing input files for \$NM.
-+nm_file_list_spec=$lt_nm_file_list_spec
-+
-+# The root where to search for dependent libraries,and in which our libraries should be installed.
-+lt_sysroot=$lt_sysroot
-+
- # The name of the directory that contains temporary libtool files.
- objdir=$objdir
-
-@@ -14498,6 +15195,9 @@ MAGIC_CMD=$MAGIC_CMD
- # Must we lock files when doing compilation?
- need_locks=$lt_need_locks
-
-+# Manifest tool.
-+MANIFEST_TOOL=$lt_MANIFEST_TOOL
-+
- # Tool to manipulate archived DWARF debug symbol files on Mac OS X.
- DSYMUTIL=$lt_DSYMUTIL
-
-@@ -14612,12 +15312,12 @@ with_gcc=$GCC
- # Compiler flag to turn off builtin functions.
- no_builtin_flag=$lt_lt_prog_compiler_no_builtin_flag
-
--# How to pass a linker flag through the compiler.
--wl=$lt_lt_prog_compiler_wl
--
- # Additional compiler flags for building library objects.
- pic_flag=$lt_lt_prog_compiler_pic
-
-+# How to pass a linker flag through the compiler.
-+wl=$lt_lt_prog_compiler_wl
-+
- # Compiler flag to prevent dynamic linking.
- link_static_flag=$lt_lt_prog_compiler_static
-
-@@ -14704,9 +15404,6 @@ inherit_rpath=$inherit_rpath
- # Whether libtool must link a program against all its dependency libraries.
- link_all_deplibs=$link_all_deplibs
-
--# Fix the shell variable \$srcfile for the compiler.
--fix_srcfile_path=$lt_fix_srcfile_path
--
- # Set to "yes" if exported symbols are required.
- always_export_symbols=$always_export_symbols
-
-@@ -14722,6 +15419,9 @@ include_expsyms=$lt_include_expsyms
- # Commands necessary for linking programs (against libraries) with templates.
- prelink_cmds=$lt_prelink_cmds
-
-+# Commands necessary for finishing linking programs.
-+postlink_cmds=$lt_postlink_cmds
-+
- # Specify filename containing input files.
- file_list_spec=$lt_file_list_spec
-
-@@ -14754,210 +15454,169 @@ ltmain="$ac_aux_dir/ltmain.sh"
- # if finds mixed CR/LF and LF-only lines. Since sed operates in
- # text mode, it properly converts lines to CR/LF. This bash problem
- # is reportedly fixed, but why not run on old versions too?
-- sed '/^# Generated shell functions inserted here/q' "$ltmain" >> "$cfgfile" \
-- || (rm -f "$cfgfile"; exit 1)
--
-- case $xsi_shell in
-- yes)
-- cat << \_LT_EOF >> "$cfgfile"
--
--# func_dirname file append nondir_replacement
--# Compute the dirname of FILE. If nonempty, add APPEND to the result,
--# otherwise set result to NONDIR_REPLACEMENT.
--func_dirname ()
--{
-- case ${1} in
-- */*) func_dirname_result="${1%/*}${2}" ;;
-- * ) func_dirname_result="${3}" ;;
-- esac
--}
--
--# func_basename file
--func_basename ()
--{
-- func_basename_result="${1##*/}"
--}
--
--# func_dirname_and_basename file append nondir_replacement
--# perform func_basename and func_dirname in a single function
--# call:
--# dirname: Compute the dirname of FILE. If nonempty,
--# add APPEND to the result, otherwise set result
--# to NONDIR_REPLACEMENT.
--# value returned in "$func_dirname_result"
--# basename: Compute filename of FILE.
--# value retuned in "$func_basename_result"
--# Implementation must be kept synchronized with func_dirname
--# and func_basename. For efficiency, we do not delegate to
--# those functions but instead duplicate the functionality here.
--func_dirname_and_basename ()
--{
-- case ${1} in
-- */*) func_dirname_result="${1%/*}${2}" ;;
-- * ) func_dirname_result="${3}" ;;
-- esac
-- func_basename_result="${1##*/}"
--}
--
--# func_stripname prefix suffix name
--# strip PREFIX and SUFFIX off of NAME.
--# PREFIX and SUFFIX must not contain globbing or regex special
--# characters, hashes, percent signs, but SUFFIX may contain a leading
--# dot (in which case that matches only a dot).
--func_stripname ()
--{
-- # pdksh 5.2.14 does not do ${X%$Y} correctly if both X and Y are
-- # positional parameters, so assign one to ordinary parameter first.
-- func_stripname_result=${3}
-- func_stripname_result=${func_stripname_result#"${1}"}
-- func_stripname_result=${func_stripname_result%"${2}"}
--}
--
--# func_opt_split
--func_opt_split ()
--{
-- func_opt_split_opt=${1%%=*}
-- func_opt_split_arg=${1#*=}
--}
--
--# func_lo2o object
--func_lo2o ()
--{
-- case ${1} in
-- *.lo) func_lo2o_result=${1%.lo}.${objext} ;;
-- *) func_lo2o_result=${1} ;;
-- esac
--}
--
--# func_xform libobj-or-source
--func_xform ()
--{
-- func_xform_result=${1%.*}.lo
--}
--
--# func_arith arithmetic-term...
--func_arith ()
--{
-- func_arith_result=$(( $* ))
--}
--
--# func_len string
--# STRING may not start with a hyphen.
--func_len ()
--{
-- func_len_result=${#1}
--}
--
--_LT_EOF
-- ;;
-- *) # Bourne compatible functions.
-- cat << \_LT_EOF >> "$cfgfile"
--
--# func_dirname file append nondir_replacement
--# Compute the dirname of FILE. If nonempty, add APPEND to the result,
--# otherwise set result to NONDIR_REPLACEMENT.
--func_dirname ()
--{
-- # Extract subdirectory from the argument.
-- func_dirname_result=`$ECHO "${1}" | $SED "$dirname"`
-- if test "X$func_dirname_result" = "X${1}"; then
-- func_dirname_result="${3}"
-- else
-- func_dirname_result="$func_dirname_result${2}"
-- fi
--}
--
--# func_basename file
--func_basename ()
--{
-- func_basename_result=`$ECHO "${1}" | $SED "$basename"`
--}
--
--
--# func_stripname prefix suffix name
--# strip PREFIX and SUFFIX off of NAME.
--# PREFIX and SUFFIX must not contain globbing or regex special
--# characters, hashes, percent signs, but SUFFIX may contain a leading
--# dot (in which case that matches only a dot).
--# func_strip_suffix prefix name
--func_stripname ()
--{
-- case ${2} in
-- .*) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%\\\\${2}\$%%"`;;
-- *) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%${2}\$%%"`;;
-- esac
--}
--
--# sed scripts:
--my_sed_long_opt='1s/^\(-[^=]*\)=.*/\1/;q'
--my_sed_long_arg='1s/^-[^=]*=//'
--
--# func_opt_split
--func_opt_split ()
--{
-- func_opt_split_opt=`$ECHO "${1}" | $SED "$my_sed_long_opt"`
-- func_opt_split_arg=`$ECHO "${1}" | $SED "$my_sed_long_arg"`
--}
--
--# func_lo2o object
--func_lo2o ()
--{
-- func_lo2o_result=`$ECHO "${1}" | $SED "$lo2o"`
--}
--
--# func_xform libobj-or-source
--func_xform ()
--{
-- func_xform_result=`$ECHO "${1}" | $SED 's/\.[^.]*$/.lo/'`
--}
--
--# func_arith arithmetic-term...
--func_arith ()
--{
-- func_arith_result=`expr "$@"`
--}
--
--# func_len string
--# STRING may not start with a hyphen.
--func_len ()
--{
-- func_len_result=`expr "$1" : ".*" 2>/dev/null || echo $max_cmd_len`
--}
--
--_LT_EOF
--esac
--
--case $lt_shell_append in
-- yes)
-- cat << \_LT_EOF >> "$cfgfile"
--
--# func_append var value
--# Append VALUE to the end of shell variable VAR.
--func_append ()
--{
-- eval "$1+=\$2"
--}
--_LT_EOF
-- ;;
-- *)
-- cat << \_LT_EOF >> "$cfgfile"
--
--# func_append var value
--# Append VALUE to the end of shell variable VAR.
--func_append ()
--{
-- eval "$1=\$$1\$2"
--}
--
--_LT_EOF
-- ;;
-- esac
--
--
-- sed -n '/^# Generated shell functions inserted here/,$p' "$ltmain" >> "$cfgfile" \
-- || (rm -f "$cfgfile"; exit 1)
--
-- mv -f "$cfgfile" "$ofile" ||
-+ sed '$q' "$ltmain" >> "$cfgfile" \
-+ || (rm -f "$cfgfile"; exit 1)
-+
-+ if test x"$xsi_shell" = xyes; then
-+ sed -e '/^func_dirname ()$/,/^} # func_dirname /c\
-+func_dirname ()\
-+{\
-+\ case ${1} in\
-+\ */*) func_dirname_result="${1%/*}${2}" ;;\
-+\ * ) func_dirname_result="${3}" ;;\
-+\ esac\
-+} # Extended-shell func_dirname implementation' "$cfgfile" > $cfgfile.tmp \
-+ && mv -f "$cfgfile.tmp" "$cfgfile" \
-+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-+test 0 -eq $? || _lt_function_replace_fail=:
-+
-+
-+ sed -e '/^func_basename ()$/,/^} # func_basename /c\
-+func_basename ()\
-+{\
-+\ func_basename_result="${1##*/}"\
-+} # Extended-shell func_basename implementation' "$cfgfile" > $cfgfile.tmp \
-+ && mv -f "$cfgfile.tmp" "$cfgfile" \
-+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-+test 0 -eq $? || _lt_function_replace_fail=:
-+
-+
-+ sed -e '/^func_dirname_and_basename ()$/,/^} # func_dirname_and_basename /c\
-+func_dirname_and_basename ()\
-+{\
-+\ case ${1} in\
-+\ */*) func_dirname_result="${1%/*}${2}" ;;\
-+\ * ) func_dirname_result="${3}" ;;\
-+\ esac\
-+\ func_basename_result="${1##*/}"\
-+} # Extended-shell func_dirname_and_basename implementation' "$cfgfile" > $cfgfile.tmp \
-+ && mv -f "$cfgfile.tmp" "$cfgfile" \
-+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-+test 0 -eq $? || _lt_function_replace_fail=:
-+
-+
-+ sed -e '/^func_stripname ()$/,/^} # func_stripname /c\
-+func_stripname ()\
-+{\
-+\ # pdksh 5.2.14 does not do ${X%$Y} correctly if both X and Y are\
-+\ # positional parameters, so assign one to ordinary parameter first.\
-+\ func_stripname_result=${3}\
-+\ func_stripname_result=${func_stripname_result#"${1}"}\
-+\ func_stripname_result=${func_stripname_result%"${2}"}\
-+} # Extended-shell func_stripname implementation' "$cfgfile" > $cfgfile.tmp \
-+ && mv -f "$cfgfile.tmp" "$cfgfile" \
-+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-+test 0 -eq $? || _lt_function_replace_fail=:
-+
-+
-+ sed -e '/^func_split_long_opt ()$/,/^} # func_split_long_opt /c\
-+func_split_long_opt ()\
-+{\
-+\ func_split_long_opt_name=${1%%=*}\
-+\ func_split_long_opt_arg=${1#*=}\
-+} # Extended-shell func_split_long_opt implementation' "$cfgfile" > $cfgfile.tmp \
-+ && mv -f "$cfgfile.tmp" "$cfgfile" \
-+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-+test 0 -eq $? || _lt_function_replace_fail=:
-+
-+
-+ sed -e '/^func_split_short_opt ()$/,/^} # func_split_short_opt /c\
-+func_split_short_opt ()\
-+{\
-+\ func_split_short_opt_arg=${1#??}\
-+\ func_split_short_opt_name=${1%"$func_split_short_opt_arg"}\
-+} # Extended-shell func_split_short_opt implementation' "$cfgfile" > $cfgfile.tmp \
-+ && mv -f "$cfgfile.tmp" "$cfgfile" \
-+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-+test 0 -eq $? || _lt_function_replace_fail=:
-+
-+
-+ sed -e '/^func_lo2o ()$/,/^} # func_lo2o /c\
-+func_lo2o ()\
-+{\
-+\ case ${1} in\
-+\ *.lo) func_lo2o_result=${1%.lo}.${objext} ;;\
-+\ *) func_lo2o_result=${1} ;;\
-+\ esac\
-+} # Extended-shell func_lo2o implementation' "$cfgfile" > $cfgfile.tmp \
-+ && mv -f "$cfgfile.tmp" "$cfgfile" \
-+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-+test 0 -eq $? || _lt_function_replace_fail=:
-+
-+
-+ sed -e '/^func_xform ()$/,/^} # func_xform /c\
-+func_xform ()\
-+{\
-+ func_xform_result=${1%.*}.lo\
-+} # Extended-shell func_xform implementation' "$cfgfile" > $cfgfile.tmp \
-+ && mv -f "$cfgfile.tmp" "$cfgfile" \
-+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-+test 0 -eq $? || _lt_function_replace_fail=:
-+
-+
-+ sed -e '/^func_arith ()$/,/^} # func_arith /c\
-+func_arith ()\
-+{\
-+ func_arith_result=$(( $* ))\
-+} # Extended-shell func_arith implementation' "$cfgfile" > $cfgfile.tmp \
-+ && mv -f "$cfgfile.tmp" "$cfgfile" \
-+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-+test 0 -eq $? || _lt_function_replace_fail=:
-+
-+
-+ sed -e '/^func_len ()$/,/^} # func_len /c\
-+func_len ()\
-+{\
-+ func_len_result=${#1}\
-+} # Extended-shell func_len implementation' "$cfgfile" > $cfgfile.tmp \
-+ && mv -f "$cfgfile.tmp" "$cfgfile" \
-+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-+test 0 -eq $? || _lt_function_replace_fail=:
-+
-+fi
-+
-+if test x"$lt_shell_append" = xyes; then
-+ sed -e '/^func_append ()$/,/^} # func_append /c\
-+func_append ()\
-+{\
-+ eval "${1}+=\\${2}"\
-+} # Extended-shell func_append implementation' "$cfgfile" > $cfgfile.tmp \
-+ && mv -f "$cfgfile.tmp" "$cfgfile" \
-+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-+test 0 -eq $? || _lt_function_replace_fail=:
-+
-+
-+ sed -e '/^func_append_quoted ()$/,/^} # func_append_quoted /c\
-+func_append_quoted ()\
-+{\
-+\ func_quote_for_eval "${2}"\
-+\ eval "${1}+=\\\\ \\$func_quote_for_eval_result"\
-+} # Extended-shell func_append_quoted implementation' "$cfgfile" > $cfgfile.tmp \
-+ && mv -f "$cfgfile.tmp" "$cfgfile" \
-+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-+test 0 -eq $? || _lt_function_replace_fail=:
-+
-+
-+ # Save a `func_append' function call where possible by direct use of '+='
-+ sed -e 's%func_append \([a-zA-Z_]\{1,\}\) "%\1+="%g' $cfgfile > $cfgfile.tmp \
-+ && mv -f "$cfgfile.tmp" "$cfgfile" \
-+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-+ test 0 -eq $? || _lt_function_replace_fail=:
-+else
-+ # Save a `func_append' function call even when '+=' is not available
-+ sed -e 's%func_append \([a-zA-Z_]\{1,\}\) "%\1="$\1%g' $cfgfile > $cfgfile.tmp \
-+ && mv -f "$cfgfile.tmp" "$cfgfile" \
-+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-+ test 0 -eq $? || _lt_function_replace_fail=:
-+fi
-+
-+if test x"$_lt_function_replace_fail" = x":"; then
-+ { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: Unable to substitute extended shell functions in $ofile" >&5
-+$as_echo "$as_me: WARNING: Unable to substitute extended shell functions in $ofile" >&2;}
-+fi
-+
-+
-+ mv -f "$cfgfile" "$ofile" ||
- (rm -f "$ofile" && cp "$cfgfile" "$ofile" && rm -f "$cfgfile")
- chmod +x "$ofile"
-
-diff --git a/zlib/configure b/zlib/configure
-index db7845c5d42..cd59daa39b5 100755
---- a/zlib/configure
-+++ b/zlib/configure
-@@ -646,8 +646,11 @@ OTOOL
- LIPO
- NMEDIT
- DSYMUTIL
-+MANIFEST_TOOL
- RANLIB
-+ac_ct_AR
- AR
-+DLLTOOL
- OBJDUMP
- LN_S
- NM
-@@ -774,6 +777,7 @@ enable_static
- with_pic
- enable_fast_install
- with_gnu_ld
-+with_libtool_sysroot
- enable_libtool_lock
- enable_host_shared
- '
-@@ -1428,6 +1432,8 @@ Optional Packages:
- --with-pic try to use only PIC/non-PIC objects [default=use
- both]
- --with-gnu-ld assume the C compiler uses GNU ld [default=no]
-+ --with-libtool-sysroot=DIR Search for dependent libraries within DIR
-+ (or the compiler's sysroot if not specified).
-
- Some influential environment variables:
- CC C compiler command
-@@ -4186,8 +4192,8 @@ esac
-
-
-
--macro_version='2.2.7a'
--macro_revision='1.3134'
-+macro_version='2.4'
-+macro_revision='1.3293'
-
-
-
-@@ -4227,7 +4233,7 @@ ECHO=$ECHO$ECHO$ECHO$ECHO$ECHO$ECHO
- { $as_echo "$as_me:${as_lineno-$LINENO}: checking how to print strings" >&5
- $as_echo_n "checking how to print strings... " >&6; }
- # Test print first, because it will be a builtin if present.
--if test "X`print -r -- -n 2>/dev/null`" = X-n && \
-+if test "X`( print -r -- -n ) 2>/dev/null`" = X-n && \
- test "X`print -r -- $ECHO 2>/dev/null`" = "X$ECHO"; then
- ECHO='print -r --'
- elif test "X`printf %s $ECHO 2>/dev/null`" = "X$ECHO"; then
-@@ -5044,8 +5050,8 @@ $as_echo_n "checking whether the shell understands some XSI constructs... " >&6;
- # Try some XSI features
- xsi_shell=no
- ( _lt_dummy="a/b/c"
-- test "${_lt_dummy##*/},${_lt_dummy%/*},"${_lt_dummy%"$_lt_dummy"}, \
-- = c,a/b,, \
-+ test "${_lt_dummy##*/},${_lt_dummy%/*},${_lt_dummy#??}"${_lt_dummy%"$_lt_dummy"}, \
-+ = c,a/b,b/c, \
- && eval 'test $(( 1 + 1 )) -eq 2 \
- && test "${#_lt_dummy}" -eq 5' ) >/dev/null 2>&1 \
- && xsi_shell=yes
-@@ -5094,6 +5100,80 @@ esac
-
-
-
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to convert $build file names to $host format" >&5
-+$as_echo_n "checking how to convert $build file names to $host format... " >&6; }
-+if ${lt_cv_to_host_file_cmd+:} false; then :
-+ $as_echo_n "(cached) " >&6
-+else
-+ case $host in
-+ *-*-mingw* )
-+ case $build in
-+ *-*-mingw* ) # actually msys
-+ lt_cv_to_host_file_cmd=func_convert_file_msys_to_w32
-+ ;;
-+ *-*-cygwin* )
-+ lt_cv_to_host_file_cmd=func_convert_file_cygwin_to_w32
-+ ;;
-+ * ) # otherwise, assume *nix
-+ lt_cv_to_host_file_cmd=func_convert_file_nix_to_w32
-+ ;;
-+ esac
-+ ;;
-+ *-*-cygwin* )
-+ case $build in
-+ *-*-mingw* ) # actually msys
-+ lt_cv_to_host_file_cmd=func_convert_file_msys_to_cygwin
-+ ;;
-+ *-*-cygwin* )
-+ lt_cv_to_host_file_cmd=func_convert_file_noop
-+ ;;
-+ * ) # otherwise, assume *nix
-+ lt_cv_to_host_file_cmd=func_convert_file_nix_to_cygwin
-+ ;;
-+ esac
-+ ;;
-+ * ) # unhandled hosts (and "normal" native builds)
-+ lt_cv_to_host_file_cmd=func_convert_file_noop
-+ ;;
-+esac
-+
-+fi
-+
-+to_host_file_cmd=$lt_cv_to_host_file_cmd
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_to_host_file_cmd" >&5
-+$as_echo "$lt_cv_to_host_file_cmd" >&6; }
-+
-+
-+
-+
-+
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to convert $build file names to toolchain format" >&5
-+$as_echo_n "checking how to convert $build file names to toolchain format... " >&6; }
-+if ${lt_cv_to_tool_file_cmd+:} false; then :
-+ $as_echo_n "(cached) " >&6
-+else
-+ #assume ordinary cross tools, or native build.
-+lt_cv_to_tool_file_cmd=func_convert_file_noop
-+case $host in
-+ *-*-mingw* )
-+ case $build in
-+ *-*-mingw* ) # actually msys
-+ lt_cv_to_tool_file_cmd=func_convert_file_msys_to_w32
-+ ;;
-+ esac
-+ ;;
-+esac
-+
-+fi
-+
-+to_tool_file_cmd=$lt_cv_to_tool_file_cmd
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_to_tool_file_cmd" >&5
-+$as_echo "$lt_cv_to_tool_file_cmd" >&6; }
-+
-+
-+
-+
-+
- { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $LD option to reload object files" >&5
- $as_echo_n "checking for $LD option to reload object files... " >&6; }
- if ${lt_cv_ld_reload_flag+:} false; then :
-@@ -5110,6 +5190,11 @@ case $reload_flag in
- esac
- reload_cmds='$LD$reload_flag -o $output$reload_objs'
- case $host_os in
-+ cygwin* | mingw* | pw32* | cegcc*)
-+ if test "$GCC" != yes; then
-+ reload_cmds=false
-+ fi
-+ ;;
- darwin*)
- if test "$GCC" = yes; then
- reload_cmds='$LTCC $LTCFLAGS -nostdlib ${wl}-r -o $output$reload_objs'
-@@ -5278,7 +5363,8 @@ mingw* | pw32*)
- lt_cv_deplibs_check_method='file_magic ^x86 archive import|^x86 DLL'
- lt_cv_file_magic_cmd='func_win32_libid'
- else
-- lt_cv_deplibs_check_method='file_magic file format pei*-i386(.*architecture: i386)?'
-+ # Keep this pattern in sync with the one in func_win32_libid.
-+ lt_cv_deplibs_check_method='file_magic file format (pei*-i386(.*architecture: i386)?|pe-arm-wince|pe-x86-64)'
- lt_cv_file_magic_cmd='$OBJDUMP -f'
- fi
- ;;
-@@ -5432,6 +5518,21 @@ esac
- fi
- { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_deplibs_check_method" >&5
- $as_echo "$lt_cv_deplibs_check_method" >&6; }
-+
-+file_magic_glob=
-+want_nocaseglob=no
-+if test "$build" = "$host"; then
-+ case $host_os in
-+ mingw* | pw32*)
-+ if ( shopt | grep nocaseglob ) >/dev/null 2>&1; then
-+ want_nocaseglob=yes
-+ else
-+ file_magic_glob=`echo aAbBcCdDeEfFgGhHiIjJkKlLmMnNoOpPqQrRsStTuUvVwWxXyYzZ | $SED -e "s/\(..\)/s\/[\1]\/[\1]\/g;/g"`
-+ fi
-+ ;;
-+ esac
-+fi
-+
- file_magic_cmd=$lt_cv_file_magic_cmd
- deplibs_check_method=$lt_cv_deplibs_check_method
- test -z "$deplibs_check_method" && deplibs_check_method=unknown
-@@ -5447,6 +5548,158 @@ test -z "$deplibs_check_method" && deplibs_check_method=unknown
-
-
-
-+
-+
-+
-+
-+
-+
-+
-+
-+
-+
-+if test -n "$ac_tool_prefix"; then
-+ # Extract the first word of "${ac_tool_prefix}dlltool", so it can be a program name with args.
-+set dummy ${ac_tool_prefix}dlltool; ac_word=$2
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-+$as_echo_n "checking for $ac_word... " >&6; }
-+if ${ac_cv_prog_DLLTOOL+:} false; then :
-+ $as_echo_n "(cached) " >&6
-+else
-+ if test -n "$DLLTOOL"; then
-+ ac_cv_prog_DLLTOOL="$DLLTOOL" # Let the user override the test.
-+else
-+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-+for as_dir in $PATH
-+do
-+ IFS=$as_save_IFS
-+ test -z "$as_dir" && as_dir=.
-+ for ac_exec_ext in '' $ac_executable_extensions; do
-+ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
-+ ac_cv_prog_DLLTOOL="${ac_tool_prefix}dlltool"
-+ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
-+ break 2
-+ fi
-+done
-+ done
-+IFS=$as_save_IFS
-+
-+fi
-+fi
-+DLLTOOL=$ac_cv_prog_DLLTOOL
-+if test -n "$DLLTOOL"; then
-+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $DLLTOOL" >&5
-+$as_echo "$DLLTOOL" >&6; }
-+else
-+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-+$as_echo "no" >&6; }
-+fi
-+
-+
-+fi
-+if test -z "$ac_cv_prog_DLLTOOL"; then
-+ ac_ct_DLLTOOL=$DLLTOOL
-+ # Extract the first word of "dlltool", so it can be a program name with args.
-+set dummy dlltool; ac_word=$2
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-+$as_echo_n "checking for $ac_word... " >&6; }
-+if ${ac_cv_prog_ac_ct_DLLTOOL+:} false; then :
-+ $as_echo_n "(cached) " >&6
-+else
-+ if test -n "$ac_ct_DLLTOOL"; then
-+ ac_cv_prog_ac_ct_DLLTOOL="$ac_ct_DLLTOOL" # Let the user override the test.
-+else
-+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-+for as_dir in $PATH
-+do
-+ IFS=$as_save_IFS
-+ test -z "$as_dir" && as_dir=.
-+ for ac_exec_ext in '' $ac_executable_extensions; do
-+ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
-+ ac_cv_prog_ac_ct_DLLTOOL="dlltool"
-+ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
-+ break 2
-+ fi
-+done
-+ done
-+IFS=$as_save_IFS
-+
-+fi
-+fi
-+ac_ct_DLLTOOL=$ac_cv_prog_ac_ct_DLLTOOL
-+if test -n "$ac_ct_DLLTOOL"; then
-+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_DLLTOOL" >&5
-+$as_echo "$ac_ct_DLLTOOL" >&6; }
-+else
-+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-+$as_echo "no" >&6; }
-+fi
-+
-+ if test "x$ac_ct_DLLTOOL" = x; then
-+ DLLTOOL="false"
-+ else
-+ case $cross_compiling:$ac_tool_warned in
-+yes:)
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
-+$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
-+ac_tool_warned=yes ;;
-+esac
-+ DLLTOOL=$ac_ct_DLLTOOL
-+ fi
-+else
-+ DLLTOOL="$ac_cv_prog_DLLTOOL"
-+fi
-+
-+test -z "$DLLTOOL" && DLLTOOL=dlltool
-+
-+
-+
-+
-+
-+
-+
-+
-+
-+
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to associate runtime and link libraries" >&5
-+$as_echo_n "checking how to associate runtime and link libraries... " >&6; }
-+if ${lt_cv_sharedlib_from_linklib_cmd+:} false; then :
-+ $as_echo_n "(cached) " >&6
-+else
-+ lt_cv_sharedlib_from_linklib_cmd='unknown'
-+
-+case $host_os in
-+cygwin* | mingw* | pw32* | cegcc*)
-+ # two different shell functions defined in ltmain.sh
-+ # decide which to use based on capabilities of $DLLTOOL
-+ case `$DLLTOOL --help 2>&1` in
-+ *--identify-strict*)
-+ lt_cv_sharedlib_from_linklib_cmd=func_cygming_dll_for_implib
-+ ;;
-+ *)
-+ lt_cv_sharedlib_from_linklib_cmd=func_cygming_dll_for_implib_fallback
-+ ;;
-+ esac
-+ ;;
-+*)
-+ # fallback: assume linklib IS sharedlib
-+ lt_cv_sharedlib_from_linklib_cmd="$ECHO"
-+ ;;
-+esac
-+
-+fi
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_sharedlib_from_linklib_cmd" >&5
-+$as_echo "$lt_cv_sharedlib_from_linklib_cmd" >&6; }
-+sharedlib_from_linklib_cmd=$lt_cv_sharedlib_from_linklib_cmd
-+test -z "$sharedlib_from_linklib_cmd" && sharedlib_from_linklib_cmd=$ECHO
-+
-+
-+
-+
-+
-+
-+
-+
- plugin_option=
- plugin_names="liblto_plugin.so liblto_plugin-0.dll cyglto_plugin-0.dll"
- for plugin in $plugin_names; do
-@@ -5461,8 +5714,10 @@ for plugin in $plugin_names; do
- done
-
- if test -n "$ac_tool_prefix"; then
-- # Extract the first word of "${ac_tool_prefix}ar", so it can be a program name with args.
--set dummy ${ac_tool_prefix}ar; ac_word=$2
-+ for ac_prog in ar
-+ do
-+ # Extract the first word of "$ac_tool_prefix$ac_prog", so it can be a program name with args.
-+set dummy $ac_tool_prefix$ac_prog; ac_word=$2
- { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
- $as_echo_n "checking for $ac_word... " >&6; }
- if ${ac_cv_prog_AR+:} false; then :
-@@ -5478,7 +5733,7 @@ do
- test -z "$as_dir" && as_dir=.
- for ac_exec_ext in '' $ac_executable_extensions; do
- if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
-- ac_cv_prog_AR="${ac_tool_prefix}ar"
-+ ac_cv_prog_AR="$ac_tool_prefix$ac_prog"
- $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
- break 2
- fi
-@@ -5498,11 +5753,15 @@ $as_echo "no" >&6; }
- fi
-
-
-+ test -n "$AR" && break
-+ done
- fi
--if test -z "$ac_cv_prog_AR"; then
-+if test -z "$AR"; then
- ac_ct_AR=$AR
-- # Extract the first word of "ar", so it can be a program name with args.
--set dummy ar; ac_word=$2
-+ for ac_prog in ar
-+do
-+ # Extract the first word of "$ac_prog", so it can be a program name with args.
-+set dummy $ac_prog; ac_word=$2
- { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
- $as_echo_n "checking for $ac_word... " >&6; }
- if ${ac_cv_prog_ac_ct_AR+:} false; then :
-@@ -5518,7 +5777,7 @@ do
- test -z "$as_dir" && as_dir=.
- for ac_exec_ext in '' $ac_executable_extensions; do
- if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
-- ac_cv_prog_ac_ct_AR="ar"
-+ ac_cv_prog_ac_ct_AR="$ac_prog"
- $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
- break 2
- fi
-@@ -5537,6 +5796,10 @@ else
- $as_echo "no" >&6; }
- fi
-
-+
-+ test -n "$ac_ct_AR" && break
-+done
-+
- if test "x$ac_ct_AR" = x; then
- AR="false"
- else
-@@ -5548,25 +5811,19 @@ ac_tool_warned=yes ;;
- esac
- AR=$ac_ct_AR
- fi
--else
-- AR="$ac_cv_prog_AR"
- fi
-
--test -z "$AR" && AR=ar
--if test -n "$plugin_option"; then
-- if $AR --help 2>&1 | grep -q "\--plugin"; then
-- touch conftest.c
-- $AR $plugin_option rc conftest.a conftest.c
-- if test "$?" != 0; then
-- { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: Failed: $AR $plugin_option rc" >&5
-+ touch conftest.c
-+ $AR $plugin_option rc conftest.a conftest.c
-+ if test "$?" != 0; then
-+ { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: Failed: $AR $plugin_option rc" >&5
- $as_echo "$as_me: WARNING: Failed: $AR $plugin_option rc" >&2;}
-- else
-- AR="$AR $plugin_option"
-- fi
-- rm -f conftest.*
-+ else
-+ AR="$AR $plugin_option"
- fi
--fi
--test -z "$AR_FLAGS" && AR_FLAGS=cru
-+ rm -f conftest.*
-+: ${AR=ar}
-+: ${AR_FLAGS=cru}
-
-
-
-@@ -5578,6 +5835,64 @@ test -z "$AR_FLAGS" && AR_FLAGS=cru
-
-
-
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for archiver @FILE support" >&5
-+$as_echo_n "checking for archiver @FILE support... " >&6; }
-+if ${lt_cv_ar_at_file+:} false; then :
-+ $as_echo_n "(cached) " >&6
-+else
-+ lt_cv_ar_at_file=no
-+ cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-+/* end confdefs.h. */
-+
-+int
-+main ()
-+{
-+
-+ ;
-+ return 0;
-+}
-+_ACEOF
-+if ac_fn_c_try_compile "$LINENO"; then :
-+ echo conftest.$ac_objext > conftest.lst
-+ lt_ar_try='$AR $AR_FLAGS libconftest.a @conftest.lst >&5'
-+ { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$lt_ar_try\""; } >&5
-+ (eval $lt_ar_try) 2>&5
-+ ac_status=$?
-+ $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
-+ test $ac_status = 0; }
-+ if test "$ac_status" -eq 0; then
-+ # Ensure the archiver fails upon bogus file names.
-+ rm -f conftest.$ac_objext libconftest.a
-+ { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$lt_ar_try\""; } >&5
-+ (eval $lt_ar_try) 2>&5
-+ ac_status=$?
-+ $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
-+ test $ac_status = 0; }
-+ if test "$ac_status" -ne 0; then
-+ lt_cv_ar_at_file=@
-+ fi
-+ fi
-+ rm -f conftest.* libconftest.a
-+
-+fi
-+rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
-+
-+fi
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_ar_at_file" >&5
-+$as_echo "$lt_cv_ar_at_file" >&6; }
-+
-+if test "x$lt_cv_ar_at_file" = xno; then
-+ archiver_list_spec=
-+else
-+ archiver_list_spec=$lt_cv_ar_at_file
-+fi
-+
-+
-+
-+
-+
-+
-+
- if test -n "$ac_tool_prefix"; then
- # Extract the first word of "${ac_tool_prefix}strip", so it can be a program name with args.
- set dummy ${ac_tool_prefix}strip; ac_word=$2
-@@ -5917,8 +6232,8 @@ esac
- lt_cv_sys_global_symbol_to_cdecl="sed -n -e 's/^T .* \(.*\)$/extern int \1();/p' -e 's/^$symcode* .* \(.*\)$/extern char \1;/p'"
-
- # Transform an extracted symbol line into symbol name and symbol address
--lt_cv_sys_global_symbol_to_c_name_address="sed -n -e 's/^: \([^ ]*\) $/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"\2\", (void *) \&\2},/p'"
--lt_cv_sys_global_symbol_to_c_name_address_lib_prefix="sed -n -e 's/^: \([^ ]*\) $/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \(lib[^ ]*\)$/ {\"\2\", (void *) \&\2},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"lib\2\", (void *) \&\2},/p'"
-+lt_cv_sys_global_symbol_to_c_name_address="sed -n -e 's/^: \([^ ]*\)[ ]*$/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"\2\", (void *) \&\2},/p'"
-+lt_cv_sys_global_symbol_to_c_name_address_lib_prefix="sed -n -e 's/^: \([^ ]*\)[ ]*$/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \(lib[^ ]*\)$/ {\"\2\", (void *) \&\2},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"lib\2\", (void *) \&\2},/p'"
-
- # Handle CRLF in mingw tool chain
- opt_cr=
-@@ -5954,6 +6269,7 @@ for ac_symprfx in "" "_"; do
- else
- lt_cv_sys_global_symbol_pipe="sed -n -e 's/^.*[ ]\($symcode$symcode*\)[ ][ ]*$ac_symprfx$sympat$opt_cr$/$symxfrm/p'"
- fi
-+ lt_cv_sys_global_symbol_pipe="$lt_cv_sys_global_symbol_pipe | sed '/ __gnu_lto/d'"
-
- # Check to see that the pipe works correctly.
- pipe_works=no
-@@ -5995,6 +6311,18 @@ _LT_EOF
- if $GREP ' nm_test_var$' "$nlist" >/dev/null; then
- if $GREP ' nm_test_func$' "$nlist" >/dev/null; then
- cat <<_LT_EOF > conftest.$ac_ext
-+/* Keep this code in sync between libtool.m4, ltmain, lt_system.h, and tests. */
-+#if defined(_WIN32) || defined(__CYGWIN__) || defined(_WIN32_WCE)
-+/* DATA imports from DLLs on WIN32 con't be const, because runtime
-+ relocations are performed -- see ld's documentation on pseudo-relocs. */
-+# define LT_DLSYM_CONST
-+#elif defined(__osf__)
-+/* This system does not cope well with relocations in const data. */
-+# define LT_DLSYM_CONST
-+#else
-+# define LT_DLSYM_CONST const
-+#endif
-+
- #ifdef __cplusplus
- extern "C" {
- #endif
-@@ -6006,7 +6334,7 @@ _LT_EOF
- cat <<_LT_EOF >> conftest.$ac_ext
-
- /* The mapping between symbol names and symbols. */
--const struct {
-+LT_DLSYM_CONST struct {
- const char *name;
- void *address;
- }
-@@ -6032,8 +6360,8 @@ static const void *lt_preloaded_setup() {
- _LT_EOF
- # Now try linking the two files.
- mv conftest.$ac_objext conftstm.$ac_objext
-- lt_save_LIBS="$LIBS"
-- lt_save_CFLAGS="$CFLAGS"
-+ lt_globsym_save_LIBS=$LIBS
-+ lt_globsym_save_CFLAGS=$CFLAGS
- LIBS="conftstm.$ac_objext"
- CFLAGS="$CFLAGS$lt_prog_compiler_no_builtin_flag"
- if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_link\""; } >&5
-@@ -6043,8 +6371,8 @@ _LT_EOF
- test $ac_status = 0; } && test -s conftest${ac_exeext}; then
- pipe_works=yes
- fi
-- LIBS="$lt_save_LIBS"
-- CFLAGS="$lt_save_CFLAGS"
-+ LIBS=$lt_globsym_save_LIBS
-+ CFLAGS=$lt_globsym_save_CFLAGS
- else
- echo "cannot find nm_test_func in $nlist" >&5
- fi
-@@ -6081,6 +6409,17 @@ else
- $as_echo "ok" >&6; }
- fi
-
-+# Response file support.
-+if test "$lt_cv_nm_interface" = "MS dumpbin"; then
-+ nm_file_list_spec='@'
-+elif $NM --help 2>/dev/null | grep '[@]FILE' >/dev/null; then
-+ nm_file_list_spec='@'
-+fi
-+
-+
-+
-+
-+
-
-
-
-@@ -6098,6 +6437,43 @@ fi
-
-
-
-+
-+
-+
-+
-+
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for sysroot" >&5
-+$as_echo_n "checking for sysroot... " >&6; }
-+
-+# Check whether --with-libtool-sysroot was given.
-+if test "${with_libtool_sysroot+set}" = set; then :
-+ withval=$with_libtool_sysroot;
-+else
-+ with_libtool_sysroot=no
-+fi
-+
-+
-+lt_sysroot=
-+case ${with_libtool_sysroot} in #(
-+ yes)
-+ if test "$GCC" = yes; then
-+ lt_sysroot=`$CC --print-sysroot 2>/dev/null`
-+ fi
-+ ;; #(
-+ /*)
-+ lt_sysroot=`echo "$with_libtool_sysroot" | sed -e "$sed_quote_subst"`
-+ ;; #(
-+ no|'')
-+ ;; #(
-+ *)
-+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: ${with_libtool_sysroot}" >&5
-+$as_echo "${with_libtool_sysroot}" >&6; }
-+ as_fn_error $? "The sysroot must be an absolute path." "$LINENO" 5
-+ ;;
-+esac
-+
-+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: ${lt_sysroot:-no}" >&5
-+$as_echo "${lt_sysroot:-no}" >&6; }
-
-
-
-@@ -6312,6 +6688,123 @@ esac
-
- need_locks="$enable_libtool_lock"
-
-+if test -n "$ac_tool_prefix"; then
-+ # Extract the first word of "${ac_tool_prefix}mt", so it can be a program name with args.
-+set dummy ${ac_tool_prefix}mt; ac_word=$2
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-+$as_echo_n "checking for $ac_word... " >&6; }
-+if ${ac_cv_prog_MANIFEST_TOOL+:} false; then :
-+ $as_echo_n "(cached) " >&6
-+else
-+ if test -n "$MANIFEST_TOOL"; then
-+ ac_cv_prog_MANIFEST_TOOL="$MANIFEST_TOOL" # Let the user override the test.
-+else
-+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-+for as_dir in $PATH
-+do
-+ IFS=$as_save_IFS
-+ test -z "$as_dir" && as_dir=.
-+ for ac_exec_ext in '' $ac_executable_extensions; do
-+ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
-+ ac_cv_prog_MANIFEST_TOOL="${ac_tool_prefix}mt"
-+ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
-+ break 2
-+ fi
-+done
-+ done
-+IFS=$as_save_IFS
-+
-+fi
-+fi
-+MANIFEST_TOOL=$ac_cv_prog_MANIFEST_TOOL
-+if test -n "$MANIFEST_TOOL"; then
-+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $MANIFEST_TOOL" >&5
-+$as_echo "$MANIFEST_TOOL" >&6; }
-+else
-+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-+$as_echo "no" >&6; }
-+fi
-+
-+
-+fi
-+if test -z "$ac_cv_prog_MANIFEST_TOOL"; then
-+ ac_ct_MANIFEST_TOOL=$MANIFEST_TOOL
-+ # Extract the first word of "mt", so it can be a program name with args.
-+set dummy mt; ac_word=$2
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-+$as_echo_n "checking for $ac_word... " >&6; }
-+if ${ac_cv_prog_ac_ct_MANIFEST_TOOL+:} false; then :
-+ $as_echo_n "(cached) " >&6
-+else
-+ if test -n "$ac_ct_MANIFEST_TOOL"; then
-+ ac_cv_prog_ac_ct_MANIFEST_TOOL="$ac_ct_MANIFEST_TOOL" # Let the user override the test.
-+else
-+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-+for as_dir in $PATH
-+do
-+ IFS=$as_save_IFS
-+ test -z "$as_dir" && as_dir=.
-+ for ac_exec_ext in '' $ac_executable_extensions; do
-+ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
-+ ac_cv_prog_ac_ct_MANIFEST_TOOL="mt"
-+ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
-+ break 2
-+ fi
-+done
-+ done
-+IFS=$as_save_IFS
-+
-+fi
-+fi
-+ac_ct_MANIFEST_TOOL=$ac_cv_prog_ac_ct_MANIFEST_TOOL
-+if test -n "$ac_ct_MANIFEST_TOOL"; then
-+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_MANIFEST_TOOL" >&5
-+$as_echo "$ac_ct_MANIFEST_TOOL" >&6; }
-+else
-+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-+$as_echo "no" >&6; }
-+fi
-+
-+ if test "x$ac_ct_MANIFEST_TOOL" = x; then
-+ MANIFEST_TOOL=":"
-+ else
-+ case $cross_compiling:$ac_tool_warned in
-+yes:)
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
-+$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
-+ac_tool_warned=yes ;;
-+esac
-+ MANIFEST_TOOL=$ac_ct_MANIFEST_TOOL
-+ fi
-+else
-+ MANIFEST_TOOL="$ac_cv_prog_MANIFEST_TOOL"
-+fi
-+
-+test -z "$MANIFEST_TOOL" && MANIFEST_TOOL=mt
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking if $MANIFEST_TOOL is a manifest tool" >&5
-+$as_echo_n "checking if $MANIFEST_TOOL is a manifest tool... " >&6; }
-+if ${lt_cv_path_mainfest_tool+:} false; then :
-+ $as_echo_n "(cached) " >&6
-+else
-+ lt_cv_path_mainfest_tool=no
-+ echo "$as_me:$LINENO: $MANIFEST_TOOL '-?'" >&5
-+ $MANIFEST_TOOL '-?' 2>conftest.err > conftest.out
-+ cat conftest.err >&5
-+ if $GREP 'Manifest Tool' conftest.out > /dev/null; then
-+ lt_cv_path_mainfest_tool=yes
-+ fi
-+ rm -f conftest*
-+fi
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_path_mainfest_tool" >&5
-+$as_echo "$lt_cv_path_mainfest_tool" >&6; }
-+if test "x$lt_cv_path_mainfest_tool" != xyes; then
-+ MANIFEST_TOOL=:
-+fi
-+
-+
-+
-+
-+
-
- case $host_os in
- rhapsody* | darwin*)
-@@ -6878,6 +7371,8 @@ _LT_EOF
- $LTCC $LTCFLAGS -c -o conftest.o conftest.c 2>&5
- echo "$AR cru libconftest.a conftest.o" >&5
- $AR cru libconftest.a conftest.o 2>&5
-+ echo "$RANLIB libconftest.a" >&5
-+ $RANLIB libconftest.a 2>&5
- cat > conftest.c << _LT_EOF
- int main() { return 0;}
- _LT_EOF
-@@ -7727,8 +8222,6 @@ fi
- lt_prog_compiler_pic=
- lt_prog_compiler_static=
-
--{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $compiler option to produce PIC" >&5
--$as_echo_n "checking for $compiler option to produce PIC... " >&6; }
-
- if test "$GCC" = yes; then
- lt_prog_compiler_wl='-Wl,'
-@@ -7894,6 +8387,12 @@ $as_echo_n "checking for $compiler option to produce PIC... " >&6; }
- lt_prog_compiler_pic='--shared'
- lt_prog_compiler_static='--static'
- ;;
-+ nagfor*)
-+ # NAG Fortran compiler
-+ lt_prog_compiler_wl='-Wl,-Wl,,'
-+ lt_prog_compiler_pic='-PIC'
-+ lt_prog_compiler_static='-Bstatic'
-+ ;;
- pgcc* | pgf77* | pgf90* | pgf95* | pgfortran*)
- # Portland Group compilers (*not* the Pentium gcc compiler,
- # which looks to be a dead project)
-@@ -7956,7 +8455,7 @@ $as_echo_n "checking for $compiler option to produce PIC... " >&6; }
- lt_prog_compiler_pic='-KPIC'
- lt_prog_compiler_static='-Bstatic'
- case $cc_basename in
-- f77* | f90* | f95*)
-+ f77* | f90* | f95* | sunf77* | sunf90* | sunf95*)
- lt_prog_compiler_wl='-Qoption ld ';;
- *)
- lt_prog_compiler_wl='-Wl,';;
-@@ -8013,13 +8512,17 @@ case $host_os in
- lt_prog_compiler_pic="$lt_prog_compiler_pic -DPIC"
- ;;
- esac
--{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_prog_compiler_pic" >&5
--$as_echo "$lt_prog_compiler_pic" >&6; }
--
--
--
--
-
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $compiler option to produce PIC" >&5
-+$as_echo_n "checking for $compiler option to produce PIC... " >&6; }
-+if ${lt_cv_prog_compiler_pic+:} false; then :
-+ $as_echo_n "(cached) " >&6
-+else
-+ lt_cv_prog_compiler_pic=$lt_prog_compiler_pic
-+fi
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_pic" >&5
-+$as_echo "$lt_cv_prog_compiler_pic" >&6; }
-+lt_prog_compiler_pic=$lt_cv_prog_compiler_pic
-
- #
- # Check to make sure the PIC flag actually works.
-@@ -8080,6 +8583,11 @@ fi
-
-
-
-+
-+
-+
-+
-+
- #
- # Check to make sure the static flag actually works.
- #
-@@ -8430,7 +8938,8 @@ _LT_EOF
- allow_undefined_flag=unsupported
- always_export_symbols=no
- enable_shared_with_static_runtimes=yes
-- export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1 DATA/'\'' | $SED -e '\''/^[AITW][ ]/s/.*[ ]//'\'' | sort | uniq > $export_symbols'
-+ export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1 DATA/;s/^.*[ ]__nm__\([^ ]*\)[ ][^ ]*/\1 DATA/;/^I[ ]/d;/^[AITW][ ]/s/.* //'\'' | sort | uniq > $export_symbols'
-+ exclude_expsyms='[_]+GLOBAL_OFFSET_TABLE_|[_]+GLOBAL__[FID]_.*|[_]+head_[A-Za-z0-9_]+_dll|[A-Za-z0-9_]+_dll_iname'
-
- if $LD --help 2>&1 | $GREP 'auto-import' > /dev/null; then
- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib'
-@@ -8529,12 +9038,12 @@ _LT_EOF
- whole_archive_flag_spec='--whole-archive$convenience --no-whole-archive'
- hardcode_libdir_flag_spec=
- hardcode_libdir_flag_spec_ld='-rpath $libdir'
-- archive_cmds='$LD -shared $libobjs $deplibs $compiler_flags -soname $soname -o $lib'
-+ archive_cmds='$LD -shared $libobjs $deplibs $linker_flags -soname $soname -o $lib'
- if test "x$supports_anon_versioning" = xyes; then
- archive_expsym_cmds='echo "{ global:" > $output_objdir/$libname.ver~
- cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~
- echo "local: *; };" >> $output_objdir/$libname.ver~
-- $LD -shared $libobjs $deplibs $compiler_flags -soname $soname -version-script $output_objdir/$libname.ver -o $lib'
-+ $LD -shared $libobjs $deplibs $linker_flags -soname $soname -version-script $output_objdir/$libname.ver -o $lib'
- fi
- ;;
- esac
-@@ -8548,8 +9057,8 @@ _LT_EOF
- archive_cmds='$LD -Bshareable $libobjs $deplibs $linker_flags -o $lib'
- wlarc=
- else
-- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
-- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
-+ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
-+ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
- fi
- ;;
-
-@@ -8567,8 +9076,8 @@ _LT_EOF
-
- _LT_EOF
- elif $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then
-- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
-- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
-+ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
-+ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
- else
- ld_shlibs=no
- fi
-@@ -8614,8 +9123,8 @@ _LT_EOF
-
- *)
- if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then
-- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
-- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
-+ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
-+ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
- else
- ld_shlibs=no
- fi
-@@ -8745,7 +9254,13 @@ _LT_EOF
- allow_undefined_flag='-berok'
- # Determine the default libpath from the value encoded in an
- # empty executable.
-- if test x$gcc_no_link = xyes; then
-+ if test "${lt_cv_aix_libpath+set}" = set; then
-+ aix_libpath=$lt_cv_aix_libpath
-+else
-+ if ${lt_cv_aix_libpath_+:} false; then :
-+ $as_echo_n "(cached) " >&6
-+else
-+ if test x$gcc_no_link = xyes; then
- as_fn_error $? "Link tests are not allowed after GCC_NO_EXECUTABLES." "$LINENO" 5
- fi
- cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-@@ -8761,22 +9276,29 @@ main ()
- _ACEOF
- if ac_fn_c_try_link "$LINENO"; then :
-
--lt_aix_libpath_sed='
-- /Import File Strings/,/^$/ {
-- /^0/ {
-- s/^0 *\(.*\)$/\1/
-- p
-- }
-- }'
--aix_libpath=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
--# Check for a 64-bit object if we didn't find anything.
--if test -z "$aix_libpath"; then
-- aix_libpath=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
--fi
-+ lt_aix_libpath_sed='
-+ /Import File Strings/,/^$/ {
-+ /^0/ {
-+ s/^0 *\([^ ]*\) *$/\1/
-+ p
-+ }
-+ }'
-+ lt_cv_aix_libpath_=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
-+ # Check for a 64-bit object if we didn't find anything.
-+ if test -z "$lt_cv_aix_libpath_"; then
-+ lt_cv_aix_libpath_=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
-+ fi
- fi
- rm -f core conftest.err conftest.$ac_objext \
- conftest$ac_exeext conftest.$ac_ext
--if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
-+ if test -z "$lt_cv_aix_libpath_"; then
-+ lt_cv_aix_libpath_="/usr/lib:/lib"
-+ fi
-+
-+fi
-+
-+ aix_libpath=$lt_cv_aix_libpath_
-+fi
-
- hardcode_libdir_flag_spec='${wl}-blibpath:$libdir:'"$aix_libpath"
- archive_expsym_cmds='$CC -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags `if test "x${allow_undefined_flag}" != "x"; then func_echo_all "${wl}${allow_undefined_flag}"; else :; fi` '"\${wl}$exp_sym_flag:\$export_symbols $shared_flag"
-@@ -8788,7 +9310,13 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
- else
- # Determine the default libpath from the value encoded in an
- # empty executable.
-- if test x$gcc_no_link = xyes; then
-+ if test "${lt_cv_aix_libpath+set}" = set; then
-+ aix_libpath=$lt_cv_aix_libpath
-+else
-+ if ${lt_cv_aix_libpath_+:} false; then :
-+ $as_echo_n "(cached) " >&6
-+else
-+ if test x$gcc_no_link = xyes; then
- as_fn_error $? "Link tests are not allowed after GCC_NO_EXECUTABLES." "$LINENO" 5
- fi
- cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-@@ -8804,22 +9332,29 @@ main ()
- _ACEOF
- if ac_fn_c_try_link "$LINENO"; then :
-
--lt_aix_libpath_sed='
-- /Import File Strings/,/^$/ {
-- /^0/ {
-- s/^0 *\(.*\)$/\1/
-- p
-- }
-- }'
--aix_libpath=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
--# Check for a 64-bit object if we didn't find anything.
--if test -z "$aix_libpath"; then
-- aix_libpath=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
--fi
-+ lt_aix_libpath_sed='
-+ /Import File Strings/,/^$/ {
-+ /^0/ {
-+ s/^0 *\([^ ]*\) *$/\1/
-+ p
-+ }
-+ }'
-+ lt_cv_aix_libpath_=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
-+ # Check for a 64-bit object if we didn't find anything.
-+ if test -z "$lt_cv_aix_libpath_"; then
-+ lt_cv_aix_libpath_=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
-+ fi
- fi
- rm -f core conftest.err conftest.$ac_objext \
- conftest$ac_exeext conftest.$ac_ext
--if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
-+ if test -z "$lt_cv_aix_libpath_"; then
-+ lt_cv_aix_libpath_="/usr/lib:/lib"
-+ fi
-+
-+fi
-+
-+ aix_libpath=$lt_cv_aix_libpath_
-+fi
-
- hardcode_libdir_flag_spec='${wl}-blibpath:$libdir:'"$aix_libpath"
- # Warning - without using the other run time loading flags,
-@@ -8864,20 +9399,63 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
- # Microsoft Visual C++.
- # hardcode_libdir_flag_spec is actually meaningless, as there is
- # no search path for DLLs.
-- hardcode_libdir_flag_spec=' '
-- allow_undefined_flag=unsupported
-- # Tell ltmain to make .lib files, not .a files.
-- libext=lib
-- # Tell ltmain to make .dll files, not .so files.
-- shrext_cmds=".dll"
-- # FIXME: Setting linknames here is a bad hack.
-- archive_cmds='$CC -o $lib $libobjs $compiler_flags `func_echo_all "$deplibs" | $SED '\''s/ -lc$//'\''` -link -dll~linknames='
-- # The linker will automatically build a .lib file if we build a DLL.
-- old_archive_from_new_cmds='true'
-- # FIXME: Should let the user specify the lib program.
-- old_archive_cmds='lib -OUT:$oldlib$oldobjs$old_deplibs'
-- fix_srcfile_path='`cygpath -w "$srcfile"`'
-- enable_shared_with_static_runtimes=yes
-+ case $cc_basename in
-+ cl*)
-+ # Native MSVC
-+ hardcode_libdir_flag_spec=' '
-+ allow_undefined_flag=unsupported
-+ always_export_symbols=yes
-+ file_list_spec='@'
-+ # Tell ltmain to make .lib files, not .a files.
-+ libext=lib
-+ # Tell ltmain to make .dll files, not .so files.
-+ shrext_cmds=".dll"
-+ # FIXME: Setting linknames here is a bad hack.
-+ archive_cmds='$CC -o $output_objdir/$soname $libobjs $compiler_flags $deplibs -Wl,-dll~linknames='
-+ archive_expsym_cmds='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then
-+ sed -n -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' -e '1\\\!p' < $export_symbols > $output_objdir/$soname.exp;
-+ else
-+ sed -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' < $export_symbols > $output_objdir/$soname.exp;
-+ fi~
-+ $CC -o $tool_output_objdir$soname $libobjs $compiler_flags $deplibs "@$tool_output_objdir$soname.exp" -Wl,-DLL,-IMPLIB:"$tool_output_objdir$libname.dll.lib"~
-+ linknames='
-+ # The linker will not automatically build a static lib if we build a DLL.
-+ # _LT_TAGVAR(old_archive_from_new_cmds, )='true'
-+ enable_shared_with_static_runtimes=yes
-+ export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1,DATA/'\'' | $SED -e '\''/^[AITW][ ]/s/.*[ ]//'\'' | sort | uniq > $export_symbols'
-+ # Don't use ranlib
-+ old_postinstall_cmds='chmod 644 $oldlib'
-+ postlink_cmds='lt_outputfile="@OUTPUT@"~
-+ lt_tool_outputfile="@TOOL_OUTPUT@"~
-+ case $lt_outputfile in
-+ *.exe|*.EXE) ;;
-+ *)
-+ lt_outputfile="$lt_outputfile.exe"
-+ lt_tool_outputfile="$lt_tool_outputfile.exe"
-+ ;;
-+ esac~
-+ if test "$MANIFEST_TOOL" != ":" && test -f "$lt_outputfile.manifest"; then
-+ $MANIFEST_TOOL -manifest "$lt_tool_outputfile.manifest" -outputresource:"$lt_tool_outputfile" || exit 1;
-+ $RM "$lt_outputfile.manifest";
-+ fi'
-+ ;;
-+ *)
-+ # Assume MSVC wrapper
-+ hardcode_libdir_flag_spec=' '
-+ allow_undefined_flag=unsupported
-+ # Tell ltmain to make .lib files, not .a files.
-+ libext=lib
-+ # Tell ltmain to make .dll files, not .so files.
-+ shrext_cmds=".dll"
-+ # FIXME: Setting linknames here is a bad hack.
-+ archive_cmds='$CC -o $lib $libobjs $compiler_flags `func_echo_all "$deplibs" | $SED '\''s/ -lc$//'\''` -link -dll~linknames='
-+ # The linker will automatically build a .lib file if we build a DLL.
-+ old_archive_from_new_cmds='true'
-+ # FIXME: Should let the user specify the lib program.
-+ old_archive_cmds='lib -OUT:$oldlib$oldobjs$old_deplibs'
-+ enable_shared_with_static_runtimes=yes
-+ ;;
-+ esac
- ;;
-
- darwin* | rhapsody*)
-@@ -8938,7 +9516,7 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
-
- # FreeBSD 3 and greater uses gcc -shared to do shared libraries.
- freebsd* | dragonfly*)
-- archive_cmds='$CC -shared -o $lib $libobjs $deplibs $compiler_flags'
-+ archive_cmds='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags'
- hardcode_libdir_flag_spec='-R$libdir'
- hardcode_direct=yes
- hardcode_shlibpath_var=no
-@@ -8946,7 +9524,7 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
-
- hpux9*)
- if test "$GCC" = yes; then
-- archive_cmds='$RM $output_objdir/$soname~$CC -shared -fPIC ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $libobjs $deplibs $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib'
-+ archive_cmds='$RM $output_objdir/$soname~$CC -shared $pic_flag ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $libobjs $deplibs $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib'
- else
- archive_cmds='$RM $output_objdir/$soname~$LD -b +b $install_libdir -o $output_objdir/$soname $libobjs $deplibs $linker_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib'
- fi
-@@ -8962,7 +9540,7 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
-
- hpux10*)
- if test "$GCC" = yes && test "$with_gnu_ld" = no; then
-- archive_cmds='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'
-+ archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'
- else
- archive_cmds='$LD -b +h $soname +b $install_libdir -o $lib $libobjs $deplibs $linker_flags'
- fi
-@@ -8986,10 +9564,10 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
- archive_cmds='$CC -shared ${wl}+h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags'
- ;;
- ia64*)
-- archive_cmds='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags'
-+ archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags'
- ;;
- *)
-- archive_cmds='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'
-+ archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'
- ;;
- esac
- else
-@@ -9068,26 +9646,39 @@ fi
-
- irix5* | irix6* | nonstopux*)
- if test "$GCC" = yes; then
-- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
-+ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
- # Try to use the -exported_symbol ld option, if it does not
- # work, assume that -exports_file does not work either and
- # implicitly export all symbols.
-- save_LDFLAGS="$LDFLAGS"
-- LDFLAGS="$LDFLAGS -shared ${wl}-exported_symbol ${wl}foo ${wl}-update_registry ${wl}/dev/null"
-- if test x$gcc_no_link = xyes; then
-+ # This should be the same for all languages, so no per-tag cache variable.
-+ { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the $host_os linker accepts -exported_symbol" >&5
-+$as_echo_n "checking whether the $host_os linker accepts -exported_symbol... " >&6; }
-+if ${lt_cv_irix_exported_symbol+:} false; then :
-+ $as_echo_n "(cached) " >&6
-+else
-+ save_LDFLAGS="$LDFLAGS"
-+ LDFLAGS="$LDFLAGS -shared ${wl}-exported_symbol ${wl}foo ${wl}-update_registry ${wl}/dev/null"
-+ if test x$gcc_no_link = xyes; then
- as_fn_error $? "Link tests are not allowed after GCC_NO_EXECUTABLES." "$LINENO" 5
- fi
- cat confdefs.h - <<_ACEOF >conftest.$ac_ext
- /* end confdefs.h. */
--int foo(void) {}
-+int foo (void) { return 0; }
- _ACEOF
- if ac_fn_c_try_link "$LINENO"; then :
-- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations ${wl}-exports_file ${wl}$export_symbols -o $lib'
--
-+ lt_cv_irix_exported_symbol=yes
-+else
-+ lt_cv_irix_exported_symbol=no
- fi
- rm -f core conftest.err conftest.$ac_objext \
- conftest$ac_exeext conftest.$ac_ext
-- LDFLAGS="$save_LDFLAGS"
-+ LDFLAGS="$save_LDFLAGS"
-+fi
-+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_irix_exported_symbol" >&5
-+$as_echo "$lt_cv_irix_exported_symbol" >&6; }
-+ if test "$lt_cv_irix_exported_symbol" = yes; then
-+ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations ${wl}-exports_file ${wl}$export_symbols -o $lib'
-+ fi
- else
- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib'
- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -exports_file $export_symbols -o $lib'
-@@ -9172,7 +9763,7 @@ rm -f core conftest.err conftest.$ac_objext \
- osf4* | osf5*) # as osf3* with the addition of -msym flag
- if test "$GCC" = yes; then
- allow_undefined_flag=' ${wl}-expect_unresolved ${wl}\*'
-- archive_cmds='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
-+ archive_cmds='$CC -shared${allow_undefined_flag} $pic_flag $libobjs $deplibs $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
- hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir'
- else
- allow_undefined_flag=' -expect_unresolved \*'
-@@ -9191,9 +9782,9 @@ rm -f core conftest.err conftest.$ac_objext \
- no_undefined_flag=' -z defs'
- if test "$GCC" = yes; then
- wlarc='${wl}'
-- archive_cmds='$CC -shared ${wl}-z ${wl}text ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags'
-+ archive_cmds='$CC -shared $pic_flag ${wl}-z ${wl}text ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags'
- archive_expsym_cmds='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~
-- $CC -shared ${wl}-z ${wl}text ${wl}-M ${wl}$lib.exp ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp'
-+ $CC -shared $pic_flag ${wl}-z ${wl}text ${wl}-M ${wl}$lib.exp ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp'
- else
- case `$CC -V 2>&1` in
- *"Compilers 5.0"*)
-@@ -9769,8 +10360,9 @@ cygwin* | mingw* | pw32* | cegcc*)
- need_version=no
- need_lib_prefix=no
-
-- case $GCC,$host_os in
-- yes,cygwin* | yes,mingw* | yes,pw32* | yes,cegcc*)
-+ case $GCC,$cc_basename in
-+ yes,*)
-+ # gcc
- library_names_spec='$libname.dll.a'
- # DLL is installed to $(libdir)/../bin by postinstall_cmds
- postinstall_cmds='base_file=`basename \${file}`~
-@@ -9803,13 +10395,71 @@ cygwin* | mingw* | pw32* | cegcc*)
- library_names_spec='`echo ${libname} | sed -e 's/^lib/pw/'``echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}'
- ;;
- esac
-+ dynamic_linker='Win32 ld.exe'
-+ ;;
-+
-+ *,cl*)
-+ # Native MSVC
-+ libname_spec='$name'
-+ soname_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}'
-+ library_names_spec='${libname}.dll.lib'
-+
-+ case $build_os in
-+ mingw*)
-+ sys_lib_search_path_spec=
-+ lt_save_ifs=$IFS
-+ IFS=';'
-+ for lt_path in $LIB
-+ do
-+ IFS=$lt_save_ifs
-+ # Let DOS variable expansion print the short 8.3 style file name.
-+ lt_path=`cd "$lt_path" 2>/dev/null && cmd //C "for %i in (".") do @echo %~si"`
-+ sys_lib_search_path_spec="$sys_lib_search_path_spec $lt_path"
-+ done
-+ IFS=$lt_save_ifs
-+ # Convert to MSYS style.
-+ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | sed -e 's|\\\\|/|g' -e 's| \\([a-zA-Z]\\):| /\\1|g' -e 's|^ ||'`
-+ ;;
-+ cygwin*)
-+ # Convert to unix form, then to dos form, then back to unix form
-+ # but this time dos style (no spaces!) so that the unix form looks
-+ # like /cygdrive/c/PROGRA~1:/cygdr...
-+ sys_lib_search_path_spec=`cygpath --path --unix "$LIB"`
-+ sys_lib_search_path_spec=`cygpath --path --dos "$sys_lib_search_path_spec" 2>/dev/null`
-+ sys_lib_search_path_spec=`cygpath --path --unix "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"`
-+ ;;
-+ *)
-+ sys_lib_search_path_spec="$LIB"
-+ if $ECHO "$sys_lib_search_path_spec" | $GREP ';[c-zC-Z]:/' >/dev/null; then
-+ # It is most probably a Windows format PATH.
-+ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e 's/;/ /g'`
-+ else
-+ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"`
-+ fi
-+ # FIXME: find the short name or the path components, as spaces are
-+ # common. (e.g. "Program Files" -> "PROGRA~1")
-+ ;;
-+ esac
-+
-+ # DLL is installed to $(libdir)/../bin by postinstall_cmds
-+ postinstall_cmds='base_file=`basename \${file}`~
-+ dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\${base_file}'\''i; echo \$dlname'\''`~
-+ dldir=$destdir/`dirname \$dlpath`~
-+ test -d \$dldir || mkdir -p \$dldir~
-+ $install_prog $dir/$dlname \$dldir/$dlname'
-+ postuninstall_cmds='dldll=`$SHELL 2>&1 -c '\''. $file; echo \$dlname'\''`~
-+ dlpath=$dir/\$dldll~
-+ $RM \$dlpath'
-+ shlibpath_overrides_runpath=yes
-+ dynamic_linker='Win32 link.exe'
- ;;
-
- *)
-+ # Assume MSVC wrapper
- library_names_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext} $libname.lib'
-+ dynamic_linker='Win32 ld.exe'
- ;;
- esac
-- dynamic_linker='Win32 ld.exe'
- # FIXME: first we should search . and the directory the executable is in
- shlibpath_var=PATH
- ;;
-@@ -10705,7 +11355,7 @@ else
- lt_dlunknown=0; lt_dlno_uscore=1; lt_dlneed_uscore=2
- lt_status=$lt_dlunknown
- cat > conftest.$ac_ext <<_LT_EOF
--#line 10708 "configure"
-+#line $LINENO "configure"
- #include "confdefs.h"
-
- #if HAVE_DLFCN_H
-@@ -10749,10 +11399,10 @@ else
- /* When -fvisbility=hidden is used, assume the code has been annotated
- correspondingly for the symbols needed. */
- #if defined(__GNUC__) && (((__GNUC__ == 3) && (__GNUC_MINOR__ >= 3)) || (__GNUC__ > 3))
--void fnord () __attribute__((visibility("default")));
-+int fnord () __attribute__((visibility("default")));
- #endif
-
--void fnord () { int i=42; }
-+int fnord () { return 42; }
- int main ()
- {
- void *self = dlopen (0, LT_DLGLOBAL|LT_DLLAZY_OR_NOW);
-@@ -10811,7 +11461,7 @@ else
- lt_dlunknown=0; lt_dlno_uscore=1; lt_dlneed_uscore=2
- lt_status=$lt_dlunknown
- cat > conftest.$ac_ext <<_LT_EOF
--#line 10814 "configure"
-+#line $LINENO "configure"
- #include "confdefs.h"
-
- #if HAVE_DLFCN_H
-@@ -10855,10 +11505,10 @@ else
- /* When -fvisbility=hidden is used, assume the code has been annotated
- correspondingly for the symbols needed. */
- #if defined(__GNUC__) && (((__GNUC__ == 3) && (__GNUC_MINOR__ >= 3)) || (__GNUC__ > 3))
--void fnord () __attribute__((visibility("default")));
-+int fnord () __attribute__((visibility("default")));
- #endif
-
--void fnord () { int i=42; }
-+int fnord () { return 42; }
- int main ()
- {
- void *self = dlopen (0, LT_DLGLOBAL|LT_DLLAZY_OR_NOW);
-@@ -12328,13 +12978,20 @@ exeext='`$ECHO "$exeext" | $SED "$delay_single_quote_subst"`'
- lt_unset='`$ECHO "$lt_unset" | $SED "$delay_single_quote_subst"`'
- lt_SP2NL='`$ECHO "$lt_SP2NL" | $SED "$delay_single_quote_subst"`'
- lt_NL2SP='`$ECHO "$lt_NL2SP" | $SED "$delay_single_quote_subst"`'
-+lt_cv_to_host_file_cmd='`$ECHO "$lt_cv_to_host_file_cmd" | $SED "$delay_single_quote_subst"`'
-+lt_cv_to_tool_file_cmd='`$ECHO "$lt_cv_to_tool_file_cmd" | $SED "$delay_single_quote_subst"`'
- reload_flag='`$ECHO "$reload_flag" | $SED "$delay_single_quote_subst"`'
- reload_cmds='`$ECHO "$reload_cmds" | $SED "$delay_single_quote_subst"`'
- OBJDUMP='`$ECHO "$OBJDUMP" | $SED "$delay_single_quote_subst"`'
- deplibs_check_method='`$ECHO "$deplibs_check_method" | $SED "$delay_single_quote_subst"`'
- file_magic_cmd='`$ECHO "$file_magic_cmd" | $SED "$delay_single_quote_subst"`'
-+file_magic_glob='`$ECHO "$file_magic_glob" | $SED "$delay_single_quote_subst"`'
-+want_nocaseglob='`$ECHO "$want_nocaseglob" | $SED "$delay_single_quote_subst"`'
-+DLLTOOL='`$ECHO "$DLLTOOL" | $SED "$delay_single_quote_subst"`'
-+sharedlib_from_linklib_cmd='`$ECHO "$sharedlib_from_linklib_cmd" | $SED "$delay_single_quote_subst"`'
- AR='`$ECHO "$AR" | $SED "$delay_single_quote_subst"`'
- AR_FLAGS='`$ECHO "$AR_FLAGS" | $SED "$delay_single_quote_subst"`'
-+archiver_list_spec='`$ECHO "$archiver_list_spec" | $SED "$delay_single_quote_subst"`'
- STRIP='`$ECHO "$STRIP" | $SED "$delay_single_quote_subst"`'
- RANLIB='`$ECHO "$RANLIB" | $SED "$delay_single_quote_subst"`'
- old_postinstall_cmds='`$ECHO "$old_postinstall_cmds" | $SED "$delay_single_quote_subst"`'
-@@ -12349,14 +13006,17 @@ lt_cv_sys_global_symbol_pipe='`$ECHO "$lt_cv_sys_global_symbol_pipe" | $SED "$de
- lt_cv_sys_global_symbol_to_cdecl='`$ECHO "$lt_cv_sys_global_symbol_to_cdecl" | $SED "$delay_single_quote_subst"`'
- lt_cv_sys_global_symbol_to_c_name_address='`$ECHO "$lt_cv_sys_global_symbol_to_c_name_address" | $SED "$delay_single_quote_subst"`'
- lt_cv_sys_global_symbol_to_c_name_address_lib_prefix='`$ECHO "$lt_cv_sys_global_symbol_to_c_name_address_lib_prefix" | $SED "$delay_single_quote_subst"`'
-+nm_file_list_spec='`$ECHO "$nm_file_list_spec" | $SED "$delay_single_quote_subst"`'
-+lt_sysroot='`$ECHO "$lt_sysroot" | $SED "$delay_single_quote_subst"`'
- objdir='`$ECHO "$objdir" | $SED "$delay_single_quote_subst"`'
- MAGIC_CMD='`$ECHO "$MAGIC_CMD" | $SED "$delay_single_quote_subst"`'
- lt_prog_compiler_no_builtin_flag='`$ECHO "$lt_prog_compiler_no_builtin_flag" | $SED "$delay_single_quote_subst"`'
--lt_prog_compiler_wl='`$ECHO "$lt_prog_compiler_wl" | $SED "$delay_single_quote_subst"`'
- lt_prog_compiler_pic='`$ECHO "$lt_prog_compiler_pic" | $SED "$delay_single_quote_subst"`'
-+lt_prog_compiler_wl='`$ECHO "$lt_prog_compiler_wl" | $SED "$delay_single_quote_subst"`'
- lt_prog_compiler_static='`$ECHO "$lt_prog_compiler_static" | $SED "$delay_single_quote_subst"`'
- lt_cv_prog_compiler_c_o='`$ECHO "$lt_cv_prog_compiler_c_o" | $SED "$delay_single_quote_subst"`'
- need_locks='`$ECHO "$need_locks" | $SED "$delay_single_quote_subst"`'
-+MANIFEST_TOOL='`$ECHO "$MANIFEST_TOOL" | $SED "$delay_single_quote_subst"`'
- DSYMUTIL='`$ECHO "$DSYMUTIL" | $SED "$delay_single_quote_subst"`'
- NMEDIT='`$ECHO "$NMEDIT" | $SED "$delay_single_quote_subst"`'
- LIPO='`$ECHO "$LIPO" | $SED "$delay_single_quote_subst"`'
-@@ -12389,12 +13049,12 @@ hardcode_shlibpath_var='`$ECHO "$hardcode_shlibpath_var" | $SED "$delay_single_q
- hardcode_automatic='`$ECHO "$hardcode_automatic" | $SED "$delay_single_quote_subst"`'
- inherit_rpath='`$ECHO "$inherit_rpath" | $SED "$delay_single_quote_subst"`'
- link_all_deplibs='`$ECHO "$link_all_deplibs" | $SED "$delay_single_quote_subst"`'
--fix_srcfile_path='`$ECHO "$fix_srcfile_path" | $SED "$delay_single_quote_subst"`'
- always_export_symbols='`$ECHO "$always_export_symbols" | $SED "$delay_single_quote_subst"`'
- export_symbols_cmds='`$ECHO "$export_symbols_cmds" | $SED "$delay_single_quote_subst"`'
- exclude_expsyms='`$ECHO "$exclude_expsyms" | $SED "$delay_single_quote_subst"`'
- include_expsyms='`$ECHO "$include_expsyms" | $SED "$delay_single_quote_subst"`'
- prelink_cmds='`$ECHO "$prelink_cmds" | $SED "$delay_single_quote_subst"`'
-+postlink_cmds='`$ECHO "$postlink_cmds" | $SED "$delay_single_quote_subst"`'
- file_list_spec='`$ECHO "$file_list_spec" | $SED "$delay_single_quote_subst"`'
- variables_saved_for_relink='`$ECHO "$variables_saved_for_relink" | $SED "$delay_single_quote_subst"`'
- need_lib_prefix='`$ECHO "$need_lib_prefix" | $SED "$delay_single_quote_subst"`'
-@@ -12449,8 +13109,13 @@ reload_flag \
- OBJDUMP \
- deplibs_check_method \
- file_magic_cmd \
-+file_magic_glob \
-+want_nocaseglob \
-+DLLTOOL \
-+sharedlib_from_linklib_cmd \
- AR \
- AR_FLAGS \
-+archiver_list_spec \
- STRIP \
- RANLIB \
- CC \
-@@ -12460,12 +13125,14 @@ lt_cv_sys_global_symbol_pipe \
- lt_cv_sys_global_symbol_to_cdecl \
- lt_cv_sys_global_symbol_to_c_name_address \
- lt_cv_sys_global_symbol_to_c_name_address_lib_prefix \
-+nm_file_list_spec \
- lt_prog_compiler_no_builtin_flag \
--lt_prog_compiler_wl \
- lt_prog_compiler_pic \
-+lt_prog_compiler_wl \
- lt_prog_compiler_static \
- lt_cv_prog_compiler_c_o \
- need_locks \
-+MANIFEST_TOOL \
- DSYMUTIL \
- NMEDIT \
- LIPO \
-@@ -12481,7 +13148,6 @@ no_undefined_flag \
- hardcode_libdir_flag_spec \
- hardcode_libdir_flag_spec_ld \
- hardcode_libdir_separator \
--fix_srcfile_path \
- exclude_expsyms \
- include_expsyms \
- file_list_spec \
-@@ -12517,6 +13183,7 @@ module_cmds \
- module_expsym_cmds \
- export_symbols_cmds \
- prelink_cmds \
-+postlink_cmds \
- postinstall_cmds \
- postuninstall_cmds \
- finish_cmds \
-@@ -13115,7 +13782,8 @@ $as_echo X"$file" |
- # NOTE: Changes made to this file will be lost: look at ltmain.sh.
- #
- # Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005,
--# 2006, 2007, 2008, 2009 Free Software Foundation, Inc.
-+# 2006, 2007, 2008, 2009, 2010 Free Software Foundation,
-+# Inc.
- # Written by Gordon Matzigkeit, 1996
- #
- # This file is part of GNU Libtool.
-@@ -13218,19 +13886,42 @@ SP2NL=$lt_lt_SP2NL
- # turn newlines into spaces.
- NL2SP=$lt_lt_NL2SP
-
-+# convert \$build file names to \$host format.
-+to_host_file_cmd=$lt_cv_to_host_file_cmd
-+
-+# convert \$build files to toolchain format.
-+to_tool_file_cmd=$lt_cv_to_tool_file_cmd
-+
- # An object symbol dumper.
- OBJDUMP=$lt_OBJDUMP
-
- # Method to check whether dependent libraries are shared objects.
- deplibs_check_method=$lt_deplibs_check_method
-
--# Command to use when deplibs_check_method == "file_magic".
-+# Command to use when deplibs_check_method = "file_magic".
- file_magic_cmd=$lt_file_magic_cmd
-
-+# How to find potential files when deplibs_check_method = "file_magic".
-+file_magic_glob=$lt_file_magic_glob
-+
-+# Find potential files using nocaseglob when deplibs_check_method = "file_magic".
-+want_nocaseglob=$lt_want_nocaseglob
-+
-+# DLL creation program.
-+DLLTOOL=$lt_DLLTOOL
-+
-+# Command to associate shared and link libraries.
-+sharedlib_from_linklib_cmd=$lt_sharedlib_from_linklib_cmd
-+
- # The archiver.
- AR=$lt_AR
-+
-+# Flags to create an archive.
- AR_FLAGS=$lt_AR_FLAGS
-
-+# How to feed a file listing to the archiver.
-+archiver_list_spec=$lt_archiver_list_spec
-+
- # A symbol stripping program.
- STRIP=$lt_STRIP
-
-@@ -13260,6 +13951,12 @@ global_symbol_to_c_name_address=$lt_lt_cv_sys_global_symbol_to_c_name_address
- # Transform the output of nm in a C name address pair when lib prefix is needed.
- global_symbol_to_c_name_address_lib_prefix=$lt_lt_cv_sys_global_symbol_to_c_name_address_lib_prefix
-
-+# Specify filename containing input files for \$NM.
-+nm_file_list_spec=$lt_nm_file_list_spec
-+
-+# The root where to search for dependent libraries,and in which our libraries should be installed.
-+lt_sysroot=$lt_sysroot
-+
- # The name of the directory that contains temporary libtool files.
- objdir=$objdir
-
-@@ -13269,6 +13966,9 @@ MAGIC_CMD=$MAGIC_CMD
- # Must we lock files when doing compilation?
- need_locks=$lt_need_locks
-
-+# Manifest tool.
-+MANIFEST_TOOL=$lt_MANIFEST_TOOL
-+
- # Tool to manipulate archived DWARF debug symbol files on Mac OS X.
- DSYMUTIL=$lt_DSYMUTIL
-
-@@ -13383,12 +14083,12 @@ with_gcc=$GCC
- # Compiler flag to turn off builtin functions.
- no_builtin_flag=$lt_lt_prog_compiler_no_builtin_flag
-
--# How to pass a linker flag through the compiler.
--wl=$lt_lt_prog_compiler_wl
--
- # Additional compiler flags for building library objects.
- pic_flag=$lt_lt_prog_compiler_pic
-
-+# How to pass a linker flag through the compiler.
-+wl=$lt_lt_prog_compiler_wl
-+
- # Compiler flag to prevent dynamic linking.
- link_static_flag=$lt_lt_prog_compiler_static
-
-@@ -13475,9 +14175,6 @@ inherit_rpath=$inherit_rpath
- # Whether libtool must link a program against all its dependency libraries.
- link_all_deplibs=$link_all_deplibs
-
--# Fix the shell variable \$srcfile for the compiler.
--fix_srcfile_path=$lt_fix_srcfile_path
--
- # Set to "yes" if exported symbols are required.
- always_export_symbols=$always_export_symbols
-
-@@ -13493,6 +14190,9 @@ include_expsyms=$lt_include_expsyms
- # Commands necessary for linking programs (against libraries) with templates.
- prelink_cmds=$lt_prelink_cmds
-
-+# Commands necessary for finishing linking programs.
-+postlink_cmds=$lt_postlink_cmds
-+
- # Specify filename containing input files.
- file_list_spec=$lt_file_list_spec
-
-@@ -13525,210 +14225,169 @@ ltmain="$ac_aux_dir/ltmain.sh"
- # if finds mixed CR/LF and LF-only lines. Since sed operates in
- # text mode, it properly converts lines to CR/LF. This bash problem
- # is reportedly fixed, but why not run on old versions too?
-- sed '/^# Generated shell functions inserted here/q' "$ltmain" >> "$cfgfile" \
-- || (rm -f "$cfgfile"; exit 1)
--
-- case $xsi_shell in
-- yes)
-- cat << \_LT_EOF >> "$cfgfile"
--
--# func_dirname file append nondir_replacement
--# Compute the dirname of FILE. If nonempty, add APPEND to the result,
--# otherwise set result to NONDIR_REPLACEMENT.
--func_dirname ()
--{
-- case ${1} in
-- */*) func_dirname_result="${1%/*}${2}" ;;
-- * ) func_dirname_result="${3}" ;;
-- esac
--}
--
--# func_basename file
--func_basename ()
--{
-- func_basename_result="${1##*/}"
--}
--
--# func_dirname_and_basename file append nondir_replacement
--# perform func_basename and func_dirname in a single function
--# call:
--# dirname: Compute the dirname of FILE. If nonempty,
--# add APPEND to the result, otherwise set result
--# to NONDIR_REPLACEMENT.
--# value returned in "$func_dirname_result"
--# basename: Compute filename of FILE.
--# value retuned in "$func_basename_result"
--# Implementation must be kept synchronized with func_dirname
--# and func_basename. For efficiency, we do not delegate to
--# those functions but instead duplicate the functionality here.
--func_dirname_and_basename ()
--{
-- case ${1} in
-- */*) func_dirname_result="${1%/*}${2}" ;;
-- * ) func_dirname_result="${3}" ;;
-- esac
-- func_basename_result="${1##*/}"
--}
--
--# func_stripname prefix suffix name
--# strip PREFIX and SUFFIX off of NAME.
--# PREFIX and SUFFIX must not contain globbing or regex special
--# characters, hashes, percent signs, but SUFFIX may contain a leading
--# dot (in which case that matches only a dot).
--func_stripname ()
--{
-- # pdksh 5.2.14 does not do ${X%$Y} correctly if both X and Y are
-- # positional parameters, so assign one to ordinary parameter first.
-- func_stripname_result=${3}
-- func_stripname_result=${func_stripname_result#"${1}"}
-- func_stripname_result=${func_stripname_result%"${2}"}
--}
--
--# func_opt_split
--func_opt_split ()
--{
-- func_opt_split_opt=${1%%=*}
-- func_opt_split_arg=${1#*=}
--}
--
--# func_lo2o object
--func_lo2o ()
--{
-- case ${1} in
-- *.lo) func_lo2o_result=${1%.lo}.${objext} ;;
-- *) func_lo2o_result=${1} ;;
-- esac
--}
--
--# func_xform libobj-or-source
--func_xform ()
--{
-- func_xform_result=${1%.*}.lo
--}
--
--# func_arith arithmetic-term...
--func_arith ()
--{
-- func_arith_result=$(( $* ))
--}
--
--# func_len string
--# STRING may not start with a hyphen.
--func_len ()
--{
-- func_len_result=${#1}
--}
--
--_LT_EOF
-- ;;
-- *) # Bourne compatible functions.
-- cat << \_LT_EOF >> "$cfgfile"
--
--# func_dirname file append nondir_replacement
--# Compute the dirname of FILE. If nonempty, add APPEND to the result,
--# otherwise set result to NONDIR_REPLACEMENT.
--func_dirname ()
--{
-- # Extract subdirectory from the argument.
-- func_dirname_result=`$ECHO "${1}" | $SED "$dirname"`
-- if test "X$func_dirname_result" = "X${1}"; then
-- func_dirname_result="${3}"
-- else
-- func_dirname_result="$func_dirname_result${2}"
-- fi
--}
--
--# func_basename file
--func_basename ()
--{
-- func_basename_result=`$ECHO "${1}" | $SED "$basename"`
--}
--
--
--# func_stripname prefix suffix name
--# strip PREFIX and SUFFIX off of NAME.
--# PREFIX and SUFFIX must not contain globbing or regex special
--# characters, hashes, percent signs, but SUFFIX may contain a leading
--# dot (in which case that matches only a dot).
--# func_strip_suffix prefix name
--func_stripname ()
--{
-- case ${2} in
-- .*) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%\\\\${2}\$%%"`;;
-- *) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%${2}\$%%"`;;
-- esac
--}
--
--# sed scripts:
--my_sed_long_opt='1s/^\(-[^=]*\)=.*/\1/;q'
--my_sed_long_arg='1s/^-[^=]*=//'
--
--# func_opt_split
--func_opt_split ()
--{
-- func_opt_split_opt=`$ECHO "${1}" | $SED "$my_sed_long_opt"`
-- func_opt_split_arg=`$ECHO "${1}" | $SED "$my_sed_long_arg"`
--}
--
--# func_lo2o object
--func_lo2o ()
--{
-- func_lo2o_result=`$ECHO "${1}" | $SED "$lo2o"`
--}
--
--# func_xform libobj-or-source
--func_xform ()
--{
-- func_xform_result=`$ECHO "${1}" | $SED 's/\.[^.]*$/.lo/'`
--}
--
--# func_arith arithmetic-term...
--func_arith ()
--{
-- func_arith_result=`expr "$@"`
--}
--
--# func_len string
--# STRING may not start with a hyphen.
--func_len ()
--{
-- func_len_result=`expr "$1" : ".*" 2>/dev/null || echo $max_cmd_len`
--}
--
--_LT_EOF
--esac
--
--case $lt_shell_append in
-- yes)
-- cat << \_LT_EOF >> "$cfgfile"
--
--# func_append var value
--# Append VALUE to the end of shell variable VAR.
--func_append ()
--{
-- eval "$1+=\$2"
--}
--_LT_EOF
-- ;;
-- *)
-- cat << \_LT_EOF >> "$cfgfile"
--
--# func_append var value
--# Append VALUE to the end of shell variable VAR.
--func_append ()
--{
-- eval "$1=\$$1\$2"
--}
--
--_LT_EOF
-- ;;
-- esac
--
--
-- sed -n '/^# Generated shell functions inserted here/,$p' "$ltmain" >> "$cfgfile" \
-- || (rm -f "$cfgfile"; exit 1)
--
-- mv -f "$cfgfile" "$ofile" ||
-+ sed '$q' "$ltmain" >> "$cfgfile" \
-+ || (rm -f "$cfgfile"; exit 1)
-+
-+ if test x"$xsi_shell" = xyes; then
-+ sed -e '/^func_dirname ()$/,/^} # func_dirname /c\
-+func_dirname ()\
-+{\
-+\ case ${1} in\
-+\ */*) func_dirname_result="${1%/*}${2}" ;;\
-+\ * ) func_dirname_result="${3}" ;;\
-+\ esac\
-+} # Extended-shell func_dirname implementation' "$cfgfile" > $cfgfile.tmp \
-+ && mv -f "$cfgfile.tmp" "$cfgfile" \
-+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-+test 0 -eq $? || _lt_function_replace_fail=:
-+
-+
-+ sed -e '/^func_basename ()$/,/^} # func_basename /c\
-+func_basename ()\
-+{\
-+\ func_basename_result="${1##*/}"\
-+} # Extended-shell func_basename implementation' "$cfgfile" > $cfgfile.tmp \
-+ && mv -f "$cfgfile.tmp" "$cfgfile" \
-+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-+test 0 -eq $? || _lt_function_replace_fail=:
-+
-+
-+ sed -e '/^func_dirname_and_basename ()$/,/^} # func_dirname_and_basename /c\
-+func_dirname_and_basename ()\
-+{\
-+\ case ${1} in\
-+\ */*) func_dirname_result="${1%/*}${2}" ;;\
-+\ * ) func_dirname_result="${3}" ;;\
-+\ esac\
-+\ func_basename_result="${1##*/}"\
-+} # Extended-shell func_dirname_and_basename implementation' "$cfgfile" > $cfgfile.tmp \
-+ && mv -f "$cfgfile.tmp" "$cfgfile" \
-+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-+test 0 -eq $? || _lt_function_replace_fail=:
-+
-+
-+ sed -e '/^func_stripname ()$/,/^} # func_stripname /c\
-+func_stripname ()\
-+{\
-+\ # pdksh 5.2.14 does not do ${X%$Y} correctly if both X and Y are\
-+\ # positional parameters, so assign one to ordinary parameter first.\
-+\ func_stripname_result=${3}\
-+\ func_stripname_result=${func_stripname_result#"${1}"}\
-+\ func_stripname_result=${func_stripname_result%"${2}"}\
-+} # Extended-shell func_stripname implementation' "$cfgfile" > $cfgfile.tmp \
-+ && mv -f "$cfgfile.tmp" "$cfgfile" \
-+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-+test 0 -eq $? || _lt_function_replace_fail=:
-+
-+
-+ sed -e '/^func_split_long_opt ()$/,/^} # func_split_long_opt /c\
-+func_split_long_opt ()\
-+{\
-+\ func_split_long_opt_name=${1%%=*}\
-+\ func_split_long_opt_arg=${1#*=}\
-+} # Extended-shell func_split_long_opt implementation' "$cfgfile" > $cfgfile.tmp \
-+ && mv -f "$cfgfile.tmp" "$cfgfile" \
-+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-+test 0 -eq $? || _lt_function_replace_fail=:
-+
-+
-+ sed -e '/^func_split_short_opt ()$/,/^} # func_split_short_opt /c\
-+func_split_short_opt ()\
-+{\
-+\ func_split_short_opt_arg=${1#??}\
-+\ func_split_short_opt_name=${1%"$func_split_short_opt_arg"}\
-+} # Extended-shell func_split_short_opt implementation' "$cfgfile" > $cfgfile.tmp \
-+ && mv -f "$cfgfile.tmp" "$cfgfile" \
-+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-+test 0 -eq $? || _lt_function_replace_fail=:
-+
-+
-+ sed -e '/^func_lo2o ()$/,/^} # func_lo2o /c\
-+func_lo2o ()\
-+{\
-+\ case ${1} in\
-+\ *.lo) func_lo2o_result=${1%.lo}.${objext} ;;\
-+\ *) func_lo2o_result=${1} ;;\
-+\ esac\
-+} # Extended-shell func_lo2o implementation' "$cfgfile" > $cfgfile.tmp \
-+ && mv -f "$cfgfile.tmp" "$cfgfile" \
-+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-+test 0 -eq $? || _lt_function_replace_fail=:
-+
-+
-+ sed -e '/^func_xform ()$/,/^} # func_xform /c\
-+func_xform ()\
-+{\
-+ func_xform_result=${1%.*}.lo\
-+} # Extended-shell func_xform implementation' "$cfgfile" > $cfgfile.tmp \
-+ && mv -f "$cfgfile.tmp" "$cfgfile" \
-+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-+test 0 -eq $? || _lt_function_replace_fail=:
-+
-+
-+ sed -e '/^func_arith ()$/,/^} # func_arith /c\
-+func_arith ()\
-+{\
-+ func_arith_result=$(( $* ))\
-+} # Extended-shell func_arith implementation' "$cfgfile" > $cfgfile.tmp \
-+ && mv -f "$cfgfile.tmp" "$cfgfile" \
-+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-+test 0 -eq $? || _lt_function_replace_fail=:
-+
-+
-+ sed -e '/^func_len ()$/,/^} # func_len /c\
-+func_len ()\
-+{\
-+ func_len_result=${#1}\
-+} # Extended-shell func_len implementation' "$cfgfile" > $cfgfile.tmp \
-+ && mv -f "$cfgfile.tmp" "$cfgfile" \
-+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-+test 0 -eq $? || _lt_function_replace_fail=:
-+
-+fi
-+
-+if test x"$lt_shell_append" = xyes; then
-+ sed -e '/^func_append ()$/,/^} # func_append /c\
-+func_append ()\
-+{\
-+ eval "${1}+=\\${2}"\
-+} # Extended-shell func_append implementation' "$cfgfile" > $cfgfile.tmp \
-+ && mv -f "$cfgfile.tmp" "$cfgfile" \
-+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-+test 0 -eq $? || _lt_function_replace_fail=:
-+
-+
-+ sed -e '/^func_append_quoted ()$/,/^} # func_append_quoted /c\
-+func_append_quoted ()\
-+{\
-+\ func_quote_for_eval "${2}"\
-+\ eval "${1}+=\\\\ \\$func_quote_for_eval_result"\
-+} # Extended-shell func_append_quoted implementation' "$cfgfile" > $cfgfile.tmp \
-+ && mv -f "$cfgfile.tmp" "$cfgfile" \
-+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-+test 0 -eq $? || _lt_function_replace_fail=:
-+
-+
-+ # Save a `func_append' function call where possible by direct use of '+='
-+ sed -e 's%func_append \([a-zA-Z_]\{1,\}\) "%\1+="%g' $cfgfile > $cfgfile.tmp \
-+ && mv -f "$cfgfile.tmp" "$cfgfile" \
-+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-+ test 0 -eq $? || _lt_function_replace_fail=:
-+else
-+ # Save a `func_append' function call even when '+=' is not available
-+ sed -e 's%func_append \([a-zA-Z_]\{1,\}\) "%\1="$\1%g' $cfgfile > $cfgfile.tmp \
-+ && mv -f "$cfgfile.tmp" "$cfgfile" \
-+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-+ test 0 -eq $? || _lt_function_replace_fail=:
-+fi
-+
-+if test x"$_lt_function_replace_fail" = x":"; then
-+ { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: Unable to substitute extended shell functions in $ofile" >&5
-+$as_echo "$as_me: WARNING: Unable to substitute extended shell functions in $ofile" >&2;}
-+fi
-+
-+
-+ mv -f "$cfgfile" "$ofile" ||
- (rm -f "$ofile" && cp "$cfgfile" "$ofile" && rm -f "$cfgfile")
- chmod +x "$ofile"
-
diff --git a/meta/recipes-devtools/binutils/binutils/0010-Fix-rpath-in-libtool-when-sysroot-is-enabled.patch b/meta/recipes-devtools/binutils/binutils/0010-Fix-rpath-in-libtool-when-sysroot-is-enabled.patch
deleted file mode 100644
index 217ba5df85..0000000000
--- a/meta/recipes-devtools/binutils/binutils/0010-Fix-rpath-in-libtool-when-sysroot-is-enabled.patch
+++ /dev/null
@@ -1,49 +0,0 @@
-From 1c4581a059afe2799bb825b388ae92f8fa6f19a3 Mon Sep 17 00:00:00 2001
-From: Khem Raj <raj.khem@gmail.com>
-Date: Mon, 2 Mar 2015 01:42:38 +0000
-Subject: [PATCH] Fix rpath in libtool when sysroot is enabled
-
-Enabling sysroot support in libtool exposed a bug where the final
-library had an RPATH encoded into it which still pointed to the
-sysroot. This works around the issue until it gets sorted out
-upstream.
-
-Fix suggested by Richard Purdie <richard.purdie@linuxfoundation.org>
-
-Upstream-Status: Inappropriate [embedded specific]
-
-Signed-off-by: Scott Garman <scott.a.garman@intel.com>
-Signed-off-by: Khem Raj <raj.khem@gmail.com>
----
- ltmain.sh | 10 ++++++++--
- 1 file changed, 8 insertions(+), 2 deletions(-)
-
-diff --git a/ltmain.sh b/ltmain.sh
-index 70e856e0659..11ee684cccf 100644
---- a/ltmain.sh
-+++ b/ltmain.sh
-@@ -8035,9 +8035,11 @@ EOF
- test "$opt_mode" != relink && rpath="$compile_rpath$rpath"
- for libdir in $rpath; do
- if test -n "$hardcode_libdir_flag_spec"; then
-+ func_replace_sysroot "$libdir"
-+ libdir=$func_replace_sysroot_result
-+ func_stripname '=' '' "$libdir"
-+ libdir=$func_stripname_result
- if test -n "$hardcode_libdir_separator"; then
-- func_replace_sysroot "$libdir"
-- libdir=$func_replace_sysroot_result
- if test -z "$hardcode_libdirs"; then
- hardcode_libdirs="$libdir"
- else
-@@ -8770,6 +8772,10 @@ EOF
- hardcode_libdirs=
- for libdir in $compile_rpath $finalize_rpath; do
- if test -n "$hardcode_libdir_flag_spec"; then
-+ func_replace_sysroot "$libdir"
-+ libdir=$func_replace_sysroot_result
-+ func_stripname '=' '' "$libdir"
-+ libdir=$func_stripname_result
- if test -n "$hardcode_libdir_separator"; then
- if test -z "$hardcode_libdirs"; then
- hardcode_libdirs="$libdir"
diff --git a/meta/recipes-devtools/binutils/binutils/0010-sync-with-OE-libtool-changes.patch b/meta/recipes-devtools/binutils/binutils/0010-sync-with-OE-libtool-changes.patch
new file mode 100644
index 0000000000..57d9ac5f27
--- /dev/null
+++ b/meta/recipes-devtools/binutils/binutils/0010-sync-with-OE-libtool-changes.patch
@@ -0,0 +1,86 @@
+From befc176c209bbb4c32b57c2068e813c88b1ab6b1 Mon Sep 17 00:00:00 2001
+From: Ross Burton <ross.burton@intel.com>
+Date: Mon, 6 Mar 2017 23:33:27 -0800
+Subject: [PATCH] sync with OE libtool changes
+
+Apply these patches from our libtool patches as not only are redundant RPATHs a
+waste of space but they can cause incorrect linking when native packages are
+restored from sstate.
+
+fix-rpath.patch:
+We don't want to add RPATHS which match default linker
+search paths, they're a waste of space. This patch
+filters libtools list and removes the ones we don't need.
+
+norm-rpath.patch:
+Libtool may be passed link paths of the form "/usr/lib/../lib", which
+fool its detection code into thinking it should be included as an
+RPATH in the generated binary. Normalize before comparision.
+
+Upstream-Status: Inappropriate
+
+Signed-off-by: Ross Burton <ross.burton@intel.com>
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+---
+ ltmain.sh | 34 ++++++++++++++++++++++++++++------
+ 1 file changed, 28 insertions(+), 6 deletions(-)
+
+diff --git a/ltmain.sh b/ltmain.sh
+index f59eb4aa631..ce97db42ce7 100644
+--- a/ltmain.sh
++++ b/ltmain.sh
+@@ -8053,8 +8053,16 @@ EOF
+ esac
+ fi
+ else
+- eval flag=\"$hardcode_libdir_flag_spec\"
+- func_append dep_rpath " $flag"
++ # We only want to hardcode in an rpath if it isn't in the
++ # default dlsearch path.
++ func_normal_abspath "$libdir"
++ libdir_norm=$func_normal_abspath_result
++ case " $sys_lib_dlsearch_path " in
++ *" $libdir_norm "*) ;;
++ *) eval flag=\"$hardcode_libdir_flag_spec\"
++ func_append dep_rpath " $flag"
++ ;;
++ esac
+ fi
+ elif test -n "$runpath_var"; then
+ case "$perm_rpath " in
+@@ -8790,8 +8798,16 @@ EOF
+ esac
+ fi
+ else
+- eval flag=\"$hardcode_libdir_flag_spec\"
+- func_append rpath " $flag"
++ # We only want to hardcode in an rpath if it isn't in the
++ # default dlsearch path.
++ func_normal_abspath "$libdir"
++ libdir_norm=$func_normal_abspath_result
++ case " $sys_lib_dlsearch_path " in
++ *" $libdir_norm "*) ;;
++ *) eval flag=\"$hardcode_libdir_flag_spec\"
++ rpath+=" $flag"
++ ;;
++ esac
+ fi
+ elif test -n "$runpath_var"; then
+ case "$perm_rpath " in
+@@ -8841,8 +8857,14 @@ EOF
+ esac
+ fi
+ else
+- eval flag=\"$hardcode_libdir_flag_spec\"
+- func_append rpath " $flag"
++ # We only want to hardcode in an rpath if it isn't in the
++ # default dlsearch path.
++ case " $sys_lib_dlsearch_path " in
++ *" $libdir "*) ;;
++ *) eval flag=\"$hardcode_libdir_flag_spec\"
++ func_append rpath " $flag"
++ ;;
++ esac
+ fi
+ elif test -n "$runpath_var"; then
+ case "$finalize_perm_rpath " in
diff --git a/meta/recipes-devtools/binutils/binutils/0011-Check-for-clang-before-checking-gcc-version.patch b/meta/recipes-devtools/binutils/binutils/0011-Check-for-clang-before-checking-gcc-version.patch
new file mode 100644
index 0000000000..c47f41f5ca
--- /dev/null
+++ b/meta/recipes-devtools/binutils/binutils/0011-Check-for-clang-before-checking-gcc-version.patch
@@ -0,0 +1,45 @@
+From cad852e0ed46a1c26607f9e7d17cc5974b05e38a Mon Sep 17 00:00:00 2001
+From: Khem Raj <raj.khem@gmail.com>
+Date: Wed, 15 Apr 2020 14:17:20 -0700
+Subject: [PATCH] Check for clang before checking gcc version
+
+Clang advertises itself to be gcc 4.2.1, so when compiling this test
+here fails since gcc < 4.4.5 did not support -static-libstdc++ but thats
+not true for clang, so its better to make an additional check for clang
+before resorting to gcc version check. This should let clang enable
+static libstdc++ linking
+
+Upstream-Status: Pending
+
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+---
+ configure | 2 +-
+ configure.ac | 2 +-
+ 2 files changed, 2 insertions(+), 2 deletions(-)
+
+diff --git a/configure b/configure
+index 670684d83d1..f9ba1c0a4f7 100755
+--- a/configure
++++ b/configure
+@@ -5432,7 +5432,7 @@ ac_compiler_gnu=$ac_cv_cxx_compiler_gnu
+ cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+ /* end confdefs.h. */
+
+-#if (__GNUC__ < 4) || (__GNUC__ == 4 && __GNUC_MINOR__ < 5)
++#if !defined(__clang__) && ((__GNUC__ < 4) || (__GNUC__ == 4 && __GNUC_MINOR__ < 5))
+ #error -static-libstdc++ not implemented
+ #endif
+ int main() {}
+diff --git a/configure.ac b/configure.ac
+index 88b4800e298..6cd4530db56 100644
+--- a/configure.ac
++++ b/configure.ac
+@@ -1410,7 +1410,7 @@ if test "$GCC" = yes; then
+ AC_MSG_CHECKING([whether g++ accepts -static-libstdc++ -static-libgcc])
+ AC_LANG_PUSH(C++)
+ AC_LINK_IFELSE([AC_LANG_SOURCE([
+-#if (__GNUC__ < 4) || (__GNUC__ == 4 && __GNUC_MINOR__ < 5)
++#if !defined(__clang__) && ((__GNUC__ < 4) || (__GNUC__ == 4 && __GNUC_MINOR__ < 5))
+ #error -static-libstdc++ not implemented
+ #endif
+ int main() {}])],
diff --git a/meta/recipes-devtools/binutils/binutils/0011-sync-with-OE-libtool-changes.patch b/meta/recipes-devtools/binutils/binutils/0011-sync-with-OE-libtool-changes.patch
deleted file mode 100644
index 3607e36ef4..0000000000
--- a/meta/recipes-devtools/binutils/binutils/0011-sync-with-OE-libtool-changes.patch
+++ /dev/null
@@ -1,86 +0,0 @@
-From d71c715554a054c534954b0aa357ca699ed68430 Mon Sep 17 00:00:00 2001
-From: Ross Burton <ross.burton@intel.com>
-Date: Mon, 6 Mar 2017 23:33:27 -0800
-Subject: [PATCH] sync with OE libtool changes
-
-Apply these patches from our libtool patches as not only are redundant RPATHs a
-waste of space but they can cause incorrect linking when native packages are
-restored from sstate.
-
-fix-rpath.patch:
-We don't want to add RPATHS which match default linker
-search paths, they're a waste of space. This patch
-filters libtools list and removes the ones we don't need.
-
-norm-rpath.patch:
-Libtool may be passed link paths of the form "/usr/lib/../lib", which
-fool its detection code into thinking it should be included as an
-RPATH in the generated binary. Normalize before comparision.
-
-Upstream-Status: Inappropriate
-
-Signed-off-by: Ross Burton <ross.burton@intel.com>
-Signed-off-by: Khem Raj <raj.khem@gmail.com>
----
- ltmain.sh | 34 ++++++++++++++++++++++++++++------
- 1 file changed, 28 insertions(+), 6 deletions(-)
-
-diff --git a/ltmain.sh b/ltmain.sh
-index 11ee684cccf..3b19ac15328 100644
---- a/ltmain.sh
-+++ b/ltmain.sh
-@@ -8053,8 +8053,16 @@ EOF
- esac
- fi
- else
-- eval flag=\"$hardcode_libdir_flag_spec\"
-- func_append dep_rpath " $flag"
-+ # We only want to hardcode in an rpath if it isn't in the
-+ # default dlsearch path.
-+ func_normal_abspath "$libdir"
-+ libdir_norm=$func_normal_abspath_result
-+ case " $sys_lib_dlsearch_path " in
-+ *" $libdir_norm "*) ;;
-+ *) eval flag=\"$hardcode_libdir_flag_spec\"
-+ func_append dep_rpath " $flag"
-+ ;;
-+ esac
- fi
- elif test -n "$runpath_var"; then
- case "$perm_rpath " in
-@@ -8790,8 +8798,16 @@ EOF
- esac
- fi
- else
-- eval flag=\"$hardcode_libdir_flag_spec\"
-- func_append rpath " $flag"
-+ # We only want to hardcode in an rpath if it isn't in the
-+ # default dlsearch path.
-+ func_normal_abspath "$libdir"
-+ libdir_norm=$func_normal_abspath_result
-+ case " $sys_lib_dlsearch_path " in
-+ *" $libdir_norm "*) ;;
-+ *) eval flag=\"$hardcode_libdir_flag_spec\"
-+ rpath+=" $flag"
-+ ;;
-+ esac
- fi
- elif test -n "$runpath_var"; then
- case "$perm_rpath " in
-@@ -8841,8 +8857,14 @@ EOF
- esac
- fi
- else
-- eval flag=\"$hardcode_libdir_flag_spec\"
-- func_append rpath " $flag"
-+ # We only want to hardcode in an rpath if it isn't in the
-+ # default dlsearch path.
-+ case " $sys_lib_dlsearch_path " in
-+ *" $libdir "*) ;;
-+ *) eval flag=\"$hardcode_libdir_flag_spec\"
-+ func_append rpath " $flag"
-+ ;;
-+ esac
- fi
- elif test -n "$runpath_var"; then
- case "$finalize_perm_rpath " in
diff --git a/meta/recipes-devtools/binutils/binutils/0012-Check-for-clang-before-checking-gcc-version.patch b/meta/recipes-devtools/binutils/binutils/0012-Check-for-clang-before-checking-gcc-version.patch
deleted file mode 100644
index 8848c05ae0..0000000000
--- a/meta/recipes-devtools/binutils/binutils/0012-Check-for-clang-before-checking-gcc-version.patch
+++ /dev/null
@@ -1,45 +0,0 @@
-From 787d7cd71d7886d3193c0fd747101c54ad7c3cd8 Mon Sep 17 00:00:00 2001
-From: Khem Raj <raj.khem@gmail.com>
-Date: Wed, 15 Apr 2020 14:17:20 -0700
-Subject: [PATCH] Check for clang before checking gcc version
-
-Clang advertises itself to be gcc 4.2.1, so when compiling this test
-here fails since gcc < 4.4.5 did not support -static-libstdc++ but thats
-not true for clang, so its better to make an additional check for clang
-before resorting to gcc version check. This should let clang enable
-static libstdc++ linking
-
-Upstream-Status: Pending
-
-Signed-off-by: Khem Raj <raj.khem@gmail.com>
----
- configure | 2 +-
- configure.ac | 2 +-
- 2 files changed, 2 insertions(+), 2 deletions(-)
-
-diff --git a/configure b/configure
-index 6a1da1665d8..916656dc233 100755
---- a/configure
-+++ b/configure
-@@ -5287,7 +5287,7 @@ ac_compiler_gnu=$ac_cv_cxx_compiler_gnu
- cat confdefs.h - <<_ACEOF >conftest.$ac_ext
- /* end confdefs.h. */
-
--#if (__GNUC__ < 4) || (__GNUC__ == 4 && __GNUC_MINOR__ < 5)
-+#if !defined(__clang__) && ((__GNUC__ < 4) || (__GNUC__ == 4 && __GNUC_MINOR__ < 5))
- #error -static-libstdc++ not implemented
- #endif
- int main() {}
-diff --git a/configure.ac b/configure.ac
-index 2b10e9a1b02..677a0196c2b 100644
---- a/configure.ac
-+++ b/configure.ac
-@@ -1309,7 +1309,7 @@ if test "$GCC" = yes; then
- AC_MSG_CHECKING([whether g++ accepts -static-libstdc++ -static-libgcc])
- AC_LANG_PUSH(C++)
- AC_LINK_IFELSE([AC_LANG_SOURCE([
--#if (__GNUC__ < 4) || (__GNUC__ == 4 && __GNUC_MINOR__ < 5)
-+#if !defined(__clang__) && ((__GNUC__ < 4) || (__GNUC__ == 4 && __GNUC_MINOR__ < 5))
- #error -static-libstdc++ not implemented
- #endif
- int main() {}])],
diff --git a/meta/recipes-devtools/binutils/binutils/0012-Only-generate-an-RPATH-entry-if-LD_RUN_PATH-is-not-e.patch b/meta/recipes-devtools/binutils/binutils/0012-Only-generate-an-RPATH-entry-if-LD_RUN_PATH-is-not-e.patch
new file mode 100644
index 0000000000..985911aab1
--- /dev/null
+++ b/meta/recipes-devtools/binutils/binutils/0012-Only-generate-an-RPATH-entry-if-LD_RUN_PATH-is-not-e.patch
@@ -0,0 +1,38 @@
+From 47e53916e3112391d8bff2b2257dcec0a2426d66 Mon Sep 17 00:00:00 2001
+From: Khem Raj <raj.khem@gmail.com>
+Date: Thu, 10 Mar 2022 21:21:33 -0800
+Subject: [PATCH] Only generate an RPATH entry if LD_RUN_PATH is not empty
+
+for cases where -rpath isn't specified. debian (#151024)
+
+Upstream-Status: Pending
+
+Signed-off-by: Chris Chimelis <chris@debian.org>
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+---
+ ld/ldelf.c | 5 +++++
+ 1 file changed, 5 insertions(+)
+
+diff --git a/ld/ldelf.c b/ld/ldelf.c
+index 187b359af86..ad792f02d63 100644
+--- a/ld/ldelf.c
++++ b/ld/ldelf.c
+@@ -1130,6 +1130,9 @@ ldelf_handle_dt_needed (struct elf_link_hash_table *htab,
+ && command_line.rpath == NULL)
+ {
+ path = (const char *) getenv ("LD_RUN_PATH");
++ if ((path) && (strlen (path) == 0))
++ path = NULL;
++
+ if (path
+ && ldelf_search_needed (path, &n, force,
+ is_linux, elfsize))
+@@ -1805,6 +1808,8 @@ ldelf_before_allocation (char *audit, char *depaudit,
+ rpath = command_line.rpath;
+ if (rpath == NULL)
+ rpath = (const char *) getenv ("LD_RUN_PATH");
++ if ((rpath) && (strlen (rpath) == 0))
++ rpath = NULL;
+
+ for (abfd = link_info.input_bfds; abfd; abfd = abfd->link.next)
+ if (bfd_get_flavour (abfd) == bfd_target_elf_flavour)
diff --git a/meta/recipes-devtools/binutils/binutils/0013-Avoid-as-info-race-condition.patch b/meta/recipes-devtools/binutils/binutils/0013-Avoid-as-info-race-condition.patch
deleted file mode 100644
index 3b3d0bb769..0000000000
--- a/meta/recipes-devtools/binutils/binutils/0013-Avoid-as-info-race-condition.patch
+++ /dev/null
@@ -1,75 +0,0 @@
-From 9a84a44d5df4618dd616137fa755bd71b7eacc5f Mon Sep 17 00:00:00 2001
-From: Mike Frysinger <vapier@gentoo.org>
-Date: Sun, 23 Jan 2022 12:44:24 -0500
-Subject: [PATCH] gas: drop old cygnus install hack
-
-This was needed when gas was using the automake cygnus option, but
-this was removed years ago by Simon in d0ac1c44885daf68f631befa37e
-("Bump to autoconf 2.69 and automake 1.15.1"). So delete it here.
-The info pages are already & still installed by default w/out it.
-
-Upstream-Status: Backport [https://sourceware.org/git/?p=binutils-gdb.git;a=commit;h=9a84a44d5df4618dd616137fa755bd71b7eacc5f]
-
-Signed-off-by: Khem Raj <raj.khem@gmail.com>
----
- gas/Makefile.in | 14 +++++---------
- gas/doc/local.mk | 4 ----
- 2 files changed, 5 insertions(+), 13 deletions(-)
-
-diff --git a/gas/Makefile.in b/gas/Makefile.in
-index 8f0a56fd8d6..67dac53f68c 100644
---- a/gas/Makefile.in
-+++ b/gas/Makefile.in
-@@ -1854,7 +1854,7 @@ info: info-recursive
-
- info-am: $(INFO_DEPS) info-local
-
--install-data-am: install-data-local install-info-am install-man
-+install-data-am: install-info-am install-man
-
- install-dvi: install-dvi-recursive
-
-@@ -2008,10 +2008,10 @@ uninstall-man: uninstall-man1
- distclean-DEJAGNU distclean-compile distclean-generic \
- distclean-hdr distclean-libtool distclean-tags dvi dvi-am html \
- html-am html-local info info-am info-local install install-am \
-- install-data install-data-am install-data-local install-dvi \
-- install-dvi-am install-exec install-exec-am install-exec-local \
-- install-html install-html-am install-info install-info-am \
-- install-man install-man1 install-pdf install-pdf-am install-ps \
-+ install-data install-data-am install-dvi install-dvi-am \
-+ install-exec install-exec-am install-exec-local install-html \
-+ install-html-am install-info install-info-am install-man \
-+ install-man1 install-pdf install-pdf-am install-ps \
- install-ps-am install-strip installcheck installcheck-am \
- installdirs installdirs-am maintainer-clean \
- maintainer-clean-aminfo maintainer-clean-generic mostlyclean \
-@@ -2211,10 +2211,6 @@ doc/asconfig.texi: doc/$(CONFIG).texi doc/$(am__dirstamp)
- $(AM_V_GEN)cp $(srcdir)/doc/$(CONFIG).texi doc/asconfig.texi
- $(AM_V_at)chmod u+w doc/asconfig.texi
-
--# We want install to imply install-info as per GNU standards, despite the
--# cygnus option.
--install-data-local: install-info
--
- # Maintenance
-
- # We need it for the taz target in ../Makefile.in.
-diff --git a/gas/doc/local.mk b/gas/doc/local.mk
-index c2de441257c..ac205cf08a2 100644
---- a/gas/doc/local.mk
-+++ b/gas/doc/local.mk
-@@ -101,10 +101,6 @@ CPU_DOCS = \
- %D%/c-z80.texi \
- %D%/c-z8k.texi
-
--# We want install to imply install-info as per GNU standards, despite the
--# cygnus option.
--install-data-local: install-info
--
- # This one isn't ready for prime time yet. Not even a little bit.
-
- noinst_TEXINFOS = %D%/internals.texi
---
-2.27.0
-
diff --git a/meta/recipes-devtools/binutils/binutils/0013-Define-alignof-using-_Alignof-when-using-C11-or-newe.patch b/meta/recipes-devtools/binutils/binutils/0013-Define-alignof-using-_Alignof-when-using-C11-or-newe.patch
new file mode 100644
index 0000000000..0cb2f46a07
--- /dev/null
+++ b/meta/recipes-devtools/binutils/binutils/0013-Define-alignof-using-_Alignof-when-using-C11-or-newe.patch
@@ -0,0 +1,48 @@
+From d479020609329cdc7fa3675bc3daf072401fefae Mon Sep 17 00:00:00 2001
+From: Khem Raj <raj.khem@gmail.com>
+Date: Sun, 15 Jan 2023 00:16:25 -0800
+Subject: [PATCH] Define alignof using _Alignof when using C11 or newer
+
+WG14 N2350 made very clear that it is an UB having type definitions
+within "offsetof" [1]. This patch enhances the implementation of macro
+alignof to use builtin "_Alignof" to avoid undefined behavior on
+when using std=c11 or newer
+
+clang 16+ has started to flag this [2]
+
+Fixes build when using -std >= gnu11 and using clang16+
+
+Older compilers gcc < 4.9 or clang < 8 has buggy _Alignof even though it
+may support C11, exclude those compilers too
+
+[1] https://www.open-std.org/jtc1/sc22/wg14/www/docs/n2350.htm
+[2] https://reviews.llvm.org/D133574
+
+Upstream-Status: Pending
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+---
+ libiberty/sha1.c | 10 ++++++++++
+ 1 file changed, 10 insertions(+)
+
+diff --git a/libiberty/sha1.c b/libiberty/sha1.c
+index 49e8e0b6c2b..72de5cc8ad6 100644
+--- a/libiberty/sha1.c
++++ b/libiberty/sha1.c
+@@ -234,7 +234,17 @@ sha1_process_bytes (const void *buffer, size_t len, struct sha1_ctx *ctx)
+ if (len >= 64)
+ {
+ #if !_STRING_ARCH_unaligned
++/* GCC releases before GCC 4.9 had a bug in _Alignof. See GCC bug 52023
++ <https://gcc.gnu.org/bugzilla/show_bug.cgi?id=52023>.
++ clang versions < 8.0.0 have the same bug. */
++#if (!defined __STDC_VERSION__ || __STDC_VERSION__ < 201112 \
++ || (defined __GNUC__ && __GNUC__ < 4 + (__GNUC_MINOR__ < 9) \
++ && !defined __clang__) \
++ || (defined __clang__ && __clang_major__ < 8))
+ # define alignof(type) offsetof (struct { char c; type x; }, x)
++#else
++# define alignof(type) _Alignof(type)
++#endif
+ # define UNALIGNED_P(p) (((size_t) p) % alignof (sha1_uint32) != 0)
+ if (UNALIGNED_P (buffer))
+ while (len > 64)
diff --git a/meta/recipes-devtools/binutils/binutils/0014-Remove-duplicate-pe-dll.o-entry-deom-targ_extra_ofil.patch b/meta/recipes-devtools/binutils/binutils/0014-Remove-duplicate-pe-dll.o-entry-deom-targ_extra_ofil.patch
new file mode 100644
index 0000000000..8aef2cc710
--- /dev/null
+++ b/meta/recipes-devtools/binutils/binutils/0014-Remove-duplicate-pe-dll.o-entry-deom-targ_extra_ofil.patch
@@ -0,0 +1,32 @@
+From ddd941856582a82171626cbe609325b11e223704 Mon Sep 17 00:00:00 2001
+From: Khem Raj <raj.khem@gmail.com>
+Date: Wed, 18 Jan 2023 19:35:07 -0800
+Subject: [PATCH] Remove duplicate pe-dll.o entry deom targ_extra_ofiles
+
+Commit c60b3806799abf1d7f6cf5108a1b0e733a950b13 added support for
+aarch64-pe which introduced wrapper over pep-dll.c for x86_64 as well as
+aarch64, on x86_64 it was added but the old object pe-dll.o needs to be
+removed too, otherwise build fails with duplicate symbols from pe-dll.o
+and pep-dll-x86_64.o
+
+Upstream-Status: Submitted [https://sourceware.org/pipermail/binutils/2023-January/125739.html]
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+Cc: Jedidiah Thompson <wej22007@outlook.com>
+Cc: Zac Walker <zac.walker@linaro.org>
+---
+ ld/configure.tgt | 2 +-
+ 1 file changed, 1 insertion(+), 1 deletion(-)
+
+diff --git a/ld/configure.tgt b/ld/configure.tgt
+index f937f78b876..830613744c2 100644
+--- a/ld/configure.tgt
++++ b/ld/configure.tgt
+@@ -1067,7 +1067,7 @@ x86_64-*-cygwin) targ_emul=i386pep ;
+ ;;
+ x86_64-*-mingw*) targ_emul=i386pep ;
+ targ_extra_emuls=i386pe
+- targ_extra_ofiles="deffilep.o pdb.o pep-dll.o pe-dll.o"
++ targ_extra_ofiles="deffilep.o pdb.o pe-dll.o"
+ ;;
+ x86_64-*-gnu*) targ_emul=elf_x86_64
+ targ_extra_emuls="elf32_x86_64 elf_iamcu elf_i386"
diff --git a/meta/recipes-devtools/binutils/binutils/0015-gprofng-change-use-of-bignum-to-bigint.patch b/meta/recipes-devtools/binutils/binutils/0015-gprofng-change-use-of-bignum-to-bigint.patch
new file mode 100644
index 0000000000..0d3d289eba
--- /dev/null
+++ b/meta/recipes-devtools/binutils/binutils/0015-gprofng-change-use-of-bignum-to-bigint.patch
@@ -0,0 +1,17 @@
+Upstream-Status: Backport [https://sourceware.org/git/?p=binutils-gdb.git;a=commit;h=3243d790ee32aa8eda69226d81b1e79dbd1dcd87]
+
+Signed-off-by: Harish Sadineni <Harish.Sadineni@windriver.com>
+
+diff --git a/gprofng/gp-display-html/gp-display-html.in b/gprofng/gp-display-html/gp-display-html.in
+index 6f37ca282e7..306c99a0ec3 100644
+--- a/gprofng/gp-display-html/gp-display-html.in
++++ b/gprofng/gp-display-html/gp-display-html.in
+@@ -25,7 +25,7 @@ use warnings;
+ # Disable before release
+ # use Perl::Critic;
+
+-use bignum;
++use bigint;
+ use List::Util qw (max);
+ use Cwd qw (abs_path cwd);
+ use File::Basename;
diff --git a/meta/recipes-devtools/binutils/binutils_2.38.bb b/meta/recipes-devtools/binutils/binutils_2.38.bb
deleted file mode 100644
index 12a6fb5577..0000000000
--- a/meta/recipes-devtools/binutils/binutils_2.38.bb
+++ /dev/null
@@ -1,69 +0,0 @@
-require binutils.inc
-require binutils-${PV}.inc
-
-DEPENDS += "zlib"
-
-EXTRA_OECONF += "--with-sysroot=/ \
- --enable-install-libbfd \
- --enable-install-libiberty \
- --enable-shared \
- --with-system-zlib \
- "
-
-EXTRA_OEMAKE:append:libc-musl = "\
- gt_cv_func_gnugettext1_libc=yes \
- gt_cv_func_gnugettext2_libc=yes \
- "
-EXTRA_OECONF:class-native = "--enable-targets=all \
- --enable-64-bit-bfd \
- --enable-install-libiberty \
- --enable-install-libbfd \
- --disable-gdb \
- --disable-gdbserver \
- --disable-libdecnumber \
- --disable-readline \
- --disable-sim \
- --disable-werror"
-
-PACKAGECONFIG ??= "${@bb.utils.filter('DISTRO_FEATURES', 'debuginfod', d)}"
-PACKAGECONFIG[debuginfod] = "--with-debuginfod, --without-debuginfod, elfutils"
-
-do_install:class-native () {
- autotools_do_install
-
- # Install the libiberty header
- install -d ${D}${includedir}
- install -m 644 ${S}/include/ansidecl.h ${D}${includedir}
- install -m 644 ${S}/include/libiberty.h ${D}${includedir}
-
- # We only want libiberty, libbfd and libopcodes
- rm -rf ${D}${bindir}
- rm -rf ${D}${prefix}/${TARGET_SYS}
- rm -rf ${D}${prefix}/lib/ldscripts
- rm -rf ${D}${prefix}/share/info
- rm -rf ${D}${prefix}/share/locale
- rm -rf ${D}${prefix}/share/man
- rmdir ${D}${prefix}/share || :
- rmdir ${D}/${libdir}/gcc-lib || :
- rmdir ${D}/${libdir}64/gcc-lib || :
- rmdir ${D}/${libdir} || :
- rmdir ${D}/${libdir}64 || :
-}
-
-# libctf races with libbfd
-PARALLEL_MAKEINST:class-target = ""
-PARALLEL_MAKEINST:class-nativesdk = ""
-
-# Split out libbfd-*.so and libopcodes-*.so so including perf doesn't include
-# extra stuff
-PACKAGE_BEFORE_PN += "libbfd libopcodes"
-FILES:libbfd = "${libdir}/libbfd-*.so.* ${libdir}/libbfd-*.so"
-FILES:libopcodes = "${libdir}/libopcodes-*.so.* ${libdir}/libopcodes-*.so"
-
-SRC_URI:append:class-nativesdk = " file://0003-binutils-nativesdk-Search-for-alternative-ld.so.conf.patch "
-
-USE_ALTERNATIVES_FOR:class-nativesdk = ""
-FILES:${PN}:append:class-nativesdk = " ${bindir}"
-
-BBCLASSEXTEND = "native nativesdk"
-
diff --git a/meta/recipes-devtools/binutils/binutils_2.42.bb b/meta/recipes-devtools/binutils/binutils_2.42.bb
new file mode 100644
index 0000000000..2cce40f1ef
--- /dev/null
+++ b/meta/recipes-devtools/binutils/binutils_2.42.bb
@@ -0,0 +1,76 @@
+require binutils.inc
+require binutils-${PV}.inc
+
+# perl-native for pod2man for man page generation
+DEPENDS += "zlib perl-native"
+
+EXTRA_OECONF += "--with-sysroot=/ \
+ --enable-install-libbfd \
+ --enable-install-libiberty \
+ --enable-shared \
+ --with-system-zlib \
+ "
+
+EXTRA_OEMAKE:append:libc-musl = "\
+ gt_cv_func_gnugettext1_libc=yes \
+ gt_cv_func_gnugettext2_libc=yes \
+ "
+# libcollector/collector.c:547:15: error: no member named '__fprintf_chk' in 'struct CollectorUtilFuncs'
+EXTRA_OECONF:append:toolchain-clang = " --disable-gprofng"
+# | ../../../gprofng/libcollector/../src/collector_module.h:78:13: error: duplicate member 'pwrite'
+# | ../../../gprofng/libcollector/dispatcher.c:578:8: error: 'struct sigevent' has no member named '_sigev_un'
+EXTRA_OECONF:append:libc-musl = " --disable-gprofng"
+
+EXTRA_OECONF:class-native = "--enable-targets=all \
+ --enable-64-bit-bfd \
+ --enable-install-libiberty \
+ --enable-install-libbfd \
+ --disable-gdb \
+ --disable-gdbserver \
+ --disable-libdecnumber \
+ --disable-readline \
+ --disable-sim \
+ --disable-werror"
+
+PACKAGECONFIG ??= "${@bb.utils.filter('DISTRO_FEATURES', 'debuginfod', d)}"
+PACKAGECONFIG[debuginfod] = "--with-debuginfod, --without-debuginfod, elfutils"
+
+do_install:class-native () {
+ autotools_do_install
+
+ # Install the libiberty header
+ install -d ${D}${includedir}
+ install -m 644 ${S}/include/ansidecl.h ${D}${includedir}
+ install -m 644 ${S}/include/libiberty.h ${D}${includedir}
+
+ # We only want libiberty, libbfd and libopcodes
+ rm -rf ${D}${bindir}
+ rm -rf ${D}${prefix}/${TARGET_SYS}
+ rm -rf ${D}${prefix}/lib/ldscripts
+ rm -rf ${D}${prefix}/share/info
+ rm -rf ${D}${prefix}/share/locale
+ rm -rf ${D}${prefix}/share/man
+ rmdir ${D}${prefix}/share || :
+ rmdir ${D}/${libdir}/gcc-lib || :
+ rmdir ${D}/${libdir}64/gcc-lib || :
+ rmdir ${D}/${libdir} || :
+ rmdir ${D}/${libdir}64 || :
+}
+
+# libctf races with libbfd
+PARALLEL_MAKEINST:class-target = ""
+PARALLEL_MAKEINST:class-nativesdk = ""
+
+# Split out libbfd-*.so and libopcodes-*.so so including perf doesn't include
+# extra stuff
+PACKAGE_BEFORE_PN += "libbfd libopcodes gprofng"
+FILES:libbfd = "${libdir}/libbfd-*.so.* ${libdir}/libbfd-*.so"
+FILES:libopcodes = "${libdir}/libopcodes-*.so.* ${libdir}/libopcodes-*.so"
+FILES:gprofng = "${sysconfdir}/gprofng.rc ${libdir}/gprofng/libgp-*.so ${libdir}/gprofng/libgprofng.so.* ${bindir}/gp-* ${bindir}/gprofng"
+FILES:${PN}-dev += "${libdir}/libgprofng.so ${libdir}/libsframe.so"
+SRC_URI:append:class-nativesdk = " file://0003-binutils-nativesdk-Search-for-alternative-ld.so.conf.patch "
+
+USE_ALTERNATIVES_FOR:class-nativesdk = ""
+FILES:${PN}:append:class-nativesdk = " ${bindir}"
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/bison/bison/autoconf-2.73.patch b/meta/recipes-devtools/bison/bison/autoconf-2.73.patch
new file mode 100644
index 0000000000..8360d3928f
--- /dev/null
+++ b/meta/recipes-devtools/bison/bison/autoconf-2.73.patch
@@ -0,0 +1,24 @@
+The gnulib largefile macro needs updating to work with autoconf 2.73. Rather
+than the full code:
+
+https://git.savannah.gnu.org/cgit/gnulib.git/commit/m4/largefile.m4?id=f91f633858cf132e50924224c50d6264a92caabb
+
+Just tweak the exiting code to work with 2.73. The next bison upgrade should
+update to new gnulib
+
+Upstream-Status: Inappropriate
+Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
+
+Index: findutils-4.9.0/gl/m4/largefile.m4
+===================================================================
+--- findutils-4.9.0.orig/m4/largefile.m4
++++ findutils-4.9.0/m4/largefile.m4
+@@ -26,7 +26,7 @@ AC_DEFUN([gl_SET_LARGEFILE_SOURCE],
+ # with _TIME_BITS. Also, work around a problem in autoconf <= 2.69:
+ # AC_SYS_LARGEFILE does not configure for large inodes on Mac OS X 10.5,
+ # or configures them incorrectly in some cases.
+-m4_version_prereq([2.70], [], [
++m4_version_prereq([2.73], [], [
+
+ # _AC_SYS_LARGEFILE_TEST_INCLUDES
+ # -------------------------------
diff --git a/meta/recipes-devtools/bison/bison_3.8.2.bb b/meta/recipes-devtools/bison/bison_3.8.2.bb
index c95f321244..da138e3587 100644
--- a/meta/recipes-devtools/bison/bison_3.8.2.bb
+++ b/meta/recipes-devtools/bison/bison_3.8.2.bb
@@ -10,6 +10,7 @@ SECTION = "devel"
DEPENDS = "bison-native flex-native"
SRC_URI = "${GNU_MIRROR}/bison/bison-${PV}.tar.xz \
+ file://autoconf-2.73.patch \
file://add-with-bisonlocaledir.patch \
"
SRC_URI[sha256sum] = "9bba0214ccf7f1079c5d59210045227bcf619519840ebfa80cd3849cff5a5bf2"
diff --git a/meta/recipes-devtools/bootchart2/bootchart2/0001-Do-not-include-linux-fs.h.patch b/meta/recipes-devtools/bootchart2/bootchart2/0001-Do-not-include-linux-fs.h.patch
new file mode 100644
index 0000000000..4e71e5c788
--- /dev/null
+++ b/meta/recipes-devtools/bootchart2/bootchart2/0001-Do-not-include-linux-fs.h.patch
@@ -0,0 +1,31 @@
+From 8591c1e3edaea8f17396e3d2819d9064b2818cfb Mon Sep 17 00:00:00 2001
+From: Khem Raj <raj.khem@gmail.com>
+Date: Sat, 6 Aug 2022 20:39:01 -0700
+Subject: [PATCH] Do not include linux/fs.h
+
+This header is not needed to be included anymore, moreover it conflicts
+with sys/mount.h from glibc 2.36+ see [1]
+
+[1] https://sourceware.org/glibc/wiki/Release/2.36
+
+Upstream-Status: Submitted [https://github.com/xrmx/bootchart/pull/99]
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+---
+ collector/collector.c | 1 -
+ 1 file changed, 1 deletion(-)
+
+diff --git a/collector/collector.c b/collector/collector.c
+index 5055181..12738ff 100644
+--- a/collector/collector.c
++++ b/collector/collector.c
+@@ -34,7 +34,6 @@
+
+ #include <sys/mount.h>
+ #include <sys/sysmacros.h>
+-#include <linux/fs.h>
+ #include <linux/genetlink.h>
+ #include <linux/taskstats.h>
+ #include <linux/cgroupstats.h>
+--
+2.37.1
+
diff --git a/meta/recipes-devtools/bootchart2/bootchart2/0001-bootchart2-support-usrmerge.patch b/meta/recipes-devtools/bootchart2/bootchart2/0001-bootchart2-support-usrmerge.patch
deleted file mode 100644
index 88597cf3a9..0000000000
--- a/meta/recipes-devtools/bootchart2/bootchart2/0001-bootchart2-support-usrmerge.patch
+++ /dev/null
@@ -1,37 +0,0 @@
-From b6d1a1ff2de363b1b76c8c70f77ae56a4e4d4b56 Mon Sep 17 00:00:00 2001
-From: Changqing Li <changqing.li@windriver.com>
-Date: Thu, 5 Sep 2019 18:37:31 +0800
-Subject: [PATCH] bootchart2: support usrmerge
-
-Upstream-Status: Inappropriate [oe-specific]
-
-Signed-off-by: Changqing Li <changqing.li@windriver.com>
----
- Makefile | 4 ++--
- 1 file changed, 2 insertions(+), 2 deletions(-)
-
-diff --git a/Makefile b/Makefile
-index 1cc2974..f988904 100644
---- a/Makefile
-+++ b/Makefile
-@@ -36,7 +36,7 @@ endif
- PY_SITEDIR ?= $(PY_LIBDIR)/site-packages
- LIBC_A_PATH = /usr$(LIBDIR)
- # Always lib, even on systems that otherwise use lib64
--SYSTEMD_UNIT_DIR = $(EARLY_PREFIX)/lib/systemd/system
-+SYSTEMD_UNIT_DIR ?= $(EARLY_PREFIX)/lib/systemd/system
- COLLECTOR = \
- collector/collector.o \
- collector/output.o \
-@@ -99,7 +99,7 @@ install-chroot:
- install -d $(DESTDIR)$(PKGLIBDIR)/tmpfs
-
- install-collector: all install-chroot
-- install -m 755 -D bootchartd $(DESTDIR)$(EARLY_PREFIX)/sbin/$(PROGRAM_PREFIX)bootchartd$(PROGRAM_SUFFIX)
-+ install -m 755 -D bootchartd $(DESTDIR)${BASE_SBINDIR}/$(PROGRAM_PREFIX)bootchartd$(PROGRAM_SUFFIX)
- install -m 644 -D bootchartd.conf $(DESTDIR)/etc/$(PROGRAM_PREFIX)bootchartd$(PROGRAM_SUFFIX).conf
- install -m 755 -D bootchart-collector $(DESTDIR)$(PKGLIBDIR)/$(PROGRAM_PREFIX)bootchart$(PROGRAM_SUFFIX)-collector
-
---
-2.7.4
-
diff --git a/meta/recipes-devtools/bootchart2/bootchart2_0.14.9.bb b/meta/recipes-devtools/bootchart2/bootchart2_0.14.9.bb
index b1628075a7..4d8ce4c741 100644
--- a/meta/recipes-devtools/bootchart2/bootchart2_0.14.9.bb
+++ b/meta/recipes-devtools/bootchart2/bootchart2_0.14.9.bb
@@ -83,7 +83,6 @@
SUMMARY = "Booting sequence and CPU,I/O usage monitor"
DESCRIPTION = "Monitors where the system spends its time at start, creating a graph of all processes, disk utilization, and wait time."
-AUTHOR = "Wonhong Kwon <wonhong.kwon@lge.com>"
HOMEPAGE = "https://github.com/mmeeks/bootchart"
LICENSE = "GPL-3.0-only"
LIC_FILES_CHKSUM = "file://COPYING;md5=44ac4678311254db62edf8fd39cb8124"
@@ -93,16 +92,13 @@ UPSTREAM_CHECK_GITTAGREGEX = "(?P<pver>\d+\.\d+(\.\d+)*)"
SRC_URI = "git://github.com/xrmx/bootchart.git;branch=master;protocol=https \
file://bootchartd_stop.sh \
file://0001-collector-Allocate-space-on-heap-for-chunks.patch \
- file://0001-bootchart2-support-usrmerge.patch \
file://0001-bootchartd.in-make-sure-only-one-bootchartd-process.patch \
+ file://0001-Do-not-include-linux-fs.h.patch \
"
S = "${WORKDIR}/git"
SRCREV = "868a2afab9da34f32c007d773b77253c93104636"
-# remove at next version upgrade or when output changes
-PR = "r1"
-HASHEQUIV_HASH_VERSION .= ".1"
inherit systemd update-rc.d python3native update-alternatives
@@ -119,12 +115,11 @@ UPDATERCPN = "bootchartd-stop-initscript"
INITSCRIPT_NAME = "bootchartd_stop.sh"
INITSCRIPT_PARAMS = "start 99 2 3 4 5 ."
-EXTRA_OEMAKE = 'BASE_SBINDIR="${base_sbindir}"'
-
do_compile:prepend () {
export PY_LIBDIR="${libdir}/${PYTHON_DIR}"
export BINDIR="${bindir}"
- export LIBDIR="${base_libdir}"
+ export LIBDIR="/${baselib}"
+ export EARLY_PREFIX="${root_prefix}"
}
do_install () {
@@ -132,9 +127,8 @@ do_install () {
export PY_LIBDIR="${libdir}/${PYTHON_DIR}"
export BINDIR="${bindir}"
export DESTDIR="${D}"
- export LIBDIR="${base_libdir}"
- export PKGLIBDIR="${base_libdir}/bootchart"
- export SYSTEMD_UNIT_DIR="${systemd_system_unitdir}"
+ export LIBDIR="/${baselib}"
+ export EARLY_PREFIX="${root_prefix}"
oe_runmake install NO_PYTHON_COMPILE=1
install -d ${D}${sysconfdir}/init.d
diff --git a/meta/recipes-devtools/btrfs-tools/btrfs-tools/0001-Add-a-possibility-to-specify-where-python-modules-ar.patch b/meta/recipes-devtools/btrfs-tools/btrfs-tools/0001-Add-a-possibility-to-specify-where-python-modules-ar.patch
index 5846f04d1a..4b1797b65f 100644
--- a/meta/recipes-devtools/btrfs-tools/btrfs-tools/0001-Add-a-possibility-to-specify-where-python-modules-ar.patch
+++ b/meta/recipes-devtools/btrfs-tools/btrfs-tools/0001-Add-a-possibility-to-specify-where-python-modules-ar.patch
@@ -1,4 +1,4 @@
-From d3adfc21c9cc264bd191722f102963cbc4794259 Mon Sep 17 00:00:00 2001
+From 980f6edc269fa3ef8d4d4b9cd1aada2328131c19 Mon Sep 17 00:00:00 2001
From: Alexander Kanavin <alex.kanavin@gmail.com>
Date: Wed, 23 May 2018 21:20:35 +0300
Subject: [PATCH] Add a possibility to specify where python modules are
@@ -11,10 +11,10 @@ Signed-off-by: Alexander Kanavin <alex.kanavin@gmail.com>
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/Makefile b/Makefile
-index 1697794c..8ab38818 100644
+index 374f59b9..ed083f6b 100644
--- a/Makefile
+++ b/Makefile
-@@ -651,7 +651,7 @@ endif
+@@ -959,7 +959,7 @@ endif
ifeq ($(PYTHON_BINDINGS),1)
install_python: libbtrfsutil_python
$(Q)cd libbtrfsutil/python; \
diff --git a/meta/recipes-devtools/btrfs-tools/btrfs-tools_5.16.2.bb b/meta/recipes-devtools/btrfs-tools/btrfs-tools_5.16.2.bb
deleted file mode 100644
index 4ab486c465..0000000000
--- a/meta/recipes-devtools/btrfs-tools/btrfs-tools_5.16.2.bb
+++ /dev/null
@@ -1,71 +0,0 @@
-SUMMARY = "Checksumming Copy on Write Filesystem utilities"
-DESCRIPTION = "Btrfs is a new copy on write filesystem for Linux aimed at \
-implementing advanced features while focusing on fault tolerance, repair and \
-easy administration. \
-This package contains utilities (mkfs, fsck, btrfsctl) used to work with \
-btrfs and an utility (btrfs-convert) to make a btrfs filesystem from an ext3."
-
-HOMEPAGE = "https://btrfs.wiki.kernel.org"
-
-LICENSE = "GPL-2.0-only & LGPL-2.1-or-later"
-LIC_FILES_CHKSUM = " \
- file://COPYING;md5=fcb02dc552a041dee27e4b85c7396067 \
- file://libbtrfsutil/COPYING;md5=4fbd65380cdd255951079008b364516c \
-"
-SECTION = "base"
-DEPENDS = "lzo util-linux zlib"
-
-SRC_URI = "git://git.kernel.org/pub/scm/linux/kernel/git/kdave/btrfs-progs.git;branch=master \
- file://0001-Add-a-possibility-to-specify-where-python-modules-ar.patch \
- "
-SRCREV = "31458c9c81935abbed010221261897273a98d2c1"
-S = "${WORKDIR}/git"
-
-PACKAGECONFIG ??= " \
- programs \
- convert \
- python \
- crypto-builtin \
-"
-PACKAGECONFIG[manpages] = "--enable-documentation, --disable-documentation, asciidoc-native xmlto-native"
-PACKAGECONFIG[programs] = "--enable-programs,--disable-programs"
-PACKAGECONFIG[convert] = "--enable-convert --with-convert=ext2,--disable-convert --without-convert,e2fsprogs"
-PACKAGECONFIG[zoned] = "--enable-zoned,--disable-zoned"
-PACKAGECONFIG[python] = "--enable-python,--disable-python,python3-setuptools-native"
-PACKAGECONFIG[zstd] = "--enable-zstd,--disable-zstd,zstd"
-PACKAGECONFIG[udev] = "--enable-libudev,--disable-libudev,udev"
-
-# Pick only one crypto provider
-PACKAGECONFIG[crypto-builtin] = "--with-crypto=builtin"
-PACKAGECONFIG[crypto-libgcrypt] = "--with-crypto=libgcrypt,,libgcrypt"
-PACKAGECONFIG[crypto-libsodium] = "--with-crypto=libsodium,,libsodium"
-PACKAGECONFIG[crypto-libkcapi] = "--with-crypto=libkcapi,,libkcapi"
-
-inherit autotools-brokensep pkgconfig manpages
-inherit ${@bb.utils.contains('PACKAGECONFIG', 'python', 'setuptools3-base', '', d)}
-
-CLEANBROKEN = "1"
-
-EXTRA_OECONF = "--enable-largefile"
-EXTRA_OECONF:append:libc-musl = " --disable-backtrace "
-EXTRA_PYTHON_CFLAGS = "${DEBUG_PREFIX_MAP}"
-EXTRA_PYTHON_CFLAGS:class-native = ""
-EXTRA_PYTHON_LDFLAGS = "${LDFLAGS}"
-EXTRA_OEMAKE = "V=1 'EXTRA_PYTHON_CFLAGS=${EXTRA_PYTHON_CFLAGS}' 'EXTRA_PYTHON_LDFLAGS=${EXTRA_PYTHON_LDFLAGS}'"
-
-do_configure:prepend() {
- # Upstream doesn't ship this and autoreconf won't install it as automake isn't used.
- mkdir -p ${S}/config
- cp -f $(automake --print-libdir)/install-sh ${S}/config/
-}
-
-
-do_install:append() {
- if [ "${@bb.utils.filter('PACKAGECONFIG', 'python', d)}" ]; then
- oe_runmake 'DESTDIR=${D}' 'PYTHON_SITEPACKAGES_DIR=${PYTHON_SITEPACKAGES_DIR}' install_python
- fi
-}
-
-RDEPENDS:${PN} = "libgcc"
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/btrfs-tools/btrfs-tools_6.8.bb b/meta/recipes-devtools/btrfs-tools/btrfs-tools_6.8.bb
new file mode 100644
index 0000000000..15cc7ac244
--- /dev/null
+++ b/meta/recipes-devtools/btrfs-tools/btrfs-tools_6.8.bb
@@ -0,0 +1,72 @@
+SUMMARY = "Checksumming Copy on Write Filesystem utilities"
+DESCRIPTION = "Btrfs is a new copy on write filesystem for Linux aimed at \
+implementing advanced features while focusing on fault tolerance, repair and \
+easy administration. \
+This package contains utilities (mkfs, fsck, btrfsctl) used to work with \
+btrfs and an utility (btrfs-convert) to make a btrfs filesystem from an ext3."
+
+HOMEPAGE = "https://btrfs.wiki.kernel.org"
+
+LICENSE = "GPL-2.0-only & LGPL-2.1-or-later"
+LIC_FILES_CHKSUM = " \
+ file://COPYING;md5=fcb02dc552a041dee27e4b85c7396067 \
+ file://libbtrfsutil/COPYING;md5=4fbd65380cdd255951079008b364516c \
+"
+SECTION = "base"
+DEPENDS = "util-linux zlib"
+
+SRC_URI = "git://git.kernel.org/pub/scm/linux/kernel/git/kdave/btrfs-progs.git;branch=master;protocol=https \
+ file://0001-Add-a-possibility-to-specify-where-python-modules-ar.patch \
+ "
+SRCREV = "3793e987d2b4e878410da16f33d963043d137d48"
+S = "${WORKDIR}/git"
+
+PACKAGECONFIG ??= " \
+ programs \
+ convert \
+ python \
+ crypto-builtin \
+"
+PACKAGECONFIG[manpages] = "--enable-documentation, --disable-documentation, python3-sphinx-native python3-sphinx-rtd-theme-native"
+PACKAGECONFIG[programs] = "--enable-programs,--disable-programs"
+PACKAGECONFIG[convert] = "--enable-convert --with-convert=ext2,--disable-convert --without-convert,e2fsprogs"
+PACKAGECONFIG[zoned] = "--enable-zoned,--disable-zoned"
+PACKAGECONFIG[python] = "--enable-python,--disable-python,python3-setuptools-native"
+PACKAGECONFIG[lzo] = "--enable-lzo,--disable-lzo,lzo"
+PACKAGECONFIG[zstd] = "--enable-zstd,--disable-zstd,zstd"
+PACKAGECONFIG[udev] = "--enable-libudev,--disable-libudev,udev"
+
+# Pick only one crypto provider
+PACKAGECONFIG[crypto-builtin] = "--with-crypto=builtin"
+PACKAGECONFIG[crypto-libgcrypt] = "--with-crypto=libgcrypt,,libgcrypt"
+PACKAGECONFIG[crypto-libsodium] = "--with-crypto=libsodium,,libsodium"
+PACKAGECONFIG[crypto-libkcapi] = "--with-crypto=libkcapi,,libkcapi"
+
+inherit autotools-brokensep pkgconfig manpages
+inherit_defer ${@bb.utils.contains('PACKAGECONFIG', 'python', 'setuptools3-base', '', d)}
+
+CLEANBROKEN = "1"
+
+EXTRA_OECONF = "--enable-largefile"
+EXTRA_OECONF:append:libc-musl = " --disable-backtrace "
+EXTRA_PYTHON_CFLAGS = "${DEBUG_PREFIX_MAP}"
+EXTRA_PYTHON_CFLAGS:class-native = ""
+EXTRA_PYTHON_LDFLAGS = "${LDFLAGS}"
+EXTRA_OEMAKE = "V=1 'EXTRA_PYTHON_CFLAGS=${EXTRA_PYTHON_CFLAGS}' 'EXTRA_PYTHON_LDFLAGS=${EXTRA_PYTHON_LDFLAGS}'"
+
+do_configure:prepend() {
+ # Upstream doesn't ship this and autoreconf won't install it as automake isn't used.
+ mkdir -p ${S}/config
+ cp -f $(automake --print-libdir)/install-sh ${S}/config/
+}
+
+
+do_install:append() {
+ if [ "${@bb.utils.filter('PACKAGECONFIG', 'python', d)}" ]; then
+ oe_runmake 'DESTDIR=${D}' 'PYTHON_SITEPACKAGES_DIR=${PYTHON_SITEPACKAGES_DIR}' install_python
+ fi
+}
+
+RDEPENDS:${PN} = "libgcc"
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/cargo/cargo-cross-canadian.inc b/meta/recipes-devtools/cargo/cargo-cross-canadian.inc
deleted file mode 100644
index 7fc22a4128..0000000000
--- a/meta/recipes-devtools/cargo/cargo-cross-canadian.inc
+++ /dev/null
@@ -1,72 +0,0 @@
-SUMMARY = "Cargo, a package manager for Rust cross canadian flavor."
-
-RUST_ALTERNATE_EXE_PATH = "${STAGING_LIBDIR_NATIVE}/llvm-rust/bin/llvm-config"
-
-HOST_SYS = "${HOST_ARCH}-unknown-linux-gnu"
-CARGO_RUST_TARGET_CCLD = "${RUST_BUILD_CCLD}"
-
-require recipes-devtools/rust/rust-common.inc
-require cargo.inc
-
-CARGO = "${WORKDIR}/${CARGO_SNAPSHOT}/bin/cargo"
-BASEDEPENDS:remove = "cargo-native"
-
-export RUST_TARGET_PATH="${WORKDIR}/targets/"
-
-RUSTLIB = " \
- -L ${STAGING_DIR_NATIVE}/${SDKPATHNATIVE}/usr/lib/${TARGET_SYS}/rustlib/${HOST_SYS}/lib \
-"
-
-DEPENDS += "rust-native \
- rust-cross-canadian-${TRANSLATED_TARGET_ARCH} \
- virtual/nativesdk-${HOST_PREFIX}compilerlibs \
- nativesdk-openssl nativesdk-zlib \
- virtual/nativesdk-libc \
-"
-
-inherit cross-canadian
-
-PN = "cargo-cross-canadian-${TRANSLATED_TARGET_ARCH}"
-
-python do_rust_gen_targets () {
- wd = d.getVar('WORKDIR') + '/targets/'
-
- rust_gen_target(d, 'BUILD', wd, "", "generic", d.getVar('BUILD_ARCH'))
- rust_gen_target(d, 'HOST', wd, "", "generic", d.getVar('HOST_ARCH'))
-}
-
-do_compile:prepend () {
- PKG_CONFIG_PATH="${RECIPE_SYSROOT_NATIVE}/usr/lib/pkgconfig:${PKG_CONFIG_PATH}"
-}
-
-do_install () {
- SYS_BINDIR=$(dirname ${D}${bindir})
- install -d "${SYS_BINDIR}"
- install -m 755 "${B}/target/${CARGO_TARGET_SUBDIR}/cargo" "${SYS_BINDIR}"
- for i in ${SYS_BINDIR}/*; do
- chrpath -r "\$ORIGIN/../lib" ${i}
- done
-
- ENV_SETUP_DIR=${D}${base_prefix}/environment-setup.d
- mkdir "${ENV_SETUP_DIR}"
- ENV_SETUP_SH="${ENV_SETUP_DIR}/cargo.sh"
- cat <<- EOF > "${ENV_SETUP_SH}"
- export CARGO_HOME="\$OECORE_TARGET_SYSROOT/home/cargo"
- mkdir -p "\$CARGO_HOME"
- # Init the default target once, it might be otherwise user modified.
- if [ ! -f "\$CARGO_HOME/config" ]; then
- touch "\$CARGO_HOME/config"
- echo "[build]" >> "\$CARGO_HOME/config"
- echo 'target = "'${TARGET_SYS}'"' >> "\$CARGO_HOME/config"
- fi
-
- # Keep the below off as long as HTTP/2 is disabled.
- export CARGO_HTTP_MULTIPLEXING=false
-
- export CARGO_HTTP_CAINFO="\$OECORE_NATIVE_SYSROOT/etc/ssl/certs/ca-certificates.crt"
- EOF
-}
-
-PKG_SYS_BINDIR = "${SDKPATHNATIVE}/usr/bin"
-FILES:${PN} += "${base_prefix}/environment-setup.d ${PKG_SYS_BINDIR}"
-
diff --git a/meta/recipes-devtools/cargo/cargo-cross-canadian_1.60.0.bb b/meta/recipes-devtools/cargo/cargo-cross-canadian_1.60.0.bb
deleted file mode 100644
index 63fd69107b..0000000000
--- a/meta/recipes-devtools/cargo/cargo-cross-canadian_1.60.0.bb
+++ /dev/null
@@ -1,6 +0,0 @@
-require recipes-devtools/rust/rust-source.inc
-require recipes-devtools/rust/rust-snapshot.inc
-
-FILESEXTRAPATHS:prepend := "${THISDIR}/cargo-${PV}:"
-
-require cargo-cross-canadian.inc
diff --git a/meta/recipes-devtools/cargo/cargo.inc b/meta/recipes-devtools/cargo/cargo.inc
deleted file mode 100644
index 607c51fc3d..0000000000
--- a/meta/recipes-devtools/cargo/cargo.inc
+++ /dev/null
@@ -1,56 +0,0 @@
-SUMMARY ?= "Cargo, a package manager for Rust."
-HOMEPAGE = "https://crates.io"
-LICENSE = "MIT | Apache-2.0"
-SECTION = "devel"
-
-DEPENDS = "openssl zlib curl ca-certificates libssh2"
-
-LIC_FILES_CHKSUM = " \
- file://LICENSE-MIT;md5=b377b220f43d747efdec40d69fcaa69d \
- file://LICENSE-APACHE;md5=71b224ca933f0676e26d5c2e2271331c \
- file://LICENSE-THIRD-PARTY;md5=f257ad009884cb88a3a87d6920e7180a \
-"
-
-
-S = "${RUSTSRC}/src/tools/cargo"
-CARGO_VENDORING_DIRECTORY = "${RUSTSRC}/vendor"
-EXCLUDE_FROM_WORLD = "1"
-
-inherit cargo pkgconfig
-
-do_cargo_setup_snapshot () {
- ${WORKDIR}/rust-snapshot-components/${CARGO_SNAPSHOT}/install.sh --prefix="${WORKDIR}/${CARGO_SNAPSHOT}" --disable-ldconfig
- # Need to use uninative's loader if enabled/present since the library paths
- # are used internally by rust and result in symbol mismatches if we don't
- if [ ! -z "${UNINATIVE_LOADER}" -a -e "${UNINATIVE_LOADER}" ]; then
- patchelf-uninative ${WORKDIR}/${CARGO_SNAPSHOT}/bin/cargo --set-interpreter ${UNINATIVE_LOADER}
- fi
-}
-
-addtask cargo_setup_snapshot after do_unpack before do_configure
-do_cargo_setup_snapshot[dirs] += "${WORKDIR}/${CARGO_SNAPSHOT}"
-do_cargo_setup_snapshot[vardepsexclude] += "UNINATIVE_LOADER"
-
-
-do_compile:prepend () {
- export RUSTC_BOOTSTRAP="1"
-}
-
-do_install () {
- install -d "${D}${bindir}"
- install -m 755 "${B}/target/${CARGO_TARGET_SUBDIR}/cargo" "${D}${bindir}"
-}
-
-# Disabled due to incompatibility with libgit2 0.28.x (https://github.com/rust-lang/git2-rs/issues/458, https://bugs.gentoo.org/707746#c1)
-# as shipped by Yocto Dunfell.
-# According to https://github.com/rust-lang/git2-rs/issues/458#issuecomment-522567539, there are no compatibility guarantees between
-# libgit2-sys and arbitrary system libgit2 versions, so better keep this turned off.
-#export LIBGIT2_SYS_USE_PKG_CONFIG = "1"
-
-# Needed for pkg-config to be used
-export LIBSSH2_SYS_USE_PKG_CONFIG = "1"
-
-# When building cargo-native we don't have cargo-native to use and depend on,
-# so we must use the locally set up snapshot to bootstrap the build.
-BASEDEPENDS:remove:class-native = "cargo-native"
-CARGO:class-native = "${WORKDIR}/${CARGO_SNAPSHOT}/bin/cargo"
diff --git a/meta/recipes-devtools/cargo/cargo_1.60.0.bb b/meta/recipes-devtools/cargo/cargo_1.60.0.bb
deleted file mode 100644
index eee58fc245..0000000000
--- a/meta/recipes-devtools/cargo/cargo_1.60.0.bb
+++ /dev/null
@@ -1,4 +0,0 @@
-require recipes-devtools/rust/rust-source.inc
-require recipes-devtools/rust/rust-snapshot.inc
-require cargo.inc
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/ccache/ccache_4.6.bb b/meta/recipes-devtools/ccache/ccache_4.6.bb
deleted file mode 100644
index f019679cf1..0000000000
--- a/meta/recipes-devtools/ccache/ccache_4.6.bb
+++ /dev/null
@@ -1,26 +0,0 @@
-SUMMARY = "a fast C/C++ compiler cache"
-DESCRIPTION = "ccache is a compiler cache. It speeds up recompilation \
-by caching the result of previous compilations and detecting when the \
-same compilation is being done again. Supported languages are C, C\+\+, \
-Objective-C and Objective-C++."
-HOMEPAGE = "http://ccache.samba.org"
-SECTION = "devel"
-
-LICENSE = "GPL-3.0-or-later"
-LIC_FILES_CHKSUM = "file://LICENSE.adoc;md5=ff5327dc93e2b286c931dda3d6079da9"
-
-DEPENDS = "zstd"
-
-SRC_URI = "https://github.com/ccache/ccache/releases/download/v${PV}/${BP}.tar.gz"
-SRC_URI[sha256sum] = "73a1767ac6b7c0404a1a55f761a746d338e702883c7137fbf587023062258625"
-
-UPSTREAM_CHECK_URI = "https://github.com/ccache/ccache/releases/"
-
-inherit cmake
-
-PATCHTOOL = "patch"
-
-BBCLASSEXTEND = "native nativesdk"
-
-PACKAGECONFIG[docs] = "-DENABLE_DOCUMENTATION=ON,-DENABLE_DOCUMENTATION=OFF,asciidoc"
-PACKAGECONFIG[redis] = "-DREDIS_STORAGE_BACKEND=ON,-DREDIS_STORAGE_BACKEND=OFF,hiredis"
diff --git a/meta/recipes-devtools/ccache/ccache_4.9.1.bb b/meta/recipes-devtools/ccache/ccache_4.9.1.bb
new file mode 100644
index 0000000000..0d447c4915
--- /dev/null
+++ b/meta/recipes-devtools/ccache/ccache_4.9.1.bb
@@ -0,0 +1,26 @@
+SUMMARY = "a fast C/C++ compiler cache"
+DESCRIPTION = "ccache is a compiler cache. It speeds up recompilation \
+by caching the result of previous compilations and detecting when the \
+same compilation is being done again. Supported languages are C, C\+\+, \
+Objective-C and Objective-C++."
+HOMEPAGE = "http://ccache.samba.org"
+SECTION = "devel"
+
+LICENSE = "GPL-3.0-or-later"
+LIC_FILES_CHKSUM = "file://LICENSE.adoc;md5=9896d6f0aee4d89b9e5ff0afaae0af06"
+
+DEPENDS = "zstd"
+
+SRC_URI = "${GITHUB_BASE_URI}/download/v${PV}/${BP}.tar.gz \
+ file://0001-xxhash.h-Fix-build-with-gcc-12.patch \
+ "
+SRC_URI[sha256sum] = "12834ecaaaf2db069dda1d1d991f91c19e3274cc04a471af5b64195def17e90f"
+
+inherit cmake github-releases
+
+PATCHTOOL = "patch"
+
+BBCLASSEXTEND = "native nativesdk"
+
+PACKAGECONFIG[docs] = "-DENABLE_DOCUMENTATION=ON,-DENABLE_DOCUMENTATION=OFF,asciidoc"
+PACKAGECONFIG[redis] = "-DREDIS_STORAGE_BACKEND=ON,-DREDIS_STORAGE_BACKEND=OFF,hiredis"
diff --git a/meta/recipes-devtools/ccache/files/0001-xxhash.h-Fix-build-with-gcc-12.patch b/meta/recipes-devtools/ccache/files/0001-xxhash.h-Fix-build-with-gcc-12.patch
new file mode 100644
index 0000000000..e65b830257
--- /dev/null
+++ b/meta/recipes-devtools/ccache/files/0001-xxhash.h-Fix-build-with-gcc-12.patch
@@ -0,0 +1,37 @@
+From d17fdacf4892a15fafb56b0890ece05b485e89fb Mon Sep 17 00:00:00 2001
+From: Mingli Yu <mingli.yu@windriver.com>
+Date: Mon, 6 Jun 2022 17:53:20 +0800
+Subject: [PATCH] xxhash.h: Fix build with gcc-12
+
+Remove inline attribute to fix below build failure:
+ | /buildarea/tmp/work/core2-64-poky-linux/ccache/4.6.1-r0/ccache-4.6.1/src/third_party/xxhash.h:3932:1: error: inlining failed in call to 'always_inline' 'XXH3_accumulate_512_sse2': function not considered for inlining
+ 3932 | XXH3_accumulate_512_sse2( void* XXH_RESTRICT acc,
+ | ^~~~~~~~~~~~~~~~~~~~~~~~
+ /buildarea/tmp/work/core2-64-poky-linux/ccache/4.6.1-r0/ccache-4.6.1/src/third_party/xxhash.h:4369:9: note: called from here
+ 4369 | f_acc512(acc,
+ | ^~~~~~~~~~~~~
+ 4370 | in,
+ | ~~~
+ 4371 | secret + n*XXH_SECRET_CONSUME_RATE);
+
+Upstream-Status: Submitted [https://github.com/Cyan4973/xxHash/pull/720]
+
+Signed-off-by: Mingli Yu <mingli.yu@windriver.com>
+
+---
+ src/third_party/xxhash.h | 2 +-
+ 1 file changed, 1 insertion(+), 1 deletion(-)
+
+diff --git a/src/third_party/xxhash.h b/src/third_party/xxhash.h
+index a18e8c7..1b72307 100644
+--- a/src/third_party/xxhash.h
++++ b/src/third_party/xxhash.h
+@@ -2107,7 +2107,7 @@ static void* XXH_memcpy(void* dest, const void* src, size_t size)
+ # define XXH_NO_INLINE static
+ /* enable inlining hints */
+ #elif defined(__GNUC__) || defined(__clang__)
+-# define XXH_FORCE_INLINE static __inline__ __attribute__((always_inline, unused))
++# define XXH_FORCE_INLINE static
+ # define XXH_NO_INLINE static __attribute__((noinline))
+ #elif defined(_MSC_VER) /* Visual Studio */
+ # define XXH_FORCE_INLINE static __forceinline
diff --git a/meta/recipes-devtools/cdrtools/cdrtools-native_3.01.bb b/meta/recipes-devtools/cdrtools/cdrtools-native_3.01.bb
index 757132eae6..bf8be1ad0c 100644
--- a/meta/recipes-devtools/cdrtools/cdrtools-native_3.01.bb
+++ b/meta/recipes-devtools/cdrtools/cdrtools-native_3.01.bb
@@ -8,9 +8,12 @@ SECTION = "console/utils"
LICENSE = "GPL-2.0-only & CDDL-1.0 & LGPL-2.1-or-later"
LIC_FILES_CHKSUM = "file://COPYING;md5=32f68170be424c2cd64804337726b312"
+DEPENDS += "gnu-config-native"
+
SRC_URI = " \
${SOURCEFORGE_MIRROR}/project/cdrtools/cdrtools-${PV}.tar.bz2 \
file://0001-Don-t-set-uid-gid-during-install.patch \
+ file://riscv64-linux-gcc.rul \
"
SRC_URI[md5sum] = "7d45c5b7e1f78d85d1583b361aee6e8b"
@@ -23,6 +26,12 @@ export ac_cv_prog_CC = "${CC}"
inherit native
+do_configure() {
+ install -m 0755 ${STAGING_DATADIR_NATIVE}/gnu-config/config.sub ${S}/autoconf
+ install -m 0755 ${STAGING_DATADIR_NATIVE}/gnu-config/config.guess ${S}/autoconf
+ install -m 0644 ${WORKDIR}/riscv64-linux-gcc.rul ${S}/RULES/
+}
+
do_install() {
make install GMAKE_NOWARN=true INS_BASE=${prefix} DESTDIR=${D}
}
diff --git a/meta/recipes-devtools/cdrtools/cdrtools/riscv64-linux-gcc.rul b/meta/recipes-devtools/cdrtools/cdrtools/riscv64-linux-gcc.rul
new file mode 100644
index 0000000000..3e930225dc
--- /dev/null
+++ b/meta/recipes-devtools/cdrtools/cdrtools/riscv64-linux-gcc.rul
@@ -0,0 +1,65 @@
+#ident "@(#)i586-linux-gcc.rul 1.18 18/11/07 "
+###########################################################################
+# Written 1996-2018 by J. Schilling
+###########################################################################
+#
+# Platform dependent MACROS for Linux
+#
+###########################################################################
+# Copyright (c) J. Schilling
+###########################################################################
+# The contents of this file are subject to the terms of the
+# Common Development and Distribution License, Version 1.0 only
+# (the "License"). You may not use this file except in compliance
+# with the License.
+#
+# See the file CDDL.Schily.txt in this distribution for details.
+# A copy of the CDDL is also available via the Internet at
+# http://www.opensource.org/licenses/cddl1.txt
+#
+# When distributing Covered Code, include this CDDL HEADER in each
+# file and include the License file CDDL.Schily.txt from this distribution.
+###########################################################################
+include $(SRCROOT)/$(RULESDIR)/rules.prg
+###########################################################################
+include $(SRCROOT)/$(RULESDIR)/cc-$(C_ARCH).rul
+
+#
+# LINUX_SRC_INCLUDE is defined in DEFAULTS/Defaults.linux
+#
+INCDIRSX += $(LINUX_SRC_INCLUDE)
+OSDEFS +=
+
+KDEFINES= -DKERNEL -D_KERNEL
+
+LIB_PREFIX= lib
+LIB_SUFFIX= .a
+SHL_SUFFIX= .so.$(DYNMAJOR).$(DYNMINOR)
+
+LIB_SOCKET=
+LIB_MATH= -lm
+LIB_KVM=
+
+#
+# Sunpro C/C++ run on Solaris and Linux and both have linkers
+# that support mapfiles
+#
+MAPVERS= $(_MAPVERS) # This enables to use mapfiles
+#
+# The GNU linker is buggy and does not like the official order in linker map
+# files for symbol versioning. The following command reverses the order of
+# the version names in the linker map file.
+#
+MAPFILE_POST= | sed 's/^SCHILY/+SCHILY/' | tr '\012' '@' | tr '+' '\012' | sort -V | tr '@' '\012'
+
+#LDOPTS= $(LIBS_PATH) $(LDPATH) $(RUNPATH:-R%=-Wl,-R%)
+#
+# Uncomment the next line in case you are on an old Linux version that
+# does not support the -R linker flag.
+#
+#LDOPTS= $(LIBS_PATH) $(LDPATH)
+LDOPTMAP= $(PMAPVERS:%=-Wl,--version-script=%)
+LDOPTDYN= -shared -Wl,-soname,$(TARGET) $(LDOPTMAP)
+
+LORDER= echo
+TSORT= cat
diff --git a/meta/recipes-devtools/cmake/cmake-native_3.23.1.bb b/meta/recipes-devtools/cmake/cmake-native_3.23.1.bb
deleted file mode 100644
index 722a486f20..0000000000
--- a/meta/recipes-devtools/cmake/cmake-native_3.23.1.bb
+++ /dev/null
@@ -1,64 +0,0 @@
-require cmake.inc
-inherit native
-
-DEPENDS += "bzip2-replacement-native xz-native zlib-native curl-native ncurses-native zstd-native"
-
-SRC_URI += "file://OEToolchainConfig.cmake \
- file://environment.d-cmake.sh \
- file://0001-CMakeDetermineSystem-use-oe-environment-vars-to-load.patch \
- file://0005-Disable-use-of-ext2fs-ext2_fs.h-by-cmake-s-internal-.patch \
- "
-
-LICENSE:append = " & BSD-1-Clause & MIT & BSD-2-Clause"
-LIC_FILES_CHKSUM:append = " \
- file://Utilities/cmjsoncpp/LICENSE;md5=5d73c165a0f9e86a1342f32d19ec5926 \
- file://Utilities/cmlibarchive/COPYING;md5=d499814247adaee08d88080841cb5665 \
- file://Utilities/cmexpat/COPYING;md5=9e2ce3b3c4c0f2670883a23bbd7c37a9 \
- file://Utilities/cmlibrhash/COPYING;md5=a8c2a557a5c53b1c12cddbee98c099af \
- file://Utilities/cmlibuv/LICENSE;md5=ad93ca1fffe931537fcf64f6fcce084d \
-"
-
-B = "${WORKDIR}/build"
-do_configure[cleandirs] = "${B}"
-
-CMAKE_EXTRACONF = "\
- -DCMAKE_LIBRARY_PATH=${STAGING_LIBDIR_NATIVE} \
- -DBUILD_CursesDialog=1 \
- -DCMAKE_USE_SYSTEM_LIBRARIES=1 \
- -DCMAKE_USE_SYSTEM_LIBRARY_JSONCPP=0 \
- -DCMAKE_USE_SYSTEM_LIBRARY_LIBARCHIVE=0 \
- -DCMAKE_USE_SYSTEM_LIBRARY_LIBUV=0 \
- -DCMAKE_USE_SYSTEM_LIBRARY_LIBRHASH=0 \
- -DCMAKE_USE_SYSTEM_LIBRARY_EXPAT=0 \
- -DENABLE_ACL=0 -DHAVE_ACL_LIBACL_H=0 \
- -DHAVE_SYS_ACL_H=0 \
-"
-
-do_configure () {
- ${S}/configure --verbose --prefix=${prefix} \
- ${@oe.utils.parallel_make_argument(d, '--parallel=%d')} \
- ${@bb.utils.contains('CCACHE', 'ccache ', '--enable-ccache', '', d)} \
- -- ${CMAKE_EXTRACONF}
-}
-
-do_compile() {
- oe_runmake
-}
-
-do_install() {
- oe_runmake 'DESTDIR=${D}' install
-
- # The following codes are here because eSDK needs to provide compatibilty
- # for SDK. That is, eSDK could also be used like traditional SDK.
- mkdir -p ${D}${datadir}/cmake
- install -m 644 ${WORKDIR}/OEToolchainConfig.cmake ${D}${datadir}/cmake/
- mkdir -p ${D}${base_prefix}/environment-setup.d
- install -m 644 ${WORKDIR}/environment.d-cmake.sh ${D}${base_prefix}/environment-setup.d/cmake.sh
-
- # Help docs create tons of files in the native sysroot and aren't needed there
- rm -rf ${D}${datadir}/cmake-*/Help
-}
-
-do_compile[progress] = "percent"
-
-SYSROOT_DIRS_NATIVE += "${datadir}/cmake ${base_prefix}/environment-setup.d"
diff --git a/meta/recipes-devtools/cmake/cmake-native_3.28.3.bb b/meta/recipes-devtools/cmake/cmake-native_3.28.3.bb
new file mode 100644
index 0000000000..546d117156
--- /dev/null
+++ b/meta/recipes-devtools/cmake/cmake-native_3.28.3.bb
@@ -0,0 +1,67 @@
+require cmake.inc
+inherit native
+
+DEPENDS += "bzip2-replacement-native xz-native zlib-native ncurses-native zstd-native openssl-native"
+
+SRC_URI += "file://OEToolchainConfig.cmake \
+ file://environment.d-cmake.sh \
+ file://0005-Disable-use-of-ext2fs-ext2_fs.h-by-cmake-s-internal-.patch \
+ file://0001-CMakeLists.txt-disable-USE_NGHTTP2.patch \
+ "
+
+LICENSE:append = " & BSD-1-Clause & MIT & BSD-2-Clause & curl"
+LIC_FILES_CHKSUM:append = " \
+ file://Utilities/cmjsoncpp/LICENSE;md5=5d73c165a0f9e86a1342f32d19ec5926 \
+ file://Utilities/cmlibarchive/COPYING;md5=d499814247adaee08d88080841cb5665 \
+ file://Utilities/cmexpat/COPYING;md5=9e2ce3b3c4c0f2670883a23bbd7c37a9 \
+ file://Utilities/cmlibrhash/COPYING;md5=a8c2a557a5c53b1c12cddbee98c099af \
+ file://Utilities/cmlibuv/LICENSE;md5=ad93ca1fffe931537fcf64f6fcce084d \
+ file://Utilities/cmcurl/COPYING;md5=db8448a1e43eb2125f7740fc397db1f6 \
+"
+
+B = "${WORKDIR}/build"
+do_configure[cleandirs] = "${B}"
+
+CMAKE_EXTRACONF = "\
+ -DCMAKE_LIBRARY_PATH=${STAGING_LIBDIR_NATIVE} \
+ -DBUILD_CursesDialog=1 \
+ -DCMAKE_USE_SYSTEM_LIBRARIES=1 \
+ -DCMAKE_USE_SYSTEM_LIBRARY_JSONCPP=0 \
+ -DCMAKE_USE_SYSTEM_LIBRARY_CPPDAP=0 \
+ -DCMAKE_USE_SYSTEM_LIBRARY_LIBARCHIVE=0 \
+ -DCMAKE_USE_SYSTEM_LIBRARY_LIBUV=0 \
+ -DCMAKE_USE_SYSTEM_LIBRARY_LIBRHASH=0 \
+ -DCMAKE_USE_SYSTEM_LIBRARY_EXPAT=0 \
+ -DCMAKE_USE_SYSTEM_LIBRARY_CURL=0 \
+ -DENABLE_ACL=0 -DHAVE_ACL_LIBACL_H=0 \
+ -DHAVE_SYS_ACL_H=0 \
+"
+
+do_configure () {
+ ${S}/bootstrap --verbose --prefix=${prefix} \
+ ${@oe.utils.parallel_make_argument(d, '--parallel=%d')} \
+ ${@bb.utils.contains('CCACHE', 'ccache ', '--enable-ccache', '', d)} \
+ -- ${CMAKE_EXTRACONF}
+}
+
+do_compile() {
+ oe_runmake
+}
+
+do_install() {
+ oe_runmake 'DESTDIR=${D}' install
+
+ # The following codes are here because eSDK needs to provide compatibilty
+ # for SDK. That is, eSDK could also be used like traditional SDK.
+ mkdir -p ${D}${datadir}/cmake
+ install -m 644 ${WORKDIR}/OEToolchainConfig.cmake ${D}${datadir}/cmake/
+ mkdir -p ${D}${base_prefix}/environment-setup.d
+ install -m 644 ${WORKDIR}/environment.d-cmake.sh ${D}${base_prefix}/environment-setup.d/cmake.sh
+
+ # Help docs create tons of files in the native sysroot and aren't needed there
+ rm -rf ${D}${datadir}/cmake-*/Help
+}
+
+do_compile[progress] = "percent"
+
+SYSROOT_DIRS_NATIVE += "${datadir}/cmake ${base_prefix}/environment-setup.d"
diff --git a/meta/recipes-devtools/cmake/cmake.inc b/meta/recipes-devtools/cmake/cmake.inc
index 934d577acf..ab9f459c05 100644
--- a/meta/recipes-devtools/cmake/cmake.inc
+++ b/meta/recipes-devtools/cmake/cmake.inc
@@ -10,21 +10,17 @@ HOMEPAGE = "http://www.cmake.org/"
BUGTRACKER = "http://public.kitware.com/Bug/my_view_page.php"
SECTION = "console/utils"
LICENSE = "BSD-3-Clause"
-LIC_FILES_CHKSUM = "file://Copyright.txt;md5=f2102a52df7aa592cf072180e7ebc8c7 \
+LIC_FILES_CHKSUM = "file://Copyright.txt;md5=9d3d12c5f3b4c1f83650adcc65b59c06 \
file://Source/cmake.h;beginline=1;endline=2;md5=a5f70e1fef8614734eae0d62b4f5891b \
"
CMAKE_MAJOR_VERSION = "${@'.'.join(d.getVar('PV').split('.')[0:2])}"
SRC_URI = "https://cmake.org/files/v${CMAKE_MAJOR_VERSION}/cmake-${PV}.tar.gz \
- file://0003-cmake-support-OpenEmbedded-Qt4-tool-binary-names.patch \
- file://0004-Fail-silently-if-system-Qt-installation-is-broken.patch \
"
-SRC_URI[sha256sum] = "33fd10a8ec687a4d0d5b42473f10459bb92b3ae7def2b745dc10b192760869f3"
+SRC_URI[sha256sum] = "72b7570e5c8593de6ac4ab433b73eab18c5fb328880460c86ce32608141ad5c1"
UPSTREAM_CHECK_REGEX = "cmake-(?P<pver>\d+(\.\d+)+)\.tar"
-# This is specific to the npm package that installs cmake, so isn't
-# relevant to OpenEmbedded
-CVE_CHECK_IGNORE += "CVE-2016-10642"
+CVE_STATUS[CVE-2016-10642] = "cpe-incorrect: This is specific to the npm package that installs cmake, so isn't relevant to OpenEmbedded"
diff --git a/meta/recipes-devtools/cmake/cmake/0001-CMakeDetermineSystem-use-oe-environment-vars-to-load.patch b/meta/recipes-devtools/cmake/cmake/0001-CMakeDetermineSystem-use-oe-environment-vars-to-load.patch
deleted file mode 100644
index 9a2287f517..0000000000
--- a/meta/recipes-devtools/cmake/cmake/0001-CMakeDetermineSystem-use-oe-environment-vars-to-load.patch
+++ /dev/null
@@ -1,44 +0,0 @@
-From 89f6c846f02ad6d30b9ebb7eaaaa4fb6f9cec054 Mon Sep 17 00:00:00 2001
-From: Cody P Schafer <dev@codyps.com>
-Date: Thu, 27 Apr 2017 11:35:05 -0400
-Subject: [PATCH] CMakeDetermineSystem: use oe environment vars to load default
- toolchain file in sdk
-
-Passing the toolchain by:
-
- - shell aliases does not work if cmake is called by a script
- - unconditionally by a wrapper script causes cmake to believe it is
- configuring things when it is not (for example, `cmake --build` breaks).
-
-The OE_CMAKE_TOOLCHAIN_FILE variable is only used as a default if no
-toolchain is explicitly specified.
-
-Setting the CMAKE_TOOLCHAIN_FILE cmake variable is marked as cached
-because '-D' options are cache entries themselves.
-
-Upstream-Status: Inappropriate [oe-core specific]
-Signed-off-by: Cody P Schafer <dev@codyps.com>
-Signed-off-by: Otavio Salvador <otavio@ossystems.com.br>
-
----
- Modules/CMakeDetermineSystem.cmake | 7 +++++++
- 1 file changed, 7 insertions(+)
-
-diff --git a/Modules/CMakeDetermineSystem.cmake b/Modules/CMakeDetermineSystem.cmake
-index 8c7af067..ade2b189 100644
---- a/Modules/CMakeDetermineSystem.cmake
-+++ b/Modules/CMakeDetermineSystem.cmake
-@@ -112,6 +112,13 @@ else()
- endif()
- endif()
-
-+if(NOT DEFINED CMAKE_TOOLCHAIN_FILE)
-+ if(DEFINED ENV{OE_CMAKE_TOOLCHAIN_FILE})
-+ set(CMAKE_TOOLCHAIN_FILE "$ENV{OE_CMAKE_TOOLCHAIN_FILE}" CACHE FILEPATH "toolchain file")
-+ message(STATUS "Toolchain file defaulted to '${CMAKE_TOOLCHAIN_FILE}'")
-+ endif()
-+endif()
-+
- # if a toolchain file is used, the user wants to cross compile.
- # in this case read the toolchain file and keep the CMAKE_HOST_SYSTEM_*
- # variables around so they can be used in CMakeLists.txt.
diff --git a/meta/recipes-devtools/cmake/cmake/0001-CMakeLists.txt-disable-USE_NGHTTP2.patch b/meta/recipes-devtools/cmake/cmake/0001-CMakeLists.txt-disable-USE_NGHTTP2.patch
new file mode 100644
index 0000000000..b2933d88be
--- /dev/null
+++ b/meta/recipes-devtools/cmake/cmake/0001-CMakeLists.txt-disable-USE_NGHTTP2.patch
@@ -0,0 +1,35 @@
+From d33d8a5e9f3b25a80d47b72b1a8a6624a85563c1 Mon Sep 17 00:00:00 2001
+From: Changqing Li <changqing.li@windriver.com>
+Date: Wed, 28 Dec 2022 17:51:27 +0800
+Subject: [PATCH] CMakeLists.txt: disable USE_NGHTTP2
+
+nghttp2 depends on cmake-native to build, to break circular
+dependency, disable nghttp2.
+
+Upstream-Status: Inappropriate [oe specific]
+
+Signed-off-by: Changqing Li <changqing.li@windriver.com>
+
+Adjust the patch to apply on top of v3.28.3.
+
+Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
+---
+ Utilities/cmcurl/CMakeLists.txt | 2 +-
+ 1 file changed, 1 insertion(+), 1 deletion(-)
+
+diff --git a/Utilities/cmcurl/CMakeLists.txt b/Utilities/cmcurl/CMakeLists.txt
+index 9387247366..d3de01f4e8 100644
+--- a/Utilities/cmcurl/CMakeLists.txt
++++ b/Utilities/cmcurl/CMakeLists.txt
+@@ -88,7 +88,7 @@ set(HTTP_ONLY OFF CACHE INTERNAL "Curl is not http-only")
+ set(PICKY_COMPILER OFF CACHE INTERNAL "Enable picky compiler options")
+ set(SHARE_LIB_OBJECT OFF)
+ set(USE_LIBIDN2 ON)
+-set(USE_NGHTTP2 ON)
++set(USE_NGHTTP2 OFF)
+ set(USE_NGTCP2 OFF)
+ set(USE_QUICHE OFF)
+ set(USE_WIN32_IDN OFF)
+--
+2.43.0
+
diff --git a/meta/recipes-devtools/cmake/cmake/0003-cmake-support-OpenEmbedded-Qt4-tool-binary-names.patch b/meta/recipes-devtools/cmake/cmake/0003-cmake-support-OpenEmbedded-Qt4-tool-binary-names.patch
deleted file mode 100644
index 575a5cb7fb..0000000000
--- a/meta/recipes-devtools/cmake/cmake/0003-cmake-support-OpenEmbedded-Qt4-tool-binary-names.patch
+++ /dev/null
@@ -1,56 +0,0 @@
-From 2d02ac91d5a5d72eaddba4894eaa6db3ed8fee62 Mon Sep 17 00:00:00 2001
-From: Otavio Salvador <otavio@ossystems.com.br>
-Date: Thu, 12 May 2011 15:36:03 +0000
-Subject: [PATCH] cmake: support OpenEmbedded Qt4 tool binary names
-
-The FindQt4 module looks for Qt4 binaries to be able to gather the
-paths used for compilation and also to be using during other processes
-(translation update, translation binary generating and like) however
-OpenEmbedded has renamed those to allow old QMake to be used in
-parallel with the current one. This patch adds support for the
-OpenEmbedded specific binary names.
-
-Upstream-Status: Inappropriate [embedded specific]
-
-Signed-off-by: Otavio Salvador <otavio@ossystems.com.br>
-
-The patch was slightly adapted in order to match cmake 3.2.2:
-Instead of find_program, _find_qt4_program is now used.
-
-Signed-off-by: Moritz Blume <moritz.blume@bmw-carit.de>
-Signed-off-by: Otavio Salvador <otavio@ossystems.com.br>
-
----
- Modules/FindQt4.cmake | 10 +++++-----
- 1 file changed, 5 insertions(+), 5 deletions(-)
-
-diff --git a/Modules/FindQt4.cmake b/Modules/FindQt4.cmake
-index 3993968..b2a8585 100644
---- a/Modules/FindQt4.cmake
-+++ b/Modules/FindQt4.cmake
-@@ -518,7 +518,7 @@ endfunction()
-
- set(QT4_INSTALLED_VERSION_TOO_OLD FALSE)
-
--set(_QT4_QMAKE_NAMES qmake qmake4 qmake-qt4 qmake-mac)
-+set(_QT4_QMAKE_NAMES qmake qmake2 qmake4 qmake-qt4 qmake-mac)
- _qt4_find_qmake("${_QT4_QMAKE_NAMES}" QT_QMAKE_EXECUTABLE QTVERSION)
-
- if (QT_QMAKE_EXECUTABLE AND
-@@ -1136,12 +1136,12 @@ if (QT_QMAKE_EXECUTABLE AND
- _find_qt4_program(QT_MOC_EXECUTABLE Qt4::moc moc-qt4 moc4 moc)
- _find_qt4_program(QT_UIC_EXECUTABLE Qt4::uic uic-qt4 uic4 uic)
- _find_qt4_program(QT_UIC3_EXECUTABLE Qt4::uic3 uic3)
-- _find_qt4_program(QT_RCC_EXECUTABLE Qt4::rcc rcc)
-- _find_qt4_program(QT_DBUSCPP2XML_EXECUTABLE Qt4::qdbuscpp2xml qdbuscpp2xml)
-- _find_qt4_program(QT_DBUSXML2CPP_EXECUTABLE Qt4::qdbusxml2cpp qdbusxml2cpp)
-+ _find_qt4_program(QT_RCC_EXECUTABLE Qt4::rcc rcc4 rcc)
-+ _find_qt4_program(QT_DBUSCPP2XML_EXECUTABLE Qt4::qdbuscpp2xml qdbuscpp2xml4 qdbuscpp2xml)
-+ _find_qt4_program(QT_DBUSXML2CPP_EXECUTABLE Qt4::qdbusxml2cpp qdbusxml2cpp4 qdbusxml2cpp)
- _find_qt4_program(QT_LUPDATE_EXECUTABLE Qt4::lupdate lupdate-qt4 lupdate4 lupdate)
- _find_qt4_program(QT_LRELEASE_EXECUTABLE Qt4::lrelease lrelease-qt4 lrelease4 lrelease)
-- _find_qt4_program(QT_QCOLLECTIONGENERATOR_EXECUTABLE Qt4::qcollectiongenerator qcollectiongenerator-qt4 qcollectiongenerator)
-+ _find_qt4_program(QT_QCOLLECTIONGENERATOR_EXECUTABLE Qt4::qcollectiongenerator qcollectiongenerator-qt4 qcollectiongenerator qcollectiongenerator4)
- _find_qt4_program(QT_DESIGNER_EXECUTABLE Qt4::designer designer-qt4 designer4 designer)
- _find_qt4_program(QT_LINGUIST_EXECUTABLE Qt4::linguist linguist-qt4 linguist4 linguist)
-
diff --git a/meta/recipes-devtools/cmake/cmake/0004-Fail-silently-if-system-Qt-installation-is-broken.patch b/meta/recipes-devtools/cmake/cmake/0004-Fail-silently-if-system-Qt-installation-is-broken.patch
deleted file mode 100644
index 1b196db81a..0000000000
--- a/meta/recipes-devtools/cmake/cmake/0004-Fail-silently-if-system-Qt-installation-is-broken.patch
+++ /dev/null
@@ -1,79 +0,0 @@
-From 60864efbe52cc12018efaafbc4e4c3c8b4af2b65 Mon Sep 17 00:00:00 2001
-From: Otavio Salvador <otavio@ossystems.com.br>
-Date: Thu, 5 Jul 2018 10:26:48 -0300
-Subject: [PATCH] Fail silently if system Qt installation is broken
-
-Fixes a regression in behaviour from 2.8.10 to 2.8.11 resulting in the
-following error if the system Qt installation is broken:
-
-CMake Error at Modules/FindQt4.cmake:1028 (set_property):
- set_property could not find TARGET Qt4::QtCore. Perhaps it has not yet
- been created.
-Call Stack (most recent call first):
- Tests/RunCMake/CMakeLists.txt:79 (find_package)
-
-Upstream-Status: Pending
-
-Signed-off-by: Paul Eggleton <paul.eggleton@linux.intel.com>
-
-The patch was slightly adapted in order to match cmake 3.2.2:
-Another set_property was introduced which had to be included
-within the if(QT_QTCORE_FOUND) statement.
-
-Signed-off-by: Moritz Blume <moritz.blume@bmw-carit.de>
-Signed-off-by: Otavio Salvador <otavio@ossystems.com.br>
-
----
- Modules/FindQt4.cmake | 39 ++++++++++++++++++++-------------------
- 1 file changed, 20 insertions(+), 19 deletions(-)
-
-diff --git a/Modules/FindQt4.cmake b/Modules/FindQt4.cmake
-index b2a8585..77c89aa 100644
---- a/Modules/FindQt4.cmake
-+++ b/Modules/FindQt4.cmake
-@@ -988,25 +988,26 @@ if (QT_QMAKE_EXECUTABLE AND
- endif()
- endmacro()
-
--
-- # Set QT_xyz_LIBRARY variable and add
-- # library include path to QT_INCLUDES
-- _QT4_ADJUST_LIB_VARS(QtCore)
-- set_property(TARGET Qt4::QtCore APPEND PROPERTY
-- INTERFACE_INCLUDE_DIRECTORIES
-- "${QT_MKSPECS_DIR}/default"
-- ${QT_INCLUDE_DIR}
-- )
-- set_property(TARGET Qt4::QtCore APPEND PROPERTY
-- INTERFACE_COMPILE_DEFINITIONS
-- $<$<NOT:$<CONFIG:Debug>>:QT_NO_DEBUG>
-- )
-- set_property(TARGET Qt4::QtCore PROPERTY
-- INTERFACE_QT_MAJOR_VERSION 4
-- )
-- set_property(TARGET Qt4::QtCore APPEND PROPERTY
-- COMPATIBLE_INTERFACE_STRING QT_MAJOR_VERSION
-- )
-+ if(QT_QTCORE_FOUND)
-+ # Set QT_xyz_LIBRARY variable and add
-+ # library include path to QT_INCLUDES
-+ _QT4_ADJUST_LIB_VARS(QtCore)
-+ set_property(TARGET Qt4::QtCore APPEND PROPERTY
-+ INTERFACE_INCLUDE_DIRECTORIES
-+ "${QT_MKSPECS_DIR}/default"
-+ ${QT_INCLUDE_DIR}
-+ )
-+ set_property(TARGET Qt4::QtCore APPEND PROPERTY
-+ INTERFACE_COMPILE_DEFINITIONS
-+ $<$<NOT:$<CONFIG:Debug>>:QT_NO_DEBUG>
-+ )
-+ set_property(TARGET Qt4::QtCore PROPERTY
-+ INTERFACE_QT_MAJOR_VERSION 4
-+ )
-+ set_property(TARGET Qt4::QtCore APPEND PROPERTY
-+ COMPATIBLE_INTERFACE_STRING QT_MAJOR_VERSION
-+ )
-+ endif()
-
- foreach(QT_MODULE ${QT_MODULES})
- _QT4_ADJUST_LIB_VARS(${QT_MODULE})
diff --git a/meta/recipes-devtools/cmake/cmake/OEToolchainConfig.cmake b/meta/recipes-devtools/cmake/cmake/OEToolchainConfig.cmake
index 86446c3ace..6434b27371 100644
--- a/meta/recipes-devtools/cmake/cmake/OEToolchainConfig.cmake
+++ b/meta/recipes-devtools/cmake/cmake/OEToolchainConfig.cmake
@@ -1,7 +1,6 @@
set( CMAKE_SYSTEM_NAME Linux )
set( CMAKE_C_FLAGS $ENV{CFLAGS} CACHE STRING "" FORCE )
set( CMAKE_CXX_FLAGS $ENV{CXXFLAGS} CACHE STRING "" FORCE )
-set( CMAKE_ASM_FLAGS ${CMAKE_C_FLAGS} CACHE STRING "" FORCE )
set( CMAKE_SYSROOT $ENV{OECORE_TARGET_SYSROOT} )
set( CMAKE_FIND_ROOT_PATH $ENV{OECORE_TARGET_SYSROOT} )
@@ -12,13 +11,13 @@ set( CMAKE_FIND_ROOT_PATH_MODE_PACKAGE ONLY )
set(CMAKE_FIND_LIBRARY_CUSTOM_LIB_SUFFIX "$ENV{OE_CMAKE_FIND_LIBRARY_CUSTOM_LIB_SUFFIX}")
-# Set CMAKE_SYSTEM_PROCESSOR from the sysroot name (assuming processor-distro-os).
-if ($ENV{SDKTARGETSYSROOT} MATCHES "/sysroots/([a-zA-Z0-9_-]+)-.+-.+")
- set(CMAKE_SYSTEM_PROCESSOR ${CMAKE_MATCH_1})
-endif()
+set( CMAKE_SYSTEM_PROCESSOR $ENV{OECORE_TARGET_ARCH} )
# Include the toolchain configuration subscripts
file( GLOB toolchain_config_files "${CMAKE_CURRENT_LIST_FILE}.d/*.cmake" )
foreach(config ${toolchain_config_files})
include(${config})
endforeach()
+
+unset(CMAKE_C_IMPLICIT_INCLUDE_DIRECTORIES)
+unset(CMAKE_CXX_IMPLICIT_INCLUDE_DIRECTORIES)
diff --git a/meta/recipes-devtools/cmake/cmake/environment.d-cmake.sh b/meta/recipes-devtools/cmake/cmake/environment.d-cmake.sh
index 7bdb19fb6c..c94b6bb3e3 100644
--- a/meta/recipes-devtools/cmake/cmake/environment.d-cmake.sh
+++ b/meta/recipes-devtools/cmake/cmake/environment.d-cmake.sh
@@ -1,2 +1,2 @@
-export OE_CMAKE_TOOLCHAIN_FILE="$OECORE_NATIVE_SYSROOT/usr/share/cmake/OEToolchainConfig.cmake"
+export CMAKE_TOOLCHAIN_FILE="$OECORE_NATIVE_SYSROOT/usr/share/cmake/OEToolchainConfig.cmake"
export OE_CMAKE_FIND_LIBRARY_CUSTOM_LIB_SUFFIX="`echo $OECORE_BASELIB | sed -e s/lib//`"
diff --git a/meta/recipes-devtools/cmake/cmake_3.23.1.bb b/meta/recipes-devtools/cmake/cmake_3.23.1.bb
deleted file mode 100644
index bb7ed83e30..0000000000
--- a/meta/recipes-devtools/cmake/cmake_3.23.1.bb
+++ /dev/null
@@ -1,67 +0,0 @@
-require cmake.inc
-
-inherit cmake bash-completion
-
-DEPENDS += "curl expat zlib libarchive xz ncurses bzip2"
-
-SRC_URI:append:class-nativesdk = " \
- file://OEToolchainConfig.cmake \
- file://SDKToolchainConfig.cmake.template \
- file://cmake-setup.py \
- file://environment.d-cmake.sh \
- file://0001-CMakeDetermineSystem-use-oe-environment-vars-to-load.patch \
-"
-
-LICENSE:append = " & BSD-1-Clause & MIT"
-LIC_FILES_CHKSUM:append = " \
- file://Utilities/cmjsoncpp/LICENSE;md5=5d73c165a0f9e86a1342f32d19ec5926 \
- file://Utilities/cmlibrhash/COPYING;md5=a8c2a557a5c53b1c12cddbee98c099af \
- file://Utilities/cmlibuv/LICENSE;md5=ad93ca1fffe931537fcf64f6fcce084d \
-"
-
-# Strip ${prefix} from ${docdir}, set result into docdir_stripped
-python () {
- prefix=d.getVar("prefix")
- docdir=d.getVar("docdir")
-
- if not docdir.startswith(prefix):
- bb.fatal('docdir must contain prefix as its prefix')
-
- docdir_stripped = docdir[len(prefix):]
- if len(docdir_stripped) > 0 and docdir_stripped[0] == '/':
- docdir_stripped = docdir_stripped[1:]
-
- d.setVar("docdir_stripped", docdir_stripped)
-}
-
-EXTRA_OECMAKE=" \
- -DCMAKE_DOC_DIR=${docdir_stripped}/cmake-${CMAKE_MAJOR_VERSION} \
- -DCMAKE_USE_SYSTEM_LIBRARIES=1 \
- -DCMAKE_USE_SYSTEM_LIBRARY_JSONCPP=0 \
- -DCMAKE_USE_SYSTEM_LIBRARY_LIBUV=0 \
- -DCMAKE_USE_SYSTEM_LIBRARY_LIBRHASH=0 \
- -DKWSYS_CHAR_IS_SIGNED=1 \
- -DBUILD_CursesDialog=0 \
- -DKWSYS_LFS_WORKS=1 \
-"
-
-do_install:append:class-nativesdk() {
- mkdir -p ${D}${datadir}/cmake
- install -m 644 ${WORKDIR}/OEToolchainConfig.cmake ${D}${datadir}/cmake/
-
- mkdir -p ${D}${SDKPATHNATIVE}/environment-setup.d
- install -m 644 ${WORKDIR}/environment.d-cmake.sh ${D}${SDKPATHNATIVE}/environment-setup.d/cmake.sh
-
- # install cmake-setup.py to create arch-specific toolchain cmake file from template
- install -m 0644 ${WORKDIR}/SDKToolchainConfig.cmake.template ${D}${datadir}/cmake/
- install -d ${D}${SDKPATHNATIVE}/post-relocate-setup.d
- install -m 0755 ${WORKDIR}/cmake-setup.py ${D}${SDKPATHNATIVE}/post-relocate-setup.d/
-}
-
-FILES:${PN}:append:class-nativesdk = " ${SDKPATHNATIVE}"
-
-FILES:${PN} += "${datadir}/cmake-${CMAKE_MAJOR_VERSION} ${datadir}/cmake ${datadir}/aclocal ${datadir}/emacs ${datadir}/vim"
-FILES:${PN}-doc += "${docdir}/cmake-${CMAKE_MAJOR_VERSION}"
-FILES:${PN}-dev = ""
-
-BBCLASSEXTEND = "nativesdk"
diff --git a/meta/recipes-devtools/cmake/cmake_3.28.3.bb b/meta/recipes-devtools/cmake/cmake_3.28.3.bb
new file mode 100644
index 0000000000..6a9a3266df
--- /dev/null
+++ b/meta/recipes-devtools/cmake/cmake_3.28.3.bb
@@ -0,0 +1,68 @@
+require cmake.inc
+
+inherit cmake bash-completion
+
+DEPENDS += "curl expat zlib libarchive xz ncurses bzip2"
+
+SRC_URI:append:class-nativesdk = " \
+ file://OEToolchainConfig.cmake \
+ file://SDKToolchainConfig.cmake.template \
+ file://cmake-setup.py \
+ file://environment.d-cmake.sh \
+"
+
+LICENSE:append = " & BSD-1-Clause & MIT"
+LIC_FILES_CHKSUM:append = " \
+ file://Utilities/cmjsoncpp/LICENSE;md5=5d73c165a0f9e86a1342f32d19ec5926 \
+ file://Utilities/cmlibrhash/COPYING;md5=a8c2a557a5c53b1c12cddbee98c099af \
+ file://Utilities/cmlibuv/LICENSE;md5=ad93ca1fffe931537fcf64f6fcce084d \
+"
+
+# Strip ${prefix} from ${docdir}, set result into docdir_stripped
+python () {
+ prefix=d.getVar("prefix")
+ docdir=d.getVar("docdir")
+
+ if not docdir.startswith(prefix):
+ bb.fatal('docdir must contain prefix as its prefix')
+
+ docdir_stripped = docdir[len(prefix):]
+ if len(docdir_stripped) > 0 and docdir_stripped[0] == '/':
+ docdir_stripped = docdir_stripped[1:]
+
+ d.setVar("docdir_stripped", docdir_stripped)
+}
+
+EXTRA_OECMAKE=" \
+ -DCMAKE_DOC_DIR=${docdir_stripped}/cmake-${CMAKE_MAJOR_VERSION} \
+ -DCMAKE_USE_SYSTEM_LIBRARIES=1 \
+ -DCMAKE_USE_SYSTEM_LIBRARY_JSONCPP=0 \
+ -DCMAKE_USE_SYSTEM_LIBRARY_CPPDAP=0 \
+ -DCMAKE_USE_SYSTEM_LIBRARY_LIBUV=0 \
+ -DCMAKE_USE_SYSTEM_LIBRARY_LIBRHASH=0 \
+ -DKWSYS_CHAR_IS_SIGNED=1 \
+ -DBUILD_CursesDialog=0 \
+ -DKWSYS_LFS_WORKS=1 \
+ -DCMake_ENABLE_DEBUGGER=0 \
+"
+
+do_install:append:class-nativesdk() {
+ mkdir -p ${D}${datadir}/cmake
+ install -m 644 ${WORKDIR}/OEToolchainConfig.cmake ${D}${datadir}/cmake/
+
+ mkdir -p ${D}${SDKPATHNATIVE}/environment-setup.d
+ install -m 644 ${WORKDIR}/environment.d-cmake.sh ${D}${SDKPATHNATIVE}/environment-setup.d/cmake.sh
+
+ # install cmake-setup.py to create arch-specific toolchain cmake file from template
+ install -m 0644 ${WORKDIR}/SDKToolchainConfig.cmake.template ${D}${datadir}/cmake/
+ install -d ${D}${SDKPATHNATIVE}/post-relocate-setup.d
+ install -m 0755 ${WORKDIR}/cmake-setup.py ${D}${SDKPATHNATIVE}/post-relocate-setup.d/
+}
+
+FILES:${PN}:append:class-nativesdk = " ${SDKPATHNATIVE}"
+
+FILES:${PN} += "${datadir}/cmake-${CMAKE_MAJOR_VERSION} ${datadir}/cmake ${datadir}/aclocal ${datadir}/emacs ${datadir}/vim"
+FILES:${PN}-doc += "${docdir}/cmake-${CMAKE_MAJOR_VERSION}"
+FILES:${PN}-dev = ""
+
+BBCLASSEXTEND = "nativesdk"
diff --git a/meta/recipes-devtools/createrepo-c/createrepo-c/0001-include-rpm-rpmstring.h.patch b/meta/recipes-devtools/createrepo-c/createrepo-c/0001-include-rpm-rpmstring.h.patch
new file mode 100644
index 0000000000..b7e5710b39
--- /dev/null
+++ b/meta/recipes-devtools/createrepo-c/createrepo-c/0001-include-rpm-rpmstring.h.patch
@@ -0,0 +1,30 @@
+From eb66326c3fc6e942282d01ddd56659c78ed7400b Mon Sep 17 00:00:00 2001
+From: Khem Raj <raj.khem@gmail.com>
+Date: Fri, 13 Jan 2023 13:21:51 -0800
+Subject: [PATCH] include rpm/rpmstring.h
+
+Its needed for rasprintf declaration
+
+Fixes
+src/xml_file.c:341:36: error: call to undeclared functi
+on 'rasprintf'; ISO C99 and later do not support implicit function declarations [-Wimplicit-function-declaration]
+
+Upstream-Status: Submitted [https://github.com/rpm-software-management/createrepo_c/pull/340]
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+
+---
+ src/xml_file.c | 1 +
+ 1 file changed, 1 insertion(+)
+
+diff --git a/src/xml_file.c b/src/xml_file.c
+index d2400b8..871109c 100644
+--- a/src/xml_file.c
++++ b/src/xml_file.c
+@@ -19,6 +19,7 @@
+
+ #include <glib.h>
+ #include <glib/gstdio.h>
++#include <rpm/rpmstring.h>
+ #include <assert.h>
+ #include <rpm/rpmstring.h>
+ #include "xml_file.h"
diff --git a/meta/recipes-devtools/createrepo-c/createrepo-c_0.20.0.bb b/meta/recipes-devtools/createrepo-c/createrepo-c_0.20.0.bb
deleted file mode 100644
index bc61cac955..0000000000
--- a/meta/recipes-devtools/createrepo-c/createrepo-c_0.20.0.bb
+++ /dev/null
@@ -1,40 +0,0 @@
-DESCRIPTION = "C implementation of createrepo."
-HOMEPAGE = "https://github.com/rpm-software-management/createrepo_c/wiki"
-
-LICENSE = "GPL-2.0-only"
-LIC_FILES_CHKSUM = "file://COPYING;md5=b234ee4d69f5fce4486a80fdaf4a4263"
-
-SRC_URI = "git://github.com/rpm-software-management/createrepo_c;branch=master;protocol=https \
- file://0001-Do-not-set-PYTHON_INSTALL_DIR-by-running-python.patch \
- "
-
-SRCREV = "e53188f86f4dfad46f5b81d6931cbaec19e44ea2"
-
-S = "${WORKDIR}/git"
-
-DEPENDS = "expat curl glib-2.0 libxml2 openssl bzip2 zlib file sqlite3 xz rpm"
-DEPENDS:append:class-native = " file-replacement-native"
-
-inherit cmake pkgconfig bash-completion setuptools3-base
-
-EXTRA_OECMAKE = " -DPYTHON_INSTALL_DIR=${PYTHON_SITEPACKAGES_DIR} -DPYTHON_DESIRED=3 -DWITH_ZCHUNK=OFF -DENABLE_DRPM=OFF -DWITH_LIBMODULEMD=OFF"
-
-BBCLASSEXTEND = "native nativesdk"
-
-# Direct createrepo to read rpm configuration from our sysroot, not the one it was compiled in
-do_install:append:class-native() {
- create_wrapper ${D}/${bindir}/createrepo_c \
- RPM_CONFIGDIR=${STAGING_LIBDIR_NATIVE}/rpm \
- MAGIC=${STAGING_DIR_NATIVE}${datadir_native}/misc/magic.mgc
- create_wrapper ${D}/${bindir}/modifyrepo_c \
- MAGIC=${STAGING_DIR_NATIVE}${datadir_native}/misc/magic.mgc
-}
-
-do_install:append:class-nativesdk() {
- create_wrapper ${D}/${bindir}/createrepo_c \
- RPM_CONFIGDIR=${SDKPATHNATIVE}${libdir_nativesdk}/rpm \
- MAGIC=${datadir}/misc/magic.mgc
- create_wrapper ${D}/${bindir}/modifyrepo_c \
- MAGIC=${datadir}/misc/magic.mgc
- rm -rf ${D}/etc
-}
diff --git a/meta/recipes-devtools/createrepo-c/createrepo-c_1.1.0.bb b/meta/recipes-devtools/createrepo-c/createrepo-c_1.1.0.bb
new file mode 100644
index 0000000000..1f97c99bde
--- /dev/null
+++ b/meta/recipes-devtools/createrepo-c/createrepo-c_1.1.0.bb
@@ -0,0 +1,41 @@
+SUMMARY = "C implementation of createrepo."
+HOMEPAGE = "https://github.com/rpm-software-management/createrepo_c/wiki"
+
+LICENSE = "GPL-2.0-only"
+LIC_FILES_CHKSUM = "file://COPYING;md5=b234ee4d69f5fce4486a80fdaf4a4263"
+
+SRC_URI = "git://github.com/rpm-software-management/createrepo_c;branch=master;protocol=https \
+ file://0001-Do-not-set-PYTHON_INSTALL_DIR-by-running-python.patch \
+ file://0001-include-rpm-rpmstring.h.patch \
+ "
+
+SRCREV = "10a8a7af4f1de3f98a21a7d08fe3a46ef306d197"
+
+S = "${WORKDIR}/git"
+
+DEPENDS = "expat curl glib-2.0 libxml2 openssl bzip2 zlib file sqlite3 xz rpm"
+DEPENDS:append:class-native = " file-replacement-native"
+
+inherit cmake pkgconfig bash-completion setuptools3-base
+
+EXTRA_OECMAKE = " -DPYTHON_INSTALL_DIR=${PYTHON_SITEPACKAGES_DIR} -DPYTHON_DESIRED=3 -DWITH_ZCHUNK=OFF -DENABLE_DRPM=OFF -DWITH_LIBMODULEMD=OFF"
+
+BBCLASSEXTEND = "native nativesdk"
+
+# Direct createrepo to read rpm configuration from our sysroot, not the one it was compiled in
+do_install:append:class-native() {
+ create_wrapper ${D}/${bindir}/createrepo_c \
+ RPM_CONFIGDIR=${STAGING_LIBDIR_NATIVE}/rpm \
+ MAGIC=${STAGING_DIR_NATIVE}${datadir_native}/misc/magic.mgc
+ create_wrapper ${D}/${bindir}/modifyrepo_c \
+ MAGIC=${STAGING_DIR_NATIVE}${datadir_native}/misc/magic.mgc
+}
+
+do_install:append:class-nativesdk() {
+ create_wrapper ${D}/${bindir}/createrepo_c \
+ RPM_CONFIGDIR=${SDKPATHNATIVE}${libdir_nativesdk}/rpm \
+ MAGIC=${datadir}/misc/magic.mgc
+ create_wrapper ${D}/${bindir}/modifyrepo_c \
+ MAGIC=${datadir}/misc/magic.mgc
+ rm -rf ${D}/etc
+}
diff --git a/meta/recipes-devtools/debugedit/debugedit_5.0.bb b/meta/recipes-devtools/debugedit/debugedit_5.0.bb
new file mode 100644
index 0000000000..63ad7babd9
--- /dev/null
+++ b/meta/recipes-devtools/debugedit/debugedit_5.0.bb
@@ -0,0 +1,28 @@
+SUMMARY = "Tools for creating debuginfo and source file distributions"
+DESCRIPTION = "debugedit provides programs and scripts for creating \
+debuginfo and source file distributions, collect build-ids and rewrite \
+source paths in DWARF data for debugging, tracing and profiling."
+HOMEPAGE = "https://sourceware.org/debugedit/"
+
+LICENSE = "GPL-2.0-only & GPL-3.0-only & LGPL-2.1-only"
+LIC_FILES_CHKSUM = "file://COPYING;md5=59530bdf33659b29e73d4adb9f9f6552 \
+ file://COPYING.LIB;md5=2d5025d4aa3495befef8f17206a5b0a1 \
+ file://COPYING3;md5=d32239bcb673463ab874e80d47fae504"
+
+SRC_URI = "https://sourceware.org/ftp/debugedit/${PV}/debugedit-${PV}.tar.xz"
+
+SRC_URI:append:libc-musl = "\
+ file://0002-sepdebugcrcfix.c-do-not-use-64bit-variants.patch \
+ file://0003-Makefile.am-do-not-update-manual.patch \
+ "
+
+SRC_URI[sha256sum] = "e9ecd7d350bebae1f178ce6776ca19a648b6fe8fa22f5b3044b38d7899aa553e"
+
+DEPENDS = "elfutils"
+DEPENDS:append:libc-musl = " musl-legacy-error"
+
+inherit pkgconfig autotools
+
+RDEPENDS:${PN} += "bash elfutils-binutils"
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/debugedit/files/0002-sepdebugcrcfix.c-do-not-use-64bit-variants.patch b/meta/recipes-devtools/debugedit/files/0002-sepdebugcrcfix.c-do-not-use-64bit-variants.patch
new file mode 100644
index 0000000000..4463bd2324
--- /dev/null
+++ b/meta/recipes-devtools/debugedit/files/0002-sepdebugcrcfix.c-do-not-use-64bit-variants.patch
@@ -0,0 +1,56 @@
+From 3e05bbc1f7909ab6f529e66f0d0f70fb1e60583a Mon Sep 17 00:00:00 2001
+From: Chen Qi <Qi.Chen@windriver.com>
+Date: Thu, 23 Mar 2023 11:55:53 +0800
+Subject: [PATCH 2/2] sepdebugcrcfix.c: do not use 64bit variants
+
+configure.ac checks for largefile support via AC_SYS_LARGEFILE
+already, therefore use off_t, open and lseek instead of 64bit
+variants. Musl e.g. does not define them without _LARGEFILE64_SOURCE
+and error is not seen on glibc because _GNU_SOURCE defines
+_LARGEFILE64_SOURCE.
+
+This patch is marked as inappropriate as debugedit obviously only
+wants to support glibc or some glibc compatible libcs. We can see
+this from the error() usage. And this patch is only for musl.
+
+Upstream-Status: Inappropriate [OE Specific]
+
+Signed-off-by: Chen Qi <Qi.Chen@windriver.com>
+---
+ tools/sepdebugcrcfix.c | 6 +++---
+ 1 file changed, 3 insertions(+), 3 deletions(-)
+
+diff --git a/tools/sepdebugcrcfix.c b/tools/sepdebugcrcfix.c
+index c4a9d56..882e5f5 100644
+--- a/tools/sepdebugcrcfix.c
++++ b/tools/sepdebugcrcfix.c
+@@ -144,7 +144,7 @@ crc32 (const char *fname, const char *base_fname, uint32_t *crcp)
+ error (0, errno, _("cannot open \"%s\""), debugname);
+ return false;
+ }
+- off64_t size = lseek64 (fd, 0, SEEK_END);
++ off_t size = lseek (fd, 0, SEEK_END);
+ if (size == -1)
+ {
+ error (0, errno, _("cannot get size of \"%s\""), debugname);
+@@ -289,7 +289,7 @@ process (Elf *elf, int fd, const char *fname)
+ return true;
+ }
+ updated_count++;
+- off64_t seekto = (shdr->sh_offset + data->d_off
++ off_t seekto = (shdr->sh_offset + data->d_off
+ + (crcp - (const uint8_t *) data->d_buf));
+ uint32_t crc_targetendian = (ehdr->e_ident[EI_DATA] == ELFDATA2LSB
+ ? htole32 (crc) : htobe32 (crc));
+@@ -361,7 +361,7 @@ main (int argc, char **argv)
+ error (0, errno, _("cannot chmod \"%s\" to make sure we can read and write"), fname);
+
+ bool failed = false;
+- int fd = open64 (fname, O_RDWR);
++ int fd = open (fname, O_RDWR);
+ if (fd == -1)
+ {
+ error (0, errno, _("cannot open \"%s\""), fname);
+--
+2.17.1
+
diff --git a/meta/recipes-devtools/debugedit/files/0003-Makefile.am-do-not-update-manual.patch b/meta/recipes-devtools/debugedit/files/0003-Makefile.am-do-not-update-manual.patch
new file mode 100644
index 0000000000..de467f5365
--- /dev/null
+++ b/meta/recipes-devtools/debugedit/files/0003-Makefile.am-do-not-update-manual.patch
@@ -0,0 +1,65 @@
+From 4f0d7d2f4900ce8555e09854dc681278b7a3d9a9 Mon Sep 17 00:00:00 2001
+From: Chen Qi <Qi.Chen@windriver.com>
+Date: Thu, 23 Mar 2023 13:09:23 +0800
+Subject: [PATCH 3/3] Makefile.am: do not update manual
+
+The tarball ships these manuals, no need to re-generate them.
+We have local patches for debugedit.c and sepdebugcrcfix.c,
+this will triger re-generation of the manuals, which causes
+error of missing help2man.
+
+This is an OE specific patch. If we don't have local patches
+patching debugedit.c and sepdebugcrcfix.c, this patch is also
+not needed.
+
+Upstream-Status: Inappropriate [OE Specific]
+
+Signed-off-by: Chen Qi <Qi.Chen@windriver.com>
+---
+ Makefile.am | 30 ------------------------------
+ 1 file changed, 30 deletions(-)
+
+diff --git a/Makefile.am b/Makefile.am
+index 98b2f20..f91deea 100644
+--- a/Makefile.am
++++ b/Makefile.am
+@@ -52,36 +52,6 @@ sepdebugcrcfix_LDADD = @LIBELF_LIBS@
+ # Manual pages are generated for dist
+ dist_man_MANS = debugedit.1 sepdebugcrcfix.1 find-debuginfo.1
+
+-# The 'case' ensures the man pages are only generated if the corresponding
+-# source script (the first prerequisite) or configure.ac (for the version)
+-# has been changed. The executable prerequisite is solely meant to force
+-# these docs to be made only after the executable has been compiled.
+-# This makes sure help2man is not normally necessary (since the generated
+-# man pages are distributed).
+-debugedit.1: tools/debugedit.c configure.ac debugedit$(EXEEXT)
+- @case '$?' in \
+- *$<* | *configure.ac* ) $(HELP2MAN) -N --output=$@ \
+- --name='debug source path manipulation tool' \
+- ./debugedit$(EXEEXT) ;; \
+- * ) : ;; \
+- esac
+-
+-sepdebugcrcfix.1: tools/sepdebugcrcfix.c configure.ac sepdebugcrcfix$(EXEEXT)
+- @case '$?' in \
+- *$<* | *configure.ac* ) $(HELP2MAN) -N --output=$@ \
+- --name='fixes CRC for separate .debug files' \
+- ./sepdebugcrcfix$(EXEEXT) ;;\
+- * ) : ;; \
+- esac
+-
+-find-debuginfo.1: $(top_srcdir)/scripts/find-debuginfo.in configure.ac find-debuginfo
+- @case '$?' in \
+- *$<* | *configure.ac* ) $(HELP2MAN) -N --output=$@ \
+- --name='finds debuginfo and processes it' \
+- ./find-debuginfo ;;\
+- * ) : ;; \
+- esac
+-
+ noinst_HEADERS= tools/ansidecl.h \
+ tools/hashtab.h \
+ tools/md5.h \
+--
+2.17.1
+
diff --git a/meta/recipes-devtools/desktop-file-utils/desktop-file-utils_0.26.bb b/meta/recipes-devtools/desktop-file-utils/desktop-file-utils_0.26.bb
deleted file mode 100644
index f6d1146e89..0000000000
--- a/meta/recipes-devtools/desktop-file-utils/desktop-file-utils_0.26.bb
+++ /dev/null
@@ -1,25 +0,0 @@
-SUMMARY = "Command line utilities for working with *.desktop files"
-DESCRIPTION = "desktop-file-utils contains a few command line utilities for working with desktop entries"
-HOMEPAGE = "http://www.freedesktop.org/wiki/Software/desktop-file-utils"
-SECTION = "console/utils"
-LICENSE = "GPL-2.0-or-later"
-
-LIC_FILES_CHKSUM = "file://COPYING;md5=b234ee4d69f5fce4486a80fdaf4a4263 \
- file://src/validator.c;beginline=4;endline=27;md5=281e1114ee6c486a1a0a4295986b9416"
-
-SRC_URI = "http://freedesktop.org/software/${BPN}/releases/${BP}.tar.xz"
-SRC_URI[md5sum] = "29739e005f5887cf41639b8450f3c23f"
-SRC_URI[sha256sum] = "b26dbde79ea72c8c84fb7f9d870ffd857381d049a86d25e0038c4cef4c747309"
-
-DEPENDS = "glib-2.0"
-
-inherit autotools pkgconfig
-
-EXTRA_OECONF += "ac_cv_prog_EMACS=no"
-
-BBCLASSEXTEND = "native nativesdk"
-
-do_install:append() {
- rm -rf ${D}${datadir}/emacs
-}
-
diff --git a/meta/recipes-devtools/desktop-file-utils/desktop-file-utils_0.27.bb b/meta/recipes-devtools/desktop-file-utils/desktop-file-utils_0.27.bb
new file mode 100644
index 0000000000..55c88afcc9
--- /dev/null
+++ b/meta/recipes-devtools/desktop-file-utils/desktop-file-utils_0.27.bb
@@ -0,0 +1,21 @@
+SUMMARY = "Command line utilities for working with *.desktop files"
+DESCRIPTION = "desktop-file-utils contains a few command line utilities for working with desktop entries"
+HOMEPAGE = "http://www.freedesktop.org/wiki/Software/desktop-file-utils"
+SECTION = "console/utils"
+LICENSE = "GPL-2.0-or-later"
+
+LIC_FILES_CHKSUM = "file://COPYING;md5=b234ee4d69f5fce4486a80fdaf4a4263 \
+ file://src/validator.c;beginline=4;endline=27;md5=281e1114ee6c486a1a0a4295986b9416"
+
+SRC_URI = "http://freedesktop.org/software/${BPN}/releases/${BP}.tar.xz"
+SRC_URI[sha256sum] = "a0817df39ce385b6621880407c56f1f298168c040c2032cedf88d5b76affe836"
+
+DEPENDS = "glib-2.0"
+
+inherit meson pkgconfig
+
+BBCLASSEXTEND = "native nativesdk"
+
+do_install:append() {
+ rm -rf ${D}${datadir}/emacs
+}
diff --git a/meta/recipes-devtools/devel-config/nfs-export-root.bb b/meta/recipes-devtools/devel-config/nfs-export-root.bb
index 0aaec36b41..5e69962d7c 100644
--- a/meta/recipes-devtools/devel-config/nfs-export-root.bb
+++ b/meta/recipes-devtools/devel-config/nfs-export-root.bb
@@ -3,7 +3,6 @@ DESCRIPTION = "Enables NFS access from any host to the entire filesystem (for de
LICENSE = "MIT"
LIC_FILES_CHKSUM = "file://${COREBASE}/meta/COPYING.MIT;md5=3da9cfbcb788c80a0384361b4de20420"
-PR = "r1"
SRC_URI = "file://exports"
diff --git a/meta/recipes-devtools/diffstat/diffstat/avoid-check-user-break-cc.patch b/meta/recipes-devtools/diffstat/diffstat/avoid-check-user-break-cc.patch
index 811faa48da..46f1cfabcf 100644
--- a/meta/recipes-devtools/diffstat/diffstat/avoid-check-user-break-cc.patch
+++ b/meta/recipes-devtools/diffstat/diffstat/avoid-check-user-break-cc.patch
@@ -1,4 +1,4 @@
-From 1046593aacb74ff888a0d68c0ff89b20c4bc9ed4 Mon Sep 17 00:00:00 2001
+From 708c765ffb41ecc734316f69043583312fe444bd Mon Sep 17 00:00:00 2001
From: Kai Kang <kai.kang@windriver.com>
Date: Tue, 15 May 2018 17:17:01 +0800
Subject: [PATCH] diffstat: fix wrong evaluation of CC
@@ -18,10 +18,10 @@ Signed-off-by: Kai Kang <kai.kang@windriver.com>
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/aclocal.m4 b/aclocal.m4
-index 9281aa8..dd57b7a 100644
+index d4bd283..1b653c4 100644
--- a/aclocal.m4
+++ b/aclocal.m4
-@@ -1239,7 +1239,7 @@ CF_GCC_VERSION
+@@ -1522,7 +1522,7 @@ CF_GCC_VERSION
CF_ACVERSION_CHECK(2.52,
[AC_PROG_CC_STDC],
[CF_ANSI_CC_REQD])
diff --git a/meta/recipes-devtools/diffstat/diffstat_1.64.bb b/meta/recipes-devtools/diffstat/diffstat_1.64.bb
deleted file mode 100644
index 717f503039..0000000000
--- a/meta/recipes-devtools/diffstat/diffstat_1.64.bb
+++ /dev/null
@@ -1,27 +0,0 @@
-SUMMARY = "Tool to produce a statistics based on a diff"
-DESCRIPTION = "diffstat reads the output of diff and displays a histogram of \
-the insertions, deletions, and modifications per-file. It is useful for \
-reviewing large, complex patch files."
-HOMEPAGE = "http://invisible-island.net/diffstat/"
-SECTION = "devel"
-LICENSE = "MIT"
-LIC_FILES_CHKSUM = "file://COPYING;md5=f605b1986cc3b808ec0e4fa9d0e0f2d9"
-
-SRC_URI = "http://invisible-mirror.net/archives/${BPN}/${BP}.tgz \
- file://run-ptest \
- file://avoid-check-user-break-cc.patch \
- "
-
-SRC_URI[sha256sum] = "b8aee38d9d2e1d05926e6b55810a9d2c2dd407f24d6a267387563a4436e3f7fc"
-
-inherit autotools gettext ptest
-
-EXTRA_AUTORECONF += "--exclude=aclocal"
-
-LDFLAGS += "${TOOLCHAIN_OPTIONS}"
-
-do_install_ptest() {
- cp -r ${S}/testing ${D}${PTEST_PATH}
-}
-
-BBCLASSEXTEND = "nativesdk"
diff --git a/meta/recipes-devtools/diffstat/diffstat_1.66.bb b/meta/recipes-devtools/diffstat/diffstat_1.66.bb
new file mode 100644
index 0000000000..d59379fd2b
--- /dev/null
+++ b/meta/recipes-devtools/diffstat/diffstat_1.66.bb
@@ -0,0 +1,27 @@
+SUMMARY = "Tool to produce a statistics based on a diff"
+DESCRIPTION = "diffstat reads the output of diff and displays a histogram of \
+the insertions, deletions, and modifications per-file. It is useful for \
+reviewing large, complex patch files."
+HOMEPAGE = "http://invisible-island.net/diffstat/"
+SECTION = "devel"
+LICENSE = "MIT"
+LIC_FILES_CHKSUM = "file://COPYING;md5=5713b4719a66a6527e6301e8f8745877"
+
+SRC_URI = "http://invisible-mirror.net/archives/${BPN}/${BP}.tgz \
+ file://run-ptest \
+ file://avoid-check-user-break-cc.patch \
+ "
+
+SRC_URI[sha256sum] = "f54531bbe32e8e0fa461f018b41e3af516b632080172f361f05e50367ecbb69e"
+
+inherit autotools gettext ptest
+
+EXTRA_AUTORECONF += "--exclude=aclocal"
+
+LDFLAGS += "${TOOLCHAIN_OPTIONS}"
+
+do_install_ptest() {
+ cp -r ${S}/testing ${D}${PTEST_PATH}
+}
+
+BBCLASSEXTEND = "nativesdk"
diff --git a/meta/recipes-devtools/dmidecode/dmidecode/0001-Committing-changes-from-do_unpack_extra.patch b/meta/recipes-devtools/dmidecode/dmidecode/0001-Committing-changes-from-do_unpack_extra.patch
deleted file mode 100644
index d082459ca9..0000000000
--- a/meta/recipes-devtools/dmidecode/dmidecode/0001-Committing-changes-from-do_unpack_extra.patch
+++ /dev/null
@@ -1,25 +0,0 @@
-From 2c0437f47eaaf565aaccf0c0d150d5fc0fc734f5 Mon Sep 17 00:00:00 2001
-From: OpenEmbedded <oe.patch@oe>
-Date: Thu, 22 Nov 2018 12:05:04 +0000
-Subject: [PATCH] Do not install to /usr/local
-
-Upstream-Status: Inappropriate [oe-core specific]
-Signed-off-by: Alexander Kanavin <alex.kanavin@gmail.com>
-
----
- Makefile | 2 +-
- 1 file changed, 1 insertion(+), 1 deletion(-)
-
-diff --git a/Makefile b/Makefile
-index 7aa729d..e079b18 100644
---- a/Makefile
-+++ b/Makefile
-@@ -31,7 +31,7 @@ CFLAGS += -D_FILE_OFFSET_BITS=64
- LDFLAGS ?=
-
- DESTDIR =
--prefix = /usr/local
-+prefix = /usr
- sbindir = $(prefix)/sbin
- mandir = $(prefix)/share/man
- man8dir = $(mandir)/man8
diff --git a/meta/recipes-devtools/dmidecode/dmidecode_3.3.bb b/meta/recipes-devtools/dmidecode/dmidecode_3.3.bb
deleted file mode 100644
index 23540b2703..0000000000
--- a/meta/recipes-devtools/dmidecode/dmidecode_3.3.bb
+++ /dev/null
@@ -1,24 +0,0 @@
-SUMMARY = "DMI (Desktop Management Interface) table related utilities"
-HOMEPAGE = "http://www.nongnu.org/dmidecode/"
-DESCRIPTION = "Dmidecode reports information about your system's hardware as described in your system BIOS according to the SMBIOS/DMI standard (see a sample output)."
-LICENSE = "GPL-2.0-only"
-LIC_FILES_CHKSUM = "file://LICENSE;md5=b234ee4d69f5fce4486a80fdaf4a4263"
-
-SRC_URI = "${SAVANNAH_NONGNU_MIRROR}/dmidecode/${BP}.tar.xz \
- file://0001-Committing-changes-from-do_unpack_extra.patch \
- "
-
-COMPATIBLE_HOST = "(i.86|x86_64|aarch64|arm|powerpc|powerpc64).*-linux"
-
-EXTRA_OEMAKE = "-e MAKEFLAGS="
-
-# The upstream buildsystem uses 'docdir' as the path where it puts AUTHORS,
-# README, etc, but we don't want those in the root of our docdir.
-docdir .= "/${BPN}"
-
-do_install() {
- oe_runmake DESTDIR="${D}" install
-}
-
-SRC_URI[sha256sum] = "82c737a780614c38a783e8055340d295e332fb12c7f418b5d21a0797d3fb1455"
-
diff --git a/meta/recipes-devtools/dmidecode/dmidecode_3.5.bb b/meta/recipes-devtools/dmidecode/dmidecode_3.5.bb
new file mode 100644
index 0000000000..3e2bb6e30b
--- /dev/null
+++ b/meta/recipes-devtools/dmidecode/dmidecode_3.5.bb
@@ -0,0 +1,21 @@
+SUMMARY = "DMI (Desktop Management Interface) table related utilities"
+HOMEPAGE = "http://www.nongnu.org/dmidecode/"
+DESCRIPTION = "Dmidecode reports information about your system's hardware as described in your system BIOS according to the SMBIOS/DMI standard (see a sample output)."
+LICENSE = "GPL-2.0-only"
+LIC_FILES_CHKSUM = "file://LICENSE;md5=b234ee4d69f5fce4486a80fdaf4a4263"
+
+SRC_URI = "${SAVANNAH_NONGNU_MIRROR}/dmidecode/${BP}.tar.xz"
+
+COMPATIBLE_HOST = "(i.86|x86_64|aarch64|arm|powerpc|powerpc64).*-linux"
+
+do_install() {
+ oe_runmake \
+ DESTDIR="${D}" \
+ prefix="${prefix}" \
+ sbindir="${sbindir}" \
+ docdir="${docdir}/${BPN}" \
+ mandir="${mandir}" \
+ install
+}
+
+SRC_URI[sha256sum] = "79d76735ee8e25196e2a722964cf9683f5a09581503537884b256b01389cc073"
diff --git a/meta/recipes-devtools/dnf/dnf/0001-Corretly-install-tmpfiles.d-configuration.patch b/meta/recipes-devtools/dnf/dnf/0001-Corretly-install-tmpfiles.d-configuration.patch
index 6692b41a16..fd942228b9 100644
--- a/meta/recipes-devtools/dnf/dnf/0001-Corretly-install-tmpfiles.d-configuration.patch
+++ b/meta/recipes-devtools/dnf/dnf/0001-Corretly-install-tmpfiles.d-configuration.patch
@@ -1,11 +1,10 @@
-From 05e059cd4e9910c00b32d377f4f98e3c8dde6bc6 Mon Sep 17 00:00:00 2001
+From f70eb308c837f2c944e23bb680a501a605004d65 Mon Sep 17 00:00:00 2001
From: Alexander Kanavin <alex.kanavin@gmail.com>
Date: Thu, 26 Jan 2017 16:36:20 +0200
-Subject: [PATCH 4/5] Corretly install tmpfiles.d configuration
+Subject: [PATCH] Corretly install tmpfiles.d configuration
Upstream-Status: Inappropriate [oe-core specific]
Signed-off-by: Alexander Kanavin <alex.kanavin@gmail.com>
-
---
etc/tmpfiles.d/CMakeLists.txt | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
@@ -17,6 +16,3 @@ index f69c773e..3eb6d0e8 100644
@@ -1 +1 @@
-INSTALL (FILES dnf.conf DESTINATION /usr/lib/tmpfiles.d/)
+INSTALL (FILES dnf.conf DESTINATION ${SYSCONFDIR}/tmpfiles.d/)
---
-2.14.2
-
diff --git a/meta/recipes-devtools/dnf/dnf/0001-Do-not-hardcode-etc-and-systemd-unit-directories.patch b/meta/recipes-devtools/dnf/dnf/0001-Do-not-hardcode-etc-and-systemd-unit-directories.patch
index cb09f17a56..37359a5765 100644
--- a/meta/recipes-devtools/dnf/dnf/0001-Do-not-hardcode-etc-and-systemd-unit-directories.patch
+++ b/meta/recipes-devtools/dnf/dnf/0001-Do-not-hardcode-etc-and-systemd-unit-directories.patch
@@ -1,17 +1,16 @@
-From ffb7942cc73c3d2c6aa7c689cdaf9bc0bcbc144b Mon Sep 17 00:00:00 2001
+From 3ca6d14fbc6419ff6239b4ba16f77da20fb31d03 Mon Sep 17 00:00:00 2001
From: Alexander Kanavin <alex.kanavin@gmail.com>
Date: Thu, 26 Jan 2017 16:25:47 +0200
Subject: [PATCH] Do not hardcode /etc and systemd unit directories
Upstream-Status: Inappropriate [oe-core specific]
Signed-off-by: Alexander Kanavin <alex.kanavin@gmail.com>
-
---
CMakeLists.txt | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/CMakeLists.txt b/CMakeLists.txt
-index 73552967..38cf4fbf 100644
+index 4aee99fb..9e2e9e9e 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -3,8 +3,8 @@ CMAKE_MINIMUM_REQUIRED (VERSION 2.4)
diff --git a/meta/recipes-devtools/dnf/dnf/0001-dnf-write-the-log-lock-to-root.patch b/meta/recipes-devtools/dnf/dnf/0001-dnf-write-the-log-lock-to-root.patch
index 21b50dee01..18f9a30949 100644
--- a/meta/recipes-devtools/dnf/dnf/0001-dnf-write-the-log-lock-to-root.patch
+++ b/meta/recipes-devtools/dnf/dnf/0001-dnf-write-the-log-lock-to-root.patch
@@ -1,4 +1,4 @@
-From 5e07c16a506b19cbb107d5e99fca41d679b23b9a Mon Sep 17 00:00:00 2001
+From 049e2832284ab883e185d9020c881518a68e6c38 Mon Sep 17 00:00:00 2001
From: Alexander Kanavin <alex.kanavin@gmail.com>
Date: Tue, 28 Apr 2020 15:55:00 +0200
Subject: [PATCH] dnf: write the log lock to root
@@ -15,10 +15,10 @@ Signed-off-by: Alexander Kanavin <alex.kanavin@gmail.com>
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/dnf/logging.py b/dnf/logging.py
-index bd660470..a9d808b1 100644
+index ef0b25f3..94610af6 100644
--- a/dnf/logging.py
+++ b/dnf/logging.py
-@@ -94,7 +94,7 @@ class MultiprocessRotatingFileHandler(logging.handlers.RotatingFileHandler):
+@@ -118,7 +118,7 @@ class MultiprocessRotatingFileHandler(logging.handlers.RotatingFileHandler):
def __init__(self, filename, mode='a', maxBytes=0, backupCount=0, encoding=None, delay=False):
super(MultiprocessRotatingFileHandler, self).__init__(
filename, mode, maxBytes, backupCount, encoding, delay)
diff --git a/meta/recipes-devtools/dnf/dnf/0001-lock.py-fix-Exception-handling.patch b/meta/recipes-devtools/dnf/dnf/0001-lock.py-fix-Exception-handling.patch
new file mode 100644
index 0000000000..6bffe9af0a
--- /dev/null
+++ b/meta/recipes-devtools/dnf/dnf/0001-lock.py-fix-Exception-handling.patch
@@ -0,0 +1,62 @@
+From 3881757eabfde2ff54400ab127b106ab085d83f0 Mon Sep 17 00:00:00 2001
+From: Changqing Li <changqing.li@windriver.com>
+Date: Wed, 13 Mar 2024 11:22:05 +0800
+Subject: [PATCH] lock.py: fix Exception handling
+
+Before, when logdir is not writable, _try_lock will raise an Exception
+like "Permission denied: '/var/log/log_lock.pid'", and in this case,
+_unlock_thread will not be called and the variable count will not be
+handled, it maybe cause log_lock.pid not be deleted in case like [1].
+
+For [1], it is an cross compile case, when dnf install some packages to
+rootfs, seems like some threads don't do chroot like work, some threads
+do chroot like work. so for the threads don't do chroot, "Permission denied"
+Exception happend, for the threads that do chroot, log_lock.pid will be
+created under installroot/var/log/log_lock.pid, since variable count not
+handled correct before, log_lock.pid may not be deleted correctly.
+
+So fixed like this, if _try_lock raise Exception, _unlock_thread first,
+then raise the Exception.
+
+[1] https://github.com/rpm-software-management/dnf/issues/1963
+
+Upstream-Status: Submitted [ https://github.com/rpm-software-management/dnf/pull/2065 ]
+
+Signed-off-by: Changqing Li <changqing.li@windriver.com>
+---
+ dnf/lock.py | 12 ++++++++++--
+ 1 file changed, 10 insertions(+), 2 deletions(-)
+
+diff --git a/dnf/lock.py b/dnf/lock.py
+index 6817aac9..5718062a 100644
+--- a/dnf/lock.py
++++ b/dnf/lock.py
+@@ -128,7 +128,11 @@ class ProcessLock(object):
+ self._lock_thread()
+ prev_pid = -1
+ my_pid = os.getpid()
+- pid = self._try_lock(my_pid)
++ try:
++ pid = self._try_lock(my_pid)
++ except Exception:
++ self._unlock_thread()
++ raise
+ while pid != my_pid:
+ if pid != -1:
+ if not self.blocking:
+@@ -140,7 +144,11 @@ class ProcessLock(object):
+ logger.info(msg)
+ prev_pid = pid
+ time.sleep(1)
+- pid = self._try_lock(my_pid)
++ try:
++ pid = self._try_lock(my_pid)
++ except Exception:
++ self._unlock_thread()
++ raise
+
+ def __exit__(self, *exc_args):
+ if self.count == 1:
+--
+2.25.1
+
diff --git a/meta/recipes-devtools/dnf/dnf/0001-set-python-path-for-completion_helper.patch b/meta/recipes-devtools/dnf/dnf/0001-set-python-path-for-completion_helper.patch
index 448f6408bc..fcd970a7ae 100644
--- a/meta/recipes-devtools/dnf/dnf/0001-set-python-path-for-completion_helper.patch
+++ b/meta/recipes-devtools/dnf/dnf/0001-set-python-path-for-completion_helper.patch
@@ -1,4 +1,4 @@
-From 7e79b3b67fd5cecd7380e7e365fd88eca63b5bfa Mon Sep 17 00:00:00 2001
+From fa32c7dcabaa3c00d3620a3266e49629365c0cbe Mon Sep 17 00:00:00 2001
From: Jeremy Puhlman <jpuhlman@mvista.com>
Date: Wed, 11 Mar 2020 22:10:02 +0000
Subject: [PATCH] set python path for completion_helper
@@ -10,7 +10,7 @@ Signed-off-by: Jeremy Puhlman <jpuhlman@mvista.com>
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/dnf/cli/completion_helper.py.in b/dnf/cli/completion_helper.py.in
-index 351226759..2835cd3b6 100644
+index 0da0f2a2..9330d15b 100644
--- a/dnf/cli/completion_helper.py.in
+++ b/dnf/cli/completion_helper.py.in
@@ -1,4 +1,4 @@
@@ -19,6 +19,3 @@ index 351226759..2835cd3b6 100644
#
# This file is part of dnf.
#
---
-2.23.0
-
diff --git a/meta/recipes-devtools/dnf/dnf/0005-Do-not-prepend-installroot-to-logdir.patch b/meta/recipes-devtools/dnf/dnf/0005-Do-not-prepend-installroot-to-logdir.patch
index aa20009cef..cfbda11f77 100644
--- a/meta/recipes-devtools/dnf/dnf/0005-Do-not-prepend-installroot-to-logdir.patch
+++ b/meta/recipes-devtools/dnf/dnf/0005-Do-not-prepend-installroot-to-logdir.patch
@@ -1,7 +1,7 @@
-From 6365389074a1b86962f3d8b22a2ead2202026a98 Mon Sep 17 00:00:00 2001
+From c6d03b51e2098fc681e6811790bd5dc6597091eb Mon Sep 17 00:00:00 2001
From: Alexander Kanavin <alex.kanavin@gmail.com>
Date: Wed, 11 Jan 2017 15:10:13 +0200
-Subject: [PATCH 5/5] Do not prepend installroot to logdir.
+Subject: [PATCH] Do not prepend installroot to logdir.
This would otherwise write the logs into rootfs/var/log
(whereas we want them in $T),
@@ -14,10 +14,10 @@ Signed-off-by: Alexander Kanavin <alex.kanavin@gmail.com>
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/dnf/cli/cli.py b/dnf/cli/cli.py
-index d2d9c082..82270ecf 100644
+index 1824bd00..4dcb1c1c 100644
--- a/dnf/cli/cli.py
+++ b/dnf/cli/cli.py
-@@ -920,7 +920,7 @@ class Cli(object):
+@@ -944,7 +944,7 @@ class Cli(object):
logger.warning(_("Unable to detect release version (use '--releasever' to specify "
"release version)"))
@@ -26,6 +26,3 @@ index d2d9c082..82270ecf 100644
conf.prepend_installroot(opt)
self.base._logging._setup_from_dnf_conf(conf)
---
-2.14.2
-
diff --git a/meta/recipes-devtools/dnf/dnf/0029-Do-not-set-PYTHON_INSTALL_DIR-by-running-python.patch b/meta/recipes-devtools/dnf/dnf/0029-Do-not-set-PYTHON_INSTALL_DIR-by-running-python.patch
index 5cffc9dce1..64ea7874d3 100644
--- a/meta/recipes-devtools/dnf/dnf/0029-Do-not-set-PYTHON_INSTALL_DIR-by-running-python.patch
+++ b/meta/recipes-devtools/dnf/dnf/0029-Do-not-set-PYTHON_INSTALL_DIR-by-running-python.patch
@@ -1,11 +1,10 @@
-From ceb1043c4f3de095e36908232320b74a4128a94e Mon Sep 17 00:00:00 2001
+From d5b154ea69afdcd862299a0b7f255f6ece3686c6 Mon Sep 17 00:00:00 2001
From: Alexander Kanavin <alex.kanavin@gmail.com>
Date: Fri, 30 Dec 2016 18:29:07 +0200
Subject: [PATCH] Do not set PYTHON_INSTALL_DIR by running python
Upstream-Status: Inappropriate [oe-core specific]
Signed-off-by: Alexander Kanavin <alex.kanavin@gmail.com>
-
---
CMakeLists.txt | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/meta/recipes-devtools/dnf/dnf/0030-Run-python-scripts-using-env.patch b/meta/recipes-devtools/dnf/dnf/0030-Run-python-scripts-using-env.patch
index eedbb5723f..eb46113f3a 100644
--- a/meta/recipes-devtools/dnf/dnf/0030-Run-python-scripts-using-env.patch
+++ b/meta/recipes-devtools/dnf/dnf/0030-Run-python-scripts-using-env.patch
@@ -1,20 +1,19 @@
-From 8d97b72a1d77149e2f9048d1ca6cef66da1a8aa5 Mon Sep 17 00:00:00 2001
+From d3556767b84f3687743fdad0a88af0739d736ea9 Mon Sep 17 00:00:00 2001
From: Alexander Kanavin <alex.kanavin@gmail.com>
Date: Fri, 30 Dec 2016 18:29:37 +0200
-Subject: [PATCH 2/5] Run python scripts using env
+Subject: [PATCH] Run python scripts using env
Otherwise the build tools hardcode the python path into them.
Upstream-Status: Inappropriate [oe-core specific]
Signed-off-by: Alexander Kanavin <alex.kanavin@gmail.com>
-
---
bin/dnf-automatic.in | 2 +-
bin/dnf.in | 2 +-
2 files changed, 2 insertions(+), 2 deletions(-)
diff --git a/bin/dnf-automatic.in b/bin/dnf-automatic.in
-index 5b06aa26..891b4c24 100755
+index 17e35a05..28827e38 100755
--- a/bin/dnf-automatic.in
+++ b/bin/dnf-automatic.in
@@ -1,4 +1,4 @@
@@ -24,7 +23,7 @@ index 5b06aa26..891b4c24 100755
#
# Copyright (C) 2014-2016 Red Hat, Inc.
diff --git a/bin/dnf.in b/bin/dnf.in
-index 645d0f06..bdf7b3c4 100755
+index 55ceb3f2..e38973c7 100755
--- a/bin/dnf.in
+++ b/bin/dnf.in
@@ -1,4 +1,4 @@
@@ -33,6 +32,3 @@ index 645d0f06..bdf7b3c4 100755
# The dnf executable script.
#
# Copyright (C) 2012-2016 Red Hat, Inc.
---
-2.14.2
-
diff --git a/meta/recipes-devtools/dnf/dnf_4.12.0.bb b/meta/recipes-devtools/dnf/dnf_4.12.0.bb
deleted file mode 100644
index d621e72d4e..0000000000
--- a/meta/recipes-devtools/dnf/dnf_4.12.0.bb
+++ /dev/null
@@ -1,90 +0,0 @@
-SUMMARY = "Package manager forked from Yum, using libsolv as a dependency resolver"
-DESCRIPTION = "Software package manager that installs, updates, and removes \
-packages on RPM-based Linux distributions. It automatically computes \
-dependencies and determines the actions required to install packages."
-HOMEPAGE = "https://github.com/rpm-software-management/dnf"
-LICENSE = "GPL-2.0-only"
-LIC_FILES_CHKSUM = "file://COPYING;md5=b234ee4d69f5fce4486a80fdaf4a4263 \
- file://PACKAGE-LICENSING;md5=4a0548e303dbc77f067335b4d688e745 \
- "
-
-SRC_URI = "git://github.com/rpm-software-management/dnf.git;branch=master;protocol=https \
- file://0001-Corretly-install-tmpfiles.d-configuration.patch \
- file://0001-Do-not-hardcode-etc-and-systemd-unit-directories.patch \
- file://0005-Do-not-prepend-installroot-to-logdir.patch \
- file://0029-Do-not-set-PYTHON_INSTALL_DIR-by-running-python.patch \
- file://0030-Run-python-scripts-using-env.patch \
- file://0001-set-python-path-for-completion_helper.patch \
- file://0001-dnf-write-the-log-lock-to-root.patch \
- "
-
-SRCREV = "956b5c74bab2affde27f404e7aee98c10818b188"
-UPSTREAM_CHECK_GITTAGREGEX = "(?P<pver>\d+(\.\d+)+)"
-
-S = "${WORKDIR}/git"
-
-inherit cmake gettext bash-completion setuptools3-base systemd
-
-DEPENDS += "libdnf librepo libcomps python3-iniparse"
-
-# manpages generation requires http://www.sphinx-doc.org/
-EXTRA_OECMAKE = " -DWITH_MAN=0 -DPYTHON_INSTALL_DIR=${PYTHON_SITEPACKAGES_DIR} -DPYTHON_DESIRED=3"
-
-BBCLASSEXTEND = "native nativesdk"
-
-RDEPENDS:${PN} += " \
- python3-core \
- python3-codecs \
- python3-netclient \
- python3-email \
- python3-threading \
- python3-logging \
- python3-fcntl \
- librepo \
- python3-shell \
- libcomps \
- libdnf \
- python3-sqlite3 \
- python3-compression \
- python3-rpm \
- python3-iniparse \
- python3-json \
- python3-curses \
- python3-misc \
- python3-gpg \
- "
-
-RDEPENDS:${PN}:class-native = ""
-
-RRECOMMENDS:${PN}:class-target += "gnupg"
-
-# Create a symlink called 'dnf' as 'make install' does not do it, but
-# .spec file in dnf source tree does (and then Fedora and dnf documentation
-# says that dnf binary is plain 'dnf').
-do_install:append() {
- ln -rs ${D}/${bindir}/dnf-3 ${D}/${bindir}/dnf
- ln -rs ${D}/${bindir}/dnf-automatic-3 ${D}/${bindir}/dnf-automatic
-}
-
-# Direct dnf-native to read rpm configuration from our sysroot, not the one it was compiled in
-do_install:append:class-native() {
- create_wrapper ${D}/${bindir}/dnf \
- RPM_CONFIGDIR=${STAGING_LIBDIR_NATIVE}/rpm \
- RPM_NO_CHROOT_FOR_SCRIPTS=1
-}
-
-do_install:append:class-nativesdk() {
- create_wrapper ${D}/${bindir}/dnf \
- RPM_CONFIGDIR=${SDKPATHNATIVE}${libdir_nativesdk}/rpm \
- RPM_NO_CHROOT_FOR_SCRIPTS=1
-}
-
-SYSTEMD_SERVICE:${PN} = "dnf-makecache.service dnf-makecache.timer \
- dnf-automatic.service dnf-automatic.timer \
- dnf-automatic-download.service dnf-automatic-download.timer \
- dnf-automatic-install.service dnf-automatic-install.timer \
- dnf-automatic-notifyonly.service dnf-automatic-notifyonly.timer \
-"
-SYSTEMD_AUTO_ENABLE ?= "disable"
-
-SKIP_RECIPE[dnf] ?= "${@bb.utils.contains('PACKAGE_CLASSES', 'package_rpm', '', 'does not build without package_rpm in PACKAGE_CLASSES due disabled rpm support in libsolv', d)}"
diff --git a/meta/recipes-devtools/dnf/dnf_4.19.2.bb b/meta/recipes-devtools/dnf/dnf_4.19.2.bb
new file mode 100644
index 0000000000..cc91dbe400
--- /dev/null
+++ b/meta/recipes-devtools/dnf/dnf_4.19.2.bb
@@ -0,0 +1,97 @@
+SUMMARY = "Package manager forked from Yum, using libsolv as a dependency resolver"
+DESCRIPTION = "Software package manager that installs, updates, and removes \
+packages on RPM-based Linux distributions. It automatically computes \
+dependencies and determines the actions required to install packages."
+HOMEPAGE = "https://github.com/rpm-software-management/dnf"
+LICENSE = "GPL-2.0-only"
+LIC_FILES_CHKSUM = "file://COPYING;md5=b234ee4d69f5fce4486a80fdaf4a4263 \
+ file://PACKAGE-LICENSING;md5=4a0548e303dbc77f067335b4d688e745 \
+ "
+
+SRC_URI = "git://github.com/rpm-software-management/dnf.git;branch=master;protocol=https \
+ file://0001-Corretly-install-tmpfiles.d-configuration.patch \
+ file://0001-Do-not-hardcode-etc-and-systemd-unit-directories.patch \
+ file://0005-Do-not-prepend-installroot-to-logdir.patch \
+ file://0029-Do-not-set-PYTHON_INSTALL_DIR-by-running-python.patch \
+ file://0030-Run-python-scripts-using-env.patch \
+ file://0001-set-python-path-for-completion_helper.patch \
+ file://0001-lock.py-fix-Exception-handling.patch \
+ "
+
+SRC_URI:append:class-native = "file://0001-dnf-write-the-log-lock-to-root.patch"
+
+SRCREV = "9b2b2e8ddab99caba4bc8059cab4263163172e81"
+UPSTREAM_CHECK_GITTAGREGEX = "(?P<pver>\d+(\.\d+)+)"
+
+S = "${WORKDIR}/git"
+
+inherit cmake gettext bash-completion setuptools3-base systemd
+
+DEPENDS += "libdnf librepo libcomps python3-iniparse"
+
+# manpages generation requires http://www.sphinx-doc.org/
+EXTRA_OECMAKE = " -DWITH_MAN=0 -DPYTHON_INSTALL_DIR=${PYTHON_SITEPACKAGES_DIR} -DPYTHON_DESIRED=3"
+
+BBCLASSEXTEND = "native nativesdk"
+
+RDEPENDS:${PN} += " \
+ python3-core \
+ python3-codecs \
+ python3-netclient \
+ python3-email \
+ python3-threading \
+ python3-logging \
+ python3-fcntl \
+ librepo \
+ python3-shell \
+ libcomps \
+ libdnf \
+ python3-sqlite3 \
+ python3-compression \
+ python3-rpm \
+ python3-iniparse \
+ python3-json \
+ python3-curses \
+ python3-misc \
+ "
+
+RDEPENDS:${PN}:class-native = ""
+
+RRECOMMENDS:${PN}:class-target += "gnupg"
+
+# Create a symlink called 'dnf' as 'make install' does not do it, but
+# .spec file in dnf source tree does (and then Fedora and dnf documentation
+# says that dnf binary is plain 'dnf').
+do_install:append() {
+ ln -rs ${D}/${bindir}/dnf-3 ${D}/${bindir}/dnf
+ ln -rs ${D}/${bindir}/dnf-automatic-3 ${D}/${bindir}/dnf-automatic
+}
+
+# Direct dnf-native to read rpm configuration from our sysroot, not the one it was compiled in
+do_install:append:class-native() {
+ create_wrapper ${D}/${bindir}/dnf \
+ RPM_CONFIGDIR=${STAGING_LIBDIR_NATIVE}/rpm \
+ RPM_NO_CHROOT_FOR_SCRIPTS=1
+}
+
+do_install:append:class-nativesdk() {
+ create_wrapper ${D}/${bindir}/dnf \
+ RPM_CONFIGDIR=${SDKPATHNATIVE}${libdir_nativesdk}/rpm \
+ RPM_NO_CHROOT_FOR_SCRIPTS=1
+}
+
+SYSTEMD_SERVICE:${PN} = "dnf-makecache.service dnf-makecache.timer \
+ dnf-automatic.service dnf-automatic.timer \
+ dnf-automatic-download.service dnf-automatic-download.timer \
+ dnf-automatic-install.service dnf-automatic-install.timer \
+ dnf-automatic-notifyonly.service dnf-automatic-notifyonly.timer \
+"
+SYSTEMD_AUTO_ENABLE ?= "disable"
+
+SKIP_RECIPE[dnf] ?= "${@bb.utils.contains('PACKAGE_CLASSES', 'package_rpm', '', 'does not build without package_rpm in PACKAGE_CLASSES due disabled rpm support in libsolv', d)}"
+
+# Packages for testing purposes
+PACKAGES += "${PN}-test-main ${PN}-test-dep"
+ALLOW_EMPTY:${PN}-test-main = "1"
+ALLOW_EMPTY:${PN}-test-dep = "1"
+RRECOMMENDS:${PN}-test-main = "${PN}-test-dep"
diff --git a/meta/recipes-devtools/docbook-xml/docbook-xml-dtd4/docbook-xml-update-catalog.xml.patch b/meta/recipes-devtools/docbook-xml/docbook-xml-dtd4/docbook-xml-update-catalog.xml.patch
index c1385631d7..1f3364900c 100644
--- a/meta/recipes-devtools/docbook-xml/docbook-xml-dtd4/docbook-xml-update-catalog.xml.patch
+++ b/meta/recipes-devtools/docbook-xml/docbook-xml-dtd4/docbook-xml-update-catalog.xml.patch
@@ -1,97 +1,12 @@
+21/3/2023: modified to remove 4.0
+
docbook-xml: update catalog.xml
Refer Ubuntu 13.04 to update catalog.xml
Upstream-Status: Inappropriate [docbook 4.x development has ceased, docbook 5 is entirely different]
Signed-off-by: Hongxu Jia <hongxu.jia@windriver.com>
----
- docbook-4.0/catalog.xml | 72 +++++++++++++++++++++++++++++++++++++++++++++++
- docbook-4.1.2/catalog.xml | 66 +++++++++++++++++++++++++++++++++++++++++++
- docbook-4.2/catalog.xml | 69 ++++++---------------------------------------
- docbook-4.3/catalog.xml | 72 ++++++++---------------------------------------
- docbook-4.4/catalog.xml | 63 ++---------------------------------------
- docbook-4.5/catalog.xml | 63 ++---------------------------------------
- 6 files changed, 165 insertions(+), 240 deletions(-)
- create mode 100644 docbook-4.0/catalog.xml
- create mode 100644 docbook-4.1.2/catalog.xml
-diff --git a/docbook-4.0/catalog.xml b/docbook-4.0/catalog.xml
-new file mode 100644
---- /dev/null
-+++ b/docbook-4.0/catalog.xml
-@@ -0,0 +1,72 @@
-+<?xml version='1.0'?>
-+<!DOCTYPE catalog PUBLIC "-//OASIS//DTD Entity Resolution XML Catalog V1.0//EN"
-+ "http://www.oasis-open.org/committees/entity/release/1.0/catalog.dtd">
-+
-+<catalog xmlns="urn:oasis:names:tc:entity:xmlns:xml:catalog">
-+
-+<!-- ...................................................................... -->
-+<!-- XML Catalog data for DocBook XML V4.0............................... -->
-+<!-- File catalog.xml ..................................................... -->
-+
-+<!-- Please direct all questions, bug reports, or suggestions for
-+ changes to the docbook@lists.oasis-open.org mailing list. For more
-+ information, see http://www.oasis-open.org/.
-+ -->
-+
-+<!-- This is the catalog data file for DocBook V4.0. It is provided as
-+ a convenience in building your own catalog files. You need not use
-+ the filenames listed here, and need not use the filename method of
-+ identifying storage objects at all. See the documentation for
-+ detailed information on the files associated with the DocBook DTD.
-+ See XML Catalogs at http://www.oasis-open.org/committees/entity/ for
-+ detailed information on supplying and using catalog data.
-+ -->
-+
-+<!-- ...................................................................... -->
-+<!-- DocBook driver file .................................................. -->
-+
-+<public publicId="-//Norman Walsh//DTD DocBk XML V4.0//EN"
-+ uri="docbookx.dtd"/>
-+
-+<!-- Other ways to refer to DocBook XML v4.0............................... -->
-+<public publicId="-//Norman Walsh//DTD DocBook XML V4.0//EN"
-+ uri="docbookx.dtd"/>
-+
-+<system systemId="http://www.oasis-open.org/docbook/xml/4.0/docbookx.dtd"
-+ uri="docbookx.dtd"/>
-+
-+<!-- ...................................................................... -->
-+<!-- DocBook modules ...................................................... -->
-+
-+<public publicId="-//Norman Walsh//ENTITIES DocBook XML Notations V4.0//EN"
-+ uri="dbnotnx.mod"/>
-+
-+<public publicId="-//Norman Walsh//ENTITIES DocBook XML Character Entities V4.0//EN"
-+ uri="dbcentx.mod"/>
-+
-+<public publicId="-//Norman Walsh//ELEMENTS DocBook XML Information Pool V4.0//EN"
-+ uri="dbpoolx.mod"/>
-+
-+<public publicId="-//Norman Walsh//ELEMENTS DocBook XML Document Hierarchy V4.0//EN"
-+ uri="dbhierx.mod"/>
-+
-+<public publicId="-//Norman Walsh//ENTITIES DocBook XML Additional General Entities V4.0//EN"
-+ uri="dbgenent.mod"/>
-+
-+<public publicId="-//Norman Walsh//DTD CALS Table Model XML V4.0//EN"
-+ uri="calstblx.dtd"/>
-+
-+<!-- UNOFFICIAL conversion of the exchange table model to XML, based on the OASIS Exchange -->
-+<!-- Table Model PUBLIC "-//SGML Open//DTD Exchange Table Model 19960430//EN" -->
-+<public publicId="-//Normal Walsh//Exchange Table Model 19960430 XML V4.0//EN"
-+ uri="soextblx.dtd"/>
-+
-+<!-- the following FPI is declared for the DTD inside the DTD, but we'll -->
-+<!-- use the version from the latest DocBook
-+<public publicId="-//OASIS//DTD XML Exchange Table Model 19990315//EN"
-+ uri="soextblx.dtd"/> -->
-+
-+<!-- End of catalog data for DocBook XML V4.0 ............................. -->
-+<!-- ...................................................................... -->
-+
-+</catalog>
diff --git a/docbook-4.1.2/catalog.xml b/docbook-4.1.2/catalog.xml
new file mode 100644
--- /dev/null
diff --git a/meta/recipes-devtools/docbook-xml/docbook-xml-dtd4_4.5.bb b/meta/recipes-devtools/docbook-xml/docbook-xml-dtd4_4.5.bb
index 2bec1442af..e4b4201b1f 100644
--- a/meta/recipes-devtools/docbook-xml/docbook-xml-dtd4_4.5.bb
+++ b/meta/recipes-devtools/docbook-xml/docbook-xml-dtd4_4.5.bb
@@ -1,30 +1,47 @@
SUMMARY = "Document type definitions for verification of XML data files"
DESCRIPTION = "Document type definitions for verification of XML data \
-files against the DocBook rule set, it ships with the latest DocBook 4.5 \
-XML DTD, as well as a selected set of legacy DTDs for use with older \
-documents, including 4.0, 4.1.2, 4.2, 4.3 and 4.4"
-HOMEPAGE = "http://www.docbook.org/xml/"
+files against the DocBook rule set."
+HOMEPAGE = "https://docbook.org"
+
+# The upstream sources are not distributed with a license file.
+# LICENSE-OASIS is included as a "patch" to workaround this. When
+# upgrading this recipe, please verify whether this is still needed.
LICENSE = "OASIS"
LIC_FILES_CHKSUM = "file://${WORKDIR}/LICENSE-OASIS;md5=c608985dd5f7f215e669e7639a0b1d2e"
-# Note: the upstream sources are not distributed with a license file.
-# LICENSE-OASIS is included as a "patch" to workaround this. When
-# upgrading this recipe, please verify whether this is still needed.
-SRC_URI = "http://snapshot.debian.org/archive/debian/20160728T043443Z/pool/main/d/docbook-xml/docbook-xml_${PV}.orig.tar.gz \
- file://LICENSE-OASIS \
- file://docbook-xml-update-catalog.xml.patch \
-"
+# To support apps with xml schema backward compatibility, we must
+# install a set of schemas. Install the latest based on PV and then
+# name any specific versions as required. TODO: Figure out a mechanism
+# to filter exactly which versions are necessary, if that's even
+# possible.
+#
+# DocBook.org lists available release packages at https://docbook.org/xml/.
+#
+# The release packages relate docbook source at
+# https://github.com/docbook/docbook but building them requires the
+# Gradle build system. In future, it might be safer to use the source
+# tree which would also enable proper SBOM generation through the
+# bitbake build system.
+#
-SRC_URI[md5sum] = "487b4d44e15cffb1f4048af23f98208e"
-SRC_URI[sha256sum] = "b0f8edcf697f5318e63dd98c9a931f3fee167af0805ba441db372e0f17b2a44f"
+SRC_URI = "https://docbook.org/xml/4.1.2/docbkx412.zip;name=payload412;subdir=docbook-4.1.2 \
+ https://docbook.org/xml/4.2/docbook-xml-4.2.zip;name=payload42;subdir=docbook-4.2 \
+ https://docbook.org/xml/4.3/docbook-xml-4.3.zip;name=payload43;subdir=docbook-4.3 \
+ https://docbook.org/xml/4.4/docbook-xml-4.4.zip;name=payload44;subdir=docbook-4.4 \
+ https://docbook.org/xml/${PV}/docbook-xml-${PV}.zip;name=payloadPV;subdir=docbook-${PV} \
+ file://docbook-xml-update-catalog.xml.patch \
+ file://LICENSE-OASIS"
-UPSTREAM_CHECK_URI = "${DEBIAN_MIRROR}/main/d/docbook-xml/"
+SRC_URI[payload412.sha256sum] = "30f0644064e0ea71751438251940b1431f46acada814a062870f486c772e7772"
+SRC_URI[payload42.sha256sum] = "acc4601e4f97a196076b7e64b368d9248b07c7abf26b34a02cca40eeebe60fa2"
+SRC_URI[payload43.sha256sum] = "23068a94ea6fd484b004c5a73ec36a66aa47ea8f0d6b62cc1695931f5c143464"
+SRC_URI[payload44.sha256sum] = "02f159eb88c4254d95e831c51c144b1863b216d909b5ff45743a1ce6f5273090"
+SRC_URI[payloadPV.sha256sum] = "4e4e037a2b83c98c6c94818390d4bdd3f6e10f6ec62dd79188594e26190dc7b4"
-S = "${WORKDIR}/docbook-xml-4.5.c31424"
+UPSTREAM_CHECK_REGEX = "docbook-xml-(?P<pver>4(\.\d+)).zip"
-inherit allarch
-BBCLASSEXTEND = "native"
+S = "${WORKDIR}"
do_configure (){
:
@@ -38,15 +55,20 @@ do_install () {
install -d ${D}${sysconfdir}/xml/
xmlcatalog --create --noout ${D}${sysconfdir}/xml/docbook-xml.xml
- for DTDVERSION in 4.0 4.1.2 4.2 4.3 4.4 4.5; do
+ for DTDVERSION in 4.1.2 4.2 4.3 4.4 4.5; do
DEST=${datadir}/xml/docbook/schema/dtd/$DTDVERSION
install -d -m 755 ${D}$DEST
cp -v -R docbook-$DTDVERSION/* ${D}$DEST
- xmlcatalog --verbose --noout --add nextCatalog unused file://$DEST/catalog.xml ${D}${sysconfdir}/xml/docbook-xml.xml
+ xmlcatalog --verbose --noout --add nextCatalog unused \
+ file://$DEST/catalog.xml ${D}${sysconfdir}/xml/docbook-xml.xml
done
}
+# Magic environment variable is required for downstream recipe processing
XMLCATALOGS = "${sysconfdir}/xml/docbook-xml.xml"
inherit xmlcatalog
FILES:${PN} = "${datadir}/* ${sysconfdir}/xml/docbook-xml.xml"
+
+inherit allarch
+BBCLASSEXTEND = "native"
diff --git a/meta/recipes-devtools/dosfstools/dosfstools_4.2.bb b/meta/recipes-devtools/dosfstools/dosfstools_4.2.bb
index 289d939bb9..47d81dac8d 100644
--- a/meta/recipes-devtools/dosfstools/dosfstools_4.2.bb
+++ b/meta/recipes-devtools/dosfstools/dosfstools_4.2.bb
@@ -9,13 +9,11 @@ SECTION = "base"
LICENSE = "GPL-3.0-only"
LIC_FILES_CHKSUM = "file://COPYING;md5=d32239bcb673463ab874e80d47fae504"
-SRC_URI = "https://github.com/dosfstools/dosfstools/releases/download/v${PV}/${BP}.tar.gz \
+SRC_URI = "${GITHUB_BASE_URI}/download/v${PV}/${BP}.tar.gz \
"
SRC_URI[sha256sum] = "64926eebf90092dca21b14259a5301b7b98e7b1943e8a201c7d726084809b527"
-UPSTREAM_CHECK_URI = "https://github.com/dosfstools/dosfstools/releases"
-
-inherit autotools gettext pkgconfig update-alternatives
+inherit autotools gettext pkgconfig update-alternatives github-releases
EXTRA_OECONF = "--enable-compat-symlinks --without-iconv"
diff --git a/meta/recipes-devtools/dpkg/dpkg.inc b/meta/recipes-devtools/dpkg/dpkg.inc
index 0d17a98b80..4c1d42e0af 100644
--- a/meta/recipes-devtools/dpkg/dpkg.inc
+++ b/meta/recipes-devtools/dpkg/dpkg.inc
@@ -4,14 +4,14 @@ HOMEPAGE = "https://salsa.debian.org/dpkg-team/dpkg"
DESCRIPTION = "The primary interface for the dpkg suite is the dselect program. A more low-level and less user-friendly interface is available in the form of the dpkg command."
SECTION = "base"
-DEPENDS = "zlib bzip2 perl ncurses"
-DEPENDS:class-native = "bzip2-replacement-native zlib-native virtual/update-alternatives-native gettext-native perl-native"
+DEPENDS = "zlib bzip2 perl ncurses libmd"
+DEPENDS:class-native = "bzip2-replacement-native zlib-native virtual/update-alternatives-native gettext-native perl-native libmd-native"
RDEPENDS:${PN} = "${VIRTUAL-RUNTIME_update-alternatives} perl"
RDEPENDS:${PN}:class-native = ""
UPSTREAM_CHECK_GITTAGREGEX = "(?P<pver>(\d+(\.\d+)+))"
-inherit autotools gettext perlnative pkgconfig perl-version update-alternatives
+inherit autotools gettext perlnative pkgconfig perl-version update-alternatives bash-completion
PERL:class-native = "${STAGING_BINDIR_NATIVE}/perl-native/perl"
diff --git a/meta/recipes-devtools/dpkg/dpkg/0001-Add-support-for-riscv32-CPU.patch b/meta/recipes-devtools/dpkg/dpkg/0001-Add-support-for-riscv32-CPU.patch
index 52e85705fa..b8a8697585 100644
--- a/meta/recipes-devtools/dpkg/dpkg/0001-Add-support-for-riscv32-CPU.patch
+++ b/meta/recipes-devtools/dpkg/dpkg/0001-Add-support-for-riscv32-CPU.patch
@@ -1,4 +1,4 @@
-From 279e4c274f5f295823cf9fa95d3ba131f6d711db Mon Sep 17 00:00:00 2001
+From 21459bb8d9a997e6a92885a4ef337ede9cc5aba7 Mon Sep 17 00:00:00 2001
From: Khem Raj <raj.khem@gmail.com>
Date: Wed, 29 Apr 2020 22:02:23 -0700
Subject: [PATCH] Add support for riscv32 CPU
@@ -11,10 +11,10 @@ Signed-off-by: Khem Raj <raj.khem@gmail.com>
2 files changed, 2 insertions(+)
diff --git a/data/cputable b/data/cputable
-index 9f2a8e0..1d935b1 100644
+index 575c008e3..7a801a03a 100644
--- a/data/cputable
+++ b/data/cputable
-@@ -41,6 +41,7 @@ powerpc powerpc (powerpc|ppc) 32 big
+@@ -43,6 +43,7 @@ powerpc powerpc (powerpc|ppc) 32 big
powerpcel powerpcle powerpcle 32 little
ppc64 powerpc64 (powerpc|ppc)64 64 big
ppc64el powerpc64le powerpc64le 64 little
@@ -23,10 +23,10 @@ index 9f2a8e0..1d935b1 100644
s390 s390 s390 32 big
s390x s390x s390x 64 big
diff --git a/scripts/Dpkg/Vendor/Debian.pm b/scripts/Dpkg/Vendor/Debian.pm
-index a352bbd..fa1d90b 100644
+index fcf5b1e2a..175c9f436 100644
--- a/scripts/Dpkg/Vendor/Debian.pm
+++ b/scripts/Dpkg/Vendor/Debian.pm
-@@ -306,6 +306,7 @@ sub _add_build_flags {
+@@ -202,6 +202,7 @@ sub set_build_features {
powerpc
ppc64
ppc64el
@@ -34,6 +34,3 @@ index a352bbd..fa1d90b 100644
riscv64
s390x
sparc
---
-2.26.2
-
diff --git a/meta/recipes-devtools/dpkg/dpkg/0001-build.c-ignore-return-of-1-from-tar-cf.patch b/meta/recipes-devtools/dpkg/dpkg/0001-build.c-ignore-return-of-1-from-tar-cf.patch
index dc0d9bfc2e..95a49053e8 100644
--- a/meta/recipes-devtools/dpkg/dpkg/0001-build.c-ignore-return-of-1-from-tar-cf.patch
+++ b/meta/recipes-devtools/dpkg/dpkg/0001-build.c-ignore-return-of-1-from-tar-cf.patch
@@ -1,4 +1,4 @@
-From 839f228556c00739f72534e8635195935eb3752f Mon Sep 17 00:00:00 2001
+From 4c5e6c280a2ab4d2009d3264e94286f5fe244d0b Mon Sep 17 00:00:00 2001
From: Paul Eggleton <paul.eggleton@linux.microsoft.com>
Date: Tue, 16 Jun 2020 03:57:25 +0000
Subject: [PATCH] build.c: ignore return of 1 from tar -cf
@@ -23,16 +23,15 @@ Upstream-Status: Inappropriate [OE specific]
Original patch by RP 2015/3/27, rebased by Paul Eggleton
Signed-off-by: Paul Eggleton <paul.eggleton@microsoft.com>
-
---
src/deb/build.c | 5 ++++-
1 file changed, 4 insertions(+), 1 deletion(-)
diff --git a/src/deb/build.c b/src/deb/build.c
-index 76613ad..7c216d1 100644
+index 92aba9553..6436b33da 100644
--- a/src/deb/build.c
+++ b/src/deb/build.c
-@@ -482,6 +482,7 @@ tarball_pack(const char *dir, filenames_feed_func *tar_filenames_feeder,
+@@ -481,6 +481,7 @@ tarball_pack(const char *dir, filenames_feed_func *tar_filenames_feeder,
{
int pipe_filenames[2], pipe_tarball[2];
pid_t pid_tar, pid_comp;
@@ -40,7 +39,7 @@ index 76613ad..7c216d1 100644
/* Fork off a tar. We will feed it a list of filenames on stdin later. */
m_pipe(pipe_filenames);
-@@ -534,7 +535,9 @@ tarball_pack(const char *dir, filenames_feed_func *tar_filenames_feeder,
+@@ -533,7 +534,9 @@ tarball_pack(const char *dir, filenames_feed_func *tar_filenames_feeder,
/* All done, clean up wait for tar and <compress> to finish their job. */
close(pipe_filenames[1]);
subproc_reap(pid_comp, _("<compress> from tar -cf"), 0);
@@ -51,6 +50,3 @@ index 76613ad..7c216d1 100644
}
static intmax_t
---
-2.25.1
-
diff --git a/meta/recipes-devtools/dpkg/dpkg/0001-dpkg-Support-muslx32-build.patch b/meta/recipes-devtools/dpkg/dpkg/0001-dpkg-Support-muslx32-build.patch
index d66ab4476a..e8d8576f85 100644
--- a/meta/recipes-devtools/dpkg/dpkg/0001-dpkg-Support-muslx32-build.patch
+++ b/meta/recipes-devtools/dpkg/dpkg/0001-dpkg-Support-muslx32-build.patch
@@ -1,4 +1,4 @@
-From a328c8bec0bf8071ae8f20fee4c7475205064ba1 Mon Sep 17 00:00:00 2001
+From 1d192b60fc43e24e1c2d6ff452dabeee7a227cc0 Mon Sep 17 00:00:00 2001
From: sweeaun <swee.aun.khor@intel.com>
Date: Sun, 10 Sep 2017 00:14:15 -0700
Subject: [PATCH] dpkg: Support muslx32 build
@@ -13,10 +13,10 @@ Signed-off-by: sweeaun <swee.aun.khor@intel.com>
2 files changed, 2 insertions(+)
diff --git a/data/ostable b/data/ostable
-index be64342..87db273 100644
+index 860355774..28779beca 100644
--- a/data/ostable
+++ b/data/ostable
-@@ -19,6 +19,7 @@ base-uclibc-linux linux-uclibc linux[^-]*-uclibc
+@@ -21,6 +21,7 @@ base-uclibc-linux linux-uclibc linux[^-]*-uclibc
eabihf-musl-linux linux-musleabihf linux[^-]*-musleabihf
eabi-musl-linux linux-musleabi linux[^-]*-musleabi
base-musl-linux linux-musl linux[^-]*-musl
@@ -25,17 +25,14 @@ index be64342..87db273 100644
eabi-gnu-linux linux-gnueabi linux[^-]*-gnueabi
abin32-gnu-linux linux-gnuabin32 linux[^-]*-gnuabin32
diff --git a/data/tupletable b/data/tupletable
-index 28f00bf..748ffab 100644
+index 82ae3604e..707d85bdb 100644
--- a/data/tupletable
+++ b/data/tupletable
-@@ -10,6 +10,7 @@ base-uclibc-linux-<cpu> uclibc-linux-<cpu>
+@@ -26,6 +26,7 @@ base-uclibc-linux-<cpu> uclibc-linux-<cpu>
eabihf-musl-linux-arm musl-linux-armhf
eabi-musl-linux-arm musl-linux-armel
base-musl-linux-<cpu> musl-linux-<cpu>
+x32-musl-linux-amd64 x32
- ilp32-gnu-linux-arm64 arm64ilp32
eabihf-gnu-linux-arm armhf
eabi-gnu-linux-arm armel
---
-2.7.4
-
+ eabi-gnu-linux-armeb armeb
diff --git a/meta/recipes-devtools/dpkg/dpkg/0002-Adapt-to-linux-wrs-kernel-version-which-has-characte.patch b/meta/recipes-devtools/dpkg/dpkg/0002-Adapt-to-linux-wrs-kernel-version-which-has-characte.patch
index 9fe0ca7600..fc097e5a66 100644
--- a/meta/recipes-devtools/dpkg/dpkg/0002-Adapt-to-linux-wrs-kernel-version-which-has-characte.patch
+++ b/meta/recipes-devtools/dpkg/dpkg/0002-Adapt-to-linux-wrs-kernel-version-which-has-characte.patch
@@ -1,8 +1,8 @@
-From b4ea54158c399874e12394ebc91afe98954695e2 Mon Sep 17 00:00:00 2001
+From 0cac67ce5920d6d0c9df4278bfa77da878a8a37a Mon Sep 17 00:00:00 2001
From: Alexander Kanavin <alex.kanavin@gmail.com>
Date: Wed, 26 Aug 2015 16:16:16 +0300
-Subject: [PATCH 2/5] Adapt to linux-wrs kernel version, which has character
- '_' inside. Remove the first-char-digit-check (as the 1.15.8.5 version does).
+Subject: [PATCH] Adapt to linux-wrs kernel version, which has character '_'
+ inside. Remove the first-char-digit-check (as the 1.15.8.5 version does).
Signed-off-by: Dongxiao Xu <dongxiao.xu@intel.com>
Signed-off-by: Constantin Musca <constantinx.musca@intel.com>
@@ -13,14 +13,14 @@ Upstream-Status: Inappropriate [embedded specific]
1 file changed, 2 insertions(+), 4 deletions(-)
diff --git a/lib/dpkg/parsehelp.c b/lib/dpkg/parsehelp.c
-index 453077fd9..f42ea2882 100644
+index 63a36f55c..81901bd5a 100644
--- a/lib/dpkg/parsehelp.c
+++ b/lib/dpkg/parsehelp.c
-@@ -243,14 +243,12 @@ parseversion(struct dpkg_version *rversion, const char *string,
+@@ -275,14 +275,12 @@ parseversion(struct dpkg_version *rversion, const char *string,
ptr = rversion->version;
if (!*ptr)
return dpkg_put_error(err, _("version number is empty"));
-- if (*ptr && !c_isdigit(*ptr++))
+- if (!c_isdigit(*ptr++))
- return dpkg_put_warn(err, _("version number does not start with digit"));
for (; *ptr; ptr++) {
- if (!c_isdigit(*ptr) && !c_isalpha(*ptr) && strchr(".-+~:", *ptr) == NULL)
@@ -33,6 +33,3 @@ index 453077fd9..f42ea2882 100644
return dpkg_put_warn(err, _("invalid character in revision number"));
}
-
---
-2.11.0
diff --git a/meta/recipes-devtools/dpkg/dpkg/0003-Our-pre-postinsts-expect-D-to-be-set-when-running-in.patch b/meta/recipes-devtools/dpkg/dpkg/0003-Our-pre-postinsts-expect-D-to-be-set-when-running-in.patch
index 75ae848264..916c7dfb00 100644
--- a/meta/recipes-devtools/dpkg/dpkg/0003-Our-pre-postinsts-expect-D-to-be-set-when-running-in.patch
+++ b/meta/recipes-devtools/dpkg/dpkg/0003-Our-pre-postinsts-expect-D-to-be-set-when-running-in.patch
@@ -1,4 +1,4 @@
-From dd11ed66640f79143e42d778b58fdd5a61fb5836 Mon Sep 17 00:00:00 2001
+From b6c28222276704a1e1a544983e38dfa2f3fb481a Mon Sep 17 00:00:00 2001
From: Alexander Kanavin <alex.kanavin@gmail.com>
Date: Wed, 26 Aug 2015 16:25:45 +0300
Subject: [PATCH] Our pre/postinsts expect $D to be set when running in a
@@ -12,21 +12,21 @@ ALIMON 2016/05/26
ALIMON 2017/02/21
KKang 2019/02/20
---
- src/main/script.c | 54 +++--------------------------------------------
- 1 file changed, 3 insertions(+), 51 deletions(-)
+ src/main/script.c | 53 +++--------------------------------------------
+ 1 file changed, 3 insertions(+), 50 deletions(-)
diff --git a/src/main/script.c b/src/main/script.c
-index abe65b6..0edb8f1 100644
+index 017d92efe..181e7c710 100644
--- a/src/main/script.c
+++ b/src/main/script.c
-@@ -96,58 +96,10 @@ setexecute(const char *path, struct stat *stab)
- static const char *
+@@ -97,58 +97,11 @@ static const char *
maintscript_pre_exec(struct command *cmd)
{
+ const char *instdir = dpkg_fsys_get_dir();
- const char *admindir = dpkg_db_get_dir();
- const char *changedir;
- size_t instdirlen = strlen(instdir);
--
+
- if (instdirlen > 0 && in_force(FORCE_SCRIPT_CHROOTLESS))
- changedir = instdir;
- else
@@ -81,6 +81,3 @@ index abe65b6..0edb8f1 100644
}
/**
---
-2.25.1
-
diff --git a/meta/recipes-devtools/dpkg/dpkg/0004-The-lutimes-function-doesn-t-work-properly-for-all-s.patch b/meta/recipes-devtools/dpkg/dpkg/0004-The-lutimes-function-doesn-t-work-properly-for-all-s.patch
index bbd5aba418..35c0c246f6 100644
--- a/meta/recipes-devtools/dpkg/dpkg/0004-The-lutimes-function-doesn-t-work-properly-for-all-s.patch
+++ b/meta/recipes-devtools/dpkg/dpkg/0004-The-lutimes-function-doesn-t-work-properly-for-all-s.patch
@@ -1,8 +1,7 @@
-From adb6bfd0feeceaf030df0debe3343d7f73e708a0 Mon Sep 17 00:00:00 2001
+From 80ad29d22f8ca4033a6a79a726580fee17bdade9 Mon Sep 17 00:00:00 2001
From: Alexander Kanavin <alex.kanavin@gmail.com>
Date: Wed, 26 Aug 2015 16:27:45 +0300
-Subject: [PATCH 4/5] The lutimes function doesn't work properly for all
- systems.
+Subject: [PATCH] The lutimes function doesn't work properly for all systems.
Signed-off-by: Constantin Musca <constantinx.musca@intel.com>
@@ -12,10 +11,10 @@ Upstream-Status: Inappropriate [embedded specific]
1 file changed, 2 insertions(+), 1 deletion(-)
diff --git a/src/main/archives.c b/src/main/archives.c
-index 92340b9..7a55c27 100644
+index 7e399f922..ec3b6878f 100644
--- a/src/main/archives.c
+++ b/src/main/archives.c
-@@ -490,8 +490,9 @@ tarobject_set_mtime(struct tar_entry *te, const char *path)
+@@ -491,8 +491,9 @@ tarobject_set_mtime(struct tar_entry *te, const char *path)
if (te->type == TAR_FILETYPE_SYMLINK) {
#ifdef HAVE_LUTIMES
@@ -26,6 +25,3 @@ index 92340b9..7a55c27 100644
#endif
} else {
if (utimes(path, tv))
---
-2.25.1
-
diff --git a/meta/recipes-devtools/dpkg/dpkg/0006-add-musleabi-to-known-target-tripets.patch b/meta/recipes-devtools/dpkg/dpkg/0006-add-musleabi-to-known-target-tripets.patch
index 8797ea55c6..8ac646b1a1 100644
--- a/meta/recipes-devtools/dpkg/dpkg/0006-add-musleabi-to-known-target-tripets.patch
+++ b/meta/recipes-devtools/dpkg/dpkg/0006-add-musleabi-to-known-target-tripets.patch
@@ -1,7 +1,10 @@
-From f8910022dc3ec622272f168cd0022dbdf6dff93a Mon Sep 17 00:00:00 2001
+From 1c9e78dda91ba66fbd8fe02b66b6c603d08d3343 Mon Sep 17 00:00:00 2001
From: Khem Raj <raj.khem@gmail.com>
Date: Wed, 30 Dec 2015 23:05:41 +0000
Subject: [PATCH] add musleabi to known target tripets
+MIME-Version: 1.0
+Content-Type: text/plain; charset=UTF-8
+Content-Transfer-Encoding: 8bit
helps compiling dpkg for musl/arm-softfloat
@@ -14,10 +17,10 @@ Signed-off-by: Aníbal Limón <anibal.limon@linux.intel.com>
2 files changed, 2 insertions(+)
diff --git a/data/ostable b/data/ostable
-index 99c1f889d..be6434271 100644
+index 64f424490..860355774 100644
--- a/data/ostable
+++ b/data/ostable
-@@ -17,6 +17,7 @@
+@@ -19,6 +19,7 @@
eabi-uclibc-linux linux-uclibceabi linux[^-]*-uclibceabi
base-uclibc-linux linux-uclibc linux[^-]*-uclibc
eabihf-musl-linux linux-musleabihf linux[^-]*-musleabihf
@@ -26,17 +29,14 @@ index 99c1f889d..be6434271 100644
eabihf-gnu-linux linux-gnueabihf linux[^-]*-gnueabihf
eabi-gnu-linux linux-gnueabi linux[^-]*-gnueabi
diff --git a/data/tupletable b/data/tupletable
-index 5f500f6ca..28f00bfe6 100644
+index 7436f8056..82ae3604e 100644
--- a/data/tupletable
+++ b/data/tupletable
-@@ -8,6 +8,7 @@
+@@ -24,6 +24,7 @@
eabi-uclibc-linux-arm uclibc-linux-armel
base-uclibc-linux-<cpu> uclibc-linux-<cpu>
eabihf-musl-linux-arm musl-linux-armhf
+eabi-musl-linux-arm musl-linux-armel
base-musl-linux-<cpu> musl-linux-<cpu>
- ilp32-gnu-linux-arm64 arm64ilp32
eabihf-gnu-linux-arm armhf
---
-2.11.0
-
+ eabi-gnu-linux-arm armel
diff --git a/meta/recipes-devtools/dpkg/dpkg/0007-dpkg-deb-build.c-Remove-usage-of-clamp-mtime-in-tar.patch b/meta/recipes-devtools/dpkg/dpkg/0007-dpkg-deb-build.c-Remove-usage-of-clamp-mtime-in-tar.patch
index 117f9234ad..3d3a4f0bb9 100644
--- a/meta/recipes-devtools/dpkg/dpkg/0007-dpkg-deb-build.c-Remove-usage-of-clamp-mtime-in-tar.patch
+++ b/meta/recipes-devtools/dpkg/dpkg/0007-dpkg-deb-build.c-Remove-usage-of-clamp-mtime-in-tar.patch
@@ -1,7 +1,10 @@
-From 8659eeeeda74d71e12080121f0b13a88cbdda433 Mon Sep 17 00:00:00 2001
+From 6dd80236a91a505b5753bb74e5f1b47330d8b16b Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?An=C3=ADbal=20Lim=C3=B3n?= <anibal.limon@linux.intel.com>
Date: Tue, 21 Feb 2017 11:23:27 -0600
Subject: [PATCH] dpkg-deb/build.c: Remove usage of --clamp-mtime in tar
+MIME-Version: 1.0
+Content-Type: text/plain; charset=UTF-8
+Content-Transfer-Encoding: 8bit
Recently dpkg added --clamp-mtime to tar to create reproducible
build tarballs [1].
@@ -24,10 +27,10 @@ Signed-off-by: Kai Kang <kai.kang@windriver.com>
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/src/deb/build.c b/src/deb/build.c
-index 5c74ff3..76613ad 100644
+index 1f0c050ee..92aba9553 100644
--- a/src/deb/build.c
+++ b/src/deb/build.c
-@@ -505,7 +505,7 @@ tarball_pack(const char *dir, filenames_feed_func *tar_filenames_feeder,
+@@ -504,7 +504,7 @@ tarball_pack(const char *dir, filenames_feed_func *tar_filenames_feeder,
command_init(&cmd, TAR, "tar -cf");
command_add_args(&cmd, "tar", "-cf", "-", "--format=gnu",
@@ -36,6 +39,3 @@ index 5c74ff3..76613ad 100644
/* Mode might become a positional argument, pass it before -T. */
if (options->mode)
command_add_args(&cmd, "--mode", options->mode, NULL);
---
-2.25.1
-
diff --git a/meta/recipes-devtools/dpkg/dpkg/add_armeb_triplet_entry.patch b/meta/recipes-devtools/dpkg/dpkg/add_armeb_triplet_entry.patch
index d165616a19..cbdf01dbd9 100644
--- a/meta/recipes-devtools/dpkg/dpkg/add_armeb_triplet_entry.patch
+++ b/meta/recipes-devtools/dpkg/dpkg/add_armeb_triplet_entry.patch
@@ -1,5 +1,11 @@
-Author: Krishnanjanappa, Jagadeesh <jagadeesh.krishnanjanappa@caviumnetworks.com>
-Date: Wed Apr 8 18:08:14 2015 +0530
+From 1c3a109df54b6092fa85a1fe2b7771e3b959655f Mon Sep 17 00:00:00 2001
+From: "Krishnanjanappa, Jagadeesh"
+ <jagadeesh.krishnanjanappa@caviumnetworks.com>
+Date: Wed, 8 Apr 2015 18:08:14 +0530
+Subject: [PATCH] dpkg: add triplet entry to fix build error for armeb
+MIME-Version: 1.0
+Content-Type: text/plain; charset=UTF-8
+Content-Transfer-Encoding: 8bit
[PATCH] add armeb triplet entry into triplettable.
@@ -25,24 +31,19 @@ Upstream-Status: Pending
Signed-off-by: Krishnanjanappa, Jagadeesh <jagadeesh.krishnanjanappa@caviumnetworks.com>
Signed-off-by: Aníbal Limón <anibal.limon@linux.intel.com>
-
---
data/tupletable | 1 +
1 file changed, 1 insertion(+)
diff --git a/data/tupletable b/data/tupletable
-index b7802bec3..5f500f6ca 100644
+index ae9f2ddb4..7436f8056 100644
--- a/data/tupletable
+++ b/data/tupletable
-@@ -12,6 +12,7 @@ base-musl-linux-<cpu> musl-linux-<cpu>
- ilp32-gnu-linux-arm64 arm64ilp32
+@@ -27,6 +27,7 @@ eabihf-musl-linux-arm musl-linux-armhf
+ base-musl-linux-<cpu> musl-linux-<cpu>
eabihf-gnu-linux-arm armhf
eabi-gnu-linux-arm armel
+eabi-gnu-linux-armeb armeb
abin32-gnu-linux-mips64r6el mipsn32r6el
abin32-gnu-linux-mips64r6 mipsn32r6
abin32-gnu-linux-mips64el mipsn32el
---
-2.11.0
-
-
diff --git a/meta/recipes-devtools/dpkg/dpkg/arch_pm.patch b/meta/recipes-devtools/dpkg/dpkg/arch_pm.patch
index 4e0d22acbb..df2cd88ca4 100644
--- a/meta/recipes-devtools/dpkg/dpkg/arch_pm.patch
+++ b/meta/recipes-devtools/dpkg/dpkg/arch_pm.patch
@@ -1,3 +1,8 @@
+From bdf60ebbeb433a80e6cfcbde9d83d89564e79e20 Mon Sep 17 00:00:00 2001
+From: Joe Slater <jslater@windriver.com>
+Date: Mon, 26 Aug 2013 23:38:45 +0000
+Subject: [PATCH] dpkg: fix configuration issue for mips64
+
configure cannot determine the proper cpu, os, or
architecture for mips64, and possibly other arch's
because of faulty code added to Arch.pm in the latest
@@ -6,16 +11,15 @@ release from upstream. We remove that code.
Upstream-Status: Pending
Signed-off-by: Joe Slater <jslater@windriver.com>
-
---
scripts/Dpkg/Arch.pm | 3 ---
1 file changed, 3 deletions(-)
diff --git a/scripts/Dpkg/Arch.pm b/scripts/Dpkg/Arch.pm
-index 1720847b8..6345ce3b9 100644
+index 0d352eeb9..4ef5fa307 100644
--- a/scripts/Dpkg/Arch.pm
+++ b/scripts/Dpkg/Arch.pm
-@@ -323,9 +323,6 @@ sub _load_tupletable()
+@@ -326,9 +326,6 @@ sub _load_tupletable()
(my $dt = $debtuple) =~ s/<cpu>/$_cpu/;
(my $da = $debarch) =~ s/<cpu>/$_cpu/;
@@ -25,5 +29,3 @@ index 1720847b8..6345ce3b9 100644
$debarch_to_debtuple{$da} = $dt;
$debtuple_to_debarch{$dt} = $da;
}
---
-2.11.0
diff --git a/meta/recipes-devtools/dpkg/dpkg/noman.patch b/meta/recipes-devtools/dpkg/dpkg/noman.patch
index 6900716b11..e80549d740 100644
--- a/meta/recipes-devtools/dpkg/dpkg/noman.patch
+++ b/meta/recipes-devtools/dpkg/dpkg/noman.patch
@@ -1,11 +1,15 @@
-Upstream-Status: Inappropriate [disable feature]
+From 008ec5150dd086ffa3940cb520f1ca91939f138d Mon Sep 17 00:00:00 2001
+From: Chris Larson <kergoth@openedhand.com>
+Date: Tue, 5 Sep 2006 07:24:58 +0000
+Subject: [PATCH] Add dpkg, modified from upstream oe.
+Upstream-Status: Inappropriate [disable feature]
---
Makefile.am | 1 -
1 file changed, 1 deletion(-)
diff --git a/Makefile.am b/Makefile.am
-index d963a10..7cef7f5 100644
+index 7186045d4..daca9faf2 100644
--- a/Makefile.am
+++ b/Makefile.am
@@ -11,7 +11,6 @@ SUBDIRS = \
@@ -16,6 +20,3 @@ index d963a10..7cef7f5 100644
# EOL
ACLOCAL_AMFLAGS = -I m4
---
-2.25.1
-
diff --git a/meta/recipes-devtools/dpkg/dpkg/pager.patch b/meta/recipes-devtools/dpkg/dpkg/pager.patch
deleted file mode 100644
index e56b9d28af..0000000000
--- a/meta/recipes-devtools/dpkg/dpkg/pager.patch
+++ /dev/null
@@ -1,21 +0,0 @@
-pager: Use less instead of pager
-
-pager is a Debianism. Istead use directly pager.
-
-Upstream-Status: Inappropriate [OE-Core integration specific]
-
-Suggested-by: Burton, Ross <ross.burton@intel.com>
-Signed-off-by: Ricardo Ribalda <ricardo@ribalda.com>
-diff --git a/lib/dpkg/dpkg.h b/lib/dpkg/dpkg.h
-index 2bb067a..6cbce80 100644
---- a/lib/dpkg/dpkg.h
-+++ b/lib/dpkg/dpkg.h
-@@ -95,7 +95,7 @@ DPKG_BEGIN_DECLS
- #define MAXUPDATES 250
-
- #define DEFAULTSHELL "sh"
--#define DEFAULTPAGER "pager"
-+#define DEFAULTPAGER "less"
-
- #define MD5HASHLEN 32
- #define MAXTRIGDIRECTIVE 256
diff --git a/meta/recipes-devtools/dpkg/dpkg/remove-tar-no-timestamp.patch b/meta/recipes-devtools/dpkg/dpkg/remove-tar-no-timestamp.patch
index ebf838ffe9..9307725e8b 100644
--- a/meta/recipes-devtools/dpkg/dpkg/remove-tar-no-timestamp.patch
+++ b/meta/recipes-devtools/dpkg/dpkg/remove-tar-no-timestamp.patch
@@ -1,4 +1,8 @@
-busybox-1.19.4 tar utility doesn't support --warning=no-timestamp
+From add92699ca1397205e1d7b46c3ab43de06b9a6c7 Mon Sep 17 00:00:00 2001
+From: Constantin Musca <constantinx.musca@intel.com>
+Date: Tue, 28 Aug 2012 17:02:40 +0300
+Subject: [PATCH] busybox-1.19.4 tar utility doesn't support
+ --warning=no-timestamp
Signed-off-by: Constantin Musca <constantinx.musca@intel.com>
@@ -8,10 +12,10 @@ Upstream-Status: Inappropriate [configuration]
1 file changed, 1 deletion(-)
diff --git a/src/deb/extract.c b/src/deb/extract.c
-index a1b2dc0..95e2372 100644
+index 8b78a7eab..fd7595808 100644
--- a/src/deb/extract.c
+++ b/src/deb/extract.c
-@@ -333,7 +333,6 @@ extracthalf(const char *debar, const char *dir,
+@@ -338,7 +338,6 @@ extracthalf(const char *debar, const char *dir,
command_add_arg(&cmd, "-f");
command_add_arg(&cmd, "-");
@@ -19,6 +23,3 @@ index a1b2dc0..95e2372 100644
m_dup2(p2[0],0);
close(p2[0]);
---
-2.25.1
-
diff --git a/meta/recipes-devtools/dpkg/dpkg_1.21.7.bb b/meta/recipes-devtools/dpkg/dpkg_1.21.7.bb
deleted file mode 100644
index 902cc03bcd..0000000000
--- a/meta/recipes-devtools/dpkg/dpkg_1.21.7.bb
+++ /dev/null
@@ -1,23 +0,0 @@
-require dpkg.inc
-LIC_FILES_CHKSUM = "file://COPYING;md5=751419260aa954499f7abaabaa882bbe"
-
-SRC_URI = "git://salsa.debian.org/dpkg-team/dpkg.git;protocol=https;branch=main \
- file://noman.patch \
- file://remove-tar-no-timestamp.patch \
- file://arch_pm.patch \
- file://add_armeb_triplet_entry.patch \
- file://0002-Adapt-to-linux-wrs-kernel-version-which-has-characte.patch \
- file://0003-Our-pre-postinsts-expect-D-to-be-set-when-running-in.patch \
- file://0004-The-lutimes-function-doesn-t-work-properly-for-all-s.patch \
- file://0006-add-musleabi-to-known-target-tripets.patch \
- file://0007-dpkg-deb-build.c-Remove-usage-of-clamp-mtime-in-tar.patch \
- file://0001-dpkg-Support-muslx32-build.patch \
- file://pager.patch \
- file://0001-Add-support-for-riscv32-CPU.patch \
- "
-
-SRC_URI:append:class-native = " file://0001-build.c-ignore-return-of-1-from-tar-cf.patch"
-
-SRCREV = "e61f582015a9c67bbb3791cb93a864cfeb9c7151"
-
-S = "${WORKDIR}/git"
diff --git a/meta/recipes-devtools/dpkg/dpkg_1.22.5.bb b/meta/recipes-devtools/dpkg/dpkg_1.22.5.bb
new file mode 100644
index 0000000000..9f1d00e208
--- /dev/null
+++ b/meta/recipes-devtools/dpkg/dpkg_1.22.5.bb
@@ -0,0 +1,22 @@
+require dpkg.inc
+LIC_FILES_CHKSUM = "file://COPYING;md5=751419260aa954499f7abaabaa882bbe"
+
+SRC_URI = "git://salsa.debian.org/dpkg-team/dpkg.git;protocol=https;branch=main \
+ file://noman.patch \
+ file://remove-tar-no-timestamp.patch \
+ file://arch_pm.patch \
+ file://add_armeb_triplet_entry.patch \
+ file://0002-Adapt-to-linux-wrs-kernel-version-which-has-characte.patch \
+ file://0003-Our-pre-postinsts-expect-D-to-be-set-when-running-in.patch \
+ file://0004-The-lutimes-function-doesn-t-work-properly-for-all-s.patch \
+ file://0006-add-musleabi-to-known-target-tripets.patch \
+ file://0007-dpkg-deb-build.c-Remove-usage-of-clamp-mtime-in-tar.patch \
+ file://0001-dpkg-Support-muslx32-build.patch \
+ file://0001-Add-support-for-riscv32-CPU.patch \
+ "
+
+SRC_URI:append:class-native = " file://0001-build.c-ignore-return-of-1-from-tar-cf.patch"
+
+SRCREV = "1c92a4a8bfbeea30ceb0109b096c4ec845e3c6ce"
+
+S = "${WORKDIR}/git"
diff --git a/meta/recipes-devtools/e2fsprogs/e2fsprogs.inc b/meta/recipes-devtools/e2fsprogs/e2fsprogs.inc
index f41a5d0b4e..0cb3f0e6a6 100644
--- a/meta/recipes-devtools/e2fsprogs/e2fsprogs.inc
+++ b/meta/recipes-devtools/e2fsprogs/e2fsprogs.inc
@@ -17,9 +17,9 @@ LIC_FILES_CHKSUM = "file://NOTICE;md5=d50be0580c0b0a7fbc7a4830bbe6c12b \
file://lib/et/et_name.c;beginline=1;endline=11;md5=ead236447dac7b980dbc5b4804d8c836 \
file://lib/ss/ss.h;beginline=1;endline=20;md5=6e89ad47da6e75fecd2b5e0e81e1d4a6"
SECTION = "base"
-DEPENDS = "util-linux attr autoconf-archive"
+DEPENDS = "util-linux attr autoconf-archive-native"
-SRC_URI = "git://git.kernel.org/pub/scm/fs/ext2/e2fsprogs.git;branch=master"
+SRC_URI = "git://git.kernel.org/pub/scm/fs/ext2/e2fsprogs.git;branch=master;protocol=https"
S = "${WORKDIR}/git"
inherit autotools gettext texinfo pkgconfig multilib_header update-alternatives ptest
diff --git a/meta/recipes-devtools/e2fsprogs/e2fsprogs/e2fsprogs-fix-missing-check-for-permission-denied.patch b/meta/recipes-devtools/e2fsprogs/e2fsprogs/e2fsprogs-fix-missing-check-for-permission-denied.patch
index 29078f9dd3..b038e61eb7 100644
--- a/meta/recipes-devtools/e2fsprogs/e2fsprogs/e2fsprogs-fix-missing-check-for-permission-denied.patch
+++ b/meta/recipes-devtools/e2fsprogs/e2fsprogs/e2fsprogs-fix-missing-check-for-permission-denied.patch
@@ -1,4 +1,4 @@
-From 22d7557905534d9e1b39f7d2a6d2036a40bf0c4e Mon Sep 17 00:00:00 2001
+From 5bc75654690a2d916190168b865770a7c93e65dd Mon Sep 17 00:00:00 2001
From: Jackie Huang <jackie.huang@windriver.com>
Date: Wed, 10 Aug 2016 11:19:44 +0800
Subject: [PATCH] Fix missing check for permission denied.
@@ -19,7 +19,7 @@ Signed-off-by: Jackie Huang <jackie.huang@windriver.com>
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/lib/support/profile.c b/lib/support/profile.c
-index f54739e7..53ea68f1 100644
+index bdb14b17..1bd62406 100644
--- a/lib/support/profile.c
+++ b/lib/support/profile.c
@@ -335,7 +335,7 @@ profile_init(const char * const *files, profile_t *ret_profile)
diff --git a/meta/recipes-devtools/e2fsprogs/e2fsprogs/extents.patch b/meta/recipes-devtools/e2fsprogs/e2fsprogs/extents.patch
deleted file mode 100644
index 2c09bb276d..0000000000
--- a/meta/recipes-devtools/e2fsprogs/e2fsprogs/extents.patch
+++ /dev/null
@@ -1,56 +0,0 @@
-CVE: CVE-2022-1304
-Upstream-Status: Backport [ ab51d587bb9b229b1fade1afd02e1574c1ba5c76 ]
-Signed-off-by: Ross Burton <ross.burton@arm.com>
-
-From 347084c9c1ad20f47dae16f5a3dcd8628d5fc7b0 Mon Sep 17 00:00:00 2001
-From: Lukas Czerner <lczerner@redhat.com>
-Date: Thu, 21 Apr 2022 19:31:48 +0200
-Subject: [PATCH] e2fsprogs: add sanity check to extent manipulation
-
-It is possible to have a corrupted extent tree in such a way that a leaf
-node contains zero extents in it. Currently if that happens and we try
-to traverse the tree we can end up accessing wrong data, or possibly
-even uninitialized memory. Make sure we don't do that.
-
-Additionally make sure that we have a sane number of bytes passed to
-memmove() in ext2fs_extent_delete().
-
-Note that e2fsck is currently unable to spot and fix such corruption in
-pass1.
-
-Signed-off-by: Lukas Czerner <lczerner@redhat.com>
-Reported-by: Nils Bars <nils_bars@t-online.de>
-Addressess: https://bugzilla.redhat.com/show_bug.cgi?id=2068113
----
- lib/ext2fs/extent.c | 8 ++++++++
- 1 file changed, 8 insertions(+)
-
-diff --git a/lib/ext2fs/extent.c b/lib/ext2fs/extent.c
-index b324c7b0..1a206a16 100644
---- a/lib/ext2fs/extent.c
-+++ b/lib/ext2fs/extent.c
-@@ -495,6 +495,10 @@ retry:
- ext2fs_le16_to_cpu(eh->eh_entries);
- newpath->max_entries = ext2fs_le16_to_cpu(eh->eh_max);
-
-+ /* Make sure there is at least one extent present */
-+ if (newpath->left <= 0)
-+ return EXT2_ET_EXTENT_NO_DOWN;
-+
- if (path->left > 0) {
- ix++;
- newpath->end_blk = ext2fs_le32_to_cpu(ix->ei_block);
-@@ -1630,6 +1634,10 @@ errcode_t ext2fs_extent_delete(ext2_extent_handle_t handle, int flags)
-
- cp = path->curr;
-
-+ /* Sanity check before memmove() */
-+ if (path->left < 0)
-+ return EXT2_ET_EXTENT_LEAF_BAD;
-+
- if (path->left) {
- memmove(cp, cp + sizeof(struct ext3_extent_idx),
- path->left * sizeof(struct ext3_extent_idx));
---
-2.25.1
-
diff --git a/meta/recipes-devtools/e2fsprogs/e2fsprogs/ptest.patch b/meta/recipes-devtools/e2fsprogs/e2fsprogs/ptest.patch
index c3e46ce65f..20839b7286 100644
--- a/meta/recipes-devtools/e2fsprogs/e2fsprogs/ptest.patch
+++ b/meta/recipes-devtools/e2fsprogs/e2fsprogs/ptest.patch
@@ -22,7 +22,7 @@ index 8c4d2048..e021af32 100644
@echo "HTREE=y" >> test_one
@echo "QUOTA=y" >> test_one
- @echo "SRCDIR=@srcdir@" >> test_one
-+ @echo "SRCDIR=${prefix}${libdir}/e2fsprogs/ptest/test" >> test_one
++ @echo "SRCDIR=@PTEST_PATH@/test" >> test_one
@echo "DIFF_OPTS=@UNI_DIFF_OPTS@" >> test_one
@echo "SIZEOF_TIME_T=@SIZEOF_TIME_T@" >> test_one
@echo "DD=@DD@" >>test_one
@@ -31,7 +31,7 @@ index 8c4d2048..e021af32 100644
@[ -f test_script ] && chmod u+w test_script || true
@echo "#!/bin/sh" > test_script
- @echo "SRCDIR=@srcdir@" >> test_script
-+ @echo "SRCDIR=${prefix}${libdir}/e2fsprogs/ptest/test" >> test_script
++ @echo "SRCDIR=@PTEST_PATH@/test" >> test_script
@cat $(srcdir)/test_script.in >> test_script
@chmod +x-w test_script
diff --git a/meta/recipes-devtools/e2fsprogs/e2fsprogs/quiet-debugfs.patch b/meta/recipes-devtools/e2fsprogs/e2fsprogs/quiet-debugfs.patch
index 902a369eb0..0a6904208d 100644
--- a/meta/recipes-devtools/e2fsprogs/e2fsprogs/quiet-debugfs.patch
+++ b/meta/recipes-devtools/e2fsprogs/e2fsprogs/quiet-debugfs.patch
@@ -1,4 +1,4 @@
-From 5408b6463ee700a080a15102bccccdeb2615d734 Mon Sep 17 00:00:00 2001
+From 580ef6cae2d353f3aa5d5c52d6614bdc1df50f08 Mon Sep 17 00:00:00 2001
From: Ross Burton <ross.burton@intel.com>
Date: Mon, 23 Dec 2013 13:38:34 +0000
Subject: [PATCH] e2fsprogs: silence debugfs
@@ -14,10 +14,10 @@ Signed-off-by: Ross Burton <ross.burton@intel.com>
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/debugfs/debugfs.c b/debugfs/debugfs.c
-index b67a88bc..76dd5556 100644
+index 9b6321dc..8ebf3ddb 100644
--- a/debugfs/debugfs.c
+++ b/debugfs/debugfs.c
-@@ -2518,7 +2518,7 @@ static int source_file(const char *cmd_file, int ss_idx)
+@@ -2516,7 +2516,7 @@ static int source_file(const char *cmd_file, int ss_idx)
cp = strchr(buf, '\r');
if (cp)
*cp = 0;
diff --git a/meta/recipes-devtools/e2fsprogs/e2fsprogs/run-ptest b/meta/recipes-devtools/e2fsprogs/e2fsprogs/run-ptest
index c97c0377e9..279923db8e 100644
--- a/meta/recipes-devtools/e2fsprogs/e2fsprogs/run-ptest
+++ b/meta/recipes-devtools/e2fsprogs/e2fsprogs/run-ptest
@@ -8,3 +8,4 @@ rm -f *.tmp
rm -f *.ok
rm -f *.failed
rm -f *.log
+cp ../data/test_data.tmp ./
diff --git a/meta/recipes-devtools/e2fsprogs/e2fsprogs_1.46.5.bb b/meta/recipes-devtools/e2fsprogs/e2fsprogs_1.46.5.bb
deleted file mode 100644
index ec48f419c7..0000000000
--- a/meta/recipes-devtools/e2fsprogs/e2fsprogs_1.46.5.bb
+++ /dev/null
@@ -1,141 +0,0 @@
-require e2fsprogs.inc
-
-SRC_URI += "file://remove.ldconfig.call.patch \
- file://run-ptest \
- file://ptest.patch \
- file://mkdir_p.patch \
- file://extents.patch \
- "
-SRC_URI:append:class-native = " \
- file://e2fsprogs-fix-missing-check-for-permission-denied.patch \
- file://quiet-debugfs.patch \
- "
-
-SRCREV = "02540dedd3ddc52c6ae8aaa8a95ce75c3f8be1c0"
-UPSTREAM_CHECK_GITTAGREGEX = "v(?P<pver>\d+\.\d+(\.\d+)*)$"
-
-EXTRA_OECONF += "--libdir=${base_libdir} --sbindir=${base_sbindir} \
- --enable-elf-shlibs --disable-libuuid --disable-uuidd \
- --disable-libblkid --enable-verbose-makecmds \
- --with-crond-dir=no"
-
-EXTRA_OECONF:darwin = "--libdir=${base_libdir} --sbindir=${base_sbindir} --enable-bsd-shlibs"
-
-PACKAGECONFIG ??= ""
-PACKAGECONFIG[fuse] = '--enable-fuse2fs,--disable-fuse2fs,fuse'
-
-# make locale rules sometimes fire, sometimes don't as git doesn't preserve
-# file mktime. Touch the files introducing non-determinism to the build
-do_compile:prepend (){
- find ${S}/po -type f -name "*.po" -exec touch {} +
-}
-
-do_install () {
- oe_runmake 'DESTDIR=${D}' install
- oe_runmake 'DESTDIR=${D}' install-libs
- # We use blkid from util-linux now so remove from here
- rm -f ${D}${base_libdir}/libblkid*
- rm -rf ${D}${includedir}/blkid
- rm -f ${D}${base_libdir}/pkgconfig/blkid.pc
- rm -f ${D}${base_sbindir}/blkid
- rm -f ${D}${base_sbindir}/fsck
- rm -f ${D}${base_sbindir}/findfs
-
- # e2initrd_helper and the pkgconfig files belong in libdir
- if [ ! ${D}${libdir} -ef ${D}${base_libdir} ]; then
- install -d ${D}${libdir}
- mv ${D}${base_libdir}/e2initrd_helper ${D}${libdir}
- mv ${D}${base_libdir}/pkgconfig ${D}${libdir}
- fi
-
- oe_multilib_header ext2fs/ext2_types.h
- install -d ${D}${base_bindir}
- mv ${D}${bindir}/chattr ${D}${base_bindir}/chattr.e2fsprogs
-
- install -v -m 755 ${S}/contrib/populate-extfs.sh ${D}${base_sbindir}/
-
- # Clean host path (build directory) in compile_et, mk_cmds
- sed -i -e "s,\(ET_DIR=.*\)${S}/lib/et\(.*\),\1${datadir}/et\2,g" ${D}${bindir}/compile_et
- sed -i -e "s,\(SS_DIR=.*\)${S}/lib/ss\(.*\),\1${datadir}/ss\2,g" ${D}${bindir}/mk_cmds
-}
-
-# Need to find the right mke2fs.conf file
-e2fsprogs_conf_fixup () {
- for i in mke2fs mkfs.ext2 mkfs.ext3 mkfs.ext4; do
- create_wrapper ${D}${base_sbindir}/$i MKE2FS_CONFIG=${sysconfdir}/mke2fs.conf
- done
-}
-
-do_install:append:class-native() {
- e2fsprogs_conf_fixup
-}
-
-do_install:append:class-nativesdk() {
- e2fsprogs_conf_fixup
-}
-
-do_install:append:class-target() {
- mv ${D}${base_sbindir}/mke2fs ${D}${base_sbindir}/mke2fs.e2fsprogs
- mv ${D}${base_sbindir}/mkfs.ext2 ${D}${base_sbindir}/mkfs.ext2.e2fsprogs
- mv ${D}${base_sbindir}/tune2fs ${D}${base_sbindir}/tune2fs.e2fsprogs
-}
-
-RDEPENDS:e2fsprogs = "e2fsprogs-badblocks e2fsprogs-dumpe2fs"
-RRECOMMENDS:e2fsprogs = "e2fsprogs-mke2fs e2fsprogs-e2fsck"
-
-PACKAGES =+ "e2fsprogs-badblocks e2fsprogs-dumpe2fs e2fsprogs-e2fsck e2fsprogs-e2scrub e2fsprogs-mke2fs e2fsprogs-resize2fs e2fsprogs-tune2fs"
-PACKAGES =+ "libcomerr libss libe2p libext2fs"
-
-FILES:e2fsprogs-dumpe2fs = "${base_sbindir}/dumpe2fs"
-FILES:e2fsprogs-resize2fs = "${base_sbindir}/resize2fs*"
-FILES:e2fsprogs-e2fsck = "${base_sbindir}/e2fsck ${base_sbindir}/fsck.ext*"
-FILES:e2fsprogs-e2scrub = "${base_sbindir}/e2scrub*"
-FILES:e2fsprogs-mke2fs = "${base_sbindir}/mke2fs.e2fsprogs ${base_sbindir}/mkfs.ext* ${sysconfdir}/mke2fs.conf"
-FILES:e2fsprogs-tune2fs = "${base_sbindir}/tune2fs.e2fsprogs ${base_sbindir}/e2label"
-FILES:e2fsprogs-badblocks = "${base_sbindir}/badblocks"
-FILES:libcomerr = "${base_libdir}/libcom_err.so.*"
-FILES:libss = "${base_libdir}/libss.so.*"
-FILES:libe2p = "${base_libdir}/libe2p.so.*"
-FILES:libext2fs = "${libdir}/e2initrd_helper ${base_libdir}/libext2fs.so.*"
-FILES:${PN}-dev += "${datadir}/*/*.awk ${datadir}/*/*.sed ${base_libdir}/*.so ${bindir}/compile_et ${bindir}/mk_cmds"
-
-ALTERNATIVE:${PN} = "chattr"
-ALTERNATIVE_PRIORITY = "100"
-ALTERNATIVE_LINK_NAME[chattr] = "${base_bindir}/chattr"
-ALTERNATIVE_TARGET[chattr] = "${base_bindir}/chattr.e2fsprogs"
-
-ALTERNATIVE:${PN}-doc = "fsck.8"
-ALTERNATIVE_LINK_NAME[fsck.8] = "${mandir}/man8/fsck.8"
-
-ALTERNATIVE:${PN}-mke2fs = "mke2fs mkfs.ext2"
-ALTERNATIVE_LINK_NAME[mke2fs] = "${base_sbindir}/mke2fs"
-ALTERNATIVE_LINK_NAME[mkfs.ext2] = "${base_sbindir}/mkfs.ext2"
-
-ALTERNATIVE:${PN}-tune2fs = "tune2fs"
-ALTERNATIVE_LINK_NAME[tune2fs] = "${base_sbindir}/tune2fs"
-
-RDEPENDS:e2fsprogs-e2scrub = "bash"
-RDEPENDS:${PN}-ptest += "coreutils procps bash bzip2 diffutils perl sed"
-RDEPENDS:${PN}-ptest += "e2fsprogs-badblocks e2fsprogs-dumpe2fs e2fsprogs-e2fsck e2fsprogs-mke2fs e2fsprogs-resize2fs e2fsprogs-tune2fs"
-
-do_compile_ptest() {
- oe_runmake -C ${B}/tests
-}
-
-do_install_ptest() {
- # This file's permissions depends on the host umask so be deterministic
- chmod 0644 ${B}/tests/test_data.tmp
- cp -R --no-dereference --preserve=mode,links -v ${B}/tests ${D}${PTEST_PATH}/test
- cp -R --no-dereference --preserve=mode,links -v ${S}/tests/* ${D}${PTEST_PATH}/test
- sed -e 's!../e2fsck/e2fsck!e2fsck!g' \
- -e 's!../misc/tune2fs!tune2fs!g' -i ${D}${PTEST_PATH}/test/*/expect*
- sed -e 's!../e2fsck/e2fsck!${base_sbindir}/e2fsck!g' -i ${D}${PTEST_PATH}/test/*/script
-
- # Remove various files
- find "${D}${PTEST_PATH}" -type f \
- \( -name 'Makefile' -o -name 'Makefile.in' -o -name '*.o' -o -name '*.c' -o -name '*.h' \)\
- -exec rm -f {} +
-
- install -d ${D}${PTEST_PATH}/lib
- install -m 0644 ${B}/lib/config.h ${D}${PTEST_PATH}/lib/
-}
diff --git a/meta/recipes-devtools/e2fsprogs/e2fsprogs_1.47.0.bb b/meta/recipes-devtools/e2fsprogs/e2fsprogs_1.47.0.bb
new file mode 100644
index 0000000000..940b47c155
--- /dev/null
+++ b/meta/recipes-devtools/e2fsprogs/e2fsprogs_1.47.0.bb
@@ -0,0 +1,148 @@
+require e2fsprogs.inc
+
+SRC_URI += "file://remove.ldconfig.call.patch \
+ file://run-ptest \
+ file://ptest.patch \
+ file://mkdir_p.patch \
+ "
+SRC_URI:append:class-native = " \
+ file://e2fsprogs-fix-missing-check-for-permission-denied.patch \
+ file://quiet-debugfs.patch \
+ "
+
+SRCREV = "f4c9cc4bedacde8408edda3520a32d3842290112"
+UPSTREAM_CHECK_GITTAGREGEX = "v(?P<pver>\d+\.\d+(\.\d+)*)$"
+
+EXTRA_OECONF += "--libdir=${base_libdir} --sbindir=${base_sbindir} \
+ --enable-elf-shlibs --disable-libuuid --disable-uuidd \
+ --disable-libblkid --enable-verbose-makecmds \
+ --enable-largefile --with-crond-dir=no"
+
+EXTRA_OECONF:darwin = "--libdir=${base_libdir} --sbindir=${base_sbindir} --enable-bsd-shlibs"
+
+CFLAGS:append:riscv32 = " -D_FILE_OFFSET_BITS=64"
+PACKAGECONFIG ??= ""
+PACKAGECONFIG[fuse] = '--enable-fuse2fs,--disable-fuse2fs,fuse'
+
+# make locale rules sometimes fire, sometimes don't as git doesn't preserve
+# file mktime. Touch the files introducing non-determinism to the build
+do_compile:prepend (){
+ find ${S}/po -type f -name "*.po" -exec touch {} +
+}
+
+do_install () {
+ oe_runmake 'DESTDIR=${D}' install
+ oe_runmake 'DESTDIR=${D}' install-libs
+ # We use blkid from util-linux now so remove from here
+ rm -f ${D}${base_libdir}/libblkid*
+ rm -rf ${D}${includedir}/blkid
+ rm -f ${D}${base_libdir}/pkgconfig/blkid.pc
+ rm -f ${D}${base_sbindir}/blkid
+ rm -f ${D}${base_sbindir}/fsck
+ rm -f ${D}${base_sbindir}/findfs
+
+ # e2initrd_helper and the pkgconfig files belong in libdir
+ if [ ! ${D}${libdir} -ef ${D}${base_libdir} ]; then
+ install -d ${D}${libdir}
+ mv ${D}${base_libdir}/e2initrd_helper ${D}${libdir}
+ mv ${D}${base_libdir}/pkgconfig ${D}${libdir}
+ fi
+
+ oe_multilib_header ext2fs/ext2_types.h
+ install -d ${D}${base_bindir}
+ mv ${D}${bindir}/chattr ${D}${base_bindir}/chattr.e2fsprogs
+ mv ${D}${bindir}/lsattr ${D}${base_bindir}/lsattr.e2fsprogs
+
+ install -v -m 755 ${S}/contrib/populate-extfs.sh ${D}${base_sbindir}/
+
+ # Clean host path (build directory) in compile_et, mk_cmds
+ sed -i -e "s,\(ET_DIR=.*\)${S}/lib/et\(.*\),\1${datadir}/et\2,g" ${D}${bindir}/compile_et
+ sed -i -e "s,\(SS_DIR=.*\)${S}/lib/ss\(.*\),\1${datadir}/ss\2,g" ${D}${bindir}/mk_cmds
+}
+
+# Need to find the right mke2fs.conf file
+e2fsprogs_conf_fixup () {
+ for i in mke2fs mkfs.ext2 mkfs.ext3 mkfs.ext4; do
+ create_wrapper ${D}${base_sbindir}/$i MKE2FS_CONFIG=${sysconfdir}/mke2fs.conf
+ done
+}
+
+do_install:append:class-native() {
+ e2fsprogs_conf_fixup
+}
+
+do_install:append:class-nativesdk() {
+ e2fsprogs_conf_fixup
+}
+
+do_install:append:class-target() {
+ mv ${D}${base_sbindir}/mke2fs ${D}${base_sbindir}/mke2fs.e2fsprogs
+ mv ${D}${base_sbindir}/mkfs.ext2 ${D}${base_sbindir}/mkfs.ext2.e2fsprogs
+ mv ${D}${base_sbindir}/tune2fs ${D}${base_sbindir}/tune2fs.e2fsprogs
+}
+
+RDEPENDS:e2fsprogs = "e2fsprogs-badblocks e2fsprogs-dumpe2fs"
+RRECOMMENDS:e2fsprogs = "e2fsprogs-mke2fs e2fsprogs-e2fsck"
+
+PACKAGES =+ "e2fsprogs-badblocks e2fsprogs-dumpe2fs e2fsprogs-e2fsck e2fsprogs-e2scrub e2fsprogs-mke2fs e2fsprogs-resize2fs e2fsprogs-tune2fs"
+PACKAGES =+ "libcomerr libss libe2p libext2fs"
+
+FILES:e2fsprogs-dumpe2fs = "${base_sbindir}/dumpe2fs"
+FILES:e2fsprogs-resize2fs = "${base_sbindir}/resize2fs*"
+FILES:e2fsprogs-e2fsck = "${base_sbindir}/e2fsck ${base_sbindir}/fsck.ext*"
+FILES:e2fsprogs-e2scrub = "${base_sbindir}/e2scrub*"
+FILES:e2fsprogs-mke2fs = "${base_sbindir}/mke2fs.e2fsprogs ${base_sbindir}/mkfs.ext* ${sysconfdir}/mke2fs.conf"
+FILES:e2fsprogs-tune2fs = "${base_sbindir}/tune2fs.e2fsprogs ${base_sbindir}/e2label"
+FILES:e2fsprogs-badblocks = "${base_sbindir}/badblocks"
+FILES:libcomerr = "${base_libdir}/libcom_err.so.*"
+FILES:libss = "${base_libdir}/libss.so.*"
+FILES:libe2p = "${base_libdir}/libe2p.so.*"
+FILES:libext2fs = "${libdir}/e2initrd_helper ${base_libdir}/libext2fs.so.*"
+FILES:${PN}-dev += "${datadir}/*/*.awk ${datadir}/*/*.sed ${base_libdir}/*.so ${bindir}/compile_et ${bindir}/mk_cmds"
+
+ALTERNATIVE:${PN} = "chattr lsattr"
+ALTERNATIVE_PRIORITY = "100"
+ALTERNATIVE_LINK_NAME[chattr] = "${base_bindir}/chattr"
+ALTERNATIVE_TARGET[chattr] = "${base_bindir}/chattr.e2fsprogs"
+ALTERNATIVE_LINK_NAME[lsattr] = "${base_bindir}/lsattr"
+ALTERNATIVE_TARGET[lsattr] = "${base_bindir}/lsattr.e2fsprogs"
+
+ALTERNATIVE:${PN}-doc = "fsck.8"
+ALTERNATIVE_LINK_NAME[fsck.8] = "${mandir}/man8/fsck.8"
+
+ALTERNATIVE:${PN}-mke2fs = "mke2fs mkfs.ext2"
+ALTERNATIVE_LINK_NAME[mke2fs] = "${base_sbindir}/mke2fs"
+ALTERNATIVE_LINK_NAME[mkfs.ext2] = "${base_sbindir}/mkfs.ext2"
+
+ALTERNATIVE:${PN}-tune2fs = "tune2fs"
+ALTERNATIVE_LINK_NAME[tune2fs] = "${base_sbindir}/tune2fs"
+
+RDEPENDS:e2fsprogs-e2scrub = "bash"
+RDEPENDS:${PN}-ptest += "coreutils procps bash bzip2 diffutils perl sed"
+RDEPENDS:${PN}-ptest += "e2fsprogs-badblocks e2fsprogs-dumpe2fs e2fsprogs-e2fsck e2fsprogs-mke2fs e2fsprogs-resize2fs e2fsprogs-tune2fs"
+
+do_compile_ptest() {
+ oe_runmake -C ${B}/tests
+}
+
+do_install_ptest() {
+ # This file's permissions depends on the host umask so be deterministic
+ chmod 0644 ${B}/tests/test_data.tmp
+ cp -R --no-dereference --preserve=mode,links -v ${B}/tests ${D}${PTEST_PATH}/test
+ cp -R --no-dereference --preserve=mode,links -v ${S}/tests/* ${D}${PTEST_PATH}/test
+ sed -e 's!../e2fsck/e2fsck!e2fsck!g' \
+ -e 's!../misc/tune2fs!tune2fs!g' -i ${D}${PTEST_PATH}/test/*/expect*
+ sed -e 's!../e2fsck/e2fsck!${base_sbindir}/e2fsck!g' -i ${D}${PTEST_PATH}/test/*/script
+ sed -i "s#@PTEST_PATH@#${PTEST_PATH}#g" ${D}${PTEST_PATH}/test/test_script ${D}${PTEST_PATH}/test/test_one
+
+ # Remove various files
+ find "${D}${PTEST_PATH}" -type f \
+ \( -name 'Makefile' -o -name 'Makefile.in' -o -name '*.o' -o -name '*.c' -o -name '*.h' \)\
+ -exec rm -f {} +
+
+ install -d ${D}${PTEST_PATH}/lib
+ install -m 0644 ${B}/lib/config.h ${D}${PTEST_PATH}/lib/
+
+ install -d ${D}${PTEST_PATH}/data
+ install -m 0644 ${B}/tests/test_data.tmp ${D}${PTEST_PATH}/data/
+}
diff --git a/meta/recipes-devtools/elfutils/elfutils_0.187.bb b/meta/recipes-devtools/elfutils/elfutils_0.187.bb
deleted file mode 100644
index 31983dfd6b..0000000000
--- a/meta/recipes-devtools/elfutils/elfutils_0.187.bb
+++ /dev/null
@@ -1,166 +0,0 @@
-SUMMARY = "Utilities and libraries for handling compiled object files"
-HOMEPAGE = "https://sourceware.org/elfutils"
-DESCRIPTION = "elfutils is a collection of utilities and libraries to read, create and modify ELF binary files, find and handle DWARF debug data, symbols, thread state and stacktraces for processes and core files on GNU/Linux."
-SECTION = "base"
-LICENSE = "GPL-2.0-only & GPL-2.0-or-later & LGPL-3.0-or-later & GPL-3.0-or-later"
-LIC_FILES_CHKSUM = "file://COPYING;md5=d32239bcb673463ab874e80d47fae504 \
- file://debuginfod/debuginfod-client.c;endline=28;md5=f0a7c3170776866ee94e8f9225a6ad79 \
- "
-DEPENDS = "zlib virtual/libintl"
-DEPENDS:append:libc-musl = " argp-standalone fts musl-obstack "
-# The Debian patches below are from:
-# http://ftp.de.debian.org/debian/pool/main/e/elfutils/elfutils_0.176-1.debian.tar.xz
-SRC_URI = "https://sourceware.org/elfutils/ftp/${PV}/${BP}.tar.bz2 \
- file://0001-dso-link-change.patch \
- file://0002-Fix-elf_cvt_gunhash-if-dest-and-src-are-same.patch \
- file://0003-fixheadercheck.patch \
- file://0006-Fix-build-on-aarch64-musl.patch \
- file://0001-libasm-may-link-with-libbz2-if-found.patch \
- file://0001-libelf-elf_end.c-check-data_list.data.d.d_buf-before.patch \
- file://0001-skip-the-test-when-gcc-not-deployed.patch \
- file://run-ptest \
- file://ptest.patch \
- file://0001-tests-Makefile.am-compile-test_nlist-with-standard-C.patch \
- "
-SRC_URI:append:libc-musl = " \
- file://0003-musl-utils.patch \
- file://0015-config-eu.am-do-not-use-Werror.patch \
- "
-SRC_URI[sha256sum] = "e70b0dfbe610f90c4d1fe0d71af142a4e25c3c4ef9ebab8d2d72b65159d454c8"
-
-inherit autotools gettext ptest pkgconfig
-PTEST_ENABLED:libc-musl = "0"
-
-EXTRA_OECONF = "--program-prefix=eu-"
-
-BUILD_CFLAGS += "-Wno-error=stringop-overflow"
-
-DEPENDS_BZIP2 = "bzip2-replacement-native"
-DEPENDS_BZIP2:class-target = "bzip2"
-
-PACKAGECONFIG ??= "${@bb.utils.contains('DISTRO_FEATURES', 'debuginfod', 'debuginfod libdebuginfod', '', d)}"
-PACKAGECONFIG[bzip2] = "--with-bzlib,--without-bzlib,${DEPENDS_BZIP2}"
-PACKAGECONFIG[xz] = "--with-lzma,--without-lzma,xz"
-PACKAGECONFIG[zstd] = "--with-zstd,--without-zstd,zstd"
-PACKAGECONFIG[libdebuginfod] = "--enable-libdebuginfod,--disable-libdebuginfod,curl"
-PACKAGECONFIG[debuginfod] = "--enable-debuginfod,--disable-debuginfod,libarchive sqlite3 libmicrohttpd"
-
-RDEPENDS:${PN}-ptest += "libasm libelf bash make coreutils ${PN}-binutils iproute2-ss bsdtar gcc-symlinks binutils-symlinks libgcc-dev"
-
-EXTRA_OECONF:append:class-target = " --disable-tests-rpath"
-
-RDEPENDS:${PN}-ptest:append:libc-glibc = " glibc-utils glibc-dbg glibc-dev"
-INSANE_SKIP:${PN}-ptest = "debug-deps dev-deps"
-
-do_compile_ptest() {
- cd ${B}/tests
- oe_runmake buildtest-TESTS oecheck
-}
-
-do_install_ptest() {
- if [ ${PTEST_ENABLED} = "1" ]; then
- # copy the files which needed by the cases
- TEST_FILES="strip strip.o addr2line elfcmp objdump readelf size.o nm.o nm elflint elfcompress elfclassify stack unstrip"
- install -d -m 755 ${D}${PTEST_PATH}/src
- install -d -m 755 ${D}${PTEST_PATH}/libelf
- install -d -m 755 ${D}${PTEST_PATH}/libdw
- install -d -m 755 ${D}${PTEST_PATH}/libdwfl
- install -d -m 755 ${D}${PTEST_PATH}/libdwelf
- install -d -m 755 ${D}${PTEST_PATH}/libasm
- install -d -m 755 ${D}${PTEST_PATH}/libcpu
- install -d -m 755 ${D}${PTEST_PATH}/libebl
- for test_file in ${TEST_FILES}; do
- if [ -f ${B}/src/${test_file} ]; then
- cp -r ${B}/src/${test_file} ${D}${PTEST_PATH}/src
- fi
- done
- cp ${D}${libdir}/libelf-${PV}.so ${D}${PTEST_PATH}/libelf/libelf.so
- cp ${D}${libdir}/libdw-${PV}.so ${D}${PTEST_PATH}/libdw/libdw.so
- cp ${D}${libdir}/libasm-${PV}.so ${D}${PTEST_PATH}/libasm/libasm.so
- cp ${B}/libcpu/libcpu.a ${D}${PTEST_PATH}/libcpu/
- cp ${B}/libebl/libebl.a ${D}${PTEST_PATH}/libebl/
- cp ${S}/libelf/*.h ${D}${PTEST_PATH}/libelf/
- cp ${S}/libdw/*.h ${D}${PTEST_PATH}/libdw/
- cp ${S}/libdwfl/*.h ${D}${PTEST_PATH}/libdwfl/
- cp ${S}/libdwelf/*.h ${D}${PTEST_PATH}/libdwelf/
- cp ${S}/libasm/*.h ${D}${PTEST_PATH}/libasm/
- cp -r ${S}/tests/ ${D}${PTEST_PATH}
- cp -r ${B}/tests/* ${D}${PTEST_PATH}/tests
- cp -r ${B}/config.h ${D}${PTEST_PATH}
- cp -r ${B}/backends ${D}${PTEST_PATH}
- cp -r ${B}/debuginfod ${D}${PTEST_PATH}
- sed -i '/^Makefile:/c Makefile:' ${D}${PTEST_PATH}/tests/Makefile
- find ${D}${PTEST_PATH} -type f -name *.[hoc] | xargs -i rm {}
- fi
-}
-
-EXTRA_OEMAKE:class-native = ""
-EXTRA_OEMAKE:class-nativesdk = ""
-
-BBCLASSEXTEND = "native nativesdk"
-
-# Package utilities separately
-PACKAGES =+ "${PN}-binutils libelf libasm libdw libdebuginfod"
-
-# Shared libraries are licensed GPL-2.0-only or GPL-3.0-or-later, binaries
-# GPL-3.0-or-later. According to NEWS file:
-# "The license is now GPLv2/LGPLv3+ for the libraries and GPLv3+ for stand-alone
-# programs. There is now also a formal CONTRIBUTING document describing how to
-# submit patches."
-LICENSE:${PN}-binutils = "GPL-3.0-or-later"
-LICENSE:${PN} = "GPL-3.0-or-later"
-LICENSE:libelf = "GPL-2.0-only | LGPL-3.0-or-later"
-LICENSE:libasm = "GPL-2.0-only | LGPL-3.0-or-later"
-LICENSE:libdw = "GPL-2.0-only | LGPL-3.0-or-later"
-LICENSE:libdebuginfod = "GPL-2.0-or-later | LGPL-3.0-or-later"
-
-FILES:${PN}-binutils = "\
- ${bindir}/eu-addr2line \
- ${bindir}/eu-ld \
- ${bindir}/eu-nm \
- ${bindir}/eu-readelf \
- ${bindir}/eu-size \
- ${bindir}/eu-strip"
-
-FILES:libelf = "${libdir}/libelf-${PV}.so ${libdir}/libelf.so.*"
-FILES:libasm = "${libdir}/libasm-${PV}.so ${libdir}/libasm.so.*"
-FILES:libdw = "${libdir}/libdw-${PV}.so ${libdir}/libdw.so.* ${libdir}/elfutils/lib*"
-FILES:libdebuginfod = "${libdir}/libdebuginfod-${PV}.so ${libdir}/libdebuginfod.so.*"
-# Some packages have the version preceeding the .so instead properly
-# versioned .so.<version>, so we need to reorder and repackage.
-#FILES:${PN} += "${libdir}/*-${PV}.so ${base_libdir}/*-${PV}.so"
-#FILES_SOLIBSDEV = "${libdir}/libasm.so ${libdir}/libdw.so ${libdir}/libelf.so"
-
-# The package contains symlinks that trip up insane
-INSANE_SKIP:${MLPREFIX}libdw = "dev-so"
-# The nlist binary in the tests uses explicitly minimal compiler flags
-INSANE_SKIP:${PN}-ptest += "ldflags"
-
-# avoid stripping some generated binaries otherwise some of the tests such as test-nlist,
-# run-strip-reloc.sh, run-strip-strmerge.sh and so on will fail
-INHIBIT_PACKAGE_STRIP_FILES = "\
- ${PKGD}${PTEST_PATH}/tests/test-nlist \
- ${PKGD}${PTEST_PATH}/tests/elfstrmerge \
- ${PKGD}${PTEST_PATH}/tests/backtrace-child \
- ${PKGD}${PTEST_PATH}/tests/backtrace-data \
- ${PKGD}${PTEST_PATH}/tests/backtrace-dwarf \
- ${PKGD}${PTEST_PATH}/tests/deleted \
- ${PKGD}${PTEST_PATH}/tests/dwfllines \
- ${PKGD}${PTEST_PATH}/src/strip \
- ${PKGD}${PTEST_PATH}/src/addr2line \
- ${PKGD}${PTEST_PATH}/src/elfcmp \
- ${PKGD}${PTEST_PATH}/src/objdump \
- ${PKGD}${PTEST_PATH}/src/readelf \
- ${PKGD}${PTEST_PATH}/src/nm \
- ${PKGD}${PTEST_PATH}/src/elflint \
- ${PKGD}${PTEST_PATH}/src/elfclassify \
- ${PKGD}${PTEST_PATH}/src/stack \
- ${PKGD}${PTEST_PATH}/src/unstrip \
- ${PKGD}${PTEST_PATH}/libelf/libelf.so \
- ${PKGD}${PTEST_PATH}/libdw/libdw.so \
- ${PKGD}${PTEST_PATH}/libasm/libasm.so \
- ${PKGD}${PTEST_PATH}/backends/libebl_i386.so \
- ${PKGD}${PTEST_PATH}/backends/libebl_x86_64.so \
-"
-
-PRIVATE_LIBS:${PN}-ptest = "libdw.so.1 libelf.so.1 libasm.so.1 libdebuginfod.so.1"
diff --git a/meta/recipes-devtools/elfutils/elfutils_0.191.bb b/meta/recipes-devtools/elfutils/elfutils_0.191.bb
new file mode 100644
index 0000000000..c4d872430b
--- /dev/null
+++ b/meta/recipes-devtools/elfutils/elfutils_0.191.bb
@@ -0,0 +1,177 @@
+SUMMARY = "Utilities and libraries for handling compiled object files"
+HOMEPAGE = "https://sourceware.org/elfutils"
+DESCRIPTION = "elfutils is a collection of utilities and libraries to read, create and modify ELF binary files, find and handle DWARF debug data, symbols, thread state and stacktraces for processes and core files on GNU/Linux."
+SECTION = "base"
+LICENSE = "( GPL-2.0-or-later | LGPL-3.0-or-later ) & GPL-3.0-or-later"
+LIC_FILES_CHKSUM = "file://COPYING;md5=d32239bcb673463ab874e80d47fae504 \
+ file://debuginfod/debuginfod-client.c;endline=28;md5=f0a7c3170776866ee94e8f9225a6ad79 \
+ "
+DEPENDS = "zlib virtual/libintl"
+DEPENDS:append:libc-musl = " argp-standalone fts musl-legacy-error musl-obstack"
+# The Debian patches below are from:
+# http://ftp.de.debian.org/debian/pool/main/e/elfutils/elfutils_0.176-1.debian.tar.xz
+SRC_URI = "https://sourceware.org/elfutils/ftp/${PV}/${BP}.tar.bz2 \
+ file://run-ptest \
+ file://0001-dso-link-change.patch \
+ file://0002-Fix-elf_cvt_gunhash-if-dest-and-src-are-same.patch \
+ file://0003-fixheadercheck.patch \
+ file://0006-Fix-build-on-aarch64-musl.patch \
+ file://0001-libasm-may-link-with-libbz2-if-found.patch \
+ file://0001-libelf-elf_end.c-check-data_list.data.d.d_buf-before.patch \
+ file://0001-skip-the-test-when-gcc-not-deployed.patch \
+ file://ptest.patch \
+ file://0001-tests-Makefile.am-compile-test_nlist-with-standard-C.patch \
+ file://0001-debuginfod-Remove-unused-variable.patch \
+ file://0001-srcfiles-fix-unused-variable-BUFFER_SIZE.patch \
+ "
+SRC_URI:append:libc-musl = " \
+ file://0003-musl-utils.patch \
+ "
+SRC_URI[sha256sum] = "df76db71366d1d708365fc7a6c60ca48398f14367eb2b8954efc8897147ad871"
+
+inherit autotools gettext ptest pkgconfig
+
+EXTRA_OECONF = "--program-prefix=eu-"
+
+# Only used at runtime for make check but we want deterministic makefiles for ptest so hardcode
+CACHED_CONFIGUREVARS += "ac_cv_prog_HAVE_BUNZIP2=yes"
+
+BUILD_CFLAGS += "-Wno-error=stringop-overflow"
+
+DEPENDS_BZIP2 = "bzip2-replacement-native"
+DEPENDS_BZIP2:class-target = "bzip2"
+
+PACKAGECONFIG ??= "${@bb.utils.contains('DISTRO_FEATURES', 'debuginfod', 'debuginfod libdebuginfod', '', d)} \
+ ${@bb.utils.contains('DISTRO_FEATURES', 'minidebuginfo', 'xz', '', d)} \
+ "
+PACKAGECONFIG[bzip2] = "--with-bzlib,--without-bzlib,${DEPENDS_BZIP2}"
+PACKAGECONFIG[xz] = "--with-lzma,--without-lzma,xz"
+PACKAGECONFIG[zstd] = "--with-zstd,--without-zstd,zstd"
+PACKAGECONFIG[libdebuginfod] = "--enable-libdebuginfod,--disable-libdebuginfod,curl"
+PACKAGECONFIG[debuginfod] = "--enable-debuginfod,--disable-debuginfod,libarchive sqlite3 libmicrohttpd"
+
+RDEPENDS:${PN}-ptest += "libasm libelf bash make coreutils ${PN}-binutils iproute2-ss bsdtar gcc-symlinks binutils-symlinks libgcc-dev"
+
+EXTRA_OECONF:append:class-target = " --disable-tests-rpath"
+
+# symver functions not currently supported on microblaze
+EXTRA_OECONF:append:class-target:microblaze = " --disable-symbol-versioning"
+
+RDEPENDS:${PN}-ptest:append:libc-glibc = " glibc-utils glibc-dbg glibc-dev"
+INSANE_SKIP:${PN}-ptest = "debug-deps dev-deps"
+
+do_compile_ptest() {
+ cd ${B}/tests
+ oe_runmake buildtest-TESTS oecheck
+}
+PTEST_PARALLEL_MAKE = ""
+
+do_install_ptest() {
+ if [ ${PTEST_ENABLED} = "1" ]; then
+ # copy the files which needed by the cases
+ TEST_FILES="strip strip.o addr2line elfcmp objdump readelf size.o nm.o nm elflint elfcompress elfclassify stack unstrip srcfiles"
+ install -d -m 755 ${D}${PTEST_PATH}/src
+ install -d -m 755 ${D}${PTEST_PATH}/lib
+ install -d -m 755 ${D}${PTEST_PATH}/libelf
+ install -d -m 755 ${D}${PTEST_PATH}/libdw
+ install -d -m 755 ${D}${PTEST_PATH}/libdwfl
+ install -d -m 755 ${D}${PTEST_PATH}/libdwelf
+ install -d -m 755 ${D}${PTEST_PATH}/libasm
+ install -d -m 755 ${D}${PTEST_PATH}/libcpu
+ install -d -m 755 ${D}${PTEST_PATH}/libebl
+ for test_file in ${TEST_FILES}; do
+ if [ -f ${B}/src/${test_file} ]; then
+ cp -r ${B}/src/${test_file} ${D}${PTEST_PATH}/src
+ fi
+ done
+ cp ${D}${libdir}/libelf-${PV}.so ${D}${PTEST_PATH}/libelf/libelf.so
+ cp ${D}${libdir}/libdw-${PV}.so ${D}${PTEST_PATH}/libdw/libdw.so
+ cp ${D}${libdir}/libasm-${PV}.so ${D}${PTEST_PATH}/libasm/libasm.so
+ cp ${B}/libcpu/libcpu.a ${D}${PTEST_PATH}/libcpu/
+ cp ${B}/libebl/libebl.a ${D}${PTEST_PATH}/libebl/
+ cp ${B}/lib/libeu.a ${D}${PTEST_PATH}/lib/
+ cp ${S}/libelf/*.h ${D}${PTEST_PATH}/libelf/
+ cp ${S}/libdw/*.h ${D}${PTEST_PATH}/libdw/
+ cp ${S}/libdwfl/*.h ${D}${PTEST_PATH}/libdwfl/
+ cp ${S}/libdwelf/*.h ${D}${PTEST_PATH}/libdwelf/
+ cp ${S}/libasm/*.h ${D}${PTEST_PATH}/libasm/
+ cp -r ${S}/tests/ ${D}${PTEST_PATH}
+ cp -r ${B}/tests/* ${D}${PTEST_PATH}/tests
+ cp -r ${B}/config.h ${D}${PTEST_PATH}
+ cp -r ${B}/backends ${D}${PTEST_PATH}
+ cp -r ${B}/debuginfod ${D}${PTEST_PATH}
+ sed -i '/^Makefile:/c Makefile:' ${D}${PTEST_PATH}/tests/Makefile
+ find ${D}${PTEST_PATH} -type f -name *.[hoc] | xargs -i rm {}
+ fi
+}
+
+EXTRA_OEMAKE:class-native = ""
+EXTRA_OEMAKE:class-nativesdk = ""
+
+BBCLASSEXTEND = "native nativesdk"
+
+# Package utilities and libraries are listed separately
+PACKAGES =+ "${PN}-binutils libelf libasm libdw libdebuginfod"
+
+# According to the upstream website https://sourceware.org/elfutils, the latest
+# license policy is as follows:
+# "License. The libraries and backends are dual GPLv2+/LGPLv3+. The utilities
+# are GPLv3+."
+LICENSE:${PN}-binutils = "GPL-3.0-or-later"
+LICENSE:${PN} = "GPL-3.0-or-later"
+LICENSE:libelf = "GPL-2.0-or-later | LGPL-3.0-or-later"
+LICENSE:libasm = "GPL-2.0-or-later | LGPL-3.0-or-later"
+LICENSE:libdw = "GPL-2.0-or-later | LGPL-3.0-or-later"
+LICENSE:libdebuginfod = "GPL-2.0-or-later | LGPL-3.0-or-later"
+
+FILES:${PN}-binutils = "\
+ ${bindir}/eu-addr2line \
+ ${bindir}/eu-ld \
+ ${bindir}/eu-nm \
+ ${bindir}/eu-readelf \
+ ${bindir}/eu-size \
+ ${bindir}/eu-strip"
+
+FILES:libelf = "${libdir}/libelf-${PV}.so ${libdir}/libelf.so.*"
+FILES:libasm = "${libdir}/libasm-${PV}.so ${libdir}/libasm.so.*"
+FILES:libdw = "${libdir}/libdw-${PV}.so ${libdir}/libdw.so.* ${libdir}/elfutils/lib*"
+FILES:libdebuginfod = "${libdir}/libdebuginfod-${PV}.so ${libdir}/libdebuginfod.so.*"
+# Some packages have the version preceeding the .so instead properly
+# versioned .so.<version>, so we need to reorder and repackage.
+#FILES:${PN} += "${libdir}/*-${PV}.so ${base_libdir}/*-${PV}.so"
+#FILES_SOLIBSDEV = "${libdir}/libasm.so ${libdir}/libdw.so ${libdir}/libelf.so"
+
+# The package contains symlinks that trip up insane
+INSANE_SKIP:${MLPREFIX}libdw = "dev-so"
+# The nlist binary in the tests uses explicitly minimal compiler flags
+INSANE_SKIP:${PN}-ptest += "ldflags"
+
+# avoid stripping some generated binaries otherwise some of the tests such as test-nlist,
+# run-strip-reloc.sh, run-strip-strmerge.sh and so on will fail
+INHIBIT_PACKAGE_STRIP_FILES = "\
+ ${PKGD}${PTEST_PATH}/tests/test-nlist \
+ ${PKGD}${PTEST_PATH}/tests/elfstrmerge \
+ ${PKGD}${PTEST_PATH}/tests/backtrace-child \
+ ${PKGD}${PTEST_PATH}/tests/backtrace-data \
+ ${PKGD}${PTEST_PATH}/tests/backtrace-dwarf \
+ ${PKGD}${PTEST_PATH}/tests/deleted \
+ ${PKGD}${PTEST_PATH}/tests/dwfllines \
+ ${PKGD}${PTEST_PATH}/src/strip \
+ ${PKGD}${PTEST_PATH}/src/addr2line \
+ ${PKGD}${PTEST_PATH}/src/elfcmp \
+ ${PKGD}${PTEST_PATH}/src/objdump \
+ ${PKGD}${PTEST_PATH}/src/readelf \
+ ${PKGD}${PTEST_PATH}/src/nm \
+ ${PKGD}${PTEST_PATH}/src/elflint \
+ ${PKGD}${PTEST_PATH}/src/elfclassify \
+ ${PKGD}${PTEST_PATH}/src/stack \
+ ${PKGD}${PTEST_PATH}/src/unstrip \
+ ${PKGD}${PTEST_PATH}/src/srcfiles \
+ ${PKGD}${PTEST_PATH}/libelf/libelf.so \
+ ${PKGD}${PTEST_PATH}/libdw/libdw.so \
+ ${PKGD}${PTEST_PATH}/libasm/libasm.so \
+ ${PKGD}${PTEST_PATH}/backends/libebl_i386.so \
+ ${PKGD}${PTEST_PATH}/backends/libebl_x86_64.so \
+"
+
+PRIVATE_LIBS:${PN}-ptest = "libdw.so.1 libelf.so.1 libasm.so.1 libdebuginfod.so.1"
diff --git a/meta/recipes-devtools/elfutils/files/0001-debuginfod-Remove-unused-variable.patch b/meta/recipes-devtools/elfutils/files/0001-debuginfod-Remove-unused-variable.patch
new file mode 100644
index 0000000000..81f49dbf21
--- /dev/null
+++ b/meta/recipes-devtools/elfutils/files/0001-debuginfod-Remove-unused-variable.patch
@@ -0,0 +1,34 @@
+From c3502140e51886bffc6ae5cd256308e40e0cbb78 Mon Sep 17 00:00:00 2001
+From: Khem Raj <raj.khem@gmail.com>
+Date: Sat, 9 Mar 2024 15:52:32 -0800
+Subject: [PATCH] debuginfod: Remove unused variable
+
+Recent commit acd9525e9 has removed all references to max_fds
+therefore remove it, moreover clang18 is happier
+
+| ../../elfutils-0.191/debuginfod/debuginfod.cxx:1448:8: error: private field 'max_fds' is not used [-Werror,-Wunused-private-field]
+| 1448 | long max_fds;
+| | ^
+| 1 error generated.
+
+Upstream-Status: Submitted [https://sourceware.org/pipermail/elfutils-devel/2024q1/006900.html]
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+---
+ debuginfod/debuginfod.cxx | 1 -
+ 1 file changed, 1 deletion(-)
+
+diff --git a/debuginfod/debuginfod.cxx b/debuginfod/debuginfod.cxx
+index 560880f2..72617848 100644
+--- a/debuginfod/debuginfod.cxx
++++ b/debuginfod/debuginfod.cxx
+@@ -1445,7 +1445,6 @@ private:
+
+ map<key,fdcache_entry> entries; // optimized for lookup
+ time_t last_cleaning;
+- long max_fds;
+ long max_mbs;
+
+ public:
+--
+2.44.0
+
diff --git a/meta/recipes-devtools/elfutils/files/0001-dso-link-change.patch b/meta/recipes-devtools/elfutils/files/0001-dso-link-change.patch
index 6acc036406..4f7539bef2 100644
--- a/meta/recipes-devtools/elfutils/files/0001-dso-link-change.patch
+++ b/meta/recipes-devtools/elfutils/files/0001-dso-link-change.patch
@@ -1,4 +1,4 @@
-From c5fb59ac0819b5b6d8244c613cbcf92cb09840c1 Mon Sep 17 00:00:00 2001
+From 63070df4b0dc7af37a720915b5e6494204463c9a Mon Sep 17 00:00:00 2001
From: Hongxu Jia <hongxu.jia@windriver.com>
Date: Tue, 15 Aug 2017 17:10:57 +0800
Subject: [PATCH] dso link change
@@ -16,31 +16,30 @@ more details.
Rebase to 0.170
Signed-off-by: Hongxu Jia <hongxu.jia@windriver.com>
-
---
src/Makefile.am | 2 +-
tests/Makefile.am | 2 +-
2 files changed, 2 insertions(+), 2 deletions(-)
diff --git a/src/Makefile.am b/src/Makefile.am
-index 88d0ac8..c28d81f 100644
+index 1d592d4..853eda4 100644
--- a/src/Makefile.am
+++ b/src/Makefile.am
-@@ -45,7 +45,7 @@ libdw = ../libdw/libdw.a -lz $(zip_LIBS) $(libelf) -ldl -lpthread
- libelf = ../libelf/libelf.a -lz
+@@ -50,7 +50,7 @@ libdebuginfod =
+ endif
else
libasm = ../libasm/libasm.so
-libdw = ../libdw/libdw.so
+libdw = ../libdw/libdw.so $(zip_LIBS) $(libelf) $(libebl) -ldl
libelf = ../libelf/libelf.so
- endif
- libebl = ../libebl/libebl.a ../backends/libebl_backends.a ../libcpu/libcpu.a
+ if LIBDEBUGINFOD
+ libdebuginfod = ../debuginfod/libdebuginfod.so
diff --git a/tests/Makefile.am b/tests/Makefile.am
-index c145720..72afd0e 100644
+index 9141074..ee49d07 100644
--- a/tests/Makefile.am
+++ b/tests/Makefile.am
-@@ -554,7 +554,7 @@ libdw = ../libdw/libdw.a -lz $(zip_LIBS) $(libelf) $(libebl) -ldl -lpthread
- libelf = ../libelf/libelf.a -lz
+@@ -694,7 +694,7 @@ libdw = ../libdw/libdw.a -lz $(zip_LIBS) $(libelf) $(libebl) -ldl -lpthread
+ libelf = ../libelf/libelf.a -lz $(zstd_LIBS)
libasm = ../libasm/libasm.a
else
-libdw = ../libdw/libdw.so
diff --git a/meta/recipes-devtools/elfutils/files/0001-libasm-may-link-with-libbz2-if-found.patch b/meta/recipes-devtools/elfutils/files/0001-libasm-may-link-with-libbz2-if-found.patch
index 09c9d3ea24..a9f5e030fc 100644
--- a/meta/recipes-devtools/elfutils/files/0001-libasm-may-link-with-libbz2-if-found.patch
+++ b/meta/recipes-devtools/elfutils/files/0001-libasm-may-link-with-libbz2-if-found.patch
@@ -1,4 +1,4 @@
-From ed1975deeaa47f98d212fd144c8bda075b1a5d36 Mon Sep 17 00:00:00 2001
+From 46d9d889a07fc9f9f089f800e5c0e895889c44ae Mon Sep 17 00:00:00 2001
From: Khem Raj <raj.khem@gmail.com>
Date: Wed, 4 Oct 2017 22:30:46 -0700
Subject: [PATCH] libasm may link with libbz2 if found
@@ -11,26 +11,29 @@ where indirect libraries may be not found by linker
Upstream-Status: Pending
Signed-off-by: Khem Raj <raj.khem@gmail.com>
-
---
src/Makefile.am | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/src/Makefile.am b/src/Makefile.am
-index c28d81f..951e978 100644
+index 853eda4..da7f3b4 100644
--- a/src/Makefile.am
+++ b/src/Makefile.am
-@@ -40,11 +40,11 @@ EXTRA_DIST += make-debug-archive.in
+@@ -40,7 +40,7 @@ EXTRA_DIST += make-debug-archive.in
CLEANFILES += make-debug-archive
if BUILD_STATIC
-libasm = ../libasm/libasm.a
+libasm = ../libasm/libasm.a $(zip_LIBS)
libdw = ../libdw/libdw.a -lz $(zip_LIBS) $(libelf) -ldl -lpthread
- libelf = ../libelf/libelf.a -lz
+ libelf = ../libelf/libelf.a -lz $(zstd_LIBS)
+ if LIBDEBUGINFOD
+@@ -49,7 +49,7 @@ else
+ libdebuginfod =
+ endif
else
-libasm = ../libasm/libasm.so
+libasm = ../libasm/libasm.so $(zip_LIBS)
libdw = ../libdw/libdw.so $(zip_LIBS) $(libelf) $(libebl) -ldl
libelf = ../libelf/libelf.so
- endif
+ if LIBDEBUGINFOD
diff --git a/meta/recipes-devtools/elfutils/files/0001-libelf-elf_end.c-check-data_list.data.d.d_buf-before.patch b/meta/recipes-devtools/elfutils/files/0001-libelf-elf_end.c-check-data_list.data.d.d_buf-before.patch
index c8d55d583a..57e4b3890a 100644
--- a/meta/recipes-devtools/elfutils/files/0001-libelf-elf_end.c-check-data_list.data.d.d_buf-before.patch
+++ b/meta/recipes-devtools/elfutils/files/0001-libelf-elf_end.c-check-data_list.data.d.d_buf-before.patch
@@ -1,4 +1,4 @@
-From f40cbd43acdb1fefef4fa53a6034296d83cbff7d Mon Sep 17 00:00:00 2001
+From 19d9e9d838e74e4a0a22f08ae03167380f8aa490 Mon Sep 17 00:00:00 2001
From: Robert Yang <liezhi.yang@windriver.com>
Date: Thu, 16 Aug 2018 09:58:26 +0800
Subject: [PATCH] libelf/elf_end.c: check data_list.data.d.d_buf before free it
@@ -14,16 +14,15 @@ The segmentation fault happens when prelink call elf_end().
Upstream-Status: Submitted [https://sourceware.org/ml/elfutils-devel/2018-q3/msg00085.html]
Signed-off-by: Robert Yang <liezhi.yang@windriver.com>
-
---
libelf/elf_end.c | 6 ++++--
1 file changed, 4 insertions(+), 2 deletions(-)
diff --git a/libelf/elf_end.c b/libelf/elf_end.c
-index 160f0b8..5280a70 100644
+index 80f4d13..b103959 100644
--- a/libelf/elf_end.c
+++ b/libelf/elf_end.c
-@@ -160,14 +160,16 @@ elf_end (Elf *elf)
+@@ -169,14 +169,16 @@ elf_end (Elf *elf)
architecture doesn't require overly stringent
alignment the raw data buffer is the same as the
one used for presenting to the caller. */
diff --git a/meta/recipes-devtools/elfutils/files/0001-skip-the-test-when-gcc-not-deployed.patch b/meta/recipes-devtools/elfutils/files/0001-skip-the-test-when-gcc-not-deployed.patch
index 0e6bf5573c..096361e215 100644
--- a/meta/recipes-devtools/elfutils/files/0001-skip-the-test-when-gcc-not-deployed.patch
+++ b/meta/recipes-devtools/elfutils/files/0001-skip-the-test-when-gcc-not-deployed.patch
@@ -1,4 +1,4 @@
-From 2fa52d61b1abdf4a3f3b153c771fb2081666430c Mon Sep 17 00:00:00 2001
+From f9ab54454000fd210dbaa92cf516084d05060f9d Mon Sep 17 00:00:00 2001
From: Mingli Yu <Mingli.Yu@windriver.com>
Date: Tue, 21 May 2019 15:20:34 +0800
Subject: [PATCH] skip the test when gcc not deployed
@@ -9,7 +9,6 @@ gcc not deployed.
Upstream-Status: Submitted [https://sourceware.org/ml/elfutils-devel/2019-q2/msg00091.html]
Signed-off-by: Mingli Yu <Mingli.Yu@windriver.com>
-
---
tests/run-disasm-x86-64.sh | 2 ++
tests/run-disasm-x86.sh | 2 ++
diff --git a/meta/recipes-devtools/elfutils/files/0001-srcfiles-fix-unused-variable-BUFFER_SIZE.patch b/meta/recipes-devtools/elfutils/files/0001-srcfiles-fix-unused-variable-BUFFER_SIZE.patch
new file mode 100644
index 0000000000..b3032327a2
--- /dev/null
+++ b/meta/recipes-devtools/elfutils/files/0001-srcfiles-fix-unused-variable-BUFFER_SIZE.patch
@@ -0,0 +1,36 @@
+From db51a55a8e3811d19265bf12d2ff715c6cf558b4 Mon Sep 17 00:00:00 2001
+From: Jose Quaresma <jose.quaresma@foundries.io>
+Date: Tue, 19 Mar 2024 10:17:59 +0000
+Subject: [PATCH] srcfiles: fix unused variable BUFFER_SIZE
+
+The const variable BUFFER_SIZE is used only on the zip_files
+function witch is only available with LIBARCHIVE.
+
+| ../../elfutils-0.191/src/srcfiles.cxx:81:18: error: unused variable 'BUFFER_SIZE' [-Werror,-Wunused-const-variable]
+| 81 | constexpr size_t BUFFER_SIZE = 8192;
+| | ^~~~~~~~~~~
+
+Upstream-Status: Backport [https://sourceware.org/git/?p=elfutils.git;a=commitdiff;h=ef8a4b841aaf26326b8961a651dbe915d54d23e7]
+
+Signed-off-by: Jose Quaresma <jose.quaresma@foundries.io>
+---
+ src/srcfiles.cxx | 2 ++
+ 1 file changed, 2 insertions(+)
+
+diff --git a/src/srcfiles.cxx b/src/srcfiles.cxx
+index 892737cc..09d50f8d 100644
+--- a/src/srcfiles.cxx
++++ b/src/srcfiles.cxx
+@@ -78,7 +78,9 @@ ARGP_PROGRAM_VERSION_HOOK_DEF = print_version;
+ /* Bug report address. */
+ ARGP_PROGRAM_BUG_ADDRESS_DEF = PACKAGE_BUGREPORT;
+
++#ifdef HAVE_LIBARCHIVE
+ constexpr size_t BUFFER_SIZE = 8192;
++#endif
+
+ /* Definitions of arguments for argp functions. */
+ static const struct argp_option options[] =
+--
+2.44.0
+
diff --git a/meta/recipes-devtools/elfutils/files/0001-tests-Makefile.am-compile-test_nlist-with-standard-C.patch b/meta/recipes-devtools/elfutils/files/0001-tests-Makefile.am-compile-test_nlist-with-standard-C.patch
index c494d7f2e5..d410eb13d8 100644
--- a/meta/recipes-devtools/elfutils/files/0001-tests-Makefile.am-compile-test_nlist-with-standard-C.patch
+++ b/meta/recipes-devtools/elfutils/files/0001-tests-Makefile.am-compile-test_nlist-with-standard-C.patch
@@ -1,4 +1,4 @@
-From 22e0e1c01ec680a2970f4d5ca9e47f90259cbdcf Mon Sep 17 00:00:00 2001
+From 2d4dfb814dda02193e49c9203147cf73e6d3f8b7 Mon Sep 17 00:00:00 2001
From: Alexander Kanavin <alex.kanavin@gmail.com>
Date: Tue, 23 Jun 2020 07:49:35 +0000
Subject: [PATCH] tests/Makefile.am: compile test_nlist with standard CFLAGS
@@ -8,16 +8,15 @@ be reproducible.
Upstream-Status: Inappropriate [oe-core specific]
Signed-off-by: Alexander Kanavin <alex.kanavin@gmail.com>
-
---
tests/Makefile.am | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/tests/Makefile.am b/tests/Makefile.am
-index a2dfd43..40a0228 100644
+index 3bd8e58..370c6a8 100644
--- a/tests/Makefile.am
+++ b/tests/Makefile.am
-@@ -90,7 +90,7 @@ endif
+@@ -103,7 +103,7 @@ endif
test-nlist$(EXEEXT): test-nlist.c
$(AM_V_CC)$(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) \
$(AM_CPPFLAGS) $(CPPFLAGS) \
@@ -25,4 +24,4 @@ index a2dfd43..40a0228 100644
+ $(CFLAGS) $(GCOV_FLAGS) -o $@ $< $(test_nlist_LDADD)
TESTS = run-arextract.sh run-arsymtest.sh run-ar.sh newfile test-nlist \
- update1 update2 update3 update4 \
+ run-ar-N.sh \
diff --git a/meta/recipes-devtools/elfutils/files/0002-Fix-elf_cvt_gunhash-if-dest-and-src-are-same.patch b/meta/recipes-devtools/elfutils/files/0002-Fix-elf_cvt_gunhash-if-dest-and-src-are-same.patch
index be48ef010b..fa2282b8dd 100644
--- a/meta/recipes-devtools/elfutils/files/0002-Fix-elf_cvt_gunhash-if-dest-and-src-are-same.patch
+++ b/meta/recipes-devtools/elfutils/files/0002-Fix-elf_cvt_gunhash-if-dest-and-src-are-same.patch
@@ -1,4 +1,4 @@
-From 1643d793761216252bb654e28aaa5b8eb1536bca Mon Sep 17 00:00:00 2001
+From d8f07a23d608b744dcc0592f9f32f258b186a77c Mon Sep 17 00:00:00 2001
From: Hongxu Jia <hongxu.jia@windriver.com>
Date: Tue, 15 Aug 2017 17:13:59 +0800
Subject: [PATCH] Fix elf_cvt_gunhash if dest and src are same.
@@ -12,16 +12,15 @@ Signed-off-by: Baoshan Pang <BaoShan.Pang@windriver.com>
Rebase to 0.170
Signed-off-by: Hongxu Jia <hongxu.jia@windriver.com>
-
---
libelf/gnuhash_xlate.h | 3 ++-
1 file changed, 2 insertions(+), 1 deletion(-)
diff --git a/libelf/gnuhash_xlate.h b/libelf/gnuhash_xlate.h
-index 6faf113..04d9ca1 100644
+index 3a00ae0..40468fc 100644
--- a/libelf/gnuhash_xlate.h
+++ b/libelf/gnuhash_xlate.h
-@@ -40,6 +40,7 @@ elf_cvt_gnuhash (void *dest, const void *src, size_t len, int encode)
+@@ -42,6 +42,7 @@ elf_cvt_gnuhash (void *dest, const void *src, size_t len, int encode)
words. We must detangle them here. */
Elf32_Word *dest32 = dest;
const Elf32_Word *src32 = src;
@@ -29,7 +28,7 @@ index 6faf113..04d9ca1 100644
/* First four control words, 32 bits. */
for (unsigned int cnt = 0; cnt < 4; ++cnt)
-@@ -50,7 +51,7 @@ elf_cvt_gnuhash (void *dest, const void *src, size_t len, int encode)
+@@ -52,7 +53,7 @@ elf_cvt_gnuhash (void *dest, const void *src, size_t len, int encode)
len -= 4;
}
diff --git a/meta/recipes-devtools/elfutils/files/0003-fixheadercheck.patch b/meta/recipes-devtools/elfutils/files/0003-fixheadercheck.patch
index 212b358dc2..e8b986e4ba 100644
--- a/meta/recipes-devtools/elfutils/files/0003-fixheadercheck.patch
+++ b/meta/recipes-devtools/elfutils/files/0003-fixheadercheck.patch
@@ -1,4 +1,4 @@
-From 7e1f91c42ef5b0bf10afefec10dd08588df3ab1f Mon Sep 17 00:00:00 2001
+From 614f062b22e6da108643f8644a3e92a1108f2b9b Mon Sep 17 00:00:00 2001
From: Hongxu Jia <hongxu.jia@windriver.com>
Date: Tue, 15 Aug 2017 17:17:20 +0800
Subject: [PATCH] fixheadercheck
@@ -16,16 +16,15 @@ Upstream-Status: Pending
Rebase to 0.170
Signed-off-by: Hongxu Jia <hongxu.jia@windriver.com>
-
---
libelf/elf32_updatenull.c | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/libelf/elf32_updatenull.c b/libelf/elf32_updatenull.c
-index d0d4d1e..4ecf5a5 100644
+index 3594e8b..a3314e5 100644
--- a/libelf/elf32_updatenull.c
+++ b/libelf/elf32_updatenull.c
-@@ -354,8 +354,8 @@ __elfw2(LIBELFBITS,updatenull_wrlock) (Elf *elf, int *change_bop, size_t shnum)
+@@ -355,8 +355,8 @@ __elfw2(LIBELFBITS,updatenull_wrlock) (Elf *elf, int *change_bop, size_t shnum)
we test for the alignment of the section being large
enough for the largest alignment required by a data
block. */
diff --git a/meta/recipes-devtools/elfutils/files/0003-musl-utils.patch b/meta/recipes-devtools/elfutils/files/0003-musl-utils.patch
index 1c7cde6d7d..230ea88f41 100644
--- a/meta/recipes-devtools/elfutils/files/0003-musl-utils.patch
+++ b/meta/recipes-devtools/elfutils/files/0003-musl-utils.patch
@@ -1,4 +1,4 @@
-From 81da32c3404b58cbad7b3af00854e0cf2dc3dbf1 Mon Sep 17 00:00:00 2001
+From aab5985a29bd7ab6e0b06eaab190b42a04e10f70 Mon Sep 17 00:00:00 2001
From: Hongxu Jia <hongxu.jia@windriver.com>
Date: Fri, 23 Aug 2019 10:19:48 +0800
Subject: [PATCH] musl-utils
@@ -13,7 +13,6 @@ Upstream-Status: Inappropriate [workaround for musl]
Rebase to 0.177
Signed-off-by: Hongxu Jia <hongxu.jia@windriver.com>
-
---
src/arlib.h | 6 ++++++
src/elfcompress.c | 7 +++++++
@@ -22,7 +21,7 @@ Signed-off-by: Hongxu Jia <hongxu.jia@windriver.com>
4 files changed, 29 insertions(+)
diff --git a/src/arlib.h b/src/arlib.h
-index e117166..8326f6c 100644
+index d4a4221..f6336d9 100644
--- a/src/arlib.h
+++ b/src/arlib.h
@@ -29,6 +29,12 @@
@@ -39,7 +38,7 @@ index e117166..8326f6c 100644
/* State of -D/-U flags. */
extern bool arlib_deterministic_output;
diff --git a/src/elfcompress.c b/src/elfcompress.c
-index 92f2fac..0b037a5 100644
+index f771b92..263de62 100644
--- a/src/elfcompress.c
+++ b/src/elfcompress.c
@@ -37,6 +37,13 @@
@@ -57,10 +56,10 @@ index 92f2fac..0b037a5 100644
ARGP_PROGRAM_VERSION_HOOK_DEF = print_version;
diff --git a/src/strip.c b/src/strip.c
-index 30a1f9d..e89a7f0 100644
+index 6436443..1608496 100644
--- a/src/strip.c
+++ b/src/strip.c
-@@ -46,6 +46,13 @@
+@@ -45,6 +45,13 @@
#include <system.h>
#include <printversion.h>
@@ -75,10 +74,10 @@ index 30a1f9d..e89a7f0 100644
/* Name and version of program. */
diff --git a/src/unstrip.c b/src/unstrip.c
-index 3472637..40c73f3 100644
+index d70053d..b8a6ff3 100644
--- a/src/unstrip.c
+++ b/src/unstrip.c
-@@ -52,6 +52,15 @@
+@@ -51,6 +51,15 @@
#include "libeu.h"
#include "printversion.h"
diff --git a/meta/recipes-devtools/elfutils/files/0006-Fix-build-on-aarch64-musl.patch b/meta/recipes-devtools/elfutils/files/0006-Fix-build-on-aarch64-musl.patch
index 8942ad7828..149e0e6a7b 100644
--- a/meta/recipes-devtools/elfutils/files/0006-Fix-build-on-aarch64-musl.patch
+++ b/meta/recipes-devtools/elfutils/files/0006-Fix-build-on-aarch64-musl.patch
@@ -1,4 +1,4 @@
-From 2396fd67d81e54e18fbad67a5ff67d5684a01013 Mon Sep 17 00:00:00 2001
+From 4409f128c81a9d76b9360b002a1d76043c77b53e Mon Sep 17 00:00:00 2001
From: Hongxu Jia <hongxu.jia@windriver.com>
Date: Tue, 15 Aug 2017 17:27:30 +0800
Subject: [PATCH] Fix build on aarch64/musl
@@ -16,7 +16,6 @@ Signed-off-by: Khem Raj <raj.khem@gmail.com>
Rebase to 0.170
Signed-off-by: Hongxu Jia <hongxu.jia@windriver.com>
-
---
backends/aarch64_initreg.c | 4 ++--
backends/arm_initreg.c | 2 +-
diff --git a/meta/recipes-devtools/elfutils/files/0015-config-eu.am-do-not-use-Werror.patch b/meta/recipes-devtools/elfutils/files/0015-config-eu.am-do-not-use-Werror.patch
deleted file mode 100644
index a7715587db..0000000000
--- a/meta/recipes-devtools/elfutils/files/0015-config-eu.am-do-not-use-Werror.patch
+++ /dev/null
@@ -1,37 +0,0 @@
-From ff6ab57ba5dd37947ef1ffe5de7af5dbebfeb4e9 Mon Sep 17 00:00:00 2001
-From: Alexander Kanavin <alex.kanavin@gmail.com>
-Date: Mon, 22 Jun 2020 21:35:16 +0000
-Subject: [PATCH] config/eu.am: do not use -Werror
-
-Due to re-definition of error() on musl, gcc starts throwing
-errors where none happen with glibc. Since upstream is not
-likely to be interested in musl builds, lets just disable
-Werror.
-
-Upstream-Status: Inappropriate [oe core specific]
-Signed-off-by: Alexander Kanavin <alex.kanavin@gmail.com>
-
----
- config/eu.am | 2 --
- 1 file changed, 2 deletions(-)
-
-diff --git a/config/eu.am b/config/eu.am
-index 58cd3c4..ac42390 100644
---- a/config/eu.am
-+++ b/config/eu.am
-@@ -91,7 +91,6 @@ AM_CFLAGS = -std=gnu99 -Wall -Wshadow -Wformat=2 \
- -Wold-style-definition -Wstrict-prototypes $(TRAMPOLINES_WARNING) \
- $(LOGICAL_OP_WARNING) $(DUPLICATED_COND_WARNING) \
- $(NULL_DEREFERENCE_WARNING) $(IMPLICIT_FALLTHROUGH_WARNING) \
-- $(if $($(*F)_no_Werror),,-Werror) \
- $(if $($(*F)_no_Wunused),,-Wunused -Wextra) \
- $(if $($(*F)_no_Wstack_usage),,$(STACK_USAGE_WARNING)) \
- $(if $($(*F)_no_Wpacked_not_aligned),$(NO_PACKED_NOT_ALIGNED_WARNING),) \
-@@ -101,7 +100,6 @@ AM_CXXFLAGS = -std=c++11 -Wall -Wshadow \
- $(TRAMPOLINES_WARNING) \
- $(LOGICAL_OP_WARNING) $(DUPLICATED_COND_WARNING) \
- $(NULL_DEREFERENCE_WARNING) $(IMPLICIT_FALLTHROUGH_WARNING) \
-- $(if $($(*F)_no_Werror),,-Werror) \
- $(if $($(*F)_no_Wunused),,-Wunused -Wextra) \
- $(if $($(*F)_no_Wstack_usage),,$(STACK_USAGE_WARNING)) \
- $(if $($(*F)_no_Wpacked_not_aligned),$(NO_PACKED_NOT_ALIGNED_WARNING),) \
diff --git a/meta/recipes-devtools/elfutils/files/ptest.patch b/meta/recipes-devtools/elfutils/files/ptest.patch
index fe6f272a83..2152875d63 100644
--- a/meta/recipes-devtools/elfutils/files/ptest.patch
+++ b/meta/recipes-devtools/elfutils/files/ptest.patch
@@ -1,4 +1,4 @@
-From bfbf393e7d5b1b41df85ce1c37e887776c45d529 Mon Sep 17 00:00:00 2001
+From d49f6a135762ec1f1831d0e80b8df2a4269b0a66 Mon Sep 17 00:00:00 2001
From: Richard Purdie <richard.purdie@linuxfoundation.org>
Date: Wed, 1 May 2019 16:37:48 +0100
Subject: [PATCH] Changes to allow ptest to run standalone on target:
@@ -14,7 +14,6 @@ d) Add an oecheck make target which we can use to build the test binaries we nee
Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
Upstream-Status: Inappropriate [oe specific]
-
---
configure.ac | 2 +-
tests/Makefile.am | 2 ++
@@ -22,10 +21,10 @@ Upstream-Status: Inappropriate [oe specific]
3 files changed, 3 insertions(+), 7 deletions(-)
diff --git a/configure.ac b/configure.ac
-index d345495..67933d1 100644
+index bbe8673..488712b 100644
--- a/configure.ac
+++ b/configure.ac
-@@ -48,7 +48,7 @@ AC_COPYRIGHT([Copyright (C) 1996-2021 The elfutils developers.])
+@@ -49,7 +49,7 @@ AC_COPYRIGHT([Copyright (C) 1996-2024 The elfutils developers.])
AC_PREREQ(2.63) dnl Minimum Autoconf version required.
dnl We use GNU make extensions; automake 1.10 defaults to -Wportability.
@@ -35,10 +34,10 @@ index d345495..67933d1 100644
AM_SILENT_RULES([yes])
diff --git a/tests/Makefile.am b/tests/Makefile.am
-index 72afd0e..a2dfd43 100644
+index ee49d07..3bd8e58 100644
--- a/tests/Makefile.am
+++ b/tests/Makefile.am
-@@ -698,3 +698,5 @@ check: check-am coverage
+@@ -852,3 +852,5 @@ check: check-am coverage
coverage:
-$(srcdir)/coverage.sh
endif
diff --git a/meta/recipes-devtools/erofs-utils/erofs-utils/0001-fsck-main.c-add-missing-include.patch b/meta/recipes-devtools/erofs-utils/erofs-utils/0001-fsck-main.c-add-missing-include.patch
deleted file mode 100644
index c3b3c0bd56..0000000000
--- a/meta/recipes-devtools/erofs-utils/erofs-utils/0001-fsck-main.c-add-missing-include.patch
+++ /dev/null
@@ -1,26 +0,0 @@
-From 821778286843b8e88d0cd73a97d20b5a2fff8ea1 Mon Sep 17 00:00:00 2001
-From: Alexander Kanavin <alex@linutronix.de>
-Date: Mon, 6 Dec 2021 20:12:54 +0100
-Subject: [PATCH] fsck/main.c: add missing include
-
-Otherwise musl C library builds fail with missing S_IFMT/S_IFDIR
-definitions.
-
-Upstream-Status: Submitted [email to linux-erofs@lists.ozlabs.org,hsiangkao@linux.alibaba.com]
-Signed-off-by: Alexander Kanavin <alex@linutronix.de>
----
- fsck/main.c | 1 +
- 1 file changed, 1 insertion(+)
-
-diff --git a/fsck/main.c b/fsck/main.c
-index aefa881..ad48e35 100644
---- a/fsck/main.c
-+++ b/fsck/main.c
-@@ -6,6 +6,7 @@
- #include <stdlib.h>
- #include <getopt.h>
- #include <time.h>
-+#include <sys/stat.h>
- #include "erofs/print.h"
- #include "erofs/io.h"
- #include "erofs/decompress.h"
diff --git a/meta/recipes-devtools/erofs-utils/erofs-utils_1.4.bb b/meta/recipes-devtools/erofs-utils/erofs-utils_1.4.bb
deleted file mode 100644
index e5e19e75b4..0000000000
--- a/meta/recipes-devtools/erofs-utils/erofs-utils_1.4.bb
+++ /dev/null
@@ -1,27 +0,0 @@
-SUMMARY = "Tools for erofs filesystems"
-LICENSE = "GPL-2.0-or-later"
-SECTION = "base"
-LIC_FILES_CHKSUM = "file://COPYING;md5=94fa01670a2a8f2d3ab2de15004e0848"
-HOMEPAGE = "https://git.kernel.org/pub/scm/linux/kernel/git/xiang/erofs-utils.git/tree/README"
-
-SRCREV = "ee97fe5fb77c737df0f77d92ab0d92edd3a11be6"
-SRC_URI = "git://git.kernel.org/pub/scm/linux/kernel/git/xiang/erofs-utils.git;branch=master \
- file://0001-fsck-main.c-add-missing-include.patch \
- "
-
-UPSTREAM_CHECK_GITTAGREGEX = "v(?P<pver>(\d+(\.\d+)+))"
-
-S = "${WORKDIR}/git"
-
-DEPENDS = "util-linux-libuuid"
-
-inherit pkgconfig autotools
-
-PACKAGECONFIG ??= "lz4"
-PACKAGECONFIG[lz4] = "--enable-lz4,--disable-lz4,lz4"
-
-EXTRA_OECONF = "${PACKAGECONFIG_CONFARGS} --disable-fuse"
-
-CFLAGS:append:powerpc64le = " -D__SANE_USERSPACE_TYPES__"
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/erofs-utils/erofs-utils_1.7.1.bb b/meta/recipes-devtools/erofs-utils/erofs-utils_1.7.1.bb
new file mode 100644
index 0000000000..a23cb330ea
--- /dev/null
+++ b/meta/recipes-devtools/erofs-utils/erofs-utils_1.7.1.bb
@@ -0,0 +1,27 @@
+SUMMARY = "Tools for erofs filesystems"
+# liberofs also available under Apache 2.0
+LICENSE = "GPL-2.0-or-later"
+SECTION = "base"
+LIC_FILES_CHKSUM = "file://COPYING;md5=73001d804ea1e3d84365f652242cca20"
+HOMEPAGE = "https://git.kernel.org/pub/scm/linux/kernel/git/xiang/erofs-utils.git/tree/README"
+
+SRCREV = "83d94dc619075e71ca4d0f42941cfc18d269a2af"
+SRC_URI = "git://git.kernel.org/pub/scm/linux/kernel/git/xiang/erofs-utils.git;branch=master;protocol=https"
+
+UPSTREAM_CHECK_GITTAGREGEX = "v(?P<pver>(\d+(\.\d+)+))"
+
+S = "${WORKDIR}/git"
+
+DEPENDS = "util-linux-libuuid"
+
+inherit pkgconfig autotools
+
+PACKAGECONFIG ??= "lz4 zlib"
+PACKAGECONFIG[lz4] = "--enable-lz4,--disable-lz4,lz4"
+PACKAGECONFIG[zlib] = "--with-zlib,--without-zlib,zlib"
+
+EXTRA_OECONF = "${PACKAGECONFIG_CONFARGS} --disable-fuse"
+
+CFLAGS:append:powerpc64le = " -D__SANE_USERSPACE_TYPES__"
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/expect/expect/0001-Add-prototype-to-function-definitions.patch b/meta/recipes-devtools/expect/expect/0001-Add-prototype-to-function-definitions.patch
new file mode 100644
index 0000000000..7d211b3dff
--- /dev/null
+++ b/meta/recipes-devtools/expect/expect/0001-Add-prototype-to-function-definitions.patch
@@ -0,0 +1,113 @@
+From 904c7cf6647594939ce1e398468bca3c885f0622 Mon Sep 17 00:00:00 2001
+From: Khem Raj <raj.khem@gmail.com>
+Date: Mon, 15 Aug 2022 18:25:23 -0700
+Subject: [PATCH] Add prototype to function definitions
+
+Compilers like clang has started erroring out on implicit-function-declaration
+therefore arrange the relevant include files where needed.
+
+Upstream-Status: Submitted [https://sourceforge.net/p/expect/patches/24/]
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+---
+ exp_chan.c | 5 +++--
+ exp_clib.c | 4 +++-
+ exp_main_sub.c | 5 +++++
+ pty_termios.c | 4 ++++
+ 4 files changed, 15 insertions(+), 3 deletions(-)
+
+diff --git a/exp_chan.c b/exp_chan.c
+index 79f486c..50375d3 100644
+--- a/exp_chan.c
++++ b/exp_chan.c
+@@ -35,6 +35,7 @@
+ #include "exp_prog.h"
+ #include "exp_command.h"
+ #include "exp_log.h"
++#include "exp_event.h" /* exp_background_channelhandler */
+ #include "tcldbg.h" /* Dbg_StdinMode */
+
+ extern int expSetBlockModeProc _ANSI_ARGS_((int fd, int mode));
+@@ -631,7 +632,7 @@ expWaitOnOne() {
+ }
+
+ void
+-exp_background_channelhandlers_run_all()
++exp_background_channelhandlers_run_all(void)
+ {
+ ThreadSpecificData *tsdPtr = TCL_TSD_INIT(&dataKey);
+ ExpState *esPtr;
+@@ -760,7 +761,7 @@ expCreateChannel(interp,fdin,fdout,pid)
+ }
+
+ void
+-expChannelInit() {
++expChannelInit(void) {
+ ThreadSpecificData *tsdPtr = TCL_TSD_INIT(&dataKey);
+
+ tsdPtr->channelCount = 0;
+diff --git a/exp_clib.c b/exp_clib.c
+index b21fb5d..8f31fc3 100644
+--- a/exp_clib.c
++++ b/exp_clib.c
+@@ -9,13 +9,14 @@ would appreciate credit if this program or parts of it are used.
+
+ #include "expect_cf.h"
+ #include <stdio.h>
++#include <unistd.h>
+ #include <setjmp.h>
+ #ifdef HAVE_INTTYPES_H
+ # include <inttypes.h>
+ #endif
+ #include <sys/types.h>
+ #include <sys/ioctl.h>
+-
++#include <sys/wait.h>
+ #ifdef TIME_WITH_SYS_TIME
+ # include <sys/time.h>
+ # include <time.h>
+@@ -1738,6 +1739,7 @@ int exp_getptyslave();
+ #define sysreturn(x) return(errno = x, -1)
+
+ void exp_init_pty();
++void exp_init_tty();
+
+ /*
+ The following functions are linked from the Tcl library. They
+diff --git a/exp_main_sub.c b/exp_main_sub.c
+index bf6c4be..f53b89e 100644
+--- a/exp_main_sub.c
++++ b/exp_main_sub.c
+@@ -61,6 +61,11 @@ int exp_cmdlinecmds = FALSE;
+ int exp_interactive = FALSE;
+ int exp_buffer_command_input = FALSE;/* read in entire cmdfile at once */
+ int exp_fgets();
++int exp_tty_cooked_echo(
++ Tcl_Interp *interp,
++ exp_tty *tty_old,
++ int *was_raw,
++ int *was_echo);
+
+ Tcl_Interp *exp_interp; /* for use by signal handlers who can't figure out */
+ /* the interpreter directly */
+diff --git a/pty_termios.c b/pty_termios.c
+index c605b23..80ed5e7 100644
+--- a/pty_termios.c
++++ b/pty_termios.c
+@@ -7,6 +7,7 @@ would appreciate credit if you use this file or parts of it.
+
+ */
+
++#include <pty.h> /* openpty */
+ #include <stdio.h>
+ #include <signal.h>
+
+@@ -15,6 +16,9 @@ would appreciate credit if you use this file or parts of it.
+ #endif
+
+ #include "expect_cf.h"
++#include "tclInt.h"
++
++extern char * expErrnoMsg _ANSI_ARGS_((int));
+
+ /*
+ The following functions are linked from the Tcl library. They
diff --git a/meta/recipes-devtools/expect/expect/run-ptest b/meta/recipes-devtools/expect/expect/run-ptest
new file mode 100755
index 0000000000..856c314eaf
--- /dev/null
+++ b/meta/recipes-devtools/expect/expect/run-ptest
@@ -0,0 +1,6 @@
+#!/bin/sh
+
+out_put=`tclsh tests/all.tcl -verbose bpse`
+echo
+echo "${out_put}" | awk '/PASSED|FAILED|SKIPPED/{gsub(/PASSED/,"PASS"); gsub(/FAILED/,"FAIL"); gsub(/SKIPPED/,"SKIP"); if ($NF=="PASS"){print $NF": "$(NF-1)}else{print $NF": "$2}}' | uniq
+
diff --git a/meta/recipes-devtools/expect/expect_5.45.4.bb b/meta/recipes-devtools/expect/expect_5.45.4.bb
index e22fa140d5..7b610b1ff2 100644
--- a/meta/recipes-devtools/expect/expect_5.45.4.bb
+++ b/meta/recipes-devtools/expect/expect_5.45.4.bb
@@ -16,7 +16,7 @@ LIC_FILES_CHKSUM = "file://license.terms;md5=fbf2de7e9102505b1439db06fc36ce5c"
DEPENDS += "tcl"
RDEPENDS:${PN} = "tcl"
-inherit autotools update-alternatives
+inherit autotools update-alternatives ptest
SRC_URI = "${SOURCEFORGE_MIRROR}/expect/Expect/${PV}/${BPN}${PV}.tar.gz \
file://0001-configure.in.patch \
@@ -26,7 +26,9 @@ SRC_URI = "${SOURCEFORGE_MIRROR}/expect/Expect/${PV}/${BPN}${PV}.tar.gz \
file://0001-expect-Fix-segfaults-if-Tcl-is-built-with-stubs-and-.patch \
file://0001-exp_main_sub.c-Use-PATH_MAX-for-path.patch \
file://0001-fixline1-fix-line-1.patch \
- "
+ file://0001-Add-prototype-to-function-definitions.patch \
+ file://run-ptest \
+ "
SRC_URI[md5sum] = "00fce8de158422f5ccd2666512329bd2"
SRC_URI[sha256sum] = "49a7da83b0bdd9f46d04a04deec19c7767bb9a323e40c4781f89caf760b92c34"
@@ -43,6 +45,11 @@ do_install:append() {
sed -e 's|$dir|${libdir}|' -i ${D}${libdir}/expect${PV}/pkgIndex.tcl
}
+do_install_ptest() {
+ install -d ${D}${PTEST_PATH}
+ cp -r ${S}/tests ${D}${PTEST_PATH}
+}
+
# Apparently the public Tcl headers are only in /usr/include/tcl8.6
# when building for the target and nativesdk.
TCL_INCLUDE_PATH = "--with-tclinclude=${STAGING_INCDIR}/tcl8.6"
diff --git a/meta/recipes-devtools/fdisk/gptfdisk/0001-gptcurses-correctly-include-curses.h.patch b/meta/recipes-devtools/fdisk/gptfdisk/0001-gptcurses-correctly-include-curses.h.patch
index 266afbfa11..011eec36f9 100644
--- a/meta/recipes-devtools/fdisk/gptfdisk/0001-gptcurses-correctly-include-curses.h.patch
+++ b/meta/recipes-devtools/fdisk/gptfdisk/0001-gptcurses-correctly-include-curses.h.patch
@@ -1,17 +1,16 @@
-From 6bc6e867c5b3a774c0d7819ee5a3d2885e97caa9 Mon Sep 17 00:00:00 2001
+From 510d0d27f90dfb1c4afd0722580bb8a828b52b7f Mon Sep 17 00:00:00 2001
From: Alexander Kanavin <alex.kanavin@gmail.com>
Date: Mon, 30 Mar 2020 17:11:19 +0200
Subject: [PATCH] gptcurses: correctly include curses.h
Upstream-Status: Inappropriate [oe-core specific]
Signed-off-by: Alexander Kanavin <alex.kanavin@gmail.com>
-
---
gptcurses.cc | 4 ----
1 file changed, 4 deletions(-)
diff --git a/gptcurses.cc b/gptcurses.cc
-index 71aa734..4ebfde1 100644
+index b476700..476fc43 100644
--- a/gptcurses.cc
+++ b/gptcurses.cc
@@ -23,11 +23,7 @@
diff --git a/meta/recipes-devtools/fdisk/gptfdisk_1.0.10.bb b/meta/recipes-devtools/fdisk/gptfdisk_1.0.10.bb
new file mode 100644
index 0000000000..ba891931ac
--- /dev/null
+++ b/meta/recipes-devtools/fdisk/gptfdisk_1.0.10.bb
@@ -0,0 +1,35 @@
+SUMMARY = "Utility for modifying GPT disk partitioning"
+DESCRIPTION = "GPT fdisk is a disk partitioning tool loosely modeled on Linux fdisk, but used for modifying GUID Partition Table (GPT) disks. The related FixParts utility fixes some common problems on Master Boot Record (MBR) disks."
+HOMEPAGE = "https://sourceforge.net/projects/gptfdisk/"
+
+LICENSE = "GPL-2.0-only"
+LIC_FILES_CHKSUM = "file://COPYING;md5=59530bdf33659b29e73d4adb9f9f6552"
+
+DEPENDS = "util-linux"
+
+SRC_URI = "${SOURCEFORGE_MIRROR}/${BPN}/${PV}/${BP}.tar.gz \
+ file://0001-gptcurses-correctly-include-curses.h.patch \
+ "
+SRC_URI[sha256sum] = "2abed61bc6d2b9ec498973c0440b8b804b7a72d7144069b5a9209b2ad693a282"
+
+UPSTREAM_CHECK_URI = "http://sourceforge.net/projects/gptfdisk/files/gptfdisk/"
+UPSTREAM_CHECK_REGEX = "/gptfdisk/(?P<pver>(\d+[\.\-_]*)+)/"
+
+EXTRA_OEMAKE = "'CC=${CC}' 'CXX=${CXX}' gdisk fixparts ${PACKAGECONFIG_CONFARGS}"
+
+PACKAGECONFIG ??= "ncurses popt"
+PACKAGECONFIG[ncurses] = "cgdisk,,ncurses"
+PACKAGECONFIG[popt] = "sgdisk,,popt"
+
+do_install() {
+ install -d ${D}${sbindir}
+ for f in cgdisk sgdisk; do
+ if [ -x $f ]; then
+ install -m 0755 $f ${D}${sbindir}
+ fi
+ done
+ install -m 0755 gdisk ${D}${sbindir}
+ install -m 0755 fixparts ${D}${sbindir}
+}
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/fdisk/gptfdisk_1.0.9.bb b/meta/recipes-devtools/fdisk/gptfdisk_1.0.9.bb
deleted file mode 100644
index e473b9cd55..0000000000
--- a/meta/recipes-devtools/fdisk/gptfdisk_1.0.9.bb
+++ /dev/null
@@ -1,35 +0,0 @@
-SUMMARY = "Utility for modifying GPT disk partitioning"
-DESCRIPTION = "GPT fdisk is a disk partitioning tool loosely modeled on Linux fdisk, but used for modifying GUID Partition Table (GPT) disks. The related FixParts utility fixes some common problems on Master Boot Record (MBR) disks."
-HOMEPAGE = "https://sourceforge.net/projects/gptfdisk/"
-
-LICENSE = "GPL-2.0-only"
-LIC_FILES_CHKSUM = "file://COPYING;md5=59530bdf33659b29e73d4adb9f9f6552"
-
-DEPENDS = "util-linux"
-
-SRC_URI = "${SOURCEFORGE_MIRROR}/${BPN}/${PV}/${BP}.tar.gz \
- file://0001-gptcurses-correctly-include-curses.h.patch \
- "
-SRC_URI[sha256sum] = "dafead2693faeb8e8b97832b23407f6ed5b3219bc1784f482dd855774e2d50c2"
-
-UPSTREAM_CHECK_URI = "http://sourceforge.net/projects/gptfdisk/files/gptfdisk/"
-UPSTREAM_CHECK_REGEX = "/gptfdisk/(?P<pver>(\d+[\.\-_]*)+)/"
-
-EXTRA_OEMAKE = "'CC=${CC}' 'CXX=${CXX}' gdisk fixparts ${PACKAGECONFIG_CONFARGS}"
-
-PACKAGECONFIG ??= "ncurses popt"
-PACKAGECONFIG[ncurses] = "cgdisk,,ncurses"
-PACKAGECONFIG[popt] = "sgdisk,,popt"
-
-do_install() {
- install -d ${D}${sbindir}
- for f in cgdisk sgdisk; do
- if [ -x $f ]; then
- install -m 0755 $f ${D}${sbindir}
- fi
- done
- install -m 0755 gdisk ${D}${sbindir}
- install -m 0755 fixparts ${D}${sbindir}
-}
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/file/file_5.41.bb b/meta/recipes-devtools/file/file_5.41.bb
deleted file mode 100644
index 653887e97a..0000000000
--- a/meta/recipes-devtools/file/file_5.41.bb
+++ /dev/null
@@ -1,54 +0,0 @@
-SUMMARY = "File classification tool"
-DESCRIPTION = "File attempts to classify files depending \
-on their contents and prints a description if a match is found."
-HOMEPAGE = "http://www.darwinsys.com/file/"
-SECTION = "console/utils"
-
-# two clause BSD
-LICENSE = "BSD-2-Clause"
-LIC_FILES_CHKSUM = "file://COPYING;beginline=2;md5=0251eaec1188b20d9a72c502ecfdda1b"
-
-DEPENDS = "file-replacement-native"
-DEPENDS:class-native = "bzip2-replacement-native"
-
-SRC_URI = "git://github.com/file/file.git;branch=master;protocol=https"
-
-SRCREV = "504206e53a89fd6eed71aeaf878aa3512418eab1"
-S = "${WORKDIR}/git"
-
-inherit autotools update-alternatives
-
-PACKAGECONFIG ??= "bz2 lzma zlib"
-PACKAGECONFIG[bz2] = "--enable-bzlib, --disable-bzlib, bzip2"
-PACKAGECONFIG[lzma] = "--enable-xzlib, --disable-xzlib, xz"
-PACKAGECONFIG[zlib] = "--enable-zlib, --disable-zlib, zlib"
-
-EXTRA_OECONF += "--disable-libseccomp"
-
-ALTERNATIVE:${PN} = "file"
-ALTERNATIVE_LINK_NAME[file] = "${bindir}/file"
-
-EXTRA_OEMAKE:append:class-target = " -e FILE_COMPILE=${STAGING_BINDIR_NATIVE}/file-native/file"
-EXTRA_OEMAKE:append:class-nativesdk = " -e FILE_COMPILE=${STAGING_BINDIR_NATIVE}/file-native/file"
-
-FILES:${PN} += "${datadir}/misc/*.mgc"
-
-do_compile:append:class-native() {
- oe_runmake check
-}
-
-do_install:append:class-native() {
- create_cmdline_wrapper ${D}/${bindir}/file \
- --magic-file ${datadir}/misc/magic.mgc
-}
-
-do_install:append:class-nativesdk() {
- create_cmdline_wrapper ${D}/${bindir}/file \
- --magic-file ${datadir}/misc/magic.mgc
-}
-
-BBCLASSEXTEND = "native nativesdk"
-PROVIDES:append:class-native = " file-replacement-native"
-# Don't use NATIVE_PACKAGE_PATH_SUFFIX as that hides libmagic from anyone who
-# depends on file-replacement-native.
-bindir:append:class-native = "/file-native"
diff --git a/meta/recipes-devtools/file/file_5.45.bb b/meta/recipes-devtools/file/file_5.45.bb
new file mode 100644
index 0000000000..fa8dc576dc
--- /dev/null
+++ b/meta/recipes-devtools/file/file_5.45.bb
@@ -0,0 +1,61 @@
+SUMMARY = "File classification tool"
+DESCRIPTION = "File attempts to classify files depending \
+on their contents and prints a description if a match is found."
+HOMEPAGE = "http://www.darwinsys.com/file/"
+SECTION = "console/utils"
+
+# two clause BSD
+LICENSE = "BSD-2-Clause"
+LIC_FILES_CHKSUM = "file://COPYING;beginline=2;md5=0251eaec1188b20d9a72c502ecfdda1b"
+
+DEPENDS = "file-replacement-native"
+DEPENDS:class-native = "bzip2-replacement-native"
+
+SRC_URI = "git://github.com/file/file.git;branch=master;protocol=https \
+ file://print_c.patch \
+ "
+
+SRCREV = "4cbd5c8f0851201d203755b76cb66ba991ffd8be"
+S = "${WORKDIR}/git"
+
+inherit autotools update-alternatives
+
+PACKAGECONFIG ??= "bz2 lzma zlib zstdlib lzlib"
+PACKAGECONFIG[bz2] = "--enable-bzlib, --disable-bzlib, bzip2"
+PACKAGECONFIG[lzma] = "--enable-xzlib, --disable-xzlib, xz"
+PACKAGECONFIG[zlib] = "--enable-zlib, --disable-zlib, zlib"
+PACKAGECONFIG[zstdlib] = "--enable-zstdlib, --disable-zstdlib, zstd"
+PACKAGECONFIG[lzlib] = "--enable-lzlib, --disable-lzlib, lzlib"
+PACKAGECONFIG[seccomp] = "--enable-libseccomp, --disable-libseccomp, libseccomp"
+
+ALTERNATIVE:${PN} = "file"
+ALTERNATIVE_LINK_NAME[file] = "${bindir}/file"
+
+EXTRA_OEMAKE:append:class-target = " -e FILE_COMPILE=${STAGING_BINDIR_NATIVE}/file-native/file"
+EXTRA_OEMAKE:append:class-nativesdk = " -e FILE_COMPILE=${STAGING_BINDIR_NATIVE}/file-native/file"
+
+FILES:${PN} += "${datadir}/misc/*.mgc"
+FILES:${PN}:append:class-nativesdk = " ${SDKPATHNATIVE}/environment-setup.d/file.sh"
+
+do_compile:append:class-native() {
+ oe_runmake check
+}
+
+do_install:append:class-native() {
+ create_cmdline_wrapper ${D}/${bindir}/file \
+ --magic-file ${datadir}/misc/magic.mgc
+}
+
+do_install:append:class-nativesdk() {
+ create_wrapper ${D}/${bindir}/file MAGIC=${datadir}/misc/magic.mgc
+ mkdir -p ${D}${SDKPATHNATIVE}/environment-setup.d
+ cat <<- EOF > ${D}${SDKPATHNATIVE}/environment-setup.d/file.sh
+ export MAGIC="${datadir}/misc/magic.mgc"
+ EOF
+}
+
+BBCLASSEXTEND = "native nativesdk"
+PROVIDES:append:class-native = " file-replacement-native"
+# Don't use NATIVE_PACKAGE_PATH_SUFFIX as that hides libmagic from anyone who
+# depends on file-replacement-native.
+bindir:append:class-native = "/file-native"
diff --git a/meta/recipes-devtools/file/files/print_c.patch b/meta/recipes-devtools/file/files/print_c.patch
new file mode 100644
index 0000000000..760813f9c2
--- /dev/null
+++ b/meta/recipes-devtools/file/files/print_c.patch
@@ -0,0 +1,27 @@
+From e329257b8e22362b62e6c930447ef6feadd63f32 Mon Sep 17 00:00:00 2001
+From: Joe Slater <joe.slater@windriver.com>
+Date: Mon, 7 Aug 2023 22:37:19 +0000
+Subject: [PATCH] print.c: initialize timezone data for localtime_r()
+
+The man page for localtime() points out that while it acts
+like tzset() has been called, localtime_r() might not. We
+have a local version of localtime_r() that avoids this, but
+we do not compile it.
+
+Upstream-Status: Submitted [file@astron.com]
+
+Signed-off-by: Joe Slater <joe.slater@windriver.com>
+---
+ src/print.c | 1 +
+ 1 file changed, 1 insertion(+)
+
+--- git.orig/src/print.c
++++ git/src/print.c
+@@ -289,6 +289,7 @@ file_fmtdatetime(char *buf, size_t bsize
+ goto out;
+
+ if (flags & FILE_T_LOCAL) {
++ tzset();
+ tm = localtime_r(&t, &tmz);
+ } else {
+ tm = gmtime_r(&t, &tmz);
diff --git a/meta/recipes-devtools/flex/flex_2.6.4.bb b/meta/recipes-devtools/flex/flex_2.6.4.bb
index c7cd965347..f7d4ef1f08 100644
--- a/meta/recipes-devtools/flex/flex_2.6.4.bb
+++ b/meta/recipes-devtools/flex/flex_2.6.4.bb
@@ -12,7 +12,7 @@ BBCLASSEXTEND = "native nativesdk"
LIC_FILES_CHKSUM = "file://COPYING;md5=e4742cf92e89040b39486a6219b68067 \
file://src/gettext.h;beginline=1;endline=17;md5=9c05dda2f58d89b850c399cf22e1a00c"
-SRC_URI = "https://github.com/westes/flex/releases/download/v${PV}/flex-${PV}.tar.gz \
+SRC_URI = "${GITHUB_BASE_URI}/download/v${PV}/flex-${PV}.tar.gz \
file://run-ptest \
file://0001-tests-add-a-target-for-building-tests-without-runnin.patch \
${@bb.utils.contains('PTEST_ENABLED', '1', '', 'file://disable-tests.patch', d)} \
@@ -24,16 +24,14 @@ SRC_URI = "https://github.com/westes/flex/releases/download/v${PV}/flex-${PV}.ta
SRC_URI[md5sum] = "2882e3179748cc9f9c23ec593d6adc8d"
SRC_URI[sha256sum] = "e87aae032bf07c26f85ac0ed3250998c37621d95f8bd748b31f15b33c45ee995"
-# Flex has moved to github from 2.6.1 onwards
-UPSTREAM_CHECK_URI = "https://github.com/westes/flex/releases"
-UPSTREAM_CHECK_REGEX = "flex-(?P<pver>\d+(\.\d+)+)\.tar"
+GITHUB_BASE_URI = "https://github.com/westes/flex/releases"
-# Disputed - yes there is stack exhaustion but no bug and it is building the
-# parser, not running it, effectively similar to a compiler ICE. Upstream no plans to address
# https://github.com/westes/flex/issues/414
-CVE_CHECK_IGNORE += "CVE-2019-6293"
+CVE_STATUS[CVE-2019-6293] = "upstream-wontfix: \
+there is stack exhaustion but no bug and it is building the \
+parser, not running it, effectively similar to a compiler ICE. Upstream no plans to address this."
-inherit autotools gettext texinfo ptest
+inherit autotools gettext texinfo ptest github-releases
M4 = "${bindir}/m4"
M4:class-native = "${STAGING_BINDIR_NATIVE}/m4"
@@ -60,6 +58,7 @@ RDEPENDS:${PN}-ptest += "bash gawk make"
do_compile_ptest() {
oe_runmake -C ${B}/tests -f ${B}/tests/Makefile top_builddir=${B} INCLUDES=-I${S}/src buildtests
}
+PTEST_PARALLEL_MAKE = ""
do_install_ptest() {
mkdir -p ${D}${PTEST_PATH}/build-aux/
@@ -69,6 +68,7 @@ do_install_ptest() {
sed -e 's,--sysroot=${STAGING_DIR_TARGET},,g' \
-e 's|${DEBUG_PREFIX_MAP}||g' \
-e 's:${HOSTTOOLS_DIR}/::g' \
+ -e 's:\(^LDFLAGS_FOR_BUILD =\).*:\1:g' \
-e 's:${RECIPE_SYSROOT_NATIVE}::g' \
-e 's:${BASE_WORKDIR}/${MULTIMACH_TARGET_SYS}::g' \-e 's/^Makefile:/_Makefile:/' \
-e 's/^srcdir = \(.*\)/srcdir = ./' -e 's/^top_srcdir = \(.*\)/top_srcdir = ./' \
diff --git a/meta/recipes-devtools/gcc/gcc-12.1.inc b/meta/recipes-devtools/gcc/gcc-12.1.inc
deleted file mode 100644
index c8c4ae93e1..0000000000
--- a/meta/recipes-devtools/gcc/gcc-12.1.inc
+++ /dev/null
@@ -1,114 +0,0 @@
-require gcc-common.inc
-
-# Third digit in PV should be incremented after a minor release
-
-PV = "12.1.0"
-
-# BINV should be incremented to a revision after a minor gcc release
-
-BINV = "12.1.0"
-
-FILESEXTRAPATHS =. "${FILE_DIRNAME}/gcc:${FILE_DIRNAME}/gcc/backport:"
-
-DEPENDS =+ "mpfr gmp libmpc zlib flex-native"
-NATIVEDEPS = "mpfr-native gmp-native libmpc-native zlib-native flex-native"
-
-LICENSE = "GPL-3.0-with-GCC-exception & GPL-3.0-only"
-
-LIC_FILES_CHKSUM = "\
- file://COPYING;md5=59530bdf33659b29e73d4adb9f9f6552 \
- file://COPYING3;md5=d32239bcb673463ab874e80d47fae504 \
- file://COPYING3.LIB;md5=6a6a8e020838b23406c81b19c1d46df6 \
- file://COPYING.LIB;md5=2d5025d4aa3495befef8f17206a5b0a1 \
- file://COPYING.RUNTIME;md5=fe60d87048567d4fe8c8a0ed2448bcc8 \
-"
-# from git
-#RELEASE ?= "7092b7aea122a91824d048aeb23834cf1d19b1a1"
-#BASEURI ?= "https://repo.or.cz/official-gcc.git/snapshot/${RELEASE}.tar.gz;downloadfilename=gcc-${PV}-${RELEASE}.tar.gz"
-#SOURCEDIR ?= "official-gcc-${@'${RELEASE}'[0:7]}"
-
-# from snapshot
-#RELEASE ?= "12.1.0-RC-20220429"
-#SOURCEDIR ?= "gcc-${RELEASE}"
-#BASEURI ?= "https://gcc.gnu.org/pub/gcc/snapshots/${RELEASE}/gcc-${RELEASE}.tar.xz"
-
-# official release
-RELEASE ?= "${PV}"
-BASEURI ?= "${GNU_MIRROR}/gcc/gcc-${PV}/gcc-${PV}.tar.xz"
-SOURCEDIR ?= "gcc-${PV}"
-
-SRC_URI = "${BASEURI} \
- file://0001-gcc-4.3.1-ARCH_FLAGS_FOR_TARGET.patch \
- file://0002-gcc-poison-system-directories.patch \
- file://0003-64-bit-multilib-hack.patch \
- file://0004-Pass-CXXFLAGS_FOR_BUILD-in-a-couple-of-places-to-avo.patch \
- file://0005-Use-the-defaults.h-in-B-instead-of-S-and-t-oe-in-B.patch \
- file://0006-cpp-honor-sysroot.patch \
- file://0007-Define-GLIBC_DYNAMIC_LINKER-and-UCLIBC_DYNAMIC_LINKE.patch \
- file://0008-libtool.patch \
- file://0009-gcc-armv4-pass-fix-v4bx-to-linker-to-support-EABI.patch \
- file://0010-Use-the-multilib-config-files-from-B-instead-of-usin.patch \
- file://0011-Avoid-using-libdir-from-.la-which-usually-points-to-.patch \
- file://0012-export-CPP.patch \
- file://0013-Ensure-target-gcc-headers-can-be-included.patch \
- file://0014-Don-t-search-host-directory-during-relink-if-inst_pr.patch \
- file://0015-libcc1-fix-libcc1-s-install-path-and-rpath.patch \
- file://0016-handle-sysroot-support-for-nativesdk-gcc.patch \
- file://0017-Search-target-sysroot-gcc-version-specific-dirs-with.patch \
- file://0018-Add-ssp_nonshared-to-link-commandline-for-musl-targe.patch \
- file://0019-Re-introduce-spe-commandline-options.patch \
- file://0020-libgcc_s-Use-alias-for-__cpu_indicator_init-instead-.patch \
- file://0021-gentypes-genmodes-Do-not-use-__LINE__-for-maintainin.patch \
- file://0022-mingw32-Enable-operation_not_supported.patch \
- file://0023-libatomic-Do-not-enforce-march-on-aarch64.patch \
- file://0024-Fix-install-path-of-linux64.h.patch \
- file://0025-Move-sched.h-include-ahead-of-user-headers.patch \
-"
-SRC_URI[sha256sum] = "62fd634889f31c02b64af2c468f064b47ad1ca78411c45abe6ac4b5f8dd19c7b"
-
-S = "${TMPDIR}/work-shared/gcc-${PV}-${PR}/${SOURCEDIR}"
-B = "${WORKDIR}/gcc-${PV}/build.${HOST_SYS}.${TARGET_SYS}"
-
-# Language Overrides
-FORTRAN = ""
-JAVA = ""
-
-SSP ?= "--disable-libssp"
-SSP:mingw32 = "--enable-libssp"
-
-EXTRA_OECONF_BASE = "\
- ${SSP} \
- --enable-libitm \
- --enable-lto \
- --disable-bootstrap \
- --with-system-zlib \
- ${@'--with-linker-hash-style=${LINKER_HASH_STYLE}' if '${LINKER_HASH_STYLE}' else ''} \
- --enable-linker-build-id \
- --with-ppl=no \
- --with-cloog=no \
- --enable-checking=release \
- --enable-cheaders=c_global \
- --without-isl \
-"
-
-EXTRA_OECONF_INITIAL = "\
- --disable-libgomp \
- --disable-libitm \
- --disable-libquadmath \
- --with-system-zlib \
- --disable-lto \
- --disable-plugin \
- --enable-linker-build-id \
- --enable-decimal-float=no \
- --without-isl \
- --disable-libssp \
-"
-
-EXTRA_OECONF_PATHS = "\
- --with-gxx-include-dir=/not/exist{target_includedir}/c++/${BINV} \
- --with-sysroot=/not/exist \
- --with-build-sysroot=${STAGING_DIR_TARGET} \
-"
-
-# Is a binutils 2.26 issue, not gcc
-CVE_CHECK_IGNORE += "CVE-2021-37322"
diff --git a/meta/recipes-devtools/gcc/gcc-13.2.inc b/meta/recipes-devtools/gcc/gcc-13.2.inc
new file mode 100644
index 0000000000..603377a49a
--- /dev/null
+++ b/meta/recipes-devtools/gcc/gcc-13.2.inc
@@ -0,0 +1,119 @@
+require gcc-common.inc
+
+# Third digit in PV should be incremented after a minor release
+
+PV = "13.2.0"
+
+# BINV should be incremented to a revision after a minor gcc release
+
+BINV = "13.2.0"
+
+FILESEXTRAPATHS =. "${FILE_DIRNAME}/gcc:${FILE_DIRNAME}/gcc/backport:"
+
+DEPENDS =+ "mpfr gmp libmpc zlib zstd flex-native"
+NATIVEDEPS = "mpfr-native gmp-native libmpc-native zlib-native flex-native zstd-native"
+
+LICENSE = "GPL-3.0-with-GCC-exception & GPL-3.0-only"
+
+LIC_FILES_CHKSUM = "\
+ file://COPYING;md5=59530bdf33659b29e73d4adb9f9f6552 \
+ file://COPYING3;md5=d32239bcb673463ab874e80d47fae504 \
+ file://COPYING3.LIB;md5=6a6a8e020838b23406c81b19c1d46df6 \
+ file://COPYING.LIB;md5=2d5025d4aa3495befef8f17206a5b0a1 \
+ file://COPYING.RUNTIME;md5=fe60d87048567d4fe8c8a0ed2448bcc8 \
+"
+# snapshot from git
+#RELEASE ?= "9b6bf076c11cba0f9ccdace63e8b4044b1a858ea"
+#BASEURI ?= "https://git.linaro.org/toolchain/gcc.git/snapshot/gcc-${RELEASE}.tar.gz"
+#SOURCEDIR = "gcc-${RELEASE}"
+#BASEURI ?= "https://repo.or.cz/official-gcc.git/snapshot/${RELEASE}.tar.gz;downloadfilename=gcc-${PV}-${RELEASE}.tar.gz"
+#SOURCEDIR ?= "official-gcc-${@'${RELEASE}'[0:7]}"
+
+# from snapshot
+#RELEASE ?= "13-20230520"
+#SOURCEDIR ?= "gcc-${RELEASE}"
+#BASEURI ?= "https://gcc.gnu.org/pub/gcc/snapshots/${RELEASE}/gcc-${RELEASE}.tar.xz"
+
+# official release
+RELEASE ?= "${PV}"
+BASEURI ?= "${GNU_MIRROR}/gcc/gcc-${PV}/gcc-${PV}.tar.xz"
+SOURCEDIR ?= "gcc-${PV}"
+
+SRC_URI = "${BASEURI} \
+ file://0001-gcc-4.3.1-ARCH_FLAGS_FOR_TARGET.patch \
+ file://0002-gcc-poison-system-directories.patch \
+ file://0003-64-bit-multilib-hack.patch \
+ file://0004-Pass-CXXFLAGS_FOR_BUILD-in-a-couple-of-places-to-avo.patch \
+ file://0005-Use-the-defaults.h-in-B-instead-of-S-and-t-oe-in-B.patch \
+ file://0006-cpp-honor-sysroot.patch \
+ file://0007-Define-GLIBC_DYNAMIC_LINKER-and-UCLIBC_DYNAMIC_LINKE.patch \
+ file://0008-libtool.patch \
+ file://0009-gcc-armv4-pass-fix-v4bx-to-linker-to-support-EABI.patch \
+ file://0010-Use-the-multilib-config-files-from-B-instead-of-usin.patch \
+ file://0011-aarch64-Fix-include-paths-when-S-B.patch \
+ file://0012-Avoid-using-libdir-from-.la-which-usually-points-to-.patch \
+ file://0013-Ensure-target-gcc-headers-can-be-included.patch \
+ file://0014-Don-t-search-host-directory-during-relink-if-inst_pr.patch \
+ file://0015-libcc1-fix-libcc1-s-install-path-and-rpath.patch \
+ file://0016-handle-sysroot-support-for-nativesdk-gcc.patch \
+ file://0017-Search-target-sysroot-gcc-version-specific-dirs-with.patch \
+ file://0018-Add-ssp_nonshared-to-link-commandline-for-musl-targe.patch \
+ file://0019-Re-introduce-spe-commandline-options.patch \
+ file://0020-libgcc_s-Use-alias-for-__cpu_indicator_init-instead-.patch \
+ file://0021-gentypes-genmodes-Do-not-use-__LINE__-for-maintainin.patch \
+ file://0022-libatomic-Do-not-enforce-march-on-aarch64.patch \
+ file://0023-Fix-install-path-of-linux64.h.patch \
+ file://0024-Avoid-hardcoded-build-paths-into-ppc-libgcc.patch \
+ file://0025-gcc-testsuite-tweaks-for-mips-OE.patch \
+ file://CVE-2023-4039.patch \
+ file://0026-aarch64-Fix-loose-ldpstp-check-PR111411.patch \
+ file://0027-Fix-gcc-vect-module-testcases.patch \
+"
+SRC_URI[sha256sum] = "e275e76442a6067341a27f04c5c6b83d8613144004c0413528863dc6b5c743da"
+
+S = "${TMPDIR}/work-shared/gcc-${PV}-${PR}/${SOURCEDIR}"
+B = "${WORKDIR}/gcc-${PV}/build.${HOST_SYS}.${TARGET_SYS}"
+
+# Language Overrides
+FORTRAN = ""
+JAVA = ""
+
+SSP ?= "--disable-libssp"
+SSP:mingw32 = "--enable-libssp"
+
+EXTRA_OECONF_BASE = "\
+ ${SSP} \
+ --enable-libitm \
+ --enable-lto \
+ --disable-bootstrap \
+ --with-system-zlib \
+ ${@'--with-linker-hash-style=${LINKER_HASH_STYLE}' if '${LINKER_HASH_STYLE}' else ''} \
+ --enable-linker-build-id \
+ --with-ppl=no \
+ --with-cloog=no \
+ --enable-checking=release \
+ --enable-cheaders=c_global \
+ --without-isl \
+"
+
+EXTRA_OECONF_INITIAL = "\
+ --disable-libgomp \
+ --disable-libitm \
+ --disable-libquadmath \
+ --with-system-zlib \
+ --disable-lto \
+ --disable-plugin \
+ --enable-linker-build-id \
+ --enable-decimal-float=no \
+ --without-isl \
+ --disable-libssp \
+"
+
+EXTRA_OECONF_PATHS = "\
+ --with-gxx-include-dir=/not/exist{target_includedir}/c++/${BINV} \
+ --with-sysroot=/not/exist \
+ --with-build-sysroot=${STAGING_DIR_TARGET} \
+"
+
+CVE_STATUS[CVE-2021-37322] = "cpe-incorrect: Is a binutils 2.26 issue, not gcc"
+CVE_STATUS[CVE-2023-4039] = "fixed-version: Fixed via CVE-2023-4039.patch included here. Set the status explictly to deal with all recipes that share the gcc-source"
diff --git a/meta/recipes-devtools/gcc/gcc-common.inc b/meta/recipes-devtools/gcc/gcc-common.inc
index 0f70be7dd4..5ac82b1b57 100644
--- a/meta/recipes-devtools/gcc/gcc-common.inc
+++ b/meta/recipes-devtools/gcc/gcc-common.inc
@@ -11,8 +11,7 @@ CVE_PRODUCT = "gcc"
inherit autotools gettext texinfo
BPN = "gcc"
-COMPILERDEP = "virtual/${MLPREFIX}${TARGET_PREFIX}gcc:do_gcc_stash_builddir"
-COMPILERDEP:class-nativesdk = "virtual/${TARGET_PREFIX}gcc-crosssdk:do_gcc_stash_builddir"
+COMPILERDEP = "virtual/${TARGET_PREFIX}gcc:do_gcc_stash_builddir"
python extract_stashed_builddir () {
src = d.expand("${COMPONENTS_DIR}/${BUILD_ARCH}/gcc-stashed-builddir-${TARGET_SYS}")
@@ -32,6 +31,16 @@ def get_gcc_float_setting(bb, d):
get_gcc_float_setting[vardepvalue] = "${@get_gcc_float_setting(bb, d)}"
+def get_gcc_x86_64_arch_setting(bb, d):
+ import re
+ march = re.match(r'^.*-march=([^\s]*)', d.getVar('TUNE_CCARGS'))
+ if march:
+ return "--with-arch=%s " % march.group(1)
+ # The earliest supported x86-64 CPU
+ return "--with-arch=core2"
+
+get_gcc_x86_64_arch_setting[vardepvalue] = "${@get_gcc_x86_64_arch_setting(bb, d)}"
+
def get_gcc_mips_plt_setting(bb, d):
if d.getVar('TRANSLATED_TARGET_ARCH') in [ 'mips', 'mipsel' ] and bb.utils.contains('DISTRO_FEATURES', 'mplt', True, False, d):
return "--with-mips-plt"
@@ -96,7 +105,7 @@ BINV = "${PV}"
#S = "${WORKDIR}/gcc-${PV}"
S = "${TMPDIR}/work-shared/gcc-${PV}-${PR}/gcc-${PV}"
-B = "${WORKDIR}/gcc-${PV}/build.${HOST_SYS}.${TARGET_SYS}"
+B ?= "${WORKDIR}/gcc-${PV}/build.${HOST_SYS}.${TARGET_SYS}"
target_includedir ?= "${includedir}"
target_libdir ?= "${libdir}"
diff --git a/meta/recipes-devtools/gcc/gcc-configure-common.inc b/meta/recipes-devtools/gcc/gcc-configure-common.inc
index e4cdb73f0a..dba25eb754 100644
--- a/meta/recipes-devtools/gcc/gcc-configure-common.inc
+++ b/meta/recipes-devtools/gcc/gcc-configure-common.inc
@@ -40,7 +40,6 @@ EXTRA_OECONF = "\
${@get_gcc_mips_plt_setting(bb, d)} \
${@get_gcc_ppc_plt_settings(bb, d)} \
${@get_gcc_multiarch_setting(bb, d)} \
- --enable-standard-branch-protection \
"
# glibc version is a minimum controlling whether features are enabled.
diff --git a/meta/recipes-devtools/gcc/gcc-cross-canadian.inc b/meta/recipes-devtools/gcc/gcc-cross-canadian.inc
index 168486bd4e..ec87b46219 100644
--- a/meta/recipes-devtools/gcc/gcc-cross-canadian.inc
+++ b/meta/recipes-devtools/gcc/gcc-cross-canadian.inc
@@ -3,12 +3,13 @@ inherit cross-canadian
SUMMARY = "GNU cc and gcc C compilers (cross-canadian for ${TARGET_ARCH} target)"
PN = "gcc-cross-canadian-${TRANSLATED_TARGET_ARCH}"
-DEPENDS = "virtual/${TARGET_PREFIX}gcc virtual/${HOST_PREFIX}gcc-crosssdk virtual/${HOST_PREFIX}binutils-crosssdk virtual/nativesdk-libc nativesdk-gettext flex-native virtual/libc"
+DEPENDS = "virtual/${TARGET_PREFIX}gcc virtual/${HOST_PREFIX}gcc virtual/${HOST_PREFIX}binutils virtual/nativesdk-libc nativesdk-gettext flex-native virtual/libc"
GCCMULTILIB = "--enable-multilib"
require gcc-configure-common.inc
+EXTRA_OECONF += "--with-plugin-ld=ld"
EXTRA_OECONF_PATHS = "\
--with-gxx-include-dir=/not/exist${target_includedir}/c++/${BINV} \
--with-build-time-tools=${STAGING_DIR_NATIVE}${prefix_native}/${TARGET_SYS}/bin \
@@ -134,8 +135,6 @@ do_install () {
ln -sf ${BINRELPATH}/${TARGET_PREFIX}$t$suffix $dest$t$suffix
done
- t=real-ld
- ln -sf ${BINRELPATH}/${TARGET_PREFIX}ld$suffix $dest$t$suffix
# libquadmath headers need to be available in the gcc libexec dir
install -d ${D}${libdir}/gcc/${TARGET_SYS}/${BINV}/include/
@@ -175,7 +174,7 @@ do_install () {
}
ELFUTILS = "nativesdk-elfutils"
-DEPENDS += "nativesdk-gmp nativesdk-mpfr nativesdk-libmpc ${ELFUTILS} nativesdk-zlib"
+DEPENDS += "nativesdk-gmp nativesdk-mpfr nativesdk-libmpc ${ELFUTILS} nativesdk-zlib nativesdk-zstd"
RDEPENDS:${PN} += "nativesdk-mpfr nativesdk-libmpc ${ELFUTILS}"
SYSTEMHEADERS = "${target_includedir}/"
diff --git a/meta/recipes-devtools/gcc/gcc-cross-canadian_12.1.bb b/meta/recipes-devtools/gcc/gcc-cross-canadian_13.2.bb
index bf53c5cd78..bf53c5cd78 100644
--- a/meta/recipes-devtools/gcc/gcc-cross-canadian_12.1.bb
+++ b/meta/recipes-devtools/gcc/gcc-cross-canadian_13.2.bb
diff --git a/meta/recipes-devtools/gcc/gcc-cross.inc b/meta/recipes-devtools/gcc/gcc-cross.inc
index 3ffa1f0c46..a540fb2434 100644
--- a/meta/recipes-devtools/gcc/gcc-cross.inc
+++ b/meta/recipes-devtools/gcc/gcc-cross.inc
@@ -149,6 +149,7 @@ do_gcc_stash_builddir () {
# Makefile does move-if-change which can end up with 'timestamp' as file contents so break links to those files
rm $dest/gcc/include/*.h
cp gcc/include/*.h $dest/gcc/include/
+ sysroot-relativelinks.py $dest
}
addtask do_gcc_stash_builddir after do_compile before do_install
SSTATETASKS += "do_gcc_stash_builddir"
diff --git a/meta/recipes-devtools/gcc/gcc-cross_12.1.bb b/meta/recipes-devtools/gcc/gcc-cross_13.2.bb
index b43cca0c52..b43cca0c52 100644
--- a/meta/recipes-devtools/gcc/gcc-cross_12.1.bb
+++ b/meta/recipes-devtools/gcc/gcc-cross_13.2.bb
diff --git a/meta/recipes-devtools/gcc/gcc-crosssdk.inc b/meta/recipes-devtools/gcc/gcc-crosssdk.inc
index bd2e71d63f..7eaad2314c 100644
--- a/meta/recipes-devtools/gcc/gcc-crosssdk.inc
+++ b/meta/recipes-devtools/gcc/gcc-crosssdk.inc
@@ -8,5 +8,7 @@ SYSTEMLIBS1 = "${SDKPATHNATIVE}${libdir_nativesdk}/"
GCCMULTILIB = "--disable-multilib"
-DEPENDS = "virtual/${TARGET_PREFIX}binutils-crosssdk gettext-native ${NATIVEDEPS}"
-PROVIDES = "virtual/${TARGET_PREFIX}gcc-crosssdk virtual/${TARGET_PREFIX}g++-crosssdk"
+DEPENDS = "virtual/${TARGET_PREFIX}binutils gettext-native ${NATIVEDEPS}"
+PROVIDES = "virtual/${TARGET_PREFIX}gcc virtual/${TARGET_PREFIX}g++"
+
+gcc_multilib_setup[vardepsexclude] = "MULTILIB_VARIANTS"
diff --git a/meta/recipes-devtools/gcc/gcc-crosssdk_12.1.bb b/meta/recipes-devtools/gcc/gcc-crosssdk_13.2.bb
index 40a6c4feff..40a6c4feff 100644
--- a/meta/recipes-devtools/gcc/gcc-crosssdk_12.1.bb
+++ b/meta/recipes-devtools/gcc/gcc-crosssdk_13.2.bb
diff --git a/meta/recipes-devtools/gcc/gcc-multilib-config.inc b/meta/recipes-devtools/gcc/gcc-multilib-config.inc
index 26bfed9507..2dbbc23c94 100644
--- a/meta/recipes-devtools/gcc/gcc-multilib-config.inc
+++ b/meta/recipes-devtools/gcc/gcc-multilib-config.inc
@@ -154,7 +154,7 @@ python gcc_multilib_setup() {
gcc_header_config_files = {
'x86_64' : ['gcc/config/linux.h', 'gcc/config/i386/linux.h', 'gcc/config/i386/linux64.h'],
'i586' : ['gcc/config/linux.h', 'gcc/config/i386/linux.h', 'gcc/config/i386/linux64.h'],
- 'i686' : ['gcc/config/linux.h', 'gcc/config/i386/linux64.h'],
+ 'i686' : ['gcc/config/linux.h', 'gcc/config/i386/linux.h', 'gcc/config/i386/linux64.h'],
'mips' : ['gcc/config/linux.h', 'gcc/config/mips/linux.h', 'gcc/config/mips/linux64.h'],
'mips64' : ['gcc/config/linux.h', 'gcc/config/mips/linux.h', 'gcc/config/mips/linux64.h'],
'powerpc' : ['gcc/config/linux.h', 'gcc/config/rs6000/linux64.h'],
diff --git a/meta/recipes-devtools/gcc/gcc-runtime.inc b/meta/recipes-devtools/gcc/gcc-runtime.inc
index c39a0caf8a..dbc9141000 100644
--- a/meta/recipes-devtools/gcc/gcc-runtime.inc
+++ b/meta/recipes-devtools/gcc/gcc-runtime.inc
@@ -16,7 +16,8 @@ EXTRA_OECONF_PATHS = "\
EXTRA_OECONF:append:linuxstdbase = " --enable-clocale=gnu"
EXTRA_OECONF:append = " --cache-file=${B}/config.cache"
-EXTRA_OECONF:append:libc-newlib = " --with-newlib"
+EXTRA_OECONF:append:libc-newlib = " --with-newlib --with-target-subdir"
+EXTRA_OECONF:append:libc-baremetal = " --with-target-subdir"
# Disable ifuncs for libatomic on arm conflicts -march/-mcpu
EXTRA_OECONF:append:arm = " libat_cv_have_ifunc=no "
@@ -37,6 +38,7 @@ RUNTIMELIBITM:nios2 = ""
RUNTIMELIBITM:microblaze = ""
RUNTIMELIBITM:riscv32 = ""
RUNTIMELIBITM:riscv64 = ""
+RUNTIMELIBITM:loongarch64 = ""
RUNTIMELIBSSP ?= ""
RUNTIMELIBSSP:mingw32 ?= "libssp"
@@ -49,34 +51,20 @@ RUNTIMETARGET:libc-newlib = "libstdc++-v3"
# libiberty
# libgfortran needs separate recipe due to libquadmath dependency
-# Relative path to be repaced into debug info
-REL_S = "/usr/src/debug/${PN}/${EXTENDPE}${PV}-${PR}"
-
-DEBUG_PREFIX_MAP:class-target = " \
- -fdebug-prefix-map=${WORKDIR}/recipe-sysroot= \
- -fdebug-prefix-map=${WORKDIR}/recipe-sysroot-native= \
- -fdebug-prefix-map=${S}=${REL_S} \
- -fdebug-prefix-map=${S}/include=${REL_S}/libstdc++-v3/../include \
- -fdebug-prefix-map=${S}/libiberty=${REL_S}/libstdc++-v3/../libiberty \
- -fdebug-prefix-map=${S}/libgcc=${REL_S}/libstdc++-v3/../libgcc \
- -fdebug-prefix-map=${B}=${REL_S} \
- -ffile-prefix-map=${B}/${HOST_SYS}/libstdc++-v3/include=${includedir}/c++/${BINV} \
- "
-
do_configure () {
export CXX="${CXX} -nostdinc++ -L${WORKDIR}/dummylib"
# libstdc++ isn't built yet so CXX would error not able to find it which breaks stdc++'s configure
# tests. Create a dummy empty lib for the purposes of configure.
mkdir -p ${WORKDIR}/dummylib
- touch ${WORKDIR}/dummylib/libstdc++.so
+ ${CC} -x c /dev/null -c -o ${WORKDIR}/dummylib/dummylib.o
+ ${AR} rcs ${WORKDIR}/dummylib/libstdc++.a ${WORKDIR}/dummylib/dummylib.o
for d in libgcc ${RUNTIMETARGET}; do
echo "Configuring $d"
rm -rf ${B}/${TARGET_SYS}/$d/
mkdir -p ${B}/${TARGET_SYS}/$d/
cd ${B}/${TARGET_SYS}/$d/
chmod a+x ${S}/$d/configure
- relpath=${@os.path.relpath("${S}/$d", "${B}/${TARGET_SYS}/$d")}
- $relpath/configure ${CONFIGUREOPTS} ${EXTRA_OECONF}
+ ${S}/$d/configure ${CONFIGUREOPTS} ${EXTRA_OECONF}
if [ "$d" = "libgcc" ]; then
(cd ${B}/${TARGET_SYS}/libgcc; oe_runmake enable-execute-stack.c unwind.h md-unwind-support.h sfp-machine.h gthr-default.h)
fi
@@ -97,6 +85,8 @@ do_install () {
cd ${B}/${TARGET_SYS}/$d/
oe_runmake 'DESTDIR=${D}' MULTIBUILDTOP=${B}/${TARGET_SYS}/$d/ install
done
+ install -d ${D}${datadir}/gdb/auto-load/${libdir}
+ mv ${D}${libdir}/libstdc++*-gdb.py ${D}${datadir}/gdb/auto-load/${libdir}
if [ -d ${D}${libdir}/gcc/${TARGET_SYS}/${BINV}/include ]; then
install -d ${D}${libdir}/${TARGET_SYS}/${BINV}/include
mv ${D}${libdir}/gcc/${TARGET_SYS}/${BINV}/include/* ${D}${libdir}/${TARGET_SYS}/${BINV}/include
@@ -111,6 +101,9 @@ do_install () {
if [ -d ${D}${infodir} ]; then
rmdir --ignore-fail-on-non-empty -p ${D}${infodir}
fi
+ if [ -d ${D}${libdir} ]; then
+ rmdir --ignore-fail-on-non-empty -p ${D}${libdir}
+ fi
}
do_install:append:class-target () {
@@ -144,7 +137,7 @@ do_install:append:class-target () {
ln -s ../${TARGET_SYS}/ext ${D}${includedir}/c++/${BINV}/${TARGET_ARCH}${TARGET_VENDOR_MULTILIB_ORIGINAL}-${TARGET_OS}/ext
fi
- if [ "${TARGET_ARCH}" == "x86_64" -a "${MULTILIB_VARIANTS}" != "" ];then
+ if [ "${TARGET_ARCH}" = "x86_64" -a "${MULTILIB_VARIANTS}" != "" ];then
ln -sf ../${X86ARCH32}${TARGET_VENDOR}-${TARGET_OS} ${D}${includedir}/c++/${BINV}/${TARGET_ARCH}${TARGET_VENDOR}-${TARGET_OS}/32
fi
@@ -194,9 +187,14 @@ RRECOMMENDS:${PN}-dbg = ""
# include python debugging scripts
FILES:${PN}-dbg += "\
- ${libdir}/libstdc++.*-gdb.py \
${datadir}/gcc-${BINV}/python/libstdcxx \
+ ${datadir}/gdb/auto-load \
"
+# Needed by libstdcxx pretty printer, however it is disabled intentionally
+# as it adds build time dependency on bash and some cases e.g. no GPL3 cases
+# bash is not availbale and builds fails
+# So it needs to be added manually to images sadly.
+# RDEPENDS:${PN}-dbg += "python3-datetime"
FILES:libg2c = "${target_libdir}/libg2c.so.*"
SUMMARY:libg2c = "Companion runtime library for g77"
diff --git a/meta/recipes-devtools/gcc/gcc-runtime_12.1.bb b/meta/recipes-devtools/gcc/gcc-runtime_13.2.bb
index dd430b57eb..dd430b57eb 100644
--- a/meta/recipes-devtools/gcc/gcc-runtime_12.1.bb
+++ b/meta/recipes-devtools/gcc/gcc-runtime_13.2.bb
diff --git a/meta/recipes-devtools/gcc/gcc-sanitizers.inc b/meta/recipes-devtools/gcc/gcc-sanitizers.inc
index 9881afa837..524ebd4ba4 100644
--- a/meta/recipes-devtools/gcc/gcc-sanitizers.inc
+++ b/meta/recipes-devtools/gcc/gcc-sanitizers.inc
@@ -78,14 +78,15 @@ do_package_write_ipk[depends] += "virtual/${MLPREFIX}${TARGET_PREFIX}compilerlib
do_package_write_deb[depends] += "virtual/${MLPREFIX}${TARGET_PREFIX}compilerlibs:do_packagedata"
do_package_write_rpm[depends] += "virtual/${MLPREFIX}${TARGET_PREFIX}compilerlibs:do_packagedata"
-# Only x86, powerpc, sparc, s390, arm, and aarch64 are supported
-COMPATIBLE_HOST = '(x86_64|i.86|powerpc|sparc|s390|arm|aarch64).*-linux'
+# Only x86, powerpc, sparc, s390, arm, aarch64 and loongarch64 are supported
+COMPATIBLE_HOST = '(x86_64|i.86|powerpc|sparc|s390|arm|aarch64|loongarch64).*-linux'
# musl is currently broken entirely
COMPATIBLE_HOST:libc-musl = 'null'
FILES:libasan += "${libdir}/libasan.so.* ${libdir}/libhwasan.so.*"
FILES:libasan-dev += "\
${libdir}/libasan_preinit.o \
+ ${libdir}/libhwasan_preinit.o \
${libdir}/libasan.so \
${libdir}/libhwasan.so \
${libdir}/libasan.la \
diff --git a/meta/recipes-devtools/gcc/gcc-sanitizers_12.1.bb b/meta/recipes-devtools/gcc/gcc-sanitizers_13.2.bb
index 8bda2ccad6..8bda2ccad6 100644
--- a/meta/recipes-devtools/gcc/gcc-sanitizers_12.1.bb
+++ b/meta/recipes-devtools/gcc/gcc-sanitizers_13.2.bb
diff --git a/meta/recipes-devtools/gcc/gcc-shared-source.inc b/meta/recipes-devtools/gcc/gcc-shared-source.inc
index aac4b49313..03f520b093 100644
--- a/meta/recipes-devtools/gcc/gcc-shared-source.inc
+++ b/meta/recipes-devtools/gcc/gcc-shared-source.inc
@@ -9,3 +9,13 @@ SRC_URI = ""
do_configure[depends] += "gcc-source-${PV}:do_preconfigure"
do_populate_lic[depends] += "gcc-source-${PV}:do_unpack"
+do_deploy_source_date_epoch[depends] += "gcc-source-${PV}:do_deploy_source_date_epoch"
+
+# Copy the SDE from the shared workdir to the recipe workdir
+do_deploy_source_date_epoch () {
+ sde_file=${SDE_FILE}
+ sde_file=${sde_file#${WORKDIR}/}
+ mkdir -p ${SDE_DEPLOYDIR} $(dirname ${SDE_FILE})
+ cp -p $(dirname ${S})/$sde_file ${SDE_DEPLOYDIR}
+ cp -p $(dirname ${S})/$sde_file ${SDE_FILE}
+}
diff --git a/meta/recipes-devtools/gcc/gcc-source.inc b/meta/recipes-devtools/gcc/gcc-source.inc
index 03bab97815..265bcf4bef 100644
--- a/meta/recipes-devtools/gcc/gcc-source.inc
+++ b/meta/recipes-devtools/gcc/gcc-source.inc
@@ -17,15 +17,21 @@ STAMPCLEAN = "${STAMPS_DIR}/work-shared/gcc-${PV}-*"
INHIBIT_DEFAULT_DEPS = "1"
DEPENDS = ""
PACKAGES = ""
+TARGET_ARCH = "allarch"
+TARGET_AS_ARCH = "none"
+TARGET_CC_ARCH = "none"
+TARGET_LD_ARCH = "none"
+TARGET_OS = "linux"
+baselib = "lib"
+PACKAGE_ARCH = "all"
+B = "${WORKDIR}/build"
# This needs to be Python to avoid lots of shell variables becoming dependencies.
python do_preconfigure () {
import subprocess
cmd = d.expand('cd ${S} && PATH=${PATH} gnu-configize')
subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True)
- # See 0044-gengtypes.patch, we need to regenerate this file
- bb.utils.remove(d.expand("${S}/gcc/gengtype-lex.c"))
cmd = d.expand("sed -i 's/BUILD_INFO=info/BUILD_INFO=/' ${S}/gcc/configure")
subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True)
diff --git a/meta/recipes-devtools/gcc/gcc-source_12.1.bb b/meta/recipes-devtools/gcc/gcc-source_13.2.bb
index b890fa33ea..b890fa33ea 100644
--- a/meta/recipes-devtools/gcc/gcc-source_12.1.bb
+++ b/meta/recipes-devtools/gcc/gcc-source_13.2.bb
diff --git a/meta/recipes-devtools/gcc/gcc-target.inc b/meta/recipes-devtools/gcc/gcc-target.inc
index cc65e995c3..7dac3ef422 100644
--- a/meta/recipes-devtools/gcc/gcc-target.inc
+++ b/meta/recipes-devtools/gcc/gcc-target.inc
@@ -19,7 +19,7 @@ EXTRA_OECONF:append:armv6:class-target = " --with-arch=armv6${ARMFPARCHEXT}"
EXTRA_OECONF:append:armv7a:class-target = " --with-arch=armv7-a${ARMFPARCHEXT}"
EXTRA_OECONF:append:armv7ve:class-target = " --with-arch=armv7ve${ARMFPARCHEXT}"
EXTRA_OECONF:append:arc:class-target = " --with-cpu=${TUNE_PKGARCH}"
-EXTRA_OECONF:append:x86-64:class-target = " --with-arch=native"
+EXTRA_OECONF:append:x86-64:class-target = " ${@get_gcc_x86_64_arch_setting(bb, d)}"
# libcc1 requres gcc_cv_objdump when cross build, but gcc_cv_objdump is
# set in subdir gcc, so subdir libcc1 can't use it, export it here to
diff --git a/meta/recipes-devtools/gcc/gcc-testsuite.inc b/meta/recipes-devtools/gcc/gcc-testsuite.inc
index f68fec58ed..f16d471478 100644
--- a/meta/recipes-devtools/gcc/gcc-testsuite.inc
+++ b/meta/recipes-devtools/gcc/gcc-testsuite.inc
@@ -51,9 +51,14 @@ python check_prepare() {
# enable all valid instructions, since the test suite itself does not
# limit itself to the target cpu options.
# - valid for x86*, powerpc, arm, arm64
- if qemu_binary.lstrip("qemu-") in ["x86_64", "i386", "ppc", "arm", "aarch64"]:
+ if qemu_binary.endswith(("x86_64", "i386", "arm", "aarch64")):
args += ["-cpu", "max"]
-
+ elif qemu_binary.endswith(("ppc", "mips", "mips64")):
+ extra = d.getVar("QEMU_EXTRAOPTIONS_%s" % d.getVar('PACKAGE_ARCH'))
+ if extra:
+ args += extra.split()
+ # For mips64 we could set a maximal CPU (e.g. Loongson-3A4000) however they either have MSA
+ # or Loongson-MMI vector extensions, not both and qemu lacks complete support for MMI
sysroot = d.getVar("RECIPE_SYSROOT")
args += ["-L", sysroot]
# lib paths are static here instead of using $libdir since this is used by a -cross recipe
diff --git a/meta/recipes-devtools/gcc/gcc/0001-gcc-4.3.1-ARCH_FLAGS_FOR_TARGET.patch b/meta/recipes-devtools/gcc/gcc/0001-gcc-4.3.1-ARCH_FLAGS_FOR_TARGET.patch
index 66e582ca98..5c75698eda 100644
--- a/meta/recipes-devtools/gcc/gcc/0001-gcc-4.3.1-ARCH_FLAGS_FOR_TARGET.patch
+++ b/meta/recipes-devtools/gcc/gcc/0001-gcc-4.3.1-ARCH_FLAGS_FOR_TARGET.patch
@@ -1,4 +1,4 @@
-From 31f94ef5b43a984a98f0eebd2dcf1b53aa1d7bce Mon Sep 17 00:00:00 2001
+From 553564bdcabdcc5d4cc4de73c7eb94c505ef51f5 Mon Sep 17 00:00:00 2001
From: Khem Raj <raj.khem@gmail.com>
Date: Fri, 29 Mar 2013 08:37:11 +0400
Subject: [PATCH] gcc-4.3.1: ARCH_FLAGS_FOR_TARGET
@@ -12,10 +12,10 @@ Upstream-Status: Inappropriate [embedded specific]
2 files changed, 2 insertions(+), 2 deletions(-)
diff --git a/configure b/configure
-index 5dcaab14ae9..f76310a36bb 100755
+index 117a7ef23f2..535265253fd 100755
--- a/configure
+++ b/configure
-@@ -10165,7 +10165,7 @@ fi
+@@ -10195,7 +10195,7 @@ fi
# for target_alias and gcc doesn't manage it consistently.
target_configargs="--cache-file=./config.cache ${target_configargs}"
@@ -25,10 +25,10 @@ index 5dcaab14ae9..f76310a36bb 100755
*" newlib "*)
case " $target_configargs " in
diff --git a/configure.ac b/configure.ac
-index 85977482aee..8b9097c7a45 100644
+index b3e9bbd2aa5..5ac8d6490f6 100644
--- a/configure.ac
+++ b/configure.ac
-@@ -3346,7 +3346,7 @@ fi
+@@ -3351,7 +3351,7 @@ fi
# for target_alias and gcc doesn't manage it consistently.
target_configargs="--cache-file=./config.cache ${target_configargs}"
diff --git a/meta/recipes-devtools/gcc/gcc/0002-gcc-poison-system-directories.patch b/meta/recipes-devtools/gcc/gcc/0002-gcc-poison-system-directories.patch
index 5a51ae7d96..492300047d 100644
--- a/meta/recipes-devtools/gcc/gcc/0002-gcc-poison-system-directories.patch
+++ b/meta/recipes-devtools/gcc/gcc/0002-gcc-poison-system-directories.patch
@@ -1,4 +1,4 @@
-From e1dbdcd0ea667bab4b551294354e04c6fe288ab6 Mon Sep 17 00:00:00 2001
+From 52676b5934ba127c3af39fc484c8236c8fa60b96 Mon Sep 17 00:00:00 2001
From: Khem Raj <raj.khem@gmail.com>
Date: Mon, 8 Mar 2021 16:04:20 -0800
Subject: [PATCH] gcc: poison-system-directories
@@ -11,7 +11,7 @@ aborted.
Instead, we add the two missing items to the current scan. If the user
wants this to be a failure, they can add "-Werror=poison-system-directories".
-Upstream-Status: Pending
+Upstream-Status: Inappropriate [OE configuration]
Signed-off-by: Mark Hatle <mark.hatle@windriver.com>
Signed-off-by: Khem Raj <raj.khem@gmail.com>
---
@@ -25,10 +25,10 @@ Signed-off-by: Khem Raj <raj.khem@gmail.com>
7 files changed, 86 insertions(+), 2 deletions(-)
diff --git a/gcc/common.opt b/gcc/common.opt
-index 8a0dafc522d..0357868e22c 100644
+index 862c474d3c8..64c4277c991 100644
--- a/gcc/common.opt
+++ b/gcc/common.opt
-@@ -710,6 +710,10 @@ Wreturn-local-addr
+@@ -711,6 +711,10 @@ Wreturn-local-addr
Common Var(warn_return_local_addr) Init(1) Warning
Warn about returning a pointer/reference to a local or temporary variable.
@@ -40,10 +40,10 @@ index 8a0dafc522d..0357868e22c 100644
Common Var(warn_shadow) Warning
Warn when one variable shadows another. Same as -Wshadow=global.
diff --git a/gcc/config.in b/gcc/config.in
-index 64c27c9cfac..a693cb8a886 100644
+index 4cad077bfbe..80e832fdb84 100644
--- a/gcc/config.in
+++ b/gcc/config.in
-@@ -230,6 +230,16 @@
+@@ -236,6 +236,16 @@
#endif
@@ -61,10 +61,10 @@ index 64c27c9cfac..a693cb8a886 100644
optimizer and back end) to be checked for dynamic type safety at runtime.
This is quite expensive. */
diff --git a/gcc/configure b/gcc/configure
-index 5ce0557719a..dc2d59701ad 100755
+index c7b26d1927d..3508be7b439 100755
--- a/gcc/configure
+++ b/gcc/configure
-@@ -1023,6 +1023,7 @@ enable_maintainer_mode
+@@ -1026,6 +1026,7 @@ enable_maintainer_mode
enable_link_mutex
enable_link_serialization
enable_version_specific_runtime_libs
@@ -72,7 +72,7 @@ index 5ce0557719a..dc2d59701ad 100755
enable_plugin
enable_host_shared
enable_libquadmath_support
-@@ -1785,6 +1786,8 @@ Optional Features:
+@@ -1788,6 +1789,8 @@ Optional Features:
--enable-version-specific-runtime-libs
specify that runtime libraries should be installed
in a compiler-specific directory
@@ -81,7 +81,7 @@ index 5ce0557719a..dc2d59701ad 100755
--enable-plugin enable plugin support
--enable-host-shared build host code as shared libraries
--disable-libquadmath-support
-@@ -31982,6 +31985,22 @@ if test "${enable_version_specific_runtime_libs+set}" = set; then :
+@@ -31753,6 +31756,22 @@ if test "${enable_version_specific_runtime_libs+set}" = set; then :
fi
@@ -105,10 +105,10 @@ index 5ce0557719a..dc2d59701ad 100755
diff --git a/gcc/configure.ac b/gcc/configure.ac
-index 23bee7010a3..36ce78924de 100644
+index 09082e8ccae..6cd01a8966b 100644
--- a/gcc/configure.ac
+++ b/gcc/configure.ac
-@@ -7421,6 +7421,22 @@ AC_ARG_ENABLE(version-specific-runtime-libs,
+@@ -7292,6 +7292,22 @@ AC_ARG_ENABLE(version-specific-runtime-libs,
[specify that runtime libraries should be
installed in a compiler-specific directory])])
@@ -132,37 +132,37 @@ index 23bee7010a3..36ce78924de 100644
AC_SUBST(subdirs)
AC_SUBST(srcdir)
diff --git a/gcc/doc/invoke.texi b/gcc/doc/invoke.texi
-index 07b440190c3..b2de464798a 100644
+index de40f62e219..d6f203c8b71 100644
--- a/gcc/doc/invoke.texi
+++ b/gcc/doc/invoke.texi
-@@ -379,6 +379,7 @@ Objective-C and Objective-C++ Dialects}.
- -Wpacked -Wno-packed-bitfield-compat -Wpacked-not-aligned -Wpadded @gol
- -Wparentheses -Wno-pedantic-ms-format @gol
- -Wpointer-arith -Wno-pointer-compare -Wno-pointer-to-int-cast @gol
-+-Wno-poison-system-directories @gol
- -Wno-pragmas -Wno-prio-ctor-dtor -Wredundant-decls @gol
- -Wrestrict -Wno-return-local-addr -Wreturn-type @gol
- -Wno-scalar-storage-order -Wsequence-point @gol
-@@ -8029,6 +8030,14 @@ made up of data only and thus requires no special treatment. But, for
+@@ -384,6 +384,7 @@ Objective-C and Objective-C++ Dialects}.
+ -Wpacked -Wno-packed-bitfield-compat -Wpacked-not-aligned -Wpadded
+ -Wparentheses -Wno-pedantic-ms-format
+ -Wpointer-arith -Wno-pointer-compare -Wno-pointer-to-int-cast
++-Wno-poison-system-directories
+ -Wno-pragmas -Wno-prio-ctor-dtor -Wredundant-decls
+ -Wrestrict -Wno-return-local-addr -Wreturn-type
+ -Wno-scalar-storage-order -Wsequence-point
+@@ -8426,6 +8427,14 @@ made up of data only and thus requires no special treatment. But, for
most targets, it is made up of code and thus requires the stack to be
made executable in order for the program to work properly.
-+@item -Wno-poison-system-directories
+@opindex Wno-poison-system-directories
++@item -Wno-poison-system-directories
+Do not warn for @option{-I} or @option{-L} options using system
+directories such as @file{/usr/include} when cross compiling. This
+option is intended for use in chroot environments when such
+directories contain the correct headers and libraries for the target
+system rather than the host.
+
- @item -Wfloat-equal
@opindex Wfloat-equal
@opindex Wno-float-equal
+ @item -Wfloat-equal
diff --git a/gcc/gcc.cc b/gcc/gcc.cc
-index bb07cc244e3..ce161d3c853 100644
+index 16bb07f2cdc..5feae021545 100644
--- a/gcc/gcc.cc
+++ b/gcc/gcc.cc
-@@ -1159,6 +1159,8 @@ proper position among the other output files. */
+@@ -1146,6 +1146,8 @@ proper position among the other output files. */
"%{fuse-ld=*:-fuse-ld=%*} " LINK_COMPRESS_DEBUG_SPEC \
"%X %{o*} %{e*} %{N} %{n} %{r}\
%{s} %{t} %{u*} %{z} %{Z} %{!nostdlib:%{!r:%{!nostartfiles:%S}}} \
@@ -171,7 +171,7 @@ index bb07cc244e3..ce161d3c853 100644
%{static|no-pie|static-pie:} %@{L*} %(mfwrap) %(link_libgcc) " \
VTABLE_VERIFICATION_SPEC " " SANITIZER_EARLY_SPEC " %o "" \
%{fopenacc|fopenmp|%:gt(%{ftree-parallelize-loops=*:%*} 1):\
-@@ -1254,8 +1256,11 @@ static const char *cpp_unique_options =
+@@ -1241,8 +1243,11 @@ static const char *cpp_unique_options =
static const char *cpp_options =
"%(cpp_unique_options) %1 %{m*} %{std*&ansi&trigraphs} %{W*&pedantic*} %{w}\
%{f*} %{g*:%{%:debug-level-gt(0):%{g*}\
@@ -186,7 +186,7 @@ index bb07cc244e3..ce161d3c853 100644
/* Pass -d* flags, possibly modifying -dumpdir, -dumpbase et al.
diff --git a/gcc/incpath.cc b/gcc/incpath.cc
-index bd2a97938eb..c80f100f476 100644
+index 4d44321183f..46c0d543205 100644
--- a/gcc/incpath.cc
+++ b/gcc/incpath.cc
@@ -26,6 +26,7 @@
@@ -208,17 +208,17 @@ index bd2a97938eb..c80f100f476 100644
+ struct cpp_dir *p;
+
+ for (p = heads[INC_QUOTE]; p; p = p->next)
-+ {
-+ if ((!strncmp (p->name, "/usr/include", 12))
-+ || (!strncmp (p->name, "/usr/local/include", 18))
-+ || (!strncmp (p->name, "/usr/X11R6/include", 18))
-+ || (!strncmp (p->name, "/sw/include", 11))
-+ || (!strncmp (p->name, "/opt/include", 12)))
-+ warning (OPT_Wpoison_system_directories,
-+ "include location \"%s\" is unsafe for "
-+ "cross-compilation",
-+ p->name);
-+ }
++ {
++ if ((!strncmp (p->name, "/usr/include", 12))
++ || (!strncmp (p->name, "/usr/local/include", 18))
++ || (!strncmp (p->name, "/usr/X11R6/include", 18))
++ || (!strncmp (p->name, "/sw/include", 11))
++ || (!strncmp (p->name, "/opt/include", 12)))
++ warning (OPT_Wpoison_system_directories,
++ "include location \"%s\" is unsafe for "
++ "cross-compilation",
++ p->name);
++ }
+ }
+#endif
}
diff --git a/meta/recipes-devtools/gcc/gcc/0003-64-bit-multilib-hack.patch b/meta/recipes-devtools/gcc/gcc/0003-64-bit-multilib-hack.patch
index e83f05b8aa..69e7fa0ba9 100644
--- a/meta/recipes-devtools/gcc/gcc/0003-64-bit-multilib-hack.patch
+++ b/meta/recipes-devtools/gcc/gcc/0003-64-bit-multilib-hack.patch
@@ -1,7 +1,7 @@
-From 34b861e7a4cfd7b1f0d2c0f8cf9bb0b0b81eb61a Mon Sep 17 00:00:00 2001
+From febfac59d0e8a864370d0b4018b4e497ceec156d Mon Sep 17 00:00:00 2001
From: Khem Raj <raj.khem@gmail.com>
Date: Fri, 29 Mar 2013 09:10:06 +0400
-Subject: [PATCH] 64-bit multilib hack.
+Subject: [PATCH] 64-bit multilib hack
GCC has internal multilib handling code but it assumes a very specific rigid directory
layout. The build system implementation of multilib layout is very generic and allows
@@ -28,18 +28,19 @@ Upstream-Status: Inappropriate [OE-Specific]
Signed-off-by: Khem Raj <raj.khem@gmail.com>
Signed-off-by: Elvis Dowson <elvis.dowson@gmail.com>
Signed-off-by: Mark Hatle <mark.hatle@windriver.com>
-Signed-off-by: Khem Raj <raj.khem@gmail.com>
+Signed-off-by: Zang Ruochen <zangruochen@loongson.cn>
---
gcc/config/aarch64/t-aarch64-linux | 8 ++++----
gcc/config/arc/t-multilib-linux | 4 ++--
gcc/config/i386/t-linux64 | 6 ++----
- gcc/config/mips/t-linux64 | 10 +++-------
- gcc/config/riscv/t-linux | 6 ++++--
+ gcc/config/mips/t-linux64 | 28 ++--------------------------
+ gcc/config/riscv/t-linux | 4 ++--
gcc/config/rs6000/t-linux64 | 5 ++---
- 6 files changed, 17 insertions(+), 22 deletions(-)
+ gcc/config/loongarch/t-linux | 34 ++++++++++++++++++----------------
+ 7 files changed, 32 insertions(+), 57 deletions(-)
diff --git a/gcc/config/aarch64/t-aarch64-linux b/gcc/config/aarch64/t-aarch64-linux
-index d0cd546002a..f4056d68372 100644
+index 57bf4100fcd..aaef5da8059 100644
--- a/gcc/config/aarch64/t-aarch64-linux
+++ b/gcc/config/aarch64/t-aarch64-linux
@@ -21,8 +21,8 @@
@@ -56,7 +57,7 @@ index d0cd546002a..f4056d68372 100644
-MULTILIB_OSDIRNAMES += mabi.ilp32=../libilp32$(call if_multiarch,:aarch64$(AARCH_BE)-linux-gnu_ilp32)
+#MULTILIB_OSDIRNAMES += mabi.ilp32=../libilp32$(call if_multiarch,:aarch64$(AARCH_BE)-linux-gnu_ilp32)
diff --git a/gcc/config/arc/t-multilib-linux b/gcc/config/arc/t-multilib-linux
-index ecb9ae6859f..12a164028d4 100644
+index a839e4ea67c..f92664573a9 100644
--- a/gcc/config/arc/t-multilib-linux
+++ b/gcc/config/arc/t-multilib-linux
@@ -16,9 +16,9 @@
@@ -72,7 +73,7 @@ index ecb9ae6859f..12a164028d4 100644
# Aliases:
MULTILIB_MATCHES += mcpu?arc700=mA7
diff --git a/gcc/config/i386/t-linux64 b/gcc/config/i386/t-linux64
-index 5526ad0e6cc..fa51c88912b 100644
+index 138956b0962..d6e0cdc4342 100644
--- a/gcc/config/i386/t-linux64
+++ b/gcc/config/i386/t-linux64
@@ -32,7 +32,5 @@
@@ -86,37 +87,53 @@ index 5526ad0e6cc..fa51c88912b 100644
+MULTILIB_DIRNAMES = . .
+MULTILIB_OSDIRNAMES = ../$(shell basename $(base_libdir)) ../$(shell basename $(base_libdir))
diff --git a/gcc/config/mips/t-linux64 b/gcc/config/mips/t-linux64
-index 2fdd8e00407..04f2099250f 100644
+index 176091cabb6..8258ef40559 100644
--- a/gcc/config/mips/t-linux64
+++ b/gcc/config/mips/t-linux64
-@@ -17,10 +17,6 @@
+@@ -17,29 +17,5 @@
# <http://www.gnu.org/licenses/>.
MULTILIB_OPTIONS = mabi=n32/mabi=32/mabi=64
-MULTILIB_DIRNAMES = n32 32 64
-MIPS_EL = $(if $(filter %el, $(firstword $(subst -, ,$(target)))),el)
-MIPS_SOFT = $(if $(strip $(filter MASK_SOFT_FLOAT_ABI, $(target_cpu_default)) $(filter soft, $(with_float))),soft)
--MULTILIB_OSDIRNAMES = \
+-ifeq (yes,$(enable_multiarch))
+- ifneq (,$(findstring gnuabi64,$(target)))
+- MULTILIB_OSDIRNAMES = \
+- ../lib32$(call if_multiarch,:mips64$(MIPS_EL)-linux-gnuabin32$(MIPS_SOFT)) \
+- ../libo32$(call if_multiarch,:mips$(MIPS_EL)-linux-gnu$(MIPS_SOFT)) \
+- ../lib$(call if_multiarch,:mips64$(MIPS_EL)-linux-gnuabi64$(MIPS_SOFT))
+- else ifneq (,$(findstring gnuabin32,$(target)))
+- MULTILIB_OSDIRNAMES = \
+- ../lib$(call if_multiarch,:mips64$(MIPS_EL)-linux-gnuabin32$(MIPS_SOFT)) \
+- ../libo32$(call if_multiarch,:mips$(MIPS_EL)-linux-gnu$(MIPS_SOFT)) \
+- ../lib64$(call if_multiarch,:mips64$(MIPS_EL)-linux-gnuabi64$(MIPS_SOFT))
+- else
+- MULTILIB_OSDIRNAMES = \
- ../lib32$(call if_multiarch,:mips64$(MIPS_EL)-linux-gnuabin32$(MIPS_SOFT)) \
- ../lib$(call if_multiarch,:mips$(MIPS_EL)-linux-gnu$(MIPS_SOFT)) \
- ../lib64$(call if_multiarch,:mips64$(MIPS_EL)-linux-gnuabi64$(MIPS_SOFT))
+- endif
+-else
+- MULTILIB_OSDIRNAMES = \
+- ../lib32$(call if_multiarch,:mips64$(MIPS_EL)-linux-gnuabin32$(MIPS_SOFT)) \
+- ../lib$(call if_multiarch,:mips$(MIPS_EL)-linux-gnu$(MIPS_SOFT)) \
+- ../lib64$(call if_multiarch,:mips64$(MIPS_EL)-linux-gnuabi64$(MIPS_SOFT))
+-endif
+MULTILIB_DIRNAMES = . . .
+MULTILIB_OSDIRNAMES = ../$(shell basename $(base_libdir)) ../$(shell basename $(base_libdir)) ../$(shell basename $(base_libdir))
-+
diff --git a/gcc/config/riscv/t-linux b/gcc/config/riscv/t-linux
-index 216d2776a18..e4d817621fc 100644
+index 216d2776a18..e3c520f4bf6 100644
--- a/gcc/config/riscv/t-linux
+++ b/gcc/config/riscv/t-linux
-@@ -1,3 +1,5 @@
+@@ -1,3 +1,3 @@
# Only XLEN and ABI affect Linux multilib dir names, e.g. /lib32/ilp32d/
-MULTILIB_DIRNAMES := $(patsubst rv32%,lib32,$(patsubst rv64%,lib64,$(MULTILIB_DIRNAMES)))
-MULTILIB_OSDIRNAMES := $(patsubst lib%,../lib%,$(MULTILIB_DIRNAMES))
+#MULTILIB_DIRNAMES := $(patsubst rv32%,lib32,$(patsubst rv64%,lib64,$(MULTILIB_DIRNAMES)))
-+MULTILIB_DIRNAMES := . .
+#MULTILIB_OSDIRNAMES := $(patsubst lib%,../lib%,$(MULTILIB_DIRNAMES))
-+MULTILIB_OSDIRNAMES := ../$(shell basename $(base_libdir)) ../$(shell basename $(base_libdir))
diff --git a/gcc/config/rs6000/t-linux64 b/gcc/config/rs6000/t-linux64
-index 47e0efd5764..05f5a3f188e 100644
+index 01a94242308..1429eceaebf 100644
--- a/gcc/config/rs6000/t-linux64
+++ b/gcc/config/rs6000/t-linux64
@@ -26,10 +26,9 @@
@@ -132,3 +149,53 @@ index 47e0efd5764..05f5a3f188e 100644
rs6000-linux.o: $(srcdir)/config/rs6000/rs6000-linux.cc
$(COMPILE) $<
+diff --git a/gcc/config/loongarch/t-linux b/gcc/config/loongarch/t-linux
+index e40da1792..0c7ec9f8a 100644
+--- a/gcc/config/loongarch/t-linux
++++ b/gcc/config/loongarch/t-linux
+@@ -18,7 +18,9 @@
+
+ # Multilib
+ MULTILIB_OPTIONS = mabi=lp64d/mabi=lp64f/mabi=lp64s
+-MULTILIB_DIRNAMES = base/lp64d base/lp64f base/lp64s
++#MULTILIB_DIRNAMES = base/lp64d base/lp64f base/lp64s
++MULTILIB_DIRNAMES = . . .
++MULTILIB_OSDIRNAMES = ../$(shell basename $(base_libdir)) ../$(shell basename $(base_libdir)) ../$(shell basename $(base_libdir))
+
+ # The GCC driver always gets all abi-related options on the command line.
+ # (see loongarch-driver.c:driver_get_normalized_m_opts)
+@@ -36,18 +38,18 @@ else
+ endif
+
+ # Don't define MULTILIB_OSDIRNAMES if multilib is disabled.
+-ifeq ($(filter LA_DISABLE_MULTILIB,$(tm_defines)),)
+-
+- MULTILIB_OSDIRNAMES = \
+- mabi.lp64d=../lib64$\
+- $(call if_multiarch,:loongarch64-linux-gnu)
+-
+- MULTILIB_OSDIRNAMES += \
+- mabi.lp64f=../lib64/f32$\
+- $(call if_multiarch,:loongarch64-linux-gnuf32)
+-
+- MULTILIB_OSDIRNAMES += \
+- mabi.lp64s=../lib64/sf$\
+- $(call if_multiarch,:loongarch64-linux-gnusf)
+-
+-endif
++#ifeq ($(filter LA_DISABLE_MULTILIB,$(tm_defines)),)
++#
++# MULTILIB_OSDIRNAMES = \
++# mabi.lp64d=../lib64$\
++# $(call if_multiarch,:loongarch64-linux-gnu)
++#
++# MULTILIB_OSDIRNAMES += \
++# mabi.lp64f=../lib64/f32$\
++# $(call if_multiarch,:loongarch64-linux-gnuf32)
++#
++# MULTILIB_OSDIRNAMES += \
++# mabi.lp64s=../lib64/sf$\
++# $(call if_multiarch,:loongarch64-linux-gnusf)
++#
++#endif
+
diff --git a/meta/recipes-devtools/gcc/gcc/0004-Pass-CXXFLAGS_FOR_BUILD-in-a-couple-of-places-to-avo.patch b/meta/recipes-devtools/gcc/gcc/0004-Pass-CXXFLAGS_FOR_BUILD-in-a-couple-of-places-to-avo.patch
index e8f2163476..7e33bf17b0 100644
--- a/meta/recipes-devtools/gcc/gcc/0004-Pass-CXXFLAGS_FOR_BUILD-in-a-couple-of-places-to-avo.patch
+++ b/meta/recipes-devtools/gcc/gcc/0004-Pass-CXXFLAGS_FOR_BUILD-in-a-couple-of-places-to-avo.patch
@@ -1,4 +1,4 @@
-From 7f40f8321fb999e9b34d948724517d3fb0d26820 Mon Sep 17 00:00:00 2001
+From 6fbf920ccde6efc2d0caafde996d9e5738a1ba37 Mon Sep 17 00:00:00 2001
From: Richard Purdie <richard.purdie@linuxfoundation.org>
Date: Thu, 28 Oct 2021 11:33:40 +0100
Subject: [PATCH] Pass CXXFLAGS_FOR_BUILD in a couple of places to avoid these
@@ -13,7 +13,7 @@ ChangeLog:
* Makefile.in: Regenerate.
* Makefile.tpl: Add missing CXXFLAGS_FOR_BUILD overrides
-Upstream-Status: Pending [should be submittable]
+Upstream-Status: Submitted [https://gcc.gnu.org/pipermail/gcc-patches/2023-February/612560.html]
Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
Signed-off-by: Khem Raj <raj.khem@gmail.com>
@@ -23,10 +23,10 @@ Signed-off-by: Khem Raj <raj.khem@gmail.com>
2 files changed, 4 insertions(+)
diff --git a/Makefile.in b/Makefile.in
-index 593495e1650..1d9c83cc566 100644
+index 06a9398e172..4b0069b257c 100644
--- a/Makefile.in
+++ b/Makefile.in
-@@ -176,6 +176,7 @@ BUILD_EXPORTS = \
+@@ -178,6 +178,7 @@ BUILD_EXPORTS = \
# built for the build system to override those in BASE_FLAGS_TO_PASS.
EXTRA_BUILD_FLAGS = \
CFLAGS="$(CFLAGS_FOR_BUILD)" \
@@ -34,7 +34,7 @@ index 593495e1650..1d9c83cc566 100644
LDFLAGS="$(LDFLAGS_FOR_BUILD)"
# This is the list of directories to built for the host system.
-@@ -207,6 +208,7 @@ HOST_EXPORTS = \
+@@ -210,6 +211,7 @@ HOST_EXPORTS = \
CPP_FOR_BUILD="$(CPP_FOR_BUILD)"; export CPP_FOR_BUILD; \
CPPFLAGS_FOR_BUILD="$(CPPFLAGS_FOR_BUILD)"; export CPPFLAGS_FOR_BUILD; \
CXX_FOR_BUILD="$(CXX_FOR_BUILD)"; export CXX_FOR_BUILD; \
@@ -43,10 +43,10 @@ index 593495e1650..1d9c83cc566 100644
DSYMUTIL="$(DSYMUTIL)"; export DSYMUTIL; \
LD="$(LD)"; export LD; \
diff --git a/Makefile.tpl b/Makefile.tpl
-index ef58fac2b9a..bab04f335c2 100644
+index dfbd74b68f8..419b332953b 100644
--- a/Makefile.tpl
+++ b/Makefile.tpl
-@@ -179,6 +179,7 @@ BUILD_EXPORTS = \
+@@ -181,6 +181,7 @@ BUILD_EXPORTS = \
# built for the build system to override those in BASE_FLAGS_TO_PASS.
EXTRA_BUILD_FLAGS = \
CFLAGS="$(CFLAGS_FOR_BUILD)" \
@@ -54,7 +54,7 @@ index ef58fac2b9a..bab04f335c2 100644
LDFLAGS="$(LDFLAGS_FOR_BUILD)"
# This is the list of directories to built for the host system.
-@@ -210,6 +211,7 @@ HOST_EXPORTS = \
+@@ -213,6 +214,7 @@ HOST_EXPORTS = \
CPP_FOR_BUILD="$(CPP_FOR_BUILD)"; export CPP_FOR_BUILD; \
CPPFLAGS_FOR_BUILD="$(CPPFLAGS_FOR_BUILD)"; export CPPFLAGS_FOR_BUILD; \
CXX_FOR_BUILD="$(CXX_FOR_BUILD)"; export CXX_FOR_BUILD; \
diff --git a/meta/recipes-devtools/gcc/gcc/0005-Use-the-defaults.h-in-B-instead-of-S-and-t-oe-in-B.patch b/meta/recipes-devtools/gcc/gcc/0005-Use-the-defaults.h-in-B-instead-of-S-and-t-oe-in-B.patch
index e34eb2cf3f..db2fea3d16 100644
--- a/meta/recipes-devtools/gcc/gcc/0005-Use-the-defaults.h-in-B-instead-of-S-and-t-oe-in-B.patch
+++ b/meta/recipes-devtools/gcc/gcc/0005-Use-the-defaults.h-in-B-instead-of-S-and-t-oe-in-B.patch
@@ -1,4 +1,4 @@
-From 5455fc1de74897a27c1199dc5611ec02243e24af Mon Sep 17 00:00:00 2001
+From 2cedf13819c0cc929660072d8a972f5e422f9701 Mon Sep 17 00:00:00 2001
From: Khem Raj <raj.khem@gmail.com>
Date: Fri, 29 Mar 2013 09:17:25 +0400
Subject: [PATCH] Use the defaults.h in ${B} instead of ${S}, and t-oe in ${B}
@@ -7,16 +7,16 @@ Use the defaults.h in ${B} instead of ${S}, and t-oe in ${B}, so that
the source can be shared between gcc-cross-initial,
gcc-cross-intermediate, gcc-cross, gcc-runtime, and also the sdk build.
-Signed-off-by: Khem Raj <raj.khem@gmail.com>
-
-Upstream-Status: Pending
-
While compiling gcc-crosssdk-initial-x86_64 on some host, there is
occasionally failure that test the existance of default.h doesn't
work, the reason is tm_include_list='** defaults.h' rather than
tm_include_list='** ./defaults.h'
So we add the test condition for this situation.
+
+Upstream-Status: Inappropriate [embedded specific]
+
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
Signed-off-by: Hongxu Jia <hongxu.jia@windriver.com>
---
gcc/Makefile.in | 2 +-
@@ -26,10 +26,10 @@ Signed-off-by: Hongxu Jia <hongxu.jia@windriver.com>
4 files changed, 7 insertions(+), 7 deletions(-)
diff --git a/gcc/Makefile.in b/gcc/Makefile.in
-index 31ff95500c9..a8277254696 100644
+index 775aaa1b3c4..04f28984b34 100644
--- a/gcc/Makefile.in
+++ b/gcc/Makefile.in
-@@ -553,7 +553,7 @@ TARGET_SYSTEM_ROOT = @TARGET_SYSTEM_ROOT@
+@@ -561,7 +561,7 @@ TARGET_SYSTEM_ROOT = @TARGET_SYSTEM_ROOT@
TARGET_SYSTEM_ROOT_DEFINE = @TARGET_SYSTEM_ROOT_DEFINE@
xmake_file=@xmake_file@
@@ -39,10 +39,10 @@ index 31ff95500c9..a8277254696 100644
TM_MULTILIB_CONFIG=@TM_MULTILIB_CONFIG@
TM_MULTILIB_EXCEPTIONS_CONFIG=@TM_MULTILIB_EXCEPTIONS_CONFIG@
diff --git a/gcc/configure b/gcc/configure
-index dc2d59701ad..3fc0e2f5813 100755
+index 3508be7b439..cf773a8b854 100755
--- a/gcc/configure
+++ b/gcc/configure
-@@ -13381,8 +13381,8 @@ for f in $tm_file; do
+@@ -13507,8 +13507,8 @@ for f in $tm_file; do
tm_include_list="${tm_include_list} $f"
;;
defaults.h )
@@ -54,10 +54,10 @@ index dc2d59701ad..3fc0e2f5813 100755
* )
tm_file_list="${tm_file_list} \$(srcdir)/config/$f"
diff --git a/gcc/configure.ac b/gcc/configure.ac
-index 36ce78924de..46de496b256 100644
+index 6cd01a8966b..22591478b72 100644
--- a/gcc/configure.ac
+++ b/gcc/configure.ac
-@@ -2332,8 +2332,8 @@ for f in $tm_file; do
+@@ -2357,8 +2357,8 @@ for f in $tm_file; do
tm_include_list="${tm_include_list} $f"
;;
defaults.h )
@@ -69,7 +69,7 @@ index 36ce78924de..46de496b256 100644
* )
tm_file_list="${tm_file_list} \$(srcdir)/config/$f"
diff --git a/gcc/mkconfig.sh b/gcc/mkconfig.sh
-index 91cc43f69ff..8de33713cd8 100644
+index 054ede89647..3b2c2b9df37 100644
--- a/gcc/mkconfig.sh
+++ b/gcc/mkconfig.sh
@@ -77,7 +77,7 @@ if [ -n "$HEADERS" ]; then
diff --git a/meta/recipes-devtools/gcc/gcc/0006-cpp-honor-sysroot.patch b/meta/recipes-devtools/gcc/gcc/0006-cpp-honor-sysroot.patch
index b08aecc736..704c44cb72 100644
--- a/meta/recipes-devtools/gcc/gcc/0006-cpp-honor-sysroot.patch
+++ b/meta/recipes-devtools/gcc/gcc/0006-cpp-honor-sysroot.patch
@@ -1,4 +1,4 @@
-From abc3b82ab24169277f2090e9df1ceac3573142be Mon Sep 17 00:00:00 2001
+From f0b4d02a3a3dca1d67fd7add15ed63c2cd572bb9 Mon Sep 17 00:00:00 2001
From: Khem Raj <raj.khem@gmail.com>
Date: Fri, 29 Mar 2013 09:22:00 +0400
Subject: [PATCH] cpp: honor sysroot.
@@ -17,16 +17,16 @@ The fix below adds %I to the cpp-output spec macro so the default substitutions
RP 2012/04/13
-Signed-off-by: Khem Raj <raj.khem@gmail.com>
+Upstream-Status: Inappropriate [embedded specific]
-Upstream-Status: Pending
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
---
gcc/cp/lang-specs.h | 2 +-
gcc/gcc.cc | 2 +-
2 files changed, 2 insertions(+), 2 deletions(-)
diff --git a/gcc/cp/lang-specs.h b/gcc/cp/lang-specs.h
-index f35c9fab76b..19ddc98ce7f 100644
+index c591d155cc1..61927869fe1 100644
--- a/gcc/cp/lang-specs.h
+++ b/gcc/cp/lang-specs.h
@@ -116,7 +116,7 @@ along with GCC; see the file COPYING3. If not see
@@ -39,11 +39,11 @@ index f35c9fab76b..19ddc98ce7f 100644
" %{fmodule-only:%{!S:-o %g.s%V}}"
" %{!fmodule-only:%{!fmodule-header*:%(invoke_as)}}}"
diff --git a/gcc/gcc.cc b/gcc/gcc.cc
-index ce161d3c853..aa4cf92fb78 100644
+index 5feae021545..8af0c814c33 100644
--- a/gcc/gcc.cc
+++ b/gcc/gcc.cc
-@@ -1476,7 +1476,7 @@ static const struct compiler default_compilers[] =
- %W{o*:--output-pch=%*}}%V}}}}}}}", 0, 0, 0},
+@@ -1468,7 +1468,7 @@ static const struct compiler default_compilers[] =
+ %W{o*:--output-pch %*}}%V}}}}}}}", 0, 0, 0},
{".i", "@cpp-output", 0, 0, 0},
{"@cpp-output",
- "%{!M:%{!MM:%{!E:cc1 -fpreprocessed %i %(cc1_options) %{!fsyntax-only:%(invoke_as)}}}}", 0, 0, 0},
diff --git a/meta/recipes-devtools/gcc/gcc/0007-Define-GLIBC_DYNAMIC_LINKER-and-UCLIBC_DYNAMIC_LINKE.patch b/meta/recipes-devtools/gcc/gcc/0007-Define-GLIBC_DYNAMIC_LINKER-and-UCLIBC_DYNAMIC_LINKE.patch
index b59eed57e9..079142c540 100644
--- a/meta/recipes-devtools/gcc/gcc/0007-Define-GLIBC_DYNAMIC_LINKER-and-UCLIBC_DYNAMIC_LINKE.patch
+++ b/meta/recipes-devtools/gcc/gcc/0007-Define-GLIBC_DYNAMIC_LINKER-and-UCLIBC_DYNAMIC_LINKE.patch
@@ -1,4 +1,4 @@
-From 4de00af67b57b5440bdf61ab364ad959ad0aeee7 Mon Sep 17 00:00:00 2001
+From aacfd6e14dd583b1fdc65691def61c5e1bc89708 Mon Sep 17 00:00:00 2001
From: Khem Raj <raj.khem@gmail.com>
Date: Fri, 29 Mar 2013 09:24:50 +0400
Subject: [PATCH] Define GLIBC_DYNAMIC_LINKER and UCLIBC_DYNAMIC_LINKER
@@ -12,8 +12,6 @@ SH, sparc, alpha for possible future support (if any)
Removes the do_headerfix task in metadata
-Signed-off-by: Khem Raj <raj.khem@gmail.com>
-
Upstream-Status: Inappropriate [OE configuration]
Signed-off-by: Khem Raj <raj.khem@gmail.com>
---
@@ -38,7 +36,7 @@ Signed-off-by: Khem Raj <raj.khem@gmail.com>
18 files changed, 53 insertions(+), 58 deletions(-)
diff --git a/gcc/config/aarch64/aarch64-linux.h b/gcc/config/aarch64/aarch64-linux.h
-index 5e4553d79f5..877e8841eb2 100644
+index 4277f03da2a..e4c92c03291 100644
--- a/gcc/config/aarch64/aarch64-linux.h
+++ b/gcc/config/aarch64/aarch64-linux.h
@@ -21,10 +21,10 @@
@@ -55,7 +53,7 @@ index 5e4553d79f5..877e8841eb2 100644
#undef ASAN_CC1_SPEC
#define ASAN_CC1_SPEC "%{%:sanitize(address):-funwind-tables}"
diff --git a/gcc/config/alpha/linux-elf.h b/gcc/config/alpha/linux-elf.h
-index 17f16a55910..0a7be38fa63 100644
+index 03f783f2ad1..4fa02668aa7 100644
--- a/gcc/config/alpha/linux-elf.h
+++ b/gcc/config/alpha/linux-elf.h
@@ -23,8 +23,8 @@ along with GCC; see the file COPYING3. If not see
@@ -70,10 +68,10 @@ index 17f16a55910..0a7be38fa63 100644
#define CHOOSE_DYNAMIC_LINKER(G, U) "%{mglibc:" G ";:" U "}"
#elif DEFAULT_LIBC == LIBC_GLIBC
diff --git a/gcc/config/arm/linux-eabi.h b/gcc/config/arm/linux-eabi.h
-index 50cc0bc6d08..17c18b27145 100644
+index a119875599d..dce7f59eeea 100644
--- a/gcc/config/arm/linux-eabi.h
+++ b/gcc/config/arm/linux-eabi.h
-@@ -65,8 +65,8 @@
+@@ -62,8 +62,8 @@
GLIBC_DYNAMIC_LINKER_DEFAULT and TARGET_DEFAULT_FLOAT_ABI. */
#undef GLIBC_DYNAMIC_LINKER
@@ -84,7 +82,7 @@ index 50cc0bc6d08..17c18b27145 100644
#define GLIBC_DYNAMIC_LINKER_DEFAULT GLIBC_DYNAMIC_LINKER_SOFT_FLOAT
#define GLIBC_DYNAMIC_LINKER \
-@@ -89,7 +89,7 @@
+@@ -86,7 +86,7 @@
#define MUSL_DYNAMIC_LINKER_E "%{mbig-endian:eb}"
#endif
#define MUSL_DYNAMIC_LINKER \
@@ -94,7 +92,7 @@ index 50cc0bc6d08..17c18b27145 100644
/* At this point, bpabi.h will have clobbered LINK_SPEC. We want to
use the GNU/Linux version, not the generic BPABI version. */
diff --git a/gcc/config/arm/linux-elf.h b/gcc/config/arm/linux-elf.h
-index df3da67c4f0..37456e9d5a4 100644
+index 7b7b7cbbe14..98ef2267117 100644
--- a/gcc/config/arm/linux-elf.h
+++ b/gcc/config/arm/linux-elf.h
@@ -60,7 +60,7 @@
@@ -107,7 +105,7 @@ index df3da67c4f0..37456e9d5a4 100644
#define LINUX_TARGET_LINK_SPEC "%{h*} \
%{static:-Bstatic} \
diff --git a/gcc/config/i386/linux.h b/gcc/config/i386/linux.h
-index 5d99ee56d5b..a76022c9ccc 100644
+index bbb7cc7115e..7d9272040ee 100644
--- a/gcc/config/i386/linux.h
+++ b/gcc/config/i386/linux.h
@@ -20,7 +20,7 @@ along with GCC; see the file COPYING3. If not see
@@ -121,7 +119,7 @@ index 5d99ee56d5b..a76022c9ccc 100644
-#define MUSL_DYNAMIC_LINKER "/lib/ld-musl-i386.so.1"
+#define MUSL_DYNAMIC_LINKER SYSTEMLIBS_DIR "ld-musl-i386.so.1"
diff --git a/gcc/config/i386/linux64.h b/gcc/config/i386/linux64.h
-index 8681e36f10d..ddce49b6b60 100644
+index 2bd9f48e271..dbbe7ca5440 100644
--- a/gcc/config/i386/linux64.h
+++ b/gcc/config/i386/linux64.h
@@ -27,13 +27,13 @@ see the files COPYING3 and COPYING.RUNTIME respectively. If not, see
@@ -145,10 +143,10 @@ index 8681e36f10d..ddce49b6b60 100644
-#define MUSL_DYNAMIC_LINKERX32 "/lib/ld-musl-x32.so.1"
+#define MUSL_DYNAMIC_LINKERX32 SYSTEMLIBS_DIR "ld-musl-x32.so.1"
diff --git a/gcc/config/linux.h b/gcc/config/linux.h
-index 74f70793d90..4ce173384ef 100644
+index e3aca79cccc..6491c6b84f5 100644
--- a/gcc/config/linux.h
+++ b/gcc/config/linux.h
-@@ -99,10 +99,10 @@ see the files COPYING3 and COPYING.RUNTIME respectively. If not, see
+@@ -86,10 +86,10 @@ see the files COPYING3 and COPYING.RUNTIME respectively. If not, see
GLIBC_DYNAMIC_LINKER must be defined for each target using them, or
GLIBC_DYNAMIC_LINKER32 and GLIBC_DYNAMIC_LINKER64 for targets
supporting both 32-bit and 64-bit compilation. */
@@ -164,7 +162,7 @@ index 74f70793d90..4ce173384ef 100644
#define BIONIC_DYNAMIC_LINKER32 "/system/bin/linker"
#define BIONIC_DYNAMIC_LINKER64 "/system/bin/linker64"
diff --git a/gcc/config/loongarch/gnu-user.h b/gcc/config/loongarch/gnu-user.h
-index 664dc9206ad..082bd7cfc6f 100644
+index aecaa02a199..62f88f7f9a2 100644
--- a/gcc/config/loongarch/gnu-user.h
+++ b/gcc/config/loongarch/gnu-user.h
@@ -31,11 +31,11 @@ along with GCC; see the file COPYING3. If not see
@@ -182,7 +180,7 @@ index 664dc9206ad..082bd7cfc6f 100644
#undef GNU_USER_TARGET_LINK_SPEC
#define GNU_USER_TARGET_LINK_SPEC \
diff --git a/gcc/config/microblaze/linux.h b/gcc/config/microblaze/linux.h
-index 5b1a365eda4..2e63df1ae9c 100644
+index e2e2c421c52..6f26480e3b5 100644
--- a/gcc/config/microblaze/linux.h
+++ b/gcc/config/microblaze/linux.h
@@ -28,7 +28,7 @@
@@ -204,7 +202,7 @@ index 5b1a365eda4..2e63df1ae9c 100644
#undef SUBTARGET_EXTRA_SPECS
#define SUBTARGET_EXTRA_SPECS \
diff --git a/gcc/config/mips/linux.h b/gcc/config/mips/linux.h
-index 230b7789bb8..d96d134bfcf 100644
+index 5add34ea134..34692b433b8 100644
--- a/gcc/config/mips/linux.h
+++ b/gcc/config/mips/linux.h
@@ -22,29 +22,29 @@ along with GCC; see the file COPYING3. If not see
@@ -247,22 +245,24 @@ index 230b7789bb8..d96d134bfcf 100644
#define BIONIC_DYNAMIC_LINKERN32 "/system/bin/linker32"
#define GNU_USER_DYNAMIC_LINKERN32 \
diff --git a/gcc/config/nios2/linux.h b/gcc/config/nios2/linux.h
-index f5dd813acad..7a13e1c9799 100644
+index 2ce097ebbce..1d45d7b4962 100644
--- a/gcc/config/nios2/linux.h
+++ b/gcc/config/nios2/linux.h
-@@ -29,8 +29,8 @@
+@@ -29,10 +29,10 @@
#undef CPP_SPEC
#define CPP_SPEC "%{posix:-D_POSIX_SOURCE} %{pthread:-D_REENTRANT}"
-#define GLIBC_DYNAMIC_LINKER "/lib/ld-linux-nios2.so.1"
--#define MUSL_DYNAMIC_LINKER "/lib/ld-musl-nios2.so.1"
+#define GLIBC_DYNAMIC_LINKER SYSTEMLIBS_DIR "ld-linux-nios2.so.1"
+
+ #undef MUSL_DYNAMIC_LINKER
+-#define MUSL_DYNAMIC_LINKER "/lib/ld-musl-nios2.so.1"
+#define MUSL_DYNAMIC_LINKER SYSTEMLIBS_DIR "ld-musl-nios2.so.1"
#undef LINK_SPEC
#define LINK_SPEC LINK_SPEC_ENDIAN \
diff --git a/gcc/config/riscv/linux.h b/gcc/config/riscv/linux.h
-index 38803723ba9..d5ef8a96a19 100644
+index 3e625e0f867..dc3afc97e27 100644
--- a/gcc/config/riscv/linux.h
+++ b/gcc/config/riscv/linux.h
@@ -22,7 +22,7 @@ along with GCC; see the file COPYING3. If not see
@@ -281,10 +281,10 @@ index 38803723ba9..d5ef8a96a19 100644
-#define MUSL_DYNAMIC_LINKER "/lib/ld-musl-riscv" XLEN_SPEC MUSL_ABI_SUFFIX ".so.1"
+#define MUSL_DYNAMIC_LINKER SYSTEMLIBS_DIR "ld-musl-riscv" XLEN_SPEC MUSL_ABI_SUFFIX ".so.1"
- /* Because RISC-V only has word-sized atomics, it requries libatomic where
- others do not. So link libatomic by default, as needed. */
+ #define ICACHE_FLUSH_FUNC "__riscv_flush_icache"
+
diff --git a/gcc/config/rs6000/linux64.h b/gcc/config/rs6000/linux64.h
-index b2a7afabc73..364c1a5b155 100644
+index 9e457033d11..2ddab7c99c1 100644
--- a/gcc/config/rs6000/linux64.h
+++ b/gcc/config/rs6000/linux64.h
@@ -339,24 +339,19 @@ extern int dot_symbols;
@@ -318,10 +318,10 @@ index b2a7afabc73..364c1a5b155 100644
#undef DEFAULT_ASM_ENDIAN
#if (TARGET_DEFAULT & MASK_LITTLE_ENDIAN)
diff --git a/gcc/config/rs6000/sysv4.h b/gcc/config/rs6000/sysv4.h
-index 7e2519de5d4..a73954d9de5 100644
+index ae932fc22f0..26db003cb3a 100644
--- a/gcc/config/rs6000/sysv4.h
+++ b/gcc/config/rs6000/sysv4.h
-@@ -779,10 +779,10 @@ GNU_USER_TARGET_CC1_SPEC
+@@ -771,10 +771,10 @@ GNU_USER_TARGET_CC1_SPEC
#define MUSL_DYNAMIC_LINKER_E ENDIAN_SELECT("","le","")
@@ -335,7 +335,7 @@ index 7e2519de5d4..a73954d9de5 100644
#ifndef GNU_USER_DYNAMIC_LINKER
#define GNU_USER_DYNAMIC_LINKER GLIBC_DYNAMIC_LINKER
diff --git a/gcc/config/s390/linux.h b/gcc/config/s390/linux.h
-index d7b7e7a7b02..0139b4d06ca 100644
+index 02aa1edaff8..fab268d61f4 100644
--- a/gcc/config/s390/linux.h
+++ b/gcc/config/s390/linux.h
@@ -72,13 +72,13 @@ along with GCC; see the file COPYING3. If not see
@@ -357,7 +357,7 @@ index d7b7e7a7b02..0139b4d06ca 100644
#undef LINK_SPEC
#define LINK_SPEC \
diff --git a/gcc/config/sh/linux.h b/gcc/config/sh/linux.h
-index d96d077c99e..7d27f9893ee 100644
+index 29f5902b98b..83d1e53e6e2 100644
--- a/gcc/config/sh/linux.h
+++ b/gcc/config/sh/linux.h
@@ -61,10 +61,10 @@ along with GCC; see the file COPYING3. If not see
@@ -374,7 +374,7 @@ index d96d077c99e..7d27f9893ee 100644
#undef SUBTARGET_LINK_EMUL_SUFFIX
#define SUBTARGET_LINK_EMUL_SUFFIX "%{mfdpic:_fd;:_linux}"
diff --git a/gcc/config/sparc/linux.h b/gcc/config/sparc/linux.h
-index 6a809e9092d..60603765ad6 100644
+index 0e33b3cac2c..84f29adbb35 100644
--- a/gcc/config/sparc/linux.h
+++ b/gcc/config/sparc/linux.h
@@ -78,7 +78,7 @@ extern const char *host_detect_local_cpu (int argc, const char **argv);
@@ -387,7 +387,7 @@ index 6a809e9092d..60603765ad6 100644
#undef LINK_SPEC
#define LINK_SPEC "-m elf32_sparc %{shared:-shared} \
diff --git a/gcc/config/sparc/linux64.h b/gcc/config/sparc/linux64.h
-index d08a2ef96fe..e6955da0a5b 100644
+index f1cc0a19e49..94bc2032803 100644
--- a/gcc/config/sparc/linux64.h
+++ b/gcc/config/sparc/linux64.h
@@ -78,8 +78,8 @@ along with GCC; see the file COPYING3. If not see
diff --git a/meta/recipes-devtools/gcc/gcc/0008-libtool.patch b/meta/recipes-devtools/gcc/gcc/0008-libtool.patch
index c9bc38ccf0..5b44dc809e 100644
--- a/meta/recipes-devtools/gcc/gcc/0008-libtool.patch
+++ b/meta/recipes-devtools/gcc/gcc/0008-libtool.patch
@@ -1,4 +1,4 @@
-From 5117519c1897a49b09fe7fff213b9c2ea15d37f5 Mon Sep 17 00:00:00 2001
+From 7608e93ab97e8c33e3b14323d0cabc651926e403 Mon Sep 17 00:00:00 2001
From: Khem Raj <raj.khem@gmail.com>
Date: Fri, 29 Mar 2013 09:29:11 +0400
Subject: [PATCH] libtool
@@ -15,9 +15,8 @@ to filter the zero case.
RP 2012/8/24
+Upstream-Status: Submitted [https://lists.gnu.org/archive/html/libtool-patches/2023-04/msg00000.html]
Signed-off-by: Khem Raj <raj.khem@gmail.com>
-
-Upstream-Status: Pending
---
ltmain.sh | 4 ++++
1 file changed, 4 insertions(+)
diff --git a/meta/recipes-devtools/gcc/gcc/0009-gcc-armv4-pass-fix-v4bx-to-linker-to-support-EABI.patch b/meta/recipes-devtools/gcc/gcc/0009-gcc-armv4-pass-fix-v4bx-to-linker-to-support-EABI.patch
index dd67b115f0..86542bd146 100644
--- a/meta/recipes-devtools/gcc/gcc/0009-gcc-armv4-pass-fix-v4bx-to-linker-to-support-EABI.patch
+++ b/meta/recipes-devtools/gcc/gcc/0009-gcc-armv4-pass-fix-v4bx-to-linker-to-support-EABI.patch
@@ -1,4 +1,4 @@
-From 32129f9682d0d27fc67af10f077ad2768935cbe6 Mon Sep 17 00:00:00 2001
+From 4b0efc18e0d91967a3db11d9ef0595a5a76ad67a Mon Sep 17 00:00:00 2001
From: Khem Raj <raj.khem@gmail.com>
Date: Fri, 29 Mar 2013 09:30:32 +0400
Subject: [PATCH] gcc: armv4: pass fix-v4bx to linker to support EABI.
@@ -11,18 +11,17 @@ for eabi defaulting toolchains.
We might want to send it upstream.
+Upstream-Status: Submitted [https://gcc.gnu.org/pipermail/gcc-patches/2023-April/615319.html]
Signed-off-by: Khem Raj <raj.khem@gmail.com>
-
-Upstream-Status: Pending
---
gcc/config/arm/linux-eabi.h | 6 +++++-
1 file changed, 5 insertions(+), 1 deletion(-)
diff --git a/gcc/config/arm/linux-eabi.h b/gcc/config/arm/linux-eabi.h
-index 17c18b27145..8eacb099317 100644
+index dce7f59eeea..27402c629c6 100644
--- a/gcc/config/arm/linux-eabi.h
+++ b/gcc/config/arm/linux-eabi.h
-@@ -91,10 +91,14 @@
+@@ -88,10 +88,14 @@
#define MUSL_DYNAMIC_LINKER \
SYSTEMLIBS_DIR "ld-musl-arm" MUSL_DYNAMIC_LINKER_E "%{mfloat-abi=hard:hf}%{mfdpic:-fdpic}.so.1"
diff --git a/meta/recipes-devtools/gcc/gcc/0010-Use-the-multilib-config-files-from-B-instead-of-usin.patch b/meta/recipes-devtools/gcc/gcc/0010-Use-the-multilib-config-files-from-B-instead-of-usin.patch
index 45edc62eb5..bc3943087c 100644
--- a/meta/recipes-devtools/gcc/gcc/0010-Use-the-multilib-config-files-from-B-instead-of-usin.patch
+++ b/meta/recipes-devtools/gcc/gcc/0010-Use-the-multilib-config-files-from-B-instead-of-usin.patch
@@ -1,4 +1,4 @@
-From bf85b8bbcb4b77725d4c22c1bb25a29f6ff21038 Mon Sep 17 00:00:00 2001
+From b015460586e2ea8a35a11d1a607728707bdf6509 Mon Sep 17 00:00:00 2001
From: Khem Raj <raj.khem@gmail.com>
Date: Fri, 29 Mar 2013 09:33:04 +0400
Subject: [PATCH] Use the multilib config files from ${B} instead of using the
@@ -18,10 +18,10 @@ Upstream-Status: Inappropriate [configuration]
2 files changed, 36 insertions(+), 8 deletions(-)
diff --git a/gcc/configure b/gcc/configure
-index 3fc0e2f5813..2f0f0e057a9 100755
+index cf773a8b854..448a1ec093e 100755
--- a/gcc/configure
+++ b/gcc/configure
-@@ -13361,10 +13361,20 @@ done
+@@ -13487,10 +13487,20 @@ done
tmake_file_=
for f in ${tmake_file}
do
@@ -46,7 +46,7 @@ index 3fc0e2f5813..2f0f0e057a9 100755
done
tmake_file="${tmake_file_}${omp_device_property_tmake_file}"
-@@ -13375,6 +13385,10 @@ tm_file_list="options.h"
+@@ -13501,6 +13511,10 @@ tm_file_list="options.h"
tm_include_list="options.h insn-constants.h"
for f in $tm_file; do
case $f in
@@ -58,10 +58,10 @@ index 3fc0e2f5813..2f0f0e057a9 100755
f=`echo $f | sed 's/^..//'`
tm_file_list="${tm_file_list} $f"
diff --git a/gcc/configure.ac b/gcc/configure.ac
-index 46de496b256..6155b83a732 100644
+index 22591478b72..b6e7f5149a7 100644
--- a/gcc/configure.ac
+++ b/gcc/configure.ac
-@@ -2312,10 +2312,20 @@ done
+@@ -2337,10 +2337,20 @@ done
tmake_file_=
for f in ${tmake_file}
do
@@ -86,7 +86,7 @@ index 46de496b256..6155b83a732 100644
done
tmake_file="${tmake_file_}${omp_device_property_tmake_file}"
-@@ -2326,6 +2336,10 @@ tm_file_list="options.h"
+@@ -2351,6 +2361,10 @@ tm_file_list="options.h"
tm_include_list="options.h insn-constants.h"
for f in $tm_file; do
case $f in
diff --git a/meta/recipes-devtools/gcc/gcc/0011-Avoid-using-libdir-from-.la-which-usually-points-to-.patch b/meta/recipes-devtools/gcc/gcc/0011-Avoid-using-libdir-from-.la-which-usually-points-to-.patch
deleted file mode 100644
index 352c6eec29..0000000000
--- a/meta/recipes-devtools/gcc/gcc/0011-Avoid-using-libdir-from-.la-which-usually-points-to-.patch
+++ /dev/null
@@ -1,28 +0,0 @@
-From e5463727ff028cee5e452da38f5b4c44d52e412e Mon Sep 17 00:00:00 2001
-From: Khem Raj <raj.khem@gmail.com>
-Date: Fri, 20 Feb 2015 09:39:38 +0000
-Subject: [PATCH] Avoid using libdir from .la which usually points to a host
- path
-
-Upstream-Status: Inappropriate [embedded specific]
-
-Signed-off-by: Jonathan Liu <net147@gmail.com>
-Signed-off-by: Khem Raj <raj.khem@gmail.com>
----
- ltmain.sh | 3 +++
- 1 file changed, 3 insertions(+)
-
-diff --git a/ltmain.sh b/ltmain.sh
-index ee938056bef..9ebc7e3d1e0 100644
---- a/ltmain.sh
-+++ b/ltmain.sh
-@@ -5628,6 +5628,9 @@ func_mode_link ()
- absdir="$abs_ladir"
- libdir="$abs_ladir"
- else
-+ # Instead of using libdir from .la which usually points to a host path,
-+ # use the path the .la is contained in.
-+ libdir="$abs_ladir"
- dir="$libdir"
- absdir="$libdir"
- fi
diff --git a/meta/recipes-devtools/gcc/gcc/0011-aarch64-Fix-include-paths-when-S-B.patch b/meta/recipes-devtools/gcc/gcc/0011-aarch64-Fix-include-paths-when-S-B.patch
new file mode 100644
index 0000000000..974aca5ee4
--- /dev/null
+++ b/meta/recipes-devtools/gcc/gcc/0011-aarch64-Fix-include-paths-when-S-B.patch
@@ -0,0 +1,55 @@
+From b7ce05b2d969b311c6061bda32c3117c76bf7e0c Mon Sep 17 00:00:00 2001
+From: Khem Raj <raj.khem@gmail.com>
+Date: Tue, 31 Jan 2023 22:03:38 -0800
+Subject: [PATCH] aarch64: Fix include paths when S != B
+
+aarch64.h gets copied into build directory when built out of tree, in
+this case build uses this file but does not find the includes inside it
+since they are not found in any of include paths specified in compiler
+cmdline.
+
+Fixes build errors like
+
+% g++ -c -isystem/mnt/b/yoe/master/build/tmp/work/x86_64-linux/gcc-cross-aarch64/13.0.1-r0/recipe-sysroot-native/usr/include -O2 -pipe -DIN_GCC -DCROSS_DIRECTORY_STRUCTURE -fno-exceptions -fno-rtti -fasynchronous-unwind-tables -W -Wall -Wno-narrowing -Wwrite-strings -Wcast-qual -Wmissing-format-attribute -Wconditionally-supported -Woverloaded-virtual -pedantic -Wno-long-long -Wno-variadic-macros -Wno-overlength-strings -DHAVE_CONFIG_H -DGENERATOR_FILE -I. -Ibuild -I../../../../../../../work-shared/gcc-13.0.1-r0/gcc-b2ec2504af77b35e748067eeb846821d12a6b6b4/gcc -I../../../../../../../work-shared/gcc-13.0.1-r0/gcc-b2ec2504af77b35e748067eeb846821d12a6b6b4/gcc/build -I../../../../../../../work-shared/gcc-13.0.1-r0/gcc-b2ec2504af77b35e748067eeb846821d12a6b6b4/gcc/../include -I../../../../../../../work-shared/gcc-13.0.1-r0/gcc-b2ec2504af77b35e748067eeb846821d12a6b6b4/gcc/../libcpp/include -o build/gencheck.o ../../../../../../../work-shared/gcc-13.0.1-r0/gcc-b2ec2504af77b35e748067eeb846821d12a6b6b4/gcc/gencheck.cc
+In file included from ./tm.h:34,
+ from ../../../../../../../work-shared/gcc-13.0.1-r0/gcc-b2ec2504af77b35e748067eeb846821d12a6b6b4/gcc/gencheck.cc:23:
+./config/aarch64/aarch64.h:164:10: fatal error: aarch64-option-extensions.def: No such file or directory
+ 164 | #include "aarch64-option-extensions.def"
+ | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+compilation terminated.
+
+See https://gcc.gnu.org/bugzilla/show_bug.cgi?id=105144
+
+Upstream-Status: Pending
+
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+---
+ gcc/config/aarch64/aarch64.h | 8 ++++----
+ 1 file changed, 4 insertions(+), 4 deletions(-)
+
+diff --git a/gcc/config/aarch64/aarch64.h b/gcc/config/aarch64/aarch64.h
+index 73b09e20508..10ea3672f20 100644
+--- a/gcc/config/aarch64/aarch64.h
++++ b/gcc/config/aarch64/aarch64.h
+@@ -161,8 +161,8 @@
+ enum class aarch64_feature : unsigned char {
+ #define AARCH64_OPT_EXTENSION(A, IDENT, C, D, E, F) IDENT,
+ #define AARCH64_ARCH(A, B, IDENT, D, E) IDENT,
+-#include "aarch64-option-extensions.def"
+-#include "aarch64-arches.def"
++#include "config/aarch64/aarch64-option-extensions.def"
++#include "config/aarch64/aarch64-arches.def"
+ };
+
+ /* Define unique flags for each of the above. */
+@@ -171,8 +171,8 @@ enum class aarch64_feature : unsigned char {
+ = aarch64_feature_flags (1) << int (aarch64_feature::IDENT);
+ #define AARCH64_OPT_EXTENSION(A, IDENT, C, D, E, F) HANDLE (IDENT)
+ #define AARCH64_ARCH(A, B, IDENT, D, E) HANDLE (IDENT)
+-#include "aarch64-option-extensions.def"
+-#include "aarch64-arches.def"
++#include "config/aarch64/aarch64-option-extensions.def"
++#include "config/aarch64/aarch64-arches.def"
+ #undef HANDLE
+
+ #endif
diff --git a/meta/recipes-devtools/gcc/gcc/0012-Avoid-using-libdir-from-.la-which-usually-points-to-.patch b/meta/recipes-devtools/gcc/gcc/0012-Avoid-using-libdir-from-.la-which-usually-points-to-.patch
new file mode 100644
index 0000000000..cf5efcd32d
--- /dev/null
+++ b/meta/recipes-devtools/gcc/gcc/0012-Avoid-using-libdir-from-.la-which-usually-points-to-.patch
@@ -0,0 +1,28 @@
+From 39ab6fe76f2788b2c989d29c9016f1fe53cb736e Mon Sep 17 00:00:00 2001
+From: Khem Raj <raj.khem@gmail.com>
+Date: Fri, 20 Feb 2015 09:39:38 +0000
+Subject: [PATCH] Avoid using libdir from .la which usually points to a host
+ path
+
+Upstream-Status: Inappropriate [embedded specific]
+
+Signed-off-by: Jonathan Liu <net147@gmail.com>
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+---
+ ltmain.sh | 3 +++
+ 1 file changed, 3 insertions(+)
+
+diff --git a/ltmain.sh b/ltmain.sh
+index ee938056bef..9ebc7e3d1e0 100644
+--- a/ltmain.sh
++++ b/ltmain.sh
+@@ -5628,6 +5628,9 @@ func_mode_link ()
+ absdir="$abs_ladir"
+ libdir="$abs_ladir"
+ else
++ # Instead of using libdir from .la which usually points to a host path,
++ # use the path the .la is contained in.
++ libdir="$abs_ladir"
+ dir="$libdir"
+ absdir="$libdir"
+ fi
diff --git a/meta/recipes-devtools/gcc/gcc/0012-export-CPP.patch b/meta/recipes-devtools/gcc/gcc/0012-export-CPP.patch
deleted file mode 100644
index 7e1ebef463..0000000000
--- a/meta/recipes-devtools/gcc/gcc/0012-export-CPP.patch
+++ /dev/null
@@ -1,50 +0,0 @@
-From 20afebc61199cd74481b0b831c1b56465cd37fa0 Mon Sep 17 00:00:00 2001
-From: Khem Raj <raj.khem@gmail.com>
-Date: Fri, 20 Feb 2015 09:40:59 +0000
-Subject: [PATCH] export CPP
-
-The OE environment sets and exports CPP as being the target gcc. When
-building gcc-cross-canadian for a mingw targetted sdk, the following can be found
-in build.x86_64-pokysdk-mingw32.i586-poky-linux/build-x86_64-linux/libiberty/config.log:
-
-configure:3641: checking for _FILE_OFFSET_BITS value needed for large files
-configure:3666: gcc -c -isystem/media/build1/poky/build/tmp/sysroots/x86_64-linux/usr/include -O2 -pipe conftest.c >&5
-configure:3666: $? = 0
-configure:3698: result: no
-configure:3786: checking how to run the C preprocessor
-configure:3856: result: x86_64-pokysdk-mingw32-gcc -E --sysroot=/media/build1/poky/build/tmp/sysroots/x86_64-nativesdk-mingw32-pokysdk-mingw32
-configure:3876: x86_64-pokysdk-mingw32-gcc -E --sysroot=/media/build1/poky/build/tmp/sysroots/x86_64-nativesdk-mingw32-pokysdk-mingw32 conftest.c
-configure:3876: $? = 0
-
-Note this is a *build* target (in build-x86_64-linux) so it should be
-using the host "gcc", not x86_64-pokysdk-mingw32-gcc. Since the mingw32
-headers are very different, using the wrong cpp is a real problem. It is leaking
-into configure through the CPP variable. Ultimately this leads to build
-failures related to not being able to include a process.h file for pem-unix.c.
-
-The fix is to ensure we export a sane CPP value into the build
-environment when using build targets. We could define a CPP_FOR_BUILD value which may be
-the version which needs to be upstreamed but for now, this fix is good enough to
-avoid the problem.
-
-RP 22/08/2013
-
-Upstream-Status: Pending
-
-Signed-off-by: Khem Raj <raj.khem@gmail.com>
----
- Makefile.in | 1 +
- 1 file changed, 1 insertion(+)
-
-diff --git a/Makefile.in b/Makefile.in
-index 1d9c83cc566..11819667751 100644
---- a/Makefile.in
-+++ b/Makefile.in
-@@ -152,6 +152,7 @@ BUILD_EXPORTS = \
- AR="$(AR_FOR_BUILD)"; export AR; \
- AS="$(AS_FOR_BUILD)"; export AS; \
- CC="$(CC_FOR_BUILD)"; export CC; \
-+ CPP="$(CC_FOR_BUILD) -E"; export CPP; \
- CFLAGS="$(CFLAGS_FOR_BUILD)"; export CFLAGS; \
- CONFIG_SHELL="$(SHELL)"; export CONFIG_SHELL; \
- CPP="$(CPP_FOR_BUILD)"; export CPP; \
diff --git a/meta/recipes-devtools/gcc/gcc/0013-Ensure-target-gcc-headers-can-be-included.patch b/meta/recipes-devtools/gcc/gcc/0013-Ensure-target-gcc-headers-can-be-included.patch
index 61e61ecc6e..e2343a3c17 100644
--- a/meta/recipes-devtools/gcc/gcc/0013-Ensure-target-gcc-headers-can-be-included.patch
+++ b/meta/recipes-devtools/gcc/gcc/0013-Ensure-target-gcc-headers-can-be-included.patch
@@ -1,4 +1,4 @@
-From 612801d426e75ff997cfabda380dbe52c2cbc532 Mon Sep 17 00:00:00 2001
+From 531b9df680c4380797e8e7705a8e7f8ed17ebe68 Mon Sep 17 00:00:00 2001
From: Khem Raj <raj.khem@gmail.com>
Date: Fri, 20 Feb 2015 10:25:11 +0000
Subject: [PATCH] Ensure target gcc headers can be included
@@ -13,7 +13,7 @@ command line in order to resolve this.
Extend target gcc headers search to musl too
-Upstream-Status: Pending
+Upstream-Status: Inappropriate [embedded specific]
Signed-off-by: Paul Eggleton <paul.eggleton@linux.intel.com>
Signed-off-by: Khem Raj <raj.khem@gmail.com>
@@ -25,10 +25,10 @@ Signed-off-by: Khem Raj <raj.khem@gmail.com>
4 files changed, 22 insertions(+)
diff --git a/gcc/Makefile.in b/gcc/Makefile.in
-index a8277254696..07fa63b6640 100644
+index 04f28984b34..8ef996c0f4d 100644
--- a/gcc/Makefile.in
+++ b/gcc/Makefile.in
-@@ -632,6 +632,7 @@ libexecdir = @libexecdir@
+@@ -640,6 +640,7 @@ libexecdir = @libexecdir@
# Directory in which the compiler finds libraries etc.
libsubdir = $(libdir)/gcc/$(real_target_noncanonical)/$(version)$(accel_dir_suffix)
@@ -36,7 +36,7 @@ index a8277254696..07fa63b6640 100644
# Directory in which the compiler finds executables
libexecsubdir = $(libexecdir)/gcc/$(real_target_noncanonical)/$(version)$(accel_dir_suffix)
# Directory in which all plugin resources are installed
-@@ -3024,6 +3025,7 @@ CFLAGS-intl.o += -DLOCALEDIR=\"$(localedir)\"
+@@ -3059,6 +3060,7 @@ CFLAGS-intl.o += -DLOCALEDIR=\"$(localedir)\"
PREPROCESSOR_DEFINES = \
-DGCC_INCLUDE_DIR=\"$(libsubdir)/include\" \
@@ -45,10 +45,10 @@ index a8277254696..07fa63b6640 100644
-DGPLUSPLUS_INCLUDE_DIR=\"$(gcc_gxx_include_dir)\" \
-DGPLUSPLUS_INCLUDE_DIR_ADD_SYSROOT=$(gcc_gxx_include_dir_add_sysroot) \
diff --git a/gcc/config/linux.h b/gcc/config/linux.h
-index 4ce173384ef..8a3cd4f2d34 100644
+index 6491c6b84f5..57496ff1f2f 100644
--- a/gcc/config/linux.h
+++ b/gcc/config/linux.h
-@@ -170,6 +170,13 @@ see the files COPYING3 and COPYING.RUNTIME respectively. If not, see
+@@ -157,6 +157,13 @@ see the files COPYING3 and COPYING.RUNTIME respectively. If not, see
#define INCLUDE_DEFAULTS_MUSL_TOOL
#endif
@@ -62,7 +62,7 @@ index 4ce173384ef..8a3cd4f2d34 100644
#ifdef NATIVE_SYSTEM_HEADER_DIR
#define INCLUDE_DEFAULTS_MUSL_NATIVE \
{ NATIVE_SYSTEM_HEADER_DIR, 0, 0, 0, 1, 2 }, \
-@@ -196,6 +203,7 @@ see the files COPYING3 and COPYING.RUNTIME respectively. If not, see
+@@ -183,6 +190,7 @@ see the files COPYING3 and COPYING.RUNTIME respectively. If not, see
INCLUDE_DEFAULTS_MUSL_PREFIX \
INCLUDE_DEFAULTS_MUSL_CROSS \
INCLUDE_DEFAULTS_MUSL_TOOL \
@@ -71,10 +71,10 @@ index 4ce173384ef..8a3cd4f2d34 100644
{ GCC_INCLUDE_DIR, "GCC", 0, 1, 0, 0 }, \
{ 0, 0, 0, 0, 0, 0 } \
diff --git a/gcc/config/rs6000/sysv4.h b/gcc/config/rs6000/sysv4.h
-index a73954d9de5..e5dd6538358 100644
+index 26db003cb3a..3a443abcf6b 100644
--- a/gcc/config/rs6000/sysv4.h
+++ b/gcc/config/rs6000/sysv4.h
-@@ -994,6 +994,13 @@ ncrtn.o%s"
+@@ -986,6 +986,13 @@ ncrtn.o%s"
#define INCLUDE_DEFAULTS_MUSL_TOOL
#endif
@@ -88,7 +88,7 @@ index a73954d9de5..e5dd6538358 100644
#ifdef NATIVE_SYSTEM_HEADER_DIR
#define INCLUDE_DEFAULTS_MUSL_NATIVE \
{ NATIVE_SYSTEM_HEADER_DIR, 0, 0, 0, 1, 2 }, \
-@@ -1020,6 +1027,7 @@ ncrtn.o%s"
+@@ -1012,6 +1019,7 @@ ncrtn.o%s"
INCLUDE_DEFAULTS_MUSL_PREFIX \
INCLUDE_DEFAULTS_MUSL_CROSS \
INCLUDE_DEFAULTS_MUSL_TOOL \
@@ -97,7 +97,7 @@ index a73954d9de5..e5dd6538358 100644
{ GCC_INCLUDE_DIR, "GCC", 0, 1, 0, 0 }, \
{ 0, 0, 0, 0, 0, 0 } \
diff --git a/gcc/cppdefault.cc b/gcc/cppdefault.cc
-index 7888300f277..52cf14e92f8 100644
+index 141bb4d25f6..734590a7059 100644
--- a/gcc/cppdefault.cc
+++ b/gcc/cppdefault.cc
@@ -64,6 +64,10 @@ const struct default_include cpp_include_defaults[]
diff --git a/meta/recipes-devtools/gcc/gcc/0014-Don-t-search-host-directory-during-relink-if-inst_pr.patch b/meta/recipes-devtools/gcc/gcc/0014-Don-t-search-host-directory-during-relink-if-inst_pr.patch
index 94308b2aca..30224d7485 100644
--- a/meta/recipes-devtools/gcc/gcc/0014-Don-t-search-host-directory-during-relink-if-inst_pr.patch
+++ b/meta/recipes-devtools/gcc/gcc/0014-Don-t-search-host-directory-during-relink-if-inst_pr.patch
@@ -1,4 +1,4 @@
-From 9ae49e7b88c208ab79ec9c2fc4a2fa8a3f1e85bb Mon Sep 17 00:00:00 2001
+From 793201cebfeb129f6f263e64310b30a0ffa48895 Mon Sep 17 00:00:00 2001
From: Khem Raj <raj.khem@gmail.com>
Date: Tue, 3 Mar 2015 08:21:19 +0000
Subject: [PATCH] Don't search host directory during "relink" if $inst_prefix
diff --git a/meta/recipes-devtools/gcc/gcc/0015-libcc1-fix-libcc1-s-install-path-and-rpath.patch b/meta/recipes-devtools/gcc/gcc/0015-libcc1-fix-libcc1-s-install-path-and-rpath.patch
index ce9635ce4d..33c601ac97 100644
--- a/meta/recipes-devtools/gcc/gcc/0015-libcc1-fix-libcc1-s-install-path-and-rpath.patch
+++ b/meta/recipes-devtools/gcc/gcc/0015-libcc1-fix-libcc1-s-install-path-and-rpath.patch
@@ -1,4 +1,4 @@
-From bf918db7117f41d3c04162095641165ca241707d Mon Sep 17 00:00:00 2001
+From 5de63874335c1c673dd132f6aca00dc13f1eac51 Mon Sep 17 00:00:00 2001
From: Robert Yang <liezhi.yang@windriver.com>
Date: Sun, 5 Jul 2015 20:25:18 -0700
Subject: [PATCH] libcc1: fix libcc1's install path and rpath
@@ -20,7 +20,7 @@ Signed-off-by: Robert Yang <liezhi.yang@windriver.com>
2 files changed, 4 insertions(+), 4 deletions(-)
diff --git a/libcc1/Makefile.am b/libcc1/Makefile.am
-index 6e3a34ff7e2..3f3f6391aba 100644
+index 921a33fe236..938e6f964cd 100644
--- a/libcc1/Makefile.am
+++ b/libcc1/Makefile.am
@@ -40,8 +40,8 @@ libiberty = $(if $(wildcard $(libiberty_noasan)),$(Wc)$(libiberty_noasan), \
diff --git a/meta/recipes-devtools/gcc/gcc/0016-handle-sysroot-support-for-nativesdk-gcc.patch b/meta/recipes-devtools/gcc/gcc/0016-handle-sysroot-support-for-nativesdk-gcc.patch
index 3b54719536..bdffcae7dc 100644
--- a/meta/recipes-devtools/gcc/gcc/0016-handle-sysroot-support-for-nativesdk-gcc.patch
+++ b/meta/recipes-devtools/gcc/gcc/0016-handle-sysroot-support-for-nativesdk-gcc.patch
@@ -1,4 +1,4 @@
-From 4fbbd40d7db89cdbeaf93df1e1da692b1f80a5bc Mon Sep 17 00:00:00 2001
+From bbc75b93bff66891fa7ffb3af5c6ad53df1fff68 Mon Sep 17 00:00:00 2001
From: Khem Raj <raj.khem@gmail.com>
Date: Mon, 7 Dec 2015 23:39:54 +0000
Subject: [PATCH] handle sysroot support for nativesdk-gcc
@@ -38,15 +38,15 @@ Signed-off-by: Mark Hatle <mark.hatle@kernel.crashing.org>
gcc/c-family/c-opts.cc | 4 +--
gcc/config/linux.h | 24 +++++++--------
gcc/config/rs6000/sysv4.h | 24 +++++++--------
- gcc/cppdefault.cc | 63 ++++++++++++++++++++++++---------------
- gcc/cppdefault.h | 13 ++++----
- gcc/gcc.cc | 20 +++++++++----
+ gcc/cppdefault.cc | 65 ++++++++++++++++++++++++---------------
+ gcc/cppdefault.h | 13 +++-----
+ gcc/gcc.cc | 20 ++++++++----
gcc/incpath.cc | 12 ++++----
gcc/prefix.cc | 6 ++--
- 8 files changed, 94 insertions(+), 72 deletions(-)
+ 8 files changed, 95 insertions(+), 73 deletions(-)
diff --git a/gcc/c-family/c-opts.cc b/gcc/c-family/c-opts.cc
-index a341a061758..83b0bef4dbb 100644
+index c68a2a27469..77e9b5eceaa 100644
--- a/gcc/c-family/c-opts.cc
+++ b/gcc/c-family/c-opts.cc
@@ -1458,8 +1458,8 @@ add_prefixed_path (const char *suffix, incpath_kind chain)
@@ -61,10 +61,10 @@ index a341a061758..83b0bef4dbb 100644
path = (char *) xmalloc (prefix_len + suffix_len + 1);
memcpy (path, prefix, prefix_len);
diff --git a/gcc/config/linux.h b/gcc/config/linux.h
-index 8a3cd4f2d34..58143dff731 100644
+index 57496ff1f2f..c921cf6ef63 100644
--- a/gcc/config/linux.h
+++ b/gcc/config/linux.h
-@@ -134,53 +134,53 @@ see the files COPYING3 and COPYING.RUNTIME respectively. If not, see
+@@ -121,53 +121,53 @@ see the files COPYING3 and COPYING.RUNTIME respectively. If not, see
* Unfortunately, this is mostly duplicated from cppdefault.cc */
#if DEFAULT_LIBC == LIBC_MUSL
#define INCLUDE_DEFAULTS_MUSL_GPP \
@@ -129,7 +129,7 @@ index 8a3cd4f2d34..58143dff731 100644
#else
#define INCLUDE_DEFAULTS_MUSL_NATIVE
#endif
-@@ -205,7 +205,7 @@ see the files COPYING3 and COPYING.RUNTIME respectively. If not, see
+@@ -192,7 +192,7 @@ see the files COPYING3 and COPYING.RUNTIME respectively. If not, see
INCLUDE_DEFAULTS_MUSL_TOOL \
INCLUDE_DEFAULTS_MUSL_SUBDIR_TARGET \
INCLUDE_DEFAULTS_MUSL_NATIVE \
@@ -139,10 +139,10 @@ index 8a3cd4f2d34..58143dff731 100644
}
#endif
diff --git a/gcc/config/rs6000/sysv4.h b/gcc/config/rs6000/sysv4.h
-index e5dd6538358..b496849b792 100644
+index 3a443abcf6b..ef83a5a432e 100644
--- a/gcc/config/rs6000/sysv4.h
+++ b/gcc/config/rs6000/sysv4.h
-@@ -958,53 +958,53 @@ ncrtn.o%s"
+@@ -950,53 +950,53 @@ ncrtn.o%s"
/* Include order changes for musl, same as in generic linux.h. */
#if DEFAULT_LIBC == LIBC_MUSL
#define INCLUDE_DEFAULTS_MUSL_GPP \
@@ -207,7 +207,7 @@ index e5dd6538358..b496849b792 100644
#else
#define INCLUDE_DEFAULTS_MUSL_NATIVE
#endif
-@@ -1029,7 +1029,7 @@ ncrtn.o%s"
+@@ -1021,7 +1021,7 @@ ncrtn.o%s"
INCLUDE_DEFAULTS_MUSL_TOOL \
INCLUDE_DEFAULTS_MUSL_SUBDIR_TARGET \
INCLUDE_DEFAULTS_MUSL_NATIVE \
@@ -217,7 +217,7 @@ index e5dd6538358..b496849b792 100644
}
#endif
diff --git a/gcc/cppdefault.cc b/gcc/cppdefault.cc
-index 52cf14e92f8..d8977afc05e 100644
+index 734590a7059..b4a8fc29e4a 100644
--- a/gcc/cppdefault.cc
+++ b/gcc/cppdefault.cc
@@ -35,6 +35,30 @@
@@ -272,7 +272,7 @@ index 52cf14e92f8..d8977afc05e 100644
GPLUSPLUS_INCLUDE_DIR_ADD_SYSROOT, 0 },
#endif
#ifdef GPLUSPLUS_LIBCXX_INCLUDE_DIR
-@@ -62,23 +86,23 @@ const struct default_include cpp_include_defaults[]
+@@ -62,26 +86,26 @@ const struct default_include cpp_include_defaults[]
#endif
#ifdef GCC_INCLUDE_DIR
/* This is the dir for gcc's private headers. */
@@ -297,12 +297,16 @@ index 52cf14e92f8..d8977afc05e 100644
#endif
#ifdef FIXED_INCLUDE_DIR
/* This is the dir for fixincludes. */
+ #ifndef SYSROOT_HEADERS_SUFFIX_SPEC
+- { FIXED_INCLUDE_DIR, "GCC", 0, 0, 0, 2 },
++ { FIXED_INCLUDE_DIRVAR, "GCC", 0, 0, 0, 2 },
+ #endif
- { FIXED_INCLUDE_DIR, "GCC", 0, 0, 0,
+ { FIXED_INCLUDE_DIRVAR, "GCC", 0, 0, 0,
/* A multilib suffix needs adding if different multilibs use
different headers. */
#ifdef SYSROOT_HEADERS_SUFFIX_SPEC
-@@ -90,33 +114,24 @@ const struct default_include cpp_include_defaults[]
+@@ -93,33 +117,24 @@ const struct default_include cpp_include_defaults[]
#endif
#ifdef CROSS_INCLUDE_DIR
/* One place the target system's headers might be. */
@@ -343,7 +347,7 @@ index 52cf14e92f8..d8977afc05e 100644
/* This value is set by cpp_relocated at runtime */
const char *gcc_exec_prefix;
diff --git a/gcc/cppdefault.h b/gcc/cppdefault.h
-index fb97c0b5814..6267150facc 100644
+index e26b424e99c..c9abb090dcd 100644
--- a/gcc/cppdefault.h
+++ b/gcc/cppdefault.h
@@ -33,7 +33,8 @@
@@ -378,10 +382,10 @@ index fb97c0b5814..6267150facc 100644
subdirectory of the actual installation. */
extern const char *gcc_exec_prefix;
diff --git a/gcc/gcc.cc b/gcc/gcc.cc
-index aa4cf92fb78..5569a39a14a 100644
+index 8af0c814c33..605fe3b8c0d 100644
--- a/gcc/gcc.cc
+++ b/gcc/gcc.cc
-@@ -252,6 +252,8 @@ FILE *report_times_to_file = NULL;
+@@ -255,6 +255,8 @@ FILE *report_times_to_file = NULL;
#endif
static const char *target_system_root = DEFAULT_TARGET_SYSTEM_ROOT;
@@ -390,7 +394,7 @@ index aa4cf92fb78..5569a39a14a 100644
/* Nonzero means pass the updated target_system_root to the compiler. */
static int target_system_root_changed;
-@@ -575,6 +577,7 @@ or with constant text in a single argument.
+@@ -578,6 +580,7 @@ or with constant text in a single argument.
%G process LIBGCC_SPEC as a spec.
%R Output the concatenation of target_system_root and
target_sysroot_suffix.
@@ -398,7 +402,7 @@ index aa4cf92fb78..5569a39a14a 100644
%S process STARTFILE_SPEC as a spec. A capital S is actually used here.
%E process ENDFILE_SPEC as a spec. A capital E is actually used here.
%C process CPP_SPEC as a spec.
-@@ -1627,10 +1630,10 @@ static const char *gcc_libexec_prefix;
+@@ -1619,10 +1622,10 @@ static const char *gcc_libexec_prefix;
gcc_exec_prefix is set because, in that case, we know where the
compiler has been installed, and use paths relative to that
location instead. */
@@ -413,7 +417,7 @@ index aa4cf92fb78..5569a39a14a 100644
/* For native compilers, these are well-known paths containing
components that may be provided by the system. For cross
-@@ -1638,9 +1641,9 @@ static const char *const standard_startfile_prefix = STANDARD_STARTFILE_PREFIX;
+@@ -1630,9 +1633,9 @@ static const char *const standard_startfile_prefix = STANDARD_STARTFILE_PREFIX;
static const char *md_exec_prefix = MD_EXEC_PREFIX;
static const char *md_startfile_prefix = MD_STARTFILE_PREFIX;
static const char *md_startfile_prefix_1 = MD_STARTFILE_PREFIX_1;
@@ -425,7 +429,7 @@ index aa4cf92fb78..5569a39a14a 100644
= STANDARD_STARTFILE_PREFIX_2;
/* A relative path to be used in finding the location of tools
-@@ -6676,6 +6679,11 @@ do_spec_1 (const char *spec, int inswitch, const char *soft_matched_part)
+@@ -6652,6 +6655,11 @@ do_spec_1 (const char *spec, int inswitch, const char *soft_matched_part)
}
break;
@@ -438,7 +442,7 @@ index aa4cf92fb78..5569a39a14a 100644
value = do_spec_1 (startfile_spec, 0, NULL);
if (value != 0)
diff --git a/gcc/incpath.cc b/gcc/incpath.cc
-index c80f100f476..5ac03c08693 100644
+index 46c0d543205..d088dae7b04 100644
--- a/gcc/incpath.cc
+++ b/gcc/incpath.cc
@@ -135,7 +135,7 @@ add_standard_paths (const char *sysroot, const char *iprefix,
@@ -485,10 +489,10 @@ index c80f100f476..5ac03c08693 100644
str = update_path (ostr, p->component);
free (ostr);
diff --git a/gcc/prefix.cc b/gcc/prefix.cc
-index 096ed5afa3d..2526f0ecc39 100644
+index c2a37bde5ea..33944701ced 100644
--- a/gcc/prefix.cc
+++ b/gcc/prefix.cc
-@@ -72,7 +72,9 @@ License along with GCC; see the file COPYING3. If not see
+@@ -73,7 +73,9 @@ License along with GCC; see the file COPYING3. If not see
#include "prefix.h"
#include "common/common-target.h"
@@ -499,7 +503,7 @@ index 096ed5afa3d..2526f0ecc39 100644
static const char *get_key_value (char *);
static char *translate_name (char *);
-@@ -212,7 +214,7 @@ translate_name (char *name)
+@@ -213,7 +215,7 @@ translate_name (char *name)
prefix = getenv (key);
if (prefix == 0)
diff --git a/meta/recipes-devtools/gcc/gcc/0017-Search-target-sysroot-gcc-version-specific-dirs-with.patch b/meta/recipes-devtools/gcc/gcc/0017-Search-target-sysroot-gcc-version-specific-dirs-with.patch
index 9b05da64a7..8a11049ca3 100644
--- a/meta/recipes-devtools/gcc/gcc/0017-Search-target-sysroot-gcc-version-specific-dirs-with.patch
+++ b/meta/recipes-devtools/gcc/gcc/0017-Search-target-sysroot-gcc-version-specific-dirs-with.patch
@@ -1,4 +1,4 @@
-From 33a1f07a4417247dc24819d4e583ca09f56d5a7b Mon Sep 17 00:00:00 2001
+From 7e095089452b6e895ec40981752e9f902f0ad889 Mon Sep 17 00:00:00 2001
From: Khem Raj <raj.khem@gmail.com>
Date: Mon, 7 Dec 2015 23:41:45 +0000
Subject: [PATCH] Search target sysroot gcc version specific dirs with
@@ -42,19 +42,20 @@ binaries can be found first. With this change the search path becomes:
<sysroot>/lib32/
<sysroot>/usr/lib32/
-Upstream-Status: Pending
RP 2015/7/31
+Upstream-Status: Submitted [https://gcc.gnu.org/pipermail/gcc-patches/2023-April/615320.html]
+
Signed-off-by: Khem Raj <raj.khem@gmail.com>
---
gcc/gcc.cc | 29 ++++++++++++++++++++++++++++-
1 file changed, 28 insertions(+), 1 deletion(-)
diff --git a/gcc/gcc.cc b/gcc/gcc.cc
-index 5569a39a14a..4598f6cd7c9 100644
+index 605fe3b8c0d..c3a1dab38c4 100644
--- a/gcc/gcc.cc
+++ b/gcc/gcc.cc
-@@ -2817,7 +2817,7 @@ for_each_path (const struct path_prefix *paths,
+@@ -2809,7 +2809,7 @@ for_each_path (const struct path_prefix *paths,
if (path == NULL)
{
len = paths->max_len + extra_space + 1;
@@ -63,7 +64,7 @@ index 5569a39a14a..4598f6cd7c9 100644
path = XNEWVEC (char, len);
}
-@@ -2829,6 +2829,33 @@ for_each_path (const struct path_prefix *paths,
+@@ -2821,6 +2821,33 @@ for_each_path (const struct path_prefix *paths,
/* Look first in MACHINE/VERSION subdirectory. */
if (!skip_multi_dir)
{
diff --git a/meta/recipes-devtools/gcc/gcc/0018-Add-ssp_nonshared-to-link-commandline-for-musl-targe.patch b/meta/recipes-devtools/gcc/gcc/0018-Add-ssp_nonshared-to-link-commandline-for-musl-targe.patch
index 56793e03a3..9bc77b485f 100644
--- a/meta/recipes-devtools/gcc/gcc/0018-Add-ssp_nonshared-to-link-commandline-for-musl-targe.patch
+++ b/meta/recipes-devtools/gcc/gcc/0018-Add-ssp_nonshared-to-link-commandline-for-musl-targe.patch
@@ -1,4 +1,4 @@
-From d7dc2861840e88a4592817a398a054a886c3f3ee Mon Sep 17 00:00:00 2001
+From bf92b290556b7050df0a001cc7ae43cf79990456 Mon Sep 17 00:00:00 2001
From: Khem Raj <raj.khem@gmail.com>
Date: Tue, 27 Jun 2017 18:10:54 -0700
Subject: [PATCH] Add ssp_nonshared to link commandline for musl targets
@@ -13,7 +13,7 @@ are already present in libc_nonshared library therefore
we do not need any library helper on glibc based systems
but musl needs the libssp_noshared from gcc
-Upstream-Status: Pending
+Upstream-Status: Submitted [https://gcc.gnu.org/pipermail/gcc-patches/2023-April/615317.html]
Signed-off-by: Khem Raj <raj.khem@gmail.com>
---
@@ -23,10 +23,10 @@ Signed-off-by: Khem Raj <raj.khem@gmail.com>
3 files changed, 27 insertions(+)
diff --git a/gcc/config/linux.h b/gcc/config/linux.h
-index 58143dff731..d2409ccac26 100644
+index c921cf6ef63..32e1bc1ae2d 100644
--- a/gcc/config/linux.h
+++ b/gcc/config/linux.h
-@@ -208,6 +208,13 @@ see the files COPYING3 and COPYING.RUNTIME respectively. If not, see
+@@ -195,6 +195,13 @@ see the files COPYING3 and COPYING.RUNTIME respectively. If not, see
{ GCC_INCLUDE_DIRVAR, "GCC", 0, 1, 0, 0 }, \
{ 0, 0, 0, 0, 0, 0 } \
}
@@ -41,7 +41,7 @@ index 58143dff731..d2409ccac26 100644
#if (DEFAULT_LIBC == LIBC_UCLIBC) && defined (SINGLE_LIBC) /* uClinux */
diff --git a/gcc/config/rs6000/linux.h b/gcc/config/rs6000/linux.h
-index 8c9039ac1e5..259cd485973 100644
+index 5d21befe8e4..1248a68e4ca 100644
--- a/gcc/config/rs6000/linux.h
+++ b/gcc/config/rs6000/linux.h
@@ -99,6 +99,16 @@
@@ -62,7 +62,7 @@ index 8c9039ac1e5..259cd485973 100644
#define LINK_OS_LINUX_SPEC LINK_OS_LINUX_EMUL " %{!shared: %{!static: \
%{!static-pie: \
diff --git a/gcc/config/rs6000/linux64.h b/gcc/config/rs6000/linux64.h
-index 364c1a5b155..e33d9ae98e0 100644
+index 2ddab7c99c1..9641580fc83 100644
--- a/gcc/config/rs6000/linux64.h
+++ b/gcc/config/rs6000/linux64.h
@@ -372,6 +372,16 @@ extern int dot_symbols;
diff --git a/meta/recipes-devtools/gcc/gcc/0019-Re-introduce-spe-commandline-options.patch b/meta/recipes-devtools/gcc/gcc/0019-Re-introduce-spe-commandline-options.patch
index bb1699be25..f785688661 100644
--- a/meta/recipes-devtools/gcc/gcc/0019-Re-introduce-spe-commandline-options.patch
+++ b/meta/recipes-devtools/gcc/gcc/0019-Re-introduce-spe-commandline-options.patch
@@ -1,4 +1,4 @@
-From bf0d7c463e1fab62804556099b56319fe94be1eb Mon Sep 17 00:00:00 2001
+From 587ac4a59ea56da18a9989c31a75124e974cb37c Mon Sep 17 00:00:00 2001
From: Khem Raj <raj.khem@gmail.com>
Date: Wed, 6 Jun 2018 12:10:22 -0700
Subject: [PATCH] Re-introduce spe commandline options
@@ -14,10 +14,10 @@ Signed-off-by: Khem Raj <raj.khem@gmail.com>
1 file changed, 13 insertions(+)
diff --git a/gcc/config/rs6000/rs6000.opt b/gcc/config/rs6000/rs6000.opt
-index 4931d781c4e..3fb87b6f7d5 100644
+index bde6d3ff664..5af9640825c 100644
--- a/gcc/config/rs6000/rs6000.opt
+++ b/gcc/config/rs6000/rs6000.opt
-@@ -348,6 +348,19 @@ mdebug=
+@@ -344,6 +344,19 @@ mdebug=
Target RejectNegative Joined
-mdebug= Enable debug output.
diff --git a/meta/recipes-devtools/gcc/gcc/0020-libgcc_s-Use-alias-for-__cpu_indicator_init-instead-.patch b/meta/recipes-devtools/gcc/gcc/0020-libgcc_s-Use-alias-for-__cpu_indicator_init-instead-.patch
index f37092089f..b86edab5d5 100644
--- a/meta/recipes-devtools/gcc/gcc/0020-libgcc_s-Use-alias-for-__cpu_indicator_init-instead-.patch
+++ b/meta/recipes-devtools/gcc/gcc/0020-libgcc_s-Use-alias-for-__cpu_indicator_init-instead-.patch
@@ -1,4 +1,4 @@
-From a32c75b37209d6836eaaa943dc6b1207acba5d27 Mon Sep 17 00:00:00 2001
+From 8c4c59521720f8c1b1e38e38896b47fcb1bf00ac Mon Sep 17 00:00:00 2001
From: Szabolcs Nagy <nsz@port70.net>
Date: Sat, 24 Oct 2015 20:09:53 +0000
Subject: [PATCH] libgcc_s: Use alias for __cpu_indicator_init instead of
@@ -29,7 +29,7 @@ gcc/Changelog:
* config/i386/i386-expand.c (ix86_expand_builtin): Make __builtin_cpu_init
call __cpu_indicator_init_local instead of __cpu_indicator_init.
-Upstream-Status: Pending
+Upstream-Status: Submitted [https://gcc.gnu.org/pipermail/gcc-patches/2023-February/612559.html]
Signed-off-by: Khem Raj <raj.khem@gmail.com>
---
@@ -39,10 +39,10 @@ Signed-off-by: Khem Raj <raj.khem@gmail.com>
3 files changed, 6 insertions(+), 6 deletions(-)
diff --git a/gcc/config/i386/i386-expand.cc b/gcc/config/i386/i386-expand.cc
-index 68978ef8dc2..0c71f36b572 100644
+index 0d817fc3f3b..8d34d19d3f9 100644
--- a/gcc/config/i386/i386-expand.cc
+++ b/gcc/config/i386/i386-expand.cc
-@@ -12321,10 +12321,10 @@ ix86_expand_builtin (tree exp, rtx target, rtx subtarget,
+@@ -12691,10 +12691,10 @@ ix86_expand_builtin (tree exp, rtx target, rtx subtarget,
{
case IX86_BUILTIN_CPU_INIT:
{
@@ -56,7 +56,7 @@ index 68978ef8dc2..0c71f36b572 100644
return expand_expr (call_expr, target, mode, EXPAND_NORMAL);
}
diff --git a/libgcc/config/i386/cpuinfo.c b/libgcc/config/i386/cpuinfo.c
-index dab1d98060f..cf824b4114a 100644
+index 50b6d8248a2..724ced402a1 100644
--- a/libgcc/config/i386/cpuinfo.c
+++ b/libgcc/config/i386/cpuinfo.c
@@ -63,7 +63,7 @@ __cpu_indicator_init (void)
diff --git a/meta/recipes-devtools/gcc/gcc/0021-gentypes-genmodes-Do-not-use-__LINE__-for-maintainin.patch b/meta/recipes-devtools/gcc/gcc/0021-gentypes-genmodes-Do-not-use-__LINE__-for-maintainin.patch
index f5f04ae314..b6707592dc 100644
--- a/meta/recipes-devtools/gcc/gcc/0021-gentypes-genmodes-Do-not-use-__LINE__-for-maintainin.patch
+++ b/meta/recipes-devtools/gcc/gcc/0021-gentypes-genmodes-Do-not-use-__LINE__-for-maintainin.patch
@@ -1,4 +1,4 @@
-From 4efc42b99c96b026f560b0918de7e237ac3dc8d1 Mon Sep 17 00:00:00 2001
+From f15b19d8e058c983c49c4566c1879fdaf5b1ab54 Mon Sep 17 00:00:00 2001
From: Richard Purdie <richard.purdie@linuxfoundation.org>
Date: Tue, 10 Mar 2020 08:26:53 -0700
Subject: [PATCH] gentypes/genmodes: Do not use __LINE__ for maintaining
@@ -17,10 +17,10 @@ Signed-off-by: Khem Raj <raj.khem@gmail.com>
2 files changed, 19 insertions(+), 19 deletions(-)
diff --git a/gcc/gengtype.cc b/gcc/gengtype.cc
-index 386ae1b0506..9762e914296 100644
+index 7763f40e9ab..4f0c1eb1508 100644
--- a/gcc/gengtype.cc
+++ b/gcc/gengtype.cc
-@@ -1006,7 +1006,7 @@ create_field_at (pair_p next, type_p type, const char *name, options_p opt,
+@@ -1005,7 +1005,7 @@ create_field_at (pair_p next, type_p type, const char *name, options_p opt,
/* Create a fake field with the given type and name. NEXT is the next
field in the chain. */
#define create_field(next,type,name) \
@@ -29,7 +29,7 @@ index 386ae1b0506..9762e914296 100644
/* Like create_field, but the field is only valid when condition COND
is true. */
-@@ -1039,7 +1039,7 @@ create_optional_field_ (pair_p next, type_p type, const char *name,
+@@ -1038,7 +1038,7 @@ create_optional_field_ (pair_p next, type_p type, const char *name,
}
#define create_optional_field(next,type,name,cond) \
@@ -38,7 +38,7 @@ index 386ae1b0506..9762e914296 100644
/* Reverse a linked list of 'struct pair's in place. */
pair_p
-@@ -5238,7 +5238,7 @@ main (int argc, char **argv)
+@@ -5223,7 +5223,7 @@ main (int argc, char **argv)
/* These types are set up with #define or else outside of where
we can see them. We should initialize them before calling
read_input_list. */
@@ -48,10 +48,10 @@ index 386ae1b0506..9762e914296 100644
POS_HERE (do_scalar_typedef ("CUMULATIVE_ARGS", &pos));
POS_HERE (do_scalar_typedef ("REAL_VALUE_TYPE", &pos));
diff --git a/gcc/genmodes.cc b/gcc/genmodes.cc
-index 59850bb070a..e187f8542a1 100644
+index 715787b8f48..302adff28d5 100644
--- a/gcc/genmodes.cc
+++ b/gcc/genmodes.cc
-@@ -440,7 +440,7 @@ complete_all_modes (void)
+@@ -441,7 +441,7 @@ complete_all_modes (void)
}
/* For each mode in class CLASS, construct a corresponding complex mode. */
@@ -60,7 +60,7 @@ index 59850bb070a..e187f8542a1 100644
static void
make_complex_modes (enum mode_class cl,
const char *file, unsigned int line)
-@@ -499,7 +499,7 @@ make_complex_modes (enum mode_class cl,
+@@ -500,7 +500,7 @@ make_complex_modes (enum mode_class cl,
having as many components as necessary. ORDER is the sorting order
of the mode, with smaller numbers indicating a higher priority. */
#define VECTOR_MODES_WITH_PREFIX(PREFIX, C, W, ORDER) \
@@ -69,7 +69,7 @@ index 59850bb070a..e187f8542a1 100644
#define VECTOR_MODES(C, W) VECTOR_MODES_WITH_PREFIX (V, C, W, 0)
static void ATTRIBUTE_UNUSED
make_vector_modes (enum mode_class cl, const char *prefix, unsigned int width,
-@@ -552,7 +552,7 @@ make_vector_modes (enum mode_class cl, const char *prefix, unsigned int width,
+@@ -553,7 +553,7 @@ make_vector_modes (enum mode_class cl, const char *prefix, unsigned int width,
BYTESIZE bytes in total. */
#define VECTOR_BOOL_MODE(NAME, COUNT, COMPONENT, BYTESIZE) \
make_vector_bool_mode (#NAME, COUNT, #COMPONENT, BYTESIZE, \
@@ -78,7 +78,7 @@ index 59850bb070a..e187f8542a1 100644
static void ATTRIBUTE_UNUSED
make_vector_bool_mode (const char *name, unsigned int count,
const char *component, unsigned int bytesize,
-@@ -574,7 +574,7 @@ make_vector_bool_mode (const char *name, unsigned int count,
+@@ -575,7 +575,7 @@ make_vector_bool_mode (const char *name, unsigned int count,
/* Input. */
#define _SPECIAL_MODE(C, N) \
@@ -87,7 +87,7 @@ index 59850bb070a..e187f8542a1 100644
#define RANDOM_MODE(N) _SPECIAL_MODE (RANDOM, N)
#define CC_MODE(N) _SPECIAL_MODE (CC, N)
-@@ -587,7 +587,7 @@ make_special_mode (enum mode_class cl, const char *name,
+@@ -588,7 +588,7 @@ make_special_mode (enum mode_class cl, const char *name,
#define INT_MODE(N, Y) FRACTIONAL_INT_MODE (N, -1U, Y)
#define FRACTIONAL_INT_MODE(N, B, Y) \
@@ -96,7 +96,7 @@ index 59850bb070a..e187f8542a1 100644
static void
make_int_mode (const char *name,
-@@ -628,16 +628,16 @@ make_opaque_mode (const char *name,
+@@ -629,16 +629,16 @@ make_opaque_mode (const char *name,
}
#define FRACT_MODE(N, Y, F) \
@@ -117,7 +117,7 @@ index 59850bb070a..e187f8542a1 100644
/* Create a fixed-point mode by setting CL, NAME, BYTESIZE, IBIT, FBIT,
FILE, and LINE. */
-@@ -658,7 +658,7 @@ make_fixed_point_mode (enum mode_class cl,
+@@ -659,7 +659,7 @@ make_fixed_point_mode (enum mode_class cl,
#define FLOAT_MODE(N, Y, F) FRACTIONAL_FLOAT_MODE (N, -1U, Y, F)
#define FRACTIONAL_FLOAT_MODE(N, B, Y, F) \
@@ -126,7 +126,7 @@ index 59850bb070a..e187f8542a1 100644
static void
make_float_mode (const char *name,
-@@ -675,7 +675,7 @@ make_float_mode (const char *name,
+@@ -676,7 +676,7 @@ make_float_mode (const char *name,
#define DECIMAL_FLOAT_MODE(N, Y, F) \
FRACTIONAL_DECIMAL_FLOAT_MODE (N, -1U, Y, F)
#define FRACTIONAL_DECIMAL_FLOAT_MODE(N, B, Y, F) \
@@ -135,7 +135,7 @@ index 59850bb070a..e187f8542a1 100644
static void
make_decimal_float_mode (const char *name,
-@@ -690,7 +690,7 @@ make_decimal_float_mode (const char *name,
+@@ -691,7 +691,7 @@ make_decimal_float_mode (const char *name,
}
#define RESET_FLOAT_FORMAT(N, F) \
@@ -144,7 +144,7 @@ index 59850bb070a..e187f8542a1 100644
static void ATTRIBUTE_UNUSED
reset_float_format (const char *name, const char *format,
const char *file, unsigned int line)
-@@ -711,7 +711,7 @@ reset_float_format (const char *name, const char *format,
+@@ -712,7 +712,7 @@ reset_float_format (const char *name, const char *format,
/* __intN support. */
#define INT_N(M,PREC) \
@@ -153,7 +153,7 @@ index 59850bb070a..e187f8542a1 100644
static void ATTRIBUTE_UNUSED
make_int_n (const char *m, int bitsize,
const char *file, unsigned int line)
-@@ -740,7 +740,7 @@ make_int_n (const char *m, int bitsize,
+@@ -741,7 +741,7 @@ make_int_n (const char *m, int bitsize,
/* Partial integer modes are specified by relation to a full integer
mode. */
#define PARTIAL_INT_MODE(M,PREC,NAME) \
@@ -162,7 +162,7 @@ index 59850bb070a..e187f8542a1 100644
static void ATTRIBUTE_UNUSED
make_partial_integer_mode (const char *base, const char *name,
unsigned int precision,
-@@ -767,7 +767,7 @@ make_partial_integer_mode (const char *base, const char *name,
+@@ -768,7 +768,7 @@ make_partial_integer_mode (const char *base, const char *name,
/* A single vector mode can be specified by naming its component
mode and the number of components. */
#define VECTOR_MODE_WITH_PREFIX(PREFIX, C, M, N, ORDER) \
@@ -171,7 +171,7 @@ index 59850bb070a..e187f8542a1 100644
#define VECTOR_MODE(C, M, N) VECTOR_MODE_WITH_PREFIX(V, C, M, N, 0);
static void ATTRIBUTE_UNUSED
make_vector_mode (enum mode_class bclass,
-@@ -814,7 +814,7 @@ make_vector_mode (enum mode_class bclass,
+@@ -815,7 +815,7 @@ make_vector_mode (enum mode_class bclass,
/* Adjustability. */
#define _ADD_ADJUST(A, M, X, C1, C2) \
diff --git a/meta/recipes-devtools/gcc/gcc/0022-libatomic-Do-not-enforce-march-on-aarch64.patch b/meta/recipes-devtools/gcc/gcc/0022-libatomic-Do-not-enforce-march-on-aarch64.patch
new file mode 100644
index 0000000000..0ea8aac543
--- /dev/null
+++ b/meta/recipes-devtools/gcc/gcc/0022-libatomic-Do-not-enforce-march-on-aarch64.patch
@@ -0,0 +1,42 @@
+From 939a899b862f7a25e52b74d1587fc75fc65779c0 Mon Sep 17 00:00:00 2001
+From: Khem Raj <raj.khem@gmail.com>
+Date: Wed, 13 May 2020 15:10:38 -0700
+Subject: [PATCH] libatomic: Do not enforce march on aarch64
+
+OE passes the right options via gcc compiler cmdline via TUNE_CCARGS
+this can conflict between -mcpu settings and -march setting here, since
+-mcpu will translate into an appropriate -march, lets depend on that
+instead of setting it explicitly
+
+Upstream-Status: Inappropriate [OE-Specific]
+
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+---
+ libatomic/Makefile.am | 1 -
+ libatomic/Makefile.in | 1 -
+ 2 files changed, 2 deletions(-)
+
+diff --git a/libatomic/Makefile.am b/libatomic/Makefile.am
+index c6c8d81c56a..d959a5d040e 100644
+--- a/libatomic/Makefile.am
++++ b/libatomic/Makefile.am
+@@ -125,7 +125,6 @@ libatomic_la_LIBADD = $(foreach s,$(SIZES),$(addsuffix _$(s)_.lo,$(SIZEOBJS)))
+ ## On a target-specific basis, include alternates to be selected by IFUNC.
+ if HAVE_IFUNC
+ if ARCH_AARCH64_LINUX
+-IFUNC_OPTIONS = -march=armv8-a+lse
+ libatomic_la_LIBADD += $(foreach s,$(SIZES),$(addsuffix _$(s)_1_.lo,$(SIZEOBJS)))
+ libatomic_la_SOURCES += atomic_16.S
+
+diff --git a/libatomic/Makefile.in b/libatomic/Makefile.in
+index a0fa3dfc8cc..e70d389874a 100644
+--- a/libatomic/Makefile.in
++++ b/libatomic/Makefile.in
+@@ -447,7 +447,6 @@ M_SRC = $(firstword $(filter %/$(M_FILE), $(all_c_files)))
+ libatomic_la_LIBADD = $(foreach s,$(SIZES),$(addsuffix \
+ _$(s)_.lo,$(SIZEOBJS))) $(am__append_1) $(am__append_3) \
+ $(am__append_4) $(am__append_5)
+-@ARCH_AARCH64_LINUX_TRUE@@HAVE_IFUNC_TRUE@IFUNC_OPTIONS = -march=armv8-a+lse
+ @ARCH_ARM_LINUX_TRUE@@HAVE_IFUNC_TRUE@IFUNC_OPTIONS = -march=armv7-a+fp -DHAVE_KERNEL64
+ @ARCH_I386_TRUE@@HAVE_IFUNC_TRUE@IFUNC_OPTIONS = -march=i586
+ @ARCH_X86_64_TRUE@@HAVE_IFUNC_TRUE@IFUNC_OPTIONS = -mcx16 -mcx16
diff --git a/meta/recipes-devtools/gcc/gcc/0022-mingw32-Enable-operation_not_supported.patch b/meta/recipes-devtools/gcc/gcc/0022-mingw32-Enable-operation_not_supported.patch
deleted file mode 100644
index f88ad51ee8..0000000000
--- a/meta/recipes-devtools/gcc/gcc/0022-mingw32-Enable-operation_not_supported.patch
+++ /dev/null
@@ -1,26 +0,0 @@
-From 061cf79e7b6e89fdf0f2630ddaebbf1d7b271ac3 Mon Sep 17 00:00:00 2001
-From: Khem Raj <raj.khem@gmail.com>
-Date: Tue, 12 May 2020 10:39:09 -0700
-Subject: [PATCH] mingw32: Enable operation_not_supported
-
-Fixes nativesdk build errors on mingw32 gcc-runtime
-
-Upstream-Status: Pending
-Signed-off-by: Khem Raj <raj.khem@gmail.com>
----
- libstdc++-v3/config/os/mingw32/error_constants.h | 2 +-
- 1 file changed, 1 insertion(+), 1 deletion(-)
-
-diff --git a/libstdc++-v3/config/os/mingw32/error_constants.h b/libstdc++-v3/config/os/mingw32/error_constants.h
-index da5f4c2ac85..e855c86267c 100644
---- a/libstdc++-v3/config/os/mingw32/error_constants.h
-+++ b/libstdc++-v3/config/os/mingw32/error_constants.h
-@@ -107,7 +107,7 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION
- #ifdef EPERM
- operation_not_permitted = EPERM,
- #endif
--// operation_not_supported = EOPNOTSUPP,
-+ operation_not_supported = EOPNOTSUPP,
- #ifdef EWOULDBLOCK
- operation_would_block = EWOULDBLOCK,
- #endif
diff --git a/meta/recipes-devtools/gcc/gcc/0023-Fix-install-path-of-linux64.h.patch b/meta/recipes-devtools/gcc/gcc/0023-Fix-install-path-of-linux64.h.patch
new file mode 100644
index 0000000000..cd962d82bd
--- /dev/null
+++ b/meta/recipes-devtools/gcc/gcc/0023-Fix-install-path-of-linux64.h.patch
@@ -0,0 +1,31 @@
+From 696d696381dd99ec2bddb1170f96f98da36eb418 Mon Sep 17 00:00:00 2001
+From: Andrei Gherzan <andrei.gherzan@huawei.com>
+Date: Wed, 22 Dec 2021 12:49:25 +0100
+Subject: [PATCH] Fix install path of linux64.h
+
+We add linux64.h to tm includes[1] as a relative path to B. This patch
+adapts the install path of linux64.h to match the include in tm.h.
+
+[1] 0016-Use-the-multilib-config-files-from-B-instead-of-usin.patch
+
+Signed-off-by: Andrei Gherzan <andrei.gherzan@huawei.com>
+
+Upstream-Status: Inappropriate [configuration]
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+---
+ gcc/Makefile.in | 2 ++
+ 1 file changed, 2 insertions(+)
+
+diff --git a/gcc/Makefile.in b/gcc/Makefile.in
+index 8ef996c0f4d..21daf380e34 100644
+--- a/gcc/Makefile.in
++++ b/gcc/Makefile.in
+@@ -3731,6 +3731,8 @@ install-plugin: installdirs lang.install-plugin s-header-vars install-gengtype
+ "$(srcdir)"/config/* | "$(srcdir)"/common/config/* \
+ | "$(srcdir)"/c-family/* | "$(srcdir)"/*.def ) \
+ base=`echo "$$path" | sed -e "s|$$srcdirstrip/||"`;; \
++ */linux64.h ) \
++ base=`dirname $$path`;;\
+ *) base=`basename $$path` ;; \
+ esac; \
+ dest=$(plugin_includedir)/$$base; \
diff --git a/meta/recipes-devtools/gcc/gcc/0023-libatomic-Do-not-enforce-march-on-aarch64.patch b/meta/recipes-devtools/gcc/gcc/0023-libatomic-Do-not-enforce-march-on-aarch64.patch
deleted file mode 100644
index 2f01659847..0000000000
--- a/meta/recipes-devtools/gcc/gcc/0023-libatomic-Do-not-enforce-march-on-aarch64.patch
+++ /dev/null
@@ -1,42 +0,0 @@
-From 52931ec7a708b58d68e69ce9eb99001ae9f099dd Mon Sep 17 00:00:00 2001
-From: Khem Raj <raj.khem@gmail.com>
-Date: Wed, 13 May 2020 15:10:38 -0700
-Subject: [PATCH] libatomic: Do not enforce march on aarch64
-
-OE passes the right options via gcc compiler cmdline via TUNE_CCARGS
-this can conflict between -mcpu settings and -march setting here, since
--mcpu will translate into an appropriate -march, lets depend on that
-instead of setting it explicitly
-
-Upstream-Status: Inappropriate [OE-Specific]
-
-Signed-off-by: Khem Raj <raj.khem@gmail.com>
----
- libatomic/Makefile.am | 1 -
- libatomic/Makefile.in | 1 -
- 2 files changed, 2 deletions(-)
-
-diff --git a/libatomic/Makefile.am b/libatomic/Makefile.am
-index d88515e4a03..e0e2f8b442a 100644
---- a/libatomic/Makefile.am
-+++ b/libatomic/Makefile.am
-@@ -125,7 +125,6 @@ libatomic_la_LIBADD = $(foreach s,$(SIZES),$(addsuffix _$(s)_.lo,$(SIZEOBJS)))
- ## On a target-specific basis, include alternates to be selected by IFUNC.
- if HAVE_IFUNC
- if ARCH_AARCH64_LINUX
--IFUNC_OPTIONS = -march=armv8-a+lse
- libatomic_la_LIBADD += $(foreach s,$(SIZES),$(addsuffix _$(s)_1_.lo,$(SIZEOBJS)))
- endif
- if ARCH_ARM_LINUX
-diff --git a/libatomic/Makefile.in b/libatomic/Makefile.in
-index 80d25653dc7..7377689ab34 100644
---- a/libatomic/Makefile.in
-+++ b/libatomic/Makefile.in
-@@ -434,7 +434,6 @@ M_SRC = $(firstword $(filter %/$(M_FILE), $(all_c_files)))
- libatomic_la_LIBADD = $(foreach s,$(SIZES),$(addsuffix \
- _$(s)_.lo,$(SIZEOBJS))) $(am__append_1) $(am__append_2) \
- $(am__append_3) $(am__append_4)
--@ARCH_AARCH64_LINUX_TRUE@@HAVE_IFUNC_TRUE@IFUNC_OPTIONS = -march=armv8-a+lse
- @ARCH_ARM_LINUX_TRUE@@HAVE_IFUNC_TRUE@IFUNC_OPTIONS = -march=armv7-a+fp -DHAVE_KERNEL64
- @ARCH_I386_TRUE@@HAVE_IFUNC_TRUE@IFUNC_OPTIONS = -march=i586
- @ARCH_X86_64_TRUE@@HAVE_IFUNC_TRUE@IFUNC_OPTIONS = -mcx16 -mcx16
diff --git a/meta/recipes-devtools/gcc/gcc/0024-Avoid-hardcoded-build-paths-into-ppc-libgcc.patch b/meta/recipes-devtools/gcc/gcc/0024-Avoid-hardcoded-build-paths-into-ppc-libgcc.patch
new file mode 100644
index 0000000000..04d940ae0b
--- /dev/null
+++ b/meta/recipes-devtools/gcc/gcc/0024-Avoid-hardcoded-build-paths-into-ppc-libgcc.patch
@@ -0,0 +1,28 @@
+From 9487b1d6136ea09cce4792d59d0170c712575550 Mon Sep 17 00:00:00 2001
+From: Richard Purdie <richard.purdie@linuxfoundation.org>
+Date: Sat, 20 Aug 2022 09:04:14 -0700
+Subject: [PATCH] Avoid hardcoded build paths into ppc libgcc
+
+Avoid encoding build paths into sources used for floating point on powerpc.
+(MACHINE=qemuppc bitbake libgcc).
+
+Upstream-Status: Submitted [https://gcc.gnu.org/pipermail/gcc-patches/2022-August/599882.html]
+Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+---
+ libgcc/config/rs6000/t-float128 | 2 +-
+ 1 file changed, 1 insertion(+), 1 deletion(-)
+
+diff --git a/libgcc/config/rs6000/t-float128 b/libgcc/config/rs6000/t-float128
+index b09b5664af0..513e63748f1 100644
+--- a/libgcc/config/rs6000/t-float128
++++ b/libgcc/config/rs6000/t-float128
+@@ -103,7 +103,7 @@ $(ibm128_dec_objs) : INTERNAL_CFLAGS += $(IBM128_CFLAGS_DECIMAL)
+ $(fp128_softfp_src) : $(srcdir)/soft-fp/$(subst -sw,,$(subst kf,tf,$@)) $(fp128_dep)
+ @src="$(srcdir)/soft-fp/$(subst -sw,,$(subst kf,tf,$@))"; \
+ echo "Create $@"; \
+- (echo "/* file created from $$src */"; \
++ (echo "/* file created from `basename $$src` */"; \
+ echo; \
+ sed -f $(fp128_sed) < $$src) > $@
+
diff --git a/meta/recipes-devtools/gcc/gcc/0024-Fix-install-path-of-linux64.h.patch b/meta/recipes-devtools/gcc/gcc/0024-Fix-install-path-of-linux64.h.patch
deleted file mode 100644
index 555be62328..0000000000
--- a/meta/recipes-devtools/gcc/gcc/0024-Fix-install-path-of-linux64.h.patch
+++ /dev/null
@@ -1,31 +0,0 @@
-From 3e67c9c77e46132c252911bf1e5e4222dfd3aa34 Mon Sep 17 00:00:00 2001
-From: Andrei Gherzan <andrei.gherzan@huawei.com>
-Date: Wed, 22 Dec 2021 12:49:25 +0100
-Subject: [PATCH] Fix install path of linux64.h
-
-We add linux64.h to tm includes[1] as a relative path to B. This patch
-adapts the install path of linux64.h to match the include in tm.h.
-
-[1] 0016-Use-the-multilib-config-files-from-B-instead-of-usin.patch
-
-Signed-off-by: Andrei Gherzan <andrei.gherzan@huawei.com>
-
-Upstream-Status: Inappropriate [configuration]
-Signed-off-by: Khem Raj <raj.khem@gmail.com>
----
- gcc/Makefile.in | 2 ++
- 1 file changed, 2 insertions(+)
-
-diff --git a/gcc/Makefile.in b/gcc/Makefile.in
-index 07fa63b6640..0def7394454 100644
---- a/gcc/Makefile.in
-+++ b/gcc/Makefile.in
-@@ -3706,6 +3706,8 @@ install-plugin: installdirs lang.install-plugin s-header-vars install-gengtype
- "$(srcdir)"/config/* | "$(srcdir)"/common/config/* \
- | "$(srcdir)"/c-family/* | "$(srcdir)"/*.def ) \
- base=`echo "$$path" | sed -e "s|$$srcdirstrip/||"`;; \
-+ */linux64.h ) \
-+ base=`dirname $$path`;;\
- *) base=`basename $$path` ;; \
- esac; \
- dest=$(plugin_includedir)/$$base; \
diff --git a/meta/recipes-devtools/gcc/gcc/0025-Move-sched.h-include-ahead-of-user-headers.patch b/meta/recipes-devtools/gcc/gcc/0025-Move-sched.h-include-ahead-of-user-headers.patch
deleted file mode 100644
index d4aeacfed5..0000000000
--- a/meta/recipes-devtools/gcc/gcc/0025-Move-sched.h-include-ahead-of-user-headers.patch
+++ /dev/null
@@ -1,56 +0,0 @@
-From 7422adfb471f4b4f2ec870124064632d55f72e50 Mon Sep 17 00:00:00 2001
-From: Khem Raj <raj.khem@gmail.com>
-Date: Mon, 11 Apr 2022 15:46:18 -0700
-Subject: [PATCH] Move sched.h include ahead of user headers
-
-Fix attempt to use poisoned calloc error, this moves the sched.h before
-using system.h from gcc headers which includes #pragma GCC poison calloc
-
-Fixes
-In file included from /mnt/b/yoe/master/build/tmp/work/cortexa72-yoe-linux-musl/gcc/12.0.1-r0/recipe-sysroot/usr/include/pthread.h:30,
- from /mnt/b/yoe/master/build/tmp/work/cortexa72-yoe-linux-musl/gcc/12.0.1-r0/recipe-sysroot/usr/include/c++/12.0.1/aarch64-yoe-linux-musl/bits/gthr-default.h:35,
- from /mnt/b/yoe/master/build/tmp/work/cortexa72-yoe-linux-musl/gcc/12.0.1-r0/recipe-sysroot/usr/include/c++/12.0.1/aarch64-yoe-linux-musl/bits/gthr.h:148,
- from /mnt/b/yoe/master/build/tmp/work/cortexa72-yoe-linux-musl/gcc/12.0.1-r0/recipe-sysroot/usr/include/c++/12.0.1/ext/atomicity.h:35,
- from /mnt/b/yoe/master/build/tmp/work/cortexa72-yoe-linux-musl/gcc/12.0.1-r0/recipe-sysroot/usr/include/c++/12.0.1/bits/shared_ptr_base.h:61,
- from /mnt/b/yoe/master/build/tmp/work/cortexa72-yoe-linux-musl/gcc/12.0.1-r0/recipe-sysroot/usr/include/c++/12.0.1/bits/shared_ptr.h:53,
- from /mnt/b/yoe/master/build/tmp/work/cortexa72-yoe-linux-musl/gcc/12.0.1-r0/recipe-sysroot/usr/include/c++/12.0.1/memory:77,
- from ../../../../../../../work-shared/gcc-12.0.1-r0/gcc-12-20220410/libcc1/deleter.hh:23,
- from ../../../../../../../work-shared/gcc-12.0.1-r0/gcc-12-20220410/libcc1/rpc.hh:25,
- from ../../../../../../../work-shared/gcc-12.0.1-r0/gcc-12-20220410/libcc1/libcc1plugin.cc:67:
-/mnt/b/yoe/master/build/tmp/work/cortexa72-yoe-linux-musl/gcc/12.0.1-r0/recipe-sysroot/usr/include/sched.h:84:7: error: attempt to use poisoned "calloc"
- 84 | void *calloc(size_t, size_t);
- | ^
-/mnt/b/yoe/master/build/tmp/work/cortexa72-yoe-linux-musl/gcc/12.0.1-r0/recipe-sysroot/usr/include/sched.h:124:36: error: attempt to use poisoned "calloc"
- 124 | #define CPU_ALLOC(n) ((cpu_set_t *)calloc(1,CPU_ALLOC_SIZE(n)))
-
-Upstream-Status: Pending
-Signed-off-by: Khem Raj <raj.khem@gmail.com>
----
- libcc1/libcc1plugin.cc | 1 +
- libcc1/libcp1plugin.cc | 1 +
- 2 files changed, 2 insertions(+)
-
-diff --git a/libcc1/libcc1plugin.cc b/libcc1/libcc1plugin.cc
-index 12ab5a57c8d..fff9bfab18b 100644
---- a/libcc1/libcc1plugin.cc
-+++ b/libcc1/libcc1plugin.cc
-@@ -17,6 +17,7 @@
- along with GCC; see the file COPYING3. If not see
- <http://www.gnu.org/licenses/>. */
-
-+#include <sched.h>
- #include <cc1plugin-config.h>
-
- #undef PACKAGE_NAME
-diff --git a/libcc1/libcp1plugin.cc b/libcc1/libcp1plugin.cc
-index 83dab7f58b1..0b83ce7a09d 100644
---- a/libcc1/libcp1plugin.cc
-+++ b/libcc1/libcp1plugin.cc
-@@ -18,6 +18,7 @@
- along with GCC; see the file COPYING3. If not see
- <http://www.gnu.org/licenses/>. */
-
-+#include <sched.h>
- #include <cc1plugin-config.h>
-
- #undef PACKAGE_NAME
diff --git a/meta/recipes-devtools/gcc/gcc/0025-gcc-testsuite-tweaks-for-mips-OE.patch b/meta/recipes-devtools/gcc/gcc/0025-gcc-testsuite-tweaks-for-mips-OE.patch
new file mode 100644
index 0000000000..e4d57c27ef
--- /dev/null
+++ b/meta/recipes-devtools/gcc/gcc/0025-gcc-testsuite-tweaks-for-mips-OE.patch
@@ -0,0 +1,233 @@
+From f12acc6a383546d48da3bdfb2f25ca2adb7976d7 Mon Sep 17 00:00:00 2001
+From: Richard Purdie <richard.purdie@linuxfoundation.org>
+Date: Sun, 13 Aug 2023 10:24:05 +0100
+Subject: [PATCH] gcc testsuite tweaks for mips/OE
+
+Disable loongson-mmi runtine, qemu doesn't appear to fully support them even if some
+of the instruction decoding is there.
+
+Also disable MSA mips runtime extensions. For some reason qemu appears to accept the test
+code when it shouldn't. Our selected MIPS cpu for QEMU doesn't support them.
+
+MIPS is unusual in the gcc testsuite as it uses EFFECTIVE_TARGETS and loops
+multiple times through the vector testsuite. In the case of the two above, we can
+compile/link them but not run them. Even with the runtime disabled, if the code
+marks it as a runtime test, it will elevate itself to that. Setting the default
+target to compile therefore isn't enough.
+
+Therefore add code to downgrade runtime tests to link tests if the hardware
+support isn't there to run them. This avoids thousands of test failures. To do
+this we have to hook downgrade code into the main test runner.
+
+Enable that downgrading for other cases where hardware to run vector extensions is
+unavailable to remove test failures on other architectures too.
+
+Also, for gcc.target tests, add checks on wheter loongson or msa code can
+be run before trying that, allowing downgrading of tests there to work too.
+
+Upstream-Status: Pending
+[Parts of the patch may be able to be split off and acceptable to upstream with
+discussion. Need to investigate why qemu-user passes the 'bad' instructions']
+
+Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
+---
+ gcc/testsuite/gcc.target/mips/mips.exp | 16 +++++++++
+ gcc/testsuite/lib/gcc-dg.exp | 11 +++++++
+ gcc/testsuite/lib/target-supports.exp | 45 ++++++++------------------
+ 3 files changed, 41 insertions(+), 31 deletions(-)
+
+diff --git a/gcc/testsuite/gcc.target/mips/mips.exp b/gcc/testsuite/gcc.target/mips/mips.exp
+index 15d574202d3..2cef9709774 100644
+--- a/gcc/testsuite/gcc.target/mips/mips.exp
++++ b/gcc/testsuite/gcc.target/mips/mips.exp
+@@ -709,7 +709,23 @@ proc mips_first_unsupported_option { upstatus } {
+ global mips_option_tests
+ upvar $upstatus status
+
++ if { [mips_have_test_option_p status "-mmsa"] } {
++ verbose -log "Found -mmsa"
++ if { ![check_mips_msa_hw_available] } {
++ verbose -log "No MSA avail"
++ return "-mmsa"
++ }
++ }
++ if { [mips_have_test_option_p status "-mloongson-mmi"] } {
++ verbose -log "Found -mloonson-mmi"
++ if { ![check_mips_loongson_mmi_hw_available] } {
++ verbose -log "No MMI avail"
++ return "-mloonson-mmi"
++ }
++ }
++
+ foreach { option code } [array get mips_option_tests] {
++
+ if { [mips_have_test_option_p status $option] } {
+ regsub -all "\n" $code "\\n\\\n" asm
+ # Use check_runtime from target-supports.exp, which caches
+diff --git a/gcc/testsuite/lib/gcc-dg.exp b/gcc/testsuite/lib/gcc-dg.exp
+index 9d79b9402e9..e0e5cbb1af8 100644
+--- a/gcc/testsuite/lib/gcc-dg.exp
++++ b/gcc/testsuite/lib/gcc-dg.exp
+@@ -240,9 +240,20 @@ proc schedule-cleanups { opts } {
+
+ proc gcc-dg-test-1 { target_compile prog do_what extra_tool_flags } {
+ # Set up the compiler flags, based on what we're going to do.
++ global do-what-limit
+
+ set options [list]
+
++ if [info exists do-what-limit] then {
++ # Demote run tests to $do-what-limit if set
++ switch $do_what {
++ run {
++ set do_what ${do-what-limit}
++ set dg-do-what ${do-what-limit}
++ }
++ }
++ }
++
+ switch $do_what {
+ "preprocess" {
+ set compile_type "preprocess"
+diff --git a/gcc/testsuite/lib/target-supports.exp b/gcc/testsuite/lib/target-supports.exp
+index 40f71e9ed8b..10e267fa16d 100644
+--- a/gcc/testsuite/lib/target-supports.exp
++++ b/gcc/testsuite/lib/target-supports.exp
+@@ -2155,14 +2155,7 @@ proc check_mips_loongson_mmi_hw_available { } {
+ if { !([istarget mips*-*-*]) } {
+ expr 0
+ } else {
+- check_runtime_nocache mips_loongson_mmi_hw_available {
+- #include <loongson-mmiintrin.h>
+- int main()
+- {
+- asm volatile ("paddw $f2,$f4,$f6");
+- return 0;
+- }
+- } "-mloongson-mmi"
++ expr 0
+ }
+ }]
+ }
+@@ -2176,29 +2169,7 @@ proc check_mips_msa_hw_available { } {
+ if { !([istarget mips*-*-*]) } {
+ expr 0
+ } else {
+- check_runtime_nocache mips_msa_hw_available {
+- #if !defined(__mips_msa)
+- #error "MSA NOT AVAIL"
+- #else
+- #if !(((__mips == 64) || (__mips == 32)) && (__mips_isa_rev >= 2))
+- #error "MSA NOT AVAIL FOR ISA REV < 2"
+- #endif
+- #if !defined(__mips_hard_float)
+- #error "MSA HARD_FLOAT REQUIRED"
+- #endif
+- #if __mips_fpr != 64
+- #error "MSA 64-bit FPR REQUIRED"
+- #endif
+- #include <msa.h>
+-
+- int main()
+- {
+- v8i16 v = __builtin_msa_ldi_h (0);
+- v[0] = 0;
+- return v[0];
+- }
+- #endif
+- } "-mmsa"
++ expr 0
+ }
+ }]
+ }
+@@ -9187,6 +9158,7 @@ proc is-effective-target-keyword { arg } {
+
+ proc et-dg-runtest { runtest testcases flags default-extra-flags } {
+ global dg-do-what-default
++ global do-what-limit
+ global EFFECTIVE_TARGETS
+ global et_index
+
+@@ -9194,6 +9166,7 @@ proc et-dg-runtest { runtest testcases flags default-extra-flags } {
+ foreach target $EFFECTIVE_TARGETS {
+ set target_flags $flags
+ set dg-do-what-default compile
++ set do-what-limit link
+ set et_index [lsearch -exact $EFFECTIVE_TARGETS $target]
+ if { [info procs add_options_for_${target}] != [list] } {
+ set target_flags [add_options_for_${target} "$flags"]
+@@ -9201,8 +9174,10 @@ proc et-dg-runtest { runtest testcases flags default-extra-flags } {
+ if { [info procs check_effective_target_${target}_runtime]
+ != [list] && [check_effective_target_${target}_runtime] } {
+ set dg-do-what-default run
++ set do-what-limit run
+ }
+ $runtest $testcases $target_flags ${default-extra-flags}
++ unset do-what-limit
+ }
+ } else {
+ set et_index 0
+@@ -10789,6 +10764,7 @@ proc check_effective_target_sigsetjmp {} {
+ proc check_vect_support_and_set_flags { } {
+ global DEFAULT_VECTCFLAGS
+ global dg-do-what-default
++ global do-what-limit
+ global EFFECTIVE_TARGETS
+
+ if [istarget powerpc-*paired*] {
+@@ -10797,6 +10773,7 @@ proc check_vect_support_and_set_flags { } {
+ set dg-do-what-default run
+ } else {
+ set dg-do-what-default compile
++ set do-what-limit link
+ }
+ } elseif [istarget powerpc*-*-*] {
+ # Skip targets not supporting -maltivec.
+@@ -10821,6 +10798,7 @@ proc check_vect_support_and_set_flags { } {
+ lappend DEFAULT_VECTCFLAGS "-mcpu=970"
+ }
+ set dg-do-what-default compile
++ set do-what-limit link
+ }
+ } elseif { [istarget i?86-*-*] || [istarget x86_64-*-*] } {
+ lappend DEFAULT_VECTCFLAGS "-msse2"
+@@ -10828,6 +10806,7 @@ proc check_vect_support_and_set_flags { } {
+ set dg-do-what-default run
+ } else {
+ set dg-do-what-default compile
++ set do-what-limit link
+ }
+ } elseif { [istarget mips*-*-*]
+ && [check_effective_target_nomips16] } {
+@@ -10847,6 +10826,7 @@ proc check_vect_support_and_set_flags { } {
+ set dg-do-what-default run
+ } else {
+ set dg-do-what-default compile
++ set do-what-limit link
+ }
+ } elseif [istarget alpha*-*-*] {
+ # Alpha's vectorization capabilities are extremely limited.
+@@ -10860,6 +10840,7 @@ proc check_vect_support_and_set_flags { } {
+ set dg-do-what-default run
+ } else {
+ set dg-do-what-default compile
++ set do-what-limit link
+ }
+ } elseif [istarget ia64-*-*] {
+ set dg-do-what-default run
+@@ -10873,6 +10854,7 @@ proc check_vect_support_and_set_flags { } {
+ set dg-do-what-default run
+ } else {
+ set dg-do-what-default compile
++ set do-what-limit link
+ }
+ } elseif [istarget aarch64*-*-*] {
+ set dg-do-what-default run
+@@ -10897,6 +10879,7 @@ proc check_vect_support_and_set_flags { } {
+ } else {
+ lappend DEFAULT_VECTCFLAGS "-march=z14" "-mzarch"
+ set dg-do-what-default compile
++ set do-what-limit link
+ }
+ } elseif [istarget amdgcn-*-*] {
+ set dg-do-what-default run
diff --git a/meta/recipes-devtools/gcc/gcc/0026-aarch64-Fix-loose-ldpstp-check-PR111411.patch b/meta/recipes-devtools/gcc/gcc/0026-aarch64-Fix-loose-ldpstp-check-PR111411.patch
new file mode 100644
index 0000000000..a408a98698
--- /dev/null
+++ b/meta/recipes-devtools/gcc/gcc/0026-aarch64-Fix-loose-ldpstp-check-PR111411.patch
@@ -0,0 +1,117 @@
+From adb60dc78e0da4877747f32347cee339364775be Mon Sep 17 00:00:00 2001
+From: Richard Sandiford <richard.sandiford@arm.com>
+Date: Fri, 15 Sep 2023 09:19:14 +0100
+Subject: [PATCH] aarch64: Fix loose ldpstp check [PR111411]
+
+aarch64_operands_ok_for_ldpstp contained the code:
+
+ /* One of the memory accesses must be a mempair operand.
+ If it is not the first one, they need to be swapped by the
+ peephole. */
+ if (!aarch64_mem_pair_operand (mem_1, GET_MODE (mem_1))
+ && !aarch64_mem_pair_operand (mem_2, GET_MODE (mem_2)))
+ return false;
+
+But the requirement isn't just that one of the accesses must be a
+valid mempair operand. It's that the lower access must be, since
+that's the access that will be used for the instruction operand.
+
+gcc/
+ PR target/111411
+ * config/aarch64/aarch64.cc (aarch64_operands_ok_for_ldpstp): Require
+ the lower memory access to a mem-pair operand.
+
+gcc/testsuite/
+ PR target/111411
+ * gcc.dg/rtl/aarch64/pr111411.c: New test.
+
+Upstream-Status: Backport [https://gcc.gnu.org/git/gitweb.cgi?p=gcc.git;h=2d38f45bcca62ca0c7afef4b579f82c5c2a01610]
+Signed-off-by: Martin Jansa <martin.jansa@gmail.com>
+---
+ gcc/config/aarch64/aarch64.cc | 8 ++-
+ gcc/testsuite/gcc.dg/rtl/aarch64/pr111411.c | 57 +++++++++++++++++++++
+ 2 files changed, 60 insertions(+), 5 deletions(-)
+ create mode 100644 gcc/testsuite/gcc.dg/rtl/aarch64/pr111411.c
+
+diff --git a/gcc/config/aarch64/aarch64.cc b/gcc/config/aarch64/aarch64.cc
+index 6118a3354ac..9b1f791ca8b 100644
+--- a/gcc/config/aarch64/aarch64.cc
++++ b/gcc/config/aarch64/aarch64.cc
+@@ -26154,11 +26154,9 @@ aarch64_operands_ok_for_ldpstp (rtx *operands, bool load,
+ gcc_assert (known_eq (GET_MODE_SIZE (GET_MODE (mem_1)),
+ GET_MODE_SIZE (GET_MODE (mem_2))));
+
+- /* One of the memory accesses must be a mempair operand.
+- If it is not the first one, they need to be swapped by the
+- peephole. */
+- if (!aarch64_mem_pair_operand (mem_1, GET_MODE (mem_1))
+- && !aarch64_mem_pair_operand (mem_2, GET_MODE (mem_2)))
++ /* The lower memory access must be a mem-pair operand. */
++ rtx lower_mem = reversed ? mem_2 : mem_1;
++ if (!aarch64_mem_pair_operand (lower_mem, GET_MODE (lower_mem)))
+ return false;
+
+ if (REG_P (reg_1) && FP_REGNUM_P (REGNO (reg_1)))
+diff --git a/gcc/testsuite/gcc.dg/rtl/aarch64/pr111411.c b/gcc/testsuite/gcc.dg/rtl/aarch64/pr111411.c
+new file mode 100644
+index 00000000000..ad07e9c6c89
+--- /dev/null
++++ b/gcc/testsuite/gcc.dg/rtl/aarch64/pr111411.c
+@@ -0,0 +1,57 @@
++/* { dg-do compile { target aarch64*-*-* } } */
++/* { dg-require-effective-target lp64 } */
++/* { dg-options "-O -fdisable-rtl-postreload -fpeephole2 -fno-schedule-fusion" } */
++
++extern int data[];
++
++void __RTL (startwith ("ira")) foo (void *ptr)
++{
++ (function "foo"
++ (param "ptr"
++ (DECL_RTL (reg/v:DI <0> [ ptr ]))
++ (DECL_RTL_INCOMING (reg/v:DI x0 [ ptr ]))
++ ) ;; param "ptr"
++ (insn-chain
++ (block 2
++ (edge-from entry (flags "FALLTHRU"))
++ (cnote 3 [bb 2] NOTE_INSN_BASIC_BLOCK)
++ (insn 4 (set (reg:DI <0>) (reg:DI x0)))
++ (insn 5 (set (reg:DI <1>)
++ (plus:DI (reg:DI <0>) (const_int 768))))
++ (insn 6 (set (mem:SI (plus:DI (reg:DI <0>)
++ (const_int 508)) [1 &data+508 S4 A4])
++ (const_int 0)))
++ (insn 7 (set (mem:SI (plus:DI (reg:DI <1>)
++ (const_int -256)) [1 &data+512 S4 A4])
++ (const_int 0)))
++ (edge-to exit (flags "FALLTHRU"))
++ ) ;; block 2
++ ) ;; insn-chain
++ ) ;; function
++}
++
++void __RTL (startwith ("ira")) bar (void *ptr)
++{
++ (function "bar"
++ (param "ptr"
++ (DECL_RTL (reg/v:DI <0> [ ptr ]))
++ (DECL_RTL_INCOMING (reg/v:DI x0 [ ptr ]))
++ ) ;; param "ptr"
++ (insn-chain
++ (block 2
++ (edge-from entry (flags "FALLTHRU"))
++ (cnote 3 [bb 2] NOTE_INSN_BASIC_BLOCK)
++ (insn 4 (set (reg:DI <0>) (reg:DI x0)))
++ (insn 5 (set (reg:DI <1>)
++ (plus:DI (reg:DI <0>) (const_int 768))))
++ (insn 6 (set (mem:SI (plus:DI (reg:DI <1>)
++ (const_int -256)) [1 &data+512 S4 A4])
++ (const_int 0)))
++ (insn 7 (set (mem:SI (plus:DI (reg:DI <0>)
++ (const_int 508)) [1 &data+508 S4 A4])
++ (const_int 0)))
++ (edge-to exit (flags "FALLTHRU"))
++ ) ;; block 2
++ ) ;; insn-chain
++ ) ;; function
++}
diff --git a/meta/recipes-devtools/gcc/gcc/0027-Fix-gcc-vect-module-testcases.patch b/meta/recipes-devtools/gcc/gcc/0027-Fix-gcc-vect-module-testcases.patch
new file mode 100644
index 0000000000..4b89036814
--- /dev/null
+++ b/meta/recipes-devtools/gcc/gcc/0027-Fix-gcc-vect-module-testcases.patch
@@ -0,0 +1,26 @@
+Upstream-Status: Backport [https://gcc.gnu.org/git/gitweb.cgi?p=gcc.git;h=948dbc5ee45f9ffd5f41fd6782704081cc7c8c27]
+
+Signed-off-by: Harish Sadineni <Harish.Sadineni@windriver.com>
+
+diff --git a/gcc/testsuite/gcc.dg/vect/vect-simd-clone-10.c b/gcc/testsuite/gcc.dg/vect/vect-simd-clone-10.c
+index ed63ff59cc0..009c849b7e7 100644
+--- a/gcc/testsuite/gcc.dg/vect/vect-simd-clone-10.c
++++ b/gcc/testsuite/gcc.dg/vect/vect-simd-clone-10.c
+@@ -1,3 +1,5 @@
++/* Since this uses dg-additional-sources, need to specify `dg-do run` instead of the default. */
++/* { dg-do run } */
+ /* { dg-require-effective-target vect_simd_clones } */
+ /* { dg-additional-options "-fopenmp-simd" } */
+ /* { dg-additional-options "-mavx" { target avx_runtime } } */
+diff --git a/gcc/testsuite/gcc.dg/vect/vect-simd-clone-12.c b/gcc/testsuite/gcc.dg/vect/vect-simd-clone-12.c
+index c44471e35bc..4699a3f3c80 100644
+--- a/gcc/testsuite/gcc.dg/vect/vect-simd-clone-12.c
++++ b/gcc/testsuite/gcc.dg/vect/vect-simd-clone-12.c
+@@ -1,3 +1,5 @@
++/* Since this uses dg-additional-sources, need to specify `dg-do run` instead of the default. */
++/* { dg-do run } */
+ /* { dg-require-effective-target vect_simd_clones } */
+ /* { dg-additional-options "-fopenmp-simd" } */
+ /* { dg-additional-options "-mavx" { target avx_runtime } } */
+--
+2.43.0
diff --git a/meta/recipes-devtools/gcc/gcc/CVE-2023-4039.patch b/meta/recipes-devtools/gcc/gcc/CVE-2023-4039.patch
new file mode 100644
index 0000000000..81b5067c33
--- /dev/null
+++ b/meta/recipes-devtools/gcc/gcc/CVE-2023-4039.patch
@@ -0,0 +1,3093 @@
+From: Richard Sandiford <richard.sandiford@arm.com>
+Subject: [PATCH 00/19] aarch64: Fix -fstack-protector issue
+Date: Tue, 12 Sep 2023 16:25:10 +0100
+
+This series of patches fixes deficiencies in GCC's -fstack-protector
+implementation for AArch64 when using dynamically allocated stack space.
+This is CVE-2023-4039. See:
+
+https://developer.arm.com/Arm%20Security%20Center/GCC%20Stack%20Protector%20Vulnerability%20AArch64
+https://github.com/metaredteam/external-disclosures/security/advisories/GHSA-x7ch-h5rf-w2mf
+
+for more details.
+
+The fix is to put the saved registers above the locals area when
+-fstack-protector is used.
+
+The series also fixes a stack-clash problem that I found while working
+on the CVE. In unpatched sources, the stack-clash problem would only
+trigger for unrealistic numbers of arguments (8K 64-bit arguments, or an
+equivalent). But it would be a more significant issue with the new
+-fstack-protector frame layout. It's therefore important that both
+problems are fixed together.
+
+Some reorganisation of the code seemed necessary to fix the problems in a
+cleanish way. The series is therefore quite long, but only a handful of
+patches should have any effect on code generation.
+
+See the individual patches for a detailed description.
+
+Tested on aarch64-linux-gnu. Pushed to trunk and to all active branches.
+I've also pushed backports to GCC 7+ to vendors/ARM/heads/CVE-2023-4039.
+
+CVE: CVE-2023-4039
+Upstream-Status: Backport
+Signed-off-by: Ross Burton <ross.burton@arm.com>
+
+
+From 71a2aa2127283f450c623d3604dbcabe0e14a8d4 Mon Sep 17 00:00:00 2001
+From: Richard Sandiford <richard.sandiford@arm.com>
+Date: Tue, 12 Sep 2023 16:07:12 +0100
+Subject: [PATCH 01/19] aarch64: Use local frame vars in shrink-wrapping code
+
+aarch64_layout_frame uses a shorthand for referring to
+cfun->machine->frame:
+
+ aarch64_frame &frame = cfun->machine->frame;
+
+This patch does the same for some other heavy users of the structure.
+No functional change intended.
+
+gcc/
+ * config/aarch64/aarch64.cc (aarch64_save_callee_saves): Use
+ a local shorthand for cfun->machine->frame.
+ (aarch64_restore_callee_saves, aarch64_get_separate_components):
+ (aarch64_process_components): Likewise.
+ (aarch64_allocate_and_probe_stack_space): Likewise.
+ (aarch64_expand_prologue, aarch64_expand_epilogue): Likewise.
+ (aarch64_layout_frame): Use existing shorthand for one more case.
+---
+ gcc/config/aarch64/aarch64.cc | 123 ++++++++++++++++++----------------
+ 1 file changed, 64 insertions(+), 59 deletions(-)
+
+diff --git a/gcc/config/aarch64/aarch64.cc b/gcc/config/aarch64/aarch64.cc
+index 822a2b49a46..5d473d161d9 100644
+--- a/gcc/config/aarch64/aarch64.cc
++++ b/gcc/config/aarch64/aarch64.cc
+@@ -8612,7 +8612,7 @@ aarch64_layout_frame (void)
+ frame.is_scs_enabled
+ = (!crtl->calls_eh_return
+ && sanitize_flags_p (SANITIZE_SHADOW_CALL_STACK)
+- && known_ge (cfun->machine->frame.reg_offset[LR_REGNUM], 0));
++ && known_ge (frame.reg_offset[LR_REGNUM], 0));
+
+ /* When shadow call stack is enabled, the scs_pop in the epilogue will
+ restore x30, and we don't need to pop x30 again in the traditional
+@@ -9078,6 +9078,7 @@ aarch64_save_callee_saves (poly_int64 start_offset,
+ unsigned start, unsigned limit, bool skip_wb,
+ bool hard_fp_valid_p)
+ {
++ aarch64_frame &frame = cfun->machine->frame;
+ rtx_insn *insn;
+ unsigned regno;
+ unsigned regno2;
+@@ -9092,8 +9093,8 @@ aarch64_save_callee_saves (poly_int64 start_offset,
+ bool frame_related_p = aarch64_emit_cfi_for_reg_p (regno);
+
+ if (skip_wb
+- && (regno == cfun->machine->frame.wb_push_candidate1
+- || regno == cfun->machine->frame.wb_push_candidate2))
++ && (regno == frame.wb_push_candidate1
++ || regno == frame.wb_push_candidate2))
+ continue;
+
+ if (cfun->machine->reg_is_wrapped_separately[regno])
+@@ -9101,7 +9102,7 @@ aarch64_save_callee_saves (poly_int64 start_offset,
+
+ machine_mode mode = aarch64_reg_save_mode (regno);
+ reg = gen_rtx_REG (mode, regno);
+- offset = start_offset + cfun->machine->frame.reg_offset[regno];
++ offset = start_offset + frame.reg_offset[regno];
+ rtx base_rtx = stack_pointer_rtx;
+ poly_int64 sp_offset = offset;
+
+@@ -9114,7 +9115,7 @@ aarch64_save_callee_saves (poly_int64 start_offset,
+ {
+ gcc_assert (known_eq (start_offset, 0));
+ poly_int64 fp_offset
+- = cfun->machine->frame.below_hard_fp_saved_regs_size;
++ = frame.below_hard_fp_saved_regs_size;
+ if (hard_fp_valid_p)
+ base_rtx = hard_frame_pointer_rtx;
+ else
+@@ -9136,8 +9137,7 @@ aarch64_save_callee_saves (poly_int64 start_offset,
+ && (regno2 = aarch64_next_callee_save (regno + 1, limit)) <= limit
+ && !cfun->machine->reg_is_wrapped_separately[regno2]
+ && known_eq (GET_MODE_SIZE (mode),
+- cfun->machine->frame.reg_offset[regno2]
+- - cfun->machine->frame.reg_offset[regno]))
++ frame.reg_offset[regno2] - frame.reg_offset[regno]))
+ {
+ rtx reg2 = gen_rtx_REG (mode, regno2);
+ rtx mem2;
+@@ -9187,6 +9187,7 @@ static void
+ aarch64_restore_callee_saves (poly_int64 start_offset, unsigned start,
+ unsigned limit, bool skip_wb, rtx *cfi_ops)
+ {
++ aarch64_frame &frame = cfun->machine->frame;
+ unsigned regno;
+ unsigned regno2;
+ poly_int64 offset;
+@@ -9203,13 +9204,13 @@ aarch64_restore_callee_saves (poly_int64 start_offset, unsigned start,
+ rtx reg, mem;
+
+ if (skip_wb
+- && (regno == cfun->machine->frame.wb_pop_candidate1
+- || regno == cfun->machine->frame.wb_pop_candidate2))
++ && (regno == frame.wb_pop_candidate1
++ || regno == frame.wb_pop_candidate2))
+ continue;
+
+ machine_mode mode = aarch64_reg_save_mode (regno);
+ reg = gen_rtx_REG (mode, regno);
+- offset = start_offset + cfun->machine->frame.reg_offset[regno];
++ offset = start_offset + frame.reg_offset[regno];
+ rtx base_rtx = stack_pointer_rtx;
+ if (mode == VNx2DImode && BYTES_BIG_ENDIAN)
+ aarch64_adjust_sve_callee_save_base (mode, base_rtx, anchor_reg,
+@@ -9220,8 +9221,7 @@ aarch64_restore_callee_saves (poly_int64 start_offset, unsigned start,
+ && (regno2 = aarch64_next_callee_save (regno + 1, limit)) <= limit
+ && !cfun->machine->reg_is_wrapped_separately[regno2]
+ && known_eq (GET_MODE_SIZE (mode),
+- cfun->machine->frame.reg_offset[regno2]
+- - cfun->machine->frame.reg_offset[regno]))
++ frame.reg_offset[regno2] - frame.reg_offset[regno]))
+ {
+ rtx reg2 = gen_rtx_REG (mode, regno2);
+ rtx mem2;
+@@ -9326,6 +9326,7 @@ offset_12bit_unsigned_scaled_p (machine_mode mode, poly_int64 offset)
+ static sbitmap
+ aarch64_get_separate_components (void)
+ {
++ aarch64_frame &frame = cfun->machine->frame;
+ sbitmap components = sbitmap_alloc (LAST_SAVED_REGNUM + 1);
+ bitmap_clear (components);
+
+@@ -9342,18 +9343,18 @@ aarch64_get_separate_components (void)
+ if (mode == VNx2DImode && BYTES_BIG_ENDIAN)
+ continue;
+
+- poly_int64 offset = cfun->machine->frame.reg_offset[regno];
++ poly_int64 offset = frame.reg_offset[regno];
+
+ /* If the register is saved in the first SVE save slot, we use
+ it as a stack probe for -fstack-clash-protection. */
+ if (flag_stack_clash_protection
+- && maybe_ne (cfun->machine->frame.below_hard_fp_saved_regs_size, 0)
++ && maybe_ne (frame.below_hard_fp_saved_regs_size, 0)
+ && known_eq (offset, 0))
+ continue;
+
+ /* Get the offset relative to the register we'll use. */
+ if (frame_pointer_needed)
+- offset -= cfun->machine->frame.below_hard_fp_saved_regs_size;
++ offset -= frame.below_hard_fp_saved_regs_size;
+ else
+ offset += crtl->outgoing_args_size;
+
+@@ -9372,11 +9373,11 @@ aarch64_get_separate_components (void)
+ /* If the spare predicate register used by big-endian SVE code
+ is call-preserved, it must be saved in the main prologue
+ before any saves that use it. */
+- if (cfun->machine->frame.spare_pred_reg != INVALID_REGNUM)
+- bitmap_clear_bit (components, cfun->machine->frame.spare_pred_reg);
++ if (frame.spare_pred_reg != INVALID_REGNUM)
++ bitmap_clear_bit (components, frame.spare_pred_reg);
+
+- unsigned reg1 = cfun->machine->frame.wb_push_candidate1;
+- unsigned reg2 = cfun->machine->frame.wb_push_candidate2;
++ unsigned reg1 = frame.wb_push_candidate1;
++ unsigned reg2 = frame.wb_push_candidate2;
+ /* If registers have been chosen to be stored/restored with
+ writeback don't interfere with them to avoid having to output explicit
+ stack adjustment instructions. */
+@@ -9485,6 +9486,7 @@ aarch64_get_next_set_bit (sbitmap bmp, unsigned int start)
+ static void
+ aarch64_process_components (sbitmap components, bool prologue_p)
+ {
++ aarch64_frame &frame = cfun->machine->frame;
+ rtx ptr_reg = gen_rtx_REG (Pmode, frame_pointer_needed
+ ? HARD_FRAME_POINTER_REGNUM
+ : STACK_POINTER_REGNUM);
+@@ -9499,9 +9501,9 @@ aarch64_process_components (sbitmap components, bool prologue_p)
+ machine_mode mode = aarch64_reg_save_mode (regno);
+
+ rtx reg = gen_rtx_REG (mode, regno);
+- poly_int64 offset = cfun->machine->frame.reg_offset[regno];
++ poly_int64 offset = frame.reg_offset[regno];
+ if (frame_pointer_needed)
+- offset -= cfun->machine->frame.below_hard_fp_saved_regs_size;
++ offset -= frame.below_hard_fp_saved_regs_size;
+ else
+ offset += crtl->outgoing_args_size;
+
+@@ -9526,14 +9528,14 @@ aarch64_process_components (sbitmap components, bool prologue_p)
+ break;
+ }
+
+- poly_int64 offset2 = cfun->machine->frame.reg_offset[regno2];
++ poly_int64 offset2 = frame.reg_offset[regno2];
+ /* The next register is not of the same class or its offset is not
+ mergeable with the current one into a pair. */
+ if (aarch64_sve_mode_p (mode)
+ || !satisfies_constraint_Ump (mem)
+ || GP_REGNUM_P (regno) != GP_REGNUM_P (regno2)
+ || (crtl->abi->id () == ARM_PCS_SIMD && FP_REGNUM_P (regno))
+- || maybe_ne ((offset2 - cfun->machine->frame.reg_offset[regno]),
++ || maybe_ne ((offset2 - frame.reg_offset[regno]),
+ GET_MODE_SIZE (mode)))
+ {
+ insn = emit_insn (set);
+@@ -9555,7 +9557,7 @@ aarch64_process_components (sbitmap components, bool prologue_p)
+ /* REGNO2 can be saved/restored in a pair with REGNO. */
+ rtx reg2 = gen_rtx_REG (mode, regno2);
+ if (frame_pointer_needed)
+- offset2 -= cfun->machine->frame.below_hard_fp_saved_regs_size;
++ offset2 -= frame.below_hard_fp_saved_regs_size;
+ else
+ offset2 += crtl->outgoing_args_size;
+ rtx addr2 = plus_constant (Pmode, ptr_reg, offset2);
+@@ -9650,6 +9652,7 @@ aarch64_allocate_and_probe_stack_space (rtx temp1, rtx temp2,
+ bool frame_related_p,
+ bool final_adjustment_p)
+ {
++ aarch64_frame &frame = cfun->machine->frame;
+ HOST_WIDE_INT guard_size
+ = 1 << param_stack_clash_protection_guard_size;
+ HOST_WIDE_INT guard_used_by_caller = STACK_CLASH_CALLER_GUARD;
+@@ -9670,25 +9673,25 @@ aarch64_allocate_and_probe_stack_space (rtx temp1, rtx temp2,
+ register as a probe. We can't assume that LR was saved at position 0
+ though, so treat any space below it as unprobed. */
+ if (final_adjustment_p
+- && known_eq (cfun->machine->frame.below_hard_fp_saved_regs_size, 0))
++ && known_eq (frame.below_hard_fp_saved_regs_size, 0))
+ {
+- poly_int64 lr_offset = cfun->machine->frame.reg_offset[LR_REGNUM];
++ poly_int64 lr_offset = frame.reg_offset[LR_REGNUM];
+ if (known_ge (lr_offset, 0))
+ min_probe_threshold -= lr_offset.to_constant ();
+ else
+ gcc_assert (!flag_stack_clash_protection || known_eq (poly_size, 0));
+ }
+
+- poly_int64 frame_size = cfun->machine->frame.frame_size;
++ poly_int64 frame_size = frame.frame_size;
+
+ /* We should always have a positive probe threshold. */
+ gcc_assert (min_probe_threshold > 0);
+
+ if (flag_stack_clash_protection && !final_adjustment_p)
+ {
+- poly_int64 initial_adjust = cfun->machine->frame.initial_adjust;
+- poly_int64 sve_callee_adjust = cfun->machine->frame.sve_callee_adjust;
+- poly_int64 final_adjust = cfun->machine->frame.final_adjust;
++ poly_int64 initial_adjust = frame.initial_adjust;
++ poly_int64 sve_callee_adjust = frame.sve_callee_adjust;
++ poly_int64 final_adjust = frame.final_adjust;
+
+ if (known_eq (frame_size, 0))
+ {
+@@ -9977,17 +9980,18 @@ aarch64_epilogue_uses (int regno)
+ void
+ aarch64_expand_prologue (void)
+ {
+- poly_int64 frame_size = cfun->machine->frame.frame_size;
+- poly_int64 initial_adjust = cfun->machine->frame.initial_adjust;
+- HOST_WIDE_INT callee_adjust = cfun->machine->frame.callee_adjust;
+- poly_int64 final_adjust = cfun->machine->frame.final_adjust;
+- poly_int64 callee_offset = cfun->machine->frame.callee_offset;
+- poly_int64 sve_callee_adjust = cfun->machine->frame.sve_callee_adjust;
++ aarch64_frame &frame = cfun->machine->frame;
++ poly_int64 frame_size = frame.frame_size;
++ poly_int64 initial_adjust = frame.initial_adjust;
++ HOST_WIDE_INT callee_adjust = frame.callee_adjust;
++ poly_int64 final_adjust = frame.final_adjust;
++ poly_int64 callee_offset = frame.callee_offset;
++ poly_int64 sve_callee_adjust = frame.sve_callee_adjust;
+ poly_int64 below_hard_fp_saved_regs_size
+- = cfun->machine->frame.below_hard_fp_saved_regs_size;
+- unsigned reg1 = cfun->machine->frame.wb_push_candidate1;
+- unsigned reg2 = cfun->machine->frame.wb_push_candidate2;
+- bool emit_frame_chain = cfun->machine->frame.emit_frame_chain;
++ = frame.below_hard_fp_saved_regs_size;
++ unsigned reg1 = frame.wb_push_candidate1;
++ unsigned reg2 = frame.wb_push_candidate2;
++ bool emit_frame_chain = frame.emit_frame_chain;
+ rtx_insn *insn;
+
+ if (flag_stack_clash_protection && known_eq (callee_adjust, 0))
+@@ -10018,7 +10022,7 @@ aarch64_expand_prologue (void)
+ }
+
+ /* Push return address to shadow call stack. */
+- if (cfun->machine->frame.is_scs_enabled)
++ if (frame.is_scs_enabled)
+ emit_insn (gen_scs_push ());
+
+ if (flag_stack_usage_info)
+@@ -10057,7 +10061,7 @@ aarch64_expand_prologue (void)
+
+ /* The offset of the frame chain record (if any) from the current SP. */
+ poly_int64 chain_offset = (initial_adjust + callee_adjust
+- - cfun->machine->frame.hard_fp_offset);
++ - frame.hard_fp_offset);
+ gcc_assert (known_ge (chain_offset, 0));
+
+ /* The offset of the bottom of the save area from the current SP. */
+@@ -10160,16 +10164,17 @@ aarch64_use_return_insn_p (void)
+ void
+ aarch64_expand_epilogue (bool for_sibcall)
+ {
+- poly_int64 initial_adjust = cfun->machine->frame.initial_adjust;
+- HOST_WIDE_INT callee_adjust = cfun->machine->frame.callee_adjust;
+- poly_int64 final_adjust = cfun->machine->frame.final_adjust;
+- poly_int64 callee_offset = cfun->machine->frame.callee_offset;
+- poly_int64 sve_callee_adjust = cfun->machine->frame.sve_callee_adjust;
++ aarch64_frame &frame = cfun->machine->frame;
++ poly_int64 initial_adjust = frame.initial_adjust;
++ HOST_WIDE_INT callee_adjust = frame.callee_adjust;
++ poly_int64 final_adjust = frame.final_adjust;
++ poly_int64 callee_offset = frame.callee_offset;
++ poly_int64 sve_callee_adjust = frame.sve_callee_adjust;
+ poly_int64 below_hard_fp_saved_regs_size
+- = cfun->machine->frame.below_hard_fp_saved_regs_size;
+- unsigned reg1 = cfun->machine->frame.wb_pop_candidate1;
+- unsigned reg2 = cfun->machine->frame.wb_pop_candidate2;
+- unsigned int last_gpr = (cfun->machine->frame.is_scs_enabled
++ = frame.below_hard_fp_saved_regs_size;
++ unsigned reg1 = frame.wb_pop_candidate1;
++ unsigned reg2 = frame.wb_pop_candidate2;
++ unsigned int last_gpr = (frame.is_scs_enabled
+ ? R29_REGNUM : R30_REGNUM);
+ rtx cfi_ops = NULL;
+ rtx_insn *insn;
+@@ -10203,7 +10208,7 @@ aarch64_expand_epilogue (bool for_sibcall)
+ /* We need to add memory barrier to prevent read from deallocated stack. */
+ bool need_barrier_p
+ = maybe_ne (get_frame_size ()
+- + cfun->machine->frame.saved_varargs_size, 0);
++ + frame.saved_varargs_size, 0);
+
+ /* Emit a barrier to prevent loads from a deallocated stack. */
+ if (maybe_gt (final_adjust, crtl->outgoing_args_size)
+@@ -10284,7 +10289,7 @@ aarch64_expand_epilogue (bool for_sibcall)
+ }
+
+ /* Pop return address from shadow call stack. */
+- if (cfun->machine->frame.is_scs_enabled)
++ if (frame.is_scs_enabled)
+ {
+ machine_mode mode = aarch64_reg_save_mode (R30_REGNUM);
+ rtx reg = gen_rtx_REG (mode, R30_REGNUM);
+@@ -12740,24 +12745,24 @@ aarch64_can_eliminate (const int from ATTRIBUTE_UNUSED, const int to)
+ poly_int64
+ aarch64_initial_elimination_offset (unsigned from, unsigned to)
+ {
++ aarch64_frame &frame = cfun->machine->frame;
++
+ if (to == HARD_FRAME_POINTER_REGNUM)
+ {
+ if (from == ARG_POINTER_REGNUM)
+- return cfun->machine->frame.hard_fp_offset;
++ return frame.hard_fp_offset;
+
+ if (from == FRAME_POINTER_REGNUM)
+- return cfun->machine->frame.hard_fp_offset
+- - cfun->machine->frame.locals_offset;
++ return frame.hard_fp_offset - frame.locals_offset;
+ }
+
+ if (to == STACK_POINTER_REGNUM)
+ {
+ if (from == FRAME_POINTER_REGNUM)
+- return cfun->machine->frame.frame_size
+- - cfun->machine->frame.locals_offset;
++ return frame.frame_size - frame.locals_offset;
+ }
+
+- return cfun->machine->frame.frame_size;
++ return frame.frame_size;
+ }
+
+
+--
+2.34.1
+
+
+From 89a9fa287706c5011f61926eaf65e7b996b963a3 Mon Sep 17 00:00:00 2001
+From: Richard Sandiford <richard.sandiford@arm.com>
+Date: Tue, 12 Sep 2023 16:07:12 +0100
+Subject: [PATCH 02/19] aarch64: Avoid a use of callee_offset
+
+When we emit the frame chain, i.e. when we reach Here in this statement
+of aarch64_expand_prologue:
+
+ if (emit_frame_chain)
+ {
+ // Here
+ ...
+ }
+
+the stack is in one of two states:
+
+- We've allocated up to the frame chain, but no more.
+
+- We've allocated the whole frame, and the frame chain is within easy
+ reach of the new SP.
+
+The offset of the frame chain from the current SP is available
+in aarch64_frame as callee_offset. It is also available as the
+chain_offset local variable, where the latter is calculated from other
+data. (However, chain_offset is not always equal to callee_offset when
+!emit_frame_chain, so chain_offset isn't redundant.)
+
+In c600df9a4060da3c6121ff4d0b93f179eafd69d1 I switched to using
+chain_offset for the initialisation of the hard frame pointer:
+
+ aarch64_add_offset (Pmode, hard_frame_pointer_rtx,
+- stack_pointer_rtx, callee_offset,
++ stack_pointer_rtx, chain_offset,
+ tmp1_rtx, tmp0_rtx, frame_pointer_needed);
+
+But the later REG_CFA_ADJUST_CFA handling still used callee_offset.
+
+I think the difference is harmless, but it's more logical for the
+CFA note to be in sync, and it's more convenient for later patches
+if it uses chain_offset.
+
+gcc/
+ * config/aarch64/aarch64.cc (aarch64_expand_prologue): Use
+ chain_offset rather than callee_offset.
+---
+ gcc/config/aarch64/aarch64.cc | 4 +---
+ 1 file changed, 1 insertion(+), 3 deletions(-)
+
+diff --git a/gcc/config/aarch64/aarch64.cc b/gcc/config/aarch64/aarch64.cc
+index 5d473d161d9..4f233c95140 100644
+--- a/gcc/config/aarch64/aarch64.cc
++++ b/gcc/config/aarch64/aarch64.cc
+@@ -9985,7 +9985,6 @@ aarch64_expand_prologue (void)
+ poly_int64 initial_adjust = frame.initial_adjust;
+ HOST_WIDE_INT callee_adjust = frame.callee_adjust;
+ poly_int64 final_adjust = frame.final_adjust;
+- poly_int64 callee_offset = frame.callee_offset;
+ poly_int64 sve_callee_adjust = frame.sve_callee_adjust;
+ poly_int64 below_hard_fp_saved_regs_size
+ = frame.below_hard_fp_saved_regs_size;
+@@ -10098,8 +10097,7 @@ aarch64_expand_prologue (void)
+ implicit. */
+ if (!find_reg_note (insn, REG_CFA_ADJUST_CFA, NULL_RTX))
+ {
+- rtx src = plus_constant (Pmode, stack_pointer_rtx,
+- callee_offset);
++ rtx src = plus_constant (Pmode, stack_pointer_rtx, chain_offset);
+ add_reg_note (insn, REG_CFA_ADJUST_CFA,
+ gen_rtx_SET (hard_frame_pointer_rtx, src));
+ }
+--
+2.34.1
+
+
+From b36a2a78040722dab6124366c5d6baf8eaf80aef Mon Sep 17 00:00:00 2001
+From: Richard Sandiford <richard.sandiford@arm.com>
+Date: Tue, 12 Sep 2023 16:07:13 +0100
+Subject: [PATCH 03/19] aarch64: Explicitly handle frames with no saved
+ registers
+
+If a frame has no saved registers, it can be allocated in one go.
+There is no need to treat the areas below and above the saved
+registers as separate.
+
+And if we allocate the frame in one go, it should be allocated
+as the initial_adjust rather than the final_adjust. This allows the
+frame size to grow to guard_size - guard_used_by_caller before a stack
+probe is needed. (A frame with no register saves is necessarily a
+leaf frame.)
+
+This is a no-op as thing stand, since a leaf function will have
+no outgoing arguments, and so all the frame will be above where
+the saved registers normally go.
+
+gcc/
+ * config/aarch64/aarch64.cc (aarch64_layout_frame): Explicitly
+ allocate the frame in one go if there are no saved registers.
+---
+ gcc/config/aarch64/aarch64.cc | 8 +++++---
+ 1 file changed, 5 insertions(+), 3 deletions(-)
+
+diff --git a/gcc/config/aarch64/aarch64.cc b/gcc/config/aarch64/aarch64.cc
+index 4f233c95140..37643041ffb 100644
+--- a/gcc/config/aarch64/aarch64.cc
++++ b/gcc/config/aarch64/aarch64.cc
+@@ -8639,9 +8639,11 @@ aarch64_layout_frame (void)
+
+ HOST_WIDE_INT const_size, const_outgoing_args_size, const_fp_offset;
+ HOST_WIDE_INT const_saved_regs_size;
+- if (frame.frame_size.is_constant (&const_size)
+- && const_size < max_push_offset
+- && known_eq (frame.hard_fp_offset, const_size))
++ if (known_eq (frame.saved_regs_size, 0))
++ frame.initial_adjust = frame.frame_size;
++ else if (frame.frame_size.is_constant (&const_size)
++ && const_size < max_push_offset
++ && known_eq (frame.hard_fp_offset, const_size))
+ {
+ /* Simple, small frame with no outgoing arguments:
+
+--
+2.34.1
+
+
+From ada2ab0093596be707f23a3466ac82cff59fcffe Mon Sep 17 00:00:00 2001
+From: Richard Sandiford <richard.sandiford@arm.com>
+Date: Tue, 12 Sep 2023 16:07:13 +0100
+Subject: [PATCH 04/19] aarch64: Add bytes_below_saved_regs to frame info
+
+The frame layout code currently hard-codes the assumption that
+the number of bytes below the saved registers is equal to the
+size of the outgoing arguments. This patch abstracts that
+value into a new field of aarch64_frame.
+
+gcc/
+ * config/aarch64/aarch64.h (aarch64_frame::bytes_below_saved_regs): New
+ field.
+ * config/aarch64/aarch64.cc (aarch64_layout_frame): Initialize it,
+ and use it instead of crtl->outgoing_args_size.
+ (aarch64_get_separate_components): Use bytes_below_saved_regs instead
+ of outgoing_args_size.
+ (aarch64_process_components): Likewise.
+---
+ gcc/config/aarch64/aarch64.cc | 71 ++++++++++++++++++-----------------
+ gcc/config/aarch64/aarch64.h | 5 +++
+ 2 files changed, 41 insertions(+), 35 deletions(-)
+
+diff --git a/gcc/config/aarch64/aarch64.cc b/gcc/config/aarch64/aarch64.cc
+index 37643041ffb..dacc2b0e4dd 100644
+--- a/gcc/config/aarch64/aarch64.cc
++++ b/gcc/config/aarch64/aarch64.cc
+@@ -8478,6 +8478,8 @@ aarch64_layout_frame (void)
+ gcc_assert (crtl->is_leaf
+ || maybe_ne (frame.reg_offset[R30_REGNUM], SLOT_NOT_REQUIRED));
+
++ frame.bytes_below_saved_regs = crtl->outgoing_args_size;
++
+ /* Now assign stack slots for the registers. Start with the predicate
+ registers, since predicate LDR and STR have a relatively small
+ offset range. These saves happen below the hard frame pointer. */
+@@ -8582,18 +8584,18 @@ aarch64_layout_frame (void)
+
+ poly_int64 varargs_and_saved_regs_size = offset + frame.saved_varargs_size;
+
+- poly_int64 above_outgoing_args
++ poly_int64 saved_regs_and_above
+ = aligned_upper_bound (varargs_and_saved_regs_size
+ + get_frame_size (),
+ STACK_BOUNDARY / BITS_PER_UNIT);
+
+ frame.hard_fp_offset
+- = above_outgoing_args - frame.below_hard_fp_saved_regs_size;
++ = saved_regs_and_above - frame.below_hard_fp_saved_regs_size;
+
+ /* Both these values are already aligned. */
+- gcc_assert (multiple_p (crtl->outgoing_args_size,
++ gcc_assert (multiple_p (frame.bytes_below_saved_regs,
+ STACK_BOUNDARY / BITS_PER_UNIT));
+- frame.frame_size = above_outgoing_args + crtl->outgoing_args_size;
++ frame.frame_size = saved_regs_and_above + frame.bytes_below_saved_regs;
+
+ frame.locals_offset = frame.saved_varargs_size;
+
+@@ -8637,7 +8639,7 @@ aarch64_layout_frame (void)
+ else if (frame.wb_pop_candidate1 != INVALID_REGNUM)
+ max_push_offset = 256;
+
+- HOST_WIDE_INT const_size, const_outgoing_args_size, const_fp_offset;
++ HOST_WIDE_INT const_size, const_below_saved_regs, const_fp_offset;
+ HOST_WIDE_INT const_saved_regs_size;
+ if (known_eq (frame.saved_regs_size, 0))
+ frame.initial_adjust = frame.frame_size;
+@@ -8645,31 +8647,31 @@ aarch64_layout_frame (void)
+ && const_size < max_push_offset
+ && known_eq (frame.hard_fp_offset, const_size))
+ {
+- /* Simple, small frame with no outgoing arguments:
++ /* Simple, small frame with no data below the saved registers.
+
+ stp reg1, reg2, [sp, -frame_size]!
+ stp reg3, reg4, [sp, 16] */
+ frame.callee_adjust = const_size;
+ }
+- else if (crtl->outgoing_args_size.is_constant (&const_outgoing_args_size)
++ else if (frame.bytes_below_saved_regs.is_constant (&const_below_saved_regs)
+ && frame.saved_regs_size.is_constant (&const_saved_regs_size)
+- && const_outgoing_args_size + const_saved_regs_size < 512
+- /* We could handle this case even with outgoing args, provided
+- that the number of args left us with valid offsets for all
+- predicate and vector save slots. It's such a rare case that
+- it hardly seems worth the effort though. */
+- && (!saves_below_hard_fp_p || const_outgoing_args_size == 0)
++ && const_below_saved_regs + const_saved_regs_size < 512
++ /* We could handle this case even with data below the saved
++ registers, provided that that data left us with valid offsets
++ for all predicate and vector save slots. It's such a rare
++ case that it hardly seems worth the effort though. */
++ && (!saves_below_hard_fp_p || const_below_saved_regs == 0)
+ && !(cfun->calls_alloca
+ && frame.hard_fp_offset.is_constant (&const_fp_offset)
+ && const_fp_offset < max_push_offset))
+ {
+- /* Frame with small outgoing arguments:
++ /* Frame with small area below the saved registers:
+
+ sub sp, sp, frame_size
+- stp reg1, reg2, [sp, outgoing_args_size]
+- stp reg3, reg4, [sp, outgoing_args_size + 16] */
++ stp reg1, reg2, [sp, bytes_below_saved_regs]
++ stp reg3, reg4, [sp, bytes_below_saved_regs + 16] */
+ frame.initial_adjust = frame.frame_size;
+- frame.callee_offset = const_outgoing_args_size;
++ frame.callee_offset = const_below_saved_regs;
+ }
+ else if (saves_below_hard_fp_p
+ && known_eq (frame.saved_regs_size,
+@@ -8679,30 +8681,29 @@ aarch64_layout_frame (void)
+
+ sub sp, sp, hard_fp_offset + below_hard_fp_saved_regs_size
+ save SVE registers relative to SP
+- sub sp, sp, outgoing_args_size */
++ sub sp, sp, bytes_below_saved_regs */
+ frame.initial_adjust = (frame.hard_fp_offset
+ + frame.below_hard_fp_saved_regs_size);
+- frame.final_adjust = crtl->outgoing_args_size;
++ frame.final_adjust = frame.bytes_below_saved_regs;
+ }
+ else if (frame.hard_fp_offset.is_constant (&const_fp_offset)
+ && const_fp_offset < max_push_offset)
+ {
+- /* Frame with large outgoing arguments or SVE saves, but with
+- a small local area:
++ /* Frame with large area below the saved registers, or with SVE saves,
++ but with a small area above:
+
+ stp reg1, reg2, [sp, -hard_fp_offset]!
+ stp reg3, reg4, [sp, 16]
+ [sub sp, sp, below_hard_fp_saved_regs_size]
+ [save SVE registers relative to SP]
+- sub sp, sp, outgoing_args_size */
++ sub sp, sp, bytes_below_saved_regs */
+ frame.callee_adjust = const_fp_offset;
+ frame.sve_callee_adjust = frame.below_hard_fp_saved_regs_size;
+- frame.final_adjust = crtl->outgoing_args_size;
++ frame.final_adjust = frame.bytes_below_saved_regs;
+ }
+ else
+ {
+- /* Frame with large local area and outgoing arguments or SVE saves,
+- using frame pointer:
++ /* General case:
+
+ sub sp, sp, hard_fp_offset
+ stp x29, x30, [sp, 0]
+@@ -8710,10 +8711,10 @@ aarch64_layout_frame (void)
+ stp reg3, reg4, [sp, 16]
+ [sub sp, sp, below_hard_fp_saved_regs_size]
+ [save SVE registers relative to SP]
+- sub sp, sp, outgoing_args_size */
++ sub sp, sp, bytes_below_saved_regs */
+ frame.initial_adjust = frame.hard_fp_offset;
+ frame.sve_callee_adjust = frame.below_hard_fp_saved_regs_size;
+- frame.final_adjust = crtl->outgoing_args_size;
++ frame.final_adjust = frame.bytes_below_saved_regs;
+ }
+
+ /* Make sure the individual adjustments add up to the full frame size. */
+@@ -9358,7 +9359,7 @@ aarch64_get_separate_components (void)
+ if (frame_pointer_needed)
+ offset -= frame.below_hard_fp_saved_regs_size;
+ else
+- offset += crtl->outgoing_args_size;
++ offset += frame.bytes_below_saved_regs;
+
+ /* Check that we can access the stack slot of the register with one
+ direct load with no adjustments needed. */
+@@ -9507,7 +9508,7 @@ aarch64_process_components (sbitmap components, bool prologue_p)
+ if (frame_pointer_needed)
+ offset -= frame.below_hard_fp_saved_regs_size;
+ else
+- offset += crtl->outgoing_args_size;
++ offset += frame.bytes_below_saved_regs;
+
+ rtx addr = plus_constant (Pmode, ptr_reg, offset);
+ rtx mem = gen_frame_mem (mode, addr);
+@@ -9561,7 +9562,7 @@ aarch64_process_components (sbitmap components, bool prologue_p)
+ if (frame_pointer_needed)
+ offset2 -= frame.below_hard_fp_saved_regs_size;
+ else
+- offset2 += crtl->outgoing_args_size;
++ offset2 += frame.bytes_below_saved_regs;
+ rtx addr2 = plus_constant (Pmode, ptr_reg, offset2);
+ rtx mem2 = gen_frame_mem (mode, addr2);
+ rtx set2 = prologue_p ? gen_rtx_SET (mem2, reg2)
+@@ -9635,10 +9636,10 @@ aarch64_stack_clash_protection_alloca_probe_range (void)
+ registers. If POLY_SIZE is not large enough to require a probe this function
+ will only adjust the stack. When allocating the stack space
+ FRAME_RELATED_P is then used to indicate if the allocation is frame related.
+- FINAL_ADJUSTMENT_P indicates whether we are allocating the outgoing
+- arguments. If we are then we ensure that any allocation larger than the ABI
+- defined buffer needs a probe so that the invariant of having a 1KB buffer is
+- maintained.
++ FINAL_ADJUSTMENT_P indicates whether we are allocating the area below
++ the saved registers. If we are then we ensure that any allocation
++ larger than the ABI defined buffer needs a probe so that the
++ invariant of having a 1KB buffer is maintained.
+
+ We emit barriers after each stack adjustment to prevent optimizations from
+ breaking the invariant that we never drop the stack more than a page. This
+@@ -9847,7 +9848,7 @@ aarch64_allocate_and_probe_stack_space (rtx temp1, rtx temp2,
+ /* Handle any residuals. Residuals of at least MIN_PROBE_THRESHOLD have to
+ be probed. This maintains the requirement that each page is probed at
+ least once. For initial probing we probe only if the allocation is
+- more than GUARD_SIZE - buffer, and for the outgoing arguments we probe
++ more than GUARD_SIZE - buffer, and below the saved registers we probe
+ if the amount is larger than buffer. GUARD_SIZE - buffer + buffer ==
+ GUARD_SIZE. This works that for any allocation that is large enough to
+ trigger a probe here, we'll have at least one, and if they're not large
+diff --git a/gcc/config/aarch64/aarch64.h b/gcc/config/aarch64/aarch64.h
+index 73b09e20508..0b6faa3ddf1 100644
+--- a/gcc/config/aarch64/aarch64.h
++++ b/gcc/config/aarch64/aarch64.h
+@@ -777,6 +777,11 @@ struct GTY (()) aarch64_frame
+ /* The size of the callee-save registers with a slot in REG_OFFSET. */
+ poly_int64 saved_regs_size;
+
++ /* The number of bytes between the bottom of the static frame (the bottom
++ of the outgoing arguments) and the bottom of the register save area.
++ This value is always a multiple of STACK_BOUNDARY. */
++ poly_int64 bytes_below_saved_regs;
++
+ /* The size of the callee-save registers with a slot in REG_OFFSET that
+ are saved below the hard frame pointer. */
+ poly_int64 below_hard_fp_saved_regs_size;
+--
+2.34.1
+
+
+From 82f6b3e1b596ef0f4e3ac3bb9c6e88fb4458f402 Mon Sep 17 00:00:00 2001
+From: Richard Sandiford <richard.sandiford@arm.com>
+Date: Tue, 12 Sep 2023 16:07:14 +0100
+Subject: [PATCH 05/19] aarch64: Add bytes_below_hard_fp to frame info
+
+Following on from the previous bytes_below_saved_regs patch, this one
+records the number of bytes that are below the hard frame pointer.
+This eventually replaces below_hard_fp_saved_regs_size.
+
+If a frame pointer is not needed, the epilogue adds final_adjust
+to the stack pointer before restoring registers:
+
+ aarch64_add_sp (tmp1_rtx, tmp0_rtx, final_adjust, true);
+
+Therefore, if the epilogue needs to restore the stack pointer from
+the hard frame pointer, the directly corresponding offset is:
+
+ -bytes_below_hard_fp + final_adjust
+
+i.e. go from the hard frame pointer to the bottom of the frame,
+then add the same amount as if we were using the stack pointer
+from the outset.
+
+gcc/
+ * config/aarch64/aarch64.h (aarch64_frame::bytes_below_hard_fp): New
+ field.
+ * config/aarch64/aarch64.cc (aarch64_layout_frame): Initialize it.
+ (aarch64_expand_epilogue): Use it instead of
+ below_hard_fp_saved_regs_size.
+---
+ gcc/config/aarch64/aarch64.cc | 6 +++---
+ gcc/config/aarch64/aarch64.h | 5 +++++
+ 2 files changed, 8 insertions(+), 3 deletions(-)
+
+diff --git a/gcc/config/aarch64/aarch64.cc b/gcc/config/aarch64/aarch64.cc
+index dacc2b0e4dd..a3f7aabcc59 100644
+--- a/gcc/config/aarch64/aarch64.cc
++++ b/gcc/config/aarch64/aarch64.cc
+@@ -8530,6 +8530,7 @@ aarch64_layout_frame (void)
+ of the callee save area. */
+ bool saves_below_hard_fp_p = maybe_ne (offset, 0);
+ frame.below_hard_fp_saved_regs_size = offset;
++ frame.bytes_below_hard_fp = offset + frame.bytes_below_saved_regs;
+ if (frame.emit_frame_chain)
+ {
+ /* FP and LR are placed in the linkage record. */
+@@ -10171,8 +10172,7 @@ aarch64_expand_epilogue (bool for_sibcall)
+ poly_int64 final_adjust = frame.final_adjust;
+ poly_int64 callee_offset = frame.callee_offset;
+ poly_int64 sve_callee_adjust = frame.sve_callee_adjust;
+- poly_int64 below_hard_fp_saved_regs_size
+- = frame.below_hard_fp_saved_regs_size;
++ poly_int64 bytes_below_hard_fp = frame.bytes_below_hard_fp;
+ unsigned reg1 = frame.wb_pop_candidate1;
+ unsigned reg2 = frame.wb_pop_candidate2;
+ unsigned int last_gpr = (frame.is_scs_enabled
+@@ -10230,7 +10230,7 @@ aarch64_expand_epilogue (bool for_sibcall)
+ is restored on the instruction doing the writeback. */
+ aarch64_add_offset (Pmode, stack_pointer_rtx,
+ hard_frame_pointer_rtx,
+- -callee_offset - below_hard_fp_saved_regs_size,
++ -bytes_below_hard_fp + final_adjust,
+ tmp1_rtx, tmp0_rtx, callee_adjust == 0);
+ else
+ /* The case where we need to re-use the register here is very rare, so
+diff --git a/gcc/config/aarch64/aarch64.h b/gcc/config/aarch64/aarch64.h
+index 0b6faa3ddf1..4263d29d29d 100644
+--- a/gcc/config/aarch64/aarch64.h
++++ b/gcc/config/aarch64/aarch64.h
+@@ -786,6 +786,11 @@ struct GTY (()) aarch64_frame
+ are saved below the hard frame pointer. */
+ poly_int64 below_hard_fp_saved_regs_size;
+
++ /* The number of bytes between the bottom of the static frame (the bottom
++ of the outgoing arguments) and the hard frame pointer. This value is
++ always a multiple of STACK_BOUNDARY. */
++ poly_int64 bytes_below_hard_fp;
++
+ /* Offset from the base of the frame (incomming SP) to the
+ top of the locals area. This value is always a multiple of
+ STACK_BOUNDARY. */
+--
+2.34.1
+
+
+From 86fa43e9fe4a8bf954f2919f07cbe3646d1d1df3 Mon Sep 17 00:00:00 2001
+From: Richard Sandiford <richard.sandiford@arm.com>
+Date: Tue, 12 Sep 2023 16:07:14 +0100
+Subject: [PATCH 06/19] aarch64: Tweak aarch64_save/restore_callee_saves
+
+aarch64_save_callee_saves and aarch64_restore_callee_saves took
+a parameter called start_offset that gives the offset of the
+bottom of the saved register area from the current stack pointer.
+However, it's more convenient for later patches if we use the
+bottom of the entire frame as the reference point, rather than
+the bottom of the saved registers.
+
+Doing that removes the need for the callee_offset field.
+Other than that, this is not a win on its own. It only really
+makes sense in combination with the follow-on patches.
+
+gcc/
+ * config/aarch64/aarch64.h (aarch64_frame::callee_offset): Delete.
+ * config/aarch64/aarch64.cc (aarch64_layout_frame): Remove
+ callee_offset handling.
+ (aarch64_save_callee_saves): Replace the start_offset parameter
+ with a bytes_below_sp parameter.
+ (aarch64_restore_callee_saves): Likewise.
+ (aarch64_expand_prologue): Update accordingly.
+ (aarch64_expand_epilogue): Likewise.
+---
+ gcc/config/aarch64/aarch64.cc | 56 +++++++++++++++++------------------
+ gcc/config/aarch64/aarch64.h | 4 ---
+ 2 files changed, 28 insertions(+), 32 deletions(-)
+
+diff --git a/gcc/config/aarch64/aarch64.cc b/gcc/config/aarch64/aarch64.cc
+index a3f7aabcc59..46ae5cf7673 100644
+--- a/gcc/config/aarch64/aarch64.cc
++++ b/gcc/config/aarch64/aarch64.cc
+@@ -8604,7 +8604,6 @@ aarch64_layout_frame (void)
+ frame.final_adjust = 0;
+ frame.callee_adjust = 0;
+ frame.sve_callee_adjust = 0;
+- frame.callee_offset = 0;
+
+ frame.wb_pop_candidate1 = frame.wb_push_candidate1;
+ frame.wb_pop_candidate2 = frame.wb_push_candidate2;
+@@ -8672,7 +8671,6 @@ aarch64_layout_frame (void)
+ stp reg1, reg2, [sp, bytes_below_saved_regs]
+ stp reg3, reg4, [sp, bytes_below_saved_regs + 16] */
+ frame.initial_adjust = frame.frame_size;
+- frame.callee_offset = const_below_saved_regs;
+ }
+ else if (saves_below_hard_fp_p
+ && known_eq (frame.saved_regs_size,
+@@ -9073,12 +9071,13 @@ aarch64_add_cfa_expression (rtx_insn *insn, rtx reg,
+ }
+
+ /* Emit code to save the callee-saved registers from register number START
+- to LIMIT to the stack at the location starting at offset START_OFFSET,
+- skipping any write-back candidates if SKIP_WB is true. HARD_FP_VALID_P
+- is true if the hard frame pointer has been set up. */
++ to LIMIT to the stack. The stack pointer is currently BYTES_BELOW_SP
++ bytes above the bottom of the static frame. Skip any write-back
++ candidates if SKIP_WB is true. HARD_FP_VALID_P is true if the hard
++ frame pointer has been set up. */
+
+ static void
+-aarch64_save_callee_saves (poly_int64 start_offset,
++aarch64_save_callee_saves (poly_int64 bytes_below_sp,
+ unsigned start, unsigned limit, bool skip_wb,
+ bool hard_fp_valid_p)
+ {
+@@ -9106,7 +9105,9 @@ aarch64_save_callee_saves (poly_int64 start_offset,
+
+ machine_mode mode = aarch64_reg_save_mode (regno);
+ reg = gen_rtx_REG (mode, regno);
+- offset = start_offset + frame.reg_offset[regno];
++ offset = (frame.reg_offset[regno]
++ + frame.bytes_below_saved_regs
++ - bytes_below_sp);
+ rtx base_rtx = stack_pointer_rtx;
+ poly_int64 sp_offset = offset;
+
+@@ -9117,9 +9118,7 @@ aarch64_save_callee_saves (poly_int64 start_offset,
+ else if (GP_REGNUM_P (regno)
+ && (!offset.is_constant (&const_offset) || const_offset >= 512))
+ {
+- gcc_assert (known_eq (start_offset, 0));
+- poly_int64 fp_offset
+- = frame.below_hard_fp_saved_regs_size;
++ poly_int64 fp_offset = frame.bytes_below_hard_fp - bytes_below_sp;
+ if (hard_fp_valid_p)
+ base_rtx = hard_frame_pointer_rtx;
+ else
+@@ -9183,12 +9182,13 @@ aarch64_save_callee_saves (poly_int64 start_offset,
+ }
+
+ /* Emit code to restore the callee registers from register number START
+- up to and including LIMIT. Restore from the stack offset START_OFFSET,
+- skipping any write-back candidates if SKIP_WB is true. Write the
+- appropriate REG_CFA_RESTORE notes into CFI_OPS. */
++ up to and including LIMIT. The stack pointer is currently BYTES_BELOW_SP
++ bytes above the bottom of the static frame. Skip any write-back
++ candidates if SKIP_WB is true. Write the appropriate REG_CFA_RESTORE
++ notes into CFI_OPS. */
+
+ static void
+-aarch64_restore_callee_saves (poly_int64 start_offset, unsigned start,
++aarch64_restore_callee_saves (poly_int64 bytes_below_sp, unsigned start,
+ unsigned limit, bool skip_wb, rtx *cfi_ops)
+ {
+ aarch64_frame &frame = cfun->machine->frame;
+@@ -9214,7 +9214,9 @@ aarch64_restore_callee_saves (poly_int64 start_offset, unsigned start,
+
+ machine_mode mode = aarch64_reg_save_mode (regno);
+ reg = gen_rtx_REG (mode, regno);
+- offset = start_offset + frame.reg_offset[regno];
++ offset = (frame.reg_offset[regno]
++ + frame.bytes_below_saved_regs
++ - bytes_below_sp);
+ rtx base_rtx = stack_pointer_rtx;
+ if (mode == VNx2DImode && BYTES_BIG_ENDIAN)
+ aarch64_adjust_sve_callee_save_base (mode, base_rtx, anchor_reg,
+@@ -9990,8 +9992,6 @@ aarch64_expand_prologue (void)
+ HOST_WIDE_INT callee_adjust = frame.callee_adjust;
+ poly_int64 final_adjust = frame.final_adjust;
+ poly_int64 sve_callee_adjust = frame.sve_callee_adjust;
+- poly_int64 below_hard_fp_saved_regs_size
+- = frame.below_hard_fp_saved_regs_size;
+ unsigned reg1 = frame.wb_push_candidate1;
+ unsigned reg2 = frame.wb_push_candidate2;
+ bool emit_frame_chain = frame.emit_frame_chain;
+@@ -10067,8 +10067,8 @@ aarch64_expand_prologue (void)
+ - frame.hard_fp_offset);
+ gcc_assert (known_ge (chain_offset, 0));
+
+- /* The offset of the bottom of the save area from the current SP. */
+- poly_int64 saved_regs_offset = chain_offset - below_hard_fp_saved_regs_size;
++ /* The offset of the current SP from the bottom of the static frame. */
++ poly_int64 bytes_below_sp = frame_size - initial_adjust - callee_adjust;
+
+ if (emit_frame_chain)
+ {
+@@ -10076,7 +10076,7 @@ aarch64_expand_prologue (void)
+ {
+ reg1 = R29_REGNUM;
+ reg2 = R30_REGNUM;
+- aarch64_save_callee_saves (saved_regs_offset, reg1, reg2,
++ aarch64_save_callee_saves (bytes_below_sp, reg1, reg2,
+ false, false);
+ }
+ else
+@@ -10116,7 +10116,7 @@ aarch64_expand_prologue (void)
+ emit_insn (gen_stack_tie (stack_pointer_rtx, hard_frame_pointer_rtx));
+ }
+
+- aarch64_save_callee_saves (saved_regs_offset, R0_REGNUM, R30_REGNUM,
++ aarch64_save_callee_saves (bytes_below_sp, R0_REGNUM, R30_REGNUM,
+ callee_adjust != 0 || emit_frame_chain,
+ emit_frame_chain);
+ if (maybe_ne (sve_callee_adjust, 0))
+@@ -10126,16 +10126,17 @@ aarch64_expand_prologue (void)
+ aarch64_allocate_and_probe_stack_space (tmp1_rtx, tmp0_rtx,
+ sve_callee_adjust,
+ !frame_pointer_needed, false);
+- saved_regs_offset += sve_callee_adjust;
++ bytes_below_sp -= sve_callee_adjust;
+ }
+- aarch64_save_callee_saves (saved_regs_offset, P0_REGNUM, P15_REGNUM,
++ aarch64_save_callee_saves (bytes_below_sp, P0_REGNUM, P15_REGNUM,
+ false, emit_frame_chain);
+- aarch64_save_callee_saves (saved_regs_offset, V0_REGNUM, V31_REGNUM,
++ aarch64_save_callee_saves (bytes_below_sp, V0_REGNUM, V31_REGNUM,
+ callee_adjust != 0 || emit_frame_chain,
+ emit_frame_chain);
+
+ /* We may need to probe the final adjustment if it is larger than the guard
+ that is assumed by the called. */
++ gcc_assert (known_eq (bytes_below_sp, final_adjust));
+ aarch64_allocate_and_probe_stack_space (tmp1_rtx, tmp0_rtx, final_adjust,
+ !frame_pointer_needed, true);
+ }
+@@ -10170,7 +10171,6 @@ aarch64_expand_epilogue (bool for_sibcall)
+ poly_int64 initial_adjust = frame.initial_adjust;
+ HOST_WIDE_INT callee_adjust = frame.callee_adjust;
+ poly_int64 final_adjust = frame.final_adjust;
+- poly_int64 callee_offset = frame.callee_offset;
+ poly_int64 sve_callee_adjust = frame.sve_callee_adjust;
+ poly_int64 bytes_below_hard_fp = frame.bytes_below_hard_fp;
+ unsigned reg1 = frame.wb_pop_candidate1;
+@@ -10240,9 +10240,9 @@ aarch64_expand_epilogue (bool for_sibcall)
+
+ /* Restore the vector registers before the predicate registers,
+ so that we can use P4 as a temporary for big-endian SVE frames. */
+- aarch64_restore_callee_saves (callee_offset, V0_REGNUM, V31_REGNUM,
++ aarch64_restore_callee_saves (final_adjust, V0_REGNUM, V31_REGNUM,
+ callee_adjust != 0, &cfi_ops);
+- aarch64_restore_callee_saves (callee_offset, P0_REGNUM, P15_REGNUM,
++ aarch64_restore_callee_saves (final_adjust, P0_REGNUM, P15_REGNUM,
+ false, &cfi_ops);
+ if (maybe_ne (sve_callee_adjust, 0))
+ aarch64_add_sp (NULL_RTX, NULL_RTX, sve_callee_adjust, true);
+@@ -10250,7 +10250,7 @@ aarch64_expand_epilogue (bool for_sibcall)
+ /* When shadow call stack is enabled, the scs_pop in the epilogue will
+ restore x30, we don't need to restore x30 again in the traditional
+ way. */
+- aarch64_restore_callee_saves (callee_offset - sve_callee_adjust,
++ aarch64_restore_callee_saves (final_adjust + sve_callee_adjust,
+ R0_REGNUM, last_gpr,
+ callee_adjust != 0, &cfi_ops);
+
+diff --git a/gcc/config/aarch64/aarch64.h b/gcc/config/aarch64/aarch64.h
+index 4263d29d29d..fd820b1be4e 100644
+--- a/gcc/config/aarch64/aarch64.h
++++ b/gcc/config/aarch64/aarch64.h
+@@ -813,10 +813,6 @@ struct GTY (()) aarch64_frame
+ It is zero when no push is used. */
+ HOST_WIDE_INT callee_adjust;
+
+- /* The offset from SP to the callee-save registers after initial_adjust.
+- It may be non-zero if no push is used (ie. callee_adjust == 0). */
+- poly_int64 callee_offset;
+-
+ /* The size of the stack adjustment before saving or after restoring
+ SVE registers. */
+ poly_int64 sve_callee_adjust;
+--
+2.34.1
+
+
+From 8ae9181426f2700c2e5a2909487fa630e6fa406b Mon Sep 17 00:00:00 2001
+From: Richard Sandiford <richard.sandiford@arm.com>
+Date: Tue, 12 Sep 2023 16:07:15 +0100
+Subject: [PATCH 07/19] aarch64: Only calculate chain_offset if there is a
+ chain
+
+After previous patches, it is no longer necessary to calculate
+a chain_offset in cases where there is no chain record.
+
+gcc/
+ * config/aarch64/aarch64.cc (aarch64_expand_prologue): Move the
+ calculation of chain_offset into the emit_frame_chain block.
+---
+ gcc/config/aarch64/aarch64.cc | 10 +++++-----
+ 1 file changed, 5 insertions(+), 5 deletions(-)
+
+diff --git a/gcc/config/aarch64/aarch64.cc b/gcc/config/aarch64/aarch64.cc
+index 46ae5cf7673..0e9b9717c08 100644
+--- a/gcc/config/aarch64/aarch64.cc
++++ b/gcc/config/aarch64/aarch64.cc
+@@ -10062,16 +10062,16 @@ aarch64_expand_prologue (void)
+ if (callee_adjust != 0)
+ aarch64_push_regs (reg1, reg2, callee_adjust);
+
+- /* The offset of the frame chain record (if any) from the current SP. */
+- poly_int64 chain_offset = (initial_adjust + callee_adjust
+- - frame.hard_fp_offset);
+- gcc_assert (known_ge (chain_offset, 0));
+-
+ /* The offset of the current SP from the bottom of the static frame. */
+ poly_int64 bytes_below_sp = frame_size - initial_adjust - callee_adjust;
+
+ if (emit_frame_chain)
+ {
++ /* The offset of the frame chain record (if any) from the current SP. */
++ poly_int64 chain_offset = (initial_adjust + callee_adjust
++ - frame.hard_fp_offset);
++ gcc_assert (known_ge (chain_offset, 0));
++
+ if (callee_adjust == 0)
+ {
+ reg1 = R29_REGNUM;
+--
+2.34.1
+
+
+From 375794feb614cee1f41b710b9cc1b6f25da6c1cb Mon Sep 17 00:00:00 2001
+From: Richard Sandiford <richard.sandiford@arm.com>
+Date: Tue, 12 Sep 2023 16:07:15 +0100
+Subject: [PATCH 08/19] aarch64: Rename locals_offset to bytes_above_locals
+MIME-Version: 1.0
+Content-Type: text/plain; charset=UTF-8
+Content-Transfer-Encoding: 8bit
+
+locals_offset was described as:
+
+ /* Offset from the base of the frame (incomming SP) to the
+ top of the locals area. This value is always a multiple of
+ STACK_BOUNDARY. */
+
+This is implicitly an “upside down” view of the frame: the incoming
+SP is at offset 0, and anything N bytes below the incoming SP is at
+offset N (rather than -N).
+
+However, reg_offset instead uses a “right way up” view; that is,
+it views offsets in address terms. Something above X is at a
+positive offset from X and something below X is at a negative
+offset from X.
+
+Also, even on FRAME_GROWS_DOWNWARD targets like AArch64,
+target-independent code views offsets in address terms too:
+locals are allocated at negative offsets to virtual_stack_vars.
+
+It seems confusing to have *_offset fields of the same structure
+using different polarities like this. This patch tries to avoid
+that by renaming locals_offset to bytes_above_locals.
+
+gcc/
+ * config/aarch64/aarch64.h (aarch64_frame::locals_offset): Rename to...
+ (aarch64_frame::bytes_above_locals): ...this.
+ * config/aarch64/aarch64.cc (aarch64_layout_frame)
+ (aarch64_initial_elimination_offset): Update accordingly.
+---
+ gcc/config/aarch64/aarch64.cc | 6 +++---
+ gcc/config/aarch64/aarch64.h | 6 +++---
+ 2 files changed, 6 insertions(+), 6 deletions(-)
+
+diff --git a/gcc/config/aarch64/aarch64.cc b/gcc/config/aarch64/aarch64.cc
+index 0e9b9717c08..0a22f91520e 100644
+--- a/gcc/config/aarch64/aarch64.cc
++++ b/gcc/config/aarch64/aarch64.cc
+@@ -8598,7 +8598,7 @@ aarch64_layout_frame (void)
+ STACK_BOUNDARY / BITS_PER_UNIT));
+ frame.frame_size = saved_regs_and_above + frame.bytes_below_saved_regs;
+
+- frame.locals_offset = frame.saved_varargs_size;
++ frame.bytes_above_locals = frame.saved_varargs_size;
+
+ frame.initial_adjust = 0;
+ frame.final_adjust = 0;
+@@ -12754,13 +12754,13 @@ aarch64_initial_elimination_offset (unsigned from, unsigned to)
+ return frame.hard_fp_offset;
+
+ if (from == FRAME_POINTER_REGNUM)
+- return frame.hard_fp_offset - frame.locals_offset;
++ return frame.hard_fp_offset - frame.bytes_above_locals;
+ }
+
+ if (to == STACK_POINTER_REGNUM)
+ {
+ if (from == FRAME_POINTER_REGNUM)
+- return frame.frame_size - frame.locals_offset;
++ return frame.frame_size - frame.bytes_above_locals;
+ }
+
+ return frame.frame_size;
+diff --git a/gcc/config/aarch64/aarch64.h b/gcc/config/aarch64/aarch64.h
+index fd820b1be4e..7ae12d13e2b 100644
+--- a/gcc/config/aarch64/aarch64.h
++++ b/gcc/config/aarch64/aarch64.h
+@@ -791,10 +791,10 @@ struct GTY (()) aarch64_frame
+ always a multiple of STACK_BOUNDARY. */
+ poly_int64 bytes_below_hard_fp;
+
+- /* Offset from the base of the frame (incomming SP) to the
+- top of the locals area. This value is always a multiple of
++ /* The number of bytes between the top of the locals area and the top
++ of the frame (the incomming SP). This value is always a multiple of
+ STACK_BOUNDARY. */
+- poly_int64 locals_offset;
++ poly_int64 bytes_above_locals;
+
+ /* Offset from the base of the frame (incomming SP) to the
+ hard_frame_pointer. This value is always a multiple of
+--
+2.34.1
+
+
+From 1a9ea1c45c75615ffbfabe652b3598a1d7be2168 Mon Sep 17 00:00:00 2001
+From: Richard Sandiford <richard.sandiford@arm.com>
+Date: Tue, 12 Sep 2023 16:07:16 +0100
+Subject: [PATCH 09/19] aarch64: Rename hard_fp_offset to bytes_above_hard_fp
+MIME-Version: 1.0
+Content-Type: text/plain; charset=UTF-8
+Content-Transfer-Encoding: 8bit
+
+Similarly to the previous locals_offset patch, hard_fp_offset
+was described as:
+
+ /* Offset from the base of the frame (incomming SP) to the
+ hard_frame_pointer. This value is always a multiple of
+ STACK_BOUNDARY. */
+ poly_int64 hard_fp_offset;
+
+which again took an “upside-down” view: higher offsets meant lower
+addresses. This patch renames the field to bytes_above_hard_fp instead.
+
+gcc/
+ * config/aarch64/aarch64.h (aarch64_frame::hard_fp_offset): Rename
+ to...
+ (aarch64_frame::bytes_above_hard_fp): ...this.
+ * config/aarch64/aarch64.cc (aarch64_layout_frame)
+ (aarch64_expand_prologue): Update accordingly.
+ (aarch64_initial_elimination_offset): Likewise.
+---
+ gcc/config/aarch64/aarch64.cc | 26 +++++++++++++-------------
+ gcc/config/aarch64/aarch64.h | 6 +++---
+ 2 files changed, 16 insertions(+), 16 deletions(-)
+
+diff --git a/gcc/config/aarch64/aarch64.cc b/gcc/config/aarch64/aarch64.cc
+index 0a22f91520e..95499ae49ba 100644
+--- a/gcc/config/aarch64/aarch64.cc
++++ b/gcc/config/aarch64/aarch64.cc
+@@ -8590,7 +8590,7 @@ aarch64_layout_frame (void)
+ + get_frame_size (),
+ STACK_BOUNDARY / BITS_PER_UNIT);
+
+- frame.hard_fp_offset
++ frame.bytes_above_hard_fp
+ = saved_regs_and_above - frame.below_hard_fp_saved_regs_size;
+
+ /* Both these values are already aligned. */
+@@ -8639,13 +8639,13 @@ aarch64_layout_frame (void)
+ else if (frame.wb_pop_candidate1 != INVALID_REGNUM)
+ max_push_offset = 256;
+
+- HOST_WIDE_INT const_size, const_below_saved_regs, const_fp_offset;
++ HOST_WIDE_INT const_size, const_below_saved_regs, const_above_fp;
+ HOST_WIDE_INT const_saved_regs_size;
+ if (known_eq (frame.saved_regs_size, 0))
+ frame.initial_adjust = frame.frame_size;
+ else if (frame.frame_size.is_constant (&const_size)
+ && const_size < max_push_offset
+- && known_eq (frame.hard_fp_offset, const_size))
++ && known_eq (frame.bytes_above_hard_fp, const_size))
+ {
+ /* Simple, small frame with no data below the saved registers.
+
+@@ -8662,8 +8662,8 @@ aarch64_layout_frame (void)
+ case that it hardly seems worth the effort though. */
+ && (!saves_below_hard_fp_p || const_below_saved_regs == 0)
+ && !(cfun->calls_alloca
+- && frame.hard_fp_offset.is_constant (&const_fp_offset)
+- && const_fp_offset < max_push_offset))
++ && frame.bytes_above_hard_fp.is_constant (&const_above_fp)
++ && const_above_fp < max_push_offset))
+ {
+ /* Frame with small area below the saved registers:
+
+@@ -8681,12 +8681,12 @@ aarch64_layout_frame (void)
+ sub sp, sp, hard_fp_offset + below_hard_fp_saved_regs_size
+ save SVE registers relative to SP
+ sub sp, sp, bytes_below_saved_regs */
+- frame.initial_adjust = (frame.hard_fp_offset
++ frame.initial_adjust = (frame.bytes_above_hard_fp
+ + frame.below_hard_fp_saved_regs_size);
+ frame.final_adjust = frame.bytes_below_saved_regs;
+ }
+- else if (frame.hard_fp_offset.is_constant (&const_fp_offset)
+- && const_fp_offset < max_push_offset)
++ else if (frame.bytes_above_hard_fp.is_constant (&const_above_fp)
++ && const_above_fp < max_push_offset)
+ {
+ /* Frame with large area below the saved registers, or with SVE saves,
+ but with a small area above:
+@@ -8696,7 +8696,7 @@ aarch64_layout_frame (void)
+ [sub sp, sp, below_hard_fp_saved_regs_size]
+ [save SVE registers relative to SP]
+ sub sp, sp, bytes_below_saved_regs */
+- frame.callee_adjust = const_fp_offset;
++ frame.callee_adjust = const_above_fp;
+ frame.sve_callee_adjust = frame.below_hard_fp_saved_regs_size;
+ frame.final_adjust = frame.bytes_below_saved_regs;
+ }
+@@ -8711,7 +8711,7 @@ aarch64_layout_frame (void)
+ [sub sp, sp, below_hard_fp_saved_regs_size]
+ [save SVE registers relative to SP]
+ sub sp, sp, bytes_below_saved_regs */
+- frame.initial_adjust = frame.hard_fp_offset;
++ frame.initial_adjust = frame.bytes_above_hard_fp;
+ frame.sve_callee_adjust = frame.below_hard_fp_saved_regs_size;
+ frame.final_adjust = frame.bytes_below_saved_regs;
+ }
+@@ -10069,7 +10069,7 @@ aarch64_expand_prologue (void)
+ {
+ /* The offset of the frame chain record (if any) from the current SP. */
+ poly_int64 chain_offset = (initial_adjust + callee_adjust
+- - frame.hard_fp_offset);
++ - frame.bytes_above_hard_fp);
+ gcc_assert (known_ge (chain_offset, 0));
+
+ if (callee_adjust == 0)
+@@ -12751,10 +12751,10 @@ aarch64_initial_elimination_offset (unsigned from, unsigned to)
+ if (to == HARD_FRAME_POINTER_REGNUM)
+ {
+ if (from == ARG_POINTER_REGNUM)
+- return frame.hard_fp_offset;
++ return frame.bytes_above_hard_fp;
+
+ if (from == FRAME_POINTER_REGNUM)
+- return frame.hard_fp_offset - frame.bytes_above_locals;
++ return frame.bytes_above_hard_fp - frame.bytes_above_locals;
+ }
+
+ if (to == STACK_POINTER_REGNUM)
+diff --git a/gcc/config/aarch64/aarch64.h b/gcc/config/aarch64/aarch64.h
+index 7ae12d13e2b..3808f49e9ca 100644
+--- a/gcc/config/aarch64/aarch64.h
++++ b/gcc/config/aarch64/aarch64.h
+@@ -796,10 +796,10 @@ struct GTY (()) aarch64_frame
+ STACK_BOUNDARY. */
+ poly_int64 bytes_above_locals;
+
+- /* Offset from the base of the frame (incomming SP) to the
+- hard_frame_pointer. This value is always a multiple of
++ /* The number of bytes between the hard_frame_pointer and the top of
++ the frame (the incomming SP). This value is always a multiple of
+ STACK_BOUNDARY. */
+- poly_int64 hard_fp_offset;
++ poly_int64 bytes_above_hard_fp;
+
+ /* The size of the frame. This value is the offset from base of the
+ frame (incomming SP) to the stack_pointer. This value is always
+--
+2.34.1
+
+
+From d202ce1ecf60a36a3e1009917dd76109248ce9be Mon Sep 17 00:00:00 2001
+From: Richard Sandiford <richard.sandiford@arm.com>
+Date: Tue, 12 Sep 2023 16:07:16 +0100
+Subject: [PATCH 10/19] aarch64: Tweak frame_size comment
+MIME-Version: 1.0
+Content-Type: text/plain; charset=UTF-8
+Content-Transfer-Encoding: 8bit
+
+This patch fixes another case in which a value was described with
+an “upside-down” view.
+
+gcc/
+ * config/aarch64/aarch64.h (aarch64_frame::frame_size): Tweak comment.
+---
+ gcc/config/aarch64/aarch64.h | 4 ++--
+ 1 file changed, 2 insertions(+), 2 deletions(-)
+
+diff --git a/gcc/config/aarch64/aarch64.h b/gcc/config/aarch64/aarch64.h
+index 3808f49e9ca..108a5731b0d 100644
+--- a/gcc/config/aarch64/aarch64.h
++++ b/gcc/config/aarch64/aarch64.h
+@@ -801,8 +801,8 @@ struct GTY (()) aarch64_frame
+ STACK_BOUNDARY. */
+ poly_int64 bytes_above_hard_fp;
+
+- /* The size of the frame. This value is the offset from base of the
+- frame (incomming SP) to the stack_pointer. This value is always
++ /* The size of the frame, i.e. the number of bytes between the bottom
++ of the outgoing arguments and the incoming SP. This value is always
+ a multiple of STACK_BOUNDARY. */
+ poly_int64 frame_size;
+
+--
+2.34.1
+
+
+From f2b585375205b0a1802d79c682ba33766ecd1f0f Mon Sep 17 00:00:00 2001
+From: Richard Sandiford <richard.sandiford@arm.com>
+Date: Tue, 12 Sep 2023 16:07:17 +0100
+Subject: [PATCH 11/19] aarch64: Measure reg_offset from the bottom of the
+ frame
+
+reg_offset was measured from the bottom of the saved register area.
+This made perfect sense with the original layout, since the bottom
+of the saved register area was also the hard frame pointer address.
+It became slightly less obvious with SVE, since we save SVE
+registers below the hard frame pointer, but it still made sense.
+
+However, if we want to allow different frame layouts, it's more
+convenient and obvious to measure reg_offset from the bottom of
+the frame. After previous patches, it's also a slight simplification
+in its own right.
+
+gcc/
+ * config/aarch64/aarch64.h (aarch64_frame): Add comment above
+ reg_offset.
+ * config/aarch64/aarch64.cc (aarch64_layout_frame): Walk offsets
+ from the bottom of the frame, rather than the bottom of the saved
+ register area. Measure reg_offset from the bottom of the frame
+ rather than the bottom of the saved register area.
+ (aarch64_save_callee_saves): Update accordingly.
+ (aarch64_restore_callee_saves): Likewise.
+ (aarch64_get_separate_components): Likewise.
+ (aarch64_process_components): Likewise.
+---
+ gcc/config/aarch64/aarch64.cc | 53 ++++++++++++++++-------------------
+ gcc/config/aarch64/aarch64.h | 3 ++
+ 2 files changed, 27 insertions(+), 29 deletions(-)
+
+diff --git a/gcc/config/aarch64/aarch64.cc b/gcc/config/aarch64/aarch64.cc
+index 95499ae49ba..af99807ef8a 100644
+--- a/gcc/config/aarch64/aarch64.cc
++++ b/gcc/config/aarch64/aarch64.cc
+@@ -8400,7 +8400,6 @@ aarch64_needs_frame_chain (void)
+ static void
+ aarch64_layout_frame (void)
+ {
+- poly_int64 offset = 0;
+ int regno, last_fp_reg = INVALID_REGNUM;
+ machine_mode vector_save_mode = aarch64_reg_save_mode (V8_REGNUM);
+ poly_int64 vector_save_size = GET_MODE_SIZE (vector_save_mode);
+@@ -8478,7 +8477,9 @@ aarch64_layout_frame (void)
+ gcc_assert (crtl->is_leaf
+ || maybe_ne (frame.reg_offset[R30_REGNUM], SLOT_NOT_REQUIRED));
+
+- frame.bytes_below_saved_regs = crtl->outgoing_args_size;
++ poly_int64 offset = crtl->outgoing_args_size;
++ gcc_assert (multiple_p (offset, STACK_BOUNDARY / BITS_PER_UNIT));
++ frame.bytes_below_saved_regs = offset;
+
+ /* Now assign stack slots for the registers. Start with the predicate
+ registers, since predicate LDR and STR have a relatively small
+@@ -8490,7 +8491,8 @@ aarch64_layout_frame (void)
+ offset += BYTES_PER_SVE_PRED;
+ }
+
+- if (maybe_ne (offset, 0))
++ poly_int64 saved_prs_size = offset - frame.bytes_below_saved_regs;
++ if (maybe_ne (saved_prs_size, 0))
+ {
+ /* If we have any vector registers to save above the predicate registers,
+ the offset of the vector register save slots need to be a multiple
+@@ -8508,10 +8510,10 @@ aarch64_layout_frame (void)
+ offset = aligned_upper_bound (offset, STACK_BOUNDARY / BITS_PER_UNIT);
+ else
+ {
+- if (known_le (offset, vector_save_size))
+- offset = vector_save_size;
+- else if (known_le (offset, vector_save_size * 2))
+- offset = vector_save_size * 2;
++ if (known_le (saved_prs_size, vector_save_size))
++ offset = frame.bytes_below_saved_regs + vector_save_size;
++ else if (known_le (saved_prs_size, vector_save_size * 2))
++ offset = frame.bytes_below_saved_regs + vector_save_size * 2;
+ else
+ gcc_unreachable ();
+ }
+@@ -8528,9 +8530,10 @@ aarch64_layout_frame (void)
+
+ /* OFFSET is now the offset of the hard frame pointer from the bottom
+ of the callee save area. */
+- bool saves_below_hard_fp_p = maybe_ne (offset, 0);
+- frame.below_hard_fp_saved_regs_size = offset;
+- frame.bytes_below_hard_fp = offset + frame.bytes_below_saved_regs;
++ frame.below_hard_fp_saved_regs_size = offset - frame.bytes_below_saved_regs;
++ bool saves_below_hard_fp_p
++ = maybe_ne (frame.below_hard_fp_saved_regs_size, 0);
++ frame.bytes_below_hard_fp = offset;
+ if (frame.emit_frame_chain)
+ {
+ /* FP and LR are placed in the linkage record. */
+@@ -8581,9 +8584,10 @@ aarch64_layout_frame (void)
+
+ offset = aligned_upper_bound (offset, STACK_BOUNDARY / BITS_PER_UNIT);
+
+- frame.saved_regs_size = offset;
++ frame.saved_regs_size = offset - frame.bytes_below_saved_regs;
+
+- poly_int64 varargs_and_saved_regs_size = offset + frame.saved_varargs_size;
++ poly_int64 varargs_and_saved_regs_size
++ = frame.saved_regs_size + frame.saved_varargs_size;
+
+ poly_int64 saved_regs_and_above
+ = aligned_upper_bound (varargs_and_saved_regs_size
+@@ -9105,9 +9109,7 @@ aarch64_save_callee_saves (poly_int64 bytes_below_sp,
+
+ machine_mode mode = aarch64_reg_save_mode (regno);
+ reg = gen_rtx_REG (mode, regno);
+- offset = (frame.reg_offset[regno]
+- + frame.bytes_below_saved_regs
+- - bytes_below_sp);
++ offset = frame.reg_offset[regno] - bytes_below_sp;
+ rtx base_rtx = stack_pointer_rtx;
+ poly_int64 sp_offset = offset;
+
+@@ -9214,9 +9216,7 @@ aarch64_restore_callee_saves (poly_int64 bytes_below_sp, unsigned start,
+
+ machine_mode mode = aarch64_reg_save_mode (regno);
+ reg = gen_rtx_REG (mode, regno);
+- offset = (frame.reg_offset[regno]
+- + frame.bytes_below_saved_regs
+- - bytes_below_sp);
++ offset = frame.reg_offset[regno] - bytes_below_sp;
+ rtx base_rtx = stack_pointer_rtx;
+ if (mode == VNx2DImode && BYTES_BIG_ENDIAN)
+ aarch64_adjust_sve_callee_save_base (mode, base_rtx, anchor_reg,
+@@ -9355,14 +9355,12 @@ aarch64_get_separate_components (void)
+ it as a stack probe for -fstack-clash-protection. */
+ if (flag_stack_clash_protection
+ && maybe_ne (frame.below_hard_fp_saved_regs_size, 0)
+- && known_eq (offset, 0))
++ && known_eq (offset, frame.bytes_below_saved_regs))
+ continue;
+
+ /* Get the offset relative to the register we'll use. */
+ if (frame_pointer_needed)
+- offset -= frame.below_hard_fp_saved_regs_size;
+- else
+- offset += frame.bytes_below_saved_regs;
++ offset -= frame.bytes_below_hard_fp;
+
+ /* Check that we can access the stack slot of the register with one
+ direct load with no adjustments needed. */
+@@ -9509,9 +9507,7 @@ aarch64_process_components (sbitmap components, bool prologue_p)
+ rtx reg = gen_rtx_REG (mode, regno);
+ poly_int64 offset = frame.reg_offset[regno];
+ if (frame_pointer_needed)
+- offset -= frame.below_hard_fp_saved_regs_size;
+- else
+- offset += frame.bytes_below_saved_regs;
++ offset -= frame.bytes_below_hard_fp;
+
+ rtx addr = plus_constant (Pmode, ptr_reg, offset);
+ rtx mem = gen_frame_mem (mode, addr);
+@@ -9563,9 +9559,7 @@ aarch64_process_components (sbitmap components, bool prologue_p)
+ /* REGNO2 can be saved/restored in a pair with REGNO. */
+ rtx reg2 = gen_rtx_REG (mode, regno2);
+ if (frame_pointer_needed)
+- offset2 -= frame.below_hard_fp_saved_regs_size;
+- else
+- offset2 += frame.bytes_below_saved_regs;
++ offset2 -= frame.bytes_below_hard_fp;
+ rtx addr2 = plus_constant (Pmode, ptr_reg, offset2);
+ rtx mem2 = gen_frame_mem (mode, addr2);
+ rtx set2 = prologue_p ? gen_rtx_SET (mem2, reg2)
+@@ -9681,7 +9675,8 @@ aarch64_allocate_and_probe_stack_space (rtx temp1, rtx temp2,
+ if (final_adjustment_p
+ && known_eq (frame.below_hard_fp_saved_regs_size, 0))
+ {
+- poly_int64 lr_offset = frame.reg_offset[LR_REGNUM];
++ poly_int64 lr_offset = (frame.reg_offset[LR_REGNUM]
++ - frame.bytes_below_saved_regs);
+ if (known_ge (lr_offset, 0))
+ min_probe_threshold -= lr_offset.to_constant ();
+ else
+diff --git a/gcc/config/aarch64/aarch64.h b/gcc/config/aarch64/aarch64.h
+index 108a5731b0d..c8becb098c8 100644
+--- a/gcc/config/aarch64/aarch64.h
++++ b/gcc/config/aarch64/aarch64.h
+@@ -766,6 +766,9 @@ extern enum aarch64_processor aarch64_tune;
+ #ifdef HAVE_POLY_INT_H
+ struct GTY (()) aarch64_frame
+ {
++ /* The offset from the bottom of the static frame (the bottom of the
++ outgoing arguments) of each register save slot, or -2 if no save is
++ needed. */
+ poly_int64 reg_offset[LAST_SAVED_REGNUM + 1];
+
+ /* The number of extra stack bytes taken up by register varargs.
+--
+2.34.1
+
+
+From 79faabda181d0d9fd29a3cf5726ba65bdee945b5 Mon Sep 17 00:00:00 2001
+From: Richard Sandiford <richard.sandiford@arm.com>
+Date: Tue, 12 Sep 2023 16:07:17 +0100
+Subject: [PATCH 12/19] aarch64: Simplify top of frame allocation
+
+After previous patches, it no longer really makes sense to allocate
+the top of the frame in terms of varargs_and_saved_regs_size and
+saved_regs_and_above.
+
+gcc/
+ * config/aarch64/aarch64.cc (aarch64_layout_frame): Simplify
+ the allocation of the top of the frame.
+---
+ gcc/config/aarch64/aarch64.cc | 23 ++++++++---------------
+ 1 file changed, 8 insertions(+), 15 deletions(-)
+
+diff --git a/gcc/config/aarch64/aarch64.cc b/gcc/config/aarch64/aarch64.cc
+index af99807ef8a..31b00094c2a 100644
+--- a/gcc/config/aarch64/aarch64.cc
++++ b/gcc/config/aarch64/aarch64.cc
+@@ -8586,23 +8586,16 @@ aarch64_layout_frame (void)
+
+ frame.saved_regs_size = offset - frame.bytes_below_saved_regs;
+
+- poly_int64 varargs_and_saved_regs_size
+- = frame.saved_regs_size + frame.saved_varargs_size;
+-
+- poly_int64 saved_regs_and_above
+- = aligned_upper_bound (varargs_and_saved_regs_size
+- + get_frame_size (),
+- STACK_BOUNDARY / BITS_PER_UNIT);
+-
+- frame.bytes_above_hard_fp
+- = saved_regs_and_above - frame.below_hard_fp_saved_regs_size;
++ offset += get_frame_size ();
++ offset = aligned_upper_bound (offset, STACK_BOUNDARY / BITS_PER_UNIT);
++ auto top_of_locals = offset;
+
+- /* Both these values are already aligned. */
+- gcc_assert (multiple_p (frame.bytes_below_saved_regs,
+- STACK_BOUNDARY / BITS_PER_UNIT));
+- frame.frame_size = saved_regs_and_above + frame.bytes_below_saved_regs;
++ offset += frame.saved_varargs_size;
++ gcc_assert (multiple_p (offset, STACK_BOUNDARY / BITS_PER_UNIT));
++ frame.frame_size = offset;
+
+- frame.bytes_above_locals = frame.saved_varargs_size;
++ frame.bytes_above_hard_fp = frame.frame_size - frame.bytes_below_hard_fp;
++ frame.bytes_above_locals = frame.frame_size - top_of_locals;
+
+ frame.initial_adjust = 0;
+ frame.final_adjust = 0;
+--
+2.34.1
+
+
+From 4e62049e403b141e6f916176160dac8cbd65fe47 Mon Sep 17 00:00:00 2001
+From: Richard Sandiford <richard.sandiford@arm.com>
+Date: Tue, 12 Sep 2023 16:07:18 +0100
+Subject: [PATCH 13/19] aarch64: Minor initial adjustment tweak
+
+This patch just changes a calculation of initial_adjust
+to one that makes it slightly more obvious that the total
+adjustment is frame.frame_size.
+
+gcc/
+ * config/aarch64/aarch64.cc (aarch64_layout_frame): Tweak
+ calculation of initial_adjust for frames in which all saves
+ are SVE saves.
+---
+ gcc/config/aarch64/aarch64.cc | 5 ++---
+ 1 file changed, 2 insertions(+), 3 deletions(-)
+
+diff --git a/gcc/config/aarch64/aarch64.cc b/gcc/config/aarch64/aarch64.cc
+index 31b00094c2a..1aa79da0673 100644
+--- a/gcc/config/aarch64/aarch64.cc
++++ b/gcc/config/aarch64/aarch64.cc
+@@ -8675,11 +8675,10 @@ aarch64_layout_frame (void)
+ {
+ /* Frame in which all saves are SVE saves:
+
+- sub sp, sp, hard_fp_offset + below_hard_fp_saved_regs_size
++ sub sp, sp, frame_size - bytes_below_saved_regs
+ save SVE registers relative to SP
+ sub sp, sp, bytes_below_saved_regs */
+- frame.initial_adjust = (frame.bytes_above_hard_fp
+- + frame.below_hard_fp_saved_regs_size);
++ frame.initial_adjust = frame.frame_size - frame.bytes_below_saved_regs;
+ frame.final_adjust = frame.bytes_below_saved_regs;
+ }
+ else if (frame.bytes_above_hard_fp.is_constant (&const_above_fp)
+--
+2.34.1
+
+
+From aaa1a0a5912d9e5d571e5f1c6f09ceac99544ab5 Mon Sep 17 00:00:00 2001
+From: Richard Sandiford <richard.sandiford@arm.com>
+Date: Tue, 12 Sep 2023 16:07:18 +0100
+Subject: [PATCH 14/19] aarch64: Tweak stack clash boundary condition
+
+The AArch64 ABI says that, when stack clash protection is used,
+there can be a maximum of 1KiB of unprobed space at sp on entry
+to a function. Therefore, we need to probe when allocating
+>= guard_size - 1KiB of data (>= rather than >). This is what
+GCC does.
+
+If an allocation is exactly guard_size bytes, it is enough to allocate
+those bytes and probe once at offset 1024. It isn't possible to use a
+single probe at any other offset: higher would conmplicate later code,
+by leaving more unprobed space than usual, while lower would risk
+leaving an entire page unprobed. For simplicity, the code probes all
+allocations at offset 1024.
+
+Some register saves also act as probes. If we need to allocate
+more space below the last such register save probe, we need to
+probe the allocation if it is > 1KiB. Again, this allocation is
+then sometimes (but not always) probed at offset 1024. This sort of
+allocation is currently only used for outgoing arguments, which are
+rarely this big.
+
+However, the code also probed if this final outgoing-arguments
+allocation was == 1KiB, rather than just > 1KiB. This isn't
+necessary, since the register save then probes at offset 1024
+as required. Continuing to probe allocations of exactly 1KiB
+would complicate later patches.
+
+gcc/
+ * config/aarch64/aarch64.cc (aarch64_allocate_and_probe_stack_space):
+ Don't probe final allocations that are exactly 1KiB in size (after
+ unprobed space above the final allocation has been deducted).
+
+gcc/testsuite/
+ * gcc.target/aarch64/stack-check-prologue-17.c: New test.
+---
+ gcc/config/aarch64/aarch64.cc | 4 +-
+ .../aarch64/stack-check-prologue-17.c | 55 +++++++++++++++++++
+ 2 files changed, 58 insertions(+), 1 deletion(-)
+ create mode 100644 gcc/testsuite/gcc.target/aarch64/stack-check-prologue-17.c
+
+diff --git a/gcc/config/aarch64/aarch64.cc b/gcc/config/aarch64/aarch64.cc
+index 1aa79da0673..5cad847977a 100644
+--- a/gcc/config/aarch64/aarch64.cc
++++ b/gcc/config/aarch64/aarch64.cc
+@@ -9648,9 +9648,11 @@ aarch64_allocate_and_probe_stack_space (rtx temp1, rtx temp2,
+ HOST_WIDE_INT guard_size
+ = 1 << param_stack_clash_protection_guard_size;
+ HOST_WIDE_INT guard_used_by_caller = STACK_CLASH_CALLER_GUARD;
++ HOST_WIDE_INT byte_sp_alignment = STACK_BOUNDARY / BITS_PER_UNIT;
++ gcc_assert (multiple_p (poly_size, byte_sp_alignment));
+ HOST_WIDE_INT min_probe_threshold
+ = (final_adjustment_p
+- ? guard_used_by_caller
++ ? guard_used_by_caller + byte_sp_alignment
+ : guard_size - guard_used_by_caller);
+ /* When doing the final adjustment for the outgoing arguments, take into
+ account any unprobed space there is above the current SP. There are
+diff --git a/gcc/testsuite/gcc.target/aarch64/stack-check-prologue-17.c b/gcc/testsuite/gcc.target/aarch64/stack-check-prologue-17.c
+new file mode 100644
+index 00000000000..0d8a25d73a2
+--- /dev/null
++++ b/gcc/testsuite/gcc.target/aarch64/stack-check-prologue-17.c
+@@ -0,0 +1,55 @@
++/* { dg-options "-O2 -fstack-clash-protection -fomit-frame-pointer --param stack-clash-protection-guard-size=12" } */
++/* { dg-final { check-function-bodies "**" "" } } */
++
++void f(int, ...);
++void g();
++
++/*
++** test1:
++** ...
++** str x30, \[sp\]
++** sub sp, sp, #1024
++** cbnz w0, .*
++** bl g
++** ...
++*/
++int test1(int z) {
++ __uint128_t x = 0;
++ int y[0x400];
++ if (z)
++ {
++ f(0, 0, 0, 0, 0, 0, 0, &y,
++ x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x,
++ x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x,
++ x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x,
++ x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x);
++ }
++ g();
++ return 1;
++}
++
++/*
++** test2:
++** ...
++** str x30, \[sp\]
++** sub sp, sp, #1040
++** str xzr, \[sp\]
++** cbnz w0, .*
++** bl g
++** ...
++*/
++int test2(int z) {
++ __uint128_t x = 0;
++ int y[0x400];
++ if (z)
++ {
++ f(0, 0, 0, 0, 0, 0, 0, &y,
++ x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x,
++ x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x,
++ x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x,
++ x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x,
++ x);
++ }
++ g();
++ return 1;
++}
+--
+2.34.1
+
+
+From 8433953434a7b58c0923140d39eb3c5988c1d097 Mon Sep 17 00:00:00 2001
+From: Richard Sandiford <richard.sandiford@arm.com>
+Date: Tue, 12 Sep 2023 16:07:19 +0100
+Subject: [PATCH 15/19] aarch64: Put LR save probe in first 16 bytes
+
+-fstack-clash-protection uses the save of LR as a probe for the next
+allocation. The next allocation could be:
+
+* another part of the static frame, e.g. when allocating SVE save slots
+ or outgoing arguments
+
+* an alloca in the same function
+
+* an allocation made by a callee function
+
+However, when -fomit-frame-pointer is used, the LR save slot is placed
+above the other GPR save slots. It could therefore be up to 80 bytes
+above the base of the GPR save area (which is also the hard fp address).
+
+aarch64_allocate_and_probe_stack_space took this into account when
+deciding how much subsequent space could be allocated without needing
+a probe. However, it interacted badly with:
+
+ /* If doing a small final adjustment, we always probe at offset 0.
+ This is done to avoid issues when LR is not at position 0 or when
+ the final adjustment is smaller than the probing offset. */
+ else if (final_adjustment_p && rounded_size == 0)
+ residual_probe_offset = 0;
+
+which forces any allocation that is smaller than the guard page size
+to be probed at offset 0 rather than the usual offset 1024. It was
+therefore possible to construct cases in which we had:
+
+* a probe using LR at SP + 80 bytes (or some other value >= 16)
+* an allocation of the guard page size - 16 bytes
+* a probe at SP + 0
+
+which allocates guard page size + 64 consecutive unprobed bytes.
+
+This patch requires the LR probe to be in the first 16 bytes of the
+save area when stack clash protection is active. Doing it
+unconditionally would cause code-quality regressions.
+
+Putting LR before other registers prevents push/pop allocation
+when shadow call stacks are enabled, since LR is restored
+separately from the other callee-saved registers.
+
+The new comment doesn't say that the probe register is required
+to be LR, since a later patch removes that restriction.
+
+gcc/
+ * config/aarch64/aarch64.cc (aarch64_layout_frame): Ensure that
+ the LR save slot is in the first 16 bytes of the register save area.
+ Only form STP/LDP push/pop candidates if both registers are valid.
+ (aarch64_allocate_and_probe_stack_space): Remove workaround for
+ when LR was not in the first 16 bytes.
+
+gcc/testsuite/
+ * gcc.target/aarch64/stack-check-prologue-18.c: New test.
+ * gcc.target/aarch64/stack-check-prologue-19.c: Likewise.
+ * gcc.target/aarch64/stack-check-prologue-20.c: Likewise.
+---
+ gcc/config/aarch64/aarch64.cc | 72 ++++++-------
+ .../aarch64/stack-check-prologue-18.c | 100 ++++++++++++++++++
+ .../aarch64/stack-check-prologue-19.c | 100 ++++++++++++++++++
+ .../aarch64/stack-check-prologue-20.c | 3 +
+ 4 files changed, 233 insertions(+), 42 deletions(-)
+ create mode 100644 gcc/testsuite/gcc.target/aarch64/stack-check-prologue-18.c
+ create mode 100644 gcc/testsuite/gcc.target/aarch64/stack-check-prologue-19.c
+ create mode 100644 gcc/testsuite/gcc.target/aarch64/stack-check-prologue-20.c
+
+diff --git a/gcc/config/aarch64/aarch64.cc b/gcc/config/aarch64/aarch64.cc
+index 5cad847977a..a765f92329d 100644
+--- a/gcc/config/aarch64/aarch64.cc
++++ b/gcc/config/aarch64/aarch64.cc
+@@ -8534,26 +8534,34 @@ aarch64_layout_frame (void)
+ bool saves_below_hard_fp_p
+ = maybe_ne (frame.below_hard_fp_saved_regs_size, 0);
+ frame.bytes_below_hard_fp = offset;
++
++ auto allocate_gpr_slot = [&](unsigned int regno)
++ {
++ frame.reg_offset[regno] = offset;
++ if (frame.wb_push_candidate1 == INVALID_REGNUM)
++ frame.wb_push_candidate1 = regno;
++ else if (frame.wb_push_candidate2 == INVALID_REGNUM)
++ frame.wb_push_candidate2 = regno;
++ offset += UNITS_PER_WORD;
++ };
++
+ if (frame.emit_frame_chain)
+ {
+ /* FP and LR are placed in the linkage record. */
+- frame.reg_offset[R29_REGNUM] = offset;
+- frame.wb_push_candidate1 = R29_REGNUM;
+- frame.reg_offset[R30_REGNUM] = offset + UNITS_PER_WORD;
+- frame.wb_push_candidate2 = R30_REGNUM;
+- offset += 2 * UNITS_PER_WORD;
++ allocate_gpr_slot (R29_REGNUM);
++ allocate_gpr_slot (R30_REGNUM);
+ }
++ else if (flag_stack_clash_protection
++ && known_eq (frame.reg_offset[R30_REGNUM], SLOT_REQUIRED))
++ /* Put the LR save slot first, since it makes a good choice of probe
++ for stack clash purposes. The idea is that the link register usually
++ has to be saved before a call anyway, and so we lose little by
++ stopping it from being individually shrink-wrapped. */
++ allocate_gpr_slot (R30_REGNUM);
+
+ for (regno = R0_REGNUM; regno <= R30_REGNUM; regno++)
+ if (known_eq (frame.reg_offset[regno], SLOT_REQUIRED))
+- {
+- frame.reg_offset[regno] = offset;
+- if (frame.wb_push_candidate1 == INVALID_REGNUM)
+- frame.wb_push_candidate1 = regno;
+- else if (frame.wb_push_candidate2 == INVALID_REGNUM)
+- frame.wb_push_candidate2 = regno;
+- offset += UNITS_PER_WORD;
+- }
++ allocate_gpr_slot (regno);
+
+ poly_int64 max_int_offset = offset;
+ offset = aligned_upper_bound (offset, STACK_BOUNDARY / BITS_PER_UNIT);
+@@ -8631,10 +8639,13 @@ aarch64_layout_frame (void)
+ max_push_offset to 0, because no registers are popped at this time,
+ so callee_adjust cannot be adjusted. */
+ HOST_WIDE_INT max_push_offset = 0;
+- if (frame.wb_pop_candidate2 != INVALID_REGNUM)
+- max_push_offset = 512;
+- else if (frame.wb_pop_candidate1 != INVALID_REGNUM)
+- max_push_offset = 256;
++ if (frame.wb_pop_candidate1 != INVALID_REGNUM)
++ {
++ if (frame.wb_pop_candidate2 != INVALID_REGNUM)
++ max_push_offset = 512;
++ else
++ max_push_offset = 256;
++ }
+
+ HOST_WIDE_INT const_size, const_below_saved_regs, const_above_fp;
+ HOST_WIDE_INT const_saved_regs_size;
+@@ -9654,29 +9665,6 @@ aarch64_allocate_and_probe_stack_space (rtx temp1, rtx temp2,
+ = (final_adjustment_p
+ ? guard_used_by_caller + byte_sp_alignment
+ : guard_size - guard_used_by_caller);
+- /* When doing the final adjustment for the outgoing arguments, take into
+- account any unprobed space there is above the current SP. There are
+- two cases:
+-
+- - When saving SVE registers below the hard frame pointer, we force
+- the lowest save to take place in the prologue before doing the final
+- adjustment (i.e. we don't allow the save to be shrink-wrapped).
+- This acts as a probe at SP, so there is no unprobed space.
+-
+- - When there are no SVE register saves, we use the store of the link
+- register as a probe. We can't assume that LR was saved at position 0
+- though, so treat any space below it as unprobed. */
+- if (final_adjustment_p
+- && known_eq (frame.below_hard_fp_saved_regs_size, 0))
+- {
+- poly_int64 lr_offset = (frame.reg_offset[LR_REGNUM]
+- - frame.bytes_below_saved_regs);
+- if (known_ge (lr_offset, 0))
+- min_probe_threshold -= lr_offset.to_constant ();
+- else
+- gcc_assert (!flag_stack_clash_protection || known_eq (poly_size, 0));
+- }
+-
+ poly_int64 frame_size = frame.frame_size;
+
+ /* We should always have a positive probe threshold. */
+@@ -9856,8 +9844,8 @@ aarch64_allocate_and_probe_stack_space (rtx temp1, rtx temp2,
+ if (final_adjustment_p && rounded_size != 0)
+ min_probe_threshold = 0;
+ /* If doing a small final adjustment, we always probe at offset 0.
+- This is done to avoid issues when LR is not at position 0 or when
+- the final adjustment is smaller than the probing offset. */
++ This is done to avoid issues when the final adjustment is smaller
++ than the probing offset. */
+ else if (final_adjustment_p && rounded_size == 0)
+ residual_probe_offset = 0;
+
+diff --git a/gcc/testsuite/gcc.target/aarch64/stack-check-prologue-18.c b/gcc/testsuite/gcc.target/aarch64/stack-check-prologue-18.c
+new file mode 100644
+index 00000000000..82447d20fff
+--- /dev/null
++++ b/gcc/testsuite/gcc.target/aarch64/stack-check-prologue-18.c
+@@ -0,0 +1,100 @@
++/* { dg-options "-O2 -fstack-clash-protection -fomit-frame-pointer --param stack-clash-protection-guard-size=12" } */
++/* { dg-final { check-function-bodies "**" "" } } */
++
++void f(int, ...);
++void g();
++
++/*
++** test1:
++** ...
++** str x30, \[sp\]
++** sub sp, sp, #4064
++** str xzr, \[sp\]
++** cbnz w0, .*
++** bl g
++** ...
++** str x26, \[sp, #?4128\]
++** ...
++*/
++int test1(int z) {
++ __uint128_t x = 0;
++ int y[0x400];
++ if (z)
++ {
++ asm volatile ("" :::
++ "x19", "x20", "x21", "x22", "x23", "x24", "x25", "x26");
++ f(0, 0, 0, 0, 0, 0, 0, &y,
++ x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x,
++ x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x,
++ x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x,
++ x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x,
++ x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x,
++ x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x,
++ x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x,
++ x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x,
++ x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x,
++ x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x,
++ x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x,
++ x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x,
++ x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x,
++ x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x,
++ x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x,
++ x, x, x, x, x, x, x, x, x, x, x, x, x, x);
++ }
++ g();
++ return 1;
++}
++
++/*
++** test2:
++** ...
++** str x30, \[sp\]
++** sub sp, sp, #1040
++** str xzr, \[sp\]
++** cbnz w0, .*
++** bl g
++** ...
++*/
++int test2(int z) {
++ __uint128_t x = 0;
++ int y[0x400];
++ if (z)
++ {
++ asm volatile ("" :::
++ "x19", "x20", "x21", "x22", "x23", "x24", "x25", "x26");
++ f(0, 0, 0, 0, 0, 0, 0, &y,
++ x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x,
++ x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x,
++ x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x,
++ x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x,
++ x);
++ }
++ g();
++ return 1;
++}
++
++/*
++** test3:
++** ...
++** str x30, \[sp\]
++** sub sp, sp, #1024
++** cbnz w0, .*
++** bl g
++** ...
++*/
++int test3(int z) {
++ __uint128_t x = 0;
++ int y[0x400];
++ if (z)
++ {
++ asm volatile ("" :::
++ "x19", "x20", "x21", "x22", "x23", "x24", "x25", "x26");
++ f(0, 0, 0, 0, 0, 0, 0, &y,
++ x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x,
++ x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x,
++ x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x,
++ x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x);
++ }
++ g();
++ return 1;
++}
+diff --git a/gcc/testsuite/gcc.target/aarch64/stack-check-prologue-19.c b/gcc/testsuite/gcc.target/aarch64/stack-check-prologue-19.c
+new file mode 100644
+index 00000000000..73ac3e4e4eb
+--- /dev/null
++++ b/gcc/testsuite/gcc.target/aarch64/stack-check-prologue-19.c
+@@ -0,0 +1,100 @@
++/* { dg-options "-O2 -fstack-clash-protection -fomit-frame-pointer --param stack-clash-protection-guard-size=12 -fsanitize=shadow-call-stack -ffixed-x18" } */
++/* { dg-final { check-function-bodies "**" "" } } */
++
++void f(int, ...);
++void g();
++
++/*
++** test1:
++** ...
++** str x30, \[sp\]
++** sub sp, sp, #4064
++** str xzr, \[sp\]
++** cbnz w0, .*
++** bl g
++** ...
++** str x26, \[sp, #?4128\]
++** ...
++*/
++int test1(int z) {
++ __uint128_t x = 0;
++ int y[0x400];
++ if (z)
++ {
++ asm volatile ("" :::
++ "x19", "x20", "x21", "x22", "x23", "x24", "x25", "x26");
++ f(0, 0, 0, 0, 0, 0, 0, &y,
++ x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x,
++ x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x,
++ x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x,
++ x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x,
++ x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x,
++ x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x,
++ x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x,
++ x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x,
++ x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x,
++ x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x,
++ x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x,
++ x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x,
++ x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x,
++ x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x,
++ x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x,
++ x, x, x, x, x, x, x, x, x, x, x, x, x, x);
++ }
++ g();
++ return 1;
++}
++
++/*
++** test2:
++** ...
++** str x30, \[sp\]
++** sub sp, sp, #1040
++** str xzr, \[sp\]
++** cbnz w0, .*
++** bl g
++** ...
++*/
++int test2(int z) {
++ __uint128_t x = 0;
++ int y[0x400];
++ if (z)
++ {
++ asm volatile ("" :::
++ "x19", "x20", "x21", "x22", "x23", "x24", "x25", "x26");
++ f(0, 0, 0, 0, 0, 0, 0, &y,
++ x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x,
++ x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x,
++ x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x,
++ x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x,
++ x);
++ }
++ g();
++ return 1;
++}
++
++/*
++** test3:
++** ...
++** str x30, \[sp\]
++** sub sp, sp, #1024
++** cbnz w0, .*
++** bl g
++** ...
++*/
++int test3(int z) {
++ __uint128_t x = 0;
++ int y[0x400];
++ if (z)
++ {
++ asm volatile ("" :::
++ "x19", "x20", "x21", "x22", "x23", "x24", "x25", "x26");
++ f(0, 0, 0, 0, 0, 0, 0, &y,
++ x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x,
++ x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x,
++ x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x,
++ x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x);
++ }
++ g();
++ return 1;
++}
+diff --git a/gcc/testsuite/gcc.target/aarch64/stack-check-prologue-20.c b/gcc/testsuite/gcc.target/aarch64/stack-check-prologue-20.c
+new file mode 100644
+index 00000000000..690aae8dfd5
+--- /dev/null
++++ b/gcc/testsuite/gcc.target/aarch64/stack-check-prologue-20.c
+@@ -0,0 +1,3 @@
++/* { dg-options "-O2 -fstack-protector-all -fstack-clash-protection -fomit-frame-pointer --param stack-clash-protection-guard-size=12 -fsanitize=shadow-call-stack -ffixed-x18" } */
++
++#include "stack-check-prologue-19.c"
+--
+2.34.1
+
+
+From eea1759073e09dd1aefbc9a881601ab1eebfdd18 Mon Sep 17 00:00:00 2001
+From: Richard Sandiford <richard.sandiford@arm.com>
+Date: Tue, 12 Sep 2023 16:07:19 +0100
+Subject: [PATCH 16/19] aarch64: Simplify probe of final frame allocation
+
+Previous patches ensured that the final frame allocation only needs
+a probe when the size is strictly greater than 1KiB. It's therefore
+safe to use the normal 1024 probe offset in all cases.
+
+The main motivation for doing this is to simplify the code and
+remove the number of special cases.
+
+gcc/
+ * config/aarch64/aarch64.cc (aarch64_allocate_and_probe_stack_space):
+ Always probe the residual allocation at offset 1024, asserting
+ that that is in range.
+
+gcc/testsuite/
+ * gcc.target/aarch64/stack-check-prologue-17.c: Expect the probe
+ to be at offset 1024 rather than offset 0.
+ * gcc.target/aarch64/stack-check-prologue-18.c: Likewise.
+ * gcc.target/aarch64/stack-check-prologue-19.c: Likewise.
+---
+ gcc/config/aarch64/aarch64.cc | 12 ++++--------
+ .../gcc.target/aarch64/stack-check-prologue-17.c | 2 +-
+ .../gcc.target/aarch64/stack-check-prologue-18.c | 4 ++--
+ .../gcc.target/aarch64/stack-check-prologue-19.c | 4 ++--
+ 4 files changed, 9 insertions(+), 13 deletions(-)
+
+diff --git a/gcc/config/aarch64/aarch64.cc b/gcc/config/aarch64/aarch64.cc
+index a765f92329d..37809a306f7 100644
+--- a/gcc/config/aarch64/aarch64.cc
++++ b/gcc/config/aarch64/aarch64.cc
+@@ -9838,16 +9838,12 @@ aarch64_allocate_and_probe_stack_space (rtx temp1, rtx temp2,
+ are still safe. */
+ if (residual)
+ {
+- HOST_WIDE_INT residual_probe_offset = guard_used_by_caller;
++ gcc_assert (guard_used_by_caller + byte_sp_alignment <= size);
++
+ /* If we're doing final adjustments, and we've done any full page
+ allocations then any residual needs to be probed. */
+ if (final_adjustment_p && rounded_size != 0)
+ min_probe_threshold = 0;
+- /* If doing a small final adjustment, we always probe at offset 0.
+- This is done to avoid issues when the final adjustment is smaller
+- than the probing offset. */
+- else if (final_adjustment_p && rounded_size == 0)
+- residual_probe_offset = 0;
+
+ aarch64_sub_sp (temp1, temp2, residual, frame_related_p);
+ if (residual >= min_probe_threshold)
+@@ -9858,8 +9854,8 @@ aarch64_allocate_and_probe_stack_space (rtx temp1, rtx temp2,
+ HOST_WIDE_INT_PRINT_DEC " bytes, probing will be required."
+ "\n", residual);
+
+- emit_stack_probe (plus_constant (Pmode, stack_pointer_rtx,
+- residual_probe_offset));
++ emit_stack_probe (plus_constant (Pmode, stack_pointer_rtx,
++ guard_used_by_caller));
+ emit_insn (gen_blockage ());
+ }
+ }
+diff --git a/gcc/testsuite/gcc.target/aarch64/stack-check-prologue-17.c b/gcc/testsuite/gcc.target/aarch64/stack-check-prologue-17.c
+index 0d8a25d73a2..f0ec1389771 100644
+--- a/gcc/testsuite/gcc.target/aarch64/stack-check-prologue-17.c
++++ b/gcc/testsuite/gcc.target/aarch64/stack-check-prologue-17.c
+@@ -33,7 +33,7 @@ int test1(int z) {
+ ** ...
+ ** str x30, \[sp\]
+ ** sub sp, sp, #1040
+-** str xzr, \[sp\]
++** str xzr, \[sp, #?1024\]
+ ** cbnz w0, .*
+ ** bl g
+ ** ...
+diff --git a/gcc/testsuite/gcc.target/aarch64/stack-check-prologue-18.c b/gcc/testsuite/gcc.target/aarch64/stack-check-prologue-18.c
+index 82447d20fff..6383bec5ebc 100644
+--- a/gcc/testsuite/gcc.target/aarch64/stack-check-prologue-18.c
++++ b/gcc/testsuite/gcc.target/aarch64/stack-check-prologue-18.c
+@@ -9,7 +9,7 @@ void g();
+ ** ...
+ ** str x30, \[sp\]
+ ** sub sp, sp, #4064
+-** str xzr, \[sp\]
++** str xzr, \[sp, #?1024\]
+ ** cbnz w0, .*
+ ** bl g
+ ** ...
+@@ -50,7 +50,7 @@ int test1(int z) {
+ ** ...
+ ** str x30, \[sp\]
+ ** sub sp, sp, #1040
+-** str xzr, \[sp\]
++** str xzr, \[sp, #?1024\]
+ ** cbnz w0, .*
+ ** bl g
+ ** ...
+diff --git a/gcc/testsuite/gcc.target/aarch64/stack-check-prologue-19.c b/gcc/testsuite/gcc.target/aarch64/stack-check-prologue-19.c
+index 73ac3e4e4eb..562039b5e9b 100644
+--- a/gcc/testsuite/gcc.target/aarch64/stack-check-prologue-19.c
++++ b/gcc/testsuite/gcc.target/aarch64/stack-check-prologue-19.c
+@@ -9,7 +9,7 @@ void g();
+ ** ...
+ ** str x30, \[sp\]
+ ** sub sp, sp, #4064
+-** str xzr, \[sp\]
++** str xzr, \[sp, #?1024\]
+ ** cbnz w0, .*
+ ** bl g
+ ** ...
+@@ -50,7 +50,7 @@ int test1(int z) {
+ ** ...
+ ** str x30, \[sp\]
+ ** sub sp, sp, #1040
+-** str xzr, \[sp\]
++** str xzr, \[sp, #?1024\]
+ ** cbnz w0, .*
+ ** bl g
+ ** ...
+--
+2.34.1
+
+
+From 96d85187c3b9c9a7efc2fd698c3d452e80d8aa47 Mon Sep 17 00:00:00 2001
+From: Richard Sandiford <richard.sandiford@arm.com>
+Date: Tue, 12 Sep 2023 16:07:20 +0100
+Subject: [PATCH 17/19] aarch64: Explicitly record probe registers in frame
+ info
+
+The stack frame is currently divided into three areas:
+
+A: the area above the hard frame pointer
+B: the SVE saves below the hard frame pointer
+C: the outgoing arguments
+
+If the stack frame is allocated in one chunk, the allocation needs a
+probe if the frame size is >= guard_size - 1KiB. In addition, if the
+function is not a leaf function, it must probe an address no more than
+1KiB above the outgoing SP. We ensured the second condition by
+
+(1) using single-chunk allocations for non-leaf functions only if
+ the link register save slot is within 512 bytes of the bottom
+ of the frame; and
+
+(2) using the link register save as a probe (meaning, for instance,
+ that it can't be individually shrink wrapped)
+
+If instead the stack is allocated in multiple chunks, then:
+
+* an allocation involving only the outgoing arguments (C above) requires
+ a probe if the allocation size is > 1KiB
+
+* any other allocation requires a probe if the allocation size
+ is >= guard_size - 1KiB
+
+* second and subsequent allocations require the previous allocation
+ to probe at the bottom of the allocated area, regardless of the size
+ of that previous allocation
+
+The final point means that, unlike for single allocations,
+it can be necessary to have both a non-SVE register probe and
+an SVE register probe. For example:
+
+* allocate A, probe using a non-SVE register save
+* allocate B, probe using an SVE register save
+* allocate C
+
+The non-SVE register used in this case was again the link register.
+It was previously used even if the link register save slot was some
+bytes above the bottom of the non-SVE register saves, but an earlier
+patch avoided that by putting the link register save slot first.
+
+As a belt-and-braces fix, this patch explicitly records which
+probe registers we're using and allows the non-SVE probe to be
+whichever register comes first (as for SVE).
+
+The patch also avoids unnecessary probes in sve/pcs/stack_clash_3.c.
+
+gcc/
+ * config/aarch64/aarch64.h (aarch64_frame::sve_save_and_probe)
+ (aarch64_frame::hard_fp_save_and_probe): New fields.
+ * config/aarch64/aarch64.cc (aarch64_layout_frame): Initialize them.
+ Rather than asserting that a leaf function saves LR, instead assert
+ that a leaf function saves something.
+ (aarch64_get_separate_components): Prevent the chosen probe
+ registers from being individually shrink-wrapped.
+ (aarch64_allocate_and_probe_stack_space): Remove workaround for
+ probe registers that aren't at the bottom of the previous allocation.
+
+gcc/testsuite/
+ * gcc.target/aarch64/sve/pcs/stack_clash_3.c: Avoid redundant probes.
+---
+ gcc/config/aarch64/aarch64.cc | 68 +++++++++++++++----
+ gcc/config/aarch64/aarch64.h | 8 +++
+ .../aarch64/sve/pcs/stack_clash_3.c | 6 +-
+ 3 files changed, 64 insertions(+), 18 deletions(-)
+
+diff --git a/gcc/config/aarch64/aarch64.cc b/gcc/config/aarch64/aarch64.cc
+index 37809a306f7..6c59c39a639 100644
+--- a/gcc/config/aarch64/aarch64.cc
++++ b/gcc/config/aarch64/aarch64.cc
+@@ -8471,15 +8471,11 @@ aarch64_layout_frame (void)
+ && !crtl->abi->clobbers_full_reg_p (regno))
+ frame.reg_offset[regno] = SLOT_REQUIRED;
+
+- /* With stack-clash, LR must be saved in non-leaf functions. The saving of
+- LR counts as an implicit probe which allows us to maintain the invariant
+- described in the comment at expand_prologue. */
+- gcc_assert (crtl->is_leaf
+- || maybe_ne (frame.reg_offset[R30_REGNUM], SLOT_NOT_REQUIRED));
+
+ poly_int64 offset = crtl->outgoing_args_size;
+ gcc_assert (multiple_p (offset, STACK_BOUNDARY / BITS_PER_UNIT));
+ frame.bytes_below_saved_regs = offset;
++ frame.sve_save_and_probe = INVALID_REGNUM;
+
+ /* Now assign stack slots for the registers. Start with the predicate
+ registers, since predicate LDR and STR have a relatively small
+@@ -8487,6 +8483,8 @@ aarch64_layout_frame (void)
+ for (regno = P0_REGNUM; regno <= P15_REGNUM; regno++)
+ if (known_eq (frame.reg_offset[regno], SLOT_REQUIRED))
+ {
++ if (frame.sve_save_and_probe == INVALID_REGNUM)
++ frame.sve_save_and_probe = regno;
+ frame.reg_offset[regno] = offset;
+ offset += BYTES_PER_SVE_PRED;
+ }
+@@ -8524,6 +8522,8 @@ aarch64_layout_frame (void)
+ for (regno = V0_REGNUM; regno <= V31_REGNUM; regno++)
+ if (known_eq (frame.reg_offset[regno], SLOT_REQUIRED))
+ {
++ if (frame.sve_save_and_probe == INVALID_REGNUM)
++ frame.sve_save_and_probe = regno;
+ frame.reg_offset[regno] = offset;
+ offset += vector_save_size;
+ }
+@@ -8533,10 +8533,18 @@ aarch64_layout_frame (void)
+ frame.below_hard_fp_saved_regs_size = offset - frame.bytes_below_saved_regs;
+ bool saves_below_hard_fp_p
+ = maybe_ne (frame.below_hard_fp_saved_regs_size, 0);
++ gcc_assert (!saves_below_hard_fp_p
++ || (frame.sve_save_and_probe != INVALID_REGNUM
++ && known_eq (frame.reg_offset[frame.sve_save_and_probe],
++ frame.bytes_below_saved_regs)));
++
+ frame.bytes_below_hard_fp = offset;
++ frame.hard_fp_save_and_probe = INVALID_REGNUM;
+
+ auto allocate_gpr_slot = [&](unsigned int regno)
+ {
++ if (frame.hard_fp_save_and_probe == INVALID_REGNUM)
++ frame.hard_fp_save_and_probe = regno;
+ frame.reg_offset[regno] = offset;
+ if (frame.wb_push_candidate1 == INVALID_REGNUM)
+ frame.wb_push_candidate1 = regno;
+@@ -8570,6 +8578,8 @@ aarch64_layout_frame (void)
+ for (regno = V0_REGNUM; regno <= V31_REGNUM; regno++)
+ if (known_eq (frame.reg_offset[regno], SLOT_REQUIRED))
+ {
++ if (frame.hard_fp_save_and_probe == INVALID_REGNUM)
++ frame.hard_fp_save_and_probe = regno;
+ /* If there is an alignment gap between integer and fp callee-saves,
+ allocate the last fp register to it if possible. */
+ if (regno == last_fp_reg
+@@ -8593,6 +8603,17 @@ aarch64_layout_frame (void)
+ offset = aligned_upper_bound (offset, STACK_BOUNDARY / BITS_PER_UNIT);
+
+ frame.saved_regs_size = offset - frame.bytes_below_saved_regs;
++ gcc_assert (known_eq (frame.saved_regs_size,
++ frame.below_hard_fp_saved_regs_size)
++ || (frame.hard_fp_save_and_probe != INVALID_REGNUM
++ && known_eq (frame.reg_offset[frame.hard_fp_save_and_probe],
++ frame.bytes_below_hard_fp)));
++
++ /* With stack-clash, a register must be saved in non-leaf functions.
++ The saving of the bottommost register counts as an implicit probe,
++ which allows us to maintain the invariant described in the comment
++ at expand_prologue. */
++ gcc_assert (crtl->is_leaf || maybe_ne (frame.saved_regs_size, 0));
+
+ offset += get_frame_size ();
+ offset = aligned_upper_bound (offset, STACK_BOUNDARY / BITS_PER_UNIT);
+@@ -8723,6 +8744,25 @@ aarch64_layout_frame (void)
+ frame.final_adjust = frame.bytes_below_saved_regs;
+ }
+
++ /* The frame is allocated in pieces, with each non-final piece
++ including a register save at offset 0 that acts as a probe for
++ the following piece. In addition, the save of the bottommost register
++ acts as a probe for callees and allocas. Roll back any probes that
++ aren't needed.
++
++ A probe isn't needed if it is associated with the final allocation
++ (including callees and allocas) that happens before the epilogue is
++ executed. */
++ if (crtl->is_leaf
++ && !cfun->calls_alloca
++ && known_eq (frame.final_adjust, 0))
++ {
++ if (maybe_ne (frame.sve_callee_adjust, 0))
++ frame.sve_save_and_probe = INVALID_REGNUM;
++ else
++ frame.hard_fp_save_and_probe = INVALID_REGNUM;
++ }
++
+ /* Make sure the individual adjustments add up to the full frame size. */
+ gcc_assert (known_eq (frame.initial_adjust
+ + frame.callee_adjust
+@@ -9354,13 +9394,6 @@ aarch64_get_separate_components (void)
+
+ poly_int64 offset = frame.reg_offset[regno];
+
+- /* If the register is saved in the first SVE save slot, we use
+- it as a stack probe for -fstack-clash-protection. */
+- if (flag_stack_clash_protection
+- && maybe_ne (frame.below_hard_fp_saved_regs_size, 0)
+- && known_eq (offset, frame.bytes_below_saved_regs))
+- continue;
+-
+ /* Get the offset relative to the register we'll use. */
+ if (frame_pointer_needed)
+ offset -= frame.bytes_below_hard_fp;
+@@ -9395,6 +9428,13 @@ aarch64_get_separate_components (void)
+
+ bitmap_clear_bit (components, LR_REGNUM);
+ bitmap_clear_bit (components, SP_REGNUM);
++ if (flag_stack_clash_protection)
++ {
++ if (frame.sve_save_and_probe != INVALID_REGNUM)
++ bitmap_clear_bit (components, frame.sve_save_and_probe);
++ if (frame.hard_fp_save_and_probe != INVALID_REGNUM)
++ bitmap_clear_bit (components, frame.hard_fp_save_and_probe);
++ }
+
+ return components;
+ }
+@@ -9931,8 +9971,8 @@ aarch64_epilogue_uses (int regno)
+ When probing is needed, we emit a probe at the start of the prologue
+ and every PARAM_STACK_CLASH_PROTECTION_GUARD_SIZE bytes thereafter.
+
+- We have to track how much space has been allocated and the only stores
+- to the stack we track as implicit probes are the FP/LR stores.
++ We can also use register saves as probes. These are stored in
++ sve_save_and_probe and hard_fp_save_and_probe.
+
+ For outgoing arguments we probe if the size is larger than 1KB, such that
+ the ABI specified buffer is maintained for the next callee.
+diff --git a/gcc/config/aarch64/aarch64.h b/gcc/config/aarch64/aarch64.h
+index c8becb098c8..fbfb73545ba 100644
+--- a/gcc/config/aarch64/aarch64.h
++++ b/gcc/config/aarch64/aarch64.h
+@@ -863,6 +863,14 @@ struct GTY (()) aarch64_frame
+ This is the register they should use. */
+ unsigned spare_pred_reg;
+
++ /* An SVE register that is saved below the hard frame pointer and that acts
++ as a probe for later allocations, or INVALID_REGNUM if none. */
++ unsigned sve_save_and_probe;
++
++ /* A register that is saved at the hard frame pointer and that acts
++ as a probe for later allocations, or INVALID_REGNUM if none. */
++ unsigned hard_fp_save_and_probe;
++
+ bool laid_out;
+
+ /* True if shadow call stack should be enabled for the current function. */
+diff --git a/gcc/testsuite/gcc.target/aarch64/sve/pcs/stack_clash_3.c b/gcc/testsuite/gcc.target/aarch64/sve/pcs/stack_clash_3.c
+index 3e01ec36c3a..3530a0d504b 100644
+--- a/gcc/testsuite/gcc.target/aarch64/sve/pcs/stack_clash_3.c
++++ b/gcc/testsuite/gcc.target/aarch64/sve/pcs/stack_clash_3.c
+@@ -11,11 +11,10 @@
+ ** mov x11, sp
+ ** ...
+ ** sub sp, sp, x13
+-** str p4, \[sp\]
+ ** cbz w0, [^\n]*
++** str p4, \[sp\]
+ ** ...
+ ** ptrue p0\.b, all
+-** ldr p4, \[sp\]
+ ** addvl sp, sp, #1
+ ** ldr x24, \[sp\], 32
+ ** ret
+@@ -39,13 +38,12 @@ test_1 (int n)
+ ** mov x11, sp
+ ** ...
+ ** sub sp, sp, x13
+-** str p4, \[sp\]
+ ** cbz w0, [^\n]*
++** str p4, \[sp\]
+ ** str p5, \[sp, #1, mul vl\]
+ ** str p6, \[sp, #2, mul vl\]
+ ** ...
+ ** ptrue p0\.b, all
+-** ldr p4, \[sp\]
+ ** addvl sp, sp, #1
+ ** ldr x24, \[sp\], 32
+ ** ret
+--
+2.34.1
+
+
+From 56df065080950bb30dda9c260f71be54269bdda5 Mon Sep 17 00:00:00 2001
+From: Richard Sandiford <richard.sandiford@arm.com>
+Date: Tue, 12 Sep 2023 16:07:20 +0100
+Subject: [PATCH 18/19] aarch64: Remove below_hard_fp_saved_regs_size
+
+After previous patches, it's no longer necessary to store
+saved_regs_size and below_hard_fp_saved_regs_size in the frame info.
+All measurements instead use the top or bottom of the frame as
+reference points.
+
+gcc/
+ * config/aarch64/aarch64.h (aarch64_frame::saved_regs_size)
+ (aarch64_frame::below_hard_fp_saved_regs_size): Delete.
+ * config/aarch64/aarch64.cc (aarch64_layout_frame): Update accordingly.
+---
+ gcc/config/aarch64/aarch64.cc | 45 ++++++++++++++++-------------------
+ gcc/config/aarch64/aarch64.h | 7 ------
+ 2 files changed, 21 insertions(+), 31 deletions(-)
+
+diff --git a/gcc/config/aarch64/aarch64.cc b/gcc/config/aarch64/aarch64.cc
+index 6c59c39a639..b95e805a8cc 100644
+--- a/gcc/config/aarch64/aarch64.cc
++++ b/gcc/config/aarch64/aarch64.cc
+@@ -8530,9 +8530,8 @@ aarch64_layout_frame (void)
+
+ /* OFFSET is now the offset of the hard frame pointer from the bottom
+ of the callee save area. */
+- frame.below_hard_fp_saved_regs_size = offset - frame.bytes_below_saved_regs;
+- bool saves_below_hard_fp_p
+- = maybe_ne (frame.below_hard_fp_saved_regs_size, 0);
++ auto below_hard_fp_saved_regs_size = offset - frame.bytes_below_saved_regs;
++ bool saves_below_hard_fp_p = maybe_ne (below_hard_fp_saved_regs_size, 0);
+ gcc_assert (!saves_below_hard_fp_p
+ || (frame.sve_save_and_probe != INVALID_REGNUM
+ && known_eq (frame.reg_offset[frame.sve_save_and_probe],
+@@ -8602,9 +8601,8 @@ aarch64_layout_frame (void)
+
+ offset = aligned_upper_bound (offset, STACK_BOUNDARY / BITS_PER_UNIT);
+
+- frame.saved_regs_size = offset - frame.bytes_below_saved_regs;
+- gcc_assert (known_eq (frame.saved_regs_size,
+- frame.below_hard_fp_saved_regs_size)
++ auto saved_regs_size = offset - frame.bytes_below_saved_regs;
++ gcc_assert (known_eq (saved_regs_size, below_hard_fp_saved_regs_size)
+ || (frame.hard_fp_save_and_probe != INVALID_REGNUM
+ && known_eq (frame.reg_offset[frame.hard_fp_save_and_probe],
+ frame.bytes_below_hard_fp)));
+@@ -8613,7 +8611,7 @@ aarch64_layout_frame (void)
+ The saving of the bottommost register counts as an implicit probe,
+ which allows us to maintain the invariant described in the comment
+ at expand_prologue. */
+- gcc_assert (crtl->is_leaf || maybe_ne (frame.saved_regs_size, 0));
++ gcc_assert (crtl->is_leaf || maybe_ne (saved_regs_size, 0));
+
+ offset += get_frame_size ();
+ offset = aligned_upper_bound (offset, STACK_BOUNDARY / BITS_PER_UNIT);
+@@ -8670,7 +8668,7 @@ aarch64_layout_frame (void)
+
+ HOST_WIDE_INT const_size, const_below_saved_regs, const_above_fp;
+ HOST_WIDE_INT const_saved_regs_size;
+- if (known_eq (frame.saved_regs_size, 0))
++ if (known_eq (saved_regs_size, 0))
+ frame.initial_adjust = frame.frame_size;
+ else if (frame.frame_size.is_constant (&const_size)
+ && const_size < max_push_offset
+@@ -8683,7 +8681,7 @@ aarch64_layout_frame (void)
+ frame.callee_adjust = const_size;
+ }
+ else if (frame.bytes_below_saved_regs.is_constant (&const_below_saved_regs)
+- && frame.saved_regs_size.is_constant (&const_saved_regs_size)
++ && saved_regs_size.is_constant (&const_saved_regs_size)
+ && const_below_saved_regs + const_saved_regs_size < 512
+ /* We could handle this case even with data below the saved
+ registers, provided that that data left us with valid offsets
+@@ -8702,8 +8700,7 @@ aarch64_layout_frame (void)
+ frame.initial_adjust = frame.frame_size;
+ }
+ else if (saves_below_hard_fp_p
+- && known_eq (frame.saved_regs_size,
+- frame.below_hard_fp_saved_regs_size))
++ && known_eq (saved_regs_size, below_hard_fp_saved_regs_size))
+ {
+ /* Frame in which all saves are SVE saves:
+
+@@ -8725,7 +8722,7 @@ aarch64_layout_frame (void)
+ [save SVE registers relative to SP]
+ sub sp, sp, bytes_below_saved_regs */
+ frame.callee_adjust = const_above_fp;
+- frame.sve_callee_adjust = frame.below_hard_fp_saved_regs_size;
++ frame.sve_callee_adjust = below_hard_fp_saved_regs_size;
+ frame.final_adjust = frame.bytes_below_saved_regs;
+ }
+ else
+@@ -8740,7 +8737,7 @@ aarch64_layout_frame (void)
+ [save SVE registers relative to SP]
+ sub sp, sp, bytes_below_saved_regs */
+ frame.initial_adjust = frame.bytes_above_hard_fp;
+- frame.sve_callee_adjust = frame.below_hard_fp_saved_regs_size;
++ frame.sve_callee_adjust = below_hard_fp_saved_regs_size;
+ frame.final_adjust = frame.bytes_below_saved_regs;
+ }
+
+@@ -9936,17 +9933,17 @@ aarch64_epilogue_uses (int regno)
+ | local variables | <-- frame_pointer_rtx
+ | |
+ +-------------------------------+
+- | padding | \
+- +-------------------------------+ |
+- | callee-saved registers | | frame.saved_regs_size
+- +-------------------------------+ |
+- | LR' | |
+- +-------------------------------+ |
+- | FP' | |
+- +-------------------------------+ |<- hard_frame_pointer_rtx (aligned)
+- | SVE vector registers | | \
+- +-------------------------------+ | | below_hard_fp_saved_regs_size
+- | SVE predicate registers | / /
++ | padding |
++ +-------------------------------+
++ | callee-saved registers |
++ +-------------------------------+
++ | LR' |
++ +-------------------------------+
++ | FP' |
++ +-------------------------------+ <-- hard_frame_pointer_rtx (aligned)
++ | SVE vector registers |
++ +-------------------------------+
++ | SVE predicate registers |
+ +-------------------------------+
+ | dynamic allocation |
+ +-------------------------------+
+diff --git a/gcc/config/aarch64/aarch64.h b/gcc/config/aarch64/aarch64.h
+index fbfb73545ba..cfeaf4657ab 100644
+--- a/gcc/config/aarch64/aarch64.h
++++ b/gcc/config/aarch64/aarch64.h
+@@ -777,18 +777,11 @@ struct GTY (()) aarch64_frame
+ STACK_BOUNDARY. */
+ HOST_WIDE_INT saved_varargs_size;
+
+- /* The size of the callee-save registers with a slot in REG_OFFSET. */
+- poly_int64 saved_regs_size;
+-
+ /* The number of bytes between the bottom of the static frame (the bottom
+ of the outgoing arguments) and the bottom of the register save area.
+ This value is always a multiple of STACK_BOUNDARY. */
+ poly_int64 bytes_below_saved_regs;
+
+- /* The size of the callee-save registers with a slot in REG_OFFSET that
+- are saved below the hard frame pointer. */
+- poly_int64 below_hard_fp_saved_regs_size;
+-
+ /* The number of bytes between the bottom of the static frame (the bottom
+ of the outgoing arguments) and the hard frame pointer. This value is
+ always a multiple of STACK_BOUNDARY. */
+--
+2.34.1
+
+
+From b96e66fd4ef3e36983969fb8cdd1956f551a074b Mon Sep 17 00:00:00 2001
+From: Richard Sandiford <richard.sandiford@arm.com>
+Date: Tue, 12 Sep 2023 16:07:21 +0100
+Subject: [PATCH 19/19] aarch64: Make stack smash canary protect saved
+ registers
+
+AArch64 normally puts the saved registers near the bottom of the frame,
+immediately above any dynamic allocations. But this means that a
+stack-smash attack on those dynamic allocations could overwrite the
+saved registers without needing to reach as far as the stack smash
+canary.
+
+The same thing could also happen for variable-sized arguments that are
+passed by value, since those are allocated before a call and popped on
+return.
+
+This patch avoids that by putting the locals (and thus the canary) below
+the saved registers when stack smash protection is active.
+
+The patch fixes CVE-2023-4039.
+
+gcc/
+ * config/aarch64/aarch64.cc (aarch64_save_regs_above_locals_p):
+ New function.
+ (aarch64_layout_frame): Use it to decide whether locals should
+ go above or below the saved registers.
+ (aarch64_expand_prologue): Update stack layout comment.
+ Emit a stack tie after the final adjustment.
+
+gcc/testsuite/
+ * gcc.target/aarch64/stack-protector-8.c: New test.
+ * gcc.target/aarch64/stack-protector-9.c: Likewise.
+---
+ gcc/config/aarch64/aarch64.cc | 46 +++++++--
+ .../gcc.target/aarch64/stack-protector-8.c | 95 +++++++++++++++++++
+ .../gcc.target/aarch64/stack-protector-9.c | 33 +++++++
+ 3 files changed, 168 insertions(+), 6 deletions(-)
+ create mode 100644 gcc/testsuite/gcc.target/aarch64/stack-protector-8.c
+ create mode 100644 gcc/testsuite/gcc.target/aarch64/stack-protector-9.c
+
+diff --git a/gcc/config/aarch64/aarch64.cc b/gcc/config/aarch64/aarch64.cc
+index b95e805a8cc..389c0e29353 100644
+--- a/gcc/config/aarch64/aarch64.cc
++++ b/gcc/config/aarch64/aarch64.cc
+@@ -8394,6 +8394,20 @@ aarch64_needs_frame_chain (void)
+ return aarch64_use_frame_pointer;
+ }
+
++/* Return true if the current function should save registers above
++ the locals area, rather than below it. */
++
++static bool
++aarch64_save_regs_above_locals_p ()
++{
++ /* When using stack smash protection, make sure that the canary slot
++ comes between the locals and the saved registers. Otherwise,
++ it would be possible for a carefully sized smash attack to change
++ the saved registers (particularly LR and FP) without reaching the
++ canary. */
++ return crtl->stack_protect_guard;
++}
++
+ /* Mark the registers that need to be saved by the callee and calculate
+ the size of the callee-saved registers area and frame record (both FP
+ and LR may be omitted). */
+@@ -8405,6 +8419,7 @@ aarch64_layout_frame (void)
+ poly_int64 vector_save_size = GET_MODE_SIZE (vector_save_mode);
+ bool frame_related_fp_reg_p = false;
+ aarch64_frame &frame = cfun->machine->frame;
++ poly_int64 top_of_locals = -1;
+
+ frame.emit_frame_chain = aarch64_needs_frame_chain ();
+
+@@ -8471,9 +8486,16 @@ aarch64_layout_frame (void)
+ && !crtl->abi->clobbers_full_reg_p (regno))
+ frame.reg_offset[regno] = SLOT_REQUIRED;
+
++ bool regs_at_top_p = aarch64_save_regs_above_locals_p ();
+
+ poly_int64 offset = crtl->outgoing_args_size;
+ gcc_assert (multiple_p (offset, STACK_BOUNDARY / BITS_PER_UNIT));
++ if (regs_at_top_p)
++ {
++ offset += get_frame_size ();
++ offset = aligned_upper_bound (offset, STACK_BOUNDARY / BITS_PER_UNIT);
++ top_of_locals = offset;
++ }
+ frame.bytes_below_saved_regs = offset;
+ frame.sve_save_and_probe = INVALID_REGNUM;
+
+@@ -8613,15 +8635,18 @@ aarch64_layout_frame (void)
+ at expand_prologue. */
+ gcc_assert (crtl->is_leaf || maybe_ne (saved_regs_size, 0));
+
+- offset += get_frame_size ();
+- offset = aligned_upper_bound (offset, STACK_BOUNDARY / BITS_PER_UNIT);
+- auto top_of_locals = offset;
+-
++ if (!regs_at_top_p)
++ {
++ offset += get_frame_size ();
++ offset = aligned_upper_bound (offset, STACK_BOUNDARY / BITS_PER_UNIT);
++ top_of_locals = offset;
++ }
+ offset += frame.saved_varargs_size;
+ gcc_assert (multiple_p (offset, STACK_BOUNDARY / BITS_PER_UNIT));
+ frame.frame_size = offset;
+
+ frame.bytes_above_hard_fp = frame.frame_size - frame.bytes_below_hard_fp;
++ gcc_assert (known_ge (top_of_locals, 0));
+ frame.bytes_above_locals = frame.frame_size - top_of_locals;
+
+ frame.initial_adjust = 0;
+@@ -9930,10 +9955,10 @@ aarch64_epilogue_uses (int regno)
+ | for register varargs |
+ | |
+ +-------------------------------+
+- | local variables | <-- frame_pointer_rtx
++ | local variables (1) | <-- frame_pointer_rtx
+ | |
+ +-------------------------------+
+- | padding |
++ | padding (1) |
+ +-------------------------------+
+ | callee-saved registers |
+ +-------------------------------+
+@@ -9945,6 +9970,10 @@ aarch64_epilogue_uses (int regno)
+ +-------------------------------+
+ | SVE predicate registers |
+ +-------------------------------+
++ | local variables (2) |
++ +-------------------------------+
++ | padding (2) |
++ +-------------------------------+
+ | dynamic allocation |
+ +-------------------------------+
+ | padding |
+@@ -9954,6 +9983,9 @@ aarch64_epilogue_uses (int regno)
+ +-------------------------------+
+ | | <-- stack_pointer_rtx (aligned)
+
++ The regions marked (1) and (2) are mutually exclusive. (2) is used
++ when aarch64_save_regs_above_locals_p is true.
++
+ Dynamic stack allocations via alloca() decrease stack_pointer_rtx
+ but leave frame_pointer_rtx and hard_frame_pointer_rtx
+ unchanged.
+@@ -10149,6 +10181,8 @@ aarch64_expand_prologue (void)
+ gcc_assert (known_eq (bytes_below_sp, final_adjust));
+ aarch64_allocate_and_probe_stack_space (tmp1_rtx, tmp0_rtx, final_adjust,
+ !frame_pointer_needed, true);
++ if (emit_frame_chain && maybe_ne (final_adjust, 0))
++ emit_insn (gen_stack_tie (stack_pointer_rtx, hard_frame_pointer_rtx));
+ }
+
+ /* Return TRUE if we can use a simple_return insn.
+diff --git a/gcc/testsuite/gcc.target/aarch64/stack-protector-8.c b/gcc/testsuite/gcc.target/aarch64/stack-protector-8.c
+new file mode 100644
+index 00000000000..e71d820e365
+--- /dev/null
++++ b/gcc/testsuite/gcc.target/aarch64/stack-protector-8.c
+@@ -0,0 +1,95 @@
++/* { dg-options " -O -fstack-protector-strong -mstack-protector-guard=sysreg -mstack-protector-guard-reg=tpidr2_el0 -mstack-protector-guard-offset=16" } */
++/* { dg-final { check-function-bodies "**" "" } } */
++
++void g(void *);
++__SVBool_t *h(void *);
++
++/*
++** test1:
++** sub sp, sp, #288
++** stp x29, x30, \[sp, #?272\]
++** add x29, sp, #?272
++** mrs (x[0-9]+), tpidr2_el0
++** ldr (x[0-9]+), \[\1, #?16\]
++** str \2, \[sp, #?264\]
++** mov \2, #?0
++** add x0, sp, #?8
++** bl g
++** ...
++** mrs .*
++** ...
++** bne .*
++** ...
++** ldp x29, x30, \[sp, #?272\]
++** add sp, sp, #?288
++** ret
++** bl __stack_chk_fail
++*/
++int test1() {
++ int y[0x40];
++ g(y);
++ return 1;
++}
++
++/*
++** test2:
++** stp x29, x30, \[sp, #?-16\]!
++** mov x29, sp
++** sub sp, sp, #1040
++** mrs (x[0-9]+), tpidr2_el0
++** ldr (x[0-9]+), \[\1, #?16\]
++** str \2, \[sp, #?1032\]
++** mov \2, #?0
++** add x0, sp, #?8
++** bl g
++** ...
++** mrs .*
++** ...
++** bne .*
++** ...
++** add sp, sp, #?1040
++** ldp x29, x30, \[sp\], #?16
++** ret
++** bl __stack_chk_fail
++*/
++int test2() {
++ int y[0x100];
++ g(y);
++ return 1;
++}
++
++#pragma GCC target "+sve"
++
++/*
++** test3:
++** stp x29, x30, \[sp, #?-16\]!
++** mov x29, sp
++** addvl sp, sp, #-18
++** ...
++** str p4, \[sp\]
++** ...
++** sub sp, sp, #272
++** mrs (x[0-9]+), tpidr2_el0
++** ldr (x[0-9]+), \[\1, #?16\]
++** str \2, \[sp, #?264\]
++** mov \2, #?0
++** add x0, sp, #?8
++** bl h
++** ...
++** mrs .*
++** ...
++** bne .*
++** ...
++** add sp, sp, #?272
++** ...
++** ldr p4, \[sp\]
++** ...
++** addvl sp, sp, #18
++** ldp x29, x30, \[sp\], #?16
++** ret
++** bl __stack_chk_fail
++*/
++__SVBool_t test3() {
++ int y[0x40];
++ return *h(y);
++}
+diff --git a/gcc/testsuite/gcc.target/aarch64/stack-protector-9.c b/gcc/testsuite/gcc.target/aarch64/stack-protector-9.c
+new file mode 100644
+index 00000000000..58f322aa480
+--- /dev/null
++++ b/gcc/testsuite/gcc.target/aarch64/stack-protector-9.c
+@@ -0,0 +1,33 @@
++/* { dg-options "-O2 -mcpu=neoverse-v1 -fstack-protector-all" } */
++/* { dg-final { check-function-bodies "**" "" } } */
++
++/*
++** main:
++** ...
++** stp x29, x30, \[sp, #?-[0-9]+\]!
++** ...
++** sub sp, sp, #[0-9]+
++** ...
++** str x[0-9]+, \[x29, #?-8\]
++** ...
++*/
++int f(const char *);
++void g(void *);
++int main(int argc, char* argv[])
++{
++ int a;
++ int b;
++ char c[2+f(argv[1])];
++ int d[0x100];
++ char y;
++
++ y=42; a=4; b=10;
++ c[0] = 'h'; c[1] = '\0';
++
++ c[f(argv[2])] = '\0';
++
++ __builtin_printf("%d %d\n%s\n", a, b, c);
++ g(d);
++
++ return 0;
++}
+--
+2.34.1
+
diff --git a/meta/recipes-devtools/gcc/gcc_12.1.bb b/meta/recipes-devtools/gcc/gcc_13.2.bb
index 255fe552bd..255fe552bd 100644
--- a/meta/recipes-devtools/gcc/gcc_12.1.bb
+++ b/meta/recipes-devtools/gcc/gcc_13.2.bb
diff --git a/meta/recipes-devtools/gcc/libgcc-common.inc b/meta/recipes-devtools/gcc/libgcc-common.inc
index d48dc8b823..d9084af51a 100644
--- a/meta/recipes-devtools/gcc/libgcc-common.inc
+++ b/meta/recipes-devtools/gcc/libgcc-common.inc
@@ -10,8 +10,7 @@ do_configure () {
mkdir -p ${B}/${TARGET_SYS}/${BPN}/
cd ${B}/${BPN}
chmod a+x ${S}/${BPN}/configure
- relpath=${@os.path.relpath("${S}/${BPN}", "${B}/${BPN}")}
- $relpath/configure ${CONFIGUREOPTS} ${EXTRA_OECONF}
+ ${S}/${BPN}/configure ${CONFIGUREOPTS} ${EXTRA_OECONF}
}
EXTRACONFFUNCS += "extract_stashed_builddir"
do_configure[depends] += "${COMPILERDEP}"
@@ -45,15 +44,19 @@ do_install () {
}
do_install:append:libc-baremetal () {
- rmdir ${D}${base_libdir}
+ if [ "${base_libdir}" != "${libdir}" ]; then
+ rmdir ${D}${base_libdir}
+ fi
}
do_install:append:libc-newlib () {
- rmdir ${D}${base_libdir}
+ if [ "${base_libdir}" != "${libdir}" ]; then
+ rmdir ${D}${base_libdir}
+ fi
}
# No rpm package is actually created but -dev depends on it, avoid dnf error
-RDEPENDS:${PN}-dev:libc-baremetal = ""
-RDEPENDS:${PN}-dev:libc-newlib = ""
+DEV_PKG_DEPENDENCY:libc-baremetal = ""
+DEV_PKG_DEPENDENCY:libc-newlib = ""
BBCLASSEXTEND = "nativesdk"
diff --git a/meta/recipes-devtools/gcc/libgcc-initial_12.1.bb b/meta/recipes-devtools/gcc/libgcc-initial_13.2.bb
index a259082b47..a259082b47 100644
--- a/meta/recipes-devtools/gcc/libgcc-initial_12.1.bb
+++ b/meta/recipes-devtools/gcc/libgcc-initial_13.2.bb
diff --git a/meta/recipes-devtools/gcc/libgcc_12.1.bb b/meta/recipes-devtools/gcc/libgcc_12.1.bb
deleted file mode 100644
index f88963b0a4..0000000000
--- a/meta/recipes-devtools/gcc/libgcc_12.1.bb
+++ /dev/null
@@ -1,5 +0,0 @@
-require recipes-devtools/gcc/gcc-${PV}.inc
-require libgcc.inc
-
-# Building with thumb enabled on armv6t fails
-ARM_INSTRUCTION_SET:armv6 = "arm"
diff --git a/meta/recipes-devtools/gcc/libgcc_13.2.bb b/meta/recipes-devtools/gcc/libgcc_13.2.bb
new file mode 100644
index 0000000000..fdcd6cc0da
--- /dev/null
+++ b/meta/recipes-devtools/gcc/libgcc_13.2.bb
@@ -0,0 +1,7 @@
+require recipes-devtools/gcc/gcc-${PV}.inc
+require libgcc.inc
+
+LDFLAGS += "-fuse-ld=bfd"
+
+# Building with thumb enabled on armv6t fails
+ARM_INSTRUCTION_SET:armv6 = "arm"
diff --git a/meta/recipes-devtools/gcc/libgfortran_12.1.bb b/meta/recipes-devtools/gcc/libgfortran_13.2.bb
index 71dd8b4bdc..71dd8b4bdc 100644
--- a/meta/recipes-devtools/gcc/libgfortran_12.1.bb
+++ b/meta/recipes-devtools/gcc/libgfortran_13.2.bb
diff --git a/meta/recipes-devtools/gdb/gdb-common.inc b/meta/recipes-devtools/gdb/gdb-common.inc
index 925b0c2f80..ea1fc27a61 100644
--- a/meta/recipes-devtools/gdb/gdb-common.inc
+++ b/meta/recipes-devtools/gdb/gdb-common.inc
@@ -2,7 +2,7 @@ SUMMARY = "GNU debugger"
HOMEPAGE = "http://www.gnu.org/software/gdb/"
DESCRIPTION = "GDB, the GNU Project debugger, allows you to see what is going on inside another program while it executes -- or what another program was doing at the moment it crashed."
SECTION = "devel"
-DEPENDS = "expat gmp zlib ncurses virtual/libiconv ${LTTNGUST} bison-native"
+DEPENDS = "expat gmp mpfr zlib ncurses virtual/libiconv ${LTTNGUST} bison-native"
LTTNGUST = "lttng-ust"
LTTNGUST:arc = ""
@@ -30,13 +30,14 @@ EXTRA_OECONF = "--disable-gdbtk --disable-x --disable-werror \
--with-libgmp-prefix=${STAGING_EXECPREFIXDIR} \
"
-PACKAGECONFIG ??= "readline ${@bb.utils.filter('DISTRO_FEATURES', 'debuginfod', d)}"
+PACKAGECONFIG ??= "readline ${@bb.utils.filter('DISTRO_FEATURES', 'debuginfod', d)} python \
+ ${@bb.utils.contains('DISTRO_FEATURES', 'minidebuginfo', 'xz', '', d)} \
+ "
# Use --without-system-readline to compile with readline 5.
PACKAGECONFIG[readline] = "--with-system-readline,--without-system-readline,readline"
-PACKAGECONFIG[python] = "--with-python=${WORKDIR}/python,--without-python,python3,python3 python3-codecs"
+PACKAGECONFIG[python] = "--with-python=${WORKDIR}/python,--without-python,python3,python3-codecs"
PACKAGECONFIG[babeltrace] = "--with-babeltrace,--without-babeltrace,babeltrace"
-# ncurses is already a hard DEPENDS, but would be added here if it weren't
-PACKAGECONFIG[tui] = "--enable-tui,--disable-tui"
+PACKAGECONFIG[tui] = "--enable-tui,--disable-tui,,ncurses-terminfo-base"
PACKAGECONFIG[xz] = "--with-lzma --with-liblzma-prefix=${STAGING_DIR_HOST},--without-lzma,xz"
PACKAGECONFIG[debuginfod] = "--with-debuginfod, --without-debuginfod, elfutils"
@@ -58,6 +59,7 @@ do_install:append() {
rm -rf ${D}${includedir}
rm -rf ${D}${datadir}/locale
rm -f ${D}${infodir}/bfd.info
+ rm -f ${D}${infodir}/sframe-spec.info
}
RRECOMMENDS:gdb:append:linux = " glibc-thread-db "
diff --git a/meta/recipes-devtools/gdb/gdb-cross-canadian.inc b/meta/recipes-devtools/gdb/gdb-cross-canadian.inc
index 8e926e81e1..058ffbc9c5 100644
--- a/meta/recipes-devtools/gdb/gdb-cross-canadian.inc
+++ b/meta/recipes-devtools/gdb/gdb-cross-canadian.inc
@@ -6,8 +6,8 @@ SUMMARY = "GNU debugger (cross-canadian gdb for ${TARGET_ARCH} target)"
PN = "gdb-cross-canadian-${TRANSLATED_TARGET_ARCH}"
BPN = "gdb"
-DEPENDS = "nativesdk-ncurses nativesdk-expat nativesdk-gettext nativesdk-gmp \
- virtual/${HOST_PREFIX}gcc-crosssdk virtual/${HOST_PREFIX}binutils-crosssdk virtual/nativesdk-libc"
+DEPENDS = "nativesdk-ncurses nativesdk-expat nativesdk-gettext nativesdk-gmp nativesdk-mpfr \
+ virtual/${HOST_PREFIX}gcc virtual/${HOST_PREFIX}binutils virtual/nativesdk-libc"
GDBPROPREFIX = "--program-prefix='${TARGET_PREFIX}'"
diff --git a/meta/recipes-devtools/gdb/gdb-cross-canadian_12.1.bb b/meta/recipes-devtools/gdb/gdb-cross-canadian_14.2.bb
index 4ab2b7156d..4ab2b7156d 100644
--- a/meta/recipes-devtools/gdb/gdb-cross-canadian_12.1.bb
+++ b/meta/recipes-devtools/gdb/gdb-cross-canadian_14.2.bb
diff --git a/meta/recipes-devtools/gdb/gdb-cross.inc b/meta/recipes-devtools/gdb/gdb-cross.inc
index 9f9675c1b3..399f4bba97 100644
--- a/meta/recipes-devtools/gdb/gdb-cross.inc
+++ b/meta/recipes-devtools/gdb/gdb-cross.inc
@@ -1,6 +1,6 @@
require gdb-common.inc
-DEPENDS = "expat-native gmp-native ncurses-native flex-native bison-native"
+DEPENDS = "expat-native gmp-native mpfr-native ncurses-native flex-native bison-native"
inherit python3native pkgconfig
diff --git a/meta/recipes-devtools/gdb/gdb-cross_12.1.bb b/meta/recipes-devtools/gdb/gdb-cross_14.2.bb
index 3b654a2f0d..3b654a2f0d 100644
--- a/meta/recipes-devtools/gdb/gdb-cross_12.1.bb
+++ b/meta/recipes-devtools/gdb/gdb-cross_14.2.bb
diff --git a/meta/recipes-devtools/gdb/gdb.inc b/meta/recipes-devtools/gdb/gdb.inc
index 3b569fd40e..81ac441462 100644
--- a/meta/recipes-devtools/gdb/gdb.inc
+++ b/meta/recipes-devtools/gdb/gdb.inc
@@ -5,14 +5,13 @@ LIC_FILES_CHKSUM = "file://COPYING;md5=59530bdf33659b29e73d4adb9f9f6552 \
file://COPYING.LIB;md5=9f604d8a4f8e74f4f5140845a21b6674"
SRC_URI = "${GNU_MIRROR}/gdb/gdb-${PV}.tar.xz \
- file://0001-make-man-install-relative-to-DESTDIR.patch \
- file://0002-mips-linux-nat-Define-_ABIO32-if-not-defined.patch \
- file://0003-ppc-ptrace-Define-pt_regs-uapi_pt_regs-on-GLIBC-syst.patch \
- file://0004-Dont-disable-libreadline.a-when-using-disable-static.patch \
- file://0005-use-asm-sgidefs.h.patch \
- file://0006-Change-order-of-CFLAGS.patch \
- file://0007-resolve-restrict-keyword-conflict.patch \
- file://0008-Fix-invalid-sigprocmask-call.patch \
- file://0009-gdbserver-ctrl-c-handling.patch \
+ file://0001-mips-linux-nat-Define-_ABIO32-if-not-defined.patch \
+ file://0002-ppc-ptrace-Define-pt_regs-uapi_pt_regs-on-GLIBC-syst.patch \
+ file://0003-Dont-disable-libreadline.a-when-using-disable-static.patch \
+ file://0004-use-asm-sgidefs.h.patch \
+ file://0005-Change-order-of-CFLAGS.patch \
+ file://0006-resolve-restrict-keyword-conflict.patch \
+ file://0007-Fix-invalid-sigprocmask-call.patch \
+ file://0008-Define-alignof-using-_Alignof-when-using-C11-or-newe.patch \
"
-SRC_URI[sha256sum] = "0e1793bf8f2b54d53f46dea84ccfd446f48f81b297b28c4f7fc017b818d69fed"
+SRC_URI[sha256sum] = "2d4dd8061d8ded12b6c63f55e45344881e8226105f4d2a9b234040efa5ce7772"
diff --git a/meta/recipes-devtools/gdb/gdb/0001-make-man-install-relative-to-DESTDIR.patch b/meta/recipes-devtools/gdb/gdb/0001-make-man-install-relative-to-DESTDIR.patch
deleted file mode 100644
index 16d6cf196f..0000000000
--- a/meta/recipes-devtools/gdb/gdb/0001-make-man-install-relative-to-DESTDIR.patch
+++ /dev/null
@@ -1,28 +0,0 @@
-From 8eca28eddcda4ce8a345ca031f43ff1ed6f37089 Mon Sep 17 00:00:00 2001
-From: Khem Raj <raj.khem@gmail.com>
-Date: Mon, 2 Mar 2015 02:27:55 +0000
-Subject: [PATCH 1/9] make man install relative to DESTDIR
-
-Upstream-Status: Pending
-
-Signed-off-by: Khem Raj <raj.khem@gmail.com>
----
- sim/common/Make-common.in | 2 +-
- 1 file changed, 1 insertion(+), 1 deletion(-)
-
-diff --git a/sim/common/Make-common.in b/sim/common/Make-common.in
-index 74e5dad3049..9e95c224ba4 100644
---- a/sim/common/Make-common.in
-+++ b/sim/common/Make-common.in
-@@ -70,7 +70,7 @@ tooldir = $(libdir)/$(target_alias)
- datadir = @datadir@
- datarootdir = @datarootdir@
- mandir = @mandir@
--man1dir = $(mandir)/man1
-+man1dir = $(DESTDIR)$(mandir)/man1
- infodir = @infodir@
- includedir = @includedir@
-
---
-2.36.1
-
diff --git a/meta/recipes-devtools/gdb/gdb/0001-mips-linux-nat-Define-_ABIO32-if-not-defined.patch b/meta/recipes-devtools/gdb/gdb/0001-mips-linux-nat-Define-_ABIO32-if-not-defined.patch
new file mode 100644
index 0000000000..86f6e2642d
--- /dev/null
+++ b/meta/recipes-devtools/gdb/gdb/0001-mips-linux-nat-Define-_ABIO32-if-not-defined.patch
@@ -0,0 +1,32 @@
+From ff77aebf533758f0b6f7d22b272d32e08ed06e45 Mon Sep 17 00:00:00 2001
+From: Khem Raj <raj.khem@gmail.com>
+Date: Wed, 23 Mar 2016 06:30:09 +0000
+Subject: [PATCH] mips-linux-nat: Define _ABIO32 if not defined
+
+This helps building gdb on mips64 on musl, since
+musl does not provide sgidefs.h this define is
+only defined when GCC is using o32 ABI, in that
+case gcc emits it as built-in define and hence
+it works ok for mips32
+
+Upstream-Status: Pending
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+---
+ gdb/mips-linux-nat.c | 4 ++++
+ 1 file changed, 4 insertions(+)
+
+diff --git a/gdb/mips-linux-nat.c b/gdb/mips-linux-nat.c
+index 8a7cc95f2a4..bfd1c72cba6 100644
+--- a/gdb/mips-linux-nat.c
++++ b/gdb/mips-linux-nat.c
+@@ -41,6 +41,10 @@
+ #ifndef PTRACE_GET_THREAD_AREA
+ #define PTRACE_GET_THREAD_AREA 25
+ #endif
++/* musl does not define and relies on compiler built-in macros for it */
++#ifndef _ABIO32
++#define _ABIO32 1
++#endif
+
+ class mips_linux_nat_target final : public linux_nat_trad_target
+ {
diff --git a/meta/recipes-devtools/gdb/gdb/0002-mips-linux-nat-Define-_ABIO32-if-not-defined.patch b/meta/recipes-devtools/gdb/gdb/0002-mips-linux-nat-Define-_ABIO32-if-not-defined.patch
deleted file mode 100644
index 8d263de896..0000000000
--- a/meta/recipes-devtools/gdb/gdb/0002-mips-linux-nat-Define-_ABIO32-if-not-defined.patch
+++ /dev/null
@@ -1,35 +0,0 @@
-From 37d3afd2eaa95c89ad7cb5d0079b017752e4d0ea Mon Sep 17 00:00:00 2001
-From: Khem Raj <raj.khem@gmail.com>
-Date: Wed, 23 Mar 2016 06:30:09 +0000
-Subject: [PATCH 2/9] mips-linux-nat: Define _ABIO32 if not defined
-
-This helps building gdb on mips64 on musl, since
-musl does not provide sgidefs.h this define is
-only defined when GCC is using o32 ABI, in that
-case gcc emits it as built-in define and hence
-it works ok for mips32
-
-Upstream-Status: Pending
-Signed-off-by: Khem Raj <raj.khem@gmail.com>
----
- gdb/mips-linux-nat.c | 4 ++++
- 1 file changed, 4 insertions(+)
-
-diff --git a/gdb/mips-linux-nat.c b/gdb/mips-linux-nat.c
-index 20e12b6889e..6adc61235aa 100644
---- a/gdb/mips-linux-nat.c
-+++ b/gdb/mips-linux-nat.c
-@@ -41,6 +41,10 @@
- #ifndef PTRACE_GET_THREAD_AREA
- #define PTRACE_GET_THREAD_AREA 25
- #endif
-+/* musl does not define and relies on compiler built-in macros for it */
-+#ifndef _ABIO32
-+#define _ABIO32 1
-+#endif
-
- class mips_linux_nat_target final : public linux_nat_trad_target
- {
---
-2.36.1
-
diff --git a/meta/recipes-devtools/gdb/gdb/0002-ppc-ptrace-Define-pt_regs-uapi_pt_regs-on-GLIBC-syst.patch b/meta/recipes-devtools/gdb/gdb/0002-ppc-ptrace-Define-pt_regs-uapi_pt_regs-on-GLIBC-syst.patch
new file mode 100644
index 0000000000..897cfd0df9
--- /dev/null
+++ b/meta/recipes-devtools/gdb/gdb/0002-ppc-ptrace-Define-pt_regs-uapi_pt_regs-on-GLIBC-syst.patch
@@ -0,0 +1,49 @@
+From 78748dc55b1ceafe9a64beb9628f1d51b215482f Mon Sep 17 00:00:00 2001
+From: Khem Raj <raj.khem@gmail.com>
+Date: Sat, 30 Apr 2016 18:32:14 -0700
+Subject: [PATCH] ppc/ptrace: Define pt_regs uapi_pt_regs on !GLIBC systems
+
+Upstream-Status: Pending
+
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+---
+ gdb/nat/ppc-linux.h | 6 ++++++
+ gdbserver/linux-ppc-low.cc | 6 ++++++
+ 2 files changed, 12 insertions(+)
+
+diff --git a/gdb/nat/ppc-linux.h b/gdb/nat/ppc-linux.h
+index c84f9146bbd..8c8580c95e1 100644
+--- a/gdb/nat/ppc-linux.h
++++ b/gdb/nat/ppc-linux.h
+@@ -18,7 +18,13 @@
+ #ifndef NAT_PPC_LINUX_H
+ #define NAT_PPC_LINUX_H
+
++#if !defined(__GLIBC__)
++# define pt_regs uapi_pt_regs
++#endif
+ #include <asm/ptrace.h>
++#if !defined(__GLIBC__)
++# undef pt_regs
++#endif
+ #include <asm/cputable.h>
+
+ /* This sometimes isn't defined. */
+diff --git a/gdbserver/linux-ppc-low.cc b/gdbserver/linux-ppc-low.cc
+index 86fbc8f5d96..8a1a39bc750 100644
+--- a/gdbserver/linux-ppc-low.cc
++++ b/gdbserver/linux-ppc-low.cc
+@@ -23,7 +23,13 @@
+ #include "elf/common.h"
+ #include <sys/uio.h>
+ #include <elf.h>
++#if !defined(__GLIBC__)
++# define pt_regs uapi_pt_regs
++#endif
+ #include <asm/ptrace.h>
++#if !defined(__GLIBC__)
++# undef pt_regs
++#endif
+
+ #include "arch/ppc-linux-common.h"
+ #include "arch/ppc-linux-tdesc.h"
diff --git a/meta/recipes-devtools/gdb/gdb/0003-Dont-disable-libreadline.a-when-using-disable-static.patch b/meta/recipes-devtools/gdb/gdb/0003-Dont-disable-libreadline.a-when-using-disable-static.patch
new file mode 100644
index 0000000000..e41be8209e
--- /dev/null
+++ b/meta/recipes-devtools/gdb/gdb/0003-Dont-disable-libreadline.a-when-using-disable-static.patch
@@ -0,0 +1,47 @@
+From 683650564cd231bbf09c7cbc35543b77ca0b10d0 Mon Sep 17 00:00:00 2001
+From: Khem Raj <raj.khem@gmail.com>
+Date: Sat, 30 Apr 2016 15:25:03 -0700
+Subject: [PATCH] Dont disable libreadline.a when using --disable-static
+
+If gdb is configured with --disable-static then this is dutifully passed to
+readline which then disables libreadline.a, which causes a problem when gdb
+tries to link against that.
+
+To ensure that readline always builds static libraries, pass --enable-static to
+the sub-configure.
+
+Upstream-Status: Pending
+Signed-off-by: Ross Burton <ross.burton@intel.com>
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+---
+ Makefile.def | 3 ++-
+ Makefile.in | 2 +-
+ 2 files changed, 3 insertions(+), 2 deletions(-)
+
+diff --git a/Makefile.def b/Makefile.def
+index 870150183b9..eb3788da0d2 100644
+--- a/Makefile.def
++++ b/Makefile.def
+@@ -119,7 +119,8 @@ host_modules= { module= libiconv;
+ missing= install-html;
+ missing= install-info; };
+ host_modules= { module= m4; };
+-host_modules= { module= readline; };
++host_modules= { module= readline;
++ extra_configure_flags='--enable-static';};
+ host_modules= { module= sid; };
+ host_modules= { module= sim; };
+ host_modules= { module= texinfo; no_install= true; };
+diff --git a/Makefile.in b/Makefile.in
+index c97130a2338..a83f98e4778 100644
+--- a/Makefile.in
++++ b/Makefile.in
+@@ -31691,7 +31691,7 @@ configure-readline:
+ $$s/$$module_srcdir/configure \
+ --srcdir=$${topdir}/$$module_srcdir \
+ $(HOST_CONFIGARGS) --build=${build_alias} --host=${host_alias} \
+- --target=${target_alias} \
++ --target=${target_alias} --enable-static \
+ || exit 1
+ @endif readline
+
diff --git a/meta/recipes-devtools/gdb/gdb/0003-ppc-ptrace-Define-pt_regs-uapi_pt_regs-on-GLIBC-syst.patch b/meta/recipes-devtools/gdb/gdb/0003-ppc-ptrace-Define-pt_regs-uapi_pt_regs-on-GLIBC-syst.patch
deleted file mode 100644
index 7e09404bb0..0000000000
--- a/meta/recipes-devtools/gdb/gdb/0003-ppc-ptrace-Define-pt_regs-uapi_pt_regs-on-GLIBC-syst.patch
+++ /dev/null
@@ -1,52 +0,0 @@
-From e689eec672ee8c53b3adb2ade2b5deb9b7cd99d4 Mon Sep 17 00:00:00 2001
-From: Khem Raj <raj.khem@gmail.com>
-Date: Sat, 30 Apr 2016 18:32:14 -0700
-Subject: [PATCH 3/9] ppc/ptrace: Define pt_regs uapi_pt_regs on !GLIBC systems
-
-Upstream-Status: Pending
-
-Signed-off-by: Khem Raj <raj.khem@gmail.com>
----
- gdb/nat/ppc-linux.h | 6 ++++++
- gdbserver/linux-ppc-low.cc | 6 ++++++
- 2 files changed, 12 insertions(+)
-
-diff --git a/gdb/nat/ppc-linux.h b/gdb/nat/ppc-linux.h
-index 1094f6b0be3..d8588a646c2 100644
---- a/gdb/nat/ppc-linux.h
-+++ b/gdb/nat/ppc-linux.h
-@@ -18,7 +18,13 @@
- #ifndef NAT_PPC_LINUX_H
- #define NAT_PPC_LINUX_H
-
-+#if !defined(__GLIBC__)
-+# define pt_regs uapi_pt_regs
-+#endif
- #include <asm/ptrace.h>
-+#if !defined(__GLIBC__)
-+# undef pt_regs
-+#endif
- #include <asm/cputable.h>
-
- /* This sometimes isn't defined. */
-diff --git a/gdbserver/linux-ppc-low.cc b/gdbserver/linux-ppc-low.cc
-index 08824887003..69afbae5359 100644
---- a/gdbserver/linux-ppc-low.cc
-+++ b/gdbserver/linux-ppc-low.cc
-@@ -23,7 +23,13 @@
- #include "elf/common.h"
- #include <sys/uio.h>
- #include <elf.h>
-+#if !defined(__GLIBC__)
-+# define pt_regs uapi_pt_regs
-+#endif
- #include <asm/ptrace.h>
-+#if !defined(__GLIBC__)
-+# undef pt_regs
-+#endif
-
- #include "arch/ppc-linux-common.h"
- #include "arch/ppc-linux-tdesc.h"
---
-2.36.1
-
diff --git a/meta/recipes-devtools/gdb/gdb/0004-Dont-disable-libreadline.a-when-using-disable-static.patch b/meta/recipes-devtools/gdb/gdb/0004-Dont-disable-libreadline.a-when-using-disable-static.patch
deleted file mode 100644
index a1e85e91b3..0000000000
--- a/meta/recipes-devtools/gdb/gdb/0004-Dont-disable-libreadline.a-when-using-disable-static.patch
+++ /dev/null
@@ -1,50 +0,0 @@
-From 15ee6a626242efb8f367be49c13e00d0b72317f0 Mon Sep 17 00:00:00 2001
-From: Khem Raj <raj.khem@gmail.com>
-Date: Sat, 30 Apr 2016 15:25:03 -0700
-Subject: [PATCH 4/9] Dont disable libreadline.a when using --disable-static
-
-If gdb is configured with --disable-static then this is dutifully passed to
-readline which then disables libreadline.a, which causes a problem when gdb
-tries to link against that.
-
-To ensure that readline always builds static libraries, pass --enable-static to
-the sub-configure.
-
-Upstream-Status: Pending
-Signed-off-by: Ross Burton <ross.burton@intel.com>
-Signed-off-by: Khem Raj <raj.khem@gmail.com>
----
- Makefile.def | 3 ++-
- Makefile.in | 2 +-
- 2 files changed, 3 insertions(+), 2 deletions(-)
-
-diff --git a/Makefile.def b/Makefile.def
-index acdcd625ed6..78fc31e1199 100644
---- a/Makefile.def
-+++ b/Makefile.def
-@@ -120,7 +120,8 @@ host_modules= { module= libiconv;
- missing= install-html;
- missing= install-info; };
- host_modules= { module= m4; };
--host_modules= { module= readline; };
-+host_modules= { module= readline;
-+ extra_configure_flags='--enable-static';};
- host_modules= { module= sid; };
- host_modules= { module= sim; };
- host_modules= { module= texinfo; no_install= true; };
-diff --git a/Makefile.in b/Makefile.in
-index 3aacd2daac9..aa58adada4a 100644
---- a/Makefile.in
-+++ b/Makefile.in
-@@ -32791,7 +32791,7 @@ configure-readline:
- $$s/$$module_srcdir/configure \
- --srcdir=$${topdir}/$$module_srcdir \
- $(HOST_CONFIGARGS) --build=${build_alias} --host=${host_alias} \
-- --target=${target_alias} \
-+ --target=${target_alias} --enable-static \
- || exit 1
- @endif readline
-
---
-2.36.1
-
diff --git a/meta/recipes-devtools/gdb/gdb/0004-use-asm-sgidefs.h.patch b/meta/recipes-devtools/gdb/gdb/0004-use-asm-sgidefs.h.patch
new file mode 100644
index 0000000000..413610d71f
--- /dev/null
+++ b/meta/recipes-devtools/gdb/gdb/0004-use-asm-sgidefs.h.patch
@@ -0,0 +1,33 @@
+From 04eba78ea6bd167b7eeef3af668c8f49888dc688 Mon Sep 17 00:00:00 2001
+From: Andre McCurdy <amccurdy@gmail.com>
+Date: Sat, 30 Apr 2016 15:29:06 -0700
+Subject: [PATCH] use <asm/sgidefs.h>
+
+Build fix for MIPS with musl libc
+
+The MIPS specific header <sgidefs.h> is provided by glibc and uclibc
+but not by musl. Regardless of the libc, the kernel headers provide
+<asm/sgidefs.h> which provides the same definitions, so use that
+instead.
+
+Upstream-Status: Pending
+
+Signed-off-by: Andre McCurdy <armccurdy@gmail.com>
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+---
+ gdb/mips-linux-nat.c | 2 +-
+ 1 file changed, 1 insertion(+), 1 deletion(-)
+
+diff --git a/gdb/mips-linux-nat.c b/gdb/mips-linux-nat.c
+index bfd1c72cba6..13dc9cdb703 100644
+--- a/gdb/mips-linux-nat.c
++++ b/gdb/mips-linux-nat.c
+@@ -31,7 +31,7 @@
+ #include "gdb_proc_service.h"
+ #include "gregset.h"
+
+-#include <sgidefs.h>
++#include <asm/sgidefs.h>
+ #include "nat/gdb_ptrace.h"
+ #include <asm/ptrace.h>
+ #include "inf-ptrace.h"
diff --git a/meta/recipes-devtools/gdb/gdb/0005-Change-order-of-CFLAGS.patch b/meta/recipes-devtools/gdb/gdb/0005-Change-order-of-CFLAGS.patch
new file mode 100644
index 0000000000..7631e75048
--- /dev/null
+++ b/meta/recipes-devtools/gdb/gdb/0005-Change-order-of-CFLAGS.patch
@@ -0,0 +1,27 @@
+From e7041e9346aa0c3aee0b76315559e3ab2ef8977e Mon Sep 17 00:00:00 2001
+From: Khem Raj <raj.khem@gmail.com>
+Date: Sat, 30 Apr 2016 15:35:39 -0700
+Subject: [PATCH] Change order of CFLAGS
+
+Lets us override Werror if need be
+
+Upstream-Status: Inappropriate
+
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+---
+ gdbserver/Makefile.in | 2 +-
+ 1 file changed, 1 insertion(+), 1 deletion(-)
+
+diff --git a/gdbserver/Makefile.in b/gdbserver/Makefile.in
+index b597515d428..9e615e2842d 100644
+--- a/gdbserver/Makefile.in
++++ b/gdbserver/Makefile.in
+@@ -156,7 +156,7 @@ WIN32APILIBS = @WIN32APILIBS@
+ INTERNAL_CFLAGS_BASE = ${GLOBAL_CFLAGS} \
+ ${PROFILE_CFLAGS} ${INCLUDE_CFLAGS} ${CPPFLAGS} $(PTHREAD_CFLAGS)
+ INTERNAL_WARN_CFLAGS = ${INTERNAL_CFLAGS_BASE} $(WARN_CFLAGS)
+-INTERNAL_CFLAGS = ${INTERNAL_WARN_CFLAGS} $(WERROR_CFLAGS) -DGDBSERVER
++INTERNAL_CFLAGS = ${INTERNAL_WARN_CFLAGS} $(WERROR_CFLAGS) ${COMPILER_CFLAGS} -DGDBSERVER
+
+ # LDFLAGS is specifically reserved for setting from the command line
+ # when running make.
diff --git a/meta/recipes-devtools/gdb/gdb/0005-use-asm-sgidefs.h.patch b/meta/recipes-devtools/gdb/gdb/0005-use-asm-sgidefs.h.patch
deleted file mode 100644
index 242099b9b1..0000000000
--- a/meta/recipes-devtools/gdb/gdb/0005-use-asm-sgidefs.h.patch
+++ /dev/null
@@ -1,36 +0,0 @@
-From 25a75aaf29791f4302f0e4452f7ebaf735d4f083 Mon Sep 17 00:00:00 2001
-From: Andre McCurdy <amccurdy@gmail.com>
-Date: Sat, 30 Apr 2016 15:29:06 -0700
-Subject: [PATCH 5/9] use <asm/sgidefs.h>
-
-Build fix for MIPS with musl libc
-
-The MIPS specific header <sgidefs.h> is provided by glibc and uclibc
-but not by musl. Regardless of the libc, the kernel headers provide
-<asm/sgidefs.h> which provides the same definitions, so use that
-instead.
-
-Upstream-Status: Pending
-
-Signed-off-by: Andre McCurdy <armccurdy@gmail.com>
-Signed-off-by: Khem Raj <raj.khem@gmail.com>
----
- gdb/mips-linux-nat.c | 2 +-
- 1 file changed, 1 insertion(+), 1 deletion(-)
-
-diff --git a/gdb/mips-linux-nat.c b/gdb/mips-linux-nat.c
-index 6adc61235aa..afb40066744 100644
---- a/gdb/mips-linux-nat.c
-+++ b/gdb/mips-linux-nat.c
-@@ -31,7 +31,7 @@
- #include "gdb_proc_service.h"
- #include "gregset.h"
-
--#include <sgidefs.h>
-+#include <asm/sgidefs.h>
- #include "nat/gdb_ptrace.h"
- #include <asm/ptrace.h>
- #include "inf-ptrace.h"
---
-2.36.1
-
diff --git a/meta/recipes-devtools/gdb/gdb/0006-Change-order-of-CFLAGS.patch b/meta/recipes-devtools/gdb/gdb/0006-Change-order-of-CFLAGS.patch
deleted file mode 100644
index 58c9b1d0a7..0000000000
--- a/meta/recipes-devtools/gdb/gdb/0006-Change-order-of-CFLAGS.patch
+++ /dev/null
@@ -1,30 +0,0 @@
-From c0e7c34134aa1f9644075c596a2338a50d3d923e Mon Sep 17 00:00:00 2001
-From: Khem Raj <raj.khem@gmail.com>
-Date: Sat, 30 Apr 2016 15:35:39 -0700
-Subject: [PATCH 6/9] Change order of CFLAGS
-
-Lets us override Werror if need be
-
-Upstream-Status: Inappropriate
-
-Signed-off-by: Khem Raj <raj.khem@gmail.com>
----
- gdbserver/Makefile.in | 2 +-
- 1 file changed, 1 insertion(+), 1 deletion(-)
-
-diff --git a/gdbserver/Makefile.in b/gdbserver/Makefile.in
-index 47648b8d962..5599779de57 100644
---- a/gdbserver/Makefile.in
-+++ b/gdbserver/Makefile.in
-@@ -156,7 +156,7 @@ WIN32APILIBS = @WIN32APILIBS@
- INTERNAL_CFLAGS_BASE = ${GLOBAL_CFLAGS} \
- ${PROFILE_CFLAGS} ${INCLUDE_CFLAGS} ${CPPFLAGS} $(PTHREAD_CFLAGS)
- INTERNAL_WARN_CFLAGS = ${INTERNAL_CFLAGS_BASE} $(WARN_CFLAGS)
--INTERNAL_CFLAGS = ${INTERNAL_WARN_CFLAGS} $(WERROR_CFLAGS) -DGDBSERVER
-+INTERNAL_CFLAGS = ${INTERNAL_WARN_CFLAGS} $(WERROR_CFLAGS) ${COMPILER_CFLAGS} -DGDBSERVER
-
- # LDFLAGS is specifically reserved for setting from the command line
- # when running make.
---
-2.36.1
-
diff --git a/meta/recipes-devtools/gdb/gdb/0006-resolve-restrict-keyword-conflict.patch b/meta/recipes-devtools/gdb/gdb/0006-resolve-restrict-keyword-conflict.patch
new file mode 100644
index 0000000000..45388c5ac5
--- /dev/null
+++ b/meta/recipes-devtools/gdb/gdb/0006-resolve-restrict-keyword-conflict.patch
@@ -0,0 +1,45 @@
+From 477f1b2049c7f940b8e8fda4ac396cfe322b269f Mon Sep 17 00:00:00 2001
+From: Khem Raj <raj.khem@gmail.com>
+Date: Tue, 10 May 2016 08:47:05 -0700
+Subject: [PATCH] resolve restrict keyword conflict
+
+GCC detects that we call 'restrict' as param name in function
+signatures and complains since both params are called 'restrict'
+therefore we use __restrict to denote the C99 keywork
+
+Upstream-Status: Pending
+
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+---
+ gnulib/import/sys_time.in.h | 8 ++++----
+ 1 file changed, 4 insertions(+), 4 deletions(-)
+
+diff --git a/gnulib/import/sys_time.in.h b/gnulib/import/sys_time.in.h
+index 87db1a88745..e6b98c7e467 100644
+--- a/gnulib/import/sys_time.in.h
++++ b/gnulib/import/sys_time.in.h
+@@ -93,20 +93,20 @@ struct timeval
+ # define gettimeofday rpl_gettimeofday
+ # endif
+ _GL_FUNCDECL_RPL (gettimeofday, int,
+- (struct timeval *restrict, void *restrict)
++ (struct timeval *__restrict, void *__restrict)
+ _GL_ARG_NONNULL ((1)));
+ _GL_CXXALIAS_RPL (gettimeofday, int,
+- (struct timeval *restrict, void *restrict));
++ (struct timeval *__restrict, void *__restrict));
+ # else
+ # if !@HAVE_GETTIMEOFDAY@
+ _GL_FUNCDECL_SYS (gettimeofday, int,
+- (struct timeval *restrict, void *restrict)
++ (struct timeval *__restrict, void *__restrict)
+ _GL_ARG_NONNULL ((1)));
+ # endif
+ /* Need to cast, because on glibc systems, by default, the second argument is
+ struct timezone *. */
+ _GL_CXXALIAS_SYS_CAST (gettimeofday, int,
+- (struct timeval *restrict, void *restrict));
++ (struct timeval *__restrict, void *__restrict));
+ # endif
+ _GL_CXXALIASWARN (gettimeofday);
+ # if defined __cplusplus && defined GNULIB_NAMESPACE
diff --git a/meta/recipes-devtools/gdb/gdb/0007-Fix-invalid-sigprocmask-call.patch b/meta/recipes-devtools/gdb/gdb/0007-Fix-invalid-sigprocmask-call.patch
new file mode 100644
index 0000000000..26e3ad9098
--- /dev/null
+++ b/meta/recipes-devtools/gdb/gdb/0007-Fix-invalid-sigprocmask-call.patch
@@ -0,0 +1,46 @@
+From ebeb6125acf6b4e4a8a4eaa59e2830b9ca02d683 Mon Sep 17 00:00:00 2001
+From: Yousong Zhou <yszhou4tech@gmail.com>
+Date: Fri, 24 Mar 2017 10:36:03 +0800
+Subject: [PATCH] Fix invalid sigprocmask call
+MIME-Version: 1.0
+Content-Type: text/plain; charset=UTF-8
+Content-Transfer-Encoding: 8bit
+
+The POSIX document says
+
+ The pthread_sigmask() and sigprocmask() functions shall fail if:
+
+ [EINVAL]
+ The value of the how argument is not equal to one of the defined values.
+
+and this is how musl-libc is currently doing. Fix the call to be safe
+and correct
+
+ [1] http://pubs.opengroup.org/onlinepubs/9699919799/functions/pthread_sigmask.html
+
+gdb/ChangeLog:
+2017-03-24 Yousong Zhou <yszhou4tech@gmail.com>
+
+ * common/signals-state-save-restore.c (save_original_signals_state):
+ Fix invalid sigprocmask call.
+
+Upstream-Status: Pending [not author, cherry-picked from LEDE https://bugs.lede-project.org/index.php?do=details&task_id=637&openedfrom=-1%2Bweek]
+Signed-off-by: André Draszik <adraszik@tycoint.com>
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+---
+ gdbsupport/signals-state-save-restore.cc | 2 +-
+ 1 file changed, 1 insertion(+), 1 deletion(-)
+
+diff --git a/gdbsupport/signals-state-save-restore.cc b/gdbsupport/signals-state-save-restore.cc
+index 3ec7a259c9c..0702eca7725 100644
+--- a/gdbsupport/signals-state-save-restore.cc
++++ b/gdbsupport/signals-state-save-restore.cc
+@@ -38,7 +38,7 @@ save_original_signals_state (bool quiet)
+ int i;
+ int res;
+
+- res = gdb_sigmask (0, NULL, &original_signal_mask);
++ res = gdb_sigmask (SIG_BLOCK, NULL, &original_signal_mask);
+ if (res == -1)
+ perror_with_name (("sigprocmask"));
+
diff --git a/meta/recipes-devtools/gdb/gdb/0007-resolve-restrict-keyword-conflict.patch b/meta/recipes-devtools/gdb/gdb/0007-resolve-restrict-keyword-conflict.patch
deleted file mode 100644
index bbd1f0b27f..0000000000
--- a/meta/recipes-devtools/gdb/gdb/0007-resolve-restrict-keyword-conflict.patch
+++ /dev/null
@@ -1,48 +0,0 @@
-From 44fa1ecfbd8a5fe0cfea12a175fa041686842a0c Mon Sep 17 00:00:00 2001
-From: Khem Raj <raj.khem@gmail.com>
-Date: Tue, 10 May 2016 08:47:05 -0700
-Subject: [PATCH 7/9] resolve restrict keyword conflict
-
-GCC detects that we call 'restrict' as param name in function
-signatures and complains since both params are called 'restrict'
-therefore we use __restrict to denote the C99 keywork
-
-Upstream-Status: Pending
-
-Signed-off-by: Khem Raj <raj.khem@gmail.com>
----
- gnulib/import/sys_time.in.h | 8 ++++----
- 1 file changed, 4 insertions(+), 4 deletions(-)
-
-diff --git a/gnulib/import/sys_time.in.h b/gnulib/import/sys_time.in.h
-index 90a67d18426..664641a1fe8 100644
---- a/gnulib/import/sys_time.in.h
-+++ b/gnulib/import/sys_time.in.h
-@@ -93,20 +93,20 @@ struct timeval
- # define gettimeofday rpl_gettimeofday
- # endif
- _GL_FUNCDECL_RPL (gettimeofday, int,
-- (struct timeval *restrict, void *restrict)
-+ (struct timeval *__restrict, void *__restrict)
- _GL_ARG_NONNULL ((1)));
- _GL_CXXALIAS_RPL (gettimeofday, int,
-- (struct timeval *restrict, void *restrict));
-+ (struct timeval *__restrict, void *__restrict));
- # else
- # if !@HAVE_GETTIMEOFDAY@
- _GL_FUNCDECL_SYS (gettimeofday, int,
-- (struct timeval *restrict, void *restrict)
-+ (struct timeval *__restrict, void *__restrict)
- _GL_ARG_NONNULL ((1)));
- # endif
- /* Need to cast, because on glibc systems, by default, the second argument is
- struct timezone *. */
- _GL_CXXALIAS_SYS_CAST (gettimeofday, int,
-- (struct timeval *restrict, void *restrict));
-+ (struct timeval *__restrict, void *__restrict));
- # endif
- _GL_CXXALIASWARN (gettimeofday);
- # if defined __cplusplus && defined GNULIB_NAMESPACE
---
-2.36.1
-
diff --git a/meta/recipes-devtools/gdb/gdb/0008-Define-alignof-using-_Alignof-when-using-C11-or-newe.patch b/meta/recipes-devtools/gdb/gdb/0008-Define-alignof-using-_Alignof-when-using-C11-or-newe.patch
new file mode 100644
index 0000000000..819f1c9f17
--- /dev/null
+++ b/meta/recipes-devtools/gdb/gdb/0008-Define-alignof-using-_Alignof-when-using-C11-or-newe.patch
@@ -0,0 +1,48 @@
+From cb7c8df338184d2d2f31ee1fd238653e3162da44 Mon Sep 17 00:00:00 2001
+From: Khem Raj <raj.khem@gmail.com>
+Date: Sun, 15 Jan 2023 00:16:25 -0800
+Subject: [PATCH] Define alignof using _Alignof when using C11 or newer
+
+WG14 N2350 made very clear that it is an UB having type definitions
+within "offsetof" [1]. This patch enhances the implementation of macro
+alignof to use builtin "_Alignof" to avoid undefined behavior on
+when using std=c11 or newer
+
+clang 16+ has started to flag this [2]
+
+Fixes build when using -std >= gnu11 and using clang16+
+
+Older compilers gcc < 4.9 or clang < 8 has buggy _Alignof even though it
+may support C11, exclude those compilers too
+
+[1] https://www.open-std.org/jtc1/sc22/wg14/www/docs/n2350.htm
+[2] https://reviews.llvm.org/D133574
+
+Upstream-Status: Pending
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+---
+ libiberty/sha1.c | 10 ++++++++++
+ 1 file changed, 10 insertions(+)
+
+diff --git a/libiberty/sha1.c b/libiberty/sha1.c
+index 6c71e3ebb41..a2caf947d31 100644
+--- a/libiberty/sha1.c
++++ b/libiberty/sha1.c
+@@ -229,7 +229,17 @@ sha1_process_bytes (const void *buffer, size_t len, struct sha1_ctx *ctx)
+ if (len >= 64)
+ {
+ #if !_STRING_ARCH_unaligned
++/* GCC releases before GCC 4.9 had a bug in _Alignof. See GCC bug 52023
++ <https://gcc.gnu.org/bugzilla/show_bug.cgi?id=52023>.
++ clang versions < 8.0.0 have the same bug. */
++#if (!defined __STDC_VERSION__ || __STDC_VERSION__ < 201112 \
++ || (defined __GNUC__ && __GNUC__ < 4 + (__GNUC_MINOR__ < 9) \
++ && !defined __clang__) \
++ || (defined __clang__ && __clang_major__ < 8))
+ # define alignof(type) offsetof (struct { char c; type x; }, x)
++#else
++# define alignof(type) _Alignof(type)
++#endif
+ # define UNALIGNED_P(p) (((size_t) p) % alignof (sha1_uint32) != 0)
+ if (UNALIGNED_P (buffer))
+ while (len > 64)
diff --git a/meta/recipes-devtools/gdb/gdb/0008-Fix-invalid-sigprocmask-call.patch b/meta/recipes-devtools/gdb/gdb/0008-Fix-invalid-sigprocmask-call.patch
deleted file mode 100644
index ed1310ced2..0000000000
--- a/meta/recipes-devtools/gdb/gdb/0008-Fix-invalid-sigprocmask-call.patch
+++ /dev/null
@@ -1,49 +0,0 @@
-From 5bdd15553daef7370ca3c1f12d8f14247fdd4907 Mon Sep 17 00:00:00 2001
-From: Yousong Zhou <yszhou4tech@gmail.com>
-Date: Fri, 24 Mar 2017 10:36:03 +0800
-Subject: [PATCH 8/9] Fix invalid sigprocmask call
-MIME-Version: 1.0
-Content-Type: text/plain; charset=UTF-8
-Content-Transfer-Encoding: 8bit
-
-The POSIX document says
-
- The pthread_sigmask() and sigprocmask() functions shall fail if:
-
- [EINVAL]
- The value of the how argument is not equal to one of the defined values.
-
-and this is how musl-libc is currently doing. Fix the call to be safe
-and correct
-
- [1] http://pubs.opengroup.org/onlinepubs/9699919799/functions/pthread_sigmask.html
-
-gdb/ChangeLog:
-2017-03-24 Yousong Zhou <yszhou4tech@gmail.com>
-
- * common/signals-state-save-restore.c (save_original_signals_state):
- Fix invalid sigprocmask call.
-
-Upstream-Status: Pending [not author, cherry-picked from LEDE https://bugs.lede-project.org/index.php?do=details&task_id=637&openedfrom=-1%2Bweek]
-Signed-off-by: André Draszik <adraszik@tycoint.com>
-Signed-off-by: Khem Raj <raj.khem@gmail.com>
----
- gdbsupport/signals-state-save-restore.cc | 2 +-
- 1 file changed, 1 insertion(+), 1 deletion(-)
-
-diff --git a/gdbsupport/signals-state-save-restore.cc b/gdbsupport/signals-state-save-restore.cc
-index 92e799d3551..a4a0234272a 100644
---- a/gdbsupport/signals-state-save-restore.cc
-+++ b/gdbsupport/signals-state-save-restore.cc
-@@ -38,7 +38,7 @@ save_original_signals_state (bool quiet)
- int i;
- int res;
-
-- res = gdb_sigmask (0, NULL, &original_signal_mask);
-+ res = gdb_sigmask (SIG_BLOCK, NULL, &original_signal_mask);
- if (res == -1)
- perror_with_name (("sigprocmask"));
-
---
-2.36.1
-
diff --git a/meta/recipes-devtools/gdb/gdb/0009-gdbserver-ctrl-c-handling.patch b/meta/recipes-devtools/gdb/gdb/0009-gdbserver-ctrl-c-handling.patch
deleted file mode 100644
index f53d3bd1e5..0000000000
--- a/meta/recipes-devtools/gdb/gdb/0009-gdbserver-ctrl-c-handling.patch
+++ /dev/null
@@ -1,40 +0,0 @@
-From bc3b1f6aacf2d8fe66b022fbfcf28cd82c76e52f Mon Sep 17 00:00:00 2001
-From: Khem Raj <raj.khem@gmail.com>
-Date: Thu, 29 Nov 2018 18:00:23 -0800
-Subject: [PATCH 9/9] gdbserver ctrl-c handling
-
-This problem was created by the upstream commit 78708b7c8c
-After applying the commit, it will send SIGINT to the process
-group(-signal_pid).
-But if we use gdbserver send SIGINT, and the attached process is not a
-process
-group leader, then the "kill (-signal_pid, SIGINT)" returns error and
-fails to
-interrupt the attached process.
-
-Upstream-Status: Submitted
-[https://sourceware.org/bugzilla/show_bug.cgi?id=18945]
-
-Author: Josh Gao
-Signed-off-by: Zhixiong Chi <zhixiong.chi@windriver.com>
-Signed-off-by: Khem Raj <raj.khem@gmail.com>
----
- gdbserver/linux-low.cc | 2 +-
- 1 file changed, 1 insertion(+), 1 deletion(-)
-
-diff --git a/gdbserver/linux-low.cc b/gdbserver/linux-low.cc
-index 7726a4a0c36..f750e074a03 100644
---- a/gdbserver/linux-low.cc
-+++ b/gdbserver/linux-low.cc
-@@ -5496,7 +5496,7 @@ linux_process_target::request_interrupt ()
- {
- /* Send a SIGINT to the process group. This acts just like the user
- typed a ^C on the controlling terminal. */
-- ::kill (-signal_pid, SIGINT);
-+ ::kill (signal_pid, SIGINT);
- }
-
- bool
---
-2.36.1
-
diff --git a/meta/recipes-devtools/gdb/gdb_12.1.bb b/meta/recipes-devtools/gdb/gdb_14.2.bb
index 9c6db4ca2c..9c6db4ca2c 100644
--- a/meta/recipes-devtools/gdb/gdb_12.1.bb
+++ b/meta/recipes-devtools/gdb/gdb_14.2.bb
diff --git a/meta/recipes-devtools/git/git_2.36.1.bb b/meta/recipes-devtools/git/git_2.36.1.bb
deleted file mode 100644
index 2c53616642..0000000000
--- a/meta/recipes-devtools/git/git_2.36.1.bb
+++ /dev/null
@@ -1,168 +0,0 @@
-SUMMARY = "Distributed version control system"
-HOMEPAGE = "http://git-scm.com"
-DESCRIPTION = "Git is a free and open source distributed version control system designed to handle everything from small to very large projects with speed and efficiency."
-SECTION = "console/utils"
-LICENSE = "GPL-2.0-only & GPL-2.0-or-later & BSD-3-Clause & MIT & BSL-1.0 & LGPL-2.1-or-later"
-DEPENDS = "openssl zlib"
-
-PROVIDES:append:class-native = " git-replacement-native"
-
-SRC_URI = "${KERNELORG_MIRROR}/software/scm/git/git-${PV}.tar.gz;name=tarball \
- file://fixsort.patch \
- file://0001-config.mak.uname-do-not-force-RHEL-7-specific-build-.patch \
- "
-
-S = "${WORKDIR}/git-${PV}"
-
-LIC_FILES_CHKSUM = "\
- file://COPYING;md5=7c0d7ef03a7eb04ce795b0f60e68e7e1 \
- file://reftable/LICENSE;md5=1a6424cafc4c9c88c689848e165af33b \
- file://sha1dc/LICENSE.txt;md5=9bbe4c990a9e98ea4b98ef5d3bcb8a7a \
- file://compat/nedmalloc/License.txt;md5=e4224ccaecb14d942c71d31bef20d78c \
- file://compat/inet_ntop.c;md5=76593c6f74e8ced5b24520175688d59b;endline=16 \
- file://compat/obstack.h;md5=08ad25fee5428cd879ceef451ce3a22e;endline=18 \
- file://compat/poll/poll.h;md5=9fc00170a53b8e3e52157c91ac688dd1;endline=19 \
- file://compat/regex/regex.h;md5=30cc8af0e6f0f8a25acec6d8783bb763;beginline=4;endline=22 \
-"
-
-CVE_PRODUCT = "git-scm:git"
-
-# This is about a manpage not mentioning --mirror may "leak" information
-# in mirrored git repos. Most OE users wouldn't build the docs and
-# we don't see this as a major issue for our general users/usecases.
-CVE_CHECK_IGNORE += "CVE-2022-24975"
-
-PACKAGECONFIG ??= "expat curl"
-PACKAGECONFIG[cvsserver] = ""
-PACKAGECONFIG[svn] = ""
-PACKAGECONFIG[manpages] = ",,asciidoc-native xmlto-native"
-PACKAGECONFIG[curl] = "--with-curl,--without-curl,curl"
-PACKAGECONFIG[expat] = "--with-expat,--without-expat,expat"
-
-EXTRA_OECONF = "--with-perl=${STAGING_BINDIR_NATIVE}/perl-native/perl \
- --without-tcltk \
- --without-iconv \
-"
-EXTRA_OECONF:append:class-nativesdk = " --with-gitconfig=/etc/gitconfig "
-
-# Needs brokensep as this doesn't use automake
-inherit autotools-brokensep perlnative bash-completion manpages
-
-EXTRA_OEMAKE = "NO_PYTHON=1 CFLAGS='${CFLAGS}' LDFLAGS='${LDFLAGS}'"
-EXTRA_OEMAKE += "'PERL_PATH=/usr/bin/env perl'"
-EXTRA_OEMAKE += "COMPUTE_HEADER_DEPENDENCIES=no"
-EXTRA_OEMAKE:append:class-native = " NO_CROSS_DIRECTORY_HARDLINKS=1"
-
-do_compile:prepend () {
- # Remove perl/perl.mak to fix the out-of-date perl.mak error
- # during rebuild
- rm -f perl/perl.mak
-
- if [ "${@bb.utils.filter('PACKAGECONFIG', 'manpages', d)}" ]; then
- oe_runmake man
- fi
-}
-
-do_install () {
- oe_runmake install DESTDIR="${D}" bindir=${bindir} \
- template_dir=${datadir}/git-core/templates
-
- install -d ${D}/${datadir}/bash-completion/completions/
- install -m 644 ${S}/contrib/completion/git-completion.bash ${D}/${datadir}/bash-completion/completions/git
-
- if [ "${@bb.utils.filter('PACKAGECONFIG', 'manpages', d)}" ]; then
- oe_runmake install-man DESTDIR="${D}"
- fi
-}
-
-perl_native_fixup () {
- sed -i -e 's#${STAGING_BINDIR_NATIVE}/perl-native/#${bindir}/#' \
- -e 's#${libdir}/perl-native/#${libdir}/#' \
- ${@d.getVar("PERLTOOLS").replace(' /',d.getVar('D') + '/')}
-
- if [ ! "${@bb.utils.filter('PACKAGECONFIG', 'cvsserver', d)}" ]; then
- # Only install the git cvsserver command if explicitly requested
- # as it requires the DBI Perl module, which does not exist in
- # OE-Core.
- rm ${D}${libexecdir}/git-core/git-cvsserver \
- ${D}${bindir}/git-cvsserver
- fi
-
- if [ ! "${@bb.utils.filter('PACKAGECONFIG', 'svn', d)}" ]; then
- # Only install the git svn command and all Git::SVN Perl modules
- # if explicitly requested as they require the SVN::Core Perl
- # module, which does not exist in OE-Core.
- rm -r ${D}${libexecdir}/git-core/git-svn \
- ${D}${datadir}/perl5/Git/SVN*
- fi
-}
-
-REL_GIT_EXEC_PATH = "${@os.path.relpath(libexecdir, bindir)}/git-core"
-REL_GIT_TEMPLATE_DIR = "${@os.path.relpath(datadir, bindir)}/git-core/templates"
-
-do_install:append:class-target () {
- perl_native_fixup
-}
-
-do_install:append:class-native() {
- create_wrapper ${D}${bindir}/git \
- GIT_EXEC_PATH='`dirname $''realpath`'/${REL_GIT_EXEC_PATH} \
- GIT_TEMPLATE_DIR='`dirname $''realpath`'/${REL_GIT_TEMPLATE_DIR}
-}
-
-do_install:append:class-nativesdk() {
- create_wrapper ${D}${bindir}/git \
- GIT_EXEC_PATH='`dirname $''realpath`'/${REL_GIT_EXEC_PATH} \
- GIT_TEMPLATE_DIR='`dirname $''realpath`'/${REL_GIT_TEMPLATE_DIR}
- perl_native_fixup
-}
-
-FILES:${PN} += "${datadir}/git-core ${libexecdir}/git-core/"
-
-PERLTOOLS = " \
- ${bindir}/git-cvsserver \
- ${libexecdir}/git-core/git-add--interactive \
- ${libexecdir}/git-core/git-archimport \
- ${libexecdir}/git-core/git-cvsexportcommit \
- ${libexecdir}/git-core/git-cvsimport \
- ${libexecdir}/git-core/git-cvsserver \
- ${libexecdir}/git-core/git-send-email \
- ${libexecdir}/git-core/git-svn \
- ${libexecdir}/git-core/git-instaweb \
- ${datadir}/gitweb/gitweb.cgi \
- ${datadir}/git-core/templates/hooks/prepare-commit-msg.sample \
- ${datadir}/git-core/templates/hooks/pre-rebase.sample \
- ${datadir}/git-core/templates/hooks/fsmonitor-watchman.sample \
-"
-
-# Git tools requiring perl
-PACKAGES =+ "${PN}-perltools"
-FILES:${PN}-perltools += " \
- ${PERLTOOLS} \
- ${libdir}/perl \
- ${datadir}/perl5 \
-"
-
-RDEPENDS:${PN}-perltools = "${PN} perl perl-module-file-path findutils"
-
-# git-tk package with gitk and git-gui
-PACKAGES =+ "${PN}-tk"
-#RDEPENDS_${PN}-tk = "${PN} tk tcl"
-#EXTRA_OEMAKE = "TCL_PATH=${STAGING_BINDIR_CROSS}/tclsh"
-FILES:${PN}-tk = " \
- ${bindir}/gitk \
- ${datadir}/gitk \
-"
-
-PACKAGES =+ "gitweb"
-FILES:gitweb = "${datadir}/gitweb/"
-RDEPENDS:gitweb = "perl"
-
-BBCLASSEXTEND = "native nativesdk"
-
-EXTRA_OECONF += "ac_cv_snprintf_returns_bogus=no \
- ac_cv_fread_reads_directories=${ac_cv_fread_reads_directories=yes} \
- "
-EXTRA_OEMAKE += "NO_GETTEXT=1"
-
-SRC_URI[tarball.sha256sum] = "37d936fd17c81aa9ddd3dba4e56e88a45fa534ad0ba946454e8ce818760c6a2c"
diff --git a/meta/recipes-devtools/git/git_2.44.0.bb b/meta/recipes-devtools/git/git_2.44.0.bb
new file mode 100644
index 0000000000..90e555eba7
--- /dev/null
+++ b/meta/recipes-devtools/git/git_2.44.0.bb
@@ -0,0 +1,166 @@
+SUMMARY = "Distributed version control system"
+HOMEPAGE = "http://git-scm.com"
+DESCRIPTION = "Git is a free and open source distributed version control system designed to handle everything from small to very large projects with speed and efficiency."
+SECTION = "console/utils"
+LICENSE = "GPL-2.0-only & GPL-2.0-or-later & BSD-3-Clause & MIT & BSL-1.0 & LGPL-2.1-or-later"
+DEPENDS = "openssl zlib"
+DEPENDS:class-native += "ca-certificates"
+
+PROVIDES:append:class-native = " git-replacement-native"
+
+SRC_URI = "${KERNELORG_MIRROR}/software/scm/git/git-${PV}.tar.gz;name=tarball \
+ file://fixsort.patch \
+ file://0001-config.mak.uname-do-not-force-RHEL-7-specific-build-.patch \
+ "
+
+S = "${WORKDIR}/git-${PV}"
+
+LIC_FILES_CHKSUM = "\
+ file://COPYING;md5=7c0d7ef03a7eb04ce795b0f60e68e7e1 \
+ file://reftable/LICENSE;md5=1a6424cafc4c9c88c689848e165af33b \
+ file://sha1dc/LICENSE.txt;md5=9bbe4c990a9e98ea4b98ef5d3bcb8a7a \
+ file://compat/nedmalloc/License.txt;md5=e4224ccaecb14d942c71d31bef20d78c \
+ file://compat/inet_ntop.c;md5=76593c6f74e8ced5b24520175688d59b;endline=16 \
+ file://compat/obstack.h;md5=08ad25fee5428cd879ceef451ce3a22e;endline=18 \
+ file://compat/poll/poll.h;md5=9fc00170a53b8e3e52157c91ac688dd1;endline=19 \
+ file://compat/regex/regex.h;md5=30cc8af0e6f0f8a25acec6d8783bb763;beginline=4;endline=22 \
+"
+
+CVE_PRODUCT = "git-scm:git"
+
+PACKAGECONFIG ??= "expat curl"
+PACKAGECONFIG[cvsserver] = ""
+PACKAGECONFIG[svn] = ""
+PACKAGECONFIG[manpages] = ",,asciidoc-native xmlto-native"
+PACKAGECONFIG[curl] = "--with-curl,--without-curl,curl"
+PACKAGECONFIG[expat] = "--with-expat,--without-expat,expat"
+
+EXTRA_OECONF = "--with-perl=${STAGING_BINDIR_NATIVE}/perl-native/perl \
+ --without-tcltk \
+ --without-iconv \
+"
+EXTRA_OECONF:append:class-nativesdk = " --with-gitconfig=/etc/gitconfig "
+
+# Needs brokensep as this doesn't use automake
+inherit autotools-brokensep perlnative bash-completion manpages
+
+EXTRA_OEMAKE = "NO_PYTHON=1 CFLAGS='${CFLAGS}' LDFLAGS='${LDFLAGS}'"
+EXTRA_OEMAKE += "'PERL_PATH=/usr/bin/env perl'"
+EXTRA_OEMAKE += "COMPUTE_HEADER_DEPENDENCIES=no"
+EXTRA_OEMAKE:append:class-native = " NO_CROSS_DIRECTORY_HARDLINKS=1"
+
+do_compile:prepend () {
+ # Remove perl/perl.mak to fix the out-of-date perl.mak error
+ # during rebuild
+ rm -f perl/perl.mak
+
+ if [ "${@bb.utils.filter('PACKAGECONFIG', 'manpages', d)}" ]; then
+ oe_runmake man
+ fi
+}
+
+do_install () {
+ oe_runmake install DESTDIR="${D}" bindir=${bindir} \
+ template_dir=${datadir}/git-core/templates
+
+ install -d ${D}/${datadir}/bash-completion/completions/
+ install -m 644 ${S}/contrib/completion/git-completion.bash ${D}/${datadir}/bash-completion/completions/git
+
+ if [ "${@bb.utils.filter('PACKAGECONFIG', 'manpages', d)}" ]; then
+ # Needs to be serial with make 4.4 due to https://savannah.gnu.org/bugs/index.php?63362
+ make install-man DESTDIR="${D}"
+ fi
+}
+
+perl_native_fixup () {
+ sed -i -e 's#${STAGING_BINDIR_NATIVE}/perl-native/#${bindir}/#' \
+ -e 's#${libdir}/perl-native/#${libdir}/#' \
+ ${@d.getVar("PERLTOOLS").replace(' /',d.getVar('D') + '/')}
+
+ if [ ! "${@bb.utils.filter('PACKAGECONFIG', 'cvsserver', d)}" ]; then
+ # Only install the git cvsserver command if explicitly requested
+ # as it requires the DBI Perl module, which does not exist in
+ # OE-Core.
+ rm ${D}${libexecdir}/git-core/git-cvsserver \
+ ${D}${bindir}/git-cvsserver
+ fi
+
+ if [ ! "${@bb.utils.filter('PACKAGECONFIG', 'svn', d)}" ]; then
+ # Only install the git svn command and all Git::SVN Perl modules
+ # if explicitly requested as they require the SVN::Core Perl
+ # module, which does not exist in OE-Core.
+ rm -r ${D}${libexecdir}/git-core/git-svn \
+ ${D}${datadir}/perl5/Git/SVN*
+ fi
+}
+
+REL_GIT_EXEC_PATH = "${@os.path.relpath(libexecdir, bindir)}/git-core"
+REL_GIT_TEMPLATE_DIR = "${@os.path.relpath(datadir, bindir)}/git-core/templates"
+REL_GIT_SSL_CAINFO = "${@os.path.relpath(sysconfdir, bindir)}/ssl/certs/ca-certificates.crt"
+
+do_install:append:class-target () {
+ perl_native_fixup
+}
+
+do_install:append:class-native() {
+ create_wrapper ${D}${bindir}/git \
+ GIT_EXEC_PATH='`dirname $''realpath`'/${REL_GIT_EXEC_PATH} \
+ GIT_SSL_CAINFO='`dirname $''realpath`'/${REL_GIT_SSL_CAINFO} \
+ GIT_TEMPLATE_DIR='`dirname $''realpath`'/${REL_GIT_TEMPLATE_DIR}
+}
+
+do_install:append:class-nativesdk() {
+ create_wrapper ${D}${bindir}/git \
+ GIT_EXEC_PATH='`dirname $''realpath`'/${REL_GIT_EXEC_PATH} \
+ GIT_TEMPLATE_DIR='`dirname $''realpath`'/${REL_GIT_TEMPLATE_DIR}
+ perl_native_fixup
+}
+
+FILES:${PN} += "${datadir}/git-core ${libexecdir}/git-core/"
+
+PERLTOOLS = " \
+ ${bindir}/git-cvsserver \
+ ${libexecdir}/git-core/git-archimport \
+ ${libexecdir}/git-core/git-cvsexportcommit \
+ ${libexecdir}/git-core/git-cvsimport \
+ ${libexecdir}/git-core/git-cvsserver \
+ ${libexecdir}/git-core/git-send-email \
+ ${libexecdir}/git-core/git-svn \
+ ${libexecdir}/git-core/git-instaweb \
+ ${datadir}/gitweb/gitweb.cgi \
+ ${datadir}/git-core/templates/hooks/prepare-commit-msg.sample \
+ ${datadir}/git-core/templates/hooks/pre-rebase.sample \
+ ${datadir}/git-core/templates/hooks/fsmonitor-watchman.sample \
+"
+
+# Git tools requiring perl
+PACKAGES =+ "${PN}-perltools"
+FILES:${PN}-perltools += " \
+ ${PERLTOOLS} \
+ ${libdir}/perl \
+ ${datadir}/perl5 \
+"
+
+RDEPENDS:${PN}-perltools = "${PN} perl perl-module-file-path findutils"
+
+# git-tk package with gitk and git-gui
+PACKAGES =+ "${PN}-tk"
+#RDEPENDS:${PN}-tk = "${PN} tk tcl"
+#EXTRA_OEMAKE = "TCL_PATH=${STAGING_BINDIR_CROSS}/tclsh"
+FILES:${PN}-tk = " \
+ ${bindir}/gitk \
+ ${datadir}/gitk \
+"
+
+PACKAGES =+ "gitweb"
+FILES:gitweb = "${datadir}/gitweb/"
+RDEPENDS:gitweb = "perl"
+
+BBCLASSEXTEND = "native nativesdk"
+
+EXTRA_OECONF += "ac_cv_snprintf_returns_bogus=no \
+ ac_cv_fread_reads_directories=${ac_cv_fread_reads_directories=yes} \
+ "
+EXTRA_OEMAKE += "NO_GETTEXT=1"
+
+SRC_URI[tarball.sha256sum] = "f9e36f085458fe9688fbbe7846b8c4770b13d161fcd8953655f36b2b85f06b76"
diff --git a/meta/recipes-devtools/glide/glide_0.13.3.bb b/meta/recipes-devtools/glide/glide_0.13.3.bb
deleted file mode 100644
index db703c2d21..0000000000
--- a/meta/recipes-devtools/glide/glide_0.13.3.bb
+++ /dev/null
@@ -1,43 +0,0 @@
-SUMMARY = "Vendor Package Management for Golang"
-HOMEPAGE = "https://github.com/Masterminds/glide"
-DESCRIPTION = "Glide is a Vendor Package Management for Golang"
-LICENSE = "MIT"
-LIC_FILES_CHKSUM = "file://src/${GO_IMPORT}/LICENSE;md5=54905cf894f8cc416a92f4fc350c35b2"
-
-GO_IMPORT = "github.com/Masterminds/glide"
-SRC_URI = "git://${GO_IMPORT};branch=master;protocol=https"
-SRCREV = "8ed5b9292379d86c39592a7e6a58eb9c903877cf"
-
-inherit go
-
-# New Go versions has Go modules support enabled by default and cause the Glide
-# tool build to fail.
-export GO111MODULE = "off"
-
-RDEPENDS:${PN}-dev += "bash"
-RDEPENDS:${PN}-ptest += "bash"
-
-BBCLASSEXTEND = "native nativesdk"
-
-# for x86 ends with textrel in ${PN}
-# http://errors.yoctoproject.org/Errors/Details/185631/
-# ERROR: QA Issue: ELF binary '/work/i586-oe-linux/glide/0.13.1-r0/packages-split/glide/usr/bin/glide' has relocations in .text [textrel]
-INSANE_SKIP:${PN} += "textrel"
-
-# for aarch64 ends with textrel in ${PN}-ptest
-# http://errors.yoctoproject.org/Errors/Details/185633/
-# ERROR: QA Issue: ELF binary '/work/aarch64-oe-linux/glide/0.13.1-r0/packages-split/glide-ptest/usr/lib/glide/ptest/github.com/Masterminds/glide/glide.test' has relocations in .text
-# ELF binary '/work/aarch64-oe-linux/glide/0.13.1-r0/packages-split/glide-ptest/usr/lib/glide/ptest/github.com/Masterminds/glide/dependency/dependency.test' has relocations in .text
-# ELF binary '/work/aarch64-oe-linux/glide/0.13.1-r0/packages-split/glide-ptest/usr/lib/glide/ptest/github.com/Masterminds/glide/repo/repo.test' has relocations in .text
-# ELF binary '/work/aarch64-oe-linux/glide/0.13.1-r0/packages-split/glide-ptest/usr/lib/glide/ptest/github.com/Masterminds/glide/mirrors/mirrors.test' has relocations in .text
-# ELF binary '/work/aarch64-oe-linux/glide/0.13.1-r0/packages-split/glide-ptest/usr/lib/glide/ptest/github.com/Masterminds/glide/cfg/cfg.test' has relocations in .text
-# ELF binary '/work/aarch64-oe-linux/glide/0.13.1-r0/packages-split/glide-ptest/usr/lib/glide/ptest/github.com/Masterminds/glide/godep/strip/strip.test' has relocations in .text
-# ELF binary '/work/aarch64-oe-linux/glide/0.13.1-r0/packages-split/glide-ptest/usr/lib/glide/ptest/github.com/Masterminds/glide/path/path.test' has relocations in .text
-# ELF binary '/work/aarch64-oe-linux/glide/0.13.1-r0/packages-split/glide-ptest/usr/lib/glide/ptest/github.com/Masterminds/glide/tree/tree.test' has relocations in .text
-# ELF binary '/work/aarch64-oe-linux/glide/0.13.1-r0/packages-split/glide-ptest/usr/lib/glide/ptest/github.com/Masterminds/glide/util/util.test' has relocations in .text
-# ELF binary '/work/aarch64-oe-linux/glide/0.13.1-r0/packages-split/glide-ptest/usr/lib/glide/ptest/github.com/Masterminds/glide/action/action.test' has relocations in .text
-# ELF binary '/work/aarch64-oe-linux/glide/0.13.1-r0/packages-split/glide-ptest/usr/lib/glide/ptest/github.com/Masterminds/glide/cache/cache.test' has relocations in .text [textrel]
-INSANE_SKIP:${PN}-ptest += "textrel"
-
-# fails to run task compile_ptest_base on mips
-PTEST_ENABLED:mipsarch = "0"
diff --git a/meta/recipes-devtools/gnu-config/gnu-config_git.bb b/meta/recipes-devtools/gnu-config/gnu-config_git.bb
index 0b2bcc0b26..f9d864b987 100644
--- a/meta/recipes-devtools/gnu-config/gnu-config_git.bb
+++ b/meta/recipes-devtools/gnu-config/gnu-config_git.bb
@@ -9,8 +9,8 @@ DEPENDS:class-native = "hostperl-runtime-native"
INHIBIT_DEFAULT_DEPS = "1"
-SRCREV = "f56a7140386d08a531bcfd444d632b28c61a6329"
-PV = "20220516+git${SRCPV}"
+SRCREV = "948ae97ca5703224bd3eada06b7a69f40dd15a02"
+PV = "20240101+git"
SRC_URI = "git://git.savannah.gnu.org/git/config.git;protocol=https;branch=master \
file://gnu-configize.in"
diff --git a/meta/recipes-devtools/go/go-1.18.2.inc b/meta/recipes-devtools/go/go-1.18.2.inc
deleted file mode 100644
index 8f4f47db9e..0000000000
--- a/meta/recipes-devtools/go/go-1.18.2.inc
+++ /dev/null
@@ -1,17 +0,0 @@
-require go-common.inc
-
-FILESEXTRAPATHS:prepend := "${FILE_DIRNAME}/go:"
-
-LIC_FILES_CHKSUM = "file://LICENSE;md5=5d4950ecb7b26d2c5e4e7b4e0dd74707"
-
-SRC_URI += "\
- file://0001-cmd-go-make-content-based-hash-generation-less-pedan.patch \
- file://0003-allow-GOTOOLDIR-to-be-overridden-in-the-environment.patch \
- file://0004-ld-add-soname-to-shareable-objects.patch \
- file://0005-make.bash-override-CC-when-building-dist-and-go_boot.patch \
- file://0006-cmd-dist-separate-host-and-target-builds.patch \
- file://0007-cmd-go-make-GOROOT-precious-by-default.patch \
- file://0001-exec.go-do-not-write-linker-flags-into-buildids.patch \
- file://0001-src-cmd-dist-buildgo.go-do-not-hardcode-host-compile.patch \
-"
-SRC_URI[main.sha256sum] = "2c44d03ea2c34092137ab919ba602f2c261a038d08eb468528a3f3a28e5667e2"
diff --git a/meta/recipes-devtools/go/go-1.22.2.inc b/meta/recipes-devtools/go/go-1.22.2.inc
new file mode 100644
index 0000000000..b399207311
--- /dev/null
+++ b/meta/recipes-devtools/go/go-1.22.2.inc
@@ -0,0 +1,18 @@
+require go-common.inc
+
+FILESEXTRAPATHS:prepend := "${FILE_DIRNAME}/go:"
+
+LIC_FILES_CHKSUM = "file://LICENSE;md5=5d4950ecb7b26d2c5e4e7b4e0dd74707"
+
+SRC_URI += "\
+ file://0001-cmd-go-make-content-based-hash-generation-less-pedan.patch \
+ file://0002-cmd-go-Allow-GOTOOLDIR-to-be-overridden-in-the-envir.patch \
+ file://0003-ld-add-soname-to-shareable-objects.patch \
+ file://0004-make.bash-override-CC-when-building-dist-and-go_boot.patch \
+ file://0005-cmd-dist-separate-host-and-target-builds.patch \
+ file://0006-cmd-go-make-GOROOT-precious-by-default.patch \
+ file://0007-exec.go-filter-out-build-specific-paths-from-linker-.patch \
+ file://0008-src-cmd-dist-buildgo.go-do-not-hardcode-host-compile.patch \
+ file://0009-go-Filter-build-paths-on-staticly-linked-arches.patch \
+"
+SRC_URI[main.sha256sum] = "374ea82b289ec738e968267cac59c7d5ff180f9492250254784b2044e90df5a9"
diff --git a/meta/recipes-devtools/go/go-binary-native_1.18.2.bb b/meta/recipes-devtools/go/go-binary-native_1.18.2.bb
deleted file mode 100644
index c5514c5fdf..0000000000
--- a/meta/recipes-devtools/go/go-binary-native_1.18.2.bb
+++ /dev/null
@@ -1,46 +0,0 @@
-# This recipe is for bootstrapping our go-cross from a prebuilt binary of Go from golang.org.
-
-SUMMARY = "Go programming language compiler (upstream binary for bootstrap)"
-HOMEPAGE = " http://golang.org/"
-LICENSE = "BSD-3-Clause"
-LIC_FILES_CHKSUM = "file://LICENSE;md5=5d4950ecb7b26d2c5e4e7b4e0dd74707"
-
-PROVIDES = "go-native"
-
-SRC_URI = "https://dl.google.com/go/go${PV}.${BUILD_GOOS}-${BUILD_GOARCH}.tar.gz;name=go_${BUILD_GOTUPLE}"
-SRC_URI[go_linux_amd64.sha256sum] = "e54bec97a1a5d230fc2f9ad0880fcbabb5888f30ed9666eca4a91c5a32e86cbc"
-SRC_URI[go_linux_arm64.sha256sum] = "fc4ad28d0501eaa9c9d6190de3888c9d44d8b5fb02183ce4ae93713f67b8a35b"
-
-UPSTREAM_CHECK_URI = "https://golang.org/dl/"
-UPSTREAM_CHECK_REGEX = "go(?P<pver>\d+(\.\d+)+)\.linux"
-
-S = "${WORKDIR}/go"
-
-inherit goarch native
-
-do_compile() {
- :
-}
-
-make_wrapper() {
- rm -f ${D}${bindir}/$1
- cat <<END >${D}${bindir}/$1
-#!/bin/bash
-here=\`dirname \$0\`
-export GOROOT="${GOROOT:-\`readlink -f \$here/../lib/go\`}"
-\$here/../lib/go/bin/$1 "\$@"
-END
- chmod +x ${D}${bindir}/$1
-}
-
-do_install() {
- find ${S} -depth -type d -name testdata -exec rm -rf {} +
-
- install -d ${D}${bindir} ${D}${libdir}/go
- cp --preserve=mode,timestamps -R ${S}/ ${D}${libdir}/
-
- for f in ${S}/bin/*
- do
- make_wrapper `basename $f`
- done
-}
diff --git a/meta/recipes-devtools/go/go-binary-native_1.22.2.bb b/meta/recipes-devtools/go/go-binary-native_1.22.2.bb
new file mode 100644
index 0000000000..0f00509f03
--- /dev/null
+++ b/meta/recipes-devtools/go/go-binary-native_1.22.2.bb
@@ -0,0 +1,50 @@
+# This recipe is for bootstrapping our go-cross from a prebuilt binary of Go from golang.org.
+
+SUMMARY = "Go programming language compiler (upstream binary for bootstrap)"
+HOMEPAGE = " http://golang.org/"
+LICENSE = "BSD-3-Clause"
+LIC_FILES_CHKSUM = "file://LICENSE;md5=5d4950ecb7b26d2c5e4e7b4e0dd74707"
+
+PROVIDES = "go-native"
+
+# Checksums available at https://go.dev/dl/
+SRC_URI = "https://dl.google.com/go/go${PV}.${BUILD_GOOS}-${BUILD_GOARCH}.tar.gz;name=go_${BUILD_GOTUPLE}"
+SRC_URI[go_linux_amd64.sha256sum] = "5901c52b7a78002aeff14a21f93e0f064f74ce1360fce51c6ee68cd471216a17"
+SRC_URI[go_linux_arm64.sha256sum] = "36e720b2d564980c162a48c7e97da2e407dfcc4239e1e58d98082dfa2486a0c1"
+SRC_URI[go_linux_ppc64le.sha256sum] = "251a8886c5113be6490bdbb955ddee98763b49c9b1bf4c8364c02d3b482dab00"
+
+UPSTREAM_CHECK_URI = "https://golang.org/dl/"
+UPSTREAM_CHECK_REGEX = "go(?P<pver>\d+(\.\d+)+)\.linux"
+
+CVE_PRODUCT = "golang:go"
+
+S = "${WORKDIR}/go"
+
+inherit goarch native
+
+do_compile() {
+ :
+}
+
+make_wrapper() {
+ rm -f ${D}${bindir}/$1
+ cat <<END >${D}${bindir}/$1
+#!/bin/bash
+here=\`dirname \$0\`
+export GOROOT="${GOROOT:-\`readlink -f \$here/../lib/go\`}"
+\$here/../lib/go/bin/$1 "\$@"
+END
+ chmod +x ${D}${bindir}/$1
+}
+
+do_install() {
+ find ${S} -depth -type d -name testdata -exec rm -rf {} +
+
+ install -d ${D}${bindir} ${D}${libdir}/go
+ cp --preserve=mode,timestamps -R ${S}/ ${D}${libdir}/
+
+ for f in ${S}/bin/*
+ do
+ make_wrapper `basename $f`
+ done
+}
diff --git a/meta/recipes-devtools/go/go-common.inc b/meta/recipes-devtools/go/go-common.inc
index 83f8db7b39..db165792dc 100644
--- a/meta/recipes-devtools/go/go-common.inc
+++ b/meta/recipes-devtools/go/go-common.inc
@@ -19,6 +19,9 @@ S = "${WORKDIR}/go"
B = "${S}"
UPSTREAM_CHECK_REGEX = "(?P<pver>\d+(\.\d+)+)\.src\.tar"
+# all recipe variants are created from the same product
+CVE_PRODUCT = "golang:go"
+
INHIBIT_PACKAGE_DEBUG_SPLIT = "1"
SSTATE_SCAN_CMD = "true"
diff --git a/meta/recipes-devtools/go/go-cross-canadian.inc b/meta/recipes-devtools/go/go-cross-canadian.inc
index c1aa987427..dd485b6799 100644
--- a/meta/recipes-devtools/go/go-cross-canadian.inc
+++ b/meta/recipes-devtools/go/go-cross-canadian.inc
@@ -1,14 +1,14 @@
inherit cross-canadian
-DEPENDS = "go-native virtual/${HOST_PREFIX}go-crosssdk virtual/nativesdk-${HOST_PREFIX}go-runtime \
- virtual/${HOST_PREFIX}gcc-crosssdk virtual/nativesdk-libc \
+DEPENDS = "go-native virtual/${HOST_PREFIX}go virtual/nativesdk-${HOST_PREFIX}go-runtime \
+ virtual/${HOST_PREFIX}gcc virtual/nativesdk-libc \
virtual/nativesdk-${HOST_PREFIX}compilerlibs"
PN = "go-cross-canadian-${TRANSLATED_TARGET_ARCH}"
# it uses gcc on build machine during go-cross-canadian bootstrap, but
# the gcc version may be old and not support option '-fmacro-prefix-map'
# which is one of default values of DEBUG_PREFIX_MAP
-DEBUG_PREFIX_MAP = "-fdebug-prefix-map=${WORKDIR}=/usr/src/debug/${PN}/${EXTENDPE}${PV}-${PR} \
+DEBUG_PREFIX_MAP = "-fdebug-prefix-map=${WORKDIR}=${TARGET_DBGSRC_DIR} \
-fdebug-prefix-map=${STAGING_DIR_HOST}= \
-fdebug-prefix-map=${STAGING_DIR_NATIVE}= \
"
@@ -16,15 +16,15 @@ DEBUG_PREFIX_MAP = "-fdebug-prefix-map=${WORKDIR}=/usr/src/debug/${PN}/${EXTENDP
export GOTOOLDIR_BOOTSTRAP = "${STAGING_LIBDIR_NATIVE}/${HOST_SYS}/go/pkg/tool/${BUILD_GOTUPLE}"
export CGO_CFLAGS = "${CFLAGS}"
export CGO_LDFLAGS = "${LDFLAGS}"
-export GO_LDFLAGS = '-linkmode external -extld ${HOST_PREFIX}gcc -extldflags "--sysroot=${STAGING_DIR_HOST} ${SECURITY_NOPIE_CFLAGS} ${HOST_CC_ARCH} ${LDFLAGS}"'
+export GO_LDFLAGS = '-extld ${HOST_PREFIX}gcc -extldflags "--sysroot=${STAGING_DIR_HOST} ${SECURITY_NOPIE_CFLAGS} ${HOST_CC_ARCH} ${LDFLAGS}"'
do_configure[noexec] = "1"
do_compile() {
- export CC_FOR_${HOST_GOTUPLE}="${HOST_PREFIX}gcc --sysroot=${STAGING_DIR_HOST}${SDKPATHNATIVE} ${SECURITY_NOPIE_CFLAGS}"
- export CXX_FOR_${HOST_GOTUPLE}="${HOST_PREFIX}gxx --sysroot=${STAGING_DIR_HOST}${SDKPATHNATIVE} ${SECURITY_NOPIE_CFLAGS}"
+ export CC_FOR_${HOST_GOTUPLE}="${HOST_PREFIX}gcc --sysroot=${STAGING_DIR_HOST} ${SECURITY_NOPIE_CFLAGS}"
+ export CXX_FOR_${HOST_GOTUPLE}="${HOST_PREFIX}gxx --sysroot=${STAGING_DIR_HOST} ${SECURITY_NOPIE_CFLAGS}"
cd src
- ./make.bash --host-only --no-banner
+ ./make.bash --target-only --no-banner
cd ${B}
}
do_compile[cleandirs] += "${GOTMPDIR} ${B}/bin ${B}/pkg"
diff --git a/meta/recipes-devtools/go/go-cross-canadian_1.18.2.bb b/meta/recipes-devtools/go/go-cross-canadian_1.22.2.bb
index 7ac9449e47..7ac9449e47 100644
--- a/meta/recipes-devtools/go/go-cross-canadian_1.18.2.bb
+++ b/meta/recipes-devtools/go/go-cross-canadian_1.22.2.bb
diff --git a/meta/recipes-devtools/go/go-cross_1.18.2.bb b/meta/recipes-devtools/go/go-cross_1.22.2.bb
index 80b5a03f6c..80b5a03f6c 100644
--- a/meta/recipes-devtools/go/go-cross_1.18.2.bb
+++ b/meta/recipes-devtools/go/go-cross_1.22.2.bb
diff --git a/meta/recipes-devtools/go/go-crosssdk.inc b/meta/recipes-devtools/go/go-crosssdk.inc
index cd23cca2fe..12939005c0 100644
--- a/meta/recipes-devtools/go/go-crosssdk.inc
+++ b/meta/recipes-devtools/go/go-crosssdk.inc
@@ -1,8 +1,10 @@
inherit crosssdk
-DEPENDS = "go-native virtual/${TARGET_PREFIX}gcc-crosssdk virtual/nativesdk-${TARGET_PREFIX}compilerlibs virtual/${TARGET_PREFIX}binutils-crosssdk"
+DEPENDS = "go-native virtual/${TARGET_PREFIX}gcc virtual/nativesdk-${TARGET_PREFIX}compilerlibs virtual/${TARGET_PREFIX}binutils"
PN = "go-crosssdk-${SDK_SYS}"
-PROVIDES = "virtual/${TARGET_PREFIX}go-crosssdk"
+PROVIDES = "virtual/${TARGET_PREFIX}go"
+
+export GOCACHE = "${B}/.cache"
do_configure[noexec] = "1"
diff --git a/meta/recipes-devtools/go/go-crosssdk_1.18.2.bb b/meta/recipes-devtools/go/go-crosssdk_1.22.2.bb
index 1857c8a577..1857c8a577 100644
--- a/meta/recipes-devtools/go/go-crosssdk_1.18.2.bb
+++ b/meta/recipes-devtools/go/go-crosssdk_1.22.2.bb
diff --git a/meta/recipes-devtools/go/go-native_1.18.2.bb b/meta/recipes-devtools/go/go-native_1.18.2.bb
deleted file mode 100644
index 76c0ab73a6..0000000000
--- a/meta/recipes-devtools/go/go-native_1.18.2.bb
+++ /dev/null
@@ -1,58 +0,0 @@
-# This recipe builds a native Go (written in Go) by first building an old Go 1.4
-# (written in C). However this old Go does not support all hosts platforms.
-
-require go-${PV}.inc
-
-inherit native
-
-SRC_URI:append = " https://dl.google.com/go/go1.4-bootstrap-20171003.tar.gz;name=bootstrap;subdir=go1.4"
-SRC_URI[bootstrap.sha256sum] = "f4ff5b5eb3a3cae1c993723f3eab519c5bae18866b5e5f96fe1102f0cb5c3e52"
-
-export GOOS = "${BUILD_GOOS}"
-export GOARCH = "${BUILD_GOARCH}"
-CC = "${@d.getVar('BUILD_CC').strip()}"
-
-GOMAKEARGS ?= "--no-banner"
-
-do_configure() {
- cd ${WORKDIR}/go1.4/go/src
- CGO_ENABLED=0 GOROOT=${WORKDIR}/go1.4/go ./make.bash
-}
-
-do_compile() {
- export GOROOT_FINAL="${libdir_native}/go"
- export GOROOT_BOOTSTRAP="${WORKDIR}/go1.4/go"
-
- cd src
- ./make.bash ${GOMAKEARGS}
- cd ${B}
-}
-do_compile[cleandirs] += "${GOTMPDIR} ${B}/bin"
-
-make_wrapper() {
- rm -f ${D}${bindir}/$2$3
- cat <<END >${D}${bindir}/$2$3
-#!/bin/bash
-here=\`dirname \$0\`
-export GOROOT="${GOROOT:-\`readlink -f \$here/../lib/go\`}"
-\$here/../lib/go/bin/$1 "\$@"
-END
- chmod +x ${D}${bindir}/$2
-}
-
-do_install() {
- install -d ${D}${libdir}/go
- cp --preserve=mode,timestamps -R ${B}/pkg ${D}${libdir}/go/
- install -d ${D}${libdir}/go/src
- (cd ${S}/src; for d in *; do \
- [ -d $d ] && cp -a ${S}/src/$d ${D}${libdir}/go/src/; \
- done)
- find ${D}${libdir}/go/src -depth -type d -name testdata -exec rm -rf {} \;
- install -d ${D}${bindir} ${D}${libdir}/go/bin
- for f in ${B}/bin/*
- do
- base=`basename $f`
- install -m755 $f ${D}${libdir}/go/bin
- make_wrapper $base $base
- done
-}
diff --git a/meta/recipes-devtools/go/go-native_1.22.2.bb b/meta/recipes-devtools/go/go-native_1.22.2.bb
new file mode 100644
index 0000000000..ddf25b2c9b
--- /dev/null
+++ b/meta/recipes-devtools/go/go-native_1.22.2.bb
@@ -0,0 +1,58 @@
+# This recipe builds a native Go (written in Go) by first building an old Go 1.4
+# (written in C). However this old Go does not support all hosts platforms.
+
+require go-${PV}.inc
+
+inherit native
+
+SRC_URI += "https://dl.google.com/go/go1.4-bootstrap-20171003.tar.gz;name=bootstrap;subdir=go1.4"
+SRC_URI[bootstrap.sha256sum] = "f4ff5b5eb3a3cae1c993723f3eab519c5bae18866b5e5f96fe1102f0cb5c3e52"
+
+export GOOS = "${BUILD_GOOS}"
+export GOARCH = "${BUILD_GOARCH}"
+CC = "${@d.getVar('BUILD_CC').strip()}"
+
+GOMAKEARGS ?= "--no-banner"
+
+do_configure() {
+ cd ${WORKDIR}/go1.4/go/src
+ CGO_ENABLED=0 GOROOT=${WORKDIR}/go1.4/go ./make.bash
+}
+
+do_compile() {
+ export GOROOT_FINAL="${libdir_native}/go"
+ export GOROOT_BOOTSTRAP="${WORKDIR}/go1.4/go"
+
+ cd src
+ ./make.bash ${GOMAKEARGS}
+ cd ${B}
+}
+do_compile[cleandirs] += "${GOTMPDIR} ${B}/bin"
+
+make_wrapper() {
+ rm -f ${D}${bindir}/$2$3
+ cat <<END >${D}${bindir}/$2$3
+#!/bin/bash
+here=\`dirname \$0\`
+export GOROOT="${GOROOT:-\`readlink -f \$here/../lib/go\`}"
+\$here/../lib/go/bin/$1 "\$@"
+END
+ chmod +x ${D}${bindir}/$2
+}
+
+do_install() {
+ install -d ${D}${libdir}/go
+ cp --preserve=mode,timestamps -R ${B}/pkg ${D}${libdir}/go/
+ install -d ${D}${libdir}/go/src
+ (cd ${S}/src; for d in *; do \
+ [ -d $d ] && cp -a ${S}/src/$d ${D}${libdir}/go/src/; \
+ done)
+ find ${D}${libdir}/go/src -depth -type d -name testdata -exec rm -rf {} \;
+ install -d ${D}${bindir} ${D}${libdir}/go/bin
+ for f in ${B}/bin/*
+ do
+ base=`basename $f`
+ install -m755 $f ${D}${libdir}/go/bin
+ make_wrapper $base $base
+ done
+}
diff --git a/meta/recipes-devtools/go/go-runtime.inc b/meta/recipes-devtools/go/go-runtime.inc
index e18339cddb..3f1e795dd9 100644
--- a/meta/recipes-devtools/go/go-runtime.inc
+++ b/meta/recipes-devtools/go/go-runtime.inc
@@ -1,5 +1,5 @@
DEPENDS = "virtual/${TUNE_PKGARCH}-go go-native"
-DEPENDS:class-nativesdk = "virtual/${TARGET_PREFIX}go-crosssdk"
+DEPENDS:class-nativesdk = "virtual/${TARGET_PREFIX}go"
PROVIDES = "virtual/${TARGET_PREFIX}go-runtime"
DEBUG_PREFIX_MAP = "\
@@ -50,6 +50,8 @@ do_install() {
rm -rf ${D}${libdir}/go/pkg/tool
rm -rf ${D}${libdir}/go/pkg/obj
rm -rf ${D}${libdir}/go/pkg/bootstrap
+ # the cmd directory is built for the native arch so if BUILD == TARGET
+ rm -rf ${D}${libdir}/go/pkg/${BUILD_GOTUPLE}/cmd
find src -mindepth 1 -maxdepth 1 -type d | while read srcdir; do
cp --preserve=mode,timestamps -R $srcdir ${D}${libdir}/go/src/
done
diff --git a/meta/recipes-devtools/go/go-runtime_1.18.2.bb b/meta/recipes-devtools/go/go-runtime_1.22.2.bb
index 63464a1501..63464a1501 100644
--- a/meta/recipes-devtools/go/go-runtime_1.18.2.bb
+++ b/meta/recipes-devtools/go/go-runtime_1.22.2.bb
diff --git a/meta/recipes-devtools/go/go-target.inc b/meta/recipes-devtools/go/go-target.inc
index ed09cfe806..981c7abd31 100644
--- a/meta/recipes-devtools/go/go-target.inc
+++ b/meta/recipes-devtools/go/go-target.inc
@@ -1,5 +1,5 @@
DEPENDS = "virtual/${TUNE_PKGARCH}-go go-native"
-DEPENDS:class-nativesdk = "virtual/${TARGET_PREFIX}go-crosssdk go-native"
+DEPENDS:class-nativesdk = "virtual/${TARGET_PREFIX}go go-native"
DEBUG_PREFIX_MAP = "\
-fdebug-prefix-map=${STAGING_DIR_HOST}= \
@@ -13,9 +13,7 @@ export CGO_CXXFLAGS = "${CXXFLAGS}"
export CGO_LDFLAGS = "${@ ' '.join(filter(lambda f: not f.startswith('-fdebug-prefix-map'), d.getVar('LDFLAGS').split())) }"
export GOCACHE = "${B}/.cache"
-GO_LDFLAGS = ""
-GO_LDFLAGS:class-nativesdk = " -linkmode external"
-export GO_LDFLAGS
+export GO_LDFLAGS = ""
export GOBUILDFLAGS = "-gcflags=-trimpath=$GOPATH -asmflags=-trimpath=$GOPATH -trimpath"
CC:append:class-nativesdk = " ${SECURITY_NOPIE_CFLAGS}"
diff --git a/meta/recipes-devtools/go/go/0001-cmd-go-make-content-based-hash-generation-less-pedan.patch b/meta/recipes-devtools/go/go/0001-cmd-go-make-content-based-hash-generation-less-pedan.patch
index f9db5df4eb..564837c7cd 100644
--- a/meta/recipes-devtools/go/go/0001-cmd-go-make-content-based-hash-generation-less-pedan.patch
+++ b/meta/recipes-devtools/go/go/0001-cmd-go-make-content-based-hash-generation-less-pedan.patch
@@ -1,7 +1,7 @@
-From 61de6067f5ad127d246543527947a357647f95e5 Mon Sep 17 00:00:00 2001
+From 9a6c5040cbcd88b10ceb8ceaebc8d6158c086670 Mon Sep 17 00:00:00 2001
From: Khem Raj <raj.khem@gmail.com>
Date: Mon, 28 Mar 2022 10:59:03 -0700
-Subject: [PATCH] cmd/go: make content-based hash generation less pedantic
+Subject: [PATCH 1/9] cmd/go: make content-based hash generation less pedantic
Go 1.10's build tool now uses content-based hashes to
determine when something should be built or re-built.
@@ -25,25 +25,30 @@ Upstream-Status: Inappropriate [OE specific]
Signed-off-by: Alex Kube <alexander.j.kube@gmail.com>
Signed-off-by: Matt Madison <matt@madison.systems>
Signed-off-by: Khem Raj <raj.khem@gmail.com>
+Signed-off-by: Jose Quaresma <jose.quaresma@foundries.io>
---
src/cmd/go/internal/envcmd/env.go | 2 +-
- src/cmd/go/internal/work/exec.go | 42 +++++++++++++++++++++++++------
- 2 files changed, 35 insertions(+), 9 deletions(-)
+ src/cmd/go/internal/work/exec.go | 44 ++++++++++++++++++++++++-------
+ 2 files changed, 36 insertions(+), 10 deletions(-)
+diff --git a/src/cmd/go/internal/envcmd/env.go b/src/cmd/go/internal/envcmd/env.go
+index c7c2e83e0f..4a90d9da5c 100644
--- a/src/cmd/go/internal/envcmd/env.go
+++ b/src/cmd/go/internal/envcmd/env.go
-@@ -169,7 +169,7 @@ func ExtraEnvVars() []cfg.EnvVar {
- func ExtraEnvVarsCostly() []cfg.EnvVar {
- var b work.Builder
- b.Init()
+@@ -189,7 +189,7 @@ func ExtraEnvVarsCostly() []cfg.EnvVar {
+ }
+ }()
+
- cppflags, cflags, cxxflags, fflags, ldflags, err := b.CFlags(&load.Package{})
+ cppflags, cflags, cxxflags, fflags, ldflags, err := b.CFlags(&load.Package{}, false)
if err != nil {
// Should not happen - b.CFlags was given an empty package.
fmt.Fprintf(os.Stderr, "go: invalid cflags: %v\n", err)
+diff --git a/src/cmd/go/internal/work/exec.go b/src/cmd/go/internal/work/exec.go
+index e05471b06c..9724cd07d0 100644
--- a/src/cmd/go/internal/work/exec.go
+++ b/src/cmd/go/internal/work/exec.go
-@@ -213,6 +213,8 @@ func (b *Builder) Do(ctx context.Context
+@@ -232,6 +232,8 @@ func (b *Builder) Do(ctx context.Context, root *Action) {
writeActionGraph()
}
@@ -52,7 +57,7 @@ Signed-off-by: Khem Raj <raj.khem@gmail.com>
// buildActionID computes the action ID for a build action.
func (b *Builder) buildActionID(a *Action) cache.ActionID {
p := a.Package
-@@ -234,7 +236,7 @@ func (b *Builder) buildActionID(a *Actio
+@@ -253,7 +255,7 @@ func (b *Builder) buildActionID(a *Action) cache.ActionID {
if p.Module != nil {
fmt.Fprintf(h, "module %s@%s\n", p.Module.Path, p.Module.Version)
}
@@ -61,7 +66,7 @@ Signed-off-by: Khem Raj <raj.khem@gmail.com>
// The Go compiler always hides the exact value of $GOROOT
// when building things in GOROOT.
//
-@@ -266,9 +268,9 @@ func (b *Builder) buildActionID(a *Actio
+@@ -285,9 +287,9 @@ func (b *Builder) buildActionID(a *Action) cache.ActionID {
}
if len(p.CgoFiles)+len(p.SwigFiles)+len(p.SwigCXXFiles) > 0 {
fmt.Fprintf(h, "cgo %q\n", b.toolID("cgo"))
@@ -69,50 +74,52 @@ Signed-off-by: Khem Raj <raj.khem@gmail.com>
+ cppflags, cflags, cxxflags, fflags, ldflags, _ := b.CFlags(p, true)
- ccExe := b.ccExe()
-+ ccExe := filterCompilerFlags(b.ccExe())
++ ccExe := filterCompilerFlags(b.ccExe(), true)
fmt.Fprintf(h, "CC=%q %q %q %q\n", ccExe, cppflags, cflags, ldflags)
// Include the C compiler tool ID so that if the C
// compiler changes we rebuild the package.
-@@ -281,14 +283,14 @@ func (b *Builder) buildActionID(a *Actio
- }
+@@ -295,14 +297,14 @@ func (b *Builder) buildActionID(a *Action) cache.ActionID {
+ fmt.Fprintf(h, "CC ID=%q\n", ccID)
}
if len(p.CXXFiles)+len(p.SwigCXXFiles) > 0 {
- cxxExe := b.cxxExe()
-+ cxxExe := filterCompilerFlags(b.cxxExe())
++ cxxExe := filterCompilerFlags(b.cxxExe(), true)
fmt.Fprintf(h, "CXX=%q %q\n", cxxExe, cxxflags)
- if cxxID, err := b.gccToolID(cxxExe[0], "c++"); err == nil {
+ if cxxID, _, err := b.gccToolID(cxxExe[0], "c++"); err == nil {
fmt.Fprintf(h, "CXX ID=%q\n", cxxID)
}
}
if len(p.FFiles) > 0 {
- fcExe := b.fcExe()
-+ fcExe := filterCompilerFlags(b.fcExe())
++ fcExe := filterCompilerFlags(b.fcExe(), true)
fmt.Fprintf(h, "FC=%q %q\n", fcExe, fflags)
- if fcID, err := b.gccToolID(fcExe[0], "f95"); err == nil {
+ if fcID, _, err := b.gccToolID(fcExe[0], "f95"); err == nil {
fmt.Fprintf(h, "FC ID=%q\n", fcID)
-@@ -304,7 +306,7 @@ func (b *Builder) buildActionID(a *Actio
- fmt.Fprintf(h, "fuzz %q\n", fuzzFlags)
+@@ -319,7 +321,7 @@ func (b *Builder) buildActionID(a *Action) cache.ActionID {
}
}
-- fmt.Fprintf(h, "modinfo %q\n", p.Internal.BuildInfo)
-+ //fmt.Fprintf(h, "modinfo %q\n", p.Internal.BuildInfo)
+ if p.Internal.BuildInfo != nil {
+- fmt.Fprintf(h, "modinfo %q\n", p.Internal.BuildInfo.String())
++ //fmt.Fprintf(h, "modinfo %q\n", p.Internal.BuildInfo.String())
+ }
// Configuration specific to compiler toolchain.
- switch cfg.BuildToolchainName {
-@@ -2679,8 +2681,23 @@ func envList(key, def string) []string {
+@@ -2679,8 +2681,25 @@ func envList(key, def string) []string {
return args
}
+var filterFlags = os.Getenv("CGO_PEDANTIC") == ""
+
-+func filterCompilerFlags(flags []string) []string {
++func filterCompilerFlags(flags []string, keepfirst bool) []string {
+ var newflags []string
++ var realkeepfirst bool = keepfirst
+ if !filterFlags {
+ return flags
+ }
+ for _, flag := range flags {
-+ if strings.HasPrefix(flag, "-m") {
++ if strings.HasPrefix(flag, "-m") || realkeepfirst {
+ newflags = append(newflags, flag)
++ realkeepfirst = false
+ }
+ }
+ return newflags
@@ -121,38 +128,41 @@ Signed-off-by: Khem Raj <raj.khem@gmail.com>
// CFlags returns the flags to use when invoking the C, C++ or Fortran compilers, or cgo.
-func (b *Builder) CFlags(p *load.Package) (cppflags, cflags, cxxflags, fflags, ldflags []string, err error) {
+func (b *Builder) CFlags(p *load.Package, filtered bool) (cppflags, cflags, cxxflags, fflags, ldflags []string, err error) {
- defaults := "-g -O2"
-
if cppflags, err = buildFlags("CPPFLAGS", "", p.CgoCPPFLAGS, checkCompilerFlags); err != nil {
-@@ -2698,6 +2715,13 @@ func (b *Builder) CFlags(p *load.Package
- if ldflags, err = buildFlags("LDFLAGS", defaults, p.CgoLDFLAGS, checkLinkerFlags); err != nil {
+ return
+ }
+@@ -2696,6 +2715,13 @@ func (b *Builder) CFlags(p *load.Package) (cppflags, cflags, cxxflags, fflags, l
+ if ldflags, err = buildFlags("LDFLAGS", defaultCFlags, p.CgoLDFLAGS, checkLinkerFlags); err != nil {
return
}
+ if filtered {
-+ cppflags = filterCompilerFlags(cppflags)
-+ cflags = filterCompilerFlags(cflags)
-+ cxxflags = filterCompilerFlags(cxxflags)
-+ fflags = filterCompilerFlags(fflags)
-+ ldflags = filterCompilerFlags(ldflags)
++ cppflags = filterCompilerFlags(cppflags, false)
++ cflags = filterCompilerFlags(cflags, false)
++ cxxflags = filterCompilerFlags(cxxflags, false)
++ fflags = filterCompilerFlags(fflags, false)
++ ldflags = filterCompilerFlags(ldflags, false)
+ }
return
}
-@@ -2713,7 +2737,7 @@ var cgoRe = lazyregexp.New(`[/\\:]`)
-
- func (b *Builder) cgo(a *Action, cgoExe, objdir string, pcCFLAGS, pcLDFLAGS, cgofiles, gccfiles, gxxfiles, mfiles, ffiles []string) (outGo, outObj []string, err error) {
+@@ -2713,7 +2739,7 @@ func (b *Builder) cgo(a *Action, cgoExe, objdir string, pcCFLAGS, pcLDFLAGS, cgo
p := a.Package
+ sh := b.Shell(a)
+
- cgoCPPFLAGS, cgoCFLAGS, cgoCXXFLAGS, cgoFFLAGS, cgoLDFLAGS, err := b.CFlags(p)
+ cgoCPPFLAGS, cgoCFLAGS, cgoCXXFLAGS, cgoFFLAGS, cgoLDFLAGS, err := b.CFlags(p, false)
if err != nil {
return nil, nil, err
}
-@@ -3174,7 +3198,7 @@ func (b *Builder) swigIntSize(objdir str
+@@ -3268,7 +3294,7 @@ func (b *Builder) swigOne(a *Action, file, objdir string, pcCFLAGS []string, cxx
+ p := a.Package
+ sh := b.Shell(a)
- // Run SWIG on one SWIG input file.
- func (b *Builder) swigOne(a *Action, p *load.Package, file, objdir string, pcCFLAGS []string, cxx bool, intgosize string) (outGo, outC string, err error) {
- cgoCPPFLAGS, cgoCFLAGS, cgoCXXFLAGS, _, _, err := b.CFlags(p)
+ cgoCPPFLAGS, cgoCFLAGS, cgoCXXFLAGS, _, _, err := b.CFlags(p, false)
if err != nil {
return "", "", err
}
+--
+2.44.0
+
diff --git a/meta/recipes-devtools/go/go/0001-exec.go-do-not-write-linker-flags-into-buildids.patch b/meta/recipes-devtools/go/go/0001-exec.go-do-not-write-linker-flags-into-buildids.patch
deleted file mode 100644
index f117152f2a..0000000000
--- a/meta/recipes-devtools/go/go/0001-exec.go-do-not-write-linker-flags-into-buildids.patch
+++ /dev/null
@@ -1,36 +0,0 @@
-From bdd69b55387f80c8df18d0af5008bf5e1a66be6a Mon Sep 17 00:00:00 2001
-From: Alexander Kanavin <alex.kanavin@gmail.com>
-Date: Mon, 23 Nov 2020 19:22:04 +0000
-Subject: [PATCH] exec.go: do not write linker flags into buildids
-
-The flags can contain build-specific paths, breaking reproducibility.
-
-To make this acceptable to upstream, we probably need to trim the flags,
-removing those known to be buildhost-specific.
-
-Upstream-Status: Inappropriate [needs upstream discussion]
-Signed-off-by: Alexander Kanavin <alex.kanavin@gmail.com>
----
- src/cmd/go/internal/work/exec.go | 4 ++--
- 1 file changed, 2 insertions(+), 2 deletions(-)
-
---- a/src/cmd/go/internal/work/exec.go
-+++ b/src/cmd/go/internal/work/exec.go
-@@ -1274,7 +1274,7 @@ func (b *Builder) linkActionID(a *Action
- }
-
- // Toolchain-dependent configuration, shared with b.linkSharedActionID.
-- b.printLinkerConfig(h, p)
-+ //b.printLinkerConfig(h, p)
-
- // Input files.
- for _, a1 := range a.Deps {
-@@ -1568,7 +1568,7 @@ func (b *Builder) linkSharedActionID(a *
- fmt.Fprintf(h, "goos %s goarch %s\n", cfg.Goos, cfg.Goarch)
-
- // Toolchain-dependent configuration, shared with b.linkActionID.
-- b.printLinkerConfig(h, nil)
-+ //b.printLinkerConfig(h, nil)
-
- // Input files.
- for _, a1 := range a.Deps {
diff --git a/meta/recipes-devtools/go/go/0001-src-cmd-dist-buildgo.go-do-not-hardcode-host-compile.patch b/meta/recipes-devtools/go/go/0001-src-cmd-dist-buildgo.go-do-not-hardcode-host-compile.patch
deleted file mode 100644
index ef1cc6716a..0000000000
--- a/meta/recipes-devtools/go/go/0001-src-cmd-dist-buildgo.go-do-not-hardcode-host-compile.patch
+++ /dev/null
@@ -1,39 +0,0 @@
-From 2055a46b396e272616c0b2273903e02c3b49a2ff Mon Sep 17 00:00:00 2001
-From: Alexander Kanavin <alex.kanavin@gmail.com>
-Date: Tue, 10 Nov 2020 16:33:27 +0000
-Subject: [PATCH] src/cmd/dist/buildgo.go: do not hardcode host compilers into
- target binaries
-
-These come from $CC/$CXX on the build host and are not useful on targets;
-additionally as they contain host specific paths, this helps reproducibility.
-
-Upstream-Status: Inappropriate [needs upstream discussion]
-Signed-off-by: Alexander Kanavin <alex.kanavin@gmail.com>
----
- src/cmd/dist/buildgo.go | 8 ++++----
- 1 file changed, 4 insertions(+), 4 deletions(-)
-
---- a/src/cmd/dist/buildgo.go
-+++ b/src/cmd/dist/buildgo.go
-@@ -34,8 +34,8 @@ func mkzdefaultcc(dir, file string) {
- fmt.Fprintf(&buf, "package cfg\n")
- fmt.Fprintln(&buf)
- fmt.Fprintf(&buf, "const DefaultPkgConfig = `%s`\n", defaultpkgconfig)
-- buf.WriteString(defaultCCFunc("DefaultCC", defaultcc))
-- buf.WriteString(defaultCCFunc("DefaultCXX", defaultcxx))
-+ buf.WriteString(defaultCCFunc("DefaultCC", map[string]string{"":"gcc"}))
-+ buf.WriteString(defaultCCFunc("DefaultCXX", map[string]string{"":"g++"}))
- writefile(buf.String(), file, writeSkipSame)
- return
- }
-@@ -46,8 +46,8 @@ func mkzdefaultcc(dir, file string) {
- fmt.Fprintf(&buf, "package main\n")
- fmt.Fprintln(&buf)
- fmt.Fprintf(&buf, "const defaultPkgConfig = `%s`\n", defaultpkgconfig)
-- buf.WriteString(defaultCCFunc("defaultCC", defaultcc))
-- buf.WriteString(defaultCCFunc("defaultCXX", defaultcxx))
-+ buf.WriteString(defaultCCFunc("defaultCC", map[string]string{"":"gcc"}))
-+ buf.WriteString(defaultCCFunc("defaultCXX", map[string]string{"":"g++"}))
- writefile(buf.String(), file, writeSkipSame)
- }
-
diff --git a/meta/recipes-devtools/go/go/0002-cmd-go-Allow-GOTOOLDIR-to-be-overridden-in-the-envir.patch b/meta/recipes-devtools/go/go/0002-cmd-go-Allow-GOTOOLDIR-to-be-overridden-in-the-envir.patch
new file mode 100644
index 0000000000..001c94a4e7
--- /dev/null
+++ b/meta/recipes-devtools/go/go/0002-cmd-go-Allow-GOTOOLDIR-to-be-overridden-in-the-envir.patch
@@ -0,0 +1,56 @@
+From e3f9a8a69d3a340c1a1d0bba566e71f20f635a43 Mon Sep 17 00:00:00 2001
+From: Alex Kube <alexander.j.kube@gmail.com>
+Date: Wed, 23 Oct 2019 21:15:37 +0430
+Subject: [PATCH 2/9] cmd/go: Allow GOTOOLDIR to be overridden in the
+ environment
+
+to allow for split host/target build roots
+
+Adapted to Go 1.13 from patches originally submitted to
+the meta/recipes-devtools/go tree by
+Matt Madison <matt@madison.systems>.
+
+Upstream-Status: Inappropriate [OE specific]
+
+Signed-off-by: Alexander J Kube <alexander.j.kube@gmail.com>
+Signed-off-by: Jose Quaresma <jose.quaresma@foundries.io>
+---
+ src/cmd/dist/build.go | 4 +++-
+ src/cmd/go/internal/cfg/cfg.go | 6 +++++-
+ 2 files changed, 8 insertions(+), 2 deletions(-)
+
+diff --git a/src/cmd/dist/build.go b/src/cmd/dist/build.go
+index 32e59b446a..06ee4de8a9 100644
+--- a/src/cmd/dist/build.go
++++ b/src/cmd/dist/build.go
+@@ -259,7 +259,9 @@ func xinit() {
+ }
+ xatexit(rmworkdir)
+
+- tooldir = pathf("%s/pkg/tool/%s_%s", goroot, gohostos, gohostarch)
++ if tooldir = os.Getenv("GOTOOLDIR"); tooldir == "" {
++ tooldir = pathf("%s/pkg/tool/%s_%s", goroot, gohostos, gohostarch)
++ }
+
+ goversion := findgoversion()
+ isRelease = strings.HasPrefix(goversion, "release.") || strings.HasPrefix(goversion, "go")
+diff --git a/src/cmd/go/internal/cfg/cfg.go b/src/cmd/go/internal/cfg/cfg.go
+index a8daa2dfc3..393ada39c9 100644
+--- a/src/cmd/go/internal/cfg/cfg.go
++++ b/src/cmd/go/internal/cfg/cfg.go
+@@ -230,7 +230,11 @@ func SetGOROOT(goroot string, isTestGo bool) {
+ // This matches the initialization of ToolDir in go/build, except for
+ // using ctxt.GOROOT and the installed GOOS and GOARCH rather than the
+ // GOROOT, GOOS, and GOARCH reported by the runtime package.
+- build.ToolDir = filepath.Join(GOROOTpkg, "tool", installedGOOS+"_"+installedGOARCH)
++ if s := os.Getenv("GOTOOLDIR"); s != "" {
++ build.ToolDir = filepath.Clean(s)
++ } else {
++ build.ToolDir = filepath.Join(GOROOTpkg, "tool", installedGOOS+"_"+installedGOARCH)
++ }
+ }
+ }
+ }
+--
+2.44.0
+
diff --git a/meta/recipes-devtools/go/go/0003-allow-GOTOOLDIR-to-be-overridden-in-the-environment.patch b/meta/recipes-devtools/go/go/0003-allow-GOTOOLDIR-to-be-overridden-in-the-environment.patch
deleted file mode 100644
index c3ccffc3e9..0000000000
--- a/meta/recipes-devtools/go/go/0003-allow-GOTOOLDIR-to-be-overridden-in-the-environment.patch
+++ /dev/null
@@ -1,48 +0,0 @@
-From 8512964c0bfdfc3c9c3805743ea7de551a1d476a Mon Sep 17 00:00:00 2001
-From: Alex Kube <alexander.j.kube@gmail.com>
-Date: Wed, 23 Oct 2019 21:15:37 +0430
-Subject: [PATCH] cmd/go: Allow GOTOOLDIR to be overridden in the environment
-
-to allow for split host/target build roots
-
-Adapted to Go 1.13 from patches originally submitted to
-the meta/recipes-devtools/go tree by
-Matt Madison <matt@madison.systems>.
-
-Upstream-Status: Inappropriate [OE specific]
-
-Signed-off-by: Alexander J Kube <alexander.j.kube@gmail.com>
-
----
- src/cmd/dist/build.go | 4 +++-
- src/cmd/go/internal/cfg/cfg.go | 6 +++++-
- 2 files changed, 8 insertions(+), 2 deletions(-)
-
---- a/src/cmd/dist/build.go
-+++ b/src/cmd/dist/build.go
-@@ -251,7 +251,9 @@ func xinit() {
- }
- xatexit(rmworkdir)
-
-- tooldir = pathf("%s/pkg/tool/%s_%s", goroot, gohostos, gohostarch)
-+ if tooldir = os.Getenv("GOTOOLDIR"); tooldir == "" {
-+ tooldir = pathf("%s/pkg/tool/%s_%s", goroot, gohostos, gohostarch)
-+ }
- }
-
- // compilerEnv returns a map from "goos/goarch" to the
---- a/src/cmd/go/internal/cfg/cfg.go
-+++ b/src/cmd/go/internal/cfg/cfg.go
-@@ -76,7 +76,11 @@ func defaultContext() build.Context {
- // variables. This matches the initialization of ToolDir in
- // go/build, except for using ctxt.GOROOT rather than
- // runtime.GOROOT.
-- build.ToolDir = filepath.Join(ctxt.GOROOT, "pkg/tool/"+runtime.GOOS+"_"+runtime.GOARCH)
-+ if s := os.Getenv("GOTOOLDIR"); s != "" {
-+ build.ToolDir = filepath.Clean(s)
-+ } else {
-+ build.ToolDir = filepath.Join(ctxt.GOROOT, "pkg/tool/"+runtime.GOOS+"_"+runtime.GOARCH)
-+ }
- }
-
- ctxt.GOPATH = envOr("GOPATH", gopath(ctxt))
diff --git a/meta/recipes-devtools/go/go/0003-ld-add-soname-to-shareable-objects.patch b/meta/recipes-devtools/go/go/0003-ld-add-soname-to-shareable-objects.patch
new file mode 100644
index 0000000000..9cab2969c8
--- /dev/null
+++ b/meta/recipes-devtools/go/go/0003-ld-add-soname-to-shareable-objects.patch
@@ -0,0 +1,51 @@
+From 7dde77b3ce8138314dd2736604b1b110dbcc0ac1 Mon Sep 17 00:00:00 2001
+From: Alex Kube <alexander.j.kube@gmail.com>
+Date: Wed, 23 Oct 2019 21:16:32 +0430
+Subject: [PATCH 3/9] ld: add soname to shareable objects
+
+so that OE's shared library dependency handling
+can find them.
+
+Adapted to Go 1.13 from patches originally submitted to
+the meta/recipes-devtools/go tree by
+Matt Madison <matt@madison.systems>.
+
+Upstream-Status: Inappropriate [OE specific]
+
+Signed-off-by: Alexander J Kube <alexander.j.kube@gmail.com>
+Signed-off-by: Jose Quaresma <jose.quaresma@foundries.io>
+---
+ src/cmd/link/internal/ld/lib.go | 3 +++
+ 1 file changed, 3 insertions(+)
+
+diff --git a/src/cmd/link/internal/ld/lib.go b/src/cmd/link/internal/ld/lib.go
+index eab74dc328..ae9bbc9093 100644
+--- a/src/cmd/link/internal/ld/lib.go
++++ b/src/cmd/link/internal/ld/lib.go
+@@ -1576,6 +1576,7 @@ func (ctxt *Link) hostlink() {
+ argv = append(argv, "-Wl,-z,relro")
+ }
+ argv = append(argv, "-shared")
++ argv = append(argv, fmt.Sprintf("-Wl,-soname,%s", filepath.Base(*flagOutfile)))
+ if ctxt.HeadType == objabi.Hwindows {
+ argv = addASLRargs(argv, *flagAslr)
+ } else {
+@@ -1591,6 +1592,7 @@ func (ctxt *Link) hostlink() {
+ argv = append(argv, "-Wl,-z,relro")
+ }
+ argv = append(argv, "-shared")
++ argv = append(argv, fmt.Sprintf("-Wl,-soname,%s", filepath.Base(*flagOutfile)))
+ case BuildModePlugin:
+ if ctxt.HeadType == objabi.Hdarwin {
+ argv = append(argv, "-dynamiclib")
+@@ -1599,6 +1601,7 @@ func (ctxt *Link) hostlink() {
+ argv = append(argv, "-Wl,-z,relro")
+ }
+ argv = append(argv, "-shared")
++ argv = append(argv, fmt.Sprintf("-Wl,-soname,%s", filepath.Base(*flagOutfile)))
+ }
+ }
+
+--
+2.44.0
+
diff --git a/meta/recipes-devtools/go/go/0004-ld-add-soname-to-shareable-objects.patch b/meta/recipes-devtools/go/go/0004-ld-add-soname-to-shareable-objects.patch
deleted file mode 100644
index 058fa64225..0000000000
--- a/meta/recipes-devtools/go/go/0004-ld-add-soname-to-shareable-objects.patch
+++ /dev/null
@@ -1,45 +0,0 @@
-From bf5cf5301ae5914498454c87293d1df2e1d8489f Mon Sep 17 00:00:00 2001
-From: Alex Kube <alexander.j.kube@gmail.com>
-Date: Wed, 23 Oct 2019 21:16:32 +0430
-Subject: [PATCH 4/9] ld: add soname to shareable objects
-
-so that OE's shared library dependency handling
-can find them.
-
-Adapted to Go 1.13 from patches originally submitted to
-the meta/recipes-devtools/go tree by
-Matt Madison <matt@madison.systems>.
-
-Upstream-Status: Inappropriate [OE specific]
-
-Signed-off-by: Alexander J Kube <alexander.j.kube@gmail.com>
----
- src/cmd/link/internal/ld/lib.go | 3 +++
- 1 file changed, 3 insertions(+)
-
---- a/src/cmd/link/internal/ld/lib.go
-+++ b/src/cmd/link/internal/ld/lib.go
-@@ -1347,6 +1347,7 @@ func (ctxt *Link) hostlink() {
- argv = append(argv, "-Wl,-z,relro")
- }
- argv = append(argv, "-shared")
-+ argv = append(argv, fmt.Sprintf("-Wl,-soname,%s", filepath.Base(*flagOutfile)))
- if ctxt.HeadType == objabi.Hwindows {
- if *flagAslr {
- argv = addASLRargs(argv)
-@@ -1364,6 +1365,7 @@ func (ctxt *Link) hostlink() {
- argv = append(argv, "-Wl,-z,relro")
- }
- argv = append(argv, "-shared")
-+ argv = append(argv, fmt.Sprintf("-Wl,-soname,%s", filepath.Base(*flagOutfile)))
- case BuildModePlugin:
- if ctxt.HeadType == objabi.Hdarwin {
- argv = append(argv, "-dynamiclib")
-@@ -1372,6 +1374,7 @@ func (ctxt *Link) hostlink() {
- argv = append(argv, "-Wl,-z,relro")
- }
- argv = append(argv, "-shared")
-+ argv = append(argv, fmt.Sprintf("-Wl,-soname,%s", filepath.Base(*flagOutfile)))
- }
- }
-
diff --git a/meta/recipes-devtools/go/go/0004-make.bash-override-CC-when-building-dist-and-go_boot.patch b/meta/recipes-devtools/go/go/0004-make.bash-override-CC-when-building-dist-and-go_boot.patch
new file mode 100644
index 0000000000..8889aef1cf
--- /dev/null
+++ b/meta/recipes-devtools/go/go/0004-make.bash-override-CC-when-building-dist-and-go_boot.patch
@@ -0,0 +1,45 @@
+From 9f59e46991074d3e3c4d00f3971e62bfcd707167 Mon Sep 17 00:00:00 2001
+From: Alex Kube <alexander.j.kube@gmail.com>
+Date: Wed, 23 Oct 2019 21:17:16 +0430
+Subject: [PATCH 4/9] make.bash: override CC when building dist and
+ go_bootstrap
+
+for handling OE cross-canadian builds.
+
+Adapted to Go 1.13 from patches originally submitted to
+the meta/recipes-devtools/go tree by
+Matt Madison <matt@madison.systems>.
+
+Upstream-Status: Inappropriate [OE specific]
+
+Signed-off-by: Alexander J Kube <alexander.j.kube@gmail.com>
+Signed-off-by: Jose Quaresma <jose.quaresma@foundries.io>
+---
+ src/make.bash | 4 ++--
+ 1 file changed, 2 insertions(+), 2 deletions(-)
+
+diff --git a/src/make.bash b/src/make.bash
+index 76ad51624a..074e129a24 100755
+--- a/src/make.bash
++++ b/src/make.bash
+@@ -198,7 +198,7 @@ if [[ "$GOROOT_BOOTSTRAP" == "$GOROOT" ]]; then
+ exit 1
+ fi
+ rm -f cmd/dist/dist
+-GOROOT="$GOROOT_BOOTSTRAP" nogoenv "$GOROOT_BOOTSTRAP/bin/go" build -o cmd/dist/dist ./cmd/dist
++CC="${BUILD_CC:-${CC}}" GOROOT="$GOROOT_BOOTSTRAP" nogoenv "$GOROOT_BOOTSTRAP/bin/go" build -o cmd/dist/dist ./cmd/dist
+
+ # -e doesn't propagate out of eval, so check success by hand.
+ eval $(./cmd/dist/dist env -p || echo FAIL=true)
+@@ -223,7 +223,7 @@ fi
+ # Run dist bootstrap to complete make.bash.
+ # Bootstrap installs a proper cmd/dist, built with the new toolchain.
+ # Throw ours, built with the bootstrap toolchain, away after bootstrap.
+-./cmd/dist/dist bootstrap -a $vflag $GO_DISTFLAGS "$@"
++CC="${BUILD_CC:-${CC}}" ./cmd/dist/dist bootstrap -a $vflag $GO_DISTFLAGS "$@"
+ rm -f ./cmd/dist/dist
+
+ # DO NOT ADD ANY NEW CODE HERE.
+--
+2.44.0
+
diff --git a/meta/recipes-devtools/go/go/0005-cmd-dist-separate-host-and-target-builds.patch b/meta/recipes-devtools/go/go/0005-cmd-dist-separate-host-and-target-builds.patch
new file mode 100644
index 0000000000..364fce907a
--- /dev/null
+++ b/meta/recipes-devtools/go/go/0005-cmd-dist-separate-host-and-target-builds.patch
@@ -0,0 +1,221 @@
+From 6dda78d528e60993a4688cd9d49440a726378ac8 Mon Sep 17 00:00:00 2001
+From: Alex Kube <alexander.j.kube@gmail.com>
+Date: Wed, 23 Oct 2019 21:18:12 +0430
+Subject: [PATCH 5/9] cmd/dist: separate host and target builds
+
+Change the dist tool to allow for OE-style cross-
+and cross-canadian builds:
+
+ - command flags --host-only and --target only are added;
+ if one is present, the other changes mentioned below
+ take effect, and arguments may also be specified on
+ the command line to enumerate the package(s) to be
+ built.
+
+ - for OE cross builds, go_bootstrap is always built for
+ the current build host, and is moved, along with the supporting
+ toolchain (asm, compile, etc.) to a separate 'native_native'
+ directory under GOROOT/pkg/tool.
+
+ - go_bootstrap is not automatically removed after the build,
+ so it can be reused later (e.g., building both static and
+ shared runtime).
+
+Note that for --host-only builds, it would be nice to specify
+just the "cmd" package to build only the go commands/tools,
+the staleness checks in the dist tool will fail if the "std"
+library has not also been built. So host-only builds have to
+build everything anyway.
+
+Adapted to Go 1.13 from patches originally submitted to
+the meta/recipes-devtools/go tree by
+Matt Madison <matt@madison.systems>.
+
+Rework the patch to avoid identation, it breaks formatting rules but
+makes the changes more obvious and maintainable.
+Jose Quaresma <jose.quaresma@foundries.io>
+Richard Purdie <richard.purdie@linuxfoundation.org>
+
+Upstream-Status: Inappropriate [OE specific]
+
+Signed-off-by: Alexander J Kube <alexander.j.kube@gmail.com>
+Signed-off-by: Jose Quaresma <jose.quaresma@foundries.io>
+---
+ src/cmd/dist/build.go | 76 ++++++++++++++++++++++++++++++++++++++++++-
+ 1 file changed, 75 insertions(+), 1 deletion(-)
+
+diff --git a/src/cmd/dist/build.go b/src/cmd/dist/build.go
+index 06ee4de8a9..74b7c7098f 100644
+--- a/src/cmd/dist/build.go
++++ b/src/cmd/dist/build.go
+@@ -46,6 +46,7 @@ var (
+ goexperiment string
+ workdir string
+ tooldir string
++ build_tooldir string
+ oldgoos string
+ oldgoarch string
+ oldgocache string
+@@ -58,6 +59,7 @@ var (
+ rebuildall bool
+ noOpt bool
+ isRelease bool
++ crossBuild bool
+
+ vflag int // verbosity
+ )
+@@ -265,6 +267,8 @@ func xinit() {
+
+ goversion := findgoversion()
+ isRelease = strings.HasPrefix(goversion, "release.") || strings.HasPrefix(goversion, "go")
++
++ build_tooldir = pathf("%s/pkg/tool/native_native", goroot)
+ }
+
+ // compilerEnv returns a map from "goos/goarch" to the
+@@ -499,8 +503,10 @@ func setup() {
+ goosGoarch := pathf("%s/pkg/%s_%s", goroot, gohostos, gohostarch)
+ if rebuildall {
+ xremoveall(goosGoarch)
++ xremoveall(build_tooldir)
+ }
+ xmkdirall(goosGoarch)
++ xmkdirall(build_tooldir)
+ xatexit(func() {
+ if files := xreaddir(goosGoarch); len(files) == 0 {
+ xremove(goosGoarch)
+@@ -1338,14 +1344,20 @@ func cmdbootstrap() {
+ defer timelog("end", "dist bootstrap")
+
+ var debug, distpack, force, noBanner, noClean bool
++ var hostOnly bool
++ var targetOnly bool
++ var toBuild = []string{"std", "cmd"}
++
+ flag.BoolVar(&rebuildall, "a", rebuildall, "rebuild all")
+ flag.BoolVar(&debug, "d", debug, "enable debugging of bootstrap process")
+ flag.BoolVar(&distpack, "distpack", distpack, "write distribution files to pkg/distpack")
+ flag.BoolVar(&force, "force", force, "build even if the port is marked as broken")
+ flag.BoolVar(&noBanner, "no-banner", noBanner, "do not print banner")
+ flag.BoolVar(&noClean, "no-clean", noClean, "print deprecation warning")
++ flag.BoolVar(&hostOnly, "host-only", hostOnly, "build only host binaries, not target")
++ flag.BoolVar(&targetOnly, "target-only", targetOnly, "build only target binaries, not host")
+
+- xflagparse(0)
++ xflagparse(-1)
+
+ if noClean {
+ xprintf("warning: --no-clean is deprecated and has no effect; use 'go install std cmd' instead\n")
+@@ -1357,6 +1369,18 @@ func cmdbootstrap() {
+ "Use the -force flag to build anyway.\n", goos, goarch)
+ }
+
++ if hostOnly && targetOnly {
++ fatalf("specify only one of --host-only or --target-only\n")
++ }
++ crossBuild = hostOnly || targetOnly
++ if flag.NArg() > 0 {
++ if crossBuild {
++ toBuild = flag.Args()
++ } else {
++ fatalf("package names not permitted without --host-only or --target-only\n")
++ }
++ }
++
+ // Set GOPATH to an internal directory. We shouldn't actually
+ // need to store files here, since the toolchain won't
+ // depend on modules outside of vendor directories, but if
+@@ -1434,9 +1458,14 @@ func cmdbootstrap() {
+ xprintf("\n")
+ }
+
++ // For split host/target cross/cross-canadian builds, we don't
++ // want to be setting these flags until after we have compiled
++ // the toolchain that runs on the build host.
++if !crossBuild {
+ gogcflags = os.Getenv("GO_GCFLAGS") // we were using $BOOT_GO_GCFLAGS until now
+ setNoOpt()
+ goldflags = os.Getenv("GO_LDFLAGS") // we were using $BOOT_GO_LDFLAGS until now
++}
+ goBootstrap := pathf("%s/go_bootstrap", tooldir)
+ if debug {
+ run("", ShowOutput|CheckExit, pathf("%s/compile", tooldir), "-V=full")
+@@ -1464,7 +1493,11 @@ func cmdbootstrap() {
+ xprintf("\n")
+ }
+ xprintf("Building Go toolchain2 using go_bootstrap and Go toolchain1.\n")
++if !crossBuild {
+ os.Setenv("CC", compilerEnvLookup("CC", defaultcc, goos, goarch))
++} else {
++ os.Setenv("CC", defaultcc[""])
++}
+ // Now that cmd/go is in charge of the build process, enable GOEXPERIMENT.
+ os.Setenv("GOEXPERIMENT", goexperiment)
+ // No need to enable PGO for toolchain2.
+@@ -1517,6 +1550,7 @@ func cmdbootstrap() {
+ os.Setenv("GOCACHE", oldgocache)
+ }
+
++if !crossBuild {
+ if goos == oldgoos && goarch == oldgoarch {
+ // Common case - not setting up for cross-compilation.
+ timelog("build", "toolchain")
+@@ -1560,6 +1594,42 @@ func cmdbootstrap() {
+ checkNotStale(toolenv(), goBootstrap, toolchain...)
+ copyfile(pathf("%s/compile4", tooldir), pathf("%s/compile", tooldir), writeExec)
+ }
++} else {
++ gogcflags = os.Getenv("GO_GCFLAGS")
++ goldflags = os.Getenv("GO_LDFLAGS")
++ tool_files, _ := filepath.Glob(pathf("%s/*", tooldir))
++ for _, f := range tool_files {
++ copyfile(pathf("%s/%s", build_tooldir, filepath.Base(f)), f, writeExec)
++ xremove(f)
++ }
++ os.Setenv("GOTOOLDIR", build_tooldir)
++ goBootstrap = pathf("%s/go_bootstrap", build_tooldir)
++ if hostOnly {
++ timelog("build", "host toolchain")
++ if vflag > 0 {
++ xprintf("\n")
++ }
++ xprintf("Building %s for host, %s/%s.\n", strings.Join(toBuild, ","), goos, goarch)
++ goInstall(toolenv(), goBootstrap, toBuild...)
++ checkNotStale(toolenv(), goBootstrap, toBuild...)
++ // Skip cmdGo staleness checks here, since we can't necessarily run the cmdGo binary
++
++ timelog("build", "target toolchain")
++ if vflag > 0 {
++ xprintf("\n")
++ }
++ } else if targetOnly {
++ goos = oldgoos
++ goarch = oldgoarch
++ os.Setenv("GOOS", goos)
++ os.Setenv("GOARCH", goarch)
++ os.Setenv("CC", compilerEnvLookup("CC", defaultcc, goos, goarch))
++ xprintf("Building %s for target, %s/%s.\n", strings.Join(toBuild, ","), goos, goarch)
++ goInstall(toolenv(), goBootstrap, toBuild...)
++ checkNotStale(toolenv(), goBootstrap, toBuild...)
++ // Skip cmdGo staleness checks here, since we can't run the target's cmdGo binary
++ }
++}
+
+ // Check that there are no new files in $GOROOT/bin other than
+ // go and gofmt and $GOOS_$GOARCH (target bin when cross-compiling).
+@@ -1582,8 +1652,12 @@ func cmdbootstrap() {
+ }
+ }
+
++ // Except that for split host/target cross-builds, we need to
++ // keep it.
++if !crossBuild {
+ // Remove go_bootstrap now that we're done.
+ xremove(pathf("%s/go_bootstrap"+exe, tooldir))
++}
+
+ if goos == "android" {
+ // Make sure the exec wrapper will sync a fresh $GOROOT to the device.
+--
+2.44.0
+
diff --git a/meta/recipes-devtools/go/go/0005-make.bash-override-CC-when-building-dist-and-go_boot.patch b/meta/recipes-devtools/go/go/0005-make.bash-override-CC-when-building-dist-and-go_boot.patch
deleted file mode 100644
index a6937672f5..0000000000
--- a/meta/recipes-devtools/go/go/0005-make.bash-override-CC-when-building-dist-and-go_boot.patch
+++ /dev/null
@@ -1,39 +0,0 @@
-From 153e2dda6103fd9dd871be4bb495a8da5328301e Mon Sep 17 00:00:00 2001
-From: Alex Kube <alexander.j.kube@gmail.com>
-Date: Wed, 23 Oct 2019 21:17:16 +0430
-Subject: [PATCH] make.bash: override CC when building dist and go_bootstrap
-
-for handling OE cross-canadian builds.
-
-Adapted to Go 1.13 from patches originally submitted to
-the meta/recipes-devtools/go tree by
-Matt Madison <matt@madison.systems>.
-
-Upstream-Status: Inappropriate [OE specific]
-
-Signed-off-by: Alexander J Kube <alexander.j.kube@gmail.com>
-
----
- src/make.bash | 4 ++--
- 1 file changed, 2 insertions(+), 2 deletions(-)
-
---- a/src/make.bash
-+++ b/src/make.bash
-@@ -195,7 +195,7 @@ if [ "$GOROOT_BOOTSTRAP" = "$GOROOT" ];
- exit 1
- fi
- rm -f cmd/dist/dist
--GOROOT="$GOROOT_BOOTSTRAP" GOOS="" GOARCH="" GO111MODULE=off "$GOROOT_BOOTSTRAP/bin/go" build -o cmd/dist/dist ./cmd/dist
-+CC="${BUILD_CC:-${CC}}" GOROOT="$GOROOT_BOOTSTRAP" GOOS="" GOARCH="" GO111MODULE=off "$GOROOT_BOOTSTRAP/bin/go" build -o cmd/dist/dist ./cmd/dist
-
- # -e doesn't propagate out of eval, so check success by hand.
- eval $(./cmd/dist/dist env -p || echo FAIL=true)
-@@ -220,7 +220,7 @@ fi
- # Run dist bootstrap to complete make.bash.
- # Bootstrap installs a proper cmd/dist, built with the new toolchain.
- # Throw ours, built with Go 1.4, away after bootstrap.
--./cmd/dist/dist bootstrap -a $vflag $GO_DISTFLAGS "$@"
-+CC="${BUILD_CC:-${CC}}" ./cmd/dist/dist bootstrap -a $vflag $GO_DISTFLAGS "$@"
- rm -f ./cmd/dist/dist
-
- # DO NOT ADD ANY NEW CODE HERE.
diff --git a/meta/recipes-devtools/go/go/0006-cmd-dist-separate-host-and-target-builds.patch b/meta/recipes-devtools/go/go/0006-cmd-dist-separate-host-and-target-builds.patch
deleted file mode 100644
index ee743ab990..0000000000
--- a/meta/recipes-devtools/go/go/0006-cmd-dist-separate-host-and-target-builds.patch
+++ /dev/null
@@ -1,281 +0,0 @@
-From 7bc891e00be4263311d75aa2b2ee6a3b7b75355f Mon Sep 17 00:00:00 2001
-From: Alex Kube <alexander.j.kube@gmail.com>
-Date: Wed, 23 Oct 2019 21:18:12 +0430
-Subject: [PATCH] cmd/dist: separate host and target builds
-
-Upstream-Status: Inappropriate [OE specific]
-
-Change the dist tool to allow for OE-style cross-
-and cross-canadian builds:
-
- - command flags --host-only and --target only are added;
- if one is present, the other changes mentioned below
- take effect, and arguments may also be specified on
- the command line to enumerate the package(s) to be
- built.
-
- - for OE cross builds, go_bootstrap is always built for
- the current build host, and is moved, along with the supporting
- toolchain (asm, compile, etc.) to a separate 'native_native'
- directory under GOROOT/pkg/tool.
-
- - go_bootstrap is not automatically removed after the build,
- so it can be reused later (e.g., building both static and
- shared runtime).
-
-Note that for --host-only builds, it would be nice to specify
-just the "cmd" package to build only the go commands/tools,
-the staleness checks in the dist tool will fail if the "std"
-library has not also been built. So host-only builds have to
-build everything anyway.
-
-Adapted to Go 1.13 from patches originally submitted to
-the meta/recipes-devtools/go tree by
-Matt Madison <matt@madison.systems>.
-
-Signed-off-by: Alexander J Kube <alexander.j.kube@gmail.com>
-
----
- src/cmd/dist/build.go | 156 ++++++++++++++++++++++++++++++------------
- 1 file changed, 113 insertions(+), 43 deletions(-)
-
---- a/src/cmd/dist/build.go
-+++ b/src/cmd/dist/build.go
-@@ -44,6 +44,7 @@ var (
- goexperiment string
- workdir string
- tooldir string
-+ build_tooldir string
- oldgoos string
- oldgoarch string
- exe string
-@@ -54,6 +55,7 @@ var (
-
- rebuildall bool
- defaultclang bool
-+ crossBuild bool
-
- vflag int // verbosity
- )
-@@ -254,6 +256,8 @@ func xinit() {
- if tooldir = os.Getenv("GOTOOLDIR"); tooldir == "" {
- tooldir = pathf("%s/pkg/tool/%s_%s", goroot, gohostos, gohostarch)
- }
-+
-+ build_tooldir = pathf("%s/pkg/tool/native_native", goroot)
- }
-
- // compilerEnv returns a map from "goos/goarch" to the
-@@ -499,8 +503,10 @@ func setup() {
- p := pathf("%s/pkg/%s_%s", goroot, gohostos, gohostarch)
- if rebuildall {
- xremoveall(p)
-+ xremoveall(build_tooldir)
- }
- xmkdirall(p)
-+ xmkdirall(build_tooldir)
-
- if goos != gohostos || goarch != gohostarch {
- p := pathf("%s/pkg/%s_%s", goroot, goos, goarch)
-@@ -1252,17 +1258,35 @@ func cmdbootstrap() {
-
- var noBanner, noClean bool
- var debug bool
-+ var hostOnly bool
-+ var targetOnly bool
-+ var toBuild = []string{"std", "cmd"}
-+
- flag.BoolVar(&rebuildall, "a", rebuildall, "rebuild all")
- flag.BoolVar(&debug, "d", debug, "enable debugging of bootstrap process")
- flag.BoolVar(&noBanner, "no-banner", noBanner, "do not print banner")
- flag.BoolVar(&noClean, "no-clean", noClean, "print deprecation warning")
-+ flag.BoolVar(&hostOnly, "host-only", hostOnly, "build only host binaries, not target")
-+ flag.BoolVar(&targetOnly, "target-only", targetOnly, "build only target binaries, not host")
-
-- xflagparse(0)
-+ xflagparse(-1)
-
- if noClean {
- xprintf("warning: --no-clean is deprecated and has no effect; use 'go install std cmd' instead\n")
- }
-
-+ if hostOnly && targetOnly {
-+ fatalf("specify only one of --host-only or --target-only\n")
-+ }
-+ crossBuild = hostOnly || targetOnly
-+ if flag.NArg() > 0 {
-+ if crossBuild {
-+ toBuild = flag.Args()
-+ } else {
-+ fatalf("package names not permitted without --host-only or --target-only\n")
-+ }
-+ }
-+
- // Set GOPATH to an internal directory. We shouldn't actually
- // need to store files here, since the toolchain won't
- // depend on modules outside of vendor directories, but if
-@@ -1330,8 +1354,13 @@ func cmdbootstrap() {
- xprintf("\n")
- }
-
-- gogcflags = os.Getenv("GO_GCFLAGS") // we were using $BOOT_GO_GCFLAGS until now
-- goldflags = os.Getenv("GO_LDFLAGS") // we were using $BOOT_GO_LDFLAGS until now
-+ // For split host/target cross/cross-canadian builds, we don't
-+ // want to be setting these flags until after we have compiled
-+ // the toolchain that runs on the build host.
-+ if !crossBuild {
-+ gogcflags = os.Getenv("GO_GCFLAGS") // we were using $BOOT_GO_GCFLAGS until now
-+ goldflags = os.Getenv("GO_LDFLAGS") // we were using $BOOT_GO_LDFLAGS until now
-+ }
- goBootstrap := pathf("%s/go_bootstrap", tooldir)
- cmdGo := pathf("%s/go", gobin)
- if debug {
-@@ -1360,7 +1389,11 @@ func cmdbootstrap() {
- xprintf("\n")
- }
- xprintf("Building Go toolchain2 using go_bootstrap and Go toolchain1.\n")
-- os.Setenv("CC", compilerEnvLookup(defaultcc, goos, goarch))
-+ if crossBuild {
-+ os.Setenv("CC", defaultcc[""])
-+ } else {
-+ os.Setenv("CC", compilerEnvLookup(defaultcc, goos, goarch))
-+ }
- // Now that cmd/go is in charge of the build process, enable GOEXPERIMENT.
- os.Setenv("GOEXPERIMENT", goexperiment)
- goInstall(goBootstrap, append([]string{"-i"}, toolchain...)...)
-@@ -1399,50 +1432,84 @@ func cmdbootstrap() {
- }
- checkNotStale(goBootstrap, append(toolchain, "runtime/internal/sys")...)
-
-- if goos == oldgoos && goarch == oldgoarch {
-- // Common case - not setting up for cross-compilation.
-- timelog("build", "toolchain")
-- if vflag > 0 {
-- xprintf("\n")
-+ if crossBuild {
-+ gogcflags = os.Getenv("GO_GCFLAGS")
-+ goldflags = os.Getenv("GO_LDFLAGS")
-+ tool_files, _ := filepath.Glob(pathf("%s/*", tooldir))
-+ for _, f := range tool_files {
-+ copyfile(pathf("%s/%s", build_tooldir, filepath.Base(f)), f, writeExec)
-+ xremove(f)
-+ }
-+ os.Setenv("GOTOOLDIR", build_tooldir)
-+ goBootstrap = pathf("%s/go_bootstrap", build_tooldir)
-+ if hostOnly {
-+ timelog("build", "host toolchain")
-+ if vflag > 0 {
-+ xprintf("\n")
-+ }
-+ xprintf("Building %s for host, %s/%s.\n", strings.Join(toBuild, ","), goos, goarch)
-+ goInstall(goBootstrap, toBuild...)
-+ checkNotStale(goBootstrap, toBuild...)
-+ // Skip cmdGo staleness checks here, since we can't necessarily run the cmdGo binary
-+
-+ timelog("build", "target toolchain")
-+ if vflag > 0 {
-+ xprintf("\n")
-+ }
-+ } else if targetOnly {
-+ goos = oldgoos
-+ goarch = oldgoarch
-+ os.Setenv("GOOS", goos)
-+ os.Setenv("GOARCH", goarch)
-+ os.Setenv("CC", compilerEnvLookup(defaultcc, goos, goarch))
-+ xprintf("Building %s for target, %s/%s.\n", strings.Join(toBuild, ","), goos, goarch)
-+ goInstall(goBootstrap, toBuild...)
-+ checkNotStale(goBootstrap, toBuild...)
-+ // Skip cmdGo staleness checks here, since we can't run the target's cmdGo binary
- }
-- xprintf("Building packages and commands for %s/%s.\n", goos, goarch)
- } else {
-- // GOOS/GOARCH does not match GOHOSTOS/GOHOSTARCH.
-- // Finish GOHOSTOS/GOHOSTARCH installation and then
-- // run GOOS/GOARCH installation.
-- timelog("build", "host toolchain")
-- if vflag > 0 {
-- xprintf("\n")
-+
-+ if goos == oldgoos && goarch == oldgoarch {
-+ // Common case - not setting up for cross-compilation.
-+ timelog("build", "toolchain")
-+ if vflag > 0 {
-+ xprintf("\n")
-+ }
-+ xprintf("Building packages and commands for %s/%s.\n", goos, goarch)
-+ } else {
-+ // GOOS/GOARCH does not match GOHOSTOS/GOHOSTARCH.
-+ // Finish GOHOSTOS/GOHOSTARCH installation and then
-+ // run GOOS/GOARCH installation.
-+ timelog("build", "host toolchain")
-+ if vflag > 0 {
-+ xprintf("\n")
-+ }
-+ xprintf("Building packages and commands for host, %s/%s.\n", goos, goarch)
-+ goInstall(goBootstrap, "std", "cmd")
-+ checkNotStale(goBootstrap, "std", "cmd")
-+ checkNotStale(cmdGo, "std", "cmd")
-+
-+ timelog("build", "target toolchain")
-+ if vflag > 0 {
-+ xprintf("\n")
-+ }
-+ goos = oldgoos
-+ goarch = oldgoarch
-+ os.Setenv("GOOS", goos)
-+ os.Setenv("GOARCH", goarch)
-+ os.Setenv("CC", compilerEnvLookup(defaultcc, goos, goarch))
-+ xprintf("Building packages and commands for target, %s/%s.\n", goos, goarch)
- }
-- xprintf("Building packages and commands for host, %s/%s.\n", goos, goarch)
- goInstall(goBootstrap, "std", "cmd")
- checkNotStale(goBootstrap, "std", "cmd")
- checkNotStale(cmdGo, "std", "cmd")
-
-- timelog("build", "target toolchain")
-- if vflag > 0 {
-- xprintf("\n")
-- }
-- goos = oldgoos
-- goarch = oldgoarch
-- os.Setenv("GOOS", goos)
-- os.Setenv("GOARCH", goarch)
-- os.Setenv("CC", compilerEnvLookup(defaultcc, goos, goarch))
-- xprintf("Building packages and commands for target, %s/%s.\n", goos, goarch)
-- }
-- targets := []string{"std", "cmd"}
-- if goos == "js" && goarch == "wasm" {
-- // Skip the cmd tools for js/wasm. They're not usable.
-- targets = targets[:1]
-- }
-- goInstall(goBootstrap, targets...)
-- checkNotStale(goBootstrap, targets...)
-- checkNotStale(cmdGo, targets...)
-- if debug {
-- run("", ShowOutput|CheckExit, pathf("%s/compile", tooldir), "-V=full")
-- run("", ShowOutput|CheckExit, pathf("%s/buildid", tooldir), pathf("%s/pkg/%s_%s/runtime/internal/sys.a", goroot, goos, goarch))
-- checkNotStale(goBootstrap, append(toolchain, "runtime/internal/sys")...)
-- copyfile(pathf("%s/compile4", tooldir), pathf("%s/compile", tooldir), writeExec)
-+ if debug {
-+ run("", ShowOutput|CheckExit, pathf("%s/compile", tooldir), "-V=full")
-+ run("", ShowOutput|CheckExit, pathf("%s/buildid", tooldir), pathf("%s/pkg/%s_%s/runtime/internal/sys.a", goroot, goos, goarch))
-+ checkNotStale(goBootstrap, append(toolchain, "runtime/internal/sys")...)
-+ copyfile(pathf("%s/compile4", tooldir), pathf("%s/compile", tooldir), writeExec)
-+ }
- }
-
- // Check that there are no new files in $GOROOT/bin other than
-@@ -1459,8 +1526,11 @@ func cmdbootstrap() {
- }
- }
-
-- // Remove go_bootstrap now that we're done.
-- xremove(pathf("%s/go_bootstrap", tooldir))
-+ // Except that for split host/target cross-builds, we need to
-+ // keep it.
-+ if !crossBuild {
-+ xremove(pathf("%s/go_bootstrap", tooldir))
-+ }
-
- if goos == "android" {
- // Make sure the exec wrapper will sync a fresh $GOROOT to the device.
diff --git a/meta/recipes-devtools/go/go/0006-cmd-go-make-GOROOT-precious-by-default.patch b/meta/recipes-devtools/go/go/0006-cmd-go-make-GOROOT-precious-by-default.patch
new file mode 100644
index 0000000000..262f1e96b8
--- /dev/null
+++ b/meta/recipes-devtools/go/go/0006-cmd-go-make-GOROOT-precious-by-default.patch
@@ -0,0 +1,114 @@
+From aff5a740d6286c04beb0593fc68b0aea5a95ad39 Mon Sep 17 00:00:00 2001
+From: Alex Kube <alexander.j.kube@gmail.com>
+Date: Wed, 23 Oct 2019 21:18:56 +0430
+Subject: [PATCH 6/9] cmd/go: make GOROOT precious by default
+
+The go build tool normally rebuilds whatever it detects is
+stale. This can be a problem when GOROOT is intended to
+be read-only and the go runtime has been built as a shared
+library, since we don't want every application to be rebuilding
+the shared runtime - particularly in cross-build/packaging
+setups, since that would lead to 'abi mismatch' runtime errors.
+
+This patch prevents the install and linkshared actions from
+installing to GOROOT unless overridden with the GOROOT_OVERRIDE
+environment variable.
+
+Adapted to Go 1.13 from patches originally submitted to
+the meta/recipes-devtools/go tree by
+Matt Madison <matt@madison.systems>.
+
+Upstream-Status: Inappropriate [OE specific]
+
+Signed-off-by: Alexander J Kube <alexander.j.kube@gmail.com>
+Signed-off-by: Jose Quaresma <jose.quaresma@foundries.io>
+---
+ src/cmd/go/internal/work/action.go | 3 +++
+ src/cmd/go/internal/work/build.go | 6 ++++++
+ src/cmd/go/internal/work/exec.go | 25 +++++++++++++++++++++++++
+ 3 files changed, 34 insertions(+)
+
+diff --git a/src/cmd/go/internal/work/action.go b/src/cmd/go/internal/work/action.go
+index a59072e591..9e35ebde0c 100644
+--- a/src/cmd/go/internal/work/action.go
++++ b/src/cmd/go/internal/work/action.go
+@@ -754,6 +754,9 @@ func (b *Builder) addTransitiveLinkDeps(a, a1 *Action, shlib string) {
+ if p1 == nil || p1.Shlib == "" || haveShlib[filepath.Base(p1.Shlib)] {
+ continue
+ }
++ if goRootPrecious && (p1.Standard || p1.Goroot) {
++ continue
++ }
+ haveShlib[filepath.Base(p1.Shlib)] = true
+ // TODO(rsc): The use of ModeInstall here is suspect, but if we only do ModeBuild,
+ // we'll end up building an overall library or executable that depends at runtime
+diff --git a/src/cmd/go/internal/work/build.go b/src/cmd/go/internal/work/build.go
+index 408edb5119..3d60252127 100644
+--- a/src/cmd/go/internal/work/build.go
++++ b/src/cmd/go/internal/work/build.go
+@@ -233,6 +233,8 @@ See also: go install, go get, go clean.
+
+ const concurrentGCBackendCompilationEnabledByDefault = true
+
++var goRootPrecious bool = true
++
+ func init() {
+ // break init cycle
+ CmdBuild.Run = runBuild
+@@ -246,6 +248,10 @@ func init() {
+ AddCoverFlags(CmdBuild, nil)
+ AddCoverFlags(CmdInstall, nil)
+ }
++
++ if x := os.Getenv("GOROOT_OVERRIDE"); x != "" {
++ goRootPrecious = false
++ }
+ }
+
+ // Note that flags consulted by other parts of the code
+diff --git a/src/cmd/go/internal/work/exec.go b/src/cmd/go/internal/work/exec.go
+index 9724cd07d0..544df461a2 100644
+--- a/src/cmd/go/internal/work/exec.go
++++ b/src/cmd/go/internal/work/exec.go
+@@ -544,6 +544,23 @@ func (b *Builder) build(ctx context.Context, a *Action) (err error) {
+ return err
+ }
+
++ if goRootPrecious && (a.Package.Standard || a.Package.Goroot) {
++ _, err := os.Stat(a.Package.Target)
++ if err == nil {
++ a.built = a.Package.Target
++ a.Target = a.Package.Target
++ a.buildID = b.fileHash(a.Package.Target)
++ a.Package.Stale = false
++ a.Package.StaleReason = "GOROOT-resident package"
++ return nil
++ }
++ a.Package.Stale = true
++ a.Package.StaleReason = "missing or invalid GOROOT-resident package"
++ if b.IsCmdList {
++ return nil
++ }
++ }
++
+ if err := sh.Mkdir(a.Objdir); err != nil {
+ return err
+ }
+@@ -1737,6 +1754,14 @@ func (b *Builder) linkShared(ctx context.Context, a *Action) (err error) {
+ return err
+ }
+
++ if goRootPrecious && a.Package != nil {
++ p := a.Package
++ if p.Standard || p.Goroot {
++ err := fmt.Errorf("attempting to install package %s into read-only GOROOT", p.ImportPath)
++ return err
++ }
++ }
++
+ if err := b.Shell(a).Mkdir(a.Objdir); err != nil {
+ return err
+ }
+--
+2.44.0
+
diff --git a/meta/recipes-devtools/go/go/0007-cmd-go-make-GOROOT-precious-by-default.patch b/meta/recipes-devtools/go/go/0007-cmd-go-make-GOROOT-precious-by-default.patch
deleted file mode 100644
index 534d431045..0000000000
--- a/meta/recipes-devtools/go/go/0007-cmd-go-make-GOROOT-precious-by-default.patch
+++ /dev/null
@@ -1,104 +0,0 @@
-From 9ba507e076c744f4d394418e4a849e68cd426a4a Mon Sep 17 00:00:00 2001
-From: Alex Kube <alexander.j.kube@gmail.com>
-Date: Wed, 23 Oct 2019 21:18:56 +0430
-Subject: [PATCH 7/9] cmd/go: make GOROOT precious by default
-
-Upstream-Status: Inappropriate [OE specific]
-
-The go build tool normally rebuilds whatever it detects is
-stale. This can be a problem when GOROOT is intended to
-be read-only and the go runtime has been built as a shared
-library, since we don't want every application to be rebuilding
-the shared runtime - particularly in cross-build/packaging
-setups, since that would lead to 'abi mismatch' runtime errors.
-
-This patch prevents the install and linkshared actions from
-installing to GOROOT unless overridden with the GOROOT_OVERRIDE
-environment variable.
-
-Adapted to Go 1.13 from patches originally submitted to
-the meta/recipes-devtools/go tree by
-Matt Madison <matt@madison.systems>.
-
-Signed-off-by: Alexander J Kube <alexander.j.kube@gmail.com>
----
- src/cmd/go/internal/work/action.go | 3 +++
- src/cmd/go/internal/work/build.go | 6 ++++++
- src/cmd/go/internal/work/exec.go | 25 +++++++++++++++++++++++++
- 3 files changed, 34 insertions(+)
-
---- a/src/cmd/go/internal/work/action.go
-+++ b/src/cmd/go/internal/work/action.go
-@@ -673,6 +673,9 @@ func (b *Builder) addTransitiveLinkDeps(
- if p1 == nil || p1.Shlib == "" || haveShlib[filepath.Base(p1.Shlib)] {
- continue
- }
-+ if goRootPrecious && (p1.Standard || p1.Goroot) {
-+ continue
-+ }
- haveShlib[filepath.Base(p1.Shlib)] = true
- // TODO(rsc): The use of ModeInstall here is suspect, but if we only do ModeBuild,
- // we'll end up building an overall library or executable that depends at runtime
---- a/src/cmd/go/internal/work/build.go
-+++ b/src/cmd/go/internal/work/build.go
-@@ -197,6 +197,8 @@ See also: go install, go get, go clean.
-
- const concurrentGCBackendCompilationEnabledByDefault = true
-
-+var goRootPrecious bool = true
-+
- func init() {
- // break init cycle
- CmdBuild.Run = runBuild
-@@ -209,6 +211,10 @@ func init() {
-
- AddBuildFlags(CmdBuild, DefaultBuildFlags)
- AddBuildFlags(CmdInstall, DefaultBuildFlags)
-+
-+ if x := os.Getenv("GOROOT_OVERRIDE"); x != "" {
-+ goRootPrecious = false
-+ }
- }
-
- // Note that flags consulted by other parts of the code
---- a/src/cmd/go/internal/work/exec.go
-+++ b/src/cmd/go/internal/work/exec.go
-@@ -535,6 +535,23 @@ func (b *Builder) build(ctx context.Cont
- return errors.New("binary-only packages are no longer supported")
- }
-
-+ if goRootPrecious && (a.Package.Standard || a.Package.Goroot) {
-+ _, err := os.Stat(a.Package.Target)
-+ if err == nil {
-+ a.built = a.Package.Target
-+ a.Target = a.Package.Target
-+ a.buildID = b.fileHash(a.Package.Target)
-+ a.Package.Stale = false
-+ a.Package.StaleReason = "GOROOT-resident package"
-+ return nil
-+ }
-+ a.Package.Stale = true
-+ a.Package.StaleReason = "missing or invalid GOROOT-resident package"
-+ if b.IsCmdList {
-+ return nil
-+ }
-+ }
-+
- if err := b.Mkdir(a.Objdir); err != nil {
- return err
- }
-@@ -1585,6 +1602,14 @@ func (b *Builder) linkShared(ctx context
- return err
- }
-
-+ if goRootPrecious && a.Package != nil {
-+ p := a.Package
-+ if p.Standard || p.Goroot {
-+ err := fmt.Errorf("attempting to install package %s into read-only GOROOT", p.ImportPath)
-+ return err
-+ }
-+ }
-+
- if err := b.Mkdir(a.Objdir); err != nil {
- return err
- }
diff --git a/meta/recipes-devtools/go/go/0007-exec.go-filter-out-build-specific-paths-from-linker-.patch b/meta/recipes-devtools/go/go/0007-exec.go-filter-out-build-specific-paths-from-linker-.patch
new file mode 100644
index 0000000000..c5bf28f54a
--- /dev/null
+++ b/meta/recipes-devtools/go/go/0007-exec.go-filter-out-build-specific-paths-from-linker-.patch
@@ -0,0 +1,61 @@
+From 083b5c74b12a1abeb11dd7f58a1cb1593d0000c0 Mon Sep 17 00:00:00 2001
+From: Changqing Li <changqing.li@windriver.com>
+Date: Tue, 27 Feb 2024 18:06:51 +0800
+Subject: [PATCH] exec.go: filter out build-specific paths from linker flags
+
+The flags can contain build-specific paths, breaking reproducibility.
+Filter out options that have build-specific paths.
+
+Upstream-Status: Inappropriate [ Not perfect for upstream ]
+
+Signed-off-by: Changqing Li <changqing.li@windriver.com>
+---
+ src/cmd/go/internal/work/exec.go | 25 ++++++++++++++++++++++++-
+ 1 file changed, 24 insertions(+), 1 deletion(-)
+
+diff --git a/src/cmd/go/internal/work/exec.go b/src/cmd/go/internal/work/exec.go
+index cde867b..e3ce17d 100644
+--- a/src/cmd/go/internal/work/exec.go
++++ b/src/cmd/go/internal/work/exec.go
+@@ -1358,6 +1358,29 @@ func (b *Builder) linkActionID(a *Action) cache.ActionID {
+ return h.Sum()
+ }
+
++func filterLinkerFlags(flags []string) []string {
++ var newflags []string
++ var skipflag bool
++ skipflag = false
++ for i, flag := range flags {
++ if skipflag == true {
++ skipflag = false
++ continue
++ }
++ if strings.HasPrefix(flag, "--sysroot") || strings.HasPrefix(flag, "-fmacro-prefix-map") || strings.HasPrefix(flag, "-fdebug-prefix-map") || strings.HasPrefix(flag, "-ffile-prefix-map") || strings.HasPrefix(flag, "-fcanon-prefix-map") || strings.HasPrefix(flag, "-fprofile-prefix-map") || strings.HasPrefix(flag, "-Wl,-rpath-link"){
++ continue
++ } else if strings.HasPrefix(flag, "-extldflags") {
++ skipflag = true
++ newflags = append(newflags, flag)
++ var filterd_Extldflags []string = filterLinkerFlags(strings.Split(flags[i+1], " "))
++ newflags = append(newflags, strings.Join(filterd_Extldflags, " "))
++ } else {
++ newflags = append(newflags, flag)
++ }
++ }
++ return newflags
++}
++
+ // printLinkerConfig prints the linker config into the hash h,
+ // as part of the computation of a linker-related action ID.
+ func (b *Builder) printLinkerConfig(h io.Writer, p *load.Package) {
+@@ -1368,7 +1391,7 @@ func (b *Builder) printLinkerConfig(h io.Writer, p *load.Package) {
+ case "gc":
+ fmt.Fprintf(h, "link %s %q %s\n", b.toolID("link"), forcedLdflags, ldBuildmode)
+ if p != nil {
+- fmt.Fprintf(h, "linkflags %q\n", p.Internal.Ldflags)
++ fmt.Fprintf(h, "linkflags %q\n", filterLinkerFlags(p.Internal.Ldflags))
+ }
+
+ // GOARM, GOMIPS, etc.
+--
+2.25.1
+
diff --git a/meta/recipes-devtools/go/go/0008-src-cmd-dist-buildgo.go-do-not-hardcode-host-compile.patch b/meta/recipes-devtools/go/go/0008-src-cmd-dist-buildgo.go-do-not-hardcode-host-compile.patch
new file mode 100644
index 0000000000..0662f66af5
--- /dev/null
+++ b/meta/recipes-devtools/go/go/0008-src-cmd-dist-buildgo.go-do-not-hardcode-host-compile.patch
@@ -0,0 +1,46 @@
+From e0999902687e2e394499f7153db8d62440c4dab0 Mon Sep 17 00:00:00 2001
+From: Alexander Kanavin <alex.kanavin@gmail.com>
+Date: Tue, 10 Nov 2020 16:33:27 +0000
+Subject: [PATCH 8/9] src/cmd/dist/buildgo.go: do not hardcode host compilers
+ into target binaries
+
+These come from $CC/$CXX on the build host and are not useful on targets;
+additionally as they contain host specific paths, this helps reproducibility.
+
+Upstream-Status: Inappropriate [needs upstream discussion]
+
+Signed-off-by: Alexander Kanavin <alex.kanavin@gmail.com>
+Signed-off-by: Jose Quaresma <jose.quaresma@foundries.io>
+---
+ src/cmd/dist/buildgo.go | 8 ++++----
+ 1 file changed, 4 insertions(+), 4 deletions(-)
+
+diff --git a/src/cmd/dist/buildgo.go b/src/cmd/dist/buildgo.go
+index 884e9d729a..2f52edacfe 100644
+--- a/src/cmd/dist/buildgo.go
++++ b/src/cmd/dist/buildgo.go
+@@ -51,8 +51,8 @@ func mkzdefaultcc(dir, file string) {
+ fmt.Fprintf(&buf, "package cfg\n")
+ fmt.Fprintln(&buf)
+ fmt.Fprintf(&buf, "const DefaultPkgConfig = `%s`\n", defaultpkgconfig)
+- buf.WriteString(defaultCCFunc("DefaultCC", defaultcc))
+- buf.WriteString(defaultCCFunc("DefaultCXX", defaultcxx))
++ buf.WriteString(defaultCCFunc("DefaultCC", map[string]string{"":"gcc"}))
++ buf.WriteString(defaultCCFunc("DefaultCXX", map[string]string{"":"g++"}))
+ writefile(buf.String(), file, writeSkipSame)
+ return
+ }
+@@ -62,8 +62,8 @@ func mkzdefaultcc(dir, file string) {
+ fmt.Fprintf(&buf, "package main\n")
+ fmt.Fprintln(&buf)
+ fmt.Fprintf(&buf, "const defaultPkgConfig = `%s`\n", defaultpkgconfig)
+- buf.WriteString(defaultCCFunc("defaultCC", defaultcc))
+- buf.WriteString(defaultCCFunc("defaultCXX", defaultcxx))
++ buf.WriteString(defaultCCFunc("defaultCC", map[string]string{"":"gcc"}))
++ buf.WriteString(defaultCCFunc("defaultCXX", map[string]string{"":"g++"}))
+ writefile(buf.String(), file, writeSkipSame)
+ }
+
+--
+2.44.0
+
diff --git a/meta/recipes-devtools/go/go/0009-go-Filter-build-paths-on-staticly-linked-arches.patch b/meta/recipes-devtools/go/go/0009-go-Filter-build-paths-on-staticly-linked-arches.patch
new file mode 100644
index 0000000000..cc45496e9c
--- /dev/null
+++ b/meta/recipes-devtools/go/go/0009-go-Filter-build-paths-on-staticly-linked-arches.patch
@@ -0,0 +1,61 @@
+From 6c2438f187ca912c54a71b4ac65ab98999a019d2 Mon Sep 17 00:00:00 2001
+From: Richard Purdie <richard.purdie@linuxfoundation.org>
+Date: Sat, 2 Jul 2022 23:08:13 +0100
+Subject: [PATCH 9/9] go: Filter build paths on staticly linked arches
+
+Filter out build time paths from ldflags and other flags variables when they're
+embedded in the go binary so that builds are reproducible regardless of build
+location. This codepath is hit for statically linked go binaries such as those
+on mips/ppc.
+
+Upstream-Status: Submitted [https://github.com/golang/go/pull/56410]
+
+Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
+Signed-off-by: Jose Quaresma <jose.quaresma@foundries.io>
+---
+ src/cmd/go/internal/load/pkg.go | 15 +++++++++++++--
+ 1 file changed, 13 insertions(+), 2 deletions(-)
+
+diff --git a/src/cmd/go/internal/load/pkg.go b/src/cmd/go/internal/load/pkg.go
+index 1549800afb..f41fb2c4ef 100644
+--- a/src/cmd/go/internal/load/pkg.go
++++ b/src/cmd/go/internal/load/pkg.go
+@@ -2277,6 +2277,17 @@ func appendBuildSetting(info *debug.BuildInfo, key, value string) {
+ info.Settings = append(info.Settings, debug.BuildSetting{Key: key, Value: value})
+ }
+
++func filterCompilerFlags(flags string) string {
++ var newflags []string
++ for _, flag := range strings.Fields(flags) {
++ if strings.HasPrefix(flag, "--sysroot") || strings.HasPrefix(flag, "-fmacro-prefix-map") || strings.HasPrefix(flag, "-fdebug-prefix-map") {
++ continue
++ }
++ newflags = append(newflags, flag)
++ }
++ return strings.Join(newflags, " ")
++}
++
+ // setBuildInfo gathers build information and sets it into
+ // p.Internal.BuildInfo, which will later be formatted as a string and embedded
+ // in the binary. setBuildInfo should only be called on a main package with no
+@@ -2384,7 +2395,7 @@ func (p *Package) setBuildInfo(ctx context.Context, autoVCS bool) {
+ if gcflags := BuildGcflags.String(); gcflags != "" && cfg.BuildContext.Compiler == "gc" {
+ appendSetting("-gcflags", gcflags)
+ }
+- if ldflags := BuildLdflags.String(); ldflags != "" {
++ if ldflags := filterCompilerFlags(BuildLdflags.String()); ldflags != "" {
+ // https://go.dev/issue/52372: only include ldflags if -trimpath is not set,
+ // since it can include system paths through various linker flags (notably
+ // -extar, -extld, and -extldflags).
+@@ -2427,7 +2438,7 @@ func (p *Package) setBuildInfo(ctx context.Context, autoVCS bool) {
+ // subset of flags that are known not to be paths?
+ if cfg.BuildContext.CgoEnabled && !cfg.BuildTrimpath {
+ for _, name := range []string{"CGO_CFLAGS", "CGO_CPPFLAGS", "CGO_CXXFLAGS", "CGO_LDFLAGS"} {
+- appendSetting(name, cfg.Getenv(name))
++ appendSetting(name, filterCompilerFlags(cfg.Getenv(name)))
+ }
+ }
+ appendSetting("GOARCH", cfg.BuildContext.GOARCH)
+--
+2.44.0
+
diff --git a/meta/recipes-devtools/go/go_1.18.2.bb b/meta/recipes-devtools/go/go_1.18.2.bb
deleted file mode 100644
index 98977673ee..0000000000
--- a/meta/recipes-devtools/go/go_1.18.2.bb
+++ /dev/null
@@ -1,18 +0,0 @@
-require go-${PV}.inc
-require go-target.inc
-
-inherit linuxloader
-
-CGO_LDFLAGS:append:mips = " -no-pie"
-
-export GO_LDSO = "${@get_linuxloader(d)}"
-export CC_FOR_TARGET = "gcc"
-export CXX_FOR_TARGET = "g++"
-
-# mips/rv64 doesn't support -buildmode=pie, so skip the QA checking for mips/riscv32 and its
-# variants.
-python() {
- if 'mips' in d.getVar('TARGET_ARCH',True) or 'riscv32' in d.getVar('TARGET_ARCH',True):
- d.appendVar('INSANE_SKIP:%s' % d.getVar('PN',True), " textrel")
-}
-
diff --git a/meta/recipes-devtools/go/go_1.22.2.bb b/meta/recipes-devtools/go/go_1.22.2.bb
new file mode 100644
index 0000000000..46f5fbc6be
--- /dev/null
+++ b/meta/recipes-devtools/go/go_1.22.2.bb
@@ -0,0 +1,18 @@
+require go-${PV}.inc
+require go-target.inc
+
+inherit linuxloader
+
+CGO_LDFLAGS:append = " -no-pie"
+
+export GO_LDSO = "${@get_linuxloader(d)}"
+export CC_FOR_TARGET = "gcc"
+export CXX_FOR_TARGET = "g++"
+
+# mips/rv64 doesn't support -buildmode=pie, so skip the QA checking for mips/riscv32 and its
+# variants.
+python() {
+ if 'mips' in d.getVar('TARGET_ARCH') or 'riscv32' in d.getVar('TARGET_ARCH'):
+ d.appendVar('INSANE_SKIP:%s' % d.getVar('PN'), " textrel")
+}
+
diff --git a/meta/recipes-devtools/help2man/help2man_1.49.2.bb b/meta/recipes-devtools/help2man/help2man_1.49.2.bb
deleted file mode 100644
index 62e1f67b55..0000000000
--- a/meta/recipes-devtools/help2man/help2man_1.49.2.bb
+++ /dev/null
@@ -1,24 +0,0 @@
-SUMMARY = "Program for creating simple man pages"
-HOMEPAGE = "https://www.gnu.org/software/help2man/"
-DESCRIPTION = "help2man is a tool for automatically generating simple manual pages from program output."
-SECTION = "devel"
-LICENSE = "GPL-3.0-only"
-LIC_FILES_CHKSUM = "file://COPYING;md5=1ebbd3e34237af26da5dc08a4e440464"
-
-SRC_URI = "${GNU_MIRROR}/${BPN}/${BPN}-${PV}.tar.xz"
-SRC_URI[sha256sum] = "9e2e0e213a7e0a36244eed6204d902b6504602a578b6ecd15268b1454deadd36"
-
-inherit autotools
-
-# This is a hand-maintained aclocal.m4 but our autotools class currently deletes
-# aclocal.m4.
-EXTRA_AUTORECONF += "--exclude=aclocal"
-
-EXTRA_OECONF = "--disable-nls"
-
-do_install:append () {
- # Make sure we use /usr/bin/env perl
- sed -i -e "1s:#!.*:#! /usr/bin/env perl:" ${D}${bindir}/help2man
-}
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/help2man/help2man_1.49.3.bb b/meta/recipes-devtools/help2man/help2man_1.49.3.bb
new file mode 100644
index 0000000000..75931a511f
--- /dev/null
+++ b/meta/recipes-devtools/help2man/help2man_1.49.3.bb
@@ -0,0 +1,24 @@
+SUMMARY = "Program for creating simple man pages"
+HOMEPAGE = "https://www.gnu.org/software/help2man/"
+DESCRIPTION = "help2man is a tool for automatically generating simple manual pages from program output."
+SECTION = "devel"
+LICENSE = "GPL-3.0-only"
+LIC_FILES_CHKSUM = "file://COPYING;md5=1ebbd3e34237af26da5dc08a4e440464"
+
+SRC_URI = "${GNU_MIRROR}/${BPN}/${BPN}-${PV}.tar.xz"
+SRC_URI[sha256sum] = "4d7e4fdef2eca6afe07a2682151cea78781e0a4e8f9622142d9f70c083a2fd4f"
+
+inherit autotools
+
+# This is a hand-maintained aclocal.m4 but our autotools class currently deletes
+# aclocal.m4.
+EXTRA_AUTORECONF += "--exclude=aclocal"
+
+EXTRA_OECONF = "--disable-nls"
+
+do_install:append () {
+ # Make sure we use /usr/bin/env perl
+ sed -i -e "1s:#!.*:#! /usr/bin/env perl:" ${D}${bindir}/help2man
+}
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/icecc-create-env/icecc-create-env_0.1.bb b/meta/recipes-devtools/icecc-create-env/icecc-create-env_0.1.bb
index b716b0245b..1826c7a40e 100644
--- a/meta/recipes-devtools/icecc-create-env/icecc-create-env_0.1.bb
+++ b/meta/recipes-devtools/icecc-create-env/icecc-create-env_0.1.bb
@@ -7,7 +7,6 @@ SECTION = "base"
LICENSE = "GPL-2.0-or-later"
LIC_FILES_CHKSUM = "file://icecc-create-env;beginline=2;endline=5;md5=ae1df3d6a058bfda40b66094c5f6065f"
-PR = "r2"
DEPENDS = ""
INHIBIT_DEFAULT_DEPS = "1"
diff --git a/meta/recipes-devtools/jquery/jquery_3.6.0.bb b/meta/recipes-devtools/jquery/jquery_3.6.0.bb
deleted file mode 100644
index 39ffd38114..0000000000
--- a/meta/recipes-devtools/jquery/jquery_3.6.0.bb
+++ /dev/null
@@ -1,39 +0,0 @@
-SUMMARY = "jQuery is a fast, small, and feature-rich JavaScript library"
-HOMEPAGE = "https://jquery.com/"
-DESCRIPTION = "${SUMMARY}"
-LICENSE = "MIT"
-SECTION = "devel"
-LIC_FILES_CHKSUM = "file://${S}/${BP}.js;beginline=8;endline=10;md5=9c7c6e9ab275fc1e0d99cb7180ecd14c"
-
-# unpack items to ${S} so the archiver can see them
-#
-SRC_URI = "\
- https://code.jquery.com/${BP}.js;name=js;subdir=${BP} \
- https://code.jquery.com/${BP}.min.js;name=min;subdir=${BP} \
- https://code.jquery.com/${BP}.min.map;name=map;subdir=${BP} \
- "
-
-SRC_URI[js.sha256sum] = "1fe2bb5390a75e5d61e72c107cab528fc3c29a837d69aab7d200e1dbb5dcd239"
-SRC_URI[min.sha256sum] = "ff1523fb7389539c84c65aba19260648793bb4f5e29329d2ee8804bc37a3fe6e"
-SRC_URI[map.sha256sum] = "399548fb0e7b146c12f5ba18099a47d594a970fee96212eee0ab4852f3e56782"
-
-UPSTREAM_CHECK_REGEX = "jquery-(?P<pver>\d+(\.\d+)+)\.js"
-
-# https://github.com/jquery/jquery/issues/3927
-# There are ways jquery can expose security issues but any issues are in the apps exposing them
-# and there is little we can directly do
-CVE_CHECK_IGNORE += "CVE-2007-2379"
-
-inherit allarch
-
-do_install() {
- install -d ${D}${datadir}/javascript/${BPN}/
- install -m 644 ${S}/${BP}.js ${D}${datadir}/javascript/${BPN}/${BPN}.js
- install -m 644 ${S}/${BP}.min.js ${D}${datadir}/javascript/${BPN}/${BPN}.min.js
- install -m 644 ${S}/${BP}.min.map ${D}${datadir}/javascript/${BPN}/${BPN}.min.map
-}
-
-PACKAGES = "${PN}"
-FILES:${PN} = "${datadir}"
-
-BBCLASSEXTEND += "native nativesdk"
diff --git a/meta/recipes-devtools/jquery/jquery_3.7.1.bb b/meta/recipes-devtools/jquery/jquery_3.7.1.bb
new file mode 100644
index 0000000000..33147b493c
--- /dev/null
+++ b/meta/recipes-devtools/jquery/jquery_3.7.1.bb
@@ -0,0 +1,38 @@
+SUMMARY = "jQuery is a fast, small, and feature-rich JavaScript library"
+HOMEPAGE = "https://jquery.com/"
+DESCRIPTION = "${SUMMARY}"
+LICENSE = "MIT"
+SECTION = "devel"
+LIC_FILES_CHKSUM = "file://${S}/${BP}.js;beginline=5;endline=7;md5=9c7c6e9ab275fc1e0d99cb7180ecd14c"
+
+# unpack items to ${S} so the archiver can see them
+#
+SRC_URI = "\
+ https://code.jquery.com/${BP}.js;name=js;subdir=${BP} \
+ https://code.jquery.com/${BP}.min.js;name=min;subdir=${BP} \
+ https://code.jquery.com/${BP}.min.map;name=map;subdir=${BP} \
+ "
+
+SRC_URI[js.sha256sum] = "78a85aca2f0b110c29e0d2b137e09f0a1fb7a8e554b499f740d6744dc8962cfe"
+SRC_URI[min.sha256sum] = "fc9a93dd241f6b045cbff0481cf4e1901becd0e12fb45166a8f17f95823f0b1a"
+SRC_URI[map.sha256sum] = "5e7d6d9c28b7f21006535e8875eb47e9667852a14c4624eed301c6cea19ae62b"
+
+UPSTREAM_CHECK_REGEX = "jquery-(?P<pver>\d+(\.\d+)+)\.js"
+
+# https://github.com/jquery/jquery/issues/3927
+CVE_STATUS[CVE-2007-2379] = "upstream-wontfix: There are ways jquery can expose security issues but any issues \
+are in the apps exposing them and there is little we can directly do."
+
+inherit allarch
+
+do_install() {
+ install -d ${D}${datadir}/javascript/${BPN}/
+ install -m 644 ${S}/${BP}.js ${D}${datadir}/javascript/${BPN}/${BPN}.js
+ install -m 644 ${S}/${BP}.min.js ${D}${datadir}/javascript/${BPN}/${BPN}.min.js
+ install -m 644 ${S}/${BP}.min.map ${D}${datadir}/javascript/${BPN}/${BPN}.min.map
+}
+
+PACKAGES = "${PN}"
+FILES:${PN} = "${datadir}"
+
+BBCLASSEXTEND += "native nativesdk"
diff --git a/meta/recipes-devtools/json-c/json-c/run-ptest b/meta/recipes-devtools/json-c/json-c/run-ptest
new file mode 100644
index 0000000000..2d0e94cd3a
--- /dev/null
+++ b/meta/recipes-devtools/json-c/json-c/run-ptest
@@ -0,0 +1,20 @@
+#!/bin/sh
+
+# This script is used to run json-c test suites
+cd tests
+
+ret_val=0
+for i in test*.test; do
+ # test_basic is not an own testcase, just
+ # contains common code of other tests
+ if [ "$i" != "test_basic.test" ]; then
+ if ./$i >> json-c_test.log 2>&1 ; then
+ echo PASS: $i
+ else
+ ret_val=1
+ echo FAIL: $i
+ fi
+ fi
+done
+
+exit $ret_val
diff --git a/meta/recipes-devtools/json-c/json-c_0.16.bb b/meta/recipes-devtools/json-c/json-c_0.16.bb
deleted file mode 100644
index fdec5ec9af..0000000000
--- a/meta/recipes-devtools/json-c/json-c_0.16.bb
+++ /dev/null
@@ -1,18 +0,0 @@
-SUMMARY = "C bindings for apps which will manipulate JSON data"
-DESCRIPTION = "JSON-C implements a reference counting object model that allows you to easily construct JSON objects in C."
-HOMEPAGE = "https://github.com/json-c/json-c/wiki"
-LICENSE = "MIT"
-LIC_FILES_CHKSUM = "file://COPYING;md5=de54b60fbbc35123ba193fea8ee216f2"
-
-SRC_URI = "https://s3.amazonaws.com/json-c_releases/releases/${BP}.tar.gz"
-
-SRC_URI[sha256sum] = "8e45ac8f96ec7791eaf3bb7ee50e9c2100bbbc87b8d0f1d030c5ba8a0288d96b"
-
-UPSTREAM_CHECK_URI = "https://github.com/${BPN}/${BPN}/tags"
-UPSTREAM_CHECK_REGEX = "json-c-(?P<pver>\d+(\.\d+)+)-\d+"
-
-RPROVIDES:${PN} = "libjson"
-
-inherit cmake
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/json-c/json-c_0.17.bb b/meta/recipes-devtools/json-c/json-c_0.17.bb
new file mode 100644
index 0000000000..20bcece768
--- /dev/null
+++ b/meta/recipes-devtools/json-c/json-c_0.17.bb
@@ -0,0 +1,34 @@
+SUMMARY = "C bindings for apps which will manipulate JSON data"
+DESCRIPTION = "JSON-C implements a reference counting object model that allows you to easily construct JSON objects in C."
+HOMEPAGE = "https://github.com/json-c/json-c/wiki"
+LICENSE = "MIT"
+LIC_FILES_CHKSUM = "file://COPYING;md5=de54b60fbbc35123ba193fea8ee216f2"
+
+SRC_URI = "https://s3.amazonaws.com/json-c_releases/releases/${BP}.tar.gz \
+ file://run-ptest \
+ "
+SRC_URI[sha256sum] = "7550914d58fb63b2c3546f3ccfbe11f1c094147bd31a69dcd23714d7956159e6"
+
+# NVD uses full tag name including date
+CVE_VERSION = "0.17-20230812"
+
+UPSTREAM_CHECK_URI = "https://github.com/${BPN}/${BPN}/tags"
+UPSTREAM_CHECK_REGEX = "json-c-(?P<pver>\d+(\.\d+)+)-\d+"
+
+RPROVIDES:${PN} = "libjson"
+
+# Required for ICECC builds
+EXTRA_OECMAKE = "-DDISABLE_WERROR=ON"
+
+inherit cmake ptest
+
+do_install_ptest() {
+ install -d ${D}/${PTEST_PATH}/tests
+ install ${B}/tests/test* ${D}/${PTEST_PATH}/tests
+ install ${S}/tests/*.test ${D}/${PTEST_PATH}/tests
+ install ${S}/tests/*.expected ${D}/${PTEST_PATH}/tests
+ install ${S}/tests/test-defs.sh ${D}/${PTEST_PATH}/tests
+ install ${S}/tests/*json ${D}/${PTEST_PATH}/tests
+}
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/libcomps/libcomps_0.1.18.bb b/meta/recipes-devtools/libcomps/libcomps_0.1.18.bb
deleted file mode 100644
index dd0a1f8f95..0000000000
--- a/meta/recipes-devtools/libcomps/libcomps_0.1.18.bb
+++ /dev/null
@@ -1,23 +0,0 @@
-SUMMARY = "Libcomps is alternative for yum.comps library (which is for managing rpm package groups)."
-HOMEPAGE = "https://github.com/rpm-software-management/libcomps"
-DESCRIPTION = "Libcomps is alternative for yum.comps library. It's written in pure C as library and there's bindings for python2 and python3."
-LICENSE = "GPL-2.0-only"
-LIC_FILES_CHKSUM = "file://COPYING;md5=b234ee4d69f5fce4486a80fdaf4a4263"
-
-SRC_URI = "git://github.com/rpm-software-management/libcomps.git;branch=master;protocol=https \
- file://0002-Do-not-set-PYTHON_INSTALL_DIR-by-running-python.patch \
- "
-
-SRCREV = "dee4ae37f7818709802de28c4d16fa823bd83ae2"
-
-S = "${WORKDIR}/git"
-
-inherit cmake setuptools3-base
-
-DEPENDS += "libxml2 expat libcheck"
-
-EXTRA_OECMAKE = " -DPYTHON_INSTALL_DIR=${PYTHON_SITEPACKAGES_DIR} -DPYTHON_DESIRED=3"
-OECMAKE_SOURCEPATH = "${S}/libcomps"
-
-BBCLASSEXTEND = "native nativesdk"
-
diff --git a/meta/recipes-devtools/libcomps/libcomps_0.1.21.bb b/meta/recipes-devtools/libcomps/libcomps_0.1.21.bb
new file mode 100644
index 0000000000..91170dfbed
--- /dev/null
+++ b/meta/recipes-devtools/libcomps/libcomps_0.1.21.bb
@@ -0,0 +1,25 @@
+SUMMARY = "Libcomps is alternative for yum.comps library (which is for managing rpm package groups)."
+HOMEPAGE = "https://github.com/rpm-software-management/libcomps"
+DESCRIPTION = "Libcomps is alternative for yum.comps library. It's written in pure C as library and there's bindings for python2 and python3."
+LICENSE = "GPL-2.0-only"
+LIC_FILES_CHKSUM = "file://COPYING;md5=b234ee4d69f5fce4486a80fdaf4a4263"
+
+SRC_URI = "git://github.com/rpm-software-management/libcomps.git;branch=master;protocol=https \
+ file://0002-Do-not-set-PYTHON_INSTALL_DIR-by-running-python.patch \
+ "
+
+SRCREV = "2e973ce22698dd64f472180e3a689755268fb06b"
+
+S = "${WORKDIR}/git"
+
+inherit cmake setuptools3-base
+
+DEPENDS = "expat libxml2 zlib"
+
+EXTRA_OECMAKE = "-DPYTHON_INSTALL_DIR=${PYTHON_SITEPACKAGES_DIR} \
+ -DENABLE_DOCS=OFF \
+ -DENABLE_TESTS=OFF"
+
+OECMAKE_SOURCEPATH = "${S}/libcomps"
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/libdnf/libdnf/0001-FindGtkDoc.cmake-drop-the-requirement-for-GTKDOC_SCA.patch b/meta/recipes-devtools/libdnf/libdnf/0001-FindGtkDoc.cmake-drop-the-requirement-for-GTKDOC_SCA.patch
index 791a32e748..046e09eab2 100644
--- a/meta/recipes-devtools/libdnf/libdnf/0001-FindGtkDoc.cmake-drop-the-requirement-for-GTKDOC_SCA.patch
+++ b/meta/recipes-devtools/libdnf/libdnf/0001-FindGtkDoc.cmake-drop-the-requirement-for-GTKDOC_SCA.patch
@@ -1,7 +1,7 @@
-From 9bb7630915c3e787732463a3e2064fe0e177101b Mon Sep 17 00:00:00 2001
+From 5d6db56791d326e6b486dca54fe2335af0225229 Mon Sep 17 00:00:00 2001
From: Alexander Kanavin <alex.kanavin@gmail.com>
Date: Thu, 24 Nov 2016 14:33:07 +0200
-Subject: [PATCH 1/4] FindGtkDoc.cmake: drop the requirement for
+Subject: [PATCH] FindGtkDoc.cmake: drop the requirement for
GTKDOC_SCANGOBJ_WRAPPER
For some reason cmake is not able to find it when building in openembedded,
@@ -14,7 +14,7 @@ Signed-off-by: Alexander Kanavin <alex.kanavin@gmail.com>
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/cmake/modules/FindGtkDoc.cmake b/cmake/modules/FindGtkDoc.cmake
-index 92b2cc7..39f34bd 100644
+index 92b2cc7e..39f34bd5 100644
--- a/cmake/modules/FindGtkDoc.cmake
+++ b/cmake/modules/FindGtkDoc.cmake
@@ -52,7 +52,7 @@ find_program(GTKDOC_MKHTML_EXE gtkdoc-mkhtml PATH "${GLIB_PREFIX}/bin")
@@ -26,6 +26,3 @@ index 92b2cc7..39f34bd 100644
VERSION_VAR GtkDoc_VERSION)
# ::
---
-2.11.0
-
diff --git a/meta/recipes-devtools/libdnf/libdnf/0001-Get-parameters-for-both-libsolv-and-libsolvext-libdn.patch b/meta/recipes-devtools/libdnf/libdnf/0001-Get-parameters-for-both-libsolv-and-libsolvext-libdn.patch
index c7b2af89d1..223c1be218 100644
--- a/meta/recipes-devtools/libdnf/libdnf/0001-Get-parameters-for-both-libsolv-and-libsolvext-libdn.patch
+++ b/meta/recipes-devtools/libdnf/libdnf/0001-Get-parameters-for-both-libsolv-and-libsolvext-libdn.patch
@@ -1,4 +1,4 @@
-From 9294cd19e5e3121fb8d37b44ee82dd7c4b3ab2c7 Mon Sep 17 00:00:00 2001
+From bf9bde4af952b67cc357d25b9863889099ea9665 Mon Sep 17 00:00:00 2001
From: Alexander Kanavin <alex.kanavin@gmail.com>
Date: Tue, 7 Feb 2017 12:16:03 +0200
Subject: [PATCH] Get parameters for both libsolv and libsolvext (libdnf is
@@ -7,21 +7,20 @@ Subject: [PATCH] Get parameters for both libsolv and libsolvext (libdnf is
Upstream-Status: Submitted [https://github.com/rpm-software-management/libdnf/pull/312]
Signed-off-by: Alexander Kanavin <alex.kanavin@gmail.com>
-
---
CMakeLists.txt | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/CMakeLists.txt b/CMakeLists.txt
-index b722d4fb..ce88b9e3 100644
+index e99b28d0..548a9137 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
-@@ -52,7 +52,7 @@ endif()
+@@ -51,7 +51,7 @@ endif()
+
# build dependencies
- find_package(Gpgme REQUIRED)
-PKG_CHECK_MODULES (LIBSOLV REQUIRED libsolv)
+PKG_CHECK_MODULES (LIBSOLV REQUIRED libsolv libsolvext)
set(LIBSOLV_LIBRARY ${LIBSOLV_LIBRARIES})
-
+
diff --git a/meta/recipes-devtools/libdnf/libdnf/0001-drop-FindPythonInstDir.cmake.patch b/meta/recipes-devtools/libdnf/libdnf/0001-drop-FindPythonInstDir.cmake.patch
index d483dd410b..27f76077d7 100644
--- a/meta/recipes-devtools/libdnf/libdnf/0001-drop-FindPythonInstDir.cmake.patch
+++ b/meta/recipes-devtools/libdnf/libdnf/0001-drop-FindPythonInstDir.cmake.patch
@@ -1,4 +1,4 @@
-From c0a465ccae395871ab10932975e37894220bc6a1 Mon Sep 17 00:00:00 2001
+From a6b3bc4fb3db996cd6c5616a99c16d085704dae0 Mon Sep 17 00:00:00 2001
From: Hongxu Jia <hongxu.jia@windriver.com>
Date: Tue, 1 Jun 2021 10:23:59 +0800
Subject: [PATCH] drop FindPythonInstDir.cmake
@@ -37,6 +37,3 @@ index ed098ded..8b137891 100644
-stdout.write(path)"
-OUTPUT_VARIABLE PYTHON_INSTALL_DIR)
+
---
-2.18.1
-
diff --git a/meta/recipes-devtools/libdnf/libdnf/0001-libdnf-dnf-context.cpp-do-not-try-to-access-BDB-data.patch b/meta/recipes-devtools/libdnf/libdnf/0001-libdnf-dnf-context.cpp-do-not-try-to-access-BDB-data.patch
deleted file mode 100644
index 6f8a3dcb50..0000000000
--- a/meta/recipes-devtools/libdnf/libdnf/0001-libdnf-dnf-context.cpp-do-not-try-to-access-BDB-data.patch
+++ /dev/null
@@ -1,37 +0,0 @@
-From 2f7382b35d59fe08034603497e82ffb943fedef1 Mon Sep 17 00:00:00 2001
-From: Alexander Kanavin <alex.kanavin@gmail.com>
-Date: Wed, 30 Jun 2021 15:31:16 +0200
-Subject: [PATCH] libdnf/dnf-context.cpp: do not try to access BDB database
-
-Upstream-Status: Inappropriate [upstream needs to rework this to support
-sqlite]
-Signed-off-by: Alexander Kanavin <alex.kanavin@gmail.com>
----
- libdnf/dnf-context.cpp | 14 --------------
- 1 file changed, 14 deletions(-)
-
-diff --git a/libdnf/dnf-context.cpp b/libdnf/dnf-context.cpp
-index 86f71a79..9cdcf769 100644
---- a/libdnf/dnf-context.cpp
-+++ b/libdnf/dnf-context.cpp
-@@ -2264,20 +2264,6 @@ dnf_context_setup(DnfContext *context,
- !dnf_context_set_os_release(context, error))
- return FALSE;
-
-- /* setup a file monitor on the rpmdb, if we're operating on the native / */
-- if (g_strcmp0(priv->install_root, "/") == 0) {
-- rpmdb_path = g_build_filename(priv->install_root, "var/lib/rpm/Packages", NULL);
-- file_rpmdb = g_file_new_for_path(rpmdb_path);
-- priv->monitor_rpmdb = g_file_monitor_file(file_rpmdb,
-- G_FILE_MONITOR_NONE,
-- NULL,
-- error);
-- if (priv->monitor_rpmdb == NULL)
-- return FALSE;
-- g_signal_connect(priv->monitor_rpmdb, "changed",
-- G_CALLBACK(dnf_context_rpmdb_changed_cb), context);
-- }
--
- /* copy any vendor distributed cached metadata */
- if (!dnf_context_copy_vendor_cache(context, error))
- return FALSE;
diff --git a/meta/recipes-devtools/libdnf/libdnf/0004-Set-libsolv-variables-with-pkg-config-cmake-s-own-mo.patch b/meta/recipes-devtools/libdnf/libdnf/0004-Set-libsolv-variables-with-pkg-config-cmake-s-own-mo.patch
index 643a5f37b6..ecab47e5af 100644
--- a/meta/recipes-devtools/libdnf/libdnf/0004-Set-libsolv-variables-with-pkg-config-cmake-s-own-mo.patch
+++ b/meta/recipes-devtools/libdnf/libdnf/0004-Set-libsolv-variables-with-pkg-config-cmake-s-own-mo.patch
@@ -1,4 +1,4 @@
-From fbb181d25ad85778add7ed45b6aaf114e02d0f79 Mon Sep 17 00:00:00 2001
+From e40def862bdeb10da295b15c5904fe2829d391c7 Mon Sep 17 00:00:00 2001
From: Alexander Kanavin <alex.kanavin@gmail.com>
Date: Fri, 30 Dec 2016 18:24:50 +0200
Subject: [PATCH] Set libsolv variables with pkg-config (cmake's own module
@@ -7,19 +7,18 @@ Subject: [PATCH] Set libsolv variables with pkg-config (cmake's own module
Upstream-Status: Submitted [https://github.com/rpm-software-management/libdnf/pull/312]
Signed-off-by: Alexander Kanavin <alex.kanavin@gmail.com>
-
---
CMakeLists.txt | 3 ++-
1 file changed, 2 insertions(+), 1 deletion(-)
diff --git a/CMakeLists.txt b/CMakeLists.txt
-index 405dc4e8..53837448 100644
+index 6444c374..e99b28d0 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
-@@ -52,7 +52,8 @@ endif()
+@@ -51,7 +51,8 @@ endif()
+
# build dependencies
- find_package(Gpgme REQUIRED)
-find_package(LibSolv 0.7.21 REQUIRED COMPONENTS ext)
+PKG_CHECK_MODULES (LIBSOLV REQUIRED libsolv)
+set(LIBSOLV_LIBRARY ${LIBSOLV_LIBRARIES})
diff --git a/meta/recipes-devtools/libdnf/libdnf/armarch.patch b/meta/recipes-devtools/libdnf/libdnf/armarch.patch
new file mode 100644
index 0000000000..63debdb58f
--- /dev/null
+++ b/meta/recipes-devtools/libdnf/libdnf/armarch.patch
@@ -0,0 +1,50 @@
+From aa0f8b65feec64420a9b7b61cfcc8bcce161e14b Mon Sep 17 00:00:00 2001
+From: Richard Purdie <richard.purdie@linuxfoundation.org>
+Date: Thu, 9 Nov 2023 10:29:31 +0000
+Subject: [PATCH] libdnf: Fix arm arch mapping issues for qemuarmv5
+
+We change the way rpm architectures work, we make the machine name the default machine
+specific package architecture.
+
+This arm mapping code can work or in the case of qemuarmv5, it doesn't as it creates
+armv5hl which doesn't exist and causes errrors. We can simply remove it, we don't need it.
+
+Upstream-Status: Inappropriate [Relies on OE rpm config]
+---
+ libdnf/hy-util.cpp | 23 -----------------------
+ 1 file changed, 23 deletions(-)
+
+diff --git a/libdnf/hy-util.cpp b/libdnf/hy-util.cpp
+index 9978c8e9..aa1369b6 100644
+--- a/libdnf/hy-util.cpp
++++ b/libdnf/hy-util.cpp
+@@ -117,29 +117,6 @@ hy_detect_arch(char **arch)
+ if (uname(&un) < 0)
+ return DNF_ERROR_FAILED;
+
+- if (!strncmp(un.machine, "armv", 4)) {
+- /* un.machine is armvXE, where X is version number and E is
+- * endianness (b or l); we need to add modifiers such as
+- * h (hardfloat), n (neon). Neon is a requirement of armv8 so
+- * as far as rpm is concerned armv8l is the equivilent of armv7hnl
+- * (or 7hnb) so we don't explicitly add 'n' for 8+ as it's expected. */
+- char endian = un.machine[strlen(un.machine)-1];
+- char *modifier = un.machine + 5;
+- while(isdigit(*modifier)) /* keep armv7, armv8, armv9, armv10, armv100, ... */
+- modifier++;
+- if (getauxval(AT_HWCAP) & HWCAP_ARM_VFP)
+- *modifier++ = 'h';
+- if ((atoi(un.machine+4) == 7) && (getauxval(AT_HWCAP) & HWCAP_ARM_NEON))
+- *modifier++ = 'n';
+- *modifier++ = endian;
+- *modifier = 0;
+- }
+-#ifdef __MIPSEL__
+- if (!strcmp(un.machine, "mips"))
+- strcpy(un.machine, "mipsel");
+- else if (!strcmp(un.machine, "mips64"))
+- strcpy(un.machine, "mips64el");
+-#endif
+ *arch = g_strdup(un.machine);
+ return 0;
+ }
diff --git a/meta/recipes-devtools/libdnf/libdnf/enable_test_data_dir_set.patch b/meta/recipes-devtools/libdnf/libdnf/enable_test_data_dir_set.patch
index e3784cc9e4..e4f6d447f1 100644
--- a/meta/recipes-devtools/libdnf/libdnf/enable_test_data_dir_set.patch
+++ b/meta/recipes-devtools/libdnf/libdnf/enable_test_data_dir_set.patch
@@ -1,4 +1,7 @@
-libdnf: allow reproducible binary builds
+From 25229773a9d4472235278bb45f75439e56630cee Mon Sep 17 00:00:00 2001
+From: Joe Slater <joe.slater@windriver.com>
+Date: Wed, 22 Jul 2020 13:31:11 -0700
+Subject: [PATCH] libdnf: allow reproducible binary builds
Use a dummy directory for test data if not built WITH_TESTS. Allow for overriding
TESTDATADIR, since the default is guaranteed to be wrong for target builds.
@@ -6,12 +9,16 @@ TESTDATADIR, since the default is guaranteed to be wrong for target builds.
Upstream-Status: Pending
Signed-off-by: Joe Slater <joe.slater@windriver.com>
+---
+ CMakeLists.txt | 7 ++++++-
+ 1 file changed, 6 insertions(+), 1 deletion(-)
-
+diff --git a/CMakeLists.txt b/CMakeLists.txt
+index 548a9137..c378e7d9 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
-@@ -133,7 +133,12 @@ add_definitions(-DG_LOG_DOMAIN=\\"libdnf
- add_definitions(-D_FILE_OFFSET_BITS=64)
+@@ -132,7 +132,12 @@ add_definitions(-DGETTEXT_DOMAIN=\\"libdnf\\")
+ add_definitions(-DG_LOG_DOMAIN=\\"libdnf\\")
# tests
-add_definitions(-DTESTDATADIR=\\"${CMAKE_SOURCE_DIR}/data/tests\\")
diff --git a/meta/recipes-devtools/libdnf/libdnf_0.67.0.bb b/meta/recipes-devtools/libdnf/libdnf_0.67.0.bb
deleted file mode 100644
index 69255c5a57..0000000000
--- a/meta/recipes-devtools/libdnf/libdnf_0.67.0.bb
+++ /dev/null
@@ -1,36 +0,0 @@
-SUMMARY = "Library providing simplified C and Python API to libsolv"
-HOMEPAGE = "https://github.com/rpm-software-management/libdnf"
-DESCRIPTION = "This library provides a high level package-manager. It's core library of dnf, PackageKit and rpm-ostree. It's replacement for deprecated hawkey library which it contains inside and uses librepo under the hood."
-LICENSE = "LGPL-2.1-or-later"
-LIC_FILES_CHKSUM = "file://COPYING;md5=4fbd65380cdd255951079008b364516c"
-
-SRC_URI = "git://github.com/rpm-software-management/libdnf;branch=dnf-4-master;protocol=https \
- file://0001-FindGtkDoc.cmake-drop-the-requirement-for-GTKDOC_SCA.patch \
- file://0004-Set-libsolv-variables-with-pkg-config-cmake-s-own-mo.patch \
- file://0001-Get-parameters-for-both-libsolv-and-libsolvext-libdn.patch \
- file://enable_test_data_dir_set.patch \
- file://0001-drop-FindPythonInstDir.cmake.patch \
- file://0001-libdnf-dnf-context.cpp-do-not-try-to-access-BDB-data.patch \
- "
-
-SRCREV = "1742be5225b3a4928707696db8c69391def55f5a"
-UPSTREAM_CHECK_GITTAGREGEX = "(?P<pver>(?!4\.90)\d+(\.\d+)+)"
-
-S = "${WORKDIR}/git"
-
-DEPENDS = "glib-2.0 libsolv libcheck librepo rpm gtk-doc libmodulemd json-c swig-native util-linux"
-
-inherit gtk-doc gobject-introspection cmake pkgconfig setuptools3-base
-
-EXTRA_OECMAKE = " -DPYTHON_INSTALL_DIR=${PYTHON_SITEPACKAGES_DIR} -DWITH_MAN=OFF -DPYTHON_DESIRED=3 \
- ${@bb.utils.contains('GI_DATA_ENABLED', 'True', '-DWITH_GIR=ON', '-DWITH_GIR=OFF', d)} \
- -DWITH_TESTS=OFF \
- -DWITH_ZCHUNK=OFF \
- -DWITH_HTML=OFF \
- "
-EXTRA_OECMAKE:append:class-native = " -DWITH_GIR=OFF"
-EXTRA_OECMAKE:append:class-nativesdk = " -DWITH_GIR=OFF"
-
-BBCLASSEXTEND = "native nativesdk"
-SKIP_RECIPE[libdnf] ?= "${@bb.utils.contains('PACKAGE_CLASSES', 'package_rpm', '', 'Does not build without package_rpm in PACKAGE_CLASSES due disabled rpm support in libsolv', d)}"
-
diff --git a/meta/recipes-devtools/libdnf/libdnf_0.73.1.bb b/meta/recipes-devtools/libdnf/libdnf_0.73.1.bb
new file mode 100644
index 0000000000..3ab840b1b0
--- /dev/null
+++ b/meta/recipes-devtools/libdnf/libdnf_0.73.1.bb
@@ -0,0 +1,36 @@
+SUMMARY = "Library providing simplified C and Python API to libsolv"
+HOMEPAGE = "https://github.com/rpm-software-management/libdnf"
+DESCRIPTION = "This library provides a high level package-manager. It's core library of dnf, PackageKit and rpm-ostree. It's replacement for deprecated hawkey library which it contains inside and uses librepo under the hood."
+LICENSE = "LGPL-2.1-or-later"
+LIC_FILES_CHKSUM = "file://COPYING;md5=4fbd65380cdd255951079008b364516c"
+
+SRC_URI = "git://github.com/rpm-software-management/libdnf;branch=dnf-4-master;protocol=https \
+ file://0001-FindGtkDoc.cmake-drop-the-requirement-for-GTKDOC_SCA.patch \
+ file://0004-Set-libsolv-variables-with-pkg-config-cmake-s-own-mo.patch \
+ file://0001-Get-parameters-for-both-libsolv-and-libsolvext-libdn.patch \
+ file://enable_test_data_dir_set.patch \
+ file://0001-drop-FindPythonInstDir.cmake.patch \
+ file://armarch.patch \
+ "
+
+SRCREV = "0120e70747dcf05e716792e2e846c62eccd44319"
+UPSTREAM_CHECK_GITTAGREGEX = "(?P<pver>(?!4\.90)\d+(\.\d+)+)"
+
+S = "${WORKDIR}/git"
+
+DEPENDS = "glib-2.0 libsolv libcheck librepo rpm gtk-doc libmodulemd json-c swig-native util-linux"
+
+inherit gtk-doc gobject-introspection cmake pkgconfig setuptools3-base
+
+EXTRA_OECMAKE = " -DPYTHON_INSTALL_DIR=${PYTHON_SITEPACKAGES_DIR} -DWITH_MAN=OFF -DPYTHON_DESIRED=3 \
+ ${@bb.utils.contains('GI_DATA_ENABLED', 'True', '-DWITH_GIR=ON', '-DWITH_GIR=OFF', d)} \
+ -DWITH_TESTS=OFF \
+ -DWITH_ZCHUNK=OFF \
+ -DWITH_HTML=OFF \
+ "
+EXTRA_OECMAKE:append:class-native = " -DWITH_GIR=OFF"
+EXTRA_OECMAKE:append:class-nativesdk = " -DWITH_GIR=OFF"
+
+BBCLASSEXTEND = "native nativesdk"
+SKIP_RECIPE[libdnf] ?= "${@bb.utils.contains('PACKAGE_CLASSES', 'package_rpm', '', 'Does not build without package_rpm in PACKAGE_CLASSES due disabled rpm support in libsolv', d)}"
+
diff --git a/meta/recipes-devtools/libedit/libedit_20210910-3.1.bb b/meta/recipes-devtools/libedit/libedit_20210910-3.1.bb
deleted file mode 100644
index 5aa83ef246..0000000000
--- a/meta/recipes-devtools/libedit/libedit_20210910-3.1.bb
+++ /dev/null
@@ -1,24 +0,0 @@
-SUMMARY = "BSD replacement for libreadline"
-DESCRIPTION = "Command line editor library providing generic line editing, \
-history, and tokenization functions"
-HOMEPAGE = "http://www.thrysoee.dk/editline/"
-SECTION = "libs"
-LICENSE = "BSD-3-Clause"
-LIC_FILES_CHKSUM = "file://COPYING;md5=1e4228d0c5a9093b01aeaaeae6641533"
-
-DEPENDS = "ncurses"
-
-inherit autotools
-
-SRC_URI = "http://www.thrysoee.dk/editline/${BP}.tar.gz \
- file://stdc-predef.patch \
- "
-SRC_URI[sha256sum] = "6792a6a992050762edcca28ff3318cdb7de37dccf7bc30db59fcd7017eed13c5"
-
-BBCLASSEXTEND = "native nativesdk"
-
-inherit update-alternatives
-
-ALTERNATIVE_PRIORITY = "90"
-ALTERNATIVE:${PN}-doc = "history.3"
-ALTERNATIVE_LINK_NAME[history.3] = "${mandir}/man3/history.3"
diff --git a/meta/recipes-devtools/libedit/libedit_20230828-3.1.bb b/meta/recipes-devtools/libedit/libedit_20230828-3.1.bb
new file mode 100644
index 0000000000..1684b57d31
--- /dev/null
+++ b/meta/recipes-devtools/libedit/libedit_20230828-3.1.bb
@@ -0,0 +1,24 @@
+SUMMARY = "BSD replacement for libreadline"
+DESCRIPTION = "Command line editor library providing generic line editing, \
+history, and tokenization functions"
+HOMEPAGE = "http://www.thrysoee.dk/editline/"
+SECTION = "libs"
+LICENSE = "BSD-3-Clause"
+LIC_FILES_CHKSUM = "file://COPYING;md5=1e4228d0c5a9093b01aeaaeae6641533"
+
+DEPENDS = "ncurses"
+
+inherit autotools
+
+SRC_URI = "http://www.thrysoee.dk/editline/${BP}.tar.gz \
+ file://stdc-predef.patch \
+ "
+SRC_URI[sha256sum] = "4ee8182b6e569290e7d1f44f0f78dac8716b35f656b76528f699c69c98814dad"
+
+BBCLASSEXTEND = "native nativesdk"
+
+inherit update-alternatives
+
+ALTERNATIVE_PRIORITY = "90"
+ALTERNATIVE:${PN}-doc = "history.3"
+ALTERNATIVE_LINK_NAME[history.3] = "${mandir}/man3/history.3"
diff --git a/meta/recipes-devtools/libmodulemd/libmodulemd_git.bb b/meta/recipes-devtools/libmodulemd/libmodulemd_git.bb
index 9e32134628..d3bef83306 100644
--- a/meta/recipes-devtools/libmodulemd/libmodulemd_git.bb
+++ b/meta/recipes-devtools/libmodulemd/libmodulemd_git.bb
@@ -6,8 +6,8 @@ LIC_FILES_CHKSUM = "file://COPYING;md5=25a3927bff3ee4f5b21bcb0ed3fcd6bb"
SRC_URI = "git://github.com/fedora-modularity/libmodulemd;protocol=https;branch=main"
-PV = "2.14.0"
-SRCREV = "ee80309bc766d781a144e6879419b29f444d94eb"
+PV = "2.15.0"
+SRCREV = "bfde7f2d04fbb22e26c9eb843e4ccc478762dd8d"
S = "${WORKDIR}/git"
diff --git a/meta/recipes-devtools/librepo/librepo/0001-gpg_gpgme.c-fix-build-errors-with-older-gcc.patch b/meta/recipes-devtools/librepo/librepo/0001-gpg_gpgme.c-fix-build-errors-with-older-gcc.patch
new file mode 100644
index 0000000000..8727b181b3
--- /dev/null
+++ b/meta/recipes-devtools/librepo/librepo/0001-gpg_gpgme.c-fix-build-errors-with-older-gcc.patch
@@ -0,0 +1,36 @@
+From b525cdec3051d1c6ff0c3cd38bf3070b18d6fb50 Mon Sep 17 00:00:00 2001
+From: Alexander Kanavin <alex@linutronix.de>
+Date: Wed, 6 Mar 2024 10:13:38 +0100
+Subject: [PATCH] gpg_gpgme.c: fix build errors with older gcc
+MIME-Version: 1.0
+Content-Type: text/plain; charset=UTF-8
+Content-Transfer-Encoding: 8bit
+
+When selinux is not enabled, older gcc versions error out this way:
+
+| /home/pokybuild/yocto-worker/oe-selftest-armhost/build/build-st-1938845/tmp-mc-tiny/hosttools/gcc -DG_LOG_DOMAIN=\"librepo\" -D_FILE_OFFSET_BITS=64 -D_LARGEFILE64_SOURCE -D_LARGEFILE_SOURCE -Dlibrepo_EXPORTS -I/home/pokybuild/yocto-worker/oe-selftest-armhost/build/build-st-1938845/tmp-mc-tiny/work/aarch64-linux/librepo-native/1.17.0/git -I/home/pokybuild/yocto-worker/oe-selftest-armhost/build/build-st-1938845/tmp-mc-tiny/work/aarch64-linux/librepo-native/1.17.0/recipe-sysroot-native/usr/lib/pkgconfig/../../../usr/include/libmount -I/home/pokybuild/yocto-worker/oe-selftest-armhost/build/build-st-1938845/tmp-mc-tiny/work/aarch64-linux/librepo-native/1.17.0/recipe-sysroot-native/usr/lib/pkgconfig/../../../usr/include/blkid -I/home/pokybuild/yocto-worker/oe-selftest-armhost/build/build-st-1938845/tmp-mc-tiny/work/aarch64-linux/librepo-native/1.17.0/recipe-sysroot-native/usr/lib/pkgconfig/../../../usr/include/glib-2.0 -I/home/pokybuild/yocto-worker/oe-selftest-armhost/build/build-st-1938845/tmp-mc-tiny/work/aarch64-linux/librepo-native/1.17.0/recipe-sysroot-native/usr/lib/pkgconfig/../../../usr/lib/glib-2.0/include -I/home/pokybuild/yocto-worker/oe-selftest-armhost/build/build-st-1938845/tmp-mc-tiny/work/aarch64-linux/librepo-native/1.17.0/recipe-sysroot-native/usr/lib/pkgconfig/../../../usr/include/libxml2 -isystem/home/pokybuild/yocto-worker/oe-selftest-armhost/build/build-st-1938845/tmp-mc-tiny/work/aarch64-linux/librepo-native/1.17.0/recipe-sysroot-native/usr/include -O2 -pipe -std=c99 -Wall -fPIC -MD -MT librepo/CMakeFiles/librepo.dir/gpg_gpgme.c.o -MF librepo/CMakeFiles/librepo.dir/gpg_gpgme.c.o.d -o librepo/CMakeFiles/librepo.dir/gpg_gpgme.c.o -c /home/pokybuild/yocto-worker/oe-selftest-armhost/build/build-st-1938845/tmp-mc-tiny/work/aarch64-linux/librepo-native/1.17.0/git/librepo/gpg_gpgme.c
+| /home/pokybuild/yocto-worker/oe-selftest-armhost/build/build-st-1938845/tmp-mc-tiny/work/aarch64-linux/librepo-native/1.17.0/git/librepo/gpg_gpgme.c: In function ‘lr_gpg_ensure_socket_dir_exists’:
+| /home/pokybuild/yocto-worker/oe-selftest-armhost/build/build-st-1938845/tmp-mc-tiny/work/aarch64-linux/librepo-native/1.17.0/git/librepo/gpg_gpgme.c:135:1: error: label at end of compound statement
+| 135 | exit:
+| | ^~~~
+
+Ensuring the exit: block is not empty fixes the issue.
+
+Upstream-Status: Submitted [https://github.com/rpm-software-management/librepo/pull/300]
+Signed-off-by: Alexander Kanavin <alex@linutronix.de>
+---
+ librepo/gpg_gpgme.c | 1 +
+ 1 file changed, 1 insertion(+)
+
+diff --git a/librepo/gpg_gpgme.c b/librepo/gpg_gpgme.c
+index c4addb2..47c3153 100644
+--- a/librepo/gpg_gpgme.c
++++ b/librepo/gpg_gpgme.c
+@@ -144,6 +144,7 @@ exit:
+ }
+ freecon(old_default_context);
+ #endif
++ return;
+ }
+
+ static gpgme_ctx_t
diff --git a/meta/recipes-devtools/librepo/librepo/0002-Do-not-try-to-obtain-PYTHON_INSTALL_DIR-by-running-p.patch b/meta/recipes-devtools/librepo/librepo/0002-Do-not-try-to-obtain-PYTHON_INSTALL_DIR-by-running-p.patch
index 2ea50f00ff..4a851ad1e2 100644
--- a/meta/recipes-devtools/librepo/librepo/0002-Do-not-try-to-obtain-PYTHON_INSTALL_DIR-by-running-p.patch
+++ b/meta/recipes-devtools/librepo/librepo/0002-Do-not-try-to-obtain-PYTHON_INSTALL_DIR-by-running-p.patch
@@ -1,4 +1,4 @@
-From 82bd6d196a0453657cbacaaedd75b2d2fe0bf9ba Mon Sep 17 00:00:00 2001
+From 76052aa40c61580869472fd3f009a4ab1620b998 Mon Sep 17 00:00:00 2001
From: Alexander Kanavin <alex.kanavin@gmail.com>
Date: Fri, 30 Dec 2016 18:05:36 +0200
Subject: [PATCH] Do not try to obtain PYTHON_INSTALL_DIR by running python.
@@ -33,6 +33,3 @@ index 8523ca7..06e5f7b 100644
INCLUDE_DIRECTORIES (${PYTHON_INCLUDE_PATH})
MESSAGE(STATUS "Python3 install dir is ${PYTHON_INSTALL_DIR}")
---
-2.25.1
-
diff --git a/meta/recipes-devtools/librepo/librepo/0004-Set-gpgme-variables-with-pkg-config-not-with-cmake-m.patch b/meta/recipes-devtools/librepo/librepo/0004-Set-gpgme-variables-with-pkg-config-not-with-cmake-m.patch
index d61d8fbf85..fd1df95473 100644
--- a/meta/recipes-devtools/librepo/librepo/0004-Set-gpgme-variables-with-pkg-config-not-with-cmake-m.patch
+++ b/meta/recipes-devtools/librepo/librepo/0004-Set-gpgme-variables-with-pkg-config-not-with-cmake-m.patch
@@ -1,4 +1,4 @@
-From 25113b34bc1aae377d7bf447e69528783e2c177e Mon Sep 17 00:00:00 2001
+From b5918f06d790dc346d41de4b3a3ec01f290c1d25 Mon Sep 17 00:00:00 2001
From: Alexander Kanavin <alex.kanavin@gmail.com>
Date: Fri, 30 Dec 2016 18:23:27 +0200
Subject: [PATCH] Set gpgme variables with pkg-config, not with cmake module
@@ -6,22 +6,21 @@ Subject: [PATCH] Set gpgme variables with pkg-config, not with cmake module
Upstream-Status: Inappropriate [gpgme upstream does not have pkg-config support and is not interested in it]
Signed-off-by: Alexander Kanavin <alex.kanavin@gmail.com>
-
---
CMakeLists.txt | 3 ++-
1 file changed, 2 insertions(+), 1 deletion(-)
diff --git a/CMakeLists.txt b/CMakeLists.txt
-index a45d5c4..40249e6 100644
+index 6c00024..a2f57af 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
-@@ -32,7 +32,8 @@ PKG_CHECK_MODULES(GLIB2 glib-2.0 REQUIRED)
- PKG_SEARCH_MODULE(LIBCRYPTO REQUIRED libcrypto openssl)
- PKG_CHECK_MODULES(LIBXML2 libxml-2.0 REQUIRED)
+@@ -37,7 +37,8 @@ PKG_CHECK_MODULES(LIBXML2 libxml-2.0 REQUIRED)
FIND_PACKAGE(CURL 7.52.0 REQUIRED)
--FIND_PACKAGE(Gpgme REQUIRED)
-+PKG_CHECK_MODULES(GPGME gpgme REQUIRED)
-+set(GPGME_VANILLA_LIBRARIES ${GPGME_LIBRARIES})
-
- IF (WITH_ZCHUNK)
+ IF (USE_GPGME)
+- FIND_PACKAGE(Gpgme REQUIRED)
++ PKG_CHECK_MODULES(GPGME gpgme REQUIRED)
++ set(GPGME_VANILLA_LIBRARIES ${GPGME_LIBRARIES})
+ IF (ENABLE_SELINUX)
+ PKG_CHECK_MODULES(SELINUX REQUIRED libselinux)
+ ENDIF(ENABLE_SELINUX)
diff --git a/meta/recipes-devtools/librepo/librepo_1.14.3.bb b/meta/recipes-devtools/librepo/librepo_1.14.3.bb
deleted file mode 100644
index 2c8e592251..0000000000
--- a/meta/recipes-devtools/librepo/librepo_1.14.3.bb
+++ /dev/null
@@ -1,29 +0,0 @@
-SUMMARY = "A library providing C and Python (libcURL like) API \
- for downloading linux repository metadata and packages."
-HOMEPAGE = "https://github.com/rpm-software-management/librepo"
-DESCRIPTION = "${SUMMARY}"
-LICENSE = "LGPL-2.1-only"
-LIC_FILES_CHKSUM = "file://COPYING;md5=4fbd65380cdd255951079008b364516c"
-
-SRC_URI = "git://github.com/rpm-software-management/librepo.git;branch=master;protocol=https \
- file://0002-Do-not-try-to-obtain-PYTHON_INSTALL_DIR-by-running-p.patch \
- file://0004-Set-gpgme-variables-with-pkg-config-not-with-cmake-m.patch \
- "
-
-SRCREV = "8fc7950795282d9c7c50071f45973006de5594ab"
-
-S = "${WORKDIR}/git"
-
-DEPENDS = "curl glib-2.0 openssl attr gpgme libxml2"
-
-inherit cmake setuptools3-base pkgconfig
-
-EXTRA_OECMAKE = " \
- -DPYTHON_INSTALL_DIR=${PYTHON_SITEPACKAGES_DIR} \
- -DPYTHON_DESIRED=3 \
- -DENABLE_TESTS=OFF \
- -DENABLE_DOCS=OFF \
- -DWITH_ZCHUNK=OFF \
-"
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/librepo/librepo_1.17.0.bb b/meta/recipes-devtools/librepo/librepo_1.17.0.bb
new file mode 100644
index 0000000000..810191459d
--- /dev/null
+++ b/meta/recipes-devtools/librepo/librepo_1.17.0.bb
@@ -0,0 +1,31 @@
+SUMMARY = "A library providing C and Python (libcURL like) API \
+ for downloading linux repository metadata and packages."
+HOMEPAGE = "https://github.com/rpm-software-management/librepo"
+DESCRIPTION = "${SUMMARY}"
+LICENSE = "LGPL-2.1-only"
+LIC_FILES_CHKSUM = "file://COPYING;md5=4fbd65380cdd255951079008b364516c"
+
+SRC_URI = "git://github.com/rpm-software-management/librepo.git;branch=master;protocol=https \
+ file://0002-Do-not-try-to-obtain-PYTHON_INSTALL_DIR-by-running-p.patch \
+ file://0004-Set-gpgme-variables-with-pkg-config-not-with-cmake-m.patch \
+ file://0001-gpg_gpgme.c-fix-build-errors-with-older-gcc.patch \
+ "
+
+SRCREV = "ae727d99086f70f39fba5695af5460e1da908c1b"
+
+S = "${WORKDIR}/git"
+
+DEPENDS = "curl glib-2.0 openssl attr gpgme libxml2"
+
+inherit cmake setuptools3-base pkgconfig
+
+EXTRA_OECMAKE = " \
+ -DPYTHON_INSTALL_DIR=${PYTHON_SITEPACKAGES_DIR} \
+ -DPYTHON_DESIRED=3 \
+ -DENABLE_TESTS=OFF \
+ -DENABLE_DOCS=OFF \
+ -DWITH_ZCHUNK=OFF \
+ -DENABLE_SELINUX=OFF \
+"
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/libtool/libtool-2.4.7.inc b/meta/recipes-devtools/libtool/libtool-2.4.7.inc
index a07df720ac..16bb10d48f 100644
--- a/meta/recipes-devtools/libtool/libtool-2.4.7.inc
+++ b/meta/recipes-devtools/libtool/libtool-2.4.7.inc
@@ -15,6 +15,7 @@ SRC_URI = "${GNU_MIRROR}/libtool/libtool-${PV}.tar.gz \
file://0004-ltmain.sh-Fix-sysroot-paths-being-encoded-into-RPATH.patch \
file://0005-ltmain.in-Don-t-encode-RATHS-which-match-default-lin.patch \
file://dont-depend-on-help2man.patch \
+ file://0003-libtool.m4-Cleanup-sysroot-trailing.patch \
file://0006-libtool.m4-Handle-as-a-sysroot-correctly.patch \
file://nohardcodepaths.patch \
file://0007-libtool-Fix-support-for-NIOS2-processor.patch \
diff --git a/meta/recipes-devtools/libtool/libtool/0001-ltmain.in-Handle-trailing-slashes-on-install-command.patch b/meta/recipes-devtools/libtool/libtool/0001-ltmain.in-Handle-trailing-slashes-on-install-command.patch
index 4c4d0547a3..51c0c52da8 100644
--- a/meta/recipes-devtools/libtool/libtool/0001-ltmain.in-Handle-trailing-slashes-on-install-command.patch
+++ b/meta/recipes-devtools/libtool/libtool/0001-ltmain.in-Handle-trailing-slashes-on-install-command.patch
@@ -15,10 +15,10 @@ Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
Upstream-Status: Submitted [https://lists.gnu.org/archive/html/libtool-patches/2021-10/msg00010.html]
diff --git a/build-aux/ltmain.in b/build-aux/ltmain.in
-index 96b37003..3d5dcd0a 100644
+index a5f21a1..f884824 100644
--- a/build-aux/ltmain.in
+++ b/build-aux/ltmain.in
-@@ -2378,8 +2378,14 @@ func_mode_install ()
+@@ -2381,8 +2381,14 @@ func_mode_install ()
func_append dir "$objdir"
if test -n "$relink_command"; then
diff --git a/meta/recipes-devtools/libtool/libtool/0001-ltmain.in-Parse-additional-clang-options.patch b/meta/recipes-devtools/libtool/libtool/0001-ltmain.in-Parse-additional-clang-options.patch
index 0285c143d2..c52083327c 100644
--- a/meta/recipes-devtools/libtool/libtool/0001-ltmain.in-Parse-additional-clang-options.patch
+++ b/meta/recipes-devtools/libtool/libtool/0001-ltmain.in-Parse-additional-clang-options.patch
@@ -1,21 +1,26 @@
-From: Khem Raj <raj.khem@gmail.com>
-Subject: [PATCH] ltmain.in: Parse additional clang options
+ltmain.in: Parse additional clang options
-clang uses -rtlib and --unwindlib to select proper compiler runtime in
-some cases. There fore pass these options to linker when found in
+clang uses -rtlib and --unwindlib to select proper compiler
+runtime in some cases. Therefore pass these options to linker when found in
ldflags
-Upstream-Status: Pending
-Signed-off-by: Khem Raj <raj.khem@gmail.com>
+* build-aux/ltmain.in: Handle clang linker options
-Index: libtool-2.4.7/build-aux/ltmain.in
-===================================================================
---- libtool-2.4.7.orig/build-aux/ltmain.in
-+++ libtool-2.4.7/build-aux/ltmain.in
-@@ -5420,10 +5420,12 @@ func_mode_link ()
+Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
+Upstream-Status: Backport [https://git.savannah.gnu.org/cgit/libtool.git/commit/?id=725646bf095bf5c07c49ae38dd060f95bd95ae3c]
+
+---
+ build-aux/ltmain.in | 4 +++-
+ 1 file changed, 3 insertions(+), 1 deletion(-)
+
+diff --git a/build-aux/ltmain.in b/build-aux/ltmain.in
+index 037f009..ba5c816 100644
+--- a/build-aux/ltmain.in
++++ b/build-aux/ltmain.in
+@@ -5414,10 +5414,12 @@ func_mode_link ()
# -fsanitize=* Clang/GCC memory and address sanitizer
# -fuse-ld=* Linker select flags for GCC
- # -f*-prefix-map* needed for lto linking
+ # -f{file|debug|macro|profile}-prefix-map* needed for lto linking
+ # -rtlib=* select c runtime lib with clang
+ # --unwindlib=* select unwinder library with clang
# -Wa,* Pass flags directly to the assembler
@@ -23,6 +28,6 @@ Index: libtool-2.4.7/build-aux/ltmain.in
-t[45]*|-txscale*|-p|-pg|--coverage|-fprofile-*|-F*|@*|-tp=*|--sysroot=*| \
- -O*|-g*|-flto*|-fwhopr*|-fuse-linker-plugin|-fstack-protector*|-stdlib=*| \
+ -O*|-g*|-flto*|-fwhopr*|-fuse-linker-plugin|-fstack-protector*|-stdlib=*|-rtlib=*|--unwindlib=*| \
- -specs=*|-fsanitize=*|-fuse-ld=*|-f*-prefix-map*|-Wa,*)
+ -specs=*|-fsanitize=*|-fuse-ld=*|-ffile-prefix-map=*|-fdebug-prefix-map=*|-fmacro-prefix-map=*|-fprofile-prefix-map=*|-Wa,*)
func_quote_arg pretty "$arg"
arg=$func_quote_arg_result
diff --git a/meta/recipes-devtools/libtool/libtool/0002-libtool.m4-Rename-the-with-sysroot-option-to-avoid-c.patch b/meta/recipes-devtools/libtool/libtool/0002-libtool.m4-Rename-the-with-sysroot-option-to-avoid-c.patch
index 1920e2e6db..2126f82953 100644
--- a/meta/recipes-devtools/libtool/libtool/0002-libtool.m4-Rename-the-with-sysroot-option-to-avoid-c.patch
+++ b/meta/recipes-devtools/libtool/libtool/0002-libtool.m4-Rename-the-with-sysroot-option-to-avoid-c.patch
@@ -84,9 +84,10 @@ Index: libtool-2.4.7/tests/sysroot.at
AC_OUTPUT(Makefile)
]])
diff --git a/tests/testsuite b/tests/testsuite
+index 24265e4..d388e3e 100755
--- a/tests/testsuite
+++ b/tests/testsuite
-@@ -48945,7 +48945,7 @@ $at_traceon; }
+@@ -48997,7 +48997,7 @@ $at_traceon; }
LDFLAGS="$LDFLAGS --sysroot=$sysroot -no-undefined"
@@ -95,7 +96,7 @@ diff --git a/tests/testsuite b/tests/testsuite
#???
if test PATH = "$shlibpath_var"; then
-@@ -49154,7 +49154,7 @@ AM_INIT_AUTOMAKE([foreign])
+@@ -49211,7 +49211,7 @@ AM_INIT_AUTOMAKE([foreign])
AC_PROG_CC
AC_CONFIG_SRCDIR([lib2.c])
LT_INIT
@@ -104,7 +105,7 @@ diff --git a/tests/testsuite b/tests/testsuite
AC_SUBST([sysroot])
AC_OUTPUT(Makefile)
_ATEOF
-@@ -49342,7 +49342,7 @@ AM_INIT_AUTOMAKE([foreign])
+@@ -49404,7 +49404,7 @@ AM_INIT_AUTOMAKE([foreign])
AC_PROG_CC
AC_CONFIG_SRCDIR([prog.c])
LT_INIT
@@ -113,7 +114,7 @@ diff --git a/tests/testsuite b/tests/testsuite
AC_SUBST([sysroot])
AC_OUTPUT(Makefile)
_ATEOF
-@@ -49694,7 +49694,7 @@ $at_traceon; }
+@@ -49761,7 +49761,7 @@ $at_traceon; }
LDFLAGS="$LDFLAGS --sysroot=$sysroot -no-undefined"
@@ -122,7 +123,7 @@ diff --git a/tests/testsuite b/tests/testsuite
#???
if test PATH = "$shlibpath_var"; then
-@@ -49903,7 +49903,7 @@ AM_INIT_AUTOMAKE([foreign])
+@@ -49975,7 +49975,7 @@ AM_INIT_AUTOMAKE([foreign])
AC_PROG_CC
AC_CONFIG_SRCDIR([lib2.c])
LT_INIT
@@ -131,7 +132,7 @@ diff --git a/tests/testsuite b/tests/testsuite
AC_SUBST([sysroot])
AC_OUTPUT(Makefile)
_ATEOF
-@@ -50091,7 +50091,7 @@ AM_INIT_AUTOMAKE([foreign])
+@@ -50168,7 +50168,7 @@ AM_INIT_AUTOMAKE([foreign])
AC_PROG_CC
AC_CONFIG_SRCDIR([prog.c])
LT_INIT
@@ -140,7 +141,7 @@ diff --git a/tests/testsuite b/tests/testsuite
AC_SUBST([sysroot])
AC_OUTPUT(Makefile)
_ATEOF
-@@ -50443,7 +50443,7 @@ $at_traceon; }
+@@ -50525,7 +50525,7 @@ $at_traceon; }
LDFLAGS="$LDFLAGS --sysroot=$sysroot -no-undefined"
@@ -149,7 +150,7 @@ diff --git a/tests/testsuite b/tests/testsuite
#???
if test PATH = "$shlibpath_var"; then
-@@ -50652,7 +50652,7 @@ AM_INIT_AUTOMAKE([foreign])
+@@ -50739,7 +50739,7 @@ AM_INIT_AUTOMAKE([foreign])
AC_PROG_CC
AC_CONFIG_SRCDIR([lib2.c])
LT_INIT
@@ -158,7 +159,7 @@ diff --git a/tests/testsuite b/tests/testsuite
AC_SUBST([sysroot])
AC_OUTPUT(Makefile)
_ATEOF
-@@ -50840,7 +50840,7 @@ AM_INIT_AUTOMAKE([foreign])
+@@ -50932,7 +50932,7 @@ AM_INIT_AUTOMAKE([foreign])
AC_PROG_CC
AC_CONFIG_SRCDIR([prog.c])
LT_INIT
diff --git a/meta/recipes-devtools/libtool/libtool/0003-libtool.m4-Cleanup-sysroot-trailing.patch b/meta/recipes-devtools/libtool/libtool/0003-libtool.m4-Cleanup-sysroot-trailing.patch
new file mode 100644
index 0000000000..6d922382ef
--- /dev/null
+++ b/meta/recipes-devtools/libtool/libtool/0003-libtool.m4-Cleanup-sysroot-trailing.patch
@@ -0,0 +1,37 @@
+libtool.m4: Cleanup sysroot trailing "/"
+
+If $CC has --sysroot=/, it is a valid configuration however libtool will
+then set lt_sysroot to "/".
+
+This means references like $lt_sysroot$libdir become //usr/lib instead
+of the more normally expected /usr/lib. This may or may not break something
+but certainly is confusing to the user and gives confusing output. Making
+"/" simply unset lt_sysroot is much cleaner.
+
+Whilst here, trim any trailing '/' from sysroot paths to drop the duplication
+and result in cleaner/consistent output.
+
+* m4/libtool.m4: Cleanup sysroot trailing '/' handling
+
+Upstream-Status: Backport [https://git.savannah.gnu.org/cgit/libtool.git/commit/?id=365805327c7b9bbdb0e622b954b6b0d8eaeb3f99]
+Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
+
+---
+ m4/libtool.m4 | 4 +++-
+ 1 file changed, 3 insertions(+), 1 deletion(-)
+
+diff --git a/m4/libtool.m4 b/m4/libtool.m4
+index fa1ae91..2f31d24 100644
+--- a/m4/libtool.m4
++++ b/m4/libtool.m4
+@@ -1256,7 +1256,9 @@ lt_sysroot=
+ case $with_libtool_sysroot in #(
+ yes)
+ if test yes = "$GCC"; then
+- lt_sysroot=`$CC --print-sysroot 2>/dev/null`
++ # Trim trailing / since we'll always append absolute paths and we want
++ # to avoid //, if only for less confusing output for the user.
++ lt_sysroot=`$CC --print-sysroot 2>/dev/null | $SED 's:/\+$::'`
+ fi
+ ;; #(
+ /*)
diff --git a/meta/recipes-devtools/libtool/libtool/0003-ltmain.in-Add-missing-sysroot-to-library-path.patch b/meta/recipes-devtools/libtool/libtool/0003-ltmain.in-Add-missing-sysroot-to-library-path.patch
index 07c7f911e0..35f48ef906 100644
--- a/meta/recipes-devtools/libtool/libtool/0003-ltmain.in-Add-missing-sysroot-to-library-path.patch
+++ b/meta/recipes-devtools/libtool/libtool/0003-ltmain.in-Add-missing-sysroot-to-library-path.patch
@@ -11,10 +11,10 @@ Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
Upstream-Status: Submitted [https://lists.gnu.org/archive/html/libtool-patches/2021-10/msg00017.html]
diff --git a/build-aux/ltmain.in b/build-aux/ltmain.in
-index 3d5dcd0a..2c994612 100644
+index f884824..25a91de 100644
--- a/build-aux/ltmain.in
+++ b/build-aux/ltmain.in
-@@ -6475,7 +6475,7 @@ func_mode_link ()
+@@ -6503,7 +6503,7 @@ func_mode_link ()
fi
else
# We cannot seem to hardcode it, guess we'll fake it.
diff --git a/meta/recipes-devtools/libtool/libtool/0004-ltmain.sh-Fix-sysroot-paths-being-encoded-into-RPATH.patch b/meta/recipes-devtools/libtool/libtool/0004-ltmain.sh-Fix-sysroot-paths-being-encoded-into-RPATH.patch
index 4fcf456b0b..9203302182 100644
--- a/meta/recipes-devtools/libtool/libtool/0004-ltmain.sh-Fix-sysroot-paths-being-encoded-into-RPATH.patch
+++ b/meta/recipes-devtools/libtool/libtool/0004-ltmain.sh-Fix-sysroot-paths-being-encoded-into-RPATH.patch
@@ -10,10 +10,10 @@ Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
Upstream-Status: Submitted [https://lists.gnu.org/archive/html/libtool-patches/2021-10/msg00009.html]
diff --git a/build-aux/ltmain.in b/build-aux/ltmain.in
-index 2c994612..96238350 100644
+index 25a91de..40cb94a 100644
--- a/build-aux/ltmain.in
+++ b/build-aux/ltmain.in
-@@ -7654,9 +7654,11 @@ EOF
+@@ -7682,9 +7682,11 @@ EOF
test relink = "$opt_mode" || rpath=$compile_rpath$rpath
for libdir in $rpath; do
if test -n "$hardcode_libdir_flag_spec"; then
@@ -27,7 +27,7 @@ index 2c994612..96238350 100644
if test -z "$hardcode_libdirs"; then
hardcode_libdirs=$libdir
else
-@@ -8386,6 +8388,10 @@ EOF
+@@ -8414,6 +8416,10 @@ EOF
hardcode_libdirs=
for libdir in $compile_rpath $finalize_rpath; do
if test -n "$hardcode_libdir_flag_spec"; then
@@ -38,7 +38,7 @@ index 2c994612..96238350 100644
if test -n "$hardcode_libdir_separator"; then
if test -z "$hardcode_libdirs"; then
hardcode_libdirs=$libdir
-@@ -8437,6 +8443,10 @@ EOF
+@@ -8465,6 +8471,10 @@ EOF
hardcode_libdirs=
for libdir in $finalize_rpath; do
if test -n "$hardcode_libdir_flag_spec"; then
diff --git a/meta/recipes-devtools/libtool/libtool/0005-ltmain.in-Don-t-encode-RATHS-which-match-default-lin.patch b/meta/recipes-devtools/libtool/libtool/0005-ltmain.in-Don-t-encode-RATHS-which-match-default-lin.patch
index 589a33e8a6..8b6352b01b 100644
--- a/meta/recipes-devtools/libtool/libtool/0005-ltmain.in-Don-t-encode-RATHS-which-match-default-lin.patch
+++ b/meta/recipes-devtools/libtool/libtool/0005-ltmain.in-Don-t-encode-RATHS-which-match-default-lin.patch
@@ -13,10 +13,10 @@ Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
Upstream-Status: Submitted [https://lists.gnu.org/archive/html/libtool-patches/2021-10/msg00013.html]
diff --git a/build-aux/ltmain.in b/build-aux/ltmain.in
-index 96238350..6fb58ed2 100644
+index 40cb94a..2fa055e 100644
--- a/build-aux/ltmain.in
+++ b/build-aux/ltmain.in
-@@ -7672,8 +7672,16 @@ EOF
+@@ -7700,8 +7700,16 @@ EOF
esac
fi
else
@@ -35,7 +35,7 @@ index 96238350..6fb58ed2 100644
fi
elif test -n "$runpath_var"; then
case "$perm_rpath " in
-@@ -8406,8 +8414,16 @@ EOF
+@@ -8434,8 +8442,16 @@ EOF
esac
fi
else
@@ -54,7 +54,7 @@ index 96238350..6fb58ed2 100644
fi
elif test -n "$runpath_var"; then
case "$perm_rpath " in
-@@ -8461,8 +8477,14 @@ EOF
+@@ -8489,8 +8505,14 @@ EOF
esac
fi
else
diff --git a/meta/recipes-devtools/libtool/libtool/0006-libtool.m4-Handle-as-a-sysroot-correctly.patch b/meta/recipes-devtools/libtool/libtool/0006-libtool.m4-Handle-as-a-sysroot-correctly.patch
index a221dab528..c104e904cc 100644
--- a/meta/recipes-devtools/libtool/libtool/0006-libtool.m4-Handle-as-a-sysroot-correctly.patch
+++ b/meta/recipes-devtools/libtool/libtool/0006-libtool.m4-Handle-as-a-sysroot-correctly.patch
@@ -1,34 +1,29 @@
-From: Richard Purdie <richard.purdie@linuxfoundation.org>
-Subject: [PATCH 06/12] libtool.m4: Handle "/" as a sysroot correctly
+libtool.m4: Change libtool to handle sysroots by default
-Update libtool.m4 to resolve a problem with lt_sysroot not being properly
-updated if the option '--with[-libtool]-sysroot' is not provided when
-running the 'configure' script for a package so that "/" as a sysroot
-is handled correctly by libtool.
+Rather than using no sysroot by default, always query gcc to obtain the sysroot.
Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
-Upstream Report:
-http://lists.gnu.org/archive/html/bug-libtool/2013-09/msg00005.html
+Upstream-Status: Inappropriate [Upstream are unlikely to accept this change of default]
-Upstream-Status: Submitted [https://lists.gnu.org/archive/html/libtool-patches/2021-10/msg00018.html]
+---
+ m4/libtool.m4 | 6 +++---
+ 1 file changed, 3 insertions(+), 3 deletions(-)
-Index: libtool-2.4.7/m4/libtool.m4
-===================================================================
---- libtool-2.4.7.orig/m4/libtool.m4
-+++ libtool-2.4.7/m4/libtool.m4
-@@ -1254,16 +1254,20 @@ dnl lt_sysroot will always be passed unq
+diff --git a/m4/libtool.m4 b/m4/libtool.m4
+index 2f31d24..bd90775 100644
+--- a/m4/libtool.m4
++++ b/m4/libtool.m4
+@@ -1254,18 +1254,18 @@ dnl lt_sysroot will always be passed unquoted. We quote it here
dnl in case the user passed a directory name.
lt_sysroot=
case $with_libtool_sysroot in #(
- yes)
+ no)
if test yes = "$GCC"; then
- lt_sysroot=`$CC --print-sysroot 2>/dev/null`
-+ # Treat "/" the same a an unset sysroot.
-+ if test "$lt_sysroot" = /; then
-+ lt_sysroot=
-+ fi
+ # Trim trailing / since we'll always append absolute paths and we want
+ # to avoid //, if only for less confusing output for the user.
+ lt_sysroot=`$CC --print-sysroot 2>/dev/null | $SED 's:/\+$::'`
fi
;; #(
+ yes|''|/)
diff --git a/meta/recipes-devtools/libtool/libtool/0007-libtool-Fix-support-for-NIOS2-processor.patch b/meta/recipes-devtools/libtool/libtool/0007-libtool-Fix-support-for-NIOS2-processor.patch
index 277afe0af6..676ce7fd1b 100644
--- a/meta/recipes-devtools/libtool/libtool/0007-libtool-Fix-support-for-NIOS2-processor.patch
+++ b/meta/recipes-devtools/libtool/libtool/0007-libtool-Fix-support-for-NIOS2-processor.patch
@@ -1,65 +1,60 @@
-From: Marek Vasut <marex@denx.de>
-Subject: [PATCH 07/12] libtool: Fix support for NIOS2 processor
+libtool: Fix support for NIOS2 processor
The name of the system contains the string "nios2". This string
is caught by the some of the greedy checks for OS/2 in libtool,
in particular the *os2* branches of switch statements match for
the nios2 string, which results in incorrect behavior of libtool.
-This patch adds an explicit check for *nios2* before the *os2*
-checks to prevent the OS/2 check incorrectly trapping the nios2
-as well.
+Switch to use $host_os instead of $host and tweak the patterns to
+match to avoid this problem for nios2.
+
+* build-aux/ltmain.in: Fix NIOS2 support
+---
+ build-aux/ltmain.in | 12 ++++++------
+ 1 file changed, 6 insertions(+), 6 deletions(-)
-Signed-off-by: Marek Vasut <marex@denx.de>
Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
-Upstream-Status: Submitted [https://lists.gnu.org/archive/html/libtool-patches/2021-10/msg00021.html]
+Submitted: https://lists.gnu.org/archive/html/libtool-patches/2021-10/msg00021.html
+Reworked and submitted: https://lists.gnu.org/archive/html/libtool-patches/2024-01/msg00068.html
+Upstream-Status: Backport [https://git.savannah.gnu.org/cgit/libtool.git/commit/?id=49e6cb0d4dfdca2a59b909dc4532fe22dbc57ad5]
-diff --git a/build-aux/ltmain.in b/build-aux/ltmain.in
-index 6fb58ed2..606f17be 100644
---- a/build-aux/ltmain.in
-+++ b/build-aux/ltmain.in
-@@ -519,6 +519,12 @@ libtool_validate_options ()
+Index: libtool-2.4.7/build-aux/ltmain.in
+===================================================================
+--- libtool-2.4.7.orig/build-aux/ltmain.in
++++ libtool-2.4.7/build-aux/ltmain.in
+@@ -518,10 +518,10 @@ libtool_validate_options ()
+ # preserve --debug
test : = "$debug_cmd" || func_append preserve_args " --debug"
- case $host in
-+ # For NIOS2, we want to make sure that it's not caught by the
-+ # more general OS/2 check below. Otherwise, NIOS2 is the same
-+ # as the default option.
-+ *nios2*)
-+ opt_duplicate_compiler_generated_deps=$opt_preserve_dup_deps
-+ ;;
+- case $host in
++ case $host_os in
# Solaris2 added to fix http://debbugs.gnu.org/cgi/bugreport.cgi?bug=16452
# see also: http://gcc.gnu.org/bugzilla/show_bug.cgi?id=59788
- *cygwin* | *mingw* | *pw32* | *cegcc* | *solaris2* | *os2*)
-@@ -6246,6 +6252,15 @@ func_mode_link ()
+- *cygwin* | *mingw* | *pw32* | *cegcc* | *solaris2* | *os2*)
++ cygwin* | mingw* | pw32* | cegcc* | solaris2* | os2*)
+ # don't eliminate duplications in $postdeps and $predeps
+ opt_duplicate_compiler_generated_deps=:
+ ;;
+@@ -6273,8 +6273,8 @@ func_mode_link ()
+ fi
if test -n "$library_names" &&
{ test no = "$use_static_libs" || test -z "$old_library"; }; then
- case $host in
-+ *nios2*)
-+ # For NIOS2, we want to make sure that it's not caught by the
-+ # more general OS/2 check below. Otherwise, NIOS2 is the same
-+ # as the default option.
-+ if test no = "$installed"; then
-+ func_append notinst_deplibs " $lib"
-+ need_relink=yes
-+ fi
-+ ;;
- *cygwin* | *mingw* | *cegcc* | *os2*)
+- case $host in
+- *cygwin* | *mingw* | *cegcc* | *os2*)
++ case $host_os in
++ cygwin* | mingw* | cegcc* | os2*)
# No point in relinking DLLs because paths are not encoded
func_append notinst_deplibs " $lib"
-@@ -6316,6 +6331,11 @@ func_mode_link ()
+ need_relink=no
+@@ -6343,8 +6343,8 @@ func_mode_link ()
+ soname=$dlname
elif test -n "$soname_spec"; then
# bleh windows
- case $host in
-+ *nios2*)
-+ # For NIOS2, we want to make sure that it's not caught by the
-+ # more general OS/2 check below. Otherwise, NIOS2 is the same
-+ # as the default option.
-+ ;;
- *cygwin* | mingw* | *cegcc* | *os2*)
+- case $host in
+- *cygwin* | mingw* | *cegcc* | *os2*)
++ case $host_os in
++ cygwin* | mingw* | cegcc* | os2*)
func_arith $current - $age
major=$func_arith_result
---
-2.25.1
-
+ versuffix=-$major
diff --git a/meta/recipes-devtools/libtool/libtool/0008-libtool-Check-for-static-libs-for-internal-compiler-.patch b/meta/recipes-devtools/libtool/libtool/0008-libtool-Check-for-static-libs-for-internal-compiler-.patch
index 83c74373ae..c151c3b481 100644
--- a/meta/recipes-devtools/libtool/libtool/0008-libtool-Check-for-static-libs-for-internal-compiler-.patch
+++ b/meta/recipes-devtools/libtool/libtool/0008-libtool-Check-for-static-libs-for-internal-compiler-.patch
@@ -16,10 +16,10 @@ https://debbugs.gnu.org/cgi/bugreport.cgi?bug=27866
Upstream-Status: Submitted [https://lists.gnu.org/archive/html/libtool-patches/2021-10/msg00016.html]
diff --git a/m4/libtool.m4 b/m4/libtool.m4
-index 180dd9d1..022c1292 100644
+index bd90775..3794130 100644
--- a/m4/libtool.m4
+++ b/m4/libtool.m4
-@@ -7560,7 +7560,7 @@ if AC_TRY_EVAL(ac_compile); then
+@@ -7556,7 +7556,7 @@ if AC_TRY_EVAL(ac_compile); then
for p in `eval "$output_verbose_link_cmd"`; do
case $prev$p in
diff --git a/meta/recipes-devtools/libtool/libtool/0009-Makefile.am-make-sure-autoheader-run-before-autoconf.patch b/meta/recipes-devtools/libtool/libtool/0009-Makefile.am-make-sure-autoheader-run-before-autoconf.patch
index 437c09fa6d..f51deecbef 100644
--- a/meta/recipes-devtools/libtool/libtool/0009-Makefile.am-make-sure-autoheader-run-before-autoconf.patch
+++ b/meta/recipes-devtools/libtool/libtool/0009-Makefile.am-make-sure-autoheader-run-before-autoconf.patch
@@ -11,10 +11,10 @@ So make sure autoheader run before autoconf to avoid this race.
Signed-off-by: Mingli Yu <mingli.yu@windriver.com>
Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
-Upstream-Status: Submitted [https://lists.gnu.org/archive/html/libtool-patches/2021-10/msg00015.html]
+Upstream-Status: Backport [https://git.savannah.gnu.org/cgit/libtool.git/commit/?id=e7dc729dd27b367905cd0ce52b5466d91537857a]
diff --git a/Makefile.am b/Makefile.am
-index 6b546092..84795d87 100644
+index c29860e..9c34bfd 100644
--- a/Makefile.am
+++ b/Makefile.am
@@ -370,7 +370,7 @@ lt_configure_deps = $(lt_aclocal_m4) $(lt_aclocal_m4_deps)
diff --git a/meta/recipes-devtools/libtool/libtool/0010-Makefile.am-make-sure-autoheader-run-before-automake.patch b/meta/recipes-devtools/libtool/libtool/0010-Makefile.am-make-sure-autoheader-run-before-automake.patch
index a92e22905f..e451de59e3 100644
--- a/meta/recipes-devtools/libtool/libtool/0010-Makefile.am-make-sure-autoheader-run-before-automake.patch
+++ b/meta/recipes-devtools/libtool/libtool/0010-Makefile.am-make-sure-autoheader-run-before-automake.patch
@@ -11,12 +11,12 @@ so make automake run after autoheader to avoid the above race.
Signed-off-by: Mingli Yu <mingli.yu@windriver.com>
Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
-Upstream-Status: Submitted [https://lists.gnu.org/archive/html/libtool-patches/2021-10/msg00020.html]
+Upstream-Status: Backport [https://git.savannah.gnu.org/cgit/libtool.git/commit/?id=e01c0bfe5e041418d84460901a1a5b11b89d596f]
Signed-off-by: Mingli Yu <mingli.yu@windriver.com>
diff --git a/Makefile.am b/Makefile.am
-index 84795d87..8c9949ed 100644
+index 9c34bfd..231ef3f 100644
--- a/Makefile.am
+++ b/Makefile.am
@@ -333,7 +333,7 @@ EXTRA_DIST += $(lt_aclocal_m4) \
diff --git a/meta/recipes-devtools/libtool/libtool/0011-ltmain.in-Handle-prefix-map-compiler-options-correct.patch b/meta/recipes-devtools/libtool/libtool/0011-ltmain.in-Handle-prefix-map-compiler-options-correct.patch
index 4945533bad..4c6b6f05b4 100644
--- a/meta/recipes-devtools/libtool/libtool/0011-ltmain.in-Handle-prefix-map-compiler-options-correct.patch
+++ b/meta/recipes-devtools/libtool/libtool/0011-ltmain.in-Handle-prefix-map-compiler-options-correct.patch
@@ -1,28 +1,37 @@
-From: Richard Purdie <richard.purdie@linuxfoundation.org>
-Subject: [PATCH 11/12] ltmain.in: Handle prefix-map compiler options correctly
+ltmain.in: Handle prefix-map compiler options correctly
+
+If lto is enabled, we need the prefix-map variables to be passed to the linker
+to correctly link the objects using correctly mapped paths.
-If lto is enabled, we need the prefix-map variables to be passed to the linker.
Add these to the list of options libtool passes through.
+* build-aux/ltmain.in: Handle prefix-map compiler options
+
Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
-Upstream-Status: Submitted [https://lists.gnu.org/archive/html/libtool-patches/2021-10/msg00019.html]
+https://lists.gnu.org/archive/html/libtool-patches/2021-10/msg00019.html
+https://lists.gnu.org/archive/html/libtool-patches/2024-01/msg00066.html
+Upstream-Status: Backport [https://git.savannah.gnu.org/cgit/libtool.git/commit/?id=cdf4bf702f11d17e06569936e8a433a77f791228]
+
+---
+ build-aux/ltmain.in | 3 ++-
+ 1 file changed, 2 insertions(+), 1 deletion(-)
-Index: libtool-2.4.7/build-aux/ltmain.in
-===================================================================
---- libtool-2.4.7.orig/build-aux/ltmain.in
-+++ libtool-2.4.7/build-aux/ltmain.in
-@@ -5419,11 +5419,12 @@ func_mode_link ()
+diff --git a/build-aux/ltmain.in b/build-aux/ltmain.in
+index 0a50f5b..037f009 100644
+--- a/build-aux/ltmain.in
++++ b/build-aux/ltmain.in
+@@ -5413,11 +5413,12 @@ func_mode_link ()
# -stdlib=* select c++ std lib with clang
# -fsanitize=* Clang/GCC memory and address sanitizer
# -fuse-ld=* Linker select flags for GCC
-+ # -f*-prefix-map* needed for lto linking
++ # -f{file|debug|macro|profile}-prefix-map* needed for lto linking
# -Wa,* Pass flags directly to the assembler
-64|-mips[0-9]|-r[0-9][0-9]*|-xarch=*|-xtarget=*|+DA*|+DD*|-q*|-m*| \
-t[45]*|-txscale*|-p|-pg|--coverage|-fprofile-*|-F*|@*|-tp=*|--sysroot=*| \
-O*|-g*|-flto*|-fwhopr*|-fuse-linker-plugin|-fstack-protector*|-stdlib=*| \
- -specs=*|-fsanitize=*|-fuse-ld=*|-Wa,*)
-+ -specs=*|-fsanitize=*|-fuse-ld=*|-f*-prefix-map*|-Wa,*)
++ -specs=*|-fsanitize=*|-fuse-ld=*|-ffile-prefix-map=*|-fdebug-prefix-map=*|-fmacro-prefix-map=*|-fprofile-prefix-map=*|-Wa,*)
func_quote_arg pretty "$arg"
arg=$func_quote_arg_result
func_append compile_command " $arg"
diff --git a/meta/recipes-devtools/libtool/libtool/0012-libtool.m4-For-reproducibility-stop-encoding-hostnam.patch b/meta/recipes-devtools/libtool/libtool/0012-libtool.m4-For-reproducibility-stop-encoding-hostnam.patch
index c750303d43..8469b8727e 100644
--- a/meta/recipes-devtools/libtool/libtool/0012-libtool.m4-For-reproducibility-stop-encoding-hostnam.patch
+++ b/meta/recipes-devtools/libtool/libtool/0012-libtool.m4-For-reproducibility-stop-encoding-hostnam.patch
@@ -10,13 +10,13 @@ Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
https://sources.debian.org/data/main/libt/libtool/2.4.6-10/debian/patches/
no_hostname.patch
-Upstream-Status: Submitted [https://lists.gnu.org/archive/html/libtool-patches/2021-10/msg00011.html]
+Upstream-Status: Backport [https://git.savannah.gnu.org/cgit/libtool.git/commit/?id=bd826173c4c9c3fa8d77d92785754897cb4bfd89]
diff --git a/m4/libtool.m4 b/m4/libtool.m4
-index 022c1292..1a8a2998 100644
+index 3794130..84a550c 100644
--- a/m4/libtool.m4
+++ b/m4/libtool.m4
-@@ -728,7 +728,6 @@ _LT_CONFIG_SAVE_COMMANDS([
+@@ -730,7 +730,6 @@ _LT_CONFIG_SAVE_COMMANDS([
cat <<_LT_EOF >> "$cfgfile"
#! $SHELL
# Generated automatically by $as_me ($PACKAGE) $VERSION
diff --git a/meta/recipes-devtools/libtool/libtool/dont-depend-on-help2man.patch b/meta/recipes-devtools/libtool/libtool/dont-depend-on-help2man.patch
index fd4084c859..0013d16544 100644
--- a/meta/recipes-devtools/libtool/libtool/dont-depend-on-help2man.patch
+++ b/meta/recipes-devtools/libtool/libtool/dont-depend-on-help2man.patch
@@ -4,9 +4,10 @@ Signed-off-by: Marko Lindqvist <cazfi74@gmail.com>
Updated by: Robert Yang <liezhi.yang@windriver.com>
diff --git a/Makefile.am b/Makefile.am
+index 7bd7f72..c29860e 100644
--- a/Makefile.am
+++ b/Makefile.am
-@@ -404,21 +404,6 @@ $(notes_txt): $(notes_texi)
+@@ -411,21 +411,6 @@ $(notes_txt): $(notes_texi)
$(AM_V_GEN)$(MAKEINFO) -P '$(srcdir)/doc' --no-headers \
$(MAKEINFOFLAGS) -o '$@' '$(notes_texi)'
diff --git a/meta/recipes-devtools/llvm/llvm/0006-llvm-TargetLibraryInfo-Undefine-libc-functions-if-th.patch b/meta/recipes-devtools/llvm/llvm/0006-llvm-TargetLibraryInfo-Undefine-libc-functions-if-th.patch
deleted file mode 100644
index d02b7ba6ab..0000000000
--- a/meta/recipes-devtools/llvm/llvm/0006-llvm-TargetLibraryInfo-Undefine-libc-functions-if-th.patch
+++ /dev/null
@@ -1,90 +0,0 @@
-Upstream-Status: Pending
-Signed-off-by: Khem Raj <raj.khem@gmail.com>
-
-From dbeecdb307be8b783b42cbc89dcb9c5e7f528989 Mon Sep 17 00:00:00 2001
-From: Khem Raj <raj.khem@gmail.com>
-Date: Sat, 21 May 2016 00:33:20 +0000
-Subject: [PATCH] llvm: TargetLibraryInfo: Undefine libc functions if they are macros
-
-musl defines some functions as macros and not inline functions
-if this is the case then make sure to undefine them
-
-Signed-off-by: Khem Raj <raj.khem@gmail.com>
----
- .../llvm/Analysis/TargetLibraryInfo.def | 21 +++++++++++++++++++
- 1 file changed, 21 insertions(+)
-
-diff --git a/llvm/include/llvm/Analysis/TargetLibraryInfo.def b/llvm/include/llvm/Analysis/TargetLibraryInfo.def
-index afed404f04c..876888656f2 100644
---- a/llvm/include/llvm/Analysis/TargetLibraryInfo.def
-+++ b/llvm/include/llvm/Analysis/TargetLibraryInfo.def
-@@ -782,6 +782,9 @@ TLI_DEFINE_STRING_INTERNAL("fmodl")
- TLI_DEFINE_ENUM_INTERNAL(fopen)
- TLI_DEFINE_STRING_INTERNAL("fopen")
- /// FILE *fopen64(const char *filename, const char *opentype)
-+#ifdef fopen64
-+#undef fopen64
-+#endif
- TLI_DEFINE_ENUM_INTERNAL(fopen64)
- TLI_DEFINE_STRING_INTERNAL("fopen64")
- /// int fork();
-@@ -829,6 +832,9 @@ TLI_DEFINE_STRING_INTERNAL("fseek")
- /// int fseeko(FILE *stream, off_t offset, int whence);
- TLI_DEFINE_ENUM_INTERNAL(fseeko)
- TLI_DEFINE_STRING_INTERNAL("fseeko")
-+#ifdef fseeko64
-+#undef fseeko64
-+#endif
- /// int fseeko64(FILE *stream, off64_t offset, int whence)
- TLI_DEFINE_ENUM_INTERNAL(fseeko64)
- TLI_DEFINE_STRING_INTERNAL("fseeko64")
-@@ -839,6 +845,9 @@ TLI_DEFINE_STRING_INTERNAL("fsetpos")
- TLI_DEFINE_ENUM_INTERNAL(fstat)
- TLI_DEFINE_STRING_INTERNAL("fstat")
- /// int fstat64(int filedes, struct stat64 *buf)
-+#ifdef fstat64
-+#undef fstat64
-+#endif
- TLI_DEFINE_ENUM_INTERNAL(fstat64)
- TLI_DEFINE_STRING_INTERNAL("fstat64")
- /// int fstatvfs(int fildes, struct statvfs *buf);
-@@ -854,6 +863,9 @@ TLI_DEFINE_STRING_INTERNAL("ftell")
- TLI_DEFINE_ENUM_INTERNAL(ftello)
- TLI_DEFINE_STRING_INTERNAL("ftello")
- /// off64_t ftello64(FILE *stream)
-+#ifdef ftello64
-+#undef ftello64
-+#endif
- TLI_DEFINE_ENUM_INTERNAL(ftello64)
- TLI_DEFINE_STRING_INTERNAL("ftello64")
- /// int ftrylockfile(FILE *file);
-@@ -980,6 +992,9 @@ TLI_DEFINE_STRING_INTERNAL("logl")
- TLI_DEFINE_ENUM_INTERNAL(lstat)
- TLI_DEFINE_STRING_INTERNAL("lstat")
- /// int lstat64(const char *path, struct stat64 *buf);
-+#ifdef lstat64
-+#undef lstat64
-+#endif
- TLI_DEFINE_ENUM_INTERNAL(lstat64)
- TLI_DEFINE_STRING_INTERNAL("lstat64")
- /// void *malloc(size_t size);
-@@ -1205,6 +1220,9 @@ TLI_DEFINE_STRING_INTERNAL("sscanf")
- TLI_DEFINE_ENUM_INTERNAL(stat)
- TLI_DEFINE_STRING_INTERNAL("stat")
- /// int stat64(const char *path, struct stat64 *buf);
-+#ifdef stat64
-+#undef stat64
-+#endif
- TLI_DEFINE_ENUM_INTERNAL(stat64)
- TLI_DEFINE_STRING_INTERNAL("stat64")
- /// int statvfs(const char *path, struct statvfs *buf);
-@@ -1340,6 +1358,9 @@ TLI_DEFINE_STRING_INTERNAL("times")
- TLI_DEFINE_ENUM_INTERNAL(tmpfile)
- TLI_DEFINE_STRING_INTERNAL("tmpfile")
- /// FILE *tmpfile64(void)
-+#ifdef tmpfile64
-+#undef tmpfile64
-+#endif
- TLI_DEFINE_ENUM_INTERNAL(tmpfile64)
- TLI_DEFINE_STRING_INTERNAL("tmpfile64")
- /// int toascii(int c);
diff --git a/meta/recipes-devtools/llvm/llvm/0007-llvm-allow-env-override-of-exe-path.patch b/meta/recipes-devtools/llvm/llvm/0007-llvm-allow-env-override-of-exe-path.patch
index b01b8647c9..add38b3bb4 100644
--- a/meta/recipes-devtools/llvm/llvm/0007-llvm-allow-env-override-of-exe-path.patch
+++ b/meta/recipes-devtools/llvm/llvm/0007-llvm-allow-env-override-of-exe-path.patch
@@ -1,27 +1,26 @@
-Upstream-Status: Pending
-Signed-off-by: Khem Raj <raj.khem@gmail.com>
-
-From 61b00e1e051e367f5483d7b5253b6c85a9e8a90f Mon Sep 17 00:00:00 2001
+From 588a8694c6540e31140c7e242bfb5e279d6ca08c Mon Sep 17 00:00:00 2001
From: Martin Kelly <mkelly@xevo.com>
Date: Fri, 19 May 2017 00:22:57 -0700
-Subject: [PATCH] llvm: allow env override of exe path
+Subject: [PATCH] llvm: allow env override of exe and libdir path
When using a native llvm-config from inside a sysroot, we need llvm-config to
return the libraries, include directories, etc. from inside the sysroot rather
than from the native sysroot. Thus provide an env override for calling
llvm-config from a target sysroot.
+Upstream-Status: Inappropriate [OE-specific]
+
Signed-off-by: Martin Kelly <mkelly@xevo.com>
Signed-off-by: Khem Raj <raj.khem@gmail.com>
---
- llvm/tools/llvm-config/llvm-config.cpp | 7 +++++++
- 1 file changed, 7 insertions(+)
+ llvm/tools/llvm-config/llvm-config.cpp | 25 +++++++++++++++++++------
+ 1 file changed, 19 insertions(+), 6 deletions(-)
diff --git a/llvm/tools/llvm-config/llvm-config.cpp b/llvm/tools/llvm-config/llvm-config.cpp
-index 7ef7c46a262..a4f7ed82c7b 100644
+index e86eb2b44b10..7b2abf318dbe 100644
--- a/llvm/tools/llvm-config/llvm-config.cpp
+++ b/llvm/tools/llvm-config/llvm-config.cpp
-@@ -225,6 +225,13 @@ Typical components:\n\
+@@ -246,6 +246,13 @@ Typical components:\n\
/// Compute the path to the main executable.
std::string GetExecutablePath(const char *Argv0) {
@@ -35,3 +34,4 @@ index 7ef7c46a262..a4f7ed82c7b 100644
// This just needs to be some symbol in the binary; C++ doesn't
// allow taking the address of ::main however.
void *P = (void *)(intptr_t)GetExecutablePath;
+
diff --git a/meta/recipes-devtools/llvm/llvm/llvm-config b/meta/recipes-devtools/llvm/llvm/llvm-config
new file mode 100644
index 0000000000..5e4ded2da5
--- /dev/null
+++ b/meta/recipes-devtools/llvm/llvm/llvm-config
@@ -0,0 +1,51 @@
+#!/bin/bash
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+# Wrap llvm-config since the native llvm-config will remap some values correctly
+# if placed in the target sysroot but for flags, it would provide the native ones.
+# Provide ours from the environment instead.
+
+NEXT_LLVM_CONFIG="$(which -a llvm-config | sed -n 2p)"
+if [[ $# == 0 ]]; then
+ exec "$NEXT_LLVM_CONFIG"
+fi
+
+remain=""
+output=""
+for arg in "$@"; do
+ case "$arg" in
+ --cppflags)
+ output="${output} ${CPPFLAGS}"
+ ;;
+ --cflags)
+ output="${output} ${CFLAGS}"
+ ;;
+ --cxxflags)
+ output="${output} ${CXXFLAGS}"
+ ;;
+ --ldflags)
+ output="${output} ${LDFLAGS}"
+ ;;
+ --shared-mode)
+ output="${output} shared"
+ ;;
+ --libs)
+ output="${output} -lLLVM"
+ ;;
+ --link-shared)
+ break
+ ;;
+ *)
+ remain="${remain} ${arg}"
+ ;;
+ esac
+done
+
+if [ "${remain}" != "" ]; then
+ output="${output} "$("$NEXT_LLVM_CONFIG" ${remain})
+fi
+
+echo "${output}"
diff --git a/meta/recipes-devtools/llvm/llvm_git.bb b/meta/recipes-devtools/llvm/llvm_git.bb
index 0b5301cbff..c4fd73f2d7 100644
--- a/meta/recipes-devtools/llvm/llvm_git.bb
+++ b/meta/recipes-devtools/llvm/llvm_git.bb
@@ -1,36 +1,34 @@
# Copyright (C) 2017 Khem Raj <raj.khem@gmail.com>
# Released under the MIT license (see COPYING.MIT for the terms)
-DESCRIPTION = "The LLVM Compiler Infrastructure"
+SUMMARY = "The LLVM Compiler Infrastructure"
HOMEPAGE = "http://llvm.org"
LICENSE = "Apache-2.0-with-LLVM-exception"
SECTION = "devel"
LIC_FILES_CHKSUM = "file://LICENSE.TXT;md5=8a15a0759ef07f2682d2ba4b893c9afe"
-DEPENDS = "libffi libxml2 zlib libedit ninja-native llvm-native"
-
-COMPATIBLE_HOST:riscv64 = "null"
-COMPATIBLE_HOST:riscv32 = "null"
+DEPENDS = "libffi libxml2 zlib zstd libedit ninja-native llvm-native"
RDEPENDS:${PN}:append:class-target = " ncurses-terminfo"
inherit cmake pkgconfig
-PROVIDES += "llvm${PV}"
+# could be 'rcX' or 'git' or empty ( for release )
+VER_SUFFIX = ""
-PV = "14.0.3"
+PV = "18.1.3${VER_SUFFIX}"
MAJOR_VERSION = "${@oe.utils.trim_version("${PV}", 1)}"
LLVM_RELEASE = "${PV}"
BRANCH = "release/${MAJOR_VERSION}.x"
-SRCREV = "1f9140064dfbfb0bbda8e51306ea51080b2f7aac"
+SRCREV = "c13b7485b87909fcf739f62cfa382b55407433c0"
SRC_URI = "git://github.com/llvm/llvm-project.git;branch=${BRANCH};protocol=https \
- file://0006-llvm-TargetLibraryInfo-Undefine-libc-functions-if-th.patch;striplevel=2 \
file://0007-llvm-allow-env-override-of-exe-path.patch;striplevel=2 \
file://0001-AsmMatcherEmitter-sort-ClassInfo-lists-by-name-as-we.patch;striplevel=2 \
+ file://llvm-config \
"
UPSTREAM_CHECK_GITTAGREGEX = "llvmorg-(?P<pver>\d+(\.\d+)+)"
@@ -56,6 +54,12 @@ def get_llvm_arch(bb, d, arch_var):
def get_llvm_host_arch(bb, d):
return get_llvm_arch(bb, d, 'HOST_ARCH')
+PACKAGECONFIG ??= "libllvm"
+PACKAGECONFIG:class-native = "${@bb.utils.contains('DISTRO_FEATURES', 'opengl', 'libllvm', '', d)}"
+# if optviewer OFF, force the modules to be not found or the ones on the host would be found
+PACKAGECONFIG[optviewer] = ",-DPY_PYGMENTS_FOUND=OFF -DPY_PYGMENTS_LEXERS_C_CPP_FOUND=OFF -DPY_YAML_FOUND=OFF,python3-pygments python3-pyyaml,python3-pygments python3-pyyaml"
+PACKAGECONFIG[libllvm] = ""
+
#
# Default to build all OE-Core supported target arches (user overridable).
#
@@ -74,9 +78,10 @@ EXTRA_OECMAKE += "-DLLVM_ENABLE_ASSERTIONS=OFF \
-DFFI_INCLUDE_DIR=$(pkg-config --variable=includedir libffi) \
-DLLVM_OPTIMIZED_TABLEGEN=ON \
-DLLVM_TARGETS_TO_BUILD='${LLVM_TARGETS}' \
+ -DLLVM_VERSION_SUFFIX='${VER_SUFFIX}' \
-DLLVM_TEMPORARILY_ALLOW_OLD_TOOLCHAIN=ON \
- -DPYTHON_EXECUTABLE=${HOSTTOOLS_DIR}/python3 \
- -G Ninja"
+ -DCMAKE_BUILD_TYPE=Release \
+ "
EXTRA_OECMAKE:append:class-target = "\
-DCMAKE_CROSSCOMPILING:BOOL=ON \
@@ -96,14 +101,15 @@ do_compile:prepend:class-target() {
}
do_compile() {
+ if ${@bb.utils.contains('PACKAGECONFIG', 'libllvm', 'true', 'false', d)}; then
ninja -v ${PARALLEL_MAKE}
-}
-
-do_compile:class-native() {
+ else
ninja -v ${PARALLEL_MAKE} llvm-config llvm-tblgen
+ fi
}
do_install() {
+ if ${@bb.utils.contains('PACKAGECONFIG', 'libllvm', 'true', 'false', d)}; then
DESTDIR=${D} ninja -v install
# llvm harcodes usr/lib as install path, so this corrects it to actual libdir
@@ -115,11 +121,21 @@ do_install() {
# reproducibility
sed -i -e 's,${WORKDIR},,g' ${D}/${libdir}/cmake/llvm/LLVMConfig.cmake
+ fi
}
-do_install:class-native() {
+do_install:append:class-native() {
install -D -m 0755 ${B}/bin/llvm-tblgen ${D}${bindir}/llvm-tblgen${PV}
install -D -m 0755 ${B}/bin/llvm-config ${D}${bindir}/llvm-config${PV}
+ ln -sf llvm-config${PV} ${D}${bindir}/llvm-config
+}
+
+SYSROOT_PREPROCESS_FUNCS:append:class-target = " llvm_sysroot_preprocess"
+
+llvm_sysroot_preprocess() {
+ install -d ${SYSROOT_DESTDIR}${bindir_crossscripts}/
+ install -m 0755 ${WORKDIR}/llvm-config ${SYSROOT_DESTDIR}${bindir_crossscripts}/
+ ln -sf llvm-config ${SYSROOT_DESTDIR}${bindir_crossscripts}/llvm-config${PV}
}
PACKAGES =+ "${PN}-bugpointpasses ${PN}-llvmhello ${PN}-libllvm ${PN}-liboptremarks ${PN}-liblto"
@@ -132,6 +148,7 @@ FILES:${PN}-bugpointpasses = "\
FILES:${PN}-libllvm = "\
${libdir}/libLLVM-${MAJOR_VERSION}.so \
+ ${libdir}/libLLVM.so.${MAJOR_VER}.${MINOR_VER} \
"
FILES:${PN}-liblto += "\
diff --git a/meta/recipes-devtools/log4cplus/log4cplus_2.0.7.bb b/meta/recipes-devtools/log4cplus/log4cplus_2.0.7.bb
deleted file mode 100644
index 3798b93f76..0000000000
--- a/meta/recipes-devtools/log4cplus/log4cplus_2.0.7.bb
+++ /dev/null
@@ -1,19 +0,0 @@
-SUMMARY = "log4cplus provides a simple C++ logging API for log management"
-SECTION = "libs"
-HOMEPAGE = "http://sourceforge.net/projects/log4cplus/"
-DESCRIPTION = "log4cplus is a simple to use C++ logging API providing thread-safe, flexible, and arbitrarily granular control over log management and configuration. It is modelled after the Java log4j API."
-BUGTRACKER = "http://sourceforge.net/p/log4cplus/bugs/"
-
-LICENSE = "Apache-2.0 & BSD-2-Clause"
-LIC_FILES_CHKSUM = "file://LICENSE;md5=41e8e060c26822886b592ab4765c756b"
-
-SRC_URI = "${SOURCEFORGE_MIRROR}/project/${BPN}/${BPN}-stable/${PV}/${BP}.tar.gz \
- "
-SRC_URI[sha256sum] = "086451c7e7c582862cbd6c60d87bb6d9d63c4b65321dba85fa71766382f7ec6d"
-
-UPSTREAM_CHECK_URI = "https://sourceforge.net/projects/log4cplus/files/log4cplus-stable/"
-UPSTREAM_CHECK_REGEX = "log4cplus-stable/(?P<pver>\d+(\.\d+)+)/"
-
-inherit autotools pkgconfig
-
-BBCLASSEXTEND = "native"
diff --git a/meta/recipes-devtools/log4cplus/log4cplus_2.1.1.bb b/meta/recipes-devtools/log4cplus/log4cplus_2.1.1.bb
new file mode 100644
index 0000000000..be3c787ab2
--- /dev/null
+++ b/meta/recipes-devtools/log4cplus/log4cplus_2.1.1.bb
@@ -0,0 +1,19 @@
+SUMMARY = "log4cplus provides a simple C++ logging API for log management"
+SECTION = "libs"
+HOMEPAGE = "http://sourceforge.net/projects/log4cplus/"
+DESCRIPTION = "log4cplus is a simple to use C++ logging API providing thread-safe, flexible, and arbitrarily granular control over log management and configuration. It is modelled after the Java log4j API."
+BUGTRACKER = "http://sourceforge.net/p/log4cplus/bugs/"
+
+LICENSE = "Apache-2.0 & BSD-2-Clause"
+LIC_FILES_CHKSUM = "file://LICENSE;md5=41e8e060c26822886b592ab4765c756b"
+
+SRC_URI = "${SOURCEFORGE_MIRROR}/project/${BPN}/${BPN}-stable/${PV}/${BP}.tar.gz \
+ "
+SRC_URI[sha256sum] = "42dc435928917fd2f847046c4a0c6086b2af23664d198c7fc1b982c0bfe600c1"
+
+UPSTREAM_CHECK_URI = "https://sourceforge.net/projects/log4cplus/files/log4cplus-stable/"
+UPSTREAM_CHECK_REGEX = "log4cplus-stable/(?P<pver>\d+(\.\d+)+)/"
+
+inherit autotools pkgconfig
+
+BBCLASSEXTEND = "native"
diff --git a/meta/recipes-devtools/lua/lua/CVE-2022-28805.patch b/meta/recipes-devtools/lua/lua/CVE-2022-28805.patch
deleted file mode 100644
index 3680c715a7..0000000000
--- a/meta/recipes-devtools/lua/lua/CVE-2022-28805.patch
+++ /dev/null
@@ -1,26 +0,0 @@
-From 1f3c6f4534c6411313361697d98d1145a1f030fa Mon Sep 17 00:00:00 2001
-From: Roberto Ierusalimschy <roberto@inf.puc-rio.br>
-Date: Tue, 15 Feb 2022 12:28:46 -0300
-Subject: [PATCH] Bug: Lua can generate wrong code when _ENV is <const>
-
-CVE: CVE-2022-28805
-
-Upstream-Status: Backport [https://github.com/lua/lua/commit/1f3c6f4534c6411313361697d98d1145a1f030fa]
-
-Signed-off-by: Steve Sakoman <steve@sakoman.com>
----
- src/lparser.c | 1 +
- 1 files changed, 1 insertions(+)
-
-diff --git a/src/lparser.c b/src/lparser.c
-index 3abe3d751..a5cd55257 100644
---- a/src/lparser.c
-+++ b/src/lparser.c
-@@ -468,6 +468,7 @@ static void singlevar (LexState *ls, expdesc *var) {
- expdesc key;
- singlevaraux(fs, ls->envn, var, 1); /* get environment variable */
- lua_assert(var->k != VVOID); /* this one must exist */
-+ luaK_exp2anyregup(fs, var); /* but could be a constant */
- codestring(&key, varname); /* key is variable name */
- luaK_indexed(fs, var, &key); /* env[varname] */
- }
diff --git a/meta/recipes-devtools/lua/lua/lua.pc.in b/meta/recipes-devtools/lua/lua/lua.pc.in
index c27e86e85d..1fc288c4fe 100644
--- a/meta/recipes-devtools/lua/lua/lua.pc.in
+++ b/meta/recipes-devtools/lua/lua/lua.pc.in
@@ -1,6 +1,5 @@
-prefix=/usr
-libdir=${prefix}/lib
-includedir=${prefix}/include
+libdir=@LIBDIR@
+includedir=@INCLUDEDIR@
Name: Lua
Description: Lua language engine
diff --git a/meta/recipes-devtools/lua/lua_5.4.4.bb b/meta/recipes-devtools/lua/lua_5.4.4.bb
deleted file mode 100644
index d704841378..0000000000
--- a/meta/recipes-devtools/lua/lua_5.4.4.bb
+++ /dev/null
@@ -1,58 +0,0 @@
-DESCRIPTION = "Lua is a powerful light-weight programming language designed \
-for extending applications."
-LICENSE = "MIT"
-LIC_FILES_CHKSUM = "file://doc/readme.html;beginline=307;endline=330;md5=79c3f6b19ad05efe24c1681f025026bb"
-HOMEPAGE = "http://www.lua.org/"
-
-SRC_URI = "http://www.lua.org/ftp/lua-${PV}.tar.gz;name=tarballsrc \
- file://lua.pc.in \
- file://CVE-2022-28805.patch \
- ${@bb.utils.contains('DISTRO_FEATURES', 'ptest', 'http://www.lua.org/tests/lua-${PV_testsuites}-tests.tar.gz;name=tarballtest file://run-ptest ', '', d)} \
- "
-
-# if no test suite matches PV release of Lua exactly, download the suite for the closest Lua release.
-PV_testsuites = "5.4.4"
-
-SRC_URI[tarballsrc.sha256sum] = "164c7849653b80ae67bec4b7473b884bf5cc8d2dca05653475ec2ed27b9ebf61"
-SRC_URI[tarballtest.sha256sum] = "04d28355cd67a2299dfe5708b55a0ff221ccb1a3907a3113cc103ccc05ac6aad"
-
-inherit pkgconfig binconfig ptest
-
-PACKAGECONFIG ??= "readline"
-PACKAGECONFIG[readline] = ",,readline"
-
-TARGET_CC_ARCH += " -fPIC ${LDFLAGS}"
-EXTRA_OEMAKE = "'CC=${CC} -fPIC' 'MYCFLAGS=${CFLAGS} -fPIC' MYLDFLAGS='${LDFLAGS}' 'AR=ar rcD' 'RANLIB=ranlib -D'"
-
-do_configure:prepend() {
- sed -i -e s:/usr/local:${prefix}:g src/luaconf.h
- sed -i -e s:lib/lua/:${baselib}/lua/:g src/luaconf.h
-}
-
-do_compile () {
- oe_runmake ${@bb.utils.contains('PACKAGECONFIG', 'readline', 'linux-readline', 'linux', d)}
-}
-
-do_install () {
- oe_runmake \
- 'INSTALL_TOP=${D}${prefix}' \
- 'INSTALL_BIN=${D}${bindir}' \
- 'INSTALL_INC=${D}${includedir}/' \
- 'INSTALL_MAN=${D}${mandir}/man1' \
- 'INSTALL_SHARE=${D}${datadir}/lua' \
- 'INSTALL_LIB=${D}${libdir}' \
- 'INSTALL_CMOD=${D}${libdir}/lua/5.4' \
- install
- install -d ${D}${libdir}/pkgconfig
-
- sed -e s/@VERSION@/${PV}/ ${WORKDIR}/lua.pc.in > ${WORKDIR}/lua.pc
- install -m 0644 ${WORKDIR}/lua.pc ${D}${libdir}/pkgconfig/
- rmdir ${D}${datadir}/lua/5.4
- rmdir ${D}${datadir}/lua
-}
-
-do_install_ptest () {
- cp -R --no-dereference --preserve=mode,links -v ${WORKDIR}/lua-${PV_testsuites}-tests ${D}${PTEST_PATH}/test
-}
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/lua/lua_5.4.6.bb b/meta/recipes-devtools/lua/lua_5.4.6.bb
new file mode 100644
index 0000000000..eabfc89575
--- /dev/null
+++ b/meta/recipes-devtools/lua/lua_5.4.6.bb
@@ -0,0 +1,66 @@
+SUMMARY = "Lua is a powerful light-weight programming language designed \
+for extending applications."
+LICENSE = "MIT"
+LIC_FILES_CHKSUM = "file://doc/readme.html;beginline=303;endline=324;md5=e05449eb28c092473f854670c6e8375a"
+HOMEPAGE = "http://www.lua.org/"
+
+SRC_URI = "http://www.lua.org/ftp/lua-${PV}.tar.gz;name=tarballsrc \
+ file://lua.pc.in \
+ ${@bb.utils.contains('DISTRO_FEATURES', 'ptest', 'http://www.lua.org/tests/lua-${PV_testsuites}-tests.tar.gz;name=tarballtest file://run-ptest ', '', d)} \
+ "
+
+# if no test suite matches PV release of Lua exactly, download the suite for the closest Lua release.
+PV_testsuites = "5.4.4"
+
+SRC_URI[tarballsrc.sha256sum] = "7d5ea1b9cb6aa0b59ca3dde1c6adcb57ef83a1ba8e5432c0ecd06bf439b3ad88"
+SRC_URI[tarballtest.sha256sum] = "04d28355cd67a2299dfe5708b55a0ff221ccb1a3907a3113cc103ccc05ac6aad"
+
+inherit pkgconfig binconfig ptest
+
+PACKAGECONFIG ??= "readline"
+PACKAGECONFIG[readline] = ",,readline"
+
+TARGET_CC_ARCH += " -fPIC ${LDFLAGS}"
+EXTRA_OEMAKE = "'CC=${CC} -fPIC' 'MYCFLAGS=${CFLAGS} -fPIC' MYLDFLAGS='${LDFLAGS}' 'AR=ar rcD' 'RANLIB=ranlib -D'"
+
+do_configure:prepend() {
+ sed -i -e s:/usr/local:${prefix}:g src/luaconf.h
+ sed -i -e s:lib/lua/:${baselib}/lua/:g src/luaconf.h
+}
+
+do_compile () {
+ oe_runmake ${@bb.utils.contains('PACKAGECONFIG', 'readline', 'linux-readline', 'linux', d)}
+}
+
+do_install () {
+ oe_runmake \
+ 'INSTALL_TOP=${D}${prefix}' \
+ 'INSTALL_BIN=${D}${bindir}' \
+ 'INSTALL_INC=${D}${includedir}/' \
+ 'INSTALL_MAN=${D}${mandir}/man1' \
+ 'INSTALL_SHARE=${D}${datadir}/lua' \
+ 'INSTALL_LIB=${D}${libdir}' \
+ 'INSTALL_CMOD=${D}${libdir}/lua/5.4' \
+ install
+ install -d ${D}${libdir}/pkgconfig
+
+ sed -e s/@VERSION@/${PV}/ -e s#@LIBDIR@#${libdir}# -e s#@INCLUDEDIR@#${includedir}# ${WORKDIR}/lua.pc.in > ${WORKDIR}/lua.pc
+ install -m 0644 ${WORKDIR}/lua.pc ${D}${libdir}/pkgconfig/
+ rmdir ${D}${datadir}/lua/5.4
+ rmdir ${D}${datadir}/lua
+}
+
+do_install_ptest () {
+ cp -R --no-dereference --preserve=mode,links -v ${WORKDIR}/lua-${PV_testsuites}-tests ${D}${PTEST_PATH}/test
+}
+
+do_install_ptest:append:libc-musl () {
+ # locale tests does not work on musl, due to limited locale implementation
+ # https://wiki.musl-libc.org/open-issues.html#Locale-limitations
+ sed -i -e 's|os.setlocale("pt_BR") or os.setlocale("ptb")|false|g' ${D}${PTEST_PATH}/test/literals.lua
+}
+
+BBCLASSEXTEND = "native nativesdk"
+
+inherit multilib_script
+MULTILIB_SCRIPTS = "${PN}-dev:${includedir}/luaconf.h"
diff --git a/meta/recipes-devtools/m4/m4-1.4.19.inc b/meta/recipes-devtools/m4/m4-1.4.19.inc
index 2adf4de6f2..fcc9e58270 100644
--- a/meta/recipes-devtools/m4/m4-1.4.19.inc
+++ b/meta/recipes-devtools/m4/m4-1.4.19.inc
@@ -9,6 +9,7 @@ inherit autotools texinfo ptest gettext
SRC_URI = "${GNU_MIRROR}/m4/m4-${PV}.tar.gz \
file://ac_config_links.patch \
file://0001-sigsegv-Fix-build-on-ppc-musl.patch \
+ file://0001-Define-alignof_slot-using-_Alignof-when-using-C11-or.patch \
"
SRC_URI:append:class-target = " file://run-ptest \
file://serial-tests-config.patch \
@@ -39,8 +40,6 @@ do_install_ptest() {
cp -r ${B}/tests ${D}${PTEST_PATH}
cp -r ${S}/tests/* ${D}${PTEST_PATH}/tests/
sed -i '/^Makefile:/c Makefile:' ${D}${PTEST_PATH}/tests/Makefile
- sed -i -e "s;LOCALE_FR='fr_FR';LOCALE_FR='fr_FR.iso88591';g" \
- -e "s;LOCALE_FR_UTF8='none';LOCALE_FR_UTF8='fr_FR.utf8';g" ${D}${PTEST_PATH}/tests/Makefile
find ${D}${PTEST_PATH} -type f -name *.[hoc] | xargs -i rm {}
cp ${S}/build-aux/update-copyright ${D}${PTEST_PATH}/tests/
sed -i 's;update-copyright;./update-copyright;g' ${D}${PTEST_PATH}/tests/test-update-copyright.sh
@@ -51,10 +50,16 @@ do_install_ptest() {
ln -s ptest ${D}${libdir}/${BPN}/${BP}
}
+do_install_ptest:append:libc-glibc() {
+ sed -i -e "s;LOCALE_FR='fr_FR';LOCALE_FR='fr_FR.iso88591';g" \
+ -e "s;LOCALE_FR_UTF8='none';LOCALE_FR_UTF8='fr_FR.utf8';g" ${D}${PTEST_PATH}/tests/Makefile
+}
-RDEPENDS:${PN}-ptest += "make coreutils diffutils"
+RDEPENDS:${PN}-ptest += "make coreutils diffutils bash"
RDEPENDS:${PN}-ptest:append:libc-glibc = "\
+ locale-base-fr-fr \
locale-base-fr-fr.iso-8859-1 \
+ glibc-gconv-iso8859-1 \
"
INSANE_SKIP:${PN}-ptest += "ldflags"
diff --git a/meta/recipes-devtools/m4/m4/0001-Define-alignof_slot-using-_Alignof-when-using-C11-or.patch b/meta/recipes-devtools/m4/m4/0001-Define-alignof_slot-using-_Alignof-when-using-C11-or.patch
new file mode 100644
index 0000000000..8757abd7a0
--- /dev/null
+++ b/meta/recipes-devtools/m4/m4/0001-Define-alignof_slot-using-_Alignof-when-using-C11-or.patch
@@ -0,0 +1,49 @@
+From b0fd3a58354b1f5ead891907979dfd3dd36840d5 Mon Sep 17 00:00:00 2001
+From: Khem Raj <raj.khem@gmail.com>
+Date: Sat, 14 Jan 2023 14:55:03 -0800
+Subject: [PATCH] Define alignof_slot using _Alignof when using C11 or newer
+
+WG14 N2350 made very clear that it is an UB having type definitions
+within "offsetof" [1]. This patch enhances the implementation of macro
+alignof_slot to use builtin "_Alignof" to avoid undefined behavior on
+when using std=c11 or newer
+
+clang 16+ has started to flag this [2]
+
+Fixes build when using -std >= gnu11 and using clang16+ [3]
+
+[1] https://www.open-std.org/jtc1/sc22/wg14/www/docs/n2350.htm
+[2] https://reviews.llvm.org/D133574
+[3] https://public-inbox.org/bug-gnulib/20230114232744.215167-1-raj.khem@gmail.com/T/#u
+
+Upstream-Status: Backport [https://git.savannah.gnu.org/cgit/gnulib.git/commit/?id=2d404c7dd974cc65f894526f4a1b76bc1dcd8d82]
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+---
+ lib/alignof.h | 2 ++
+ 1 file changed, 2 insertions(+)
+
+--- a/lib/alignof.h
++++ b/lib/alignof.h
+@@ -18,19 +18,19 @@
+ #define _ALIGNOF_H
+
+ #include <stddef.h>
++#include "stdalign.h"
+
+ /* alignof_slot (TYPE)
+ Determine the alignment of a structure slot (field) of a given type,
+ at compile time. Note that the result depends on the ABI.
+- This is the same as alignof (TYPE) and _Alignof (TYPE), defined in
+- <stdalign.h> if __alignof_is_defined is 1.
++ This is the same as alignof (TYPE).
+ Note: The result cannot be used as a value for an 'enum' constant,
+ due to bugs in HP-UX 10.20 cc and AIX 3.2.5 xlc. */
+ #if defined __cplusplus
+ template <class type> struct alignof_helper { char __slot1; type __slot2; };
+ # define alignof_slot(type) offsetof (alignof_helper<type>, __slot2)
+ #else
+-# define alignof_slot(type) offsetof (struct { char __slot1; type __slot2; }, __slot2)
++# define alignof_slot(type) alignof (type)
+ #endif
+
+ /* alignof_type (TYPE)
diff --git a/meta/recipes-devtools/make/make.inc b/meta/recipes-devtools/make/make.inc
index a0a72b6295..56b863480c 100644
--- a/meta/recipes-devtools/make/make.inc
+++ b/meta/recipes-devtools/make/make.inc
@@ -11,3 +11,8 @@ SRC_URI = "${GNU_MIRROR}/make/make-${PV}.tar.gz \
inherit autotools gettext pkgconfig texinfo
PROVIDES = "virtual/make"
+
+# Otherwise $CXX leaks into /usr/bin/make
+do_configure:prepend() {
+ unset CXX
+}
diff --git a/meta/recipes-devtools/make/make/0001-makeinst-Do-not-undef-POSIX-on-clang-arm.patch b/meta/recipes-devtools/make/make/0001-makeinst-Do-not-undef-POSIX-on-clang-arm.patch
deleted file mode 100644
index 2da7c983dc..0000000000
--- a/meta/recipes-devtools/make/make/0001-makeinst-Do-not-undef-POSIX-on-clang-arm.patch
+++ /dev/null
@@ -1,38 +0,0 @@
-From 86b7947156a0c33e768d0a265e38f2881a70a7e2 Mon Sep 17 00:00:00 2001
-From: Khem Raj <raj.khem@gmail.com>
-Date: Fri, 6 Mar 2020 23:19:37 -0800
-Subject: [PATCH] makeinst: Do not undef POSIX on clang/arm
-
-if __arm internal compiler macro is defined then make assumes that the
-system is not posix and goes ahead and undefs POSIX, which results in
-miscompiling make with clang, since clang does define __arm unlike gcc
-which does not, but they both support posix just fine, so here check for
-compiler not being clang when __arm is defined before undefining posix
-
-Fixes error like
-../make-4.3/src/job.c:507:27: error: too many arguments to function call, expected 0, have 1
- sigsetmask (siggetmask (0) & ~fatal_signal_mask)
- ~~~~~~~~~~ ^
-
-Upstream-Status: Pending
-Signed-off-by: Khem Raj <raj.khem@gmail.com>
----
- src/makeint.h | 2 +-
- 1 file changed, 1 insertion(+), 1 deletion(-)
-
-diff --git a/src/makeint.h b/src/makeint.h
-index c428a36..fadf963 100644
---- a/src/makeint.h
-+++ b/src/makeint.h
-@@ -115,7 +115,7 @@ extern int errno;
- #endif
-
- /* Some systems define _POSIX_VERSION but are not really POSIX.1. */
--#if (defined (butterfly) || defined (__arm) || (defined (__mips) && defined (_SYSTYPE_SVR3)) || (defined (sequent) && defined (i386)))
-+#if (defined (butterfly) || (defined (__arm) && !defined(__clang__)) || (defined (__mips) && defined (_SYSTYPE_SVR3)) || (defined (sequent) && defined (i386)))
- # undef POSIX
- #endif
-
---
-2.25.1
-
diff --git a/meta/recipes-devtools/make/make/0001-src-dir.c-fix-buffer-overflow-warning.patch b/meta/recipes-devtools/make/make/0001-src-dir.c-fix-buffer-overflow-warning.patch
deleted file mode 100644
index 57970824f6..0000000000
--- a/meta/recipes-devtools/make/make/0001-src-dir.c-fix-buffer-overflow-warning.patch
+++ /dev/null
@@ -1,41 +0,0 @@
-From cd7091a7d88306004ca98c5dafcc40f44589b105 Mon Sep 17 00:00:00 2001
-From: Jens Rehsack <sno@netbsd.org>
-Date: Mon, 24 Feb 2020 10:52:21 +0100
-Subject: [PATCH 1/3] src/dir.c: fix buffer-overflow warning
-
-Fix compiler warning:
- src/dir.c:1294:7: warning: 'strncpy' specified bound depends on the
- length of the source argument [-Wstringop-overflow=]
-
-The existing code assumes `path` will never exceed `MAXPATHLEN`. Also the
-size of the buffer is increased by 1 to hold a path with the length of
-`MAXPATHLEN` and trailing `0`.
-
-Signed-off-by: Jens Rehsack <sno@netbsd.org>
----
-Upstream-Status: Pending (https://savannah.gnu.org/bugs/?57888)
-
- src/dir.c | 6 +++---
- 1 file changed, 3 insertions(+), 3 deletions(-)
-
-diff --git a/src/dir.c b/src/dir.c
-index 862a18e..cad4c4a 100644
---- a/src/dir.c
-+++ b/src/dir.c
-@@ -1289,10 +1289,10 @@ local_stat (const char *path, struct stat *buf)
- if (plen > 1 && path[plen - 1] == '.'
- && (path[plen - 2] == '/' || path[plen - 2] == '\\'))
- {
-- char parent[MAXPATHLEN];
-+ char parent[MAXPATHLEN+1];
-
-- strncpy (parent, path, plen - 2);
-- parent[plen - 2] = '\0';
-+ strncpy (parent, path, MAXPATHLEN);
-+ parent[MIN(plen - 2, MAXPATHLEN)] = '\0';
- if (stat (parent, buf) < 0 || !_S_ISDIR (buf->st_mode))
- return -1;
- }
---
-2.17.1
-
diff --git a/meta/recipes-devtools/make/make/0002-modules-fcntl-allow-being-detected-by-importing-proj.patch b/meta/recipes-devtools/make/make/0002-modules-fcntl-allow-being-detected-by-importing-proj.patch
deleted file mode 100644
index b3d97f9a3a..0000000000
--- a/meta/recipes-devtools/make/make/0002-modules-fcntl-allow-being-detected-by-importing-proj.patch
+++ /dev/null
@@ -1,33 +0,0 @@
-From fb8aaed3b040e589cd880fd714dda5ec00687217 Mon Sep 17 00:00:00 2001
-From: Jens Rehsack <sno@netbsd.org>
-Date: Mon, 24 Feb 2020 12:10:06 +0100
-Subject: [PATCH 2/2] modules: fcntl: allow being detected by importing
- projects
-
-GNU project `make` relies on gnulib but provides some own compatibility
-functions - including an `fcntl`, which fails on mingw.
-The intension of gnulib is providing these functions and being wider tested,
-but silently injecting a function opens battle of compatibility layers.
-
-So adding a hint into target `config.h` to allow deciding whether using
-an own compatibility implementation or not.
-
-Signed-off-by: Jens Rehsack <sno@netbsd.org>
----
-Upstream-Status: Pending
-
- m4/gnulib-comp.m4 | 1 +
- 1 file changed, 1 insertion(+)
-
-diff --git a/m4/gnulib-comp.m4 b/m4/gnulib-comp.m4
-index 3ee0811..cf75541 100644
---- a/m4/gnulib-comp.m4
-+++ b/m4/gnulib-comp.m4
-@@ -147,6 +147,7 @@
- gl_FUNC_FCNTL
- if test $HAVE_FCNTL = 0 || test $REPLACE_FCNTL = 1; then
- AC_LIBOBJ([fcntl])
-+ AC_DEFINE(HAVE_GNULIB_FCNTL, 1, [Define to 1 if you have the `fcntl' function via gnulib.])
- fi
- gl_FCNTL_MODULE_INDICATOR([fcntl])
- gl_FCNTL_H
diff --git a/meta/recipes-devtools/make/make/0002-w32-compat-dirent.c-follow-header.patch b/meta/recipes-devtools/make/make/0002-w32-compat-dirent.c-follow-header.patch
deleted file mode 100644
index 9ecc44543e..0000000000
--- a/meta/recipes-devtools/make/make/0002-w32-compat-dirent.c-follow-header.patch
+++ /dev/null
@@ -1,36 +0,0 @@
-From 4dd8b4f43aa0078707ad9a7932f4e137bc4383ed Mon Sep 17 00:00:00 2001
-From: Jens Rehsack <sno@netbsd.org>
-Date: Mon, 24 Feb 2020 11:12:43 +0100
-Subject: [PATCH 2/3] w32: compat: dirent.c: follow header
-
-src/w32/include/dirent.h completely delegates to mingw dirent implementation,
-gnulib detects it as fine and completely usable - trust in that.
-
-Signed-off-by: Jens Rehsack <sno@netbsd.org>
----
-Upstream-Status: Pending (https://savannah.gnu.org/bugs/?57888)
-
- src/w32/compat/dirent.c | 3 ++-
- 1 file changed, 2 insertions(+), 1 deletion(-)
-
-diff --git a/src/w32/compat/dirent.c b/src/w32/compat/dirent.c
-index b8ec615..de80f72 100644
---- a/src/w32/compat/dirent.c
-+++ b/src/w32/compat/dirent.c
-@@ -23,7 +23,7 @@ this program. If not, see <http://www.gnu.org/licenses/>. */
- #include <stdlib.h>
- #include "dirent.h"
-
--
-+#ifndef __MINGW32__
- DIR*
- opendir(const char* pDirName)
- {
-@@ -193,3 +193,4 @@ seekdir(DIR* pDir, long nPosition)
-
- return;
- }
-+#endif /* !__MINGW32__ */
---
-2.17.1
-
diff --git a/meta/recipes-devtools/make/make/0003-posixfcn-fcntl-gnulib-make-emulated.patch b/meta/recipes-devtools/make/make/0003-posixfcn-fcntl-gnulib-make-emulated.patch
deleted file mode 100644
index 70414c51f4..0000000000
--- a/meta/recipes-devtools/make/make/0003-posixfcn-fcntl-gnulib-make-emulated.patch
+++ /dev/null
@@ -1,79 +0,0 @@
-From 3d074c8fca5fcf3e6b83d33788f35a8f1b3a44a2 Mon Sep 17 00:00:00 2001
-From: Jens Rehsack <sno@netbsd.org>
-Date: Fri, 21 Feb 2020 19:29:49 +0100
-Subject: [PATCH 3/3] posixfcn: fcntl: gnulib > make-emulated
-
-Rate the fcntl emulation from gnulib higher than the own one.
-
-Signed-off-by: Jens Rehsack <sno@netbsd.org>
----
-Upstream-Status: Pending (https://savannah.gnu.org/bugs/?57888)
-
- src/output.h | 19 ++++++++++++++-----
- src/w32/compat/posixfcn.c | 2 ++
- 2 files changed, 16 insertions(+), 5 deletions(-)
-
-diff --git a/src/output.h b/src/output.h
-index a506505..d3ce6b7 100644
---- a/src/output.h
-+++ b/src/output.h
-@@ -67,14 +67,21 @@ void output_dump (struct output *out);
-
- # ifdef WINDOWS32
- /* For emulations in w32/compat/posixfcn.c. */
--# define F_GETFD 1
--# define F_SETLKW 2
-+# ifndef F_GETFD
-+# define F_GETFD 1
-+# endif
-+# ifndef F_SETLKW
-+# define F_SETLKW 2
-+# endif
- /* Implementation note: None of the values of l_type below can be zero
- -- they are compared with a static instance of the struct, so zero
- means unknown/invalid, see w32/compat/posixfcn.c. */
--# define F_WRLCK 1
--# define F_UNLCK 2
--
-+# ifndef F_WRLCK
-+# define F_WRLCK 1
-+# endif
-+# ifndef F_UNLCK
-+# define F_UNLCK 2
-+# endif
- struct flock
- {
- short l_type;
-@@ -89,7 +96,9 @@ struct flock
- typedef intptr_t sync_handle_t;
-
- /* Public functions emulated/provided in posixfcn.c. */
-+# ifndef HAVE_GNULIB_FCNTL
- int fcntl (intptr_t fd, int cmd, ...);
-+# endif
- intptr_t create_mutex (void);
- int same_stream (FILE *f1, FILE *f2);
-
-diff --git a/src/w32/compat/posixfcn.c b/src/w32/compat/posixfcn.c
-index 975dfb7..d337b9c 100644
---- a/src/w32/compat/posixfcn.c
-+++ b/src/w32/compat/posixfcn.c
-@@ -29,6 +29,7 @@ this program. If not, see <http://www.gnu.org/licenses/>. */
- #ifndef NO_OUTPUT_SYNC
- /* Support for OUTPUT_SYNC and related functionality. */
-
-+#ifndef HAVE_GNULIB_FCNTL
- /* Emulation of fcntl that supports only F_GETFD and F_SETLKW. */
- int
- fcntl (intptr_t fd, int cmd, ...)
-@@ -142,6 +143,7 @@ fcntl (intptr_t fd, int cmd, ...)
- return -1;
- }
- }
-+#endif /* GNULIB_TEST_FCNTL */
-
- static intptr_t mutex_handle = -1;
-
---
-2.17.1
-
diff --git a/meta/recipes-devtools/make/make_4.3.bb b/meta/recipes-devtools/make/make_4.3.bb
deleted file mode 100644
index 9350bed05a..0000000000
--- a/meta/recipes-devtools/make/make_4.3.bb
+++ /dev/null
@@ -1,18 +0,0 @@
-LICENSE = "GPL-3.0-only"
-LIC_FILES_CHKSUM = "file://COPYING;md5=d32239bcb673463ab874e80d47fae504"
-require make.inc
-
-SRC_URI += "\
- file://0001-m4-getloadavg.m4-restrict-AIX-specific-test-on-AIX.patch \
- file://0002-modules-fcntl-allow-being-detected-by-importing-proj.patch \
- file://0001-src-dir.c-fix-buffer-overflow-warning.patch \
- file://0002-w32-compat-dirent.c-follow-header.patch \
- file://0003-posixfcn-fcntl-gnulib-make-emulated.patch \
- file://0001-makeinst-Do-not-undef-POSIX-on-clang-arm.patch \
-"
-
-EXTRA_OECONF += "--without-guile"
-
-SRC_URI[sha256sum] = "e05fdde47c5f7ca45cb697e973894ff4f5d79e13b750ed57d7b66d8defc78e19"
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/make/make_4.4.1.bb b/meta/recipes-devtools/make/make_4.4.1.bb
new file mode 100644
index 0000000000..c73751ddcb
--- /dev/null
+++ b/meta/recipes-devtools/make/make_4.4.1.bb
@@ -0,0 +1,13 @@
+LICENSE = "GPL-3.0-only"
+LIC_FILES_CHKSUM = "file://COPYING;md5=c678957b0c8e964aa6c70fd77641a71e"
+require make.inc
+
+SRC_URI += " \
+ file://0001-m4-getloadavg.m4-restrict-AIX-specific-test-on-AIX.patch \
+ "
+
+EXTRA_OECONF += "--without-guile"
+
+SRC_URI[sha256sum] = "dd16fb1d67bfab79a72f5e8390735c49e3e8e70b4945a15ab1f81ddb78658fb3"
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/makedevs/makedevs/COPYING.patch b/meta/recipes-devtools/makedevs/makedevs/COPYING.patch
deleted file mode 100644
index 3a8902b97a..0000000000
--- a/meta/recipes-devtools/makedevs/makedevs/COPYING.patch
+++ /dev/null
@@ -1,346 +0,0 @@
-Upstream-Status: Inappropriate [licensing]
-
-diff -ruN makedevs-1.0.0-orig/COPYING makedevs-1.0.0/COPYING
---- makedevs-1.0.0-orig/COPYING 1970-01-01 08:00:00.000000000 +0800
-+++ makedevs-1.0.0/COPYING 2010-12-09 16:42:20.274984665 +0800
-@@ -0,0 +1,340 @@
-+ GNU GENERAL PUBLIC LICENSE
-+ Version 2, June 1991
-+
-+ Copyright (C) 1989, 1991 Free Software Foundation, Inc.
-+ 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
-+ Everyone is permitted to copy and distribute verbatim copies
-+ of this license document, but changing it is not allowed.
-+
-+ Preamble
-+
-+ The licenses for most software are designed to take away your
-+freedom to share and change it. By contrast, the GNU General Public
-+License is intended to guarantee your freedom to share and change free
-+software--to make sure the software is free for all its users. This
-+General Public License applies to most of the Free Software
-+Foundation's software and to any other program whose authors commit to
-+using it. (Some other Free Software Foundation software is covered by
-+the GNU Library General Public License instead.) You can apply it to
-+your programs, too.
-+
-+ When we speak of free software, we are referring to freedom, not
-+price. Our General Public Licenses are designed to make sure that you
-+have the freedom to distribute copies of free software (and charge for
-+this service if you wish), that you receive source code or can get it
-+if you want it, that you can change the software or use pieces of it
-+in new free programs; and that you know you can do these things.
-+
-+ To protect your rights, we need to make restrictions that forbid
-+anyone to deny you these rights or to ask you to surrender the rights.
-+These restrictions translate to certain responsibilities for you if you
-+distribute copies of the software, or if you modify it.
-+
-+ For example, if you distribute copies of such a program, whether
-+gratis or for a fee, you must give the recipients all the rights that
-+you have. You must make sure that they, too, receive or can get the
-+source code. And you must show them these terms so they know their
-+rights.
-+
-+ We protect your rights with two steps: (1) copyright the software, and
-+(2) offer you this license which gives you legal permission to copy,
-+distribute and/or modify the software.
-+
-+ Also, for each author's protection and ours, we want to make certain
-+that everyone understands that there is no warranty for this free
-+software. If the software is modified by someone else and passed on, we
-+want its recipients to know that what they have is not the original, so
-+that any problems introduced by others will not reflect on the original
-+authors' reputations.
-+
-+ Finally, any free program is threatened constantly by software
-+patents. We wish to avoid the danger that redistributors of a free
-+program will individually obtain patent licenses, in effect making the
-+program proprietary. To prevent this, we have made it clear that any
-+patent must be licensed for everyone's free use or not licensed at all.
-+
-+ The precise terms and conditions for copying, distribution and
-+modification follow.
-+
-+ GNU GENERAL PUBLIC LICENSE
-+ TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
-+
-+ 0. This License applies to any program or other work which contains
-+a notice placed by the copyright holder saying it may be distributed
-+under the terms of this General Public License. The "Program", below,
-+refers to any such program or work, and a "work based on the Program"
-+means either the Program or any derivative work under copyright law:
-+that is to say, a work containing the Program or a portion of it,
-+either verbatim or with modifications and/or translated into another
-+language. (Hereinafter, translation is included without limitation in
-+the term "modification".) Each licensee is addressed as "you".
-+
-+Activities other than copying, distribution and modification are not
-+covered by this License; they are outside its scope. The act of
-+running the Program is not restricted, and the output from the Program
-+is covered only if its contents constitute a work based on the
-+Program (independent of having been made by running the Program).
-+Whether that is true depends on what the Program does.
-+
-+ 1. You may copy and distribute verbatim copies of the Program's
-+source code as you receive it, in any medium, provided that you
-+conspicuously and appropriately publish on each copy an appropriate
-+copyright notice and disclaimer of warranty; keep intact all the
-+notices that refer to this License and to the absence of any warranty;
-+and give any other recipients of the Program a copy of this License
-+along with the Program.
-+
-+You may charge a fee for the physical act of transferring a copy, and
-+you may at your option offer warranty protection in exchange for a fee.
-+
-+ 2. You may modify your copy or copies of the Program or any portion
-+of it, thus forming a work based on the Program, and copy and
-+distribute such modifications or work under the terms of Section 1
-+above, provided that you also meet all of these conditions:
-+
-+ a) You must cause the modified files to carry prominent notices
-+ stating that you changed the files and the date of any change.
-+
-+ b) You must cause any work that you distribute or publish, that in
-+ whole or in part contains or is derived from the Program or any
-+ part thereof, to be licensed as a whole at no charge to all third
-+ parties under the terms of this License.
-+
-+ c) If the modified program normally reads commands interactively
-+ when run, you must cause it, when started running for such
-+ interactive use in the most ordinary way, to print or display an
-+ announcement including an appropriate copyright notice and a
-+ notice that there is no warranty (or else, saying that you provide
-+ a warranty) and that users may redistribute the program under
-+ these conditions, and telling the user how to view a copy of this
-+ License. (Exception: if the Program itself is interactive but
-+ does not normally print such an announcement, your work based on
-+ the Program is not required to print an announcement.)
-+
-+These requirements apply to the modified work as a whole. If
-+identifiable sections of that work are not derived from the Program,
-+and can be reasonably considered independent and separate works in
-+themselves, then this License, and its terms, do not apply to those
-+sections when you distribute them as separate works. But when you
-+distribute the same sections as part of a whole which is a work based
-+on the Program, the distribution of the whole must be on the terms of
-+this License, whose permissions for other licensees extend to the
-+entire whole, and thus to each and every part regardless of who wrote it.
-+
-+Thus, it is not the intent of this section to claim rights or contest
-+your rights to work written entirely by you; rather, the intent is to
-+exercise the right to control the distribution of derivative or
-+collective works based on the Program.
-+
-+In addition, mere aggregation of another work not based on the Program
-+with the Program (or with a work based on the Program) on a volume of
-+a storage or distribution medium does not bring the other work under
-+the scope of this License.
-+
-+ 3. You may copy and distribute the Program (or a work based on it,
-+under Section 2) in object code or executable form under the terms of
-+Sections 1 and 2 above provided that you also do one of the following:
-+
-+ a) Accompany it with the complete corresponding machine-readable
-+ source code, which must be distributed under the terms of Sections
-+ 1 and 2 above on a medium customarily used for software interchange; or,
-+
-+ b) Accompany it with a written offer, valid for at least three
-+ years, to give any third party, for a charge no more than your
-+ cost of physically performing source distribution, a complete
-+ machine-readable copy of the corresponding source code, to be
-+ distributed under the terms of Sections 1 and 2 above on a medium
-+ customarily used for software interchange; or,
-+
-+ c) Accompany it with the information you received as to the offer
-+ to distribute corresponding source code. (This alternative is
-+ allowed only for noncommercial distribution and only if you
-+ received the program in object code or executable form with such
-+ an offer, in accord with Subsection b above.)
-+
-+The source code for a work means the preferred form of the work for
-+making modifications to it. For an executable work, complete source
-+code means all the source code for all modules it contains, plus any
-+associated interface definition files, plus the scripts used to
-+control compilation and installation of the executable. However, as a
-+special exception, the source code distributed need not include
-+anything that is normally distributed (in either source or binary
-+form) with the major components (compiler, kernel, and so on) of the
-+operating system on which the executable runs, unless that component
-+itself accompanies the executable.
-+
-+If distribution of executable or object code is made by offering
-+access to copy from a designated place, then offering equivalent
-+access to copy the source code from the same place counts as
-+distribution of the source code, even though third parties are not
-+compelled to copy the source along with the object code.
-+
-+ 4. You may not copy, modify, sublicense, or distribute the Program
-+except as expressly provided under this License. Any attempt
-+otherwise to copy, modify, sublicense or distribute the Program is
-+void, and will automatically terminate your rights under this License.
-+However, parties who have received copies, or rights, from you under
-+this License will not have their licenses terminated so long as such
-+parties remain in full compliance.
-+
-+ 5. You are not required to accept this License, since you have not
-+signed it. However, nothing else grants you permission to modify or
-+distribute the Program or its derivative works. These actions are
-+prohibited by law if you do not accept this License. Therefore, by
-+modifying or distributing the Program (or any work based on the
-+Program), you indicate your acceptance of this License to do so, and
-+all its terms and conditions for copying, distributing or modifying
-+the Program or works based on it.
-+
-+ 6. Each time you redistribute the Program (or any work based on the
-+Program), the recipient automatically receives a license from the
-+original licensor to copy, distribute or modify the Program subject to
-+these terms and conditions. You may not impose any further
-+restrictions on the recipients' exercise of the rights granted herein.
-+You are not responsible for enforcing compliance by third parties to
-+this License.
-+
-+ 7. If, as a consequence of a court judgment or allegation of patent
-+infringement or for any other reason (not limited to patent issues),
-+conditions are imposed on you (whether by court order, agreement or
-+otherwise) that contradict the conditions of this License, they do not
-+excuse you from the conditions of this License. If you cannot
-+distribute so as to satisfy simultaneously your obligations under this
-+License and any other pertinent obligations, then as a consequence you
-+may not distribute the Program at all. For example, if a patent
-+license would not permit royalty-free redistribution of the Program by
-+all those who receive copies directly or indirectly through you, then
-+the only way you could satisfy both it and this License would be to
-+refrain entirely from distribution of the Program.
-+
-+If any portion of this section is held invalid or unenforceable under
-+any particular circumstance, the balance of the section is intended to
-+apply and the section as a whole is intended to apply in other
-+circumstances.
-+
-+It is not the purpose of this section to induce you to infringe any
-+patents or other property right claims or to contest validity of any
-+such claims; this section has the sole purpose of protecting the
-+integrity of the free software distribution system, which is
-+implemented by public license practices. Many people have made
-+generous contributions to the wide range of software distributed
-+through that system in reliance on consistent application of that
-+system; it is up to the author/donor to decide if he or she is willing
-+to distribute software through any other system and a licensee cannot
-+impose that choice.
-+
-+This section is intended to make thoroughly clear what is believed to
-+be a consequence of the rest of this License.
-+
-+ 8. If the distribution and/or use of the Program is restricted in
-+certain countries either by patents or by copyrighted interfaces, the
-+original copyright holder who places the Program under this License
-+may add an explicit geographical distribution limitation excluding
-+those countries, so that distribution is permitted only in or among
-+countries not thus excluded. In such case, this License incorporates
-+the limitation as if written in the body of this License.
-+
-+ 9. The Free Software Foundation may publish revised and/or new versions
-+of the General Public License from time to time. Such new versions will
-+be similar in spirit to the present version, but may differ in detail to
-+address new problems or concerns.
-+
-+Each version is given a distinguishing version number. If the Program
-+specifies a version number of this License which applies to it and "any
-+later version", you have the option of following the terms and conditions
-+either of that version or of any later version published by the Free
-+Software Foundation. If the Program does not specify a version number of
-+this License, you may choose any version ever published by the Free Software
-+Foundation.
-+
-+ 10. If you wish to incorporate parts of the Program into other free
-+programs whose distribution conditions are different, write to the author
-+to ask for permission. For software which is copyrighted by the Free
-+Software Foundation, write to the Free Software Foundation; we sometimes
-+make exceptions for this. Our decision will be guided by the two goals
-+of preserving the free status of all derivatives of our free software and
-+of promoting the sharing and reuse of software generally.
-+
-+ NO WARRANTY
-+
-+ 11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY
-+FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN
-+OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES
-+PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED
-+OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
-+MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS
-+TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE
-+PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING,
-+REPAIR OR CORRECTION.
-+
-+ 12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
-+WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR
-+REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES,
-+INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING
-+OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED
-+TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY
-+YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER
-+PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE
-+POSSIBILITY OF SUCH DAMAGES.
-+
-+ END OF TERMS AND CONDITIONS
-+
-+ How to Apply These Terms to Your New Programs
-+
-+ If you develop a new program, and you want it to be of the greatest
-+possible use to the public, the best way to achieve this is to make it
-+free software which everyone can redistribute and change under these terms.
-+
-+ To do so, attach the following notices to the program. It is safest
-+to attach them to the start of each source file to most effectively
-+convey the exclusion of warranty; and each file should have at least
-+the "copyright" line and a pointer to where the full notice is found.
-+
-+ <one line to give the program's name and a brief idea of what it does.>
-+ Copyright (C) <year> <name of author>
-+
-+ This program is free software; you can redistribute it and/or modify
-+ it under the terms of the GNU General Public License as published by
-+ the Free Software Foundation; either version 2 of the License, or
-+ (at your option) any later version.
-+
-+ This program is distributed in the hope that it will be useful,
-+ but WITHOUT ANY WARRANTY; without even the implied warranty of
-+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-+ GNU General Public License for more details.
-+
-+ You should have received a copy of the GNU General Public License
-+ along with this program; if not, write to the Free Software
-+ Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
-+
-+
-+Also add information on how to contact you by electronic and paper mail.
-+
-+If the program is interactive, make it output a short notice like this
-+when it starts in an interactive mode:
-+
-+ Gnomovision version 69, Copyright (C) year name of author
-+ Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
-+ This is free software, and you are welcome to redistribute it
-+ under certain conditions; type `show c' for details.
-+
-+The hypothetical commands `show w' and `show c' should show the appropriate
-+parts of the General Public License. Of course, the commands you use may
-+be called something other than `show w' and `show c'; they could even be
-+mouse-clicks or menu items--whatever suits your program.
-+
-+You should also get your employer (if you work as a programmer) or your
-+school, if any, to sign a "copyright disclaimer" for the program, if
-+necessary. Here is a sample; alter the names:
-+
-+ Yoyodyne, Inc., hereby disclaims all copyright interest in the program
-+ `Gnomovision' (which makes passes at compilers) written by James Hacker.
-+
-+ <signature of Ty Coon>, 1 April 1989
-+ Ty Coon, President of Vice
-+
-+This General Public License does not permit incorporating your program into
-+proprietary programs. If your program is a subroutine library, you may
-+consider it more useful to permit linking proprietary applications with the
-+library. If this is what you want to do, use the GNU Library General
-+Public License instead of this License.
diff --git a/meta/recipes-devtools/makedevs/makedevs/makedevs.c b/meta/recipes-devtools/makedevs/makedevs/makedevs.c
index 32b9872932..df2e3cfad5 100644
--- a/meta/recipes-devtools/makedevs/makedevs/makedevs.c
+++ b/meta/recipes-devtools/makedevs/makedevs/makedevs.c
@@ -1,3 +1,7 @@
+/*
+ * SPDX-License-Identifier: GPL-2.0-only
+ */
+
#define _GNU_SOURCE
#include <stdio.h>
#include <errno.h>
diff --git a/meta/recipes-devtools/makedevs/makedevs_1.0.1.bb b/meta/recipes-devtools/makedevs/makedevs_1.0.1.bb
index 007ebbca89..0d6c7a01eb 100644
--- a/meta/recipes-devtools/makedevs/makedevs_1.0.1.bb
+++ b/meta/recipes-devtools/makedevs/makedevs_1.0.1.bb
@@ -1,10 +1,9 @@
SUMMARY = "Tool for creating device nodes"
DESCRIPTION = "${SUMMARY}"
LICENSE = "GPL-2.0-only"
-LIC_FILES_CHKSUM = "file://COPYING;md5=393a5ca445f6965873eca0259a17f833"
+LIC_FILES_CHKSUM = "file://makedevs.c;beginline=2;endline=2;md5=c3817b10013a30076c68a90e40a55570"
SECTION = "base"
-SRC_URI = "file://makedevs.c \
- file://COPYING.patch"
+SRC_URI = "file://makedevs.c"
S = "${WORKDIR}"
diff --git a/meta/recipes-devtools/meson/meson/0001-Check-for-clang-before-guessing-gcc-or-lcc.patch b/meta/recipes-devtools/meson/meson/0001-Check-for-clang-before-guessing-gcc-or-lcc.patch
deleted file mode 100644
index 58fa119439..0000000000
--- a/meta/recipes-devtools/meson/meson/0001-Check-for-clang-before-guessing-gcc-or-lcc.patch
+++ /dev/null
@@ -1,56 +0,0 @@
-From 8739e1c3bef653415ad4b9b9c318ccfa76c43da6 Mon Sep 17 00:00:00 2001
-From: Khem Raj <raj.khem@gmail.com>
-Date: Thu, 31 Mar 2022 15:00:24 -0700
-Subject: [PATCH] Check for clang before guessing gcc or lcc
-
-clang --version can yield a string like below when its installed into
-such a directory
-
-clang version 14.0.0 (https://github.com/llvm/llvm-project 3f43d803382d57e3fc010ca19833077d1023e9c9)
-Target: aarch64-yoe-linux
-Thread model: posix
-InstalledDir: /mnt/b/yoe/master/build/tmp/work/cortexa72-yoe-linux/gnome-text-editor/42.0-r0/recipe-sysroot-native/usr/bin/aarch64-yoe-linux
-
-as you can see InstallDir has 'xt-' subtring and this trips the check to
-guess gcc
-
-if 'Free Software Foundation' in out or 'xt-' in out:
-
-Therefore, check if compiler is clang then there is no point of running
-this check anyway.
-
-Upstream-Status: Submitted [https://github.com/mesonbuild/meson/pull/10218]
-Signed-off-by: Khem Raj <raj.khem@gmail.com>
----
- mesonbuild/compilers/detect.py | 15 ++++++++-------
- 1 file changed, 8 insertions(+), 7 deletions(-)
-
-diff --git a/mesonbuild/compilers/detect.py b/mesonbuild/compilers/detect.py
-index 53948b01a..ba335cf39 100644
---- a/mesonbuild/compilers/detect.py
-+++ b/mesonbuild/compilers/detect.py
-@@ -427,13 +427,14 @@ def _detect_c_or_cpp_compiler(env: 'Environment', lang: str, for_machine: Machin
- version = search_version(out)
-
- guess_gcc_or_lcc: T.Optional[str] = None
-- if 'Free Software Foundation' in out or 'xt-' in out:
-- guess_gcc_or_lcc = 'gcc'
-- if 'e2k' in out and 'lcc' in out:
-- guess_gcc_or_lcc = 'lcc'
-- if 'Microchip Technology' in out:
-- # this output has "Free Software Foundation" in its version
-- guess_gcc_or_lcc = None
-+ if not 'clang' in compiler_name:
-+ if 'Free Software Foundation' in out or 'xt-' in out:
-+ guess_gcc_or_lcc = 'gcc'
-+ if 'e2k' in out and 'lcc' in out:
-+ guess_gcc_or_lcc = 'lcc'
-+ if 'Microchip Technology' in out:
-+ # this output has "Free Software Foundation" in its version
-+ guess_gcc_or_lcc = None
-
- if guess_gcc_or_lcc:
- defines = _get_gnu_compiler_defines(compiler)
---
-2.35.1
-
diff --git a/meta/recipes-devtools/meson/meson/0001-Make-CPU-family-warnings-fatal.patch b/meta/recipes-devtools/meson/meson/0001-Make-CPU-family-warnings-fatal.patch
index 848dccfbe7..8ea7c35950 100644
--- a/meta/recipes-devtools/meson/meson/0001-Make-CPU-family-warnings-fatal.patch
+++ b/meta/recipes-devtools/meson/meson/0001-Make-CPU-family-warnings-fatal.patch
@@ -1,4 +1,4 @@
-From 6c4eef1d92e9e42fdbc888365cab3c95fb33c605 Mon Sep 17 00:00:00 2001
+From b77cbe67df5fa0998946503f207c256ee740bb5f Mon Sep 17 00:00:00 2001
From: Ross Burton <ross.burton@intel.com>
Date: Tue, 3 Jul 2018 13:59:09 +0100
Subject: [PATCH] Make CPU family warnings fatal
@@ -7,15 +7,15 @@ Upstream-Status: Inappropriate [OE specific]
Signed-off-by: Ross Burton <ross.burton@intel.com>
---
- mesonbuild/envconfig.py | 2 +-
- mesonbuild/environment.py | 4 +---
- 2 files changed, 2 insertions(+), 4 deletions(-)
+ mesonbuild/envconfig.py | 4 ++--
+ mesonbuild/environment.py | 6 ++----
+ 2 files changed, 4 insertions(+), 6 deletions(-)
-Index: meson-0.60.2/mesonbuild/envconfig.py
-===================================================================
---- meson-0.60.2.orig/mesonbuild/envconfig.py
-+++ meson-0.60.2/mesonbuild/envconfig.py
-@@ -266,8 +266,8 @@ class MachineInfo(HoldableObject):
+diff --git a/mesonbuild/envconfig.py b/mesonbuild/envconfig.py
+index 07f1229..a35c356 100644
+--- a/mesonbuild/envconfig.py
++++ b/mesonbuild/envconfig.py
+@@ -285,8 +285,8 @@ class MachineInfo(HoldableObject):
'but is missing {}.'.format(minimum_literal - set(literal)))
cpu_family = literal['cpu_family']
@@ -26,13 +26,13 @@ Index: meson-0.60.2/mesonbuild/envconfig.py
endian = literal['endian']
if endian not in ('little', 'big'):
-Index: meson-0.60.2/mesonbuild/environment.py
-===================================================================
---- meson-0.60.2.orig/mesonbuild/environment.py
-+++ meson-0.60.2/mesonbuild/environment.py
-@@ -354,10 +354,8 @@ def detect_cpu_family(compilers: Compile
- if any_compiler_has_define(compilers, '__64BIT__'):
- trial = 'ppc64'
+diff --git a/mesonbuild/environment.py b/mesonbuild/environment.py
+index 2ba2054..d798e3b 100644
+--- a/mesonbuild/environment.py
++++ b/mesonbuild/environment.py
+@@ -359,10 +359,8 @@ def detect_cpu_family(compilers: CompilersDict) -> str:
+ if compilers and not any_compiler_has_define(compilers, '__mips64'):
+ trial = 'mips'
- if trial not in known_cpu_families:
- mlog.warning(f'Unknown CPU family {trial!r}, please report this at '
diff --git a/meta/recipes-devtools/meson/meson/0001-is_debianlike-always-return-False.patch b/meta/recipes-devtools/meson/meson/0001-is_debianlike-always-return-False.patch
deleted file mode 100644
index 83c4782a61..0000000000
--- a/meta/recipes-devtools/meson/meson/0001-is_debianlike-always-return-False.patch
+++ /dev/null
@@ -1,26 +0,0 @@
-From 667b9ede638677fb37911306937ea62f05897581 Mon Sep 17 00:00:00 2001
-From: Alexander Kanavin <alex@linutronix.de>
-Date: Mon, 18 Oct 2021 15:55:59 +0200
-Subject: [PATCH] is_debianlike(): always return False
-
-Otherwise, host contamination happens.
-
-Upstream-Status: Inappropriate [oe-core specific]
-Signed-off-by: Alexander Kanavin <alex@linutronix.de>
----
- mesonbuild/mesonlib/universal.py | 2 +-
- 1 file changed, 1 insertion(+), 1 deletion(-)
-
-diff --git a/mesonbuild/mesonlib/universal.py b/mesonbuild/mesonlib/universal.py
-index d670d04..47d1b52 100644
---- a/mesonbuild/mesonlib/universal.py
-+++ b/mesonbuild/mesonlib/universal.py
-@@ -651,7 +651,7 @@ def is_cygwin() -> bool:
-
-
- def is_debianlike() -> bool:
-- return os.path.isfile('/etc/debian_version')
-+ return False
-
-
- def is_dragonflybsd() -> bool:
diff --git a/meta/recipes-devtools/meson/meson/0001-python-module-do-not-manipulate-the-environment-when.patch b/meta/recipes-devtools/meson/meson/0001-python-module-do-not-manipulate-the-environment-when.patch
index f01a667818..2e0a4b1bbe 100644
--- a/meta/recipes-devtools/meson/meson/0001-python-module-do-not-manipulate-the-environment-when.patch
+++ b/meta/recipes-devtools/meson/meson/0001-python-module-do-not-manipulate-the-environment-when.patch
@@ -1,4 +1,4 @@
-From 2e9582167bf9d3273004edb2637310531f0155ab Mon Sep 17 00:00:00 2001
+From e85683698aa3556bf14fc6d35f2c067f16af520b Mon Sep 17 00:00:00 2001
From: Alexander Kanavin <alex.kanavin@gmail.com>
Date: Mon, 19 Nov 2018 14:24:26 +0100
Subject: [PATCH] python module: do not manipulate the environment when calling
@@ -8,30 +8,30 @@ Upstream-Status: Inappropriate [oe-core specific]
Signed-off-by: Alexander Kanavin <alex.kanavin@gmail.com>
---
- mesonbuild/modules/python.py | 6 +-----
+ mesonbuild/dependencies/python.py | 6 +-----
1 file changed, 1 insertion(+), 5 deletions(-)
-diff --git a/mesonbuild/modules/python.py b/mesonbuild/modules/python.py
-index 3bbccd1..fda7a25 100644
---- a/mesonbuild/modules/python.py
-+++ b/mesonbuild/modules/python.py
-@@ -277,9 +277,6 @@ def python_factory(env: 'Environment', for_machine: 'MachineChoice',
- # there is no LIBPC, so we can't search in it
- return NotFoundDependency('python', env)
+diff --git a/mesonbuild/dependencies/python.py b/mesonbuild/dependencies/python.py
+index 9aea6bd..8c13ede 100644
+--- a/mesonbuild/dependencies/python.py
++++ b/mesonbuild/dependencies/python.py
+@@ -380,9 +380,6 @@ def python_factory(env: 'Environment', for_machine: 'MachineChoice',
+ empty.name = 'python'
+ return empty
-- old_pkg_libdir = os.environ.pop('PKG_CONFIG_LIBDIR', None)
-- old_pkg_path = os.environ.pop('PKG_CONFIG_PATH', None)
-- os.environ['PKG_CONFIG_LIBDIR'] = pkg_libdir
- try:
- return PythonPkgConfigDependency(name, env, kwargs, installation, True)
- finally:
-@@ -288,8 +285,7 @@ def python_factory(env: 'Environment', for_machine: 'MachineChoice',
- os.environ[name] = value
- elif name in os.environ:
- del os.environ[name]
-- set_env('PKG_CONFIG_LIBDIR', old_pkg_libdir)
-- set_env('PKG_CONFIG_PATH', old_pkg_path)
-+ pass
+- old_pkg_libdir = os.environ.pop('PKG_CONFIG_LIBDIR', None)
+- old_pkg_path = os.environ.pop('PKG_CONFIG_PATH', None)
+- os.environ['PKG_CONFIG_LIBDIR'] = pkg_libdir
+ try:
+ return PythonPkgConfigDependency(name, env, kwargs, installation, True)
+ finally:
+@@ -391,8 +388,7 @@ def python_factory(env: 'Environment', for_machine: 'MachineChoice',
+ os.environ[name] = value
+ elif name in os.environ:
+ del os.environ[name]
+- set_env('PKG_CONFIG_LIBDIR', old_pkg_libdir)
+- set_env('PKG_CONFIG_PATH', old_pkg_path)
++ pass
- candidates.append(functools.partial(wrap_in_pythons_pc_dir, pkg_name, env, kwargs, installation))
- # We only need to check both, if a python install has a LIBPC. It might point to the wrong location,
+ candidates.append(functools.partial(wrap_in_pythons_pc_dir, pkg_name, env, kwargs, installation))
+ # We only need to check both, if a python install has a LIBPC. It might point to the wrong location,
diff --git a/meta/recipes-devtools/meson/meson/0002-Support-building-allarch-recipes-again.patch b/meta/recipes-devtools/meson/meson/0002-Support-building-allarch-recipes-again.patch
index dcc1ce9c27..a8396f30bb 100644
--- a/meta/recipes-devtools/meson/meson/0002-Support-building-allarch-recipes-again.patch
+++ b/meta/recipes-devtools/meson/meson/0002-Support-building-allarch-recipes-again.patch
@@ -1,21 +1,22 @@
-From 656bf55fed01df2d2e2ad6d9d9887173cb16b85c Mon Sep 17 00:00:00 2001
+From 6fb8db54929b40e1fd7ac949ef44f0d37df0bae9 Mon Sep 17 00:00:00 2001
From: Peter Kjellerstedt <pkj@axis.com>
Date: Thu, 26 Jul 2018 16:32:49 +0200
-Subject: [PATCH 2/2] Support building allarch recipes again
+Subject: [PATCH] Support building allarch recipes again
This registers "allarch" as a known CPU family.
Upstream-Status: Inappropriate [OE specific]
Signed-off-by: Peter Kjellerstedt <peter.kjellerstedt@axis.com>
+
---
mesonbuild/envconfig.py | 1 +
1 file changed, 1 insertion(+)
diff --git a/mesonbuild/envconfig.py b/mesonbuild/envconfig.py
-index 4d58c91..ff01ad1 100644
+index a35c356..436355f 100644
--- a/mesonbuild/envconfig.py
+++ b/mesonbuild/envconfig.py
-@@ -36,6 +36,7 @@ from pathlib import Path
+@@ -38,6 +38,7 @@ from pathlib import Path
known_cpu_families = (
@@ -23,6 +24,3 @@ index 4d58c91..ff01ad1 100644
'aarch64',
'alpha',
'arc',
---
-2.24.0
-
diff --git a/meta/recipes-devtools/meson/meson/disable-rpath-handling.patch b/meta/recipes-devtools/meson/meson/disable-rpath-handling.patch
deleted file mode 100644
index 7aaed8b4a3..0000000000
--- a/meta/recipes-devtools/meson/meson/disable-rpath-handling.patch
+++ /dev/null
@@ -1,37 +0,0 @@
-From 18600f7a1cddf23aeabd188f86e66983f27ccfe3 Mon Sep 17 00:00:00 2001
-From: Richard Purdie <richard.purdie@linuxfoundation.org>
-Date: Fri, 23 Nov 2018 15:28:28 +0000
-Subject: [PATCH] meson: Disable rpath stripping at install time
-
-We need to allow our rpaths generated through the compiler flags to make it into
-our binaries. Therefore disable the meson manipulations of these unless there
-is a specific directive to do something differently in the project.
-
-RP 2018/11/23
-
-Upstream-Status: Submitted [https://github.com/mesonbuild/meson/issues/2567]
----
- mesonbuild/minstall.py | 7 +++++--
- 1 file changed, 5 insertions(+), 2 deletions(-)
-
-diff --git a/mesonbuild/minstall.py b/mesonbuild/minstall.py
-index 7d0da13..17d50db 100644
---- a/mesonbuild/minstall.py
-+++ b/mesonbuild/minstall.py
-@@ -718,8 +718,11 @@ class Installer:
- if file_copied:
- self.did_install_something = True
- try:
-- self.fix_rpath(outname, t.rpath_dirs_to_remove, install_rpath, final_path,
-- install_name_mappings, verbose=False)
-+ if install_rpath:
-+ self.fix_rpath(outname, t.rpath_dirs_to_remove, install_rpath, final_path,
-+ install_name_mappings, verbose=False)
-+ else:
-+ print("RPATH changes at install time disabled")
- except SystemExit as e:
- if isinstance(e.code, int) and e.code == 0:
- pass
---
-2.20.1
-
diff --git a/meta/recipes-devtools/meson/meson/meson-wrapper b/meta/recipes-devtools/meson/meson/meson-wrapper
index 8fafaad975..7455985297 100755
--- a/meta/recipes-devtools/meson/meson/meson-wrapper
+++ b/meta/recipes-devtools/meson/meson/meson-wrapper
@@ -1,11 +1,11 @@
#!/bin/sh
if [ -z "$OECORE_NATIVE_SYSROOT" ]; then
- echo "OECORE_NATIVE_SYSROOT not set; are you in a Yocto SDK environment?" >&2
+ exec "meson.real" "$@"
fi
if [ -z "$SSL_CERT_DIR" ]; then
- export SSL_CERT_DIR="${OECORE_NATIVE_SYSROOT}/etc/ssl/certs/"
+ export SSL_CERT_DIR="$OECORE_NATIVE_SYSROOT/etc/ssl/certs/"
fi
# If these are set to a cross-compile path, meson will get confused and try to
@@ -13,7 +13,19 @@ fi
# config is already in meson.cross.
unset CC CXX CPP LD AR NM STRIP
+case "$1" in
+setup|configure|dist|install|introspect|init|test|wrap|subprojects|rewrite|compile|devenv|env2mfile|help) MESON_CMD="$1" ;;
+*) echo meson-wrapper: Implicit setup command assumed; MESON_CMD=setup ;;
+esac
+
+if [ "$MESON_CMD" = "setup" ]; then
+ MESON_SETUP_OPTS=" \
+ --cross-file="$OECORE_NATIVE_SYSROOT/usr/share/meson/${TARGET_PREFIX}meson.cross" \
+ --native-file="$OECORE_NATIVE_SYSROOT/usr/share/meson/meson.native" \
+ "
+ echo meson-wrapper: Running meson with setup options: \"$MESON_SETUP_OPTS\"
+fi
+
exec "$OECORE_NATIVE_SYSROOT/usr/bin/meson.real" \
- --cross-file "${OECORE_NATIVE_SYSROOT}/usr/share/meson/${TARGET_PREFIX}meson.cross" \
- --native-file "${OECORE_NATIVE_SYSROOT}/usr/share/meson/meson.native" \
- "$@"
+ "$@" \
+ $MESON_SETUP_OPTS
diff --git a/meta/recipes-devtools/meson/meson_0.62.1.bb b/meta/recipes-devtools/meson/meson_0.62.1.bb
deleted file mode 100644
index dabdcaab01..0000000000
--- a/meta/recipes-devtools/meson/meson_0.62.1.bb
+++ /dev/null
@@ -1,134 +0,0 @@
-HOMEPAGE = "http://mesonbuild.com"
-SUMMARY = "A high performance build system"
-DESCRIPTION = "Meson is a build system designed to increase programmer \
-productivity. It does this by providing a fast, simple and easy to use \
-interface for modern software development tools and practices."
-
-LICENSE = "Apache-2.0"
-LIC_FILES_CHKSUM = "file://COPYING;md5=3b83ef96387f14655fc854ddc3c6bd57"
-
-SRC_URI = "https://github.com/mesonbuild/meson/releases/download/${PV}/meson-${PV}.tar.gz \
- file://meson-setup.py \
- file://meson-wrapper \
- file://0001-python-module-do-not-manipulate-the-environment-when.patch \
- file://disable-rpath-handling.patch \
- file://0001-Make-CPU-family-warnings-fatal.patch \
- file://0002-Support-building-allarch-recipes-again.patch \
- file://0001-is_debianlike-always-return-False.patch \
- file://0001-Check-for-clang-before-guessing-gcc-or-lcc.patch \
- "
-SRC_URI[sha256sum] = "a0f5caa1e70da12d5e63aa6a9504273759b891af36c8d87de381a4ed1380e845"
-
-UPSTREAM_CHECK_URI = "https://github.com/mesonbuild/meson/releases"
-UPSTREAM_CHECK_REGEX = "meson-(?P<pver>\d+(\.\d+)+)\.tar"
-
-inherit python_setuptools_build_meta
-
-RDEPENDS:${PN} = "ninja python3-modules python3-pkg-resources"
-
-FILES:${PN} += "${datadir}/polkit-1"
-
-do_install:append () {
- # As per the same issue in the python recipe itself:
- # Unfortunately the following pyc files are non-deterministc due to 'frozenset'
- # being written without strict ordering, even with PYTHONHASHSEED = 0
- # Upstream is discussing ways to solve the issue properly, until then let's
- # just not install the problematic files.
- # More info: http://benno.id.au/blog/2013/01/15/python-determinism
- rm ${D}${libdir}/python*/site-packages/mesonbuild/dependencies/__pycache__/mpi.cpython*
-}
-
-BBCLASSEXTEND = "native nativesdk"
-
-inherit meson-routines
-
-# The cross file logic is similar but not identical to that in meson.bbclass,
-# since it's generating for an SDK rather than a cross-compile. Important
-# differences are:
-# - We can't set vars like CC, CXX, etc. yet because they will be filled in with
-# real paths by meson-setup.sh when the SDK is extracted.
-# - Some overrides aren't needed, since the SDK injects paths that take care of
-# them.
-def var_list2str(var, d):
- items = d.getVar(var).split()
- return items[0] if len(items) == 1 else ', '.join(repr(s) for s in items)
-
-def generate_native_link_template(d):
- val = ['-L@{OECORE_NATIVE_SYSROOT}${libdir_native}',
- '-L@{OECORE_NATIVE_SYSROOT}${base_libdir_native}',
- '-Wl,-rpath-link,@{OECORE_NATIVE_SYSROOT}${libdir_native}',
- '-Wl,-rpath-link,@{OECORE_NATIVE_SYSROOT}${base_libdir_native}',
- '-Wl,--allow-shlib-undefined'
- ]
- build_arch = d.getVar('BUILD_ARCH')
- if 'x86_64' in build_arch:
- loader = 'ld-linux-x86-64.so.2'
- elif 'i686' in build_arch:
- loader = 'ld-linux.so.2'
- elif 'aarch64' in build_arch:
- loader = 'ld-linux-aarch64.so.1'
- elif 'ppc64le' in build_arch:
- loader = 'ld64.so.2'
-
- if loader:
- val += ['-Wl,--dynamic-linker=@{OECORE_NATIVE_SYSROOT}${base_libdir_native}/' + loader]
-
- return repr(val)
-
-do_install:append:class-nativesdk() {
- install -d ${D}${datadir}/meson
-
- cat >${D}${datadir}/meson/meson.native.template <<EOF
-[binaries]
-c = ${@meson_array('BUILD_CC', d)}
-cpp = ${@meson_array('BUILD_CXX', d)}
-ar = ${@meson_array('BUILD_AR', d)}
-nm = ${@meson_array('BUILD_NM', d)}
-strip = ${@meson_array('BUILD_STRIP', d)}
-readelf = ${@meson_array('BUILD_READELF', d)}
-pkgconfig = 'pkg-config-native'
-
-[built-in options]
-c_args = ['-isystem@{OECORE_NATIVE_SYSROOT}${includedir_native}' , ${@var_list2str('BUILD_OPTIMIZATION', d)}]
-c_link_args = ${@generate_native_link_template(d)}
-cpp_args = ['-isystem@{OECORE_NATIVE_SYSROOT}${includedir_native}' , ${@var_list2str('BUILD_OPTIMIZATION', d)}]
-cpp_link_args = ${@generate_native_link_template(d)}
-[properties]
-sys_root = '@OECORE_NATIVE_SYSROOT'
-EOF
-
- cat >${D}${datadir}/meson/meson.cross.template <<EOF
-[binaries]
-c = @CC
-cpp = @CXX
-ar = @AR
-nm = @NM
-strip = @STRIP
-pkgconfig = 'pkg-config'
-
-[built-in options]
-c_args = @CFLAGS
-c_link_args = @LDFLAGS
-cpp_args = @CPPFLAGS
-cpp_link_args = @LDFLAGS
-
-[properties]
-needs_exe_wrapper = true
-sys_root = @OECORE_TARGET_SYSROOT
-
-[host_machine]
-system = '${SDK_OS}'
-cpu_family = '${@meson_cpu_family("SDK_ARCH", d)}'
-cpu = '${SDK_ARCH}'
-endian = '${@meson_endian("SDK", d)}'
-EOF
-
- install -d ${D}${SDKPATHNATIVE}/post-relocate-setup.d
- install -m 0755 ${WORKDIR}/meson-setup.py ${D}${SDKPATHNATIVE}/post-relocate-setup.d/
-
- # We need to wrap the real meson with a thin env setup wrapper.
- mv ${D}${bindir}/meson ${D}${bindir}/meson.real
- install -m 0755 ${WORKDIR}/meson-wrapper ${D}${bindir}/meson
-}
-
-FILES:${PN}:append:class-nativesdk = "${datadir}/meson ${SDKPATHNATIVE}"
diff --git a/meta/recipes-devtools/meson/meson_1.3.1.bb b/meta/recipes-devtools/meson/meson_1.3.1.bb
new file mode 100644
index 0000000000..9cc0cc31ab
--- /dev/null
+++ b/meta/recipes-devtools/meson/meson_1.3.1.bb
@@ -0,0 +1,158 @@
+HOMEPAGE = "http://mesonbuild.com"
+SUMMARY = "A high performance build system"
+DESCRIPTION = "Meson is a build system designed to increase programmer \
+productivity. It does this by providing a fast, simple and easy to use \
+interface for modern software development tools and practices."
+
+LICENSE = "Apache-2.0"
+LIC_FILES_CHKSUM = "file://COPYING;md5=3b83ef96387f14655fc854ddc3c6bd57"
+
+GITHUB_BASE_URI = "https://github.com/mesonbuild/meson/releases/"
+SRC_URI = "${GITHUB_BASE_URI}/download/${PV}/meson-${PV}.tar.gz \
+ file://meson-setup.py \
+ file://meson-wrapper \
+ file://0001-python-module-do-not-manipulate-the-environment-when.patch \
+ file://0001-Make-CPU-family-warnings-fatal.patch \
+ file://0002-Support-building-allarch-recipes-again.patch \
+ "
+SRC_URI[sha256sum] = "6020568bdede1643d4fb41e28215be38eff5d52da28ac7d125457c59e0032ad7"
+UPSTREAM_CHECK_REGEX = "(?P<pver>\d+(\.\d+)+)$"
+
+inherit python_setuptools_build_meta github-releases
+
+RDEPENDS:${PN} = "ninja python3-modules python3-pkg-resources"
+
+FILES:${PN} += "${datadir}/polkit-1"
+
+do_install:append () {
+ # As per the same issue in the python recipe itself:
+ # Unfortunately the following pyc files are non-deterministc due to 'frozenset'
+ # being written without strict ordering, even with PYTHONHASHSEED = 0
+ # Upstream is discussing ways to solve the issue properly, until then let's
+ # just not install the problematic files.
+ # More info: http://benno.id.au/blog/2013/01/15/python-determinism
+ rm -f ${D}${libdir}/python*/site-packages/mesonbuild/dependencies/__pycache__/mpi.cpython*
+}
+
+BBCLASSEXTEND = "native nativesdk"
+
+inherit meson-routines
+
+# The cross file logic is similar but not identical to that in meson.bbclass,
+# since it's generating for an SDK rather than a cross-compile. Important
+# differences are:
+# - We can't set vars like CC, CXX, etc. yet because they will be filled in with
+# real paths by meson-setup.sh when the SDK is extracted.
+# - Some overrides aren't needed, since the SDK injects paths that take care of
+# them.
+def var_list2str(var, d):
+ items = d.getVar(var).split()
+ return items[0] if len(items) == 1 else ', '.join(repr(s) for s in items)
+
+def generate_native_link_template(d):
+ val = ['-L@{OECORE_NATIVE_SYSROOT}${libdir_native}',
+ '-L@{OECORE_NATIVE_SYSROOT}${base_libdir_native}',
+ '-Wl,-rpath-link,@{OECORE_NATIVE_SYSROOT}${libdir_native}',
+ '-Wl,-rpath-link,@{OECORE_NATIVE_SYSROOT}${base_libdir_native}',
+ '-Wl,--allow-shlib-undefined'
+ ]
+ build_arch = d.getVar('BUILD_ARCH')
+ if 'x86_64' in build_arch:
+ loader = 'ld-linux-x86-64.so.2'
+ elif 'i686' in build_arch:
+ loader = 'ld-linux.so.2'
+ elif 'aarch64' in build_arch:
+ loader = 'ld-linux-aarch64.so.1'
+ elif 'ppc64le' in build_arch:
+ loader = 'ld64.so.2'
+ elif 'loongarch64' in build_arch:
+ loader = 'ld-linux-loongarch-lp64d.so.1'
+ elif 'riscv64' in build_arch:
+ loader = 'ld-linux-riscv64-lp64d.so.1'
+
+ if loader:
+ val += ['-Wl,--dynamic-linker=@{OECORE_NATIVE_SYSROOT}${base_libdir_native}/' + loader]
+
+ return repr(val)
+
+install_templates() {
+ install -d ${D}${datadir}/meson
+
+ cat >${D}${datadir}/meson/meson.native.template <<EOF
+[binaries]
+c = ${@meson_array('BUILD_CC', d)}
+cpp = ${@meson_array('BUILD_CXX', d)}
+ar = ${@meson_array('BUILD_AR', d)}
+nm = ${@meson_array('BUILD_NM', d)}
+strip = ${@meson_array('BUILD_STRIP', d)}
+readelf = ${@meson_array('BUILD_READELF', d)}
+pkgconfig = 'pkg-config-native'
+
+[built-in options]
+c_args = ['-isystem@{OECORE_NATIVE_SYSROOT}${includedir_native}' , ${@var_list2str('BUILD_OPTIMIZATION', d)}]
+c_link_args = ${@generate_native_link_template(d)}
+cpp_args = ['-isystem@{OECORE_NATIVE_SYSROOT}${includedir_native}' , ${@var_list2str('BUILD_OPTIMIZATION', d)}]
+cpp_link_args = ${@generate_native_link_template(d)}
+[properties]
+sys_root = '@OECORE_NATIVE_SYSROOT'
+EOF
+
+ cat >${D}${datadir}/meson/meson.cross.template <<EOF
+[binaries]
+c = @CC
+cpp = @CXX
+ar = @AR
+nm = @NM
+strip = @STRIP
+pkgconfig = 'pkg-config'
+
+[built-in options]
+c_args = @CFLAGS
+c_link_args = @LDFLAGS
+cpp_args = @CPPFLAGS
+cpp_link_args = @LDFLAGS
+
+[properties]
+needs_exe_wrapper = true
+sys_root = @OECORE_TARGET_SYSROOT
+
+[host_machine]
+system = '$host_system'
+cpu_family = '$host_cpu_family'
+cpu = '$host_cpu'
+endian = '$host_endian'
+EOF
+}
+
+do_install:append:class-nativesdk() {
+ host_system=${SDK_OS}
+ host_cpu_family=${@meson_cpu_family("SDK_ARCH", d)}
+ host_cpu=${SDK_ARCH}
+ host_endian=${@meson_endian("SDK", d)}
+ install_templates
+
+ install -d ${D}${SDKPATHNATIVE}/post-relocate-setup.d
+ install -m 0755 ${WORKDIR}/meson-setup.py ${D}${SDKPATHNATIVE}/post-relocate-setup.d/
+
+ # We need to wrap the real meson with a thin env setup wrapper.
+ mv ${D}${bindir}/meson ${D}${bindir}/meson.real
+ install -m 0755 ${WORKDIR}/meson-wrapper ${D}${bindir}/meson
+}
+
+FILES:${PN}:append:class-nativesdk = "${datadir}/meson ${SDKPATHNATIVE}"
+
+do_install:append:class-native() {
+ host_system=${HOST_OS}
+ host_cpu_family=${@meson_cpu_family("HOST_ARCH", d)}
+ host_cpu=${HOST_ARCH}
+ host_endian=${@meson_endian("HOST", d)}
+ install_templates
+
+ install -d ${D}${datadir}/post-relocate-setup.d
+ install -m 0755 ${WORKDIR}/meson-setup.py ${D}${datadir}/post-relocate-setup.d/
+
+ # We need to wrap the real meson with a thin wrapper that substitues native/cross files
+ # when running in a direct SDK environment.
+ mv ${D}${bindir}/meson ${D}${bindir}/meson.real
+ install -m 0755 ${WORKDIR}/meson-wrapper ${D}${bindir}/meson
+}
diff --git a/meta/recipes-devtools/mmc/mmc-utils_git.bb b/meta/recipes-devtools/mmc/mmc-utils_git.bb
index 3e611d1306..a7e4d369ff 100644
--- a/meta/recipes-devtools/mmc/mmc-utils_git.bb
+++ b/meta/recipes-devtools/mmc/mmc-utils_git.bb
@@ -5,11 +5,11 @@ LICENSE = "GPL-2.0-only"
LIC_FILES_CHKSUM = "file://mmc.c;beginline=1;endline=20;md5=fae32792e20f4d27ade1c5a762d16b7d"
SRCBRANCH ?= "master"
-SRCREV = "b7e4d5a6ae9942d26a11de9b05ae7d52c0802802"
+SRCREV = "b5ca140312d279ad2f22068fd72a6230eea13436"
-PV = "0.1+git${SRCPV}"
+PV = "0.1+git"
-SRC_URI = "git://git.kernel.org/pub/scm/linux/kernel/git/cjb/mmc-utils.git;branch=${SRCBRANCH}"
+SRC_URI = "git://git.kernel.org/pub/scm/linux/kernel/git/cjb/mmc-utils.git;branch=${SRCBRANCH};protocol=https"
UPSTREAM_CHECK_COMMITS = "1"
S = "${WORKDIR}/git"
diff --git a/meta/recipes-devtools/mtd/mtd-utils/add-exclusion-to-mkfs-jffs2-git-2.patch b/meta/recipes-devtools/mtd/mtd-utils/add-exclusion-to-mkfs-jffs2-git-2.patch
deleted file mode 100644
index 5d874d9810..0000000000
--- a/meta/recipes-devtools/mtd/mtd-utils/add-exclusion-to-mkfs-jffs2-git-2.patch
+++ /dev/null
@@ -1,105 +0,0 @@
-Upstream-Status: Pending
-
-Index: git/jffsX-utils/mkfs.jffs2.c
-===================================================================
---- git.orig/jffsX-utils/mkfs.jffs2.c
-+++ git/jffsX-utils/mkfs.jffs2.c
-@@ -100,6 +100,11 @@ struct filesystem_entry {
- struct rb_node hardlink_rb;
- };
-
-+struct ignorepath_entry {
-+ struct ignorepath_entry* next; /* Points to the next ignorepath element */
-+ char name[PATH_MAX]; /* Name of the entry */
-+};
-+static struct ignorepath_entry* ignorepath = 0;
- struct rb_root hardlinks;
- static int out_fd = -1;
- static int in_fd = -1;
-@@ -309,7 +314,7 @@ static struct filesystem_entry *recursiv
- char *hpath, *tpath;
- struct dirent *dp, **namelist;
- struct filesystem_entry *entry;
--
-+ struct ignorepath_entry* element = ignorepath;
-
- if (lstat(hostpath, &sb)) {
- sys_errmsg_die("%s", hostpath);
-@@ -318,6 +323,15 @@ static struct filesystem_entry *recursiv
- entry = add_host_filesystem_entry(targetpath, hostpath,
- sb.st_uid, sb.st_gid, sb.st_mode, 0, parent);
-
-+ while ( element ) {
-+ if ( strcmp( element->name, targetpath ) == 0 ) {
-+ printf( "Note: ignoring directories below '%s'\n", targetpath );
-+ return entry;
-+ break;
-+ }
-+ element = element->next;
-+ }
-+
- n = scandir(hostpath, &namelist, 0, alphasort);
- if (n < 0) {
- sys_errmsg_die("opening directory %s", hostpath);
-@@ -1359,6 +1373,7 @@ static struct option long_options[] = {
- {"root", 1, NULL, 'r'},
- {"pagesize", 1, NULL, 's'},
- {"eraseblock", 1, NULL, 'e'},
-+ {"ignore", 1, NULL, 'I'},
- {"output", 1, NULL, 'o'},
- {"help", 0, NULL, 'h'},
- {"verbose", 0, NULL, 'v'},
-@@ -1409,6 +1424,7 @@ static const char helptext[] =
- " -L, --list-compressors Show the list of the available compressors\n"
- " -t, --test-compression Call decompress and compare with the original (for test)\n"
- " -n, --no-cleanmarkers Don't add a cleanmarker to every eraseblock\n"
-+" -I, --ignore=PATH Ignore sub directory and file tree below PATH when recursing over the file system\n"
- " -o, --output=FILE Output to FILE (default: stdout)\n"
- " -l, --little-endian Create a little-endian filesystem\n"
- " -b, --big-endian Create a big-endian filesystem\n"
-@@ -1566,6 +1582,7 @@ int main(int argc, char **argv)
- char *compr_name = NULL;
- int compr_prior = -1;
- int warn_page_size = 0;
-+ struct ignorepath_entry* element = ignorepath;
-
- page_size = sysconf(_SC_PAGESIZE);
- if (page_size < 0) /* System doesn't know so ... */
-@@ -1576,7 +1593,7 @@ int main(int argc, char **argv)
- jffs2_compressors_init();
-
- while ((opt = getopt_long(argc, argv,
-- "D:d:r:s:o:qUPfh?vVe:lbp::nc:m:x:X:Lty:i:", long_options, &c)) >= 0)
-+ "D:d:r:s:I:o:qUPfh?vVe:lbp::nc:m:x:X:Lty:i:", long_options, &c)) >= 0)
- {
- switch (opt) {
- case 'D':
-@@ -1600,6 +1617,28 @@ int main(int argc, char **argv)
- warn_page_size = 0; /* set by user, so don't need to warn */
- break;
-
-+ case 'I':
-+ printf( "Note: Adding '%s' to ignore Path\n", optarg );
-+ element = ignorepath;
-+ if ( !ignorepath ) {
-+ ignorepath = xmalloc( sizeof( struct ignorepath_entry ) );
-+ ignorepath->next = 0;
-+ strcpy( &ignorepath->name[0], optarg );
-+ } else {
-+ while ( element->next ) element = element->next;
-+ element->next = xmalloc( sizeof( struct ignorepath_entry ) );
-+ element->next->next = 0;
-+ strcpy( &element->next->name[0], optarg );
-+ }
-+ printf( "--------- Dumping ignore path list ----------------\n" );
-+ element = ignorepath;
-+ while ( element ) {
-+ printf( " * '%s'\n", &element->name[0] );
-+ element = element->next;
-+ }
-+ printf( "---------------------------------------------------\n" );
-+ break;
-+
- case 'o':
- if (out_fd != -1) {
- errmsg_die("output filename specified more than once");
diff --git a/meta/recipes-devtools/mtd/mtd-utils_git.bb b/meta/recipes-devtools/mtd/mtd-utils_git.bb
index 3318277477..a40d79c864 100644
--- a/meta/recipes-devtools/mtd/mtd-utils_git.bb
+++ b/meta/recipes-devtools/mtd/mtd-utils_git.bb
@@ -11,12 +11,10 @@ inherit autotools pkgconfig update-alternatives
DEPENDS = "zlib e2fsprogs util-linux"
RDEPENDS:mtd-utils-tests += "bash"
-PV = "2.1.4"
+PV = "2.2.0"
-SRCREV = "c7f1bfa44a84d02061787e2f6093df5cc40b9f5c"
-SRC_URI = "git://git.infradead.org/mtd-utils.git;branch=master \
- file://add-exclusion-to-mkfs-jffs2-git-2.patch \
- "
+SRCREV = "31e990c56aba7584cde310685d663bb122f16003"
+SRC_URI = "git://git.infradead.org/mtd-utils.git;branch=master"
S = "${WORKDIR}/git"
@@ -57,6 +55,14 @@ ALTERNATIVE_LINK_NAME[flashcp] = "${sbindir}/flashcp"
do_install () {
oe_runmake install DESTDIR=${D} SBINDIR=${sbindir} MANDIR=${mandir} INCLUDEDIR=${includedir}
+ install -d ${D}${includedir}/mtd
+ install -d ${D}${libdir}
+ install -m 0644 ${S}/include/libubi.h ${D}${includedir}
+ install -m 0644 ${S}/include/libmtd.h ${D}${includedir}
+ install -m 0644 ${S}/include/libscan.h ${D}${includedir}
+ install -m 0644 ${S}/include/libubigen.h ${D}${includedir}
+ oe_libinstall -a libubi ${D}${libdir}/
+ oe_libinstall -a libmtd ${D}${libdir}/
}
PACKAGES =+ "mtd-utils-misc mtd-utils-tests"
diff --git a/meta/recipes-devtools/mtools/mtools/disable-hardcoded-configs.patch b/meta/recipes-devtools/mtools/mtools/disable-hardcoded-configs.patch
index 25ee2b74b5..57be935487 100644
--- a/meta/recipes-devtools/mtools/mtools/disable-hardcoded-configs.patch
+++ b/meta/recipes-devtools/mtools/mtools/disable-hardcoded-configs.patch
@@ -1,4 +1,4 @@
-From 142f40b80ea908831ef7a4d694cec6a5985a0724 Mon Sep 17 00:00:00 2001
+From 2ef9b371a5cc44e730143e694d71665831fac216 Mon Sep 17 00:00:00 2001
From: Ed Bartosh <ed.bartosh@linux.intel.com>
Date: Tue, 13 Jun 2017 14:55:52 +0300
Subject: [PATCH] Disabled reading host configs.
@@ -12,10 +12,10 @@ Signed-off-by: Ed Bartosh <ed.bartosh@linux.intel.com>
1 file changed, 8 deletions(-)
diff --git a/config.c b/config.c
-index 173eae0..0654cc1 100644
+index 2f6a297..3181ed7 100644
--- a/config.c
+++ b/config.c
-@@ -836,14 +836,6 @@ void read_config(void)
+@@ -844,14 +844,6 @@ void read_config(void)
memcpy(devices, const_devices,
nr_const_devices*sizeof(struct device));
diff --git a/meta/recipes-devtools/mtools/mtools_4.0.39.bb b/meta/recipes-devtools/mtools/mtools_4.0.39.bb
deleted file mode 100644
index 564ef02aee..0000000000
--- a/meta/recipes-devtools/mtools/mtools_4.0.39.bb
+++ /dev/null
@@ -1,49 +0,0 @@
-SUMMARY = "Utilities to access MS-DOS disks without mounting them"
-DESCRIPTION = "Mtools is a collection of utilities to access MS-DOS disks from GNU and Unix without mounting them."
-HOMEPAGE = "http://www.gnu.org/software/mtools/"
-SECTION = "optional"
-LICENSE = "GPL-3.0-only"
-LIC_FILES_CHKSUM = "file://COPYING;md5=d32239bcb673463ab874e80d47fae504"
-
-DEPENDS += "virtual/libiconv"
-
-RDEPENDS:${PN}:libc-glibc = "glibc-gconv-ibm850"
-RRECOMMENDS:${PN}:libc-glibc = "\
- glibc-gconv-ibm437 \
- glibc-gconv-ibm737 \
- glibc-gconv-ibm775 \
- glibc-gconv-ibm851 \
- glibc-gconv-ibm852 \
- glibc-gconv-ibm855 \
- glibc-gconv-ibm857 \
- glibc-gconv-ibm860 \
- glibc-gconv-ibm861 \
- glibc-gconv-ibm862 \
- glibc-gconv-ibm863 \
- glibc-gconv-ibm865 \
- glibc-gconv-ibm866 \
- glibc-gconv-ibm869 \
- "
-SRC_URI[sha256sum] = "397f1e2b7b7a2a270eb7970fa363e445f956926ec51e8170c3869da85b0987bd"
-
-SRC_URI = "${GNU_MIRROR}/mtools/mtools-${PV}.tar.bz2 \
- file://mtools-makeinfo.patch \
- file://clang_UNUSED.patch \
- "
-
-SRC_URI:append:class-native = " file://disable-hardcoded-configs.patch"
-
-inherit autotools texinfo
-
-EXTRA_OECONF = "--without-x"
-
-BBCLASSEXTEND = "native nativesdk"
-
-PACKAGECONFIG ??= ""
-PACKAGECONFIG[libbsd] = "ac_cv_lib_bsd_main=yes,ac_cv_lib_bsd_main=no,libbsd"
-
-do_install:prepend () {
- # Create bindir to fix parallel installation issues
- mkdir -p ${D}/${bindir}
- mkdir -p ${D}/${datadir}
-}
diff --git a/meta/recipes-devtools/mtools/mtools_4.0.43.bb b/meta/recipes-devtools/mtools/mtools_4.0.43.bb
new file mode 100644
index 0000000000..859103979e
--- /dev/null
+++ b/meta/recipes-devtools/mtools/mtools_4.0.43.bb
@@ -0,0 +1,49 @@
+SUMMARY = "Utilities to access MS-DOS disks without mounting them"
+DESCRIPTION = "Mtools is a collection of utilities to access MS-DOS disks from GNU and Unix without mounting them."
+HOMEPAGE = "http://www.gnu.org/software/mtools/"
+SECTION = "optional"
+LICENSE = "GPL-3.0-only"
+LIC_FILES_CHKSUM = "file://COPYING;md5=d32239bcb673463ab874e80d47fae504"
+
+DEPENDS += "virtual/libiconv"
+
+RDEPENDS:${PN}:libc-glibc = "glibc-gconv-ibm850"
+RRECOMMENDS:${PN}:libc-glibc = "\
+ glibc-gconv-ibm437 \
+ glibc-gconv-ibm737 \
+ glibc-gconv-ibm775 \
+ glibc-gconv-ibm851 \
+ glibc-gconv-ibm852 \
+ glibc-gconv-ibm855 \
+ glibc-gconv-ibm857 \
+ glibc-gconv-ibm860 \
+ glibc-gconv-ibm861 \
+ glibc-gconv-ibm862 \
+ glibc-gconv-ibm863 \
+ glibc-gconv-ibm865 \
+ glibc-gconv-ibm866 \
+ glibc-gconv-ibm869 \
+ "
+SRC_URI[sha256sum] = "541e179665dc4e272b9602f2074243591a157da89cc47064da8c5829dbd2b339"
+
+SRC_URI = "${GNU_MIRROR}/mtools/mtools-${PV}.tar.bz2 \
+ file://mtools-makeinfo.patch \
+ file://clang_UNUSED.patch \
+ "
+
+SRC_URI:append:class-native = " file://disable-hardcoded-configs.patch"
+
+inherit autotools texinfo
+
+EXTRA_OECONF = "--without-x"
+
+BBCLASSEXTEND = "native nativesdk"
+
+PACKAGECONFIG ??= ""
+PACKAGECONFIG[libbsd] = "ac_cv_lib_bsd_main=yes,ac_cv_lib_bsd_main=no,libbsd"
+
+do_install:prepend () {
+ # Create bindir to fix parallel installation issues
+ mkdir -p ${D}/${bindir}
+ mkdir -p ${D}/${datadir}
+}
diff --git a/meta/recipes-devtools/nasm/nasm/0001-stdlib-Add-strlcat.patch b/meta/recipes-devtools/nasm/nasm/0001-stdlib-Add-strlcat.patch
index 0ede8a8328..1b8e947c56 100644
--- a/meta/recipes-devtools/nasm/nasm/0001-stdlib-Add-strlcat.patch
+++ b/meta/recipes-devtools/nasm/nasm/0001-stdlib-Add-strlcat.patch
@@ -1,4 +1,4 @@
-From 1c5023002bad3a5b0bbc181fdb324160beace733 Mon Sep 17 00:00:00 2001
+From 680220e772dfa381829983fa73b915416f676894 Mon Sep 17 00:00:00 2001
From: Joshua Watt <JPEWhacker@gmail.com>
Date: Tue, 19 Nov 2019 12:47:30 -0600
Subject: [PATCH] stdlib: Add strlcat
@@ -17,23 +17,23 @@ Signed-off-by: Joshua Watt <JPEWhacker@gmail.com>
create mode 100644 stdlib/strlcat.c
diff --git a/Makefile.in b/Makefile.in
-index bfae1f8..156dc4c 100644
+index b85ebee..045fabe 100644
--- a/Makefile.in
+++ b/Makefile.in
-@@ -101,7 +101,7 @@ NASM = asm/nasm.$(O)
- NDISASM = disasm/ndisasm.$(O)
+@@ -104,7 +104,7 @@ PROGOBJ = $(NASM) $(NDISASM)
+ PROGS = nasm$(X) ndisasm$(X)
- LIBOBJ = stdlib/snprintf.$(O) stdlib/vsnprintf.$(O) stdlib/strlcpy.$(O) \
+ LIBOBJ_NW = stdlib/snprintf.$(O) stdlib/vsnprintf.$(O) stdlib/strlcpy.$(O) \
- stdlib/strnlen.$(O) stdlib/strrchrnul.$(O) \
+ stdlib/strnlen.$(O) stdlib/strrchrnul.$(O) stdlib/strlcat.$(O) \
\
nasmlib/ver.$(O) \
nasmlib/alloc.$(O) nasmlib/asprintf.$(O) nasmlib/errfile.$(O) \
diff --git a/configure.ac b/configure.ac
-index 7b72769..14fd033 100644
+index 42cd198..e206338 100644
--- a/configure.ac
+++ b/configure.ac
-@@ -234,6 +234,7 @@ PA_FUNC_SNPRINTF
+@@ -236,6 +236,7 @@ PA_FUNC_SNPRINTF
PA_FUNC_VSNPRINTF
AC_CHECK_FUNCS([strlcpy])
AC_CHECK_FUNCS([strrchrnul])
@@ -41,7 +41,7 @@ index 7b72769..14fd033 100644
dnl These types are POSIX-specific, and Windows does it differently...
AC_CHECK_TYPES([struct _stati64])
-@@ -253,6 +254,7 @@ AC_CHECK_DECLS(strsep)
+@@ -255,6 +256,7 @@ AC_CHECK_DECLS(strsep)
AC_CHECK_DECLS(strlcpy)
AC_CHECK_DECLS(strnlen)
AC_CHECK_DECLS(strrchrnul)
@@ -50,7 +50,7 @@ index 7b72769..14fd033 100644
dnl Check for missing types
AC_TYPE_UINTPTR_T
diff --git a/include/compiler.h b/include/compiler.h
-index b4fd3a8..7fb4821 100644
+index 407c160..b64da6a 100644
--- a/include/compiler.h
+++ b/include/compiler.h
@@ -169,6 +169,10 @@ size_t strlcpy(char *, const char *, size_t);
diff --git a/meta/recipes-devtools/nasm/nasm/0002-Add-debug-prefix-map-option.patch b/meta/recipes-devtools/nasm/nasm/0002-Add-debug-prefix-map-option.patch
index 9f4c8dc0bd..84fcca0fe1 100644
--- a/meta/recipes-devtools/nasm/nasm/0002-Add-debug-prefix-map-option.patch
+++ b/meta/recipes-devtools/nasm/nasm/0002-Add-debug-prefix-map-option.patch
@@ -1,4 +1,4 @@
-From 81d6519499dcfebe7d21e65e002a8885a4e8d852 Mon Sep 17 00:00:00 2001
+From e28c8883050d34d18ee2d66dfeece51e13adb6d5 Mon Sep 17 00:00:00 2001
From: Joshua Watt <JPEWhacker@gmail.com>
Date: Tue, 19 Nov 2019 13:12:17 -0600
Subject: [PATCH] Add --debug-prefix-map option
@@ -17,17 +17,17 @@ Signed-off-by: Joshua Watt <JPEWhacker@gmail.com>
nasmlib/filename.c | 20 ++++++++++++++++++++
output/outas86.c | 4 +++-
output/outcoff.c | 4 ++--
- output/outelf.c | 2 +-
+ output/outelf.c | 13 ++++++++-----
output/outieee.c | 2 +-
output/outobj.c | 2 +-
stdlib/strlcat.c | 2 +-
test/elfdebugprefix.asm | 6 ++++++
test/performtest.pl | 12 ++++++++++--
- 12 files changed, 82 insertions(+), 9 deletions(-)
+ 12 files changed, 89 insertions(+), 13 deletions(-)
create mode 100644 test/elfdebugprefix.asm
diff --git a/asm/nasm.c b/asm/nasm.c
-index e5ae89a..7a7f8b4 100644
+index 76c70f6..08ff119 100644
--- a/asm/nasm.c
+++ b/asm/nasm.c
@@ -939,6 +939,7 @@ enum text_options {
@@ -46,7 +46,7 @@ index e5ae89a..7a7f8b4 100644
{"reproducible", OPT_REPRODUCIBLE, ARG_NO, 0},
{NULL, OPT_BOGUS, ARG_NO, 0}
};
-@@ -1337,6 +1339,26 @@ static bool process_arg(char *p, char *q, int pass)
+@@ -1335,6 +1337,26 @@ static bool process_arg(char *p, char *q, int pass)
case OPT_REPRODUCIBLE:
reproducible = true;
break;
@@ -73,7 +73,7 @@ index e5ae89a..7a7f8b4 100644
case OPT_HELP:
help(stdout);
exit(0);
-@@ -2304,6 +2326,8 @@ static void help(FILE *out)
+@@ -2298,6 +2320,8 @@ static void help(FILE *out)
" -w-x disable warning x (also -Wno-x)\n"
" -w[+-]error promote all warnings to errors (also -Werror)\n"
" -w[+-]error=x promote warning x to errors (also -Werror=x)\n"
@@ -83,7 +83,7 @@ index e5ae89a..7a7f8b4 100644
fprintf(out, " %-20s %s\n",
diff --git a/include/nasmlib.h b/include/nasmlib.h
-index 438178d..4c3e90d 100644
+index 87a7fc6..a3e5144 100644
--- a/include/nasmlib.h
+++ b/include/nasmlib.h
@@ -250,10 +250,19 @@ int64_t readstrnum(char *str, int length, bool *warn);
@@ -107,7 +107,7 @@ index 438178d..4c3e90d 100644
/*
* Utility macros...
diff --git a/nasm.txt b/nasm.txt
-index cc7fa27..d3485c9 100644
+index 950c361..784618c 100644
--- a/nasm.txt
+++ b/nasm.txt
@@ -147,6 +147,10 @@ OPTIONS
@@ -179,10 +179,10 @@ index 54b22f8..c4a412c 100644
static void as86_cleanup(void)
diff --git a/output/outcoff.c b/output/outcoff.c
-index 58fa024..14baf7b 100644
+index c2b4eb6..e242db2 100644
--- a/output/outcoff.c
+++ b/output/outcoff.c
-@@ -1072,14 +1072,14 @@ static void coff_symbol(char *name, int32_t strpos, int32_t value,
+@@ -1259,7 +1259,7 @@ static void coff_symbol(char *name, int32_t strpos, int32_t value,
static void coff_write_symbols(void)
{
@@ -191,29 +191,61 @@ index 58fa024..14baf7b 100644
uint32_t i;
/*
- * The `.file' record, and the file name auxiliary record.
- */
- coff_symbol(".file", 0L, 0L, -2, 0, 0x67, 1);
-- strncpy(filename, inname, 18);
-+ filename_debug_remap(filename, inname, 19);
+@@ -1269,7 +1269,7 @@ static void coff_write_symbols(void)
+ if (reproducible)
+ memset(filename, 0, 18);
+ else
+- strncpy(filename, inname, 18);
++ filename_debug_remap(filename, inname, 19);
nasm_write(filename, 18, ofile);
/*
diff --git a/output/outelf.c b/output/outelf.c
-index 61af020..1292958 100644
+index ad8d210..29f1dc1 100644
--- a/output/outelf.c
+++ b/output/outelf.c
-@@ -553,7 +553,7 @@ static void elf_init(void)
- };
+@@ -546,8 +546,8 @@ static void elf_init(void)
const char * const *p;
+ const char * cur_path = nasm_realpath(inname);
- strlcpy(elf_module, inname, sizeof(elf_module));
+- strlcpy(elf_dir, nasm_dirname(cur_path), sizeof(elf_dir));
+ filename_debug_remap(elf_module, inname, sizeof(elf_module));
++ filename_debug_remap(elf_dir, nasm_dirname(cur_path), sizeof(elf_dir));
sects = NULL;
nsects = sectlen = 0;
syms = saa_init((int32_t)sizeof(struct elf_symbol));
+@@ -3590,13 +3590,17 @@ static void dwarf_findfile(const char * fname)
+ if (dwarf_clist && !(strcmp(fname, dwarf_clist->filename)))
+ return;
+
++ char * fname_remapped = nasm_malloc(FILENAME_MAX);
++ filename_debug_remap(fname_remapped,fname,FILENAME_MAX);
++
+ /* search for match */
+ match = 0;
+ if (dwarf_flist) {
+ match = dwarf_flist;
+ for (finx = 0; finx < dwarf_numfiles; finx++) {
+- if (!(strcmp(fname, match->filename))) {
++ if (!(strcmp(fname_remapped, match->filename))) {
+ dwarf_clist = match;
++ nasm_free(fname_remapped);
+ return;
+ }
+ match = match->next;
+@@ -3607,8 +3611,7 @@ static void dwarf_findfile(const char * fname)
+ dwarf_clist = nasm_malloc(sizeof(struct linelist));
+ dwarf_numfiles++;
+ dwarf_clist->line = dwarf_numfiles;
+- dwarf_clist->filename = nasm_malloc(strlen(fname) + 1);
+- strcpy(dwarf_clist->filename,fname);
++ dwarf_clist->filename = fname_remapped;
+ dwarf_clist->next = 0;
+ if (!dwarf_flist) { /* if first entry */
+ dwarf_flist = dwarf_elist = dwarf_clist;
diff --git a/output/outieee.c b/output/outieee.c
-index 6d6d4b2..cdb8333 100644
+index 7ba9036..796e5af 100644
--- a/output/outieee.c
+++ b/output/outieee.c
@@ -207,7 +207,7 @@ static void ieee_unqualified_name(char *, char *);
@@ -226,7 +258,7 @@ index 6d6d4b2..cdb8333 100644
fpubhead = NULL;
fpubtail = &fpubhead;
diff --git a/output/outobj.c b/output/outobj.c
-index 56b43f9..fefea94 100644
+index 281839d..fc336c1 100644
--- a/output/outobj.c
+++ b/output/outobj.c
@@ -644,7 +644,7 @@ static enum directive_result obj_directive(enum directive, char *);
@@ -264,7 +296,7 @@ index 0000000..a67ba29
+ ret
+
diff --git a/test/performtest.pl b/test/performtest.pl
-index f7865b3..096f960 100755
+index 46b1bdf..2426848 100755
--- a/test/performtest.pl
+++ b/test/performtest.pl
@@ -42,14 +42,22 @@ sub perform {
diff --git a/meta/recipes-devtools/nasm/nasm_2.15.05.bb b/meta/recipes-devtools/nasm/nasm_2.15.05.bb
deleted file mode 100644
index edc17aeebf..0000000000
--- a/meta/recipes-devtools/nasm/nasm_2.15.05.bb
+++ /dev/null
@@ -1,23 +0,0 @@
-SUMMARY = "General-purpose x86 assembler"
-SECTION = "devel"
-HOMEPAGE = "http://www.nasm.us/"
-DESCRIPTION = "The Netwide Assembler (NASM) is an assembler and disassembler for the Intel x86 architecture."
-LICENSE = "BSD-2-Clause"
-LIC_FILES_CHKSUM = "file://LICENSE;md5=90904486f8fbf1861cf42752e1a39efe"
-
-SRC_URI = "http://www.nasm.us/pub/nasm/releasebuilds/${PV}/nasm-${PV}.tar.bz2 \
- file://0001-stdlib-Add-strlcat.patch \
- file://0002-Add-debug-prefix-map-option.patch \
- "
-
-SRC_URI[sha256sum] = "3c4b8339e5ab54b1bcb2316101f8985a5da50a3f9e504d43fa6f35668bee2fd0"
-
-EXTRA_AUTORECONF:append = " -I autoconf/m4"
-
-inherit autotools-brokensep
-
-BBCLASSEXTEND = "native"
-
-DEPENDS = "groff-native"
-
-CVE_PRODUCT = "netwide_assembler"
diff --git a/meta/recipes-devtools/nasm/nasm_2.16.01.bb b/meta/recipes-devtools/nasm/nasm_2.16.01.bb
new file mode 100644
index 0000000000..219cc49360
--- /dev/null
+++ b/meta/recipes-devtools/nasm/nasm_2.16.01.bb
@@ -0,0 +1,23 @@
+SUMMARY = "General-purpose x86 assembler"
+SECTION = "devel"
+HOMEPAGE = "http://www.nasm.us/"
+DESCRIPTION = "The Netwide Assembler (NASM) is an assembler and disassembler for the Intel x86 architecture."
+LICENSE = "BSD-2-Clause"
+LIC_FILES_CHKSUM = "file://LICENSE;md5=90904486f8fbf1861cf42752e1a39efe"
+
+SRC_URI = "http://www.nasm.us/pub/nasm/releasebuilds/${PV}/nasm-${PV}.tar.bz2 \
+ file://0001-stdlib-Add-strlcat.patch \
+ file://0002-Add-debug-prefix-map-option.patch \
+ "
+
+SRC_URI[sha256sum] = "35b6ad2ee048d41c4779f073f3efca7762a822b7d2d4ef4e8df24cf65747bb2e"
+
+EXTRA_AUTORECONF:append = " -I autoconf/m4"
+
+inherit autotools-brokensep
+
+BBCLASSEXTEND = "native"
+
+DEPENDS = "groff-native"
+
+CVE_PRODUCT = "netwide_assembler"
diff --git a/meta/recipes-devtools/ninja/ninja_1.10.2.bb b/meta/recipes-devtools/ninja/ninja_1.10.2.bb
deleted file mode 100644
index 7270321d6e..0000000000
--- a/meta/recipes-devtools/ninja/ninja_1.10.2.bb
+++ /dev/null
@@ -1,31 +0,0 @@
-SUMMARY = "Ninja is a small build system with a focus on speed."
-HOMEPAGE = "https://ninja-build.org/"
-DESCRIPTION = "Ninja is a small build system with a focus on speed. It differs from other build systems in two major respects: it is designed to have its input files generated by a higher-level build system, and it is designed to run builds as fast as possible."
-LICENSE = "Apache-2.0"
-LIC_FILES_CHKSUM = "file://COPYING;md5=a81586a64ad4e476c791cda7e2f2c52e"
-
-DEPENDS = "re2c-native ninja-native"
-
-SRCREV = "e72d1d581c945c158ed68d9bc48911063022a2c6"
-
-SRC_URI = "git://github.com/ninja-build/ninja.git;branch=release;protocol=https"
-UPSTREAM_CHECK_GITTAGREGEX = "v(?P<pver>.*)"
-
-S = "${WORKDIR}/git"
-
-do_configure[noexec] = "1"
-
-do_compile:class-native() {
- python3 ./configure.py --bootstrap
-}
-
-do_compile() {
- python3 ./configure.py
- ninja
-}
-
-do_install() {
- install -D -m 0755 ${S}/ninja ${D}${bindir}/ninja
-}
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/ninja/ninja_1.11.1.bb b/meta/recipes-devtools/ninja/ninja_1.11.1.bb
new file mode 100644
index 0000000000..8e297ec4d4
--- /dev/null
+++ b/meta/recipes-devtools/ninja/ninja_1.11.1.bb
@@ -0,0 +1,33 @@
+SUMMARY = "Ninja is a small build system with a focus on speed."
+HOMEPAGE = "https://ninja-build.org/"
+DESCRIPTION = "Ninja is a small build system with a focus on speed. It differs from other build systems in two major respects: it is designed to have its input files generated by a higher-level build system, and it is designed to run builds as fast as possible."
+LICENSE = "Apache-2.0"
+LIC_FILES_CHKSUM = "file://COPYING;md5=a81586a64ad4e476c791cda7e2f2c52e"
+
+DEPENDS = "re2c-native ninja-native"
+
+SRCREV = "a524bf3f6bacd1b4ad85d719eed2737d8562f27a"
+
+SRC_URI = "git://github.com/ninja-build/ninja.git;branch=release;protocol=https"
+UPSTREAM_CHECK_GITTAGREGEX = "v(?P<pver>.*)"
+
+S = "${WORKDIR}/git"
+
+do_configure[noexec] = "1"
+
+do_compile:class-native() {
+ python3 ./configure.py --bootstrap
+}
+
+do_compile() {
+ python3 ./configure.py
+ ninja
+}
+
+do_install() {
+ install -D -m 0755 ${S}/ninja ${D}${bindir}/ninja
+}
+
+BBCLASSEXTEND = "native nativesdk"
+
+CVE_STATUS[CVE-2021-4336] = "cpe-incorrect: This is a different Ninja"
diff --git a/meta/recipes-devtools/opkg-utils/opkg-utils/0001-opkg-build-remove-numeric-owner-parameter-overzealou.patch b/meta/recipes-devtools/opkg-utils/opkg-utils/0001-opkg-build-remove-numeric-owner-parameter-overzealou.patch
new file mode 100644
index 0000000000..0cdeb9574a
--- /dev/null
+++ b/meta/recipes-devtools/opkg-utils/opkg-utils/0001-opkg-build-remove-numeric-owner-parameter-overzealou.patch
@@ -0,0 +1,34 @@
+From 19c2c6a14c8760c3595f5dc6cc89cde85fbdbac1 Mon Sep 17 00:00:00 2001
+From: =?UTF-8?q?Piotr=20=C5=81obacz?= <p.lobacz@welotec.com>
+Date: Tue, 9 Jan 2024 09:20:55 +0100
+Subject: [PATCH] opkg-build: remove `--numeric-owner` parameter overzealously
+ added
+
+In some corner cases this parameter has been added overzealously.
+All discussion on this topic can be read in here:
+https://bugzilla.yoctoproject.org/show_bug.cgi?id=15334
+
+Upstream-Status: Backport [https://git.yoctoproject.org/opkg-utils/commit/?id=68a969f0e867ace0d94faf8ebe7c7bb67f59d386]
+
+Signed-off-by: Alex Stewart <alex.stewart@ni.com>
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+---
+ opkg-build | 2 +-
+ 1 file changed, 1 insertion(+), 1 deletion(-)
+
+diff --git a/opkg-build b/opkg-build
+index 6edd938..0582958 100755
+--- a/opkg-build
++++ b/opkg-build
+@@ -337,7 +337,7 @@ export LANG=C
+ export LC_ALL=C
+ ( cd $pkg_dir/$CONTROL && find . -type f | sort > $tmp_dir/control_list )
+ ( cd $pkg_dir && find . -path ./$CONTROL -prune -o -path . -o -print | sort > $tmp_dir/file_list )
+-( cd $pkg_dir && tar $attributesargs $ogargs $tsortargs --numeric-owner --no-recursion $mtime_args -c $tarformat -T $tmp_dir/file_list | $compressor $compressorargs > $tmp_dir/data.tar.$cext )
++( cd $pkg_dir && tar $attributesargs $ogargs $tsortargs --no-recursion $mtime_args -c $tarformat -T $tmp_dir/file_list | $compressor $compressorargs > $tmp_dir/data.tar.$cext )
+ ( cd $pkg_dir/$CONTROL && tar $ogargs $tsortargs --no-recursion $mtime_args -c $tarformat -T $tmp_dir/control_list | gzip $zipargs > $tmp_dir/control.tar.gz )
+ rm $tmp_dir/file_list
+ rm $tmp_dir/control_list
+--
+2.43.0
+
diff --git a/meta/recipes-devtools/opkg-utils/opkg-utils_0.5.0.bb b/meta/recipes-devtools/opkg-utils/opkg-utils_0.5.0.bb
deleted file mode 100644
index e72c171b92..0000000000
--- a/meta/recipes-devtools/opkg-utils/opkg-utils_0.5.0.bb
+++ /dev/null
@@ -1,65 +0,0 @@
-SUMMARY = "Additional utilities for the opkg package manager"
-SUMMARY:update-alternatives-opkg = "Utility for managing the alternatives system"
-SECTION = "base"
-HOMEPAGE = "http://git.yoctoproject.org/cgit/cgit.cgi/opkg-utils"
-LICENSE = "GPL-2.0-or-later"
-LIC_FILES_CHKSUM = "file://COPYING;md5=94d55d512a9ba36caa9b7df079bae19f \
- file://opkg.py;beginline=2;endline=18;md5=ffa11ff3c15eb31c6a7ceaa00cc9f986"
-PROVIDES += "${@bb.utils.contains('PACKAGECONFIG', 'update-alternatives', 'virtual/update-alternatives', '', d)}"
-
-SRC_URI = "http://git.yoctoproject.org/cgit/cgit.cgi/${BPN}/snapshot/${BPN}-${PV}.tar.gz \
- file://0001-update-alternatives-correctly-match-priority.patch \
- "
-UPSTREAM_CHECK_URI = "http://git.yoctoproject.org/cgit/cgit.cgi/opkg-utils/refs/"
-
-SRC_URI[sha256sum] = "55733c0f8ffde2bb4f9593cfd66a1f68e6a2f814e8e62f6fd78472911c818c32"
-
-TARGET_CC_ARCH += "${LDFLAGS}"
-
-RDEPENDS:${PN} += "bash"
-
-inherit perlnative
-
-# For native builds we use the host Python
-PYTHONRDEPS = "python3 python3-shell python3-io python3-math python3-crypt python3-logging python3-fcntl python3-pickle python3-compression python3-stringold"
-PYTHONRDEPS:class-native = ""
-
-PACKAGECONFIG = "python update-alternatives"
-PACKAGECONFIG[python] = ",,,${PYTHONRDEPS}"
-PACKAGECONFIG[update-alternatives] = ",,,"
-
-do_install() {
- oe_runmake PREFIX=${prefix} DESTDIR=${D} install
- if ! ${@bb.utils.contains('PACKAGECONFIG', 'update-alternatives', 'true', 'false', d)}; then
- rm -f "${D}${bindir}/update-alternatives"
- fi
-}
-
-do_install:append:class-target() {
- if ! ${@bb.utils.contains('PACKAGECONFIG', 'python', 'true', 'false', d)}; then
- grep -lZ "/usr/bin/env.*python" ${D}${bindir}/* | xargs -0 rm
- fi
-
- if [ -e "${D}${bindir}/update-alternatives" ]; then
- sed -i ${D}${bindir}/update-alternatives -e 's,/usr/bin,${bindir},g; s,/usr/lib,${nonarch_libdir},g'
- fi
-}
-
-# These are empty and will pull python3-dev into images where it wouldn't
-# have been otherwise, so don't generate them.
-PACKAGES:remove = "${PN}-dev ${PN}-staticdev"
-
-PACKAGES =+ "update-alternatives-opkg"
-FILES:update-alternatives-opkg = "${bindir}/update-alternatives"
-RPROVIDES:update-alternatives-opkg = "update-alternatives update-alternatives-cworth"
-RREPLACES:update-alternatives-opkg = "update-alternatives-cworth"
-RCONFLICTS:update-alternatives-opkg = "update-alternatives-cworth"
-
-pkg_postrm:update-alternatives-opkg() {
- rm -rf $D${nonarch_libdir}/opkg/alternatives
- rmdir $D${nonarch_libdir}/opkg || true
-}
-
-BBCLASSEXTEND = "native nativesdk"
-
-CLEANBROKEN = "1"
diff --git a/meta/recipes-devtools/opkg-utils/opkg-utils_0.6.3.bb b/meta/recipes-devtools/opkg-utils/opkg-utils_0.6.3.bb
new file mode 100644
index 0000000000..e138bc24af
--- /dev/null
+++ b/meta/recipes-devtools/opkg-utils/opkg-utils_0.6.3.bb
@@ -0,0 +1,66 @@
+SUMMARY = "Additional utilities for the opkg package manager"
+SUMMARY:update-alternatives-opkg = "Utility for managing the alternatives system"
+SECTION = "base"
+HOMEPAGE = "http://git.yoctoproject.org/cgit/cgit.cgi/opkg-utils"
+LICENSE = "GPL-2.0-or-later"
+LIC_FILES_CHKSUM = "file://COPYING;md5=94d55d512a9ba36caa9b7df079bae19f \
+ file://opkg.py;beginline=2;endline=18;md5=ffa11ff3c15eb31c6a7ceaa00cc9f986"
+PROVIDES += "${@bb.utils.contains('PACKAGECONFIG', 'update-alternatives', 'virtual/update-alternatives', '', d)}"
+
+SRC_URI = "git://git.yoctoproject.org/opkg-utils;protocol=https;branch=master \
+ file://0001-update-alternatives-correctly-match-priority.patch \
+ file://0001-opkg-build-remove-numeric-owner-parameter-overzealou.patch \
+ "
+SRCREV = "589880d01969eb9af1e66120e731d43193504718"
+
+S = "${WORKDIR}/git"
+
+TARGET_CC_ARCH += "${LDFLAGS}"
+
+RDEPENDS:${PN} += "bash"
+
+inherit perlnative
+
+# For native builds we use the host Python
+PYTHONRDEPS = "python3 python3-shell python3-io python3-math python3-crypt python3-logging python3-fcntl python3-pickle python3-compression python3-stringold"
+PYTHONRDEPS:class-native = ""
+
+PACKAGECONFIG = "python update-alternatives"
+PACKAGECONFIG[python] = ",,,${PYTHONRDEPS}"
+PACKAGECONFIG[update-alternatives] = ",,,"
+
+do_install() {
+ oe_runmake PREFIX=${prefix} DESTDIR=${D} install
+ if ! ${@bb.utils.contains('PACKAGECONFIG', 'update-alternatives', 'true', 'false', d)}; then
+ rm -f "${D}${bindir}/update-alternatives"
+ fi
+}
+
+do_install:append:class-target() {
+ if ! ${@bb.utils.contains('PACKAGECONFIG', 'python', 'true', 'false', d)}; then
+ grep -lZ "/usr/bin/env.*python" ${D}${bindir}/* | xargs -0 rm
+ fi
+
+ if [ -e "${D}${bindir}/update-alternatives" ]; then
+ sed -i ${D}${bindir}/update-alternatives -e 's,/usr/bin,${bindir},g; s,/usr/lib,${nonarch_libdir},g'
+ fi
+}
+
+# These are empty and will pull python3-dev into images where it wouldn't
+# have been otherwise, so don't generate them.
+PACKAGES:remove = "${PN}-dev ${PN}-staticdev"
+
+PACKAGES =+ "update-alternatives-opkg"
+FILES:update-alternatives-opkg = "${bindir}/update-alternatives"
+RPROVIDES:update-alternatives-opkg = "update-alternatives update-alternatives-cworth"
+RREPLACES:update-alternatives-opkg = "update-alternatives-cworth"
+RCONFLICTS:update-alternatives-opkg = "update-alternatives-cworth"
+
+pkg_postrm:update-alternatives-opkg() {
+ rm -rf $D${nonarch_libdir}/opkg/alternatives
+ rmdir $D${nonarch_libdir}/opkg || true
+}
+
+BBCLASSEXTEND = "native nativesdk"
+
+CLEANBROKEN = "1"
diff --git a/meta/recipes-devtools/opkg/opkg-arch-config_1.0.bb b/meta/recipes-devtools/opkg/opkg-arch-config_1.0.bb
index dc17de2e8e..726a259a8c 100644
--- a/meta/recipes-devtools/opkg/opkg-arch-config_1.0.bb
+++ b/meta/recipes-devtools/opkg/opkg-arch-config_1.0.bb
@@ -1,8 +1,7 @@
SUMMARY = "Architecture-dependent configuration for opkg"
-HOMEPAGE = "http://code.google.com/p/opkg/"
+HOMEPAGE = "https://git.yoctoproject.org/opkg/"
LICENSE = "MIT"
PACKAGE_ARCH = "${MACHINE_ARCH}"
-PR = "r1"
S = "${WORKDIR}"
diff --git a/meta/recipes-devtools/opkg/opkg/0001-libopkg-Use-libgen.h-to-provide-basename-API.patch b/meta/recipes-devtools/opkg/opkg/0001-libopkg-Use-libgen.h-to-provide-basename-API.patch
new file mode 100644
index 0000000000..61581e752b
--- /dev/null
+++ b/meta/recipes-devtools/opkg/opkg/0001-libopkg-Use-libgen.h-to-provide-basename-API.patch
@@ -0,0 +1,62 @@
+From 7a1c13a48cf020c40dda1721d5c2ffd95e8e669a Mon Sep 17 00:00:00 2001
+From: Khem Raj <raj.khem@gmail.com>
+Date: Sun, 10 Dec 2023 19:39:29 -0800
+Subject: [PATCH v2] libopkg: Use libgen.h to provide basename API
+
+Also ensure that copy of filename is passed into archive_entry_set_pathname
+so it can be operated upon by posix basename which expect non-const
+character pointer as input.
+
+This became evident with latest musl where basename declaration was
+dropped from string.h [1]
+
+[1] https://git.musl-libc.org/cgit/musl/commit/?id=725e17ed6dff4d0cd22487bb64470881e86a92e7
+
+Upstream-Status: Backport [74fc3a991f974095644897d18d43846b5f359dae]
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+---
+v2: Do not override basename wholesale
+
+ libopkg/opkg_archive.c | 6 ++++--
+ libopkg/opkg_remove.c | 1 +
+ 2 files changed, 5 insertions(+), 2 deletions(-)
+
+diff --git a/libopkg/opkg_archive.c b/libopkg/opkg_archive.c
+index 03a4afb..b099f5b 100644
+--- a/libopkg/opkg_archive.c
++++ b/libopkg/opkg_archive.c
+@@ -20,6 +20,7 @@
+
+ #include <archive.h>
+ #include <archive_entry.h>
++#include <libgen.h>
+ #include <stdlib.h>
+ #include <stdio.h>
+ #include <string.h>
+@@ -797,8 +798,9 @@ int gz_write_archive(const char *filename, const char *gz_filename)
+ }
+
+ /* Remove path hierarchy, as we are only compressing a single file */
+- archive_entry_set_pathname(entry, basename(filename));
+-
++ char* tmp = xstrdup(filename);
++ archive_entry_set_pathname(entry, basename(tmp));
++ free(tmp);
+ r = archive_write_header(a, entry);
+ if (r != ARCHIVE_OK) {
+ opkg_msg(ERROR, "Failed to create compressed file: '%s' : %s (errno=%d)",
+diff --git a/libopkg/opkg_remove.c b/libopkg/opkg_remove.c
+index 889c672..5254388 100644
+--- a/libopkg/opkg_remove.c
++++ b/libopkg/opkg_remove.c
+@@ -20,6 +20,7 @@
+
+ #include "config.h"
+
++#include <libgen.h>
+ #include <stdio.h>
+ #include <glob.h>
+ #include <unistd.h>
+--
+2.43.0
+
diff --git a/meta/recipes-devtools/opkg/opkg_0.5.0.bb b/meta/recipes-devtools/opkg/opkg_0.5.0.bb
deleted file mode 100644
index e91d7250bc..0000000000
--- a/meta/recipes-devtools/opkg/opkg_0.5.0.bb
+++ /dev/null
@@ -1,72 +0,0 @@
-SUMMARY = "Open Package Manager"
-SUMMARY:libopkg = "Open Package Manager library"
-SECTION = "base"
-HOMEPAGE = "http://code.google.com/p/opkg/"
-DESCRIPTION = "Opkg is a lightweight package management system based on Ipkg."
-BUGTRACKER = "http://code.google.com/p/opkg/issues/list"
-LICENSE = "GPL-2.0-or-later"
-LIC_FILES_CHKSUM = "file://COPYING;md5=94d55d512a9ba36caa9b7df079bae19f \
- file://src/opkg.c;beginline=4;endline=18;md5=d6200b0f2b41dee278aa5fad333eecae"
-
-DEPENDS = "libarchive"
-
-PE = "1"
-
-SRC_URI = "http://downloads.yoctoproject.org/releases/${BPN}/${BPN}-${PV}.tar.gz \
- file://opkg.conf \
- file://0001-opkg_conf-create-opkg.lock-in-run-instead-of-var-run.patch \
- file://run-ptest \
-"
-
-SRC_URI[sha256sum] = "559c3e1b893abaa1dd473ce3a9a5f7dd3f60ceb6cd14caaef76ddf0f7721ad1c"
-
-# This needs to be before ptest inherit, otherwise all ptest files end packaged
-# in libopkg package if OPKGLIBDIR == libdir, because default
-# PTEST_PATH ?= "${libdir}/${BPN}/ptest"
-PACKAGES =+ "libopkg"
-
-inherit autotools pkgconfig ptest
-
-target_localstatedir := "${localstatedir}"
-OPKGLIBDIR ??= "${target_localstatedir}/lib"
-
-PACKAGECONFIG ??= "libsolv"
-
-PACKAGECONFIG[gpg] = "--enable-gpg,--disable-gpg,\
- gnupg gpgme libgpg-error,\
- ${@ "gnupg" if ("native" in d.getVar("PN")) else "gnupg-gpg"}\
- "
-PACKAGECONFIG[curl] = "--enable-curl,--disable-curl,curl"
-PACKAGECONFIG[ssl-curl] = "--enable-ssl-curl,--disable-ssl-curl,curl openssl"
-PACKAGECONFIG[sha256] = "--enable-sha256,--disable-sha256"
-PACKAGECONFIG[libsolv] = "--with-libsolv,--without-libsolv,libsolv"
-
-EXTRA_OECONF:class-native = "--localstatedir=/${@os.path.relpath('${localstatedir}', '${STAGING_DIR_NATIVE}')} --sysconfdir=/${@os.path.relpath('${sysconfdir}', '${STAGING_DIR_NATIVE}')}"
-
-do_install:append () {
- install -d ${D}${sysconfdir}/opkg
- install -m 0644 ${WORKDIR}/opkg.conf ${D}${sysconfdir}/opkg/opkg.conf
- echo "option lists_dir ${OPKGLIBDIR}/opkg/lists" >>${D}${sysconfdir}/opkg/opkg.conf
-
- # We need to create the lock directory
- install -d ${D}${OPKGLIBDIR}/opkg
-}
-
-do_install_ptest () {
- sed -i -e '/@echo $^/d' ${D}${PTEST_PATH}/tests/Makefile
- sed -i -e '/@PYTHONPATH=. $(PYTHON) $^/a\\t@if [ "$$?" != "0" ];then echo "FAIL:"$^;else echo "PASS:"$^;fi' ${D}${PTEST_PATH}/tests/Makefile
-}
-
-RDEPENDS:${PN} = "${VIRTUAL-RUNTIME_update-alternatives} opkg-arch-config libarchive"
-RDEPENDS:${PN}:class-native = ""
-RDEPENDS:${PN}:class-nativesdk = ""
-RDEPENDS:${PN}-ptest += "make binutils python3-core python3-compression"
-RREPLACES:${PN} = "opkg-nogpg opkg-collateral"
-RCONFLICTS:${PN} = "opkg-collateral"
-RPROVIDES:${PN} = "opkg-collateral"
-
-FILES:libopkg = "${libdir}/*.so.* ${OPKGLIBDIR}/opkg/"
-
-BBCLASSEXTEND = "native nativesdk"
-
-CONFFILES:${PN} = "${sysconfdir}/opkg/opkg.conf"
diff --git a/meta/recipes-devtools/opkg/opkg_0.6.3.bb b/meta/recipes-devtools/opkg/opkg_0.6.3.bb
new file mode 100644
index 0000000000..9592ffc5d6
--- /dev/null
+++ b/meta/recipes-devtools/opkg/opkg_0.6.3.bb
@@ -0,0 +1,85 @@
+SUMMARY = "Open Package Manager"
+SUMMARY:libopkg = "Open Package Manager library"
+SECTION = "base"
+HOMEPAGE = "https://git.yoctoproject.org/opkg/"
+DESCRIPTION = "Opkg is a lightweight package management system based on Ipkg."
+BUGTRACKER = "https://bugzilla.yoctoproject.org/buglist.cgi?quicksearch=Product%3Aopkg"
+LICENSE = "GPL-2.0-or-later"
+LIC_FILES_CHKSUM = "file://COPYING;md5=94d55d512a9ba36caa9b7df079bae19f \
+ file://src/opkg.c;beginline=4;endline=18;md5=d6200b0f2b41dee278aa5fad333eecae"
+
+DEPENDS = "libarchive zstd"
+
+PE = "1"
+
+SRC_URI = "http://downloads.yoctoproject.org/releases/${BPN}/${BPN}-${PV}.tar.gz \
+ file://opkg.conf \
+ file://0001-opkg_conf-create-opkg.lock-in-run-instead-of-var-run.patch \
+ file://0001-libopkg-Use-libgen.h-to-provide-basename-API.patch \
+ file://run-ptest \
+ "
+
+SRC_URI[sha256sum] = "f3938e359646b406c40d5d442a1467c7e72357f91ab822e442697529641e06de"
+
+# This needs to be before ptest inherit, otherwise all ptest files end packaged
+# in libopkg package if OPKGLIBDIR == libdir, because default
+# PTEST_PATH ?= "${libdir}/${BPN}/ptest"
+PACKAGES =+ "libopkg"
+
+inherit autotools pkgconfig ptest
+
+target_localstatedir := "${localstatedir}"
+OPKGLIBDIR ??= "${target_localstatedir}/lib"
+
+PACKAGECONFIG ??= "libsolv"
+
+PACKAGECONFIG[gpg] = "--enable-gpg,--disable-gpg,\
+ gnupg gpgme libgpg-error,\
+ ${@ "gnupg" if ("native" in d.getVar("PN")) else "gnupg-gpg"}\
+ "
+PACKAGECONFIG[curl] = "--enable-curl,--disable-curl,curl"
+PACKAGECONFIG[ssl-curl] = "--enable-ssl-curl,--disable-ssl-curl,curl openssl"
+PACKAGECONFIG[sha256] = "--enable-sha256,--disable-sha256"
+PACKAGECONFIG[libsolv] = "--with-libsolv,--without-libsolv,libsolv"
+
+EXTRA_OECONF = "--enable-zstd"
+EXTRA_OECONF:append:class-native = " --localstatedir=/${@os.path.relpath('${localstatedir}', '${STAGING_DIR_NATIVE}')} --sysconfdir=/${@os.path.relpath('${sysconfdir}', '${STAGING_DIR_NATIVE}')}"
+
+do_install:append () {
+ install -d ${D}${sysconfdir}/opkg
+ install -m 0644 ${WORKDIR}/opkg.conf ${D}${sysconfdir}/opkg/opkg.conf
+ echo "option lists_dir ${OPKGLIBDIR}/opkg/lists" >>${D}${sysconfdir}/opkg/opkg.conf
+ echo "option info_dir ${OPKGLIBDIR}/opkg/info" >>${D}${sysconfdir}/opkg/opkg.conf
+ echo "option status_file ${OPKGLIBDIR}/opkg/status" >>${D}${sysconfdir}/opkg/opkg.conf
+
+ # We need to create the lock directory
+ install -d ${D}${OPKGLIBDIR}/opkg
+}
+
+do_install_ptest () {
+ sed -i -e '/@echo $^/d' ${D}${PTEST_PATH}/tests/Makefile
+ sed -i -e '/@PYTHONPATH=. $(PYTHON) $^/a\\t@if [ "$$?" != "0" ];then echo "FAIL:"$^;else echo "PASS:"$^;fi' ${D}${PTEST_PATH}/tests/Makefile
+}
+
+WARN_QA:append = " internal-solver-deprecation"
+QARECIPETEST[internal-solver-deprecation] = "qa_check_solver_deprecation"
+def qa_check_solver_deprecation (pn, d, messages):
+ pkgconfig = (d.getVar("PACKAGECONFIG") or "").split()
+
+ if "libsolv" not in pkgconfig:
+ oe.qa.handle_error("internal-solver-deprecation", "The opkg internal solver will be deprecated in future opkg releases. Consider enabling \"libsolv\" in PACKAGECONFIG.", d)
+
+
+RDEPENDS:${PN} = "${VIRTUAL-RUNTIME_update-alternatives} opkg-arch-config libarchive"
+RDEPENDS:${PN}:class-native = ""
+RDEPENDS:${PN}:class-nativesdk = ""
+RDEPENDS:${PN}-ptest += "make binutils python3-core python3-compression bash python3-crypt python3-io"
+RREPLACES:${PN} = "opkg-nogpg opkg-collateral"
+RCONFLICTS:${PN} = "opkg-collateral"
+RPROVIDES:${PN} = "opkg-collateral"
+
+FILES:libopkg = "${libdir}/*.so.* ${OPKGLIBDIR}/opkg/"
+
+BBCLASSEXTEND = "native nativesdk"
+
+CONFFILES:${PN} = "${sysconfdir}/opkg/opkg.conf"
diff --git a/meta/recipes-devtools/orc/orc_0.4.32.bb b/meta/recipes-devtools/orc/orc_0.4.32.bb
deleted file mode 100644
index 829255f110..0000000000
--- a/meta/recipes-devtools/orc/orc_0.4.32.bb
+++ /dev/null
@@ -1,30 +0,0 @@
-SUMMARY = "Optimised Inner Loop Runtime Compiler"
-HOMEPAGE = "http://gstreamer.freedesktop.org/modules/orc.html"
-DESCRIPTION = "Optimised Inner Loop Runtime Compiler is a Library and set of tools for compiling and executing SIMD assembly language-like programs that operate on arrays of data."
-LICENSE = "BSD-2-Clause & BSD-3-Clause"
-LIC_FILES_CHKSUM = "file://COPYING;md5=1400bd9d09e8af56b9ec982b3d85797e"
-
-SRC_URI = "http://gstreamer.freedesktop.org/src/orc/orc-${PV}.tar.xz"
-SRC_URI[sha256sum] = "a66e3d8f2b7e65178d786a01ef61f2a0a0b4d0b8370de7ce134ba73da4af18f0"
-
-inherit meson pkgconfig gtk-doc
-
-GTKDOC_MESON_OPTION = "gtk_doc"
-GTKDOC_MESON_ENABLE_FLAG = "enabled"
-GTKDOC_MESON_DISABLE_FLAG = "disabled"
-
-BBCLASSEXTEND = "native nativesdk"
-
-PACKAGES =+ "orc-examples"
-PACKAGES_DYNAMIC += "^liborc-.*"
-FILES:orc-examples = "${libdir}/orc/*"
-FILES:${PN} = "${bindir}/*"
-
-python populate_packages:prepend () {
- libdir = d.expand('${libdir}')
- do_split_packages(d, libdir, r'^lib(.*)\.so\.*', 'lib%s', 'ORC %s library', extra_depends='', allow_links=True)
-}
-
-do_compile:prepend:class-native () {
- sed -i -e 's#/tmp#.#g' ${S}/orc/orccodemem.c
-}
diff --git a/meta/recipes-devtools/orc/orc_0.4.38.bb b/meta/recipes-devtools/orc/orc_0.4.38.bb
new file mode 100644
index 0000000000..5d2296694a
--- /dev/null
+++ b/meta/recipes-devtools/orc/orc_0.4.38.bb
@@ -0,0 +1,30 @@
+SUMMARY = "Optimised Inner Loop Runtime Compiler"
+HOMEPAGE = "http://gstreamer.freedesktop.org/modules/orc.html"
+DESCRIPTION = "Optimised Inner Loop Runtime Compiler is a Library and set of tools for compiling and executing SIMD assembly language-like programs that operate on arrays of data."
+LICENSE = "BSD-2-Clause & BSD-3-Clause"
+LIC_FILES_CHKSUM = "file://COPYING;md5=1400bd9d09e8af56b9ec982b3d85797e"
+
+SRC_URI = "http://gstreamer.freedesktop.org/src/orc/orc-${PV}.tar.xz"
+SRC_URI[sha256sum] = "a55a98d4772567aa3faed8fb84d540c3db77eaba16d3e2e10b044fbc9228668d"
+
+inherit meson pkgconfig gtk-doc
+
+GTKDOC_MESON_OPTION = "gtk_doc"
+GTKDOC_MESON_ENABLE_FLAG = "enabled"
+GTKDOC_MESON_DISABLE_FLAG = "disabled"
+
+BBCLASSEXTEND = "native nativesdk"
+
+PACKAGES =+ "orc-examples"
+PACKAGES_DYNAMIC += "^liborc-.*"
+FILES:orc-examples = "${libdir}/orc/*"
+FILES:${PN} = "${bindir}/*"
+
+python populate_packages:prepend () {
+ libdir = d.expand('${libdir}')
+ do_split_packages(d, libdir, r'^lib(.*)\.so\.*', 'lib%s', 'ORC %s library', extra_depends='', allow_links=True)
+}
+
+do_compile:prepend:class-native () {
+ sed -i -e 's#/tmp#.#g' ${S}/orc/orccodemem.c
+}
diff --git a/meta/recipes-devtools/patchelf/patchelf/0001-Set-interpreter-only-when-necessary.patch b/meta/recipes-devtools/patchelf/patchelf/0001-Set-interpreter-only-when-necessary.patch
new file mode 100644
index 0000000000..9a8216b3fe
--- /dev/null
+++ b/meta/recipes-devtools/patchelf/patchelf/0001-Set-interpreter-only-when-necessary.patch
@@ -0,0 +1,31 @@
+From f5df94952e87eaa390e5c845bc48fdb3dbc31cc2 Mon Sep 17 00:00:00 2001
+From: Yuta Hayama <hayama@lineo.co.jp>
+Date: Fri, 21 Jul 2023 10:47:02 +0900
+Subject: [PATCH] Set interpreter only when necessary
+
+If the given interpreter is already set, nothing needs to be done.
+As with modifySoname(), it skips unnecessary processing.
+
+Signed-off-by: Martin Jansa <Martin.Jansa@gmail.com>
+---
+Upstream-Status: Submitted [https://github.com/NixOS/patchelf/pull/508]
+
+ src/patchelf.cc | 5 +++++
+ 1 file changed, 5 insertions(+)
+
+diff --git a/src/patchelf.cc b/src/patchelf.cc
+index 86429c4..e562c49 100644
+--- a/src/patchelf.cc
++++ b/src/patchelf.cc
+@@ -1460,6 +1460,11 @@ void ElfFile<ElfFileParamNames>::modifySoname(sonameMode op, const std::string &
+ template<ElfFileParams>
+ void ElfFile<ElfFileParamNames>::setInterpreter(const std::string & newInterpreter)
+ {
++ if (getInterpreter() == newInterpreter) {
++ debug("given interpreter is already set\n");
++ return;
++ }
++
+ std::string & section = replaceSection(".interp", newInterpreter.size() + 1);
+ setSubstr(section, 0, newInterpreter + '\0');
+ changed = true;
diff --git a/meta/recipes-devtools/patchelf/patchelf/0002-align-startOffset-with-p_align-instead-of-pagesize-f.patch b/meta/recipes-devtools/patchelf/patchelf/0002-align-startOffset-with-p_align-instead-of-pagesize-f.patch
new file mode 100644
index 0000000000..7906f0f73b
--- /dev/null
+++ b/meta/recipes-devtools/patchelf/patchelf/0002-align-startOffset-with-p_align-instead-of-pagesize-f.patch
@@ -0,0 +1,42 @@
+From 1198329b922f3cdddc3e87a7c81d7730b646c088 Mon Sep 17 00:00:00 2001
+From: Yuta Hayama <hayama@lineo.co.jp>
+Date: Fri, 28 Jul 2023 16:22:31 +0900
+Subject: [PATCH] align startOffset with p_align instead of pagesize for
+ compatibility
+
+According to the ELF specification, the alignment of loadable process segments
+should satisfy (p_vaddr mod pagesize) == (p_offset mod pagesize). However,
+glibc earlier than 2.35 incorrectly requires that the LOAD segment be (p_vaddr
+mod p_align) == (p_offset mod p_align), and will output the error message
+"ELF load command address/offset not properly aligned" if this is not met.
+
+Since there are many systems that use glibc earlier than 2.35, it is preferable
+that newly added LOAD segments satisfy (p_vaddr mod p_align) == (p_offset mod
+p_align) for compatibility.
+
+Signed-off-by: Martin Jansa <Martin.Jansa@gmail.com>
+---
+Upstream-Status: Submitted [https://github.com/NixOS/patchelf/pull/510]
+
+ src/patchelf.cc | 8 +++++++-
+ 1 file changed, 7 insertions(+), 1 deletion(-)
+
+diff --git a/src/patchelf.cc b/src/patchelf.cc
+index 82b4b46..6edb81a 100644
+--- a/src/patchelf.cc
++++ b/src/patchelf.cc
+@@ -843,7 +843,13 @@ void ElfFile<ElfFileParamNames>::rewriteSectionsLibrary()
+ neededSpace += headerTableSpace;
+ debug("needed space is %d\n", neededSpace);
+
+- Elf_Off startOffset = roundUp(fileContents->size(), getPageSize());
++ /* glibc earlier than 2.35 requires that the LOAD segment satisfies
++ (p_vaddr mod p_align) == (p_offset mod p_align).
++ The ELF specification requires that loadable process segments satisfy
++ (p_vaddr mod pagesize) == (p_offset mod pagesize), so glibc is probably
++ wrong, but here startOffset is calculated according to p_align for
++ compatibility. */
++ Elf_Off startOffset = roundUp(fileContents->size(), alignStartPage);
+
+ // In older version of binutils (2.30), readelf would check if the dynamic
+ // section segment is strictly smaller than the file (and not same size).
diff --git a/meta/recipes-devtools/patchelf/patchelf/0003-make-LOAD-segment-extensions-based-on-p_align-instea.patch b/meta/recipes-devtools/patchelf/patchelf/0003-make-LOAD-segment-extensions-based-on-p_align-instea.patch
new file mode 100644
index 0000000000..37eaf992d8
--- /dev/null
+++ b/meta/recipes-devtools/patchelf/patchelf/0003-make-LOAD-segment-extensions-based-on-p_align-instea.patch
@@ -0,0 +1,32 @@
+From 299ad5766921d593e11a42a8e4dec55b4b350876 Mon Sep 17 00:00:00 2001
+From: Yuta Hayama <hayama@lineo.co.jp>
+Date: Mon, 31 Jul 2023 11:58:49 +0900
+Subject: [PATCH] make LOAD segment extensions based on p_align instead of
+ pagesize
+
+Since the p_align of the LOAD segment is no longer pagesize, the actual p_align
+value is used to calculate for the LOAD segment extension.
+
+If calculated with pagesize, new LOAD segment may be added even though the
+existing LOAD segment can be extended.
+
+Signed-off-by: Martin Jansa <Martin.Jansa@gmail.com>
+---
+Upstream-Status: Submitted [https://github.com/NixOS/patchelf/pull/510]
+
+ src/patchelf.cc | 2 +-
+ 1 file changed, 1 insertion(+), 1 deletion(-)
+
+diff --git a/src/patchelf.cc b/src/patchelf.cc
+index 6edb81a..86429c4 100644
+--- a/src/patchelf.cc
++++ b/src/patchelf.cc
+@@ -885,7 +885,7 @@ void ElfFile<ElfFileParamNames>::rewriteSectionsLibrary()
+ rdi(lastSeg.p_type) == PT_LOAD &&
+ rdi(lastSeg.p_flags) == (PF_R | PF_W) &&
+ rdi(lastSeg.p_align) == alignStartPage) {
+- auto segEnd = roundUp(rdi(lastSeg.p_offset) + rdi(lastSeg.p_memsz), getPageSize());
++ auto segEnd = roundUp(rdi(lastSeg.p_offset) + rdi(lastSeg.p_memsz), alignStartPage);
+ if (segEnd == startOffset) {
+ auto newSz = startOffset + neededSpace - rdi(lastSeg.p_offset);
+ wri(lastSeg.p_filesz, wri(lastSeg.p_memsz, newSz));
diff --git a/meta/recipes-devtools/patchelf/patchelf/handle-read-only-files.patch b/meta/recipes-devtools/patchelf/patchelf/handle-read-only-files.patch
deleted file mode 100644
index b755a263a4..0000000000
--- a/meta/recipes-devtools/patchelf/patchelf/handle-read-only-files.patch
+++ /dev/null
@@ -1,65 +0,0 @@
-From 682fb48c137b687477008b68863c2a0b73ed47d1 Mon Sep 17 00:00:00 2001
-From: Fabio Berton <fabio.berton@ossystems.com.br>
-Date: Fri, 9 Sep 2016 16:00:42 -0300
-Subject: [PATCH] handle read-only files
-
-Patch from:
-https://github.com/darealshinji/patchelf/commit/40e66392bc4b96e9b4eda496827d26348a503509
-
-Upstream-Status: Denied [https://github.com/NixOS/patchelf/pull/89]
-
-Signed-off-by: Fabio Berton <fabio.berton@ossystems.com.br>
-
----
- src/patchelf.cc | 16 +++++++++++++++-
- 1 file changed, 15 insertions(+), 1 deletion(-)
-
-Index: git/src/patchelf.cc
-===================================================================
---- git.orig/src/patchelf.cc
-+++ git/src/patchelf.cc
-@@ -534,9 +534,19 @@ void ElfFile<ElfFileParamNames>::sortShd
-
- static void writeFile(const std::string & fileName, const FileContents & contents)
- {
-+ struct stat st;
-+ int fd;
-+
- debug("writing %s\n", fileName.c_str());
-
-- int fd = open(fileName.c_str(), O_CREAT | O_TRUNC | O_WRONLY, 0777);
-+ if (stat(fileName.c_str(), &st) != 0)
-+ error("stat");
-+
-+ if (chmod(fileName.c_str(), 0600) != 0)
-+ error("chmod");
-+
-+ fd = open(fileName.c_str(), O_CREAT | O_TRUNC | O_WRONLY, 0777);
-+
- if (fd == -1)
- error("open");
-
-@@ -551,8 +561,6 @@ static void writeFile(const std::string
- bytesWritten += portion;
- }
-
-- if (close(fd) >= 0)
-- return;
- /*
- * Just ignore EINTR; a retry loop is the wrong thing to do.
- *
-@@ -561,9 +569,11 @@ static void writeFile(const std::string
- * http://utcc.utoronto.ca/~cks/space/blog/unix/CloseEINTR
- * https://sites.google.com/site/michaelsafyan/software-engineering/checkforeintrwheninvokingclosethinkagain
- */
-- if (errno == EINTR)
-- return;
-- error("close");
-+ if ((close(fd) < 0) && errno != EINTR)
-+ error("close");
-+
-+ if (chmod(fileName.c_str(), st.st_mode) != 0)
-+ error("chmod");
- }
-
-
diff --git a/meta/recipes-devtools/patchelf/patchelf_0.14.5.bb b/meta/recipes-devtools/patchelf/patchelf_0.14.5.bb
deleted file mode 100644
index 0fa2c00f1d..0000000000
--- a/meta/recipes-devtools/patchelf/patchelf_0.14.5.bb
+++ /dev/null
@@ -1,18 +0,0 @@
-SUMMARY = "Tool to allow editing of RPATH and interpreter fields in ELF binaries"
-DESCRIPTION = "PatchELF is a simple utility for modifying existing ELF executables and libraries."
-HOMEPAGE = "https://github.com/NixOS/patchelf"
-
-LICENSE = "GPL-3.0-only"
-
-SRC_URI = "git://github.com/NixOS/patchelf;protocol=https;branch=master \
- file://handle-read-only-files.patch \
- "
-SRCREV = "a35054504293f9ff64539850d1ed0bfd2f5399f2"
-
-S = "${WORKDIR}/git"
-
-LIC_FILES_CHKSUM = "file://COPYING;md5=d32239bcb673463ab874e80d47fae504"
-
-inherit autotools
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/patchelf/patchelf_0.18.0.bb b/meta/recipes-devtools/patchelf/patchelf_0.18.0.bb
new file mode 100644
index 0000000000..dece34240c
--- /dev/null
+++ b/meta/recipes-devtools/patchelf/patchelf_0.18.0.bb
@@ -0,0 +1,23 @@
+SUMMARY = "Tool to allow editing of RPATH and interpreter fields in ELF binaries"
+DESCRIPTION = "PatchELF is a simple utility for modifying existing ELF executables and libraries."
+HOMEPAGE = "https://github.com/NixOS/patchelf"
+
+LICENSE = "GPL-3.0-only"
+
+SRC_URI = "git://github.com/NixOS/patchelf;protocol=https;branch=master \
+ file://0001-Set-interpreter-only-when-necessary.patch \
+ file://0002-align-startOffset-with-p_align-instead-of-pagesize-f.patch \
+ file://0003-make-LOAD-segment-extensions-based-on-p_align-instea.patch \
+"
+SRCREV = "99c24238981b7b1084313aca8f5c493bb46f302c"
+
+S = "${WORKDIR}/git"
+
+LIC_FILES_CHKSUM = "file://COPYING;md5=d32239bcb673463ab874e80d47fae504"
+
+inherit autotools
+
+PACKAGES += "${PN}-zsh-completion"
+FILES:${PN}-zsh-completion = "${datadir}/zsh"
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/perl-cross/files/0001-Makefile-check-the-file-if-patched-or-not.patch b/meta/recipes-devtools/perl-cross/files/0001-Makefile-check-the-file-if-patched-or-not.patch
index 8c8f3b717c..4e9153ebf1 100644
--- a/meta/recipes-devtools/perl-cross/files/0001-Makefile-check-the-file-if-patched-or-not.patch
+++ b/meta/recipes-devtools/perl-cross/files/0001-Makefile-check-the-file-if-patched-or-not.patch
@@ -1,4 +1,4 @@
-From 24a3e0c48f9ebe473b5f1078663e275c27d0537f Mon Sep 17 00:00:00 2001
+From 3eb33dce6e3c93e1b3efcc9649f871100adada30 Mon Sep 17 00:00:00 2001
From: Mingli Yu <mingli.yu@windriver.com>
Date: Fri, 2 Jul 2021 09:08:21 +0000
Subject: [PATCH] Makefile: check the file if patched or not
@@ -9,23 +9,21 @@ one time.
Upstream-Status: Inappropriate (OE-specific)
Signed-off-by: Mingli Yu <mingli.yu@windriver.com>
+
---
Makefile | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/Makefile b/Makefile
-index f4a26f5..7bc748e 100644
+index c6d6042..d137976 100644
--- a/Makefile
+++ b/Makefile
-@@ -61,7 +61,7 @@ miniperlmain$O: $(CROSSPATCHED)
+@@ -67,7 +67,7 @@ miniperlmain$O: $(CROSSPATCHED)
# Original versions are not saved anymore; patch generally takes care of this,
# and if that fails, reaching for the source tarball is the safest option.
$(CROSSPATCHED): %.applied: %.patch
-- patch -p1 -i $< && touch $@
-+ test ! -f $@ && (patch -p1 -i $< && touch $@) || echo "$@ exist"
+- $(cpatch) -p1 -i $< && touch $@
++ test ! -f $@ && ($(cpatch) -p1 -i $< && touch $@) || echo "$@ exist"
# ---[ common ]-----------------------------------------------------------------
---
-2.29.2
-
diff --git a/meta/recipes-devtools/perl-cross/perlcross_1.3.7.bb b/meta/recipes-devtools/perl-cross/perlcross_1.3.7.bb
deleted file mode 100644
index 99a9ca1027..0000000000
--- a/meta/recipes-devtools/perl-cross/perlcross_1.3.7.bb
+++ /dev/null
@@ -1,38 +0,0 @@
-SUMMARY = "Perl-cross build system"
-HOMEPAGE = "https://github.com/arsv/perl-cross"
-DESCRIPTION = "perl-cross provides configure script, top-level Makefile and some auxiliary files for perl, \
-with the primary emphasis on cross-compiling the source."
-SECTION = "devel"
-LICENSE = "Artistic-1.0 | GPL-1.0-or-later"
-# README.md is taken from https://github.com/arsv/perl-cross/blob/master/README.md
-# but is not provided inside the release tarballs
-LIC_FILES_CHKSUM = "file://${WORKDIR}/README.md;md5=252fcce2026b765fee1ad74d2fb07a3b"
-
-inherit allarch
-
-SRC_URI = "https://github.com/arsv/perl-cross/releases/download/${PV}/perl-cross-${PV}.tar.gz;name=perl-cross \
- file://README.md \
- file://0001-perl-cross-add-LDFLAGS-when-linking-libperl.patch \
- file://determinism.patch \
- file://0001-Makefile-check-the-file-if-patched-or-not.patch \
- "
-UPSTREAM_CHECK_URI = "https://github.com/arsv/perl-cross/releases/"
-
-SRC_URI[perl-cross.sha256sum] = "77f13ca84a63025053852331b72d4046c1f90ded98bd45ccedea738621907335"
-
-S = "${WORKDIR}/perl-cross-${PV}"
-
-do_configure () {
-}
-
-do_compile () {
-}
-
-do_install:class-native() {
- mkdir -p ${D}/${datadir}/perl-cross/
- cp -rf ${S}/* ${D}/${datadir}/perl-cross/
- rm -rf ${D}/${datadir}/perl-cross/patches/
-}
-
-BBCLASSEXTEND = "native"
-
diff --git a/meta/recipes-devtools/perl-cross/perlcross_1.5.2.bb b/meta/recipes-devtools/perl-cross/perlcross_1.5.2.bb
new file mode 100644
index 0000000000..b41c182fad
--- /dev/null
+++ b/meta/recipes-devtools/perl-cross/perlcross_1.5.2.bb
@@ -0,0 +1,38 @@
+SUMMARY = "Perl-cross build system"
+HOMEPAGE = "https://github.com/arsv/perl-cross"
+DESCRIPTION = "perl-cross provides configure script, top-level Makefile and some auxiliary files for perl, \
+with the primary emphasis on cross-compiling the source."
+SECTION = "devel"
+LICENSE = "Artistic-1.0 | GPL-1.0-or-later"
+# README.md is taken from https://github.com/arsv/perl-cross/blob/master/README.md
+# but is not provided inside the release tarballs
+LIC_FILES_CHKSUM = "file://${WORKDIR}/README.md;md5=252fcce2026b765fee1ad74d2fb07a3b"
+
+inherit allarch github-releases
+
+SRC_URI = "${GITHUB_BASE_URI}/download/${PV}/perl-cross-${PV}.tar.gz;name=perl-cross \
+ file://README.md \
+ file://0001-perl-cross-add-LDFLAGS-when-linking-libperl.patch \
+ file://determinism.patch \
+ file://0001-Makefile-check-the-file-if-patched-or-not.patch \
+ "
+GITHUB_BASE_URI = "https://github.com/arsv/perl-cross/releases/"
+
+SRC_URI[perl-cross.sha256sum] = "584dc54c48dca25e032b676a15bef377c1fed9de318b4fc140292a5dbf326e90"
+
+S = "${WORKDIR}/perl-cross-${PV}"
+
+do_configure () {
+}
+
+do_compile () {
+}
+
+do_install:class-native() {
+ mkdir -p ${D}/${datadir}/perl-cross/
+ cp -rf ${S}/* ${D}/${datadir}/perl-cross/
+ rm -rf ${D}/${datadir}/perl-cross/patches/
+}
+
+BBCLASSEXTEND = "native"
+
diff --git a/meta/recipes-devtools/perl/files/0001-CheckLib.pm-do-not-attempt-to-run-a-cross-executable.patch b/meta/recipes-devtools/perl/files/0001-CheckLib.pm-do-not-attempt-to-run-a-cross-executable.patch
deleted file mode 100644
index c5bbe7888e..0000000000
--- a/meta/recipes-devtools/perl/files/0001-CheckLib.pm-do-not-attempt-to-run-a-cross-executable.patch
+++ /dev/null
@@ -1,24 +0,0 @@
-From a033c9ece12b6eead48eed63f106ccdec6159b0c Mon Sep 17 00:00:00 2001
-From: Alexander Kanavin <alex.kanavin@gmail.com>
-Date: Fri, 20 Dec 2019 16:26:55 +0100
-Subject: [PATCH] CheckLib.pm: do not attempt to run a cross executable
-
-Upstream-Status: Inappropriate [oe-core specific]
-Signed-off-by: Alexander Kanavin <alex.kanavin@gmail.com>
----
- inc/Devel/CheckLib.pm | 2 +-
- 1 file changed, 1 insertion(+), 1 deletion(-)
-
-diff --git a/inc/Devel/CheckLib.pm b/inc/Devel/CheckLib.pm
-index 36a451a..b04acc1 100644
---- a/inc/Devel/CheckLib.pm
-+++ b/inc/Devel/CheckLib.pm
-@@ -330,7 +330,7 @@ sub assert_lib {
- push @missing, $lib if $rv != 0 || !-x $exefile;
- my $absexefile = File::Spec->rel2abs($exefile);
- $absexefile = '"' . $absexefile . '"' if $absexefile =~ m/\s/;
-- push @wrongresult, $lib if $rv == 0 && -x $exefile && system($absexefile) != 0;
-+ push @wrongresult, $lib if $rv == 0 && -x $exefile && 0 != 0;
- unlink $ofile if -e $ofile;
- _cleanup_exe($exefile);
- }
diff --git a/meta/recipes-devtools/perl/files/0001-Fix-intermittent-failure-of-test-t-op-sigsystem.t.patch b/meta/recipes-devtools/perl/files/0001-Fix-intermittent-failure-of-test-t-op-sigsystem.t.patch
new file mode 100644
index 0000000000..86fd42cd3d
--- /dev/null
+++ b/meta/recipes-devtools/perl/files/0001-Fix-intermittent-failure-of-test-t-op-sigsystem.t.patch
@@ -0,0 +1,77 @@
+From 75d974a58c461b3b5d35280e497810e46abae4ca Mon Sep 17 00:00:00 2001
+From: William Lyu <William.Lyu@windriver.com>
+Date: Wed, 4 Oct 2023 08:58:41 -0400
+Subject: [PATCH] Fix intermittent failure of test t/op/sigsystem.t
+
+[Perl issue #21546] -- https://github.com/Perl/perl5/issues/21546
+
+This fix addresses the intermittent failure of the test
+t/op/sigsystem.t by improving its robustness. Before the fix, this
+test waits a hard-coded amount of time in the parent process for the
+child process to exit, and the child process may not be able to exit
+soon enough. With this fix, the parent process in this test polls for
+whether the SIGCHLD handler reaped the child process for at most 25
+seconds.
+
+Upstream-Status: Backport [commit ID: 75d974a]
+
+Signed-off-by: William Lyu <William.Lyu@windriver.com>
+Signed-off-by: Randy MacLeod <randy.macleod@windriver.com>
+Reported-by: Alexandre Belloni <alexandre.belloni@bootlin.com>
+
+Committer: William Lyu is now a Perl author.
+---
+ AUTHORS | 1 +
+ t/op/sigsystem.t | 17 ++++++++++++++---
+ 2 files changed, 15 insertions(+), 3 deletions(-)
+
+diff --git a/AUTHORS b/AUTHORS
+index 21948bfdc7..527dd992fd 100644
+--- a/AUTHORS
++++ b/AUTHORS
+@@ -1443,6 +1443,7 @@ Wayne Scott <wscott@ichips.intel.com>
+ Wayne Thompson <Wayne.Thompson@Ebay.sun.com>
+ Wilfredo Sánchez <wsanchez@mit.edu>
+ William J. Middleton <William.Middleton@oslo.mobil.telenor.no>
++William Lyu <William.Lyu@windriver.com>
+ William Mann <wmann@avici.com>
+ William Middleton <wmiddlet@adobe.com>
+ William R Ward <hermit@BayView.COM>
+diff --git a/t/op/sigsystem.t b/t/op/sigsystem.t
+index 25da854902..831feefb0f 100644
+--- a/t/op/sigsystem.t
++++ b/t/op/sigsystem.t
+@@ -37,7 +37,15 @@ SKIP: {
+ test_system('with reaper');
+
+ note("Waiting briefly for SIGCHLD...");
+- Time::HiRes::sleep(0.500);
++
++ # Wait at most 50 * 0.500 = 25.0 seconds for the child process to be
++ # reaped. If the child process exits and gets reaped early, this polling
++ # loop will exit early.
++
++ for (1..50) {
++ last if @pids;
++ Time::HiRes::sleep(0.500);
++ }
+
+ ok(@pids == 1, 'Reaped only one process');
+ ok($pids[0] == $pid, "Reaped the right process.") or diag(Dumper(\@pids));
+@@ -50,8 +58,11 @@ sub test_system {
+ my $got_zeroes = 0;
+
+ # This test is looking for a race between system()'s waitpid() and a
+- # signal handler. Looping a few times increases the chances of
+- # catching the error.
++ # signal handler. The system() call is expected to not interfere with the
++ # SIGCHLD signal handler. In particular, the wait() called within system()
++ # is expected to reap the child process forked by system() before the
++ # SIGCHLD signal handler is called.
++ # Looping a few times increases the chances of catching the error.
+
+ for (1..$expected_zeroes) {
+ $got_zeroes++ unless system(TRUE);
+--
+2.25.1
+
diff --git a/meta/recipes-devtools/perl/files/perl-configpm-switch.patch b/meta/recipes-devtools/perl/files/perl-configpm-switch.patch
index 15189a0c04..0be1d5a93c 100644
--- a/meta/recipes-devtools/perl/files/perl-configpm-switch.patch
+++ b/meta/recipes-devtools/perl/files/perl-configpm-switch.patch
@@ -1,4 +1,4 @@
-From 1f7cc5db2ca549c37c6a7923368e1a0104f31b99 Mon Sep 17 00:00:00 2001
+From c25d460a2f00e9af25087d40447fe1a81c89710c Mon Sep 17 00:00:00 2001
From: Alexander Kanavin <alex.kanavin@gmail.com>
Date: Sun, 27 May 2007 21:04:11 +0000
Subject: [PATCH] perl: 5.8.7 -> 5.8.8 (from OE)
@@ -20,38 +20,38 @@ Signed-off-by: Alexander Kanavin <alex.kanavin@gmail.com>
1 file changed, 16 insertions(+), 2 deletions(-)
diff --git a/configpm b/configpm
-index 94a4778..99b20c9 100755
+index 07219d8..01a23fa 100755
--- a/configpm
+++ b/configpm
-@@ -687,7 +687,7 @@ sub FETCH {
- my($self, $key) = @_;
-
- # check for cached value (which may be undef so we use exists not defined)
-- return exists $self->{$key} ? $self->{$key} : $self->fetch_string($key);
-+ return $self->fetch_string($key);
- }
-
+@@ -718,7 +718,7 @@ $config_txt .= uncomment <<'ENDOFEND';
+ # my($self, $key) = @_;
+ #
+ # # check for cached value (which may be undef so we use exists not defined)
+-# return exists $self->{$key} ? $self->{$key} : $self->fetch_string($key);
++# return $self->fetch_string($key);
+ # }
+ #
ENDOFEND
-@@ -845,7 +845,21 @@ $config_txt .= sprintf <<'ENDOFTIE', $fast_config;
- sub DESTROY { }
-
- sub AUTOLOAD {
-- require 'Config_heavy.pl';
-+ my $cfgfile = 'Config_heavy.pl';
-+ if (defined $ENV{PERLCONFIGTARGET} and $ENV{PERLCONFIGTARGET} eq "yes")
-+ {
-+ $cfgfile = 'Config_heavy-target.pl';
-+ }
-+ if (defined $ENV{PERL_ARCHLIB})
-+ {
-+ push @INC, $ENV{PERL_ARCHLIB};
-+ require $cfgfile;
-+ pop @INC;
-+ }
-+ else
-+ {
-+ require $cfgfile;
-+ }
- goto \&launcher unless $Config::AUTOLOAD =~ /launcher$/;
- die "&Config::AUTOLOAD failed on $Config::AUTOLOAD";
- }
+@@ -876,7 +876,21 @@ $config_txt .= sprintf uncomment <<'ENDOFTIE', $fast_config;
+ # sub DESTROY { }
+ #
+ # sub AUTOLOAD {
+-# require 'Config_heavy.pl';
++# my $cfgfile = 'Config_heavy.pl';
++# if (defined $ENV{PERLCONFIGTARGET} and $ENV{PERLCONFIGTARGET} eq "yes")
++# {
++# $cfgfile = 'Config_heavy-target.pl';
++# }
++# if (defined $ENV{PERL_ARCHLIB})
++# {
++# push @INC, $ENV{PERL_ARCHLIB};
++# require $cfgfile;
++# pop @INC;
++# }
++# else
++# {
++# require $cfgfile;
++# }
+ # goto \&launcher unless $Config::AUTOLOAD =~ /launcher$/;
+ # die "&Config::AUTOLOAD failed on $Config::AUTOLOAD";
+ # }
diff --git a/meta/recipes-devtools/perl/files/perl-dynloader.patch b/meta/recipes-devtools/perl/files/perl-dynloader.patch
index b49e6e4066..887f7c0f7f 100644
--- a/meta/recipes-devtools/perl/files/perl-dynloader.patch
+++ b/meta/recipes-devtools/perl/files/perl-dynloader.patch
@@ -22,7 +22,7 @@ Index: perl-5.24.1/dist/XSLoader/XSLoader_pm.PL
+ my $hostlib = $ENV{PERLHOSTLIB};
+ my $hostarchlib = $ENV{PERLHOSTARCHLIB};
+ print STDERR "*** Module name IN: $modlibname\n";
-+ ($p1, $p2, $p3, $p4, $p5, $p6, $p7) = $modlibname =~ m/(^(.*lib\w*\/)?)((perl5\/[0-9\.]*\/)?)(([^\/]*)\/)?(.*)$/;
++ my ($p1, $p2, $p3, $p4, $p5, $p6, $p7) = $modlibname =~ m/(^(.*lib\w*\/)?)((perl5\/[0-9\.]*\/)?)(([^\/]*)\/)?(.*)$/;
+ print STDERR "*** p1: $p1 p3: $p3 p5: $p5 p7: $p7\n";
+ if ( $p1 ne "" ) {
+ $modlibname = $hostlib.$p7;
diff --git a/meta/recipes-devtools/perl/files/perl-rdepends.txt b/meta/recipes-devtools/perl/files/perl-rdepends.txt
index 74c24c3bb5..e5f45bf291 100644
--- a/meta/recipes-devtools/perl/files/perl-rdepends.txt
+++ b/meta/recipes-devtools/perl/files/perl-rdepends.txt
@@ -131,28 +131,23 @@ RDEPENDS:perl-module-b-concise += "perl-module-strict"
RDEPENDS:perl-module-b-concise += "perl-module-warnings"
RDEPENDS:perl-module-benchmark += "perl-module-exporter"
RDEPENDS:perl-module-benchmark += "perl-module-strict"
+RDEPENDS:perl-module-bigfloat += "perl-module-constant"
+RDEPENDS:perl-module-bigfloat += "perl-module-exporter"
+RDEPENDS:perl-module-bigfloat += "perl-module-overload"
+RDEPENDS:perl-module-bigfloat += "perl-module-strict"
+RDEPENDS:perl-module-bigfloat += "perl-module-warnings"
RDEPENDS:perl-module-bigint += "perl-module-constant"
RDEPENDS:perl-module-bigint += "perl-module-exporter"
-RDEPENDS:perl-module-bigint += "perl-module-math-bigint"
-RDEPENDS:perl-module-bigint += "perl-module-math-bigint-trace"
RDEPENDS:perl-module-bigint += "perl-module-overload"
RDEPENDS:perl-module-bigint += "perl-module-strict"
RDEPENDS:perl-module-bigint += "perl-module-warnings"
-RDEPENDS:perl-module-bignum += "perl-module-bigint"
+RDEPENDS:perl-module-bignum += "perl-module-constant"
RDEPENDS:perl-module-bignum += "perl-module-exporter"
-RDEPENDS:perl-module-bignum += "perl-module-math-bigfloat"
-RDEPENDS:perl-module-bignum += "perl-module-math-bigfloat-trace"
-RDEPENDS:perl-module-bignum += "perl-module-math-bigint"
-RDEPENDS:perl-module-bignum += "perl-module-math-bigint-trace"
RDEPENDS:perl-module-bignum += "perl-module-overload"
RDEPENDS:perl-module-bignum += "perl-module-strict"
RDEPENDS:perl-module-bignum += "perl-module-warnings"
-RDEPENDS:perl-module-bigrat += "perl-module-bigint"
+RDEPENDS:perl-module-bigrat += "perl-module-constant"
RDEPENDS:perl-module-bigrat += "perl-module-exporter"
-RDEPENDS:perl-module-bigrat += "perl-module-math-bigfloat"
-RDEPENDS:perl-module-bigrat += "perl-module-math-bigint"
-RDEPENDS:perl-module-bigrat += "perl-module-math-bigint-trace"
-RDEPENDS:perl-module-bigrat += "perl-module-math-bigrat"
RDEPENDS:perl-module-bigrat += "perl-module-overload"
RDEPENDS:perl-module-bigrat += "perl-module-strict"
RDEPENDS:perl-module-bigrat += "perl-module-warnings"
@@ -166,6 +161,8 @@ RDEPENDS:perl-module-b-showlex += "perl-module-strict"
RDEPENDS:perl-module-b-terse += "perl-module-b"
RDEPENDS:perl-module-b-terse += "perl-module-b-concise"
RDEPENDS:perl-module-b-terse += "perl-module-strict"
+RDEPENDS:perl-module-builtin += "perl-module-strict"
+RDEPENDS:perl-module-builtin += "perl-module-warnings"
RDEPENDS:perl-module-b-xref += "perl-module-b"
RDEPENDS:perl-module-b-xref += "perl-module-config"
RDEPENDS:perl-module-b-xref += "perl-module-strict"
@@ -257,7 +254,6 @@ RDEPENDS:perl-module-cwd += "perl-module-errno"
RDEPENDS:perl-module-cwd += "perl-module-exporter"
RDEPENDS:perl-module-cwd += "perl-module-strict"
RDEPENDS:perl-module-cwd += "perl-module-xsloader"
-RDEPENDS:perl-module-data-dumper += "perl-module-config"
RDEPENDS:perl-module-data-dumper += "perl-module-constant"
RDEPENDS:perl-module-data-dumper += "perl-module-exporter"
RDEPENDS:perl-module-data-dumper += "perl-module-strict"
@@ -313,6 +309,8 @@ RDEPENDS:perl-module-digest-sha += "perl-module-vars"
RDEPENDS:perl-module-digest-sha += "perl-module-warnings"
RDEPENDS:perl-module-digest-sha += "perl-module-xsloader"
RDEPENDS:perl-module-dynaloader += "perl-module-config"
+RDEPENDS:perl-module-dynaloader += "perl-module-strict"
+RDEPENDS:perl-module-dynaloader += "perl-module-vars"
RDEPENDS:perl-module-encode-alias += "perl-module-constant"
RDEPENDS:perl-module-encode-alias += "perl-module-encode"
RDEPENDS:perl-module-encode-alias += "perl-module-exporter"
@@ -796,11 +794,12 @@ RDEPENDS:perl-module-file-basename += "perl-module-warnings"
RDEPENDS:perl-module-filecache += "perl-module-parent"
RDEPENDS:perl-module-filecache += "perl-module-strict"
RDEPENDS:perl-module-file-compare += "perl-module-exporter"
-RDEPENDS:perl-module-file-compare += "perl-module-strict"
RDEPENDS:perl-module-file-compare += "perl-module-warnings"
+RDEPENDS:perl-module-file-copy += "perl-module-builtin"
RDEPENDS:perl-module-file-copy += "perl-module-config"
RDEPENDS:perl-module-file-copy += "perl-module-exporter"
RDEPENDS:perl-module-file-copy += "perl-module-file-basename"
+RDEPENDS:perl-module-file-copy += "perl-module-overload"
RDEPENDS:perl-module-file-copy += "perl-module-strict"
RDEPENDS:perl-module-file-copy += "perl-module-warnings"
RDEPENDS:perl-module-file-dosglob += "perl-module-strict"
@@ -887,10 +886,12 @@ RDEPENDS:perl-module-getopt-long += "perl-module-warnings"
RDEPENDS:perl-module-getopt-std += "perl-module-exporter"
RDEPENDS:perl-module-getopt-std += "perl-module-strict"
RDEPENDS:perl-module-getopt-std += "perl-module-warnings"
+RDEPENDS:perl-module-hash-util-fieldhash += "perl-module-builtin"
RDEPENDS:perl-module-hash-util-fieldhash += "perl-module-exporter"
RDEPENDS:perl-module-hash-util-fieldhash += "perl-module-strict"
RDEPENDS:perl-module-hash-util-fieldhash += "perl-module-warnings"
RDEPENDS:perl-module-hash-util-fieldhash += "perl-module-xsloader"
+RDEPENDS:perl-module-hash-util += "perl-module-builtin"
RDEPENDS:perl-module-hash-util += "perl-module-exporter"
RDEPENDS:perl-module-hash-util += "perl-module-hash-util-fieldhash"
RDEPENDS:perl-module-hash-util += "perl-module-strict"
@@ -1258,6 +1259,7 @@ RDEPENDS:perl-module-math-bigint-calc += "perl-module-integer"
RDEPENDS:perl-module-math-bigint-calc += "perl-module-math-bigint-lib"
RDEPENDS:perl-module-math-bigint-calc += "perl-module-strict"
RDEPENDS:perl-module-math-bigint-calc += "perl-module-warnings"
+RDEPENDS:perl-module-math-bigint-fastcalc += "perl-module-config"
RDEPENDS:perl-module-math-bigint-fastcalc += "perl-module-math-bigint-calc"
RDEPENDS:perl-module-math-bigint-fastcalc += "perl-module-strict"
RDEPENDS:perl-module-math-bigint-fastcalc += "perl-module-warnings"
@@ -1278,9 +1280,15 @@ RDEPENDS:perl-module-math-bigint-trace += "perl-module-strict"
RDEPENDS:perl-module-math-bigint-trace += "perl-module-warnings"
RDEPENDS:perl-module-math-bigrat += "perl-module-math-bigfloat"
RDEPENDS:perl-module-math-bigrat += "perl-module-math-bigint"
+RDEPENDS:perl-module-math-bigrat += "perl-module-math-complex"
RDEPENDS:perl-module-math-bigrat += "perl-module-overload"
RDEPENDS:perl-module-math-bigrat += "perl-module-strict"
RDEPENDS:perl-module-math-bigrat += "perl-module-warnings"
+RDEPENDS:perl-module-math-bigrat-trace += "perl-module-exporter"
+RDEPENDS:perl-module-math-bigrat-trace += "perl-module-math-bigrat"
+RDEPENDS:perl-module-math-bigrat-trace += "perl-module-overload"
+RDEPENDS:perl-module-math-bigrat-trace += "perl-module-strict"
+RDEPENDS:perl-module-math-bigrat-trace += "perl-module-warnings"
RDEPENDS:perl-module-math-complex += "perl-module-config"
RDEPENDS:perl-module-math-complex += "perl-module-exporter"
RDEPENDS:perl-module-math-complex += "perl-module-overload"
@@ -1665,6 +1673,7 @@ RDEPENDS:perl-module-socket += "perl-module-strict"
RDEPENDS:perl-module-socket += "perl-module-warnings-register"
RDEPENDS:perl-module-socket += "perl-module-xsloader"
RDEPENDS:perl-module-sort += "perl-module-strict"
+RDEPENDS:perl-module-sort += "perl-module-warnings"
RDEPENDS:perl-module-storable += "perl-module-exporter"
RDEPENDS:perl-module-storable += "perl-module-io-file"
RDEPENDS:perl-module-subs += "perl-module-strict"
@@ -1697,6 +1706,7 @@ RDEPENDS:perl-module-tap-base += "perl-module-constant"
RDEPENDS:perl-module-tap-base += "perl-module-strict"
RDEPENDS:perl-module-tap-base += "perl-module-warnings"
RDEPENDS:perl-module-tap-formatter-base += "perl-module-base"
+RDEPENDS:perl-module-tap-formatter-base += "perl-module-config"
RDEPENDS:perl-module-tap-formatter-base += "perl-module-posix"
RDEPENDS:perl-module-tap-formatter-base += "perl-module-strict"
RDEPENDS:perl-module-tap-formatter-base += "perl-module-tap-formatter-color"
@@ -1770,6 +1780,7 @@ RDEPENDS:perl-module-tap-parser-iterator-stream += "perl-module-strict"
RDEPENDS:perl-module-tap-parser-iterator-stream += "perl-module-warnings"
RDEPENDS:perl-module-tap-parser-multiplexer += "perl-module-base"
RDEPENDS:perl-module-tap-parser-multiplexer += "perl-module-constant"
+RDEPENDS:perl-module-tap-parser-multiplexer += "perl-module-errno"
RDEPENDS:perl-module-tap-parser-multiplexer += "perl-module-io-select"
RDEPENDS:perl-module-tap-parser-multiplexer += "perl-module-strict"
RDEPENDS:perl-module-tap-parser-multiplexer += "perl-module-warnings"
@@ -1947,6 +1958,7 @@ RDEPENDS:perl-module-test2-api += "perl-module-test2-hub-interceptor-terminator"
RDEPENDS:perl-module-test2-api += "perl-module-test2-hub-subtest"
RDEPENDS:perl-module-test2-api += "perl-module-test2-util"
RDEPENDS:perl-module-test2-api += "perl-module-test2-util-trace"
+RDEPENDS:perl-module-test2-api += "perl-module-time-hires"
RDEPENDS:perl-module-test2-api += "perl-module-warnings"
RDEPENDS:perl-module-test2-api-stack += "perl-module-strict"
RDEPENDS:perl-module-test2-api-stack += "perl-module-test2-api"
@@ -2197,14 +2209,11 @@ RDEPENDS:perl-module-text-balanced += "perl-module-strict"
RDEPENDS:perl-module-text-balanced += "perl-module-vars"
RDEPENDS:perl-module-text-parsewords += "perl-module-exporter"
RDEPENDS:perl-module-text-parsewords += "perl-module-strict"
-RDEPENDS:perl-module-text-tabs += "perl-module-exporter"
+RDEPENDS:perl-module-text-parsewords += "perl-module-warnings"
RDEPENDS:perl-module-text-tabs += "perl-module-strict"
-RDEPENDS:perl-module-text-tabs += "perl-module-vars"
-RDEPENDS:perl-module-text-wrap += "perl-module-exporter"
RDEPENDS:perl-module-text-wrap += "perl-module-re"
RDEPENDS:perl-module-text-wrap += "perl-module-strict"
RDEPENDS:perl-module-text-wrap += "perl-module-text-tabs"
-RDEPENDS:perl-module-text-wrap += "perl-module-vars"
RDEPENDS:perl-module-text-wrap += "perl-module-warnings-register"
RDEPENDS:perl-module-thread += "perl-module-config"
RDEPENDS:perl-module-thread += "perl-module-exporter"
@@ -2246,6 +2255,8 @@ RDEPENDS:perl-module-tie-scalar += "perl-module-warnings-register"
RDEPENDS:perl-module-tie-stdhandle += "perl-module-strict"
RDEPENDS:perl-module-tie-stdhandle += "perl-module-tie-handle"
RDEPENDS:perl-module-tie-substrhash += "perl-module-integer"
+RDEPENDS:perl-module-tie-substrhash += "perl-module-strict"
+RDEPENDS:perl-module-tie-substrhash += "perl-module-warnings"
RDEPENDS:perl-module-time-gmtime += "perl-module-exporter"
RDEPENDS:perl-module-time-gmtime += "perl-module-strict"
RDEPENDS:perl-module-time-gmtime += "perl-module-time-tm"
@@ -2295,6 +2306,7 @@ RDEPENDS:perl-module-unicode-collate += "perl-module-constant"
RDEPENDS:perl-module-unicode-collate += "perl-module-strict"
RDEPENDS:perl-module-unicode-collate += "perl-module-warnings"
RDEPENDS:perl-module-unicode-collate += "perl-module-xsloader"
+RDEPENDS:perl-module-unicode-normalize += "perl-module-bytes"
RDEPENDS:perl-module-unicode-normalize += "perl-module-exporter"
RDEPENDS:perl-module-unicode-normalize += "perl-module-strict"
RDEPENDS:perl-module-unicode-normalize += "perl-module-warnings"
@@ -2323,3 +2335,4 @@ RDEPENDS:perl-module-version += "perl-module-version-regex"
RDEPENDS:perl-module-version += "perl-module-warnings-register"
RDEPENDS:perl-module-version-regex += "perl-module-strict"
RDEPENDS:perl-module-xsloader += "perl-module-dynaloader"
+RDEPENDS:perl-module-xsloader += "perl-module-strict"
diff --git a/meta/recipes-devtools/perl/files/run-ptest b/meta/recipes-devtools/perl/files/run-ptest
index dad4d42916..0547f818b2 100644
--- a/meta/recipes-devtools/perl/files/run-ptest
+++ b/meta/recipes-devtools/perl/files/run-ptest
@@ -1,2 +1,2 @@
#!/bin/sh
-cd t && PERL_BUILD_PACKAGING=1 ./TEST | sed -u -e 's|\(.*\) .* ok$|PASS: \1|' -e 's|\(.*\) .* skipped|SKIP: \1|' -e 's|\(.*\) \.\(.*\)|FAIL: \1|'
+{ cd t && PERL_BUILD_PACKAGING=1 ./TEST || echo "FAIL: perl" ; } | sed -u -e 's|\(.*\) .* ok$|PASS: \1|' -e 's|\(.*\) .* skipped|SKIP: \1|' -e 's|\(.*\) \.\(.*\)|FAIL: \1|'
diff --git a/meta/recipes-devtools/perl/liberror-perl_0.17029.bb b/meta/recipes-devtools/perl/liberror-perl_0.17029.bb
deleted file mode 100644
index 67e5811f3c..0000000000
--- a/meta/recipes-devtools/perl/liberror-perl_0.17029.bb
+++ /dev/null
@@ -1,51 +0,0 @@
-SUMMARY = "Error - Error/exception handling in an OO-ish way"
-DESCRIPTION = "The Error package provides two interfaces. Firstly \
-Error provides a procedural interface to exception handling. \
-Secondly Error is a base class for errors/exceptions that can \
-either be thrown, for subsequent catch, or can simply be recorded."
-HOMEPAGE = "https://github.com/shlomif/perl-error.pm"
-SECTION = "libs"
-LICENSE = "Artistic-1.0 | GPL-1.0-or-later"
-
-LIC_FILES_CHKSUM = "file://LICENSE;md5=8f3499d09ee74a050c0319391ff9d100"
-
-# remove at next version upgrade or when output changes
-PR = "r1"
-HASHEQUIV_HASH_VERSION .= ".1"
-
-DEPENDS += "perl"
-
-RDEPENDS:${PN} += " \
- perl-module-carp \
- perl-module-exporter \
- perl-module-scalar-util \
- perl-module-overload \
- perl-module-strict \
- perl-module-vars \
- perl-module-warnings \
-"
-
-RDEPENDS:${PN}-ptest += " \
- perl-module-base \
- perl-module-file-spec \
- perl-module-io-handle \
- perl-module-ipc-open3 \
- perl-module-lib \
- perl-module-test-more \
-"
-
-SRC_URI = "http://cpan.metacpan.org/authors/id/S/SH/SHLOMIF/Error-${PV}.tar.gz"
-
-SRC_URI[md5sum] = "6732b1c6207e4a9a3e2987c88368039a"
-SRC_URI[sha256sum] = "1a23f7913032aed6d4b68321373a3899ca66590f4727391a091ec19c95bf7adc"
-
-S = "${WORKDIR}/Error-${PV}"
-
-inherit cpan ptest-perl
-
-do_install:prepend() {
- # test requires "-T" (taint) command line option
- rm -rf ${B}/t/pod-coverage.t
-}
-
-BBCLASSEXTEND = "native"
diff --git a/meta/recipes-devtools/perl/libmodule-build-perl_0.4231.bb b/meta/recipes-devtools/perl/libmodule-build-perl_0.4231.bb
deleted file mode 100644
index e2c79d962b..0000000000
--- a/meta/recipes-devtools/perl/libmodule-build-perl_0.4231.bb
+++ /dev/null
@@ -1,123 +0,0 @@
-SUMMARY = "Module::Build - Build and install Perl modules"
-DESCRIPTION = "Many Perl distributions use a Build.PL file instead of a \
-Makefile.PL file to drive distribution configuration, build, test and \
-installation. Traditionally, Build.PL uses Module::Build as the underlying \
-build system. This module provides a simple, lightweight, drop-in replacement. \
-Whereas Module::Build has over 6,700 lines of code; this module has less than \
-120, yet supports the features needed by most distributions."
-
-SECTION = "libs"
-
-HOMEPAGE = "https://metacpan.org/release/Module-Build"
-
-LICENSE = "Artistic-1.0 | GPL-1.0-or-later"
-LIC_FILES_CHKSUM = "file://README;beginline=949;endline=954;md5=624c06db56a2af4d70cf9edc29fcae1b"
-
-SRC_URI = "${CPAN_MIRROR}/authors/id/L/LE/LEONT/Module-Build-${PV}.tar.gz \
- file://run-ptest \
- "
-SRC_URI[md5sum] = "066b193e461d7dfe1eca17a139353001"
-SRC_URI[sha256sum] = "7e0f4c692c1740c1ac84ea14d7ea3d8bc798b2fb26c09877229e04f430b2b717"
-
-S = "${WORKDIR}/Module-Build-${PV}"
-
-inherit cpan_build ptest-perl
-
-# From:
-# https://github.com/rehsack/meta-cpan/blob/master/recipes-devel/module-build-perl/module-build-perl_0.4216.bb
-#
-do_patch_module_build () {
- cd ${S}
- sed -i -e 's,my $interpreter = $self->{properties}{perl};,my $interpreter = "${bindir}/perl";,g' lib/Module/Build/Base.pm
-}
-
-do_patch[postfuncs] += "do_patch_module_build"
-
-EXTRA_CPAN_BUILD_FLAGS = "--create_packlist=0"
-
-do_install:append () {
- rm -rf ${D}${docdir}/perl/html
-}
-
-do_install_ptest() {
- cp -r ${B}/inc ${D}${PTEST_PATH}
- cp -r ${B}/blib ${D}${PTEST_PATH}
- cp -r ${B}/_build ${D}${PTEST_PATH}
- cp -r ${B}/lib ${D}${PTEST_PATH}
- chown -R root:root ${D}${PTEST_PATH}
- sed -i -e "s,'perl' => .*,'perl' => '/usr/bin/perl'\,,g" \
- -e "s,${STAGING_BINDIR_NATIVE}/perl-native/\.\.,${bindir}/,g" \
- -e "s,${S},,g" \
- -e "s,${D},,g" \
- ${D}${PTEST_PATH}/_build/build_params \
- ${D}${PTEST_PATH}/_build/runtime_params
- rm -rf ${D}${PTEST_PATH}/blib/libhtml/site/lib/Module/
- rm -rf ${D}${PTEST_PATH}/_build/magicnum
-}
-
-RDEPENDS:${PN} += " \
- perl-module-carp \
- perl-module-cpan \
- perl-module-config \
- perl-module-cwd \
- perl-module-data-dumper \
- perl-module-encode \
- perl-module-extutils-cbuilder \
- perl-module-extutils-command \
- perl-module-extutils-install \
- perl-module-extutils-installed \
- perl-module-extutils-mkbootstrap \
- perl-module-extutils-packlist \
- perl-module-extutils-parsexs \
- perl-module-file-basename \
- perl-module-file-compare \
- perl-module-file-copy \
- perl-module-file-find \
- perl-module-file-glob \
- perl-module-file-path \
- perl-module-file-spec \
- perl-module-file-spec-functions \
- perl-module-getopt-long \
- perl-module-metadata \
- perl-module-perl-ostype \
- perl-module-pod-man \
- perl-module-tap-harness \
- perl-module-text-abbrev \
- perl-module-text-parsewords \
- perl-module-utf8 \
-"
-
-RDEPENDS:${PN}-ptest += " \
- packagegroup-core-buildessential \
- perl-dev \
- perl-module-blib \
- perl-module-extutils-command-mm \
- perl-module-file-temp \
- perl-module-lib \
- perl-module-perlio \
- perl-module-perlio-encoding \
- perl-module-pod-text \
- perl-module-tap-harness-env \
- perl-module-tap-parser \
- perl-module-tap-parser-scheduler \
- perl-module-test-harness \
- perl-module-test-more \
-"
-
-RPROVIDES:${PN} += "\
- libmodule-build-base-perl \
- libmodule-build-compat-perl \
- libmodule-build-config-perl \
- libmodule-build-cookbook-perl \
- libmodule-build-dumper-perl \
- libmodule-build-notes-perl \
- libmodule-build-ppmaker-perl \
- libmodule-build-platform-default-perl \
- libmodule-build-platform-unix-perl \
- libmodule-build-podparser-perl \
-"
-
-# t/xs.t RDEPENDS on "EXTERN.h" provided by perl-dev
-INSANE_SKIP:${PN}-ptest = "dev-deps"
-
-BBCLASSEXTEND = "native"
diff --git a/meta/recipes-devtools/perl/libmodule-build-perl_0.4234.bb b/meta/recipes-devtools/perl/libmodule-build-perl_0.4234.bb
new file mode 100644
index 0000000000..b57618724d
--- /dev/null
+++ b/meta/recipes-devtools/perl/libmodule-build-perl_0.4234.bb
@@ -0,0 +1,138 @@
+SUMMARY = "Module::Build - Build and install Perl modules"
+DESCRIPTION = "Many Perl distributions use a Build.PL file instead of a \
+Makefile.PL file to drive distribution configuration, build, test and \
+installation. Traditionally, Build.PL uses Module::Build as the underlying \
+build system. This module provides a simple, lightweight, drop-in replacement. \
+Whereas Module::Build has over 6,700 lines of code; this module has less than \
+120, yet supports the features needed by most distributions."
+
+SECTION = "libs"
+
+HOMEPAGE = "https://metacpan.org/release/Module-Build"
+
+LICENSE = "Artistic-1.0 | GPL-1.0-or-later"
+LIC_FILES_CHKSUM = "file://README;beginline=881;endline=886;md5=3027f56c664545e54678c26b7f1ac19c"
+
+SRC_URI = "${CPAN_MIRROR}/authors/id/L/LE/LEONT/Module-Build-${PV}.tar.gz \
+ file://run-ptest \
+ "
+SRC_URI[sha256sum] = "66aeac6127418be5e471ead3744648c766bd01482825c5b66652675f2bc86a8f"
+
+S = "${WORKDIR}/Module-Build-${PV}"
+
+inherit cpan_build ptest-perl
+
+# From:
+# https://github.com/rehsack/meta-cpan/blob/master/recipes-devel/module-build-perl/module-build-perl_0.4216.bb
+#
+do_patch_module_build () {
+ cd ${S}
+ sed -i -e 's,my $interpreter = $self->{properties}{perl};,my $interpreter = "${bindir}/perl";,g' lib/Module/Build/Base.pm
+}
+
+do_patch[postfuncs] += "do_patch_module_build"
+
+EXTRA_CPAN_BUILD_FLAGS = "--create_packlist=0"
+
+do_install:prepend () {
+ # We do not have a recipe for libpod-parser-perl which is for
+ # documentation (and is deprecated in favor of Pod::Simple)
+ rm -rf ${B}/t/pod_parser.t
+}
+
+do_install:append () {
+ rm -rf ${D}${docdir}/perl/html
+ sed -i "s:^#!.*:#!/usr/bin/env perl:" ${D}${bindir}/config_data
+}
+
+do_install_ptest() {
+ cp -r ${B}/inc ${D}${PTEST_PATH}
+ cp -r ${B}/blib ${D}${PTEST_PATH}
+ cp -r ${B}/_build ${D}${PTEST_PATH}
+ cp -r ${B}/lib ${D}${PTEST_PATH}
+ chown -R root:root ${D}${PTEST_PATH}
+ sed -i -e "s,'perl' => .*,'perl' => '/usr/bin/perl'\,,g" \
+ -e "s,${STAGING_BINDIR_NATIVE}/perl-native/\.\.,${bindir}/,g" \
+ -e "s,${S},,g" \
+ -e "s,${D},,g" \
+ ${D}${PTEST_PATH}/_build/build_params \
+ ${D}${PTEST_PATH}/_build/runtime_params
+ rm -rf ${D}${PTEST_PATH}/blib/libhtml/site/lib/Module/
+ rm -rf ${D}${PTEST_PATH}/_build/magicnum
+}
+
+RDEPENDS:${PN} += " \
+ perl-module-carp \
+ perl-module-cpan \
+ perl-module-config \
+ perl-module-cwd \
+ perl-module-data-dumper \
+ perl-module-encode \
+ perl-module-extutils-cbuilder \
+ perl-module-extutils-command \
+ perl-module-extutils-install \
+ perl-module-extutils-installed \
+ perl-module-extutils-mkbootstrap \
+ perl-module-extutils-packlist \
+ perl-module-extutils-parsexs \
+ perl-module-file-basename \
+ perl-module-file-compare \
+ perl-module-file-copy \
+ perl-module-file-find \
+ perl-module-file-glob \
+ perl-module-file-path \
+ perl-module-file-spec \
+ perl-module-file-spec-functions \
+ perl-module-getopt-long \
+ perl-module-metadata \
+ perl-module-perl-ostype \
+ perl-module-pod-man \
+ perl-module-tap-harness \
+ perl-module-text-abbrev \
+ perl-module-text-parsewords \
+ perl-module-utf8 \
+"
+
+RDEPENDS:${PN}-ptest += " \
+ packagegroup-core-buildessential \
+ perl-dev \
+ perl-module-blib \
+ perl-module-encode-encoding \
+ perl-module-extutils-cbuilder-base \
+ perl-module-extutils-command-mm \
+ perl-module-extutils-mm-unix \
+ perl-module-file-temp \
+ perl-module-lib \
+ perl-module-parse-cpan-meta \
+ perl-module-perlio \
+ perl-module-perlio-encoding \
+ perl-module-pod-simple-transcodesmart \
+ perl-module-pod-text \
+ perl-module-tap-base \
+ perl-module-tap-formatter-base \
+ perl-module-tap-formatter-file \
+ perl-module-tap-formatter-session \
+ perl-module-tap-harness-env \
+ perl-module-tap-parser \
+ perl-module-tap-parser-scheduler \
+ perl-module-test-harness \
+ perl-module-test-more \
+"
+
+RPROVIDES:${PN} += "\
+ libmodule-build-base-perl \
+ libmodule-build-compat-perl \
+ libmodule-build-config-perl \
+ libmodule-build-cookbook-perl \
+ libmodule-build-dumper-perl \
+ libmodule-build-notes-perl \
+ libmodule-build-ppmaker-perl \
+ libmodule-build-platform-default-perl \
+ libmodule-build-platform-unix-perl \
+ libmodule-build-podparser-perl \
+"
+
+# t/xs.t RDEPENDS on "EXTERN.h" provided by perl-dev
+INSANE_SKIP:${PN}-ptest = "dev-deps"
+
+BBCLASSEXTEND = "native"
diff --git a/meta/recipes-devtools/perl/libtest-fatal-perl_0.017.bb b/meta/recipes-devtools/perl/libtest-fatal-perl_0.017.bb
new file mode 100644
index 0000000000..1c3a7e5136
--- /dev/null
+++ b/meta/recipes-devtools/perl/libtest-fatal-perl_0.017.bb
@@ -0,0 +1,36 @@
+SUMMARY = "Incredibly simple helpers for testing code with exceptions"
+DESCRIPTION = "Test::Fatal is an alternative to the popular Test::Exception.\
+It does much less, but should allow greater flexibility in testing \
+exception-throwing code with about the same amount of typing."
+HOMEPAGE = "https://github.com/rjbs/Test-Fatal"
+BUGTRACKER = "https://github.com/rjbs/Test-Fatal/issues"
+SECTION = "libs"
+LICENSE = "Artistic-1.0 | GPL-1.0-or-later"
+
+LIC_FILES_CHKSUM = "file://LICENSE;md5=b5c851290cab1dda12fcfb0e9ec43639"
+
+SRC_URI = "${CPAN_MIRROR}/authors/id/R/RJ/RJBS/Test-Fatal-${PV}.tar.gz"
+
+SRC_URI[sha256sum] = "37dfffdafb84b762efe96b02fb2aa41f37026c73e6b83590db76229697f3c4a6"
+
+S = "${WORKDIR}/Test-Fatal-${PV}"
+
+inherit cpan ptest-perl
+
+RDEPENDS:${PN} += "\
+ libtry-tiny-perl \
+ perl-module-carp \
+ perl-module-exporter \
+ perl-module-test-builder \
+"
+
+RDEPENDS:${PN}-ptest += "\
+ perl-module-extutils-makemaker \
+ perl-module-extutils-mm-unix \
+ perl-module-file-spec \
+ perl-module-overload \
+ perl-module-test-builder-tester \
+ perl-module-test-more \
+"
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/perl/libtest-needs-perl_0.002009.bb b/meta/recipes-devtools/perl/libtest-needs-perl_0.002009.bb
deleted file mode 100644
index 43a9a6b7d3..0000000000
--- a/meta/recipes-devtools/perl/libtest-needs-perl_0.002009.bb
+++ /dev/null
@@ -1,29 +0,0 @@
-SUMMARY = "Skip tests when modules not available"
-DESCRIPTION = "Skip test scripts if modules are not available. \
-The requested modules will be loaded, and optionally have their versions \
-checked. If the module is missing, the test script will be skipped. Modules \
-that are found but fail to compile will exit with an error rather than skip."
-
-HOMEPAGE = "https://metacpan.org/release/Test-Needs"
-SECTION = "libs"
-LICENSE = "Artistic-1.0 | GPL-1.0-or-later"
-
-CPAN_NAME = "Test-Needs"
-CPAN_AUTHOR = "HAARG"
-
-LIC_FILES_CHKSUM = "file://README;md5=3f3ccd21a0a48aa313db212cc3b1bc09;beginline=88;endline=89"
-
-DEPENDS += "perl"
-
-SRC_URI = "https://cpan.metacpan.org/authors/id/H/HA/${CPAN_AUTHOR}/${CPAN_NAME}-${PV}.tar.gz"
-
-SRC_URI[md5sum] = "5643cd323afb77d20363acbaf9b12bcc"
-SRC_URI[sha256sum] = "571c21193ad16195df58b06b268798796a391b398c443271721d2cc0fb7c4ac3"
-
-S = "${WORKDIR}/${CPAN_NAME}-${PV}"
-
-inherit cpan ptest-perl
-
-RDEPENDS:${PN}-ptest += "perl-module-test-more perl-module-ipc-open3 perl-module-lib perl-module-version"
-
-BBCLASSEXTEND = "native"
diff --git a/meta/recipes-devtools/perl/libtest-needs-perl_0.002010.bb b/meta/recipes-devtools/perl/libtest-needs-perl_0.002010.bb
new file mode 100644
index 0000000000..79a06170e0
--- /dev/null
+++ b/meta/recipes-devtools/perl/libtest-needs-perl_0.002010.bb
@@ -0,0 +1,28 @@
+SUMMARY = "Skip tests when modules not available"
+DESCRIPTION = "Skip test scripts if modules are not available. \
+The requested modules will be loaded, and optionally have their versions \
+checked. If the module is missing, the test script will be skipped. Modules \
+that are found but fail to compile will exit with an error rather than skip."
+
+HOMEPAGE = "https://metacpan.org/release/Test-Needs"
+SECTION = "libs"
+LICENSE = "Artistic-1.0 | GPL-1.0-or-later"
+
+CPAN_NAME = "Test-Needs"
+CPAN_AUTHOR = "HAARG"
+
+LIC_FILES_CHKSUM = "file://README;md5=3f3ccd21a0a48aa313db212cc3b1bc09;beginline=88;endline=89"
+
+DEPENDS += "perl"
+
+SRC_URI = "https://cpan.metacpan.org/authors/id/H/HA/${CPAN_AUTHOR}/${CPAN_NAME}-${PV}.tar.gz"
+
+SRC_URI[sha256sum] = "923ffdc78fcba96609753e4bae26b0ba0186893de4a63cd5236e012c7c90e208"
+
+S = "${WORKDIR}/${CPAN_NAME}-${PV}"
+
+inherit cpan ptest-perl
+
+RDEPENDS:${PN}-ptest += "perl-module-test-more perl-module-ipc-open3 perl-module-lib perl-module-version"
+
+BBCLASSEXTEND = "native"
diff --git a/meta/recipes-devtools/perl/libtest-warnings-perl_0.033.bb b/meta/recipes-devtools/perl/libtest-warnings-perl_0.033.bb
new file mode 100644
index 0000000000..17b4cc3c9e
--- /dev/null
+++ b/meta/recipes-devtools/perl/libtest-warnings-perl_0.033.bb
@@ -0,0 +1,36 @@
+SUMMARY = "Test::Warnings - Test for warnings and the lack of them"
+DESCRIPTION = "If you've ever tried to use Test::NoWarnings to confirm there are no \
+warnings generated by your tests, combined with the convenience of \
+\\"done_testing\\" to not have to declare a test count, you'll have discovered \
+that these two features do not play well together, as the test count will \
+be calculated *before* the warnings test is run, resulting in a TAP error. \
+(See "examples/test_nowarnings.pl" in this distribution for a \
+demonstration.)"
+HOMEPAGE = "https://github.com/karenetheridge/Test-Warnings"
+BUGTRACKER = "https://rt.cpan.org/Public/Dist/Display.html?Name=Test-Warnings"
+SECTION = "libs"
+LICENSE = "Artistic-1.0-Perl | GPL-1.0-or-later"
+
+LIC_FILES_CHKSUM = "file://LICENCE;md5=f98106ac3cc05d9cbebcdb8fbf7b7815"
+
+SRC_URI = "${CPAN_MIRROR}/authors/id/E/ET/ETHER/Test-Warnings-${PV}.tar.gz"
+
+SRC_URI[sha256sum] = "b9c375719f2c61c5f97aa5ee6cf4c901a972347c415969379b0b51f67c48bbcb"
+
+S = "${WORKDIR}/Test-Warnings-${PV}"
+
+inherit cpan ptest-perl
+
+RDEPENDS:${PN} += "\
+ perl-module-test-builder \
+"
+
+# Many hidden dependencies and mysterious failures occur without full perl-modules
+RDEPENDS:${PN}-ptest += "perl-modules"
+
+do_install_ptest_perl:append () {
+ cp -r ${B}/t/lib ${D}${PTEST_PATH}/t/
+ chown -R root:root ${D}${PTEST_PATH}/t/lib
+}
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/perl/libtry-tiny-perl_0.31.bb b/meta/recipes-devtools/perl/libtry-tiny-perl_0.31.bb
new file mode 100644
index 0000000000..a3728d8435
--- /dev/null
+++ b/meta/recipes-devtools/perl/libtry-tiny-perl_0.31.bb
@@ -0,0 +1,36 @@
+SUMMARY = "Try::Tiny - Minimal try/catch with proper preservation of $@"
+DESCRIPTION = "This module provides bare bones try/catch/finally statements \
+that are designed to minimize common mistakes with eval blocks, and NOTHING \
+else."
+HOMEPAGE = "https://github.com/p5sagit/Try-Tiny"
+BUGTRACKER = "https://rt.cpan.org/Public/Dist/Display.html?Name=Try-Tiny"
+SECTION = "libs"
+LICENSE = "MIT"
+
+LIC_FILES_CHKSUM = "file://LICENCE;md5=5dc332c2d4aade55f5db244681000091"
+
+SRC_URI = "${CPAN_MIRROR}/authors/id/E/ET/ETHER/Try-Tiny-${PV}.tar.gz"
+
+SRC_URI[sha256sum] = "3300d31d8a4075b26d8f46ce864a1d913e0e8467ceeba6655d5d2b2e206c11be"
+
+S = "${WORKDIR}/Try-Tiny-${PV}"
+
+inherit cpan ptest-perl
+
+RDEPENDS:${PN} += "\
+ perl-module-carp \
+ perl-module-constant \
+ perl-module-exporter \
+"
+RRECOMMENDS:${PN} += "\
+ perl-module-sub-util \
+"
+RDEPENDS:${PN}-ptest += "\
+ perl-module-extutils-makemaker \
+ perl-module-extutils-mm-unix \
+ perl-module-file-spec \
+ perl-module-if \
+ perl-module-test-more \
+"
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/perl/liburi-perl/0001-Skip-TODO-test-cases-that-fail.patch b/meta/recipes-devtools/perl/liburi-perl/0001-Skip-TODO-test-cases-that-fail.patch
new file mode 100644
index 0000000000..ed1f25125f
--- /dev/null
+++ b/meta/recipes-devtools/perl/liburi-perl/0001-Skip-TODO-test-cases-that-fail.patch
@@ -0,0 +1,110 @@
+From 5a4271456104bdf027644c81c3a208cde5cf522e Mon Sep 17 00:00:00 2001
+From: Tim Orling <tim.orling@konsulko.com>
+Date: Thu, 17 Nov 2022 16:33:20 -0800
+Subject: [PATCH] Skip TODO test cases that fail
+
+TODO cases report as "not ok" with ptest-runner
+
+Upstream-Status: Inappropriate [ptest-runner specific]
+
+Signed-off-by: Tim Orling <tim.orling@konsulko.com>
+
+---
+ t/escape-char.t | 20 ++++++++++----------
+ t/iri.t | 18 +++++++++---------
+ t/mailto.t | 12 ++++++------
+ 3 files changed, 25 insertions(+), 25 deletions(-)
+
+diff --git a/t/escape-char.t b/t/escape-char.t
+index c6ce79c..5e62ad5 100644
+--- a/t/escape-char.t
++++ b/t/escape-char.t
+@@ -6,16 +6,16 @@ use warnings;
+ use Test::More;
+ use URI ();
+
+-TODO: {
+- my $str = "http://foo/\xE9";
+- utf8::upgrade($str);
+- my $uri = URI->new($str);
+-
+- local $TODO = 'URI::Escape::escape_char misunderstands utf8';
+-
+- # http://foo/%C3%A9
+- is("$uri", 'http://foo/%E9', 'correctly created a URI from a utf8-upgraded string');
+-}
++#TODO: {
++# my $str = "http://foo/\xE9";
++# utf8::upgrade($str);
++# my $uri = URI->new($str);
++#
++# local $TODO = 'URI::Escape::escape_char misunderstands utf8';
++#
++# # http://foo/%C3%A9
++# is("$uri", 'http://foo/%E9', 'correctly created a URI from a utf8-upgraded string');
++#}
+
+ {
+ my $str = "http://foo/\xE9";
+diff --git a/t/iri.t b/t/iri.t
+index cf983d6..884b36e 100644
+--- a/t/iri.t
++++ b/t/iri.t
+@@ -6,7 +6,7 @@ use Test::More;
+ use Config qw( %Config );
+
+ if (defined $Config{useperlio}) {
+- plan tests=>30;
++ plan tests=>28;
+ } else {
+ plan skip_all=>"this perl doesn't support PerlIO layers";
+ }
+@@ -67,17 +67,17 @@ is $u->as_iri, "http://➡.ws/";
+ # draft-duerst-iri-bis.txt examples (section 3.7.1):
+ is(URI->new("http://www.example.org/D%C3%BCrst")->as_iri, "http://www.example.org/D\xFCrst");
+ is(URI->new("http://www.example.org/D%FCrst")->as_iri, "http://www.example.org/D%FCrst");
+-TODO: {
+- local $TODO = "some chars (like U+202E, RIGHT-TO-LEFT OVERRIDE) need to stay escaped";
+-is(URI->new("http://xn--99zt52a.example.org/%e2%80%ae")->as_iri, "http://\x{7D0D}\x{8C46}.example.org/%e2%80%ae");
+-}
++#TODO: {
++# local $TODO = "some chars (like U+202E, RIGHT-TO-LEFT OVERRIDE) need to stay escaped";
++#is(URI->new("http://xn--99zt52a.example.org/%e2%80%ae")->as_iri, "http://\x{7D0D}\x{8C46}.example.org/%e2%80%ae");
++#}
+
+ # try some URLs that can't be IDNA encoded (fallback to encoded UTF8 bytes)
+ $u = URI->new("http://" . ("ü" x 128));
+ is $u, "http://" . ("%C3%BC" x 128);
+ is $u->host, ("\xC3\xBC" x 128);
+-TODO: {
+- local $TODO = "should ihost decode UTF8 bytes?";
+- is $u->ihost, ("ü" x 128);
+-}
++#TODO: {
++# local $TODO = "should ihost decode UTF8 bytes?";
++# is $u->ihost, ("ü" x 128);
++#}
+ is $u->as_iri, "http://" . ("ü" x 128);
+diff --git a/t/mailto.t b/t/mailto.t
+index 79e9a13..c68cfb2 100644
+--- a/t/mailto.t
++++ b/t/mailto.t
+@@ -48,12 +48,12 @@ $u = URI->new('mailto:user+detail@example.com');
+ is $u->to, 'user+detail@example.com', 'subaddress with `+` parsed correctly';
+ is $u, 'mailto:user+detail@example.com', '... and stringification works';
+
+-TODO: {
+- local $TODO = "We can't handle quoted local parts without properly parsing the email addresses";
+- $u = URI->new('mailto:"foo bar+baz"@example.com');
+- is $u->to, '"foo bar+baz"@example.com', 'address with quoted local part containing spaces is parsed correctly';
+- is $u, 'mailto:%22foo%20bar+baz%22@example.com', '... and stringification works';
+-}
++#TODO: {
++# local $TODO = "We can't handle quoted local parts without properly parsing the email addresses";
++# $u = URI->new('mailto:"foo bar+baz"@example.com');
++# is $u->to, '"foo bar+baz"@example.com', 'address with quoted local part containing spaces is parsed correctly';
++# is $u, 'mailto:%22foo%20bar+baz%22@example.com', '... and stringification works';
++#}
+
+ # RFC 5321 (4.1.3) - Address Literals
+
diff --git a/meta/recipes-devtools/perl/liburi-perl_5.08.bb b/meta/recipes-devtools/perl/liburi-perl_5.08.bb
deleted file mode 100644
index 5428c9ddf9..0000000000
--- a/meta/recipes-devtools/perl/liburi-perl_5.08.bb
+++ /dev/null
@@ -1,50 +0,0 @@
-SUMMARY = "Perl module to manipulate and access URI strings"
-DESCRIPTION = "This package contains the URI.pm module with friends. \
-The module implements the URI class. URI objects can be used to access \
-and manipulate the various components that make up these strings."
-
-HOMEPAGE = "http://search.cpan.org/dist/URI/"
-SECTION = "libs"
-LICENSE = "Artistic-1.0 | GPL-1.0-or-later"
-
-LIC_FILES_CHKSUM = "file://LICENSE;md5=c453e94fae672800f83bc1bd7a38b53f"
-
-DEPENDS += "perl"
-
-SRC_URI = "http://www.cpan.org/authors/id/E/ET/ETHER/URI-${PV}.tar.gz"
-
-SRC_URI[md5sum] = "cdbbf8f8ccdec5c162c8505077a35c2c"
-SRC_URI[sha256sum] = "7e2c6fe3b1d5947da334fa558a96e748aaa619213b85bcdce5b5347d4d26c46e"
-
-S = "${WORKDIR}/URI-${PV}"
-
-EXTRA_CPANFLAGS = "EXPATLIBPATH=${STAGING_LIBDIR} EXPATINCPATH=${STAGING_INCDIR}"
-
-inherit cpan ptest-perl
-
-do_compile() {
- export LIBC="$(find ${STAGING_DIR_TARGET}/${base_libdir}/ -name 'libc-*.so')"
- cpan_do_compile
-}
-
-do_install:prepend() {
- # these tests require "-T" (taint) command line option
- rm -rf ${B}/t/cwd.t
- rm -rf ${B}/t/file.t
-}
-
-RDEPENDS:${PN} += "perl-module-integer perl-module-mime-base64"
-RDEPENDS:${PN}-ptest += " \
- libtest-needs-perl \
- perl-module-test-more \
- perl-module-test \
- perl-module-utf8 \
- perl-module-extutils-makemaker \
- perl-module-net-domain \
- perl-module-encode \
- perl-module-extutils-mm-unix \
- perl-module-file-spec-functions \
- perl-module-perlio \
-"
-
-BBCLASSEXTEND = "native"
diff --git a/meta/recipes-devtools/perl/liburi-perl_5.28.bb b/meta/recipes-devtools/perl/liburi-perl_5.28.bb
new file mode 100644
index 0000000000..1fc0efd3e5
--- /dev/null
+++ b/meta/recipes-devtools/perl/liburi-perl_5.28.bb
@@ -0,0 +1,56 @@
+SUMMARY = "Perl module to manipulate and access URI strings"
+DESCRIPTION = "This package contains the URI.pm module with friends. \
+The module implements the URI class. URI objects can be used to access \
+and manipulate the various components that make up these strings."
+HOMEPAGE = "https://metacpan.org/dist/URI"
+SECTION = "libs"
+LICENSE = "Artistic-1.0 | GPL-1.0-or-later"
+
+LIC_FILES_CHKSUM = "file://LICENSE;md5=9944b87af51186f848ae558344aded9f"
+
+SRC_URI = "${CPAN_MIRROR}/authors/id/O/OA/OALDERS/URI-${PV}.tar.gz \
+ file://0001-Skip-TODO-test-cases-that-fail.patch \
+ "
+
+SRC_URI[sha256sum] = "e7985da359b15efd00917fa720292b711c396f2f9f9a7349e4e7dec74aa79765"
+
+S = "${WORKDIR}/URI-${PV}"
+
+EXTRA_CPANFLAGS = "EXPATLIBPATH=${STAGING_LIBDIR} EXPATINCPATH=${STAGING_INCDIR}"
+
+inherit cpan ptest-perl
+
+do_compile() {
+ export LIBC="$(find ${STAGING_DIR_TARGET}/${base_libdir}/ -name 'libc-*.so')"
+ cpan_do_compile
+}
+
+do_install:prepend() {
+ # these tests require "-T" (taint) command line option
+ rm -rf ${B}/t/cwd.t
+ rm -rf ${B}/t/file.t
+}
+
+RDEPENDS:${PN} += "\
+ perl-module-integer \
+ perl-module-mime-base64 \
+"
+
+RDEPENDS:${PN}-ptest += " \
+ libtest-fatal-perl \
+ libtest-needs-perl \
+ libtest-warnings-perl \
+ perl-module-encode \
+ perl-module-encode-encoding \
+ perl-module-extutils-makemaker \
+ perl-module-extutils-mm-unix \
+ perl-module-file-spec-functions \
+ perl-module-net-domain \
+ perl-module-perlio \
+ perl-module-perlio-encoding \
+ perl-module-test \
+ perl-module-test-more \
+ perl-module-utf8 \
+"
+
+BBCLASSEXTEND = "native"
diff --git a/meta/recipes-devtools/perl/libxml-parser-perl/0001-Makefile.PL-make-check_lib-cross-friendly.patch b/meta/recipes-devtools/perl/libxml-parser-perl/0001-Makefile.PL-make-check_lib-cross-friendly.patch
new file mode 100644
index 0000000000..457fb77384
--- /dev/null
+++ b/meta/recipes-devtools/perl/libxml-parser-perl/0001-Makefile.PL-make-check_lib-cross-friendly.patch
@@ -0,0 +1,28 @@
+From bd9b0e10843da72276982bd1394ade734fea0289 Mon Sep 17 00:00:00 2001
+From: Tim Orling <tim.orling@konsulko.com>
+Date: Fri, 2 Feb 2024 21:15:34 -0800
+Subject: [PATCH] Makefile.PL: make check_lib cross friendly
+
+lib => qw(expat) does not seem to respect EXPATLIBPATH and
+EXPATINCPATH when we are cross-compiling.
+
+Upstream-Status: Inappropriate [OE specific]
+
+Signed-off-by: Tim Orling <tim.orling@konsulko.com>
+---
+ Makefile.PL | 2 +-
+ 1 file changed, 1 insertion(+), 1 deletion(-)
+
+diff --git a/Makefile.PL b/Makefile.PL
+index 505d1df..19f428b 100644
+--- a/Makefile.PL
++++ b/Makefile.PL
+@@ -30,7 +30,7 @@ foreach (@ARGV) {
+
+ unless (
+ check_lib( # fill in what you prompted the user for here
+- lib => [qw(expat)],
++ #lib => [qw(expat)],
+ header => ['expat.h'],
+ incpath => $expat_incpath,
+ ( $expat_libpath ? ( libpath => $expat_libpath ) : () ),
diff --git a/meta/recipes-devtools/perl/libxml-parser-perl_2.46.bb b/meta/recipes-devtools/perl/libxml-parser-perl_2.46.bb
deleted file mode 100644
index 6cd40bd292..0000000000
--- a/meta/recipes-devtools/perl/libxml-parser-perl_2.46.bb
+++ /dev/null
@@ -1,59 +0,0 @@
-SUMMARY = "XML::Parser - A perl module for parsing XML documents"
-HOMEPAGE = "https://libexpat.github.io/"
-SECTION = "libs"
-LICENSE = "Artistic-1.0 | GPL-1.0-or-later"
-LIC_FILES_CHKSUM = "file://Parser.pm;beginline=1;endline=7;md5=d12cc778c80fc4c518f0e5dee29fd5fb"
-
-DEPENDS += "expat"
-
-SRC_URI = "http://www.cpan.org/modules/by-module/XML/XML-Parser-${PV}.tar.gz \
- file://ptest-perl/run-ptest \
- file://0001-CheckLib.pm-do-not-attempt-to-run-a-cross-executable.patch \
- "
-SRC_URI[md5sum] = "80bb18a8e6240fcf7ec2f7b57601c170"
-SRC_URI[sha256sum] = "d331332491c51cccfb4cb94ffc44f9cd73378e618498d4a37df9e043661c515d"
-
-S = "${WORKDIR}/XML-Parser-${PV}"
-
-EXTRA_CPANFLAGS = "EXPATLIBPATH=${STAGING_LIBDIR} EXPATINCPATH=${STAGING_INCDIR} CC='${CC}' LD='${CCLD}' FULL_AR='${AR}'"
-
-inherit cpan ptest-perl
-
-# fix up sub MakeMaker project as arguments don't get propagated though
-# see https://rt.cpan.org/Public/Bug/Display.html?id=28632
-do_configure:append:class-target() {
- sed -E \
- -e 's:-L${STAGING_LIBDIR}::g' -e 's:-I${STAGING_INCDIR}::g' \
- -i Makefile Expat/Makefile
-}
-
-do_configure:append() {
- sed -e 's:--sysroot=.*\(\s\|$\):--sysroot=${STAGING_DIR_TARGET} :g' \
- -i Makefile Expat/Makefile
- sed 's:^FULL_AR = .*:FULL_AR = ${AR}:g' -i Expat/Makefile
- # make sure these two do not build in parallel
- sed 's!^$(INST_DYNAMIC):!$(INST_DYNAMIC): $(BOOTSTRAP)!' -i Expat/Makefile
-}
-
-do_compile() {
- export LIBC="$(find ${STAGING_DIR_TARGET}/${base_libdir}/ -name 'libc-*.so')"
- cpan_do_compile
-}
-
-do_compile:class-native() {
- cpan_do_compile
-}
-
-do_install_ptest() {
- sed -i -e "s:/usr/local/bin/perl:/usr/bin/perl:g" ${B}/samples/xmlstats
- sed -i -e "s:/usr/local/bin/perl:/usr/bin/perl:g" ${B}/samples/xmlfilter
- sed -i -e "s:/usr/local/bin/perl:/usr/bin/perl:g" ${B}/samples/xmlcomments
- sed -i -e "s:/usr/local/bin/perl:/usr/bin/perl:g" ${B}/samples/canonical
- cp -r ${B}/samples ${D}${PTEST_PATH}
- chown -R root:root ${D}${PTEST_PATH}/samples
-}
-
-RDEPENDS:${PN} += "perl-module-carp perl-module-file-spec"
-RDEPENDS:${PN}-ptest += "perl-module-filehandle perl-module-if perl-module-test perl-module-test-more"
-
-BBCLASSEXTEND="native nativesdk"
diff --git a/meta/recipes-devtools/perl/libxml-parser-perl_2.47.bb b/meta/recipes-devtools/perl/libxml-parser-perl_2.47.bb
new file mode 100644
index 0000000000..cffc133a45
--- /dev/null
+++ b/meta/recipes-devtools/perl/libxml-parser-perl_2.47.bb
@@ -0,0 +1,42 @@
+SUMMARY = "XML::Parser - A perl module for parsing XML documents"
+HOMEPAGE = "https://libexpat.github.io/"
+SECTION = "libs"
+LICENSE = "Artistic-2.0"
+LIC_FILES_CHKSUM = "file://LICENSE;md5=4342f85bf14a1fdd6a751573f1e61c03"
+
+DEPENDS += "expat"
+
+SRC_URI = "${CPAN_MIRROR}/modules/by-module/XML/XML-Parser-${PV}.tar.gz \
+ file://0001-Makefile.PL-make-check_lib-cross-friendly.patch \
+ "
+
+SRC_URI[sha256sum] = "ad4aae643ec784f489b956abe952432871a622d4e2b5c619e8855accbfc4d1d8"
+
+S = "${WORKDIR}/XML-Parser-${PV}"
+
+EXTRA_CPANFLAGS = "EXPATLIBPATH=${STAGING_LIBDIR} EXPATINCPATH=${STAGING_INCDIR} CC='${CC}' LD='${CCLD}' FULL_AR='${AR}'"
+
+inherit cpan pkgconfig ptest-perl
+
+do_compile() {
+ export LIBC="$(find ${STAGING_DIR_TARGET}/${base_libdir}/ -name 'libc-*.so')"
+ cpan_do_compile
+}
+
+do_compile:class-native() {
+ cpan_do_compile
+}
+
+do_install_ptest() {
+ sed -i -e "s:/usr/local/bin/perl:/usr/bin/perl:g" ${B}/samples/xmlstats
+ sed -i -e "s:/usr/local/bin/perl:/usr/bin/perl:g" ${B}/samples/xmlfilter
+ sed -i -e "s:/usr/local/bin/perl:/usr/bin/perl:g" ${B}/samples/xmlcomments
+ sed -i -e "s:/usr/local/bin/perl:/usr/bin/perl:g" ${B}/samples/canonical
+ cp -r ${B}/samples ${D}${PTEST_PATH}
+ chown -R root:root ${D}${PTEST_PATH}/samples
+}
+
+RDEPENDS:${PN} += "perl-module-carp perl-module-file-spec"
+RDEPENDS:${PN}-ptest += "perl-module-filehandle perl-module-if perl-module-test perl-module-test-more"
+
+BBCLASSEXTEND="native nativesdk"
diff --git a/meta/recipes-devtools/perl/libxml-perl_0.08.bb b/meta/recipes-devtools/perl/libxml-perl_0.08.bb
index 323853cc2f..89acb64ef5 100644
--- a/meta/recipes-devtools/perl/libxml-perl_0.08.bb
+++ b/meta/recipes-devtools/perl/libxml-perl_0.08.bb
@@ -6,7 +6,6 @@ HOMEPAGE = "http://search.cpan.org/dist/libxml-perl/"
SUMMARY = "Collection of Perl modules for working with XML"
SECTION = "libs"
LICENSE = "Artistic-1.0 | GPL-1.0-or-later"
-PR = "r3"
LIC_FILES_CHKSUM = "file://README;beginline=33;endline=35;md5=1705549eef7577a3d6ba71123a1f0ce8"
@@ -27,4 +26,4 @@ do_compile() {
}
RDEPENDS:${PN} += "perl-module-carp perl-module-overload perl-module-universal perl-module-io-handle"
-RDEPENDS:${PN}-ptest += "libxml-parser-perl perl-module-file-glob"
+RDEPENDS:${PN}-ptest += "libxml-parser-perl perl-module-file-glob perl-module-cwd perl-module-constant"
diff --git a/meta/recipes-devtools/perl/perl-ptest.inc b/meta/recipes-devtools/perl/perl-ptest.inc
index 54c7807571..e07355d3f5 100644
--- a/meta/recipes-devtools/perl/perl-ptest.inc
+++ b/meta/recipes-devtools/perl/perl-ptest.inc
@@ -10,12 +10,12 @@ do_install_ptest () {
sed -e "s:\/usr\/local:${bindir}:g" -i cpan/version/t/*
sed -e "s:\/opt:\/usr:" -i Porting/add-package.pl
sed -e "s:\/local\/gnu\/:\/:" -i hints/cxux.sh
- tar -c --exclude=try --exclude=a.out --exclude='*.o' --exclude=libperl.so* --exclude=Makefile --exclude=makefile --exclude=hostperl \
+ tar -c --exclude=try --exclude=a.out --exclude='*.o' --exclude=libperl.so* --exclude=[Mm]akefile --exclude=hostperl \
--exclude=cygwin --exclude=os2 --exclude=djgpp --exclude=qnx --exclude=symbian --exclude=haiku \
--exclude=vms --exclude=vos --exclude=NetWare --exclude=amigaos4 --exclude=buildcustomize.pl \
--exclude='win32/config.*' --exclude=plan9 --exclude=README.plan9 --exclude=perlplan9.pod --exclude=Configure \
--exclude=veryclean.sh --exclude=realclean.sh --exclude=getioctlsizes \
- --exclude=dl_aix.xs --exclude=sdbm.3 --exclude='cflags.SH' --exclude=makefile.old \
+ --exclude=dl_aix.xs --exclude=sdbm.3 --exclude='cflags.SH' --exclude=[Mm]akefile.old \
--exclude=miniperl --exclude=generate_uudmap --exclude=patches --exclude='config.log' * | ( cd ${D}${PTEST_PATH} && tar -x )
ln -sf ${bindir}/perl ${D}${PTEST_PATH}/t/perl
@@ -56,7 +56,7 @@ python populate_packages:prepend() {
'${PN}-ptest%s', '%s', recursive=True, match_path=True)
}
-RDEPENDS:${PN}-ptest += "${PN}-modules ${PN}-doc sed"
+RDEPENDS:${PN}-ptest += "${PN}-modules ${PN}-doc sed procps-ps"
# The perl-ptest package contains Perl internal modules and generating file
# dependencies for it causes problems.
diff --git a/meta/recipes-devtools/perl/perl_5.34.1.bb b/meta/recipes-devtools/perl/perl_5.34.1.bb
deleted file mode 100644
index 0e9d0c032e..0000000000
--- a/meta/recipes-devtools/perl/perl_5.34.1.bb
+++ /dev/null
@@ -1,413 +0,0 @@
-SUMMARY = "Perl scripting language"
-HOMEPAGE = "http://www.perl.org/"
-DESCRIPTION = "Perl is a highly capable, feature-rich programming language"
-SECTION = "devel"
-LICENSE = "Artistic-1.0 | GPL-1.0-or-later"
-LIC_FILES_CHKSUM = "file://Copying;md5=5b122a36d0f6dc55279a0ebc69f3c60b \
- file://Artistic;md5=71a4d5d9acc18c0952a6df2218bb68da \
- "
-
-
-SRC_URI = "https://www.cpan.org/src/5.0/perl-${PV}.tar.gz;name=perl \
- file://perl-rdepends.txt \
- file://0001-Somehow-this-module-breaks-through-the-perl-wrapper-.patch \
- file://errno_ver.diff \
- file://native-perlinc.patch \
- file://perl-dynloader.patch \
- file://0002-Constant-Fix-up-shebang.patch \
- file://determinism.patch \
- file://0001-cpan-Sys-Syslog-Makefile.PL-Fix-_PATH_LOG-for-determ.patch \
- "
-SRC_URI:append:class-native = " \
- file://perl-configpm-switch.patch \
-"
-SRC_URI:append:class-target = " \
- file://encodefix.patch \
-"
-
-SRC_URI[perl.sha256sum] = "357951a491b0ba1ce3611263922feec78ccd581dddc24a446b033e25acf242a1"
-
-S = "${WORKDIR}/perl-${PV}"
-
-inherit upstream-version-is-even update-alternatives
-
-DEPENDS += "perlcross-native zlib virtual/crypt"
-
-PERL_LIB_VER = "${@'.'.join(d.getVar('PV').split('.')[0:2])}.0"
-
-PACKAGECONFIG ??= "gdbm"
-PACKAGECONFIG[bdb] = ",-Ui_db,db"
-PACKAGECONFIG[gdbm] = ",-Ui_gdbm,gdbm"
-
-# Don't generate comments in enc2xs output files. They are not reproducible
-export ENC2XS_NO_COMMENTS = "1"
-
-do_configure:prepend() {
- cp -rfp ${STAGING_DATADIR_NATIVE}/perl-cross/* ${S}
-}
-
-do_configure:class-target() {
- ./configure --prefix=${prefix} --libdir=${libdir} \
- --target=${TARGET_SYS} \
- -Duseshrplib \
- -Dusethreads \
- -Dsoname=libperl.so.5 \
- -Dvendorprefix=${prefix} \
- -Dvendorlibdir=${libdir} \
- -Darchlibexp=${STAGING_LIBDIR}/perl5/${PV}/${TARGET_ARCH}-linux \
- -Dlibpth='${libdir} ${base_libdir}' \
- -Dglibpth='${libdir} ${base_libdir}' \
- -Alddlflags=' ${LDFLAGS}' \
- ${PACKAGECONFIG_CONFARGS}
-
- #perl.c uses an ARCHLIB_EXP define to generate compile-time code that
- #adds the archlibexp path to @INC during run-time initialization of a
- #new perl interpreter.
-
- #Because we've changed this value in a temporary way to make it
- #possible to use ExtUtils::Embed in the target build (the temporary
- #value in config.sh gets re-stripped out during packaging), the
- #ARCHLIB_EXP value that gets generated still uses the temporary version
- #instead of the original expected version (i.e. becauses it's in the
- #generated config.h, it doesn't get stripped out during packaging like
- #the others in config.sh).
-
- sed -i -e "s,${STAGING_LIBDIR},${libdir},g" config.h
-}
-
-do_configure:class-nativesdk() {
- ./configure --prefix=${prefix} \
- --target=${TARGET_SYS} \
- -Duseshrplib \
- -Dusethreads \
- -Dsoname=libperl.so.5 \
- -Dvendorprefix=${prefix} \
- -Darchlibexp=${STAGING_LIBDIR}/perl5/${PV}/${TARGET_ARCH}-linux \
- -Alddlflags=' ${LDFLAGS}' \
- ${PACKAGECONFIG_CONFARGS}
-
- # See the comment above
- sed -i -e "s,${STAGING_LIBDIR},${libdir},g" config.h
-}
-
-do_configure:class-native() {
- ./configure --prefix=${prefix} \
- -Dbin=${bindir}/perl-native \
- -Duseshrplib \
- -Dusethreads \
- -Dsoname=libperl.so.5 \
- -Dvendorprefix=${prefix} \
- -Ui_xlocale \
- -Alddlflags=' ${LDFLAGS}' \
- ${PACKAGECONFIG_CONFARGS}
-}
-
-do_configure:append() {
- if [ -n "$SOURCE_DATE_EPOCH" ]; then
- PERL_BUILD_DATE="$(${PYTHON} -c "\
-from datetime import datetime, timezone; \
-print(datetime.fromtimestamp($SOURCE_DATE_EPOCH, timezone.utc).strftime('%a %b %d %H:%M:%S %Y')) \
- ")"
- echo "#define PERL_BUILD_DATE \"$PERL_BUILD_DATE\"" >> config.h
- fi
-}
-
-do_compile() {
- oe_runmake
- # This isn't generated reliably so delete and re-generate.
- # https://github.com/arsv/perl-cross/issues/86
-
- if [ -e pod/perltoc.pod ]; then
- bbnote Rebuilding perltoc.pod
- rm -f pod/perltoc.pod
- oe_runmake pod/perltoc.pod
- fi
-}
-
-do_install() {
- oe_runmake 'DESTDIR=${D}' install
-
- install -d ${D}${libdir}/perl5
- install -d ${D}${libdir}/perl5/${PV}/
- install -d ${D}${libdir}/perl5/${PV}/ExtUtils/
-
- # Save native config
- install config.sh ${D}${libdir}/perl5
- install lib/Config.pm ${D}${libdir}/perl5/${PV}/
- install lib/ExtUtils/typemap ${D}${libdir}/perl5/${PV}/ExtUtils/
-
- # Fix up shared library
- dir=$(echo ${D}/${libdir}/perl5/${PV}/*/CORE)
- rm $dir/libperl.so
- ln -sf ../../../../libperl.so.${PERL_LIB_VER} $dir/libperl.so
-
- # Try to catch Bug #13946
- if [ -e ${D}/${libdir}/perl5/${PV}/Storable.pm ]; then
- bbfatal 'non-arch specific Storable.pm found! See https://bugzilla.yoctoproject.org/show_bug.cgi?id=13946'
- fi
-}
-
-do_install:append:class-target() {
- # This is used to substitute target configuration when running native perl via perl-configpm-switch.patch
- ln -s Config_heavy.pl ${D}${libdir}/perl5/${PV}/${TARGET_ARCH}-linux/Config_heavy-target.pl
-
- # This contains host-specific information used for building miniperl (a helper executable built with host compiler)
- # and therefore isn't reproducible. I believe the file isn't actually needed on target.
- rm ${D}${libdir}/perl5/${PV}/${TARGET_ARCH}-linux/CORE/xconfig.h
-}
-
-do_install:append:class-nativesdk() {
- # This is used to substitute target configuration when running native perl via perl-configpm-switch.patch
- ln -s Config_heavy.pl ${D}${libdir}/perl5/${PV}/${TARGET_ARCH}-linux/Config_heavy-target.pl
-
- create_wrapper ${D}${bindir}/perl \
- PERL5LIB='$PERL5LIB:${SDKPATHNATIVE}/${libdir_nativesdk}/perl5/site_perl/${PV}:${SDKPATHNATIVE}/${libdir_nativesdk}/perl5/vendor_perl/${PV}:${SDKPATHNATIVE}/${libdir_nativesdk}/perl5/${PV}'
-}
-
-do_install:append:class-native () {
- # Those wrappers mean that perl installed from sstate (which may change
- # path location) works and that in the nativesdk case, the SDK can be
- # installed to a different location from the one it was built for.
- create_wrapper ${D}${bindir}/perl-native/perl PERL5LIB='$PERL5LIB:${STAGING_LIBDIR}/perl5/site_perl/${PV}:${STAGING_LIBDIR}/perl5/vendor_perl/${PV}:${STAGING_LIBDIR}/perl5/${PV}'
-
- # Use /usr/bin/env nativeperl for the perl script.
- for f in `grep -Il '#! *${bindir}/perl' ${D}/${bindir}/*`; do
- sed -i -e 's|${bindir}/perl|/usr/bin/env nativeperl|' $f
- done
-}
-
-PACKAGE_PREPROCESS_FUNCS += "perl_package_preprocess"
-
-perl_package_preprocess () {
- # Fix up installed configuration
- sed -i -e "s,${D},,g" \
- -e "s,${DEBUG_PREFIX_MAP},,g" \
- -e "s,--sysroot=${STAGING_DIR_HOST},,g" \
- -e "s,-isystem${STAGING_INCDIR} ,,g" \
- -e "s,${STAGING_LIBDIR},${libdir},g" \
- -e "s,${STAGING_BINDIR},${bindir},g" \
- -e "s,${STAGING_INCDIR},${includedir},g" \
- -e "s,${STAGING_BINDIR_NATIVE}/perl-native/,${bindir}/,g" \
- -e "s,${STAGING_BINDIR_NATIVE}/,,g" \
- -e "s,${STAGING_BINDIR_TOOLCHAIN}/${TARGET_PREFIX},${bindir},g" \
- -e 's:${RECIPE_SYSROOT}::g' \
- ${PKGD}${bindir}/h2xs.perl \
- ${PKGD}${bindir}/h2ph.perl \
- ${PKGD}${bindir}/pod2man.perl \
- ${PKGD}${bindir}/pod2text.perl \
- ${PKGD}${bindir}/pod2usage.perl \
- ${PKGD}${bindir}/podchecker.perl \
- ${PKGD}${libdir}/perl5/${PV}/${TARGET_ARCH}-linux/CORE/config.h \
- ${PKGD}${libdir}/perl5/${PV}/${TARGET_ARCH}-linux/CORE/perl.h \
- ${PKGD}${libdir}/perl5/${PV}/${TARGET_ARCH}-linux/CORE/pp.h \
- ${PKGD}${libdir}/perl5/${PV}/Config.pm \
- ${PKGD}${libdir}/perl5/${PV}/${TARGET_ARCH}-linux/Config.pm \
- ${PKGD}${libdir}/perl5/${PV}/${TARGET_ARCH}-linux/Config.pod \
- ${PKGD}${libdir}/perl5/${PV}/${TARGET_ARCH}-linux/Config_git.pl \
- ${PKGD}${libdir}/perl5/${PV}/${TARGET_ARCH}-linux/Config_heavy.pl \
- ${PKGD}${libdir}/perl5/${PV}/ExtUtils/Liblist/Kid.pm \
- ${PKGD}${libdir}/perl5/${PV}/FileCache.pm \
- ${PKGD}${libdir}/perl5/${PV}/pod/*.pod \
- ${PKGD}${libdir}/perl5/config.sh
-}
-
-inherit update-alternatives
-
-ALTERNATIVE_PRIORITY = "100"
-
-ALTERNATIVE:${PN}-misc = "corelist cpan enc2xs encguess h2ph h2xs instmodsh json_pp libnetcfg \
- piconv pl2pm pod2html pod2man pod2text pod2usage podchecker \
- prove ptar ptardiff ptargrep shasum splain streamzip xsubpp zipdetails"
-ALTERNATIVE_LINK_NAME[corelist] = "${bindir}/corelist"
-ALTERNATIVE_LINK_NAME[cpan] = "${bindir}/cpan"
-ALTERNATIVE_LINK_NAME[enc2xs] = "${bindir}/enc2xs"
-ALTERNATIVE_LINK_NAME[encguess] = "${bindir}/encguess"
-ALTERNATIVE_LINK_NAME[h2ph] = "${bindir}/h2ph"
-ALTERNATIVE_LINK_NAME[h2xs] = "${bindir}/h2xs"
-ALTERNATIVE_LINK_NAME[instmodsh] = "${bindir}/instmodsh"
-ALTERNATIVE_LINK_NAME[json_pp] = "${bindir}/json_pp"
-ALTERNATIVE_LINK_NAME[libnetcfg] = "${bindir}/libnetcfg"
-ALTERNATIVE_LINK_NAME[piconv] = "${bindir}/piconv"
-ALTERNATIVE_LINK_NAME[pl2pm] = "${bindir}/pl2pm"
-ALTERNATIVE_LINK_NAME[pod2html] = "${bindir}/pod2html"
-ALTERNATIVE_LINK_NAME[pod2man] = "${bindir}/pod2man"
-ALTERNATIVE_LINK_NAME[pod2text] = "${bindir}/pod2text"
-ALTERNATIVE_LINK_NAME[pod2usage] = "${bindir}/pod2usage"
-ALTERNATIVE_LINK_NAME[podchecker] = "${bindir}/podchecker"
-ALTERNATIVE_LINK_NAME[prove] = "${bindir}/prove"
-ALTERNATIVE_LINK_NAME[ptar] = "${bindir}/ptar"
-ALTERNATIVE_LINK_NAME[ptardiff] = "${bindir}/ptardiff"
-ALTERNATIVE_LINK_NAME[ptargrep] = "${bindir}/ptargrep"
-ALTERNATIVE_LINK_NAME[shasum] = "${bindir}/shasum"
-ALTERNATIVE_LINK_NAME[splain] = "${bindir}/splain"
-ALTERNATIVE_LINK_NAME[streamzip] = "${bindir}/streamzip"
-ALTERNATIVE_LINK_NAME[xsubpp] = "${bindir}/xsubpp"
-ALTERNATIVE_LINK_NAME[zipdetails] = "${bindir}/zipdetails"
-
-require perl-ptest.inc
-
-FILES:${PN} = "${bindir}/perl ${bindir}/perl.real ${bindir}/perl${PV} ${libdir}/libperl.so* \
- ${libdir}/perl5/site_perl \
- ${libdir}/perl5/${PV}/Config.pm \
- ${libdir}/perl5/${PV}/${TARGET_ARCH}-linux/Config.pm \
- ${libdir}/perl5/${PV}/*/Config_git.pl \
- ${libdir}/perl5/${PV}/*/Config_heavy-target.pl \
- ${libdir}/perl5/config.sh \
- ${libdir}/perl5/${PV}/strict.pm \
- ${libdir}/perl5/${PV}/warnings.pm \
- ${libdir}/perl5/${PV}/warnings \
- ${libdir}/perl5/${PV}/vars.pm \
- ${libdir}/perl5/site_perl \
- ${libdir}/perl5/${PV}/ExtUtils/MANIFEST.SKIP \
- ${libdir}/perl5/${PV}/ExtUtils/xsubpp \
- ${libdir}/perl5/${PV}/ExtUtils/typemap \
- "
-RPROVIDES:${PN} += "perl-module-strict perl-module-vars perl-module-config perl-module-warnings \
- perl-module-warnings-register"
-
-FILES:${PN}-staticdev:append = " ${libdir}/perl5/${PV}/*/CORE/libperl.a"
-
-FILES:${PN}-dev:append = " ${libdir}/perl5/${PV}/*/CORE"
-
-FILES:${PN}-doc:append = " ${libdir}/perl5/${PV}/Unicode/Collate/*.txt \
- ${libdir}/perl5/${PV}/*/.packlist \
- ${libdir}/perl5/${PV}/Encode/encode.h \
- "
-PACKAGES += "${PN}-misc"
-
-FILES:${PN}-misc = "${bindir}/*"
-
-PACKAGES += "${PN}-pod"
-
-FILES:${PN}-pod = "${libdir}/perl5/${PV}/pod \
- ${libdir}/perl5/${PV}/*.pod \
- ${libdir}/perl5/${PV}/*/*.pod \
- ${libdir}/perl5/${PV}/*/*/*.pod \
- ${libdir}/perl5/${PV}/*/*/*/*.pod \
- "
-
-PACKAGES += "${PN}-module-cpan ${PN}-module-unicore"
-
-FILES:${PN}-module-cpan += "${libdir}/perl5/${PV}/CPAN \
- "
-FILES:${PN}-module-unicore += "${libdir}/perl5/${PV}/unicore"
-
-ALTERNATIVE_PRIORITY = "40"
-ALTERNATIVE:${PN}-doc = "Thread.3"
-ALTERNATIVE_LINK_NAME[Thread.3] = "${mandir}/man3/Thread.3"
-
-# Create a perl-modules package recommending all the other perl
-# packages (actually the non modules packages and not created too)
-ALLOW_EMPTY:${PN}-modules = "1"
-PACKAGES += "${PN}-modules "
-
-PACKAGESPLITFUNCS:prepend = "split_perl_packages "
-
-python split_perl_packages () {
- libdir = d.expand('${libdir}/perl5/${PV}')
- do_split_packages(d, libdir, r'.*/auto/([^.]*)/[^/]*\.(so|ld|ix|al)', '${PN}-module-%s', 'perl module %s', recursive=True, match_path=True, prepend=False)
- do_split_packages(d, libdir, r'.*linux/([^\/]*)\.pm', '${PN}-module-%s', 'perl module %s', recursive=True, allow_dirs=False, match_path=True, prepend=False)
- do_split_packages(d, libdir, r'Module/([^\/]*)\.pm', '${PN}-module-%s', 'perl module %s', recursive=True, allow_dirs=False, match_path=True, prepend=False)
- do_split_packages(d, libdir, r'Module/([^\/]*)/.*', '${PN}-module-%s', 'perl module %s', recursive=True, allow_dirs=False, match_path=True, prepend=False)
- do_split_packages(d, libdir, r'.*linux/([^\/].*)\.(pm|pl|e2x)', '${PN}-module-%s', 'perl module %s', recursive=True, allow_dirs=False, match_path=True, prepend=False)
- do_split_packages(d, libdir, r'(^(?!(CPAN\/|CPANPLUS\/|Module\/|unicore\/|.*linux\/)[^\/]).*)\.(pm|pl|e2x)', '${PN}-module-%s', 'perl module %s', recursive=True, allow_dirs=False, match_path=True, prepend=False)
-
- # perl-modules should recommend every perl module, and only the
- # modules. Don't attempt to use the result of do_split_packages() as some
- # modules are manually split (eg. perl-module-unicore).
- packages = filter(lambda p: 'perl-module-' in p, d.getVar('PACKAGES').split())
- d.setVar(d.expand("RRECOMMENDS:${PN}-modules"), ' '.join(packages))
-
- # Read the pre-generated dependency file, and use it to set module dependecies
- for line in open(d.expand("${WORKDIR}") + '/perl-rdepends.txt').readlines():
- splitline = line.split()
- # Filter empty lines and comments
- if len(splitline) == 0 or splitline[0].startswith("#"):
- continue
- if bb.data.inherits_class('native', d):
- module = splitline[0] + '-native'
- depends = "perl-native"
- else:
- module = splitline[0].replace("RDEPENDS:perl", "RDEPENDS:${PN}")
- depends = splitline[2].strip('"').replace("perl-module", "${PN}-module")
- d.appendVar(d.expand(module), " " + depends)
-}
-
-python() {
- if d.getVar('CLASSOVERRIDE') == "class-target":
- d.setVar("PACKAGES_DYNAMIC", "^${MLPREFIX}perl-module-.*(?<!native)$")
- elif d.getVar('CLASSOVERRIDE') == "class-native":
- d.setVar("PACKAGES_DYNAMIC", "^perl-module-.*-native$")
- elif d.getVar('CLASSOVERRIDE') == "class-nativesdk":
- d.setVar("PACKAGES_DYNAMIC", "^nativesdk-perl-module-.*")
-}
-
-RDEPENDS:${PN}-misc += "perl perl-modules"
-RDEPENDS:${PN}-pod += "perl"
-
-BBCLASSEXTEND = "native nativesdk"
-
-SSTATE_SCAN_FILES += "*.pm *.pod *.h *.pl *.sh"
-
-do_create_rdepends_inc() {
- cd ${WORKDIR}
- cat <<'EOPREAMBLE' > ${WORKDIR}/perl-rdepends.inc
-
-# Some additional dependencies that the above doesn't manage to figure out
-RDEPENDS:${PN}-module-file-spec += "${PN}-module-file-spec-unix"
-RDEPENDS:${PN}-module-scalar-util += "${PN}-module-list-util"
-RDEPENDS:${PN}-module-file-temp += "${PN}-module-scalar-util"
-RDEPENDS:${PN}-module-file-temp += "${PN}-module-file-spec"
-RDEPENDS:${PN}-module-io-file += "${PN}-module-symbol"
-RDEPENDS:${PN}-module-io-file += "${PN}-module-carp"
-RDEPENDS:${PN}-module-math-bigint += "${PN}-module-math-bigint-calc"
-RDEPENDS:${PN}-module-test-builder += "${PN}-module-list-util"
-RDEPENDS:${PN}-module-test-builder += "${PN}-module-scalar-util"
-RDEPENDS:${PN}-module-test-builder-formatter += "${PN}-module-test2-formatter-tap"
-RDEPENDS:${PN}-module-test2-api += "${PN}-module-test2-event-fail"
-RDEPENDS:${PN}-module-test2-api += "${PN}-module-test2-event-pass"
-RDEPENDS:${PN}-module-test2-api += "${PN}-module-test2-event-v2"
-RDEPENDS:${PN}-module-test2-formatter-tap += "${PN}-module-test2-formatter"
-RDEPENDS:${PN}-module-thread-queue += "${PN}-module-attributes"
-RDEPENDS:${PN}-module-overload += "${PN}-module-overloading"
-
-# Generated depends list beyond this line
-EOPREAMBLE
- test -e packages-split.new && rm -rf packages-split.new
- cp -r packages-split packages-split.new && cd packages-split.new
- find . -name \*.pm | xargs sed -i '/^=head/,/^=cut/d'
- egrep -r "^\s*(\<use .*|\<require .*);?" perl-module-* --include="*.pm" | \
- sed "s/\/.*\.pm: */ += /g;s/[\"\']//g;s/;.*/\"/g;s/+= .*\(require\|use\)\> */+= \"perl-module-/g;s/CPANPLUS::.*/cpanplus/g;s/CPAN::.*/cpan/g;s/::/-/g;s/ [^+\"].*//g;s/_/-/g;s/\.pl\"$/\"/;s/\"\?\$/\"/;s/(//;s/)//;" | tr [:upper:] [:lower:] | \
- awk '{if ($3 != "\x22"$1"\x22"){ print $0}}'| \
- grep -v -e "\-vms\-" -e module-5 -e "^$" -e "\\$" -e your -e tk -e autoperl -e html -e http -e parse-cpan -e perl-ostype -e ndbm-file -e module-mac -e fcgi -e lwp -e dbd -e dbix | \
- sort -u | \
- sed 's/^/RDEPENDS:/;s/perl-module-/${PN}-module-/g;s/module-\(module-\)/\1/g;s/\(module-load\)-conditional/\1/g;s/encode-configlocal/&-pm/;' | \
- egrep -wv '=>|module-a|module-apache.?|module-apr|module-authen-sasl|module-b-asmdata|module-convert-ebcdic|module-devel-size|module-digest-perl-md5|module-dumpvalue|module-extutils-constant-aaargh56hash|module-extutils-xssymset|module-file-bsdglob|module-for|module-it|module-io-socket-inet6|module-io-socket-ssl|module-io-string|module-ipc-system-simple|module-lexical|module-local-lib|metadata|module-modperl-util|module-pluggable-object|module-test-builder-io-scalar|module-text-unidecode|module-unicore|module-win32|objects\sload|syscall.ph|systeminfo.ph|%s' | \
- egrep -wv '=>|module-algorithm-diff|module-carp|module-c<extutils-mm-unix>|module-l<extutils-mm-unix>|module-encode-hanextra|module-extutils-makemaker-version-regex|module-file-spec|module-io-compress-lzma|module-io-uncompress-unxz|module-locale-maketext-lexicon|module-log-agent|module-meta-notation|module-net-localcfg|module-net-ping-external|module-b-deparse|module-scalar-util|module-some-module|module-symbol|module-uri|module-win32api-file' > ${WORKDIR}/perl-rdepends.generated
- cat ${WORKDIR}/perl-rdepends.inc ${WORKDIR}/perl-rdepends.generated > ${THISDIR}/files/perl-rdepends.txt
-}
-
-# bitbake perl -c create_rdepends_inc
-addtask do_create_rdepends_inc
-
-SYSROOT_PREPROCESS_FUNCS += "perl_sysroot_create_wrapper"
-
-perl_sysroot_create_wrapper () {
- mkdir -p ${SYSROOT_DESTDIR}${bindir}
- # Create a wrapper that /usr/bin/env perl will use to get perl-native.
- # This MUST live in the normal bindir.
- cat > ${SYSROOT_DESTDIR}${bindir}/nativeperl << EOF
-#!/bin/sh
-realpath=\`readlink -fn \$0\`
-exec \`dirname \$realpath\`/perl-native/perl "\$@"
-EOF
- chmod 0755 ${SYSROOT_DESTDIR}${bindir}/nativeperl
- cat ${SYSROOT_DESTDIR}${bindir}/nativeperl
-}
-
-SSTATE_HASHEQUIV_FILEMAP = " \
- populate_sysroot:*/lib*/perl5/*/*/Config_heavy.pl:${TMPDIR} \
- populate_sysroot:*/lib*/perl5/*/*/Config_heavy.pl:${COREBASE} \
- populate_sysroot:*/lib*/perl5/config.sh:${TMPDIR} \
- populate_sysroot:*/lib*/perl5/config.sh:${COREBASE} \
- "
diff --git a/meta/recipes-devtools/perl/perl_5.38.2.bb b/meta/recipes-devtools/perl/perl_5.38.2.bb
new file mode 100644
index 0000000000..b6c9cda7ae
--- /dev/null
+++ b/meta/recipes-devtools/perl/perl_5.38.2.bb
@@ -0,0 +1,426 @@
+SUMMARY = "Perl scripting language"
+HOMEPAGE = "http://www.perl.org/"
+DESCRIPTION = "Perl is a highly capable, feature-rich programming language"
+SECTION = "devel"
+LICENSE = "Artistic-1.0 | GPL-1.0-or-later"
+LIC_FILES_CHKSUM = "file://Copying;md5=5b122a36d0f6dc55279a0ebc69f3c60b \
+ file://Artistic;md5=71a4d5d9acc18c0952a6df2218bb68da \
+ "
+
+
+SRC_URI = "https://www.cpan.org/src/5.0/perl-${PV}.tar.gz;name=perl \
+ file://perl-rdepends.txt \
+ file://0001-Somehow-this-module-breaks-through-the-perl-wrapper-.patch \
+ file://errno_ver.diff \
+ file://native-perlinc.patch \
+ file://perl-dynloader.patch \
+ file://0002-Constant-Fix-up-shebang.patch \
+ file://determinism.patch \
+ file://0001-cpan-Sys-Syslog-Makefile.PL-Fix-_PATH_LOG-for-determ.patch \
+ file://0001-Fix-intermittent-failure-of-test-t-op-sigsystem.t.patch \
+ "
+SRC_URI:append:class-native = " \
+ file://perl-configpm-switch.patch \
+"
+SRC_URI:append:class-target = " \
+ file://encodefix.patch \
+"
+
+SRC_URI[perl.sha256sum] = "a0a31534451eb7b83c7d6594a497543a54d488bc90ca00f5e34762577f40655e"
+
+B = "${WORKDIR}/perl-${PV}-build"
+
+inherit upstream-version-is-even update-alternatives
+
+DEPENDS += "perlcross-native zlib virtual/crypt"
+# make 4.1 has race issues with the double-colon usage of MakeMaker, see #14096
+DEPENDS += "make-native"
+
+PERL_LIB_VER = "${@'.'.join(d.getVar('PV').split('.')[0:2])}.0"
+
+PACKAGECONFIG ??= "gdbm"
+PACKAGECONFIG:append:libc-musl = " anylocale"
+PACKAGECONFIG[bdb] = ",-Ui_db,db"
+PACKAGECONFIG[gdbm] = ",-Ui_gdbm,gdbm"
+PACKAGECONFIG[anylocale] = "-Dd_setlocale_accepts_any_locale_name=define,,"
+
+# Don't generate comments in enc2xs output files. They are not reproducible
+export ENC2XS_NO_COMMENTS = "1"
+
+CFLAGS += "-D_GNU_SOURCE -D_LARGEFILE_SOURCE -D_FILE_OFFSET_BITS=64"
+
+do_configure:prepend() {
+ rm -rf ${B}
+ cp -rfp ${S} ${B}
+ cp -rfp ${STAGING_DATADIR_NATIVE}/perl-cross/* ${B}
+ cd ${B}
+}
+
+do_configure:class-target() {
+ ./configure --prefix=${prefix} --libdir=${libdir} \
+ --target=${TARGET_SYS} \
+ -Duse64bitint \
+ -Duseshrplib \
+ -Dusethreads \
+ -Dsoname=libperl.so.5 \
+ -Dvendorprefix=${prefix} \
+ -Dvendorlibdir=${libdir} \
+ -Darchlibexp=${STAGING_LIBDIR}/perl5/${PV}/${TARGET_ARCH}-linux \
+ -Dlibpth='${libdir} ${base_libdir}' \
+ -Dglibpth='${libdir} ${base_libdir}' \
+ -Alddlflags=' ${LDFLAGS}' \
+ -Dd_gnulibc=define \
+ ${PACKAGECONFIG_CONFARGS}
+
+ #perl.c uses an ARCHLIB_EXP define to generate compile-time code that
+ #adds the archlibexp path to @INC during run-time initialization of a
+ #new perl interpreter.
+
+ #Because we've changed this value in a temporary way to make it
+ #possible to use ExtUtils::Embed in the target build (the temporary
+ #value in config.sh gets re-stripped out during packaging), the
+ #ARCHLIB_EXP value that gets generated still uses the temporary version
+ #instead of the original expected version (i.e. becauses it's in the
+ #generated config.h, it doesn't get stripped out during packaging like
+ #the others in config.sh).
+
+ sed -i -e "s,${STAGING_LIBDIR},${libdir},g" config.h
+}
+
+do_configure:class-nativesdk() {
+ ./configure --prefix=${prefix} \
+ --target=${TARGET_SYS} \
+ -Duseshrplib \
+ -Dusethreads \
+ -Dsoname=libperl.so.5 \
+ -Dvendorprefix=${prefix} \
+ -Darchlibexp=${STAGING_LIBDIR}/perl5/${PV}/${TARGET_ARCH}-linux \
+ -Alddlflags=' ${LDFLAGS}' \
+ ${PACKAGECONFIG_CONFARGS}
+
+ # See the comment above
+ sed -i -e "s,${STAGING_LIBDIR},${libdir},g" config.h
+}
+
+do_configure:class-native() {
+ ./configure --prefix=${prefix} \
+ -Dbin=${bindir}/perl-native \
+ -Duseshrplib \
+ -Dusethreads \
+ -Dsoname=libperl.so.5 \
+ -Dvendorprefix=${prefix} \
+ -Ui_xlocale \
+ -Alddlflags=' ${LDFLAGS}' \
+ ${PACKAGECONFIG_CONFARGS}
+
+ # This prevents leakage of build paths into perl-native binaries, which
+ # causes non-deterministic troubles when those paths no longer exist or aren't accessible.
+ sed -i -e "s,${STAGING_LIBDIR},/completelyboguspath,g" config.h
+}
+
+do_configure:append() {
+ if [ -n "$SOURCE_DATE_EPOCH" ]; then
+ PERL_BUILD_DATE="$(${PYTHON} -c "\
+from datetime import datetime, timezone; \
+print(datetime.fromtimestamp($SOURCE_DATE_EPOCH, timezone.utc).strftime('%a %b %d %H:%M:%S %Y')) \
+ ")"
+ echo "#define PERL_BUILD_DATE \"$PERL_BUILD_DATE\"" >> config.h
+ fi
+}
+
+do_compile() {
+ oe_runmake
+}
+
+do_install() {
+ oe_runmake 'DESTDIR=${D}' install
+
+ install -d ${D}${libdir}/perl5
+ install -d ${D}${libdir}/perl5/${PV}/
+ install -d ${D}${libdir}/perl5/${PV}/ExtUtils/
+
+ # Save native config
+ install config.sh ${D}${libdir}/perl5
+ install lib/Config.pm ${D}${libdir}/perl5/${PV}/
+ install lib/ExtUtils/typemap ${D}${libdir}/perl5/${PV}/ExtUtils/
+
+ # Fix up shared library
+ dir=$(echo ${D}/${libdir}/perl5/${PV}/*/CORE)
+ rm $dir/libperl.so
+ ln -sf ../../../../libperl.so.${PERL_LIB_VER} $dir/libperl.so
+
+ # Try to catch Bug #13946
+ if [ -e ${D}/${libdir}/perl5/${PV}/Storable.pm ]; then
+ bbfatal 'non-arch specific Storable.pm found! See https://bugzilla.yoctoproject.org/show_bug.cgi?id=13946'
+ fi
+}
+
+do_install:append:class-target() {
+ # This is used to substitute target configuration when running native perl via perl-configpm-switch.patch
+ ln -s Config_heavy.pl ${D}${libdir}/perl5/${PV}/${TARGET_ARCH}-linux/Config_heavy-target.pl
+
+ # xconfig.h contains references to build host architecture, and yet is included from various other places.
+ # To make it reproducible let's make it a copy of config.h patch that is specific to the target architecture.
+ # It is believed that the original header is the product of building miniperl (a helper executable built with host compiler).
+ cp ${D}${libdir}/perl5/${PV}/${TARGET_ARCH}-linux/CORE/config.h ${D}${libdir}/perl5/${PV}/${TARGET_ARCH}-linux/CORE/xconfig.h
+}
+
+do_install:append:class-nativesdk() {
+ # This is used to substitute target configuration when running native perl via perl-configpm-switch.patch
+ ln -s Config_heavy.pl ${D}${libdir}/perl5/${PV}/${TARGET_ARCH}-linux/Config_heavy-target.pl
+
+ create_wrapper ${D}${bindir}/perl \
+ PERL5LIB='$PERL5LIB:${SDKPATHNATIVE}/${libdir_nativesdk}/perl5/site_perl/${PV}:${SDKPATHNATIVE}/${libdir_nativesdk}/perl5/vendor_perl/${PV}:${SDKPATHNATIVE}/${libdir_nativesdk}/perl5/${PV}'
+}
+
+do_install:append:class-native () {
+ # Those wrappers mean that perl installed from sstate (which may change
+ # path location) works and that in the nativesdk case, the SDK can be
+ # installed to a different location from the one it was built for.
+ create_wrapper ${D}${bindir}/perl-native/perl PERL5LIB='$PERL5LIB:${STAGING_LIBDIR}/perl5/site_perl/${PV}:${STAGING_LIBDIR}/perl5/vendor_perl/${PV}:${STAGING_LIBDIR}/perl5/${PV}'
+
+ # Use /usr/bin/env nativeperl for the perl script.
+ for f in `grep -Il '#! *${bindir}/perl' ${D}/${bindir}/*`; do
+ sed -i -e 's|${bindir}/perl|/usr/bin/env nativeperl|' $f
+ done
+}
+
+PACKAGE_PREPROCESS_FUNCS += "perl_package_preprocess"
+
+perl_package_preprocess () {
+ # Fix up installed configuration
+ sed -i -e "s,${D},,g" \
+ -e "s,${DEBUG_PREFIX_MAP},,g" \
+ -e "s,--sysroot=${STAGING_DIR_HOST},,g" \
+ -e "s,-isystem${STAGING_INCDIR} ,,g" \
+ -e "s,${STAGING_LIBDIR},${libdir},g" \
+ -e "s,${STAGING_BINDIR},${bindir},g" \
+ -e "s,${STAGING_INCDIR},${includedir},g" \
+ -e "s,${STAGING_BINDIR_NATIVE}/perl-native/,${bindir}/,g" \
+ -e "s,${STAGING_BINDIR_NATIVE}/,,g" \
+ -e "s,${STAGING_BINDIR_TOOLCHAIN}/${TARGET_PREFIX},${bindir},g" \
+ -e 's:${RECIPE_SYSROOT}::g' \
+ ${PKGD}${bindir}/h2xs.perl \
+ ${PKGD}${bindir}/h2ph.perl \
+ ${PKGD}${bindir}/pod2man.perl \
+ ${PKGD}${bindir}/pod2text.perl \
+ ${PKGD}${bindir}/pod2usage.perl \
+ ${PKGD}${bindir}/podchecker.perl \
+ ${PKGD}${libdir}/perl5/${PV}/${TARGET_ARCH}-linux/CORE/config.h \
+ ${PKGD}${libdir}/perl5/${PV}/${TARGET_ARCH}-linux/CORE/xconfig.h \
+ ${PKGD}${libdir}/perl5/${PV}/${TARGET_ARCH}-linux/CORE/perl.h \
+ ${PKGD}${libdir}/perl5/${PV}/${TARGET_ARCH}-linux/CORE/pp.h \
+ ${PKGD}${libdir}/perl5/${PV}/Config.pm \
+ ${PKGD}${libdir}/perl5/${PV}/${TARGET_ARCH}-linux/Config.pm \
+ ${PKGD}${libdir}/perl5/${PV}/${TARGET_ARCH}-linux/Config.pod \
+ ${PKGD}${libdir}/perl5/${PV}/${TARGET_ARCH}-linux/Config_git.pl \
+ ${PKGD}${libdir}/perl5/${PV}/${TARGET_ARCH}-linux/Config_heavy.pl \
+ ${PKGD}${libdir}/perl5/${PV}/ExtUtils/Liblist/Kid.pm \
+ ${PKGD}${libdir}/perl5/${PV}/FileCache.pm \
+ ${PKGD}${libdir}/perl5/${PV}/pod/*.pod \
+ ${PKGD}${libdir}/perl5/config.sh
+}
+
+inherit update-alternatives
+
+ALTERNATIVE_PRIORITY = "100"
+
+ALTERNATIVE:${PN}-misc = "corelist cpan enc2xs encguess h2ph h2xs instmodsh json_pp libnetcfg \
+ piconv pl2pm pod2html pod2man pod2text pod2usage podchecker \
+ prove ptar ptardiff ptargrep shasum splain streamzip xsubpp zipdetails"
+ALTERNATIVE_LINK_NAME[corelist] = "${bindir}/corelist"
+ALTERNATIVE_LINK_NAME[cpan] = "${bindir}/cpan"
+ALTERNATIVE_LINK_NAME[enc2xs] = "${bindir}/enc2xs"
+ALTERNATIVE_LINK_NAME[encguess] = "${bindir}/encguess"
+ALTERNATIVE_LINK_NAME[h2ph] = "${bindir}/h2ph"
+ALTERNATIVE_LINK_NAME[h2xs] = "${bindir}/h2xs"
+ALTERNATIVE_LINK_NAME[instmodsh] = "${bindir}/instmodsh"
+ALTERNATIVE_LINK_NAME[json_pp] = "${bindir}/json_pp"
+ALTERNATIVE_LINK_NAME[libnetcfg] = "${bindir}/libnetcfg"
+ALTERNATIVE_LINK_NAME[piconv] = "${bindir}/piconv"
+ALTERNATIVE_LINK_NAME[pl2pm] = "${bindir}/pl2pm"
+ALTERNATIVE_LINK_NAME[pod2html] = "${bindir}/pod2html"
+ALTERNATIVE_LINK_NAME[pod2man] = "${bindir}/pod2man"
+ALTERNATIVE_LINK_NAME[pod2text] = "${bindir}/pod2text"
+ALTERNATIVE_LINK_NAME[pod2usage] = "${bindir}/pod2usage"
+ALTERNATIVE_LINK_NAME[podchecker] = "${bindir}/podchecker"
+ALTERNATIVE_LINK_NAME[prove] = "${bindir}/prove"
+ALTERNATIVE_LINK_NAME[ptar] = "${bindir}/ptar"
+ALTERNATIVE_LINK_NAME[ptardiff] = "${bindir}/ptardiff"
+ALTERNATIVE_LINK_NAME[ptargrep] = "${bindir}/ptargrep"
+ALTERNATIVE_LINK_NAME[shasum] = "${bindir}/shasum"
+ALTERNATIVE_LINK_NAME[splain] = "${bindir}/splain"
+ALTERNATIVE_LINK_NAME[streamzip] = "${bindir}/streamzip"
+ALTERNATIVE_LINK_NAME[xsubpp] = "${bindir}/xsubpp"
+ALTERNATIVE_LINK_NAME[zipdetails] = "${bindir}/zipdetails"
+
+require perl-ptest.inc
+
+FILES:${PN} = "${bindir}/perl ${bindir}/perl.real ${bindir}/perl${PV} ${libdir}/libperl.so* \
+ ${libdir}/perl5/site_perl \
+ ${libdir}/perl5/${PV}/Config.pm \
+ ${libdir}/perl5/${PV}/${TARGET_ARCH}-linux/Config.pm \
+ ${libdir}/perl5/${PV}/*/Config_git.pl \
+ ${libdir}/perl5/${PV}/*/Config_heavy-target.pl \
+ ${libdir}/perl5/config.sh \
+ ${libdir}/perl5/${PV}/strict.pm \
+ ${libdir}/perl5/${PV}/warnings.pm \
+ ${libdir}/perl5/${PV}/warnings \
+ ${libdir}/perl5/${PV}/vars.pm \
+ ${libdir}/perl5/site_perl \
+ ${libdir}/perl5/${PV}/ExtUtils/MANIFEST.SKIP \
+ ${libdir}/perl5/${PV}/ExtUtils/xsubpp \
+ ${libdir}/perl5/${PV}/ExtUtils/typemap \
+ "
+RPROVIDES:${PN} += "perl-module-strict perl-module-vars perl-module-config perl-module-warnings \
+ perl-module-warnings-register perl-module-config-git"
+
+FILES:${PN}-staticdev:append = " ${libdir}/perl5/${PV}/*/CORE/libperl.a"
+
+FILES:${PN}-dev:append = " ${libdir}/perl5/${PV}/*/CORE"
+
+FILES:${PN}-doc:append = " ${libdir}/perl5/${PV}/Unicode/Collate/*.txt \
+ ${libdir}/perl5/${PV}/*/.packlist \
+ ${libdir}/perl5/${PV}/Encode/encode.h \
+ "
+PACKAGES += "${PN}-misc"
+
+FILES:${PN}-misc = "${bindir}/*"
+
+PACKAGES += "${PN}-pod"
+
+FILES:${PN}-pod = "${libdir}/perl5/${PV}/pod \
+ ${libdir}/perl5/${PV}/*.pod \
+ ${libdir}/perl5/${PV}/*/*.pod \
+ ${libdir}/perl5/${PV}/*/*/*.pod \
+ ${libdir}/perl5/${PV}/*/*/*/*.pod \
+ "
+
+PACKAGES += "${PN}-module-cpan ${PN}-module-unicore"
+
+FILES:${PN}-module-cpan += "${libdir}/perl5/${PV}/CPAN \
+ "
+FILES:${PN}-module-unicore += "${libdir}/perl5/${PV}/unicore"
+
+ALTERNATIVE_PRIORITY = "40"
+ALTERNATIVE:${PN}-doc = "Thread.3"
+ALTERNATIVE_LINK_NAME[Thread.3] = "${mandir}/man3/Thread.3"
+
+# Create a perl-modules package that represents the collection of all the
+# other perl packages (actually the non modules packages and not created too).
+ALLOW_EMPTY:${PN}-modules = "1"
+PACKAGES += "${PN}-modules "
+
+PACKAGESPLITFUNCS =+ "split_perl_packages"
+
+python split_perl_packages () {
+ libdir = d.expand('${libdir}/perl5/${PV}')
+ do_split_packages(d, libdir, r'.*/auto/([^.]*)/[^/]*\.(so|ld|ix|al)', '${PN}-module-%s', 'perl module %s', recursive=True, match_path=True, prepend=False)
+ do_split_packages(d, libdir, r'.*linux/([^\/]*)\.pm', '${PN}-module-%s', 'perl module %s', recursive=True, allow_dirs=False, match_path=True, prepend=False)
+ do_split_packages(d, libdir, r'Module/([^\/]*)\.pm', '${PN}-module-%s', 'perl module %s', recursive=True, allow_dirs=False, match_path=True, prepend=False)
+ do_split_packages(d, libdir, r'Module/([^\/]*)/.*', '${PN}-module-%s', 'perl module %s', recursive=True, allow_dirs=False, match_path=True, prepend=False)
+ do_split_packages(d, libdir, r'.*linux/([^\/].*)\.(pm|pl|e2x)', '${PN}-module-%s', 'perl module %s', recursive=True, allow_dirs=False, match_path=True, prepend=False)
+ do_split_packages(d, libdir, r'(^(?!(CPAN\/|CPANPLUS\/|Module\/|unicore\/|.*linux\/)[^\/]).*)\.(pm|pl|e2x)', '${PN}-module-%s', 'perl module %s', recursive=True, allow_dirs=False, match_path=True, prepend=False)
+
+ # perl-modules should runtime-depend on every perl module, and only the
+ # modules. Don't attempt to use the result of do_split_packages() as some
+ # modules are manually split (eg. perl-module-unicore). Also, the split
+ # packages should not include packages defined in RPROVIDES:${PN}.
+ perl_sub_pkgs = d.getVar(d.expand("RPROVIDES:${PN}")).split()
+ packages = filter(lambda p: 'perl-module-' in p and p not in perl_sub_pkgs, d.getVar('PACKAGES').split())
+ d.setVar(d.expand("RDEPENDS:${PN}-modules"), ' '.join(packages))
+
+ # Read the pre-generated dependency file, and use it to set module dependecies
+ for line in open(d.expand("${WORKDIR}") + '/perl-rdepends.txt').readlines():
+ splitline = line.split()
+ # Filter empty lines and comments
+ if len(splitline) == 0 or splitline[0].startswith("#"):
+ continue
+ if bb.data.inherits_class('native', d):
+ module = splitline[0] + '-native'
+ depends = "perl-native"
+ else:
+ module = splitline[0].replace("RDEPENDS:perl", "RDEPENDS:${PN}")
+ depends = splitline[2].strip('"').replace("perl-module", "${PN}-module")
+ d.appendVar(d.expand(module), " " + depends)
+}
+
+python() {
+ if d.getVar('CLASSOVERRIDE') == "class-target":
+ d.setVar("PACKAGES_DYNAMIC", "^${MLPREFIX}perl-module-.*(?<!native)$")
+ elif d.getVar('CLASSOVERRIDE') == "class-native":
+ d.setVar("PACKAGES_DYNAMIC", "^perl-module-.*-native$")
+ elif d.getVar('CLASSOVERRIDE') == "class-nativesdk":
+ d.setVar("PACKAGES_DYNAMIC", "^nativesdk-perl-module-.*")
+}
+
+RDEPENDS:${PN}-misc += "perl"
+RRECOMMENDS:${PN}-misc += "perl-modules"
+RDEPENDS:${PN}-pod += "perl"
+
+BBCLASSEXTEND = "native nativesdk"
+
+SSTATE_SCAN_FILES += "*.pm *.pod *.h *.pl *.sh"
+
+do_create_rdepends_inc() {
+ cd ${WORKDIR}
+ cat <<'EOPREAMBLE' > ${WORKDIR}/perl-rdepends.inc
+
+# Some additional dependencies that the above doesn't manage to figure out
+RDEPENDS:${PN}-module-file-spec += "${PN}-module-file-spec-unix"
+RDEPENDS:${PN}-module-scalar-util += "${PN}-module-list-util"
+RDEPENDS:${PN}-module-file-temp += "${PN}-module-scalar-util"
+RDEPENDS:${PN}-module-file-temp += "${PN}-module-file-spec"
+RDEPENDS:${PN}-module-io-file += "${PN}-module-symbol"
+RDEPENDS:${PN}-module-io-file += "${PN}-module-carp"
+RDEPENDS:${PN}-module-math-bigint += "${PN}-module-math-bigint-calc"
+RDEPENDS:${PN}-module-test-builder += "${PN}-module-list-util"
+RDEPENDS:${PN}-module-test-builder += "${PN}-module-scalar-util"
+RDEPENDS:${PN}-module-test-builder-formatter += "${PN}-module-test2-formatter-tap"
+RDEPENDS:${PN}-module-test2-api += "${PN}-module-test2-event-fail"
+RDEPENDS:${PN}-module-test2-api += "${PN}-module-test2-event-pass"
+RDEPENDS:${PN}-module-test2-api += "${PN}-module-test2-event-v2"
+RDEPENDS:${PN}-module-test2-formatter-tap += "${PN}-module-test2-formatter"
+RDEPENDS:${PN}-module-thread-queue += "${PN}-module-attributes"
+RDEPENDS:${PN}-module-overload += "${PN}-module-overloading"
+
+# Generated depends list beyond this line
+EOPREAMBLE
+ test -e packages-split.new && rm -rf packages-split.new
+ cp -r packages-split packages-split.new && cd packages-split.new
+ find . -name \*.pm | xargs sed -i '/^=head/,/^=cut/d'
+ egrep -r "^\s*(\<use .*|\<require .*);?" perl-module-* --include="*.pm" | \
+ sed "s/\/.*\.pm: */ += /g;s/[\"\']//g;s/;.*/\"/g;s/+= .*\(require\|use\)\> */+= \"perl-module-/g;s/CPANPLUS::.*/cpanplus/g;s/CPAN::.*/cpan/g;s/::/-/g;s/ [^+\"].*//g;s/_/-/g;s/\.pl\"$/\"/;s/\"\?\$/\"/;s/(//;s/)//;" | tr [:upper:] [:lower:] | \
+ awk '{if ($3 != "\x22"$1"\x22"){ print $0}}'| \
+ grep -v -e "\-vms\-" -e module-5 -e "^$" -e "\\$" -e your -e tk -e autoperl -e html -e http -e parse-cpan -e perl-ostype -e ndbm-file -e module-mac -e fcgi -e lwp -e dbd -e dbix | \
+ sort -u | \
+ sed 's/^/RDEPENDS:/;s/perl-module-/${PN}-module-/g;s/module-\(module-\)/\1/g;s/\(module-load\)-conditional/\1/g;s/encode-configlocal/&-pm/;' | \
+ egrep -wv '=>|module-a|module-apache.?|module-apr|module-authen-sasl|module-b-asmdata|module-convert-ebcdic|module-devel-size|module-digest-perl-md5|module-dumpvalue|module-extutils-constant-aaargh56hash|module-extutils-xssymset|module-file-bsdglob|module-for|module-it|module-io-socket-inet6|module-io-socket-ssl|module-io-string|module-ipc-system-simple|module-lexical|module-local-lib|metadata|module-modperl-util|module-pluggable-object|module-test-builder-io-scalar|module-text-unidecode|module-unicore|module-win32|objects\sload|syscall.ph|systeminfo.ph|%s' | \
+ egrep -wv '=>|module-algorithm-diff|module-carp|module-c<extutils-mm-unix>|module-l<extutils-mm-unix>|module-encode-hanextra|module-extutils-makemaker-version-regex|module-file-spec|module-io-compress-lzma|module-io-uncompress-unxz|module-locale-maketext-lexicon|module-log-agent|module-meta-notation|module-net-localcfg|module-net-ping-external|module-b-deparse|module-scalar-util|module-some-module|module-symbol|module-uri|module-win32api-file' > ${WORKDIR}/perl-rdepends.generated
+ cat ${WORKDIR}/perl-rdepends.inc ${WORKDIR}/perl-rdepends.generated > ${THISDIR}/files/perl-rdepends.txt
+}
+
+# bitbake perl -c create_rdepends_inc
+addtask do_create_rdepends_inc
+
+SYSROOT_PREPROCESS_FUNCS += "perl_sysroot_create_wrapper"
+
+perl_sysroot_create_wrapper () {
+ mkdir -p ${SYSROOT_DESTDIR}${bindir}
+ # Create a wrapper that /usr/bin/env perl will use to get perl-native.
+ # This MUST live in the normal bindir.
+ cat > ${SYSROOT_DESTDIR}${bindir}/nativeperl << EOF
+#!/bin/sh
+realpath=\`readlink -fn \$0\`
+exec \`dirname \$realpath\`/perl-native/perl "\$@"
+EOF
+ chmod 0755 ${SYSROOT_DESTDIR}${bindir}/nativeperl
+ cat ${SYSROOT_DESTDIR}${bindir}/nativeperl
+}
+
+SSTATE_HASHEQUIV_FILEMAP = " \
+ populate_sysroot:*/lib*/perl5/*/*/Config_heavy.pl:${TMPDIR} \
+ populate_sysroot:*/lib*/perl5/*/*/Config_heavy.pl:${COREBASE} \
+ populate_sysroot:*/lib*/perl5/config.sh:${TMPDIR} \
+ populate_sysroot:*/lib*/perl5/config.sh:${COREBASE} \
+ "
diff --git a/meta/recipes-devtools/pkgconf/pkgconf_1.8.0.bb b/meta/recipes-devtools/pkgconf/pkgconf_1.8.0.bb
deleted file mode 100644
index 887e15e28c..0000000000
--- a/meta/recipes-devtools/pkgconf/pkgconf_1.8.0.bb
+++ /dev/null
@@ -1,67 +0,0 @@
-SUMMARY = "pkgconf provides compiler and linker configuration for development frameworks."
-DESCRIPTION = "pkgconf is a program which helps to configure compiler and linker \
-flags for development frameworks. It is similar to pkg-config from \
-freedesktop.org, providing additional functionality while also maintaining \
-compatibility."
-HOMEPAGE = "http://pkgconf.org"
-BUGTRACKER = "https://github.com/pkgconf/pkgconf/issues"
-SECTION = "devel"
-PROVIDES += "pkgconfig"
-RPROVIDES:${PN} += "pkgconfig"
-
-# The pkgconf license seems to be functionally equivalent to BSD-2-Clause or
-# ISC, but has different wording, so needs its own name.
-LICENSE = "pkgconf"
-LIC_FILES_CHKSUM = "file://COPYING;md5=2214222ec1a820bd6cc75167a56925e0"
-
-SRC_URI = "\
- https://distfiles.dereferenced.org/pkgconf/pkgconf-${PV}.tar.xz \
- file://pkg-config-wrapper \
- file://pkg-config-native.in \
- file://pkg-config-esdk.in \
-"
-SRC_URI[sha256sum] = "ef9c7e61822b7cb8356e6e9e1dca58d9556f3200d78acab35e4347e9d4c2bbaf"
-
-inherit autotools
-
-EXTRA_OECONF += "--with-pkg-config-dir='${libdir}/pkgconfig:${datadir}/pkgconfig'"
-
-do_install:append () {
- # Install a wrapper which deals, as much as possible with pkgconf vs
- # pkg-config compatibility issues.
- install -m 0755 "${WORKDIR}/pkg-config-wrapper" "${D}${bindir}/pkg-config"
-}
-
-do_install:append:class-native () {
- # Install a pkg-config-native wrapper that will use the native sysroot instead
- # of the MACHINE sysroot, for using pkg-config when building native tools.
- sed -e "s|@PATH_NATIVE@|${PKG_CONFIG_PATH}|" \
- < ${WORKDIR}/pkg-config-native.in > ${B}/pkg-config-native
- install -m755 ${B}/pkg-config-native ${D}${bindir}/pkg-config-native
- sed -e "s|@PATH_NATIVE@|${PKG_CONFIG_PATH}|" \
- -e "s|@LIBDIR_NATIVE@|${PKG_CONFIG_LIBDIR}|" \
- < ${WORKDIR}/pkg-config-esdk.in > ${B}/pkg-config-esdk
- install -m755 ${B}/pkg-config-esdk ${D}${bindir}/pkg-config-esdk
-}
-
-# When using the RPM generated automatic package dependencies, some packages
-# will end up requiring 'pkgconfig(pkg-config)'. Allow this behavior by
-# specifying an appropriate provide.
-RPROVIDES:${PN} += "pkgconfig(pkg-config)"
-
-# Include pkg.m4 in the main package, leaving libpkgconf dev files in -dev
-FILES:${PN}-dev:remove = "${datadir}/aclocal"
-FILES:${PN} += "${datadir}/aclocal"
-
-BBCLASSEXTEND += "native nativesdk"
-
-pkgconf_sstate_fixup_esdk () {
- if [ "${BB_CURRENTTASK}" = "populate_sysroot_setscene" -a "${WITHIN_EXT_SDK}" = "1" ] ; then
- pkgconfdir="${SSTATE_INSTDIR}/recipe-sysroot-native/${bindir_native}"
- mv $pkgconfdir/pkg-config $pkgconfdir/pkg-config.real
- ln -rs $pkgconfdir/pkg-config-esdk $pkgconfdir/pkg-config
- sed -i -e "s|^pkg-config|pkg-config.real|" $pkgconfdir/pkg-config-native
- fi
-}
-
-SSTATEPOSTUNPACKFUNCS:append:class-native = " pkgconf_sstate_fixup_esdk"
diff --git a/meta/recipes-devtools/pkgconf/pkgconf_2.2.0.bb b/meta/recipes-devtools/pkgconf/pkgconf_2.2.0.bb
new file mode 100644
index 0000000000..e98458ea55
--- /dev/null
+++ b/meta/recipes-devtools/pkgconf/pkgconf_2.2.0.bb
@@ -0,0 +1,67 @@
+SUMMARY = "pkgconf provides compiler and linker configuration for development frameworks."
+DESCRIPTION = "pkgconf is a program which helps to configure compiler and linker \
+flags for development frameworks. It is similar to pkg-config from \
+freedesktop.org, providing additional functionality while also maintaining \
+compatibility."
+HOMEPAGE = "http://pkgconf.org"
+BUGTRACKER = "https://github.com/pkgconf/pkgconf/issues"
+SECTION = "devel"
+PROVIDES += "pkgconfig"
+RPROVIDES:${PN} += "pkgconfig"
+
+# The pkgconf license seems to be functionally equivalent to BSD-2-Clause or
+# ISC, but has different wording, so needs its own name.
+LICENSE = "pkgconf"
+LIC_FILES_CHKSUM = "file://COPYING;md5=2214222ec1a820bd6cc75167a56925e0"
+
+SRC_URI = "\
+ https://distfiles.ariadne.space/pkgconf/pkgconf-${PV}.tar.xz \
+ file://pkg-config-wrapper \
+ file://pkg-config-native.in \
+ file://pkg-config-esdk.in \
+"
+SRC_URI[sha256sum] = "b06ff63a83536aa8c2f6422fa80ad45e4833f590266feb14eaddfe1d4c853c69"
+
+inherit autotools
+
+EXTRA_OECONF += "--with-pkg-config-dir='${libdir}/pkgconfig:${datadir}/pkgconfig'"
+
+do_install:append () {
+ # Install a wrapper which deals, as much as possible with pkgconf vs
+ # pkg-config compatibility issues.
+ install -m 0755 "${WORKDIR}/pkg-config-wrapper" "${D}${bindir}/pkg-config"
+}
+
+do_install:append:class-native () {
+ # Install a pkg-config-native wrapper that will use the native sysroot instead
+ # of the MACHINE sysroot, for using pkg-config when building native tools.
+ sed -e "s|@PATH_NATIVE@|${PKG_CONFIG_PATH}|" \
+ < ${WORKDIR}/pkg-config-native.in > ${B}/pkg-config-native
+ install -m755 ${B}/pkg-config-native ${D}${bindir}/pkg-config-native
+ sed -e "s|@PATH_NATIVE@|${PKG_CONFIG_PATH}|" \
+ -e "s|@LIBDIR_NATIVE@|${PKG_CONFIG_LIBDIR}|" \
+ < ${WORKDIR}/pkg-config-esdk.in > ${B}/pkg-config-esdk
+ install -m755 ${B}/pkg-config-esdk ${D}${bindir}/pkg-config-esdk
+}
+
+# When using the RPM generated automatic package dependencies, some packages
+# will end up requiring 'pkgconfig(pkg-config)'. Allow this behavior by
+# specifying an appropriate provide.
+RPROVIDES:${PN} += "pkgconfig(pkg-config)"
+
+# Include pkg.m4 in the main package, leaving libpkgconf dev files in -dev
+FILES:${PN}-dev:remove = "${datadir}/aclocal"
+FILES:${PN} += "${datadir}/aclocal"
+
+BBCLASSEXTEND += "native nativesdk"
+
+pkgconf_sstate_fixup_esdk () {
+ if [ "${BB_CURRENTTASK}" = "populate_sysroot_setscene" -a "${WITHIN_EXT_SDK}" = "1" ] ; then
+ pkgconfdir="${SSTATE_INSTDIR}/recipe-sysroot-native/${bindir_native}"
+ mv $pkgconfdir/pkg-config $pkgconfdir/pkg-config.real
+ ln -rs $pkgconfdir/pkg-config-esdk $pkgconfdir/pkg-config
+ sed -i -e "s|^pkg-config|pkg-config.real|" $pkgconfdir/pkg-config-native
+ fi
+}
+
+SSTATEPOSTUNPACKFUNCS:append:class-native = " pkgconf_sstate_fixup_esdk"
diff --git a/meta/recipes-devtools/pkgconfig/pkgconfig/0001-autotools-remove-support-for-the-__int64-type.-See-1.patch b/meta/recipes-devtools/pkgconfig/pkgconfig/0001-autotools-remove-support-for-the-__int64-type.-See-1.patch
deleted file mode 100644
index bee4585724..0000000000
--- a/meta/recipes-devtools/pkgconfig/pkgconfig/0001-autotools-remove-support-for-the-__int64-type.-See-1.patch
+++ /dev/null
@@ -1,144 +0,0 @@
-From 7e821441c482917e54435a07893272d87d3ad9e5 Mon Sep 17 00:00:00 2001
-From: Christoph Reiter <reiter.christoph@gmail.com>
-Date: Fri, 24 Aug 2018 19:43:04 +0200
-Subject: [PATCH] autotools: remove support for the __int64 type. See #1313
-
-__int64 was the 64bit type for Visual Studio before it added support for
-"long long" with VS2013. I think this was used to build glib with mingw and
-make the result usable for VS 6.0 which didn't support "long long" (??)
-
-Given that newer MSVC links against a different crt and mixing is not supported
-and everything supports "long long" nowadays just remove it.
-
-This is also a cleanup for printf format changes needed for #1497
-
-Signed-off-by: Khem Raj <raj.khem@gmail.com>
-Upstream-Status: Backport [https://github.com/GNOME/glib/commit/7e821441c4]
----
- configure.ac | 43 ++++---------------------------------------
- 1 file changed, 4 insertions(+), 39 deletions(-)
-
---- a/glib/configure.ac
-+++ b/glib/configure.ac
-@@ -550,7 +550,6 @@ AC_CHECK_SIZEOF(long)
- AC_CHECK_SIZEOF(int)
- AC_CHECK_SIZEOF(void *)
- AC_CHECK_SIZEOF(long long)
--AC_CHECK_SIZEOF(__int64)
-
- AC_CACHE_CHECK([for sig_atomic_t], ac_cv_type_sig_atomic_t,
- [AC_TRY_LINK([#include <signal.h>
-@@ -564,7 +563,7 @@ if test x$ac_cv_type_sig_atomic_t = xyes
- [Define if you have the 'sig_atomic_t' type.])
- fi
-
--if test x$ac_cv_sizeof_long = x8 || test x$ac_cv_sizeof_long_long = x8 || test x$ac_cv_sizeof___int64 = x8 ; then
-+if test x$ac_cv_sizeof_long = x8 || test x$ac_cv_sizeof_long_long = x8 ; then
- :
- else
- AC_MSG_ERROR([
-@@ -573,7 +572,7 @@ else
- ])
- fi
-
--AS_IF([test x$glib_native_win32 != xyes && test x$ac_cv_sizeof_long_long = x8], [
-+AS_IF([test x$ac_cv_sizeof_long_long = x8], [
- # long long is a 64 bit integer.
- AC_MSG_CHECKING(for format to printf and scanf a guint64)
- AC_CACHE_VAL(glib_cv_long_long_format,[
-@@ -599,14 +598,6 @@ AS_IF([test x$glib_native_win32 != xyes
- AC_DEFINE(HAVE_INT64_AND_I64,1,[define to support printing 64-bit integers with format I64])
- fi
- ], [AC_MSG_RESULT(none)])
--],[ test x$ac_cv_sizeof___int64 = x8], [
-- # __int64 is a 64 bit integer.
-- AC_MSG_CHECKING(for format to printf and scanf a guint64)
-- # We know this is MSVCRT.DLL, and what the formats are
-- glib_cv_long_long_format=I64
-- AC_MSG_RESULT(%${glib_cv_long_long_format}u)
-- AC_DEFINE(HAVE_LONG_LONG_FORMAT,1,[define if system printf can print long long])
-- AC_DEFINE(HAVE_INT64_AND_I64,1,[define to support printing 64-bit integers with format I64])
- ])
-
- AC_C_CONST
-@@ -872,9 +863,6 @@ case $ac_cv_sizeof_size_t in
- $ac_cv_sizeof_long_long)
- glib_size_type='long long'
- ;;
-- $ac_cv_sizeof__int64)
-- glib_size_type='__int64'
-- ;;
- *) AC_MSG_ERROR([No type matching size_t in size])
- ;;
- esac
-@@ -931,9 +919,6 @@ case $ac_cv_sizeof_ssize_t in
- $ac_cv_sizeof_long_long)
- glib_ssize_type='long long'
- ;;
-- $ac_cv_sizeof__int64)
-- glib_ssize_type='__int64'
-- ;;
- *) AC_MSG_ERROR([No type matching ssize_t in size])
- ;;
- esac
-@@ -2985,17 +2970,6 @@ $ac_cv_sizeof_long_long)
- gint64_constant='(G_GNUC_EXTENSION (val##LL))'
- guint64_constant='(G_GNUC_EXTENSION (val##ULL))'
- ;;
--$ac_cv_sizeof___int64)
-- gint64='__int64'
-- if test -n "$glib_cv_long_long_format"; then
-- gint64_modifier='"'$glib_cv_long_long_format'"'
-- gint64_format='"'$glib_cv_long_long_format'i"'
-- guint64_format='"'$glib_cv_long_long_format'u"'
-- fi
-- glib_extension=
-- gint64_constant='(val##i64)'
-- guint64_constant='(val##ui64)'
-- ;;
- esac
- glib_size_t=$ac_cv_sizeof_size_t
- glib_ssize_t=$ac_cv_sizeof_ssize_t
-@@ -3020,7 +2994,7 @@ long)
- gsize_format='"lu"'
- glib_msize_type='LONG'
- ;;
--"long long"|__int64)
-+"long long")
- gsize_modifier='"I64"'
- gsize_format='"I64u"'
- glib_msize_type='INT64'
-@@ -3043,7 +3017,7 @@ long)
- gssize_format='"li"'
- glib_mssize_type='LONG'
- ;;
--"long long"|__int64)
-+"long long")
- gssize_modifier='"I64"'
- gssize_format='"I64i"'
- glib_mssize_type='INT64'
-@@ -3080,14 +3054,6 @@ $ac_cv_sizeof_long_long)
- glib_gpi_cast='(gint64)'
- glib_gpui_cast='(guint64)'
- ;;
--$ac_cv_sizeof___int64)
-- glib_intptr_type_define=__int64
-- gintptr_modifier='"I64"'
-- gintptr_format='"I64i"'
-- guintptr_format='"I64u"'
-- glib_gpi_cast='(gint64)'
-- glib_gpui_cast='(guint64)'
-- ;;
- *)
- glib_unknown_void_p=yes
- ;;
-@@ -3258,9 +3224,6 @@ $ac_cv_sizeof_long)
- $ac_cv_sizeof_long_long)
- gint64='long long'
- ;;
--$ac_cv_sizeof___int64)
-- gint64='__int64'
-- ;;
- esac
-
- AC_CHECK_TYPE([guint32],,,[typedef unsigned $gint32 guint32;])
diff --git a/meta/recipes-devtools/pkgconfig/pkgconfig/0001-autotools-use-C99-printf-format-specifiers-on-Window.patch b/meta/recipes-devtools/pkgconfig/pkgconfig/0001-autotools-use-C99-printf-format-specifiers-on-Window.patch
deleted file mode 100644
index a4e92da34b..0000000000
--- a/meta/recipes-devtools/pkgconfig/pkgconfig/0001-autotools-use-C99-printf-format-specifiers-on-Window.patch
+++ /dev/null
@@ -1,68 +0,0 @@
-From 3d7cde654c4c6f3bdad32f5521f28f5802a7c377 Mon Sep 17 00:00:00 2001
-From: Christoph Reiter <reiter.christoph@gmail.com>
-Date: Fri, 24 Aug 2018 21:46:47 +0200
-Subject: [PATCH] autotools: use C99 printf format specifiers on Windows. Fixes
- #1497
-
-Since we now require a C99 compatible printf and use gnulib on Windows,
-we also mark our printf functions as gnu_printf. GCC complains about the
-Windows specific I64 specifiers we still write to glibconfig.h with the
-autotools build.
-
-To fix this switch all I64(x) to ll(x).
-
-This also makes the glibconfig.h output for those macros match the ones
-we get when using meson.
-
-Signed-off-by: Khem Raj <raj.khem@gmail.com>
-Upstream-Status: Backport [https://github.com/GNOME/glib/commit/3d7cde654c]
----
- configure.ac | 18 +++++++++---------
- 1 file changed, 9 insertions(+), 9 deletions(-)
-
---- a/glib/configure.ac
-+++ b/glib/configure.ac
-@@ -576,7 +576,7 @@ AS_IF([test x$ac_cv_sizeof_long_long = x
- # long long is a 64 bit integer.
- AC_MSG_CHECKING(for format to printf and scanf a guint64)
- AC_CACHE_VAL(glib_cv_long_long_format,[
-- for format in ll q I64; do
-+ for format in ll q; do
- AC_TRY_RUN([#include <stdio.h>
- int main()
- {
-@@ -2995,8 +2995,8 @@ long)
- glib_msize_type='LONG'
- ;;
- "long long")
-- gsize_modifier='"I64"'
-- gsize_format='"I64u"'
-+ gsize_modifier='"ll"'
-+ gsize_format='"llu"'
- glib_msize_type='INT64'
- ;;
- esac
-@@ -3018,8 +3018,8 @@ long)
- glib_mssize_type='LONG'
- ;;
- "long long")
-- gssize_modifier='"I64"'
-- gssize_format='"I64i"'
-+ gssize_modifier='"ll"'
-+ gssize_format='"lli"'
- glib_mssize_type='INT64'
- ;;
- esac
-@@ -3048,9 +3048,9 @@ $ac_cv_sizeof_long)
- ;;
- $ac_cv_sizeof_long_long)
- glib_intptr_type_define='long long'
-- gintptr_modifier='"I64"'
-- gintptr_format='"I64i"'
-- guintptr_format='"I64u"'
-+ gintptr_modifier='"ll"'
-+ gintptr_format='"lli"'
-+ guintptr_format='"llu"'
- glib_gpi_cast='(gint64)'
- glib_gpui_cast='(guint64)'
- ;;
diff --git a/meta/recipes-devtools/pkgconfig/pkgconfig_git.bb b/meta/recipes-devtools/pkgconfig/pkgconfig_git.bb
index 7e5860c18c..16e6c5b609 100644
--- a/meta/recipes-devtools/pkgconfig/pkgconfig_git.bb
+++ b/meta/recipes-devtools/pkgconfig/pkgconfig_git.bb
@@ -9,14 +9,12 @@ LICENSE = "GPL-2.0-or-later"
LIC_FILES_CHKSUM = "file://COPYING;md5=b234ee4d69f5fce4486a80fdaf4a4263"
SRCREV = "d97db4fae4c1cd099b506970b285dc2afd818ea2"
-PV = "0.29.2+git${SRCPV}"
+PV = "0.29.2+git"
SRC_URI = "git://gitlab.freedesktop.org/pkg-config/pkg-config.git;branch=master;protocol=https \
file://pkg-config-esdk.in \
file://pkg-config-native.in \
file://0001-glib-gettext.m4-Update-AM_GLIB_GNU_GETTEXT-to-match-.patch \
- file://0001-autotools-remove-support-for-the-__int64-type.-See-1.patch \
- file://0001-autotools-use-C99-printf-format-specifiers-on-Window.patch \
"
S = "${WORKDIR}/git"
@@ -30,7 +28,6 @@ EXTRA_OECONF += "--disable-indirect-deps"
PACKAGECONFIG ??= "glib"
PACKAGECONFIG:class-native = ""
-PACKAGECONFIG:class-nativesdk = ""
PACKAGECONFIG[glib] = "--without-internal-glib,--with-internal-glib,glib-2.0 pkgconfig-native"
diff --git a/meta/recipes-devtools/pseudo/files/glibc238.patch b/meta/recipes-devtools/pseudo/files/glibc238.patch
new file mode 100644
index 0000000000..da4b8caee3
--- /dev/null
+++ b/meta/recipes-devtools/pseudo/files/glibc238.patch
@@ -0,0 +1,59 @@
+glibc 2.38 would include __isoc23_strtol and similar symbols. This is trggerd by
+_GNU_SOURCE but we have to set that for other definitions. Therefore play with defines
+to turn this off within pseudo_wrappers.c. Elsewhere we can switch to _DEFAULT_SOURCE
+rather than _GNU_SOURCE.
+
+Upstream-Status: Pending
+
+Index: git/pseudo_wrappers.c
+===================================================================
+--- git.orig/pseudo_wrappers.c
++++ git/pseudo_wrappers.c
+@@ -6,6 +6,15 @@
+ * SPDX-License-Identifier: LGPL-2.1-only
+ *
+ */
++/* glibc 2.38 would include __isoc23_strtol and similar symbols. This is trggerd by
++ * _GNU_SOURCE but we have to set that for other definitions. Therefore play with defines
++ * to turn this off.
++ */
++#include <features.h>
++#undef __GLIBC_USE_ISOC2X
++#undef __GLIBC_USE_C2X_STRTOL
++#define __GLIBC_USE_C2X_STRTOL 0
++
+ #include <assert.h>
+ #include <stdlib.h>
+ #include <limits.h>
+Index: git/pseudo_util.c
+===================================================================
+--- git.orig/pseudo_util.c
++++ git/pseudo_util.c
+@@ -8,6 +8,14 @@
+ */
+ /* we need access to RTLD_NEXT for a horrible workaround */
+ #define _GNU_SOURCE
++/* glibc 2.38 would include __isoc23_strtol and similar symbols. This is trggerd by
++ * _GNU_SOURCE but we have to set that for other definitions. Therefore play with defines
++ * to turn this off.
++ */
++#include <features.h>
++#undef __GLIBC_USE_ISOC2X
++#undef __GLIBC_USE_C2X_STRTOL
++#define __GLIBC_USE_C2X_STRTOL 0
+
+ #include <ctype.h>
+ #include <errno.h>
+Index: git/pseudo_client.c
+===================================================================
+--- git.orig/pseudo_client.c
++++ git/pseudo_client.c
+@@ -6,7 +6,7 @@
+ * SPDX-License-Identifier: LGPL-2.1-only
+ *
+ */
+-#define _GNU_SOURCE
++#define _DEFAULT_SOURCE
+
+ #include <stdio.h>
+ #include <signal.h>
diff --git a/meta/recipes-devtools/pseudo/pseudo_git.bb b/meta/recipes-devtools/pseudo/pseudo_git.bb
index e7ef6a730c..c70b509233 100644
--- a/meta/recipes-devtools/pseudo/pseudo_git.bb
+++ b/meta/recipes-devtools/pseudo/pseudo_git.bb
@@ -1,7 +1,8 @@
require pseudo.inc
-SRC_URI = "git://git.yoctoproject.org/pseudo;branch=oe-core \
+SRC_URI = "git://git.yoctoproject.org/pseudo;branch=master;protocol=https \
file://0001-configure-Prune-PIE-flags.patch \
+ file://glibc238.patch \
file://fallback-passwd \
file://fallback-group \
"
@@ -13,9 +14,16 @@ SRC_URI:append:class-nativesdk = " \
file://older-glibc-symbols.patch"
SRC_URI[prebuilt.sha256sum] = "ed9f456856e9d86359f169f46a70ad7be4190d6040282b84c8d97b99072485aa"
-SRCREV = "2b4b88eb513335b0ece55fe51854693d9b20de35"
+SRCREV = "0d292df61aeb886ae8ca33d9edc3b6d0ff5c0f0f"
S = "${WORKDIR}/git"
-PV = "1.9.0+git${SRCPV}"
+PV = "1.9.0+git"
+
+# largefile and 64bit time_t support adds these macros via compiler flags globally
+# remove them for pseudo since pseudo intercepts some of the functions which will be
+# aliased due to this e.g. open/open64 and it will complain about duplicate definitions
+# pseudo on 32bit systems is not much of use anyway and these features are not of much
+# use for it.
+TARGET_CC_ARCH:remove = "-D_LARGEFILE_SOURCE -D_FILE_OFFSET_BITS=64 -D_TIME_BITS=64"
# error: use of undeclared identifier '_STAT_VER'
COMPATIBLE_HOST:libc-musl = 'null'
diff --git a/meta/recipes-devtools/python/python-async.inc b/meta/recipes-devtools/python/python-async.inc
deleted file mode 100644
index fde864601c..0000000000
--- a/meta/recipes-devtools/python/python-async.inc
+++ /dev/null
@@ -1,14 +0,0 @@
-SUMMARY = "Python framework to process interdependent tasks in a pool of workers"
-HOMEPAGE = "http://github.com/gitpython-developers/async"
-SECTION = "devel/python"
-LICENSE = "BSD-3-Clause"
-LIC_FILES_CHKSUM = "file://PKG-INFO;beginline=8;endline=8;md5=88df8e78b9edfd744953862179f2d14e"
-
-inherit pypi
-
-SRC_URI[md5sum] = "9b06b5997de2154f3bc0273f80bcef6b"
-SRC_URI[sha256sum] = "ac6894d876e45878faae493b0cf61d0e28ec417334448ac0a6ea2229d8343051"
-
-RDEPENDS:${PN} += "${PYTHON_PN}-threading"
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/python/python-cython.inc b/meta/recipes-devtools/python/python-cython.inc
index 5050dfd427..2235aa9332 100644
--- a/meta/recipes-devtools/python/python-cython.inc
+++ b/meta/recipes-devtools/python/python-cython.inc
@@ -5,38 +5,41 @@ It's designed to bridge the gap between the nice, high-level, easy-to-use world
and the messy, low-level world of C."
SECTION = "devel/python"
LICENSE = "Apache-2.0"
-LIC_FILES_CHKSUM = "file://LICENSE.txt;md5=e23fadd6ceef8c618fc1c65191d846fa"
+LIC_FILES_CHKSUM = "file://LICENSE.txt;md5=61c3ee8961575861fa86c7e62bc9f69c"
PYPI_PACKAGE = "Cython"
BBCLASSEXTEND = "native nativesdk"
-SRC_URI[sha256sum] = "d6fac2342802c30e51426828fe084ff4deb1b3387367cf98976bb2e64b6f8e45"
+SRC_URI[sha256sum] = "a2d354f059d1f055d34cfaa62c5b68bc78ac2ceab6407148d47fb508cf3ba4f3"
UPSTREAM_CHECK_REGEX = "Cython-(?P<pver>.*)\.tar"
inherit pypi
RDEPENDS:${PN}:class-target += "\
- ${PYTHON_PN}-misc \
- ${PYTHON_PN}-netserver \
- ${PYTHON_PN}-pkgutil \
- ${PYTHON_PN}-pyparsing \
- ${PYTHON_PN}-setuptools \
- ${PYTHON_PN}-shell \
- ${PYTHON_PN}-xml \
+ python3-misc \
+ python3-netserver \
+ python3-pkgutil \
+ python3-pyparsing \
+ python3-setuptools \
+ python3-shell \
+ python3-xml \
"
RDEPENDS:${PN}:class-nativesdk += "\
- nativesdk-${PYTHON_PN}-misc \
- nativesdk-${PYTHON_PN}-netserver \
- nativesdk-${PYTHON_PN}-pkgutil \
- nativesdk-${PYTHON_PN}-pyparsing \
- nativesdk-${PYTHON_PN}-setuptools \
- nativesdk-${PYTHON_PN}-shell \
- nativesdk-${PYTHON_PN}-xml \
+ nativesdk-python3-misc \
+ nativesdk-python3-netserver \
+ nativesdk-python3-pkgutil \
+ nativesdk-python3-pyparsing \
+ nativesdk-python3-setuptools \
+ nativesdk-python3-shell \
+ nativesdk-python3-xml \
"
do_install:append() {
# Make sure we use /usr/bin/env python
for PYTHSCRIPT in `grep -rIl '^#!.*python' ${D}`; do
- sed -i -e '1s|^#!.*|#!/usr/bin/env ${PYTHON_PN}|' $PYTHSCRIPT
+ sed -i -e '1s|^#!.*|#!/usr/bin/env python3|' $PYTHSCRIPT
done
+
+ # remove build paths from generated sources
+ sed -i -e 's|${WORKDIR}||' ${S}/Cython/*.c ${S}/Cython/Compiler/*.c ${S}/Cython/Plex/*.c
}
diff --git a/meta/recipes-devtools/python/python-gitdb.inc b/meta/recipes-devtools/python/python-gitdb.inc
deleted file mode 100644
index 9482964f30..0000000000
--- a/meta/recipes-devtools/python/python-gitdb.inc
+++ /dev/null
@@ -1,23 +0,0 @@
-SUMMARY = "A pure-Python git object database"
-HOMEPAGE = "http://github.com/gitpython-developers/gitdb"
-SECTION = "devel/python"
-LICENSE = "BSD-3-Clause"
-LIC_FILES_CHKSUM = "file://LICENSE;md5=59e5ecb13339a936eedf83282eaf4528"
-
-inherit pypi
-
-PYPI_PACKAGE = "gitdb"
-
-SRC_URI[sha256sum] = "bac2fd45c0a1c9cf619e63a90d62bdc63892ef92387424b855792a6cabe789aa"
-
-DEPENDS = "${PYTHON_PN}-async ${PYTHON_PN}-setuptools-native ${PYTHON_PN}-smmap"
-
-RDEPENDS:${PN} += "${PYTHON_PN}-async \
- ${PYTHON_PN}-compression \
- ${PYTHON_PN}-crypt \
- ${PYTHON_PN}-io \
- ${PYTHON_PN}-mmap \
- ${PYTHON_PN}-shell \
- ${PYTHON_PN}-smmap \
-"
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/python/python-pbr.inc b/meta/recipes-devtools/python/python-pbr.inc
index 59fbc6dce4..f08d9fd381 100644
--- a/meta/recipes-devtools/python/python-pbr.inc
+++ b/meta/recipes-devtools/python/python-pbr.inc
@@ -9,6 +9,6 @@ SRC_URI += "file://0001-change-shebang-to-python3.patch"
inherit pypi
-RDEPENDS:${PN} += "${PYTHON_PN}-pip"
+RDEPENDS:${PN} += "python3-pip"
BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/python/python-pyasn1.inc b/meta/recipes-devtools/python/python-pyasn1.inc
index 6cbed0fcd5..7b269f2940 100644
--- a/meta/recipes-devtools/python/python-pyasn1.inc
+++ b/meta/recipes-devtools/python/python-pyasn1.inc
@@ -1,16 +1,15 @@
SUMMARY = "Python library implementing ASN.1 types."
HOMEPAGE = "http://pyasn1.sourceforge.net/"
LICENSE = "BSD-2-Clause"
-LIC_FILES_CHKSUM = "file://LICENSE.rst;md5=a14482d15c2249de3b6f0e8a47e021fd"
+LIC_FILES_CHKSUM = "file://LICENSE.rst;md5=190f79253908c986e6cacf380c3a5f6d"
-SRC_URI[md5sum] = "dffae4ff9f997a83324b3f33fe62be54"
-SRC_URI[sha256sum] = "aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba"
+SRC_URI[sha256sum] = "3a35ab2c4b5ef98e17dfdec8ab074046fbda76e281c5a706ccd82328cfc8f64c"
RDEPENDS:${PN}:class-target += " \
- ${PYTHON_PN}-codecs \
- ${PYTHON_PN}-logging \
- ${PYTHON_PN}-math \
- ${PYTHON_PN}-shell \
+ python3-codecs \
+ python3-logging \
+ python3-math \
+ python3-shell \
"
BBCLASSEXTEND = "native nativesdk"
@@ -18,11 +17,12 @@ BBCLASSEXTEND = "native nativesdk"
inherit ptest
SRC_URI += " \
- file://run-ptest \
-"
+ file://run-ptest \
+ "
RDEPENDS:${PN}-ptest += " \
- ${PYTHON_PN}-pytest \
+ python3-pytest \
+ python3-unittest-automake-output \
"
do_install_ptest() {
diff --git a/meta/recipes-devtools/python/python-pycryptodome.inc b/meta/recipes-devtools/python/python-pycryptodome.inc
index 29fe80d224..8d9b6d911e 100644
--- a/meta/recipes-devtools/python/python-pycryptodome.inc
+++ b/meta/recipes-devtools/python/python-pycryptodome.inc
@@ -10,12 +10,14 @@ inherit pypi
PYPI_PACKAGE_EXT = "tar.gz"
RDEPENDS:${PN} += " \
- ${PYTHON_PN}-io \
- ${PYTHON_PN}-math \
+ python3-cffi \
+ python3-ctypes \
+ python3-io \
+ python3-math \
"
RDEPENDS:${PN}-tests += " \
- ${PYTHON_PN}-unittest \
+ python3-unittest \
"
PACKAGES =+ "${PN}-tests"
diff --git a/meta/recipes-devtools/python/python-six.inc b/meta/recipes-devtools/python/python-six.inc
index aac6765149..6b1b4ba481 100644
--- a/meta/recipes-devtools/python/python-six.inc
+++ b/meta/recipes-devtools/python/python-six.inc
@@ -6,6 +6,6 @@ LIC_FILES_CHKSUM = "file://LICENSE;md5=43cfc9e4ac0e377acfb9b76f56b8415d"
inherit pypi
-RDEPENDS:${PN} = "${PYTHON_PN}-io"
+RDEPENDS:${PN} = "python3-io"
BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/python/python-testtools.inc b/meta/recipes-devtools/python/python-testtools.inc
deleted file mode 100644
index ef5e15cc3a..0000000000
--- a/meta/recipes-devtools/python/python-testtools.inc
+++ /dev/null
@@ -1,27 +0,0 @@
-SUMMARY = "Extensions to the Python standard library unit testing framework"
-HOMEPAGE = "https://pypi.org/project/testtools/"
-SECTION = "devel/python"
-LICENSE = "Apache-2.0"
-LIC_FILES_CHKSUM = "file://LICENSE;md5=e2c9d3e8ba7141c83bfef190e0b9379a"
-
-inherit pypi
-
-SRC_URI[sha256sum] = "57c13433d94f9ffde3be6534177d10fb0c1507cc499319128958ca91a65cb23f"
-
-DEPENDS += " \
- ${PYTHON_PN}-pbr \
- "
-
-# Satisfy setup.py 'setup_requires'
-DEPENDS += " \
- ${PYTHON_PN}-pbr-native \
- "
-
-RDEPENDS:${PN} += "\
- ${PYTHON_PN}-doctest \
- ${PYTHON_PN}-extras \
- ${PYTHON_PN}-pbr \
- ${PYTHON_PN}-six \
- "
-
-BBCLASSEXTEND = "nativesdk"
diff --git a/meta/recipes-devtools/python/python3-alabaster_0.7.12.bb b/meta/recipes-devtools/python/python3-alabaster_0.7.12.bb
deleted file mode 100644
index 3381ce9568..0000000000
--- a/meta/recipes-devtools/python/python3-alabaster_0.7.12.bb
+++ /dev/null
@@ -1,11 +0,0 @@
-DESCRIPTION = "Alabaster is a visually (c)lean, responsive, configurable theme for the Sphinx documentation system. It is Python 2+3 compatible."
-HOMEPAGE = "https://alabaster.readthedocs.io/en/latest/"
-SECTION = "devel/python"
-LICENSE = "BSD-3-Clause"
-LIC_FILES_CHKSUM = "file://LICENSE;md5=da053683d66d543813a727e8a30c96ca"
-
-SRC_URI[sha256sum] = "a661d72d58e6ea8a57f7a86e37d86716863ee5e92788398526d58b26a4e4dc02"
-
-inherit setuptools3 pypi
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/python/python3-alabaster_0.7.16.bb b/meta/recipes-devtools/python/python3-alabaster_0.7.16.bb
new file mode 100644
index 0000000000..c61fce454f
--- /dev/null
+++ b/meta/recipes-devtools/python/python3-alabaster_0.7.16.bb
@@ -0,0 +1,12 @@
+SUMMARY = "Alabaster is a visually (c)lean, responsive, configurable theme for the Sphinx documentation system."
+HOMEPAGE = "https://alabaster.readthedocs.io/en/latest/"
+BUGTRACKER = "https://github.com/sphinx-doc/alabaster/issues"
+SECTION = "devel/python"
+LICENSE = "BSD-3-Clause"
+LIC_FILES_CHKSUM = "file://LICENSE.rst;md5=21860fdb805bf4e0bfaf94b566b747fa"
+
+SRC_URI[sha256sum] = "75a8b99c28a5dad50dd7f8ccdd447a121ddb3892da9e53d1ca5cca3106d58d65"
+
+inherit python_flit_core pypi
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/python/python3-asn1crypto_1.5.1.bb b/meta/recipes-devtools/python/python3-asn1crypto_1.5.1.bb
index 5fa2ed987c..322497b09b 100644
--- a/meta/recipes-devtools/python/python3-asn1crypto_1.5.1.bb
+++ b/meta/recipes-devtools/python/python3-asn1crypto_1.5.1.bb
@@ -1,4 +1,4 @@
-DESCRIPTION = "A fast, pure Python library for parsing and serializing ASN.1 structures"
+SUMMARY = "A fast, pure Python library for parsing and serializing ASN.1 structures"
HOMEPAGE = "https://github.com/wbond/asn1crypto"
SECTION = "devel/python"
LICENSE = "MIT"
@@ -11,14 +11,14 @@ SRC_URI[sha256sum] = "13ae38502be632115abf8a24cbe5f4da52e3b5231990aff31123c80530
inherit pypi setuptools3
RDEPENDS:${PN}:class-target += " \
- ${PYTHON_PN}-codecs \
- ${PYTHON_PN}-crypt \
- ${PYTHON_PN}-ctypes \
- ${PYTHON_PN}-datetime \
- ${PYTHON_PN}-io \
- ${PYTHON_PN}-netclient \
- ${PYTHON_PN}-numbers \
- ${PYTHON_PN}-shell \
+ python3-codecs \
+ python3-crypt \
+ python3-ctypes \
+ python3-datetime \
+ python3-io \
+ python3-netclient \
+ python3-numbers \
+ python3-shell \
"
BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/python/python3-async_0.6.2.bb b/meta/recipes-devtools/python/python3-async_0.6.2.bb
deleted file mode 100644
index 0da5fdecdc..0000000000
--- a/meta/recipes-devtools/python/python3-async_0.6.2.bb
+++ /dev/null
@@ -1,2 +0,0 @@
-inherit setuptools3
-require python-async.inc
diff --git a/meta/recipes-devtools/python/python3-atomicwrites/run-ptest b/meta/recipes-devtools/python/python3-atomicwrites/run-ptest
index b63c4de0d9..8d2017d39c 100644
--- a/meta/recipes-devtools/python/python3-atomicwrites/run-ptest
+++ b/meta/recipes-devtools/python/python3-atomicwrites/run-ptest
@@ -1,3 +1,3 @@
#!/bin/sh
-pytest -o log_cli=true -o log_cli_level=INFO | sed -e 's/\[...%\]//g'| sed -e 's/PASSED/PASS/g'| sed -e 's/FAILED/FAIL/g'|sed -e 's/SKIPPED/SKIP/g'| awk '{if ($NF=="PASS" || $NF=="FAIL" || $NF=="SKIP" || $NF=="XFAIL" || $NF=="XPASS"){printf "%s: %s\n", $NF, $0}else{print}}'| awk '{if ($NF=="PASS" || $NF=="FAIL" || $NF=="SKIP" || $NF=="XFAIL" || $NF=="XPASS") {$NF="";print $0}else{print}}'
+pytest --automake
diff --git a/meta/recipes-devtools/python/python3-atomicwrites_1.4.0.bb b/meta/recipes-devtools/python/python3-atomicwrites_1.4.0.bb
deleted file mode 100644
index 065a2c7b4f..0000000000
--- a/meta/recipes-devtools/python/python3-atomicwrites_1.4.0.bb
+++ /dev/null
@@ -1,25 +0,0 @@
-DESCRIPTION = "Powerful Python library for atomic file writes"
-HOMEPAGE = "https://github.com/untitaker/python-atomicwrites"
-LICENSE = "MIT"
-LIC_FILES_CHKSUM = "file://LICENSE;md5=91cc36cfafeefb7863673bcfcb1d4da4"
-
-SRC_URI[md5sum] = "b5cc15c8f9f180a48665f9aacf91d817"
-SRC_URI[sha256sum] = "ae70396ad1a434f9c7046fd2dd196fc04b12f9e91ffb859164193be8b6168a7a"
-
-inherit pypi setuptools3 ptest
-
-SRC_URI += " \
- file://run-ptest \
-"
-
-RDEPENDS:${PN}-ptest += " \
- ${PYTHON_PN}-pytest \
- ${PYTHON_PN}-unixadmin \
-"
-
-do_install_ptest() {
- install -d ${D}${PTEST_PATH}/tests
- cp -rf ${S}/tests/* ${D}${PTEST_PATH}/tests/
-}
-
-RDEPENDS:${PN} = "${PYTHON_PN}-misc"
diff --git a/meta/recipes-devtools/python/python3-atomicwrites_1.4.1.bb b/meta/recipes-devtools/python/python3-atomicwrites_1.4.1.bb
new file mode 100644
index 0000000000..848f74d612
--- /dev/null
+++ b/meta/recipes-devtools/python/python3-atomicwrites_1.4.1.bb
@@ -0,0 +1,27 @@
+SUMMARY = "Powerful Python library for atomic file writes"
+HOMEPAGE = "https://github.com/untitaker/python-atomicwrites"
+LICENSE = "MIT"
+LIC_FILES_CHKSUM = "file://LICENSE;md5=91cc36cfafeefb7863673bcfcb1d4da4"
+
+SRC_URI[sha256sum] = "81b2c9071a49367a7f770170e5eec8cb66567cfbbc8c73d20ce5ca4a8d71cf11"
+
+inherit pypi setuptools3 ptest
+
+SRC_URI += " \
+ file://run-ptest \
+"
+
+RDEPENDS:${PN}-ptest += " \
+ python3-pytest \
+ python3-unittest-automake-output \
+ python3-unixadmin \
+"
+
+do_install_ptest() {
+ install -d ${D}${PTEST_PATH}/tests
+ cp -rf ${S}/tests/* ${D}${PTEST_PATH}/tests/
+}
+
+RDEPENDS:${PN} = "python3-misc"
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/python/python3-attrs/0001-conftest.py-disable-deadline.patch b/meta/recipes-devtools/python/python3-attrs/0001-conftest.py-disable-deadline.patch
new file mode 100644
index 0000000000..b1e6d99916
--- /dev/null
+++ b/meta/recipes-devtools/python/python3-attrs/0001-conftest.py-disable-deadline.patch
@@ -0,0 +1,45 @@
+From 41103f017f6a233ebb477dd1712fe82ea8f1c84c Mon Sep 17 00:00:00 2001
+From: Tim Orling <tim.orling@konsulko.com>
+Date: Thu, 29 Feb 2024 08:45:54 -0800
+Subject: [PATCH] conftest.py: disable deadline
+
+The deadline is by default 200ms, but this is intended to be useful to
+the developer and not necessarily recommended for heavily loaded CI
+systems. Avoid warnings by disabling the deadline completely.
+
+https://github.com/HypothesisWorks/hypothesis/issues/3713
+https://hypothesis.readthedocs.io/en/latest/settings.html#hypothesis.settings.deadline
+https://lists.openembedded.org/g/openembedded-core/topic/104640034#196437
+
+Fixes:
+"""
+Unreliable test timings! On an initial run, this test took 268.29ms,
+which exceeded the deadline of 200.00ms, but on a subsequent run it
+took 2.63 ms, which did not. If you expect this sort of variability in
+your test timings, consider turning deadlines off for this test by
+setting deadline=None.
+"""
+
+Upstream-Status: Submitted [https://github.com/python-attrs/attrs/pull/1252]
+
+Signed-off-by: Tim Orling <tim.orling@konsulko.com>
+---
+ conftest.py | 2 +-
+ 1 file changed, 1 insertion(+), 1 deletion(-)
+
+diff --git a/conftest.py b/conftest.py
+index 144e5f3..fdead3d 100644
+--- a/conftest.py
++++ b/conftest.py
+@@ -20,7 +20,7 @@ def _frozen(request):
+ def pytest_configure(config):
+ # HealthCheck.too_slow causes more trouble than good -- especially in CIs.
+ settings.register_profile(
+- "patience", settings(suppress_health_check=[HealthCheck.too_slow])
++ "patience", settings(suppress_health_check=[HealthCheck.too_slow], deadline=None)
+ )
+ settings.load_profile("patience")
+
+--
+2.34.1
+
diff --git a/meta/recipes-devtools/python/python3-attrs/0001-test_funcs-skip-test_unknown-for-pytest-8.patch b/meta/recipes-devtools/python/python3-attrs/0001-test_funcs-skip-test_unknown-for-pytest-8.patch
new file mode 100644
index 0000000000..bb69c0f157
--- /dev/null
+++ b/meta/recipes-devtools/python/python3-attrs/0001-test_funcs-skip-test_unknown-for-pytest-8.patch
@@ -0,0 +1,30 @@
+From cbe95e1aa6d95195dce13406a6f0522b2964babc Mon Sep 17 00:00:00 2001
+From: Tim Orling <tim.orling@konsulko.com>
+Date: Fri, 16 Feb 2024 07:17:19 -0800
+Subject: [PATCH] test_funcs: skip test_unknown for pytest 8
+
+https://github.com/python-attrs/attrs/issues/1233
+
+Upstream-Status: Inappropriate [Test case needs to be properly fixed upstream]
+
+Signed-off-by: Tim Orling <tim.orling@konsulko.com>
+---
+ tests/test_funcs.py | 2 +-
+ 1 file changed, 1 insertion(+), 1 deletion(-)
+
+diff --git a/tests/test_funcs.py b/tests/test_funcs.py
+index 044aaab..0872c31 100644
+--- a/tests/test_funcs.py
++++ b/tests/test_funcs.py
+@@ -593,7 +593,7 @@ class TestAssoc:
+ for k, v in change_dict.items():
+ assert getattr(changed, k) == v
+
+- @given(simple_classes())
++ @pytest.mark.skip('Broken with pytest 8. See https://github.com/python-attrs/attrs/issues/1233')
+ def test_unknown(self, C):
+ """
+ Wanting to change an unknown attribute raises an
+--
+2.34.1
+
diff --git a/meta/recipes-devtools/python/python3-attrs/run-ptest b/meta/recipes-devtools/python/python3-attrs/run-ptest
new file mode 100644
index 0000000000..8d2017d39c
--- /dev/null
+++ b/meta/recipes-devtools/python/python3-attrs/run-ptest
@@ -0,0 +1,3 @@
+#!/bin/sh
+
+pytest --automake
diff --git a/meta/recipes-devtools/python/python3-attrs_21.4.0.bb b/meta/recipes-devtools/python/python3-attrs_21.4.0.bb
deleted file mode 100644
index 1839ce6245..0000000000
--- a/meta/recipes-devtools/python/python3-attrs_21.4.0.bb
+++ /dev/null
@@ -1,19 +0,0 @@
-DESCRIPTION = "Classes Without Boilerplate"
-HOMEPAGE = "http://www.attrs.org/"
-LICENSE = "MIT"
-LIC_FILES_CHKSUM = "file://LICENSE;md5=d4ab25949a73fe7d4fdee93bcbdbf8ff"
-
-SRC_URI[sha256sum] = "626ba8234211db98e869df76230a137c4c40a12d72445c45d5f5b716f076e2fd"
-
-inherit pypi python_setuptools_build_meta
-
-RDEPENDS:${PN}:class-target += " \
- ${PYTHON_PN}-crypt \
- ${PYTHON_PN}-ctypes \
-"
-RDEPENDS:${PN}:class-nativesdk += " \
- ${PYTHON_PN}-crypt \
- ${PYTHON_PN}-ctypes \
-"
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/python/python3-attrs_23.2.0.bb b/meta/recipes-devtools/python/python3-attrs_23.2.0.bb
new file mode 100644
index 0000000000..a638097988
--- /dev/null
+++ b/meta/recipes-devtools/python/python3-attrs_23.2.0.bb
@@ -0,0 +1,39 @@
+SUMMARY = "Classes Without Boilerplate"
+HOMEPAGE = "http://www.attrs.org/"
+LICENSE = "MIT"
+LIC_FILES_CHKSUM = "file://LICENSE;md5=5e55731824cf9205cfabeab9a0600887"
+
+SRC_URI[sha256sum] = "935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"
+
+inherit pypi ptest python_hatchling
+
+SRC_URI += " \
+ file://0001-test_funcs-skip-test_unknown-for-pytest-8.patch \
+ file://0001-conftest.py-disable-deadline.patch \
+ file://run-ptest \
+"
+
+DEPENDS += " \
+ python3-hatch-vcs-native \
+ python3-hatch-fancy-pypi-readme-native \
+"
+
+RDEPENDS:${PN}+= " \
+ python3-compression \
+ python3-ctypes \
+ python3-crypt \
+"
+
+RDEPENDS:${PN}-ptest += " \
+ python3-hypothesis \
+ python3-pytest \
+ python3-unittest-automake-output \
+"
+
+do_install_ptest() {
+ install -d ${D}${PTEST_PATH}/tests
+ cp -rf ${S}/tests/* ${D}${PTEST_PATH}/tests/
+ install ${S}/conftest.py ${D}${PTEST_PATH}/
+}
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/python/python3-babel_2.10.1.bb b/meta/recipes-devtools/python/python3-babel_2.10.1.bb
deleted file mode 100644
index d06eb2bf5c..0000000000
--- a/meta/recipes-devtools/python/python3-babel_2.10.1.bb
+++ /dev/null
@@ -1,26 +0,0 @@
-DESCRIPTION = "A collection of tools for internationalizing Python applications"
-HOMEPAGE = "http://babel.edgewall.org/"
-LICENSE = "BSD-3-Clause"
-LIC_FILES_CHKSUM = "file://LICENSE;md5=1b3f4650099e6d6a73e5a7fc8774ff18"
-
-SRC_URI[sha256sum] = "98aeaca086133efb3e1e2aad0396987490c8425929ddbcfe0550184fdc54cd13"
-
-PYPI_PACKAGE = "Babel"
-
-inherit pypi setuptools3
-
-CLEANBROKEN = "1"
-
-RDEPENDS:${PN} += " \
- ${PYTHON_PN}-codecs \
- ${PYTHON_PN}-difflib \
- ${PYTHON_PN}-distutils \
- ${PYTHON_PN}-netserver \
- ${PYTHON_PN}-numbers \
- ${PYTHON_PN}-pickle \
- ${PYTHON_PN}-pytz \
- ${PYTHON_PN}-shell \
- ${PYTHON_PN}-threading \
-"
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/python/python3-babel_2.14.0.bb b/meta/recipes-devtools/python/python3-babel_2.14.0.bb
new file mode 100644
index 0000000000..cd40d4222b
--- /dev/null
+++ b/meta/recipes-devtools/python/python3-babel_2.14.0.bb
@@ -0,0 +1,26 @@
+SUMMARY = "A collection of tools for internationalizing Python applications"
+HOMEPAGE = "http://babel.edgewall.org/"
+LICENSE = "BSD-3-Clause"
+LIC_FILES_CHKSUM = "file://LICENSE;md5=0f97d9a63e91407b4c0d01efde91cfc0"
+
+SRC_URI[sha256sum] = "6919867db036398ba21eb5c7a0f6b28ab8cbc3ae7a73a44ebe34ae74a4e7d363"
+
+PYPI_PACKAGE = "Babel"
+
+inherit pypi setuptools3
+
+CLEANBROKEN = "1"
+
+RDEPENDS:${PN} += " \
+ python3-codecs \
+ python3-difflib \
+ python3-netserver \
+ python3-numbers \
+ python3-pickle \
+ python3-pytz \
+ python3-setuptools \
+ python3-shell \
+ python3-threading \
+"
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/python/python3-bcrypt-crates.inc b/meta/recipes-devtools/python/python3-bcrypt-crates.inc
new file mode 100644
index 0000000000..0e7479f0b5
--- /dev/null
+++ b/meta/recipes-devtools/python/python3-bcrypt-crates.inc
@@ -0,0 +1,114 @@
+# Autogenerated with 'bitbake -c update_crates python3-bcrypt'
+
+# from src/_bcrypt/Cargo.lock
+SRC_URI += " \
+ crate://crates.io/autocfg/1.1.0 \
+ crate://crates.io/base64/0.21.5 \
+ crate://crates.io/bcrypt/0.15.0 \
+ crate://crates.io/bcrypt-pbkdf/0.10.0 \
+ crate://crates.io/bitflags/1.3.2 \
+ crate://crates.io/block-buffer/0.10.4 \
+ crate://crates.io/blowfish/0.9.1 \
+ crate://crates.io/byteorder/1.5.0 \
+ crate://crates.io/cfg-if/1.0.0 \
+ crate://crates.io/cipher/0.4.4 \
+ crate://crates.io/cpufeatures/0.2.11 \
+ crate://crates.io/crypto-common/0.1.6 \
+ crate://crates.io/digest/0.10.7 \
+ crate://crates.io/generic-array/0.14.7 \
+ crate://crates.io/getrandom/0.2.11 \
+ crate://crates.io/heck/0.4.1 \
+ crate://crates.io/indoc/2.0.4 \
+ crate://crates.io/inout/0.1.3 \
+ crate://crates.io/libc/0.2.151 \
+ crate://crates.io/lock_api/0.4.11 \
+ crate://crates.io/memoffset/0.9.0 \
+ crate://crates.io/once_cell/1.19.0 \
+ crate://crates.io/parking_lot/0.12.1 \
+ crate://crates.io/parking_lot_core/0.9.9 \
+ crate://crates.io/pbkdf2/0.12.2 \
+ crate://crates.io/portable-atomic/1.6.0 \
+ crate://crates.io/proc-macro2/1.0.70 \
+ crate://crates.io/pyo3/0.20.3 \
+ crate://crates.io/pyo3-build-config/0.20.3 \
+ crate://crates.io/pyo3-ffi/0.20.3 \
+ crate://crates.io/pyo3-macros/0.20.3 \
+ crate://crates.io/pyo3-macros-backend/0.20.3 \
+ crate://crates.io/quote/1.0.33 \
+ crate://crates.io/redox_syscall/0.4.1 \
+ crate://crates.io/scopeguard/1.2.0 \
+ crate://crates.io/sha2/0.10.8 \
+ crate://crates.io/smallvec/1.11.2 \
+ crate://crates.io/subtle/2.5.0 \
+ crate://crates.io/syn/2.0.41 \
+ crate://crates.io/target-lexicon/0.12.12 \
+ crate://crates.io/typenum/1.17.0 \
+ crate://crates.io/unicode-ident/1.0.12 \
+ crate://crates.io/unindent/0.2.3 \
+ crate://crates.io/version_check/0.9.4 \
+ crate://crates.io/wasi/0.11.0+wasi-snapshot-preview1 \
+ crate://crates.io/windows-targets/0.48.5 \
+ crate://crates.io/windows_aarch64_gnullvm/0.48.5 \
+ crate://crates.io/windows_aarch64_msvc/0.48.5 \
+ crate://crates.io/windows_i686_gnu/0.48.5 \
+ crate://crates.io/windows_i686_msvc/0.48.5 \
+ crate://crates.io/windows_x86_64_gnu/0.48.5 \
+ crate://crates.io/windows_x86_64_gnullvm/0.48.5 \
+ crate://crates.io/windows_x86_64_msvc/0.48.5 \
+ crate://crates.io/zeroize/1.7.0 \
+"
+
+SRC_URI[autocfg-1.1.0.sha256sum] = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa"
+SRC_URI[base64-0.21.5.sha256sum] = "35636a1494ede3b646cc98f74f8e62c773a38a659ebc777a2cf26b9b74171df9"
+SRC_URI[bcrypt-0.15.0.sha256sum] = "28d1c9c15093eb224f0baa400f38fcd713fc1391a6f1c389d886beef146d60a3"
+SRC_URI[bcrypt-pbkdf-0.10.0.sha256sum] = "6aeac2e1fe888769f34f05ac343bbef98b14d1ffb292ab69d4608b3abc86f2a2"
+SRC_URI[bitflags-1.3.2.sha256sum] = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a"
+SRC_URI[block-buffer-0.10.4.sha256sum] = "3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71"
+SRC_URI[blowfish-0.9.1.sha256sum] = "e412e2cd0f2b2d93e02543ceae7917b3c70331573df19ee046bcbc35e45e87d7"
+SRC_URI[byteorder-1.5.0.sha256sum] = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b"
+SRC_URI[cfg-if-1.0.0.sha256sum] = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
+SRC_URI[cipher-0.4.4.sha256sum] = "773f3b9af64447d2ce9850330c473515014aa235e6a783b02db81ff39e4a3dad"
+SRC_URI[cpufeatures-0.2.11.sha256sum] = "ce420fe07aecd3e67c5f910618fe65e94158f6dcc0adf44e00d69ce2bdfe0fd0"
+SRC_URI[crypto-common-0.1.6.sha256sum] = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3"
+SRC_URI[digest-0.10.7.sha256sum] = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292"
+SRC_URI[generic-array-0.14.7.sha256sum] = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a"
+SRC_URI[getrandom-0.2.11.sha256sum] = "fe9006bed769170c11f845cf00c7c1e9092aeb3f268e007c3e760ac68008070f"
+SRC_URI[heck-0.4.1.sha256sum] = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8"
+SRC_URI[indoc-2.0.4.sha256sum] = "1e186cfbae8084e513daff4240b4797e342f988cecda4fb6c939150f96315fd8"
+SRC_URI[inout-0.1.3.sha256sum] = "a0c10553d664a4d0bcff9f4215d0aac67a639cc68ef660840afe309b807bc9f5"
+SRC_URI[libc-0.2.151.sha256sum] = "302d7ab3130588088d277783b1e2d2e10c9e9e4a16dd9050e6ec93fb3e7048f4"
+SRC_URI[lock_api-0.4.11.sha256sum] = "3c168f8615b12bc01f9c17e2eb0cc07dcae1940121185446edc3744920e8ef45"
+SRC_URI[memoffset-0.9.0.sha256sum] = "5a634b1c61a95585bd15607c6ab0c4e5b226e695ff2800ba0cdccddf208c406c"
+SRC_URI[once_cell-1.19.0.sha256sum] = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92"
+SRC_URI[parking_lot-0.12.1.sha256sum] = "3742b2c103b9f06bc9fff0a37ff4912935851bee6d36f3c02bcc755bcfec228f"
+SRC_URI[parking_lot_core-0.9.9.sha256sum] = "4c42a9226546d68acdd9c0a280d17ce19bfe27a46bf68784e4066115788d008e"
+SRC_URI[pbkdf2-0.12.2.sha256sum] = "f8ed6a7761f76e3b9f92dfb0a60a6a6477c61024b775147ff0973a02653abaf2"
+SRC_URI[portable-atomic-1.6.0.sha256sum] = "7170ef9988bc169ba16dd36a7fa041e5c4cbeb6a35b76d4c03daded371eae7c0"
+SRC_URI[proc-macro2-1.0.70.sha256sum] = "39278fbbf5fb4f646ce651690877f89d1c5811a3d4acb27700c1cb3cdb78fd3b"
+SRC_URI[pyo3-0.20.3.sha256sum] = "53bdbb96d49157e65d45cc287af5f32ffadd5f4761438b527b055fb0d4bb8233"
+SRC_URI[pyo3-build-config-0.20.3.sha256sum] = "deaa5745de3f5231ce10517a1f5dd97d53e5a2fd77aa6b5842292085831d48d7"
+SRC_URI[pyo3-ffi-0.20.3.sha256sum] = "62b42531d03e08d4ef1f6e85a2ed422eb678b8cd62b762e53891c05faf0d4afa"
+SRC_URI[pyo3-macros-0.20.3.sha256sum] = "7305c720fa01b8055ec95e484a6eca7a83c841267f0dd5280f0c8b8551d2c158"
+SRC_URI[pyo3-macros-backend-0.20.3.sha256sum] = "7c7e9b68bb9c3149c5b0cade5d07f953d6d125eb4337723c4ccdb665f1f96185"
+SRC_URI[quote-1.0.33.sha256sum] = "5267fca4496028628a95160fc423a33e8b2e6af8a5302579e322e4b520293cae"
+SRC_URI[redox_syscall-0.4.1.sha256sum] = "4722d768eff46b75989dd134e5c353f0d6296e5aaa3132e776cbdb56be7731aa"
+SRC_URI[scopeguard-1.2.0.sha256sum] = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49"
+SRC_URI[sha2-0.10.8.sha256sum] = "793db75ad2bcafc3ffa7c68b215fee268f537982cd901d132f89c6343f3a3dc8"
+SRC_URI[smallvec-1.11.2.sha256sum] = "4dccd0940a2dcdf68d092b8cbab7dc0ad8fa938bf95787e1b916b0e3d0e8e970"
+SRC_URI[subtle-2.5.0.sha256sum] = "81cdd64d312baedb58e21336b31bc043b77e01cc99033ce76ef539f78e965ebc"
+SRC_URI[syn-2.0.41.sha256sum] = "44c8b28c477cc3bf0e7966561e3460130e1255f7a1cf71931075f1c5e7a7e269"
+SRC_URI[target-lexicon-0.12.12.sha256sum] = "14c39fd04924ca3a864207c66fc2cd7d22d7c016007f9ce846cbb9326331930a"
+SRC_URI[typenum-1.17.0.sha256sum] = "42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825"
+SRC_URI[unicode-ident-1.0.12.sha256sum] = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b"
+SRC_URI[unindent-0.2.3.sha256sum] = "c7de7d73e1754487cb58364ee906a499937a0dfabd86bcb980fa99ec8c8fa2ce"
+SRC_URI[version_check-0.9.4.sha256sum] = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f"
+SRC_URI[wasi-0.11.0+wasi-snapshot-preview1.sha256sum] = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423"
+SRC_URI[windows-targets-0.48.5.sha256sum] = "9a2fa6e2155d7247be68c096456083145c183cbbbc2764150dda45a87197940c"
+SRC_URI[windows_aarch64_gnullvm-0.48.5.sha256sum] = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8"
+SRC_URI[windows_aarch64_msvc-0.48.5.sha256sum] = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc"
+SRC_URI[windows_i686_gnu-0.48.5.sha256sum] = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e"
+SRC_URI[windows_i686_msvc-0.48.5.sha256sum] = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406"
+SRC_URI[windows_x86_64_gnu-0.48.5.sha256sum] = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e"
+SRC_URI[windows_x86_64_gnullvm-0.48.5.sha256sum] = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc"
+SRC_URI[windows_x86_64_msvc-0.48.5.sha256sum] = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538"
+SRC_URI[zeroize-1.7.0.sha256sum] = "525b4ec142c6b68a2d10f01f7bbf6755599ca3f81ea53b8431b7dd348f5fdb2d"
diff --git a/meta/recipes-devtools/python/python3-bcrypt/0001-Bump-pyo3-from-0.20.0-to-0.20.3-in-src-_bcrypt-746.patch b/meta/recipes-devtools/python/python3-bcrypt/0001-Bump-pyo3-from-0.20.0-to-0.20.3-in-src-_bcrypt-746.patch
new file mode 100644
index 0000000000..3f671fcc98
--- /dev/null
+++ b/meta/recipes-devtools/python/python3-bcrypt/0001-Bump-pyo3-from-0.20.0-to-0.20.3-in-src-_bcrypt-746.patch
@@ -0,0 +1,111 @@
+From cfdd98b3215cc12e66190a9c7f0a32c052e3c2e7 Mon Sep 17 00:00:00 2001
+From: Khem Raj <raj.khem@gmail.com>
+Date: Mon, 26 Feb 2024 18:26:30 -0800
+Subject: [PATCH] Bump pyo3 from 0.20.0 to 0.20.3 in /src/_bcrypt (#746)
+
+It fixes build on hosts without 64bit atomics
+
+Upstream-Status: Backport [https://github.com/pyca/bcrypt/commit/c2ef9350798ab59b18e8a0e04a01389858578fe0]
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+---
+ src/_bcrypt/Cargo.lock | 22 ++++++++++++++--------
+ src/_bcrypt/Cargo.toml | 2 +-
+ 2 files changed, 15 insertions(+), 9 deletions(-)
+
+--- a/src/_bcrypt/Cargo.lock
++++ b/src/_bcrypt/Cargo.lock
+@@ -233,6 +233,12 @@ dependencies = [
+ ]
+
+ [[package]]
++name = "portable-atomic"
++version = "1.6.0"
++source = "registry+https://github.com/rust-lang/crates.io-index"
++checksum = "7170ef9988bc169ba16dd36a7fa041e5c4cbeb6a35b76d4c03daded371eae7c0"
++
++[[package]]
+ name = "proc-macro2"
+ version = "1.0.70"
+ source = "registry+https://github.com/rust-lang/crates.io-index"
+@@ -243,15 +249,16 @@ dependencies = [
+
+ [[package]]
+ name = "pyo3"
+-version = "0.20.0"
++version = "0.20.3"
+ source = "registry+https://github.com/rust-lang/crates.io-index"
+-checksum = "04e8453b658fe480c3e70c8ed4e3d3ec33eb74988bd186561b0cc66b85c3bc4b"
++checksum = "53bdbb96d49157e65d45cc287af5f32ffadd5f4761438b527b055fb0d4bb8233"
+ dependencies = [
+ "cfg-if",
+ "indoc",
+ "libc",
+ "memoffset",
+ "parking_lot",
++ "portable-atomic",
+ "pyo3-build-config",
+ "pyo3-ffi",
+ "pyo3-macros",
+@@ -260,9 +267,9 @@ dependencies = [
+
+ [[package]]
+ name = "pyo3-build-config"
+-version = "0.20.0"
++version = "0.20.3"
+ source = "registry+https://github.com/rust-lang/crates.io-index"
+-checksum = "a96fe70b176a89cff78f2fa7b3c930081e163d5379b4dcdf993e3ae29ca662e5"
++checksum = "deaa5745de3f5231ce10517a1f5dd97d53e5a2fd77aa6b5842292085831d48d7"
+ dependencies = [
+ "once_cell",
+ "target-lexicon",
+@@ -270,9 +277,9 @@ dependencies = [
+
+ [[package]]
+ name = "pyo3-ffi"
+-version = "0.20.0"
++version = "0.20.3"
+ source = "registry+https://github.com/rust-lang/crates.io-index"
+-checksum = "214929900fd25e6604661ed9cf349727c8920d47deff196c4e28165a6ef2a96b"
++checksum = "62b42531d03e08d4ef1f6e85a2ed422eb678b8cd62b762e53891c05faf0d4afa"
+ dependencies = [
+ "libc",
+ "pyo3-build-config",
+@@ -280,9 +287,9 @@ dependencies = [
+
+ [[package]]
+ name = "pyo3-macros"
+-version = "0.20.0"
++version = "0.20.3"
+ source = "registry+https://github.com/rust-lang/crates.io-index"
+-checksum = "dac53072f717aa1bfa4db832b39de8c875b7c7af4f4a6fe93cdbf9264cf8383b"
++checksum = "7305c720fa01b8055ec95e484a6eca7a83c841267f0dd5280f0c8b8551d2c158"
+ dependencies = [
+ "proc-macro2",
+ "pyo3-macros-backend",
+@@ -292,12 +299,13 @@ dependencies = [
+
+ [[package]]
+ name = "pyo3-macros-backend"
+-version = "0.20.0"
++version = "0.20.3"
+ source = "registry+https://github.com/rust-lang/crates.io-index"
+-checksum = "7774b5a8282bd4f25f803b1f0d945120be959a36c72e08e7cd031c792fdfd424"
++checksum = "7c7e9b68bb9c3149c5b0cade5d07f953d6d125eb4337723c4ccdb665f1f96185"
+ dependencies = [
+ "heck",
+ "proc-macro2",
++ "pyo3-build-config",
+ "quote",
+ "syn",
+ ]
+--- a/src/_bcrypt/Cargo.toml
++++ b/src/_bcrypt/Cargo.toml
+@@ -6,7 +6,7 @@ edition = "2018"
+ publish = false
+
+ [dependencies]
+-pyo3 = { version = "0.20.0", features = ["abi3"] }
++pyo3 = { version = "0.20.3", features = ["abi3"] }
+ bcrypt = "0.15"
+ bcrypt-pbkdf = "0.10.0"
+ base64 = "0.21.5"
diff --git a/meta/recipes-devtools/python/python3-bcrypt/run-ptest b/meta/recipes-devtools/python/python3-bcrypt/run-ptest
index b63c4de0d9..8d2017d39c 100644
--- a/meta/recipes-devtools/python/python3-bcrypt/run-ptest
+++ b/meta/recipes-devtools/python/python3-bcrypt/run-ptest
@@ -1,3 +1,3 @@
#!/bin/sh
-pytest -o log_cli=true -o log_cli_level=INFO | sed -e 's/\[...%\]//g'| sed -e 's/PASSED/PASS/g'| sed -e 's/FAILED/FAIL/g'|sed -e 's/SKIPPED/SKIP/g'| awk '{if ($NF=="PASS" || $NF=="FAIL" || $NF=="SKIP" || $NF=="XFAIL" || $NF=="XPASS"){printf "%s: %s\n", $NF, $0}else{print}}'| awk '{if ($NF=="PASS" || $NF=="FAIL" || $NF=="SKIP" || $NF=="XFAIL" || $NF=="XPASS") {$NF="";print $0}else{print}}'
+pytest --automake
diff --git a/meta/recipes-devtools/python/python3-bcrypt_3.2.2.bb b/meta/recipes-devtools/python/python3-bcrypt_3.2.2.bb
deleted file mode 100644
index 54070e4b9c..0000000000
--- a/meta/recipes-devtools/python/python3-bcrypt_3.2.2.bb
+++ /dev/null
@@ -1,30 +0,0 @@
-DESCRIPTION = "Modern password hashing for your software and your servers."
-LICENSE = "Apache-2.0"
-LIC_FILES_CHKSUM = "file://LICENSE;md5=8f7bb094c7232b058c7e9f2e431f389c"
-HOMEPAGE = "https://pypi.org/project/bcrypt/"
-
-DEPENDS += "${PYTHON_PN}-cffi-native"
-
-SRC_URI[sha256sum] = "433c410c2177057705da2a9f2cd01dd157493b2a7ac14c8593a16b3dab6b6bfb"
-
-inherit pypi python_setuptools_build_meta ptest
-
-SRC_URI += " \
- file://run-ptest \
-"
-
-RDEPENDS:${PN}-ptest += " \
- ${PYTHON_PN}-pytest \
-"
-
-do_install_ptest() {
- install -d ${D}${PTEST_PATH}/tests
- cp -rf ${S}/tests/* ${D}${PTEST_PATH}/tests/
-}
-
-RDEPENDS:${PN}:class-target += "\
- ${PYTHON_PN}-cffi \
- ${PYTHON_PN}-ctypes \
- ${PYTHON_PN}-shell \
- ${PYTHON_PN}-six \
-"
diff --git a/meta/recipes-devtools/python/python3-bcrypt_4.1.2.bb b/meta/recipes-devtools/python/python3-bcrypt_4.1.2.bb
new file mode 100644
index 0000000000..93fa645f33
--- /dev/null
+++ b/meta/recipes-devtools/python/python3-bcrypt_4.1.2.bb
@@ -0,0 +1,37 @@
+SUMMARY = "Modern password hashing for your software and your servers."
+LICENSE = "Apache-2.0"
+LIC_FILES_CHKSUM = "file://LICENSE;md5=8f7bb094c7232b058c7e9f2e431f389c"
+HOMEPAGE = "https://pypi.org/project/bcrypt/"
+
+DEPENDS += "python3-cffi-native"
+LDFLAGS:append = "${@bb.utils.contains('DISTRO_FEATURES', 'ptest', ' -fuse-ld=bfd', '', d)}"
+
+SRC_URI[sha256sum] = "33313a1200a3ae90b75587ceac502b048b840fc69e7f7a0905b5f87fac7a1258"
+
+inherit pypi python_setuptools3_rust ptest-cargo cargo-update-recipe-crates
+
+SRC_URI += " \
+ file://0001-Bump-pyo3-from-0.20.0-to-0.20.3-in-src-_bcrypt-746.patch \
+ file://run-ptest \
+"
+
+CARGO_SRC_DIR = "src/_bcrypt"
+
+require ${BPN}-crates.inc
+
+RDEPENDS:${PN}-ptest += " \
+ python3-pytest \
+ python3-unittest-automake-output \
+"
+
+do_install_ptest() {
+ install -d ${D}${PTEST_PATH}/tests
+ cp -rf ${S}/tests/* ${D}${PTEST_PATH}/tests/
+}
+
+RDEPENDS:${PN}:class-target += "\
+ python3-cffi \
+ python3-ctypes \
+ python3-shell \
+ python3-six \
+"
diff --git a/meta/recipes-devtools/python/python3-beartype_0.18.2.bb b/meta/recipes-devtools/python/python3-beartype_0.18.2.bb
new file mode 100644
index 0000000000..1b6ab6a42b
--- /dev/null
+++ b/meta/recipes-devtools/python/python3-beartype_0.18.2.bb
@@ -0,0 +1,11 @@
+SUMMARY = "Unbearably fast runtime type checking in pure Python."
+HOMEPAGE = "https://beartype.readthedocs.io"
+
+LICENSE = "MIT"
+LIC_FILES_CHKSUM = "file://LICENSE;md5=e71f94261c1b39896cacacfeaf60560e"
+
+SRC_URI[sha256sum] = "a6fbc0be9269889312388bfec6a9ddf41bf8fe31b68bcf9c8239db35cd38f411"
+
+inherit setuptools3 pypi
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/python/python3-booleanpy_4.0.bb b/meta/recipes-devtools/python/python3-booleanpy_4.0.bb
new file mode 100644
index 0000000000..41fd3d960a
--- /dev/null
+++ b/meta/recipes-devtools/python/python3-booleanpy_4.0.bb
@@ -0,0 +1,13 @@
+SUMMARY = "Define boolean algebras, create and parse boolean expressions and create custom boolean DSL"
+HOMEPAGE = "https://github.com/bastikr/boolean.py"
+
+LICENSE = "BSD-2-Clause"
+LIC_FILES_CHKSUM = "file://LICENSE.txt;md5=d118b5feceee598ebeca76e13395c2bd"
+
+SRC_URI[sha256sum] = "17b9a181630e43dde1851d42bef546d616d5d9b4480357514597e78b203d06e4"
+
+PYPI_PACKAGE = "boolean.py"
+
+inherit pypi setuptools3
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/python/python3-build_1.2.1.bb b/meta/recipes-devtools/python/python3-build_1.2.1.bb
new file mode 100644
index 0000000000..0156861201
--- /dev/null
+++ b/meta/recipes-devtools/python/python3-build_1.2.1.bb
@@ -0,0 +1,32 @@
+SUMMARY = "A simple, correct PEP517 package builder"
+HOMEPAGE = "https://github.com/pypa/build"
+LICENSE = "MIT"
+LIC_FILES_CHKSUM = "file://LICENSE;md5=310439af287b0fb4780b2ad6907c256c"
+
+SRC_URI[sha256sum] = "526263f4870c26f26c433545579475377b2b7588b6f1eac76a001e873ae3e19d"
+
+inherit pypi python_flit_core
+
+DEPENDS += "python3-pyproject-hooks-native"
+
+DEPENDS:remove:class-native = "python3-build-native"
+
+# Skip dependencies as we're doing a minimal build to bootstrap
+PEP517_BUILD_OPTS:class-native = "--skip-dependency-check"
+
+do_compile:prepend:class-native() {
+ export PYTHONPATH="${S}/src"
+}
+
+RDEPENDS:${PN} += " \
+ python3-compression \
+ python3-difflib \
+ python3-ensurepip \
+ python3-logging \
+ python3-packaging \
+ python3-pyproject-hooks \
+ python3-tomllib \
+ python3-venv \
+"
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/python/python3-calver/0001-setup.py-hard-code-version.patch b/meta/recipes-devtools/python/python3-calver/0001-setup.py-hard-code-version.patch
new file mode 100644
index 0000000000..43f8a78ef0
--- /dev/null
+++ b/meta/recipes-devtools/python/python3-calver/0001-setup.py-hard-code-version.patch
@@ -0,0 +1,32 @@
+From 390a233ed969f82b2ef209b23bfb523e785603f9 Mon Sep 17 00:00:00 2001
+From: Trevor Gamblin <tgamblin@baylibre.com>
+Date: Tue, 9 May 2023 10:19:41 -0400
+Subject: [PATCH] setup.py: hard-code version
+
+setup.py is pulling the build version from the current date rather than
+a release tag or other predictable method, causing reproducibility
+issues in builds. Patch this to make reproducible builds work while
+discussing this with upstream maintainer (or developing a patch that can
+make calver rely on a more standard pyproject.toml solution).
+
+Upstream-Status: Inappropriate (configuration)
+
+Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
+---
+ setup.py | 2 +-
+ 1 file changed, 1 insertion(+), 1 deletion(-)
+
+diff --git a/setup.py b/setup.py
+index 73f6b10..2e27cf1 100644
+--- a/setup.py
++++ b/setup.py
+@@ -42,5 +42,5 @@ setup(
+ "use_calver = calver.integration:version",
+ ],
+ },
+- version=calver_version(True),
++ version=calver_version("2022.6.26"),
+ )
+--
+2.40.0
+
diff --git a/meta/recipes-devtools/python/python3-calver/run-ptest b/meta/recipes-devtools/python/python3-calver/run-ptest
new file mode 100644
index 0000000000..8d2017d39c
--- /dev/null
+++ b/meta/recipes-devtools/python/python3-calver/run-ptest
@@ -0,0 +1,3 @@
+#!/bin/sh
+
+pytest --automake
diff --git a/meta/recipes-devtools/python/python3-calver_2022.6.26.bb b/meta/recipes-devtools/python/python3-calver_2022.6.26.bb
new file mode 100644
index 0000000000..71b5431400
--- /dev/null
+++ b/meta/recipes-devtools/python/python3-calver_2022.6.26.bb
@@ -0,0 +1,28 @@
+SUMMARY = "Setuptools extension for CalVer package versions"
+HOMEPAGE = "https://github.com/di/calver"
+LICENSE = "Apache-2.0"
+LIC_FILES_CHKSUM = "file://LICENSE;md5=3b83ef96387f14655fc854ddc3c6bd57"
+
+SRC_URI = " \
+ git://github.com/di/calver;branch=master;protocol=https \
+ file://run-ptest \
+ file://0001-setup.py-hard-code-version.patch \
+"
+SRCREV = "3268d8acf2c345f32a1c5f08ba25dc67f76cca81"
+
+inherit python_setuptools_build_meta ptest
+
+S = "${WORKDIR}/git"
+
+RDEPENDS:${PN}-ptest += " \
+ python3-pretend \
+ python3-pytest \
+ python3-unittest-automake-output \
+"
+
+do_install_ptest() {
+ install -d ${D}${PTEST_PATH}/tests
+ cp -rf ${S}/tests ${D}${PTEST_PATH}/
+}
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/python/python3-certifi_2021.10.8.bb b/meta/recipes-devtools/python/python3-certifi_2021.10.8.bb
deleted file mode 100644
index 4c376da897..0000000000
--- a/meta/recipes-devtools/python/python3-certifi_2021.10.8.bb
+++ /dev/null
@@ -1,14 +0,0 @@
-SUMMARY = "Python package for providing Mozilla's CA Bundle."
-DESCRIPTION = "This installable Python package contains a CA Bundle that you can reference in your \
-Python code. This is useful for verifying HTTP requests, for example. This is the same CA Bundle \
-which ships with the Requests codebase, and is derived from Mozilla Firefox's canonical set."
-HOMEPAGE = " http://certifi.io/"
-
-LICENSE = "ISC"
-LIC_FILES_CHKSUM = "file://LICENSE;md5=67da0714c3f9471067b729eca6c9fbe8"
-
-SRC_URI[sha256sum] = "78884e7c1d4b00ce3cea67b44566851c4343c120abd683433ce934a68ea58872"
-
-inherit pypi setuptools3
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/python/python3-certifi_2024.2.2.bb b/meta/recipes-devtools/python/python3-certifi_2024.2.2.bb
new file mode 100644
index 0000000000..4e61b8d9d4
--- /dev/null
+++ b/meta/recipes-devtools/python/python3-certifi_2024.2.2.bb
@@ -0,0 +1,16 @@
+SUMMARY = "Python package for providing Mozilla's CA Bundle."
+DESCRIPTION = "This installable Python package contains a CA Bundle that you can reference in your \
+Python code. This is useful for verifying HTTP requests, for example. This is the same CA Bundle \
+which ships with the Requests codebase, and is derived from Mozilla Firefox's canonical set."
+HOMEPAGE = " http://certifi.io/"
+
+LICENSE = "ISC"
+LIC_FILES_CHKSUM = "file://LICENSE;md5=11618cb6a975948679286b1211bd573c"
+
+SRC_URI[sha256sum] = "0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"
+
+inherit pypi setuptools3
+
+BBCLASSEXTEND = "native nativesdk"
+
+RDEPENDS:${PN} += "python3-io"
diff --git a/meta/recipes-devtools/python/python3-cffi_1.15.0.bb b/meta/recipes-devtools/python/python3-cffi_1.15.0.bb
deleted file mode 100644
index c36f23b3dd..0000000000
--- a/meta/recipes-devtools/python/python3-cffi_1.15.0.bb
+++ /dev/null
@@ -1,18 +0,0 @@
-SUMMARY = "Foreign Function Interface for Python calling C code"
-HOMEPAGE = "http://cffi.readthedocs.org/"
-LICENSE = "MIT"
-LIC_FILES_CHKSUM = "file://LICENSE;md5=5677e2fdbf7cdda61d6dd2b57df547bf"
-DEPENDS += "libffi ${PYTHON_PN}-pycparser"
-
-SRC_URI[sha256sum] = "920f0d66a896c2d99f0adbb391f990a84091179542c205fa53ce5787aff87954"
-
-inherit pypi setuptools3
-
-RDEPENDS:${PN}:class-target = " \
- ${PYTHON_PN}-ctypes \
- ${PYTHON_PN}-io \
- ${PYTHON_PN}-pycparser \
- ${PYTHON_PN}-shell \
-"
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/python/python3-cffi_1.16.0.bb b/meta/recipes-devtools/python/python3-cffi_1.16.0.bb
new file mode 100644
index 0000000000..c5f6578ab2
--- /dev/null
+++ b/meta/recipes-devtools/python/python3-cffi_1.16.0.bb
@@ -0,0 +1,18 @@
+SUMMARY = "Foreign Function Interface for Python calling C code"
+HOMEPAGE = "http://cffi.readthedocs.org/"
+LICENSE = "MIT"
+LIC_FILES_CHKSUM = "file://LICENSE;md5=5677e2fdbf7cdda61d6dd2b57df547bf"
+DEPENDS += "libffi python3-pycparser"
+
+SRC_URI[sha256sum] = "bcb3ef43e58665bbda2fb198698fcae6776483e0c4a631aa5647806c25e02cc0"
+
+inherit pypi setuptools3
+
+RDEPENDS:${PN}:class-target = " \
+ python3-ctypes \
+ python3-io \
+ python3-pycparser \
+ python3-shell \
+"
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/python/python3-chardet_4.0.0.bb b/meta/recipes-devtools/python/python3-chardet_4.0.0.bb
deleted file mode 100644
index f53bdc974a..0000000000
--- a/meta/recipes-devtools/python/python3-chardet_4.0.0.bb
+++ /dev/null
@@ -1,24 +0,0 @@
-SUMMARY = "Universal encoding detector for Python 2 and 3"
-HOMEPAGE = "https://pypi.org/project/chardet/"
-LICENSE = "LGPL-2.1-only"
-LIC_FILES_CHKSUM = "file://LICENSE;md5=a6f89e2100d9b6cdffcea4f398e37343"
-
-SRC_URI[sha256sum] = "0d6f53a15db4120f2b08c94f11e7d93d2c911ee118b6b30a04ec3ee8310179fa"
-
-# setup.py of chardet needs this.
-DEPENDS += "${PYTHON_PN}-pytest-runner-native"
-
-inherit pypi setuptools3
-
-PACKAGES =+ "${PN}-cli"
-FILES:${PN}-cli += " \
- ${PYTHON_SITEPACKAGES_DIR}/chardet/cli \
-"
-
-RDEPENDS:${PN}-cli = "${PN} "
-
-RDEPENDS:${PN}:class-target += " \
- ${PYTHON_PN}-logging \
-"
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/python/python3-chardet_5.2.0.bb b/meta/recipes-devtools/python/python3-chardet_5.2.0.bb
new file mode 100644
index 0000000000..ed792b1c7e
--- /dev/null
+++ b/meta/recipes-devtools/python/python3-chardet_5.2.0.bb
@@ -0,0 +1,24 @@
+SUMMARY = "Universal encoding detector for Python 2 and 3"
+HOMEPAGE = "https://pypi.org/project/chardet/"
+LICENSE = "LGPL-2.1-only"
+LIC_FILES_CHKSUM = "file://LICENSE;md5=4fbd65380cdd255951079008b364516c"
+
+SRC_URI[sha256sum] = "1b3b6ff479a8c414bc3fa2c0852995695c4a026dcd6d0633b2dd092ca39c1cf7"
+
+# setup.py of chardet needs this.
+DEPENDS += "python3-pytest-runner-native"
+
+inherit pypi python_setuptools_build_meta
+
+PACKAGES =+ "${PN}-cli"
+FILES:${PN}-cli += " \
+ ${PYTHON_SITEPACKAGES_DIR}/chardet/cli \
+"
+
+RDEPENDS:${PN}-cli = "${PN} "
+
+RDEPENDS:${PN}:class-target += " \
+ python3-logging \
+"
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/python/python3-click/run-ptest b/meta/recipes-devtools/python/python3-click/run-ptest
new file mode 100644
index 0000000000..b63c4de0d9
--- /dev/null
+++ b/meta/recipes-devtools/python/python3-click/run-ptest
@@ -0,0 +1,3 @@
+#!/bin/sh
+
+pytest -o log_cli=true -o log_cli_level=INFO | sed -e 's/\[...%\]//g'| sed -e 's/PASSED/PASS/g'| sed -e 's/FAILED/FAIL/g'|sed -e 's/SKIPPED/SKIP/g'| awk '{if ($NF=="PASS" || $NF=="FAIL" || $NF=="SKIP" || $NF=="XFAIL" || $NF=="XPASS"){printf "%s: %s\n", $NF, $0}else{print}}'| awk '{if ($NF=="PASS" || $NF=="FAIL" || $NF=="SKIP" || $NF=="XFAIL" || $NF=="XPASS") {$NF="";print $0}else{print}}'
diff --git a/meta/recipes-devtools/python/python3-click_8.1.7.bb b/meta/recipes-devtools/python/python3-click_8.1.7.bb
new file mode 100644
index 0000000000..7d91e1af83
--- /dev/null
+++ b/meta/recipes-devtools/python/python3-click_8.1.7.bb
@@ -0,0 +1,39 @@
+SUMMARY = "A simple wrapper around optparse for powerful command line utilities."
+DESCRIPTION = "\
+Click is a Python package for creating beautiful command line interfaces \
+in a composable way with as little code as necessary. It's the "Command \
+Line Interface Creation Kit". It's highly configurable but comes with \
+sensible defaults out of the box."
+HOMEPAGE = "http://click.pocoo.org/"
+LICENSE = "BSD-3-Clause"
+LIC_FILES_CHKSUM = "file://LICENSE.rst;md5=1fa98232fd645608937a0fdc82e999b8"
+
+SRC_URI[sha256sum] = "ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"
+
+inherit pypi setuptools3 ptest
+
+SRC_URI += "file://run-ptest"
+
+RDEPENDS:${PN}-ptest += " \
+ python3-pytest \
+ python3-terminal \
+ python3-unixadmin \
+"
+
+do_install_ptest() {
+ install -d ${D}${PTEST_PATH}/tests
+ cp -rf ${S}/tests/* ${D}${PTEST_PATH}/tests/
+ cp -rf ${S}/setup.cfg ${D}${PTEST_PATH}/
+ cp -rf ${S}/docs ${D}${PTEST_PATH}/
+}
+
+UPSTREAM_CHECK_REGEX = "click/(?P<pver>\d+(\.\d+)+)/"
+
+CLEANBROKEN = "1"
+
+RDEPENDS:${PN} += "\
+ python3-io \
+ python3-threading \
+ "
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/python/python3-cryptography-crates.inc b/meta/recipes-devtools/python/python3-cryptography-crates.inc
new file mode 100644
index 0000000000..b26e22b70c
--- /dev/null
+++ b/meta/recipes-devtools/python/python3-cryptography-crates.inc
@@ -0,0 +1,102 @@
+# Autogenerated with 'bitbake -c update_crates python3-cryptography'
+
+# from src/rust/Cargo.lock
+SRC_URI += " \
+ crate://crates.io/asn1/0.15.5 \
+ crate://crates.io/asn1_derive/0.15.5 \
+ crate://crates.io/autocfg/1.1.0 \
+ crate://crates.io/base64/0.21.7 \
+ crate://crates.io/bitflags/1.3.2 \
+ crate://crates.io/bitflags/2.4.2 \
+ crate://crates.io/cc/1.0.83 \
+ crate://crates.io/cfg-if/1.0.0 \
+ crate://crates.io/foreign-types/0.3.2 \
+ crate://crates.io/foreign-types-shared/0.1.1 \
+ crate://crates.io/heck/0.4.1 \
+ crate://crates.io/indoc/2.0.4 \
+ crate://crates.io/libc/0.2.152 \
+ crate://crates.io/lock_api/0.4.11 \
+ crate://crates.io/memoffset/0.9.0 \
+ crate://crates.io/once_cell/1.19.0 \
+ crate://crates.io/openssl/0.10.63 \
+ crate://crates.io/openssl-macros/0.1.1 \
+ crate://crates.io/openssl-sys/0.9.99 \
+ crate://crates.io/parking_lot/0.12.1 \
+ crate://crates.io/parking_lot_core/0.9.9 \
+ crate://crates.io/pem/3.0.3 \
+ crate://crates.io/pkg-config/0.3.29 \
+ crate://crates.io/portable-atomic/1.6.0 \
+ crate://crates.io/proc-macro2/1.0.78 \
+ crate://crates.io/pyo3/0.20.3 \
+ crate://crates.io/pyo3-build-config/0.20.3 \
+ crate://crates.io/pyo3-ffi/0.20.3 \
+ crate://crates.io/pyo3-macros/0.20.3 \
+ crate://crates.io/pyo3-macros-backend/0.20.3 \
+ crate://crates.io/quote/1.0.35 \
+ crate://crates.io/redox_syscall/0.4.1 \
+ crate://crates.io/scopeguard/1.2.0 \
+ crate://crates.io/self_cell/1.0.3 \
+ crate://crates.io/smallvec/1.13.1 \
+ crate://crates.io/syn/2.0.48 \
+ crate://crates.io/target-lexicon/0.12.13 \
+ crate://crates.io/unicode-ident/1.0.12 \
+ crate://crates.io/unindent/0.2.3 \
+ crate://crates.io/vcpkg/0.2.15 \
+ crate://crates.io/windows-targets/0.48.5 \
+ crate://crates.io/windows_aarch64_gnullvm/0.48.5 \
+ crate://crates.io/windows_aarch64_msvc/0.48.5 \
+ crate://crates.io/windows_i686_gnu/0.48.5 \
+ crate://crates.io/windows_i686_msvc/0.48.5 \
+ crate://crates.io/windows_x86_64_gnu/0.48.5 \
+ crate://crates.io/windows_x86_64_gnullvm/0.48.5 \
+ crate://crates.io/windows_x86_64_msvc/0.48.5 \
+"
+
+SRC_URI[asn1-0.15.5.sha256sum] = "ae3ecbce89a22627b5e8e6e11d69715617138290289e385cde773b1fe50befdb"
+SRC_URI[asn1_derive-0.15.5.sha256sum] = "861af988fac460ac69a09f41e6217a8fb9178797b76fcc9478444be6a59be19c"
+SRC_URI[autocfg-1.1.0.sha256sum] = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa"
+SRC_URI[base64-0.21.7.sha256sum] = "9d297deb1925b89f2ccc13d7635fa0714f12c87adce1c75356b39ca9b7178567"
+SRC_URI[bitflags-1.3.2.sha256sum] = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a"
+SRC_URI[bitflags-2.4.2.sha256sum] = "ed570934406eb16438a4e976b1b4500774099c13b8cb96eec99f620f05090ddf"
+SRC_URI[cc-1.0.83.sha256sum] = "f1174fb0b6ec23863f8b971027804a42614e347eafb0a95bf0b12cdae21fc4d0"
+SRC_URI[cfg-if-1.0.0.sha256sum] = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
+SRC_URI[foreign-types-0.3.2.sha256sum] = "f6f339eb8adc052cd2ca78910fda869aefa38d22d5cb648e6485e4d3fc06f3b1"
+SRC_URI[foreign-types-shared-0.1.1.sha256sum] = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b"
+SRC_URI[heck-0.4.1.sha256sum] = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8"
+SRC_URI[indoc-2.0.4.sha256sum] = "1e186cfbae8084e513daff4240b4797e342f988cecda4fb6c939150f96315fd8"
+SRC_URI[libc-0.2.152.sha256sum] = "13e3bf6590cbc649f4d1a3eefc9d5d6eb746f5200ffb04e5e142700b8faa56e7"
+SRC_URI[lock_api-0.4.11.sha256sum] = "3c168f8615b12bc01f9c17e2eb0cc07dcae1940121185446edc3744920e8ef45"
+SRC_URI[memoffset-0.9.0.sha256sum] = "5a634b1c61a95585bd15607c6ab0c4e5b226e695ff2800ba0cdccddf208c406c"
+SRC_URI[once_cell-1.19.0.sha256sum] = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92"
+SRC_URI[openssl-0.10.63.sha256sum] = "15c9d69dd87a29568d4d017cfe8ec518706046a05184e5aea92d0af890b803c8"
+SRC_URI[openssl-macros-0.1.1.sha256sum] = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c"
+SRC_URI[openssl-sys-0.9.99.sha256sum] = "22e1bf214306098e4832460f797824c05d25aacdf896f64a985fb0fd992454ae"
+SRC_URI[parking_lot-0.12.1.sha256sum] = "3742b2c103b9f06bc9fff0a37ff4912935851bee6d36f3c02bcc755bcfec228f"
+SRC_URI[parking_lot_core-0.9.9.sha256sum] = "4c42a9226546d68acdd9c0a280d17ce19bfe27a46bf68784e4066115788d008e"
+SRC_URI[pem-3.0.3.sha256sum] = "1b8fcc794035347fb64beda2d3b462595dd2753e3f268d89c5aae77e8cf2c310"
+SRC_URI[pkg-config-0.3.29.sha256sum] = "2900ede94e305130c13ddd391e0ab7cbaeb783945ae07a279c268cb05109c6cb"
+SRC_URI[portable-atomic-1.6.0.sha256sum] = "7170ef9988bc169ba16dd36a7fa041e5c4cbeb6a35b76d4c03daded371eae7c0"
+SRC_URI[proc-macro2-1.0.78.sha256sum] = "e2422ad645d89c99f8f3e6b88a9fdeca7fabeac836b1002371c4367c8f984aae"
+SRC_URI[pyo3-0.20.3.sha256sum] = "53bdbb96d49157e65d45cc287af5f32ffadd5f4761438b527b055fb0d4bb8233"
+SRC_URI[pyo3-build-config-0.20.3.sha256sum] = "deaa5745de3f5231ce10517a1f5dd97d53e5a2fd77aa6b5842292085831d48d7"
+SRC_URI[pyo3-ffi-0.20.3.sha256sum] = "62b42531d03e08d4ef1f6e85a2ed422eb678b8cd62b762e53891c05faf0d4afa"
+SRC_URI[pyo3-macros-0.20.3.sha256sum] = "7305c720fa01b8055ec95e484a6eca7a83c841267f0dd5280f0c8b8551d2c158"
+SRC_URI[pyo3-macros-backend-0.20.3.sha256sum] = "7c7e9b68bb9c3149c5b0cade5d07f953d6d125eb4337723c4ccdb665f1f96185"
+SRC_URI[quote-1.0.35.sha256sum] = "291ec9ab5efd934aaf503a6466c5d5251535d108ee747472c3977cc5acc868ef"
+SRC_URI[redox_syscall-0.4.1.sha256sum] = "4722d768eff46b75989dd134e5c353f0d6296e5aaa3132e776cbdb56be7731aa"
+SRC_URI[scopeguard-1.2.0.sha256sum] = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49"
+SRC_URI[self_cell-1.0.3.sha256sum] = "58bf37232d3bb9a2c4e641ca2a11d83b5062066f88df7fed36c28772046d65ba"
+SRC_URI[smallvec-1.13.1.sha256sum] = "e6ecd384b10a64542d77071bd64bd7b231f4ed5940fba55e98c3de13824cf3d7"
+SRC_URI[syn-2.0.48.sha256sum] = "0f3531638e407dfc0814761abb7c00a5b54992b849452a0646b7f65c9f770f3f"
+SRC_URI[target-lexicon-0.12.13.sha256sum] = "69758bda2e78f098e4ccb393021a0963bb3442eac05f135c30f61b7370bbafae"
+SRC_URI[unicode-ident-1.0.12.sha256sum] = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b"
+SRC_URI[unindent-0.2.3.sha256sum] = "c7de7d73e1754487cb58364ee906a499937a0dfabd86bcb980fa99ec8c8fa2ce"
+SRC_URI[vcpkg-0.2.15.sha256sum] = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426"
+SRC_URI[windows-targets-0.48.5.sha256sum] = "9a2fa6e2155d7247be68c096456083145c183cbbbc2764150dda45a87197940c"
+SRC_URI[windows_aarch64_gnullvm-0.48.5.sha256sum] = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8"
+SRC_URI[windows_aarch64_msvc-0.48.5.sha256sum] = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc"
+SRC_URI[windows_i686_gnu-0.48.5.sha256sum] = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e"
+SRC_URI[windows_i686_msvc-0.48.5.sha256sum] = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406"
+SRC_URI[windows_x86_64_gnu-0.48.5.sha256sum] = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e"
+SRC_URI[windows_x86_64_gnullvm-0.48.5.sha256sum] = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc"
+SRC_URI[windows_x86_64_msvc-0.48.5.sha256sum] = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538"
diff --git a/meta/recipes-devtools/python/python3-cryptography-vectors_37.0.2.bb b/meta/recipes-devtools/python/python3-cryptography-vectors_37.0.2.bb
deleted file mode 100644
index 3dbdc8ce3e..0000000000
--- a/meta/recipes-devtools/python/python3-cryptography-vectors_37.0.2.bb
+++ /dev/null
@@ -1,29 +0,0 @@
-SUMMARY = "Test vectors for the cryptography package."
-HOMEPAGE = "https://cryptography.io/"
-SECTION = "devel/python"
-LICENSE = "Apache-2.0 | BSD-3-Clause"
-LIC_FILES_CHKSUM = "file://LICENSE;md5=8c3617db4fb6fae01f1d253ab91511e4 \
- file://LICENSE.APACHE;md5=4e168cce331e5c827d4c2b68a6200e1b \
- file://LICENSE.BSD;md5=5ae30ba4123bc4f2fa49aa0b0dce887b"
-
-# NOTE: Make sure to keep this recipe at the same version as python3-cryptography
-# Upgrade both recipes at the same time
-
-SRC_URI[sha256sum] = "7c65d3de51756f418142df605417ec2c6e961c364f70cc8a103030889d5a3219"
-
-PYPI_PACKAGE = "cryptography_vectors"
-
-inherit pypi setuptools3
-
-DEPENDS += " \
- ${PYTHON_PN}-cryptography \
-"
-
-do_install:append () {
- # Remove the sha256 checksum lines for pycache files
- sed ${D}${PYTHON_SITEPACKAGES_DIR}/cryptography_vectors-${PV}.dist-info/RECORD -e '/__pycache__/d' -i
-}
-
-BBCLASSEXTEND = "native nativesdk"
-
-UPSTREAM_CHECK_REGEX = ""
diff --git a/meta/recipes-devtools/python/python3-cryptography-vectors_42.0.5.bb b/meta/recipes-devtools/python/python3-cryptography-vectors_42.0.5.bb
new file mode 100644
index 0000000000..ee522af08e
--- /dev/null
+++ b/meta/recipes-devtools/python/python3-cryptography-vectors_42.0.5.bb
@@ -0,0 +1,31 @@
+SUMMARY = "Test vectors for the cryptography package."
+HOMEPAGE = "https://cryptography.io/"
+SECTION = "devel/python"
+LICENSE = "Apache-2.0 | BSD-3-Clause"
+LIC_FILES_CHKSUM = "file://LICENSE;md5=8c3617db4fb6fae01f1d253ab91511e4 \
+ file://LICENSE.APACHE;md5=4e168cce331e5c827d4c2b68a6200e1b \
+ file://LICENSE.BSD;md5=5ae30ba4123bc4f2fa49aa0b0dce887b"
+
+# NOTE: Make sure to keep this recipe at the same version as python3-cryptography
+# Upgrade both recipes at the same time
+
+SRC_URI[sha256sum] = "505cd5e3b0cb32da1526f07042b7fc38a4b6c356710cb73d2b5f76b037a38ed1"
+
+PYPI_PACKAGE = "cryptography_vectors"
+
+inherit pypi python_setuptools_build_meta
+
+DEPENDS += " \
+ python3-cryptography \
+"
+
+do_install:append () {
+ # Remove the sha256 checksum lines for pycache files
+ sed ${D}${PYTHON_SITEPACKAGES_DIR}/cryptography_vectors-${PV}.dist-info/RECORD -e '/__pycache__/d' -i
+}
+
+BBCLASSEXTEND = "native nativesdk"
+
+UPSTREAM_CHECK_REGEX = ""
+
+RECIPE_NO_UPDATE_REASON = "Must be updated in sync with python3-cryptography."
diff --git a/meta/recipes-devtools/python/python3-cryptography/0001-Cargo.toml-specify-pem-version.patch b/meta/recipes-devtools/python/python3-cryptography/0001-Cargo.toml-specify-pem-version.patch
deleted file mode 100644
index 7266fd7bef..0000000000
--- a/meta/recipes-devtools/python/python3-cryptography/0001-Cargo.toml-specify-pem-version.patch
+++ /dev/null
@@ -1,29 +0,0 @@
-From d41203b9b79f5edc2d33b0d62921822294dfaa6b Mon Sep 17 00:00:00 2001
-From: Tim Orling <tim.orling@konsulko.com>
-Date: Fri, 14 Jan 2022 22:02:25 -0800
-Subject: [PATCH] Cargo.toml: specify pem version
-
-pem = "1.0" is not resolving, specify the current
-pem = { version: "1.0.2"}
-
-Upstream-Status: Pending
-
-Signed-off-by: Tim Orling <tim.orling@konsulko.com>
-
----
- src/rust/Cargo.toml | 2 +-
- 1 file changed, 1 insertion(+), 1 deletion(-)
-
-diff --git a/src/rust/Cargo.toml b/src/rust/Cargo.toml
-index d17245d..f8f6416 100644
---- a/src/rust/Cargo.toml
-+++ b/src/rust/Cargo.toml
-@@ -9,7 +9,7 @@ publish = false
- lazy_static = "1"
- pyo3 = { version = "0.15.2" }
- asn1 = { version = "0.8.7", default-features = false, features = ["derive"] }
--pem = "1.0"
-+pem = { version = "1.0.2" }
- chrono = { version = "0.4", default-features = false, features = ["alloc", "clock"] }
- ouroboros = "0.15"
-
diff --git a/meta/recipes-devtools/python/python3-cryptography/0001-pyproject.toml-remove-benchmark-disable-option.patch b/meta/recipes-devtools/python/python3-cryptography/0001-pyproject.toml-remove-benchmark-disable-option.patch
index 481f595246..f9c8d1393d 100644
--- a/meta/recipes-devtools/python/python3-cryptography/0001-pyproject.toml-remove-benchmark-disable-option.patch
+++ b/meta/recipes-devtools/python/python3-cryptography/0001-pyproject.toml-remove-benchmark-disable-option.patch
@@ -1,4 +1,4 @@
-From ce972ea92d724f232323a9a6265a8b44d913d4d8 Mon Sep 17 00:00:00 2001
+From b7dd3ce1d75d1e6255e1aca82aa7f401d4246a75 Mon Sep 17 00:00:00 2001
From: Mingli Yu <mingli.yu@windriver.com>
Date: Tue, 17 May 2022 17:22:48 +0800
Subject: [PATCH] pyproject.toml: remove --benchmark-disable option
@@ -18,23 +18,26 @@ Fixes:
Upstream-Status: Inappropriate [OE specific]
Signed-off-by: Mingli Yu <mingli.yu@windriver.com>
+
+Refresh for 42.02
+Signed-off-by: Tim Orling <tim.orling@konsulko.com>
---
pyproject.toml | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/pyproject.toml b/pyproject.toml
-index 4d58129..b011fca 100644
+index c9a7979bd..dec4b7157 100644
--- a/pyproject.toml
+++ b/pyproject.toml
-@@ -15,7 +15,7 @@ line-length = 79
- target-version = ["py36"]
+@@ -92,7 +92,7 @@ rust-version = ">=1.63.0"
+
[tool.pytest.ini_options]
-addopts = "-r s --capture=no --strict-markers --benchmark-disable"
+addopts = "-r s --capture=no --strict-markers"
+ console_output_style = "progress-even-when-capture-no"
markers = [
"skip_fips: this test is not executed in FIPS mode",
- "supported: parametrized test requiring only_if and skip_message",
--
-2.25.1
+2.34.1
diff --git a/meta/recipes-devtools/python/python3-cryptography/0002-Cargo.toml-edition-2018-2021.patch b/meta/recipes-devtools/python/python3-cryptography/0002-Cargo.toml-edition-2018-2021.patch
deleted file mode 100644
index 366e3a4d39..0000000000
--- a/meta/recipes-devtools/python/python3-cryptography/0002-Cargo.toml-edition-2018-2021.patch
+++ /dev/null
@@ -1,28 +0,0 @@
-From 4b73298b214a5b69ea6edf3c2e21dd82b2b29708 Mon Sep 17 00:00:00 2001
-From: Tim Orling <tim.orling@konsulko.com>
-Date: Fri, 14 Jan 2022 22:34:59 -0800
-Subject: [PATCH 2/2] Cargo.toml: edition 2018 -> 2021
-
-Upstream-Status: Pending
-
-Signed-off-by: Tim Orling <tim.orling@konsulko.com>
----
- src/rust/Cargo.toml | 2 +-
- 1 file changed, 1 insertion(+), 1 deletion(-)
-
-diff --git a/src/rust/Cargo.toml b/src/rust/Cargo.toml
-index 174eaa80..7ad053d9 100644
---- a/src/rust/Cargo.toml
-+++ b/src/rust/Cargo.toml
-@@ -2,7 +2,7 @@
- name = "cryptography-rust"
- version = "0.1.0"
- authors = ["The cryptography developers <cryptography-dev@python.org>"]
--edition = "2018"
-+edition = "2021"
- publish = false
-
- [dependencies]
---
-2.30.2
-
diff --git a/meta/recipes-devtools/python/python3-cryptography/check-memfree.py b/meta/recipes-devtools/python/python3-cryptography/check-memfree.py
index c111a9074c..ed680d8d5b 100755
--- a/meta/recipes-devtools/python/python3-cryptography/check-memfree.py
+++ b/meta/recipes-devtools/python/python3-cryptography/check-memfree.py
@@ -4,7 +4,7 @@ import sys
meminfo = dict((i.split()[0].rstrip(':'),int(i.split()[1])) for i in open('/proc/meminfo').readlines())
mem_free = meminfo['MemTotal']/1024./1024.
if mem_free < 2.:
- raise RuntimeError("Insufficient free memory({:.3f}): requires > 2 GB".format(mem_free))
+ print("Insufficient free memory({:.3f}): requires > 2 GB".format(mem_free))
sys.exit(1)
else:
print("Free memory: {:.3f} GB".format(mem_free))
diff --git a/meta/recipes-devtools/python/python3-cryptography/run-ptest b/meta/recipes-devtools/python/python3-cryptography/run-ptest
index 3089df8781..fe191a5dc4 100644
--- a/meta/recipes-devtools/python/python3-cryptography/run-ptest
+++ b/meta/recipes-devtools/python/python3-cryptography/run-ptest
@@ -1,4 +1,9 @@
#!/bin/sh
+
if ./check-memfree.py; then
- pytest -vvvv tests/ -o log_cli=true -o log_cli_level=INFO | sed -e 's/\[...%\]//g'| sed -e 's/PASSED/PASS/g'| sed -e 's/FAILED/FAIL/g'|sed -e 's/SKIPPED/SKIP/g'| awk '{if ($NF=="PASS" || $NF=="FAIL" || $NF=="SKIP" || $NF=="XFAIL" || $NF=="XPASS"){printf "%s: %s\n", $NF, $0}else{print}}'| awk '{if ($NF=="PASS" || $NF=="FAIL" || $NF=="SKIP" || $NF=="XFAIL" || $NF=="XPASS") {$NF="";print $0}else{print}}'
+ # Skip the bench test module, we don't yet have pytest3-benchmark in core
+ # and these are more benchmarks than unit tests.
+ pytest --automake -k 'not bench'
+else
+ echo "SKIP: crytography.not_enough_memory"
fi
diff --git a/meta/recipes-devtools/python/python3-cryptography_37.0.2.bb b/meta/recipes-devtools/python/python3-cryptography_37.0.2.bb
deleted file mode 100644
index d7a720bf2b..0000000000
--- a/meta/recipes-devtools/python/python3-cryptography_37.0.2.bb
+++ /dev/null
@@ -1,119 +0,0 @@
-SUMMARY = "Provides cryptographic recipes and primitives to python developers"
-HOMEPAGE = "https://cryptography.io/"
-SECTION = "devel/python"
-LICENSE = "( Apache-2.0 | BSD-3-Clause ) & PSF-2.0"
-LIC_FILES_CHKSUM = "file://LICENSE;md5=bf405a8056a6647e7d077b0e7bc36aba \
- file://LICENSE.APACHE;md5=4e168cce331e5c827d4c2b68a6200e1b \
- file://LICENSE.BSD;md5=5ae30ba4123bc4f2fa49aa0b0dce887b \
- file://LICENSE.PSF;md5=43c37d21e1dbad10cddcd150ba2c0595 \
- "
-LDSHARED += "-pthread"
-
-SRC_URI[sha256sum] = "f224ad253cc9cea7568f49077007d2263efa57396a2f2f78114066fd54b5c68e"
-
-SRC_URI += "file://run-ptest \
- file://check-memfree.py \
- file://0001-Cargo.toml-specify-pem-version.patch \
- file://0002-Cargo.toml-edition-2018-2021.patch \
- file://0001-pyproject.toml-remove-benchmark-disable-option.patch \
- crate://crates.io/Inflector/0.11.4 \
- crate://crates.io/aliasable/0.1.3 \
- crate://crates.io/asn1/0.8.7 \
- crate://crates.io/asn1_derive/0.8.7 \
- crate://crates.io/autocfg/1.1.0 \
- crate://crates.io/base64/0.13.0 \
- crate://crates.io/bitflags/1.3.2 \
- crate://crates.io/cfg-if/1.0.0 \
- crate://crates.io/chrono/0.4.19 \
- crate://crates.io/indoc-impl/0.3.6 \
- crate://crates.io/indoc/0.3.6 \
- crate://crates.io/instant/0.1.12 \
- crate://crates.io/lazy_static/1.4.0 \
- crate://crates.io/libc/0.2.124 \
- crate://crates.io/lock_api/0.4.7 \
- crate://crates.io/num-integer/0.1.44 \
- crate://crates.io/num-traits/0.2.14 \
- crate://crates.io/once_cell/1.10.0 \
- crate://crates.io/ouroboros/0.15.0 \
- crate://crates.io/ouroboros_macro/0.15.0 \
- crate://crates.io/parking_lot/0.11.2 \
- crate://crates.io/parking_lot_core/0.8.5 \
- crate://crates.io/paste-impl/0.1.18 \
- crate://crates.io/paste/0.1.18 \
- crate://crates.io/pem/1.0.2 \
- crate://crates.io/proc-macro-error-attr/1.0.4 \
- crate://crates.io/proc-macro-error/1.0.4 \
- crate://crates.io/proc-macro-hack/0.5.19 \
- crate://crates.io/proc-macro2/1.0.37 \
- crate://crates.io/pyo3-build-config/0.15.2 \
- crate://crates.io/pyo3-macros-backend/0.15.2 \
- crate://crates.io/pyo3-macros/0.15.2 \
- crate://crates.io/pyo3/0.15.2 \
- crate://crates.io/quote/1.0.18 \
- crate://crates.io/redox_syscall/0.2.13 \
- crate://crates.io/scopeguard/1.1.0 \
- crate://crates.io/smallvec/1.8.0 \
- crate://crates.io/stable_deref_trait/1.2.0 \
- crate://crates.io/syn/1.0.91 \
- crate://crates.io/unicode-xid/0.2.2 \
- crate://crates.io/unindent/0.1.8 \
- crate://crates.io/version_check/0.9.4 \
- crate://crates.io/winapi-i686-pc-windows-gnu/0.4.0 \
- crate://crates.io/winapi-x86_64-pc-windows-gnu/0.4.0 \
- crate://crates.io/winapi/0.3.9 \
- "
-
-inherit pypi python_setuptools3_rust
-
-DEPENDS += " \
- ${PYTHON_PN}-asn1crypto-native \
- ${PYTHON_PN}-cffi-native \
- ${PYTHON_PN}-setuptools-rust-native \
- ${PYTHON_PN}-six-native \
-"
-
-RDEPENDS:${PN} += " \
- ${PYTHON_PN}-asn1crypto \
- ${PYTHON_PN}-cffi \
- ${PYTHON_PN}-idna \
- ${PYTHON_PN}-setuptools \
- ${PYTHON_PN}-six \
-"
-
-RDEPENDS:${PN}:append:class-target = " \
- ${PYTHON_PN}-numbers \
- ${PYTHON_PN}-threading \
-"
-
-RDEPENDS:${PN}-ptest += " \
- ${PYTHON_PN}-bcrypt \
- ${PYTHON_PN}-cryptography-vectors (= ${PV}) \
- ${PYTHON_PN}-hypothesis \
- ${PYTHON_PN}-iso8601 \
- ${PYTHON_PN}-pretend \
- ${PYTHON_PN}-psutil \
- ${PYTHON_PN}-pytest \
- ${PYTHON_PN}-pytest-subtests \
- ${PYTHON_PN}-pytz \
- ${PYTHON_PN}-tomli \
-"
-
-inherit ptest
-
-do_install_ptest() {
- install -D ${WORKDIR}/check-memfree.py ${D}${PTEST_PATH}/
- install -d ${D}${PTEST_PATH}/tests
- cp -rf ${S}/tests/* ${D}${PTEST_PATH}/tests/
- install -d ${D}${PTEST_PATH}/tests/hazmat
- cp -rf ${S}/tests/hazmat/* ${D}${PTEST_PATH}/tests/hazmat/
- cp -r ${S}/pyproject.toml ${D}${PTEST_PATH}/
-}
-
-FILES:${PN}-ptest += " \
- ${PTEST_PATH}/check-memfree.py \
-"
-FILES:${PN}-dbg += " \
- ${PYTHON_SITEPACKAGES_DIR}/${SRCNAME}/hazmat/bindings/.debug \
-"
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/python/python3-cryptography_42.0.5.bb b/meta/recipes-devtools/python/python3-cryptography_42.0.5.bb
new file mode 100644
index 0000000000..732f925d92
--- /dev/null
+++ b/meta/recipes-devtools/python/python3-cryptography_42.0.5.bb
@@ -0,0 +1,67 @@
+SUMMARY = "Provides cryptographic recipes and primitives to python developers"
+HOMEPAGE = "https://cryptography.io/"
+SECTION = "devel/python"
+LICENSE = "Apache-2.0 | BSD-3-Clause"
+LIC_FILES_CHKSUM = "file://LICENSE;md5=8c3617db4fb6fae01f1d253ab91511e4 \
+ file://LICENSE.APACHE;md5=4e168cce331e5c827d4c2b68a6200e1b \
+ file://LICENSE.BSD;md5=5ae30ba4123bc4f2fa49aa0b0dce887b \
+ "
+LDSHARED += "-pthread"
+
+SRC_URI[sha256sum] = "6fe07eec95dfd477eb9530aef5bead34fec819b3aaf6c5bd6d20565da607bfe1"
+
+SRC_URI += "file://0001-pyproject.toml-remove-benchmark-disable-option.patch \
+ file://check-memfree.py \
+ file://run-ptest \
+ "
+
+require ${BPN}-crates.inc
+
+inherit pypi python_setuptools3_rust cargo-update-recipe-crates pkgconfig
+
+DEPENDS += " \
+ python3-cffi-native \
+"
+
+RDEPENDS:${PN} += " \
+ python3-cffi \
+"
+
+RDEPENDS:${PN}:append:class-target = " \
+ python3-numbers \
+ python3-threading \
+"
+
+RDEPENDS:${PN}-ptest += " \
+ python3-bcrypt \
+ python3-cryptography-vectors (= ${PV}) \
+ python3-hypothesis \
+ python3-iso8601 \
+ python3-mmap \
+ python3-pretend \
+ python3-psutil \
+ python3-pytest \
+ python3-unittest-automake-output \
+ python3-pytest-subtests \
+ python3-pytz \
+"
+
+inherit ptest
+
+do_install_ptest() {
+ install -D ${WORKDIR}/check-memfree.py ${D}${PTEST_PATH}/
+ install -d ${D}${PTEST_PATH}/tests
+ cp -rf ${S}/tests/* ${D}${PTEST_PATH}/tests/
+ # remove test_x509.py as it needs benchmark and we don't
+ # want to introduce the benchmark dependency
+ rm -rf ${D}${PTEST_PATH}/tests/bench/test_x509.py
+ install -d ${D}${PTEST_PATH}/tests/hazmat
+ cp -rf ${S}/tests/hazmat/* ${D}${PTEST_PATH}/tests/hazmat/
+ cp -r ${S}/pyproject.toml ${D}${PTEST_PATH}/
+}
+
+FILES:${PN}-dbg += " \
+ ${PYTHON_SITEPACKAGES_DIR}/${SRCNAME}/hazmat/bindings/.debug \
+"
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/python/python3-cython_0.29.28.bb b/meta/recipes-devtools/python/python3-cython_0.29.28.bb
deleted file mode 100644
index 26333cb271..0000000000
--- a/meta/recipes-devtools/python/python3-cython_0.29.28.bb
+++ /dev/null
@@ -1,37 +0,0 @@
-inherit setuptools3
-require python-cython.inc
-
-RDEPENDS:${PN} += "\
- python3-setuptools \
-"
-
-# running build_ext a second time during install fails, because Python
-# would then attempt to import cythonized modules built for the target
-# architecture.
-SETUPTOOLS_INSTALL_ARGS += "--skip-build"
-
-do_install:append() {
- # rename scripts that would conflict with the Python 2 build of Cython
- mv ${D}${bindir}/cython ${D}${bindir}/cython3
- mv ${D}${bindir}/cythonize ${D}${bindir}/cythonize3
- mv ${D}${bindir}/cygdb ${D}${bindir}/cygdb3
-}
-
-PACKAGEBUILDPKGD += "cython_fix_sources"
-
-cython_fix_sources () {
- for f in ${PKGD}/usr/src/debug/${PN}/${EXTENDPE}${PV}-${PR}/Cython-${PV}/Cython/Compiler/FlowControl.c \
- ${PKGD}/usr/src/debug/${PN}/${EXTENDPE}${PV}-${PR}/Cython-${PV}/Cython/Compiler/FusedNode.c \
- ${PKGD}/usr/src/debug/${PN}/${EXTENDPE}${PV}-${PR}/Cython-${PV}/Cython/Compiler/Scanning.c \
- ${PKGD}/usr/src/debug/${PN}/${EXTENDPE}${PV}-${PR}/Cython-${PV}/Cython/Compiler/Visitor.c \
- ${PKGD}/usr/src/debug/${PN}/${EXTENDPE}${PV}-${PR}/Cython-${PV}/Cython/Plex/Actions.c \
- ${PKGD}/usr/src/debug/${PN}/${EXTENDPE}${PV}-${PR}/Cython-${PV}/Cython/Plex/Scanners.c \
- ${PKGD}/usr/src/debug/${PN}/${EXTENDPE}${PV}-${PR}/Cython-${PV}/Cython/Runtime/refnanny.c \
- ${PKGD}/usr/src/debug/${PN}/${EXTENDPE}${PV}-${PR}/Cython-${PV}/Cython/Tempita/_tempita.c \
- ${PKGD}${libdir}/${PYTHON_DIR}/site-packages/Cython*/SOURCES.txt; do
- if [ -e $f ]; then
- sed -i -e 's#${WORKDIR}#/usr/src/debug/${PN}/${EXTENDPE}${PV}-${PR}#g' $f
- fi
- done
-}
-
diff --git a/meta/recipes-devtools/python/python3-cython_3.0.9.bb b/meta/recipes-devtools/python/python3-cython_3.0.9.bb
new file mode 100644
index 0000000000..07638d7ad7
--- /dev/null
+++ b/meta/recipes-devtools/python/python3-cython_3.0.9.bb
@@ -0,0 +1,37 @@
+inherit setuptools3
+require python-cython.inc
+
+RDEPENDS:${PN} += "\
+ python3-setuptools \
+"
+
+# running build_ext a second time during install fails, because Python
+# would then attempt to import cythonized modules built for the target
+# architecture.
+SETUPTOOLS_INSTALL_ARGS += "--skip-build"
+
+do_install:append() {
+ # rename scripts that would conflict with the Python 2 build of Cython
+ mv ${D}${bindir}/cython ${D}${bindir}/cython3
+ mv ${D}${bindir}/cythonize ${D}${bindir}/cythonize3
+ mv ${D}${bindir}/cygdb ${D}${bindir}/cygdb3
+}
+
+PACKAGESPLITFUNCS =+ "cython_fix_sources"
+
+cython_fix_sources () {
+ for f in ${PKGD}${TARGET_DBGSRC_DIR}/Cython/Compiler/FlowControl.c \
+ ${PKGD}${TARGET_DBGSRC_DIR}/Cython/Compiler/FusedNode.c \
+ ${PKGD}${TARGET_DBGSRC_DIR}/Cython/Compiler/Scanning.c \
+ ${PKGD}${TARGET_DBGSRC_DIR}/Cython/Compiler/Visitor.c \
+ ${PKGD}${TARGET_DBGSRC_DIR}/Cython/Plex/Actions.c \
+ ${PKGD}${TARGET_DBGSRC_DIR}/Cython/Plex/Scanners.c \
+ ${PKGD}${TARGET_DBGSRC_DIR}/Cython/Runtime/refnanny.c \
+ ${PKGD}${TARGET_DBGSRC_DIR}/Cython/Tempita/_tempita.c \
+ ${PKGD}${libdir}/${PYTHON_DIR}/site-packages/Cython*/SOURCES.txt; do
+ if [ -e $f ]; then
+ sed -i -e 's#${WORKDIR}/Cython-${PV}#${TARGET_DBGSRC_DIR}#g' $f
+ fi
+ done
+}
+
diff --git a/meta/recipes-devtools/python/python3-dbus_1.2.18.bb b/meta/recipes-devtools/python/python3-dbus_1.2.18.bb
deleted file mode 100644
index c4687de13c..0000000000
--- a/meta/recipes-devtools/python/python3-dbus_1.2.18.bb
+++ /dev/null
@@ -1,25 +0,0 @@
-SUMMARY = "Python bindings for the DBus inter-process communication system"
-SECTION = "devel/python"
-HOMEPAGE = "http://www.freedesktop.org/Software/dbus"
-LICENSE = "MIT"
-LIC_FILES_CHKSUM = "file://COPYING;md5=b03240518994df6d8c974675675e5ca4"
-DEPENDS = "expat dbus glib-2.0 virtual/libintl"
-
-SRC_URI = "http://dbus.freedesktop.org/releases/dbus-python/dbus-python-${PV}.tar.gz"
-
-SRC_URI[sha256sum] = "92bdd1e68b45596c833307a5ff4b217ee6929a1502f5341bae28fd120acf7260"
-
-S = "${WORKDIR}/dbus-python-${PV}"
-
-inherit setuptools3-base autotools pkgconfig
-
-# documentation needs python3-sphinx, which is not in oe-core or meta-python for now
-# change to use PACKAGECONFIG when python3-sphinx is added to oe-core
-EXTRA_OECONF += "--disable-documentation"
-
-
-RDEPENDS:${PN} = "python3-io python3-logging python3-stringold python3-threading python3-xml"
-
-FILES:${PN}-dev += "${libdir}/pkgconfig"
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/python/python3-dbus_1.3.2.bb b/meta/recipes-devtools/python/python3-dbus_1.3.2.bb
new file mode 100644
index 0000000000..e0cf64c70e
--- /dev/null
+++ b/meta/recipes-devtools/python/python3-dbus_1.3.2.bb
@@ -0,0 +1,23 @@
+SUMMARY = "Python bindings for the DBus inter-process communication system"
+SECTION = "devel/python"
+HOMEPAGE = "http://www.freedesktop.org/Software/dbus"
+LICENSE = "MIT"
+LIC_FILES_CHKSUM = "file://COPYING;md5=97f58951300aa52a9f9e3a62bd5c846c"
+DEPENDS = "expat dbus glib-2.0 virtual/libintl"
+
+SRC_URI = "http://dbus.freedesktop.org/releases/dbus-python/dbus-python-${PV}.tar.gz"
+
+SRC_URI[sha256sum] = "ad67819308618b5069537be237f8e68ca1c7fcc95ee4a121fe6845b1418248f8"
+
+S = "${WORKDIR}/dbus-python-${PV}"
+
+inherit setuptools3-base meson pkgconfig
+
+# requires dbus-run-session
+EXTRA_OEMESON += "-Dtests=false"
+
+RDEPENDS:${PN} = "python3-io python3-logging python3-stringold python3-threading python3-xml"
+
+FILES:${PN}-dev += "${libdir}/pkgconfig"
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/python/python3-dbusmock_0.27.5.bb b/meta/recipes-devtools/python/python3-dbusmock_0.27.5.bb
deleted file mode 100644
index 852d1555be..0000000000
--- a/meta/recipes-devtools/python/python3-dbusmock_0.27.5.bb
+++ /dev/null
@@ -1,18 +0,0 @@
-SUMMARY = "With this program/Python library you can easily create mock objects on D-Bus"
-HOMEPAGE = "https://pypi.org/project/python-dbusmock/"
-
-LICENSE = "GPL-3.0-only"
-LIC_FILES_CHKSUM = "file://COPYING;md5=e6a600fd5e1d9cbde2d983680233ad02"
-
-SRC_URI[sha256sum] = "0bff30d8d01e7eef491f75d359c87765abba7001307fa8fed19cb59f06ed3c2a"
-
-PYPI_PACKAGE = "python-dbusmock"
-
-inherit pypi setuptools3
-
-RDEPENDS:${PN} += "\
- ${PYTHON_PN}-dbus \
- ${PYTHON_PN}-pygobject \
- ${PYTHON_PN}-unittest \
- ${PYTHON_PN}-xml \
- "
diff --git a/meta/recipes-devtools/python/python3-dbusmock_0.31.1.bb b/meta/recipes-devtools/python/python3-dbusmock_0.31.1.bb
new file mode 100644
index 0000000000..fc32c6bbb6
--- /dev/null
+++ b/meta/recipes-devtools/python/python3-dbusmock_0.31.1.bb
@@ -0,0 +1,22 @@
+SUMMARY = "With this program/Python library you can easily create mock objects on D-Bus"
+HOMEPAGE = "https://pypi.org/project/python-dbusmock/"
+
+LICENSE = "GPL-3.0-only"
+LIC_FILES_CHKSUM = "file://COPYING;md5=e6a600fd5e1d9cbde2d983680233ad02"
+
+SRC_URI[sha256sum] = "b23b8e1b51fe2a9b13e617fff6b60b3ed8e536c080cf3498019d223678d5ea49"
+
+PYPI_PACKAGE = "python-dbusmock"
+
+inherit pypi python_setuptools_build_meta
+DEPENDS += "python3-setuptools-scm-native"
+
+RDEPENDS:${PN} += "\
+ python3-dbus \
+ python3-unittest \
+ python3-xml \
+ "
+
+RRECOMMENDS:${PN} = "${@bb.utils.contains('DISTRO_FEATURES', 'gobject-introspection-data', '${MLPREFIX}python3-pygobject', '', d)}"
+
+BBCLASSEXTEND = "native"
diff --git a/meta/recipes-devtools/python/python3-docutils_0.18.1.bb b/meta/recipes-devtools/python/python3-docutils_0.18.1.bb
deleted file mode 100644
index e4ddd01978..0000000000
--- a/meta/recipes-devtools/python/python3-docutils_0.18.1.bb
+++ /dev/null
@@ -1,11 +0,0 @@
-SUMMARY = "Docutils is a modular system for processing documentation into useful formats"
-HOMEPAGE = "http://docutils.sourceforge.net"
-SECTION = "devel/python"
-LICENSE = "PSF-2.0 & BSD-2-Clause & GPL-3.0-only"
-LIC_FILES_CHKSUM = "file://COPYING.txt;md5=fecee07ad8df9116e1f739e2ed2ea513"
-
-SRC_URI[sha256sum] = "679987caf361a7539d76e584cbeddc311e3aee937877c87346f31debc63e9d06"
-
-inherit pypi setuptools3
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/python/python3-docutils_0.20.1.bb b/meta/recipes-devtools/python/python3-docutils_0.20.1.bb
new file mode 100644
index 0000000000..6038732cc4
--- /dev/null
+++ b/meta/recipes-devtools/python/python3-docutils_0.20.1.bb
@@ -0,0 +1,20 @@
+SUMMARY = "Docutils is a modular system for processing documentation into useful formats"
+HOMEPAGE = "http://docutils.sourceforge.net"
+SECTION = "devel/python"
+LICENSE = "PSF-2.0 & BSD-2-Clause & GPL-3.0-only"
+LIC_FILES_CHKSUM = "file://COPYING.txt;md5=08f5f8aa6a1db2500c08a2bb558e45af"
+
+SRC_URI[sha256sum] = "f08a4e276c3a1583a86dce3e34aba3fe04d02bba2dd51ed16106244e8a923e3b"
+
+inherit pypi setuptools3
+
+do_install:append() {
+ for f in rst2html rst2html4 rst2html5 rst2latex rst2man \
+ rst2odt rst2odt_prepstyles rst2pseudoxml rst2s5 rst2xetex rst2xml \
+ rstpep2html
+ do
+ mv ${D}${bindir}/$f.py ${D}${bindir}/$f;
+ done
+}
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/python/python3-dtc/0001-Revert-libfdt-overlay-make-overlay_get_target-public.patch b/meta/recipes-devtools/python/python3-dtc/0001-Revert-libfdt-overlay-make-overlay_get_target-public.patch
new file mode 100644
index 0000000000..a2df482e3a
--- /dev/null
+++ b/meta/recipes-devtools/python/python3-dtc/0001-Revert-libfdt-overlay-make-overlay_get_target-public.patch
@@ -0,0 +1,132 @@
+From 4d4703e0199fb3556c37694e4d951785abca22fd Mon Sep 17 00:00:00 2001
+From: Bruce Ashfield <bruce.ashfield@gmail.com>
+Date: Wed, 19 Jan 2022 12:46:42 -0500
+Subject: [PATCH] Revert "libfdt: overlay: make overlay_get_target() public"
+
+This reverts commit 45f3d1a095dd3440578d5c6313eba555a791f3fb.
+
+Upstream-Status: Inappropriate [embedded specific]
+
+---
+ libfdt/fdt_overlay.c | 29 ++++++++++++++++++++++-------
+ libfdt/libfdt.h | 18 ------------------
+ libfdt/version.lds | 1 -
+ 3 files changed, 22 insertions(+), 26 deletions(-)
+
+diff --git a/libfdt/fdt_overlay.c b/libfdt/fdt_overlay.c
+index 5c0c398..d217e79 100644
+--- a/libfdt/fdt_overlay.c
++++ b/libfdt/fdt_overlay.c
+@@ -40,22 +40,37 @@ static uint32_t overlay_get_target_phandle(const void *fdto, int fragment)
+ return fdt32_to_cpu(*val);
+ }
+
+-int fdt_overlay_target_offset(const void *fdt, const void *fdto,
+- int fragment_offset, char const **pathp)
++/**
++ * overlay_get_target - retrieves the offset of a fragment's target
++ * @fdt: Base device tree blob
++ * @fdto: Device tree overlay blob
++ * @fragment: node offset of the fragment in the overlay
++ * @pathp: pointer which receives the path of the target (or NULL)
++ *
++ * overlay_get_target() retrieves the target offset in the base
++ * device tree of a fragment, no matter how the actual targeting is
++ * done (through a phandle or a path)
++ *
++ * returns:
++ * the targeted node offset in the base device tree
++ * Negative error code on error
++ */
++static int overlay_get_target(const void *fdt, const void *fdto,
++ int fragment, char const **pathp)
+ {
+ uint32_t phandle;
+ const char *path = NULL;
+ int path_len = 0, ret;
+
+ /* Try first to do a phandle based lookup */
+- phandle = overlay_get_target_phandle(fdto, fragment_offset);
++ phandle = overlay_get_target_phandle(fdto, fragment);
+ if (phandle == (uint32_t)-1)
+ return -FDT_ERR_BADPHANDLE;
+
+ /* no phandle, try path */
+ if (!phandle) {
+ /* And then a path based lookup */
+- path = fdt_getprop(fdto, fragment_offset, "target-path", &path_len);
++ path = fdt_getprop(fdto, fragment, "target-path", &path_len);
+ if (path)
+ ret = fdt_path_offset(fdt, path);
+ else
+@@ -621,7 +636,7 @@ static int overlay_merge(void *fdt, void *fdto)
+ if (overlay < 0)
+ return overlay;
+
+- target = fdt_overlay_target_offset(fdt, fdto, fragment, NULL);
++ target = overlay_get_target(fdt, fdto, fragment, NULL);
+ if (target < 0)
+ return target;
+
+@@ -764,7 +779,7 @@ static int overlay_symbol_update(void *fdt, void *fdto)
+ return -FDT_ERR_BADOVERLAY;
+
+ /* get the target of the fragment */
+- ret = fdt_overlay_target_offset(fdt, fdto, fragment, &target_path);
++ ret = overlay_get_target(fdt, fdto, fragment, &target_path);
+ if (ret < 0)
+ return ret;
+ target = ret;
+@@ -786,7 +801,7 @@ static int overlay_symbol_update(void *fdt, void *fdto)
+
+ if (!target_path) {
+ /* again in case setprop_placeholder changed it */
+- ret = fdt_overlay_target_offset(fdt, fdto, fragment, &target_path);
++ ret = overlay_get_target(fdt, fdto, fragment, &target_path);
+ if (ret < 0)
+ return ret;
+ target = ret;
+diff --git a/libfdt/libfdt.h b/libfdt/libfdt.h
+index a7f432c..7f117e8 100644
+--- a/libfdt/libfdt.h
++++ b/libfdt/libfdt.h
+@@ -2116,24 +2116,6 @@ int fdt_del_node(void *fdt, int nodeoffset);
+ */
+ int fdt_overlay_apply(void *fdt, void *fdto);
+
+-/**
+- * fdt_overlay_target_offset - retrieves the offset of a fragment's target
+- * @fdt: Base device tree blob
+- * @fdto: Device tree overlay blob
+- * @fragment_offset: node offset of the fragment in the overlay
+- * @pathp: pointer which receives the path of the target (or NULL)
+- *
+- * fdt_overlay_target_offset() retrieves the target offset in the base
+- * device tree of a fragment, no matter how the actual targeting is
+- * done (through a phandle or a path)
+- *
+- * returns:
+- * the targeted node offset in the base device tree
+- * Negative error code on error
+- */
+-int fdt_overlay_target_offset(const void *fdt, const void *fdto,
+- int fragment_offset, char const **pathp);
+-
+ /**********************************************************************/
+ /* Debugging / informational functions */
+ /**********************************************************************/
+diff --git a/libfdt/version.lds b/libfdt/version.lds
+index cbce5d4..7ab85f1 100644
+--- a/libfdt/version.lds
++++ b/libfdt/version.lds
+@@ -77,7 +77,6 @@ LIBFDT_1.2 {
+ fdt_appendprop_addrrange;
+ fdt_setprop_inplace_namelen_partial;
+ fdt_create_with_flags;
+- fdt_overlay_target_offset;
+ local:
+ *;
+ };
+--
+2.19.1
+
diff --git a/meta/recipes-devtools/python/python3-dtc_1.7.0.bb b/meta/recipes-devtools/python/python3-dtc_1.7.0.bb
new file mode 100644
index 0000000000..85e48d4694
--- /dev/null
+++ b/meta/recipes-devtools/python/python3-dtc_1.7.0.bb
@@ -0,0 +1,25 @@
+SUMMARY = "Python Library for the Device Tree Compiler"
+HOMEPAGE = "https://devicetree.org/"
+DESCRIPTION = "A python library for the Device Tree Compiler, a tool used to manipulate Device Tree files which contain a data structure for describing hardware."
+SECTION = "bootloader"
+LICENSE = "GPL-2.0-only | BSD-2-Clause"
+
+DEPENDS = "flex-native bison-native swig-native python3-setuptools-scm-native libyaml dtc"
+
+SRC_URI = "git://git.kernel.org/pub/scm/utils/dtc/dtc.git;branch=master \
+ file://0001-Revert-libfdt-overlay-make-overlay_get_target-public.patch \
+ "
+
+UPSTREAM_CHECK_GITTAGREGEX = "v(?P<pver>\d+(\.\d+)+)"
+
+LIC_FILES_CHKSUM = "file://pylibfdt/libfdt.i;beginline=1;endline=6;md5=afda088c974174a29108c8d80b5dce90"
+
+SRCREV = "039a99414e778332d8f9c04cbd3072e1dcc62798"
+
+S = "${WORKDIR}/git"
+
+PYPA_WHEEL = "${S}/dist/libfdt-1.6.2*.whl"
+
+inherit setuptools3 pkgconfig
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/python/python3-dtschema_2022.4.bb b/meta/recipes-devtools/python/python3-dtschema_2022.4.bb
deleted file mode 100644
index 7bc89c844c..0000000000
--- a/meta/recipes-devtools/python/python3-dtschema_2022.4.bb
+++ /dev/null
@@ -1,15 +0,0 @@
-DESCRIPTION = "Tooling for devicetree validation using YAML and jsonschema"
-HOMEPAGE = "https://github.com/devicetree-org/dt-schema"
-LICENSE = "BSD-2-Clause"
-LIC_FILES_CHKSUM = "file://LICENSE.txt;md5=457495c8fa03540db4a576bf7869e811"
-
-inherit pypi setuptools3
-
-PYPI_PACKAGE = "dtschema"
-
-SRC_URI[sha256sum] = "c70a644e0100b5bacd44839a2316291d8eee91f6535a8419459de59fd0fcf6ce"
-
-DEPENDS += "python3-setuptools-scm-native"
-RDEPENDS:${PN} += "python3-ruamel-yaml python3-jsonschema python3-rfc3987"
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/python/python3-dtschema_2024.2.bb b/meta/recipes-devtools/python/python3-dtschema_2024.2.bb
new file mode 100644
index 0000000000..0e911dfb3d
--- /dev/null
+++ b/meta/recipes-devtools/python/python3-dtschema_2024.2.bb
@@ -0,0 +1,20 @@
+SUMMARY = "Tooling for devicetree validation using YAML and jsonschema"
+HOMEPAGE = "https://github.com/devicetree-org/dt-schema"
+LICENSE = "BSD-2-Clause"
+LIC_FILES_CHKSUM = "file://LICENSE.txt;md5=457495c8fa03540db4a576bf7869e811"
+
+inherit pypi python_setuptools_build_meta
+
+PYPI_PACKAGE = "dtschema"
+
+SRC_URI[sha256sum] = "df4e5afb35bda93894209d2465e87fb7103f1a95a05909ebcb594fc4cf4fdd1e"
+
+DEPENDS += "python3-setuptools-scm-native"
+RDEPENDS:${PN} += "\
+ python3-dtc \
+ python3-jsonschema \
+ python3-rfc3987 \
+ python3-ruamel-yaml \
+ "
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/python/python3-editables_0.5.bb b/meta/recipes-devtools/python/python3-editables_0.5.bb
new file mode 100644
index 0000000000..f3261183cb
--- /dev/null
+++ b/meta/recipes-devtools/python/python3-editables_0.5.bb
@@ -0,0 +1,15 @@
+SUMMARY = "A Python library for creating editable wheels"
+HOMEPAGE = "https://github.com/pfmoore/editables"
+SECTION = "devel/python"
+LICENSE = "MIT"
+LIC_FILES_CHKSUM = "file://LICENSE.txt;md5=41bc1be47b7bb8240db3ef928c7cb0bf"
+
+SRC_URI[sha256sum] = "309627d9b5c4adc0e668d8c6fa7bac1ba7c8c5d415c2d27f60f081f8e80d1de2"
+
+inherit pypi python_setuptools_build_meta
+
+RDEPENDS:${PN} += "\
+ python3-io \
+"
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/python/python3-flit-core_3.7.1.bb b/meta/recipes-devtools/python/python3-flit-core_3.7.1.bb
deleted file mode 100644
index 8d107384a6..0000000000
--- a/meta/recipes-devtools/python/python3-flit-core_3.7.1.bb
+++ /dev/null
@@ -1,33 +0,0 @@
-SUMMARY = "This provides a PEP 517 build backend for packages using Flit."
-DESCRIPTION = "This provides a PEP 517 build backend for packages using \
-Flit. The only public interface is the API specified by PEP 517, at \
-flit_core.buildapi."
-HOMEPAGE = "https://github.com/pypa/flit"
-BUGTRACKER = "https://github.com/pypa/flit/issues"
-
-LICENSE = "BSD-3-Clause"
-LIC_FILES_CHKSUM = "file://LICENSE;md5=41eb78fa8a872983a882c694a8305f08"
-
-SRC_URI[sha256sum] = "3c9bd9c140515bfe62dd938c6610d10d6efb9e35cc647fc614fe5fb3a5036682"
-
-inherit pypi python_flit_core
-
-# Need to install by hand as there's a dependency loop
-DEPENDS:remove:class-native = " python3-installer-native"
-DEPENDS:append:class-native = " unzip-native"
-
-# We need the full flit tarball
-PYPI_PACKAGE = "flit"
-PEP517_SOURCE_PATH = "${S}/flit_core"
-
-do_install:class-native () {
- python_pep517_do_bootstrap_install
-}
-
-PACKAGES =+ "${PN}-tests"
-
-FILES:${PN}-tests += "\
- ${PYTHON_SITEPACKAGES_DIR}/flit_core/tests/* \
-"
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/python/python3-flit-core_3.9.0.bb b/meta/recipes-devtools/python/python3-flit-core_3.9.0.bb
new file mode 100644
index 0000000000..b0bef4b6b6
--- /dev/null
+++ b/meta/recipes-devtools/python/python3-flit-core_3.9.0.bb
@@ -0,0 +1,37 @@
+SUMMARY = "This provides a PEP 517 build backend for packages using Flit."
+DESCRIPTION = "This provides a PEP 517 build backend for packages using \
+Flit. The only public interface is the API specified by PEP 517, at \
+flit_core.buildapi."
+HOMEPAGE = "https://github.com/pypa/flit"
+BUGTRACKER = "https://github.com/pypa/flit/issues"
+
+LICENSE = "BSD-3-Clause"
+LIC_FILES_CHKSUM = "file://LICENSE;md5=41eb78fa8a872983a882c694a8305f08"
+
+SRC_URI[sha256sum] = "d75edf5eb324da20d53570a6a6f87f51e606eee8384925cd66a90611140844c7"
+
+inherit pypi python_flit_core
+
+# Need to install by hand as there's a dependency loop
+DEPENDS:remove:class-native = " python3-build-native python3-installer-native"
+DEPENDS:append:class-native = " unzip-native"
+
+# We need the full flit tarball
+PYPI_PACKAGE = "flit"
+PEP517_SOURCE_PATH = "${S}/flit_core"
+
+do_compile:class-native () {
+ python_flit_core_do_manual_build
+}
+
+do_install:class-native () {
+ python_pep517_do_bootstrap_install
+}
+
+PACKAGES =+ "${PN}-tests"
+
+FILES:${PN}-tests += "\
+ ${PYTHON_SITEPACKAGES_DIR}/flit_core/tests/* \
+"
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/python/python3-git_3.1.27.bb b/meta/recipes-devtools/python/python3-git_3.1.27.bb
deleted file mode 100644
index fb1bae8f8e..0000000000
--- a/meta/recipes-devtools/python/python3-git_3.1.27.bb
+++ /dev/null
@@ -1,32 +0,0 @@
-SUMMARY = "Python library used to interact with Git repositories"
-DESCRIPTION = "GitPython provides object model read and write access to \
-a git repository. Access repository information conveniently, alter the \
-index directly, handle remotes, or go down to low-level object database \
-access with big-files support."
-HOMEPAGE = "http://github.com/gitpython-developers/GitPython"
-SECTION = "devel/python"
-LICENSE = "BSD-3-Clause"
-LIC_FILES_CHKSUM = "file://LICENSE;md5=8b8d26c37c1d5a04f9b0186edbebc183"
-
-PYPI_PACKAGE = "GitPython"
-
-inherit pypi python_setuptools_build_meta
-
-SRC_URI[sha256sum] = "1c885ce809e8ba2d88a29befeb385fcea06338d3640712b59ca623c220bb5704"
-
-DEPENDS += " ${PYTHON_PN}-gitdb"
-
-RDEPENDS:${PN} += " \
- ${PYTHON_PN}-datetime \
- ${PYTHON_PN}-gitdb \
- ${PYTHON_PN}-io \
- ${PYTHON_PN}-logging \
- ${PYTHON_PN}-math \
- ${PYTHON_PN}-netclient \
- ${PYTHON_PN}-stringold \
- ${PYTHON_PN}-unittest \
- ${PYTHON_PN}-unixadmin \
- git \
-"
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/python/python3-git_3.1.43.bb b/meta/recipes-devtools/python/python3-git_3.1.43.bb
new file mode 100644
index 0000000000..45c988117b
--- /dev/null
+++ b/meta/recipes-devtools/python/python3-git_3.1.43.bb
@@ -0,0 +1,32 @@
+SUMMARY = "Python library used to interact with Git repositories"
+DESCRIPTION = "GitPython provides object model read and write access to \
+a git repository. Access repository information conveniently, alter the \
+index directly, handle remotes, or go down to low-level object database \
+access with big-files support."
+HOMEPAGE = "http://github.com/gitpython-developers/GitPython"
+SECTION = "devel/python"
+LICENSE = "BSD-3-Clause"
+LIC_FILES_CHKSUM = "file://LICENSE;md5=5279a7ab369ba336989dcf2a107e5c8e"
+
+PYPI_PACKAGE = "GitPython"
+
+inherit pypi python_setuptools_build_meta
+
+SRC_URI[sha256sum] = "35f314a9f878467f5453cc1fee295c3e18e52f1b99f10f6cf5b1682e968a9e7c"
+
+DEPENDS += " python3-gitdb"
+
+RDEPENDS:${PN} += " \
+ python3-datetime \
+ python3-gitdb \
+ python3-io \
+ python3-logging \
+ python3-math \
+ python3-netclient \
+ python3-stringold \
+ python3-unittest \
+ python3-unixadmin \
+ git \
+"
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/python/python3-gitdb_4.0.11.bb b/meta/recipes-devtools/python/python3-gitdb_4.0.11.bb
new file mode 100644
index 0000000000..25e1a2df7e
--- /dev/null
+++ b/meta/recipes-devtools/python/python3-gitdb_4.0.11.bb
@@ -0,0 +1,22 @@
+SUMMARY = "A pure-Python git object database"
+HOMEPAGE = "http://github.com/gitpython-developers/gitdb"
+SECTION = "devel/python"
+LICENSE = "BSD-3-Clause"
+LIC_FILES_CHKSUM = "file://LICENSE;md5=59e5ecb13339a936eedf83282eaf4528"
+
+DEPENDS = "python3-smmap"
+
+inherit pypi setuptools3
+
+PYPI_PACKAGE = "gitdb"
+
+SRC_URI[sha256sum] = "bf5421126136d6d0af55bc1e7c1af1c397a34f5b7bd79e776cd3e89785c2b04b"
+
+RDEPENDS:${PN} += "python3-compression \
+ python3-crypt \
+ python3-io \
+ python3-mmap \
+ python3-shell \
+ python3-smmap \
+"
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/python/python3-gitdb_4.0.9.bb b/meta/recipes-devtools/python/python3-gitdb_4.0.9.bb
deleted file mode 100644
index 2dcd9c8aff..0000000000
--- a/meta/recipes-devtools/python/python3-gitdb_4.0.9.bb
+++ /dev/null
@@ -1,3 +0,0 @@
-inherit setuptools3
-require python-gitdb.inc
-
diff --git a/meta/recipes-devtools/python/python3-hatch-fancy-pypi-readme_24.1.0.bb b/meta/recipes-devtools/python/python3-hatch-fancy-pypi-readme_24.1.0.bb
new file mode 100644
index 0000000000..1e5c67f832
--- /dev/null
+++ b/meta/recipes-devtools/python/python3-hatch-fancy-pypi-readme_24.1.0.bb
@@ -0,0 +1,14 @@
+SUMMARY = "Hatch plugin for fancy PyPI readmes "
+HOMEPAGE = "https://pypi.org/project/hatch-fancy-pypi-readme/"
+LICENSE = "MIT"
+LIC_FILES_CHKSUM = "file://LICENSE.txt;md5=ba5633c60bd3e243091013aa83b4d807"
+
+inherit pypi python_hatchling
+
+PYPI_PACKAGE = "hatch_fancy_pypi_readme"
+
+SRC_URI[sha256sum] = "44dd239f1a779b9dcf8ebc9401a611fd7f7e3e14578dcf22c265dfaf7c1514b8"
+
+BBCLASSEXTEND = "native nativesdk"
+
+UPSTREAM_CHECK_REGEX = "/hatch-fancy-pypi-readme/(?P<pver>(\d+[\.\-_]*)+)/"
diff --git a/meta/recipes-devtools/python/python3-hatch-vcs_0.4.0.bb b/meta/recipes-devtools/python/python3-hatch-vcs_0.4.0.bb
new file mode 100644
index 0000000000..2e49aba469
--- /dev/null
+++ b/meta/recipes-devtools/python/python3-hatch-vcs_0.4.0.bb
@@ -0,0 +1,14 @@
+SUMMARY = "Hatch plugin for versioning with your preferred VCS"
+HOMEPAGE = "https://pypi.org/project/hatch-vcs/"
+LICENSE = "MIT"
+LIC_FILES_CHKSUM = "file://LICENSE.txt;md5=26501cfd0bbddf830ee820e95551fa3d"
+
+inherit pypi python_hatchling
+
+PYPI_PACKAGE = "hatch_vcs"
+
+SRC_URI[sha256sum] = "093810748fe01db0d451fabcf2c1ac2688caefd232d4ede967090b1c1b07d9f7"
+
+BBCLASSEXTEND = "native nativesdk"
+
+UPSTREAM_CHECK_REGEX = "/hatch-vcs/(?P<pver>(\d+[\.\-_]*)+)/"
diff --git a/meta/recipes-devtools/python/python3-hatchling_1.22.4.bb b/meta/recipes-devtools/python/python3-hatchling_1.22.4.bb
new file mode 100644
index 0000000000..d2f32d8b1a
--- /dev/null
+++ b/meta/recipes-devtools/python/python3-hatchling_1.22.4.bb
@@ -0,0 +1,17 @@
+SUMMARY = "The extensible, standards compliant build backend used by Hatch"
+HOMEPAGE = "https://hatch.pypa.io/"
+LICENSE = "MIT"
+LIC_FILES_CHKSUM = "file://LICENSE.txt;md5=cbe2fd33fc9297692812fc94b7d27fd9"
+
+inherit pypi python_hatchling
+
+DEPENDS += "python3-pluggy-native python3-pathspec-native python3-packaging-native python3-editables-native python3-trove-classifiers-native"
+DEPENDS:remove:class-native = "python3-hatchling-native"
+
+SRC_URI[sha256sum] = "8a2dcec96d7fb848382ef5848e5ac43fdae641f35a08a3fab5116bd495f3416e"
+
+do_compile:prepend() {
+ export PYTHONPATH=src
+}
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/python/python3-hypothesis/run-ptest b/meta/recipes-devtools/python/python3-hypothesis/run-ptest
index 8d44d7c49a..54f6e7930f 100644
--- a/meta/recipes-devtools/python/python3-hypothesis/run-ptest
+++ b/meta/recipes-devtools/python/python3-hypothesis/run-ptest
@@ -7,4 +7,4 @@
#
# Instead we run two test suites imported from examples/
-pytest -o log_cli=true -o log_cli_level=INFO | sed -e 's/\[...%\]//g'| sed -e 's/PASSED/PASS/g'| sed -e 's/FAILED/FAIL/g'|sed -e 's/SKIPPED/SKIP/g'| awk '{if ($NF=="PASS" || $NF=="FAIL" || $NF=="SKIP" || $NF=="XFAIL" || $NF=="XPASS"){printf "%s: %s\n", $NF, $0}else{print}}'| awk '{if ($NF=="PASS" || $NF=="FAIL" || $NF=="SKIP" || $NF=="XFAIL" || $NF=="XPASS") {$NF="";print $0}else{print}}'
+pytest --automake
diff --git a/meta/recipes-devtools/python/python3-hypothesis_6.46.4.bb b/meta/recipes-devtools/python/python3-hypothesis_6.46.4.bb
deleted file mode 100644
index 86d6793670..0000000000
--- a/meta/recipes-devtools/python/python3-hypothesis_6.46.4.bb
+++ /dev/null
@@ -1,38 +0,0 @@
-SUMMARY = "A library for property-based testing"
-HOMEPAGE = "https://github.com/HypothesisWorks/hypothesis/tree/master/hypothesis-python"
-LICENSE = "MPL-2.0"
-LIC_FILES_CHKSUM = "file://LICENSE.txt;md5=4ee62c16ebd0f4f99d906f36b7de8c3c"
-
-PYPI_PACKAGE = "hypothesis"
-
-inherit pypi setuptools3 ptest
-
-SRC_URI += " \
- file://run-ptest \
- file://test_binary_search.py \
- file://test_rle.py \
- "
-
-SRC_URI[sha256sum] = "f5c24a3d3f8a0d8de2dd33079bf1580a3cfbfe74763187d47570fc9beea84c9b"
-
-RDEPENDS:${PN} += " \
- python3-attrs \
- python3-compression \
- python3-core \
- python3-json \
- python3-sortedcontainers \
- python3-statistics \
- python3-unittest \
- "
-
-RDEPENDS:${PN}-ptest += " \
- ${PYTHON_PN}-pytest \
- "
-
-do_install_ptest() {
- install -d ${D}${PTEST_PATH}/examples
- install -m 0755 ${WORKDIR}/test_binary_search.py ${D}${PTEST_PATH}/examples/
- install -m 0755 ${WORKDIR}/test_rle.py ${D}${PTEST_PATH}/examples/
-}
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/python/python3-hypothesis_6.99.4.bb b/meta/recipes-devtools/python/python3-hypothesis_6.99.4.bb
new file mode 100644
index 0000000000..64b8cf2c31
--- /dev/null
+++ b/meta/recipes-devtools/python/python3-hypothesis_6.99.4.bb
@@ -0,0 +1,39 @@
+SUMMARY = "A library for property-based testing"
+HOMEPAGE = "https://github.com/HypothesisWorks/hypothesis/tree/master/hypothesis-python"
+LICENSE = "MPL-2.0"
+LIC_FILES_CHKSUM = "file://LICENSE.txt;md5=4ee62c16ebd0f4f99d906f36b7de8c3c"
+
+PYPI_PACKAGE = "hypothesis"
+
+inherit pypi setuptools3 ptest
+
+SRC_URI += " \
+ file://run-ptest \
+ file://test_binary_search.py \
+ file://test_rle.py \
+ "
+
+SRC_URI[sha256sum] = "edc8f984dba5d1b69a6a4564246b7850fa7ec351d2b27c9e7a43c91deab8d45c"
+
+RDEPENDS:${PN} += " \
+ python3-attrs \
+ python3-compression \
+ python3-core \
+ python3-json \
+ python3-pytest \
+ python3-sortedcontainers \
+ python3-statistics \
+ python3-unittest \
+ "
+
+RDEPENDS:${PN}-ptest += " \
+ python3-unittest-automake-output \
+ "
+
+do_install_ptest() {
+ install -d ${D}${PTEST_PATH}/examples
+ install -m 0755 ${WORKDIR}/test_binary_search.py ${D}${PTEST_PATH}/examples/
+ install -m 0755 ${WORKDIR}/test_rle.py ${D}${PTEST_PATH}/examples/
+}
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/python/python3-idna_3.3.bb b/meta/recipes-devtools/python/python3-idna_3.3.bb
deleted file mode 100644
index ee92f44fd5..0000000000
--- a/meta/recipes-devtools/python/python3-idna_3.3.bb
+++ /dev/null
@@ -1,19 +0,0 @@
-SUMMARY = "Internationalised Domain Names in Applications"
-HOMEPAGE = "https://github.com/kjd/idna"
-LICENSE = "BSD-3-Clause & Python-2.0 & Unicode-TOU"
-LIC_FILES_CHKSUM = "file://LICENSE.md;md5=239668a7c6066d9e0c5382e9c8c6c0e1"
-
-SRC_URI[sha256sum] = "9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d"
-
-inherit pypi setuptools3
-
-# Remove bundled egg-info
-do_compile:prepend() {
- rm -rf ${S}/idna.egg-info
-}
-
-RDEPENDS:${PN}:class-target = "\
- ${PYTHON_PN}-codecs \
-"
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/python/python3-idna_3.6.bb b/meta/recipes-devtools/python/python3-idna_3.6.bb
new file mode 100644
index 0000000000..47c080cdf8
--- /dev/null
+++ b/meta/recipes-devtools/python/python3-idna_3.6.bb
@@ -0,0 +1,14 @@
+SUMMARY = "Internationalised Domain Names in Applications"
+HOMEPAGE = "https://github.com/kjd/idna"
+LICENSE = "BSD-3-Clause & Python-2.0 & Unicode-TOU"
+LIC_FILES_CHKSUM = "file://LICENSE.md;md5=dbec47b98e1469f6a104c82ff9698cee"
+
+SRC_URI[sha256sum] = "9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"
+
+inherit pypi python_flit_core
+
+RDEPENDS:${PN}:class-target = "\
+ python3-codecs \
+"
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/python/python3-imagesize_1.3.0.bb b/meta/recipes-devtools/python/python3-imagesize_1.3.0.bb
deleted file mode 100644
index 7d8eb80cf3..0000000000
--- a/meta/recipes-devtools/python/python3-imagesize_1.3.0.bb
+++ /dev/null
@@ -1,13 +0,0 @@
-DESCRIPTION = "Parses image files’ header and return image size."
-HOMEPAGE = "https://github.com/shibukawa/imagesize_py"
-SECTION = "devel/python"
-LICENSE = "MIT"
-LIC_FILES_CHKSUM = "file://LICENSE.rst;md5=0c128f0f7e8a02e1b83884c0b5a41cda"
-
-SRC_URI[sha256sum] = "cd1750d452385ca327479d45b64d9c7729ecf0b3969a58148298c77092261f9d"
-
-inherit setuptools3 pypi
-
-BBCLASSEXTEND = "native nativesdk"
-
-RDEPENDS:${PN} = "python3-xml"
diff --git a/meta/recipes-devtools/python/python3-imagesize_1.4.1.bb b/meta/recipes-devtools/python/python3-imagesize_1.4.1.bb
new file mode 100644
index 0000000000..4edac0a533
--- /dev/null
+++ b/meta/recipes-devtools/python/python3-imagesize_1.4.1.bb
@@ -0,0 +1,13 @@
+SUMMARY = "Parses image files’ header and return image size."
+HOMEPAGE = "https://github.com/shibukawa/imagesize_py"
+SECTION = "devel/python"
+LICENSE = "MIT"
+LIC_FILES_CHKSUM = "file://LICENSE.rst;md5=0c128f0f7e8a02e1b83884c0b5a41cda"
+
+SRC_URI[sha256sum] = "69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a"
+
+inherit setuptools3 pypi
+
+BBCLASSEXTEND = "native nativesdk"
+
+RDEPENDS:${PN} = "python3-xml"
diff --git a/meta/recipes-devtools/python/python3-importlib-metadata_4.11.3.bb b/meta/recipes-devtools/python/python3-importlib-metadata_4.11.3.bb
deleted file mode 100644
index d8e56ccb25..0000000000
--- a/meta/recipes-devtools/python/python3-importlib-metadata_4.11.3.bb
+++ /dev/null
@@ -1,20 +0,0 @@
-DESCRIPTION = "Read metadata from Python packages"
-HOMEPAGE = "https://pypi.org/project/importlib-metadata/"
-LICENSE = "Apache-2.0"
-LIC_FILES_CHKSUM = "file://LICENSE;md5=e88ae122f3925d8bde8319060f2ddb8e"
-
-inherit pypi python_setuptools_build_meta
-
-PYPI_PACKAGE = "importlib_metadata"
-UPSTREAM_CHECK_REGEX = "/importlib-metadata/(?P<pver>(\d+[\.\-_]*)+)/"
-
-SRC_URI[sha256sum] = "ea4c597ebf37142f827b8f39299579e31685c31d3a438b59f469406afd0f2539"
-
-S = "${WORKDIR}/importlib_metadata-${PV}"
-
-DEPENDS += "${PYTHON_PN}-setuptools-scm-native ${PYTHON_PN}-toml-native"
-RDEPENDS:${PN} += "${PYTHON_PN}-zipp ${PYTHON_PN}-pathlib2"
-RDEPENDS:${PN}:append:class-target = " python3-misc"
-RDEPENDS:${PN}:append:class-nativesdk = " python3-misc"
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/python/python3-importlib-metadata_7.1.0.bb b/meta/recipes-devtools/python/python3-importlib-metadata_7.1.0.bb
new file mode 100644
index 0000000000..fdb37cecef
--- /dev/null
+++ b/meta/recipes-devtools/python/python3-importlib-metadata_7.1.0.bb
@@ -0,0 +1,20 @@
+SUMMARY = "Read metadata from Python packages"
+HOMEPAGE = "https://pypi.org/project/importlib-metadata/"
+LICENSE = "Apache-2.0"
+LIC_FILES_CHKSUM = "file://LICENSE;md5=3b83ef96387f14655fc854ddc3c6bd57"
+
+inherit pypi python_setuptools_build_meta
+
+PYPI_PACKAGE = "importlib_metadata"
+UPSTREAM_CHECK_REGEX = "/importlib-metadata/(?P<pver>(\d+[\.\-_]*)+)/"
+
+SRC_URI[sha256sum] = "b78938b926ee8d5f020fc4772d487045805a55ddbad2ecf21c6d60938dc7fcd2"
+
+S = "${WORKDIR}/importlib_metadata-${PV}"
+
+DEPENDS += "python3-setuptools-scm-native python3-toml-native"
+RDEPENDS:${PN} += "python3-zipp python3-pathlib2"
+RDEPENDS:${PN}:append:class-target = " python3-misc"
+RDEPENDS:${PN}:append:class-nativesdk = " python3-misc"
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/python/python3-iniconfig_1.1.1.bb b/meta/recipes-devtools/python/python3-iniconfig_1.1.1.bb
deleted file mode 100644
index 4643437b80..0000000000
--- a/meta/recipes-devtools/python/python3-iniconfig_1.1.1.bb
+++ /dev/null
@@ -1,12 +0,0 @@
-SUMMARY = "A small and simple INI-file parser module"
-HOMEPAGE = "https://pypi.org/project/iniconfig/"
-
-LICENSE = "MIT"
-LIC_FILES_CHKSUM = "file://LICENSE;md5=a6bb0320b04a0a503f12f69fea479de9"
-
-SRC_URI[md5sum] = "0b7f3be87481211c183eae095bcea6f1"
-SRC_URI[sha256sum] = "bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"
-
-DEPENDS += "python3-setuptools-scm-native"
-
-inherit pypi python_setuptools_build_meta
diff --git a/meta/recipes-devtools/python/python3-iniconfig_2.0.0.bb b/meta/recipes-devtools/python/python3-iniconfig_2.0.0.bb
new file mode 100644
index 0000000000..2abeec62f4
--- /dev/null
+++ b/meta/recipes-devtools/python/python3-iniconfig_2.0.0.bb
@@ -0,0 +1,13 @@
+SUMMARY = "A small and simple INI-file parser module"
+HOMEPAGE = "https://pypi.org/project/iniconfig/"
+
+LICENSE = "MIT"
+LIC_FILES_CHKSUM = "file://LICENSE;md5=a6bb0320b04a0a503f12f69fea479de9"
+
+SRC_URI[sha256sum] = "2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"
+
+DEPENDS += "python3-hatch-vcs-native"
+
+inherit pypi python_hatchling
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/python/python3-installer/interpreter.patch b/meta/recipes-devtools/python/python3-installer/interpreter.patch
index ef10ef1b45..7906769b90 100644
--- a/meta/recipes-devtools/python/python3-installer/interpreter.patch
+++ b/meta/recipes-devtools/python/python3-installer/interpreter.patch
@@ -1,3 +1,8 @@
+From 74fe171fa4a25c120607e9f8450cbdfee675c959 Mon Sep 17 00:00:00 2001
+From: Ross Burton <ross.burton@arm.com>
+Date: Mon, 14 Mar 2022 14:39:22 +0000
+Subject: [PATCH] python3-installer: add installer module
+
Let us override the hashbang directly (possibly upstreamable), and don't
play games with hashbangs: for now assume that even hashbangs with spaces
are simple (assume the spaces are only used to separate arguments) and
@@ -6,13 +11,18 @@ we don't have long hashbangs.
Upstream-Status: Inappropriate
Signed-off-by: Ross Burton <ross.burton@arm.com>
+---
+ src/installer/__main__.py | 9 ++++++++-
+ src/installer/scripts.py | 15 +--------------
+ 2 files changed, 9 insertions(+), 15 deletions(-)
+
diff --git a/src/installer/__main__.py b/src/installer/__main__.py
-index 3357ec5..d2fd8d2 100644
+index 51014b9..38de286 100644
--- a/src/installer/__main__.py
+++ b/src/installer/__main__.py
-@@ -23,6 +23,13 @@ def _get_main_parser() -> argparse.ArgumentParser:
+@@ -30,6 +30,13 @@ def _get_main_parser() -> argparse.ArgumentParser:
type=str,
- help="destination directory (prefix to prepend to each file)",
+ help="override prefix to install packages to",
)
+ parser.add_argument(
+ "--interpreter",
@@ -24,10 +34,10 @@ index 3357ec5..d2fd8d2 100644
parser.add_argument(
"--compile-bytecode",
action="append",
-@@ -73,7 +80,7 @@ def _main(cli_args: Sequence[str], program: Optional[str] = None) -> None:
+@@ -86,7 +93,7 @@ def _main(cli_args: Sequence[str], program: Optional[str] = None) -> None:
with WheelFile.open(args.wheel) as source:
destination = SchemeDictionaryDestination(
- scheme_dict=_get_scheme_dict(source.distribution),
+ scheme_dict=_get_scheme_dict(source.distribution, prefix=args.prefix),
- interpreter=sys.executable,
+ interpreter=args.interpreter,
script_kind=get_launcher_kind(),
@@ -56,6 +66,6 @@ index 7e3c8fc..ba6ed5a 100644
- # I don't understand a lick what this is trying to do.
- return b"#!/bin/sh\n'''exec' " + quoted + b' "$0" "$@"\n' + b"' '''"
+ return b"#!" + executable_bytes
-
-
+
+
class InvalidScript(ValueError):
diff --git a/meta/recipes-devtools/python/python3-installer_0.5.1.bb b/meta/recipes-devtools/python/python3-installer_0.5.1.bb
deleted file mode 100644
index f4f9e1bde6..0000000000
--- a/meta/recipes-devtools/python/python3-installer_0.5.1.bb
+++ /dev/null
@@ -1,22 +0,0 @@
-SUMMARY = "Library and tool for installing Python wheels"
-DESCRIPTION = "A low-level library for installing a Python package from a wheel distribution."
-HOMEPAGE = "https://installer.readthedocs.io/"
-BUGTRACKER = "https://github.com/pypa/installer/issues"
-
-LICENSE = "MIT"
-LIC_FILES_CHKSUM = "file://LICENSE;md5=5038641aec7a77451e31da828ebfae00"
-
-SRC_URI += "file://interpreter.patch"
-
-SRC_URI[sha256sum] = "f970995ec2bb815e2fdaf7977b26b2091e1e386f0f42eafd5ac811953dc5d445"
-
-inherit pypi python_flit_core
-
-DEPENDS:remove:class-native = "python3-installer-native"
-DEPENDS:append:class-native = " unzip-native"
-
-do_install:class-native () {
- python_pep517_do_bootstrap_install
-}
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/python/python3-installer_0.7.0.bb b/meta/recipes-devtools/python/python3-installer_0.7.0.bb
new file mode 100644
index 0000000000..9429705b1f
--- /dev/null
+++ b/meta/recipes-devtools/python/python3-installer_0.7.0.bb
@@ -0,0 +1,34 @@
+SUMMARY = "Library and tool for installing Python wheels"
+DESCRIPTION = "A low-level library for installing a Python package from a wheel distribution."
+HOMEPAGE = "https://installer.readthedocs.io/"
+BUGTRACKER = "https://github.com/pypa/installer/issues"
+
+LICENSE = "MIT"
+LIC_FILES_CHKSUM = "file://LICENSE;md5=5038641aec7a77451e31da828ebfae00"
+
+SRC_URI += "file://interpreter.patch"
+
+SRC_URI[sha256sum] = "a26d3e3116289bb08216e0d0f7d925fcef0b0194eedfa0c944bcaaa106c4b631"
+
+inherit pypi python_flit_core
+
+# Bootstrap the native build
+DEPENDS:remove:class-native = "python3-build-native python3-installer-native"
+
+RDEPENDS:${PN} += " \
+ python3-compile \
+ python3-compression \
+ python3-netclient \
+"
+
+INSTALL_WHEEL_COMPILE_BYTECODE:class-native = "--no-compile-bytecode"
+
+do_compile:class-native () {
+ python_flit_core_do_manual_build
+}
+
+do_install:prepend:class-native() {
+ export PYTHONPATH="${S}/src"
+}
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/python/python3-iso8601_1.0.2.bb b/meta/recipes-devtools/python/python3-iso8601_1.0.2.bb
deleted file mode 100644
index 93af233d29..0000000000
--- a/meta/recipes-devtools/python/python3-iso8601_1.0.2.bb
+++ /dev/null
@@ -1,15 +0,0 @@
-SUMMARY = "Simple module to parse ISO 8601 dates"
-HOMEPAGE = "http://pyiso8601.readthedocs.org/"
-LICENSE = "MIT"
-LIC_FILES_CHKSUM = "file://LICENSE;md5=b05625f2336fa024e8d57e65c6595844"
-
-SRC_URI[sha256sum] = "27f503220e6845d9db954fb212b95b0362d8b7e6c1b2326a87061c3de93594b1"
-
-inherit pypi python_poetry_core
-
-RDEPENDS:${PN} += "\
- ${PYTHON_PN}-datetime \
- ${PYTHON_PN}-numbers \
-"
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/python/python3-iso8601_2.1.0.bb b/meta/recipes-devtools/python/python3-iso8601_2.1.0.bb
new file mode 100644
index 0000000000..d7ab4a5a77
--- /dev/null
+++ b/meta/recipes-devtools/python/python3-iso8601_2.1.0.bb
@@ -0,0 +1,15 @@
+SUMMARY = "Simple module to parse ISO 8601 dates"
+HOMEPAGE = "http://pyiso8601.readthedocs.org/"
+LICENSE = "MIT"
+LIC_FILES_CHKSUM = "file://LICENSE;md5=aab31f2ef7ba214a5a341eaa47a7f367"
+
+SRC_URI[sha256sum] = "6b1d3829ee8921c4301998c909f7829fa9ed3cbdac0d3b16af2d743aed1ba8df"
+
+inherit pypi python_poetry_core
+
+RDEPENDS:${PN} += "\
+ python3-datetime \
+ python3-numbers \
+"
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/python/python3-isodate_0.6.1.bb b/meta/recipes-devtools/python/python3-isodate_0.6.1.bb
new file mode 100644
index 0000000000..293fb08277
--- /dev/null
+++ b/meta/recipes-devtools/python/python3-isodate_0.6.1.bb
@@ -0,0 +1,16 @@
+SUMMARY = "ISO 8601 date/time parser"
+HOMEPAGE = "https://github.com/gweis/isodate/"
+SECTION = "devel/python"
+LICENSE = "BSD-3-Clause"
+LIC_FILES_CHKSUM = "file://PKG-INFO;beginline=8;endline=8;md5=e910b35b0ef4e1f665b9a75d6afb7709"
+
+SRC_URI[sha256sum] = "48c5881de7e8b0a0d648cb024c8062dc84e7b840ed81e864c7614fd3c127bde9"
+
+inherit pypi setuptools3
+
+RDEPENDS:${PN} += " \
+ python3-numbers \
+ python3-six \
+"
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/python/python3-jinja2/run-ptest b/meta/recipes-devtools/python/python3-jinja2/run-ptest
index 5cec711696..8d2017d39c 100644
--- a/meta/recipes-devtools/python/python3-jinja2/run-ptest
+++ b/meta/recipes-devtools/python/python3-jinja2/run-ptest
@@ -1,3 +1,3 @@
#!/bin/sh
-pytest
+pytest --automake
diff --git a/meta/recipes-devtools/python/python3-jinja2_3.1.2.bb b/meta/recipes-devtools/python/python3-jinja2_3.1.2.bb
deleted file mode 100644
index 80e0b85670..0000000000
--- a/meta/recipes-devtools/python/python3-jinja2_3.1.2.bb
+++ /dev/null
@@ -1,48 +0,0 @@
-DESCRIPTION = "Python Jinja2: A small but fast and easy to use stand-alone template engine written in pure python."
-HOMEPAGE = "https://pypi.org/project/Jinja2/"
-
-LICENSE = "BSD-3-Clause"
-LIC_FILES_CHKSUM = "file://LICENSE.rst;md5=5dc88300786f1c214c1e9827a5229462"
-
-SRC_URI[sha256sum] = "31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852"
-
-PYPI_PACKAGE = "Jinja2"
-
-CVE_PRODUCT = "jinja2 jinja"
-
-CLEANBROKEN = "1"
-
-inherit pypi setuptools3
-inherit ${@bb.utils.filter('DISTRO_FEATURES', 'ptest', d)}
-
-SRC_URI += " \
- file://run-ptest \
-"
-
-do_install_ptest() {
- install -d ${D}${PTEST_PATH}/tests
- cp -rf ${S}/tests/* ${D}${PTEST_PATH}/tests/
-}
-
-RDEPENDS:${PN}-ptest += " \
- ${PYTHON_PN}-pytest \
- ${PYTHON_PN}-toml \
- ${PYTHON_PN}-unixadmin \
-"
-
-RDEPENDS:${PN} += " \
- ${PYTHON_PN}-asyncio \
- ${PYTHON_PN}-crypt \
- ${PYTHON_PN}-io \
- ${PYTHON_PN}-json \
- ${PYTHON_PN}-markupsafe \
- ${PYTHON_PN}-math \
- ${PYTHON_PN}-netclient \
- ${PYTHON_PN}-numbers\
- ${PYTHON_PN}-pickle \
- ${PYTHON_PN}-pprint \
- ${PYTHON_PN}-shell \
- ${PYTHON_PN}-threading \
-"
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/python/python3-jinja2_3.1.3.bb b/meta/recipes-devtools/python/python3-jinja2_3.1.3.bb
new file mode 100644
index 0000000000..636fb35811
--- /dev/null
+++ b/meta/recipes-devtools/python/python3-jinja2_3.1.3.bb
@@ -0,0 +1,48 @@
+SUMMARY = "Python Jinja2: A small but fast and easy to use stand-alone template engine written in pure python."
+HOMEPAGE = "https://pypi.org/project/Jinja2/"
+
+LICENSE = "BSD-3-Clause"
+LIC_FILES_CHKSUM = "file://LICENSE.rst;md5=5dc88300786f1c214c1e9827a5229462"
+
+SRC_URI[sha256sum] = "ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90"
+
+PYPI_PACKAGE = "Jinja2"
+
+CVE_PRODUCT = "jinja2 jinja"
+
+CLEANBROKEN = "1"
+
+inherit pypi setuptools3 ptest
+
+SRC_URI += " \
+ file://run-ptest \
+"
+
+do_install_ptest() {
+ install -d ${D}${PTEST_PATH}/tests
+ cp -rf ${S}/tests/* ${D}${PTEST_PATH}/tests/
+}
+
+RDEPENDS:${PN}-ptest += " \
+ python3-pytest \
+ python3-unittest-automake-output \
+ python3-toml \
+ python3-unixadmin \
+"
+
+RDEPENDS:${PN} += " \
+ python3-asyncio \
+ python3-crypt \
+ python3-io \
+ python3-json \
+ python3-markupsafe \
+ python3-math \
+ python3-netclient \
+ python3-numbers\
+ python3-pickle \
+ python3-pprint \
+ python3-shell \
+ python3-threading \
+"
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/python/python3-jsonpointer/run-ptest b/meta/recipes-devtools/python/python3-jsonpointer/run-ptest
index 51e609f4ba..7ebd69231f 100644
--- a/meta/recipes-devtools/python/python3-jsonpointer/run-ptest
+++ b/meta/recipes-devtools/python/python3-jsonpointer/run-ptest
@@ -1,3 +1,3 @@
#!/bin/sh
-python3 tests.py
+python3 -mputao.unittest tests.py
diff --git a/meta/recipes-devtools/python/python3-jsonpointer_2.3.bb b/meta/recipes-devtools/python/python3-jsonpointer_2.3.bb
deleted file mode 100644
index 16d5cab4cf..0000000000
--- a/meta/recipes-devtools/python/python3-jsonpointer_2.3.bb
+++ /dev/null
@@ -1,26 +0,0 @@
-SUMMARY = "Resolve JSON Pointers in Python"
-HOMEPAGE = "https://github.com/stefankoegl/python-json-pointer"
-LICENSE = "BSD-3-Clause"
-LIC_FILES_CHKSUM = "file://LICENSE.txt;md5=32b15c843b7a329130f4e266a281ebb3"
-
-inherit pypi ptest setuptools3
-
-SRC_URI[sha256sum] = "97cba51526c829282218feb99dab1b1e6bdf8efd1c43dc9d57be093c0d69c99a"
-
-RDEPENDS:${PN} += " \
- ${PYTHON_PN}-json \
-"
-
-BBCLASSEXTEND = "native nativesdk"
-
-SRC_URI += " \
- file://run-ptest \
-"
-
-RDEPENDS:${PN}-ptest += " \
- ${PYTHON_PN}-pytest \
-"
-
-do_install_ptest() {
- cp -f ${S}/tests.py ${D}${PTEST_PATH}/
-}
diff --git a/meta/recipes-devtools/python/python3-jsonpointer_2.4.bb b/meta/recipes-devtools/python/python3-jsonpointer_2.4.bb
new file mode 100644
index 0000000000..062fa5243f
--- /dev/null
+++ b/meta/recipes-devtools/python/python3-jsonpointer_2.4.bb
@@ -0,0 +1,28 @@
+SUMMARY = "Resolve JSON Pointers in Python"
+HOMEPAGE = "https://github.com/stefankoegl/python-json-pointer"
+LICENSE = "BSD-3-Clause"
+LIC_FILES_CHKSUM = "file://LICENSE.txt;md5=32b15c843b7a329130f4e266a281ebb3"
+
+inherit pypi ptest setuptools3
+
+SRC_URI[sha256sum] = "585cee82b70211fa9e6043b7bb89db6e1aa49524340dde8ad6b63206ea689d88"
+
+RDEPENDS:${PN} += " \
+ python3-json \
+"
+
+BBCLASSEXTEND = "native nativesdk"
+
+SRC_URI += " \
+ file://run-ptest \
+"
+
+RDEPENDS:${PN}-ptest += " \
+ python3-doctest \
+ python3-unittest \
+ python3-unittest-automake-output \
+"
+
+do_install_ptest() {
+ cp -f ${S}/tests.py ${D}${PTEST_PATH}/
+}
diff --git a/meta/recipes-devtools/python/python3-jsonschema-specifications_2023.12.1.bb b/meta/recipes-devtools/python/python3-jsonschema-specifications_2023.12.1.bb
new file mode 100644
index 0000000000..4ee0dd9b12
--- /dev/null
+++ b/meta/recipes-devtools/python/python3-jsonschema-specifications_2023.12.1.bb
@@ -0,0 +1,16 @@
+SUMMARY = "The JSON Schema meta-schemas and vocabularies, exposed as a Registry"
+DESCRIPTION = "JSON support files from the JSON Schema Specifications (metaschemas, \
+vocabularies, etc.), packaged for runtime access from Python as a referencing-based Schema Registry."
+HOMEPAGE = "https://pypi.org/project/jsonschema-specifications/"
+LICENSE = "MIT"
+LIC_FILES_CHKSUM = "file://COPYING;md5=93eb9740964b59e9ba30281255b044e2"
+
+SRC_URI[sha256sum] = "48a76787b3e70f5ed53f1160d2b81f586e4ca6d1548c5de7085d1682674764cc"
+
+inherit pypi python_hatchling
+
+PYPI_PACKAGE = "jsonschema_specifications"
+
+DEPENDS += "python3-hatch-vcs-native"
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/python/python3-jsonschema_4.21.1.bb b/meta/recipes-devtools/python/python3-jsonschema_4.21.1.bb
new file mode 100644
index 0000000000..381148f4bb
--- /dev/null
+++ b/meta/recipes-devtools/python/python3-jsonschema_4.21.1.bb
@@ -0,0 +1,50 @@
+SUMMARY = "An implementation of JSON Schema validation for Python"
+HOMEPAGE = "https://github.com/python-jsonschema/jsonschema"
+LICENSE = "MIT"
+LIC_FILES_CHKSUM = "file://COPYING;md5=7a60a81c146ec25599a3e1dabb8610a8 \
+ file://json/LICENSE;md5=9d4de43111d33570c8fe49b4cb0e01af"
+
+SRC_URI[sha256sum] = "85727c00279f5fa6bedbe6238d2aa6403bedd8b4864ab11207d07df3cc1b2ee5"
+
+inherit pypi python_hatchling
+
+PACKAGES =+ "${PN}-tests"
+FILES:${PN}-tests = "${libdir}/${PYTHON_DIR}/site-packages/jsonschema/tests"
+
+DEPENDS += "python3-hatch-fancy-pypi-readme-native python3-hatch-vcs-native "
+
+PACKAGECONFIG ??= "format"
+PACKAGECONFIG[format] = ",,,\
+ python3-idna \
+ python3-jsonpointer \
+ python3-webcolors \
+ python3-rfc3987 \
+ python3-rfc3339-validator \
+"
+PACKAGECONFIG[nongpl] = ",,,\
+ python3-idna \
+ python3-jsonpointer \
+ python3-webcolors \
+ python3-rfc3986-validator \
+ python3-rfc3339-validator \
+"
+
+RDEPENDS:${PN} += " \
+ python3-attrs \
+ python3-core \
+ python3-datetime \
+ python3-importlib-metadata \
+ python3-io \
+ python3-json \
+ python3-jsonschema-specifications \
+ python3-netclient \
+ python3-numbers \
+ python3-pprint \
+ python3-pyrsistent \
+ python3-referencing \
+ python3-zipp \
+"
+
+RDEPENDS:${PN}-tests = "${PN}"
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/python/python3-jsonschema_4.5.1.bb b/meta/recipes-devtools/python/python3-jsonschema_4.5.1.bb
deleted file mode 100644
index 9f8719870c..0000000000
--- a/meta/recipes-devtools/python/python3-jsonschema_4.5.1.bb
+++ /dev/null
@@ -1,48 +0,0 @@
-SUMMARY = "An implementation of JSON Schema validation for Python"
-HOMEPAGE = "https://github.com/Julian/jsonschema"
-LICENSE = "MIT"
-LIC_FILES_CHKSUM = "file://COPYING;md5=7a60a81c146ec25599a3e1dabb8610a8 \
- file://json/LICENSE;md5=9d4de43111d33570c8fe49b4cb0e01af"
-
-SRC_URI[sha256sum] = "7c6d882619340c3347a1bf7315e147e6d3dae439033ae6383d6acb908c101dfc"
-
-inherit pypi python_setuptools_build_meta
-
-DEPENDS += "${PYTHON_PN}-vcversioner-native ${PYTHON_PN}-setuptools-scm-native"
-
-PACKAGECONFIG ??= "format"
-PACKAGECONFIG[format] = ",,,\
- ${PYTHON_PN}-idna \
- ${PYTHON_PN}-jsonpointer \
- ${PYTHON_PN}-webcolors \
- ${PYTHON_PN}-rfc3987 \
- ${PYTHON_PN}-strict-rfc3339 \
-"
-PACKAGECONFIG[nongpl] = ",,,\
- ${PYTHON_PN}-idna \
- ${PYTHON_PN}-jsonpointer \
- ${PYTHON_PN}-webcolors \
- ${PYTHON_PN}-rfc3986-validator \
- ${PYTHON_PN}-rfc3339-validator \
-"
-
-RDEPENDS:${PN} += " \
- ${PYTHON_PN}-attrs \
- ${PYTHON_PN}-core \
- ${PYTHON_PN}-datetime \
- ${PYTHON_PN}-importlib-metadata \
- ${PYTHON_PN}-io \
- ${PYTHON_PN}-json \
- ${PYTHON_PN}-netclient \
- ${PYTHON_PN}-numbers \
- ${PYTHON_PN}-pkgutil \
- ${PYTHON_PN}-pprint \
- ${PYTHON_PN}-pyrsistent \
- ${PYTHON_PN}-shell \
- ${PYTHON_PN}-six \
- ${PYTHON_PN}-unittest \
- ${PYTHON_PN}-setuptools-scm \
- ${PYTHON_PN}-zipp \
-"
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/python/python3-libarchive-c_4.0.bb b/meta/recipes-devtools/python/python3-libarchive-c_4.0.bb
deleted file mode 100644
index 3c6bc1e1ca..0000000000
--- a/meta/recipes-devtools/python/python3-libarchive-c_4.0.bb
+++ /dev/null
@@ -1,21 +0,0 @@
-SUMMARY = "Python interface to libarchive"
-DESCRIPTION = "A Python interface to libarchive. It uses the standard ctypes module to \
- dynamically load and access the C library."
-HOMEPAGE = "https://github.com/Changaco/python-libarchive-c"
-LICENSE = "CC0-1.0"
-LIC_FILES_CHKSUM = "file://LICENSE.md;md5=bcab380227a83bc147350b40a81e6ffc"
-
-PYPI_PACKAGE = "libarchive-c"
-
-inherit pypi setuptools3
-
-SRC_URI[sha256sum] = "a5b41ade94ba58b198d778e68000f6b7de41da768de7140c984f71d7fa8416e5"
-
-RDEPENDS:${PN} += "\
- libarchive \
- ${PYTHON_PN}-ctypes \
- ${PYTHON_PN}-mmap \
- ${PYTHON_PN}-logging \
-"
-
-BBCLASSEXTEND = "native"
diff --git a/meta/recipes-devtools/python/python3-libarchive-c_5.1.bb b/meta/recipes-devtools/python/python3-libarchive-c_5.1.bb
new file mode 100644
index 0000000000..4e318e52f4
--- /dev/null
+++ b/meta/recipes-devtools/python/python3-libarchive-c_5.1.bb
@@ -0,0 +1,21 @@
+SUMMARY = "Python interface to libarchive"
+DESCRIPTION = "A Python interface to libarchive. It uses the standard ctypes module to \
+ dynamically load and access the C library."
+HOMEPAGE = "https://github.com/Changaco/python-libarchive-c"
+LICENSE = "CC0-1.0"
+LIC_FILES_CHKSUM = "file://LICENSE.md;md5=bcab380227a83bc147350b40a81e6ffc"
+
+PYPI_PACKAGE = "libarchive-c"
+
+inherit pypi setuptools3
+
+SRC_URI[sha256sum] = "7bcce24ea6c0fa3bc62468476c6d2f6264156db2f04878a372027c10615a2721"
+
+RDEPENDS:${PN} += "\
+ libarchive \
+ python3-ctypes \
+ python3-mmap \
+ python3-logging \
+"
+
+BBCLASSEXTEND = "native"
diff --git a/meta/recipes-devtools/python/python3-license-expression/run-ptest b/meta/recipes-devtools/python/python3-license-expression/run-ptest
new file mode 100644
index 0000000000..8d2017d39c
--- /dev/null
+++ b/meta/recipes-devtools/python/python3-license-expression/run-ptest
@@ -0,0 +1,3 @@
+#!/bin/sh
+
+pytest --automake
diff --git a/meta/recipes-devtools/python/python3-license-expression_30.3.0.bb b/meta/recipes-devtools/python/python3-license-expression_30.3.0.bb
new file mode 100644
index 0000000000..f36336b592
--- /dev/null
+++ b/meta/recipes-devtools/python/python3-license-expression_30.3.0.bb
@@ -0,0 +1,38 @@
+SUMMARY = "Utility library to parse, compare, simplify and normalize license expressions"
+HOMEPAGE = "https://github.com/nexB/license-expression"
+
+LICENSE = "Apache-2.0"
+LIC_FILES_CHKSUM = "file://apache-2.0.LICENSE;md5=86d3f3a95c324c9479bd8986968f4327"
+
+SRC_URI[sha256sum] = "1295406f736b4f395ff069aec1cebfad53c0fcb3cf57df0f5ec58fc7b905aea5"
+
+inherit pypi ptest python_setuptools_build_meta
+
+DEPENDS += "python3-setuptools-scm-native"
+
+RDEPENDS:${PN} += "\
+ python3-booleanpy \
+ python3-core \
+ python3-json \
+ python3-stringold \
+ python3-logging \
+"
+
+BBCLASSEXTEND = "native nativesdk"
+
+SRC_URI += " \
+ file://run-ptest \
+"
+
+RDEPENDS:${PN}-ptest += " \
+ python3-pytest \
+ python3-unittest-automake-output \
+"
+
+do_install_ptest() {
+ install -d ${D}${PTEST_PATH}/tests
+ install -d ${D}${PTEST_PATH}/src
+ cp -rf ${S}/tests/* ${D}${PTEST_PATH}/tests/
+ cp -rf ${S}/src/* ${D}${PTEST_PATH}/src/
+ cp -rf ${S}/setup.cfg ${D}${PTEST_PATH}/
+}
diff --git a/meta/recipes-devtools/python/python3-lxml_5.1.0.bb b/meta/recipes-devtools/python/python3-lxml_5.1.0.bb
new file mode 100644
index 0000000000..43719086f0
--- /dev/null
+++ b/meta/recipes-devtools/python/python3-lxml_5.1.0.bb
@@ -0,0 +1,42 @@
+SUMMARY = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API."
+DESCRIPTION = "lxml is a Pythonic, mature binding for the libxml2 and \
+libxslt libraries. It provides safe and convenient access to these \
+libraries using the ElementTree API. It extends the ElementTree API \
+significantly to offer support for XPath, RelaxNG, XML Schema, XSLT, \
+C14N and much more."
+HOMEPAGE = "https://lxml.de/"
+SECTION = "devel/python"
+LICENSE = "BSD-3-Clause & GPL-2.0-only & MIT & PSF-2.0"
+LIC_FILES_CHKSUM = "file://LICENSES.txt;md5=e4c045ebad958ead4b48008f70838403 \
+ file://doc/licenses/elementtree.txt;md5=eb34d036a6e3d56314ee49a6852ac891 \
+ file://doc/licenses/BSD.txt;md5=700a1fc17f4797d4f2d34970c8ee694b \
+ file://doc/licenses/GPL.txt;md5=94d55d512a9ba36caa9b7df079bae19f \
+ file://src/lxml/isoschematron/resources/rng/iso-schematron.rng;beginline=2;endline=7;md5=fc85684a8dd5fa272c086bceb0d99e10 \
+ file://src/lxml/isoschematron/resources/xsl/iso-schematron-xslt1/iso_schematron_message.xsl;beginline=2;endline=24;md5=cc86b7b2bbc678e13f58ea403eb9929b \
+ file://src/lxml/isoschematron/resources/xsl/RNG2Schtrn.xsl;beginline=2;endline=7;md5=5b03236d293dc3784205542b409d2f53 \
+ "
+
+DEPENDS += "libxml2 libxslt"
+
+SRC_URI[sha256sum] = "3eea6ed6e6c918e468e693c41ef07f3c3acc310b70ddd9cc72d9ef84bc9564ca"
+
+SRC_URI += "${PYPI_SRC_URI}"
+inherit pkgconfig pypi setuptools3
+
+# {standard input}: Assembler messages:
+# {standard input}:1488805: Error: branch out of range
+DEBUG_OPTIMIZATION:remove:mips = " -Og"
+DEBUG_OPTIMIZATION:append:mips = " -O"
+BUILD_OPTIMIZATION:remove:mips = " -Og"
+BUILD_OPTIMIZATION:append:mips = " -O"
+
+DEBUG_OPTIMIZATION:remove:mipsel = " -Og"
+DEBUG_OPTIMIZATION:append:mipsel = " -O"
+BUILD_OPTIMIZATION:remove:mipsel = " -Og"
+BUILD_OPTIMIZATION:append:mipsel = " -O"
+
+BBCLASSEXTEND = "native nativesdk"
+
+RDEPENDS:${PN} += "libxml2 libxslt python3-compression"
+
+CLEANBROKEN = "1"
diff --git a/meta/recipes-devtools/python/python3-magic_0.4.25.bb b/meta/recipes-devtools/python/python3-magic_0.4.25.bb
deleted file mode 100644
index efc8b6daaa..0000000000
--- a/meta/recipes-devtools/python/python3-magic_0.4.25.bb
+++ /dev/null
@@ -1,22 +0,0 @@
-SUMMARY = "File type identification using libmagic"
-DESCRIPTION = "This module uses ctypes to access the libmagic file type \
- identification library. It makes use of the local magic database and supports \
- both textual and MIME-type output."
-HOMEPAGE = "http://github.com/ahupp/python-magic"
-
-LICENSE = "MIT"
-LIC_FILES_CHKSUM = "file://LICENSE;md5=61495c152d794e6be5799a9edca149e3"
-
-PYPI_PACKAGE = "python-magic"
-
-inherit pypi setuptools3
-
-SRC_URI[sha256sum] = "21f5f542aa0330f5c8a64442528542f6215c8e18d2466b399b0d9d39356d83fc"
-
-RDEPENDS:${PN} += "file \
- ${PYTHON_PN}-ctypes \
- ${PYTHON_PN}-io \
- ${PYTHON_PN}-logging \
- ${PYTHON_PN}-shell"
-
-BBCLASSEXTEND = "native"
diff --git a/meta/recipes-devtools/python/python3-magic_0.4.27.bb b/meta/recipes-devtools/python/python3-magic_0.4.27.bb
new file mode 100644
index 0000000000..2e561e69d7
--- /dev/null
+++ b/meta/recipes-devtools/python/python3-magic_0.4.27.bb
@@ -0,0 +1,22 @@
+SUMMARY = "File type identification using libmagic"
+DESCRIPTION = "This module uses ctypes to access the libmagic file type \
+ identification library. It makes use of the local magic database and supports \
+ both textual and MIME-type output."
+HOMEPAGE = "http://github.com/ahupp/python-magic"
+
+LICENSE = "MIT"
+LIC_FILES_CHKSUM = "file://LICENSE;md5=61495c152d794e6be5799a9edca149e3"
+
+PYPI_PACKAGE = "python-magic"
+
+inherit pypi setuptools3
+
+SRC_URI[sha256sum] = "c1ba14b08e4a5f5c31a302b7721239695b2f0f058d125bd5ce1ee36b9d9d3c3b"
+
+RDEPENDS:${PN} += "file \
+ python3-ctypes \
+ python3-io \
+ python3-logging \
+ python3-shell"
+
+BBCLASSEXTEND = "native"
diff --git a/meta/recipes-devtools/python/python3-mako_1.2.0.bb b/meta/recipes-devtools/python/python3-mako_1.2.0.bb
deleted file mode 100644
index 11e5f326bd..0000000000
--- a/meta/recipes-devtools/python/python3-mako_1.2.0.bb
+++ /dev/null
@@ -1,20 +0,0 @@
-SUMMARY = "Templating library for Python"
-HOMEPAGE = "http://www.makotemplates.org/"
-SECTION = "devel/python"
-LICENSE = "MIT"
-LIC_FILES_CHKSUM = "file://LICENSE;md5=b32291f107a8f1ea94c4a41e00a6a18d"
-
-PYPI_PACKAGE = "Mako"
-
-inherit pypi python_setuptools_build_meta
-
-SRC_URI[sha256sum] = "9a7c7e922b87db3686210cf49d5d767033a41d4010b284e747682c92bddd8b39"
-
-RDEPENDS:${PN} = "${PYTHON_PN}-html \
- ${PYTHON_PN}-markupsafe \
- ${PYTHON_PN}-netclient \
- ${PYTHON_PN}-pygments \
- ${PYTHON_PN}-threading \
-"
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/python/python3-mako_1.3.2.bb b/meta/recipes-devtools/python/python3-mako_1.3.2.bb
new file mode 100644
index 0000000000..5b7df9192f
--- /dev/null
+++ b/meta/recipes-devtools/python/python3-mako_1.3.2.bb
@@ -0,0 +1,20 @@
+SUMMARY = "Templating library for Python"
+HOMEPAGE = "http://www.makotemplates.org/"
+SECTION = "devel/python"
+LICENSE = "MIT"
+LIC_FILES_CHKSUM = "file://LICENSE;md5=d0995d6f7ba3f186a03118f244e88f57"
+
+PYPI_PACKAGE = "Mako"
+
+inherit pypi python_setuptools_build_meta
+
+SRC_URI[sha256sum] = "2a0c8ad7f6274271b3bb7467dd37cf9cc6dab4bc19cb69a4ef10669402de698e"
+
+RDEPENDS:${PN} = "python3-html \
+ python3-markupsafe \
+ python3-netclient \
+ python3-pygments \
+ python3-threading \
+"
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/python/python3-markdown_3.3.7.bb b/meta/recipes-devtools/python/python3-markdown_3.3.7.bb
deleted file mode 100644
index c456cecc78..0000000000
--- a/meta/recipes-devtools/python/python3-markdown_3.3.7.bb
+++ /dev/null
@@ -1,13 +0,0 @@
-SUMMARY = "A Python implementation of John Gruber's Markdown."
-HOMEPAGE = "https://python-markdown.github.io/"
-LICENSE = "BSD-3-Clause"
-LIC_FILES_CHKSUM = "file://LICENSE.md;md5=745aaad0c69c60039e638bff9ffc59ed"
-
-inherit pypi python_setuptools_build_meta
-
-PYPI_PACKAGE = "Markdown"
-SRC_URI[sha256sum] = "cbb516f16218e643d8e0a95b309f77eb118cb138d39a4f27851e6a63581db874"
-
-BBCLASSEXTEND = "native"
-
-RDEPENDS:${PN} += "${PYTHON_PN}-logging ${PYTHON_PN}-setuptools"
diff --git a/meta/recipes-devtools/python/python3-markdown_3.6.bb b/meta/recipes-devtools/python/python3-markdown_3.6.bb
new file mode 100644
index 0000000000..7c64837395
--- /dev/null
+++ b/meta/recipes-devtools/python/python3-markdown_3.6.bb
@@ -0,0 +1,13 @@
+SUMMARY = "A Python implementation of John Gruber's Markdown."
+HOMEPAGE = "https://python-markdown.github.io/"
+LICENSE = "BSD-3-Clause"
+LIC_FILES_CHKSUM = "file://LICENSE.md;md5=ec58cdf7cfed06a21f7a9362627a5480"
+
+inherit pypi python_setuptools_build_meta
+
+PYPI_PACKAGE = "Markdown"
+SRC_URI[sha256sum] = "ed4f41f6daecbeeb96e576ce414c41d2d876daa9a16cb35fa8ed8c2ddfad0224"
+
+BBCLASSEXTEND = "native nativesdk"
+
+RDEPENDS:${PN} += "python3-logging python3-setuptools"
diff --git a/meta/recipes-devtools/python/python3-markupsafe/run-ptest b/meta/recipes-devtools/python/python3-markupsafe/run-ptest
index 5cec711696..8d2017d39c 100644
--- a/meta/recipes-devtools/python/python3-markupsafe/run-ptest
+++ b/meta/recipes-devtools/python/python3-markupsafe/run-ptest
@@ -1,3 +1,3 @@
#!/bin/sh
-pytest
+pytest --automake
diff --git a/meta/recipes-devtools/python/python3-markupsafe_2.1.1.bb b/meta/recipes-devtools/python/python3-markupsafe_2.1.1.bb
deleted file mode 100644
index 0544dd1b83..0000000000
--- a/meta/recipes-devtools/python/python3-markupsafe_2.1.1.bb
+++ /dev/null
@@ -1,27 +0,0 @@
-DESCRIPTION = "Implements a XML/HTML/XHTML Markup safe string for Python"
-HOMEPAGE = "http://github.com/mitsuhiko/markupsafe"
-LICENSE = "BSD-3-Clause"
-LIC_FILES_CHKSUM = "file://LICENSE.rst;md5=ffeffa59c90c9c4a033c7574f8f3fb75"
-
-SRC_URI[sha256sum] = "7f91197cc9e48f989d12e4e6fbc46495c446636dfc81b9ccf50bb0ec74b91d4b"
-
-PYPI_PACKAGE = "MarkupSafe"
-inherit pypi setuptools3
-inherit ${@bb.utils.filter('DISTRO_FEATURES', 'ptest', d)}
-
-RDEPENDS:${PN} += "${PYTHON_PN}-stringold"
-
-BBCLASSEXTEND = "native nativesdk"
-
-SRC_URI += " \
- file://run-ptest \
-"
-
-RDEPENDS:${PN}-ptest += " \
- ${PYTHON_PN}-pytest \
-"
-
-do_install_ptest() {
- install -d ${D}${PTEST_PATH}/tests
- cp -f ${S}/tests/* ${D}${PTEST_PATH}/tests/
-}
diff --git a/meta/recipes-devtools/python/python3-markupsafe_2.1.5.bb b/meta/recipes-devtools/python/python3-markupsafe_2.1.5.bb
new file mode 100644
index 0000000000..821332fe7c
--- /dev/null
+++ b/meta/recipes-devtools/python/python3-markupsafe_2.1.5.bb
@@ -0,0 +1,27 @@
+SUMMARY = "Implements a XML/HTML/XHTML Markup safe string for Python"
+HOMEPAGE = "http://github.com/mitsuhiko/markupsafe"
+LICENSE = "BSD-3-Clause"
+LIC_FILES_CHKSUM = "file://LICENSE.rst;md5=ffeffa59c90c9c4a033c7574f8f3fb75"
+
+SRC_URI[sha256sum] = "d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"
+
+PYPI_PACKAGE = "MarkupSafe"
+inherit pypi python_setuptools_build_meta ptest
+
+RDEPENDS:${PN} += "python3-stringold"
+
+BBCLASSEXTEND = "native nativesdk"
+
+SRC_URI += " \
+ file://run-ptest \
+"
+
+RDEPENDS:${PN}-ptest += " \
+ python3-pytest \
+ python3-unittest-automake-output \
+"
+
+do_install_ptest() {
+ install -d ${D}${PTEST_PATH}/tests
+ cp -f ${S}/tests/* ${D}${PTEST_PATH}/tests/
+}
diff --git a/meta/recipes-devtools/python/python3-maturin-crates.inc b/meta/recipes-devtools/python/python3-maturin-crates.inc
new file mode 100644
index 0000000000..3cb614d52a
--- /dev/null
+++ b/meta/recipes-devtools/python/python3-maturin-crates.inc
@@ -0,0 +1,618 @@
+# Autogenerated with 'bitbake -c update_crates python3-maturin'
+
+# from Cargo.lock
+SRC_URI += " \
+ crate://crates.io/adler/1.0.2 \
+ crate://crates.io/ahash/0.8.6 \
+ crate://crates.io/aho-corasick/1.1.2 \
+ crate://crates.io/allocator-api2/0.2.16 \
+ crate://crates.io/anstream/0.3.2 \
+ crate://crates.io/anstream/0.6.4 \
+ crate://crates.io/anstyle/1.0.2 \
+ crate://crates.io/anstyle-parse/0.2.1 \
+ crate://crates.io/anstyle-query/1.0.0 \
+ crate://crates.io/anstyle-wincon/1.0.2 \
+ crate://crates.io/anstyle-wincon/3.0.1 \
+ crate://crates.io/anyhow/1.0.75 \
+ crate://crates.io/autocfg/1.1.0 \
+ crate://crates.io/base64/0.13.1 \
+ crate://crates.io/base64/0.21.5 \
+ crate://crates.io/bitflags/1.3.2 \
+ crate://crates.io/bitflags/2.4.1 \
+ crate://crates.io/block-buffer/0.10.4 \
+ crate://crates.io/bstr/1.8.0 \
+ crate://crates.io/byteorder/1.5.0 \
+ crate://crates.io/bytes/1.5.0 \
+ crate://crates.io/bytesize/1.3.0 \
+ crate://crates.io/bzip2/0.4.4 \
+ crate://crates.io/bzip2-sys/0.1.11+1.0.8 \
+ crate://crates.io/cab/0.4.1 \
+ crate://crates.io/camino/1.1.6 \
+ crate://crates.io/cargo-config2/0.1.16 \
+ crate://crates.io/cargo-options/0.7.2 \
+ crate://crates.io/cargo-platform/0.1.5 \
+ crate://crates.io/cargo-xwin/0.16.2 \
+ crate://crates.io/cargo-zigbuild/0.18.0 \
+ crate://crates.io/cargo_metadata/0.18.1 \
+ crate://crates.io/cbindgen/0.26.0 \
+ crate://crates.io/cc/1.0.83 \
+ crate://crates.io/cfb/0.9.0 \
+ crate://crates.io/cfg-if/1.0.0 \
+ crate://crates.io/charset/0.1.3 \
+ crate://crates.io/chumsky/0.9.3 \
+ crate://crates.io/clap/4.3.24 \
+ crate://crates.io/clap_builder/4.3.24 \
+ crate://crates.io/clap_complete/4.3.2 \
+ crate://crates.io/clap_complete_command/0.5.1 \
+ crate://crates.io/clap_complete_fig/4.3.1 \
+ crate://crates.io/clap_complete_nushell/0.1.11 \
+ crate://crates.io/clap_derive/4.3.12 \
+ crate://crates.io/clap_lex/0.5.0 \
+ crate://crates.io/cli-table/0.4.7 \
+ crate://crates.io/colorchoice/1.0.0 \
+ crate://crates.io/configparser/3.0.3 \
+ crate://crates.io/console/0.15.7 \
+ crate://crates.io/content_inspector/0.2.4 \
+ crate://crates.io/core-foundation/0.9.3 \
+ crate://crates.io/core-foundation-sys/0.8.4 \
+ crate://crates.io/cpufeatures/0.2.11 \
+ crate://crates.io/crc32fast/1.3.2 \
+ crate://crates.io/crossbeam-channel/0.5.8 \
+ crate://crates.io/crossbeam-deque/0.8.3 \
+ crate://crates.io/crossbeam-epoch/0.9.15 \
+ crate://crates.io/crossbeam-utils/0.8.16 \
+ crate://crates.io/crypto-common/0.1.6 \
+ crate://crates.io/data-encoding/2.5.0 \
+ crate://crates.io/deranged/0.3.9 \
+ crate://crates.io/dialoguer/0.11.0 \
+ crate://crates.io/diff/0.1.13 \
+ crate://crates.io/digest/0.10.7 \
+ crate://crates.io/dirs/5.0.1 \
+ crate://crates.io/dirs-sys/0.4.1 \
+ crate://crates.io/dissimilar/1.0.7 \
+ crate://crates.io/dunce/1.0.4 \
+ crate://crates.io/either/1.9.0 \
+ crate://crates.io/encode_unicode/0.3.6 \
+ crate://crates.io/encoding_rs/0.8.33 \
+ crate://crates.io/equivalent/1.0.1 \
+ crate://crates.io/errno/0.3.8 \
+ crate://crates.io/expect-test/1.4.1 \
+ crate://crates.io/fastrand/2.0.1 \
+ crate://crates.io/fat-macho/0.4.7 \
+ crate://crates.io/filetime/0.2.22 \
+ crate://crates.io/flate2/1.0.28 \
+ crate://crates.io/fnv/1.0.7 \
+ crate://crates.io/foreign-types/0.3.2 \
+ crate://crates.io/foreign-types-shared/0.1.1 \
+ crate://crates.io/form_urlencoded/1.2.1 \
+ crate://crates.io/fs-err/2.11.0 \
+ crate://crates.io/generic-array/0.14.7 \
+ crate://crates.io/getrandom/0.2.11 \
+ crate://crates.io/glob/0.3.1 \
+ crate://crates.io/globset/0.4.14 \
+ crate://crates.io/goblin/0.7.1 \
+ crate://crates.io/hashbrown/0.12.3 \
+ crate://crates.io/hashbrown/0.14.3 \
+ crate://crates.io/heck/0.4.1 \
+ crate://crates.io/hermit-abi/0.3.3 \
+ crate://crates.io/home/0.5.5 \
+ crate://crates.io/humantime/2.1.0 \
+ crate://crates.io/humantime-serde/1.1.1 \
+ crate://crates.io/idna/0.5.0 \
+ crate://crates.io/ignore/0.4.20 \
+ crate://crates.io/indexmap/1.9.3 \
+ crate://crates.io/indexmap/2.1.0 \
+ crate://crates.io/indicatif/0.17.7 \
+ crate://crates.io/indoc/2.0.4 \
+ crate://crates.io/instant/0.1.12 \
+ crate://crates.io/io-lifetimes/1.0.11 \
+ crate://crates.io/is-terminal/0.4.9 \
+ crate://crates.io/itertools/0.11.0 \
+ crate://crates.io/itertools/0.12.0 \
+ crate://crates.io/itoa/1.0.9 \
+ crate://crates.io/keyring/2.0.5 \
+ crate://crates.io/lazy_static/1.4.0 \
+ crate://crates.io/lddtree/0.3.3 \
+ crate://crates.io/libc/0.2.150 \
+ crate://crates.io/libredox/0.0.1 \
+ crate://crates.io/linux-keyutils/0.2.3 \
+ crate://crates.io/linux-raw-sys/0.3.8 \
+ crate://crates.io/linux-raw-sys/0.4.11 \
+ crate://crates.io/lock_api/0.4.11 \
+ crate://crates.io/log/0.4.20 \
+ crate://crates.io/lzxd/0.1.4 \
+ crate://crates.io/mailparse/0.14.0 \
+ crate://crates.io/matchers/0.1.0 \
+ crate://crates.io/memchr/2.6.4 \
+ crate://crates.io/memoffset/0.9.0 \
+ crate://crates.io/mime/0.3.17 \
+ crate://crates.io/mime_guess/2.0.4 \
+ crate://crates.io/minijinja/1.0.10 \
+ crate://crates.io/minimal-lexical/0.2.1 \
+ crate://crates.io/miniz_oxide/0.7.1 \
+ crate://crates.io/msi/0.7.0 \
+ crate://crates.io/multipart/0.18.0 \
+ crate://crates.io/native-tls/0.2.11 \
+ crate://crates.io/nom/7.1.3 \
+ crate://crates.io/normalize-line-endings/0.3.0 \
+ crate://crates.io/normpath/1.1.1 \
+ crate://crates.io/nu-ansi-term/0.46.0 \
+ crate://crates.io/number_prefix/0.4.0 \
+ crate://crates.io/once_cell/1.18.0 \
+ crate://crates.io/openssl/0.10.60 \
+ crate://crates.io/openssl-macros/0.1.1 \
+ crate://crates.io/openssl-probe/0.1.5 \
+ crate://crates.io/openssl-sys/0.9.96 \
+ crate://crates.io/option-ext/0.2.0 \
+ crate://crates.io/os_pipe/1.1.4 \
+ crate://crates.io/overload/0.1.1 \
+ crate://crates.io/parking_lot/0.12.1 \
+ crate://crates.io/parking_lot_core/0.9.9 \
+ crate://crates.io/paste/1.0.14 \
+ crate://crates.io/path-slash/0.2.1 \
+ crate://crates.io/pep440_rs/0.3.12 \
+ crate://crates.io/pep508_rs/0.2.3 \
+ crate://crates.io/percent-encoding/2.3.1 \
+ crate://crates.io/pin-project-lite/0.2.13 \
+ crate://crates.io/pkg-config/0.3.27 \
+ crate://crates.io/plain/0.2.3 \
+ crate://crates.io/platform-info/2.0.2 \
+ crate://crates.io/portable-atomic/1.5.1 \
+ crate://crates.io/powerfmt/0.2.0 \
+ crate://crates.io/ppv-lite86/0.2.17 \
+ crate://crates.io/pretty_assertions/1.4.0 \
+ crate://crates.io/proc-macro2/1.0.70 \
+ crate://crates.io/psm/0.1.21 \
+ crate://crates.io/pyproject-toml/0.8.1 \
+ crate://crates.io/python-pkginfo/0.6.0 \
+ crate://crates.io/quote/1.0.33 \
+ crate://crates.io/quoted_printable/0.4.8 \
+ crate://crates.io/rand/0.8.5 \
+ crate://crates.io/rand_chacha/0.3.1 \
+ crate://crates.io/rand_core/0.6.4 \
+ crate://crates.io/rayon/1.8.0 \
+ crate://crates.io/rayon-core/1.12.0 \
+ crate://crates.io/redox_syscall/0.3.5 \
+ crate://crates.io/redox_syscall/0.4.1 \
+ crate://crates.io/redox_users/0.4.4 \
+ crate://crates.io/regex/1.10.2 \
+ crate://crates.io/regex-automata/0.1.10 \
+ crate://crates.io/regex-automata/0.4.3 \
+ crate://crates.io/regex-syntax/0.6.29 \
+ crate://crates.io/regex-syntax/0.8.2 \
+ crate://crates.io/rfc2047-decoder/0.2.2 \
+ crate://crates.io/ring/0.17.6 \
+ crate://crates.io/rustc_version/0.4.0 \
+ crate://crates.io/rustix/0.37.27 \
+ crate://crates.io/rustix/0.38.21 \
+ crate://crates.io/rustls/0.21.9 \
+ crate://crates.io/rustls-pemfile/2.0.0 \
+ crate://crates.io/rustls-pki-types/1.0.0 \
+ crate://crates.io/rustls-webpki/0.101.7 \
+ crate://crates.io/rustversion/1.0.14 \
+ crate://crates.io/ryu/1.0.15 \
+ crate://crates.io/same-file/1.0.6 \
+ crate://crates.io/schannel/0.1.22 \
+ crate://crates.io/scopeguard/1.2.0 \
+ crate://crates.io/scroll/0.11.0 \
+ crate://crates.io/scroll_derive/0.11.1 \
+ crate://crates.io/sct/0.7.1 \
+ crate://crates.io/security-framework/2.9.2 \
+ crate://crates.io/security-framework-sys/2.9.1 \
+ crate://crates.io/semver/1.0.20 \
+ crate://crates.io/serde/1.0.193 \
+ crate://crates.io/serde_derive/1.0.193 \
+ crate://crates.io/serde_json/1.0.108 \
+ crate://crates.io/serde_spanned/0.6.4 \
+ crate://crates.io/sha2/0.10.8 \
+ crate://crates.io/sharded-slab/0.1.7 \
+ crate://crates.io/shell-words/1.1.0 \
+ crate://crates.io/shlex/1.2.0 \
+ crate://crates.io/similar/2.3.0 \
+ crate://crates.io/smallvec/1.11.2 \
+ crate://crates.io/smawk/0.3.2 \
+ crate://crates.io/snapbox/0.4.14 \
+ crate://crates.io/snapbox-macros/0.3.6 \
+ crate://crates.io/socks/0.3.4 \
+ crate://crates.io/spin/0.9.8 \
+ crate://crates.io/stacker/0.1.15 \
+ crate://crates.io/static_assertions/1.1.0 \
+ crate://crates.io/strsim/0.10.0 \
+ crate://crates.io/syn/1.0.109 \
+ crate://crates.io/syn/2.0.39 \
+ crate://crates.io/tar/0.4.40 \
+ crate://crates.io/target-lexicon/0.12.12 \
+ crate://crates.io/tempfile/3.8.1 \
+ crate://crates.io/termcolor/1.4.0 \
+ crate://crates.io/terminal_size/0.2.6 \
+ crate://crates.io/textwrap/0.16.0 \
+ crate://crates.io/thiserror/1.0.50 \
+ crate://crates.io/thiserror-impl/1.0.50 \
+ crate://crates.io/thread_local/1.1.7 \
+ crate://crates.io/time/0.3.30 \
+ crate://crates.io/time-core/0.1.2 \
+ crate://crates.io/time-macros/0.2.15 \
+ crate://crates.io/tinyvec/1.6.0 \
+ crate://crates.io/tinyvec_macros/0.1.1 \
+ crate://crates.io/toml/0.5.11 \
+ crate://crates.io/toml/0.8.8 \
+ crate://crates.io/toml_datetime/0.6.5 \
+ crate://crates.io/toml_edit/0.20.7 \
+ crate://crates.io/toml_edit/0.21.0 \
+ crate://crates.io/tracing/0.1.40 \
+ crate://crates.io/tracing-attributes/0.1.27 \
+ crate://crates.io/tracing-core/0.1.32 \
+ crate://crates.io/tracing-log/0.2.0 \
+ crate://crates.io/tracing-serde/0.1.3 \
+ crate://crates.io/tracing-subscriber/0.3.18 \
+ crate://crates.io/trycmd/0.14.19 \
+ crate://crates.io/twox-hash/1.6.3 \
+ crate://crates.io/typenum/1.17.0 \
+ crate://crates.io/unicase/2.7.0 \
+ crate://crates.io/unicode-bidi/0.3.13 \
+ crate://crates.io/unicode-ident/1.0.12 \
+ crate://crates.io/unicode-linebreak/0.1.5 \
+ crate://crates.io/unicode-normalization/0.1.22 \
+ crate://crates.io/unicode-width/0.1.11 \
+ crate://crates.io/untrusted/0.9.0 \
+ crate://crates.io/ureq/2.9.1 \
+ crate://crates.io/url/2.5.0 \
+ crate://crates.io/utf8parse/0.2.1 \
+ crate://crates.io/uuid/1.6.1 \
+ crate://crates.io/valuable/0.1.0 \
+ crate://crates.io/vcpkg/0.2.15 \
+ crate://crates.io/version_check/0.9.4 \
+ crate://crates.io/versions/5.0.1 \
+ crate://crates.io/wait-timeout/0.2.0 \
+ crate://crates.io/walkdir/2.4.0 \
+ crate://crates.io/wasi/0.11.0+wasi-snapshot-preview1 \
+ crate://crates.io/webpki-roots/0.25.3 \
+ crate://crates.io/which/5.0.0 \
+ crate://crates.io/wild/2.2.0 \
+ crate://crates.io/winapi/0.3.9 \
+ crate://crates.io/winapi-i686-pc-windows-gnu/0.4.0 \
+ crate://crates.io/winapi-util/0.1.6 \
+ crate://crates.io/winapi-x86_64-pc-windows-gnu/0.4.0 \
+ crate://crates.io/windows-sys/0.45.0 \
+ crate://crates.io/windows-sys/0.48.0 \
+ crate://crates.io/windows-sys/0.52.0 \
+ crate://crates.io/windows-targets/0.42.2 \
+ crate://crates.io/windows-targets/0.48.5 \
+ crate://crates.io/windows-targets/0.52.0 \
+ crate://crates.io/windows_aarch64_gnullvm/0.42.2 \
+ crate://crates.io/windows_aarch64_gnullvm/0.48.5 \
+ crate://crates.io/windows_aarch64_gnullvm/0.52.0 \
+ crate://crates.io/windows_aarch64_msvc/0.42.2 \
+ crate://crates.io/windows_aarch64_msvc/0.48.5 \
+ crate://crates.io/windows_aarch64_msvc/0.52.0 \
+ crate://crates.io/windows_i686_gnu/0.42.2 \
+ crate://crates.io/windows_i686_gnu/0.48.5 \
+ crate://crates.io/windows_i686_gnu/0.52.0 \
+ crate://crates.io/windows_i686_msvc/0.42.2 \
+ crate://crates.io/windows_i686_msvc/0.48.5 \
+ crate://crates.io/windows_i686_msvc/0.52.0 \
+ crate://crates.io/windows_x86_64_gnu/0.42.2 \
+ crate://crates.io/windows_x86_64_gnu/0.48.5 \
+ crate://crates.io/windows_x86_64_gnu/0.52.0 \
+ crate://crates.io/windows_x86_64_gnullvm/0.42.2 \
+ crate://crates.io/windows_x86_64_gnullvm/0.48.5 \
+ crate://crates.io/windows_x86_64_gnullvm/0.52.0 \
+ crate://crates.io/windows_x86_64_msvc/0.42.2 \
+ crate://crates.io/windows_x86_64_msvc/0.48.5 \
+ crate://crates.io/windows_x86_64_msvc/0.52.0 \
+ crate://crates.io/winnow/0.5.19 \
+ crate://crates.io/xattr/1.0.1 \
+ crate://crates.io/xwin/0.5.0 \
+ crate://crates.io/yansi/0.5.1 \
+ crate://crates.io/zerocopy/0.7.28 \
+ crate://crates.io/zerocopy-derive/0.7.28 \
+ crate://crates.io/zeroize/1.7.0 \
+ crate://crates.io/zip/0.6.6 \
+"
+
+SRC_URI[adler-1.0.2.sha256sum] = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe"
+SRC_URI[ahash-0.8.6.sha256sum] = "91429305e9f0a25f6205c5b8e0d2db09e0708a7a6df0f42212bb56c32c8ac97a"
+SRC_URI[aho-corasick-1.1.2.sha256sum] = "b2969dcb958b36655471fc61f7e416fa76033bdd4bfed0678d8fee1e2d07a1f0"
+SRC_URI[allocator-api2-0.2.16.sha256sum] = "0942ffc6dcaadf03badf6e6a2d0228460359d5e34b57ccdc720b7382dfbd5ec5"
+SRC_URI[anstream-0.3.2.sha256sum] = "0ca84f3628370c59db74ee214b3263d58f9aadd9b4fe7e711fd87dc452b7f163"
+SRC_URI[anstream-0.6.4.sha256sum] = "2ab91ebe16eb252986481c5b62f6098f3b698a45e34b5b98200cf20dd2484a44"
+SRC_URI[anstyle-1.0.2.sha256sum] = "15c4c2c83f81532e5845a733998b6971faca23490340a418e9b72a3ec9de12ea"
+SRC_URI[anstyle-parse-0.2.1.sha256sum] = "938874ff5980b03a87c5524b3ae5b59cf99b1d6bc836848df7bc5ada9643c333"
+SRC_URI[anstyle-query-1.0.0.sha256sum] = "5ca11d4be1bab0c8bc8734a9aa7bf4ee8316d462a08c6ac5052f888fef5b494b"
+SRC_URI[anstyle-wincon-1.0.2.sha256sum] = "c677ab05e09154296dd37acecd46420c17b9713e8366facafa8fc0885167cf4c"
+SRC_URI[anstyle-wincon-3.0.1.sha256sum] = "f0699d10d2f4d628a98ee7b57b289abbc98ff3bad977cb3152709d4bf2330628"
+SRC_URI[anyhow-1.0.75.sha256sum] = "a4668cab20f66d8d020e1fbc0ebe47217433c1b6c8f2040faf858554e394ace6"
+SRC_URI[autocfg-1.1.0.sha256sum] = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa"
+SRC_URI[base64-0.13.1.sha256sum] = "9e1b586273c5702936fe7b7d6896644d8be71e6314cfe09d3167c95f712589e8"
+SRC_URI[base64-0.21.5.sha256sum] = "35636a1494ede3b646cc98f74f8e62c773a38a659ebc777a2cf26b9b74171df9"
+SRC_URI[bitflags-1.3.2.sha256sum] = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a"
+SRC_URI[bitflags-2.4.1.sha256sum] = "327762f6e5a765692301e5bb513e0d9fef63be86bbc14528052b1cd3e6f03e07"
+SRC_URI[block-buffer-0.10.4.sha256sum] = "3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71"
+SRC_URI[bstr-1.8.0.sha256sum] = "542f33a8835a0884b006a0c3df3dadd99c0c3f296ed26c2fdc8028e01ad6230c"
+SRC_URI[byteorder-1.5.0.sha256sum] = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b"
+SRC_URI[bytes-1.5.0.sha256sum] = "a2bd12c1caf447e69cd4528f47f94d203fd2582878ecb9e9465484c4148a8223"
+SRC_URI[bytesize-1.3.0.sha256sum] = "a3e368af43e418a04d52505cf3dbc23dda4e3407ae2fa99fd0e4f308ce546acc"
+SRC_URI[bzip2-0.4.4.sha256sum] = "bdb116a6ef3f6c3698828873ad02c3014b3c85cadb88496095628e3ef1e347f8"
+SRC_URI[bzip2-sys-0.1.11+1.0.8.sha256sum] = "736a955f3fa7875102d57c82b8cac37ec45224a07fd32d58f9f7a186b6cd4cdc"
+SRC_URI[cab-0.4.1.sha256sum] = "ae6b4de23c7d39c0631fd3cc952d87951c86c75a13812d7247cb7a896e7b3551"
+SRC_URI[camino-1.1.6.sha256sum] = "c59e92b5a388f549b863a7bea62612c09f24c8393560709a54558a9abdfb3b9c"
+SRC_URI[cargo-config2-0.1.16.sha256sum] = "7f387366785e82c7ec7ef5b845a7f61324fbf5d467d6a878469eac30c1c44b1b"
+SRC_URI[cargo-options-0.7.2.sha256sum] = "cad71bf996c8e5b9d28ef3472d7ee41f277edf4e38cd597f51ad0438d05d76ea"
+SRC_URI[cargo-platform-0.1.5.sha256sum] = "e34637b3140142bdf929fb439e8aa4ebad7651ebf7b1080b3930aa16ac1459ff"
+SRC_URI[cargo-xwin-0.16.2.sha256sum] = "02bb6bf59526935e47445f959a19c2168f151284dbf7e57f5577934334e9a61d"
+SRC_URI[cargo-zigbuild-0.18.0.sha256sum] = "edc9c2fe646a29983b5f7263bd789175f2aaad7ea42525affa40e2172be93286"
+SRC_URI[cargo_metadata-0.18.1.sha256sum] = "2d886547e41f740c616ae73108f6eb70afe6d940c7bc697cb30f13daec073037"
+SRC_URI[cbindgen-0.26.0.sha256sum] = "da6bc11b07529f16944307272d5bd9b22530bc7d05751717c9d416586cedab49"
+SRC_URI[cc-1.0.83.sha256sum] = "f1174fb0b6ec23863f8b971027804a42614e347eafb0a95bf0b12cdae21fc4d0"
+SRC_URI[cfb-0.9.0.sha256sum] = "b390793e912300f1aa713429f7fd0c391024e6c18b988962558bc4f96a349b1f"
+SRC_URI[cfg-if-1.0.0.sha256sum] = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
+SRC_URI[charset-0.1.3.sha256sum] = "18e9079d1a12a2cc2bffb5db039c43661836ead4082120d5844f02555aca2d46"
+SRC_URI[chumsky-0.9.3.sha256sum] = "8eebd66744a15ded14960ab4ccdbfb51ad3b81f51f3f04a80adac98c985396c9"
+SRC_URI[clap-4.3.24.sha256sum] = "fb690e81c7840c0d7aade59f242ea3b41b9bc27bcd5997890e7702ae4b32e487"
+SRC_URI[clap_builder-4.3.24.sha256sum] = "5ed2e96bc16d8d740f6f48d663eddf4b8a0983e79210fd55479b7bcd0a69860e"
+SRC_URI[clap_complete-4.3.2.sha256sum] = "5fc443334c81a804575546c5a8a79b4913b50e28d69232903604cada1de817ce"
+SRC_URI[clap_complete_command-0.5.1.sha256sum] = "183495371ea78d4c9ff638bfc6497d46fed2396e4f9c50aebc1278a4a9919a3d"
+SRC_URI[clap_complete_fig-4.3.1.sha256sum] = "99fee1d30a51305a6c2ed3fc5709be3c8af626c9c958e04dd9ae94e27bcbce9f"
+SRC_URI[clap_complete_nushell-0.1.11.sha256sum] = "5d02bc8b1a18ee47c4d2eec3fb5ac034dc68ebea6125b1509e9ccdffcddce66e"
+SRC_URI[clap_derive-4.3.12.sha256sum] = "54a9bb5758fc5dfe728d1019941681eccaf0cf8a4189b692a0ee2f2ecf90a050"
+SRC_URI[clap_lex-0.5.0.sha256sum] = "2da6da31387c7e4ef160ffab6d5e7f00c42626fe39aea70a7b0f1773f7dd6c1b"
+SRC_URI[cli-table-0.4.7.sha256sum] = "adfbb116d9e2c4be7011360d0c0bee565712c11e969c9609b25b619366dc379d"
+SRC_URI[colorchoice-1.0.0.sha256sum] = "acbf1af155f9b9ef647e42cdc158db4b64a1b61f743629225fde6f3e0be2a7c7"
+SRC_URI[configparser-3.0.3.sha256sum] = "e0e56e414a2a52ab2a104f85cd40933c2fbc278b83637facf646ecf451b49237"
+SRC_URI[console-0.15.7.sha256sum] = "c926e00cc70edefdc64d3a5ff31cc65bb97a3460097762bd23afb4d8145fccf8"
+SRC_URI[content_inspector-0.2.4.sha256sum] = "b7bda66e858c683005a53a9a60c69a4aca7eeaa45d124526e389f7aec8e62f38"
+SRC_URI[core-foundation-0.9.3.sha256sum] = "194a7a9e6de53fa55116934067c844d9d749312f75c6f6d0980e8c252f8c2146"
+SRC_URI[core-foundation-sys-0.8.4.sha256sum] = "e496a50fda8aacccc86d7529e2c1e0892dbd0f898a6b5645b5561b89c3210efa"
+SRC_URI[cpufeatures-0.2.11.sha256sum] = "ce420fe07aecd3e67c5f910618fe65e94158f6dcc0adf44e00d69ce2bdfe0fd0"
+SRC_URI[crc32fast-1.3.2.sha256sum] = "b540bd8bc810d3885c6ea91e2018302f68baba2129ab3e88f32389ee9370880d"
+SRC_URI[crossbeam-channel-0.5.8.sha256sum] = "a33c2bf77f2df06183c3aa30d1e96c0695a313d4f9c453cc3762a6db39f99200"
+SRC_URI[crossbeam-deque-0.8.3.sha256sum] = "ce6fd6f855243022dcecf8702fef0c297d4338e226845fe067f6341ad9fa0cef"
+SRC_URI[crossbeam-epoch-0.9.15.sha256sum] = "ae211234986c545741a7dc064309f67ee1e5ad243d0e48335adc0484d960bcc7"
+SRC_URI[crossbeam-utils-0.8.16.sha256sum] = "5a22b2d63d4d1dc0b7f1b6b2747dd0088008a9be28b6ddf0b1e7d335e3037294"
+SRC_URI[crypto-common-0.1.6.sha256sum] = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3"
+SRC_URI[data-encoding-2.5.0.sha256sum] = "7e962a19be5cfc3f3bf6dd8f61eb50107f356ad6270fbb3ed41476571db78be5"
+SRC_URI[deranged-0.3.9.sha256sum] = "0f32d04922c60427da6f9fef14d042d9edddef64cb9d4ce0d64d0685fbeb1fd3"
+SRC_URI[dialoguer-0.11.0.sha256sum] = "658bce805d770f407bc62102fca7c2c64ceef2fbcb2b8bd19d2765ce093980de"
+SRC_URI[diff-0.1.13.sha256sum] = "56254986775e3233ffa9c4d7d3faaf6d36a2c09d30b20687e9f88bc8bafc16c8"
+SRC_URI[digest-0.10.7.sha256sum] = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292"
+SRC_URI[dirs-5.0.1.sha256sum] = "44c45a9d03d6676652bcb5e724c7e988de1acad23a711b5217ab9cbecbec2225"
+SRC_URI[dirs-sys-0.4.1.sha256sum] = "520f05a5cbd335fae5a99ff7a6ab8627577660ee5cfd6a94a6a929b52ff0321c"
+SRC_URI[dissimilar-1.0.7.sha256sum] = "86e3bdc80eee6e16b2b6b0f87fbc98c04bee3455e35174c0de1a125d0688c632"
+SRC_URI[dunce-1.0.4.sha256sum] = "56ce8c6da7551ec6c462cbaf3bfbc75131ebbfa1c944aeaa9dab51ca1c5f0c3b"
+SRC_URI[either-1.9.0.sha256sum] = "a26ae43d7bcc3b814de94796a5e736d4029efb0ee900c12e2d54c993ad1a1e07"
+SRC_URI[encode_unicode-0.3.6.sha256sum] = "a357d28ed41a50f9c765dbfe56cbc04a64e53e5fc58ba79fbc34c10ef3df831f"
+SRC_URI[encoding_rs-0.8.33.sha256sum] = "7268b386296a025e474d5140678f75d6de9493ae55a5d709eeb9dd08149945e1"
+SRC_URI[equivalent-1.0.1.sha256sum] = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5"
+SRC_URI[errno-0.3.8.sha256sum] = "a258e46cdc063eb8519c00b9fc845fc47bcfca4130e2f08e88665ceda8474245"
+SRC_URI[expect-test-1.4.1.sha256sum] = "30d9eafeadd538e68fb28016364c9732d78e420b9ff8853fa5e4058861e9f8d3"
+SRC_URI[fastrand-2.0.1.sha256sum] = "25cbce373ec4653f1a01a31e8a5e5ec0c622dc27ff9c4e6606eefef5cbbed4a5"
+SRC_URI[fat-macho-0.4.7.sha256sum] = "63fa117c7dcabeb8c83d5c229764cfa46518545d2dba5a9a08912014711f997b"
+SRC_URI[filetime-0.2.22.sha256sum] = "d4029edd3e734da6fe05b6cd7bd2960760a616bd2ddd0d59a0124746d6272af0"
+SRC_URI[flate2-1.0.28.sha256sum] = "46303f565772937ffe1d394a4fac6f411c6013172fadde9dcdb1e147a086940e"
+SRC_URI[fnv-1.0.7.sha256sum] = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1"
+SRC_URI[foreign-types-0.3.2.sha256sum] = "f6f339eb8adc052cd2ca78910fda869aefa38d22d5cb648e6485e4d3fc06f3b1"
+SRC_URI[foreign-types-shared-0.1.1.sha256sum] = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b"
+SRC_URI[form_urlencoded-1.2.1.sha256sum] = "e13624c2627564efccf4934284bdd98cbaa14e79b0b5a141218e507b3a823456"
+SRC_URI[fs-err-2.11.0.sha256sum] = "88a41f105fe1d5b6b34b2055e3dc59bb79b46b48b2040b9e6c7b4b5de097aa41"
+SRC_URI[generic-array-0.14.7.sha256sum] = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a"
+SRC_URI[getrandom-0.2.11.sha256sum] = "fe9006bed769170c11f845cf00c7c1e9092aeb3f268e007c3e760ac68008070f"
+SRC_URI[glob-0.3.1.sha256sum] = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b"
+SRC_URI[globset-0.4.14.sha256sum] = "57da3b9b5b85bd66f31093f8c408b90a74431672542466497dcbdfdc02034be1"
+SRC_URI[goblin-0.7.1.sha256sum] = "f27c1b4369c2cd341b5de549380158b105a04c331be5db9110eef7b6d2742134"
+SRC_URI[hashbrown-0.12.3.sha256sum] = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888"
+SRC_URI[hashbrown-0.14.3.sha256sum] = "290f1a1d9242c78d09ce40a5e87e7554ee637af1351968159f4952f028f75604"
+SRC_URI[heck-0.4.1.sha256sum] = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8"
+SRC_URI[hermit-abi-0.3.3.sha256sum] = "d77f7ec81a6d05a3abb01ab6eb7590f6083d08449fe5a1c8b1e620283546ccb7"
+SRC_URI[home-0.5.5.sha256sum] = "5444c27eef6923071f7ebcc33e3444508466a76f7a2b93da00ed6e19f30c1ddb"
+SRC_URI[humantime-2.1.0.sha256sum] = "9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4"
+SRC_URI[humantime-serde-1.1.1.sha256sum] = "57a3db5ea5923d99402c94e9feb261dc5ee9b4efa158b0315f788cf549cc200c"
+SRC_URI[idna-0.5.0.sha256sum] = "634d9b1461af396cad843f47fdba5597a4f9e6ddd4bfb6ff5d85028c25cb12f6"
+SRC_URI[ignore-0.4.20.sha256sum] = "dbe7873dab538a9a44ad79ede1faf5f30d49f9a5c883ddbab48bce81b64b7492"
+SRC_URI[indexmap-1.9.3.sha256sum] = "bd070e393353796e801d209ad339e89596eb4c8d430d18ede6a1cced8fafbd99"
+SRC_URI[indexmap-2.1.0.sha256sum] = "d530e1a18b1cb4c484e6e34556a0d948706958449fca0cab753d649f2bce3d1f"
+SRC_URI[indicatif-0.17.7.sha256sum] = "fb28741c9db9a713d93deb3bb9515c20788cef5815265bee4980e87bde7e0f25"
+SRC_URI[indoc-2.0.4.sha256sum] = "1e186cfbae8084e513daff4240b4797e342f988cecda4fb6c939150f96315fd8"
+SRC_URI[instant-0.1.12.sha256sum] = "7a5bbe824c507c5da5956355e86a746d82e0e1464f65d862cc5e71da70e94b2c"
+SRC_URI[io-lifetimes-1.0.11.sha256sum] = "eae7b9aee968036d54dce06cebaefd919e4472e753296daccd6d344e3e2df0c2"
+SRC_URI[is-terminal-0.4.9.sha256sum] = "cb0889898416213fab133e1d33a0e5858a48177452750691bde3666d0fdbaf8b"
+SRC_URI[itertools-0.11.0.sha256sum] = "b1c173a5686ce8bfa551b3563d0c2170bf24ca44da99c7ca4bfdab5418c3fe57"
+SRC_URI[itertools-0.12.0.sha256sum] = "25db6b064527c5d482d0423354fcd07a89a2dfe07b67892e62411946db7f07b0"
+SRC_URI[itoa-1.0.9.sha256sum] = "af150ab688ff2122fcef229be89cb50dd66af9e01a4ff320cc137eecc9bacc38"
+SRC_URI[keyring-2.0.5.sha256sum] = "9549a129bd08149e0a71b2d1ce2729780d47127991bfd0a78cc1df697ec72492"
+SRC_URI[lazy_static-1.4.0.sha256sum] = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
+SRC_URI[lddtree-0.3.3.sha256sum] = "2f5bfec46830ad3a95199ae6804dfe9f51fdad43d7a95fbb6c185efa9824c295"
+SRC_URI[libc-0.2.150.sha256sum] = "89d92a4743f9a61002fae18374ed11e7973f530cb3a3255fb354818118b2203c"
+SRC_URI[libredox-0.0.1.sha256sum] = "85c833ca1e66078851dba29046874e38f08b2c883700aa29a03ddd3b23814ee8"
+SRC_URI[linux-keyutils-0.2.3.sha256sum] = "3f27bb67f6dd1d0bb5ab582868e4f65052e58da6401188a08f0da09cf512b84b"
+SRC_URI[linux-raw-sys-0.3.8.sha256sum] = "ef53942eb7bf7ff43a617b3e2c1c4a5ecf5944a7c1bc12d7ee39bbb15e5c1519"
+SRC_URI[linux-raw-sys-0.4.11.sha256sum] = "969488b55f8ac402214f3f5fd243ebb7206cf82de60d3172994707a4bcc2b829"
+SRC_URI[lock_api-0.4.11.sha256sum] = "3c168f8615b12bc01f9c17e2eb0cc07dcae1940121185446edc3744920e8ef45"
+SRC_URI[log-0.4.20.sha256sum] = "b5e6163cb8c49088c2c36f57875e58ccd8c87c7427f7fbd50ea6710b2f3f2e8f"
+SRC_URI[lzxd-0.1.4.sha256sum] = "784462f20dddd9dfdb45de963fa4ad4a288cb10a7889ac5d2c34fb6481c6b213"
+SRC_URI[mailparse-0.14.0.sha256sum] = "6b56570f5f8c0047260d1c8b5b331f62eb9c660b9dd4071a8c46f8c7d3f280aa"
+SRC_URI[matchers-0.1.0.sha256sum] = "8263075bb86c5a1b1427b5ae862e8889656f126e9f77c484496e8b47cf5c5558"
+SRC_URI[memchr-2.6.4.sha256sum] = "f665ee40bc4a3c5590afb1e9677db74a508659dfd71e126420da8274909a0167"
+SRC_URI[memoffset-0.9.0.sha256sum] = "5a634b1c61a95585bd15607c6ab0c4e5b226e695ff2800ba0cdccddf208c406c"
+SRC_URI[mime-0.3.17.sha256sum] = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a"
+SRC_URI[mime_guess-2.0.4.sha256sum] = "4192263c238a5f0d0c6bfd21f336a313a4ce1c450542449ca191bb657b4642ef"
+SRC_URI[minijinja-1.0.10.sha256sum] = "208758577ef2c86cf5dd3e85730d161413ec3284e2d73b2ef65d9a24d9971bcb"
+SRC_URI[minimal-lexical-0.2.1.sha256sum] = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a"
+SRC_URI[miniz_oxide-0.7.1.sha256sum] = "e7810e0be55b428ada41041c41f32c9f1a42817901b4ccf45fa3d4b6561e74c7"
+SRC_URI[msi-0.7.0.sha256sum] = "226b2404f03d2cf47375b9715c8adfae4e388bb2377cff908e8a40f31e421514"
+SRC_URI[multipart-0.18.0.sha256sum] = "00dec633863867f29cb39df64a397cdf4a6354708ddd7759f70c7fb51c5f9182"
+SRC_URI[native-tls-0.2.11.sha256sum] = "07226173c32f2926027b63cce4bcd8076c3552846cbe7925f3aaffeac0a3b92e"
+SRC_URI[nom-7.1.3.sha256sum] = "d273983c5a657a70a3e8f2a01329822f3b8c8172b73826411a55751e404a0a4a"
+SRC_URI[normalize-line-endings-0.3.0.sha256sum] = "61807f77802ff30975e01f4f071c8ba10c022052f98b3294119f3e615d13e5be"
+SRC_URI[normpath-1.1.1.sha256sum] = "ec60c60a693226186f5d6edf073232bfb6464ed97eb22cf3b01c1e8198fd97f5"
+SRC_URI[nu-ansi-term-0.46.0.sha256sum] = "77a8165726e8236064dbb45459242600304b42a5ea24ee2948e18e023bf7ba84"
+SRC_URI[number_prefix-0.4.0.sha256sum] = "830b246a0e5f20af87141b25c173cd1b609bd7779a4617d6ec582abaf90870f3"
+SRC_URI[once_cell-1.18.0.sha256sum] = "dd8b5dd2ae5ed71462c540258bedcb51965123ad7e7ccf4b9a8cafaa4a63576d"
+SRC_URI[openssl-0.10.60.sha256sum] = "79a4c6c3a2b158f7f8f2a2fc5a969fa3a068df6fc9dbb4a43845436e3af7c800"
+SRC_URI[openssl-macros-0.1.1.sha256sum] = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c"
+SRC_URI[openssl-probe-0.1.5.sha256sum] = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf"
+SRC_URI[openssl-sys-0.9.96.sha256sum] = "3812c071ba60da8b5677cc12bcb1d42989a65553772897a7e0355545a819838f"
+SRC_URI[option-ext-0.2.0.sha256sum] = "04744f49eae99ab78e0d5c0b603ab218f515ea8cfe5a456d7629ad883a3b6e7d"
+SRC_URI[os_pipe-1.1.4.sha256sum] = "0ae859aa07428ca9a929b936690f8b12dc5f11dd8c6992a18ca93919f28bc177"
+SRC_URI[overload-0.1.1.sha256sum] = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39"
+SRC_URI[parking_lot-0.12.1.sha256sum] = "3742b2c103b9f06bc9fff0a37ff4912935851bee6d36f3c02bcc755bcfec228f"
+SRC_URI[parking_lot_core-0.9.9.sha256sum] = "4c42a9226546d68acdd9c0a280d17ce19bfe27a46bf68784e4066115788d008e"
+SRC_URI[paste-1.0.14.sha256sum] = "de3145af08024dea9fa9914f381a17b8fc6034dfb00f3a84013f7ff43f29ed4c"
+SRC_URI[path-slash-0.2.1.sha256sum] = "1e91099d4268b0e11973f036e885d652fb0b21fedcf69738c627f94db6a44f42"
+SRC_URI[pep440_rs-0.3.12.sha256sum] = "887f66cc62717ea72caac4f1eb4e6f392224da3ffff3f40ec13ab427802746d6"
+SRC_URI[pep508_rs-0.2.3.sha256sum] = "e4516b53d9ea6112ebb38b4af08d5707d30b994fb7f98ff133c5dcf7ed8fa854"
+SRC_URI[percent-encoding-2.3.1.sha256sum] = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e"
+SRC_URI[pin-project-lite-0.2.13.sha256sum] = "8afb450f006bf6385ca15ef45d71d2288452bc3683ce2e2cacc0d18e4be60b58"
+SRC_URI[pkg-config-0.3.27.sha256sum] = "26072860ba924cbfa98ea39c8c19b4dd6a4a25423dbdf219c1eca91aa0cf6964"
+SRC_URI[plain-0.2.3.sha256sum] = "b4596b6d070b27117e987119b4dac604f3c58cfb0b191112e24771b2faeac1a6"
+SRC_URI[platform-info-2.0.2.sha256sum] = "d6259c4860e53bf665016f1b2f46a8859cadfa717581dc9d597ae4069de6300f"
+SRC_URI[portable-atomic-1.5.1.sha256sum] = "3bccab0e7fd7cc19f820a1c8c91720af652d0c88dc9664dd72aef2614f04af3b"
+SRC_URI[powerfmt-0.2.0.sha256sum] = "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391"
+SRC_URI[ppv-lite86-0.2.17.sha256sum] = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de"
+SRC_URI[pretty_assertions-1.4.0.sha256sum] = "af7cee1a6c8a5b9208b3cb1061f10c0cb689087b3d8ce85fb9d2dd7a29b6ba66"
+SRC_URI[proc-macro2-1.0.70.sha256sum] = "39278fbbf5fb4f646ce651690877f89d1c5811a3d4acb27700c1cb3cdb78fd3b"
+SRC_URI[psm-0.1.21.sha256sum] = "5787f7cda34e3033a72192c018bc5883100330f362ef279a8cbccfce8bb4e874"
+SRC_URI[pyproject-toml-0.8.1.sha256sum] = "46d4a5e69187f23a29f8aa0ea57491d104ba541bc55f76552c2a74962aa20e04"
+SRC_URI[python-pkginfo-0.6.0.sha256sum] = "037469c164f08c891bf6d69ca02f1d56210011451e229618669777df82124cfa"
+SRC_URI[quote-1.0.33.sha256sum] = "5267fca4496028628a95160fc423a33e8b2e6af8a5302579e322e4b520293cae"
+SRC_URI[quoted_printable-0.4.8.sha256sum] = "5a3866219251662ec3b26fc217e3e05bf9c4f84325234dfb96bf0bf840889e49"
+SRC_URI[rand-0.8.5.sha256sum] = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404"
+SRC_URI[rand_chacha-0.3.1.sha256sum] = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88"
+SRC_URI[rand_core-0.6.4.sha256sum] = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c"
+SRC_URI[rayon-1.8.0.sha256sum] = "9c27db03db7734835b3f53954b534c91069375ce6ccaa2e065441e07d9b6cdb1"
+SRC_URI[rayon-core-1.12.0.sha256sum] = "5ce3fb6ad83f861aac485e76e1985cd109d9a3713802152be56c3b1f0e0658ed"
+SRC_URI[redox_syscall-0.3.5.sha256sum] = "567664f262709473930a4bf9e51bf2ebf3348f2e748ccc50dea20646858f8f29"
+SRC_URI[redox_syscall-0.4.1.sha256sum] = "4722d768eff46b75989dd134e5c353f0d6296e5aaa3132e776cbdb56be7731aa"
+SRC_URI[redox_users-0.4.4.sha256sum] = "a18479200779601e498ada4e8c1e1f50e3ee19deb0259c25825a98b5603b2cb4"
+SRC_URI[regex-1.10.2.sha256sum] = "380b951a9c5e80ddfd6136919eef32310721aa4aacd4889a8d39124b026ab343"
+SRC_URI[regex-automata-0.1.10.sha256sum] = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132"
+SRC_URI[regex-automata-0.4.3.sha256sum] = "5f804c7828047e88b2d32e2d7fe5a105da8ee3264f01902f796c8e067dc2483f"
+SRC_URI[regex-syntax-0.6.29.sha256sum] = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1"
+SRC_URI[regex-syntax-0.8.2.sha256sum] = "c08c74e62047bb2de4ff487b251e4a92e24f48745648451635cec7d591162d9f"
+SRC_URI[rfc2047-decoder-0.2.2.sha256sum] = "61fc4b4e52897c3e30b12b7e9b04461215b647fbe66f6def60dd8edbce14ec2e"
+SRC_URI[ring-0.17.6.sha256sum] = "684d5e6e18f669ccebf64a92236bb7db9a34f07be010e3627368182027180866"
+SRC_URI[rustc_version-0.4.0.sha256sum] = "bfa0f585226d2e68097d4f95d113b15b83a82e819ab25717ec0590d9584ef366"
+SRC_URI[rustix-0.37.27.sha256sum] = "fea8ca367a3a01fe35e6943c400addf443c0f57670e6ec51196f71a4b8762dd2"
+SRC_URI[rustix-0.38.21.sha256sum] = "2b426b0506e5d50a7d8dafcf2e81471400deb602392c7dd110815afb4eaf02a3"
+SRC_URI[rustls-0.21.9.sha256sum] = "629648aced5775d558af50b2b4c7b02983a04b312126d45eeead26e7caa498b9"
+SRC_URI[rustls-pemfile-2.0.0.sha256sum] = "35e4980fa29e4c4b212ffb3db068a564cbf560e51d3944b7c88bd8bf5bec64f4"
+SRC_URI[rustls-pki-types-1.0.0.sha256sum] = "eb0a1f9b9efec70d32e6d6aa3e58ebd88c3754ec98dfe9145c63cf54cc829b83"
+SRC_URI[rustls-webpki-0.101.7.sha256sum] = "8b6275d1ee7a1cd780b64aca7726599a1dbc893b1e64144529e55c3c2f745765"
+SRC_URI[rustversion-1.0.14.sha256sum] = "7ffc183a10b4478d04cbbbfc96d0873219d962dd5accaff2ffbd4ceb7df837f4"
+SRC_URI[ryu-1.0.15.sha256sum] = "1ad4cc8da4ef723ed60bced201181d83791ad433213d8c24efffda1eec85d741"
+SRC_URI[same-file-1.0.6.sha256sum] = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502"
+SRC_URI[schannel-0.1.22.sha256sum] = "0c3733bf4cf7ea0880754e19cb5a462007c4a8c1914bff372ccc95b464f1df88"
+SRC_URI[scopeguard-1.2.0.sha256sum] = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49"
+SRC_URI[scroll-0.11.0.sha256sum] = "04c565b551bafbef4157586fa379538366e4385d42082f255bfd96e4fe8519da"
+SRC_URI[scroll_derive-0.11.1.sha256sum] = "1db149f81d46d2deba7cd3c50772474707729550221e69588478ebf9ada425ae"
+SRC_URI[sct-0.7.1.sha256sum] = "da046153aa2352493d6cb7da4b6e5c0c057d8a1d0a9aa8560baffdd945acd414"
+SRC_URI[security-framework-2.9.2.sha256sum] = "05b64fb303737d99b81884b2c63433e9ae28abebe5eb5045dcdd175dc2ecf4de"
+SRC_URI[security-framework-sys-2.9.1.sha256sum] = "e932934257d3b408ed8f30db49d85ea163bfe74961f017f405b025af298f0c7a"
+SRC_URI[semver-1.0.20.sha256sum] = "836fa6a3e1e547f9a2c4040802ec865b5d85f4014efe00555d7090a3dcaa1090"
+SRC_URI[serde-1.0.193.sha256sum] = "25dd9975e68d0cb5aa1120c288333fc98731bd1dd12f561e468ea4728c042b89"
+SRC_URI[serde_derive-1.0.193.sha256sum] = "43576ca501357b9b071ac53cdc7da8ef0cbd9493d8df094cd821777ea6e894d3"
+SRC_URI[serde_json-1.0.108.sha256sum] = "3d1c7e3eac408d115102c4c24ad393e0821bb3a5df4d506a80f85f7a742a526b"
+SRC_URI[serde_spanned-0.6.4.sha256sum] = "12022b835073e5b11e90a14f86838ceb1c8fb0325b72416845c487ac0fa95e80"
+SRC_URI[sha2-0.10.8.sha256sum] = "793db75ad2bcafc3ffa7c68b215fee268f537982cd901d132f89c6343f3a3dc8"
+SRC_URI[sharded-slab-0.1.7.sha256sum] = "f40ca3c46823713e0d4209592e8d6e826aa57e928f09752619fc696c499637f6"
+SRC_URI[shell-words-1.1.0.sha256sum] = "24188a676b6ae68c3b2cb3a01be17fbf7240ce009799bb56d5b1409051e78fde"
+SRC_URI[shlex-1.2.0.sha256sum] = "a7cee0529a6d40f580e7a5e6c495c8fbfe21b7b52795ed4bb5e62cdf92bc6380"
+SRC_URI[similar-2.3.0.sha256sum] = "2aeaf503862c419d66959f5d7ca015337d864e9c49485d771b732e2a20453597"
+SRC_URI[smallvec-1.11.2.sha256sum] = "4dccd0940a2dcdf68d092b8cbab7dc0ad8fa938bf95787e1b916b0e3d0e8e970"
+SRC_URI[smawk-0.3.2.sha256sum] = "b7c388c1b5e93756d0c740965c41e8822f866621d41acbdf6336a6a168f8840c"
+SRC_URI[snapbox-0.4.14.sha256sum] = "4b377c0b6e4715c116473d8e40d51e3fa5b0a2297ca9b2a931ba800667b259ed"
+SRC_URI[snapbox-macros-0.3.6.sha256sum] = "ed1559baff8a696add3322b9be3e940d433e7bb4e38d79017205fd37ff28b28e"
+SRC_URI[socks-0.3.4.sha256sum] = "f0c3dbbd9ae980613c6dd8e28a9407b50509d3803b57624d5dfe8315218cd58b"
+SRC_URI[spin-0.9.8.sha256sum] = "6980e8d7511241f8acf4aebddbb1ff938df5eebe98691418c4468d0b72a96a67"
+SRC_URI[stacker-0.1.15.sha256sum] = "c886bd4480155fd3ef527d45e9ac8dd7118a898a46530b7b94c3e21866259fce"
+SRC_URI[static_assertions-1.1.0.sha256sum] = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f"
+SRC_URI[strsim-0.10.0.sha256sum] = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623"
+SRC_URI[syn-1.0.109.sha256sum] = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237"
+SRC_URI[syn-2.0.39.sha256sum] = "23e78b90f2fcf45d3e842032ce32e3f2d1545ba6636271dcbf24fa306d87be7a"
+SRC_URI[tar-0.4.40.sha256sum] = "b16afcea1f22891c49a00c751c7b63b2233284064f11a200fc624137c51e2ddb"
+SRC_URI[target-lexicon-0.12.12.sha256sum] = "14c39fd04924ca3a864207c66fc2cd7d22d7c016007f9ce846cbb9326331930a"
+SRC_URI[tempfile-3.8.1.sha256sum] = "7ef1adac450ad7f4b3c28589471ade84f25f731a7a0fe30d71dfa9f60fd808e5"
+SRC_URI[termcolor-1.4.0.sha256sum] = "ff1bc3d3f05aff0403e8ac0d92ced918ec05b666a43f83297ccef5bea8a3d449"
+SRC_URI[terminal_size-0.2.6.sha256sum] = "8e6bf6f19e9f8ed8d4048dc22981458ebcf406d67e94cd422e5ecd73d63b3237"
+SRC_URI[textwrap-0.16.0.sha256sum] = "222a222a5bfe1bba4a77b45ec488a741b3cb8872e5e499451fd7d0129c9c7c3d"
+SRC_URI[thiserror-1.0.50.sha256sum] = "f9a7210f5c9a7156bb50aa36aed4c95afb51df0df00713949448cf9e97d382d2"
+SRC_URI[thiserror-impl-1.0.50.sha256sum] = "266b2e40bc00e5a6c09c3584011e08b06f123c00362c92b975ba9843aaaa14b8"
+SRC_URI[thread_local-1.1.7.sha256sum] = "3fdd6f064ccff2d6567adcb3873ca630700f00b5ad3f060c25b5dcfd9a4ce152"
+SRC_URI[time-0.3.30.sha256sum] = "c4a34ab300f2dee6e562c10a046fc05e358b29f9bf92277f30c3c8d82275f6f5"
+SRC_URI[time-core-0.1.2.sha256sum] = "ef927ca75afb808a4d64dd374f00a2adf8d0fcff8e7b184af886c3c87ec4a3f3"
+SRC_URI[time-macros-0.2.15.sha256sum] = "4ad70d68dba9e1f8aceda7aa6711965dfec1cac869f311a51bd08b3a2ccbce20"
+SRC_URI[tinyvec-1.6.0.sha256sum] = "87cc5ceb3875bb20c2890005a4e226a4651264a5c75edb2421b52861a0a0cb50"
+SRC_URI[tinyvec_macros-0.1.1.sha256sum] = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20"
+SRC_URI[toml-0.5.11.sha256sum] = "f4f7f0dd8d50a853a531c426359045b1998f04219d88799810762cd4ad314234"
+SRC_URI[toml-0.8.8.sha256sum] = "a1a195ec8c9da26928f773888e0742ca3ca1040c6cd859c919c9f59c1954ab35"
+SRC_URI[toml_datetime-0.6.5.sha256sum] = "3550f4e9685620ac18a50ed434eb3aec30db8ba93b0287467bca5826ea25baf1"
+SRC_URI[toml_edit-0.20.7.sha256sum] = "70f427fce4d84c72b5b732388bf4a9f4531b53f74e2887e3ecb2481f68f66d81"
+SRC_URI[toml_edit-0.21.0.sha256sum] = "d34d383cd00a163b4a5b85053df514d45bc330f6de7737edfe0a93311d1eaa03"
+SRC_URI[tracing-0.1.40.sha256sum] = "c3523ab5a71916ccf420eebdf5521fcef02141234bbc0b8a49f2fdc4544364ef"
+SRC_URI[tracing-attributes-0.1.27.sha256sum] = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7"
+SRC_URI[tracing-core-0.1.32.sha256sum] = "c06d3da6113f116aaee68e4d601191614c9053067f9ab7f6edbcb161237daa54"
+SRC_URI[tracing-log-0.2.0.sha256sum] = "ee855f1f400bd0e5c02d150ae5de3840039a3f54b025156404e34c23c03f47c3"
+SRC_URI[tracing-serde-0.1.3.sha256sum] = "bc6b213177105856957181934e4920de57730fc69bf42c37ee5bb664d406d9e1"
+SRC_URI[tracing-subscriber-0.3.18.sha256sum] = "ad0f048c97dbd9faa9b7df56362b8ebcaa52adb06b498c050d2f4e32f90a7a8b"
+SRC_URI[trycmd-0.14.19.sha256sum] = "ed009372a42fb103e6f8767b9222925485e03cca032b700d203e2c5b67bee4fb"
+SRC_URI[twox-hash-1.6.3.sha256sum] = "97fee6b57c6a41524a810daee9286c02d7752c4253064d0b05472833a438f675"
+SRC_URI[typenum-1.17.0.sha256sum] = "42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825"
+SRC_URI[unicase-2.7.0.sha256sum] = "f7d2d4dafb69621809a81864c9c1b864479e1235c0dd4e199924b9742439ed89"
+SRC_URI[unicode-bidi-0.3.13.sha256sum] = "92888ba5573ff080736b3648696b70cafad7d250551175acbaa4e0385b3e1460"
+SRC_URI[unicode-ident-1.0.12.sha256sum] = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b"
+SRC_URI[unicode-linebreak-0.1.5.sha256sum] = "3b09c83c3c29d37506a3e260c08c03743a6bb66a9cd432c6934ab501a190571f"
+SRC_URI[unicode-normalization-0.1.22.sha256sum] = "5c5713f0fc4b5db668a2ac63cdb7bb4469d8c9fed047b1d0292cc7b0ce2ba921"
+SRC_URI[unicode-width-0.1.11.sha256sum] = "e51733f11c9c4f72aa0c160008246859e340b00807569a0da0e7a1079b27ba85"
+SRC_URI[untrusted-0.9.0.sha256sum] = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1"
+SRC_URI[ureq-2.9.1.sha256sum] = "f8cdd25c339e200129fe4de81451814e5228c9b771d57378817d6117cc2b3f97"
+SRC_URI[url-2.5.0.sha256sum] = "31e6302e3bb753d46e83516cae55ae196fc0c309407cf11ab35cc51a4c2a4633"
+SRC_URI[utf8parse-0.2.1.sha256sum] = "711b9620af191e0cdc7468a8d14e709c3dcdb115b36f838e601583af800a370a"
+SRC_URI[uuid-1.6.1.sha256sum] = "5e395fcf16a7a3d8127ec99782007af141946b4795001f876d54fb0d55978560"
+SRC_URI[valuable-0.1.0.sha256sum] = "830b7e5d4d90034032940e4ace0d9a9a057e7a45cd94e6c007832e39edb82f6d"
+SRC_URI[vcpkg-0.2.15.sha256sum] = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426"
+SRC_URI[version_check-0.9.4.sha256sum] = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f"
+SRC_URI[versions-5.0.1.sha256sum] = "c73a36bc44e3039f51fbee93e39f41225f6b17b380eb70cc2aab942df06b34dd"
+SRC_URI[wait-timeout-0.2.0.sha256sum] = "9f200f5b12eb75f8c1ed65abd4b2db8a6e1b138a20de009dacee265a2498f3f6"
+SRC_URI[walkdir-2.4.0.sha256sum] = "d71d857dc86794ca4c280d616f7da00d2dbfd8cd788846559a6813e6aa4b54ee"
+SRC_URI[wasi-0.11.0+wasi-snapshot-preview1.sha256sum] = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423"
+SRC_URI[webpki-roots-0.25.3.sha256sum] = "1778a42e8b3b90bff8d0f5032bf22250792889a5cdc752aa0020c84abe3aaf10"
+SRC_URI[which-5.0.0.sha256sum] = "9bf3ea8596f3a0dd5980b46430f2058dfe2c36a27ccfbb1845d6fbfcd9ba6e14"
+SRC_URI[wild-2.2.0.sha256sum] = "10d01931a94d5a115a53f95292f51d316856b68a035618eb831bbba593a30b67"
+SRC_URI[winapi-0.3.9.sha256sum] = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419"
+SRC_URI[winapi-i686-pc-windows-gnu-0.4.0.sha256sum] = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"
+SRC_URI[winapi-util-0.1.6.sha256sum] = "f29e6f9198ba0d26b4c9f07dbe6f9ed633e1f3d5b8b414090084349e46a52596"
+SRC_URI[winapi-x86_64-pc-windows-gnu-0.4.0.sha256sum] = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
+SRC_URI[windows-sys-0.45.0.sha256sum] = "75283be5efb2831d37ea142365f009c02ec203cd29a3ebecbc093d52315b66d0"
+SRC_URI[windows-sys-0.48.0.sha256sum] = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9"
+SRC_URI[windows-sys-0.52.0.sha256sum] = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d"
+SRC_URI[windows-targets-0.42.2.sha256sum] = "8e5180c00cd44c9b1c88adb3693291f1cd93605ded80c250a75d472756b4d071"
+SRC_URI[windows-targets-0.48.5.sha256sum] = "9a2fa6e2155d7247be68c096456083145c183cbbbc2764150dda45a87197940c"
+SRC_URI[windows-targets-0.52.0.sha256sum] = "8a18201040b24831fbb9e4eb208f8892e1f50a37feb53cc7ff887feb8f50e7cd"
+SRC_URI[windows_aarch64_gnullvm-0.42.2.sha256sum] = "597a5118570b68bc08d8d59125332c54f1ba9d9adeedeef5b99b02ba2b0698f8"
+SRC_URI[windows_aarch64_gnullvm-0.48.5.sha256sum] = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8"
+SRC_URI[windows_aarch64_gnullvm-0.52.0.sha256sum] = "cb7764e35d4db8a7921e09562a0304bf2f93e0a51bfccee0bd0bb0b666b015ea"
+SRC_URI[windows_aarch64_msvc-0.42.2.sha256sum] = "e08e8864a60f06ef0d0ff4ba04124db8b0fb3be5776a5cd47641e942e58c4d43"
+SRC_URI[windows_aarch64_msvc-0.48.5.sha256sum] = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc"
+SRC_URI[windows_aarch64_msvc-0.52.0.sha256sum] = "bbaa0368d4f1d2aaefc55b6fcfee13f41544ddf36801e793edbbfd7d7df075ef"
+SRC_URI[windows_i686_gnu-0.42.2.sha256sum] = "c61d927d8da41da96a81f029489353e68739737d3beca43145c8afec9a31a84f"
+SRC_URI[windows_i686_gnu-0.48.5.sha256sum] = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e"
+SRC_URI[windows_i686_gnu-0.52.0.sha256sum] = "a28637cb1fa3560a16915793afb20081aba2c92ee8af57b4d5f28e4b3e7df313"
+SRC_URI[windows_i686_msvc-0.42.2.sha256sum] = "44d840b6ec649f480a41c8d80f9c65108b92d89345dd94027bfe06ac444d1060"
+SRC_URI[windows_i686_msvc-0.48.5.sha256sum] = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406"
+SRC_URI[windows_i686_msvc-0.52.0.sha256sum] = "ffe5e8e31046ce6230cc7215707b816e339ff4d4d67c65dffa206fd0f7aa7b9a"
+SRC_URI[windows_x86_64_gnu-0.42.2.sha256sum] = "8de912b8b8feb55c064867cf047dda097f92d51efad5b491dfb98f6bbb70cb36"
+SRC_URI[windows_x86_64_gnu-0.48.5.sha256sum] = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e"
+SRC_URI[windows_x86_64_gnu-0.52.0.sha256sum] = "3d6fa32db2bc4a2f5abeacf2b69f7992cd09dca97498da74a151a3132c26befd"
+SRC_URI[windows_x86_64_gnullvm-0.42.2.sha256sum] = "26d41b46a36d453748aedef1486d5c7a85db22e56aff34643984ea85514e94a3"
+SRC_URI[windows_x86_64_gnullvm-0.48.5.sha256sum] = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc"
+SRC_URI[windows_x86_64_gnullvm-0.52.0.sha256sum] = "1a657e1e9d3f514745a572a6846d3c7aa7dbe1658c056ed9c3344c4109a6949e"
+SRC_URI[windows_x86_64_msvc-0.42.2.sha256sum] = "9aec5da331524158c6d1a4ac0ab1541149c0b9505fde06423b02f5ef0106b9f0"
+SRC_URI[windows_x86_64_msvc-0.48.5.sha256sum] = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538"
+SRC_URI[windows_x86_64_msvc-0.52.0.sha256sum] = "dff9641d1cd4be8d1a070daf9e3773c5f67e78b4d9d42263020c057706765c04"
+SRC_URI[winnow-0.5.19.sha256sum] = "829846f3e3db426d4cee4510841b71a8e58aa2a76b1132579487ae430ccd9c7b"
+SRC_URI[xattr-1.0.1.sha256sum] = "f4686009f71ff3e5c4dbcf1a282d0a44db3f021ba69350cd42086b3e5f1c6985"
+SRC_URI[xwin-0.5.0.sha256sum] = "c43e0202f5457b48558096cb7b36d0e473f267551a89c82ed72d73b01dfd4007"
+SRC_URI[yansi-0.5.1.sha256sum] = "09041cd90cf85f7f8b2df60c646f853b7f535ce68f85244eb6731cf89fa498ec"
+SRC_URI[zerocopy-0.7.28.sha256sum] = "7d6f15f7ade05d2a4935e34a457b936c23dc70a05cc1d97133dc99e7a3fe0f0e"
+SRC_URI[zerocopy-derive-0.7.28.sha256sum] = "dbbad221e3f78500350ecbd7dfa4e63ef945c05f4c61cb7f4d3f84cd0bba649b"
+SRC_URI[zeroize-1.7.0.sha256sum] = "525b4ec142c6b68a2d10f01f7bbf6755599ca3f81ea53b8431b7dd348f5fdb2d"
+SRC_URI[zip-0.6.6.sha256sum] = "760394e246e4c28189f19d488c058bf16f564016aefac5d32bb1f3b51d5e9261"
diff --git a/meta/recipes-devtools/python/python3-maturin/0001-Add-32-bit-RISC-V-support.patch b/meta/recipes-devtools/python/python3-maturin/0001-Add-32-bit-RISC-V-support.patch
new file mode 100644
index 0000000000..a0ef0c9e22
--- /dev/null
+++ b/meta/recipes-devtools/python/python3-maturin/0001-Add-32-bit-RISC-V-support.patch
@@ -0,0 +1,102 @@
+From a945706bd610c5400fc85a248d5e0c96ebd2e953 Mon Sep 17 00:00:00 2001
+From: Khem Raj <raj.khem@gmail.com>
+Date: Tue, 27 Feb 2024 10:38:49 -0800
+Subject: [PATCH] Add 32-bit RISC-V support
+
+Tested with qemuriscv32 and it builds fine with all tests passed on a
+qemu machine.
+
+Upstream-Status: Submitted [https://github.com/PyO3/maturin/pull/1969]
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+---
+ src/target.rs | 11 ++++++++++-
+ 1 file changed, 10 insertions(+), 1 deletion(-)
+
+diff --git a/src/target.rs b/src/target.rs
+index fbb93531..33fa9273 100644
+--- a/src/target.rs
++++ b/src/target.rs
+@@ -69,6 +69,7 @@ pub enum Arch {
+ X86_64,
+ S390X,
+ Wasm32,
++ Riscv32,
+ Riscv64,
+ Mips64el,
+ Mips64,
+@@ -91,6 +92,7 @@ impl fmt::Display for Arch {
+ Arch::X86_64 => write!(f, "x86_64"),
+ Arch::S390X => write!(f, "s390x"),
+ Arch::Wasm32 => write!(f, "wasm32"),
++ Arch::Riscv32 => write!(f, "riscv32"),
+ Arch::Riscv64 => write!(f, "riscv64"),
+ Arch::Mips64el => write!(f, "mips64el"),
+ Arch::Mips64 => write!(f, "mips64"),
+@@ -115,7 +117,7 @@ impl Arch {
+ Arch::Powerpc | Arch::Powerpc64Le | Arch::Powerpc64 => "powerpc",
+ Arch::X86 => "i386",
+ Arch::X86_64 => "amd64",
+- Arch::Riscv64 => "riscv",
++ Arch::Riscv32 | Arch::Riscv64 => "riscv",
+ Arch::Mips64el | Arch::Mips64 | Arch::Mipsel | Arch::Mips => "mips",
+ // sparc64 is unsupported since FreeBSD 13.0
+ Arch::Sparc64 => "sparc64",
+@@ -139,6 +141,7 @@ fn get_supported_architectures(os: &Os) -> Vec<Arch> {
+ Arch::S390X,
+ Arch::X86,
+ Arch::X86_64,
++ Arch::Riscv32,
+ Arch::Riscv64,
+ Arch::Mips64el,
+ Arch::Mips64,
+@@ -158,6 +161,7 @@ fn get_supported_architectures(os: &Os) -> Vec<Arch> {
+ Arch::Powerpc64Le,
+ Arch::X86,
+ Arch::X86_64,
++ Arch::Riscv32,
+ Arch::Riscv64,
+ Arch::Mips64el,
+ Arch::Mipsel,
+@@ -171,6 +175,7 @@ fn get_supported_architectures(os: &Os) -> Vec<Arch> {
+ Arch::Powerpc,
+ Arch::Powerpc64,
+ Arch::Powerpc64Le,
++ Arch::Riscv32,
+ Arch::Riscv64,
+ Arch::Sparc64,
+ ],
+@@ -255,6 +260,7 @@ impl Target {
+ Architecture::Powerpc64le => Arch::Powerpc64Le,
+ Architecture::S390x => Arch::S390X,
+ Architecture::Wasm32 => Arch::Wasm32,
++ Architecture::Riscv32(_) => Arch::Riscv32,
+ Architecture::Riscv64(_) => Arch::Riscv64,
+ Architecture::Mips64(mips64_arch) => match mips64_arch {
+ Mips64Architecture::Mips64el => Arch::Mips64el,
+@@ -343,6 +349,7 @@ impl Target {
+ Arch::X86_64 => "x86_64",
+ Arch::S390X => "s390x",
+ Arch::Wasm32 => "wasm32",
++ Arch::Riscv32 => "riscv32",
+ Arch::Riscv64 => "riscv64",
+ // It's kinda surprising that Python doesn't include the `el` suffix
+ Arch::Mips64el | Arch::Mips64 => "mips64",
+@@ -388,6 +395,7 @@ impl Target {
+ }
+ Arch::Armv6L
+ | Arch::Wasm32
++ | Arch::Riscv32
+ | Arch::Riscv64
+ | Arch::Mips64el
+ | Arch::Mips64
+@@ -418,6 +426,7 @@ impl Target {
+ | Arch::Wasm32
+ | Arch::Mipsel
+ | Arch::Mips
++ | Arch::Riscv32
+ | Arch::Powerpc => 32,
+ }
+ }
+--
+2.44.0
+
diff --git a/meta/recipes-devtools/python/python3-maturin_1.4.0.bb b/meta/recipes-devtools/python/python3-maturin_1.4.0.bb
new file mode 100644
index 0000000000..ed19ee647a
--- /dev/null
+++ b/meta/recipes-devtools/python/python3-maturin_1.4.0.bb
@@ -0,0 +1,43 @@
+SUMMARY = "Build and publish crates with pyo3, rust-cpython, cffi bindings and rust binaries as python packages"
+HOMEPAGE = "https://github.com/pyo3/maturin"
+SECTION = "devel/python"
+LICENSE = "MIT | Apache-2.0"
+LIC_FILES_CHKSUM = "file://license-apache;md5=1836efb2eb779966696f473ee8540542 \
+ file://license-mit;md5=85fd3b67069cff784d98ebfc7d5c0797"
+
+SRC_URI += "file://0001-Add-32-bit-RISC-V-support.patch"
+SRC_URI[sha256sum] = "ed12e1768094a7adeafc3a74ebdb8dc2201fa64c4e7e31f14cfc70378bf93790"
+
+S = "${WORKDIR}/maturin-${PV}"
+
+CFLAGS:append = " -fdebug-prefix-map=${CARGO_HOME}=${TARGET_DBGSRC_DIR}/cargo_home"
+
+DEPENDS += "\
+ python3-setuptools-rust-native \
+ python3-semantic-version-native \
+ python3-setuptools-rust \
+"
+
+require ${BPN}-crates.inc
+
+inherit pypi cargo-update-recipe-crates python_pyo3 python_setuptools_build_meta
+
+do_configure() {
+ python_pyo3_do_configure
+ cargo_common_do_configure
+ python_pep517_do_configure
+}
+
+RDEPENDS:${PN} += "\
+ cargo \
+ python3-json \
+ rust \
+"
+
+RRECOMMENDS:${PN} += "\
+ python3-ensurepip \
+ python3-pip \
+ python3-venv \
+"
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/python/python3-meson-python_0.15.0.bb b/meta/recipes-devtools/python/python3-meson-python_0.15.0.bb
new file mode 100644
index 0000000000..ad3cfe17d9
--- /dev/null
+++ b/meta/recipes-devtools/python/python3-meson-python_0.15.0.bb
@@ -0,0 +1,27 @@
+SUMMARY = "Meson Python build backend (PEP 517)"
+HOMEPAGE = "https://github.com/mesonbuild/meson-python"
+LICENSE = "MIT"
+LIC_FILES_CHKSUM = "file://LICENSE;md5=d580b27e67cc0892a5b005b0be114b60"
+
+DEPENDS = " \
+ meson-native \
+ ninja-native \
+ patchelf-native \
+ python3-pyproject-metadata-native \
+"
+
+PYPI_PACKAGE = "meson_python"
+
+inherit pypi python_mesonpy
+SRC_URI[sha256sum] = "fddb73eecd49e89c1c41c87937cd89c2d0b65a1c63ba28238681d4bd9484d26f"
+
+DEPENDS:remove:class-native = "python3-meson-python-native"
+
+RDEPENDS:${PN} = " \
+ meson \
+ ninja \
+ patchelf \
+ python3-pyproject-metadata \
+"
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/python/python3-more-itertools/run-ptest b/meta/recipes-devtools/python/python3-more-itertools/run-ptest
index 3385d68939..8d2017d39c 100644
--- a/meta/recipes-devtools/python/python3-more-itertools/run-ptest
+++ b/meta/recipes-devtools/python/python3-more-itertools/run-ptest
@@ -1,3 +1,3 @@
#!/bin/sh
-pytest -o log_cli=true -o log_cli_level=INFO | sed -e 's/\[...%\]//g'| sed -e 's/PASSED/PASS/g'| sed -e 's/FAILED/FAIL/g'|sed -e 's/SKIPED/SKIP/g'| awk '{if ($NF=="PASS" || $NF=="FAIL" || $NF=="SKIP" || $NF=="XFAIL" || $NF=="XPASS"){printf "%s: %s\n", $NF, $0}else{print}}'| awk '{if ($NF=="PASS" || $NF=="FAIL" || $NF=="SKIP" || $NF=="XFAIL" || $NF=="XPASS") {$NF="";print $0}else{print}}'
+pytest --automake
diff --git a/meta/recipes-devtools/python/python3-more-itertools_10.2.0.bb b/meta/recipes-devtools/python/python3-more-itertools_10.2.0.bb
new file mode 100644
index 0000000000..e1e1f5e18f
--- /dev/null
+++ b/meta/recipes-devtools/python/python3-more-itertools_10.2.0.bb
@@ -0,0 +1,29 @@
+SUMMARY = "More routines for operating on iterables, beyond itertools"
+HOMEPAGE = "https://github.com/erikrose/more-itertools"
+LICENSE = "MIT"
+LIC_FILES_CHKSUM = "file://LICENSE;md5=3396ea30f9d21389d7857719816f83b5"
+
+SRC_URI[sha256sum] = "8fccb480c43d3e99a00087634c06dd02b0d50fbf088b380de5a41a015ec239e1"
+
+inherit pypi python_flit_core ptest
+
+SRC_URI += " \
+ file://run-ptest \
+"
+
+RDEPENDS:${PN} += " \
+ python3-asyncio \
+ "
+
+RDEPENDS:${PN}-ptest += " \
+ python3-statistics \
+ python3-pytest \
+ python3-unittest-automake-output \
+ "
+
+do_install_ptest() {
+ install -d ${D}${PTEST_PATH}/tests
+ cp -rf ${S}/tests/* ${D}${PTEST_PATH}/tests/
+}
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/python/python3-more-itertools_8.13.0.bb b/meta/recipes-devtools/python/python3-more-itertools_8.13.0.bb
deleted file mode 100644
index 94f6f9e4d5..0000000000
--- a/meta/recipes-devtools/python/python3-more-itertools_8.13.0.bb
+++ /dev/null
@@ -1,27 +0,0 @@
-DESCRIPTION = "More routines for operating on iterables, beyond itertools"
-HOMEPAGE = "https://github.com/erikrose/more-itertools"
-LICENSE = "MIT"
-LIC_FILES_CHKSUM = "file://LICENSE;md5=3396ea30f9d21389d7857719816f83b5"
-
-SRC_URI[sha256sum] = "a42901a0a5b169d925f6f217cd5a190e32ef54360905b9c39ee7db5313bfec0f"
-
-inherit pypi python_flit_core ptest
-
-SRC_URI += " \
- file://run-ptest \
-"
-
-RDEPENDS:${PN} += " \
- ${PYTHON_PN}-asyncio \
- "
-
-RDEPENDS:${PN}-ptest += " \
- ${PYTHON_PN}-pytest \
- "
-
-do_install_ptest() {
- install -d ${D}${PTEST_PATH}/tests
- cp -rf ${S}/tests/* ${D}${PTEST_PATH}/tests/
-}
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/python/python3-ndg-httpsclient_0.5.1.bb b/meta/recipes-devtools/python/python3-ndg-httpsclient_0.5.1.bb
index 6f3c096cf0..68d8668a41 100644
--- a/meta/recipes-devtools/python/python3-ndg-httpsclient_0.5.1.bb
+++ b/meta/recipes-devtools/python/python3-ndg-httpsclient_0.5.1.bb
@@ -1,4 +1,4 @@
-DESCRIPTION = "Provides enhanced HTTPS support for httplib and urllib2 using PyOpenSSL"
+SUMMARY = "Provides enhanced HTTPS support for httplib and urllib2 using PyOpenSSL"
HOMEPAGE = "https://github.com/cedadev/ndg_httpsclient/"
LICENSE = "BSD-3-Clause"
LIC_FILES_CHKSUM = "file://MANIFEST.in;md5=ce22c0cd986d2de3f7073cd6b5523ae0"
@@ -11,15 +11,15 @@ inherit pypi setuptools3 update-alternatives
PYPI_PACKAGE = "ndg_httpsclient"
DEPENDS += " \
- ${PYTHON_PN}-pyopenssl \
- ${PYTHON_PN}-pyasn1 \
+ python3-pyopenssl \
+ python3-pyasn1 \
"
RDEPENDS:${PN} += " \
- ${PYTHON_PN}-datetime \
- ${PYTHON_PN}-logging \
- ${PYTHON_PN}-pyopenssl \
- ${PYTHON_PN}-pyasn1 \
+ python3-datetime \
+ python3-logging \
+ python3-pyopenssl \
+ python3-pyasn1 \
"
BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/python/python3-numpy/0001-Don-t-search-usr-and-so-on-for-libraries-by-default-.patch b/meta/recipes-devtools/python/python3-numpy/0001-Don-t-search-usr-and-so-on-for-libraries-by-default-.patch
index 9f5f25f9b5..bbe309c556 100644
--- a/meta/recipes-devtools/python/python3-numpy/0001-Don-t-search-usr-and-so-on-for-libraries-by-default-.patch
+++ b/meta/recipes-devtools/python/python3-numpy/0001-Don-t-search-usr-and-so-on-for-libraries-by-default-.patch
@@ -1,4 +1,4 @@
-From 27f6687e49bf555fc494d2f14bae6ecd0fa30f14 Mon Sep 17 00:00:00 2001
+From 46eea664cf89d0602e7ff16d587c37c045b125b7 Mon Sep 17 00:00:00 2001
From: Alexander Kanavin <alex.kanavin@gmail.com>
Date: Thu, 10 Dec 2015 13:20:30 +0200
Subject: [PATCH] Don't search /usr and so on for libraries by default to
@@ -8,15 +8,16 @@ Subject: [PATCH] Don't search /usr and so on for libraries by default to
Upstream-Status: Inappropriate (As the code stands, this is a hack)
Signed-off-by: Ross Burton <ross.burton@intel.com>
Signed-off-by: Alexander Kanavin <alex.kanavin@gmail.com>
+
---
numpy/distutils/system_info.py | 42 +++++-----------------------------
1 file changed, 6 insertions(+), 36 deletions(-)
diff --git a/numpy/distutils/system_info.py b/numpy/distutils/system_info.py
-index 82e864a..135246d 100644
+index feb28f6..a48d6d1 100644
--- a/numpy/distutils/system_info.py
+++ b/numpy/distutils/system_info.py
-@@ -323,44 +323,14 @@ if sys.platform == 'win32':
+@@ -327,44 +327,14 @@ def add_system_root(library_root):
add_system_root(os.path.join(conda_dir, 'Library'))
else:
@@ -67,6 +68,3 @@ index 82e864a..135246d 100644
if os.path.join(sys.prefix, 'lib') not in default_lib_dirs:
default_lib_dirs.insert(0, os.path.join(sys.prefix, 'lib'))
---
-2.25.1
-
diff --git a/meta/recipes-devtools/python/python3-numpy/fix_reproducibility.patch b/meta/recipes-devtools/python/python3-numpy/fix_reproducibility.patch
new file mode 100644
index 0000000000..d952aed00c
--- /dev/null
+++ b/meta/recipes-devtools/python/python3-numpy/fix_reproducibility.patch
@@ -0,0 +1,33 @@
+This regex decides whether to use O3 opimisation on numpy or not.
+
+It includes "od", which happens to be a substring of "reproducible"
+but not "qemux86-world".
+
+The regex will run against all compiler options including things like:
+
+-fmacro-prefix-map=/XXX/build/tmp/work/core2-64-poky-linux/python3-numpy/1.26.0/numpy-1.26.0=/usr/src/debug/python3-numpy/1.26.0-r0
+
+i.e. including build paths.
+
+Reduce the regex to something deterministic for our builds, assuming
+nobody builds in /home/debug:full/
+
+The autobuilder race depended upon whether qemux86-world or the
+reproducible target ran first and won the race to populate sstate.
+
+Upstream-Status: Inappropriate [upstream have dropped distutils and switched to meson]
+Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
+
+Index: numpy-1.26.0/numpy/distutils/ccompiler_opt.py
+===================================================================
+--- numpy-1.26.0.orig/numpy/distutils/ccompiler_opt.py
++++ numpy-1.26.0/numpy/distutils/ccompiler_opt.py
+@@ -990,7 +990,7 @@ class _CCompiler:
+ ("cc_is_nocc", "", ""),
+ )
+ detect_args = (
+- ("cc_has_debug", ".*(O0|Od|ggdb|coverage|debug:full).*", ""),
++ ("cc_has_debug", ".*debug:full.*", ""),
+ ("cc_has_native",
+ ".*(-march=native|-xHost|/QxHost|-mcpu=a64fx).*", ""),
+ # in case if the class run with -DNPY_DISABLE_OPTIMIZATION
diff --git a/meta/recipes-devtools/python/python3-numpy_1.22.3.bb b/meta/recipes-devtools/python/python3-numpy_1.22.3.bb
deleted file mode 100644
index b5b1c4e714..0000000000
--- a/meta/recipes-devtools/python/python3-numpy_1.22.3.bb
+++ /dev/null
@@ -1,72 +0,0 @@
-SUMMARY = "A sophisticated Numeric Processing Package for Python"
-HOMEPAGE = "https://numpy.org/"
-DESCRIPTION = "NumPy is the fundamental package needed for scientific computing with Python."
-SECTION = "devel/python"
-LICENSE = "BSD-3-Clause & BSD-2-Clause & PSF-2.0 & Apache-2.0 & MIT"
-LIC_FILES_CHKSUM = "file://LICENSE.txt;md5=8026691468924fb6ec155dadfe2a1a7f"
-
-SRCNAME = "numpy"
-
-SRC_URI = "https://github.com/${SRCNAME}/${SRCNAME}/releases/download/v${PV}/${SRCNAME}-${PV}.tar.gz \
- file://0001-Don-t-search-usr-and-so-on-for-libraries-by-default-.patch \
- file://0001-numpy-core-Define-RISCV-32-support.patch \
- file://run-ptest \
- "
-SRC_URI[sha256sum] = "a906c0b4301a3d62ccf66d058fe779a65c1c34f6719ef2058f96e1856f48bca5"
-
-UPSTREAM_CHECK_URI = "https://github.com/numpy/numpy/releases"
-UPSTREAM_CHECK_REGEX = "(?P<pver>\d+(\.\d+)+)\.tar"
-
-DEPENDS += "python3-cython-native"
-
-inherit ptest setuptools3
-
-S = "${WORKDIR}/numpy-${PV}"
-
-CLEANBROKEN = "1"
-
-do_compile:prepend() {
- export NPY_DISABLE_SVML=1
-}
-
-# Unfortunately the following pyc files are non-deterministc due to 'frozenset'
-# being written without strict ordering, even with PYTHONHASHSEED = 0
-# Upstream is discussing ways to solve the issue properly, until then let's
-# just not install the problematic files.
-# More info: http://benno.id.au/blog/2013/01/15/python-determinism
-do_install:append() {
- rm ${D}${PYTHON_SITEPACKAGES_DIR}/numpy/typing/tests/data/pass/__pycache__/literal.cpython*
-}
-
-FILES:${PN}-staticdev += "${PYTHON_SITEPACKAGES_DIR}/numpy/core/lib/*.a ${PYTHON_SITEPACKAGES_DIR}/numpy/random/lib/*.a"
-
-# install what is needed for numpy.test()
-RDEPENDS:${PN} = "${PYTHON_PN}-unittest \
- ${PYTHON_PN}-difflib \
- ${PYTHON_PN}-pprint \
- ${PYTHON_PN}-pickle \
- ${PYTHON_PN}-shell \
- ${PYTHON_PN}-doctest \
- ${PYTHON_PN}-datetime \
- ${PYTHON_PN}-distutils \
- ${PYTHON_PN}-misc \
- ${PYTHON_PN}-mmap \
- ${PYTHON_PN}-netclient \
- ${PYTHON_PN}-numbers \
- ${PYTHON_PN}-pydoc \
- ${PYTHON_PN}-pkgutil \
- ${PYTHON_PN}-email \
- ${PYTHON_PN}-compression \
- ${PYTHON_PN}-ctypes \
- ${PYTHON_PN}-threading \
- ${PYTHON_PN}-multiprocessing \
- ${PYTHON_PN}-json \
-"
-RDEPENDS:${PN}-ptest += "${PYTHON_PN}-pytest \
- ${PYTHON_PN}-hypothesis \
- ${PYTHON_PN}-sortedcontainers \
- ${PYTHON_PN}-resource \
- ldd \
-"
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/python/python3-numpy_1.26.4.bb b/meta/recipes-devtools/python/python3-numpy_1.26.4.bb
new file mode 100644
index 0000000000..ccd08147af
--- /dev/null
+++ b/meta/recipes-devtools/python/python3-numpy_1.26.4.bb
@@ -0,0 +1,64 @@
+SUMMARY = "A sophisticated Numeric Processing Package for Python"
+HOMEPAGE = "https://numpy.org/"
+DESCRIPTION = "NumPy is the fundamental package needed for scientific computing with Python."
+SECTION = "devel/python"
+LICENSE = "BSD-3-Clause & BSD-2-Clause & PSF-2.0 & Apache-2.0 & MIT"
+LIC_FILES_CHKSUM = "file://LICENSE.txt;md5=a752eb20459cf74a9d84ee4825e8317c"
+
+SRCNAME = "numpy"
+
+SRC_URI = "${GITHUB_BASE_URI}/download/v${PV}/${SRCNAME}-${PV}.tar.gz \
+ file://0001-Don-t-search-usr-and-so-on-for-libraries-by-default-.patch \
+ file://0001-numpy-core-Define-RISCV-32-support.patch \
+ file://fix_reproducibility.patch \
+ file://run-ptest \
+ "
+SRC_URI[sha256sum] = "2a02aba9ed12e4ac4eb3ea9421c420301a0c6460d9830d74a9df87efa4912010"
+
+GITHUB_BASE_URI = "https://github.com/numpy/numpy/releases"
+UPSTREAM_CHECK_REGEX = "releases/tag/v?(?P<pver>\d+(\.\d+)+)$"
+
+DEPENDS += "python3-cython-native"
+
+inherit ptest setuptools3 github-releases
+
+S = "${WORKDIR}/numpy-${PV}"
+
+CLEANBROKEN = "1"
+
+do_compile:prepend() {
+ export NPY_DISABLE_SVML=1
+}
+
+FILES:${PN}-staticdev += "${PYTHON_SITEPACKAGES_DIR}/numpy/core/lib/*.a ${PYTHON_SITEPACKAGES_DIR}/numpy/random/lib/*.a"
+
+# install what is needed for numpy.test()
+RDEPENDS:${PN} = "python3-unittest \
+ python3-difflib \
+ python3-pprint \
+ python3-pickle \
+ python3-shell \
+ python3-doctest \
+ python3-datetime \
+ python3-misc \
+ python3-mmap \
+ python3-netclient \
+ python3-numbers \
+ python3-pydoc \
+ python3-pkgutil \
+ python3-email \
+ python3-compression \
+ python3-ctypes \
+ python3-threading \
+ python3-multiprocessing \
+ python3-json \
+"
+RDEPENDS:${PN}-ptest += "python3-pytest \
+ python3-hypothesis \
+ python3-sortedcontainers \
+ python3-resource \
+ python3-typing-extensions \
+ ldd \
+"
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/python/python3-packaging_21.3.bb b/meta/recipes-devtools/python/python3-packaging_21.3.bb
deleted file mode 100644
index e3b9a440c4..0000000000
--- a/meta/recipes-devtools/python/python3-packaging_21.3.bb
+++ /dev/null
@@ -1,12 +0,0 @@
-DESCRIPTION = "Core utilities for Python packages"
-HOMEPAGE = "https://github.com/pypa/packaging"
-LICENSE = "Apache-2.0 | BSD-2-Clause"
-LIC_FILES_CHKSUM = "file://LICENSE;md5=faadaedca9251a90b205c9167578ce91"
-
-SRC_URI[sha256sum] = "dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"
-
-inherit pypi python_setuptools_build_meta
-
-BBCLASSEXTEND = "native nativesdk"
-
-RDEPENDS:${PN} += "${PYTHON_PN}-pyparsing"
diff --git a/meta/recipes-devtools/python/python3-packaging_24.0.bb b/meta/recipes-devtools/python/python3-packaging_24.0.bb
new file mode 100644
index 0000000000..0942eeb15e
--- /dev/null
+++ b/meta/recipes-devtools/python/python3-packaging_24.0.bb
@@ -0,0 +1,18 @@
+SUMMARY = "Core utilities for Python packages"
+HOMEPAGE = "https://github.com/pypa/packaging"
+LICENSE = "Apache-2.0 | BSD-2-Clause"
+LIC_FILES_CHKSUM = "file://LICENSE;md5=faadaedca9251a90b205c9167578ce91"
+
+SRC_URI[sha256sum] = "eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"
+
+inherit pypi python_flit_core
+
+BBCLASSEXTEND = "native nativesdk"
+
+# Bootstrap the native build
+DEPENDS:remove:class-native = "python3-build-native"
+RDEPENDS:${PN} += "python3-profile"
+
+do_compile:class-native () {
+ python_flit_core_do_manual_build
+}
diff --git a/meta/recipes-devtools/python/python3-pathlib2_2.3.7.bb b/meta/recipes-devtools/python/python3-pathlib2_2.3.7.bb
index 673b5c7ef0..d4de40893a 100644
--- a/meta/recipes-devtools/python/python3-pathlib2_2.3.7.bb
+++ b/meta/recipes-devtools/python/python3-pathlib2_2.3.7.bb
@@ -1,4 +1,4 @@
-DESCRIPTION = "Object-oriented filesystem paths"
+SUMMARY = "Object-oriented filesystem paths"
HOMEPAGE = "https://github.com/mcmtroffaes/pathlib2"
LICENSE = "MIT"
LIC_FILES_CHKSUM = "file://LICENSE.rst;md5=2dc08586cce3ab91bfa091b655c0e440"
@@ -7,6 +7,6 @@ SRC_URI[sha256sum] = "7a4329d67beff9a712e1d3ae147e4e3e108b0bfd284ffdea03a635126c
inherit pypi setuptools3
-RDEPENDS:${PN} += "${PYTHON_PN}-six ${PYTHON_PN}-ctypes"
+RDEPENDS:${PN} += "python3-six python3-ctypes"
BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/python/python3-pathspec_0.12.1.bb b/meta/recipes-devtools/python/python3-pathspec_0.12.1.bb
new file mode 100644
index 0000000000..b61f673397
--- /dev/null
+++ b/meta/recipes-devtools/python/python3-pathspec_0.12.1.bb
@@ -0,0 +1,13 @@
+SUMMARY = "Utility library for gitignore style pattern matching of file paths."
+HOMEPAGE = "https://github.com/cpburnz/python-path-specification"
+SECTION = "devel/python"
+LICENSE = "MPL-2.0"
+LIC_FILES_CHKSUM = "file://LICENSE;md5=815ca599c9df247a0c7f619bab123dad"
+
+SRC_URI[sha256sum] = "a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"
+
+inherit pypi setuptools3
+
+BBCLASSEXTEND = "native nativesdk"
+
+RDEPENDS:${PN} += "python3-profile"
diff --git a/meta/recipes-devtools/python/python3-pbr_5.9.0.bb b/meta/recipes-devtools/python/python3-pbr_5.9.0.bb
deleted file mode 100644
index c93b71dbd3..0000000000
--- a/meta/recipes-devtools/python/python3-pbr_5.9.0.bb
+++ /dev/null
@@ -1,4 +0,0 @@
-inherit setuptools3
-require python-pbr.inc
-
-SRC_URI[sha256sum] = "e8dca2f4b43560edef58813969f52a56cef023146cbb8931626db80e6c1c4308"
diff --git a/meta/recipes-devtools/python/python3-pbr_6.0.0.bb b/meta/recipes-devtools/python/python3-pbr_6.0.0.bb
new file mode 100644
index 0000000000..6c8171196a
--- /dev/null
+++ b/meta/recipes-devtools/python/python3-pbr_6.0.0.bb
@@ -0,0 +1,4 @@
+inherit setuptools3
+require python-pbr.inc
+
+SRC_URI[sha256sum] = "d1377122a5a00e2f940ee482999518efe16d745d423a670c27773dfbc3c9a7d9"
diff --git a/meta/recipes-devtools/python/python3-pip/0001-change-shebang-to-python3.patch b/meta/recipes-devtools/python/python3-pip/0001-change-shebang-to-python3.patch
deleted file mode 100644
index d6c27b8461..0000000000
--- a/meta/recipes-devtools/python/python3-pip/0001-change-shebang-to-python3.patch
+++ /dev/null
@@ -1,115 +0,0 @@
-From dca973830d4eee3e0f79e61237c44a9dd8201641 Mon Sep 17 00:00:00 2001
-From: Trevor Gamblin <trevor.gamblin@windriver.com>
-Date: Tue, 22 Jun 2021 12:31:46 -0400
-Subject: [PATCH] change shebang to python3
-
-Upstream-Status: Inappropriate (OE-specific)
-
-Despite no longer supporting python2, some files in the pip source refer
-to "python" instead of "python3", so patch them as needed to ensure that
-they correctly reference the python3 binary.
-
-Signed-off-by: Trevor Gamblin <trevor.gamblin@windriver.com>
-
----
- src/pip/_vendor/chardet/langbulgarianmodel.py | 2 +-
- src/pip/_vendor/chardet/langgreekmodel.py | 2 +-
- src/pip/_vendor/chardet/langhebrewmodel.py | 2 +-
- src/pip/_vendor/chardet/langhungarianmodel.py | 2 +-
- src/pip/_vendor/chardet/langrussianmodel.py | 2 +-
- src/pip/_vendor/chardet/langthaimodel.py | 2 +-
- src/pip/_vendor/chardet/langturkishmodel.py | 2 +-
- src/pip/_vendor/chardet/metadata/languages.py | 2 +-
- src/pip/_vendor/requests/certs.py | 2 +-
- 9 files changed, 9 insertions(+), 9 deletions(-)
-
-diff --git a/src/pip/_vendor/chardet/langbulgarianmodel.py b/src/pip/_vendor/chardet/langbulgarianmodel.py
-index e963a50..97ea6ce 100644
---- a/src/pip/_vendor/chardet/langbulgarianmodel.py
-+++ b/src/pip/_vendor/chardet/langbulgarianmodel.py
-@@ -1,4 +1,4 @@
--#!/usr/bin/env python
-+#!/usr/bin/env python3
- # -*- coding: utf-8 -*-
-
- from pip._vendor.chardet.sbcharsetprober import SingleByteCharSetModel
-diff --git a/src/pip/_vendor/chardet/langgreekmodel.py b/src/pip/_vendor/chardet/langgreekmodel.py
-index d99528e..4a127ea 100644
---- a/src/pip/_vendor/chardet/langgreekmodel.py
-+++ b/src/pip/_vendor/chardet/langgreekmodel.py
-@@ -1,4 +1,4 @@
--#!/usr/bin/env python
-+#!/usr/bin/env python3
- # -*- coding: utf-8 -*-
-
- from pip._vendor.chardet.sbcharsetprober import SingleByteCharSetModel
-diff --git a/src/pip/_vendor/chardet/langhebrewmodel.py b/src/pip/_vendor/chardet/langhebrewmodel.py
-index 484c652..676c1a7 100644
---- a/src/pip/_vendor/chardet/langhebrewmodel.py
-+++ b/src/pip/_vendor/chardet/langhebrewmodel.py
-@@ -1,4 +1,4 @@
--#!/usr/bin/env python
-+#!/usr/bin/env python3
- # -*- coding: utf-8 -*-
-
- from pip._vendor.chardet.sbcharsetprober import SingleByteCharSetModel
-diff --git a/src/pip/_vendor/chardet/langhungarianmodel.py b/src/pip/_vendor/chardet/langhungarianmodel.py
-index bbc5cda..042eae7 100644
---- a/src/pip/_vendor/chardet/langhungarianmodel.py
-+++ b/src/pip/_vendor/chardet/langhungarianmodel.py
-@@ -1,4 +1,4 @@
--#!/usr/bin/env python
-+#!/usr/bin/env python3
- # -*- coding: utf-8 -*-
-
- from pip._vendor.chardet.sbcharsetprober import SingleByteCharSetModel
-diff --git a/src/pip/_vendor/chardet/langrussianmodel.py b/src/pip/_vendor/chardet/langrussianmodel.py
-index 5594452..564b02e 100644
---- a/src/pip/_vendor/chardet/langrussianmodel.py
-+++ b/src/pip/_vendor/chardet/langrussianmodel.py
-@@ -1,4 +1,4 @@
--#!/usr/bin/env python
-+#!/usr/bin/env python3
- # -*- coding: utf-8 -*-
-
- from pip._vendor.chardet.sbcharsetprober import SingleByteCharSetModel
-diff --git a/src/pip/_vendor/chardet/langthaimodel.py b/src/pip/_vendor/chardet/langthaimodel.py
-index 9a37db5..c974879 100644
---- a/src/pip/_vendor/chardet/langthaimodel.py
-+++ b/src/pip/_vendor/chardet/langthaimodel.py
-@@ -1,4 +1,4 @@
--#!/usr/bin/env python
-+#!/usr/bin/env python3
- # -*- coding: utf-8 -*-
-
- from pip._vendor.chardet.sbcharsetprober import SingleByteCharSetModel
-diff --git a/src/pip/_vendor/chardet/langturkishmodel.py b/src/pip/_vendor/chardet/langturkishmodel.py
-index 43f4230..7e710c3 100644
---- a/src/pip/_vendor/chardet/langturkishmodel.py
-+++ b/src/pip/_vendor/chardet/langturkishmodel.py
-@@ -1,4 +1,4 @@
--#!/usr/bin/env python
-+#!/usr/bin/env python3
- # -*- coding: utf-8 -*-
-
- from pip._vendor.chardet.sbcharsetprober import SingleByteCharSetModel
-diff --git a/src/pip/_vendor/chardet/metadata/languages.py b/src/pip/_vendor/chardet/metadata/languages.py
-index 3237d5a..aa2ec7c 100644
---- a/src/pip/_vendor/chardet/metadata/languages.py
-+++ b/src/pip/_vendor/chardet/metadata/languages.py
-@@ -1,4 +1,4 @@
--#!/usr/bin/env python
-+#!/usr/bin/env python3
- # -*- coding: utf-8 -*-
- """
- Metadata about languages used by our model training code for our
-diff --git a/src/pip/_vendor/requests/certs.py b/src/pip/_vendor/requests/certs.py
-index 06a594e..bfa7839 100644
---- a/src/pip/_vendor/requests/certs.py
-+++ b/src/pip/_vendor/requests/certs.py
-@@ -1,4 +1,4 @@
--#!/usr/bin/env python
-+#!/usr/bin/env python3
- # -*- coding: utf-8 -*-
-
- """
diff --git a/meta/recipes-devtools/python/python3-pip/no_shebang_mangling.patch b/meta/recipes-devtools/python/python3-pip/no_shebang_mangling.patch
index 920e22bc02..99fa14ee03 100644
--- a/meta/recipes-devtools/python/python3-pip/no_shebang_mangling.patch
+++ b/meta/recipes-devtools/python/python3-pip/no_shebang_mangling.patch
@@ -1,14 +1,22 @@
+From 2aa82aeb0783c5fa7777b32bfe1dd3da9ae8fc6e Mon Sep 17 00:00:00 2001
+From: Richard Purdie <richard.purdie@linuxfoundation.org>
+Date: Wed, 23 Feb 2022 12:27:06 +0000
+Subject: [PATCH] python3-pip: Don't change shebang
+
Patch pip to disable shebang mangling and also force the python executable
to be python3 from the environment when building anything for the target
(or nativesdk). This avoids incorrect interpreter paths in the target scripts.
Upstream-Status: Inappropriate [OE specific config]
+---
+ src/pip/_vendor/distlib/scripts.py | 4 +++-
+ 1 file changed, 3 insertions(+), 1 deletion(-)
-Index: pip-22.0.3/src/pip/_vendor/distlib/scripts.py
-===================================================================
---- pip-22.0.3.orig/src/pip/_vendor/distlib/scripts.py
-+++ pip-22.0.3/src/pip/_vendor/distlib/scripts.py
-@@ -135,6 +135,8 @@ class ScriptMaker(object):
+diff --git a/src/pip/_vendor/distlib/scripts.py b/src/pip/_vendor/distlib/scripts.py
+index cfa45d2..313f499 100644
+--- a/src/pip/_vendor/distlib/scripts.py
++++ b/src/pip/_vendor/distlib/scripts.py
+@@ -144,6 +144,8 @@ class ScriptMaker(object):
See also: http://www.in-ulm.de/~mascheck/various/shebang/#length
https://hg.mozilla.org/mozilla-central/file/tip/mach
"""
@@ -17,7 +25,7 @@ Index: pip-22.0.3/src/pip/_vendor/distlib/scripts.py
if os.name != 'posix':
simple_shebang = True
else:
-@@ -340,7 +342,7 @@ class ScriptMaker(object):
+@@ -362,7 +364,7 @@ class ScriptMaker(object):
return
match = FIRST_LINE_RE.match(first_line.replace(b'\r\n', b'\n'))
diff --git a/meta/recipes-devtools/python/python3-pip/reproducible.patch b/meta/recipes-devtools/python/python3-pip/reproducible.patch
deleted file mode 100644
index 538bb94f7a..0000000000
--- a/meta/recipes-devtools/python/python3-pip/reproducible.patch
+++ /dev/null
@@ -1,74 +0,0 @@
-Pip installed wheels are not reproducible currently. The direct_url
-files encode an installation path and the installed wheels compile
-the python files at their location, not their final install location
-which is incorrect.
-
-To fix this, simply disable the direct_urls and pass the "root" to
-the python compile function to strip that path out of the compiled
-files.
-
-A version of this patch, perhaps stripping root from the direct_urls
-may be something that could be considered by upstream.
-
-Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
-
-Upstream-Status: Pending
-
-Index: pip-22.0.3/src/pip/_internal/req/req_install.py
-===================================================================
---- pip-22.0.3.orig/src/pip/_internal/req/req_install.py
-+++ pip-22.0.3/src/pip/_internal/req/req_install.py
-@@ -758,7 +758,9 @@ class InstallRequirement:
- if self.is_wheel:
- assert self.local_file_path
- direct_url = None
-- if self.editable:
-+ if '_PYTHON_SYSCONFIGDATA_NAME' in os.environ:
-+ direct_url = None
-+ elif self.editable:
- direct_url = direct_url_for_editable(self.unpacked_source_directory)
- elif self.original_link:
- direct_url = direct_url_from_link(
-@@ -775,6 +777,7 @@ class InstallRequirement:
- warn_script_location=warn_script_location,
- direct_url=direct_url,
- requested=self.user_supplied,
-+ root=root,
- )
- self.install_succeeded = True
- return
-Index: pip-22.0.3/src/pip/_internal/operations/install/wheel.py
-===================================================================
---- pip-22.0.3.orig/src/pip/_internal/operations/install/wheel.py
-+++ pip-22.0.3/src/pip/_internal/operations/install/wheel.py
-@@ -436,6 +436,7 @@ def _install_wheel(
- warn_script_location: bool = True,
- direct_url: Optional[DirectUrl] = None,
- requested: bool = False,
-+ root: str = None,
- ) -> None:
- """Install a wheel.
-
-@@ -612,7 +613,7 @@ def _install_wheel(
- with warnings.catch_warnings():
- warnings.filterwarnings("ignore")
- for path in pyc_source_file_paths():
-- success = compileall.compile_file(path, force=True, quiet=True)
-+ success = compileall.compile_file(path, force=True, quiet=True, stripdir=root)
- if success:
- pyc_path = pyc_output_path(path)
- assert os.path.exists(pyc_path)
-@@ -723,6 +724,7 @@ def install_wheel(
- warn_script_location: bool = True,
- direct_url: Optional[DirectUrl] = None,
- requested: bool = False,
-+ root: str = None,
- ) -> None:
- with ZipFile(wheel_path, allowZip64=True) as z:
- with req_error_context(req_description):
-@@ -735,4 +737,5 @@ def install_wheel(
- warn_script_location=warn_script_location,
- direct_url=direct_url,
- requested=requested,
-+ root=root,
- )
diff --git a/meta/recipes-devtools/python/python3-pip_22.1.bb b/meta/recipes-devtools/python/python3-pip_22.1.bb
deleted file mode 100644
index 1aef9bca33..0000000000
--- a/meta/recipes-devtools/python/python3-pip_22.1.bb
+++ /dev/null
@@ -1,63 +0,0 @@
-SUMMARY = "The PyPA recommended tool for installing Python packages"
-HOMEPAGE = "https://pypi.org/project/pip"
-SECTION = "devel/python"
-LICENSE = "MIT & Apache-2.0 & MPL-2.0 & LGPL-2.1-only & BSD-3-Clause & PSF-2.0 & BSD-2-Clause"
-LIC_FILES_CHKSUM = "\
- file://LICENSE.txt;md5=63ec52baf95163b597008bb46db68030 \
- file://src/pip/_vendor/cachecontrol/LICENSE.txt;md5=6572692148079ebbbd800be4b9f36c6d \
- file://src/pip/_vendor/certifi/LICENSE;md5=67da0714c3f9471067b729eca6c9fbe8 \
- file://src/pip/_vendor/chardet/LICENSE;md5=a6f89e2100d9b6cdffcea4f398e37343 \
- file://src/pip/_vendor/colorama/LICENSE.txt;md5=b4936429a56a652b84c5c01280dcaa26 \
- file://src/pip/_vendor/distlib/LICENSE.txt;md5=f6a11430d5cd6e2cd3832ee94f22ddfc \
- file://src/pip/_vendor/distro/LICENSE;md5=d2794c0df5b907fdace235a619d80314 \
- file://src/pip/_vendor/html5lib/LICENSE;md5=1ba5ada9e6fead1fdc32f43c9f10ba7c \
- file://src/pip/_vendor/idna/LICENSE.md;md5=239668a7c6066d9e0c5382e9c8c6c0e1 \
- file://src/pip/_vendor/msgpack/COPYING;md5=cd9523181d9d4fbf7ffca52eaa2a5751 \
- file://src/pip/_vendor/packaging/LICENSE;md5=faadaedca9251a90b205c9167578ce91 \
- file://src/pip/_vendor/packaging/LICENSE.APACHE;md5=2ee41112a44fe7014dce33e26468ba93 \
- file://src/pip/_vendor/pep517/LICENSE;md5=aad69c93f605003e3342b174d9b0708c \
- file://src/pip/_vendor/pkg_resources/LICENSE;md5=9a33897f1bca1160d7aad3835152e158 \
- file://src/pip/_vendor/platformdirs/LICENSE.txt;md5=282c970bb844954c8535dd6e9733db7f \
- file://src/pip/_vendor/pygments/LICENSE;md5=98419e351433ac106a24e3ad435930bc \
- file://src/pip/_vendor/pyparsing/LICENSE;md5=657a566233888513e1f07ba13e2f47f1 \
- file://src/pip/_vendor/requests/LICENSE;md5=34400b68072d710fecd0a2940a0d1658 \
- file://src/pip/_vendor/resolvelib/LICENSE;md5=78e1c0248051c32a38a7f820c30bd7a5 \
- file://src/pip/_vendor/rich/LICENSE;md5=b5f0b94fbc94f5ad9ae4efcf8a778303 \
- file://src/pip/_vendor/six.LICENSE;md5=43cfc9e4ac0e377acfb9b76f56b8415d \
- file://src/pip/_vendor/tenacity/LICENSE;md5=175792518e4ac015ab6696d16c4f607e \
- file://src/pip/_vendor/tomli/LICENSE;md5=aaaaf0879d17df0110d1aa8c8c9f46f5 \
- file://src/pip/_vendor/typing_extensions.LICENSE;md5=64fc2b30b67d0a8423c250e0386ed72f \
- file://src/pip/_vendor/urllib3/LICENSE.txt;md5=c2823cb995439c984fd62a973d79815c \
- file://src/pip/_vendor/webencodings/LICENSE;md5=81fb24cd7823cce23b69f721993dce4d \
-"
-
-inherit pypi python_setuptools_build_meta
-
-SRC_URI += "file://0001-change-shebang-to-python3.patch"
-SRC_URI += "file://no_shebang_mangling.patch"
-SRC_URI += "file://reproducible.patch"
-
-SRC_URI[sha256sum] = "2debf847016cfe643fa1512e2d781d3ca9e5c878ba0652583842d50cc2bcc605"
-
-do_install:append() {
- rm -f ${D}/${bindir}/pip
-}
-
-RDEPENDS:${PN} = "\
- python3-compile \
- python3-io \
- python3-html \
- python3-json \
- python3-multiprocessing \
- python3-netserver \
- python3-setuptools \
- python3-unixadmin \
- python3-xmlrpc \
- python3-pickle \
-"
-
-BBCLASSEXTEND = "native nativesdk"
-
-# This used to use the bootstrap install which didn't compile. Until we bump the
-# tmpdir version we can't compile the native otherwise the sysroot unpack fails
-INSTALL_WHEEL_COMPILE_BYTECODE:class-native = "--no-compile-bytecode"
diff --git a/meta/recipes-devtools/python/python3-pip_24.0.bb b/meta/recipes-devtools/python/python3-pip_24.0.bb
new file mode 100644
index 0000000000..be4a29500a
--- /dev/null
+++ b/meta/recipes-devtools/python/python3-pip_24.0.bb
@@ -0,0 +1,60 @@
+SUMMARY = "The PyPA recommended tool for installing Python packages"
+HOMEPAGE = "https://pypi.org/project/pip"
+SECTION = "devel/python"
+LICENSE = "MIT & Apache-2.0 & MPL-2.0 & LGPL-2.1-only & BSD-3-Clause & PSF-2.0 & BSD-2-Clause"
+LIC_FILES_CHKSUM = "file://LICENSE.txt;md5=63ec52baf95163b597008bb46db68030 \
+ file://src/pip/_vendor/cachecontrol/LICENSE.txt;md5=6572692148079ebbbd800be4b9f36c6d \
+ file://src/pip/_vendor/certifi/LICENSE;md5=3c2b7404369c587c3559afb604fce2f2 \
+ file://src/pip/_vendor/chardet/LICENSE;md5=4fbd65380cdd255951079008b364516c \
+ file://src/pip/_vendor/colorama/LICENSE.txt;md5=b4936429a56a652b84c5c01280dcaa26 \
+ file://src/pip/_vendor/distlib/LICENSE.txt;md5=f6a11430d5cd6e2cd3832ee94f22ddfc \
+ file://src/pip/_vendor/distro/LICENSE;md5=d2794c0df5b907fdace235a619d80314 \
+ file://src/pip/_vendor/idna/LICENSE.md;md5=239668a7c6066d9e0c5382e9c8c6c0e1 \
+ file://src/pip/_vendor/msgpack/COPYING;md5=cd9523181d9d4fbf7ffca52eaa2a5751 \
+ file://src/pip/_vendor/packaging/LICENSE;md5=faadaedca9251a90b205c9167578ce91 \
+ file://src/pip/_vendor/packaging/LICENSE.APACHE;md5=2ee41112a44fe7014dce33e26468ba93 \
+ file://src/pip/_vendor/pkg_resources/LICENSE;md5=141643e11c48898150daa83802dbc65f \
+ file://src/pip/_vendor/platformdirs/LICENSE;md5=ea4f5a41454746a9ed111e3d8723d17a \
+ file://src/pip/_vendor/pygments/LICENSE;md5=36a13c90514e2899f1eba7f41c3ee592 \
+ file://src/pip/_vendor/pyparsing/LICENSE;md5=657a566233888513e1f07ba13e2f47f1 \
+ file://src/pip/_vendor/pyproject_hooks/LICENSE;md5=aad69c93f605003e3342b174d9b0708c \
+ file://src/pip/_vendor/requests/LICENSE;md5=34400b68072d710fecd0a2940a0d1658 \
+ file://src/pip/_vendor/resolvelib/LICENSE;md5=78e1c0248051c32a38a7f820c30bd7a5 \
+ file://src/pip/_vendor/rich/LICENSE;md5=b5f0b94fbc94f5ad9ae4efcf8a778303 \
+ file://src/pip/_vendor/six.LICENSE;md5=43cfc9e4ac0e377acfb9b76f56b8415d \
+ file://src/pip/_vendor/tenacity/LICENSE;md5=175792518e4ac015ab6696d16c4f607e \
+ file://src/pip/_vendor/tomli/LICENSE;md5=aaaaf0879d17df0110d1aa8c8c9f46f5 \
+ file://src/pip/_vendor/typing_extensions.LICENSE;md5=fcf6b249c2641540219a727f35d8d2c2 \
+ file://src/pip/_vendor/urllib3/LICENSE.txt;md5=c2823cb995439c984fd62a973d79815c \
+ file://src/pip/_vendor/webencodings/LICENSE;md5=81fb24cd7823cce23b69f721993dce4d \
+ "
+
+inherit pypi python_setuptools_build_meta
+
+SRC_URI += "file://no_shebang_mangling.patch"
+
+SRC_URI[sha256sum] = "ea9bd1a847e8c5774a5777bb398c19e80bcd4e2aa16a4b301b718fe6f593aba2"
+
+do_install:append() {
+ rm -f ${D}/${bindir}/pip
+}
+
+RDEPENDS:${PN} = "\
+ python3-compile \
+ python3-io \
+ python3-html \
+ python3-json \
+ python3-multiprocessing \
+ python3-netserver \
+ python3-setuptools \
+ python3-unixadmin \
+ python3-xmlrpc \
+ python3-pickle \
+ python3-image \
+"
+
+BBCLASSEXTEND = "native nativesdk"
+
+# This used to use the bootstrap install which didn't compile. Until we bump the
+# tmpdir version we can't compile the native otherwise the sysroot unpack fails
+INSTALL_WHEEL_COMPILE_BYTECODE:class-native = "--no-compile-bytecode"
diff --git a/meta/recipes-devtools/python/python3-pluggy/run-ptest b/meta/recipes-devtools/python/python3-pluggy/run-ptest
index b63c4de0d9..8d2017d39c 100644
--- a/meta/recipes-devtools/python/python3-pluggy/run-ptest
+++ b/meta/recipes-devtools/python/python3-pluggy/run-ptest
@@ -1,3 +1,3 @@
#!/bin/sh
-pytest -o log_cli=true -o log_cli_level=INFO | sed -e 's/\[...%\]//g'| sed -e 's/PASSED/PASS/g'| sed -e 's/FAILED/FAIL/g'|sed -e 's/SKIPPED/SKIP/g'| awk '{if ($NF=="PASS" || $NF=="FAIL" || $NF=="SKIP" || $NF=="XFAIL" || $NF=="XPASS"){printf "%s: %s\n", $NF, $0}else{print}}'| awk '{if ($NF=="PASS" || $NF=="FAIL" || $NF=="SKIP" || $NF=="XFAIL" || $NF=="XPASS") {$NF="";print $0}else{print}}'
+pytest --automake
diff --git a/meta/recipes-devtools/python/python3-pluggy_1.0.0.bb b/meta/recipes-devtools/python/python3-pluggy_1.0.0.bb
deleted file mode 100644
index 3f2895a061..0000000000
--- a/meta/recipes-devtools/python/python3-pluggy_1.0.0.bb
+++ /dev/null
@@ -1,26 +0,0 @@
-SUMMARY = "Plugin and hook calling mechanisms for python"
-HOMEPAGE = "https://github.com/pytest-dev/pluggy"
-LICENSE = "MIT"
-LIC_FILES_CHKSUM = "file://LICENSE;md5=1c8206d16fd5cc02fa9b0bb98955e5c2"
-
-SRC_URI[sha256sum] = "4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"
-
-DEPENDS += "${PYTHON_PN}-setuptools-scm-native"
-RDEPENDS:${PN} += "${PYTHON_PN}-importlib-metadata \
- ${PYTHON_PN}-more-itertools \
-"
-
-inherit pypi ptest python_setuptools_build_meta
-
-SRC_URI += " \
- file://run-ptest \
-"
-
-RDEPENDS:${PN}-ptest += " \
- ${PYTHON_PN}-pytest \
-"
-
-do_install_ptest() {
- install -d ${D}${PTEST_PATH}/testing
- cp -rf ${S}/testing/* ${D}${PTEST_PATH}/testing/
-}
diff --git a/meta/recipes-devtools/python/python3-pluggy_1.4.0.bb b/meta/recipes-devtools/python/python3-pluggy_1.4.0.bb
new file mode 100644
index 0000000000..76acc4ae27
--- /dev/null
+++ b/meta/recipes-devtools/python/python3-pluggy_1.4.0.bb
@@ -0,0 +1,29 @@
+SUMMARY = "Plugin and hook calling mechanisms for python"
+HOMEPAGE = "https://github.com/pytest-dev/pluggy"
+LICENSE = "MIT"
+LIC_FILES_CHKSUM = "file://LICENSE;md5=1c8206d16fd5cc02fa9b0bb98955e5c2"
+
+SRC_URI[sha256sum] = "8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"
+
+DEPENDS += "python3-setuptools-scm-native"
+RDEPENDS:${PN} += "python3-importlib-metadata \
+ python3-more-itertools \
+"
+
+inherit pypi ptest python_setuptools_build_meta
+
+SRC_URI += " \
+ file://run-ptest \
+"
+
+RDEPENDS:${PN}-ptest += " \
+ python3-pytest \
+ python3-unittest-automake-output \
+"
+
+do_install_ptest() {
+ install -d ${D}${PTEST_PATH}/testing
+ cp -rf ${S}/testing/* ${D}${PTEST_PATH}/testing/
+}
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/python/python3-ply_3.11.bb b/meta/recipes-devtools/python/python3-ply_3.11.bb
index 99c037bb73..a05bd6702d 100644
--- a/meta/recipes-devtools/python/python3-ply_3.11.bb
+++ b/meta/recipes-devtools/python/python3-ply_3.11.bb
@@ -11,8 +11,8 @@ SRC_URI[sha256sum] = "00c7c1aaa88358b9c765b6d3000c6eec0ba42abca5351b095321aef446
inherit pypi setuptools3
RDEPENDS:${PN}:class-target += "\
- ${PYTHON_PN}-netclient \
- ${PYTHON_PN}-shell \
+ python3-netclient \
+ python3-shell \
"
-BBCLASSEXTEND = "native"
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/python/python3-poetry-core_1.0.8.bb b/meta/recipes-devtools/python/python3-poetry-core_1.0.8.bb
deleted file mode 100644
index f3e9b05521..0000000000
--- a/meta/recipes-devtools/python/python3-poetry-core_1.0.8.bb
+++ /dev/null
@@ -1,44 +0,0 @@
-SUMMARY = "Poetry PEP 517 Build Backend"
-DESCRIPTION = "Poetry PEP 517 Build Backend"
-HOMEPAGE = "https://github.com/python-poetry/poetry-core"
-BUGTRACKER = "https://github.com/python-poetry/poetry-core"
-CHANGELOG = "https://github.com/python-poetry/poetry-core/blob/master/CHANGELOG.md"
-
-LICENSE = "Apache-2.0 & BSD-2-Clause & MIT"
-LIC_FILES_CHKSUM = "file://LICENSE;md5=104d5c3c653aeededf4076773aa4c236 \
- file://poetry/core/_vendor/attrs.LICENSE;md5=75bb9823a2778b5a2bd9b793fac93ea2 \
- file://poetry/core/_vendor/jsonschema/COPYING;md5=6554d3a51d7cb0b611891317f3c69057 \
- file://poetry/core/_vendor/jsonschema/LICENSE;md5=2ecb81765361195731a6f72a89e449fd \
- file://poetry/core/_vendor/lark-parser.LICENSE;md5=b37b83a9cf129d92ee65aaa71c01ce72 \
- file://poetry/core/_vendor/packaging/LICENSE;md5=7a6e56c9d54ecd731ab31c52de7942f0 \
- file://poetry/core/_vendor/packaging/LICENSE.APACHE;md5=29256199be2a609aac596980ffc11996 \
- file://poetry/core/_vendor/packaging/LICENSE.BSD;md5=f405810d173a1618433827928768bcd2 \
- file://poetry/core/_vendor/pyparsing.LICENSE;md5=fb46329938e6bc829b256e37d5c1e31a \
- file://poetry/core/_vendor/pyrsistent/LICENSE.mit;md5=1211a1ac6eac40020d0f99c39b4e4270 \
- file://poetry/core/_vendor/six.LICENSE;md5=6a574656da93d9ef05431b45907e35b6 \
- file://poetry/core/_vendor/tomlkit/LICENSE;md5=be329e5ef9c9fe86738c9afe6ef3c11c \
- "
-
-SRC_URI[sha256sum] = "951fc7c1f8d710a94cb49019ee3742125039fc659675912ea614ac2aa405b118"
-
-inherit python_poetry_core pypi
-
-RDEPENDS:${PN}:append:class-target = "\
- python3-compression \
- python3-core \
- python3-crypt \
- python3-io \
- python3-json \
- python3-logging \
- python3-netclient \
- python3-pathlib2 \
- python3-pprint \
- python3-shell \
-"
-
-RDEPENDS:${PN} += "\
- python3-pip \
- python3-six \
-"
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/python/python3-poetry-core_1.9.0.bb b/meta/recipes-devtools/python/python3-poetry-core_1.9.0.bb
new file mode 100644
index 0000000000..540fdffaed
--- /dev/null
+++ b/meta/recipes-devtools/python/python3-poetry-core_1.9.0.bb
@@ -0,0 +1,42 @@
+SUMMARY = "Poetry PEP 517 Build Backend"
+DESCRIPTION = "Poetry PEP 517 Build Backend"
+HOMEPAGE = "https://github.com/python-poetry/poetry-core"
+BUGTRACKER = "https://github.com/python-poetry/poetry-core"
+CHANGELOG = "https://github.com/python-poetry/poetry-core/blob/master/CHANGELOG.md"
+
+LICENSE = "Apache-2.0 & BSD-2-Clause & BSD-3-Clause & MIT"
+LIC_FILES_CHKSUM = "\
+ file://LICENSE;md5=78c39cfd009863ae44237a7ab1f9cedc \
+ file://src/poetry/core/_vendor/fastjsonschema/LICENSE;md5=18950e8362b69c0c617b42b8bd8e7532 \
+ file://src/poetry/core/_vendor/lark/LICENSE;md5=fcfbf1e2ecc0f37acbb5871aa0267500 \
+ file://src/poetry/core/_vendor/packaging/LICENSE;md5=faadaedca9251a90b205c9167578ce91 \
+ file://src/poetry/core/_vendor/packaging/LICENSE.APACHE;md5=2ee41112a44fe7014dce33e26468ba93 \
+ file://src/poetry/core/_vendor/packaging/LICENSE.BSD;md5=7bef9bf4a8e4263634d0597e7ba100b8 \
+ file://src/poetry/core/_vendor/tomli/LICENSE;md5=aaaaf0879d17df0110d1aa8c8c9f46f5 \
+"
+
+SRC_URI[sha256sum] = "fa7a4001eae8aa572ee84f35feb510b321bd652e5cf9293249d62853e1f935a2"
+
+inherit python_poetry_core pypi
+
+PYPI_PACKAGE = "poetry_core"
+
+RDEPENDS:${PN}:append:class-target = "\
+ python3-compression \
+ python3-core \
+ python3-crypt \
+ python3-io \
+ python3-json \
+ python3-logging \
+ python3-netclient \
+ python3-pathlib2 \
+ python3-pprint \
+ python3-shell \
+"
+
+RDEPENDS:${PN} += "\
+ python3-pip \
+ python3-six \
+"
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/python/python3-psutil/0001-fix-failure-test-cases.patch b/meta/recipes-devtools/python/python3-psutil/0001-fix-failure-test-cases.patch
deleted file mode 100644
index 99bfccca3f..0000000000
--- a/meta/recipes-devtools/python/python3-psutil/0001-fix-failure-test-cases.patch
+++ /dev/null
@@ -1,197 +0,0 @@
-From 8b4e38958ff8bdbb3ece4796bfa2d3b6f7536f71 Mon Sep 17 00:00:00 2001
-From: Changqing Li <changqing.li@windriver.com>
-Date: Wed, 23 Feb 2022 11:54:40 +0800
-Subject: [PATCH] fix failure test cases
-
-The test cases is not robust enough. skip some cases that is
-not suitable for all conditions.
-
-* test_io_counters failed when kernel config CONFIG_TASKSTATS
- and CONFIG_TASK_IO_ACCOUNTING are not enable in OE
-* test_setup_script failed since oe don't install setup.py
-* test_used failed since oe use git source for free, so the version
- is 3.3.17-dirty
-* test_weird_environ failed since gcc not installed
-* test_debug failed since it is designed to run when PSUTIL_DEBUG is set
-* test_against_findmnt/test_comparisons/test_disk_partitions_mocked/
- test_disk_partitions is not suitable for Linux nfs boot
-
-Upstream-Status: Submitted [https://github.com/giampaolo/psutil/pull/2097]
-
-Signed-off-by: Changqing Li <changqing.li@windriver.com>
----
- psutil/tests/test_contracts.py | 1 +
- psutil/tests/test_linux.py | 8 ++++++--
- psutil/tests/test_misc.py | 4 ++++
- psutil/tests/test_process.py | 5 +++++
- psutil/tests/test_system.py | 1 +
- psutil/tests/test_unicode.py | 4 +++-
- 6 files changed, 20 insertions(+), 3 deletions(-)
-
-diff --git a/psutil/tests/test_contracts.py b/psutil/tests/test_contracts.py
-index 7401cc1..bf0fca0 100755
---- a/psutil/tests/test_contracts.py
-+++ b/psutil/tests/test_contracts.py
-@@ -172,6 +172,7 @@ class TestAvailProcessAPIs(PsutilTestCase):
- def test_rlimit(self):
- self.assertEqual(hasattr(psutil.Process, "rlimit"), LINUX or FREEBSD)
-
-+ @unittest.skip("broken on OE since kernel config maye not be enabled")
- def test_io_counters(self):
- hasit = hasattr(psutil.Process, "io_counters")
- self.assertEqual(hasit, False if MACOS or SUNOS else True)
-diff --git a/psutil/tests/test_linux.py b/psutil/tests/test_linux.py
-index 20e28d2..66b6dda 100755
---- a/psutil/tests/test_linux.py
-+++ b/psutil/tests/test_linux.py
-@@ -196,8 +196,9 @@ def get_free_version_info():
- out = sh(["free", "-V"]).strip()
- if 'UNKNOWN' in out:
- raise unittest.SkipTest("can't determine free version")
-- return tuple(map(int, out.split()[-1].split('.')))
--
-+ vlist = out.split()[-1].split('.')
-+ vlist[:] = [n.split('-')[0] for n in vlist]
-+ return tuple(map(int, vlist))
-
- @contextlib.contextmanager
- def mock_open_content(for_path, content):
-@@ -1289,6 +1290,7 @@ class TestRootFsDeviceFinder(PsutilTestCase):
- finder.ask_sys_class_block()
-
- @unittest.skipIf(GITHUB_ACTIONS, "unsupported on GITHUB_ACTIONS")
-+ @unittest.skip("Broken for oe")
- def test_comparisons(self):
- finder = RootFsDeviceFinder()
- self.assertIsNotNone(finder.find())
-@@ -1311,11 +1313,13 @@ class TestRootFsDeviceFinder(PsutilTestCase):
-
- @unittest.skipIf(not which("findmnt"), "findmnt utility not available")
- @unittest.skipIf(GITHUB_ACTIONS, "unsupported on GITHUB_ACTIONS")
-+ @unittest.skip("Broken for oe")
- def test_against_findmnt(self):
- psutil_value = RootFsDeviceFinder().find()
- findmnt_value = sh("findmnt -o SOURCE -rn /")
- self.assertEqual(psutil_value, findmnt_value)
-
-+ @unittest.skip("Broken for oe")
- def test_disk_partitions_mocked(self):
- with mock.patch(
- 'psutil._pslinux.cext.disk_partitions',
-diff --git a/psutil/tests/test_misc.py b/psutil/tests/test_misc.py
-index d946eb6..121004a 100755
---- a/psutil/tests/test_misc.py
-+++ b/psutil/tests/test_misc.py
-@@ -54,6 +54,8 @@ from psutil.tests import unittest
- # ===================================================================
-
-
-+PSUTIL_DEBUG = bool(os.getenv('PSUTIL_DEBUG', 0))
-+
- class TestMisc(PsutilTestCase):
-
- def test_process__repr__(self, func=repr):
-@@ -368,6 +370,7 @@ class TestMisc(PsutilTestCase):
-
- # XXX: https://github.com/pypa/setuptools/pull/2896
- @unittest.skipIf(APPVEYOR, "temporarily disabled due to setuptools bug")
-+ @unittest.skip("OE run this test outof source tree")
- def test_setup_script(self):
- setup_py = os.path.join(ROOT_DIR, 'setup.py')
- if CI_TESTING and not os.path.exists(setup_py):
-@@ -401,6 +404,7 @@ class TestMisc(PsutilTestCase):
- reload_module(psutil)
- self.assertIn("version conflict", str(cm.exception).lower())
-
-+ @unittest.skipIf(not PSUTIL_DEBUG, "env PSUTIL_DEBUG not set")
- def test_debug(self):
- if PY3:
- from io import StringIO
-diff --git a/psutil/tests/test_process.py b/psutil/tests/test_process.py
-index c9059e3..a34ba3d 100755
---- a/psutil/tests/test_process.py
-+++ b/psutil/tests/test_process.py
-@@ -36,6 +36,7 @@ from psutil._compat import PY3
- from psutil._compat import FileNotFoundError
- from psutil._compat import long
- from psutil._compat import super
-+from psutil._compat import which
- from psutil.tests import APPVEYOR
- from psutil.tests import CI_TESTING
- from psutil.tests import GITHUB_ACTIONS
-@@ -726,6 +727,7 @@ class TestProcess(PsutilTestCase):
- self.assertEqual(' '.join(p.cmdline()), ' '.join(cmdline))
-
- @unittest.skipIf(PYPY, "broken on PYPY")
-+ @unittest.skipIf(not which("gcc"), "gcc not installed")
- def test_long_cmdline(self):
- testfn = self.get_testfn()
- create_exe(testfn)
-@@ -740,6 +742,7 @@ class TestProcess(PsutilTestCase):
- assert pyexe.startswith(name), (pyexe, name)
-
- @unittest.skipIf(PYPY, "unreliable on PYPY")
-+ @unittest.skipIf(not which("gcc"), "gcc not installed")
- def test_long_name(self):
- testfn = self.get_testfn(suffix="0123456789" * 2)
- create_exe(testfn)
-@@ -750,6 +753,7 @@ class TestProcess(PsutilTestCase):
- @unittest.skipIf(SUNOS, "broken on SUNOS")
- @unittest.skipIf(AIX, "broken on AIX")
- @unittest.skipIf(PYPY, "broken on PYPY")
-+ @unittest.skipIf(not which("gcc"), "gcc not installed")
- def test_prog_w_funky_name(self):
- # Test that name(), exe() and cmdline() correctly handle programs
- # with funky chars such as spaces and ")", see:
-@@ -1408,6 +1412,7 @@ class TestProcess(PsutilTestCase):
-
- @unittest.skipIf(not HAS_ENVIRON, "not supported")
- @unittest.skipIf(not POSIX, "POSIX only")
-+ @unittest.skipIf(not which("gcc"), "gcc not installed")
- def test_weird_environ(self):
- # environment variables can contain values without an equals sign
- code = textwrap.dedent("""
-diff --git a/psutil/tests/test_system.py b/psutil/tests/test_system.py
-index db2cb34..5ee519f 100755
---- a/psutil/tests/test_system.py
-+++ b/psutil/tests/test_system.py
-@@ -580,6 +580,7 @@ class TestDiskAPIs(PsutilTestCase):
- def test_disk_usage_bytes(self):
- psutil.disk_usage(b'.')
-
-+ @unittest.skip("Broken for oe")
- def test_disk_partitions(self):
- def check_ntuple(nt):
- self.assertIsInstance(nt.device, str)
-diff --git a/psutil/tests/test_unicode.py b/psutil/tests/test_unicode.py
-index e635726..7ba5b0f 100755
---- a/psutil/tests/test_unicode.py
-+++ b/psutil/tests/test_unicode.py
-@@ -86,6 +86,7 @@ from psutil import POSIX
- from psutil import WINDOWS
- from psutil._compat import PY3
- from psutil._compat import u
-+from psutil._compat import which
- from psutil.tests import APPVEYOR
- from psutil.tests import ASCII_FS
- from psutil.tests import CI_TESTING
-@@ -156,7 +157,7 @@ def try_unicode(suffix):
- # FS APIs
- # ===================================================================
-
--
-+@unittest.skipIf(not which("gcc"), "gcc not installed")
- class BaseUnicodeTest(PsutilTestCase):
- funky_suffix = None
-
-@@ -169,6 +170,7 @@ class BaseUnicodeTest(PsutilTestCase):
- @serialrun
- @unittest.skipIf(ASCII_FS, "ASCII fs")
- @unittest.skipIf(PYPY and not PY3, "too much trouble on PYPY2")
-+@unittest.skipIf(not which("gcc"), "gcc not installed")
- class TestFSAPIs(BaseUnicodeTest):
- """Test FS APIs with a funky, valid, UTF8 path name."""
-
---
-2.25.1
-
diff --git a/meta/recipes-devtools/python/python3-psutil_5.9.0.bb b/meta/recipes-devtools/python/python3-psutil_5.9.0.bb
deleted file mode 100644
index df304f01e6..0000000000
--- a/meta/recipes-devtools/python/python3-psutil_5.9.0.bb
+++ /dev/null
@@ -1,29 +0,0 @@
-SUMMARY = "A cross-platform process and system utilities module for Python"
-LICENSE = "BSD-3-Clause"
-LIC_FILES_CHKSUM = "file://LICENSE;md5=e35fd9f271d19d5f742f20a9d1f8bb8b"
-HOMEPAGE = "https://pypi.org/project/psutil/"
-
-SRC_URI[sha256sum] = "869842dbd66bb80c3217158e629d6fceaecc3a3166d3d1faee515b05dd26ca25"
-
-inherit pypi setuptools3
-
-SRC_URI += "file://0001-fix-failure-test-cases.patch"
-
-PACKAGES =+ "${PN}-tests"
-
-FILES:${PN}-tests += " \
- ${PYTHON_SITEPACKAGES_DIR}/psutil/test* \
- ${PYTHON_SITEPACKAGES_DIR}/psutil/__pycache__/test* \
-"
-
-
-RDEPENDS:${PN} += " \
- ${PYTHON_PN}-shell \
- ${PYTHON_PN}-threading \
- ${PYTHON_PN}-xml \
- ${PYTHON_PN}-netclient \
- ${PYTHON_PN}-ctypes \
- ${PYTHON_PN}-resource \
-"
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/python/python3-psutil_5.9.8.bb b/meta/recipes-devtools/python/python3-psutil_5.9.8.bb
new file mode 100644
index 0000000000..1a394eda33
--- /dev/null
+++ b/meta/recipes-devtools/python/python3-psutil_5.9.8.bb
@@ -0,0 +1,41 @@
+SUMMARY = "A cross-platform process and system utilities module for Python"
+LICENSE = "BSD-3-Clause"
+LIC_FILES_CHKSUM = "file://LICENSE;md5=a9c72113a843d0d732a0ac1c200d81b1"
+HOMEPAGE = "https://pypi.org/project/psutil/"
+
+SRC_URI[sha256sum] = "6be126e3225486dff286a8fb9a06246a5253f4c7c53b475ea5f5ac934e64194c"
+
+inherit pypi setuptools3
+
+PACKAGES =+ "${PN}-tests"
+
+FILES:${PN}-tests += " \
+ ${PYTHON_SITEPACKAGES_DIR}/psutil/test* \
+ ${PYTHON_SITEPACKAGES_DIR}/psutil/__pycache__/test* \
+"
+
+
+RDEPENDS:${PN} += " \
+ python3-shell \
+ python3-threading \
+ python3-xml \
+ python3-netclient \
+ python3-ctypes \
+ python3-resource \
+"
+
+RDEPENDS:${PN}-tests += " \
+ ${PN} \
+ python3 \
+ coreutils \
+ procps \
+ binutils \
+ gcc \
+ gcc-symlinks \
+ libstdc++ \
+ libstdc++-dev \
+"
+
+INSANE_SKIP:${PN}-tests += "dev-deps"
+
+BBCLASSEXTEND = "native"
diff --git a/meta/recipes-devtools/python/python3-py_1.11.0.bb b/meta/recipes-devtools/python/python3-py_1.11.0.bb
index a2bd2d84fc..31d5a377a7 100644
--- a/meta/recipes-devtools/python/python3-py_1.11.0.bb
+++ b/meta/recipes-devtools/python/python3-py_1.11.0.bb
@@ -5,10 +5,10 @@ LIC_FILES_CHKSUM = "file://LICENSE;md5=a6bb0320b04a0a503f12f69fea479de9"
SRC_URI[sha256sum] = "51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"
-DEPENDS += "${PYTHON_PN}-setuptools-scm-native"
+DEPENDS += "python3-setuptools-scm-native"
inherit pypi python_setuptools_build_meta
BBCLASSEXTEND = "native nativesdk"
-RDEPENDS:${PN} += "${PYTHON_PN}-netclient"
+RDEPENDS:${PN} += "python3-netclient"
diff --git a/meta/recipes-devtools/python/python3-pyasn1/run-ptest b/meta/recipes-devtools/python/python3-pyasn1/run-ptest
index b63c4de0d9..8d2017d39c 100644
--- a/meta/recipes-devtools/python/python3-pyasn1/run-ptest
+++ b/meta/recipes-devtools/python/python3-pyasn1/run-ptest
@@ -1,3 +1,3 @@
#!/bin/sh
-pytest -o log_cli=true -o log_cli_level=INFO | sed -e 's/\[...%\]//g'| sed -e 's/PASSED/PASS/g'| sed -e 's/FAILED/FAIL/g'|sed -e 's/SKIPPED/SKIP/g'| awk '{if ($NF=="PASS" || $NF=="FAIL" || $NF=="SKIP" || $NF=="XFAIL" || $NF=="XPASS"){printf "%s: %s\n", $NF, $0}else{print}}'| awk '{if ($NF=="PASS" || $NF=="FAIL" || $NF=="SKIP" || $NF=="XFAIL" || $NF=="XPASS") {$NF="";print $0}else{print}}'
+pytest --automake
diff --git a/meta/recipes-devtools/python/python3-pyasn1_0.4.8.bb b/meta/recipes-devtools/python/python3-pyasn1_0.4.8.bb
deleted file mode 100644
index a5e2a71362..0000000000
--- a/meta/recipes-devtools/python/python3-pyasn1_0.4.8.bb
+++ /dev/null
@@ -1,2 +0,0 @@
-inherit pypi setuptools3
-require python-pyasn1.inc
diff --git a/meta/recipes-devtools/python/python3-pyasn1_0.6.0.bb b/meta/recipes-devtools/python/python3-pyasn1_0.6.0.bb
new file mode 100644
index 0000000000..0519ba5edb
--- /dev/null
+++ b/meta/recipes-devtools/python/python3-pyasn1_0.6.0.bb
@@ -0,0 +1,3 @@
+inherit pypi setuptools3
+require python-pyasn1.inc
+
diff --git a/meta/recipes-devtools/python/python3-pycairo_1.21.0.bb b/meta/recipes-devtools/python/python3-pycairo_1.21.0.bb
deleted file mode 100644
index 29452c7660..0000000000
--- a/meta/recipes-devtools/python/python3-pycairo_1.21.0.bb
+++ /dev/null
@@ -1,26 +0,0 @@
-SUMMARY = "Python bindings for the Cairo canvas library"
-HOMEPAGE = "http://cairographics.org/pycairo"
-BUGTRACKER = "http://bugs.freedesktop.org"
-SECTION = "python-devel"
-LICENSE = "LGPL-2.1-only & MPL-1.1"
-LIC_FILES_CHKSUM = "file://COPYING;md5=f3713ca2c28d9312ad718520b6dc3eee \
- file://COPYING-LGPL-2.1;md5=fad9b3332be894bab9bc501572864b29 \
- file://COPYING-MPL-1.1;md5=bfe1f75d606912a4111c90743d6c7325"
-
-# cairo >= 1.14
-DEPENDS = "cairo python3"
-
-SRC_URI = "https://github.com/pygobject/pycairo/releases/download/v${PV}/pycairo-${PV}.tar.gz"
-UPSTREAM_CHECK_URI = "https://github.com/pygobject/pycairo/releases/"
-
-SRC_URI[sha256sum] = "251907f18a552df938aa3386657ff4b5a4937dde70e11aa042bc297957f4b74b"
-
-S = "${WORKDIR}/pycairo-${PV}"
-
-inherit meson pkgconfig python3targetconfig
-
-CFLAGS += "-fPIC"
-
-BBCLASSEXTEND = "native"
-
-FILES:${PN} = "${PYTHON_SITEPACKAGES_DIR}/*"
diff --git a/meta/recipes-devtools/python/python3-pycairo_1.26.0.bb b/meta/recipes-devtools/python/python3-pycairo_1.26.0.bb
new file mode 100644
index 0000000000..ea8b81be76
--- /dev/null
+++ b/meta/recipes-devtools/python/python3-pycairo_1.26.0.bb
@@ -0,0 +1,26 @@
+SUMMARY = "Python bindings for the Cairo canvas library"
+HOMEPAGE = "http://cairographics.org/pycairo"
+BUGTRACKER = "http://bugs.freedesktop.org"
+SECTION = "python-devel"
+LICENSE = "LGPL-2.1-only & MPL-1.1"
+LIC_FILES_CHKSUM = "file://COPYING;md5=f3713ca2c28d9312ad718520b6dc3eee \
+ file://COPYING-LGPL-2.1;md5=fad9b3332be894bab9bc501572864b29 \
+ file://COPYING-MPL-1.1;md5=bfe1f75d606912a4111c90743d6c7325"
+
+# cairo >= 1.14
+DEPENDS = "cairo python3"
+
+SRC_URI = "${GITHUB_BASE_URI}/download/v${PV}/pycairo-${PV}.tar.gz"
+GITHUB_BASE_URI = "https://github.com/pygobject/pycairo/releases/"
+
+SRC_URI[sha256sum] = "2dddd0a874fbddb21e14acd9b955881ee1dc6e63b9c549a192d613a907f9cbeb"
+
+S = "${WORKDIR}/pycairo-${PV}"
+
+inherit meson pkgconfig python3targetconfig github-releases
+
+CFLAGS += "-fPIC"
+
+BBCLASSEXTEND = "native"
+
+FILES:${PN} = "${PYTHON_SITEPACKAGES_DIR}/*"
diff --git a/meta/recipes-devtools/python/python3-pycparser_2.21.bb b/meta/recipes-devtools/python/python3-pycparser_2.21.bb
index 53b7367903..00deceebf9 100644
--- a/meta/recipes-devtools/python/python3-pycparser_2.21.bb
+++ b/meta/recipes-devtools/python/python3-pycparser_2.21.bb
@@ -10,9 +10,9 @@ inherit pypi setuptools3
BBCLASSEXTEND = "native nativesdk"
RDEPENDS:${PN}:class-target += "\
- ${PYTHON_PN}-netclient \
- ${PYTHON_PN}-ply \
- ${PYTHON_PN}-pprint \
+ python3-netclient \
+ python3-ply \
+ python3-pprint \
"
RSUGGESTS:${PN}:class-target += "\
diff --git a/meta/recipes-devtools/python/python3-pycryptodome_3.14.1.bb b/meta/recipes-devtools/python/python3-pycryptodome_3.14.1.bb
deleted file mode 100644
index c0324590c2..0000000000
--- a/meta/recipes-devtools/python/python3-pycryptodome_3.14.1.bb
+++ /dev/null
@@ -1,5 +0,0 @@
-require python-pycryptodome.inc
-inherit setuptools3
-
-SRC_URI[sha256sum] = "e04e40a7f8c1669195536a37979dd87da2c32dbdc73d6fe35f0077b0c17c803b"
-
diff --git a/meta/recipes-devtools/python/python3-pycryptodome_3.20.0.bb b/meta/recipes-devtools/python/python3-pycryptodome_3.20.0.bb
new file mode 100644
index 0000000000..d24fa58d43
--- /dev/null
+++ b/meta/recipes-devtools/python/python3-pycryptodome_3.20.0.bb
@@ -0,0 +1,5 @@
+require python-pycryptodome.inc
+inherit setuptools3
+
+SRC_URI[sha256sum] = "09609209ed7de61c2b560cc5c8c4fbf892f8b15b1faf7e4cbffac97db1fffda7"
+
diff --git a/meta/recipes-devtools/python/python3-pycryptodomex_3.14.1.bb b/meta/recipes-devtools/python/python3-pycryptodomex_3.14.1.bb
deleted file mode 100644
index 79a3fee19c..0000000000
--- a/meta/recipes-devtools/python/python3-pycryptodomex_3.14.1.bb
+++ /dev/null
@@ -1,9 +0,0 @@
-require python-pycryptodome.inc
-inherit setuptools3
-
-SRC_URI[sha256sum] = "2ce76ed0081fd6ac8c74edc75b9d14eca2064173af79843c24fa62573263c1f2"
-
-FILES:${PN}-tests = " \
- ${PYTHON_SITEPACKAGES_DIR}/Cryptodome/SelfTest/ \
- ${PYTHON_SITEPACKAGES_DIR}/Cryptodome/SelfTest/__pycache__/ \
-"
diff --git a/meta/recipes-devtools/python/python3-pycryptodomex_3.20.0.bb b/meta/recipes-devtools/python/python3-pycryptodomex_3.20.0.bb
new file mode 100644
index 0000000000..2673ea8326
--- /dev/null
+++ b/meta/recipes-devtools/python/python3-pycryptodomex_3.20.0.bb
@@ -0,0 +1,9 @@
+require python-pycryptodome.inc
+inherit setuptools3
+
+SRC_URI[sha256sum] = "7a710b79baddd65b806402e14766c721aee8fb83381769c27920f26476276c1e"
+
+FILES:${PN}-tests = " \
+ ${PYTHON_SITEPACKAGES_DIR}/Cryptodome/SelfTest/ \
+ ${PYTHON_SITEPACKAGES_DIR}/Cryptodome/SelfTest/__pycache__/ \
+"
diff --git a/meta/recipes-devtools/python/python3-pyelftools_0.28.bb b/meta/recipes-devtools/python/python3-pyelftools_0.28.bb
deleted file mode 100644
index 0ceddcb68a..0000000000
--- a/meta/recipes-devtools/python/python3-pyelftools_0.28.bb
+++ /dev/null
@@ -1,15 +0,0 @@
-DESCRIPTION = "pyelftools is a pure-Python library for parsing and analyzing ELF files and DWARF debugging information"
-HOMEPAGE = "https://github.com/eliben/pyelftools"
-SECTION = "devel/python"
-LICENSE = "PD"
-LIC_FILES_CHKSUM = "file://LICENSE;md5=5ce2a2b07fca326bc7c146d10105ccfc"
-
-SRC_URI[sha256sum] = "53e5609cac016471d40bd88dc410cd90755942c25e58a61021cfdf7abdfeacff"
-
-PYPI_PACKAGE = "pyelftools"
-
-inherit pypi setuptools3
-
-BBCLASSEXTEND = "native"
-
-RDEPENDS:${PN} += "${PYTHON_PN}-debugger ${PYTHON_PN}-pprint"
diff --git a/meta/recipes-devtools/python/python3-pyelftools_0.31.bb b/meta/recipes-devtools/python/python3-pyelftools_0.31.bb
new file mode 100644
index 0000000000..551fed6876
--- /dev/null
+++ b/meta/recipes-devtools/python/python3-pyelftools_0.31.bb
@@ -0,0 +1,15 @@
+SUMMARY = "pyelftools is a pure-Python library for parsing and analyzing ELF files and DWARF debugging information"
+HOMEPAGE = "https://github.com/eliben/pyelftools"
+SECTION = "devel/python"
+LICENSE = "PD"
+LIC_FILES_CHKSUM = "file://LICENSE;md5=5ce2a2b07fca326bc7c146d10105ccfc"
+
+SRC_URI[sha256sum] = "c774416b10310156879443b81187d182d8d9ee499660380e645918b50bc88f99"
+
+PYPI_PACKAGE = "pyelftools"
+
+inherit pypi setuptools3
+
+BBCLASSEXTEND = "native"
+
+RDEPENDS:${PN} += "python3-debugger python3-pprint"
diff --git a/meta/recipes-devtools/python/python3-pygments_2.12.0.bb b/meta/recipes-devtools/python/python3-pygments_2.12.0.bb
deleted file mode 100644
index b47e0aff67..0000000000
--- a/meta/recipes-devtools/python/python3-pygments_2.12.0.bb
+++ /dev/null
@@ -1,19 +0,0 @@
-SUMMARY = "Pygments is a syntax highlighting package written in Python."
-DESCRIPTION = "Pygments is a syntax highlighting package written in Python."
-HOMEPAGE = "http://pygments.org/"
-LICENSE = "BSD-2-Clause"
-LIC_FILES_CHKSUM = "file://LICENSE;md5=36a13c90514e2899f1eba7f41c3ee592"
-
-inherit setuptools3
-SRC_URI[sha256sum] = "5eb116118f9612ff1ee89ac96437bb6b49e8f04d8a13b514ba26f620208e26eb"
-
-DEPENDS += "\
- ${PYTHON_PN} \
- "
-
-PYPI_PACKAGE = "Pygments"
-
-inherit pypi
-
-BBCLASSEXTEND = "native nativesdk"
-
diff --git a/meta/recipes-devtools/python/python3-pygments_2.17.2.bb b/meta/recipes-devtools/python/python3-pygments_2.17.2.bb
new file mode 100644
index 0000000000..8b98064b78
--- /dev/null
+++ b/meta/recipes-devtools/python/python3-pygments_2.17.2.bb
@@ -0,0 +1,14 @@
+SUMMARY = "Pygments is a syntax highlighting package written in Python."
+DESCRIPTION = "Pygments is a syntax highlighting package written in Python."
+HOMEPAGE = "http://pygments.org/"
+LICENSE = "BSD-2-Clause"
+LIC_FILES_CHKSUM = "file://LICENSE;md5=36a13c90514e2899f1eba7f41c3ee592"
+
+inherit python_hatchling
+SRC_URI[sha256sum] = "da46cec9fd2de5be3a8a784f434e4c4ab670b4ff54d605c4c2717e9d49c4c367"
+
+UPSTREAM_CHECK_PYPI_PACKAGE = "Pygments"
+inherit pypi
+
+BBCLASSEXTEND = "native nativesdk"
+
diff --git a/meta/recipes-devtools/python/python3-pygobject/0001-Do-not-build-tests.patch b/meta/recipes-devtools/python/python3-pygobject/0001-Do-not-build-tests.patch
deleted file mode 100644
index 0f2465bb52..0000000000
--- a/meta/recipes-devtools/python/python3-pygobject/0001-Do-not-build-tests.patch
+++ /dev/null
@@ -1,30 +0,0 @@
-From c125a806de951359ab7e302b0584f7c92fa451ad Mon Sep 17 00:00:00 2001
-From: Alexander Kanavin <alex.kanavin@gmail.com>
-Date: Fri, 12 Apr 2019 16:25:58 +0200
-Subject: [PATCH] Do not build tests
-
-They require installing tests from g-i, which we do not do.
-
-Upstream-Status: Inappropriate [oe-core specific]
-Signed-off-by: Alexander Kanavin <alex.kanavin@gmail.com>
----
- meson.build | 2 +-
- 1 file changed, 1 insertion(+), 1 deletion(-)
-
-diff --git a/meson.build b/meson.build
-index 278fa16f..aacbd4a2 100644
---- a/meson.build
-+++ b/meson.build
-@@ -175,6 +175,6 @@ configure_file(input : 'PKG-INFO.in',
- subdir('gi')
- subdir('pygtkcompat')
- with_tests = get_option('tests')
--if with_tests
--subdir('tests')
--endif
-+#if with_tests
-+#subdir('tests')
-+#endif
---
-2.17.1
-
diff --git a/meta/recipes-devtools/python/python3-pygobject_3.42.1.bb b/meta/recipes-devtools/python/python3-pygobject_3.42.1.bb
deleted file mode 100644
index 443605ae26..0000000000
--- a/meta/recipes-devtools/python/python3-pygobject_3.42.1.bb
+++ /dev/null
@@ -1,34 +0,0 @@
-SUMMARY = "Python GObject bindings"
-HOMEPAGE = "https://gitlab.gnome.org/GNOME/pygobject"
-DESCRIPTION = "PyGObject is a Python package which provides bindings for GObject based libraries such as GTK, GStreamer, WebKitGTK, GLib, GIO and many more."
-SECTION = "devel/python"
-LICENSE = "LGPL-2.1-only"
-LIC_FILES_CHKSUM = "file://COPYING;md5=a916467b91076e631dd8edb7424769c7"
-
-GNOMEBASEBUILDCLASS = "meson"
-GIR_MESON_OPTION = ""
-
-inherit gnomebase setuptools3-base gobject-introspection upstream-version-is-even
-
-DEPENDS += "python3 glib-2.0"
-
-SRCNAME="pygobject"
-
-SRC_URI = " \
- http://ftp.gnome.org/pub/GNOME/sources/${SRCNAME}/${@gnome_verdir("${PV}")}/${SRCNAME}-${PV}.tar.xz \
- file://0001-Do-not-build-tests.patch \
-"
-SRC_URI[sha256sum] = "1f34b5f7624de35e44eb5a7eb428353285bd03004d55131a5f7f7fa9b90f3cc9"
-
-S = "${WORKDIR}/${SRCNAME}-${PV}"
-
-PACKAGECONFIG ??= "${@bb.utils.contains_any('DISTRO_FEATURES', [ 'directfb', 'wayland', 'x11' ], 'cairo', '', d)}"
-
-RDEPENDS:${PN} += "python3-pkgutil"
-
-# python3-pycairo is checked on configuration -> DEPENDS
-# we don't link against python3-pycairo -> RDEPENDS
-PACKAGECONFIG[cairo] = "-Dpycairo=enabled,-Dpycairo=disabled, cairo python3-pycairo, python3-pycairo"
-
-BBCLASSEXTEND = "native"
-PACKAGECONFIG:class-native = ""
diff --git a/meta/recipes-devtools/python/python3-pygobject_3.48.1.bb b/meta/recipes-devtools/python/python3-pygobject_3.48.1.bb
new file mode 100644
index 0000000000..4754e41c04
--- /dev/null
+++ b/meta/recipes-devtools/python/python3-pygobject_3.48.1.bb
@@ -0,0 +1,39 @@
+SUMMARY = "Python GObject bindings"
+HOMEPAGE = "https://gitlab.gnome.org/GNOME/pygobject"
+DESCRIPTION = "PyGObject is a Python package which provides bindings for GObject based libraries such as GTK, GStreamer, WebKitGTK, GLib, GIO and many more."
+SECTION = "devel/python"
+LICENSE = "LGPL-2.1-only"
+LIC_FILES_CHKSUM = "file://COPYING;md5=a916467b91076e631dd8edb7424769c7"
+
+GIR_MESON_OPTION = ""
+
+inherit gnomebase setuptools3-base gobject-introspection upstream-version-is-even
+
+python() {
+ if d.getVar('CLASSOVERRIDE') == "class-target" and not bb.utils.to_boolean(d.getVar("GI_DATA_ENABLED")):
+ raise bb.parse.SkipRecipe("GI not available")
+}
+
+DEPENDS += "python3 glib-2.0"
+
+SRCNAME="pygobject"
+
+SRC_URI = "http://ftp.gnome.org/pub/GNOME/sources/${SRCNAME}/${@gnome_verdir("${PV}")}/${SRCNAME}-${PV}.tar.xz"
+SRC_URI[sha256sum] = "3a0a2c0c0f25931b5840649c54834b9e58a63148d37fa9f6308887b7027e15c2"
+
+S = "${WORKDIR}/${SRCNAME}-${PV}"
+
+PACKAGECONFIG ??= "${@bb.utils.contains_any('DISTRO_FEATURES', [ 'directfb', 'wayland', 'x11' ], 'cairo', '', d)}"
+
+RDEPENDS:${PN} += " \
+ python3-io \
+ python3-pkgutil \
+"
+
+# python3-pycairo is checked on configuration -> DEPENDS
+# we don't link against python3-pycairo -> RDEPENDS
+PACKAGECONFIG[cairo] = "-Dpycairo=enabled,-Dpycairo=disabled, cairo python3-pycairo, python3-pycairo"
+PACKAGECONFIG[tests] = "-Dtests=true,-Dtests=false,"
+
+BBCLASSEXTEND = "native"
+PACKAGECONFIG:class-native = ""
diff --git a/meta/recipes-devtools/python/python3-pyopenssl_22.0.0.bb b/meta/recipes-devtools/python/python3-pyopenssl_22.0.0.bb
deleted file mode 100644
index db0e809ef5..0000000000
--- a/meta/recipes-devtools/python/python3-pyopenssl_22.0.0.bb
+++ /dev/null
@@ -1,23 +0,0 @@
-SUMMARY = "Simple Python wrapper around the OpenSSL library"
-HOMEPAGE = "https://pyopenssl.org/"
-LICENSE = "Apache-2.0"
-LIC_FILES_CHKSUM = "file://LICENSE;md5=3b83ef96387f14655fc854ddc3c6bd57"
-
-DEPENDS += "openssl ${PYTHON_PN}-cryptography"
-
-SRC_URI[sha256sum] = "660b1b1425aac4a1bea1d94168a85d99f0b3144c869dd4390d27629d0087f1bf"
-
-PYPI_PACKAGE = "pyOpenSSL"
-inherit pypi setuptools3
-
-PACKAGES =+ "${PN}-tests"
-FILES:${PN}-tests = "${libdir}/${PYTHON_DIR}/site-packages/OpenSSL/test"
-
-RDEPENDS:${PN}:class-target = " \
- ${PYTHON_PN}-cryptography \
- ${PYTHON_PN}-six \
- ${PYTHON_PN}-threading \
-"
-RDEPENDS:${PN}-tests = "${PN}"
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/python/python3-pyopenssl_24.1.0.bb b/meta/recipes-devtools/python/python3-pyopenssl_24.1.0.bb
new file mode 100644
index 0000000000..e714ad838e
--- /dev/null
+++ b/meta/recipes-devtools/python/python3-pyopenssl_24.1.0.bb
@@ -0,0 +1,23 @@
+SUMMARY = "Simple Python wrapper around the OpenSSL library"
+HOMEPAGE = "https://pyopenssl.org/"
+LICENSE = "Apache-2.0"
+LIC_FILES_CHKSUM = "file://LICENSE;md5=3b83ef96387f14655fc854ddc3c6bd57"
+
+DEPENDS += "openssl python3-cryptography"
+
+SRC_URI[sha256sum] = "cabed4bfaa5df9f1a16c0ef64a0cb65318b5cd077a7eda7d6970131ca2f41a6f"
+
+PYPI_PACKAGE = "pyOpenSSL"
+inherit pypi setuptools3
+
+PACKAGES =+ "${PN}-tests"
+FILES:${PN}-tests = "${libdir}/${PYTHON_DIR}/site-packages/OpenSSL/test"
+
+RDEPENDS:${PN}:class-target = " \
+ python3-cryptography \
+ python3-six \
+ python3-threading \
+"
+RDEPENDS:${PN}-tests = "${PN}"
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/python/python3-pyparsing_3.0.9.bb b/meta/recipes-devtools/python/python3-pyparsing_3.0.9.bb
deleted file mode 100644
index b858073c3b..0000000000
--- a/meta/recipes-devtools/python/python3-pyparsing_3.0.9.bb
+++ /dev/null
@@ -1,30 +0,0 @@
-SUMMARY = "Python parsing module"
-DESCRIPTION = "The pyparsing module is an alternative approach to creating \
-and executing simple grammars, vs. the traditional lex/yacc approach, or \
-the use of regular expressions. The pyparsing module provides a library of \
-classes that client code uses to construct the grammar directly in Python \
-code."
-HOMEPAGE = "https://github.com/pyparsing/pyparsing/"
-BUGTRACKER = "https://github.com/pyparsing/pyparsing/issues"
-
-LICENSE = "MIT"
-LIC_FILES_CHKSUM = "file://LICENSE;md5=657a566233888513e1f07ba13e2f47f1"
-
-SRC_URI[sha256sum] = "2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb"
-
-UPSTREAM_CHECK_REGEX = "pyparsing-(?P<pver>.*)\.tar"
-
-inherit pypi python_flit_core
-
-RDEPENDS:${PN} += " \
- ${PYTHON_PN}-datetime \
- ${PYTHON_PN}-debugger \
- ${PYTHON_PN}-html \
- ${PYTHON_PN}-json \
- ${PYTHON_PN}-netclient \
- ${PYTHON_PN}-pprint \
- ${PYTHON_PN}-stringold \
- ${PYTHON_PN}-threading \
-"
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/python/python3-pyparsing_3.1.2.bb b/meta/recipes-devtools/python/python3-pyparsing_3.1.2.bb
new file mode 100644
index 0000000000..64210ade53
--- /dev/null
+++ b/meta/recipes-devtools/python/python3-pyparsing_3.1.2.bb
@@ -0,0 +1,30 @@
+SUMMARY = "Python parsing module"
+DESCRIPTION = "The pyparsing module is an alternative approach to creating \
+and executing simple grammars, vs. the traditional lex/yacc approach, or \
+the use of regular expressions. The pyparsing module provides a library of \
+classes that client code uses to construct the grammar directly in Python \
+code."
+HOMEPAGE = "https://github.com/pyparsing/pyparsing/"
+BUGTRACKER = "https://github.com/pyparsing/pyparsing/issues"
+
+LICENSE = "MIT"
+LIC_FILES_CHKSUM = "file://LICENSE;md5=657a566233888513e1f07ba13e2f47f1"
+
+SRC_URI[sha256sum] = "a1bac0ce561155ecc3ed78ca94d3c9378656ad4c94c1270de543f621420f94ad"
+
+UPSTREAM_CHECK_REGEX = "pyparsing-(?P<pver>.*)\.tar"
+
+inherit pypi python_flit_core
+
+RDEPENDS:${PN} += " \
+ python3-datetime \
+ python3-debugger \
+ python3-html \
+ python3-json \
+ python3-netclient \
+ python3-pprint \
+ python3-stringold \
+ python3-threading \
+"
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/python/python3-pyproject-hooks_1.0.0.bb b/meta/recipes-devtools/python/python3-pyproject-hooks_1.0.0.bb
new file mode 100644
index 0000000000..9d08e7acc4
--- /dev/null
+++ b/meta/recipes-devtools/python/python3-pyproject-hooks_1.0.0.bb
@@ -0,0 +1,26 @@
+SUMMARY = "A low-level library for calling build-backends in pyproject.toml-based projects"
+HOMEPAGE = "https://github.com/pypa/pyproject-hooks"
+LICENSE = "MIT"
+LIC_FILES_CHKSUM = "file://LICENSE;md5=aad69c93f605003e3342b174d9b0708c"
+
+SRC_URI[sha256sum] = "f271b298b97f5955d53fb12b72c1fb1948c22c1a6b70b315c54cedaca0264ef5"
+
+inherit pypi python_flit_core
+
+PYPI_PACKAGE = "pyproject_hooks"
+
+BBCLASSEXTEND = "native nativesdk"
+
+# Bootstrap the native build
+DEPENDS:remove:class-native = "python3-build-native"
+
+RDEPENDS:${PN} += " \
+ python3-io \
+ python3-json \
+"
+
+do_compile:class-native () {
+ python_flit_core_do_manual_build
+}
+
+UPSTREAM_CHECK_PYPI_PACKAGE = "${PYPI_PACKAGE}"
diff --git a/meta/recipes-devtools/python/python3-pyproject-metadata_0.7.1.bb b/meta/recipes-devtools/python/python3-pyproject-metadata_0.7.1.bb
new file mode 100644
index 0000000000..8b9549f3d0
--- /dev/null
+++ b/meta/recipes-devtools/python/python3-pyproject-metadata_0.7.1.bb
@@ -0,0 +1,27 @@
+SUMMARY = "PEP 621 metadata parsing"
+DESCRIPTION = "Dataclass for PEP 621 metadata with support for core \
+metadata generation \
+\
+This project does not implement the parsing of pyproject.toml containing \
+PEP 621 metadata.\
+\
+Instead, given a Python data structure representing PEP 621 metadata \
+(already parsed), it will validate this input and generate a \
+PEP 643-compliant metadata file (e.g. PKG-INFO)."
+HOMEPAGE = "https://github.com/FFY00/python-pyproject-metadata"
+LICENSE = "MIT"
+LIC_FILES_CHKSUM = "file://LICENSE;md5=310439af287b0fb4780b2ad6907c256c"
+
+PYPI_PACKAGE = "pyproject-metadata"
+
+inherit pypi python_setuptools_build_meta
+
+SRC_URI[sha256sum] = "0a94f18b108b9b21f3a26a3d541f056c34edcb17dc872a144a15618fed7aef67"
+
+RDEPENDS:${PN} += " \
+ python3-logging \
+ python3-packaging \
+ python3-profile \
+"
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/python/python3-pyrsistent_0.18.1.bb b/meta/recipes-devtools/python/python3-pyrsistent_0.18.1.bb
deleted file mode 100644
index b4d9e3af39..0000000000
--- a/meta/recipes-devtools/python/python3-pyrsistent_0.18.1.bb
+++ /dev/null
@@ -1,14 +0,0 @@
-SUMMARY = "Persistent/Immutable/Functional data structures for Python"
-HOMEPAGE = "https://github.com/tobgu/pyrsistent"
-LICENSE = "MIT"
-LIC_FILES_CHKSUM = "file://LICENSE.mit;md5=b695eb9c6e7a6fb1b1bc2d193c42776e"
-
-SRC_URI[sha256sum] = "d4d61f8b993a7255ba714df3aca52700f8125289f84f704cf80916517c46eb96"
-
-inherit pypi python_setuptools_build_meta
-
-RDEPENDS:${PN} += " \
- ${PYTHON_PN}-numbers \
-"
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/python/python3-pyrsistent_0.20.0.bb b/meta/recipes-devtools/python/python3-pyrsistent_0.20.0.bb
new file mode 100644
index 0000000000..7de70942a9
--- /dev/null
+++ b/meta/recipes-devtools/python/python3-pyrsistent_0.20.0.bb
@@ -0,0 +1,14 @@
+SUMMARY = "Persistent/Immutable/Functional data structures for Python"
+HOMEPAGE = "https://github.com/tobgu/pyrsistent"
+LICENSE = "MIT"
+LIC_FILES_CHKSUM = "file://LICENSE.mit;md5=f798dc4222a29fea881fa998cdf4a8c8"
+
+SRC_URI[sha256sum] = "4c48f78f62ab596c679086084d0dd13254ae4f3d6c72a83ffdf5ebdef8f265a4"
+
+inherit pypi python_setuptools_build_meta
+
+RDEPENDS:${PN} += " \
+ python3-numbers \
+"
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/python/python3-pysocks_1.7.1.bb b/meta/recipes-devtools/python/python3-pysocks_1.7.1.bb
index 9144401703..7f2c217f19 100644
--- a/meta/recipes-devtools/python/python3-pysocks_1.7.1.bb
+++ b/meta/recipes-devtools/python/python3-pysocks_1.7.1.bb
@@ -1,4 +1,4 @@
-DESCRIPTION = "A Python SOCKS client module"
+SUMMARY = "A Python SOCKS client module"
HOMEPAGE = "http://python-requests.org"
LICENSE = "BSD-3-Clause"
LIC_FILES_CHKSUM = "file://LICENSE;md5=1d457bcffb9661b45f799d4efee72f16"
@@ -10,11 +10,11 @@ PYPI_PACKAGE = "PySocks"
inherit pypi setuptools3
RDEPENDS:${PN}:class-target += "\
- ${PYTHON_PN}-email \
- ${PYTHON_PN}-io \
- ${PYTHON_PN}-logging \
- ${PYTHON_PN}-netclient \
- ${PYTHON_PN}-shell \
+ python3-email \
+ python3-io \
+ python3-logging \
+ python3-netclient \
+ python3-shell \
"
BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/python/python3-pytest-runner_6.0.0.bb b/meta/recipes-devtools/python/python3-pytest-runner_6.0.0.bb
deleted file mode 100644
index f7a2c1bb95..0000000000
--- a/meta/recipes-devtools/python/python3-pytest-runner_6.0.0.bb
+++ /dev/null
@@ -1,16 +0,0 @@
-SUMMARY = "Invoke py.test as distutils command with dependency resolution"
-HOMEPAGE = "https://pypi.org/project/pytest-runner/"
-LICENSE = "MIT"
-LIC_FILES_CHKSUM = "file://LICENSE;md5=7a7126e068206290f3fe9f8d6c713ea6"
-
-SRC_URI[sha256sum] = "b4d85362ed29b4c348678de797df438f0f0509497ddb8c647096c02a6d87b685"
-
-inherit pypi python_setuptools_build_meta
-
-DEPENDS += " \
- ${PYTHON_PN}-setuptools-scm-native"
-
-RDEPENDS:${PN} = "${PYTHON_PN}-py ${PYTHON_PN}-setuptools ${PYTHON_PN}-debugger ${PYTHON_PN}-json \
- ${PYTHON_PN}-io ${PYTHON_PN}-distutils"
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/python/python3-pytest-runner_6.0.1.bb b/meta/recipes-devtools/python/python3-pytest-runner_6.0.1.bb
new file mode 100644
index 0000000000..cce9ce33e2
--- /dev/null
+++ b/meta/recipes-devtools/python/python3-pytest-runner_6.0.1.bb
@@ -0,0 +1,16 @@
+SUMMARY = "Invoke py.test as distutils command with dependency resolution"
+HOMEPAGE = "https://pypi.org/project/pytest-runner/"
+LICENSE = "MIT"
+LIC_FILES_CHKSUM = "file://LICENSE;md5=7a7126e068206290f3fe9f8d6c713ea6"
+
+SRC_URI[sha256sum] = "70d4739585a7008f37bf4933c013fdb327b8878a5a69fcbb3316c88882f0f49b"
+
+inherit pypi python_setuptools_build_meta
+
+DEPENDS += " \
+ python3-setuptools-scm-native"
+
+RDEPENDS:${PN} = "python3-py python3-setuptools python3-debugger python3-json \
+ python3-io"
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/python/python3-pytest-subtests_0.12.1.bb b/meta/recipes-devtools/python/python3-pytest-subtests_0.12.1.bb
new file mode 100644
index 0000000000..0590be705f
--- /dev/null
+++ b/meta/recipes-devtools/python/python3-pytest-subtests_0.12.1.bb
@@ -0,0 +1,20 @@
+SUMMARY = "unittest subTest() support and subtests fixture."
+DESCRIPTION = "Adds support for TestCase.subTest.\
+New subtests fixture, providing similar functionality for pure pytest tests."
+HOMEPAGE = "https://github.com/pytest-dev/pytest-subtests"
+BUGTRACKER = "https://github.com/pytest-dev/pytest-subtests/issues"
+
+LICENSE = "MIT"
+LIC_FILES_CHKSUM = "file://LICENSE;md5=242b4e17fa287dcf7aef372f6bc3dcb1"
+
+SRC_URI[sha256sum] = "d6605dcb88647e0b7c1889d027f8ef1c17d7a2c60927ebfdc09c7b0d8120476d"
+
+inherit pypi python_setuptools_build_meta
+
+DEPENDS += "python3-setuptools-scm-native"
+
+RDEPENDS:${PN} += " \
+ python3-pytest \
+"
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/python/python3-pytest-subtests_0.7.0.bb b/meta/recipes-devtools/python/python3-pytest-subtests_0.7.0.bb
deleted file mode 100644
index 060a6ff6cf..0000000000
--- a/meta/recipes-devtools/python/python3-pytest-subtests_0.7.0.bb
+++ /dev/null
@@ -1,20 +0,0 @@
-SUMMARY = "unittest subTest() support and subtests fixture."
-DESCRIPTION = "Adds support for TestCase.subTest.\
-New subtests fixture, providing similar functionality for pure pytest tests."
-HOMEPAGE = "https://github.com/pytest-dev/pytest-subtests"
-BUGTRACKER = "https://github.com/pytest-dev/pytest-subtests/issues"
-
-LICENSE = "MIT"
-LIC_FILES_CHKSUM = "file://LICENSE;md5=242b4e17fa287dcf7aef372f6bc3dcb1"
-
-SRC_URI[sha256sum] = "95c44c77e3fbede9848bb88ca90b384815fcba8090ef9a9f55659ab163b1681c"
-
-inherit pypi setuptools3
-
-DEPENDS += "${PYTHON_PN}-setuptools-scm-native"
-
-RDEPENDS:${PN} += " \
- ${PYTHON_PN}-pytest \
-"
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/python/python3-pytest_7.1.2.bb b/meta/recipes-devtools/python/python3-pytest_7.1.2.bb
deleted file mode 100644
index c642d9a232..0000000000
--- a/meta/recipes-devtools/python/python3-pytest_7.1.2.bb
+++ /dev/null
@@ -1,41 +0,0 @@
-SUMMARY = "Simple powerful testing with python"
-HOMEPAGE = "https://pypi.org/project/pytest/"
-DESCRIPTION = "The pytest framework makes it easy to write small tests, yet scales to support complex functional testing for applications and libraries."
-
-LICENSE = "MIT"
-LIC_FILES_CHKSUM = "file://LICENSE;md5=bd27e41b6550fe0fc45356d1d81ee37c"
-
-SRC_URI[sha256sum] = "a06a0425453864a270bc45e71f783330a7428defb4230fb5e6a731fde06ecd45"
-
-DEPENDS += "python3-setuptools-scm-native"
-
-inherit update-alternatives pypi python_setuptools_build_meta
-
-RDEPENDS:${PN}:class-target += " \
- ${PYTHON_PN}-atomicwrites \
- ${PYTHON_PN}-attrs \
- ${PYTHON_PN}-debugger \
- ${PYTHON_PN}-doctest \
- ${PYTHON_PN}-importlib-metadata \
- ${PYTHON_PN}-iniconfig \
- ${PYTHON_PN}-json \
- ${PYTHON_PN}-more-itertools \
- ${PYTHON_PN}-packaging \
- ${PYTHON_PN}-pathlib2 \
- ${PYTHON_PN}-pluggy \
- ${PYTHON_PN}-py \
- ${PYTHON_PN}-setuptools \
- ${PYTHON_PN}-six \
- ${PYTHON_PN}-toml \
- ${PYTHON_PN}-wcwidth \
-"
-
-ALTERNATIVE:${PN} += "py.test pytest"
-
-NATIVE_LINK_NAME[pytest] = "${bindir}/pytest"
-ALTERNATIVE_TARGET[pytest] = "${bindir}/pytest"
-
-ALTERNATIVE_LINK_NAME[py.test] = "${bindir}/py.test"
-ALTERNATIVE_TARGET[py.test] = "${bindir}/py.test"
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/python/python3-pytest_8.1.1.bb b/meta/recipes-devtools/python/python3-pytest_8.1.1.bb
new file mode 100644
index 0000000000..b1cf23fbc4
--- /dev/null
+++ b/meta/recipes-devtools/python/python3-pytest_8.1.1.bb
@@ -0,0 +1,41 @@
+SUMMARY = "Simple powerful testing with python"
+HOMEPAGE = "https://pypi.org/project/pytest/"
+DESCRIPTION = "The pytest framework makes it easy to write small tests, yet scales to support complex functional testing for applications and libraries."
+
+LICENSE = "MIT"
+LIC_FILES_CHKSUM = "file://LICENSE;md5=bd27e41b6550fe0fc45356d1d81ee37c"
+
+SRC_URI[sha256sum] = "ac978141a75948948817d360297b7aae0fcb9d6ff6bc9ec6d514b85d5a65c044"
+
+DEPENDS += "python3-setuptools-scm-native"
+
+inherit update-alternatives pypi python_setuptools_build_meta
+
+RDEPENDS:${PN} += " \
+ python3-atomicwrites \
+ python3-attrs \
+ python3-debugger \
+ python3-doctest \
+ python3-importlib-metadata \
+ python3-iniconfig \
+ python3-json \
+ python3-more-itertools \
+ python3-packaging \
+ python3-pathlib2 \
+ python3-pluggy \
+ python3-py \
+ python3-setuptools \
+ python3-six \
+ python3-tomllib \
+ python3-wcwidth \
+"
+
+ALTERNATIVE:${PN} += "py.test pytest"
+
+NATIVE_LINK_NAME[pytest] = "${bindir}/pytest"
+ALTERNATIVE_TARGET[pytest] = "${bindir}/pytest"
+
+ALTERNATIVE_LINK_NAME[py.test] = "${bindir}/py.test"
+ALTERNATIVE_TARGET[py.test] = "${bindir}/py.test"
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/python/python3-pytz/run-ptest b/meta/recipes-devtools/python/python3-pytz/run-ptest
index b63c4de0d9..8d2017d39c 100644
--- a/meta/recipes-devtools/python/python3-pytz/run-ptest
+++ b/meta/recipes-devtools/python/python3-pytz/run-ptest
@@ -1,3 +1,3 @@
#!/bin/sh
-pytest -o log_cli=true -o log_cli_level=INFO | sed -e 's/\[...%\]//g'| sed -e 's/PASSED/PASS/g'| sed -e 's/FAILED/FAIL/g'|sed -e 's/SKIPPED/SKIP/g'| awk '{if ($NF=="PASS" || $NF=="FAIL" || $NF=="SKIP" || $NF=="XFAIL" || $NF=="XPASS"){printf "%s: %s\n", $NF, $0}else{print}}'| awk '{if ($NF=="PASS" || $NF=="FAIL" || $NF=="SKIP" || $NF=="XFAIL" || $NF=="XPASS") {$NF="";print $0}else{print}}'
+pytest --automake
diff --git a/meta/recipes-devtools/python/python3-pytz_2022.1.bb b/meta/recipes-devtools/python/python3-pytz_2022.1.bb
deleted file mode 100644
index a4bb4f5c8f..0000000000
--- a/meta/recipes-devtools/python/python3-pytz_2022.1.bb
+++ /dev/null
@@ -1,35 +0,0 @@
-SUMMARY = "World timezone definitions, modern and historical"
-HOMEPAGE = "http://pythonhosted.org/pytz"
-LICENSE = "MIT"
-LIC_FILES_CHKSUM = "file://LICENSE.txt;md5=1a67fc46c1b596cce5d21209bbe75999"
-
-inherit pypi setuptools3 ptest
-
-SRC_URI[sha256sum] = "1e760e2fe6a8163bc0b3d9a19c4f84342afa0a2affebfaa84b01b978a02ecaa7"
-
-RDEPENDS:${PN}:class-target += "\
- ${PYTHON_PN}-datetime \
- ${PYTHON_PN}-doctest \
- ${PYTHON_PN}-io \
- ${PYTHON_PN}-pickle \
- ${PYTHON_PN}-pprint \
- ${PYTHON_PN}-threading \
-"
-
-BBCLASSEXTEND = "native nativesdk"
-
-SRC_URI += " \
- file://run-ptest \
-"
-
-RDEPENDS:${PN}-ptest += " \
- ${PYTHON_PN}-pytest \
-"
-
-do_install_ptest() {
- install -d ${D}${PTEST_PATH}/pytz
- install -d ${D}${PTEST_PATH}/pytz/tests
- cp -rf ${S}/pytz/tests/* ${D}${PTEST_PATH}/pytz/tests/
- cp -f ${S}/README.rst ${D}${PTEST_PATH}/
-
-}
diff --git a/meta/recipes-devtools/python/python3-pytz_2024.1.bb b/meta/recipes-devtools/python/python3-pytz_2024.1.bb
new file mode 100644
index 0000000000..158e800fed
--- /dev/null
+++ b/meta/recipes-devtools/python/python3-pytz_2024.1.bb
@@ -0,0 +1,36 @@
+SUMMARY = "World timezone definitions, modern and historical"
+HOMEPAGE = "http://pythonhosted.org/pytz"
+LICENSE = "MIT"
+LIC_FILES_CHKSUM = "file://LICENSE.txt;md5=1a67fc46c1b596cce5d21209bbe75999"
+
+inherit pypi setuptools3 ptest
+
+SRC_URI[sha256sum] = "2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812"
+
+RDEPENDS:${PN}:class-target += "\
+ python3-datetime \
+ python3-doctest \
+ python3-io \
+ python3-pickle \
+ python3-pprint \
+ python3-threading \
+"
+
+BBCLASSEXTEND = "native nativesdk"
+
+SRC_URI += " \
+ file://run-ptest \
+"
+
+RDEPENDS:${PN}-ptest += " \
+ python3-pytest \
+ python3-unittest-automake-output \
+"
+
+do_install_ptest() {
+ install -d ${D}${PTEST_PATH}/pytz
+ install -d ${D}${PTEST_PATH}/pytz/tests
+ cp -rf ${S}/pytz/tests/* ${D}${PTEST_PATH}/pytz/tests/
+ cp -f ${S}/README.rst ${D}${PTEST_PATH}/
+
+}
diff --git a/meta/recipes-devtools/python/python3-pyyaml/0001-Fix-builds-with-Cython-3.patch b/meta/recipes-devtools/python/python3-pyyaml/0001-Fix-builds-with-Cython-3.patch
new file mode 100644
index 0000000000..a87d588b6a
--- /dev/null
+++ b/meta/recipes-devtools/python/python3-pyyaml/0001-Fix-builds-with-Cython-3.patch
@@ -0,0 +1,54 @@
+From 9cc23db56add79357b8f8257fe6fc0d6879d4579 Mon Sep 17 00:00:00 2001
+From: "Andrew J. Hesford" <ajh@sideband.org>
+Date: Fri, 21 Jul 2023 09:50:00 -0400
+Subject: [PATCH] Fix builds with Cython 3
+
+This is a *de minimis* fix for building with Cython 3. Recent Cython<3
+releases provided `Cython.Distutils.build_ext` as an alias to
+`Cython.Distutils.old_build_ext.old_build_ext`; Cython 3 drops this
+alias and instead uses a wholly new `Cython.Distutils.build_ext` that
+does not provide the `cython_sources` function used in `setup.py`.
+
+Explicitly importing `old_build_ext` preserves the existing behavior for
+recent Cython<3 and uses the correct behavior for Cython 3. Should the
+import fail (*e.g.*, because the version of Cython available predates
+the availability of `old_build_ext`), the import falls back to just
+`Cython.Distutils.build_ext`.
+
+Signed-off-by: Andrew J. Hesford <ajh@sideband.org>
+Upstream-Status: Denied [https://github.com/yaml/pyyaml/pull/731]
+Signed-off-by: Alexander Kanavin <alex@linutronix.de>
+---
+ pyproject.toml | 2 +-
+ setup.py | 6 +++++-
+ 2 files changed, 6 insertions(+), 2 deletions(-)
+
+diff --git a/pyproject.toml b/pyproject.toml
+index 4bc04c0..2bf5ec8 100644
+--- a/pyproject.toml
++++ b/pyproject.toml
+@@ -1,3 +1,3 @@
+ [build-system]
+-requires = ["setuptools", "wheel", "Cython<3.0"]
++requires = ["setuptools", "wheel", "Cython"]
+ build-backend = "setuptools.build_meta"
+diff --git a/setup.py b/setup.py
+index 65b0ea0..4461580 100644
+--- a/setup.py
++++ b/setup.py
+@@ -82,7 +82,11 @@ if 'sdist' in sys.argv or os.environ.get('PYYAML_FORCE_CYTHON') == '1':
+ with_cython = True
+ try:
+ from Cython.Distutils.extension import Extension as _Extension
+- from Cython.Distutils import build_ext as _build_ext
++ try:
++ from Cython.Distutils.old_build_ext import old_build_ext as _build_ext
++ except ImportError:
++ from Cython.Distutils import build_ext as _build_ext
++
+ with_cython = True
+ except ImportError:
+ if with_cython:
+--
+2.39.2
+
diff --git a/meta/recipes-devtools/python/python3-pyyaml/run-ptest b/meta/recipes-devtools/python/python3-pyyaml/run-ptest
new file mode 100644
index 0000000000..8d2017d39c
--- /dev/null
+++ b/meta/recipes-devtools/python/python3-pyyaml/run-ptest
@@ -0,0 +1,3 @@
+#!/bin/sh
+
+pytest --automake
diff --git a/meta/recipes-devtools/python/python3-pyyaml_6.0.1.bb b/meta/recipes-devtools/python/python3-pyyaml_6.0.1.bb
new file mode 100644
index 0000000000..3388312557
--- /dev/null
+++ b/meta/recipes-devtools/python/python3-pyyaml_6.0.1.bb
@@ -0,0 +1,40 @@
+SUMMARY = "Python support for YAML"
+DEPENDS += "libyaml python3-cython-native"
+HOMEPAGE = "https://pyyaml.org/"
+
+LICENSE = "MIT"
+LIC_FILES_CHKSUM = "file://LICENSE;md5=6d8242660a8371add5fe547adf083079"
+
+PYPI_PACKAGE = "PyYAML"
+
+inherit pypi python_setuptools_build_meta
+
+SRC_URI += "file://0001-Fix-builds-with-Cython-3.patch"
+SRC_URI[sha256sum] = "bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"
+
+PACKAGECONFIG ?= "libyaml"
+PACKAGECONFIG[libyaml] = "--with-libyaml,--without-libyaml,libyaml"
+
+RDEPENDS:${PN} += "\
+ python3-datetime \
+ python3-netclient \
+"
+
+inherit ptest
+SRC_URI += "\
+ https://raw.githubusercontent.com/yaml/pyyaml/a98fd6088e81d7aca571220c966bbfe2ac43c335/tests/test_dump_load.py;name=test \
+ file://run-ptest \
+"
+SRC_URI[test.sha256sum] = "b6a8a2825d89fdc8aee226560f66b8196e872012a0ea7118cbef1a832359434a"
+
+RDEPENDS:${PN}-ptest += " \
+ python3-pytest \
+ python3-unittest-automake-output \
+"
+
+do_install_ptest() {
+ install -d ${D}${PTEST_PATH}/tests
+ cp -rf ${WORKDIR}/test_dump_load.py ${D}${PTEST_PATH}/tests/
+}
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/python/python3-pyyaml_6.0.bb b/meta/recipes-devtools/python/python3-pyyaml_6.0.bb
deleted file mode 100644
index d142a0fc3e..0000000000
--- a/meta/recipes-devtools/python/python3-pyyaml_6.0.bb
+++ /dev/null
@@ -1,19 +0,0 @@
-SUMMARY = "Python support for YAML"
-DEPENDS += "libyaml ${PYTHON_PN}-cython-native"
-HOMEPAGE = "https://pyyaml.org/"
-
-LICENSE = "MIT"
-LIC_FILES_CHKSUM = "file://LICENSE;md5=6d8242660a8371add5fe547adf083079"
-
-PYPI_PACKAGE = "PyYAML"
-
-inherit pypi python_setuptools_build_meta
-
-SRC_URI[sha256sum] = "68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2"
-
-RDEPENDS:${PN} += "\
- ${PYTHON_PN}-datetime \
- ${PYTHON_PN}-netclient \
-"
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/python/python3-rdflib_7.0.0.bb b/meta/recipes-devtools/python/python3-rdflib_7.0.0.bb
new file mode 100644
index 0000000000..4057c1dbbc
--- /dev/null
+++ b/meta/recipes-devtools/python/python3-rdflib_7.0.0.bb
@@ -0,0 +1,21 @@
+SUMMARY = "RDFLib is a pure Python package for working with RDF"
+HOMEPAGE = "https://github.com/RDFLib/rdflib"
+SECTION = "devel/python"
+LICENSE = "BSD-3-Clause"
+LIC_FILES_CHKSUM = "file://LICENSE;md5=37d489c0cefe52a17e1d5007e196464a"
+
+SRC_URI[sha256sum] = "9995eb8569428059b8c1affd26b25eac510d64f5043d9ce8c84e0d0036e995ae"
+
+inherit pypi python_poetry_core
+
+RDEPENDS:${PN} += " \
+ python3-isodate \
+ python3-pyparsing \
+ python3-logging \
+ python3-numbers \
+ python3-xml \
+ python3-compression \
+ python3-core \
+"
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/python/python3-referencing_0.34.0.bb b/meta/recipes-devtools/python/python3-referencing_0.34.0.bb
new file mode 100644
index 0000000000..6fbd10d9cf
--- /dev/null
+++ b/meta/recipes-devtools/python/python3-referencing_0.34.0.bb
@@ -0,0 +1,14 @@
+SUMMARY = "An implementation-agnostic implementation of JSON reference resolution."
+HOMEPAGE = "https://github.com/python-jsonschema/referencing"
+LICENSE = "MIT"
+LIC_FILES_CHKSUM = "file://COPYING;md5=93eb9740964b59e9ba30281255b044e2"
+
+SRC_URI[sha256sum] = "5773bd84ef41799a5a8ca72dc34590c041eb01bf9aa02632b4a973fb0181a844"
+
+inherit pypi python_hatchling
+
+DEPENDS += "python3-hatch-vcs-native"
+
+RDEPENDS:${PN} += "python3-rpds-py"
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/python/python3-requests_2.27.1.bb b/meta/recipes-devtools/python/python3-requests_2.27.1.bb
deleted file mode 100644
index af52b7caf5..0000000000
--- a/meta/recipes-devtools/python/python3-requests_2.27.1.bb
+++ /dev/null
@@ -1,25 +0,0 @@
-DESCRIPTION = "Python HTTP for Humans."
-HOMEPAGE = "http://python-requests.org"
-LICENSE = "Apache-2.0"
-LIC_FILES_CHKSUM = "file://LICENSE;md5=34400b68072d710fecd0a2940a0d1658"
-
-SRC_URI[sha256sum] = "68d7c56fd5a8999887728ef304a6d12edc7be74f1cfa47714fc8b414525c9a61"
-
-inherit pypi setuptools3
-
-RDEPENDS:${PN} += " \
- ${PYTHON_PN}-email \
- ${PYTHON_PN}-json \
- ${PYTHON_PN}-ndg-httpsclient \
- ${PYTHON_PN}-netserver \
- ${PYTHON_PN}-pyasn1 \
- ${PYTHON_PN}-pyopenssl \
- ${PYTHON_PN}-pysocks \
- ${PYTHON_PN}-urllib3 \
- ${PYTHON_PN}-chardet \
- ${PYTHON_PN}-idna \
-"
-
-CVE_PRODUCT = "requests"
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/python/python3-requests_2.31.0.bb b/meta/recipes-devtools/python/python3-requests_2.31.0.bb
new file mode 100644
index 0000000000..df48cd54c3
--- /dev/null
+++ b/meta/recipes-devtools/python/python3-requests_2.31.0.bb
@@ -0,0 +1,26 @@
+SUMMARY = "Python HTTP for Humans."
+HOMEPAGE = "http://python-requests.org"
+LICENSE = "Apache-2.0"
+LIC_FILES_CHKSUM = "file://LICENSE;md5=34400b68072d710fecd0a2940a0d1658"
+
+SRC_URI[sha256sum] = "942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"
+
+inherit pypi setuptools3
+
+RDEPENDS:${PN} += " \
+ python3-email \
+ python3-json \
+ python3-ndg-httpsclient \
+ python3-netserver \
+ python3-pyasn1 \
+ python3-pyopenssl \
+ python3-pysocks \
+ python3-urllib3 \
+ python3-chardet \
+ python3-idna \
+ python3-compression \
+"
+
+CVE_PRODUCT = "requests"
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/python/python3-rfc3339-validator_0.1.4.bb b/meta/recipes-devtools/python/python3-rfc3339-validator_0.1.4.bb
index 9bc5fed921..e809c2280d 100644
--- a/meta/recipes-devtools/python/python3-rfc3339-validator_0.1.4.bb
+++ b/meta/recipes-devtools/python/python3-rfc3339-validator_0.1.4.bb
@@ -13,9 +13,9 @@ UPSTREAM_CHECK_REGEX = "/rfc3339-validator/(?P<pver>(\d+[\.\-_]*)+)/"
inherit pypi setuptools3
RDEPENDS:${PN} += "\
- ${PYTHON_PN}-core \
- ${PYTHON_PN}-datetime \
- ${PYTHON_PN}-six \
+ python3-core \
+ python3-datetime \
+ python3-six \
"
BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/python/python3-rfc3986-validator_0.1.1.bb b/meta/recipes-devtools/python/python3-rfc3986-validator_0.1.1.bb
index 4abd181acf..e374979cb4 100644
--- a/meta/recipes-devtools/python/python3-rfc3986-validator_0.1.1.bb
+++ b/meta/recipes-devtools/python/python3-rfc3986-validator_0.1.1.bb
@@ -13,7 +13,7 @@ UPSTREAM_CHECK_REGEX = "/rfc3986-validator/(?P<pver>(\d+[\.\-_]*)+)/"
inherit pypi setuptools3
-SRC_URI:append = " \
+SRC_URI += "\
file://0001-setup.py-move-pytest-runner-to-test_requirements.patch \
"
diff --git a/meta/recipes-devtools/python/python3-rpds-py-crates.inc b/meta/recipes-devtools/python/python3-rpds-py-crates.inc
new file mode 100644
index 0000000000..b9048bde51
--- /dev/null
+++ b/meta/recipes-devtools/python/python3-rpds-py-crates.inc
@@ -0,0 +1,80 @@
+# Autogenerated with 'bitbake -c update_crates python3-rpds-py'
+
+# from Cargo.lock
+SRC_URI += " \
+ crate://crates.io/archery/1.1.0 \
+ crate://crates.io/autocfg/1.1.0 \
+ crate://crates.io/bitflags/1.3.2 \
+ crate://crates.io/cfg-if/1.0.0 \
+ crate://crates.io/heck/0.4.1 \
+ crate://crates.io/indoc/2.0.4 \
+ crate://crates.io/libc/0.2.147 \
+ crate://crates.io/lock_api/0.4.10 \
+ crate://crates.io/memoffset/0.9.0 \
+ crate://crates.io/once_cell/1.18.0 \
+ crate://crates.io/parking_lot/0.12.1 \
+ crate://crates.io/parking_lot_core/0.9.8 \
+ crate://crates.io/proc-macro2/1.0.66 \
+ crate://crates.io/pyo3/0.20.2 \
+ crate://crates.io/pyo3-build-config/0.20.2 \
+ crate://crates.io/pyo3-ffi/0.20.2 \
+ crate://crates.io/pyo3-macros/0.20.2 \
+ crate://crates.io/pyo3-macros-backend/0.20.2 \
+ crate://crates.io/quote/1.0.31 \
+ crate://crates.io/redox_syscall/0.3.5 \
+ crate://crates.io/rpds/1.1.0 \
+ crate://crates.io/scopeguard/1.1.0 \
+ crate://crates.io/smallvec/1.11.0 \
+ crate://crates.io/static_assertions/1.1.0 \
+ crate://crates.io/syn/2.0.32 \
+ crate://crates.io/target-lexicon/0.12.9 \
+ crate://crates.io/triomphe/0.1.9 \
+ crate://crates.io/unicode-ident/1.0.11 \
+ crate://crates.io/unindent/0.2.3 \
+ crate://crates.io/windows-targets/0.48.1 \
+ crate://crates.io/windows_aarch64_gnullvm/0.48.0 \
+ crate://crates.io/windows_aarch64_msvc/0.48.0 \
+ crate://crates.io/windows_i686_gnu/0.48.0 \
+ crate://crates.io/windows_i686_msvc/0.48.0 \
+ crate://crates.io/windows_x86_64_gnu/0.48.0 \
+ crate://crates.io/windows_x86_64_gnullvm/0.48.0 \
+ crate://crates.io/windows_x86_64_msvc/0.48.0 \
+"
+
+SRC_URI[archery-1.1.0.sha256sum] = "487955f60962765486ce000015a3492ca45c34a2ebbf12bc0aa2b5110ca6e7d2"
+SRC_URI[autocfg-1.1.0.sha256sum] = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa"
+SRC_URI[bitflags-1.3.2.sha256sum] = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a"
+SRC_URI[cfg-if-1.0.0.sha256sum] = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
+SRC_URI[heck-0.4.1.sha256sum] = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8"
+SRC_URI[indoc-2.0.4.sha256sum] = "1e186cfbae8084e513daff4240b4797e342f988cecda4fb6c939150f96315fd8"
+SRC_URI[libc-0.2.147.sha256sum] = "b4668fb0ea861c1df094127ac5f1da3409a82116a4ba74fca2e58ef927159bb3"
+SRC_URI[lock_api-0.4.10.sha256sum] = "c1cc9717a20b1bb222f333e6a92fd32f7d8a18ddc5a3191a11af45dcbf4dcd16"
+SRC_URI[memoffset-0.9.0.sha256sum] = "5a634b1c61a95585bd15607c6ab0c4e5b226e695ff2800ba0cdccddf208c406c"
+SRC_URI[once_cell-1.18.0.sha256sum] = "dd8b5dd2ae5ed71462c540258bedcb51965123ad7e7ccf4b9a8cafaa4a63576d"
+SRC_URI[parking_lot-0.12.1.sha256sum] = "3742b2c103b9f06bc9fff0a37ff4912935851bee6d36f3c02bcc755bcfec228f"
+SRC_URI[parking_lot_core-0.9.8.sha256sum] = "93f00c865fe7cabf650081affecd3871070f26767e7b2070a3ffae14c654b447"
+SRC_URI[proc-macro2-1.0.66.sha256sum] = "18fb31db3f9bddb2ea821cde30a9f70117e3f119938b5ee630b7403aa6e2ead9"
+SRC_URI[pyo3-0.20.2.sha256sum] = "9a89dc7a5850d0e983be1ec2a463a171d20990487c3cfcd68b5363f1ee3d6fe0"
+SRC_URI[pyo3-build-config-0.20.2.sha256sum] = "07426f0d8fe5a601f26293f300afd1a7b1ed5e78b2a705870c5f30893c5163be"
+SRC_URI[pyo3-ffi-0.20.2.sha256sum] = "dbb7dec17e17766b46bca4f1a4215a85006b4c2ecde122076c562dd058da6cf1"
+SRC_URI[pyo3-macros-0.20.2.sha256sum] = "05f738b4e40d50b5711957f142878cfa0f28e054aa0ebdfc3fd137a843f74ed3"
+SRC_URI[pyo3-macros-backend-0.20.2.sha256sum] = "0fc910d4851847827daf9d6cdd4a823fbdaab5b8818325c5e97a86da79e8881f"
+SRC_URI[quote-1.0.31.sha256sum] = "5fe8a65d69dd0808184ebb5f836ab526bb259db23c657efa38711b1072ee47f0"
+SRC_URI[redox_syscall-0.3.5.sha256sum] = "567664f262709473930a4bf9e51bf2ebf3348f2e748ccc50dea20646858f8f29"
+SRC_URI[rpds-1.1.0.sha256sum] = "a0e15515d3ce3313324d842629ea4905c25a13f81953eadb88f85516f59290a4"
+SRC_URI[scopeguard-1.1.0.sha256sum] = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd"
+SRC_URI[smallvec-1.11.0.sha256sum] = "62bb4feee49fdd9f707ef802e22365a35de4b7b299de4763d44bfea899442ff9"
+SRC_URI[static_assertions-1.1.0.sha256sum] = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f"
+SRC_URI[syn-2.0.32.sha256sum] = "239814284fd6f1a4ffe4ca893952cdd93c224b6a1571c9a9eadd670295c0c9e2"
+SRC_URI[target-lexicon-0.12.9.sha256sum] = "df8e77cb757a61f51b947ec4a7e3646efd825b73561db1c232a8ccb639e611a0"
+SRC_URI[triomphe-0.1.9.sha256sum] = "0eee8098afad3fb0c54a9007aab6804558410503ad676d4633f9c2559a00ac0f"
+SRC_URI[unicode-ident-1.0.11.sha256sum] = "301abaae475aa91687eb82514b328ab47a211a533026cb25fc3e519b86adfc3c"
+SRC_URI[unindent-0.2.3.sha256sum] = "c7de7d73e1754487cb58364ee906a499937a0dfabd86bcb980fa99ec8c8fa2ce"
+SRC_URI[windows-targets-0.48.1.sha256sum] = "05d4b17490f70499f20b9e791dcf6a299785ce8af4d709018206dc5b4953e95f"
+SRC_URI[windows_aarch64_gnullvm-0.48.0.sha256sum] = "91ae572e1b79dba883e0d315474df7305d12f569b400fcf90581b06062f7e1bc"
+SRC_URI[windows_aarch64_msvc-0.48.0.sha256sum] = "b2ef27e0d7bdfcfc7b868b317c1d32c641a6fe4629c171b8928c7b08d98d7cf3"
+SRC_URI[windows_i686_gnu-0.48.0.sha256sum] = "622a1962a7db830d6fd0a69683c80a18fda201879f0f447f065a3b7467daa241"
+SRC_URI[windows_i686_msvc-0.48.0.sha256sum] = "4542c6e364ce21bf45d69fdd2a8e455fa38d316158cfd43b3ac1c5b1b19f8e00"
+SRC_URI[windows_x86_64_gnu-0.48.0.sha256sum] = "ca2b8a661f7628cbd23440e50b05d705db3686f894fc9580820623656af974b1"
+SRC_URI[windows_x86_64_gnullvm-0.48.0.sha256sum] = "7896dbc1f41e08872e9d5e8f8baa8fdd2677f29468c4e156210174edc7f7b953"
+SRC_URI[windows_x86_64_msvc-0.48.0.sha256sum] = "1a515f5799fe4961cb532f983ce2b23082366b898e52ffbce459c86f67c8378a"
diff --git a/meta/recipes-devtools/python/python3-rpds-py_0.18.0.bb b/meta/recipes-devtools/python/python3-rpds-py_0.18.0.bb
new file mode 100644
index 0000000000..cece2cb8cc
--- /dev/null
+++ b/meta/recipes-devtools/python/python3-rpds-py_0.18.0.bb
@@ -0,0 +1,15 @@
+SUMMARY = "Python bindings to the Rust rpds crate for persistent data structures."
+HOMEPAGE = "https://pypi.org/project/rpds-py/"
+
+LICENSE = "MIT"
+LIC_FILES_CHKSUM = "file://LICENSE;md5=7767fa537c4596c54141f32882c4a984"
+
+SRC_URI[sha256sum] = "42821446ee7a76f5d9f71f9e33a4fb2ffd724bb3e7f93386150b61a43115788d"
+
+require ${BPN}-crates.inc
+
+inherit pypi cargo-update-recipe-crates python_maturin
+
+PYPI_PACKAGE = "rpds_py"
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/python/python3-ruamel-yaml_0.17.21.bb b/meta/recipes-devtools/python/python3-ruamel-yaml_0.17.21.bb
deleted file mode 100644
index 7d65c4c2d8..0000000000
--- a/meta/recipes-devtools/python/python3-ruamel-yaml_0.17.21.bb
+++ /dev/null
@@ -1,24 +0,0 @@
-SUMMARY = "YAML parser/emitter that supports roundtrip preservation of comments, seq/map flow style, and map key order."
-HOMEPAGE = "https://pypi.org/project/ruamel.yaml/"
-AUTHOR = "Anthon van der Neut"
-
-LICENSE = "MIT"
-LIC_FILES_CHKSUM = "file://LICENSE;md5=034154b7344d15438bc5ed5ee9cc075f"
-
-PYPI_PACKAGE = "ruamel.yaml"
-
-inherit pypi setuptools3
-
-SRC_URI[sha256sum] = "8b7ce697a2f212752a35c1ac414471dc16c424c9573be4926b56ff3f5d23b7af"
-
-RDEPENDS:${PN} += "\
- ${PYTHON_PN}-shell \
- ${PYTHON_PN}-datetime \
- ${PYTHON_PN}-netclient \
-"
-
-do_install:prepend() {
- export RUAMEL_NO_PIP_INSTALL_CHECK=1
-}
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/python/python3-ruamel-yaml_0.18.6.bb b/meta/recipes-devtools/python/python3-ruamel-yaml_0.18.6.bb
new file mode 100644
index 0000000000..197bf91d6c
--- /dev/null
+++ b/meta/recipes-devtools/python/python3-ruamel-yaml_0.18.6.bb
@@ -0,0 +1,23 @@
+SUMMARY = "YAML parser/emitter that supports roundtrip preservation of comments, seq/map flow style, and map key order."
+HOMEPAGE = "https://pypi.org/project/ruamel.yaml/"
+
+LICENSE = "MIT"
+LIC_FILES_CHKSUM = "file://LICENSE;md5=30cbbccd94bf3a2b0285ec35671a1938"
+
+PYPI_PACKAGE = "ruamel.yaml"
+
+inherit pypi setuptools3
+
+SRC_URI[sha256sum] = "8b27e6a217e786c6fbe5634d8f3f11bc63e0f80f6a5890f28863d9c45aac311b"
+
+RDEPENDS:${PN} += "\
+ python3-shell \
+ python3-datetime \
+ python3-netclient \
+"
+
+do_install:prepend() {
+ export RUAMEL_NO_PIP_INSTALL_CHECK=1
+}
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/python/python3-scons_4.3.0.bb b/meta/recipes-devtools/python/python3-scons_4.3.0.bb
deleted file mode 100644
index ae91d980fe..0000000000
--- a/meta/recipes-devtools/python/python3-scons_4.3.0.bb
+++ /dev/null
@@ -1,36 +0,0 @@
-SUMMARY = "Software Construction tool (make/autotools replacement)"
-HOMEPAGE = "https://github.com/SCons/scons"
-SECTION = "devel/python"
-LICENSE = "MIT"
-LIC_FILES_CHKSUM = "file://LICENSE;md5=d903b0b8027f461402bac9b5169b36f7"
-
-SRC_URI += " file://0001-Fix-man-page-installation.patch"
-SRC_URI[sha256sum] = "d47081587e3675cc168f1f54f0d74a69b328a2fc90ec4feb85f728677419b879"
-
-PYPI_PACKAGE = "SCons"
-
-inherit pypi setuptools3
-
-RDEPENDS:${PN}:class-target = "\
- python3-core \
- python3-compression \
- python3-fcntl \
- python3-importlib-metadata \
- python3-io \
- python3-json \
- python3-shell \
- python3-pickle \
- python3-pkg-resources \
- python3-pprint \
- "
-
-do_install:append() {
- install -d ${D}${mandir}/man1
- mv ${D}${prefix}/scons*.1 ${D}${mandir}/man1/
-}
-
-do_install:append:class-native() {
- create_wrapper ${D}${bindir}/scons SCONS_LIB_DIR='${STAGING_DIR_HOST}/${PYTHON_SITEPACKAGES_DIR}' PYTHONNOUSERSITE='1'
-}
-
-BBCLASSEXTEND = "native"
diff --git a/meta/recipes-devtools/python/python3-scons_4.7.0.bb b/meta/recipes-devtools/python/python3-scons_4.7.0.bb
new file mode 100644
index 0000000000..bba6c799d4
--- /dev/null
+++ b/meta/recipes-devtools/python/python3-scons_4.7.0.bb
@@ -0,0 +1,36 @@
+SUMMARY = "Software Construction tool (make/autotools replacement)"
+HOMEPAGE = "https://github.com/SCons/scons"
+SECTION = "devel/python"
+LICENSE = "MIT"
+LIC_FILES_CHKSUM = "file://LICENSE;md5=d903b0b8027f461402bac9b5169b36f7"
+
+SRC_URI += " file://0001-Fix-man-page-installation.patch"
+SRC_URI[sha256sum] = "d8b617f6610a73e46509de70dcf82f76861b79762ff602d546f4e80918ec81f3"
+
+PYPI_PACKAGE = "SCons"
+
+inherit pypi setuptools3
+
+RDEPENDS:${PN}:class-target = "\
+ python3-core \
+ python3-compression \
+ python3-fcntl \
+ python3-importlib-metadata \
+ python3-io \
+ python3-json \
+ python3-shell \
+ python3-pickle \
+ python3-pkg-resources \
+ python3-pprint \
+ "
+
+do_install:append() {
+ install -d ${D}${mandir}/man1
+ mv ${D}${prefix}/scons*.1 ${D}${mandir}/man1/
+}
+
+do_install:append:class-native() {
+ create_wrapper ${D}${bindir}/scons SCONS_LIB_DIR='${STAGING_DIR_HOST}/${PYTHON_SITEPACKAGES_DIR}' PYTHONNOUSERSITE='1'
+}
+
+BBCLASSEXTEND = "native"
diff --git a/meta/recipes-devtools/python/python3-semantic-version_2.10.0.bb b/meta/recipes-devtools/python/python3-semantic-version_2.10.0.bb
new file mode 100644
index 0000000000..27e3f534b8
--- /dev/null
+++ b/meta/recipes-devtools/python/python3-semantic-version_2.10.0.bb
@@ -0,0 +1,18 @@
+SUMMARY = "A library implementing the 'SemVer' scheme."
+DESCRIPTION = "Semantic version comparison for Python (see http://semver.org/)"
+HOMEPAGE = "https://github.com/rbarrois/python-semanticversion"
+LICENSE = "BSD-2-Clause"
+LIC_FILES_CHKSUM = "file://LICENSE;md5=4fb31e3c1c7eeb8b5e8c07657cdd54e2"
+
+SRC_URI[sha256sum] = "bdabb6d336998cbb378d4b9db3a4b56a1e3235701dc05ea2690d9a997ed5041c"
+
+PYPI_PACKAGE = "semantic_version"
+inherit pypi setuptools3
+
+RDEPENDS:${PN} += " \
+ python3-pkg-resources \
+"
+
+BBCLASSEXTEND = "native nativesdk"
+
+UPSTREAM_CHECK_REGEX = "/semantic-version/(?P<pver>(\d+[\.\-_]*)+)/"
diff --git a/meta/recipes-devtools/python/python3-semantic-version_2.9.0.bb b/meta/recipes-devtools/python/python3-semantic-version_2.9.0.bb
deleted file mode 100644
index 4101f48043..0000000000
--- a/meta/recipes-devtools/python/python3-semantic-version_2.9.0.bb
+++ /dev/null
@@ -1,14 +0,0 @@
-SUMMARY = "A library implementing the 'SemVer' scheme."
-DESCRIPTION = "Semantic version comparison for Python (see http://semver.org/)"
-HOMEPAGE = "https://github.com/rbarrois/python-semanticversion"
-LICENSE = "BSD-2-Clause"
-LIC_FILES_CHKSUM = "file://LICENSE;md5=4fb31e3c1c7eeb8b5e8c07657cdd54e2"
-
-SRC_URI[sha256sum] = "abf54873553e5e07a6fd4d5f653b781f5ae41297a493666b59dcf214006a12b2"
-
-PYPI_PACKAGE = "semantic_version"
-inherit pypi setuptools3
-
-BBCLASSEXTEND = "native nativesdk"
-
-UPSTREAM_CHECK_REGEX = "/semantic-version/(?P<pver>(\d+[\.\-_]*)+)/"
diff --git a/meta/recipes-devtools/python/python3-setuptools-rust_1.3.0.bb b/meta/recipes-devtools/python/python3-setuptools-rust_1.3.0.bb
deleted file mode 100644
index 51e27b843e..0000000000
--- a/meta/recipes-devtools/python/python3-setuptools-rust_1.3.0.bb
+++ /dev/null
@@ -1,28 +0,0 @@
-SUMMARY = "Setuptools Rust extension plugin"
-DESCRIPTION = "setuptools-rust is a plugin for setuptools to build Rust \
-Python extensions implemented with PyO3 or rust-cpython.\
-\
-Compile and distribute Python extensions written in Rust as easily as if they were written in C."
-HOMEPAGE = "https://github.com/PyO3/setuptools-rust"
-BUGTRACKER = "https://github.com/PyO3/setuptools-rust/issues"
-
-LICENSE = "MIT"
-LIC_FILES_CHKSUM = "file://LICENSE;md5=011cd92e702dd9e6b1a26157b6fd53f5"
-
-SRC_URI = "https://files.pythonhosted.org/packages/67/08/e1aa2c582c62ac76e4d60f8e454bd3bba933781a06a88b4e38797445822a/setuptools-rust-${PV}.tar.gz"
-SRC_URI[sha256sum] = "958c5bf4ab6483d59dab888538121871cc5006354a42fb0fbd50acf03caad1de"
-
-inherit cargo pypi python_setuptools_build_meta
-
-DEPENDS += "python3-setuptools-scm-native python3-wheel-native"
-
-RDEPENDS:${PN}:class-native += " \
- python3-semantic-version-native \
- python3-setuptools-native \
- python3-setuptools-scm-native \
- python3-toml-native \
- python3-typing-extensions-native \
- python3-wheel-native \
-"
-
-BBCLASSEXTEND = "native"
diff --git a/meta/recipes-devtools/python/python3-setuptools-rust_1.9.0.bb b/meta/recipes-devtools/python/python3-setuptools-rust_1.9.0.bb
new file mode 100644
index 0000000000..8eb2513d69
--- /dev/null
+++ b/meta/recipes-devtools/python/python3-setuptools-rust_1.9.0.bb
@@ -0,0 +1,35 @@
+SUMMARY = "Setuptools Rust extension plugin"
+DESCRIPTION = "setuptools-rust is a plugin for setuptools to build Rust \
+Python extensions implemented with PyO3 or rust-cpython.\
+\
+Compile and distribute Python extensions written in Rust as easily as if they were written in C."
+HOMEPAGE = "https://github.com/PyO3/setuptools-rust"
+BUGTRACKER = "https://github.com/PyO3/setuptools-rust/issues"
+
+LICENSE = "MIT"
+LIC_FILES_CHKSUM = "file://LICENSE;md5=011cd92e702dd9e6b1a26157b6fd53f5"
+
+SRC_URI = "${PYPI_SRC_URI} \
+ https://files.pythonhosted.org/packages/67/08/e1aa2c582c62ac76e4d60f8e454bd3bba933781a06a88b4e38797445822a/setuptools-rust-${PV}.tar.gz \
+ "
+SRC_URI[sha256sum] = "704df0948f2e4cc60c2596ad6e840ea679f4f43e58ed4ad0c1857807240eab96"
+
+inherit cargo pypi python_setuptools_build_meta
+
+DEPENDS += "python3-setuptools-scm-native python3-wheel-native"
+# remove when https://github.com/PyO3/setuptools-rust/commit/7ced8d2a8f36e1b4fc41b5544636defb7bd44bdf
+# is included
+DEPENDS += "python3-semantic-version-native"
+
+RDEPENDS:${PN} += " \
+ python3-json \
+ python3-semantic-version \
+ python3-setuptools \
+ python3-setuptools-scm \
+ python3-shell \
+ python3-toml \
+ python3-typing-extensions \
+ python3-wheel \
+"
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/python/python3-setuptools-scm_6.4.2.bb b/meta/recipes-devtools/python/python3-setuptools-scm_6.4.2.bb
deleted file mode 100644
index 9aaae071d3..0000000000
--- a/meta/recipes-devtools/python/python3-setuptools-scm_6.4.2.bb
+++ /dev/null
@@ -1,28 +0,0 @@
-SUMMARY = "the blessed package to manage your versions by scm tags"
-HOMEPAGE = "https://pypi.org/project/setuptools-scm/"
-DESCRIPTION = "setuptools_scm handles managing your Python package versions in SCM metadata instead of declaring them as the version argument or in a SCM managed file."
-LICENSE = "MIT"
-LIC_FILES_CHKSUM = "file://PKG-INFO;beginline=8;endline=8;md5=8227180126797a0148f94f483f3e1489"
-
-SRC_URI[sha256sum] = "6833ac65c6ed9711a4d5d2266f8024cfa07c533a0e55f4c12f6eff280a5a9e30"
-
-PYPI_PACKAGE = "setuptools_scm"
-inherit pypi python_setuptools_build_meta
-
-UPSTREAM_CHECK_REGEX = "setuptools_scm-(?P<pver>.*)\.tar"
-
-DEPENDS += "python3-tomli-native"
-
-RDEPENDS:${PN} = "\
- ${PYTHON_PN}-packaging \
- ${PYTHON_PN}-pyparsing \
- ${PYTHON_PN}-setuptools \
- ${PYTHON_PN}-tomli \
-"
-
-RDEPENDS:${PN}:append:class-target = " \
- ${PYTHON_PN}-debugger \
- ${PYTHON_PN}-json \
-"
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/python/python3-setuptools-scm_8.0.4.bb b/meta/recipes-devtools/python/python3-setuptools-scm_8.0.4.bb
new file mode 100644
index 0000000000..64b5050c3b
--- /dev/null
+++ b/meta/recipes-devtools/python/python3-setuptools-scm_8.0.4.bb
@@ -0,0 +1,31 @@
+SUMMARY = "the blessed package to manage your versions by scm tags"
+HOMEPAGE = "https://pypi.org/project/setuptools-scm/"
+DESCRIPTION = "setuptools_scm handles managing your Python package \
+versions in SCM metadata instead of declaring them as the version \
+argument or in a SCM managed file."
+LICENSE = "MIT"
+LIC_FILES_CHKSUM = "file://LICENSE;md5=838c366f69b72c5df05c96dff79b35f2"
+
+SRC_URI[sha256sum] = "b5f43ff6800669595193fd09891564ee9d1d7dcb196cab4b2506d53a2e1c95c7"
+
+inherit pypi python_setuptools_build_meta
+
+UPSTREAM_CHECK_REGEX = "scm-(?P<pver>.*)\.tar"
+
+DEPENDS += "python3-tomli-native python3-packaging-native python3-typing-extensions-native"
+
+RDEPENDS:${PN} = "\
+ python3-packaging \
+ python3-pip \
+ python3-pyparsing \
+ python3-setuptools \
+ python3-tomli \
+ python3-typing-extensions \
+"
+
+RDEPENDS:${PN}:append:class-target = " \
+ python3-debugger \
+ python3-json \
+"
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/python/python3-setuptools/0001-_distutils-sysconfig.py-make-it-possible-to-substite.patch b/meta/recipes-devtools/python/python3-setuptools/0001-_distutils-sysconfig.py-make-it-possible-to-substite.patch
index c1b3dd6a30..0f6c9d250d 100644
--- a/meta/recipes-devtools/python/python3-setuptools/0001-_distutils-sysconfig.py-make-it-possible-to-substite.patch
+++ b/meta/recipes-devtools/python/python3-setuptools/0001-_distutils-sysconfig.py-make-it-possible-to-substite.patch
@@ -1,4 +1,4 @@
-From 41f78746cbe88d263400ee948abef5b3f89cce29 Mon Sep 17 00:00:00 2001
+From d393759315b189a738e4b6a2ce31dc18dbbfae29 Mon Sep 17 00:00:00 2001
From: Alexander Kanavin <alex@linutronix.de>
Date: Wed, 11 May 2022 21:41:14 +0200
Subject: [PATCH] _distutils/sysconfig.py: make it possible to substite the
@@ -16,25 +16,23 @@ would happen to this module in light of distutils deprecation.
Upstream-Status: Inappropriate [oe-core specific]
Signed-off-by: Alexander Kanavin <alex@linutronix.de>
---
- setuptools/_distutils/sysconfig.py | 14 +++++++++++---
- 1 file changed, 11 insertions(+), 3 deletions(-)
+ setuptools/_distutils/sysconfig.py | 12 ++++++++++--
+ 1 file changed, 10 insertions(+), 2 deletions(-)
diff --git a/setuptools/_distutils/sysconfig.py b/setuptools/_distutils/sysconfig.py
-index 55a42e1..ead63b9 100644
+index a40a723..14f35e7 100644
--- a/setuptools/_distutils/sysconfig.py
+++ b/setuptools/_distutils/sysconfig.py
-@@ -102,7 +102,9 @@ def get_python_inc(plat_specific=0, prefix=None):
- If 'prefix' is supplied, use it instead of sys.base_prefix or
+@@ -119,6 +119,8 @@ def get_python_inc(plat_specific=0, prefix=None):
sys.base_exec_prefix -- i.e., ignore 'plat_specific'.
"""
-- if prefix is None:
-+ if prefix is None and os.environ.get('STAGING_INCDIR', ""):
-+ prefix = os.environ['STAGING_INCDIR'].rstrip('include')
-+ elif prefix is None:
- prefix = plat_specific and BASE_EXEC_PREFIX or BASE_PREFIX
- if os.name == "posix":
- if IS_PYPY and sys.version_info < (3, 8):
-@@ -167,7 +169,13 @@ def get_python_lib(plat_specific=0, standard_lib=0, prefix=None):
+ default_prefix = BASE_EXEC_PREFIX if plat_specific else BASE_PREFIX
++ if os.environ.get('STAGING_INCDIR', ""):
++ default_prefix = os.environ['STAGING_INCDIR'].rstrip('include')
+ resolved_prefix = prefix if prefix is not None else default_prefix
+ try:
+ getter = globals()[f'_get_python_inc_{os.name}']
+@@ -238,7 +240,13 @@ def get_python_lib(plat_specific=0, standard_lib=0, prefix=None):
early_prefix = prefix
@@ -49,12 +47,12 @@ index 55a42e1..ead63b9 100644
if standard_lib:
prefix = plat_specific and BASE_EXEC_PREFIX or BASE_PREFIX
else:
-@@ -182,7 +190,7 @@ def get_python_lib(plat_specific=0, standard_lib=0, prefix=None):
+@@ -253,7 +261,7 @@ def get_python_lib(plat_specific=0, standard_lib=0, prefix=None):
# Pure Python
libdir = "lib"
implementation = 'pypy' if IS_PYPY else 'python'
-- libpython = os.path.join(prefix, libdir,
-+ libpython = os.path.join(prefix, lib_basename,
- implementation + get_python_version())
+- libpython = os.path.join(prefix, libdir, implementation + get_python_version())
++ libpython = os.path.join(prefix, lib_basename, implementation + get_python_version())
return _posix_lib(standard_lib, libpython, early_prefix, prefix)
elif os.name == "nt":
+ if standard_lib:
diff --git a/meta/recipes-devtools/python/python3-setuptools/0001-change-shebang-to-python3.patch b/meta/recipes-devtools/python/python3-setuptools/0001-change-shebang-to-python3.patch
deleted file mode 100644
index 6dcf52771b..0000000000
--- a/meta/recipes-devtools/python/python3-setuptools/0001-change-shebang-to-python3.patch
+++ /dev/null
@@ -1,25 +0,0 @@
-From c39d0896930e25c224cc897660fc8511ccae30c8 Mon Sep 17 00:00:00 2001
-From: Changqing Li <changqing.li@windriver.com>
-Date: Thu, 23 Apr 2020 10:01:12 +0000
-Subject: [PATCH] change shebang to python3
-
-Upstream-Status: Pending
-
-Signed-off-by: Changqing Li <changqing.li@windriver.com>
----
- pkg_resources/_vendor/appdirs.py | 2 +-
- 1 files changed, 1 insertions(+), 1 deletions(-)
-
-diff --git a/pkg_resources/_vendor/appdirs.py b/pkg_resources/_vendor/appdirs.py
-index ae67001..933e398 100644
---- a/pkg_resources/_vendor/appdirs.py
-+++ b/pkg_resources/_vendor/appdirs.py
-@@ -1,4 +1,4 @@
--#!/usr/bin/env python
-+#!/usr/bin/env python3
- # -*- coding: utf-8 -*-
- # Copyright (c) 2005-2010 ActiveState Software Inc.
- # Copyright (c) 2013 Eddy Petrișor
---
-2.24.1
-
diff --git a/meta/recipes-devtools/python/files/0001-conditionally-do-not-fetch-code-by-easy_install.patch b/meta/recipes-devtools/python/python3-setuptools/0001-conditionally-do-not-fetch-code-by-easy_install.patch
index a2b7a519af..e227c2889c 100644
--- a/meta/recipes-devtools/python/files/0001-conditionally-do-not-fetch-code-by-easy_install.patch
+++ b/meta/recipes-devtools/python/python3-setuptools/0001-conditionally-do-not-fetch-code-by-easy_install.patch
@@ -1,4 +1,4 @@
-From d1b4fa4a99774878035a0f664ec0d9686e7f0c89 Mon Sep 17 00:00:00 2001
+From 80fe63816eb3bfd1f5b6d354e1f2442805cff4e0 Mon Sep 17 00:00:00 2001
From: Hongxu Jia <hongxu.jia@windriver.com>
Date: Tue, 17 Jul 2018 10:13:38 +0800
Subject: [PATCH] conditionally do not fetch code by easy_install
@@ -9,17 +9,16 @@ internet by easy_install.
Upstream-Status: Inappropriate [oe specific]
Signed-off-by: Hongxu Jia <hongxu.jia@windriver.com>
-
---
setuptools/command/easy_install.py | 5 +++++
1 file changed, 5 insertions(+)
diff --git a/setuptools/command/easy_install.py b/setuptools/command/easy_install.py
-index 444d3b3..61e445a 100644
+index 858fb20..62bd853 100644
--- a/setuptools/command/easy_install.py
+++ b/setuptools/command/easy_install.py
-@@ -648,6 +648,11 @@ class easy_install(Command):
- os.path.exists(tmpdir) and rmtree(tmpdir)
+@@ -672,6 +672,11 @@ class easy_install(Command):
+ os.path.exists(tmpdir) and _rmtree(tmpdir)
def easy_install(self, spec, deps=False):
+ if os.environ.get('NO_FETCH_BUILD', None):
diff --git a/meta/recipes-devtools/python/python3-setuptools_62.3.1.bb b/meta/recipes-devtools/python/python3-setuptools_62.3.1.bb
deleted file mode 100644
index c9367c180f..0000000000
--- a/meta/recipes-devtools/python/python3-setuptools_62.3.1.bb
+++ /dev/null
@@ -1,55 +0,0 @@
-SUMMARY = "Download, build, install, upgrade, and uninstall Python packages"
-HOMEPAGE = "https://pypi.org/project/setuptools"
-SECTION = "devel/python"
-LICENSE = "MIT"
-LIC_FILES_CHKSUM = "file://LICENSE;beginline=1;endline=19;md5=7a7126e068206290f3fe9f8d6c713ea6"
-
-inherit pypi python_setuptools_build_meta
-
-SRC_URI:append:class-native = " file://0001-conditionally-do-not-fetch-code-by-easy_install.patch"
-
-SRC_URI += "file://0001-change-shebang-to-python3.patch \
- file://0001-_distutils-sysconfig.py-make-it-possible-to-substite.patch"
-
-SRC_URI[sha256sum] = "28c79c24d83c42a5e6d6cc711e5e9a6c1b89326229feaa5807fc277040658600"
-
-DEPENDS += "${PYTHON_PN}"
-
-RDEPENDS:${PN} = "\
- ${PYTHON_PN}-2to3 \
- ${PYTHON_PN}-compile \
- ${PYTHON_PN}-compression \
- ${PYTHON_PN}-ctypes \
- ${PYTHON_PN}-email \
- ${PYTHON_PN}-html \
- ${PYTHON_PN}-json \
- ${PYTHON_PN}-netserver \
- ${PYTHON_PN}-numbers \
- ${PYTHON_PN}-pickle \
- ${PYTHON_PN}-pkg-resources \
- ${PYTHON_PN}-pkgutil \
- ${PYTHON_PN}-plistlib \
- ${PYTHON_PN}-shell \
- ${PYTHON_PN}-stringold \
- ${PYTHON_PN}-threading \
- ${PYTHON_PN}-unittest \
- ${PYTHON_PN}-xml \
-"
-
-BBCLASSEXTEND = "native nativesdk"
-
-# The pkg-resources module can be used by itself, without the package downloader
-# and easy_install. Ship it in a separate package so that it can be used by
-# minimal distributions.
-PACKAGES =+ "${PYTHON_PN}-pkg-resources "
-FILES:${PYTHON_PN}-pkg-resources = "${PYTHON_SITEPACKAGES_DIR}/pkg_resources/*"
-RDEPENDS:${PYTHON_PN}-pkg-resources = "\
- ${PYTHON_PN}-compression \
- ${PYTHON_PN}-email \
- ${PYTHON_PN}-plistlib \
- ${PYTHON_PN}-pprint \
-"
-
-# This used to use the bootstrap install which didn't compile. Until we bump the
-# tmpdir version we can't compile the native otherwise the sysroot unpack fails
-INSTALL_WHEEL_COMPILE_BYTECODE:class-native = "--no-compile-bytecode"
diff --git a/meta/recipes-devtools/python/python3-setuptools_69.2.0.bb b/meta/recipes-devtools/python/python3-setuptools_69.2.0.bb
new file mode 100644
index 0000000000..897398afc5
--- /dev/null
+++ b/meta/recipes-devtools/python/python3-setuptools_69.2.0.bb
@@ -0,0 +1,55 @@
+SUMMARY = "Download, build, install, upgrade, and uninstall Python packages"
+HOMEPAGE = "https://pypi.org/project/setuptools"
+SECTION = "devel/python"
+LICENSE = "MIT"
+LIC_FILES_CHKSUM = "file://LICENSE;md5=141643e11c48898150daa83802dbc65f"
+
+inherit pypi python_setuptools_build_meta
+
+SRC_URI:append:class-native = " file://0001-conditionally-do-not-fetch-code-by-easy_install.patch"
+
+SRC_URI += " \
+ file://0001-_distutils-sysconfig.py-make-it-possible-to-substite.patch"
+
+SRC_URI[sha256sum] = "0ff4183f8f42cd8fa3acea16c45205521a4ef28f73c6391d8a25e92893134f2e"
+
+DEPENDS += "python3"
+
+RDEPENDS:${PN} = "\
+ python3-2to3 \
+ python3-compile \
+ python3-compression \
+ python3-ctypes \
+ python3-email \
+ python3-html \
+ python3-json \
+ python3-netserver \
+ python3-numbers \
+ python3-pickle \
+ python3-pkg-resources \
+ python3-pkgutil \
+ python3-plistlib \
+ python3-shell \
+ python3-stringold \
+ python3-threading \
+ python3-unittest \
+ python3-xml \
+"
+
+BBCLASSEXTEND = "native nativesdk"
+
+# The pkg-resources module can be used by itself, without the package downloader
+# and easy_install. Ship it in a separate package so that it can be used by
+# minimal distributions.
+PACKAGES =+ "python3-pkg-resources "
+FILES:python3-pkg-resources = "${PYTHON_SITEPACKAGES_DIR}/pkg_resources/*"
+RDEPENDS:python3-pkg-resources = "\
+ python3-compression \
+ python3-email \
+ python3-plistlib \
+ python3-pprint \
+"
+
+# This used to use the bootstrap install which didn't compile. Until we bump the
+# tmpdir version we can't compile the native otherwise the sysroot unpack fails
+INSTALL_WHEEL_COMPILE_BYTECODE:class-native = "--no-compile-bytecode"
diff --git a/meta/recipes-devtools/python/python3-smartypants_2.0.0.bb b/meta/recipes-devtools/python/python3-smartypants_2.0.0.bb
index 05c94c390f..d089a89b95 100644
--- a/meta/recipes-devtools/python/python3-smartypants_2.0.0.bb
+++ b/meta/recipes-devtools/python/python3-smartypants_2.0.0.bb
@@ -9,6 +9,6 @@ PYPI_PACKAGE = "smartypants"
SRC_URI += "file://0001-Change-hash-bang-to-python3.patch"
SRC_URI[sha256sum] = "7812353a32022699a1aa8cd5626e01c94a946dcaeedaee2d0b382bae4c4cbf36"
-BBCLASSEXTEND = "native"
+BBCLASSEXTEND = "native nativesdk"
UPSTREAM_CHECK_REGEX = "/${PYPI_PACKAGE}/(?P<pver>(?!2\.0\.1)(\d+[\.\-_]*)+)/"
diff --git a/meta/recipes-devtools/python/python3-smmap_5.0.0.bb b/meta/recipes-devtools/python/python3-smmap_5.0.0.bb
deleted file mode 100644
index ea131ef793..0000000000
--- a/meta/recipes-devtools/python/python3-smmap_5.0.0.bb
+++ /dev/null
@@ -1,19 +0,0 @@
-SUMMARY = "Python implementation of a sliding window memory map manager"
-DESCRIPTION = "A pure Python implementation of a sliding memory map to \
-help unifying memory mapped access on 32 and 64 bit systems and to help \
-managing resources more efficiently."
-HOMEPAGE = "http://github.com/gitpython-developers/GitPython"
-SECTION = "devel/python"
-LICENSE = "BSD-3-Clause"
-LIC_FILES_CHKSUM = "file://PKG-INFO;beginline=8;endline=8;md5=e910b35b0ef4e1f665b9a75d6afb7709"
-
-inherit pypi setuptools3
-
-PYPI_PACKAGE = "smmap"
-
-SRC_URI[sha256sum] = "c840e62059cd3be204b0c9c9f74be2c09d5648eddd4580d9314c3ecde0b30936"
-
-RDEPENDS:${PN} += "${PYTHON_PN}-codecs \
- ${PYTHON_PN}-mmap \
-"
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/python/python3-smmap_6.0.0.bb b/meta/recipes-devtools/python/python3-smmap_6.0.0.bb
new file mode 100644
index 0000000000..6abed1205f
--- /dev/null
+++ b/meta/recipes-devtools/python/python3-smmap_6.0.0.bb
@@ -0,0 +1,19 @@
+SUMMARY = "Python implementation of a sliding window memory map manager"
+DESCRIPTION = "A pure Python implementation of a sliding memory map to \
+help unifying memory mapped access on 32 and 64 bit systems and to help \
+managing resources more efficiently."
+HOMEPAGE = "http://github.com/gitpython-developers/GitPython"
+SECTION = "devel/python"
+LICENSE = "BSD-3-Clause"
+LIC_FILES_CHKSUM = "file://PKG-INFO;beginline=8;endline=8;md5=e910b35b0ef4e1f665b9a75d6afb7709"
+
+inherit pypi setuptools3
+
+PYPI_PACKAGE = "smmap"
+
+SRC_URI[sha256sum] = "8d79028ea6cc131da5eab099a5d95a998d43c6779956fffe3b455040911076da"
+
+RDEPENDS:${PN} += "python3-codecs \
+ python3-mmap \
+"
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/python/python3-spdx-tools_0.8.2.bb b/meta/recipes-devtools/python/python3-spdx-tools_0.8.2.bb
new file mode 100644
index 0000000000..53263ca032
--- /dev/null
+++ b/meta/recipes-devtools/python/python3-spdx-tools_0.8.2.bb
@@ -0,0 +1,28 @@
+SUMMARY = "Python tool to parse, validate and convert spdx files"
+HOMEPAGE = "https://github.com/spdx/tools-python"
+
+LICENSE = "Apache-2.0"
+LIC_FILES_CHKSUM = "file://LICENSE;md5=dc7f21ccff0f672f2a7cd6f412ae627d"
+
+SRC_URI[sha256sum] = "aea4ac9c2c375e7f439b1cef5ff32ef34914c083de0f61e08ed67cd3d9deb2a9"
+
+BBCLASSEXTEND = "native nativesdk"
+
+inherit setuptools3 pypi
+
+# Dependency required for pyspdxtools : python3-click
+# Dependencies required for conversion to spdx3 : python3-semantic-version, python3-ply
+RDEPENDS:${PN} += "\
+ python3-core \
+ python3-beartype \
+ python3-click \
+ python3-datetime \
+ python3-json \
+ python3-license-expression \
+ python3-ply \
+ python3-pyyaml \
+ python3-rdflib \
+ python3-semantic-version \
+ python3-uritools \
+ python3-xmltodict \
+ "
diff --git a/meta/recipes-devtools/python/python3-sphinx-rtd-theme_1.0.0.bb b/meta/recipes-devtools/python/python3-sphinx-rtd-theme_1.0.0.bb
deleted file mode 100644
index df34e11bb7..0000000000
--- a/meta/recipes-devtools/python/python3-sphinx-rtd-theme_1.0.0.bb
+++ /dev/null
@@ -1,22 +0,0 @@
-DESCRIPTION = "Sphinx Theme reader"
-HOMEPAGE = "https://github.com/readthedocs/sphinx_rtd_theme"
-SECTION = "devel/python"
-LICENSE = "MIT & OFL-1.1"
-LIC_FILES_CHKSUM = "file://LICENSE;md5=a1db7d4ef426c2935227264e1d4ae8f9 \
- file://OFL-License.txt;md5=4534c22e0147eadb6828bd9fe86d4868 \
- file://Apache-License-2.0.txt;md5=8a75796f0ef19c3f601d69857f5a9a5b"
-
-DEPENDS = "python3-sphinx"
-
-PYPI_PACKAGE = "sphinx_rtd_theme"
-
-SRC_URI[sha256sum] = "eec6d497e4c2195fa0e8b2016b337532b8a699a68bcb22a512870e16925c6a5c"
-UPSTREAM_CHECK_REGEX ?= "/sphinx-rtd-theme/(?P<pver>(\d+[\.\-_]*)+)/"
-
-inherit setuptools3 pypi
-
-#Fake out the setup scipt
-export CI = "True"
-export TOX_ENV_NAME = "True"
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/python/python3-sphinx-rtd-theme_2.0.0.bb b/meta/recipes-devtools/python/python3-sphinx-rtd-theme_2.0.0.bb
new file mode 100644
index 0000000000..22b4d96444
--- /dev/null
+++ b/meta/recipes-devtools/python/python3-sphinx-rtd-theme_2.0.0.bb
@@ -0,0 +1,26 @@
+SUMMARY = "Sphinx Theme reader"
+HOMEPAGE = "https://github.com/readthedocs/sphinx_rtd_theme"
+SECTION = "devel/python"
+LICENSE = "MIT & OFL-1.1"
+LIC_FILES_CHKSUM = "file://LICENSE;md5=a1db7d4ef426c2935227264e1d4ae8f9 \
+ file://OFL-License.txt;md5=4534c22e0147eadb6828bd9fe86d4868 \
+ file://Apache-License-2.0.txt;md5=8a75796f0ef19c3f601d69857f5a9a5b"
+
+RDEPENDS:${PN} += " \
+ python3-compile \
+ python3-sphinx \
+ python3-sphinxcontrib-jquery \
+"
+
+PYPI_PACKAGE = "sphinx_rtd_theme"
+
+SRC_URI[sha256sum] = "bd5d7b80622406762073a04ef8fadc5f9151261563d47027de09910ce03afe6b"
+UPSTREAM_CHECK_REGEX ?= "/sphinx-rtd-theme/(?P<pver>(\d+[\.\-_]*)+)/"
+
+inherit setuptools3 pypi
+
+#Fake out the setup scipt
+export CI = "True"
+export TOX_ENV_NAME = "True"
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/python/python3-sphinx_4.5.0.bb b/meta/recipes-devtools/python/python3-sphinx_4.5.0.bb
deleted file mode 100644
index f4d27f3275..0000000000
--- a/meta/recipes-devtools/python/python3-sphinx_4.5.0.bb
+++ /dev/null
@@ -1,28 +0,0 @@
-DESCRIPTION = "Python documentation generator"
-HOMEPAGE = "http://sphinx-doc.org/"
-SECTION = "devel/python"
-LICENSE = "BSD-2-Clause & MIT & BSD-3-Clause"
-LIC_FILES_CHKSUM = "file://LICENSE;md5=72c536e78c21c567311b193fe00cd253"
-
-PYPI_PACKAGE = "Sphinx"
-
-SRC_URI[sha256sum] = "7bf8ca9637a4ee15af412d1a1d9689fec70523a68ca9bb9127c2f3eeb344e2e6"
-
-inherit setuptools3 pypi
-
-
-do_install:append () {
- # The cache format of "{None, 'en', 'ja'}" doesn't seem to be consistent (dict ordering?)
- rm ${D}${libdir}/${PYTHON_DIR}/site-packages/sphinx/writers/__pycache__/*latex*
-}
-
-RDEPENDS:${PN} = "\
- python3-packaging python3-docutils python3-requests \
- python3-imagesize python3-alabaster python3-jinja2 \
- python3-babel python3-pygments python3-snowballstemmer \
- python3-sphinxcontrib-applehelp python3-sphinxcontrib-devhelp \
- python3-sphinxcontrib-jsmath python3-sphinxcontrib-htmlhelp \
- python3-sphinxcontrib-serializinghtml python3-sphinxcontrib-qthelp \
- "
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/python/python3-sphinx_7.2.6.bb b/meta/recipes-devtools/python/python3-sphinx_7.2.6.bb
new file mode 100644
index 0000000000..f4ed0720ff
--- /dev/null
+++ b/meta/recipes-devtools/python/python3-sphinx_7.2.6.bb
@@ -0,0 +1,35 @@
+SUMMARY = "Python documentation generator"
+HOMEPAGE = "http://sphinx-doc.org/"
+SECTION = "devel/python"
+LICENSE = "BSD-2-Clause & MIT"
+LIC_FILES_CHKSUM = "file://LICENSE;md5=5eb6ac1b115a1ed24a12d9f15b633993"
+
+SRC_URI[sha256sum] = "9a5160e1ea90688d5963ba09a2dcd8bdd526620edbb65c328728f1b2228d5ab5"
+
+inherit python_flit_core pypi
+UPSTREAM_CHECK_REGEX = "/Sphinx/(?P<pver>(\d+[\.\-_]*)+)/"
+
+do_install:append () {
+ # The cache format of "{None, 'en', 'ja'}" doesn't seem to be consistent (dict ordering?)
+ rm ${D}${libdir}/${PYTHON_DIR}/site-packages/sphinx/writers/__pycache__/*latex*
+}
+
+RDEPENDS:${PN} = "\
+ python3-alabaster \
+ python3-babel \
+ python3-docutils \
+ python3-imagesize \
+ python3-jinja2 \
+ python3-packaging \
+ python3-pygments \
+ python3-requests \
+ python3-snowballstemmer \
+ python3-sphinxcontrib-applehelp \
+ python3-sphinxcontrib-devhelp \
+ python3-sphinxcontrib-htmlhelp \
+ python3-sphinxcontrib-jsmath \
+ python3-sphinxcontrib-qthelp \
+ python3-sphinxcontrib-serializinghtml \
+ "
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/python/python3-sphinxcontrib-applehelp_1.0.2.bb b/meta/recipes-devtools/python/python3-sphinxcontrib-applehelp_1.0.2.bb
deleted file mode 100644
index cde0da8265..0000000000
--- a/meta/recipes-devtools/python/python3-sphinxcontrib-applehelp_1.0.2.bb
+++ /dev/null
@@ -1,12 +0,0 @@
-DESCRIPTION = "sphinxcontrib-applehelp is a sphinx extension which outputs Apple help books"
-HOMEPAGE = "https://www.sphinx-doc.org"
-LICENSE = "BSD-2-Clause"
-LIC_FILES_CHKSUM = "file://LICENSE;md5=c7715857042d4c8c0105999ca0c072c5"
-
-SRC_URI[sha256sum] = "a072735ec80e7675e3f432fcae8610ecf509c5f1869d17e2eecff44389cdbc58"
-
-PYPI_PACKAGE = "sphinxcontrib-applehelp"
-
-inherit pypi setuptools3
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/python/python3-sphinxcontrib-applehelp_1.0.8.bb b/meta/recipes-devtools/python/python3-sphinxcontrib-applehelp_1.0.8.bb
new file mode 100644
index 0000000000..67dd299b72
--- /dev/null
+++ b/meta/recipes-devtools/python/python3-sphinxcontrib-applehelp_1.0.8.bb
@@ -0,0 +1,12 @@
+SUMMARY = "sphinxcontrib-applehelp is a sphinx extension which outputs Apple help books"
+HOMEPAGE = "https://www.sphinx-doc.org"
+LICENSE = "BSD-2-Clause"
+LIC_FILES_CHKSUM = "file://LICENSE;md5=c7715857042d4c8c0105999ca0c072c5"
+
+SRC_URI[sha256sum] = "c40a4f96f3776c4393d933412053962fac2b84f4c99a7982ba42e09576a70619"
+
+PYPI_PACKAGE = "sphinxcontrib_applehelp"
+
+inherit pypi python_flit_core
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/python/python3-sphinxcontrib-devhelp_1.0.2.bb b/meta/recipes-devtools/python/python3-sphinxcontrib-devhelp_1.0.2.bb
deleted file mode 100644
index 7b7bda7807..0000000000
--- a/meta/recipes-devtools/python/python3-sphinxcontrib-devhelp_1.0.2.bb
+++ /dev/null
@@ -1,12 +0,0 @@
-DESCRIPTION = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp document."
-HOMEPAGE = "https://www.sphinx-doc.org"
-LICENSE = "BSD-2-Clause"
-LIC_FILES_CHKSUM = "file://LICENSE;md5=fd30d9972a142c857a80c9f312e92b93"
-
-SRC_URI[sha256sum] = "ff7f1afa7b9642e7060379360a67e9c41e8f3121f2ce9164266f61b9f4b338e4"
-
-PYPI_PACKAGE = "sphinxcontrib-devhelp"
-
-inherit pypi setuptools3
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/python/python3-sphinxcontrib-devhelp_1.0.6.bb b/meta/recipes-devtools/python/python3-sphinxcontrib-devhelp_1.0.6.bb
new file mode 100644
index 0000000000..31f77a0a14
--- /dev/null
+++ b/meta/recipes-devtools/python/python3-sphinxcontrib-devhelp_1.0.6.bb
@@ -0,0 +1,12 @@
+SUMMARY = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp document."
+HOMEPAGE = "https://www.sphinx-doc.org"
+LICENSE = "BSD-2-Clause"
+LIC_FILES_CHKSUM = "file://LICENSE;md5=fd30d9972a142c857a80c9f312e92b93"
+
+SRC_URI[sha256sum] = "9893fd3f90506bc4b97bdb977ceb8fbd823989f4316b28c3841ec128544372d3"
+
+PYPI_PACKAGE = "sphinxcontrib_devhelp"
+
+inherit pypi python_flit_core
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/python/python3-sphinxcontrib-htmlhelp_2.0.0.bb b/meta/recipes-devtools/python/python3-sphinxcontrib-htmlhelp_2.0.0.bb
deleted file mode 100644
index cd8b987e0c..0000000000
--- a/meta/recipes-devtools/python/python3-sphinxcontrib-htmlhelp_2.0.0.bb
+++ /dev/null
@@ -1,12 +0,0 @@
-DESCRIPTION = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files"
-HOMEPAGE = "https://www.sphinx-doc.org"
-LICENSE = "BSD-2-Clause"
-LIC_FILES_CHKSUM = "file://LICENSE;md5=24dce5ef6a13563241c24bc366f48886"
-
-SRC_URI[sha256sum] = "f5f8bb2d0d629f398bf47d0d69c07bc13b65f75a81ad9e2f71a63d4b7a2f6db2"
-
-PYPI_PACKAGE = "sphinxcontrib-htmlhelp"
-
-inherit pypi setuptools3
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/python/python3-sphinxcontrib-htmlhelp_2.0.5.bb b/meta/recipes-devtools/python/python3-sphinxcontrib-htmlhelp_2.0.5.bb
new file mode 100644
index 0000000000..e30c61c398
--- /dev/null
+++ b/meta/recipes-devtools/python/python3-sphinxcontrib-htmlhelp_2.0.5.bb
@@ -0,0 +1,12 @@
+SUMMARY = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files"
+HOMEPAGE = "https://www.sphinx-doc.org"
+LICENSE = "BSD-2-Clause"
+LIC_FILES_CHKSUM = "file://LICENSE;md5=24dce5ef6a13563241c24bc366f48886"
+
+SRC_URI[sha256sum] = "0dc87637d5de53dd5eec3a6a01753b1ccf99494bd756aafecd74b4fa9e729015"
+
+PYPI_PACKAGE = "sphinxcontrib_htmlhelp"
+
+inherit pypi python_flit_core
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/python/python3-sphinxcontrib-jquery_4.1.bb b/meta/recipes-devtools/python/python3-sphinxcontrib-jquery_4.1.bb
new file mode 100644
index 0000000000..5f915663c2
--- /dev/null
+++ b/meta/recipes-devtools/python/python3-sphinxcontrib-jquery_4.1.bb
@@ -0,0 +1,12 @@
+SUMMARY = "Extension to include jQuery on newer Sphinx releases"
+HOMEPAGE = "https://pypi.org/project/sphinxcontrib-jquery/"
+LICENSE = "0BSD"
+LIC_FILES_CHKSUM = "file://LICENCE;md5=926e8b7e89e3ebb2a2d1dfaf0873f241"
+
+SRC_URI[sha256sum] = "1620739f04e36a2c779f1a131a2dfd49b2fd07351bf1968ced074365933abc7a"
+
+PYPI_PACKAGE = "sphinxcontrib-jquery"
+
+inherit pypi python_flit_core
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/python/python3-sphinxcontrib-jsmath_1.0.1.bb b/meta/recipes-devtools/python/python3-sphinxcontrib-jsmath_1.0.1.bb
index 863458a1dc..7c11cfeec5 100644
--- a/meta/recipes-devtools/python/python3-sphinxcontrib-jsmath_1.0.1.bb
+++ b/meta/recipes-devtools/python/python3-sphinxcontrib-jsmath_1.0.1.bb
@@ -1,4 +1,4 @@
-DESCRIPTION = "A sphinx extension which renders display math in HTML via JavaScript"
+SUMMARY = "A sphinx extension which renders display math in HTML via JavaScript"
HOMEPAGE = "https://www.sphinx-doc.org"
LICENSE = "BSD-2-Clause"
LIC_FILES_CHKSUM = "file://LICENSE;md5=f0064c10bd544bcffccbc67a41c108d3"
diff --git a/meta/recipes-devtools/python/python3-sphinxcontrib-qthelp_1.0.3.bb b/meta/recipes-devtools/python/python3-sphinxcontrib-qthelp_1.0.3.bb
deleted file mode 100644
index 8ebfbba741..0000000000
--- a/meta/recipes-devtools/python/python3-sphinxcontrib-qthelp_1.0.3.bb
+++ /dev/null
@@ -1,12 +0,0 @@
-DESCRIPTION = "Is a sphinx extension which outputs QtHelp document."
-HOMEPAGE = "http://babel.edgewall.org/"
-LICENSE = "BSD-2-Clause"
-LIC_FILES_CHKSUM = "file://LICENSE;md5=f7a83b72ea86d04827575ec0b63430eb"
-
-SRC_URI[sha256sum] = "4c33767ee058b70dba89a6fc5c1892c0d57a54be67ddd3e7875a18d14cba5a72"
-
-PYPI_PACKAGE = "sphinxcontrib-qthelp"
-
-inherit pypi setuptools3
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/python/python3-sphinxcontrib-qthelp_1.0.7.bb b/meta/recipes-devtools/python/python3-sphinxcontrib-qthelp_1.0.7.bb
new file mode 100644
index 0000000000..2225ec521e
--- /dev/null
+++ b/meta/recipes-devtools/python/python3-sphinxcontrib-qthelp_1.0.7.bb
@@ -0,0 +1,12 @@
+SUMMARY = "Is a sphinx extension which outputs QtHelp document."
+HOMEPAGE = "http://babel.edgewall.org/"
+LICENSE = "BSD-2-Clause"
+LIC_FILES_CHKSUM = "file://LICENSE;md5=f7a83b72ea86d04827575ec0b63430eb"
+
+SRC_URI[sha256sum] = "053dedc38823a80a7209a80860b16b722e9e0209e32fea98c90e4e6624588ed6"
+
+PYPI_PACKAGE = "sphinxcontrib_qthelp"
+
+inherit pypi python_flit_core
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/python/python3-sphinxcontrib-serializinghtml_1.1.10.bb b/meta/recipes-devtools/python/python3-sphinxcontrib-serializinghtml_1.1.10.bb
new file mode 100644
index 0000000000..49be5200f0
--- /dev/null
+++ b/meta/recipes-devtools/python/python3-sphinxcontrib-serializinghtml_1.1.10.bb
@@ -0,0 +1,12 @@
+SUMMARY = 'sphinxcontrib-serializinghtml is a sphinx extension which outputs "serialized" HTML files (json and pickle).'
+HOMEPAGE = "https://www.sphinx-doc.org"
+LICENSE = "BSD-2-Clause"
+LIC_FILES_CHKSUM = "file://LICENSE;md5=32a84ac5cd3bbd10c4d479233ad588b6"
+
+SRC_URI[sha256sum] = "93f3f5dc458b91b192fe10c397e324f262cf163d79f3282c158e8436a2c4511f"
+
+PYPI_PACKAGE = "sphinxcontrib_serializinghtml"
+
+inherit pypi python_flit_core
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/python/python3-sphinxcontrib-serializinghtml_1.1.5.bb b/meta/recipes-devtools/python/python3-sphinxcontrib-serializinghtml_1.1.5.bb
deleted file mode 100644
index 71dde37593..0000000000
--- a/meta/recipes-devtools/python/python3-sphinxcontrib-serializinghtml_1.1.5.bb
+++ /dev/null
@@ -1,12 +0,0 @@
-DESCRIPTION = 'sphinxcontrib-serializinghtml is a sphinx extension which outputs "serialized" HTML files (json and pickle).'
-HOMEPAGE = "https://www.sphinx-doc.org"
-LICENSE = "BSD-2-Clause"
-LIC_FILES_CHKSUM = "file://LICENSE;md5=32a84ac5cd3bbd10c4d479233ad588b6"
-
-SRC_URI[sha256sum] = "aa5f6de5dfdf809ef505c4895e51ef5c9eac17d0f287933eb49ec495280b6952"
-
-PYPI_PACKAGE = "sphinxcontrib-serializinghtml"
-
-inherit pypi setuptools3
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/python/python3-strict-rfc3339/0001-setup.py-use-vendored-_distutils.patch b/meta/recipes-devtools/python/python3-strict-rfc3339/0001-setup.py-use-vendored-_distutils.patch
deleted file mode 100644
index ba1c3d3363..0000000000
--- a/meta/recipes-devtools/python/python3-strict-rfc3339/0001-setup.py-use-vendored-_distutils.patch
+++ /dev/null
@@ -1,27 +0,0 @@
-From 857719e82daea0d85b734cac34cf569050724068 Mon Sep 17 00:00:00 2001
-From: Tim Orling <tim.orling@konsulko.com>
-Date: Sun, 20 Feb 2022 20:26:51 -0800
-Subject: [PATCH] setup.py: use vendored _distutils
-
-Deprecation warning of distutils is interferring with bdist_wheel build.
-
-For now, use the vendored setuptools._distutils.core.
-
-Upstream-Status: Pending [upstream appears unmaintained]
-
-Signed-off-by: Tim Orling <tim.orling@konsulko.com>
----
- setup.py | 2 +-
- 1 file changed, 1 insertion(+), 1 deletion(-)
-
-diff --git a/setup.py b/setup.py
-index 1bf87cf..a49fb8e 100644
---- a/setup.py
-+++ b/setup.py
-@@ -1,5 +1,5 @@
- import os.path
--from distutils.core import setup
-+from setuptools._distutils.core import setup
-
- readme_file = os.path.join(os.path.dirname(__file__), 'README.md')
- readme = open(readme_file).read()
diff --git a/meta/recipes-devtools/python/python3-strict-rfc3339_0.7.bb b/meta/recipes-devtools/python/python3-strict-rfc3339_0.7.bb
deleted file mode 100644
index c377c3bfb0..0000000000
--- a/meta/recipes-devtools/python/python3-strict-rfc3339_0.7.bb
+++ /dev/null
@@ -1,11 +0,0 @@
-SUMMARY = "Strict, simple, lightweight RFC3339 function.s"
-HOMEPAGE = "https://pypi.org/project/strict-rfc3339/"
-LICENSE = "GPL-3.0-only"
-LIC_FILES_CHKSUM = "file://LICENSE;md5=8f0e2cd40e05189ec81232da84bd6e1a"
-
-SRC_URI += "file://0001-setup.py-use-vendored-_distutils.patch"
-SRC_URI[sha256sum] = "5cad17bedfc3af57b399db0fed32771f18fc54bbd917e85546088607ac5e1277"
-
-inherit pypi setuptools3
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/python/python3-subunit_1.4.0.bb b/meta/recipes-devtools/python/python3-subunit_1.4.0.bb
deleted file mode 100644
index e4e3bf191d..0000000000
--- a/meta/recipes-devtools/python/python3-subunit_1.4.0.bb
+++ /dev/null
@@ -1,15 +0,0 @@
-SUMMARY = "Python implementation of subunit test streaming protocol"
-HOMEPAGE = "https://pypi.org/project/python-subunit/"
-SECTION = "devel/python"
-LICENSE = "Apache-2.0"
-LIC_FILES_CHKSUM = "file://README.rst;beginline=1;endline=20;md5=909c08e291647fd985fbe5d9836d51b6"
-
-PYPI_PACKAGE = "python-subunit"
-
-SRC_URI[sha256sum] = "042039928120fbf392e8c983d60f3d8ae1b88f90a9f8fd7188ddd9c26cad1e48"
-
-inherit pypi setuptools3
-
-RDEPENDS:${PN} = " python3-testtools"
-
-BBCLASSEXTEND = "nativesdk"
diff --git a/meta/recipes-devtools/python/python3-subunit_1.4.4.bb b/meta/recipes-devtools/python/python3-subunit_1.4.4.bb
new file mode 100644
index 0000000000..11be10bab6
--- /dev/null
+++ b/meta/recipes-devtools/python/python3-subunit_1.4.4.bb
@@ -0,0 +1,15 @@
+SUMMARY = "Python implementation of subunit test streaming protocol"
+HOMEPAGE = "https://pypi.org/project/python-subunit/"
+SECTION = "devel/python"
+LICENSE = "Apache-2.0 | BSD-3-Clause"
+LIC_FILES_CHKSUM = "file://COPYING;beginline=1;endline=20;md5=b1121e68d06c8d9ea544fcd895df0d39"
+
+PYPI_PACKAGE = "python-subunit"
+
+SRC_URI[sha256sum] = "1079363131aa1d3f45259237265bc2e61a77e35f20edfb6e3d1d2558a2cdea34"
+
+inherit pypi setuptools3
+
+RDEPENDS:${PN} = " python3-testtools python3-iso8601"
+
+BBCLASSEXTEND = "nativesdk"
diff --git a/meta/recipes-devtools/python/python3-testtools_2.5.0.bb b/meta/recipes-devtools/python/python3-testtools_2.5.0.bb
deleted file mode 100644
index 896ecee65c..0000000000
--- a/meta/recipes-devtools/python/python3-testtools_2.5.0.bb
+++ /dev/null
@@ -1,2 +0,0 @@
-inherit setuptools3
-require python-testtools.inc
diff --git a/meta/recipes-devtools/python/python3-testtools_2.7.1.bb b/meta/recipes-devtools/python/python3-testtools_2.7.1.bb
new file mode 100644
index 0000000000..cc7e055632
--- /dev/null
+++ b/meta/recipes-devtools/python/python3-testtools_2.7.1.bb
@@ -0,0 +1,20 @@
+SUMMARY = "Extensions to the Python standard library unit testing framework"
+HOMEPAGE = "https://pypi.org/project/testtools/"
+SECTION = "devel/python"
+LICENSE = "Apache-2.0"
+LIC_FILES_CHKSUM = "file://LICENSE;md5=e2c9d3e8ba7141c83bfef190e0b9379a"
+
+DEPENDS += "python3-hatch-vcs-native"
+
+inherit pypi python_hatchling
+
+SRC_URI[sha256sum] = "df6de96010e29ee21f637a147eabf30d50b25e3841dd1d68f93ee89ce77e366c"
+
+RDEPENDS:${PN} += "\
+ python3-doctest \
+ python3-extras \
+ python3-six \
+ "
+
+BBCLASSEXTEND = "nativesdk"
+
diff --git a/meta/recipes-devtools/python/python3-toml_0.10.2.bb b/meta/recipes-devtools/python/python3-toml_0.10.2.bb
index be29cac798..649464b961 100644
--- a/meta/recipes-devtools/python/python3-toml_0.10.2.bb
+++ b/meta/recipes-devtools/python/python3-toml_0.10.2.bb
@@ -11,5 +11,5 @@ inherit pypi setuptools3
BBCLASSEXTEND = "native nativesdk"
RDEPENDS:${PN} += " \
- ${PYTHON_PN}-misc \
+ python3-misc \
"
diff --git a/meta/recipes-devtools/python/python3-tomli_2.0.1.bb b/meta/recipes-devtools/python/python3-tomli_2.0.1.bb
index 6118a6a9c3..9401ed897f 100644
--- a/meta/recipes-devtools/python/python3-tomli_2.0.1.bb
+++ b/meta/recipes-devtools/python/python3-tomli_2.0.1.bb
@@ -11,3 +11,8 @@ inherit pypi python_flit_core
SRC_URI[sha256sum] = "de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"
BBCLASSEXTEND = "native nativesdk"
+
+RDEPENDS:${PN} += " \
+ python3-datetime \
+ python3-stringold \
+"
diff --git a/meta/recipes-devtools/python/python3-trove-classifiers/run-ptest b/meta/recipes-devtools/python/python3-trove-classifiers/run-ptest
new file mode 100644
index 0000000000..8d2017d39c
--- /dev/null
+++ b/meta/recipes-devtools/python/python3-trove-classifiers/run-ptest
@@ -0,0 +1,3 @@
+#!/bin/sh
+
+pytest --automake
diff --git a/meta/recipes-devtools/python/python3-trove-classifiers_2024.3.3.bb b/meta/recipes-devtools/python/python3-trove-classifiers_2024.3.3.bb
new file mode 100644
index 0000000000..b912efcf55
--- /dev/null
+++ b/meta/recipes-devtools/python/python3-trove-classifiers_2024.3.3.bb
@@ -0,0 +1,26 @@
+SUMMARY = "Canonical source for classifiers on PyPI (pypi.org)."
+HOMEPAGE = "https://github.com/pypa/trove-classifiers"
+LICENSE = "Apache-2.0"
+LIC_FILES_CHKSUM = "file://LICENSE;md5=86d3f3a95c324c9479bd8986968f4327"
+
+SRC_URI[sha256sum] = "df7edff9c67ff86b733628998330b180e81d125b1e096536d83ac0fd79673fdc"
+
+inherit pypi python_setuptools_build_meta ptest
+
+DEPENDS += " python3-calver-native"
+
+SRC_URI += " \
+ file://run-ptest \
+"
+
+RDEPENDS:${PN}-ptest += " \
+ python3-pytest \
+ python3-unittest-automake-output \
+"
+
+do_install_ptest() {
+ install -d ${D}${PTEST_PATH}/tests
+ cp -rf ${S}/tests/* ${D}${PTEST_PATH}/tests/
+}
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/python/python3-typing-extensions_4.11.0.bb b/meta/recipes-devtools/python/python3-typing-extensions_4.11.0.bb
new file mode 100644
index 0000000000..ad45b669ec
--- /dev/null
+++ b/meta/recipes-devtools/python/python3-typing-extensions_4.11.0.bb
@@ -0,0 +1,24 @@
+SUMMARY = "Backported and Experimental Type Hints for Python 3.7+"
+DESCRIPTION = "The typing_extensions module serves two related purposes:\
+\
+* Enable use of new type system features on older Python versions. For \
+ example, typing.TypeGuard is new in Python 3.10, but typing_extensions \
+ allows users on previous Python versions to use it too.\
+* Enable experimentation with new type system PEPs before they are accepted \
+ and added to the typing module."
+HOMEPAGE = "https://github.com/python/typing_extensions"
+BUGTRACKER = "https://github.com/python/typing_extensions/issues"
+SECTIONS = "libs"
+LICENSE = "PSF-2.0"
+LIC_FILES_CHKSUM = "file://LICENSE;md5=fcf6b249c2641540219a727f35d8d2c2"
+
+# The name on PyPi is slightly different.
+PYPI_PACKAGE = "typing_extensions"
+
+SRC_URI[sha256sum] = "83f085bd5ca59c80295fc2a82ab5dac679cbe02b9f33f7d83af68e241bea51b0"
+
+inherit pypi python_flit_core
+
+UPSTREAM_CHECK_REGEX = "/typing-extensions/(?P<pver>(\d+[\.\-_]*)+)/"
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/python/python3-typing-extensions_4.2.0.bb b/meta/recipes-devtools/python/python3-typing-extensions_4.2.0.bb
deleted file mode 100644
index 3aac39b8ef..0000000000
--- a/meta/recipes-devtools/python/python3-typing-extensions_4.2.0.bb
+++ /dev/null
@@ -1,14 +0,0 @@
-HOMEPAGE = "https://github.com/python/typing"
-LICENSE = "PSF-2.0"
-LIC_FILES_CHKSUM = "file://LICENSE;md5=64fc2b30b67d0a8423c250e0386ed72f"
-
-# The name on PyPi is slightly different.
-PYPI_PACKAGE = "typing_extensions"
-
-SRC_URI[sha256sum] = "f1c24655a0da0d1b67f07e17a5e6b2a105894e6824b92096378bb3668ef02376"
-
-inherit pypi python_flit_core
-
-BBCLASSEXTEND = "native nativesdk"
-
-UPSTREAM_CHECK_REGEX = "/typing-extensions/(?P<pver>(\d+[\.\-_]*)+)/"
diff --git a/meta/recipes-devtools/python/python3-typogrify_2.0.7.bb b/meta/recipes-devtools/python/python3-typogrify_2.0.7.bb
index 83e9b5eadb..8ba2788c9b 100644
--- a/meta/recipes-devtools/python/python3-typogrify_2.0.7.bb
+++ b/meta/recipes-devtools/python/python3-typogrify_2.0.7.bb
@@ -8,7 +8,7 @@ inherit pypi setuptools3
PYPI_PACKAGE = "typogrify"
SRC_URI[sha256sum] = "8be4668cda434163ce229d87ca273a11922cb1614cb359970b7dc96eed13cb38"
-BBCLASSEXTEND = "native"
+BBCLASSEXTEND = "native nativesdk"
-RDEPENDS:${PN} += "${PYTHON_PN}-smartypants"
+RDEPENDS:${PN} += "python3-smartypants"
diff --git a/meta/recipes-devtools/python/python3-unittest-automake-output_0.2.bb b/meta/recipes-devtools/python/python3-unittest-automake-output_0.2.bb
new file mode 100644
index 0000000000..1fc6180d0e
--- /dev/null
+++ b/meta/recipes-devtools/python/python3-unittest-automake-output_0.2.bb
@@ -0,0 +1,13 @@
+SUMMARY = "Modules to make unittest and pytest look like Automake output, for ptest"
+HOMEPAGE = "https://gitlab.com/rossburton/python-unittest-automake-output"
+LICENSE = "MIT"
+LIC_FILES_CHKSUM = "file://LICENSE;md5=f6f16008d9fb7349f06609329f1ab93b"
+
+SRC_URI = "git://gitlab.com/rossburton/python-unittest-automake-output;protocol=https;branch=main"
+SRCREV = "aebdfb188e368c690ea55cf6c9c9ffa1a52def65"
+
+S = "${WORKDIR}/git"
+
+inherit python_flit_core
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/python/python3-uritools_4.0.2.bb b/meta/recipes-devtools/python/python3-uritools_4.0.2.bb
new file mode 100644
index 0000000000..5ffedccb85
--- /dev/null
+++ b/meta/recipes-devtools/python/python3-uritools_4.0.2.bb
@@ -0,0 +1,11 @@
+SUMMARY = "URI parsing, classification and composition"
+HOMEPAGE = "https://github.com/tkem/uritools/"
+
+LICENSE = "MIT"
+LIC_FILES_CHKSUM = "file://LICENSE;md5=1ec55353c80c662e4255f8889a0ca558"
+
+SRC_URI[sha256sum] = "04df2b787d0eb76200e8319382a03562fbfe4741fd66c15506b08d3b8211d573"
+
+inherit setuptools3 pypi
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/python/python3-urllib3_1.26.9.bb b/meta/recipes-devtools/python/python3-urllib3_1.26.9.bb
deleted file mode 100644
index 95ae4a54a4..0000000000
--- a/meta/recipes-devtools/python/python3-urllib3_1.26.9.bb
+++ /dev/null
@@ -1,22 +0,0 @@
-SUMMARY = "Python HTTP library with thread-safe connection pooling, file post support, sanity friendly, and more"
-HOMEPAGE = "https://github.com/shazow/urllib3"
-LICENSE = "MIT"
-LIC_FILES_CHKSUM = "file://LICENSE.txt;md5=c2823cb995439c984fd62a973d79815c"
-
-SRC_URI[sha256sum] = "aabaf16477806a5e1dd19aa41f8c2b7950dd3c746362d7e3223dbe6de6ac448e"
-
-inherit pypi setuptools3
-
-RDEPENDS:${PN} += "\
- ${PYTHON_PN}-certifi \
- ${PYTHON_PN}-cryptography \
- ${PYTHON_PN}-email \
- ${PYTHON_PN}-idna \
- ${PYTHON_PN}-netclient \
- ${PYTHON_PN}-pyopenssl \
- ${PYTHON_PN}-threading \
-"
-
-CVE_PRODUCT = "urllib3"
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/python/python3-urllib3_2.2.1.bb b/meta/recipes-devtools/python/python3-urllib3_2.2.1.bb
new file mode 100644
index 0000000000..fc1828b4ee
--- /dev/null
+++ b/meta/recipes-devtools/python/python3-urllib3_2.2.1.bb
@@ -0,0 +1,24 @@
+SUMMARY = "Python HTTP library with thread-safe connection pooling, file post support, sanity friendly, and more"
+HOMEPAGE = "https://github.com/shazow/urllib3"
+LICENSE = "MIT"
+LIC_FILES_CHKSUM = "file://LICENSE.txt;md5=52d273a3054ced561275d4d15260ecda"
+
+SRC_URI[sha256sum] = "d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"
+
+inherit pypi python_hatchling
+
+RDEPENDS:${PN} += "\
+ python3-certifi \
+ python3-cryptography \
+ python3-email \
+ python3-idna \
+ python3-json \
+ python3-netclient \
+ python3-pyopenssl \
+ python3-threading \
+ python3-logging \
+"
+
+CVE_PRODUCT = "urllib3"
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/python/python3-wcwidth/run-ptest b/meta/recipes-devtools/python/python3-wcwidth/run-ptest
index b63c4de0d9..8d2017d39c 100644
--- a/meta/recipes-devtools/python/python3-wcwidth/run-ptest
+++ b/meta/recipes-devtools/python/python3-wcwidth/run-ptest
@@ -1,3 +1,3 @@
#!/bin/sh
-pytest -o log_cli=true -o log_cli_level=INFO | sed -e 's/\[...%\]//g'| sed -e 's/PASSED/PASS/g'| sed -e 's/FAILED/FAIL/g'|sed -e 's/SKIPPED/SKIP/g'| awk '{if ($NF=="PASS" || $NF=="FAIL" || $NF=="SKIP" || $NF=="XFAIL" || $NF=="XPASS"){printf "%s: %s\n", $NF, $0}else{print}}'| awk '{if ($NF=="PASS" || $NF=="FAIL" || $NF=="SKIP" || $NF=="XFAIL" || $NF=="XPASS") {$NF="";print $0}else{print}}'
+pytest --automake
diff --git a/meta/recipes-devtools/python/python3-wcwidth_0.2.13.bb b/meta/recipes-devtools/python/python3-wcwidth_0.2.13.bb
new file mode 100644
index 0000000000..4a9bf75323
--- /dev/null
+++ b/meta/recipes-devtools/python/python3-wcwidth_0.2.13.bb
@@ -0,0 +1,27 @@
+SUMMARY = "Library for building powerful interactive command lines in Python"
+DESCRIPTION = "Measures the displayed width of unicode strings in a terminal"
+HOMEPAGE = "https://github.com/jquast/wcwidth"
+LICENSE = "MIT"
+LIC_FILES_CHKSUM = "file://LICENSE;md5=b15979c39a2543892fca8cd86b4b52cb"
+
+SRC_URI[sha256sum] = "72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5"
+
+inherit pypi setuptools3 ptest
+
+SRC_URI += " \
+ file://run-ptest \
+"
+
+RDEPENDS:${PN}-ptest += " \
+ python3-pytest \
+ python3-unittest-automake-output \
+"
+
+do_install_ptest() {
+ install -d ${D}${PTEST_PATH}/tests
+ cp -rf ${S}/tests/* ${D}${PTEST_PATH}/tests/
+ install -d ${D}${PTEST_PATH}/bin
+ cp -rf ${S}/bin/* ${D}${PTEST_PATH}/bin/
+}
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/python/python3-wcwidth_0.2.5.bb b/meta/recipes-devtools/python/python3-wcwidth_0.2.5.bb
deleted file mode 100644
index a532d3c5cb..0000000000
--- a/meta/recipes-devtools/python/python3-wcwidth_0.2.5.bb
+++ /dev/null
@@ -1,25 +0,0 @@
-SUMMARY = "Library for building powerful interactive command lines in Python"
-DESCRIPTION = "Measures the displayed width of unicode strings in a terminal"
-HOMEPAGE = "https://github.com/jquast/wcwidth"
-LICENSE = "MIT"
-LIC_FILES_CHKSUM = "file://LICENSE;md5=b15979c39a2543892fca8cd86b4b52cb"
-
-SRC_URI[md5sum] = "a07a75f99d316e14838ac760c831ea37"
-SRC_URI[sha256sum] = "c4d647b99872929fdb7bdcaa4fbe7f01413ed3d98077df798530e5b04f116c83"
-
-inherit pypi setuptools3 ptest
-
-SRC_URI += " \
- file://run-ptest \
-"
-
-RDEPENDS:${PN}-ptest += " \
- ${PYTHON_PN}-pytest \
-"
-
-do_install_ptest() {
- install -d ${D}${PTEST_PATH}/tests
- cp -rf ${S}/tests/* ${D}${PTEST_PATH}/tests/
-}
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/python/python3-webcolors/run-ptest b/meta/recipes-devtools/python/python3-webcolors/run-ptest
index 3385d68939..8d2017d39c 100644
--- a/meta/recipes-devtools/python/python3-webcolors/run-ptest
+++ b/meta/recipes-devtools/python/python3-webcolors/run-ptest
@@ -1,3 +1,3 @@
#!/bin/sh
-pytest -o log_cli=true -o log_cli_level=INFO | sed -e 's/\[...%\]//g'| sed -e 's/PASSED/PASS/g'| sed -e 's/FAILED/FAIL/g'|sed -e 's/SKIPED/SKIP/g'| awk '{if ($NF=="PASS" || $NF=="FAIL" || $NF=="SKIP" || $NF=="XFAIL" || $NF=="XPASS"){printf "%s: %s\n", $NF, $0}else{print}}'| awk '{if ($NF=="PASS" || $NF=="FAIL" || $NF=="SKIP" || $NF=="XFAIL" || $NF=="XPASS") {$NF="";print $0}else{print}}'
+pytest --automake
diff --git a/meta/recipes-devtools/python/python3-webcolors_1.11.1.bb b/meta/recipes-devtools/python/python3-webcolors_1.11.1.bb
deleted file mode 100644
index 26dbe51767..0000000000
--- a/meta/recipes-devtools/python/python3-webcolors_1.11.1.bb
+++ /dev/null
@@ -1,28 +0,0 @@
-SUMMARY = "Simple Python module for working with HTML/CSS color definitions."
-HOMEPAGE = "https://pypi.org/project/webcolors/"
-LICENSE = "BSD-3-Clause"
-LIC_FILES_CHKSUM = "file://LICENSE;md5=25b90379a52351261c51272e7923d240"
-
-SRC_URI[md5sum] = "54d28a7c80b3e4d974ec2fee86768be9"
-SRC_URI[sha256sum] = "76f360636957d1c976db7466bc71dcb713bb95ac8911944dffc55c01cb516de6"
-
-inherit pypi setuptools3 ptest
-
-RDEPENDS:${PN}:class-target = "\
- ${PYTHON_PN}-stringold \
-"
-
-SRC_URI += " \
- file://run-ptest \
-"
-
-RDEPENDS:${PN}-ptest += " \
- ${PYTHON_PN}-pytest \
-"
-
-do_install_ptest() {
- install -d ${D}${PTEST_PATH}/tests
- cp -rf ${S}/tests/* ${D}${PTEST_PATH}/tests/
-}
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/python/python3-webcolors_1.13.bb b/meta/recipes-devtools/python/python3-webcolors_1.13.bb
new file mode 100644
index 0000000000..cea2a971d3
--- /dev/null
+++ b/meta/recipes-devtools/python/python3-webcolors_1.13.bb
@@ -0,0 +1,28 @@
+SUMMARY = "Simple Python module for working with HTML/CSS color definitions."
+HOMEPAGE = "https://pypi.org/project/webcolors/"
+LICENSE = "BSD-3-Clause"
+LIC_FILES_CHKSUM = "file://LICENSE;md5=702b1ef12cf66832a88f24c8f2ee9c19"
+
+SRC_URI[sha256sum] = "c225b674c83fa923be93d235330ce0300373d02885cef23238813b0d5668304a"
+
+inherit pypi python_setuptools_build_meta ptest
+
+RDEPENDS:${PN}:class-target = "\
+ python3-stringold \
+"
+
+SRC_URI += " \
+ file://run-ptest \
+"
+
+RDEPENDS:${PN}-ptest += " \
+ python3-pytest \
+ python3-unittest-automake-output \
+"
+
+do_install_ptest() {
+ install -d ${D}${PTEST_PATH}/tests
+ cp -rf ${S}/tests/* ${D}${PTEST_PATH}/tests/
+}
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/python/python3-websockets_12.0.bb b/meta/recipes-devtools/python/python3-websockets_12.0.bb
new file mode 100644
index 0000000000..f89c3b629e
--- /dev/null
+++ b/meta/recipes-devtools/python/python3-websockets_12.0.bb
@@ -0,0 +1,16 @@
+SUMMARY = "An implementation of the WebSocket Protocol (RFC 6455)"
+HOMEPAGE = "https://github.com/aaugustin/websockets"
+
+LICENSE = "BSD-3-Clause"
+LIC_FILES_CHKSUM = "file://LICENSE;md5=51924a6af4495b8cfaee1b1da869b6f4"
+
+inherit pypi setuptools3
+
+SRC_URI[sha256sum] = "81df9cbcbb6c260de1e007e58c011bfebe2dafc8435107b0537f393dd38c8b1b"
+
+BBCLASSEXTEND = "native nativesdk"
+
+RDEPENDS:${PN} = " \
+ python3-asyncio \
+ python3-profile \
+"
diff --git a/meta/recipes-devtools/python/python3-wheel/0001-Backport-pyproject.toml-from-flit-backend-branch.patch b/meta/recipes-devtools/python/python3-wheel/0001-Backport-pyproject.toml-from-flit-backend-branch.patch
deleted file mode 100644
index 023de0e6a8..0000000000
--- a/meta/recipes-devtools/python/python3-wheel/0001-Backport-pyproject.toml-from-flit-backend-branch.patch
+++ /dev/null
@@ -1,100 +0,0 @@
-From f00dd220346773bc088d403847ee7f06f2b4c30a Mon Sep 17 00:00:00 2001
-From: Tim Orling <tim.orling@konsulko.com>
-Date: Fri, 18 Feb 2022 11:09:16 -0800
-Subject: [PATCH] Backport pyproject.toml from flit-backend branch
-
-This allows us to bootstrap wheels and PEP-517 packaging.
-
-Upstream-Status: Backport from flit-backend branch
-https://raw.githubusercontent.com/pypa/wheel/4f6ba78fede38a8d9e35a14e38377a121033afb3/pyproject.toml
-
-Signed-off-by: Tim Orling <tim.orling@konsulko.com>
----
- pyproject.toml | 78 ++++++++++++++++++++++++++++++++++++++++++++++++++
- 1 file changed, 78 insertions(+)
- create mode 100644 pyproject.toml
-
-diff --git a/pyproject.toml b/pyproject.toml
-new file mode 100644
-index 0000000..749b8de
---- /dev/null
-+++ b/pyproject.toml
-@@ -0,0 +1,78 @@
-+[build-system]
-+requires = ["flit_core >=3.2,<4"]
-+build-backend = "flit_core.buildapi"
-+
-+[project]
-+name = "wheel"
-+description = "A built-package format for Python"
-+readme = "README.rst"
-+classifiers = [
-+ "Development Status :: 5 - Production/Stable",
-+ "Intended Audience :: Developers",
-+ "Topic :: System :: Archiving :: Packaging",
-+ "License :: OSI Approved :: MIT License",
-+ "Programming Language :: Python",
-+ "Programming Language :: Python :: 3 :: Only",
-+ "Programming Language :: Python :: 3.7",
-+ "Programming Language :: Python :: 3.8",
-+ "Programming Language :: Python :: 3.9",
-+ "Programming Language :: Python :: 3.10"
-+]
-+authors = [{name = "Daniel Holth", email = "dholth@fastmail.fm"}]
-+maintainers = [{name = "Alex Grönholm", email = "alex.gronholm@nextday.fi"}]
-+keywords = ["wheel", "packaging"]
-+license = {file = "LICENSE.txt"}
-+requires-python = ">=3.7"
-+dependencies = [
-+ "setuptools >= 45.2.0"
-+]
-+dynamic = ["version"]
-+
-+[project.urls]
-+Documentation = "https://wheel.readthedocs.io/"
-+Changelog = "https://wheel.readthedocs.io/en/stable/news.html"
-+"Issue Tracker" = "https://github.com/pypa/wheel/issues"
-+
-+[project.scripts]
-+wheel = "wheel.cli:main"
-+
-+[project.entry-points."distutils.commands"]
-+bdist_wheel = "wheel.bdist_wheel:bdist_wheel"
-+
-+[project.optional-dependencies]
-+test = [
-+ "pytest >= 3.0.0"
-+]
-+
-+[tool.flit.sdist]
-+exclude = [
-+ ".cirrus.yml",
-+ ".github/*",
-+ ".gitignore",
-+ ".pre-commit-config.yaml",
-+ ".readthedocs.yml"
-+]
-+
-+[tool.black]
-+target-version = ['py37']
-+extend-exclude = '''
-+^/src/wheel/vendored/
-+'''
-+
-+[tool.isort]
-+src_paths = ["src"]
-+profile = "black"
-+skip_gitignore = true
-+
-+[tool.flake8]
-+max-line-length = 88
-+
-+[tool.pytest.ini_options]
-+testpaths = "tests"
-+
-+[tool.coverage.run]
-+source = ["wheel"]
-+omit = ["*/vendored/*"]
-+
-+[tool.coverage.report]
-+show_missing = true
diff --git a/meta/recipes-devtools/python/python3-wheel_0.37.1.bb b/meta/recipes-devtools/python/python3-wheel_0.37.1.bb
deleted file mode 100644
index 2f7dd122ba..0000000000
--- a/meta/recipes-devtools/python/python3-wheel_0.37.1.bb
+++ /dev/null
@@ -1,17 +0,0 @@
-SUMMARY = "The official binary distribution format for Python "
-HOMEPAGE = "https://github.com/pypa/wheel"
-SECTION = "devel/python"
-LICENSE = "MIT"
-LIC_FILES_CHKSUM = "file://PKG-INFO;beginline=10;endline=10;md5=8227180126797a0148f94f483f3e1489"
-
-SRC_URI[sha256sum] = "e9a504e793efbca1b8e0e9cb979a249cf4a0a7b5b8c9e8b65a5e39d49529c1c4"
-
-inherit python_flit_core pypi
-
-SRC_URI += " file://0001-Backport-pyproject.toml-from-flit-backend-branch.patch"
-
-BBCLASSEXTEND = "native nativesdk"
-
-# This used to use the bootstrap install which didn't compile. Until we bump the
-# tmpdir version we can't compile the native otherwise the sysroot unpack fails
-INSTALL_WHEEL_COMPILE_BYTECODE:class-native = "--no-compile-bytecode"
diff --git a/meta/recipes-devtools/python/python3-wheel_0.43.0.bb b/meta/recipes-devtools/python/python3-wheel_0.43.0.bb
new file mode 100644
index 0000000000..ba309ae5fc
--- /dev/null
+++ b/meta/recipes-devtools/python/python3-wheel_0.43.0.bb
@@ -0,0 +1,15 @@
+SUMMARY = "The official binary distribution format for Python "
+HOMEPAGE = "https://github.com/pypa/wheel"
+SECTION = "devel/python"
+LICENSE = "MIT"
+LIC_FILES_CHKSUM = "file://LICENSE.txt;md5=7ffb0db04527cfe380e4f2726bd05ebf"
+
+SRC_URI[sha256sum] = "465ef92c69fa5c5da2d1cf8ac40559a8c940886afcef87dcf14b9470862f1d85"
+
+inherit python_flit_core pypi
+
+BBCLASSEXTEND = "native nativesdk"
+
+# This used to use the bootstrap install which didn't compile. Until we bump the
+# tmpdir version we can't compile the native otherwise the sysroot unpack fails
+INSTALL_WHEEL_COMPILE_BYTECODE:class-native = "--no-compile-bytecode"
diff --git a/meta/recipes-devtools/python/python3-xmltodict/run-ptest b/meta/recipes-devtools/python/python3-xmltodict/run-ptest
new file mode 100644
index 0000000000..3385d68939
--- /dev/null
+++ b/meta/recipes-devtools/python/python3-xmltodict/run-ptest
@@ -0,0 +1,3 @@
+#!/bin/sh
+
+pytest -o log_cli=true -o log_cli_level=INFO | sed -e 's/\[...%\]//g'| sed -e 's/PASSED/PASS/g'| sed -e 's/FAILED/FAIL/g'|sed -e 's/SKIPED/SKIP/g'| awk '{if ($NF=="PASS" || $NF=="FAIL" || $NF=="SKIP" || $NF=="XFAIL" || $NF=="XPASS"){printf "%s: %s\n", $NF, $0}else{print}}'| awk '{if ($NF=="PASS" || $NF=="FAIL" || $NF=="SKIP" || $NF=="XFAIL" || $NF=="XPASS") {$NF="";print $0}else{print}}'
diff --git a/meta/recipes-devtools/python/python3-xmltodict_0.13.0.bb b/meta/recipes-devtools/python/python3-xmltodict_0.13.0.bb
new file mode 100644
index 0000000000..e8e275647c
--- /dev/null
+++ b/meta/recipes-devtools/python/python3-xmltodict_0.13.0.bb
@@ -0,0 +1,31 @@
+SUMMARY = "Makes working with XML feel like you are working with JSON"
+HOMEPAGE = "https://github.com/martinblech/xmltodict"
+LICENSE = "MIT"
+LIC_FILES_CHKSUM = "file://LICENSE;md5=01441d50dc74476db58a41ac10cb9fa2"
+
+SRC_URI[sha256sum] = "341595a488e3e01a85a9d8911d8912fd922ede5fecc4dce437eb4b6c8d037e56"
+
+PYPI_PACKAGE = "xmltodict"
+
+BBCLASSEXTEND = "native nativesdk"
+
+inherit pypi setuptools3 ptest
+
+SRC_URI += " \
+ file://run-ptest \
+"
+
+RDEPENDS:${PN} += " \
+ python3-core \
+ python3-xml \
+ python3-io \
+"
+
+RDEPENDS:${PN}-ptest += " \
+ python3-pytest \
+"
+
+do_install_ptest() {
+ install -d ${D}${PTEST_PATH}/tests
+ cp -rf ${S}/tests/* ${D}${PTEST_PATH}/tests/
+}
diff --git a/meta/recipes-devtools/python/python3-yamllint_1.35.1.bb b/meta/recipes-devtools/python/python3-yamllint_1.35.1.bb
new file mode 100644
index 0000000000..53188b0262
--- /dev/null
+++ b/meta/recipes-devtools/python/python3-yamllint_1.35.1.bb
@@ -0,0 +1,15 @@
+SUMMARY = "A linter for YAML files."
+HOMEPAGE = "https://github.com/adrienverge/yamllint"
+LICENSE = "GPL-3.0-only"
+LIC_FILES_CHKSUM = "file://LICENSE;md5=1ebbd3e34237af26da5dc08a4e440464"
+
+inherit pypi setuptools3
+
+PYPI_PACKAGE = "yamllint"
+
+SRC_URI[sha256sum] = "7a003809f88324fd2c877734f2d575ee7881dd9043360657cc8049c809eba6cd"
+
+DEPENDS += "python3-setuptools-scm-native"
+RDEPENDS:${PN} += "python3-pathspec python3-pyyaml"
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/python/python3-zipp_3.18.1.bb b/meta/recipes-devtools/python/python3-zipp_3.18.1.bb
new file mode 100644
index 0000000000..e43432469d
--- /dev/null
+++ b/meta/recipes-devtools/python/python3-zipp_3.18.1.bb
@@ -0,0 +1,18 @@
+SUMMARY = "Backport of pathlib-compatible object wrapper for zip files"
+HOMEPAGE = "https://github.com/jaraco/zipp"
+LICENSE = "MIT"
+LIC_FILES_CHKSUM = "file://LICENSE;md5=141643e11c48898150daa83802dbc65f"
+
+SRC_URI[sha256sum] = "2884ed22e7d8961de1c9a05142eb69a247f120291bc0206a00a7642f09b5b715"
+
+DEPENDS += "python3-setuptools-scm-native"
+
+inherit pypi python_setuptools_build_meta
+
+DEPENDS += "python3-toml-native"
+
+RDEPENDS:${PN} += "python3-compression \
+ python3-math \
+ python3-more-itertools"
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/python/python3-zipp_3.8.0.bb b/meta/recipes-devtools/python/python3-zipp_3.8.0.bb
deleted file mode 100644
index f1fd383842..0000000000
--- a/meta/recipes-devtools/python/python3-zipp_3.8.0.bb
+++ /dev/null
@@ -1,18 +0,0 @@
-DESCRIPTION = "Backport of pathlib-compatible object wrapper for zip files"
-HOMEPAGE = "https://github.com/jaraco/zipp"
-LICENSE = "MIT"
-LIC_FILES_CHKSUM = "file://LICENSE;md5=7a7126e068206290f3fe9f8d6c713ea6"
-
-SRC_URI[sha256sum] = "56bf8aadb83c24db6c4b577e13de374ccfb67da2078beba1d037c17980bf43ad"
-
-DEPENDS += "${PYTHON_PN}-setuptools-scm-native"
-
-inherit pypi python_setuptools_build_meta
-
-DEPENDS += "${PYTHON_PN}-toml-native"
-
-RDEPENDS:${PN} += "${PYTHON_PN}-compression \
- ${PYTHON_PN}-math \
- ${PYTHON_PN}-more-itertools"
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/python/python3/0001-Avoid-shebang-overflow-on-python-config.py.patch b/meta/recipes-devtools/python/python3/0001-Avoid-shebang-overflow-on-python-config.py.patch
new file mode 100644
index 0000000000..0d807db39f
--- /dev/null
+++ b/meta/recipes-devtools/python/python3/0001-Avoid-shebang-overflow-on-python-config.py.patch
@@ -0,0 +1,30 @@
+From 365399f17d35719d828ddd49182dcb401fb7791c Mon Sep 17 00:00:00 2001
+From: Paulo Neves <ptsneves@gmail.com>
+Date: Tue, 7 Jun 2022 16:16:41 +0200
+Subject: [PATCH] Avoid shebang overflow on python-config.py
+
+The whole native path may be too big, leading to shebang
+overflow. Let's just use the env shebang.
+
+Denial reason: [1]
+
+Upstream-Status: Denied [distribution]
+
+[1] https://github.com/python/cpython/pull/93760#pullrequestreview-1005365737
+---
+ Makefile.pre.in | 2 ++
+ 1 file changed, 2 insertions(+)
+
+diff --git a/Makefile.pre.in b/Makefile.pre.in
+index 77bf09a..6353c57 100644
+--- a/Makefile.pre.in
++++ b/Makefile.pre.in
+@@ -2339,6 +2339,8 @@ python-config: $(srcdir)/Misc/python-config.in Misc/python-config.sh
+ @ # Substitution happens here, as the completely-expanded BINDIR
+ @ # is not available in configure
+ sed -e "s,@EXENAME@,$(EXENAME)," < $(srcdir)/Misc/python-config.in >python-config.py
++ @ # Otherwise we might get huge shebangs with native paths
++ sed -i -e '1s|^#!.*|#!/usr/bin/env python3|' python-config.py
+ @ # Replace makefile compat. variable references with shell script compat. ones; $(VAR) -> ${VAR}
+ LC_ALL=C sed -e 's,\$$(\([A-Za-z0-9_]*\)),\$$\{\1\},g' < Misc/python-config.sh >python-config
+ @ # On Darwin, always use the python version of the script, the shell
diff --git a/meta/recipes-devtools/python/python3/0001-Do-not-add-usr-lib-termcap-to-linker-flags-to-avoid-.patch b/meta/recipes-devtools/python/python3/0001-Do-not-add-usr-lib-termcap-to-linker-flags-to-avoid-.patch
deleted file mode 100644
index 62ef6efc28..0000000000
--- a/meta/recipes-devtools/python/python3/0001-Do-not-add-usr-lib-termcap-to-linker-flags-to-avoid-.patch
+++ /dev/null
@@ -1,25 +0,0 @@
-From 80f872e4573f542d33f91514538755557d566f79 Mon Sep 17 00:00:00 2001
-From: Alexander Kanavin <alex.kanavin@gmail.com>
-Date: Fri, 25 Jan 2019 19:04:13 +0100
-Subject: [PATCH] Do not add /usr/lib/termcap to linker flags to avoid host
- contamination
-
-Upstream-Status: Inappropriate [oe-core specific]
-Signed-off-by: Alexander Kanavin <alex.kanavin@gmail.com>
-
----
- setup.py | 1 -
- 1 file changed, 1 deletion(-)
-
-diff --git a/setup.py b/setup.py
-index 43e807f..11b5cf5 100644
---- a/setup.py
-+++ b/setup.py
-@@ -1149,7 +1149,6 @@ class PyBuildExt(build_ext):
- 'termcap'):
- readline_libs.append('termcap')
- self.add(Extension('readline', ['readline.c'],
-- library_dirs=['/usr/lib/termcap'],
- extra_link_args=readline_extra_link_args,
- libraries=readline_libs))
- else:
diff --git a/meta/recipes-devtools/python/python3/0001-Do-not-use-the-shell-version-of-python-config-that-w.patch b/meta/recipes-devtools/python/python3/0001-Do-not-use-the-shell-version-of-python-config-that-w.patch
deleted file mode 100644
index d98f243cb1..0000000000
--- a/meta/recipes-devtools/python/python3/0001-Do-not-use-the-shell-version-of-python-config-that-w.patch
+++ /dev/null
@@ -1,36 +0,0 @@
-From 2406432449784243b7590009d42bd0e871253b2e Mon Sep 17 00:00:00 2001
-From: Alexander Kanavin <alex.kanavin@gmail.com>
-Date: Tue, 29 Jan 2019 15:03:01 +0100
-Subject: [PATCH] Do not use the shell version of python-config that was
- introduced in 3.4
-
-Revert instead to the original python version: it has our tweaks and
-outputs directories correctly.
-
-Upstream-Status: Inappropriate [oe-specific]
-Signed-off-by: Alexander Kanavin <alex.kanavin@gmail.com>
-
----
- Makefile.pre.in | 9 +++------
- 1 file changed, 3 insertions(+), 6 deletions(-)
-
-diff --git a/Makefile.pre.in b/Makefile.pre.in
-index ee85f35..f0aedb7 100644
---- a/Makefile.pre.in
-+++ b/Makefile.pre.in
-@@ -1640,12 +1640,9 @@ python-config: $(srcdir)/Misc/python-config.in Misc/python-config.sh
- sed -e "s,@EXENAME@,$(BINDIR)/python$(LDVERSION)$(EXE)," < $(srcdir)/Misc/python-config.in >python-config.py
- @ # Replace makefile compat. variable references with shell script compat. ones; $(VAR) -> ${VAR}
- LC_ALL=C sed -e 's,\$$(\([A-Za-z0-9_]*\)),\$$\{\1\},g' < Misc/python-config.sh >python-config
-- @ # On Darwin, always use the python version of the script, the shell
-- @ # version doesn't use the compiler customizations that are provided
-- @ # in python (_osx_support.py).
-- @if test `uname -s` = Darwin; then \
-- cp python-config.py python-config; \
-- fi
-+ @ # In OpenEmbedded, always use the python version of the script, the shell
-+ @ # version is broken in multiple ways, and doesn't return correct directories
-+ cp python-config.py python-config
-
-
- # Install the include files
diff --git a/meta/recipes-devtools/python/python3/0001-Don-t-search-system-for-headers-libraries.patch b/meta/recipes-devtools/python/python3/0001-Don-t-search-system-for-headers-libraries.patch
deleted file mode 100644
index 5485020eb4..0000000000
--- a/meta/recipes-devtools/python/python3/0001-Don-t-search-system-for-headers-libraries.patch
+++ /dev/null
@@ -1,27 +0,0 @@
-From b6ead2d17ceafed47e598b6f50f3ff669deec5ab Mon Sep 17 00:00:00 2001
-From: Jeremy Puhlman <jpuhlman@mvista.com>
-Date: Wed, 4 Mar 2020 00:06:42 +0000
-Subject: [PATCH] Don't search system for headers/libraries
-
-Upstream-Status: Inappropriate [oe-core specific]
-Signed-off-by: Jeremy Puhlman <jpuhlman@mvista.com>
-
----
- setup.py | 4 ++--
- 1 file changed, 2 insertions(+), 2 deletions(-)
-
-diff --git a/setup.py b/setup.py
-index c190002..5ef368d 100644
---- a/setup.py
-+++ b/setup.py
-@@ -854,8 +854,8 @@ class PyBuildExt(build_ext):
- add_dir_to_list(self.compiler.include_dirs,
- sysconfig.get_config_var("INCLUDEDIR"))
-
-- system_lib_dirs = ['/lib64', '/usr/lib64', '/lib', '/usr/lib']
-- system_include_dirs = ['/usr/include']
-+ system_lib_dirs = []
-+ system_include_dirs = []
- # lib_dirs and inc_dirs are used to search for files;
- # if a file is found in one of those directories, it can
- # be assumed that no additional -I,-L directives are needed.
diff --git a/meta/recipes-devtools/python/python3/0001-Lib-pty.py-handle-stdin-I-O-errors-same-way-as-maste.patch b/meta/recipes-devtools/python/python3/0001-Lib-pty.py-handle-stdin-I-O-errors-same-way-as-maste.patch
index df5179e877..026150f0e2 100644
--- a/meta/recipes-devtools/python/python3/0001-Lib-pty.py-handle-stdin-I-O-errors-same-way-as-maste.patch
+++ b/meta/recipes-devtools/python/python3/0001-Lib-pty.py-handle-stdin-I-O-errors-same-way-as-maste.patch
@@ -1,4 +1,4 @@
-From 86061629f4a179e740a17e53dd2c98ab47af2fe2 Mon Sep 17 00:00:00 2001
+From f8a664cf1fc73e381d57d6927207286059744837 Mon Sep 17 00:00:00 2001
From: Alexander Kanavin <alex@linutronix.de>
Date: Thu, 16 Sep 2021 16:35:37 +0200
Subject: [PATCH] Lib/pty.py: handle stdin I/O errors same way as master I/O
@@ -30,18 +30,18 @@ Signed-off-by: Alexander Kanavin <alex@linutronix.de>
1 file changed, 4 insertions(+), 1 deletion(-)
diff --git a/Lib/pty.py b/Lib/pty.py
-index 8d8ce40..35439c6 100644
+index 1d97994..fa8821b 100644
--- a/Lib/pty.py
+++ b/Lib/pty.py
-@@ -154,7 +154,10 @@ def _copy(master_fd, master_read=_read, stdin_read=_read):
- os.write(STDOUT_FILENO, data)
+@@ -178,7 +178,10 @@ def _copy(master_fd, master_read=_read, stdin_read=_read):
+ i_buf = i_buf[n:]
- if STDIN_FILENO in rfds:
+ if stdin_avail and STDIN_FILENO in rfds:
- data = stdin_read(STDIN_FILENO)
+ try:
+ data = stdin_read(STDIN_FILENO)
+ except OSError:
+ data = b""
if not data:
- fds.remove(STDIN_FILENO)
+ stdin_avail = False
else:
diff --git a/meta/recipes-devtools/python/python3/0001-Lib-sysconfig.py-use-prefix-value-from-build-configu.patch b/meta/recipes-devtools/python/python3/0001-Lib-sysconfig.py-use-prefix-value-from-build-configu.patch
index a9240b3c8a..680254fab9 100644
--- a/meta/recipes-devtools/python/python3/0001-Lib-sysconfig.py-use-prefix-value-from-build-configu.patch
+++ b/meta/recipes-devtools/python/python3/0001-Lib-sysconfig.py-use-prefix-value-from-build-configu.patch
@@ -1,6 +1,6 @@
-From 01d209277e145072e478d8b9acfea3638ee16cdc Mon Sep 17 00:00:00 2001
+From 71c194077bb907bfe423d3f3275f33a6c8ca0e74 Mon Sep 17 00:00:00 2001
From: Alexander Kanavin <alex@linutronix.de>
-Date: Fri, 10 Sep 2021 12:28:31 +0200
+Date: Fri, 17 Nov 2023 14:26:32 +0100
Subject: [PATCH] Lib/sysconfig.py: use prefix value from build configuration
file
@@ -15,18 +15,18 @@ Signed-off-by: Alexander Kanavin <alex@linutronix.de>
1 file changed, 5 insertions(+)
diff --git a/Lib/sysconfig.py b/Lib/sysconfig.py
-index e64bcdc..40c6b3e 100644
+index 79c0510..91ebcb6 100644
--- a/Lib/sysconfig.py
+++ b/Lib/sysconfig.py
-@@ -613,6 +613,11 @@ def get_config_vars(*args):
- _init_non_posix(_CONFIG_VARS)
- if os.name == 'posix':
- _init_posix(_CONFIG_VARS)
-+ _CONFIG_VARS['installed_base'] = _CONFIG_VARS['prefix']
-+ _CONFIG_VARS['base'] = _CONFIG_VARS['prefix']
-+ _CONFIG_VARS['installed_platbase'] = _CONFIG_VARS['prefix']
-+ _CONFIG_VARS['platbase'] = _CONFIG_VARS['prefix']
-+ _CONFIG_VARS['platlibdir'] = _CONFIG_VARS['PLATLIBDIR']
- # For backward compatibility, see issue19555
- SO = _CONFIG_VARS.get('EXT_SUFFIX')
- if SO is not None:
+@@ -668,6 +668,11 @@ def _init_config_vars():
+ _CONFIG_VARS['VPATH'] = sys._vpath
+ if os.name == 'posix':
+ _init_posix(_CONFIG_VARS)
++ _CONFIG_VARS['installed_base'] = _CONFIG_VARS['prefix']
++ _CONFIG_VARS['base'] = _CONFIG_VARS['prefix']
++ _CONFIG_VARS['installed_platbase'] = _CONFIG_VARS['prefix']
++ _CONFIG_VARS['platbase'] = _CONFIG_VARS['prefix']
++ _CONFIG_VARS['platlibdir'] = _CONFIG_VARS['PLATLIBDIR']
+ if _HAS_USER_BASE:
+ # Setting 'userbase' is done below the call to the
+ # init function to enable using 'get_config_var' in
diff --git a/meta/recipes-devtools/python/python3/0001-Makefile-do-not-compile-.pyc-in-parallel.patch b/meta/recipes-devtools/python/python3/0001-Makefile-do-not-compile-.pyc-in-parallel.patch
deleted file mode 100644
index 2f037ecb09..0000000000
--- a/meta/recipes-devtools/python/python3/0001-Makefile-do-not-compile-.pyc-in-parallel.patch
+++ /dev/null
@@ -1,65 +0,0 @@
-From c960837b8fd83074bab5148236f3d0595468cea4 Mon Sep 17 00:00:00 2001
-From: Alexander Kanavin <alex.kanavin@gmail.com>
-Date: Thu, 16 Jan 2020 12:34:20 +0100
-Subject: [PATCH] Makefile: do not compile .pyc in parallel
-
-This was found to lock up builds, break reproducibility, and produce strange file ownership
-races.
-
-The upstream commit introducing the change was:
-https://github.com/python/cpython/commit/1a2dd82f56bd813aacc570e172cefe55a8a41504
-
-The build lock up issue is reported here:
-https://bugs.python.org/issue45945
-
-The repro failures are documented here:
-https://autobuilder.yocto.io/pub/repro-fail/oe-reproducible-20211130-yr_o1a8d/packages/diff-html/
-
-Upstream-Status: Inappropriate [see issues above]
-Signed-off-by: Alexander Kanavin <alex.kanavin@gmail.com>
-
----
- Makefile.pre.in | 12 ++++++------
- 1 file changed, 6 insertions(+), 6 deletions(-)
-
-diff --git a/Makefile.pre.in b/Makefile.pre.in
-index edd70d4..5e13ba2 100644
---- a/Makefile.pre.in
-+++ b/Makefile.pre.in
-@@ -1601,30 +1601,30 @@ libinstall: build_all $(srcdir)/Modules/xxmodule.c
- fi
- -PYTHONPATH=$(DESTDIR)$(LIBDEST) $(RUNSHARED) \
- $(PYTHON_FOR_BUILD) -Wi $(DESTDIR)$(LIBDEST)/compileall.py \
-- -j0 -d $(LIBDEST) -f \
-+ -d $(LIBDEST) -f \
- -x 'bad_coding|badsyntax|site-packages|lib2to3/tests/data' \
- $(DESTDIR)$(LIBDEST)
- -PYTHONPATH=$(DESTDIR)$(LIBDEST) $(RUNSHARED) \
- $(PYTHON_FOR_BUILD) -Wi -O $(DESTDIR)$(LIBDEST)/compileall.py \
-- -j0 -d $(LIBDEST) -f \
-+ -d $(LIBDEST) -f \
- -x 'bad_coding|badsyntax|site-packages|lib2to3/tests/data' \
- $(DESTDIR)$(LIBDEST)
- -PYTHONPATH=$(DESTDIR)$(LIBDEST) $(RUNSHARED) \
- $(PYTHON_FOR_BUILD) -Wi -OO $(DESTDIR)$(LIBDEST)/compileall.py \
-- -j0 -d $(LIBDEST) -f \
-+ -d $(LIBDEST) -f \
- -x 'bad_coding|badsyntax|site-packages|lib2to3/tests/data' \
- $(DESTDIR)$(LIBDEST)
- -PYTHONPATH=$(DESTDIR)$(LIBDEST) $(RUNSHARED) \
- $(PYTHON_FOR_BUILD) -Wi $(DESTDIR)$(LIBDEST)/compileall.py \
-- -j0 -d $(LIBDEST)/site-packages -f \
-+ -d $(LIBDEST)/site-packages -f \
- -x badsyntax $(DESTDIR)$(LIBDEST)/site-packages
- -PYTHONPATH=$(DESTDIR)$(LIBDEST) $(RUNSHARED) \
- $(PYTHON_FOR_BUILD) -Wi -O $(DESTDIR)$(LIBDEST)/compileall.py \
-- -j0 -d $(LIBDEST)/site-packages -f \
-+ -d $(LIBDEST)/site-packages -f \
- -x badsyntax $(DESTDIR)$(LIBDEST)/site-packages
- -PYTHONPATH=$(DESTDIR)$(LIBDEST) $(RUNSHARED) \
- $(PYTHON_FOR_BUILD) -Wi -OO $(DESTDIR)$(LIBDEST)/compileall.py \
-- -j0 -d $(LIBDEST)/site-packages -f \
-+ -d $(LIBDEST)/site-packages -f \
- -x badsyntax $(DESTDIR)$(LIBDEST)/site-packages
- -PYTHONPATH=$(DESTDIR)$(LIBDEST) $(RUNSHARED) \
- $(PYTHON_FOR_BUILD) -m lib2to3.pgen2.driver $(DESTDIR)$(LIBDEST)/lib2to3/Grammar.txt
diff --git a/meta/recipes-devtools/python/python3/0001-Makefile.pre-use-qemu-wrapper-when-gathering-profile.patch b/meta/recipes-devtools/python/python3/0001-Makefile.pre-use-qemu-wrapper-when-gathering-profile.patch
index e1dabc92a3..ee33128fa1 100644
--- a/meta/recipes-devtools/python/python3/0001-Makefile.pre-use-qemu-wrapper-when-gathering-profile.patch
+++ b/meta/recipes-devtools/python/python3/0001-Makefile.pre-use-qemu-wrapper-when-gathering-profile.patch
@@ -1,26 +1,25 @@
-From 9f85089cc3a21d5ff235bb37c6c9758f2b70497d Mon Sep 17 00:00:00 2001
+From 38278339832a57dbf5fa3ef21accaa03e2c814d7 Mon Sep 17 00:00:00 2001
From: Alexander Kanavin <alex.kanavin@gmail.com>
Date: Wed, 30 Jan 2019 12:41:04 +0100
Subject: [PATCH] Makefile.pre: use qemu wrapper when gathering profile data
Upstream-Status: Inappropriate [oe-core specific]
Signed-off-by: Alexander Kanavin <alex.kanavin@gmail.com>
-
---
Makefile.pre.in | 3 +--
1 file changed, 1 insertion(+), 2 deletions(-)
diff --git a/Makefile.pre.in b/Makefile.pre.in
-index f0aedb7..edd70d4 100644
+index dd5e69f..381feb0 100644
--- a/Makefile.pre.in
+++ b/Makefile.pre.in
-@@ -519,8 +519,7 @@ build_all_generate_profile:
- $(MAKE) @DEF_MAKE_RULE@ CFLAGS_NODIST="$(CFLAGS_NODIST) $(PGO_PROF_GEN_FLAG)" LDFLAGS_NODIST="$(LDFLAGS_NODIST) $(PGO_PROF_GEN_FLAG)" LIBS="$(LIBS)"
-
- run_profile_task:
+@@ -658,8 +658,7 @@ profile-run-stamp:
+ # enabled.
+ $(MAKE) profile-gen-stamp
+ # Next, run the profile task to generate the profile information.
- @ # FIXME: can't run for a cross build
- $(LLVM_PROF_FILE) $(RUNSHARED) ./$(BUILDPYTHON) $(PROFILE_TASK) || true
+ ./pgo-wrapper ./python -m test.regrtest --pgo test_grammar test_opcodes test_dict test_builtin test_exceptions test_types test_support || true
-
- build_all_merge_profile:
$(LLVM_PROF_MERGER)
+ # Remove profile generation binary since we are done with it.
+ $(MAKE) clean-retain-profile
diff --git a/meta/recipes-devtools/python/python3/0001-Skip-failing-tests-due-to-load-variability-on-YP-AB.patch b/meta/recipes-devtools/python/python3/0001-Skip-failing-tests-due-to-load-variability-on-YP-AB.patch
index 96c5a3c840..197daa71a5 100644
--- a/meta/recipes-devtools/python/python3/0001-Skip-failing-tests-due-to-load-variability-on-YP-AB.patch
+++ b/meta/recipes-devtools/python/python3/0001-Skip-failing-tests-due-to-load-variability-on-YP-AB.patch
@@ -1,4 +1,4 @@
-From 7171aeee22a0b7ab57cdf3d1ae15530549f8f92a Mon Sep 17 00:00:00 2001
+From 3471e3478e0760c42e04f8046cee2367ab5706d2 Mon Sep 17 00:00:00 2001
From: Yi Fan Yu <yifan.yu@windriver.com>
Date: Thu, 1 Apr 2021 13:08:37 -0700
Subject: [PATCH] Skip failing tests due to load variability on YP AB
@@ -11,40 +11,62 @@ Upstream-Status: Inappropriate [OE-Specific]
Signed-off-by: Yi Fan Yu <yifan.yu@windriver.com>
+Skip two additional tests due to suspected load variability failures.
+
+[YOCTO #15131]
+[YOCTO #15177]
+
+Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
---
- Lib/test/_test_multiprocessing.py | 2 ++
- Lib/test/test_time.py | 1 +
- 2 files changed, 3 insertions(+)
+ Lib/test/_test_multiprocessing.py | 3 +++
+ Lib/test/test_time.py | 2 ++
+ 2 files changed, 5 insertions(+)
diff --git a/Lib/test/_test_multiprocessing.py b/Lib/test/_test_multiprocessing.py
-index 3bc5b8f..a6e106d 100644
+index e42c7ab..dff5227 100644
--- a/Lib/test/_test_multiprocessing.py
+++ b/Lib/test/_test_multiprocessing.py
-@@ -568,6 +568,7 @@ class _TestProcess(BaseTestCase):
-
+@@ -682,6 +682,7 @@ class _TestProcess(BaseTestCase):
close_queue(q)
+ @support.requires_resource('walltime')
+ @unittest.skip('timing related test, dependent on load')
def test_many_processes(self):
if self.TYPE == 'threads':
self.skipTest('test not appropriate for {}'.format(self.TYPE))
-@@ -4817,6 +4818,7 @@ class TestWait(unittest.TestCase):
- sem.release()
+@@ -2066,6 +2067,7 @@ class _TestBarrier(BaseTestCase):
+ except threading.BrokenBarrierError:
+ results.append(True)
+
++ @unittest.skip('timing related test, dependent on load')
+ def test_timeout(self):
+ """
+ Test wait(timeout)
+@@ -5024,6 +5026,7 @@ class TestWait(unittest.TestCase):
time.sleep(period)
+ @support.requires_resource('walltime')
+ @unittest.skip('timing related test, dependent on load')
def test_wait_integer(self):
from multiprocessing.connection import wait
diff --git a/Lib/test/test_time.py b/Lib/test/test_time.py
-index 875615a..aebaa8c 100644
+index 02cc3f4..51a4548 100644
--- a/Lib/test/test_time.py
+++ b/Lib/test/test_time.py
-@@ -474,6 +474,7 @@ class TimeTestCase(unittest.TestCase):
- def test_perf_counter(self):
- time.perf_counter()
-
+@@ -492,6 +492,7 @@ class TimeTestCase(unittest.TestCase):
+ @unittest.skipIf(
+ support.is_wasi, "process_time not available on WASI"
+ )
+ @unittest.skip('timing related test, dependent on load')
def test_process_time(self):
# process_time() should not include time spend during a sleep
start = time.process_time()
+@@ -505,6 +506,7 @@ class TimeTestCase(unittest.TestCase):
+ self.assertTrue(info.monotonic)
+ self.assertFalse(info.adjustable)
+
++ @unittest.skip('timing related test, dependent on load')
+ def test_thread_time(self):
+ if not hasattr(time, 'thread_time'):
+ if sys.platform.startswith(('linux', 'win')):
diff --git a/meta/recipes-devtools/python/python3/0001-Update-test_sysconfig-for-posix_user-purelib.patch b/meta/recipes-devtools/python/python3/0001-Update-test_sysconfig-for-posix_user-purelib.patch
new file mode 100644
index 0000000000..b6c6ac5a28
--- /dev/null
+++ b/meta/recipes-devtools/python/python3/0001-Update-test_sysconfig-for-posix_user-purelib.patch
@@ -0,0 +1,37 @@
+From 37d058e841ba3bd89b5746cc5381afb014b11581 Mon Sep 17 00:00:00 2001
+From: Wentao Zhang <wentao.zhang@windriver.com>
+Date: Mon, 20 Mar 2023 13:39:52 +0800
+Subject: [PATCH] Update test_sysconfig for posix_user purelib
+
+Steps to trigger the failed test:
+Edit local.conf to add something as follows:
+ BASELIB = "lib64"
+ IMAGE_INSTALL:append = " python3-tests".
+bitbake core-image-sato
+runqemu qemux86-64 nographic slirp
+Reproducer:
+ $python3 -m test test_sysconfig
+
+Update test_sysconfig.test_user_similar() for the posix_user scheme:
+"purelib" doesn't use sys.platlibdir.
+
+Upstream-Status: Inappropriate [oe-core specific]
+Signed-off-by: Wentao Zhang <wentao.zhang@windriver.com>
+
+---
+ Lib/test/test_sysconfig.py | 2 +-
+ 1 file changed, 1 insertion(+), 1 deletion(-)
+
+diff --git a/Lib/test/test_sysconfig.py b/Lib/test/test_sysconfig.py
+index b6dbf3d..5672590 100644
+--- a/Lib/test/test_sysconfig.py
++++ b/Lib/test/test_sysconfig.py
+@@ -372,7 +372,7 @@ class TestSysConfig(unittest.TestCase):
+ expected = os.path.normpath(global_path.replace(base, user, 1))
+ # bpo-44860: platlib of posix_user doesn't use sys.platlibdir,
+ # whereas posix_prefix does.
+- if name == 'platlib':
++ if name == 'platlib' or name == 'purelib':
+ # Replace "/lib64/python3.11/site-packages" suffix
+ # with "/lib/python3.11/site-packages".
+ py_version_short = sysconfig.get_python_version()
diff --git a/meta/recipes-devtools/python/python3/0001-Use-FLAG_REF-always-for-interned-strings.patch b/meta/recipes-devtools/python/python3/0001-Use-FLAG_REF-always-for-interned-strings.patch
deleted file mode 100644
index 993ac243fc..0000000000
--- a/meta/recipes-devtools/python/python3/0001-Use-FLAG_REF-always-for-interned-strings.patch
+++ /dev/null
@@ -1,33 +0,0 @@
-From d7217b79a4e125d4fcc1087743171b94d91d1121 Mon Sep 17 00:00:00 2001
-From: Inada Naoki <songofacandy@gmail.com>
-Date: Sat, 14 Jul 2018 00:46:11 +0900
-Subject: [PATCH] Use FLAG_REF always for interned strings
-
-Upstream-Status: Submitted [https://github.com/python/cpython/pull/8226]
-Signed-off-by: Joshua Watt <JPEWhacker@gmail.com>
-
----
- Python/marshal.c | 9 +++++++--
- 1 file changed, 7 insertions(+), 2 deletions(-)
-
-diff --git a/Python/marshal.c b/Python/marshal.c
-index 4125240..341c9aa 100644
---- a/Python/marshal.c
-+++ b/Python/marshal.c
-@@ -298,9 +298,14 @@ w_ref(PyObject *v, char *flag, WFILE *p)
- if (p->version < 3 || p->hashtable == NULL)
- return 0; /* not writing object references */
-
-- /* if it has only one reference, it definitely isn't shared */
-- if (Py_REFCNT(v) == 1)
-+ /* If it has only one reference, it definitely isn't shared.
-+ * But we use TYPE_REF always for interned string, to PYC file stable
-+ * as possible.
-+ */
-+ if (Py_REFCNT(v) == 1 &&
-+ !(PyUnicode_CheckExact(v) && PyUnicode_CHECK_INTERNED(v))) {
- return 0;
-+ }
-
- entry = _Py_hashtable_get_entry(p->hashtable, v);
- if (entry != NULL) {
diff --git a/meta/recipes-devtools/python/python3/0001-bpo-36852-proper-detection-of-mips-architecture-for-.patch b/meta/recipes-devtools/python/python3/0001-bpo-36852-proper-detection-of-mips-architecture-for-.patch
deleted file mode 100644
index 6ab335a405..0000000000
--- a/meta/recipes-devtools/python/python3/0001-bpo-36852-proper-detection-of-mips-architecture-for-.patch
+++ /dev/null
@@ -1,206 +0,0 @@
-From bb409432f03dd8256865292e382ad16613737829 Mon Sep 17 00:00:00 2001
-From: Matthias Schoepfer <matthias.schoepfer@ithinx.io>
-Date: Fri, 31 May 2019 15:34:34 +0200
-Subject: [PATCH] bpo-36852: proper detection of mips architecture for soft
-
- float
-
-When (cross) compiling for softfloat mips, __mips_hard_float will not be
-defined and detection of OS triplet in configure.ac / configure will fail.
-
-This also has to do with the custom detection of the build triplet. Trying
-to do this in a more autoconf/autotools manner.
-
-Upstream-Status: Submitted [https://github.com/python/cpython/pull/13196]
-Signed-off-by: Matthias Schoepfer <matthias.schoepfer@ithinx.io>
-
----
- configure.ac | 175 +++++++--------------------------------------------
- 1 file changed, 21 insertions(+), 154 deletions(-)
-
-diff --git a/configure.ac b/configure.ac
-index 4230ef2..ee08b1b 100644
---- a/configure.ac
-+++ b/configure.ac
-@@ -718,160 +718,27 @@ then
- fi
-
-
--AC_MSG_CHECKING([for the platform triplet based on compiler characteristics])
--cat >> conftest.c <<EOF
--#undef bfin
--#undef cris
--#undef fr30
--#undef linux
--#undef hppa
--#undef hpux
--#undef i386
--#undef mips
--#undef powerpc
--#undef sparc
--#undef unix
--#if defined(__ANDROID__)
-- # Android is not a multiarch system.
--#elif defined(__linux__)
--# if defined(__x86_64__) && defined(__LP64__)
-- x86_64-linux-gnu
--# elif defined(__x86_64__) && defined(__ILP32__)
-- x86_64-linux-gnux32
--# elif defined(__i386__)
-- i386-linux-gnu
--# elif defined(__aarch64__) && defined(__AARCH64EL__)
--# if defined(__ILP32__)
-- aarch64_ilp32-linux-gnu
--# else
-- aarch64-linux-gnu
--# endif
--# elif defined(__aarch64__) && defined(__AARCH64EB__)
--# if defined(__ILP32__)
-- aarch64_be_ilp32-linux-gnu
--# else
-- aarch64_be-linux-gnu
--# endif
--# elif defined(__alpha__)
-- alpha-linux-gnu
--# elif defined(__ARM_EABI__) && defined(__ARM_PCS_VFP)
--# if defined(__ARMEL__)
-- arm-linux-gnueabihf
--# else
-- armeb-linux-gnueabihf
--# endif
--# elif defined(__ARM_EABI__) && !defined(__ARM_PCS_VFP)
--# if defined(__ARMEL__)
-- arm-linux-gnueabi
--# else
-- armeb-linux-gnueabi
--# endif
--# elif defined(__hppa__)
-- hppa-linux-gnu
--# elif defined(__ia64__)
-- ia64-linux-gnu
--# elif defined(__m68k__) && !defined(__mcoldfire__)
-- m68k-linux-gnu
--# elif defined(__mips_hard_float) && defined(__mips_isa_rev) && (__mips_isa_rev >=6) && defined(_MIPSEL)
--# if _MIPS_SIM == _ABIO32
-- mipsisa32r6el-linux-gnu
--# elif _MIPS_SIM == _ABIN32
-- mipsisa64r6el-linux-gnuabin32
--# elif _MIPS_SIM == _ABI64
-- mipsisa64r6el-linux-gnuabi64
--# else
--# error unknown platform triplet
--# endif
--# elif defined(__mips_hard_float) && defined(__mips_isa_rev) && (__mips_isa_rev >=6)
--# if _MIPS_SIM == _ABIO32
-- mipsisa32r6-linux-gnu
--# elif _MIPS_SIM == _ABIN32
-- mipsisa64r6-linux-gnuabin32
--# elif _MIPS_SIM == _ABI64
-- mipsisa64r6-linux-gnuabi64
--# else
--# error unknown platform triplet
--# endif
--# elif defined(__mips_hard_float) && defined(_MIPSEL)
--# if _MIPS_SIM == _ABIO32
-- mipsel-linux-gnu
--# elif _MIPS_SIM == _ABIN32
-- mips64el-linux-gnuabin32
--# elif _MIPS_SIM == _ABI64
-- mips64el-linux-gnuabi64
--# else
--# error unknown platform triplet
--# endif
--# elif defined(__mips_hard_float)
--# if _MIPS_SIM == _ABIO32
-- mips-linux-gnu
--# elif _MIPS_SIM == _ABIN32
-- mips64-linux-gnuabin32
--# elif _MIPS_SIM == _ABI64
-- mips64-linux-gnuabi64
--# else
--# error unknown platform triplet
--# endif
--# elif defined(__or1k__)
-- or1k-linux-gnu
--# elif defined(__powerpc__) && defined(__SPE__)
-- powerpc-linux-gnuspe
--# elif defined(__powerpc64__)
--# if defined(__LITTLE_ENDIAN__)
-- powerpc64le-linux-gnu
--# else
-- powerpc64-linux-gnu
--# endif
--# elif defined(__powerpc__)
-- powerpc-linux-gnu
--# elif defined(__s390x__)
-- s390x-linux-gnu
--# elif defined(__s390__)
-- s390-linux-gnu
--# elif defined(__sh__) && defined(__LITTLE_ENDIAN__)
-- sh4-linux-gnu
--# elif defined(__sparc__) && defined(__arch64__)
-- sparc64-linux-gnu
--# elif defined(__sparc__)
-- sparc-linux-gnu
--# elif defined(__riscv)
--# if __riscv_xlen == 32
-- riscv32-linux-gnu
--# elif __riscv_xlen == 64
-- riscv64-linux-gnu
--# else
--# error unknown platform triplet
--# endif
--# else
--# error unknown platform triplet
--# endif
--#elif defined(__FreeBSD_kernel__)
--# if defined(__LP64__)
-- x86_64-kfreebsd-gnu
--# elif defined(__i386__)
-- i386-kfreebsd-gnu
--# else
--# error unknown platform triplet
--# endif
--#elif defined(__gnu_hurd__)
-- i386-gnu
--#elif defined(__APPLE__)
-- darwin
--#elif defined(__VXWORKS__)
-- vxworks
--#else
--# error unknown platform triplet
--#endif
--
--EOF
--
--if $CPP $CPPFLAGS conftest.c >conftest.out 2>/dev/null; then
-- PLATFORM_TRIPLET=`grep -v '^#' conftest.out | grep -v '^ *$' | tr -d ' '`
-- AC_MSG_RESULT([$PLATFORM_TRIPLET])
--else
-- AC_MSG_RESULT([none])
--fi
--rm -f conftest.c conftest.out
-+AC_CANONICAL_TARGET
-+## Not using $target to filter out vendor
-+## Need to handle macos, vxworks and hurd special (?) :-/
-+case ${target_os} in
-+ darwin*)
-+ PLATFORM_TRIPLET=darwin
-+ ;;
-+ hurd*)
-+ PLATFORM_TRIPLET=i386-gnu
-+ ;;
-+ vxworks*)
-+ PLATFORM_TRIPLET=vxworks
-+ ;;
-+ *)
-+ if test "${target_cpu}" != "i686"; then
-+ PLATFORM_TRIPLET=${target_cpu}-${target_os}
-+ else
-+ PLATFORM_TRIPLET=i386-${target_os}
-+ fi
-+ ;;
-+esac
-
- AC_MSG_CHECKING([for multiarch])
- AS_CASE([$ac_sys_system],
diff --git a/meta/recipes-devtools/python/python3/0001-distutils-sysconfig-append-STAGING_LIBDIR-python-sys.patch b/meta/recipes-devtools/python/python3/0001-distutils-sysconfig-append-STAGING_LIBDIR-python-sys.patch
deleted file mode 100644
index 3c62c2acb8..0000000000
--- a/meta/recipes-devtools/python/python3/0001-distutils-sysconfig-append-STAGING_LIBDIR-python-sys.patch
+++ /dev/null
@@ -1,28 +0,0 @@
-From 78dd1def953e18e7cda0325bb26d27c051bb6890 Mon Sep 17 00:00:00 2001
-From: Alexander Kanavin <alex.kanavin@gmail.com>
-Date: Thu, 31 Jan 2019 16:46:30 +0100
-Subject: [PATCH] distutils/sysconfig: append
- STAGING_LIBDIR/python-sysconfigdata to sys.path
-
-So that target configuration can be used when running native python
-
-Upstream-Status: Inappropriate [oe-core specific]
-Signed-off-by: Alexander Kanavin <alex.kanavin@gmail.com>
-
----
- Lib/sysconfig.py | 2 ++
- 1 file changed, 2 insertions(+)
-
-diff --git a/Lib/sysconfig.py b/Lib/sysconfig.py
-index 40c6b3e..ac94cc7 100644
---- a/Lib/sysconfig.py
-+++ b/Lib/sysconfig.py
-@@ -474,6 +474,8 @@ def _init_posix(vars):
- """Initialize the module as appropriate for POSIX systems."""
- # _sysconfigdata is generated at build time, see _generate_posix_vars()
- name = _get_sysconfigdata_name()
-+ if 'STAGING_LIBDIR' in os.environ:
-+ sys.path.append(os.environ['STAGING_LIBDIR']+'/python-sysconfigdata')
- _temp = __import__(name, globals(), locals(), ['build_time_vars'], 0)
- build_time_vars = _temp.build_time_vars
- vars.update(build_time_vars)
diff --git a/meta/recipes-devtools/python/python3/0001-gh-114492-Initialize-struct-termios-before-calling-t.patch b/meta/recipes-devtools/python/python3/0001-gh-114492-Initialize-struct-termios-before-calling-t.patch
new file mode 100644
index 0000000000..8406ef30a2
--- /dev/null
+++ b/meta/recipes-devtools/python/python3/0001-gh-114492-Initialize-struct-termios-before-calling-t.patch
@@ -0,0 +1,26 @@
+From 439aa02f42d6e6715c172076261757fcb89a936a Mon Sep 17 00:00:00 2001
+From: "Miss Islington (bot)"
+ <31488909+miss-islington@users.noreply.github.com>
+Date: Tue, 23 Jan 2024 23:02:02 +0100
+Subject: [PATCH] gh-114492: Initialize struct termios before calling
+ tcgetattr() (GH-114495) (GH-114502)
+
+On Alpine Linux it could leave some field non-initialized.
+(cherry picked from commit d22c066b802592932f9eb18434782299e80ca42e)
+
+Upstream-Status: Backport [https://github.com/python/cpython/commit/386c72d9928c51aa2c855ce592bd8022da3b407f]
+Co-authored-by: Serhiy Storchaka <storchaka@gmail.com>
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+---
+ .../next/Library/2024-01-23-21-20-40.gh-issue-114492.vKxl5o.rst | 2 ++
+ 1 file changed, 2 insertions(+)
+ create mode 100644 Misc/NEWS.d/next/Library/2024-01-23-21-20-40.gh-issue-114492.vKxl5o.rst
+
+diff --git a/Misc/NEWS.d/next/Library/2024-01-23-21-20-40.gh-issue-114492.vKxl5o.rst b/Misc/NEWS.d/next/Library/2024-01-23-21-20-40.gh-issue-114492.vKxl5o.rst
+new file mode 100644
+index 0000000..8df8299
+--- /dev/null
++++ b/Misc/NEWS.d/next/Library/2024-01-23-21-20-40.gh-issue-114492.vKxl5o.rst
+@@ -0,0 +1,2 @@
++Make the result of :func:`termios.tcgetattr` reproducible on Alpine Linux.
++Previously it could leave a random garbage in some fields.
diff --git a/meta/recipes-devtools/python/python3/0001-python3-use-cc_basename-to-replace-CC-for-checking-c.patch b/meta/recipes-devtools/python/python3/0001-python3-use-cc_basename-to-replace-CC-for-checking-c.patch
index 6bb85fcb34..bbeabe4389 100644
--- a/meta/recipes-devtools/python/python3/0001-python3-use-cc_basename-to-replace-CC-for-checking-c.patch
+++ b/meta/recipes-devtools/python/python3/0001-python3-use-cc_basename-to-replace-CC-for-checking-c.patch
@@ -1,4 +1,4 @@
-From 9f68a27eb34394a00f1011c06900c609f15fb15c Mon Sep 17 00:00:00 2001
+From ababc7b1db8c406910766e11cdd04cbef7a706c9 Mon Sep 17 00:00:00 2001
From: Changqing Li <changqing.li@windriver.com>
Date: Mon, 22 Oct 2018 15:19:51 +0800
Subject: [PATCH] python3: use cc_basename to replace CC for checking compiler
@@ -14,40 +14,39 @@ x86_64-wrs-linux-gcc: error: unrecognized command line option '-fp-model'
Here use cc_basename to replace CC for checking compiler to avoid such
kind of issue.
-Upstream-Status: Pending
+Upstream-Status: Submitted [https://github.com/python/cpython/pull/96399]
Signed-off-by: Li Zhou <li.zhou@windriver.com>
patch originally from Li Zhou, I just rework it to new version
Signed-off-by: Changqing Li <changqing.li@windriver.com>
-
---
configure.ac | 19 ++++++++++---------
1 file changed, 10 insertions(+), 9 deletions(-)
diff --git a/configure.ac b/configure.ac
-index 0c06914..299786b 100644
+index 384718d..5a1d58b 100644
--- a/configure.ac
+++ b/configure.ac
-@@ -61,6 +61,7 @@ AC_CONFIG_HEADER(pyconfig.h)
+@@ -137,6 +137,7 @@ AC_CONFIG_HEADERS([pyconfig.h])
AC_CANONICAL_HOST
- AC_SUBST(build)
- AC_SUBST(host)
+ AC_SUBST([build])
+ AC_SUBST([host])
+LT_INIT
- # pybuilddir.txt will be created by --generate-posix-vars in the Makefile
- rm -f pybuilddir.txt
-@@ -688,7 +689,7 @@ AC_MSG_RESULT($with_cxx_main)
+ AS_VAR_IF([cross_compiling], [maybe],
+ [AC_MSG_ERROR([Cross compiling required --host=HOST-TUPLE and --build=ARCH])]
+@@ -896,7 +897,7 @@ AC_SUBST([CXX])
preset_cxx="$CXX"
if test -z "$CXX"
then
- case "$CC" in
+ case "$cc_basename" in
- gcc) AC_PATH_TOOL(CXX, [g++], [g++], [notfound]) ;;
- cc) AC_PATH_TOOL(CXX, [c++], [c++], [notfound]) ;;
- clang|*/clang) AC_PATH_TOOL(CXX, [clang++], [clang++], [notfound]) ;;
-@@ -976,7 +977,7 @@ rmdir CaseSensitiveTestDir
+ gcc) AC_PATH_TOOL([CXX], [g++], [g++], [notfound]) ;;
+ cc) AC_PATH_TOOL([CXX], [c++], [c++], [notfound]) ;;
+ clang|*/clang) AC_PATH_TOOL([CXX], [clang++], [clang++], [notfound]) ;;
+@@ -1328,7 +1329,7 @@ rmdir CaseSensitiveTestDir
case $ac_sys_system in
hp*|HP*)
@@ -56,16 +55,16 @@ index 0c06914..299786b 100644
cc|*/cc) CC="$CC -Ae";;
esac;;
esac
-@@ -1374,7 +1375,7 @@ else
- fi],
- [AC_MSG_RESULT(no)])
+@@ -1854,7 +1855,7 @@ esac
+ ],
+ [AC_MSG_RESULT([no])])
if test "$Py_LTO" = 'true' ; then
- case $CC in
+ case $cc_basename in
*clang*)
- AC_SUBST(LLVM_AR)
- AC_PATH_TOOL(LLVM_AR, llvm-ar, '', ${llvm_path})
-@@ -1467,7 +1468,7 @@ then
+ LDFLAGS_NOLTO="-fno-lto"
+ dnl Clang linker requires -flto in order to link objects with LTO information.
+@@ -1983,7 +1984,7 @@ then
fi
fi
LLVM_PROF_ERR=no
@@ -74,7 +73,7 @@ index 0c06914..299786b 100644
*clang*)
# Any changes made here should be reflected in the GCC+Darwin case below
PGO_PROF_GEN_FLAG="-fprofile-instr-generate"
-@@ -1528,7 +1529,7 @@ esac
+@@ -2147,7 +2148,7 @@ AC_MSG_RESULT([$BOLT_APPLY_FLAGS])
# compiler and platform. BASECFLAGS tweaks need to be made even if the
# user set OPT.
@@ -83,25 +82,25 @@ index 0c06914..299786b 100644
*clang*)
cc_is_clang=1
;;
-@@ -1664,7 +1665,7 @@ yes)
+@@ -2419,7 +2420,7 @@ yes)
# ICC doesn't recognize the option, but only emits a warning
## XXX does it emit an unused result warning and can it be disabled?
-- case "$CC" in
-+ case "$cc_basename" in
- *icc*)
- ac_cv_disable_unused_result_warning=no
- ;;
-@@ -2018,7 +2019,7 @@ yes)
+- AS_CASE([$CC],
++ AS_CASE([$cc_basename],
+ [*icc*], [ac_cv_disable_unused_result_warning=no]
+ [PY_CHECK_CC_WARNING([disable], [unused-result])])
+ AS_VAR_IF([ac_cv_disable_unused_result_warning], [yes],
+@@ -2665,7 +2666,7 @@ yes)
;;
esac
-case "$CC" in
+case "$cc_basename" in
- *icc*)
- # ICC needs -fp-model strict or floats behave badly
- CFLAGS_NODIST="$CFLAGS_NODIST -fp-model strict"
-@@ -2836,7 +2837,7 @@ then
+ *mpicc*)
+ CFLAGS_NODIST="$CFLAGS_NODIST"
+ ;;
+@@ -3482,7 +3483,7 @@ then
then
LINKFORSHARED="-Wl,--export-dynamic"
fi;;
@@ -110,12 +109,12 @@ index 0c06914..299786b 100644
*gcc*)
if $CC -Xlinker --help 2>&1 | grep export-dynamic >/dev/null
then
-@@ -5622,7 +5623,7 @@ if test "$have_gcc_asm_for_x87" = yes; then
+@@ -6803,7 +6804,7 @@ if test "$ac_cv_gcc_asm_for_x87" = yes; then
# Some versions of gcc miscompile inline asm:
# http://gcc.gnu.org/bugzilla/show_bug.cgi?id=46491
# http://gcc.gnu.org/ml/gcc/2010-11/msg00366.html
- case $CC in
+ case $cc_basename in
*gcc*)
- AC_MSG_CHECKING(for gcc ipa-pure-const bug)
+ AC_MSG_CHECKING([for gcc ipa-pure-const bug])
saved_cflags="$CFLAGS"
diff --git a/meta/recipes-devtools/python/python3/0001-setup.py-Do-not-detect-multiarch-paths-when-cross-co.patch b/meta/recipes-devtools/python/python3/0001-setup.py-Do-not-detect-multiarch-paths-when-cross-co.patch
deleted file mode 100644
index 1844e0efa3..0000000000
--- a/meta/recipes-devtools/python/python3/0001-setup.py-Do-not-detect-multiarch-paths-when-cross-co.patch
+++ /dev/null
@@ -1,42 +0,0 @@
-From dc966f1278c1077938626d682666767d2c8d0c72 Mon Sep 17 00:00:00 2001
-From: Khem Raj <raj.khem@gmail.com>
-Date: Sat, 9 Apr 2022 18:29:47 +0000
-Subject: [PATCH] setup.py: Do not detect multiarch paths when cross-compiling
-
-add_multiarch_paths() function relies on host tools like dpkg-configure
-to operate, which is not good when cross compiling, since it ends up
-adding native paths in includes in certain cases, e.g. when building
-for aarch64 targets using aarch64 build hosts running debian-like
-distributions e.g. ubuntu, it ends up adding native multiarch paths
--I/usr/include/aarch64-linux-gnu during cross compile and since arches
-are so similar, cross compiler (epecially clang) is inhererently configured
-with multiarch ends up adding these paths to compiler cmdline which
-works ok with gcc since headers are similar but clang barfs on some gcc
-extentions and build fails due to missing gnu extentions but it silently
-compiles when using cross gcc.
-
-Fixes python3 cross build by not running this funciton when cross compiling
-
-Upstream-Status: Inappropriate [OE-Specific]
-Signed-off-by: Khem Raj <raj.khem@gmail.com>
----
- setup.py | 3 ++-
- 1 file changed, 2 insertions(+), 1 deletion(-)
-
-diff --git a/setup.py b/setup.py
-index 2e7f263..f7a3d39 100644
---- a/setup.py
-+++ b/setup.py
-@@ -840,7 +840,8 @@ class PyBuildExt(build_ext):
- # only change this for cross builds for 3.3, issues on Mageia
- if CROSS_COMPILING:
- self.add_cross_compiling_paths()
-- self.add_multiarch_paths()
-+ if not CROSS_COMPILING:
-+ self.add_multiarch_paths()
- self.add_ldflags_cppflags()
-
- def init_inc_lib_dirs(self):
---
-2.25.1
-
diff --git a/meta/recipes-devtools/python/python3/0001-skip-no_stdout_fileno-test-due-to-load-variability.patch b/meta/recipes-devtools/python/python3/0001-skip-no_stdout_fileno-test-due-to-load-variability.patch
new file mode 100644
index 0000000000..2d7bca6a77
--- /dev/null
+++ b/meta/recipes-devtools/python/python3/0001-skip-no_stdout_fileno-test-due-to-load-variability.patch
@@ -0,0 +1,29 @@
+From 217cea231462e7703e8c9ea39c0a6833f799a420 Mon Sep 17 00:00:00 2001
+From: Trevor Gamblin <tgamblin@baylibre.com>
+Date: Fri, 15 Sep 2023 08:48:33 -0400
+Subject: [PATCH] skip no_stdout_fileno test due to load variability
+
+Skip test_input_no_stdout_fileno so that it doesn't fail on systems
+under heavy load.
+
+Upstream-Status: Inappropriate [OE-Specific]
+
+[YOCTO #15210]
+
+Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
+---
+ Lib/test/test_builtin.py | 1 +
+ 1 file changed, 1 insertion(+)
+
+diff --git a/Lib/test/test_builtin.py b/Lib/test/test_builtin.py
+index 4d03c46..b329b7a 100644
+--- a/Lib/test/test_builtin.py
++++ b/Lib/test/test_builtin.py
+@@ -2326,6 +2326,7 @@ class PtyTests(unittest.TestCase):
+ # Check stdin/stdout error handler is used when invoking PyOS_Readline()
+ self.check_input_tty("prompté", b"quux\xe9", "ascii")
+
++ @unittest.skip("Test may fail under heavy load")
+ def test_input_no_stdout_fileno(self):
+ # Issue #24402: If stdin is the original terminal but stdout.fileno()
+ # fails, do not use the original stdout file descriptor
diff --git a/meta/recipes-devtools/python/python3/0001-sysconfig.py-use-platlibdir-also-for-purelib.patch b/meta/recipes-devtools/python/python3/0001-sysconfig.py-use-platlibdir-also-for-purelib.patch
index 4fb63a9b7a..fc52fdac26 100644
--- a/meta/recipes-devtools/python/python3/0001-sysconfig.py-use-platlibdir-also-for-purelib.patch
+++ b/meta/recipes-devtools/python/python3/0001-sysconfig.py-use-platlibdir-also-for-purelib.patch
@@ -1,4 +1,4 @@
-From 9162460d81ccc725fb04a14b27d0bf4afcfb69c9 Mon Sep 17 00:00:00 2001
+From a5d429a0e1a4809c1ded7be7e45dcabeb82c53d8 Mon Sep 17 00:00:00 2001
From: Alexander Kanavin <alex@linutronix.de>
Date: Sun, 12 Sep 2021 21:44:36 +0200
Subject: [PATCH] sysconfig.py: use platlibdir also for purelib
@@ -14,10 +14,10 @@ Signed-off-by: Alexander Kanavin <alex@linutronix.de>
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/Lib/sysconfig.py b/Lib/sysconfig.py
-index daf9f00..e64bcdc 100644
+index 122d441..79c0510 100644
--- a/Lib/sysconfig.py
+++ b/Lib/sysconfig.py
-@@ -27,7 +27,7 @@ _INSTALL_SCHEMES = {
+@@ -28,7 +28,7 @@ _INSTALL_SCHEMES = {
'posix_prefix': {
'stdlib': '{installed_base}/{platlibdir}/python{py_version_short}',
'platstdlib': '{platbase}/{platlibdir}/python{py_version_short}',
diff --git a/meta/recipes-devtools/python/python3/0001-test_ctypes.test_find-skip-without-tools-sdk.patch b/meta/recipes-devtools/python/python3/0001-test_ctypes.test_find-skip-without-tools-sdk.patch
index 371021c0a9..b4fe946cba 100644
--- a/meta/recipes-devtools/python/python3/0001-test_ctypes.test_find-skip-without-tools-sdk.patch
+++ b/meta/recipes-devtools/python/python3/0001-test_ctypes.test_find-skip-without-tools-sdk.patch
@@ -1,4 +1,4 @@
-From 13aa6449c47980c7270dad2527c3911517bf34e6 Mon Sep 17 00:00:00 2001
+From b64c131a576a4b4f821514e711ab91b1394fb4ff Mon Sep 17 00:00:00 2001
From: Tim Orling <timothy.t.orling@intel.com>
Date: Fri, 18 Jun 2021 11:56:50 -0700
Subject: [PATCH] test_ctypes.test_find: skip without tools-sdk
@@ -9,16 +9,16 @@ easiest way to dynamically check for that is looking for
Upstream-Status: Inappropriate [oe-specific]
-Signed-off-by: Tim Orling <timothy.t.orlign@intel.com>
+Signed-off-by: Tim Orling <timothy.t.orling@intel.com>
---
- Lib/ctypes/test/test_find.py | 2 ++
+ Lib/test/test_ctypes/test_find.py | 2 ++
1 file changed, 2 insertions(+)
-diff --git a/Lib/ctypes/test/test_find.py b/Lib/ctypes/test/test_find.py
+diff --git a/Lib/test/test_ctypes/test_find.py b/Lib/test/test_ctypes/test_find.py
index 1ff9d01..59def26 100644
---- a/Lib/ctypes/test/test_find.py
-+++ b/Lib/ctypes/test/test_find.py
+--- a/Lib/test/test_ctypes/test_find.py
++++ b/Lib/test/test_ctypes/test_find.py
@@ -113,10 +113,12 @@ class FindLibraryLinux(unittest.TestCase):
# LD_LIBRARY_PATH)
self.assertEqual(find_library(libname), 'lib%s.so' % libname)
diff --git a/meta/recipes-devtools/python/python3/0001-test_locale.py-correct-the-test-output-format.patch b/meta/recipes-devtools/python/python3/0001-test_locale.py-correct-the-test-output-format.patch
index c762f98307..410a9fc7f1 100644
--- a/meta/recipes-devtools/python/python3/0001-test_locale.py-correct-the-test-output-format.patch
+++ b/meta/recipes-devtools/python/python3/0001-test_locale.py-correct-the-test-output-format.patch
@@ -1,4 +1,4 @@
-From 46856e692377d21be3562f6f90c242f5c9594ae2 Mon Sep 17 00:00:00 2001
+From ef5728f0af14da5c9f80b0f038fe5bf6d44cb0e9 Mon Sep 17 00:00:00 2001
From: Mingli Yu <mingli.yu@windriver.com>
Date: Mon, 5 Aug 2019 15:57:39 +0800
Subject: [PATCH] test_locale.py: correct the test output format
@@ -32,10 +32,10 @@ Signed-off-by: Mingli Yu <mingli.yu@windriver.com>
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/Lib/test/test_locale.py b/Lib/test/test_locale.py
-index f844e62..04df0c2 100644
+index b0d7998..cb12153 100644
--- a/Lib/test/test_locale.py
+++ b/Lib/test/test_locale.py
-@@ -564,7 +564,7 @@ class TestMiscellaneous(unittest.TestCase):
+@@ -557,7 +557,7 @@ class TestMiscellaneous(unittest.TestCase):
self.skipTest('test needs Turkish locale')
loc = locale.getlocale(locale.LC_CTYPE)
if verbose:
diff --git a/meta/recipes-devtools/python/python3/0001-test_storlines-skip-due-to-load-variability.patch b/meta/recipes-devtools/python/python3/0001-test_storlines-skip-due-to-load-variability.patch
new file mode 100644
index 0000000000..0d0eb08459
--- /dev/null
+++ b/meta/recipes-devtools/python/python3/0001-test_storlines-skip-due-to-load-variability.patch
@@ -0,0 +1,30 @@
+From dc69a1afdb3ba619705ff71e14f19ed3142e422f Mon Sep 17 00:00:00 2001
+From: Trevor Gamblin <tgamblin@baylibre.com>
+Date: Fri, 6 Oct 2023 10:59:44 -0400
+Subject: [PATCH] test_storlines: skip due to load variability
+
+This is yet another test that intermittently fails on the Yocto AB when
+a worker is under heavy load, so skip it during testing.
+
+Upstream-Status: Inappropriate [OE-Specific]
+
+[YOCTO #14933]
+
+Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
+
+---
+ Lib/test/test_ftplib.py | 1 +
+ 1 file changed, 1 insertion(+)
+
+diff --git a/Lib/test/test_ftplib.py b/Lib/test/test_ftplib.py
+index 2f191ea..dc29346 100644
+--- a/Lib/test/test_ftplib.py
++++ b/Lib/test/test_ftplib.py
+@@ -626,6 +626,7 @@ class TestFTPClass(TestCase):
+ self.client.storbinary('stor', f, rest=r)
+ self.assertEqual(self.server.handler_instance.rest, str(r))
+
++ @unittest.skip('timing related test, dependent on load')
+ def test_storlines(self):
+ data = RETR_DATA.replace('\r\n', '\n').encode(self.client.encoding)
+ f = io.BytesIO(data)
diff --git a/meta/recipes-devtools/python/python3/0017-setup.py-do-not-report-missing-dependencies-for-disa.patch b/meta/recipes-devtools/python/python3/0017-setup.py-do-not-report-missing-dependencies-for-disa.patch
deleted file mode 100644
index 0ead57e465..0000000000
--- a/meta/recipes-devtools/python/python3/0017-setup.py-do-not-report-missing-dependencies-for-disa.patch
+++ /dev/null
@@ -1,38 +0,0 @@
-From 246c5ffe75a2d494e415d8a7522df9fe22056d41 Mon Sep 17 00:00:00 2001
-From: Alexander Kanavin <alex.kanavin@gmail.com>
-Date: Mon, 7 Oct 2019 13:22:14 +0200
-Subject: [PATCH] setup.py: do not report missing dependencies for disabled
- modules
-
-Reporting those missing dependencies is misleading as the modules would not
-have been built anyway. This particularly matters in oe-core's automated
-build completeness checker which relies on the report.
-
-Upstream-Status: Inappropriate [oe-core specific]
-Signed-off-by: Alexander Kanavin <alex.kanavin@gmail.com>
-Signed-off-by: Martin Jansa <Martin.Jansa@gmail.com>
-Signed-off-by: Alejandro Hernandez Samaniego <alejandro@enedino.org>
-
----
- setup.py | 8 ++++++++
- 1 file changed, 8 insertions(+)
-
-diff --git a/setup.py b/setup.py
-index 2be4738..62f0e18 100644
---- a/setup.py
-+++ b/setup.py
-@@ -517,6 +517,14 @@ class PyBuildExt(build_ext):
- print("%-*s %-*s %-*s" % (longest, e, longest, f,
- longest, g))
-
-+ # There is no need to report missing module dependencies,
-+ # if the modules have been disabled in the first place.
-+ # cannot use mods_disabled here, because remove_configured_extensions adds
-+ # only disabled extensions into it (doesn't cover _dbm, _gdbm, readline
-+ # we support disabling through PACKAGECONFIG)
-+ sysconf_dis = sysconfig.get_config_var('MODDISABLED_NAMES').split()
-+ self.missing = list(set(self.missing) - set(sysconf_dis))
-+
- if self.missing:
- print()
- print("Python build finished successfully!")
diff --git a/meta/recipes-devtools/python/python3/0020-configure.ac-setup.py-do-not-add-a-curses-include-pa.patch b/meta/recipes-devtools/python/python3/0020-configure.ac-setup.py-do-not-add-a-curses-include-pa.patch
index 30d2906439..0661249bfd 100644
--- a/meta/recipes-devtools/python/python3/0020-configure.ac-setup.py-do-not-add-a-curses-include-pa.patch
+++ b/meta/recipes-devtools/python/python3/0020-configure.ac-setup.py-do-not-add-a-curses-include-pa.patch
@@ -1,8 +1,7 @@
-From 788cd0464ee2b175493a0167ceee8c0045ce323c Mon Sep 17 00:00:00 2001
-From: Alexander Kanavin <alex.kanavin@gmail.com>
-Date: Sun, 16 Feb 2020 17:50:25 +0100
-Subject: [PATCH] configure.ac, setup.py: do not add a curses include path from
- the host
+From d0205c60d08f51d84bd8ddc07a57e8c71710fdad Mon Sep 17 00:00:00 2001
+From: Alexander Kanavin <alex@linutronix.de>
+Date: Fri, 17 Nov 2023 14:16:40 +0100
+Subject: [PATCH] configure.ac: do not add a curses include path from the host
This leads to host contamination, and particularly can cause
curses modules to fail at runtime if the host curses is configured
@@ -11,19 +10,17 @@ as dnf failures).
Upstream-Status: Inappropriate [oe-core specific]
Signed-off-by: Alexander Kanavin <alex.kanavin@gmail.com>
-
---
configure.ac | 6 ------
- setup.py | 2 --
- 2 files changed, 8 deletions(-)
+ 1 file changed, 6 deletions(-)
diff --git a/configure.ac b/configure.ac
-index e5e3df8..bfdd987 100644
+index c49cd4f..affdedf 100644
--- a/configure.ac
+++ b/configure.ac
-@@ -5092,12 +5092,6 @@ then
- [Define if you have struct stat.st_mtimensec])
- fi
+@@ -6508,12 +6508,6 @@ AS_VAR_IF([have_panel], [no], [
+ AC_MSG_RESULT([$have_panel (CFLAGS: $PANEL_CFLAGS, LIBS: $PANEL_LIBS)])
+ ])
-# first curses header check
-ac_save_cppflags="$CPPFLAGS"
@@ -31,19 +28,6 @@ index e5e3df8..bfdd987 100644
- CPPFLAGS="$CPPFLAGS -I/usr/include/ncursesw"
-fi
-
- AC_CHECK_HEADERS(curses.h ncurses.h)
-
# On Solaris, term.h requires curses.h
-diff --git a/setup.py b/setup.py
-index 62f0e18..c190002 100644
---- a/setup.py
-+++ b/setup.py
-@@ -1169,8 +1169,6 @@ class PyBuildExt(build_ext):
- panel_library = 'panel'
- if curses_library == 'ncursesw':
- curses_defines.append(('HAVE_NCURSESW', '1'))
-- if not CROSS_COMPILING:
-- curses_includes.append('/usr/include/ncursesw')
- # Bug 1464056: If _curses.so links with ncursesw,
- # _curses_panel.so must link with panelw.
- panel_library = 'panelw'
+ AC_CHECK_HEADERS([term.h], [], [], [
+ #ifdef HAVE_CURSES_H
diff --git a/meta/recipes-devtools/python/python3/12-distutils-prefix-is-inside-staging-area.patch b/meta/recipes-devtools/python/python3/12-distutils-prefix-is-inside-staging-area.patch
deleted file mode 100644
index de4c6c4e19..0000000000
--- a/meta/recipes-devtools/python/python3/12-distutils-prefix-is-inside-staging-area.patch
+++ /dev/null
@@ -1,58 +0,0 @@
-From 33b5a31df6050110f4481a24f5a0a0bf7fe80096 Mon Sep 17 00:00:00 2001
-From: Khem Raj <raj.khem@gmail.com>
-Date: Tue, 14 May 2013 15:00:26 -0700
-Subject: [PATCH] python3: Add target and native recipes
-
-Upstream-Status: Inappropriate [embedded specific]
-
-02/2015 Rebased for Python 3.4.2
-
-The proper prefix is inside our staging area.
-Signed-Off: Michael 'Mickey' Lauer <mickey@vanille-media.de>
-Signed-off-by: Phil Blundell <philb@gnu.org>
-Signed-off-by: Khem Raj <raj.khem@gmail.com>
-Signed-off-by: Alejandro Hernandez <alejandro.hernandez@linux.intel.com>
-
----
- Lib/distutils/sysconfig.py | 14 +++++++++++---
- 1 file changed, 11 insertions(+), 3 deletions(-)
-
-diff --git a/Lib/distutils/sysconfig.py b/Lib/distutils/sysconfig.py
-index 3414a76..361d3a1 100644
---- a/Lib/distutils/sysconfig.py
-+++ b/Lib/distutils/sysconfig.py
-@@ -277,7 +277,9 @@ def get_python_inc(plat_specific=0, prefix=None):
- If 'prefix' is supplied, use it instead of sys.base_prefix or
- sys.base_exec_prefix -- i.e., ignore 'plat_specific'.
- """
-- if prefix is None:
-+ if prefix is None and os.environ.get('STAGING_INCDIR', ""):
-+ prefix = os.environ['STAGING_INCDIR'].rstrip('include')
-+ elif prefix is None:
- prefix = plat_specific and BASE_EXEC_PREFIX or BASE_PREFIX
- if os.name == "posix":
- if python_build:
-@@ -320,7 +322,13 @@ def get_python_lib(plat_specific=0, standard_lib=0, prefix=None):
- If 'prefix' is supplied, use it instead of sys.base_prefix or
- sys.base_exec_prefix -- i.e., ignore 'plat_specific'.
- """
-- if prefix is None:
-+ if os.environ.get('STAGING_LIBDIR', ""):
-+ lib_basename = os.environ['STAGING_LIBDIR'].split('/')[-1]
-+ else:
-+ lib_basename = "lib"
-+ if prefix is None and os.environ.get('STAGING_LIBDIR', ""):
-+ prefix = os.environ['STAGING_LIBDIR'].rstrip(lib_basename)
-+ elif prefix is None:
- if standard_lib:
- prefix = plat_specific and BASE_EXEC_PREFIX or BASE_PREFIX
- else:
-@@ -334,7 +342,7 @@ def get_python_lib(plat_specific=0, standard_lib=0, prefix=None):
- else:
- # Pure Python
- libdir = "lib"
-- libpython = os.path.join(prefix, libdir,
-+ libpython = os.path.join(prefix, lib_basename,
- "python" + get_python_version())
- if standard_lib:
- return libpython
diff --git a/meta/recipes-devtools/python/python3/avoid_warning_about_tkinter.patch b/meta/recipes-devtools/python/python3/avoid_warning_about_tkinter.patch
deleted file mode 100644
index 2de72b7199..0000000000
--- a/meta/recipes-devtools/python/python3/avoid_warning_about_tkinter.patch
+++ /dev/null
@@ -1,31 +0,0 @@
-From 6a23d52c905cd1f6a5944255903ec86ea8b904bb Mon Sep 17 00:00:00 2001
-From: Andrei Gherzan <andrei@gherzan.ro>
-Date: Mon, 28 Jan 2019 15:57:54 +0000
-Subject: [PATCH] _tkinter module needs tk module along with tcl. tk is not yet
- integrated in yocto so we skip the check for this module. Avoid a warning by
- not adding this module to missing variable.
-
-Upstream-Status: Inappropriate [distribution]
-
-Also simply disable the tk module since its not in DEPENDS.
-Signed-off-by: Andrei Gherzan <andrei@gherzan.ro>
-
----
- setup.py | 4 ++--
- 1 file changed, 2 insertions(+), 2 deletions(-)
-
-diff --git a/setup.py b/setup.py
-index 11b5cf5..2be4738 100644
---- a/setup.py
-+++ b/setup.py
-@@ -1895,8 +1895,8 @@ class PyBuildExt(build_ext):
- self.detect_decimal()
- self.detect_ctypes()
- self.detect_multiprocessing()
-- if not self.detect_tkinter():
-- self.missing.append('_tkinter')
-+# if not self.detect_tkinter():
-+# self.missing.append('_tkinter')
- self.detect_uuid()
-
- ## # Uncomment these lines if you want to play with xxmodule.c
diff --git a/meta/recipes-devtools/python/python3/cgi_py.patch b/meta/recipes-devtools/python/python3/cgi_py.patch
index 81e6099cfb..8262c88e73 100644
--- a/meta/recipes-devtools/python/python3/cgi_py.patch
+++ b/meta/recipes-devtools/python/python3/cgi_py.patch
@@ -1,4 +1,4 @@
-From 5b0d1212d661e9a8a36738279fc9109f96eebd25 Mon Sep 17 00:00:00 2001
+From a56778372fe8dc7c42f5ffd911d89498c22dd064 Mon Sep 17 00:00:00 2001
From: Mark Hatle <mark.hatle@windriver.com>
Date: Wed, 21 Sep 2011 20:55:33 -0500
Subject: [PATCH] Lib/cgi.py: Update the script as mentioned in the comment
@@ -12,7 +12,7 @@ Signed-off-by: Mark Hatle <mark.hatle@windriver.com>
1 file changed, 1 insertion(+), 10 deletions(-)
diff --git a/Lib/cgi.py b/Lib/cgi.py
-index 6cb8cf2..a873ff3 100755
+index 8787567..ebe8652 100755
--- a/Lib/cgi.py
+++ b/Lib/cgi.py
@@ -1,13 +1,4 @@
diff --git a/meta/recipes-devtools/python/python3/crosspythonpath.patch b/meta/recipes-devtools/python/python3/crosspythonpath.patch
index 5bb25264da..2c4aef0511 100644
--- a/meta/recipes-devtools/python/python3/crosspythonpath.patch
+++ b/meta/recipes-devtools/python/python3/crosspythonpath.patch
@@ -1,4 +1,4 @@
-From baa3a232e64e9bf5ae945366efdb8088ccf9b828 Mon Sep 17 00:00:00 2001
+From 5b66463c10fec1440e977d5a21a0167862d6d79c Mon Sep 17 00:00:00 2001
From: Ricardo Ribalda <ricardo@ribalda.com>
Date: Tue, 18 Nov 2014 03:35:33 -0500
Subject: [PATCH] configure.ac: add CROSSPYTHONPATH into PYTHONPATH for
@@ -20,15 +20,15 @@ Signed-off-by: Ricardo Ribalda <ricardo@ribalda.com>
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/configure.ac b/configure.ac
-index d0db062..e5e3df8 100644
+index cb9e198..d81c19a 100644
--- a/configure.ac
+++ b/configure.ac
-@@ -83,7 +83,7 @@ if test "$cross_compiling" = yes; then
- AC_MSG_ERROR([python$PACKAGE_VERSION interpreter not found])
- fi
- AC_MSG_RESULT($interp)
-- PYTHON_FOR_BUILD='_PYTHON_PROJECT_BASE=$(abs_builddir) _PYTHON_HOST_PLATFORM=$(_PYTHON_HOST_PLATFORM) PYTHONPATH=$(shell test -f pybuilddir.txt && echo $(abs_builddir)/`cat pybuilddir.txt`:)$(srcdir)/Lib _PYTHON_SYSCONFIGDATA_NAME=_sysconfigdata_$(ABIFLAGS)_$(MACHDEP)_$(MULTIARCH) '$interp
-+ PYTHON_FOR_BUILD='_PYTHON_PROJECT_BASE=$(abs_builddir) _PYTHON_HOST_PLATFORM=$(_PYTHON_HOST_PLATFORM) PYTHONPATH=$(CROSSPYTHONPATH):$(shell test -f pybuilddir.txt && echo $(abs_builddir)/`cat pybuilddir.txt`:)$(srcdir)/Lib _PYTHON_SYSCONFIGDATA_NAME=_sysconfigdata_$(ABIFLAGS)_$(MACHDEP)_$(MULTIARCH) '$interp
- fi
- elif test "$cross_compiling" = maybe; then
- AC_MSG_ERROR([Cross compiling required --host=HOST-TUPLE and --build=ARCH])
+@@ -165,7 +165,7 @@ AC_ARG_WITH([build-python],
+ dnl Build Python interpreter is used for regeneration and freezing.
+ ac_cv_prog_PYTHON_FOR_REGEN=$with_build_python
+ PYTHON_FOR_FREEZE="$with_build_python"
+- PYTHON_FOR_BUILD='_PYTHON_PROJECT_BASE=$(abs_builddir) _PYTHON_HOST_PLATFORM=$(_PYTHON_HOST_PLATFORM) PYTHONPATH=$(shell test -f pybuilddir.txt && echo $(abs_builddir)/`cat pybuilddir.txt`:)$(srcdir)/Lib _PYTHON_SYSCONFIGDATA_NAME=_sysconfigdata_$(ABIFLAGS)_$(MACHDEP)_$(MULTIARCH) '$with_build_python
++ PYTHON_FOR_BUILD='_PYTHON_PROJECT_BASE=$(abs_builddir) _PYTHON_HOST_PLATFORM=$(_PYTHON_HOST_PLATFORM) PYTHONPATH=$(CROSSPYTHONPATH):$(shell test -f pybuilddir.txt && echo $(abs_builddir)/`cat pybuilddir.txt`:)$(srcdir)/Lib _PYTHON_SYSCONFIGDATA_NAME=_sysconfigdata_$(ABIFLAGS)_$(MACHDEP)_$(MULTIARCH) '$with_build_python
+ AC_MSG_RESULT([$with_build_python])
+ ], [
+ AS_VAR_IF([cross_compiling], [yes],
diff --git a/meta/recipes-devtools/python/python3/deterministic_imports.patch b/meta/recipes-devtools/python/python3/deterministic_imports.patch
new file mode 100644
index 0000000000..104df94964
--- /dev/null
+++ b/meta/recipes-devtools/python/python3/deterministic_imports.patch
@@ -0,0 +1,40 @@
+From 039d5e652796b55f1132afa568c7432b6ed89afd Mon Sep 17 00:00:00 2001
+From: Richard Purdie <richard.purdie@linuxfoundation.org>
+Date: Fri, 27 May 2022 17:05:44 +0100
+Subject: [PATCH] python3: Ensure stale empty python module directories don't
+
+There are two issues here. Firstly, the modules are accessed in on disk order. This
+means behaviour seen on one system might not reproduce on another and is a real headache.
+
+Secondly, empty directories left behind by previous modules might be looked at. This
+has caused a long string of different issues for us.
+
+As a result, patch this to a behaviour which works for us.
+
+Upstream-Status: Pending [need to talk to upstream to see if they'll take one or both fixes]
+Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
+
+---
+ Lib/importlib/metadata/__init__.py | 9 ++++++++-
+ 1 file changed, 8 insertions(+), 1 deletion(-)
+
+diff --git a/Lib/importlib/metadata/__init__.py b/Lib/importlib/metadata/__init__.py
+index 82e0ce1..969cac4 100644
+--- a/Lib/importlib/metadata/__init__.py
++++ b/Lib/importlib/metadata/__init__.py
+@@ -710,7 +710,14 @@ class Lookup:
+ self.infos = FreezableDefaultDict(list)
+ self.eggs = FreezableDefaultDict(list)
+
+- for child in path.children():
++ for child in sorted(path.children()):
++ childpath = pathlib.Path(path.root, child)
++ try:
++ if childpath.is_dir() and not any(childpath.iterdir()):
++ # Empty directories aren't interesting
++ continue
++ except PermissionError:
++ continue
+ low = child.lower()
+ if low.endswith((".dist-info", ".egg-info")):
+ # rpartition is faster than splitext and suitable for this purpose.
diff --git a/meta/recipes-devtools/python/python3/get_module_deps3.py b/meta/recipes-devtools/python/python3/get_module_deps3.py
index 1f4c982aed..8e432b49af 100644
--- a/meta/recipes-devtools/python/python3/get_module_deps3.py
+++ b/meta/recipes-devtools/python/python3/get_module_deps3.py
@@ -32,7 +32,7 @@ def fix_path(dep_path):
dep_path = dep_path[dep_path.find(pivot)+len(pivot):]
if '/usr/bin' in dep_path:
- dep_path = dep_path.replace('/usr/bin''${bindir}')
+ dep_path = dep_path.replace('/usr/bin','${bindir}')
# Handle multilib, is there a better way?
if '/usr/lib32' in dep_path:
@@ -56,7 +56,7 @@ if debug == True:
try:
m = importlib.import_module(current_module)
# handle python packages which may not include all modules in the __init__
- if os.path.basename(m.__file__) == "__init__.py":
+ if hasattr(m, '__file__') and os.path.basename(m.__file__) == "__init__.py":
modulepath = os.path.dirname(m.__file__)
for i in os.listdir(modulepath):
if i.startswith("_") or not(i.endswith(".py")):
diff --git a/meta/recipes-devtools/python/python3/makerace.patch b/meta/recipes-devtools/python/python3/makerace.patch
index 2c06784ffc..c1b20703e6 100644
--- a/meta/recipes-devtools/python/python3/makerace.patch
+++ b/meta/recipes-devtools/python/python3/makerace.patch
@@ -1,4 +1,4 @@
-From 7cc02dfa593d1350a689d64a7a6f2dc6478afe24 Mon Sep 17 00:00:00 2001
+From 9f827c29adbe656af3c8fc963fdd8f47aec0c442 Mon Sep 17 00:00:00 2001
From: Richard Purdie <richard.purdie@linuxfoundation.org>
Date: Tue, 13 Jul 2021 23:19:29 +0100
Subject: [PATCH] python3: Fix make race
@@ -12,21 +12,20 @@ Add a dependency to avoid the race.
Upstream-Status: Pending
Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
-
---
Makefile.pre.in | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/Makefile.pre.in b/Makefile.pre.in
-index 5e13ba2..026bffd 100644
+index 381feb0..77bf09a 100644
--- a/Makefile.pre.in
+++ b/Makefile.pre.in
-@@ -1527,7 +1527,7 @@ TESTSUBDIRS= ctypes/test \
- unittest/test unittest/test/testmock
-
+@@ -2250,7 +2250,7 @@ COMPILEALL_OPTS=-j0
TEST_MODULES=@TEST_MODULES@
--libinstall: build_all $(srcdir)/Modules/xxmodule.c
-+libinstall: build_all $(srcdir)/Modules/xxmodule.c libainstall
+
+ .PHONY: libinstall
+-libinstall: all $(srcdir)/Modules/xxmodule.c
++libinstall: all $(srcdir)/Modules/xxmodule.c libainstall
@for i in $(SCRIPTDIR) $(LIBDEST); \
do \
if test ! -d $(DESTDIR)$$i; then \
diff --git a/meta/recipes-devtools/python/python3/python-config.patch b/meta/recipes-devtools/python/python3/python-config.patch
deleted file mode 100644
index 4da399e46e..0000000000
--- a/meta/recipes-devtools/python/python3/python-config.patch
+++ /dev/null
@@ -1,55 +0,0 @@
-From 8632f25ac4e2c53a3c2c8a1b4fc97fc86e8aad5a Mon Sep 17 00:00:00 2001
-From: Tyler Hall <tylerwhall@gmail.com>
-Date: Sun, 4 May 2014 20:06:43 -0400
-Subject: [PATCH] python-config: Revert to using distutils.sysconfig
-
-The newer sysconfig module shares some code with distutils.sysconfig, but the same modifications as in
-
-12-distutils-prefix-is-inside-staging-area.patch makes distutils.sysconfig
-
-affect the native runtime as well as cross building. Use the old, patched
-implementation which returns paths in the staging directory and for the target,
-as appropriate.
-
-Upstream-Status: Inappropriate [Embedded Specific]
-
-Signed-off-by: Tyler Hall <tylerwhall@gmail.com>
-
----
- Misc/python-config.in | 12 +++++++-----
- 1 file changed, 7 insertions(+), 5 deletions(-)
-
-diff --git a/Misc/python-config.in b/Misc/python-config.in
-index ebd99da..0492e08 100644
---- a/Misc/python-config.in
-+++ b/Misc/python-config.in
-@@ -6,7 +6,9 @@
- import getopt
- import os
- import sys
--import sysconfig
-+import warnings
-+warnings.filterwarnings("ignore", category=DeprecationWarning)
-+from distutils import sysconfig
-
- valid_opts = ['prefix', 'exec-prefix', 'includes', 'libs', 'cflags',
- 'ldflags', 'extension-suffix', 'help', 'abiflags', 'configdir',
-@@ -35,14 +37,14 @@ if '--help' in opt_flags:
-
- for opt in opt_flags:
- if opt == '--prefix':
-- print(getvar('prefix'))
-+ print(sysconfig.PREFIX)
-
- elif opt == '--exec-prefix':
-- print(getvar('exec_prefix'))
-+ print(sysconfig.EXEC_PREFIX)
-
- elif opt in ('--includes', '--cflags'):
-- flags = ['-I' + sysconfig.get_path('include'),
-- '-I' + sysconfig.get_path('platinclude')]
-+ flags = ['-I' + sysconfig.get_python_inc(),
-+ '-I' + sysconfig.get_python_inc(plat_specific=True)]
- if opt == '--cflags':
- flags.extend(getvar('CFLAGS').split())
- print(' '.join(flags))
diff --git a/meta/recipes-devtools/python/python3/python3-manifest.json b/meta/recipes-devtools/python/python3/python3-manifest.json
index 2f5dad6486..46092d4004 100644
--- a/meta/recipes-devtools/python/python3/python3-manifest.json
+++ b/meta/recipes-devtools/python/python3/python3-manifest.json
@@ -152,6 +152,23 @@
"${libdir}/python${PYTHON_MAJMIN}/__pycache__/wave.*.pyc"
]
},
+ "cgitb": {
+ "summary": "Special exception handler for Python scripts",
+ "rdepends": [
+ "core",
+ "crypt",
+ "html",
+ "io",
+ "math",
+ "pydoc"
+ ],
+ "files": [
+ "${libdir}/python${PYTHON_MAJMIN}/cgitb.py"
+ ],
+ "cached": [
+ "${libdir}/python${PYTHON_MAJMIN}/__pycache__/cgitb.*.pyc"
+ ]
+ },
"codecs": {
"summary": "Python codec",
"rdepends": [
@@ -189,12 +206,12 @@
"files": [
"${libdir}/python${PYTHON_MAJMIN}/gzip.py",
"${libdir}/python${PYTHON_MAJMIN}/tarfile.py",
- "${libdir}/python${PYTHON_MAJMIN}/zipfile.py"
+ "${libdir}/python${PYTHON_MAJMIN}/zipfile",
+ "${libdir}/python${PYTHON_MAJMIN}/zipfile/_path"
],
"cached": [
"${libdir}/python${PYTHON_MAJMIN}/__pycache__/gzip.*.pyc",
- "${libdir}/python${PYTHON_MAJMIN}/__pycache__/tarfile.*.pyc",
- "${libdir}/python${PYTHON_MAJMIN}/__pycache__/zipfile.*.pyc"
+ "${libdir}/python${PYTHON_MAJMIN}/__pycache__/tarfile.*.pyc"
]
},
"core": {
@@ -232,6 +249,7 @@
"${libdir}/python${PYTHON_MAJMIN}/copy.py",
"${libdir}/python${PYTHON_MAJMIN}/copyreg.py",
"${libdir}/python${PYTHON_MAJMIN}/csv.py",
+ "${libdir}/python${PYTHON_MAJMIN}/dataclasses.py",
"${libdir}/python${PYTHON_MAJMIN}/dis.py",
"${libdir}/python${PYTHON_MAJMIN}/encodings",
"${libdir}/python${PYTHON_MAJMIN}/encodings/aliases.py",
@@ -246,6 +264,7 @@
"${libdir}/python${PYTHON_MAJMIN}/heapq.py",
"${libdir}/python${PYTHON_MAJMIN}/imp.py",
"${libdir}/python${PYTHON_MAJMIN}/importlib",
+ "${libdir}/python${PYTHON_MAJMIN}/importlib/_abc.py",
"${libdir}/python${PYTHON_MAJMIN}/importlib/_bootstrap.py",
"${libdir}/python${PYTHON_MAJMIN}/importlib/_bootstrap_external.py",
"${libdir}/python${PYTHON_MAJMIN}/importlib/abc.py",
@@ -253,6 +272,7 @@
"${libdir}/python${PYTHON_MAJMIN}/importlib/util.py",
"${libdir}/python${PYTHON_MAJMIN}/inspect.py",
"${libdir}/python${PYTHON_MAJMIN}/io.py",
+ "${libdir}/python${PYTHON_MAJMIN}/ipaddress.py",
"${libdir}/python${PYTHON_MAJMIN}/keyword.py",
"${libdir}/python${PYTHON_MAJMIN}/lib-dynload/__pycache__/_struct.*.so",
"${libdir}/python${PYTHON_MAJMIN}/lib-dynload/__pycache__/binascii.*.so",
@@ -266,6 +286,7 @@
"${libdir}/python${PYTHON_MAJMIN}/lib-dynload/_opcode.*.so",
"${libdir}/python${PYTHON_MAJMIN}/lib-dynload/_posixsubprocess.*.so",
"${libdir}/python${PYTHON_MAJMIN}/lib-dynload/_struct.*.so",
+ "${libdir}/python${PYTHON_MAJMIN}/lib-dynload/_typing.*.so",
"${libdir}/python${PYTHON_MAJMIN}/lib-dynload/array.*.so",
"${libdir}/python${PYTHON_MAJMIN}/lib-dynload/binascii.*.so",
"${libdir}/python${PYTHON_MAJMIN}/lib-dynload/grp.*.so",
@@ -290,7 +311,11 @@
"${libdir}/python${PYTHON_MAJMIN}/pkgutil.py",
"${libdir}/python${PYTHON_MAJMIN}/platform.py",
"${libdir}/python${PYTHON_MAJMIN}/posixpath.py",
- "${libdir}/python${PYTHON_MAJMIN}/re.py",
+ "${libdir}/python${PYTHON_MAJMIN}/re",
+ "${libdir}/python${PYTHON_MAJMIN}/re/_casefix.py",
+ "${libdir}/python${PYTHON_MAJMIN}/re/_compiler.py",
+ "${libdir}/python${PYTHON_MAJMIN}/re/_constants.py",
+ "${libdir}/python${PYTHON_MAJMIN}/re/_parser.py",
"${libdir}/python${PYTHON_MAJMIN}/reprlib.py",
"${libdir}/python${PYTHON_MAJMIN}/rlcompleter.py",
"${libdir}/python${PYTHON_MAJMIN}/runpy.py",
@@ -319,6 +344,7 @@
"${libdir}/python${PYTHON_MAJMIN}/urllib/parse.py",
"${libdir}/python${PYTHON_MAJMIN}/warnings.py",
"${libdir}/python${PYTHON_MAJMIN}/weakref.py",
+ "${libdir}/python${PYTHON_MAJMIN}/zipimport.py",
"${prefix}/lib/python${PYTHON_MAJMIN}/config*/*[!.a]"
],
"cached": [
@@ -343,6 +369,7 @@
"${libdir}/python${PYTHON_MAJMIN}/__pycache__/copy.*.pyc",
"${libdir}/python${PYTHON_MAJMIN}/__pycache__/copyreg.*.pyc",
"${libdir}/python${PYTHON_MAJMIN}/__pycache__/csv.*.pyc",
+ "${libdir}/python${PYTHON_MAJMIN}/__pycache__/dataclasses.*.pyc",
"${libdir}/python${PYTHON_MAJMIN}/__pycache__/dis.*.pyc",
"${libdir}/python${PYTHON_MAJMIN}/__pycache__/enum.*.pyc",
"${libdir}/python${PYTHON_MAJMIN}/__pycache__/fnmatch.*.pyc",
@@ -354,6 +381,7 @@
"${libdir}/python${PYTHON_MAJMIN}/__pycache__/imp.*.pyc",
"${libdir}/python${PYTHON_MAJMIN}/__pycache__/inspect.*.pyc",
"${libdir}/python${PYTHON_MAJMIN}/__pycache__/io.*.pyc",
+ "${libdir}/python${PYTHON_MAJMIN}/__pycache__/ipaddress.*.pyc",
"${libdir}/python${PYTHON_MAJMIN}/__pycache__/keyword.*.pyc",
"${libdir}/python${PYTHON_MAJMIN}/__pycache__/linecache.*.pyc",
"${libdir}/python${PYTHON_MAJMIN}/__pycache__/locale.*.pyc",
@@ -400,9 +428,15 @@
"${libdir}/python${PYTHON_MAJMIN}/encodings/__pycache__/latin_1.*.pyc",
"${libdir}/python${PYTHON_MAJMIN}/encodings/__pycache__/utf_8.*.pyc",
"${libdir}/python${PYTHON_MAJMIN}/importlib/__pycache__",
+ "${libdir}/python${PYTHON_MAJMIN}/importlib/__pycache__/_abc.*.pyc",
"${libdir}/python${PYTHON_MAJMIN}/importlib/__pycache__/abc.*.pyc",
"${libdir}/python${PYTHON_MAJMIN}/importlib/__pycache__/machinery.*.pyc",
"${libdir}/python${PYTHON_MAJMIN}/importlib/__pycache__/util.*.pyc",
+ "${libdir}/python${PYTHON_MAJMIN}/re/__pycache__",
+ "${libdir}/python${PYTHON_MAJMIN}/re/__pycache__/_casefix.*.pyc",
+ "${libdir}/python${PYTHON_MAJMIN}/re/__pycache__/_compiler.*.pyc",
+ "${libdir}/python${PYTHON_MAJMIN}/re/__pycache__/_constants.*.pyc",
+ "${libdir}/python${PYTHON_MAJMIN}/re/__pycache__/_parser.*.pyc",
"${libdir}/python${PYTHON_MAJMIN}/urllib/__pycache__",
"${libdir}/python${PYTHON_MAJMIN}/urllib/__pycache__/parse.*.pyc"
]
@@ -422,9 +456,8 @@
"${libdir}/python${PYTHON_MAJMIN}/lib-dynload/_hashlib.*.so",
"${libdir}/python${PYTHON_MAJMIN}/lib-dynload/_md5.*.so",
"${libdir}/python${PYTHON_MAJMIN}/lib-dynload/_sha1.*.so",
- "${libdir}/python${PYTHON_MAJMIN}/lib-dynload/_sha256.*.so",
- "${libdir}/python${PYTHON_MAJMIN}/lib-dynload/_sha3.*.so",
- "${libdir}/python${PYTHON_MAJMIN}/lib-dynload/_sha512.*.so"
+ "${libdir}/python${PYTHON_MAJMIN}/lib-dynload/_sha2.*.so",
+ "${libdir}/python${PYTHON_MAJMIN}/lib-dynload/_sha3.*.so"
],
"cached": [
"${libdir}/python${PYTHON_MAJMIN}/__pycache__/crypt.*.pyc",
@@ -519,8 +552,7 @@
"${libdir}/pkgconfig"
],
"rdepends": [
- "core",
- "distutils"
+ "core"
],
"summary": "Python development package"
},
@@ -536,27 +568,6 @@
"${libdir}/python${PYTHON_MAJMIN}/__pycache__/difflib.*.pyc"
]
},
- "distutils-windows": {
- "summary": "Python distribution utilities (Windows installer stubs)",
- "rdepends": [
- "core"
- ],
- "files": [],
- "cached": []
- },
- "distutils": {
- "summary": "Python Distribution Utilities",
- "rdepends": [
- "compression",
- "core",
- "email",
- "stringold"
- ],
- "files": [
- "${libdir}/python${PYTHON_MAJMIN}/distutils"
- ],
- "cached": []
- },
"doctest": {
"summary": "Python framework for running examples in docstrings",
"rdepends": [
@@ -595,6 +606,16 @@
"${libdir}/python${PYTHON_MAJMIN}/__pycache__/imaplib.*.pyc"
]
},
+ "ensurepip": {
+ "summary": "Support for bootstrapping the pip installer",
+ "rdepends": [
+ "core"
+ ],
+ "files": [
+ "${libdir}/python${PYTHON_MAJMIN}/ensurepip/"
+ ],
+ "cached": []
+ },
"fcntl": {
"summary": "Python's fcntl interface",
"rdepends": [
@@ -621,12 +642,9 @@
"core"
],
"files": [
- "${libdir}/python${PYTHON_MAJMIN}/formatter.py",
"${libdir}/python${PYTHON_MAJMIN}/html"
],
- "cached": [
- "${libdir}/python${PYTHON_MAJMIN}/__pycache__/formatter.*.pyc"
- ]
+ "cached": []
},
"idle": {
"summary": "Python Integrated Development Environment",
@@ -664,7 +682,6 @@
],
"files": [
"${libdir}/python${PYTHON_MAJMIN}/_pyio.py",
- "${libdir}/python${PYTHON_MAJMIN}/ipaddress.py",
"${libdir}/python${PYTHON_MAJMIN}/lib-dynload/_socket.*.so",
"${libdir}/python${PYTHON_MAJMIN}/lib-dynload/_ssl.*.so",
"${libdir}/python${PYTHON_MAJMIN}/lib-dynload/termios.*.so",
@@ -675,7 +692,6 @@
],
"cached": [
"${libdir}/python${PYTHON_MAJMIN}/__pycache__/_pyio.*.pyc",
- "${libdir}/python${PYTHON_MAJMIN}/__pycache__/ipaddress.*.pyc",
"${libdir}/python${PYTHON_MAJMIN}/__pycache__/pipes.*.pyc",
"${libdir}/python${PYTHON_MAJMIN}/__pycache__/socket.*.pyc",
"${libdir}/python${PYTHON_MAJMIN}/__pycache__/ssl.*.pyc",
@@ -715,7 +731,6 @@
"crypt",
"datetime",
"email",
- "fcntl",
"io",
"math",
"mime",
@@ -786,9 +801,9 @@
"db",
"debugger",
"difflib",
- "distutils",
"doctest",
"email",
+ "ensurepip",
"fcntl",
"html",
"idle",
@@ -812,22 +827,20 @@
"pydoc",
"resource",
"shell",
- "smtpd",
"sqlite3",
- "statistics",
+ "statistics",
"stringold",
"syslog",
"terminal",
"threading",
"tkinter",
+ "tomllib",
"unittest",
"unixadmin",
"venv",
"xml",
- "xmlrpc"
- ],
- "rrecommends": [
- "distutils-windows"
+ "xmlrpc",
+ "zoneinfo"
],
"summary": "All Python modules"
},
@@ -876,6 +889,7 @@
"${libdir}/python${PYTHON_MAJMIN}/secrets.py",
"${libdir}/python${PYTHON_MAJMIN}/smtplib.py",
"${libdir}/python${PYTHON_MAJMIN}/telnetlib.py",
+ "${libdir}/python${PYTHON_MAJMIN}/urllib",
"${libdir}/python${PYTHON_MAJMIN}/uuid.py"
],
"cached": [
@@ -980,8 +994,7 @@
"pprint": {
"summary": "Python pretty-print support",
"rdepends": [
- "core",
- "profile"
+ "core"
],
"files": [
"${libdir}/python${PYTHON_MAJMIN}/pprint.py"
@@ -997,14 +1010,12 @@
],
"files": [
"${libdir}/python${PYTHON_MAJMIN}/cProfile.py",
- "${libdir}/python${PYTHON_MAJMIN}/dataclasses.py",
"${libdir}/python${PYTHON_MAJMIN}/lib-dynload/_lsprof.*.so",
"${libdir}/python${PYTHON_MAJMIN}/profile.py",
"${libdir}/python${PYTHON_MAJMIN}/pstats.py"
],
"cached": [
"${libdir}/python${PYTHON_MAJMIN}/__pycache__/cProfile.*.pyc",
- "${libdir}/python${PYTHON_MAJMIN}/__pycache__/dataclasses.*.pyc",
"${libdir}/python${PYTHON_MAJMIN}/__pycache__/profile.*.pyc",
"${libdir}/python${PYTHON_MAJMIN}/__pycache__/pstats.*.pyc"
]
@@ -1050,31 +1061,6 @@
"${libdir}/python${PYTHON_MAJMIN}/__pycache__/shlex.*.pyc"
]
},
- "smtpd": {
- "summary": "Python Simple Mail Transport Daemon",
- "rdepends": [
- "core",
- "crypt",
- "datetime",
- "email",
- "io",
- "math",
- "mime",
- "netclient",
- "stringold"
- ],
- "files": [
- "${bindir}/smtpd.py",
- "${libdir}/python${PYTHON_MAJMIN}/asynchat.py",
- "${libdir}/python${PYTHON_MAJMIN}/asyncore.py",
- "${libdir}/python${PYTHON_MAJMIN}/smtpd.py"
- ],
- "cached": [
- "${libdir}/python${PYTHON_MAJMIN}/__pycache__/asynchat.*.pyc",
- "${libdir}/python${PYTHON_MAJMIN}/__pycache__/asyncore.*.pyc",
- "${libdir}/python${PYTHON_MAJMIN}/__pycache__/smtpd.*.pyc"
- ]
- },
"sqlite3": {
"summary": "Python Sqlite3 database support",
"rdepends": [
@@ -1088,18 +1074,20 @@
"cached": []
},
"statistics": {
- "summary": "Basic statistics module",
- "rdepends": [
- "core",
- "math",
- "numbers"
- ],
- "files": [
- "${libdir}/python${PYTHON_MAJMIN}/statistics.py"
- ],
- "cached": [
- "${libdir}/python${PYTHON_MAJMIN}/__pycache__/statistics.*.pyc"
- ]
+ "summary": "Basic statistics module",
+ "rdepends": [
+ "core",
+ "crypt",
+ "math",
+ "numbers"
+ ],
+ "files": [
+ "${libdir}/python${PYTHON_MAJMIN}/lib-dynload/_statistics.*.so",
+ "${libdir}/python${PYTHON_MAJMIN}/statistics.py"
+ ],
+ "cached": [
+ "${libdir}/python${PYTHON_MAJMIN}/__pycache__/statistics.*.pyc"
+ ]
},
"stringold": {
"summary": "Python string APIs [deprecated]",
@@ -1159,11 +1147,33 @@
"core"
],
"files": [
- "${libdir}/python${PYTHON_MAJMIN}/lib-dynload/_tkinter.*.so",
+ "${libdir}/python${PYTHON_MAJMIN}/lib-dynload/_tkinter.*.so",
"${libdir}/python${PYTHON_MAJMIN}/tkinter"
],
"cached": []
},
+ "tomllib": {
+ "summary": "Provides an interface for parsing TOML",
+ "rdepends": [
+ "core"
+ ],
+ "files": [
+ "${libdir}/python${PYTHON_MAJMIN}/tomllib/"
+ ],
+ "cached": []
+ },
+ "turtle": {
+ "summary": "Turtle graphics is a popular way for introducing programming to kids.",
+ "rdepends": [
+ "tkinter"
+ ],
+ "files": [
+ "${libdir}/python${PYTHON_MAJMIN}/turtle.py"
+ ],
+ "cached": [
+ "${libdir}/python${PYTHON_MAJMIN}/__pycache__/turtle.*.pyc"
+ ]
+ },
"unittest": {
"summary": "Python unit testing framework",
"rdepends": [
@@ -1231,7 +1241,6 @@
"crypt",
"datetime",
"email",
- "fcntl",
"html",
"io",
"math",
@@ -1248,5 +1257,30 @@
"${libdir}/python${PYTHON_MAJMIN}/xmlrpc/__pycache__"
],
"cached": []
+ },
+ "zipapp": {
+ "summary": "Tools to manage the creation of zip files containing Python code",
+ "rdepends": [
+ "compression",
+ "core"
+ ],
+ "files": [
+ "${libdir}/python${PYTHON_MAJMIN}/zipapp.py"
+ ],
+ "cached": [
+ "${libdir}/python${PYTHON_MAJMIN}/__pycache__/zipapp.*.pyc"
+ ]
+ },
+ "zoneinfo": {
+ "summary": "IANA time zone support",
+ "rdepends": [
+ "core",
+ "datetime"
+ ],
+ "files": [
+ "${libdir}/python${PYTHON_MAJMIN}/lib-dynload/_zoneinfo.*.so",
+ "${libdir}/python${PYTHON_MAJMIN}/zoneinfo"
+ ],
+ "cached": []
}
}
diff --git a/meta/recipes-devtools/python/python3/run-ptest b/meta/recipes-devtools/python/python3/run-ptest
index 05396e91ab..d1c26c11e2 100644
--- a/meta/recipes-devtools/python/python3/run-ptest
+++ b/meta/recipes-devtools/python/python3/run-ptest
@@ -1,3 +1,3 @@
#!/bin/sh
-
-SETUPTOOLS_USE_DISTUTILS=nonlocal python3 -m test -v | sed -u -e '/\.\.\. ok/ s/^/PASS: /g' -r -e '/\.\.\. (ERROR|FAIL)/ s/^/FAIL: /g' -e '/\.\.\. skipped/ s/^/SKIP: /g' -e 's/ \.\.\. ok//g' -e 's/ \.\.\. ERROR//g' -e 's/ \.\.\. FAIL//g' -e 's/ \.\.\. skipped//g'
+SKIPPED_TESTS=
+{ SETUPTOOLS_USE_DISTUTILS=nonlocal python3 -m test $SKIPPED_TESTS -v -j 4 || echo "FAIL: python3" ; } | sed -u -e '/\.\.\. ok/ s/^/PASS: /g' -r -e '/\.\.\. (ERROR|FAIL)/ s/^/FAIL: /g' -e '/\.\.\. skipped/ s/^/SKIP: /g' -e 's/ \.\.\. ok//g' -e 's/ \.\.\. ERROR//g' -e 's/ \.\.\. FAIL//g' -e 's/ \.\.\. skipped//g'
diff --git a/meta/recipes-devtools/python/python3_3.10.4.bb b/meta/recipes-devtools/python/python3_3.10.4.bb
deleted file mode 100644
index 2ef320350e..0000000000
--- a/meta/recipes-devtools/python/python3_3.10.4.bb
+++ /dev/null
@@ -1,422 +0,0 @@
-SUMMARY = "The Python Programming Language"
-HOMEPAGE = "http://www.python.org"
-DESCRIPTION = "Python is a programming language that lets you work more quickly and integrate your systems more effectively."
-LICENSE = "PSF-2.0"
-SECTION = "devel/python"
-
-LIC_FILES_CHKSUM = "file://LICENSE;md5=4b8801e752a2c70ac41a5f9aa243f766"
-
-SRC_URI = "http://www.python.org/ftp/python/${PV}/Python-${PV}.tar.xz \
- file://run-ptest \
- file://create_manifest3.py \
- file://get_module_deps3.py \
- file://python3-manifest.json \
- file://check_build_completeness.py \
- file://reformat_sysconfig.py \
- file://cgi_py.patch \
- file://0001-Do-not-add-usr-lib-termcap-to-linker-flags-to-avoid-.patch \
- ${@bb.utils.contains('PACKAGECONFIG', 'tk', '', 'file://avoid_warning_about_tkinter.patch', d)} \
- file://0001-Do-not-use-the-shell-version-of-python-config-that-w.patch \
- file://python-config.patch \
- file://0001-Makefile.pre-use-qemu-wrapper-when-gathering-profile.patch \
- file://0001-python3-use-cc_basename-to-replace-CC-for-checking-c.patch \
- file://0001-bpo-36852-proper-detection-of-mips-architecture-for-.patch \
- file://crosspythonpath.patch \
- file://0001-Use-FLAG_REF-always-for-interned-strings.patch \
- file://0001-test_locale.py-correct-the-test-output-format.patch \
- file://0017-setup.py-do-not-report-missing-dependencies-for-disa.patch \
- file://0001-Makefile-do-not-compile-.pyc-in-parallel.patch \
- file://0020-configure.ac-setup.py-do-not-add-a-curses-include-pa.patch \
- file://0001-Skip-failing-tests-due-to-load-variability-on-YP-AB.patch \
- file://0001-test_ctypes.test_find-skip-without-tools-sdk.patch \
- file://makerace.patch \
- file://0001-sysconfig.py-use-platlibdir-also-for-purelib.patch \
- file://0001-Lib-pty.py-handle-stdin-I-O-errors-same-way-as-maste.patch \
- file://0001-setup.py-Do-not-detect-multiarch-paths-when-cross-co.patch \
- "
-
-SRC_URI:append:class-native = " \
- file://0001-Lib-sysconfig.py-use-prefix-value-from-build-configu.patch \
- file://0001-distutils-sysconfig-append-STAGING_LIBDIR-python-sys.patch \
- file://12-distutils-prefix-is-inside-staging-area.patch \
- file://0001-Don-t-search-system-for-headers-libraries.patch \
- "
-SRC_URI[sha256sum] = "80bf925f571da436b35210886cf79f6eb5fa5d6c571316b73568343451f77a19"
-
-# exclude pre-releases for both python 2.x and 3.x
-UPSTREAM_CHECK_REGEX = "[Pp]ython-(?P<pver>\d+(\.\d+)+).tar"
-UPSTREAM_CHECK_URI = "https://www.python.org/downloads/source/"
-
-CVE_PRODUCT = "python"
-
-# Upstream consider this expected behaviour
-CVE_CHECK_IGNORE += "CVE-2007-4559"
-# This is not exploitable when glibc has CVE-2016-10739 fixed.
-CVE_CHECK_IGNORE += "CVE-2019-18348"
-# These are specific to Microsoft Windows
-CVE_CHECK_IGNORE += "CVE-2020-15523 CVE-2022-26488"
-# The mailcap module is insecure by design, so this can't be fixed in a meaningful way.
-# The module will be removed in the future and flaws documented.
-CVE_CHECK_IGNORE += "CVE-2015-20107"
-
-PYTHON_MAJMIN = "3.10"
-
-S = "${WORKDIR}/Python-${PV}"
-
-BBCLASSEXTEND = "native nativesdk"
-
-inherit autotools pkgconfig qemu ptest multilib_header update-alternatives
-
-MULTILIB_SUFFIX = "${@d.getVar('base_libdir',1).split('/')[-1]}"
-
-ALTERNATIVE:${PN}-dev = "python3-config"
-ALTERNATIVE_LINK_NAME[python3-config] = "${bindir}/python${PYTHON_MAJMIN}-config"
-ALTERNATIVE_TARGET[python3-config] = "${bindir}/python${PYTHON_MAJMIN}-config-${MULTILIB_SUFFIX}"
-
-
-DEPENDS = "bzip2-replacement-native libffi bzip2 openssl sqlite3 zlib virtual/libintl xz virtual/crypt util-linux-libuuid libtirpc libnsl2 autoconf-archive-native ncurses"
-DEPENDS:append:class-target = " python3-native"
-DEPENDS:append:class-nativesdk = " python3-native"
-
-# force to use the mutex+cond implementation (https://bugs.python.org/issue41710)
-CFLAGS += "-DHAVE_BROKEN_POSIX_SEMAPHORES"
-
-EXTRA_OECONF = " --without-ensurepip --enable-shared --with-platlibdir=${baselib}"
-EXTRA_OECONF:append:class-native = " --bindir=${bindir}/${PN}"
-
-export CROSSPYTHONPATH="${STAGING_LIBDIR_NATIVE}/python${PYTHON_MAJMIN}/lib-dynload/"
-
-EXTRANATIVEPATH += "python3-native"
-
-# LTO will be enabled via packageconfig depending upong distro features
-LTO:class-target = ""
-
-CACHED_CONFIGUREVARS = " \
- ac_cv_file__dev_ptmx=yes \
- ac_cv_file__dev_ptc=no \
- ac_cv_working_tzset=yes \
-"
-
-# PGO currently causes builds to not be reproducible so disable by default, see YOCTO #13407
-PACKAGECONFIG:class-target ??= "readline gdbm ${@bb.utils.filter('DISTRO_FEATURES', 'lto', d)}"
-PACKAGECONFIG:class-native ??= "readline gdbm"
-PACKAGECONFIG:class-nativesdk ??= "readline gdbm"
-PACKAGECONFIG[readline] = ",,readline"
-# Use profile guided optimisation by running PyBench inside qemu-user
-PACKAGECONFIG[pgo] = "--enable-optimizations,,qemu-native"
-PACKAGECONFIG[tk] = ",,tk"
-PACKAGECONFIG[gdbm] = ",,gdbm"
-PACKAGECONFIG[lto] = "--with-lto,,"
-
-do_configure:prepend () {
- mkdir -p ${B}/Modules
- cat > ${B}/Modules/Setup.local << EOF
-*disabled*
-${@bb.utils.contains('PACKAGECONFIG', 'gdbm', '', '_gdbm _dbm', d)}
-${@bb.utils.contains('PACKAGECONFIG', 'readline', '', 'readline', d)}
-EOF
-}
-
-CPPFLAGS:append = " -I${STAGING_INCDIR}/ncursesw -I${STAGING_INCDIR}/uuid"
-
-EXTRA_OEMAKE = '\
- STAGING_LIBDIR=${STAGING_LIBDIR} \
- STAGING_INCDIR=${STAGING_INCDIR} \
- LIB=${baselib} \
-'
-
-do_compile:prepend:class-target() {
- if ${@bb.utils.contains('PACKAGECONFIG', 'pgo', 'true', 'false', d)}; then
- qemu_binary="${@qemu_wrapper_cmdline(d, '${STAGING_DIR_TARGET}', ['${B}', '${STAGING_DIR_TARGET}/${base_libdir}'])}"
- cat >pgo-wrapper <<EOF
-#!/bin/sh
-cd ${B}
-$qemu_binary "\$@"
-EOF
- chmod +x pgo-wrapper
- fi
-}
-
-do_install:prepend() {
- ${WORKDIR}/check_build_completeness.py ${T}/log.do_compile
-}
-
-do_install:append:class-target() {
- oe_multilib_header python${PYTHON_MAJMIN}/pyconfig.h
-}
-
-do_install:append:class-native() {
- # Make sure we use /usr/bin/env python
- for PYTHSCRIPT in `grep -rIl ${bindir}/${PN}/python ${D}${bindir}/${PN}`; do
- sed -i -e '1s|^#!.*|#!/usr/bin/env python3|' $PYTHSCRIPT
- done
- # Add a symlink to the native Python so that scripts can just invoke
- # "nativepython" and get the right one without needing absolute paths
- # (these often end up too long for the #! parser in the kernel as the
- # buffer is 128 bytes long).
- ln -s python3-native/python3 ${D}${bindir}/nativepython3
-
- # Remove the opt-1.pyc and opt-2.pyc files. There are over 3,000 of them
- # and the overhead in each recipe-sysroot-native isn't worth it, particularly
- # when they're only used for python called with -O or -OO.
- #find ${D} -name *opt-*.pyc -delete
- # Remove all pyc files. There are a ton of them and it is probably faster to let
- # python create the ones it wants at runtime rather than manage in the sstate
- # tarballs and sysroot creation.
- find ${D} -name *.pyc -delete
-
-}
-
-do_install:append() {
- for c in ${D}/${libdir}/python${PYTHON_MAJMIN}/_sysconfigdata*.py; do
- python3 ${WORKDIR}/reformat_sysconfig.py $c
- done
- rm -f ${D}${libdir}/python${PYTHON_MAJMIN}/__pycache__/_sysconfigdata*.cpython*
-
- mkdir -p ${D}${libdir}/python-sysconfigdata
- sysconfigfile=`find ${D} -name _sysconfig*.py`
- cp $sysconfigfile ${D}${libdir}/python-sysconfigdata/_sysconfigdata.py
-
- sed -i \
- -e "s,^ 'LIBDIR'.*, 'LIBDIR': '${STAGING_LIBDIR}'\,,g" \
- -e "s,^ 'INCLUDEDIR'.*, 'INCLUDEDIR': '${STAGING_INCDIR}'\,,g" \
- -e "s,^ 'CONFINCLUDEDIR'.*, 'CONFINCLUDEDIR': '${STAGING_INCDIR}'\,,g" \
- -e "/^ 'INCLDIRSTOMAKE'/{N; s,/usr/include,${STAGING_INCDIR},g}" \
- -e "/^ 'INCLUDEPY'/s,/usr/include,${STAGING_INCDIR},g" \
- ${D}${libdir}/python-sysconfigdata/_sysconfigdata.py
-
- # Unfortunately the following pyc files are non-deterministc due to 'frozenset'
- # being written without strict ordering, even with PYTHONHASHSEED = 0
- # Upstream is discussing ways to solve the issue properly, until then let's
- # just not install the problematic files.
- # More info: http://benno.id.au/blog/2013/01/15/python-determinism
- rm -f ${D}${libdir}/python${PYTHON_MAJMIN}/test/__pycache__/test_range.cpython*
- rm -f ${D}${libdir}/python${PYTHON_MAJMIN}/test/__pycache__/test_xml_etree.cpython*
-
- # Similar to the above, we're getting reproducibility issues with
- # /usr/lib/python3.10/__pycache__/traceback.cpython-310.pyc
- # so remove it too
- rm -f ${D}${libdir}/python${PYTHON_MAJMIN}/__pycache__/traceback.cpython*
-
- # Remove the opt-1.pyc and opt-2.pyc files. They effectively waste space on embedded
- # style targets as they're only used when python is called with the -O or -OO options
- # which is rare.
- find ${D} -name *opt-*.pyc -delete
-}
-
-do_install:append:class-nativesdk () {
- # Make sure we use /usr/bin/env python
- for PYTHSCRIPT in `grep -rIl ${bindir}/python ${D}${bindir}`; do
- sed -i -e '1s|^#!.*|#!/usr/bin/env python3|' $PYTHSCRIPT
- done
- create_wrapper ${D}${bindir}/python${PYTHON_MAJMIN} TERMINFO_DIRS='${sysconfdir}/terminfo:/etc/terminfo:/usr/share/terminfo:/usr/share/misc/terminfo:/lib/terminfo' PYTHONNOUSERSITE='1'
-}
-
-SSTATE_SCAN_FILES += "Makefile _sysconfigdata.py"
-SSTATE_HASHEQUIV_FILEMAP = " \
- populate_sysroot:*/lib*/python3*/_sysconfigdata*.py:${TMPDIR} \
- populate_sysroot:*/lib*/python3*/_sysconfigdata*.py:${COREBASE} \
- populate_sysroot:*/lib*/python3*/config-*/Makefile:${TMPDIR} \
- populate_sysroot:*/lib*/python3*/config-*/Makefile:${COREBASE} \
- populate_sysroot:*/lib*/python-sysconfigdata/_sysconfigdata.py:${TMPDIR} \
- populate_sysroot:*/lib*/python-sysconfigdata/_sysconfigdata.py:${COREBASE} \
- "
-PACKAGE_PREPROCESS_FUNCS += "py_package_preprocess"
-
-py_package_preprocess () {
- # Remove references to buildmachine paths in target Makefile and _sysconfigdata
- sed -i -e 's:--sysroot=${STAGING_DIR_TARGET}::g' -e s:'--with-libtool-sysroot=${STAGING_DIR_TARGET}'::g \
- -e 's|${DEBUG_PREFIX_MAP}||g' \
- -e 's:${HOSTTOOLS_DIR}/::g' \
- -e 's:${RECIPE_SYSROOT_NATIVE}::g' \
- -e 's:${RECIPE_SYSROOT}::g' \
- -e 's:${BASE_WORKDIR}/${MULTIMACH_TARGET_SYS}::g' \
- ${PKGD}/${libdir}/python${PYTHON_MAJMIN}/config-${PYTHON_MAJMIN}${PYTHON_ABI}*/Makefile \
- ${PKGD}/${libdir}/python${PYTHON_MAJMIN}/_sysconfigdata*.py \
- ${PKGD}/${bindir}/python${PYTHON_MAJMIN}-config
-
- # Reformat _sysconfigdata after modifying it so that it remains
- # reproducible
- for c in ${PKGD}/${libdir}/python${PYTHON_MAJMIN}/_sysconfigdata*.py; do
- python3 ${WORKDIR}/reformat_sysconfig.py $c
- done
-
- # Recompile _sysconfigdata after modifying it
- cd ${PKGD}
- sysconfigfile=`find . -name _sysconfigdata_*.py`
- ${STAGING_BINDIR_NATIVE}/python3-native/python3 \
- -c "from py_compile import compile; compile('$sysconfigfile')"
- ${STAGING_BINDIR_NATIVE}/python3-native/python3 \
- -c "from py_compile import compile; compile('$sysconfigfile', optimize=1)"
- ${STAGING_BINDIR_NATIVE}/python3-native/python3 \
- -c "from py_compile import compile; compile('$sysconfigfile', optimize=2)"
- cd -
-
- mv ${PKGD}/${bindir}/python${PYTHON_MAJMIN}-config ${PKGD}/${bindir}/python${PYTHON_MAJMIN}-config-${MULTILIB_SUFFIX}
-
- #Remove the unneeded copy of target sysconfig data
- rm -rf ${PKGD}/${libdir}/python-sysconfigdata
-}
-
-# We want bytecode precompiled .py files (.pyc's) by default
-# but the user may set it on their own conf
-INCLUDE_PYCS ?= "1"
-
-python(){
- import collections, json
-
- filename = os.path.join(d.getVar('THISDIR'), 'python3', 'python3-manifest.json')
- # This python changes the datastore based on the contents of a file, so mark
- # that dependency.
- bb.parse.mark_dependency(d, filename)
-
- with open(filename) as manifest_file:
- manifest_str = manifest_file.read()
- json_start = manifest_str.find('# EOC') + 6
- manifest_file.seek(json_start)
- manifest_str = manifest_file.read()
- python_manifest = json.loads(manifest_str, object_pairs_hook=collections.OrderedDict)
-
- # First set RPROVIDES for -native case
- # Hardcoded since it cant be python3-native-foo, should be python3-foo-native
- pn = 'python3'
- rprovides = (d.getVar('RPROVIDES') or "").split()
-
- # ${PN}-misc-native is not in the manifest
- rprovides.append(pn + '-misc-native')
-
- for key in python_manifest:
- pypackage = pn + '-' + key + '-native'
- if pypackage not in rprovides:
- rprovides.append(pypackage)
-
- d.setVar('RPROVIDES:class-native', ' '.join(rprovides))
-
- # Then work on the target
- include_pycs = d.getVar('INCLUDE_PYCS')
-
- packages = d.getVar('PACKAGES').split()
- pn = d.getVar('PN')
-
- newpackages=[]
- for key in python_manifest:
- pypackage = pn + '-' + key
-
- if pypackage not in packages:
- # We need to prepend, otherwise python-misc gets everything
- # so we use a new variable
- newpackages.append(pypackage)
-
- # "Build" python's manifest FILES, RDEPENDS and SUMMARY
- d.setVar('FILES:' + pypackage, '')
- for value in python_manifest[key]['files']:
- d.appendVar('FILES:' + pypackage, ' ' + value)
-
- # Add cached files
- if include_pycs == '1':
- for value in python_manifest[key]['cached']:
- d.appendVar('FILES:' + pypackage, ' ' + value)
-
- for value in python_manifest[key]['rdepends']:
- # Make it work with or without $PN
- if '${PN}' in value:
- value=value.split('-', 1)[1]
- d.appendVar('RDEPENDS:' + pypackage, ' ' + pn + '-' + value)
-
- for value in python_manifest[key].get('rrecommends', ()):
- if '${PN}' in value:
- value=value.split('-', 1)[1]
- d.appendVar('RRECOMMENDS:' + pypackage, ' ' + pn + '-' + value)
-
- d.setVar('SUMMARY:' + pypackage, python_manifest[key]['summary'])
-
- # Prepending so to avoid python-misc getting everything
- packages = newpackages + packages
- d.setVar('PACKAGES', ' '.join(packages))
- d.setVar('ALLOW_EMPTY:${PN}-modules', '1')
- d.setVar('ALLOW_EMPTY:${PN}-pkgutil', '1')
-
- if "pgo" in d.getVar("PACKAGECONFIG").split() and not bb.utils.contains('MACHINE_FEATURES', 'qemu-usermode', True, False, d):
- bb.fatal("pgo cannot be enabled as there is no qemu-usermode support for this architecture/machine")
-}
-
-# Files needed to create a new manifest
-
-do_create_manifest() {
- # This task should be run with every new release of Python.
- # We must ensure that PACKAGECONFIG enables everything when creating
- # a new manifest, this is to base our new manifest on a complete
- # native python build, containing all dependencies, otherwise the task
- # wont be able to find the required files.
- # e.g. BerkeleyDB is an optional build dependency so it may or may not
- # be present, we must ensure it is.
-
- cd ${WORKDIR}
- # This needs to be executed by python-native and NOT by HOST's python
- nativepython3 create_manifest3.py ${PYTHON_MAJMIN}
- cp python3-manifest.json.new ${THISDIR}/python3/python3-manifest.json
-}
-
-# bitbake python -c create_manifest
-# Make sure we have native python ready when we create a new manifest
-addtask do_create_manifest after do_patch do_prepare_recipe_sysroot
-
-# manual dependency additions
-RRECOMMENDS:${PN}-core:append:class-nativesdk = " nativesdk-python3-modules"
-RRECOMMENDS:${PN}-crypt:append:class-target = " ${MLPREFIX}openssl ${MLPREFIX}ca-certificates"
-RRECOMMENDS:${PN}-crypt:append:class-nativesdk = " ${MLPREFIX}openssl ${MLPREFIX}ca-certificates"
-
-# For historical reasons PN is empty and provided by python3-modules
-FILES:${PN} = ""
-RPROVIDES:${PN}-modules = "${PN}"
-
-FILES:${PN}-pydoc += "${bindir}/pydoc${PYTHON_MAJMIN} ${bindir}/pydoc3"
-FILES:${PN}-idle += "${bindir}/idle3 ${bindir}/idle${PYTHON_MAJMIN}"
-
-# provide python-pyvenv from python3-venv
-RPROVIDES:${PN}-venv += "${MLPREFIX}python3-pyvenv"
-
-# package libpython3
-PACKAGES =+ "libpython3 libpython3-staticdev"
-FILES:libpython3 = "${libdir}/libpython*.so.*"
-FILES:libpython3-staticdev += "${libdir}/python${PYTHON_MAJMIN}/config-${PYTHON_MAJMIN}-*/libpython${PYTHON_MAJMIN}.a"
-INSANE_SKIP:${PN}-dev += "dev-elf"
-INSANE_SKIP:${PN}-ptest = "dev-deps"
-
-# catch all the rest (unsorted)
-PACKAGES += "${PN}-misc"
-RDEPENDS:${PN}-misc += "\
- ${PN}-core \
- ${PN}-email \
- ${PN}-codecs \
- ${PN}-pydoc \
- ${PN}-pickle \
- ${PN}-audio \
- ${PN}-numbers \
-"
-RDEPENDS:${PN}-modules:append:class-target = " ${MLPREFIX}python3-misc"
-RDEPENDS:${PN}-modules:append:class-nativesdk = " ${MLPREFIX}python3-misc"
-FILES:${PN}-misc = "${libdir}/python${PYTHON_MAJMIN} ${libdir}/python${PYTHON_MAJMIN}/lib-dynload"
-
-# catch manpage
-PACKAGES += "${PN}-man"
-FILES:${PN}-man = "${datadir}/man"
-
-# See https://bugs.python.org/issue18748 and https://bugs.python.org/issue37395
-RDEPENDS:libpython3:append:libc-glibc = " libgcc"
-RDEPENDS:${PN}-ctypes:append:libc-glibc = " ${MLPREFIX}ldconfig"
-RDEPENDS:${PN}-ptest = "${PN}-modules ${PN}-tests ${PN}-dev unzip bzip2 libgcc tzdata-europe coreutils sed"
-RDEPENDS:${PN}-ptest:append:libc-glibc = " locale-base-tr-tr.iso-8859-9"
-RDEPENDS:${PN}-tkinter += "${@bb.utils.contains('PACKAGECONFIG', 'tk', '${MLPREFIX}tk ${MLPREFIX}tk-lib', '', d)}"
-RDEPENDS:${PN}-idle += "${@bb.utils.contains('PACKAGECONFIG', 'tk', '${PN}-tkinter ${MLPREFIX}tcl', '', d)}"
-RDEPENDS:${PN}-dev = ""
-RDEPENDS:${PN}-pydoc += "${PN}-io"
-
-RDEPENDS:${PN}-tests:append:class-target = " ${MLPREFIX}bash"
-RDEPENDS:${PN}-tests:append:class-nativesdk = " ${MLPREFIX}bash"
-
-# Python's tests contain large numbers of files we don't need in the recipe sysroots
-SYSROOT_PREPROCESS_FUNCS += " py3_sysroot_cleanup"
-py3_sysroot_cleanup () {
- rm -rf ${SYSROOT_DESTDIR}${libdir}/python${PYTHON_MAJMIN}/test
-}
diff --git a/meta/recipes-devtools/python/python3_3.12.3.bb b/meta/recipes-devtools/python/python3_3.12.3.bb
new file mode 100644
index 0000000000..b49a58a101
--- /dev/null
+++ b/meta/recipes-devtools/python/python3_3.12.3.bb
@@ -0,0 +1,470 @@
+SUMMARY = "The Python Programming Language"
+HOMEPAGE = "http://www.python.org"
+DESCRIPTION = "Python is a programming language that lets you work more quickly and integrate your systems more effectively."
+LICENSE = "PSF-2.0"
+SECTION = "devel/python"
+
+LIC_FILES_CHKSUM = "file://LICENSE;md5=fcf6b249c2641540219a727f35d8d2c2"
+
+SRC_URI = "http://www.python.org/ftp/python/${PV}/Python-${PV}.tar.xz \
+ file://run-ptest \
+ file://create_manifest3.py \
+ file://get_module_deps3.py \
+ file://python3-manifest.json \
+ file://check_build_completeness.py \
+ file://reformat_sysconfig.py \
+ file://cgi_py.patch \
+ file://0001-Makefile.pre-use-qemu-wrapper-when-gathering-profile.patch \
+ file://0001-python3-use-cc_basename-to-replace-CC-for-checking-c.patch \
+ file://crosspythonpath.patch \
+ file://0001-test_locale.py-correct-the-test-output-format.patch \
+ file://0020-configure.ac-setup.py-do-not-add-a-curses-include-pa.patch \
+ file://0001-Skip-failing-tests-due-to-load-variability-on-YP-AB.patch \
+ file://0001-test_ctypes.test_find-skip-without-tools-sdk.patch \
+ file://makerace.patch \
+ file://0001-sysconfig.py-use-platlibdir-also-for-purelib.patch \
+ file://0001-Lib-pty.py-handle-stdin-I-O-errors-same-way-as-maste.patch \
+ file://deterministic_imports.patch \
+ file://0001-Avoid-shebang-overflow-on-python-config.py.patch \
+ file://0001-Update-test_sysconfig-for-posix_user-purelib.patch \
+ file://0001-skip-no_stdout_fileno-test-due-to-load-variability.patch \
+ file://0001-test_storlines-skip-due-to-load-variability.patch \
+ file://0001-gh-114492-Initialize-struct-termios-before-calling-t.patch \
+ "
+
+SRC_URI:append:class-native = " \
+ file://0001-Lib-sysconfig.py-use-prefix-value-from-build-configu.patch \
+ "
+
+SRC_URI[sha256sum] = "56bfef1fdfc1221ce6720e43a661e3eb41785dd914ce99698d8c7896af4bdaa1"
+
+# exclude pre-releases for both python 2.x and 3.x
+UPSTREAM_CHECK_REGEX = "[Pp]ython-(?P<pver>\d+(\.\d+)+).tar"
+
+CVE_PRODUCT = "python cpython"
+
+CVE_STATUS[CVE-2007-4559] = "disputed: Upstream consider this expected behaviour"
+CVE_STATUS[CVE-2019-18348] = "not-applicable-config: This is not exploitable when glibc has CVE-2016-10739 fixed"
+CVE_STATUS[CVE-2020-15523] = "not-applicable-platform: Issue only applies on Windows"
+CVE_STATUS[CVE-2022-26488] = "not-applicable-platform: Issue only applies on Windows"
+# The module will be removed in the future and flaws documented.
+CVE_STATUS[CVE-2015-20107] = "upstream-wontfix: The mailcap module is insecure by design, so this can't be fixed in a meaningful way"
+CVE_STATUS[CVE-2023-36632] = "disputed: Not an issue, in fact expected behaviour"
+
+PYTHON_MAJMIN = "3.12"
+
+S = "${WORKDIR}/Python-${PV}"
+
+BBCLASSEXTEND = "native nativesdk"
+
+inherit autotools pkgconfig qemu ptest multilib_header update-alternatives
+
+MULTILIB_SUFFIX = "${@d.getVar('base_libdir',1).split('/')[-1]}"
+
+ALTERNATIVE:${PN}-dev = "python3-config"
+ALTERNATIVE_LINK_NAME[python3-config] = "${bindir}/python${PYTHON_MAJMIN}-config"
+ALTERNATIVE_TARGET[python3-config] = "${bindir}/python${PYTHON_MAJMIN}-config-${MULTILIB_SUFFIX}"
+
+
+DEPENDS = "bzip2-replacement-native expat libffi bzip2 openssl sqlite3 zlib virtual/libintl xz virtual/crypt util-linux-libuuid libtirpc libnsl2 autoconf-archive-native ncurses"
+DEPENDS:append:class-target = " python3-native"
+DEPENDS:append:class-nativesdk = " python3-native"
+
+EXTRA_OECONF = " --without-ensurepip --enable-shared --with-platlibdir=${baselib} --with-system-expat"
+EXTRA_OECONF:append:class-native = " --bindir=${bindir}/${PN}"
+EXTRA_OECONF:append:class-target = " --with-build-python=nativepython3"
+EXTRA_OECONF:append:class-nativesdk = " --with-build-python=nativepython3"
+
+export CROSSPYTHONPATH="${STAGING_LIBDIR_NATIVE}/python${PYTHON_MAJMIN}/lib-dynload/"
+
+EXTRANATIVEPATH += "python3-native"
+
+# LTO will be enabled via packageconfig depending upong distro features
+LTO:class-target = ""
+
+CACHED_CONFIGUREVARS = " \
+ ac_cv_file__dev_ptmx=yes \
+ ac_cv_file__dev_ptc=no \
+ ac_cv_working_tzset=yes \
+"
+# set thread stack size to 2MB on musl for interpreter and stdlib C extensions
+# so it does not run into stack limits due to musl's small thread stack
+# This is only needed to build interpreter and not the subsequent modules
+# Thats why CFLAGS_NODIST is modified instead of CFLAGS
+CACHED_CONFIGUREVARS:append:libc-musl = "\
+ CFLAGS_NODIST='${CFLAGS} -DTHREAD_STACK_SIZE=0x200000' \
+"
+
+# PGO currently causes builds to not be reproducible so disable by default, see YOCTO #13407
+PACKAGECONFIG ??= "editline gdbm ${@bb.utils.filter('DISTRO_FEATURES', 'lto', d)}"
+PACKAGECONFIG[readline] = "--with-readline=readline,,readline,,,editline"
+PACKAGECONFIG[editline] = "--with-readline=editline,,libedit,,,readline"
+# Use profile guided optimisation by running PyBench inside qemu-user
+PACKAGECONFIG[pgo] = "--enable-optimizations,,qemu-native"
+PACKAGECONFIG[tk] = ",,tk"
+PACKAGECONFIG[tcl] = ",,tcl"
+PACKAGECONFIG[gdbm] = ",,gdbm"
+PACKAGECONFIG[lto] = "--with-lto,--without-lto"
+
+do_configure:prepend () {
+ mkdir -p ${B}/Modules
+ cat > ${B}/Modules/Setup.local << EOF
+*disabled*
+${@bb.utils.contains('PACKAGECONFIG', 'gdbm', '', '_gdbm _dbm', d)}
+${@bb.utils.contains_any('PACKAGECONFIG', 'readline editline', '', 'readline', d)}
+${@bb.utils.contains('PACKAGECONFIG', 'tk', '', '_tkinter', d)}
+EOF
+}
+
+CPPFLAGS:append = " -I${STAGING_INCDIR}/ncursesw -I${STAGING_INCDIR}/uuid"
+
+# COMPILEALL_OPTS= ensures that .pyc are not compiled in parallel
+# This was found to lock up builds, break reproducibility, and produce strange file ownership
+# races.
+#
+# The upstream commit introducing the change was:
+# https://github.com/python/cpython/commit/1a2dd82f56bd813aacc570e172cefe55a8a41504
+#
+# The build lock up issue is reported here:
+# https://bugs.python.org/issue45945
+#
+# The repro failures are documented here:
+# https://autobuilder.yocto.io/pub/repro-fail/oe-reproducible-20211130-yr_o1a8d/packages/diff-html/
+
+EXTRA_OEMAKE = '\
+ STAGING_LIBDIR=${STAGING_LIBDIR} \
+ STAGING_INCDIR=${STAGING_INCDIR} \
+ LIB=${baselib} \
+ COMPILEALL_OPTS= \
+'
+
+# Generate a Profile Guided Optimisation wrapper script that uses qemu-user for
+# all cross builds.
+write_pgo_wrapper:class-native = ":"
+write_pgo_wrapper() {
+ if ${@bb.utils.contains('PACKAGECONFIG', 'pgo', 'true', 'false', d)}; then
+ cat >pgo-wrapper <<EOF
+#!/bin/sh
+cd ${B}
+${@qemu_wrapper_cmdline(d, '${STAGING_DIR_TARGET}', ['${B}', '${STAGING_DIR_TARGET}/${base_libdir}'])} "\$@"
+EOF
+ chmod +x pgo-wrapper
+ fi
+}
+
+do_compile:prepend() {
+ write_pgo_wrapper
+}
+
+do_install:prepend() {
+ ${WORKDIR}/check_build_completeness.py ${T}/log.do_compile
+}
+
+do_install:append:class-target() {
+ oe_multilib_header python${PYTHON_MAJMIN}/pyconfig.h
+}
+
+do_install:append:class-native() {
+ # Make sure we use /usr/bin/env python
+ for PYTHSCRIPT in `grep -rIl ${bindir}/${PN}/python ${D}${bindir}/${PN}`; do
+ sed -i -e '1s|^#!.*|#!/usr/bin/env python3|' $PYTHSCRIPT
+ done
+ # Add a symlink to the native Python so that scripts can just invoke
+ # "nativepython" and get the right one without needing absolute paths
+ # (these often end up too long for the #! parser in the kernel as the
+ # buffer is 128 bytes long).
+ ln -s python3-native/python3 ${D}${bindir}/nativepython3
+
+ # Remove the opt-1.pyc and opt-2.pyc files. There are over 3,000 of them
+ # and the overhead in each recipe-sysroot-native isn't worth it, particularly
+ # when they're only used for python called with -O or -OO.
+ #find ${D} -name *opt-*.pyc -delete
+ # Remove all pyc files. There are a ton of them and it is probably faster to let
+ # python create the ones it wants at runtime rather than manage in the sstate
+ # tarballs and sysroot creation.
+ find ${D} -name *.pyc -delete
+
+ # Nothing should be looking into ${B} for python3-native
+ sed -i -e 's:${B}:/build/path/unavailable/:g' \
+ ${D}/${libdir}/python${PYTHON_MAJMIN}/config-${PYTHON_MAJMIN}${PYTHON_ABI}*/Makefile
+
+ # disable the lookup in user's site-packages globally
+ sed -i 's#ENABLE_USER_SITE = None#ENABLE_USER_SITE = False#' ${D}${libdir}/python${PYTHON_MAJMIN}/site.py
+
+ # python3-config needs to be in /usr/bin and not in a subdir of it to work properly
+ mv ${D}/${bindir}/${PN}/python*config ${D}/${bindir}/
+}
+
+do_install:append() {
+ for c in ${D}/${libdir}/python${PYTHON_MAJMIN}/_sysconfigdata*.py; do
+ python3 ${WORKDIR}/reformat_sysconfig.py $c
+ done
+ rm -f ${D}${libdir}/python${PYTHON_MAJMIN}/__pycache__/_sysconfigdata*.cpython*
+
+ mkdir -p ${D}${libdir}/python-sysconfigdata
+ sysconfigfile=`find ${D} -name _sysconfig*.py`
+ sed -i \
+ -e "s,^ 'LIBDIR'.*, 'LIBDIR': '${STAGING_LIBDIR}'\,,g" \
+ -e "s,^ 'INCLUDEDIR'.*, 'INCLUDEDIR': '${STAGING_INCDIR}'\,,g" \
+ -e "s,^ 'CONFINCLUDEDIR'.*, 'CONFINCLUDEDIR': '${STAGING_INCDIR}'\,,g" \
+ -e "s,^ 'INCLUDEPY'.*, 'INCLUDEPY': '${STAGING_INCDIR}/python${PYTHON_MAJMIN}'\,,g" \
+ -e "s,^ 'CONFINCLUDEPY'.*, 'CONFINCLUDEPY': '${STAGING_INCDIR}/python${PYTHON_MAJMIN}'\,,g" \
+ -e "s,${B},/build/path/unavailable/,g" \
+ $sysconfigfile
+ cp $sysconfigfile ${D}${libdir}/python-sysconfigdata/_sysconfigdata.py
+
+
+ # Unfortunately the following pyc files are non-deterministc due to 'frozenset'
+ # being written without strict ordering, even with PYTHONHASHSEED = 0
+ # Upstream is discussing ways to solve the issue properly, until then let's
+ # just not install the problematic files.
+ # More info: http://benno.id.au/blog/2013/01/15/python-determinism
+ rm -f ${D}${libdir}/python${PYTHON_MAJMIN}/test/__pycache__/test_range.cpython*
+ rm -f ${D}${libdir}/python${PYTHON_MAJMIN}/test/__pycache__/test_xml_etree.cpython*
+
+ # Similar to the above, we're getting reproducibility issues with
+ # /usr/lib/python3.10/__pycache__/traceback.cpython-310.pyc
+ # so remove it too
+ rm -f ${D}${libdir}/python${PYTHON_MAJMIN}/__pycache__/traceback.cpython*
+
+ # Remove the opt-1.pyc and opt-2.pyc files. They effectively waste space on embedded
+ # style targets as they're only used when python is called with the -O or -OO options
+ # which is rare.
+ find ${D} -name *opt-*.pyc -delete
+}
+
+do_install:append:class-nativesdk () {
+ # Make sure we use /usr/bin/env python
+ for PYTHSCRIPT in `grep -rIl ${bindir}/python ${D}${bindir}`; do
+ sed -i -e '1s|^#!.*|#!/usr/bin/env python3|' $PYTHSCRIPT
+ done
+ create_wrapper ${D}${bindir}/python${PYTHON_MAJMIN} TERMINFO_DIRS='${sysconfdir}/terminfo:/etc/terminfo:/usr/share/terminfo:/usr/share/misc/terminfo:/lib/terminfo' PYTHONNOUSERSITE='1'
+}
+
+do_install_ptest:append:class-target:libc-musl () {
+ sed -i -e 's|SKIPPED_TESTS=|SKIPPED_TESTS="-x test__locale -x test_c_locale_coercion -x test_locale -x test_os test_re -x test__xxsubinterpreters -x test_threading"|' ${D}${PTEST_PATH}/run-ptest
+}
+
+SYSROOT_PREPROCESS_FUNCS:append:class-target = " provide_target_config_script"
+SYSROOT_PREPROCESS_FUNCS:append:class-nativesdk = " provide_target_config_script"
+
+# This is installed into /usr/python-target-config/ and not /usr/bin
+# because adding target sysroot's /usr/bin/ to PATH has unwanted side effects
+# in components erroneously picking up other target executables from it
+provide_target_config_script() {
+ install -d ${SYSROOT_DESTDIR}${prefix}/python-target-config/
+ install ${D}/${bindir}/python3-config ${SYSROOT_DESTDIR}/${prefix}/python-target-config/
+ install ${D}/${bindir}/python${PYTHON_MAJMIN}-config ${SYSROOT_DESTDIR}/${prefix}/python-target-config/
+}
+SYSROOT_DIRS += "${prefix}/python-target-config/"
+
+SSTATE_SCAN_FILES += "Makefile _sysconfigdata.py"
+SSTATE_HASHEQUIV_FILEMAP = " \
+ populate_sysroot:*/lib*/python3*/_sysconfigdata*.py:${TMPDIR} \
+ populate_sysroot:*/lib*/python3*/_sysconfigdata*.py:${COREBASE} \
+ populate_sysroot:*/lib*/python3*/config-*/Makefile:${TMPDIR} \
+ populate_sysroot:*/lib*/python3*/config-*/Makefile:${COREBASE} \
+ populate_sysroot:*/lib*/python-sysconfigdata/_sysconfigdata.py:${TMPDIR} \
+ populate_sysroot:*/lib*/python-sysconfigdata/_sysconfigdata.py:${COREBASE} \
+ "
+PACKAGE_PREPROCESS_FUNCS += "py_package_preprocess"
+
+py_package_preprocess () {
+ # Remove references to buildmachine paths in target Makefile and _sysconfigdata
+ sed -i -e 's:--sysroot=${STAGING_DIR_TARGET}::g' -e s:'--with-libtool-sysroot=${STAGING_DIR_TARGET}'::g \
+ -e 's|${DEBUG_PREFIX_MAP}||g' \
+ -e 's:${HOSTTOOLS_DIR}/::g' \
+ -e 's:${RECIPE_SYSROOT_NATIVE}::g' \
+ -e 's:${RECIPE_SYSROOT}::g' \
+ -e 's:${BASE_WORKDIR}/${MULTIMACH_TARGET_SYS}::g' \
+ ${PKGD}/${libdir}/python${PYTHON_MAJMIN}/config-${PYTHON_MAJMIN}${PYTHON_ABI}*/Makefile \
+ ${PKGD}/${libdir}/python${PYTHON_MAJMIN}/_sysconfigdata*.py \
+ ${PKGD}/${bindir}/python${PYTHON_MAJMIN}-config
+
+ # Reformat _sysconfigdata after modifying it so that it remains
+ # reproducible
+ for c in ${PKGD}/${libdir}/python${PYTHON_MAJMIN}/_sysconfigdata*.py; do
+ python3 ${WORKDIR}/reformat_sysconfig.py $c
+ done
+
+ # Recompile _sysconfigdata after modifying it
+ cd ${PKGD}
+ sysconfigfile=`find . -name _sysconfigdata_*.py`
+ ${STAGING_BINDIR_NATIVE}/python3-native/python3 \
+ -c "from py_compile import compile; compile('$sysconfigfile')"
+ ${STAGING_BINDIR_NATIVE}/python3-native/python3 \
+ -c "from py_compile import compile; compile('$sysconfigfile', optimize=1)"
+ ${STAGING_BINDIR_NATIVE}/python3-native/python3 \
+ -c "from py_compile import compile; compile('$sysconfigfile', optimize=2)"
+ cd -
+
+ mv ${PKGD}/${bindir}/python${PYTHON_MAJMIN}-config ${PKGD}/${bindir}/python${PYTHON_MAJMIN}-config-${MULTILIB_SUFFIX}
+
+ #Remove the unneeded copy of target sysconfig data
+ rm -rf ${PKGD}/${libdir}/python-sysconfigdata
+}
+
+# We want bytecode precompiled .py files (.pyc's) by default
+# but the user may set it on their own conf
+INCLUDE_PYCS ?= "1"
+
+python(){
+ import collections, json
+
+ filename = os.path.join(d.getVar('THISDIR'), 'python3', 'python3-manifest.json')
+ # This python changes the datastore based on the contents of a file, so mark
+ # that dependency.
+ bb.parse.mark_dependency(d, filename)
+
+ with open(filename) as manifest_file:
+ manifest_str = manifest_file.read()
+ json_start = manifest_str.find('# EOC') + 6
+ manifest_file.seek(json_start)
+ manifest_str = manifest_file.read()
+ python_manifest = json.loads(manifest_str, object_pairs_hook=collections.OrderedDict)
+
+ # First set RPROVIDES for -native case
+ # Hardcoded since it cant be python3-native-foo, should be python3-foo-native
+ pn = 'python3'
+ rprovides = (d.getVar('RPROVIDES') or "").split()
+
+ # ${PN}-misc-native is not in the manifest
+ rprovides.append(pn + '-misc-native')
+
+ for key in python_manifest:
+ pypackage = pn + '-' + key + '-native'
+ if pypackage not in rprovides:
+ rprovides.append(pypackage)
+
+ d.setVar('RPROVIDES:class-native', ' '.join(rprovides))
+
+ # Then work on the target
+ include_pycs = d.getVar('INCLUDE_PYCS')
+
+ packages = d.getVar('PACKAGES').split()
+ pn = d.getVar('PN')
+
+ newpackages=[]
+ for key in python_manifest:
+ pypackage = pn + '-' + key
+
+ if pypackage not in packages:
+ # We need to prepend, otherwise python-misc gets everything
+ # so we use a new variable
+ newpackages.append(pypackage)
+
+ # "Build" python's manifest FILES, RDEPENDS and SUMMARY
+ d.setVar('FILES:' + pypackage, '')
+ for value in python_manifest[key]['files']:
+ d.appendVar('FILES:' + pypackage, ' ' + value)
+
+ # Add cached files
+ if include_pycs == '1':
+ for value in python_manifest[key]['cached']:
+ d.appendVar('FILES:' + pypackage, ' ' + value)
+
+ for value in python_manifest[key]['rdepends']:
+ # Make it work with or without $PN
+ if '${PN}' in value:
+ value=value.split('-', 1)[1]
+ d.appendVar('RDEPENDS:' + pypackage, ' ' + pn + '-' + value)
+
+ for value in python_manifest[key].get('rrecommends', ()):
+ if '${PN}' in value:
+ value=value.split('-', 1)[1]
+ d.appendVar('RRECOMMENDS:' + pypackage, ' ' + pn + '-' + value)
+
+ d.setVar('SUMMARY:' + pypackage, python_manifest[key]['summary'])
+
+ # Prepending so to avoid python-misc getting everything
+ packages = newpackages + packages
+ d.setVar('PACKAGES', ' '.join(packages))
+ d.setVar('ALLOW_EMPTY:${PN}-modules', '1')
+ d.setVar('ALLOW_EMPTY:${PN}-pkgutil', '1')
+
+ if "pgo" in d.getVar("PACKAGECONFIG").split() and not bb.utils.contains('MACHINE_FEATURES', 'qemu-usermode', True, False, d):
+ bb.fatal("pgo cannot be enabled as there is no qemu-usermode support for this architecture/machine")
+}
+
+# Files needed to create a new manifest
+
+do_create_manifest() {
+ # This task should be run with every new release of Python.
+ # We must ensure that PACKAGECONFIG enables everything when creating
+ # a new manifest, this is to base our new manifest on a complete
+ # native python build, containing all dependencies, otherwise the task
+ # wont be able to find the required files.
+ # e.g. BerkeleyDB is an optional build dependency so it may or may not
+ # be present, we must ensure it is.
+
+ cd ${WORKDIR}
+ # This needs to be executed by python-native and NOT by HOST's python
+ nativepython3 create_manifest3.py ${PYTHON_MAJMIN}
+ cp python3-manifest.json.new ${THISDIR}/python3/python3-manifest.json
+}
+
+# bitbake python -c create_manifest
+# Make sure we have native python ready when we create a new manifest
+addtask do_create_manifest after do_patch do_prepare_recipe_sysroot
+
+# manual dependency additions
+RRECOMMENDS:${PN}-core:append:class-nativesdk = " nativesdk-python3-modules"
+RRECOMMENDS:${PN}-crypt:append:class-target = " ${MLPREFIX}openssl ${MLPREFIX}ca-certificates"
+RRECOMMENDS:${PN}-crypt:append:class-nativesdk = " ${MLPREFIX}openssl ${MLPREFIX}ca-certificates"
+
+# For historical reasons PN is empty and provided by python3-modules
+FILES:${PN} = ""
+RPROVIDES:${PN}-modules = "${PN}"
+
+FILES:${PN}-pydoc += "${bindir}/pydoc${PYTHON_MAJMIN} ${bindir}/pydoc3"
+FILES:${PN}-idle += "${bindir}/idle3 ${bindir}/idle${PYTHON_MAJMIN}"
+
+# provide python-pyvenv from python3-venv
+RPROVIDES:${PN}-venv += "${MLPREFIX}python3-pyvenv"
+
+# package libpython3
+PACKAGES =+ "libpython3 libpython3-staticdev"
+FILES:libpython3 = "${libdir}/libpython*.so.*"
+FILES:libpython3-staticdev += "${libdir}/python${PYTHON_MAJMIN}/config-${PYTHON_MAJMIN}-*/libpython${PYTHON_MAJMIN}.a"
+INSANE_SKIP:${PN}-dev += "dev-elf"
+INSANE_SKIP:${PN}-ptest = "dev-deps"
+
+# catch all the rest (unsorted)
+PACKAGES += "${PN}-misc"
+RDEPENDS:${PN}-misc += "\
+ ${PN}-audio \
+ ${PN}-codecs \
+ ${PN}-core \
+ ${PN}-email \
+ ${PN}-numbers \
+ ${PN}-pickle \
+ ${PN}-pydoc \
+"
+RDEPENDS:${PN}-modules:append:class-target = " ${MLPREFIX}python3-misc"
+RDEPENDS:${PN}-modules:append:class-nativesdk = " ${MLPREFIX}python3-misc"
+RDEPENDS:${PN}-modules:append:class-target = " ${@bb.utils.contains('PACKAGECONFIG', 'gdbm', '${MLPREFIX}python3-gdbm', '', d)}"
+FILES:${PN}-misc = "${libdir}/python${PYTHON_MAJMIN} ${libdir}/python${PYTHON_MAJMIN}/lib-dynload"
+
+# catch manpage
+PACKAGES += "${PN}-man"
+FILES:${PN}-man = "${datadir}/man"
+
+# See https://bugs.python.org/issue18748 and https://bugs.python.org/issue37395
+RDEPENDS:libpython3:append:libc-glibc = " libgcc"
+RDEPENDS:${PN}-ctypes:append:libc-glibc = " ${MLPREFIX}ldconfig"
+RDEPENDS:${PN}-ptest = "${PN}-modules ${PN}-tests ${PN}-dev ${PN}-cgitb ${PN}-zipapp unzip bzip2 libgcc tzdata coreutils sed gcc g++ binutils \
+ locale-base-fr-fr locale-base-en-us locale-base-de-de"
+RDEPENDS:${PN}-ptest:append:libc-glibc = " locale-base-tr-tr"
+RDEPENDS:${PN}-tkinter += "${@bb.utils.contains('PACKAGECONFIG', 'tk', '${MLPREFIX}tk ${MLPREFIX}tk-lib', '', d)}"
+RDEPENDS:${PN}-idle += "${@bb.utils.contains('PACKAGECONFIG', 'tk', '${PN}-tkinter ${MLPREFIX}tcl', '', d)}"
+DEV_PKG_DEPENDENCY = ""
+RDEPENDS:${PN}-pydoc += "${PN}-io"
+
+RDEPENDS:${PN}-tests:append:class-target = " ${MLPREFIX}bash"
+RDEPENDS:${PN}-tests:append:class-nativesdk = " ${MLPREFIX}bash"
+
+# Python's tests contain large numbers of files we don't need in the recipe sysroots
+SYSROOT_PREPROCESS_FUNCS += " py3_sysroot_cleanup"
+py3_sysroot_cleanup () {
+ rm -rf ${SYSROOT_DESTDIR}${libdir}/python${PYTHON_MAJMIN}/test
+}
diff --git a/meta/recipes-devtools/qemu/nativesdk-qemu-helper_1.0.bb b/meta/recipes-devtools/qemu/nativesdk-qemu-helper_1.0.bb
index abba7fe159..5d40784201 100644
--- a/meta/recipes-devtools/qemu/nativesdk-qemu-helper_1.0.bb
+++ b/meta/recipes-devtools/qemu/nativesdk-qemu-helper_1.0.bb
@@ -1,13 +1,11 @@
SUMMARY = "Qemu helper scripts"
LICENSE = "GPL-2.0-only"
-RDEPENDS:${PN} = "nativesdk-qemu \
+RDEPENDS:${PN} = "nativesdk-qemu nativesdk-unfs3 nativesdk-pseudo \
nativesdk-python3-shell nativesdk-python3-fcntl nativesdk-python3-logging \
"
-PR = "r9"
-LIC_FILES_CHKSUM = "file://${WORKDIR}/tunctl.c;endline=4;md5=ff3a09996bc5fff6bc5d4e0b4c28f999 \
- file://${COREBASE}/scripts/runqemu;beginline=5;endline=10;md5=ac2b489a58739c7628a2604698db5e7f"
+LIC_FILES_CHKSUM = "file://${COREBASE}/scripts/runqemu;beginline=5;endline=10;md5=ac2b489a58739c7628a2604698db5e7f"
SRC_URI = "file://${COREBASE}/scripts/runqemu \
@@ -18,7 +16,6 @@ SRC_URI = "file://${COREBASE}/scripts/runqemu \
file://${COREBASE}/scripts/oe-find-native-sysroot \
file://${COREBASE}/scripts/runqemu-extract-sdk \
file://${COREBASE}/scripts/runqemu-export-rootfs \
- file://tunctl.c \
"
S = "${WORKDIR}"
@@ -26,12 +23,11 @@ S = "${WORKDIR}"
inherit nativesdk
do_compile() {
- ${CC} tunctl.c -o tunctl
+ :
}
do_install() {
install -d ${D}${bindir}
install -m 0755 ${WORKDIR}${COREBASE}/scripts/oe-* ${D}${bindir}/
install -m 0755 ${WORKDIR}${COREBASE}/scripts/runqemu* ${D}${bindir}/
- install tunctl ${D}${bindir}/
}
diff --git a/meta/recipes-devtools/qemu/qemu-helper-native_1.0.bb b/meta/recipes-devtools/qemu/qemu-helper-native_1.0.bb
index aa9e499c77..1dfce0e4c7 100644
--- a/meta/recipes-devtools/qemu/qemu-helper-native_1.0.bb
+++ b/meta/recipes-devtools/qemu/qemu-helper-native_1.0.bb
@@ -1,29 +1,23 @@
SUMMARY = "Helper utilities needed by the runqemu script"
LICENSE = "GPL-2.0-only"
RDEPENDS:${PN} = "qemu-system-native"
-PR = "r1"
-LIC_FILES_CHKSUM = "file://${WORKDIR}/tunctl.c;endline=4;md5=ff3a09996bc5fff6bc5d4e0b4c28f999"
+LIC_FILES_CHKSUM = "file://${WORKDIR}/qemu-oe-bridge-helper.c;endline=4;md5=ae00a3bab86f2caaa8462eacda77f4d7"
-SRC_URI = "\
- file://tunctl.c \
- file://qemu-oe-bridge-helper \
- "
+SRC_URI = "file://qemu-oe-bridge-helper.c"
S = "${WORKDIR}"
inherit native
do_compile() {
- ${CC} ${CFLAGS} ${LDFLAGS} -Wall tunctl.c -o tunctl
+ ${CC} ${CFLAGS} ${LDFLAGS} -Wall qemu-oe-bridge-helper.c -o qemu-oe-bridge-helper
}
do_install() {
install -d ${D}${bindir}
- install tunctl ${D}${bindir}/
-
- install -m 755 ${WORKDIR}/qemu-oe-bridge-helper ${D}${bindir}/
+ install qemu-oe-bridge-helper ${D}${bindir}/
}
-DEPENDS += "qemu-system-native"
+DEPENDS += "qemu-system-native unfs3-native pseudo-native"
addtask addto_recipe_sysroot after do_populate_sysroot before do_build
diff --git a/meta/recipes-devtools/qemu/qemu-helper/qemu-oe-bridge-helper b/meta/recipes-devtools/qemu/qemu-helper/qemu-oe-bridge-helper
deleted file mode 100755
index f057d4eef0..0000000000
--- a/meta/recipes-devtools/qemu/qemu-helper/qemu-oe-bridge-helper
+++ /dev/null
@@ -1,25 +0,0 @@
-#! /bin/sh
-# Copyright 2020 Garmin Ltd. or its subsidiaries
-#
-# SPDX-License-Identifier: GPL-2.0
-#
-# Attempts to find and exec the host qemu-bridge-helper program
-
-# If the QEMU_BRIDGE_HELPER variable is set by the user, exec it.
-if [ -n "$QEMU_BRIDGE_HELPER" ]; then
- exec "$QEMU_BRIDGE_HELPER" "$@"
-fi
-
-# Search common paths for the helper program
-BN="qemu-bridge-helper"
-PATHS="/usr/libexec/ /usr/lib/qemu/"
-
-for p in $PATHS; do
- if [ -e "$p/$BN" ]; then
- exec "$p/$BN" "$@"
- fi
-done
-
-echo "$BN not found!" > /dev/stderr
-exit 1
-
diff --git a/meta/recipes-devtools/qemu/qemu-helper/qemu-oe-bridge-helper.c b/meta/recipes-devtools/qemu/qemu-helper/qemu-oe-bridge-helper.c
new file mode 100644
index 0000000000..c34aa00567
--- /dev/null
+++ b/meta/recipes-devtools/qemu/qemu-helper/qemu-oe-bridge-helper.c
@@ -0,0 +1,40 @@
+/*
+ * Copyright 2022 Garmin Ltd. or its subsidiaries
+ *
+ * SPDX-License-Identifier: GPL-2.0
+ *
+ * Attempts to find and exec the host qemu-bridge-helper program
+ */
+
+#include <stdio.h>
+#include <string.h>
+#include <unistd.h>
+#include <stdlib.h>
+
+void try_program(char const* path, char** args) {
+ if (access(path, X_OK) == 0) {
+ execv(path, args);
+ }
+}
+
+int main(int argc, char** argv) {
+ char* var;
+
+ var = getenv("QEMU_BRIDGE_HELPER");
+ if (var && var[0] != '\0') {
+ execvp(var, argv);
+ return 1;
+ }
+
+ if (argc == 2 && strcmp(argv[1], "--help") == 0) {
+ fprintf(stderr, "Helper function to find and exec qemu-bridge-helper. Set QEMU_BRIDGE_HELPER to override default search path\n");
+ return 0;
+ }
+
+ try_program("/usr/libexec/qemu-bridge-helper", argv);
+ try_program("/usr/lib/qemu/qemu-bridge-helper", argv);
+
+ fprintf(stderr, "No bridge helper found\n");
+ return 1;
+}
+
diff --git a/meta/recipes-devtools/qemu/qemu-helper/tunctl.c b/meta/recipes-devtools/qemu/qemu-helper/tunctl.c
deleted file mode 100644
index d745dd06cb..0000000000
--- a/meta/recipes-devtools/qemu/qemu-helper/tunctl.c
+++ /dev/null
@@ -1,158 +0,0 @@
-/* Copyright 2002 Jeff Dike
- * Licensed under the GPL
- */
-
-#include <stdio.h>
-#include <stdlib.h>
-#include <string.h>
-#include <errno.h>
-#include <fcntl.h>
-#include <unistd.h>
-#include <pwd.h>
-#include <grp.h>
-#include <net/if.h>
-#include <sys/ioctl.h>
-#include <linux/if_tun.h>
-
-/* TUNSETGROUP appeared in 2.6.23 */
-#ifndef TUNSETGROUP
-#define TUNSETGROUP _IOW('T', 206, int)
-#endif
-
-static void Usage(char *name, int status)
-{
- fprintf(stderr, "Create: %s [-b] [-u owner] [-g group] [-t device-name] "
- "[-f tun-clone-device]\n", name);
- fprintf(stderr, "Delete: %s -d device-name [-f tun-clone-device]\n\n",
- name);
- fprintf(stderr, "The default tun clone device is /dev/net/tun - some systems"
- " use\n/dev/misc/net/tun instead\n\n");
- fprintf(stderr, "-b will result in brief output (just the device name)\n");
- exit(status);
-}
-
-int main(int argc, char **argv)
-{
- struct ifreq ifr;
- struct passwd *pw;
- struct group *gr;
- uid_t owner = -1;
- gid_t group = -1;
- int tap_fd, opt, delete = 0, brief = 0;
- char *tun = "", *file = "/dev/net/tun", *name = argv[0], *end;
-
- while((opt = getopt(argc, argv, "bd:f:t:u:g:h")) > 0){
- switch(opt) {
- case 'b':
- brief = 1;
- break;
- case 'd':
- delete = 1;
- tun = optarg;
- break;
- case 'f':
- file = optarg;
- break;
- case 'u':
- pw = getpwnam(optarg);
- if(pw != NULL){
- owner = pw->pw_uid;
- break;
- }
- owner = strtol(optarg, &end, 0);
- if(*end != '\0'){
- fprintf(stderr, "'%s' is neither a username nor a numeric uid.\n",
- optarg);
- Usage(name, 1);
- }
- break;
- case 'g':
- gr = getgrnam(optarg);
- if(gr != NULL){
- group = gr->gr_gid;
- break;
- }
- group = strtol(optarg, &end, 0);
- if(*end != '\0'){
- fprintf(stderr, "'%s' is neither a groupname nor a numeric group.\n",
- optarg);
- Usage(name, 1);
- }
- break;
-
- case 't':
- tun = optarg;
- break;
- case 'h':
- Usage(name, 0);
- break;
- default:
- Usage(name, 1);
- }
- }
-
- argv += optind;
- argc -= optind;
-
- if(argc > 0)
- Usage(name, 1);
-
- if((tap_fd = open(file, O_RDWR)) < 0){
- fprintf(stderr, "Failed to open '%s' : ", file);
- perror("");
- exit(1);
- }
-
- memset(&ifr, 0, sizeof(ifr));
-
- ifr.ifr_flags = IFF_TAP | IFF_NO_PI;
- strncpy(ifr.ifr_name, tun, sizeof(ifr.ifr_name) - 1);
- if(ioctl(tap_fd, TUNSETIFF, (void *) &ifr) < 0){
- perror("TUNSETIFF");
- exit(1);
- }
-
- if(delete){
- if(ioctl(tap_fd, TUNSETPERSIST, 0) < 0){
- perror("disabling TUNSETPERSIST");
- exit(1);
- }
- printf("Set '%s' nonpersistent\n", ifr.ifr_name);
- }
- else {
- /* emulate behaviour prior to TUNSETGROUP */
- if(owner == -1 && group == -1) {
- owner = geteuid();
- }
-
- if(owner != -1) {
- if(ioctl(tap_fd, TUNSETOWNER, owner) < 0){
- perror("TUNSETOWNER");
- exit(1);
- }
- }
- if(group != -1) {
- if(ioctl(tap_fd, TUNSETGROUP, group) < 0){
- perror("TUNSETGROUP");
- exit(1);
- }
- }
-
- if(ioctl(tap_fd, TUNSETPERSIST, 1) < 0){
- perror("enabling TUNSETPERSIST");
- exit(1);
- }
-
- if(brief)
- printf("%s\n", ifr.ifr_name);
- else {
- printf("Set '%s' persistent and owned by", ifr.ifr_name);
- if(owner != -1)
- printf(" uid %d", owner);
- if(group != -1)
- printf(" gid %d", group);
- printf("\n");
- }
- }
- return(0);
-}
diff --git a/meta/recipes-devtools/qemu/qemu-native.inc b/meta/recipes-devtools/qemu/qemu-native.inc
index d9acc613f9..891dc5e2a1 100644
--- a/meta/recipes-devtools/qemu/qemu-native.inc
+++ b/meta/recipes-devtools/qemu/qemu-native.inc
@@ -1,6 +1,6 @@
require qemu.inc
-inherit native
+inherit_defer native
EXTRA_OEMAKE:append = " LD='${LD}' AR='${AR}' OBJCOPY='${OBJCOPY}' LDFLAGS='${LDFLAGS}'"
diff --git a/meta/recipes-devtools/qemu/qemu-native_7.0.0.bb b/meta/recipes-devtools/qemu/qemu-native_7.0.0.bb
deleted file mode 100644
index a94dc0b61e..0000000000
--- a/meta/recipes-devtools/qemu/qemu-native_7.0.0.bb
+++ /dev/null
@@ -1,9 +0,0 @@
-BPN = "qemu"
-
-DEPENDS = "glib-2.0-native zlib-native ninja-native meson-native"
-
-require qemu-native.inc
-
-EXTRA_OECONF:append = " --target-list=${@get_qemu_usermode_target_list(d)} --disable-tools --disable-blobs --disable-guest-agent"
-
-PACKAGECONFIG ??= "pie"
diff --git a/meta/recipes-devtools/qemu/qemu-native_8.2.1.bb b/meta/recipes-devtools/qemu/qemu-native_8.2.1.bb
new file mode 100644
index 0000000000..a77953529b
--- /dev/null
+++ b/meta/recipes-devtools/qemu/qemu-native_8.2.1.bb
@@ -0,0 +1,9 @@
+BPN = "qemu"
+
+DEPENDS += "glib-2.0-native zlib-native"
+
+require qemu-native.inc
+
+EXTRA_OECONF:append = " --target-list=${@get_qemu_usermode_target_list(d)} --disable-tools --disable-install-blobs --disable-guest-agent"
+
+PACKAGECONFIG ??= "pie"
diff --git a/meta/recipes-devtools/qemu/qemu-system-native_7.0.0.bb b/meta/recipes-devtools/qemu/qemu-system-native_7.0.0.bb
deleted file mode 100644
index bc5384d472..0000000000
--- a/meta/recipes-devtools/qemu/qemu-system-native_7.0.0.bb
+++ /dev/null
@@ -1,32 +0,0 @@
-BPN = "qemu"
-
-inherit python3-dir
-
-require qemu-native.inc
-
-# As some of the files installed by qemu-native and qemu-system-native
-# are the same, we depend on qemu-native to get the full installation set
-# and avoid file clashes
-DEPENDS = "glib-2.0-native zlib-native pixman-native qemu-native bison-native meson-native ninja-native"
-
-EXTRA_OECONF:append = " --target-list=${@get_qemu_system_target_list(d)}"
-
-PACKAGECONFIG ??= "fdt alsa kvm pie \
- ${@bb.utils.contains('DISTRO_FEATURES', 'opengl', 'virglrenderer epoxy', '', d)} \
-"
-
-# Handle distros such as CentOS 5 32-bit that do not have kvm support
-PACKAGECONFIG:remove = "${@'kvm' if not os.path.exists('/usr/include/linux/kvm.h') else ''}"
-
-do_install:append() {
- install -Dm 0755 ${WORKDIR}/powerpc_rom.bin ${D}${datadir}/qemu
-
- # The following is also installed by qemu-native
- rm -f ${D}${datadir}/qemu/trace-events-all
- rm -rf ${D}${datadir}/qemu/keymaps
- rm -rf ${D}${datadir}/icons/
- rm -rf ${D}${includedir}/qemu-plugin.h
-
- # Install qmp.py to be used with testimage
- install -D ${S}/python/qemu/qmp/__init__.py ${D}${libdir}/qemu-python/qmp.py
-}
diff --git a/meta/recipes-devtools/qemu/qemu-system-native_8.2.1.bb b/meta/recipes-devtools/qemu/qemu-system-native_8.2.1.bb
new file mode 100644
index 0000000000..0634b34242
--- /dev/null
+++ b/meta/recipes-devtools/qemu/qemu-system-native_8.2.1.bb
@@ -0,0 +1,31 @@
+BPN = "qemu"
+
+require qemu-native.inc
+
+# As some of the files installed by qemu-native and qemu-system-native
+# are the same, we depend on qemu-native to get the full installation set
+# and avoid file clashes
+DEPENDS += "glib-2.0-native zlib-native pixman-native qemu-native"
+
+EXTRA_OECONF:append = " --target-list=${@get_qemu_system_target_list(d)}"
+
+PACKAGECONFIG ??= "fdt alsa kvm pie slirp png \
+ ${@bb.utils.contains('DISTRO_FEATURES', 'opengl', 'virglrenderer epoxy', '', d)} \
+"
+
+# Handle distros such as CentOS 5 32-bit that do not have kvm support
+PACKAGECONFIG:remove = "${@'kvm' if not os.path.exists('/usr/include/linux/kvm.h') else ''}"
+
+do_install:append() {
+ install -Dm 0755 ${WORKDIR}/powerpc_rom.bin ${D}${datadir}/qemu
+
+ # The following is also installed by qemu-native
+ rm -f ${D}${datadir}/qemu/trace-events-all
+ rm -rf ${D}${datadir}/qemu/keymaps
+ rm -rf ${D}${datadir}/icons/
+ rm -rf ${D}${includedir}/qemu-plugin.h
+
+ # Install qmp.py to be used with testimage
+ install -d ${D}${libdir}/qemu-python/qmp/
+ install -D ${S}/python/qemu/qmp/* ${D}${libdir}/qemu-python/qmp/
+}
diff --git a/meta/recipes-devtools/qemu/qemu.inc b/meta/recipes-devtools/qemu/qemu.inc
index 5f303992be..4501f84c2b 100644
--- a/meta/recipes-devtools/qemu/qemu.inc
+++ b/meta/recipes-devtools/qemu/qemu.inc
@@ -6,10 +6,14 @@ a variety of guest operating systems"
HOMEPAGE = "http://qemu.org"
LICENSE = "GPL-2.0-only & LGPL-2.1-only"
+DEPENDS += "bison-native meson-native ninja-native"
+
RDEPENDS:${PN}-ptest = "bash"
require qemu-targets.inc
-inherit pkgconfig ptest python3-dir
+# https://gitlab.com/qemu-project/qemu/-/commit/81e2b198a8cb4ee5fdf108bd438f44b193ee3a36 means
+# we need a full python3-native setup
+inherit pkgconfig ptest update-rc.d systemd python3native
LIC_FILES_CHKSUM = "file://COPYING;md5=441c28d2cf86e15a37fa47e15a72fbac \
file://COPYING.LIB;endline=24;md5=8c5efda6cf1e1b03dcfd0e6c0d271c7f"
@@ -18,7 +22,6 @@ SRC_URI = "https://download.qemu.org/${BPN}-${PV}.tar.xz \
file://powerpc_rom.bin \
file://run-ptest \
file://0001-qemu-Add-addition-environment-space-to-boot-loader-q.patch \
- file://0002-chardev-connect-socket-to-a-spawned-command.patch \
file://0003-apic-fixup-fallthrough-to-PIC.patch \
file://0004-configure-Add-pkg-config-handling-for-libgcrypt.patch \
file://0005-qemu-Do-not-include-file-if-not-exists.patch \
@@ -27,24 +30,53 @@ SRC_URI = "https://download.qemu.org/${BPN}-${PV}.tar.xz \
file://0008-tests-meson.build-use-relative-path-to-refer-to-file.patch \
file://0009-Define-MAP_SYNC-and-MAP_SHARED_VALIDATE-on-needed-li.patch \
file://0010-hw-pvrdma-Protect-against-buggy-or-malicious-guest-d.patch \
+ file://0002-linux-user-Replace-use-of-lfs64-related-functions-an.patch \
+ file://fixedmeson.patch \
+ file://no-pip.patch \
+ file://4a8579ad8629b57a43daa62e46cc7af6e1078116.patch \
+ file://0001-linux-user-x86_64-Handle-the-vsyscall-page-in-open_s.patch \
+ file://0002-linux-user-loongarch64-Remove-TARGET_FORCE_SHMLBA.patch \
+ file://0003-linux-user-Add-strace-for-shmat.patch \
+ file://0004-linux-user-Rewrite-target_shmat.patch \
+ file://0005-tests-tcg-Check-that-shmat-does-not-break-proc-self-.patch \
+ file://CVE-2023-6683.patch \
+ file://qemu-guest-agent.init \
+ file://qemu-guest-agent.udev \
"
UPSTREAM_CHECK_REGEX = "qemu-(?P<pver>\d+(\.\d+)+)\.tar"
-SRC_URI[sha256sum] = "f6b375c7951f728402798b0baabb2d86478ca53d44cedbefabbe1c46bf46f839"
+# SDK_OLDEST_KERNEL is set below 4.17, which is the minimum version required by QEMU >= 8.1
+# This is due to two MMAP flags being used at certain points
+SRC_URI:append:class-nativesdk = " \
+ file://0011-linux-user-workaround-for-missing-MAP_FIXED_NOREPLAC.patch \
+ file://0012-linux-user-workaround-for-missing-MAP_SHARED_VALIDAT.patch \
+ "
-SRC_URI:append:class-target = " file://cross.patch"
-SRC_URI:append:class-nativesdk = " file://cross.patch"
+# Support building and using native version on pre 4.17 kernels
+SRC_URI:append:class-native = " \
+ file://0011-linux-user-workaround-for-missing-MAP_FIXED_NOREPLAC.patch \
+ file://0012-linux-user-workaround-for-missing-MAP_SHARED_VALIDAT.patch \
+ "
-# Applies against virglrender < 0.6.0 and not qemu itself
-CVE_CHECK_IGNORE += "CVE-2017-5957"
+SRC_URI[sha256sum] = "8562751158175f9d187c5f22b57555abe3c870f0325c8ced12c34c6d987729be"
-# The VNC server can expose host files uder some circumstances. We don't
-# enable it by default.
-CVE_CHECK_IGNORE += "CVE-2007-0998"
+CVE_STATUS[CVE-2007-0998] = "not-applicable-config: The VNC server can expose host files uder some circumstances. We don't enable it by default."
-# 'The issues identified by this CVE were determined to not constitute a vulnerability.'
# https://bugzilla.redhat.com/show_bug.cgi?id=1609015#c11
-CVE_CHECK_IGNORE += "CVE-2018-18438"
+CVE_STATUS[CVE-2018-18438] = "disputed: The issues identified by this CVE were determined to not constitute a vulnerability."
+
+# As per https://nvd.nist.gov/vuln/detail/CVE-2023-0664
+# https://bugzilla.redhat.com/show_bug.cgi?id=2167423
+CVE_STATUS[CVE-2023-0664] = "not-applicable-platform: Issue only applies on Windows"
+
+# As per https://bugzilla.redhat.com/show_bug.cgi?id=2203387
+CVE_STATUS[CVE-2023-2680] = "not-applicable-platform: RHEL specific issue."
+
+CVE_STATUS[CVE-2023-3019] = "cpe-incorrect: Applies only against versions before 8.2.0"
+
+CVE_STATUS[CVE-2023-5088] = "cpe-incorrect: Applies only against version 8.2.0 and earlier"
+
+CVE_STATUS[CVE-2023-6693] = "cpe-incorrect: Applies only against version 8.2.0 and earlier"
COMPATIBLE_HOST:mipsarchn32 = "null"
COMPATIBLE_HOST:mipsarchn64 = "null"
@@ -67,14 +99,23 @@ do_install_ptest() {
sed -i -e "1s,#!/usr/bin/bash,#!${base_bindir}/bash," ${D}${PTEST_PATH}/tests/data/acpi/disassemle-aml.sh
# Strip the paths from the QEMU variable, we can use PATH
- sed -i -e "s#^QEMU=.*/qemu-#QEMU=qemu-#g" ${D}${PTEST_PATH}/tests/tcg/*.mak
+ makfiles=$(find ${D}${PTEST_PATH} -name "*.mak")
+ sed -i -e "s#^QEMU=.*/qemu-#QEMU=qemu-#g" $makfiles
+
+ # Strip compiler flags as they break reproducibility
+ sed -i -e "s,^CC=.*,CC=gcc," \
+ -e "s,^CCAS=.*,CCAS=gcc," \
+ -e "s,^LD=.*,LD=ld," $makfiles
- # Strip compiler flags as they break reproducibility
- sed -i -e "s,CROSS_CC_GUEST=.*,CROSS_CC_GUEST=," ${D}${PTEST_PATH}/tests/tcg/*.mak
+ # Update SRC_PATH variable to the right place on target
+ sed -i -e "s#^SRC_PATH=.*#SRC_PATH=${PTEST_PATH}#g" $makfiles
+
+ # https://gitlab.com/qemu-project/qemu/-/issues/1403
+ rm ${D}${PTEST_PATH}/tests/unit/test-io-channel-command
}
# QEMU_TARGETS is overridable variable
-QEMU_TARGETS ?= "arm aarch64 i386 mips mipsel mips64 mips64el ppc ppc64 ppc64le riscv32 riscv64 sh4 x86_64"
+QEMU_TARGETS ?= "arm aarch64 i386 loongarch64 mips mipsel mips64 mips64el ppc ppc64 ppc64le riscv32 riscv64 sh4 x86_64"
EXTRA_OECONF = " \
--prefix=${prefix} \
@@ -92,12 +133,16 @@ EXTRA_OECONF = " \
--disable-werror \
--extra-cflags='${CFLAGS}' \
--extra-ldflags='${LDFLAGS}' \
- --with-git=/bin/false \
- --with-git-submodules=ignore \
- --meson=meson \
+ --disable-download \
+ --disable-docs \
+ --host-cc='${BUILD_CC}' \
+ --disable-af-xdp \
${PACKAGECONFIG_CONFARGS} \
"
+EXTRA_OECONF:append:class-target = " --cross-prefix=${HOST_PREFIX}"
+EXTRA_OECONF:append:class-nativesdk = " --cross-prefix=${HOST_PREFIX}"
+
B = "${WORKDIR}/build"
#EXTRA_OECONF:append = " --python=${HOSTTOOLS_DIR}/python3"
@@ -111,17 +156,32 @@ do_configure:prepend:class-native() {
}
do_configure() {
- # This is taken from meson.bbclass to avoid errors when updating to a
- # new version of meson.
- rmdir ${STAGING_LIBDIR_NATIVE}/${PYTHON_DIR}/site-packages/*.egg-info 2>/dev/null || :
-
- ${S}/configure ${EXTRA_OECONF}
+ export PKG_CONFIG=pkg-config
+ ${S}/configure ${EXTRA_OECONF}
}
do_configure[cleandirs] += "${B}"
do_install () {
export STRIP=""
oe_runmake 'DESTDIR=${D}' install
+
+ # If we built the guest agent, also install startup/udev rules
+ if [ -e "${D}${bindir}/qemu-ga" ]; then
+ install -d ${D}${sysconfdir}/init.d/
+ install -m 0755 ${WORKDIR}/qemu-guest-agent.init ${D}${sysconfdir}/init.d/qemu-guest-agent
+ sed -i 's:@bindir@:${bindir}:' ${D}${sysconfdir}/init.d/qemu-guest-agent
+
+ install -d ${D}${sysconfdir}/udev/rules.d/
+ install -m 0644 ${WORKDIR}/qemu-guest-agent.udev ${D}${sysconfdir}/udev/rules.d/60-qemu-guest-agent.rules
+
+ install -d ${D}${systemd_unitdir}/system/
+ install -m 0644 ${S}/contrib/systemd/qemu-guest-agent.service ${D}${systemd_unitdir}/system
+ sed -i -e 's,-/usr/bin/,-${bindir}/,g' ${D}${systemd_unitdir}/system/qemu-guest-agent.service
+ fi
+ # ELF binary /usr/share/qemu/s390-netboot.img has relocations in .text
+ rm ${D}${datadir}/qemu/s390-netboot.img -f
+ # ELF binary /usr/share/qemu/s390-ccw.img has relocations in .text [textrel]
+ rm ${D}${datadir}/qemu/s390-ccw.img -f
}
# The following fragment will create a wrapper for qemu-mips user emulation
@@ -140,15 +200,16 @@ do_install:append() {
# Disable kvm/virgl/mesa on targets that do not support it
PACKAGECONFIG:remove:darwin = "kvm virglrenderer epoxy gtk+"
-PACKAGECONFIG:remove:mingw32 = "kvm virglrenderer epoxy gtk+"
+PACKAGECONFIG:remove:mingw32 = "kvm virglrenderer epoxy gtk+ pie"
PACKAGECONFIG[sdl] = "--enable-sdl,--disable-sdl,libsdl2"
+PACKAGECONFIG[png] = "--enable-png,--disable-png,libpng"
PACKAGECONFIG[virtfs] = "--enable-virtfs --enable-attr --enable-cap-ng,--disable-virtfs,libcap-ng attr,"
PACKAGECONFIG[aio] = "--enable-linux-aio,--disable-linux-aio,libaio,"
+PACKAGECONFIG[uring] = "--enable-linux-io-uring,--disable-linux-io-uring,liburing"
PACKAGECONFIG[xen] = "--enable-xen,--disable-xen,xen-tools,xen-tools-libxenstore xen-tools-libxenctrl xen-tools-libxenguest"
PACKAGECONFIG[vnc-sasl] = "--enable-vnc --enable-vnc-sasl,--disable-vnc-sasl,cyrus-sasl,"
PACKAGECONFIG[vnc-jpeg] = "--enable-vnc --enable-vnc-jpeg,--disable-vnc-jpeg,jpeg,"
-PACKAGECONFIG[vnc-png] = "--enable-vnc --enable-vnc-png,--disable-vnc-png,libpng,"
PACKAGECONFIG[libcurl] = "--enable-curl,--disable-curl,curl,"
PACKAGECONFIG[nss] = "--enable-smartcard,--disable-smartcard,nss,"
PACKAGECONFIG[curses] = "--enable-curses,--disable-curses,ncurses,"
@@ -163,6 +224,7 @@ PACKAGECONFIG[fdt] = "--enable-fdt,--disable-fdt,dtc"
PACKAGECONFIG[alsa] = "--audio-drv-list=default,,alsa-lib"
PACKAGECONFIG[epoxy] = "--enable-opengl,--disable-opengl,libepoxy"
PACKAGECONFIG[lzo] = "--enable-lzo,--disable-lzo,lzo"
+PACKAGECONFIG[dax] = "--enable-libdaxctl,--disable-libdaxctl,ndctl"
PACKAGECONFIG[numa] = "--enable-numa,--disable-numa,numactl"
PACKAGECONFIG[gnutls] = "--enable-gnutls,--disable-gnutls,gnutls"
PACKAGECONFIG[bzip2] = "--enable-bzip2,--disable-bzip2,bzip2"
@@ -172,24 +234,77 @@ PACKAGECONFIG[virglrenderer] = "--enable-virglrenderer,--disable-virglrenderer,v
# spice will be in meta-networking layer
PACKAGECONFIG[spice] = "--enable-spice,--disable-spice,spice"
# usbredir will be in meta-networking layer
+PACKAGECONFIG[dbus-display] = "--enable-dbus-display,--disable-dbus-display,glib-2.0-native,dbus"
PACKAGECONFIG[usb-redir] = "--enable-usb-redir,--disable-usb-redir,usbredir"
PACKAGECONFIG[snappy] = "--enable-snappy,--disable-snappy,snappy"
PACKAGECONFIG[glusterfs] = "--enable-glusterfs,--disable-glusterfs,glusterfs"
PACKAGECONFIG[xkbcommon] = "--enable-xkbcommon,--disable-xkbcommon,libxkbcommon"
-PACKAGECONFIG[libudev] = "--enable-libudev,--disable-libudev,eudev"
+PACKAGECONFIG[libudev] = "--enable-libudev,--disable-libudev,udev"
PACKAGECONFIG[attr] = "--enable-attr,--disable-attr,attr,"
PACKAGECONFIG[rbd] = "--enable-rbd,--disable-rbd,ceph,ceph"
PACKAGECONFIG[vhost] = "--enable-vhost-net,--disable-vhost-net,,"
-PACKAGECONFIG[ust] = "--enable-trace-backend=ust,--enable-trace-backend=nop,lttng-ust,"
+PACKAGECONFIG[ust] = "--enable-trace-backends=ust,,lttng-ust,"
PACKAGECONFIG[pie] = "--enable-pie,--disable-pie,,"
PACKAGECONFIG[seccomp] = "--enable-seccomp,--disable-seccomp,libseccomp"
# libnfs is currently provided by meta-kodi
PACKAGECONFIG[libnfs] = "--enable-libnfs,--disable-libnfs,libnfs"
PACKAGECONFIG[pmem] = "--enable-libpmem,--disable-libpmem,pmdk"
-PACKAGECONFIG[pulsedio] = "--enable-pa,--disable-pa,pulseaudio"
+PACKAGECONFIG[pulseaudio] = "--enable-pa,--disable-pa,pulseaudio"
PACKAGECONFIG[selinux] = "--enable-selinux,--disable-selinux"
PACKAGECONFIG[bpf] = "--enable-bpf,--disable-bpf,libbpf"
+PACKAGECONFIG[capstone] = "--enable-capstone,--disable-capstone"
+PACKAGECONFIG[rdma] = "--enable-rdma,--disable-rdma"
+PACKAGECONFIG[vde] = "--enable-vde,--disable-vde"
+PACKAGECONFIG[fuse] = "--enable-fuse --enable-fuse-lseek,--disable-fuse --disable-fuse-lseek,fuse3"
+PACKAGECONFIG[slirp] = "--enable-slirp,--disable-slirp,libslirp"
+PACKAGECONFIG[brlapi] = "--enable-brlapi,--disable-brlapi"
+PACKAGECONFIG[jack] = "--enable-jack,--disable-jack,jack,"
+PACKAGECONFIG[debuginfo] = "--enable-libdw,--disable-libdw,elfutils"
+PACKAGECONFIG[pipewire] = "--enable-pipewire,--disable-pipewire,pipewire"
+PACKAGECONFIG[sndio] = "--enable-sndio,--disable-sndio,sndio"
-INSANE_SKIP:${PN} = "arch"
+INSANE_SKIP:${PN}-common = "arch"
FILES:${PN} += "${datadir}/icons"
+
+# For user who want to install all arch packages
+PACKAGES =+ "${PN}-common"
+RDEPENDS:${PN} += "${PN}-common"
+
+ALLOW_EMPTY:${PN} = "1"
+FILES:${PN} = ""
+
+FILES:${PN}-common = "${bindir}/* ${includedir}/* ${libexecdir}/* ${datadir}/* ${localstatedir}"
+
+PACKAGES_DYNAMIC += "^${PN}-user-.* ^${PN}-system-.*"
+
+PACKAGESPLITFUNCS =+ "split_qemu_packages"
+
+python split_qemu_packages () {
+ archdir = d.expand('${bindir}/')
+ subpackages = do_split_packages(d, archdir, r'^qemu-system-(.*)$', '${PN}-system-%s', 'QEMU full system emulation binaries(%s)' , prepend=True, extra_depends='${PN}-common')
+
+ subpackages += do_split_packages(d, archdir, r'^qemu-((?!system|edid|ga|img|io|nbd|pr-helper|storage-daemon).*)$', '${PN}-user-%s', 'QEMU full user emulation binaries(%s)' , prepend=True, extra_depends='${PN}-common')
+ if subpackages:
+ d.appendVar('RDEPENDS:' + d.getVar('PN'), ' ' + ' '.join(subpackages))
+ mipspackage = d.getVar('PN') + "-user-mips"
+ if mipspackage in ' '.join(subpackages):
+ d.appendVar('RDEPENDS:' + mipspackage, ' ' + d.getVar("MLPREFIX") + 'bash')
+}
+
+# Put the guest agent in a separate package
+PACKAGES =+ "${PN}-guest-agent"
+SUMMARY:${PN}-guest-agent = "QEMU guest agent"
+FILES:${PN}-guest-agent += " \
+ ${bindir}/qemu-ga \
+ ${sysconfdir}/udev/rules.d/60-qemu-guest-agent.rules \
+ ${sysconfdir}/init.d/qemu-guest-agent \
+ ${systemd_unitdir}/system/qemu-guest-agent.service \
+"
+
+INITSCRIPT_PACKAGES = "${PN}-guest-agent"
+INITSCRIPT_NAME:${PN}-guest-agent = "qemu-guest-agent"
+INITSCRIPT_PARAMS:${PN}-guest-agent = "defaults"
+
+SYSTEMD_PACKAGES = "${PN}-guest-agent"
+SYSTEMD_SERVICE:${PN}-guest-agent = "qemu-guest-agent.service"
diff --git a/meta/recipes-devtools/qemu/qemu/0001-linux-user-x86_64-Handle-the-vsyscall-page-in-open_s.patch b/meta/recipes-devtools/qemu/qemu/0001-linux-user-x86_64-Handle-the-vsyscall-page-in-open_s.patch
new file mode 100644
index 0000000000..2eaebe883c
--- /dev/null
+++ b/meta/recipes-devtools/qemu/qemu/0001-linux-user-x86_64-Handle-the-vsyscall-page-in-open_s.patch
@@ -0,0 +1,56 @@
+From 4517e2046610722879761bcdb60edbb2b929c848 Mon Sep 17 00:00:00 2001
+From: Richard Henderson <richard.henderson@linaro.org>
+Date: Wed, 28 Feb 2024 10:25:14 -1000
+Subject: [PATCH 1/5] linux-user/x86_64: Handle the vsyscall page in
+ open_self_maps_{2,4}
+
+This is the only case in which we expect to have no host memory backing
+for a guest memory page, because in general linux user processes cannot
+map any pages in the top half of the 64-bit address space.
+
+Upstream-Status: Submitted [https://www.mail-archive.com/qemu-devel@nongnu.org/msg1026793.html]
+
+Resolves: https://gitlab.com/qemu-project/qemu/-/issues/2170
+Signed-off-by: Richard Henderson <richard.henderson@linaro.org>
+Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
+---
+ linux-user/syscall.c | 16 ++++++++++++++++
+ 1 file changed, 16 insertions(+)
+
+diff --git a/linux-user/syscall.c b/linux-user/syscall.c
+index a114f29a8..8307a8a61 100644
+--- a/linux-user/syscall.c
++++ b/linux-user/syscall.c
+@@ -7922,6 +7922,10 @@ static void open_self_maps_4(const struct open_self_maps_data *d,
+ path = "[heap]";
+ } else if (start == info->vdso) {
+ path = "[vdso]";
++#ifdef TARGET_X86_64
++ } else if (start == TARGET_VSYSCALL_PAGE) {
++ path = "[vsyscall]";
++#endif
+ }
+
+ /* Except null device (MAP_ANON), adjust offset for this fragment. */
+@@ -8010,6 +8014,18 @@ static int open_self_maps_2(void *opaque, target_ulong guest_start,
+ uintptr_t host_start = (uintptr_t)g2h_untagged(guest_start);
+ uintptr_t host_last = (uintptr_t)g2h_untagged(guest_end - 1);
+
++#ifdef TARGET_X86_64
++ /*
++ * Because of the extremely high position of the page within the guest
++ * virtual address space, this is not backed by host memory at all.
++ * Therefore the loop below would fail. This is the only instance
++ * of not having host backing memory.
++ */
++ if (guest_start == TARGET_VSYSCALL_PAGE) {
++ return open_self_maps_3(opaque, guest_start, guest_end, flags);
++ }
++#endif
++
+ while (1) {
+ IntervalTreeNode *n =
+ interval_tree_iter_first(d->host_maps, host_start, host_start);
+--
+2.34.1
+
diff --git a/meta/recipes-devtools/qemu/qemu/0001-qemu-Add-addition-environment-space-to-boot-loader-q.patch b/meta/recipes-devtools/qemu/qemu/0001-qemu-Add-addition-environment-space-to-boot-loader-q.patch
index 6fb160e6d3..c65508017d 100644
--- a/meta/recipes-devtools/qemu/qemu/0001-qemu-Add-addition-environment-space-to-boot-loader-q.patch
+++ b/meta/recipes-devtools/qemu/qemu/0001-qemu-Add-addition-environment-space-to-boot-loader-q.patch
@@ -18,11 +18,11 @@ Signed-off-by: Roy Li <rongqing.li@windriver.com>
hw/mips/malta.c | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
-diff --git a/hw/mips/malta.c b/hw/mips/malta.c
-index 628851172..12d37f35d 100644
---- a/hw/mips/malta.c
-+++ b/hw/mips/malta.c
-@@ -61,7 +61,7 @@
+Index: qemu-8.0.0/hw/mips/malta.c
+===================================================================
+--- qemu-8.0.0.orig/hw/mips/malta.c
++++ qemu-8.0.0/hw/mips/malta.c
+@@ -64,7 +64,7 @@
#define ENVP_PADDR 0x2000
#define ENVP_VADDR cpu_mips_phys_to_kseg0(NULL, ENVP_PADDR)
#define ENVP_NB_ENTRIES 16
@@ -31,6 +31,3 @@ index 628851172..12d37f35d 100644
/* Hardware addresses */
#define FLASH_ADDRESS 0x1e000000ULL
---
-2.30.2
-
diff --git a/meta/recipes-devtools/qemu/qemu/0002-chardev-connect-socket-to-a-spawned-command.patch b/meta/recipes-devtools/qemu/qemu/0002-chardev-connect-socket-to-a-spawned-command.patch
deleted file mode 100644
index 63a99c9620..0000000000
--- a/meta/recipes-devtools/qemu/qemu/0002-chardev-connect-socket-to-a-spawned-command.patch
+++ /dev/null
@@ -1,246 +0,0 @@
-From 14cd62607c9de232edf0a9b8503bd02783e03411 Mon Sep 17 00:00:00 2001
-From: Alistair Francis <alistair.francis@xilinx.com>
-Date: Thu, 21 Dec 2017 11:35:16 -0800
-Subject: [PATCH 02/12] chardev: connect socket to a spawned command
-
-The command is started in a shell (sh -c) with stdin connect to QEMU
-via a Unix domain stream socket. QEMU then exchanges data via its own
-end of the socket, just like it normally does.
-
-"-chardev socket" supports some ways of connecting via protocols like
-telnet, but that is only a subset of the functionality supported by
-tools socat. To use socat instead, for example to connect via a socks
-proxy, use:
-
- -chardev 'socket,id=socat,cmd=exec socat FD:0 SOCKS4A:socks-proxy.localdomain:example.com:9999,,socksuser=nobody' \
- -device usb-serial,chardev=socat
-
-Beware that commas in the command must be escaped as double commas.
-
-Or interactively in the console:
- (qemu) chardev-add socket,id=cat,cmd=cat
- (qemu) device_add usb-serial,chardev=cat
- ^ac
- # cat >/dev/ttyUSB0
- hello
- hello
-
-Another usage is starting swtpm from inside QEMU. swtpm will
-automatically shut down once it looses the connection to the parent
-QEMU, so there is no risk of lingering processes:
-
- -chardev 'socket,id=chrtpm0,cmd=exec swtpm socket --terminate --ctrl type=unixio,,clientfd=0 --tpmstate dir=... --log file=swtpm.log' \
- -tpmdev emulator,id=tpm0,chardev=chrtpm0 \
- -device tpm-tis,tpmdev=tpm0
-
-The patch was discussed upstream, but QEMU developers believe that the
-code calling QEMU should be responsible for managing additional
-processes. In OE-core, that would imply enhancing runqemu and
-oeqa. This patch is a simpler solution.
-
-Because it is not going upstream, the patch was written so that it is
-as simple as possible.
-
-Upstream-Status: Inappropriate [embedded specific]
-
-Signed-off-by: Patrick Ohly <patrick.ohly@intel.com>
-
----
- chardev/char-socket.c | 100 ++++++++++++++++++++++++++++++++++++++++++
- chardev/char.c | 3 ++
- qapi/char.json | 5 +++
- 3 files changed, 108 insertions(+)
-
-diff --git a/chardev/char-socket.c b/chardev/char-socket.c
-index fab2d791d..c79641f24 100644
---- a/chardev/char-socket.c
-+++ b/chardev/char-socket.c
-@@ -1315,6 +1315,67 @@ static bool qmp_chardev_validate_socket(ChardevSocket *sock,
- return true;
- }
-
-+#ifndef _WIN32
-+static void chardev_open_socket_cmd(Chardev *chr,
-+ const char *cmd,
-+ Error **errp)
-+{
-+ int fds[2] = { -1, -1 };
-+ QIOChannelSocket *sioc = NULL;
-+ pid_t pid = -1;
-+ const char *argv[] = { "/bin/sh", "-c", cmd, NULL };
-+
-+ /*
-+ * We need a Unix domain socket for commands like swtpm and a single
-+ * connection, therefore we cannot use qio_channel_command_new_spawn()
-+ * without patching it first. Duplicating the functionality is easier.
-+ */
-+ if (socketpair(AF_UNIX, SOCK_STREAM|SOCK_CLOEXEC, 0, fds)) {
-+ error_setg_errno(errp, errno, "Error creating socketpair(AF_UNIX, SOCK_STREAM|SOCK_CLOEXEC)");
-+ goto error;
-+ }
-+
-+ pid = qemu_fork(errp);
-+ if (pid < 0) {
-+ goto error;
-+ }
-+
-+ if (!pid) {
-+ /* child */
-+ dup2(fds[1], STDIN_FILENO);
-+ execv(argv[0], (char * const *)argv);
-+ _exit(1);
-+ }
-+
-+ /*
-+ * Hand over our end of the socket pair to the qio channel.
-+ *
-+ * We don't reap the child because it is expected to keep
-+ * running. We also don't support the "reconnect" option for the
-+ * same reason.
-+ */
-+ sioc = qio_channel_socket_new_fd(fds[0], errp);
-+ if (!sioc) {
-+ goto error;
-+ }
-+ fds[0] = -1;
-+
-+ g_free(chr->filename);
-+ chr->filename = g_strdup_printf("cmd:%s", cmd);
-+ tcp_chr_new_client(chr, sioc);
-+
-+ error:
-+ if (fds[0] >= 0) {
-+ close(fds[0]);
-+ }
-+ if (fds[1] >= 0) {
-+ close(fds[1]);
-+ }
-+ if (sioc) {
-+ object_unref(OBJECT(sioc));
-+ }
-+}
-+#endif
-
- static void qmp_chardev_open_socket(Chardev *chr,
- ChardevBackend *backend,
-@@ -1323,6 +1384,9 @@ static void qmp_chardev_open_socket(Chardev *chr,
- {
- SocketChardev *s = SOCKET_CHARDEV(chr);
- ChardevSocket *sock = backend->u.socket.data;
-+#ifndef _WIN32
-+ const char *cmd = sock->cmd;
-+#endif
- bool do_nodelay = sock->has_nodelay ? sock->nodelay : false;
- bool is_listen = sock->has_server ? sock->server : true;
- bool is_telnet = sock->has_telnet ? sock->telnet : false;
-@@ -1393,6 +1457,14 @@ static void qmp_chardev_open_socket(Chardev *chr,
-
- update_disconnected_filename(s);
-
-+#ifndef _WIN32
-+ if (cmd) {
-+ chardev_open_socket_cmd(chr, cmd, errp);
-+
-+ /* everything ready (or failed permanently) before we return */
-+ *be_opened = true;
-+ } else
-+#endif
- if (s->is_listen) {
- if (qmp_chardev_open_socket_server(chr, is_telnet || is_tn3270,
- is_waitconnect, errp) < 0) {
-@@ -1412,6 +1484,9 @@ static void qemu_chr_parse_socket(QemuOpts *opts, ChardevBackend *backend,
- const char *host = qemu_opt_get(opts, "host");
- const char *port = qemu_opt_get(opts, "port");
- const char *fd = qemu_opt_get(opts, "fd");
-+#ifndef _WIN32
-+ const char *cmd = qemu_opt_get(opts, "cmd");
-+#endif
- #ifdef CONFIG_LINUX
- bool tight = qemu_opt_get_bool(opts, "tight", true);
- bool abstract = qemu_opt_get_bool(opts, "abstract", false);
-@@ -1419,6 +1494,20 @@ static void qemu_chr_parse_socket(QemuOpts *opts, ChardevBackend *backend,
- SocketAddressLegacy *addr;
- ChardevSocket *sock;
-
-+#ifndef _WIN32
-+ if (cmd) {
-+ /*
-+ * Here we have to ensure that no options are set which are incompatible with
-+ * spawning a command, otherwise unmodified code that doesn't know about
-+ * command spawning (like socket_reconnect_timeout()) might get called.
-+ */
-+ if (path || sock->server || sock->has_telnet || sock->has_tn3270 || sock->reconnect || host || port || sock->tls_creds) {
-+ error_setg(errp, "chardev: socket: cmd does not support any additional options");
-+ return;
-+ }
-+ } else
-+#endif
-+
- if ((!!path + !!fd + !!host) > 1) {
- error_setg(errp,
- "None or one of 'path', 'fd' or 'host' option required.");
-@@ -1469,13 +1558,24 @@ static void qemu_chr_parse_socket(QemuOpts *opts, ChardevBackend *backend,
- sock->tls_creds = g_strdup(qemu_opt_get(opts, "tls-creds"));
- sock->has_tls_authz = qemu_opt_get(opts, "tls-authz");
- sock->tls_authz = g_strdup(qemu_opt_get(opts, "tls-authz"));
-+#ifndef _WIN32
-+ sock->cmd = g_strdup(cmd);
-+#endif
-
- addr = g_new0(SocketAddressLegacy, 1);
-+#ifndef _WIN32
-+ if (path || cmd) {
-+#else
- if (path) {
-+#endif
- UnixSocketAddress *q_unix;
- addr->type = SOCKET_ADDRESS_TYPE_UNIX;
- q_unix = addr->u.q_unix.data = g_new0(UnixSocketAddress, 1);
-+#ifndef _WIN32
-+ q_unix->path = cmd ? g_strdup_printf("cmd:%s", cmd) : g_strdup(path);
-+#else
- q_unix->path = g_strdup(path);
-+#endif
- #ifdef CONFIG_LINUX
- q_unix->has_tight = true;
- q_unix->tight = tight;
-diff --git a/chardev/char.c b/chardev/char.c
-index 0169d8dde..ce9a21f41 100644
---- a/chardev/char.c
-+++ b/chardev/char.c
-@@ -835,6 +835,9 @@ QemuOptsList qemu_chardev_opts = {
- },{
- .name = "path",
- .type = QEMU_OPT_STRING,
-+ },{
-+ .name = "cmd",
-+ .type = QEMU_OPT_STRING,
- },{
- .name = "host",
- .type = QEMU_OPT_STRING,
-diff --git a/qapi/char.json b/qapi/char.json
-index 7b4215157..37feabdac 100644
---- a/qapi/char.json
-+++ b/qapi/char.json
-@@ -250,6 +250,10 @@
- #
- # @addr: socket address to listen on (server=true)
- # or connect to (server=false)
-+# @cmd: command to run via "sh -c" with stdin as one end of
-+# a AF_UNIX SOCK_DSTREAM socket pair. The other end
-+# is used by the chardev. Either an addr or a cmd can
-+# be specified, but not both.
- # @tls-creds: the ID of the TLS credentials object (since 2.6)
- # @tls-authz: the ID of the QAuthZ authorization object against which
- # the client's x509 distinguished name will be validated. This
-@@ -276,6 +280,7 @@
- ##
- { 'struct': 'ChardevSocket',
- 'data': { 'addr': 'SocketAddressLegacy',
-+ '*cmd': 'str',
- '*tls-creds': 'str',
- '*tls-authz' : 'str',
- '*server': 'bool',
---
-2.30.2
-
diff --git a/meta/recipes-devtools/qemu/qemu/0002-linux-user-Replace-use-of-lfs64-related-functions-an.patch b/meta/recipes-devtools/qemu/qemu/0002-linux-user-Replace-use-of-lfs64-related-functions-an.patch
new file mode 100644
index 0000000000..ceae67be64
--- /dev/null
+++ b/meta/recipes-devtools/qemu/qemu/0002-linux-user-Replace-use-of-lfs64-related-functions-an.patch
@@ -0,0 +1,355 @@
+From 71f14902256e3c3529710b713e1ea43100bf4c40 Mon Sep 17 00:00:00 2001
+From: Khem Raj <raj.khem@gmail.com>
+Date: Sat, 17 Dec 2022 08:37:46 -0800
+Subject: [PATCH 2/2] linux-user: Replace use of lfs64 related functions and
+ macros
+
+Builds defines -D_FILE_OFFSET_BITS=64 which makes the original functions
+anf macros behave same as their 64 suffixed counterparts. This also
+helps in compiling with latest musl C library, where these macros and
+functions are no more available under _GNU_SOURCE feature macro
+
+Upstream-Status: Submitted [https://lists.gnu.org/archive/html/qemu-devel/2022-12/msg02841.html]
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+Cc: Laurent Vivier <laurent@vivier.eu>
+---
+ linux-user/syscall.c | 153 +++++++++++--------------------------------
+ 1 file changed, 39 insertions(+), 114 deletions(-)
+
+Index: qemu-8.0.0/linux-user/syscall.c
+===================================================================
+--- qemu-8.0.0.orig/linux-user/syscall.c
++++ qemu-8.0.0/linux-user/syscall.c
+@@ -761,8 +761,8 @@ safe_syscall6(ssize_t, copy_file_range,
+ */
+ #define safe_ioctl(...) safe_syscall(__NR_ioctl, __VA_ARGS__)
+ /* Similarly for fcntl. Note that callers must always:
+- * pass the F_GETLK64 etc constants rather than the unsuffixed F_GETLK
+- * use the flock64 struct rather than unsuffixed flock
++ * pass the F_GETLK etc constants rather than the unsuffixed F_GETLK
++ * use the flock struct rather than unsuffixed flock
+ * This will then work and use a 64-bit offset for both 32-bit and 64-bit hosts.
+ */
+ #ifdef __NR_fcntl64
+@@ -6813,13 +6813,13 @@ static int target_to_host_fcntl_cmd(int
+ ret = cmd;
+ break;
+ case TARGET_F_GETLK:
+- ret = F_GETLK64;
++ ret = F_GETLK;
+ break;
+ case TARGET_F_SETLK:
+- ret = F_SETLK64;
++ ret = F_SETLK;
+ break;
+ case TARGET_F_SETLKW:
+- ret = F_SETLKW64;
++ ret = F_SETLKW;
+ break;
+ case TARGET_F_GETOWN:
+ ret = F_GETOWN;
+@@ -6833,17 +6833,6 @@ static int target_to_host_fcntl_cmd(int
+ case TARGET_F_SETSIG:
+ ret = F_SETSIG;
+ break;
+-#if TARGET_ABI_BITS == 32
+- case TARGET_F_GETLK64:
+- ret = F_GETLK64;
+- break;
+- case TARGET_F_SETLK64:
+- ret = F_SETLK64;
+- break;
+- case TARGET_F_SETLKW64:
+- ret = F_SETLKW64;
+- break;
+-#endif
+ case TARGET_F_SETLEASE:
+ ret = F_SETLEASE;
+ break;
+@@ -6895,8 +6884,8 @@ static int target_to_host_fcntl_cmd(int
+ * them to 5, 6 and 7 before making the syscall(). Since we make the
+ * syscall directly, adjust to what is supported by the kernel.
+ */
+- if (ret >= F_GETLK64 && ret <= F_SETLKW64) {
+- ret -= F_GETLK64 - 5;
++ if (ret >= F_GETLK && ret <= F_SETLKW) {
++ ret -= F_GETLK - 5;
+ }
+ #endif
+
+@@ -6929,55 +6918,11 @@ static int host_to_target_flock(int type
+ return type;
+ }
+
+-static inline abi_long copy_from_user_flock(struct flock64 *fl,
+- abi_ulong target_flock_addr)
+-{
+- struct target_flock *target_fl;
+- int l_type;
+-
+- if (!lock_user_struct(VERIFY_READ, target_fl, target_flock_addr, 1)) {
+- return -TARGET_EFAULT;
+- }
+-
+- __get_user(l_type, &target_fl->l_type);
+- l_type = target_to_host_flock(l_type);
+- if (l_type < 0) {
+- return l_type;
+- }
+- fl->l_type = l_type;
+- __get_user(fl->l_whence, &target_fl->l_whence);
+- __get_user(fl->l_start, &target_fl->l_start);
+- __get_user(fl->l_len, &target_fl->l_len);
+- __get_user(fl->l_pid, &target_fl->l_pid);
+- unlock_user_struct(target_fl, target_flock_addr, 0);
+- return 0;
+-}
+-
+-static inline abi_long copy_to_user_flock(abi_ulong target_flock_addr,
+- const struct flock64 *fl)
+-{
+- struct target_flock *target_fl;
+- short l_type;
+-
+- if (!lock_user_struct(VERIFY_WRITE, target_fl, target_flock_addr, 0)) {
+- return -TARGET_EFAULT;
+- }
+-
+- l_type = host_to_target_flock(fl->l_type);
+- __put_user(l_type, &target_fl->l_type);
+- __put_user(fl->l_whence, &target_fl->l_whence);
+- __put_user(fl->l_start, &target_fl->l_start);
+- __put_user(fl->l_len, &target_fl->l_len);
+- __put_user(fl->l_pid, &target_fl->l_pid);
+- unlock_user_struct(target_fl, target_flock_addr, 1);
+- return 0;
+-}
+-
+-typedef abi_long from_flock64_fn(struct flock64 *fl, abi_ulong target_addr);
+-typedef abi_long to_flock64_fn(abi_ulong target_addr, const struct flock64 *fl);
++typedef abi_long from_flock_fn(struct flock *fl, abi_ulong target_addr);
++typedef abi_long to_flock_fn(abi_ulong target_addr, const struct flock *fl);
+
+ #if defined(TARGET_ARM) && TARGET_ABI_BITS == 32
+-struct target_oabi_flock64 {
++struct target_oabi_flock {
+ abi_short l_type;
+ abi_short l_whence;
+ abi_llong l_start;
+@@ -6985,10 +6930,10 @@ struct target_oabi_flock64 {
+ abi_int l_pid;
+ } QEMU_PACKED;
+
+-static inline abi_long copy_from_user_oabi_flock64(struct flock64 *fl,
++static inline abi_long copy_from_user_oabi_flock(struct flock *fl,
+ abi_ulong target_flock_addr)
+ {
+- struct target_oabi_flock64 *target_fl;
++ struct target_oabi_flock *target_fl;
+ int l_type;
+
+ if (!lock_user_struct(VERIFY_READ, target_fl, target_flock_addr, 1)) {
+@@ -7009,10 +6954,10 @@ static inline abi_long copy_from_user_oa
+ return 0;
+ }
+
+-static inline abi_long copy_to_user_oabi_flock64(abi_ulong target_flock_addr,
+- const struct flock64 *fl)
++static inline abi_long copy_to_user_oabi_flock(abi_ulong target_flock_addr,
++ const struct flock *fl)
+ {
+- struct target_oabi_flock64 *target_fl;
++ struct target_oabi_flock *target_fl;
+ short l_type;
+
+ if (!lock_user_struct(VERIFY_WRITE, target_fl, target_flock_addr, 0)) {
+@@ -7030,10 +6975,10 @@ static inline abi_long copy_to_user_oabi
+ }
+ #endif
+
+-static inline abi_long copy_from_user_flock64(struct flock64 *fl,
++static inline abi_long copy_from_user_flock(struct flock *fl,
+ abi_ulong target_flock_addr)
+ {
+- struct target_flock64 *target_fl;
++ struct target_flock *target_fl;
+ int l_type;
+
+ if (!lock_user_struct(VERIFY_READ, target_fl, target_flock_addr, 1)) {
+@@ -7054,10 +6999,10 @@ static inline abi_long copy_from_user_fl
+ return 0;
+ }
+
+-static inline abi_long copy_to_user_flock64(abi_ulong target_flock_addr,
+- const struct flock64 *fl)
++static inline abi_long copy_to_user_flock(abi_ulong target_flock_addr,
++ const struct flock *fl)
+ {
+- struct target_flock64 *target_fl;
++ struct target_flock *target_fl;
+ short l_type;
+
+ if (!lock_user_struct(VERIFY_WRITE, target_fl, target_flock_addr, 0)) {
+@@ -7076,7 +7021,7 @@ static inline abi_long copy_to_user_floc
+
+ static abi_long do_fcntl(int fd, int cmd, abi_ulong arg)
+ {
+- struct flock64 fl64;
++ struct flock fl64;
+ #ifdef F_GETOWN_EX
+ struct f_owner_ex fox;
+ struct target_f_owner_ex *target_fox;
+@@ -7089,6 +7034,7 @@ static abi_long do_fcntl(int fd, int cmd
+
+ switch(cmd) {
+ case TARGET_F_GETLK:
++ case TARGET_F_OFD_GETLK:
+ ret = copy_from_user_flock(&fl64, arg);
+ if (ret) {
+ return ret;
+@@ -7098,32 +7044,11 @@ static abi_long do_fcntl(int fd, int cmd
+ ret = copy_to_user_flock(arg, &fl64);
+ }
+ break;
+-
+ case TARGET_F_SETLK:
+ case TARGET_F_SETLKW:
+- ret = copy_from_user_flock(&fl64, arg);
+- if (ret) {
+- return ret;
+- }
+- ret = get_errno(safe_fcntl(fd, host_cmd, &fl64));
+- break;
+-
+- case TARGET_F_GETLK64:
+- case TARGET_F_OFD_GETLK:
+- ret = copy_from_user_flock64(&fl64, arg);
+- if (ret) {
+- return ret;
+- }
+- ret = get_errno(safe_fcntl(fd, host_cmd, &fl64));
+- if (ret == 0) {
+- ret = copy_to_user_flock64(arg, &fl64);
+- }
+- break;
+- case TARGET_F_SETLK64:
+- case TARGET_F_SETLKW64:
+ case TARGET_F_OFD_SETLK:
+ case TARGET_F_OFD_SETLKW:
+- ret = copy_from_user_flock64(&fl64, arg);
++ ret = copy_from_user_flock(&fl64, arg);
+ if (ret) {
+ return ret;
+ }
+@@ -7348,7 +7273,7 @@ static inline abi_long target_truncate64
+ arg2 = arg3;
+ arg3 = arg4;
+ }
+- return get_errno(truncate64(arg1, target_offset64(arg2, arg3)));
++ return get_errno(truncate(arg1, target_offset64(arg2, arg3)));
+ }
+ #endif
+
+@@ -7362,7 +7287,7 @@ static inline abi_long target_ftruncate6
+ arg2 = arg3;
+ arg3 = arg4;
+ }
+- return get_errno(ftruncate64(arg1, target_offset64(arg2, arg3)));
++ return get_errno(ftruncate(arg1, target_offset64(arg2, arg3)));
+ }
+ #endif
+
+@@ -8598,7 +8523,7 @@ static int do_getdents(abi_long dirfd, a
+ void *tdirp;
+ int hlen, hoff, toff;
+ int hreclen, treclen;
+- off64_t prev_diroff = 0;
++ off_t prev_diroff = 0;
+
+ hdirp = g_try_malloc(count);
+ if (!hdirp) {
+@@ -8651,7 +8576,7 @@ static int do_getdents(abi_long dirfd, a
+ * Return what we have, resetting the file pointer to the
+ * location of the first record not returned.
+ */
+- lseek64(dirfd, prev_diroff, SEEK_SET);
++ lseek(dirfd, prev_diroff, SEEK_SET);
+ break;
+ }
+
+@@ -8685,7 +8610,7 @@ static int do_getdents64(abi_long dirfd,
+ void *tdirp;
+ int hlen, hoff, toff;
+ int hreclen, treclen;
+- off64_t prev_diroff = 0;
++ off_t prev_diroff = 0;
+
+ hdirp = g_try_malloc(count);
+ if (!hdirp) {
+@@ -8727,7 +8652,7 @@ static int do_getdents64(abi_long dirfd,
+ * Return what we have, resetting the file pointer to the
+ * location of the first record not returned.
+ */
+- lseek64(dirfd, prev_diroff, SEEK_SET);
++ lseek(dirfd, prev_diroff, SEEK_SET);
+ break;
+ }
+
+@@ -11158,7 +11083,7 @@ static abi_long do_syscall1(CPUArchState
+ return -TARGET_EFAULT;
+ }
+ }
+- ret = get_errno(pread64(arg1, p, arg3, target_offset64(arg4, arg5)));
++ ret = get_errno(pread(arg1, p, arg3, target_offset64(arg4, arg5)));
+ unlock_user(p, arg2, ret);
+ return ret;
+ case TARGET_NR_pwrite64:
+@@ -11175,7 +11100,7 @@ static abi_long do_syscall1(CPUArchState
+ return -TARGET_EFAULT;
+ }
+ }
+- ret = get_errno(pwrite64(arg1, p, arg3, target_offset64(arg4, arg5)));
++ ret = get_errno(pwrite(arg1, p, arg3, target_offset64(arg4, arg5)));
+ unlock_user(p, arg2, 0);
+ return ret;
+ #endif
+@@ -11998,14 +11923,14 @@ static abi_long do_syscall1(CPUArchState
+ case TARGET_NR_fcntl64:
+ {
+ int cmd;
+- struct flock64 fl;
+- from_flock64_fn *copyfrom = copy_from_user_flock64;
+- to_flock64_fn *copyto = copy_to_user_flock64;
++ struct flock fl;
++ from_flock_fn *copyfrom = copy_from_user_flock;
++ to_flock_fn *copyto = copy_to_user_flock;
+
+ #ifdef TARGET_ARM
+ if (!cpu_env->eabi) {
+- copyfrom = copy_from_user_oabi_flock64;
+- copyto = copy_to_user_oabi_flock64;
++ copyfrom = copy_from_user_oabi_flock;
++ copyto = copy_to_user_oabi_flock;
+ }
+ #endif
+
+@@ -12015,7 +11940,7 @@ static abi_long do_syscall1(CPUArchState
+ }
+
+ switch(arg2) {
+- case TARGET_F_GETLK64:
++ case TARGET_F_GETLK:
+ ret = copyfrom(&fl, arg3);
+ if (ret) {
+ break;
+@@ -12026,8 +11951,8 @@ static abi_long do_syscall1(CPUArchState
+ }
+ break;
+
+- case TARGET_F_SETLK64:
+- case TARGET_F_SETLKW64:
++ case TARGET_F_SETLK:
++ case TARGET_F_SETLKW:
+ ret = copyfrom(&fl, arg3);
+ if (ret) {
+ break;
diff --git a/meta/recipes-devtools/qemu/qemu/0002-linux-user-loongarch64-Remove-TARGET_FORCE_SHMLBA.patch b/meta/recipes-devtools/qemu/qemu/0002-linux-user-loongarch64-Remove-TARGET_FORCE_SHMLBA.patch
new file mode 100644
index 0000000000..3f01aaa644
--- /dev/null
+++ b/meta/recipes-devtools/qemu/qemu/0002-linux-user-loongarch64-Remove-TARGET_FORCE_SHMLBA.patch
@@ -0,0 +1,43 @@
+From 5bf65b24414d3ff8339f6f1beb221c7c35c91e5d Mon Sep 17 00:00:00 2001
+From: Richard Henderson <richard.henderson@linaro.org>
+Date: Wed, 28 Feb 2024 10:25:15 -1000
+Subject: [PATCH 2/5] linux-user/loongarch64: Remove TARGET_FORCE_SHMLBA
+
+The kernel abi was changed with
+
+ commit d23b77953f5a4fbf94c05157b186aac2a247ae32
+ Author: Huacai Chen <chenhuacai@kernel.org>
+ Date: Wed Jan 17 12:43:08 2024 +0800
+
+ LoongArch: Change SHMLBA from SZ_64K to PAGE_SIZE
+
+during the v6.8 cycle.
+
+Upstream-Status: Submitted [https://www.mail-archive.com/qemu-devel@nongnu.org/msg1026793.html]
+
+Reviewed-by: Song Gao <gaosong@loongson.cn>
+Signed-off-by: Richard Henderson <richard.henderson@linaro.org>
+Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
+---
+ linux-user/loongarch64/target_syscall.h | 7 -------
+ 1 file changed, 7 deletions(-)
+
+diff --git a/linux-user/loongarch64/target_syscall.h b/linux-user/loongarch64/target_syscall.h
+index 8b5de5212..39f229bb9 100644
+--- a/linux-user/loongarch64/target_syscall.h
++++ b/linux-user/loongarch64/target_syscall.h
+@@ -38,11 +38,4 @@ struct target_pt_regs {
+ #define TARGET_MCL_FUTURE 2
+ #define TARGET_MCL_ONFAULT 4
+
+-#define TARGET_FORCE_SHMLBA
+-
+-static inline abi_ulong target_shmlba(CPULoongArchState *env)
+-{
+- return 64 * KiB;
+-}
+-
+ #endif
+--
+2.34.1
+
diff --git a/meta/recipes-devtools/qemu/qemu/0003-apic-fixup-fallthrough-to-PIC.patch b/meta/recipes-devtools/qemu/qemu/0003-apic-fixup-fallthrough-to-PIC.patch
index f350ffce47..e85f8202e9 100644
--- a/meta/recipes-devtools/qemu/qemu/0003-apic-fixup-fallthrough-to-PIC.patch
+++ b/meta/recipes-devtools/qemu/qemu/0003-apic-fixup-fallthrough-to-PIC.patch
@@ -29,11 +29,11 @@ Signed-off-by: He Zhe <zhe.he@windriver.com>
hw/intc/apic.c | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
-diff --git a/hw/intc/apic.c b/hw/intc/apic.c
-index 3df11c34d..9506c88ce 100644
---- a/hw/intc/apic.c
-+++ b/hw/intc/apic.c
-@@ -605,7 +605,7 @@ int apic_accept_pic_intr(DeviceState *dev)
+Index: qemu-8.0.0/hw/intc/apic.c
+===================================================================
+--- qemu-8.0.0.orig/hw/intc/apic.c
++++ qemu-8.0.0/hw/intc/apic.c
+@@ -607,7 +607,7 @@ int apic_accept_pic_intr(DeviceState *de
APICCommonState *s = APIC(dev);
uint32_t lvt0;
@@ -42,6 +42,3 @@ index 3df11c34d..9506c88ce 100644
return -1;
lvt0 = s->lvt[APIC_LVT_LINT0];
---
-2.30.2
-
diff --git a/meta/recipes-devtools/qemu/qemu/0003-linux-user-Add-strace-for-shmat.patch b/meta/recipes-devtools/qemu/qemu/0003-linux-user-Add-strace-for-shmat.patch
new file mode 100644
index 0000000000..0c601c804a
--- /dev/null
+++ b/meta/recipes-devtools/qemu/qemu/0003-linux-user-Add-strace-for-shmat.patch
@@ -0,0 +1,71 @@
+From e8f06676c6c88e12cd5f4f81a839b7111c683596 Mon Sep 17 00:00:00 2001
+From: Richard Henderson <richard.henderson@linaro.org>
+Date: Wed, 28 Feb 2024 10:25:16 -1000
+Subject: [PATCH 3/5] linux-user: Add strace for shmat
+
+Upstream-Status: Submitted [https://www.mail-archive.com/qemu-devel@nongnu.org/msg1026793.html]
+
+Signed-off-by: Richard Henderson <richard.henderson@linaro.org>
+Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
+---
+ linux-user/strace.c | 23 +++++++++++++++++++++++
+ linux-user/strace.list | 2 +-
+ 2 files changed, 24 insertions(+), 1 deletion(-)
+
+diff --git a/linux-user/strace.c b/linux-user/strace.c
+index cf26e5526..47d6ec326 100644
+--- a/linux-user/strace.c
++++ b/linux-user/strace.c
+@@ -670,6 +670,25 @@ print_semctl(CPUArchState *cpu_env, const struct syscallname *name,
+ }
+ #endif
+
++static void
++print_shmat(CPUArchState *cpu_env, const struct syscallname *name,
++ abi_long arg0, abi_long arg1, abi_long arg2,
++ abi_long arg3, abi_long arg4, abi_long arg5)
++{
++ static const struct flags shmat_flags[] = {
++ FLAG_GENERIC(SHM_RND),
++ FLAG_GENERIC(SHM_REMAP),
++ FLAG_GENERIC(SHM_RDONLY),
++ FLAG_GENERIC(SHM_EXEC),
++ };
++
++ print_syscall_prologue(name);
++ print_raw_param(TARGET_ABI_FMT_ld, arg0, 0);
++ print_pointer(arg1, 0);
++ print_flags(shmat_flags, arg2, 1);
++ print_syscall_epilogue(name);
++}
++
+ #ifdef TARGET_NR_ipc
+ static void
+ print_ipc(CPUArchState *cpu_env, const struct syscallname *name,
+@@ -683,6 +702,10 @@ print_ipc(CPUArchState *cpu_env, const struct syscallname *name,
+ print_ipc_cmd(arg3);
+ qemu_log(",0x" TARGET_ABI_FMT_lx ")", arg4);
+ break;
++ case IPCOP_shmat:
++ print_shmat(cpu_env, &(const struct syscallname){ .name = "shmat" },
++ arg1, arg4, arg2, 0, 0, 0);
++ break;
+ default:
+ qemu_log(("%s("
+ TARGET_ABI_FMT_ld ","
+diff --git a/linux-user/strace.list b/linux-user/strace.list
+index 6655d4f26..dfd4237d1 100644
+--- a/linux-user/strace.list
++++ b/linux-user/strace.list
+@@ -1398,7 +1398,7 @@
+ { TARGET_NR_sgetmask, "sgetmask" , NULL, NULL, NULL },
+ #endif
+ #ifdef TARGET_NR_shmat
+-{ TARGET_NR_shmat, "shmat" , NULL, NULL, print_syscall_ret_addr },
++{ TARGET_NR_shmat, "shmat" , NULL, print_shmat, print_syscall_ret_addr },
+ #endif
+ #ifdef TARGET_NR_shmctl
+ { TARGET_NR_shmctl, "shmctl" , NULL, NULL, NULL },
+--
+2.34.1
+
diff --git a/meta/recipes-devtools/qemu/qemu/0004-configure-Add-pkg-config-handling-for-libgcrypt.patch b/meta/recipes-devtools/qemu/qemu/0004-configure-Add-pkg-config-handling-for-libgcrypt.patch
index 6faebd4e09..f981a64a54 100644
--- a/meta/recipes-devtools/qemu/qemu/0004-configure-Add-pkg-config-handling-for-libgcrypt.patch
+++ b/meta/recipes-devtools/qemu/qemu/0004-configure-Add-pkg-config-handling-for-libgcrypt.patch
@@ -14,19 +14,16 @@ Signed-off-by: He Zhe <zhe.he@windriver.com>
meson.build | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
-diff --git a/meson.build b/meson.build
-index 861de93c4..d45ff2d7c 100644
---- a/meson.build
-+++ b/meson.build
-@@ -1063,7 +1063,7 @@ endif
+Index: qemu-8.1.0/meson.build
+===================================================================
+--- qemu-8.1.0.orig/meson.build
++++ qemu-8.1.0/meson.build
+@@ -1481,7 +1481,7 @@ endif
if not gnutls_crypto.found()
if (not get_option('gcrypt').auto() or have_system) and not get_option('nettle').enabled()
gcrypt = dependency('libgcrypt', version: '>=1.8',
- method: 'config-tool',
+ method: 'pkg-config',
- required: get_option('gcrypt'),
- kwargs: static_kwargs)
+ required: get_option('gcrypt'))
# Debian has removed -lgpg-error from libgcrypt-config
---
-2.30.2
-
+ # as it "spreads unnecessary dependencies" which in
diff --git a/meta/recipes-devtools/qemu/qemu/0004-linux-user-Rewrite-target_shmat.patch b/meta/recipes-devtools/qemu/qemu/0004-linux-user-Rewrite-target_shmat.patch
new file mode 100644
index 0000000000..88c3ed40b0
--- /dev/null
+++ b/meta/recipes-devtools/qemu/qemu/0004-linux-user-Rewrite-target_shmat.patch
@@ -0,0 +1,236 @@
+From cb48d5d1592e63ebd0d4a3e300ef98e38e6306d7 Mon Sep 17 00:00:00 2001
+From: Richard Henderson <richard.henderson@linaro.org>
+Date: Wed, 28 Feb 2024 10:25:17 -1000
+Subject: [PATCH 4/5] linux-user: Rewrite target_shmat
+
+Handle combined host and guest alignment requirements.
+Handle host and guest page size differences.
+Handle SHM_EXEC.
+
+Upstream-Status: Submitted [https://www.mail-archive.com/qemu-devel@nongnu.org/msg1026793.html]
+
+Resolves: https://gitlab.com/qemu-project/qemu/-/issues/115
+Signed-off-by: Richard Henderson <richard.henderson@linaro.org>
+Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
+---
+ linux-user/mmap.c | 166 +++++++++++++++++++++++++++++++++++++---------
+ 1 file changed, 133 insertions(+), 33 deletions(-)
+
+diff --git a/linux-user/mmap.c b/linux-user/mmap.c
+index 18fb3aaf7..6a2f649bb 100644
+--- a/linux-user/mmap.c
++++ b/linux-user/mmap.c
+@@ -1062,69 +1062,161 @@ static inline abi_ulong target_shmlba(CPUArchState *cpu_env)
+ }
+ #endif
+
++#if defined(__arm__) || defined(__mips__) || defined(__sparc__)
++#define HOST_FORCE_SHMLBA 1
++#else
++#define HOST_FORCE_SHMLBA 0
++#endif
++
+ abi_ulong target_shmat(CPUArchState *cpu_env, int shmid,
+ abi_ulong shmaddr, int shmflg)
+ {
+ CPUState *cpu = env_cpu(cpu_env);
+- abi_ulong raddr;
+ struct shmid_ds shm_info;
+ int ret;
+- abi_ulong shmlba;
++ int h_pagesize;
++ int t_shmlba, h_shmlba, m_shmlba;
++ size_t t_len, h_len, m_len;
+
+ /* shmat pointers are always untagged */
+
+- /* find out the length of the shared memory segment */
++ /*
++ * Because we can't use host shmat() unless the address is sufficiently
++ * aligned for the host, we'll need to check both.
++ * TODO: Could be fixed with softmmu.
++ */
++ t_shmlba = target_shmlba(cpu_env);
++ h_pagesize = qemu_real_host_page_size();
++ h_shmlba = (HOST_FORCE_SHMLBA ? SHMLBA : h_pagesize);
++ m_shmlba = MAX(t_shmlba, h_shmlba);
++
++ if (shmaddr) {
++ if (shmaddr & (m_shmlba - 1)) {
++ if (shmflg & SHM_RND) {
++ /*
++ * The guest is allowing the kernel to round the address.
++ * Assume that the guest is ok with us rounding to the
++ * host required alignment too. Anyway if we don't, we'll
++ * get an error from the kernel.
++ */
++ shmaddr &= ~(m_shmlba - 1);
++ if (shmaddr == 0 && (shmflg & SHM_REMAP)) {
++ return -TARGET_EINVAL;
++ }
++ } else {
++ int require = TARGET_PAGE_SIZE;
++#ifdef TARGET_FORCE_SHMLBA
++ require = t_shmlba;
++#endif
++ /*
++ * Include host required alignment, as otherwise we cannot
++ * use host shmat at all.
++ */
++ require = MAX(require, h_shmlba);
++ if (shmaddr & (require - 1)) {
++ return -TARGET_EINVAL;
++ }
++ }
++ }
++ } else {
++ if (shmflg & SHM_REMAP) {
++ return -TARGET_EINVAL;
++ }
++ }
++ /* All rounding now manually concluded. */
++ shmflg &= ~SHM_RND;
++
++ /* Find out the length of the shared memory segment. */
+ ret = get_errno(shmctl(shmid, IPC_STAT, &shm_info));
+ if (is_error(ret)) {
+ /* can't get length, bail out */
+ return ret;
+ }
++ t_len = TARGET_PAGE_ALIGN(shm_info.shm_segsz);
++ h_len = ROUND_UP(shm_info.shm_segsz, h_pagesize);
++ m_len = MAX(t_len, h_len);
+
+- shmlba = target_shmlba(cpu_env);
+-
+- if (shmaddr & (shmlba - 1)) {
+- if (shmflg & SHM_RND) {
+- shmaddr &= ~(shmlba - 1);
+- } else {
+- return -TARGET_EINVAL;
+- }
+- }
+- if (!guest_range_valid_untagged(shmaddr, shm_info.shm_segsz)) {
++ if (!guest_range_valid_untagged(shmaddr, m_len)) {
+ return -TARGET_EINVAL;
+ }
+
+ WITH_MMAP_LOCK_GUARD() {
+- void *host_raddr;
++ bool mapped = false;
++ void *want, *test;
+ abi_ulong last;
+
+- if (shmaddr) {
+- host_raddr = shmat(shmid, (void *)g2h_untagged(shmaddr), shmflg);
++ if (!shmaddr) {
++ shmaddr = mmap_find_vma(0, m_len, m_shmlba);
++ if (shmaddr == -1) {
++ return -TARGET_ENOMEM;
++ }
++ mapped = !reserved_va;
++ } else if (shmflg & SHM_REMAP) {
++ /*
++ * If host page size > target page size, the host shmat may map
++ * more memory than the guest expects. Reject a mapping that
++ * would replace memory in the unexpected gap.
++ * TODO: Could be fixed with softmmu.
++ */
++ if (t_len < h_len &&
++ !page_check_range_empty(shmaddr + t_len,
++ shmaddr + h_len - 1)) {
++ return -TARGET_EINVAL;
++ }
+ } else {
+- abi_ulong mmap_start;
++ if (!page_check_range_empty(shmaddr, shmaddr + m_len - 1)) {
++ return -TARGET_EINVAL;
++ }
++ }
+
+- /* In order to use the host shmat, we need to honor host SHMLBA. */
+- mmap_start = mmap_find_vma(0, shm_info.shm_segsz,
+- MAX(SHMLBA, shmlba));
++ /* All placement is now complete. */
++ want = (void *)g2h_untagged(shmaddr);
+
+- if (mmap_start == -1) {
+- return -TARGET_ENOMEM;
++ /*
++ * Map anonymous pages across the entire range, then remap with
++ * the shared memory. This is required for a number of corner
++ * cases for which host and guest page sizes differ.
++ */
++ if (h_len != t_len) {
++ int mmap_p = PROT_READ | (shmflg & SHM_RDONLY ? 0 : PROT_WRITE);
++ int mmap_f = MAP_PRIVATE | MAP_ANONYMOUS
++ | (reserved_va || (shmflg & SHM_REMAP)
++ ? MAP_FIXED : MAP_FIXED_NOREPLACE);
++
++ test = mmap(want, m_len, mmap_p, mmap_f, -1, 0);
++ if (unlikely(test != want)) {
++ /* shmat returns EINVAL not EEXIST like mmap. */
++ ret = (test == MAP_FAILED && errno != EEXIST
++ ? get_errno(-1) : -TARGET_EINVAL);
++ if (mapped) {
++ do_munmap(want, m_len);
++ }
++ return ret;
+ }
+- host_raddr = shmat(shmid, g2h_untagged(mmap_start),
+- shmflg | SHM_REMAP);
++ mapped = true;
+ }
+
+- if (host_raddr == (void *)-1) {
+- return get_errno(-1);
++ if (reserved_va || mapped) {
++ shmflg |= SHM_REMAP;
++ }
++ test = shmat(shmid, want, shmflg);
++ if (test == MAP_FAILED) {
++ ret = get_errno(-1);
++ if (mapped) {
++ do_munmap(want, m_len);
++ }
++ return ret;
+ }
+- raddr = h2g(host_raddr);
+- last = raddr + shm_info.shm_segsz - 1;
++ assert(test == want);
+
+- page_set_flags(raddr, last,
++ last = shmaddr + m_len - 1;
++ page_set_flags(shmaddr, last,
+ PAGE_VALID | PAGE_RESET | PAGE_READ |
+- (shmflg & SHM_RDONLY ? 0 : PAGE_WRITE));
++ (shmflg & SHM_RDONLY ? 0 : PAGE_WRITE) |
++ (shmflg & SHM_EXEC ? PAGE_EXEC : 0));
+
+- shm_region_rm_complete(raddr, last);
+- shm_region_add(raddr, last);
++ shm_region_rm_complete(shmaddr, last);
++ shm_region_add(shmaddr, last);
+ }
+
+ /*
+@@ -1138,7 +1230,15 @@ abi_ulong target_shmat(CPUArchState *cpu_env, int shmid,
+ tb_flush(cpu);
+ }
+
+- return raddr;
++ if (qemu_loglevel_mask(CPU_LOG_PAGE)) {
++ FILE *f = qemu_log_trylock();
++ if (f) {
++ fprintf(f, "page layout changed following shmat\n");
++ page_dump(f);
++ qemu_log_unlock(f);
++ }
++ }
++ return shmaddr;
+ }
+
+ abi_long target_shmdt(abi_ulong shmaddr)
+--
+2.34.1
+
diff --git a/meta/recipes-devtools/qemu/qemu/0005-qemu-Do-not-include-file-if-not-exists.patch b/meta/recipes-devtools/qemu/qemu/0005-qemu-Do-not-include-file-if-not-exists.patch
index 3f3c39f996..38aa4c3bbe 100644
--- a/meta/recipes-devtools/qemu/qemu/0005-qemu-Do-not-include-file-if-not-exists.patch
+++ b/meta/recipes-devtools/qemu/qemu/0005-qemu-Do-not-include-file-if-not-exists.patch
@@ -16,11 +16,11 @@ Signed-off-by: Sakib Sajal <sakib.sajal@windriver.com>
linux-user/syscall.c | 2 ++
1 file changed, 2 insertions(+)
-diff --git a/linux-user/syscall.c b/linux-user/syscall.c
-index f65045efe..340e0c6f0 100644
---- a/linux-user/syscall.c
-+++ b/linux-user/syscall.c
-@@ -113,7 +113,9 @@
+Index: qemu-8.0.0/linux-user/syscall.c
+===================================================================
+--- qemu-8.0.0.orig/linux-user/syscall.c
++++ qemu-8.0.0/linux-user/syscall.c
+@@ -115,7 +115,9 @@
#include <linux/blkpg.h>
#include <netpacket/packet.h>
#include <linux/netlink.h>
@@ -30,6 +30,3 @@ index f65045efe..340e0c6f0 100644
#include <linux/rtc.h>
#include <sound/asound.h>
#ifdef HAVE_BTRFS_H
---
-2.30.2
-
diff --git a/meta/recipes-devtools/qemu/qemu/0005-tests-tcg-Check-that-shmat-does-not-break-proc-self-.patch b/meta/recipes-devtools/qemu/qemu/0005-tests-tcg-Check-that-shmat-does-not-break-proc-self-.patch
new file mode 100644
index 0000000000..5afb35ea0c
--- /dev/null
+++ b/meta/recipes-devtools/qemu/qemu/0005-tests-tcg-Check-that-shmat-does-not-break-proc-self-.patch
@@ -0,0 +1,85 @@
+From 1234063488134ad1f541f56dd30caa7896905f06 Mon Sep 17 00:00:00 2001
+From: Ilya Leoshkevich <iii@linux.ibm.com>
+Date: Wed, 28 Feb 2024 10:25:18 -1000
+Subject: [PATCH 5/5] tests/tcg: Check that shmat() does not break
+ /proc/self/maps
+
+Add a regression test for a recently fixed issue, where shmat()
+desynced the guest and the host view of the address space and caused
+open("/proc/self/maps") to SEGV.
+
+Upstream-Status: Submitted [https://www.mail-archive.com/qemu-devel@nongnu.org/msg1026793.html]
+
+Signed-off-by: Ilya Leoshkevich <iii@linux.ibm.com>
+Message-Id: <jwyuvao4apydvykmsnvacwshdgy3ixv7qvkh4dbxm3jkwgnttw@k4wpaayou7oq>
+Signed-off-by: Richard Henderson <richard.henderson@linaro.org>
+Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
+---
+ tests/tcg/multiarch/linux/linux-shmat-maps.c | 55 ++++++++++++++++++++
+ 1 file changed, 55 insertions(+)
+ create mode 100644 tests/tcg/multiarch/linux/linux-shmat-maps.c
+
+diff --git a/tests/tcg/multiarch/linux/linux-shmat-maps.c b/tests/tcg/multiarch/linux/linux-shmat-maps.c
+new file mode 100644
+index 000000000..0ccf7a973
+--- /dev/null
++++ b/tests/tcg/multiarch/linux/linux-shmat-maps.c
+@@ -0,0 +1,55 @@
++/*
++ * Test that shmat() does not break /proc/self/maps.
++ *
++ * SPDX-License-Identifier: GPL-2.0-or-later
++ */
++#include <assert.h>
++#include <fcntl.h>
++#include <stdlib.h>
++#include <sys/ipc.h>
++#include <sys/shm.h>
++#include <unistd.h>
++
++int main(void)
++{
++ char buf[128];
++ int err, fd;
++ int shmid;
++ ssize_t n;
++ void *p;
++
++ shmid = shmget(IPC_PRIVATE, 1, IPC_CREAT | 0600);
++ assert(shmid != -1);
++
++ /*
++ * The original bug required a non-NULL address, which skipped the
++ * mmap_find_vma step, which could result in a host mapping smaller
++ * than the target mapping. Choose an address at random.
++ */
++ p = shmat(shmid, (void *)0x800000, SHM_RND);
++ if (p == (void *)-1) {
++ /*
++ * Because we are now running the testcase for all guests for which
++ * we have a cross-compiler, the above random address might conflict
++ * with the guest executable in some way. Rather than stopping,
++ * continue with a system supplied address, which should never fail.
++ */
++ p = shmat(shmid, NULL, 0);
++ assert(p != (void *)-1);
++ }
++
++ fd = open("/proc/self/maps", O_RDONLY);
++ assert(fd != -1);
++ do {
++ n = read(fd, buf, sizeof(buf));
++ assert(n >= 0);
++ } while (n != 0);
++ close(fd);
++
++ err = shmdt(p);
++ assert(err == 0);
++ err = shmctl(shmid, IPC_RMID, NULL);
++ assert(err == 0);
++
++ return EXIT_SUCCESS;
++}
+--
+2.34.1
+
diff --git a/meta/recipes-devtools/qemu/qemu/0006-qemu-Add-some-user-space-mmap-tweaks-to-address-musl.patch b/meta/recipes-devtools/qemu/qemu/0006-qemu-Add-some-user-space-mmap-tweaks-to-address-musl.patch
index 75c0369318..5d1d7c6881 100644
--- a/meta/recipes-devtools/qemu/qemu/0006-qemu-Add-some-user-space-mmap-tweaks-to-address-musl.patch
+++ b/meta/recipes-devtools/qemu/qemu/0006-qemu-Add-some-user-space-mmap-tweaks-to-address-musl.patch
@@ -23,11 +23,11 @@ Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org
linux-user/mmap.c | 10 +++++++---
1 file changed, 7 insertions(+), 3 deletions(-)
-diff --git a/linux-user/mmap.c b/linux-user/mmap.c
-index c125031b9..e651834a5 100644
---- a/linux-user/mmap.c
-+++ b/linux-user/mmap.c
-@@ -749,12 +749,16 @@ abi_long target_mremap(abi_ulong old_addr, abi_ulong old_size,
+Index: qemu-8.0.0/linux-user/mmap.c
+===================================================================
+--- qemu-8.0.0.orig/linux-user/mmap.c
++++ qemu-8.0.0/linux-user/mmap.c
+@@ -776,12 +776,16 @@ abi_long target_mremap(abi_ulong old_add
int prot;
void *host_addr;
@@ -47,6 +47,3 @@ index c125031b9..e651834a5 100644
return -1;
}
---
-2.30.2
-
diff --git a/meta/recipes-devtools/qemu/qemu/0007-qemu-Determinism-fixes.patch b/meta/recipes-devtools/qemu/qemu/0007-qemu-Determinism-fixes.patch
index 0d7dae3689..d3f965e070 100644
--- a/meta/recipes-devtools/qemu/qemu/0007-qemu-Determinism-fixes.patch
+++ b/meta/recipes-devtools/qemu/qemu/0007-qemu-Determinism-fixes.patch
@@ -16,10 +16,10 @@ RP 2021/3/1
scripts/decodetree.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
-diff --git a/scripts/decodetree.py b/scripts/decodetree.py
-index a03dc6b5e..4ea24c1f3 100644
---- a/scripts/decodetree.py
-+++ b/scripts/decodetree.py
+Index: qemu-8.0.0/scripts/decodetree.py
+===================================================================
+--- qemu-8.0.0.orig/scripts/decodetree.py
++++ qemu-8.0.0/scripts/decodetree.py
@@ -1328,7 +1328,7 @@ def main():
toppat = ExcMultiPattern(0)
@@ -29,6 +29,3 @@ index a03dc6b5e..4ea24c1f3 100644
f = open(filename, 'rt', encoding='utf-8')
parse_file(f, toppat)
f.close()
---
-2.30.2
-
diff --git a/meta/recipes-devtools/qemu/qemu/0008-tests-meson.build-use-relative-path-to-refer-to-file.patch b/meta/recipes-devtools/qemu/qemu/0008-tests-meson.build-use-relative-path-to-refer-to-file.patch
index 43d3c7cf1f..a84364ccc1 100644
--- a/meta/recipes-devtools/qemu/qemu/0008-tests-meson.build-use-relative-path-to-refer-to-file.patch
+++ b/meta/recipes-devtools/qemu/qemu/0008-tests-meson.build-use-relative-path-to-refer-to-file.patch
@@ -17,22 +17,25 @@ Signed-off-by: Changqing Li <changqing.li@windriver.com>
tests/unit/meson.build | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
-diff --git a/tests/unit/meson.build b/tests/unit/meson.build
-index 96b295263..e4c3246dc 100644
---- a/tests/unit/meson.build
-+++ b/tests/unit/meson.build
-@@ -44,9 +44,9 @@ tests = {
+Index: qemu-8.0.0/tests/unit/meson.build
+===================================================================
+--- qemu-8.0.0.orig/tests/unit/meson.build
++++ qemu-8.0.0/tests/unit/meson.build
+@@ -46,7 +46,7 @@ tests = {
'test-keyval': [testqapi],
'test-logging': [],
'test-uuid': [],
- 'ptimer-test': ['ptimer-test-stubs.c', meson.project_source_root() / 'hw/core/ptimer.c'],
+ 'ptimer-test': ['ptimer-test-stubs.c', '../../hw/core/ptimer.c'],
'test-qapi-util': [],
-- 'test-smp-parse': [qom, meson.project_source_root() / 'hw/core/machine-smp.c'],
-+ 'test-smp-parse': [qom, '../../hw/core/machine-smp.c'],
- }
-
- if have_system or have_tools
---
-2.30.2
-
+ 'test-interval-tree': [],
+ 'test-xs-node': [qom],
+@@ -136,7 +136,7 @@ if have_system
+ 'test-util-sockets': ['socket-helpers.c'],
+ 'test-base64': [],
+ 'test-bufferiszero': [],
+- 'test-smp-parse': [qom, meson.project_source_root() / 'hw/core/machine-smp.c'],
++ 'test-smp-parse': [qom, '../../hw/core/machine-smp.c'],
+ 'test-vmstate': [migration, io],
+ 'test-yank': ['socket-helpers.c', qom, io, chardev]
+ }
diff --git a/meta/recipes-devtools/qemu/qemu/0009-Define-MAP_SYNC-and-MAP_SHARED_VALIDATE-on-needed-li.patch b/meta/recipes-devtools/qemu/qemu/0009-Define-MAP_SYNC-and-MAP_SHARED_VALIDATE-on-needed-li.patch
index 23d0a69802..4de6cc2445 100644
--- a/meta/recipes-devtools/qemu/qemu/0009-Define-MAP_SYNC-and-MAP_SHARED_VALIDATE-on-needed-li.patch
+++ b/meta/recipes-devtools/qemu/qemu/0009-Define-MAP_SYNC-and-MAP_SHARED_VALIDATE-on-needed-li.patch
@@ -18,10 +18,10 @@ Cc: Michael S. Tsirkin <mst@redhat.com>
util/mmap-alloc.c | 10 +++++++---
1 file changed, 7 insertions(+), 3 deletions(-)
-diff --git a/util/mmap-alloc.c b/util/mmap-alloc.c
-index 893d86435..86d3cda24 100644
---- a/util/mmap-alloc.c
-+++ b/util/mmap-alloc.c
+Index: qemu-8.0.0/util/mmap-alloc.c
+===================================================================
+--- qemu-8.0.0.orig/util/mmap-alloc.c
++++ qemu-8.0.0/util/mmap-alloc.c
@@ -10,14 +10,18 @@
* later. See the COPYING file in the top-level directory.
*/
@@ -44,6 +44,3 @@ index 893d86435..86d3cda24 100644
#include "qemu/mmap-alloc.h"
#include "qemu/host-utils.h"
#include "qemu/cutils.h"
---
-2.30.2
-
diff --git a/meta/recipes-devtools/qemu/qemu/0010-hw-pvrdma-Protect-against-buggy-or-malicious-guest-d.patch b/meta/recipes-devtools/qemu/qemu/0010-hw-pvrdma-Protect-against-buggy-or-malicious-guest-d.patch
index 826d42fc20..6caf35b634 100644
--- a/meta/recipes-devtools/qemu/qemu/0010-hw-pvrdma-Protect-against-buggy-or-malicious-guest-d.patch
+++ b/meta/recipes-devtools/qemu/qemu/0010-hw-pvrdma-Protect-against-buggy-or-malicious-guest-d.patch
@@ -1,33 +1,33 @@
-From 52c38fa9f3a790a7c2805e7d8cce3ea9262d6ae2 Mon Sep 17 00:00:00 2001
+CVE: CVE-2022-1050
+Upstream-Status: Submitted [https://lore.kernel.org/qemu-devel/20220403095234.2210-1-yuval.shaia.ml@gmail.com/]
+Signed-off-by: Ross Burton <ross.burton@arm.com>
+
+From dbdef95c272e8f3ec037c3db4197c66002e30995 Mon Sep 17 00:00:00 2001
From: Yuval Shaia <yuval.shaia.ml@gmail.com>
-Date: Tue, 12 Apr 2022 11:01:51 +0100
-Subject: [PATCH 10/12] hw/pvrdma: Protect against buggy or malicious guest
- driver
+Date: Sun, 3 Apr 2022 12:52:34 +0300
+Subject: [PATCH] hw/pvrdma: Protect against buggy or malicious guest driver
Guest driver might execute HW commands when shared buffers are not yet
allocated.
-This might happen on purpose (malicious guest) or because some other
-guest/host address mapping.
+This could happen on purpose (malicious guest) or because of some other
+guest/host address mapping error.
We need to protect againts such case.
-Reported-by: Mauro Matteo Cascella <mcascell@redhat.com>
-Signed-off-by: Yuval Shaia <yuval.shaia.ml@gmail.com>
-
-CVE: CVE-2022-1050
-Upstream-Status: Submitted [https://lists.nongnu.org/archive/html/qemu-devel/2022-03/msg05197.html]
+Fixes: CVE-2022-1050
+Reported-by: Raven <wxhusst@gmail.com>
+Signed-off-by: Yuval Shaia <yuval.shaia.ml@gmail.com>
---
- hw/rdma/vmw/pvrdma_cmd.c | 6 ++++++
- hw/rdma/vmw/pvrdma_main.c | 3 ++-
- 2 files changed, 8 insertions(+), 1 deletion(-)
+ hw/rdma/vmw/pvrdma_cmd.c | 6 ++++++
+ 1 file changed, 6 insertions(+)
-diff --git a/hw/rdma/vmw/pvrdma_cmd.c b/hw/rdma/vmw/pvrdma_cmd.c
-index da7ddfa54..89db963c4 100644
---- a/hw/rdma/vmw/pvrdma_cmd.c
-+++ b/hw/rdma/vmw/pvrdma_cmd.c
-@@ -796,6 +796,12 @@ int pvrdma_exec_cmd(PVRDMADev *dev)
-
- dsr_info = &dev->dsr_info;
+Index: qemu-8.0.0/hw/rdma/vmw/pvrdma_cmd.c
+===================================================================
+--- qemu-8.0.0.orig/hw/rdma/vmw/pvrdma_cmd.c
++++ qemu-8.0.0/hw/rdma/vmw/pvrdma_cmd.c
+@@ -782,6 +782,12 @@ int pvrdma_exec_cmd(PVRDMADev *dev)
+ goto out;
+ }
+ if (!dsr_info->dsr) {
+ /* Buggy or malicious guest driver */
@@ -38,20 +38,3 @@ index da7ddfa54..89db963c4 100644
if (dsr_info->req->hdr.cmd >= sizeof(cmd_handlers) /
sizeof(struct cmd_handler)) {
rdma_error_report("Unsupported command");
-diff --git a/hw/rdma/vmw/pvrdma_main.c b/hw/rdma/vmw/pvrdma_main.c
-index 91206dbb8..0b7d908e2 100644
---- a/hw/rdma/vmw/pvrdma_main.c
-+++ b/hw/rdma/vmw/pvrdma_main.c
-@@ -249,7 +249,8 @@ static void init_dsr_dev_caps(PVRDMADev *dev)
- {
- struct pvrdma_device_shared_region *dsr;
-
-- if (dev->dsr_info.dsr == NULL) {
-+ if (!dev->dsr_info.dsr) {
-+ /* Buggy or malicious guest driver */
- rdma_error_report("Can't initialized DSR");
- return;
- }
---
-2.30.2
-
diff --git a/meta/recipes-devtools/qemu/qemu/0011-linux-user-workaround-for-missing-MAP_FIXED_NOREPLAC.patch b/meta/recipes-devtools/qemu/qemu/0011-linux-user-workaround-for-missing-MAP_FIXED_NOREPLAC.patch
new file mode 100644
index 0000000000..cc53b1eedd
--- /dev/null
+++ b/meta/recipes-devtools/qemu/qemu/0011-linux-user-workaround-for-missing-MAP_FIXED_NOREPLAC.patch
@@ -0,0 +1,282 @@
+From fa9bcabe2387bb230ef82d62827ad6f93b8a1e61 Mon Sep 17 00:00:00 2001
+From: Frederic Konrad <fkonrad@amd.com>
+Date: Wed, 17 Jan 2024 18:15:06 +0000
+Subject: [PATCH 1/2] linux-user/*: workaround for missing MAP_FIXED_NOREPLACE
+
+QEMU v8.1.0 recently requires MAP_FIXED_NOREPLACE flags implementation for mmap.
+
+This is missing from ubuntu 18.04, thus this patch catches the mmap calls which
+could use that new flag and forwards them to mmap when MAP_FIXED_NOREPLACE
+flag isn't set or emulates them by checking the returned address w.r.t the
+requested address.
+
+Signed-off-by: Frederic Konrad <fkonrad@amd.com>
+Signed-off-by: Francisco Iglesias <francisco.iglesias@amd.com>
+
+Upstream-Status: Inappropriate [OE specific]
+
+The upstream only supports the last two major releases of an OS. The ones
+they have declared all have kernel 4.17 or newer.
+
+See:
+https://xilinx.slack.com/archives/D04G2647CTV/p1705074697942019
+
+https://www.qemu.org/docs/master/about/build-platforms.html
+
+ The project aims to support the most recent major version at all times for up
+ to five years after its initial release. Support for the previous major
+ version will be dropped 2 years after the new major version is released or
+ when the vendor itself drops support, whichever comes first.
+
+Signed-off-by: Mark Hatle <mark.hatle@amd.com>
+---
+ linux-user/elfload.c | 7 +++--
+ linux-user/meson.build | 1 +
+ linux-user/mmap-fixed.c | 63 +++++++++++++++++++++++++++++++++++++++++
+ linux-user/mmap-fixed.h | 39 +++++++++++++++++++++++++
+ linux-user/mmap.c | 31 +++++++++++---------
+ linux-user/syscall.c | 1 +
+ 6 files changed, 125 insertions(+), 17 deletions(-)
+ create mode 100644 linux-user/mmap-fixed.c
+ create mode 100644 linux-user/mmap-fixed.h
+
+Index: qemu-8.2.1/linux-user/elfload.c
+===================================================================
+--- qemu-8.2.1.orig/linux-user/elfload.c
++++ qemu-8.2.1/linux-user/elfload.c
+@@ -22,6 +22,7 @@
+ #include "qemu/error-report.h"
+ #include "target_signal.h"
+ #include "accel/tcg/debuginfo.h"
++#include "mmap-fixed.h"
+
+ #ifdef TARGET_ARM
+ #include "target/arm/cpu-features.h"
+@@ -2765,9 +2766,9 @@ static abi_ulong create_elf_tables(abi_u
+ static int pgb_try_mmap(uintptr_t addr, uintptr_t addr_last, bool keep)
+ {
+ size_t size = addr_last - addr + 1;
+- void *p = mmap((void *)addr, size, PROT_NONE,
+- MAP_ANONYMOUS | MAP_PRIVATE |
+- MAP_NORESERVE | MAP_FIXED_NOREPLACE, -1, 0);
++ void *p = mmap_fixed_noreplace((void *)addr, size, PROT_NONE,
++ MAP_ANONYMOUS | MAP_PRIVATE |
++ MAP_NORESERVE | MAP_FIXED_NOREPLACE, -1, 0);
+ int ret;
+
+ if (p == MAP_FAILED) {
+Index: qemu-8.2.1/linux-user/meson.build
+===================================================================
+--- qemu-8.2.1.orig/linux-user/meson.build
++++ qemu-8.2.1/linux-user/meson.build
+@@ -14,6 +14,7 @@ linux_user_ss.add(files(
+ 'linuxload.c',
+ 'main.c',
+ 'mmap.c',
++ 'mmap-fixed.c',
+ 'signal.c',
+ 'strace.c',
+ 'syscall.c',
+Index: qemu-8.2.1/linux-user/mmap-fixed.c
+===================================================================
+--- /dev/null
++++ qemu-8.2.1/linux-user/mmap-fixed.c
+@@ -0,0 +1,63 @@
++/*
++ * Workaround for MAP_FIXED_NOREPLACE
++ *
++ * Copyright (c) 2024, Advanced Micro Devices, Inc.
++ * Developed by Fred Konrad <fkonrad@amd.com>
++ *
++ * Permission is hereby granted, free of charge, to any person obtaining a copy
++ * of this software and associated documentation files (the "Software"), to deal
++ * in the Software without restriction, including without limitation the rights
++ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
++ * copies of the Software, and to permit persons to whom the Software is
++ * furnished to do so, subject to the following conditions:
++ *
++ * The above copyright notice and this permission notice shall be included in
++ * all copies or substantial portions of the Software.
++ *
++ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
++ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
++ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
++ * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
++ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
++ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
++ * THE SOFTWARE.
++ */
++
++#include <sys/mman.h>
++#include <errno.h>
++
++#ifndef MAP_FIXED_NOREPLACE
++#include "mmap-fixed.h"
++
++void *mmap_fixed_noreplace(void *addr, size_t len, int prot, int flags,
++ int fd, off_t offset)
++{
++ void *retaddr;
++
++ if (!(flags & MAP_FIXED_NOREPLACE)) {
++ /* General case, use the regular mmap. */
++ return mmap(addr, len, prot, flags, fd, offset);
++ }
++
++ /* Since MAP_FIXED_NOREPLACE is not implemented, try to emulate it. */
++ flags = flags & ~(MAP_FIXED_NOREPLACE | MAP_FIXED);
++ retaddr = mmap(addr, len, prot, flags, fd, offset);
++ if ((retaddr == addr) || (retaddr == MAP_FAILED)) {
++ /*
++ * Either the map worked and we get the good address so it can be
++ * returned, or it failed and would have failed the same with
++ * MAP_FIXED*, in which case return MAP_FAILED.
++ */
++ return retaddr;
++ } else {
++ /*
++ * Page has been mapped but not at the requested address.. unmap it and
++ * return EEXIST.
++ */
++ munmap(retaddr, len);
++ errno = EEXIST;
++ return MAP_FAILED;
++ }
++}
++
++#endif
+Index: qemu-8.2.1/linux-user/mmap-fixed.h
+===================================================================
+--- /dev/null
++++ qemu-8.2.1/linux-user/mmap-fixed.h
+@@ -0,0 +1,39 @@
++/*
++ * Workaround for MAP_FIXED_NOREPLACE
++ *
++ * Copyright (c) 2024, Advanced Micro Devices, Inc.
++ * Developed by Fred Konrad <fkonrad@amd.com>
++ *
++ * Permission is hereby granted, free of charge, to any person obtaining a copy
++ * of this software and associated documentation files (the "Software"), to deal
++ * in the Software without restriction, including without limitation the rights
++ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
++ * copies of the Software, and to permit persons to whom the Software is
++ * furnished to do so, subject to the following conditions:
++ *
++ * The above copyright notice and this permission notice shall be included in
++ * all copies or substantial portions of the Software.
++ *
++ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
++ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
++ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
++ * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
++ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
++ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
++ * THE SOFTWARE.
++ */
++
++#ifndef MMAP_FIXED_H
++#define MMAP_FIXED_H
++
++#ifndef MAP_FIXED_NOREPLACE
++#define MAP_FIXED_NOREPLACE 0x100000
++
++void *mmap_fixed_noreplace(void *addr, size_t len, int prot, int flags,
++ int fd, off_t offset);
++
++#else /* MAP_FIXED_NOREPLACE */
++#define mmap_fixed_noreplace mmap
++#endif /* MAP_FIXED_NOREPLACE */
++
++#endif /* MMAP_FIXED_H */
+Index: qemu-8.2.1/linux-user/mmap.c
+===================================================================
+--- qemu-8.2.1.orig/linux-user/mmap.c
++++ qemu-8.2.1/linux-user/mmap.c
+@@ -25,6 +25,7 @@
+ #include "user-mmap.h"
+ #include "target_mman.h"
+ #include "qemu/interval-tree.h"
++#include "mmap-fixed.h"
+
+ #ifdef TARGET_ARM
+ #include "target/arm/cpu-features.h"
+@@ -273,7 +274,7 @@ int target_mprotect(abi_ulong start, abi
+ static int do_munmap(void *addr, size_t len)
+ {
+ if (reserved_va) {
+- void *ptr = mmap(addr, len, PROT_NONE,
++ void *ptr = mmap_fixed_noreplace(addr, len, PROT_NONE,
+ MAP_FIXED | MAP_ANONYMOUS
+ | MAP_PRIVATE | MAP_NORESERVE, -1, 0);
+ return ptr == addr ? 0 : -1;
+@@ -319,9 +320,9 @@ static bool mmap_frag(abi_ulong real_sta
+ * outside of the fragment we need to map. Allocate a new host
+ * page to cover, discarding whatever else may have been present.
+ */
+- void *p = mmap(host_start, qemu_host_page_size,
+- target_to_host_prot(prot),
+- flags | MAP_ANONYMOUS, -1, 0);
++ void *p = mmap_fixed_noreplace(host_start, qemu_host_page_size,
++ target_to_host_prot(prot),
++ flags | MAP_ANONYMOUS, -1, 0);
+ if (p != host_start) {
+ if (p != MAP_FAILED) {
+ munmap(p, qemu_host_page_size);
+@@ -420,8 +421,9 @@ abi_ulong mmap_find_vma(abi_ulong start,
+ * - mremap() with MREMAP_FIXED flag
+ * - shmat() with SHM_REMAP flag
+ */
+- ptr = mmap(g2h_untagged(addr), size, PROT_NONE,
+- MAP_ANONYMOUS | MAP_PRIVATE | MAP_NORESERVE, -1, 0);
++ ptr = mmap_fixed_noreplace(g2h_untagged(addr), size, PROT_NONE,
++ MAP_ANONYMOUS | MAP_PRIVATE | MAP_NORESERVE,
++ -1, 0);
+
+ /* ENOMEM, if host address space has no memory */
+ if (ptr == MAP_FAILED) {
+@@ -615,16 +617,16 @@ abi_long target_mmap(abi_ulong start, ab
+ * especially important if qemu_host_page_size >
+ * qemu_real_host_page_size.
+ */
+- p = mmap(g2h_untagged(start), host_len, host_prot,
+- flags | MAP_FIXED | MAP_ANONYMOUS, -1, 0);
++ p = mmap_fixed_noreplace(g2h_untagged(start), host_len, host_prot,
++ flags | MAP_FIXED | MAP_ANONYMOUS, -1, 0);
+ if (p == MAP_FAILED) {
+ goto fail;
+ }
+ /* update start so that it points to the file position at 'offset' */
+ host_start = (uintptr_t)p;
+ if (!(flags & MAP_ANONYMOUS)) {
+- p = mmap(g2h_untagged(start), len, host_prot,
+- flags | MAP_FIXED, fd, host_offset);
++ p = mmap_fixed_noreplace(g2h_untagged(start), len, host_prot,
++ flags | MAP_FIXED, fd, host_offset);
+ if (p == MAP_FAILED) {
+ munmap(g2h_untagged(start), host_len);
+ goto fail;
+@@ -749,8 +751,9 @@ abi_long target_mmap(abi_ulong start, ab
+ len1 = real_last - real_start + 1;
+ want_p = g2h_untagged(real_start);
+
+- p = mmap(want_p, len1, target_to_host_prot(target_prot),
+- flags, fd, offset1);
++ p = mmap_fixed_noreplace(want_p, len1,
++ target_to_host_prot(target_prot),
++ flags, fd, offset1);
+ if (p != want_p) {
+ if (p != MAP_FAILED) {
+ munmap(p, len1);
+Index: qemu-8.2.1/linux-user/syscall.c
+===================================================================
+--- qemu-8.2.1.orig/linux-user/syscall.c
++++ qemu-8.2.1/linux-user/syscall.c
+@@ -145,6 +145,7 @@
+ #include "qapi/error.h"
+ #include "fd-trans.h"
+ #include "cpu_loop-common.h"
++#include "mmap-fixed.h"
+
+ #ifndef CLONE_IO
+ #define CLONE_IO 0x80000000 /* Clone io context */
diff --git a/meta/recipes-devtools/qemu/qemu/0012-linux-user-workaround-for-missing-MAP_SHARED_VALIDAT.patch b/meta/recipes-devtools/qemu/qemu/0012-linux-user-workaround-for-missing-MAP_SHARED_VALIDAT.patch
new file mode 100644
index 0000000000..48034a4680
--- /dev/null
+++ b/meta/recipes-devtools/qemu/qemu/0012-linux-user-workaround-for-missing-MAP_SHARED_VALIDAT.patch
@@ -0,0 +1,51 @@
+From 5c73e53997df800a742f9cd7355f3045861984bb Mon Sep 17 00:00:00 2001
+From: Frederic Konrad <fkonrad@amd.com>
+Date: Thu, 18 Jan 2024 10:43:44 +0000
+Subject: [PATCH 2/2] linux-user/*: workaround for missing MAP_SHARED_VALIDATE
+
+QEMU v8.1.0 recently requires MAP_SHARED_VALIDATE flags implementation for mmap.
+
+This is missing from the Ubuntu 18.04 compiler but looks like to be in the
+kernel source.
+
+Signed-off-by: Frederic Konrad <fkonrad@amd.com>
+Signed-off-by: Francisco Iglesias <francisco.iglesias@amd.com>
+
+Upstream-Status: Inappropriate [OE specific]
+
+The upstream only supports the last two major releases of an OS. The ones
+they have declared all have kernel 4.17 or newer.
+
+See:
+https://xilinx.slack.com/archives/D04G2647CTV/p1705074697942019
+
+https://www.qemu.org/docs/master/about/build-platforms.html
+
+ The project aims to support the most recent major version at all times for up
+ to five years after its initial release. Support for the previous major
+ version will be dropped 2 years after the new major version is released or
+ when the vendor itself drops support, whichever comes first.
+
+Signed-off-by: Mark Hatle <mark.hatle@amd.com>
+---
+ linux-user/mmap-fixed.h | 4 ++++
+ 1 file changed, 4 insertions(+)
+
+diff --git a/linux-user/mmap-fixed.h b/linux-user/mmap-fixed.h
+index ef6eef5114..ec86586c1f 100644
+--- a/linux-user/mmap-fixed.h
++++ b/linux-user/mmap-fixed.h
+@@ -26,6 +26,10 @@
+ #ifndef MMAP_FIXED_H
+ #define MMAP_FIXED_H
+
++#ifndef MAP_SHARED_VALIDATE
++#define MAP_SHARED_VALIDATE 0x03
++#endif
++
+ #ifndef MAP_FIXED_NOREPLACE
+ #define MAP_FIXED_NOREPLACE 0x100000
+
+--
+2.34.1
+
diff --git a/meta/recipes-devtools/qemu/qemu/4a8579ad8629b57a43daa62e46cc7af6e1078116.patch b/meta/recipes-devtools/qemu/qemu/4a8579ad8629b57a43daa62e46cc7af6e1078116.patch
new file mode 100644
index 0000000000..5ad859ebe6
--- /dev/null
+++ b/meta/recipes-devtools/qemu/qemu/4a8579ad8629b57a43daa62e46cc7af6e1078116.patch
@@ -0,0 +1,60 @@
+From 4a8579ad8629b57a43daa62e46cc7af6e1078116 Mon Sep 17 00:00:00 2001
+From: Richard Henderson <richard.henderson@linaro.org>
+Date: Tue, 13 Feb 2024 10:20:27 -1000
+Subject: [PATCH] linux-user: Split out do_munmap
+MIME-Version: 1.0
+Content-Type: text/plain; charset=UTF-8
+Content-Transfer-Encoding: 8bit
+
+Upstream-Status: Submitted [https://gitlab.com/rth7680/qemu/-/commit/4a8579ad8629b57a43daa62e46cc7af6e1078116]
+
+Reviewed-by: Philippe Mathieu-Daudé <philmd@linaro.org>
+Signed-off-by: Richard Henderson <richard.henderson@linaro.org>
+---
+ linux-user/mmap.c | 23 ++++++++++++++++-------
+ 1 file changed, 16 insertions(+), 7 deletions(-)
+
+diff --git a/linux-user/mmap.c b/linux-user/mmap.c
+index 1bbfeb25b14..8ebcca44444 100644
+--- a/linux-user/mmap.c
++++ b/linux-user/mmap.c
+@@ -267,6 +267,21 @@ int target_mprotect(abi_ulong start, abi_ulong len, int target_prot)
+ return ret;
+ }
+
++/*
++ * Perform munmap on behalf of the target, with host parameters.
++ * If reserved_va, we must replace the memory reservation.
++ */
++static int do_munmap(void *addr, size_t len)
++{
++ if (reserved_va) {
++ void *ptr = mmap(addr, len, PROT_NONE,
++ MAP_FIXED | MAP_ANONYMOUS
++ | MAP_PRIVATE | MAP_NORESERVE, -1, 0);
++ return ptr == addr ? 0 : -1;
++ }
++ return munmap(addr, len);
++}
++
+ /* map an incomplete host page */
+ static bool mmap_frag(abi_ulong real_start, abi_ulong start, abi_ulong last,
+ int prot, int flags, int fd, off_t offset)
+@@ -854,13 +869,7 @@ static int mmap_reserve_or_unmap(abi_ulong start, abi_ulong len)
+ real_len = real_last - real_start + 1;
+ host_start = g2h_untagged(real_start);
+
+- if (reserved_va) {
+- void *ptr = mmap(host_start, real_len, PROT_NONE,
+- MAP_FIXED | MAP_ANONYMOUS
+- | MAP_PRIVATE | MAP_NORESERVE, -1, 0);
+- return ptr == host_start ? 0 : -1;
+- }
+- return munmap(host_start, real_len);
++ return do_munmap(host_start, real_len);
+ }
+
+ int target_munmap(abi_ulong start, abi_ulong len)
+--
+GitLab
+
diff --git a/meta/recipes-devtools/qemu/qemu/CVE-2023-6683.patch b/meta/recipes-devtools/qemu/qemu/CVE-2023-6683.patch
new file mode 100644
index 0000000000..732cb6af18
--- /dev/null
+++ b/meta/recipes-devtools/qemu/qemu/CVE-2023-6683.patch
@@ -0,0 +1,91 @@
+From 405484b29f6548c7b86549b0f961b906337aa68a Mon Sep 17 00:00:00 2001
+From: Fiona Ebner <f.ebner@proxmox.com>
+Date: Wed, 24 Jan 2024 11:57:48 +0100
+Subject: [PATCH] ui/clipboard: mark type as not available when there is no
+ data
+MIME-Version: 1.0
+Content-Type: text/plain; charset=UTF-8
+Content-Transfer-Encoding: 8bit
+
+With VNC, a client can send a non-extended VNC_MSG_CLIENT_CUT_TEXT
+message with len=0. In qemu_clipboard_set_data(), the clipboard info
+will be updated setting data to NULL (because g_memdup(data, size)
+returns NULL when size is 0). If the client does not set the
+VNC_ENCODING_CLIPBOARD_EXT feature when setting up the encodings, then
+the 'request' callback for the clipboard peer is not initialized.
+Later, because data is NULL, qemu_clipboard_request() can be reached
+via vdagent_chr_write() and vdagent_clipboard_recv_request() and
+there, the clipboard owner's 'request' callback will be attempted to
+be called, but that is a NULL pointer.
+
+In particular, this can happen when using the KRDC (22.12.3) VNC
+client.
+
+Another scenario leading to the same issue is with two clients (say
+noVNC and KRDC):
+
+The noVNC client sets the extension VNC_FEATURE_CLIPBOARD_EXT and
+initializes its cbpeer.
+
+The KRDC client does not, but triggers a vnc_client_cut_text() (note
+it's not the _ext variant)). There, a new clipboard info with it as
+the 'owner' is created and via qemu_clipboard_set_data() is called,
+which in turn calls qemu_clipboard_update() with that info.
+
+In qemu_clipboard_update(), the notifier for the noVNC client will be
+called, i.e. vnc_clipboard_notify() and also set vs->cbinfo for the
+noVNC client. The 'owner' in that clipboard info is the clipboard peer
+for the KRDC client, which did not initialize the 'request' function.
+That sounds correct to me, it is the owner of that clipboard info.
+
+Then when noVNC sends a VNC_MSG_CLIENT_CUT_TEXT message (it did set
+the VNC_FEATURE_CLIPBOARD_EXT feature correctly, so a check for it
+passes), that clipboard info is passed to qemu_clipboard_request() and
+the original segfault still happens.
+
+Fix the issue by handling updates with size 0 differently. In
+particular, mark in the clipboard info that the type is not available.
+
+While at it, switch to g_memdup2(), because g_memdup() is deprecated.
+
+Cc: qemu-stable@nongnu.org
+Fixes: CVE-2023-6683
+Reported-by: Markus Frank <m.frank@proxmox.com>
+Suggested-by: Marc-André Lureau <marcandre.lureau@redhat.com>
+Signed-off-by: Fiona Ebner <f.ebner@proxmox.com>
+Reviewed-by: Marc-André Lureau <marcandre.lureau@redhat.com>
+Tested-by: Markus Frank <m.frank@proxmox.com>
+Message-ID: <20240124105749.204610-1-f.ebner@proxmox.com>
+
+CVE: CVE-2023-6683
+
+Upstream-Status: Backport [https://github.com/qemu/qemu/commit/405484b29f6548c7b86549b0f961b906337aa68a]
+Signed-off-by: Simone Weiß <simone.p.weiss@posteo.com>
+
+---
+ ui/clipboard.c | 12 +++++++++---
+ 1 file changed, 9 insertions(+), 3 deletions(-)
+
+diff --git a/ui/clipboard.c b/ui/clipboard.c
+index 3d14bffaf80f..b3f6fa3c9e1f 100644
+--- a/ui/clipboard.c
++++ b/ui/clipboard.c
+@@ -163,9 +163,15 @@ void qemu_clipboard_set_data(QemuClipboardPeer *peer,
+ }
+
+ g_free(info->types[type].data);
+- info->types[type].data = g_memdup(data, size);
+- info->types[type].size = size;
+- info->types[type].available = true;
++ if (size) {
++ info->types[type].data = g_memdup2(data, size);
++ info->types[type].size = size;
++ info->types[type].available = true;
++ } else {
++ info->types[type].data = NULL;
++ info->types[type].size = 0;
++ info->types[type].available = false;
++ }
+
+ if (update) {
+ qemu_clipboard_update(info);
diff --git a/meta/recipes-devtools/qemu/qemu/cross.patch b/meta/recipes-devtools/qemu/qemu/cross.patch
deleted file mode 100644
index d1256a1229..0000000000
--- a/meta/recipes-devtools/qemu/qemu/cross.patch
+++ /dev/null
@@ -1,41 +0,0 @@
-From 76c3fc4c87231bed32974ebbbdb5079cff45a6b7 Mon Sep 17 00:00:00 2001
-From: Richard Purdie <richard.purdie@linuxfoundation.org>
-Date: Tue, 5 Jan 2021 23:00:14 +0000
-Subject: [PATCH 12/12] qemu: Upgrade 5.1.0->5.2.0
-
-We need to be able to trigger configure's cross code but we don't want
-to set cross_prefix as it does other things we don't want. Patch things
-so we can do what we need in the target config case.
-
-Upstream-Status: Inappropriate [may be rewritten in a way upstream may accept?]
-Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
-
----
- configure | 4 ----
- 1 file changed, 4 deletions(-)
-
-diff --git a/configure b/configure
-index 7c08c1835..0613279f9 100755
---- a/configure
-+++ b/configure
-@@ -3118,7 +3118,6 @@ if test "$skip_meson" = no; then
- fi
- echo "strip = [$(meson_quote $strip)]" >> $cross
- echo "windres = [$(meson_quote $windres)]" >> $cross
-- if test "$cross_compile" = "yes"; then
- cross_arg="--cross-file config-meson.cross"
- echo "[host_machine]" >> $cross
- echo "system = '$targetos'" >> $cross
-@@ -3136,9 +3135,6 @@ if test "$skip_meson" = no; then
- else
- echo "endian = 'little'" >> $cross
- fi
-- else
-- cross_arg="--native-file config-meson.cross"
-- fi
- mv $cross config-meson.cross
-
- rm -rf meson-private meson-info meson-logs
---
-2.30.2
-
diff --git a/meta/recipes-devtools/qemu/qemu/fixedmeson.patch b/meta/recipes-devtools/qemu/qemu/fixedmeson.patch
new file mode 100644
index 0000000000..9047f66dc3
--- /dev/null
+++ b/meta/recipes-devtools/qemu/qemu/fixedmeson.patch
@@ -0,0 +1,20 @@
+Upstream-Status: Inappropriate [workaround, would need a real fix for upstream]
+
+Index: qemu-8.2.0/configure
+===================================================================
+--- qemu-8.2.0.orig/configure
++++ qemu-8.2.0/configure
+@@ -955,12 +955,7 @@ fi
+ $mkvenv ensuregroup --dir "${source_path}/python/wheels" \
+ ${source_path}/pythondeps.toml meson || exit 1
+
+-# At this point, we expect Meson to be installed and available.
+-# We expect mkvenv or pip to have created pyvenv/bin/meson for us.
+-# We ignore PATH completely here: we want to use the venv's Meson
+-# *exclusively*.
+-
+-meson="$(cd pyvenv/bin; pwd)/meson"
++meson=`which meson`
+
+ # Conditionally ensure Sphinx is installed.
+
diff --git a/meta/recipes-devtools/qemu/qemu/no-pip.patch b/meta/recipes-devtools/qemu/qemu/no-pip.patch
new file mode 100644
index 0000000000..92b2edbe9f
--- /dev/null
+++ b/meta/recipes-devtools/qemu/qemu/no-pip.patch
@@ -0,0 +1,45 @@
+qemu: Ensure pip and the python venv aren't used for meson
+
+Qemu wants to use a supported python version and a specific meson version
+to "help" users and uses pip and creates a venv to do this. This is a nightmare
+for us. Our versions stay up to date and should be supported so we don't
+really need/want this wrapping. Tweak things to disable it.
+
+There was breakage from the wrapper shown by:
+
+bitbake qemu-system-native
+<add DISTRO_FEATURES:remove = "opengl" to local.conf>
+bitbake qemu-system-native -c configure
+
+which would crash. The issue is the change in configuration removes pieces
+from the sysroot but pyc files remainm as do pieces of pip which causes
+problems.
+
+Ideally we'd convince upstream to allow some way to disable the venv on
+the understanding that if/when it breaks, we keep the pieces. The patch
+as it stands is a workaround.
+
+Upstream-Status: Inappropriate [oe specific]
+Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
+
+Index: qemu-8.2.0/configure
+===================================================================
+--- qemu-8.2.0.orig/configure
++++ qemu-8.2.0/configure
+@@ -937,7 +937,7 @@ python="$(command -v "$python")"
+ echo "python determined to be '$python'"
+ echo "python version: $($python --version)"
+
+-python="$($python -B "${source_path}/python/scripts/mkvenv.py" create pyvenv)"
++python=python3
+ if test "$?" -ne 0 ; then
+ error_exit "python venv creation failed"
+ fi
+@@ -945,6 +945,7 @@ fi
+ # Suppress writing compiled files
+ python="$python -B"
+ mkvenv="$python ${source_path}/python/scripts/mkvenv.py"
++mkvenv=true
+
+ # Finish preparing the virtual environment using vendored .whl files
+
diff --git a/meta/recipes-devtools/qemu/qemu/qemu-guest-agent.init b/meta/recipes-devtools/qemu/qemu/qemu-guest-agent.init
new file mode 100644
index 0000000000..5ebaaddeae
--- /dev/null
+++ b/meta/recipes-devtools/qemu/qemu/qemu-guest-agent.init
@@ -0,0 +1,75 @@
+# SPDX-License-Identifier: GPL-2.0-only
+# Initially written by: Michael Tokarev <mjt@tls.msk.ru>
+# For QEMU Debian downstream package
+
+set -e
+
+. /etc/init.d/functions
+
+PATH=/sbin:/usr/sbin:/bin:/usr/bin
+DESC="QEMU Guest Agent"
+NAME=qemu-ga
+DAEMON=@bindir@/$NAME
+PIDFILE=/var/run/$NAME.pid
+
+# config
+DAEMON_ARGS=""
+# default transport
+TRANSPORT=virtio-serial:/dev/virtio-ports/org.qemu.guest_agent.0
+NO_START=0
+
+test ! -r /etc/default/qemu-guest-agent || . /etc/default/qemu-guest-agent
+test "$NO_START" = "0" || exit 0
+test -x "$DAEMON" || exit 0
+
+#
+# Function that checks whenever system has necessary environment
+# It also splits $TRANSPORT into $method and $path
+#
+do_check_transport() {
+ method=${TRANSPORT%%:*};
+ path=${TRANSPORT#*:}
+ case "$method" in
+ virtio-serial | isa-serial)
+ if [ ! -e "$path" ]; then
+ echo "$NAME: transport endpoint not found, not starting"
+ return 1
+ fi
+ ;;
+ esac
+}
+
+case "$1" in
+ start)
+ do_check_transport || exit 0
+ echo -n "Starting $DESC: "
+ start-stop-daemon -S -p $PIDFILE -x "$DAEMON" -- \
+ $DAEMON_ARGS -d -m "$method" -p "$path"
+ echo "$NAME."
+ ;;
+ stop)
+ echo -n "Stopping $DESC: "
+ start-stop-daemon -K -x "$DAEMON" -p $PIDFILE
+ echo "$NAME."
+ ;;
+ status)
+ status "$DAEMON"
+ exit $?
+ ;;
+ restart|force-reload)
+ do_check_transport || exit 0
+ echo -n "Restarting $DESC: "
+ start-stop-daemon -K -x "$DAEMON" -p $PIDFILE
+ sleep 1
+ start-stop-daemon -S -p $PIDFILE -x "$DAEMON" -- \
+ $DAEMON_ARGS -d -m "$method" -p "$path"
+ echo "$NAME."
+ ;;
+ *)
+ N=/etc/init.d/$NAME
+ echo "Usage: $N {start|stop|status|restart|force-reload}" >&2
+ exit 1
+ ;;
+esac
+
+exit 0
diff --git a/meta/recipes-devtools/qemu/qemu/qemu-guest-agent.udev b/meta/recipes-devtools/qemu/qemu/qemu-guest-agent.udev
new file mode 100644
index 0000000000..47097057e3
--- /dev/null
+++ b/meta/recipes-devtools/qemu/qemu/qemu-guest-agent.udev
@@ -0,0 +1,2 @@
+SUBSYSTEM=="virtio-ports", ATTR{name}=="org.qemu.guest_agent.0", \
+ TAG+="systemd", ENV{SYSTEMD_WANTS}="qemu-guest-agent.service"
diff --git a/meta/recipes-devtools/qemu/qemu_7.0.0.bb b/meta/recipes-devtools/qemu/qemu_7.0.0.bb
deleted file mode 100644
index 9f7fad9886..0000000000
--- a/meta/recipes-devtools/qemu/qemu_7.0.0.bb
+++ /dev/null
@@ -1,27 +0,0 @@
-BBCLASSEXTEND = "nativesdk"
-
-require qemu.inc
-
-DEPENDS = "glib-2.0 zlib pixman bison-native ninja-native meson-native"
-
-DEPENDS:append:libc-musl = " libucontext"
-
-CFLAGS += "${@bb.utils.contains('DISTRO_FEATURES', 'x11', '', '-DEGL_NO_X11=1', d)}"
-
-RDEPENDS:${PN}:class-target += "bash"
-
-EXTRA_OECONF:append:class-target = " --target-list=${@get_qemu_target_list(d)}"
-EXTRA_OECONF:append:class-target:mipsarcho32 = "${@bb.utils.contains('BBEXTENDCURR', 'multilib', ' --disable-capstone', '', d)}"
-EXTRA_OECONF:append:class-nativesdk = " --target-list=${@get_qemu_target_list(d)}"
-
-PACKAGECONFIG ??= " \
- fdt sdl kvm pie \
- ${@bb.utils.filter('DISTRO_FEATURES', 'alsa xen', d)} \
- ${@bb.utils.contains('DISTRO_FEATURES', 'opengl', 'virglrenderer epoxy', '', d)} \
- ${@bb.utils.filter('DISTRO_FEATURES', 'seccomp', d)} \
-"
-PACKAGECONFIG:class-nativesdk ??= "fdt sdl kvm pie \
- ${@bb.utils.contains('DISTRO_FEATURES', 'opengl', 'virglrenderer epoxy', '', d)} \
-"
-# ppc32 hosts are no longer supported in qemu
-COMPATIBLE_HOST:powerpc = "null"
diff --git a/meta/recipes-devtools/qemu/qemu_8.2.1.bb b/meta/recipes-devtools/qemu/qemu_8.2.1.bb
new file mode 100644
index 0000000000..dc1352232e
--- /dev/null
+++ b/meta/recipes-devtools/qemu/qemu_8.2.1.bb
@@ -0,0 +1,27 @@
+BBCLASSEXTEND = "nativesdk"
+
+require qemu.inc
+
+DEPENDS += "glib-2.0 zlib pixman"
+
+DEPENDS:append:libc-musl = " libucontext"
+
+CFLAGS += "${@bb.utils.contains('DISTRO_FEATURES', 'x11', '', '-DEGL_NO_X11=1', d)}"
+
+RDEPENDS:${PN}-common:class-target += "bash"
+
+EXTRA_OECONF:append:class-target = " --target-list=${@get_qemu_target_list(d)}"
+EXTRA_OECONF:append:class-target:mipsarcho32 = "${@bb.utils.contains('BBEXTENDCURR', 'multilib', ' --disable-capstone', '', d)}"
+EXTRA_OECONF:append:class-nativesdk = " --target-list=${@get_qemu_target_list(d)}"
+
+PACKAGECONFIG ??= " \
+ fdt sdl kvm pie slirp \
+ ${@bb.utils.filter('DISTRO_FEATURES', 'alsa pulseaudio xen', d)} \
+ ${@bb.utils.contains('DISTRO_FEATURES', 'opengl', 'virglrenderer epoxy', '', d)} \
+ ${@bb.utils.filter('DISTRO_FEATURES', 'seccomp', d)} \
+"
+PACKAGECONFIG:class-nativesdk ??= "fdt sdl kvm pie slirp \
+ ${@bb.utils.contains('DISTRO_FEATURES', 'opengl', 'virglrenderer epoxy', '', d)} \
+"
+# ppc32 hosts are no longer supported in qemu
+COMPATIBLE_HOST:powerpc = "null"
diff --git a/meta/recipes-devtools/quilt/quilt.inc b/meta/recipes-devtools/quilt/quilt.inc
index 07611e6d85..3374f4dfbf 100644
--- a/meta/recipes-devtools/quilt/quilt.inc
+++ b/meta/recipes-devtools/quilt/quilt.inc
@@ -9,9 +9,11 @@ LIC_FILES_CHKSUM = "file://COPYING;md5=94d55d512a9ba36caa9b7df079bae19f"
SRC_URI = "${SAVANNAH_GNU_MIRROR}/quilt/quilt-${PV}.tar.gz \
file://run-ptest \
- file://Makefile \
file://test.sh \
file://0001-tests-Allow-different-output-from-mv.patch \
+ file://fix-grep-3.8.patch \
+ file://faildiff-order.patch \
+ file://0001-test-Fix-a-race-condition-in-merge.test.patch \
"
SRC_URI:append:class-target = " file://gnu_patch_test_fix_target.patch"
@@ -60,17 +62,24 @@ do_install:append:class-native () {
touch ${D}${sysconfdir}/quiltrc
}
-do_compile_ptest() {
- oe_runmake bin/patch-wrapper test/.depend
-}
+# The tests need to run as a non-root user, so pull in the ptest user
+DEPENDS:append:class-target = "${@bb.utils.contains('PTEST_ENABLED', '1', ' ptest-runner', '', d)}"
+PACKAGE_WRITE_DEPS += "ptest-runner"
do_install_ptest() {
- tar -c --exclude=\*.in bin/ | ( cd ${D}${PTEST_PATH} && tar -xf - )
- tar -c --exclude=\*.in compat/ | ( cd ${D}${PTEST_PATH} && tar -xf - )
- tar -c --exclude=\*.in quilt/ | ( cd ${D}${PTEST_PATH} && tar -xf - )
- tar -c --exclude=mail.test --exclude=delete.test test/ | ( cd ${D}${PTEST_PATH} && tar -xf - && chmod 777 test)
- cp ${WORKDIR}/Makefile ${D}${PTEST_PATH}
- cp ${WORKDIR}/test.sh ${D}${PTEST_PATH}
+ install ${WORKDIR}/test.sh ${D}${PTEST_PATH}
+ mkdir ${D}${PTEST_PATH}/test
+ install ${S}/test/* ${D}${PTEST_PATH}/test
+ # mail needs a MTA, and the patch-wrapper is disabled
+ rm -f ${D}${PTEST_PATH}/test/mail.test ${D}${PTEST_PATH}/test/patch-wrapper.test
+}
+
+# ptest.bbclass currently chowns the ptest directory explicitly, so we need to
+# change permission after that has happened so the ptest user can write a
+# temporary directory.
+do_install_ptest_base:append() {
+ chgrp ptest ${D}${PTEST_PATH}/test
+ chmod g+w ${D}${PTEST_PATH}/test
}
PACKAGES += "guards guards-doc"
@@ -84,9 +93,9 @@ FILES:guards-doc = "${mandir}/man1/guards.1"
RDEPENDS:${PN} = "bash patch diffstat bzip2 util-linux less"
RDEPENDS:${PN}:class-native = "diffstat-native patch-native bzip2-native"
-RDEPENDS:${PN}-ptest = "make file sed gawk diffutils findutils ed perl \
- perl-module-filehandle perl-module-getopt-std \
- perl-module-posix perl-module-file-temp \
- perl-module-text-parsewords perl-module-overloading \
- bash util-linux-getopt patch \
- "
+RDEPENDS:${PN}-ptest += "file sed gawk diffutils findutils ed perl \
+ perl-module-filehandle perl-module-getopt-std \
+ perl-module-posix perl-module-file-temp \
+ perl-module-text-parsewords perl-module-overloading \
+ bash util-linux-getopt ptest-runner \
+ "
diff --git a/meta/recipes-devtools/quilt/quilt/0001-test-Fix-a-race-condition-in-merge.test.patch b/meta/recipes-devtools/quilt/quilt/0001-test-Fix-a-race-condition-in-merge.test.patch
new file mode 100644
index 0000000000..01d4c8befc
--- /dev/null
+++ b/meta/recipes-devtools/quilt/quilt/0001-test-Fix-a-race-condition-in-merge.test.patch
@@ -0,0 +1,48 @@
+From c1ce964f3e9312100a60f03c1e1fdd601e1911f2 Mon Sep 17 00:00:00 2001
+From: =?UTF-8?q?=C4=90o=C3=A0n=20Tr=E1=BA=A7n=20C=C3=B4ng=20Danh?=
+ <congdanhqx@gmail.com>
+Date: Tue, 28 Feb 2023 18:45:15 +0100
+Subject: [PATCH] test: Fix a race condition in merge.test
+MIME-Version: 1.0
+Content-Type: text/plain; charset=UTF-8
+Content-Transfer-Encoding: 8bit
+
+Just like commit 4dfe7f9, (test: Fix a race condition, 2023-01-20),
+this fix a test race when stdout and stderr in any order.
+
+Upstream-Status: Backport [https://git.savannah.nongnu.org/cgit/quilt.git/commit/?id=c1ce964f3e9312100a60f03c1e1fdd601e1911f2]
+Signed-off-by: Đoàn Trần Công Danh <congdanhqx@gmail.com>
+Signed-off-by: Jean Delvare <jdelvare@suse.de>
+---
+ test/merge.test | 6 ++++--
+ 1 file changed, 4 insertions(+), 2 deletions(-)
+
+diff --git a/test/merge.test b/test/merge.test
+index c64b33d..2e67d4f 100644
+--- a/test/merge.test
++++ b/test/merge.test
+@@ -39,8 +39,9 @@ Test the patch merging functionality of `quilt diff'.
+ > Applying patch %{P}c.diff
+ > Now at patch %{P}c.diff
+
+- $ quilt diff -P b.diff | grep -v "^\\(---\\|+++\\)"
++ $ quilt diff -P b.diff >/dev/null
+ > Warning: more recent patches modify files in patch %{P}b.diff
++ $ quilt diff -P b.diff 2>/dev/null | grep -v "^\\(---\\|+++\\)"
+ >~ Index: [^/]+/abc\.txt
+ > ===================================================================
+ > @@ -1,3 +1,3 @@
+@@ -49,8 +50,9 @@ Test the patch merging functionality of `quilt diff'.
+ > +b+
+ > c
+
+- $ quilt diff --combine a.diff -P b.diff | grep -v "^\\(---\\|+++\\)"
++ $ quilt diff --combine a.diff -P b.diff >/dev/null
+ > Warning: more recent patches modify files in patch %{P}b.diff
++ $ quilt diff --combine a.diff -P b.diff 2>/dev/null | grep -v "^\\(---\\|+++\\)"
+ >~ Index: [^/]+/abc\.txt
+ > ===================================================================
+ > @@ -1,3 +1,3 @@
+--
+2.40.0
+
diff --git a/meta/recipes-devtools/quilt/quilt/Makefile b/meta/recipes-devtools/quilt/quilt/Makefile
deleted file mode 100644
index 1f6cd2479c..0000000000
--- a/meta/recipes-devtools/quilt/quilt/Makefile
+++ /dev/null
@@ -1,14 +0,0 @@
-PATH := $(CURDIR)/bin:$(CURDIR)/compat:$(PATH)
-QUILT_DIR := $(CURDIR)/quilt
-QUILTRC := $(CURDIR)/test/test.quiltrc
-export QUILT_DIR QUILTRC
-CHECK_ENV := P=patches/; _P=../patches/; export P _P;
-CHECK_ENV += QUILT_PC=.pc; export QUILT_PC
--include test/.depend
-
-check-% : test/%.test
- @LANG=C; LC_ALL=C; \
- export LANG LC_ALL; \
- $(CHECK_ENV); \
- cd $(<D); \
- ./run -q $(<F)
diff --git a/meta/recipes-devtools/quilt/quilt/faildiff-order.patch b/meta/recipes-devtools/quilt/quilt/faildiff-order.patch
new file mode 100644
index 0000000000..f22065a250
--- /dev/null
+++ b/meta/recipes-devtools/quilt/quilt/faildiff-order.patch
@@ -0,0 +1,41 @@
+Upstream-Status: Backport
+Signed-off-by: Ross Burton <ross.burton@arm.com>
+
+From 4dfe7f9e702c85243a71e4de267a13e434b6d6c2 Mon Sep 17 00:00:00 2001
+From: Jean Delvare <jdelvare@suse.de>
+Date: Fri, 20 Jan 2023 12:56:08 +0100
+Subject: [PATCH] test: Fix a race condition
+
+The test suite does not differentiate between stdout and stderr. When
+messages are printed to both, the order in which they will reach us
+is apparently not guaranteed. Ideally this would be deterministic, but
+until then, explicitly test stdout and stderr separately in the test
+case itself. Otherwise the test suite fails randomly, which is a pain
+for distribution package maintainers.
+
+This fixes bug #63651 reported by Ross Burton:
+https://savannah.nongnu.org/bugs/index.php?63651
+
+Signed-off-by: Jean Delvare <jdelvare@suse.de>
+---
+ test/faildiff.test | 3 ++-
+ 1 file changed, 2 insertions(+), 1 deletion(-)
+
+diff --git a/test/faildiff.test b/test/faildiff.test
+index 5afb8e3..0444c15 100644
+--- a/test/faildiff.test
++++ b/test/faildiff.test
+@@ -27,8 +27,9 @@ What happens on binary files?
+ > File test.bin added to patch %{P}test.diff
+
+ $ printf "\\003\\000\\001" > test.bin
+- $ quilt diff -pab --no-index
++ $ quilt diff -pab --no-index 2>/dev/null
+ >~ (Files|Binary files) a/test\.bin and b/test\.bin differ
++ $ quilt diff -pab --no-index >/dev/null
+ > Diff failed on file 'test.bin', aborting
+ $ echo %{?}
+ > 1
+--
+2.34.1
+
diff --git a/meta/recipes-devtools/quilt/quilt/fix-grep-3.8.patch b/meta/recipes-devtools/quilt/quilt/fix-grep-3.8.patch
new file mode 100644
index 0000000000..68a4b4c195
--- /dev/null
+++ b/meta/recipes-devtools/quilt/quilt/fix-grep-3.8.patch
@@ -0,0 +1,144 @@
+From f73f8d7f71de2878d3f92881a5fcb8eafd78cb5f Mon Sep 17 00:00:00 2001
+From: Jean Delvare <jdelvare@suse.de>
+Date: Fri, 9 Sep 2022 10:10:37 +0200
+Subject: Avoid warnings with grep 3.8
+
+GNU grep version 3.8 became more strict about needless quoting in
+patterns. We have one occurrence of that in quilt, where "/"
+characters are being quoted by default. There are cases where they
+indeed need to be quoted (typically when used in a sed s/// command)
+but most of the time they do not, and this results in the following
+warning:
+
+grep: warning: stray \ before /
+
+So rename quote_bre() to quote_sed_re(), and introduce
+quote_grep_re() which does not quote "/".
+
+Signed-off-by: Jean Delvare <jdelvare@suse.de>
+Upstream-Status: Backport [https://git.savannah.nongnu.org/cgit/quilt.git/commit/?id=f73f8d7f71de2878d3f92881a5fcb8eafd78cb5f]
+Signed-off-by: Alexander Kanavin <alex@linutronix.de>
+---
+ quilt/diff.in | 2 +-
+ quilt/patches.in | 2 +-
+ quilt/scripts/patchfns.in | 20 +++++++++++++-------
+ quilt/upgrade.in | 4 ++--
+ 4 files changed, 17 insertions(+), 11 deletions(-)
+
+diff --git a/quilt/diff.in b/quilt/diff.in
+index e90dc33..07788ff 100644
+--- a/quilt/diff.in
++++ b/quilt/diff.in
+@@ -255,7 +255,7 @@ then
+ # Add all files in the snapshot into the file list (they may all
+ # have changed).
+ files=( $(find $QUILT_PC/$snap_subdir -type f \
+- | sed -e "s/^$(quote_bre $QUILT_PC/$snap_subdir/)//" \
++ | sed -e "s/^$(quote_sed_re $QUILT_PC/$snap_subdir/)//" \
+ | sort) )
+ printf "%s\n" "${files[@]}" >&4
+ unset files
+diff --git a/quilt/patches.in b/quilt/patches.in
+index bb17a46..eac45a9 100644
+--- a/quilt/patches.in
++++ b/quilt/patches.in
+@@ -60,7 +60,7 @@ scan_unapplied()
+ # Quote each file name only once
+ for file in "${opt_files[@]}"
+ do
+- files_bre[${#files_bre[@]}]=$(quote_bre "$file")
++ files_bre[${#files_bre[@]}]=$(quote_grep_re "$file")
+ done
+
+ # "Or" all files in a single pattern
+diff --git a/quilt/scripts/patchfns.in b/quilt/scripts/patchfns.in
+index c2d5f9d..1bd7233 100644
+--- a/quilt/scripts/patchfns.in
++++ b/quilt/scripts/patchfns.in
+@@ -78,8 +78,14 @@ array_join()
+ done
+ }
+
+-# Quote a string for use in a basic regular expression.
+-quote_bre()
++# Quote a string for use in a regular expression for a grep pattern.
++quote_grep_re()
++{
++ echo "$1" | sed -e 's:\([][^$.*\\]\):\\\1:g'
++}
++
++# Quote a string for use in a regular expression for a sed s/// command.
++quote_sed_re()
+ {
+ echo "$1" | sed -e 's:\([][^$/.*\\]\):\\\1:g'
+ }
+@@ -215,7 +221,7 @@ patch_in_series()
+
+ if [ -e "$SERIES" ]
+ then
+- grep -q "^$(quote_bre $patch)\([ \t]\|$\)" "$SERIES"
++ grep -q "^$(quote_grep_re $patch)\([ \t]\|$\)" "$SERIES"
+ else
+ return 1
+ fi
+@@ -365,7 +371,7 @@ is_applied()
+ {
+ local patch=$1
+ [ -e $DB ] || return 1
+- grep -q "^$(quote_bre $patch)\$" $DB
++ grep -q "^$(quote_grep_re $patch)\$" $DB
+ }
+
+ applied_patches()
+@@ -465,7 +471,7 @@ remove_from_db()
+ local tmpfile
+ if tmpfile=$(gen_tempfile)
+ then
+- grep -v "^$(quote_bre $patch)\$" $DB > $tmpfile
++ grep -v "^$(quote_grep_re $patch)\$" $DB > $tmpfile
+ cat $tmpfile > $DB
+ rm -f $tmpfile
+ [ -s $DB ] || rm -f $DB
+@@ -520,7 +526,7 @@ find_patch()
+ fi
+
+ local patch=${1#$SUBDIR_DOWN$QUILT_PATCHES/}
+- local bre=$(quote_bre "$patch")
++ local bre=$(quote_sed_re "$patch")
+ set -- $(sed -e "/^$bre\(\|\.patch\|\.diff\?\)\(\|\.gz\|\.bz2\|\.xz\|\.lzma\|\.lz\)\([ "$'\t'"]\|$\)/!d" \
+ -e 's/[ '$'\t''].*//' "$SERIES")
+ if [ $# -eq 1 ]
+@@ -631,7 +637,7 @@ files_in_patch()
+ then
+ find "$path" -type f \
+ -a ! -path "$(quote_glob "$path")/.timestamp" |
+- sed -e "s/$(quote_bre "$path")\///"
++ sed -e "s/$(quote_sed_re "$path")\///"
+ fi
+ }
+
+diff --git a/quilt/upgrade.in b/quilt/upgrade.in
+index dbf7d05..866aa33 100644
+--- a/quilt/upgrade.in
++++ b/quilt/upgrade.in
+@@ -74,7 +74,7 @@ printf $"Converting meta-data to version %s\n" "$DB_VERSION"
+
+ for patch in $(applied_patches)
+ do
+- proper_name="$(grep "^$(quote_bre $patch)"'\(\|\.patch\|\.diff?\)\(\|\.gz\|\.bz2\)\([ \t]\|$\)' $SERIES)"
++ proper_name="$(grep "^$(quote_grep_re $patch)"'\(\|\.patch\|\.diff?\)\(\|\.gz\|\.bz2\)\([ \t]\|$\)' $SERIES)"
+ proper_name=${proper_name#$QUILT_PATCHES/}
+ proper_name=${proper_name%% *}
+ if [ -z "$proper_name" ]
+@@ -84,7 +84,7 @@ do
+ fi
+
+ if [ "$patch" != "$proper_name" -a -d $QUILT_PC/$patch ] \
+- && grep -q "^$(quote_bre $patch)\$" \
++ && grep -q "^$(quote_grep_re $patch)\$" \
+ $QUILT_PC/applied-patches
+ then
+ mv $QUILT_PC/$patch $QUILT_PC/$proper_name \
+--
+cgit v1.1
+
diff --git a/meta/recipes-devtools/quilt/quilt/run-ptest b/meta/recipes-devtools/quilt/quilt/run-ptest
index d2de5c855a..f35a756d6b 100755
--- a/meta/recipes-devtools/quilt/quilt/run-ptest
+++ b/meta/recipes-devtools/quilt/quilt/run-ptest
@@ -1,8 +1,3 @@
#!/bin/sh
-THIS_SH=/bin/sh
-ln -sf /bin/ed /usr/bin/ed
-/usr/sbin/adduser --disabled-password --gecos "" quilttest
-su -c "${THIS_SH} ./test.sh" quilttest
-/usr/sbin/deluser quilttest
-rm -f /usr/bin/ed
+su -c ./test.sh ptest
diff --git a/meta/recipes-devtools/quilt/quilt/test.sh b/meta/recipes-devtools/quilt/quilt/test.sh
index 6563e4a2fb..7dac8f4423 100755
--- a/meta/recipes-devtools/quilt/quilt/test.sh
+++ b/meta/recipes-devtools/quilt/quilt/test.sh
@@ -1 +1,24 @@
-for i in `ls test/*.test |awk -F. '{print $1}' |awk -F/ '{print $2}'`; do make check-$i; if [ $? -eq 0 ]; then echo PASS: $i.test; else echo FAIL: $i.test; fi; done
+#! /bin/sh
+
+set -e -u
+
+export LANG=C
+export LC_ALL=C
+export P=patches/
+export _P=../patches/
+export QUILTRC=$(pwd)/test/test.quiltrc
+export QUILT_PC=.pc
+export QUILT_DIR=/usr/share/quilt/
+
+# Specify on the commandline, else runs all of the tests
+TESTS=${@:-test/*.test}
+
+for FILENAME in $TESTS; do
+ TESTNAME=$(basename $FILENAME .test)
+ ./test/run $FILENAME
+ if [ $? -eq 0 ];
+ then echo PASS: $TESTNAME
+ else
+ echo FAIL: $TESTNAME
+ fi
+done
diff --git a/meta/recipes-devtools/repo/repo/0001-python3-shebang.patch b/meta/recipes-devtools/repo/repo/0001-python3-shebang.patch
deleted file mode 100644
index d3888c8bb2..0000000000
--- a/meta/recipes-devtools/repo/repo/0001-python3-shebang.patch
+++ /dev/null
@@ -1,26 +0,0 @@
-From b8e84b202cd302a7c99288d3835dc9c63071f8f2 Mon Sep 17 00:00:00 2001
-From: Jasper Orschulko <Jasper.Orschulko@iris-sensing.com>
-Date: Tue, 14 Sep 2021 16:46:51 +0200
-Subject: [PATCH] python3 shebang
-
-Yocto does not symlink from python to python3, thus change the shebang from
-python to python3.
-
-Upstream-Status: Inappropriate [configuration]
-Signed-off-by: Jasper Orschulko <Jasper.Orschulko@iris-sensing.com>
----
- repo | 2 +-
- 1 file changed, 1 insertion(+), 1 deletion(-)
-
-diff --git a/repo b/repo
-index b13e34c..205e0e5 100755
---- a/repo
-+++ b/repo
-@@ -1,4 +1,4 @@
--#!/usr/bin/env python
-+#!/usr/bin/env python3
- # -*- coding:utf-8 -*-
- #
- # Copyright (C) 2008 The Android Open Source Project
---
-2.33.0
diff --git a/meta/recipes-devtools/repo/repo_2.25.bb b/meta/recipes-devtools/repo/repo_2.25.bb
deleted file mode 100644
index b7332515c8..0000000000
--- a/meta/recipes-devtools/repo/repo_2.25.bb
+++ /dev/null
@@ -1,31 +0,0 @@
-# SPDX-License-Identifier: MIT
-# Copyright (C) 2021 iris-GmbH infrared & intelligent sensors
-
-SUMMARY = "Tool for managing many Git repositories"
-DESCRIPTION = "Repo is a tool built on top of Git. Repo helps manage many Git repositories, does the uploads to revision control systems, and automates parts of the development workflow."
-HOMEPAGE = "https://android.googlesource.com/tools/repo"
-SECTION = "console/utils"
-
-LICENSE = "Apache-2.0"
-LIC_FILES_CHKSUM = "file://LICENSE;md5=3b83ef96387f14655fc854ddc3c6bd57"
-
-SRC_URI = "git://gerrit.googlesource.com/git-repo.git;protocol=https;branch=main \
- file://0001-python3-shebang.patch \
- "
-SRCREV = "501733c2abb1180679f25b2f78970d73a2f8d413"
-
-MIRRORS += "git://gerrit.googlesource.com/git-repo.git git://github.com/GerritCodeReview/git-repo.git"
-
-S = "${WORKDIR}/git"
-
-do_configure:prepend() {
- sed -Ei "s/REPO_REV\s*=\s*('|\")stable('|\")/REPO_REV = '${SRCREV}'/g" ${S}/repo
-}
-
-do_install() {
- install -D ${WORKDIR}/git/repo ${D}${bindir}/repo
-}
-
-RDEPENDS:${PN} = "python3 git"
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/repo/repo_2.44.bb b/meta/recipes-devtools/repo/repo_2.44.bb
new file mode 100644
index 0000000000..875897851a
--- /dev/null
+++ b/meta/recipes-devtools/repo/repo_2.44.bb
@@ -0,0 +1,30 @@
+# SPDX-License-Identifier: MIT
+# Copyright (C) 2021 iris-GmbH infrared & intelligent sensors
+
+SUMMARY = "Tool for managing many Git repositories"
+DESCRIPTION = "Repo is a tool built on top of Git. Repo helps manage many Git repositories, does the uploads to revision control systems, and automates parts of the development workflow."
+HOMEPAGE = "https://android.googlesource.com/tools/repo"
+SECTION = "console/utils"
+
+LICENSE = "Apache-2.0"
+LIC_FILES_CHKSUM = "file://LICENSE;md5=3b83ef96387f14655fc854ddc3c6bd57"
+
+SRC_URI = "git://gerrit.googlesource.com/git-repo.git;protocol=https;branch=main \
+ "
+SRCREV = "fff1d2d74c2078b62cc9c2561330e41a842dc197"
+
+MIRRORS += "git://gerrit.googlesource.com/git-repo.git git://github.com/GerritCodeReview/git-repo.git"
+
+S = "${WORKDIR}/git"
+
+do_configure:prepend() {
+ sed -Ei "s/REPO_REV\s*=\s*('|\")stable('|\")/REPO_REV = '${SRCREV}'/g" ${S}/repo
+}
+
+do_install() {
+ install -D ${WORKDIR}/git/repo ${D}${bindir}/repo
+}
+
+RDEPENDS:${PN} = "python3 git"
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/rpm/files/0001-Add-a-color-setting-for-mips64_n32-binaries.patch b/meta/recipes-devtools/rpm/files/0001-Add-a-color-setting-for-mips64_n32-binaries.patch
index 331ea849e6..25aa69d7da 100644
--- a/meta/recipes-devtools/rpm/files/0001-Add-a-color-setting-for-mips64_n32-binaries.patch
+++ b/meta/recipes-devtools/rpm/files/0001-Add-a-color-setting-for-mips64_n32-binaries.patch
@@ -1,21 +1,20 @@
-From 5492ac3c716020a27a25253bbffe810db43202bf Mon Sep 17 00:00:00 2001
+From f4cf90b5a298d6a3199e8b4c07f520aaf593ce2b Mon Sep 17 00:00:00 2001
From: Alexander Kanavin <alex.kanavin@gmail.com>
Date: Thu, 9 Mar 2017 18:54:02 +0200
Subject: [PATCH] Add a color setting for mips64_n32 binaries
Upstream-Status: Inappropriate [oe-core specific]
Signed-off-by: Alexander Kanavin <alex.kanavin@gmail.com>
-
---
build/rpmfc.c | 4 ++++
rpmrc.in | 2 ++
2 files changed, 6 insertions(+)
diff --git a/build/rpmfc.c b/build/rpmfc.c
-index 10c380ee9..b7655aa93 100644
+index 4b67a9bae..ed7e4e623 100644
--- a/build/rpmfc.c
+++ b/build/rpmfc.c
-@@ -639,6 +639,7 @@ exit:
+@@ -660,6 +660,7 @@ exit:
static const struct rpmfcTokens_s rpmfcTokens[] = {
{ "directory", RPMFC_INCLUDE },
@@ -23,21 +22,21 @@ index 10c380ee9..b7655aa93 100644
{ "ELF 32-bit", RPMFC_ELF32|RPMFC_INCLUDE },
{ "ELF 64-bit", RPMFC_ELF64|RPMFC_INCLUDE },
-@@ -1149,6 +1150,9 @@ static uint32_t getElfColor(const char *fn)
+@@ -1158,6 +1159,9 @@ static uint32_t getElfColor(const char *fn)
color = RPMFC_ELF32;
break;
}
+ if (ehdr.e_machine == EM_MIPS || ehdr.e_machine == EM_MIPS_RS3_LE)
+ if (ehdr.e_flags & EF_MIPS_ABI2)
+ color = RPMFC_ELFMIPSN32;
- elf_end(elf);
}
- close(fd);
+ if (elf)
+ elf_end(elf);
diff --git a/rpmrc.in b/rpmrc.in
-index 5bd9ba3e5..f15bb8dad 100644
+index 8646a966b..7349fdfd3 100644
--- a/rpmrc.in
+++ b/rpmrc.in
-@@ -137,6 +137,8 @@ archcolor: mipsr6el 1
+@@ -142,6 +142,8 @@ archcolor: mipsr6el 1
archcolor: mips64r6 2
archcolor: mips64r6el 2
diff --git a/meta/recipes-devtools/rpm/files/0001-CMakeLists.txt-look-for-lua-with-pkg-config-rather-t.patch b/meta/recipes-devtools/rpm/files/0001-CMakeLists.txt-look-for-lua-with-pkg-config-rather-t.patch
new file mode 100644
index 0000000000..e4edc884b1
--- /dev/null
+++ b/meta/recipes-devtools/rpm/files/0001-CMakeLists.txt-look-for-lua-with-pkg-config-rather-t.patch
@@ -0,0 +1,27 @@
+From c39a074ff3c4d21c100d387661c7d725b5eae7b0 Mon Sep 17 00:00:00 2001
+From: Alexander Kanavin <alex@linutronix.de>
+Date: Wed, 29 Nov 2023 14:06:15 +0100
+Subject: [PATCH] CMakeLists.txt: look for lua with pkg-config rather than
+ cmake modules
+
+Otherwise cmake will try to find libm, badly, and fail.
+
+Upstream-Status: Inappropriate [oe-core specific]
+Signed-off-by: Alexander Kanavin <alex@linutronix.de>
+---
+ CMakeLists.txt | 2 +-
+ 1 file changed, 1 insertion(+), 1 deletion(-)
+
+diff --git a/CMakeLists.txt b/CMakeLists.txt
+index 4a383ceba..ed847c09a 100644
+--- a/CMakeLists.txt
++++ b/CMakeLists.txt
+@@ -190,7 +190,7 @@ set(REQFUNCS
+ )
+
+ find_package(PkgConfig REQUIRED)
+-find_package(Lua 5.2 REQUIRED)
++pkg_check_modules(LUA REQUIRED IMPORTED_TARGET lua>=5.2)
+ find_package(ZLIB REQUIRED)
+ find_package(BZip2)
+ find_package(Iconv)
diff --git a/meta/recipes-devtools/rpm/files/0001-CVE-2021-3521.patch b/meta/recipes-devtools/rpm/files/0001-CVE-2021-3521.patch
deleted file mode 100644
index 044b4dd2a0..0000000000
--- a/meta/recipes-devtools/rpm/files/0001-CVE-2021-3521.patch
+++ /dev/null
@@ -1,57 +0,0 @@
-From 9a6871126f472feea057d5f803505ec8cc78f083 Mon Sep 17 00:00:00 2001
-From: Panu Matilainen <pmatilai@redhat.com>
-Date: Thu, 30 Sep 2021 09:56:20 +0300
-Subject: [PATCH 1/3] Refactor pgpDigParams construction to helper function
-
-No functional changes, just to reduce code duplication and needed by
-the following commits.
-
-CVE: CVE-2021-3521
-Upstream-Status: Backport [https://github.com/rpm-software-management/rpm/commit/9f03f42e2]
-
-Signed-off-by: Changqing Li <changqing.li@windriver.com>
----
- rpmio/rpmpgp.c | 13 +++++++++----
- 1 file changed, 9 insertions(+), 4 deletions(-)
-
-diff --git a/rpmio/rpmpgp.c b/rpmio/rpmpgp.c
-index d0688ebe9a..e472b5320f 100644
---- a/rpmio/rpmpgp.c
-+++ b/rpmio/rpmpgp.c
-@@ -1041,6 +1041,13 @@ unsigned int pgpDigParamsAlgo(pgpDigParams digp, unsigned int algotype)
- return algo;
- }
-
-+static pgpDigParams pgpDigParamsNew(uint8_t tag)
-+{
-+ pgpDigParams digp = xcalloc(1, sizeof(*digp));
-+ digp->tag = tag;
-+ return digp;
-+}
-+
- int pgpPrtParams(const uint8_t * pkts, size_t pktlen, unsigned int pkttype,
- pgpDigParams * ret)
- {
-@@ -1058,8 +1065,7 @@ int pgpPrtParams(const uint8_t * pkts, size_t pktlen, unsigned int pkttype,
- if (pkttype && pkt.tag != pkttype) {
- break;
- } else {
-- digp = xcalloc(1, sizeof(*digp));
-- digp->tag = pkt.tag;
-+ digp = pgpDigParamsNew(pkt.tag);
- }
- }
-
-@@ -1105,8 +1111,7 @@ int pgpPrtParamsSubkeys(const uint8_t *pkts, size_t pktlen,
- digps = xrealloc(digps, alloced * sizeof(*digps));
- }
-
-- digps[count] = xcalloc(1, sizeof(**digps));
-- digps[count]->tag = PGPTAG_PUBLIC_SUBKEY;
-+ digps[count] = pgpDigParamsNew(PGPTAG_PUBLIC_SUBKEY);
- /* Copy UID from main key to subkey */
- digps[count]->userid = xstrdup(mainkey->userid);
-
---
-2.17.1
-
diff --git a/meta/recipes-devtools/rpm/files/0001-Do-not-add-an-unsatisfiable-dependency-when-building.patch b/meta/recipes-devtools/rpm/files/0001-Do-not-add-an-unsatisfiable-dependency-when-building.patch
index 4029233fb7..d0ed711086 100644
--- a/meta/recipes-devtools/rpm/files/0001-Do-not-add-an-unsatisfiable-dependency-when-building.patch
+++ b/meta/recipes-devtools/rpm/files/0001-Do-not-add-an-unsatisfiable-dependency-when-building.patch
@@ -1,4 +1,4 @@
-From f39c28eb52f12ae6e82db360ffd5a903ac8faca5 Mon Sep 17 00:00:00 2001
+From 86e585cc0dd06dfa20f584af8b59d52a59accb45 Mon Sep 17 00:00:00 2001
From: Alexander Kanavin <alex.kanavin@gmail.com>
Date: Mon, 9 Jan 2017 18:52:11 +0200
Subject: [PATCH] Do not add an unsatisfiable dependency when building rpms in
@@ -9,16 +9,15 @@ hand produces rpms that way by design.
Upstream-Status: Inappropriate [oe-core specific]
Signed-off-by: Alexander Kanavin <alex.kanavin@gmail.com>
-
---
build/pack.c | 4 ----
1 file changed, 4 deletions(-)
diff --git a/build/pack.c b/build/pack.c
-index e6cec1816..810cd7351 100644
+index f7dac6d9a..f382c7da0 100644
--- a/build/pack.c
+++ b/build/pack.c
-@@ -724,10 +724,6 @@ static rpmRC packageBinary(rpmSpec spec, Package pkg, const char *cookie, int ch
+@@ -711,10 +711,6 @@ static rpmRC packageBinary(rpmSpec spec, Package pkg, const char *cookie, int ch
headerPutBin(pkg->header, RPMTAG_SOURCEPKGID, spec->sourcePkgId,16);
}
diff --git a/meta/recipes-devtools/rpm/files/0001-Do-not-hardcode-lib-rpm-as-the-installation-path-for.patch b/meta/recipes-devtools/rpm/files/0001-Do-not-hardcode-lib-rpm-as-the-installation-path-for.patch
index 6d236ac400..b571a0ae8c 100644
--- a/meta/recipes-devtools/rpm/files/0001-Do-not-hardcode-lib-rpm-as-the-installation-path-for.patch
+++ b/meta/recipes-devtools/rpm/files/0001-Do-not-hardcode-lib-rpm-as-the-installation-path-for.patch
@@ -1,4 +1,4 @@
-From 8d013fe154a162305f76141151baf767dd04b598 Mon Sep 17 00:00:00 2001
+From 73d6841d9ef2a8ac7bd63f9645a3efe8038dfdd4 Mon Sep 17 00:00:00 2001
From: Alexander Kanavin <alex.kanavin@gmail.com>
Date: Mon, 27 Feb 2017 09:43:30 +0200
Subject: [PATCH] Do not hardcode "lib/rpm" as the installation path for
@@ -6,31 +6,29 @@ Subject: [PATCH] Do not hardcode "lib/rpm" as the installation path for
Upstream-Status: Denied [https://github.com/rpm-software-management/rpm/pull/263]
Signed-off-by: Alexander Kanavin <alex.kanavin@gmail.com>
-
---
- configure.ac | 2 +-
- macros.in | 2 +-
- rpm.am | 4 ++--
- 3 files changed, 4 insertions(+), 4 deletions(-)
+ CMakeLists.txt | 2 +-
+ macros.in | 2 +-
+ 2 files changed, 2 insertions(+), 2 deletions(-)
-diff --git a/configure.ac b/configure.ac
-index eb7d6941b..10a889b5d 100644
---- a/configure.ac
-+++ b/configure.ac
-@@ -871,7 +871,7 @@ else
- usrprefix=$prefix
- fi
+diff --git a/CMakeLists.txt b/CMakeLists.txt
+index 7808115c1..4a383ceba 100644
+--- a/CMakeLists.txt
++++ b/CMakeLists.txt
+@@ -33,7 +33,7 @@ option(WITH_IMAEVM "Build with IMA support" OFF)
+ option(WITH_FAPOLICYD "Build with fapolicyd support" ON)
+ option(WITH_READLINE "Build with readline support" ON)
--RPMCONFIGDIR="`echo ${usrprefix}/lib/rpm`"
-+RPMCONFIGDIR="`echo ${libdir}/rpm`"
- AC_SUBST(RPMCONFIGDIR)
+-set(RPM_CONFIGDIR "${CMAKE_INSTALL_PREFIX}/lib/rpm" CACHE PATH "rpm home")
++set(RPM_CONFIGDIR "${CMAKE_INSTALL_PREFIX}/${CMAKE_INSTALL_LIBDIR}/rpm" CACHE PATH "rpm home")
+ set(RPM_VENDOR "vendor" CACHE STRING "rpm vendor string")
- AC_SUBST(OBJDUMP)
+ # Emulate libtool versioning. Before a public release:
diff --git a/macros.in b/macros.in
-index a1f795e5f..689e784ef 100644
+index b49ffaad4..3acbe78f6 100644
--- a/macros.in
+++ b/macros.in
-@@ -933,7 +933,7 @@ package or when debugging this package.\
+@@ -969,7 +969,7 @@ Supplements: (%{name} = %{version}-%{release} and langpacks-%{1})\
%_sharedstatedir %{_prefix}/com
%_localstatedir %{_prefix}/var
%_lib lib
@@ -39,20 +37,3 @@ index a1f795e5f..689e784ef 100644
%_includedir %{_prefix}/include
%_infodir %{_datadir}/info
%_mandir %{_datadir}/man
-diff --git a/rpm.am b/rpm.am
-index 7b57f433b..9bbb9ee96 100644
---- a/rpm.am
-+++ b/rpm.am
-@@ -1,10 +1,10 @@
- # Internal binaries
- ## HACK: It probably should be $(libexecdir)/rpm or $(libdir)/rpm
--rpmlibexecdir = $(prefix)/lib/rpm
-+rpmlibexecdir = $(libdir)/rpm
-
- # Host independent config files
- ## HACK: it probably should be $(datadir)/rpm
--rpmconfigdir = $(prefix)/lib/rpm
-+rpmconfigdir = $(libdir)/rpm
-
- # Libtool version (current-revision-age) for all our libraries
- rpm_version_info = 11:0:2
diff --git a/meta/recipes-devtools/rpm/files/0001-Do-not-read-config-files-from-HOME.patch b/meta/recipes-devtools/rpm/files/0001-Do-not-read-config-files-from-HOME.patch
index 96eb418952..796088df53 100644
--- a/meta/recipes-devtools/rpm/files/0001-Do-not-read-config-files-from-HOME.patch
+++ b/meta/recipes-devtools/rpm/files/0001-Do-not-read-config-files-from-HOME.patch
@@ -1,4 +1,4 @@
-From 35381b6cd6c1b571bf7e6b0640de0f54dbf94386 Mon Sep 17 00:00:00 2001
+From e210458d125793915abce30420d866a30305c37a Mon Sep 17 00:00:00 2001
From: Alexander Kanavin <alex.kanavin@gmail.com>
Date: Tue, 10 Jan 2017 14:11:30 +0200
Subject: [PATCH] Do not read config files from $HOME
@@ -10,29 +10,26 @@ Signed-off-by: Alexander Kanavin <alex.kanavin@gmail.com>
1 file changed, 2 insertions(+), 4 deletions(-)
diff --git a/lib/rpmrc.c b/lib/rpmrc.c
-index 4ed991321..19fe80f98 100644
+index 9437a0ff1..483585ae4 100644
--- a/lib/rpmrc.c
+++ b/lib/rpmrc.c
-@@ -458,8 +458,7 @@ static void setDefaults(void)
+@@ -459,8 +459,7 @@ static void setDefaults(void)
if (!defrcfiles) {
defrcfiles = rstrscat(NULL, confdir, "/rpmrc", ":",
- confdir, "/" RPMCANONVENDOR "/rpmrc", ":",
+ confdir, "/" RPM_VENDOR "/rpmrc", ":",
- SYSCONFDIR "/rpmrc", ":",
- "~/.rpmrc", NULL);
-+ SYSCONFDIR "/rpmrc", ":");
++ SYSCONFDIR "/rpmrc", NULL);
}
#ifndef MACROFILES
-@@ -471,8 +470,7 @@ static void setDefaults(void)
- confdir, "/" RPMCANONVENDOR "/macros", ":",
+@@ -472,8 +471,7 @@ static void setDefaults(void)
+ confdir, "/" RPM_VENDOR "/macros", ":",
SYSCONFDIR "/rpm/macros.*", ":",
SYSCONFDIR "/rpm/macros", ":",
- SYSCONFDIR "/rpm/%{_target}/macros", ":",
- "~/.rpmmacros", NULL);
-+ SYSCONFDIR "/rpm/%{_target}/macros", ":");
++ SYSCONFDIR "/rpm/%{_target}/macros", NULL);
}
#else
macrofiles = MACROFILES;
---
-2.11.0
-
diff --git a/meta/recipes-devtools/rpm/files/0001-Do-not-reset-the-PATH-environment-variable-before-ru.patch b/meta/recipes-devtools/rpm/files/0001-Do-not-reset-the-PATH-environment-variable-before-ru.patch
index 41cdf6ed77..328fbf86ac 100644
--- a/meta/recipes-devtools/rpm/files/0001-Do-not-reset-the-PATH-environment-variable-before-ru.patch
+++ b/meta/recipes-devtools/rpm/files/0001-Do-not-reset-the-PATH-environment-variable-before-ru.patch
@@ -1,4 +1,4 @@
-From a674b9cc7af448d7c6748bc163bf37dc14a57f09 Mon Sep 17 00:00:00 2001
+From a8fe7a7a2e41c9f127ed26407d57076babcb89e8 Mon Sep 17 00:00:00 2001
From: Alexander Kanavin <alex.kanavin@gmail.com>
Date: Fri, 20 Jan 2017 13:32:06 +0200
Subject: [PATCH] Do not reset the PATH environment variable before running
@@ -8,16 +8,15 @@ We add lots of native stuff into it and scriptlets rely on that.
Upstream-Status: Inappropriate [oe-core specific]
Signed-off-by: Alexander Kanavin <alex.kanavin@gmail.com>
-
---
lib/rpmscript.c | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/lib/rpmscript.c b/lib/rpmscript.c
-index 6a31e0d..2b0e438 100644
+index 57689bb68..7b7e26606 100644
--- a/lib/rpmscript.c
+++ b/lib/rpmscript.c
-@@ -184,7 +184,7 @@ static void doScriptExec(ARGV_const_t argv, ARGV_const_t prefixes,
+@@ -252,7 +252,7 @@ static void doScriptExec(ARGV_const_t argv, ARGV_const_t prefixes,
if (ipath && ipath[5] != '%')
path = ipath;
diff --git a/meta/recipes-devtools/rpm/files/0001-Rip-out-partial-support-for-unused-MD2-and-RIPEMD160.patch b/meta/recipes-devtools/rpm/files/0001-Rip-out-partial-support-for-unused-MD2-and-RIPEMD160.patch
deleted file mode 100644
index 734e38bb39..0000000000
--- a/meta/recipes-devtools/rpm/files/0001-Rip-out-partial-support-for-unused-MD2-and-RIPEMD160.patch
+++ /dev/null
@@ -1,81 +0,0 @@
-From 2d53d1e308a5bd15a16cc289fa7e1f264ea706be Mon Sep 17 00:00:00 2001
-From: Panu Matilainen <pmatilai@redhat.com>
-Date: Tue, 26 Jun 2018 10:46:14 +0300
-Subject: [PATCH] Rip out partial support for unused MD2 and RIPEMD160 digests
-
-Inspired by #453, adding configure-checks for unused digests algorithms
-seems nonsensical, at no point in rpm history have these algorithms been
-used for anything in rpm so there's not even backward compatibility to
-care about. So the question becomes why do we appear to have (some)
-support for those unused algorithms? So lets don't, problem solved...
-
-Upstream-Status: Backport [https://github.com/rpm-software-management/rpm/commit/ff4b9111aeba01dd025dd133ce617fb80f7398a0]
-Signed-off-by: Alexander Kanavin <alex.kanavin@gmail.com>
----
- rpmio/digest_beecrypt.c | 7 -------
- rpmio/digest_nss.c | 2 --
- rpmio/digest_openssl.c | 6 ------
- 3 files changed, 15 deletions(-)
-
-diff --git a/rpmio/digest_beecrypt.c b/rpmio/digest_beecrypt.c
-index 597027e25..653a39491 100644
---- a/rpmio/digest_beecrypt.c
-+++ b/rpmio/digest_beecrypt.c
-@@ -132,10 +132,6 @@ DIGEST_CTX rpmDigestInit(int hashalgo, rpmDigestFlags flags)
- ctx->Digest = (void *) sha512Digest;
- break;
- #endif
-- case PGPHASHALGO_RIPEMD160:
-- case PGPHASHALGO_MD2:
-- case PGPHASHALGO_TIGER192:
-- case PGPHASHALGO_HAVAL_5_160:
- default:
- free(ctx);
- return NULL;
-@@ -292,9 +288,6 @@ static int pgpVerifySigRSA(pgpDigAlg pgpkey, pgpDigAlg pgpsig, uint8_t *hash, si
- case PGPHASHALGO_SHA1:
- prefix = "3021300906052b0e03021a05000414";
- break;
-- case PGPHASHALGO_MD2:
-- prefix = "3020300c06082a864886f70d020205000410";
-- break;
- case PGPHASHALGO_SHA256:
- prefix = "3031300d060960864801650304020105000420";
- break;
-diff --git a/rpmio/digest_nss.c b/rpmio/digest_nss.c
-index e11920e3e..b3d2b5595 100644
---- a/rpmio/digest_nss.c
-+++ b/rpmio/digest_nss.c
-@@ -117,7 +117,6 @@ static HASH_HashType getHashType(int hashalgo)
- {
- switch (hashalgo) {
- case PGPHASHALGO_MD5: return HASH_AlgMD5;
-- case PGPHASHALGO_MD2: return HASH_AlgMD2;
- case PGPHASHALGO_SHA1: return HASH_AlgSHA1;
- #ifdef SHA224_LENGTH
- case PGPHASHALGO_SHA224: return HASH_AlgSHA224;
-@@ -217,7 +216,6 @@ static SECOidTag getHashAlg(unsigned int hashalgo)
- {
- switch (hashalgo) {
- case PGPHASHALGO_MD5: return SEC_OID_MD5;
-- case PGPHASHALGO_MD2: return SEC_OID_MD2;
- case PGPHASHALGO_SHA1: return SEC_OID_SHA1;
- #ifdef SHA224_LENGTH
- case PGPHASHALGO_SHA224: return SEC_OID_SHA224;
-diff --git a/rpmio/digest_openssl.c b/rpmio/digest_openssl.c
-index 18e52a724..0ae48dd1d 100644
---- a/rpmio/digest_openssl.c
-+++ b/rpmio/digest_openssl.c
-@@ -172,12 +172,6 @@ static const EVP_MD *getEVPMD(int hashalgo)
- case PGPHASHALGO_SHA1:
- return EVP_sha1();
-
-- case PGPHASHALGO_RIPEMD160:
-- return EVP_ripemd160();
--
-- case PGPHASHALGO_MD2:
-- return EVP_md2();
--
- case PGPHASHALGO_SHA256:
- return EVP_sha256();
-
diff --git a/meta/recipes-devtools/rpm/files/0001-When-cross-installing-execute-package-scriptlets-wit.patch b/meta/recipes-devtools/rpm/files/0001-When-cross-installing-execute-package-scriptlets-wit.patch
index 4020a31092..e4251a1a73 100644
--- a/meta/recipes-devtools/rpm/files/0001-When-cross-installing-execute-package-scriptlets-wit.patch
+++ b/meta/recipes-devtools/rpm/files/0001-When-cross-installing-execute-package-scriptlets-wit.patch
@@ -1,4 +1,4 @@
-From a89daa75ac970d8e247edc762d1181e9a5b0c5d0 Mon Sep 17 00:00:00 2001
+From 34c0d3263f3e0b366a2320e0823f46673f7ba928 Mon Sep 17 00:00:00 2001
From: Alexander Kanavin <alex.kanavin@gmail.com>
Date: Tue, 17 Jan 2017 14:07:17 +0200
Subject: [PATCH] When cross-installing, execute package scriptlets without
@@ -29,10 +29,10 @@ Signed-off-by: Alexander Kanavin <alex.kanavin@gmail.com>
1 file changed, 8 insertions(+), 3 deletions(-)
diff --git a/lib/rpmscript.c b/lib/rpmscript.c
-index cc98c4885..f8bd3df04 100644
+index 3f6313278..57689bb68 100644
--- a/lib/rpmscript.c
+++ b/lib/rpmscript.c
-@@ -394,8 +394,7 @@ exit:
+@@ -448,8 +448,7 @@ exit:
Fclose(out); /* XXX dup'd STDOUT_FILENO */
if (fn) {
@@ -42,21 +42,18 @@ index cc98c4885..f8bd3df04 100644
free(fn);
}
free(mline);
-@@ -428,7 +427,13 @@ rpmRC rpmScriptRun(rpmScript script, int arg1, int arg2, FD_t scriptFd,
+@@ -483,7 +482,13 @@ rpmRC rpmScriptRun(rpmScript script, int arg1, int arg2, FD_t scriptFd,
if (rc != RPMRC_FAIL) {
if (script_type & RPMSCRIPTLET_EXEC) {
-- rc = runExtScript(plugins, prefixes, script->descr, lvl, scriptFd, &args, script->body, arg1, arg2, &script->nextFileFunc);
+- rc = runExtScript(plugins, prefixes, script->descr, lvl, scriptFd, &args, script->body, arg1, arg2, script->nextFileFunc);
+ if (getenv("RPM_NO_CHROOT_FOR_SCRIPTS") != NULL) {
+ rpmChrootOut();
-+ rc = runExtScript(plugins, prefixes, script->descr, lvl, scriptFd, &args, script->body, arg1, arg2, &script->nextFileFunc);
++ rc = runExtScript(plugins, prefixes, script->descr, lvl, scriptFd, &args, script->body, arg1, arg2, script->nextFileFunc);
+ rpmChrootIn();
+ } else {
-+ rc = runExtScript(plugins, prefixes, script->descr, lvl, scriptFd, &args, script->body, arg1, arg2, &script->nextFileFunc);
++ rc = runExtScript(plugins, prefixes, script->descr, lvl, scriptFd, &args, script->body, arg1, arg2, script->nextFileFunc);
+ }
} else {
- rc = runLuaScript(plugins, prefixes, script->descr, lvl, scriptFd, &args, script->body, arg1, arg2, &script->nextFileFunc);
+ rc = runLuaScript(plugins, prefixes, script->descr, lvl, scriptFd, &args, script->body, arg1, arg2, script->nextFileFunc);
}
---
-2.11.0
-
diff --git a/meta/recipes-devtools/rpm/files/0001-build-pack.c-do-not-insert-payloadflags-into-.rpm-me.patch b/meta/recipes-devtools/rpm/files/0001-build-pack.c-do-not-insert-payloadflags-into-.rpm-me.patch
index 79b168257e..2f6397aa8a 100644
--- a/meta/recipes-devtools/rpm/files/0001-build-pack.c-do-not-insert-payloadflags-into-.rpm-me.patch
+++ b/meta/recipes-devtools/rpm/files/0001-build-pack.c-do-not-insert-payloadflags-into-.rpm-me.patch
@@ -1,4 +1,4 @@
-From 2d351c666f09cc1b9e368422653fb42ac8b86249 Mon Sep 17 00:00:00 2001
+From ae4fdd8e8d052835973e6ff4b7550f93bde30a98 Mon Sep 17 00:00:00 2001
From: Alexander Kanavin <alex@linutronix.de>
Date: Tue, 31 Aug 2021 10:37:05 +0200
Subject: [PATCH] build/pack.c: do not insert payloadflags into .rpm metadata
@@ -14,10 +14,10 @@ Signed-off-by: Alexander Kanavin <alex@linutronix.de>
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/build/pack.c b/build/pack.c
-index 932cb213e..b45d0726f 100644
+index f382c7da0..0889dd993 100644
--- a/build/pack.c
+++ b/build/pack.c
-@@ -328,7 +328,7 @@ static char *getIOFlags(Package pkg)
+@@ -330,7 +330,7 @@ static char *getIOFlags(Package pkg)
headerPutString(pkg->header, RPMTAG_PAYLOADCOMPRESSOR, compr);
buf = xstrdup(rpmio_flags);
buf[s - rpmio_flags] = '\0';
diff --git a/meta/recipes-devtools/rpm/files/0001-docs-do-not-build-manpages-requires-pandoc.patch b/meta/recipes-devtools/rpm/files/0001-docs-do-not-build-manpages-requires-pandoc.patch
deleted file mode 100644
index ced52d1007..0000000000
--- a/meta/recipes-devtools/rpm/files/0001-docs-do-not-build-manpages-requires-pandoc.patch
+++ /dev/null
@@ -1,26 +0,0 @@
-From 9bf1693092385eba9841614613313010221ca01f Mon Sep 17 00:00:00 2001
-From: Alexander Kanavin <alex.kanavin@gmail.com>
-Date: Tue, 29 Jun 2021 20:11:26 +0200
-Subject: [PATCH] docs: do not build manpages (requires pandoc)
-
-Upstream-Status: Inappropriate [oe-core specific]
-Signed-off-by: Alexander Kanavin <alex.kanavin@gmail.com>
----
- docs/Makefile.am | 2 --
- 1 file changed, 2 deletions(-)
-
-diff --git a/docs/Makefile.am b/docs/Makefile.am
-index 5a6bd203a..6257767fd 100644
---- a/docs/Makefile.am
-+++ b/docs/Makefile.am
-@@ -1,7 +1,5 @@
- ## Process this file with automake to produce Makefile.in
-
--SUBDIRS = man
--
- EXTRA_DIST =
-
- EXTRA_DIST += \
---
-2.32.0
-
diff --git a/meta/recipes-devtools/rpm/files/0001-lib-transaction.c-fix-file-conflicts-for-MIPS64-N32.patch b/meta/recipes-devtools/rpm/files/0001-lib-transaction.c-fix-file-conflicts-for-MIPS64-N32.patch
index 6678c105cd..98e52da3a8 100644
--- a/meta/recipes-devtools/rpm/files/0001-lib-transaction.c-fix-file-conflicts-for-MIPS64-N32.patch
+++ b/meta/recipes-devtools/rpm/files/0001-lib-transaction.c-fix-file-conflicts-for-MIPS64-N32.patch
@@ -1,4 +1,4 @@
-From 1ed066fc6fa7d7afffe3545c4e3ea937529e6c49 Mon Sep 17 00:00:00 2001
+From b04ecb793a3c859985eead5e261785b27a4c4a20 Mon Sep 17 00:00:00 2001
From: Changqing Li <changqing.li@windriver.com>
Date: Thu, 7 May 2020 17:40:58 +0800
Subject: [PATCH] lib/transaction.c: fix file conflicts for MIPS64 N32
@@ -32,10 +32,10 @@ Signed-off-by: Changqing Li <changqing.li@windriver.com>
1 file changed, 12 insertions(+), 1 deletion(-)
diff --git a/lib/transaction.c b/lib/transaction.c
-index 67b9db5..82386b8 100644
+index 70d2587ac..b89b30060 100644
--- a/lib/transaction.c
+++ b/lib/transaction.c
-@@ -391,7 +391,18 @@ static int handleColorConflict(rpmts ts,
+@@ -400,7 +400,18 @@ static int handleColorConflict(rpmts ts,
rpmfsSetAction(ofs, ofx, FA_CREATE);
rpmfsSetAction(fs, fx, FA_SKIPCOLOR);
rConflicts = 0;
@@ -55,6 +55,3 @@ index 67b9db5..82386b8 100644
}
}
---
-2.7.4
-
diff --git a/meta/recipes-devtools/rpm/files/0001-perl-disable-auto-reqs.patch b/meta/recipes-devtools/rpm/files/0001-perl-disable-auto-reqs.patch
index a6c58699d3..55108e7f1c 100644
--- a/meta/recipes-devtools/rpm/files/0001-perl-disable-auto-reqs.patch
+++ b/meta/recipes-devtools/rpm/files/0001-perl-disable-auto-reqs.patch
@@ -1,4 +1,7 @@
-perl: disable auto requires
+From 58bf006646a063837c46b695f7e7ebb69bee7238 Mon Sep 17 00:00:00 2001
+From: Mark Hatle <mark.hatle@windriver.com>
+Date: Tue, 15 Aug 2017 16:41:57 -0500
+Subject: [PATCH] perl: disable auto requires
When generating automatic requirements, it's possible for perl scripts to
declare 'optional' dependencies. These seem to often be incorrect and will
@@ -9,20 +12,24 @@ generation. This matches the behavior from the previous RPM5 implementation.
Upstream-Status: Inappropriate [OE specific configuration]
Signed-off-by: Mark Hatle <mark.hatle@windriver.com>
+---
+ fileattrs/perl.attr | 2 +-
+ fileattrs/perllib.attr | 2 +-
+ 2 files changed, 2 insertions(+), 2 deletions(-)
-Index: git/fileattrs/perl.attr
-===================================================================
---- git.orig/fileattrs/perl.attr
-+++ git/fileattrs/perl.attr
+diff --git a/fileattrs/perl.attr b/fileattrs/perl.attr
+index 0daef58d5..81ddf5305 100644
+--- a/fileattrs/perl.attr
++++ b/fileattrs/perl.attr
@@ -1,3 +1,3 @@
-%__perl_requires %{_rpmconfigdir}/perl.req
+#__perl_requires %{_rpmconfigdir}/perl.req
%__perl_magic ^.*[Pp]erl .*$
%__perl_flags exeonly
-Index: git/fileattrs/perllib.attr
-===================================================================
---- git.orig/fileattrs/perllib.attr
-+++ git/fileattrs/perllib.attr
+diff --git a/fileattrs/perllib.attr b/fileattrs/perllib.attr
+index fcad48099..495a28927 100644
+--- a/fileattrs/perllib.attr
++++ b/fileattrs/perllib.attr
@@ -1,5 +1,5 @@
%__perllib_provides %{_rpmconfigdir}/perl.prov
-%__perllib_requires %{_rpmconfigdir}/perl.req
diff --git a/meta/recipes-devtools/rpm/files/0001-tools-Add-error.h-for-non-glibc-case.patch b/meta/recipes-devtools/rpm/files/0001-tools-Add-error.h-for-non-glibc-case.patch
deleted file mode 100644
index 9783396639..0000000000
--- a/meta/recipes-devtools/rpm/files/0001-tools-Add-error.h-for-non-glibc-case.patch
+++ /dev/null
@@ -1,71 +0,0 @@
-From 9b9d717f484ec913cdd3804e43489b3dc18bd77c Mon Sep 17 00:00:00 2001
-From: Khem Raj <raj.khem@gmail.com>
-Date: Sat, 31 Oct 2020 22:14:05 -0700
-Subject: [PATCH] tools: Add error.h for non-glibc case
-
-error is glibc specific API, so this patch will mostly not accepted
-upstream given that elfutils has been closely tied to glibc
-
-Upstream-Status: Inappropriate [workaround for musl]
-
-Signed-off-by: Khem Raj <raj.khem@gmail.com>
-
----
- tools/elfdeps.c | 6 +++++-
- tools/error.h | 27 +++++++++++++++++++++++++++
- 2 files changed, 32 insertions(+), 1 deletion(-)
- create mode 100644 tools/error.h
-
-diff --git a/tools/elfdeps.c b/tools/elfdeps.c
-index d205935bb..3a8945b33 100644
---- a/tools/elfdeps.c
-+++ b/tools/elfdeps.c
-@@ -5,10 +5,14 @@
- #include <unistd.h>
- #include <stdlib.h>
- #include <fcntl.h>
--#include <error.h>
- #include <errno.h>
- #include <popt.h>
- #include <gelf.h>
-+#ifdef __GLIBC__
-+#include <error.h>
-+#else
-+#include "error.h"
-+#endif
-
- #include <rpm/rpmstring.h>
- #include <rpm/argv.h>
-diff --git a/tools/error.h b/tools/error.h
-new file mode 100644
-index 000000000..ef06827a0
---- /dev/null
-+++ b/tools/error.h
-@@ -0,0 +1,27 @@
-+#ifndef _ERROR_H_
-+#define _ERROR_H_
-+
-+#include <stdarg.h>
-+#include <stdio.h>
-+#include <stdlib.h>
-+#include <string.h>
-+#include <errno.h>
-+
-+static unsigned int error_message_count = 0;
-+
-+static inline void error(int status, int errnum, const char* format, ...)
-+{
-+ va_list ap;
-+ fprintf(stderr, "%s: ", program_invocation_name);
-+ va_start(ap, format);
-+ vfprintf(stderr, format, ap);
-+ va_end(ap);
-+ if (errnum)
-+ fprintf(stderr, ": %s", strerror(errnum));
-+ fprintf(stderr, "\n");
-+ error_message_count++;
-+ if (status)
-+ exit(status);
-+}
-+
-+#endif /* _ERROR_H_ */
diff --git a/meta/recipes-devtools/rpm/files/0002-Add-support-for-prefixing-etc-from-RPM_ETCCONFIGDIR-.patch b/meta/recipes-devtools/rpm/files/0002-Add-support-for-prefixing-etc-from-RPM_ETCCONFIGDIR-.patch
index b3dbc319b6..c5caa7dc5e 100644
--- a/meta/recipes-devtools/rpm/files/0002-Add-support-for-prefixing-etc-from-RPM_ETCCONFIGDIR-.patch
+++ b/meta/recipes-devtools/rpm/files/0002-Add-support-for-prefixing-etc-from-RPM_ETCCONFIGDIR-.patch
@@ -1,7 +1,7 @@
-From 383c0b097b7eba16801a9e3c4b8e36a4b6de74ab Mon Sep 17 00:00:00 2001
+From d7143dc4e75c8bcc5cc4c852a4b972942b7e4d07 Mon Sep 17 00:00:00 2001
From: Alexander Kanavin <alex.kanavin@gmail.com>
Date: Fri, 20 Jan 2017 13:33:05 +0200
-Subject: [PATCH 2/2] Add support for prefixing /etc from RPM_ETCCONFIGDIR
+Subject: [PATCH] Add support for prefixing /etc from RPM_ETCCONFIGDIR
environment variable
This is needed so that rpm can pick up target-specific configuration
@@ -14,10 +14,10 @@ Signed-off-by: Alexander Kanavin <alex.kanavin@gmail.com>
1 file changed, 14 insertions(+), 5 deletions(-)
diff --git a/lib/rpmrc.c b/lib/rpmrc.c
-index 19fe80f98..6b27b3941 100644
+index 483585ae4..ea858c290 100644
--- a/lib/rpmrc.c
+++ b/lib/rpmrc.c
-@@ -455,10 +455,14 @@ const char * lookupInDefaultTable(const char * name,
+@@ -456,10 +456,14 @@ const char * lookupInDefaultTable(const char * name,
static void setDefaults(void)
{
const char *confdir = rpmConfigDir();
@@ -27,26 +27,26 @@ index 19fe80f98..6b27b3941 100644
+
if (!defrcfiles) {
defrcfiles = rstrscat(NULL, confdir, "/rpmrc", ":",
- confdir, "/" RPMCANONVENDOR "/rpmrc", ":",
-- SYSCONFDIR "/rpmrc", ":");
-+ etcconfdir, SYSCONFDIR "/rpmrc", ":", NULL);
+ confdir, "/" RPM_VENDOR "/rpmrc", ":",
+- SYSCONFDIR "/rpmrc", NULL);
++ etcconfdir, SYSCONFDIR "/rpmrc", NULL);
}
#ifndef MACROFILES
-@@ -468,9 +472,9 @@ static void setDefaults(void)
+@@ -469,9 +473,9 @@ static void setDefaults(void)
confdir, "/platform/%{_target}/macros", ":",
confdir, "/fileattrs/*.attr", ":",
- confdir, "/" RPMCANONVENDOR "/macros", ":",
+ confdir, "/" RPM_VENDOR "/macros", ":",
- SYSCONFDIR "/rpm/macros.*", ":",
- SYSCONFDIR "/rpm/macros", ":",
-- SYSCONFDIR "/rpm/%{_target}/macros", ":");
+- SYSCONFDIR "/rpm/%{_target}/macros", NULL);
+ etcconfdir, SYSCONFDIR "/rpm/macros.*", ":",
+ etcconfdir, SYSCONFDIR "/rpm/macros", ":",
-+ etcconfdir, SYSCONFDIR "/rpm/%{_target}/macros", ":", NULL);
++ etcconfdir, SYSCONFDIR "/rpm/%{_target}/macros", NULL);
}
#else
macrofiles = MACROFILES;
-@@ -989,7 +993,11 @@ static void read_auxv(void)
+@@ -1115,7 +1119,11 @@ static void read_auxv(void)
*/
static void defaultMachine(rpmrcCtx ctx, const char ** arch, const char ** os)
{
@@ -59,7 +59,7 @@ index 19fe80f98..6b27b3941 100644
static struct utsname un;
char * chptr;
canonEntry canon;
-@@ -1286,6 +1294,7 @@ static void defaultMachine(rpmrcCtx ctx, const char ** arch, const char ** os)
+@@ -1435,6 +1443,7 @@ static void defaultMachine(rpmrcCtx ctx, const char ** arch, const char ** os)
if (arch) *arch = un.machine;
if (os) *os = un.sysname;
@@ -67,6 +67,3 @@ index 19fe80f98..6b27b3941 100644
}
static
---
-2.11.0
-
diff --git a/meta/recipes-devtools/rpm/files/0002-CVE-2021-3521.patch b/meta/recipes-devtools/rpm/files/0002-CVE-2021-3521.patch
deleted file mode 100644
index 683b57d455..0000000000
--- a/meta/recipes-devtools/rpm/files/0002-CVE-2021-3521.patch
+++ /dev/null
@@ -1,64 +0,0 @@
-From c4b1bee51bbdd732b94b431a951481af99117703 Mon Sep 17 00:00:00 2001
-From: Panu Matilainen <pmatilai@redhat.com>
-Date: Thu, 30 Sep 2021 09:51:10 +0300
-Subject: [PATCH 2/3] Process MPI's from all kinds of signatures
-
-No immediate effect but needed by the following commits.
-
-CVE: CVE-2021-3521
-Upstream-Status: Backport [https://github.com/rpm-software-management/rpm/commit/b5e8bc74b]
-
-Signed-off-by: Changqing Li <changqing.li@windriver.com>
-
----
- rpmio/rpmpgp.c | 13 +++++--------
- 1 file changed, 5 insertions(+), 8 deletions(-)
-
-diff --git a/rpmio/rpmpgp.c b/rpmio/rpmpgp.c
-index 25f67048fd..509e777e6d 100644
---- a/rpmio/rpmpgp.c
-+++ b/rpmio/rpmpgp.c
-@@ -543,7 +543,7 @@ pgpDigAlg pgpDigAlgFree(pgpDigAlg alg)
- return NULL;
- }
-
--static int pgpPrtSigParams(pgpTag tag, uint8_t pubkey_algo, uint8_t sigtype,
-+static int pgpPrtSigParams(pgpTag tag, uint8_t pubkey_algo,
- const uint8_t *p, const uint8_t *h, size_t hlen,
- pgpDigParams sigp)
- {
-@@ -556,10 +556,8 @@ static int pgpPrtSigParams(pgpTag tag, uint8_t pubkey_algo, uint8_t sigtype,
- int mpil = pgpMpiLen(p);
- if (pend - p < mpil)
- break;
-- if (sigtype == PGPSIGTYPE_BINARY || sigtype == PGPSIGTYPE_TEXT) {
-- if (sigalg->setmpi(sigalg, i, p))
-- break;
-- }
-+ if (sigalg->setmpi(sigalg, i, p))
-+ break;
- p += mpil;
- }
-
-@@ -619,7 +617,7 @@ static int pgpPrtSig(pgpTag tag, const uint8_t *h, size_t hlen,
- }
-
- p = ((uint8_t *)v) + sizeof(*v);
-- rc = pgpPrtSigParams(tag, v->pubkey_algo, v->sigtype, p, h, hlen, _digp);
-+ rc = pgpPrtSigParams(tag, v->pubkey_algo, p, h, hlen, _digp);
- } break;
- case 4:
- { pgpPktSigV4 v = (pgpPktSigV4)h;
-@@ -677,8 +675,7 @@ static int pgpPrtSig(pgpTag tag, const uint8_t *h, size_t hlen,
- p += 2;
- if (p > hend)
- return 1;
--
-- rc = pgpPrtSigParams(tag, v->pubkey_algo, v->sigtype, p, h, hlen, _digp);
-+ rc = pgpPrtSigParams(tag, v->pubkey_algo, p, h, hlen, _digp);
- } break;
- default:
- rpmlog(RPMLOG_WARNING, _("Unsupported version of signature: V%d\n"), version);
---
-2.17.1
-
diff --git a/meta/recipes-devtools/rpm/files/0002-rpmio-rpmglob.c-avoid-using-GLOB_BRACE-if-undefined-.patch b/meta/recipes-devtools/rpm/files/0002-rpmio-rpmglob.c-avoid-using-GLOB_BRACE-if-undefined-.patch
new file mode 100644
index 0000000000..23dce30086
--- /dev/null
+++ b/meta/recipes-devtools/rpm/files/0002-rpmio-rpmglob.c-avoid-using-GLOB_BRACE-if-undefined-.patch
@@ -0,0 +1,34 @@
+From 29c2a0c18b0c773128bf62c611b4c53fe4471105 Mon Sep 17 00:00:00 2001
+From: Alexander Kanavin <alex@linutronix.de>
+Date: Tue, 16 Jan 2024 09:59:26 +0100
+Subject: [PATCH] rpmio/rpmglob.c: avoid using GLOB_BRACE if undefined by C
+ library
+
+This addresses musl failures; if there is code out there relying on
+those braces, it needs to be fixed when used on musl.
+
+This is unlikely to be trivially fixable upstream.
+
+Upstream-Status: Inappropriate [reported at https://github.com/rpm-software-management/rpm/issues/2844]
+Signed-off-by: Alexander Kanavin <alex@linutronix.de>
+---
+ rpmio/rpmglob.c | 6 ++++++
+ 1 file changed, 6 insertions(+)
+
+diff --git a/rpmio/rpmglob.c b/rpmio/rpmglob.c
+index 243568766..43c27074a 100644
+--- a/rpmio/rpmglob.c
++++ b/rpmio/rpmglob.c
+@@ -33,6 +33,12 @@
+
+ #include "debug.h"
+
++/* Don't fail if the standard C library
+++ * doesn't provide brace expansion */
++#ifndef GLOB_BRACE
++#define GLOB_BRACE 0
++#endif
++
+ /* Return 1 if pattern contains a magic char, see glob(7) for a list */
+ static int ismagic(const char *pattern)
+ {
diff --git a/meta/recipes-devtools/rpm/files/0003-CVE-2021-3521.patch b/meta/recipes-devtools/rpm/files/0003-CVE-2021-3521.patch
deleted file mode 100644
index a5ec802501..0000000000
--- a/meta/recipes-devtools/rpm/files/0003-CVE-2021-3521.patch
+++ /dev/null
@@ -1,329 +0,0 @@
-From 07676ca03ad8afcf1ca95a2353c83fbb1d970b9b Mon Sep 17 00:00:00 2001
-From: Panu Matilainen <pmatilai@redhat.com>
-Date: Thu, 30 Sep 2021 09:59:30 +0300
-Subject: [PATCH 3/3] Validate and require subkey binding signatures on PGP
- public keys
-
-All subkeys must be followed by a binding signature by the primary key
-as per the OpenPGP RFC, enforce the presence and validity in the parser.
-
-The implementation is as kludgey as they come to work around our
-simple-minded parser structure without touching API, to maximise
-backportability. Store all the raw packets internally as we decode them
-to be able to access previous elements at will, needed to validate ordering
-and access the actual data. Add testcases for manipulated keys whose
-import previously would succeed.
-
-Depends on the two previous commits:
-7b399fcb8f52566e6f3b4327197a85facd08db91 and
-236b802a4aa48711823a191d1b7f753c82a89ec5
-
-Fixes CVE-2021-3521.
-
-Upstream-Status: Backport [https://github.com/rpm-software-management/rpm/commit/bd36c5dc9]
-CVE:CVE-2021-3521
-
-Signed-off-by: Changqing Li <changqing.li@windriver.com>
-
----
- rpmio/rpmpgp.c | 99 +++++++++++++++++--
- tests/Makefile.am | 3 +
- tests/data/keys/CVE-2021-3521-badbind.asc | 25 +++++
- .../data/keys/CVE-2021-3521-nosubsig-last.asc | 25 +++++
- tests/data/keys/CVE-2021-3521-nosubsig.asc | 37 +++++++
- tests/rpmsigdig.at | 28 ++++++
- 6 files changed, 209 insertions(+), 8 deletions(-)
- create mode 100644 tests/data/keys/CVE-2021-3521-badbind.asc
- create mode 100644 tests/data/keys/CVE-2021-3521-nosubsig-last.asc
- create mode 100644 tests/data/keys/CVE-2021-3521-nosubsig.asc
-
-diff --git a/rpmio/rpmpgp.c b/rpmio/rpmpgp.c
-index 509e777e6d..371ad4d9b6 100644
---- a/rpmio/rpmpgp.c
-+++ b/rpmio/rpmpgp.c
-@@ -1061,33 +1061,116 @@ static pgpDigParams pgpDigParamsNew(uint8_t tag)
- return digp;
- }
-
-+static int hashKey(DIGEST_CTX hash, const struct pgpPkt *pkt, int exptag)
-+{
-+ int rc = -1;
-+ if (pkt->tag == exptag) {
-+ uint8_t head[] = {
-+ 0x99,
-+ (pkt->blen >> 8),
-+ (pkt->blen ),
-+ };
-+
-+ rpmDigestUpdate(hash, head, 3);
-+ rpmDigestUpdate(hash, pkt->body, pkt->blen);
-+ rc = 0;
-+ }
-+ return rc;
-+}
-+
-+static int pgpVerifySelf(pgpDigParams key, pgpDigParams selfsig,
-+ const struct pgpPkt *all, int i)
-+{
-+ int rc = -1;
-+ DIGEST_CTX hash = NULL;
-+
-+ switch (selfsig->sigtype) {
-+ case PGPSIGTYPE_SUBKEY_BINDING:
-+ hash = rpmDigestInit(selfsig->hash_algo, 0);
-+ if (hash) {
-+ rc = hashKey(hash, &all[0], PGPTAG_PUBLIC_KEY);
-+ if (!rc)
-+ rc = hashKey(hash, &all[i-1], PGPTAG_PUBLIC_SUBKEY);
-+ }
-+ break;
-+ default:
-+ /* ignore types we can't handle */
-+ rc = 0;
-+ break;
-+ }
-+
-+ if (hash && rc == 0)
-+ rc = pgpVerifySignature(key, selfsig, hash);
-+
-+ rpmDigestFinal(hash, NULL, NULL, 0);
-+
-+ return rc;
-+}
-+
- int pgpPrtParams(const uint8_t * pkts, size_t pktlen, unsigned int pkttype,
- pgpDigParams * ret)
- {
- const uint8_t *p = pkts;
- const uint8_t *pend = pkts + pktlen;
- pgpDigParams digp = NULL;
-- struct pgpPkt pkt;
-+ pgpDigParams selfsig = NULL;
-+ int i = 0;
-+ int alloced = 16; /* plenty for normal cases */
-+ struct pgpPkt *all = xmalloc(alloced * sizeof(*all));
- int rc = -1; /* assume failure */
-+ int expect = 0;
-+ int prevtag = 0;
-
- while (p < pend) {
-- if (decodePkt(p, (pend - p), &pkt))
-+ struct pgpPkt *pkt = &all[i];
-+ if (decodePkt(p, (pend - p), pkt))
- break;
-
- if (digp == NULL) {
-- if (pkttype && pkt.tag != pkttype) {
-+ if (pkttype && pkt->tag != pkttype) {
- break;
- } else {
-- digp = pgpDigParamsNew(pkt.tag);
-+ digp = pgpDigParamsNew(pkt->tag);
- }
- }
-
-- if (pgpPrtPkt(&pkt, digp))
-+ if (expect) {
-+ if (pkt->tag != expect)
-+ break;
-+ selfsig = pgpDigParamsNew(pkt->tag);
-+ }
-+ if (pgpPrtPkt(pkt, selfsig ? selfsig : digp))
- break;
-
-- p += (pkt.body - pkt.head) + pkt.blen;
-- if (pkttype == PGPTAG_SIGNATURE)
-- break;
-+ if (selfsig) {
-+ /* subkeys must be followed by binding signature */
-+ if (prevtag == PGPTAG_PUBLIC_SUBKEY) {
-+ if (selfsig->sigtype != PGPSIGTYPE_SUBKEY_BINDING)
-+ break;
-+ }
-+
-+ int xx = pgpVerifySelf(digp, selfsig, all, i);
-+
-+ selfsig = pgpDigParamsFree(selfsig);
-+ if (xx)
-+ break;
-+ expect = 0;
-+ }
-+
-+ if (pkt->tag == PGPTAG_PUBLIC_SUBKEY)
-+ expect = PGPTAG_SIGNATURE;
-+ prevtag = pkt->tag;
-+
-+ i++;
-+ p += (pkt->body - pkt->head) + pkt->blen;
-+ if (pkttype == PGPTAG_SIGNATURE)
-+ break;
-+
-+ if (alloced <= i) {
-+ alloced *= 2;
-+ all = xrealloc(all, alloced * sizeof(*all));
-+ }
-+
- }
-
- rc = (digp && (p == pend)) ? 0 : -1;
-diff --git a/tests/Makefile.am b/tests/Makefile.am
-index a41ce10de8..7bb23247f1 100644
---- a/tests/Makefile.am
-+++ b/tests/Makefile.am
-@@ -107,6 +107,9 @@ EXTRA_DIST += data/SPECS/hello-config-buildid.spec
- EXTRA_DIST += data/SPECS/hello-cd.spec
- EXTRA_DIST += data/keys/rpm.org-rsa-2048-test.pub
- EXTRA_DIST += data/keys/rpm.org-rsa-2048-test.secret
-+EXTRA_DIST += data/keys/CVE-2021-3521-badbind.asc
-+EXTRA_DIST += data/keys/CVE-2022-3521-nosubsig.asc
-+EXTRA_DIST += data/keys/CVE-2022-3521-nosubsig-last.asc
- EXTRA_DIST += data/macros.testfile
- EXTRA_DIST += data/macros.debug
- EXTRA_DIST += data/SOURCES/foo.c
-diff --git a/tests/data/keys/CVE-2021-3521-badbind.asc b/tests/data/keys/CVE-2021-3521-badbind.asc
-new file mode 100644
-index 0000000000..aea00f9d7a
---- /dev/null
-+++ b/tests/data/keys/CVE-2021-3521-badbind.asc
-@@ -0,0 +1,25 @@
-+-----BEGIN PGP PUBLIC KEY BLOCK-----
-+Version: rpm-4.17.90 (NSS-3)
-+
-+mQENBFjmORgBCAC7TMEk6wnjSs8Dr4yqSScWdU2pjcqrkTxuzdWvowcIUPZI0w/g
-+HkRqGd4apjvY2V15kjL10gk3QhFP3pZ/9p7zh8o8NHX7aGdSGDK7NOq1eFaErPRY
-+91LW9RiZ0lbOjXEzIL0KHxUiTQEmdXJT43DJMFPyW9fkCWg0OltiX618FUdWWfI8
-+eySdLur1utnqBvdEbCUvWK2RX3vQZQdvEBODnNk2pxqTyV0w6VPQ96W++lF/5Aas
-+7rUv3HIyIXxIggc8FRrnH+y9XvvHDonhTIlGnYZN4ubm9i4y3gOkrZlGTrEw7elQ
-+1QeMyG2QQEbze8YjpTm4iLABCBrRfPRaQpwrABEBAAG0IXJwbS5vcmcgUlNBIHRl
-+c3RrZXkgPHJzYUBycG0ub3JnPokBNwQTAQgAIQUCWOY5GAIbAwULCQgHAgYVCAkK
-+CwIEFgIDAQIeAQIXgAAKCRBDRFkeGWTF/MxxCACnjqFL+MmPh9W9JQKT2DcLbBzf
-+Cqo6wcEBoCOcwgRSk8dSikhARoteoa55JRJhuMyeKhhEAogE9HRmCPFdjezFTwgB
-+BDVBpO2dZ023mLXDVCYX3S8pShOgCP6Tn4wqCnYeAdLcGg106N4xcmgtcssJE+Pr
-+XzTZksbZsrTVEmL/Ym+R5w5jBfFnGk7Yw7ndwfQsfNXQb5AZynClFxnX546lcyZX
-+fEx3/e6ezw57WNOUK6WT+8b+EGovPkbetK/rGxNXuWaP6X4A/QUm8O98nCuHYFQq
-++mvNdsCBqGf7mhaRGtpHk/JgCn5rFvArMDqLVrR9hX0LdCSsH7EGE+bR3r7wuQEN
-+BFjmORgBCACk+vDZrIXQuFXEYToZVwb2attzbbJJCqD71vmZTLsW0QxuPKRgbcYY
-+zp4K4lVBnHhFrF8MOUOxJ7kQWIJZMZFt+BDcptCYurbD2H4W2xvnWViiC+LzCMzz
-+iMJT6165uefL4JHTDPxC2fFiM9yrc72LmylJNkM/vepT128J5Qv0gRUaQbHiQuS6
-+Dm/+WRnUfx3i89SV4mnBxb/Ta93GVqoOciWwzWSnwEnWYAvOb95JL4U7c5J5f/+c
-+KnQDHsW7sIiIdscsWzvgf6qs2Ra1Zrt7Fdk4+ZS2f/adagLhDO1C24sXf5XfMk5m
-+L0OGwZSr9m5s17VXxfspgU5ugc8kBJfzABEBAAE=
-+=WCfs
-+-----END PGP PUBLIC KEY BLOCK-----
-+
-diff --git a/tests/data/keys/CVE-2021-3521-nosubsig-last.asc b/tests/data/keys/CVE-2021-3521-nosubsig-last.asc
-new file mode 100644
-index 0000000000..aea00f9d7a
---- /dev/null
-+++ b/tests/data/keys/CVE-2021-3521-nosubsig-last.asc
-@@ -0,0 +1,25 @@
-+-----BEGIN PGP PUBLIC KEY BLOCK-----
-+Version: rpm-4.17.90 (NSS-3)
-+
-+mQENBFjmORgBCAC7TMEk6wnjSs8Dr4yqSScWdU2pjcqrkTxuzdWvowcIUPZI0w/g
-+HkRqGd4apjvY2V15kjL10gk3QhFP3pZ/9p7zh8o8NHX7aGdSGDK7NOq1eFaErPRY
-+91LW9RiZ0lbOjXEzIL0KHxUiTQEmdXJT43DJMFPyW9fkCWg0OltiX618FUdWWfI8
-+eySdLur1utnqBvdEbCUvWK2RX3vQZQdvEBODnNk2pxqTyV0w6VPQ96W++lF/5Aas
-+7rUv3HIyIXxIggc8FRrnH+y9XvvHDonhTIlGnYZN4ubm9i4y3gOkrZlGTrEw7elQ
-+1QeMyG2QQEbze8YjpTm4iLABCBrRfPRaQpwrABEBAAG0IXJwbS5vcmcgUlNBIHRl
-+c3RrZXkgPHJzYUBycG0ub3JnPokBNwQTAQgAIQUCWOY5GAIbAwULCQgHAgYVCAkK
-+CwIEFgIDAQIeAQIXgAAKCRBDRFkeGWTF/MxxCACnjqFL+MmPh9W9JQKT2DcLbBzf
-+Cqo6wcEBoCOcwgRSk8dSikhARoteoa55JRJhuMyeKhhEAogE9HRmCPFdjezFTwgB
-+BDVBpO2dZ023mLXDVCYX3S8pShOgCP6Tn4wqCnYeAdLcGg106N4xcmgtcssJE+Pr
-+XzTZksbZsrTVEmL/Ym+R5w5jBfFnGk7Yw7ndwfQsfNXQb5AZynClFxnX546lcyZX
-+fEx3/e6ezw57WNOUK6WT+8b+EGovPkbetK/rGxNXuWaP6X4A/QUm8O98nCuHYFQq
-++mvNdsCBqGf7mhaRGtpHk/JgCn5rFvArMDqLVrR9hX0LdCSsH7EGE+bR3r7wuQEN
-+BFjmORgBCACk+vDZrIXQuFXEYToZVwb2attzbbJJCqD71vmZTLsW0QxuPKRgbcYY
-+zp4K4lVBnHhFrF8MOUOxJ7kQWIJZMZFt+BDcptCYurbD2H4W2xvnWViiC+LzCMzz
-+iMJT6165uefL4JHTDPxC2fFiM9yrc72LmylJNkM/vepT128J5Qv0gRUaQbHiQuS6
-+Dm/+WRnUfx3i89SV4mnBxb/Ta93GVqoOciWwzWSnwEnWYAvOb95JL4U7c5J5f/+c
-+KnQDHsW7sIiIdscsWzvgf6qs2Ra1Zrt7Fdk4+ZS2f/adagLhDO1C24sXf5XfMk5m
-+L0OGwZSr9m5s17VXxfspgU5ugc8kBJfzABEBAAE=
-+=WCfs
-+-----END PGP PUBLIC KEY BLOCK-----
-+
-diff --git a/tests/data/keys/CVE-2021-3521-nosubsig.asc b/tests/data/keys/CVE-2021-3521-nosubsig.asc
-new file mode 100644
-index 0000000000..3a2e7417f8
---- /dev/null
-+++ b/tests/data/keys/CVE-2021-3521-nosubsig.asc
-@@ -0,0 +1,37 @@
-+-----BEGIN PGP PUBLIC KEY BLOCK-----
-+Version: rpm-4.17.90 (NSS-3)
-+
-+mQENBFjmORgBCAC7TMEk6wnjSs8Dr4yqSScWdU2pjcqrkTxuzdWvowcIUPZI0w/g
-+HkRqGd4apjvY2V15kjL10gk3QhFP3pZ/9p7zh8o8NHX7aGdSGDK7NOq1eFaErPRY
-+91LW9RiZ0lbOjXEzIL0KHxUiTQEmdXJT43DJMFPyW9fkCWg0OltiX618FUdWWfI8
-+eySdLur1utnqBvdEbCUvWK2RX3vQZQdvEBODnNk2pxqTyV0w6VPQ96W++lF/5Aas
-+7rUv3HIyIXxIggc8FRrnH+y9XvvHDonhTIlGnYZN4ubm9i4y3gOkrZlGTrEw7elQ
-+1QeMyG2QQEbze8YjpTm4iLABCBrRfPRaQpwrABEBAAG0IXJwbS5vcmcgUlNBIHRl
-+c3RrZXkgPHJzYUBycG0ub3JnPokBNwQTAQgAIQUCWOY5GAIbAwULCQgHAgYVCAkK
-+CwIEFgIDAQIeAQIXgAAKCRBDRFkeGWTF/MxxCACnjqFL+MmPh9W9JQKT2DcLbBzf
-+Cqo6wcEBoCOcwgRSk8dSikhARoteoa55JRJhuMyeKhhEAogE9HRmCPFdjezFTwgB
-+BDVBpO2dZ023mLXDVCYX3S8pShOgCP6Tn4wqCnYeAdLcGg106N4xcmgtcssJE+Pr
-+XzTZksbZsrTVEmL/Ym+R5w5jBfFnGk7Yw7ndwfQsfNXQb5AZynClFxnX546lcyZX
-+fEx3/e6ezw57WNOUK6WT+8b+EGovPkbetK/rGxNXuWaP6X4A/QUm8O98nCuHYFQq
-++mvNdsCBqGf7mhaRGtpHk/JgCn5rFvArMDqLVrR9hX0LdCSsH7EGE+bR3r7wuQEN
-+BFjmORgBCACk+vDZrIXQuFXEYToZVwb2attzbbJJCqD71vmZTLsW0QxuPKRgbcYY
-+zp4K4lVBnHhFrF8MOUOxJ7kQWIJZMZFt+BDcptCYurbD2H4W2xvnWViiC+LzCMzz
-+iMJT6165uefL4JHTDPxC2fFiM9yrc72LmylJNkM/vepT128J5Qv0gRUaQbHiQuS6
-+Dm/+WRnUfx3i89SV4mnBxb/Ta93GVqoOciWwzWSnwEnWYAvOb95JL4U7c5J5f/+c
-+KnQDHsW7sIiIdscsWzvgf6qs2Ra1Zrt7Fdk4+ZS2f/adagLhDO1C24sXf5XfMk5m
-+L0OGwZSr9m5s17VXxfspgU5ugc8kBJfzABEBAAG5AQ0EWOY5GAEIAKT68NmshdC4
-+VcRhOhlXBvZq23NtskkKoPvW+ZlMuxbRDG48pGBtxhjOngriVUGceEWsXww5Q7En
-+uRBYglkxkW34ENym0Ji6tsPYfhbbG+dZWKIL4vMIzPOIwlPrXrm558vgkdMM/ELZ
-+8WIz3KtzvYubKUk2Qz+96lPXbwnlC/SBFRpBseJC5LoOb/5ZGdR/HeLz1JXiacHF
-+v9Nr3cZWqg5yJbDNZKfASdZgC85v3kkvhTtzknl//5wqdAMexbuwiIh2xyxbO+B/
-+qqzZFrVmu3sV2Tj5lLZ/9p1qAuEM7ULbixd/ld8yTmYvQ4bBlKv2bmzXtVfF+ymB
-+Tm6BzyQEl/MAEQEAAYkBHwQYAQgACQUCWOY5GAIbDAAKCRBDRFkeGWTF/PANB/9j
-+mifmj6z/EPe0PJFhrpISt9PjiUQCt0IPtiL5zKAkWjHePIzyi+0kCTBF6DDLFxos
-+3vN4bWnVKT1kBhZAQlPqpJTg+m74JUYeDGCdNx9SK7oRllATqyu+5rncgxjWVPnQ
-+zu/HRPlWJwcVFYEVXYL8xzfantwQTqefjmcRmBRdA2XJITK+hGWwAmrqAWx+q5xX
-+Pa8wkNMxVzNS2rUKO9SoVuJ/wlUvfoShkJ/VJ5HDp3qzUqncADfdGN35TDzscngQ
-+gHvnMwVBfYfSCABV1hNByoZcc/kxkrWMmsd/EnIyLd1Q1baKqc3cEDuC6E6/o4yJ
-+E4XX4jtDmdZPreZALsiB
-+=rRop
-+-----END PGP PUBLIC KEY BLOCK-----
-+
-diff --git a/tests/rpmsigdig.at b/tests/rpmsigdig.at
-index 8e7c759b8f..e2d30a7f1b 100644
---- a/tests/rpmsigdig.at
-+++ b/tests/rpmsigdig.at
-@@ -2,6 +2,34 @@
-
- AT_BANNER([RPM signatures and digests])
-
-+AT_SETUP([rpmkeys --import invalid keys])
-+AT_KEYWORDS([rpmkeys import])
-+RPMDB_INIT
-+
-+AT_CHECK([
-+runroot rpmkeys --import /data/keys/CVE-2021-3521-badbind.asc
-+],
-+[1],
-+[],
-+[error: /data/keys/CVE-2021-3521-badbind.asc: key 1 import failed.]
-+)
-+AT_CHECK([
-+runroot rpmkeys --import /data/keys/CVE-2021-3521-nosubsig.asc
-+],
-+[1],
-+[],
-+[error: /data/keys/CVE-2021-3521-nosubsig.asc: key 1 import failed.]
-+)
-+
-+AT_CHECK([
-+runroot rpmkeys --import /data/keys/CVE-2021-3521-nosubsig-last.asc
-+],
-+[1],
-+[],
-+[error: /data/keys/CVE-2021-3521-nosubsig-last.asc: key 1 import failed.]
-+)
-+AT_CLEANUP
-+
- # ------------------------------
- # Test pre-built package verification
- AT_SETUP([rpmkeys -Kv <unsigned> 1])
---
-2.17.1
-
diff --git a/meta/recipes-devtools/rpm/files/0016-rpmscript.c-change-logging-level-around-scriptlets-t.patch b/meta/recipes-devtools/rpm/files/0016-rpmscript.c-change-logging-level-around-scriptlets-t.patch
index 43e9859ef3..732202c46f 100644
--- a/meta/recipes-devtools/rpm/files/0016-rpmscript.c-change-logging-level-around-scriptlets-t.patch
+++ b/meta/recipes-devtools/rpm/files/0016-rpmscript.c-change-logging-level-around-scriptlets-t.patch
@@ -1,4 +1,4 @@
-From 989e425d416474c191b020d0825895e3df4bd033 Mon Sep 17 00:00:00 2001
+From f01d9c24bb86bc47ad2453483518dbb25953cac7 Mon Sep 17 00:00:00 2001
From: Alexander Kanavin <alex.kanavin@gmail.com>
Date: Thu, 10 Jan 2019 18:14:18 +0100
Subject: [PATCH] rpmscript.c: change logging level around scriptlets to INFO
@@ -14,10 +14,10 @@ Signed-off-by: Alexander Kanavin <alex.kanavin@gmail.com>
1 file changed, 4 insertions(+), 4 deletions(-)
diff --git a/lib/rpmscript.c b/lib/rpmscript.c
-index 2b0e43862..e319673f1 100644
+index 7b7e26606..1dcd23be5 100644
--- a/lib/rpmscript.c
+++ b/lib/rpmscript.c
-@@ -226,7 +226,7 @@ static char * writeScript(const char *cmd, const char *script)
+@@ -291,7 +291,7 @@ static char * writeScript(const char *cmd, const char *script)
if (Ferror(fd))
goto exit;
@@ -26,7 +26,7 @@ index 2b0e43862..e319673f1 100644
static const char set_x[] = "set -x\n";
/* Assume failures will be caught by the write below */
Fwrite(set_x, sizeof(set_x[0]), sizeof(set_x)-1, fd);
-@@ -258,7 +258,7 @@ static rpmRC runExtScript(rpmPlugins plugins, ARGV_const_t prefixes,
+@@ -323,7 +323,7 @@ static rpmRC runExtScript(rpmPlugins plugins, ARGV_const_t prefixes,
char *mline = NULL;
rpmRC rc = RPMRC_FAIL;
@@ -35,7 +35,7 @@ index 2b0e43862..e319673f1 100644
if (script) {
fn = writeScript(*argvp[0], script);
-@@ -310,7 +310,7 @@ static rpmRC runExtScript(rpmPlugins plugins, ARGV_const_t prefixes,
+@@ -375,7 +375,7 @@ static rpmRC runExtScript(rpmPlugins plugins, ARGV_const_t prefixes,
sname, strerror(errno));
goto exit;
} else if (pid == 0) {/* Child */
@@ -44,7 +44,7 @@ index 2b0e43862..e319673f1 100644
sname, *argvp[0], (unsigned)getpid());
fclose(in);
-@@ -353,7 +353,7 @@ static rpmRC runExtScript(rpmPlugins plugins, ARGV_const_t prefixes,
+@@ -418,7 +418,7 @@ static rpmRC runExtScript(rpmPlugins plugins, ARGV_const_t prefixes,
reaped = waitpid(pid, &status, 0);
} while (reaped == -1 && errno == EINTR);
diff --git a/meta/recipes-devtools/rpm/files/environment.d-rpm.sh b/meta/recipes-devtools/rpm/files/environment.d-rpm.sh
deleted file mode 100644
index 9b669a18d1..0000000000
--- a/meta/recipes-devtools/rpm/files/environment.d-rpm.sh
+++ /dev/null
@@ -1 +0,0 @@
-export RPM_CONFIGDIR="$OECORE_NATIVE_SYSROOT/usr/lib/rpm"
diff --git a/meta/recipes-devtools/rpm/rpm_4.17.0.bb b/meta/recipes-devtools/rpm/rpm_4.17.0.bb
deleted file mode 100644
index c392ac0db4..0000000000
--- a/meta/recipes-devtools/rpm/rpm_4.17.0.bb
+++ /dev/null
@@ -1,208 +0,0 @@
-SUMMARY = "The RPM package management system"
-DESCRIPTION = "The RPM Package Manager (RPM) is a powerful command line driven \
-package management system capable of installing, uninstalling, \
-verifying, querying, and updating software packages. Each software \
-package consists of an archive of files along with information about \
-the package like its version, a description, etc."
-
-SUMMARY:${PN}-dev = "Development files for manipulating RPM packages"
-DESCRIPTION:${PN}-dev = "This package contains the RPM C library and header files. These \
-development files will simplify the process of writing programs that \
-manipulate RPM packages and databases. These files are intended to \
-simplify the process of creating graphical package managers or any \
-other tools that need an intimate knowledge of RPM packages in order \
-to function."
-
-SUMMARY:python3-rpm = "Python bindings for apps which will manupulate RPM packages"
-DESCRIPTION:python3-rpm = "The python3-rpm package contains a module that permits applications \
-written in the Python programming language to use the interface \
-supplied by the RPM Package Manager libraries."
-
-HOMEPAGE = "http://www.rpm.org"
-
-# libraries are also LGPL - how to express this?
-LICENSE = "GPL-2.0-only"
-LIC_FILES_CHKSUM = "file://COPYING;md5=c4eec0c20c6034b9407a09945b48a43f"
-
-SRC_URI = "git://github.com/rpm-software-management/rpm;branch=rpm-4.17.x;protocol=https \
- file://environment.d-rpm.sh \
- file://0001-Do-not-add-an-unsatisfiable-dependency-when-building.patch \
- file://0001-Do-not-read-config-files-from-HOME.patch \
- file://0001-When-cross-installing-execute-package-scriptlets-wit.patch \
- file://0001-Do-not-reset-the-PATH-environment-variable-before-ru.patch \
- file://0002-Add-support-for-prefixing-etc-from-RPM_ETCCONFIGDIR-.patch \
- file://0001-Do-not-hardcode-lib-rpm-as-the-installation-path-for.patch \
- file://0001-Add-a-color-setting-for-mips64_n32-binaries.patch \
- file://0001-perl-disable-auto-reqs.patch \
- file://0016-rpmscript.c-change-logging-level-around-scriptlets-t.patch \
- file://0001-lib-transaction.c-fix-file-conflicts-for-MIPS64-N32.patch \
- file://0001-tools-Add-error.h-for-non-glibc-case.patch \
- file://0001-docs-do-not-build-manpages-requires-pandoc.patch \
- file://0001-build-pack.c-do-not-insert-payloadflags-into-.rpm-me.patch \
- file://0001-CVE-2021-3521.patch \
- file://0002-CVE-2021-3521.patch \
- file://0003-CVE-2021-3521.patch \
- "
-
-PE = "1"
-SRCREV = "3e74e8ba2dd5e76a5353d238dc7fc38651ce27b3"
-
-S = "${WORKDIR}/git"
-
-DEPENDS = "lua libgcrypt file popt xz bzip2 elfutils python3"
-DEPENDS:append:class-native = " file-replacement-native bzip2-replacement-native"
-
-inherit autotools gettext pkgconfig python3native
-export PYTHON_ABI
-
-AUTOTOOLS_AUXDIR = "${S}/build-aux"
-
-# OE-core patches autoreconf to additionally run gnu-configize, which fails with this recipe
-EXTRA_AUTORECONF:append = " --exclude=gnu-configize"
-
-# Vendor is detected differently on x86 and aarch64 hosts and can feed into target packages
-EXTRA_OECONF:append = " --enable-python --with-crypto=libgcrypt --with-vendor=pc"
-EXTRA_OECONF:append:libc-musl = " --disable-nls --disable-openmp"
-
-# --sysconfdir prevents rpm from attempting to access machine-specific configuration in sysroot/etc; we need to have it in rootfs
-# --localstatedir prevents rpm from writing its database to native sysroot when building images
-# Forcibly disable plugins for native/nativesdk, as the inhibit and prioreset
-# plugins both behave badly inside builds.
-EXTRA_OECONF:append:class-native = " --sysconfdir=/etc --localstatedir=/var --disable-plugins"
-EXTRA_OECONF:append:class-nativesdk = " --sysconfdir=/etc --disable-plugins"
-
-BBCLASSEXTEND = "native nativesdk"
-
-PACKAGECONFIG ??= "${@bb.utils.contains('DISTRO_FEATURES', 'systemd', 'inhibit', '', d)} sqlite zstd"
-# The inhibit plugin serves no purpose outside of the target
-PACKAGECONFIG:remove:class-native = "inhibit"
-PACKAGECONFIG:remove:class-nativesdk = "inhibit"
-
-PACKAGECONFIG[imaevm] = "--with-imaevm,,ima-evm-utils"
-PACKAGECONFIG[inhibit] = "--enable-inhibit-plugin,--disable-inhibit-plugin,dbus"
-PACKAGECONFIG[rpm2archive] = "--with-archive,--without-archive,libarchive"
-PACKAGECONFIG[sqlite] = "--enable-sqlite=yes,--enable-sqlite=no,sqlite3"
-PACKAGECONFIG[ndb] = "--enable-ndb,--disable-ndb"
-PACKAGECONFIG[bdb-ro] = "--enable-bdb-ro,--disable-bdb-ro"
-PACKAGECONFIG[zstd] = "--enable-zstd=yes,--enable-zstd=no,zstd"
-
-ASNEEDED = ""
-
-# Direct rpm-native to read configuration from our sysroot, not the one it was compiled in
-# libmagic also has sysroot path contamination, so override it
-
-WRAPPER_TOOLS = " \
- ${bindir}/rpm \
- ${bindir}/rpm2archive \
- ${bindir}/rpm2cpio \
- ${bindir}/rpmbuild \
- ${bindir}/rpmdb \
- ${bindir}/rpmgraph \
- ${bindir}/rpmkeys \
- ${bindir}/rpmsign \
- ${bindir}/rpmspec \
- ${libdir}/rpm/rpmdeps \
-"
-
-do_configure:prepend() {
- mkdir -p ${S}/build-aux
-}
-
-do_install:append:class-native() {
- for tool in ${WRAPPER_TOOLS}; do
- test -x ${D}$tool && create_wrapper ${D}$tool \
- RPM_CONFIGDIR=${STAGING_LIBDIR_NATIVE}/rpm \
- RPM_ETCCONFIGDIR=${STAGING_DIR_NATIVE} \
- MAGIC=${STAGING_DIR_NATIVE}${datadir_native}/misc/magic.mgc \
- RPM_NO_CHROOT_FOR_SCRIPTS=1
- done
-}
-
-do_install:append:class-nativesdk() {
- for tool in ${WRAPPER_TOOLS}; do
- test -x ${D}$tool && create_wrapper ${D}$tool \
- RPM_CONFIGDIR='`dirname $''realpath`'/${@os.path.relpath(d.getVar('libdir'), d.getVar('bindir'))}/rpm \
- RPM_ETCCONFIGDIR='$'{RPM_ETCCONFIGDIR-'`dirname $''realpath`'/${@os.path.relpath(d.getVar('sysconfdir'), d.getVar('bindir'))}/..} \
- MAGIC='`dirname $''realpath`'/${@os.path.relpath(d.getVar('datadir'), d.getVar('bindir'))}/misc/magic.mgc \
- RPM_NO_CHROOT_FOR_SCRIPTS=1
- done
-
- rm -rf ${D}/var
-
- mkdir -p ${D}${SDKPATHNATIVE}/environment-setup.d
- install -m 644 ${WORKDIR}/environment.d-rpm.sh ${D}${SDKPATHNATIVE}/environment-setup.d/rpm.sh
-}
-
-# Rpm's make install creates var/tmp which clashes with base-files packaging
-do_install:append:class-target() {
- rm -rf ${D}/var
-}
-do_install:append:class-nativesdk() {
- rm -rf ${D}${SDKPATHNATIVE}/var
-}
-
-do_install:append () {
- sed -i -e 's:${HOSTTOOLS_DIR}/::g' \
- ${D}/${libdir}/rpm/macros
-
-}
-
-FILES:${PN} += "${libdir}/rpm-plugins/*.so \
- "
-FILES:${PN}:append:class-nativesdk = " ${SDKPATHNATIVE}/environment-setup.d/rpm.sh"
-
-FILES:${PN}-dev += "${libdir}/rpm-plugins/*.la \
- "
-PACKAGE_BEFORE_PN += "${PN}-build ${PN}-sign ${PN}-archive"
-
-RRECOMMENDS:${PN} += "rpm-sign rpm-archive"
-
-FILES:${PN}-build = "\
- ${bindir}/rpmbuild \
- ${bindir}/gendiff \
- ${bindir}/rpmspec \
- ${libdir}/librpmbuild.so.* \
- ${libdir}/rpm/brp-* \
- ${libdir}/rpm/check-* \
- ${libdir}/rpm/debugedit \
- ${libdir}/rpm/sepdebugcrcfix \
- ${libdir}/rpm/find-debuginfo.sh \
- ${libdir}/rpm/find-lang.sh \
- ${libdir}/rpm/*provides* \
- ${libdir}/rpm/*requires* \
- ${libdir}/rpm/*deps* \
- ${libdir}/rpm/*.prov \
- ${libdir}/rpm/*.req \
- ${libdir}/rpm/config.* \
- ${libdir}/rpm/mkinstalldirs \
- ${libdir}/rpm/macros.p* \
- ${libdir}/rpm/fileattrs/* \
-"
-
-FILES:${PN}-sign = "\
- ${bindir}/rpmsign \
- ${libdir}/librpmsign.so.* \
-"
-
-FILES:${PN}-archive = "\
- ${bindir}/rpm2archive \
-"
-
-PACKAGES += "python3-rpm"
-PROVIDES += "python3-rpm"
-FILES:python3-rpm = "${PYTHON_SITEPACKAGES_DIR}/rpm/*"
-
-RDEPENDS:${PN}-build = "bash perl python3-core"
-
-PACKAGE_PREPROCESS_FUNCS += "rpm_package_preprocess"
-
-# Do not specify a sysroot when compiling on a target.
-rpm_package_preprocess () {
- sed -i -e 's:--sysroot[^ ]*::g' \
- ${PKGD}/${libdir}/rpm/macros
-}
-
-SSTATE_HASHEQUIV_FILEMAP = " \
- populate_sysroot:*/rpm/macros:${TMPDIR} \
- populate_sysroot:*/rpm/macros:${COREBASE} \
- "
diff --git a/meta/recipes-devtools/rpm/rpm_4.19.1.1.bb b/meta/recipes-devtools/rpm/rpm_4.19.1.1.bb
new file mode 100644
index 0000000000..0802f26295
--- /dev/null
+++ b/meta/recipes-devtools/rpm/rpm_4.19.1.1.bb
@@ -0,0 +1,197 @@
+SUMMARY = "The RPM package management system"
+DESCRIPTION = "The RPM Package Manager (RPM) is a powerful command line driven \
+package management system capable of installing, uninstalling, \
+verifying, querying, and updating software packages. Each software \
+package consists of an archive of files along with information about \
+the package like its version, a description, etc."
+
+SUMMARY:${PN}-dev = "Development files for manipulating RPM packages"
+DESCRIPTION:${PN}-dev = "This package contains the RPM C library and header files. These \
+development files will simplify the process of writing programs that \
+manipulate RPM packages and databases. These files are intended to \
+simplify the process of creating graphical package managers or any \
+other tools that need an intimate knowledge of RPM packages in order \
+to function."
+
+SUMMARY:python3-rpm = "Python bindings for apps which will manupulate RPM packages"
+DESCRIPTION:python3-rpm = "The python3-rpm package contains a module that permits applications \
+written in the Python programming language to use the interface \
+supplied by the RPM Package Manager libraries."
+
+HOMEPAGE = "http://www.rpm.org"
+
+# libraries are also LGPL - how to express this?
+LICENSE = "GPL-2.0-only"
+LIC_FILES_CHKSUM = "file://COPYING;md5=c4eec0c20c6034b9407a09945b48a43f"
+
+SRC_URI = "git://github.com/rpm-software-management/rpm;branch=rpm-4.19.x;protocol=https \
+ file://0001-Do-not-add-an-unsatisfiable-dependency-when-building.patch \
+ file://0001-Do-not-read-config-files-from-HOME.patch \
+ file://0001-When-cross-installing-execute-package-scriptlets-wit.patch \
+ file://0001-Do-not-reset-the-PATH-environment-variable-before-ru.patch \
+ file://0002-Add-support-for-prefixing-etc-from-RPM_ETCCONFIGDIR-.patch \
+ file://0001-Do-not-hardcode-lib-rpm-as-the-installation-path-for.patch \
+ file://0001-Add-a-color-setting-for-mips64_n32-binaries.patch \
+ file://0001-perl-disable-auto-reqs.patch \
+ file://0016-rpmscript.c-change-logging-level-around-scriptlets-t.patch \
+ file://0001-lib-transaction.c-fix-file-conflicts-for-MIPS64-N32.patch \
+ file://0001-build-pack.c-do-not-insert-payloadflags-into-.rpm-me.patch \
+ file://0001-CMakeLists.txt-look-for-lua-with-pkg-config-rather-t.patch \
+ file://0002-rpmio-rpmglob.c-avoid-using-GLOB_BRACE-if-undefined-.patch \
+ "
+
+PE = "1"
+SRCREV = "13b4521341781293c41ac898aa9c2d2f6bc1f21d"
+
+S = "${WORKDIR}/git"
+
+DEPENDS = "lua libgcrypt file popt xz bzip2 elfutils python3 sqlite3 zstd"
+DEPENDS:append:class-native = " file-replacement-native bzip2-replacement-native"
+
+EXTRA_OECMAKE:append = " -D__CURL:FILEPATH=curl"
+EXTRA_OECMAKE:append:libc-musl = " -DENABLE_NLS=OFF -DENABLE_OPENMP=OFF"
+
+# --sysconfdir prevents rpm from attempting to access machine-specific configuration in sysroot/etc; we need to have it in rootfs
+# --localstatedir prevents rpm from writing its database to native sysroot when building images
+EXTRA_OECMAKE:append:class-native = " -DCMAKE_INSTALL_SYSCONFDIR:PATH=/etc -DCMAKE_INSTALL_LOCALSTATEDIR:PATH=/var"
+EXTRA_OECMAKE:append:class-nativesdk = " -DCMAKE_INSTALL_SYSCONFDIR:PATH=/etc -DCMAKE_INSTALL_FULL_SYSCONFDIR=/etc"
+
+inherit cmake gettext pkgconfig python3targetconfig
+OECMAKE_GENERATOR = "Unix Makefiles"
+
+BBCLASSEXTEND = "native nativesdk"
+
+PACKAGECONFIG ??= "internal-openpgp"
+
+PACKAGECONFIG[plugins] = "-DENABLE_PLUGINS=ON,-DENABLE_PLUGINS=OFF"
+PACKAGECONFIG[testsuite] = "-DENABLE_TESTSUITE=ON,-DENABLE_TESTSUITE=OFF"
+
+# Deprecated! https://fedoraproject.org/wiki/Changes/RpmSequoia
+PACKAGECONFIG[internal-openpgp] = "-DWITH_INTERNAL_OPENPGP=ON,-DWITH_INTERNAL_OPENPGP=OFF"
+
+PACKAGECONFIG[cap] = "-DWITH_CAP=ON,-DWITH_CAP=OFF"
+PACKAGECONFIG[acl] = "-DWITH_ACL=ON,-DWITH_ACL=OFF"
+PACKAGECONFIG[archive] = "-DWITH_ARCHIVE=ON,-DWITH_ARCHIVE=OFF,libarchive"
+PACKAGECONFIG[selinux] = "-DWITH_SELINUX=ON,-DWITH_SELINUX=OFF,libselinux"
+PACKAGECONFIG[dbus] = "-DWITH_DBUS=ON,-DWITH_DBUS=OFF"
+PACKAGECONFIG[audit] = "-DWITH_AUDIT=ON,-DWITH_AUDIT=OFF,audit"
+PACKAGECONFIG[fsverity] = "-DWITH_FSVERITY=ON,-DWITH_FSVERITY=OFF"
+PACKAGECONFIG[imaevm] = "-DWITH_IMAEVM=ON,-DWITH_IMAEVM=OFF,ima-evm-utils"
+PACKAGECONFIG[fapolicyd] = "-DWITH_FAPOLICYD=ON,-DWITH_FAPOLICYD=OFF"
+PACKAGECONFIG[readline] = "-DWITH_READLINE=ON,-DWITH_READLINE=OFF,readline"
+
+# Direct rpm-native to read configuration from our sysroot, not the one it was compiled in
+# libmagic also has sysroot path contamination, so override it
+
+WRAPPER_TOOLS = " \
+ ${bindir}/rpm \
+ ${bindir}/rpm2archive \
+ ${bindir}/rpm2cpio \
+ ${bindir}/rpmbuild \
+ ${bindir}/rpmdb \
+ ${bindir}/rpmgraph \
+ ${bindir}/rpmkeys \
+ ${bindir}/rpmsign \
+ ${bindir}/rpmspec \
+ ${libdir}/rpm/rpmdeps \
+"
+
+do_install:append:class-native() {
+ for tool in ${WRAPPER_TOOLS}; do
+ test -x ${D}$tool && create_wrapper ${D}$tool \
+ RPM_CONFIGDIR=${STAGING_LIBDIR_NATIVE}/rpm \
+ RPM_ETCCONFIGDIR=${STAGING_DIR_NATIVE} \
+ MAGIC=${STAGING_DIR_NATIVE}${datadir_native}/misc/magic.mgc \
+ RPM_NO_CHROOT_FOR_SCRIPTS=1
+ done
+}
+
+do_install:append:class-nativesdk() {
+ rm -rf ${D}/var
+
+ mkdir -p ${D}${SDKPATHNATIVE}/environment-setup.d
+ cat <<- EOF > ${D}${SDKPATHNATIVE}/environment-setup.d/rpm.sh
+ export RPM_CONFIGDIR="${libdir}/rpm"
+ export RPM_ETCCONFIGDIR="${SDKPATHNATIVE}"
+ export RPM_NO_CHROOT_FOR_SCRIPTS=1
+ EOF
+}
+
+# Rpm's make install creates var/tmp which clashes with base-files packaging
+do_install:append:class-target() {
+ rm -rf ${D}/var
+}
+do_install:append:class-nativesdk() {
+ rm -rf ${D}${SDKPATHNATIVE}/var
+ # Ensure find-debuginfo is located correctly inside SDK
+ mkdir -p ${D}${libdir}/rpm
+ echo "%__find_debuginfo ${SDKPATHNATIVE}/usr/bin/find-debuginfo" >> ${D}${libdir}/rpm/macros
+}
+
+do_install:append () {
+ sed -i -e 's:${HOSTTOOLS_DIR}/::g' \
+ -e 's:${STAGING_DIR_NATIVE}/::g' \
+ ${D}/${libdir}/rpm/macros
+ sed -i -e 's:${RECIPE_SYSROOT}/::g' \
+ ${D}/${libdir}/cmake/rpm/rpm-targets.cmake
+
+}
+
+FILES:${PN} += "${libdir}/rpm-plugins/*.so \
+ "
+FILES:${PN}:append:class-nativesdk = " ${SDKPATHNATIVE}/environment-setup.d/rpm.sh"
+
+FILES:${PN}-dev += "${libdir}/rpm-plugins/*.la \
+ "
+PACKAGE_BEFORE_PN += "${PN}-build ${PN}-sign ${PN}-archive"
+
+RRECOMMENDS:${PN} += "rpm-sign rpm-archive"
+
+FILES:${PN}-build = "\
+ ${bindir}/rpmbuild \
+ ${bindir}/gendiff \
+ ${bindir}/rpmspec \
+ ${libdir}/librpmbuild.so.* \
+ ${libdir}/rpm/brp-* \
+ ${libdir}/rpm/check-* \
+ ${libdir}/rpm/sepdebugcrcfix \
+ ${libdir}/rpm/find-lang.sh \
+ ${libdir}/rpm/sysusers.sh \
+ ${libdir}/rpm/*provides* \
+ ${libdir}/rpm/*requires* \
+ ${libdir}/rpm/*deps* \
+ ${libdir}/rpm/*.prov \
+ ${libdir}/rpm/*.req \
+ ${libdir}/rpm/config.* \
+ ${libdir}/rpm/mkinstalldirs \
+ ${libdir}/rpm/macros.p* \
+ ${libdir}/rpm/fileattrs/* \
+"
+
+FILES:${PN}-sign = "\
+ ${bindir}/rpmsign \
+ ${libdir}/librpmsign.so.* \
+"
+
+FILES:${PN}-archive = "\
+ ${bindir}/rpm2archive \
+"
+
+PACKAGES += "python3-rpm"
+PROVIDES += "python3-rpm"
+FILES:python3-rpm = "${PYTHON_SITEPACKAGES_DIR}/rpm/* ${PYTHON_SITEPACKAGES_DIR}/rpm-*.egg-info"
+
+RDEPENDS:${PN}-build = "bash perl python3-core debugedit"
+
+PACKAGE_PREPROCESS_FUNCS += "rpm_package_preprocess"
+
+# Do not specify a sysroot when compiling on a target.
+rpm_package_preprocess () {
+ sed -i -e 's:--sysroot[^ ]*::g' \
+ ${PKGD}/${libdir}/rpm/macros
+}
+
+SSTATE_HASHEQUIV_FILEMAP = " \
+ populate_sysroot:*/rpm/macros:${TMPDIR} \
+ populate_sysroot:*/rpm/macros:${COREBASE} \
+ "
diff --git a/meta/recipes-devtools/rsync/files/0001-Add-missing-prototypes-to-function-declarations.patch b/meta/recipes-devtools/rsync/files/0001-Add-missing-prototypes-to-function-declarations.patch
new file mode 100644
index 0000000000..2379de84f2
--- /dev/null
+++ b/meta/recipes-devtools/rsync/files/0001-Add-missing-prototypes-to-function-declarations.patch
@@ -0,0 +1,170 @@
+From 2beb35c34c45320144f37b12ef4d72fb8734280e Mon Sep 17 00:00:00 2001
+From: Khem Raj <raj.khem@gmail.com>
+Date: Mon, 29 Aug 2022 19:53:28 -0700
+Subject: [PATCH] Add missing prototypes to function declarations
+
+With Clang 15+ compiler -Wstrict-prototypes is triggering warnings which
+are turned into errors with -Werror, this fixes the problem by adding
+missing prototypes
+
+Fixes errors like
+| log.c:134:24: error: a function declaration without a prototype is deprecated in all versions of C [-Werror,-Wstrict-prototypes]
+| static void syslog_init()
+| ^
+| void
+
+Upstream-Status: Submitted [https://lists.samba.org/archive/rsync/2022-August/032858.html]
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+---
+ checksum.c | 2 +-
+ exclude.c | 2 +-
+ hlink.c | 3 +--
+ lib/pool_alloc.c | 2 +-
+ log.c | 2 +-
+ main.c | 2 +-
+ syscall.c | 4 ++--
+ zlib/crc32.c | 2 +-
+ zlib/trees.c | 2 +-
+ zlib/zutil.c | 4 ++--
+ 10 files changed, 12 insertions(+), 13 deletions(-)
+
+diff --git a/checksum.c b/checksum.c
+index cb21882..736818b 100644
+--- a/checksum.c
++++ b/checksum.c
+@@ -779,7 +779,7 @@ static void verify_digest(struct name_num_item *nni, BOOL check_auth_list)
+ }
+ #endif
+
+-void init_checksum_choices()
++void init_checksum_choices(void)
+ {
+ #if defined SUPPORT_XXH3 || defined USE_OPENSSL
+ struct name_num_item *nni;
+diff --git a/exclude.c b/exclude.c
+index 87edbcf..ae0de2f 100644
+--- a/exclude.c
++++ b/exclude.c
+@@ -363,7 +363,7 @@ void implied_include_partial_string(const char *s_start, const char *s_end)
+ memcpy(partial_string_buf, s_start, partial_string_len);
+ }
+
+-void free_implied_include_partial_string()
++void free_implied_include_partial_string(void)
+ {
+ if (partial_string_buf) {
+ if (partial_string_len)
+diff --git a/hlink.c b/hlink.c
+index 20291f2..5c26a6b 100644
+--- a/hlink.c
++++ b/hlink.c
+@@ -117,8 +117,7 @@ static void match_gnums(int32 *ndx_list, int ndx_count)
+ struct ht_int32_node *node = NULL;
+ int32 gnum, gnum_next;
+
+- qsort(ndx_list, ndx_count, sizeof ndx_list[0], (int (*)()) hlink_compare_gnum);
+-
++ qsort(ndx_list, ndx_count, sizeof ndx_list[0], (int (*)(const void *, const void *)) hlink_compare_gnum);
+ for (from = 0; from < ndx_count; from++) {
+ file = hlink_flist->sorted[ndx_list[from]];
+ gnum = F_HL_GNUM(file);
+diff --git a/lib/pool_alloc.c b/lib/pool_alloc.c
+index a1a7245..4eae062 100644
+--- a/lib/pool_alloc.c
++++ b/lib/pool_alloc.c
+@@ -9,7 +9,7 @@ struct alloc_pool
+ size_t size; /* extent size */
+ size_t quantum; /* allocation quantum */
+ struct pool_extent *extents; /* top extent is "live" */
+- void (*bomb)(); /* called if malloc fails */
++ void (*bomb)(const char *, const char *, int); /* called if malloc fails */
+ int flags;
+
+ /* statistical data */
+diff --git a/log.c b/log.c
+index e4ba1cc..8482b71 100644
+--- a/log.c
++++ b/log.c
+@@ -131,7 +131,7 @@ static void logit(int priority, const char *buf)
+ }
+ }
+
+-static void syslog_init()
++static void syslog_init(void)
+ {
+ int options = LOG_PID;
+
+diff --git a/main.c b/main.c
+index 0c60b86..4bc664a 100644
+--- a/main.c
++++ b/main.c
+@@ -246,7 +246,7 @@ void read_del_stats(int f)
+ stats.deleted_files += stats.deleted_specials = read_varint(f);
+ }
+
+-static void become_copy_as_user()
++static void become_copy_as_user(void)
+ {
+ char *gname;
+ uid_t uid;
+diff --git a/syscall.c b/syscall.c
+index d92074a..92ca86d 100644
+--- a/syscall.c
++++ b/syscall.c
+@@ -389,9 +389,9 @@ OFF_T do_lseek(int fd, OFF_T offset, int whence)
+ {
+ #ifdef HAVE_LSEEK64
+ #if !SIZEOF_OFF64_T
+- OFF_T lseek64();
++ OFF_T lseek64(int fd, OFF_T offset, int whence);
+ #else
+- off64_t lseek64();
++ off64_t lseek64(int fd, off64_t offset, int whence);
+ #endif
+ return lseek64(fd, offset, whence);
+ #else
+diff --git a/zlib/crc32.c b/zlib/crc32.c
+index 05733f4..50c6c02 100644
+--- a/zlib/crc32.c
++++ b/zlib/crc32.c
+@@ -187,7 +187,7 @@ local void write_table(out, table)
+ /* =========================================================================
+ * This function can be used by asm versions of crc32()
+ */
+-const z_crc_t FAR * ZEXPORT get_crc_table()
++const z_crc_t FAR * ZEXPORT get_crc_table(void)
+ {
+ #ifdef DYNAMIC_CRC_TABLE
+ if (crc_table_empty)
+diff --git a/zlib/trees.c b/zlib/trees.c
+index 9c66770..0d9047e 100644
+--- a/zlib/trees.c
++++ b/zlib/trees.c
+@@ -231,7 +231,7 @@ local void send_bits(s, value, length)
+ /* ===========================================================================
+ * Initialize the various 'constant' tables.
+ */
+-local void tr_static_init()
++local void tr_static_init(void)
+ {
+ #if defined(GEN_TREES_H) || !defined(STDC)
+ static int static_init_done = 0;
+diff --git a/zlib/zutil.c b/zlib/zutil.c
+index bbba7b2..61f8dc9 100644
+--- a/zlib/zutil.c
++++ b/zlib/zutil.c
+@@ -27,12 +27,12 @@ z_const char * const z_errmsg[10] = {
+ ""};
+
+
+-const char * ZEXPORT zlibVersion()
++const char * ZEXPORT zlibVersion(void)
+ {
+ return ZLIB_VERSION;
+ }
+
+-uLong ZEXPORT zlibCompileFlags()
++uLong ZEXPORT zlibCompileFlags(void)
+ {
+ uLong flags;
+
diff --git a/meta/recipes-devtools/rsync/files/makefile-no-rebuild.patch b/meta/recipes-devtools/rsync/files/makefile-no-rebuild.patch
index 92ed1f4419..0c9ce8b8e3 100644
--- a/meta/recipes-devtools/rsync/files/makefile-no-rebuild.patch
+++ b/meta/recipes-devtools/rsync/files/makefile-no-rebuild.patch
@@ -1,4 +1,4 @@
-From 81700d1a0e51391028c761cc8ef1cd660084d114 Mon Sep 17 00:00:00 2001
+From f446686c26c499e15ef17d495a93cfbc20e16090 Mon Sep 17 00:00:00 2001
From: Ross Burton <ross.burton@intel.com>
Date: Tue, 12 Apr 2016 15:51:54 +0100
Subject: [PATCH] rsync: remove upstream's rebuild logic
@@ -8,16 +8,15 @@ generally overcomplicated, and we ensure that autoreconf is invoked if required.
Upstream-Status: Inappropriate
Signed-off-by: Ross Burton <ross.burton@intel.com>
-
---
Makefile.in | 54 -----------------------------------------------------
1 file changed, 54 deletions(-)
diff --git a/Makefile.in b/Makefile.in
-index 3cde955..d963a70 100644
+index a1253e5..a084935 100644
--- a/Makefile.in
+++ b/Makefile.in
-@@ -190,60 +190,6 @@ gensend: gen
+@@ -192,60 +192,6 @@ gensend: gen
fi
rsync -aic $(GENFILES) git-version.h $${SAMBA_HOST-samba.org}:/home/ftp/pub/rsync/generated-files/ || true
diff --git a/meta/recipes-devtools/rsync/rsync_3.2.4.bb b/meta/recipes-devtools/rsync/rsync_3.2.4.bb
deleted file mode 100644
index e6f917b5cd..0000000000
--- a/meta/recipes-devtools/rsync/rsync_3.2.4.bb
+++ /dev/null
@@ -1,70 +0,0 @@
-SUMMARY = "File synchronization tool"
-HOMEPAGE = "http://rsync.samba.org/"
-DESCRIPTION = "rsync is an open source utility that provides fast incremental file transfer."
-BUGTRACKER = "http://rsync.samba.org/bugzilla.html"
-SECTION = "console/network"
-# GPL-2.0-or-later (<< 3.0.0), GPL-3.0-or-later (>= 3.0.0)
-# Includes opennsh and xxhash dynamic link exception
-LICENSE = "GPL-3.0-or-later"
-LIC_FILES_CHKSUM = "file://COPYING;md5=24423708fe159c9d12be1ea29fcb18c7"
-
-DEPENDS = "popt"
-
-SRC_URI = "https://download.samba.org/pub/${BPN}/src/${BP}.tar.gz \
- file://rsyncd.conf \
- file://makefile-no-rebuild.patch \
- file://determism.patch \
- "
-
-SRC_URI[sha256sum] = "6f761838d08052b0b6579cf7f6737d93e47f01f4da04c5d24d3447b7f2a5fad1"
-
-# -16548 required for v3.1.3pre1. Already in v3.1.3.
-CVE_CHECK_IGNORE += " CVE-2017-16548 "
-
-inherit autotools-brokensep
-
-PACKAGECONFIG ??= "acl attr \
- ${@bb.utils.filter('DISTRO_FEATURES', 'ipv6', d)} \
-"
-
-PACKAGECONFIG[acl] = "--enable-acl-support,--disable-acl-support,acl,"
-PACKAGECONFIG[attr] = "--enable-xattr-support,--disable-xattr-support,attr,"
-PACKAGECONFIG[ipv6] = "--enable-ipv6,--disable-ipv6,"
-PACKAGECONFIG[lz4] = "--enable-lz4,--disable-lz4,lz4"
-PACKAGECONFIG[openssl] = "--enable-openssl,--disable-openssl,openssl"
-PACKAGECONFIG[xxhash] = "--enable-xxhash,--disable-xxhash,xxhash"
-PACKAGECONFIG[zstd] = "--enable-zstd,--disable-zstd,zstd"
-
-# By default, if crosscompiling, rsync disables a number of
-# capabilities, hardlinking symlinks and special files (i.e. devices)
-CACHED_CONFIGUREVARS += "rsync_cv_can_hardlink_special=yes rsync_cv_can_hardlink_symlink=yes"
-
-EXTRA_OEMAKE = 'STRIP=""'
-EXTRA_OECONF = "--disable-md2man --with-nobody-group=nogroup"
-
-#| ./simd-checksum-x86_64.cpp: In function 'uint32_t get_checksum1_cpp(char*, int32_t)':
-#| ./simd-checksum-x86_64.cpp:89:52: error: multiversioning needs 'ifunc' which is not supported on this target
-#| 89 | __attribute__ ((target("default"))) MVSTATIC int32 get_checksum1_avx2_64(schar* buf, int32 len, int32 i, uint32* ps1, uint32* ps2) { return i; }
-#| | ^~~~~~~~~~~~~~~~~~~~~
-#| ./simd-checksum-x86_64.cpp:480:1: error: use of multiversioned function without a default
-#| 480 | }
-#| | ^
-#| If you can't fix the issue, re-run ./configure with --disable-roll-simd.
-EXTRA_OECONF:append:libc-musl = " --disable-roll-simd"
-
-# rsync 3.0 uses configure.sh instead of configure, and
-# makefile checks the existence of configure.sh
-do_configure:prepend () {
- rm -f ${S}/configure ${S}/configure.sh
-}
-
-do_configure:append () {
- cp -f ${S}/configure ${S}/configure.sh
-}
-
-do_install:append() {
- install -d ${D}${sysconfdir}
- install -m 0644 ${WORKDIR}/rsyncd.conf ${D}${sysconfdir}
-}
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/rsync/rsync_3.3.0.bb b/meta/recipes-devtools/rsync/rsync_3.3.0.bb
new file mode 100644
index 0000000000..b42026331d
--- /dev/null
+++ b/meta/recipes-devtools/rsync/rsync_3.3.0.bb
@@ -0,0 +1,67 @@
+SUMMARY = "File synchronization tool"
+HOMEPAGE = "http://rsync.samba.org/"
+DESCRIPTION = "rsync is an open source utility that provides fast incremental file transfer."
+BUGTRACKER = "http://rsync.samba.org/bugzilla.html"
+SECTION = "console/network"
+# GPL-2.0-or-later (<< 3.0.0), GPL-3.0-or-later (>= 3.0.0)
+# Includes opennsh and xxhash dynamic link exception
+LICENSE = "GPL-3.0-or-later"
+LIC_FILES_CHKSUM = "file://COPYING;md5=24423708fe159c9d12be1ea29fcb18c7"
+
+DEPENDS = "popt"
+
+SRC_URI = "https://download.samba.org/pub/${BPN}/src/${BP}.tar.gz \
+ file://rsyncd.conf \
+ file://makefile-no-rebuild.patch \
+ file://determism.patch \
+ file://0001-Add-missing-prototypes-to-function-declarations.patch \
+ "
+SRC_URI[sha256sum] = "7399e9a6708c32d678a72a63219e96f23be0be2336e50fd1348498d07041df90"
+
+inherit autotools-brokensep
+
+PACKAGECONFIG ??= "acl attr \
+ ${@bb.utils.filter('DISTRO_FEATURES', 'ipv6', d)} \
+"
+
+PACKAGECONFIG[acl] = "--enable-acl-support,--disable-acl-support,acl,"
+PACKAGECONFIG[attr] = "--enable-xattr-support,--disable-xattr-support,attr,"
+PACKAGECONFIG[ipv6] = "--enable-ipv6,--disable-ipv6,"
+PACKAGECONFIG[lz4] = "--enable-lz4,--disable-lz4,lz4"
+PACKAGECONFIG[openssl] = "--enable-openssl,--disable-openssl,openssl"
+PACKAGECONFIG[xxhash] = "--enable-xxhash,--disable-xxhash,xxhash"
+PACKAGECONFIG[zstd] = "--enable-zstd,--disable-zstd,zstd"
+
+# By default, if crosscompiling, rsync disables a number of
+# capabilities, hardlinking symlinks and special files (i.e. devices)
+CACHED_CONFIGUREVARS += "rsync_cv_can_hardlink_special=yes rsync_cv_can_hardlink_symlink=yes"
+
+EXTRA_OEMAKE = 'STRIP=""'
+EXTRA_OECONF = "--disable-md2man --with-nobody-group=nogroup"
+
+#| ./simd-checksum-x86_64.cpp: In function 'uint32_t get_checksum1_cpp(char*, int32_t)':
+#| ./simd-checksum-x86_64.cpp:89:52: error: multiversioning needs 'ifunc' which is not supported on this target
+#| 89 | __attribute__ ((target("default"))) MVSTATIC int32 get_checksum1_avx2_64(schar* buf, int32 len, int32 i, uint32* ps1, uint32* ps2) { return i; }
+#| | ^~~~~~~~~~~~~~~~~~~~~
+#| ./simd-checksum-x86_64.cpp:480:1: error: use of multiversioned function without a default
+#| 480 | }
+#| | ^
+#| If you can't fix the issue, re-run ./configure with --disable-roll-simd.
+EXTRA_OECONF:append:libc-musl = " --disable-roll-simd"
+
+# rsync 3.0 uses configure.sh instead of configure, and
+# makefile checks the existence of configure.sh
+do_configure:prepend () {
+ rm -f ${S}/configure ${S}/configure.sh
+}
+
+do_configure:append () {
+ cp -f ${S}/configure ${S}/configure.sh
+}
+
+do_install:append() {
+ install -d ${D}${sysconfdir}
+ install -m 0644 ${WORKDIR}/rsyncd.conf ${D}${sysconfdir}
+}
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/ruby/ruby.inc b/meta/recipes-devtools/ruby/ruby.inc
deleted file mode 100644
index ebff5efd1f..0000000000
--- a/meta/recipes-devtools/ruby/ruby.inc
+++ /dev/null
@@ -1,39 +0,0 @@
-SUMMARY = "An interpreter of object-oriented scripting language"
-DESCRIPTION = "Ruby is an interpreted scripting language for quick \
-and easy object-oriented programming. It has many features to process \
-text files and to do system management tasks (as in Perl). \
-It is simple, straight-forward, and extensible. \
-"
-HOMEPAGE = "http://www.ruby-lang.org/"
-SECTION = "devel/ruby"
-LICENSE = "Ruby | BSD-2-Clause | BSD-3-Clause | GPL-2.0-only | ISC | MIT"
-LIC_FILES_CHKSUM = "file://COPYING;md5=5b8c87559868796979806100db3f3805 \
- file://BSDL;md5=8b50bc6de8f586dc66790ba11d064d75 \
- file://GPL;md5=b234ee4d69f5fce4486a80fdaf4a4263 \
- file://LEGAL;md5=f260190bc1e92e363f0ee3c0463d4c7c \
- "
-
-DEPENDS = "zlib openssl libyaml gdbm readline libffi"
-DEPENDS:append:class-target = " ruby-native"
-
-SHRT_VER = "${@oe.utils.trim_version("${PV}", 2)}"
-SRC_URI = "http://cache.ruby-lang.org/pub/ruby/${SHRT_VER}/ruby-${PV}.tar.gz \
- file://0001-extmk-fix-cross-compilation-of-external-gems.patch \
- file://0002-Obey-LDFLAGS-for-the-link-of-libruby.patch \
- "
-UPSTREAM_CHECK_URI = "https://www.ruby-lang.org/en/downloads/"
-
-inherit autotools ptest pkgconfig
-
-
-# This snippet lets compiled extensions which rely on external libraries,
-# such as zlib, compile properly. If we don't do this, then when extmk.rb
-# runs, it uses the native libraries instead of the target libraries, and so
-# none of the linking operations succeed -- which makes extconf.rb think
-# that the libraries aren't available and hence that the extension can't be
-# built.
-
-do_configure:prepend() {
- sed -i "s#%%TARGET_CFLAGS%%#$CFLAGS#; s#%%TARGET_LDFLAGS%%#$LDFLAGS#" ${S}/common.mk
- rm -rf ${S}/ruby/
-}
diff --git a/meta/recipes-devtools/ruby/ruby/0001-extmk-fix-cross-compilation-of-external-gems.patch b/meta/recipes-devtools/ruby/ruby/0001-extmk-fix-cross-compilation-of-external-gems.patch
index 2e3156880e..7402e76333 100644
--- a/meta/recipes-devtools/ruby/ruby/0001-extmk-fix-cross-compilation-of-external-gems.patch
+++ b/meta/recipes-devtools/ruby/ruby/0001-extmk-fix-cross-compilation-of-external-gems.patch
@@ -1,7 +1,7 @@
-From a6e12b25a54d112c899b70c89c0bec9c5e5ebf3c Mon Sep 17 00:00:00 2001
+From caa03f46a3204a7e0f0e5d9d9cc9113304dc0382 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Andr=C3=A9=20Draszik?= <andre.draszik@jci.com>
Date: Mon, 30 Sep 2019 16:57:01 +0100
-Subject: [PATCH 1/3] extmk: fix cross-compilation of external gems
+Subject: [PATCH] extmk: fix cross-compilation of external gems
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
@@ -16,10 +16,10 @@ Signed-off-by: André Draszik <andre.draszik@jci.com>
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/ext/extmk.rb b/ext/extmk.rb
-index 1389dc4117..e4d923d7a7 100755
+index 428ffc9..87eff71 100755
--- a/ext/extmk.rb
+++ b/ext/extmk.rb
-@@ -413,8 +413,8 @@ def $mflags.defined?(var)
+@@ -420,8 +420,8 @@ else
end
$ruby = [$ruby]
$ruby << "-I'$(topdir)'"
@@ -29,6 +29,3 @@ index 1389dc4117..e4d923d7a7 100755
$ruby << "-I'$(extout)/$(arch)'" << "-I'$(extout)/common'" if $extout
ENV["RUBYLIB"] = "-"
end
---
-2.23.0.rc1
-
diff --git a/meta/recipes-devtools/ruby/ruby/0001-template-Makefile.in-do-not-write-host-cross-cc-item.patch b/meta/recipes-devtools/ruby/ruby/0001-template-Makefile.in-do-not-write-host-cross-cc-item.patch
deleted file mode 100644
index 226ef3af75..0000000000
--- a/meta/recipes-devtools/ruby/ruby/0001-template-Makefile.in-do-not-write-host-cross-cc-item.patch
+++ /dev/null
@@ -1,32 +0,0 @@
-From 2368d07660a93a2c41d63f3ab6054ca4daeef820 Mon Sep 17 00:00:00 2001
-From: Alexander Kanavin <alex.kanavin@gmail.com>
-Date: Tue, 17 Nov 2020 18:31:40 +0000
-Subject: [PATCH] template/Makefile.in: do not write host cross-cc items into
- target config
-
-This helps reproducibility.
-
-Upstream-Status: Inappropriate [oe-core specific]
-Signed-off-by: Alexander Kanavin <alex.kanavin@gmail.com>
----
- template/Makefile.in | 4 ++--
- 1 file changed, 2 insertions(+), 2 deletions(-)
-
-diff --git a/template/Makefile.in b/template/Makefile.in
-index 10dc826..940ee07 100644
---- a/template/Makefile.in
-+++ b/template/Makefile.in
-@@ -657,11 +657,11 @@ mjit_config.h:
- echo '#endif'; \
- quote MJIT_MIN_HEADER_NAME "$(MJIT_MIN_HEADER_NAME)"; \
- sep=,; \
-- quote "MJIT_CC_COMMON " $(MJIT_CC); \
-+ quote "MJIT_CC_COMMON " ; \
- quote "MJIT_CFLAGS MJIT_ARCHFLAG" $(MJIT_CFLAGS); \
- quote "MJIT_OPTFLAGS " $(MJIT_OPTFLAGS); \
- quote "MJIT_DEBUGFLAGS " $(MJIT_DEBUGFLAGS); \
-- quote "MJIT_LDSHARED " $(MJIT_LDSHARED); \
-+ quote "MJIT_LDSHARED " ; \
- quote "MJIT_DLDFLAGS MJIT_ARCHFLAG" $(MJIT_DLDFLAGS); \
- quote "MJIT_LIBS " $(LIBRUBYARG_SHARED); \
- quote 'PRELOADENV "@PRELOADENV@"'; \
diff --git a/meta/recipes-devtools/ruby/ruby/0001-vm_dump.c-Define-REG_S1-and-REG_S2-for-musl-riscv.patch b/meta/recipes-devtools/ruby/ruby/0001-vm_dump.c-Define-REG_S1-and-REG_S2-for-musl-riscv.patch
index f7b7adb3fd..67054d6553 100644
--- a/meta/recipes-devtools/ruby/ruby/0001-vm_dump.c-Define-REG_S1-and-REG_S2-for-musl-riscv.patch
+++ b/meta/recipes-devtools/ruby/ruby/0001-vm_dump.c-Define-REG_S1-and-REG_S2-for-musl-riscv.patch
@@ -1,4 +1,4 @@
-From dfb22e4d6662bf72879eda806eaa78c7b52b519e Mon Sep 17 00:00:00 2001
+From 980dcc5380db6f03451357140ae1487117300156 Mon Sep 17 00:00:00 2001
From: Khem Raj <raj.khem@gmail.com>
Date: Tue, 25 Jan 2022 20:29:14 -0800
Subject: [PATCH] vm_dump.c: Define REG_S1 and REG_S2 for musl/riscv
@@ -14,7 +14,7 @@ Signed-off-by: Khem Raj <raj.khem@gmail.com>
1 file changed, 5 insertions(+)
diff --git a/vm_dump.c b/vm_dump.c
-index a98f5aa..957b785 100644
+index 444be4a..8a081a7 100644
--- a/vm_dump.c
+++ b/vm_dump.c
@@ -39,6 +39,11 @@
@@ -29,6 +29,3 @@ index a98f5aa..957b785 100644
#define VM_CFP_CNT(ec, cfp) \
((rb_control_frame_t *)((ec)->vm_stack + (ec)->vm_stack_size) - \
(rb_control_frame_t *)(cfp))
---
-2.35.0
-
diff --git a/meta/recipes-devtools/ruby/ruby/0002-Obey-LDFLAGS-for-the-link-of-libruby.patch b/meta/recipes-devtools/ruby/ruby/0002-Obey-LDFLAGS-for-the-link-of-libruby.patch
index 4b954e439b..f3a65e785d 100644
--- a/meta/recipes-devtools/ruby/ruby/0002-Obey-LDFLAGS-for-the-link-of-libruby.patch
+++ b/meta/recipes-devtools/ruby/ruby/0002-Obey-LDFLAGS-for-the-link-of-libruby.patch
@@ -1,20 +1,19 @@
-From 07fd1ada322eda6c05ac45c08fc814976f31b596 Mon Sep 17 00:00:00 2001
+From 7f7facb85bd65adec24230fe8ca7f6a9863a1fd0 Mon Sep 17 00:00:00 2001
From: Christopher Larson <chris_larson@mentor.com>
Date: Thu, 5 May 2016 10:59:07 -0700
Subject: [PATCH] Obey LDFLAGS for the link of libruby
Signed-off-by: Christopher Larson <chris_larson@mentor.com>
Upstream-Status: Pending
-
---
template/Makefile.in | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/template/Makefile.in b/template/Makefile.in
-index f5a3149..5cc75ae 100644
+index 8c462f2..2200c8c 100644
--- a/template/Makefile.in
+++ b/template/Makefile.in
-@@ -114,7 +114,7 @@ ENABLE_SHARED = @ENABLE_SHARED@
+@@ -115,7 +115,7 @@ ENABLE_SHARED = @ENABLE_SHARED@
LDSHARED = @LIBRUBY_LDSHARED@
DLDSHARED = @DLDSHARED@
XDLDFLAGS = @DLDFLAGS@
diff --git a/meta/recipes-devtools/ruby/ruby/0002-template-Makefile.in-filter-out-f-prefix-map.patch b/meta/recipes-devtools/ruby/ruby/0002-template-Makefile.in-filter-out-f-prefix-map.patch
deleted file mode 100644
index 2efbad7513..0000000000
--- a/meta/recipes-devtools/ruby/ruby/0002-template-Makefile.in-filter-out-f-prefix-map.patch
+++ /dev/null
@@ -1,42 +0,0 @@
-Subject: [PATCH] template/Makefile.in: filter out -f*prefix-map
-
-If we add DEBUG_PREFIX_MAP into LDFLAGS, ruby and ruby-dbg are no longer
-reproducible. Fix this.
-
-Upstream-Status: Inappropriate [oe-core specific]
-Signed-off-by: Tony Battersby <tonyb@cybernetics.com>
----
---- a/tool/mjit_archflag.sh
-+++ b/tool/mjit_archflag.sh
-@@ -7,6 +7,20 @@ quote() {
- echo
- }
-
-+quote_filtered() {
-+ printf "#${indent}define $1"
-+ while shift && [ "$#" -gt 0 ]; do
-+ case "$1" in
-+ -ffile-prefix-map=*|-fdebug-prefix-map=*|-fmacro-prefix-map=*)
-+ ;;
-+ *)
-+ printf ' "%s"'$sep "$1"
-+ ;;
-+ esac
-+ done
-+ echo
-+}
-+
- archs=""
- arch_flag=""
-
---- a/template/Makefile.in
-+++ b/template/Makefile.in
-@@ -666,7 +666,7 @@ mjit_config.h:
- quote "MJIT_OPTFLAGS " $(MJIT_OPTFLAGS); \
- quote "MJIT_DEBUGFLAGS " $(MJIT_DEBUGFLAGS); \
- quote "MJIT_LDSHARED " ; \
-- quote "MJIT_DLDFLAGS MJIT_ARCHFLAG" $(MJIT_DLDFLAGS); \
-+ quote_filtered "MJIT_DLDFLAGS MJIT_ARCHFLAG" $(MJIT_DLDFLAGS); \
- quote "MJIT_LIBS " $(LIBRUBYARG_SHARED); \
- quote 'PRELOADENV "@PRELOADENV@"'; \
- indent=$${archs:+' '}; \
diff --git a/meta/recipes-devtools/ruby/ruby/0003-rdoc-build-reproducible-documentation.patch b/meta/recipes-devtools/ruby/ruby/0003-rdoc-build-reproducible-documentation.patch
index f92f0e1ba6..e2d5b57c25 100644
--- a/meta/recipes-devtools/ruby/ruby/0003-rdoc-build-reproducible-documentation.patch
+++ b/meta/recipes-devtools/ruby/ruby/0003-rdoc-build-reproducible-documentation.patch
@@ -1,6 +1,7 @@
+From 5079e678ce2a81416088c04f9123cd8207d5def2 Mon Sep 17 00:00:00 2001
From: Christian Hofstaedtler <zeha@debian.org>
Date: Tue, 10 Oct 2017 15:04:34 -0300
-Subject: rdoc: build reproducible documentation
+Subject: [PATCH] rdoc: build reproducible documentation
- provide a fixed timestamp to the gzip compression
@@ -10,23 +11,24 @@ Signed-off-by: Antonio Terceiro <terceiro@debian.org>
Signed-off-by: Christian Hofstaedtler <zeha@debian.org>
---
lib/rdoc/generator/json_index.rb | 4 ++--
- lib/rdoc/rdoc.rb | 2 +-
- 2 files changed, 3 insertions(+), 3 deletions(-)
+ 1 file changed, 2 insertions(+), 2 deletions(-)
+diff --git a/lib/rdoc/generator/json_index.rb b/lib/rdoc/generator/json_index.rb
+index c454910..24feab0 100644
--- a/lib/rdoc/generator/json_index.rb
+++ b/lib/rdoc/generator/json_index.rb
-@@ -178,7 +178,7 @@
+@@ -178,7 +178,7 @@ class RDoc::Generator::JsonIndex
debug_msg "Writing gzipped search index to %s" % outfile
-
+
Zlib::GzipWriter.open(outfile) do |gz|
- gz.mtime = File.mtime(search_index_file)
+ gz.mtime = -1
gz.orig_name = search_index_file.basename.to_s
gz.write search_index
gz.close
-@@ -196,7 +196,7 @@
+@@ -196,7 +196,7 @@ class RDoc::Generator::JsonIndex
debug_msg "Writing gzipped file to %s" % outfile
-
+
Zlib::GzipWriter.open(outfile) do |gz|
- gz.mtime = File.mtime(dest)
+ gz.mtime = -1
diff --git a/meta/recipes-devtools/ruby/ruby/0004-lib-mkmf.rb-sort-list-of-object-files-in-generated-M.patch b/meta/recipes-devtools/ruby/ruby/0004-lib-mkmf.rb-sort-list-of-object-files-in-generated-M.patch
index e0aca0dcfc..b14a731cfb 100644
--- a/meta/recipes-devtools/ruby/ruby/0004-lib-mkmf.rb-sort-list-of-object-files-in-generated-M.patch
+++ b/meta/recipes-devtools/ruby/ruby/0004-lib-mkmf.rb-sort-list-of-object-files-in-generated-M.patch
@@ -1,6 +1,7 @@
+From 99734381652602f76075017576a819c427ebb5f2 Mon Sep 17 00:00:00 2001
From: Reiner Herrmann <reiner@reiner-h.de>
Date: Tue, 10 Oct 2017 15:06:13 -0300
-Subject: lib/mkmf.rb: sort list of object files in generated Makefile
+Subject: [PATCH] lib/mkmf.rb: sort list of object files in generated Makefile
Without sorting the list explicitly, its order is indeterministic,
because readdir() is also not deterministic.
@@ -15,9 +16,11 @@ Signed-off-by: Reiner Herrmann <reiner@reiner-h.de>
lib/mkmf.rb | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
+diff --git a/lib/mkmf.rb b/lib/mkmf.rb
+index 6da7dde..3af30a9 100644
--- a/lib/mkmf.rb
+++ b/lib/mkmf.rb
-@@ -2315,7 +2315,7 @@
+@@ -2368,7 +2368,7 @@ LOCAL_LIBS = #{$LOCAL_LIBS}
LIBS = #{$LIBRUBYARG} #{$libs} #{$LIBS}
ORIG_SRCS = #{orig_srcs.collect(&File.method(:basename)).join(' ')}
SRCS = $(ORIG_SRCS) #{(srcs - orig_srcs).collect(&File.method(:basename)).join(' ')}
diff --git a/meta/recipes-devtools/ruby/ruby/0005-Mark-Gemspec-reproducible-change-fixing-784225-too.patch b/meta/recipes-devtools/ruby/ruby/0005-Mark-Gemspec-reproducible-change-fixing-784225-too.patch
index 41f206523e..24268625a2 100644
--- a/meta/recipes-devtools/ruby/ruby/0005-Mark-Gemspec-reproducible-change-fixing-784225-too.patch
+++ b/meta/recipes-devtools/ruby/ruby/0005-Mark-Gemspec-reproducible-change-fixing-784225-too.patch
@@ -1,4 +1,4 @@
-From 6e1dc610724a7aa8368cbcddf4bbe21cccc0f731 Mon Sep 17 00:00:00 2001
+From 3bc324379aa3e322bad9353da8c0064cd671cc74 Mon Sep 17 00:00:00 2001
From: Lucas Kanashiro <kanashiro@debian.org>
Date: Fri, 1 Nov 2019 15:25:17 -0300
Subject: [PATCH] Make gemspecs reproducible
@@ -12,20 +12,20 @@ Upstream-Status: Backport [debian]
1 file changed, 3 insertions(+), 1 deletion(-)
diff --git a/lib/rubygems/specification.rb b/lib/rubygems/specification.rb
-index 0d72cee..eb7bc25 100644
+index a0c7faa..f0722d9 100644
--- a/lib/rubygems/specification.rb
+++ b/lib/rubygems/specification.rb
-@@ -1691,7 +1691,9 @@ class Gem::Specification < Gem::BasicSpecification
- raise(Gem::InvalidSpecificationException,
- "invalid date format in specification: #{date.inspect}")
- end
-- when Time, DateLike then
-+ when Time then
-+ Time.utc(date.utc.year, date.utc.month, date.utc.day)
-+ when DateLike then
- Time.utc(date.year, date.month, date.day)
- else
- TODAY
+@@ -1774,7 +1774,9 @@ class Gem::Specification < Gem::BasicSpecification
+ raise(Gem::InvalidSpecificationException,
+ "invalid date format in specification: #{date.inspect}")
+ end
+- when Time, DateLike then
++ when Time then
++ Time.utc(date.utc.year, date.utc.month, date.utc.day)
++ when DateLike then
+ Time.utc(date.year, date.month, date.day)
+ else
+ TODAY
--
-2.25.1
+2.39.2
diff --git a/meta/recipes-devtools/ruby/ruby/0006-Make-gemspecs-reproducible.patch b/meta/recipes-devtools/ruby/ruby/0006-Make-gemspecs-reproducible.patch
index 1e4a298317..21604dfc34 100644
--- a/meta/recipes-devtools/ruby/ruby/0006-Make-gemspecs-reproducible.patch
+++ b/meta/recipes-devtools/ruby/ruby/0006-Make-gemspecs-reproducible.patch
@@ -1,4 +1,4 @@
-From 6e1dc610724a7aa8368cbcddf4bbe21cccc0f731 Mon Sep 17 00:00:00 2001
+From 1dc7ef09c3c567c4adb09ccfd97e0e59c58edb9f Mon Sep 17 00:00:00 2001
From: Lucas Kanashiro <kanashiro@debian.org>
Date: Fri, 1 Nov 2019 15:25:17 -0300
Subject: [PATCH] Make gemspecs reproducible
@@ -7,7 +7,6 @@ Without an explicit date, they will get the current date and make the
build unreproducible
Upstream-Status: Backport [debian]
-
---
ext/bigdecimal/bigdecimal.gemspec | 1 +
ext/fiddle/fiddle.gemspec | 1 +
@@ -17,19 +16,19 @@ Upstream-Status: Backport [debian]
5 files changed, 5 insertions(+)
diff --git a/ext/bigdecimal/bigdecimal.gemspec b/ext/bigdecimal/bigdecimal.gemspec
-index fd49c1b..5b8bb00 100644
+index f9f3b45..b9a469d 100644
--- a/ext/bigdecimal/bigdecimal.gemspec
+++ b/ext/bigdecimal/bigdecimal.gemspec
-@@ -4,6 +4,7 @@ Gem::Specification.new do |s|
- s.name = "bigdecimal"
- s.version = "3.1.1"
+@@ -14,6 +14,7 @@ Gem::Specification.new do |s|
+ s.name = name
+ s.version = source_version
s.authors = ["Kenta Murata", "Zachary Scott", "Shigeo Kobayashi"]
+ s.date = RUBY_RELEASE_DATE
s.email = ["mrkn@mrkn.jp"]
s.summary = "Arbitrary-precision decimal floating-point number library."
diff --git a/ext/fiddle/fiddle.gemspec b/ext/fiddle/fiddle.gemspec
-index a9c0ec4..89da078 100644
+index 8781093..efdca32 100644
--- a/ext/fiddle/fiddle.gemspec
+++ b/ext/fiddle/fiddle.gemspec
@@ -8,6 +8,7 @@ end
@@ -41,10 +40,10 @@ index a9c0ec4..89da078 100644
spec.email = ["aaron@tenderlovemaking.com", "hsbt@ruby-lang.org"]
diff --git a/ext/io/console/io-console.gemspec b/ext/io/console/io-console.gemspec
-index aa57f8a..ba7f8e5 100644
+index d4f5276..8f89611 100644
--- a/ext/io/console/io-console.gemspec
+++ b/ext/io/console/io-console.gemspec
-@@ -4,6 +4,7 @@ _VERSION = "0.5.11"
+@@ -4,6 +4,7 @@ _VERSION = "0.7.1"
Gem::Specification.new do |s|
s.name = "io-console"
s.version = _VERSION
@@ -65,7 +64,7 @@ index 1f4798e..48743cf 100644
spec.email = ["knu@idaemons.org", "ume@mahoroba.org"]
diff --git a/lib/rdoc/rdoc.gemspec b/lib/rdoc/rdoc.gemspec
-index 525a15f..f6d0e22 100644
+index 93a281c..cc5c155 100644
--- a/lib/rdoc/rdoc.gemspec
+++ b/lib/rdoc/rdoc.gemspec
@@ -7,6 +7,7 @@ end
@@ -76,6 +75,3 @@ index 525a15f..f6d0e22 100644
s.version = RDoc::VERSION
s.authors = [
---
-2.25.1
-
diff --git a/meta/recipes-devtools/ruby/ruby/remove_has_include_macros.patch b/meta/recipes-devtools/ruby/ruby/remove_has_include_macros.patch
deleted file mode 100644
index b78e3db892..0000000000
--- a/meta/recipes-devtools/ruby/ruby/remove_has_include_macros.patch
+++ /dev/null
@@ -1,35 +0,0 @@
-From e74b57febec9bd806e29025e6eeb8091e7021d75 Mon Sep 17 00:00:00 2001
-From: Khem Raj <raj.khem@gmail.com>
-Date: Sun, 26 Jan 2020 11:27:40 -0800
-Subject: [PATCH] Filter out __has_include* compiler defines
-
-They are internal to compiler and this header is later on includes in C
-files, but newer gcc >= 10 complains about it.
-
-error in initial header file:
-| In file included from /tmp/20200124-86625-14hiju4.c:1:
-| /tmp/20200124-86625-11y6l6i.h:13849:9: error: "__has_include" cannot be used as a macro name
-| 13849 | #define __has_include __has_include
-| | ^~~~~~~~~~~~~
-| compilation terminated due to -Wfatal-errors.
-
-Upstream-Status: Pending
-Signed-off-by: Khem Raj <raj.khem@gmail.com>
-
----
- common.mk | 2 ++
- 1 file changed, 2 insertions(+)
-
-diff --git a/common.mk b/common.mk
-index 664f750..3b8fbe6 100644
---- a/common.mk
-+++ b/common.mk
-@@ -238,6 +238,8 @@ $(TIMESTAMPDIR)/$(MJIT_HEADER:.h=)$(MJIT_HEADER_SUFFIX).time: probes.h vm.$(OBJE
- $(ECHO) building $(@F:.time=.h)
- $(Q)$(MINIRUBY) $(tooldir)/mjit_tabs.rb "$(MJIT_TABS)" \
- $(CPP) -DMJIT_HEADER $(MJIT_HEADER_FLAGS) $(CFLAGS) $(XCFLAGS) $(CPPFLAGS) $(srcdir)/vm.c $(CPPOUTFLAG)$(@F:.time=.h).new
-+ $(Q)sed -i -e "/#define __has_include __has_include/d" $(@F:.time=.h).new
-+ $(Q)sed -i -e "/#define __has_include_next __has_include_next/d" $(@F:.time=.h).new
- $(Q) $(IFCHANGE) "--timestamp=$@" $(@F:.time=.h) $(@F:.time=.h).new
-
- $(MJIT_HEADER:.h=)$(MJIT_HEADER_SUFFIX).h: $(TIMESTAMPDIR)/$(MJIT_HEADER:.h=)$(MJIT_HEADER_SUFFIX).time
diff --git a/meta/recipes-devtools/ruby/ruby_3.1.2.bb b/meta/recipes-devtools/ruby/ruby_3.1.2.bb
deleted file mode 100644
index 38ba46731b..0000000000
--- a/meta/recipes-devtools/ruby/ruby_3.1.2.bb
+++ /dev/null
@@ -1,107 +0,0 @@
-require ruby.inc
-
-DEPENDS:append:libc-musl = " libucontext"
-
-SRC_URI += " \
- file://remove_has_include_macros.patch \
- file://run-ptest \
- file://0001-template-Makefile.in-do-not-write-host-cross-cc-item.patch \
- file://0002-template-Makefile.in-filter-out-f-prefix-map.patch \
- file://0003-rdoc-build-reproducible-documentation.patch \
- file://0004-lib-mkmf.rb-sort-list-of-object-files-in-generated-M.patch \
- file://0005-Mark-Gemspec-reproducible-change-fixing-784225-too.patch \
- file://0006-Make-gemspecs-reproducible.patch \
- file://0001-vm_dump.c-Define-REG_S1-and-REG_S2-for-musl-riscv.patch \
- "
-
-SRC_URI[sha256sum] = "61843112389f02b735428b53bb64cf988ad9fb81858b8248e22e57336f24a83e"
-
-PACKAGECONFIG ??= ""
-PACKAGECONFIG += "${@bb.utils.filter('DISTRO_FEATURES', 'ipv6', d)}"
-
-PACKAGECONFIG[valgrind] = "--with-valgrind=yes, --with-valgrind=no, valgrind"
-PACKAGECONFIG[gmp] = "--with-gmp=yes, --with-gmp=no, gmp"
-PACKAGECONFIG[ipv6] = "--enable-ipv6, --disable-ipv6,"
-# rdoc is off by default due to non-reproducibility reported in
-# https://bugs.ruby-lang.org/issues/18456
-PACKAGECONFIG[rdoc] = "--enable-install-rdoc,--disable-install-rdoc,"
-
-EXTRA_OECONF = "\
- --disable-versioned-paths \
- --disable-rpath \
- --disable-dtrace \
- --enable-shared \
- --enable-load-relative \
- --with-pkg-config=pkg-config \
- --with-static-linked-ext \
-"
-
-EXTRA_OECONF:append:libc-musl = "\
- ac_cv_func_isnan=yes \
- ac_cv_func_isinf=yes \
-"
-
-PARALLEL_MAKEINST = ""
-
-do_install:append:class-target () {
- # Find out rbconfig.rb from .installed.list
- rbconfig_rb=`grep rbconfig.rb ${B}/.installed.list`
- # Remove build host directories
- sed -i -e 's:--sysroot=${STAGING_DIR_TARGET}::g' \
- -e s:'--with-libtool-sysroot=${STAGING_DIR_TARGET}'::g \
- -e 's|${DEBUG_PREFIX_MAP}||g' \
- -e 's:${HOSTTOOLS_DIR}/::g' \
- -e 's:${RECIPE_SYSROOT_NATIVE}::g' \
- -e 's:${RECIPE_SYSROOT}::g' \
- -e 's:${BASE_WORKDIR}/${MULTIMACH_TARGET_SYS}::g' \
- ${D}$rbconfig_rb
-
- sed -i -e 's|${DEBUG_PREFIX_MAP}||g' \
- ${D}${libdir}/pkgconfig/*.pc
-
- # logs that may contain host-specific paths
- find ${D} -name gem_make.out -delete
-}
-
-do_install_ptest () {
- cp -rf ${S}/test ${D}${PTEST_PATH}/
-
- install -D ${S}/tool/test/runner.rb ${D}${PTEST_PATH}/tool/test/runner.rb
- cp -r ${S}/tool/lib ${D}${PTEST_PATH}/tool/
- mkdir -p ${D}${PTEST_PATH}/lib
- cp -r ${S}/lib/did_you_mean ${S}/lib/rdoc ${D}${PTEST_PATH}/lib
-
- # install test-binaries
- # These .so files have sporadic reproducibility fails as seen here:
- # https://autobuilder.yocto.io/pub/repro-fail/oe-reproducible-20220107-rm1diuww/packages/diff-html/
- # As they are needed only in ruby-ptest, and that is currently altogether disabled, let's take them out.
- # If someone wants to look at where the non-determinism comes from, one possible reason is use of
- # -rdynamic -Wl,-export-dynamic
- #find $(find ./.ext -path '*/-test-') -name '*.so' -print0 \
- # | tar --no-recursion --null -T - --no-same-owner --preserve-permissions -cf - \
- # | tar -C ${D}${libdir}/ruby/${SHRT_VER}.0/ --no-same-owner --preserve-permissions --strip-components=2 -xf -
- # adjust path to not assume build directory layout
- sed -e 's|File.expand_path(.*\.\./bin/erb[^)]*|File.expand_path("${bindir}/erb"|g' \
- -i ${D}${PTEST_PATH}/test/erb/test_erb_command.rb
-
- cp -r ${S}/include ${D}/${libdir}/ruby/
-}
-
-PACKAGES =+ "${PN}-ri-docs ${PN}-rdoc"
-
-SUMMARY:${PN}-ri-docs = "ri (Ruby Interactive) documentation for the Ruby standard library"
-RDEPENDS:${PN}-ri-docs = "${PN}"
-FILES:${PN}-ri-docs += "${datadir}/ri"
-
-SUMMARY:${PN}-rdoc = "RDoc documentation generator from Ruby source"
-RDEPENDS:${PN}-rdoc = "${PN}"
-FILES:${PN}-rdoc += "${libdir}/ruby/*/rdoc ${bindir}/rdoc"
-
-FILES:${PN} += "${datadir}/rubygems"
-
-FILES:${PN}-ptest:append:class-target = "\
- ${libdir}/ruby/include \
- ${libdir}/ruby/${SHRT_VER}.0/*/-test- \
-"
-
-BBCLASSEXTEND = "native"
diff --git a/meta/recipes-devtools/ruby/ruby_3.3.0.bb b/meta/recipes-devtools/ruby/ruby_3.3.0.bb
new file mode 100644
index 0000000000..17eb6d73c2
--- /dev/null
+++ b/meta/recipes-devtools/ruby/ruby_3.3.0.bb
@@ -0,0 +1,140 @@
+SUMMARY = "An interpreter of object-oriented scripting language"
+DESCRIPTION = "Ruby is an interpreted scripting language for quick \
+and easy object-oriented programming. It has many features to process \
+text files and to do system management tasks (as in Perl). \
+It is simple, straight-forward, and extensible. \
+"
+HOMEPAGE = "http://www.ruby-lang.org/"
+SECTION = "devel/ruby"
+LICENSE = "Ruby | BSD-2-Clause | BSD-3-Clause | GPL-2.0-only | ISC | MIT"
+LIC_FILES_CHKSUM = "file://COPYING;md5=5b8c87559868796979806100db3f3805 \
+ file://BSDL;md5=8b50bc6de8f586dc66790ba11d064d75 \
+ file://GPL;md5=b234ee4d69f5fce4486a80fdaf4a4263 \
+ file://LEGAL;md5=81e6a4d81533b9263da4c3485a0ad883 \
+ "
+
+DEPENDS = "zlib openssl libyaml gdbm readline libffi"
+DEPENDS:append:class-target = " ruby-native"
+DEPENDS:append:class-nativesdk = " ruby-native"
+
+SHRT_VER = "${@oe.utils.trim_version("${PV}", 2)}"
+SRC_URI = "http://cache.ruby-lang.org/pub/ruby/${SHRT_VER}/ruby-${PV}.tar.gz \
+ file://0001-extmk-fix-cross-compilation-of-external-gems.patch \
+ file://0002-Obey-LDFLAGS-for-the-link-of-libruby.patch \
+ file://run-ptest \
+ file://0003-rdoc-build-reproducible-documentation.patch \
+ file://0004-lib-mkmf.rb-sort-list-of-object-files-in-generated-M.patch \
+ file://0005-Mark-Gemspec-reproducible-change-fixing-784225-too.patch \
+ file://0006-Make-gemspecs-reproducible.patch \
+ file://0001-vm_dump.c-Define-REG_S1-and-REG_S2-for-musl-riscv.patch \
+ "
+UPSTREAM_CHECK_URI = "https://www.ruby-lang.org/en/downloads/"
+
+inherit autotools ptest pkgconfig
+
+
+# This snippet lets compiled extensions which rely on external libraries,
+# such as zlib, compile properly. If we don't do this, then when extmk.rb
+# runs, it uses the native libraries instead of the target libraries, and so
+# none of the linking operations succeed -- which makes extconf.rb think
+# that the libraries aren't available and hence that the extension can't be
+# built.
+
+do_configure:prepend() {
+ sed -i "s#%%TARGET_CFLAGS%%#$CFLAGS#; s#%%TARGET_LDFLAGS%%#$LDFLAGS#" ${S}/common.mk
+ rm -rf ${S}/ruby/
+}
+
+DEPENDS:append:libc-musl = " libucontext"
+
+SRC_URI[sha256sum] = "96518814d9832bece92a85415a819d4893b307db5921ae1f0f751a9a89a56b7d"
+
+PACKAGECONFIG ??= ""
+PACKAGECONFIG += "${@bb.utils.filter('DISTRO_FEATURES', 'ipv6', d)}"
+
+PACKAGECONFIG[valgrind] = "--with-valgrind=yes, --with-valgrind=no, valgrind"
+PACKAGECONFIG[gmp] = "--with-gmp=yes, --with-gmp=no, gmp"
+PACKAGECONFIG[ipv6] = "--enable-ipv6, --disable-ipv6,"
+# rdoc is off by default due to non-reproducibility reported in
+# https://bugs.ruby-lang.org/issues/18456
+PACKAGECONFIG[rdoc] = "--enable-install-rdoc,--disable-install-rdoc,"
+
+EXTRA_OECONF = "\
+ --disable-versioned-paths \
+ --disable-rpath \
+ --disable-dtrace \
+ --enable-shared \
+ --enable-load-relative \
+ --with-pkg-config=pkg-config \
+ --with-static-linked-ext \
+"
+
+EXTRA_OECONF:append:libc-musl = "\
+ ac_cv_func_isnan=yes \
+ ac_cv_func_isinf=yes \
+"
+
+PARALLEL_MAKEINST = ""
+
+do_install:append:class-target () {
+ # Find out rbconfig.rb from .installed.list
+ rbconfig_rb=`grep rbconfig.rb ${B}/.installed.list`
+ # Remove build host directories
+ sed -i -e 's:--sysroot=${STAGING_DIR_TARGET}::g' \
+ -e s:'--with-libtool-sysroot=${STAGING_DIR_TARGET}'::g \
+ -e 's|${DEBUG_PREFIX_MAP}||g' \
+ -e 's:${HOSTTOOLS_DIR}/::g' \
+ -e 's:${RECIPE_SYSROOT_NATIVE}::g' \
+ -e 's:${RECIPE_SYSROOT}::g' \
+ -e 's:${BASE_WORKDIR}/${MULTIMACH_TARGET_SYS}::g' \
+ ${D}$rbconfig_rb
+
+ sed -i -e 's|${DEBUG_PREFIX_MAP}||g' \
+ ${D}${libdir}/pkgconfig/*.pc
+
+ # logs that may contain host-specific paths
+ find ${D} -name gem_make.out -delete
+}
+
+do_install_ptest () {
+ cp -rf ${S}/test ${D}${PTEST_PATH}/
+
+ install -D ${S}/tool/test/runner.rb ${D}${PTEST_PATH}/tool/test/runner.rb
+ cp -r ${S}/tool/lib ${D}${PTEST_PATH}/tool/
+ mkdir -p ${D}${PTEST_PATH}/lib
+ cp -r ${S}/lib/did_you_mean ${S}/lib/rdoc ${D}${PTEST_PATH}/lib
+
+ # install test-binaries
+ # These .so files have sporadic reproducibility fails as seen here:
+ # https://autobuilder.yocto.io/pub/repro-fail/oe-reproducible-20220107-rm1diuww/packages/diff-html/
+ # As they are needed only in ruby-ptest, and that is currently altogether disabled, let's take them out.
+ # If someone wants to look at where the non-determinism comes from, one possible reason is use of
+ # -rdynamic -Wl,-export-dynamic
+ #find $(find ./.ext -path '*/-test-') -name '*.so' -print0 \
+ # | tar --no-recursion --null -T - --no-same-owner --preserve-permissions -cf - \
+ # | tar -C ${D}${libdir}/ruby/${SHRT_VER}.0/ --no-same-owner --preserve-permissions --strip-components=2 -xf -
+ # adjust path to not assume build directory layout
+ sed -e 's|File.expand_path(.*\.\./bin/erb[^)]*|File.expand_path("${bindir}/erb"|g' \
+ -i ${D}${PTEST_PATH}/test/erb/test_erb_command.rb
+
+ cp -r ${S}/include ${D}/${libdir}/ruby/
+}
+
+PACKAGES =+ "${PN}-ri-docs ${PN}-rdoc"
+
+SUMMARY:${PN}-ri-docs = "ri (Ruby Interactive) documentation for the Ruby standard library"
+RDEPENDS:${PN}-ri-docs = "${PN}"
+FILES:${PN}-ri-docs += "${datadir}/ri"
+
+SUMMARY:${PN}-rdoc = "RDoc documentation generator from Ruby source"
+RDEPENDS:${PN}-rdoc = "${PN}"
+FILES:${PN}-rdoc += "${libdir}/ruby/*/rdoc ${bindir}/rdoc"
+
+FILES:${PN} += "${datadir}/rubygems"
+
+FILES:${PN}-ptest:append:class-target = "\
+ ${libdir}/ruby/include \
+ ${libdir}/ruby/${SHRT_VER}.0/*/-test- \
+"
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/run-postinsts/run-postinsts/run-postinsts b/meta/recipes-devtools/run-postinsts/run-postinsts/run-postinsts
index 95dccb9cae..1f3e692029 100755
--- a/meta/recipes-devtools/run-postinsts/run-postinsts/run-postinsts
+++ b/meta/recipes-devtools/run-postinsts/run-postinsts/run-postinsts
@@ -81,11 +81,18 @@ remove_rcsd_link=1
if $pm_installed; then
case $pm in
"ipk")
- eval opkg configure $append_log
+ if ! `fcntl-lock --wait 30 /run/opkg.lock true`; then
+ eval echo "Unable to obtain the opkg lock, deadlock?" $append_log
+ fi
+ if ! eval "opkg configure $append_log"; then
+ exit 1
+ fi
;;
"deb")
- eval dpkg --configure -a $append_log
+ if ! eval "eval dpkg --configure -a $append_log"; then
+ exit 1
+ fi
;;
esac
else
diff --git a/meta/recipes-devtools/run-postinsts/run-postinsts/run-postinsts.service b/meta/recipes-devtools/run-postinsts/run-postinsts/run-postinsts.service
index 7f72f3388a..b6b81d5c1a 100644
--- a/meta/recipes-devtools/run-postinsts/run-postinsts/run-postinsts.service
+++ b/meta/recipes-devtools/run-postinsts/run-postinsts/run-postinsts.service
@@ -1,7 +1,7 @@
[Unit]
Description=Run pending postinsts
DefaultDependencies=no
-After=systemd-remount-fs.service systemd-tmpfiles-setup.service tmp.mount
+After=systemd-remount-fs.service systemd-tmpfiles-setup.service tmp.mount ldconfig.service
Before=sysinit.target
[Service]
diff --git a/meta/recipes-devtools/run-postinsts/run-postinsts_1.0.bb b/meta/recipes-devtools/run-postinsts/run-postinsts_1.0.bb
index db353d607b..e977942de8 100644
--- a/meta/recipes-devtools/run-postinsts/run-postinsts_1.0.bb
+++ b/meta/recipes-devtools/run-postinsts/run-postinsts_1.0.bb
@@ -1,7 +1,6 @@
SUMMARY = "Runs postinstall scripts on first boot of the target device"
DESCRIPTION = "${SUMMARY}"
SECTION = "devel"
-PR = "r10"
LICENSE = "MIT"
LIC_FILES_CHKSUM = "file://${COREBASE}/meta/COPYING.MIT;md5=3da9cfbcb788c80a0384361b4de20420"
@@ -13,6 +12,8 @@ S = "${WORKDIR}"
inherit allarch systemd update-rc.d
+RDEPENDS:${PN} = "util-linux-fcntl-lock"
+
INITSCRIPT_NAME = "run-postinsts"
INITSCRIPT_PARAMS = "start 99 S ."
diff --git a/meta/recipes-devtools/rust/README-rust.md b/meta/recipes-devtools/rust/README-rust.md
index b87637c3b0..209836ab65 100644
--- a/meta/recipes-devtools/rust/README-rust.md
+++ b/meta/recipes-devtools/rust/README-rust.md
@@ -3,22 +3,6 @@
This provides the Rust compiler, tools for building packages (cargo), and
a few example projects.
-## What works:
-
- - Building `rust-native` and `cargo-native`
- - Building Rust based projects with Cargo for the TARGET
- - e.g. `rustfmt` which is used by the CI system
- - `-buildsdk` and `-crosssdk` packages
-
-## What doesn't:
-
- - Using anything but x86_64 or arm64 as the build environment
- - rust (built for target) [issue #81](https://github.com/meta-rust/meta-rust/issues/81)
-
-## What's untested:
-
- - cargo (built for target)
-
## Building a rust package
When building a rust package in bitbake, it's usually easiest to build with
@@ -36,11 +20,11 @@ may also be added to the SDK.
NOTE: You will have to edit the generated recipe based on the comments
contained within it
-## TODO
-
## Pitfalls
- - TARGET_SYS _must_ be different from BUILD_SYS. This is due to the way Rust configuration options are tracked for different targets. This is the reason we use the Yocto triples instead of the native Rust triples. See rust-lang/cargo#3349.
+ - TARGET_SYS _must_ be different from BUILD_SYS. This is due to the way Rust
+ configuration options are tracked for different targets. This is the reason
+ we use the Yocto triples instead of the native Rust triples. See rust-lang/cargo#3349.
## Dependencies
@@ -52,7 +36,3 @@ On the target:
- Any `-sys` packages your project might need must have RDEPENDs for
the native library.
-## Copyright
-
-MIT OR Apache-2.0 - Same as rust
-
diff --git a/meta/recipes-devtools/rust/cargo-c-crates.inc b/meta/recipes-devtools/rust/cargo-c-crates.inc
new file mode 100644
index 0000000000..f6da3e654e
--- /dev/null
+++ b/meta/recipes-devtools/rust/cargo-c-crates.inc
@@ -0,0 +1,654 @@
+# Autogenerated with 'bitbake -c update_crates cargo-c-native'
+
+# from Cargo.lock
+SRC_URI += " \
+ crate://crates.io/adler/1.0.2 \
+ crate://crates.io/ahash/0.8.7 \
+ crate://crates.io/aho-corasick/1.1.2 \
+ crate://crates.io/allocator-api2/0.2.16 \
+ crate://crates.io/anstream/0.6.11 \
+ crate://crates.io/anstyle/1.0.6 \
+ crate://crates.io/anstyle-parse/0.2.3 \
+ crate://crates.io/anstyle-query/1.0.2 \
+ crate://crates.io/anstyle-wincon/3.0.2 \
+ crate://crates.io/anyhow/1.0.79 \
+ crate://crates.io/arc-swap/1.6.0 \
+ crate://crates.io/autocfg/1.1.0 \
+ crate://crates.io/base16ct/0.2.0 \
+ crate://crates.io/base64/0.21.7 \
+ crate://crates.io/base64ct/1.6.0 \
+ crate://crates.io/bitflags/1.3.2 \
+ crate://crates.io/bitflags/2.4.2 \
+ crate://crates.io/bitmaps/2.1.0 \
+ crate://crates.io/block-buffer/0.10.4 \
+ crate://crates.io/bstr/1.9.0 \
+ crate://crates.io/btoi/0.4.3 \
+ crate://crates.io/bumpalo/3.14.0 \
+ crate://crates.io/bytes/1.5.0 \
+ crate://crates.io/bytesize/1.3.0 \
+ crate://crates.io/cargo/0.77.0 \
+ crate://crates.io/cargo-credential/0.4.2 \
+ crate://crates.io/cargo-credential-libsecret/0.4.2 \
+ crate://crates.io/cargo-credential-macos-keychain/0.4.2 \
+ crate://crates.io/cargo-credential-wincred/0.4.2 \
+ crate://crates.io/cargo-platform/0.1.7 \
+ crate://crates.io/cargo-util/0.2.9 \
+ crate://crates.io/cbindgen/0.26.0 \
+ crate://crates.io/cc/1.0.83 \
+ crate://crates.io/cfg-if/1.0.0 \
+ crate://crates.io/clap/4.5.0 \
+ crate://crates.io/clap_builder/4.5.0 \
+ crate://crates.io/clap_derive/4.5.0 \
+ crate://crates.io/clap_lex/0.7.0 \
+ crate://crates.io/clru/0.6.1 \
+ crate://crates.io/color-print/0.3.5 \
+ crate://crates.io/color-print-proc-macro/0.3.5 \
+ crate://crates.io/colorchoice/1.0.0 \
+ crate://crates.io/const-oid/0.9.6 \
+ crate://crates.io/core-foundation/0.9.4 \
+ crate://crates.io/core-foundation-sys/0.8.6 \
+ crate://crates.io/cpufeatures/0.2.12 \
+ crate://crates.io/crates-io/0.39.2 \
+ crate://crates.io/crc32fast/1.3.2 \
+ crate://crates.io/crossbeam-channel/0.5.11 \
+ crate://crates.io/crossbeam-deque/0.8.5 \
+ crate://crates.io/crossbeam-epoch/0.9.18 \
+ crate://crates.io/crossbeam-utils/0.8.19 \
+ crate://crates.io/crypto-bigint/0.5.5 \
+ crate://crates.io/crypto-common/0.1.6 \
+ crate://crates.io/ct-codecs/1.1.1 \
+ crate://crates.io/curl/0.4.45 \
+ crate://crates.io/curl-sys/0.4.72+curl-8.6.0 \
+ crate://crates.io/der/0.7.8 \
+ crate://crates.io/deranged/0.3.11 \
+ crate://crates.io/digest/0.10.7 \
+ crate://crates.io/dunce/1.0.4 \
+ crate://crates.io/ecdsa/0.16.9 \
+ crate://crates.io/ed25519-compact/2.1.1 \
+ crate://crates.io/either/1.9.0 \
+ crate://crates.io/elliptic-curve/0.13.8 \
+ crate://crates.io/encoding_rs/0.8.33 \
+ crate://crates.io/equivalent/1.0.1 \
+ crate://crates.io/erased-serde/0.4.2 \
+ crate://crates.io/errno/0.3.8 \
+ crate://crates.io/fallible-iterator/0.3.0 \
+ crate://crates.io/fallible-streaming-iterator/0.1.9 \
+ crate://crates.io/faster-hex/0.9.0 \
+ crate://crates.io/fastrand/2.0.1 \
+ crate://crates.io/ff/0.13.0 \
+ crate://crates.io/fiat-crypto/0.2.6 \
+ crate://crates.io/filetime/0.2.23 \
+ crate://crates.io/flate2/1.0.28 \
+ crate://crates.io/foreign-types/0.3.2 \
+ crate://crates.io/foreign-types-shared/0.1.1 \
+ crate://crates.io/form_urlencoded/1.2.1 \
+ crate://crates.io/generic-array/0.14.7 \
+ crate://crates.io/getrandom/0.2.12 \
+ crate://crates.io/git2/0.18.2 \
+ crate://crates.io/git2-curl/0.19.0 \
+ crate://crates.io/gix/0.56.0 \
+ crate://crates.io/gix-actor/0.28.1 \
+ crate://crates.io/gix-attributes/0.20.1 \
+ crate://crates.io/gix-bitmap/0.2.10 \
+ crate://crates.io/gix-chunk/0.4.7 \
+ crate://crates.io/gix-command/0.3.4 \
+ crate://crates.io/gix-commitgraph/0.22.1 \
+ crate://crates.io/gix-config/0.32.1 \
+ crate://crates.io/gix-config-value/0.14.4 \
+ crate://crates.io/gix-credentials/0.22.0 \
+ crate://crates.io/gix-date/0.8.3 \
+ crate://crates.io/gix-diff/0.38.0 \
+ crate://crates.io/gix-discover/0.27.0 \
+ crate://crates.io/gix-features/0.35.0 \
+ crate://crates.io/gix-features/0.36.1 \
+ crate://crates.io/gix-filter/0.7.0 \
+ crate://crates.io/gix-fs/0.8.1 \
+ crate://crates.io/gix-glob/0.14.1 \
+ crate://crates.io/gix-hash/0.13.3 \
+ crate://crates.io/gix-hashtable/0.4.1 \
+ crate://crates.io/gix-ignore/0.9.1 \
+ crate://crates.io/gix-index/0.27.1 \
+ crate://crates.io/gix-lock/11.0.1 \
+ crate://crates.io/gix-macros/0.1.3 \
+ crate://crates.io/gix-negotiate/0.10.0 \
+ crate://crates.io/gix-object/0.39.0 \
+ crate://crates.io/gix-odb/0.55.0 \
+ crate://crates.io/gix-pack/0.45.0 \
+ crate://crates.io/gix-packetline/0.17.3 \
+ crate://crates.io/gix-packetline-blocking/0.17.3 \
+ crate://crates.io/gix-path/0.10.5 \
+ crate://crates.io/gix-pathspec/0.4.1 \
+ crate://crates.io/gix-prompt/0.8.2 \
+ crate://crates.io/gix-protocol/0.42.0 \
+ crate://crates.io/gix-quote/0.4.10 \
+ crate://crates.io/gix-ref/0.39.1 \
+ crate://crates.io/gix-refspec/0.20.0 \
+ crate://crates.io/gix-revision/0.24.0 \
+ crate://crates.io/gix-revwalk/0.10.0 \
+ crate://crates.io/gix-sec/0.10.4 \
+ crate://crates.io/gix-submodule/0.6.0 \
+ crate://crates.io/gix-tempfile/11.0.1 \
+ crate://crates.io/gix-trace/0.1.7 \
+ crate://crates.io/gix-transport/0.39.0 \
+ crate://crates.io/gix-traverse/0.35.0 \
+ crate://crates.io/gix-url/0.25.2 \
+ crate://crates.io/gix-utils/0.1.9 \
+ crate://crates.io/gix-validate/0.8.3 \
+ crate://crates.io/gix-worktree/0.28.0 \
+ crate://crates.io/glob/0.3.1 \
+ crate://crates.io/globset/0.4.14 \
+ crate://crates.io/group/0.13.0 \
+ crate://crates.io/hashbrown/0.12.3 \
+ crate://crates.io/hashbrown/0.14.3 \
+ crate://crates.io/hashlink/0.8.4 \
+ crate://crates.io/heck/0.4.1 \
+ crate://crates.io/hermit-abi/0.3.5 \
+ crate://crates.io/hex/0.4.3 \
+ crate://crates.io/hkdf/0.12.4 \
+ crate://crates.io/hmac/0.12.1 \
+ crate://crates.io/home/0.5.9 \
+ crate://crates.io/http-auth/0.1.9 \
+ crate://crates.io/humantime/2.1.0 \
+ crate://crates.io/idna/0.5.0 \
+ crate://crates.io/ignore/0.4.22 \
+ crate://crates.io/im-rc/15.1.0 \
+ crate://crates.io/indexmap/1.9.3 \
+ crate://crates.io/indexmap/2.2.2 \
+ crate://crates.io/is-terminal/0.4.11 \
+ crate://crates.io/itertools/0.12.1 \
+ crate://crates.io/itoa/1.0.10 \
+ crate://crates.io/jobserver/0.1.28 \
+ crate://crates.io/js-sys/0.3.68 \
+ crate://crates.io/kstring/2.0.0 \
+ crate://crates.io/lazy_static/1.4.0 \
+ crate://crates.io/lazycell/1.3.0 \
+ crate://crates.io/libc/0.2.153 \
+ crate://crates.io/libgit2-sys/0.16.2+1.7.2 \
+ crate://crates.io/libloading/0.8.1 \
+ crate://crates.io/libnghttp2-sys/0.1.9+1.58.0 \
+ crate://crates.io/libsqlite3-sys/0.27.0 \
+ crate://crates.io/libssh2-sys/0.3.0 \
+ crate://crates.io/libz-sys/1.1.15 \
+ crate://crates.io/linux-raw-sys/0.4.13 \
+ crate://crates.io/lock_api/0.4.11 \
+ crate://crates.io/log/0.4.20 \
+ crate://crates.io/matchers/0.1.0 \
+ crate://crates.io/maybe-async/0.2.9 \
+ crate://crates.io/memchr/2.7.1 \
+ crate://crates.io/memmap2/0.9.4 \
+ crate://crates.io/minimal-lexical/0.2.1 \
+ crate://crates.io/miniz_oxide/0.7.2 \
+ crate://crates.io/miow/0.6.0 \
+ crate://crates.io/nom/7.1.3 \
+ crate://crates.io/normpath/1.1.1 \
+ crate://crates.io/nu-ansi-term/0.46.0 \
+ crate://crates.io/num-conv/0.1.0 \
+ crate://crates.io/num-traits/0.2.18 \
+ crate://crates.io/num_threads/0.1.6 \
+ crate://crates.io/once_cell/1.19.0 \
+ crate://crates.io/opener/0.6.1 \
+ crate://crates.io/openssl/0.10.63 \
+ crate://crates.io/openssl-macros/0.1.1 \
+ crate://crates.io/openssl-probe/0.1.5 \
+ crate://crates.io/openssl-src/300.2.2+3.2.1 \
+ crate://crates.io/openssl-sys/0.9.99 \
+ crate://crates.io/ordered-float/2.10.1 \
+ crate://crates.io/orion/0.17.6 \
+ crate://crates.io/os_info/3.7.0 \
+ crate://crates.io/overload/0.1.1 \
+ crate://crates.io/p384/0.13.0 \
+ crate://crates.io/parking_lot/0.12.1 \
+ crate://crates.io/parking_lot_core/0.9.9 \
+ crate://crates.io/pasetors/0.6.8 \
+ crate://crates.io/pathdiff/0.2.1 \
+ crate://crates.io/pem-rfc7468/0.7.0 \
+ crate://crates.io/percent-encoding/2.3.1 \
+ crate://crates.io/pin-project-lite/0.2.13 \
+ crate://crates.io/pkcs8/0.10.2 \
+ crate://crates.io/pkg-config/0.3.29 \
+ crate://crates.io/powerfmt/0.2.0 \
+ crate://crates.io/ppv-lite86/0.2.17 \
+ crate://crates.io/primeorder/0.13.6 \
+ crate://crates.io/proc-macro2/1.0.78 \
+ crate://crates.io/prodash/26.2.2 \
+ crate://crates.io/pulldown-cmark/0.9.6 \
+ crate://crates.io/quote/1.0.35 \
+ crate://crates.io/rand/0.8.5 \
+ crate://crates.io/rand_chacha/0.3.1 \
+ crate://crates.io/rand_core/0.6.4 \
+ crate://crates.io/rand_xoshiro/0.6.0 \
+ crate://crates.io/redox_syscall/0.4.1 \
+ crate://crates.io/regex/1.10.3 \
+ crate://crates.io/regex-automata/0.1.10 \
+ crate://crates.io/regex-automata/0.4.5 \
+ crate://crates.io/regex-syntax/0.6.29 \
+ crate://crates.io/regex-syntax/0.8.2 \
+ crate://crates.io/rfc6979/0.4.0 \
+ crate://crates.io/rusqlite/0.30.0 \
+ crate://crates.io/rustfix/0.7.0 \
+ crate://crates.io/rustix/0.38.31 \
+ crate://crates.io/ryu/1.0.16 \
+ crate://crates.io/same-file/1.0.6 \
+ crate://crates.io/schannel/0.1.23 \
+ crate://crates.io/scopeguard/1.2.0 \
+ crate://crates.io/sec1/0.7.3 \
+ crate://crates.io/security-framework/2.9.2 \
+ crate://crates.io/security-framework-sys/2.9.1 \
+ crate://crates.io/semver/1.0.21 \
+ crate://crates.io/serde/1.0.196 \
+ crate://crates.io/serde-untagged/0.1.5 \
+ crate://crates.io/serde-value/0.7.0 \
+ crate://crates.io/serde_derive/1.0.196 \
+ crate://crates.io/serde_ignored/0.1.10 \
+ crate://crates.io/serde_json/1.0.113 \
+ crate://crates.io/serde_spanned/0.6.5 \
+ crate://crates.io/sha1/0.10.6 \
+ crate://crates.io/sha1_smol/1.0.0 \
+ crate://crates.io/sha2/0.10.8 \
+ crate://crates.io/sharded-slab/0.1.7 \
+ crate://crates.io/shell-escape/0.1.5 \
+ crate://crates.io/shell-words/1.1.0 \
+ crate://crates.io/signature/2.2.0 \
+ crate://crates.io/sized-chunks/0.6.5 \
+ crate://crates.io/smallvec/1.13.1 \
+ crate://crates.io/socket2/0.5.5 \
+ crate://crates.io/spki/0.7.3 \
+ crate://crates.io/static_assertions/1.1.0 \
+ crate://crates.io/strsim/0.11.0 \
+ crate://crates.io/subtle/2.5.0 \
+ crate://crates.io/supports-hyperlinks/2.1.0 \
+ crate://crates.io/syn/1.0.109 \
+ crate://crates.io/syn/2.0.48 \
+ crate://crates.io/tar/0.4.40 \
+ crate://crates.io/tempfile/3.10.0 \
+ crate://crates.io/terminal_size/0.3.0 \
+ crate://crates.io/thiserror/1.0.56 \
+ crate://crates.io/thiserror-impl/1.0.56 \
+ crate://crates.io/thread_local/1.1.7 \
+ crate://crates.io/time/0.3.34 \
+ crate://crates.io/time-core/0.1.2 \
+ crate://crates.io/time-macros/0.2.17 \
+ crate://crates.io/tinyvec/1.6.0 \
+ crate://crates.io/tinyvec_macros/0.1.1 \
+ crate://crates.io/toml/0.5.11 \
+ crate://crates.io/toml/0.8.10 \
+ crate://crates.io/toml_datetime/0.6.5 \
+ crate://crates.io/toml_edit/0.21.1 \
+ crate://crates.io/toml_edit/0.22.4 \
+ crate://crates.io/tracing/0.1.40 \
+ crate://crates.io/tracing-attributes/0.1.27 \
+ crate://crates.io/tracing-core/0.1.32 \
+ crate://crates.io/tracing-log/0.2.0 \
+ crate://crates.io/tracing-subscriber/0.3.18 \
+ crate://crates.io/typenum/1.17.0 \
+ crate://crates.io/unicase/2.7.0 \
+ crate://crates.io/unicode-bidi/0.3.15 \
+ crate://crates.io/unicode-bom/2.0.3 \
+ crate://crates.io/unicode-ident/1.0.12 \
+ crate://crates.io/unicode-normalization/0.1.22 \
+ crate://crates.io/unicode-width/0.1.11 \
+ crate://crates.io/unicode-xid/0.2.4 \
+ crate://crates.io/url/2.5.0 \
+ crate://crates.io/utf8parse/0.2.1 \
+ crate://crates.io/valuable/0.1.0 \
+ crate://crates.io/vcpkg/0.2.15 \
+ crate://crates.io/version_check/0.9.4 \
+ crate://crates.io/walkdir/2.4.0 \
+ crate://crates.io/wasi/0.11.0+wasi-snapshot-preview1 \
+ crate://crates.io/wasm-bindgen/0.2.91 \
+ crate://crates.io/wasm-bindgen-backend/0.2.91 \
+ crate://crates.io/wasm-bindgen-macro/0.2.91 \
+ crate://crates.io/wasm-bindgen-macro-support/0.2.91 \
+ crate://crates.io/wasm-bindgen-shared/0.2.91 \
+ crate://crates.io/winapi/0.3.9 \
+ crate://crates.io/winapi-i686-pc-windows-gnu/0.4.0 \
+ crate://crates.io/winapi-util/0.1.6 \
+ crate://crates.io/winapi-x86_64-pc-windows-gnu/0.4.0 \
+ crate://crates.io/windows-sys/0.48.0 \
+ crate://crates.io/windows-sys/0.52.0 \
+ crate://crates.io/windows-targets/0.48.5 \
+ crate://crates.io/windows-targets/0.52.0 \
+ crate://crates.io/windows_aarch64_gnullvm/0.48.5 \
+ crate://crates.io/windows_aarch64_gnullvm/0.52.0 \
+ crate://crates.io/windows_aarch64_msvc/0.48.5 \
+ crate://crates.io/windows_aarch64_msvc/0.52.0 \
+ crate://crates.io/windows_i686_gnu/0.48.5 \
+ crate://crates.io/windows_i686_gnu/0.52.0 \
+ crate://crates.io/windows_i686_msvc/0.48.5 \
+ crate://crates.io/windows_i686_msvc/0.52.0 \
+ crate://crates.io/windows_x86_64_gnu/0.48.5 \
+ crate://crates.io/windows_x86_64_gnu/0.52.0 \
+ crate://crates.io/windows_x86_64_gnullvm/0.48.5 \
+ crate://crates.io/windows_x86_64_gnullvm/0.52.0 \
+ crate://crates.io/windows_x86_64_msvc/0.48.5 \
+ crate://crates.io/windows_x86_64_msvc/0.52.0 \
+ crate://crates.io/winnow/0.5.39 \
+ crate://crates.io/zerocopy/0.7.32 \
+ crate://crates.io/zerocopy-derive/0.7.32 \
+ crate://crates.io/zeroize/1.7.0 \
+"
+
+SRC_URI[adler-1.0.2.sha256sum] = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe"
+SRC_URI[ahash-0.8.7.sha256sum] = "77c3a9648d43b9cd48db467b3f87fdd6e146bcc88ab0180006cef2179fe11d01"
+SRC_URI[aho-corasick-1.1.2.sha256sum] = "b2969dcb958b36655471fc61f7e416fa76033bdd4bfed0678d8fee1e2d07a1f0"
+SRC_URI[allocator-api2-0.2.16.sha256sum] = "0942ffc6dcaadf03badf6e6a2d0228460359d5e34b57ccdc720b7382dfbd5ec5"
+SRC_URI[anstream-0.6.11.sha256sum] = "6e2e1ebcb11de5c03c67de28a7df593d32191b44939c482e97702baaaa6ab6a5"
+SRC_URI[anstyle-1.0.6.sha256sum] = "8901269c6307e8d93993578286ac0edf7f195079ffff5ebdeea6a59ffb7e36bc"
+SRC_URI[anstyle-parse-0.2.3.sha256sum] = "c75ac65da39e5fe5ab759307499ddad880d724eed2f6ce5b5e8a26f4f387928c"
+SRC_URI[anstyle-query-1.0.2.sha256sum] = "e28923312444cdd728e4738b3f9c9cac739500909bb3d3c94b43551b16517648"
+SRC_URI[anstyle-wincon-3.0.2.sha256sum] = "1cd54b81ec8d6180e24654d0b371ad22fc3dd083b6ff8ba325b72e00c87660a7"
+SRC_URI[anyhow-1.0.79.sha256sum] = "080e9890a082662b09c1ad45f567faeeb47f22b5fb23895fbe1e651e718e25ca"
+SRC_URI[arc-swap-1.6.0.sha256sum] = "bddcadddf5e9015d310179a59bb28c4d4b9920ad0f11e8e14dbadf654890c9a6"
+SRC_URI[autocfg-1.1.0.sha256sum] = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa"
+SRC_URI[base16ct-0.2.0.sha256sum] = "4c7f02d4ea65f2c1853089ffd8d2787bdbc63de2f0d29dedbcf8ccdfa0ccd4cf"
+SRC_URI[base64-0.21.7.sha256sum] = "9d297deb1925b89f2ccc13d7635fa0714f12c87adce1c75356b39ca9b7178567"
+SRC_URI[base64ct-1.6.0.sha256sum] = "8c3c1a368f70d6cf7302d78f8f7093da241fb8e8807c05cc9e51a125895a6d5b"
+SRC_URI[bitflags-1.3.2.sha256sum] = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a"
+SRC_URI[bitflags-2.4.2.sha256sum] = "ed570934406eb16438a4e976b1b4500774099c13b8cb96eec99f620f05090ddf"
+SRC_URI[bitmaps-2.1.0.sha256sum] = "031043d04099746d8db04daf1fa424b2bc8bd69d92b25962dcde24da39ab64a2"
+SRC_URI[block-buffer-0.10.4.sha256sum] = "3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71"
+SRC_URI[bstr-1.9.0.sha256sum] = "c48f0051a4b4c5e0b6d365cd04af53aeaa209e3cc15ec2cdb69e73cc87fbd0dc"
+SRC_URI[btoi-0.4.3.sha256sum] = "9dd6407f73a9b8b6162d8a2ef999fe6afd7cc15902ebf42c5cd296addf17e0ad"
+SRC_URI[bumpalo-3.14.0.sha256sum] = "7f30e7476521f6f8af1a1c4c0b8cc94f0bee37d91763d0ca2665f299b6cd8aec"
+SRC_URI[bytes-1.5.0.sha256sum] = "a2bd12c1caf447e69cd4528f47f94d203fd2582878ecb9e9465484c4148a8223"
+SRC_URI[bytesize-1.3.0.sha256sum] = "a3e368af43e418a04d52505cf3dbc23dda4e3407ae2fa99fd0e4f308ce546acc"
+SRC_URI[cargo-0.77.0.sha256sum] = "4a399e5bde59d144aa2c7ba643765e2f8c6c3c601daa2da03202caf66f2552b3"
+SRC_URI[cargo-credential-0.4.2.sha256sum] = "ec27ad011c37339b865c765fa28096cd63d5b25fab680c04d9e410cb586c327d"
+SRC_URI[cargo-credential-libsecret-0.4.2.sha256sum] = "26b0ff7a44dd0af0fcd8d09bb1a6d7f7652847cba10aad017a6ea0a25ba7f00f"
+SRC_URI[cargo-credential-macos-keychain-0.4.2.sha256sum] = "4b7cf89a47dc2c20ae3a7c94335e151be32c20f85cc2790defdb1f5dac818de5"
+SRC_URI[cargo-credential-wincred-0.4.2.sha256sum] = "341df45dc893bdffa36e2f7cbe3da90b38c5c74e7f4c0088ac801fd055b6df5b"
+SRC_URI[cargo-platform-0.1.7.sha256sum] = "694c8807f2ae16faecc43dc17d74b3eb042482789fd0eb64b39a2e04e087053f"
+SRC_URI[cargo-util-0.2.9.sha256sum] = "74862c3c6e53a1c1f8f0178f9d38ab41e49746cd3a7cafc239b3d0248fd4e342"
+SRC_URI[cbindgen-0.26.0.sha256sum] = "da6bc11b07529f16944307272d5bd9b22530bc7d05751717c9d416586cedab49"
+SRC_URI[cc-1.0.83.sha256sum] = "f1174fb0b6ec23863f8b971027804a42614e347eafb0a95bf0b12cdae21fc4d0"
+SRC_URI[cfg-if-1.0.0.sha256sum] = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
+SRC_URI[clap-4.5.0.sha256sum] = "80c21025abd42669a92efc996ef13cfb2c5c627858421ea58d5c3b331a6c134f"
+SRC_URI[clap_builder-4.5.0.sha256sum] = "458bf1f341769dfcf849846f65dffdf9146daa56bcd2a47cb4e1de9915567c99"
+SRC_URI[clap_derive-4.5.0.sha256sum] = "307bc0538d5f0f83b8248db3087aa92fe504e4691294d0c96c0eabc33f47ba47"
+SRC_URI[clap_lex-0.7.0.sha256sum] = "98cc8fbded0c607b7ba9dd60cd98df59af97e84d24e49c8557331cfc26d301ce"
+SRC_URI[clru-0.6.1.sha256sum] = "b8191fa7302e03607ff0e237d4246cc043ff5b3cb9409d995172ba3bea16b807"
+SRC_URI[color-print-0.3.5.sha256sum] = "7a858372ff14bab9b1b30ea504f2a4bc534582aee3e42ba2d41d2a7baba63d5d"
+SRC_URI[color-print-proc-macro-0.3.5.sha256sum] = "57e37866456a721d0a404439a1adae37a31be4e0055590d053dfe6981e05003f"
+SRC_URI[colorchoice-1.0.0.sha256sum] = "acbf1af155f9b9ef647e42cdc158db4b64a1b61f743629225fde6f3e0be2a7c7"
+SRC_URI[const-oid-0.9.6.sha256sum] = "c2459377285ad874054d797f3ccebf984978aa39129f6eafde5cdc8315b612f8"
+SRC_URI[core-foundation-0.9.4.sha256sum] = "91e195e091a93c46f7102ec7818a2aa394e1e1771c3ab4825963fa03e45afb8f"
+SRC_URI[core-foundation-sys-0.8.6.sha256sum] = "06ea2b9bc92be3c2baa9334a323ebca2d6f074ff852cd1d7b11064035cd3868f"
+SRC_URI[cpufeatures-0.2.12.sha256sum] = "53fe5e26ff1b7aef8bca9c6080520cfb8d9333c7568e1829cef191a9723e5504"
+SRC_URI[crates-io-0.39.2.sha256sum] = "6622f902c3c338eced1f000091f034846ae36aadaf35d0acd1ab0469a2d8ef1f"
+SRC_URI[crc32fast-1.3.2.sha256sum] = "b540bd8bc810d3885c6ea91e2018302f68baba2129ab3e88f32389ee9370880d"
+SRC_URI[crossbeam-channel-0.5.11.sha256sum] = "176dc175b78f56c0f321911d9c8eb2b77a78a4860b9c19db83835fea1a46649b"
+SRC_URI[crossbeam-deque-0.8.5.sha256sum] = "613f8cc01fe9cf1a3eb3d7f488fd2fa8388403e97039e2f73692932e291a770d"
+SRC_URI[crossbeam-epoch-0.9.18.sha256sum] = "5b82ac4a3c2ca9c3460964f020e1402edd5753411d7737aa39c3714ad1b5420e"
+SRC_URI[crossbeam-utils-0.8.19.sha256sum] = "248e3bacc7dc6baa3b21e405ee045c3047101a49145e7e9eca583ab4c2ca5345"
+SRC_URI[crypto-bigint-0.5.5.sha256sum] = "0dc92fb57ca44df6db8059111ab3af99a63d5d0f8375d9972e319a379c6bab76"
+SRC_URI[crypto-common-0.1.6.sha256sum] = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3"
+SRC_URI[ct-codecs-1.1.1.sha256sum] = "f3b7eb4404b8195a9abb6356f4ac07d8ba267045c8d6d220ac4dc992e6cc75df"
+SRC_URI[curl-0.4.45.sha256sum] = "f8e5123ab8c31200ce725939049ecd4a090b242608f24048131dedf9dd195aed"
+SRC_URI[curl-sys-0.4.72+curl-8.6.0.sha256sum] = "29cbdc8314c447d11e8fd156dcdd031d9e02a7a976163e396b548c03153bc9ea"
+SRC_URI[der-0.7.8.sha256sum] = "fffa369a668c8af7dbf8b5e56c9f744fbd399949ed171606040001947de40b1c"
+SRC_URI[deranged-0.3.11.sha256sum] = "b42b6fa04a440b495c8b04d0e71b707c585f83cb9cb28cf8cd0d976c315e31b4"
+SRC_URI[digest-0.10.7.sha256sum] = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292"
+SRC_URI[dunce-1.0.4.sha256sum] = "56ce8c6da7551ec6c462cbaf3bfbc75131ebbfa1c944aeaa9dab51ca1c5f0c3b"
+SRC_URI[ecdsa-0.16.9.sha256sum] = "ee27f32b5c5292967d2d4a9d7f1e0b0aed2c15daded5a60300e4abb9d8020bca"
+SRC_URI[ed25519-compact-2.1.1.sha256sum] = "e9b3460f44bea8cd47f45a0c70892f1eff856d97cd55358b2f73f663789f6190"
+SRC_URI[either-1.9.0.sha256sum] = "a26ae43d7bcc3b814de94796a5e736d4029efb0ee900c12e2d54c993ad1a1e07"
+SRC_URI[elliptic-curve-0.13.8.sha256sum] = "b5e6043086bf7973472e0c7dff2142ea0b680d30e18d9cc40f267efbf222bd47"
+SRC_URI[encoding_rs-0.8.33.sha256sum] = "7268b386296a025e474d5140678f75d6de9493ae55a5d709eeb9dd08149945e1"
+SRC_URI[equivalent-1.0.1.sha256sum] = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5"
+SRC_URI[erased-serde-0.4.2.sha256sum] = "55d05712b2d8d88102bc9868020c9e5c7a1f5527c452b9b97450a1d006140ba7"
+SRC_URI[errno-0.3.8.sha256sum] = "a258e46cdc063eb8519c00b9fc845fc47bcfca4130e2f08e88665ceda8474245"
+SRC_URI[fallible-iterator-0.3.0.sha256sum] = "2acce4a10f12dc2fb14a218589d4f1f62ef011b2d0cc4b3cb1bba8e94da14649"
+SRC_URI[fallible-streaming-iterator-0.1.9.sha256sum] = "7360491ce676a36bf9bb3c56c1aa791658183a54d2744120f27285738d90465a"
+SRC_URI[faster-hex-0.9.0.sha256sum] = "a2a2b11eda1d40935b26cf18f6833c526845ae8c41e58d09af6adeb6f0269183"
+SRC_URI[fastrand-2.0.1.sha256sum] = "25cbce373ec4653f1a01a31e8a5e5ec0c622dc27ff9c4e6606eefef5cbbed4a5"
+SRC_URI[ff-0.13.0.sha256sum] = "ded41244b729663b1e574f1b4fb731469f69f79c17667b5d776b16cda0479449"
+SRC_URI[fiat-crypto-0.2.6.sha256sum] = "1676f435fc1dadde4d03e43f5d62b259e1ce5f40bd4ffb21db2b42ebe59c1382"
+SRC_URI[filetime-0.2.23.sha256sum] = "1ee447700ac8aa0b2f2bd7bc4462ad686ba06baa6727ac149a2d6277f0d240fd"
+SRC_URI[flate2-1.0.28.sha256sum] = "46303f565772937ffe1d394a4fac6f411c6013172fadde9dcdb1e147a086940e"
+SRC_URI[foreign-types-0.3.2.sha256sum] = "f6f339eb8adc052cd2ca78910fda869aefa38d22d5cb648e6485e4d3fc06f3b1"
+SRC_URI[foreign-types-shared-0.1.1.sha256sum] = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b"
+SRC_URI[form_urlencoded-1.2.1.sha256sum] = "e13624c2627564efccf4934284bdd98cbaa14e79b0b5a141218e507b3a823456"
+SRC_URI[generic-array-0.14.7.sha256sum] = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a"
+SRC_URI[getrandom-0.2.12.sha256sum] = "190092ea657667030ac6a35e305e62fc4dd69fd98ac98631e5d3a2b1575a12b5"
+SRC_URI[git2-0.18.2.sha256sum] = "1b3ba52851e73b46a4c3df1d89343741112003f0f6f13beb0dfac9e457c3fdcd"
+SRC_URI[git2-curl-0.19.0.sha256sum] = "78e26b61608c573ffd26fc79061a823aa5147449a1afe1f61679a21e2031f7c3"
+SRC_URI[gix-0.56.0.sha256sum] = "5b0dcdc9c60d66535897fa40a7ea2a635e72f99456b1d9ae86b7e170e80618cb"
+SRC_URI[gix-actor-0.28.1.sha256sum] = "2eadca029ef716b4378f7afb19f7ee101fde9e58ba1f1445971315ac866db417"
+SRC_URI[gix-attributes-0.20.1.sha256sum] = "0f395469d38c76ec47cd1a6c5a53fbc3f13f737b96eaf7535f4e6b367e643381"
+SRC_URI[gix-bitmap-0.2.10.sha256sum] = "78b6cd0f246180034ddafac9b00a112f19178135b21eb031b3f79355891f7325"
+SRC_URI[gix-chunk-0.4.7.sha256sum] = "003ec6deacf68076a0c157271a127e0bb2c031c1a41f7168cbe5d248d9b85c78"
+SRC_URI[gix-command-0.3.4.sha256sum] = "c82b5e9494e61983e61049bbd15fe0fa6b70672dd236362bdb5b2b50fc428f10"
+SRC_URI[gix-commitgraph-0.22.1.sha256sum] = "85a7007ba021f059803afaf6f8a48872422abc20550ac12ede6ddea2936cec36"
+SRC_URI[gix-config-0.32.1.sha256sum] = "0341471d55d8676e98b88e121d7065dfa4c9c5acea4b6d6ecdd2846e85cce0c3"
+SRC_URI[gix-config-value-0.14.4.sha256sum] = "5b8a1e7bfb37a46ed0b8468db37a6d8a0a61d56bdbe4603ae492cb322e5f3958"
+SRC_URI[gix-credentials-0.22.0.sha256sum] = "513dac42450b27946bd0a0535a3a5a88e473d6522e5e3439a129cab779c88f3d"
+SRC_URI[gix-date-0.8.3.sha256sum] = "fb7f3dfb72bebe3449b5e642be64e3c6ccbe9821c8b8f19f487cf5bfbbf4067e"
+SRC_URI[gix-diff-0.38.0.sha256sum] = "8119a985887cfe68f4bdf92e51bd64bc758a73882d82fcfc03ebcb164441c85d"
+SRC_URI[gix-discover-0.27.0.sha256sum] = "6fad89416ebe0b3b7df78464124e2a02417b6cd3743d48ad93df86f4d2929c07"
+SRC_URI[gix-features-0.35.0.sha256sum] = "9b9ff423ae4983f762659040d13dd7a5defbd54b6a04ac3cc7347741cec828cd"
+SRC_URI[gix-features-0.36.1.sha256sum] = "4d46a4a5c6bb5bebec9c0d18b65ada20e6517dbd7cf855b87dd4bbdce3a771b2"
+SRC_URI[gix-filter-0.7.0.sha256sum] = "6d6a5c9d8e55c364e7c226919c19c9a28be1392d6208b5008059fa94ff7e2bf0"
+SRC_URI[gix-fs-0.8.1.sha256sum] = "20e86eb040f5776a5ade092282e51cdcad398adb77d948b88d17583c2ae4e107"
+SRC_URI[gix-glob-0.14.1.sha256sum] = "5db19298c5eeea2961e5b3bf190767a2d1f09b8802aeb5f258e42276350aff19"
+SRC_URI[gix-hash-0.13.3.sha256sum] = "1f8cf8c2266f63e582b7eb206799b63aa5fa68ee510ad349f637dfe2d0653de0"
+SRC_URI[gix-hashtable-0.4.1.sha256sum] = "feb61880816d7ec4f0b20606b498147d480860ddd9133ba542628df2f548d3ca"
+SRC_URI[gix-ignore-0.9.1.sha256sum] = "a215cc8cf21645bca131fcf6329d3ebd46299c47dbbe27df71bb1ca9e328b879"
+SRC_URI[gix-index-0.27.1.sha256sum] = "f3f308f5cd2992e96a274b0d1931e9a0e44fdcba87695ead3f6df30d8a697e9c"
+SRC_URI[gix-lock-11.0.1.sha256sum] = "7e5c65e6a29830a435664891ced3f3c1af010f14900226019590ee0971a22f37"
+SRC_URI[gix-macros-0.1.3.sha256sum] = "d75e7ab728059f595f6ddc1ad8771b8d6a231971ae493d9d5948ecad366ee8bb"
+SRC_URI[gix-negotiate-0.10.0.sha256sum] = "979f6accd9c051b3dd018b50adf29c0a2459edddf6105cc70b767976cd6f8014"
+SRC_URI[gix-object-0.39.0.sha256sum] = "febf79c5825720c1c63fe974c7bbe695d0cb54aabad73f45671c60ce0e501e33"
+SRC_URI[gix-odb-0.55.0.sha256sum] = "1fae5f971540c99c6ecc8d4368ecc9d18a9dc8b9391025c68c4399747dc93bac"
+SRC_URI[gix-pack-0.45.0.sha256sum] = "4569491c92446fddf373456ff360aff9a9effd627b40a70f2d7914dcd75a3205"
+SRC_URI[gix-packetline-0.17.3.sha256sum] = "09ff45eef7747bde4986429a3e813478d50c2688b8f239e57bd3aa81065b285f"
+SRC_URI[gix-packetline-blocking-0.17.3.sha256sum] = "ca8ef6dd3ea50e26f3bf572e90c034d033c804d340cd1eb386392f184a9ba2f7"
+SRC_URI[gix-path-0.10.5.sha256sum] = "97e9ad649bf5e109562d6acba657ca428661ec08e77eaf3a755d8fa55485be9c"
+SRC_URI[gix-pathspec-0.4.1.sha256sum] = "1dbbb92f75a38ef043c8bb830b339b38d0698d7f3746968b5fcbade7a880494d"
+SRC_URI[gix-prompt-0.8.2.sha256sum] = "02bd89d058258e53e0fd6c57f13ee16c5673a83066a68e11f88626fc8cfda5f6"
+SRC_URI[gix-protocol-0.42.0.sha256sum] = "95736ef407db0bd15a5bdea791fbfcf523b9f13b96c852c240cd86a9ee0ef817"
+SRC_URI[gix-quote-0.4.10.sha256sum] = "9f7dc10303d73a960d10fb82f81188b036ac3e6b11b5795b20b1a60b51d1321f"
+SRC_URI[gix-ref-0.39.1.sha256sum] = "3b2069adc212cf7f3317ef55f6444abd06c50f28479dbbac5a86acf3b05cbbfe"
+SRC_URI[gix-refspec-0.20.0.sha256sum] = "76d9d3b82e1ee78fc0dc1c37ea5ea76c2dbc73f407db155f0dfcea285e583bee"
+SRC_URI[gix-revision-0.24.0.sha256sum] = "fe5dd51710ce5434bc315ea30394fab483c5377276494edd79222b321a5a9544"
+SRC_URI[gix-revwalk-0.10.0.sha256sum] = "69d4ed2493ca94a475fdf147138e1ef8bab3b6ebb56abf3d9bda1c05372ec1dd"
+SRC_URI[gix-sec-0.10.4.sha256sum] = "f8d9bf462feaf05f2121cba7399dbc6c34d88a9cad58fc1e95027791d6a3c6d2"
+SRC_URI[gix-submodule-0.6.0.sha256sum] = "02a3d7f60a95bdcaeb8981663c99d1c9f4de42aab1169524c949e948989809f9"
+SRC_URI[gix-tempfile-11.0.1.sha256sum] = "388dd29114a86ec69b28d1e26d6d63a662300ecf61ab3f4cc578f7d7dc9e7e23"
+SRC_URI[gix-trace-0.1.7.sha256sum] = "02b202d766a7fefc596e2cc6a89cda8ad8ad733aed82da635ac120691112a9b1"
+SRC_URI[gix-transport-0.39.0.sha256sum] = "f731cfefc4d62468c6dd2053f5c6707828256a6d2f5488c1811e3f42c178b144"
+SRC_URI[gix-traverse-0.35.0.sha256sum] = "df2112088122a0206592c84fbd42020db63b2ccaed66a0293779f2e5fbf80474"
+SRC_URI[gix-url-0.25.2.sha256sum] = "0c427a1a11ccfa53a4a2da47d9442c2241deee63a154bc15cc14b8312fbc4005"
+SRC_URI[gix-utils-0.1.9.sha256sum] = "56e839f3d0798b296411263da6bee780a176ef8008a5dfc31287f7eda9266ab8"
+SRC_URI[gix-validate-0.8.3.sha256sum] = "ac7cc36f496bd5d96cdca0f9289bb684480725d40db60f48194aa7723b883854"
+SRC_URI[gix-worktree-0.28.0.sha256sum] = "7f1d0ae01dee14abe8c8117d78d7518f9a507de2dc4522546fbf4c444e9860b4"
+SRC_URI[glob-0.3.1.sha256sum] = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b"
+SRC_URI[globset-0.4.14.sha256sum] = "57da3b9b5b85bd66f31093f8c408b90a74431672542466497dcbdfdc02034be1"
+SRC_URI[group-0.13.0.sha256sum] = "f0f9ef7462f7c099f518d754361858f86d8a07af53ba9af0fe635bbccb151a63"
+SRC_URI[hashbrown-0.12.3.sha256sum] = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888"
+SRC_URI[hashbrown-0.14.3.sha256sum] = "290f1a1d9242c78d09ce40a5e87e7554ee637af1351968159f4952f028f75604"
+SRC_URI[hashlink-0.8.4.sha256sum] = "e8094feaf31ff591f651a2664fb9cfd92bba7a60ce3197265e9482ebe753c8f7"
+SRC_URI[heck-0.4.1.sha256sum] = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8"
+SRC_URI[hermit-abi-0.3.5.sha256sum] = "d0c62115964e08cb8039170eb33c1d0e2388a256930279edca206fff675f82c3"
+SRC_URI[hex-0.4.3.sha256sum] = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70"
+SRC_URI[hkdf-0.12.4.sha256sum] = "7b5f8eb2ad728638ea2c7d47a21db23b7b58a72ed6a38256b8a1849f15fbbdf7"
+SRC_URI[hmac-0.12.1.sha256sum] = "6c49c37c09c17a53d937dfbb742eb3a961d65a994e6bcdcf37e7399d0cc8ab5e"
+SRC_URI[home-0.5.9.sha256sum] = "e3d1354bf6b7235cb4a0576c2619fd4ed18183f689b12b006a0ee7329eeff9a5"
+SRC_URI[http-auth-0.1.9.sha256sum] = "643c9bbf6a4ea8a656d6b4cd53d34f79e3f841ad5203c1a55fb7d761923bc255"
+SRC_URI[humantime-2.1.0.sha256sum] = "9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4"
+SRC_URI[idna-0.5.0.sha256sum] = "634d9b1461af396cad843f47fdba5597a4f9e6ddd4bfb6ff5d85028c25cb12f6"
+SRC_URI[ignore-0.4.22.sha256sum] = "b46810df39e66e925525d6e38ce1e7f6e1d208f72dc39757880fcb66e2c58af1"
+SRC_URI[im-rc-15.1.0.sha256sum] = "af1955a75fa080c677d3972822ec4bad316169ab1cfc6c257a942c2265dbe5fe"
+SRC_URI[indexmap-1.9.3.sha256sum] = "bd070e393353796e801d209ad339e89596eb4c8d430d18ede6a1cced8fafbd99"
+SRC_URI[indexmap-2.2.2.sha256sum] = "824b2ae422412366ba479e8111fd301f7b5faece8149317bb81925979a53f520"
+SRC_URI[is-terminal-0.4.11.sha256sum] = "fe8f25ce1159c7740ff0b9b2f5cdf4a8428742ba7c112b9f20f22cd5219c7dab"
+SRC_URI[itertools-0.12.1.sha256sum] = "ba291022dbbd398a455acf126c1e341954079855bc60dfdda641363bd6922569"
+SRC_URI[itoa-1.0.10.sha256sum] = "b1a46d1a171d865aa5f83f92695765caa047a9b4cbae2cbf37dbd613a793fd4c"
+SRC_URI[jobserver-0.1.28.sha256sum] = "ab46a6e9526ddef3ae7f787c06f0f2600639ba80ea3eade3d8e670a2230f51d6"
+SRC_URI[js-sys-0.3.68.sha256sum] = "406cda4b368d531c842222cf9d2600a9a4acce8d29423695379c6868a143a9ee"
+SRC_URI[kstring-2.0.0.sha256sum] = "ec3066350882a1cd6d950d055997f379ac37fd39f81cd4d8ed186032eb3c5747"
+SRC_URI[lazy_static-1.4.0.sha256sum] = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
+SRC_URI[lazycell-1.3.0.sha256sum] = "830d08ce1d1d941e6b30645f1a0eb5643013d835ce3779a5fc208261dbe10f55"
+SRC_URI[libc-0.2.153.sha256sum] = "9c198f91728a82281a64e1f4f9eeb25d82cb32a5de251c6bd1b5154d63a8e7bd"
+SRC_URI[libgit2-sys-0.16.2+1.7.2.sha256sum] = "ee4126d8b4ee5c9d9ea891dd875cfdc1e9d0950437179104b183d7d8a74d24e8"
+SRC_URI[libloading-0.8.1.sha256sum] = "c571b676ddfc9a8c12f1f3d3085a7b163966a8fd8098a90640953ce5f6170161"
+SRC_URI[libnghttp2-sys-0.1.9+1.58.0.sha256sum] = "b57e858af2798e167e709b9d969325b6d8e9d50232fcbc494d7d54f976854a64"
+SRC_URI[libsqlite3-sys-0.27.0.sha256sum] = "cf4e226dcd58b4be396f7bd3c20da8fdee2911400705297ba7d2d7cc2c30f716"
+SRC_URI[libssh2-sys-0.3.0.sha256sum] = "2dc8a030b787e2119a731f1951d6a773e2280c660f8ec4b0f5e1505a386e71ee"
+SRC_URI[libz-sys-1.1.15.sha256sum] = "037731f5d3aaa87a5675e895b63ddff1a87624bc29f77004ea829809654e48f6"
+SRC_URI[linux-raw-sys-0.4.13.sha256sum] = "01cda141df6706de531b6c46c3a33ecca755538219bd484262fa09410c13539c"
+SRC_URI[lock_api-0.4.11.sha256sum] = "3c168f8615b12bc01f9c17e2eb0cc07dcae1940121185446edc3744920e8ef45"
+SRC_URI[log-0.4.20.sha256sum] = "b5e6163cb8c49088c2c36f57875e58ccd8c87c7427f7fbd50ea6710b2f3f2e8f"
+SRC_URI[matchers-0.1.0.sha256sum] = "8263075bb86c5a1b1427b5ae862e8889656f126e9f77c484496e8b47cf5c5558"
+SRC_URI[maybe-async-0.2.9.sha256sum] = "afc95a651c82daf7004c824405aa1019723644950d488571bd718e3ed84646ed"
+SRC_URI[memchr-2.7.1.sha256sum] = "523dc4f511e55ab87b694dc30d0f820d60906ef06413f93d4d7a1385599cc149"
+SRC_URI[memmap2-0.9.4.sha256sum] = "fe751422e4a8caa417e13c3ea66452215d7d63e19e604f4980461212f3ae1322"
+SRC_URI[minimal-lexical-0.2.1.sha256sum] = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a"
+SRC_URI[miniz_oxide-0.7.2.sha256sum] = "9d811f3e15f28568be3407c8e7fdb6514c1cda3cb30683f15b6a1a1dc4ea14a7"
+SRC_URI[miow-0.6.0.sha256sum] = "359f76430b20a79f9e20e115b3428614e654f04fab314482fc0fda0ebd3c6044"
+SRC_URI[nom-7.1.3.sha256sum] = "d273983c5a657a70a3e8f2a01329822f3b8c8172b73826411a55751e404a0a4a"
+SRC_URI[normpath-1.1.1.sha256sum] = "ec60c60a693226186f5d6edf073232bfb6464ed97eb22cf3b01c1e8198fd97f5"
+SRC_URI[nu-ansi-term-0.46.0.sha256sum] = "77a8165726e8236064dbb45459242600304b42a5ea24ee2948e18e023bf7ba84"
+SRC_URI[num-conv-0.1.0.sha256sum] = "51d515d32fb182ee37cda2ccdcb92950d6a3c2893aa280e540671c2cd0f3b1d9"
+SRC_URI[num-traits-0.2.18.sha256sum] = "da0df0e5185db44f69b44f26786fe401b6c293d1907744beaa7fa62b2e5a517a"
+SRC_URI[num_threads-0.1.6.sha256sum] = "2819ce041d2ee131036f4fc9d6ae7ae125a3a40e97ba64d04fe799ad9dabbb44"
+SRC_URI[once_cell-1.19.0.sha256sum] = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92"
+SRC_URI[opener-0.6.1.sha256sum] = "6c62dcb6174f9cb326eac248f07e955d5d559c272730b6c03e396b443b562788"
+SRC_URI[openssl-0.10.63.sha256sum] = "15c9d69dd87a29568d4d017cfe8ec518706046a05184e5aea92d0af890b803c8"
+SRC_URI[openssl-macros-0.1.1.sha256sum] = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c"
+SRC_URI[openssl-probe-0.1.5.sha256sum] = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf"
+SRC_URI[openssl-src-300.2.2+3.2.1.sha256sum] = "8bbfad0063610ac26ee79f7484739e2b07555a75c42453b89263830b5c8103bc"
+SRC_URI[openssl-sys-0.9.99.sha256sum] = "22e1bf214306098e4832460f797824c05d25aacdf896f64a985fb0fd992454ae"
+SRC_URI[ordered-float-2.10.1.sha256sum] = "68f19d67e5a2795c94e73e0bb1cc1a7edeb2e28efd39e2e1c9b7a40c1108b11c"
+SRC_URI[orion-0.17.6.sha256sum] = "7abdb10181903c8c4b016ba45d6d6d5af1a1e2a461aa4763a83b87f5df4695e5"
+SRC_URI[os_info-3.7.0.sha256sum] = "006e42d5b888366f1880eda20371fedde764ed2213dc8496f49622fa0c99cd5e"
+SRC_URI[overload-0.1.1.sha256sum] = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39"
+SRC_URI[p384-0.13.0.sha256sum] = "70786f51bcc69f6a4c0360e063a4cac5419ef7c5cd5b3c99ad70f3be5ba79209"
+SRC_URI[parking_lot-0.12.1.sha256sum] = "3742b2c103b9f06bc9fff0a37ff4912935851bee6d36f3c02bcc755bcfec228f"
+SRC_URI[parking_lot_core-0.9.9.sha256sum] = "4c42a9226546d68acdd9c0a280d17ce19bfe27a46bf68784e4066115788d008e"
+SRC_URI[pasetors-0.6.8.sha256sum] = "6b36d47c66f2230dd1b7143d9afb2b4891879020210eddf2ccb624e529b96dba"
+SRC_URI[pathdiff-0.2.1.sha256sum] = "8835116a5c179084a830efb3adc117ab007512b535bc1a21c991d3b32a6b44dd"
+SRC_URI[pem-rfc7468-0.7.0.sha256sum] = "88b39c9bfcfc231068454382784bb460aae594343fb030d46e9f50a645418412"
+SRC_URI[percent-encoding-2.3.1.sha256sum] = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e"
+SRC_URI[pin-project-lite-0.2.13.sha256sum] = "8afb450f006bf6385ca15ef45d71d2288452bc3683ce2e2cacc0d18e4be60b58"
+SRC_URI[pkcs8-0.10.2.sha256sum] = "f950b2377845cebe5cf8b5165cb3cc1a5e0fa5cfa3e1f7f55707d8fd82e0a7b7"
+SRC_URI[pkg-config-0.3.29.sha256sum] = "2900ede94e305130c13ddd391e0ab7cbaeb783945ae07a279c268cb05109c6cb"
+SRC_URI[powerfmt-0.2.0.sha256sum] = "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391"
+SRC_URI[ppv-lite86-0.2.17.sha256sum] = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de"
+SRC_URI[primeorder-0.13.6.sha256sum] = "353e1ca18966c16d9deb1c69278edbc5f194139612772bd9537af60ac231e1e6"
+SRC_URI[proc-macro2-1.0.78.sha256sum] = "e2422ad645d89c99f8f3e6b88a9fdeca7fabeac836b1002371c4367c8f984aae"
+SRC_URI[prodash-26.2.2.sha256sum] = "794b5bf8e2d19b53dcdcec3e4bba628e20f5b6062503ba89281fa7037dd7bbcf"
+SRC_URI[pulldown-cmark-0.9.6.sha256sum] = "57206b407293d2bcd3af849ce869d52068623f19e1b5ff8e8778e3309439682b"
+SRC_URI[quote-1.0.35.sha256sum] = "291ec9ab5efd934aaf503a6466c5d5251535d108ee747472c3977cc5acc868ef"
+SRC_URI[rand-0.8.5.sha256sum] = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404"
+SRC_URI[rand_chacha-0.3.1.sha256sum] = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88"
+SRC_URI[rand_core-0.6.4.sha256sum] = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c"
+SRC_URI[rand_xoshiro-0.6.0.sha256sum] = "6f97cdb2a36ed4183de61b2f824cc45c9f1037f28afe0a322e9fff4c108b5aaa"
+SRC_URI[redox_syscall-0.4.1.sha256sum] = "4722d768eff46b75989dd134e5c353f0d6296e5aaa3132e776cbdb56be7731aa"
+SRC_URI[regex-1.10.3.sha256sum] = "b62dbe01f0b06f9d8dc7d49e05a0785f153b00b2c227856282f671e0318c9b15"
+SRC_URI[regex-automata-0.1.10.sha256sum] = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132"
+SRC_URI[regex-automata-0.4.5.sha256sum] = "5bb987efffd3c6d0d8f5f89510bb458559eab11e4f869acb20bf845e016259cd"
+SRC_URI[regex-syntax-0.6.29.sha256sum] = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1"
+SRC_URI[regex-syntax-0.8.2.sha256sum] = "c08c74e62047bb2de4ff487b251e4a92e24f48745648451635cec7d591162d9f"
+SRC_URI[rfc6979-0.4.0.sha256sum] = "f8dd2a808d456c4a54e300a23e9f5a67e122c3024119acbfd73e3bf664491cb2"
+SRC_URI[rusqlite-0.30.0.sha256sum] = "a78046161564f5e7cd9008aff3b2990b3850dc8e0349119b98e8f251e099f24d"
+SRC_URI[rustfix-0.7.0.sha256sum] = "7ec10cbeb92a2e494ef354d66126882da8c0a244ad769e2a7193efc5de625175"
+SRC_URI[rustix-0.38.31.sha256sum] = "6ea3e1a662af26cd7a3ba09c0297a31af215563ecf42817c98df621387f4e949"
+SRC_URI[ryu-1.0.16.sha256sum] = "f98d2aa92eebf49b69786be48e4477826b256916e84a57ff2a4f21923b48eb4c"
+SRC_URI[same-file-1.0.6.sha256sum] = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502"
+SRC_URI[schannel-0.1.23.sha256sum] = "fbc91545643bcf3a0bbb6569265615222618bdf33ce4ffbbd13c4bbd4c093534"
+SRC_URI[scopeguard-1.2.0.sha256sum] = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49"
+SRC_URI[sec1-0.7.3.sha256sum] = "d3e97a565f76233a6003f9f5c54be1d9c5bdfa3eccfb189469f11ec4901c47dc"
+SRC_URI[security-framework-2.9.2.sha256sum] = "05b64fb303737d99b81884b2c63433e9ae28abebe5eb5045dcdd175dc2ecf4de"
+SRC_URI[security-framework-sys-2.9.1.sha256sum] = "e932934257d3b408ed8f30db49d85ea163bfe74961f017f405b025af298f0c7a"
+SRC_URI[semver-1.0.21.sha256sum] = "b97ed7a9823b74f99c7742f5336af7be5ecd3eeafcb1507d1fa93347b1d589b0"
+SRC_URI[serde-1.0.196.sha256sum] = "870026e60fa08c69f064aa766c10f10b1d62db9ccd4d0abb206472bee0ce3b32"
+SRC_URI[serde-untagged-0.1.5.sha256sum] = "6a160535368dfc353348e7eaa299156bd508c60c45a9249725f5f6d370d82a66"
+SRC_URI[serde-value-0.7.0.sha256sum] = "f3a1a3341211875ef120e117ea7fd5228530ae7e7036a779fdc9117be6b3282c"
+SRC_URI[serde_derive-1.0.196.sha256sum] = "33c85360c95e7d137454dc81d9a4ed2b8efd8fbe19cee57357b32b9771fccb67"
+SRC_URI[serde_ignored-0.1.10.sha256sum] = "a8e319a36d1b52126a0d608f24e93b2d81297091818cd70625fcf50a15d84ddf"
+SRC_URI[serde_json-1.0.113.sha256sum] = "69801b70b1c3dac963ecb03a364ba0ceda9cf60c71cfe475e99864759c8b8a79"
+SRC_URI[serde_spanned-0.6.5.sha256sum] = "eb3622f419d1296904700073ea6cc23ad690adbd66f13ea683df73298736f0c1"
+SRC_URI[sha1-0.10.6.sha256sum] = "e3bf829a2d51ab4a5ddf1352d8470c140cadc8301b2ae1789db023f01cedd6ba"
+SRC_URI[sha1_smol-1.0.0.sha256sum] = "ae1a47186c03a32177042e55dbc5fd5aee900b8e0069a8d70fba96a9375cd012"
+SRC_URI[sha2-0.10.8.sha256sum] = "793db75ad2bcafc3ffa7c68b215fee268f537982cd901d132f89c6343f3a3dc8"
+SRC_URI[sharded-slab-0.1.7.sha256sum] = "f40ca3c46823713e0d4209592e8d6e826aa57e928f09752619fc696c499637f6"
+SRC_URI[shell-escape-0.1.5.sha256sum] = "45bb67a18fa91266cc7807181f62f9178a6873bfad7dc788c42e6430db40184f"
+SRC_URI[shell-words-1.1.0.sha256sum] = "24188a676b6ae68c3b2cb3a01be17fbf7240ce009799bb56d5b1409051e78fde"
+SRC_URI[signature-2.2.0.sha256sum] = "77549399552de45a898a580c1b41d445bf730df867cc44e6c0233bbc4b8329de"
+SRC_URI[sized-chunks-0.6.5.sha256sum] = "16d69225bde7a69b235da73377861095455d298f2b970996eec25ddbb42b3d1e"
+SRC_URI[smallvec-1.13.1.sha256sum] = "e6ecd384b10a64542d77071bd64bd7b231f4ed5940fba55e98c3de13824cf3d7"
+SRC_URI[socket2-0.5.5.sha256sum] = "7b5fac59a5cb5dd637972e5fca70daf0523c9067fcdc4842f053dae04a18f8e9"
+SRC_URI[spki-0.7.3.sha256sum] = "d91ed6c858b01f942cd56b37a94b3e0a1798290327d1236e4d9cf4eaca44d29d"
+SRC_URI[static_assertions-1.1.0.sha256sum] = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f"
+SRC_URI[strsim-0.11.0.sha256sum] = "5ee073c9e4cd00e28217186dbe12796d692868f432bf2e97ee73bed0c56dfa01"
+SRC_URI[subtle-2.5.0.sha256sum] = "81cdd64d312baedb58e21336b31bc043b77e01cc99033ce76ef539f78e965ebc"
+SRC_URI[supports-hyperlinks-2.1.0.sha256sum] = "f84231692eb0d4d41e4cdd0cabfdd2e6cd9e255e65f80c9aa7c98dd502b4233d"
+SRC_URI[syn-1.0.109.sha256sum] = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237"
+SRC_URI[syn-2.0.48.sha256sum] = "0f3531638e407dfc0814761abb7c00a5b54992b849452a0646b7f65c9f770f3f"
+SRC_URI[tar-0.4.40.sha256sum] = "b16afcea1f22891c49a00c751c7b63b2233284064f11a200fc624137c51e2ddb"
+SRC_URI[tempfile-3.10.0.sha256sum] = "a365e8cd18e44762ef95d87f284f4b5cd04107fec2ff3052bd6a3e6069669e67"
+SRC_URI[terminal_size-0.3.0.sha256sum] = "21bebf2b7c9e0a515f6e0f8c51dc0f8e4696391e6f1ff30379559f8365fb0df7"
+SRC_URI[thiserror-1.0.56.sha256sum] = "d54378c645627613241d077a3a79db965db602882668f9136ac42af9ecb730ad"
+SRC_URI[thiserror-impl-1.0.56.sha256sum] = "fa0faa943b50f3db30a20aa7e265dbc66076993efed8463e8de414e5d06d3471"
+SRC_URI[thread_local-1.1.7.sha256sum] = "3fdd6f064ccff2d6567adcb3873ca630700f00b5ad3f060c25b5dcfd9a4ce152"
+SRC_URI[time-0.3.34.sha256sum] = "c8248b6521bb14bc45b4067159b9b6ad792e2d6d754d6c41fb50e29fefe38749"
+SRC_URI[time-core-0.1.2.sha256sum] = "ef927ca75afb808a4d64dd374f00a2adf8d0fcff8e7b184af886c3c87ec4a3f3"
+SRC_URI[time-macros-0.2.17.sha256sum] = "7ba3a3ef41e6672a2f0f001392bb5dcd3ff0a9992d618ca761a11c3121547774"
+SRC_URI[tinyvec-1.6.0.sha256sum] = "87cc5ceb3875bb20c2890005a4e226a4651264a5c75edb2421b52861a0a0cb50"
+SRC_URI[tinyvec_macros-0.1.1.sha256sum] = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20"
+SRC_URI[toml-0.5.11.sha256sum] = "f4f7f0dd8d50a853a531c426359045b1998f04219d88799810762cd4ad314234"
+SRC_URI[toml-0.8.10.sha256sum] = "9a9aad4a3066010876e8dcf5a8a06e70a558751117a145c6ce2b82c2e2054290"
+SRC_URI[toml_datetime-0.6.5.sha256sum] = "3550f4e9685620ac18a50ed434eb3aec30db8ba93b0287467bca5826ea25baf1"
+SRC_URI[toml_edit-0.21.1.sha256sum] = "6a8534fd7f78b5405e860340ad6575217ce99f38d4d5c8f2442cb5ecb50090e1"
+SRC_URI[toml_edit-0.22.4.sha256sum] = "0c9ffdf896f8daaabf9b66ba8e77ea1ed5ed0f72821b398aba62352e95062951"
+SRC_URI[tracing-0.1.40.sha256sum] = "c3523ab5a71916ccf420eebdf5521fcef02141234bbc0b8a49f2fdc4544364ef"
+SRC_URI[tracing-attributes-0.1.27.sha256sum] = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7"
+SRC_URI[tracing-core-0.1.32.sha256sum] = "c06d3da6113f116aaee68e4d601191614c9053067f9ab7f6edbcb161237daa54"
+SRC_URI[tracing-log-0.2.0.sha256sum] = "ee855f1f400bd0e5c02d150ae5de3840039a3f54b025156404e34c23c03f47c3"
+SRC_URI[tracing-subscriber-0.3.18.sha256sum] = "ad0f048c97dbd9faa9b7df56362b8ebcaa52adb06b498c050d2f4e32f90a7a8b"
+SRC_URI[typenum-1.17.0.sha256sum] = "42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825"
+SRC_URI[unicase-2.7.0.sha256sum] = "f7d2d4dafb69621809a81864c9c1b864479e1235c0dd4e199924b9742439ed89"
+SRC_URI[unicode-bidi-0.3.15.sha256sum] = "08f95100a766bf4f8f28f90d77e0a5461bbdb219042e7679bebe79004fed8d75"
+SRC_URI[unicode-bom-2.0.3.sha256sum] = "7eec5d1121208364f6793f7d2e222bf75a915c19557537745b195b253dd64217"
+SRC_URI[unicode-ident-1.0.12.sha256sum] = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b"
+SRC_URI[unicode-normalization-0.1.22.sha256sum] = "5c5713f0fc4b5db668a2ac63cdb7bb4469d8c9fed047b1d0292cc7b0ce2ba921"
+SRC_URI[unicode-width-0.1.11.sha256sum] = "e51733f11c9c4f72aa0c160008246859e340b00807569a0da0e7a1079b27ba85"
+SRC_URI[unicode-xid-0.2.4.sha256sum] = "f962df74c8c05a667b5ee8bcf162993134c104e96440b663c8daa176dc772d8c"
+SRC_URI[url-2.5.0.sha256sum] = "31e6302e3bb753d46e83516cae55ae196fc0c309407cf11ab35cc51a4c2a4633"
+SRC_URI[utf8parse-0.2.1.sha256sum] = "711b9620af191e0cdc7468a8d14e709c3dcdb115b36f838e601583af800a370a"
+SRC_URI[valuable-0.1.0.sha256sum] = "830b7e5d4d90034032940e4ace0d9a9a057e7a45cd94e6c007832e39edb82f6d"
+SRC_URI[vcpkg-0.2.15.sha256sum] = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426"
+SRC_URI[version_check-0.9.4.sha256sum] = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f"
+SRC_URI[walkdir-2.4.0.sha256sum] = "d71d857dc86794ca4c280d616f7da00d2dbfd8cd788846559a6813e6aa4b54ee"
+SRC_URI[wasi-0.11.0+wasi-snapshot-preview1.sha256sum] = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423"
+SRC_URI[wasm-bindgen-0.2.91.sha256sum] = "c1e124130aee3fb58c5bdd6b639a0509486b0338acaaae0c84a5124b0f588b7f"
+SRC_URI[wasm-bindgen-backend-0.2.91.sha256sum] = "c9e7e1900c352b609c8488ad12639a311045f40a35491fb69ba8c12f758af70b"
+SRC_URI[wasm-bindgen-macro-0.2.91.sha256sum] = "b30af9e2d358182b5c7449424f017eba305ed32a7010509ede96cdc4696c46ed"
+SRC_URI[wasm-bindgen-macro-support-0.2.91.sha256sum] = "642f325be6301eb8107a83d12a8ac6c1e1c54345a7ef1a9261962dfefda09e66"
+SRC_URI[wasm-bindgen-shared-0.2.91.sha256sum] = "4f186bd2dcf04330886ce82d6f33dd75a7bfcf69ecf5763b89fcde53b6ac9838"
+SRC_URI[winapi-0.3.9.sha256sum] = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419"
+SRC_URI[winapi-i686-pc-windows-gnu-0.4.0.sha256sum] = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"
+SRC_URI[winapi-util-0.1.6.sha256sum] = "f29e6f9198ba0d26b4c9f07dbe6f9ed633e1f3d5b8b414090084349e46a52596"
+SRC_URI[winapi-x86_64-pc-windows-gnu-0.4.0.sha256sum] = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
+SRC_URI[windows-sys-0.48.0.sha256sum] = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9"
+SRC_URI[windows-sys-0.52.0.sha256sum] = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d"
+SRC_URI[windows-targets-0.48.5.sha256sum] = "9a2fa6e2155d7247be68c096456083145c183cbbbc2764150dda45a87197940c"
+SRC_URI[windows-targets-0.52.0.sha256sum] = "8a18201040b24831fbb9e4eb208f8892e1f50a37feb53cc7ff887feb8f50e7cd"
+SRC_URI[windows_aarch64_gnullvm-0.48.5.sha256sum] = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8"
+SRC_URI[windows_aarch64_gnullvm-0.52.0.sha256sum] = "cb7764e35d4db8a7921e09562a0304bf2f93e0a51bfccee0bd0bb0b666b015ea"
+SRC_URI[windows_aarch64_msvc-0.48.5.sha256sum] = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc"
+SRC_URI[windows_aarch64_msvc-0.52.0.sha256sum] = "bbaa0368d4f1d2aaefc55b6fcfee13f41544ddf36801e793edbbfd7d7df075ef"
+SRC_URI[windows_i686_gnu-0.48.5.sha256sum] = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e"
+SRC_URI[windows_i686_gnu-0.52.0.sha256sum] = "a28637cb1fa3560a16915793afb20081aba2c92ee8af57b4d5f28e4b3e7df313"
+SRC_URI[windows_i686_msvc-0.48.5.sha256sum] = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406"
+SRC_URI[windows_i686_msvc-0.52.0.sha256sum] = "ffe5e8e31046ce6230cc7215707b816e339ff4d4d67c65dffa206fd0f7aa7b9a"
+SRC_URI[windows_x86_64_gnu-0.48.5.sha256sum] = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e"
+SRC_URI[windows_x86_64_gnu-0.52.0.sha256sum] = "3d6fa32db2bc4a2f5abeacf2b69f7992cd09dca97498da74a151a3132c26befd"
+SRC_URI[windows_x86_64_gnullvm-0.48.5.sha256sum] = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc"
+SRC_URI[windows_x86_64_gnullvm-0.52.0.sha256sum] = "1a657e1e9d3f514745a572a6846d3c7aa7dbe1658c056ed9c3344c4109a6949e"
+SRC_URI[windows_x86_64_msvc-0.48.5.sha256sum] = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538"
+SRC_URI[windows_x86_64_msvc-0.52.0.sha256sum] = "dff9641d1cd4be8d1a070daf9e3773c5f67e78b4d9d42263020c057706765c04"
+SRC_URI[winnow-0.5.39.sha256sum] = "5389a154b01683d28c77f8f68f49dea75f0a4da32557a58f68ee51ebba472d29"
+SRC_URI[zerocopy-0.7.32.sha256sum] = "74d4d3961e53fa4c9a25a8637fc2bfaf2595b3d3ae34875568a5cf64787716be"
+SRC_URI[zerocopy-derive-0.7.32.sha256sum] = "9ce1b18ccd8e73a9321186f97e46f9f04b778851177567b1975109d26a08d2a6"
+SRC_URI[zeroize-1.7.0.sha256sum] = "525b4ec142c6b68a2d10f01f7bbf6755599ca3f81ea53b8431b7dd348f5fdb2d"
diff --git a/meta/recipes-devtools/rust/cargo-c-native_0.9.30+cargo-0.77.0.bb b/meta/recipes-devtools/rust/cargo-c-native_0.9.30+cargo-0.77.0.bb
new file mode 100644
index 0000000000..8e17606b73
--- /dev/null
+++ b/meta/recipes-devtools/rust/cargo-c-native_0.9.30+cargo-0.77.0.bb
@@ -0,0 +1,17 @@
+SUMMARY = "cargo applet to build and install C-ABI compatible dynamic and static libraries."
+HOMEPAGE = "https://crates.io/crates/cargo-c"
+LICENSE = "MIT"
+LIC_FILES_CHKSUM = " \
+ file://LICENSE;md5=384ed0e2e0b2dac094e51fbf93fdcbe0 \
+"
+
+SRC_URI = "crate://crates.io/cargo-c/${PV};name=cargo-c"
+SRC_URI[cargo-c.sha256sum] = "ec77e3635cdb0d211f88f22d0460eef0ba031eb616ded57f2ffb98a90365c445"
+S = "${CARGO_VENDORING_DIRECTORY}/cargo-c-${PV}"
+
+inherit cargo cargo-update-recipe-crates pkgconfig native
+
+DEPENDS = "openssl curl"
+
+require ${BPN}-crates.inc
+
diff --git a/meta/recipes-devtools/rust/cargo_1.75.0.bb b/meta/recipes-devtools/rust/cargo_1.75.0.bb
new file mode 100644
index 0000000000..50b7e7c7b4
--- /dev/null
+++ b/meta/recipes-devtools/rust/cargo_1.75.0.bb
@@ -0,0 +1,73 @@
+SUMMARY = "Cargo, a package manager for Rust."
+HOMEPAGE = "https://crates.io"
+LICENSE = "MIT | Apache-2.0"
+SECTION = "devel"
+
+DEPENDS = "openssl zlib curl ca-certificates libssh2"
+
+LIC_FILES_CHKSUM = " \
+ file://LICENSE-MIT;md5=b377b220f43d747efdec40d69fcaa69d \
+ file://LICENSE-APACHE;md5=71b224ca933f0676e26d5c2e2271331c \
+ file://LICENSE-THIRD-PARTY;md5=f257ad009884cb88a3a87d6920e7180a \
+"
+
+require rust-source.inc
+require rust-snapshot.inc
+
+S = "${RUSTSRC}/src/tools/cargo"
+CARGO_VENDORING_DIRECTORY = "${RUSTSRC}/vendor"
+
+inherit cargo pkgconfig
+
+DEBUG_PREFIX_MAP += "-fdebug-prefix-map=${RUSTSRC}/vendor=${TARGET_DBGSRC_DIR}"
+
+do_cargo_setup_snapshot () {
+ ${WORKDIR}/rust-snapshot-components/${CARGO_SNAPSHOT}/install.sh --prefix="${WORKDIR}/${CARGO_SNAPSHOT}" --disable-ldconfig
+ # Need to use uninative's loader if enabled/present since the library paths
+ # are used internally by rust and result in symbol mismatches if we don't
+ if [ ! -z "${UNINATIVE_LOADER}" -a -e "${UNINATIVE_LOADER}" ]; then
+ patchelf-uninative ${WORKDIR}/${CARGO_SNAPSHOT}/bin/cargo --set-interpreter ${UNINATIVE_LOADER}
+ fi
+}
+
+addtask cargo_setup_snapshot after do_unpack before do_configure
+do_cargo_setup_snapshot[dirs] += "${WORKDIR}/${CARGO_SNAPSHOT}"
+do_cargo_setup_snapshot[vardepsexclude] += "UNINATIVE_LOADER"
+
+
+do_compile:prepend () {
+ export RUSTC_BOOTSTRAP="1"
+}
+
+do_install () {
+ install -d "${D}${bindir}"
+ install -m 755 "${B}/target/${CARGO_TARGET_SUBDIR}/cargo" "${D}${bindir}"
+}
+
+do_install:append:class-nativesdk() {
+ # To quote the cargo docs, "Cargo also sets the dynamic library path when compiling
+ # and running binaries with commands like `cargo run` and `cargo test`". Sadly it
+ # sets to libdir but not base_libdir leading to symbol mismatches depending on the
+ # host OS. Fully set LD_LIBRARY_PATH to contain both to avoid this.
+ create_wrapper ${D}/${bindir}/cargo LD_LIBRARY_PATH=${libdir}:${base_libdir}
+}
+
+# Disabled due to incompatibility with libgit2 0.28.x (https://github.com/rust-lang/git2-rs/issues/458, https://bugs.gentoo.org/707746#c1)
+# as shipped by Yocto Dunfell.
+# According to https://github.com/rust-lang/git2-rs/issues/458#issuecomment-522567539, there are no compatibility guarantees between
+# libgit2-sys and arbitrary system libgit2 versions, so better keep this turned off.
+#export LIBGIT2_SYS_USE_PKG_CONFIG = "1"
+
+# Needed for pkg-config to be used
+export LIBSSH2_SYS_USE_PKG_CONFIG = "1"
+
+# When building cargo-native we don't have cargo-native to use and depend on,
+# so we must use the locally set up snapshot to bootstrap the build.
+BASEDEPENDS:remove:class-native = "cargo-native"
+CARGO:class-native = "${WORKDIR}/${CARGO_SNAPSHOT}/bin/cargo"
+
+DEPENDS:append:class-nativesdk = " nativesdk-rust"
+RUSTLIB:append:class-nativesdk = " -L ${STAGING_DIR_HOST}/${SDKPATHNATIVE}/usr/lib/rustlib/${RUST_HOST_SYS}/lib"
+RUSTLIB_DEP:class-nativesdk = ""
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/rust/files/0001-Handle-vendored-sources-when-remapping-paths.patch b/meta/recipes-devtools/rust/files/0001-Handle-vendored-sources-when-remapping-paths.patch
new file mode 100644
index 0000000000..a46ee14402
--- /dev/null
+++ b/meta/recipes-devtools/rust/files/0001-Handle-vendored-sources-when-remapping-paths.patch
@@ -0,0 +1,46 @@
+From 5e37ea7cb9d99d91f2c5ac6edf19ff777f95bb88 Mon Sep 17 00:00:00 2001
+From: Arlo Siemsen <arsiem@microsoft.com>
+Date: Thu, 4 Jan 2024 11:40:56 -0600
+Subject: [PATCH] Handle vendored sources when remapping paths
+
+Upstream-Status: Submitted [https://github.com/rust-lang/rust/pull/119582]
+Signed-off-by: Alex Kiernan <alex.kiernan@gmail.com>
+---
+ src/bootstrap/src/core/builder.rs | 19 ++++++++++++-------
+ 1 file changed, 12 insertions(+), 7 deletions(-)
+
+diff --git a/src/bootstrap/src/core/builder.rs b/src/bootstrap/src/core/builder.rs
+index cd276674dee6..48fdb2c7f7b7 100644
+--- a/src/bootstrap/src/core/builder.rs
++++ b/src/bootstrap/src/core/builder.rs
+@@ -1789,15 +1789,20 @@ pub fn cargo(
+ }
+
+ if self.config.rust_remap_debuginfo {
+- // FIXME: handle vendored sources
+- let registry_src = t!(home::cargo_home()).join("registry").join("src");
+ let mut env_var = OsString::new();
+- for entry in t!(std::fs::read_dir(registry_src)) {
+- if !env_var.is_empty() {
+- env_var.push("\t");
+- }
+- env_var.push(t!(entry).path());
++ if self.config.vendor {
++ let vendor = self.build.src.join("vendor");
++ env_var.push(vendor);
+ env_var.push("=/rust/deps");
++ } else {
++ let registry_src = t!(home::cargo_home()).join("registry").join("src");
++ for entry in t!(std::fs::read_dir(registry_src)) {
++ if !env_var.is_empty() {
++ env_var.push("\t");
++ }
++ env_var.push(t!(entry).path());
++ env_var.push("=/rust/deps");
++ }
+ }
+ cargo.env("RUSTC_CARGO_REGISTRY_SRC_TO_REMAP", env_var);
+ }
+--
+2.39.0
+
diff --git a/meta/recipes-devtools/rust/files/0001-Revert-Map-source-absolute-paths-to-OUT_DIR-as-relat.patch b/meta/recipes-devtools/rust/files/0001-Revert-Map-source-absolute-paths-to-OUT_DIR-as-relat.patch
new file mode 100644
index 0000000000..f75a75fa4e
--- /dev/null
+++ b/meta/recipes-devtools/rust/files/0001-Revert-Map-source-absolute-paths-to-OUT_DIR-as-relat.patch
@@ -0,0 +1,67 @@
+From 8bf0c566387e6a48d854d5f69c43c8bfa45092f9 Mon Sep 17 00:00:00 2001
+From: Alex Kiernan <alexk@zuma.ai>
+Date: Sun, 24 Dec 2023 09:40:01 +0000
+Subject: [PATCH] Revert "Map source absolute paths to OUT_DIR as relative.
+ (#684)"
+
+This reverts commit c4f414f449bb7cffba3bc923f277704d1d08a8ec.
+
+Upstream-Status: Inappropriate [patches need rework]
+Signed-off-by: Alex Kiernan <alex.kiernan@gmail.com>
+---
+ src/lib.rs | 22 ++--------------------
+ 1 file changed, 2 insertions(+), 20 deletions(-)
+
+Index: rustc-1.72.0-src/vendor/cc/src/lib.rs
+===================================================================
+--- rustc-1.72.0-src.orig/vendor/cc/src/lib.rs
++++ rustc-1.72.0-src/vendor/cc/src/lib.rs
+@@ -56,12 +56,11 @@
+ #![allow(deprecated)]
+ #![deny(missing_docs)]
+
+-use std::collections::{hash_map, HashMap};
++use std::collections::HashMap;
+ use std::env;
+ use std::ffi::{OsStr, OsString};
+ use std::fmt::{self, Display, Formatter};
+ use std::fs;
+-use std::hash::Hasher;
+ use std::io::{self, BufRead, BufReader, Read, Write};
+ use std::path::{Component, Path, PathBuf};
+ use std::process::{Child, Command, Stdio};
+@@ -1037,24 +1036,7 @@ impl Build {
+
+ let mut objects = Vec::new();
+ for file in self.files.iter() {
+- let obj = if file.has_root() {
+- // If `file` is an absolute path, prefix the `basename`
+- // with the `dirname`'s hash to ensure name uniqueness.
+- let basename = file
+- .file_name()
+- .ok_or_else(|| Error::new(ErrorKind::InvalidArgument, "file_name() failure"))?
+- .to_string_lossy();
+- let dirname = file
+- .parent()
+- .ok_or_else(|| Error::new(ErrorKind::InvalidArgument, "parent() failure"))?
+- .to_string_lossy();
+- let mut hasher = hash_map::DefaultHasher::new();
+- hasher.write(dirname.to_string().as_bytes());
+- dst.join(format!("{:016x}-{}", hasher.finish(), basename))
+- .with_extension("o")
+- } else {
+- dst.join(file).with_extension("o")
+- };
++ let obj = dst.join(file).with_extension("o");
+ let obj = if !obj.starts_with(&dst) {
+ dst.join(obj.file_name().ok_or_else(|| {
+ Error::new(ErrorKind::IOError, "Getting object file details failed.")
+Index: rustc-1.72.0-src/vendor/cc/.cargo-checksum.json
+===================================================================
+--- rustc-1.72.0-src.orig/vendor/cc/.cargo-checksum.json
++++ rustc-1.72.0-src/vendor/cc/.cargo-checksum.json
+@@ -1 +1 @@
+-{"files":{"Cargo.lock":"dddb9c49058d411a098e98dc1c06e3bc89f859a2080d96c11b11aec67394bb8c","Cargo.toml":"1953a8bc4b98e351fe75917c151b1e08a46531d562aebba25a90add4aadecac2","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"378f5840b258e2779c39418f3f2d7b2ba96f1c7917dd6be0713f88305dbda397","README.md":"58af5106352aafa62175a90f8a5f25fa114028bf909220dc0735d79745999ec1","src/bin/gcc-shim.rs":"36dc4e447428e73c548cc7106ca1e8f282c098463b014e13a729a44445de4880","src/com.rs":"29d0dee08a656ab1a4cc3e5fe24542e0fab5c1373cbc9b05059f7572cf9b8313","src/lib.rs":"17a4659710aa290c4ed9c23063c7b202c5bcf2a84de33aa1f01fc6fded69a1f8","src/registry.rs":"98ae2b71781acc49297e5544fa0cf059f735636f8f1338edef8dbf7232443945","src/setup_config.rs":"72deaf1927c0b713fd5c2b2d5b8f0ea3a303a00fda1579427895cac26a94122d","src/vs_instances.rs":"2d3f8278a803b0e7052f4eeb1979b29f963dd0143f4458e2cb5f33c4e5f0963b","src/winapi.rs":"e128e95b2d39ae7a02f54a7e25d33c488c14759b9f1a50a449e10545856950c3","src/windows_registry.rs":"1f973f804b4b451e48ff6d98ce660355772f164dfdf79a6ae514645c7c764005","tests/cc_env.rs":"e02b3b0824ad039b47e4462c5ef6dbe6c824c28e7953af94a0f28f7b5158042e","tests/cflags.rs":"57f06eb5ce1557e5b4a032d0c4673e18fbe6f8d26c1deb153126e368b96b41b3","tests/cxxflags.rs":"c2c6c6d8a0d7146616fa1caed26876ee7bc9fcfffd525eb4743593cade5f3371","tests/support/mod.rs":"a3c8d116973bb16066bf6ec4de5143183f97de7aad085d85f8118a2eaac3e1e0","tests/test.rs":"61fb35ae6dd5cf506ada000bdd82c92e9f8eac9cc053b63e83d3f897436fbf8f"},"package":"50d30906286121d95be3d479533b458f87493b30a4b5f79a607db8f5d11aa91f"}
+\ No newline at end of file
++{"files":{"Cargo.lock":"dddb9c49058d411a098e98dc1c06e3bc89f859a2080d96c11b11aec67394bb8c","Cargo.toml":"1953a8bc4b98e351fe75917c151b1e08a46531d562aebba25a90add4aadecac2","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"378f5840b258e2779c39418f3f2d7b2ba96f1c7917dd6be0713f88305dbda397","README.md":"58af5106352aafa62175a90f8a5f25fa114028bf909220dc0735d79745999ec1","src/bin/gcc-shim.rs":"36dc4e447428e73c548cc7106ca1e8f282c098463b014e13a729a44445de4880","src/com.rs":"29d0dee08a656ab1a4cc3e5fe24542e0fab5c1373cbc9b05059f7572cf9b8313","src/lib.rs":"dfb36b17362e9a5b266cb19a229d982e8c0bba784b1e99769f690692b0cd5c4e","src/registry.rs":"98ae2b71781acc49297e5544fa0cf059f735636f8f1338edef8dbf7232443945","src/setup_config.rs":"72deaf1927c0b713fd5c2b2d5b8f0ea3a303a00fda1579427895cac26a94122d","src/vs_instances.rs":"2d3f8278a803b0e7052f4eeb1979b29f963dd0143f4458e2cb5f33c4e5f0963b","src/winapi.rs":"e128e95b2d39ae7a02f54a7e25d33c488c14759b9f1a50a449e10545856950c3","src/windows_registry.rs":"1f973f804b4b451e48ff6d98ce660355772f164dfdf79a6ae514645c7c764005","tests/cc_env.rs":"e02b3b0824ad039b47e4462c5ef6dbe6c824c28e7953af94a0f28f7b5158042e","tests/cflags.rs":"57f06eb5ce1557e5b4a032d0c4673e18fbe6f8d26c1deb153126e368b96b41b3","tests/cxxflags.rs":"c2c6c6d8a0d7146616fa1caed26876ee7bc9fcfffd525eb4743593cade5f3371","tests/support/mod.rs":"a3c8d116973bb16066bf6ec4de5143183f97de7aad085d85f8118a2eaac3e1e0","tests/test.rs":"61fb35ae6dd5cf506ada000bdd82c92e9f8eac9cc053b63e83d3f897436fbf8f"},"package":"50d30906286121d95be3d479533b458f87493b30a4b5f79a607db8f5d11aa91f"}
+\ No newline at end of file
diff --git a/meta/recipes-devtools/rust/files/cargo-path.patch b/meta/recipes-devtools/rust/files/cargo-path.patch
new file mode 100644
index 0000000000..9a50c40220
--- /dev/null
+++ b/meta/recipes-devtools/rust/files/cargo-path.patch
@@ -0,0 +1,37 @@
+Fix the cargo binary path error and ensure that it is fetched
+during rustc bootstrap in rust oe-selftest.
+
+======================================================================
+ERROR: test_cargoflags (bootstrap_test.BuildBootstrap)
+----------------------------------------------------------------------
+Traceback (most recent call last):
+ File "/home/build-st/tmp/work/cortexa57-poky-linux/rust/1.74.1/rustc-1.74.1-src/src/bootstrap/bootstrap_test.py", line 157, in test_cargoflags
+ args, _ = self.build_args(env={"CARGOFLAGS": "--timings"})
+ File "/home/build-st/tmp/work/cortexa57-poky-linux/rust/1.74.1/rustc-1.74.1-src/src/bootstrap/bootstrap_test.py", line 154, in build_args
+ return build.build_bootstrap_cmd(env), env
+ File "/home/build-st/tmp/work/cortexa57-poky-linux/rust/1.74.1/rustc-1.74.1-src/src/bootstrap/bootstrap.py", line 960, in build_bootstrap_cmd
+ raise Exception("no cargo executable found at `{}`".format(
+Exception: no cargo executable found at `/home/build-st/tmp/work/cortexa57-poky-linux/rust/1.74.1/rustc-1.74.1-src/build/x86_64-unknown-linux-gnu/stage0/bin/cargo`
+
+Upstream-Status: Submitted [https://github.com/rust-lang/rust/pull/120125]
+
+Signed-off-by: Yash Shinde <Yash.Shinde@windriver.com>
+---
+diff --git a/src/bootstrap/bootstrap.py b/src/bootstrap/bootstrap.py
+--- a/src/bootstrap/bootstrap.py
++++ b/src/bootstrap/bootstrap.py
+@@ -954,9 +954,11 @@
+ if "RUSTFLAGS_BOOTSTRAP" in env:
+ env["RUSTFLAGS"] += " " + env["RUSTFLAGS_BOOTSTRAP"]
+
+- env["PATH"] = os.path.join(self.bin_root(), "bin") + \
+- os.pathsep + env["PATH"]
+- if not os.path.isfile(self.cargo()):
++ cargo_bin_path = os.path.join(self.bin_root(), "bin", "cargo")
++ if not os.path.isfile(cargo_bin_path):
++ cargo_bin_path = os.getenv("RUST_TARGET_PATH") + "rust-snapshot/bin/cargo"
++ env["PATH"] = os.path.dirname(cargo_bin_path) + os.pathsep + env["PATH"]
++ else:
+ raise Exception("no cargo executable found at `{}`".format(
+ self.cargo()))
+ args = [self.cargo(), "build", "--manifest-path",
diff --git a/meta/recipes-devtools/rust/files/custom-target-cfg.patch b/meta/recipes-devtools/rust/files/custom-target-cfg.patch
new file mode 100644
index 0000000000..15a7f252cc
--- /dev/null
+++ b/meta/recipes-devtools/rust/files/custom-target-cfg.patch
@@ -0,0 +1,90 @@
+Detect and fetch custom target configurations when rustc is
+bootstrapped in rust oe-selftest.
+
+Upstream-Status: Backport [https://github.com/rust-lang/rust/pull/119619/commits/26c71cbcf1a9bce6ceb962d753c467d098f63cf6]
+
+Signed-off-by: onur-ozkan <work@onurozkan.dev>
+Signed-off-by: Yash Shinde <Yash.Shinde@windriver.com>
+---
+diff --git a/src/tools/compiletest/src/common.rs b/src/tools/compiletest/src/common.rs
+index e85f6319936..c45c0b3c652 100644
+--- a/src/tools/compiletest/src/common.rs
++++ b/src/tools/compiletest/src/common.rs
+@@ -479,6 +479,7 @@ fn new(config: &Config) -> TargetCfgs {
+ let mut targets: HashMap<String, TargetCfg> = serde_json::from_str(&rustc_output(
+ config,
+ &["--print=all-target-specs-json", "-Zunstable-options"],
++ Default::default(),
+ ))
+ .unwrap();
+
+@@ -491,16 +492,33 @@ fn new(config: &Config) -> TargetCfgs {
+ let mut all_families = HashSet::new();
+ let mut all_pointer_widths = HashSet::new();
+
+- // Handle custom target specs, which are not included in `--print=all-target-specs-json`.
+- if config.target.ends_with(".json") {
+- targets.insert(
+- config.target.clone(),
+- serde_json::from_str(&rustc_output(
+- config,
+- &["--print=target-spec-json", "-Zunstable-options", "--target", &config.target],
+- ))
+- .unwrap(),
+- );
++ // If current target is not included in the `--print=all-target-specs-json` output,
++ // we check whether it is a custom target from the user or a synthetic target from bootstrap.
++ if !targets.contains_key(&config.target) {
++ let mut envs: HashMap<String, String> = HashMap::new();
++
++ if let Ok(t) = std::env::var("RUST_TARGET_PATH") {
++ envs.insert("RUST_TARGET_PATH".into(), t);
++ }
++
++ // This returns false only when the target is neither a synthetic target
++ // nor a custom target from the user, indicating it is most likely invalid.
++ if config.target.ends_with(".json") || !envs.is_empty() {
++ targets.insert(
++ config.target.clone(),
++ serde_json::from_str(&rustc_output(
++ config,
++ &[
++ "--print=target-spec-json",
++ "-Zunstable-options",
++ "--target",
++ &config.target,
++ ],
++ envs,
++ ))
++ .unwrap(),
++ );
++ }
+ }
+
+ for (target, cfg) in targets.iter() {
+@@ -545,7 +563,9 @@ fn get_current_target_config(
+ // code below extracts them from `--print=cfg`: make sure to only override fields that can
+ // actually be changed with `-C` flags.
+ for config in
+- rustc_output(config, &["--print=cfg", "--target", &config.target]).trim().lines()
++ rustc_output(config, &["--print=cfg", "--target", &config.target], Default::default())
++ .trim()
++ .lines()
+ {
+ let (name, value) = config
+ .split_once("=\"")
+@@ -624,11 +644,12 @@ pub enum Endian {
+ Big,
+ }
+
+-fn rustc_output(config: &Config, args: &[&str]) -> String {
++fn rustc_output(config: &Config, args: &[&str], envs: HashMap<String, String>) -> String {
+ let mut command = Command::new(&config.rustc_path);
+ add_dylib_path(&mut command, iter::once(&config.compile_lib_path));
+ command.args(&config.target_rustcflags).args(args);
+ command.env("RUSTC_BOOTSTRAP", "1");
++ command.envs(envs);
+
+ let output = match command.output() {
+ Ok(output) => output,
+
diff --git a/meta/recipes-devtools/rust/files/hardcodepaths.patch b/meta/recipes-devtools/rust/files/hardcodepaths.patch
new file mode 100644
index 0000000000..a043095f62
--- /dev/null
+++ b/meta/recipes-devtools/rust/files/hardcodepaths.patch
@@ -0,0 +1,59 @@
+When building for the target, some build paths end up embedded in the binaries.
+These changes remove that. Further investigation is needed to work out the way
+to resolve these issues properly upstream.
+
+Upstream-Status: Inappropriate [patches need rework]
+Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
+Signed-off-by: Alex Kiernan <alex.kiernan@gmail.com>
+
+diff --git a/compiler/rustc_codegen_llvm/src/context.rs b/compiler/rustc_codegen_llvm/src/context.rs
+index b4b2ab1e1f8a..8bb3e3f0557c 100644
+--- a/compiler/rustc_codegen_llvm/src/context.rs
++++ b/compiler/rustc_codegen_llvm/src/context.rs
+@@ -158,46 +158,6 @@ pub unsafe fn create_module<'ll>(
+ }
+ }
+
+- // Ensure the data-layout values hardcoded remain the defaults.
+- if sess.target.is_builtin {
+- // tm is disposed by its drop impl
+- let tm = crate::back::write::create_informational_target_machine(tcx.sess);
+- llvm::LLVMRustSetDataLayoutFromTargetMachine(llmod, &tm);
+-
+- let llvm_data_layout = llvm::LLVMGetDataLayoutStr(llmod);
+- let llvm_data_layout = str::from_utf8(CStr::from_ptr(llvm_data_layout).to_bytes())
+- .expect("got a non-UTF8 data-layout from LLVM");
+-
+- // Unfortunately LLVM target specs change over time, and right now we
+- // don't have proper support to work with any more than one
+- // `data_layout` than the one that is in the rust-lang/rust repo. If
+- // this compiler is configured against a custom LLVM, we may have a
+- // differing data layout, even though we should update our own to use
+- // that one.
+- //
+- // As an interim hack, if CFG_LLVM_ROOT is not an empty string then we
+- // disable this check entirely as we may be configured with something
+- // that has a different target layout.
+- //
+- // Unsure if this will actually cause breakage when rustc is configured
+- // as such.
+- //
+- // FIXME(#34960)
+- let cfg_llvm_root = option_env!("CFG_LLVM_ROOT").unwrap_or("");
+- let custom_llvm_used = !cfg_llvm_root.trim().is_empty();
+-
+- if !custom_llvm_used && target_data_layout != llvm_data_layout {
+- bug!(
+- "data-layout for target `{rustc_target}`, `{rustc_layout}`, \
+- differs from LLVM target's `{llvm_target}` default layout, `{llvm_layout}`",
+- rustc_target = sess.opts.target_triple,
+- rustc_layout = target_data_layout,
+- llvm_target = sess.target.llvm_target,
+- llvm_layout = llvm_data_layout
+- );
+- }
+- }
+-
+ let data_layout = SmallCStr::new(&target_data_layout);
+ llvm::LLVMSetDataLayout(llmod, data_layout.as_ptr());
+
diff --git a/meta/recipes-devtools/rust/files/repro-issue-fix-with-v175.patch b/meta/recipes-devtools/rust/files/repro-issue-fix-with-v175.patch
new file mode 100644
index 0000000000..fe0507c981
--- /dev/null
+++ b/meta/recipes-devtools/rust/files/repro-issue-fix-with-v175.patch
@@ -0,0 +1,36 @@
+rust: reproducibility issue fix with v1.75
+
+With 1.75 rust release, the '.rustc' section of shared object libs are embedded with absolute path names which is casuing reproducibility issues.
+This change will fix the path name format back to '/rust/$hash' as in earlier versions.
+
+Below are the links for detailed bug description & discusssion with upstream rust.
+https://github.com/rust-lang/rust/issues/120825#issuecomment-1964307219
+https://github.com/rust-lang/rust/issues/120825#issuecomment-1964652656
+
+Upstream-Status: Backport [https://github.com/rust-lang/rust/pull/121959/commits/a9a979839bbdfec48c75d618ab0dce8a953589b8]
+Signed-off-by: Sundeep KOKKONDA <sundeep.kokkonda@windriver.com>
+---
+--- a/compiler/rustc_session/src/session.rs 2023-12-21 08:55:28.000000000 -0800
++++ b/compiler/rustc_session/src/session.rs 2024-02-26 07:29:15.527577022 -0800
+@@ -1260,19 +1260,6 @@
+ }
+
+ pub fn should_prefer_remapped_for_codegen(&self) -> bool {
+- // bail out, if any of the requested crate types aren't:
+- // "compiled executables or libraries"
+- for crate_type in &self.opts.crate_types {
+- match crate_type {
+- CrateType::Executable
+- | CrateType::Dylib
+- | CrateType::Rlib
+- | CrateType::Staticlib
+- | CrateType::Cdylib => continue,
+- CrateType::ProcMacro => return false,
+- }
+- }
+-
+ let has_split_debuginfo = match self.split_debuginfo() {
+ SplitDebuginfo::Off => false,
+ SplitDebuginfo::Packed => true,
+
+
diff --git a/meta/recipes-devtools/rust/files/rustc-bootstrap.patch b/meta/recipes-devtools/rust/files/rustc-bootstrap.patch
new file mode 100644
index 0000000000..1271d75e60
--- /dev/null
+++ b/meta/recipes-devtools/rust/files/rustc-bootstrap.patch
@@ -0,0 +1,21 @@
+When rust.channel is set to either beta or stable, we can't use
+nightly features on bootstrap without RUSTC_BOOTSTRAP. Set RUSTC_BOOTSTRAP=1
+to use nightly features on stable or beta.
+
+Upstream-Status: Backport [https://github.com/rust-lang/rust/pull/119619/commits/8aa7dd06f6e50621dc10f9f9490681be8a45876f]
+
+Signed-off-by: onur-ozkan <work@onurozkan.dev>
+Signed-off-by: Yash Shinde <Yash.Shinde@windriver.com>
+---
+diff --git a/src/bootstrap/synthetic_targets.rs b/ src/bootstrap/synthetic_targets.rs
+index d2c65b740da..45baf56f46b 100644
+--- a/src/bootstrap/src/core/build_steps/synthetic_targets.rs
++++ b/src/bootstrap/src/core/build_steps/synthetic_targets.rs
+@@ -59,6 +59,7 @@ fn create_synthetic_target(
+ let mut cmd = Command::new(builder.rustc(compiler));
+ cmd.arg("--target").arg(base.rustc_target_arg());
+ cmd.args(["-Zunstable-options", "--print", "target-spec-json"]);
++ cmd.env("RUSTC_BOOTSTRAP", "1");
+ cmd.stdout(Stdio::piped());
+
+ let output = cmd.spawn().unwrap().wait_with_output().unwrap();
diff --git a/meta/recipes-devtools/rust/files/rv32-cargo-rustix-0.38.19-fix.patch b/meta/recipes-devtools/rust/files/rv32-cargo-rustix-0.38.19-fix.patch
new file mode 100644
index 0000000000..a7f3d11ede
--- /dev/null
+++ b/meta/recipes-devtools/rust/files/rv32-cargo-rustix-0.38.19-fix.patch
@@ -0,0 +1,70 @@
+Fix cargo build on riscv32
+
+riscv32 support is added to latest rustix already
+therefore this patch is limited to rustix-0.38.19
+once cargo/rust moves beyond rustix-0.38.19 as dependency
+we can remove this patch
+
+Upstream-Status: Inappropriate [Specific to rustix-0.38.19]
+
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+
+--- a/vendor/rustix-0.38.19/src/backend/libc/termios/syscalls.rs
++++ b/vendor/rustix-0.38.19/src/backend/libc/termios/syscalls.rs
+@@ -138,7 +138,7 @@ pub(crate) fn tcsetattr(
+ use crate::utils::default_array;
+ use linux_raw_sys::general::{termios2, BOTHER, CBAUD, IBSHIFT};
+
+- #[cfg(not(any(target_arch = "sparc", target_arch = "sparc64")))]
++ #[cfg(not(any(target_arch = "sparc", target_arch = "sparc64", target_arch = "riscv32")))]
+ use linux_raw_sys::ioctl::{TCSETS, TCSETS2};
+
+ // linux-raw-sys' ioctl-generation script for sparc isn't working yet,
+@@ -147,6 +147,10 @@ pub(crate) fn tcsetattr(
+ const TCSETS: u32 = 0x80245409;
+ #[cfg(any(target_arch = "sparc", target_arch = "sparc64"))]
+ const TCSETS2: u32 = 0x802c540d;
++ #[cfg(any(target_arch = "riscv32"))]
++ const TCSETS: u32 = 0x5402;
++ #[cfg(any(target_arch = "riscv32"))]
++ const TCSETS2: u32 = 0x402C542B;
+
+ // Translate from `optional_actions` into an ioctl request code. On
+ // MIPS, `optional_actions` already has `TCGETS` added to it.
+--- a/vendor/rustix-0.38.19/.cargo-checksum.json
++++ b/vendor/rustix-0.38.19/.cargo-checksum.json
+@@ -1 +1 @@
+-{"files":{"CODE_OF_CONDUCT.md":"f210602311e3f74b32f46237fd55f4ce36d798e85e3db1432ec667f63a7ffc44","CONTRIBUTING.md":"3fd57de5c678db1c972da676a8231d2fde9820695ef1f0d53f1e55a3e81d9de0","COPYRIGHT":"377c2e7c53250cc5905c0b0532d35973392af16ffb9596a41d99d202cf3617c9","Cargo.toml":"c994094b77085196394cd7cc03161f4810af7da799cfb40c696973e912929a8f","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-Apache-2.0_WITH_LLVM-exception":"268872b9816f90fd8e85db5a28d33f8150ebb8dd016653fb39ef1f94f2686bc5","LICENSE-MIT":"23f18e03dc49df91622fe2a76176497404e46ced8a715d9d2b67a7446571cca3","ORG_CODE_OF_CONDUCT.md":"a62b69bf86e605ee1bcbb2f0a12ba79e4cebb6983a7b6491949750aecc4f2178","README.md":"58d2ee38dad64ff142e98872860b282b36a09076df537af57aba39d675d84a28","SECURITY.md":"4d75afb09dd28eb5982e3a1f768ee398d90204669ceef3240a16b31dcf04148a","benches/mod.rs":"e260e67273aa0a37cffdc5cd451699335b8ee656c17275a0d0f2b4563c6018ca","build.rs":"5f93559819ca7fe233f01579d51ff5b58cb6e96ef9e7817a3358a9b410d5bbf6","src/backend/libc/c.rs":"63a6b16d4c13db9946c58ef643dd92ac4c81c4be8b5d2adf9bb0f5c797593d3b","src/backend/libc/conv.rs":"c4b4a6fdcf2f9295ab0498229a780942b031a2847ddf2466f90884bfb30255f8","src/backend/libc/event/epoll.rs":"f35eb0fddf9c4327551e1995f109a537f9899b9231e914671bdf26b616048370","src/backend/libc/event/mod.rs":"7f8547c599b8263eb791890bbe4a0b22fe2676d007ffdcc3e07b2e48d1c994db","src/backend/libc/event/poll_fd.rs":"06e2ce2fb30a45b90279ae753a67085e9f818a1b16d99cc92c525e2c46d61d51","src/backend/libc/event/syscalls.rs":"295a930521b345192caaf916b39189299f81f5f7c9f1aaed235baf3bf6f2f5fa","src/backend/libc/event/types.rs":"09e813690f44752708485e5a5ee59a43834a4a3e9577f2739b310c3ea8b4b64e","src/backend/libc/event/windows_syscalls.rs":"ebfac665c6676c4b803134ab8806be8aa2e96bdbc7799a19c544cd9069b35787","src/backend/libc/fs/dir.rs":"6f9526714c525e219399371d5acbe2666f0ac3ee88f73dc03fb31cf195ced95d","src/backend/libc/fs/inotify.rs":"f4e471484c4633dbb91c875ba8df87b687dd174cc45fdd2a1f717ffcb1591b00","src/backend/libc/fs/makedev.rs":"89c679a0ef18dd41b3c6223bce0f329ad35bf6cadbf16e47b33fad3f312ba4a6","src/backend/libc/fs/mod.rs":"576b6fcce723fcd5fc76637abd3b3565b00ca0bac309b63aecd6b8ee005fa81d","src/backend/libc/fs/syscalls.rs":"bba2a6a475fcd980f9352007f2b0b74b97abffe9d025e4c311e6f1f2f84eee3a","src/backend/libc/fs/types.rs":"20618e1a675fff5a8d2f08b4278425a3ec5c43d83518a5cbf3a934f135709a7a","src/backend/libc/io/errno.rs":"048d4350b9836fb9c9400127509705835a4992da86410e33a9e67cc43e22f35a","src/backend/libc/io/mod.rs":"746647bd864e4ec7717925b6d176cebdb392b7d015070244cc48d92780351dd6","src/backend/libc/io/syscalls.rs":"380d9be17ec7e46fdc24682aeae6410dcd8b10fd129090b479bb8ee405d9b5c6","src/backend/libc/io/types.rs":"8df53d41d72a39f334ed124707cb501e83b3624b769c80b78c62bd45179bc04a","src/backend/libc/io/windows_syscalls.rs":"fab3fa099aa89cea56a6edc651dd37750e581534f4f45b04a7e425f130b08468","src/backend/libc/io_uring/mod.rs":"2c6478857a0751625edabd61acb841819bfba1093b1faeded15693c805d84952","src/backend/libc/io_uring/syscalls.rs":"5af8146d5971c833e6fd657f652c618b31f854e1b0811864fba9b658cb633e19","src/backend/libc/mm/mod.rs":"8aa966faf3853d1a93d0ed91f7e5f4a53539b0287b25a5bfe489fa1d07f7cfd7","src/backend/libc/mm/syscalls.rs":"9b7c383f8ef1e258800c9797abbcd45374b97b6680637b1a247cb93878213b5a","src/backend/libc/mm/types.rs":"a427ff490880e92f1ddaa4aea55bb3ff2f43bd29712a6af09b25be2984ecb3e1","src/backend/libc/mod.rs":"2b396834f47b8771d2274eaa2632fbecebc17316d20f26d5e94f060bb8796f7d","src/backend/libc/mount/mod.rs":"8aa966faf3853d1a93d0ed91f7e5f4a53539b0287b25a5bfe489fa1d07f7cfd7","src/backend/libc/mount/syscalls.rs":"1bc87501a078616d0190d2e85de55f3f968b8cb79d49bd9eb839a350eed26089","src/backend/libc/mount/types.rs":"8e2b66822cd1148e5fe4023f5608f36afb0d47bad454db9560fbca4b0dbd2b6f","src/backend/libc/net/addr.rs":"b6644e0169c6681084704e9ca231863364d81e3d3cb5d3689e0df8acf7b4ec16","src/backend/libc/net/ext.rs":"0ec74196ace553c9bff355e2e35ad3b70cab46e32f8f7194715e4528fe1f54bc","src/backend/libc/net/mod.rs":"4e2d6c72bef19985a2935e7891288f0045d1ff1287ec50492882b4dfbaf0fea3","src/backend/libc/net/msghdr.rs":"f82825d40d8872e2e804cde32bbcc68bef43a5f48ad857fb1030550c4eb44838","src/backend/libc/net/read_sockaddr.rs":"af90cf19e2bebc04976b9ac97010c38db91dcd90fd81b48a14528d45be97fe20","src/backend/libc/net/send_recv.rs":"52ee998bc8422a074e5105607440631e569e531caa7d356157529c3ddb07b684","src/backend/libc/net/sockopt.rs":"c1e9974478f027d9053fed5b4f28e6dce49dd15f05e1ed574cb4eddf1be47b7c","src/backend/libc/net/syscalls.rs":"b876541609622003a9e8cd2787ecbb93b0a155cfd61914c043ac2401ed3fd923","src/backend/libc/net/write_sockaddr.rs":"14e941b5a11e4d7a17182860c47b1ee972b56ff7f0aadbcc6ab2c8740b13cea9","src/backend/libc/param/auxv.rs":"fdc85b7c33bcd05a16471e42eb4228c48c98c642443635ba5fc3e9a45438d6d3","src/backend/libc/param/mod.rs":"5234b8f1bcb886cca6ea003d411d75eaeebe58deedd80e3441354bf46ed85d4d","src/backend/libc/pid/mod.rs":"2c6478857a0751625edabd61acb841819bfba1093b1faeded15693c805d84952","src/backend/libc/pid/syscalls.rs":"49ea679b96c0741d048e82964038f9a931bc3cf3a0b59c7db3df89629b9c49e6","src/backend/libc/pipe/mod.rs":"8aa966faf3853d1a93d0ed91f7e5f4a53539b0287b25a5bfe489fa1d07f7cfd7","src/backend/libc/pipe/syscalls.rs":"8affde100f6a9dfc762b79d1e48be5c1039be414f8ef7d5a6acaba882a68d259","src/backend/libc/pipe/types.rs":"1e797beb383bb2e4038f1de21bea204caec4a4922b25a6ef84dbbe6e953e78b0","src/backend/libc/prctl/mod.rs":"2c6478857a0751625edabd61acb841819bfba1093b1faeded15693c805d84952","src/backend/libc/prctl/syscalls.rs":"8a2684f444a7555098dce2b92270d81cefdae902716c6e5d59bd7b0657e8a29d","src/backend/libc/process/cpu_set.rs":"b3d36b01b53b0b6c61a20ed8a69d48eccdd90cc17f82f2926ef1e844f002d0b7","src/backend/libc/process/mod.rs":"d7dc401255bad2e55ffff365339cdc3aad306861d269ad727a817d3cd7763166","src/backend/libc/process/syscalls.rs":"17b7eab2e0ea2898d41ec4e0d2a84baf57eae9f6e3e016e6b74f0e174440cffb","src/backend/libc/process/types.rs":"c26796486e9f9e7bbc44715ecaac285eb808fd9a8c08d237b2ac19f34954d608","src/backend/libc/process/wait.rs":"0cc556aed976b4bbb3965f74fd76b8216c755fce25043b7b21ce54afa07c9773","src/backend/libc/pty/mod.rs":"2c6478857a0751625edabd61acb841819bfba1093b1faeded15693c805d84952","src/backend/libc/pty/syscalls.rs":"699a4c325fc590b8b5dabfe5a9ff386809be14dd24bf35aa6ef581b2bd75457b","src/backend/libc/rand/mod.rs":"8aa966faf3853d1a93d0ed91f7e5f4a53539b0287b25a5bfe489fa1d07f7cfd7","src/backend/libc/rand/syscalls.rs":"78c7201e6bcb75e9cab9486d1878861319f865de2b2c46437be68690bd17bf13","src/backend/libc/rand/types.rs":"7763e6b40e4b9df825fdd2aa5ba1770da999137d04776561167116c58f8fa618","src/backend/libc/shm/mod.rs":"8aa966faf3853d1a93d0ed91f7e5f4a53539b0287b25a5bfe489fa1d07f7cfd7","src/backend/libc/shm/syscalls.rs":"60d797d4e85e08e6330e6b8d80094356ce377e5484952f88ae2a6e49231c268c","src/backend/libc/shm/types.rs":"be4034353ab434568053260712fa5f55ed6d8a1a9fae64fa4fe1ea4f2e82120b","src/backend/libc/system/mod.rs":"38563ea68829ca5a4b1b0695ac8a5c05718e85bdc88a36dc805efdfce45d3909","src/backend/libc/system/syscalls.rs":"4c632a6d59fef0bb516d89f119f7f37aa18f8a75f63aeba73d71c464473874cb","src/backend/libc/system/types.rs":"6871e16aee14fe2ae03cea798c3e509ffe44778a9c0e5608fd73e2e015876d7e","src/backend/libc/termios/mod.rs":"2c6478857a0751625edabd61acb841819bfba1093b1faeded15693c805d84952","src/backend/libc/termios/syscalls.rs":"dbe38f61f52f78dabf39e35a2fed3fb5f958270afa102e6bc3a374dc2621faf4","src/backend/libc/thread/futex.rs":"b666828653b12634bbd7fd709acf69641b648ec40962a1d4f904c5db14d2eff5","src/backend/libc/thread/mod.rs":"fa710053974d7f16a6c49242ee6c10a3b9e1143452b9daeaed8837302a679fff","src/backend/libc/thread/syscalls.rs":"120cb13fe7ffdef7a52ca1d141d31bf189176e9f5862f87bb171d54370a682a9","src/backend/libc/time/mod.rs":"38563ea68829ca5a4b1b0695ac8a5c05718e85bdc88a36dc805efdfce45d3909","src/backend/libc/time/syscalls.rs":"f82e0725c5af8a52e61ee83aad2c77694f2f7a72ac1e6eb284109a70ac6edc38","src/backend/libc/time/types.rs":"d4e55d2b9fb8de772c8bf5bba157472c49a12c25502e62a9c599a2e6d3196a5b","src/backend/libc/ugid/mod.rs":"2c6478857a0751625edabd61acb841819bfba1093b1faeded15693c805d84952","src/backend/libc/ugid/syscalls.rs":"8edf91b8790add23902c9f5418da6b0723a371677f29f490e0c8af852f0f1a0c","src/backend/libc/winsock_c.rs":"3bf3884fd250eca806ffdf96da68e29c133a697810b78b333ea449e523e58562","src/backend/linux_raw/arch/aarch64.rs":"2a255c9135bc8a321c180f52b88eb2b158bc9170cd222149caeae63c24587d44","src/backend/linux_raw/arch/arm.rs":"165bccb5883d0136e55d42091183765f83d86e9d37a7cb2cec9ae8af32774db6","src/backend/linux_raw/arch/mips.rs":"24af364aa93fd4b9917639d473336490a143f0d1723b09f388e72d534160ee51","src/backend/linux_raw/arch/mips32r6.rs":"e436a2ade34f2f7c58b8924462b07a2499dfc951e1e1318d51759444fb8b658e","src/backend/linux_raw/arch/mips64.rs":"897da9ddc877963ad59464d7f81dc59df8a7e91251e6adea14cfd946e2740a1c","src/backend/linux_raw/arch/mips64r6.rs":"3c08aea13c1139fb0dfbe74ca3d0147f007c3aa2eda641afb82c46f6aa4f6cd1","src/backend/linux_raw/arch/mod.rs":"37eaeea601d39be00d170856a31a8ec6a27b0d3ac22ab3ee642ab0e45dee237d","src/backend/linux_raw/arch/powerpc64.rs":"dfb001f8636a5e46d728900b0804fe6c374e5e18a6f0e76d7d62e0c07da74477","src/backend/linux_raw/arch/riscv64.rs":"41d33242d941030f46077dc2b1bc4c7913fe7630d693a5a7eef966bcf38f9d8b","src/backend/linux_raw/arch/thumb.rs":"2fd979ab421248c0a4c592bc0cefee63edc26528f469b71b63eaed35356e42e8","src/backend/linux_raw/arch/x86.rs":"81c73c0a13925854b54db754f533dec565b1db2d9586e210a2450c6f5915171d","src/backend/linux_raw/arch/x86_64.rs":"e929036a1f3cf93ba538f4523b241605cc7b5e61f84ffe6d9d5cdbcb6f73e543","src/backend/linux_raw/c.rs":"74cdf34fcd5bba9628c04585fa13721073e2bffe6740315c7655d1a55150230f","src/backend/linux_raw/conv.rs":"3b452e19c6b8285df205baa790e19f32061952ae7c7c4e39ba01726de6beb4fb","src/backend/linux_raw/event/epoll.rs":"18980136ee6d3e327d6c46a33306fa934a80eaee609a329098935569507440d0","src/backend/linux_raw/event/mod.rs":"72e46b04637e2d1d2a6b97af616144995399e489d1fe916faf835d72fc8c64cd","src/backend/linux_raw/event/poll_fd.rs":"78d040f7ce0574fc18cebf6b7766c210a3254abdbafe175f51c6b460fa19c514","src/backend/linux_raw/event/syscalls.rs":"8782cdf978bff0773bf5f35c1056d5c29a40742cf6f99d9606d951b52596cd34","src/backend/linux_raw/event/types.rs":"2ffdcb728587dbc25d302527f4a52296b96013d5199c946291b059ba4f7d466f","src/backend/linux_raw/fs/dir.rs":"c675dc5413428d2defd6752e99d210da83639779e853db209de6a1c08d35e0e7","src/backend/linux_raw/fs/inotify.rs":"42a6ed7b091eee20d7fd9becfea240deb99d0b8ca3e8e083cf365d484d8eeeeb","src/backend/linux_raw/fs/makedev.rs":"c6b4505c4bcbbc2460e80f3097eb15e2c8ef38d6c6e7abd78e39c53c372139e2","src/backend/linux_raw/fs/mod.rs":"8689e46f9c4c3e1190ae5fe39176c573cccac10b3739472128ca237f41e2686a","src/backend/linux_raw/fs/syscalls.rs":"a4512f58e2ef01bf0799c96fa7821284866b6826594642565f46d93e566e2e12","src/backend/linux_raw/fs/types.rs":"c015cf3cde945ad4a42ec706c6e1140c7342041ace1da6e0ef1bbf5e9e019ce2","src/backend/linux_raw/io/errno.rs":"8f15d735435b722466696094aed8b612b8526658ab7e079f386037cf8a3b1a6e","src/backend/linux_raw/io/mod.rs":"7ae2324427892cca6f5ab53858d847b165f790a72ec25f3d99fb15f0506c9f27","src/backend/linux_raw/io/syscalls.rs":"026f99b397ae521ac4b2ba41acec6547018073a9da5fa355aa8f9ac61298c172","src/backend/linux_raw/io/types.rs":"0cfffdc2fc9c33faa4055f9686db5922676f17d5c069362bae3745f39da3594a","src/backend/linux_raw/io_uring/mod.rs":"2c6478857a0751625edabd61acb841819bfba1093b1faeded15693c805d84952","src/backend/linux_raw/io_uring/syscalls.rs":"0f7c9cb7ccddf5687e4b9e5b23558871a452a29ac6095a0184a06c47b9b18eb6","src/backend/linux_raw/mm/mod.rs":"8aa966faf3853d1a93d0ed91f7e5f4a53539b0287b25a5bfe489fa1d07f7cfd7","src/backend/linux_raw/mm/syscalls.rs":"7d43752e8abaa8cec02cd3329e08303c56a9e6ac97aec447293397b9953c3ba3","src/backend/linux_raw/mm/types.rs":"4407b4b54c4048bb1d4e702575e20037c24483a22f6a5cfcc940e4738fcff381","src/backend/linux_raw/mod.rs":"bb26fe4783f834a4624c99ecf9e4ea8fa4d1c7fe9adfb6d7310fac689e7797bc","src/backend/linux_raw/mount/mod.rs":"8aa966faf3853d1a93d0ed91f7e5f4a53539b0287b25a5bfe489fa1d07f7cfd7","src/backend/linux_raw/mount/syscalls.rs":"3947261b5d46b9737f02dc5352c3a3a35c63c461fd75bcd8ae6619dfc0bfb54d","src/backend/linux_raw/mount/types.rs":"8d39c4d6e555c88de5da3115a301562fafdcb3942aa69a0e01d42de8edb1c78f","src/backend/linux_raw/net/addr.rs":"fbb6c071a8ebc2c557c7013d36d4b540c1ff6ed63103d0a8abcacd8977fbf5c8","src/backend/linux_raw/net/mod.rs":"904b2ccd4228ebcda25db0223b92fb03e4042bcc7b0a202f19410a8a5ff517a5","src/backend/linux_raw/net/msghdr.rs":"3198339ccd00224aefdba74e9144818fe387222066a387a9dfd95fd8c48cf9d9","src/backend/linux_raw/net/read_sockaddr.rs":"24075ac4c05fab5fe44aae4445cdd12ec7e474f047150baa9b768741d6b9693d","src/backend/linux_raw/net/send_recv.rs":"85b1a78ecc920e919ce7a0a9f7ad79cba5ad09af4f53018df8c46c7c112bd7a9","src/backend/linux_raw/net/sockopt.rs":"2ea8625ecd68a799c14ed48dec2f3e01f085222b6cc36f4916b20143e08ae575","src/backend/linux_raw/net/syscalls.rs":"eb897e0074ec53c5b18594c39cbb9ec5bd796c9120bf029f0b05145a7eee1804","src/backend/linux_raw/net/write_sockaddr.rs":"0eeeb7ec8dadf95503a2f89fdc933b8f9c7e45e03fdb343ce490b6cc8eec0f96","src/backend/linux_raw/param/auxv.rs":"c1b1a628d6c51ce8adc981f58c6e08abba4a151356ff98cdbc0b3e75abeadb8a","src/backend/linux_raw/param/init.rs":"a5024f9afe361884f22fb4d65e632ccd8a60f1138ff6bd526df600d40e0d9b7e","src/backend/linux_raw/param/libc_auxv.rs":"e567f471ab21068141acb9fb2f88b9ba553f630945bc8f01b689955b48414512","src/backend/linux_raw/param/mod.rs":"2e6a1a1c00351b9c88bd615aa923f71d76208df5626dd9bea03067f28f81dc31","src/backend/linux_raw/pid/mod.rs":"2c6478857a0751625edabd61acb841819bfba1093b1faeded15693c805d84952","src/backend/linux_raw/pid/syscalls.rs":"eef6aa01830ddd510b83f507da2002c03e58318b73744be2c06ebbe33c4f194f","src/backend/linux_raw/pipe/mod.rs":"8aa966faf3853d1a93d0ed91f7e5f4a53539b0287b25a5bfe489fa1d07f7cfd7","src/backend/linux_raw/pipe/syscalls.rs":"366c730fc3e991bddb9f5a15b8c3917a8e6ace6d1d5a9113b2749e476faf6f83","src/backend/linux_raw/pipe/types.rs":"9d4bd3da7cd078d42574ad7d6b7554e301a1a7e97b292a77993a6d263726ef1e","src/backend/linux_raw/prctl/mod.rs":"2c6478857a0751625edabd61acb841819bfba1093b1faeded15693c805d84952","src/backend/linux_raw/prctl/syscalls.rs":"a18b224307e0d27bda9b7b85c75cd6c7ddfe39f5ce52efb7bb0bf0585b757343","src/backend/linux_raw/process/cpu_set.rs":"dfdcbdf35aff6a3e08e7d38193bf18c12ca8aa64eb0dc417667be82dcc0f7c55","src/backend/linux_raw/process/mod.rs":"fb393c70a9c63ef9a6bf1fb5a2dc94f07d6b0b6987cc5231c15c607015dafd68","src/backend/linux_raw/process/syscalls.rs":"3730c155663d727d5b239f57f220727ef33dd257e74a1b697274de0f94e1da79","src/backend/linux_raw/process/types.rs":"6811ba822bc12a1a6336649151b4adb1f5d3365684a31c07f01953ea9547743d","src/backend/linux_raw/process/wait.rs":"921aee4b0048746087f52615a98edc2aa0fb4b53d6df44be4533098df55d1b05","src/backend/linux_raw/pty/mod.rs":"2c6478857a0751625edabd61acb841819bfba1093b1faeded15693c805d84952","src/backend/linux_raw/pty/syscalls.rs":"ae09c4aecc0ae87b1ca58d82efc58007b9dddaae78460d615f48da19d1cd0f89","src/backend/linux_raw/rand/mod.rs":"8aa966faf3853d1a93d0ed91f7e5f4a53539b0287b25a5bfe489fa1d07f7cfd7","src/backend/linux_raw/rand/syscalls.rs":"a84f70251672e92f253838bf05e989434f67373233e88d21d8835cbb792b1fe3","src/backend/linux_raw/rand/types.rs":"b7217c98f2da4ecf89225f36eb52c396ba2a73debdeca739ba864a9030b6a23d","src/backend/linux_raw/reg.rs":"39b6234971122d247054bda8c2dc3b44493be30482635baa9e2fcbe048e78cbd","src/backend/linux_raw/runtime/mod.rs":"b2cae8cce3822c3c92942f06ea0b68464040dcac33c6f0f7ee392c6269993347","src/backend/linux_raw/runtime/syscalls.rs":"fc9c9edbe07dd7644ab75c51a1597a31467a3421301503a072bef93e6df07643","src/backend/linux_raw/runtime/tls.rs":"6316060560a112c2e9cd9807cdba6e8c91414113a04a739160929ae9a67bba1f","src/backend/linux_raw/shm/mod.rs":"8aa966faf3853d1a93d0ed91f7e5f4a53539b0287b25a5bfe489fa1d07f7cfd7","src/backend/linux_raw/shm/syscalls.rs":"3ebf91610b02de0312e1f0f0cc5d56e12b4d93794540087b3182cbdf3cb9c8db","src/backend/linux_raw/shm/types.rs":"76f587e0edc78e3b780b66ddf9bdf56d5bcb57822111e9f8f66a7eec07c2c0fa","src/backend/linux_raw/system/mod.rs":"8aa966faf3853d1a93d0ed91f7e5f4a53539b0287b25a5bfe489fa1d07f7cfd7","src/backend/linux_raw/system/syscalls.rs":"2af6c2672a6528b404b52f501d9803a63e9f47886d3b5d2d2dc5a71cefa7327a","src/backend/linux_raw/system/types.rs":"1ceab8d738a71043473b26e97fa3fd79d588a86d4774cbc9b9e1d4f1447a016e","src/backend/linux_raw/termios/mod.rs":"2c6478857a0751625edabd61acb841819bfba1093b1faeded15693c805d84952","src/backend/linux_raw/termios/syscalls.rs":"01c4c8028595d8f53197ac3f2accb83057cb80fb5f78db63bbf85eb46d40e1c9","src/backend/linux_raw/thread/futex.rs":"0aa0f9f5be6a79de81e82c9f11f1bf8831f682a4b2f6cb29669e1591636f084e","src/backend/linux_raw/thread/mod.rs":"6ad4a4b90b9234e79900b27ebbe8837e3a7a36aec532912e3e253edce5225067","src/backend/linux_raw/thread/syscalls.rs":"0e3dca63be7322e1e9c58456b28eb8548abb9f08b4436b87b139e891c01c446d","src/backend/linux_raw/time/mod.rs":"672724f55b7b7be6a7452bb1cc2d28b5f0aaa840a2856fe363acce624e1beefc","src/backend/linux_raw/time/syscalls.rs":"451da0bbb66accdf58ee7f2f8746a93b22ee985907a08717a3d3b1650457d878","src/backend/linux_raw/time/types.rs":"56cb936dc36af759e06d0b7bcfa7db02818dff6084c1d1d9a538a6d5945032d5","src/backend/linux_raw/ugid/mod.rs":"2c6478857a0751625edabd61acb841819bfba1093b1faeded15693c805d84952","src/backend/linux_raw/ugid/syscalls.rs":"8c86d251db33b399a1d1cbb8e87afe650b78db84f444d3251309b7a0480b54f7","src/backend/linux_raw/vdso.rs":"056314f72c71d4b041739337ca2098afb7fe070f844327f462c293f43481b75b","src/backend/linux_raw/vdso_wrappers.rs":"6bd4b8c15761e16defff7d2eed18c008c9f980ebee6c9bdc2e104b16b9b7c631","src/bitcast.rs":"e21c87c292c781b27256c6f5dcf9fd52dd69ed6e21016cbd67ac31a8219ebf8e","src/check_types.rs":"bc31f9b293bfbf272284ed4d0da24e941206a1ac3f18d3abb0b1a61f1ede1dae","src/clockid.rs":"1d2e1cfcf23160b55d6b046d235edf2eb2408f072a8bdef3e3a3871885abdd5a","src/cstr.rs":"41af2a4429fe6c67a8883f8a8f63d66c90df566abda74fd2b03bcef7b9f24218","src/event/eventfd.rs":"81cbd08f7bdf40a6ce1ca692b63da1dc8ba925282990668d9d68f1203e839fa1","src/event/kqueue.rs":"f13f40b97c4413902115408fc2f340e7c82bba42d2468015b007e8ac6ef23132","src/event/mod.rs":"326a7ddfeb33bb95dd31391d65f400fa622e5d0ab827c096cab16af4b6843b51","src/event/poll.rs":"0ee583dbd457a573a82a06c04a2a24bd2c76e751d27a435507d55338e2871327","src/event/port.rs":"4e51ff150e5d17cbd44aa64a38b99c15e26eaaf2e350768b5dcacdfde4fa5212","src/ffi.rs":"c2b8b38c02d72749aceb715c496726caba1f1fa989ad3856d0103a2fafed89ed","src/fs/abs.rs":"6f0b57878f61c009fd0a3698a3983aa8076ba4fe3b5c28feec0307d65a77891c","src/fs/at.rs":"9b5f35afcaca9dd19d501cee3ad305b1d1c1b588c5fd925b18b6d3b614f05011","src/fs/constants.rs":"24076a01f8bfc126b0905e9bc0521d2c3a3abc6c3b8c86ddb1e545070d097127","src/fs/copy_file_range.rs":"d3b644374390d482b2ff749a2459458872b57d0dcf9670368739b7833509a7c2","src/fs/cwd.rs":"9f429a79ace6e17455634da09216ee0ad3d067a4541518b3193ae6a8d9ff1e26","src/fs/dir.rs":"347a52f4ca9ac6321c52e802e97ec90d1b4c62ec955c8996fc17f8f5aed69966","src/fs/fadvise.rs":"beef66ebe1310fb92628240b2cde68f744c78e50f6ff61bb1404bd4868d9cae8","src/fs/fcntl.rs":"2085102d05d0ba963e100ab3e3f19dac4ff27d142fbf798626d20a2a596ba96d","src/fs/fcntl_apple.rs":"e2f23f038083621bcdecc98d02ce1023508afaecdb2ed0fba5c8b70f955301e5","src/fs/fcopyfile.rs":"ec95929cbbe02cf49233a785e5238931bb107b7903cc5bc95e4231de960995f2","src/fs/fd.rs":"68d19bb99da0444444acbafd27ba10a69fc2ac35ede4e0106ac2043c30ce4f8f","src/fs/file_type.rs":"fefd865f91011f66126213b0994773d99e373b468c31e866002228c98c64ad85","src/fs/getpath.rs":"28f6970fc1bbc37bb35c84724b59eac436ea7407a4522e18c2bdacb1fdd2edd9","src/fs/id.rs":"1b5c8a8baf9a9bb1f895f97189cea4e5982a0d35b192afeec6340a6c6222e0cb","src/fs/ioctl.rs":"e798eb41bbc201c375bc295ad2928c2467b45b4fbbff3c82236dccbc0d6391a5","src/fs/makedev.rs":"85520b484cb7c15ab71ea1c368578ea3b7e484d82f8510db92b6ce9f7ca341ae","src/fs/memfd_create.rs":"15a8f28e040cffd8c24c7903483440f88853b2e538ad48d80f3c00b4b2befdea","src/fs/mod.rs":"ecd20bf01d651b1866b198c4fc0dc8e7d681eb581e28708c8a0abd062de137ce","src/fs/mount.rs":"8f6ea2b997dd83c50c90291b9ada3ed77a9ce1ad701c9b0d533b5113b317be5b","src/fs/openat2.rs":"4a95c15dab533a41201b5fa25c8a212956b7571d58cad696bdaf45af8aef96db","src/fs/raw_dir.rs":"18ad797876d6230c38d4cacbed081c4028e03ace477d0520d9b226b40de942f0","src/fs/seek_from.rs":"a9efa0feb9ac789cf47667e91efee2e3f2dcde16cb3b7a928c99da640fa0e0d6","src/fs/sendfile.rs":"e3b2058741cf4b1698f34d84bb37130cf2b72806d522a16fe541e832cde136cb","src/fs/statx.rs":"c5192f54311faeb5a8e7ba7da1acd4fa5a8f69f6c15c4d78b4b7a495d21d0864","src/fs/sync.rs":"a3b23543834281f347b0f873bd38154d31d404871188ac08f2b20b9196234cfd","src/fs/xattr.rs":"b976eece822f69e768d91e61d86ee990ea18097b20f7ebeedaaf718c61b48962","src/io/close.rs":"0aa3cd05a8fed8e5244f97b8b6c2e7f65ed93a4e5435c6329852bb3da7514440","src/io/dup.rs":"a8a59c5d345dc54c57ded890720c33eb78c4d53917c71e8bb6317f7ed122cb87","src/io/errno.rs":"58a4d20ba0924e4d514e3c876fbe08982f1623187642ae14780815e65989c8c8","src/io/fcntl.rs":"c0f7bd7fce1119b0c1d0085b7ab77d5df02470ae3e06035428a2452dacbec296","src/io/ioctl.rs":"a47a5ec14607142f2c2ffcb93d016b8e86c8b15ba94b56b35382b545db62ea34","src/io/is_read_write.rs":"1bfb9ee5d58e0b29b44af12fe2668c7bccc841358698dcde47f1519ff9bb73b4","src/io/mod.rs":"75f1d0646be1d4c7c08b5887d8119b0103be8c25c43ccd4e0e97015508c0bb8f","src/io/read_write.rs":"54ba528b11601af1338bb0c71a41b256a0033076d30b3946c3fd0bdfa61dd021","src/io_uring.rs":"61e6dce7a53ce141604b5f577e20ec5aa73a20e2c2101825c7c52396ba221886","src/ioctl/bsd.rs":"e33772530b1b2d1e7d61ce53d9267e68d88f7f5df13301f90412d89a46c6faaa","src/ioctl/linux.rs":"256aef9fb51c67cd7f43a581b702aa62757e3bdd7c38f71bf849f818194c2285","src/ioctl/mod.rs":"739a4f6d4e9a816745e42133c1b4c51c1285bf5cde3e1ee3ea72ef0b802e3806","src/ioctl/patterns.rs":"26dda154aabf67d45362d9378875f95c1593391ecbc62be02d187b7e19e7f9da","src/lib.rs":"7430fd1db444a72dc8f9b762f2fb61226dd00c89c7c932481e94958f60678383","src/maybe_polyfill/no_std/io/mod.rs":"77889bb5c5a4f2e50e38379cdaa5d0fef4b0cafc3da056735df01f6deae75747","src/maybe_polyfill/no_std/mod.rs":"ec94a4aab4bc475785e469d10fd6bc95667e1d47d958e9cff3a19049d88c8c80","src/maybe_polyfill/no_std/net/ip_addr.rs":"046327ee244f758f2bc31d3be305d8cd0dfd8342aac1add8259e999b4b46c4a7","src/maybe_polyfill/no_std/net/mod.rs":"b0ee611c454679226a15bf647e7779995f3fe9c8e0507930a0d0613eb414b7c2","src/maybe_polyfill/no_std/net/socket_addr.rs":"bfeb32d32c176cde76323abcffebfc47e9898fb8d7ce3668c602dc8451086a2d","src/maybe_polyfill/no_std/os/fd/mod.rs":"27ef0afbcb0695cbb15101070f417eb51e0ef85ae66ec967d95e80771d507c47","src/maybe_polyfill/no_std/os/fd/owned.rs":"ab86ffa2693a04f3085770faf395f95e5303001711be8b19c44a47a0ac574091","src/maybe_polyfill/no_std/os/fd/raw.rs":"f3648c7bd4a6ff94bd823ed9e0d99d398e02f24875cf9b25962736999e7c6943","src/maybe_polyfill/no_std/os/mod.rs":"27dab639a765827644005d5f2fcc7c825310606b889cc8dd83f54c9528350dc0","src/maybe_polyfill/no_std/os/windows/io/mod.rs":"5bbcc05c83fee5026dd744a994e0458469466d5be39081baa62df07753b92fd2","src/maybe_polyfill/no_std/os/windows/io/raw.rs":"4c32609a489dd938a49328b5637cb3bafb96437f2f9f269ab66d7d3cb90247f6","src/maybe_polyfill/no_std/os/windows/io/socket.rs":"c658f42f24eff44a661f2adfd24a11af80fe9897f3e2af4dc5d2c64808308d65","src/maybe_polyfill/no_std/os/windows/mod.rs":"fdb416f8f231a4e778b5f985b9ae712ece5e1a1402963ad1a5f6a8b9843795f4","src/maybe_polyfill/std/mod.rs":"7c16c86cc73e226e65ead598e4018238b22000a345040b706bf1e1b3eba115fc","src/mm/madvise.rs":"3c262b3713a73fafcedf1b04bb12c048bb11d47ca43c959e5dfa48c27651f4f0","src/mm/mmap.rs":"28523bceb2b7e3bc1207f351d8a1771867159e1b9ff1d9c37fbdc05aeb696b50","src/mm/mod.rs":"b3a6cb838986d45825b912355cedead761211a494ca6f89b2367a2d2157e340e","src/mm/msync.rs":"9dcfe5f54235e9314a595edb8d548ac79d222bbcc58bb3263cf7e96d603b23ad","src/mm/userfaultfd.rs":"8073443bd181ff0b3ba4d0b1ae67370b4864035a0c8b4898cd709dc47c518ae7","src/mount/fsopen.rs":"160e384e9175fd98669cda1cf3590bb195c2ba7e1c724e9ea06e692595e58ba1","src/mount/mod.rs":"5f0c9df4727592695deb1cd63ae1de021b03dcd9d0d1b68e1f34d12a7136cb19","src/mount/mount_unmount.rs":"8ad11675e5d762d33fbefbed06a6a9f9e52a9b689bd06662446152614321ab77","src/mount/types.rs":"601ae3e10b7dc496fed7f3b40a80e81c6edd7bf13189d7be45c3212d4c684c39","src/net/mod.rs":"a6bc55f9e086caf46a7c00783498d73a328a66f2a991f1ec65d5f13931377b0f","src/net/send_recv/mod.rs":"f33e39c7b228cd8109823b0a0a1aa397cddad504d49e69b36f74c5b84e5070e5","src/net/send_recv/msg.rs":"6a73e4fd2b78fbeb683f48bcd8404a362245c6cd66e0d59e6378467600b44268","src/net/socket.rs":"bc825a94831557bf4c2cabd8ef5ae21ffe0986c24f9ada564eefa66270f69ae9","src/net/socket_addr_any.rs":"a9af81e967a91b45e51aec4f46a068fade7035c5d19dfaf05bfdcd3b3c32e9bf","src/net/socketpair.rs":"56f4885c31d2664cd16e18a9a88792a4912fedd953cec36dba67e8581fd57921","src/net/sockopt.rs":"58fbabf73ede7afaeac279241ff874dcee5604f63b3f38e4435a61a354da2196","src/net/types.rs":"5914e786d6a24a6efc355023fd71a28bc29fc85fb672e5388370243fe83e4191","src/net/wsa.rs":"6e546b42f50a851fc833c57cda76cfb347203ed4b0dea574a3d325bf5a2ebf80","src/param/auxv.rs":"886e57d52adf0199b8146f849c2db1e3136c07fc4ec61f60dc781fea11070604","src/param/init.rs":"a31c0e5cea61a1a999767fe74f87c0d59eeb6bce66578b842fe0e0c32be27a55","src/param/mod.rs":"25b10acd5b1da8faa6f5204e6b0379b38bfab667916e886cca64bea01a42dec2","src/path/arg.rs":"b1329485ad525e8d067792b0890a1241c20f6c08d29f0661f2a9f429581a7818","src/path/dec_int.rs":"fad9793b89eac526953b994cbed6b614f01c25108f9763e19fb98029feda93a4","src/path/mod.rs":"6b1b949c94bcc47e0f08a3f8e8db5b61ff497d0dfd3e0655f51c01d3e4b7dfd6","src/pid.rs":"f1c486000c5b1311b2d720cee88f089c17ef9a171709673dd06e6f35f4ff98a3","src/pipe.rs":"fd021deeacf78790886ae03f1e8f0fe5a13909c7532b2246581667eb3d0483d7","src/prctl.rs":"c9daa9397460a27ed6e31fc454fc40dff83720312a002b66f4c178fc005379ec","src/process/chdir.rs":"6947b80d468d906d528de328f30fe7509acfe4976a0de3fa600a7cc39618c049","src/process/chroot.rs":"2b5f6124eb19f26ad2705174f7ad50cdc0a5d15abd59ffcf55421228d82130b4","src/process/exit.rs":"48de66e5504a00cb375d8f415ce63b6225a3f5204268d40726a7d0fbba43f587","src/process/id.rs":"e4733f9e8e4b5f50e98ef7a23802e126f1f14ece8b3d7ae7446c6a66affc6bc1","src/process/ioctl.rs":"23ad0285671e8d7ca71a63c50655dbf732ccea8af11d754a0558e0236db37e76","src/process/kill.rs":"7b879e5cff8a064acd0e7488f4e21bd4e4a8506ce380688b72cc48d283ff9c89","src/process/membarrier.rs":"d6c8821ec73019040c926f4e1018f399c286e87074ab9c6692370c88772af044","src/process/mod.rs":"0ef104be820068409648ed83739a51a7dcf07612088f9a6ed6c7ebdb6ad54092","src/process/pidfd.rs":"4be2fba21430ba84244e11c636c91201bc1d1dffff3e63a4da84cc96f501786b","src/process/pidfd_getfd.rs":"cbc42a1548280ae76c3c66e851fd53cc2a3d4c089cb5798c17a2cdf654f0e229","src/process/prctl.rs":"3396b39a0f161385f02f6a4a17db0a3fcd2d70f4d6d2e1d8d1b9091b36d5704c","src/process/priority.rs":"f135482e71ea8aa0daf92b9f238051178a4c904070fa8409622f94155df3c544","src/process/procctl.rs":"0ca2f5fd4cd39335c141758cac132a3271e318c688f8882e527f58c02d5ab0e2","src/process/rlimit.rs":"10b79de3ced0e64059a94c879742d46a35a6176c776d8eed75031d5e6340283d","src/process/sched.rs":"7c3bfc5be624e7d7f5e043c3ee0b0566fcab3d684d61c272e7b4233410ab1c42","src/process/sched_yield.rs":"6565faa3928b66ddc74a65e893e15edfa4b9be4f7e5f5f68527501a7f6bc3350","src/process/umask.rs":"1a0f31a842303c978e3f05ec191e2b5e96104c09c6596473b42b1fac34898a50","src/process/wait.rs":"cef08e74f5d87df925fa6a6acc95e8de0e689fb420ba514b9fbf94a9ca403465","src/procfs.rs":"63b286dd3302be7f426841eb3b9261ef4785c3159ed78c24734bd5094c9b0b2e","src/pty.rs":"ee697b226230b65c0432a42cd82b2fad885ca70285dbb0a7b8ec0ff81d23e3a0","src/rand/getrandom.rs":"1c8166a02a74f5593bb4673ef907524df04cbc1568020a5ab2ff7f4aa1283f8b","src/rand/mod.rs":"cab59332aadd9b679f5b22cbb222d48ee028af5eb9fd4a4d43922da659b895d7","src/runtime.rs":"9aed98071bf8124c6913e8517b28ef63c7b88001042f0522d877ab65ac88099a","src/shm.rs":"b96fe8a05ee5d4536464a8843a776d43a938abaf22c772fc35b5373d95644a8d","src/signal.rs":"c071b4f011deef19a679d7a832d5408a3cd68627161d6510008d6312266a2611","src/static_assertions.rs":"39986672f489949be1d6469f0c30fb7d2eaa21bdaa2702a8c150b2b150bf5535","src/stdio.rs":"a5de2d7d9c3c5a901f88b6acf4754687c958a2f3a93c7945c2b8fcb948d468af","src/system.rs":"e594c16b4600a8ebea01c5850cd9e1521acf242f1a9f083403cfe95adcde4679","src/termios/ioctl.rs":"a1ac967f7811a482f8bb53847c37c8359f518cd26da9df7b816ba678a0139623","src/termios/mod.rs":"b44b7caa60b6f458657ed58a0e0eca41bb4e6d6be4b0f042bbb8ab7056cebe4b","src/termios/tc.rs":"e41312d15464b83b2457c2502fc3f3b9998cfb02ba68739026dd4285cc7130ac","src/termios/tty.rs":"906ee160ffb5ba3a92ea3a61374cf2cd617cebae51f5490073b6700c438d5eee","src/termios/types.rs":"8c4f029e8dbb9b2c59082bd13d628a741f6f0a31f5ecf485c93795b73c1daeca","src/thread/clock.rs":"469326c822dfb63405ee8537552cedde0b344978280e6645bbadd47dedc71e18","src/thread/futex.rs":"985f9a0dce1e2a4892ae7f26bd1bf119ceae3f9fa6b4707e166624fc1cee76d4","src/thread/id.rs":"ad72db4fea9fccb728310bbfd01ef8c00f6cc60fa2a750f6349646a134f7009b","src/thread/libcap.rs":"a3e316e6d0f58b075125fa939b9b824e1595de20a23d341a36999416b0d10d36","src/thread/mod.rs":"98634ece0b882f123ad887017692f2a4d94a23a1dec278ed660b3497cac5cceb","src/thread/prctl.rs":"10e2878ef856557bc5bd24c77f5699266e746f912e73690980371cc99c7417e8","src/thread/setns.rs":"730badd8db8ce7b905767b341211f1226eeeb38019727bf994eb64f2531b7b7f","src/time/clock.rs":"e59a29f1bed8c31c3d5b6fad60f2d4fa6cab8dd8e86148bb3693a5e3a1ce735f","src/time/mod.rs":"43afee938c80d124d04d4ba190c03f4d21d1e3bfc154fff309211e4f6eabe940","src/time/timerfd.rs":"f17092b84553741aa2d2b44c6992b5d2c8c96cc2c2007fc9a2c6b2064485e53f","src/timespec.rs":"32a4d930cbc0f6dbd23153290db920671cf4ce65a4a127e176f897c1cde42d7d","src/ugid.rs":"6616c6e35b7e43aee5b150f1efae7a50711e0947943c9a96833dbe214ad9e85f","src/utils.rs":"9ae76f8a41d6cc350cdd58c9084b5c3a5a708eeecd769783debdbcbaef442182","src/weak.rs":"c7cf03bf2aeba494b1999ab32183fa8c603ab72e254c0e312a67f168877e410d"},"package":"745ecfa778e66b2b63c88a61cb36e0eea109e803b0b86bf9879fbc77c70e86ed"}
+\ No newline at end of file
++{"files":{"CODE_OF_CONDUCT.md":"f210602311e3f74b32f46237fd55f4ce36d798e85e3db1432ec667f63a7ffc44","CONTRIBUTING.md":"3fd57de5c678db1c972da676a8231d2fde9820695ef1f0d53f1e55a3e81d9de0","COPYRIGHT":"377c2e7c53250cc5905c0b0532d35973392af16ffb9596a41d99d202cf3617c9","Cargo.toml":"c994094b77085196394cd7cc03161f4810af7da799cfb40c696973e912929a8f","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-Apache-2.0_WITH_LLVM-exception":"268872b9816f90fd8e85db5a28d33f8150ebb8dd016653fb39ef1f94f2686bc5","LICENSE-MIT":"23f18e03dc49df91622fe2a76176497404e46ced8a715d9d2b67a7446571cca3","ORG_CODE_OF_CONDUCT.md":"a62b69bf86e605ee1bcbb2f0a12ba79e4cebb6983a7b6491949750aecc4f2178","README.md":"58d2ee38dad64ff142e98872860b282b36a09076df537af57aba39d675d84a28","SECURITY.md":"4d75afb09dd28eb5982e3a1f768ee398d90204669ceef3240a16b31dcf04148a","benches/mod.rs":"e260e67273aa0a37cffdc5cd451699335b8ee656c17275a0d0f2b4563c6018ca","build.rs":"5f93559819ca7fe233f01579d51ff5b58cb6e96ef9e7817a3358a9b410d5bbf6","src/backend/libc/c.rs":"22550be154471f160ebfdb9e7b45a1e06452ec426a8d70b23a648a069c0216f4","src/backend/libc/conv.rs":"c4b4a6fdcf2f9295ab0498229a780942b031a2847ddf2466f90884bfb30255f8","src/backend/libc/event/epoll.rs":"f35eb0fddf9c4327551e1995f109a537f9899b9231e914671bdf26b616048370","src/backend/libc/event/mod.rs":"7f8547c599b8263eb791890bbe4a0b22fe2676d007ffdcc3e07b2e48d1c994db","src/backend/libc/event/poll_fd.rs":"06e2ce2fb30a45b90279ae753a67085e9f818a1b16d99cc92c525e2c46d61d51","src/backend/libc/event/syscalls.rs":"295a930521b345192caaf916b39189299f81f5f7c9f1aaed235baf3bf6f2f5fa","src/backend/libc/event/types.rs":"09e813690f44752708485e5a5ee59a43834a4a3e9577f2739b310c3ea8b4b64e","src/backend/libc/event/windows_syscalls.rs":"ebfac665c6676c4b803134ab8806be8aa2e96bdbc7799a19c544cd9069b35787","src/backend/libc/fs/dir.rs":"6f9526714c525e219399371d5acbe2666f0ac3ee88f73dc03fb31cf195ced95d","src/backend/libc/fs/inotify.rs":"f4e471484c4633dbb91c875ba8df87b687dd174cc45fdd2a1f717ffcb1591b00","src/backend/libc/fs/makedev.rs":"89c679a0ef18dd41b3c6223bce0f329ad35bf6cadbf16e47b33fad3f312ba4a6","src/backend/libc/fs/mod.rs":"576b6fcce723fcd5fc76637abd3b3565b00ca0bac309b63aecd6b8ee005fa81d","src/backend/libc/fs/syscalls.rs":"bba2a6a475fcd980f9352007f2b0b74b97abffe9d025e4c311e6f1f2f84eee3a","src/backend/libc/fs/types.rs":"20618e1a675fff5a8d2f08b4278425a3ec5c43d83518a5cbf3a934f135709a7a","src/backend/libc/io/errno.rs":"048d4350b9836fb9c9400127509705835a4992da86410e33a9e67cc43e22f35a","src/backend/libc/io/mod.rs":"746647bd864e4ec7717925b6d176cebdb392b7d015070244cc48d92780351dd6","src/backend/libc/io/syscalls.rs":"380d9be17ec7e46fdc24682aeae6410dcd8b10fd129090b479bb8ee405d9b5c6","src/backend/libc/io/types.rs":"8df53d41d72a39f334ed124707cb501e83b3624b769c80b78c62bd45179bc04a","src/backend/libc/io/windows_syscalls.rs":"fab3fa099aa89cea56a6edc651dd37750e581534f4f45b04a7e425f130b08468","src/backend/libc/io_uring/mod.rs":"2c6478857a0751625edabd61acb841819bfba1093b1faeded15693c805d84952","src/backend/libc/io_uring/syscalls.rs":"5af8146d5971c833e6fd657f652c618b31f854e1b0811864fba9b658cb633e19","src/backend/libc/mm/mod.rs":"8aa966faf3853d1a93d0ed91f7e5f4a53539b0287b25a5bfe489fa1d07f7cfd7","src/backend/libc/mm/syscalls.rs":"9b7c383f8ef1e258800c9797abbcd45374b97b6680637b1a247cb93878213b5a","src/backend/libc/mm/types.rs":"a427ff490880e92f1ddaa4aea55bb3ff2f43bd29712a6af09b25be2984ecb3e1","src/backend/libc/mod.rs":"2b396834f47b8771d2274eaa2632fbecebc17316d20f26d5e94f060bb8796f7d","src/backend/libc/mount/mod.rs":"8aa966faf3853d1a93d0ed91f7e5f4a53539b0287b25a5bfe489fa1d07f7cfd7","src/backend/libc/mount/syscalls.rs":"1bc87501a078616d0190d2e85de55f3f968b8cb79d49bd9eb839a350eed26089","src/backend/libc/mount/types.rs":"8e2b66822cd1148e5fe4023f5608f36afb0d47bad454db9560fbca4b0dbd2b6f","src/backend/libc/net/addr.rs":"b6644e0169c6681084704e9ca231863364d81e3d3cb5d3689e0df8acf7b4ec16","src/backend/libc/net/ext.rs":"0ec74196ace553c9bff355e2e35ad3b70cab46e32f8f7194715e4528fe1f54bc","src/backend/libc/net/mod.rs":"4e2d6c72bef19985a2935e7891288f0045d1ff1287ec50492882b4dfbaf0fea3","src/backend/libc/net/msghdr.rs":"f82825d40d8872e2e804cde32bbcc68bef43a5f48ad857fb1030550c4eb44838","src/backend/libc/net/read_sockaddr.rs":"af90cf19e2bebc04976b9ac97010c38db91dcd90fd81b48a14528d45be97fe20","src/backend/libc/net/send_recv.rs":"52ee998bc8422a074e5105607440631e569e531caa7d356157529c3ddb07b684","src/backend/libc/net/sockopt.rs":"c1e9974478f027d9053fed5b4f28e6dce49dd15f05e1ed574cb4eddf1be47b7c","src/backend/libc/net/syscalls.rs":"b876541609622003a9e8cd2787ecbb93b0a155cfd61914c043ac2401ed3fd923","src/backend/libc/net/write_sockaddr.rs":"14e941b5a11e4d7a17182860c47b1ee972b56ff7f0aadbcc6ab2c8740b13cea9","src/backend/libc/param/auxv.rs":"fdc85b7c33bcd05a16471e42eb4228c48c98c642443635ba5fc3e9a45438d6d3","src/backend/libc/param/mod.rs":"5234b8f1bcb886cca6ea003d411d75eaeebe58deedd80e3441354bf46ed85d4d","src/backend/libc/pid/mod.rs":"2c6478857a0751625edabd61acb841819bfba1093b1faeded15693c805d84952","src/backend/libc/pid/syscalls.rs":"49ea679b96c0741d048e82964038f9a931bc3cf3a0b59c7db3df89629b9c49e6","src/backend/libc/pipe/mod.rs":"8aa966faf3853d1a93d0ed91f7e5f4a53539b0287b25a5bfe489fa1d07f7cfd7","src/backend/libc/pipe/syscalls.rs":"8affde100f6a9dfc762b79d1e48be5c1039be414f8ef7d5a6acaba882a68d259","src/backend/libc/pipe/types.rs":"1e797beb383bb2e4038f1de21bea204caec4a4922b25a6ef84dbbe6e953e78b0","src/backend/libc/prctl/mod.rs":"2c6478857a0751625edabd61acb841819bfba1093b1faeded15693c805d84952","src/backend/libc/prctl/syscalls.rs":"8a2684f444a7555098dce2b92270d81cefdae902716c6e5d59bd7b0657e8a29d","src/backend/libc/process/cpu_set.rs":"b3d36b01b53b0b6c61a20ed8a69d48eccdd90cc17f82f2926ef1e844f002d0b7","src/backend/libc/process/mod.rs":"d7dc401255bad2e55ffff365339cdc3aad306861d269ad727a817d3cd7763166","src/backend/libc/process/syscalls.rs":"17b7eab2e0ea2898d41ec4e0d2a84baf57eae9f6e3e016e6b74f0e174440cffb","src/backend/libc/process/types.rs":"c26796486e9f9e7bbc44715ecaac285eb808fd9a8c08d237b2ac19f34954d608","src/backend/libc/process/wait.rs":"0cc556aed976b4bbb3965f74fd76b8216c755fce25043b7b21ce54afa07c9773","src/backend/libc/pty/mod.rs":"2c6478857a0751625edabd61acb841819bfba1093b1faeded15693c805d84952","src/backend/libc/pty/syscalls.rs":"699a4c325fc590b8b5dabfe5a9ff386809be14dd24bf35aa6ef581b2bd75457b","src/backend/libc/rand/mod.rs":"8aa966faf3853d1a93d0ed91f7e5f4a53539b0287b25a5bfe489fa1d07f7cfd7","src/backend/libc/rand/syscalls.rs":"78c7201e6bcb75e9cab9486d1878861319f865de2b2c46437be68690bd17bf13","src/backend/libc/rand/types.rs":"7763e6b40e4b9df825fdd2aa5ba1770da999137d04776561167116c58f8fa618","src/backend/libc/shm/mod.rs":"8aa966faf3853d1a93d0ed91f7e5f4a53539b0287b25a5bfe489fa1d07f7cfd7","src/backend/libc/shm/syscalls.rs":"60d797d4e85e08e6330e6b8d80094356ce377e5484952f88ae2a6e49231c268c","src/backend/libc/shm/types.rs":"be4034353ab434568053260712fa5f55ed6d8a1a9fae64fa4fe1ea4f2e82120b","src/backend/libc/system/mod.rs":"38563ea68829ca5a4b1b0695ac8a5c05718e85bdc88a36dc805efdfce45d3909","src/backend/libc/system/syscalls.rs":"4c632a6d59fef0bb516d89f119f7f37aa18f8a75f63aeba73d71c464473874cb","src/backend/libc/system/types.rs":"6871e16aee14fe2ae03cea798c3e509ffe44778a9c0e5608fd73e2e015876d7e","src/backend/libc/termios/mod.rs":"2c6478857a0751625edabd61acb841819bfba1093b1faeded15693c805d84952","src/backend/libc/termios/syscalls.rs":"19c3a0cfd146ada30e04bfc6b8bc8cc2319890851c803fa0024ff05085137151","src/backend/libc/thread/futex.rs":"b666828653b12634bbd7fd709acf69641b648ec40962a1d4f904c5db14d2eff5","src/backend/libc/thread/mod.rs":"fa710053974d7f16a6c49242ee6c10a3b9e1143452b9daeaed8837302a679fff","src/backend/libc/thread/syscalls.rs":"120cb13fe7ffdef7a52ca1d141d31bf189176e9f5862f87bb171d54370a682a9","src/backend/libc/time/mod.rs":"38563ea68829ca5a4b1b0695ac8a5c05718e85bdc88a36dc805efdfce45d3909","src/backend/libc/time/syscalls.rs":"f82e0725c5af8a52e61ee83aad2c77694f2f7a72ac1e6eb284109a70ac6edc38","src/backend/libc/time/types.rs":"d4e55d2b9fb8de772c8bf5bba157472c49a12c25502e62a9c599a2e6d3196a5b","src/backend/libc/ugid/mod.rs":"2c6478857a0751625edabd61acb841819bfba1093b1faeded15693c805d84952","src/backend/libc/ugid/syscalls.rs":"8edf91b8790add23902c9f5418da6b0723a371677f29f490e0c8af852f0f1a0c","src/backend/libc/winsock_c.rs":"3bf3884fd250eca806ffdf96da68e29c133a697810b78b333ea449e523e58562","src/backend/linux_raw/arch/aarch64.rs":"2a255c9135bc8a321c180f52b88eb2b158bc9170cd222149caeae63c24587d44","src/backend/linux_raw/arch/arm.rs":"165bccb5883d0136e55d42091183765f83d86e9d37a7cb2cec9ae8af32774db6","src/backend/linux_raw/arch/mips.rs":"24af364aa93fd4b9917639d473336490a143f0d1723b09f388e72d534160ee51","src/backend/linux_raw/arch/mips32r6.rs":"e436a2ade34f2f7c58b8924462b07a2499dfc951e1e1318d51759444fb8b658e","src/backend/linux_raw/arch/mips64.rs":"897da9ddc877963ad59464d7f81dc59df8a7e91251e6adea14cfd946e2740a1c","src/backend/linux_raw/arch/mips64r6.rs":"3c08aea13c1139fb0dfbe74ca3d0147f007c3aa2eda641afb82c46f6aa4f6cd1","src/backend/linux_raw/arch/mod.rs":"37eaeea601d39be00d170856a31a8ec6a27b0d3ac22ab3ee642ab0e45dee237d","src/backend/linux_raw/arch/powerpc64.rs":"dfb001f8636a5e46d728900b0804fe6c374e5e18a6f0e76d7d62e0c07da74477","src/backend/linux_raw/arch/riscv64.rs":"41d33242d941030f46077dc2b1bc4c7913fe7630d693a5a7eef966bcf38f9d8b","src/backend/linux_raw/arch/thumb.rs":"2fd979ab421248c0a4c592bc0cefee63edc26528f469b71b63eaed35356e42e8","src/backend/linux_raw/arch/x86.rs":"81c73c0a13925854b54db754f533dec565b1db2d9586e210a2450c6f5915171d","src/backend/linux_raw/arch/x86_64.rs":"e929036a1f3cf93ba538f4523b241605cc7b5e61f84ffe6d9d5cdbcb6f73e543","src/backend/linux_raw/c.rs":"74cdf34fcd5bba9628c04585fa13721073e2bffe6740315c7655d1a55150230f","src/backend/linux_raw/conv.rs":"3b452e19c6b8285df205baa790e19f32061952ae7c7c4e39ba01726de6beb4fb","src/backend/linux_raw/event/epoll.rs":"18980136ee6d3e327d6c46a33306fa934a80eaee609a329098935569507440d0","src/backend/linux_raw/event/mod.rs":"72e46b04637e2d1d2a6b97af616144995399e489d1fe916faf835d72fc8c64cd","src/backend/linux_raw/event/poll_fd.rs":"78d040f7ce0574fc18cebf6b7766c210a3254abdbafe175f51c6b460fa19c514","src/backend/linux_raw/event/syscalls.rs":"8782cdf978bff0773bf5f35c1056d5c29a40742cf6f99d9606d951b52596cd34","src/backend/linux_raw/event/types.rs":"2ffdcb728587dbc25d302527f4a52296b96013d5199c946291b059ba4f7d466f","src/backend/linux_raw/fs/dir.rs":"c675dc5413428d2defd6752e99d210da83639779e853db209de6a1c08d35e0e7","src/backend/linux_raw/fs/inotify.rs":"42a6ed7b091eee20d7fd9becfea240deb99d0b8ca3e8e083cf365d484d8eeeeb","src/backend/linux_raw/fs/makedev.rs":"c6b4505c4bcbbc2460e80f3097eb15e2c8ef38d6c6e7abd78e39c53c372139e2","src/backend/linux_raw/fs/mod.rs":"8689e46f9c4c3e1190ae5fe39176c573cccac10b3739472128ca237f41e2686a","src/backend/linux_raw/fs/syscalls.rs":"a4512f58e2ef01bf0799c96fa7821284866b6826594642565f46d93e566e2e12","src/backend/linux_raw/fs/types.rs":"c015cf3cde945ad4a42ec706c6e1140c7342041ace1da6e0ef1bbf5e9e019ce2","src/backend/linux_raw/io/errno.rs":"8f15d735435b722466696094aed8b612b8526658ab7e079f386037cf8a3b1a6e","src/backend/linux_raw/io/mod.rs":"7ae2324427892cca6f5ab53858d847b165f790a72ec25f3d99fb15f0506c9f27","src/backend/linux_raw/io/syscalls.rs":"026f99b397ae521ac4b2ba41acec6547018073a9da5fa355aa8f9ac61298c172","src/backend/linux_raw/io/types.rs":"0cfffdc2fc9c33faa4055f9686db5922676f17d5c069362bae3745f39da3594a","src/backend/linux_raw/io_uring/mod.rs":"2c6478857a0751625edabd61acb841819bfba1093b1faeded15693c805d84952","src/backend/linux_raw/io_uring/syscalls.rs":"0f7c9cb7ccddf5687e4b9e5b23558871a452a29ac6095a0184a06c47b9b18eb6","src/backend/linux_raw/mm/mod.rs":"8aa966faf3853d1a93d0ed91f7e5f4a53539b0287b25a5bfe489fa1d07f7cfd7","src/backend/linux_raw/mm/syscalls.rs":"7d43752e8abaa8cec02cd3329e08303c56a9e6ac97aec447293397b9953c3ba3","src/backend/linux_raw/mm/types.rs":"4407b4b54c4048bb1d4e702575e20037c24483a22f6a5cfcc940e4738fcff381","src/backend/linux_raw/mod.rs":"bb26fe4783f834a4624c99ecf9e4ea8fa4d1c7fe9adfb6d7310fac689e7797bc","src/backend/linux_raw/mount/mod.rs":"8aa966faf3853d1a93d0ed91f7e5f4a53539b0287b25a5bfe489fa1d07f7cfd7","src/backend/linux_raw/mount/syscalls.rs":"3947261b5d46b9737f02dc5352c3a3a35c63c461fd75bcd8ae6619dfc0bfb54d","src/backend/linux_raw/mount/types.rs":"8d39c4d6e555c88de5da3115a301562fafdcb3942aa69a0e01d42de8edb1c78f","src/backend/linux_raw/net/addr.rs":"fbb6c071a8ebc2c557c7013d36d4b540c1ff6ed63103d0a8abcacd8977fbf5c8","src/backend/linux_raw/net/mod.rs":"904b2ccd4228ebcda25db0223b92fb03e4042bcc7b0a202f19410a8a5ff517a5","src/backend/linux_raw/net/msghdr.rs":"3198339ccd00224aefdba74e9144818fe387222066a387a9dfd95fd8c48cf9d9","src/backend/linux_raw/net/read_sockaddr.rs":"24075ac4c05fab5fe44aae4445cdd12ec7e474f047150baa9b768741d6b9693d","src/backend/linux_raw/net/send_recv.rs":"85b1a78ecc920e919ce7a0a9f7ad79cba5ad09af4f53018df8c46c7c112bd7a9","src/backend/linux_raw/net/sockopt.rs":"2ea8625ecd68a799c14ed48dec2f3e01f085222b6cc36f4916b20143e08ae575","src/backend/linux_raw/net/syscalls.rs":"eb897e0074ec53c5b18594c39cbb9ec5bd796c9120bf029f0b05145a7eee1804","src/backend/linux_raw/net/write_sockaddr.rs":"0eeeb7ec8dadf95503a2f89fdc933b8f9c7e45e03fdb343ce490b6cc8eec0f96","src/backend/linux_raw/param/auxv.rs":"c1b1a628d6c51ce8adc981f58c6e08abba4a151356ff98cdbc0b3e75abeadb8a","src/backend/linux_raw/param/init.rs":"a5024f9afe361884f22fb4d65e632ccd8a60f1138ff6bd526df600d40e0d9b7e","src/backend/linux_raw/param/libc_auxv.rs":"e567f471ab21068141acb9fb2f88b9ba553f630945bc8f01b689955b48414512","src/backend/linux_raw/param/mod.rs":"2e6a1a1c00351b9c88bd615aa923f71d76208df5626dd9bea03067f28f81dc31","src/backend/linux_raw/pid/mod.rs":"2c6478857a0751625edabd61acb841819bfba1093b1faeded15693c805d84952","src/backend/linux_raw/pid/syscalls.rs":"eef6aa01830ddd510b83f507da2002c03e58318b73744be2c06ebbe33c4f194f","src/backend/linux_raw/pipe/mod.rs":"8aa966faf3853d1a93d0ed91f7e5f4a53539b0287b25a5bfe489fa1d07f7cfd7","src/backend/linux_raw/pipe/syscalls.rs":"366c730fc3e991bddb9f5a15b8c3917a8e6ace6d1d5a9113b2749e476faf6f83","src/backend/linux_raw/pipe/types.rs":"9d4bd3da7cd078d42574ad7d6b7554e301a1a7e97b292a77993a6d263726ef1e","src/backend/linux_raw/prctl/mod.rs":"2c6478857a0751625edabd61acb841819bfba1093b1faeded15693c805d84952","src/backend/linux_raw/prctl/syscalls.rs":"a18b224307e0d27bda9b7b85c75cd6c7ddfe39f5ce52efb7bb0bf0585b757343","src/backend/linux_raw/process/cpu_set.rs":"dfdcbdf35aff6a3e08e7d38193bf18c12ca8aa64eb0dc417667be82dcc0f7c55","src/backend/linux_raw/process/mod.rs":"fb393c70a9c63ef9a6bf1fb5a2dc94f07d6b0b6987cc5231c15c607015dafd68","src/backend/linux_raw/process/syscalls.rs":"3730c155663d727d5b239f57f220727ef33dd257e74a1b697274de0f94e1da79","src/backend/linux_raw/process/types.rs":"6811ba822bc12a1a6336649151b4adb1f5d3365684a31c07f01953ea9547743d","src/backend/linux_raw/process/wait.rs":"921aee4b0048746087f52615a98edc2aa0fb4b53d6df44be4533098df55d1b05","src/backend/linux_raw/pty/mod.rs":"2c6478857a0751625edabd61acb841819bfba1093b1faeded15693c805d84952","src/backend/linux_raw/pty/syscalls.rs":"ae09c4aecc0ae87b1ca58d82efc58007b9dddaae78460d615f48da19d1cd0f89","src/backend/linux_raw/rand/mod.rs":"8aa966faf3853d1a93d0ed91f7e5f4a53539b0287b25a5bfe489fa1d07f7cfd7","src/backend/linux_raw/rand/syscalls.rs":"a84f70251672e92f253838bf05e989434f67373233e88d21d8835cbb792b1fe3","src/backend/linux_raw/rand/types.rs":"b7217c98f2da4ecf89225f36eb52c396ba2a73debdeca739ba864a9030b6a23d","src/backend/linux_raw/reg.rs":"39b6234971122d247054bda8c2dc3b44493be30482635baa9e2fcbe048e78cbd","src/backend/linux_raw/runtime/mod.rs":"b2cae8cce3822c3c92942f06ea0b68464040dcac33c6f0f7ee392c6269993347","src/backend/linux_raw/runtime/syscalls.rs":"fc9c9edbe07dd7644ab75c51a1597a31467a3421301503a072bef93e6df07643","src/backend/linux_raw/runtime/tls.rs":"6316060560a112c2e9cd9807cdba6e8c91414113a04a739160929ae9a67bba1f","src/backend/linux_raw/shm/mod.rs":"8aa966faf3853d1a93d0ed91f7e5f4a53539b0287b25a5bfe489fa1d07f7cfd7","src/backend/linux_raw/shm/syscalls.rs":"3ebf91610b02de0312e1f0f0cc5d56e12b4d93794540087b3182cbdf3cb9c8db","src/backend/linux_raw/shm/types.rs":"76f587e0edc78e3b780b66ddf9bdf56d5bcb57822111e9f8f66a7eec07c2c0fa","src/backend/linux_raw/system/mod.rs":"8aa966faf3853d1a93d0ed91f7e5f4a53539b0287b25a5bfe489fa1d07f7cfd7","src/backend/linux_raw/system/syscalls.rs":"2af6c2672a6528b404b52f501d9803a63e9f47886d3b5d2d2dc5a71cefa7327a","src/backend/linux_raw/system/types.rs":"1ceab8d738a71043473b26e97fa3fd79d588a86d4774cbc9b9e1d4f1447a016e","src/backend/linux_raw/termios/mod.rs":"2c6478857a0751625edabd61acb841819bfba1093b1faeded15693c805d84952","src/backend/linux_raw/termios/syscalls.rs":"01c4c8028595d8f53197ac3f2accb83057cb80fb5f78db63bbf85eb46d40e1c9","src/backend/linux_raw/thread/futex.rs":"0aa0f9f5be6a79de81e82c9f11f1bf8831f682a4b2f6cb29669e1591636f084e","src/backend/linux_raw/thread/mod.rs":"6ad4a4b90b9234e79900b27ebbe8837e3a7a36aec532912e3e253edce5225067","src/backend/linux_raw/thread/syscalls.rs":"0e3dca63be7322e1e9c58456b28eb8548abb9f08b4436b87b139e891c01c446d","src/backend/linux_raw/time/mod.rs":"672724f55b7b7be6a7452bb1cc2d28b5f0aaa840a2856fe363acce624e1beefc","src/backend/linux_raw/time/syscalls.rs":"451da0bbb66accdf58ee7f2f8746a93b22ee985907a08717a3d3b1650457d878","src/backend/linux_raw/time/types.rs":"56cb936dc36af759e06d0b7bcfa7db02818dff6084c1d1d9a538a6d5945032d5","src/backend/linux_raw/ugid/mod.rs":"2c6478857a0751625edabd61acb841819bfba1093b1faeded15693c805d84952","src/backend/linux_raw/ugid/syscalls.rs":"8c86d251db33b399a1d1cbb8e87afe650b78db84f444d3251309b7a0480b54f7","src/backend/linux_raw/vdso.rs":"056314f72c71d4b041739337ca2098afb7fe070f844327f462c293f43481b75b","src/backend/linux_raw/vdso_wrappers.rs":"6bd4b8c15761e16defff7d2eed18c008c9f980ebee6c9bdc2e104b16b9b7c631","src/bitcast.rs":"e21c87c292c781b27256c6f5dcf9fd52dd69ed6e21016cbd67ac31a8219ebf8e","src/check_types.rs":"bc31f9b293bfbf272284ed4d0da24e941206a1ac3f18d3abb0b1a61f1ede1dae","src/clockid.rs":"1d2e1cfcf23160b55d6b046d235edf2eb2408f072a8bdef3e3a3871885abdd5a","src/cstr.rs":"41af2a4429fe6c67a8883f8a8f63d66c90df566abda74fd2b03bcef7b9f24218","src/event/eventfd.rs":"81cbd08f7bdf40a6ce1ca692b63da1dc8ba925282990668d9d68f1203e839fa1","src/event/kqueue.rs":"f13f40b97c4413902115408fc2f340e7c82bba42d2468015b007e8ac6ef23132","src/event/mod.rs":"326a7ddfeb33bb95dd31391d65f400fa622e5d0ab827c096cab16af4b6843b51","src/event/poll.rs":"0ee583dbd457a573a82a06c04a2a24bd2c76e751d27a435507d55338e2871327","src/event/port.rs":"4e51ff150e5d17cbd44aa64a38b99c15e26eaaf2e350768b5dcacdfde4fa5212","src/ffi.rs":"c2b8b38c02d72749aceb715c496726caba1f1fa989ad3856d0103a2fafed89ed","src/fs/abs.rs":"6f0b57878f61c009fd0a3698a3983aa8076ba4fe3b5c28feec0307d65a77891c","src/fs/at.rs":"9b5f35afcaca9dd19d501cee3ad305b1d1c1b588c5fd925b18b6d3b614f05011","src/fs/constants.rs":"24076a01f8bfc126b0905e9bc0521d2c3a3abc6c3b8c86ddb1e545070d097127","src/fs/copy_file_range.rs":"d3b644374390d482b2ff749a2459458872b57d0dcf9670368739b7833509a7c2","src/fs/cwd.rs":"9f429a79ace6e17455634da09216ee0ad3d067a4541518b3193ae6a8d9ff1e26","src/fs/dir.rs":"347a52f4ca9ac6321c52e802e97ec90d1b4c62ec955c8996fc17f8f5aed69966","src/fs/fadvise.rs":"beef66ebe1310fb92628240b2cde68f744c78e50f6ff61bb1404bd4868d9cae8","src/fs/fcntl.rs":"2085102d05d0ba963e100ab3e3f19dac4ff27d142fbf798626d20a2a596ba96d","src/fs/fcntl_apple.rs":"e2f23f038083621bcdecc98d02ce1023508afaecdb2ed0fba5c8b70f955301e5","src/fs/fcopyfile.rs":"ec95929cbbe02cf49233a785e5238931bb107b7903cc5bc95e4231de960995f2","src/fs/fd.rs":"68d19bb99da0444444acbafd27ba10a69fc2ac35ede4e0106ac2043c30ce4f8f","src/fs/file_type.rs":"fefd865f91011f66126213b0994773d99e373b468c31e866002228c98c64ad85","src/fs/getpath.rs":"28f6970fc1bbc37bb35c84724b59eac436ea7407a4522e18c2bdacb1fdd2edd9","src/fs/id.rs":"1b5c8a8baf9a9bb1f895f97189cea4e5982a0d35b192afeec6340a6c6222e0cb","src/fs/ioctl.rs":"71327efab1e73c7844c681ea7e822b5124738196b2fc687aabc786b90be9fe32","src/fs/makedev.rs":"85520b484cb7c15ab71ea1c368578ea3b7e484d82f8510db92b6ce9f7ca341ae","src/fs/memfd_create.rs":"15a8f28e040cffd8c24c7903483440f88853b2e538ad48d80f3c00b4b2befdea","src/fs/mod.rs":"ecd20bf01d651b1866b198c4fc0dc8e7d681eb581e28708c8a0abd062de137ce","src/fs/mount.rs":"8f6ea2b997dd83c50c90291b9ada3ed77a9ce1ad701c9b0d533b5113b317be5b","src/fs/openat2.rs":"4a95c15dab533a41201b5fa25c8a212956b7571d58cad696bdaf45af8aef96db","src/fs/raw_dir.rs":"18ad797876d6230c38d4cacbed081c4028e03ace477d0520d9b226b40de942f0","src/fs/seek_from.rs":"a9efa0feb9ac789cf47667e91efee2e3f2dcde16cb3b7a928c99da640fa0e0d6","src/fs/sendfile.rs":"e3b2058741cf4b1698f34d84bb37130cf2b72806d522a16fe541e832cde136cb","src/fs/statx.rs":"c5192f54311faeb5a8e7ba7da1acd4fa5a8f69f6c15c4d78b4b7a495d21d0864","src/fs/sync.rs":"a3b23543834281f347b0f873bd38154d31d404871188ac08f2b20b9196234cfd","src/fs/xattr.rs":"b976eece822f69e768d91e61d86ee990ea18097b20f7ebeedaaf718c61b48962","src/io/close.rs":"0aa3cd05a8fed8e5244f97b8b6c2e7f65ed93a4e5435c6329852bb3da7514440","src/io/dup.rs":"a8a59c5d345dc54c57ded890720c33eb78c4d53917c71e8bb6317f7ed122cb87","src/io/errno.rs":"58a4d20ba0924e4d514e3c876fbe08982f1623187642ae14780815e65989c8c8","src/io/fcntl.rs":"c0f7bd7fce1119b0c1d0085b7ab77d5df02470ae3e06035428a2452dacbec296","src/io/ioctl.rs":"a47a5ec14607142f2c2ffcb93d016b8e86c8b15ba94b56b35382b545db62ea34","src/io/is_read_write.rs":"1bfb9ee5d58e0b29b44af12fe2668c7bccc841358698dcde47f1519ff9bb73b4","src/io/mod.rs":"75f1d0646be1d4c7c08b5887d8119b0103be8c25c43ccd4e0e97015508c0bb8f","src/io/read_write.rs":"54ba528b11601af1338bb0c71a41b256a0033076d30b3946c3fd0bdfa61dd021","src/io_uring.rs":"61e6dce7a53ce141604b5f577e20ec5aa73a20e2c2101825c7c52396ba221886","src/ioctl/bsd.rs":"e33772530b1b2d1e7d61ce53d9267e68d88f7f5df13301f90412d89a46c6faaa","src/ioctl/linux.rs":"256aef9fb51c67cd7f43a581b702aa62757e3bdd7c38f71bf849f818194c2285","src/ioctl/mod.rs":"739a4f6d4e9a816745e42133c1b4c51c1285bf5cde3e1ee3ea72ef0b802e3806","src/ioctl/patterns.rs":"26dda154aabf67d45362d9378875f95c1593391ecbc62be02d187b7e19e7f9da","src/lib.rs":"7430fd1db444a72dc8f9b762f2fb61226dd00c89c7c932481e94958f60678383","src/maybe_polyfill/no_std/io/mod.rs":"77889bb5c5a4f2e50e38379cdaa5d0fef4b0cafc3da056735df01f6deae75747","src/maybe_polyfill/no_std/mod.rs":"ec94a4aab4bc475785e469d10fd6bc95667e1d47d958e9cff3a19049d88c8c80","src/maybe_polyfill/no_std/net/ip_addr.rs":"046327ee244f758f2bc31d3be305d8cd0dfd8342aac1add8259e999b4b46c4a7","src/maybe_polyfill/no_std/net/mod.rs":"b0ee611c454679226a15bf647e7779995f3fe9c8e0507930a0d0613eb414b7c2","src/maybe_polyfill/no_std/net/socket_addr.rs":"bfeb32d32c176cde76323abcffebfc47e9898fb8d7ce3668c602dc8451086a2d","src/maybe_polyfill/no_std/os/fd/mod.rs":"27ef0afbcb0695cbb15101070f417eb51e0ef85ae66ec967d95e80771d507c47","src/maybe_polyfill/no_std/os/fd/owned.rs":"ab86ffa2693a04f3085770faf395f95e5303001711be8b19c44a47a0ac574091","src/maybe_polyfill/no_std/os/fd/raw.rs":"f3648c7bd4a6ff94bd823ed9e0d99d398e02f24875cf9b25962736999e7c6943","src/maybe_polyfill/no_std/os/mod.rs":"27dab639a765827644005d5f2fcc7c825310606b889cc8dd83f54c9528350dc0","src/maybe_polyfill/no_std/os/windows/io/mod.rs":"5bbcc05c83fee5026dd744a994e0458469466d5be39081baa62df07753b92fd2","src/maybe_polyfill/no_std/os/windows/io/raw.rs":"4c32609a489dd938a49328b5637cb3bafb96437f2f9f269ab66d7d3cb90247f6","src/maybe_polyfill/no_std/os/windows/io/socket.rs":"c658f42f24eff44a661f2adfd24a11af80fe9897f3e2af4dc5d2c64808308d65","src/maybe_polyfill/no_std/os/windows/mod.rs":"fdb416f8f231a4e778b5f985b9ae712ece5e1a1402963ad1a5f6a8b9843795f4","src/maybe_polyfill/std/mod.rs":"7c16c86cc73e226e65ead598e4018238b22000a345040b706bf1e1b3eba115fc","src/mm/madvise.rs":"3c262b3713a73fafcedf1b04bb12c048bb11d47ca43c959e5dfa48c27651f4f0","src/mm/mmap.rs":"28523bceb2b7e3bc1207f351d8a1771867159e1b9ff1d9c37fbdc05aeb696b50","src/mm/mod.rs":"b3a6cb838986d45825b912355cedead761211a494ca6f89b2367a2d2157e340e","src/mm/msync.rs":"9dcfe5f54235e9314a595edb8d548ac79d222bbcc58bb3263cf7e96d603b23ad","src/mm/userfaultfd.rs":"8073443bd181ff0b3ba4d0b1ae67370b4864035a0c8b4898cd709dc47c518ae7","src/mount/fsopen.rs":"160e384e9175fd98669cda1cf3590bb195c2ba7e1c724e9ea06e692595e58ba1","src/mount/mod.rs":"5f0c9df4727592695deb1cd63ae1de021b03dcd9d0d1b68e1f34d12a7136cb19","src/mount/mount_unmount.rs":"8ad11675e5d762d33fbefbed06a6a9f9e52a9b689bd06662446152614321ab77","src/mount/types.rs":"601ae3e10b7dc496fed7f3b40a80e81c6edd7bf13189d7be45c3212d4c684c39","src/net/mod.rs":"a6bc55f9e086caf46a7c00783498d73a328a66f2a991f1ec65d5f13931377b0f","src/net/send_recv/mod.rs":"f33e39c7b228cd8109823b0a0a1aa397cddad504d49e69b36f74c5b84e5070e5","src/net/send_recv/msg.rs":"6a73e4fd2b78fbeb683f48bcd8404a362245c6cd66e0d59e6378467600b44268","src/net/socket.rs":"bc825a94831557bf4c2cabd8ef5ae21ffe0986c24f9ada564eefa66270f69ae9","src/net/socket_addr_any.rs":"a9af81e967a91b45e51aec4f46a068fade7035c5d19dfaf05bfdcd3b3c32e9bf","src/net/socketpair.rs":"56f4885c31d2664cd16e18a9a88792a4912fedd953cec36dba67e8581fd57921","src/net/sockopt.rs":"58fbabf73ede7afaeac279241ff874dcee5604f63b3f38e4435a61a354da2196","src/net/types.rs":"5914e786d6a24a6efc355023fd71a28bc29fc85fb672e5388370243fe83e4191","src/net/wsa.rs":"6e546b42f50a851fc833c57cda76cfb347203ed4b0dea574a3d325bf5a2ebf80","src/param/auxv.rs":"886e57d52adf0199b8146f849c2db1e3136c07fc4ec61f60dc781fea11070604","src/param/init.rs":"a31c0e5cea61a1a999767fe74f87c0d59eeb6bce66578b842fe0e0c32be27a55","src/param/mod.rs":"25b10acd5b1da8faa6f5204e6b0379b38bfab667916e886cca64bea01a42dec2","src/path/arg.rs":"b1329485ad525e8d067792b0890a1241c20f6c08d29f0661f2a9f429581a7818","src/path/dec_int.rs":"fad9793b89eac526953b994cbed6b614f01c25108f9763e19fb98029feda93a4","src/path/mod.rs":"6b1b949c94bcc47e0f08a3f8e8db5b61ff497d0dfd3e0655f51c01d3e4b7dfd6","src/pid.rs":"f1c486000c5b1311b2d720cee88f089c17ef9a171709673dd06e6f35f4ff98a3","src/pipe.rs":"fd021deeacf78790886ae03f1e8f0fe5a13909c7532b2246581667eb3d0483d7","src/prctl.rs":"c9daa9397460a27ed6e31fc454fc40dff83720312a002b66f4c178fc005379ec","src/process/chdir.rs":"6947b80d468d906d528de328f30fe7509acfe4976a0de3fa600a7cc39618c049","src/process/chroot.rs":"2b5f6124eb19f26ad2705174f7ad50cdc0a5d15abd59ffcf55421228d82130b4","src/process/exit.rs":"48de66e5504a00cb375d8f415ce63b6225a3f5204268d40726a7d0fbba43f587","src/process/id.rs":"e4733f9e8e4b5f50e98ef7a23802e126f1f14ece8b3d7ae7446c6a66affc6bc1","src/process/ioctl.rs":"23ad0285671e8d7ca71a63c50655dbf732ccea8af11d754a0558e0236db37e76","src/process/kill.rs":"7b879e5cff8a064acd0e7488f4e21bd4e4a8506ce380688b72cc48d283ff9c89","src/process/membarrier.rs":"d6c8821ec73019040c926f4e1018f399c286e87074ab9c6692370c88772af044","src/process/mod.rs":"0ef104be820068409648ed83739a51a7dcf07612088f9a6ed6c7ebdb6ad54092","src/process/pidfd.rs":"4be2fba21430ba84244e11c636c91201bc1d1dffff3e63a4da84cc96f501786b","src/process/pidfd_getfd.rs":"cbc42a1548280ae76c3c66e851fd53cc2a3d4c089cb5798c17a2cdf654f0e229","src/process/prctl.rs":"3396b39a0f161385f02f6a4a17db0a3fcd2d70f4d6d2e1d8d1b9091b36d5704c","src/process/priority.rs":"f135482e71ea8aa0daf92b9f238051178a4c904070fa8409622f94155df3c544","src/process/procctl.rs":"0ca2f5fd4cd39335c141758cac132a3271e318c688f8882e527f58c02d5ab0e2","src/process/rlimit.rs":"10b79de3ced0e64059a94c879742d46a35a6176c776d8eed75031d5e6340283d","src/process/sched.rs":"7c3bfc5be624e7d7f5e043c3ee0b0566fcab3d684d61c272e7b4233410ab1c42","src/process/sched_yield.rs":"6565faa3928b66ddc74a65e893e15edfa4b9be4f7e5f5f68527501a7f6bc3350","src/process/umask.rs":"1a0f31a842303c978e3f05ec191e2b5e96104c09c6596473b42b1fac34898a50","src/process/wait.rs":"cef08e74f5d87df925fa6a6acc95e8de0e689fb420ba514b9fbf94a9ca403465","src/procfs.rs":"63b286dd3302be7f426841eb3b9261ef4785c3159ed78c24734bd5094c9b0b2e","src/pty.rs":"ee697b226230b65c0432a42cd82b2fad885ca70285dbb0a7b8ec0ff81d23e3a0","src/rand/getrandom.rs":"1c8166a02a74f5593bb4673ef907524df04cbc1568020a5ab2ff7f4aa1283f8b","src/rand/mod.rs":"cab59332aadd9b679f5b22cbb222d48ee028af5eb9fd4a4d43922da659b895d7","src/runtime.rs":"9aed98071bf8124c6913e8517b28ef63c7b88001042f0522d877ab65ac88099a","src/shm.rs":"b96fe8a05ee5d4536464a8843a776d43a938abaf22c772fc35b5373d95644a8d","src/signal.rs":"c071b4f011deef19a679d7a832d5408a3cd68627161d6510008d6312266a2611","src/static_assertions.rs":"39986672f489949be1d6469f0c30fb7d2eaa21bdaa2702a8c150b2b150bf5535","src/stdio.rs":"a5de2d7d9c3c5a901f88b6acf4754687c958a2f3a93c7945c2b8fcb948d468af","src/system.rs":"e594c16b4600a8ebea01c5850cd9e1521acf242f1a9f083403cfe95adcde4679","src/termios/ioctl.rs":"a1ac967f7811a482f8bb53847c37c8359f518cd26da9df7b816ba678a0139623","src/termios/mod.rs":"b44b7caa60b6f458657ed58a0e0eca41bb4e6d6be4b0f042bbb8ab7056cebe4b","src/termios/tc.rs":"e41312d15464b83b2457c2502fc3f3b9998cfb02ba68739026dd4285cc7130ac","src/termios/tty.rs":"906ee160ffb5ba3a92ea3a61374cf2cd617cebae51f5490073b6700c438d5eee","src/termios/types.rs":"8c4f029e8dbb9b2c59082bd13d628a741f6f0a31f5ecf485c93795b73c1daeca","src/thread/clock.rs":"469326c822dfb63405ee8537552cedde0b344978280e6645bbadd47dedc71e18","src/thread/futex.rs":"985f9a0dce1e2a4892ae7f26bd1bf119ceae3f9fa6b4707e166624fc1cee76d4","src/thread/id.rs":"ad72db4fea9fccb728310bbfd01ef8c00f6cc60fa2a750f6349646a134f7009b","src/thread/libcap.rs":"a3e316e6d0f58b075125fa939b9b824e1595de20a23d341a36999416b0d10d36","src/thread/mod.rs":"98634ece0b882f123ad887017692f2a4d94a23a1dec278ed660b3497cac5cceb","src/thread/prctl.rs":"10e2878ef856557bc5bd24c77f5699266e746f912e73690980371cc99c7417e8","src/thread/setns.rs":"730badd8db8ce7b905767b341211f1226eeeb38019727bf994eb64f2531b7b7f","src/time/clock.rs":"e59a29f1bed8c31c3d5b6fad60f2d4fa6cab8dd8e86148bb3693a5e3a1ce735f","src/time/mod.rs":"43afee938c80d124d04d4ba190c03f4d21d1e3bfc154fff309211e4f6eabe940","src/time/timerfd.rs":"f17092b84553741aa2d2b44c6992b5d2c8c96cc2c2007fc9a2c6b2064485e53f","src/timespec.rs":"32a4d930cbc0f6dbd23153290db920671cf4ce65a4a127e176f897c1cde42d7d","src/ugid.rs":"6616c6e35b7e43aee5b150f1efae7a50711e0947943c9a96833dbe214ad9e85f","src/utils.rs":"9ae76f8a41d6cc350cdd58c9084b5c3a5a708eeecd769783debdbcbaef442182","src/weak.rs":"c7cf03bf2aeba494b1999ab32183fa8c603ab72e254c0e312a67f168877e410d"},"package":"745ecfa778e66b2b63c88a61cb36e0eea109e803b0b86bf9879fbc77c70e86ed"}
+--- a/vendor/rustix-0.38.19/src/fs/ioctl.rs
++++ b/vendor/rustix-0.38.19/src/fs/ioctl.rs
+@@ -48,7 +48,7 @@ pub fn ioctl_blkpbszget<Fd: AsFd>(fd: Fd
+ /// - [Linux]
+ ///
+ /// [Linux]: https://man7.org/linux/man-pages/man2/ioctl_ficlone.2.html
+-#[cfg(all(linux_kernel, not(any(target_arch = "sparc", target_arch = "sparc64"))))]
++#[cfg(all(linux_kernel, not(any(target_arch = "sparc", target_arch = "sparc64", target_arch = "riscv32"))))]
+ #[inline]
+ #[doc(alias = "FICLONE")]
+ pub fn ioctl_ficlone<Fd: AsFd, SrcFd: AsFd>(fd: Fd, src_fd: SrcFd) -> io::Result<()> {
+@@ -77,7 +77,7 @@ unsafe impl ioctl::Ioctl for Ficlone<'_>
+ type Output = ();
+
+ const IS_MUTATING: bool = false;
+- const OPCODE: ioctl::Opcode = ioctl::Opcode::old(c::FICLONE as ioctl::RawOpcode);
++ const OPCODE: ioctl::Opcode = ioctl::Opcode::old(linux_raw_sys::ioctl::FICLONE as ioctl::RawOpcode);
+
+ fn as_ptr(&mut self) -> *mut c::c_void {
+ self.0.as_raw_fd() as *mut c::c_void
+--- a/vendor/rustix-0.38.19/src/backend/libc/c.rs
++++ b/vendor/rustix-0.38.19/src/backend/libc/c.rs
+@@ -6,6 +6,8 @@
+ // things below.
+ pub(crate) use libc::*;
+
++use linux_raw_sys::ioctl::FICLONE;
++
+ /// `PROC_SUPER_MAGIC`—The magic number for the procfs filesystem.
+ #[cfg(all(linux_kernel, target_env = "musl"))]
+ pub(crate) const PROC_SUPER_MAGIC: u32 = 0x0000_9fa0;
diff --git a/meta/recipes-devtools/rust/files/rv32-missing-syscalls.patch b/meta/recipes-devtools/rust/files/rv32-missing-syscalls.patch
new file mode 100644
index 0000000000..ccadbb9b91
--- /dev/null
+++ b/meta/recipes-devtools/rust/files/rv32-missing-syscalls.patch
@@ -0,0 +1,1503 @@
+Backport rv32 support
+
+Upstream-Status: Backport [https://github.com/sunfishcode/linux-raw-sys/commit/6f86540e73bd45c9d13730ba0121d1820db0eeee]
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+--- a/vendor/linux-raw-sys/src/riscv32/ioctl.rs
++++ b/vendor/linux-raw-sys/src/riscv32/ioctl.rs
+@@ -1,3 +1,1489 @@
+ /* automatically generated by rust-bindgen 0.66.1 */
+
+-
++pub const FIONREAD: u32 = 21531;
++pub const FIONBIO: u32 = 21537;
++pub const FIOCLEX: u32 = 21585;
++pub const FIONCLEX: u32 = 21584;
++pub const FIOASYNC: u32 = 21586;
++pub const FIOQSIZE: u32 = 21600;
++pub const TCXONC: u32 = 21514;
++pub const TCFLSH: u32 = 21515;
++pub const TIOCSCTTY: u32 = 21518;
++pub const TIOCSPGRP: u32 = 21520;
++pub const TIOCOUTQ: u32 = 21521;
++pub const TIOCSTI: u32 = 21522;
++pub const TIOCSWINSZ: u32 = 21524;
++pub const TIOCMGET: u32 = 21525;
++pub const TIOCMBIS: u32 = 21526;
++pub const TIOCMBIC: u32 = 21527;
++pub const TIOCMSET: u32 = 21528;
++pub const TIOCSSOFTCAR: u32 = 21530;
++pub const TIOCLINUX: u32 = 21532;
++pub const TIOCCONS: u32 = 21533;
++pub const TIOCSSERIAL: u32 = 21535;
++pub const TIOCPKT: u32 = 21536;
++pub const TIOCNOTTY: u32 = 21538;
++pub const TIOCSETD: u32 = 21539;
++pub const TIOCSBRK: u32 = 21543;
++pub const TIOCCBRK: u32 = 21544;
++pub const TIOCSRS485: u32 = 21551;
++pub const TIOCSPTLCK: u32 = 1074025521;
++pub const TIOCSIG: u32 = 1074025526;
++pub const TIOCVHANGUP: u32 = 21559;
++pub const TIOCSERCONFIG: u32 = 21587;
++pub const TIOCSERGWILD: u32 = 21588;
++pub const TIOCSERSWILD: u32 = 21589;
++pub const TIOCSLCKTRMIOS: u32 = 21591;
++pub const TIOCSERGSTRUCT: u32 = 21592;
++pub const TIOCSERGETLSR: u32 = 21593;
++pub const TIOCSERGETMULTI: u32 = 21594;
++pub const TIOCSERSETMULTI: u32 = 21595;
++pub const TIOCMIWAIT: u32 = 21596;
++pub const TCGETS: u32 = 21505;
++pub const TCGETA: u32 = 21509;
++pub const TCSBRK: u32 = 21513;
++pub const TCSBRKP: u32 = 21541;
++pub const TCSETA: u32 = 21510;
++pub const TCSETAF: u32 = 21512;
++pub const TCSETAW: u32 = 21511;
++pub const TIOCEXCL: u32 = 21516;
++pub const TIOCNXCL: u32 = 21517;
++pub const TIOCGDEV: u32 = 2147767346;
++pub const TIOCGEXCL: u32 = 2147767360;
++pub const TIOCGICOUNT: u32 = 21597;
++pub const TIOCGLCKTRMIOS: u32 = 21590;
++pub const TIOCGPGRP: u32 = 21519;
++pub const TIOCGPKT: u32 = 2147767352;
++pub const TIOCGPTLCK: u32 = 2147767353;
++pub const TIOCGPTN: u32 = 2147767344;
++pub const TIOCGPTPEER: u32 = 21569;
++pub const TIOCGRS485: u32 = 21550;
++pub const TIOCGSERIAL: u32 = 21534;
++pub const TIOCGSID: u32 = 21545;
++pub const TIOCGSOFTCAR: u32 = 21529;
++pub const TIOCGWINSZ: u32 = 21523;
++pub const TCGETS2: u32 = 2150388778;
++pub const TCGETX: u32 = 21554;
++pub const TCSETS: u32 = 21506;
++pub const TCSETS2: u32 = 1076646955;
++pub const TCSETSF: u32 = 21508;
++pub const TCSETSF2: u32 = 1076646957;
++pub const TCSETSW: u32 = 21507;
++pub const TCSETSW2: u32 = 1076646956;
++pub const TCSETX: u32 = 21555;
++pub const TCSETXF: u32 = 21556;
++pub const TCSETXW: u32 = 21557;
++pub const TIOCGETD: u32 = 21540;
++pub const MTIOCGET: u32 = 2150657282;
++pub const BLKSSZGET: u32 = 4712;
++pub const BLKPBSZGET: u32 = 4731;
++pub const BLKROSET: u32 = 4701;
++pub const BLKROGET: u32 = 4702;
++pub const BLKRRPART: u32 = 4703;
++pub const BLKGETSIZE: u32 = 4704;
++pub const BLKFLSBUF: u32 = 4705;
++pub const BLKRASET: u32 = 4706;
++pub const BLKRAGET: u32 = 4707;
++pub const BLKFRASET: u32 = 4708;
++pub const BLKFRAGET: u32 = 4709;
++pub const BLKSECTSET: u32 = 4710;
++pub const BLKSECTGET: u32 = 4711;
++pub const BLKPG: u32 = 4713;
++pub const BLKBSZGET: u32 = 2148012656;
++pub const BLKBSZSET: u32 = 1074270833;
++pub const BLKGETSIZE64: u32 = 2148012658;
++pub const BLKTRACESETUP: u32 = 3225948787;
++pub const BLKTRACESTART: u32 = 4724;
++pub const BLKTRACESTOP: u32 = 4725;
++pub const BLKTRACETEARDOWN: u32 = 4726;
++pub const BLKDISCARD: u32 = 4727;
++pub const BLKIOMIN: u32 = 4728;
++pub const BLKIOOPT: u32 = 4729;
++pub const BLKALIGNOFF: u32 = 4730;
++pub const BLKDISCARDZEROES: u32 = 4732;
++pub const BLKSECDISCARD: u32 = 4733;
++pub const BLKROTATIONAL: u32 = 4734;
++pub const BLKZEROOUT: u32 = 4735;
++pub const UFFDIO_REGISTER: u32 = 3223366144;
++pub const UFFDIO_UNREGISTER: u32 = 2148575745;
++pub const UFFDIO_WAKE: u32 = 2148575746;
++pub const UFFDIO_COPY: u32 = 3223890435;
++pub const UFFDIO_ZEROPAGE: u32 = 3223366148;
++pub const UFFDIO_WRITEPROTECT: u32 = 3222841862;
++pub const UFFDIO_API: u32 = 3222841919;
++pub const NS_GET_USERNS: u32 = 46849;
++pub const NS_GET_PARENT: u32 = 46850;
++pub const NS_GET_NSTYPE: u32 = 46851;
++pub const KDGETLED: u32 = 19249;
++pub const KDSETLED: u32 = 19250;
++pub const KDGKBLED: u32 = 19300;
++pub const KDSKBLED: u32 = 19301;
++pub const KDGKBTYPE: u32 = 19251;
++pub const KDADDIO: u32 = 19252;
++pub const KDDELIO: u32 = 19253;
++pub const KDENABIO: u32 = 19254;
++pub const KDDISABIO: u32 = 19255;
++pub const KDSETMODE: u32 = 19258;
++pub const KDGETMODE: u32 = 19259;
++pub const KDMKTONE: u32 = 19248;
++pub const KIOCSOUND: u32 = 19247;
++pub const GIO_CMAP: u32 = 19312;
++pub const PIO_CMAP: u32 = 19313;
++pub const GIO_FONT: u32 = 19296;
++pub const GIO_FONTX: u32 = 19307;
++pub const PIO_FONT: u32 = 19297;
++pub const PIO_FONTX: u32 = 19308;
++pub const PIO_FONTRESET: u32 = 19309;
++pub const GIO_SCRNMAP: u32 = 19264;
++pub const GIO_UNISCRNMAP: u32 = 19305;
++pub const PIO_SCRNMAP: u32 = 19265;
++pub const PIO_UNISCRNMAP: u32 = 19306;
++pub const GIO_UNIMAP: u32 = 19302;
++pub const PIO_UNIMAP: u32 = 19303;
++pub const PIO_UNIMAPCLR: u32 = 19304;
++pub const KDGKBMODE: u32 = 19268;
++pub const KDSKBMODE: u32 = 19269;
++pub const KDGKBMETA: u32 = 19298;
++pub const KDSKBMETA: u32 = 19299;
++pub const KDGKBENT: u32 = 19270;
++pub const KDSKBENT: u32 = 19271;
++pub const KDGKBSENT: u32 = 19272;
++pub const KDSKBSENT: u32 = 19273;
++pub const KDGKBDIACR: u32 = 19274;
++pub const KDGETKEYCODE: u32 = 19276;
++pub const KDSETKEYCODE: u32 = 19277;
++pub const KDSIGACCEPT: u32 = 19278;
++pub const VT_OPENQRY: u32 = 22016;
++pub const VT_GETMODE: u32 = 22017;
++pub const VT_SETMODE: u32 = 22018;
++pub const VT_GETSTATE: u32 = 22019;
++pub const VT_RELDISP: u32 = 22021;
++pub const VT_ACTIVATE: u32 = 22022;
++pub const VT_WAITACTIVE: u32 = 22023;
++pub const VT_DISALLOCATE: u32 = 22024;
++pub const VT_RESIZE: u32 = 22025;
++pub const VT_RESIZEX: u32 = 22026;
++pub const FIOSETOWN: u32 = 35073;
++pub const SIOCSPGRP: u32 = 35074;
++pub const FIOGETOWN: u32 = 35075;
++pub const SIOCGPGRP: u32 = 35076;
++pub const SIOCATMARK: u32 = 35077;
++pub const SIOCGSTAMP: u32 = 35078;
++pub const TIOCINQ: u32 = 21531;
++pub const SIOCADDRT: u32 = 35083;
++pub const SIOCDELRT: u32 = 35084;
++pub const SIOCGIFNAME: u32 = 35088;
++pub const SIOCSIFLINK: u32 = 35089;
++pub const SIOCGIFCONF: u32 = 35090;
++pub const SIOCGIFFLAGS: u32 = 35091;
++pub const SIOCSIFFLAGS: u32 = 35092;
++pub const SIOCGIFADDR: u32 = 35093;
++pub const SIOCSIFADDR: u32 = 35094;
++pub const SIOCGIFDSTADDR: u32 = 35095;
++pub const SIOCSIFDSTADDR: u32 = 35096;
++pub const SIOCGIFBRDADDR: u32 = 35097;
++pub const SIOCSIFBRDADDR: u32 = 35098;
++pub const SIOCGIFNETMASK: u32 = 35099;
++pub const SIOCSIFNETMASK: u32 = 35100;
++pub const SIOCGIFMETRIC: u32 = 35101;
++pub const SIOCSIFMETRIC: u32 = 35102;
++pub const SIOCGIFMEM: u32 = 35103;
++pub const SIOCSIFMEM: u32 = 35104;
++pub const SIOCGIFMTU: u32 = 35105;
++pub const SIOCSIFMTU: u32 = 35106;
++pub const SIOCSIFHWADDR: u32 = 35108;
++pub const SIOCGIFENCAP: u32 = 35109;
++pub const SIOCSIFENCAP: u32 = 35110;
++pub const SIOCGIFHWADDR: u32 = 35111;
++pub const SIOCGIFSLAVE: u32 = 35113;
++pub const SIOCSIFSLAVE: u32 = 35120;
++pub const SIOCADDMULTI: u32 = 35121;
++pub const SIOCDELMULTI: u32 = 35122;
++pub const SIOCDARP: u32 = 35155;
++pub const SIOCGARP: u32 = 35156;
++pub const SIOCSARP: u32 = 35157;
++pub const SIOCDRARP: u32 = 35168;
++pub const SIOCGRARP: u32 = 35169;
++pub const SIOCSRARP: u32 = 35170;
++pub const SIOCGIFMAP: u32 = 35184;
++pub const SIOCSIFMAP: u32 = 35185;
++pub const SIOCRTMSG: u32 = 35085;
++pub const SIOCSIFNAME: u32 = 35107;
++pub const SIOCGIFINDEX: u32 = 35123;
++pub const SIOGIFINDEX: u32 = 35123;
++pub const SIOCSIFPFLAGS: u32 = 35124;
++pub const SIOCGIFPFLAGS: u32 = 35125;
++pub const SIOCDIFADDR: u32 = 35126;
++pub const SIOCSIFHWBROADCAST: u32 = 35127;
++pub const SIOCGIFCOUNT: u32 = 35128;
++pub const SIOCGIFBR: u32 = 35136;
++pub const SIOCSIFBR: u32 = 35137;
++pub const SIOCGIFTXQLEN: u32 = 35138;
++pub const SIOCSIFTXQLEN: u32 = 35139;
++pub const SIOCADDDLCI: u32 = 35200;
++pub const SIOCDELDLCI: u32 = 35201;
++pub const SIOCDEVPRIVATE: u32 = 35312;
++pub const SIOCPROTOPRIVATE: u32 = 35296;
++pub const FIBMAP: u32 = 1;
++pub const FIGETBSZ: u32 = 2;
++pub const FIFREEZE: u32 = 3221510263;
++pub const FITHAW: u32 = 3221510264;
++pub const FITRIM: u32 = 3222820985;
++pub const FICLONE: u32 = 1074041865;
++pub const FICLONERANGE: u32 = 1075876877;
++pub const FIDEDUPERANGE: u32 = 3222836278;
++pub const FS_IOC_GETFLAGS: u32 = 2148034049;
++pub const FS_IOC_SETFLAGS: u32 = 1074292226;
++pub const FS_IOC_GETVERSION: u32 = 2148038145;
++pub const FS_IOC_SETVERSION: u32 = 1074296322;
++pub const FS_IOC_FIEMAP: u32 = 3223348747;
++pub const FS_IOC32_GETFLAGS: u32 = 2147771905;
++pub const FS_IOC32_SETFLAGS: u32 = 1074030082;
++pub const FS_IOC32_GETVERSION: u32 = 2147776001;
++pub const FS_IOC32_SETVERSION: u32 = 1074034178;
++pub const FS_IOC_FSGETXATTR: u32 = 2149341215;
++pub const FS_IOC_FSSETXATTR: u32 = 1075599392;
++pub const FS_IOC_GETFSLABEL: u32 = 2164298801;
++pub const FS_IOC_SETFSLABEL: u32 = 1090556978;
++pub const EXT4_IOC_GETVERSION: u32 = 2148034051;
++pub const EXT4_IOC_SETVERSION: u32 = 1074292228;
++pub const EXT4_IOC_GETVERSION_OLD: u32 = 2148038145;
++pub const EXT4_IOC_SETVERSION_OLD: u32 = 1074296322;
++pub const EXT4_IOC_GETRSVSZ: u32 = 2148034053;
++pub const EXT4_IOC_SETRSVSZ: u32 = 1074292230;
++pub const EXT4_IOC_GROUP_EXTEND: u32 = 1074292231;
++pub const EXT4_IOC_MIGRATE: u32 = 26121;
++pub const EXT4_IOC_ALLOC_DA_BLKS: u32 = 26124;
++pub const EXT4_IOC_RESIZE_FS: u32 = 1074292240;
++pub const EXT4_IOC_SWAP_BOOT: u32 = 26129;
++pub const EXT4_IOC_PRECACHE_EXTENTS: u32 = 26130;
++pub const EXT4_IOC_CLEAR_ES_CACHE: u32 = 26152;
++pub const EXT4_IOC_GETSTATE: u32 = 1074030121;
++pub const EXT4_IOC_GET_ES_CACHE: u32 = 3223348778;
++pub const EXT4_IOC_CHECKPOINT: u32 = 1074030123;
++pub const EXT4_IOC_SHUTDOWN: u32 = 2147768445;
++pub const EXT4_IOC32_GETVERSION: u32 = 2147771907;
++pub const EXT4_IOC32_SETVERSION: u32 = 1074030084;
++pub const EXT4_IOC32_GETRSVSZ: u32 = 2147771909;
++pub const EXT4_IOC32_SETRSVSZ: u32 = 1074030086;
++pub const EXT4_IOC32_GROUP_EXTEND: u32 = 1074030087;
++pub const EXT4_IOC32_GETVERSION_OLD: u32 = 2147776001;
++pub const EXT4_IOC32_SETVERSION_OLD: u32 = 1074034178;
++pub const VIDIOC_SUBDEV_QUERYSTD: u32 = 2148030015;
++pub const AUTOFS_DEV_IOCTL_CLOSEMOUNT: u32 = 3222836085;
++pub const LIRC_SET_SEND_CARRIER: u32 = 1074030867;
++pub const AUTOFS_IOC_PROTOSUBVER: u32 = 2147783527;
++pub const PTP_SYS_OFFSET_PRECISE: u32 = 3225435400;
++pub const FSI_SCOM_WRITE: u32 = 3223352066;
++pub const ATM_GETCIRANGE: u32 = 1074815370;
++pub const DMA_BUF_SET_NAME_B: u32 = 1074291201;
++pub const RIO_CM_EP_GET_LIST_SIZE: u32 = 3221512961;
++pub const TUNSETPERSIST: u32 = 1074025675;
++pub const FS_IOC_GET_ENCRYPTION_POLICY: u32 = 1074554389;
++pub const CEC_RECEIVE: u32 = 3224920326;
++pub const MGSL_IOCGPARAMS: u32 = 2150657281;
++pub const ENI_SETMULT: u32 = 1074815335;
++pub const RIO_GET_EVENT_MASK: u32 = 2147773710;
++pub const LIRC_GET_MAX_TIMEOUT: u32 = 2147772681;
++pub const USBDEVFS_CLAIMINTERFACE: u32 = 2147767567;
++pub const CHIOMOVE: u32 = 1075077889;
++pub const SONYPI_IOCGBATFLAGS: u32 = 2147579399;
++pub const BTRFS_IOC_SYNC: u32 = 37896;
++pub const VIDIOC_TRY_FMT: u32 = 3234879040;
++pub const LIRC_SET_REC_MODE: u32 = 1074030866;
++pub const VIDIOC_DQEVENT: u32 = 2156418649;
++pub const RPMSG_DESTROY_EPT_IOCTL: u32 = 46338;
++pub const UVCIOC_CTRL_MAP: u32 = 3227546912;
++pub const VHOST_SET_BACKEND_FEATURES: u32 = 1074310949;
++pub const VHOST_VSOCK_SET_GUEST_CID: u32 = 1074311008;
++pub const UI_SET_KEYBIT: u32 = 1074025829;
++pub const LIRC_SET_REC_TIMEOUT: u32 = 1074030872;
++pub const FS_IOC_GET_ENCRYPTION_KEY_STATUS: u32 = 3229640218;
++pub const BTRFS_IOC_TREE_SEARCH_V2: u32 = 3228603409;
++pub const VHOST_SET_VRING_BASE: u32 = 1074310930;
++pub const RIO_ENABLE_DOORBELL_RANGE: u32 = 1074294025;
++pub const VIDIOC_TRY_EXT_CTRLS: u32 = 3223344713;
++pub const LIRC_GET_REC_MODE: u32 = 2147772674;
++pub const PPGETTIME: u32 = 2148561045;
++pub const BTRFS_IOC_RM_DEV: u32 = 1342215179;
++pub const ATM_SETBACKEND: u32 = 1073897970;
++pub const FSL_HV_IOCTL_PARTITION_START: u32 = 3222318851;
++pub const FBIO_WAITEVENT: u32 = 18056;
++pub const SWITCHTEC_IOCTL_PORT_TO_PFF: u32 = 3222034245;
++pub const NVME_IOCTL_IO_CMD: u32 = 3225964099;
++pub const IPMICTL_RECEIVE_MSG_TRUNC: u32 = 3224398091;
++pub const FDTWADDLE: u32 = 601;
++pub const NVME_IOCTL_SUBMIT_IO: u32 = 1076907586;
++pub const NILFS_IOCTL_SYNC: u32 = 2148036234;
++pub const VIDIOC_SUBDEV_S_DV_TIMINGS: u32 = 3229898327;
++pub const ASPEED_LPC_CTRL_IOCTL_GET_SIZE: u32 = 3222319616;
++pub const DM_DEV_STATUS: u32 = 3241737479;
++pub const TEE_IOC_CLOSE_SESSION: u32 = 2147787781;
++pub const NS_GETPSTAT: u32 = 3222298977;
++pub const UI_SET_PROPBIT: u32 = 1074025838;
++pub const TUNSETFILTEREBPF: u32 = 2147767521;
++pub const RIO_MPORT_MAINT_COMPTAG_SET: u32 = 1074031874;
++pub const AUTOFS_DEV_IOCTL_VERSION: u32 = 3222836081;
++pub const WDIOC_SETOPTIONS: u32 = 2147768068;
++pub const VHOST_SCSI_SET_ENDPOINT: u32 = 1088991040;
++pub const MGSL_IOCGTXIDLE: u32 = 27907;
++pub const ATM_ADDLECSADDR: u32 = 1074815374;
++pub const FSL_HV_IOCTL_GETPROP: u32 = 3223891719;
++pub const FDGETPRM: u32 = 2149581316;
++pub const HIDIOCAPPLICATION: u32 = 18434;
++pub const ENI_MEMDUMP: u32 = 1074815328;
++pub const PTP_SYS_OFFSET2: u32 = 1128283406;
++pub const VIDIOC_SUBDEV_G_DV_TIMINGS: u32 = 3229898328;
++pub const DMA_BUF_SET_NAME_A: u32 = 1074029057;
++pub const PTP_PIN_GETFUNC: u32 = 3227532550;
++pub const PTP_SYS_OFFSET_EXTENDED: u32 = 3300932873;
++pub const DFL_FPGA_PORT_UINT_SET_IRQ: u32 = 1074312776;
++pub const RTC_EPOCH_READ: u32 = 2148036621;
++pub const VIDIOC_SUBDEV_S_SELECTION: u32 = 3225441854;
++pub const VIDIOC_QUERY_EXT_CTRL: u32 = 3236451943;
++pub const ATM_GETLECSADDR: u32 = 1074815376;
++pub const FSL_HV_IOCTL_PARTITION_STOP: u32 = 3221794564;
++pub const SONET_GETDIAG: u32 = 2147770644;
++pub const ATMMPC_DATA: u32 = 25049;
++pub const IPMICTL_UNREGISTER_FOR_CMD_CHANS: u32 = 2148296989;
++pub const HIDIOCGCOLLECTIONINDEX: u32 = 1075333136;
++pub const RPMSG_CREATE_EPT_IOCTL: u32 = 1076409601;
++pub const GPIOHANDLE_GET_LINE_VALUES_IOCTL: u32 = 3225465864;
++pub const UI_DEV_SETUP: u32 = 1079792899;
++pub const ISST_IF_IO_CMD: u32 = 1074331138;
++pub const RIO_MPORT_MAINT_READ_REMOTE: u32 = 2149084423;
++pub const VIDIOC_OMAP3ISP_HIST_CFG: u32 = 3224393412;
++pub const BLKGETNRZONES: u32 = 2147750533;
++pub const VIDIOC_G_MODULATOR: u32 = 3225703990;
++pub const VBG_IOCTL_WRITE_CORE_DUMP: u32 = 3223082515;
++pub const USBDEVFS_SETINTERFACE: u32 = 2148029700;
++pub const PPPIOCGCHAN: u32 = 2147775543;
++pub const EVIOCGVERSION: u32 = 2147763457;
++pub const VHOST_NET_SET_BACKEND: u32 = 1074310960;
++pub const USBDEVFS_REAPURBNDELAY: u32 = 1074287885;
++pub const RNDZAPENTCNT: u32 = 20996;
++pub const VIDIOC_G_PARM: u32 = 3234616853;
++pub const TUNGETDEVNETNS: u32 = 21731;
++pub const LIRC_SET_MEASURE_CARRIER_MODE: u32 = 1074030877;
++pub const VHOST_SET_VRING_ERR: u32 = 1074310946;
++pub const VDUSE_VQ_SETUP: u32 = 1075872020;
++pub const AUTOFS_IOC_SETTIMEOUT: u32 = 3221787492;
++pub const VIDIOC_S_FREQUENCY: u32 = 1076647481;
++pub const F2FS_IOC_SEC_TRIM_FILE: u32 = 1075377428;
++pub const FS_IOC_REMOVE_ENCRYPTION_KEY: u32 = 3225445912;
++pub const WDIOC_GETPRETIMEOUT: u32 = 2147768073;
++pub const USBDEVFS_DROP_PRIVILEGES: u32 = 1074025758;
++pub const BTRFS_IOC_SNAP_CREATE_V2: u32 = 1342215191;
++pub const VHOST_VSOCK_SET_RUNNING: u32 = 1074048865;
++pub const STP_SET_OPTIONS: u32 = 1074275586;
++pub const FBIO_RADEON_GET_MIRROR: u32 = 2148024323;
++pub const IVTVFB_IOC_DMA_FRAME: u32 = 1075336896;
++pub const IPMICTL_SEND_COMMAND: u32 = 2150131981;
++pub const VIDIOC_G_ENC_INDEX: u32 = 2283296332;
++pub const DFL_FPGA_FME_PORT_PR: u32 = 46720;
++pub const CHIOSVOLTAG: u32 = 1076912914;
++pub const ATM_SETESIF: u32 = 1074815373;
++pub const FW_CDEV_IOC_SEND_RESPONSE: u32 = 1075323652;
++pub const PMU_IOC_GET_MODEL: u32 = 2148024835;
++pub const JSIOCGBTNMAP: u32 = 2214619700;
++pub const USBDEVFS_HUB_PORTINFO: u32 = 2155894035;
++pub const VBG_IOCTL_INTERRUPT_ALL_WAIT_FOR_EVENTS: u32 = 3222820363;
++pub const FDCLRPRM: u32 = 577;
++pub const BTRFS_IOC_SCRUB: u32 = 3288372251;
++pub const USBDEVFS_DISCONNECT: u32 = 21782;
++pub const TUNSETVNETBE: u32 = 1074025694;
++pub const ATMTCP_REMOVE: u32 = 24975;
++pub const VHOST_VDPA_GET_CONFIG: u32 = 2148052851;
++pub const PPPIOCGNPMODE: u32 = 3221779532;
++pub const FDGETDRVPRM: u32 = 2155872785;
++pub const TUNSETVNETLE: u32 = 1074025692;
++pub const PHN_SETREG: u32 = 1074294790;
++pub const PPPIOCDETACH: u32 = 1074033724;
++pub const MMTIMER_GETRES: u32 = 2148035841;
++pub const VIDIOC_SUBDEV_ENUMSTD: u32 = 3225966105;
++pub const PPGETFLAGS: u32 = 2147774618;
++pub const VDUSE_DEV_GET_FEATURES: u32 = 2148040977;
++pub const CAPI_MANUFACTURER_CMD: u32 = 3222291232;
++pub const VIDIOC_G_TUNER: u32 = 3226752541;
++pub const DM_TABLE_STATUS: u32 = 3241737484;
++pub const DM_DEV_ARM_POLL: u32 = 3241737488;
++pub const NE_CREATE_VM: u32 = 2148052512;
++pub const MEDIA_IOC_ENUM_LINKS: u32 = 3223878658;
++pub const F2FS_IOC_PRECACHE_EXTENTS: u32 = 62735;
++pub const DFL_FPGA_PORT_DMA_MAP: u32 = 46659;
++pub const MGSL_IOCGXCTRL: u32 = 27926;
++pub const FW_CDEV_IOC_SEND_REQUEST: u32 = 1076372225;
++pub const SONYPI_IOCGBLUE: u32 = 2147579400;
++pub const F2FS_IOC_DECOMPRESS_FILE: u32 = 62743;
++pub const I2OHTML: u32 = 3224398089;
++pub const VFIO_GET_API_VERSION: u32 = 15204;
++pub const IDT77105_GETSTATZ: u32 = 1074815283;
++pub const I2OPARMSET: u32 = 3223873795;
++pub const TEE_IOC_CANCEL: u32 = 2148049924;
++pub const PTP_SYS_OFFSET_PRECISE2: u32 = 3225435409;
++pub const DFL_FPGA_PORT_RESET: u32 = 46656;
++pub const PPPIOCGASYNCMAP: u32 = 2147775576;
++pub const EVIOCGKEYCODE_V2: u32 = 2150122756;
++pub const DM_DEV_SET_GEOMETRY: u32 = 3241737487;
++pub const HIDIOCSUSAGE: u32 = 1075333132;
++pub const FW_CDEV_IOC_DEALLOCATE_ISO_RESOURCE_ONCE: u32 = 1075323664;
++pub const PTP_EXTTS_REQUEST: u32 = 1074806018;
++pub const SWITCHTEC_IOCTL_EVENT_CTL: u32 = 3223869251;
++pub const WDIOC_SETPRETIMEOUT: u32 = 3221509896;
++pub const VHOST_SCSI_CLEAR_ENDPOINT: u32 = 1088991041;
++pub const JSIOCGAXES: u32 = 2147576337;
++pub const HIDIOCSFLAG: u32 = 1074022415;
++pub const PTP_PEROUT_REQUEST2: u32 = 1077427468;
++pub const PPWDATA: u32 = 1073836166;
++pub const PTP_CLOCK_GETCAPS: u32 = 2152742145;
++pub const FDGETMAXERRS: u32 = 2148794894;
++pub const TUNSETQUEUE: u32 = 1074025689;
++pub const PTP_ENABLE_PPS: u32 = 1074019588;
++pub const SIOCSIFATMTCP: u32 = 24960;
++pub const CEC_ADAP_G_LOG_ADDRS: u32 = 2153537795;
++pub const ND_IOCTL_ARS_CAP: u32 = 3223342593;
++pub const NBD_SET_BLKSIZE: u32 = 43777;
++pub const NBD_SET_TIMEOUT: u32 = 43785;
++pub const VHOST_SCSI_GET_ABI_VERSION: u32 = 1074048834;
++pub const RIO_UNMAP_INBOUND: u32 = 1074294034;
++pub const ATM_QUERYLOOP: u32 = 1074815316;
++pub const DFL_FPGA_GET_API_VERSION: u32 = 46592;
++pub const USBDEVFS_WAIT_FOR_RESUME: u32 = 21795;
++pub const FBIO_CURSOR: u32 = 3228059144;
++pub const RNDCLEARPOOL: u32 = 20998;
++pub const VIDIOC_QUERYSTD: u32 = 2148030015;
++pub const DMA_BUF_IOCTL_SYNC: u32 = 1074291200;
++pub const SCIF_RECV: u32 = 3222827783;
++pub const PTP_PIN_GETFUNC2: u32 = 3227532559;
++pub const FW_CDEV_IOC_ALLOCATE: u32 = 3223331586;
++pub const CEC_ADAP_G_CAPS: u32 = 3226231040;
++pub const VIDIOC_G_FBUF: u32 = 2150651402;
++pub const PTP_ENABLE_PPS2: u32 = 1074019597;
++pub const PCITEST_CLEAR_IRQ: u32 = 20496;
++pub const IPMICTL_SET_GETS_EVENTS_CMD: u32 = 2147772688;
++pub const BTRFS_IOC_DEVICES_READY: u32 = 2415957031;
++pub const JSIOCGAXMAP: u32 = 2151705138;
++pub const FW_CDEV_IOC_GET_CYCLE_TIMER: u32 = 2148541196;
++pub const FW_CDEV_IOC_SET_ISO_CHANNELS: u32 = 1074799383;
++pub const RTC_WIE_OFF: u32 = 28688;
++pub const PPGETMODE: u32 = 2147774616;
++pub const VIDIOC_DBG_G_REGISTER: u32 = 3224917584;
++pub const PTP_SYS_OFFSET: u32 = 1128283397;
++pub const BTRFS_IOC_SPACE_INFO: u32 = 3222311956;
++pub const VIDIOC_SUBDEV_ENUM_FRAME_SIZE: u32 = 3225441866;
++pub const ND_IOCTL_VENDOR: u32 = 3221769737;
++pub const SCIF_VREADFROM: u32 = 3223876364;
++pub const BTRFS_IOC_TRANS_START: u32 = 37894;
++pub const INOTIFY_IOC_SETNEXTWD: u32 = 1074022656;
++pub const SNAPSHOT_GET_IMAGE_SIZE: u32 = 2148021006;
++pub const TUNDETACHFILTER: u32 = 1074812118;
++pub const ND_IOCTL_CLEAR_ERROR: u32 = 3223342596;
++pub const IOC_PR_CLEAR: u32 = 1074819277;
++pub const SCIF_READFROM: u32 = 3223876362;
++pub const PPPIOCGDEBUG: u32 = 2147775553;
++pub const BLKGETZONESZ: u32 = 2147750532;
++pub const HIDIOCGUSAGES: u32 = 3491514387;
++pub const SONYPI_IOCGTEMP: u32 = 2147579404;
++pub const UI_SET_MSCBIT: u32 = 1074025832;
++pub const APM_IOC_SUSPEND: u32 = 16642;
++pub const BTRFS_IOC_TREE_SEARCH: u32 = 3489698833;
++pub const RTC_PLL_GET: u32 = 2149609489;
++pub const RIO_CM_EP_GET_LIST: u32 = 3221512962;
++pub const USBDEVFS_DISCSIGNAL: u32 = 2148553998;
++pub const LIRC_GET_MIN_TIMEOUT: u32 = 2147772680;
++pub const SWITCHTEC_IOCTL_EVENT_SUMMARY_LEGACY: u32 = 2174244674;
++pub const DM_TARGET_MSG: u32 = 3241737486;
++pub const SONYPI_IOCGBAT1REM: u32 = 2147644931;
++pub const EVIOCSFF: u32 = 1076905344;
++pub const TUNSETGROUP: u32 = 1074025678;
++pub const EVIOCGKEYCODE: u32 = 2148025604;
++pub const KCOV_REMOTE_ENABLE: u32 = 1075340134;
++pub const ND_IOCTL_GET_CONFIG_SIZE: u32 = 3222031876;
++pub const FDEJECT: u32 = 602;
++pub const TUNSETOFFLOAD: u32 = 1074025680;
++pub const PPPIOCCONNECT: u32 = 1074033722;
++pub const ATM_ADDADDR: u32 = 1074815368;
++pub const VDUSE_DEV_INJECT_CONFIG_IRQ: u32 = 33043;
++pub const AUTOFS_DEV_IOCTL_ASKUMOUNT: u32 = 3222836093;
++pub const VHOST_VDPA_GET_STATUS: u32 = 2147594097;
++pub const CCISS_PASSTHRU: u32 = 3227009547;
++pub const MGSL_IOCCLRMODCOUNT: u32 = 27919;
++pub const TEE_IOC_SUPPL_SEND: u32 = 2148574215;
++pub const ATMARPD_CTRL: u32 = 25057;
++pub const UI_ABS_SETUP: u32 = 1075598596;
++pub const UI_DEV_DESTROY: u32 = 21762;
++pub const BTRFS_IOC_QUOTA_CTL: u32 = 3222311976;
++pub const RTC_AIE_ON: u32 = 28673;
++pub const AUTOFS_IOC_EXPIRE: u32 = 2165085029;
++pub const PPPIOCSDEBUG: u32 = 1074033728;
++pub const GPIO_V2_LINE_SET_VALUES_IOCTL: u32 = 3222320143;
++pub const PPPIOCSMRU: u32 = 1074033746;
++pub const CCISS_DEREGDISK: u32 = 16908;
++pub const UI_DEV_CREATE: u32 = 21761;
++pub const FUSE_DEV_IOC_CLONE: u32 = 2147804416;
++pub const BTRFS_IOC_START_SYNC: u32 = 2148045848;
++pub const NILFS_IOCTL_DELETE_CHECKPOINT: u32 = 1074294401;
++pub const SNAPSHOT_AVAIL_SWAP_SIZE: u32 = 2148021011;
++pub const DM_TABLE_CLEAR: u32 = 3241737482;
++pub const CCISS_GETINTINFO: u32 = 2148024834;
++pub const PPPIOCSASYNCMAP: u32 = 1074033751;
++pub const I2OEVTGET: u32 = 2154326283;
++pub const NVME_IOCTL_RESET: u32 = 20036;
++pub const PPYIELD: u32 = 28813;
++pub const NVME_IOCTL_IO64_CMD: u32 = 3226488392;
++pub const TUNSETCARRIER: u32 = 1074025698;
++pub const DM_DEV_WAIT: u32 = 3241737480;
++pub const RTC_WIE_ON: u32 = 28687;
++pub const MEDIA_IOC_DEVICE_INFO: u32 = 3238034432;
++pub const RIO_CM_CHAN_CREATE: u32 = 3221381891;
++pub const MGSL_IOCSPARAMS: u32 = 1076915456;
++pub const RTC_SET_TIME: u32 = 1076129802;
++pub const VHOST_RESET_OWNER: u32 = 44802;
++pub const IOC_OPAL_PSID_REVERT_TPR: u32 = 1091072232;
++pub const AUTOFS_DEV_IOCTL_OPENMOUNT: u32 = 3222836084;
++pub const UDF_GETEABLOCK: u32 = 2148035649;
++pub const VFIO_IOMMU_MAP_DMA: u32 = 15217;
++pub const VIDIOC_SUBSCRIBE_EVENT: u32 = 1075861082;
++pub const HIDIOCGFLAG: u32 = 2147764238;
++pub const HIDIOCGUCODE: u32 = 3222816781;
++pub const VIDIOC_OMAP3ISP_AF_CFG: u32 = 3226228421;
++pub const DM_REMOVE_ALL: u32 = 3241737473;
++pub const ASPEED_LPC_CTRL_IOCTL_MAP: u32 = 1074835969;
++pub const CCISS_GETFIRMVER: u32 = 2147762696;
++pub const ND_IOCTL_ARS_START: u32 = 3223342594;
++pub const PPPIOCSMRRU: u32 = 1074033723;
++pub const CEC_ADAP_S_LOG_ADDRS: u32 = 3227279620;
++pub const RPROC_GET_SHUTDOWN_ON_RELEASE: u32 = 2147792642;
++pub const DMA_HEAP_IOCTL_ALLOC: u32 = 3222816768;
++pub const PPSETTIME: u32 = 1074819222;
++pub const RTC_ALM_READ: u32 = 2149871624;
++pub const VDUSE_SET_API_VERSION: u32 = 1074299137;
++pub const RIO_MPORT_MAINT_WRITE_REMOTE: u32 = 1075342600;
++pub const VIDIOC_SUBDEV_S_CROP: u32 = 3224917564;
++pub const USBDEVFS_CONNECT: u32 = 21783;
++pub const SYNC_IOC_FILE_INFO: u32 = 3224911364;
++pub const ATMARP_MKIP: u32 = 25058;
++pub const VFIO_IOMMU_SPAPR_TCE_GET_INFO: u32 = 15216;
++pub const CCISS_GETHEARTBEAT: u32 = 2147762694;
++pub const ATM_RSTADDR: u32 = 1074815367;
++pub const NBD_SET_SIZE: u32 = 43778;
++pub const UDF_GETVOLIDENT: u32 = 2148035650;
++pub const GPIO_V2_LINE_GET_VALUES_IOCTL: u32 = 3222320142;
++pub const MGSL_IOCSTXIDLE: u32 = 27906;
++pub const FSL_HV_IOCTL_SETPROP: u32 = 3223891720;
++pub const BTRFS_IOC_GET_DEV_STATS: u32 = 3288896564;
++pub const PPRSTATUS: u32 = 2147577985;
++pub const MGSL_IOCTXENABLE: u32 = 27908;
++pub const UDF_GETEASIZE: u32 = 2147773504;
++pub const NVME_IOCTL_ADMIN64_CMD: u32 = 3226488391;
++pub const VHOST_SET_OWNER: u32 = 44801;
++pub const RIO_ALLOC_DMA: u32 = 3222826259;
++pub const RIO_CM_CHAN_ACCEPT: u32 = 3221775111;
++pub const I2OHRTGET: u32 = 3222825217;
++pub const ATM_SETCIRANGE: u32 = 1074815371;
++pub const HPET_IE_ON: u32 = 26625;
++pub const PERF_EVENT_IOC_ID: u32 = 2148017159;
++pub const TUNSETSNDBUF: u32 = 1074025684;
++pub const PTP_PIN_SETFUNC: u32 = 1080048903;
++pub const PPPIOCDISCONN: u32 = 29753;
++pub const VIDIOC_QUERYCTRL: u32 = 3225703972;
++pub const PPEXCL: u32 = 28815;
++pub const PCITEST_MSI: u32 = 1074024451;
++pub const FDWERRORCLR: u32 = 598;
++pub const AUTOFS_IOC_FAIL: u32 = 37729;
++pub const USBDEVFS_IOCTL: u32 = 3222295826;
++pub const VIDIOC_S_STD: u32 = 1074288152;
++pub const F2FS_IOC_RESIZE_FS: u32 = 1074328848;
++pub const SONET_SETDIAG: u32 = 3221512466;
++pub const BTRFS_IOC_DEFRAG: u32 = 1342215170;
++pub const CCISS_GETDRIVVER: u32 = 2147762697;
++pub const IPMICTL_GET_TIMING_PARMS_CMD: u32 = 2148034839;
++pub const HPET_IRQFREQ: u32 = 1074292742;
++pub const ATM_GETESI: u32 = 1074815365;
++pub const CCISS_GETLUNINFO: u32 = 2148286993;
++pub const AUTOFS_DEV_IOCTL_ISMOUNTPOINT: u32 = 3222836094;
++pub const TEE_IOC_SHM_ALLOC: u32 = 3222316033;
++pub const PERF_EVENT_IOC_SET_BPF: u32 = 1074013192;
++pub const UDMABUF_CREATE_LIST: u32 = 1074296131;
++pub const VHOST_SET_LOG_BASE: u32 = 1074310916;
++pub const ZATM_GETPOOL: u32 = 1074815329;
++pub const BR2684_SETFILT: u32 = 1075601808;
++pub const RNDGETPOOL: u32 = 2148028930;
++pub const PPS_GETPARAMS: u32 = 2148036769;
++pub const IOC_PR_RESERVE: u32 = 1074819273;
++pub const VIDIOC_TRY_DECODER_CMD: u32 = 3225966177;
++pub const RIO_CM_CHAN_CLOSE: u32 = 1073898244;
++pub const VIDIOC_DV_TIMINGS_CAP: u32 = 3230684772;
++pub const IOCTL_MEI_CONNECT_CLIENT_VTAG: u32 = 3222554628;
++pub const PMU_IOC_GET_BACKLIGHT: u32 = 2148024833;
++pub const USBDEVFS_GET_CAPABILITIES: u32 = 2147767578;
++pub const SCIF_WRITETO: u32 = 3223876363;
++pub const UDF_RELOCATE_BLOCKS: u32 = 3221777475;
++pub const FSL_HV_IOCTL_PARTITION_RESTART: u32 = 3221794561;
++pub const CCISS_REGNEWD: u32 = 16910;
++pub const FAT_IOCTL_SET_ATTRIBUTES: u32 = 1074033169;
++pub const VIDIOC_CREATE_BUFS: u32 = 3238024796;
++pub const CAPI_GET_VERSION: u32 = 3222291207;
++pub const SWITCHTEC_IOCTL_EVENT_SUMMARY: u32 = 2228770626;
++pub const VFIO_EEH_PE_OP: u32 = 15225;
++pub const FW_CDEV_IOC_CREATE_ISO_CONTEXT: u32 = 3223331592;
++pub const F2FS_IOC_RELEASE_COMPRESS_BLOCKS: u32 = 2148070674;
++pub const NBD_SET_SIZE_BLOCKS: u32 = 43783;
++pub const IPMI_BMC_IOCTL_SET_SMS_ATN: u32 = 45312;
++pub const ASPEED_P2A_CTRL_IOCTL_GET_MEMORY_CONFIG: u32 = 3222319873;
++pub const VIDIOC_S_AUDOUT: u32 = 1077171762;
++pub const VIDIOC_S_FMT: u32 = 3234878981;
++pub const PPPIOCATTACH: u32 = 1074033725;
++pub const VHOST_GET_VRING_BUSYLOOP_TIMEOUT: u32 = 1074310948;
++pub const FS_IOC_MEASURE_VERITY: u32 = 3221513862;
++pub const CCISS_BIG_PASSTHRU: u32 = 3227533842;
++pub const IPMICTL_SET_MY_LUN_CMD: u32 = 2147772691;
++pub const PCITEST_LEGACY_IRQ: u32 = 20482;
++pub const USBDEVFS_SUBMITURB: u32 = 2151175434;
++pub const AUTOFS_IOC_READY: u32 = 37728;
++pub const BTRFS_IOC_SEND: u32 = 1078498342;
++pub const VIDIOC_G_EXT_CTRLS: u32 = 3223344711;
++pub const JSIOCSBTNMAP: u32 = 1140877875;
++pub const PPPIOCSFLAGS: u32 = 1074033753;
++pub const NVRAM_INIT: u32 = 28736;
++pub const RFKILL_IOCTL_NOINPUT: u32 = 20993;
++pub const BTRFS_IOC_BALANCE: u32 = 1342215180;
++pub const FS_IOC_GETFSMAP: u32 = 3233830971;
++pub const IPMICTL_GET_MY_CHANNEL_LUN_CMD: u32 = 2147772699;
++pub const STP_POLICY_ID_GET: u32 = 2148541697;
++pub const PPSETFLAGS: u32 = 1074032795;
++pub const CEC_ADAP_S_PHYS_ADDR: u32 = 1073897730;
++pub const ATMTCP_CREATE: u32 = 24974;
++pub const IPMI_BMC_IOCTL_FORCE_ABORT: u32 = 45314;
++pub const PPPIOCGXASYNCMAP: u32 = 2149610576;
++pub const VHOST_SET_VRING_CALL: u32 = 1074310945;
++pub const LIRC_GET_FEATURES: u32 = 2147772672;
++pub const GSMIOC_DISABLE_NET: u32 = 18179;
++pub const AUTOFS_IOC_CATATONIC: u32 = 37730;
++pub const NBD_DO_IT: u32 = 43779;
++pub const LIRC_SET_REC_CARRIER_RANGE: u32 = 1074030879;
++pub const IPMICTL_GET_MY_CHANNEL_ADDRESS_CMD: u32 = 2147772697;
++pub const EVIOCSCLOCKID: u32 = 1074021792;
++pub const USBDEVFS_FREE_STREAMS: u32 = 2148029725;
++pub const FSI_SCOM_RESET: u32 = 1074033411;
++pub const PMU_IOC_GRAB_BACKLIGHT: u32 = 2148024838;
++pub const VIDIOC_SUBDEV_S_FMT: u32 = 3227014661;
++pub const FDDEFPRM: u32 = 1075839555;
++pub const TEE_IOC_INVOKE: u32 = 2148574211;
++pub const USBDEVFS_BULK: u32 = 3222820098;
++pub const SCIF_VWRITETO: u32 = 3223876365;
++pub const SONYPI_IOCSBRT: u32 = 1073837568;
++pub const BTRFS_IOC_FILE_EXTENT_SAME: u32 = 3222836278;
++pub const RTC_PIE_ON: u32 = 28677;
++pub const BTRFS_IOC_SCAN_DEV: u32 = 1342215172;
++pub const PPPIOCXFERUNIT: u32 = 29774;
++pub const WDIOC_GETTIMEOUT: u32 = 2147768071;
++pub const BTRFS_IOC_SET_RECEIVED_SUBVOL: u32 = 3234370597;
++pub const DFL_FPGA_PORT_ERR_SET_IRQ: u32 = 1074312774;
++pub const FBIO_WAITFORVSYNC: u32 = 1074021920;
++pub const RTC_PIE_OFF: u32 = 28678;
++pub const EVIOCGRAB: u32 = 1074021776;
++pub const PMU_IOC_SET_BACKLIGHT: u32 = 1074283010;
++pub const EVIOCGREP: u32 = 2148025603;
++pub const PERF_EVENT_IOC_MODIFY_ATTRIBUTES: u32 = 1074275339;
++pub const UFFDIO_CONTINUE: u32 = 3223366151;
++pub const VDUSE_GET_API_VERSION: u32 = 2148040960;
++pub const RTC_RD_TIME: u32 = 2149871625;
++pub const FDMSGOFF: u32 = 582;
++pub const IPMICTL_REGISTER_FOR_CMD_CHANS: u32 = 2148296988;
++pub const CAPI_GET_ERRCODE: u32 = 2147631905;
++pub const PCITEST_SET_IRQTYPE: u32 = 1074024456;
++pub const VIDIOC_SUBDEV_S_EDID: u32 = 3223868969;
++pub const MATROXFB_SET_OUTPUT_MODE: u32 = 1074294522;
++pub const RIO_DEV_ADD: u32 = 1075866903;
++pub const VIDIOC_ENUM_FREQ_BANDS: u32 = 3225441893;
++pub const FBIO_RADEON_SET_MIRROR: u32 = 1074282500;
++pub const PCITEST_GET_IRQTYPE: u32 = 20489;
++pub const JSIOCGVERSION: u32 = 2147772929;
++pub const SONYPI_IOCSBLUE: u32 = 1073837577;
++pub const SNAPSHOT_PREF_IMAGE_SIZE: u32 = 13074;
++pub const F2FS_IOC_GET_FEATURES: u32 = 2147808524;
++pub const SCIF_REG: u32 = 3223876360;
++pub const NILFS_IOCTL_CLEAN_SEGMENTS: u32 = 1081634440;
++pub const FW_CDEV_IOC_INITIATE_BUS_RESET: u32 = 1074012933;
++pub const RIO_WAIT_FOR_ASYNC: u32 = 1074294038;
++pub const VHOST_SET_VRING_NUM: u32 = 1074310928;
++pub const AUTOFS_DEV_IOCTL_PROTOVER: u32 = 3222836082;
++pub const RIO_FREE_DMA: u32 = 1074294036;
++pub const MGSL_IOCRXENABLE: u32 = 27909;
++pub const IOCTL_VM_SOCKETS_GET_LOCAL_CID: u32 = 1977;
++pub const IPMICTL_SET_TIMING_PARMS_CMD: u32 = 2148034838;
++pub const PPPIOCGL2TPSTATS: u32 = 2152231990;
++pub const PERF_EVENT_IOC_PERIOD: u32 = 1074275332;
++pub const PTP_PIN_SETFUNC2: u32 = 1080048912;
++pub const CHIOEXCHANGE: u32 = 1075602178;
++pub const NILFS_IOCTL_GET_SUINFO: u32 = 2149084804;
++pub const CEC_DQEVENT: u32 = 3226493191;
++pub const UI_SET_SWBIT: u32 = 1074025837;
++pub const VHOST_VDPA_SET_CONFIG: u32 = 1074311028;
++pub const TUNSETIFF: u32 = 1074025674;
++pub const CHIOPOSITION: u32 = 1074553603;
++pub const IPMICTL_SET_MAINTENANCE_MODE_CMD: u32 = 1074030879;
++pub const BTRFS_IOC_DEFAULT_SUBVOL: u32 = 1074304019;
++pub const RIO_UNMAP_OUTBOUND: u32 = 1076391184;
++pub const CAPI_CLR_FLAGS: u32 = 2147762981;
++pub const FW_CDEV_IOC_ALLOCATE_ISO_RESOURCE_ONCE: u32 = 1075323663;
++pub const MATROXFB_GET_OUTPUT_CONNECTION: u32 = 2148036344;
++pub const EVIOCSMASK: u32 = 1074808211;
++pub const BTRFS_IOC_FORGET_DEV: u32 = 1342215173;
++pub const CXL_MEM_QUERY_COMMANDS: u32 = 2148060673;
++pub const CEC_S_MODE: u32 = 1074028809;
++pub const MGSL_IOCSIF: u32 = 27914;
++pub const SWITCHTEC_IOCTL_PFF_TO_PORT: u32 = 3222034244;
++pub const PPSETMODE: u32 = 1074032768;
++pub const VFIO_DEVICE_SET_IRQS: u32 = 15214;
++pub const VIDIOC_PREPARE_BUF: u32 = 3227014749;
++pub const CEC_ADAP_G_CONNECTOR_INFO: u32 = 2151964938;
++pub const IOC_OPAL_WRITE_SHADOW_MBR: u32 = 1092645098;
++pub const VIDIOC_SUBDEV_ENUM_FRAME_INTERVAL: u32 = 3225441867;
++pub const UDMABUF_CREATE: u32 = 1075344706;
++pub const SONET_CLRDIAG: u32 = 3221512467;
++pub const PHN_SET_REG: u32 = 1074294785;
++pub const RNDADDTOENTCNT: u32 = 1074024961;
++pub const VBG_IOCTL_CHECK_BALLOON: u32 = 3223344657;
++pub const VIDIOC_OMAP3ISP_STAT_REQ: u32 = 3223869126;
++pub const PPS_FETCH: u32 = 3221778596;
++pub const RTC_AIE_OFF: u32 = 28674;
++pub const VFIO_GROUP_SET_CONTAINER: u32 = 15208;
++pub const FW_CDEV_IOC_RECEIVE_PHY_PACKETS: u32 = 1074275094;
++pub const VFIO_IOMMU_SPAPR_TCE_REMOVE: u32 = 15224;
++pub const VFIO_IOMMU_GET_INFO: u32 = 15216;
++pub const DM_DEV_SUSPEND: u32 = 3241737478;
++pub const F2FS_IOC_GET_COMPRESS_OPTION: u32 = 2147677461;
++pub const FW_CDEV_IOC_STOP_ISO: u32 = 1074012939;
++pub const GPIO_V2_GET_LINEINFO_IOCTL: u32 = 3238048773;
++pub const ATMMPC_CTRL: u32 = 25048;
++pub const PPPIOCSXASYNCMAP: u32 = 1075868751;
++pub const CHIOGSTATUS: u32 = 1074815752;
++pub const FW_CDEV_IOC_ALLOCATE_ISO_RESOURCE: u32 = 3222807309;
++pub const RIO_MPORT_MAINT_PORT_IDX_GET: u32 = 2147773699;
++pub const CAPI_SET_FLAGS: u32 = 2147762980;
++pub const VFIO_GROUP_GET_DEVICE_FD: u32 = 15210;
++pub const VHOST_SET_MEM_TABLE: u32 = 1074310915;
++pub const MATROXFB_SET_OUTPUT_CONNECTION: u32 = 1074294520;
++pub const DFL_FPGA_PORT_GET_REGION_INFO: u32 = 46658;
++pub const VHOST_GET_FEATURES: u32 = 2148052736;
++pub const LIRC_GET_REC_RESOLUTION: u32 = 2147772679;
++pub const PACKET_CTRL_CMD: u32 = 3222820865;
++pub const LIRC_SET_TRANSMITTER_MASK: u32 = 1074030871;
++pub const BTRFS_IOC_ADD_DEV: u32 = 1342215178;
++pub const JSIOCGCORR: u32 = 2149870114;
++pub const VIDIOC_G_FMT: u32 = 3234878980;
++pub const RTC_EPOCH_SET: u32 = 1074294798;
++pub const CAPI_GET_PROFILE: u32 = 3225436937;
++pub const ATM_GETLOOP: u32 = 1074815314;
++pub const SCIF_LISTEN: u32 = 1074033410;
++pub const NBD_CLEAR_QUE: u32 = 43781;
++pub const F2FS_IOC_MOVE_RANGE: u32 = 3223385353;
++pub const LIRC_GET_LENGTH: u32 = 2147772687;
++pub const I8K_SET_FAN: u32 = 3221776775;
++pub const FDSETMAXERRS: u32 = 1075053132;
++pub const VIDIOC_SUBDEV_QUERYCAP: u32 = 2151699968;
++pub const SNAPSHOT_SET_SWAP_AREA: u32 = 1074541325;
++pub const LIRC_GET_REC_TIMEOUT: u32 = 2147772708;
++pub const EVIOCRMFF: u32 = 1074021761;
++pub const GPIO_GET_LINEEVENT_IOCTL: u32 = 3224417284;
++pub const PPRDATA: u32 = 2147577989;
++pub const RIO_MPORT_GET_PROPERTIES: u32 = 2150657284;
++pub const TUNSETVNETHDRSZ: u32 = 1074025688;
++pub const GPIO_GET_LINEINFO_IOCTL: u32 = 3225990146;
++pub const GSMIOC_GETCONF: u32 = 2152482560;
++pub const LIRC_GET_SEND_MODE: u32 = 2147772673;
++pub const PPPIOCSACTIVE: u32 = 1074820166;
++pub const SIOCGSTAMPNS_NEW: u32 = 2148567303;
++pub const IPMICTL_RECEIVE_MSG: u32 = 3224398092;
++pub const LIRC_SET_SEND_DUTY_CYCLE: u32 = 1074030869;
++pub const UI_END_FF_ERASE: u32 = 1074550219;
++pub const SWITCHTEC_IOCTL_FLASH_PART_INFO: u32 = 3222296385;
++pub const FW_CDEV_IOC_SEND_PHY_PACKET: u32 = 3222807317;
++pub const NBD_SET_FLAGS: u32 = 43786;
++pub const VFIO_DEVICE_GET_REGION_INFO: u32 = 15212;
++pub const REISERFS_IOC_UNPACK: u32 = 1074318593;
++pub const FW_CDEV_IOC_REMOVE_DESCRIPTOR: u32 = 1074012935;
++pub const RIO_SET_EVENT_MASK: u32 = 1074031885;
++pub const SNAPSHOT_ALLOC_SWAP_PAGE: u32 = 2148021012;
++pub const VDUSE_VQ_INJECT_IRQ: u32 = 1074037015;
++pub const I2OPASSTHRU: u32 = 2148559116;
++pub const IOC_OPAL_SET_PW: u32 = 1109422304;
++pub const FSI_SCOM_READ: u32 = 3223352065;
++pub const VHOST_VDPA_GET_DEVICE_ID: u32 = 2147790704;
++pub const VIDIOC_QBUF: u32 = 3227014671;
++pub const VIDIOC_S_TUNER: u32 = 1079268894;
++pub const TUNGETVNETHDRSZ: u32 = 2147767511;
++pub const CAPI_NCCI_GETUNIT: u32 = 2147762983;
++pub const DFL_FPGA_PORT_UINT_GET_IRQ_NUM: u32 = 2147792455;
++pub const VIDIOC_OMAP3ISP_STAT_EN: u32 = 3221771975;
++pub const GPIO_V2_LINE_SET_CONFIG_IOCTL: u32 = 3239097357;
++pub const TEE_IOC_VERSION: u32 = 2148312064;
++pub const VIDIOC_LOG_STATUS: u32 = 22086;
++pub const IPMICTL_SEND_COMMAND_SETTIME: u32 = 2150656277;
++pub const VHOST_SET_LOG_FD: u32 = 1074048775;
++pub const SCIF_SEND: u32 = 3222827782;
++pub const VIDIOC_SUBDEV_G_FMT: u32 = 3227014660;
++pub const NS_ADJBUFLEV: u32 = 24931;
++pub const VIDIOC_DBG_S_REGISTER: u32 = 1077433935;
++pub const NILFS_IOCTL_RESIZE: u32 = 1074294411;
++pub const PHN_GETREG: u32 = 3221778437;
++pub const I2OSWDL: u32 = 3224398085;
++pub const VBG_IOCTL_VMMDEV_REQUEST_BIG: u32 = 22019;
++pub const JSIOCGBUTTONS: u32 = 2147576338;
++pub const VFIO_IOMMU_ENABLE: u32 = 15219;
++pub const DM_DEV_RENAME: u32 = 3241737477;
++pub const MEDIA_IOC_SETUP_LINK: u32 = 3224665091;
++pub const VIDIOC_ENUMOUTPUT: u32 = 3225966128;
++pub const STP_POLICY_ID_SET: u32 = 3222283520;
++pub const VHOST_VDPA_SET_CONFIG_CALL: u32 = 1074048887;
++pub const VIDIOC_SUBDEV_G_CROP: u32 = 3224917563;
++pub const VIDIOC_S_CROP: u32 = 1075074620;
++pub const WDIOC_GETTEMP: u32 = 2147768067;
++pub const IOC_OPAL_ADD_USR_TO_LR: u32 = 1092120804;
++pub const UI_SET_LEDBIT: u32 = 1074025833;
++pub const NBD_SET_SOCK: u32 = 43776;
++pub const BTRFS_IOC_SNAP_DESTROY_V2: u32 = 1342215231;
++pub const HIDIOCGCOLLECTIONINFO: u32 = 3222292497;
++pub const I2OSWUL: u32 = 3224398086;
++pub const IOCTL_MEI_NOTIFY_GET: u32 = 2147764227;
++pub const FDFMTTRK: u32 = 1074528840;
++pub const MMTIMER_GETBITS: u32 = 27908;
++pub const VIDIOC_ENUMSTD: u32 = 3225966105;
++pub const VHOST_GET_VRING_BASE: u32 = 3221794578;
++pub const VFIO_DEVICE_IOEVENTFD: u32 = 15220;
++pub const ATMARP_SETENTRY: u32 = 25059;
++pub const CCISS_REVALIDVOLS: u32 = 16906;
++pub const MGSL_IOCLOOPTXDONE: u32 = 27913;
++pub const RTC_VL_READ: u32 = 2147774483;
++pub const ND_IOCTL_ARS_STATUS: u32 = 3224391171;
++pub const RIO_DEV_DEL: u32 = 1075866904;
++pub const VBG_IOCTL_ACQUIRE_GUEST_CAPABILITIES: u32 = 3223606797;
++pub const VIDIOC_SUBDEV_DV_TIMINGS_CAP: u32 = 3230684772;
++pub const SONYPI_IOCSFAN: u32 = 1073837579;
++pub const SPIOCSTYPE: u32 = 1074295041;
++pub const IPMICTL_REGISTER_FOR_CMD: u32 = 2147641614;
++pub const I8K_GET_FAN: u32 = 3221776774;
++pub const TUNGETVNETBE: u32 = 2147767519;
++pub const AUTOFS_DEV_IOCTL_FAIL: u32 = 3222836087;
++pub const UI_END_FF_UPLOAD: u32 = 1080579529;
++pub const TOSH_SMM: u32 = 3222828176;
++pub const SONYPI_IOCGBAT2REM: u32 = 2147644933;
++pub const F2FS_IOC_GET_COMPRESS_BLOCKS: u32 = 2148070673;
++pub const PPPIOCSNPMODE: u32 = 1074295883;
++pub const USBDEVFS_CONTROL: u32 = 3222820096;
++pub const HIDIOCGUSAGE: u32 = 3222816779;
++pub const TUNSETTXFILTER: u32 = 1074025681;
++pub const TUNGETVNETLE: u32 = 2147767517;
++pub const VIDIOC_ENUM_DV_TIMINGS: u32 = 3230946914;
++pub const BTRFS_IOC_INO_PATHS: u32 = 3224933411;
++pub const MGSL_IOCGXSYNC: u32 = 27924;
++pub const HIDIOCGFIELDINFO: u32 = 3224913930;
++pub const VIDIOC_SUBDEV_G_STD: u32 = 2148029975;
++pub const I2OVALIDATE: u32 = 2147772680;
++pub const VIDIOC_TRY_ENCODER_CMD: u32 = 3223869006;
++pub const NILFS_IOCTL_GET_CPINFO: u32 = 2149084802;
++pub const VIDIOC_G_FREQUENCY: u32 = 3224131128;
++pub const VFAT_IOCTL_READDIR_SHORT: u32 = 2184212994;
++pub const ND_IOCTL_GET_CONFIG_DATA: u32 = 3222031877;
++pub const F2FS_IOC_RESERVE_COMPRESS_BLOCKS: u32 = 2148070675;
++pub const FDGETDRVSTAT: u32 = 2152727058;
++pub const SYNC_IOC_MERGE: u32 = 3224387075;
++pub const VIDIOC_S_DV_TIMINGS: u32 = 3229898327;
++pub const PPPIOCBRIDGECHAN: u32 = 1074033717;
++pub const LIRC_SET_SEND_MODE: u32 = 1074030865;
++pub const RIO_ENABLE_PORTWRITE_RANGE: u32 = 1074818315;
++pub const ATM_GETTYPE: u32 = 1074815364;
++pub const PHN_GETREGS: u32 = 3223875591;
++pub const FDSETEMSGTRESH: u32 = 586;
++pub const NILFS_IOCTL_GET_VINFO: u32 = 3222826630;
++pub const MGSL_IOCWAITEVENT: u32 = 3221515528;
++pub const CAPI_INSTALLED: u32 = 2147631906;
++pub const EVIOCGMASK: u32 = 2148550034;
++pub const BTRFS_IOC_SUBVOL_GETFLAGS: u32 = 2148045849;
++pub const FSL_HV_IOCTL_PARTITION_GET_STATUS: u32 = 3222056706;
++pub const MEDIA_IOC_ENUM_ENTITIES: u32 = 3238034433;
++pub const GSMIOC_GETFIRST: u32 = 2147763972;
++pub const FW_CDEV_IOC_FLUSH_ISO: u32 = 1074012952;
++pub const VIDIOC_DBG_G_CHIP_INFO: u32 = 3234354790;
++pub const F2FS_IOC_RELEASE_VOLATILE_WRITE: u32 = 62724;
++pub const CAPI_GET_SERIAL: u32 = 3221504776;
++pub const FDSETDRVPRM: u32 = 1082131088;
++pub const IOC_OPAL_SAVE: u32 = 1092120796;
++pub const VIDIOC_G_DV_TIMINGS: u32 = 3229898328;
++pub const TUNSETIFINDEX: u32 = 1074025690;
++pub const CCISS_SETINTINFO: u32 = 1074283011;
++pub const CM_IOSDBGLVL: u32 = 1074291706;
++pub const RTC_VL_CLR: u32 = 28692;
++pub const VIDIOC_REQBUFS: u32 = 3222558216;
++pub const USBDEVFS_REAPURBNDELAY32: u32 = 1074025741;
++pub const TEE_IOC_SHM_REGISTER: u32 = 3222840329;
++pub const USBDEVFS_SETCONFIGURATION: u32 = 2147767557;
++pub const CCISS_GETNODENAME: u32 = 2148549124;
++pub const VIDIOC_SUBDEV_S_FRAME_INTERVAL: u32 = 3224393238;
++pub const VIDIOC_ENUM_FRAMESIZES: u32 = 3224131146;
++pub const VFIO_DEVICE_PCI_HOT_RESET: u32 = 15217;
++pub const FW_CDEV_IOC_SEND_BROADCAST_REQUEST: u32 = 1076372242;
++pub const LPSETTIMEOUT_NEW: u32 = 1074791951;
++pub const RIO_CM_MPORT_GET_LIST: u32 = 3221512971;
++pub const FW_CDEV_IOC_QUEUE_ISO: u32 = 3222807305;
++pub const FDRAWCMD: u32 = 600;
++pub const SCIF_UNREG: u32 = 3222303497;
++pub const PPPIOCGIDLE64: u32 = 2148561983;
++pub const USBDEVFS_RELEASEINTERFACE: u32 = 2147767568;
++pub const VIDIOC_CROPCAP: u32 = 3224131130;
++pub const DFL_FPGA_PORT_GET_INFO: u32 = 46657;
++pub const PHN_SET_REGS: u32 = 1074294787;
++pub const ATMLEC_DATA: u32 = 25041;
++pub const PPPOEIOCDFWD: u32 = 45313;
++pub const VIDIOC_S_SELECTION: u32 = 3225441887;
++pub const SNAPSHOT_FREE_SWAP_PAGES: u32 = 13065;
++pub const BTRFS_IOC_LOGICAL_INO: u32 = 3224933412;
++pub const VIDIOC_S_CTRL: u32 = 3221771804;
++pub const ZATM_SETPOOL: u32 = 1074815331;
++pub const MTIOCPOS: u32 = 2148035843;
++pub const PMU_IOC_SLEEP: u32 = 16896;
++pub const AUTOFS_DEV_IOCTL_PROTOSUBVER: u32 = 3222836083;
++pub const VBG_IOCTL_CHANGE_FILTER_MASK: u32 = 3223344652;
++pub const NILFS_IOCTL_GET_SUSTAT: u32 = 2150657669;
++pub const VIDIOC_QUERYCAP: u32 = 2154321408;
++pub const HPET_INFO: u32 = 2149083139;
++pub const VIDIOC_AM437X_CCDC_CFG: u32 = 1074288321;
++pub const DM_LIST_DEVICES: u32 = 3241737474;
++pub const TUNSETOWNER: u32 = 1074025676;
++pub const VBG_IOCTL_CHANGE_GUEST_CAPABILITIES: u32 = 3223344654;
++pub const RNDADDENTROPY: u32 = 1074287107;
++pub const USBDEVFS_RESET: u32 = 21780;
++pub const BTRFS_IOC_SUBVOL_CREATE: u32 = 1342215182;
++pub const USBDEVFS_FORBID_SUSPEND: u32 = 21793;
++pub const FDGETDRVTYP: u32 = 2148532751;
++pub const PPWCONTROL: u32 = 1073836164;
++pub const VIDIOC_ENUM_FRAMEINTERVALS: u32 = 3224655435;
++pub const KCOV_DISABLE: u32 = 25445;
++pub const IOC_OPAL_ACTIVATE_LSP: u32 = 1092120799;
++pub const VHOST_VDPA_GET_IOVA_RANGE: u32 = 2148577144;
++pub const PPPIOCSPASS: u32 = 1074820167;
++pub const RIO_CM_CHAN_CONNECT: u32 = 1074291464;
++pub const I2OSWDEL: u32 = 3224398087;
++pub const FS_IOC_SET_ENCRYPTION_POLICY: u32 = 2148296211;
++pub const IOC_OPAL_MBR_DONE: u32 = 1091596521;
++pub const PPPIOCSMAXCID: u32 = 1074033745;
++pub const PPSETPHASE: u32 = 1074032788;
++pub const VHOST_VDPA_SET_VRING_ENABLE: u32 = 1074311029;
++pub const USBDEVFS_GET_SPEED: u32 = 21791;
++pub const SONET_GETFRAMING: u32 = 2147770646;
++pub const VIDIOC_QUERYBUF: u32 = 3227014665;
++pub const VIDIOC_S_EDID: u32 = 3223868969;
++pub const BTRFS_IOC_QGROUP_ASSIGN: u32 = 1075352617;
++pub const PPS_GETCAP: u32 = 2148036771;
++pub const SNAPSHOT_PLATFORM_SUPPORT: u32 = 13071;
++pub const LIRC_SET_REC_TIMEOUT_REPORTS: u32 = 1074030873;
++pub const SCIF_GET_NODEIDS: u32 = 3222827790;
++pub const NBD_DISCONNECT: u32 = 43784;
++pub const VIDIOC_SUBDEV_G_FRAME_INTERVAL: u32 = 3224393237;
++pub const VFIO_IOMMU_DISABLE: u32 = 15220;
++pub const SNAPSHOT_CREATE_IMAGE: u32 = 1074017041;
++pub const SNAPSHOT_POWER_OFF: u32 = 13072;
++pub const APM_IOC_STANDBY: u32 = 16641;
++pub const PPPIOCGUNIT: u32 = 2147775574;
++pub const AUTOFS_IOC_EXPIRE_MULTI: u32 = 1074041702;
++pub const SCIF_BIND: u32 = 3221779201;
++pub const IOC_WATCH_QUEUE_SET_SIZE: u32 = 22368;
++pub const NILFS_IOCTL_CHANGE_CPMODE: u32 = 1074818688;
++pub const IOC_OPAL_LOCK_UNLOCK: u32 = 1092120797;
++pub const F2FS_IOC_SET_PIN_FILE: u32 = 1074066701;
++pub const PPPIOCGRASYNCMAP: u32 = 2147775573;
++pub const MMTIMER_MMAPAVAIL: u32 = 27910;
++pub const I2OPASSTHRU32: u32 = 2148034828;
++pub const DFL_FPGA_FME_PORT_RELEASE: u32 = 1074050689;
++pub const VIDIOC_SUBDEV_QUERY_DV_TIMINGS: u32 = 2156156515;
++pub const UI_SET_SNDBIT: u32 = 1074025834;
++pub const VIDIOC_G_AUDOUT: u32 = 2150913585;
++pub const RTC_PLL_SET: u32 = 1075867666;
++pub const VIDIOC_ENUMAUDIO: u32 = 3224655425;
++pub const AUTOFS_DEV_IOCTL_TIMEOUT: u32 = 3222836090;
++pub const VBG_IOCTL_DRIVER_VERSION_INFO: u32 = 3224131072;
++pub const VHOST_SCSI_GET_EVENTS_MISSED: u32 = 1074048836;
++pub const VHOST_SET_VRING_ADDR: u32 = 1076408081;
++pub const VDUSE_CREATE_DEV: u32 = 1095794946;
++pub const FDFLUSH: u32 = 587;
++pub const VBG_IOCTL_WAIT_FOR_EVENTS: u32 = 3223344650;
++pub const DFL_FPGA_FME_ERR_SET_IRQ: u32 = 1074312836;
++pub const F2FS_IOC_GET_PIN_FILE: u32 = 2147808526;
++pub const SCIF_CONNECT: u32 = 3221779203;
++pub const BLKREPORTZONE: u32 = 3222278786;
++pub const AUTOFS_IOC_ASKUMOUNT: u32 = 2147783536;
++pub const ATM_ADDPARTY: u32 = 1074815476;
++pub const FDSETPRM: u32 = 1075839554;
++pub const ATM_GETSTATZ: u32 = 1074815313;
++pub const ISST_IF_MSR_COMMAND: u32 = 3221814788;
++pub const BTRFS_IOC_GET_SUBVOL_INFO: u32 = 2180551740;
++pub const VIDIOC_UNSUBSCRIBE_EVENT: u32 = 1075861083;
++pub const SEV_ISSUE_CMD: u32 = 3222295296;
++pub const GPIOHANDLE_SET_LINE_VALUES_IOCTL: u32 = 3225465865;
++pub const PCITEST_COPY: u32 = 1074286598;
++pub const IPMICTL_GET_MY_ADDRESS_CMD: u32 = 2147772690;
++pub const CHIOGPICKER: u32 = 2147771140;
++pub const CAPI_NCCI_OPENCOUNT: u32 = 2147762982;
++pub const CXL_MEM_SEND_COMMAND: u32 = 3224423938;
++pub const PERF_EVENT_IOC_SET_FILTER: u32 = 1074275334;
++pub const IOC_OPAL_REVERT_TPR: u32 = 1091072226;
++pub const CHIOGVPARAMS: u32 = 2154849043;
++pub const PTP_PEROUT_REQUEST: u32 = 1077427459;
++pub const FSI_SCOM_CHECK: u32 = 2147775232;
++pub const RTC_IRQP_READ: u32 = 2148036619;
++pub const RIO_MPORT_MAINT_READ_LOCAL: u32 = 2149084421;
++pub const HIDIOCGRDESCSIZE: u32 = 2147764225;
++pub const UI_GET_VERSION: u32 = 2147767597;
++pub const NILFS_IOCTL_GET_CPSTAT: u32 = 2149084803;
++pub const CCISS_GETBUSTYPES: u32 = 2147762695;
++pub const VFIO_IOMMU_SPAPR_TCE_CREATE: u32 = 15223;
++pub const VIDIOC_EXPBUF: u32 = 3225441808;
++pub const UI_SET_RELBIT: u32 = 1074025830;
++pub const VFIO_SET_IOMMU: u32 = 15206;
++pub const VIDIOC_S_MODULATOR: u32 = 1078220343;
++pub const TUNGETFILTER: u32 = 2148553947;
++pub const MEYEIOC_SYNC: u32 = 3221518019;
++pub const CCISS_SETNODENAME: u32 = 1074807301;
++pub const FBIO_GETCONTROL2: u32 = 2148025993;
++pub const TUNSETDEBUG: u32 = 1074025673;
++pub const DM_DEV_REMOVE: u32 = 3241737476;
++pub const HIDIOCSUSAGES: u32 = 1344030740;
++pub const FS_IOC_ADD_ENCRYPTION_KEY: u32 = 3226494487;
++pub const FBIOGET_VBLANK: u32 = 2149598738;
++pub const ATM_GETSTAT: u32 = 1074815312;
++pub const VIDIOC_G_JPEGCOMP: u32 = 2156680765;
++pub const TUNATTACHFILTER: u32 = 1074812117;
++pub const UI_SET_ABSBIT: u32 = 1074025831;
++pub const DFL_FPGA_PORT_ERR_GET_IRQ_NUM: u32 = 2147792453;
++pub const USBDEVFS_REAPURB32: u32 = 1074025740;
++pub const BTRFS_IOC_TRANS_END: u32 = 37895;
++pub const CAPI_REGISTER: u32 = 1074545409;
++pub const F2FS_IOC_COMPRESS_FILE: u32 = 62744;
++pub const USBDEVFS_DISCARDURB: u32 = 21771;
++pub const HE_GET_REG: u32 = 1074815328;
++pub const ATM_SETLOOP: u32 = 1074815315;
++pub const ATMSIGD_CTRL: u32 = 25072;
++pub const CIOC_KERNEL_VERSION: u32 = 3221775114;
++pub const BTRFS_IOC_CLONE_RANGE: u32 = 1075876877;
++pub const SNAPSHOT_UNFREEZE: u32 = 13058;
++pub const F2FS_IOC_START_VOLATILE_WRITE: u32 = 62723;
++pub const PMU_IOC_HAS_ADB: u32 = 2148024836;
++pub const I2OGETIOPS: u32 = 2149607680;
++pub const VIDIOC_S_FBUF: u32 = 1076909579;
++pub const PPRCONTROL: u32 = 2147577987;
++pub const CHIOSPICKER: u32 = 1074029317;
++pub const VFIO_IOMMU_SPAPR_REGISTER_MEMORY: u32 = 15221;
++pub const TUNGETSNDBUF: u32 = 2147767507;
++pub const GSMIOC_SETCONF: u32 = 1078740737;
++pub const IOC_PR_PREEMPT: u32 = 1075343563;
++pub const KCOV_INIT_TRACE: u32 = 2148033281;
++pub const SONYPI_IOCGBAT1CAP: u32 = 2147644930;
++pub const SWITCHTEC_IOCTL_FLASH_INFO: u32 = 2148554560;
++pub const MTIOCTOP: u32 = 1074294017;
++pub const VHOST_VDPA_SET_STATUS: u32 = 1073852274;
++pub const VHOST_SCSI_SET_EVENTS_MISSED: u32 = 1074048835;
++pub const VFIO_IOMMU_DIRTY_PAGES: u32 = 15221;
++pub const BTRFS_IOC_SCRUB_PROGRESS: u32 = 3288372253;
++pub const PPPIOCGMRU: u32 = 2147775571;
++pub const BTRFS_IOC_DEV_REPLACE: u32 = 3391657013;
++pub const PPPIOCGFLAGS: u32 = 2147775578;
++pub const NILFS_IOCTL_SET_SUINFO: u32 = 1075342989;
++pub const FW_CDEV_IOC_GET_CYCLE_TIMER2: u32 = 3222807316;
++pub const ATM_DELLECSADDR: u32 = 1074815375;
++pub const FW_CDEV_IOC_GET_SPEED: u32 = 8977;
++pub const PPPIOCGIDLE32: u32 = 2148037695;
++pub const VFIO_DEVICE_RESET: u32 = 15215;
++pub const GPIO_GET_LINEINFO_UNWATCH_IOCTL: u32 = 3221533708;
++pub const WDIOC_GETSTATUS: u32 = 2147768065;
++pub const BTRFS_IOC_SET_FEATURES: u32 = 1076925497;
++pub const IOCTL_MEI_CONNECT_CLIENT: u32 = 3222292481;
++pub const VIDIOC_OMAP3ISP_AEWB_CFG: u32 = 3223344835;
++pub const PCITEST_READ: u32 = 1074286597;
++pub const VFIO_GROUP_GET_STATUS: u32 = 15207;
++pub const MATROXFB_GET_ALL_OUTPUTS: u32 = 2148036347;
++pub const USBDEVFS_CLEAR_HALT: u32 = 2147767573;
++pub const VIDIOC_DECODER_CMD: u32 = 3225966176;
++pub const VIDIOC_G_AUDIO: u32 = 2150913569;
++pub const CCISS_RESCANDISK: u32 = 16912;
++pub const RIO_DISABLE_PORTWRITE_RANGE: u32 = 1074818316;
++pub const IOC_OPAL_SECURE_ERASE_LR: u32 = 1091596519;
++pub const USBDEVFS_REAPURB: u32 = 1074287884;
++pub const DFL_FPGA_CHECK_EXTENSION: u32 = 46593;
++pub const AUTOFS_IOC_PROTOVER: u32 = 2147783523;
++pub const FSL_HV_IOCTL_MEMCPY: u32 = 3223891717;
++pub const BTRFS_IOC_GET_FEATURES: u32 = 2149094457;
++pub const PCITEST_MSIX: u32 = 1074024455;
++pub const BTRFS_IOC_DEFRAG_RANGE: u32 = 1076925456;
++pub const UI_BEGIN_FF_ERASE: u32 = 3222033866;
++pub const DM_GET_TARGET_VERSION: u32 = 3241737489;
++pub const PPPIOCGIDLE: u32 = 2148561983;
++pub const NVRAM_SETCKS: u32 = 28737;
++pub const WDIOC_GETSUPPORT: u32 = 2150127360;
++pub const GSMIOC_ENABLE_NET: u32 = 1077167874;
++pub const GPIO_GET_CHIPINFO_IOCTL: u32 = 2151986177;
++pub const NE_ADD_VCPU: u32 = 3221532193;
++pub const EVIOCSKEYCODE_V2: u32 = 1076380932;
++pub const PTP_SYS_OFFSET_EXTENDED2: u32 = 3300932882;
++pub const SCIF_FENCE_WAIT: u32 = 3221517072;
++pub const RIO_TRANSFER: u32 = 3222826261;
++pub const FSL_HV_IOCTL_DOORBELL: u32 = 3221794566;
++pub const RIO_MPORT_MAINT_WRITE_LOCAL: u32 = 1075342598;
++pub const I2OEVTREG: u32 = 1074555146;
++pub const I2OPARMGET: u32 = 3223873796;
++pub const EVIOCGID: u32 = 2148025602;
++pub const BTRFS_IOC_QGROUP_CREATE: u32 = 1074828330;
++pub const AUTOFS_DEV_IOCTL_SETPIPEFD: u32 = 3222836088;
++pub const VIDIOC_S_PARM: u32 = 3234616854;
++pub const TUNSETSTEERINGEBPF: u32 = 2147767520;
++pub const ATM_GETNAMES: u32 = 1074815363;
++pub const VIDIOC_QUERYMENU: u32 = 3224131109;
++pub const DFL_FPGA_PORT_DMA_UNMAP: u32 = 46660;
++pub const I2OLCTGET: u32 = 3222825218;
++pub const FS_IOC_GET_ENCRYPTION_PWSALT: u32 = 1074816532;
++pub const NS_SETBUFLEV: u32 = 1074815330;
++pub const BLKCLOSEZONE: u32 = 1074795143;
++pub const SONET_GETFRSENSE: u32 = 2147901719;
++pub const UI_SET_EVBIT: u32 = 1074025828;
++pub const DM_LIST_VERSIONS: u32 = 3241737485;
++pub const HIDIOCGSTRING: u32 = 2164541444;
++pub const PPPIOCATTCHAN: u32 = 1074033720;
++pub const VDUSE_DEV_SET_CONFIG: u32 = 1074299154;
++pub const TUNGETFEATURES: u32 = 2147767503;
++pub const VFIO_GROUP_UNSET_CONTAINER: u32 = 15209;
++pub const IPMICTL_SET_MY_ADDRESS_CMD: u32 = 2147772689;
++pub const CCISS_REGNEWDISK: u32 = 1074020877;
++pub const VIDIOC_QUERY_DV_TIMINGS: u32 = 2156156515;
++pub const PHN_SETREGS: u32 = 1076391944;
++pub const FAT_IOCTL_GET_ATTRIBUTES: u32 = 2147774992;
++pub const FSL_MC_SEND_MC_COMMAND: u32 = 3225440992;
++pub const TUNGETIFF: u32 = 2147767506;
++pub const PTP_CLOCK_GETCAPS2: u32 = 2152742154;
++pub const BTRFS_IOC_RESIZE: u32 = 1342215171;
++pub const VHOST_SET_VRING_ENDIAN: u32 = 1074310931;
++pub const PPS_KC_BIND: u32 = 1074294949;
++pub const F2FS_IOC_WRITE_CHECKPOINT: u32 = 62727;
++pub const UI_SET_FFBIT: u32 = 1074025835;
++pub const IPMICTL_GET_MY_LUN_CMD: u32 = 2147772692;
++pub const CEC_ADAP_G_PHYS_ADDR: u32 = 2147639553;
++pub const CEC_G_MODE: u32 = 2147770632;
++pub const USBDEVFS_RESETEP: u32 = 2147767555;
++pub const MEDIA_REQUEST_IOC_QUEUE: u32 = 31872;
++pub const USBDEVFS_ALLOC_STREAMS: u32 = 2148029724;
++pub const MGSL_IOCSXCTRL: u32 = 27925;
++pub const MEDIA_IOC_G_TOPOLOGY: u32 = 3225975812;
++pub const PPPIOCUNBRIDGECHAN: u32 = 29748;
++pub const F2FS_IOC_COMMIT_ATOMIC_WRITE: u32 = 62722;
++pub const ISST_IF_GET_PLATFORM_INFO: u32 = 2148072960;
++pub const SCIF_FENCE_MARK: u32 = 3222303503;
++pub const USBDEVFS_RELEASE_PORT: u32 = 2147767577;
++pub const VFIO_CHECK_EXTENSION: u32 = 15205;
++pub const BTRFS_IOC_QGROUP_LIMIT: u32 = 2150667307;
++pub const FAT_IOCTL_GET_VOLUME_ID: u32 = 2147774995;
++pub const UI_SET_PHYS: u32 = 1074287980;
++pub const FDWERRORGET: u32 = 2150105623;
++pub const VIDIOC_SUBDEV_G_EDID: u32 = 3223868968;
++pub const MGSL_IOCGSTATS: u32 = 27911;
++pub const RPROC_SET_SHUTDOWN_ON_RELEASE: u32 = 1074050817;
++pub const SIOCGSTAMP_NEW: u32 = 2148567302;
++pub const RTC_WKALM_RD: u32 = 2150133776;
++pub const PHN_GET_REG: u32 = 3221778432;
++pub const DELL_WMI_SMBIOS_CMD: u32 = 3224655616;
++pub const PHN_NOT_OH: u32 = 28676;
++pub const PPGETMODES: u32 = 2147774615;
++pub const CHIOGPARAMS: u32 = 2148819718;
++pub const VFIO_DEVICE_GET_GFX_DMABUF: u32 = 15219;
++pub const VHOST_SET_VRING_BUSYLOOP_TIMEOUT: u32 = 1074310947;
++pub const VIDIOC_SUBDEV_G_SELECTION: u32 = 3225441853;
++pub const BTRFS_IOC_RM_DEV_V2: u32 = 1342215226;
++pub const MGSL_IOCWAITGPIO: u32 = 3222301970;
++pub const PMU_IOC_CAN_SLEEP: u32 = 2148024837;
++pub const KCOV_ENABLE: u32 = 25444;
++pub const BTRFS_IOC_CLONE: u32 = 1074041865;
++pub const F2FS_IOC_DEFRAGMENT: u32 = 3222336776;
++pub const FW_CDEV_IOC_DEALLOCATE_ISO_RESOURCE: u32 = 1074012942;
++pub const AGPIOC_ALLOCATE: u32 = 3221766406;
++pub const NE_SET_USER_MEMORY_REGION: u32 = 1075359267;
++pub const MGSL_IOCTXABORT: u32 = 27910;
++pub const MGSL_IOCSGPIO: u32 = 1074818320;
++pub const LIRC_SET_REC_CARRIER: u32 = 1074030868;
++pub const F2FS_IOC_FLUSH_DEVICE: u32 = 1074328842;
++pub const SNAPSHOT_ATOMIC_RESTORE: u32 = 13060;
++pub const RTC_UIE_OFF: u32 = 28676;
++pub const BT_BMC_IOCTL_SMS_ATN: u32 = 45312;
++pub const NVME_IOCTL_ID: u32 = 20032;
++pub const NE_START_ENCLAVE: u32 = 3222318628;
++pub const VIDIOC_STREAMON: u32 = 1074026002;
++pub const FDPOLLDRVSTAT: u32 = 2152727059;
++pub const AUTOFS_DEV_IOCTL_READY: u32 = 3222836086;
++pub const VIDIOC_ENUMAUDOUT: u32 = 3224655426;
++pub const VIDIOC_SUBDEV_S_STD: u32 = 1074288152;
++pub const WDIOC_GETTIMELEFT: u32 = 2147768074;
++pub const ATM_GETLINKRATE: u32 = 1074815361;
++pub const RTC_WKALM_SET: u32 = 1076391951;
++pub const VHOST_GET_BACKEND_FEATURES: u32 = 2148052774;
++pub const ATMARP_ENCAP: u32 = 25061;
++pub const CAPI_GET_FLAGS: u32 = 2147762979;
++pub const IPMICTL_SET_MY_CHANNEL_ADDRESS_CMD: u32 = 2147772696;
++pub const DFL_FPGA_FME_PORT_ASSIGN: u32 = 1074050690;
++pub const NS_GET_OWNER_UID: u32 = 46852;
++pub const VIDIOC_OVERLAY: u32 = 1074025998;
++pub const BTRFS_IOC_WAIT_SYNC: u32 = 1074304022;
++pub const GPIOHANDLE_SET_CONFIG_IOCTL: u32 = 3226776586;
++pub const VHOST_GET_VRING_ENDIAN: u32 = 1074310932;
++pub const ATM_GETADDR: u32 = 1074815366;
++pub const PHN_GET_REGS: u32 = 3221778434;
++pub const AUTOFS_DEV_IOCTL_REQUESTER: u32 = 3222836091;
++pub const AUTOFS_DEV_IOCTL_EXPIRE: u32 = 3222836092;
++pub const SNAPSHOT_S2RAM: u32 = 13067;
++pub const JSIOCSAXMAP: u32 = 1077963313;
++pub const F2FS_IOC_SET_COMPRESS_OPTION: u32 = 1073935638;
++pub const VBG_IOCTL_HGCM_DISCONNECT: u32 = 3223082501;
++pub const SCIF_FENCE_SIGNAL: u32 = 3223876369;
++pub const VFIO_DEVICE_GET_PCI_HOT_RESET_INFO: u32 = 15216;
++pub const VIDIOC_SUBDEV_ENUM_MBUS_CODE: u32 = 3224393218;
++pub const MMTIMER_GETOFFSET: u32 = 27904;
++pub const RIO_CM_CHAN_LISTEN: u32 = 1073898246;
++pub const ATM_SETSC: u32 = 1074029041;
++pub const F2FS_IOC_SHUTDOWN: u32 = 2147768445;
++pub const NVME_IOCTL_RESCAN: u32 = 20038;
++pub const BLKOPENZONE: u32 = 1074795142;
++pub const DM_VERSION: u32 = 3241737472;
++pub const CEC_TRANSMIT: u32 = 3224920325;
++pub const FS_IOC_GET_ENCRYPTION_POLICY_EX: u32 = 3221841430;
++pub const SIOCMKCLIP: u32 = 25056;
++pub const IPMI_BMC_IOCTL_CLEAR_SMS_ATN: u32 = 45313;
++pub const HIDIOCGVERSION: u32 = 2147764225;
++pub const VIDIOC_S_INPUT: u32 = 3221509671;
++pub const VIDIOC_G_CROP: u32 = 3222558267;
++pub const LIRC_SET_WIDEBAND_RECEIVER: u32 = 1074030883;
++pub const EVIOCGEFFECTS: u32 = 2147763588;
++pub const UVCIOC_CTRL_QUERY: u32 = 3222304033;
++pub const IOC_OPAL_GENERIC_TABLE_RW: u32 = 1094217963;
++pub const FS_IOC_READ_VERITY_METADATA: u32 = 3223873159;
++pub const ND_IOCTL_SET_CONFIG_DATA: u32 = 3221769734;
++pub const USBDEVFS_GETDRIVER: u32 = 1090802952;
++pub const IDT77105_GETSTAT: u32 = 1074815282;
++pub const HIDIOCINITREPORT: u32 = 18437;
++pub const VFIO_DEVICE_GET_INFO: u32 = 15211;
++pub const RIO_CM_CHAN_RECEIVE: u32 = 3222299402;
++pub const RNDGETENTCNT: u32 = 2147766784;
++pub const PPPIOCNEWUNIT: u32 = 3221517374;
++pub const BTRFS_IOC_INO_LOOKUP: u32 = 3489698834;
++pub const FDRESET: u32 = 596;
++pub const IOC_PR_REGISTER: u32 = 1075343560;
++pub const HIDIOCSREPORT: u32 = 1074546696;
++pub const TEE_IOC_OPEN_SESSION: u32 = 2148574210;
++pub const TEE_IOC_SUPPL_RECV: u32 = 2148574214;
++pub const BTRFS_IOC_BALANCE_CTL: u32 = 1074041889;
++pub const GPIO_GET_LINEINFO_WATCH_IOCTL: u32 = 3225990155;
++pub const HIDIOCGRAWINFO: u32 = 2148026371;
++pub const PPPIOCSCOMPRESS: u32 = 1074820173;
++pub const USBDEVFS_CONNECTINFO: u32 = 1074287889;
++pub const BLKRESETZONE: u32 = 1074795139;
++pub const CHIOINITELEM: u32 = 25361;
++pub const NILFS_IOCTL_SET_ALLOC_RANGE: u32 = 1074818700;
++pub const AUTOFS_DEV_IOCTL_CATATONIC: u32 = 3222836089;
++pub const RIO_MPORT_MAINT_HDID_SET: u32 = 1073900801;
++pub const PPGETPHASE: u32 = 2147774617;
++pub const USBDEVFS_DISCONNECT_CLAIM: u32 = 2164806939;
++pub const FDMSGON: u32 = 581;
++pub const VIDIOC_G_SLICED_VBI_CAP: u32 = 3228849733;
++pub const BTRFS_IOC_BALANCE_V2: u32 = 3288372256;
++pub const MEDIA_REQUEST_IOC_REINIT: u32 = 31873;
++pub const IOC_OPAL_ERASE_LR: u32 = 1091596518;
++pub const FDFMTBEG: u32 = 583;
++pub const RNDRESEEDCRNG: u32 = 20999;
++pub const ISST_IF_GET_PHY_ID: u32 = 3221814785;
++pub const TUNSETNOCSUM: u32 = 1074025672;
++pub const SONET_GETSTAT: u32 = 2149867792;
++pub const TFD_IOC_SET_TICKS: u32 = 1074287616;
++pub const PPDATADIR: u32 = 1074032784;
++pub const IOC_OPAL_ENABLE_DISABLE_MBR: u32 = 1091596517;
++pub const GPIO_V2_GET_LINE_IOCTL: u32 = 3260068871;
++pub const RIO_CM_CHAN_SEND: u32 = 1074815753;
++pub const PPWCTLONIRQ: u32 = 1073836178;
++pub const SONYPI_IOCGBRT: u32 = 2147579392;
++pub const IOC_PR_RELEASE: u32 = 1074819274;
++pub const PPCLRIRQ: u32 = 2147774611;
++pub const IPMICTL_SET_MY_CHANNEL_LUN_CMD: u32 = 2147772698;
++pub const MGSL_IOCSXSYNC: u32 = 27923;
++pub const HPET_IE_OFF: u32 = 26626;
++pub const IOC_OPAL_ACTIVATE_USR: u32 = 1091596513;
++pub const SONET_SETFRAMING: u32 = 1074028821;
++pub const PERF_EVENT_IOC_PAUSE_OUTPUT: u32 = 1074013193;
++pub const BTRFS_IOC_LOGICAL_INO_V2: u32 = 3224933435;
++pub const VBG_IOCTL_HGCM_CONNECT: u32 = 3231471108;
++pub const BLKFINISHZONE: u32 = 1074795144;
++pub const EVIOCREVOKE: u32 = 1074021777;
++pub const VFIO_DEVICE_FEATURE: u32 = 15221;
++pub const CCISS_GETPCIINFO: u32 = 2148024833;
++pub const ISST_IF_MBOX_COMMAND: u32 = 3221814787;
++pub const SCIF_ACCEPTREQ: u32 = 3222303492;
++pub const PERF_EVENT_IOC_QUERY_BPF: u32 = 3221758986;
++pub const VIDIOC_STREAMOFF: u32 = 1074026003;
++pub const VDUSE_DESTROY_DEV: u32 = 1090552067;
++pub const FDGETFDCSTAT: u32 = 2150105621;
++pub const CM_IOCGATR: u32 = 3221775105;
++pub const VIDIOC_S_PRIORITY: u32 = 1074026052;
++pub const SNAPSHOT_FREEZE: u32 = 13057;
++pub const VIDIOC_ENUMINPUT: u32 = 3226490394;
++pub const ZATM_GETPOOLZ: u32 = 1074815330;
++pub const RIO_DISABLE_DOORBELL_RANGE: u32 = 1074294026;
++pub const GPIO_V2_GET_LINEINFO_WATCH_IOCTL: u32 = 3238048774;
++pub const VIDIOC_G_STD: u32 = 2148029975;
++pub const USBDEVFS_ALLOW_SUSPEND: u32 = 21794;
++pub const SONET_GETSTATZ: u32 = 2149867793;
++pub const SCIF_ACCEPTREG: u32 = 3221779205;
++pub const VIDIOC_ENCODER_CMD: u32 = 3223869005;
++pub const PPPIOCSRASYNCMAP: u32 = 1074033748;
++pub const IOCTL_MEI_NOTIFY_SET: u32 = 1074022402;
++pub const BTRFS_IOC_QUOTA_RESCAN_STATUS: u32 = 2151715885;
++pub const F2FS_IOC_GARBAGE_COLLECT: u32 = 1074066694;
++pub const ATMLEC_CTRL: u32 = 25040;
++pub const MATROXFB_GET_AVAILABLE_OUTPUTS: u32 = 2148036345;
++pub const DM_DEV_CREATE: u32 = 3241737475;
++pub const VHOST_VDPA_GET_VRING_NUM: u32 = 2147659638;
++pub const VIDIOC_G_CTRL: u32 = 3221771803;
++pub const NBD_CLEAR_SOCK: u32 = 43780;
++pub const VFIO_DEVICE_QUERY_GFX_PLANE: u32 = 15218;
++pub const WDIOC_KEEPALIVE: u32 = 2147768069;
++pub const NVME_IOCTL_SUBSYS_RESET: u32 = 20037;
++pub const PTP_EXTTS_REQUEST2: u32 = 1074806027;
++pub const PCITEST_BAR: u32 = 20481;
++pub const MGSL_IOCGGPIO: u32 = 2148560145;
++pub const EVIOCSREP: u32 = 1074283779;
++pub const VFIO_DEVICE_GET_IRQ_INFO: u32 = 15213;
++pub const HPET_DPI: u32 = 26629;
++pub const VDUSE_VQ_SETUP_KICKFD: u32 = 1074299158;
++pub const ND_IOCTL_CALL: u32 = 3225439754;
++pub const HIDIOCGDEVINFO: u32 = 2149337091;
++pub const DM_TABLE_DEPS: u32 = 3241737483;
++pub const BTRFS_IOC_DEV_INFO: u32 = 3489698846;
++pub const VDUSE_IOTLB_GET_FD: u32 = 3223355664;
++pub const FW_CDEV_IOC_GET_INFO: u32 = 3223855872;
++pub const VIDIOC_G_PRIORITY: u32 = 2147767875;
++pub const ATM_NEWBACKENDIF: u32 = 1073897971;
++pub const VIDIOC_S_EXT_CTRLS: u32 = 3223344712;
++pub const VIDIOC_SUBDEV_ENUM_DV_TIMINGS: u32 = 3230946914;
++pub const VIDIOC_OMAP3ISP_CCDC_CFG: u32 = 3224917697;
++pub const VIDIOC_S_HW_FREQ_SEEK: u32 = 1076909650;
++pub const DM_TABLE_LOAD: u32 = 3241737481;
++pub const F2FS_IOC_START_ATOMIC_WRITE: u32 = 62721;
++pub const VIDIOC_G_OUTPUT: u32 = 2147767854;
++pub const ATM_DROPPARTY: u32 = 1074029045;
++pub const CHIOGELEM: u32 = 1080845072;
++pub const BTRFS_IOC_GET_SUPPORTED_FEATURES: u32 = 2152240185;
++pub const EVIOCSKEYCODE: u32 = 1074283780;
++pub const NE_GET_IMAGE_LOAD_INFO: u32 = 3222318626;
++pub const TUNSETLINK: u32 = 1074025677;
++pub const FW_CDEV_IOC_ADD_DESCRIPTOR: u32 = 3222807302;
++pub const BTRFS_IOC_SCRUB_CANCEL: u32 = 37916;
++pub const PPS_SETPARAMS: u32 = 1074294946;
++pub const IOC_OPAL_LR_SETUP: u32 = 1093169379;
++pub const FW_CDEV_IOC_DEALLOCATE: u32 = 1074012931;
++pub const WDIOC_SETTIMEOUT: u32 = 3221509894;
++pub const IOC_WATCH_QUEUE_SET_FILTER: u32 = 22369;
++pub const CAPI_GET_MANUFACTURER: u32 = 3221504774;
++pub const VFIO_IOMMU_SPAPR_UNREGISTER_MEMORY: u32 = 15222;
++pub const ASPEED_P2A_CTRL_IOCTL_SET_WINDOW: u32 = 1074836224;
++pub const VIDIOC_G_EDID: u32 = 3223868968;
++pub const F2FS_IOC_GARBAGE_COLLECT_RANGE: u32 = 1075377419;
++pub const RIO_MAP_INBOUND: u32 = 3223874833;
++pub const IOC_OPAL_TAKE_OWNERSHIP: u32 = 1091072222;
++pub const USBDEVFS_CLAIM_PORT: u32 = 2147767576;
++pub const VIDIOC_S_AUDIO: u32 = 1077171746;
++pub const FS_IOC_GET_ENCRYPTION_NONCE: u32 = 2148558363;
++pub const FW_CDEV_IOC_SEND_STREAM_PACKET: u32 = 1076372243;
++pub const BTRFS_IOC_SNAP_DESTROY: u32 = 1342215183;
++pub const SNAPSHOT_FREE: u32 = 13061;
++pub const I8K_GET_SPEED: u32 = 3221776773;
++pub const HIDIOCGREPORT: u32 = 1074546695;
++pub const HPET_EPI: u32 = 26628;
++pub const JSIOCSCORR: u32 = 1076128289;
++pub const IOC_PR_PREEMPT_ABORT: u32 = 1075343564;
++pub const RIO_MAP_OUTBOUND: u32 = 3223874831;
++pub const ATM_SETESI: u32 = 1074815372;
++pub const FW_CDEV_IOC_START_ISO: u32 = 1074799370;
++pub const ATM_DELADDR: u32 = 1074815369;
++pub const PPFCONTROL: u32 = 1073901710;
++pub const SONYPI_IOCGFAN: u32 = 2147579402;
++pub const RTC_IRQP_SET: u32 = 1074294796;
++pub const PCITEST_WRITE: u32 = 1074286596;
++pub const PPCLAIM: u32 = 28811;
++pub const VIDIOC_S_JPEGCOMP: u32 = 1082938942;
++pub const IPMICTL_UNREGISTER_FOR_CMD: u32 = 2147641615;
++pub const VHOST_SET_FEATURES: u32 = 1074310912;
++pub const TOSHIBA_ACPI_SCI: u32 = 3222828177;
++pub const VIDIOC_DQBUF: u32 = 3227014673;
++pub const BTRFS_IOC_BALANCE_PROGRESS: u32 = 2214630434;
++pub const BTRFS_IOC_SUBVOL_SETFLAGS: u32 = 1074304026;
++pub const ATMLEC_MCAST: u32 = 25042;
++pub const MMTIMER_GETFREQ: u32 = 2148035842;
++pub const VIDIOC_G_SELECTION: u32 = 3225441886;
++pub const RTC_ALM_SET: u32 = 1076129799;
++pub const PPPOEIOCSFWD: u32 = 1074311424;
++pub const IPMICTL_GET_MAINTENANCE_MODE_CMD: u32 = 2147772702;
++pub const FS_IOC_ENABLE_VERITY: u32 = 1082156677;
++pub const NILFS_IOCTL_GET_BDESCS: u32 = 3222826631;
++pub const FDFMTEND: u32 = 585;
++pub const DMA_BUF_SET_NAME: u32 = 1074291201;
++pub const UI_BEGIN_FF_UPLOAD: u32 = 3228063176;
++pub const RTC_UIE_ON: u32 = 28675;
++pub const PPRELEASE: u32 = 28812;
++pub const VFIO_IOMMU_UNMAP_DMA: u32 = 15218;
++pub const VIDIOC_OMAP3ISP_PRV_CFG: u32 = 3228587714;
++pub const GPIO_GET_LINEHANDLE_IOCTL: u32 = 3245126659;
++pub const VFAT_IOCTL_READDIR_BOTH: u32 = 2184212993;
++pub const NVME_IOCTL_ADMIN_CMD: u32 = 3225964097;
++pub const VHOST_SET_VRING_KICK: u32 = 1074310944;
++pub const BTRFS_IOC_SUBVOL_CREATE_V2: u32 = 1342215192;
++pub const BTRFS_IOC_SNAP_CREATE: u32 = 1342215169;
++pub const SONYPI_IOCGBAT2CAP: u32 = 2147644932;
++pub const PPNEGOT: u32 = 1074032785;
++pub const NBD_PRINT_DEBUG: u32 = 43782;
++pub const BTRFS_IOC_INO_LOOKUP_USER: u32 = 3489698878;
++pub const BTRFS_IOC_GET_SUBVOL_ROOTREF: u32 = 3489698877;
++pub const FS_IOC_REMOVE_ENCRYPTION_KEY_ALL_USERS: u32 = 3225445913;
++pub const BTRFS_IOC_FS_INFO: u32 = 2214630431;
++pub const VIDIOC_ENUM_FMT: u32 = 3225441794;
++pub const VIDIOC_G_INPUT: u32 = 2147767846;
++pub const VTPM_PROXY_IOC_NEW_DEV: u32 = 3222577408;
++pub const DFL_FPGA_FME_ERR_GET_IRQ_NUM: u32 = 2147792515;
++pub const ND_IOCTL_DIMM_FLAGS: u32 = 3221769731;
++pub const BTRFS_IOC_QUOTA_RESCAN: u32 = 1077974060;
++pub const MMTIMER_GETCOUNTER: u32 = 2148035849;
++pub const MATROXFB_GET_OUTPUT_MODE: u32 = 3221778170;
++pub const BTRFS_IOC_QUOTA_RESCAN_WAIT: u32 = 37934;
++pub const RIO_CM_CHAN_BIND: u32 = 1074291461;
++pub const HIDIOCGRDESC: u32 = 2416199682;
++pub const MGSL_IOCGIF: u32 = 27915;
++pub const VIDIOC_S_OUTPUT: u32 = 3221509679;
++pub const HIDIOCGREPORTINFO: u32 = 3222030345;
++pub const WDIOC_GETBOOTSTATUS: u32 = 2147768066;
++pub const VDUSE_VQ_GET_INFO: u32 = 3224404245;
++pub const ACRN_IOCTL_ASSIGN_PCIDEV: u32 = 1076142677;
++pub const BLKGETDISKSEQ: u32 = 2148012672;
++pub const ACRN_IOCTL_PM_GET_CPU_STATE: u32 = 3221791328;
++pub const ACRN_IOCTL_DESTROY_VM: u32 = 41489;
++pub const ACRN_IOCTL_SET_PTDEV_INTR: u32 = 1075094099;
++pub const ACRN_IOCTL_CREATE_IOREQ_CLIENT: u32 = 41522;
++pub const ACRN_IOCTL_IRQFD: u32 = 1075356273;
++pub const ACRN_IOCTL_CREATE_VM: u32 = 3224412688;
++pub const ACRN_IOCTL_INJECT_MSI: u32 = 1074831907;
++pub const ACRN_IOCTL_ATTACH_IOREQ_CLIENT: u32 = 41523;
++pub const ACRN_IOCTL_RESET_PTDEV_INTR: u32 = 1075094100;
++pub const ACRN_IOCTL_NOTIFY_REQUEST_FINISH: u32 = 1074307633;
++pub const ACRN_IOCTL_SET_IRQLINE: u32 = 1074307621;
++pub const ACRN_IOCTL_START_VM: u32 = 41490;
++pub const ACRN_IOCTL_SET_VCPU_REGS: u32 = 1093181974;
++pub const ACRN_IOCTL_SET_MEMSEG: u32 = 1075880513;
++pub const ACRN_IOCTL_PAUSE_VM: u32 = 41491;
++pub const ACRN_IOCTL_CLEAR_VM_IOREQ: u32 = 41525;
++pub const ACRN_IOCTL_UNSET_MEMSEG: u32 = 1075880514;
++pub const ACRN_IOCTL_IOEVENTFD: u32 = 1075880560;
++pub const ACRN_IOCTL_DEASSIGN_PCIDEV: u32 = 1076142678;
++pub const ACRN_IOCTL_RESET_VM: u32 = 41493;
++pub const ACRN_IOCTL_DESTROY_IOREQ_CLIENT: u32 = 41524;
++pub const ACRN_IOCTL_VM_INTR_MONITOR: u32 = 1074307620;
+--- a/vendor/linux-raw-sys/.cargo-checksum.json
++++ b/vendor/linux-raw-sys/.cargo-checksum.json
+@@ -1 +1 @@
+-{"files":{"CODE_OF_CONDUCT.md":"ccd9e538ce44713a2486cc8e4c01b8c8b846d1ccff15de728d8d6fa9a7f846cd","COPYRIGHT":"3290ae0fbc9ddb77d2239121d710f0bb9d31b3b4744e6d97fe01e652b4c1870b","Cargo.toml":"0d02ea4bc018631a52900423cb7466bc905571bd1d767769739bde626360c123","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-Apache-2.0_WITH_LLVM-exception":"268872b9816f90fd8e85db5a28d33f8150ebb8dd016653fb39ef1f94f2686bc5","LICENSE-MIT":"23f18e03dc49df91622fe2a76176497404e46ced8a715d9d2b67a7446571cca3","ORG_CODE_OF_CONDUCT.md":"a62b69bf86e605ee1bcbb2f0a12ba79e4cebb6983a7b6491949750aecc4f2178","README.md":"3d4ae2ec9429adfd329b4506f7ca6faa84dca30d1c3b416f8f4e6f57bdb3aa96","src/aarch64/errno.rs":"db283e5ea7fb63e0f57acb241e6471a951b4218660c01ca37b80be9a53629b92","src/aarch64/general.rs":"0cc1ed00b1ef7efb16a7109138acdf679d591325458938cee150b221af063822","src/aarch64/if_ether.rs":"f558bfec1f5e80235db79f507e4afd3120647a5caddf17a87b7ad5540e5c1462","src/aarch64/io_uring.rs":"9ce4c9005b5b99e041b9d58be6866367605e4bee1e30a7096235cb234ed73e1b","src/aarch64/ioctl.rs":"f83af7f7cab6611c9f316b2289998d3276d844ac8f96111bc65f6c3340c3301a","src/aarch64/net.rs":"9c5fe44f91072b08899a6304cb22a51545034adde444740b67b8b310f3906a30","src/aarch64/netlink.rs":"f6642310b8bd37b5775ba1a52cfe638beddd7c880f8dfacab62c3180255fb75f","src/aarch64/prctl.rs":"a80e2f89aa978210d980538e1d9c5b922bb073d118ee51b1f14c1f397c7cd576","src/aarch64/system.rs":"851a95c0d1b76782970a28c373a313954eea0db35f6ebce562e6047095f587c4","src/arm/errno.rs":"db283e5ea7fb63e0f57acb241e6471a951b4218660c01ca37b80be9a53629b92","src/arm/general.rs":"bdbb40ba5630044c9555fe38a3640638f3ff7f437d94f82428e2bedad32e180f","src/arm/if_ether.rs":"3873beb1f57bdcc52da345608c5deda5937d7b92b114b414ce6161784173b925","src/arm/io_uring.rs":"5e2e254d5853ed30568e17fab184f8cc458607465d19dcee6c87ca1b565d6d2d","src/arm/ioctl.rs":"54cb9bdc5fe4ee7cff30e087bb0d04f1d811b55be7604bb056387f859120935a","src/arm/net.rs":"c79f443ab6585f81a71eff3350700ece82098264206eb79f78714153d6e43529","src/arm/netlink.rs":"2f98b8c0349c2d3d538164e4d06c28fa1fb9308f7691aa097526ce2fb3ac176f","src/arm/prctl.rs":"5477c74fec958d9f9d2028f58e95657840ead28445956e53fe0894e45922f8a5","src/arm/system.rs":"9aca94a4a553e09e6d2d43741fb8c120c05a193822e850951dc469bbd89cc703","src/csky/errno.rs":"db283e5ea7fb63e0f57acb241e6471a951b4218660c01ca37b80be9a53629b92","src/csky/general.rs":"94aba82d2a76a9ca272b4766f8d2f5f14e10bc2166e03d695a5b8b5d207e6755","src/csky/if_ether.rs":"aac7a069000195b9b18c6d25b2d7c338efb1062347a84e31cda7c93de374b3c6","src/csky/io_uring.rs":"bb7541d82f627cb681ea11f4e5c4162b0d0616ca08fcc6d31bcf22ef813f754d","src/csky/ioctl.rs":"77f30e67f9a4748bc31184454a0ff45fd821c3536cd98dc2411def8eeb59a268","src/csky/net.rs":"2796ca3744e82cecd816b4d1b1fba0bcb389fbc4603e0af7b6cdc562c666b512","src/csky/netlink.rs":"9fc99e88df0efc15d39b397c9d653c32b4729e31a2434e730435fbdfcb362da4","src/csky/prctl.rs":"8df2f33387d5b4865d9417f796cc4d43b3cf549cfa41d7ce7d41dd367ed8b6c9","src/csky/system.rs":"a2913685b359e60bbfae413b89a6fd37ea38fa448f980ffadef42d227b33ada5","src/elf.rs":"441bcf378b5603f957e053369d18d95c1efe981665846c220bca82123c04843a","src/lib.rs":"f3399ba35206764ac4e2d755cfc7e0c580464b8fcc76968f6c4b9f64523a1d02","src/loongarch64/errno.rs":"db283e5ea7fb63e0f57acb241e6471a951b4218660c01ca37b80be9a53629b92","src/loongarch64/general.rs":"fadd5884c6ad645300540dd30150984857819a986772fca4eba1e1fc0c3f4f1e","src/loongarch64/if_ether.rs":"a7b0918afc9dbf8e79490f83a809e4bd563af20ab2b3432d58b763ff31418daf","src/loongarch64/io_uring.rs":"ef851913c86bc2d629048e781edc76b59c63e659f8bd4bcd7fa7ff982186ed6d","src/loongarch64/ioctl.rs":"6f631e0ac37f2c2398168683a1523d1559b85369ffe4cdcc284ffe7455672c18","src/loongarch64/net.rs":"21add3f1bfa5898d9a5ef91342f6cd94ad4585df6525b9a4cf6cdf743345a9c7","src/loongarch64/netlink.rs":"789357343b94276e6f8fc8f3a32f06f3d3efd464c7850b2568dc609a49d1f305","src/loongarch64/prctl.rs":"624e4d3b8f3b4f1d6dcb1540c124ed03ce47f0580aa3425c0baed431dcd267ea","src/loongarch64/system.rs":"4cfcd4c28481b9ff5fc0330d5c713d637d3615c0184830d3a4391882eaed68f5","src/mips/errno.rs":"9ab1ed2bf371a27401745c2a6f9692f7e66dfd481d032cb40a354826d394ea8e","src/mips/general.rs":"1c16f4e0a015eaf27e9a30f4b74ec824220798f47c535d3aaf322ad799c054ab","src/mips/if_ether.rs":"1ecdc654cb3eda633bb7192829b1001a0a5ae5a126110b4208bbee5b41f87f5e","src/mips/io_uring.rs":"c69479bb1f862febe78e0989745c202146aaef7e8ceaa0d7bf291af584748992","src/mips/ioctl.rs":"f7464d2cd0548ab4735a1ca8deba47f13ea0946a9bf86adfec1a77feb3a0f475","src/mips/net.rs":"843204e91d8d6c34ddf4d29e1e822980b3e058ff7fc55ecc5f2b0fdf4a72825d","src/mips/netlink.rs":"5045dece259abaa5b5974500a20a9325e4cd6eb87c344dd7448056dff469994f","src/mips/prctl.rs":"cd59615adf6f561ef6c542df59cf25f75877693ca1c4c489f5e460eb7e798d4d","src/mips/system.rs":"acbc69a1fe7299860f8bd142bc2ff5595a410e1528994139062fc680d1a87e71","src/mips32r6/errno.rs":"9ab1ed2bf371a27401745c2a6f9692f7e66dfd481d032cb40a354826d394ea8e","src/mips32r6/general.rs":"1c16f4e0a015eaf27e9a30f4b74ec824220798f47c535d3aaf322ad799c054ab","src/mips32r6/if_ether.rs":"1ecdc654cb3eda633bb7192829b1001a0a5ae5a126110b4208bbee5b41f87f5e","src/mips32r6/io_uring.rs":"c69479bb1f862febe78e0989745c202146aaef7e8ceaa0d7bf291af584748992","src/mips32r6/ioctl.rs":"f7464d2cd0548ab4735a1ca8deba47f13ea0946a9bf86adfec1a77feb3a0f475","src/mips32r6/net.rs":"843204e91d8d6c34ddf4d29e1e822980b3e058ff7fc55ecc5f2b0fdf4a72825d","src/mips32r6/netlink.rs":"5045dece259abaa5b5974500a20a9325e4cd6eb87c344dd7448056dff469994f","src/mips32r6/prctl.rs":"cd59615adf6f561ef6c542df59cf25f75877693ca1c4c489f5e460eb7e798d4d","src/mips32r6/system.rs":"acbc69a1fe7299860f8bd142bc2ff5595a410e1528994139062fc680d1a87e71","src/mips64/errno.rs":"9ab1ed2bf371a27401745c2a6f9692f7e66dfd481d032cb40a354826d394ea8e","src/mips64/general.rs":"700b52614c604ce58aa8a5bb1ebd46221c0106ec2d8d6600d681d1594a89118a","src/mips64/if_ether.rs":"6d24518acba5bc8fa2a775d039ed9594de8349d6028621134626ccaa8ea18bcd","src/mips64/io_uring.rs":"18bd5d96c53aff44f4a37a71f16fa12c11b7c934b395bf841e47d98283146a50","src/mips64/ioctl.rs":"39f3276ec6f36ba78f5635bd418310b21bc228902d4da889fa01094c26f9b57d","src/mips64/net.rs":"e641379a7004ded90413b084504468148ecdbea4f753cbca83a4d22698cd7bab","src/mips64/netlink.rs":"18f139573337be18fb9bd56de4d0bd8af5c80828cdf68ef68751c18dcb39c761","src/mips64/prctl.rs":"a8d9ae240ce582d7b66e13937b99df73c7247ce9f69bfdd12109903d7c6dfb0e","src/mips64/system.rs":"95805c533a8eb8f53c28f138e6a2d12064681cc9db55d3042ee748c655a5d7cd","src/mips64r6/errno.rs":"9ab1ed2bf371a27401745c2a6f9692f7e66dfd481d032cb40a354826d394ea8e","src/mips64r6/general.rs":"700b52614c604ce58aa8a5bb1ebd46221c0106ec2d8d6600d681d1594a89118a","src/mips64r6/if_ether.rs":"6d24518acba5bc8fa2a775d039ed9594de8349d6028621134626ccaa8ea18bcd","src/mips64r6/io_uring.rs":"18bd5d96c53aff44f4a37a71f16fa12c11b7c934b395bf841e47d98283146a50","src/mips64r6/ioctl.rs":"39f3276ec6f36ba78f5635bd418310b21bc228902d4da889fa01094c26f9b57d","src/mips64r6/net.rs":"e641379a7004ded90413b084504468148ecdbea4f753cbca83a4d22698cd7bab","src/mips64r6/netlink.rs":"18f139573337be18fb9bd56de4d0bd8af5c80828cdf68ef68751c18dcb39c761","src/mips64r6/prctl.rs":"a8d9ae240ce582d7b66e13937b99df73c7247ce9f69bfdd12109903d7c6dfb0e","src/mips64r6/system.rs":"95805c533a8eb8f53c28f138e6a2d12064681cc9db55d3042ee748c655a5d7cd","src/powerpc/errno.rs":"db283e5ea7fb63e0f57acb241e6471a951b4218660c01ca37b80be9a53629b92","src/powerpc/general.rs":"7940ad742585d501f4742af51c6310ecf651863a8aaa0305193e9c8ee07aca90","src/powerpc/if_ether.rs":"56ab5fbf8c9d0dd0654412fabdca6759cbb421a876e5cebe2fec10e25728b55c","src/powerpc/io_uring.rs":"38ead79180c96e8af69929dacbc36e8cfbccec96875c61f094e531e6860639db","src/powerpc/ioctl.rs":"513a1dda33b2cd6c57212263c1bf0d8d443e54af6efe75e938fd700c1ca2b630","src/powerpc/net.rs":"b0228b15161fb750596d6e7e8e612381d9cf2acfed208715b93e8bb4b3340dc7","src/powerpc/netlink.rs":"4f10870fbd23de17efd4cba6d5a40ba85564effc5d0b71f9d7f928a42cb77b20","src/powerpc/prctl.rs":"f236987bc8ae993ab2b0e88b2eacbae51eee4da170ac075552070a76c717d803","src/powerpc/system.rs":"3e01e5e145adb87263f04c9e053faac0c0758689fd32917d2c7c2eec1d89f9e6","src/powerpc64/errno.rs":"db283e5ea7fb63e0f57acb241e6471a951b4218660c01ca37b80be9a53629b92","src/powerpc64/general.rs":"7862eb3dfac845331b624b3322bdeb3441bb86e914719e125dd5dc7b3860db43","src/powerpc64/if_ether.rs":"15e12d56b1367cb5c9d8fd1b9995acf1429a923bf603698bd732efe1acc30cd6","src/powerpc64/io_uring.rs":"50018e504d78c584beaec3b1a6d477d30d566d25cb1a40b0a2adfb23d726674f","src/powerpc64/ioctl.rs":"513a1dda33b2cd6c57212263c1bf0d8d443e54af6efe75e938fd700c1ca2b630","src/powerpc64/net.rs":"0a1468b8a270470a619e492fd6c50c59ea9eeaccd1c7827a0a0d55a0ae981cd4","src/powerpc64/netlink.rs":"352a06995f7de6e4fc1c2ff54c1eccb93a3d833a923e834ec75c6a4d30ea5051","src/powerpc64/prctl.rs":"1fcccc5b6dac472a30b7701b5c9e436c238410077cfe158d600b51d55bcc074a","src/powerpc64/system.rs":"500116b7613a504ebad028cec1ba37323310e29864d885ae62191ccb1f86d333","src/riscv32/errno.rs":"db283e5ea7fb63e0f57acb241e6471a951b4218660c01ca37b80be9a53629b92","src/riscv32/general.rs":"16ffe747a8f75ac6594dc7afddef4fbb89735da50af9f19f152fd2f04b509463","src/riscv32/if_ether.rs":"aac7a069000195b9b18c6d25b2d7c338efb1062347a84e31cda7c93de374b3c6","src/riscv32/io_uring.rs":"5c67dde8fdb3641407d4ebd744171606b82596176343fe6766afd3220eda2ab1","src/riscv32/ioctl.rs":"b2a4c7d649b18a4d2a1142400de687d48c195dcf8a86b58ac261413c33809d7b","src/riscv32/net.rs":"2796ca3744e82cecd816b4d1b1fba0bcb389fbc4603e0af7b6cdc562c666b512","src/riscv32/netlink.rs":"9fc99e88df0efc15d39b397c9d653c32b4729e31a2434e730435fbdfcb362da4","src/riscv32/prctl.rs":"8df2f33387d5b4865d9417f796cc4d43b3cf549cfa41d7ce7d41dd367ed8b6c9","src/riscv32/system.rs":"a2913685b359e60bbfae413b89a6fd37ea38fa448f980ffadef42d227b33ada5","src/riscv64/errno.rs":"db283e5ea7fb63e0f57acb241e6471a951b4218660c01ca37b80be9a53629b92","src/riscv64/general.rs":"8ec4907ac81b62e21451731a468e5f4fe7a37a46d80c56b33e0969d2d059d442","src/riscv64/if_ether.rs":"a7b0918afc9dbf8e79490f83a809e4bd563af20ab2b3432d58b763ff31418daf","src/riscv64/io_uring.rs":"ef851913c86bc2d629048e781edc76b59c63e659f8bd4bcd7fa7ff982186ed6d","src/riscv64/ioctl.rs":"6f631e0ac37f2c2398168683a1523d1559b85369ffe4cdcc284ffe7455672c18","src/riscv64/net.rs":"21add3f1bfa5898d9a5ef91342f6cd94ad4585df6525b9a4cf6cdf743345a9c7","src/riscv64/netlink.rs":"789357343b94276e6f8fc8f3a32f06f3d3efd464c7850b2568dc609a49d1f305","src/riscv64/prctl.rs":"624e4d3b8f3b4f1d6dcb1540c124ed03ce47f0580aa3425c0baed431dcd267ea","src/riscv64/system.rs":"4cfcd4c28481b9ff5fc0330d5c713d637d3615c0184830d3a4391882eaed68f5","src/s390x/errno.rs":"db283e5ea7fb63e0f57acb241e6471a951b4218660c01ca37b80be9a53629b92","src/s390x/general.rs":"687f6e3d4502d31aaec08a41335b6a368f0949049875d939065a66df86e06b8a","src/s390x/if_ether.rs":"2a6b2132be1aebe126eb64ea230a82c2ab18433ece37226c9dacd81392ad6ca2","src/s390x/io_uring.rs":"57790b4ce46a4a2a87a329ff935033dfb430117bcc2154fa066c5d681f115319","src/s390x/ioctl.rs":"f806407c91520bc994c7cd638576686710700a4cfa30c8ca3f81d55ce1d6ecdd","src/s390x/net.rs":"3f89cf2040475eb446daa3f54b5cd016ca8d2d3966f02efb3f0f2f425ccc7adb","src/s390x/netlink.rs":"c3c11102a55d15dfb534d748c8bc8ece58c9d56170b6afe07d1f2cc783fd6372","src/s390x/prctl.rs":"6fe9f5349f9a694b391f309f208235c1d4669430d3f051ef64467576e91df7a1","src/s390x/system.rs":"f68e8afbd02bc87a8fec2cae4e367f296873f2c1a651032da4ccc4f1c0ed4670","src/sparc/errno.rs":"b3c5e1a6c8378d613c6ef3dd44e2199ea7455da65e83d6bb19582a79de880f80","src/sparc/general.rs":"a38a371e8ceeecc4384999b4cc8ee6d65a35a20542c96c62e04c24b2b2953858","src/sparc/if_ether.rs":"c574c6d1ea11a8bb213b18928fe2d342c897d8ff04458991a5192eb401fbbc57","src/sparc/io_uring.rs":"d813aabc93f383c8522773ed431209627d810c78bcdb4dae931a702aae55e09f","src/sparc/ioctl.rs":"b2a4c7d649b18a4d2a1142400de687d48c195dcf8a86b58ac261413c33809d7b","src/sparc/net.rs":"00115a0deec75249c34bf61c76d4a95181be21ac3a1b5200875186d5ea501dac","src/sparc/netlink.rs":"0f176c24de7b2b19ac16dd95625947b8047d48bc7b871e12a3acd31f8e97c6f5","src/sparc/prctl.rs":"5f5686fe04d8cc5e746d789340331095044f244c123ecf1468dcb089bbb931cd","src/sparc/system.rs":"6f1f8ce9978693845621880031ac4805f00027315a691cfe1d466703c7a96e8d","src/sparc64/errno.rs":"b3c5e1a6c8378d613c6ef3dd44e2199ea7455da65e83d6bb19582a79de880f80","src/sparc64/general.rs":"d4138f0e663833811365fdc2e1008525df0f21044b5e31ab80d97d63fcb9f45f","src/sparc64/if_ether.rs":"e51661cb54922c3c01853617a61b3d141768adf5b94162c4aefb326d31ef55f5","src/sparc64/io_uring.rs":"688f4678c259eb940aa0216c0183843d436b918d214e13bed56a38ddb55132db","src/sparc64/ioctl.rs":"b2a4c7d649b18a4d2a1142400de687d48c195dcf8a86b58ac261413c33809d7b","src/sparc64/net.rs":"e1b51c9efe4a00b645100f2b4aa6f2349a50627521fa020370b1bf8ab0b3a575","src/sparc64/netlink.rs":"7cec2a77af7ec17cad96d63027fd4a9164f0e6503f2e6cc2a99e79aa1bba3147","src/sparc64/prctl.rs":"ff897312b99274ce4eefecccc9797e34accc69efce1299712ab631e9542cb8ca","src/sparc64/system.rs":"becb2e706ed6df77860a2a3884b28181dca606db22fcd604a8a1803f937e8baf","src/x32/errno.rs":"db283e5ea7fb63e0f57acb241e6471a951b4218660c01ca37b80be9a53629b92","src/x32/general.rs":"c2aeaf3096d559a86d3f72e0ec4a91245edfce35e85c51b50d13e115ec357974","src/x32/if_ether.rs":"859047581be4a3ae923b88516e86230a4f9a447a5203285dd8bcc539038c5c4f","src/x32/io_uring.rs":"40834d29cf819e43a23f9a2e075e2ea0c001be817f7200aa059c24eae6d29123","src/x32/ioctl.rs":"085227ec906237903b5e8b2e90b80fd078ddb4f9e44bbd6c9bee54ea7d5d6b1a","src/x32/net.rs":"e2d77e5bbae1fb298e48382204db55e4e49de2039369e3b2b77097029006c9d1","src/x32/netlink.rs":"f74042f6cf0eabccfa6b8008a89ee7ab347fb7eaa89499757a786b13b2403982","src/x32/prctl.rs":"2d1eb6873dc5b72a8252f12e695d98a045e01026f730302a6515f53a394bba7c","src/x32/system.rs":"69c125aea6b21f2c4981f3386602283a691524671fef1da7d67de3b67fffdc42","src/x86/errno.rs":"db283e5ea7fb63e0f57acb241e6471a951b4218660c01ca37b80be9a53629b92","src/x86/general.rs":"a9b002ea136dabb9fd1c489c999cd1bfeca62490202aab095221fc70c2b6e30e","src/x86/if_ether.rs":"3873beb1f57bdcc52da345608c5deda5937d7b92b114b414ce6161784173b925","src/x86/io_uring.rs":"3f60400dfc11a5eafa5a54466f7f9ca74a63219feda2278c388529bf40c0a6a3","src/x86/ioctl.rs":"8dd0afaa7c3017c3866e579705699be75acc1a095694232b2903a12b3c07a770","src/x86/net.rs":"c79f443ab6585f81a71eff3350700ece82098264206eb79f78714153d6e43529","src/x86/netlink.rs":"2f98b8c0349c2d3d538164e4d06c28fa1fb9308f7691aa097526ce2fb3ac176f","src/x86/prctl.rs":"5477c74fec958d9f9d2028f58e95657840ead28445956e53fe0894e45922f8a5","src/x86/system.rs":"9aca94a4a553e09e6d2d43741fb8c120c05a193822e850951dc469bbd89cc703","src/x86_64/errno.rs":"db283e5ea7fb63e0f57acb241e6471a951b4218660c01ca37b80be9a53629b92","src/x86_64/general.rs":"f0c763abb8d7e7bd594c674d2774b272844829ecee4e3ed42932004616a2380e","src/x86_64/if_ether.rs":"feb5362d90fbd5113b6f1f22fcc77a7b3bb7c8f5081be7eacb2e270d2f24a293","src/x86_64/io_uring.rs":"6c008c59b6905b7872b788e30d546efa3b5bb03a0fe17e33b90bfc8beec23f29","src/x86_64/ioctl.rs":"085227ec906237903b5e8b2e90b80fd078ddb4f9e44bbd6c9bee54ea7d5d6b1a","src/x86_64/net.rs":"0f81db11e3497486f0d2022a8302716816d2442bf7ffd25fa88a25a9bdc1ca83","src/x86_64/netlink.rs":"d47b8b117007b1c29db325dad68ed1b92562b98f90916b1b192f57a935a98038","src/x86_64/prctl.rs":"6d0f63fff8d76cc2f4678f38e40a68d072d7da4094d5217ce92872370bb4df9b","src/x86_64/system.rs":"6a415f104e7dd34829b36c19ad89e0cfcfbc69a846c87f81e202411386d46966"},"package":"da2479e8c062e40bf0066ffa0bc823de0a9368974af99c9f6df941d2c231e03f"}
+\ No newline at end of file
++{"files":{"CODE_OF_CONDUCT.md":"ccd9e538ce44713a2486cc8e4c01b8c8b846d1ccff15de728d8d6fa9a7f846cd","COPYRIGHT":"3290ae0fbc9ddb77d2239121d710f0bb9d31b3b4744e6d97fe01e652b4c1870b","Cargo.toml":"0d02ea4bc018631a52900423cb7466bc905571bd1d767769739bde626360c123","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-Apache-2.0_WITH_LLVM-exception":"268872b9816f90fd8e85db5a28d33f8150ebb8dd016653fb39ef1f94f2686bc5","LICENSE-MIT":"23f18e03dc49df91622fe2a76176497404e46ced8a715d9d2b67a7446571cca3","ORG_CODE_OF_CONDUCT.md":"a62b69bf86e605ee1bcbb2f0a12ba79e4cebb6983a7b6491949750aecc4f2178","README.md":"3d4ae2ec9429adfd329b4506f7ca6faa84dca30d1c3b416f8f4e6f57bdb3aa96","src/aarch64/errno.rs":"db283e5ea7fb63e0f57acb241e6471a951b4218660c01ca37b80be9a53629b92","src/aarch64/general.rs":"0cc1ed00b1ef7efb16a7109138acdf679d591325458938cee150b221af063822","src/aarch64/if_ether.rs":"f558bfec1f5e80235db79f507e4afd3120647a5caddf17a87b7ad5540e5c1462","src/aarch64/io_uring.rs":"9ce4c9005b5b99e041b9d58be6866367605e4bee1e30a7096235cb234ed73e1b","src/aarch64/ioctl.rs":"f83af7f7cab6611c9f316b2289998d3276d844ac8f96111bc65f6c3340c3301a","src/aarch64/net.rs":"9c5fe44f91072b08899a6304cb22a51545034adde444740b67b8b310f3906a30","src/aarch64/netlink.rs":"f6642310b8bd37b5775ba1a52cfe638beddd7c880f8dfacab62c3180255fb75f","src/aarch64/prctl.rs":"a80e2f89aa978210d980538e1d9c5b922bb073d118ee51b1f14c1f397c7cd576","src/aarch64/system.rs":"851a95c0d1b76782970a28c373a313954eea0db35f6ebce562e6047095f587c4","src/arm/errno.rs":"db283e5ea7fb63e0f57acb241e6471a951b4218660c01ca37b80be9a53629b92","src/arm/general.rs":"bdbb40ba5630044c9555fe38a3640638f3ff7f437d94f82428e2bedad32e180f","src/arm/if_ether.rs":"3873beb1f57bdcc52da345608c5deda5937d7b92b114b414ce6161784173b925","src/arm/io_uring.rs":"5e2e254d5853ed30568e17fab184f8cc458607465d19dcee6c87ca1b565d6d2d","src/arm/ioctl.rs":"54cb9bdc5fe4ee7cff30e087bb0d04f1d811b55be7604bb056387f859120935a","src/arm/net.rs":"c79f443ab6585f81a71eff3350700ece82098264206eb79f78714153d6e43529","src/arm/netlink.rs":"2f98b8c0349c2d3d538164e4d06c28fa1fb9308f7691aa097526ce2fb3ac176f","src/arm/prctl.rs":"5477c74fec958d9f9d2028f58e95657840ead28445956e53fe0894e45922f8a5","src/arm/system.rs":"9aca94a4a553e09e6d2d43741fb8c120c05a193822e850951dc469bbd89cc703","src/csky/errno.rs":"db283e5ea7fb63e0f57acb241e6471a951b4218660c01ca37b80be9a53629b92","src/csky/general.rs":"94aba82d2a76a9ca272b4766f8d2f5f14e10bc2166e03d695a5b8b5d207e6755","src/csky/if_ether.rs":"aac7a069000195b9b18c6d25b2d7c338efb1062347a84e31cda7c93de374b3c6","src/csky/io_uring.rs":"bb7541d82f627cb681ea11f4e5c4162b0d0616ca08fcc6d31bcf22ef813f754d","src/csky/ioctl.rs":"77f30e67f9a4748bc31184454a0ff45fd821c3536cd98dc2411def8eeb59a268","src/csky/net.rs":"2796ca3744e82cecd816b4d1b1fba0bcb389fbc4603e0af7b6cdc562c666b512","src/csky/netlink.rs":"9fc99e88df0efc15d39b397c9d653c32b4729e31a2434e730435fbdfcb362da4","src/csky/prctl.rs":"8df2f33387d5b4865d9417f796cc4d43b3cf549cfa41d7ce7d41dd367ed8b6c9","src/csky/system.rs":"a2913685b359e60bbfae413b89a6fd37ea38fa448f980ffadef42d227b33ada5","src/elf.rs":"441bcf378b5603f957e053369d18d95c1efe981665846c220bca82123c04843a","src/lib.rs":"f3399ba35206764ac4e2d755cfc7e0c580464b8fcc76968f6c4b9f64523a1d02","src/loongarch64/errno.rs":"db283e5ea7fb63e0f57acb241e6471a951b4218660c01ca37b80be9a53629b92","src/loongarch64/general.rs":"fadd5884c6ad645300540dd30150984857819a986772fca4eba1e1fc0c3f4f1e","src/loongarch64/if_ether.rs":"a7b0918afc9dbf8e79490f83a809e4bd563af20ab2b3432d58b763ff31418daf","src/loongarch64/io_uring.rs":"ef851913c86bc2d629048e781edc76b59c63e659f8bd4bcd7fa7ff982186ed6d","src/loongarch64/ioctl.rs":"6f631e0ac37f2c2398168683a1523d1559b85369ffe4cdcc284ffe7455672c18","src/loongarch64/net.rs":"21add3f1bfa5898d9a5ef91342f6cd94ad4585df6525b9a4cf6cdf743345a9c7","src/loongarch64/netlink.rs":"789357343b94276e6f8fc8f3a32f06f3d3efd464c7850b2568dc609a49d1f305","src/loongarch64/prctl.rs":"624e4d3b8f3b4f1d6dcb1540c124ed03ce47f0580aa3425c0baed431dcd267ea","src/loongarch64/system.rs":"4cfcd4c28481b9ff5fc0330d5c713d637d3615c0184830d3a4391882eaed68f5","src/mips/errno.rs":"9ab1ed2bf371a27401745c2a6f9692f7e66dfd481d032cb40a354826d394ea8e","src/mips/general.rs":"1c16f4e0a015eaf27e9a30f4b74ec824220798f47c535d3aaf322ad799c054ab","src/mips/if_ether.rs":"1ecdc654cb3eda633bb7192829b1001a0a5ae5a126110b4208bbee5b41f87f5e","src/mips/io_uring.rs":"c69479bb1f862febe78e0989745c202146aaef7e8ceaa0d7bf291af584748992","src/mips/ioctl.rs":"f7464d2cd0548ab4735a1ca8deba47f13ea0946a9bf86adfec1a77feb3a0f475","src/mips/net.rs":"843204e91d8d6c34ddf4d29e1e822980b3e058ff7fc55ecc5f2b0fdf4a72825d","src/mips/netlink.rs":"5045dece259abaa5b5974500a20a9325e4cd6eb87c344dd7448056dff469994f","src/mips/prctl.rs":"cd59615adf6f561ef6c542df59cf25f75877693ca1c4c489f5e460eb7e798d4d","src/mips/system.rs":"acbc69a1fe7299860f8bd142bc2ff5595a410e1528994139062fc680d1a87e71","src/mips32r6/errno.rs":"9ab1ed2bf371a27401745c2a6f9692f7e66dfd481d032cb40a354826d394ea8e","src/mips32r6/general.rs":"1c16f4e0a015eaf27e9a30f4b74ec824220798f47c535d3aaf322ad799c054ab","src/mips32r6/if_ether.rs":"1ecdc654cb3eda633bb7192829b1001a0a5ae5a126110b4208bbee5b41f87f5e","src/mips32r6/io_uring.rs":"c69479bb1f862febe78e0989745c202146aaef7e8ceaa0d7bf291af584748992","src/mips32r6/ioctl.rs":"f7464d2cd0548ab4735a1ca8deba47f13ea0946a9bf86adfec1a77feb3a0f475","src/mips32r6/net.rs":"843204e91d8d6c34ddf4d29e1e822980b3e058ff7fc55ecc5f2b0fdf4a72825d","src/mips32r6/netlink.rs":"5045dece259abaa5b5974500a20a9325e4cd6eb87c344dd7448056dff469994f","src/mips32r6/prctl.rs":"cd59615adf6f561ef6c542df59cf25f75877693ca1c4c489f5e460eb7e798d4d","src/mips32r6/system.rs":"acbc69a1fe7299860f8bd142bc2ff5595a410e1528994139062fc680d1a87e71","src/mips64/errno.rs":"9ab1ed2bf371a27401745c2a6f9692f7e66dfd481d032cb40a354826d394ea8e","src/mips64/general.rs":"700b52614c604ce58aa8a5bb1ebd46221c0106ec2d8d6600d681d1594a89118a","src/mips64/if_ether.rs":"6d24518acba5bc8fa2a775d039ed9594de8349d6028621134626ccaa8ea18bcd","src/mips64/io_uring.rs":"18bd5d96c53aff44f4a37a71f16fa12c11b7c934b395bf841e47d98283146a50","src/mips64/ioctl.rs":"39f3276ec6f36ba78f5635bd418310b21bc228902d4da889fa01094c26f9b57d","src/mips64/net.rs":"e641379a7004ded90413b084504468148ecdbea4f753cbca83a4d22698cd7bab","src/mips64/netlink.rs":"18f139573337be18fb9bd56de4d0bd8af5c80828cdf68ef68751c18dcb39c761","src/mips64/prctl.rs":"a8d9ae240ce582d7b66e13937b99df73c7247ce9f69bfdd12109903d7c6dfb0e","src/mips64/system.rs":"95805c533a8eb8f53c28f138e6a2d12064681cc9db55d3042ee748c655a5d7cd","src/mips64r6/errno.rs":"9ab1ed2bf371a27401745c2a6f9692f7e66dfd481d032cb40a354826d394ea8e","src/mips64r6/general.rs":"700b52614c604ce58aa8a5bb1ebd46221c0106ec2d8d6600d681d1594a89118a","src/mips64r6/if_ether.rs":"6d24518acba5bc8fa2a775d039ed9594de8349d6028621134626ccaa8ea18bcd","src/mips64r6/io_uring.rs":"18bd5d96c53aff44f4a37a71f16fa12c11b7c934b395bf841e47d98283146a50","src/mips64r6/ioctl.rs":"39f3276ec6f36ba78f5635bd418310b21bc228902d4da889fa01094c26f9b57d","src/mips64r6/net.rs":"e641379a7004ded90413b084504468148ecdbea4f753cbca83a4d22698cd7bab","src/mips64r6/netlink.rs":"18f139573337be18fb9bd56de4d0bd8af5c80828cdf68ef68751c18dcb39c761","src/mips64r6/prctl.rs":"a8d9ae240ce582d7b66e13937b99df73c7247ce9f69bfdd12109903d7c6dfb0e","src/mips64r6/system.rs":"95805c533a8eb8f53c28f138e6a2d12064681cc9db55d3042ee748c655a5d7cd","src/powerpc/errno.rs":"db283e5ea7fb63e0f57acb241e6471a951b4218660c01ca37b80be9a53629b92","src/powerpc/general.rs":"7940ad742585d501f4742af51c6310ecf651863a8aaa0305193e9c8ee07aca90","src/powerpc/if_ether.rs":"56ab5fbf8c9d0dd0654412fabdca6759cbb421a876e5cebe2fec10e25728b55c","src/powerpc/io_uring.rs":"38ead79180c96e8af69929dacbc36e8cfbccec96875c61f094e531e6860639db","src/powerpc/ioctl.rs":"513a1dda33b2cd6c57212263c1bf0d8d443e54af6efe75e938fd700c1ca2b630","src/powerpc/net.rs":"b0228b15161fb750596d6e7e8e612381d9cf2acfed208715b93e8bb4b3340dc7","src/powerpc/netlink.rs":"4f10870fbd23de17efd4cba6d5a40ba85564effc5d0b71f9d7f928a42cb77b20","src/powerpc/prctl.rs":"f236987bc8ae993ab2b0e88b2eacbae51eee4da170ac075552070a76c717d803","src/powerpc/system.rs":"3e01e5e145adb87263f04c9e053faac0c0758689fd32917d2c7c2eec1d89f9e6","src/powerpc64/errno.rs":"db283e5ea7fb63e0f57acb241e6471a951b4218660c01ca37b80be9a53629b92","src/powerpc64/general.rs":"7862eb3dfac845331b624b3322bdeb3441bb86e914719e125dd5dc7b3860db43","src/powerpc64/if_ether.rs":"15e12d56b1367cb5c9d8fd1b9995acf1429a923bf603698bd732efe1acc30cd6","src/powerpc64/io_uring.rs":"50018e504d78c584beaec3b1a6d477d30d566d25cb1a40b0a2adfb23d726674f","src/powerpc64/ioctl.rs":"513a1dda33b2cd6c57212263c1bf0d8d443e54af6efe75e938fd700c1ca2b630","src/powerpc64/net.rs":"0a1468b8a270470a619e492fd6c50c59ea9eeaccd1c7827a0a0d55a0ae981cd4","src/powerpc64/netlink.rs":"352a06995f7de6e4fc1c2ff54c1eccb93a3d833a923e834ec75c6a4d30ea5051","src/powerpc64/prctl.rs":"1fcccc5b6dac472a30b7701b5c9e436c238410077cfe158d600b51d55bcc074a","src/powerpc64/system.rs":"500116b7613a504ebad028cec1ba37323310e29864d885ae62191ccb1f86d333","src/riscv32/errno.rs":"db283e5ea7fb63e0f57acb241e6471a951b4218660c01ca37b80be9a53629b92","src/riscv32/general.rs":"16ffe747a8f75ac6594dc7afddef4fbb89735da50af9f19f152fd2f04b509463","src/riscv32/if_ether.rs":"aac7a069000195b9b18c6d25b2d7c338efb1062347a84e31cda7c93de374b3c6","src/riscv32/io_uring.rs":"5c67dde8fdb3641407d4ebd744171606b82596176343fe6766afd3220eda2ab1","src/riscv32/ioctl.rs":"6f631e0ac37f2c2398168683a1523d1559b85369ffe4cdcc284ffe7455672c18","src/riscv32/net.rs":"2796ca3744e82cecd816b4d1b1fba0bcb389fbc4603e0af7b6cdc562c666b512","src/riscv32/netlink.rs":"9fc99e88df0efc15d39b397c9d653c32b4729e31a2434e730435fbdfcb362da4","src/riscv32/prctl.rs":"8df2f33387d5b4865d9417f796cc4d43b3cf549cfa41d7ce7d41dd367ed8b6c9","src/riscv32/system.rs":"a2913685b359e60bbfae413b89a6fd37ea38fa448f980ffadef42d227b33ada5","src/riscv64/errno.rs":"db283e5ea7fb63e0f57acb241e6471a951b4218660c01ca37b80be9a53629b92","src/riscv64/general.rs":"8ec4907ac81b62e21451731a468e5f4fe7a37a46d80c56b33e0969d2d059d442","src/riscv64/if_ether.rs":"a7b0918afc9dbf8e79490f83a809e4bd563af20ab2b3432d58b763ff31418daf","src/riscv64/io_uring.rs":"ef851913c86bc2d629048e781edc76b59c63e659f8bd4bcd7fa7ff982186ed6d","src/riscv64/ioctl.rs":"6f631e0ac37f2c2398168683a1523d1559b85369ffe4cdcc284ffe7455672c18","src/riscv64/net.rs":"21add3f1bfa5898d9a5ef91342f6cd94ad4585df6525b9a4cf6cdf743345a9c7","src/riscv64/netlink.rs":"789357343b94276e6f8fc8f3a32f06f3d3efd464c7850b2568dc609a49d1f305","src/riscv64/prctl.rs":"624e4d3b8f3b4f1d6dcb1540c124ed03ce47f0580aa3425c0baed431dcd267ea","src/riscv64/system.rs":"4cfcd4c28481b9ff5fc0330d5c713d637d3615c0184830d3a4391882eaed68f5","src/s390x/errno.rs":"db283e5ea7fb63e0f57acb241e6471a951b4218660c01ca37b80be9a53629b92","src/s390x/general.rs":"687f6e3d4502d31aaec08a41335b6a368f0949049875d939065a66df86e06b8a","src/s390x/if_ether.rs":"2a6b2132be1aebe126eb64ea230a82c2ab18433ece37226c9dacd81392ad6ca2","src/s390x/io_uring.rs":"57790b4ce46a4a2a87a329ff935033dfb430117bcc2154fa066c5d681f115319","src/s390x/ioctl.rs":"f806407c91520bc994c7cd638576686710700a4cfa30c8ca3f81d55ce1d6ecdd","src/s390x/net.rs":"3f89cf2040475eb446daa3f54b5cd016ca8d2d3966f02efb3f0f2f425ccc7adb","src/s390x/netlink.rs":"c3c11102a55d15dfb534d748c8bc8ece58c9d56170b6afe07d1f2cc783fd6372","src/s390x/prctl.rs":"6fe9f5349f9a694b391f309f208235c1d4669430d3f051ef64467576e91df7a1","src/s390x/system.rs":"f68e8afbd02bc87a8fec2cae4e367f296873f2c1a651032da4ccc4f1c0ed4670","src/sparc/errno.rs":"b3c5e1a6c8378d613c6ef3dd44e2199ea7455da65e83d6bb19582a79de880f80","src/sparc/general.rs":"a38a371e8ceeecc4384999b4cc8ee6d65a35a20542c96c62e04c24b2b2953858","src/sparc/if_ether.rs":"c574c6d1ea11a8bb213b18928fe2d342c897d8ff04458991a5192eb401fbbc57","src/sparc/io_uring.rs":"d813aabc93f383c8522773ed431209627d810c78bcdb4dae931a702aae55e09f","src/sparc/ioctl.rs":"b2a4c7d649b18a4d2a1142400de687d48c195dcf8a86b58ac261413c33809d7b","src/sparc/net.rs":"00115a0deec75249c34bf61c76d4a95181be21ac3a1b5200875186d5ea501dac","src/sparc/netlink.rs":"0f176c24de7b2b19ac16dd95625947b8047d48bc7b871e12a3acd31f8e97c6f5","src/sparc/prctl.rs":"5f5686fe04d8cc5e746d789340331095044f244c123ecf1468dcb089bbb931cd","src/sparc/system.rs":"6f1f8ce9978693845621880031ac4805f00027315a691cfe1d466703c7a96e8d","src/sparc64/errno.rs":"b3c5e1a6c8378d613c6ef3dd44e2199ea7455da65e83d6bb19582a79de880f80","src/sparc64/general.rs":"d4138f0e663833811365fdc2e1008525df0f21044b5e31ab80d97d63fcb9f45f","src/sparc64/if_ether.rs":"e51661cb54922c3c01853617a61b3d141768adf5b94162c4aefb326d31ef55f5","src/sparc64/io_uring.rs":"688f4678c259eb940aa0216c0183843d436b918d214e13bed56a38ddb55132db","src/sparc64/ioctl.rs":"b2a4c7d649b18a4d2a1142400de687d48c195dcf8a86b58ac261413c33809d7b","src/sparc64/net.rs":"e1b51c9efe4a00b645100f2b4aa6f2349a50627521fa020370b1bf8ab0b3a575","src/sparc64/netlink.rs":"7cec2a77af7ec17cad96d63027fd4a9164f0e6503f2e6cc2a99e79aa1bba3147","src/sparc64/prctl.rs":"ff897312b99274ce4eefecccc9797e34accc69efce1299712ab631e9542cb8ca","src/sparc64/system.rs":"becb2e706ed6df77860a2a3884b28181dca606db22fcd604a8a1803f937e8baf","src/x32/errno.rs":"db283e5ea7fb63e0f57acb241e6471a951b4218660c01ca37b80be9a53629b92","src/x32/general.rs":"c2aeaf3096d559a86d3f72e0ec4a91245edfce35e85c51b50d13e115ec357974","src/x32/if_ether.rs":"859047581be4a3ae923b88516e86230a4f9a447a5203285dd8bcc539038c5c4f","src/x32/io_uring.rs":"40834d29cf819e43a23f9a2e075e2ea0c001be817f7200aa059c24eae6d29123","src/x32/ioctl.rs":"085227ec906237903b5e8b2e90b80fd078ddb4f9e44bbd6c9bee54ea7d5d6b1a","src/x32/net.rs":"e2d77e5bbae1fb298e48382204db55e4e49de2039369e3b2b77097029006c9d1","src/x32/netlink.rs":"f74042f6cf0eabccfa6b8008a89ee7ab347fb7eaa89499757a786b13b2403982","src/x32/prctl.rs":"2d1eb6873dc5b72a8252f12e695d98a045e01026f730302a6515f53a394bba7c","src/x32/system.rs":"69c125aea6b21f2c4981f3386602283a691524671fef1da7d67de3b67fffdc42","src/x86/errno.rs":"db283e5ea7fb63e0f57acb241e6471a951b4218660c01ca37b80be9a53629b92","src/x86/general.rs":"a9b002ea136dabb9fd1c489c999cd1bfeca62490202aab095221fc70c2b6e30e","src/x86/if_ether.rs":"3873beb1f57bdcc52da345608c5deda5937d7b92b114b414ce6161784173b925","src/x86/io_uring.rs":"3f60400dfc11a5eafa5a54466f7f9ca74a63219feda2278c388529bf40c0a6a3","src/x86/ioctl.rs":"8dd0afaa7c3017c3866e579705699be75acc1a095694232b2903a12b3c07a770","src/x86/net.rs":"c79f443ab6585f81a71eff3350700ece82098264206eb79f78714153d6e43529","src/x86/netlink.rs":"2f98b8c0349c2d3d538164e4d06c28fa1fb9308f7691aa097526ce2fb3ac176f","src/x86/prctl.rs":"5477c74fec958d9f9d2028f58e95657840ead28445956e53fe0894e45922f8a5","src/x86/system.rs":"9aca94a4a553e09e6d2d43741fb8c120c05a193822e850951dc469bbd89cc703","src/x86_64/errno.rs":"db283e5ea7fb63e0f57acb241e6471a951b4218660c01ca37b80be9a53629b92","src/x86_64/general.rs":"f0c763abb8d7e7bd594c674d2774b272844829ecee4e3ed42932004616a2380e","src/x86_64/if_ether.rs":"feb5362d90fbd5113b6f1f22fcc77a7b3bb7c8f5081be7eacb2e270d2f24a293","src/x86_64/io_uring.rs":"6c008c59b6905b7872b788e30d546efa3b5bb03a0fe17e33b90bfc8beec23f29","src/x86_64/ioctl.rs":"085227ec906237903b5e8b2e90b80fd078ddb4f9e44bbd6c9bee54ea7d5d6b1a","src/x86_64/net.rs":"0f81db11e3497486f0d2022a8302716816d2442bf7ffd25fa88a25a9bdc1ca83","src/x86_64/netlink.rs":"d47b8b117007b1c29db325dad68ed1b92562b98f90916b1b192f57a935a98038","src/x86_64/prctl.rs":"6d0f63fff8d76cc2f4678f38e40a68d072d7da4094d5217ce92872370bb4df9b","src/x86_64/system.rs":"6a415f104e7dd34829b36c19ad89e0cfcfbc69a846c87f81e202411386d46966"},"package":"da2479e8c062e40bf0066ffa0bc823de0a9368974af99c9f6df941d2c231e03f"}
diff --git a/meta/recipes-devtools/rust/files/rv32-rustix-libc-backend.patch b/meta/recipes-devtools/rust/files/rv32-rustix-libc-backend.patch
new file mode 100644
index 0000000000..7e99b50834
--- /dev/null
+++ b/meta/recipes-devtools/rust/files/rv32-rustix-libc-backend.patch
@@ -0,0 +1,32 @@
+Use FICLONE from linux_raw_sys module
+
+Upstream-Status: Pending
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+--- a/vendor/rustix/src/backend/libc/c.rs
++++ b/vendor/rustix/src/backend/libc/c.rs
+@@ -6,6 +6,8 @@
+ // things below.
+ pub(crate) use libc::*;
+
++use linux_raw_sys::ioctl::FICLONE;
++
+ /// `PROC_SUPER_MAGIC`—The magic number for the procfs filesystem.
+ #[cfg(all(linux_kernel, target_env = "musl"))]
+ pub(crate) const PROC_SUPER_MAGIC: u32 = 0x0000_9fa0;
+--- a/vendor/rustix/.cargo-checksum.json
++++ b/vendor/rustix/.cargo-checksum.json
+@@ -1 +1 @@
+-{"files":{"CODE_OF_CONDUCT.md":"f210602311e3f74b32f46237fd55f4ce36d798e85e3db1432ec667f63a7ffc44","CONTRIBUTING.md":"3fd57de5c678db1c972da676a8231d2fde9820695ef1f0d53f1e55a3e81d9de0","COPYRIGHT":"377c2e7c53250cc5905c0b0532d35973392af16ffb9596a41d99d202cf3617c9","Cargo.toml":"5a6eafc41863c968362a1b8424dd0a5e1ce6502496b2999ef97ceac81f30fbcd","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-Apache-2.0_WITH_LLVM-exception":"268872b9816f90fd8e85db5a28d33f8150ebb8dd016653fb39ef1f94f2686bc5","LICENSE-MIT":"23f18e03dc49df91622fe2a76176497404e46ced8a715d9d2b67a7446571cca3","ORG_CODE_OF_CONDUCT.md":"a62b69bf86e605ee1bcbb2f0a12ba79e4cebb6983a7b6491949750aecc4f2178","README.md":"0acc443488c1d6f7a0a80a91385d06c7a1920c1f5c847214dd6c23a6bd027f75","SECURITY.md":"4d75afb09dd28eb5982e3a1f768ee398d90204669ceef3240a16b31dcf04148a","benches/mod.rs":"e260e67273aa0a37cffdc5cd451699335b8ee656c17275a0d0f2b4563c6018ca","build.rs":"5f93559819ca7fe233f01579d51ff5b58cb6e96ef9e7817a3358a9b410d5bbf6","src/backend/libc/c.rs":"9a929ab2bff99843e8c663b92b93def5c752e96d2e6228520cb50eeec483586b","src/backend/libc/conv.rs":"b66d080db3a4c2756fe722462b543982bf88e5bc8071b9dc98d28ec2aee3dbfc","src/backend/libc/event/epoll.rs":"b6f00ec602f9a6b9a2b7a66a242efda088b67ed66053692dcbba683df4fdaf19","src/backend/libc/event/mod.rs":"7f8547c599b8263eb791890bbe4a0b22fe2676d007ffdcc3e07b2e48d1c994db","src/backend/libc/event/poll_fd.rs":"1186dd69fc78d474975f1b8c55c9dd261229cb0c53a0dd2a88754ef62529a917","src/backend/libc/event/syscalls.rs":"295a930521b345192caaf916b39189299f81f5f7c9f1aaed235baf3bf6f2f5fa","src/backend/libc/event/types.rs":"e81130bc07e8f762ea2942bca1bde048ade0df24aac619907f3055d2dda7d522","src/backend/libc/event/windows_syscalls.rs":"ebfac665c6676c4b803134ab8806be8aa2e96bdbc7799a19c544cd9069b35787","src/backend/libc/fs/dir.rs":"71332c794f4e2438e7c700a0d2206fe0088cf98473030e3f8ccddf252ad7f5b2","src/backend/libc/fs/inotify.rs":"a027a718db8bf6ff9a778c2cfa706850b466240b0a2296d72b9834c0096968e8","src/backend/libc/fs/makedev.rs":"89c679a0ef18dd41b3c6223bce0f329ad35bf6cadbf16e47b33fad3f312ba4a6","src/backend/libc/fs/mod.rs":"3d28b803011b57da6315bb747daf0117218687c0cc610358f5dafddf0b7d44d3","src/backend/libc/fs/syscalls.rs":"229d1c27d53bfbd35310b05aaf8b3307d6b6e9948faf0655713bc662f901bd02","src/backend/libc/fs/types.rs":"ca25c8271e1fd2ff6c28082884ca112fd5e0523b2aa3cd289cd9a399c962a172","src/backend/libc/io/errno.rs":"d33978855c2e0e97d5e1f7a04ab10894f9f2c3e329e949f259a757f89942074e","src/backend/libc/io/mod.rs":"746647bd864e4ec7717925b6d176cebdb392b7d015070244cc48d92780351dd6","src/backend/libc/io/syscalls.rs":"cbd9e9db566597de0a709247c8ba3bd1e6ce266a128d639273353c706f0592b7","src/backend/libc/io/types.rs":"2efd39bb3df19a9db5496217284f3d2235ddb354fac81dd71861109a56bede0b","src/backend/libc/io/windows_syscalls.rs":"fab3fa099aa89cea56a6edc651dd37750e581534f4f45b04a7e425f130b08468","src/backend/libc/io_uring/mod.rs":"2c6478857a0751625edabd61acb841819bfba1093b1faeded15693c805d84952","src/backend/libc/io_uring/syscalls.rs":"5af8146d5971c833e6fd657f652c618b31f854e1b0811864fba9b658cb633e19","src/backend/libc/mm/mod.rs":"8aa966faf3853d1a93d0ed91f7e5f4a53539b0287b25a5bfe489fa1d07f7cfd7","src/backend/libc/mm/syscalls.rs":"c04c61ad3d7f3b24d13f89144d7fa0a05658cea4763207f9250db4f9362de2fe","src/backend/libc/mm/types.rs":"48406d44d79b102ae9d17860749611d26c1f04e3630e9680dfcb20e2efc148a8","src/backend/libc/mod.rs":"778083f85aaca00aa5c8ec36b541e0e8a3e693660a6acf4722b4508f015c278c","src/backend/libc/mount/mod.rs":"8aa966faf3853d1a93d0ed91f7e5f4a53539b0287b25a5bfe489fa1d07f7cfd7","src/backend/libc/mount/syscalls.rs":"1bc87501a078616d0190d2e85de55f3f968b8cb79d49bd9eb839a350eed26089","src/backend/libc/mount/types.rs":"1b0d527ae5a272ae441fa35192616a867090204404324a341a7864ae1f5ea3fd","src/backend/libc/net/addr.rs":"9bdc2febb20785bc26a820eaba52cb546c67c79cb547f08105087cd0f79d5861","src/backend/libc/net/ext.rs":"0dd64877abe1ba86b47a2ab34340e3f6cc7b53b22d0bb5e237daf6a82edd46b0","src/backend/libc/net/mod.rs":"5f0bd80a6575aba4a714443ca99af76ad15b053f1f8029aa1d9c7fa10e6d9242","src/backend/libc/net/msghdr.rs":"64825203c09cb60ac4e869cd27a44697f1efe53455c41684279d50d8f0a96701","src/backend/libc/net/read_sockaddr.rs":"b1a72b7bd56cd152ed0731f571e4d9411b2974f860fc045319dc74b5b077e569","src/backend/libc/net/send_recv.rs":"1450ca958431a5bf3a85cdeef88b387f30d5c1215de2c56d164ccda38f21b36c","src/backend/libc/net/sockopt.rs":"178a6622bab25bf1813d6c17151f48d820e238db8cbd05be3aae563902d6c53e","src/backend/libc/net/syscalls.rs":"1c703ea3d7a1d0bfa43ffe07425882f310260589d513e1a936de434fa653cf3a","src/backend/libc/net/write_sockaddr.rs":"6f06f6cf6089fbc43f93f4f3830468735c92277595769867cb6c89e1fe1c299a","src/backend/libc/param/auxv.rs":"fc9476c85482b1d44190289224ccf40c96fbf3a2fe3d8554ddb42acb2e97a8ae","src/backend/libc/param/mod.rs":"5234b8f1bcb886cca6ea003d411d75eaeebe58deedd80e3441354bf46ed85d4d","src/backend/libc/pid/mod.rs":"2c6478857a0751625edabd61acb841819bfba1093b1faeded15693c805d84952","src/backend/libc/pid/syscalls.rs":"49ea679b96c0741d048e82964038f9a931bc3cf3a0b59c7db3df89629b9c49e6","src/backend/libc/pipe/mod.rs":"8aa966faf3853d1a93d0ed91f7e5f4a53539b0287b25a5bfe489fa1d07f7cfd7","src/backend/libc/pipe/syscalls.rs":"8affde100f6a9dfc762b79d1e48be5c1039be414f8ef7d5a6acaba882a68d259","src/backend/libc/pipe/types.rs":"bcf2751691748b5084a6d8d8851b496bc2ee9011fc1c717839b09b07d423eb7f","src/backend/libc/prctl/mod.rs":"2c6478857a0751625edabd61acb841819bfba1093b1faeded15693c805d84952","src/backend/libc/prctl/syscalls.rs":"8a2684f444a7555098dce2b92270d81cefdae902716c6e5d59bd7b0657e8a29d","src/backend/libc/process/cpu_set.rs":"b3d36b01b53b0b6c61a20ed8a69d48eccdd90cc17f82f2926ef1e844f002d0b7","src/backend/libc/process/mod.rs":"3f376060815d2ed78081ddf6fb0cddc97dff5dac2f4934f0672cb09736e16377","src/backend/libc/process/syscalls.rs":"278c87df6321cad2bd37049a7cfeecad7dd54f5e3a8449368da4c3409637e6af","src/backend/libc/process/types.rs":"c011d60d93130e1233ee1d69de1e834a6ce05628220a1c0bea9b5c6eb4c853a2","src/backend/libc/process/wait.rs":"0cc556aed976b4bbb3965f74fd76b8216c755fce25043b7b21ce54afa07c9773","src/backend/libc/pty/mod.rs":"2c6478857a0751625edabd61acb841819bfba1093b1faeded15693c805d84952","src/backend/libc/pty/syscalls.rs":"699a4c325fc590b8b5dabfe5a9ff386809be14dd24bf35aa6ef581b2bd75457b","src/backend/libc/rand/mod.rs":"8aa966faf3853d1a93d0ed91f7e5f4a53539b0287b25a5bfe489fa1d07f7cfd7","src/backend/libc/rand/syscalls.rs":"78c7201e6bcb75e9cab9486d1878861319f865de2b2c46437be68690bd17bf13","src/backend/libc/rand/types.rs":"4eb0b4cdd0a9b089d1c9f6a25ad1ca97be28a38b7b07a705ec605b773f63f880","src/backend/libc/shm/mod.rs":"8aa966faf3853d1a93d0ed91f7e5f4a53539b0287b25a5bfe489fa1d07f7cfd7","src/backend/libc/shm/syscalls.rs":"60d797d4e85e08e6330e6b8d80094356ce377e5484952f88ae2a6e49231c268c","src/backend/libc/shm/types.rs":"2206eac8ee74951b995e1e80f10bf235cc6b04e0a099f4adefce546378838233","src/backend/libc/system/mod.rs":"38563ea68829ca5a4b1b0695ac8a5c05718e85bdc88a36dc805efdfce45d3909","src/backend/libc/system/syscalls.rs":"abe1093f3495668d8576ae55073c74976cffb04d2e1bc20583d7ec12ac848b06","src/backend/libc/system/types.rs":"6871e16aee14fe2ae03cea798c3e509ffe44778a9c0e5608fd73e2e015876d7e","src/backend/libc/termios/mod.rs":"2c6478857a0751625edabd61acb841819bfba1093b1faeded15693c805d84952","src/backend/libc/termios/syscalls.rs":"77c4c6fdfe036251ee1540df5b398a38d87597a5e7e94abaf9b983419ae4ca24","src/backend/libc/thread/futex.rs":"b666828653b12634bbd7fd709acf69641b648ec40962a1d4f904c5db14d2eff5","src/backend/libc/thread/mod.rs":"fa710053974d7f16a6c49242ee6c10a3b9e1143452b9daeaed8837302a679fff","src/backend/libc/thread/syscalls.rs":"c8e84ad232ec1317be989529c24e204c51bb4e1e0212de2c1937b00bc92e5483","src/backend/libc/time/mod.rs":"38563ea68829ca5a4b1b0695ac8a5c05718e85bdc88a36dc805efdfce45d3909","src/backend/libc/time/syscalls.rs":"f82e0725c5af8a52e61ee83aad2c77694f2f7a72ac1e6eb284109a70ac6edc38","src/backend/libc/time/types.rs":"47ab4178a51c246ecc5f2a2fcca7f6ad9c8a1a45d11dadcb95ee0e4f82e8dfe2","src/backend/libc/ugid/mod.rs":"2c6478857a0751625edabd61acb841819bfba1093b1faeded15693c805d84952","src/backend/libc/ugid/syscalls.rs":"8edf91b8790add23902c9f5418da6b0723a371677f29f490e0c8af852f0f1a0c","src/backend/libc/winsock_c.rs":"3bf3884fd250eca806ffdf96da68e29c133a697810b78b333ea449e523e58562","src/backend/linux_raw/arch/aarch64.rs":"2a255c9135bc8a321c180f52b88eb2b158bc9170cd222149caeae63c24587d44","src/backend/linux_raw/arch/arm.rs":"165bccb5883d0136e55d42091183765f83d86e9d37a7cb2cec9ae8af32774db6","src/backend/linux_raw/arch/mips.rs":"24af364aa93fd4b9917639d473336490a143f0d1723b09f388e72d534160ee51","src/backend/linux_raw/arch/mips32r6.rs":"e436a2ade34f2f7c58b8924462b07a2499dfc951e1e1318d51759444fb8b658e","src/backend/linux_raw/arch/mips64.rs":"897da9ddc877963ad59464d7f81dc59df8a7e91251e6adea14cfd946e2740a1c","src/backend/linux_raw/arch/mips64r6.rs":"3c08aea13c1139fb0dfbe74ca3d0147f007c3aa2eda641afb82c46f6aa4f6cd1","src/backend/linux_raw/arch/mod.rs":"37eaeea601d39be00d170856a31a8ec6a27b0d3ac22ab3ee642ab0e45dee237d","src/backend/linux_raw/arch/powerpc64.rs":"dfb001f8636a5e46d728900b0804fe6c374e5e18a6f0e76d7d62e0c07da74477","src/backend/linux_raw/arch/riscv64.rs":"41d33242d941030f46077dc2b1bc4c7913fe7630d693a5a7eef966bcf38f9d8b","src/backend/linux_raw/arch/thumb.rs":"2fd979ab421248c0a4c592bc0cefee63edc26528f469b71b63eaed35356e42e8","src/backend/linux_raw/arch/x86.rs":"81c73c0a13925854b54db754f533dec565b1db2d9586e210a2450c6f5915171d","src/backend/linux_raw/arch/x86_64.rs":"e929036a1f3cf93ba538f4523b241605cc7b5e61f84ffe6d9d5cdbcb6f73e543","src/backend/linux_raw/c.rs":"74cdf34fcd5bba9628c04585fa13721073e2bffe6740315c7655d1a55150230f","src/backend/linux_raw/conv.rs":"fd339c2e78e6470cb03614540bea2958eec83f1b6cbecc68696a2d2061365f90","src/backend/linux_raw/event/epoll.rs":"4ae8f9de28c6875ab29cde7da9b4b01a6578e905c7a0e0b461d26025cb24ab52","src/backend/linux_raw/event/mod.rs":"72e46b04637e2d1d2a6b97af616144995399e489d1fe916faf835d72fc8c64cd","src/backend/linux_raw/event/poll_fd.rs":"fe1c289980384edf7334a4afe92f92cb6f51b7c0431ecb19930426a3bdb89fa5","src/backend/linux_raw/event/syscalls.rs":"8782cdf978bff0773bf5f35c1056d5c29a40742cf6f99d9606d951b52596cd34","src/backend/linux_raw/event/types.rs":"9538403f2e2c5ffcc939769d83fbfcc2db03874b45c36a2858ba07a05a3e3635","src/backend/linux_raw/fs/dir.rs":"c675dc5413428d2defd6752e99d210da83639779e853db209de6a1c08d35e0e7","src/backend/linux_raw/fs/inotify.rs":"9fc5edea36e347041a39e583cb473dd84af40c63fff3dfbb85a1a97d2833d8e5","src/backend/linux_raw/fs/makedev.rs":"c6b4505c4bcbbc2460e80f3097eb15e2c8ef38d6c6e7abd78e39c53c372139e2","src/backend/linux_raw/fs/mod.rs":"8689e46f9c4c3e1190ae5fe39176c573cccac10b3739472128ca237f41e2686a","src/backend/linux_raw/fs/syscalls.rs":"a4512f58e2ef01bf0799c96fa7821284866b6826594642565f46d93e566e2e12","src/backend/linux_raw/fs/types.rs":"ccbce0b7971064c71b16a341b7f39bf2c375262ea199d6fcf9d1b073b032ccf1","src/backend/linux_raw/io/errno.rs":"2681fe1f7da132414606a7c0dbc1c2cbc43e45f326f00d06cdff51d9724c8c5e","src/backend/linux_raw/io/mod.rs":"7ae2324427892cca6f5ab53858d847b165f790a72ec25f3d99fb15f0506c9f27","src/backend/linux_raw/io/syscalls.rs":"831ce424f0032e6891d46503e972313fd7a46f15cb2fbb1f87e40fb05a018e9a","src/backend/linux_raw/io/types.rs":"d0df5a62248e1ba19af0f89f86da7ba2312d97065c2814ddf6b08a4a0db23bad","src/backend/linux_raw/io_uring/mod.rs":"2c6478857a0751625edabd61acb841819bfba1093b1faeded15693c805d84952","src/backend/linux_raw/io_uring/syscalls.rs":"0f7c9cb7ccddf5687e4b9e5b23558871a452a29ac6095a0184a06c47b9b18eb6","src/backend/linux_raw/mm/mod.rs":"8aa966faf3853d1a93d0ed91f7e5f4a53539b0287b25a5bfe489fa1d07f7cfd7","src/backend/linux_raw/mm/syscalls.rs":"ac77ee94bb26bc959d3b077ef60e72863ef845c1438646b61755bdb861467bc6","src/backend/linux_raw/mm/types.rs":"46abdd2492301b7fe542ea3949eef2cfe28959cdd1f571ee1350e35b6517349b","src/backend/linux_raw/mod.rs":"bb26fe4783f834a4624c99ecf9e4ea8fa4d1c7fe9adfb6d7310fac689e7797bc","src/backend/linux_raw/mount/mod.rs":"8aa966faf3853d1a93d0ed91f7e5f4a53539b0287b25a5bfe489fa1d07f7cfd7","src/backend/linux_raw/mount/syscalls.rs":"3947261b5d46b9737f02dc5352c3a3a35c63c461fd75bcd8ae6619dfc0bfb54d","src/backend/linux_raw/mount/types.rs":"af364e3f054b0fa55562827944235055beb752716347645cd1775d1a4d5615c1","src/backend/linux_raw/net/addr.rs":"fbb6c071a8ebc2c557c7013d36d4b540c1ff6ed63103d0a8abcacd8977fbf5c8","src/backend/linux_raw/net/mod.rs":"904b2ccd4228ebcda25db0223b92fb03e4042bcc7b0a202f19410a8a5ff517a5","src/backend/linux_raw/net/msghdr.rs":"a88e8395dd5a25c103dc2172d4e7013e5104a6d70091ad78efd286ebc8284acf","src/backend/linux_raw/net/read_sockaddr.rs":"4ab5d470f03f19a928327603e1d55de8b9e21cbe4bfc6ed65d856803092b1c90","src/backend/linux_raw/net/send_recv.rs":"6d5d2aec61a3c1b4a5fef1a8a487dc8e163da8988d1237541ed008baa01128cc","src/backend/linux_raw/net/sockopt.rs":"2ea8625ecd68a799c14ed48dec2f3e01f085222b6cc36f4916b20143e08ae575","src/backend/linux_raw/net/syscalls.rs":"b231754af1c94f61d206839b45f51516e0210761bbbcc1ef68c1fe6fc8a1bbbb","src/backend/linux_raw/net/write_sockaddr.rs":"a9374256cb47b3f4d5fec308986a109ce51a521abab31590df1f66c04c07d437","src/backend/linux_raw/param/auxv.rs":"c6a74c0b41aa28c850a80aad7cbad677df65c1f3eb78415872f6f3d1166716f4","src/backend/linux_raw/param/init.rs":"a5024f9afe361884f22fb4d65e632ccd8a60f1138ff6bd526df600d40e0d9b7e","src/backend/linux_raw/param/libc_auxv.rs":"167621c0b1bfd06c91c1201e9404d6624aa0a702ade17c6e37d9370c636be589","src/backend/linux_raw/param/mod.rs":"2e6a1a1c00351b9c88bd615aa923f71d76208df5626dd9bea03067f28f81dc31","src/backend/linux_raw/pid/mod.rs":"2c6478857a0751625edabd61acb841819bfba1093b1faeded15693c805d84952","src/backend/linux_raw/pid/syscalls.rs":"eef6aa01830ddd510b83f507da2002c03e58318b73744be2c06ebbe33c4f194f","src/backend/linux_raw/pipe/mod.rs":"8aa966faf3853d1a93d0ed91f7e5f4a53539b0287b25a5bfe489fa1d07f7cfd7","src/backend/linux_raw/pipe/syscalls.rs":"366c730fc3e991bddb9f5a15b8c3917a8e6ace6d1d5a9113b2749e476faf6f83","src/backend/linux_raw/pipe/types.rs":"caee78eaf13af02e5c4024c7459f23287dfd2c544ebab1dc5f728106fafe24da","src/backend/linux_raw/prctl/mod.rs":"2c6478857a0751625edabd61acb841819bfba1093b1faeded15693c805d84952","src/backend/linux_raw/prctl/syscalls.rs":"a18b224307e0d27bda9b7b85c75cd6c7ddfe39f5ce52efb7bb0bf0585b757343","src/backend/linux_raw/process/cpu_set.rs":"dfdcbdf35aff6a3e08e7d38193bf18c12ca8aa64eb0dc417667be82dcc0f7c55","src/backend/linux_raw/process/mod.rs":"fb393c70a9c63ef9a6bf1fb5a2dc94f07d6b0b6987cc5231c15c607015dafd68","src/backend/linux_raw/process/syscalls.rs":"a2e446ba4e09727d11fc3b3d4bb316a659399aad186c08aa5bba80e62741265a","src/backend/linux_raw/process/types.rs":"6811ba822bc12a1a6336649151b4adb1f5d3365684a31c07f01953ea9547743d","src/backend/linux_raw/process/wait.rs":"921aee4b0048746087f52615a98edc2aa0fb4b53d6df44be4533098df55d1b05","src/backend/linux_raw/pty/mod.rs":"2c6478857a0751625edabd61acb841819bfba1093b1faeded15693c805d84952","src/backend/linux_raw/pty/syscalls.rs":"ae09c4aecc0ae87b1ca58d82efc58007b9dddaae78460d615f48da19d1cd0f89","src/backend/linux_raw/rand/mod.rs":"8aa966faf3853d1a93d0ed91f7e5f4a53539b0287b25a5bfe489fa1d07f7cfd7","src/backend/linux_raw/rand/syscalls.rs":"a84f70251672e92f253838bf05e989434f67373233e88d21d8835cbb792b1fe3","src/backend/linux_raw/rand/types.rs":"a21dbb1ab31eed6b59a57520be5a3c603c36d18ea74e67f7527f40835ba209a3","src/backend/linux_raw/reg.rs":"def5f88730bd625b2298c86559b5a378fce4bf6ce225204740ba087e034abce6","src/backend/linux_raw/runtime/mod.rs":"b2cae8cce3822c3c92942f06ea0b68464040dcac33c6f0f7ee392c6269993347","src/backend/linux_raw/runtime/syscalls.rs":"90849c8e429f215c225d430e7f550b5da0bb96054ef01745d3184f6c890ba8cf","src/backend/linux_raw/runtime/tls.rs":"6316060560a112c2e9cd9807cdba6e8c91414113a04a739160929ae9a67bba1f","src/backend/linux_raw/shm/mod.rs":"8aa966faf3853d1a93d0ed91f7e5f4a53539b0287b25a5bfe489fa1d07f7cfd7","src/backend/linux_raw/shm/syscalls.rs":"3ebf91610b02de0312e1f0f0cc5d56e12b4d93794540087b3182cbdf3cb9c8db","src/backend/linux_raw/shm/types.rs":"b831b474aba7eb97167c9289f5257776a72b39134b44e67a0ecfcef2394dcd47","src/backend/linux_raw/system/mod.rs":"8aa966faf3853d1a93d0ed91f7e5f4a53539b0287b25a5bfe489fa1d07f7cfd7","src/backend/linux_raw/system/syscalls.rs":"2af6c2672a6528b404b52f501d9803a63e9f47886d3b5d2d2dc5a71cefa7327a","src/backend/linux_raw/system/types.rs":"1ceab8d738a71043473b26e97fa3fd79d588a86d4774cbc9b9e1d4f1447a016e","src/backend/linux_raw/termios/mod.rs":"2c6478857a0751625edabd61acb841819bfba1093b1faeded15693c805d84952","src/backend/linux_raw/termios/syscalls.rs":"151f03b938aec36a38fbf4e4b507ddc84d0999a246ee48abd5893464daa736a8","src/backend/linux_raw/thread/futex.rs":"0aa0f9f5be6a79de81e82c9f11f1bf8831f682a4b2f6cb29669e1591636f084e","src/backend/linux_raw/thread/mod.rs":"6ad4a4b90b9234e79900b27ebbe8837e3a7a36aec532912e3e253edce5225067","src/backend/linux_raw/thread/syscalls.rs":"0e3dca63be7322e1e9c58456b28eb8548abb9f08b4436b87b139e891c01c446d","src/backend/linux_raw/time/mod.rs":"672724f55b7b7be6a7452bb1cc2d28b5f0aaa840a2856fe363acce624e1beefc","src/backend/linux_raw/time/syscalls.rs":"451da0bbb66accdf58ee7f2f8746a93b22ee985907a08717a3d3b1650457d878","src/backend/linux_raw/time/types.rs":"028d2993df8fffe5581f7082b8849381bb21928930dc93e4a536a9c7a316c71d","src/backend/linux_raw/ugid/mod.rs":"2c6478857a0751625edabd61acb841819bfba1093b1faeded15693c805d84952","src/backend/linux_raw/ugid/syscalls.rs":"8c86d251db33b399a1d1cbb8e87afe650b78db84f444d3251309b7a0480b54f7","src/backend/linux_raw/vdso.rs":"056314f72c71d4b041739337ca2098afb7fe070f844327f462c293f43481b75b","src/backend/linux_raw/vdso_wrappers.rs":"3bec7ee7daacc93b26c15e829e3ecdea62f9fc62244841ec80ee47a55806a69e","src/bitcast.rs":"e21c87c292c781b27256c6f5dcf9fd52dd69ed6e21016cbd67ac31a8219ebf8e","src/check_types.rs":"e52f710e1cfc12ca13a495f2b43c227b293ff295e1ce3ab332935b28a7579872","src/clockid.rs":"e41172c87401ef7b8ad37ec09b5b690c55c51b37009414a2089b34dcac9d93a9","src/cstr.rs":"41af2a4429fe6c67a8883f8a8f63d66c90df566abda74fd2b03bcef7b9f24218","src/event/eventfd.rs":"81cbd08f7bdf40a6ce1ca692b63da1dc8ba925282990668d9d68f1203e839fa1","src/event/kqueue.rs":"222842df22e66f41d93dacd3cba933810d9313d5ca9959fddabedda9c4312bf2","src/event/mod.rs":"326a7ddfeb33bb95dd31391d65f400fa622e5d0ab827c096cab16af4b6843b51","src/event/poll.rs":"0ee583dbd457a573a82a06c04a2a24bd2c76e751d27a435507d55338e2871327","src/event/port.rs":"4e51ff150e5d17cbd44aa64a38b99c15e26eaaf2e350768b5dcacdfde4fa5212","src/ffi.rs":"c2b8b38c02d72749aceb715c496726caba1f1fa989ad3856d0103a2fafed89ed","src/fs/abs.rs":"10e1a148a5b05476efbb591dcc969c1a6caf3d2d9592741eef22341a97c8f4a3","src/fs/at.rs":"a12f382f7b544ae4bbb7a967de43178dad796e05fd6eb8136e1a34236b48f9dc","src/fs/constants.rs":"23923e0fce3221bdac371fe2d05028f30a87dba4f9b4a573b69dc2d6d39320b2","src/fs/copy_file_range.rs":"d3b644374390d482b2ff749a2459458872b57d0dcf9670368739b7833509a7c2","src/fs/cwd.rs":"9f429a79ace6e17455634da09216ee0ad3d067a4541518b3193ae6a8d9ff1e26","src/fs/dir.rs":"347a52f4ca9ac6321c52e802e97ec90d1b4c62ec955c8996fc17f8f5aed69966","src/fs/fadvise.rs":"1220e2cf5cf58fc7cc950d48738050a052c504f745ca67130daa6410e248230a","src/fs/fcntl.rs":"1d1ee1e0bc962779e1cc1b8b76f39c820746240557daa9e14b47b1799120dc39","src/fs/fcntl_apple.rs":"e2f23f038083621bcdecc98d02ce1023508afaecdb2ed0fba5c8b70f955301e5","src/fs/fcopyfile.rs":"ce565f61e1fbf2e31086077c2f1d01b6bb3b048915edda87fe9a4a4f5e8ff7e4","src/fs/fd.rs":"fe53f211e91352d522ac4f3b553a9366506458e2158295a93c5e746618f7a5fe","src/fs/getpath.rs":"28f6970fc1bbc37bb35c84724b59eac436ea7407a4522e18c2bdacb1fdd2edd9","src/fs/id.rs":"1b5c8a8baf9a9bb1f895f97189cea4e5982a0d35b192afeec6340a6c6222e0cb","src/fs/ioctl.rs":"e798eb41bbc201c375bc295ad2928c2467b45b4fbbff3c82236dccbc0d6391a5","src/fs/makedev.rs":"85520b484cb7c15ab71ea1c368578ea3b7e484d82f8510db92b6ce9f7ca341ae","src/fs/memfd_create.rs":"00df492a3afdfc65a07d6849422596bf6cd4608a653d5d25ba54d6640eacda11","src/fs/mod.rs":"2192ea7f6f81a18d892d3526df00c43d83669f1a1734957ba94cf8269980792e","src/fs/mount.rs":"8f6ea2b997dd83c50c90291b9ada3ed77a9ce1ad701c9b0d533b5113b317be5b","src/fs/openat2.rs":"4a95c15dab533a41201b5fa25c8a212956b7571d58cad696bdaf45af8aef96db","src/fs/raw_dir.rs":"18ad797876d6230c38d4cacbed081c4028e03ace477d0520d9b226b40de942f0","src/fs/seek_from.rs":"a9efa0feb9ac789cf47667e91efee2e3f2dcde16cb3b7a928c99da640fa0e0d6","src/fs/sendfile.rs":"e3b2058741cf4b1698f34d84bb37130cf2b72806d522a16fe541e832cde136cb","src/fs/statx.rs":"f925be3d9a179a903549b3ac18038d004f4f2021e46dad4aa2757907aebefeca","src/fs/sync.rs":"a3b23543834281f347b0f873bd38154d31d404871188ac08f2b20b9196234cfd","src/fs/xattr.rs":"5e222adb52caf69d949ab3f944fb2a482dd6ca3a47200532ca6e72d44cbe8334","src/io/close.rs":"0aa3cd05a8fed8e5244f97b8b6c2e7f65ed93a4e5435c6329852bb3da7514440","src/io/dup.rs":"bbebf4633120e21c7c49ecb93576cffa7e908f8089deb260f8d97426b469a0d4","src/io/errno.rs":"58a4d20ba0924e4d514e3c876fbe08982f1623187642ae14780815e65989c8c8","src/io/fcntl.rs":"5b7696e40757615940745ecc7f33d84678d24f55eba978a8efdd0805cba7b1c1","src/io/ioctl.rs":"3dffbda413fd380f1580e2e75c531a5f4a0487417ea1c235c23fe46b70e46bd9","src/io/is_read_write.rs":"1bfb9ee5d58e0b29b44af12fe2668c7bccc841358698dcde47f1519ff9bb73b4","src/io/mod.rs":"75f1d0646be1d4c7c08b5887d8119b0103be8c25c43ccd4e0e97015508c0bb8f","src/io/read_write.rs":"77b8058769dc0bf5a88d73acd7ce70af1a89c549b00b790e281dc20364bcb6af","src/io_uring.rs":"4e641c9aaeba14890e4a354ad8b1fda8fe617923b4d3d685da22721d66a14bdf","src/ioctl/bsd.rs":"e33772530b1b2d1e7d61ce53d9267e68d88f7f5df13301f90412d89a46c6faaa","src/ioctl/linux.rs":"96df90bbc926783f387e8fe1656841d4c7a857fd4e9f41f95492f7dcece33074","src/ioctl/mod.rs":"d2233ba2a81738828d28d4297f8dafaf254c475174de48bd0fe3891d29a71bfe","src/ioctl/patterns.rs":"af24c47597e619a8e0fd3ca071f21db1963a629b511d9279ef9d36a8030a6fe8","src/lib.rs":"ee2b60b7b29c9f4e0f474534a76961c2ce5c9f3640eea67fa395e633b64efa6e","src/maybe_polyfill/no_std/io/mod.rs":"77889bb5c5a4f2e50e38379cdaa5d0fef4b0cafc3da056735df01f6deae75747","src/maybe_polyfill/no_std/mod.rs":"ec94a4aab4bc475785e469d10fd6bc95667e1d47d958e9cff3a19049d88c8c80","src/maybe_polyfill/no_std/net/ip_addr.rs":"046327ee244f758f2bc31d3be305d8cd0dfd8342aac1add8259e999b4b46c4a7","src/maybe_polyfill/no_std/net/mod.rs":"486555be5c56cf3e049e65a1ea73aa60839c6f6ca667833e88ee4f360f9606dd","src/maybe_polyfill/no_std/net/socket_addr.rs":"bfeb32d32c176cde76323abcffebfc47e9898fb8d7ce3668c602dc8451086a2d","src/maybe_polyfill/no_std/os/fd/mod.rs":"27ef0afbcb0695cbb15101070f417eb51e0ef85ae66ec967d95e80771d507c47","src/maybe_polyfill/no_std/os/fd/owned.rs":"ab86ffa2693a04f3085770faf395f95e5303001711be8b19c44a47a0ac574091","src/maybe_polyfill/no_std/os/fd/raw.rs":"f3648c7bd4a6ff94bd823ed9e0d99d398e02f24875cf9b25962736999e7c6943","src/maybe_polyfill/no_std/os/mod.rs":"27dab639a765827644005d5f2fcc7c825310606b889cc8dd83f54c9528350dc0","src/maybe_polyfill/no_std/os/windows/io/mod.rs":"5bbcc05c83fee5026dd744a994e0458469466d5be39081baa62df07753b92fd2","src/maybe_polyfill/no_std/os/windows/io/raw.rs":"4c32609a489dd938a49328b5637cb3bafb96437f2f9f269ab66d7d3cb90247f6","src/maybe_polyfill/no_std/os/windows/io/socket.rs":"c658f42f24eff44a661f2adfd24a11af80fe9897f3e2af4dc5d2c64808308d65","src/maybe_polyfill/no_std/os/windows/mod.rs":"fdb416f8f231a4e778b5f985b9ae712ece5e1a1402963ad1a5f6a8b9843795f4","src/maybe_polyfill/std/mod.rs":"7c16c86cc73e226e65ead598e4018238b22000a345040b706bf1e1b3eba115fc","src/mm/madvise.rs":"69481cd3354dbffe6cd93b234448e59de6d0fe6440bcf8b12f951f37745bc1dc","src/mm/mmap.rs":"8cd1a8278e138d0805726474bb3016fdaa09a8074bbc8f20d1e842e7fc17efa3","src/mm/mod.rs":"b3a6cb838986d45825b912355cedead761211a494ca6f89b2367a2d2157e340e","src/mm/msync.rs":"a9092be024ecbfa9c14edb935404513498b0da2ac6c99fc31fe4e58196a95f02","src/mm/userfaultfd.rs":"8073443bd181ff0b3ba4d0b1ae67370b4864035a0c8b4898cd709dc47c518ae7","src/mount/fsopen.rs":"160e384e9175fd98669cda1cf3590bb195c2ba7e1c724e9ea06e692595e58ba1","src/mount/mod.rs":"5f0c9df4727592695deb1cd63ae1de021b03dcd9d0d1b68e1f34d12a7136cb19","src/mount/mount_unmount.rs":"8ad11675e5d762d33fbefbed06a6a9f9e52a9b689bd06662446152614321ab77","src/mount/types.rs":"601ae3e10b7dc496fed7f3b40a80e81c6edd7bf13189d7be45c3212d4c684c39","src/net/mod.rs":"a6bc55f9e086caf46a7c00783498d73a328a66f2a991f1ec65d5f13931377b0f","src/net/send_recv/mod.rs":"7b77a70c0ad2601b5da30e8b202e34c6bfc9e10df4ce2b1a35b111522ae123a3","src/net/send_recv/msg.rs":"f100a88648b9eba88272aca77d76351432636022a1d4b0e9485e002a3489d05a","src/net/socket.rs":"1296706d964d110be7cd46b61a0a06dabf382a2377907ba41badfe94807cb50f","src/net/socket_addr_any.rs":"a9af81e967a91b45e51aec4f46a068fade7035c5d19dfaf05bfdcd3b3c32e9bf","src/net/socketpair.rs":"56f4885c31d2664cd16e18a9a88792a4912fedd953cec36dba67e8581fd57921","src/net/sockopt.rs":"4f00ff76d3cd3fd2e915f51eba59827fb60885d6b0c6d37b32ca4306cb8fe836","src/net/types.rs":"35878a8ff12cd7252ed735cb640bad03e4e13c6016769c2d154c0fab08505248","src/net/wsa.rs":"6e546b42f50a851fc833c57cda76cfb347203ed4b0dea574a3d325bf5a2ebf80","src/param/auxv.rs":"8602af47a39bb340d319807bdecdb9be8b467101a9ed96061277b90234801913","src/param/init.rs":"a31c0e5cea61a1a999767fe74f87c0d59eeb6bce66578b842fe0e0c32be27a55","src/param/mod.rs":"25b10acd5b1da8faa6f5204e6b0379b38bfab667916e886cca64bea01a42dec2","src/path/arg.rs":"d87117157ec21f61a5e50b2779b4284fd13dd7db11b20a6bc9e475d0e4a25357","src/path/dec_int.rs":"8ff8e14442c46f8e7a9b80d73973619b4271549b9defd538479bf8c2d93aa72e","src/path/mod.rs":"6b1b949c94bcc47e0f08a3f8e8db5b61ff497d0dfd3e0655f51c01d3e4b7dfd6","src/pid.rs":"f1c486000c5b1311b2d720cee88f089c17ef9a171709673dd06e6f35f4ff98a3","src/pipe.rs":"cec0a831237ab2652556b149767d1b382e9739a1bb21ea401a35b36df7f7a3e7","src/prctl.rs":"19aa584895874ea48b9bbe1bf695b81257b0281df64dfdd91e1b4bfa298661b7","src/process/chdir.rs":"911216459aa429fe9f125f1357d6900b43b0007835b85c719875d00f79a74664","src/process/chroot.rs":"2b5f6124eb19f26ad2705174f7ad50cdc0a5d15abd59ffcf55421228d82130b4","src/process/exit.rs":"48de66e5504a00cb375d8f415ce63b6225a3f5204268d40726a7d0fbba43f587","src/process/id.rs":"e4733f9e8e4b5f50e98ef7a23802e126f1f14ece8b3d7ae7446c6a66affc6bc1","src/process/ioctl.rs":"23ad0285671e8d7ca71a63c50655dbf732ccea8af11d754a0558e0236db37e76","src/process/kill.rs":"96d5ce432c19cf2b600d5248c681c117abf53ae94bbfca7e75ac533a40e3968e","src/process/membarrier.rs":"1c4c39b359d1d0e9bbe16352eedfca9278d9ef298ade8ec00e998617bbcbfed8","src/process/mod.rs":"70e7fc79e4edf3ffd56c20643707cafdc595330db68e4ff8f75dcb3756ad5b1d","src/process/pidfd.rs":"39de2dc7919eeeb53a5980622616140327671cd7e36bcf597aee1749df3a5b5b","src/process/pidfd_getfd.rs":"1faace75bdbcda57a296806dcfa5487811f8d11dad9e6199c98df72feae66724","src/process/prctl.rs":"0130d05362a17a9282f3391189095e1f4f51fb56d7a5205906a0011842df4576","src/process/priority.rs":"f135482e71ea8aa0daf92b9f238051178a4c904070fa8409622f94155df3c544","src/process/procctl.rs":"430ec397782772d5a028903c2813d3e11f7e577af144b9effd9c42629ac8d3d2","src/process/rlimit.rs":"10b79de3ced0e64059a94c879742d46a35a6176c776d8eed75031d5e6340283d","src/process/sched.rs":"b49ef463f44b5e90a04e9a08569d0ae20eecb8667f38f31c786df1eb099db466","src/process/sched_yield.rs":"6565faa3928b66ddc74a65e893e15edfa4b9be4f7e5f5f68527501a7f6bc3350","src/process/umask.rs":"1a0f31a842303c978e3f05ec191e2b5e96104c09c6596473b42b1fac34898a50","src/process/wait.rs":"612920489b5618bf82a9a615947e0acbed6eb310acbb9bb70e934d457dd1dd49","src/procfs.rs":"63b286dd3302be7f426841eb3b9261ef4785c3159ed78c24734bd5094c9b0b2e","src/pty.rs":"652c412e7280d0344b3f9bfe376c106a7f1b42b898b7af344e6e72da268e2bf0","src/rand/getrandom.rs":"1c8166a02a74f5593bb4673ef907524df04cbc1568020a5ab2ff7f4aa1283f8b","src/rand/mod.rs":"cab59332aadd9b679f5b22cbb222d48ee028af5eb9fd4a4d43922da659b895d7","src/runtime.rs":"695e60b9cc22755ce0e653fd63a0c73024a14eb8f576c6071dad6806c32f5552","src/shm.rs":"b96fe8a05ee5d4536464a8843a776d43a938abaf22c772fc35b5373d95644a8d","src/signal.rs":"a294b49d487dafaa42e534f8b1d93e87bec135087676b2ba2ef865cf2fccdaf2","src/static_assertions.rs":"504cf66f0d5b8e335be02f9ae8b0a355abc98c2c82eec0098bdf0988e662e260","src/stdio.rs":"a5de2d7d9c3c5a901f88b6acf4754687c958a2f3a93c7945c2b8fcb948d468af","src/system.rs":"4d7d1eff18094ec85a8ead70a7ccbe8ef78fd7f5705b7dfe3fa52541e9494887","src/termios/ioctl.rs":"a1ac967f7811a482f8bb53847c37c8359f518cd26da9df7b816ba678a0139623","src/termios/mod.rs":"b44b7caa60b6f458657ed58a0e0eca41bb4e6d6be4b0f042bbb8ab7056cebe4b","src/termios/tc.rs":"e41312d15464b83b2457c2502fc3f3b9998cfb02ba68739026dd4285cc7130ac","src/termios/tty.rs":"35a5fc2d26501e0e6dde1a755eeccf7b4f3b4702c5bac30e0953220808ef7034","src/termios/types.rs":"38167a38fb83e84f2845f86b373ecaaa3ebf6c2f91dccd07c10539201d6f5070","src/thread/clock.rs":"f49eb7271eb3e6831d9b0c3a01bfddbe4bd69dee237bbaa059884de452b29e79","src/thread/futex.rs":"985f9a0dce1e2a4892ae7f26bd1bf119ceae3f9fa6b4707e166624fc1cee76d4","src/thread/id.rs":"ad72db4fea9fccb728310bbfd01ef8c00f6cc60fa2a750f6349646a134f7009b","src/thread/libcap.rs":"4c51b7df566d38dd0f85f81ef53279a745a39f0f4e1154791fd38438e2ca7db1","src/thread/mod.rs":"98634ece0b882f123ad887017692f2a4d94a23a1dec278ed660b3497cac5cceb","src/thread/prctl.rs":"6caec7eb4fe122e80ba1ecafc56b710c79ab8b3c97529d5beba74d87af356875","src/thread/setns.rs":"ea9142b1f3a5b3f329683be185f960c50e5c1636149e91fbf59e88a897fc607d","src/time/clock.rs":"e59a29f1bed8c31c3d5b6fad60f2d4fa6cab8dd8e86148bb3693a5e3a1ce735f","src/time/mod.rs":"43afee938c80d124d04d4ba190c03f4d21d1e3bfc154fff309211e4f6eabe940","src/time/timerfd.rs":"f17092b84553741aa2d2b44c6992b5d2c8c96cc2c2007fc9a2c6b2064485e53f","src/timespec.rs":"32a4d930cbc0f6dbd23153290db920671cf4ce65a4a127e176f897c1cde42d7d","src/ugid.rs":"6616c6e35b7e43aee5b150f1efae7a50711e0947943c9a96833dbe214ad9e85f","src/utils.rs":"9ae76f8a41d6cc350cdd58c9084b5c3a5a708eeecd769783debdbcbaef442182","src/weak.rs":"c7cf03bf2aeba494b1999ab32183fa8c603ab72e254c0e312a67f168877e410d"},"package":"2b426b0506e5d50a7d8dafcf2e81471400deb602392c7dd110815afb4eaf02a3"}
+\ No newline at end of file
++{"files":{"CODE_OF_CONDUCT.md":"f210602311e3f74b32f46237fd55f4ce36d798e85e3db1432ec667f63a7ffc44","CONTRIBUTING.md":"3fd57de5c678db1c972da676a8231d2fde9820695ef1f0d53f1e55a3e81d9de0","COPYRIGHT":"377c2e7c53250cc5905c0b0532d35973392af16ffb9596a41d99d202cf3617c9","Cargo.toml":"5a6eafc41863c968362a1b8424dd0a5e1ce6502496b2999ef97ceac81f30fbcd","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-Apache-2.0_WITH_LLVM-exception":"268872b9816f90fd8e85db5a28d33f8150ebb8dd016653fb39ef1f94f2686bc5","LICENSE-MIT":"23f18e03dc49df91622fe2a76176497404e46ced8a715d9d2b67a7446571cca3","ORG_CODE_OF_CONDUCT.md":"a62b69bf86e605ee1bcbb2f0a12ba79e4cebb6983a7b6491949750aecc4f2178","README.md":"0acc443488c1d6f7a0a80a91385d06c7a1920c1f5c847214dd6c23a6bd027f75","SECURITY.md":"4d75afb09dd28eb5982e3a1f768ee398d90204669ceef3240a16b31dcf04148a","benches/mod.rs":"e260e67273aa0a37cffdc5cd451699335b8ee656c17275a0d0f2b4563c6018ca","build.rs":"5f93559819ca7fe233f01579d51ff5b58cb6e96ef9e7817a3358a9b410d5bbf6","src/backend/libc/c.rs":"92d9c17e26569a9d09cd05a83a06df514ffbb02795765fc8bb0d74bda7711a51","src/backend/libc/conv.rs":"b66d080db3a4c2756fe722462b543982bf88e5bc8071b9dc98d28ec2aee3dbfc","src/backend/libc/event/epoll.rs":"b6f00ec602f9a6b9a2b7a66a242efda088b67ed66053692dcbba683df4fdaf19","src/backend/libc/event/mod.rs":"7f8547c599b8263eb791890bbe4a0b22fe2676d007ffdcc3e07b2e48d1c994db","src/backend/libc/event/poll_fd.rs":"1186dd69fc78d474975f1b8c55c9dd261229cb0c53a0dd2a88754ef62529a917","src/backend/libc/event/syscalls.rs":"295a930521b345192caaf916b39189299f81f5f7c9f1aaed235baf3bf6f2f5fa","src/backend/libc/event/types.rs":"e81130bc07e8f762ea2942bca1bde048ade0df24aac619907f3055d2dda7d522","src/backend/libc/event/windows_syscalls.rs":"ebfac665c6676c4b803134ab8806be8aa2e96bdbc7799a19c544cd9069b35787","src/backend/libc/fs/dir.rs":"71332c794f4e2438e7c700a0d2206fe0088cf98473030e3f8ccddf252ad7f5b2","src/backend/libc/fs/inotify.rs":"a027a718db8bf6ff9a778c2cfa706850b466240b0a2296d72b9834c0096968e8","src/backend/libc/fs/makedev.rs":"89c679a0ef18dd41b3c6223bce0f329ad35bf6cadbf16e47b33fad3f312ba4a6","src/backend/libc/fs/mod.rs":"3d28b803011b57da6315bb747daf0117218687c0cc610358f5dafddf0b7d44d3","src/backend/libc/fs/syscalls.rs":"229d1c27d53bfbd35310b05aaf8b3307d6b6e9948faf0655713bc662f901bd02","src/backend/libc/fs/types.rs":"ca25c8271e1fd2ff6c28082884ca112fd5e0523b2aa3cd289cd9a399c962a172","src/backend/libc/io/errno.rs":"d33978855c2e0e97d5e1f7a04ab10894f9f2c3e329e949f259a757f89942074e","src/backend/libc/io/mod.rs":"746647bd864e4ec7717925b6d176cebdb392b7d015070244cc48d92780351dd6","src/backend/libc/io/syscalls.rs":"cbd9e9db566597de0a709247c8ba3bd1e6ce266a128d639273353c706f0592b7","src/backend/libc/io/types.rs":"2efd39bb3df19a9db5496217284f3d2235ddb354fac81dd71861109a56bede0b","src/backend/libc/io/windows_syscalls.rs":"fab3fa099aa89cea56a6edc651dd37750e581534f4f45b04a7e425f130b08468","src/backend/libc/io_uring/mod.rs":"2c6478857a0751625edabd61acb841819bfba1093b1faeded15693c805d84952","src/backend/libc/io_uring/syscalls.rs":"5af8146d5971c833e6fd657f652c618b31f854e1b0811864fba9b658cb633e19","src/backend/libc/mm/mod.rs":"8aa966faf3853d1a93d0ed91f7e5f4a53539b0287b25a5bfe489fa1d07f7cfd7","src/backend/libc/mm/syscalls.rs":"c04c61ad3d7f3b24d13f89144d7fa0a05658cea4763207f9250db4f9362de2fe","src/backend/libc/mm/types.rs":"48406d44d79b102ae9d17860749611d26c1f04e3630e9680dfcb20e2efc148a8","src/backend/libc/mod.rs":"778083f85aaca00aa5c8ec36b541e0e8a3e693660a6acf4722b4508f015c278c","src/backend/libc/mount/mod.rs":"8aa966faf3853d1a93d0ed91f7e5f4a53539b0287b25a5bfe489fa1d07f7cfd7","src/backend/libc/mount/syscalls.rs":"1bc87501a078616d0190d2e85de55f3f968b8cb79d49bd9eb839a350eed26089","src/backend/libc/mount/types.rs":"1b0d527ae5a272ae441fa35192616a867090204404324a341a7864ae1f5ea3fd","src/backend/libc/net/addr.rs":"9bdc2febb20785bc26a820eaba52cb546c67c79cb547f08105087cd0f79d5861","src/backend/libc/net/ext.rs":"0dd64877abe1ba86b47a2ab34340e3f6cc7b53b22d0bb5e237daf6a82edd46b0","src/backend/libc/net/mod.rs":"5f0bd80a6575aba4a714443ca99af76ad15b053f1f8029aa1d9c7fa10e6d9242","src/backend/libc/net/msghdr.rs":"64825203c09cb60ac4e869cd27a44697f1efe53455c41684279d50d8f0a96701","src/backend/libc/net/read_sockaddr.rs":"b1a72b7bd56cd152ed0731f571e4d9411b2974f860fc045319dc74b5b077e569","src/backend/libc/net/send_recv.rs":"1450ca958431a5bf3a85cdeef88b387f30d5c1215de2c56d164ccda38f21b36c","src/backend/libc/net/sockopt.rs":"178a6622bab25bf1813d6c17151f48d820e238db8cbd05be3aae563902d6c53e","src/backend/libc/net/syscalls.rs":"1c703ea3d7a1d0bfa43ffe07425882f310260589d513e1a936de434fa653cf3a","src/backend/libc/net/write_sockaddr.rs":"6f06f6cf6089fbc43f93f4f3830468735c92277595769867cb6c89e1fe1c299a","src/backend/libc/param/auxv.rs":"fc9476c85482b1d44190289224ccf40c96fbf3a2fe3d8554ddb42acb2e97a8ae","src/backend/libc/param/mod.rs":"5234b8f1bcb886cca6ea003d411d75eaeebe58deedd80e3441354bf46ed85d4d","src/backend/libc/pid/mod.rs":"2c6478857a0751625edabd61acb841819bfba1093b1faeded15693c805d84952","src/backend/libc/pid/syscalls.rs":"49ea679b96c0741d048e82964038f9a931bc3cf3a0b59c7db3df89629b9c49e6","src/backend/libc/pipe/mod.rs":"8aa966faf3853d1a93d0ed91f7e5f4a53539b0287b25a5bfe489fa1d07f7cfd7","src/backend/libc/pipe/syscalls.rs":"8affde100f6a9dfc762b79d1e48be5c1039be414f8ef7d5a6acaba882a68d259","src/backend/libc/pipe/types.rs":"bcf2751691748b5084a6d8d8851b496bc2ee9011fc1c717839b09b07d423eb7f","src/backend/libc/prctl/mod.rs":"2c6478857a0751625edabd61acb841819bfba1093b1faeded15693c805d84952","src/backend/libc/prctl/syscalls.rs":"8a2684f444a7555098dce2b92270d81cefdae902716c6e5d59bd7b0657e8a29d","src/backend/libc/process/cpu_set.rs":"b3d36b01b53b0b6c61a20ed8a69d48eccdd90cc17f82f2926ef1e844f002d0b7","src/backend/libc/process/mod.rs":"3f376060815d2ed78081ddf6fb0cddc97dff5dac2f4934f0672cb09736e16377","src/backend/libc/process/syscalls.rs":"278c87df6321cad2bd37049a7cfeecad7dd54f5e3a8449368da4c3409637e6af","src/backend/libc/process/types.rs":"c011d60d93130e1233ee1d69de1e834a6ce05628220a1c0bea9b5c6eb4c853a2","src/backend/libc/process/wait.rs":"0cc556aed976b4bbb3965f74fd76b8216c755fce25043b7b21ce54afa07c9773","src/backend/libc/pty/mod.rs":"2c6478857a0751625edabd61acb841819bfba1093b1faeded15693c805d84952","src/backend/libc/pty/syscalls.rs":"699a4c325fc590b8b5dabfe5a9ff386809be14dd24bf35aa6ef581b2bd75457b","src/backend/libc/rand/mod.rs":"8aa966faf3853d1a93d0ed91f7e5f4a53539b0287b25a5bfe489fa1d07f7cfd7","src/backend/libc/rand/syscalls.rs":"78c7201e6bcb75e9cab9486d1878861319f865de2b2c46437be68690bd17bf13","src/backend/libc/rand/types.rs":"4eb0b4cdd0a9b089d1c9f6a25ad1ca97be28a38b7b07a705ec605b773f63f880","src/backend/libc/shm/mod.rs":"8aa966faf3853d1a93d0ed91f7e5f4a53539b0287b25a5bfe489fa1d07f7cfd7","src/backend/libc/shm/syscalls.rs":"60d797d4e85e08e6330e6b8d80094356ce377e5484952f88ae2a6e49231c268c","src/backend/libc/shm/types.rs":"2206eac8ee74951b995e1e80f10bf235cc6b04e0a099f4adefce546378838233","src/backend/libc/system/mod.rs":"38563ea68829ca5a4b1b0695ac8a5c05718e85bdc88a36dc805efdfce45d3909","src/backend/libc/system/syscalls.rs":"abe1093f3495668d8576ae55073c74976cffb04d2e1bc20583d7ec12ac848b06","src/backend/libc/system/types.rs":"6871e16aee14fe2ae03cea798c3e509ffe44778a9c0e5608fd73e2e015876d7e","src/backend/libc/termios/mod.rs":"2c6478857a0751625edabd61acb841819bfba1093b1faeded15693c805d84952","src/backend/libc/termios/syscalls.rs":"77c4c6fdfe036251ee1540df5b398a38d87597a5e7e94abaf9b983419ae4ca24","src/backend/libc/thread/futex.rs":"b666828653b12634bbd7fd709acf69641b648ec40962a1d4f904c5db14d2eff5","src/backend/libc/thread/mod.rs":"fa710053974d7f16a6c49242ee6c10a3b9e1143452b9daeaed8837302a679fff","src/backend/libc/thread/syscalls.rs":"c8e84ad232ec1317be989529c24e204c51bb4e1e0212de2c1937b00bc92e5483","src/backend/libc/time/mod.rs":"38563ea68829ca5a4b1b0695ac8a5c05718e85bdc88a36dc805efdfce45d3909","src/backend/libc/time/syscalls.rs":"f82e0725c5af8a52e61ee83aad2c77694f2f7a72ac1e6eb284109a70ac6edc38","src/backend/libc/time/types.rs":"47ab4178a51c246ecc5f2a2fcca7f6ad9c8a1a45d11dadcb95ee0e4f82e8dfe2","src/backend/libc/ugid/mod.rs":"2c6478857a0751625edabd61acb841819bfba1093b1faeded15693c805d84952","src/backend/libc/ugid/syscalls.rs":"8edf91b8790add23902c9f5418da6b0723a371677f29f490e0c8af852f0f1a0c","src/backend/libc/winsock_c.rs":"3bf3884fd250eca806ffdf96da68e29c133a697810b78b333ea449e523e58562","src/backend/linux_raw/arch/aarch64.rs":"2a255c9135bc8a321c180f52b88eb2b158bc9170cd222149caeae63c24587d44","src/backend/linux_raw/arch/arm.rs":"165bccb5883d0136e55d42091183765f83d86e9d37a7cb2cec9ae8af32774db6","src/backend/linux_raw/arch/mips.rs":"24af364aa93fd4b9917639d473336490a143f0d1723b09f388e72d534160ee51","src/backend/linux_raw/arch/mips32r6.rs":"e436a2ade34f2f7c58b8924462b07a2499dfc951e1e1318d51759444fb8b658e","src/backend/linux_raw/arch/mips64.rs":"897da9ddc877963ad59464d7f81dc59df8a7e91251e6adea14cfd946e2740a1c","src/backend/linux_raw/arch/mips64r6.rs":"3c08aea13c1139fb0dfbe74ca3d0147f007c3aa2eda641afb82c46f6aa4f6cd1","src/backend/linux_raw/arch/mod.rs":"37eaeea601d39be00d170856a31a8ec6a27b0d3ac22ab3ee642ab0e45dee237d","src/backend/linux_raw/arch/powerpc64.rs":"dfb001f8636a5e46d728900b0804fe6c374e5e18a6f0e76d7d62e0c07da74477","src/backend/linux_raw/arch/riscv64.rs":"41d33242d941030f46077dc2b1bc4c7913fe7630d693a5a7eef966bcf38f9d8b","src/backend/linux_raw/arch/thumb.rs":"2fd979ab421248c0a4c592bc0cefee63edc26528f469b71b63eaed35356e42e8","src/backend/linux_raw/arch/x86.rs":"81c73c0a13925854b54db754f533dec565b1db2d9586e210a2450c6f5915171d","src/backend/linux_raw/arch/x86_64.rs":"e929036a1f3cf93ba538f4523b241605cc7b5e61f84ffe6d9d5cdbcb6f73e543","src/backend/linux_raw/c.rs":"74cdf34fcd5bba9628c04585fa13721073e2bffe6740315c7655d1a55150230f","src/backend/linux_raw/conv.rs":"fd339c2e78e6470cb03614540bea2958eec83f1b6cbecc68696a2d2061365f90","src/backend/linux_raw/event/epoll.rs":"4ae8f9de28c6875ab29cde7da9b4b01a6578e905c7a0e0b461d26025cb24ab52","src/backend/linux_raw/event/mod.rs":"72e46b04637e2d1d2a6b97af616144995399e489d1fe916faf835d72fc8c64cd","src/backend/linux_raw/event/poll_fd.rs":"fe1c289980384edf7334a4afe92f92cb6f51b7c0431ecb19930426a3bdb89fa5","src/backend/linux_raw/event/syscalls.rs":"8782cdf978bff0773bf5f35c1056d5c29a40742cf6f99d9606d951b52596cd34","src/backend/linux_raw/event/types.rs":"9538403f2e2c5ffcc939769d83fbfcc2db03874b45c36a2858ba07a05a3e3635","src/backend/linux_raw/fs/dir.rs":"c675dc5413428d2defd6752e99d210da83639779e853db209de6a1c08d35e0e7","src/backend/linux_raw/fs/inotify.rs":"9fc5edea36e347041a39e583cb473dd84af40c63fff3dfbb85a1a97d2833d8e5","src/backend/linux_raw/fs/makedev.rs":"c6b4505c4bcbbc2460e80f3097eb15e2c8ef38d6c6e7abd78e39c53c372139e2","src/backend/linux_raw/fs/mod.rs":"8689e46f9c4c3e1190ae5fe39176c573cccac10b3739472128ca237f41e2686a","src/backend/linux_raw/fs/syscalls.rs":"a4512f58e2ef01bf0799c96fa7821284866b6826594642565f46d93e566e2e12","src/backend/linux_raw/fs/types.rs":"ccbce0b7971064c71b16a341b7f39bf2c375262ea199d6fcf9d1b073b032ccf1","src/backend/linux_raw/io/errno.rs":"2681fe1f7da132414606a7c0dbc1c2cbc43e45f326f00d06cdff51d9724c8c5e","src/backend/linux_raw/io/mod.rs":"7ae2324427892cca6f5ab53858d847b165f790a72ec25f3d99fb15f0506c9f27","src/backend/linux_raw/io/syscalls.rs":"831ce424f0032e6891d46503e972313fd7a46f15cb2fbb1f87e40fb05a018e9a","src/backend/linux_raw/io/types.rs":"d0df5a62248e1ba19af0f89f86da7ba2312d97065c2814ddf6b08a4a0db23bad","src/backend/linux_raw/io_uring/mod.rs":"2c6478857a0751625edabd61acb841819bfba1093b1faeded15693c805d84952","src/backend/linux_raw/io_uring/syscalls.rs":"0f7c9cb7ccddf5687e4b9e5b23558871a452a29ac6095a0184a06c47b9b18eb6","src/backend/linux_raw/mm/mod.rs":"8aa966faf3853d1a93d0ed91f7e5f4a53539b0287b25a5bfe489fa1d07f7cfd7","src/backend/linux_raw/mm/syscalls.rs":"ac77ee94bb26bc959d3b077ef60e72863ef845c1438646b61755bdb861467bc6","src/backend/linux_raw/mm/types.rs":"46abdd2492301b7fe542ea3949eef2cfe28959cdd1f571ee1350e35b6517349b","src/backend/linux_raw/mod.rs":"bb26fe4783f834a4624c99ecf9e4ea8fa4d1c7fe9adfb6d7310fac689e7797bc","src/backend/linux_raw/mount/mod.rs":"8aa966faf3853d1a93d0ed91f7e5f4a53539b0287b25a5bfe489fa1d07f7cfd7","src/backend/linux_raw/mount/syscalls.rs":"3947261b5d46b9737f02dc5352c3a3a35c63c461fd75bcd8ae6619dfc0bfb54d","src/backend/linux_raw/mount/types.rs":"af364e3f054b0fa55562827944235055beb752716347645cd1775d1a4d5615c1","src/backend/linux_raw/net/addr.rs":"fbb6c071a8ebc2c557c7013d36d4b540c1ff6ed63103d0a8abcacd8977fbf5c8","src/backend/linux_raw/net/mod.rs":"904b2ccd4228ebcda25db0223b92fb03e4042bcc7b0a202f19410a8a5ff517a5","src/backend/linux_raw/net/msghdr.rs":"a88e8395dd5a25c103dc2172d4e7013e5104a6d70091ad78efd286ebc8284acf","src/backend/linux_raw/net/read_sockaddr.rs":"4ab5d470f03f19a928327603e1d55de8b9e21cbe4bfc6ed65d856803092b1c90","src/backend/linux_raw/net/send_recv.rs":"6d5d2aec61a3c1b4a5fef1a8a487dc8e163da8988d1237541ed008baa01128cc","src/backend/linux_raw/net/sockopt.rs":"2ea8625ecd68a799c14ed48dec2f3e01f085222b6cc36f4916b20143e08ae575","src/backend/linux_raw/net/syscalls.rs":"b231754af1c94f61d206839b45f51516e0210761bbbcc1ef68c1fe6fc8a1bbbb","src/backend/linux_raw/net/write_sockaddr.rs":"a9374256cb47b3f4d5fec308986a109ce51a521abab31590df1f66c04c07d437","src/backend/linux_raw/param/auxv.rs":"c6a74c0b41aa28c850a80aad7cbad677df65c1f3eb78415872f6f3d1166716f4","src/backend/linux_raw/param/init.rs":"a5024f9afe361884f22fb4d65e632ccd8a60f1138ff6bd526df600d40e0d9b7e","src/backend/linux_raw/param/libc_auxv.rs":"167621c0b1bfd06c91c1201e9404d6624aa0a702ade17c6e37d9370c636be589","src/backend/linux_raw/param/mod.rs":"2e6a1a1c00351b9c88bd615aa923f71d76208df5626dd9bea03067f28f81dc31","src/backend/linux_raw/pid/mod.rs":"2c6478857a0751625edabd61acb841819bfba1093b1faeded15693c805d84952","src/backend/linux_raw/pid/syscalls.rs":"eef6aa01830ddd510b83f507da2002c03e58318b73744be2c06ebbe33c4f194f","src/backend/linux_raw/pipe/mod.rs":"8aa966faf3853d1a93d0ed91f7e5f4a53539b0287b25a5bfe489fa1d07f7cfd7","src/backend/linux_raw/pipe/syscalls.rs":"366c730fc3e991bddb9f5a15b8c3917a8e6ace6d1d5a9113b2749e476faf6f83","src/backend/linux_raw/pipe/types.rs":"caee78eaf13af02e5c4024c7459f23287dfd2c544ebab1dc5f728106fafe24da","src/backend/linux_raw/prctl/mod.rs":"2c6478857a0751625edabd61acb841819bfba1093b1faeded15693c805d84952","src/backend/linux_raw/prctl/syscalls.rs":"a18b224307e0d27bda9b7b85c75cd6c7ddfe39f5ce52efb7bb0bf0585b757343","src/backend/linux_raw/process/cpu_set.rs":"dfdcbdf35aff6a3e08e7d38193bf18c12ca8aa64eb0dc417667be82dcc0f7c55","src/backend/linux_raw/process/mod.rs":"fb393c70a9c63ef9a6bf1fb5a2dc94f07d6b0b6987cc5231c15c607015dafd68","src/backend/linux_raw/process/syscalls.rs":"a2e446ba4e09727d11fc3b3d4bb316a659399aad186c08aa5bba80e62741265a","src/backend/linux_raw/process/types.rs":"6811ba822bc12a1a6336649151b4adb1f5d3365684a31c07f01953ea9547743d","src/backend/linux_raw/process/wait.rs":"921aee4b0048746087f52615a98edc2aa0fb4b53d6df44be4533098df55d1b05","src/backend/linux_raw/pty/mod.rs":"2c6478857a0751625edabd61acb841819bfba1093b1faeded15693c805d84952","src/backend/linux_raw/pty/syscalls.rs":"ae09c4aecc0ae87b1ca58d82efc58007b9dddaae78460d615f48da19d1cd0f89","src/backend/linux_raw/rand/mod.rs":"8aa966faf3853d1a93d0ed91f7e5f4a53539b0287b25a5bfe489fa1d07f7cfd7","src/backend/linux_raw/rand/syscalls.rs":"a84f70251672e92f253838bf05e989434f67373233e88d21d8835cbb792b1fe3","src/backend/linux_raw/rand/types.rs":"a21dbb1ab31eed6b59a57520be5a3c603c36d18ea74e67f7527f40835ba209a3","src/backend/linux_raw/reg.rs":"def5f88730bd625b2298c86559b5a378fce4bf6ce225204740ba087e034abce6","src/backend/linux_raw/runtime/mod.rs":"b2cae8cce3822c3c92942f06ea0b68464040dcac33c6f0f7ee392c6269993347","src/backend/linux_raw/runtime/syscalls.rs":"90849c8e429f215c225d430e7f550b5da0bb96054ef01745d3184f6c890ba8cf","src/backend/linux_raw/runtime/tls.rs":"6316060560a112c2e9cd9807cdba6e8c91414113a04a739160929ae9a67bba1f","src/backend/linux_raw/shm/mod.rs":"8aa966faf3853d1a93d0ed91f7e5f4a53539b0287b25a5bfe489fa1d07f7cfd7","src/backend/linux_raw/shm/syscalls.rs":"3ebf91610b02de0312e1f0f0cc5d56e12b4d93794540087b3182cbdf3cb9c8db","src/backend/linux_raw/shm/types.rs":"b831b474aba7eb97167c9289f5257776a72b39134b44e67a0ecfcef2394dcd47","src/backend/linux_raw/system/mod.rs":"8aa966faf3853d1a93d0ed91f7e5f4a53539b0287b25a5bfe489fa1d07f7cfd7","src/backend/linux_raw/system/syscalls.rs":"2af6c2672a6528b404b52f501d9803a63e9f47886d3b5d2d2dc5a71cefa7327a","src/backend/linux_raw/system/types.rs":"1ceab8d738a71043473b26e97fa3fd79d588a86d4774cbc9b9e1d4f1447a016e","src/backend/linux_raw/termios/mod.rs":"2c6478857a0751625edabd61acb841819bfba1093b1faeded15693c805d84952","src/backend/linux_raw/termios/syscalls.rs":"151f03b938aec36a38fbf4e4b507ddc84d0999a246ee48abd5893464daa736a8","src/backend/linux_raw/thread/futex.rs":"0aa0f9f5be6a79de81e82c9f11f1bf8831f682a4b2f6cb29669e1591636f084e","src/backend/linux_raw/thread/mod.rs":"6ad4a4b90b9234e79900b27ebbe8837e3a7a36aec532912e3e253edce5225067","src/backend/linux_raw/thread/syscalls.rs":"0e3dca63be7322e1e9c58456b28eb8548abb9f08b4436b87b139e891c01c446d","src/backend/linux_raw/time/mod.rs":"672724f55b7b7be6a7452bb1cc2d28b5f0aaa840a2856fe363acce624e1beefc","src/backend/linux_raw/time/syscalls.rs":"451da0bbb66accdf58ee7f2f8746a93b22ee985907a08717a3d3b1650457d878","src/backend/linux_raw/time/types.rs":"028d2993df8fffe5581f7082b8849381bb21928930dc93e4a536a9c7a316c71d","src/backend/linux_raw/ugid/mod.rs":"2c6478857a0751625edabd61acb841819bfba1093b1faeded15693c805d84952","src/backend/linux_raw/ugid/syscalls.rs":"8c86d251db33b399a1d1cbb8e87afe650b78db84f444d3251309b7a0480b54f7","src/backend/linux_raw/vdso.rs":"056314f72c71d4b041739337ca2098afb7fe070f844327f462c293f43481b75b","src/backend/linux_raw/vdso_wrappers.rs":"3bec7ee7daacc93b26c15e829e3ecdea62f9fc62244841ec80ee47a55806a69e","src/bitcast.rs":"e21c87c292c781b27256c6f5dcf9fd52dd69ed6e21016cbd67ac31a8219ebf8e","src/check_types.rs":"e52f710e1cfc12ca13a495f2b43c227b293ff295e1ce3ab332935b28a7579872","src/clockid.rs":"e41172c87401ef7b8ad37ec09b5b690c55c51b37009414a2089b34dcac9d93a9","src/cstr.rs":"41af2a4429fe6c67a8883f8a8f63d66c90df566abda74fd2b03bcef7b9f24218","src/event/eventfd.rs":"81cbd08f7bdf40a6ce1ca692b63da1dc8ba925282990668d9d68f1203e839fa1","src/event/kqueue.rs":"222842df22e66f41d93dacd3cba933810d9313d5ca9959fddabedda9c4312bf2","src/event/mod.rs":"326a7ddfeb33bb95dd31391d65f400fa622e5d0ab827c096cab16af4b6843b51","src/event/poll.rs":"0ee583dbd457a573a82a06c04a2a24bd2c76e751d27a435507d55338e2871327","src/event/port.rs":"4e51ff150e5d17cbd44aa64a38b99c15e26eaaf2e350768b5dcacdfde4fa5212","src/ffi.rs":"c2b8b38c02d72749aceb715c496726caba1f1fa989ad3856d0103a2fafed89ed","src/fs/abs.rs":"10e1a148a5b05476efbb591dcc969c1a6caf3d2d9592741eef22341a97c8f4a3","src/fs/at.rs":"a12f382f7b544ae4bbb7a967de43178dad796e05fd6eb8136e1a34236b48f9dc","src/fs/constants.rs":"23923e0fce3221bdac371fe2d05028f30a87dba4f9b4a573b69dc2d6d39320b2","src/fs/copy_file_range.rs":"d3b644374390d482b2ff749a2459458872b57d0dcf9670368739b7833509a7c2","src/fs/cwd.rs":"9f429a79ace6e17455634da09216ee0ad3d067a4541518b3193ae6a8d9ff1e26","src/fs/dir.rs":"347a52f4ca9ac6321c52e802e97ec90d1b4c62ec955c8996fc17f8f5aed69966","src/fs/fadvise.rs":"1220e2cf5cf58fc7cc950d48738050a052c504f745ca67130daa6410e248230a","src/fs/fcntl.rs":"1d1ee1e0bc962779e1cc1b8b76f39c820746240557daa9e14b47b1799120dc39","src/fs/fcntl_apple.rs":"e2f23f038083621bcdecc98d02ce1023508afaecdb2ed0fba5c8b70f955301e5","src/fs/fcopyfile.rs":"ce565f61e1fbf2e31086077c2f1d01b6bb3b048915edda87fe9a4a4f5e8ff7e4","src/fs/fd.rs":"fe53f211e91352d522ac4f3b553a9366506458e2158295a93c5e746618f7a5fe","src/fs/getpath.rs":"28f6970fc1bbc37bb35c84724b59eac436ea7407a4522e18c2bdacb1fdd2edd9","src/fs/id.rs":"1b5c8a8baf9a9bb1f895f97189cea4e5982a0d35b192afeec6340a6c6222e0cb","src/fs/ioctl.rs":"18b44e1708c7160c3bb7efaf27eefb6d11671f91d5008144fc74695d75a226c6","src/fs/makedev.rs":"85520b484cb7c15ab71ea1c368578ea3b7e484d82f8510db92b6ce9f7ca341ae","src/fs/memfd_create.rs":"00df492a3afdfc65a07d6849422596bf6cd4608a653d5d25ba54d6640eacda11","src/fs/mod.rs":"2192ea7f6f81a18d892d3526df00c43d83669f1a1734957ba94cf8269980792e","src/fs/mount.rs":"8f6ea2b997dd83c50c90291b9ada3ed77a9ce1ad701c9b0d533b5113b317be5b","src/fs/openat2.rs":"4a95c15dab533a41201b5fa25c8a212956b7571d58cad696bdaf45af8aef96db","src/fs/raw_dir.rs":"18ad797876d6230c38d4cacbed081c4028e03ace477d0520d9b226b40de942f0","src/fs/seek_from.rs":"a9efa0feb9ac789cf47667e91efee2e3f2dcde16cb3b7a928c99da640fa0e0d6","src/fs/sendfile.rs":"e3b2058741cf4b1698f34d84bb37130cf2b72806d522a16fe541e832cde136cb","src/fs/statx.rs":"f925be3d9a179a903549b3ac18038d004f4f2021e46dad4aa2757907aebefeca","src/fs/sync.rs":"a3b23543834281f347b0f873bd38154d31d404871188ac08f2b20b9196234cfd","src/fs/xattr.rs":"5e222adb52caf69d949ab3f944fb2a482dd6ca3a47200532ca6e72d44cbe8334","src/io/close.rs":"0aa3cd05a8fed8e5244f97b8b6c2e7f65ed93a4e5435c6329852bb3da7514440","src/io/dup.rs":"bbebf4633120e21c7c49ecb93576cffa7e908f8089deb260f8d97426b469a0d4","src/io/errno.rs":"58a4d20ba0924e4d514e3c876fbe08982f1623187642ae14780815e65989c8c8","src/io/fcntl.rs":"5b7696e40757615940745ecc7f33d84678d24f55eba978a8efdd0805cba7b1c1","src/io/ioctl.rs":"3dffbda413fd380f1580e2e75c531a5f4a0487417ea1c235c23fe46b70e46bd9","src/io/is_read_write.rs":"1bfb9ee5d58e0b29b44af12fe2668c7bccc841358698dcde47f1519ff9bb73b4","src/io/mod.rs":"75f1d0646be1d4c7c08b5887d8119b0103be8c25c43ccd4e0e97015508c0bb8f","src/io/read_write.rs":"77b8058769dc0bf5a88d73acd7ce70af1a89c549b00b790e281dc20364bcb6af","src/io_uring.rs":"4e641c9aaeba14890e4a354ad8b1fda8fe617923b4d3d685da22721d66a14bdf","src/ioctl/bsd.rs":"e33772530b1b2d1e7d61ce53d9267e68d88f7f5df13301f90412d89a46c6faaa","src/ioctl/linux.rs":"96df90bbc926783f387e8fe1656841d4c7a857fd4e9f41f95492f7dcece33074","src/ioctl/mod.rs":"d2233ba2a81738828d28d4297f8dafaf254c475174de48bd0fe3891d29a71bfe","src/ioctl/patterns.rs":"af24c47597e619a8e0fd3ca071f21db1963a629b511d9279ef9d36a8030a6fe8","src/lib.rs":"ee2b60b7b29c9f4e0f474534a76961c2ce5c9f3640eea67fa395e633b64efa6e","src/maybe_polyfill/no_std/io/mod.rs":"77889bb5c5a4f2e50e38379cdaa5d0fef4b0cafc3da056735df01f6deae75747","src/maybe_polyfill/no_std/mod.rs":"ec94a4aab4bc475785e469d10fd6bc95667e1d47d958e9cff3a19049d88c8c80","src/maybe_polyfill/no_std/net/ip_addr.rs":"046327ee244f758f2bc31d3be305d8cd0dfd8342aac1add8259e999b4b46c4a7","src/maybe_polyfill/no_std/net/mod.rs":"486555be5c56cf3e049e65a1ea73aa60839c6f6ca667833e88ee4f360f9606dd","src/maybe_polyfill/no_std/net/socket_addr.rs":"bfeb32d32c176cde76323abcffebfc47e9898fb8d7ce3668c602dc8451086a2d","src/maybe_polyfill/no_std/os/fd/mod.rs":"27ef0afbcb0695cbb15101070f417eb51e0ef85ae66ec967d95e80771d507c47","src/maybe_polyfill/no_std/os/fd/owned.rs":"ab86ffa2693a04f3085770faf395f95e5303001711be8b19c44a47a0ac574091","src/maybe_polyfill/no_std/os/fd/raw.rs":"f3648c7bd4a6ff94bd823ed9e0d99d398e02f24875cf9b25962736999e7c6943","src/maybe_polyfill/no_std/os/mod.rs":"27dab639a765827644005d5f2fcc7c825310606b889cc8dd83f54c9528350dc0","src/maybe_polyfill/no_std/os/windows/io/mod.rs":"5bbcc05c83fee5026dd744a994e0458469466d5be39081baa62df07753b92fd2","src/maybe_polyfill/no_std/os/windows/io/raw.rs":"4c32609a489dd938a49328b5637cb3bafb96437f2f9f269ab66d7d3cb90247f6","src/maybe_polyfill/no_std/os/windows/io/socket.rs":"c658f42f24eff44a661f2adfd24a11af80fe9897f3e2af4dc5d2c64808308d65","src/maybe_polyfill/no_std/os/windows/mod.rs":"fdb416f8f231a4e778b5f985b9ae712ece5e1a1402963ad1a5f6a8b9843795f4","src/maybe_polyfill/std/mod.rs":"7c16c86cc73e226e65ead598e4018238b22000a345040b706bf1e1b3eba115fc","src/mm/madvise.rs":"69481cd3354dbffe6cd93b234448e59de6d0fe6440bcf8b12f951f37745bc1dc","src/mm/mmap.rs":"8cd1a8278e138d0805726474bb3016fdaa09a8074bbc8f20d1e842e7fc17efa3","src/mm/mod.rs":"b3a6cb838986d45825b912355cedead761211a494ca6f89b2367a2d2157e340e","src/mm/msync.rs":"a9092be024ecbfa9c14edb935404513498b0da2ac6c99fc31fe4e58196a95f02","src/mm/userfaultfd.rs":"8073443bd181ff0b3ba4d0b1ae67370b4864035a0c8b4898cd709dc47c518ae7","src/mount/fsopen.rs":"160e384e9175fd98669cda1cf3590bb195c2ba7e1c724e9ea06e692595e58ba1","src/mount/mod.rs":"5f0c9df4727592695deb1cd63ae1de021b03dcd9d0d1b68e1f34d12a7136cb19","src/mount/mount_unmount.rs":"8ad11675e5d762d33fbefbed06a6a9f9e52a9b689bd06662446152614321ab77","src/mount/types.rs":"601ae3e10b7dc496fed7f3b40a80e81c6edd7bf13189d7be45c3212d4c684c39","src/net/mod.rs":"a6bc55f9e086caf46a7c00783498d73a328a66f2a991f1ec65d5f13931377b0f","src/net/send_recv/mod.rs":"7b77a70c0ad2601b5da30e8b202e34c6bfc9e10df4ce2b1a35b111522ae123a3","src/net/send_recv/msg.rs":"f100a88648b9eba88272aca77d76351432636022a1d4b0e9485e002a3489d05a","src/net/socket.rs":"1296706d964d110be7cd46b61a0a06dabf382a2377907ba41badfe94807cb50f","src/net/socket_addr_any.rs":"a9af81e967a91b45e51aec4f46a068fade7035c5d19dfaf05bfdcd3b3c32e9bf","src/net/socketpair.rs":"56f4885c31d2664cd16e18a9a88792a4912fedd953cec36dba67e8581fd57921","src/net/sockopt.rs":"4f00ff76d3cd3fd2e915f51eba59827fb60885d6b0c6d37b32ca4306cb8fe836","src/net/types.rs":"35878a8ff12cd7252ed735cb640bad03e4e13c6016769c2d154c0fab08505248","src/net/wsa.rs":"6e546b42f50a851fc833c57cda76cfb347203ed4b0dea574a3d325bf5a2ebf80","src/param/auxv.rs":"8602af47a39bb340d319807bdecdb9be8b467101a9ed96061277b90234801913","src/param/init.rs":"a31c0e5cea61a1a999767fe74f87c0d59eeb6bce66578b842fe0e0c32be27a55","src/param/mod.rs":"25b10acd5b1da8faa6f5204e6b0379b38bfab667916e886cca64bea01a42dec2","src/path/arg.rs":"d87117157ec21f61a5e50b2779b4284fd13dd7db11b20a6bc9e475d0e4a25357","src/path/dec_int.rs":"8ff8e14442c46f8e7a9b80d73973619b4271549b9defd538479bf8c2d93aa72e","src/path/mod.rs":"6b1b949c94bcc47e0f08a3f8e8db5b61ff497d0dfd3e0655f51c01d3e4b7dfd6","src/pid.rs":"f1c486000c5b1311b2d720cee88f089c17ef9a171709673dd06e6f35f4ff98a3","src/pipe.rs":"cec0a831237ab2652556b149767d1b382e9739a1bb21ea401a35b36df7f7a3e7","src/prctl.rs":"19aa584895874ea48b9bbe1bf695b81257b0281df64dfdd91e1b4bfa298661b7","src/process/chdir.rs":"911216459aa429fe9f125f1357d6900b43b0007835b85c719875d00f79a74664","src/process/chroot.rs":"2b5f6124eb19f26ad2705174f7ad50cdc0a5d15abd59ffcf55421228d82130b4","src/process/exit.rs":"48de66e5504a00cb375d8f415ce63b6225a3f5204268d40726a7d0fbba43f587","src/process/id.rs":"e4733f9e8e4b5f50e98ef7a23802e126f1f14ece8b3d7ae7446c6a66affc6bc1","src/process/ioctl.rs":"23ad0285671e8d7ca71a63c50655dbf732ccea8af11d754a0558e0236db37e76","src/process/kill.rs":"96d5ce432c19cf2b600d5248c681c117abf53ae94bbfca7e75ac533a40e3968e","src/process/membarrier.rs":"1c4c39b359d1d0e9bbe16352eedfca9278d9ef298ade8ec00e998617bbcbfed8","src/process/mod.rs":"70e7fc79e4edf3ffd56c20643707cafdc595330db68e4ff8f75dcb3756ad5b1d","src/process/pidfd.rs":"39de2dc7919eeeb53a5980622616140327671cd7e36bcf597aee1749df3a5b5b","src/process/pidfd_getfd.rs":"1faace75bdbcda57a296806dcfa5487811f8d11dad9e6199c98df72feae66724","src/process/prctl.rs":"0130d05362a17a9282f3391189095e1f4f51fb56d7a5205906a0011842df4576","src/process/priority.rs":"f135482e71ea8aa0daf92b9f238051178a4c904070fa8409622f94155df3c544","src/process/procctl.rs":"430ec397782772d5a028903c2813d3e11f7e577af144b9effd9c42629ac8d3d2","src/process/rlimit.rs":"10b79de3ced0e64059a94c879742d46a35a6176c776d8eed75031d5e6340283d","src/process/sched.rs":"b49ef463f44b5e90a04e9a08569d0ae20eecb8667f38f31c786df1eb099db466","src/process/sched_yield.rs":"6565faa3928b66ddc74a65e893e15edfa4b9be4f7e5f5f68527501a7f6bc3350","src/process/umask.rs":"1a0f31a842303c978e3f05ec191e2b5e96104c09c6596473b42b1fac34898a50","src/process/wait.rs":"612920489b5618bf82a9a615947e0acbed6eb310acbb9bb70e934d457dd1dd49","src/procfs.rs":"63b286dd3302be7f426841eb3b9261ef4785c3159ed78c24734bd5094c9b0b2e","src/pty.rs":"652c412e7280d0344b3f9bfe376c106a7f1b42b898b7af344e6e72da268e2bf0","src/rand/getrandom.rs":"1c8166a02a74f5593bb4673ef907524df04cbc1568020a5ab2ff7f4aa1283f8b","src/rand/mod.rs":"cab59332aadd9b679f5b22cbb222d48ee028af5eb9fd4a4d43922da659b895d7","src/runtime.rs":"695e60b9cc22755ce0e653fd63a0c73024a14eb8f576c6071dad6806c32f5552","src/shm.rs":"b96fe8a05ee5d4536464a8843a776d43a938abaf22c772fc35b5373d95644a8d","src/signal.rs":"a294b49d487dafaa42e534f8b1d93e87bec135087676b2ba2ef865cf2fccdaf2","src/static_assertions.rs":"504cf66f0d5b8e335be02f9ae8b0a355abc98c2c82eec0098bdf0988e662e260","src/stdio.rs":"a5de2d7d9c3c5a901f88b6acf4754687c958a2f3a93c7945c2b8fcb948d468af","src/system.rs":"4d7d1eff18094ec85a8ead70a7ccbe8ef78fd7f5705b7dfe3fa52541e9494887","src/termios/ioctl.rs":"a1ac967f7811a482f8bb53847c37c8359f518cd26da9df7b816ba678a0139623","src/termios/mod.rs":"b44b7caa60b6f458657ed58a0e0eca41bb4e6d6be4b0f042bbb8ab7056cebe4b","src/termios/tc.rs":"e41312d15464b83b2457c2502fc3f3b9998cfb02ba68739026dd4285cc7130ac","src/termios/tty.rs":"35a5fc2d26501e0e6dde1a755eeccf7b4f3b4702c5bac30e0953220808ef7034","src/termios/types.rs":"38167a38fb83e84f2845f86b373ecaaa3ebf6c2f91dccd07c10539201d6f5070","src/thread/clock.rs":"f49eb7271eb3e6831d9b0c3a01bfddbe4bd69dee237bbaa059884de452b29e79","src/thread/futex.rs":"985f9a0dce1e2a4892ae7f26bd1bf119ceae3f9fa6b4707e166624fc1cee76d4","src/thread/id.rs":"ad72db4fea9fccb728310bbfd01ef8c00f6cc60fa2a750f6349646a134f7009b","src/thread/libcap.rs":"4c51b7df566d38dd0f85f81ef53279a745a39f0f4e1154791fd38438e2ca7db1","src/thread/mod.rs":"98634ece0b882f123ad887017692f2a4d94a23a1dec278ed660b3497cac5cceb","src/thread/prctl.rs":"6caec7eb4fe122e80ba1ecafc56b710c79ab8b3c97529d5beba74d87af356875","src/thread/setns.rs":"ea9142b1f3a5b3f329683be185f960c50e5c1636149e91fbf59e88a897fc607d","src/time/clock.rs":"e59a29f1bed8c31c3d5b6fad60f2d4fa6cab8dd8e86148bb3693a5e3a1ce735f","src/time/mod.rs":"43afee938c80d124d04d4ba190c03f4d21d1e3bfc154fff309211e4f6eabe940","src/time/timerfd.rs":"f17092b84553741aa2d2b44c6992b5d2c8c96cc2c2007fc9a2c6b2064485e53f","src/timespec.rs":"32a4d930cbc0f6dbd23153290db920671cf4ce65a4a127e176f897c1cde42d7d","src/ugid.rs":"6616c6e35b7e43aee5b150f1efae7a50711e0947943c9a96833dbe214ad9e85f","src/utils.rs":"9ae76f8a41d6cc350cdd58c9084b5c3a5a708eeecd769783debdbcbaef442182","src/weak.rs":"c7cf03bf2aeba494b1999ab32183fa8c603ab72e254c0e312a67f168877e410d"},"package":"2b426b0506e5d50a7d8dafcf2e81471400deb602392c7dd110815afb4eaf02a3"}
+--- a/vendor/rustix/src/fs/ioctl.rs
++++ b/vendor/rustix/src/fs/ioctl.rs
+@@ -77,7 +77,7 @@ unsafe impl ioctl::Ioctl for Ficlone<'_>
+ type Output = ();
+
+ const IS_MUTATING: bool = false;
+- const OPCODE: ioctl::Opcode = ioctl::Opcode::old(c::FICLONE as ioctl::RawOpcode);
++ const OPCODE: ioctl::Opcode = ioctl::Opcode::old(linux_raw_sys::ioctl::FICLONE as ioctl::RawOpcode);
+
+ fn as_ptr(&mut self) -> *mut c::c_void {
+ self.0.as_raw_fd() as *mut c::c_void
diff --git a/meta/recipes-devtools/rust/files/target-build-value.patch b/meta/recipes-devtools/rust/files/target-build-value.patch
new file mode 100644
index 0000000000..cf59afba5a
--- /dev/null
+++ b/meta/recipes-devtools/rust/files/target-build-value.patch
@@ -0,0 +1,26 @@
+Add correct build value for cross-compiled targets on stage1 when
+bootstapping rustc.
+
+Upstream-Status: Backport [https://github.com/rust-lang/rust/pull/119619/commits/b888e2f82b9dbe81875f50d13adbc0271a9401ff]
+
+Signed-off-by: onur-ozkan <work@onurozkan.dev>
+Signed-off-by: Yash Shinde <Yash.Shinde@windriver.com>
+---
+diff --git a/src/bootstrap/test.rs b/src/bootstrap/test.rs
+--- a/src/bootstrap/src/core/build_steps/test.rs
++++ b/src/bootstrap/src/core/build_steps/test.rs
+@@ -1489,8 +1489,12 @@
+ // NOTE: Only stage 1 is special cased because we need the rustc_private artifacts to match the
+ // running compiler in stage 2 when plugins run.
+ let stage_id = if suite == "ui-fulldeps" && compiler.stage == 1 {
+- compiler = builder.compiler(compiler.stage - 1, target);
+- format!("stage{}-{}", compiler.stage + 1, target)
++ // At stage 0 (stage - 1) we are using the beta compiler. Using `self.target` can lead finding
++ // an incorrect compiler path on cross-targets, as the stage 0 beta compiler is always equal
++ // to `build.build` in the configuration.
++ let build = builder.build.build;
++ compiler = builder.compiler(compiler.stage - 1, build);
++ format!("stage{}-{}", compiler.stage + 1, build)
+ } else {
+ format!("stage{}-{}", compiler.stage, target)
+ };
diff --git a/meta/recipes-devtools/rust/files/target-rust-ccld.c b/meta/recipes-devtools/rust/files/target-rust-ccld.c
new file mode 100644
index 0000000000..d3d491fb60
--- /dev/null
+++ b/meta/recipes-devtools/rust/files/target-rust-ccld.c
@@ -0,0 +1,19 @@
+/*
+*
+* Copyright (C) 2022 Wind River Systems
+*
+* SPDX-License-Identifier: MIT
+*
+*/
+
+#include <string.h>
+#include <stdlib.h>
+#include <unistd.h>
+
+int main (int argc, char *argv[])
+{
+ unsetenv("LD_LIBRARY_PATH");
+ execvp("target-rust-ccld-wrapper", argv);
+
+ return 0;
+}
diff --git a/meta/recipes-devtools/rust/files/zlib-off64_t.patch b/meta/recipes-devtools/rust/files/zlib-off64_t.patch
new file mode 100644
index 0000000000..6c74da2435
--- /dev/null
+++ b/meta/recipes-devtools/rust/files/zlib-off64_t.patch
@@ -0,0 +1,36 @@
+Do not undefine _FILE_OFFSET_BITS when
+_LARGEFILE64_SOURCE is defined. This ends up
+causing 64bit time_t to fail the build because
+it needs 64 bit off_t
+
+Upstream-Status: Submitted [https://github.com/madler/zlib/pull/764]
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+Index: rustc-1.72.0-src/vendor/libz-sys/src/zlib/gzguts.h
+===================================================================
+--- rustc-1.72.0-src.orig/vendor/libz-sys/src/zlib/gzguts.h
++++ rustc-1.72.0-src/vendor/libz-sys/src/zlib/gzguts.h
+@@ -3,15 +3,6 @@
+ * For conditions of distribution and use, see copyright notice in zlib.h
+ */
+
+-#ifdef _LARGEFILE64_SOURCE
+-# ifndef _LARGEFILE_SOURCE
+-# define _LARGEFILE_SOURCE 1
+-# endif
+-# ifdef _FILE_OFFSET_BITS
+-# undef _FILE_OFFSET_BITS
+-# endif
+-#endif
+-
+ #ifdef HAVE_HIDDEN
+ # define ZLIB_INTERNAL __attribute__((visibility ("hidden")))
+ #else
+Index: rustc-1.72.0-src/vendor/libz-sys/.cargo-checksum.json
+===================================================================
+--- rustc-1.72.0-src.orig/vendor/libz-sys/.cargo-checksum.json
++++ rustc-1.72.0-src/vendor/libz-sys/.cargo-checksum.json
+@@ -1 +1 @@
+-{"files":{"Cargo.toml":"5fc1259b26541f617473d6b741816705c91322db9740e347a8686e3c0b30ab2e","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"34c467b6945a22696d060b8fdd2379d464acb3408d4c599d3dc5fe4fa1b4c84f","README-zng.md":"2f9f34e6b388a401b8d8318b64997a7521e4198c5c314f8cea11433623628515","README.md":"75701bfcd7158e924f51ece8debb6d4425ccd6ad5d2806004b5f174423f4b2af","build.rs":"b383e60f71c9b40ecc807ac58473f9b85d7036e8359796634cba2701224493a3","build_zng.rs":"b7768e19f0bf876f29eabb6ad6511f530e61d8aa92bfbe89a7cf3818e4824ce7","src/lib.rs":"7c4a3394e17e6250c1f4f2067efecc56b1850827596432ad0ce75e5eea800446","src/smoke.c":"10607c81b73811bfcb9718767312bf97ba2ccf7048ea2f18a2085aa06ad7f91b","src/zlib-ng/CMakeLists.txt":"5840d2c44e335af0f58f8a2545da60be403946b1181641b35ea7425b2e0f44db","src/zlib-ng/FAQ.zlib":"c524f4f86d336b3de71dd6977afddffa9c02fda5c26db4dfefae44959e7614a2","src/zlib-ng/INDEX.md":"989545e90d8e9ac149034f762ce78ed8976ebf9324326228dea37ca190154609","src/zlib-ng/LICENSE.md":"d3c80be055d94d798eaa786116e84fa0b010bc11420b5d2060d978ea77845436","src/zlib-ng/Makefile.in":"1f56adbf5fac7fa36c6e4c11b5f061acb971984c941154cbf0344e2b68b99e7d","src/zlib-ng/PORTING.md":"4105267b5e00f8d608f31dcf4fe2cfede15cc94568211691419e6cba3d8e539e","src/zlib-ng/README.md":"ba04244ad8eea94d834d25aa75b40e7f849844a33c68ed180c2a631378e1f211","src/zlib-ng/adler32.c":"82ffa1b4fc4b198ba8004135f79b8819d9f2b28e851c30c0ab41e6d32dfbf70d","src/zlib-ng/adler32_p.h":"f56915c59a345baf4558374447385a317e29365a4db2fbb38af4de3e1a1a0201","src/zlib-ng/arch/arm/Makefile.in":"95464884ba75a7b12c9ceda5845d8d68d5a7d7dac8a8dc24b27beb2192e5b97b","src/zlib-ng/arch/arm/adler32_neon.c":"3990b8d5570b12c2162218fe0e9bc723a03f1c89b5ed3ba70a74a98976260ee7","src/zlib-ng/arch/arm/arm.h":"855adbb02d7b9a5714a17d9dcff493610e7cd2b9a1f4e58e1c99626ab536e868","src/zlib-ng/arch/arm/armfeature.c":"4800228414695b632b9ceca14409e782d6fc3b357ba7ab00858925fc66b5532e","src/zlib-ng/arch/arm/chunkset_neon.c":"95fc7917d1d30094e15a35c56d1e9c189c5ca3758553a3467d4da793eaed656f","src/zlib-ng/arch/arm/crc32_acle.c":"e2be53267a2a59fc79c4b3bab00e8b25bf64a8fc8bf2c6684e5b1b1fd1480f9d","src/zlib-ng/arch/arm/ctzl.h":"feb70d55e66025fff806e30e48002b35cfff79533d352585cfa5f118edbc90b1","src/zlib-ng/arch/arm/insert_string_acle.c":"d1b1dae5aeada70f2b03c2cbf3112ce55a92401c2d87709081b04dcf5992e1ad","src/zlib-ng/arch/arm/slide_neon.c":"19d8cf5c742ac6b82164c7a183538ad1129f9f17e9b8bce8b40daac3820fb6c4","src/zlib-ng/arch/generic/Makefile.in":"f41a34839986eac8dd52cf91fada0efff4171c059ab5d7db6347c91bd6d9db09","src/zlib-ng/arch/power/Makefile.in":"69644d1a0ff8e7f38005c0a55cdbaf3f0d87f42abf8fc4f4136271c4fedfb846","src/zlib-ng/arch/power/adler32_power8.c":"79b75e98ad3a62facbbdd8c0b178d3f993b57f6e34d320bf47eca33aa8c330a1","src/zlib-ng/arch/power/power.c":"0647afb3b3b7ce2a19b4815ec8fdeee0b37c759413e5ef0a668a2dba22d94803","src/zlib-ng/arch/power/power.h":"f3f15f94fed98a2f7dd5d4568c5172e597228be4141d6895062703c3f70024da","src/zlib-ng/arch/power/slide_hash_power8.c":"932ea533d25e2f5478afe0c47830e7ef24276cad0d75fd91f2d8c799bd4b5d36","src/zlib-ng/arch/s390/Makefile.in":"eef6c3169723f089b0b5f852423ec55bf0364caeddd7cda991f2e76bc1682107","src/zlib-ng/arch/s390/README.md":"730b9a0230609988fbd1bdd52a7abdaa1fa5c65253ac78163dd4a5eccb966abc","src/zlib-ng/arch/s390/dfltcc_common.c":"3d460448ad4c5b687da6b7c0ad8498ece92b771dc7ddd0189e096acca5a1cad4","src/zlib-ng/arch/s390/dfltcc_common.h":"de8902d3863c8a7a3f6ea27dec2ee5a4f17ef5d8646e48a586d0b29fe94c9a0b","src/zlib-ng/arch/s390/dfltcc_deflate.c":"d6941d3c5ada225ec39b98b35bce1d203aa1f2d994a47c8487d377d9ef2f6efc","src/zlib-ng/arch/s390/dfltcc_deflate.h":"5c90a812e2a2f2b842dba027e5640791e52206e74b8423cb78e0b8ea12ed29ad","src/zlib-ng/arch/s390/dfltcc_detail.h":"fe66cd700a1d017eba86c2c6e95f53e9a4d1cb491de9cb3963b2a2907098baa9","src/zlib-ng/arch/s390/dfltcc_inflate.c":"83643b5605cdc2d1d7780e1bdeb007f9dc6a1cca633157abbfb5d3232f2b8816","src/zlib-ng/arch/s390/dfltcc_inflate.h":"d7a4a5ae79abd1a5456521926b918becfe86c253a4fc23723fbc09f7c3303128","src/zlib-ng/arch/s390/self-hosted-builder/actions-runner.Dockerfile":"999c962c49508ebf61414e6f9ffea059926ac500d4c6d707ea1f9e77402f7374","src/zlib-ng/arch/s390/self-hosted-builder/actions-runner.service":"33a359eb58d76152f916b40ee1357f7edfda75e8dfb55a5b12ac83bcd6ed7055","src/zlib-ng/arch/s390/self-hosted-builder/fs/usr/bin/actions-runner":"f647e18728ea15fe927ac9f8cba83a5b343654a0e91b5ebe653bae7af7375110","src/zlib-ng/arch/s390/self-hosted-builder/fs/usr/bin/entrypoint":"add4ebdc4f06ed15bb1de12a8c9ceb370a60baebb0932a1026a75433940ad3df","src/zlib-ng/arch/s390/self-hosted-builder/qemu-user-static.service":"54551049f6181da88700a2a944a72b0af3b8abde876fa28e1348deb5eb96c91b","src/zlib-ng/arch/x86/INDEX.md":"c12f9bf0d66743a6472fb756bf46def8eea1dd235be7fca994dcb22f693a3524","src/zlib-ng/arch/x86/Makefile.in":"9f6fe7567a99e81aaa3bef8ccfa1ad40f524efc285cf8dfe0f497a1530f8016c","src/zlib-ng/arch/x86/adler32_avx.c":"99056732c7bd5d53dc108f282811a40bf21570926781af5dc7b17cb9218963de","src/zlib-ng/arch/x86/adler32_ssse3.c":"883a5520b4481225d097c90c5359106a3c8eb7b921499c94276e999b7c39adc5","src/zlib-ng/arch/x86/chunkset_avx.c":"13c83149146c408ffdc9358bcb5355259f6196e6cc6fe025b7ea3647e313cd0a","src/zlib-ng/arch/x86/chunkset_sse.c":"f14d0557634b53af8cd6e2a1ce9d57df50244a72e85ff3b100b5ca287d1cfa8a","src/zlib-ng/arch/x86/compare258_avx.c":"8b2838d168de4608327f25fe52d53763a82413ee911d87947d3fcd72c7f9bf26","src/zlib-ng/arch/x86/compare258_sse.c":"b5049722ffd4a43a96868eeba5e000271cfc5fcbf3c2657026ead15b1df28a10","src/zlib-ng/arch/x86/crc_folding.c":"defb5a7067562612651f693c910db53cf228b7cd7fef11991504767a7d84f224","src/zlib-ng/arch/x86/crc_folding.h":"939212546611917e9e066e8ed30cdda95680ec1f5fe0890cc4865b4e6d7fc215","src/zlib-ng/arch/x86/insert_string_sse.c":"9e84a75b6a565422eb105261b6729d2a02b89133bd14372c949d5381b5deed3e","src/zlib-ng/arch/x86/slide_avx.c":"5e448e439ac24e7cb10eee176ca37f2c63f73c135c0a2af040e232bad490997d","src/zlib-ng/arch/x86/slide_sse.c":"1946cabb634c905fddef0a22b2fad19dfd99110169567c3beceef71145b2e316","src/zlib-ng/arch/x86/x86.c":"1af56e27b2e951e1ad1344e62c2f7a8c49a776fcdd1cb0f4ea9d6152118a479e","src/zlib-ng/arch/x86/x86.h":"4d2d20ea0087089141e250e77bb3d419954b9092810028b151581b9115a5fe8c","src/zlib-ng/chunkset.c":"cbf26582fff56726cc28bee05ff0a1680c50308b8dd9bb8cfb57d7f0a587d0bd","src/zlib-ng/chunkset_tpl.h":"eaaf0804f6162ab26b2b6de263a478ffb111559e653372e96e400acba9c63563","src/zlib-ng/cmake/detect-arch.c":"e0da3d16195eefb54bef77163db737a66453f25ae16648aa8f6beeac70787662","src/zlib-ng/cmake/detect-arch.cmake":"27fa8da497b39ac70d881e2d345749611dae4c30f7b7a9c9e32f2c042672189a","src/zlib-ng/cmake/detect-coverage.cmake":"e4e372991ba80a16ad47df2716708a56013cc628aa7ed01573a2360c60610125","src/zlib-ng/cmake/detect-install-dirs.cmake":"87031a40428a104f5cf38ecdb8a5028d8c679cfa772a58adde8380c809b34eff","src/zlib-ng/cmake/detect-sanitizer.cmake":"a8f7a4515278532b251b567d82ed576fe1ca7e698992ed92d1beb8e8dd22237f","src/zlib-ng/cmake/run-and-compare.cmake":"13d85c12c9d6c7b1b148bd0c5a5b4faa6a4b56f3823bf03c4f8d914c9c5949d8","src/zlib-ng/cmake/run-and-redirect.cmake":"7f08d18c09aa58113882ec760735a62a1723a5bfcae9f73bd3713a4dbaeab898","src/zlib-ng/cmake/test-compress.cmake":"0d2d1595859ccfb6795bb98700a4f7c1652b025cc344a1291524601087957888","src/zlib-ng/cmake/test-tools.cmake":"63aabfffd53970b8e145870b2a1c03bffa3595f7df04bd86f94e97b6f2a387e7","src/zlib-ng/cmake/toolchain-aarch64.cmake":"46be0bf580a49a528c72005484655afad1de3705b39a66a7b0c213b0fa81cee6","src/zlib-ng/cmake/toolchain-arm.cmake":"05e38076fd6ffb9785ff9844ccecd26436c9dc4c25b7777b62e5f52e788c3882","src/zlib-ng/cmake/toolchain-armhf.cmake":"1a2029163a57415eec9a5dd5f45d3254d349e97b1beb5d16876b741717673341","src/zlib-ng/cmake/toolchain-mingw-i686.cmake":"df9000354b820d3713d1469edc9f94cd095389b0cca83965730b8e64857fdf3f","src/zlib-ng/cmake/toolchain-mingw-x86_64.cmake":"ee316e6e3202919da5d497f9e246466fd715fcf079cb5b4afc4774089d1fefad","src/zlib-ng/cmake/toolchain-powerpc.cmake":"9bd6fc58ce5b70603657f2c195c4a5cf52fae96ad63ac787978831c5858f762c","src/zlib-ng/cmake/toolchain-powerpc64.cmake":"917fc5eef84921d8b38f43c2b4f60870965b4eecc8f018c7b3499e1142c715af","src/zlib-ng/cmake/toolchain-powerpc64le.cmake":"5b2edd36d62de513db2d32bfbf779979d81ac527b981cc3379a4e933fc5a94d1","src/zlib-ng/cmake/toolchain-s390x.cmake":"cf52cecea7bd2a9d1ff5fd8edcb03c531e3b404bbcd15a15dec2e0e19936f2ac","src/zlib-ng/cmake/toolchain-sparc64.cmake":"e543062485d06a7e0fec8135887c5e73363517fa4babc23ef7b780916d75afda","src/zlib-ng/compare258.c":"56bfd48d5ff9ca422fbb728df7a373436c73796561dff118c7d4039fe70d29e2","src/zlib-ng/compress.c":"41df6eb62d6fb1334ecfe0a0c3e50a7ee89528719857f2b8297cbc512149759c","src/zlib-ng/configure":"160f69a1e51c49f6454ece92e4c5e08675ca5d90cf22b8f79cbe54c4381d93c2","src/zlib-ng/crc32.c":"98440be8a99381151a2d740f2e2228e8c1b23b9193c3642c52a4e34799506336","src/zlib-ng/crc32_comb.c":"11a36a6088fb520a58e0304fc99cf12fc8437519e8a70fe74dad58f00af696ec","src/zlib-ng/crc32_comb_tbl.h":"d6615d209d6c7d5248c6f7fe4e5dbded13c0eb87997b37693032c2902927407d","src/zlib-ng/crc32_p.h":"1fa91375a18e090c0a0dfda39de3df36346a0b1be36c808be6b6c29c32eba922","src/zlib-ng/crc32_tbl.h":"d629378ba38ff5775095b64e277bcd41c4b89fab9b5647a9fb29e15da0db0161","src/zlib-ng/deflate.c":"6fb8979ee8bc43f6e12a649708c7eb50e60bb9bdc2e55c45ce3b15aefe779179","src/zlib-ng/deflate.h":"7b3c649965c54446097d6157dd31d3685aa7df1082e9aa64cb3cdf6ac2c4d023","src/zlib-ng/deflate_fast.c":"d51e1368fc997673c64b5ab9a620439df25f313f8274529d974c5f80b89702b8","src/zlib-ng/deflate_medium.c":"1c3d95cbac76052d39595ea750c5536541c18302b9abb398c27b58955318bba8","src/zlib-ng/deflate_p.h":"2e739301e8c53038c2a958c8c8693584cd8dae464ffef05a22db6d6fa9985676","src/zlib-ng/deflate_quick.c":"280905a191d2b2a7274f2453ac537e01a0fb6e7540a0b212c1514bfb8c9415ea","src/zlib-ng/deflate_slow.c":"a2c66723e1e71ffd6ff856407459ab311a4c6546ecf50285081fc7afcd0ccd2e","src/zlib-ng/doc/algorithm.txt":"0d21a0a4c47e512743389628d1385a831a5e5ff716491095a382b923287f4223","src/zlib-ng/doc/rfc1950.txt":"8f0475a5c984657bf26277f73df9456c9b97f175084f0c1748f1eb1f0b9b10b9","src/zlib-ng/doc/rfc1951.txt":"5ebf4b5b7fe1c3a0c0ab9aa3ac8c0f3853a7dc484905e76e03b0b0f301350009","src/zlib-ng/doc/rfc1952.txt":"f7c810fd9d719d002d605207a9b880600f71d039b9626c5b4b03f2122438dd2d","src/zlib-ng/doc/txtvsbin.txt":"47c273bb22c9773248d380549a330e5c262266b1292154b0d3014f731cc73f47","src/zlib-ng/fallback_builtins.h":"1d2c2da88009a58f240bac33f562fe5a0a39c1e773813a2d75b45283ff1396cd","src/zlib-ng/functable.c":"d9db6530035a06f95982ff3d7680a84f4b54b8425874ccbe2ab10b906bd5708a","src/zlib-ng/functable.h":"e5a2d0c10411d23f04295bcb9ddb9889388974b723caef65aa5c4ea4739f4aa7","src/zlib-ng/gzguts.h":"7b69b2f35264169bc794d0d5c00247d93c203f751d226302966c33b524ed9fb0","src/zlib-ng/gzlib.c":"7e6ad5d9d32e6429d56a5303e2c6e6870d69c023d6647a52fb95902828de4011","src/zlib-ng/gzread.c":"d5d47d24dc463b978fe828320dab140494803fd86b511300f903c7c2eabd4d25","src/zlib-ng/gzwrite.c":"1685ad2c88239b3434cd2c4a9d66b67842310b2d1dfd01aec0fc293eef20e858","src/zlib-ng/infback.c":"4decaa412219fc8adb935754c54a4dedf3952aaf67107a12512451c65eadee23","src/zlib-ng/inffast.c":"a134d4aa6a46eebe975ca0cd5ef18894fc852b6a840be21ca7243ddbe6c9d8f9","src/zlib-ng/inffast.h":"42e74a92b496ab0726be317e8497a12bf3c3cf3d0d533440ce65befd3929c71c","src/zlib-ng/inffixed_tbl.h":"a94225335396245e9f0ccb2e9b4b334fe7ee0111ed8e32a26bcd52187f364314","src/zlib-ng/inflate.c":"f33e2e7eeaa4b33ba6a2c327f8c9939e6b847afbdad349da65c97bf81c6083b5","src/zlib-ng/inflate.h":"eb25527d1bdedaa45167926dce4c39d9aaa3147b0f4a95f38f5916528c30a09b","src/zlib-ng/inflate_p.h":"4a94c51194da119770cf662ef289994f0c78d95184d54d6ae5d50a393e8f5a62","src/zlib-ng/inftrees.c":"7a777f5ff02ce60fbad6cb843ceadd7b3a8a8a0476ae010c87a0377c2e88f780","src/zlib-ng/inftrees.h":"fa80eb11c2290b345470a03cb861843e2cb1365135233ea8243e9fd79d3618a1","src/zlib-ng/insert_string.c":"aa22ba53a1e75821499809277f9ca0e5ef92b07a618136dd11ae1734e233b7c9","src/zlib-ng/insert_string_tpl.h":"1ceba9903324d10aad6e1d83653c4d534a5b06fd09076414a06215482be00bac","src/zlib-ng/match_tpl.h":"eeab4c6eea8511a7579738e622af062ad16f4016312e93ad34bc5903d8b3c4a1","src/zlib-ng/test/CVE-2002-0059/test.gz":"60bf96b8f433bd7e057ce3496aceaccd70ec80f596a4aa8bcc7786056705ce66","src/zlib-ng/test/CVE-2003-0107.c":"6ed6fba710f8f2b898750f0ec17720fbf01e45c39e8adbba6409681b34914140","src/zlib-ng/test/CVE-2004-0797/test.gz":"38caae524705f676bde13a8df9fc8c7d2fe105ba6bdbab62a405b0276fd3aa2e","src/zlib-ng/test/CVE-2005-1849/test.gz":"e4d5a60617df4b5dd44eda94751ce1eacdb325792bba6e3cc4676719a3adf742","src/zlib-ng/test/CVE-2005-2096/test.gz":"8f702d4861aa3ec98ac03a59ff26b430939630cb5cd4266d2658d3b836d576f9","src/zlib-ng/test/CVE-2018-25032/default.txt":"d7f8278db331c47bd1208bf41e7903cbddee4f7b47c666c40afdd3c96237752e","src/zlib-ng/test/CVE-2018-25032/fixed.txt":"3b27a98edd2f3f580033f9add11d3469d7808c969a1128ee00c18ac7a12cef57","src/zlib-ng/test/GH-361/test.txt":"358497d0a7251ea42101dc77b02337f46fd89af09643a8288e2a3082e5d24128","src/zlib-ng/test/GH-364/test.bin":"af5570f5a1810b7af78caf4bc70a660f0df51e42baf91d4de5b2328de0e83dfc","src/zlib-ng/test/GH-382/defneg3.dat":"b22bef6b7392401c9e7b079402c4a4074053d7a914d050400e37fd7af6fe26d5","src/zlib-ng/test/GH-751/test.txt":"b83d833803b7bc3124fb2a0034081f0b999ad10c33a8dfa3bfd181dc078ae3ee","src/zlib-ng/test/GH-979/pigz-2.6.tar.gz":"2eed7b0d7449d1d70903f2a62cd6005d262eb3a8c9e98687bc8cbb5809db2a7d","src/zlib-ng/test/Makefile.in":"48d033f2dbb62635624bf2c9e3e7fe279b72afc3411d14cb7cfdbf40f5b80e19","src/zlib-ng/test/README.md":"d60ef4851222ebc2a9fbc23f292ab11bc7fee40ba6171ea768b2ffa005df5b1d","src/zlib-ng/test/abi/ignore":"02aa87f77656dbc1fbddd23f436cd15465a92df0722da4055cae1bc8bf013097","src/zlib-ng/test/abi/zlib-v1.2.11-arm-linux-gnueabihf.abi":"f5e91f25b558a891fecbeb6e2e9575698630ab700d055a38f3bc4fe66257f513","src/zlib-ng/test/abi/zlib-v1.2.11-x86_64-linux-gnu.abi":"038337383cf780587d810cf5400d632f3a1f8517e63ac4a71b6e5224db8b1413","src/zlib-ng/test/abicheck.md":"6b4a87d760b3848fb1ded6782e02a1d074d9e487bdabb29274a62b31cdf48772","src/zlib-ng/test/abicheck.sh":"7ca2884ff37c697d380f620554525f9b9dc7fa76b45f866d284b2ea5b98c65cc","src/zlib-ng/test/adler32_test.c":"db3e8ad9a4e2ecce0c052b0bfe19834d3ff2fb2e9239cc3438a2c95db00b1d21","src/zlib-ng/test/crc32_test.c":"8f1223d8aa4c52a5e7323f422023f6b892ce684eaf7439ad905b855293f40143","src/zlib-ng/test/data/fireworks.jpg":"93b986ce7d7e361f0d3840f9d531b5f40fb6ca8c14d6d74364150e255f126512","src/zlib-ng/test/data/lcet10.txt":"1eb5d7bddb1c3cb68064d5b5f7f27814949674b6702564ff7025ced60795a6d9","src/zlib-ng/test/data/paper-100k.pdf":"60f73a051b7ca35bfec44734b2eed7736cb5c0b7f728beb7b97ade6c5e44849b","src/zlib-ng/test/deflate_quick_bi_valid.c":"a36697e5779a645354823f14540bd60b9378c2f4c5f2bb981d86bb34f29fcbb0","src/zlib-ng/test/deflate_quick_block_open.c":"455bd347bb88debdfacb409846170274991ec9ba71c52b8fd0e526daf57265eb","src/zlib-ng/test/example.c":"1c8d9d14128da9fb5415683aa7318ae0aa94b743f75905288a2a9decd4ead98d","src/zlib-ng/test/fuzz/checksum_fuzzer.c":"65a96358c9a82efc4b251b4f322b02fade7b69f9bc6ac07294e641e3fe1ccdb1","src/zlib-ng/test/fuzz/compress_fuzzer.c":"1ab70608075c4bc60f89aa2f327cff88362ee7b1d31da88ed54ca51e5f99e5c9","src/zlib-ng/test/fuzz/example_dict_fuzzer.c":"be68f9eee3deae7f9163c6288742e5455bc28f659f80fdb276fafe215f028b97","src/zlib-ng/test/fuzz/example_flush_fuzzer.c":"f12246a184dcfe0a19a98cdc742a1fe8da388ad20b406635d63f1fa10d45b9ca","src/zlib-ng/test/fuzz/example_large_fuzzer.c":"f490abcd332fb4e7921292adf6876d38d7f71c8d2443212c781ba88957ff9303","src/zlib-ng/test/fuzz/example_small_fuzzer.c":"a9b3436b291ace821b6013311a1100e19a9e1c67fefd3f97dbd60688f9bf22b1","src/zlib-ng/test/fuzz/minigzip_fuzzer.c":"5faecfe9e6ecc47e746151bd1cc24a2e2dba8b7ffeb270d2c88cb126273ab446","src/zlib-ng/test/fuzz/standalone_fuzz_target_runner.c":"f25649ed35b8b7a3899c8d7ff52f9972dfc7bf274889e0a7a77fbfdf1c1cfef0","src/zlib-ng/test/gh1235.c":"8310ef780dc483a1708750cd7c120b8e9cc0e1614767d24c01869e529074e981","src/zlib-ng/test/hash_head_0.c":"448def3e8ea13fbcac86202e50b8a71b6cea585d7bdbca0bc6cf6056e4059f98","src/zlib-ng/test/infcover.c":"9c0e8068fdc614b1852e8d274231b41ce3ce975d4419ed31e700a0b05e702303","src/zlib-ng/test/inflate_adler32.c":"ab430c97ae8f569784710118038e8ebf53f4136d1a957e1277c0904f9218340b","src/zlib-ng/test/minideflate.c":"34fdce39628ffd173f7736d9fb65dfa40d0b0289def64b935075f6c6cffe1999","src/zlib-ng/test/minigzip.c":"7dbce6528601f7fdd586280885ed439cb539e15f36dd3974274729bfcdd41928","src/zlib-ng/test/pigz/CMakeLists.txt":"aa70f1025adc004985bfe0accee9b7a80e04786d82705e27c377a5e8d4ecbaaa","src/zlib-ng/test/pkgcheck.sh":"581b3de9c58e96038af94c73cbdb30eed32900f7abb8fa7692426fa68059b0ef","src/zlib-ng/test/switchlevels.c":"ceb6cc4d48a637562009d8f7f82635fa9942acd1bfd597acd99454a03a3a98e3","src/zlib-ng/test/testCVEinputs.sh":"5de6198444c16726f8e0a8f2beb5b89c5ae7e7e3736ce760b9fbc719493e7e4f","src/zlib-ng/tools/codecov-upload.sh":"ec7a8f1405820810e486e3d7e2fda7eb958c17877b0000b93abdf09d87732a2f","src/zlib-ng/tools/config.sub":"32186cfeb5db62c99e1dfbfb07f24c1a50977447b4c26d2907021c74422a70d2","src/zlib-ng/tools/makecrct.c":"55c8f7b8e29393e95988a29de8cb1a1bdf2738a69d53627bd0f9d7bf169bf0a8","src/zlib-ng/tools/makefixed.c":"bffd02540231304f9bcc755b8cb9ae5cfbc48975857bbb4547f1d6acce21ef57","src/zlib-ng/tools/maketrees.c":"30e9f70addf691d1241e594a7f31fc78b119b65e8af9ac8e20fe6da01635d3b3","src/zlib-ng/trees.c":"2cd9a1dc8d9231e9fc4e53e56b87307989c1b7f33212cde4ee434ef71c28af2a","src/zlib-ng/trees.h":"24174f3543b01ee1ef370bbf6d15551a21871cded18b2aadf09a71e7904b6f99","src/zlib-ng/trees_emit.h":"2e93093ae5362523a26877d6fd663bb05793795889d2bfb987cbada9a9dc4517","src/zlib-ng/trees_tbl.h":"35f4fd0ec080c1ade342e2dd1b0f5cdc7e9f18990faa48d7a8a69bc318ebe607","src/zlib-ng/uncompr.c":"4ebb486b27930f8a6ec4a3cc90a207d0bcf8a4779d1dbf3b2184a2b2a5735cd1","src/zlib-ng/win32/DLL_FAQ.txt":"f17fd3823726adbae63b91c00d5db1dccae2e289258edabbbbebde04bb6e7e8c","src/zlib-ng/win32/Makefile.a64":"775d6902373d1583430b5d7467f001746be323610c89be27e02bbfe0205994f3","src/zlib-ng/win32/Makefile.arm":"7535e022f482920c3fa7a267e84e39ad790d150f72e5c30414baa156c2fdd9b6","src/zlib-ng/win32/Makefile.msc":"d769a00c0ad4cb5fc624d2ae004dfa3785a2f4310324b03afd2156e759003a06","src/zlib-ng/win32/README-WIN32.txt":"cdcca6e7a5d2d23618a48fafb8eea347227f8ecf1f38a6aa90f0e7e455bc6574","src/zlib-ng/win32/zlib-ng.def":"f240276caf805a10d024fc6a66efe915c435734c69732818d92fb04d08ab350c","src/zlib-ng/win32/zlib-ng1.rc":"ea0ea4d116b583510b113a27fdec2ad4f0890206963f0e3838f275b8005dde5d","src/zlib-ng/win32/zlib.def":"d9c371ff2677567350386441a2e3d0258010d6502290bbac5ac42ea168bd5212","src/zlib-ng/win32/zlib1.rc":"ec5021dba35f9fae5f5f82ad6b6bd059928548e0608e4ede0bcffccf5c1210a1","src/zlib-ng/win32/zlibcompat.def":"73728b9df4379dc70ebd9b2a9f20d6e4ed7c031fa1f351cdeae1de7d1db05bd1","src/zlib-ng/zbuild.h":"d4d52d3296cc949a5d694e7349a8236854f2ec116c184a310e4e62b28caf5b63","src/zlib-ng/zconf-ng.h.in":"f206ac69c1fa48c670648d26028263372a539ed1243a9a26e5b35bf52e2363ff","src/zlib-ng/zconf.h.in":"dbf08736c3bc5e41242b09e13d0a523b440250410476dd58747c14e28984f1e5","src/zlib-ng/zendian.h":"f5cfa865281d2c5d0b097d318500f27daeec346e7882de68e279486d79c52e77","src/zlib-ng/zlib-ng.h":"d51896e8411868ed195d5cf41fda4f1c5a9c891832dfd16b559a5ed6beedd890","src/zlib-ng/zlib-ng.map":"03ef4439594619e215dbb1717f8c13e16159308ef3817761ba1a3cca7f7834df","src/zlib-ng/zlib.h":"7e3666971e08019fc7097f11d593aac9ff6824a1ecc945c48f76009f7c27d55a","src/zlib-ng/zlib.map":"9997aa913dec6da106ab2089d2a72ca5e1b7fafe0807ac0bc1318ce8c8defab9","src/zlib-ng/zlib.pc.cmakein":"17668e07edbe5971043bea26a2f2b92c4c7cf4724620f1156f3ea1436d2aac93","src/zlib-ng/zlib.pc.in":"cf94c9aa44878a62e27c2f75354c08326b3bb5250a9b11496855cf59691177bb","src/zlib-ng/zutil.c":"53418b23c7878e968b4d04df8ebac74f64f60d32277f2343d16da52059dbc782","src/zlib-ng/zutil.h":"a14c18dd4a96909aaf0aa016cb6df97d77cf5b735283527c906181eead22f0e9","src/zlib-ng/zutil_p.h":"c259b33614007463b41d4184e0bdf10d62325445ee9308e1e1885862d201657a","src/zlib/CMakeLists.txt":"d3ea46cd350c74c21c2dd97f6d0ad354db76b2b43cc91ec1144b88267f67a588","src/zlib/ChangeLog":"6933f4ab74360476bc80d9eda2afd98f93588a5d276e1197926267421dd6959e","src/zlib/FAQ":"1e8a0078be0ff1b60d57561a9e4a8cad72892318a8831946cba1abd30d65521c","src/zlib/INDEX":"3b4e325d47ae66456d43fcf143ba21ab67a02a4f81be7ef2da480ba30d774266","src/zlib/LICENSE":"845efc77857d485d91fb3e0b884aaa929368c717ae8186b66fe1ed2495753243","src/zlib/Makefile":"ef23b08ce01239843f1ded3f373bfc432627a477d62f945cbf63b2ac03db118a","src/zlib/Makefile.in":"77a662b885182111d7731eef75176b4c5061002f278b58bf9bf217e2fa16cadb","src/zlib/README":"4bb4d5664fb9d06ef0d47e8ef73104bd545a5a57eb7241be4f2e0be904966322","src/zlib/adler32.c":"d7f1b6e44fee20ab41cef1d650776a039a2348935eb96bcbd294a4096139be3a","src/zlib/amiga/Makefile.pup":"a65cb3cd40b1b8ec77e288974dd9dc53d91ed78bbe495e94ccc84ddd423edf1f","src/zlib/amiga/Makefile.sas":"0e63cf88b505a1a04327bb666af3a985c5e11835c0c00aed4058c0dcc315d60e","src/zlib/compress.c":"6d0f0d0784744acca2678ce325c8d7c4c030e86f057adb78adcee111d2248c0d","src/zlib/configure":"2d964a697f9060d3a8fc5b4272c9d07b22e5fe6f5cf327e5c29f62f67d935759","src/zlib/contrib/README.contrib":"b925ae08d371b33c4b5ffd67c707150729a476caf47cfe2eafc002291f23f931","src/zlib/contrib/ada/buffer_demo.adb":"469cf566a6965767fee6b987a239ed8cedcc66614940d45a9b434331fbb435ce","src/zlib/contrib/ada/mtest.adb":"41b6f31684770334afdc4375871eb1408542f37a823a073556fdbfdb63753160","src/zlib/contrib/ada/read.adb":"fa5b989aef0c5715a3fcb15de93985f7f10aeb0a7f5716745c95ed820eb9af9c","src/zlib/contrib/ada/readme.txt":"8fe9e5303f2e8e8b746c78250e74b7c4aeb7ce6212fdce751fc3a0ce56a47fe2","src/zlib/contrib/ada/test.adb":"5e3abe79b387e09a9a42bd0543105e228f39a335240cffc33d71f0ba66ff2511","src/zlib/contrib/ada/zlib-streams.adb":"f45988e2bac76eb25a0dc981f46576e7432c35dde1790bbc2b650f0090b7fa72","src/zlib/contrib/ada/zlib-streams.ads":"969e8edb0611810fb52159dcb7c40228f4e5da810a7a3576b778116a93038c6b","src/zlib/contrib/ada/zlib-thin.adb":"03d89244ee5ec9771d9b5050e586c609f851af551b2e64eb151f1d5be0b63ae9","src/zlib/contrib/ada/zlib-thin.ads":"631ef170bde16c3ca8d412b54a0e519815b80197d208f8f393e6fe017bb0968e","src/zlib/contrib/ada/zlib.adb":"c9ca5dc34fbcdf06e2dc777b7e9dcd0ba31085b772b440eb0e12421323ab672c","src/zlib/contrib/ada/zlib.ads":"02634bec0d5e4c69d8d2859124380074a57de8d8bd928398379bfacc514236d2","src/zlib/contrib/ada/zlib.gpr":"859bb69dce38dbe9dca06753cf7ae7bd16d48f4fece8b87582dab8e30681d3de","src/zlib/contrib/blast/Makefile":"17d5d26c24bf51cad51045a38ffb73cc3539d29e89885aa249fcfd45a8659d5c","src/zlib/contrib/blast/README":"baa763ae03d88ef7ece6eb80d9a099b43d0b57639d6d281e1c7c6ca79d81daba","src/zlib/contrib/blast/blast.c":"1ab3e479d342bfc144167b808fb00142264bc50f24a110ca88cc774e351c218e","src/zlib/contrib/blast/blast.h":"9c1c422b76311d4cb06863ffc056668b6240f3dd998bc02e89ee590d482bfdc2","src/zlib/contrib/blast/test.pk":"5f5c262c545574a5c221132d5ef832478d222d70b015341795b3860204140d7c","src/zlib/contrib/blast/test.txt":"9679b2c98e1283222d0782b25a1c198dc64ba9ebd1addd6dc6f643a45947cda3","src/zlib/contrib/delphi/ZLib.pas":"6dcc65866e3fb3d33d2a2328c547458156883a3e6749d52ded209357a49d61de","src/zlib/contrib/delphi/ZLibConst.pas":"84bcc580bdf397e570f86f3f5a5b8c7bf537828f30b4b72648b81911f6bf5095","src/zlib/contrib/delphi/readme.txt":"f7420ed2de77d4b498eefbbe6402a1d17dc2d411735289c78a265c7f10fdaee5","src/zlib/contrib/delphi/zlibd32.mak":"850e91b6c9ea05de61a411cbda16fa0f10118cd88bb32c4b7226988776f8d511","src/zlib/contrib/dotzlib/DotZLib.build":"b96137097669644ecb9f42cdd3399d1fce9c512788374609303f7e50abf597f0","src/zlib/contrib/dotzlib/DotZLib.chm":"20d0e3edd57f849143255a7f0df1cd59d41db464a72c0d5ab42846438a729579","src/zlib/contrib/dotzlib/DotZLib.sln":"a979198c5b8d144c1ac8f993bfb6f4085d135aa58ca9dcf63ebabf52b5c695f7","src/zlib/contrib/dotzlib/DotZLib/AssemblyInfo.cs":"314afcfb339ea95f5431047b7ab24631b11c3532c7ce5dc2094ed0cf80a7c16d","src/zlib/contrib/dotzlib/DotZLib/ChecksumImpl.cs":"e7c047a2c3bcf88d3d002ee3d2d05af414acf53cb4451efacc0f2e95a474ea0f","src/zlib/contrib/dotzlib/DotZLib/CircularBuffer.cs":"be84c9736fe7bdc2bfae70466d8fff582504e928d5b5e110fd758090090c8cb7","src/zlib/contrib/dotzlib/DotZLib/CodecBase.cs":"259bdda1b7d6052134e631fa24bfd9dca6e2362563496c8b85257b56c848908c","src/zlib/contrib/dotzlib/DotZLib/Deflater.cs":"06ba6696a3c15c53ba5fd5a1c2bf50b51f217010228fc1e4c8495ee578f480de","src/zlib/contrib/dotzlib/DotZLib/DotZLib.cs":"9837fe993fd631233cc5e53ff084d86754b97f05ec77c54b0764c2706f186134","src/zlib/contrib/dotzlib/DotZLib/DotZLib.csproj":"21606db31dfef6410dd438b73f1db68856eacabcce6c0f0411fc4f17e17001f3","src/zlib/contrib/dotzlib/DotZLib/GZipStream.cs":"8d1de9755c77046b4ac71340a0a54434ebf4fd11b085c44454d7663a9b4df1c5","src/zlib/contrib/dotzlib/DotZLib/Inflater.cs":"9016ca73818f5b6a28791abc3af6da7c4d2773b6a3804f593f6d5737a62b99ad","src/zlib/contrib/dotzlib/DotZLib/UnitTests.cs":"c95048d763c7e367ba0bb7c31981e0610131fa12356bbd9bfdb13376778e9a0c","src/zlib/contrib/dotzlib/LICENSE_1_0.txt":"36266a8fd073568394cb81cdb2b124f7fdae2c64c1a7ed09db34b4d22efa2951","src/zlib/contrib/dotzlib/readme.txt":"d04972a91b1563fb4b7acab4b9ff2b84e57368953cc0596d5f5ea17d97315fd0","src/zlib/contrib/gcc_gvmat64/gvmat64.S":"22ff411b8b1d1b04aeaa8418b68245400267dc43c6f44104f6ccd37f0daee89f","src/zlib/contrib/infback9/README":"890288f02bb3b1f9cc654b87a07fcea695f90f6b9bd672d25bf6be1da2ec1688","src/zlib/contrib/infback9/infback9.c":"0a715c85a1ce3bb8b5a18d60941ffabc0186a886bcc66ba2ee0c4115a8e274e9","src/zlib/contrib/infback9/infback9.h":"dda2302f28157fe43a6143f84802af1740393572c2766559593996fd7a5a3245","src/zlib/contrib/infback9/inffix9.h":"84a2ba4727767c18af6505f0e81d9c814489c8b9ed330a25dad433db72997e43","src/zlib/contrib/infback9/inflate9.h":"32a907676cc36e27d0fdc0d99adb83a0b23f20ab61896269216d40fecf08d349","src/zlib/contrib/infback9/inftree9.c":"1f262e5ae8094c9d8b172241e567c86be560327b840ca8fb771e98461bcb158a","src/zlib/contrib/infback9/inftree9.h":"145072793141cb313c91cdf9dee9d4b8e8a38d77099f87e9cd05c7b5ead8f099","src/zlib/contrib/iostream/test.cpp":"0f3c77e013949eb9c91e6b690ea894e19d97944d6b0885b82806fc3ad99680cf","src/zlib/contrib/iostream/zfstream.cpp":"8ebb9b3d521cc3392953f27658cf1f6dcb763216079f69a1518ec5ca0e42a63b","src/zlib/contrib/iostream/zfstream.h":"4369c35e66f63f52ca4a5e1759bf720507ccabb8f3f132e2f18e68686c812401","src/zlib/contrib/iostream2/zstream.h":"d0343e0c57ff58008b6f29643d289c72713aa2d653fe3dcd2e939fc77e7e20b6","src/zlib/contrib/iostream2/zstream_test.cpp":"f789df183cc58b78751985466380c656308490a9036eb48a7ef79704c3d3f229","src/zlib/contrib/iostream3/README":"43ec48ecbd95a8c45db20b107fac73b740bb11595a4737329188f06b713972cc","src/zlib/contrib/iostream3/TODO":"af5ebc83fb88f69706c8af896733784753dead147687e1c046f410c0997fd88b","src/zlib/contrib/iostream3/test.cc":"8e17fc48dfdbc6e268838b8b427491b5843b6d18bc97caa6924de9fad7abe3da","src/zlib/contrib/iostream3/zfstream.cc":"8cdd67ed0b13c192c11e5ea90e9d5782d6627eb303fbc4aa5ebda2531ec00ff8","src/zlib/contrib/iostream3/zfstream.h":"1bd74778fac45ee090dfc0f182a23e8a849152deb630606884b2635987b357b1","src/zlib/contrib/minizip/Makefile":"0f59cf07531cf34cb359f9dbe26d8207a2bbbdad618557894eb629925f7e8899","src/zlib/contrib/minizip/Makefile.am":"2313a3480a2c3745fa7ce216829cd0367058907d3a0902e5832c66c84a2fdfc6","src/zlib/contrib/minizip/MiniZip64_Changes.txt":"302c62b328647f5472fb7755249a83459be7f8ffb1fae07e8ba318fce8f4126c","src/zlib/contrib/minizip/MiniZip64_info.txt":"122719c32ef1763a5f6ba9c8cdefc1d78a76f7156b09e7b6f69b73f968e0dac3","src/zlib/contrib/minizip/configure.ac":"959e4762ddcb36dcf30512611ca9fbcbcd0c943228a6ac2975708798ae09a438","src/zlib/contrib/minizip/crypt.h":"1d25a0fab3189dc3c6ae43c7813e1e5d07d0d049bd32bd7bd0e9ccd752bfdd5e","src/zlib/contrib/minizip/ioapi.c":"f6878a3ecf6802f0f75cadb41a114fa274636c386bac794c66cbb27a24d9a29f","src/zlib/contrib/minizip/ioapi.h":"9f5448f8d5e8894d6f397dd09d24f7ff39cb818cd493a8bd90dda19553b814ea","src/zlib/contrib/minizip/iowin32.c":"103cdef91d57ceca7a1c1973772ff7e1d44c7b3e227a3640171957302bd9e974","src/zlib/contrib/minizip/iowin32.h":"586f22b9c3c64da253ce2b518e0fad61f19a7b47b289fc704cc9708242294c49","src/zlib/contrib/minizip/make_vms.com":"65736d9c4888f2373d3db0a13864d150c5040453f5bc2a5c8784379a7ea67590","src/zlib/contrib/minizip/miniunz.c":"b29dfb4cff9763497d8f0656c97027995e1ea0b4104e4a217ba7882337ae7a7a","src/zlib/contrib/minizip/miniunzip.1":"66d8684392167091ef0fe01598d6a0daa26e7e448e2df6c3cb257487735b83f7","src/zlib/contrib/minizip/minizip.1":"5404596e8e5587a52f563906119f32ceee30a6d97a966afa5c7afbe4d373e210","src/zlib/contrib/minizip/minizip.c":"b5b8f380297be0d90265356704df1e41bee0e903a2169263a2b50dc22cc3180a","src/zlib/contrib/minizip/minizip.pc.in":"8b6670b42d8e5e519e1cc89db093efc07ba23cb1ddfedd3c93ff2df08c3ce8ac","src/zlib/contrib/minizip/mztools.c":"cd887c4af6d20823bd15f24008b10acf01969b4165d7848656bde843a92428d7","src/zlib/contrib/minizip/mztools.h":"6f82c52279e8f79165f4446be652e5741a49992ac58632470335aa34c564072a","src/zlib/contrib/minizip/unzip.c":"fc9e8d752618a05c1f3a2ce61ebf76d0c8053dd5579458f836834a36e8690bbe","src/zlib/contrib/minizip/unzip.h":"20cdc47658a3e41db897d31650e46cd2c8cca3c83ddaaeb6c7a48dd8b7f18e03","src/zlib/contrib/minizip/zip.c":"bee2d6fa310c9451b24c036d5840a8c7c90b7cf8d12d5767175c207e56fefcc3","src/zlib/contrib/minizip/zip.h":"75b635dca8294790ab7ec1f72e9f1fd352d75b189c3c9b61c68f76bd7e612043","src/zlib/contrib/pascal/example.pas":"d842d456ecb6ff80e34cee2da31deb2072cc69ca837497bea8b8bee203403474","src/zlib/contrib/pascal/readme.txt":"02f997c37991ddae0cb986039f7b4f6fc816b3fd0ffd332cad371d04c12cf1b9","src/zlib/contrib/pascal/zlibd32.mak":"850e91b6c9ea05de61a411cbda16fa0f10118cd88bb32c4b7226988776f8d511","src/zlib/contrib/pascal/zlibpas.pas":"720346d2f40429de31bb16a895f42e878f259b1aff7d46c63e6616e629b3f7d5","src/zlib/contrib/puff/Makefile":"d9d738030464aaae354196c14fd928adf591832fce7d71ac1977c1d8d4923a4b","src/zlib/contrib/puff/README":"c5b9852fb11e0d6b6e916e5134cf034524d901b95368972133e0381e480eb479","src/zlib/contrib/puff/puff.c":"433f7f4495481dd95576dbb548b1bcfc5ca129d30421695fa609f5f6c14908b6","src/zlib/contrib/puff/puff.h":"969b7be2a930db0cdcb19b0e5b29ae6741f5a8f663b6dba6d647e12ec60cfa8e","src/zlib/contrib/puff/pufftest.c":"d24e31c1d277d07c268f34e9490050c6b53c68b128da3efbb1d05fc5b31004f7","src/zlib/contrib/puff/zeros.raw":"b7b0887089f7af1f6d1e0b4c0a1e8eddd10223b23554299455c6c9be71b653a3","src/zlib/contrib/testzlib/testzlib.c":"c6c37b35c6ecc9986a9041f86d879cc37a9e4d8315af9d725071eb3b2cade0c5","src/zlib/contrib/testzlib/testzlib.txt":"2359bbdc84eb8a04e0f1cd16cd81a2896e957f2ad58dab3ca78ef55b7d0dc577","src/zlib/contrib/untgz/Makefile":"8f5ab1564813e091cea8f1bb63da32fd80ac763d029277b0cabf50f60aceefe1","src/zlib/contrib/untgz/Makefile.msc":"d0f537de11d9e0e36e2a98b3971c537265f4b533b4c48797094365ad9ae8388b","src/zlib/contrib/untgz/untgz.c":"9a12d774301d252dcd38bba07ac369319da4c04c4fef8a50fcbf40aebf29c2a1","src/zlib/contrib/vstudio/readme.txt":"df5fe112bef3c23d5767602736f6d0ce43cbb49b584210fe57f6f59e634a49d0","src/zlib/contrib/vstudio/vc10/miniunz.vcxproj":"dd607d43c64581172c20c22112821924dfe862f56b2e5eb8780bdd0714d9527b","src/zlib/contrib/vstudio/vc10/miniunz.vcxproj.filters":"4b8466bf00c70b81c31cc903e756e04151fd90fdcbe102f3568a2c8b6190ea27","src/zlib/contrib/vstudio/vc10/minizip.vcxproj":"af73f2cf8ae51e65e85342faeb40849a2310c97bc77def42b38d7070460a6cf0","src/zlib/contrib/vstudio/vc10/minizip.vcxproj.filters":"f2815f9e3386c393d0a351632823b221ef9689da1f422ecaa561dba2a612fb0a","src/zlib/contrib/vstudio/vc10/testzlib.vcxproj":"c21e64259bf9efe97e1103212e7a6e1b7372b50067b4ba14cfa678e1f491095f","src/zlib/contrib/vstudio/vc10/testzlib.vcxproj.filters":"a7caddbac3ba90b5d482e6d926ef35cc40dc3553ed3776ef6b68a528fd5b0631","src/zlib/contrib/vstudio/vc10/testzlibdll.vcxproj":"3f317d8964f17901c3e68bff5deaec10b6ccc50a572235999e8097292692984c","src/zlib/contrib/vstudio/vc10/testzlibdll.vcxproj.filters":"29c9535775aa76320ee4efd001d41961faf6c58cedd8b29d3986e85f73d2f6fb","src/zlib/contrib/vstudio/vc10/zlib.rc":"6041a4727ea47520058a5b4bb8de87592883eb7f26dd39df62879c347f3888d1","src/zlib/contrib/vstudio/vc10/zlibstat.vcxproj":"50402ab8c63f746c034d6ce51d9612aff5b6af9aa27790cffa4b7deed4b30eb8","src/zlib/contrib/vstudio/vc10/zlibstat.vcxproj.filters":"eeb1de64c252c46b822f73f272127f6f9f0570ef22d234e093070ba95a4dde24","src/zlib/contrib/vstudio/vc10/zlibvc.def":"a228e521a561d4456c83c7081b4e9950cfce99133af7d5fdd27f12a8fd53efde","src/zlib/contrib/vstudio/vc10/zlibvc.sln":"e659860f705f31b87ea9139a3cb4ebe1561e120bce495383a54614fc82b49990","src/zlib/contrib/vstudio/vc10/zlibvc.vcxproj":"efad8cb150c0e5122f8c700d95c5de659dff92b171917c66bdbd082fff500b58","src/zlib/contrib/vstudio/vc10/zlibvc.vcxproj.filters":"c801732b7c7017796add50d2b71a228f99f95a46650baad307ff7e8358a2bfb0","src/zlib/contrib/vstudio/vc11/miniunz.vcxproj":"746e4c11fb8af4bcd6a9d68ba81ed1dc366a5de3bed56b291ee969ad733a7bb0","src/zlib/contrib/vstudio/vc11/minizip.vcxproj":"340617cae9cf4fcb003308021d3782ec3639e60d62d79a3aafc0a50bb55b061e","src/zlib/contrib/vstudio/vc11/testzlib.vcxproj":"99eadfdf2e41bc036141c174c4d0035d87572ce5795dcc28f39133f818a79d08","src/zlib/contrib/vstudio/vc11/testzlibdll.vcxproj":"583bdef522b0176829f0d8139ea2a88b9cbc14379d1334f3a863989ed3df9b67","src/zlib/contrib/vstudio/vc11/zlib.rc":"6041a4727ea47520058a5b4bb8de87592883eb7f26dd39df62879c347f3888d1","src/zlib/contrib/vstudio/vc11/zlibstat.vcxproj":"b07f792843d05ac883391075bc3b9625437490d8d40944ad359aa2134a09a3aa","src/zlib/contrib/vstudio/vc11/zlibvc.def":"a228e521a561d4456c83c7081b4e9950cfce99133af7d5fdd27f12a8fd53efde","src/zlib/contrib/vstudio/vc11/zlibvc.sln":"27389b515997defd080519f95aff87e89fcbe8b26d73c5ebb73c544cfef4d60e","src/zlib/contrib/vstudio/vc11/zlibvc.vcxproj":"d02d014ef957119a6fd0ab243c892b74d1592b117750b95fed21097c8ed922d9","src/zlib/contrib/vstudio/vc12/miniunz.vcxproj":"1494af54570f6e93852932956d49a8c25e57b5abc1ac979945605ca9143df9f8","src/zlib/contrib/vstudio/vc12/minizip.vcxproj":"9bf128ed6760ca5f019006f178b1c65f4c7ff122dba8d297b64b0eb72feeb120","src/zlib/contrib/vstudio/vc12/testzlib.vcxproj":"be88bc1220c0447c2379fdab3ac88055f58a8a788d3e9cec494342187e760eaf","src/zlib/contrib/vstudio/vc12/testzlibdll.vcxproj":"93416510256935d79625dc9fd349cfce6968c062d42a138bec404a26b2f92f5e","src/zlib/contrib/vstudio/vc12/zlib.rc":"90067be57a8c5df594a850352642f8b1dcb32e3d088d3805ebafe75a27412b74","src/zlib/contrib/vstudio/vc12/zlibstat.vcxproj":"faa229a851c76b77d65bb4742d8369efe566652bb6a1447d1e3539f289b5313d","src/zlib/contrib/vstudio/vc12/zlibvc.def":"a228e521a561d4456c83c7081b4e9950cfce99133af7d5fdd27f12a8fd53efde","src/zlib/contrib/vstudio/vc12/zlibvc.sln":"162e0faa80a56d89eea71a0b89377708eec2faa0dc72091cc0abb07fbdea49a0","src/zlib/contrib/vstudio/vc12/zlibvc.vcxproj":"8ac8cb2d29b880a738011d29d0511af9b14f321bed90f674109c446f4108d442","src/zlib/contrib/vstudio/vc14/miniunz.vcxproj":"0312511d4a30cea979c4e36edf994a537ed8a9d924f6b5c536cbcd094773c11f","src/zlib/contrib/vstudio/vc14/minizip.vcxproj":"9e7bb7a6ac723e4b2db900627c366f9bb93a351381995d9c69a50c0126f64233","src/zlib/contrib/vstudio/vc14/testzlib.vcxproj":"88667873d9d61d65016b9501ca925532eb55f56230e5911d3e2a01cd8a9fb2a4","src/zlib/contrib/vstudio/vc14/testzlibdll.vcxproj":"69f544898b4275cd3d8e19b8f1f8cb39c1cb98a30cdb033242e4b94c57bfa150","src/zlib/contrib/vstudio/vc14/zlib.rc":"90067be57a8c5df594a850352642f8b1dcb32e3d088d3805ebafe75a27412b74","src/zlib/contrib/vstudio/vc14/zlibstat.vcxproj":"5629eb0cc30674a39aa3636f1cdd190393b0dbd4c69a35e36ad85b6340055605","src/zlib/contrib/vstudio/vc14/zlibvc.def":"a228e521a561d4456c83c7081b4e9950cfce99133af7d5fdd27f12a8fd53efde","src/zlib/contrib/vstudio/vc14/zlibvc.sln":"47a50bbde8ca6336cecd8c0e4b65e515fc46ae84c7b61008ac9864162f777286","src/zlib/contrib/vstudio/vc14/zlibvc.vcxproj":"09f496a2ad3afdd5e3f36b7285440369dcac4559656edc00ed7a74c7ec9fa10f","src/zlib/contrib/vstudio/vc9/miniunz.vcproj":"7db9b2ef5ff05d3de4ba633feab10e85d45434c865d520ffa1974421904996f3","src/zlib/contrib/vstudio/vc9/minizip.vcproj":"7797a9ad3c0056f3a3cf8fcde7618acd1d151c65d15f841fccd8d9d878ae7bb0","src/zlib/contrib/vstudio/vc9/testzlib.vcproj":"8df405917800adccee6bad2116022c2c82d661b37ea40ea16405fe4dbcb4b69f","src/zlib/contrib/vstudio/vc9/testzlibdll.vcproj":"cde6806f5c81d1fc311f9921c17ba56f8e386d097783a6a90875d385837c47e7","src/zlib/contrib/vstudio/vc9/zlib.rc":"6041a4727ea47520058a5b4bb8de87592883eb7f26dd39df62879c347f3888d1","src/zlib/contrib/vstudio/vc9/zlibstat.vcproj":"d393d418d827ad9fb9c6516f1a7620371d15e3f5afef8ba60b51e50acc7199e9","src/zlib/contrib/vstudio/vc9/zlibvc.def":"a228e521a561d4456c83c7081b4e9950cfce99133af7d5fdd27f12a8fd53efde","src/zlib/contrib/vstudio/vc9/zlibvc.sln":"26e58d4b2cfcd941c367fb2a18537b3b9f002f2ac1278b700ea1129c50501452","src/zlib/contrib/vstudio/vc9/zlibvc.vcproj":"eaca98fcf166738b59fcdbd179dac9f98f985c6ba49212b186343a998816f081","src/zlib/crc32.c":"ec3ff0f97858b228513027a490e4330cbb23c6fbdd24d839902ffa89854f209c","src/zlib/crc32.h":"9a2223575183ac2ee8a247f20bf3ac066e8bd0140369556bdbdffc777435749e","src/zlib/deflate.c":"4470e36709ce7d6067fa3e8f60bb7f693b055bee42a0d6655ed71faa2db87fde","src/zlib/deflate.h":"0db1b5ef79ca6ba0f508b7b8bdaa11af45c5ebe2c89ab4f1086dc22b963a52fa","src/zlib/doc/algorithm.txt":"992590931e982c0765286c2d83f6e9ff0a95aabb08e28c30c52bae3e8c4bd5ad","src/zlib/doc/crc-doc.1.0.pdf":"064f9252d6e2e15ea56c2bd18e160e5c9c84bcd137c11a7af497aaa511ace998","src/zlib/doc/rfc1950.txt":"8f0475a5c984657bf26277f73df9456c9b97f175084f0c1748f1eb1f0b9b10b9","src/zlib/doc/rfc1951.txt":"5ebf4b5b7fe1c3a0c0ab9aa3ac8c0f3853a7dc484905e76e03b0b0f301350009","src/zlib/doc/rfc1952.txt":"164ef0897b4cbec63abf1b57f069f3599bd0fb7c72c2a4dee21bd7e03ec9af67","src/zlib/doc/txtvsbin.txt":"d1549fb75137f03102798f70fd34ff76285e717ddd520dd82274c1c0510eacf0","src/zlib/examples/README.examples":"1bc1c677bbebe1aa5e85015bb62f0cf3fcdbf95652d30494159bee6166c1854a","src/zlib/examples/enough.c":"c14a257c60bbe0d65bb54746dd97774a1853ef9e3f78db118a27d8bc0d26d738","src/zlib/examples/fitblk.c":"fd8aaaefd5eb3d9fc388bdc5b715d1c6993ecc9367f5432d3b120a0278904edc","src/zlib/examples/gun.c":"3bfd36b06284ba97d6105b8a6a5d18b2b34b75b3a1285f16d018680fb174915f","src/zlib/examples/gzappend.c":"6de91c8305e37560117bff44136abff72b16b028c0bda0bbac7ea07e4988b0ce","src/zlib/examples/gzjoin.c":"90b9d6c39a5fc91cf1cc9b96b025a508a8015dc502cd9374c754b44078593f57","src/zlib/examples/gzlog.c":"196872021c96099fd30c880ac2cccd1350fdbd81179731f3914153a26ebf72e9","src/zlib/examples/gzlog.h":"681f280437f867820bf39880e2f4fc641d402879e399ba2e6a31d73feefe8edc","src/zlib/examples/gznorm.c":"e5a8f5c3b107f27212f7d5fbfcf072a337a1b4ea32929ae31c168997438a5cc0","src/zlib/examples/zlib_how.html":"80fb647be8450bd7a07d8495244e1f061dfbdbdb53172ca24e7ffff8ace9c72f","src/zlib/examples/zpipe.c":"68140a82582ede938159630bca0fb13a93b4bf1cb2e85b08943c26242cf8f3a6","src/zlib/examples/zran.c":"10f9568b1f54cdb7474a38c5bc479aa0edb07a0eed2e999bdad4c521f6b25330","src/zlib/examples/zran.h":"9a0d4c15f898c43deae2c5e98a5c66c637a1b25573d662fe91a789c386eaf971","src/zlib/gzclose.c":"94446cf8cde67c30e64d0a335b0c941fd3fbad2e77f30180d12e61f9c2a5a6b8","src/zlib/gzguts.h":"fa85c9dabe24e42ba95c702870416ff67ecc58906321f8e74b72a50dfd7df400","src/zlib/gzlib.c":"635b7b6df79a5ce6e0f951669e4c82704d7972d8afb87278b9155c2cb4c5066f","src/zlib/gzread.c":"41c69d43fb3974bae58d9169aea3514221f70dc77bb7a35c79626dd3be01adf2","src/zlib/gzwrite.c":"c7454689751c8f41ec63a1381a0053fb149095abe1c3b89c8a996b2d7ac8adce","src/zlib/infback.c":"6a6cfe3d7e239d590692bc2664ac58d3ef92be30ff4cb3c6dbf5deed28f79eb5","src/zlib/inffast.c":"41d93aefdbfee5455809130af74fcc76cf7259b1aa8b34d0060d14e57463e8bb","src/zlib/inffast.h":"7d8c1c873ce9bc346ad6005bb9d75cce5c6352aaf7395385be216a9452a34908","src/zlib/inffixed.h":"237ba710f090e432b62ebf963bee8b302867e9691406b2d3f8ee89ee7bfef9b0","src/zlib/inflate.c":"f1679575fef1717d908dd09d7bfe8fff89c21941cadd7c255a2ccccfba3a287e","src/zlib/inflate.h":"e8d4a51b07694bf48cb91979c19974cf6a5ab0b8a09d26ec0d14df349230673e","src/zlib/inftrees.c":"b9db40bbb68b63dccbcdfa78d687751e33178af8669f1c1236309cfd5d2edc0e","src/zlib/inftrees.h":"44084a93673386db6282dcb61d739c84518e10dff66d1c6850715137c827464c","src/zlib/make_vms.com":"14ed54bdd391c1648cedfb69d8a73a26dcc7f1187d59b0f18d944b7665cec85b","src/zlib/msdos/Makefile.bor":"292ab363f7ffbc4ae84d37cd9bdffd2dac1003bee52d223a8489844870f20702","src/zlib/msdos/Makefile.dj2":"9208450c2ae6dcbfcc25560b5b9ca763f461e7246e37b0552474edf8fa898906","src/zlib/msdos/Makefile.emx":"c749d6ec7f88e8e639d4f03bdbdcbbe9d1c304210be4c4be621ceb22961d3d64","src/zlib/msdos/Makefile.msc":"0e021a6f42212415b060e4ad468eb415d0a8c1f343137fb9dff2cb8f9ead3027","src/zlib/msdos/Makefile.tc":"2ae12ee2a3e62f7c5a0520d0fbe4adee772bc07fe816002b07ccb43db3daa76a","src/zlib/nintendods/Makefile":"ea5823efe6830132294eddf2f56dbd7db8712244c210bb4968c431b1a91bd066","src/zlib/nintendods/README":"e362426c47b39ff6a7d6c75c6660b20abf076cdfa5e1e421716dc629a71aef95","src/zlib/old/Makefile.emx":"d811f032272aae50123a889297af3a02fbd60d1e42bbef11466462f627ff7b5b","src/zlib/old/Makefile.riscos":"d1a488b160fbfd53272b68a913283a4be08ba9d490796b196dddb2ba535b41e0","src/zlib/old/README":"551a0f4d91fe0f827a31cbdfbb4a71d1f3dc4d06564d80a3f526b749dd104d11","src/zlib/old/descrip.mms":"8ff08c35c056df9c986f23c09cf8936db63ccf12c3c42f7d18a48b36f060cff7","src/zlib/old/os2/Makefile.os2":"6ad247c00f00ff42fd2d62555e86251cef06e4079378241b5f320c227507d51d","src/zlib/old/os2/zlib.def":"ea9c61876d2e20b67ef2d9495991a32798eb40d13ede95859a2f4f03b65b9b61","src/zlib/old/visual-basic.txt":"1727650acbde9a9e6aec9438896377e46a12699cca5d46c5399cef524dedc614","src/zlib/os400/README400":"5eb702a0dd460e2bea59ee83014c3f975e892057850c639f793bb740044a38ba","src/zlib/os400/bndsrc":"3c36a17975eed5a8d33bc5443b39fead1e68c01393496be9c1f4a61444bcb0f6","src/zlib/os400/make.sh":"143394d1e3876c61c29078c0e47310e726e1f5bd42739fe92df9ece65711655f","src/zlib/os400/zlib.inc":"dede38961ae2e7a2590343bf1ff558c6f51e46714dec33f2d11d8c34899b3875","src/zlib/qnx/package.qpg":"d521336be75bdd145281c6d166241905751ec97093ecd6fec97a313f631ac0e1","src/zlib/test/example.c":"64ae90d60b40a8aec4700e5c4e7a71898ebb92948b7a07f939b3e763cb3e8b35","src/zlib/test/infcover.c":"f654f3fcc74b33bd95cda63d13fe0ce589bcfe965544e0c17ee597d75efbd090","src/zlib/test/minigzip.c":"f9777d1e8b337573e12daa8091dcf22e88a9b155fc0acad15b8224c377bfe027","src/zlib/treebuild.xml":"89b50165782643554a38d5c58c203d9648b540e5a455531dcb58b5676a019955","src/zlib/trees.c":"b338f1ec9038bd77efc09c8fdb99ef27b5db5b3da9baa301e544adc8e3b6a662","src/zlib/trees.h":"bb0a9d3ca88ee00c81adb7c636e73b97085f6ef1b52d6d58edbe2b6dc3adeb4d","src/zlib/uncompr.c":"7b3d8ca0f10ef7c74044c3172ca8f9f50389cd0f270ee4517f438e7e06be5623","src/zlib/watcom/watcom_f.mak":"7e039b912f9cffaa40835281430bb284fa9042b0a0d12f6b34700a06bca6576e","src/zlib/watcom/watcom_l.mak":"d11b4064604a034725860e63e3f6d347056372e4b1675b183e20a93533b20cc9","src/zlib/win32/DLL_FAQ.txt":"9e00778319381e6275691dd3a89410c99065b8c0c5db96473abe8c859cbdefd8","src/zlib/win32/Makefile.bor":"7d73a0d2c3e38b7c610bbc9c22f683a4fe1ab9b8b65649a3a8ac4ff7fcc14ba6","src/zlib/win32/Makefile.gcc":"97140c30506a8f6b2edb6b3d8a1b6b539d7929d4b957deba9950301090f579bf","src/zlib/win32/Makefile.msc":"235529bd529d4690d5d4b7871fdd0a1f118f2fe18862cbdec5f5ac674c55a60d","src/zlib/win32/README-WIN32.txt":"f414b3702f8d3bf1de42e0f41604bd78c44e537aae16b6107e3cdaa5759caa16","src/zlib/win32/VisualC.txt":"9ec0babd46eaa012371dee2d3a8a55d9c7130f7895512c3371c737e4a7f6a997","src/zlib/win32/zlib.def":"c00693a5c825f8bfbdb68124fd03cb2fa5269338071147bdaa14434aaf3962b9","src/zlib/win32/zlib1.rc":"54e161029b59e99a4f9cb2281b956f00ecfb1814318ddef9c741ff4f832c5c1d","src/zlib/zconf.h":"80e0a31a4c0e6f20d1bad0df99271b9d535aa9f7c4e62f1a54f643adb4c6dfa2","src/zlib/zconf.h.cmakein":"bb12900d39488e6a9ed67ebd7cf5599f3ced8937b7077d4d5001e470c7a1392e","src/zlib/zconf.h.in":"80e0a31a4c0e6f20d1bad0df99271b9d535aa9f7c4e62f1a54f643adb4c6dfa2","src/zlib/zlib.3":"aefd0162070fcb0379dc18e27b039253cd98c148104c1097dd60e0d0b435e564","src/zlib/zlib.3.pdf":"91343dffd2876dcf4af567f299ce99872b066232451093d6d12e02e4654873d8","src/zlib/zlib.h":"a980a0d104198a53cc220c51ab5856e5be901bec8a2d02e0ee79a8754219dfed","src/zlib/zlib.map":"33e2a7c4defd6222945bb0f7191b6380afb4f518e804af86a44aad4a9090bf9e","src/zlib/zlib.pc.cmakein":"2f1d0b18ce37c2af415a469857f02aee2c41a58877aff21d29e9c6db32b55cb7","src/zlib/zlib.pc.in":"04c01cc2e1a0ed123518b5855f585c93a24526dd88982c414111ea1fc9f07997","src/zlib/zlib2ansi":"b3f9c88abbdf16143e5d5110e44fff198bcda9ee1358e036c8d445e9d0cbce85","src/zlib/zutil.c":"8108af451ad14271065844736ac7c436275b92826c319318070508d769371428","src/zlib/zutil.h":"cf94d865e3a9162c0571cba7f74c8f01efbdca26b981d6cc9c545d4c3991e3c2"},"package":"56ee889ecc9568871456d42f603d6a0ce59ff328d291063a45cbdf0036baf6db"}
+\ No newline at end of file
++{"files":{"Cargo.toml":"5fc1259b26541f617473d6b741816705c91322db9740e347a8686e3c0b30ab2e","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"34c467b6945a22696d060b8fdd2379d464acb3408d4c599d3dc5fe4fa1b4c84f","README-zng.md":"2f9f34e6b388a401b8d8318b64997a7521e4198c5c314f8cea11433623628515","README.md":"75701bfcd7158e924f51ece8debb6d4425ccd6ad5d2806004b5f174423f4b2af","build.rs":"b383e60f71c9b40ecc807ac58473f9b85d7036e8359796634cba2701224493a3","build_zng.rs":"b7768e19f0bf876f29eabb6ad6511f530e61d8aa92bfbe89a7cf3818e4824ce7","src/lib.rs":"7c4a3394e17e6250c1f4f2067efecc56b1850827596432ad0ce75e5eea800446","src/smoke.c":"10607c81b73811bfcb9718767312bf97ba2ccf7048ea2f18a2085aa06ad7f91b","src/zlib-ng/CMakeLists.txt":"5840d2c44e335af0f58f8a2545da60be403946b1181641b35ea7425b2e0f44db","src/zlib-ng/FAQ.zlib":"c524f4f86d336b3de71dd6977afddffa9c02fda5c26db4dfefae44959e7614a2","src/zlib-ng/INDEX.md":"989545e90d8e9ac149034f762ce78ed8976ebf9324326228dea37ca190154609","src/zlib-ng/LICENSE.md":"d3c80be055d94d798eaa786116e84fa0b010bc11420b5d2060d978ea77845436","src/zlib-ng/Makefile.in":"1f56adbf5fac7fa36c6e4c11b5f061acb971984c941154cbf0344e2b68b99e7d","src/zlib-ng/PORTING.md":"4105267b5e00f8d608f31dcf4fe2cfede15cc94568211691419e6cba3d8e539e","src/zlib-ng/README.md":"ba04244ad8eea94d834d25aa75b40e7f849844a33c68ed180c2a631378e1f211","src/zlib-ng/adler32.c":"82ffa1b4fc4b198ba8004135f79b8819d9f2b28e851c30c0ab41e6d32dfbf70d","src/zlib-ng/adler32_p.h":"f56915c59a345baf4558374447385a317e29365a4db2fbb38af4de3e1a1a0201","src/zlib-ng/arch/arm/Makefile.in":"95464884ba75a7b12c9ceda5845d8d68d5a7d7dac8a8dc24b27beb2192e5b97b","src/zlib-ng/arch/arm/adler32_neon.c":"3990b8d5570b12c2162218fe0e9bc723a03f1c89b5ed3ba70a74a98976260ee7","src/zlib-ng/arch/arm/arm.h":"855adbb02d7b9a5714a17d9dcff493610e7cd2b9a1f4e58e1c99626ab536e868","src/zlib-ng/arch/arm/armfeature.c":"4800228414695b632b9ceca14409e782d6fc3b357ba7ab00858925fc66b5532e","src/zlib-ng/arch/arm/chunkset_neon.c":"95fc7917d1d30094e15a35c56d1e9c189c5ca3758553a3467d4da793eaed656f","src/zlib-ng/arch/arm/crc32_acle.c":"e2be53267a2a59fc79c4b3bab00e8b25bf64a8fc8bf2c6684e5b1b1fd1480f9d","src/zlib-ng/arch/arm/ctzl.h":"feb70d55e66025fff806e30e48002b35cfff79533d352585cfa5f118edbc90b1","src/zlib-ng/arch/arm/insert_string_acle.c":"d1b1dae5aeada70f2b03c2cbf3112ce55a92401c2d87709081b04dcf5992e1ad","src/zlib-ng/arch/arm/slide_neon.c":"19d8cf5c742ac6b82164c7a183538ad1129f9f17e9b8bce8b40daac3820fb6c4","src/zlib-ng/arch/generic/Makefile.in":"f41a34839986eac8dd52cf91fada0efff4171c059ab5d7db6347c91bd6d9db09","src/zlib-ng/arch/power/Makefile.in":"69644d1a0ff8e7f38005c0a55cdbaf3f0d87f42abf8fc4f4136271c4fedfb846","src/zlib-ng/arch/power/adler32_power8.c":"79b75e98ad3a62facbbdd8c0b178d3f993b57f6e34d320bf47eca33aa8c330a1","src/zlib-ng/arch/power/power.c":"0647afb3b3b7ce2a19b4815ec8fdeee0b37c759413e5ef0a668a2dba22d94803","src/zlib-ng/arch/power/power.h":"f3f15f94fed98a2f7dd5d4568c5172e597228be4141d6895062703c3f70024da","src/zlib-ng/arch/power/slide_hash_power8.c":"932ea533d25e2f5478afe0c47830e7ef24276cad0d75fd91f2d8c799bd4b5d36","src/zlib-ng/arch/s390/Makefile.in":"eef6c3169723f089b0b5f852423ec55bf0364caeddd7cda991f2e76bc1682107","src/zlib-ng/arch/s390/README.md":"730b9a0230609988fbd1bdd52a7abdaa1fa5c65253ac78163dd4a5eccb966abc","src/zlib-ng/arch/s390/dfltcc_common.c":"3d460448ad4c5b687da6b7c0ad8498ece92b771dc7ddd0189e096acca5a1cad4","src/zlib-ng/arch/s390/dfltcc_common.h":"de8902d3863c8a7a3f6ea27dec2ee5a4f17ef5d8646e48a586d0b29fe94c9a0b","src/zlib-ng/arch/s390/dfltcc_deflate.c":"d6941d3c5ada225ec39b98b35bce1d203aa1f2d994a47c8487d377d9ef2f6efc","src/zlib-ng/arch/s390/dfltcc_deflate.h":"5c90a812e2a2f2b842dba027e5640791e52206e74b8423cb78e0b8ea12ed29ad","src/zlib-ng/arch/s390/dfltcc_detail.h":"fe66cd700a1d017eba86c2c6e95f53e9a4d1cb491de9cb3963b2a2907098baa9","src/zlib-ng/arch/s390/dfltcc_inflate.c":"83643b5605cdc2d1d7780e1bdeb007f9dc6a1cca633157abbfb5d3232f2b8816","src/zlib-ng/arch/s390/dfltcc_inflate.h":"d7a4a5ae79abd1a5456521926b918becfe86c253a4fc23723fbc09f7c3303128","src/zlib-ng/arch/s390/self-hosted-builder/actions-runner.Dockerfile":"999c962c49508ebf61414e6f9ffea059926ac500d4c6d707ea1f9e77402f7374","src/zlib-ng/arch/s390/self-hosted-builder/actions-runner.service":"33a359eb58d76152f916b40ee1357f7edfda75e8dfb55a5b12ac83bcd6ed7055","src/zlib-ng/arch/s390/self-hosted-builder/fs/usr/bin/actions-runner":"f647e18728ea15fe927ac9f8cba83a5b343654a0e91b5ebe653bae7af7375110","src/zlib-ng/arch/s390/self-hosted-builder/fs/usr/bin/entrypoint":"add4ebdc4f06ed15bb1de12a8c9ceb370a60baebb0932a1026a75433940ad3df","src/zlib-ng/arch/s390/self-hosted-builder/qemu-user-static.service":"54551049f6181da88700a2a944a72b0af3b8abde876fa28e1348deb5eb96c91b","src/zlib-ng/arch/x86/INDEX.md":"c12f9bf0d66743a6472fb756bf46def8eea1dd235be7fca994dcb22f693a3524","src/zlib-ng/arch/x86/Makefile.in":"9f6fe7567a99e81aaa3bef8ccfa1ad40f524efc285cf8dfe0f497a1530f8016c","src/zlib-ng/arch/x86/adler32_avx.c":"99056732c7bd5d53dc108f282811a40bf21570926781af5dc7b17cb9218963de","src/zlib-ng/arch/x86/adler32_ssse3.c":"883a5520b4481225d097c90c5359106a3c8eb7b921499c94276e999b7c39adc5","src/zlib-ng/arch/x86/chunkset_avx.c":"13c83149146c408ffdc9358bcb5355259f6196e6cc6fe025b7ea3647e313cd0a","src/zlib-ng/arch/x86/chunkset_sse.c":"f14d0557634b53af8cd6e2a1ce9d57df50244a72e85ff3b100b5ca287d1cfa8a","src/zlib-ng/arch/x86/compare258_avx.c":"8b2838d168de4608327f25fe52d53763a82413ee911d87947d3fcd72c7f9bf26","src/zlib-ng/arch/x86/compare258_sse.c":"b5049722ffd4a43a96868eeba5e000271cfc5fcbf3c2657026ead15b1df28a10","src/zlib-ng/arch/x86/crc_folding.c":"defb5a7067562612651f693c910db53cf228b7cd7fef11991504767a7d84f224","src/zlib-ng/arch/x86/crc_folding.h":"939212546611917e9e066e8ed30cdda95680ec1f5fe0890cc4865b4e6d7fc215","src/zlib-ng/arch/x86/insert_string_sse.c":"9e84a75b6a565422eb105261b6729d2a02b89133bd14372c949d5381b5deed3e","src/zlib-ng/arch/x86/slide_avx.c":"5e448e439ac24e7cb10eee176ca37f2c63f73c135c0a2af040e232bad490997d","src/zlib-ng/arch/x86/slide_sse.c":"1946cabb634c905fddef0a22b2fad19dfd99110169567c3beceef71145b2e316","src/zlib-ng/arch/x86/x86.c":"1af56e27b2e951e1ad1344e62c2f7a8c49a776fcdd1cb0f4ea9d6152118a479e","src/zlib-ng/arch/x86/x86.h":"4d2d20ea0087089141e250e77bb3d419954b9092810028b151581b9115a5fe8c","src/zlib-ng/chunkset.c":"cbf26582fff56726cc28bee05ff0a1680c50308b8dd9bb8cfb57d7f0a587d0bd","src/zlib-ng/chunkset_tpl.h":"eaaf0804f6162ab26b2b6de263a478ffb111559e653372e96e400acba9c63563","src/zlib-ng/cmake/detect-arch.c":"e0da3d16195eefb54bef77163db737a66453f25ae16648aa8f6beeac70787662","src/zlib-ng/cmake/detect-arch.cmake":"27fa8da497b39ac70d881e2d345749611dae4c30f7b7a9c9e32f2c042672189a","src/zlib-ng/cmake/detect-coverage.cmake":"e4e372991ba80a16ad47df2716708a56013cc628aa7ed01573a2360c60610125","src/zlib-ng/cmake/detect-install-dirs.cmake":"87031a40428a104f5cf38ecdb8a5028d8c679cfa772a58adde8380c809b34eff","src/zlib-ng/cmake/detect-sanitizer.cmake":"a8f7a4515278532b251b567d82ed576fe1ca7e698992ed92d1beb8e8dd22237f","src/zlib-ng/cmake/run-and-compare.cmake":"13d85c12c9d6c7b1b148bd0c5a5b4faa6a4b56f3823bf03c4f8d914c9c5949d8","src/zlib-ng/cmake/run-and-redirect.cmake":"7f08d18c09aa58113882ec760735a62a1723a5bfcae9f73bd3713a4dbaeab898","src/zlib-ng/cmake/test-compress.cmake":"0d2d1595859ccfb6795bb98700a4f7c1652b025cc344a1291524601087957888","src/zlib-ng/cmake/test-tools.cmake":"63aabfffd53970b8e145870b2a1c03bffa3595f7df04bd86f94e97b6f2a387e7","src/zlib-ng/cmake/toolchain-aarch64.cmake":"46be0bf580a49a528c72005484655afad1de3705b39a66a7b0c213b0fa81cee6","src/zlib-ng/cmake/toolchain-arm.cmake":"05e38076fd6ffb9785ff9844ccecd26436c9dc4c25b7777b62e5f52e788c3882","src/zlib-ng/cmake/toolchain-armhf.cmake":"1a2029163a57415eec9a5dd5f45d3254d349e97b1beb5d16876b741717673341","src/zlib-ng/cmake/toolchain-mingw-i686.cmake":"df9000354b820d3713d1469edc9f94cd095389b0cca83965730b8e64857fdf3f","src/zlib-ng/cmake/toolchain-mingw-x86_64.cmake":"ee316e6e3202919da5d497f9e246466fd715fcf079cb5b4afc4774089d1fefad","src/zlib-ng/cmake/toolchain-powerpc.cmake":"9bd6fc58ce5b70603657f2c195c4a5cf52fae96ad63ac787978831c5858f762c","src/zlib-ng/cmake/toolchain-powerpc64.cmake":"917fc5eef84921d8b38f43c2b4f60870965b4eecc8f018c7b3499e1142c715af","src/zlib-ng/cmake/toolchain-powerpc64le.cmake":"5b2edd36d62de513db2d32bfbf779979d81ac527b981cc3379a4e933fc5a94d1","src/zlib-ng/cmake/toolchain-s390x.cmake":"cf52cecea7bd2a9d1ff5fd8edcb03c531e3b404bbcd15a15dec2e0e19936f2ac","src/zlib-ng/cmake/toolchain-sparc64.cmake":"e543062485d06a7e0fec8135887c5e73363517fa4babc23ef7b780916d75afda","src/zlib-ng/compare258.c":"56bfd48d5ff9ca422fbb728df7a373436c73796561dff118c7d4039fe70d29e2","src/zlib-ng/compress.c":"41df6eb62d6fb1334ecfe0a0c3e50a7ee89528719857f2b8297cbc512149759c","src/zlib-ng/configure":"160f69a1e51c49f6454ece92e4c5e08675ca5d90cf22b8f79cbe54c4381d93c2","src/zlib-ng/crc32.c":"98440be8a99381151a2d740f2e2228e8c1b23b9193c3642c52a4e34799506336","src/zlib-ng/crc32_comb.c":"11a36a6088fb520a58e0304fc99cf12fc8437519e8a70fe74dad58f00af696ec","src/zlib-ng/crc32_comb_tbl.h":"d6615d209d6c7d5248c6f7fe4e5dbded13c0eb87997b37693032c2902927407d","src/zlib-ng/crc32_p.h":"1fa91375a18e090c0a0dfda39de3df36346a0b1be36c808be6b6c29c32eba922","src/zlib-ng/crc32_tbl.h":"d629378ba38ff5775095b64e277bcd41c4b89fab9b5647a9fb29e15da0db0161","src/zlib-ng/deflate.c":"6fb8979ee8bc43f6e12a649708c7eb50e60bb9bdc2e55c45ce3b15aefe779179","src/zlib-ng/deflate.h":"7b3c649965c54446097d6157dd31d3685aa7df1082e9aa64cb3cdf6ac2c4d023","src/zlib-ng/deflate_fast.c":"d51e1368fc997673c64b5ab9a620439df25f313f8274529d974c5f80b89702b8","src/zlib-ng/deflate_medium.c":"1c3d95cbac76052d39595ea750c5536541c18302b9abb398c27b58955318bba8","src/zlib-ng/deflate_p.h":"2e739301e8c53038c2a958c8c8693584cd8dae464ffef05a22db6d6fa9985676","src/zlib-ng/deflate_quick.c":"280905a191d2b2a7274f2453ac537e01a0fb6e7540a0b212c1514bfb8c9415ea","src/zlib-ng/deflate_slow.c":"a2c66723e1e71ffd6ff856407459ab311a4c6546ecf50285081fc7afcd0ccd2e","src/zlib-ng/doc/algorithm.txt":"0d21a0a4c47e512743389628d1385a831a5e5ff716491095a382b923287f4223","src/zlib-ng/doc/rfc1950.txt":"8f0475a5c984657bf26277f73df9456c9b97f175084f0c1748f1eb1f0b9b10b9","src/zlib-ng/doc/rfc1951.txt":"5ebf4b5b7fe1c3a0c0ab9aa3ac8c0f3853a7dc484905e76e03b0b0f301350009","src/zlib-ng/doc/rfc1952.txt":"f7c810fd9d719d002d605207a9b880600f71d039b9626c5b4b03f2122438dd2d","src/zlib-ng/doc/txtvsbin.txt":"47c273bb22c9773248d380549a330e5c262266b1292154b0d3014f731cc73f47","src/zlib-ng/fallback_builtins.h":"1d2c2da88009a58f240bac33f562fe5a0a39c1e773813a2d75b45283ff1396cd","src/zlib-ng/functable.c":"d9db6530035a06f95982ff3d7680a84f4b54b8425874ccbe2ab10b906bd5708a","src/zlib-ng/functable.h":"e5a2d0c10411d23f04295bcb9ddb9889388974b723caef65aa5c4ea4739f4aa7","src/zlib-ng/gzguts.h":"7b69b2f35264169bc794d0d5c00247d93c203f751d226302966c33b524ed9fb0","src/zlib-ng/gzlib.c":"7e6ad5d9d32e6429d56a5303e2c6e6870d69c023d6647a52fb95902828de4011","src/zlib-ng/gzread.c":"d5d47d24dc463b978fe828320dab140494803fd86b511300f903c7c2eabd4d25","src/zlib-ng/gzwrite.c":"1685ad2c88239b3434cd2c4a9d66b67842310b2d1dfd01aec0fc293eef20e858","src/zlib-ng/infback.c":"4decaa412219fc8adb935754c54a4dedf3952aaf67107a12512451c65eadee23","src/zlib-ng/inffast.c":"a134d4aa6a46eebe975ca0cd5ef18894fc852b6a840be21ca7243ddbe6c9d8f9","src/zlib-ng/inffast.h":"42e74a92b496ab0726be317e8497a12bf3c3cf3d0d533440ce65befd3929c71c","src/zlib-ng/inffixed_tbl.h":"a94225335396245e9f0ccb2e9b4b334fe7ee0111ed8e32a26bcd52187f364314","src/zlib-ng/inflate.c":"f33e2e7eeaa4b33ba6a2c327f8c9939e6b847afbdad349da65c97bf81c6083b5","src/zlib-ng/inflate.h":"eb25527d1bdedaa45167926dce4c39d9aaa3147b0f4a95f38f5916528c30a09b","src/zlib-ng/inflate_p.h":"4a94c51194da119770cf662ef289994f0c78d95184d54d6ae5d50a393e8f5a62","src/zlib-ng/inftrees.c":"7a777f5ff02ce60fbad6cb843ceadd7b3a8a8a0476ae010c87a0377c2e88f780","src/zlib-ng/inftrees.h":"fa80eb11c2290b345470a03cb861843e2cb1365135233ea8243e9fd79d3618a1","src/zlib-ng/insert_string.c":"aa22ba53a1e75821499809277f9ca0e5ef92b07a618136dd11ae1734e233b7c9","src/zlib-ng/insert_string_tpl.h":"1ceba9903324d10aad6e1d83653c4d534a5b06fd09076414a06215482be00bac","src/zlib-ng/match_tpl.h":"eeab4c6eea8511a7579738e622af062ad16f4016312e93ad34bc5903d8b3c4a1","src/zlib-ng/test/CVE-2002-0059/test.gz":"60bf96b8f433bd7e057ce3496aceaccd70ec80f596a4aa8bcc7786056705ce66","src/zlib-ng/test/CVE-2003-0107.c":"6ed6fba710f8f2b898750f0ec17720fbf01e45c39e8adbba6409681b34914140","src/zlib-ng/test/CVE-2004-0797/test.gz":"38caae524705f676bde13a8df9fc8c7d2fe105ba6bdbab62a405b0276fd3aa2e","src/zlib-ng/test/CVE-2005-1849/test.gz":"e4d5a60617df4b5dd44eda94751ce1eacdb325792bba6e3cc4676719a3adf742","src/zlib-ng/test/CVE-2005-2096/test.gz":"8f702d4861aa3ec98ac03a59ff26b430939630cb5cd4266d2658d3b836d576f9","src/zlib-ng/test/CVE-2018-25032/default.txt":"d7f8278db331c47bd1208bf41e7903cbddee4f7b47c666c40afdd3c96237752e","src/zlib-ng/test/CVE-2018-25032/fixed.txt":"3b27a98edd2f3f580033f9add11d3469d7808c969a1128ee00c18ac7a12cef57","src/zlib-ng/test/GH-361/test.txt":"358497d0a7251ea42101dc77b02337f46fd89af09643a8288e2a3082e5d24128","src/zlib-ng/test/GH-364/test.bin":"af5570f5a1810b7af78caf4bc70a660f0df51e42baf91d4de5b2328de0e83dfc","src/zlib-ng/test/GH-382/defneg3.dat":"b22bef6b7392401c9e7b079402c4a4074053d7a914d050400e37fd7af6fe26d5","src/zlib-ng/test/GH-751/test.txt":"b83d833803b7bc3124fb2a0034081f0b999ad10c33a8dfa3bfd181dc078ae3ee","src/zlib-ng/test/GH-979/pigz-2.6.tar.gz":"2eed7b0d7449d1d70903f2a62cd6005d262eb3a8c9e98687bc8cbb5809db2a7d","src/zlib-ng/test/Makefile.in":"48d033f2dbb62635624bf2c9e3e7fe279b72afc3411d14cb7cfdbf40f5b80e19","src/zlib-ng/test/README.md":"d60ef4851222ebc2a9fbc23f292ab11bc7fee40ba6171ea768b2ffa005df5b1d","src/zlib-ng/test/abi/ignore":"02aa87f77656dbc1fbddd23f436cd15465a92df0722da4055cae1bc8bf013097","src/zlib-ng/test/abi/zlib-v1.2.11-arm-linux-gnueabihf.abi":"f5e91f25b558a891fecbeb6e2e9575698630ab700d055a38f3bc4fe66257f513","src/zlib-ng/test/abi/zlib-v1.2.11-x86_64-linux-gnu.abi":"038337383cf780587d810cf5400d632f3a1f8517e63ac4a71b6e5224db8b1413","src/zlib-ng/test/abicheck.md":"6b4a87d760b3848fb1ded6782e02a1d074d9e487bdabb29274a62b31cdf48772","src/zlib-ng/test/abicheck.sh":"7ca2884ff37c697d380f620554525f9b9dc7fa76b45f866d284b2ea5b98c65cc","src/zlib-ng/test/adler32_test.c":"db3e8ad9a4e2ecce0c052b0bfe19834d3ff2fb2e9239cc3438a2c95db00b1d21","src/zlib-ng/test/crc32_test.c":"8f1223d8aa4c52a5e7323f422023f6b892ce684eaf7439ad905b855293f40143","src/zlib-ng/test/data/fireworks.jpg":"93b986ce7d7e361f0d3840f9d531b5f40fb6ca8c14d6d74364150e255f126512","src/zlib-ng/test/data/lcet10.txt":"1eb5d7bddb1c3cb68064d5b5f7f27814949674b6702564ff7025ced60795a6d9","src/zlib-ng/test/data/paper-100k.pdf":"60f73a051b7ca35bfec44734b2eed7736cb5c0b7f728beb7b97ade6c5e44849b","src/zlib-ng/test/deflate_quick_bi_valid.c":"a36697e5779a645354823f14540bd60b9378c2f4c5f2bb981d86bb34f29fcbb0","src/zlib-ng/test/deflate_quick_block_open.c":"455bd347bb88debdfacb409846170274991ec9ba71c52b8fd0e526daf57265eb","src/zlib-ng/test/example.c":"1c8d9d14128da9fb5415683aa7318ae0aa94b743f75905288a2a9decd4ead98d","src/zlib-ng/test/fuzz/checksum_fuzzer.c":"65a96358c9a82efc4b251b4f322b02fade7b69f9bc6ac07294e641e3fe1ccdb1","src/zlib-ng/test/fuzz/compress_fuzzer.c":"1ab70608075c4bc60f89aa2f327cff88362ee7b1d31da88ed54ca51e5f99e5c9","src/zlib-ng/test/fuzz/example_dict_fuzzer.c":"be68f9eee3deae7f9163c6288742e5455bc28f659f80fdb276fafe215f028b97","src/zlib-ng/test/fuzz/example_flush_fuzzer.c":"f12246a184dcfe0a19a98cdc742a1fe8da388ad20b406635d63f1fa10d45b9ca","src/zlib-ng/test/fuzz/example_large_fuzzer.c":"f490abcd332fb4e7921292adf6876d38d7f71c8d2443212c781ba88957ff9303","src/zlib-ng/test/fuzz/example_small_fuzzer.c":"a9b3436b291ace821b6013311a1100e19a9e1c67fefd3f97dbd60688f9bf22b1","src/zlib-ng/test/fuzz/minigzip_fuzzer.c":"5faecfe9e6ecc47e746151bd1cc24a2e2dba8b7ffeb270d2c88cb126273ab446","src/zlib-ng/test/fuzz/standalone_fuzz_target_runner.c":"f25649ed35b8b7a3899c8d7ff52f9972dfc7bf274889e0a7a77fbfdf1c1cfef0","src/zlib-ng/test/gh1235.c":"8310ef780dc483a1708750cd7c120b8e9cc0e1614767d24c01869e529074e981","src/zlib-ng/test/hash_head_0.c":"448def3e8ea13fbcac86202e50b8a71b6cea585d7bdbca0bc6cf6056e4059f98","src/zlib-ng/test/infcover.c":"9c0e8068fdc614b1852e8d274231b41ce3ce975d4419ed31e700a0b05e702303","src/zlib-ng/test/inflate_adler32.c":"ab430c97ae8f569784710118038e8ebf53f4136d1a957e1277c0904f9218340b","src/zlib-ng/test/minideflate.c":"34fdce39628ffd173f7736d9fb65dfa40d0b0289def64b935075f6c6cffe1999","src/zlib-ng/test/minigzip.c":"7dbce6528601f7fdd586280885ed439cb539e15f36dd3974274729bfcdd41928","src/zlib-ng/test/pigz/CMakeLists.txt":"aa70f1025adc004985bfe0accee9b7a80e04786d82705e27c377a5e8d4ecbaaa","src/zlib-ng/test/pkgcheck.sh":"581b3de9c58e96038af94c73cbdb30eed32900f7abb8fa7692426fa68059b0ef","src/zlib-ng/test/switchlevels.c":"ceb6cc4d48a637562009d8f7f82635fa9942acd1bfd597acd99454a03a3a98e3","src/zlib-ng/test/testCVEinputs.sh":"5de6198444c16726f8e0a8f2beb5b89c5ae7e7e3736ce760b9fbc719493e7e4f","src/zlib-ng/tools/codecov-upload.sh":"ec7a8f1405820810e486e3d7e2fda7eb958c17877b0000b93abdf09d87732a2f","src/zlib-ng/tools/config.sub":"32186cfeb5db62c99e1dfbfb07f24c1a50977447b4c26d2907021c74422a70d2","src/zlib-ng/tools/makecrct.c":"55c8f7b8e29393e95988a29de8cb1a1bdf2738a69d53627bd0f9d7bf169bf0a8","src/zlib-ng/tools/makefixed.c":"bffd02540231304f9bcc755b8cb9ae5cfbc48975857bbb4547f1d6acce21ef57","src/zlib-ng/tools/maketrees.c":"30e9f70addf691d1241e594a7f31fc78b119b65e8af9ac8e20fe6da01635d3b3","src/zlib-ng/trees.c":"2cd9a1dc8d9231e9fc4e53e56b87307989c1b7f33212cde4ee434ef71c28af2a","src/zlib-ng/trees.h":"24174f3543b01ee1ef370bbf6d15551a21871cded18b2aadf09a71e7904b6f99","src/zlib-ng/trees_emit.h":"2e93093ae5362523a26877d6fd663bb05793795889d2bfb987cbada9a9dc4517","src/zlib-ng/trees_tbl.h":"35f4fd0ec080c1ade342e2dd1b0f5cdc7e9f18990faa48d7a8a69bc318ebe607","src/zlib-ng/uncompr.c":"4ebb486b27930f8a6ec4a3cc90a207d0bcf8a4779d1dbf3b2184a2b2a5735cd1","src/zlib-ng/win32/DLL_FAQ.txt":"f17fd3823726adbae63b91c00d5db1dccae2e289258edabbbbebde04bb6e7e8c","src/zlib-ng/win32/Makefile.a64":"775d6902373d1583430b5d7467f001746be323610c89be27e02bbfe0205994f3","src/zlib-ng/win32/Makefile.arm":"7535e022f482920c3fa7a267e84e39ad790d150f72e5c30414baa156c2fdd9b6","src/zlib-ng/win32/Makefile.msc":"d769a00c0ad4cb5fc624d2ae004dfa3785a2f4310324b03afd2156e759003a06","src/zlib-ng/win32/README-WIN32.txt":"cdcca6e7a5d2d23618a48fafb8eea347227f8ecf1f38a6aa90f0e7e455bc6574","src/zlib-ng/win32/zlib-ng.def":"f240276caf805a10d024fc6a66efe915c435734c69732818d92fb04d08ab350c","src/zlib-ng/win32/zlib-ng1.rc":"ea0ea4d116b583510b113a27fdec2ad4f0890206963f0e3838f275b8005dde5d","src/zlib-ng/win32/zlib.def":"d9c371ff2677567350386441a2e3d0258010d6502290bbac5ac42ea168bd5212","src/zlib-ng/win32/zlib1.rc":"ec5021dba35f9fae5f5f82ad6b6bd059928548e0608e4ede0bcffccf5c1210a1","src/zlib-ng/win32/zlibcompat.def":"73728b9df4379dc70ebd9b2a9f20d6e4ed7c031fa1f351cdeae1de7d1db05bd1","src/zlib-ng/zbuild.h":"d4d52d3296cc949a5d694e7349a8236854f2ec116c184a310e4e62b28caf5b63","src/zlib-ng/zconf-ng.h.in":"f206ac69c1fa48c670648d26028263372a539ed1243a9a26e5b35bf52e2363ff","src/zlib-ng/zconf.h.in":"dbf08736c3bc5e41242b09e13d0a523b440250410476dd58747c14e28984f1e5","src/zlib-ng/zendian.h":"f5cfa865281d2c5d0b097d318500f27daeec346e7882de68e279486d79c52e77","src/zlib-ng/zlib-ng.h":"d51896e8411868ed195d5cf41fda4f1c5a9c891832dfd16b559a5ed6beedd890","src/zlib-ng/zlib-ng.map":"03ef4439594619e215dbb1717f8c13e16159308ef3817761ba1a3cca7f7834df","src/zlib-ng/zlib.h":"7e3666971e08019fc7097f11d593aac9ff6824a1ecc945c48f76009f7c27d55a","src/zlib-ng/zlib.map":"9997aa913dec6da106ab2089d2a72ca5e1b7fafe0807ac0bc1318ce8c8defab9","src/zlib-ng/zlib.pc.cmakein":"17668e07edbe5971043bea26a2f2b92c4c7cf4724620f1156f3ea1436d2aac93","src/zlib-ng/zlib.pc.in":"cf94c9aa44878a62e27c2f75354c08326b3bb5250a9b11496855cf59691177bb","src/zlib-ng/zutil.c":"53418b23c7878e968b4d04df8ebac74f64f60d32277f2343d16da52059dbc782","src/zlib-ng/zutil.h":"a14c18dd4a96909aaf0aa016cb6df97d77cf5b735283527c906181eead22f0e9","src/zlib-ng/zutil_p.h":"c259b33614007463b41d4184e0bdf10d62325445ee9308e1e1885862d201657a","src/zlib/CMakeLists.txt":"d3ea46cd350c74c21c2dd97f6d0ad354db76b2b43cc91ec1144b88267f67a588","src/zlib/ChangeLog":"6933f4ab74360476bc80d9eda2afd98f93588a5d276e1197926267421dd6959e","src/zlib/FAQ":"1e8a0078be0ff1b60d57561a9e4a8cad72892318a8831946cba1abd30d65521c","src/zlib/INDEX":"3b4e325d47ae66456d43fcf143ba21ab67a02a4f81be7ef2da480ba30d774266","src/zlib/LICENSE":"845efc77857d485d91fb3e0b884aaa929368c717ae8186b66fe1ed2495753243","src/zlib/Makefile":"ef23b08ce01239843f1ded3f373bfc432627a477d62f945cbf63b2ac03db118a","src/zlib/Makefile.in":"77a662b885182111d7731eef75176b4c5061002f278b58bf9bf217e2fa16cadb","src/zlib/README":"4bb4d5664fb9d06ef0d47e8ef73104bd545a5a57eb7241be4f2e0be904966322","src/zlib/adler32.c":"d7f1b6e44fee20ab41cef1d650776a039a2348935eb96bcbd294a4096139be3a","src/zlib/amiga/Makefile.pup":"a65cb3cd40b1b8ec77e288974dd9dc53d91ed78bbe495e94ccc84ddd423edf1f","src/zlib/amiga/Makefile.sas":"0e63cf88b505a1a04327bb666af3a985c5e11835c0c00aed4058c0dcc315d60e","src/zlib/compress.c":"6d0f0d0784744acca2678ce325c8d7c4c030e86f057adb78adcee111d2248c0d","src/zlib/configure":"2d964a697f9060d3a8fc5b4272c9d07b22e5fe6f5cf327e5c29f62f67d935759","src/zlib/contrib/README.contrib":"b925ae08d371b33c4b5ffd67c707150729a476caf47cfe2eafc002291f23f931","src/zlib/contrib/ada/buffer_demo.adb":"469cf566a6965767fee6b987a239ed8cedcc66614940d45a9b434331fbb435ce","src/zlib/contrib/ada/mtest.adb":"41b6f31684770334afdc4375871eb1408542f37a823a073556fdbfdb63753160","src/zlib/contrib/ada/read.adb":"fa5b989aef0c5715a3fcb15de93985f7f10aeb0a7f5716745c95ed820eb9af9c","src/zlib/contrib/ada/readme.txt":"8fe9e5303f2e8e8b746c78250e74b7c4aeb7ce6212fdce751fc3a0ce56a47fe2","src/zlib/contrib/ada/test.adb":"5e3abe79b387e09a9a42bd0543105e228f39a335240cffc33d71f0ba66ff2511","src/zlib/contrib/ada/zlib-streams.adb":"f45988e2bac76eb25a0dc981f46576e7432c35dde1790bbc2b650f0090b7fa72","src/zlib/contrib/ada/zlib-streams.ads":"969e8edb0611810fb52159dcb7c40228f4e5da810a7a3576b778116a93038c6b","src/zlib/contrib/ada/zlib-thin.adb":"03d89244ee5ec9771d9b5050e586c609f851af551b2e64eb151f1d5be0b63ae9","src/zlib/contrib/ada/zlib-thin.ads":"631ef170bde16c3ca8d412b54a0e519815b80197d208f8f393e6fe017bb0968e","src/zlib/contrib/ada/zlib.adb":"c9ca5dc34fbcdf06e2dc777b7e9dcd0ba31085b772b440eb0e12421323ab672c","src/zlib/contrib/ada/zlib.ads":"02634bec0d5e4c69d8d2859124380074a57de8d8bd928398379bfacc514236d2","src/zlib/contrib/ada/zlib.gpr":"859bb69dce38dbe9dca06753cf7ae7bd16d48f4fece8b87582dab8e30681d3de","src/zlib/contrib/blast/Makefile":"17d5d26c24bf51cad51045a38ffb73cc3539d29e89885aa249fcfd45a8659d5c","src/zlib/contrib/blast/README":"baa763ae03d88ef7ece6eb80d9a099b43d0b57639d6d281e1c7c6ca79d81daba","src/zlib/contrib/blast/blast.c":"1ab3e479d342bfc144167b808fb00142264bc50f24a110ca88cc774e351c218e","src/zlib/contrib/blast/blast.h":"9c1c422b76311d4cb06863ffc056668b6240f3dd998bc02e89ee590d482bfdc2","src/zlib/contrib/blast/test.pk":"5f5c262c545574a5c221132d5ef832478d222d70b015341795b3860204140d7c","src/zlib/contrib/blast/test.txt":"9679b2c98e1283222d0782b25a1c198dc64ba9ebd1addd6dc6f643a45947cda3","src/zlib/contrib/delphi/ZLib.pas":"6dcc65866e3fb3d33d2a2328c547458156883a3e6749d52ded209357a49d61de","src/zlib/contrib/delphi/ZLibConst.pas":"84bcc580bdf397e570f86f3f5a5b8c7bf537828f30b4b72648b81911f6bf5095","src/zlib/contrib/delphi/readme.txt":"f7420ed2de77d4b498eefbbe6402a1d17dc2d411735289c78a265c7f10fdaee5","src/zlib/contrib/delphi/zlibd32.mak":"850e91b6c9ea05de61a411cbda16fa0f10118cd88bb32c4b7226988776f8d511","src/zlib/contrib/dotzlib/DotZLib.build":"b96137097669644ecb9f42cdd3399d1fce9c512788374609303f7e50abf597f0","src/zlib/contrib/dotzlib/DotZLib.chm":"20d0e3edd57f849143255a7f0df1cd59d41db464a72c0d5ab42846438a729579","src/zlib/contrib/dotzlib/DotZLib.sln":"a979198c5b8d144c1ac8f993bfb6f4085d135aa58ca9dcf63ebabf52b5c695f7","src/zlib/contrib/dotzlib/DotZLib/AssemblyInfo.cs":"314afcfb339ea95f5431047b7ab24631b11c3532c7ce5dc2094ed0cf80a7c16d","src/zlib/contrib/dotzlib/DotZLib/ChecksumImpl.cs":"e7c047a2c3bcf88d3d002ee3d2d05af414acf53cb4451efacc0f2e95a474ea0f","src/zlib/contrib/dotzlib/DotZLib/CircularBuffer.cs":"be84c9736fe7bdc2bfae70466d8fff582504e928d5b5e110fd758090090c8cb7","src/zlib/contrib/dotzlib/DotZLib/CodecBase.cs":"259bdda1b7d6052134e631fa24bfd9dca6e2362563496c8b85257b56c848908c","src/zlib/contrib/dotzlib/DotZLib/Deflater.cs":"06ba6696a3c15c53ba5fd5a1c2bf50b51f217010228fc1e4c8495ee578f480de","src/zlib/contrib/dotzlib/DotZLib/DotZLib.cs":"9837fe993fd631233cc5e53ff084d86754b97f05ec77c54b0764c2706f186134","src/zlib/contrib/dotzlib/DotZLib/DotZLib.csproj":"21606db31dfef6410dd438b73f1db68856eacabcce6c0f0411fc4f17e17001f3","src/zlib/contrib/dotzlib/DotZLib/GZipStream.cs":"8d1de9755c77046b4ac71340a0a54434ebf4fd11b085c44454d7663a9b4df1c5","src/zlib/contrib/dotzlib/DotZLib/Inflater.cs":"9016ca73818f5b6a28791abc3af6da7c4d2773b6a3804f593f6d5737a62b99ad","src/zlib/contrib/dotzlib/DotZLib/UnitTests.cs":"c95048d763c7e367ba0bb7c31981e0610131fa12356bbd9bfdb13376778e9a0c","src/zlib/contrib/dotzlib/LICENSE_1_0.txt":"36266a8fd073568394cb81cdb2b124f7fdae2c64c1a7ed09db34b4d22efa2951","src/zlib/contrib/dotzlib/readme.txt":"d04972a91b1563fb4b7acab4b9ff2b84e57368953cc0596d5f5ea17d97315fd0","src/zlib/contrib/gcc_gvmat64/gvmat64.S":"22ff411b8b1d1b04aeaa8418b68245400267dc43c6f44104f6ccd37f0daee89f","src/zlib/contrib/infback9/README":"890288f02bb3b1f9cc654b87a07fcea695f90f6b9bd672d25bf6be1da2ec1688","src/zlib/contrib/infback9/infback9.c":"0a715c85a1ce3bb8b5a18d60941ffabc0186a886bcc66ba2ee0c4115a8e274e9","src/zlib/contrib/infback9/infback9.h":"dda2302f28157fe43a6143f84802af1740393572c2766559593996fd7a5a3245","src/zlib/contrib/infback9/inffix9.h":"84a2ba4727767c18af6505f0e81d9c814489c8b9ed330a25dad433db72997e43","src/zlib/contrib/infback9/inflate9.h":"32a907676cc36e27d0fdc0d99adb83a0b23f20ab61896269216d40fecf08d349","src/zlib/contrib/infback9/inftree9.c":"1f262e5ae8094c9d8b172241e567c86be560327b840ca8fb771e98461bcb158a","src/zlib/contrib/infback9/inftree9.h":"145072793141cb313c91cdf9dee9d4b8e8a38d77099f87e9cd05c7b5ead8f099","src/zlib/contrib/iostream/test.cpp":"0f3c77e013949eb9c91e6b690ea894e19d97944d6b0885b82806fc3ad99680cf","src/zlib/contrib/iostream/zfstream.cpp":"8ebb9b3d521cc3392953f27658cf1f6dcb763216079f69a1518ec5ca0e42a63b","src/zlib/contrib/iostream/zfstream.h":"4369c35e66f63f52ca4a5e1759bf720507ccabb8f3f132e2f18e68686c812401","src/zlib/contrib/iostream2/zstream.h":"d0343e0c57ff58008b6f29643d289c72713aa2d653fe3dcd2e939fc77e7e20b6","src/zlib/contrib/iostream2/zstream_test.cpp":"f789df183cc58b78751985466380c656308490a9036eb48a7ef79704c3d3f229","src/zlib/contrib/iostream3/README":"43ec48ecbd95a8c45db20b107fac73b740bb11595a4737329188f06b713972cc","src/zlib/contrib/iostream3/TODO":"af5ebc83fb88f69706c8af896733784753dead147687e1c046f410c0997fd88b","src/zlib/contrib/iostream3/test.cc":"8e17fc48dfdbc6e268838b8b427491b5843b6d18bc97caa6924de9fad7abe3da","src/zlib/contrib/iostream3/zfstream.cc":"8cdd67ed0b13c192c11e5ea90e9d5782d6627eb303fbc4aa5ebda2531ec00ff8","src/zlib/contrib/iostream3/zfstream.h":"1bd74778fac45ee090dfc0f182a23e8a849152deb630606884b2635987b357b1","src/zlib/contrib/minizip/Makefile":"0f59cf07531cf34cb359f9dbe26d8207a2bbbdad618557894eb629925f7e8899","src/zlib/contrib/minizip/Makefile.am":"2313a3480a2c3745fa7ce216829cd0367058907d3a0902e5832c66c84a2fdfc6","src/zlib/contrib/minizip/MiniZip64_Changes.txt":"302c62b328647f5472fb7755249a83459be7f8ffb1fae07e8ba318fce8f4126c","src/zlib/contrib/minizip/MiniZip64_info.txt":"122719c32ef1763a5f6ba9c8cdefc1d78a76f7156b09e7b6f69b73f968e0dac3","src/zlib/contrib/minizip/configure.ac":"959e4762ddcb36dcf30512611ca9fbcbcd0c943228a6ac2975708798ae09a438","src/zlib/contrib/minizip/crypt.h":"1d25a0fab3189dc3c6ae43c7813e1e5d07d0d049bd32bd7bd0e9ccd752bfdd5e","src/zlib/contrib/minizip/ioapi.c":"f6878a3ecf6802f0f75cadb41a114fa274636c386bac794c66cbb27a24d9a29f","src/zlib/contrib/minizip/ioapi.h":"9f5448f8d5e8894d6f397dd09d24f7ff39cb818cd493a8bd90dda19553b814ea","src/zlib/contrib/minizip/iowin32.c":"103cdef91d57ceca7a1c1973772ff7e1d44c7b3e227a3640171957302bd9e974","src/zlib/contrib/minizip/iowin32.h":"586f22b9c3c64da253ce2b518e0fad61f19a7b47b289fc704cc9708242294c49","src/zlib/contrib/minizip/make_vms.com":"65736d9c4888f2373d3db0a13864d150c5040453f5bc2a5c8784379a7ea67590","src/zlib/contrib/minizip/miniunz.c":"b29dfb4cff9763497d8f0656c97027995e1ea0b4104e4a217ba7882337ae7a7a","src/zlib/contrib/minizip/miniunzip.1":"66d8684392167091ef0fe01598d6a0daa26e7e448e2df6c3cb257487735b83f7","src/zlib/contrib/minizip/minizip.1":"5404596e8e5587a52f563906119f32ceee30a6d97a966afa5c7afbe4d373e210","src/zlib/contrib/minizip/minizip.c":"b5b8f380297be0d90265356704df1e41bee0e903a2169263a2b50dc22cc3180a","src/zlib/contrib/minizip/minizip.pc.in":"8b6670b42d8e5e519e1cc89db093efc07ba23cb1ddfedd3c93ff2df08c3ce8ac","src/zlib/contrib/minizip/mztools.c":"cd887c4af6d20823bd15f24008b10acf01969b4165d7848656bde843a92428d7","src/zlib/contrib/minizip/mztools.h":"6f82c52279e8f79165f4446be652e5741a49992ac58632470335aa34c564072a","src/zlib/contrib/minizip/unzip.c":"fc9e8d752618a05c1f3a2ce61ebf76d0c8053dd5579458f836834a36e8690bbe","src/zlib/contrib/minizip/unzip.h":"20cdc47658a3e41db897d31650e46cd2c8cca3c83ddaaeb6c7a48dd8b7f18e03","src/zlib/contrib/minizip/zip.c":"bee2d6fa310c9451b24c036d5840a8c7c90b7cf8d12d5767175c207e56fefcc3","src/zlib/contrib/minizip/zip.h":"75b635dca8294790ab7ec1f72e9f1fd352d75b189c3c9b61c68f76bd7e612043","src/zlib/contrib/pascal/example.pas":"d842d456ecb6ff80e34cee2da31deb2072cc69ca837497bea8b8bee203403474","src/zlib/contrib/pascal/readme.txt":"02f997c37991ddae0cb986039f7b4f6fc816b3fd0ffd332cad371d04c12cf1b9","src/zlib/contrib/pascal/zlibd32.mak":"850e91b6c9ea05de61a411cbda16fa0f10118cd88bb32c4b7226988776f8d511","src/zlib/contrib/pascal/zlibpas.pas":"720346d2f40429de31bb16a895f42e878f259b1aff7d46c63e6616e629b3f7d5","src/zlib/contrib/puff/Makefile":"d9d738030464aaae354196c14fd928adf591832fce7d71ac1977c1d8d4923a4b","src/zlib/contrib/puff/README":"c5b9852fb11e0d6b6e916e5134cf034524d901b95368972133e0381e480eb479","src/zlib/contrib/puff/puff.c":"433f7f4495481dd95576dbb548b1bcfc5ca129d30421695fa609f5f6c14908b6","src/zlib/contrib/puff/puff.h":"969b7be2a930db0cdcb19b0e5b29ae6741f5a8f663b6dba6d647e12ec60cfa8e","src/zlib/contrib/puff/pufftest.c":"d24e31c1d277d07c268f34e9490050c6b53c68b128da3efbb1d05fc5b31004f7","src/zlib/contrib/puff/zeros.raw":"b7b0887089f7af1f6d1e0b4c0a1e8eddd10223b23554299455c6c9be71b653a3","src/zlib/contrib/testzlib/testzlib.c":"c6c37b35c6ecc9986a9041f86d879cc37a9e4d8315af9d725071eb3b2cade0c5","src/zlib/contrib/testzlib/testzlib.txt":"2359bbdc84eb8a04e0f1cd16cd81a2896e957f2ad58dab3ca78ef55b7d0dc577","src/zlib/contrib/untgz/Makefile":"8f5ab1564813e091cea8f1bb63da32fd80ac763d029277b0cabf50f60aceefe1","src/zlib/contrib/untgz/Makefile.msc":"d0f537de11d9e0e36e2a98b3971c537265f4b533b4c48797094365ad9ae8388b","src/zlib/contrib/untgz/untgz.c":"9a12d774301d252dcd38bba07ac369319da4c04c4fef8a50fcbf40aebf29c2a1","src/zlib/contrib/vstudio/readme.txt":"df5fe112bef3c23d5767602736f6d0ce43cbb49b584210fe57f6f59e634a49d0","src/zlib/contrib/vstudio/vc10/miniunz.vcxproj":"dd607d43c64581172c20c22112821924dfe862f56b2e5eb8780bdd0714d9527b","src/zlib/contrib/vstudio/vc10/miniunz.vcxproj.filters":"4b8466bf00c70b81c31cc903e756e04151fd90fdcbe102f3568a2c8b6190ea27","src/zlib/contrib/vstudio/vc10/minizip.vcxproj":"af73f2cf8ae51e65e85342faeb40849a2310c97bc77def42b38d7070460a6cf0","src/zlib/contrib/vstudio/vc10/minizip.vcxproj.filters":"f2815f9e3386c393d0a351632823b221ef9689da1f422ecaa561dba2a612fb0a","src/zlib/contrib/vstudio/vc10/testzlib.vcxproj":"c21e64259bf9efe97e1103212e7a6e1b7372b50067b4ba14cfa678e1f491095f","src/zlib/contrib/vstudio/vc10/testzlib.vcxproj.filters":"a7caddbac3ba90b5d482e6d926ef35cc40dc3553ed3776ef6b68a528fd5b0631","src/zlib/contrib/vstudio/vc10/testzlibdll.vcxproj":"3f317d8964f17901c3e68bff5deaec10b6ccc50a572235999e8097292692984c","src/zlib/contrib/vstudio/vc10/testzlibdll.vcxproj.filters":"29c9535775aa76320ee4efd001d41961faf6c58cedd8b29d3986e85f73d2f6fb","src/zlib/contrib/vstudio/vc10/zlib.rc":"6041a4727ea47520058a5b4bb8de87592883eb7f26dd39df62879c347f3888d1","src/zlib/contrib/vstudio/vc10/zlibstat.vcxproj":"50402ab8c63f746c034d6ce51d9612aff5b6af9aa27790cffa4b7deed4b30eb8","src/zlib/contrib/vstudio/vc10/zlibstat.vcxproj.filters":"eeb1de64c252c46b822f73f272127f6f9f0570ef22d234e093070ba95a4dde24","src/zlib/contrib/vstudio/vc10/zlibvc.def":"a228e521a561d4456c83c7081b4e9950cfce99133af7d5fdd27f12a8fd53efde","src/zlib/contrib/vstudio/vc10/zlibvc.sln":"e659860f705f31b87ea9139a3cb4ebe1561e120bce495383a54614fc82b49990","src/zlib/contrib/vstudio/vc10/zlibvc.vcxproj":"efad8cb150c0e5122f8c700d95c5de659dff92b171917c66bdbd082fff500b58","src/zlib/contrib/vstudio/vc10/zlibvc.vcxproj.filters":"c801732b7c7017796add50d2b71a228f99f95a46650baad307ff7e8358a2bfb0","src/zlib/contrib/vstudio/vc11/miniunz.vcxproj":"746e4c11fb8af4bcd6a9d68ba81ed1dc366a5de3bed56b291ee969ad733a7bb0","src/zlib/contrib/vstudio/vc11/minizip.vcxproj":"340617cae9cf4fcb003308021d3782ec3639e60d62d79a3aafc0a50bb55b061e","src/zlib/contrib/vstudio/vc11/testzlib.vcxproj":"99eadfdf2e41bc036141c174c4d0035d87572ce5795dcc28f39133f818a79d08","src/zlib/contrib/vstudio/vc11/testzlibdll.vcxproj":"583bdef522b0176829f0d8139ea2a88b9cbc14379d1334f3a863989ed3df9b67","src/zlib/contrib/vstudio/vc11/zlib.rc":"6041a4727ea47520058a5b4bb8de87592883eb7f26dd39df62879c347f3888d1","src/zlib/contrib/vstudio/vc11/zlibstat.vcxproj":"b07f792843d05ac883391075bc3b9625437490d8d40944ad359aa2134a09a3aa","src/zlib/contrib/vstudio/vc11/zlibvc.def":"a228e521a561d4456c83c7081b4e9950cfce99133af7d5fdd27f12a8fd53efde","src/zlib/contrib/vstudio/vc11/zlibvc.sln":"27389b515997defd080519f95aff87e89fcbe8b26d73c5ebb73c544cfef4d60e","src/zlib/contrib/vstudio/vc11/zlibvc.vcxproj":"d02d014ef957119a6fd0ab243c892b74d1592b117750b95fed21097c8ed922d9","src/zlib/contrib/vstudio/vc12/miniunz.vcxproj":"1494af54570f6e93852932956d49a8c25e57b5abc1ac979945605ca9143df9f8","src/zlib/contrib/vstudio/vc12/minizip.vcxproj":"9bf128ed6760ca5f019006f178b1c65f4c7ff122dba8d297b64b0eb72feeb120","src/zlib/contrib/vstudio/vc12/testzlib.vcxproj":"be88bc1220c0447c2379fdab3ac88055f58a8a788d3e9cec494342187e760eaf","src/zlib/contrib/vstudio/vc12/testzlibdll.vcxproj":"93416510256935d79625dc9fd349cfce6968c062d42a138bec404a26b2f92f5e","src/zlib/contrib/vstudio/vc12/zlib.rc":"90067be57a8c5df594a850352642f8b1dcb32e3d088d3805ebafe75a27412b74","src/zlib/contrib/vstudio/vc12/zlibstat.vcxproj":"faa229a851c76b77d65bb4742d8369efe566652bb6a1447d1e3539f289b5313d","src/zlib/contrib/vstudio/vc12/zlibvc.def":"a228e521a561d4456c83c7081b4e9950cfce99133af7d5fdd27f12a8fd53efde","src/zlib/contrib/vstudio/vc12/zlibvc.sln":"162e0faa80a56d89eea71a0b89377708eec2faa0dc72091cc0abb07fbdea49a0","src/zlib/contrib/vstudio/vc12/zlibvc.vcxproj":"8ac8cb2d29b880a738011d29d0511af9b14f321bed90f674109c446f4108d442","src/zlib/contrib/vstudio/vc14/miniunz.vcxproj":"0312511d4a30cea979c4e36edf994a537ed8a9d924f6b5c536cbcd094773c11f","src/zlib/contrib/vstudio/vc14/minizip.vcxproj":"9e7bb7a6ac723e4b2db900627c366f9bb93a351381995d9c69a50c0126f64233","src/zlib/contrib/vstudio/vc14/testzlib.vcxproj":"88667873d9d61d65016b9501ca925532eb55f56230e5911d3e2a01cd8a9fb2a4","src/zlib/contrib/vstudio/vc14/testzlibdll.vcxproj":"69f544898b4275cd3d8e19b8f1f8cb39c1cb98a30cdb033242e4b94c57bfa150","src/zlib/contrib/vstudio/vc14/zlib.rc":"90067be57a8c5df594a850352642f8b1dcb32e3d088d3805ebafe75a27412b74","src/zlib/contrib/vstudio/vc14/zlibstat.vcxproj":"5629eb0cc30674a39aa3636f1cdd190393b0dbd4c69a35e36ad85b6340055605","src/zlib/contrib/vstudio/vc14/zlibvc.def":"a228e521a561d4456c83c7081b4e9950cfce99133af7d5fdd27f12a8fd53efde","src/zlib/contrib/vstudio/vc14/zlibvc.sln":"47a50bbde8ca6336cecd8c0e4b65e515fc46ae84c7b61008ac9864162f777286","src/zlib/contrib/vstudio/vc14/zlibvc.vcxproj":"09f496a2ad3afdd5e3f36b7285440369dcac4559656edc00ed7a74c7ec9fa10f","src/zlib/contrib/vstudio/vc9/miniunz.vcproj":"7db9b2ef5ff05d3de4ba633feab10e85d45434c865d520ffa1974421904996f3","src/zlib/contrib/vstudio/vc9/minizip.vcproj":"7797a9ad3c0056f3a3cf8fcde7618acd1d151c65d15f841fccd8d9d878ae7bb0","src/zlib/contrib/vstudio/vc9/testzlib.vcproj":"8df405917800adccee6bad2116022c2c82d661b37ea40ea16405fe4dbcb4b69f","src/zlib/contrib/vstudio/vc9/testzlibdll.vcproj":"cde6806f5c81d1fc311f9921c17ba56f8e386d097783a6a90875d385837c47e7","src/zlib/contrib/vstudio/vc9/zlib.rc":"6041a4727ea47520058a5b4bb8de87592883eb7f26dd39df62879c347f3888d1","src/zlib/contrib/vstudio/vc9/zlibstat.vcproj":"d393d418d827ad9fb9c6516f1a7620371d15e3f5afef8ba60b51e50acc7199e9","src/zlib/contrib/vstudio/vc9/zlibvc.def":"a228e521a561d4456c83c7081b4e9950cfce99133af7d5fdd27f12a8fd53efde","src/zlib/contrib/vstudio/vc9/zlibvc.sln":"26e58d4b2cfcd941c367fb2a18537b3b9f002f2ac1278b700ea1129c50501452","src/zlib/contrib/vstudio/vc9/zlibvc.vcproj":"eaca98fcf166738b59fcdbd179dac9f98f985c6ba49212b186343a998816f081","src/zlib/crc32.c":"ec3ff0f97858b228513027a490e4330cbb23c6fbdd24d839902ffa89854f209c","src/zlib/crc32.h":"9a2223575183ac2ee8a247f20bf3ac066e8bd0140369556bdbdffc777435749e","src/zlib/deflate.c":"4470e36709ce7d6067fa3e8f60bb7f693b055bee42a0d6655ed71faa2db87fde","src/zlib/deflate.h":"0db1b5ef79ca6ba0f508b7b8bdaa11af45c5ebe2c89ab4f1086dc22b963a52fa","src/zlib/doc/algorithm.txt":"992590931e982c0765286c2d83f6e9ff0a95aabb08e28c30c52bae3e8c4bd5ad","src/zlib/doc/crc-doc.1.0.pdf":"064f9252d6e2e15ea56c2bd18e160e5c9c84bcd137c11a7af497aaa511ace998","src/zlib/doc/rfc1950.txt":"8f0475a5c984657bf26277f73df9456c9b97f175084f0c1748f1eb1f0b9b10b9","src/zlib/doc/rfc1951.txt":"5ebf4b5b7fe1c3a0c0ab9aa3ac8c0f3853a7dc484905e76e03b0b0f301350009","src/zlib/doc/rfc1952.txt":"164ef0897b4cbec63abf1b57f069f3599bd0fb7c72c2a4dee21bd7e03ec9af67","src/zlib/doc/txtvsbin.txt":"d1549fb75137f03102798f70fd34ff76285e717ddd520dd82274c1c0510eacf0","src/zlib/examples/README.examples":"1bc1c677bbebe1aa5e85015bb62f0cf3fcdbf95652d30494159bee6166c1854a","src/zlib/examples/enough.c":"c14a257c60bbe0d65bb54746dd97774a1853ef9e3f78db118a27d8bc0d26d738","src/zlib/examples/fitblk.c":"fd8aaaefd5eb3d9fc388bdc5b715d1c6993ecc9367f5432d3b120a0278904edc","src/zlib/examples/gun.c":"3bfd36b06284ba97d6105b8a6a5d18b2b34b75b3a1285f16d018680fb174915f","src/zlib/examples/gzappend.c":"6de91c8305e37560117bff44136abff72b16b028c0bda0bbac7ea07e4988b0ce","src/zlib/examples/gzjoin.c":"90b9d6c39a5fc91cf1cc9b96b025a508a8015dc502cd9374c754b44078593f57","src/zlib/examples/gzlog.c":"196872021c96099fd30c880ac2cccd1350fdbd81179731f3914153a26ebf72e9","src/zlib/examples/gzlog.h":"681f280437f867820bf39880e2f4fc641d402879e399ba2e6a31d73feefe8edc","src/zlib/examples/gznorm.c":"e5a8f5c3b107f27212f7d5fbfcf072a337a1b4ea32929ae31c168997438a5cc0","src/zlib/examples/zlib_how.html":"80fb647be8450bd7a07d8495244e1f061dfbdbdb53172ca24e7ffff8ace9c72f","src/zlib/examples/zpipe.c":"68140a82582ede938159630bca0fb13a93b4bf1cb2e85b08943c26242cf8f3a6","src/zlib/examples/zran.c":"10f9568b1f54cdb7474a38c5bc479aa0edb07a0eed2e999bdad4c521f6b25330","src/zlib/examples/zran.h":"9a0d4c15f898c43deae2c5e98a5c66c637a1b25573d662fe91a789c386eaf971","src/zlib/gzclose.c":"94446cf8cde67c30e64d0a335b0c941fd3fbad2e77f30180d12e61f9c2a5a6b8","src/zlib/gzguts.h":"40d57fbcef5d4d849e416e03b680026b18302e4b0da5907a96a012d7ced91b8c","src/zlib/gzlib.c":"635b7b6df79a5ce6e0f951669e4c82704d7972d8afb87278b9155c2cb4c5066f","src/zlib/gzread.c":"41c69d43fb3974bae58d9169aea3514221f70dc77bb7a35c79626dd3be01adf2","src/zlib/gzwrite.c":"c7454689751c8f41ec63a1381a0053fb149095abe1c3b89c8a996b2d7ac8adce","src/zlib/infback.c":"6a6cfe3d7e239d590692bc2664ac58d3ef92be30ff4cb3c6dbf5deed28f79eb5","src/zlib/inffast.c":"41d93aefdbfee5455809130af74fcc76cf7259b1aa8b34d0060d14e57463e8bb","src/zlib/inffast.h":"7d8c1c873ce9bc346ad6005bb9d75cce5c6352aaf7395385be216a9452a34908","src/zlib/inffixed.h":"237ba710f090e432b62ebf963bee8b302867e9691406b2d3f8ee89ee7bfef9b0","src/zlib/inflate.c":"f1679575fef1717d908dd09d7bfe8fff89c21941cadd7c255a2ccccfba3a287e","src/zlib/inflate.h":"e8d4a51b07694bf48cb91979c19974cf6a5ab0b8a09d26ec0d14df349230673e","src/zlib/inftrees.c":"b9db40bbb68b63dccbcdfa78d687751e33178af8669f1c1236309cfd5d2edc0e","src/zlib/inftrees.h":"44084a93673386db6282dcb61d739c84518e10dff66d1c6850715137c827464c","src/zlib/make_vms.com":"14ed54bdd391c1648cedfb69d8a73a26dcc7f1187d59b0f18d944b7665cec85b","src/zlib/msdos/Makefile.bor":"292ab363f7ffbc4ae84d37cd9bdffd2dac1003bee52d223a8489844870f20702","src/zlib/msdos/Makefile.dj2":"9208450c2ae6dcbfcc25560b5b9ca763f461e7246e37b0552474edf8fa898906","src/zlib/msdos/Makefile.emx":"c749d6ec7f88e8e639d4f03bdbdcbbe9d1c304210be4c4be621ceb22961d3d64","src/zlib/msdos/Makefile.msc":"0e021a6f42212415b060e4ad468eb415d0a8c1f343137fb9dff2cb8f9ead3027","src/zlib/msdos/Makefile.tc":"2ae12ee2a3e62f7c5a0520d0fbe4adee772bc07fe816002b07ccb43db3daa76a","src/zlib/nintendods/Makefile":"ea5823efe6830132294eddf2f56dbd7db8712244c210bb4968c431b1a91bd066","src/zlib/nintendods/README":"e362426c47b39ff6a7d6c75c6660b20abf076cdfa5e1e421716dc629a71aef95","src/zlib/old/Makefile.emx":"d811f032272aae50123a889297af3a02fbd60d1e42bbef11466462f627ff7b5b","src/zlib/old/Makefile.riscos":"d1a488b160fbfd53272b68a913283a4be08ba9d490796b196dddb2ba535b41e0","src/zlib/old/README":"551a0f4d91fe0f827a31cbdfbb4a71d1f3dc4d06564d80a3f526b749dd104d11","src/zlib/old/descrip.mms":"8ff08c35c056df9c986f23c09cf8936db63ccf12c3c42f7d18a48b36f060cff7","src/zlib/old/os2/Makefile.os2":"6ad247c00f00ff42fd2d62555e86251cef06e4079378241b5f320c227507d51d","src/zlib/old/os2/zlib.def":"ea9c61876d2e20b67ef2d9495991a32798eb40d13ede95859a2f4f03b65b9b61","src/zlib/old/visual-basic.txt":"1727650acbde9a9e6aec9438896377e46a12699cca5d46c5399cef524dedc614","src/zlib/os400/README400":"5eb702a0dd460e2bea59ee83014c3f975e892057850c639f793bb740044a38ba","src/zlib/os400/bndsrc":"3c36a17975eed5a8d33bc5443b39fead1e68c01393496be9c1f4a61444bcb0f6","src/zlib/os400/make.sh":"143394d1e3876c61c29078c0e47310e726e1f5bd42739fe92df9ece65711655f","src/zlib/os400/zlib.inc":"dede38961ae2e7a2590343bf1ff558c6f51e46714dec33f2d11d8c34899b3875","src/zlib/qnx/package.qpg":"d521336be75bdd145281c6d166241905751ec97093ecd6fec97a313f631ac0e1","src/zlib/test/example.c":"64ae90d60b40a8aec4700e5c4e7a71898ebb92948b7a07f939b3e763cb3e8b35","src/zlib/test/infcover.c":"f654f3fcc74b33bd95cda63d13fe0ce589bcfe965544e0c17ee597d75efbd090","src/zlib/test/minigzip.c":"f9777d1e8b337573e12daa8091dcf22e88a9b155fc0acad15b8224c377bfe027","src/zlib/treebuild.xml":"89b50165782643554a38d5c58c203d9648b540e5a455531dcb58b5676a019955","src/zlib/trees.c":"b338f1ec9038bd77efc09c8fdb99ef27b5db5b3da9baa301e544adc8e3b6a662","src/zlib/trees.h":"bb0a9d3ca88ee00c81adb7c636e73b97085f6ef1b52d6d58edbe2b6dc3adeb4d","src/zlib/uncompr.c":"7b3d8ca0f10ef7c74044c3172ca8f9f50389cd0f270ee4517f438e7e06be5623","src/zlib/watcom/watcom_f.mak":"7e039b912f9cffaa40835281430bb284fa9042b0a0d12f6b34700a06bca6576e","src/zlib/watcom/watcom_l.mak":"d11b4064604a034725860e63e3f6d347056372e4b1675b183e20a93533b20cc9","src/zlib/win32/DLL_FAQ.txt":"9e00778319381e6275691dd3a89410c99065b8c0c5db96473abe8c859cbdefd8","src/zlib/win32/Makefile.bor":"7d73a0d2c3e38b7c610bbc9c22f683a4fe1ab9b8b65649a3a8ac4ff7fcc14ba6","src/zlib/win32/Makefile.gcc":"97140c30506a8f6b2edb6b3d8a1b6b539d7929d4b957deba9950301090f579bf","src/zlib/win32/Makefile.msc":"235529bd529d4690d5d4b7871fdd0a1f118f2fe18862cbdec5f5ac674c55a60d","src/zlib/win32/README-WIN32.txt":"f414b3702f8d3bf1de42e0f41604bd78c44e537aae16b6107e3cdaa5759caa16","src/zlib/win32/VisualC.txt":"9ec0babd46eaa012371dee2d3a8a55d9c7130f7895512c3371c737e4a7f6a997","src/zlib/win32/zlib.def":"c00693a5c825f8bfbdb68124fd03cb2fa5269338071147bdaa14434aaf3962b9","src/zlib/win32/zlib1.rc":"54e161029b59e99a4f9cb2281b956f00ecfb1814318ddef9c741ff4f832c5c1d","src/zlib/zconf.h":"80e0a31a4c0e6f20d1bad0df99271b9d535aa9f7c4e62f1a54f643adb4c6dfa2","src/zlib/zconf.h.cmakein":"bb12900d39488e6a9ed67ebd7cf5599f3ced8937b7077d4d5001e470c7a1392e","src/zlib/zconf.h.in":"80e0a31a4c0e6f20d1bad0df99271b9d535aa9f7c4e62f1a54f643adb4c6dfa2","src/zlib/zlib.3":"aefd0162070fcb0379dc18e27b039253cd98c148104c1097dd60e0d0b435e564","src/zlib/zlib.3.pdf":"91343dffd2876dcf4af567f299ce99872b066232451093d6d12e02e4654873d8","src/zlib/zlib.h":"a980a0d104198a53cc220c51ab5856e5be901bec8a2d02e0ee79a8754219dfed","src/zlib/zlib.map":"33e2a7c4defd6222945bb0f7191b6380afb4f518e804af86a44aad4a9090bf9e","src/zlib/zlib.pc.cmakein":"2f1d0b18ce37c2af415a469857f02aee2c41a58877aff21d29e9c6db32b55cb7","src/zlib/zlib.pc.in":"04c01cc2e1a0ed123518b5855f585c93a24526dd88982c414111ea1fc9f07997","src/zlib/zlib2ansi":"b3f9c88abbdf16143e5d5110e44fff198bcda9ee1358e036c8d445e9d0cbce85","src/zlib/zutil.c":"8108af451ad14271065844736ac7c436275b92826c319318070508d769371428","src/zlib/zutil.h":"cf94d865e3a9162c0571cba7f74c8f01efbdca26b981d6cc9c545d4c3991e3c2"},"package":"56ee889ecc9568871456d42f603d6a0ce59ff328d291063a45cbdf0036baf6db"}
+\ No newline at end of file
diff --git a/meta/recipes-devtools/rust/libstd-rs.inc b/meta/recipes-devtools/rust/libstd-rs.inc
deleted file mode 100644
index 987956344a..0000000000
--- a/meta/recipes-devtools/rust/libstd-rs.inc
+++ /dev/null
@@ -1,40 +0,0 @@
-SUMMARY = "Rust standard libaries"
-HOMEPAGE = "http://www.rust-lang.org"
-SECTION = "devel"
-LICENSE = "MIT | Apache-2.0"
-LIC_FILES_CHKSUM = "file://../../COPYRIGHT;md5=93a95682d51b4cb0a633a97046940ef0"
-
-RUSTLIB_DEP = ""
-inherit cargo
-
-DEPENDS:append:libc-musl = " libunwind"
-# rv32 does not have libunwind ported yet
-DEPENDS:remove:riscv32 = "libunwind"
-DEPENDS:remove:riscv64 = "libunwind"
-
-# Embed bitcode in order to allow compiling both with and without LTO
-RUSTFLAGS += "-Cembed-bitcode=yes"
-# Needed so cargo can find libbacktrace
-RUSTFLAGS += "-L ${STAGING_LIBDIR} -C link-arg=-Wl,-soname,libstd.so"
-
-S = "${RUSTSRC}/src/libstd"
-
-CARGO_FEATURES ?= "panic-unwind backtrace"
-CARGO_BUILD_FLAGS += "--features '${CARGO_FEATURES}'"
-CARGO_VENDORING_DIRECTORY = "${RUSTSRC}/vendor"
-
-do_compile:prepend () {
- export CARGO_TARGET_DIR="${B}"
- # For Rust 1.13.0 and newer
- export RUSTC_BOOTSTRAP="1"
-}
-
-do_install () {
- mkdir -p ${D}${rustlibdir}
-
- # With the incremental build support added in 1.24, the libstd deps directory also includes dependency
- # files that get installed. Those are really only needed to incrementally rebuild the libstd library
- # itself and don't need to be installed.
- rm -f ${B}/${TARGET_SYS}/${BUILD_DIR}/deps/*.d
- cp ${B}/${TARGET_SYS}/${BUILD_DIR}/deps/* ${D}${rustlibdir}
-}
diff --git a/meta/recipes-devtools/rust/libstd-rs/0001-Add-400-series-syscalls-to-musl-riscv64-definitions.patch b/meta/recipes-devtools/rust/libstd-rs/0001-Add-400-series-syscalls-to-musl-riscv64-definitions.patch
deleted file mode 100644
index 2f2ca27910..0000000000
--- a/meta/recipes-devtools/rust/libstd-rs/0001-Add-400-series-syscalls-to-musl-riscv64-definitions.patch
+++ /dev/null
@@ -1,44 +0,0 @@
-From 7b3bc1de0c79a1b410105ce36bbe9f774438d263 Mon Sep 17 00:00:00 2001
-From: Ross Schulman <ross@rbs.io>
-Date: Tue, 1 Feb 2022 09:13:16 -0500
-Subject: [PATCH] Add 400-series syscalls to musl riscv64 definitions
-
-Upstream-Status: Backport [https://github.com/rust-lang/libc/commit/7b3bc1de0c79a1b410105ce36bbe9f774438d263]
-Signed-off-by: Khem Raj <raj.khem@gmail.com>
----
- .../linux_like/linux/musl/b64/riscv64/mod.rs | 19 +++++++++++++++++++
- 1 file changed, 19 insertions(+)
-
-diff --git a/vendor/libc-0.2.116/src/unix/linux_like/linux/musl/b64/riscv64/mod.rs b/vendor/libc-0.2.116/src/unix/linux_like/linux/musl/b64/riscv64/mod.rs
-index 6b17621c7..2036583d5 100644
---- a/vendor/libc-0.2.116/src/unix/linux_like/linux/musl/b64/riscv64/mod.rs
-+++ b/vendor/libc-0.2.116/src/unix/linux_like/linux/musl/b64/riscv64/mod.rs
-@@ -465,6 +465,25 @@ pub const SYS_pkey_mprotect: ::c_long = 288;
- pub const SYS_pkey_alloc: ::c_long = 289;
- pub const SYS_pkey_free: ::c_long = 290;
- pub const SYS_statx: ::c_long = 291;
-+pub const SYS_pidfd_send_signal: ::c_long = 424;
-+pub const SYS_io_uring_setup: ::c_long = 425;
-+pub const SYS_io_uring_enter: ::c_long = 426;
-+pub const SYS_io_uring_register: ::c_long = 427;
-+pub const SYS_open_tree: ::c_long = 428;
-+pub const SYS_move_mount: ::c_long = 429;
-+pub const SYS_fsopen: ::c_long = 430;
-+pub const SYS_fsconfig: ::c_long = 431;
-+pub const SYS_fsmount: ::c_long = 432;
-+pub const SYS_fspick: ::c_long = 433;
-+pub const SYS_pidfd_open: ::c_long = 434;
-+pub const SYS_clone3: ::c_long = 435;
-+pub const SYS_close_range: ::c_long = 436;
-+pub const SYS_openat2: ::c_long = 437;
-+pub const SYS_pidfd_getfd: ::c_long = 438;
-+pub const SYS_faccessat2: ::c_long = 439;
-+pub const SYS_process_madvise: ::c_long = 440;
-+pub const SYS_epoll_pwait2: ::c_long = 441;
-+pub const SYS_mount_setattr: ::c_long = 442;
-
- pub const O_APPEND: ::c_int = 1024;
- pub const O_DIRECT: ::c_int = 0x4000;
---
-2.35.1
-
diff --git a/meta/recipes-devtools/rust/libstd-rs/0001-Update-checksums-for-modified-vendored-libc.patch b/meta/recipes-devtools/rust/libstd-rs/0001-Update-checksums-for-modified-vendored-libc.patch
deleted file mode 100644
index 923a8fbf84..0000000000
--- a/meta/recipes-devtools/rust/libstd-rs/0001-Update-checksums-for-modified-vendored-libc.patch
+++ /dev/null
@@ -1,18 +0,0 @@
-From 7d16c193959f306b50978b415cc3718029c0d8d5 Mon Sep 17 00:00:00 2001
-From: Khem Raj <raj.khem@gmail.com>
-Date: Mon, 14 Mar 2022 11:06:39 -0700
-Subject: [PATCH] Update checksums for modified vendored libc
-
-Upstream-Status: Inappropriate [Relevant until backported patch is needed]
-
-Signed-off-by: Khem Raj <raj.khem@gmail.com>
----
- vendor/libc-0.2.116/.cargo-checksum.json | 2 +-
- 1 file changed, 1 insertion(+), 1 deletion(-)
-
---- a/vendor/libc-0.2.116/.cargo-checksum.json
-+++ b/vendor/libc-0.2.116/.cargo-checksum.json
-@@ -1 +1 @@
--{"files":{"CONTRIBUTING.md":"752eea5a703d11b485c6b5f195f51bd2c79aa5159b619ce09555c779e1fb586b","Cargo.toml":"5bb1b7c99c97f4a5c211e2803482b3bac420044ad1c53a24c06630d4c8df3348","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"a8d47ff51ca256f56a8932dba07660672dbfe3004257ca8de708aac1415937a1","README.md":"8228847944f1332882fbb00275b6f30e4a8aad08a13569c25d52cac012cc2a47","build.rs":"0a4edcc040533d370a2a736f2e218516182471e8b0d9ed5dc2a6c08d5d852a83","rustfmt.toml":"eaa2ea84fc1ba0359b77680804903e07bb38d257ab11986b95b158e460f787b2","src/fixed_width_ints.rs":"34c60f12ec5eeb90f13ec3b954427532111c2446e69617616a97aefc1086a9f1","src/fuchsia/aarch64.rs":"378776a9e40766154a54c94c2a7b4675b5c302a38e6e42da99e67bfbaee60e56","src/fuchsia/align.rs":"ae1cf8f011a99737eabeb14ffff768e60f13b13363d7646744dbb0f443dab3d6","src/fuchsia/mod.rs":"bc8c46531bd1a2429f36aaf2bc137b50e42505b798de83f34eecfa94ad89179b","src/fuchsia/no_align.rs":"303f3f1b255e0088b5715094353cf00476131d8e94e6aebb3f469557771c8b8a","src/fuchsia/x86_64.rs":"93a3632b5cf67d2a6bcb7dc0a558605252d5fe689e0f38d8aa2ec5852255ac87","src/hermit/aarch64.rs":"86048676e335944c37a63d0083d0f368ae10ceccefeed9debb3bbe08777fc682","src/hermit/mod.rs":"d3bfce41e4463d4be8020a2d063c9bfa8b665f45f1cc6cbf3163f5d01e7cb21f","src/hermit/x86_64.rs":"ab832b7524e5fb15c49ff7431165ab1a37dc4667ae0b58e8306f4c539bfa110c","src/lib.rs":"adc4f6af60e57eadc01042ec4312bd592fa3002850b8c70740bfd95ddd2d77fb","src/macros.rs":"148cf62f43a1732a8f37c5e7c9673e51b69cee124c27ee9883c94d2da8edff20","src/psp.rs":"dd31aabd46171d474ec5828372e28588935120e7355c90c105360d8fa9264c1c","src/sgx.rs":"16a95cdefc81c5ee00d8353a60db363c4cc3e0f75abcd5d0144723f2a306ed1b","src/solid/aarch64.rs":"a726e47f324adf73a4a0b67a2c183408d0cad105ae66acf36db37a42ab7f8707","src/solid/arm.rs":"e39a4f74ebbef3b97b8c95758ad741123d84ed3eb48d9cf4f1f4872097fc27fe","src/solid/mod.rs":"98dc29c3e5773318ba258a4a2b7407f25aa52d40885a0024fbe43676fe54d14b","src/switch.rs":"9da3dd39b3de45a7928789926e8572d00e1e11a39e6f7289a1349aadce90edba","src/unix/align.rs":"2cdc7c826ef7ae61f5171c5ae8c445a743d86f1a7f2d9d7e4ceeec56d6874f65","src/unix/bsd/apple/b32/align.rs":"ec833a747866fe19ca2d9b4d3c9ff0385faba5edf4bd0d15fa68884c40b0e26c","src/unix/bsd/apple/b32/mod.rs":"2546ad3eb6aecb95f916648bc63264117c92b4b4859532b34cb011e4c75a5a72","src/unix/bsd/apple/b64/aarch64/align.rs":"f0c321265dd7671f16106b84951ac7dd77ed2e65c6623cbf2d29e76531984770","src/unix/bsd/apple/b64/aarch64/mod.rs":"44c217a4f263afe7a97435de9323d20a96c37836f899ca0925306d4b7e073c27","src/unix/bsd/apple/b64/align.rs":"ec833a747866fe19ca2d9b4d3c9ff0385faba5edf4bd0d15fa68884c40b0e26c","src/unix/bsd/apple/b64/mod.rs":"f5e278a1af7fb358891d1c9be4eb7e815aaca0c5cb738d0c3604ba2208a856f7","src/unix/bsd/apple/b64/x86_64/align.rs":"ec833a747866fe19ca2d9b4d3c9ff0385faba5edf4bd0d15fa68884c40b0e26c","src/unix/bsd/apple/b64/x86_64/mod.rs":"8c87c5855038aae5d433c8f5eb3b29b0a175879a0245342b3bfd83bdf4cfd936","src/unix/bsd/apple/mod.rs":"394a28c9924c4d973d980dc529627cdccf6ca710f7aec46500a868fa76f493df","src/unix/bsd/freebsdlike/dragonfly/errno.rs":"8295b8bb0dfd38d2cdb4d9192cdeeb534cc6c3b208170e64615fa3e0edb3e578","src/unix/bsd/freebsdlike/dragonfly/mod.rs":"829c863803f2029396177026b6f6bd7f91e857032c5a133aa48247a6c3c01431","src/unix/bsd/freebsdlike/freebsd/aarch64.rs":"2a215bd6136b8617aacedf9be738ccee94da9d29b418e9a78101d6291c182352","src/unix/bsd/freebsdlike/freebsd/arm.rs":"59d6a670eea562fb87686e243e0a84603d29a2028a3d4b3f99ccc01bd04d2f47","src/unix/bsd/freebsdlike/freebsd/freebsd11/b64.rs":"9808d152c1196aa647f1b0f0cf84dac8c930da7d7f897a44975545e3d9d17681","src/unix/bsd/freebsdlike/freebsd/freebsd11/mod.rs":"a6eee615e6ca5a6e04b526bb6b22d13b9356e87e51825cda33476c37a46cb0ef","src/unix/bsd/freebsdlike/freebsd/freebsd12/b64.rs":"61cbe45f8499bedb168106b686d4f8239472f25c7553b069eec2afe197ff2df6","src/unix/bsd/freebsdlike/freebsd/freebsd12/mod.rs":"266fe364867d858610b51a950e936fb10c7990f5e627cd59f7947f4b232ab259","src/unix/bsd/freebsdlike/freebsd/freebsd12/x86_64.rs":"2df36a7f122f6d6e5753cfb4d22e915cc80f6bc91c0161b3daae55a481bfd052","src/unix/bsd/freebsdlike/freebsd/freebsd13/b64.rs":"61cbe45f8499bedb168106b686d4f8239472f25c7553b069eec2afe197ff2df6","src/unix/bsd/freebsdlike/freebsd/freebsd13/mod.rs":"12bf5c9aef74d43005aca762a48c2c6d64c21d138bd789b20f7143fc20a6db95","src/unix/bsd/freebsdlike/freebsd/freebsd13/x86_64.rs":"2df36a7f122f6d6e5753cfb4d22e915cc80f6bc91c0161b3daae55a481bfd052","src/unix/bsd/freebsdlike/freebsd/freebsd14/b64.rs":"61cbe45f8499bedb168106b686d4f8239472f25c7553b069eec2afe197ff2df6","src/unix/bsd/freebsdlike/freebsd/freebsd14/mod.rs":"fdbc49a641a247aabb6e725647826bc5341447d9bff1319bfe092ba5fcda7b26","src/unix/bsd/freebsdlike/freebsd/freebsd14/x86_64.rs":"2df36a7f122f6d6e5753cfb4d22e915cc80f6bc91c0161b3daae55a481bfd052","src/unix/bsd/freebsdlike/freebsd/mod.rs":"9e6d77f35ff7d9f3cdd8596ca88f2eb39fd74cd8a135f99cc487fdb164422f83","src/unix/bsd/freebsdlike/freebsd/powerpc.rs":"9ca3f82f88974e6db5569f2d76a5a3749b248a31747a6c0da5820492bdfeca42","src/unix/bsd/freebsdlike/freebsd/powerpc64.rs":"2dae3ecc87eac3b11657aa98915def55fc4b5c0de11fe26aae23329a54628a9a","src/unix/bsd/freebsdlike/freebsd/riscv64.rs":"8f591bd273464d684c4f64365f8ed56a8138175daa70d96008541393057a0dae","src/unix/bsd/freebsdlike/freebsd/x86.rs":"c5005e3249eb7c93cfbac72a9e9272320d80ce7983da990ceb05a447f59a02c5","src/unix/bsd/freebsdlike/freebsd/x86_64/align.rs":"0e1f69a88fca1c32874b1daf5db3d446fefbe518dca497f096cc9168c39dde70","src/unix/bsd/freebsdlike/freebsd/x86_64/mod.rs":"51e4dd0c8ae247bb652feda5adad9333ea3bb30c750c3a3935e0b0e47d7803eb","src/unix/bsd/freebsdlike/mod.rs":"365ab1c1ce8b7ca061b1bdb38aac534cb600e978746eb309f6d1792b3f67db39","src/unix/bsd/mod.rs":"2fed08973739d7f6a7412b204724af64b1a915c712689c250cf9a08263ba05ff","src/unix/bsd/netbsdlike/mod.rs":"07b97b75fa72215b54c9869f50eee3167ea835fd674cf3fa036bdbd6904c563b","src/unix/bsd/netbsdlike/netbsd/aarch64.rs":"65dcb58d11e8d8028401a9d07ca3eb4cb4f053e04249cc877353449d84ccc4cb","src/unix/bsd/netbsdlike/netbsd/arm.rs":"58cdbb70b0d6f536551f0f3bb3725d2d75c4690db12c26c034e7d6ec4a924452","src/unix/bsd/netbsdlike/netbsd/mod.rs":"5f927d3d26165689b3ae7a7535ea5504068a406ef76705572c610995dbea1f0e","src/unix/bsd/netbsdlike/netbsd/powerpc.rs":"ee7ff5d89d0ed22f531237b5059aa669df93a3b5c489fa641465ace8d405bf41","src/unix/bsd/netbsdlike/netbsd/sparc64.rs":"9489f4b3e4566f43bb12dfb92238960613dac7f6a45cc13068a8d152b902d7d9","src/unix/bsd/netbsdlike/netbsd/x86.rs":"20692320e36bfe028d1a34d16fe12ca77aa909cb02bda167376f98f1a09aefe7","src/unix/bsd/netbsdlike/netbsd/x86_64.rs":"1afe5ef46b14397cdd68664b5b232e4f5b035b6db1d4cf411c899d51ebca9f30","src/unix/bsd/netbsdlike/openbsd/aarch64.rs":"dd91931d373b7ecaf6e2de25adadee10d16fa9b12c2cbacdff3eb291e1ba36af","src/unix/bsd/netbsdlike/openbsd/arm.rs":"01580d261bc6447bb327a0d982181b7bdabfa066cee65a30373d3ced729ad307","src/unix/bsd/netbsdlike/openbsd/mips64.rs":"8532a189ae10c7d668d9d4065da8b05d124e09bd39442c9f74a7f231c43eca48","src/unix/bsd/netbsdlike/openbsd/mod.rs":"98c95365b892679b4976b928714a0b6fc61f01fe662e7066170448d6c1eaef5e","src/unix/bsd/netbsdlike/openbsd/powerpc.rs":"01580d261bc6447bb327a0d982181b7bdabfa066cee65a30373d3ced729ad307","src/unix/bsd/netbsdlike/openbsd/powerpc64.rs":"1dd5449dd1fd3d51e30ffdeeaece91d0aaf05c710e0ac699fecc5461cfa2c28e","src/unix/bsd/netbsdlike/openbsd/riscv64.rs":"1dd5449dd1fd3d51e30ffdeeaece91d0aaf05c710e0ac699fecc5461cfa2c28e","src/unix/bsd/netbsdlike/openbsd/sparc64.rs":"d04fd287afbaa2c5df9d48c94e8374a532a3ba491b424ddf018270c7312f4085","src/unix/bsd/netbsdlike/openbsd/x86.rs":"6f7f5c4fde2a2259eb547890cbd86570cea04ef85347d7569e94e679448bec87","src/unix/bsd/netbsdlike/openbsd/x86_64.rs":"d31db31630289c85af3339dbe357998a21ca584cbae31607448fe2cf7675a4e1","src/unix/haiku/b32.rs":"a2efdbf7158a6da341e1db9176b0ab193ba88b449616239ed95dced11f54d87b","src/unix/haiku/b64.rs":"ff8115367d3d7d354f792d6176dfaaa26353f57056197b563bf4681f91ff7985","src/unix/haiku/mod.rs":"41c1cc641a21a2433fe38e9b4038c4ac94ef10a00c38351c79c4e7f3affadc6e","src/unix/haiku/native.rs":"44855f52906f607de137fc4baa8c6b1b9a26baaa666f25d5f7a7ec5e017c8be6","src/unix/haiku/x86_64.rs":"3ec3aeeb7ed208b8916f3e32d42bfd085ff5e16936a1a35d9a52789f043b7237","src/unix/hermit/aarch64.rs":"86048676e335944c37a63d0083d0f368ae10ceccefeed9debb3bbe08777fc682","src/unix/hermit/mod.rs":"859814f5df89e28fd4b345db399d181e11e7ed413841b6ff703a1fcbdbf013ae","src/unix/hermit/x86_64.rs":"ab832b7524e5fb15c49ff7431165ab1a37dc4667ae0b58e8306f4c539bfa110c","src/unix/linux_like/android/b32/arm.rs":"433c1530f602cc5ed26610c58055dde0c4ceea5e00150063b24ddc60768332a4","src/unix/linux_like/android/b32/mod.rs":"d971b98530a96f5892f98e1edc3133cf278d1b3939d77ab0a27a6323e0961715","src/unix/linux_like/android/b32/x86/align.rs":"812914e4241df82e32b12375ca3374615dc3a4bdd4cf31f0423c5815320c0dab","src/unix/linux_like/android/b32/x86/mod.rs":"8388bd3a0fcb5636bf965eee6dc95ae6860b85a2b555b387c868aa4d4e01ec89","src/unix/linux_like/android/b64/aarch64/align.rs":"2179c3b1608fa4bf68840482bfc2b2fa3ee2faf6fcae3770f9e505cddca35c7b","src/unix/linux_like/android/b64/aarch64/mod.rs":"78b837d764c5ab6d262f12c3824d8ef05a94c03b9342b88462454ca7a52d203d","src/unix/linux_like/android/b64/mod.rs":"d7bbbadafdb2cb2ff8e9cde3d89a03b9facaabb6b2d45705225d3ece1c5cce37","src/unix/linux_like/android/b64/x86_64/align.rs":"7169d07a9fd4716f7512719aec9fda5d8bed306dc0720ffc1b21696c9951e3c6","src/unix/linux_like/android/b64/x86_64/mod.rs":"06b22dc184e85a988ab969c75cadc52b9878faeab0eb9d21215878c95fe07c19","src/unix/linux_like/android/mod.rs":"f19d7ce918bd273709578b3cfc3be02192ff1b0d4561de0018796db3fe04b179","src/unix/linux_like/emscripten/align.rs":"86c95cbed7a7161b1f23ee06843e7b0e2340ad92b2cb86fe2a8ef3e0e8c36216","src/unix/linux_like/emscripten/mod.rs":"b71d37106750f57bc2dae4e9bcb473ff098ef48235827e41a1687a39825f0aa4","src/unix/linux_like/emscripten/no_align.rs":"0128e4aa721a9902754828b61b5ec7d8a86619983ed1e0544a85d35b1051fad6","src/unix/linux_like/linux/align.rs":"213e70ebed2703e14a9cf17666b21ecbf180b7bff7fa22fdbb36dbbd52df326d","src/unix/linux_like/linux/arch/generic/mod.rs":"b6efaded9b9631410a265dd52398d9bddb387d35b9f513e9c0432dc5233dda39","src/unix/linux_like/linux/arch/mips/mod.rs":"4588078cd2b1c046379c8a55f728b838ff7f9151fcd8a1a5389683f1bda13550","src/unix/linux_like/linux/arch/mod.rs":"466a29622e47c6c7f1500682b2eb17f5566dd81b322cd6348f0fdd355cec593a","src/unix/linux_like/linux/arch/powerpc/mod.rs":"c447e417cdb966a4da64e2755fd91f64ab14098f529098bf437053d7e08843db","src/unix/linux_like/linux/arch/sparc/mod.rs":"9b89a4e798bb83b3e0f4052fa2d2bb6f5e9508da69a2e5b40b40795e4b4256ae","src/unix/linux_like/linux/gnu/align.rs":"e4a3c27fe20a57b8d612c34cb05bc70646edb5cec7251957315afa53a7b9f936","src/unix/linux_like/linux/gnu/b32/arm/align.rs":"3fed009dc9af3cc81be7087da9d2d7d1f39845e4497e290259c5cdbae25f039d","src/unix/linux_like/linux/gnu/b32/arm/mod.rs":"877ff42aa56e599084df9a8374e1f3eaf8ec05a445f2887b10aee5744c093f02","src/unix/linux_like/linux/gnu/b32/mips/align.rs":"429fb5e005cb7143602d430098b6ebfb7d360685b194f333dfd587472ae954ee","src/unix/linux_like/linux/gnu/b32/mips/mod.rs":"9fdf7cf98bc92153854ba04b2892009ec8bd855d12e1f68e91d3df2418ff6364","src/unix/linux_like/linux/gnu/b32/mod.rs":"ad23b70a5f849d5a6dbbf6d608221d2928b1834956d71d072bcc0eb941b0d856","src/unix/linux_like/linux/gnu/b32/powerpc.rs":"240468afe2fe42fd60a5568add147dc0ff097a6469ffadfdd5b9756dfd0323f2","src/unix/linux_like/linux/gnu/b32/riscv32/mod.rs":"dc99518121e911db120b157ad289e5e6964f3a42df53b091eaf071dbf5c04098","src/unix/linux_like/linux/gnu/b32/sparc/align.rs":"21adbed27df73e2d1ed934aaf733a643003d7baf2bde9c48ea440895bcca6d41","src/unix/linux_like/linux/gnu/b32/sparc/mod.rs":"ae203726810da9312b484b0b7891a35527748c069f11e91d6d9afc73297b1cbb","src/unix/linux_like/linux/gnu/b32/x86/align.rs":"e4bafdc4a519a7922a81b37a62bbfd1177a2f620890eef8f1fbc47162e9eb413","src/unix/linux_like/linux/gnu/b32/x86/mod.rs":"eb8d002ccb3dfcf6c94e8826afe9f8c496a813fa3d770c7dbc1024b45f870c2f","src/unix/linux_like/linux/gnu/b64/aarch64/align.rs":"2179c3b1608fa4bf68840482bfc2b2fa3ee2faf6fcae3770f9e505cddca35c7b","src/unix/linux_like/linux/gnu/b64/aarch64/ilp32.rs":"21a21503ef2e095f4371044915d4bfb07a8578011cb5c713cd9f45947b0b5730","src/unix/linux_like/linux/gnu/b64/aarch64/lp64.rs":"e78c3cd197f44832338b414d1a9bc0d194f44c74db77bd7bf830c1fff62b2690","src/unix/linux_like/linux/gnu/b64/aarch64/mod.rs":"79991b28010bfa01501dc5aa0ba2f7d8394539447398169a052c541cb21e20bd","src/unix/linux_like/linux/gnu/b64/mips64/align.rs":"7169d07a9fd4716f7512719aec9fda5d8bed306dc0720ffc1b21696c9951e3c6","src/unix/linux_like/linux/gnu/b64/mips64/mod.rs":"506050a20f8de53907a893d2711f1bed6e6c019580d5dde55f5dbdcfa40802f5","src/unix/linux_like/linux/gnu/b64/mod.rs":"6336065423c26b59681fd2ce77f1117ea36af13e163fdadaefd108bd8191e8c8","src/unix/linux_like/linux/gnu/b64/powerpc64/align.rs":"e29c4868bbecfa4a6cd8a2ad06193f3bbc78a468cc1dc9df83f002f1268130d9","src/unix/linux_like/linux/gnu/b64/powerpc64/mod.rs":"b53ee7b973b91c2175d16476c87547f8b36c2f0b716f456018fdd2728ab4d59f","src/unix/linux_like/linux/gnu/b64/riscv64/mod.rs":"7cbadd208b719c3cbab0432cb6bb236f1e79a01823790aa6b07366433ca0991b","src/unix/linux_like/linux/gnu/b64/s390x.rs":"59228db655d6a54d20b9431c705ed7966f94b98d61e53b992c8f8ee05e176e77","src/unix/linux_like/linux/gnu/b64/sparc64/align.rs":"e29c4868bbecfa4a6cd8a2ad06193f3bbc78a468cc1dc9df83f002f1268130d9","src/unix/linux_like/linux/gnu/b64/sparc64/mod.rs":"cb9750d9153428712eb32928fb85a1e67f9687f42ef698c3bb79951c219361d0","src/unix/linux_like/linux/gnu/b64/x86_64/align.rs":"7169d07a9fd4716f7512719aec9fda5d8bed306dc0720ffc1b21696c9951e3c6","src/unix/linux_like/linux/gnu/b64/x86_64/mod.rs":"e4cbe0f5a3d100490f4f648bcadcd26899cf2f4760e6ee8ccb56ce16d598cc18","src/unix/linux_like/linux/gnu/b64/x86_64/not_x32.rs":"f775ac2b754f90b63053fe22afe1d19d306b5404995568d6805baa9249fb617f","src/unix/linux_like/linux/gnu/b64/x86_64/x32.rs":"4ba1b58468f55254717366f50fdfd3e4114fde6dc442a56681926c4d7e5b6b0d","src/unix/linux_like/linux/gnu/mod.rs":"13899330d3118c5e41854f897c2e455f1b7db949384d36cff1d3042fd25a4475","src/unix/linux_like/linux/gnu/no_align.rs":"9cd223135de75315840ff9c3fd5441ba1cb632b96b5c85a76f8316c86653db25","src/unix/linux_like/linux/mod.rs":"3aaa8a783a1ec2134e27a45d7de6296b97f94e543bf5ec2c6c4ea5d24afb167f","src/unix/linux_like/linux/musl/b32/arm/align.rs":"3e8ac052c1043764776b54c93ba4260e061df998631737a897d9d47d54f7b80c","src/unix/linux_like/linux/musl/b32/arm/mod.rs":"8625290735a1294a50b34bb2b8d3b515305325187027cc577c72b0762f3a265e","src/unix/linux_like/linux/musl/b32/hexagon.rs":"dbc8aeb2bd3cd2269e0e95858c5ba0e0204b1ca0bcc5df517fb3f7f71134d044","src/unix/linux_like/linux/musl/b32/mips/align.rs":"429fb5e005cb7143602d430098b6ebfb7d360685b194f333dfd587472ae954ee","src/unix/linux_like/linux/musl/b32/mips/mod.rs":"65e3917900a9dfa0b4602e87d17305ebfa56a0c0dea61e3975099c9a6b9e967b","src/unix/linux_like/linux/musl/b32/mod.rs":"bac24312f6629ef76aa12dd41123061e3a77a244e7fda7bbfcdf5c57996b61e6","src/unix/linux_like/linux/musl/b32/powerpc.rs":"e898b490365c84c0f1856ea8640463c558b5a247d845ca0c11677679272ea20f","src/unix/linux_like/linux/musl/b32/x86/align.rs":"08e77fbd7435d7dec2ff56932433bece3f02e47ce810f89004a275a86d39cbe1","src/unix/linux_like/linux/musl/b32/x86/mod.rs":"9a9f4926d5a1fbe406f5ab84742680fe4941ab929005e7fefc4e2bc6686ba5eb","src/unix/linux_like/linux/musl/b64/aarch64/align.rs":"798a9229d70ce235394f2dd625f6c4c1e10519a94382dc5b091952b638ae2928","src/unix/linux_like/linux/musl/b64/aarch64/mod.rs":"cddcde68c13104f18edd611ce23e60a4f9b4fcae1baa57fb92284495d3a77850","src/unix/linux_like/linux/musl/b64/mips64.rs":"4a5d543195da94aaa45be067d6d145c8092bebf3611898aa2beccc0cd55d5f3f","src/unix/linux_like/linux/musl/b64/mod.rs":"d847206d9f2d594c8febe780a938cdccf40d985dafc11e90f235947735a09bac","src/unix/linux_like/linux/musl/b64/powerpc64.rs":"37b08e5f5f84bc72b62c439ac9282db71ff59b80cd2775c5588941b7a135e6bc","src/unix/linux_like/linux/musl/b64/riscv64/mod.rs":"88d58ec165c404ab8e44ae43ee51ae6982924dae07309dbc504404f142515b7a","src/unix/linux_like/linux/musl/b64/s390x.rs":"9b05b1fae6bcb7cb6d909b9973977fde01684175f3e26c27dcb44223cc3933d9","src/unix/linux_like/linux/musl/b64/x86_64/align.rs":"7169d07a9fd4716f7512719aec9fda5d8bed306dc0720ffc1b21696c9951e3c6","src/unix/linux_like/linux/musl/b64/x86_64/mod.rs":"d3762f374cd3aaa1e0e3f79d36f9edc7fd4c8492e561ee4bd0e80e727eef4e9e","src/unix/linux_like/linux/musl/mod.rs":"c9a24828d62f20e0cc0b835197f48bc31aa51db7ca4880341292271cc2df7aae","src/unix/linux_like/linux/no_align.rs":"5ed04c53bf9d27da9b4d65ba7625c6ac53330162683d1b3df98950caafa3507b","src/unix/linux_like/linux/non_exhaustive.rs":"181a05bf94fdb911db83ce793b993bd6548a4115b306a7ef3c10f745a8fea3e9","src/unix/linux_like/linux/uclibc/align.rs":"9ed16138d8e439bd90930845a65eafa7ebd67366e6bf633936d44014f6e4c959","src/unix/linux_like/linux/uclibc/arm/align.rs":"e4a3c27fe20a57b8d612c34cb05bc70646edb5cec7251957315afa53a7b9f936","src/unix/linux_like/linux/uclibc/arm/mod.rs":"6d83b37ebe54384269b5c5cc3c9c370f97bdc8c646c9aee8705f0de1aca80067","src/unix/linux_like/linux/uclibc/arm/no_align.rs":"9cd223135de75315840ff9c3fd5441ba1cb632b96b5c85a76f8316c86653db25","src/unix/linux_like/linux/uclibc/mips/mips32/align.rs":"e4a3c27fe20a57b8d612c34cb05bc70646edb5cec7251957315afa53a7b9f936","src/unix/linux_like/linux/uclibc/mips/mips32/mod.rs":"edb86245bcc0e340de277e20752463cb4004104fe97737a71afdcc8b06bee703","src/unix/linux_like/linux/uclibc/mips/mips32/no_align.rs":"9cd223135de75315840ff9c3fd5441ba1cb632b96b5c85a76f8316c86653db25","src/unix/linux_like/linux/uclibc/mips/mips64/align.rs":"a7bdcb18a37a2d91e64d5fad83ea3edc78f5412adb28f77ab077dbb26dd08b2d","src/unix/linux_like/linux/uclibc/mips/mips64/mod.rs":"57d5e24ceb98e1ef90de231c4cf7f0275ac410d407607b240af6744504ec1f2a","src/unix/linux_like/linux/uclibc/mips/mips64/no_align.rs":"4a18e3875698c85229599225ac3401a2a40da87e77b2ad4ef47c6fcd5a24ed30","src/unix/linux_like/linux/uclibc/mips/mod.rs":"4eea5256e47e485dd1e50c147a56697d286e0395a77674317e83a28fb43a12d8","src/unix/linux_like/linux/uclibc/mod.rs":"d2f5d6a3381bceec0b036bb3979c113c9c4144b134368e39b3f9f2fbef79f5cc","src/unix/linux_like/linux/uclibc/no_align.rs":"3f28637046524618adaa1012e26cb7ffe94b9396e6b518cccdc69d59f274d709","src/unix/linux_like/linux/uclibc/x86_64/l4re.rs":"024eba5753e852dbdd212427351affe7e83f9916c1864bce414d7aa2618f192e","src/unix/linux_like/linux/uclibc/x86_64/mod.rs":"583349563b537104e2265be893f65e302e568a54f4022b8f14a2cf3200ac39ba","src/unix/linux_like/linux/uclibc/x86_64/other.rs":"42c3f71e58cabba373f6a55a623f3c31b85049eb64824c09c2b082b3b2d6a0a8","src/unix/linux_like/mod.rs":"dd4f7a1d66d8501b4a2c4e75e6e9305ed69f1002ae99e410596a6c636878595a","src/unix/mod.rs":"22300f25d8f3adcdcd419222a2d5657e44c835eb4a0f90e05b691c7bcc3a787e","src/unix/newlib/aarch64/mod.rs":"bb269c1468a9676442554600e87417079a787fe6220dfc23b3109639259e8710","src/unix/newlib/align.rs":"28aaf87fafbc6b312622719d472d8cf65f9e5467d15339df5f73e66d8502b28a","src/unix/newlib/arm/mod.rs":"c71be856bfd7f576b2db28af9f680211cbe6c1cac7d537bbc8020b39591af07c","src/unix/newlib/espidf/mod.rs":"c198cb4beccdab483be61c102da74dc51ac80f766797e33021f3110394ed5a3d","src/unix/newlib/horizon/mod.rs":"95cb1e5f469a3d90ce27f03d8f70121c82edd26ab762db4aa373857b87e12d98","src/unix/newlib/mod.rs":"c33c46ad76627c24927b63953d9271e01bbc7f4967381675baf61442470a41cb","src/unix/newlib/no_align.rs":"e0743b2179495a9514bc3a4d1781e492878c4ec834ee0085d0891dd1712e82fb","src/unix/newlib/powerpc/mod.rs":"2d0f7af28b47f7a2a6c210ebd1c1f33ed8eac62e56b5af2b856de2ad3fdc5187","src/unix/no_align.rs":"c06e95373b9088266e0b14bba0954eef95f93fb2b01d951855e382d22de78e53","src/unix/redox/mod.rs":"cc4794afa4fbed9d6612894ea476228f9d8533950162d6416fc4d16073e1dac4","src/unix/solarish/compat.rs":"b07a5bfac925eb012003a459ba6bddbd3bfa9c44b3394da2ac5a602e54beae9c","src/unix/solarish/illumos.rs":"a092f6676a0d208e803819bf32f928714e93606fb9062cf55b5fb7a121d47481","src/unix/solarish/mod.rs":"01dc4208b5b0d901550dc9a6f060e7d446a303919287b328fda556d9dc569b34","src/unix/solarish/solaris.rs":"65b005453aefa9b9d4fc860fe77cfec80d8c97a51342b15daf55fc3e808bb384","src/unix/solarish/x86_64.rs":"9d761573bdccbdd1ac61da571f7e96b5374df70241d9b72d45a069611f495085","src/vxworks/aarch64.rs":"98f0afdc511cd02557e506c21fed6737585490a1dce7a9d4941d08c437762b99","src/vxworks/arm.rs":"acb7968ce99fe3f4abdf39d98f8133d21a4fba435b8ef7084777cb181d788e88","src/vxworks/mod.rs":"aea3da66f2140f2a82dfc9c58f6e6531d2dd9c15ea696e0f95a0d4a2a187b5b6","src/vxworks/powerpc.rs":"acb7968ce99fe3f4abdf39d98f8133d21a4fba435b8ef7084777cb181d788e88","src/vxworks/powerpc64.rs":"98f0afdc511cd02557e506c21fed6737585490a1dce7a9d4941d08c437762b99","src/vxworks/x86.rs":"552f007f38317620b23889cb7c49d1d115841252439060122f52f434fbc6e5ba","src/vxworks/x86_64.rs":"018d92be3ad628a129eff9f2f5dfbc0883d8b8e5f2fa917b900a7f98ed6b514a","src/wasi.rs":"817e7592e47f06ece5266fb35625c1aba0a23128e4015a9de2fbf94aba5f1312","src/windows/gnu/align.rs":"b2c13ec1b9f3b39a75c452c80c951dff9d0215e31d77e883b4502afb31794647","src/windows/gnu/mod.rs":"3c8c7edb7cdf5d0c44af936db2a94869585c69dfabeef30571b4f4e38375767a","src/windows/mod.rs":"e3ad95ba54f76e74c301611fe868d3d94f6b8939b03be672f568b06b10ae71c7","src/windows/msvc/mod.rs":"c068271e00fca6b62bc4bf44bcf142cfc38caeded9b6c4e01d1ceef3ccf986f4","tests/const_fn.rs":"cb75a1f0864f926aebe79118fc34d51a0d1ade2c20a394e7774c7e545f21f1f4"},"package":"565dbd88872dbe4cc8a46e527f26483c1d1f7afa6b884a3bd6cd893d4f98da74"}
-\ No newline at end of file
-+{"files":{"CONTRIBUTING.md":"752eea5a703d11b485c6b5f195f51bd2c79aa5159b619ce09555c779e1fb586b","Cargo.toml":"5bb1b7c99c97f4a5c211e2803482b3bac420044ad1c53a24c06630d4c8df3348","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"a8d47ff51ca256f56a8932dba07660672dbfe3004257ca8de708aac1415937a1","README.md":"8228847944f1332882fbb00275b6f30e4a8aad08a13569c25d52cac012cc2a47","build.rs":"0a4edcc040533d370a2a736f2e218516182471e8b0d9ed5dc2a6c08d5d852a83","rustfmt.toml":"eaa2ea84fc1ba0359b77680804903e07bb38d257ab11986b95b158e460f787b2","src/fixed_width_ints.rs":"34c60f12ec5eeb90f13ec3b954427532111c2446e69617616a97aefc1086a9f1","src/fuchsia/aarch64.rs":"378776a9e40766154a54c94c2a7b4675b5c302a38e6e42da99e67bfbaee60e56","src/fuchsia/align.rs":"ae1cf8f011a99737eabeb14ffff768e60f13b13363d7646744dbb0f443dab3d6","src/fuchsia/mod.rs":"bc8c46531bd1a2429f36aaf2bc137b50e42505b798de83f34eecfa94ad89179b","src/fuchsia/no_align.rs":"303f3f1b255e0088b5715094353cf00476131d8e94e6aebb3f469557771c8b8a","src/fuchsia/x86_64.rs":"93a3632b5cf67d2a6bcb7dc0a558605252d5fe689e0f38d8aa2ec5852255ac87","src/hermit/aarch64.rs":"86048676e335944c37a63d0083d0f368ae10ceccefeed9debb3bbe08777fc682","src/hermit/mod.rs":"d3bfce41e4463d4be8020a2d063c9bfa8b665f45f1cc6cbf3163f5d01e7cb21f","src/hermit/x86_64.rs":"ab832b7524e5fb15c49ff7431165ab1a37dc4667ae0b58e8306f4c539bfa110c","src/lib.rs":"adc4f6af60e57eadc01042ec4312bd592fa3002850b8c70740bfd95ddd2d77fb","src/macros.rs":"148cf62f43a1732a8f37c5e7c9673e51b69cee124c27ee9883c94d2da8edff20","src/psp.rs":"dd31aabd46171d474ec5828372e28588935120e7355c90c105360d8fa9264c1c","src/sgx.rs":"16a95cdefc81c5ee00d8353a60db363c4cc3e0f75abcd5d0144723f2a306ed1b","src/solid/aarch64.rs":"a726e47f324adf73a4a0b67a2c183408d0cad105ae66acf36db37a42ab7f8707","src/solid/arm.rs":"e39a4f74ebbef3b97b8c95758ad741123d84ed3eb48d9cf4f1f4872097fc27fe","src/solid/mod.rs":"98dc29c3e5773318ba258a4a2b7407f25aa52d40885a0024fbe43676fe54d14b","src/switch.rs":"9da3dd39b3de45a7928789926e8572d00e1e11a39e6f7289a1349aadce90edba","src/unix/align.rs":"2cdc7c826ef7ae61f5171c5ae8c445a743d86f1a7f2d9d7e4ceeec56d6874f65","src/unix/bsd/apple/b32/align.rs":"ec833a747866fe19ca2d9b4d3c9ff0385faba5edf4bd0d15fa68884c40b0e26c","src/unix/bsd/apple/b32/mod.rs":"2546ad3eb6aecb95f916648bc63264117c92b4b4859532b34cb011e4c75a5a72","src/unix/bsd/apple/b64/aarch64/align.rs":"f0c321265dd7671f16106b84951ac7dd77ed2e65c6623cbf2d29e76531984770","src/unix/bsd/apple/b64/aarch64/mod.rs":"44c217a4f263afe7a97435de9323d20a96c37836f899ca0925306d4b7e073c27","src/unix/bsd/apple/b64/align.rs":"ec833a747866fe19ca2d9b4d3c9ff0385faba5edf4bd0d15fa68884c40b0e26c","src/unix/bsd/apple/b64/mod.rs":"f5e278a1af7fb358891d1c9be4eb7e815aaca0c5cb738d0c3604ba2208a856f7","src/unix/bsd/apple/b64/x86_64/align.rs":"ec833a747866fe19ca2d9b4d3c9ff0385faba5edf4bd0d15fa68884c40b0e26c","src/unix/bsd/apple/b64/x86_64/mod.rs":"8c87c5855038aae5d433c8f5eb3b29b0a175879a0245342b3bfd83bdf4cfd936","src/unix/bsd/apple/mod.rs":"394a28c9924c4d973d980dc529627cdccf6ca710f7aec46500a868fa76f493df","src/unix/bsd/freebsdlike/dragonfly/errno.rs":"8295b8bb0dfd38d2cdb4d9192cdeeb534cc6c3b208170e64615fa3e0edb3e578","src/unix/bsd/freebsdlike/dragonfly/mod.rs":"829c863803f2029396177026b6f6bd7f91e857032c5a133aa48247a6c3c01431","src/unix/bsd/freebsdlike/freebsd/aarch64.rs":"2a215bd6136b8617aacedf9be738ccee94da9d29b418e9a78101d6291c182352","src/unix/bsd/freebsdlike/freebsd/arm.rs":"59d6a670eea562fb87686e243e0a84603d29a2028a3d4b3f99ccc01bd04d2f47","src/unix/bsd/freebsdlike/freebsd/freebsd11/b64.rs":"9808d152c1196aa647f1b0f0cf84dac8c930da7d7f897a44975545e3d9d17681","src/unix/bsd/freebsdlike/freebsd/freebsd11/mod.rs":"a6eee615e6ca5a6e04b526bb6b22d13b9356e87e51825cda33476c37a46cb0ef","src/unix/bsd/freebsdlike/freebsd/freebsd12/b64.rs":"61cbe45f8499bedb168106b686d4f8239472f25c7553b069eec2afe197ff2df6","src/unix/bsd/freebsdlike/freebsd/freebsd12/mod.rs":"266fe364867d858610b51a950e936fb10c7990f5e627cd59f7947f4b232ab259","src/unix/bsd/freebsdlike/freebsd/freebsd12/x86_64.rs":"2df36a7f122f6d6e5753cfb4d22e915cc80f6bc91c0161b3daae55a481bfd052","src/unix/bsd/freebsdlike/freebsd/freebsd13/b64.rs":"61cbe45f8499bedb168106b686d4f8239472f25c7553b069eec2afe197ff2df6","src/unix/bsd/freebsdlike/freebsd/freebsd13/mod.rs":"12bf5c9aef74d43005aca762a48c2c6d64c21d138bd789b20f7143fc20a6db95","src/unix/bsd/freebsdlike/freebsd/freebsd13/x86_64.rs":"2df36a7f122f6d6e5753cfb4d22e915cc80f6bc91c0161b3daae55a481bfd052","src/unix/bsd/freebsdlike/freebsd/freebsd14/b64.rs":"61cbe45f8499bedb168106b686d4f8239472f25c7553b069eec2afe197ff2df6","src/unix/bsd/freebsdlike/freebsd/freebsd14/mod.rs":"fdbc49a641a247aabb6e725647826bc5341447d9bff1319bfe092ba5fcda7b26","src/unix/bsd/freebsdlike/freebsd/freebsd14/x86_64.rs":"2df36a7f122f6d6e5753cfb4d22e915cc80f6bc91c0161b3daae55a481bfd052","src/unix/bsd/freebsdlike/freebsd/mod.rs":"9e6d77f35ff7d9f3cdd8596ca88f2eb39fd74cd8a135f99cc487fdb164422f83","src/unix/bsd/freebsdlike/freebsd/powerpc.rs":"9ca3f82f88974e6db5569f2d76a5a3749b248a31747a6c0da5820492bdfeca42","src/unix/bsd/freebsdlike/freebsd/powerpc64.rs":"2dae3ecc87eac3b11657aa98915def55fc4b5c0de11fe26aae23329a54628a9a","src/unix/bsd/freebsdlike/freebsd/riscv64.rs":"8f591bd273464d684c4f64365f8ed56a8138175daa70d96008541393057a0dae","src/unix/bsd/freebsdlike/freebsd/x86.rs":"c5005e3249eb7c93cfbac72a9e9272320d80ce7983da990ceb05a447f59a02c5","src/unix/bsd/freebsdlike/freebsd/x86_64/align.rs":"0e1f69a88fca1c32874b1daf5db3d446fefbe518dca497f096cc9168c39dde70","src/unix/bsd/freebsdlike/freebsd/x86_64/mod.rs":"51e4dd0c8ae247bb652feda5adad9333ea3bb30c750c3a3935e0b0e47d7803eb","src/unix/bsd/freebsdlike/mod.rs":"365ab1c1ce8b7ca061b1bdb38aac534cb600e978746eb309f6d1792b3f67db39","src/unix/bsd/mod.rs":"2fed08973739d7f6a7412b204724af64b1a915c712689c250cf9a08263ba05ff","src/unix/bsd/netbsdlike/mod.rs":"07b97b75fa72215b54c9869f50eee3167ea835fd674cf3fa036bdbd6904c563b","src/unix/bsd/netbsdlike/netbsd/aarch64.rs":"65dcb58d11e8d8028401a9d07ca3eb4cb4f053e04249cc877353449d84ccc4cb","src/unix/bsd/netbsdlike/netbsd/arm.rs":"58cdbb70b0d6f536551f0f3bb3725d2d75c4690db12c26c034e7d6ec4a924452","src/unix/bsd/netbsdlike/netbsd/mod.rs":"5f927d3d26165689b3ae7a7535ea5504068a406ef76705572c610995dbea1f0e","src/unix/bsd/netbsdlike/netbsd/powerpc.rs":"ee7ff5d89d0ed22f531237b5059aa669df93a3b5c489fa641465ace8d405bf41","src/unix/bsd/netbsdlike/netbsd/sparc64.rs":"9489f4b3e4566f43bb12dfb92238960613dac7f6a45cc13068a8d152b902d7d9","src/unix/bsd/netbsdlike/netbsd/x86.rs":"20692320e36bfe028d1a34d16fe12ca77aa909cb02bda167376f98f1a09aefe7","src/unix/bsd/netbsdlike/netbsd/x86_64.rs":"1afe5ef46b14397cdd68664b5b232e4f5b035b6db1d4cf411c899d51ebca9f30","src/unix/bsd/netbsdlike/openbsd/aarch64.rs":"dd91931d373b7ecaf6e2de25adadee10d16fa9b12c2cbacdff3eb291e1ba36af","src/unix/bsd/netbsdlike/openbsd/arm.rs":"01580d261bc6447bb327a0d982181b7bdabfa066cee65a30373d3ced729ad307","src/unix/bsd/netbsdlike/openbsd/mips64.rs":"8532a189ae10c7d668d9d4065da8b05d124e09bd39442c9f74a7f231c43eca48","src/unix/bsd/netbsdlike/openbsd/mod.rs":"98c95365b892679b4976b928714a0b6fc61f01fe662e7066170448d6c1eaef5e","src/unix/bsd/netbsdlike/openbsd/powerpc.rs":"01580d261bc6447bb327a0d982181b7bdabfa066cee65a30373d3ced729ad307","src/unix/bsd/netbsdlike/openbsd/powerpc64.rs":"1dd5449dd1fd3d51e30ffdeeaece91d0aaf05c710e0ac699fecc5461cfa2c28e","src/unix/bsd/netbsdlike/openbsd/riscv64.rs":"1dd5449dd1fd3d51e30ffdeeaece91d0aaf05c710e0ac699fecc5461cfa2c28e","src/unix/bsd/netbsdlike/openbsd/sparc64.rs":"d04fd287afbaa2c5df9d48c94e8374a532a3ba491b424ddf018270c7312f4085","src/unix/bsd/netbsdlike/openbsd/x86.rs":"6f7f5c4fde2a2259eb547890cbd86570cea04ef85347d7569e94e679448bec87","src/unix/bsd/netbsdlike/openbsd/x86_64.rs":"d31db31630289c85af3339dbe357998a21ca584cbae31607448fe2cf7675a4e1","src/unix/haiku/b32.rs":"a2efdbf7158a6da341e1db9176b0ab193ba88b449616239ed95dced11f54d87b","src/unix/haiku/b64.rs":"ff8115367d3d7d354f792d6176dfaaa26353f57056197b563bf4681f91ff7985","src/unix/haiku/mod.rs":"41c1cc641a21a2433fe38e9b4038c4ac94ef10a00c38351c79c4e7f3affadc6e","src/unix/haiku/native.rs":"44855f52906f607de137fc4baa8c6b1b9a26baaa666f25d5f7a7ec5e017c8be6","src/unix/haiku/x86_64.rs":"3ec3aeeb7ed208b8916f3e32d42bfd085ff5e16936a1a35d9a52789f043b7237","src/unix/hermit/aarch64.rs":"86048676e335944c37a63d0083d0f368ae10ceccefeed9debb3bbe08777fc682","src/unix/hermit/mod.rs":"859814f5df89e28fd4b345db399d181e11e7ed413841b6ff703a1fcbdbf013ae","src/unix/hermit/x86_64.rs":"ab832b7524e5fb15c49ff7431165ab1a37dc4667ae0b58e8306f4c539bfa110c","src/unix/linux_like/android/b32/arm.rs":"433c1530f602cc5ed26610c58055dde0c4ceea5e00150063b24ddc60768332a4","src/unix/linux_like/android/b32/mod.rs":"d971b98530a96f5892f98e1edc3133cf278d1b3939d77ab0a27a6323e0961715","src/unix/linux_like/android/b32/x86/align.rs":"812914e4241df82e32b12375ca3374615dc3a4bdd4cf31f0423c5815320c0dab","src/unix/linux_like/android/b32/x86/mod.rs":"8388bd3a0fcb5636bf965eee6dc95ae6860b85a2b555b387c868aa4d4e01ec89","src/unix/linux_like/android/b64/aarch64/align.rs":"2179c3b1608fa4bf68840482bfc2b2fa3ee2faf6fcae3770f9e505cddca35c7b","src/unix/linux_like/android/b64/aarch64/mod.rs":"78b837d764c5ab6d262f12c3824d8ef05a94c03b9342b88462454ca7a52d203d","src/unix/linux_like/android/b64/mod.rs":"d7bbbadafdb2cb2ff8e9cde3d89a03b9facaabb6b2d45705225d3ece1c5cce37","src/unix/linux_like/android/b64/x86_64/align.rs":"7169d07a9fd4716f7512719aec9fda5d8bed306dc0720ffc1b21696c9951e3c6","src/unix/linux_like/android/b64/x86_64/mod.rs":"06b22dc184e85a988ab969c75cadc52b9878faeab0eb9d21215878c95fe07c19","src/unix/linux_like/android/mod.rs":"f19d7ce918bd273709578b3cfc3be02192ff1b0d4561de0018796db3fe04b179","src/unix/linux_like/emscripten/align.rs":"86c95cbed7a7161b1f23ee06843e7b0e2340ad92b2cb86fe2a8ef3e0e8c36216","src/unix/linux_like/emscripten/mod.rs":"b71d37106750f57bc2dae4e9bcb473ff098ef48235827e41a1687a39825f0aa4","src/unix/linux_like/emscripten/no_align.rs":"0128e4aa721a9902754828b61b5ec7d8a86619983ed1e0544a85d35b1051fad6","src/unix/linux_like/linux/align.rs":"213e70ebed2703e14a9cf17666b21ecbf180b7bff7fa22fdbb36dbbd52df326d","src/unix/linux_like/linux/arch/generic/mod.rs":"b6efaded9b9631410a265dd52398d9bddb387d35b9f513e9c0432dc5233dda39","src/unix/linux_like/linux/arch/mips/mod.rs":"4588078cd2b1c046379c8a55f728b838ff7f9151fcd8a1a5389683f1bda13550","src/unix/linux_like/linux/arch/mod.rs":"466a29622e47c6c7f1500682b2eb17f5566dd81b322cd6348f0fdd355cec593a","src/unix/linux_like/linux/arch/powerpc/mod.rs":"c447e417cdb966a4da64e2755fd91f64ab14098f529098bf437053d7e08843db","src/unix/linux_like/linux/arch/sparc/mod.rs":"9b89a4e798bb83b3e0f4052fa2d2bb6f5e9508da69a2e5b40b40795e4b4256ae","src/unix/linux_like/linux/gnu/align.rs":"e4a3c27fe20a57b8d612c34cb05bc70646edb5cec7251957315afa53a7b9f936","src/unix/linux_like/linux/gnu/b32/arm/align.rs":"3fed009dc9af3cc81be7087da9d2d7d1f39845e4497e290259c5cdbae25f039d","src/unix/linux_like/linux/gnu/b32/arm/mod.rs":"877ff42aa56e599084df9a8374e1f3eaf8ec05a445f2887b10aee5744c093f02","src/unix/linux_like/linux/gnu/b32/mips/align.rs":"429fb5e005cb7143602d430098b6ebfb7d360685b194f333dfd587472ae954ee","src/unix/linux_like/linux/gnu/b32/mips/mod.rs":"9fdf7cf98bc92153854ba04b2892009ec8bd855d12e1f68e91d3df2418ff6364","src/unix/linux_like/linux/gnu/b32/mod.rs":"ad23b70a5f849d5a6dbbf6d608221d2928b1834956d71d072bcc0eb941b0d856","src/unix/linux_like/linux/gnu/b32/powerpc.rs":"240468afe2fe42fd60a5568add147dc0ff097a6469ffadfdd5b9756dfd0323f2","src/unix/linux_like/linux/gnu/b32/riscv32/mod.rs":"dc99518121e911db120b157ad289e5e6964f3a42df53b091eaf071dbf5c04098","src/unix/linux_like/linux/gnu/b32/sparc/align.rs":"21adbed27df73e2d1ed934aaf733a643003d7baf2bde9c48ea440895bcca6d41","src/unix/linux_like/linux/gnu/b32/sparc/mod.rs":"ae203726810da9312b484b0b7891a35527748c069f11e91d6d9afc73297b1cbb","src/unix/linux_like/linux/gnu/b32/x86/align.rs":"e4bafdc4a519a7922a81b37a62bbfd1177a2f620890eef8f1fbc47162e9eb413","src/unix/linux_like/linux/gnu/b32/x86/mod.rs":"eb8d002ccb3dfcf6c94e8826afe9f8c496a813fa3d770c7dbc1024b45f870c2f","src/unix/linux_like/linux/gnu/b64/aarch64/align.rs":"2179c3b1608fa4bf68840482bfc2b2fa3ee2faf6fcae3770f9e505cddca35c7b","src/unix/linux_like/linux/gnu/b64/aarch64/ilp32.rs":"21a21503ef2e095f4371044915d4bfb07a8578011cb5c713cd9f45947b0b5730","src/unix/linux_like/linux/gnu/b64/aarch64/lp64.rs":"e78c3cd197f44832338b414d1a9bc0d194f44c74db77bd7bf830c1fff62b2690","src/unix/linux_like/linux/gnu/b64/aarch64/mod.rs":"79991b28010bfa01501dc5aa0ba2f7d8394539447398169a052c541cb21e20bd","src/unix/linux_like/linux/gnu/b64/mips64/align.rs":"7169d07a9fd4716f7512719aec9fda5d8bed306dc0720ffc1b21696c9951e3c6","src/unix/linux_like/linux/gnu/b64/mips64/mod.rs":"506050a20f8de53907a893d2711f1bed6e6c019580d5dde55f5dbdcfa40802f5","src/unix/linux_like/linux/gnu/b64/mod.rs":"6336065423c26b59681fd2ce77f1117ea36af13e163fdadaefd108bd8191e8c8","src/unix/linux_like/linux/gnu/b64/powerpc64/align.rs":"e29c4868bbecfa4a6cd8a2ad06193f3bbc78a468cc1dc9df83f002f1268130d9","src/unix/linux_like/linux/gnu/b64/powerpc64/mod.rs":"b53ee7b973b91c2175d16476c87547f8b36c2f0b716f456018fdd2728ab4d59f","src/unix/linux_like/linux/gnu/b64/riscv64/mod.rs":"7cbadd208b719c3cbab0432cb6bb236f1e79a01823790aa6b07366433ca0991b","src/unix/linux_like/linux/gnu/b64/s390x.rs":"59228db655d6a54d20b9431c705ed7966f94b98d61e53b992c8f8ee05e176e77","src/unix/linux_like/linux/gnu/b64/sparc64/align.rs":"e29c4868bbecfa4a6cd8a2ad06193f3bbc78a468cc1dc9df83f002f1268130d9","src/unix/linux_like/linux/gnu/b64/sparc64/mod.rs":"cb9750d9153428712eb32928fb85a1e67f9687f42ef698c3bb79951c219361d0","src/unix/linux_like/linux/gnu/b64/x86_64/align.rs":"7169d07a9fd4716f7512719aec9fda5d8bed306dc0720ffc1b21696c9951e3c6","src/unix/linux_like/linux/gnu/b64/x86_64/mod.rs":"e4cbe0f5a3d100490f4f648bcadcd26899cf2f4760e6ee8ccb56ce16d598cc18","src/unix/linux_like/linux/gnu/b64/x86_64/not_x32.rs":"f775ac2b754f90b63053fe22afe1d19d306b5404995568d6805baa9249fb617f","src/unix/linux_like/linux/gnu/b64/x86_64/x32.rs":"4ba1b58468f55254717366f50fdfd3e4114fde6dc442a56681926c4d7e5b6b0d","src/unix/linux_like/linux/gnu/mod.rs":"13899330d3118c5e41854f897c2e455f1b7db949384d36cff1d3042fd25a4475","src/unix/linux_like/linux/gnu/no_align.rs":"9cd223135de75315840ff9c3fd5441ba1cb632b96b5c85a76f8316c86653db25","src/unix/linux_like/linux/mod.rs":"3aaa8a783a1ec2134e27a45d7de6296b97f94e543bf5ec2c6c4ea5d24afb167f","src/unix/linux_like/linux/musl/b32/arm/align.rs":"3e8ac052c1043764776b54c93ba4260e061df998631737a897d9d47d54f7b80c","src/unix/linux_like/linux/musl/b32/arm/mod.rs":"8625290735a1294a50b34bb2b8d3b515305325187027cc577c72b0762f3a265e","src/unix/linux_like/linux/musl/b32/hexagon.rs":"dbc8aeb2bd3cd2269e0e95858c5ba0e0204b1ca0bcc5df517fb3f7f71134d044","src/unix/linux_like/linux/musl/b32/mips/align.rs":"429fb5e005cb7143602d430098b6ebfb7d360685b194f333dfd587472ae954ee","src/unix/linux_like/linux/musl/b32/mips/mod.rs":"65e3917900a9dfa0b4602e87d17305ebfa56a0c0dea61e3975099c9a6b9e967b","src/unix/linux_like/linux/musl/b32/mod.rs":"bac24312f6629ef76aa12dd41123061e3a77a244e7fda7bbfcdf5c57996b61e6","src/unix/linux_like/linux/musl/b32/powerpc.rs":"e898b490365c84c0f1856ea8640463c558b5a247d845ca0c11677679272ea20f","src/unix/linux_like/linux/musl/b32/x86/align.rs":"08e77fbd7435d7dec2ff56932433bece3f02e47ce810f89004a275a86d39cbe1","src/unix/linux_like/linux/musl/b32/x86/mod.rs":"9a9f4926d5a1fbe406f5ab84742680fe4941ab929005e7fefc4e2bc6686ba5eb","src/unix/linux_like/linux/musl/b64/aarch64/align.rs":"798a9229d70ce235394f2dd625f6c4c1e10519a94382dc5b091952b638ae2928","src/unix/linux_like/linux/musl/b64/aarch64/mod.rs":"cddcde68c13104f18edd611ce23e60a4f9b4fcae1baa57fb92284495d3a77850","src/unix/linux_like/linux/musl/b64/mips64.rs":"4a5d543195da94aaa45be067d6d145c8092bebf3611898aa2beccc0cd55d5f3f","src/unix/linux_like/linux/musl/b64/mod.rs":"d847206d9f2d594c8febe780a938cdccf40d985dafc11e90f235947735a09bac","src/unix/linux_like/linux/musl/b64/powerpc64.rs":"37b08e5f5f84bc72b62c439ac9282db71ff59b80cd2775c5588941b7a135e6bc","src/unix/linux_like/linux/musl/b64/riscv64/mod.rs":"0c7ee2f0ef9868b2ee628748d20cfb2f5ed52da2c862500c9b8406269a89dc52","src/unix/linux_like/linux/musl/b64/s390x.rs":"9b05b1fae6bcb7cb6d909b9973977fde01684175f3e26c27dcb44223cc3933d9","src/unix/linux_like/linux/musl/b64/x86_64/align.rs":"7169d07a9fd4716f7512719aec9fda5d8bed306dc0720ffc1b21696c9951e3c6","src/unix/linux_like/linux/musl/b64/x86_64/mod.rs":"d3762f374cd3aaa1e0e3f79d36f9edc7fd4c8492e561ee4bd0e80e727eef4e9e","src/unix/linux_like/linux/musl/mod.rs":"c9a24828d62f20e0cc0b835197f48bc31aa51db7ca4880341292271cc2df7aae","src/unix/linux_like/linux/no_align.rs":"5ed04c53bf9d27da9b4d65ba7625c6ac53330162683d1b3df98950caafa3507b","src/unix/linux_like/linux/non_exhaustive.rs":"181a05bf94fdb911db83ce793b993bd6548a4115b306a7ef3c10f745a8fea3e9","src/unix/linux_like/linux/uclibc/align.rs":"9ed16138d8e439bd90930845a65eafa7ebd67366e6bf633936d44014f6e4c959","src/unix/linux_like/linux/uclibc/arm/align.rs":"e4a3c27fe20a57b8d612c34cb05bc70646edb5cec7251957315afa53a7b9f936","src/unix/linux_like/linux/uclibc/arm/mod.rs":"6d83b37ebe54384269b5c5cc3c9c370f97bdc8c646c9aee8705f0de1aca80067","src/unix/linux_like/linux/uclibc/arm/no_align.rs":"9cd223135de75315840ff9c3fd5441ba1cb632b96b5c85a76f8316c86653db25","src/unix/linux_like/linux/uclibc/mips/mips32/align.rs":"e4a3c27fe20a57b8d612c34cb05bc70646edb5cec7251957315afa53a7b9f936","src/unix/linux_like/linux/uclibc/mips/mips32/mod.rs":"edb86245bcc0e340de277e20752463cb4004104fe97737a71afdcc8b06bee703","src/unix/linux_like/linux/uclibc/mips/mips32/no_align.rs":"9cd223135de75315840ff9c3fd5441ba1cb632b96b5c85a76f8316c86653db25","src/unix/linux_like/linux/uclibc/mips/mips64/align.rs":"a7bdcb18a37a2d91e64d5fad83ea3edc78f5412adb28f77ab077dbb26dd08b2d","src/unix/linux_like/linux/uclibc/mips/mips64/mod.rs":"57d5e24ceb98e1ef90de231c4cf7f0275ac410d407607b240af6744504ec1f2a","src/unix/linux_like/linux/uclibc/mips/mips64/no_align.rs":"4a18e3875698c85229599225ac3401a2a40da87e77b2ad4ef47c6fcd5a24ed30","src/unix/linux_like/linux/uclibc/mips/mod.rs":"4eea5256e47e485dd1e50c147a56697d286e0395a77674317e83a28fb43a12d8","src/unix/linux_like/linux/uclibc/mod.rs":"d2f5d6a3381bceec0b036bb3979c113c9c4144b134368e39b3f9f2fbef79f5cc","src/unix/linux_like/linux/uclibc/no_align.rs":"3f28637046524618adaa1012e26cb7ffe94b9396e6b518cccdc69d59f274d709","src/unix/linux_like/linux/uclibc/x86_64/l4re.rs":"024eba5753e852dbdd212427351affe7e83f9916c1864bce414d7aa2618f192e","src/unix/linux_like/linux/uclibc/x86_64/mod.rs":"583349563b537104e2265be893f65e302e568a54f4022b8f14a2cf3200ac39ba","src/unix/linux_like/linux/uclibc/x86_64/other.rs":"42c3f71e58cabba373f6a55a623f3c31b85049eb64824c09c2b082b3b2d6a0a8","src/unix/linux_like/mod.rs":"dd4f7a1d66d8501b4a2c4e75e6e9305ed69f1002ae99e410596a6c636878595a","src/unix/mod.rs":"22300f25d8f3adcdcd419222a2d5657e44c835eb4a0f90e05b691c7bcc3a787e","src/unix/newlib/aarch64/mod.rs":"bb269c1468a9676442554600e87417079a787fe6220dfc23b3109639259e8710","src/unix/newlib/align.rs":"28aaf87fafbc6b312622719d472d8cf65f9e5467d15339df5f73e66d8502b28a","src/unix/newlib/arm/mod.rs":"c71be856bfd7f576b2db28af9f680211cbe6c1cac7d537bbc8020b39591af07c","src/unix/newlib/espidf/mod.rs":"c198cb4beccdab483be61c102da74dc51ac80f766797e33021f3110394ed5a3d","src/unix/newlib/horizon/mod.rs":"95cb1e5f469a3d90ce27f03d8f70121c82edd26ab762db4aa373857b87e12d98","src/unix/newlib/mod.rs":"c33c46ad76627c24927b63953d9271e01bbc7f4967381675baf61442470a41cb","src/unix/newlib/no_align.rs":"e0743b2179495a9514bc3a4d1781e492878c4ec834ee0085d0891dd1712e82fb","src/unix/newlib/powerpc/mod.rs":"2d0f7af28b47f7a2a6c210ebd1c1f33ed8eac62e56b5af2b856de2ad3fdc5187","src/unix/no_align.rs":"c06e95373b9088266e0b14bba0954eef95f93fb2b01d951855e382d22de78e53","src/unix/redox/mod.rs":"cc4794afa4fbed9d6612894ea476228f9d8533950162d6416fc4d16073e1dac4","src/unix/solarish/compat.rs":"b07a5bfac925eb012003a459ba6bddbd3bfa9c44b3394da2ac5a602e54beae9c","src/unix/solarish/illumos.rs":"a092f6676a0d208e803819bf32f928714e93606fb9062cf55b5fb7a121d47481","src/unix/solarish/mod.rs":"01dc4208b5b0d901550dc9a6f060e7d446a303919287b328fda556d9dc569b34","src/unix/solarish/solaris.rs":"65b005453aefa9b9d4fc860fe77cfec80d8c97a51342b15daf55fc3e808bb384","src/unix/solarish/x86_64.rs":"9d761573bdccbdd1ac61da571f7e96b5374df70241d9b72d45a069611f495085","src/vxworks/aarch64.rs":"98f0afdc511cd02557e506c21fed6737585490a1dce7a9d4941d08c437762b99","src/vxworks/arm.rs":"acb7968ce99fe3f4abdf39d98f8133d21a4fba435b8ef7084777cb181d788e88","src/vxworks/mod.rs":"aea3da66f2140f2a82dfc9c58f6e6531d2dd9c15ea696e0f95a0d4a2a187b5b6","src/vxworks/powerpc.rs":"acb7968ce99fe3f4abdf39d98f8133d21a4fba435b8ef7084777cb181d788e88","src/vxworks/powerpc64.rs":"98f0afdc511cd02557e506c21fed6737585490a1dce7a9d4941d08c437762b99","src/vxworks/x86.rs":"552f007f38317620b23889cb7c49d1d115841252439060122f52f434fbc6e5ba","src/vxworks/x86_64.rs":"018d92be3ad628a129eff9f2f5dfbc0883d8b8e5f2fa917b900a7f98ed6b514a","src/wasi.rs":"817e7592e47f06ece5266fb35625c1aba0a23128e4015a9de2fbf94aba5f1312","src/windows/gnu/align.rs":"b2c13ec1b9f3b39a75c452c80c951dff9d0215e31d77e883b4502afb31794647","src/windows/gnu/mod.rs":"3c8c7edb7cdf5d0c44af936db2a94869585c69dfabeef30571b4f4e38375767a","src/windows/mod.rs":"e3ad95ba54f76e74c301611fe868d3d94f6b8939b03be672f568b06b10ae71c7","src/windows/msvc/mod.rs":"c068271e00fca6b62bc4bf44bcf142cfc38caeded9b6c4e01d1ceef3ccf986f4","tests/const_fn.rs":"cb75a1f0864f926aebe79118fc34d51a0d1ade2c20a394e7774c7e545f21f1f4"},"package":"565dbd88872dbe4cc8a46e527f26483c1d1f7afa6b884a3bd6cd893d4f98da74"}
diff --git a/meta/recipes-devtools/rust/libstd-rs_1.60.0.bb b/meta/recipes-devtools/rust/libstd-rs_1.60.0.bb
deleted file mode 100644
index 0ff1fbd678..0000000000
--- a/meta/recipes-devtools/rust/libstd-rs_1.60.0.bb
+++ /dev/null
@@ -1,12 +0,0 @@
-require rust-source.inc
-require libstd-rs.inc
-
-# Check if libc crate is >= 0.2.17 before dropping this patch
-SRC_URI += " \
- file://0001-Add-400-series-syscalls-to-musl-riscv64-definitions.patch;patchdir=../../ \
- file://0001-Update-checksums-for-modified-vendored-libc.patch;patchdir=../../ \
-"
-# libstd moved from src/libstd to library/std in 1.47+
-S = "${RUSTSRC}/library/std"
-
-BBCLASSEXTEND = "nativesdk" \ No newline at end of file
diff --git a/meta/recipes-devtools/rust/libstd-rs_1.75.0.bb b/meta/recipes-devtools/rust/libstd-rs_1.75.0.bb
new file mode 100644
index 0000000000..d2bf266f9d
--- /dev/null
+++ b/meta/recipes-devtools/rust/libstd-rs_1.75.0.bb
@@ -0,0 +1,53 @@
+SUMMARY = "Rust standard libaries"
+HOMEPAGE = "http://www.rust-lang.org"
+SECTION = "devel"
+LICENSE = "(MIT | Apache-2.0) & Unicode-TOU"
+LIC_FILES_CHKSUM = "file://../../COPYRIGHT;md5=c2cccf560306876da3913d79062a54b9"
+
+require rust-source.inc
+
+# The dummy crate named `sysroot` represents the standard library target.
+#
+# See fd4c81f4c19e ("Add a `sysroot` crate to represent the standard library crates")
+# https://github.com/rust-lang/rust/pull/108865/
+S = "${RUSTSRC}/library/sysroot"
+
+RUSTLIB_DEP = ""
+inherit cargo
+
+DEPENDS:append:libc-musl = " libunwind"
+# rv32 does not have libunwind ported yet
+DEPENDS:remove:riscv32 = "libunwind"
+DEPENDS:remove:riscv64 = "libunwind"
+
+# Embed bitcode in order to allow compiling both with and without LTO
+RUSTFLAGS += "-Cembed-bitcode=yes"
+# Needed so cargo can find libbacktrace
+RUSTFLAGS += "-L ${STAGING_LIBDIR} -C link-arg=-Wl,-soname,libstd.so"
+
+CARGO_FEATURES ?= "panic-unwind backtrace"
+CARGO_BUILD_FLAGS += "--features '${CARGO_FEATURES}'"
+CARGO_VENDORING_DIRECTORY = "${RUSTSRC}/vendor"
+
+do_compile:prepend () {
+ export CARGO_TARGET_DIR="${B}"
+ # For Rust 1.13.0 and newer
+ export RUSTC_BOOTSTRAP="1"
+}
+
+do_install () {
+ mkdir -p ${D}${rustlibdir}
+
+ # With the incremental build support added in 1.24, the libstd deps directory also includes dependency
+ # files that get installed. Those are really only needed to incrementally rebuild the libstd library
+ # itself and don't need to be installed.
+ rm -f ${B}/${RUST_TARGET_SYS}/${BUILD_DIR}/deps/*.d
+ cp ${B}/${RUST_TARGET_SYS}/${BUILD_DIR}/deps/* ${D}${rustlibdir}
+}
+
+BBCLASSEXTEND = "nativesdk"
+
+# Since 1.70.0 upgrade this fails to build with gold:
+# http://errors.yoctoproject.org/Errors/Details/708194/
+# ld: error: version script assignment of to symbol __rust_alloc_error_handler_should_panic failed: symbol not defined
+LDFLAGS:append = "${@bb.utils.contains('DISTRO_FEATURES', 'ld-is-gold', ' -fuse-ld=bfd', '', d)}"
diff --git a/meta/recipes-devtools/rust/rust-common.inc b/meta/recipes-devtools/rust/rust-common.inc
deleted file mode 100644
index 621cd4ad57..0000000000
--- a/meta/recipes-devtools/rust/rust-common.inc
+++ /dev/null
@@ -1,363 +0,0 @@
-
-# Right now this is focused on arm-specific tune features.
-# We get away with this for now as one can only use x86-64 as the build host
-# (not arm).
-# Note that TUNE_FEATURES is _always_ refering to the target, so we really
-# don't want to use this for the host/build.
-def llvm_features_from_tune(d):
- f = []
- feat = d.getVar('TUNE_FEATURES')
- if not feat:
- return []
- feat = frozenset(feat.split())
-
- mach_overrides = d.getVar('MACHINEOVERRIDES')
- mach_overrides = frozenset(mach_overrides.split(':'))
-
- if 'vfpv4' in feat:
- f.append("+vfp4")
- if 'vfpv3' in feat:
- f.append("+vfp3")
- if 'vfpv3d16' in feat:
- f.append("+d16")
-
- if 'vfpv2' in feat or 'vfp' in feat:
- f.append("+vfp2")
-
- if 'neon' in feat:
- f.append("+neon")
-
- if 'mips32' in feat:
- f.append("+mips32")
-
- if 'mips32r2' in feat:
- f.append("+mips32r2")
-
- if target_is_armv7(d):
- f.append('+v7')
-
- if ('armv6' in mach_overrides) or ('armv6' in feat):
- f.append("+v6")
- if 'armv5te' in feat:
- f.append("+strict-align")
- f.append("+v5te")
- elif 'armv5' in feat:
- f.append("+strict-align")
- f.append("+v5")
-
- if ('armv4' in mach_overrides) or ('armv4' in feat):
- f.append("+strict-align")
-
- if 'dsp' in feat:
- f.append("+dsp")
-
- if 'thumb' in feat:
- if d.getVar('ARM_THUMB_OPT') == "thumb":
- if target_is_armv7(d):
- f.append('+thumb2')
- f.append("+thumb-mode")
-
- if 'cortexa5' in feat:
- f.append("+a5")
- if 'cortexa7' in feat:
- f.append("+a7")
- if 'cortexa9' in feat:
- f.append("+a9")
- if 'cortexa15' in feat:
- f.append("+a15")
- if 'cortexa17' in feat:
- f.append("+a17")
- if ('riscv64' in feat) or ('riscv32' in feat):
- f.append("+a,+c,+d,+f,+m")
- return f
-llvm_features_from_tune[vardepvalue] = "${@llvm_features_from_tune(d)}"
-
-# TARGET_CC_ARCH changes from build/cross/target so it'll do the right thing
-# this should go away when https://github.com/rust-lang/rust/pull/31709 is
-# stable (1.9.0?)
-def llvm_features_from_cc_arch(d):
- f = []
- feat = d.getVar('TARGET_CC_ARCH')
- if not feat:
- return []
- feat = frozenset(feat.split())
-
- if '-mmmx' in feat:
- f.append("+mmx")
- if '-msse' in feat:
- f.append("+sse")
- if '-msse2' in feat:
- f.append("+sse2")
- if '-msse3' in feat:
- f.append("+sse3")
- if '-mssse3' in feat:
- f.append("+ssse3")
- if '-msse4.1' in feat:
- f.append("+sse4.1")
- if '-msse4.2' in feat:
- f.append("+sse4.2")
- if '-msse4a' in feat:
- f.append("+sse4a")
- if '-mavx' in feat:
- f.append("+avx")
- if '-mavx2' in feat:
- f.append("+avx2")
-
- return f
-
-def llvm_features_from_target_fpu(d):
- # TARGET_FPU can be hard or soft. +soft-float tell llvm to use soft float
- # ABI. There is no option for hard.
-
- fpu = d.getVar('TARGET_FPU', True)
- return ["+soft-float"] if fpu == "soft" else []
-
-def llvm_features(d):
- return ','.join(llvm_features_from_tune(d) +
- llvm_features_from_cc_arch(d) +
- llvm_features_from_target_fpu(d))
-
-
-## arm-unknown-linux-gnueabihf
-DATA_LAYOUT[arm] = "e-m:e-p:32:32-i64:64-v128:64:128-a:0:32-n32-S64"
-TARGET_ENDIAN[arm] = "little"
-TARGET_POINTER_WIDTH[arm] = "32"
-TARGET_C_INT_WIDTH[arm] = "32"
-MAX_ATOMIC_WIDTH[arm] = "64"
-FEATURES[arm] = "+v6,+vfp2"
-
-## armv7-unknown-linux-gnueabihf
-DATA_LAYOUT[armv7-eabi] = "e-m:e-p:32:32-i64:64-v128:64:128-a:0:32-n32-S64"
-TARGET_ENDIAN[armv7-eabi] = "little"
-TARGET_POINTER_WIDTH[armv7-eabi] = "32"
-TARGET_C_INT_WIDTH[armv7-eabi] = "32"
-MAX_ATOMIC_WIDTH[armv7-eabi] = "64"
-FEATURES[armv7-eabi] = "+v7,+vfp2,+thumb2"
-
-## aarch64-unknown-linux-{gnu, musl}
-DATA_LAYOUT[aarch64] = "e-m:e-i8:8:32-i16:16:32-i64:64-i128:128-n32:64-S128"
-TARGET_ENDIAN[aarch64] = "little"
-TARGET_POINTER_WIDTH[aarch64] = "64"
-TARGET_C_INT_WIDTH[aarch64] = "32"
-MAX_ATOMIC_WIDTH[aarch64] = "128"
-
-## x86_64-unknown-linux-{gnu, musl}
-DATA_LAYOUT[x86_64] = "e-m:e-i64:64-f80:128-n8:16:32:64-S128"
-TARGET_ENDIAN[x86_64] = "little"
-TARGET_POINTER_WIDTH[x86_64] = "64"
-TARGET_C_INT_WIDTH[x86_64] = "32"
-MAX_ATOMIC_WIDTH[x86_64] = "64"
-
-## x86_64-unknown-linux-gnux32
-DATA_LAYOUT[x86_64-x32] = "e-m:e-p:32:32-p270:32:32-p271:32:32-p272:64:64-i64:64-f80:128-n8:16:32:64-S128"
-TARGET_ENDIAN[x86_64-x32] = "little"
-TARGET_POINTER_WIDTH[x86_64-x32] = "32"
-TARGET_C_INT_WIDTH[x86_64-x32] = "32"
-MAX_ATOMIC_WIDTH[x86_64-x32] = "64"
-
-## i686-unknown-linux-{gnu, musl}
-DATA_LAYOUT[i686] = "e-m:e-p:32:32-f64:32:64-f80:32-n8:16:32-S128"
-TARGET_ENDIAN[i686] = "little"
-TARGET_POINTER_WIDTH[i686] = "32"
-TARGET_C_INT_WIDTH[i686] = "32"
-MAX_ATOMIC_WIDTH[i686] = "64"
-
-## XXX: a bit of a hack so qemux86 builds, clone of i686-unknown-linux-{gnu, musl} above
-DATA_LAYOUT[i586] = "e-m:e-p:32:32-f64:32:64-f80:32-n8:16:32-S128"
-TARGET_ENDIAN[i586] = "little"
-TARGET_POINTER_WIDTH[i586] = "32"
-TARGET_C_INT_WIDTH[i586] = "32"
-MAX_ATOMIC_WIDTH[i586] = "64"
-
-## mips-unknown-linux-{gnu, musl}
-DATA_LAYOUT[mips] = "E-m:m-p:32:32-i8:8:32-i16:16:32-i64:64-n32-S64"
-TARGET_ENDIAN[mips] = "big"
-TARGET_POINTER_WIDTH[mips] = "32"
-TARGET_C_INT_WIDTH[mips] = "32"
-MAX_ATOMIC_WIDTH[mips] = "32"
-
-## mipsel-unknown-linux-{gnu, musl}
-DATA_LAYOUT[mipsel] = "e-m:m-p:32:32-i8:8:32-i16:16:32-i64:64-n32-S64"
-TARGET_ENDIAN[mipsel] = "little"
-TARGET_POINTER_WIDTH[mipsel] = "32"
-TARGET_C_INT_WIDTH[mipsel] = "32"
-MAX_ATOMIC_WIDTH[mipsel] = "32"
-
-## mips64-unknown-linux-{gnu, musl}
-DATA_LAYOUT[mips64] = "E-m:e-i8:8:32-i16:16:32-i64:64-n32:64-S128"
-TARGET_ENDIAN[mips64] = "big"
-TARGET_POINTER_WIDTH[mips64] = "64"
-TARGET_C_INT_WIDTH[mips64] = "64"
-MAX_ATOMIC_WIDTH[mips64] = "64"
-
-## mips64el-unknown-linux-{gnu, musl}
-DATA_LAYOUT[mips64el] = "e-m:e-i8:8:32-i16:16:32-i64:64-n32:64-S128"
-TARGET_ENDIAN[mips64el] = "little"
-TARGET_POINTER_WIDTH[mips64el] = "64"
-TARGET_C_INT_WIDTH[mips64el] = "64"
-MAX_ATOMIC_WIDTH[mips64el] = "64"
-
-## powerpc-unknown-linux-{gnu, musl}
-DATA_LAYOUT[powerpc] = "E-m:e-p:32:32-i64:64-n32"
-TARGET_ENDIAN[powerpc] = "big"
-TARGET_POINTER_WIDTH[powerpc] = "32"
-TARGET_C_INT_WIDTH[powerpc] = "32"
-MAX_ATOMIC_WIDTH[powerpc] = "32"
-
-## powerpc64-unknown-linux-{gnu, musl}
-DATA_LAYOUT[powerpc64] = "E-m:e-i64:64-n32:64-S128-v256:256:256-v512:512:512"
-TARGET_ENDIAN[powerpc64] = "big"
-TARGET_POINTER_WIDTH[powerpc64] = "64"
-TARGET_C_INT_WIDTH[powerpc64] = "64"
-MAX_ATOMIC_WIDTH[powerpc64] = "64"
-
-## powerpc64le-unknown-linux-{gnu, musl}
-DATA_LAYOUT[powerpc64le] = "e-m:e-i64:64-n32:64-v256:256:256-v512:512:512"
-TARGET_ENDIAN[powerpc64le] = "little"
-TARGET_POINTER_WIDTH[powerpc64le] = "64"
-TARGET_C_INT_WIDTH[powerpc64le] = "64"
-MAX_ATOMIC_WIDTH[powerpc64le] = "64"
-
-## riscv32-unknown-linux-{gnu, musl}
-DATA_LAYOUT[riscv32] = "e-m:e-p:32:32-i64:64-n32-S128"
-TARGET_ENDIAN[riscv32] = "little"
-TARGET_POINTER_WIDTH[riscv32] = "32"
-TARGET_C_INT_WIDTH[riscv32] = "32"
-MAX_ATOMIC_WIDTH[riscv32] = "32"
-
-## riscv64-unknown-linux-{gnu, musl}
-DATA_LAYOUT[riscv64] = "e-m:e-p:64:64-i64:64-i128:128-n64-S128"
-TARGET_ENDIAN[riscv64] = "little"
-TARGET_POINTER_WIDTH[riscv64] = "64"
-TARGET_C_INT_WIDTH[riscv64] = "64"
-MAX_ATOMIC_WIDTH[riscv64] = "64"
-
-def sys_for(d, thing):
- return d.getVar('{}_SYS'.format(thing))
-
-def prefix_for(d, thing):
- return d.getVar('{}_PREFIX'.format(thing))
-
-# Convert a normal arch (HOST_ARCH, TARGET_ARCH, BUILD_ARCH, etc) to something
-# rust's internals won't choke on.
-def arch_to_rust_target_arch(arch):
- if arch == "i586" or arch == "i686":
- return "x86"
- elif arch == "mipsel":
- return "mips"
- elif arch == "mip64sel":
- return "mips64"
- elif arch == "armv7":
- return "arm"
- elif arch == "powerpc64le":
- return "powerpc64"
- else:
- return arch
-
-# generates our target CPU value
-def llvm_cpu(d):
- cpu = d.getVar('PACKAGE_ARCH')
- target = d.getVar('TRANSLATED_TARGET_ARCH')
-
- trans = {}
- trans['corei7-64'] = "corei7"
- trans['core2-32'] = "core2"
- trans['x86-64'] = "x86-64"
- trans['i686'] = "i686"
- trans['i586'] = "i586"
- trans['powerpc'] = "powerpc"
- trans['mips64'] = "mips64"
- trans['mips64el'] = "mips64"
- trans['riscv64'] = "generic-rv64"
- trans['riscv32'] = "generic-rv32"
-
- if target in ["mips", "mipsel"]:
- feat = frozenset(d.getVar('TUNE_FEATURES').split())
- if "mips32r2" in feat:
- trans['mipsel'] = "mips32r2"
- trans['mips'] = "mips32r2"
- elif "mips32" in feat:
- trans['mipsel'] = "mips32"
- trans['mips'] = "mips32"
-
- try:
- return trans[cpu]
- except:
- return trans.get(target, "generic")
-
-TARGET_LLVM_CPU="${@llvm_cpu(d)}"
-TARGET_LLVM_FEATURES = "${@llvm_features(d)}"
-
-# class-native implies TARGET=HOST, and TUNE_FEATURES only describes the real
-# (original) target.
-TARGET_LLVM_FEATURES:class-native = "${@','.join(llvm_features_from_cc_arch(d))}"
-
-def rust_gen_target(d, thing, wd, features, cpu, arch, abi=""):
- import json
- sys = sys_for(d, thing)
- prefix = prefix_for(d, thing)
-
- rust_arch = oe.rust.arch_to_rust_arch(arch)
-
- if abi:
- arch_abi = "{}-{}".format(rust_arch, abi)
- else:
- arch_abi = rust_arch
-
- features = features or d.getVarFlag('FEATURES', arch_abi) or ""
- features = features.strip()
-
- # build tspec
- tspec = {}
- if bb.data.inherits_class('cross-canadian', d):
- tspec['llvm-target'] = d.getVar('RUST_HOST_SYS', arch_abi)
- else:
- tspec['llvm-target'] = d.getVar('RUST_TARGET_SYS', arch_abi)
- tspec['data-layout'] = d.getVarFlag('DATA_LAYOUT', arch_abi)
- tspec['max-atomic-width'] = int(d.getVarFlag('MAX_ATOMIC_WIDTH', arch_abi))
- tspec['target-pointer-width'] = d.getVarFlag('TARGET_POINTER_WIDTH', arch_abi)
- tspec['target-c-int-width'] = d.getVarFlag('TARGET_C_INT_WIDTH', arch_abi)
- tspec['target-endian'] = d.getVarFlag('TARGET_ENDIAN', arch_abi)
- tspec['arch'] = arch_to_rust_target_arch(rust_arch)
- tspec['os'] = "linux"
- if "musl" in tspec['llvm-target']:
- tspec['env'] = "musl"
- else:
- tspec['env'] = "gnu"
- if "riscv64" in tspec['llvm-target']:
- tspec['llvm-abiname'] = "lp64d"
- if "riscv32" in tspec['llvm-target']:
- tspec['llvm-abiname'] = "ilp32d"
- tspec['vendor'] = "unknown"
- tspec['target-family'] = "unix"
- tspec['linker'] = "{}{}gcc".format(d.getVar('CCACHE'), prefix)
- tspec['cpu'] = cpu
- if features != "":
- tspec['features'] = features
- tspec['dynamic-linking'] = True
- tspec['executables'] = True
- tspec['linker-is-gnu'] = True
- tspec['linker-flavor'] = "gcc"
- tspec['has-rpath'] = True
- tspec['has-elf-tls'] = True
- tspec['position-independent-executables'] = True
- tspec['panic-strategy'] = d.getVar("RUST_PANIC_STRATEGY")
-
- # write out the target spec json file
- with open(wd + sys + '.json', 'w') as f:
- json.dump(tspec, f, indent=4)
-
-# These are accounted for in tmpdir path names so don't need to be in the task sig
-rust_gen_target[vardepsexclude] += "RUST_HOST_SYS RUST_TARGET_SYS"
-
-do_rust_gen_targets[vardeps] += "DATA_LAYOUT TARGET_ENDIAN TARGET_POINTER_WIDTH TARGET_C_INT_WIDTH MAX_ATOMIC_WIDTH FEATURES"
-
-python do_rust_gen_targets () {
- wd = d.getVar('WORKDIR') + '/targets/'
- build_arch = d.getVar('BUILD_ARCH')
- rust_gen_target(d, 'BUILD', wd, "", "generic", build_arch)
-}
-
-addtask rust_gen_targets after do_patch before do_compile
-do_rust_gen_targets[dirs] += "${WORKDIR}/targets"
-
diff --git a/meta/recipes-devtools/rust/rust-cross-canadian-common.inc b/meta/recipes-devtools/rust/rust-cross-canadian-common.inc
deleted file mode 100644
index 1f21c8af26..0000000000
--- a/meta/recipes-devtools/rust/rust-cross-canadian-common.inc
+++ /dev/null
@@ -1,54 +0,0 @@
-
-RUST_ALTERNATE_EXE_PATH = "${STAGING_LIBDIR_NATIVE}/llvm-rust/bin/llvm-config"
-
-require rust.inc
-
-DEPENDS += "rust-llvm (=${PV})"
-
-inherit cross-canadian
-
-DEPENDS += " \
- virtual/${HOST_PREFIX}gcc-crosssdk \
- virtual/nativesdk-libc rust-llvm-native \
- virtual/${TARGET_PREFIX}compilerlibs \
- virtual/nativesdk-${HOST_PREFIX}compilerlibs \
- gcc-cross-${TARGET_ARCH} \
- "
-
-# The host tools are likely not to be able to do the necessary operation on
-# the target architecturea. Alternatively one could check compatibility
-# between host/target.
-EXCLUDE_FROM_SHLIBS_${RUSTLIB_TARGET_PN} = "1"
-
-DEBUG_PREFIX_MAP = "-fdebug-prefix-map=${WORKDIR}=/usr/src/debug/${PN}/${EXTENDPE}${PV}-${PR} \
- -fdebug-prefix-map=${STAGING_DIR_HOST}= \
- -fdebug-prefix-map=${STAGING_DIR_NATIVE}= \
- "
-
-python do_rust_gen_targets () {
- wd = d.getVar('WORKDIR') + '/targets/'
- rust_gen_target(d, 'TARGET', wd, d.getVar('TARGET_LLVM_FEATURES') or "", d.getVar('TARGET_LLVM_CPU'), d.getVar('TARGET_ARCH'))
- rust_gen_target(d, 'HOST', wd, "", "generic", d.getVar('HOST_ARCH'))
- rust_gen_target(d, 'BUILD', wd, "", "generic", d.getVar('BUILD_ARCH'))
-}
-
-INHIBIT_DEFAULT_RUST_DEPS = "1"
-
-export WRAPPER_TARGET_CC = "${CCACHE}${TARGET_PREFIX}gcc --sysroot=${STAGING_DIR_TARGET} ${TARGET_CC_ARCH} ${SECURITY_NOPIE_CFLAGS}"
-export WRAPPER_TARGET_CXX = "${CCACHE}${TARGET_PREFIX}g++ --sysroot=${STAGING_DIR_TARGET} ${TARGET_CC_ARCH} ${SECURITY_NOPIE_CFLAGS}"
-export WRAPPER_TARGET_CCLD = "${TARGET_PREFIX}gcc --sysroot=${STAGING_DIR_TARGET} ${TARGET_CC_ARCH} ${SECURITY_NOPIE_CFLAGS}"
-export WRAPPER_TARGET_LDFLAGS = "${TARGET_LDFLAGS}"
-export WRAPPER_TARGET_AR = "${TARGET_PREFIX}ar"
-
-python do_configure:prepend() {
- targets = [d.getVar("TARGET_SYS", True), "{}-unknown-linux-gnu".format(d.getVar("HOST_ARCH", True))]
- hosts = ["{}-unknown-linux-gnu".format(d.getVar("HOST_ARCH", True))]
-}
-
-INSANE_SKIP:${RUSTLIB_TARGET_PN} = "file-rdeps arch ldflags"
-SKIP_FILEDEPS:${RUSTLIB_TARGET_PN} = "1"
-
-INHIBIT_PACKAGE_DEBUG_SPLIT = "1"
-INHIBIT_PACKAGE_STRIP = "1"
-INHIBIT_SYSROOT_STRIP = "1"
-
diff --git a/meta/recipes-devtools/rust/rust-cross-canadian.inc b/meta/recipes-devtools/rust/rust-cross-canadian.inc
index 8bbbd61bdc..7bfef6d175 100644
--- a/meta/recipes-devtools/rust/rust-cross-canadian.inc
+++ b/meta/recipes-devtools/rust/rust-cross-canadian.inc
@@ -1,78 +1,91 @@
+SUMMARY = "Rust compiler and runtime libaries (cross-canadian for ${TARGET_ARCH} target)"
+PN = "rust-cross-canadian-${TRANSLATED_TARGET_ARCH}"
-require rust-cross-canadian-common.inc
+inherit rust-target-config
+inherit rust-common
-RUSTLIB_TARGET_PN = "rust-cross-canadian-rustlib-target-${TRANSLATED_TARGET_ARCH}"
-RUSTLIB_HOST_PN = "rust-cross-canadian-rustlib-host-${TRANSLATED_TARGET_ARCH}"
-RUSTLIB_SRC_PN = "rust-cross-canadian-src"
-RUSTLIB_PKGS = "${RUSTLIB_SRC_PN} ${RUSTLIB_TARGET_PN} ${RUSTLIB_HOST_PN}"
-PN = "rust-cross-canadian-${TRANSLATED_TARGET_ARCH}"
+LICENSE = "MIT"
+
+MODIFYTOS = "0"
+
+DEPENDS += "virtual/${SDK_PREFIX}gcc virtual/nativesdk-libc virtual/nativesdk-${SDK_PREFIX}compilerlibs"
-PACKAGES = "${RUSTLIB_PKGS} ${PN}"
-RDEPENDS:${PN} += "${RUSTLIB_PKGS}"
+SRC_URI += "file://target-rust-ccld.c"
+LIC_FILES_CHKSUM = "file://target-rust-ccld.c;md5=af4e0e29f81a34cffe05aa07c89e93e9;endline=7"
+S = "${WORKDIR}"
-# The default behaviour of x.py changed in 1.47+ so now we need to
-# explicitly ask for the stage 2 compiler to be assembled.
-do_compile () {
- rust_runx build --stage 2
+# Need to use our SDK's sh here, see #14878
+create_sdk_wrapper () {
+ file="$1"
+ shift
+ cat <<- EOF > "${file}"
+ #!/bin/sh
+ \$$1 \$@
+ EOF
+
+ chmod +x "$file"
}
do_install () {
# Rust requires /usr/lib to contain the libs.
- # Similar story is with /usr/bin ruquiring `lib` to be at the same level.
# The required structure is retained for simplicity.
SYS_LIBDIR=$(dirname ${D}${libdir})
SYS_BINDIR=$(dirname ${D}${bindir})
RUSTLIB_DIR=${SYS_LIBDIR}/${TARGET_SYS}/rustlib
- install -d "${SYS_BINDIR}"
- cp build/${SNAPSHOT_BUILD_SYS}/stage2/bin/* ${SYS_BINDIR}
- for i in ${SYS_BINDIR}/*; do
- chrpath -r "\$ORIGIN/../lib" ${i}
- done
-
- install -d "${D}${libdir}"
- cp -pRd build/${SNAPSHOT_BUILD_SYS}/stage2/lib/${TARGET_SYS}/*.so ${SYS_LIBDIR}
- cp -pRd build/${SNAPSHOT_BUILD_SYS}/stage2/lib/${TARGET_SYS}/rustlib ${RUSTLIB_DIR}
-
- for i in ${SYS_LIBDIR}/*.so; do
- chrpath -r "\$ORIGIN/../lib" ${i}
- done
- for i in ${RUSTLIB_DIR}/*/lib/*.so; do
- chrpath -d ${i}
- done
-
- install -m 0644 "${WORKDIR}/targets/${TARGET_SYS}.json" "${RUSTLIB_DIR}"
-
- SRC_DIR=${RUSTLIB_DIR}/src/rust
- install -d ${SRC_DIR}/src/llvm-project
- cp -R --no-dereference build/${SNAPSHOT_BUILD_SYS}/stage2/lib/rustlib/src/rust/src/llvm-project/libunwind ${SRC_DIR}/src/llvm-project
- cp -R --no-dereference build/${SNAPSHOT_BUILD_SYS}/stage2/lib/rustlib/src/rust/library ${SRC_DIR}
- cp --no-dereference build/${SNAPSHOT_BUILD_SYS}/stage2/lib/rustlib/src/rust/Cargo.lock ${SRC_DIR}
- # Remove executable bit from any files so then SDK doesn't try to relocate.
- chmod -R -x+X ${SRC_DIR}
+ install -d ${RUSTLIB_DIR}
+ install -m 0644 "${RUST_TARGETS_DIR}/${RUST_HOST_SYS}.json" "${RUSTLIB_DIR}"
+ install -m 0644 "${RUST_TARGETS_DIR}/${RUST_TARGET_SYS}.json" "${RUSTLIB_DIR}"
+
+ # Uses SDK's CC as linker so linked binaries works out of box.
+ # We have a problem as rust sets LD_LIBRARY_PATH and this will break running host
+ # binaries (even /bin/sh) in the SDK as they detect a newer glibc from the SDK
+ # in those paths and we hit symbol errors. We saw particular problems with symbol
+ # mismatch on ubuntu1804 during development. To avoid this we have an SDK built
+ # binary which unsets LD_LIBRARY_PATH, which can then call the wrapper script
+ # where the context is easier to do the env maniupations needed
+ install -d ${SYS_BINDIR}
+ outfile="${SYS_BINDIR}/target-rust-ccld"
+ ${CC} ${WORKDIR}/target-rust-ccld.c -o $outfile
+ chmod +x "$outfile"
+ create_sdk_wrapper "${SYS_BINDIR}/target-rust-ccld-wrapper" "CC"
ENV_SETUP_DIR=${D}${base_prefix}/environment-setup.d
mkdir "${ENV_SETUP_DIR}"
- ENV_SETUP_SH="${ENV_SETUP_DIR}/rust.sh"
+ RUST_ENV_SETUP_SH="${ENV_SETUP_DIR}/rust.sh"
+
+ RUST_TARGET_TRIPLE=`echo ${RUST_TARGET_SYS} | tr '[:lower:]' '[:upper:]' | sed 's/-/_/g'`
+ RUST_HOST_TRIPLE=`echo ${RUST_HOST_SYS} | tr '[:lower:]' '[:upper:]' | sed 's/-/_/g'`
+ SDKLOADER=${@bb.utils.contains('SDK_ARCH', 'x86_64', 'ld-linux-x86-64.so.2', '', d)}${@bb.utils.contains('SDK_ARCH', 'i686', 'ld-linux.so.2', '', d)}${@bb.utils.contains('SDK_ARCH', 'aarch64', 'ld-linux-aarch64.so.1', '', d)}${@bb.utils.contains('SDK_ARCH', 'ppc64le', 'ld64.so.2', '', d)}${@bb.utils.contains('SDK_ARCH', 'riscv64', 'ld-linux-riscv64-lp64d.so.1', '', d)}
- cat <<- EOF > "${ENV_SETUP_SH}"
- export RUSTFLAGS="--sysroot=\$OECORE_NATIVE_SYSROOT/usr -C link-arg=--sysroot=\$OECORE_TARGET_SYSROOT -L\$OECORE_NATIVE_SYSROOT/usr/lib/${TARGET_SYS}/rustlib/${TARGET_SYS}/lib"
+ cat <<- EOF > "${RUST_ENV_SETUP_SH}"
+ export CARGO_TARGET_${RUST_TARGET_TRIPLE}_RUSTFLAGS="--sysroot=\$OECORE_TARGET_SYSROOT/usr -C link-arg=--sysroot=\$OECORE_TARGET_SYSROOT"
+ export CARGO_TARGET_${RUST_HOST_TRIPLE}_RUNNER="\$OECORE_NATIVE_SYSROOT/lib/${SDKLOADER}"
export RUST_TARGET_PATH="\$OECORE_NATIVE_SYSROOT/usr/lib/${TARGET_SYS}/rustlib"
EOF
chown -R root.root ${D}
+
+ CARGO_ENV_SETUP_SH="${ENV_SETUP_DIR}/cargo.sh"
+ cat <<- EOF > "${CARGO_ENV_SETUP_SH}"
+ export CARGO_HOME="\$OECORE_TARGET_SYSROOT/home/cargo"
+ mkdir -p "\$CARGO_HOME"
+ # Init the default target once, it might be otherwise user modified.
+ if [ ! -f "\$CARGO_HOME/config" ]; then
+ touch "\$CARGO_HOME/config"
+ echo "[build]" >> "\$CARGO_HOME/config"
+ echo 'target = "'${RUST_TARGET_SYS}'"' >> "\$CARGO_HOME/config"
+ echo '# TARGET_SYS' >> "\$CARGO_HOME/config"
+ echo '[target.'${RUST_TARGET_SYS}']' >> "\$CARGO_HOME/config"
+ echo 'linker = "target-rust-ccld"' >> "\$CARGO_HOME/config"
+ fi
+
+ # Keep the below off as long as HTTP/2 is disabled.
+ export CARGO_HTTP_MULTIPLEXING=false
+
+ export CARGO_HTTP_CAINFO="\$OECORE_NATIVE_SYSROOT/etc/ssl/certs/ca-certificates.crt"
+ EOF
}
-PKG_SYS_LIBDIR = "${SDKPATHNATIVE}/usr/lib"
-PKG_SYS_BINDIR = "${SDKPATHNATIVE}/usr/bin"
-PKG_RUSTLIB_DIR = "${PKG_SYS_LIBDIR}/${TARGET_SYS}/rustlib"
-FILES:${PN} = "${PKG_SYS_LIBDIR}/*.so ${PKG_SYS_BINDIR} ${base_prefix}/environment-setup.d"
-FILES:${RUSTLIB_TARGET_PN} = "${PKG_RUSTLIB_DIR}/${TARGET_SYS} ${PKG_RUSTLIB_DIR}/${TARGET_SYS}.json"
-FILES:${RUSTLIB_HOST_PN} = "${PKG_RUSTLIB_DIR}/${BUILD_ARCH}-unknown-linux-gnu"
-FILES:${RUSTLIB_SRC_PN} = "${PKG_RUSTLIB_DIR}/src"
-
-SUMMARY:${RUSTLIB_TARGET_PN} = "Rust cross canadian libaries for ${TARGET_SYS}"
-SUMMARY:${RUSTLIB_HOST_PN} = "Rust cross canadian libaries for ${HOST_SYS}"
-SUMMARY:${RUSTLIB_SRC_PN} = "Rust standard library sources for cross canadian toolchain"
-SUMMARY:${PN} = "Rust crost canadian compiler"
+FILES:${PN} += "${base_prefix}/environment-setup.d"
diff --git a/meta/recipes-devtools/rust/rust-cross-canadian_1.60.0.bb b/meta/recipes-devtools/rust/rust-cross-canadian_1.60.0.bb
deleted file mode 100644
index 766912c019..0000000000
--- a/meta/recipes-devtools/rust/rust-cross-canadian_1.60.0.bb
+++ /dev/null
@@ -1,6 +0,0 @@
-require rust-cross-canadian.inc
-require rust-source.inc
-require rust-snapshot.inc
-
-FILESEXTRAPATHS:prepend := "${THISDIR}/rust:"
-
diff --git a/meta/recipes-devtools/rust/rust-cross-canadian_1.75.0.bb b/meta/recipes-devtools/rust/rust-cross-canadian_1.75.0.bb
new file mode 100644
index 0000000000..55865238ab
--- /dev/null
+++ b/meta/recipes-devtools/rust/rust-cross-canadian_1.75.0.bb
@@ -0,0 +1,2 @@
+inherit cross-canadian
+require rust-cross-canadian.inc \ No newline at end of file
diff --git a/meta/recipes-devtools/rust/rust-cross.inc b/meta/recipes-devtools/rust/rust-cross.inc
deleted file mode 100644
index f6babfeeda..0000000000
--- a/meta/recipes-devtools/rust/rust-cross.inc
+++ /dev/null
@@ -1,66 +0,0 @@
-python do_rust_gen_targets () {
- wd = d.getVar('WORKDIR') + '/targets/'
- # It is important 'TARGET' is last here so that it overrides our less
- # informed choices for BUILD & HOST if TARGET happens to be the same as
- # either of them.
- for thing in ['BUILD', 'HOST', 'TARGET']:
- bb.debug(1, "rust_gen_target for " + thing)
- features = ""
- cpu = "generic"
- arch = d.getVar('{}_ARCH'.format(thing))
- abi = ""
- if thing is "TARGET":
- abi = d.getVar('ABIEXTENSION')
- # arm and armv7 have different targets in llvm
- if arch == "arm" and target_is_armv7(d):
- arch = 'armv7'
- features = d.getVar('TARGET_LLVM_FEATURES') or ""
- cpu = d.getVar('TARGET_LLVM_CPU')
- rust_gen_target(d, thing, wd, features, cpu, arch, abi)
-}
-
-# Otherwise we'll depend on what we provide
-INHIBIT_DEFAULT_RUST_DEPS = "1"
-
-# Unlike native (which nicely maps it's DEPENDS) cross wipes them out completely.
-# Generally, we (and cross in general) need the same things that native needs,
-# so it might make sense to take it's mapping. For now, though, we just mention
-# the bits we need explicitly.
-DEPENDS += "rust-llvm-native"
-DEPENDS += "rust-native"
-
-# In the cross compilation case, rustc doesn't seem to get the rpath quite
-# right. It manages to include '../../lib/${TARGET_PREFIX}', but doesn't
-# include the '../../lib' (ie: relative path from cross_bindir to normal
-# libdir. As a result, we end up not being able to properly reference files in normal ${libdir}.
-# Most of the time this happens to work fine as the systems libraries are
-# subsituted, but sometimes a host system will lack a library, or the right
-# version of a library (libtinfo was how I noticed this).
-#
-# FIXME: this should really be fixed in rust itself.
-# FIXME: using hard-coded relative paths is wrong, we should ask bitbake for
-# the relative path between 2 of it's vars.
-HOST_POST_LINK_ARGS:append = " -Wl,-rpath=../../lib"
-BUILD_POST_LINK_ARGS:append = " -Wl,-rpath=../../lib"
-
-# We need the same thing for the calls to the compiler when building the runtime crap
-TARGET_CC_ARCH:append = " --sysroot=${STAGING_DIR_TARGET}"
-
-do_rust_setup_snapshot () {
-}
-
-do_configure () {
-}
-
-do_compile () {
-}
-
-do_install () {
- mkdir -p ${D}${prefix}/${base_libdir_native}/rustlib
- cp ${WORKDIR}/targets/${TARGET_SYS}.json ${D}${prefix}/${base_libdir_native}/rustlib
-}
-
-rust_cross_sysroot_preprocess() {
- sysroot_stage_dir ${D}${prefix}/${base_libdir_native}/rustlib ${SYSROOT_DESTDIR}${prefix}/${base_libdir_native}/rustlib
-}
-SYSROOT_PREPROCESS_FUNCS += "rust_cross_sysroot_preprocess"
diff --git a/meta/recipes-devtools/rust/rust-cross_1.60.0.bb b/meta/recipes-devtools/rust/rust-cross_1.60.0.bb
deleted file mode 100644
index 5358d98da8..0000000000
--- a/meta/recipes-devtools/rust/rust-cross_1.60.0.bb
+++ /dev/null
@@ -1,8 +0,0 @@
-require rust.inc
-inherit cross
-require rust-cross.inc
-require rust-source.inc
-
-DEPENDS += "virtual/${TARGET_PREFIX}gcc virtual/${TARGET_PREFIX}compilerlibs virtual/libc"
-PROVIDES = "virtual/${TARGET_PREFIX}rust"
-PN = "rust-cross-${TUNE_PKGARCH}-${TCLIBC}"
diff --git a/meta/recipes-devtools/rust/rust-crosssdk_1.60.0.bb b/meta/recipes-devtools/rust/rust-crosssdk_1.60.0.bb
deleted file mode 100644
index 6ea8cb09b2..0000000000
--- a/meta/recipes-devtools/rust/rust-crosssdk_1.60.0.bb
+++ /dev/null
@@ -1,8 +0,0 @@
-require rust.inc
-inherit crosssdk
-require rust-cross.inc
-require rust-source.inc
-
-DEPENDS += "virtual/${TARGET_PREFIX}gcc-crosssdk virtual/nativesdk-${TARGET_PREFIX}compilerlibs virtual/nativesdk-libc"
-PROVIDES = "virtual/nativesdk-${TARGET_PREFIX}rust"
-PN = "rust-crosssdk-${TUNE_PKGARCH}-${RUST_LIBC}"
diff --git a/meta/recipes-devtools/rust/rust-llvm.inc b/meta/recipes-devtools/rust/rust-llvm.inc
deleted file mode 100644
index 9baad12dc8..0000000000
--- a/meta/recipes-devtools/rust/rust-llvm.inc
+++ /dev/null
@@ -1,71 +0,0 @@
-SUMMARY = "LLVM compiler framework (packaged with rust)"
-LICENSE ?= "Apache-2.0-with-LLVM-exception"
-HOMEPAGE = "http://www.rust-lang.org"
-
-SRC_URI += "file://0002-llvm-allow-env-override-of-exe-path.patch;striplevel=2 \
- file://0001-AsmMatcherEmitter-sort-ClassInfo-lists-by-name-as-we.patch;striplevel=2 \
- file://0003-llvm-fix-include-benchmarks.patch;striplevel=2"
-
-S = "${RUSTSRC}/src/llvm-project/llvm"
-
-LIC_FILES_CHKSUM = "file://LICENSE.TXT;md5=8a15a0759ef07f2682d2ba4b893c9afe"
-
-inherit cmake python3native
-
-DEPENDS += "ninja-native rust-llvm-native"
-
-ARM_INSTRUCTION_SET:armv5 = "arm"
-ARM_INSTRUCTION_SET:armv4t = "arm"
-
-# rustc_llvm with debug info is not recognized as a valid crate that's
-# generated by rust-llvm-native.
-CFLAGS:remove = "-g"
-CXXFLAGS:remove = "-g"
-
-LLVM_DIR = "llvm${LLVM_RELEASE}"
-
-EXTRA_OECMAKE = " \
- -DCMAKE_BUILD_TYPE=Release \
- -DLLVM_TARGETS_TO_BUILD='ARM;AArch64;Mips;PowerPC;RISCV;X86' \
- -DLLVM_BUILD_DOCS=OFF \
- -DLLVM_ENABLE_TERMINFO=OFF \
- -DLLVM_ENABLE_ZLIB=OFF \
- -DLLVM_ENABLE_LIBXML2=OFF \
- -DLLVM_ENABLE_FFI=OFF \
- -DLLVM_INSTALL_UTILS=ON \
- -DLLVM_BUILD_EXAMPLES=OFF \
- -DLLVM_INCLUDE_EXAMPLES=OFF \
- -DLLVM_BUILD_TESTS=OFF \
- -DLLVM_INCLUDE_TESTS=OFF \
- -DLLVM_TARGET_ARCH=${TARGET_ARCH} \
- -DCMAKE_INSTALL_PREFIX:PATH=${libdir}/llvm-rust \
-"
-EXTRA_OECMAKE:append:class-target = "\
- -DCMAKE_CROSSCOMPILING:BOOL=ON \
- -DLLVM_BUILD_TOOLS=OFF \
- -DLLVM_TABLEGEN=${STAGING_LIBDIR_NATIVE}/llvm-rust/bin/llvm-tblgen \
- -DLLVM_CONFIG_PATH=${STAGING_LIBDIR_NATIVE}/llvm-rust/bin/llvm-config \
-"
-
-# The debug symbols are huge here (>2GB) so suppress them since they
-# provide almost no value. If you really need them then override this
-INHIBIT_PACKAGE_DEBUG_SPLIT = "1"
-
-export YOCTO_ALTERNATE_EXE_PATH = "${STAGING_LIBDIR}/llvm-rust/bin/llvm-config"
-
-do_install:append () {
- # we don't need any of this stuff to build Rust
- rm -rf "${D}/usr/lib/cmake"
-}
-
-PACKAGES =+ "${PN}-bugpointpasses ${PN}-llvmhello ${PN}-liblto"
-
-# Add the extra locations to avoid the complaints about unpackaged files
-FILES:${PN}-bugpointpasses = "${libdir}/llvm-rust/lib/BugpointPasses.so"
-FILES:${PN}-llvmhello = "${libdir}/llvm-rust/lib/LLVMHello.so"
-FILES:${PN}-liblto = "${libdir}/llvm-rust/lib/libLTO.so.*"
-FILES:${PN}-staticdev =+ "${libdir}/llvm-rust/*/*.a"
-FILES:${PN} += "${libdir}/libLLVM*.so.* ${libdir}/llvm-rust/lib/*.so.* ${libdir}/llvm-rust/bin"
-FILES:${PN}-dev += "${datadir}/llvm ${libdir}/llvm-rust/lib/*.so ${libdir}/llvm-rust/include ${libdir}/llvm-rust/share ${libdir}/llvm-rust/lib/cmake"
-
-BBCLASSEXTEND = "native"
diff --git a/meta/recipes-devtools/rust/rust-llvm_1.60.0.bb b/meta/recipes-devtools/rust/rust-llvm_1.60.0.bb
deleted file mode 100644
index 5b94e22f7b..0000000000
--- a/meta/recipes-devtools/rust/rust-llvm_1.60.0.bb
+++ /dev/null
@@ -1,6 +0,0 @@
-# check src/llvm-project/llvm/CMakeLists.txt for llvm version in use
-#
-LLVM_RELEASE = "13.0.0"
-require rust-source.inc
-require rust-llvm.inc
-
diff --git a/meta/recipes-devtools/rust/rust-llvm_1.75.0.bb b/meta/recipes-devtools/rust/rust-llvm_1.75.0.bb
new file mode 100644
index 0000000000..13bdadb5e7
--- /dev/null
+++ b/meta/recipes-devtools/rust/rust-llvm_1.75.0.bb
@@ -0,0 +1,94 @@
+SUMMARY = "LLVM compiler framework (packaged with rust)"
+LICENSE ?= "Apache-2.0-with-LLVM-exception"
+HOMEPAGE = "http://www.rust-lang.org"
+
+# check src/llvm-project/llvm/CMakeLists.txt for llvm version in use
+#
+LLVM_RELEASE = "17.0.6"
+
+require rust-source.inc
+
+SRC_URI += "file://0002-llvm-allow-env-override-of-exe-path.patch;striplevel=2 \
+ file://0001-AsmMatcherEmitter-sort-ClassInfo-lists-by-name-as-we.patch;striplevel=2 \
+ file://0003-llvm-fix-include-benchmarks.patch;striplevel=2"
+
+S = "${RUSTSRC}/src/llvm-project/llvm"
+
+LIC_FILES_CHKSUM = "file://LICENSE.TXT;md5=8a15a0759ef07f2682d2ba4b893c9afe"
+
+inherit cmake
+
+DEPENDS += "ninja-native rust-llvm-native"
+
+ARM_INSTRUCTION_SET:armv5 = "arm"
+ARM_INSTRUCTION_SET:armv4t = "arm"
+
+# rustc_llvm with debug info is not recognized as a valid crate that's
+# generated by rust-llvm-native.
+CFLAGS:remove = "-g"
+CXXFLAGS:remove = "-g"
+
+LLVM_DIR = "llvm${LLVM_RELEASE}"
+
+RUST_LLVM_TARGETS ?= "ARM;AArch64;Mips;PowerPC;RISCV;X86"
+
+EXTRA_OECMAKE = " \
+ -DCMAKE_BUILD_TYPE=Release \
+ -DLLVM_TARGETS_TO_BUILD='${RUST_LLVM_TARGETS}' \
+ -DLLVM_BUILD_DOCS=OFF \
+ -DLLVM_ENABLE_TERMINFO=OFF \
+ -DLLVM_ENABLE_ZLIB=OFF \
+ -DLLVM_ENABLE_ZSTD=OFF \
+ -DLLVM_ENABLE_LIBXML2=OFF \
+ -DLLVM_ENABLE_FFI=OFF \
+ -DLLVM_INSTALL_UTILS=ON \
+ -DLLVM_BUILD_EXAMPLES=OFF \
+ -DLLVM_INCLUDE_EXAMPLES=OFF \
+ -DLLVM_BUILD_TESTS=OFF \
+ -DLLVM_INCLUDE_TESTS=OFF \
+ -DLLVM_TARGET_ARCH=${TARGET_ARCH} \
+ -DCMAKE_INSTALL_PREFIX:PATH=${libdir}/llvm-rust \
+"
+
+# Forcibly disable the detection of these packages as otherwise
+# it will look at the host Python install
+EXTRA_OECMAKE += "\
+ -DPY_PYGMENTS_FOUND=OFF \
+ -DPY_PYGMENTS_LEXERS_C_CPP_FOUND=OFF \
+ -DPY_YAML_FOUND=OFF \
+"
+
+EXTRA_OECMAKE:append:class-target = "\
+ -DLLVM_BUILD_TOOLS=OFF \
+ -DLLVM_TABLEGEN=${STAGING_LIBDIR_NATIVE}/llvm-rust/bin/llvm-tblgen \
+ -DLLVM_CONFIG_PATH=${STAGING_LIBDIR_NATIVE}/llvm-rust/bin/llvm-config \
+"
+
+EXTRA_OECMAKE:append:class-nativesdk = "\
+ -DLLVM_BUILD_TOOLS=OFF \
+ -DLLVM_TABLEGEN=${STAGING_LIBDIR_NATIVE}/llvm-rust/bin/llvm-tblgen \
+ -DLLVM_CONFIG_PATH=${STAGING_LIBDIR_NATIVE}/llvm-rust/bin/llvm-config \
+"
+
+# The debug symbols are huge here (>2GB) so suppress them since they
+# provide almost no value. If you really need them then override this
+INHIBIT_PACKAGE_DEBUG_SPLIT = "1"
+
+export YOCTO_ALTERNATE_EXE_PATH = "${STAGING_LIBDIR}/llvm-rust/bin/llvm-config"
+
+do_install:append () {
+ # we don't need any of this stuff to build Rust
+ rm -rf "${D}/usr/lib/cmake"
+}
+
+PACKAGES =+ "${PN}-bugpointpasses ${PN}-llvmhello ${PN}-liblto"
+
+# Add the extra locations to avoid the complaints about unpackaged files
+FILES:${PN}-bugpointpasses = "${libdir}/llvm-rust/lib/BugpointPasses.so"
+FILES:${PN}-llvmhello = "${libdir}/llvm-rust/lib/LLVMHello.so"
+FILES:${PN}-liblto = "${libdir}/llvm-rust/lib/libLTO.so.*"
+FILES:${PN}-staticdev =+ "${libdir}/llvm-rust/*/*.a"
+FILES:${PN} += "${libdir}/libLLVM*.so.* ${libdir}/llvm-rust/lib/*.so.* ${libdir}/llvm-rust/bin"
+FILES:${PN}-dev += "${datadir}/llvm ${libdir}/llvm-rust/lib/*.so ${libdir}/llvm-rust/include ${libdir}/llvm-rust/share ${libdir}/llvm-rust/lib/cmake"
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/rust/rust-snapshot.inc b/meta/recipes-devtools/rust/rust-snapshot.inc
index c69f34141b..a52cb30c48 100644
--- a/meta/recipes-devtools/rust/rust-snapshot.inc
+++ b/meta/recipes-devtools/rust/rust-snapshot.inc
@@ -1,32 +1,58 @@
## This is information on the rust-snapshot (binary) used to build our current release.
-## snapshot info is taken from rust/src/stage0.txt
+## snapshot info is taken from rust/src/stage0.json
## Rust is self-hosting and bootstraps itself with a pre-built previous version of itself.
## The exact (previous) version that has been used is specified in the source tarball.
## The version is replicated here.
-## TODO: find a way to add additional SRC_URIs based on the contents of an
-## earlier SRC_URI.
-RS_VERSION = "1.59.0"
-CARGO_VERSION = "1.59.0"
-# TODO: Add hashes for other architecture toolchains as well. Make a script?
-SRC_URI[rust-std-snapshot-x86_64.sha256sum] = "c854a9ee3dd8e5be9522c1581f75838c1cbae6dece3934b0004f138c4a5024a3"
-SRC_URI[rustc-snapshot-x86_64.sha256sum] = "838de1fef855ef7733a87862c2575e8da9f3fa11fd0a8ce05c293038ea92356e"
-SRC_URI[cargo-snapshot-x86_64.sha256sum] = "f56ebfb333ea46e4429377bf4b16a2ec889d61640a41c3093577cdd8f3c80b96"
+SNAPSHOT_VERSION = "1.74.0"
-SRC_URI[rust-std-snapshot-aarch64.sha256sum] = "68e50dee4f6dddeab7330906e46022f57f2c004c847eae3f5b1bc82c59e43fc0"
-SRC_URI[rustc-snapshot-aarch64.sha256sum] = "d9789013ef6edd76eae3e7427b48f420a036ab3ee2af883e60baa33a9e1c23d7"
-SRC_URI[cargo-snapshot-aarch64.sha256sum] = "11b8da4b90ff74c6c796a3d6c1f5150de23c411ed2546e10b301077b904191fd"
+SRC_URI[cargo-snapshot-aarch64.sha256sum] = "a18dc9132cf76ccba90bcbb53b56a4d37ebfb34845f61e79f7b5d4710a269647"
+SRC_URI[rust-std-snapshot-aarch64.sha256sum] = "c5ad01692bc08ce6f4db2ac815be63498b45013380c71f22b3d33bf3be767270"
+SRC_URI[rustc-snapshot-aarch64.sha256sum] = "a49bb365481913ead305658e7e9dc621da7895036b840fb57b1bc85c721d07e6"
-SRC_URI[rust-std-snapshot-powerpc64le.sha256sum] = "15dee7705967e3351aff11c64ada30c4957b54066c7ee49b87be4b8155bd0a7d"
-SRC_URI[rustc-snapshot-powerpc64le.sha256sum] = "e5458f55e51f8baecab4ff1e43fae5c6e1c40d8171098ab633747ee0684b37bb"
-SRC_URI[cargo-snapshot-powerpc64le.sha256sum] = "fdab44481286db3ea8f4b6d409e648355009ef458188352d5e2c5799f7614fbd"
+SRC_URI[cargo-snapshot-i686.sha256sum] = "9f5b5226a69f95950a381ec5bb15dde7a90865a6df8aa0b470082a40d42d9f38"
+SRC_URI[rust-std-snapshot-i686.sha256sum] = "69757b72def9c433753e8bb575c817fc1ba389cf1a9c25276db1491ec025e495"
+SRC_URI[rustc-snapshot-i686.sha256sum] = "7a2bc1bf7e51942d32e82f461eacebe7f929c3eec210dcb7dc6624efd997d7da"
+
+SRC_URI[cargo-snapshot-loongarch64.sha256sum] = "77d6d55122150d8fc56d31fb166fd1b2ae48bff7376459c1b0030727fc604998"
+SRC_URI[rust-std-snapshot-loongarch64.sha256sum] = "13b85a882e912d0d8b3228feb5c263d34ec353d483c9defbd3e6bba38935553b"
+SRC_URI[rustc-snapshot-loongarch64.sha256sum] = "703e8c81f9ca3100fc459db92fd5899de62cf77393f334f98159cd97feb11633"
+
+SRC_URI[cargo-snapshot-powerpc.sha256sum] = "08ea8a345839f34d26f21b94ed6d458e6a38513999f7ddc05175c371983e6deb"
+SRC_URI[rust-std-snapshot-powerpc.sha256sum] = "458ee056fbeccf1cf96c20506654e5e9104c4e8f23d46cd4bb9b97ff5b3f4d55"
+SRC_URI[rustc-snapshot-powerpc.sha256sum] = "d4095cbe26ec197274dae9409e68843653e8c08c0b79e8cd74e72d9907e99816"
+
+SRC_URI[cargo-snapshot-powerpc64.sha256sum] = "696863642318f139634e6856f5e946ea970318ce79d4d9b1595871a70a662a89"
+SRC_URI[rust-std-snapshot-powerpc64.sha256sum] = "7ec56629b7d887753ce3a895fb73b77d2d395acac30207c2b69237ef63279872"
+SRC_URI[rustc-snapshot-powerpc64.sha256sum] = "ca162463db262df9d646687386a1c19f15c8ca9bf1f29eea94f2a8a6d7a6102d"
+
+SRC_URI[cargo-snapshot-powerpc64le.sha256sum] = "2eccd404aabe5137a8e45b6173c27d08862a0e674d5866be71aff1434f271d50"
+SRC_URI[rust-std-snapshot-powerpc64le.sha256sum] = "785956d68855de18546c87d6d06cd2505cb8a10edba84327bf2b448420a31d55"
+SRC_URI[rustc-snapshot-powerpc64le.sha256sum] = "8727b1a92e88ac1ce05198ee185dac86553edd7f50b726781c9ab64544b59809"
+
+SRC_URI[cargo-snapshot-riscv64gc.sha256sum] = "5b224e465e006b5fe959ad64d0df0540c4318ba4e39edd89794d520eef60b026"
+SRC_URI[rust-std-snapshot-riscv64gc.sha256sum] = "2a500156825dde03a53c965e5764a440b1ebce973b8a31f21e8bd8104271d56e"
+SRC_URI[rustc-snapshot-riscv64gc.sha256sum] = "f4f27f1c40208b61ea7e61f9edf2de1787aea78a1edb7fe15bceb20de5c7a4a3"
+
+SRC_URI[cargo-snapshot-s390x.sha256sum] = "06267377c811271d6e4ba6feea1d4b84a9f4c5c8d1dbd46092d0a0595f24e9b6"
+SRC_URI[rust-std-snapshot-s390x.sha256sum] = "35142541b88a1244c8225c64ee18585446d7e67841a9335ccaa95acf2d34dde5"
+SRC_URI[rustc-snapshot-s390x.sha256sum] = "41eae7788549aec58a6980ae6222d3330a01a37d1e7856d087a4e9c8a19aa890"
+
+SRC_URI[cargo-snapshot-x86_64.sha256sum] = "f219386d4569c40b660518e99267afff428c13bf980bda7a614c8d4038d013f6"
+SRC_URI[rust-std-snapshot-x86_64.sha256sum] = "548413213012e2f62b08ed8a913a51210ae7402619027224580176031f2789ea"
+SRC_URI[rustc-snapshot-x86_64.sha256sum] = "7d464be2ae0d6ce69f056d1ea9a8ce2b3b1d537418caea216fdd303903972181"
+
+SRC_URI[rust-std-snapshot-i586.sha256sum] = "bd4502462c5e2b2617b23f28862e544f14c4d02658f6d331f0cfbbba914aa4c0"
+
+SRC_URI[rust-std-snapshot-sparc64.sha256sum] = "68e96875ca7fc6ed0e023fcf752f28b95e9cc7d9881af4e8e167259fdaec7168"
SRC_URI += " \
- https://static.rust-lang.org/dist/${RUST_STD_SNAPSHOT}.tar.xz;name=rust-std-snapshot-${RUST_BUILD_ARCH};subdir=rust-snapshot-components \
- https://static.rust-lang.org/dist/${RUSTC_SNAPSHOT}.tar.xz;name=rustc-snapshot-${RUST_BUILD_ARCH};subdir=rust-snapshot-components \
- https://static.rust-lang.org/dist/${CARGO_SNAPSHOT}.tar.xz;name=cargo-snapshot-${RUST_BUILD_ARCH};subdir=rust-snapshot-components \
+ ${RUST_DIST_SERVER}/dist/${RUST_STD_SNAPSHOT}.tar.xz;name=rust-std-snapshot-${RUST_BUILD_ARCH};subdir=rust-snapshot-components \
+ ${RUST_DIST_SERVER}/dist/${RUSTC_SNAPSHOT}.tar.xz;name=rustc-snapshot-${RUST_BUILD_ARCH};subdir=rust-snapshot-components \
+ ${RUST_DIST_SERVER}/dist/${CARGO_SNAPSHOT}.tar.xz;name=cargo-snapshot-${RUST_BUILD_ARCH};subdir=rust-snapshot-components \
"
-RUST_STD_SNAPSHOT = "rust-std-${RS_VERSION}-${RUST_BUILD_ARCH}-unknown-linux-gnu"
-RUSTC_SNAPSHOT = "rustc-${RS_VERSION}-${RUST_BUILD_ARCH}-unknown-linux-gnu"
-CARGO_SNAPSHOT = "cargo-${CARGO_VERSION}-${RUST_BUILD_ARCH}-unknown-linux-gnu"
+RUST_DIST_SERVER = "https://static.rust-lang.org"
+RUST_STD_SNAPSHOT = "rust-std-${SNAPSHOT_VERSION}-${RUST_BUILD_ARCH}-unknown-linux-gnu"
+RUSTC_SNAPSHOT = "rustc-${SNAPSHOT_VERSION}-${RUST_BUILD_ARCH}-unknown-linux-gnu"
+CARGO_SNAPSHOT = "cargo-${SNAPSHOT_VERSION}-${RUST_BUILD_ARCH}-unknown-linux-gnu"
diff --git a/meta/recipes-devtools/rust/rust-source.inc b/meta/recipes-devtools/rust/rust-source.inc
index f6f8b4bab5..b14221b6cb 100644
--- a/meta/recipes-devtools/rust/rust-source.inc
+++ b/meta/recipes-devtools/rust/rust-source.inc
@@ -1,7 +1,24 @@
-SRC_URI += "https://static.rust-lang.org/dist/rustc-${PV}-src.tar.xz;name=rust"
-SRC_URI[rust.sha256sum] = "a025876deccbcb3f288d8e02623ea321f94623f31305d3c5c6f17855bb9685db"
+RUST_VERSION ?= "${@d.getVar('PV').split('-')[0]}"
-RUSTSRC = "${WORKDIR}/rustc-${PV}-src"
+SRC_URI += "https://static.rust-lang.org/dist/rustc-${RUST_VERSION}-src.tar.xz;name=rust \
+ file://hardcodepaths.patch;patchdir=${RUSTSRC} \
+ file://zlib-off64_t.patch;patchdir=${RUSTSRC} \
+ file://0001-Revert-Map-source-absolute-paths-to-OUT_DIR-as-relat.patch;patchdir=${RUSTSRC} \
+ file://rv32-missing-syscalls.patch;patchdir=${RUSTSRC} \
+ file://rv32-rustix-libc-backend.patch;patchdir=${RUSTSRC} \
+ file://rv32-cargo-rustix-0.38.19-fix.patch;patchdir=${RUSTSRC} \
+ file://cargo-path.patch;patchdir=${RUSTSRC} \
+ file://custom-target-cfg.patch;patchdir=${RUSTSRC} \
+ file://rustc-bootstrap.patch;patchdir=${RUSTSRC} \
+ file://target-build-value.patch;patchdir=${RUSTSRC} \
+ file://0001-Handle-vendored-sources-when-remapping-paths.patch;patchdir=${RUSTSRC} \
+ file://repro-issue-fix-with-v175.patch;patchdir=${RUSTSRC} \
+"
+SRC_URI[rust.sha256sum] = "4526f786d673e4859ff2afa0bab2ba13c918b796519a25c1acce06dba9542340"
+
+RUSTSRC = "${WORKDIR}/rustc-${RUST_VERSION}-src"
UPSTREAM_CHECK_URI = "https://forge.rust-lang.org/infra/other-installation-methods.html"
UPSTREAM_CHECK_REGEX = "rustc-(?P<pver>\d+(\.\d+)+)-src"
+
+CVE_STATUS[CVE-2024-24576] = "not-applicable-platform: Issue only applies on Windows"
diff --git a/meta/recipes-devtools/rust/rust-target.inc b/meta/recipes-devtools/rust/rust-target.inc
deleted file mode 100644
index 3f637b3ba5..0000000000
--- a/meta/recipes-devtools/rust/rust-target.inc
+++ /dev/null
@@ -1,10 +0,0 @@
-require rust.inc
-
-DEPENDS += "rust-llvm (=${PV})"
-
-# Otherwise we'll depend on what we provide
-INHIBIT_DEFAULT_RUST_DEPS:class-native = "1"
-# We don't need to depend on gcc-native because yocto assumes it exists
-PROVIDES:class-native = "virtual/${TARGET_PREFIX}rust"
-
-BBCLASSEXTEND = "native"
diff --git a/meta/recipes-devtools/rust/rust-tools-cross-canadian.inc b/meta/recipes-devtools/rust/rust-tools-cross-canadian.inc
deleted file mode 100644
index f0358551ae..0000000000
--- a/meta/recipes-devtools/rust/rust-tools-cross-canadian.inc
+++ /dev/null
@@ -1,38 +0,0 @@
-
-require rust-cross-canadian-common.inc
-
-RUST_TOOLS_CLIPPY_PN = "rust-tools-clippy-cross-canadian-${TRANSLATED_TARGET_ARCH}"
-RUST_TOOLS_RUSTFMT_PN = "rust-tools-rustfmt-cross-canadian-${TRANSLATED_TARGET_ARCH}"
-RUST_TOOLS_PKGS = "${RUST_TOOLS_CLIPPY_PN} ${RUST_TOOLS_RUSTFMT_PN}"
-PN = "rust-tools-cross-canadian-${TRANSLATED_TARGET_ARCH}"
-
-PACKAGES = "${RUST_TOOLS_CLIPPY_PN} ${RUST_TOOLS_RUSTFMT_PN} ${PN}"
-RDEPENDS:${PN} += "${RUST_TOOLS_PKGS}"
-
-do_compile () {
- rust_runx build --stage 2 src/tools/clippy
- rust_runx build --stage 2 src/tools/rustfmt
-}
-
-do_install () {
- SYS_BINDIR=$(dirname ${D}${bindir})
-
- install -d "${SYS_BINDIR}"
- cp build/${SNAPSHOT_BUILD_SYS}/stage2-tools-bin/* ${SYS_BINDIR}
- for i in ${SYS_BINDIR}/*; do
- chrpath -r "\$ORIGIN/../lib" ${i}
- done
-
- chown -R root.root ${D}
-}
-
-ALLOW_EMPTY:${PN} = "1"
-
-PKG_SYS_BINDIR = "${SDKPATHNATIVE}/usr/bin"
-FILES:${RUST_TOOLS_CLIPPY_PN} = "${PKG_SYS_BINDIR}/cargo-clippy ${PKG_SYS_BINDIR}/clippy-driver"
-FILES:${RUST_TOOLS_RUSTFMT_PN} = "${PKG_SYS_BINDIR}/rustfmt"
-
-SUMMARY:${PN} = "Rust helper tools"
-SUMMARY:${RUST_TOOLS_CLIPPY_PN} = "A collection of lints to catch common mistakes and improve your Rust code"
-SUMMARY:${RUST_TOOLS_RUSTFMT_PN} = "A tool for formatting Rust code according to style guidelines"
-
diff --git a/meta/recipes-devtools/rust/rust-tools-cross-canadian_1.60.0.bb b/meta/recipes-devtools/rust/rust-tools-cross-canadian_1.60.0.bb
deleted file mode 100644
index 2d809d68f5..0000000000
--- a/meta/recipes-devtools/rust/rust-tools-cross-canadian_1.60.0.bb
+++ /dev/null
@@ -1,6 +0,0 @@
-require rust-tools-cross-canadian.inc
-require rust-source.inc
-require rust-snapshot.inc
-
-FILESEXTRAPATHS:prepend := "${THISDIR}/rust:"
-
diff --git a/meta/recipes-devtools/rust/rust.inc b/meta/recipes-devtools/rust/rust.inc
deleted file mode 100644
index f39228e3c0..0000000000
--- a/meta/recipes-devtools/rust/rust.inc
+++ /dev/null
@@ -1,202 +0,0 @@
-SUMMARY = "Rust compiler and runtime libaries"
-HOMEPAGE = "http://www.rust-lang.org"
-SECTION = "devel"
-LICENSE = "MIT | Apache-2.0"
-LIC_FILES_CHKSUM = "file://COPYRIGHT;md5=93a95682d51b4cb0a633a97046940ef0"
-
-inherit rust
-inherit cargo_common
-
-DEPENDS += "file-native python3-native"
-DEPENDS:append:class-native = " rust-llvm-native"
-
-S = "${RUSTSRC}"
-
-# We generate local targets, and need to be able to locate them
-export RUST_TARGET_PATH="${WORKDIR}/targets/"
-
-export FORCE_CRATE_HASH="${BB_TASKHASH}"
-
-RUST_ALTERNATE_EXE_PATH ?= "${STAGING_LIBDIR}/llvm-rust/bin/llvm-config"
-export YOCTO_ALTERNATE_EXE_PATH = "${RUST_ALTERNATE_EXE_PATH}"
-export YOCTO_ALTERNATE_MULTILIB_NAME = "/${BASELIB}"
-
-# We don't want to use bitbakes vendoring because the rust sources do their
-# own vendoring.
-CARGO_DISABLE_BITBAKE_VENDORING = "1"
-
-# We can't use RUST_BUILD_SYS here because that may be "musl" if
-# TCLIBC="musl". Snapshots are always -unknown-linux-gnu
-SNAPSHOT_BUILD_SYS = "${RUST_BUILD_ARCH}-unknown-linux-gnu"
-setup_cargo_environment () {
- # The first step is to build bootstrap and some early stage tools,
- # these are build for the same target as the snapshot, e.g.
- # x86_64-unknown-linux-gnu.
- # Later stages are build for the native target (i.e. target.x86_64-linux)
- cargo_common_do_configure
-
- printf '[target.%s]\n' "${SNAPSHOT_BUILD_SYS}" >> ${CARGO_HOME}/config
- printf "linker = '%s'\n" "${RUST_BUILD_CCLD}" >> ${CARGO_HOME}/config
-}
-
-include rust-common.inc
-
-do_rust_setup_snapshot () {
- for installer in "${WORKDIR}/rust-snapshot-components/"*"/install.sh"; do
- "${installer}" --prefix="${WORKDIR}/rust-snapshot" --disable-ldconfig
- done
-
- # Some versions of rust (e.g. 1.18.0) tries to find cargo in stage0/bin/cargo
- # and fail without it there.
- mkdir -p ${RUSTSRC}/build/${BUILD_SYS}
- ln -sf ${WORKDIR}/rust-snapshot/ ${RUSTSRC}/build/${BUILD_SYS}/stage0
-
- # Need to use uninative's loader if enabled/present since the library paths
- # are used internally by rust and result in symbol mismatches if we don't
- if [ ! -z "${UNINATIVE_LOADER}" -a -e "${UNINATIVE_LOADER}" ]; then
- for bin in cargo rustc rustdoc; do
- patchelf-uninative ${WORKDIR}/rust-snapshot/bin/$bin --set-interpreter ${UNINATIVE_LOADER}
- done
- fi
-}
-addtask rust_setup_snapshot after do_unpack before do_configure
-do_rust_setup_snapshot[dirs] += "${WORKDIR}/rust-snapshot"
-do_rust_setup_snapshot[vardepsexclude] += "UNINATIVE_LOADER"
-
-python do_configure() {
- import json
- try:
- import configparser
- except ImportError:
- import ConfigParser as configparser
-
- # toml is rather similar to standard ini like format except it likes values
- # that look more JSON like. So for our purposes simply escaping all values
- # as JSON seem to work fine.
-
- e = lambda s: json.dumps(s)
-
- config = configparser.RawConfigParser()
-
- # [target.ARCH-poky-linux]
- target_section = "target.{}".format(d.getVar('TARGET_SYS', True))
- config.add_section(target_section)
-
- llvm_config = d.expand("${YOCTO_ALTERNATE_EXE_PATH}")
- config.set(target_section, "llvm-config", e(llvm_config))
-
- config.set(target_section, "cxx", e(d.expand("${RUST_TARGET_CXX}")))
- config.set(target_section, "cc", e(d.expand("${RUST_TARGET_CC}")))
-
- # If we don't do this rust-native will compile it's own llvm for BUILD.
- # [target.${BUILD_ARCH}-unknown-linux-gnu]
- target_section = "target.{}".format(d.getVar('SNAPSHOT_BUILD_SYS', True))
- config.add_section(target_section)
-
- config.set(target_section, "llvm-config", e(llvm_config))
-
- config.set(target_section, "cxx", e(d.expand("${RUST_BUILD_CXX}")))
- config.set(target_section, "cc", e(d.expand("${RUST_BUILD_CC}")))
-
- # [rust]
- config.add_section("rust")
- config.set("rust", "rpath", e(True))
- config.set("rust", "channel", e("stable"))
-
- # Whether or not to optimize the compiler and standard library
- config.set("rust", "optimize", e(True))
-
- # [build]
- config.add_section("build")
- config.set("build", "submodules", e(False))
- config.set("build", "docs", e(False))
-
- rustc = d.expand("${WORKDIR}/rust-snapshot/bin/rustc")
- config.set("build", "rustc", e(rustc))
-
- # Support for the profiler runtime to generate e.g. coverage report,
- # PGO etc.
- config.set("build", "profiler", e(True))
-
- cargo = d.expand("${WORKDIR}/rust-snapshot/bin/cargo")
- config.set("build", "cargo", e(cargo))
-
- config.set("build", "vendor", e(True))
-
- if not "targets" in locals():
- targets = [d.getVar("TARGET_SYS", True)]
- config.set("build", "target", e(targets))
-
- if not "hosts" in locals():
- hosts = [d.getVar("HOST_SYS", True)]
- config.set("build", "host", e(hosts))
-
- # We can't use BUILD_SYS since that is something the rust snapshot knows
- # nothing about when trying to build some stage0 tools (like fabricate)
- config.set("build", "build", e(d.getVar("SNAPSHOT_BUILD_SYS", True)))
-
- # [install]
- config.add_section("install")
- # ./x.py install doesn't have any notion of "destdir"
- # but we can prepend ${D} to all the directories instead
- config.set("install", "prefix", e(d.getVar("D", True) + d.getVar("prefix", True)))
- config.set("install", "bindir", e(d.getVar("D", True) + d.getVar("bindir", True)))
- config.set("install", "libdir", e(d.getVar("D", True) + d.getVar("libdir", True)))
- config.set("install", "datadir", e(d.getVar("D", True) + d.getVar("datadir", True)))
- config.set("install", "mandir", e(d.getVar("D", True) + d.getVar("mandir", True)))
-
- with open("config.toml", "w") as f:
- f.write('changelog-seen = 2\n\n')
- config.write(f)
-
- # set up ${WORKDIR}/cargo_home
- bb.build.exec_func("setup_cargo_environment", d)
-}
-
-
-rust_runx () {
- echo "COMPILE ${PN}" "$@"
-
- # CFLAGS, LDFLAGS, CXXFLAGS, CPPFLAGS are used by rust's build for a
- # wide range of targets (not just TARGET). Yocto's settings for them will
- # be inappropriate, avoid using.
- unset CFLAGS
- unset LDFLAGS
- unset CXXFLAGS
- unset CPPFLAGS
-
- oe_cargo_fix_env
-
- python3 src/bootstrap/bootstrap.py ${@oe.utils.parallel_make_argument(d, '-j %d')} "$@" --verbose
-}
-rust_runx[vardepsexclude] += "PARALLEL_MAKE"
-
-do_compile () {
- rust_runx build
-}
-
-rust_do_install () {
- mkdir -p ${D}${bindir}
- cp build/${HOST_SYS}/stage2/bin/* ${D}${bindir}
-
- mkdir -p ${D}${libdir}/rustlib
- cp -pRd build/${HOST_SYS}/stage2/lib/* ${D}${libdir}
- # Remove absolute symlink so bitbake doesn't complain
- rm -f ${D}${libdir}/rustlib/src/rust
-}
-
-rust_install_targets() {
- # Install our custom target.json files
- local td="${D}${libdir}/rustlib/"
- install -d "$td"
- for tgt in "${WORKDIR}/targets/"* ; do
- install -m 0644 "$tgt" "$td"
- done
-}
-
-
-do_install () {
- rust_do_install
- rust_install_targets
-}
-# ex: sts=4 et sw=4 ts=8
diff --git a/meta/recipes-devtools/rust/rust_1.60.0.bb b/meta/recipes-devtools/rust/rust_1.60.0.bb
deleted file mode 100644
index b505ad46ca..0000000000
--- a/meta/recipes-devtools/rust/rust_1.60.0.bb
+++ /dev/null
@@ -1,21 +0,0 @@
-require rust-target.inc
-require rust-source.inc
-require rust-snapshot.inc
-
-INSANE_SKIP:${PN}:class-native = "already-stripped"
-
-do_compile () {
- rust_runx build --stage 2
-}
-
-rust_do_install() {
- rust_runx install
-}
-
-python () {
- pn = d.getVar('PN')
-
- if not pn.endswith("-native"):
- raise bb.parse.SkipRecipe("Rust recipe doesn't work for target builds at this time. Fixes welcome.")
-}
-
diff --git a/meta/recipes-devtools/rust/rust_1.75.0.bb b/meta/recipes-devtools/rust/rust_1.75.0.bb
new file mode 100644
index 0000000000..76e1fe2d84
--- /dev/null
+++ b/meta/recipes-devtools/rust/rust_1.75.0.bb
@@ -0,0 +1,361 @@
+SUMMARY = "Rust compiler and runtime libaries"
+HOMEPAGE = "http://www.rust-lang.org"
+SECTION = "devel"
+LICENSE = "(MIT | Apache-2.0) & Unicode-TOU"
+LIC_FILES_CHKSUM = "file://COPYRIGHT;md5=c2cccf560306876da3913d79062a54b9"
+
+inherit rust
+inherit cargo_common
+
+DEPENDS += "file-native python3-native"
+DEPENDS:append:class-native = " rust-llvm-native"
+DEPENDS:append:class-nativesdk = " nativesdk-rust-llvm"
+
+DEPENDS += "rust-llvm (=${PV})"
+
+RDEPENDS:${PN}:append:class-target = " gcc g++ binutils"
+
+# Otherwise we'll depend on what we provide
+INHIBIT_DEFAULT_RUST_DEPS:class-native = "1"
+# We don't need to depend on gcc-native because yocto assumes it exists
+PROVIDES:class-native = "virtual/${TARGET_PREFIX}rust"
+
+S = "${RUSTSRC}"
+
+# Use at your own risk, accepted values are stable, beta and nightly
+RUST_CHANNEL ?= "stable"
+PV .= "${@bb.utils.contains('RUST_CHANNEL', 'stable', '', '-${RUST_CHANNEL}', d)}"
+
+export FORCE_CRATE_HASH="${BB_TASKHASH}"
+
+RUST_ALTERNATE_EXE_PATH ?= "${STAGING_LIBDIR}/llvm-rust/bin/llvm-config"
+RUST_ALTERNATE_EXE_PATH_NATIVE = "${STAGING_LIBDIR_NATIVE}/llvm-rust/bin/llvm-config"
+
+# We don't want to use bitbakes vendoring because the rust sources do their
+# own vendoring.
+CARGO_DISABLE_BITBAKE_VENDORING = "1"
+
+# We can't use RUST_BUILD_SYS here because that may be "musl" if
+# TCLIBC="musl". Snapshots are always -unknown-linux-gnu
+setup_cargo_environment () {
+ # The first step is to build bootstrap and some early stage tools,
+ # these are build for the same target as the snapshot, e.g.
+ # x86_64-unknown-linux-gnu.
+ # Later stages are build for the native target (i.e. target.x86_64-linux)
+ cargo_common_do_configure
+}
+
+inherit rust-target-config
+
+do_rust_setup_snapshot () {
+ for installer in "${WORKDIR}/rust-snapshot-components/"*"/install.sh"; do
+ "${installer}" --prefix="${WORKDIR}/rust-snapshot" --disable-ldconfig
+ done
+
+ # Some versions of rust (e.g. 1.18.0) tries to find cargo in stage0/bin/cargo
+ # and fail without it there.
+ mkdir -p ${RUSTSRC}/build/${BUILD_SYS}
+ ln -sf ${WORKDIR}/rust-snapshot/ ${RUSTSRC}/build/${BUILD_SYS}/stage0
+
+ # Need to use uninative's loader if enabled/present since the library paths
+ # are used internally by rust and result in symbol mismatches if we don't
+ if [ ! -z "${UNINATIVE_LOADER}" -a -e "${UNINATIVE_LOADER}" ]; then
+ for bin in cargo rustc rustdoc; do
+ patchelf-uninative ${WORKDIR}/rust-snapshot/bin/$bin --set-interpreter ${UNINATIVE_LOADER}
+ done
+ fi
+}
+addtask rust_setup_snapshot after do_unpack before do_configure
+addtask do_test_compile after do_configure do_rust_gen_targets
+do_rust_setup_snapshot[dirs] += "${WORKDIR}/rust-snapshot"
+do_rust_setup_snapshot[vardepsexclude] += "UNINATIVE_LOADER"
+
+python do_configure() {
+ import json
+ import configparser
+
+ # toml is rather similar to standard ini like format except it likes values
+ # that look more JSON like. So for our purposes simply escaping all values
+ # as JSON seem to work fine.
+
+ e = lambda s: json.dumps(s)
+
+ config = configparser.RawConfigParser()
+
+ # [target.ARCH-poky-linux]
+ host_section = "target.{}".format(d.getVar('RUST_HOST_SYS'))
+ config.add_section(host_section)
+
+ llvm_config_target = d.expand("${RUST_ALTERNATE_EXE_PATH}")
+ llvm_config_build = d.expand("${RUST_ALTERNATE_EXE_PATH_NATIVE}")
+ config.set(host_section, "llvm-config", e(llvm_config_target))
+
+ config.set(host_section, "cxx", e(d.expand("${RUST_TARGET_CXX}")))
+ config.set(host_section, "cc", e(d.expand("${RUST_TARGET_CC}")))
+ config.set(host_section, "linker", e(d.expand("${RUST_TARGET_CCLD}")))
+ if "musl" in host_section:
+ config.set(host_section, "musl-root", e(d.expand("${STAGING_DIR_HOST}${exec_prefix}")))
+
+ # If we don't do this rust-native will compile it's own llvm for BUILD.
+ # [target.${BUILD_ARCH}-unknown-linux-gnu]
+ build_section = "target.{}".format(d.getVar('RUST_BUILD_SYS'))
+ if build_section != host_section:
+ config.add_section(build_section)
+
+ config.set(build_section, "llvm-config", e(llvm_config_build))
+
+ config.set(build_section, "cxx", e(d.expand("${RUST_BUILD_CXX}")))
+ config.set(build_section, "cc", e(d.expand("${RUST_BUILD_CC}")))
+ config.set(build_section, "linker", e(d.expand("${RUST_BUILD_CCLD}")))
+
+ target_section = "target.{}".format(d.getVar('RUST_TARGET_SYS'))
+ if target_section != host_section and target_section != build_section:
+ config.add_section(target_section)
+
+ config.set(target_section, "llvm-config", e(llvm_config_target))
+
+ config.set(target_section, "cxx", e(d.expand("${RUST_TARGET_CXX}")))
+ config.set(target_section, "cc", e(d.expand("${RUST_TARGET_CC}")))
+ config.set(target_section, "linker", e(d.expand("${RUST_TARGET_CCLD}")))
+
+ # [llvm]
+ config.add_section("llvm")
+ config.set("llvm", "static-libstdcpp", e(False))
+ if "llvm" in (d.getVar('TC_CXX_RUNTIME') or ""):
+ config.set("llvm", "use-libcxx", e(True))
+
+ # [rust]
+ config.add_section("rust")
+ config.set("rust", "rpath", e(True))
+ config.set("rust", "remap-debuginfo", e(True))
+ config.set("rust", "channel", e(d.expand("${RUST_CHANNEL}")))
+
+ # Whether or not to optimize the compiler and standard library
+ config.set("rust", "optimize", e(True))
+
+ # Emits extraneous output from tests to ensure that failures of the test
+ # harness are debuggable just from logfiles
+ config.set("rust", "verbose-tests", e(True))
+
+ # [build]
+ config.add_section("build")
+ config.set("build", "submodules", e(False))
+ config.set("build", "docs", e(False))
+ config.set("build", "tools", ["rust-demangler",])
+
+ rustc = d.expand("${WORKDIR}/rust-snapshot/bin/rustc")
+ config.set("build", "rustc", e(rustc))
+
+ cargo = d.expand("${WORKDIR}/rust-snapshot/bin/cargo")
+ config.set("build", "cargo", e(cargo))
+
+ config.set("build", "vendor", e(True))
+
+ config.set("build", "target", e([d.getVar("RUST_TARGET_SYS")]))
+
+ config.set("build", "host", e([d.getVar("RUST_HOST_SYS")]))
+
+ # We can't use BUILD_SYS since that is something the rust snapshot knows
+ # nothing about when trying to build some stage0 tools (like fabricate)
+ config.set("build", "build", e(d.getVar("RUST_BUILD_SYS")))
+
+ # [install]
+ config.add_section("install")
+ # ./x.py install doesn't have any notion of "destdir"
+ # but we can prepend ${D} to all the directories instead
+ config.set("install", "prefix", e(d.getVar("D") + d.getVar("prefix")))
+ config.set("install", "bindir", e(d.getVar("D") + d.getVar("bindir")))
+ config.set("install", "libdir", e(d.getVar("D") + d.getVar("libdir")))
+ config.set("install", "datadir", e(d.getVar("D") + d.getVar("datadir")))
+ config.set("install", "mandir", e(d.getVar("D") + d.getVar("mandir")))
+ config.set("install", "sysconfdir", e(d.getVar("D") + d.getVar("sysconfdir")))
+
+ with open("config.toml", "w") as f:
+ f.write('change-id = 116881\n\n')
+ config.write(f)
+
+ # set up ${WORKDIR}/cargo_home
+ bb.build.exec_func("setup_cargo_environment", d)
+}
+
+rust_runx () {
+ echo "COMPILE ${PN}" "$@"
+
+ # CFLAGS, LDFLAGS, CXXFLAGS, CPPFLAGS are used by rust's build for a
+ # wide range of targets (not just TARGET). Yocto's settings for them will
+ # be inappropriate, avoid using.
+ unset CFLAGS
+ unset LDFLAGS
+ unset CXXFLAGS
+ unset CPPFLAGS
+
+ export RUSTFLAGS="${RUST_DEBUG_REMAP}"
+
+ # Copy the natively built llvm-config into the target so we can run it. Horrible,
+ # but works!
+ if [ ${RUST_ALTERNATE_EXE_PATH_NATIVE} != ${RUST_ALTERNATE_EXE_PATH} -a ! -f ${RUST_ALTERNATE_EXE_PATH} ]; then
+ mkdir -p `dirname ${RUST_ALTERNATE_EXE_PATH}`
+ cp ${RUST_ALTERNATE_EXE_PATH_NATIVE} ${RUST_ALTERNATE_EXE_PATH}
+ if [ -e ${STAGING_LIBDIR_NATIVE}/libc++.so.1 ]; then
+ chrpath -r \$ORIGIN/../../../../../`basename ${STAGING_DIR_NATIVE}`${libdir_native} ${RUST_ALTERNATE_EXE_PATH}
+ else
+ chrpath -d ${RUST_ALTERNATE_EXE_PATH}
+ fi
+ fi
+
+ oe_cargo_fix_env
+
+ python3 src/bootstrap/bootstrap.py ${@oe.utils.parallel_make_argument(d, '-j %d')} "$@" --verbose
+}
+rust_runx[vardepsexclude] += "PARALLEL_MAKE"
+
+require rust-source.inc
+require rust-snapshot.inc
+
+INSANE_SKIP:${PN}:class-native = "already-stripped"
+FILES:${PN} += "${libdir}/rustlib"
+FILES:${PN} += "${libdir}/*.so"
+FILES:${PN}-dev = ""
+
+do_compile () {
+}
+
+do_test_compile[dirs] = "${B}"
+do_test_compile () {
+ rust_runx build src/tools/remote-test-server --target "${RUST_TARGET_SYS}"
+}
+
+ALLOW_EMPTY:${PN} = "1"
+
+PACKAGES =+ "${PN}-rustdoc ${PN}-tools-clippy ${PN}-tools-rustfmt"
+FILES:${PN}-rustdoc = "${bindir}/rustdoc"
+FILES:${PN}-tools-clippy = "${bindir}/cargo-clippy ${bindir}/clippy-driver"
+FILES:${PN}-tools-rustfmt = "${bindir}/rustfmt"
+RDEPENDS:${PN}-rustdoc = "${PN}"
+RDEPENDS:${PN}-tools-clippy = "${PN}"
+RDEPENDS:${PN}-tools-rustfmt = "${PN}"
+
+SUMMARY:${PN}-tools-clippy = "A collection of lints to catch common mistakes and improve your Rust code"
+SUMMARY:${PN}-tools-rustfmt = "A tool for formatting Rust code according to style guidelines"
+
+do_install () {
+ rust_do_install
+}
+
+rust_do_install() {
+ rust_runx install
+}
+
+rust_do_install:class-nativesdk() {
+ export PSEUDO_UNLOAD=1
+ rust_runx install
+ rust_runx install clippy
+ rust_runx install rustfmt
+ unset PSEUDO_UNLOAD
+
+ install -d ${D}${bindir}
+ for i in cargo-clippy clippy-driver rustfmt; do
+ cp build/${RUST_BUILD_SYS}/stage2-tools/${RUST_HOST_SYS}/release/$i ${D}${bindir}
+ chrpath -r "\$ORIGIN/../lib" ${D}${bindir}/$i
+ done
+
+ chown root:root ${D}/ -R
+ rm ${D}${libdir}/rustlib/uninstall.sh
+ rm ${D}${libdir}/rustlib/install.log
+ rm ${D}${libdir}/rustlib/manifest*
+}
+
+EXTRA_TOOLS ?= "cargo-clippy clippy-driver rustfmt"
+rust_do_install:class-target() {
+ export PSEUDO_UNLOAD=1
+ rust_runx install
+ rust_runx install clippy
+ rust_runx install rustfmt
+ unset PSEUDO_UNLOAD
+
+ install -d ${D}${bindir}
+ for i in ${EXTRA_TOOLS}; do
+ cp build/${RUST_BUILD_SYS}/stage2-tools/${RUST_HOST_SYS}/release/$i ${D}${bindir}
+ chrpath -r "\$ORIGIN/../lib" ${D}${bindir}/$i
+ done
+
+ install -d ${D}${libdir}/rustlib/${RUST_HOST_SYS}
+ install -m 0644 ${WORKDIR}/rust-targets/${RUST_HOST_SYS}.json ${D}${libdir}/rustlib/${RUST_HOST_SYS}/target.json
+
+ chown root:root ${D}/ -R
+ rm ${D}${libdir}/rustlib/uninstall.sh
+ rm ${D}${libdir}/rustlib/install.log
+ rm ${D}${libdir}/rustlib/manifest*
+}
+
+addtask do_update_snapshot after do_patch
+do_update_snapshot[nostamp] = "1"
+
+# Run with `bitbake -c update_snapshot rust` to update `rust-snapshot.inc`
+# with the checksums for the rust snapshot associated with this rustc-src
+# tarball.
+python do_update_snapshot() {
+ import json
+ import re
+ import sys
+
+ from collections import defaultdict
+
+ with open(os.path.join(d.getVar("S"), "src", "stage0.json")) as f:
+ j = json.load(f)
+
+ config_dist_server = j['config']['dist_server']
+ compiler_date = j['compiler']['date']
+ compiler_version = j['compiler']['version']
+
+ src_uri = defaultdict(list)
+ for k, v in j['checksums_sha256'].items():
+ m = re.search(f"dist/{compiler_date}/(?P<component>.*)-{compiler_version}-(?P<arch>.*)-unknown-linux-gnu\\.tar\\.xz", k)
+ if m:
+ component = m.group('component')
+ arch = m.group('arch')
+ src_uri[arch].append(f"SRC_URI[{component}-snapshot-{arch}.sha256sum] = \"{v}\"")
+
+ snapshot = """\
+## This is information on the rust-snapshot (binary) used to build our current release.
+## snapshot info is taken from rust/src/stage0.json
+## Rust is self-hosting and bootstraps itself with a pre-built previous version of itself.
+## The exact (previous) version that has been used is specified in the source tarball.
+## The version is replicated here.
+
+SNAPSHOT_VERSION = "%s"
+
+""" % compiler_version
+
+ for arch, components in src_uri.items():
+ snapshot += "\n".join(components) + "\n\n"
+
+ snapshot += """\
+SRC_URI += " \\
+ ${RUST_DIST_SERVER}/dist/${RUST_STD_SNAPSHOT}.tar.xz;name=rust-std-snapshot-${RUST_BUILD_ARCH};subdir=rust-snapshot-components \\
+ ${RUST_DIST_SERVER}/dist/${RUSTC_SNAPSHOT}.tar.xz;name=rustc-snapshot-${RUST_BUILD_ARCH};subdir=rust-snapshot-components \\
+ ${RUST_DIST_SERVER}/dist/${CARGO_SNAPSHOT}.tar.xz;name=cargo-snapshot-${RUST_BUILD_ARCH};subdir=rust-snapshot-components \\
+"
+
+RUST_DIST_SERVER = "%s"
+
+RUST_STD_SNAPSHOT = "rust-std-${SNAPSHOT_VERSION}-${RUST_BUILD_ARCH}-unknown-linux-gnu"
+RUSTC_SNAPSHOT = "rustc-${SNAPSHOT_VERSION}-${RUST_BUILD_ARCH}-unknown-linux-gnu"
+CARGO_SNAPSHOT = "cargo-${SNAPSHOT_VERSION}-${RUST_BUILD_ARCH}-unknown-linux-gnu"
+""" % config_dist_server
+
+ with open(os.path.join(d.getVar("THISDIR"), "rust-snapshot.inc"), "w") as f:
+ f.write(snapshot)
+}
+
+RUSTLIB_DEP:class-nativesdk = ""
+
+# musl builds include libunwind.a
+INSANE_SKIP:${PN} = "staticdev"
+
+BBCLASSEXTEND = "native nativesdk"
+
+# Since 1.70.0 upgrade this fails to build with gold:
+# http://errors.yoctoproject.org/Errors/Details/708196/
+# ld: error: version script assignment of to symbol __rust_alloc_error_handler_should_panic failed: symbol not defined
+LDFLAGS:append = "${@bb.utils.contains('DISTRO_FEATURES', 'ld-is-gold', ' -fuse-ld=bfd', '', d)}"
diff --git a/meta/recipes-devtools/squashfs-tools/files/0001-install-manpages.sh-do-not-write-original-timestamps.patch b/meta/recipes-devtools/squashfs-tools/files/0001-install-manpages.sh-do-not-write-original-timestamps.patch
deleted file mode 100644
index ed1d2f5b3b..0000000000
--- a/meta/recipes-devtools/squashfs-tools/files/0001-install-manpages.sh-do-not-write-original-timestamps.patch
+++ /dev/null
@@ -1,30 +0,0 @@
-From b44b00dae195d8587857c7e8054e9be4eaa1f8b3 Mon Sep 17 00:00:00 2001
-From: Alexander Kanavin <alex@linutronix.de>
-Date: Thu, 7 Apr 2022 09:26:09 +0200
-Subject: [PATCH] install-manpages.sh: do not write original timestamps into
- .gz metadata
-
-This helps binary reproducibility.
-
-Upstream-Status: Submitted [https://github.com/plougher/squashfs-tools/pull/177]
-Signed-off-by: Alexander Kanavin <alex@linutronix.de>
----
- generate-manpages/install-manpages.sh | 2 +-
- 1 file changed, 1 insertion(+), 1 deletion(-)
-
-diff --git a/generate-manpages/install-manpages.sh b/generate-manpages/install-manpages.sh
-index d4c9e91..6a43b89 100755
---- a/generate-manpages/install-manpages.sh
-+++ b/generate-manpages/install-manpages.sh
-@@ -56,7 +56,7 @@ for i in mksquashfs unsquashfs sqfstar sqfscat; do
- exit 1
- fi
-
-- if ! gzip -f9 $2/$i.1; then
-+ if ! gzip -n -f9 $2/$i.1; then
- echo "$0: Compressing installed manpage failed. Aborting" >&2
- exit 1
- fi
---
-2.30.2
-
diff --git a/meta/recipes-devtools/squashfs-tools/squashfs-tools_git.bb b/meta/recipes-devtools/squashfs-tools/squashfs-tools_git.bb
index 4e009d2625..982a43f889 100644
--- a/meta/recipes-devtools/squashfs-tools/squashfs-tools_git.bb
+++ b/meta/recipes-devtools/squashfs-tools/squashfs-tools_git.bb
@@ -7,11 +7,9 @@ SECTION = "base"
LICENSE = "GPL-2.0-only"
LIC_FILES_CHKSUM = "file://COPYING;md5=b234ee4d69f5fce4486a80fdaf4a4263"
-PV = "4.5.1"
-SRCREV = "afdd63fc386919b4aa40d573b0a6069414d14317"
-SRC_URI = "git://github.com/plougher/squashfs-tools.git;protocol=https;branch=master \
- file://0001-install-manpages.sh-do-not-write-original-timestamps.patch \
- "
+PV = "4.6.1"
+SRCREV = "d8cb82d9840330f9344ec37b992595b5d7b44184"
+SRC_URI = "git://github.com/plougher/squashfs-tools.git;protocol=https;branch=v6.1.1"
UPSTREAM_CHECK_GITTAGREGEX = "(?P<pver>(\d+(\.\d+)+))"
S = "${WORKDIR}/git"
diff --git a/meta/recipes-devtools/strace/strace/0001-caps-abbrev.awk-fix-gawk-s-path.patch b/meta/recipes-devtools/strace/strace/0001-caps-abbrev.awk-fix-gawk-s-path.patch
deleted file mode 100644
index 235e803641..0000000000
--- a/meta/recipes-devtools/strace/strace/0001-caps-abbrev.awk-fix-gawk-s-path.patch
+++ /dev/null
@@ -1,47 +0,0 @@
-From 597cc206d982e7237eb93fdc33e8c4bb6bb2d796 Mon Sep 17 00:00:00 2001
-From: Robert Yang <liezhi.yang@windriver.com>
-Date: Thu, 9 Feb 2017 01:27:49 -0800
-Subject: [PATCH] caps-abbrev.awk: fix gawk's path
-
-It should be /usr/bin/gawk as other scripts use in this package.
-
-Upstream-Status: Pending
-
-Signed-off-by: Robert Yang <liezhi.yang@windriver.com>
-
----
- tests-m32/caps-abbrev.awk | 2 +-
- tests-mx32/caps-abbrev.awk | 2 +-
- tests/caps-abbrev.awk | 2 +-
- 3 files changed, 3 insertions(+), 3 deletions(-)
-
-diff --git a/tests-m32/caps-abbrev.awk b/tests-m32/caps-abbrev.awk
-index c00023b..a56cd56 100644
---- a/tests-m32/caps-abbrev.awk
-+++ b/tests-m32/caps-abbrev.awk
-@@ -1,4 +1,4 @@
--#!/bin/gawk
-+#!/usr/bin/gawk
- #
- # This file is part of caps strace test.
- #
-diff --git a/tests-mx32/caps-abbrev.awk b/tests-mx32/caps-abbrev.awk
-index c00023b..a56cd56 100644
---- a/tests-mx32/caps-abbrev.awk
-+++ b/tests-mx32/caps-abbrev.awk
-@@ -1,4 +1,4 @@
--#!/bin/gawk
-+#!/usr/bin/gawk
- #
- # This file is part of caps strace test.
- #
-diff --git a/tests/caps-abbrev.awk b/tests/caps-abbrev.awk
-index c00023b..a56cd56 100644
---- a/tests/caps-abbrev.awk
-+++ b/tests/caps-abbrev.awk
-@@ -1,4 +1,4 @@
--#!/bin/gawk
-+#!/usr/bin/gawk
- #
- # This file is part of caps strace test.
- #
diff --git a/meta/recipes-devtools/strace/strace/0001-configure-Use-autoconf-macro-to-detect-largefile-sup.patch b/meta/recipes-devtools/strace/strace/0001-configure-Use-autoconf-macro-to-detect-largefile-sup.patch
new file mode 100644
index 0000000000..76ca7a76a8
--- /dev/null
+++ b/meta/recipes-devtools/strace/strace/0001-configure-Use-autoconf-macro-to-detect-largefile-sup.patch
@@ -0,0 +1,27 @@
+From 470399f3636c412b74f9daf6ae430b13c3126f02 Mon Sep 17 00:00:00 2001
+From: Khem Raj <raj.khem@gmail.com>
+Date: Thu, 15 Dec 2022 15:54:27 -0800
+Subject: [PATCH] configure: Use autoconf macro to detect largefile support
+
+Adds --enable-largefile/--disable-largefile configure knobs
+where default is to detect the support
+
+Upstream-Status: Submitted [https://github.com/strace/strace/pull/230]
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+---
+ configure.ac | 2 ++
+ 1 file changed, 2 insertions(+)
+
+diff --git a/configure.ac b/configure.ac
+index 4797b42dd..7d57fb254 100644
+--- a/configure.ac
++++ b/configure.ac
+@@ -43,6 +43,8 @@ AC_PROG_INSTALL
+ AC_PROG_RANLIB
+ AC_PROG_LN_S
+
++AC_SYS_LARGEFILE
++
+ AC_USE_SYSTEM_EXTENSIONS
+ AX_CODE_COVERAGE
+
diff --git a/meta/recipes-devtools/strace/strace/0001-landlock-update-expected-string.patch b/meta/recipes-devtools/strace/strace/0001-landlock-update-expected-string.patch
deleted file mode 100644
index 9d67d68331..0000000000
--- a/meta/recipes-devtools/strace/strace/0001-landlock-update-expected-string.patch
+++ /dev/null
@@ -1,67 +0,0 @@
-From d0dae2fb30b907bc9bf70382f37c9e00207ae036 Mon Sep 17 00:00:00 2001
-From: Bruce Ashfield <bruce.ashfield@gmail.com>
-Date: Sat, 30 Apr 2022 01:09:42 -0400
-Subject: [PATCH] landlock: update expected string
-MIME-Version: 1.0
-Content-Type: text/plain; charset=UTF-8
-Content-Transfer-Encoding: 8bit
-
-Kernel commit:
-
- commit 3d4b396a616d0d67bf95d6823ad1197f6247292e
- Author: Christian Brauner <christian.brauner@ubuntu.com>
- Date: Mon Oct 11 15:37:04 2021 +0200
-
- landlock: Use square brackets around "landlock-ruleset"
-
- commit aea0b9f2486da8497f35c7114b764bf55e17c7ea upstream.
-
- Make the name of the anon inode fd "[landlock-ruleset]" instead of
- "landlock-ruleset". This is minor but most anon inode fds already
- carry square brackets around their name:
-
- [eventfd]
- [eventpoll]
- [fanotify]
- [fscontext]
- [io_uring]
- [pidfd]
- [signalfd]
- [timerfd]
- [userfaultfd]
-
- For the sake of consistency lets do the same for the landlock-ruleset anon
- inode fd that comes with landlock. We did the same in
- 1cdc415f1083 ("uapi, fsopen: use square brackets around "fscontext" [ver #2]")
- for the new mount api.
-
- Cc: linux-security-module@vger.kernel.org
- Signed-off-by: Christian Brauner <christian.brauner@ubuntu.com>
- Link: https://lore.kernel.org/r/20211011133704.1704369-1-brauner@kernel.org
- Cc: stable@vger.kernel.org
- Signed-off-by: Mickaël Salaün <mic@linux.microsoft.com>
- Signed-off-by: Greg Kroah-Hartman <gregkh@linuxfoundation.org>
-
-Changed the format of the landlock tracing. We need to update the strace
-expected string to match.
-
-Upstream-Status: Submitted [https://lists.strace.io/pipermail/strace-devel/2022-April/011064.html]
-
-Signed-off-by: Bruce Ashfield <bruce.ashfield@gmail.com>
----
- tests/landlock_create_ruleset-y.c | 2 +-
- 1 file changed, 1 insertion(+), 1 deletion(-)
-
-diff --git a/tests/landlock_create_ruleset-y.c b/tests/landlock_create_ruleset-y.c
-index a30966b..50e19c2 100644
---- a/tests/landlock_create_ruleset-y.c
-+++ b/tests/landlock_create_ruleset-y.c
-@@ -1,4 +1,4 @@
--#define FD_PATH "<anon_inode:landlock-ruleset>"
-+#define FD_PATH "<anon_inode:[landlock-ruleset]>"
- #define SKIP_IF_PROC_IS_UNAVAILABLE skip_if_unavailable("/proc/self/fd/")
-
- #include "landlock_create_ruleset.c"
---
-2.19.1
-
diff --git a/meta/recipes-devtools/strace/strace/0002-tests-Replace-off64_t-with-off_t.patch b/meta/recipes-devtools/strace/strace/0002-tests-Replace-off64_t-with-off_t.patch
new file mode 100644
index 0000000000..0cabdfe99f
--- /dev/null
+++ b/meta/recipes-devtools/strace/strace/0002-tests-Replace-off64_t-with-off_t.patch
@@ -0,0 +1,52 @@
+From 579b2ebe52d4b97f954e6188df2d07e137820075 Mon Sep 17 00:00:00 2001
+From: Khem Raj <raj.khem@gmail.com>
+Date: Thu, 15 Dec 2022 15:56:13 -0800
+Subject: [PATCH] tests: Replace off64_t with off_t
+
+when _FILE_OFFSET_BITS=64 then off_t is 64bit wide, this also fixes
+build on musl where off64_t is not available without _LARGEFILE64_SOURCE
+
+Upstream-Status: Submitted [https://github.com/strace/strace/pull/230]
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+---
+ tests/readahead.c | 2 +-
+ tests/sync_file_range2.c | 4 ++--
+ 2 files changed, 3 insertions(+), 3 deletions(-)
+
+--- a/tests/readahead.c
++++ b/tests/readahead.c
+@@ -42,7 +42,7 @@ static const int fds[] = {
+ 0x7fffffff,
+ };
+
+-static const off64_t offsets[] = {
++static const off_t offsets[] = {
+ -0x8000000000000000LL,
+ -0x5060708090a0b0c0LL,
+ -1LL,
+--- a/tests/sync_file_range2.c
++++ b/tests/sync_file_range2.c
+@@ -20,8 +20,8 @@ int
+ main(void)
+ {
+ const int fd = -1;
+- const off64_t offset = 0xdeadbeefbadc0ded;
+- const off64_t nbytes = 0xfacefeedcafef00d;
++ const off_t offset = 0xdeadbeefbadc0ded;
++ const off_t nbytes = 0xfacefeedcafef00d;
+ const unsigned int flags = -1;
+
+ int rc = sync_file_range(fd, offset, nbytes, flags);
+--- a/tests/sync_file_range.c
++++ b/tests/sync_file_range.c
+@@ -20,8 +20,8 @@ int
+ main(void)
+ {
+ const int fd = -1;
+- const off64_t offset = 0xdeadbeefbadc0dedULL;
+- const off64_t nbytes = 0xfacefeedcafef00dULL;
++ const off_t offset = 0xdeadbeefbadc0dedULL;
++ const off_t nbytes = 0xfacefeedcafef00dULL;
+ const unsigned int flags = -1;
+
+ int rc = sync_file_range(fd, offset, nbytes, flags);
diff --git a/meta/recipes-devtools/strace/strace/ptest-spacesave.patch b/meta/recipes-devtools/strace/strace/ptest-spacesave.patch
index c5d8ff9207..4e86ccadc5 100644
--- a/meta/recipes-devtools/strace/strace/ptest-spacesave.patch
+++ b/meta/recipes-devtools/strace/strace/ptest-spacesave.patch
@@ -22,7 +22,7 @@ index 3540204..5e1e7c9 100755
$hdr
. "\${srcdir=.}/init.sh"
run_strace_match_diff $arg0 $args
-+ rm -rf log exp
++ rm -rf log exp out
EOF
;;
diff --git a/meta/recipes-devtools/strace/strace/run-ptest b/meta/recipes-devtools/strace/strace/run-ptest
index 86daed9220..1224229e8f 100755
--- a/meta/recipes-devtools/strace/strace/run-ptest
+++ b/meta/recipes-devtools/strace/strace/run-ptest
@@ -3,7 +3,7 @@
set -u
export TIMEOUT_DURATION=240
-make -B -C tests -k test-suite.log
+make -j4 -B -C tests -k test-suite.log
res=$?
if [ $res -ne 0 ]; then
cat tests/test-suite.log
diff --git a/meta/recipes-devtools/strace/strace/skip-load.patch b/meta/recipes-devtools/strace/strace/skip-load.patch
index fa3ddb8ddc..b1acfda5d8 100644
--- a/meta/recipes-devtools/strace/strace/skip-load.patch
+++ b/meta/recipes-devtools/strace/strace/skip-load.patch
@@ -4,6 +4,12 @@ care about timing.
Upstream-Status: Inappropriate
Signed-off-by: Ross Burton <ross.burton@arm.com>
+---
+ tests/clock_nanosleep.gen.test | 1 +
+ tests/delay.test | 1 +
+ tests/strace-r.test | 1 +
+ 3 files changed, 3 insertions(+)
+
diff --git a/tests/clock_nanosleep.gen.test b/tests/clock_nanosleep.gen.test
index 7a6025b..f0e6dbb 100755
--- a/tests/clock_nanosleep.gen.test
@@ -15,41 +21,29 @@ index 7a6025b..f0e6dbb 100755
+skip_ "Test not reliable under load"
run_strace_match_diff -e trace=clock_nanosleep,clock_gettime
diff --git a/tests/delay.test b/tests/delay.test
-index f74e27f..328087e 100755
+index f74e27f..6172c04 100755
--- a/tests/delay.test
+++ b/tests/delay.test
-@@ -9,6 +9,8 @@
+@@ -8,6 +8,7 @@
+ # SPDX-License-Identifier: GPL-2.0-or-later
. "${srcdir=.}/init.sh"
-
+skip_ "Test not reliable under load"
-+
+
while read -r denter dexit denter_us dexit_us; do
[ -n "$denter" ] || continue
-
-diff --git a/tests/strace-T.test b/tests/strace-T.test
-index adca3e8..d4acfe9 100755
---- a/tests/strace-T.test
-+++ b/tests/strace-T.test
-@@ -9,6 +9,8 @@
-
- . "${srcdir=.}/init.sh"
-
-+skip_ "Test not reliable under load"
-+
- T_opt="${1:--T}"
-
- run_prog ../sleep 0
diff --git a/tests/strace-r.test b/tests/strace-r.test
-index 8299737..c360344 100755
+index 8299737..d89c7df 100755
--- a/tests/strace-r.test
+++ b/tests/strace-r.test
-@@ -9,6 +9,8 @@
+@@ -8,6 +8,7 @@
+ # SPDX-License-Identifier: GPL-2.0-or-later
. "${srcdir=.}/init.sh"
-
+skip_ "Test not reliable under load"
-+
+
r_opt="${1:--r}"
- run_prog ../sleep 0
+--
+2.25.1
+
diff --git a/meta/recipes-devtools/strace/strace/update-gawk-paths.patch b/meta/recipes-devtools/strace/strace/update-gawk-paths.patch
index 0c683496ae..a16ede95c2 100644
--- a/meta/recipes-devtools/strace/strace/update-gawk-paths.patch
+++ b/meta/recipes-devtools/strace/strace/update-gawk-paths.patch
@@ -125,3 +125,33 @@ index dce78f5..573d9ea 100644
#
# Copyright (c) 2014-2015 Dmitry V. Levin <ldv@strace.io>
# Copyright (c) 2016 Elvira Khabirova <lineprinter0@gmail.com>
+diff --git a/tests-m32/caps-abbrev.awk b/tests-m32/caps-abbrev.awk
+index c00023b..a56cd56 100644
+--- a/tests-m32/caps-abbrev.awk
++++ b/tests-m32/caps-abbrev.awk
+@@ -1,4 +1,4 @@
+-#!/bin/gawk
++#!/usr/bin/gawk
+ #
+ # This file is part of caps strace test.
+ #
+diff --git a/tests-mx32/caps-abbrev.awk b/tests-mx32/caps-abbrev.awk
+index c00023b..a56cd56 100644
+--- a/tests-mx32/caps-abbrev.awk
++++ b/tests-mx32/caps-abbrev.awk
+@@ -1,4 +1,4 @@
+-#!/bin/gawk
++#!/usr/bin/gawk
+ #
+ # This file is part of caps strace test.
+ #
+diff --git a/tests/caps-abbrev.awk b/tests/caps-abbrev.awk
+index c00023b..a56cd56 100644
+--- a/tests/caps-abbrev.awk
++++ b/tests/caps-abbrev.awk
+@@ -1,4 +1,4 @@
+-#!/bin/gawk
++#!/usr/bin/gawk
+ #
+ # This file is part of caps strace test.
+ #
diff --git a/meta/recipes-devtools/strace/strace_5.17.bb b/meta/recipes-devtools/strace/strace_5.17.bb
deleted file mode 100644
index c7fd9edf40..0000000000
--- a/meta/recipes-devtools/strace/strace_5.17.bb
+++ /dev/null
@@ -1,55 +0,0 @@
-SUMMARY = "System call tracing tool"
-HOMEPAGE = "http://strace.io"
-DESCRIPTION = "strace is a diagnostic, debugging and instructional userspace utility for Linux. It is used to monitor and tamper with interactions between processes and the Linux kernel, which include system calls, signal deliveries, and changes of process state."
-SECTION = "console/utils"
-LICENSE = "LGPL-2.1-or-later & GPL-2.0-or-later"
-LIC_FILES_CHKSUM = "file://COPYING;md5=59a33f0a3e6122d67c0b3befccbdaa6b"
-
-SRC_URI = "https://strace.io/files/${PV}/strace-${PV}.tar.xz \
- file://update-gawk-paths.patch \
- file://Makefile-ptest.patch \
- file://run-ptest \
- file://0001-caps-abbrev.awk-fix-gawk-s-path.patch \
- file://ptest-spacesave.patch \
- file://0001-strace-fix-reproducibilty-issues.patch \
- file://skip-load.patch \
- file://0001-landlock-update-expected-string.patch \
- "
-SRC_URI[sha256sum] = "5fb298dbd1331fd1e1bc94c5c32395860d376101b87c6cd3d1ba9f9aa15c161f"
-
-inherit autotools ptest
-
-PACKAGECONFIG:class-target ??= "\
- ${@bb.utils.contains('DISTRO_FEATURES', 'bluetooth', 'bluez', '', d)} \
-"
-
-PACKAGECONFIG[bluez] = "ac_cv_header_bluetooth_bluetooth_h=yes,ac_cv_header_bluetooth_bluetooth_h=no,bluez5"
-PACKAGECONFIG[libunwind] = "--with-libunwind,--without-libunwind,libunwind"
-
-EXTRA_OECONF += "--enable-mpers=no --disable-gcc-Werror"
-
-CFLAGS:append:libc-musl = " -Dsigcontext_struct=sigcontext"
-
-TESTDIR = "tests"
-PTEST_BUILD_HOST_PATTERN = "^(DEB_CHANGELOGTIME|RPM_CHANGELOGTIME|WARN_CFLAGS_FOR_BUILD|LDFLAGS_FOR_BUILD)"
-
-do_compile_ptest() {
- oe_runmake ${PARALLEL_MAKE} -C ${TESTDIR} buildtest-TESTS
-}
-
-do_install_ptest() {
- oe_runmake -C ${TESTDIR} install-ptest BUILDDIR=${B} DESTDIR=${D}${PTEST_PATH} TESTDIR=${TESTDIR}
- mkdir -p ${D}${PTEST_PATH}/build-aux
- mkdir -p ${D}${PTEST_PATH}/src
- install -m 755 ${S}/build-aux/test-driver ${D}${PTEST_PATH}/build-aux/
- install -m 644 ${B}/src/config.h ${D}${PTEST_PATH}/src/
- sed -i -e '/^src/s/strace.*[0-9]/ptest/' ${D}/${PTEST_PATH}/${TESTDIR}/Makefile
-}
-
-RDEPENDS:${PN}-ptest += "make coreutils grep gawk sed"
-
-RDEPENDS:${PN}-ptest:append:libc-glibc = "\
- locale-base-en-us.iso-8859-1 \
-"
-
-BBCLASSEXTEND = "native"
diff --git a/meta/recipes-devtools/strace/strace_6.7.bb b/meta/recipes-devtools/strace/strace_6.7.bb
new file mode 100644
index 0000000000..f365477ccd
--- /dev/null
+++ b/meta/recipes-devtools/strace/strace_6.7.bb
@@ -0,0 +1,56 @@
+SUMMARY = "System call tracing tool"
+HOMEPAGE = "http://strace.io"
+DESCRIPTION = "strace is a diagnostic, debugging and instructional userspace utility for Linux. It is used to monitor and tamper with interactions between processes and the Linux kernel, which include system calls, signal deliveries, and changes of process state."
+SECTION = "console/utils"
+LICENSE = "LGPL-2.1-or-later & GPL-2.0-or-later"
+LIC_FILES_CHKSUM = "file://COPYING;md5=2433d82e1432a76dc3eadd9002bfe304"
+
+SRC_URI = "https://strace.io/files/${PV}/strace-${PV}.tar.xz \
+ file://update-gawk-paths.patch \
+ file://Makefile-ptest.patch \
+ file://run-ptest \
+ file://ptest-spacesave.patch \
+ file://0001-strace-fix-reproducibilty-issues.patch \
+ file://skip-load.patch \
+ file://0001-configure-Use-autoconf-macro-to-detect-largefile-sup.patch \
+ file://0002-tests-Replace-off64_t-with-off_t.patch \
+ "
+SRC_URI[sha256sum] = "2090201e1a3ff32846f4fe421c1163b15f440bb38e31355d09f82d3949922af7"
+
+inherit autotools ptest
+
+# Not yet ported to rv32
+COMPATIBLE_HOST:riscv32 = "null"
+
+# bluez is not enabled by default due to build dependency creep in smaller builds
+# like core-image-minimal leading to significantly more tasks being executed
+PACKAGECONFIG[bluez] = "ac_cv_header_bluetooth_bluetooth_h=yes,ac_cv_header_bluetooth_bluetooth_h=no,bluez5"
+PACKAGECONFIG[libunwind] = "--with-libunwind,--without-libunwind,libunwind"
+
+EXTRA_OECONF += "--enable-mpers=no --disable-gcc-Werror"
+
+CFLAGS:append:libc-musl = " -Dsigcontext_struct=sigcontext"
+
+TESTDIR = "tests"
+PTEST_BUILD_HOST_PATTERN = "^(DEB_CHANGELOGTIME|RPM_CHANGELOGTIME|WARN_CFLAGS_FOR_BUILD|LDFLAGS_FOR_BUILD)"
+
+do_compile_ptest() {
+ oe_runmake -C ${TESTDIR} buildtest-TESTS
+}
+
+do_install_ptest() {
+ oe_runmake -C ${TESTDIR} install-ptest BUILDDIR=${B} DESTDIR=${D}${PTEST_PATH} TESTDIR=${TESTDIR}
+ mkdir -p ${D}${PTEST_PATH}/build-aux
+ mkdir -p ${D}${PTEST_PATH}/src
+ install -m 755 ${S}/build-aux/test-driver ${D}${PTEST_PATH}/build-aux/
+ install -m 644 ${B}/src/config.h ${D}${PTEST_PATH}/src/
+ sed -i -e '/^src/s/strace.*[0-9]/ptest/' ${D}/${PTEST_PATH}/${TESTDIR}/Makefile
+}
+
+RDEPENDS:${PN}-ptest += "make coreutils grep gawk sed"
+
+RDEPENDS:${PN}-ptest:append:libc-glibc = "\
+ locale-base-en-us.iso-8859-1 \
+"
+
+BBCLASSEXTEND = "native"
diff --git a/meta/recipes-devtools/subversion/subversion_1.14.2.bb b/meta/recipes-devtools/subversion/subversion_1.14.2.bb
deleted file mode 100644
index ba208d922f..0000000000
--- a/meta/recipes-devtools/subversion/subversion_1.14.2.bb
+++ /dev/null
@@ -1,61 +0,0 @@
-SUMMARY = "Subversion (svn) version control system client"
-HOMEPAGE = "http://subversion.apache.org"
-DESCRIPTION = "Subversion is an open source version control system."
-SECTION = "console/network"
-LICENSE = "Apache-2.0 & MIT"
-LIC_FILES_CHKSUM = "file://LICENSE;md5=6487ae7094d359fa90fb9c4096e52e2b"
-
-DEPENDS = "apr-util serf sqlite3 file lz4"
-DEPENDS:append:class-native = " file-replacement-native"
-
-SRC_URI = "${APACHE_MIRROR}/${BPN}/${BPN}-${PV}.tar.bz2 \
- file://serfmacro.patch \
- "
-
-SRC_URI[sha256sum] = "c9130e8d0b75728a66f0e7038fc77052e671830d785b5616aad53b4810d3cc28"
-
-inherit autotools pkgconfig gettext python3native
-
-CVE_PRODUCT = "apache:subversion"
-
-PACKAGECONFIG ?= ""
-
-PACKAGECONFIG[boost] = "--with-boost=${RECIPE_SYSROOT}${exec_prefix},--without-boost,boost"
-PACKAGECONFIG[sasl] = "--with-sasl,--without-sasl,cyrus-sasl"
-PACKAGECONFIG[gnome-keyring] = "--with-gnome-keyring,--without-gnome-keyring,glib-2.0 gnome-keyring"
-
-EXTRA_OECONF = " \
- --with-apr=${STAGING_BINDIR_CROSS} \
- --with-apr-util=${STAGING_BINDIR_CROSS} \
- --without-apxs \
- --without-berkeley-db \
- --without-swig \
- --disable-keychain \
- --with-utf8proc=internal \
- ac_cv_path_RUBY=none \
-"
-
-EXTRA_OEMAKE += "pkgconfig_dir=${libdir}/pkgconfig"
-
-acpaths = "-I build/ -I build/ac-macros/"
-
-CPPFLAGS += "-P"
-BUILD_CPPFLAGS += "-P"
-
-do_configure:prepend () {
- rm -f ${S}/libtool
- rm -f ${S}/build/libtool.m4 ${S}/build/ltmain.sh ${S}/build/ltoptions.m4 ${S}/build/ltsugar.m4 ${S}/build/ltversion.m4 ${S}/build/lt~obsolete.m4
- rm -f ${S}/aclocal.m4
- sed -i -e 's:with_sasl="/usr/local":with_sasl="${STAGING_DIR}":' ${S}/build/ac-macros/sasl.m4
-}
-
-#| x86_64-linux-libtool: install: warning: `/home/pokybuild/yocto-autobuilder/yocto-worker/nightly-qa-logrotate/build/build/tmp/work/x86_64-linux/subversion-native/1.8.9-r0/build/subversion/libsvn_ra_local/libsvn_ra_local-1.la' has not been installed in `/home/pokybuild/yocto-autobuilder/yocto-worker/nightly-qa-logrotate/build/build/tmp/sysroots/x86_64-linux/usr/lib'| x86_64-linux-libtool: install: warning: `/home/pokybuild/yocto-autobuilder/yocto-worker/nightly-qa-logrotate/build/build/tmp/work/x86_64-linux/subversion-native/1.8.9-r0/build/subversion/libsvn_repos/libsvn_repos-1.la' has not been installed in `/home/pokybuild/yocto-autobuilder/yocto-worker/nightly-qa-logrotate/build/build/tmp/sysroots/x86_64-linux/usr/lib'| /usr/bin/ld: cannot find -lsvn_delta-1| collect2: ld returned 1 exit status| x86_64-linux-libtool: install: warning: `/home/pokybuild/yocto-autobuilder/yocto-worker/nightly-qa-logrotate/build/build/tmp/work/x86_64-linux/subversion-native/1.8.9-r0/build/subversion/libsvn_ra_svn/libsvn_ra_svn-1.la' has not been installed in `/home/pokybuild/yocto-autobuilder/yocto-worker/nightly-qa-logrotate/build/build/tmp/sysroots/x86_64-linux/usr/lib'| x86_64-linux-libtool: install: warning: `/home/pokybuild/yocto-autobuilder/yocto-worker/nightly-qa-logrotate/build/build/tmp/work/x86_64-linux/subversion-native/1.8.9-r0/build/subversion/libsvn_ra_serf/libsvn_ra_serf-1.la' has not been installed in `/home/pokybuild/yocto-autobuilder/yocto-worker/nightly-qa-logrotate/build/build/tmp/sysroots/x86_64-linux/usr/lib'
-#| x86_64-linux-libtool: install: error: relink `libsvn_ra_serf-1.la' with the above command before installing it
-#| x86_64-linux-libtool: install: warning: `../../subversion/libsvn_repos/libsvn_repos-1.la' has not been installed in `/home/pokybuild/yocto-autobuilder/yocto-worker/nightly-qa-logrotate/build/build/tmp/sysroots/x86_64-linux/usr/lib'
-#| /home/pokybuild/yocto-autobuilder/yocto-worker/nightly-qa-logrotate/build/build/tmp/work/x86_64-linux/subversion-native/1.8.9-r0/subversion-1.8.9/build-outputs.mk:1090: recipe for target 'install-serf-lib' failed
-#| make: *** [install-serf-lib] Error 1
-PARALLEL_MAKEINST = ""
-
-RDEPENDS:${PN} = "serf"
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/subversion/subversion_1.14.3.bb b/meta/recipes-devtools/subversion/subversion_1.14.3.bb
new file mode 100644
index 0000000000..1cf4e1734b
--- /dev/null
+++ b/meta/recipes-devtools/subversion/subversion_1.14.3.bb
@@ -0,0 +1,61 @@
+SUMMARY = "Subversion (svn) version control system client"
+HOMEPAGE = "http://subversion.apache.org"
+DESCRIPTION = "Subversion is an open source version control system."
+SECTION = "console/network"
+LICENSE = "Apache-2.0 & MIT"
+LIC_FILES_CHKSUM = "file://LICENSE;md5=6487ae7094d359fa90fb9c4096e52e2b"
+
+DEPENDS = "apr-util serf sqlite3 file lz4"
+DEPENDS:append:class-native = " file-replacement-native"
+
+SRC_URI = "${APACHE_MIRROR}/${BPN}/${BPN}-${PV}.tar.bz2 \
+ file://serfmacro.patch \
+ "
+
+SRC_URI[sha256sum] = "949efd451a09435f7e8573574c71c7b71b194d844890fa49cd61d2262ea1a440"
+
+inherit autotools pkgconfig gettext python3native
+
+CVE_PRODUCT = "apache:subversion"
+
+PACKAGECONFIG ?= ""
+
+PACKAGECONFIG[boost] = "--with-boost=${RECIPE_SYSROOT}${exec_prefix},--without-boost,boost"
+PACKAGECONFIG[sasl] = "--with-sasl,--without-sasl,cyrus-sasl"
+PACKAGECONFIG[gnome-keyring] = "--with-gnome-keyring,--without-gnome-keyring,glib-2.0 gnome-keyring"
+
+EXTRA_OECONF = " \
+ --with-apr=${STAGING_BINDIR_CROSS} \
+ --with-apr-util=${STAGING_BINDIR_CROSS} \
+ --without-apxs \
+ --without-berkeley-db \
+ --without-swig \
+ --disable-keychain \
+ --with-utf8proc=internal \
+ ac_cv_path_RUBY=none \
+"
+
+EXTRA_OEMAKE += "pkgconfig_dir=${libdir}/pkgconfig"
+
+acpaths = "-I build/ -I build/ac-macros/"
+
+CPPFLAGS += "-P"
+BUILD_CPPFLAGS += "-P"
+
+do_configure:prepend () {
+ rm -f ${S}/libtool
+ rm -f ${S}/build/libtool.m4 ${S}/build/ltmain.sh ${S}/build/ltoptions.m4 ${S}/build/ltsugar.m4 ${S}/build/ltversion.m4 ${S}/build/lt~obsolete.m4
+ rm -f ${S}/aclocal.m4
+ sed -i -e 's:with_sasl="/usr/local":with_sasl="${STAGING_DIR}":' ${S}/build/ac-macros/sasl.m4
+}
+
+#| x86_64-linux-libtool: install: warning: `/home/pokybuild/yocto-autobuilder/yocto-worker/nightly-qa-logrotate/build/build/tmp/work/x86_64-linux/subversion-native/1.8.9-r0/build/subversion/libsvn_ra_local/libsvn_ra_local-1.la' has not been installed in `/home/pokybuild/yocto-autobuilder/yocto-worker/nightly-qa-logrotate/build/build/tmp/sysroots/x86_64-linux/usr/lib'| x86_64-linux-libtool: install: warning: `/home/pokybuild/yocto-autobuilder/yocto-worker/nightly-qa-logrotate/build/build/tmp/work/x86_64-linux/subversion-native/1.8.9-r0/build/subversion/libsvn_repos/libsvn_repos-1.la' has not been installed in `/home/pokybuild/yocto-autobuilder/yocto-worker/nightly-qa-logrotate/build/build/tmp/sysroots/x86_64-linux/usr/lib'| /usr/bin/ld: cannot find -lsvn_delta-1| collect2: ld returned 1 exit status| x86_64-linux-libtool: install: warning: `/home/pokybuild/yocto-autobuilder/yocto-worker/nightly-qa-logrotate/build/build/tmp/work/x86_64-linux/subversion-native/1.8.9-r0/build/subversion/libsvn_ra_svn/libsvn_ra_svn-1.la' has not been installed in `/home/pokybuild/yocto-autobuilder/yocto-worker/nightly-qa-logrotate/build/build/tmp/sysroots/x86_64-linux/usr/lib'| x86_64-linux-libtool: install: warning: `/home/pokybuild/yocto-autobuilder/yocto-worker/nightly-qa-logrotate/build/build/tmp/work/x86_64-linux/subversion-native/1.8.9-r0/build/subversion/libsvn_ra_serf/libsvn_ra_serf-1.la' has not been installed in `/home/pokybuild/yocto-autobuilder/yocto-worker/nightly-qa-logrotate/build/build/tmp/sysroots/x86_64-linux/usr/lib'
+#| x86_64-linux-libtool: install: error: relink `libsvn_ra_serf-1.la' with the above command before installing it
+#| x86_64-linux-libtool: install: warning: `../../subversion/libsvn_repos/libsvn_repos-1.la' has not been installed in `/home/pokybuild/yocto-autobuilder/yocto-worker/nightly-qa-logrotate/build/build/tmp/sysroots/x86_64-linux/usr/lib'
+#| /home/pokybuild/yocto-autobuilder/yocto-worker/nightly-qa-logrotate/build/build/tmp/work/x86_64-linux/subversion-native/1.8.9-r0/subversion-1.8.9/build-outputs.mk:1090: recipe for target 'install-serf-lib' failed
+#| make: *** [install-serf-lib] Error 1
+PARALLEL_MAKEINST = ""
+
+RDEPENDS:${PN} = "serf"
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/swig/swig.inc b/meta/recipes-devtools/swig/swig.inc
index 13470c1094..7a6c4d7359 100644
--- a/meta/recipes-devtools/swig/swig.inc
+++ b/meta/recipes-devtools/swig/swig.inc
@@ -10,7 +10,7 @@ LIC_FILES_CHKSUM = "file://LICENSE;md5=e7807a6282784a7dde4c846626b08fc6 \
SECTION = "devel"
-DEPENDS = "libpcre bison-native"
+DEPENDS = "libpcre2 bison-native"
SRC_URI = "${SOURCEFORGE_MIRROR}/${BPN}/${BPN}-${PV}.tar.gz"
UPSTREAM_CHECK_URI = "https://sourceforge.net/projects/swig/files/swig/"
diff --git a/meta/recipes-devtools/swig/swig/0001-Use-proc-self-exe-for-swig-swiglib-on-non-Win32-plat.patch b/meta/recipes-devtools/swig/swig/0001-Use-proc-self-exe-for-swig-swiglib-on-non-Win32-plat.patch
index f27f80ea18..5e83e92725 100644
--- a/meta/recipes-devtools/swig/swig/0001-Use-proc-self-exe-for-swig-swiglib-on-non-Win32-plat.patch
+++ b/meta/recipes-devtools/swig/swig/0001-Use-proc-self-exe-for-swig-swiglib-on-non-Win32-plat.patch
@@ -1,4 +1,4 @@
-From a4a0440a644c6c5e5da096efe3cf05ba309a284f Mon Sep 17 00:00:00 2001
+From b88a98348b3841f0b702e314631883d46f9f362d Mon Sep 17 00:00:00 2001
From: "NODA, Kai" <nodakai@gmail.com>
Date: Sun, 22 Apr 2012 17:01:02 +0900
Subject: [PATCH] Use /proc/self/exe for "swig -swiglib" on non-Win32
@@ -8,11 +8,12 @@ If it wasn't found, then fall back to a fixed string just as before.
Upstream-Status: Submitted
http://sourceforge.net/mailarchive/message.php?msg_id=29179733
-
---
- Source/Modules/main.cxx | 24 ++++++++++++++++++++++--
+ Source/Modules/main.cxx | 24 ++++++++++++++++++++++--
1 file changed, 22 insertions(+), 2 deletions(-)
+diff --git a/Source/Modules/main.cxx b/Source/Modules/main.cxx
+index 76b4f9d..de0a512 100644
--- a/Source/Modules/main.cxx
+++ b/Source/Modules/main.cxx
@@ -25,6 +25,11 @@
@@ -27,7 +28,7 @@ http://sourceforge.net/mailarchive/message.php?msg_id=29179733
// Global variables
-@@ -934,9 +939,9 @@ int SWIG_main(int argc, char *argv[], co
+@@ -895,9 +900,9 @@ int SWIG_main(int argc, char *argv[], const TargetLanguageModule *tlm) {
// Check for SWIG_LIB environment variable
if ((c = getenv("SWIG_LIB")) == (char *) 0) {
@@ -38,7 +39,7 @@ http://sourceforge.net/mailarchive/message.php?msg_id=29179733
if (!(GetModuleFileName(0, buf, MAX_PATH) == 0 || (p = strrchr(buf, '\\')) == 0)) {
*(p + 1) = '\0';
SwigLib = NewStringf("%sLib", buf); // Native windows installation path
-@@ -946,7 +951,22 @@ int SWIG_main(int argc, char *argv[], co
+@@ -907,7 +912,22 @@ int SWIG_main(int argc, char *argv[], const TargetLanguageModule *tlm) {
if (Len(SWIG_LIB_WIN_UNIX) > 0)
SwigLibWinUnix = NewString(SWIG_LIB_WIN_UNIX); // Unix installation path using a drive letter (for msys/mingw)
#else
diff --git a/meta/recipes-devtools/swig/swig/0001-configure-use-pkg-config-for-pcre-detection.patch b/meta/recipes-devtools/swig/swig/0001-configure-use-pkg-config-for-pcre-detection.patch
index fdb9760e89..9e16caecca 100644
--- a/meta/recipes-devtools/swig/swig/0001-configure-use-pkg-config-for-pcre-detection.patch
+++ b/meta/recipes-devtools/swig/swig/0001-configure-use-pkg-config-for-pcre-detection.patch
@@ -1,52 +1,56 @@
-From 5c4d6d8538994d5fe9b3b46bfafaf0a605e3bda6 Mon Sep 17 00:00:00 2001
+From 57a15651b46a0f1f84a4dd15d67d104fbfbe3f6e Mon Sep 17 00:00:00 2001
From: Koen Kooi <koen.kooi@linaro.org>
Date: Tue, 17 Jun 2014 08:18:17 +0200
Subject: [PATCH] configure: use pkg-config for pcre detection
Signed-off-by: Koen Kooi <koen.kooi@linaro.org>
Upstream-Status: Pending
+
---
- configure.ac | 38 +++++++-------------------------------
- 1 file changed, 7 insertions(+), 31 deletions(-)
+ configure.ac | 39 +++++++--------------------------------
+ 1 file changed, 7 insertions(+), 32 deletions(-)
+diff --git a/configure.ac b/configure.ac
+index c060028..a330266 100644
--- a/configure.ac
+++ b/configure.ac
-@@ -52,38 +52,14 @@ AC_MSG_RESULT([$with_pcre])
+@@ -49,39 +49,14 @@ AC_MSG_RESULT([$with_pcre])
dnl To make configuring easier, check for a locally built PCRE using the Tools/pcre-build.sh script
if test x"${with_pcre}" = xyes ; then
-- AC_MSG_CHECKING([whether to use local PCRE])
+- AC_MSG_CHECKING([whether to use local PCRE2])
- local_pcre_config=no
-- if test -z $PCRE_CONFIG; then
-- if test -f `pwd`/pcre/pcre-swig-install/bin/pcre-config; then
-- PCRE_CONFIG=`pwd`/pcre/pcre-swig-install/bin/pcre-config
-- local_pcre_config=$PCRE_CONFIG
+- if test -z "$PCRE2_CONFIG"; then
+- if test -f `pwd`/pcre/pcre-swig-install/bin/pcre2-config; then
+- PCRE2_CONFIG=`pwd`/pcre/pcre-swig-install/bin/pcre2-config
+- local_pcre_config=$PCRE2_CONFIG
- fi
- fi
- AC_MSG_RESULT([$local_pcre_config])
-fi
-AS_IF([test "x$with_pcre" != xno],
-- [AX_PATH_GENERIC([pcre],
+- [AX_PATH_GENERIC([pcre2],
- [], dnl Minimal version of PCRE we need -- accept any
- [], dnl custom sed script for version parsing is not needed
-- [AC_DEFINE([HAVE_PCRE], [1], [Define if you have PCRE library])
-- LIBS="$LIBS $PCRE_LIBS"
-- CPPFLAGS="$CPPFLAGS $PCRE_CFLAGS"
+- [AC_DEFINE([HAVE_PCRE], [1], [Define if you have PCRE2 library])
+- LIBS="$LIBS $PCRE2_LIBS"
+- CPPFLAGS="$CPPFLAGS $PCRE2_CFLAGS"
- ],
- [AC_MSG_FAILURE([
-- Cannot find pcre-config script from PCRE (Perl Compatible Regular Expressions)
+- Cannot find pcre2-config script from PCRE2 (Perl Compatible Regular Expressions)
- library package. This dependency is needed for configure to complete,
- Either:
-- - Install the PCRE developer package on your system (preferred approach).
-- - Download the PCRE source tarball, build and install on your system
+- - Install the PCRE2 developer package on your system (preferred approach).
+- - Download the PCRE2 source tarball, build and install on your system
- as you would for any package built from source distribution.
-- - Use the Tools/pcre-build.sh script to build PCRE just for SWIG to statically
+- - Use the Tools/pcre-build.sh script to build PCRE2 just for SWIG to statically
- link against. Run 'Tools/pcre-build.sh --help' for instructions.
-- (quite easy and does not require privileges to install PCRE on your system)
+- (quite easy and does not require privileges to install PCRE2 on your system)
- - Use configure --without-pcre to disable regular expressions support in SWIG
- (not recommended).])
-- ])
-+ PKG_CHECK_MODULES([PCRE], [libpcre], [
+- ],
+- [],[],[--libs8])
++ PKG_CHECK_MODULES([PCRE], [libpcre2], [
+ AC_DEFINE([HAVE_PCRE], [1], [Define if you have PCRE library])
+ LIBS="$LIBS $PCRE_LIBS"
+ CPPFLAGS="$CPPFLAGS $PCRE_CFLAGS"
diff --git a/meta/recipes-devtools/swig/swig/determinism.patch b/meta/recipes-devtools/swig/swig/determinism.patch
index 84c399182a..9c49414ad7 100644
--- a/meta/recipes-devtools/swig/swig/determinism.patch
+++ b/meta/recipes-devtools/swig/swig/determinism.patch
@@ -1,13 +1,22 @@
+From 28648b3873d83e26bd19b64ce2c0a41ced9292d3 Mon Sep 17 00:00:00 2001
+From: Richard Purdie <richard.purdie@linuxfoundation.org>
+Date: Mon, 1 Mar 2021 00:11:10 +0000
+Subject: [PATCH] swig: Fix reproducibility issue
+
Remove the compiler commandline/platform from the compiled binary as this
breaks reproducibilty.
Upstream-Status: Inappropriate [OE reproducibiity fix upstream unlikely to take]
RP 2021/3/1
+---
+ Source/Modules/main.cxx | 1 -
+ 1 file changed, 1 deletion(-)
-
+diff --git a/Source/Modules/main.cxx b/Source/Modules/main.cxx
+index de0a512..ac9e825 100644
--- a/Source/Modules/main.cxx
+++ b/Source/Modules/main.cxx
-@@ -642,7 +642,6 @@ static void getoptions(int argc, char *a
+@@ -638,7 +638,6 @@ static void getoptions(int argc, char *argv[]) {
}
} else if (strcmp(argv[i], "-version") == 0) {
fprintf(stdout, "\nSWIG Version %s\n", Swig_package_version());
diff --git a/meta/recipes-devtools/swig/swig_4.0.2.bb b/meta/recipes-devtools/swig/swig_4.0.2.bb
deleted file mode 100644
index 718ad89a5d..0000000000
--- a/meta/recipes-devtools/swig/swig_4.0.2.bb
+++ /dev/null
@@ -1,7 +0,0 @@
-require ${BPN}.inc
-
-SRC_URI += "file://0001-Use-proc-self-exe-for-swig-swiglib-on-non-Win32-plat.patch \
- file://0001-configure-use-pkg-config-for-pcre-detection.patch \
- file://determinism.patch \
- "
-SRC_URI[sha256sum] = "d53be9730d8d58a16bf0cbd1f8ac0c0c3e1090573168bfa151b01eb47fa906fc"
diff --git a/meta/recipes-devtools/swig/swig_4.2.1.bb b/meta/recipes-devtools/swig/swig_4.2.1.bb
new file mode 100644
index 0000000000..4b438a7298
--- /dev/null
+++ b/meta/recipes-devtools/swig/swig_4.2.1.bb
@@ -0,0 +1,7 @@
+require ${BPN}.inc
+
+SRC_URI += "file://0001-Use-proc-self-exe-for-swig-swiglib-on-non-Win32-plat.patch \
+ file://0001-configure-use-pkg-config-for-pcre-detection.patch \
+ file://determinism.patch \
+ "
+SRC_URI[sha256sum] = "fa045354e2d048b2cddc69579e4256245d4676894858fcf0bab2290ecf59b7d8"
diff --git a/meta/recipes-devtools/syslinux/syslinux/0001-install-don-t-install-obsolete-file-com32.ld.patch b/meta/recipes-devtools/syslinux/syslinux/0001-install-don-t-install-obsolete-file-com32.ld.patch
deleted file mode 100644
index bfd7f41b13..0000000000
--- a/meta/recipes-devtools/syslinux/syslinux/0001-install-don-t-install-obsolete-file-com32.ld.patch
+++ /dev/null
@@ -1,32 +0,0 @@
-From bf6db5b48ec25f83939f1fdebb59028bc3c40b00 Mon Sep 17 00:00:00 2001
-From: "H. Peter Anvin (Intel)" <hpa@zytor.com>
-Date: Wed, 6 Feb 2019 11:30:51 -0800
-Subject: [PATCH] install: don't install obsolete file com32.ld
-
-com32.ld has been obsolete for a long time, and has been removed now;
-don't install it either.
-
-Reported-by: Joakim Tjernlund <Joakim.Tjernlund@infinera.com>
-Signed-off-by: H. Peter Anvin (Intel) <hpa@zytor.com>
-
-Upstream-Status: Backport
-Signed-off-by: Anuj Mittal <anuj.mittal@intel.com>
----
- com32/lib/Makefile | 1 -
- 1 file changed, 1 deletion(-)
-
-diff --git a/com32/lib/Makefile b/com32/lib/Makefile
-index 74fff149..6a931492 100644
---- a/com32/lib/Makefile
-+++ b/com32/lib/Makefile
-@@ -113,7 +113,6 @@ spotless: clean
-
- install: all
- mkdir -m 755 -p $(INSTALLROOT)$(COM32DIR)
-- install -m 644 $(SRC)/com32.ld $(INSTALLROOT)$(COM32DIR)
- -rm -rf $(INSTALLROOT)$(COM32DIR)/include
- cp -r $(SRC)/../include $(INSTALLROOT)$(COM32DIR)
-
---
-2.17.1
-
diff --git a/meta/recipes-devtools/syslinux/syslinux/0001-linux-syslinux-support-ext2-3-4-device.patch b/meta/recipes-devtools/syslinux/syslinux/0001-linux-syslinux-support-ext2-3-4-device.patch
index 47a8dac10e..1a4a4e3755 100644
--- a/meta/recipes-devtools/syslinux/syslinux/0001-linux-syslinux-support-ext2-3-4-device.patch
+++ b/meta/recipes-devtools/syslinux/syslinux/0001-linux-syslinux-support-ext2-3-4-device.patch
@@ -1,7 +1,7 @@
-From 60f3833ab2b5899771b4eab654e88f9888b99501 Mon Sep 17 00:00:00 2001
+From a469ce05055c44fdca1ca094ff3a735cc059480d Mon Sep 17 00:00:00 2001
From: Robert Yang <liezhi.yang@windriver.com>
Date: Wed, 31 Dec 2014 16:01:55 +0800
-Subject: [PATCH 1/9] linux/syslinux: support ext2/3/4 device
+Subject: [PATCH] linux/syslinux: support ext2/3/4 device
* Support ext2/3/4 deivce.
* The open_ext2_fs() checks whether it is an ext2/3/4 device,
@@ -19,10 +19,10 @@ Tested-by: Du Dolpher <dolpher.du@intel.com>
1 file changed, 36 insertions(+)
diff --git a/linux/syslinux.c b/linux/syslinux.c
-index 912de71..36fc202 100755
+index 46d5624..1cc276b 100755
--- a/linux/syslinux.c
+++ b/linux/syslinux.c
-@@ -256,6 +256,23 @@ int do_open_file(char *name)
+@@ -257,6 +257,23 @@ int do_open_file(char *name)
return fd;
}
@@ -46,7 +46,7 @@ index 912de71..36fc202 100755
int main(int argc, char *argv[])
{
static unsigned char sectbuf[SECTOR_SIZE];
-@@ -313,6 +330,24 @@ int main(int argc, char *argv[])
+@@ -314,6 +331,24 @@ int main(int argc, char *argv[])
die("can't combine an offset with a block device");
}
@@ -71,7 +71,7 @@ index 912de71..36fc202 100755
xpread(dev_fd, sectbuf, SECTOR_SIZE, opt.offset);
fsync(dev_fd);
-@@ -322,6 +357,7 @@ int main(int argc, char *argv[])
+@@ -323,6 +358,7 @@ int main(int argc, char *argv[])
*/
if ((errmsg = syslinux_check_bootsect(sectbuf, &fs_type))) {
fprintf(stderr, "%s: %s\n", opt.device, errmsg);
@@ -79,6 +79,3 @@ index 912de71..36fc202 100755
exit(1);
}
---
-1.9.1
-
diff --git a/meta/recipes-devtools/syslinux/syslinux/0002-linux-syslinux-implement-open_ext2_fs.patch b/meta/recipes-devtools/syslinux/syslinux/0002-linux-syslinux-implement-open_ext2_fs.patch
index 77cf060451..1acd9b0b69 100644
--- a/meta/recipes-devtools/syslinux/syslinux/0002-linux-syslinux-implement-open_ext2_fs.patch
+++ b/meta/recipes-devtools/syslinux/syslinux/0002-linux-syslinux-implement-open_ext2_fs.patch
@@ -1,7 +1,7 @@
-From 07fb737fb60c08eaaa41989d531fc23009523546 Mon Sep 17 00:00:00 2001
+From c6ddb179577dd4c4ea4d1d154f979e90e53d6bf1 Mon Sep 17 00:00:00 2001
From: Robert Yang <liezhi.yang@windriver.com>
Date: Wed, 31 Dec 2014 16:09:18 +0800
-Subject: [PATCH 2/9] linux/syslinux: implement open_ext2_fs()
+Subject: [PATCH] linux/syslinux: implement open_ext2_fs()
The open_ext2_fs() checks whether it is an ext2/ext3/ext4 device, and
return:
@@ -15,14 +15,14 @@ Signed-off-by: Robert Yang <liezhi.yang@windriver.com>
Tested-by: Du Dolpher <dolpher.du@intel.com>
---
linux/Makefile | 2 +-
- linux/syslinux.c | 80 ++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+ linux/syslinux.c | 80 ++++++++++++++++++++++++++++++++++++++++++++++++
2 files changed, 81 insertions(+), 1 deletion(-)
diff --git a/linux/Makefile b/linux/Makefile
-index 11667e1..ac1ac58 100644
+index 5a49d81..67cbbb4 100644
--- a/linux/Makefile
+++ b/linux/Makefile
-@@ -51,7 +51,7 @@ spotless: clean
+@@ -52,7 +52,7 @@ spotless: clean
installer: syslinux syslinux-nomtools
syslinux: $(OBJS)
@@ -32,10 +32,10 @@ index 11667e1..ac1ac58 100644
syslinux-nomtools: syslinux
ln -f $< $@
diff --git a/linux/syslinux.c b/linux/syslinux.c
-index 36fc202..cc4e7da 100755
+index 1cc276b..f3727ea 100755
--- a/linux/syslinux.c
+++ b/linux/syslinux.c
-@@ -72,6 +72,7 @@
+@@ -73,6 +73,7 @@
#include "syslxfs.h"
#include "setadv.h"
#include "syslxopt.h" /* unified options */
@@ -43,7 +43,7 @@ index 36fc202..cc4e7da 100755
extern const char *program; /* Name of program */
-@@ -82,6 +83,9 @@ char *mntpath = NULL; /* Path on which to mount */
+@@ -83,6 +84,9 @@ char *mntpath = NULL; /* Path on which to mount */
int loop_fd = -1; /* Loop device */
#endif
@@ -53,7 +53,7 @@ index 36fc202..cc4e7da 100755
void __attribute__ ((noreturn)) die(const char *msg)
{
fprintf(stderr, "%s: %s\n", program, msg);
-@@ -266,6 +270,82 @@ int do_open_file(char *name)
+@@ -267,6 +271,82 @@ int do_open_file(char *name)
*/
static int open_ext2_fs(const char *device, const char *subdir)
{
@@ -136,6 +136,3 @@ index 36fc202..cc4e7da 100755
}
/* The install func for ext2, ext3 and ext4 */
---
-1.9.1
-
diff --git a/meta/recipes-devtools/syslinux/syslinux/0003-linux-syslinux-implement-install_to_ext2.patch b/meta/recipes-devtools/syslinux/syslinux/0003-linux-syslinux-implement-install_to_ext2.patch
index 84ba10526a..8d2fef2d49 100644
--- a/meta/recipes-devtools/syslinux/syslinux/0003-linux-syslinux-implement-install_to_ext2.patch
+++ b/meta/recipes-devtools/syslinux/syslinux/0003-linux-syslinux-implement-install_to_ext2.patch
@@ -1,7 +1,7 @@
-From 64d856b243812907068776b204a003a3a8fa122a Mon Sep 17 00:00:00 2001
+From 9110cf47d04ca1958d14228908a5c57a23769e7d Mon Sep 17 00:00:00 2001
From: Robert Yang <liezhi.yang@windriver.com>
Date: Wed, 31 Dec 2014 16:17:42 +0800
-Subject: [PATCH 3/9] linux/syslinux: implement install_to_ext2()
+Subject: [PATCH] linux/syslinux: implement install_to_ext2()
* The handle_adv_on_ext() checks whether we only need update adv.
* The write_to_ext() installs files (ldlinux.sys or ldlinux.c32) to the
@@ -13,14 +13,14 @@ Upstream-Status: Submitted
Signed-off-by: Robert Yang <liezhi.yang@windriver.com>
Tested-by: Du Dolpher <dolpher.du@intel.com>
---
- linux/syslinux.c | 79 ++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+ linux/syslinux.c | 79 ++++++++++++++++++++++++++++++++++++++++++++++++
1 file changed, 79 insertions(+)
diff --git a/linux/syslinux.c b/linux/syslinux.c
-index cc4e7da..45f080d 100755
+index f3727ea..fc5edb1 100755
--- a/linux/syslinux.c
+++ b/linux/syslinux.c
-@@ -346,11 +346,90 @@ static int open_ext2_fs(const char *device, const char *subdir)
+@@ -347,11 +347,90 @@ static int open_ext2_fs(const char *device, const char *subdir)
fail:
(void) ext2fs_close(e2fs);
return -1;
@@ -111,6 +111,3 @@ index cc4e7da..45f080d 100755
}
int main(int argc, char *argv[])
---
-1.9.1
-
diff --git a/meta/recipes-devtools/syslinux/syslinux/0004-linux-syslinux-add-ext_file_read-and-ext_file_write.patch b/meta/recipes-devtools/syslinux/syslinux/0004-linux-syslinux-add-ext_file_read-and-ext_file_write.patch
index 64b56d92e0..0a32969154 100644
--- a/meta/recipes-devtools/syslinux/syslinux/0004-linux-syslinux-add-ext_file_read-and-ext_file_write.patch
+++ b/meta/recipes-devtools/syslinux/syslinux/0004-linux-syslinux-add-ext_file_read-and-ext_file_write.patch
@@ -1,7 +1,7 @@
-From 35d3842cc4b930c5102eed2921e0189b7f4fd069 Mon Sep 17 00:00:00 2001
+From 1957fc6c069493c6789557936adb675f5e7e51ba Mon Sep 17 00:00:00 2001
From: Robert Yang <liezhi.yang@windriver.com>
Date: Wed, 31 Dec 2014 16:43:37 +0800
-Subject: [PATCH 4/9] linux/syslinux: add ext_file_read() and ext_file_write()
+Subject: [PATCH] linux/syslinux: add ext_file_read() and ext_file_write()
Will use them to read and write on the extX device.
@@ -10,14 +10,14 @@ Upstream-Status: Submitted
Signed-off-by: Robert Yang <liezhi.yang@windriver.com>
Tested-by: Du Dolpher <dolpher.du@intel.com>
---
- linux/syslinux.c | 62 ++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+ linux/syslinux.c | 62 ++++++++++++++++++++++++++++++++++++++++++++++++
1 file changed, 62 insertions(+)
diff --git a/linux/syslinux.c b/linux/syslinux.c
-index 45f080d..247c86a 100755
+index fc5edb1..c7c1994 100755
--- a/linux/syslinux.c
+++ b/linux/syslinux.c
-@@ -349,6 +349,68 @@ fail:
+@@ -350,6 +350,68 @@ fail:
}
@@ -86,6 +86,3 @@ index 45f080d..247c86a 100755
/*
* Install the boot block on the specified device.
* Must be run AFTER file installed.
---
-1.9.1
-
diff --git a/meta/recipes-devtools/syslinux/syslinux/0005-linux-syslinux-implement-handle_adv_on_ext.patch b/meta/recipes-devtools/syslinux/syslinux/0005-linux-syslinux-implement-handle_adv_on_ext.patch
index 829e7c4ca1..76885f762b 100644
--- a/meta/recipes-devtools/syslinux/syslinux/0005-linux-syslinux-implement-handle_adv_on_ext.patch
+++ b/meta/recipes-devtools/syslinux/syslinux/0005-linux-syslinux-implement-handle_adv_on_ext.patch
@@ -1,7 +1,7 @@
-From cdb980b37f40dc2c41891434c7736e49da53756e Mon Sep 17 00:00:00 2001
+From ee3a60829edc9d3344dc872fb0158e7b006f02be Mon Sep 17 00:00:00 2001
From: Robert Yang <liezhi.yang@windriver.com>
Date: Wed, 31 Dec 2014 16:47:52 +0800
-Subject: [PATCH 5/9] linux/syslinux: implement handle_adv_on_ext()
+Subject: [PATCH] linux/syslinux: implement handle_adv_on_ext()
It reads adv if found on the device, or resets syslinux_adv, or update
the adv if update adv only.
@@ -11,14 +11,14 @@ Upstream-Status: Submitted
Signed-off-by: Robert Yang <liezhi.yang@windriver.com>
Tested-by: Du Dolpher <dolpher.du@intel.com>
---
- linux/syslinux.c | 97 ++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+ linux/syslinux.c | 97 ++++++++++++++++++++++++++++++++++++++++++++++++
1 file changed, 97 insertions(+)
diff --git a/linux/syslinux.c b/linux/syslinux.c
-index 247c86a..de5d272 100755
+index c7c1994..90b8edd 100755
--- a/linux/syslinux.c
+++ b/linux/syslinux.c
-@@ -421,6 +421,103 @@ int install_bootblock(int fd, const char *device)
+@@ -422,6 +422,103 @@ int install_bootblock(int fd, const char *device)
static int handle_adv_on_ext(void)
{
@@ -122,6 +122,3 @@ index 247c86a..de5d272 100755
}
/* Write files, adv, boot sector */
---
-1.9.1
-
diff --git a/meta/recipes-devtools/syslinux/syslinux/0006-linux-syslinux-implement-write_to_ext-and-add-syslin.patch b/meta/recipes-devtools/syslinux/syslinux/0006-linux-syslinux-implement-write_to_ext-and-add-syslin.patch
index cba87252a5..ba6d29d3bb 100644
--- a/meta/recipes-devtools/syslinux/syslinux/0006-linux-syslinux-implement-write_to_ext-and-add-syslin.patch
+++ b/meta/recipes-devtools/syslinux/syslinux/0006-linux-syslinux-implement-write_to_ext-and-add-syslin.patch
@@ -1,7 +1,7 @@
-From 922e56c10e36d876777580c84daef9a66bea6525 Mon Sep 17 00:00:00 2001
+From 758731ce2432ab29a73505bbeb99a960996ab686 Mon Sep 17 00:00:00 2001
From: Robert Yang <liezhi.yang@windriver.com>
Date: Wed, 31 Dec 2014 17:20:43 +0800
-Subject: [PATCH 6/9] linux/syslinux: implement write_to_ext() and add
+Subject: [PATCH] linux/syslinux: implement write_to_ext() and add
syslinuxext.c
* The write_to_ext() write file to the extX device, and handle the boot
@@ -17,7 +17,7 @@ Tested-by: Du Dolpher <dolpher.du@intel.com>
libinstaller/syslinuxext.c | 7 +++
libinstaller/syslinuxext.h | 5 ++
linux/Makefile | 3 +-
- linux/syslinux.c | 118 +++++++++++++++++++++++++++++++++++++++++++++
+ linux/syslinux.c | 118 +++++++++++++++++++++++++++++++++++++
4 files changed, 132 insertions(+), 1 deletion(-)
create mode 100644 libinstaller/syslinuxext.c
create mode 100644 libinstaller/syslinuxext.h
@@ -47,10 +47,10 @@ index 0000000..8abd8b9
+
+void syslinux_patch_bootsect(int dev_fd);
diff --git a/linux/Makefile b/linux/Makefile
-index ac1ac58..3b23867 100644
+index 67cbbb4..567134c 100644
--- a/linux/Makefile
+++ b/linux/Makefile
-@@ -30,7 +30,8 @@ SRCS = syslinux.c \
+@@ -31,7 +31,8 @@ SRCS = syslinux.c \
../libinstaller/syslxmod.c \
../libinstaller/bootsect_bin.c \
../libinstaller/ldlinuxc32_bin.c \
@@ -61,7 +61,7 @@ index ac1ac58..3b23867 100644
.SUFFIXES: .c .o .i .s .S
diff --git a/linux/syslinux.c b/linux/syslinux.c
-index de5d272..f0c97a8 100755
+index 90b8edd..7a20fe6 100755
--- a/linux/syslinux.c
+++ b/linux/syslinux.c
@@ -46,6 +46,7 @@
@@ -72,7 +72,7 @@ index de5d272..f0c97a8 100755
#include "linuxioctl.h"
-@@ -72,6 +73,7 @@
+@@ -73,6 +74,7 @@
#include "syslxfs.h"
#include "setadv.h"
#include "syslxopt.h" /* unified options */
@@ -80,7 +80,7 @@ index de5d272..f0c97a8 100755
#include <ext2fs/ext2fs.h>
extern const char *program; /* Name of program */
-@@ -419,6 +421,12 @@ int install_bootblock(int fd, const char *device)
+@@ -420,6 +422,12 @@ int install_bootblock(int fd, const char *device)
{
}
@@ -93,7 +93,7 @@ index de5d272..f0c97a8 100755
static int handle_adv_on_ext(void)
{
int i, retval, found_file;
-@@ -524,6 +532,116 @@ fail:
+@@ -525,6 +533,116 @@ fail:
static int write_to_ext(const char *filename, const char *str, int length,
int i_flags, int dev_fd, const char *subdir)
{
@@ -210,6 +210,3 @@ index de5d272..f0c97a8 100755
}
/* The install func for ext2, ext3 and ext4 */
---
-1.9.1
-
diff --git a/meta/recipes-devtools/syslinux/syslinux/0007-linux-syslinux-implement-ext_construct_sectmap_fs.patch b/meta/recipes-devtools/syslinux/syslinux/0007-linux-syslinux-implement-ext_construct_sectmap_fs.patch
index 3913811917..57cdaf437b 100644
--- a/meta/recipes-devtools/syslinux/syslinux/0007-linux-syslinux-implement-ext_construct_sectmap_fs.patch
+++ b/meta/recipes-devtools/syslinux/syslinux/0007-linux-syslinux-implement-ext_construct_sectmap_fs.patch
@@ -1,7 +1,7 @@
-From a95b831e18dd123f859bc5e6c4cecdcc0184ee37 Mon Sep 17 00:00:00 2001
+From 906205015601d5d1190e7326f51ea4316a74a479 Mon Sep 17 00:00:00 2001
From: Robert Yang <liezhi.yang@windriver.com>
Date: Fri, 2 Jan 2015 12:18:02 +0800
-Subject: [PATCH 7/9] linux/syslinux: implement ext_construct_sectmap_fs()
+Subject: [PATCH] linux/syslinux: implement ext_construct_sectmap_fs()
The ext_construct_sectmap_fs() constucts the sector according to the
bmap.
@@ -11,14 +11,14 @@ Upstream-Status: Submitted
Signed-off-by: Robert Yang <liezhi.yang@windriver.com>
Tested-by: Du Dolpher <dolpher.du@intel.com>
---
- linux/syslinux.c | 50 ++++++++++++++++++++++++++++++++++++++++++++++++++
+ linux/syslinux.c | 50 ++++++++++++++++++++++++++++++++++++++++++++++++
1 file changed, 50 insertions(+)
diff --git a/linux/syslinux.c b/linux/syslinux.c
-index f0c97a8..c741750 100755
+index 7a20fe6..4e43921 100755
--- a/linux/syslinux.c
+++ b/linux/syslinux.c
-@@ -421,10 +421,60 @@ int install_bootblock(int fd, const char *device)
+@@ -422,10 +422,60 @@ int install_bootblock(int fd, const char *device)
{
}
@@ -79,6 +79,3 @@ index f0c97a8..c741750 100755
}
static int handle_adv_on_ext(void)
---
-1.9.1
-
diff --git a/meta/recipes-devtools/syslinux/syslinux/0008-libinstaller-syslinuxext-implement-syslinux_patch_bo.patch b/meta/recipes-devtools/syslinux/syslinux/0008-libinstaller-syslinuxext-implement-syslinux_patch_bo.patch
index f1d01fa43c..b026eba5ad 100644
--- a/meta/recipes-devtools/syslinux/syslinux/0008-libinstaller-syslinuxext-implement-syslinux_patch_bo.patch
+++ b/meta/recipes-devtools/syslinux/syslinux/0008-libinstaller-syslinuxext-implement-syslinux_patch_bo.patch
@@ -1,4 +1,4 @@
-From efce87e5ab98664c57e5f4e3955a2f3747df5737 Mon Sep 17 00:00:00 2001
+From acfc8214d3d60b7e251ae66a59b81cdd1ff7a6dc Mon Sep 17 00:00:00 2001
From: Robert Yang <liezhi.yang@windriver.com>
Date: Fri, 2 Jan 2015 12:26:46 +0800
Subject: [PATCH] libinstaller/syslinuxext: implement syslinux_patch_bootsect()
@@ -22,7 +22,7 @@ Signed-off-by: Anuj Mittal <anuj.mittal@intel.com>
3 files changed, 176 insertions(+), 165 deletions(-)
diff --git a/extlinux/Makefile b/extlinux/Makefile
-index 1721ee54..62a49728 100644
+index 1721ee5..62a4972 100644
--- a/extlinux/Makefile
+++ b/extlinux/Makefile
@@ -32,7 +32,8 @@ SRCS = main.c \
@@ -36,7 +36,7 @@ index 1721ee54..62a49728 100644
.SUFFIXES: .c .o .i .s .S
diff --git a/extlinux/main.c b/extlinux/main.c
-index ebff7eae..9add50fb 100644
+index ebff7ea..9add50f 100644
--- a/extlinux/main.c
+++ b/extlinux/main.c
@@ -62,6 +62,7 @@
@@ -244,7 +244,7 @@ index ebff7eae..9add50fb 100644
/* Construct the boot file map */
diff --git a/libinstaller/syslinuxext.c b/libinstaller/syslinuxext.c
-index bb54cefc..9ae82884 100644
+index bb54cef..9ae8288 100644
--- a/libinstaller/syslinuxext.c
+++ b/libinstaller/syslinuxext.c
@@ -1,7 +1,178 @@
@@ -426,6 +426,3 @@ index bb54cefc..9ae82884 100644
+ set_32(&sbs->bsHiddenSecs, geo.start);
}
---
-2.17.1
-
diff --git a/meta/recipes-devtools/syslinux/syslinux/0009-linux-syslinux-implement-install_bootblock.patch b/meta/recipes-devtools/syslinux/syslinux/0009-linux-syslinux-implement-install_bootblock.patch
index cd89d92485..1c875e81f6 100644
--- a/meta/recipes-devtools/syslinux/syslinux/0009-linux-syslinux-implement-install_bootblock.patch
+++ b/meta/recipes-devtools/syslinux/syslinux/0009-linux-syslinux-implement-install_bootblock.patch
@@ -1,7 +1,7 @@
-From 76c465e87312dbc6cffd05427f1f4d2ebdee4f13 Mon Sep 17 00:00:00 2001
+From c28aae8bd381f77e66e6bac79761df7a484b054c Mon Sep 17 00:00:00 2001
From: Robert Yang <liezhi.yang@windriver.com>
Date: Fri, 2 Jan 2015 12:28:35 +0800
-Subject: [PATCH 9/9] linux/syslinux: implement install_bootblock()
+Subject: [PATCH] linux/syslinux: implement install_bootblock()
Refer to the install_bootblock() in extlinux/main.c to make
linux/syslinux.c's install_bootblock() which only supports ext2/3/4.
@@ -15,10 +15,10 @@ Tested-by: Du Dolpher <dolpher.du@intel.com>
1 file changed, 20 insertions(+)
diff --git a/linux/syslinux.c b/linux/syslinux.c
-index c741750..917f83a 100755
+index 4e43921..93ed880 100755
--- a/linux/syslinux.c
+++ b/linux/syslinux.c
-@@ -419,6 +419,26 @@ static int ext_file_write(ext2_file_t e2_file, const void *buf, size_t count,
+@@ -420,6 +420,26 @@ static int ext_file_write(ext2_file_t e2_file, const void *buf, size_t count,
*/
int install_bootblock(int fd, const char *device)
{
@@ -45,6 +45,3 @@ index c741750..917f83a 100755
}
/* The file's block count */
---
-1.9.1
-
diff --git a/meta/recipes-devtools/syslinux/syslinux/0010-Workaround-multiple-definition-of-symbol-errors.patch b/meta/recipes-devtools/syslinux/syslinux/0010-Workaround-multiple-definition-of-symbol-errors.patch
index 44cb153276..813d10ba5c 100644
--- a/meta/recipes-devtools/syslinux/syslinux/0010-Workaround-multiple-definition-of-symbol-errors.patch
+++ b/meta/recipes-devtools/syslinux/syslinux/0010-Workaround-multiple-definition-of-symbol-errors.patch
@@ -1,13 +1,12 @@
-From 951928f2cad5682c2844e6bd0f7201236c5d9b66 Mon Sep 17 00:00:00 2001
+From f2a5b64785958226c022cac9931b059b98f4e896 Mon Sep 17 00:00:00 2001
From: Merlin Mathesius <mmathesi@redhat.com>
Date: Wed, 13 May 2020 08:02:27 -0500
Subject: [PATCH] Workaround multiple definition of symbol errors
Lifted from Fedora https://src.fedoraproject.org/rpms/syslinux/blob/master/f/0005-Workaround-multiple-definition-of-symbol-errors.patch
-Upstream-Status: Pending
+Upstream-Status: Inactive-Upstream
Signed-off-by: Khem Raj <raj.khem@gmail.com>
-
---
com32/cmenu/Makefile | 2 +-
com32/elflink/ldlinux/Makefile | 2 +-
@@ -18,6 +17,8 @@ Signed-off-by: Khem Raj <raj.khem@gmail.com>
efi/Makefile | 2 +-
7 files changed, 7 insertions(+), 7 deletions(-)
+diff --git a/com32/cmenu/Makefile b/com32/cmenu/Makefile
+index b81b68e..2ae989c 100644
--- a/com32/cmenu/Makefile
+++ b/com32/cmenu/Makefile
@@ -49,7 +49,7 @@ makeoutputdirs:
@@ -29,6 +30,8 @@ Signed-off-by: Khem Raj <raj.khem@gmail.com>
-o $@ $^
tidy dist:
+diff --git a/com32/elflink/ldlinux/Makefile b/com32/elflink/ldlinux/Makefile
+index 87c0d36..2be2a01 100644
--- a/com32/elflink/ldlinux/Makefile
+++ b/com32/elflink/ldlinux/Makefile
@@ -33,7 +33,7 @@ endif
@@ -40,6 +43,8 @@ Signed-off-by: Khem Raj <raj.khem@gmail.com>
LNXCFLAGS += -D__export='__attribute__((visibility("default")))'
LNXLIBOBJS = get_key.lo
+diff --git a/com32/gpllib/Makefile b/com32/gpllib/Makefile
+index 1fec914..2d764d0 100644
--- a/com32/gpllib/Makefile
+++ b/com32/gpllib/Makefile
@@ -24,7 +24,7 @@ makeoutputdirs:
@@ -51,6 +56,8 @@ Signed-off-by: Khem Raj <raj.khem@gmail.com>
tidy dist clean:
find . \( -name \*.o -o -name .\*.d -o -name \*.tmp \) -print0 | \
+diff --git a/com32/hdt/Makefile b/com32/hdt/Makefile
+index 61736d0..1d94785 100644
--- a/com32/hdt/Makefile
+++ b/com32/hdt/Makefile
@@ -52,7 +52,7 @@ QEMU ?= qemu-kvm
@@ -62,6 +69,8 @@ Signed-off-by: Khem Raj <raj.khem@gmail.com>
memtest:
-[ ! -f $(FLOPPY_DIR)/$(MEMTEST) ] && $(WGET) $(MEMTEST_URL) -O $(FLOPPY_DIR)/$(MEMTEST)
+diff --git a/core/Makefile b/core/Makefile
+index 50ff35a..f0a5562 100644
--- a/core/Makefile
+++ b/core/Makefile
@@ -156,7 +156,7 @@ LDSCRIPT = $(SRC)/$(ARCH)/syslinux.ld
@@ -73,6 +82,8 @@ Signed-off-by: Khem Raj <raj.khem@gmail.com>
-T $(LDSCRIPT) \
--unresolved-symbols=report-all \
-E --hash-style=gnu -M -o $@ $< \
+diff --git a/dos/Makefile b/dos/Makefile
+index 4c930d1..5d1c72c 100644
--- a/dos/Makefile
+++ b/dos/Makefile
@@ -19,7 +19,7 @@ include $(MAKEDIR)/embedded.mk
@@ -84,6 +95,8 @@ Signed-off-by: Khem Raj <raj.khem@gmail.com>
OPTFLAGS = -g
INCLUDES = -include code16.h -nostdinc -iwithprefix include \
-I$(SRC) -I$(SRC)/.. -I$(SRC)/../libfat \
+diff --git a/efi/Makefile b/efi/Makefile
+index f4501e7..72e081e 100644
--- a/efi/Makefile
+++ b/efi/Makefile
@@ -71,7 +71,7 @@ $(OBJS): | $(OBJ)/$(ARCH)
diff --git a/meta/recipes-devtools/syslinux/syslinux/0011-install-don-t-install-obsolete-file-com32.ld.patch b/meta/recipes-devtools/syslinux/syslinux/0011-install-don-t-install-obsolete-file-com32.ld.patch
new file mode 100644
index 0000000000..4bc423a1de
--- /dev/null
+++ b/meta/recipes-devtools/syslinux/syslinux/0011-install-don-t-install-obsolete-file-com32.ld.patch
@@ -0,0 +1,29 @@
+From 66447f7c5c6996481ebd68ce8224d3de7525aad8 Mon Sep 17 00:00:00 2001
+From: "H. Peter Anvin (Intel)" <hpa@zytor.com>
+Date: Wed, 6 Feb 2019 11:30:51 -0800
+Subject: [PATCH] install: don't install obsolete file com32.ld
+
+com32.ld has been obsolete for a long time, and has been removed now;
+don't install it either.
+
+Reported-by: Joakim Tjernlund <Joakim.Tjernlund@infinera.com>
+Signed-off-by: H. Peter Anvin (Intel) <hpa@zytor.com>
+
+Upstream-Status: Backport
+Signed-off-by: Anuj Mittal <anuj.mittal@intel.com>
+---
+ com32/lib/Makefile | 1 -
+ 1 file changed, 1 deletion(-)
+
+diff --git a/com32/lib/Makefile b/com32/lib/Makefile
+index 74fff14..6a93149 100644
+--- a/com32/lib/Makefile
++++ b/com32/lib/Makefile
+@@ -113,7 +113,6 @@ spotless: clean
+
+ install: all
+ mkdir -m 755 -p $(INSTALLROOT)$(COM32DIR)
+- install -m 644 $(SRC)/com32.ld $(INSTALLROOT)$(COM32DIR)
+ -rm -rf $(INSTALLROOT)$(COM32DIR)/include
+ cp -r $(SRC)/../include $(INSTALLROOT)$(COM32DIR)
+
diff --git a/meta/recipes-devtools/syslinux/syslinux/0012-libinstaller-Fix-build-with-glibc-2.36.patch b/meta/recipes-devtools/syslinux/syslinux/0012-libinstaller-Fix-build-with-glibc-2.36.patch
new file mode 100644
index 0000000000..21b83e49dc
--- /dev/null
+++ b/meta/recipes-devtools/syslinux/syslinux/0012-libinstaller-Fix-build-with-glibc-2.36.patch
@@ -0,0 +1,56 @@
+From 821d31148c07a8318277be32bc6a943c7fd2ba3f Mon Sep 17 00:00:00 2001
+From: Martin Jansa <Martin.Jansa@gmail.com>
+Date: Sat, 6 Aug 2022 11:53:55 +0000
+Subject: [PATCH] libinstaller: Fix build with glibc-2.36
+
+* add only necessary definitions from linux/fs.h, because including whole
+ causes conflicts with sys/mount.h:
+ http://errors.yoctoproject.org/Errors/Details/664535/
+
+In file included from TOPDIR/tmp-glibc/work/core2-64-oe-linux/syslinux/6.04-pre2-r1/recipe-sysroot/usr/include/linux/fs.h:19,
+ from TOPDIR/tmp-glibc/work/core2-64-oe-linux/syslinux/6.04-pre2-r1/syslinux-6.04-pre2/linux/../libinstaller/linuxioctl.h:19,
+ from TOPDIR/tmp-glibc/work/core2-64-oe-linux/syslinux/6.04-pre2-r1/syslinux-6.04-pre2/linux/../libinstaller/syslxcom.c:34:
+TOPDIR/tmp-glibc/work/core2-64-oe-linux/syslinux/6.04-pre2-r1/recipe-sysroot/usr/include/linux/mount.h:95:6: error: redeclaration of 'enum fsconfig_command'
+ 95 | enum fsconfig_command {
+ | ^~~~~~~~~~~~~~~~
+In file included from TOPDIR/tmp-glibc/work/core2-64-oe-linux/syslinux/6.04-pre2-r1/syslinux-6.04-pre2/linux/../libinstaller/syslxcom.c:31:
+TOPDIR/tmp-glibc/work/core2-64-oe-linux/syslinux/6.04-pre2-r1/recipe-sysroot/usr/include/sys/mount.h:189:6: note: originally defined here
+ 189 | enum fsconfig_command
+ | ^~~~~~~~~~~~~~~~
+TOPDIR/tmp-glibc/work/core2-64-oe-linux/syslinux/6.04-pre2-r1/recipe-sysroot/usr/include/linux/mount.h:96:9: error: redeclaration of enumerator 'FSCONFIG_SET_FLAG'
+ 96 | FSCONFIG_SET_FLAG = 0, /* Set parameter, supplying no value */
+ | ^~~~~~~~~~~~~~~~~
+...
+
+Upstream-Status: Inactive-Upstream
+Signed-off-by: Martin Jansa <Martin.Jansa@gmail.com>
+---
+ libinstaller/linuxioctl.h | 15 ++++++++++++++-
+ 1 file changed, 14 insertions(+), 1 deletion(-)
+
+diff --git a/libinstaller/linuxioctl.h b/libinstaller/linuxioctl.h
+index e2731c7..f4a6703 100644
+--- a/libinstaller/linuxioctl.h
++++ b/libinstaller/linuxioctl.h
+@@ -16,7 +16,20 @@
+ #include <linux/fd.h> /* Floppy geometry */
+ #include <linux/hdreg.h> /* Hard disk geometry */
+
+-#include <linux/fs.h> /* FIGETBSZ, FIBMAP, FS_IOC_* */
++// #include <linux/fs.h> /* FIGETBSZ, FIBMAP, FS_IOC_* */
++// linux/fs.h unfortunately causes conflict with sys/mount.h since glibc-2.36
++// https://sourceware.org/glibc/wiki/Release/2.36#Usage_of_.3Clinux.2Fmount.h.3E_and_.3Csys.2Fmount.h.3E
++// add the necessary definitions
++
++#define FS_IOC_GETFLAGS _IOR('f', 1, long)
++#define FS_IOC_SETFLAGS _IOW('f', 2, long)
++#define FIBMAP _IO(0x00,1) /* bmap access */
++#define FIGETBSZ _IO(0x00,2) /* get the block size used for bmap */
++#define FS_IMMUTABLE_FL 0x00000010 /* Immutable file */
++#define BLKGETSIZE _IO(0x12,96) /* return device size /512 (long *arg) */
++
++// for musl we also need limits.h for PATH_MAX
++#include <linux/limits.h>
+
+ #undef SECTOR_SIZE /* Defined in msdos_fs.h for no good reason */
+ #undef SECTOR_BITS
diff --git a/meta/recipes-devtools/syslinux/syslinux/0013-remove-clean-script.patch b/meta/recipes-devtools/syslinux/syslinux/0013-remove-clean-script.patch
new file mode 100644
index 0000000000..c0af7eff86
--- /dev/null
+++ b/meta/recipes-devtools/syslinux/syslinux/0013-remove-clean-script.patch
@@ -0,0 +1,27 @@
+From a11c8f88de6b6c42c805ba76e70532977bfd24bf Mon Sep 17 00:00:00 2001
+From: Saul Wold <sgw@linux.intel.com>
+Date: Wed, 10 Dec 2014 10:26:33 -0800
+Subject: [PATCH] remove clean script
+
+This script try to call git submodule, since we are downloading
+the tarball it seems in-correct to do this.
+
+Upstream-Status: Inappropriate [OE-Specific]
+Signed-off-by: Saul Wold <sgw@linux.intel.com>
+Signed-off-by: Martin Jansa <Martin.Jansa@gmail.com>
+---
+ efi/Makefile | 1 -
+ 1 file changed, 1 deletion(-)
+
+diff --git a/efi/Makefile b/efi/Makefile
+index 72e081e..3cfb3f6 100644
+--- a/efi/Makefile
++++ b/efi/Makefile
+@@ -102,7 +102,6 @@ tidy dist:
+ rm -f *.so *.o wrapper
+ find . \( -name \*.o -o -name \*.a -o -name .\*.d -o -name \*.tmp \) -print0 | \
+ xargs -0r rm -f
+- $(topdir)/efi/clean-gnu-efi.sh $(EFI_SUBARCH) $(objdir)
+
+ clean: tidy
+
diff --git a/meta/recipes-devtools/syslinux/syslinux/0014-Fix-reproducibility-issues.patch b/meta/recipes-devtools/syslinux/syslinux/0014-Fix-reproducibility-issues.patch
new file mode 100644
index 0000000000..bc48160cba
--- /dev/null
+++ b/meta/recipes-devtools/syslinux/syslinux/0014-Fix-reproducibility-issues.patch
@@ -0,0 +1,32 @@
+From e49e86bd3199f51ada8a4a1d51aa8d627645279e Mon Sep 17 00:00:00 2001
+From: Richard Purdie <richard.purdie@linuxfoundation.org>
+Date: Sat, 27 Feb 2021 23:42:03 +0000
+Subject: [PATCH] Fix reproducibility issues
+
+In order to build deterministic binaries, we need to sort the wildcard expansion
+so the libraries are linked in the same order each time. This fixes reproducibility
+issues within syslinux builds.
+
+Upstream-Status: Inactive-Upstream
+RP 2021/3/1
+
+Signed-off-by: Martin Jansa <Martin.Jansa@gmail.com>
+---
+ mk/lib.mk | 4 ++--
+ 1 file changed, 2 insertions(+), 2 deletions(-)
+
+diff --git a/mk/lib.mk b/mk/lib.mk
+index f3fb07c..815698c 100644
+--- a/mk/lib.mk
++++ b/mk/lib.mk
+@@ -130,8 +130,8 @@ LIBENTRY_OBJS = \
+ exit.o
+
+ LIBGCC_OBJS = \
+- $(patsubst $(com32)/lib/%.c,%.o,$(wildcard $(com32)/lib/$(ARCH)/libgcc/*.c)) \
+- $(patsubst $(com32)/lib/%.S,%.o,$(wildcard $(com32)/lib/$(ARCH)/libgcc/*.S))
++ $(sort $(patsubst $(com32)/lib/%.c,%.o,$(wildcard $(com32)/lib/$(ARCH)/libgcc/*.c))) \
++ $(sort $(patsubst $(com32)/lib/%.S,%.o,$(wildcard $(com32)/lib/$(ARCH)/libgcc/*.S)))
+
+ LIBCONSOLE_OBJS = \
+ \
diff --git a/meta/recipes-devtools/syslinux/syslinux/determinism.patch b/meta/recipes-devtools/syslinux/syslinux/determinism.patch
deleted file mode 100644
index 2fb8c64df3..0000000000
--- a/meta/recipes-devtools/syslinux/syslinux/determinism.patch
+++ /dev/null
@@ -1,22 +0,0 @@
-In order to build deterministic binaries, we need to sort the wildcard expansion
-so the libraries are linked in the same order each time. This fixes reproducibility
-issues within syslinux builds.
-
-Upstream-Status: Pending
-RP 2021/3/1
-
-Index: syslinux-6.04-pre2/mk/lib.mk
-===================================================================
---- syslinux-6.04-pre2.orig/mk/lib.mk
-+++ syslinux-6.04-pre2/mk/lib.mk
-@@ -130,8 +130,8 @@ LIBENTRY_OBJS = \
- exit.o
-
- LIBGCC_OBJS = \
-- $(patsubst $(com32)/lib/%.c,%.o,$(wildcard $(com32)/lib/$(ARCH)/libgcc/*.c)) \
-- $(patsubst $(com32)/lib/%.S,%.o,$(wildcard $(com32)/lib/$(ARCH)/libgcc/*.S))
-+ $(sort $(patsubst $(com32)/lib/%.c,%.o,$(wildcard $(com32)/lib/$(ARCH)/libgcc/*.c))) \
-+ $(sort $(patsubst $(com32)/lib/%.S,%.o,$(wildcard $(com32)/lib/$(ARCH)/libgcc/*.S)))
-
- LIBCONSOLE_OBJS = \
- \
diff --git a/meta/recipes-devtools/syslinux/syslinux/syslinux-remove-clean-script.patch b/meta/recipes-devtools/syslinux/syslinux/syslinux-remove-clean-script.patch
deleted file mode 100644
index 7c003e165b..0000000000
--- a/meta/recipes-devtools/syslinux/syslinux/syslinux-remove-clean-script.patch
+++ /dev/null
@@ -1,17 +0,0 @@
-This script try to call git submodule, since we are downloading
-the tarball it seems in-correct to do this.
-
-Upstream-Status: Inappropriate [OE-Specific]
-Signed-off-by: Saul Wold <sgw@linux.intel.com>
-Index: syslinux-6.03/efi/Makefile
-===================================================================
---- syslinux-6.03.orig/efi/Makefile
-+++ syslinux-6.03/efi/Makefile
-@@ -101,7 +101,6 @@ tidy dist:
- rm -f *.so *.o wrapper
- find . \( -name \*.o -o -name \*.a -o -name .\*.d -o -name \*.tmp \) -print0 | \
- xargs -0r rm -f
-- $(topdir)/efi/clean-gnu-efi.sh $(EFI_SUBARCH) $(objdir)
-
- clean: tidy
-
diff --git a/meta/recipes-devtools/syslinux/syslinux_6.04-pre2.bb b/meta/recipes-devtools/syslinux/syslinux_6.04-pre2.bb
index 0e4a23c1b4..c8e7f25d2b 100644
--- a/meta/recipes-devtools/syslinux/syslinux_6.04-pre2.bb
+++ b/meta/recipes-devtools/syslinux/syslinux_6.04-pre2.bb
@@ -8,7 +8,6 @@ LIC_FILES_CHKSUM = "file://COPYING;md5=0636e73ff0215e8d672dc4c32c317bb3 \
DEPENDS = "nasm-native util-linux e2fsprogs"
SRC_URI = "https://www.zytor.com/pub/syslinux/Testing/6.04/syslinux-${PV}.tar.xz \
- file://syslinux-remove-clean-script.patch \
file://0001-linux-syslinux-support-ext2-3-4-device.patch \
file://0002-linux-syslinux-implement-open_ext2_fs.patch \
file://0003-linux-syslinux-implement-install_to_ext2.patch \
@@ -19,15 +18,16 @@ SRC_URI = "https://www.zytor.com/pub/syslinux/Testing/6.04/syslinux-${PV}.tar.xz
file://0008-libinstaller-syslinuxext-implement-syslinux_patch_bo.patch \
file://0009-linux-syslinux-implement-install_bootblock.patch \
file://0010-Workaround-multiple-definition-of-symbol-errors.patch \
- file://0001-install-don-t-install-obsolete-file-com32.ld.patch \
- file://determinism.patch \
- "
+ file://0011-install-don-t-install-obsolete-file-com32.ld.patch \
+ file://0012-libinstaller-Fix-build-with-glibc-2.36.patch \
+ file://0013-remove-clean-script.patch \
+ file://0014-Fix-reproducibility-issues.patch \
+"
SRC_URI[md5sum] = "2b31c78f087f99179feb357da312d7ec"
SRC_URI[sha256sum] = "4441a5d593f85bb6e8d578cf6653fb4ec30f9e8f4a2315a3d8f2d0a8b3fadf94"
# remove at next version upgrade or when output changes
-PR = "r1"
RECIPE_NO_UPDATE_REASON = "6.04-pre3 is broken"
UPSTREAM_CHECK_URI = "https://www.zytor.com/pub/syslinux/"
@@ -48,7 +48,7 @@ TARGET_LDFLAGS = ""
SECURITY_LDFLAGS = ""
LDFLAGS_SECTION_REMOVAL = ""
-CFLAGS:append = " -DNO_INLINE_FUNCS"
+CFLAGS:append = " -DNO_INLINE_FUNCS -Wno-error=implicit-function-declaration"
EXTRA_OEMAKE = " \
BINDIR=${bindir} SBINDIR=${sbindir} LIBDIR=${libdir} \
@@ -63,6 +63,10 @@ EXTRA_OEMAKE = " \
RANLIB="${RANLIB}" \
"
+# mtools allows non-root users to install syslinux
+PACKAGECONFIG ??= "mtools"
+PACKAGECONFIG[mtools] = ",,,"
+
#
# Tasks for native/nativesdk which just build the installer.
#
@@ -77,10 +81,15 @@ do_compile() {
do_install() {
install -d ${D}${bindir}
install \
- ${B}/bios/mtools/syslinux \
${B}/bios/extlinux/extlinux \
${B}/bios/utils/isohybrid \
${D}${bindir}
+
+ if ${@bb.utils.contains("PACKAGECONFIG", "mtools", "true", "false", d)}; then
+ install ${B}/bios/mtools/syslinux ${D}${bindir}
+ else
+ install ${B}/bios/linux/syslinux ${D}${bindir}
+ fi
}
#
@@ -104,14 +113,12 @@ do_install:class-target() {
install -m 644 ${S}/bios/core/ldlinux.bss ${D}${datadir}/syslinux/
}
-PACKAGES += "${PN}-nomtools ${PN}-extlinux ${PN}-mbr ${PN}-chain ${PN}-pxelinux ${PN}-isolinux ${PN}-misc"
+PACKAGES += "${PN}-extlinux ${PN}-mbr ${PN}-chain ${PN}-pxelinux ${PN}-isolinux ${PN}-misc"
-RDEPENDS:${PN} += "mtools"
-RDEPENDS:${PN}-nomtools += "libext2fs"
+RDEPENDS:${PN} += "${@bb.utils.contains("PACKAGECONFIG", "mtools", "mtools", "", d)}"
RDEPENDS:${PN}-misc += "perl"
FILES:${PN} = "${bindir}/syslinux"
-FILES:${PN}-nomtools = "${bindir}/syslinux-nomtools"
FILES:${PN}-extlinux = "${sbindir}/extlinux"
FILES:${PN}-mbr = "${datadir}/${BPN}/mbr.bin"
FILES:${PN}-chain = "${datadir}/${BPN}/chain.c32"
diff --git a/meta/recipes-devtools/systemd-bootchart/systemd-bootchart/0001-architecture-Recognise-RISCV-32-RISCV-64.patch b/meta/recipes-devtools/systemd-bootchart/systemd-bootchart/0001-architecture-Recognise-RISCV-32-RISCV-64.patch
deleted file mode 100644
index fc03812bb9..0000000000
--- a/meta/recipes-devtools/systemd-bootchart/systemd-bootchart/0001-architecture-Recognise-RISCV-32-RISCV-64.patch
+++ /dev/null
@@ -1,45 +0,0 @@
-From 4a6ace0a965965ea15e88c3418c7158ca5cc9f8f Mon Sep 17 00:00:00 2001
-From: Khem Raj <raj.khem@gmail.com>
-Date: Thu, 21 Nov 2019 10:12:05 -0800
-Subject: [PATCH] architecture: Recognise RISCV-32/RISCV-64
-
-Upstream-Status: Backport [https://github.com/systemd/systemd/commit/171b53380085b1288b03b19a2b978f36a5c003d0]
-Signed-off-by: Khem Raj <raj.khem@gmail.com>
----
- src/architecture.h | 13 +++++++++++++
- 1 file changed, 13 insertions(+)
-
-diff --git a/src/architecture.h b/src/architecture.h
-index 26679e2..89c7d32 100644
---- a/src/architecture.h
-+++ b/src/architecture.h
-@@ -57,6 +57,8 @@ enum {
- ARCHITECTURE_M68K,
- ARCHITECTURE_TILEGX,
- ARCHITECTURE_CRIS,
-+ ARCHITECTURE_RISCV32,
-+ ARCHITECTURE_RISCV64,
- _ARCHITECTURE_MAX,
- _ARCHITECTURE_INVALID = -1
- };
-@@ -194,6 +196,17 @@ int uname_architecture(void);
- #elif defined(__cris__)
- # define native_architecture() ARCHITECTURE_CRIS
- # error "Missing LIB_ARCH_TUPLE for CRIS"
-+#elif defined(__riscv)
-+# if __SIZEOF_POINTER__ == 4
-+# define native_architecture() ARCHITECTURE_RISCV32
-+# define LIB_ARCH_TUPLE "riscv32-linux-gnu"
-+# elif __SIZEOF_POINTER__ == 8
-+# define native_architecture() ARCHITECTURE_RISCV64
-+# define LIB_ARCH_TUPLE "riscv64-linux-gnu"
-+# else
-+# error "Unrecognized riscv architecture variant"
-+# endif
-+# define PROC_CPUINFO_MODEL "cpu model"
- #else
- # error "Please register your architecture here!"
- #endif
---
-2.24.0
-
diff --git a/meta/recipes-devtools/systemd-bootchart/systemd-bootchart/0001-comparison_fn_t-is-glibc-specific-use-raw-signature-.patch b/meta/recipes-devtools/systemd-bootchart/systemd-bootchart/0001-comparison_fn_t-is-glibc-specific-use-raw-signature-.patch
index 12eecc989b..812900051a 100644
--- a/meta/recipes-devtools/systemd-bootchart/systemd-bootchart/0001-comparison_fn_t-is-glibc-specific-use-raw-signature-.patch
+++ b/meta/recipes-devtools/systemd-bootchart/systemd-bootchart/0001-comparison_fn_t-is-glibc-specific-use-raw-signature-.patch
@@ -12,7 +12,7 @@ systemd/0013-comparison_fn_t-is-glibc-specific-use-raw-signature-.patch
Based on work by: Khem Raj <raj.khem@gmail.com>
Signed-off-by: Tim Orling <timothy.t.orling@linux.intel.com>
-Upstream-Status: Submitted [https://github.com/systemd/systemd-bootchart/pull/47]
+Upstream-Status: Denied [https://github.com/systemd/systemd-bootchart/pull/47]
---
src/util.h | 2 +-
diff --git a/meta/recipes-devtools/systemd-bootchart/systemd-bootchart/0002-musl-does-not-provide-printf-h.patch b/meta/recipes-devtools/systemd-bootchart/systemd-bootchart/0002-musl-does-not-provide-printf-h.patch
index 8be3bed395..2fac76a549 100644
--- a/meta/recipes-devtools/systemd-bootchart/systemd-bootchart/0002-musl-does-not-provide-printf-h.patch
+++ b/meta/recipes-devtools/systemd-bootchart/systemd-bootchart/0002-musl-does-not-provide-printf-h.patch
@@ -10,7 +10,7 @@ Original patch author: Emil Renner Berthing <systemd@esmil.dk>
Includes work by: Khem Raj <raj.khem@gmail.com>
Signed-off-by: Tim Orling <timothy.t.orling@linux.intel.com>
-Upstream-Status: Submitted [https://github.com/systemd/systemd-bootchart/pull/47]
+Upstream-Status: Denied [https://github.com/systemd/systemd-bootchart/pull/47]
---
Makefile.am | 4 +
diff --git a/meta/recipes-devtools/systemd-bootchart/systemd-bootchart/0003-musl-does-not-provide-canonicalize_file_name.patch b/meta/recipes-devtools/systemd-bootchart/systemd-bootchart/0003-musl-does-not-provide-canonicalize_file_name.patch
index c2c276e458..fbe9c93d2d 100644
--- a/meta/recipes-devtools/systemd-bootchart/systemd-bootchart/0003-musl-does-not-provide-canonicalize_file_name.patch
+++ b/meta/recipes-devtools/systemd-bootchart/systemd-bootchart/0003-musl-does-not-provide-canonicalize_file_name.patch
@@ -9,7 +9,7 @@ systemd/0007-check-for-missing-canonicalize_file_name.patch
Based on work by: Khem Raj <raj.khem@gmail.com>
Signed-off-by: Tim Orling <timothy.t.orling@linux.intel.com>
-Upstream-Status: Submitted [https://github.com/systemd/systemd-bootchart/pull/47]
+Upstream-Status: Denied [https://github.com/systemd/systemd-bootchart/pull/47]
---
src/path-util.c | 2 +-
diff --git a/meta/recipes-devtools/systemd-bootchart/systemd-bootchart_234.bb b/meta/recipes-devtools/systemd-bootchart/systemd-bootchart_234.bb
deleted file mode 100644
index bc3eee2093..0000000000
--- a/meta/recipes-devtools/systemd-bootchart/systemd-bootchart_234.bb
+++ /dev/null
@@ -1,44 +0,0 @@
-SUMMARY = "Boot performance graphing tool"
-DESCRIPTION = "For systemd-bootchart, several proc debug interfaces are required in the kernel config: \
- CONFIG_SCHEDSTATS \
-below is optional, for additional info: \
- CONFIG_SCHED_DEBUG"
-HOMEPAGE = "https://github.com/systemd/systemd-bootchart"
-LICENSE = "LGPL-2.1-only & GPL-2.0-only"
-LIC_FILES_CHKSUM = "file://LICENSE.LGPL2.1;md5=4fbd65380cdd255951079008b364516c \
- file://LICENSE.GPL2;md5=751419260aa954499f7abaabaa882bbe"
-
-SRC_URI = "git://github.com/systemd/systemd-bootchart.git;protocol=https;branch=master \
- file://0001-architecture-Recognise-RISCV-32-RISCV-64.patch \
- file://mips64.patch \
- file://no_lto.patch \
-"
-
-SRC_URI:append:libc-musl = " \
- file://0001-comparison_fn_t-is-glibc-specific-use-raw-signature-.patch \
- file://0002-musl-does-not-provide-printf-h.patch \
- file://0003-musl-does-not-provide-canonicalize_file_name.patch \
- "
-
-
-SRCREV = "8183cfd9dad8beca5434d625cf6b2df87775e956"
-
-S = "${WORKDIR}/git"
-
-DEPENDS = "systemd libxslt-native xmlto-native docbook-xml-dtd4-native docbook-xsl-stylesheets-native intltool"
-
-inherit pkgconfig autotools systemd features_check
-
-REQUIRED_DISTRO_FEATURES = "systemd"
-
-SYSTEMD_SERVICE:${PN} = "systemd-bootchart.service"
-
-do_configure:prepend() {
- # intltool.m4 is a soft link to /usr/share/aclocal/m4, delete it and use the one in our sysroot
- rm -f ${S}/m4/intltool.m4
-}
-
-FILES:${PN} += "${systemd_unitdir}/systemd-bootchart"
-
-EXTRA_OECONF = " --with-rootprefix=${root_prefix} \
- --with-rootlibdir=${base_libdir}"
diff --git a/meta/recipes-devtools/systemd-bootchart/systemd-bootchart_235.bb b/meta/recipes-devtools/systemd-bootchart/systemd-bootchart_235.bb
new file mode 100644
index 0000000000..25544029d5
--- /dev/null
+++ b/meta/recipes-devtools/systemd-bootchart/systemd-bootchart_235.bb
@@ -0,0 +1,43 @@
+SUMMARY = "Boot performance graphing tool"
+DESCRIPTION = "For systemd-bootchart, several proc debug interfaces are required in the kernel config: \
+ CONFIG_SCHEDSTATS \
+below is optional, for additional info: \
+ CONFIG_SCHED_DEBUG"
+HOMEPAGE = "https://github.com/systemd/systemd-bootchart"
+LICENSE = "LGPL-2.1-only & GPL-2.0-only"
+LIC_FILES_CHKSUM = "file://LICENSE.LGPL2.1;md5=4fbd65380cdd255951079008b364516c \
+ file://LICENSE.GPL2;md5=751419260aa954499f7abaabaa882bbe"
+
+SRC_URI = "git://github.com/systemd/systemd-bootchart.git;protocol=https;branch=main \
+ file://mips64.patch \
+ file://no_lto.patch \
+"
+
+SRC_URI:append:libc-musl = " \
+ file://0001-comparison_fn_t-is-glibc-specific-use-raw-signature-.patch \
+ file://0002-musl-does-not-provide-printf-h.patch \
+ file://0003-musl-does-not-provide-canonicalize_file_name.patch \
+ "
+
+
+SRCREV = "8ab9680a1bd5eb8fe7a7dcc44897af7ee41e56e7"
+
+S = "${WORKDIR}/git"
+
+DEPENDS = "systemd libxslt-native xmlto-native docbook-xml-dtd4-native docbook-xsl-stylesheets-native intltool"
+
+inherit pkgconfig autotools systemd features_check
+
+REQUIRED_DISTRO_FEATURES = "systemd"
+
+SYSTEMD_SERVICE:${PN} = "systemd-bootchart.service"
+
+do_configure:prepend() {
+ # intltool.m4 is a soft link to /usr/share/aclocal/m4, delete it and use the one in our sysroot
+ rm -f ${S}/m4/intltool.m4
+}
+
+FILES:${PN} += "${systemd_unitdir}/systemd-bootchart"
+
+EXTRA_OECONF = " --with-rootprefix=${root_prefix} \
+ --with-rootlibdir=${base_libdir}"
diff --git a/meta/recipes-devtools/tcf-agent/tcf-agent/ldflags.patch b/meta/recipes-devtools/tcf-agent/tcf-agent/ldflags.patch
index dfe492091c..d701e8dec0 100644
--- a/meta/recipes-devtools/tcf-agent/tcf-agent/ldflags.patch
+++ b/meta/recipes-devtools/tcf-agent/tcf-agent/ldflags.patch
@@ -1,7 +1,7 @@
We need LDFLAGS to be respected otherwise there are QA warnings as the right
flags don't get used.
-Upstream-Status: Inappropriate
+Upstream-Status: Inappropriate [oe-core specific]
Signed-off-by: Ross Burton <ross.burton@arm.com>
From d92af0483c20365fd0af740d0baef8870b4aa374 Mon Sep 17 00:00:00 2001
@@ -10,23 +10,26 @@ Date: Wed, 26 Aug 2015 19:18:11 +0500
Subject: [PATCH] tcf-agent: obey LDFLAGS
Signed-off-by: Abdur Rehman <abdur_rehman@mentor.com>
+
+Rebased to 4a2c4baaccbc8c29ce0297705de9a4e096d57ce5 version
+
+Signed-off-by: Mark hatle <mark.hatle@amd.com>
+
+
---
Makefile.inc | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
-diff --git a/Makefile.inc b/Makefile.inc
-index 959028f..3148942 100644
---- a/Makefile.inc
-+++ b/Makefile.inc
-@@ -96,7 +96,7 @@ NO_LINK_F ?= -c
+Index: agent/Makefile.inc
+===================================================================
+--- agent.orig/Makefile.inc
++++ agent/Makefile.inc
+@@ -111,7 +111,7 @@ NO_LINK_F ?= -c
# Linker definition and flags
- LINK ?= $(CC)
--LINK_FLAGS ?= $(CFLAGS)
-+LINK_FLAGS ?= $(LDFLAGS) $(CFLAGS)
- LINK_OUT_F ?= $(OUT_OBJ_F)
+ LINK ?= $(CC)
+-LINK_FLAGS ?= $(CFLAGS)
++LINK_FLAGS ?= $(LDFLAGS) $(CFLAGS)
+ LINK_OUT_F ?= $(OUT_OBJ_F)
# Archiver definition and flags
---
-1.7.9.5
-
diff --git a/meta/recipes-devtools/tcf-agent/tcf-agent_git.bb b/meta/recipes-devtools/tcf-agent/tcf-agent_git.bb
index 0d627f4e23..59b9d7de78 100644
--- a/meta/recipes-devtools/tcf-agent/tcf-agent_git.bb
+++ b/meta/recipes-devtools/tcf-agent/tcf-agent_git.bb
@@ -6,8 +6,8 @@ BUGTRACKER = "https://bugs.eclipse.org/bugs/"
LICENSE = "EPL-1.0 | EDL-1.0"
LIC_FILES_CHKSUM = "file://edl-v10.html;md5=522a390a83dc186513f0500543ad3679"
-SRCREV = "2735e3d6b7eccb05ab232825c618c837d27a5010"
-PV = "1.7.0+git${SRCPV}"
+SRCREV = "1f11747e83ebf4f53e8d17f430136f92ec378709"
+PV = "1.8.0+git"
UPSTREAM_CHECK_GITTAGREGEX = "(?P<pver>(\d+(\.\d+)+))"
SRC_URI = "git://git.eclipse.org/r/tcf/org.eclipse.tcf.agent.git;protocol=https;branch=master \
@@ -49,6 +49,7 @@ CFLAGS:append:powerpc64 = " ${LCL_STOP_SERVICES}"
CFLAGS:append:powerpc64le = " ${LCL_STOP_SERVICES}"
CFLAGS:append:riscv64 = " ${LCL_STOP_SERVICES}"
CFLAGS:append:riscv32 = " ${LCL_STOP_SERVICES}"
+CFLAGS:append:loongarch64 = " ${LCL_STOP_SERVICES}"
do_install() {
oe_runmake install INSTALLROOT=${D}
diff --git a/meta/recipes-devtools/tcltk/tcl/alter-includedir.patch b/meta/recipes-devtools/tcltk/tcl/alter-includedir.patch
index 5b25af2c0c..bfc718cfd3 100644
--- a/meta/recipes-devtools/tcltk/tcl/alter-includedir.patch
+++ b/meta/recipes-devtools/tcltk/tcl/alter-includedir.patch
@@ -1,3 +1,8 @@
+From 3130dca60636dc12d0d12df75b002fd123349e21 Mon Sep 17 00:00:00 2001
+From: Mingli Yu <mingli.yu@windriver.com>
+Date: Tue, 22 Nov 2022 18:48:27 +0800
+Subject: [PATCH] tcl: update the header location
+
Lets install the include header and private header files into
usr/include/tcl8.6 when version of tcl is 8.6.x
@@ -7,17 +12,21 @@ Signed-off-by: Khem Raj <raj.khem@gmai.com>
Fixed the TCL_INCLUDE_SPEC
+Also update the header location in tcl.pc to correct the header
+location in case some package such python3 which use pkg-config
+to detect tcl doesn't find the header.
+
Signed-off-by: Robert Yang <liezhi.yang@windriver.com>
Signed-off-by: Yi Zhao <yi.zhao@windriver.com>
-
+Signed-off-by: Mingli Yu <mingli.yu@windriver.com>
---
- Makefile.in | 2 +-
- configure | 4 ++--
- configure.in | 4 ++--
- 3 files changed, 5 insertions(+), 5 deletions(-)
+ unix/Makefile.in | 2 +-
+ unix/configure.in | 4 ++--
+ unix/tcl.pc.in | 2 +-
+ 3 files changed, 4 insertions(+), 4 deletions(-)
-diff --git a/Makefile.in b/Makefile.in
-index 0370491..daa569a 100644
+diff --git a/unix/Makefile.in b/unix/Makefile.in
+index a3b7d69..969ddb8 100644
--- a/unix/Makefile.in
+++ b/unix/Makefile.in
@@ -57,7 +57,7 @@ SCRIPT_INSTALL_DIR = $(INSTALL_ROOT)$(TCL_LIBRARY)
@@ -29,11 +38,11 @@ index 0370491..daa569a 100644
# Path to the private tcl header dir:
PRIVATE_INCLUDE_DIR = @PRIVATE_INCLUDE_DIR@
-diff --git a/configure.in b/configure.in
-index 9f96e2b..e323e02 100644
+diff --git a/unix/configure.in b/unix/configure.in
+index 4974fb6..a72934f 100644
--- a/unix/configure.in
+++ b/unix/configure.in
-@@ -773,7 +773,7 @@ eval "TCL_LIB_FILE=libtcl${LIB_SUFFIX}"
+@@ -776,7 +776,7 @@ eval "TCL_LIB_FILE=libtcl${LIB_SUFFIX}"
eval "TCL_LIB_FILE=${TCL_LIB_FILE}"
test -z "$TCL_LIBRARY" && TCL_LIBRARY='$(libdir)/tcl$(VERSION)'
@@ -42,7 +51,7 @@ index 9f96e2b..e323e02 100644
HTML_DIR='$(DISTDIR)/html'
# Note: in the following variable, it's important to use the absolute
-@@ -894,7 +894,7 @@ TCL_BUILD_STUB_LIB_PATH="`pwd`/${TCL_STUB_LIB_FILE}"
+@@ -897,7 +897,7 @@ TCL_BUILD_STUB_LIB_PATH="`pwd`/${TCL_STUB_LIB_FILE}"
TCL_STUB_LIB_PATH="${TCL_STUB_LIB_DIR}/${TCL_STUB_LIB_FILE}"
# Install time header dir can be set via --includedir
@@ -51,6 +60,16 @@ index 9f96e2b..e323e02 100644
#------------------------------------------------------------------------
# tclConfig.sh refers to this by a different name
---
-2.25.1
-
+diff --git a/unix/tcl.pc.in b/unix/tcl.pc.in
+index 93b5e69..dcd51d7 100644
+--- a/unix/tcl.pc.in
++++ b/unix/tcl.pc.in
+@@ -3,7 +3,7 @@
+ prefix=@prefix@
+ exec_prefix=@exec_prefix@
+ libdir=@libdir@
+-includedir=@includedir@
++includedir=@includedir@/tcl@PACKAGE_VERSION@
+ libfile=@TCL_LIB_FILE@
+
+ Name: Tool Command Language
diff --git a/meta/recipes-devtools/tcltk/tcl/fix_issue_with_old_distro_glibc.patch b/meta/recipes-devtools/tcltk/tcl/fix_issue_with_old_distro_glibc.patch
deleted file mode 100644
index 2c31cec8e3..0000000000
--- a/meta/recipes-devtools/tcltk/tcl/fix_issue_with_old_distro_glibc.patch
+++ /dev/null
@@ -1,39 +0,0 @@
-Upstream-Status: Inappropriate [embedded specific]
-
-Fixes tcl target recipe build on old distros which have glibc older than 2.14
-
-| + echo 'NOTE: make DESTDIR=/srv/home/nitin/builds/build-gcc47/tmp/work/x86_64-poky-linux/tcl-8.5.11-r5/image install'
-| NOTE: make DESTDIR=/srv/home/nitin/builds/build-gcc47/tmp/work/x86_64-poky-linux/tcl-8.5.11-r5/image install
-| + make DESTDIR=/srv/home/nitin/builds/build-gcc47/tmp/work/x86_64-poky-linux/tcl-8.5.11-r5/image install
-| Making directory /srv/home/nitin/builds/build-gcc47/tmp/work/x86_64-poky-linux/tcl-8.5.11-r5/image/usr/lib
-| Installing message catalogs
-| Making directory /srv/home/nitin/builds/build-gcc47/tmp/work/x86_64-poky-linux/tcl-8.5.11-r5/image/usr/share/man
-| tclsh: /lib64/libc.so.6: version `GLIBC_2.14' not found (required by /srv/home/nitin/builds/build-gcc47/tmp/work/x86_64-poky-linux/tcl-8.5.11-r5/tcl8.5.11/unix/libtcl8.5.so)
-| Making directory /srv/home/nitin/builds/build-gcc47/tmp/work/x86_64-poky-linux/tcl-8.5.11-r5/image/usr/bin
-| make: *** [install-msgs] Error 1
-
-Signed-off-by: Nitin A Kamble <nitin.a.kamble@intel.com>
-2012/04/26
-
-Index: unix/Makefile.in
-===================================================================
---- a/unix.orig/Makefile.in 2013-11-10 23:38:01.787425628 -0800
-+++ b/unix/Makefile.in 2013-11-10 23:37:59.807425578 -0800
-@@ -686,7 +686,7 @@
- # tcltest executable gets the build directory burned into its ld search path.
- # This keeps tcltest from picking up an already installed version of the Tcl
- # library.
--SHELL_ENV = @LD_LIBRARY_PATH_VAR@=`pwd`:${@LD_LIBRARY_PATH_VAR@} \
-+SHELL_ENV = @LD_LIBRARY_PATH_VAR@=${@LD_LIBRARY_PATH_VAR@} \
- TCLLIBPATH="@abs_builddir@/pkgs" \
- TCL_LIBRARY="${TCL_BUILDTIME_LIBRARY}"
-
-@@ -712,7 +712,7 @@
- $(SHELL_ENV) ${TCLTEST_EXE} $(TOP_DIR)/tests/all.tcl $(TESTFLAGS)
-
- gdb-test: ${TCLTEST_EXE}
-- @echo "set env @LD_LIBRARY_PATH_VAR@=`pwd`:$${@LD_LIBRARY_PATH_VAR@}" > gdb.run
-+ @echo "set env @LD_LIBRARY_PATH_VAR@=$${@LD_LIBRARY_PATH_VAR@}" > gdb.run
- @echo "set env TCL_LIBRARY=${TCL_BUILDTIME_LIBRARY}" >> gdb.run
- @echo "set args $(TOP_DIR)/tests/all.tcl $(TESTFLAGS) -singleproc 1" >> gdb.run
- $(GDB) ${TCLTEST_EXE} --command=gdb.run
diff --git a/meta/recipes-devtools/tcltk/tcl/fix_non_native_build_issue.patch b/meta/recipes-devtools/tcltk/tcl/fix_non_native_build_issue.patch
index 44b2ce0a30..09c49daa2c 100644
--- a/meta/recipes-devtools/tcltk/tcl/fix_non_native_build_issue.patch
+++ b/meta/recipes-devtools/tcltk/tcl/fix_non_native_build_issue.patch
@@ -1,10 +1,18 @@
-Upstream-Status: Pending
+From 371aa300369e9ea3234cba22d5c0babc7d40dfdf Mon Sep 17 00:00:00 2001
+From: Nitin A Kamble <nitin.a.kamble@intel.com>
+Date: Fri, 13 Aug 2010 12:24:00 -0700
+Subject: [PATCH] tcl: fix a build issue
-Index: unix/Makefile.in
-===================================================================
---- a/unix.orig/Makefile.in 2013-11-10 23:37:34.243424934 -0800
-+++ b/unix/Makefile.in 2013-11-10 23:37:34.243424934 -0800
-@@ -709,23 +709,23 @@
+Upstream-Status: Inappropriate [upstream does not support installed tests]
+---
+ unix/Makefile.in | 20 ++++++++++----------
+ 1 file changed, 10 insertions(+), 10 deletions(-)
+
+diff --git a/unix/Makefile.in b/unix/Makefile.in
+index 9dd053d..a3b7d69 100644
+--- a/unix/Makefile.in
++++ b/unix/Makefile.in
+@@ -815,7 +815,7 @@ tcltest-real:
test: test-tcl test-packages
test-tcl: ${TCLTEST_EXE}
@@ -12,9 +20,11 @@ Index: unix/Makefile.in
+ $(SHELL_ENV) ${TCLTEST_EXE} $(TOP_DIR)/tests/all.tcl $(TESTFLAGS)
gdb-test: ${TCLTEST_EXE}
- @echo "set env @LD_LIBRARY_PATH_VAR@=`pwd`:$${@LD_LIBRARY_PATH_VAR@}" > gdb.run
- @echo "set env TCL_LIBRARY=${TCL_BUILDTIME_LIBRARY}" >> gdb.run
- @echo "set args $(TOP_DIR)/tests/all.tcl $(TESTFLAGS) -singleproc 1" >> gdb.run
+ @printf '%s ' set env @LD_LIBRARY_PATH_VAR@=\"`pwd`$${@LD_LIBRARY_PATH_VAR@:+:$${@LD_LIBRARY_PATH_VAR}}\" > gdb.run
+@@ -824,17 +824,17 @@ gdb-test: ${TCLTEST_EXE}
+ @printf '\n' >>gdb.run
+ @printf '%s ' set args $(call shquotequote,$(TOP_DIR))/tests/all.tcl\
+ $(call shquotequote,$(TESTFLAGS)) -singleproc 1 >> gdb.run
- $(GDB) ./${TCLTEST_EXE} --command=gdb.run
+ $(GDB) ${TCLTEST_EXE} --command=gdb.run
rm gdb.run
@@ -32,7 +42,7 @@ Index: unix/Makefile.in
# The following target generates the shared libraries in dltest/ that are used
# for testing; they are included as part of the "tcltest" target (via the
-@@ -743,23 +743,23 @@
+@@ -852,28 +852,28 @@ dltest.marker: ${STUB_LIB_FILE}
# This target can be used to run tclsh from the build directory
# via `make shell SCRIPT=/tmp/foo.tcl`
shell: ${TCL_EXE}
@@ -44,9 +54,14 @@ Index: unix/Makefile.in
- $(SHELL_ENV) $(GDB) ./${TCL_EXE}
+ $(SHELL_ENV) $(GDB) ${TCL_EXE}
+ lldb: ${TCL_EXE}
+ $(SHELL_ENV) $(LLDB) ./${TCL_EXE}
+
valgrind: ${TCL_EXE} ${TCLTEST_EXE}
-- $(SHELL_ENV) $(VALGRIND) $(VALGRINDARGS) ./${TCLTEST_EXE} $(TOP_DIR)/tests/all.tcl -singleproc 1 -constraints valgrind $(TESTFLAGS)
-+ $(SHELL_ENV) $(VALGRIND) $(VALGRINDARGS) ${TCLTEST_EXE} $(TOP_DIR)/tests/all.tcl -singleproc 1 -constraints valgrind $(TESTFLAGS)
+- $(SHELL_ENV) $(VALGRIND) $(VALGRINDARGS) ./${TCLTEST_EXE} \
++ $(SHELL_ENV) $(VALGRIND) $(VALGRINDARGS) ${TCLTEST_EXE} \
+ $(TOP_DIR)/tests/all.tcl -singleproc 1 -constraints valgrind \
+ $(TESTFLAGS)
valgrindshell: ${TCL_EXE}
- $(SHELL_ENV) $(VALGRIND) $(VALGRINDARGS) ./${TCL_EXE} $(SCRIPT)
diff --git a/meta/recipes-devtools/tcltk/tcl/interp.patch b/meta/recipes-devtools/tcltk/tcl/interp.patch
index 95d6318f64..2e0dc94cff 100644
--- a/meta/recipes-devtools/tcltk/tcl/interp.patch
+++ b/meta/recipes-devtools/tcltk/tcl/interp.patch
@@ -1,11 +1,19 @@
+From 426aa2ff62dda77fd011e8f630b9d4ea17984817 Mon Sep 17 00:00:00 2001
+From: Ross Burton <ross.burton@arm.com>
+Date: Mon, 12 Jul 2021 14:50:13 +0100
+Subject: [PATCH] tcl: fix race in interp.test
+
The interp-36.7 patch has race conditions and is missing cleanup. This patch by
a Tcl maintainer should improve matters.
Upstream-Status: Pending
Signed-off-by: Ross Burton <ross.burton@arm.com>
+---
+ tests/interp.test | 7 ++++---
+ 1 file changed, 4 insertions(+), 3 deletions(-)
diff --git a/tests/interp.test b/tests/interp.test
-index d7424847f..fc90990f3 100644
+index d742484..fc90990 100644
--- a/tests/interp.test
+++ b/tests/interp.test
@@ -3595,17 +3595,18 @@ test interp-36.7 {ChildBgerror sets error handler of child [1999035]} -setup {
diff --git a/meta/recipes-devtools/tcltk/tcl/run-ptest b/meta/recipes-devtools/tcltk/tcl/run-ptest
index a62b703082..a403a74bb6 100644
--- a/meta/recipes-devtools/tcltk/tcl/run-ptest
+++ b/meta/recipes-devtools/tcltk/tcl/run-ptest
@@ -3,15 +3,27 @@
# clock.test needs a timezone to be set
export TZ="Europe/London"
export TCL_LIBRARY=library
+export ERROR_ON_FAILURES=1
-for i in `ls tests/*.test | awk -F/ '{print $2}'`; do
- ./tcltest tests/all.tcl -file $i >$i.log 2>&1
- grep -q -F -e "Files with failing tests:" -e "Test files exiting with errors:" $i.log
+# Some tests are overly strict with timings and fail on loaded systems.
+SKIP=""
+# 15321
+SKIP="$SKIP async-\* event-\*"
+# 14882
+SKIP="$SKIP cmdMZ-6.6"
+# 15081
+SKIP="$SKIP exit-1.\*"
+# 15407 15421
+SKIP="$SKIP \*io-46.1"
+# 14825
+SKIP="$SKIP socket-\* socket_inet-\*"
+
+for i in tests/*.test; do
+ i=$(basename $i)
+ ./tcltest tests/all.tcl -file $i -skip "$SKIP"
if [ $? -eq 0 ]; then
- echo "FAIL: $i"
- cat $i.log
- else
echo "PASS: $i"
+ else
+ echo "FAIL: $i"
fi
- rm -f $i.log
done
diff --git a/meta/recipes-devtools/tcltk/tcl/tcl-add-soname.patch b/meta/recipes-devtools/tcltk/tcl/tcl-add-soname.patch
index c4283c4f34..a0195e263c 100644
--- a/meta/recipes-devtools/tcltk/tcl/tcl-add-soname.patch
+++ b/meta/recipes-devtools/tcltk/tcl/tcl-add-soname.patch
@@ -1,25 +1,20 @@
+From b89fd73daf9b3eb2f889f65baba5f90d8a930c82 Mon Sep 17 00:00:00 2001
+From: Richard Purdie <rpurdie@linux.intel.com>
+Date: Wed, 9 Dec 2009 23:59:44 +0000
+Subject: [PATCH] tcl: Add tcltk from OE.dev but with legacy staging function
+
Upstream-Status: Pending
+---
+ unix/Makefile.in | 5 ++++-
+ unix/tcl.m4 | 3 +++
+ 2 files changed, 7 insertions(+), 1 deletion(-)
-Index: unix/tcl.m4
-===================================================================
---- a/unix.orig/tcl.m4 2013-09-19 13:17:13.000000000 -0700
-+++ b/unix/tcl.m4 2013-11-11 00:17:24.263485123 -0800
-@@ -1415,6 +1415,9 @@
- # get rid of the warnings.
- #CFLAGS_OPTIMIZE="${CFLAGS_OPTIMIZE} -D__NO_STRING_INLINES -D__NO_MATH_INLINES"
-
-+ # following line added by CW for Debian GNU/Linux
-+ TCL_SHLIB_LD_EXTRAS="-Wl,-soname,\${TCL_LIB_FILE}.0"
-+
- SHLIB_LD='${CC} ${CFLAGS} ${LDFLAGS} -shared'
- DL_OBJS="tclLoadDl.o"
- DL_LIBS="-ldl"
-Index: unix/Makefile.in
-===================================================================
---- a/unix.orig/Makefile.in 2013-09-19 13:17:13.000000000 -0700
-+++ b/unix/Makefile.in 2013-11-11 00:20:32.423489861 -0800
-@@ -796,7 +796,10 @@
- done;
+diff --git a/unix/Makefile.in b/unix/Makefile.in
+index 7619afc..9dd053d 100644
+--- a/unix/Makefile.in
++++ b/unix/Makefile.in
+@@ -904,7 +904,10 @@ install-binaries: binaries
+ done
@echo "Installing $(LIB_FILE) to $(DLL_INSTALL_DIR)/"
@@INSTALL_LIB@
- @chmod 555 "$(DLL_INSTALL_DIR)/$(LIB_FILE)"
@@ -30,3 +25,17 @@ Index: unix/Makefile.in
@echo "Installing ${TCL_EXE} as $(BIN_INSTALL_DIR)/tclsh$(VERSION)${EXE_SUFFIX}"
@$(INSTALL_PROGRAM) ${TCL_EXE} "$(BIN_INSTALL_DIR)/tclsh$(VERSION)${EXE_SUFFIX}"
@echo "Installing tclConfig.sh to $(CONFIG_INSTALL_DIR)/"
+diff --git a/unix/tcl.m4 b/unix/tcl.m4
+index 0307a06..37c4d67 100644
+--- a/unix/tcl.m4
++++ b/unix/tcl.m4
+@@ -1378,6 +1378,9 @@ AC_DEFUN([SC_CONFIG_CFLAGS], [
+ # get rid of the warnings.
+ #CFLAGS_OPTIMIZE="${CFLAGS_OPTIMIZE} -D__NO_STRING_INLINES -D__NO_MATH_INLINES"
+
++ # following line added by CW for Debian GNU/Linux
++ TCL_SHLIB_LD_EXTRAS="-Wl,-soname,\${TCL_LIB_FILE}.0"
++
+ SHLIB_LD='${CC} ${CFLAGS} ${LDFLAGS} -shared'
+ DL_OBJS="tclLoadDl.o"
+ DL_LIBS="-ldl"
diff --git a/meta/recipes-devtools/tcltk/tcl/tcl-remove-hardcoded-install-path.patch b/meta/recipes-devtools/tcltk/tcl/tcl-remove-hardcoded-install-path.patch
index 99c5faf02c..93e7877256 100644
--- a/meta/recipes-devtools/tcltk/tcl/tcl-remove-hardcoded-install-path.patch
+++ b/meta/recipes-devtools/tcltk/tcl/tcl-remove-hardcoded-install-path.patch
@@ -1,4 +1,4 @@
-From 6efc98774681795712073c2b91e5e9d1763239b8 Mon Sep 17 00:00:00 2001
+From 050fc597fbfa4da2c31bd0df58c871892a490470 Mon Sep 17 00:00:00 2001
From: "Song.Li" <Song.Li@windriver.com>
Date: Wed, 1 Aug 2012 19:05:51 +0800
Subject: [PATCH] tcl:install tcl to lib64 instead of lib on 64bit target
@@ -13,15 +13,14 @@ Signed-off-by: Song.Li <Song.Li@windriver.com>
Signed-off-by: Kai Kang <kai.kang@windriver.com>
Signed-off-by: Yi Zhao <yi.zhao@windriver.com>
---
- configure | 2 +-
- configure.in | 2 +-
- 2 files changed, 2 insertions(+), 2 deletions(-)
+ unix/configure.in | 2 +-
+ 1 file changed, 1 insertion(+), 1 deletion(-)
-Index: unix/configure.in
-===================================================================
---- a/unix.orig/configure.in 2013-11-10 23:20:50.000000000 -0800
-+++ b/unix/configure.in 2013-11-10 23:39:41.199428131 -0800
-@@ -790,7 +790,7 @@
+diff --git a/unix/configure.in b/unix/configure.in
+index 4f62510..4974fb6 100644
+--- a/unix/configure.in
++++ b/unix/configure.in
+@@ -775,7 +775,7 @@ eval "TCL_LIB_FILE=libtcl${LIB_SUFFIX}"
eval "TCL_LIB_FILE=${TCL_LIB_FILE}"
diff --git a/meta/recipes-devtools/tcltk/tcl_8.6.11.bb b/meta/recipes-devtools/tcltk/tcl_8.6.11.bb
deleted file mode 100644
index 9f6b003ffb..0000000000
--- a/meta/recipes-devtools/tcltk/tcl_8.6.11.bb
+++ /dev/null
@@ -1,103 +0,0 @@
-SUMMARY = "Tool Command Language"
-HOMEPAGE = "http://tcl.sourceforge.net"
-DESCRIPTION = "Tool Command Language, is an open-source multi-purpose C library which includes a powerful dynamic scripting language. Together they provide ideal cross-platform development environment for any programming project."
-SECTION = "devel/tcltk"
-
-# http://www.tcl.tk/software/tcltk/license.html
-LICENSE = "TCL & BSD-3-Clause"
-LIC_FILES_CHKSUM = "file://license.terms;md5=058f6229798281bbcac4239c788cfa38 \
- file://compat/license.terms;md5=058f6229798281bbcac4239c788cfa38 \
- file://library/license.terms;md5=058f6229798281bbcac4239c788cfa38 \
- file://macosx/license.terms;md5=058f6229798281bbcac4239c788cfa38 \
- file://tests/license.terms;md5=058f6229798281bbcac4239c788cfa38 \
- file://win/license.terms;md5=058f6229798281bbcac4239c788cfa38 \
-"
-
-DEPENDS = "tcl-native zlib"
-
-BASE_SRC_URI = "${SOURCEFORGE_MIRROR}/tcl/tcl-core${PV}-src.tar.gz \
- file://tcl-add-soname.patch"
-SRC_URI = "${BASE_SRC_URI} \
- file://fix_non_native_build_issue.patch \
- file://fix_issue_with_old_distro_glibc.patch \
- file://tcl-remove-hardcoded-install-path.patch \
- file://alter-includedir.patch \
- file://interp.patch \
- file://run-ptest \
-"
-SRC_URI[sha256sum] = "cfb49aab82bd179651e23eeeb69606f51b0ddc575ca55c3d35e2457469024cfa"
-
-SRC_URI:class-native = "${BASE_SRC_URI}"
-
-# Upstream don't believe this is an exploitable issue
-# https://core.tcl-lang.org/tcl/info/7079e4f91601e9c7
-CVE_CHECK_IGNORE += "CVE-2021-35331"
-
-UPSTREAM_CHECK_REGEX = "tcl(?P<pver>\d+(\.\d+)+)-src"
-
-S = "${WORKDIR}/${BPN}${PV}"
-
-VER = "${PV}"
-
-inherit autotools ptest binconfig
-
-AUTOTOOLS_SCRIPT_PATH = "${S}/unix"
-EXTRA_OECONF = "--enable-threads --disable-rpath --enable-man-suffix"
-
-do_install() {
- autotools_do_install
- oe_runmake 'DESTDIR=${D}' install-private-headers
- ln -sf ./tclsh${VER} ${D}${bindir}/tclsh
- ln -sf tclsh8.6 ${D}${bindir}/tclsh${VER}
- sed -i "s;-L${B};-L${STAGING_LIBDIR};g" tclConfig.sh
- sed -i "s;'${WORKDIR};'${STAGING_INCDIR};g" tclConfig.sh
- install -d ${D}${bindir_crossscripts}
- install -m 0755 tclConfig.sh ${D}${bindir_crossscripts}
- install -m 0755 tclConfig.sh ${D}${libdir}
- for dir in compat generic unix; do
- install -d ${D}${includedir}/${BPN}${VER}/$dir
- install -m 0644 ${S}/$dir/*.h ${D}${includedir}/${BPN}${VER}/$dir/
- done
-}
-
-SYSROOT_DIRS += "${bindir_crossscripts}"
-
-PACKAGES =+ "tcl-lib"
-FILES:tcl-lib = "${libdir}/libtcl8.6.so.*"
-FILES:${PN} += "${libdir}/tcl${VER} ${libdir}/tcl8.6 ${libdir}/tcl8"
-FILES:${PN}-dev += "${libdir}/tclConfig.sh ${libdir}/tclooConfig.sh"
-
-# isn't getting picked up by shlibs code
-RDEPENDS:${PN} += "tcl-lib"
-RDEPENDS:${PN}-ptest += "libgcc"
-
-BBCLASSEXTEND = "native nativesdk"
-
-do_compile_ptest() {
- oe_runmake tcltest
-}
-
-do_install_ptest() {
- cp ${B}/tcltest ${D}${PTEST_PATH}
- cp -r ${S}/library ${D}${PTEST_PATH}
- cp -r ${S}/tests ${D}${PTEST_PATH}
-}
-
-# Fix some paths that might be used by Tcl extensions
-BINCONFIG_GLOB = "*Config.sh"
-
-# Fix the path in sstate
-SSTATE_SCAN_FILES += "*Config.sh"
-
-# Cleanup host path from ${libdir}/tclConfig.sh and remove the
-# ${bindir_crossscripts}/tclConfig.sh from target
-PACKAGE_PREPROCESS_FUNCS += "tcl_package_preprocess"
-tcl_package_preprocess() {
- sed -i -e "s;${DEBUG_PREFIX_MAP};;g" \
- -e "s;-L${STAGING_LIBDIR};-L${libdir};g" \
- -e "s;${STAGING_INCDIR};${includedir};g" \
- -e "s;--sysroot=${RECIPE_SYSROOT};;g" \
- ${PKGD}${libdir}/tclConfig.sh
-
- rm -f ${PKGD}${bindir_crossscripts}/tclConfig.sh
-}
diff --git a/meta/recipes-devtools/tcltk/tcl_8.6.14.bb b/meta/recipes-devtools/tcltk/tcl_8.6.14.bb
new file mode 100644
index 0000000000..de4f5b878f
--- /dev/null
+++ b/meta/recipes-devtools/tcltk/tcl_8.6.14.bb
@@ -0,0 +1,110 @@
+SUMMARY = "Tool Command Language"
+HOMEPAGE = "http://tcl.sourceforge.net"
+DESCRIPTION = "Tool Command Language, is an open-source multi-purpose C library which includes a powerful dynamic scripting language. Together they provide ideal cross-platform development environment for any programming project."
+SECTION = "devel/tcltk"
+
+# http://www.tcl.tk/software/tcltk/license.html
+LICENSE = "TCL & BSD-3-Clause"
+LIC_FILES_CHKSUM = "file://license.terms;md5=058f6229798281bbcac4239c788cfa38 \
+ file://compat/license.terms;md5=058f6229798281bbcac4239c788cfa38 \
+ file://library/license.terms;md5=058f6229798281bbcac4239c788cfa38 \
+ file://macosx/license.terms;md5=058f6229798281bbcac4239c788cfa38 \
+ file://tests/license.terms;md5=058f6229798281bbcac4239c788cfa38 \
+ file://win/license.terms;md5=058f6229798281bbcac4239c788cfa38 \
+"
+
+DEPENDS = "tcl-native zlib"
+
+BASE_SRC_URI = "${SOURCEFORGE_MIRROR}/tcl/tcl-core${PV}-src.tar.gz \
+ file://tcl-add-soname.patch"
+SRC_URI = "${BASE_SRC_URI} \
+ file://fix_non_native_build_issue.patch \
+ file://tcl-remove-hardcoded-install-path.patch \
+ file://alter-includedir.patch \
+ file://interp.patch \
+ file://run-ptest \
+ "
+SRC_URI[sha256sum] = "ff604f43862a778827d7ecd1ad7686950ac2ef48d9cf69d3424cea9de08d9a72"
+
+SRC_URI:class-native = "${BASE_SRC_URI}"
+
+UPSTREAM_CHECK_URI = "https://www.tcl.tk/software/tcltk/download.html"
+UPSTREAM_CHECK_REGEX = "tcl(?P<pver>\d+(\.\d+)+)-src"
+
+S = "${WORKDIR}/${BPN}${PV}"
+
+VER = "${PV}"
+
+inherit autotools ptest binconfig
+
+AUTOTOOLS_SCRIPT_PATH = "${S}/unix"
+EXTRA_OECONF = "--enable-threads --disable-rpath --enable-man-suffix"
+
+# Prevent installing copy of tzdata based on tzdata installation on the build host
+# It doesn't install tzdata if one of the following files exist on the host:
+# /usr/share/zoneinfo/UTC /usr/share/zoneinfo/GMT /usr/share/lib/zoneinfo/UTC /usr/share/lib/zoneinfo/GMT /usr/lib/zoneinfo/UTC /usr/lib/zoneinfo/GMT
+# otherwise "/usr/lib/tcl8.6/tzdata" is included in tcl package
+EXTRA_OECONF += "--with-tzdata=no"
+
+do_install() {
+ autotools_do_install
+ oe_runmake 'DESTDIR=${D}' install-private-headers
+ ln -sf ./tclsh${VER} ${D}${bindir}/tclsh
+ ln -sf tclsh8.6 ${D}${bindir}/tclsh${VER}
+ sed -i "s;-L${B};-L${STAGING_LIBDIR};g" tclConfig.sh
+ sed -i "s;'${WORKDIR};'${STAGING_INCDIR};g" tclConfig.sh
+ install -d ${D}${bindir_crossscripts}
+ install -m 0755 tclConfig.sh ${D}${bindir_crossscripts}
+ install -m 0755 tclConfig.sh ${D}${libdir}
+ for dir in compat generic unix; do
+ install -d ${D}${includedir}/${BPN}${VER}/$dir
+ install -m 0644 ${S}/$dir/*.h ${D}${includedir}/${BPN}${VER}/$dir/
+ done
+}
+
+SYSROOT_DIRS += "${bindir_crossscripts}"
+
+PACKAGES =+ "tcl-lib"
+FILES:tcl-lib = "${libdir}/libtcl8.6.so.*"
+FILES:${PN} += "${libdir}/tcl${VER} ${libdir}/tcl8.6 ${libdir}/tcl8"
+FILES:${PN}-dev += "${libdir}/tclConfig.sh ${libdir}/tclooConfig.sh"
+
+# isn't getting picked up by shlibs code
+RDEPENDS:${PN} += "tcl-lib"
+RDEPENDS:${PN}-ptest += "libgcc"
+
+BBCLASSEXTEND = "native nativesdk"
+
+do_compile_ptest() {
+ oe_runmake tcltest
+}
+
+do_install_ptest() {
+ cp ${B}/tcltest ${D}${PTEST_PATH}
+ cp -r ${S}/library ${D}${PTEST_PATH}
+ cp -r ${S}/tests ${D}${PTEST_PATH}
+}
+
+do_install_ptest:append:libc-musl () {
+ # Assumes locales other than provided by musl-locales
+ sed -i '/SKIP="$SKIP socket.*$/a # unixInit-3* is suppressed due to hardcoded locale assumptions\nSKIP="$SKIP unixInit-3\\\*"' ${D}${PTEST_PATH}/run-ptest
+}
+
+# Fix some paths that might be used by Tcl extensions
+BINCONFIG_GLOB = "*Config.sh"
+
+# Fix the path in sstate
+SSTATE_SCAN_FILES += "*Config.sh"
+
+# Cleanup host path from ${libdir}/tclConfig.sh and remove the
+# ${bindir_crossscripts}/tclConfig.sh from target
+PACKAGE_PREPROCESS_FUNCS += "tcl_package_preprocess"
+tcl_package_preprocess() {
+ sed -i -e "s;${DEBUG_PREFIX_MAP};;g" \
+ -e "s;-L${STAGING_LIBDIR};-L${libdir};g" \
+ -e "s;${STAGING_INCDIR};${includedir};g" \
+ -e "s;--sysroot=${RECIPE_SYSROOT};;g" \
+ ${PKGD}${libdir}/tclConfig.sh
+
+ rm -f ${PKGD}${bindir_crossscripts}/tclConfig.sh
+}
diff --git a/meta/recipes-devtools/unfs3/unfs3/0001-Add-listen-action-for-a-tcp-socket.patch b/meta/recipes-devtools/unfs3/unfs3/0001-Add-listen-action-for-a-tcp-socket.patch
deleted file mode 100644
index e9b9d3df46..0000000000
--- a/meta/recipes-devtools/unfs3/unfs3/0001-Add-listen-action-for-a-tcp-socket.patch
+++ /dev/null
@@ -1,54 +0,0 @@
-From b42ab8e1aca951dd06c113159491b3fd5cf06f2e Mon Sep 17 00:00:00 2001
-From: Haiqing Bai <Haiqing.Bai@windriver.com>
-Date: Thu, 24 Oct 2019 09:39:04 +0800
-Subject: [PATCH] Add "listen" action for a tcp socket which does not call
- 'listen' after 'bind'
-
-It is found that /usr/bin/unfsd customus 100% cpu after starting qemu with 'nfs'
-option, and below lots of error messages shows when strace the process:
-
-poll([{fd=3, events=POLLIN|POLLPRI|POLLRDNORM|POLLRDBAND},{fd=4, events=POLLIN|POLLPRI|POLLRDNORM|POLLRDBAND},
-{fd=5, events=POLLIN|POLLPRI|POLLRDNORM|POLLRDBAND},{fd=6, events =POLLIN|POLLPRI|POLLRDNORM|POLLRDBAND}],
-4, 2000) = 2 ([{fd=4, revents=POLLHUP},{fd=6, revents=POLLHUP}])
-accept(4, 0x7ffd5e6dddc0, [128]) = -1 EINVAL (Invalid argument)
-accept(6, 0x7ffd5e6dddc0, [128]) = -1 EINVAL (Invalid argument)
-
-% time seconds usecs/call calls errors syscall
------- ----------- ----------- --------- --------- ----------------
- 70.87 0.005392 0 513886 513886 accept
- 29.13 0.002216 0 256943 poll
- 0.00 0.000000 0 4 read
-
-The root cause is that 'listen' is not called for the binded
-socket. The depended libtipc does not call 'listen' if found
-the incomming socket is binded, so 'accept' reports the error
-in the 'for' loop and cpu consumed.
-
-Upstream-Status: Pending
-
-Signed-off-by: Haiqing Bai <Haiqing.Bai@windriver.com>
----
- daemon.c | 7 +++++++
- 1 file changed, 7 insertions(+)
-
-diff --git a/daemon.c b/daemon.c
-index 028a181..4c85903 100644
---- a/daemon.c
-+++ b/daemon.c
-@@ -814,6 +814,13 @@ static SVCXPRT *create_tcp_transport(unsigned int port)
- fprintf(stderr, "Couldn't bind to tcp port %d\n", port);
- exit(1);
- }
-+
-+ if (listen(sock, SOMAXCONN) < 0) {
-+ perror("listen");
-+ fprintf(stderr, "Couldn't listen on the address \n");
-+ close(sock);
-+ exit(1);
-+ }
- }
-
- transp = svctcp_create(sock, 0, 0);
---
-1.9.1
-
diff --git a/meta/recipes-devtools/unfs3/unfs3/0001-Alias-off64_t-to-off_t-on-linux-if-not-defined.patch b/meta/recipes-devtools/unfs3/unfs3/0001-Alias-off64_t-to-off_t-on-linux-if-not-defined.patch
new file mode 100644
index 0000000000..91909fa236
--- /dev/null
+++ b/meta/recipes-devtools/unfs3/unfs3/0001-Alias-off64_t-to-off_t-on-linux-if-not-defined.patch
@@ -0,0 +1,28 @@
+From 949db882e487d728c44bb68139682b38396dd275 Mon Sep 17 00:00:00 2001
+From: Khem Raj <raj.khem@gmail.com>
+Date: Wed, 14 Dec 2022 14:50:10 -0800
+Subject: [PATCH] Alias off64_t to off_t on linux if not defined
+
+Musl C library does not define off64_t and has 64-bit default off_t
+therefore define off64_t as an alias on linux as well when configure
+detects that off64_t is not provided by a linux system
+
+Upstream-Status: Submitted [https://github.com/unfs3/unfs3/pull/29]
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+---
+ nfs.h | 2 +-
+ 1 file changed, 1 insertion(+), 1 deletion(-)
+
+diff --git a/nfs.h b/nfs.h
+index aded011..7996c67 100644
+--- a/nfs.h
++++ b/nfs.h
+@@ -62,7 +62,7 @@ typedef int32_t int32;
+ #endif
+
+ #ifndef HAVE_OFF64_T
+-#ifdef __APPLE__
++#if defined(__APPLE__) || defined(__linux__)
+ typedef off_t off64_t;
+ #endif
+ #endif
diff --git a/meta/recipes-devtools/unfs3/unfs3/0001-attr-fix-utime-for-symlink.patch b/meta/recipes-devtools/unfs3/unfs3/0001-attr-fix-utime-for-symlink.patch
index 6957d102b8..a0f3740d6a 100644
--- a/meta/recipes-devtools/unfs3/unfs3/0001-attr-fix-utime-for-symlink.patch
+++ b/meta/recipes-devtools/unfs3/unfs3/0001-attr-fix-utime-for-symlink.patch
@@ -1,6 +1,6 @@
-From 3f4fcb62661059bad77a2e957b4621137797bc2f Mon Sep 17 00:00:00 2001
+From 7e789895919d57d573ebb8faa147d1286104cd01 Mon Sep 17 00:00:00 2001
From: Rui Wang <rui.wang@windriver.com>
-Date: Fri, 15 Jun 2018 14:19:10 +0800
+Date: Mon, 24 Apr 2023 02:57:57 -0700
Subject: [PATCH] attr: fix utime for symlink
unfs3 has an old defect that it can not change the timestamps of a
@@ -15,9 +15,9 @@ Making unfs3 support lutimes(), which can modify the symlink file
itself. Considering not every system support this function, so a
function checking is necessary.
-Upstream-Status: Submitted [https://sourceforge.net/p/unfs3/bugs/12/]
+Upstream-Status: Submitted [https://github.com/unfs3/unfs3/pull/35]
-Signed-off-by: Rui Wang <rui.wang@windriver.com>
+Signed-off-by: Chen Qi <Qi.Chen@windriver.com>
---
attr.c | 15 +++++++++++----
backend_unix.h | 2 ++
@@ -25,22 +25,22 @@ Signed-off-by: Rui Wang <rui.wang@windriver.com>
3 files changed, 14 insertions(+), 4 deletions(-)
diff --git a/attr.c b/attr.c
-index 73e5c75..427d0e2 100644
+index 0ce9375..930ce6e 100644
--- a/attr.c
+++ b/attr.c
-@@ -280,7 +280,7 @@ post_op_attr get_post_cached(struct svc_req * req)
+@@ -285,7 +285,7 @@ post_op_attr get_post_cached(struct svc_req * req)
static nfsstat3 set_time(const char *path, backend_statstruct buf, sattr3 new)
{
time_t new_atime, new_mtime;
- struct utimbuf utim;
+ struct timeval stamps[2];
int res;
-
+
/* set atime and mtime */
-@@ -302,10 +302,17 @@ static nfsstat3 set_time(const char *path, backend_statstruct buf, sattr3 new)
- else /* DONT_CHANGE */
- new_mtime = buf.st_mtime;
-
+@@ -307,10 +307,17 @@ static nfsstat3 set_time(const char *path, backend_statstruct buf, sattr3 new)
+ else /* DONT_CHANGE */
+ new_mtime = buf.st_mtime;
+
- utim.actime = new_atime;
- utim.modtime = new_mtime;
+ stamps[0].tv_sec = new_atime;
@@ -53,13 +53,13 @@ index 73e5c75..427d0e2 100644
+#else
+ res = backend_utimes(path, stamps);
+#endif
-
+
- res = backend_utime(path, &utim);
- if (res == -1)
- return setattr_err();
+ if (res == -1)
+ return setattr_err();
}
diff --git a/backend_unix.h b/backend_unix.h
-index fbc2af3..813ffd3 100644
+index 4db72ae..9cce9ab 100644
--- a/backend_unix.h
+++ b/backend_unix.h
@@ -61,6 +61,8 @@
@@ -72,14 +72,17 @@ index fbc2af3..813ffd3 100644
#define backend_dirstream DIR
#define backend_statvfsstruct struct statvfs
diff --git a/configure.ac b/configure.ac
-index aeec598..ea7f167 100644
+index d46c905..c21afe3 100644
--- a/configure.ac
+++ b/configure.ac
-@@ -37,6 +37,7 @@ AC_CHECK_FUNCS(setresuid setresgid)
+@@ -32,6 +32,7 @@ AC_CHECK_FUNCS(setresuid setresgid)
AC_CHECK_FUNCS(vsyslog)
AC_CHECK_FUNCS(lchown)
AC_CHECK_FUNCS(setgroups)
+AC_CHECK_FUNCS(lutimes)
- UNFS3_SOLARIS_RPC
- UNFS3_PORTMAP_DEFINE
UNFS3_COMPILE_WARNINGS
+
+ PKG_CHECK_MODULES([TIRPC], [libtirpc])
+--
+2.40.0
+
diff --git a/meta/recipes-devtools/unfs3/unfs3/0001-daemon.c-Fix-race-window-for-writing-of-the-pid-file.patch b/meta/recipes-devtools/unfs3/unfs3/0001-daemon.c-Fix-race-window-for-writing-of-the-pid-file.patch
new file mode 100644
index 0000000000..20bbee90a0
--- /dev/null
+++ b/meta/recipes-devtools/unfs3/unfs3/0001-daemon.c-Fix-race-window-for-writing-of-the-pid-file.patch
@@ -0,0 +1,68 @@
+From 212a947e776e7a25c1f2259615f461179bcb3663 Mon Sep 17 00:00:00 2001
+From: Alexander Kanavin <alex@linutronix.de>
+Date: Wed, 23 Nov 2022 21:38:38 +0100
+Subject: [PATCH] daemon.c: Fix race window for writing of the pid file
+
+The parent process should write the pid file such that the pid file
+will can be checked immediately following exit of the fork from the
+parent.
+
+This allows external monitoring applications to watch the daemon
+without having to add sleep calls to wait for the pid file be written
+on a busy system.
+
+Upstream-Status: Submitted [https://github.com/unfs3/unfs3/pull/28]
+Signed-off-by: Jason Wessel <jason.wessel@windriver.com>
+Signed-off-by: Alexander Kanavin <alex@linutronix.de>
+---
+ daemon.c | 12 +++++++++---
+ 1 file changed, 9 insertions(+), 3 deletions(-)
+
+diff --git a/daemon.c b/daemon.c
+index ff53b7a..13b06a4 100644
+--- a/daemon.c
++++ b/daemon.c
+@@ -166,7 +166,7 @@ int get_socket_type(struct svc_req *rqstp)
+ /*
+ * write current pid to a file
+ */
+-static void create_pid_file(void)
++static void create_pid_file(int pid)
+ {
+ char buf[16];
+ int fd, res, len;
+@@ -188,7 +188,7 @@ static void create_pid_file(void)
+ }
+ #endif
+
+- sprintf(buf, "%i\n", backend_getpid());
++ sprintf(buf, "%i\n", pid);
+ len = strlen(buf);
+
+ res = backend_pwrite(fd, buf, len, 0);
+@@ -1122,6 +1122,10 @@ int main(int argc, char **argv)
+ fprintf(stderr, "could not fork into background\n");
+ daemon_exit(0);
+ }
++ if (pid)
++ create_pid_file(pid);
++ } else {
++ create_pid_file(backend_getpid());
+ }
+ #endif /* WIN32 */
+
+@@ -1161,8 +1165,10 @@ int main(int argc, char **argv)
+ /* no umask to not screw up create modes */
+ umask(0);
+
++#ifdef WIN32
+ /* create pid file if wanted */
+- create_pid_file();
++ create_pid_file(backend_getpid());
++#endif
+
+ /* initialize internal stuff */
+ fh_cache_init();
+--
+2.30.2
+
diff --git a/meta/recipes-devtools/unfs3/unfs3/0001-daemon.c-Libtirpc-porting-fixes.patch b/meta/recipes-devtools/unfs3/unfs3/0001-daemon.c-Libtirpc-porting-fixes.patch
deleted file mode 100644
index 6eee6748f9..0000000000
--- a/meta/recipes-devtools/unfs3/unfs3/0001-daemon.c-Libtirpc-porting-fixes.patch
+++ /dev/null
@@ -1,37 +0,0 @@
-From c7a2a65d6c2a433312540c207860740d6e4e7629 Mon Sep 17 00:00:00 2001
-From: Khem Raj <raj.khem@gmail.com>
-Date: Sun, 11 Mar 2018 17:32:54 -0700
-Subject: [PATCH] daemon.c: Libtirpc porting fixes
-
-Signed-off-by: Khem Raj <raj.khem@gmail.com>
----
-Upstream-Status: Pending
-
- daemon.c | 4 ++--
- 1 file changed, 2 insertions(+), 2 deletions(-)
-
-diff --git a/daemon.c b/daemon.c
-index 22f30f6..028a181 100644
---- a/daemon.c
-+++ b/daemon.c
-@@ -117,7 +117,7 @@ void logmsg(int prio, const char *fmt, ...)
- */
- struct in_addr get_remote(struct svc_req *rqstp)
- {
-- return (svc_getcaller(rqstp->rq_xprt))->sin_addr;
-+ return ((struct sockaddr_in*)svc_getcaller(rqstp->rq_xprt))->sin_addr;
- }
-
- /*
-@@ -125,7 +125,7 @@ struct in_addr get_remote(struct svc_req *rqstp)
- */
- short get_port(struct svc_req *rqstp)
- {
-- return (svc_getcaller(rqstp->rq_xprt))->sin_port;
-+ return ((struct sockaddr_in*)svc_getcaller(rqstp->rq_xprt))->sin_port;
- }
-
- /*
---
-2.16.2
-
diff --git a/meta/recipes-devtools/unfs3/unfs3/0001-fix-building-on-macOS.patch b/meta/recipes-devtools/unfs3/unfs3/0001-fix-building-on-macOS.patch
new file mode 100644
index 0000000000..f18ffd3711
--- /dev/null
+++ b/meta/recipes-devtools/unfs3/unfs3/0001-fix-building-on-macOS.patch
@@ -0,0 +1,27 @@
+From 989b87ae46b3183a742031373fbb3e912ab9b666 Mon Sep 17 00:00:00 2001
+From: Andrey Filipenkov <decapitator@ukr.net>
+Date: Wed, 2 Nov 2022 13:38:40 +0300
+Subject: [PATCH] fix building on macOS
+
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+Upstream-Status: Backport [https://github.com/unfs3/unfs3/commit/989b87ae46b3183a742031373fbb3e912ab9b666]
+---
+ attr.c | 2 ++
+ 1 file changed, 2 insertions(+)
+
+diff --git a/attr.c b/attr.c
+index 6253e84..0ce9375 100644
+--- a/attr.c
++++ b/attr.c
+@@ -18,6 +18,8 @@
+ #include <utime.h>
+ #include <errno.h>
+ #include <dirent.h>
++#include <stdlib.h>
++#include <string.h>
+
+ #include "backend.h"
+ #include "nfs.h"
+--
+2.39.1
+
diff --git a/meta/recipes-devtools/unfs3/unfs3/0001-locate.c-Include-attr.h.patch b/meta/recipes-devtools/unfs3/unfs3/0001-locate.c-Include-attr.h.patch
new file mode 100644
index 0000000000..076e08fadc
--- /dev/null
+++ b/meta/recipes-devtools/unfs3/unfs3/0001-locate.c-Include-attr.h.patch
@@ -0,0 +1,28 @@
+From 63e0785bb379a8f2c41f34f5cd938ca38555e605 Mon Sep 17 00:00:00 2001
+From: Khem Raj <raj.khem@gmail.com>
+Date: Fri, 13 Jan 2023 23:41:01 -0800
+Subject: [PATCH] locate.c: Include attr.h
+
+Its needed for fix_dir_times() API declarations
+
+Upstream-Status: Submitted [https://github.com/unfs3/unfs3/pull/32]
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+---
+ locate.c | 1 +
+ 1 file changed, 1 insertion(+)
+
+diff --git a/locate.c b/locate.c
+index 6bbe71f..84e0fe5 100644
+--- a/locate.c
++++ b/locate.c
+@@ -27,6 +27,7 @@
+ #include "nfs.h"
+ #include "fh.h"
+ #include "daemon.h"
++#include "attr.h"
+
+ /*
+ * these are the brute-force file searching routines that are used
+--
+2.39.0
+
diff --git a/meta/recipes-devtools/unfs3/unfs3/alternate_rpc_ports.patch b/meta/recipes-devtools/unfs3/unfs3/alternate_rpc_ports.patch
deleted file mode 100644
index ff745d4774..0000000000
--- a/meta/recipes-devtools/unfs3/unfs3/alternate_rpc_ports.patch
+++ /dev/null
@@ -1,158 +0,0 @@
-Add ability to specify rcp port numbers
-
-In order to run more than one unfs server on a host system, you must
-be able to specify alternate rpc port numbers.
-
-Jason Wessel <jason.wessel@windriver.com>
-
-Upstream-Status: Submitted http://sourceforge.net/p/unfs3/bugs/5/
-
----
- daemon.c | 44 +++++++++++++++++++++++++++++++-------------
- mount.c | 4 ++--
- 2 files changed, 33 insertions(+), 15 deletions(-)
-
---- a/daemon.c
-+++ b/daemon.c
-@@ -78,6 +78,8 @@ int opt_testconfig = FALSE;
- struct in_addr opt_bind_addr;
- int opt_readable_executables = FALSE;
- char *opt_pid_file = NULL;
-+int nfs_prog = NFS3_PROGRAM;
-+int mount_prog = MOUNTPROG;
-
- /* Register with portmapper? */
- int opt_portmapper = TRUE;
-@@ -206,7 +208,7 @@ static void parse_options(int argc, char
- {
-
- int opt = 0;
-- char *optstring = "bcC:de:hl:m:n:prstTuwi:";
-+ char *optstring = "bcC:de:hl:m:n:prstTuwi:x:y:";
-
- while (opt != -1) {
- opt = getopt(argc, argv, optstring);
-@@ -261,8 +263,24 @@ static void parse_options(int argc, char
- printf
- ("\t-r report unreadable executables as readable\n");
- printf("\t-T test exports file and exit\n");
-+ printf("\t-x <port> alternate NFS RPC port\n");
-+ printf("\t-y <port> alternate MOUNTD RPC port\n");
- exit(0);
- break;
-+ case 'x':
-+ nfs_prog = strtol(optarg, NULL, 10);
-+ if (nfs_prog == 0) {
-+ fprintf(stderr, "Invalid NFS RPC port\n");
-+ exit(1);
-+ }
-+ break;
-+ case 'y':
-+ mount_prog = strtol(optarg, NULL, 10);
-+ if (mount_prog == 0) {
-+ fprintf(stderr, "Invalid MOUNTD RPC port\n");
-+ exit(1);
-+ }
-+ break;
- case 'l':
- opt_bind_addr.s_addr = inet_addr(optarg);
- if (opt_bind_addr.s_addr == (unsigned) -1) {
-@@ -347,12 +365,12 @@ void daemon_exit(int error)
- #endif /* WIN32 */
-
- if (opt_portmapper) {
-- svc_unregister(MOUNTPROG, MOUNTVERS1);
-- svc_unregister(MOUNTPROG, MOUNTVERS3);
-+ svc_unregister(mount_prog, MOUNTVERS1);
-+ svc_unregister(mount_prog, MOUNTVERS3);
- }
-
- if (opt_portmapper) {
-- svc_unregister(NFS3_PROGRAM, NFS_V3);
-+ svc_unregister(nfs_prog, NFS_V3);
- }
-
- if (error == SIGSEGV)
-@@ -657,13 +675,13 @@ static void mountprog_3(struct svc_req *
- static void register_nfs_service(SVCXPRT * udptransp, SVCXPRT * tcptransp)
- {
- if (opt_portmapper) {
-- pmap_unset(NFS3_PROGRAM, NFS_V3);
-+ pmap_unset(nfs_prog, NFS_V3);
- }
-
- if (udptransp != NULL) {
- /* Register NFS service for UDP */
- if (!svc_register
-- (udptransp, NFS3_PROGRAM, NFS_V3, nfs3_program_3,
-+ (udptransp, nfs_prog, NFS_V3, nfs3_program_3,
- opt_portmapper ? IPPROTO_UDP : 0)) {
- fprintf(stderr, "%s\n",
- "unable to register (NFS3_PROGRAM, NFS_V3, udp).");
-@@ -674,7 +692,7 @@ static void register_nfs_service(SVCXPRT
- if (tcptransp != NULL) {
- /* Register NFS service for TCP */
- if (!svc_register
-- (tcptransp, NFS3_PROGRAM, NFS_V3, nfs3_program_3,
-+ (tcptransp, nfs_prog, NFS_V3, nfs3_program_3,
- opt_portmapper ? IPPROTO_TCP : 0)) {
- fprintf(stderr, "%s\n",
- "unable to register (NFS3_PROGRAM, NFS_V3, tcp).");
-@@ -686,14 +704,14 @@ static void register_nfs_service(SVCXPRT
- static void register_mount_service(SVCXPRT * udptransp, SVCXPRT * tcptransp)
- {
- if (opt_portmapper) {
-- pmap_unset(MOUNTPROG, MOUNTVERS1);
-- pmap_unset(MOUNTPROG, MOUNTVERS3);
-+ pmap_unset(mount_prog, MOUNTVERS1);
-+ pmap_unset(mount_prog, MOUNTVERS3);
- }
-
- if (udptransp != NULL) {
- /* Register MOUNT service (v1) for UDP */
- if (!svc_register
-- (udptransp, MOUNTPROG, MOUNTVERS1, mountprog_3,
-+ (udptransp, mount_prog, MOUNTVERS1, mountprog_3,
- opt_portmapper ? IPPROTO_UDP : 0)) {
- fprintf(stderr, "%s\n",
- "unable to register (MOUNTPROG, MOUNTVERS1, udp).");
-@@ -702,7 +720,7 @@ static void register_mount_service(SVCXP
-
- /* Register MOUNT service (v3) for UDP */
- if (!svc_register
-- (udptransp, MOUNTPROG, MOUNTVERS3, mountprog_3,
-+ (udptransp, mount_prog, MOUNTVERS3, mountprog_3,
- opt_portmapper ? IPPROTO_UDP : 0)) {
- fprintf(stderr, "%s\n",
- "unable to register (MOUNTPROG, MOUNTVERS3, udp).");
-@@ -713,7 +731,7 @@ static void register_mount_service(SVCXP
- if (tcptransp != NULL) {
- /* Register MOUNT service (v1) for TCP */
- if (!svc_register
-- (tcptransp, MOUNTPROG, MOUNTVERS1, mountprog_3,
-+ (tcptransp, mount_prog, MOUNTVERS1, mountprog_3,
- opt_portmapper ? IPPROTO_TCP : 0)) {
- fprintf(stderr, "%s\n",
- "unable to register (MOUNTPROG, MOUNTVERS1, tcp).");
-@@ -722,7 +740,7 @@ static void register_mount_service(SVCXP
-
- /* Register MOUNT service (v3) for TCP */
- if (!svc_register
-- (tcptransp, MOUNTPROG, MOUNTVERS3, mountprog_3,
-+ (tcptransp, mount_prog, MOUNTVERS3, mountprog_3,
- opt_portmapper ? IPPROTO_TCP : 0)) {
- fprintf(stderr, "%s\n",
- "unable to register (MOUNTPROG, MOUNTVERS3, tcp).");
---- a/mount.c
-+++ b/mount.c
-@@ -155,8 +155,8 @@ mountres3 *mountproc_mnt_3_svc(dirpath *
- /* error out if not version 3 */
- if (rqstp->rq_vers != 3) {
- logmsg(LOG_INFO,
-- "%s attempted mount with unsupported protocol version",
-- inet_ntoa(get_remote(rqstp)));
-+ "%s attempted mount with unsupported protocol version: %i",
-+ inet_ntoa(get_remote(rqstp)), rqstp->rq_vers);
- result.fhs_status = MNT3ERR_INVAL;
- return &result;
- }
diff --git a/meta/recipes-devtools/unfs3/unfs3/fix_compile_warning.patch b/meta/recipes-devtools/unfs3/unfs3/fix_compile_warning.patch
deleted file mode 100644
index aada014117..0000000000
--- a/meta/recipes-devtools/unfs3/unfs3/fix_compile_warning.patch
+++ /dev/null
@@ -1,25 +0,0 @@
-daemon.c: Check exit code of chdir()
-
-Stop the compile warning and fix the code to act on a chdir() failure.
-If this one does fail something is very, very wrong.
-
-Signed-off-by: Jason Wessel <jason.wessel@windriver.com>
-
-Upstream-Status: Submitted http://sourceforge.net/p/unfs3/bugs/5/
-
----
- daemon.c | 3 ++-
- 1 file changed, 2 insertions(+), 1 deletion(-)
-
---- a/daemon.c
-+++ b/daemon.c
-@@ -964,7 +964,8 @@ int main(int argc, char **argv)
- sigaction(SIGALRM, &act, NULL);
-
- /* don't make directory we started in busy */
-- chdir("/");
-+ if(chdir("/") < 0)
-+ daemon_exit(0);
-
- /* detach from terminal */
- if (opt_detach) {
diff --git a/meta/recipes-devtools/unfs3/unfs3/fix_pid_race_parent_writes_child_pid.patch b/meta/recipes-devtools/unfs3/unfs3/fix_pid_race_parent_writes_child_pid.patch
deleted file mode 100644
index 46b187e5f3..0000000000
--- a/meta/recipes-devtools/unfs3/unfs3/fix_pid_race_parent_writes_child_pid.patch
+++ /dev/null
@@ -1,61 +0,0 @@
-daemon.c: Fix race window for writing of the pid file
-
-The parent process should write the pid file such that the pid file
-will can be checked immediately following exit of the fork from the
-parent.
-
-This allows external monitoring applications to watch the daemon
-without having to add sleep calls to wait for the pid file be written
-on a busy system.
-
-Signed-off-by: Jason Wessel <jason.wessel@windriver.com>
-
-Upstream-Status: Submitted http://sourceforge.net/p/unfs3/bugs/5/
-
----
- daemon.c | 12 +++++++++---
- 1 file changed, 9 insertions(+), 3 deletions(-)
-
---- a/daemon.c
-+++ b/daemon.c
-@@ -153,7 +153,7 @@ int get_socket_type(struct svc_req *rqst
- /*
- * write current pid to a file
- */
--static void create_pid_file(void)
-+static void create_pid_file(int pid)
- {
- char buf[16];
- int fd, res, len;
-@@ -175,7 +175,7 @@ static void create_pid_file(void)
- }
- #endif
-
-- sprintf(buf, "%i\n", backend_getpid());
-+ sprintf(buf, "%i\n", pid);
- len = strlen(buf);
-
- res = backend_pwrite(fd, buf, len, 0);
-@@ -970,6 +970,10 @@ int main(int argc, char **argv)
- fprintf(stderr, "could not fork into background\n");
- daemon_exit(0);
- }
-+ if (pid)
-+ create_pid_file(pid);
-+ } else {
-+ create_pid_file(backend_getpid());
- }
- #endif /* WIN32 */
-
-@@ -1006,8 +1010,10 @@ int main(int argc, char **argv)
- /* no umask to not screw up create modes */
- umask(0);
-
-+#ifdef WIN32
- /* create pid file if wanted */
-- create_pid_file();
-+ create_pid_file(backend_getpid());
-+#endif
-
- /* initialize internal stuff */
- fh_cache_init();
diff --git a/meta/recipes-devtools/unfs3/unfs3/no-yywrap.patch b/meta/recipes-devtools/unfs3/unfs3/no-yywrap.patch
deleted file mode 100644
index e3496814d8..0000000000
--- a/meta/recipes-devtools/unfs3/unfs3/no-yywrap.patch
+++ /dev/null
@@ -1,14 +0,0 @@
-Upstream-Status: Backport [https://github.com/unfs3/unfs3/commit/3fa0568e6ef96e045286afe18444bc28fe93962b]
-
-diff --git a/Config/exports.l b/Config/exports.l
-index 662603c..7e7c4fc 100644
---- a/Config/exports.l
-+++ b/Config/exports.l
-@@ -50,6 +50,7 @@ OLDNET {IP}"/"{IP}
-
- %option nounput
- %option noinput
-+%option noyywrap
-
- %%
-
diff --git a/meta/recipes-devtools/unfs3/unfs3/relative_max_socket_path_len.patch b/meta/recipes-devtools/unfs3/unfs3/relative_max_socket_path_len.patch
deleted file mode 100644
index 219dd35aec..0000000000
--- a/meta/recipes-devtools/unfs3/unfs3/relative_max_socket_path_len.patch
+++ /dev/null
@@ -1,74 +0,0 @@
-nfs.c: Allow max sa.sun_path for a localdomain socket with the user nfs-server
-
-There is a hard limit for the kernel of 108 characters for a
-localdomain socket name. To avoid problems with the user nfs
-server it should maximize the number of characters by using
-a relative path on the server side.
-
-Previously the nfs-server used the absolute path name passed to
-the sa.sunpath arg for binding the socket and this has caused
-problems for both the X server and UST binaries which make
-heavy use of named sockets with long names.
-
-Signed-off-by: Jason Wessel <jason.wessel@windriver.com>
-
-Upstream-Status: Submitted http://sourceforge.net/p/unfs3/bugs/5/
-
----
- nfs.c | 29 +++++++++++++++++++++++++++--
- 1 file changed, 27 insertions(+), 2 deletions(-)
-
---- a/nfs.c
-+++ b/nfs.c
-@@ -672,6 +672,7 @@ SYMLINK3res *nfsproc3_symlink_3_svc(SYML
- }
-
- #ifndef WIN32
-+static char pathbuf_tmp[NFS_MAXPATHLEN + NFS_MAXNAMLEN + 1];
-
- /*
- * create Unix socket
-@@ -680,17 +681,41 @@ static int mksocket(const char *path, mo
- {
- int res, sock;
- struct sockaddr_un addr;
-+ unsigned int len = strlen(path);
-
- sock = socket(PF_UNIX, SOCK_STREAM, 0);
-- addr.sun_family = AF_UNIX;
-- strcpy(addr.sun_path, path);
- res = sock;
- if (res != -1) {
-+ addr.sun_family = AF_UNIX;
-+ if (len < sizeof(addr.sun_path) -1) {
-+ strcpy(addr.sun_path, path);
-+ } else {
-+ char *ptr;
-+ res = -1;
-+ if (len >= sizeof(path))
-+ goto out;
-+ strcpy(pathbuf_tmp, path);
-+ ptr = strrchr(pathbuf_tmp,'/');
-+ if (ptr) {
-+ *ptr = '\0';
-+ ptr++;
-+ if (chdir(pathbuf_tmp))
-+ goto out;
-+ } else {
-+ ptr = pathbuf_tmp;
-+ }
-+ if (strlen(ptr) >= sizeof(addr.sun_path))
-+ goto out;
-+ strcpy(addr.sun_path, ptr);
-+ }
- umask(~mode);
- res =
- bind(sock, (struct sockaddr *) &addr,
- sizeof(addr.sun_family) + strlen(addr.sun_path));
- umask(0);
-+out:
-+ if (chdir("/"))
-+ fprintf(stderr, "Internal failure to chdir /\n");
- close(sock);
- }
- return res;
diff --git a/meta/recipes-devtools/unfs3/unfs3/rename_fh_cache.patch b/meta/recipes-devtools/unfs3/unfs3/rename_fh_cache.patch
deleted file mode 100644
index e6d89530f8..0000000000
--- a/meta/recipes-devtools/unfs3/unfs3/rename_fh_cache.patch
+++ /dev/null
@@ -1,64 +0,0 @@
-From: Jason Wessel <jason.wessel@windriver.com>
-Date: Sat, 23 Feb 2013 08:49:08 -0600
-Subject: [PATCH] fh_cache: fix statle nfs handle on rename problem
-
-The following test case fails with modern linunx kernels which cache
-the renamed inode.
-
- % mkdir a;mkdir b;mv b a/;ls -l a
- ls: a/b: Stale NFS file handle
-
-The issue is that nfserver was not updating the fh_cache with the new
-location of the inode, when it moves directories.
-
-Signed-off-by: Jason Wessel <jason.wessel@windriver.com>
-
-Upstream-Status: Submitted http://sourceforge.net/p/unfs3/bugs/5/
-
----
- fh_cache.c | 12 ++++++++++++
- fh_cache.h | 1 +
- nfs.c | 2 ++
- 3 files changed, 15 insertions(+)
-
---- a/fh_cache.c
-+++ b/fh_cache.c
-@@ -199,6 +199,18 @@ static char *fh_cache_lookup(uint32 dev,
- }
-
- /*
-+ * update a fh inode cache for an operation like rename
-+ */
-+void fh_cache_update(nfs_fh3 fh, char *path)
-+{
-+ unfs3_fh_t *obj = (void *) fh.data.data_val;
-+ backend_statstruct buf;
-+
-+ if (backend_lstat(path, &buf) != -1) {
-+ fh_cache_add(obj->dev, buf.st_ino, path);
-+ }
-+}
-+/*
- * resolve a filename into a path
- * cache-using wrapper for fh_decomp_raw
- */
---- a/fh_cache.h
-+++ b/fh_cache.h
-@@ -19,5 +19,6 @@ unfs3_fh_t fh_comp(const char *path, str
- unfs3_fh_t *fh_comp_ptr(const char *path, struct svc_req *rqstp, int need_dir);
-
- char *fh_cache_add(uint32 dev, uint64 ino, const char *path);
-+void fh_cache_update(nfs_fh3 fh, char *path);
-
- #endif
---- a/nfs.c
-+++ b/nfs.c
-@@ -876,6 +876,8 @@ RENAME3res *nfsproc3_rename_3_svc(RENAME
- res = backend_rename(from_obj, to_obj);
- if (res == -1)
- result.status = rename_err();
-+ /* Update the fh_cache with moved inode value */
-+ fh_cache_update(argp->to.dir, to_obj);
- }
- }
-
diff --git a/meta/recipes-devtools/unfs3/unfs3/tcp_no_delay.patch b/meta/recipes-devtools/unfs3/unfs3/tcp_no_delay.patch
deleted file mode 100644
index b3521c63eb..0000000000
--- a/meta/recipes-devtools/unfs3/unfs3/tcp_no_delay.patch
+++ /dev/null
@@ -1,56 +0,0 @@
-daemon.c: Add option for tcp no delay
-
-Allow the NFS tcp sockets to conditionally use TCP_NODELAY
-
-Upstream-Status: Submitted http://sourceforge.net/p/unfs3/bugs/5/
-
----
- daemon.c | 9 ++++++++-
- 1 file changed, 8 insertions(+), 1 deletion(-)
-
---- a/daemon.c
-+++ b/daemon.c
-@@ -17,6 +17,7 @@
- #ifndef WIN32
- #include <sys/socket.h>
- #include <netinet/in.h>
-+#include <netinet/tcp.h>
- #include <arpa/inet.h>
- #include <syslog.h>
- #else /* WIN32 */
-@@ -75,6 +76,7 @@ unsigned int opt_mount_port = NFS_PORT;
- int opt_singleuser = FALSE;
- int opt_brute_force = FALSE;
- int opt_testconfig = FALSE;
-+int opt_tcp_nodelay = FALSE;
- struct in_addr opt_bind_addr;
- int opt_readable_executables = FALSE;
- char *opt_pid_file = NULL;
-@@ -208,7 +210,7 @@ static void parse_options(int argc, char
- {
-
- int opt = 0;
-- char *optstring = "bcC:de:hl:m:n:prstTuwi:x:y:";
-+ char *optstring = "bcC:de:hl:m:Nn:prstTuwi:x:y:";
-
- while (opt != -1) {
- opt = getopt(argc, argv, optstring);
-@@ -295,6 +297,9 @@ static void parse_options(int argc, char
- exit(1);
- }
- break;
-+ case 'N':
-+ opt_tcp_nodelay = TRUE;
-+ break;
- case 'n':
- opt_nfs_port = strtol(optarg, NULL, 10);
- if (opt_nfs_port == 0) {
-@@ -802,6 +807,8 @@ static SVCXPRT *create_tcp_transport(uns
- sin.sin_addr.s_addr = opt_bind_addr.s_addr;
- sock = socket(PF_INET, SOCK_STREAM, 0);
- setsockopt(sock, SOL_SOCKET, SO_REUSEADDR, (const char *) &on, sizeof(on));
-+ if (opt_tcp_nodelay)
-+ setsockopt(sock, IPPROTO_TCP, TCP_NODELAY, &on, sizeof(on));
- if (bind(sock, (struct sockaddr *) &sin, sizeof(struct sockaddr))) {
- perror("bind");
- fprintf(stderr, "Couldn't bind to tcp port %d\n", port);
diff --git a/meta/recipes-devtools/unfs3/unfs3/unfs3_parallel_build.patch b/meta/recipes-devtools/unfs3/unfs3/unfs3_parallel_build.patch
deleted file mode 100644
index 6f64dd5b3e..0000000000
--- a/meta/recipes-devtools/unfs3/unfs3/unfs3_parallel_build.patch
+++ /dev/null
@@ -1,37 +0,0 @@
-Fix parallel build dependency issue
-
-If building with make -j2 the lib.a will not get built in time.
-
-Jason Wessel <jason.wessel@windriver.com>
-
-Upstream-Status: Submitted http://sourceforge.net/p/unfs3/bugs/5/
-
----
- Config/Makefile.in | 3 +++
- Makefile.in | 3 ++-
- 2 files changed, 5 insertions(+), 1 deletion(-)
-
---- a/Makefile.in
-+++ b/Makefile.in
-@@ -29,7 +29,8 @@ DESTDIR =
-
- VPATH = $(srcdir)
-
--all: subdirs unfsd$(EXEEXT)
-+all: subdirs
-+ $(MAKE) unfsd$(EXEEXT)
-
- unfsd$(EXEEXT): $(OBJS) $(CONFOBJ) $(EXTRAOBJ)
- $(CC) -o $@ $(OBJS) $(CONFOBJ) $(EXTRAOBJ) $(LDFLAGS)
---- a/Config/Makefile.in
-+++ b/Config/Makefile.in
-@@ -16,6 +16,9 @@ lib.a: $(OBJS)
- $(AR) crs lib.a $(OBJS)
-
- y.tab.h y.tab.c: $(srcdir)/exports.y
-+y.tab.h: y.tab.c
-+
-+y.tab.c: $(srcdir)/exports.y
- $(YACC) -d $(srcdir)/exports.y
-
- y.tab.o: y.tab.c $(srcdir)/exports.h $(top_srcdir)/nfs.h $(top_srcdir)/mount.h $(top_srcdir)/daemon.h
diff --git a/meta/recipes-devtools/unfs3/unfs3_git.bb b/meta/recipes-devtools/unfs3/unfs3_git.bb
index 7a5d273851..c5b7898b3c 100644
--- a/meta/recipes-devtools/unfs3/unfs3_git.bb
+++ b/meta/recipes-devtools/unfs3/unfs3_git.bb
@@ -11,35 +11,20 @@ DEPENDS = "flex-native bison-native flex"
DEPENDS += "libtirpc"
DEPENDS:append:class-nativesdk = " flex-nativesdk"
-ASNEEDED = ""
-
S = "${WORKDIR}/git"
SRC_URI = "git://github.com/unfs3/unfs3.git;protocol=https;branch=master \
- file://unfs3_parallel_build.patch \
- file://alternate_rpc_ports.patch \
- file://fix_pid_race_parent_writes_child_pid.patch \
- file://fix_compile_warning.patch \
- file://rename_fh_cache.patch \
- file://relative_max_socket_path_len.patch \
- file://tcp_no_delay.patch \
- file://0001-daemon.c-Libtirpc-porting-fixes.patch \
+ file://0001-daemon.c-Fix-race-window-for-writing-of-the-pid-file.patch \
+ file://0001-Alias-off64_t-to-off_t-on-linux-if-not-defined.patch \
+ file://0001-locate.c-Include-attr.h.patch \
+ file://0001-fix-building-on-macOS.patch \
file://0001-attr-fix-utime-for-symlink.patch \
- file://0001-Add-listen-action-for-a-tcp-socket.patch \
- file://no-yywrap.patch \
- "
-SRCREV = "c12a5c69a8d59be6916cbd0e0f41c159f1962425"
-UPSTREAM_CHECK_GITTAGREGEX = "unfs3\-(?P<pver>.+)"
+ "
+SRCREV = "c8f2d2cd4529955419bad0e163f88d47ff176b8d"
+UPSTREAM_CHECK_GITTAGREGEX = "unfs3\-(?P<pver>\d+(\.\d+)+)"
-PV = "0.9.22+${SRCPV}"
+PV = "0.10.0"
BBCLASSEXTEND = "native nativesdk"
-inherit autotools
+inherit autotools pkgconfig
EXTRA_OECONF:append:class-native = " --sbindir=${bindir}"
-CFLAGS:append = " -I${STAGING_INCDIR}/tirpc"
-EXTRA_OECONF:append = " LIBS=-ltirpc"
-
-# Turn off these header detects else the inode search
-# will walk entire file systems and this is a real problem
-# if you have 2 TB of files to walk in your file system
-CACHED_CONFIGUREVARS = "ac_cv_header_mntent_h=no ac_cv_header_sys_mnttab_h=no"
diff --git a/meta/recipes-devtools/unifdef/unifdef_2.12.bb b/meta/recipes-devtools/unifdef/unifdef_2.12.bb
index b42051b8b6..3e08b3a0a4 100644
--- a/meta/recipes-devtools/unifdef/unifdef_2.12.bb
+++ b/meta/recipes-devtools/unifdef/unifdef_2.12.bb
@@ -18,4 +18,4 @@ do_install() {
oe_runmake install DESTDIR=${D} prefix=${prefix}
}
-BBCLASSEXTEND = "native"
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/vala/vala.inc b/meta/recipes-devtools/vala/vala.inc
deleted file mode 100644
index 90e0b77de0..0000000000
--- a/meta/recipes-devtools/vala/vala.inc
+++ /dev/null
@@ -1,62 +0,0 @@
-SUMMARY = "C#-like programming language for easing GObject programming"
-HOMEPAGE = "http://vala-project.org"
-DESCRIPTION = "Vala is a C#-like language dedicated to ease GObject programming. \
-Vala compiles to plain C and has no runtime environment nor penalities whatsoever."
-SECTION = "devel"
-DEPENDS = "bison-native flex-native glib-2.0"
-
-# Appending libxslt-native to dependencies has an effect
-# of rebuilding the manual, which is very slow. Let's do this
-# only when api-documentation distro feature is enabled.
-DEPENDS:append:class-target = " ${@bb.utils.contains('DISTRO_FEATURES', 'api-documentation', 'libxslt-native', '', d)}"
-
-# vala-native contains a native version of vapigen, which we use instead of the target one
-DEPENDS:append:class-target = " vala-native"
-BBCLASSEXTEND = "native"
-LICENSE = "LGPL-2.1-only"
-LIC_FILES_CHKSUM = "file://COPYING;md5=fbc093901857fcd118f065f900982c24"
-
-SHRT_VER = "${@d.getVar('PV').split('.')[0]}.${@d.getVar('PV').split('.')[1]}"
-
-SRC_URI = "http://ftp.gnome.org/pub/GNOME/sources/${BPN}/${SHRT_VER}/${BP}.tar.xz"
-inherit autotools pkgconfig upstream-version-is-even
-
-FILES:${PN} += "${datadir}/${BPN}-${SHRT_VER}/vapi ${libdir}/${BPN}-${SHRT_VER}/"
-FILES:${PN}-doc += "${datadir}/devhelp"
-
-# .gir files from gobject-introspection are installed to ${libdir} when multilib is enabled
-GIRDIR_OPT = "${@'--girdir=${STAGING_LIBDIR}/gir-1.0' if d.getVar('MULTILIBS') else ''}"
-
-do_configure:prepend:class-target() {
- # Write out a vapigen wrapper that will be provided by pkg-config file installed in target sysroot
- # The wrapper will call a native vapigen
- cat > ${B}/vapigen-wrapper << EOF
-#!/bin/sh
-vapigen-${SHRT_VER} ${GIRDIR_OPT} "\$@"
-EOF
- chmod +x ${B}/vapigen-wrapper
-}
-
-EXTRA_OECONF += " --disable-valadoc"
-
-# Vapigen wrapper needs to be available system-wide, because it will be used
-# to build vapi files from all other packages with vala support
-do_install:append:class-target() {
- install -d ${D}${bindir}/
- install ${B}/vapigen-wrapper ${D}${bindir}/
-}
-
-# Put vapigen wrapper into target sysroot so that it can be used when building
-# vapi files.
-SYSROOT_DIRS:append:class-target = " ${bindir}"
-
-SYSROOT_PREPROCESS_FUNCS:append:class-target = " vapigen_sysroot_preprocess"
-vapigen_sysroot_preprocess() {
- # Tweak the vapigen name in the vapigen pkgconfig file, so that it picks
- # up our wrapper.
- sed -i \
- -e "s|vapigen=.*|vapigen=${bindir}/vapigen-wrapper|" \
- ${SYSROOT_DESTDIR}${libdir}/pkgconfig/vapigen-${SHRT_VER}.pc
-}
-
-SSTATE_SCAN_FILES += "vapigen-wrapper"
diff --git a/meta/recipes-devtools/vala/vala_0.56.1.bb b/meta/recipes-devtools/vala/vala_0.56.1.bb
deleted file mode 100644
index 48d21685e8..0000000000
--- a/meta/recipes-devtools/vala/vala_0.56.1.bb
+++ /dev/null
@@ -1,3 +0,0 @@
-require ${BPN}.inc
-
-SRC_URI[sha256sum] = "c518b81dfdda82d1cdf586b3f9b2323162cb96bd3cb5a2c03650cea025d91fb9"
diff --git a/meta/recipes-devtools/vala/vala_0.56.16.bb b/meta/recipes-devtools/vala/vala_0.56.16.bb
new file mode 100644
index 0000000000..1c8e4fc673
--- /dev/null
+++ b/meta/recipes-devtools/vala/vala_0.56.16.bb
@@ -0,0 +1,73 @@
+SUMMARY = "C#-like programming language for easing GObject programming"
+HOMEPAGE = "http://vala-project.org"
+DESCRIPTION = "Vala is a C#-like language dedicated to ease GObject programming. \
+Vala compiles to plain C and has no runtime environment nor penalities whatsoever."
+SECTION = "devel"
+DEPENDS = "bison-native flex-native glib-2.0 gobject-introspection"
+
+# Appending libxslt-native to dependencies has an effect
+# of rebuilding the manual, which is very slow. Let's do this
+# only when api-documentation distro feature is enabled.
+DEPENDS:append:class-target = " ${@bb.utils.contains('DISTRO_FEATURES', 'api-documentation', 'libxslt-native', '', d)}"
+
+# vala-native contains a native version of vapigen, which we use instead of the target one
+DEPENDS:append:class-target = " vala-native"
+BBCLASSEXTEND = "native"
+LICENSE = "LGPL-2.1-only"
+LIC_FILES_CHKSUM = "file://COPYING;md5=fbc093901857fcd118f065f900982c24"
+
+SHRT_VER = "${@d.getVar('PV').split('.')[0]}.${@d.getVar('PV').split('.')[1]}"
+
+SRC_URI = "http://ftp.gnome.org/pub/GNOME/sources/${BPN}/${SHRT_VER}/${BP}.tar.xz"
+SRC_URI[sha256sum] = "05487b5600f5d2f09e66a753cccd8f39c1bff9f148aea1b7774d505b9c8bca9b"
+
+inherit autotools pkgconfig upstream-version-is-even
+
+FILES:${PN} += "${datadir}/${BPN}-${SHRT_VER}/vapi ${libdir}/${BPN}-${SHRT_VER}/"
+FILES:${PN}-doc += "${datadir}/devhelp"
+
+# .gir files from gobject-introspection are installed to ${libdir} when multilib is enabled
+GIRDIR_OPT = "${@'--girdir=${STAGING_LIBDIR}/gir-1.0' if d.getVar('MULTILIBS') else ''}"
+
+do_configure:prepend:class-target() {
+ # Write out a vapigen wrapper that will be provided by pkg-config file installed in target sysroot
+ # The wrapper will call a native vapigen
+ cat > ${B}/vapigen-wrapper << EOF
+#!/bin/sh
+vapigen-${SHRT_VER} ${GIRDIR_OPT} "\$@"
+EOF
+ chmod +x ${B}/vapigen-wrapper
+}
+
+EXTRA_OECONF += " --disable-valadoc"
+
+# Vapigen wrapper needs to be available system-wide, because it will be used
+# to build vapi files from all other packages with vala support
+do_install:append:class-target() {
+ install -d ${D}${bindir_crossscripts}/
+ install ${B}/vapigen-wrapper ${D}${bindir_crossscripts}/
+}
+
+# Put vapigen wrapper into target sysroot so that it can be used when building
+# vapi files.
+SYSROOT_DIRS += "${bindir_crossscripts}"
+
+inherit multilib_script
+MULTILIB_SCRIPTS = "${PN}:${bindir}/vala-gen-introspect-0.56"
+
+SYSROOT_PREPROCESS_FUNCS:append:class-target = " vapigen_sysroot_preprocess"
+vapigen_sysroot_preprocess() {
+ # Tweak the vapigen name in the vapigen pkgconfig file, so that it picks
+ # up our wrapper.
+ sed -i \
+ -e "s|vapigen=.*|vapigen=${bindir_crossscripts}/vapigen-wrapper|" \
+ ${SYSROOT_DESTDIR}${libdir}/pkgconfig/vapigen-${SHRT_VER}.pc
+}
+
+SSTATE_SCAN_FILES += "vapigen-wrapper"
+
+PACKAGE_PREPROCESS_FUNCS += "vala_package_preprocess"
+
+vala_package_preprocess () {
+ rm -rf ${PKGD}${bindir_crossscripts}
+}
diff --git a/meta/recipes-devtools/valgrind/valgrind/0001-Fix-drd-tests-shared_timed_mutex.cpp.patch b/meta/recipes-devtools/valgrind/valgrind/0001-Fix-drd-tests-shared_timed_mutex.cpp.patch
deleted file mode 100644
index 738bf87c92..0000000000
--- a/meta/recipes-devtools/valgrind/valgrind/0001-Fix-drd-tests-shared_timed_mutex.cpp.patch
+++ /dev/null
@@ -1,26 +0,0 @@
-From 900b3f9b0765488f0e1f457ff5d5d1e3efe897de Mon Sep 17 00:00:00 2001
-From: Alexander Kanavin <alex@linutronix.de>
-Date: Wed, 27 Apr 2022 10:01:22 +0200
-Subject: [PATCH] Fix drd/tests/shared_timed_mutex.cpp
-
-Upstream-Status: Submitted [https://bugs.kde.org/show_bug.cgi?id=453055]
-Signed-off-by: Alexander Kanavin <alex@linutronix.de>
----
- drd/tests/shared_timed_mutex.cpp | 1 +
- 1 file changed, 1 insertion(+)
-
-diff --git a/drd/tests/shared_timed_mutex.cpp b/drd/tests/shared_timed_mutex.cpp
-index 9741169..dfd97a4 100644
---- a/drd/tests/shared_timed_mutex.cpp
-+++ b/drd/tests/shared_timed_mutex.cpp
-@@ -43,6 +43,7 @@ void g()
- if (test_mutex.try_lock_shared_until(then))
- {
- test_mutex.unlock_shared();
-+ break;
- }
- }
- if (i == 3)
---
-2.30.2
-
diff --git a/meta/recipes-devtools/valgrind/valgrind/0001-Make-local-functions-static-to-avoid-assembler-error.patch b/meta/recipes-devtools/valgrind/valgrind/0001-Make-local-functions-static-to-avoid-assembler-error.patch
deleted file mode 100644
index 8d2ca5733e..0000000000
--- a/meta/recipes-devtools/valgrind/valgrind/0001-Make-local-functions-static-to-avoid-assembler-error.patch
+++ /dev/null
@@ -1,182 +0,0 @@
-From 2155c1b2cf00e744e280c493eb74bf457dfcc3b1 Mon Sep 17 00:00:00 2001
-From: Randy MacLeod <Randy.MacLeod@windriver.com>
-Date: Sun, 21 Oct 2018 15:09:31 -0400
-Subject: [PATCH] Make local functions static to avoid assembler error
-
-Avoid mips32 x-compiler warnings such as:
-
-| ../../../valgrind-3.14.0/helgrind/tests/annotate_hbefore.c:360:6: warning: no previous prototype for 'do_signal' [-Wmissing-prototypes]
-| void do_signal ( UWord* w )
-| ^~~~~~~~~
-
-by making functions and global variables that are file scope be static
-and more importantly also avoid an assembler error:
-
-/tmp/cce22iiw.s: Assembler messages:
-/tmp/cce22iiw.s:446: Error: symbol `exit_0' is already defined
-/tmp/cce22iiw.s:448: Error: symbol `exit' is already defined
-/tmp/cce22iiw.s:915: Error: symbol `exit_0' is already defined
-/tmp/cce22iiw.s:917: Error: symbol `exit' is already defined
-
-Upstream-Status: Submitted https://bugs.kde.org/show_bug.cgi?id=400164
-
-Signed-off-by: Randy MacLeod <Randy.MacLeod@windriver.com>
----
- helgrind/tests/annotate_hbefore.c | 34 +++++++++++++++----------------
- 1 file changed, 17 insertions(+), 17 deletions(-)
-
-diff --git a/helgrind/tests/annotate_hbefore.c b/helgrind/tests/annotate_hbefore.c
-index e311714f7..f55514e45 100644
---- a/helgrind/tests/annotate_hbefore.c
-+++ b/helgrind/tests/annotate_hbefore.c
-@@ -24,7 +24,7 @@ typedef unsigned long int UWord;
-
- // ppc64
- /* return 1 if success, 0 if failure */
--UWord do_acasW ( UWord* addr, UWord expected, UWord nyu )
-+static UWord do_acasW ( UWord* addr, UWord expected, UWord nyu )
- {
- UWord old, success;
-
-@@ -57,7 +57,7 @@ UWord do_acasW ( UWord* addr, UWord expected, UWord nyu )
-
- // ppc32
- /* return 1 if success, 0 if failure */
--UWord do_acasW ( UWord* addr, UWord expected, UWord nyu )
-+static UWord do_acasW ( UWord* addr, UWord expected, UWord nyu )
- {
- UWord old, success;
-
-@@ -90,7 +90,7 @@ UWord do_acasW ( UWord* addr, UWord expected, UWord nyu )
-
- // amd64
- /* return 1 if success, 0 if failure */
--UWord do_acasW ( UWord* addr, UWord expected, UWord nyu )
-+static UWord do_acasW ( UWord* addr, UWord expected, UWord nyu )
- {
- UWord block[4] = { (UWord)addr, expected, nyu, 2 };
- __asm__ __volatile__(
-@@ -113,7 +113,7 @@ UWord do_acasW ( UWord* addr, UWord expected, UWord nyu )
-
- // x86
- /* return 1 if success, 0 if failure */
--UWord do_acasW ( UWord* addr, UWord expected, UWord nyu )
-+static UWord do_acasW ( UWord* addr, UWord expected, UWord nyu )
- {
- UWord block[4] = { (UWord)addr, expected, nyu, 2 };
- __asm__ __volatile__(
-@@ -138,7 +138,7 @@ UWord do_acasW ( UWord* addr, UWord expected, UWord nyu )
-
- // arm
- /* return 1 if success, 0 if failure */
--UWord do_acasW ( UWord* addr, UWord expected, UWord nyu )
-+static UWord do_acasW ( UWord* addr, UWord expected, UWord nyu )
- {
- UWord old, success;
- UWord block[2] = { (UWord)addr, nyu };
-@@ -171,7 +171,7 @@ UWord do_acasW ( UWord* addr, UWord expected, UWord nyu )
-
- // arm64
- /* return 1 if success, 0 if failure */
--UWord do_acasW ( UWord* addr, UWord expected, UWord nyu )
-+static UWord do_acasW ( UWord* addr, UWord expected, UWord nyu )
- {
- UWord old, success;
- UWord block[2] = { (UWord)addr, nyu };
-@@ -204,7 +204,7 @@ UWord do_acasW ( UWord* addr, UWord expected, UWord nyu )
-
- // s390x
- /* return 1 if success, 0 if failure */
--UWord do_acasW(UWord* addr, UWord expected, UWord nyu )
-+static UWord do_acasW(UWord* addr, UWord expected, UWord nyu )
- {
- int cc;
-
-@@ -223,7 +223,7 @@ UWord do_acasW(UWord* addr, UWord expected, UWord nyu )
-
- // mips32
- /* return 1 if success, 0 if failure */
--UWord do_acasW ( UWord* addr, UWord expected, UWord nyu )
-+static UWord do_acasW ( UWord* addr, UWord expected, UWord nyu )
- {
- UWord success;
- UWord block[3] = { (UWord)addr, nyu, expected};
-@@ -256,7 +256,7 @@ UWord do_acasW ( UWord* addr, UWord expected, UWord nyu )
-
- // mips64
- /* return 1 if success, 0 if failure */
--UWord do_acasW ( UWord* addr, UWord expected, UWord nyu )
-+static UWord do_acasW ( UWord* addr, UWord expected, UWord nyu )
- {
- UWord success;
- UWord block[3] = { (UWord)addr, nyu, expected};
-@@ -287,7 +287,7 @@ UWord do_acasW ( UWord* addr, UWord expected, UWord nyu )
-
- #endif
-
--void atomic_incW ( UWord* w )
-+static void atomic_incW ( UWord* w )
- {
- while (1) {
- UWord old = *w;
-@@ -301,7 +301,7 @@ void atomic_incW ( UWord* w )
-
- #define NNN 1000000
-
--void* thread_fn ( void* arg )
-+static void* thread_fn ( void* arg )
- {
- UWord* w = (UWord*)arg;
- int i;
-@@ -331,10 +331,10 @@ int main ( void )
-
- #endif
-
--int shared_var = 0; // is not raced upon
-+static int shared_var = 0; // is not raced upon
-
-
--void delayXms ( int i )
-+static void delayXms ( int i )
- {
- struct timespec ts = { 0, 1 * 1000 * 1000 };
- // We do the sleep in small pieces to have scheduling
-@@ -348,7 +348,7 @@ void delayXms ( int i )
- }
- }
-
--void do_wait ( UWord* w )
-+static void do_wait ( UWord* w )
- {
- UWord w0 = *w;
- UWord volatile * wV = w;
-@@ -357,7 +357,7 @@ void do_wait ( UWord* w )
- ANNOTATE_HAPPENS_AFTER(w);
- }
-
--void do_signal ( UWord* w )
-+static void do_signal ( UWord* w )
- {
- ANNOTATE_HAPPENS_BEFORE(w);
- atomic_incW(w);
-@@ -365,7 +365,7 @@ void do_signal ( UWord* w )
-
-
-
--void* thread_fn1 ( void* arg )
-+static void* thread_fn1 ( void* arg )
- {
- UWord* w = (UWord*)arg;
- delayXms(500); // ensure t2 gets to its wait first
-@@ -376,7 +376,7 @@ void* thread_fn1 ( void* arg )
- return NULL;
- }
-
--void* thread_fn2 ( void* arg )
-+static void* thread_fn2 ( void* arg )
- {
- UWord* w = (UWord*)arg;
- do_wait(w); // wait for h-b edge from first thread
---
-2.17.0
-
diff --git a/meta/recipes-devtools/valgrind/valgrind/0001-fix-opcode-not-supported-on-mips32-linux.patch b/meta/recipes-devtools/valgrind/valgrind/0001-fix-opcode-not-supported-on-mips32-linux.patch
deleted file mode 100644
index 39b624d9f6..0000000000
--- a/meta/recipes-devtools/valgrind/valgrind/0001-fix-opcode-not-supported-on-mips32-linux.patch
+++ /dev/null
@@ -1,82 +0,0 @@
-From fb5362f205b37c5060fcd764a7ed393abe4f2f3d Mon Sep 17 00:00:00 2001
-From: Hongxu Jia <hongxu.jia@windriver.com>
-Date: Fri, 27 Jul 2018 17:39:37 +0800
-Subject: [PATCH 1/2] fix opcode not supported on mips32-linux
-
-While build tests(`make check') on mips32-linux, there are
-serial failures such as:
-[snip]
-| mips-wrsmllib32-linux-gcc -meb -mabi=32 -mhard-float -c
--o atomic_incs-atomic_incs.o `test -f 'atomic_incs.c' || echo
-'../../../valgrind-3.13.0/memcheck/tests/'`atomic_incs.c
-| /tmp/ccqrmINN.s: Assembler messages:
-| /tmp/ccqrmINN.s:247: Error: opcode not supported on this
-processor: mips1 (mips1) `ll $t3,0($t1)'
-| /tmp/ccqrmINN.s:249: Error: opcode not supported on this
-processor: mips1 (mips1) `sc $t3,0($t1)'
-[snip]
-
-Since the following commit applied, it defines CLFAGS for mips32,
-but missed to pass them to tests which caused the above failure
-...
-3e344c57f Merge in a port for mips32-linux
-...
-
-Upstream-Status: Submitted [https://bugs.kde.org/show_bug.cgi?id=396905]
-Signed-off-by: Hongxu Jia <hongxu.jia@windriver.com>
----
- helgrind/tests/Makefile.am | 5 +++++
- memcheck/tests/Makefile.am | 5 +++++
- none/tests/mips32/Makefile.am | 4 ++++
- 3 files changed, 14 insertions(+)
-
-diff --git a/helgrind/tests/Makefile.am b/helgrind/tests/Makefile.am
-index ad1af191a..6209d35a7 100644
---- a/helgrind/tests/Makefile.am
-+++ b/helgrind/tests/Makefile.am
-@@ -214,6 +214,11 @@ check_PROGRAMS += annotate_rwlock
- endif
-
- AM_CFLAGS += $(AM_FLAG_M3264_PRI)
-+
-+if VGCONF_PLATFORMS_INCLUDE_MIPS32_LINUX
-+AM_CFLAGS += $(AM_CFLAGS_MIPS32_LINUX)
-+endif
-+
- AM_CXXFLAGS += $(AM_FLAG_M3264_PRI)
-
- LDADD = -lpthread
-diff --git a/memcheck/tests/Makefile.am b/memcheck/tests/Makefile.am
-index 84e49405f..aff861a32 100644
---- a/memcheck/tests/Makefile.am
-+++ b/memcheck/tests/Makefile.am
-@@ -443,6 +443,11 @@ check_PROGRAMS += reach_thread_register
- endif
-
- AM_CFLAGS += $(AM_FLAG_M3264_PRI)
-+
-+if VGCONF_PLATFORMS_INCLUDE_MIPS32_LINUX
-+AM_CFLAGS += $(AM_CFLAGS_MIPS32_LINUX)
-+endif
-+
- AM_CXXFLAGS += $(AM_FLAG_M3264_PRI)
-
- if VGCONF_PLATFORMS_INCLUDE_ARM_LINUX
-diff --git a/none/tests/mips32/Makefile.am b/none/tests/mips32/Makefile.am
-index d11591d45..602cd26f6 100644
---- a/none/tests/mips32/Makefile.am
-+++ b/none/tests/mips32/Makefile.am
-@@ -99,6 +99,10 @@ check_PROGRAMS = \
- round_fpu64 \
- fpu_branches
-
-+if VGCONF_PLATFORMS_INCLUDE_MIPS32_LINUX
-+AM_CFLAGS += $(AM_CFLAGS_MIPS32_LINUX)
-+endif
-+
- AM_CFLAGS += @FLAG_M32@
- AM_CXXFLAGS += @FLAG_M32@
- AM_CCASFLAGS += @FLAG_M32@
---
-2.17.1
-
diff --git a/meta/recipes-devtools/valgrind/valgrind/0001-makefiles-Drop-setting-mcpu-to-cortex-a8-on-arm-arch.patch b/meta/recipes-devtools/valgrind/valgrind/0001-makefiles-Drop-setting-mcpu-to-cortex-a8-on-arm-arch.patch
index 9f1da7bac6..073713c139 100644
--- a/meta/recipes-devtools/valgrind/valgrind/0001-makefiles-Drop-setting-mcpu-to-cortex-a8-on-arm-arch.patch
+++ b/meta/recipes-devtools/valgrind/valgrind/0001-makefiles-Drop-setting-mcpu-to-cortex-a8-on-arm-arch.patch
@@ -10,7 +10,7 @@ implementation.
Fixes
| cc1: warning: switch -mcpu=cortex-a8 conflicts with -march=armv7ve switch
-Upstream-Status: Pending
+Upstream-Status: Submitted [https://bugs.kde.org/show_bug.cgi?id=454346]
Signed-off-by: Khem Raj <raj.khem@gmail.com>
---
diff --git a/meta/recipes-devtools/valgrind/valgrind/0001-memcheck-arm64-Define-__THROW-if-not-already-defined.patch b/meta/recipes-devtools/valgrind/valgrind/0001-memcheck-arm64-Define-__THROW-if-not-already-defined.patch
deleted file mode 100644
index a48d7db070..0000000000
--- a/meta/recipes-devtools/valgrind/valgrind/0001-memcheck-arm64-Define-__THROW-if-not-already-defined.patch
+++ /dev/null
@@ -1,32 +0,0 @@
-From 3409dc35c15bb14c8a525239806322648e079ab1 Mon Sep 17 00:00:00 2001
-From: Khem Raj <raj.khem@gmail.com>
-Date: Wed, 5 Jul 2017 17:12:43 -0700
-Subject: [PATCH 1/3] memcheck/arm64: Define __THROW if not already defined
-
-Helps compiling with musl where __THROW is not available
-
-Signed-off-by: Khem Raj <raj.khem@gmail.com>
----
-Upstream-Status: Submitted
-
- memcheck/tests/arm64-linux/scalar.h | 4 ++++
- 1 file changed, 4 insertions(+)
-
-diff --git a/memcheck/tests/arm64-linux/scalar.h b/memcheck/tests/arm64-linux/scalar.h
-index 9008816..8ef050f 100644
---- a/memcheck/tests/arm64-linux/scalar.h
-+++ b/memcheck/tests/arm64-linux/scalar.h
-@@ -12,6 +12,10 @@
- #include <sys/types.h>
- #include <sys/mman.h>
-
-+#ifndef __THROW
-+#define __THROW
-+#endif
-+
- // Since we use vki_unistd.h, we can't include <unistd.h>. So we have to
- // declare this ourselves.
- extern long int syscall (long int __sysno, ...) __THROW;
---
-2.13.2
-
diff --git a/meta/recipes-devtools/valgrind/valgrind/0001-valgrind-monitor.py-regular-expressions-should-use-r.patch b/meta/recipes-devtools/valgrind/valgrind/0001-valgrind-monitor.py-regular-expressions-should-use-r.patch
new file mode 100644
index 0000000000..80a8e3a10b
--- /dev/null
+++ b/meta/recipes-devtools/valgrind/valgrind/0001-valgrind-monitor.py-regular-expressions-should-use-r.patch
@@ -0,0 +1,64 @@
+From 027b649fdb831868e71be01cafdacc49a5f419ab Mon Sep 17 00:00:00 2001
+From: Mark Wielaard <mark@klomp.org>
+Date: Fri, 17 Nov 2023 14:01:21 +0100
+Subject: [PATCH 1/4] valgrind-monitor.py regular expressions should use raw
+ strings
+
+With python 3.12 gdb will produce the following SyntaxWarning when
+loading valgrind-monitor-def.py:
+
+ /usr/share/gdb/auto-load/valgrind-monitor-def.py:214:
+ SyntaxWarning: invalid escape sequence '\['
+ if re.fullmatch("^0x[0123456789ABCDEFabcdef]+\[[^\[\]]+\]$", arg_str):
+
+In a future python version this will become an SyntaxError.
+
+Use a raw strings for the regular expression.
+
+https://bugs.kde.org/show_bug.cgi?id=476708
+(cherry picked from commit 0fbfbe05028ad18efda786a256a2738d2c231ed4)
+
+Upstream-Status: Backport [https://sourceware.org/git/?p=valgrind.git;a=commit;h=027b649fdb831868e71be01cafdacc49a5f419ab]
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+---
+ NEWS | 13 +++++++++++++
+ coregrind/m_gdbserver/valgrind-monitor-def.py | 2 +-
+ 2 files changed, 14 insertions(+), 1 deletion(-)
+
+diff --git a/NEWS b/NEWS
+index f11da4be8..ee5b4ff11 100644
+--- a/NEWS
++++ b/NEWS
+@@ -1,3 +1,16 @@
++Branch 3.22
++~~~~~~~~~~~
++
++* ==================== FIXED BUGS ====================
++
++The following bugs have been fixed or resolved on this branch.
++
++476708 valgrind-monitor.py regular expressions should use raw strings
++
++To see details of a given bug, visit
++ https://bugs.kde.org/show_bug.cgi?id=XXXXXX
++where XXXXXX is the bug number as listed above.
++
+ Release 3.22.0 (31 Oct 2023)
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+diff --git a/coregrind/m_gdbserver/valgrind-monitor-def.py b/coregrind/m_gdbserver/valgrind-monitor-def.py
+index b4e7b992d..d74b1590c 100644
+--- a/coregrind/m_gdbserver/valgrind-monitor-def.py
++++ b/coregrind/m_gdbserver/valgrind-monitor-def.py
+@@ -211,7 +211,7 @@ class Valgrind_ADDR_LEN_opt(Valgrind_Command):
+ For compatibility reason with the Valgrind gdbserver monitor command,
+ we detect and accept usages such as 0x1234ABCD[10]."""
+ def invoke(self, arg_str : str, from_tty : bool) -> None:
+- if re.fullmatch("^0x[0123456789ABCDEFabcdef]+\[[^\[\]]+\]$", arg_str):
++ if re.fullmatch(r"^0x[0123456789ABCDEFabcdef]+\[[^\[\]]+\]$", arg_str):
+ arg_str = arg_str.replace("[", " ")
+ arg_str = arg_str.replace("]", " ")
+ eval_execute_2(self, arg_str,
+--
+2.44.0
+
diff --git a/meta/recipes-devtools/valgrind/valgrind/0002-Bug-476548-valgrind-3.22.0-fails-on-assertion-when-l.patch b/meta/recipes-devtools/valgrind/valgrind/0002-Bug-476548-valgrind-3.22.0-fails-on-assertion-when-l.patch
new file mode 100644
index 0000000000..5759fa039a
--- /dev/null
+++ b/meta/recipes-devtools/valgrind/valgrind/0002-Bug-476548-valgrind-3.22.0-fails-on-assertion-when-l.patch
@@ -0,0 +1,147 @@
+From 1d00e5ce0fb069911c4b525ec38289fb5d9021b0 Mon Sep 17 00:00:00 2001
+From: Paul Floyd <pjfloyd@wanadoo.fr>
+Date: Sat, 18 Nov 2023 08:49:34 +0100
+Subject: [PATCH 2/4] Bug 476548 - valgrind 3.22.0 fails on assertion when
+ loading debuginfo file produced by mold
+
+(cherry picked from commit 9ea4ae66707a4dcc6f4328e11911652e4418c585)
+
+Upstream-Status: Backport [https://sourceware.org/git/?p=valgrind.git;a=commit;h=1d00e5ce0fb069911c4b525ec38289fb5d9021b0]
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+---
+ NEWS | 2 ++
+ coregrind/m_debuginfo/image.c | 14 +++++++++
+ coregrind/m_debuginfo/priv_image.h | 4 +++
+ coregrind/m_debuginfo/readelf.c | 49 ++++++++++++++++++++++++++++--
+ 4 files changed, 66 insertions(+), 3 deletions(-)
+
+diff --git a/NEWS b/NEWS
+index ee5b4ff11..6cd13429a 100644
+--- a/NEWS
++++ b/NEWS
+@@ -5,6 +5,8 @@ Branch 3.22
+
+ The following bugs have been fixed or resolved on this branch.
+
++476548 valgrind 3.22.0 fails on assertion when loading debuginfo
++ file produced by mold
+ 476708 valgrind-monitor.py regular expressions should use raw strings
+
+ To see details of a given bug, visit
+diff --git a/coregrind/m_debuginfo/image.c b/coregrind/m_debuginfo/image.c
+index 02e509071..445f95555 100644
+--- a/coregrind/m_debuginfo/image.c
++++ b/coregrind/m_debuginfo/image.c
+@@ -1221,6 +1221,20 @@ Int ML_(img_strcmp_c)(DiImage* img, DiOffT off1, const HChar* str2)
+ }
+ }
+
++Int ML_(img_strcmp_n)(DiImage* img, DiOffT off1, const HChar* str2, Word n)
++{
++ ensure_valid(img, off1, 1, "ML_(img_strcmp_c)");
++ while (n) {
++ UChar c1 = get(img, off1);
++ UChar c2 = *(const UChar*)str2;
++ if (c1 < c2) return -1;
++ if (c1 > c2) return 1;
++ if (c1 == 0) return 0;
++ off1++; str2++; --n;
++ }
++ return 0;
++}
++
+ UChar ML_(img_get_UChar)(DiImage* img, DiOffT offset)
+ {
+ ensure_valid(img, offset, 1, "ML_(img_get_UChar)");
+diff --git a/coregrind/m_debuginfo/priv_image.h b/coregrind/m_debuginfo/priv_image.h
+index a49846f14..c91e49f01 100644
+--- a/coregrind/m_debuginfo/priv_image.h
++++ b/coregrind/m_debuginfo/priv_image.h
+@@ -115,6 +115,10 @@ Int ML_(img_strcmp)(DiImage* img, DiOffT off1, DiOffT off2);
+ cast to HChar before comparison. */
+ Int ML_(img_strcmp_c)(DiImage* img, DiOffT off1, const HChar* str2);
+
++/* Do strncmp of a C string in the image vs a normal one. Chars are
++ cast to HChar before comparison. */
++Int ML_(img_strcmp_n)(DiImage* img, DiOffT off1, const HChar* str2, Word n);
++
+ /* Do strlen of a C string in the image. */
+ SizeT ML_(img_strlen)(DiImage* img, DiOffT off);
+
+diff --git a/coregrind/m_debuginfo/readelf.c b/coregrind/m_debuginfo/readelf.c
+index fb64ed976..46f8c8343 100644
+--- a/coregrind/m_debuginfo/readelf.c
++++ b/coregrind/m_debuginfo/readelf.c
+@@ -2501,8 +2501,7 @@ Bool ML_(read_elf_object) ( struct _DebugInfo* di )
+ di->rodata_avma += inrw1->bias;
+ di->rodata_bias = inrw1->bias;
+ di->rodata_debug_bias = inrw1->bias;
+- }
+- else {
++ } else {
+ BAD(".rodata"); /* should not happen? */
+ }
+ di->rodata_present = True;
+@@ -2977,6 +2976,46 @@ Bool ML_(read_elf_object) ( struct _DebugInfo* di )
+ return retval;
+ }
+
++static void find_rodata(Word i, Word shnum, DiImage* dimg, struct _DebugInfo* di, DiOffT shdr_dioff,
++ UWord shdr_dent_szB, DiOffT shdr_strtab_dioff, PtrdiffT rw_dbias)
++{
++ ElfXX_Shdr a_shdr;
++ ElfXX_Shdr a_extra_shdr;
++ ML_(img_get)(&a_shdr, dimg,
++ INDEX_BIS(shdr_dioff, i, shdr_dent_szB),
++ sizeof(a_shdr));
++ if (di->rodata_present &&
++ 0 == ML_(img_strcmp_c)(dimg, shdr_strtab_dioff
++ + a_shdr.sh_name, ".rodata")) {
++ Word sh_size = a_shdr.sh_size;
++ Word j;
++ Word next_addr = a_shdr.sh_addr + a_shdr.sh_size;
++ for (j = i + 1; j < shnum; ++j) {
++ ML_(img_get)(&a_extra_shdr, dimg,
++ INDEX_BIS(shdr_dioff, j, shdr_dent_szB),
++ sizeof(a_shdr));
++ if (0 == ML_(img_strcmp_n)(dimg, shdr_strtab_dioff
++ + a_extra_shdr.sh_name, ".rodata", 7)) {
++ if (a_extra_shdr.sh_addr ==
++ VG_ROUNDUP(next_addr, a_extra_shdr.sh_addralign)) {
++ sh_size = VG_ROUNDUP(sh_size, a_extra_shdr.sh_addralign) + a_extra_shdr.sh_size;
++ }
++ next_addr = a_extra_shdr.sh_addr + a_extra_shdr.sh_size;
++ } else {
++ break;
++ }
++ }
++ vg_assert(di->rodata_size == sh_size);
++ vg_assert(di->rodata_avma + a_shdr.sh_addr + rw_dbias);
++ di->rodata_debug_svma = a_shdr.sh_addr;
++ di->rodata_debug_bias = di->rodata_bias +
++ di->rodata_svma - di->rodata_debug_svma;
++ TRACE_SYMTAB("acquiring .rodata debug svma = %#lx .. %#lx\n",
++ di->rodata_debug_svma,
++ di->rodata_debug_svma + di->rodata_size - 1);
++ TRACE_SYMTAB("acquiring .rodata debug bias = %#lx\n", (UWord)di->rodata_debug_bias);
++ }
++}
+ Bool ML_(read_elf_debug) ( struct _DebugInfo* di )
+ {
+ Word i, j;
+@@ -3391,7 +3430,11 @@ Bool ML_(read_elf_debug) ( struct _DebugInfo* di )
+ FIND(text, rx)
+ FIND(data, rw)
+ FIND(sdata, rw)
+- FIND(rodata, rw)
++ // https://bugs.kde.org/show_bug.cgi?id=476548
++ // special handling for rodata as adjacent
++ // rodata sections may have been merged in ML_(read_elf_object)
++ //FIND(rodata, rw)
++ find_rodata(i, ehdr_dimg.e_shnum, dimg, di, shdr_dioff, shdr_dent_szB, shdr_strtab_dioff, rw_dbias);
+ FIND(bss, rw)
+ FIND(sbss, rw)
+
+--
+2.44.0
+
diff --git a/meta/recipes-devtools/valgrind/valgrind/0002-context-APIs-are-not-available-on-musl.patch b/meta/recipes-devtools/valgrind/valgrind/0002-context-APIs-are-not-available-on-musl.patch
deleted file mode 100644
index 7f0e38cb95..0000000000
--- a/meta/recipes-devtools/valgrind/valgrind/0002-context-APIs-are-not-available-on-musl.patch
+++ /dev/null
@@ -1,92 +0,0 @@
-From 26c104adf6c5162572b7aa2fac89d0835b7f8f0b Mon Sep 17 00:00:00 2001
-From: Randy MacLeod <Randy.MacLeod@windriver.com>
-Date: Tue, 16 Oct 2018 21:27:46 -0400
-Subject: [PATCH] context APIs are not available on musl
-
-Updated patch for valgrind-3.14
-
-Signed-off-by: Khem Raj <raj.khem@gmail.com>
-Signed-off-by: Randy MacLeod <Randy.MacLeod@windriver.com>
-
-Apply same patch to drd/tests/swapcontext.c
-for valgrind-3.17.
-
-Upstream-Status: Submitted [https://bugs.kde.org/show_bug.cgi?id=434775]
-
-Signed-off-by: Yi Fan Yu <yifan.yu@windriver.com>
----
- drd/tests/swapcontext.c | 6 ++++++
- memcheck/tests/linux/stack_changes.c | 7 ++++++-
- 2 files changed, 12 insertions(+), 1 deletion(-)
-
-diff --git a/drd/tests/swapcontext.c b/drd/tests/swapcontext.c
-index 622c70bc5..5e72bb0f3 100644
---- a/drd/tests/swapcontext.c
-+++ b/drd/tests/swapcontext.c
-@@ -20,6 +20,7 @@
-
- #define STACKSIZE (PTHREAD_STACK_MIN + 4096)
-
-+#ifdef __GLIBC__
- typedef struct thread_local {
- ucontext_t uc[3];
- size_t nrsw;
-@@ -67,9 +68,11 @@ void *worker(void *data)
- swapcontext(&tlocal->uc[0], &tlocal->uc[1]);
- return NULL;
- }
-+#endif
-
- int main(int argc, char *argv[])
- {
-+#ifdef __GLIBC__
- enum { NR = 32 };
- thread_local_t tlocal[NR];
- pthread_t thread[NR];
-@@ -94,6 +97,9 @@ int main(int argc, char *argv[])
-
- for (i = 0; i < NR; i++)
- pthread_join(thread[i], NULL);
-+#else
-+ printf("libc context call APIs e.g. getcontext() are deprecated by posix\n");
-+#endif
-
- return 0;
- }
-diff --git a/memcheck/tests/linux/stack_changes.c b/memcheck/tests/linux/stack_changes.c
-index 7f97b90a5..a26cb4ae6 100644
---- a/memcheck/tests/linux/stack_changes.c
-+++ b/memcheck/tests/linux/stack_changes.c
-@@ -10,6 +10,7 @@
- // This test is checking the libc context calls (setcontext, etc.) and
- // checks that Valgrind notices their stack changes properly.
-
-+#ifdef __GLIBC__
- typedef ucontext_t mycontext;
-
- mycontext ctx1, ctx2, oldc;
-@@ -51,9 +52,11 @@ int init_context(mycontext *uc)
-
- return ret;
- }
-+#endif
-
- int main(int argc, char **argv)
- {
-+#ifdef __GLIBC__
- int c1 = init_context(&ctx1);
- int c2 = init_context(&ctx2);
-
-@@ -66,6 +69,8 @@ int main(int argc, char **argv)
- //free(ctx1.uc_stack.ss_sp);
- VALGRIND_STACK_DEREGISTER(c2);
- //free(ctx2.uc_stack.ss_sp);
--
-+#else
-+ printf("libc context call APIs e.g. getcontext() are deprecated by posix\n");
-+#endif
- return 0;
- }
---
-2.17.1
-
diff --git a/meta/recipes-devtools/valgrind/valgrind/0002-memcheck-x86-Define-__THROW-if-not-defined.patch b/meta/recipes-devtools/valgrind/valgrind/0002-memcheck-x86-Define-__THROW-if-not-defined.patch
deleted file mode 100644
index 5433472291..0000000000
--- a/meta/recipes-devtools/valgrind/valgrind/0002-memcheck-x86-Define-__THROW-if-not-defined.patch
+++ /dev/null
@@ -1,32 +0,0 @@
-From 67d199dbdcbb3feff5f8928f87725fc64c0307d7 Mon Sep 17 00:00:00 2001
-From: Khem Raj <raj.khem@gmail.com>
-Date: Wed, 5 Jul 2017 17:36:42 -0700
-Subject: [PATCH 2/3] memcheck/x86: Define __THROW if not defined
-
-musl does not have __THROW, therefore make it null
-
-Signed-off-by: Khem Raj <raj.khem@gmail.com>
----
-Upstream-Status: Submitted
-
- memcheck/tests/x86-linux/scalar.h | 4 ++++
- 1 file changed, 4 insertions(+)
-
-diff --git a/memcheck/tests/x86-linux/scalar.h b/memcheck/tests/x86-linux/scalar.h
-index ef28b03..52f742e 100644
---- a/memcheck/tests/x86-linux/scalar.h
-+++ b/memcheck/tests/x86-linux/scalar.h
-@@ -11,6 +11,10 @@
- #include <sys/types.h>
- #include <sys/mman.h>
-
-+#ifndef __THROW
-+#define __THROW
-+#endif
-+
- // Since we use vki_unistd.h, we can't include <unistd.h>. So we have to
- // declare this ourselves.
- extern long int syscall (long int __sysno, ...) __THROW;
---
-2.13.2
-
diff --git a/meta/recipes-devtools/valgrind/valgrind/0003-Add-fchmodat2-syscall-on-linux.patch b/meta/recipes-devtools/valgrind/valgrind/0003-Add-fchmodat2-syscall-on-linux.patch
new file mode 100644
index 0000000000..2a09ca52b7
--- /dev/null
+++ b/meta/recipes-devtools/valgrind/valgrind/0003-Add-fchmodat2-syscall-on-linux.patch
@@ -0,0 +1,221 @@
+From a43e62dddcf51ec6578a90c5988a41e856b44b05 Mon Sep 17 00:00:00 2001
+From: Mark Wielaard <mark@klomp.org>
+Date: Sat, 18 Nov 2023 21:17:02 +0100
+Subject: [PATCH 3/4] Add fchmodat2 syscall on linux
+
+fchmodat2 is a new syscall on linux 6.6. It is a variant of fchmodat
+that takes an extra flags argument.
+
+https://bugs.kde.org/show_bug.cgi?id=477198
+
+(cherry picked from commit 372d09fd9a8d76847c81092ebff71c80fd6c145d)
+
+Upstream-Status: Backport [https://sourceware.org/git/?p=valgrind.git;a=commit;h=a43e62dddcf51ec6578a90c5988a41e856b44b05]
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+---
+ NEWS | 1 +
+ coregrind/m_syswrap/priv_syswrap-linux.h | 3 +++
+ coregrind/m_syswrap/syswrap-amd64-linux.c | 2 ++
+ coregrind/m_syswrap/syswrap-arm-linux.c | 2 ++
+ coregrind/m_syswrap/syswrap-arm64-linux.c | 2 ++
+ coregrind/m_syswrap/syswrap-linux.c | 11 +++++++++++
+ coregrind/m_syswrap/syswrap-mips32-linux.c | 2 ++
+ coregrind/m_syswrap/syswrap-mips64-linux.c | 1 +
+ coregrind/m_syswrap/syswrap-nanomips-linux.c | 1 +
+ coregrind/m_syswrap/syswrap-ppc32-linux.c | 2 ++
+ coregrind/m_syswrap/syswrap-ppc64-linux.c | 2 ++
+ coregrind/m_syswrap/syswrap-s390x-linux.c | 2 ++
+ coregrind/m_syswrap/syswrap-x86-linux.c | 2 ++
+ include/vki/vki-scnums-shared-linux.h | 2 ++
+ 14 files changed, 35 insertions(+)
+
+diff --git a/NEWS b/NEWS
+index 6cd13429a..da0f8c1aa 100644
+--- a/NEWS
++++ b/NEWS
+@@ -8,6 +8,7 @@ The following bugs have been fixed or resolved on this branch.
+ 476548 valgrind 3.22.0 fails on assertion when loading debuginfo
+ file produced by mold
+ 476708 valgrind-monitor.py regular expressions should use raw strings
++477198 Add fchmodat2 syscall on linux
+
+ To see details of a given bug, visit
+ https://bugs.kde.org/show_bug.cgi?id=XXXXXX
+diff --git a/coregrind/m_syswrap/priv_syswrap-linux.h b/coregrind/m_syswrap/priv_syswrap-linux.h
+index 7c9decf5a..798c456c9 100644
+--- a/coregrind/m_syswrap/priv_syswrap-linux.h
++++ b/coregrind/m_syswrap/priv_syswrap-linux.h
+@@ -331,6 +331,9 @@ DECL_TEMPLATE(linux, sys_openat2);
+ // Linux-specific (new in Linux 5.14)
+ DECL_TEMPLATE(linux, sys_memfd_secret);
+
++// Since Linux 6.6
++DECL_TEMPLATE(linux, sys_fchmodat2);
++
+ /* ---------------------------------------------------------------------
+ Wrappers for sockets and ipc-ery. These are split into standalone
+ procedures because x86-linux hides them inside multiplexors
+diff --git a/coregrind/m_syswrap/syswrap-amd64-linux.c b/coregrind/m_syswrap/syswrap-amd64-linux.c
+index 008600798..fe17d118b 100644
+--- a/coregrind/m_syswrap/syswrap-amd64-linux.c
++++ b/coregrind/m_syswrap/syswrap-amd64-linux.c
+@@ -886,6 +886,8 @@ static SyscallTableEntry syscall_table[] = {
+ LINXY(__NR_epoll_pwait2, sys_epoll_pwait2), // 441
+
+ LINXY(__NR_memfd_secret, sys_memfd_secret), // 447
++
++ LINX_(__NR_fchmodat2, sys_fchmodat2), // 452
+ };
+
+ SyscallTableEntry* ML_(get_linux_syscall_entry) ( UInt sysno )
+diff --git a/coregrind/m_syswrap/syswrap-arm-linux.c b/coregrind/m_syswrap/syswrap-arm-linux.c
+index 9a7a1e0d2..811931d3b 100644
+--- a/coregrind/m_syswrap/syswrap-arm-linux.c
++++ b/coregrind/m_syswrap/syswrap-arm-linux.c
+@@ -1059,6 +1059,8 @@ static SyscallTableEntry syscall_main_table[] = {
+ LINX_(__NR_faccessat2, sys_faccessat2), // 439
+
+ LINXY(__NR_epoll_pwait2, sys_epoll_pwait2), // 441
++
++ LINX_(__NR_fchmodat2, sys_fchmodat2), // 452
+ };
+
+
+diff --git a/coregrind/m_syswrap/syswrap-arm64-linux.c b/coregrind/m_syswrap/syswrap-arm64-linux.c
+index 6af7bab83..3307bc2ca 100644
+--- a/coregrind/m_syswrap/syswrap-arm64-linux.c
++++ b/coregrind/m_syswrap/syswrap-arm64-linux.c
+@@ -840,6 +840,8 @@ static SyscallTableEntry syscall_main_table[] = {
+ LINXY(__NR_epoll_pwait2, sys_epoll_pwait2), // 441
+
+ LINXY(__NR_memfd_secret, sys_memfd_secret), // 447
++
++ LINX_(__NR_fchmodat2, sys_fchmodat2), // 452
+ };
+
+
+diff --git a/coregrind/m_syswrap/syswrap-linux.c b/coregrind/m_syswrap/syswrap-linux.c
+index d571fc327..efa47f2e6 100644
+--- a/coregrind/m_syswrap/syswrap-linux.c
++++ b/coregrind/m_syswrap/syswrap-linux.c
+@@ -6059,6 +6059,17 @@ PRE(sys_fchmodat)
+ PRE_MEM_RASCIIZ( "fchmodat(path)", ARG2 );
+ }
+
++PRE(sys_fchmodat2)
++{
++ PRINT("sys_fchmodat2 ( %ld, %#" FMT_REGWORD "x(%s), %" FMT_REGWORD "u, %"
++ FMT_REGWORD "u )",
++ SARG1, ARG2, (HChar*)(Addr)ARG2, ARG3, ARG4);
++ PRE_REG_READ4(long, "fchmodat2",
++ int, dfd, const char *, path, vki_mode_t, mode,
++ unsigned int, flags);
++ PRE_MEM_RASCIIZ( "fchmodat2(pathname)", ARG2 );
++}
++
+ PRE(sys_faccessat)
+ {
+ PRINT("sys_faccessat ( %ld, %#" FMT_REGWORD "x(%s), %ld )",
+diff --git a/coregrind/m_syswrap/syswrap-mips32-linux.c b/coregrind/m_syswrap/syswrap-mips32-linux.c
+index 6268a00dd..74a1f6eac 100644
+--- a/coregrind/m_syswrap/syswrap-mips32-linux.c
++++ b/coregrind/m_syswrap/syswrap-mips32-linux.c
+@@ -1143,6 +1143,8 @@ static SyscallTableEntry syscall_main_table[] = {
+ LINX_ (__NR_faccessat2, sys_faccessat2), // 439
+
+ LINXY(__NR_epoll_pwait2, sys_epoll_pwait2), // 441
++
++ LINX_(__NR_fchmodat2, sys_fchmodat2), // 452
+ };
+
+ SyscallTableEntry* ML_(get_linux_syscall_entry) (UInt sysno)
+diff --git a/coregrind/m_syswrap/syswrap-mips64-linux.c b/coregrind/m_syswrap/syswrap-mips64-linux.c
+index 6cdf25893..4e8508b7a 100644
+--- a/coregrind/m_syswrap/syswrap-mips64-linux.c
++++ b/coregrind/m_syswrap/syswrap-mips64-linux.c
+@@ -820,6 +820,7 @@ static SyscallTableEntry syscall_main_table[] = {
+ LINXY (__NR_close_range, sys_close_range),
+ LINX_ (__NR_faccessat2, sys_faccessat2),
+ LINXY(__NR_epoll_pwait2, sys_epoll_pwait2),
++ LINX_ (__NR_fchmodat2, sys_fchmodat2),
+ };
+
+ SyscallTableEntry * ML_(get_linux_syscall_entry) ( UInt sysno )
+diff --git a/coregrind/m_syswrap/syswrap-nanomips-linux.c b/coregrind/m_syswrap/syswrap-nanomips-linux.c
+index d724cde74..7859900c1 100644
+--- a/coregrind/m_syswrap/syswrap-nanomips-linux.c
++++ b/coregrind/m_syswrap/syswrap-nanomips-linux.c
+@@ -829,6 +829,7 @@ static SyscallTableEntry syscall_main_table[] = {
+ LINXY (__NR_close_range, sys_close_range),
+ LINX_ (__NR_faccessat2, sys_faccessat2),
+ LINXY (__NR_epoll_pwait2, sys_epoll_pwait2),
++ LINX_ (__NR_fchmodat2, sys_fchmodat2),
+ };
+
+ SyscallTableEntry* ML_(get_linux_syscall_entry) (UInt sysno)
+diff --git a/coregrind/m_syswrap/syswrap-ppc32-linux.c b/coregrind/m_syswrap/syswrap-ppc32-linux.c
+index c0cfef235..1e19116ee 100644
+--- a/coregrind/m_syswrap/syswrap-ppc32-linux.c
++++ b/coregrind/m_syswrap/syswrap-ppc32-linux.c
+@@ -1063,6 +1063,8 @@ static SyscallTableEntry syscall_table[] = {
+ LINX_(__NR_faccessat2, sys_faccessat2), // 439
+
+ LINXY (__NR_epoll_pwait2, sys_epoll_pwait2), // 441
++
++ LINX_ (__NR_fchmodat2, sys_fchmodat2), // 452
+ };
+
+ SyscallTableEntry* ML_(get_linux_syscall_entry) ( UInt sysno )
+diff --git a/coregrind/m_syswrap/syswrap-ppc64-linux.c b/coregrind/m_syswrap/syswrap-ppc64-linux.c
+index f5976f30c..1097212a4 100644
+--- a/coregrind/m_syswrap/syswrap-ppc64-linux.c
++++ b/coregrind/m_syswrap/syswrap-ppc64-linux.c
+@@ -1032,6 +1032,8 @@ static SyscallTableEntry syscall_table[] = {
+ LINX_(__NR_faccessat2, sys_faccessat2), // 439
+
+ LINXY (__NR_epoll_pwait2, sys_epoll_pwait2), // 441
++
++ LINX_ (__NR_fchmodat2, sys_fchmodat2), // 452
+ };
+
+ SyscallTableEntry* ML_(get_linux_syscall_entry) ( UInt sysno )
+diff --git a/coregrind/m_syswrap/syswrap-s390x-linux.c b/coregrind/m_syswrap/syswrap-s390x-linux.c
+index afba154e7..3588672c7 100644
+--- a/coregrind/m_syswrap/syswrap-s390x-linux.c
++++ b/coregrind/m_syswrap/syswrap-s390x-linux.c
+@@ -873,6 +873,8 @@ static SyscallTableEntry syscall_table[] = {
+ LINX_(__NR_faccessat2, sys_faccessat2), // 439
+
+ LINXY(__NR_epoll_pwait2, sys_epoll_pwait2), // 441
++
++ LINX_ (__NR_fchmodat2, sys_fchmodat2), // 452
+ };
+
+ SyscallTableEntry* ML_(get_linux_syscall_entry) ( UInt sysno )
+diff --git a/coregrind/m_syswrap/syswrap-x86-linux.c b/coregrind/m_syswrap/syswrap-x86-linux.c
+index da4fd8fa2..58badc6b0 100644
+--- a/coregrind/m_syswrap/syswrap-x86-linux.c
++++ b/coregrind/m_syswrap/syswrap-x86-linux.c
+@@ -1658,6 +1658,8 @@ static SyscallTableEntry syscall_table[] = {
+ LINXY(__NR_epoll_pwait2, sys_epoll_pwait2), // 441
+
+ LINXY(__NR_memfd_secret, sys_memfd_secret), // 447
++
++ LINX_(__NR_fchmodat2, sys_fchmodat2), // 452
+ };
+
+ SyscallTableEntry* ML_(get_linux_syscall_entry) ( UInt sysno )
+diff --git a/include/vki/vki-scnums-shared-linux.h b/include/vki/vki-scnums-shared-linux.h
+index 542382b53..a4cd87149 100644
+--- a/include/vki/vki-scnums-shared-linux.h
++++ b/include/vki/vki-scnums-shared-linux.h
+@@ -50,4 +50,6 @@
+
+ #define __NR_memfd_secret 447
+
++#define __NR_fchmodat2 452
++
+ #endif
+--
+2.44.0
+
diff --git a/meta/recipes-devtools/valgrind/valgrind/0003-tests-seg_override-Replace-__modify_ldt-with-syscall.patch b/meta/recipes-devtools/valgrind/valgrind/0003-tests-seg_override-Replace-__modify_ldt-with-syscall.patch
deleted file mode 100644
index fa1344c853..0000000000
--- a/meta/recipes-devtools/valgrind/valgrind/0003-tests-seg_override-Replace-__modify_ldt-with-syscall.patch
+++ /dev/null
@@ -1,68 +0,0 @@
-From d103475875858ab8a2e6b53ce178bb2f63883d4c Mon Sep 17 00:00:00 2001
-From: Khem Raj <raj.khem@gmail.com>
-Date: Wed, 5 Jul 2017 17:37:56 -0700
-Subject: [PATCH 3/3] tests/seg_override: Replace __modify_ldt() with syscall()
-
-__modify_ldt() is specific to glibc, replacing it with syscall()
-makes it more portable.
-
-Signed-off-by: Khem Raj <raj.khem@gmail.com>
----
-Upstream-Status: Submitted
-
- none/tests/x86-linux/seg_override.c | 15 ++++++---------
- 1 file changed, 6 insertions(+), 9 deletions(-)
-
-diff --git a/none/tests/x86-linux/seg_override.c b/none/tests/x86-linux/seg_override.c
-index b7619c9..c89874b 100644
---- a/none/tests/x86-linux/seg_override.c
-+++ b/none/tests/x86-linux/seg_override.c
-@@ -2,6 +2,8 @@
- #include <stdio.h>
- #include <errno.h>
- #include <string.h>
-+#include <unistd.h>
-+#include <syscall.h>
-
- /* Stuff from Wine. */
-
-@@ -52,14 +54,11 @@ inline static unsigned int wine_ldt_get_limit( const LDT_ENTRY *ent )
- /* our copy of the ldt */
- LDT_ENTRY ldt_copy[8192];
-
--/* System call to set LDT entry. */
--//extern int __modify_ldt (int, struct modify_ldt_ldt_s *, size_t);
--extern int __modify_ldt (int, void *, size_t);
--
- void print_ldt ( void )
- {
- int res;
-- res = __modify_ldt( 0, ldt_copy, 8192*sizeof(LDT_ENTRY) );
-+ /* System call to set LDT entry. */
-+ res = syscall(SYS_modify_ldt, 0, ldt_copy, 8192*sizeof(LDT_ENTRY) );
- printf("got %d bytes\n", res );
- perror("error is");
- }
-@@ -83,9 +82,6 @@ struct modify_ldt_ldt_s
- unsigned int empty:25;
- };
-
--/* System call to set LDT entry. */
--//extern int __modify_ldt (int, struct modify_ldt_ldt_s *, size_t);
--
- void set_ldt1 ( void* base )
- {
- int stat;
-@@ -102,7 +98,8 @@ void set_ldt1 ( void* base )
- ldt_entry.read_exec_only = 0;
- ldt_entry.limit_in_pages = 0;
- ldt_entry.seg_not_present = 0;
-- stat = __modify_ldt (1, &ldt_entry, sizeof (ldt_entry));
-+ /* System call to set LDT entry. */
-+ stat = syscall(SYS_modify_ldt, 1, &ldt_entry, sizeof (ldt_entry));
- printf("stat = %d\n", stat);
- }
-
---
-2.13.2
-
diff --git a/meta/recipes-devtools/valgrind/valgrind/0004-Bug-478624-Valgrind-incompatibility-with-binutils-2..patch b/meta/recipes-devtools/valgrind/valgrind/0004-Bug-478624-Valgrind-incompatibility-with-binutils-2..patch
new file mode 100644
index 0000000000..4e9185508a
--- /dev/null
+++ b/meta/recipes-devtools/valgrind/valgrind/0004-Bug-478624-Valgrind-incompatibility-with-binutils-2..patch
@@ -0,0 +1,137 @@
+From 41ff9aa49f6c54c66d0e6b37f265fd9cb0176057 Mon Sep 17 00:00:00 2001
+From: Paul Floyd <pjfloyd@wanadoo.fr>
+Date: Sun, 17 Dec 2023 14:18:51 +0100
+Subject: [PATCH 4/4] Bug 478624 - Valgrind incompatibility with binutils-2.42
+ on x86 with new nop patterns (unhandled instruction bytes: 0x2E 0x8D 0xB4
+ 0x26)
+
+It was a bit of a struggle to get the testcase to build
+with both clang and gcc (oddly enough gcc was more difficult) so
+I just resorted to using .byte arrays.
+
+(cherry picked from commit d35005cef8ad8207542738812705ceabf137d7e0)
+
+Upstream-Status: Backport [https://sourceware.org/git/?p=valgrind.git;a=commit;h=41ff9aa49f6c54c66d0e6b37f265fd9cb0176057]
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+---
+ .gitignore | 1 +
+ NEWS | 2 ++
+ VEX/priv/guest_x86_toIR.c | 22 +++++++++++++-
+ none/tests/x86/Makefile.am | 2 ++
+ none/tests/x86/gnu_binutils_nop.c | 34 ++++++++++++++++++++++
+ none/tests/x86/gnu_binutils_nop.stderr.exp | 0
+ none/tests/x86/gnu_binutils_nop.vgtest | 2 ++
+ 7 files changed, 62 insertions(+), 1 deletion(-)
+ create mode 100644 none/tests/x86/gnu_binutils_nop.c
+ create mode 100644 none/tests/x86/gnu_binutils_nop.stderr.exp
+ create mode 100644 none/tests/x86/gnu_binutils_nop.vgtest
+
+--- a/NEWS
++++ b/NEWS
+@@ -9,6 +9,8 @@ The following bugs have been fixed or re
+ file produced by mold
+ 476708 valgrind-monitor.py regular expressions should use raw strings
+ 477198 Add fchmodat2 syscall on linux
++478624 Valgrind incompatibility with binutils-2.42 on x86 with new nop patterns
++ (unhandled instruction bytes: 0x2E 0x8D 0xB4 0x26)
+
+ To see details of a given bug, visit
+ https://bugs.kde.org/show_bug.cgi?id=XXXXXX
+--- a/VEX/priv/guest_x86_toIR.c
++++ b/VEX/priv/guest_x86_toIR.c
+@@ -8198,7 +8198,7 @@ DisResult disInstr_X86_WRK (
+ delta += 5;
+ goto decode_success;
+ }
+- /* Don't barf on recent binutils padding,
++ /* Don't barf on recent (2010) binutils padding,
+ all variants of which are: nopw %cs:0x0(%eax,%eax,1)
+ 66 2e 0f 1f 84 00 00 00 00 00
+ 66 66 2e 0f 1f 84 00 00 00 00 00
+@@ -8222,6 +8222,26 @@ DisResult disInstr_X86_WRK (
+ goto decode_success;
+ }
+ }
++
++ /* bug478624 GNU binutils uses a leal of esi into itself with
++ a zero offset and CS prefix as an 8 byte no-op (Dec 2023).
++ Since the CS prefix is hardly ever used we don't do much
++ to decode it, just a few cases for conditional branches.
++ So add handling here with other pseudo-no-ops.
++ */
++ if (code[0] == 0x2E && code[1] == 0x8D) {
++ if (code[2] == 0x74 && code[3] == 0x26 && code[4] == 0x00) {
++ DIP("leal %%cs:0(%%esi,%%eiz,1),%%esi\n");
++ delta += 5;
++ goto decode_success;
++ }
++ if (code[2] == 0xB4 && code[3] == 0x26 && code[4] == 0x00
++ && code[5] == 0x00 && code[6] == 0x00 && code[7] == 0x00) {
++ DIP("leal %%cs:0(%%esi,%%eiz,1),%%esi\n");
++ delta += 8;
++ goto decode_success;
++ }
++ }
+
+ // Intel CET requires the following opcodes to be treated as NOPs
+ // with any prefix and ModRM, SIB and disp combination:
+--- a/none/tests/x86/Makefile.am
++++ b/none/tests/x86/Makefile.am
+@@ -52,6 +52,7 @@ EXTRA_DIST = \
+ fxtract.stdout.exp fxtract.stderr.exp fxtract.vgtest \
+ fxtract.stdout.exp-older-glibc \
+ getseg.stdout.exp getseg.stderr.exp getseg.vgtest \
++ gnu_binutils_nop.stderr.exp gnu_binutils_nop.vgtest \
+ incdec_alt.stdout.exp incdec_alt.stderr.exp incdec_alt.vgtest \
+ int.stderr.exp int.stdout.exp int.disabled \
+ $(addsuffix .stderr.exp,$(INSN_TESTS)) \
+@@ -100,6 +101,7 @@ check_PROGRAMS = \
+ fpu_lazy_eflags \
+ fxtract \
+ getseg \
++ gnu_binutils_nop \
+ incdec_alt \
+ $(INSN_TESTS) \
+ int \
+--- /dev/null
++++ b/none/tests/x86/gnu_binutils_nop.c
+@@ -0,0 +1,34 @@
++int main(void)
++{
++ // GNU binutils uses various opcodes as alternatives for nop
++ // the idea is that it is faster to execute one large opcode
++ // with no side-effects than multiple repetitions of the
++ // single byte 'nop'. This gives more choice when code
++ // needs to be padded.
++
++ // the following is based on
++ // https://sourceware.org/cgit/binutils-gdb/tree/gas/config/tc-i386.c#n1256
++
++ // one byte
++ __asm__ __volatile__("nop");
++ // two bytes
++ __asm__ __volatile__("xchg %ax,%ax");
++ // three bytes
++ //__asm__ __volatile__("leal 0(%esi),%esi");
++ __asm__ __volatile__(".byte 0x8d,0x76,0x00");
++ // four bytes
++ //__asm__ __volatile__("leal 0(%esi,%eiz),%esi");
++ __asm__ __volatile__(".byte 0x8d,0x74,0x26,0x00");
++ // five bytes
++ //__asm__ __volatile__("leal %cs:0(%esi,%eiz),%esi");
++ __asm__ __volatile__(".byte 0x2e,0x8d,0x74,0x26,0x00");
++ // six bytes
++ //__asm__ __volatile__("leal 0L(%esi),%esi");
++ __asm__ __volatile__(".byte 0x8d,0xb6,0x00,0x00,0x00,0x00");
++ // seven bytes
++ //__asm__ __volatile__("leal 0L(%esi,%eiz),%esi");
++ __asm__ __volatile__(".byte 0x8d,0xb4,0x26,0x00,0x00,0x00,0x00");
++ // eight bytes
++ //__asm__ __volatile__("leal %cs:0L(%esi,%eiz),%esi");
++ __asm__ __volatile__(".byte 0x2e,0x8d,0xb4,0x26,0x00,0x00,0x00,0x00");
++}
+--- /dev/null
++++ b/none/tests/x86/gnu_binutils_nop.vgtest
+@@ -0,0 +1,2 @@
++prog: gnu_binutils_nop
++vgopts: -q
diff --git a/meta/recipes-devtools/valgrind/valgrind/0005-Modify-vg_test-wrapper-to-support-PTEST-formats.patch b/meta/recipes-devtools/valgrind/valgrind/0005-Modify-vg_test-wrapper-to-support-PTEST-formats.patch
index a3637ea846..fea3b00f62 100644
--- a/meta/recipes-devtools/valgrind/valgrind/0005-Modify-vg_test-wrapper-to-support-PTEST-formats.patch
+++ b/meta/recipes-devtools/valgrind/valgrind/0005-Modify-vg_test-wrapper-to-support-PTEST-formats.patch
@@ -1,7 +1,7 @@
-From f49f27f1bc67d07440b0ac9a7d767a8ea1589bfe Mon Sep 17 00:00:00 2001
+From e244a72c6f8803550f37e81f72bbae039651013b Mon Sep 17 00:00:00 2001
From: Alexander Kanavin <alex.kanavin@gmail.com>
Date: Tue, 15 Dec 2015 15:50:44 +0200
-Subject: [PATCH 5/5] Modify vg_test wrapper to support PTEST formats
+Subject: [PATCH] Modify vg_test wrapper to support PTEST formats
Change the valgrind regression test script vg_regtest to
support the yocto ptest stdout reporting format. The commit adds
@@ -25,11 +25,11 @@ Increase time limit to 90 s.
Signed-off-by: Yi Fan Yu <yifan.yu@windriver.com>
---
- tests/vg_regtest.in | 75 +++++++++++++++++++++++++++++++++++++++--------------
+ tests/vg_regtest.in | 75 +++++++++++++++++++++++++++++++++------------
1 file changed, 55 insertions(+), 20 deletions(-)
diff --git a/tests/vg_regtest.in b/tests/vg_regtest.in
-index a441f42..cb05b52 100755
+index ad18800..e4bd8cb 100755
--- a/tests/vg_regtest.in
+++ b/tests/vg_regtest.in
@@ -47,6 +47,7 @@
@@ -49,7 +49,7 @@ index a441f42..cb05b52 100755
. " Use EXTRA_REGTEST_OPTS to supply extra args for all tests\n"
. "\n";
-@@ -186,6 +187,7 @@ my $outer_args;
+@@ -187,6 +188,7 @@ my $run_outer_args = "";
my $valgrind_lib = "$tests_dir/.in_place";
my $keepunfiltered = 0;
my $looptillfail = 0;
@@ -57,7 +57,7 @@ index a441f42..cb05b52 100755
# default filter is the one named "filter_stderr" in the test's directory
my $default_stderr_filter = "filter_stderr";
-@@ -244,6 +246,8 @@ sub process_command_line()
+@@ -245,6 +247,8 @@ sub process_command_line()
$keepunfiltered = 1;
} elsif ($arg =~ /^--loop-till-fail$/) {
$looptillfail = 1;
@@ -66,7 +66,7 @@ index a441f42..cb05b52 100755
} else {
die $usage;
}
-@@ -365,13 +369,28 @@ sub read_vgtest_file($)
+@@ -376,13 +380,28 @@ sub read_vgtest_file($)
#----------------------------------------------------------------------------
# Since most of the program time is spent in system() calls, need this to
# propagate a Ctrl-C enabling us to quit.
@@ -100,7 +100,7 @@ index a441f42..cb05b52 100755
# if $keepunfiltered, copies $1 to $1.unfiltered.out
# renames $0 tp $1
sub filtered_rename($$)
-@@ -419,23 +438,25 @@ sub do_diffs($$$$)
+@@ -430,23 +449,25 @@ sub do_diffs($$$$)
# A match; remove .out and any previously created .diff files.
unlink("$name.$mid.out");
unlink(<$name.$mid.diff*>);
@@ -128,7 +128,7 @@ index a441f42..cb05b52 100755
$vgtest =~ /^(.*)\.vgtest/;
my $name = $1;
my $fullname = "$dir/$name";
-@@ -454,7 +475,11 @@ sub do_one_test($$)
+@@ -465,7 +486,11 @@ sub do_one_test($$)
} elsif (256 == $prereq_res) {
# Nb: weird Perl-ism -- exit code of '1' is seen by Perl as 256...
# Prereq failed, skip.
@@ -141,7 +141,7 @@ index a441f42..cb05b52 100755
return;
} else {
# Bad prereq; abort.
-@@ -472,7 +497,7 @@ sub do_one_test($$)
+@@ -483,7 +508,7 @@ sub do_one_test($$)
}
# If there is a progB, let's start it in background:
printf("%-16s valgrind $extraopts $vgopts $prog $args (progB: $progB $argsB)\n",
@@ -150,7 +150,7 @@ index a441f42..cb05b52 100755
# progB.done used to detect child has finished. See below.
# Note: redirection of stdout and stderr is before $progB to allow argsB
# to e.g. redirect stdoutB to stderrB
-@@ -488,7 +513,8 @@ sub do_one_test($$)
+@@ -499,7 +524,8 @@ sub do_one_test($$)
. "touch progB.done) &");
}
} else {
@@ -160,7 +160,7 @@ index a441f42..cb05b52 100755
}
# Collect environment variables, if any.
-@@ -529,7 +555,7 @@ sub do_one_test($$)
+@@ -540,7 +566,7 @@ sub do_one_test($$)
# Find all the .stdout.exp files. If none, use /dev/null.
my @stdout_exps = <$name.stdout.exp*>;
@stdout_exps = ( "/dev/null" ) if (0 == scalar @stdout_exps);
@@ -169,7 +169,7 @@ index a441f42..cb05b52 100755
# Filter stderr
$stderr_filter_args = $name if (! defined $stderr_filter_args);
-@@ -538,7 +564,7 @@ sub do_one_test($$)
+@@ -549,7 +575,7 @@ sub do_one_test($$)
# Find all the .stderr.exp files. At least one must exist.
my @stderr_exps = <$name.stderr.exp*>;
(0 != scalar @stderr_exps) or die "Could not find `$name.stderr.exp*'\n";
@@ -178,7 +178,7 @@ index a441f42..cb05b52 100755
if (defined $progB) {
# wait for the child to be finished
-@@ -562,7 +588,7 @@ sub do_one_test($$)
+@@ -573,7 +599,7 @@ sub do_one_test($$)
# Find all the .stdoutB.exp files. If none, use /dev/null.
my @stdoutB_exps = <$name.stdoutB.exp*>;
@stdoutB_exps = ( "/dev/null" ) if (0 == scalar @stdoutB_exps);
@@ -187,7 +187,7 @@ index a441f42..cb05b52 100755
# Filter stderr
$stderrB_filter_args = $name if (! defined $stderrB_filter_args);
-@@ -571,7 +597,7 @@ sub do_one_test($$)
+@@ -582,7 +608,7 @@ sub do_one_test($$)
# Find all the .stderrB.exp files. At least one must exist.
my @stderrB_exps = <$name.stderrB.exp*>;
(0 != scalar @stderrB_exps) or die "Could not find `$name.stderrB.exp*'\n";
@@ -196,7 +196,7 @@ index a441f42..cb05b52 100755
}
# Maybe do post-test check
-@@ -583,7 +609,7 @@ sub do_one_test($$)
+@@ -594,7 +620,7 @@ sub do_one_test($$)
# Find all the .post.exp files. If none, use /dev/null.
my @post_exps = <$name.post.exp*>;
@post_exps = ( "/dev/null" ) if (0 == scalar @post_exps);
@@ -205,7 +205,7 @@ index a441f42..cb05b52 100755
}
}
-@@ -592,6 +618,13 @@ sub do_one_test($$)
+@@ -603,6 +629,13 @@ sub do_one_test($$)
print("(cleanup operation failed: $cleanup)\n");
}
@@ -219,25 +219,25 @@ index a441f42..cb05b52 100755
$num_tests_done++;
}
-@@ -631,7 +664,7 @@ sub test_one_dir($$)
- my $found_tests = (0 != (grep { $_ =~ /\.vgtest$/ } @fs));
+@@ -643,7 +676,7 @@ sub test_one_dir($$)
+ my $tests_start_time = time;
if ($found_tests) {
- print "-- Running tests in $full_dir $dashes\n";
+ print "-- Running tests in $full_dir $dashes\n" if ($yoctoptest == 0);
}
foreach my $f (@fs) {
if (-d $f) {
-@@ -641,7 +674,7 @@ sub test_one_dir($$)
- }
- }
- if ($found_tests) {
-- print "-- Finished tests in $full_dir $dashes\n";
+@@ -657,7 +690,7 @@ sub test_one_dir($$)
+ my $end_time = "(in $tests_cost_time sec)";
+ my $end_dashes = "-" x (50 - (length $full_dir)
+ - (length $end_time) - 1);
+- print "-- Finished tests in $full_dir $end_time $end_dashes\n";
+ print "-- Finished tests in $full_dir $dashes\n" if ($yoctoptest == 0);
}
chdir("..");
-@@ -667,10 +700,12 @@ sub summarise_results
+@@ -683,10 +716,12 @@ sub summarise_results
$num_failures{"stdout"}, plural($num_failures{"stdout"}),
$num_failures{"stderrB"}, plural($num_failures{"stderrB"}),
$num_failures{"stdoutB"}, plural($num_failures{"stdoutB"}),
@@ -253,5 +253,5 @@ index a441f42..cb05b52 100755
print "\n";
}
--
-2.6.2
+2.30.2
diff --git a/meta/recipes-devtools/valgrind/valgrind/Added-support-for-PPC-instructions-mfatbu-mfatbl.patch b/meta/recipes-devtools/valgrind/valgrind/Added-support-for-PPC-instructions-mfatbu-mfatbl.patch
index 07774f38a9..51cd3532d3 100644
--- a/meta/recipes-devtools/valgrind/valgrind/Added-support-for-PPC-instructions-mfatbu-mfatbl.patch
+++ b/meta/recipes-devtools/valgrind/valgrind/Added-support-for-PPC-instructions-mfatbu-mfatbl.patch
@@ -3,14 +3,14 @@ From: Aneesh Bansal <aneesh.bansal@freescale.com>
Date: Mon, 21 Nov 2011 17:31:39 +0530
Subject: [PATCH] Added support for PPC instructions mfatbu, mfatbl.
-Upstream-Status: Pending
-
-Signed-off-by: Aneesh Bansal <aneesh.bansal@freescale.com>
----
Currently Valgrind 3.7.0 does not have support for PPC instructions mfatbu and mfatbl. When we run a USDPAA application with VALGRIND, the following error is given by valgrind :
dis_proc_ctl(ppc)(mfspr,SPR)(0x20F)
disInstr(ppc): unhandled instruction: 0x7C0F82A6
+Upstream-Status: Submitted [https://bugs.kde.org/show_bug.cgi?id=289836]
+
+Signed-off-by: Aneesh Bansal <aneesh.bansal@freescale.com>
+---
VEX/priv/guest_ppc_defs.h | 2 ++
VEX/priv/guest_ppc_helpers.c | 18 ++++++++++++++++++
diff --git a/meta/recipes-devtools/valgrind/valgrind/avoid-neon-for-targets-which-don-t-support-it.patch b/meta/recipes-devtools/valgrind/valgrind/avoid-neon-for-targets-which-don-t-support-it.patch
index 5fcfec0015..82b8344279 100644
--- a/meta/recipes-devtools/valgrind/valgrind/avoid-neon-for-targets-which-don-t-support-it.patch
+++ b/meta/recipes-devtools/valgrind/valgrind/avoid-neon-for-targets-which-don-t-support-it.patch
@@ -8,7 +8,7 @@ test 64-bit float copies when building for ARM. Allow it to do so if
possible, but fallback to C when building for ARM targets which don't
support neon.
-Upstream-Status: Pending
+Upstream-Status: Submitted [https://bugs.kde.org/show_bug.cgi?id=454346]
Signed-off-by: Andre McCurdy <armccurdy@gmail.com>
---
diff --git a/meta/recipes-devtools/valgrind/valgrind/fixed-perl-path.patch b/meta/recipes-devtools/valgrind/valgrind/fixed-perl-path.patch
index db6867f625..b9804e7451 100644
--- a/meta/recipes-devtools/valgrind/valgrind/fixed-perl-path.patch
+++ b/meta/recipes-devtools/valgrind/valgrind/fixed-perl-path.patch
@@ -1,4 +1,4 @@
-From d85cc45e0ddeda68adf594dead715964cb32d0e7 Mon Sep 17 00:00:00 2001
+From beaa5b64c60d501fa9cd59fdc563e5f5bccf6882 Mon Sep 17 00:00:00 2001
From: Qing He <qing.he@intel.com>
Date: Tue, 31 Aug 2010 22:51:58 +0800
Subject: [PATCH] valgrind: fix perl scripts
@@ -17,34 +17,13 @@ a better fix would need:
Upstream-Status: Inappropriate [configuration]
Signed-off-by: Maxin B. John <maxin.john@intel.com>
+
---
- cachegrind/cg_annotate.in | 2 +-
- cachegrind/cg_diff.in | 2 +-
- massif/ms_print.in | 2 +-
- perf/vg_perf.in | 2 +-
- tests/vg_regtest.in | 2 +-
- 5 files changed, 5 insertions(+), 5 deletions(-)
+ massif/ms_print.in | 2 +-
+ perf/vg_perf.in | 2 +-
+ tests/vg_regtest.in | 2 +-
+ 3 files changed, 3 insertions(+), 3 deletions(-)
-diff --git a/cachegrind/cg_annotate.in b/cachegrind/cg_annotate.in
-index fea114b..5e814fd 100644
---- a/cachegrind/cg_annotate.in
-+++ b/cachegrind/cg_annotate.in
-@@ -1,4 +1,4 @@
--#! @PERL@
-+#! /usr/bin/perl
-
- ##--------------------------------------------------------------------##
- ##--- Cachegrind's annotator. cg_annotate.in ---##
-diff --git a/cachegrind/cg_diff.in b/cachegrind/cg_diff.in
-index 9d9258e..d0f0ec7 100755
---- a/cachegrind/cg_diff.in
-+++ b/cachegrind/cg_diff.in
-@@ -1,4 +1,4 @@
--#! @PERL@
-+#! /usr/bin/perl
-
- ##--------------------------------------------------------------------##
- ##--- Cachegrind's differencer. cg_diff.in ---##
diff --git a/massif/ms_print.in b/massif/ms_print.in
index a206ce4..df1bc31 100755
--- a/massif/ms_print.in
@@ -66,7 +45,7 @@ index 90ee1d2..c585096 100644
##--- Valgrind performance testing script vg_perf ---##
##--------------------------------------------------------------------##
diff --git a/tests/vg_regtest.in b/tests/vg_regtest.in
-index 0fe6341..1b45eb7 100755
+index 7152765..ad18800 100755
--- a/tests/vg_regtest.in
+++ b/tests/vg_regtest.in
@@ -1,4 +1,4 @@
@@ -75,6 +54,3 @@ index 0fe6341..1b45eb7 100755
##--------------------------------------------------------------------##
##--- Valgrind regression testing script vg_regtest ---##
##--------------------------------------------------------------------##
---
-2.29.2
-
diff --git a/meta/recipes-devtools/valgrind/valgrind/remove-for-aarch64 b/meta/recipes-devtools/valgrind/valgrind/remove-for-aarch64
index 887bfd2766..5bfba1c8d3 100644
--- a/meta/recipes-devtools/valgrind/valgrind/remove-for-aarch64
+++ b/meta/recipes-devtools/valgrind/valgrind/remove-for-aarch64
@@ -1,211 +1,8 @@
-gdbserver_tests/hgtls
-cachegrind/tests/ann1
-callgrind/tests/simwork1
-callgrind/tests/simwork2
-callgrind/tests/simwork3
-callgrind/tests/simwork-both
-callgrind/tests/simwork-cache
-callgrind/tests/threads
-callgrind/tests/threads-use
-drd/tests/annotate_barrier
-drd/tests/annotate_barrier_xml
-drd/tests/annotate_hbefore
-drd/tests/annotate_hb_err
-drd/tests/annotate_hb_race
-drd/tests/annotate_ignore_read
-drd/tests/annotate_ignore_rw
-drd/tests/annotate_ignore_rw2
-drd/tests/annotate_ignore_write
-drd/tests/annotate_ignore_write2
-drd/tests/annotate_order_1
-drd/tests/annotate_order_2
-drd/tests/annotate_order_3
-drd/tests/annotate_publish_hg
-drd/tests/annotate_rwlock
-drd/tests/annotate_rwlock_hg
drd/tests/annotate_sem
-drd/tests/annotate_smart_pointer
-drd/tests/annotate_smart_pointer2
-drd/tests/annotate_spinlock
-drd/tests/annotate_static
-drd/tests/annotate_trace_memory
-drd/tests/annotate_trace_memory_xml
-drd/tests/atomic_var
-drd/tests/bar_bad
-drd/tests/bar_trivial
-drd/tests/boost_thread
-drd/tests/bug-235681
-drd/tests/bug322621
-drd/tests/circular_buffer
-drd/tests/concurrent_close
-drd/tests/custom_alloc
-drd/tests/custom_alloc_fiw
-drd/tests/dlopen
-drd/tests/fork-parallel
-drd/tests/fork-serial
-drd/tests/fp_race
-drd/tests/fp_race2
-drd/tests/fp_race_xml
-drd/tests/free_is_write
-drd/tests/free_is_write2
-drd/tests/hg01_all_ok
-drd/tests/hg02_deadlock
-drd/tests/hg03_inherit
-drd/tests/hg04_race
-drd/tests/hg05_race2
-drd/tests/hg06_readshared
-drd/tests/hold_lock_1
-drd/tests/hold_lock_2
-drd/tests/linuxthreads_det
-drd/tests/matinv
-drd/tests/memory_allocation
-drd/tests/monitor_example
-drd/tests/new_delete
-drd/tests/pth_barrier
-drd/tests/pth_barrier2
-drd/tests/pth_barrier3
-drd/tests/pth_barrier_race
-drd/tests/pth_barrier_reinit
-drd/tests/pth_broadcast
-drd/tests/pth_cancel_locked
-drd/tests/pth_cleanup_handler
-drd/tests/pth_cond_race
-drd/tests/pth_cond_race2
-drd/tests/pth_detached2
-drd/tests/pth_detached3
-drd/tests/pth_detached_sem
-drd/tests/pth_inconsistent_cond_wait
-drd/tests/pth_mutex_reinit
-drd/tests/pth_once
-drd/tests/pth_process_shared_mutex
-drd/tests/pth_spinlock
-drd/tests/pth_uninitialized_cond
-drd/tests/read_and_free_race
-drd/tests/recursive_mutex
-drd/tests/rwlock_race
-drd/tests/rwlock_test
-drd/tests/rwlock_type_checking
-drd/tests/sem_as_mutex
-drd/tests/sem_as_mutex2
-drd/tests/sem_as_mutex3
-drd/tests/sem_open
-drd/tests/sem_open2
-drd/tests/sem_open3
-drd/tests/sem_open_traced
-drd/tests/sem_wait
-drd/tests/sigalrm
-drd/tests/sigaltstack
-drd/tests/std_atomic
-drd/tests/std_string
-drd/tests/std_thread
-drd/tests/std_thread2
-drd/tests/str_tester
-drd/tests/tc01_simple_race
-drd/tests/tc02_simple_tls
-drd/tests/tc03_re_excl
-drd/tests/tc04_free_lock
-drd/tests/tc05_simple_race
-drd/tests/tc06_two_races
-drd/tests/tc07_hbl1
-drd/tests/tc08_hbl2
-drd/tests/tc10_rec_lock
-drd/tests/tc11_XCHG
-drd/tests/tc12_rwl_trivial
-drd/tests/tc13_laog1
-drd/tests/tc15_laog_lockdel
-drd/tests/tc16_byterace
-drd/tests/tc17_sembar
-drd/tests/tc18_semabuse
-drd/tests/tc19_shadowmem
-drd/tests/tc21_pthonce
-drd/tests/tc22_exit_w_lock
-drd/tests/tc23_bogus_condwait
-helgrind/tests/annotate_rwlock
-helgrind/tests/annotate_smart_pointer
-helgrind/tests/bar_bad
-helgrind/tests/bar_trivial
-helgrind/tests/bug322621
-helgrind/tests/cond_init_destroy
-helgrind/tests/cond_timedwait_invalid
-helgrind/tests/cond_timedwait_test
-helgrind/tests/free_is_write
-helgrind/tests/hg01_all_ok
-helgrind/tests/hg03_inherit
-helgrind/tests/hg04_race
+gdbserver_tests/hgtls
helgrind/tests/hg05_race2
-helgrind/tests/hg06_readshared
-helgrind/tests/locked_vs_unlocked1_fwd
-helgrind/tests/locked_vs_unlocked1_rev
-helgrind/tests/locked_vs_unlocked2
-helgrind/tests/locked_vs_unlocked3
-helgrind/tests/pth_barrier1
-helgrind/tests/pth_barrier2
-helgrind/tests/pth_barrier3
-helgrind/tests/pth_destroy_cond
-helgrind/tests/rwlock_race
-helgrind/tests/rwlock_test
-helgrind/tests/shmem_abits
-helgrind/tests/stackteardown
-helgrind/tests/t2t_laog
-helgrind/tests/tc01_simple_race
-helgrind/tests/tc02_simple_tls
-helgrind/tests/tc03_re_excl
-helgrind/tests/tc04_free_lock
-helgrind/tests/tc05_simple_race
-helgrind/tests/tc06_two_races
-helgrind/tests/tc06_two_races_xml
-helgrind/tests/tc07_hbl1
-helgrind/tests/tc08_hbl2
-helgrind/tests/tc09_bad_unlock
-helgrind/tests/tc10_rec_lock
-helgrind/tests/tc11_XCHG
-helgrind/tests/tc12_rwl_trivial
-helgrind/tests/tc13_laog1
-helgrind/tests/tc14_laog_dinphils
-helgrind/tests/tc15_laog_lockdel
-helgrind/tests/tc16_byterace
-helgrind/tests/tc17_sembar
-helgrind/tests/tc18_semabuse
-helgrind/tests/tc19_shadowmem
helgrind/tests/tc20_verifywrap
-helgrind/tests/tc21_pthonce
-helgrind/tests/tc22_exit_w_lock
-helgrind/tests/tc23_bogus_condwait
-helgrind/tests/tc24_nonzero_sem
-memcheck/tests/accounting
-memcheck/tests/addressable
-memcheck/tests/arm64-linux/scalar
-memcheck/tests/atomic_incs
-memcheck/tests/badaddrvalue
-memcheck/tests/badfree
-memcheck/tests/badfree-2trace
-memcheck/tests/badfree3
-memcheck/tests/badjump
-memcheck/tests/badjump2
-memcheck/tests/badloop
-memcheck/tests/badpoll
-memcheck/tests/badrw
-memcheck/tests/big_blocks_freed_list
-memcheck/tests/brk2
memcheck/tests/dw4
-memcheck/tests/err_disable4
-memcheck/tests/err_disable_arange1
-memcheck/tests/leak-autofreepool-5
-memcheck/tests/linux/lsframe1
-memcheck/tests/linux/lsframe2
-memcheck/tests/linux/with-space
-memcheck/tests/origin5-bz2
-memcheck/tests/origin6-fp
-memcheck/tests/partial_load_dflt
-memcheck/tests/pdb-realloc2
-memcheck/tests/sh-mem
-memcheck/tests/sh-mem-random
-memcheck/tests/sigaltstack
-memcheck/tests/sigkill
-memcheck/tests/signal2
-memcheck/tests/threadname
-memcheck/tests/threadname_xml
-memcheck/tests/unit_oset
memcheck/tests/varinfo1
memcheck/tests/varinfo2
memcheck/tests/varinfo3
@@ -213,21 +10,23 @@ memcheck/tests/varinfo4
memcheck/tests/varinfo5
memcheck/tests/varinfo6
memcheck/tests/varinforestrict
-memcheck/tests/vcpu_bz2
-memcheck/tests/vcpu_fbench
-memcheck/tests/vcpu_fnfns
-memcheck/tests/wcs
-memcheck/tests/wrap1
-memcheck/tests/wrap2
-memcheck/tests/wrap3
-memcheck/tests/wrap4
-memcheck/tests/wrap5
-memcheck/tests/wrap6
-memcheck/tests/wrap7
-memcheck/tests/wrap8
-memcheck/tests/wrapmalloc
-memcheck/tests/wrapmallocstatic
-memcheck/tests/writev1
-memcheck/tests/xml1
-memcheck/tests/linux/stack_changes
-memcheck/tests/linux/timerfd-syscall
+memcheck/tests/atomic_incs
+memcheck/tests/bug464969_d_demangle
+memcheck/tests/cxx17_aligned_new
+memcheck/tests/demangle
+memcheck/tests/long_namespace_xml
+memcheck/tests/mismatches
+memcheck/tests/mismatches_xml
+memcheck/tests/new_aligned_delete_default
+memcheck/tests/new_delete_mismatch_size
+memcheck/tests/new_nothrow
+memcheck/tests/realloc_size_zero_mismatch
+memcheck/tests/sized_aligned_new_delete_args
+memcheck/tests/sized_aligned_new_delete_misaligned1
+memcheck/tests/sized_aligned_new_delete_misaligned1_xml
+memcheck/tests/sized_aligned_new_delete_misaligned2
+memcheck/tests/sized_aligned_new_delete_misaligned2_xml
+memcheck/tests/sized_aligned_new_delete_misaligned3
+memcheck/tests/sized_aligned_new_delete_misaligned3_xml
+memcheck/tests/sized_delete
+none/tests/bigcode
diff --git a/meta/recipes-devtools/valgrind/valgrind/remove-for-all b/meta/recipes-devtools/valgrind/valgrind/remove-for-all
index cb8d10b18f..8435a6d41d 100644
--- a/meta/recipes-devtools/valgrind/valgrind/remove-for-all
+++ b/meta/recipes-devtools/valgrind/valgrind/remove-for-all
@@ -1,8 +1,29 @@
+cachegrind/tests/wrap5
+drd/tests/boost_thread
none/tests/amd64/fb_test_amd64
+none/tests/tls
gdbserver_tests/hginfo
+gdbserver_tests/mcinvokeRU
+memcheck/tests/linux/dlclose_leak-no-keep
+memcheck/tests/linux/dlclose_leak
+memcheck/tests/linux/timerfd-syscall
memcheck/tests/supp_unknown
+memcheck/tests/wrap1
+memcheck/tests/wrap2
+memcheck/tests/wrap3
+memcheck/tests/wrap4
+memcheck/tests/wrap5
+memcheck/tests/wrap6
+memcheck/tests/wrap7
+memcheck/tests/wrap8
helgrind/tests/tls_threads
+helgrind/tests/pth_mempcpy_false_races
+drd/tests/bar_bad
drd/tests/bar_bad_xml
drd/tests/pth_barrier_thr_cr
+drd/tests/std_thread2
drd/tests/thread_name_xml
massif/tests/deep-D
+massif/tests/bug469146
+massif/tests/new-cpp
+massif/tests/overloaded-new
diff --git a/meta/recipes-devtools/valgrind/valgrind/use-appropriate-march-mcpu-mfpu-for-ARM-test-apps.patch b/meta/recipes-devtools/valgrind/valgrind/use-appropriate-march-mcpu-mfpu-for-ARM-test-apps.patch
index adea405213..f15d04b173 100644
--- a/meta/recipes-devtools/valgrind/valgrind/use-appropriate-march-mcpu-mfpu-for-ARM-test-apps.patch
+++ b/meta/recipes-devtools/valgrind/valgrind/use-appropriate-march-mcpu-mfpu-for-ARM-test-apps.patch
@@ -15,7 +15,7 @@ over-ride that).
See similar cases in none/tests/arm/Makefile.am
-Upstream-Status: Pending
+Upstream-Status: Submitted [https://bugs.kde.org/show_bug.cgi?id=454346]
Signed-off-by: Andre McCurdy <armccurdy@gmail.com>
---
diff --git a/meta/recipes-devtools/valgrind/valgrind_3.19.0.bb b/meta/recipes-devtools/valgrind/valgrind_3.19.0.bb
deleted file mode 100644
index 6e3234be6b..0000000000
--- a/meta/recipes-devtools/valgrind/valgrind_3.19.0.bb
+++ /dev/null
@@ -1,255 +0,0 @@
-SUMMARY = "Valgrind memory debugger and instrumentation framework"
-HOMEPAGE = "http://valgrind.org/"
-DESCRIPTION = "Valgrind is an instrumentation framework for building dynamic analysis tools. There are Valgrind tools that can automatically detect many memory management and threading bugs, and profile your programs in detail."
-BUGTRACKER = "http://valgrind.org/support/bug_reports.html"
-LICENSE = "GPL-2.0-only & GPL-2.0-or-later & BSD-3-Clause"
-LIC_FILES_CHKSUM = "file://COPYING;md5=b234ee4d69f5fce4486a80fdaf4a4263 \
- file://include/pub_tool_basics.h;beginline=6;endline=29;md5=41c410e8d3f305aee7aaa666b2e4f366 \
- file://include/valgrind.h;beginline=1;endline=56;md5=ad3b317f3286b6b704575d9efe6ca5df \
- file://COPYING.DOCS;md5=24ea4c7092233849b4394699333b5c56"
-
-DEPENDS = " \
- ${@bb.utils.contains('DISTRO_FEATURES', 'ptest', 'boost', '', d)} \
- "
-
-SRC_URI = "https://sourceware.org/pub/valgrind/valgrind-${PV}.tar.bz2 \
- file://fixed-perl-path.patch \
- file://Added-support-for-PPC-instructions-mfatbu-mfatbl.patch \
- file://run-ptest \
- file://remove-for-aarch64 \
- file://remove-for-all \
- file://taskset_nondeterministic_tests \
- file://0005-Modify-vg_test-wrapper-to-support-PTEST-formats.patch \
- file://use-appropriate-march-mcpu-mfpu-for-ARM-test-apps.patch \
- file://avoid-neon-for-targets-which-don-t-support-it.patch \
- file://valgrind-make-ld-XXX.so-strlen-intercept-optional.patch \
- file://0001-makefiles-Drop-setting-mcpu-to-cortex-a8-on-arm-arch.patch \
- file://0001-sigqueue-Rename-_sifields-to-__si_fields-on-musl.patch \
- file://0002-context-APIs-are-not-available-on-musl.patch \
- file://0003-correct-include-directive-path-for-config.h.patch \
- file://0001-memcheck-arm64-Define-__THROW-if-not-already-defined.patch \
- file://0002-memcheck-x86-Define-__THROW-if-not-defined.patch \
- file://0003-tests-seg_override-Replace-__modify_ldt-with-syscall.patch \
- file://0001-fix-opcode-not-supported-on-mips32-linux.patch \
- file://0001-Make-local-functions-static-to-avoid-assembler-error.patch \
- file://0001-Return-a-valid-exit_code-from-vg_regtest.patch \
- file://0001-valgrind-filter_xml_frames-do-not-filter-usr.patch \
- file://0001-memcheck-vgtests-remove-fullpath-after-flags.patch \
- file://s390x_vec_op_t.patch \
- file://0001-none-tests-fdleak_cmsg.stderr.exp-adjust-tmp-paths.patch \
- file://0001-memcheck-tests-Fix-timerfd-syscall-test.patch \
- file://0001-docs-Disable-manual-validation.patch \
- file://0001-Fix-drd-tests-shared_timed_mutex.cpp.patch \
- "
-SRC_URI[sha256sum] = "dd5e34486f1a483ff7be7300cc16b4d6b24690987877c3278d797534d6738f02"
-UPSTREAM_CHECK_REGEX = "valgrind-(?P<pver>\d+(\.\d+)+)\.tar"
-
-COMPATIBLE_HOST = '(i.86|x86_64|arm|aarch64|mips|powerpc|powerpc64).*-linux'
-
-# patch 0001-memcheck-vgtests-remove-fullpath-after-flags.patch removes relative path
-# argument. Change expected stderr files accordingly.
-do_patch:append() {
- bb.build.exec_func('do_sed_paths', d)
-}
-
-do_sed_paths() {
- sed -i -e 's|tests/||' ${S}/memcheck/tests/badfree3.stderr.exp
- sed -i -e 's|tests/||' ${S}/memcheck/tests/varinfo5.stderr.exp
-}
-
-# valgrind supports armv7 and above
-COMPATIBLE_HOST:armv4 = 'null'
-COMPATIBLE_HOST:armv5 = 'null'
-COMPATIBLE_HOST:armv6 = 'null'
-
-# valgrind fails with powerpc soft-float
-COMPATIBLE_HOST:powerpc = "${@bb.utils.contains('TARGET_FPU', 'soft', 'null', '.*-linux', d)}"
-
-# X32 isn't supported by valgrind at this time
-COMPATIBLE_HOST:linux-gnux32 = 'null'
-COMPATIBLE_HOST:linux-muslx32 = 'null'
-
-# Disable for some MIPS variants
-COMPATIBLE_HOST:mipsarchr6 = 'null'
-COMPATIBLE_HOST:linux-gnun32 = 'null'
-
-# Disable for powerpc64 with musl
-COMPATIBLE_HOST:libc-musl:powerpc64 = 'null'
-
-# brokenseip is unfortunately required by ptests to pass
-inherit autotools-brokensep ptest multilib_header
-
-EXTRA_OECONF = "--enable-tls --without-mpicc"
-EXTRA_OECONF += "${@['--enable-only32bit','--enable-only64bit'][d.getVar('SITEINFO_BITS') != '32']}"
-
-# valgrind checks host_cpu "armv7*)", so we need to over-ride the autotools.bbclass default --host option
-EXTRA_OECONF:append:arm = " --host=armv7${HOST_VENDOR}-${HOST_OS}"
-
-EXTRA_OEMAKE = "-w"
-
-CACHED_CONFIGUREVARS += "ac_cv_path_PERL='/usr/bin/env perl'"
-
-# valgrind likes to control its own optimisation flags. It generally defaults
-# to -O2 but uses -O0 for some specific test apps etc. Passing our own flags
-# (via CFLAGS) means we interfere with that. Only pass DEBUG_FLAGS to it
-# which fixes build path issue in DWARF.
-SELECTED_OPTIMIZATION = "${DEBUG_FLAGS}"
-
-do_configure:prepend () {
- rm -rf ${S}/config.h
- sed -i -e 's:$(abs_top_builddir):$(pkglibdir)/ptest:g' ${S}/none/tests/Makefile.am
- sed -i -e 's:$(top_builddir):$(pkglibdir)/ptest:g' ${S}/memcheck/tests/Makefile.am
-}
-
-do_install:append () {
- install -m 644 ${B}/default.supp ${D}/${libexecdir}/valgrind/
- oe_multilib_header valgrind/config.h
-}
-
-VALGRINDARCH ?= "${TARGET_ARCH}"
-VALGRINDARCH:aarch64 = "arm64"
-VALGRINDARCH:x86-64 = "amd64"
-VALGRINDARCH:x86 = "x86"
-VALGRINDARCH:mips = "mips32"
-VALGRINDARCH:mipsel = "mips32"
-VALGRINDARCH:mips64el = "mips64"
-VALGRINDARCH:powerpc = "ppc"
-VALGRINDARCH:powerpc64 = "ppc64"
-VALGRINDARCH:powerpc64le = "ppc64le"
-
-INHIBIT_PACKAGE_STRIP_FILES = "${PKGD}${libexecdir}/valgrind/vgpreload_memcheck-${VALGRINDARCH}-linux.so"
-
-RDEPENDS:${PN} += "perl"
-
-# valgrind needs debug information for ld.so at runtime in order to
-# redirect functions like strlen.
-RRECOMMENDS:${PN} += "${TCLIBC}-dbg"
-
-RDEPENDS:${PN}-ptest += " bash coreutils curl file \
- gdb libgomp \
- perl \
- perl-module-file-basename perl-module-file-glob perl-module-getopt-long \
- perl-module-overloading perl-module-cwd perl-module-ipc-open3 \
- perl-module-carp perl-module-symbol \
- procps sed ${PN}-dbg ${PN}-src ${TCLIBC}-src gcc-runtime-dbg \
- util-linux-taskset"
-RDEPENDS:${PN}-ptest:append:libc-glibc = " glibc-utils"
-
-# One of the tests contains a bogus interpreter path on purpose.
-# Skip file dependency check
-SKIP_FILEDEPS:${PN}-ptest = '1'
-INSANE_SKIP:${PN}-ptest = "debug-deps"
-
-do_compile_ptest() {
- oe_runmake ${PARALLEL_MAKE} check
-}
-
-
-do_install_ptest() {
- chmod +x ${B}/tests/vg_regtest
-
- # The test application binaries are not automatically installed.
- # Grab them from the build directory.
- #
- # The regression tests require scripts and data files that are not
- # copied to the build directory. They must be copied from the
- # source directory.
- saved_dir=$PWD
- for parent_dir in ${S} ${B} ; do
- cd $parent_dir
-
- subdirs=" \
- .in_place \
- cachegrind/tests \
- callgrind/tests \
- dhat/tests \
- drd/tests \
- gdbserver_tests \
- helgrind/tests \
- lackey/tests \
- massif/tests \
- memcheck/tests \
- none/tests \
- tests \
- exp-bbv/tests \
- "
- # Get the vg test scripts, filters, and expected files
- for dir in $subdirs ; do
- find $dir | cpio -pvdu ${D}${PTEST_PATH}
- done
- cd $saved_dir
- done
-
- # The scripts reference config.h so add it to the top ptest dir.
- cp ${B}/config.h ${D}${PTEST_PATH}
- install -D ${WORKDIR}/remove-for-aarch64 ${D}${PTEST_PATH}
- install -D ${WORKDIR}/remove-for-all ${D}${PTEST_PATH}
- install -D ${WORKDIR}/taskset_nondeterministic_tests ${D}${PTEST_PATH}
-
- # Add an executable need by none/tests/bigcode
- mkdir ${D}${PTEST_PATH}/perf
- cp ${B}/perf/bigcode ${D}${PTEST_PATH}/perf
-
- # Add an executable needed by memcheck/tests/vcpu_bz2
- cp ${B}/perf/bz2 ${D}${PTEST_PATH}/perf
-
- # Make the ptest dir look like the top level valgrind src dir
- # This is checked by the gdbserver_tests/make_local_links script
- mkdir ${D}${PTEST_PATH}/coregrind
- cp ${B}/coregrind/vgdb ${D}${PTEST_PATH}/coregrind
-
- # Add an executable needed by massif tests
- cp ${B}/massif/ms_print ${D}${PTEST_PATH}/massif/ms_print
-
- find ${D}${PTEST_PATH} \
- \( \
- -name "Makefile*" \
- -o -name "*.o" \
- \) \
- -exec rm {} \;
-
- # These files need to be newer so touch them.
- touch ${D}${PTEST_PATH}/cachegrind/tests/a.c -r ${D}${PTEST_PATH}/cachegrind/tests/cgout-test
-
- # find *_annotate in ${bindir} for yocto build
- sed -i s:\.\./\.\./cachegrind/cg_annotate:${bindir}/cg_annotate: ${D}${PTEST_PATH}/cachegrind/tests/ann1.vgtest
- sed -i s:\.\./\.\./cachegrind/cg_annotate:${bindir}/cg_annotate: ${D}${PTEST_PATH}/cachegrind/tests/ann2.vgtest
-
- sed -i s:\.\./\.\./callgrind/callgrind_annotate:${bindir}/callgrind_annotate: ${D}${PTEST_PATH}/callgrind/tests/ann1.vgtest
- sed -i s:\.\./\.\./callgrind/callgrind_annotate:${bindir}/callgrind_annotate: ${D}${PTEST_PATH}/callgrind/tests/ann2.vgtest
-
- # point the expanded @abs_top_builddir@ of the host to PTEST_PATH
- sed -i s:${S}:${PTEST_PATH}:g \
- ${D}${PTEST_PATH}/memcheck/tests/linux/debuginfod-check.vgtest
-
- # handle multilib
- sed -i s:@libdir@:${libdir}:g ${D}${PTEST_PATH}/run-ptest
- sed -i s:@libexecdir@:${libexecdir}:g ${D}${PTEST_PATH}/run-ptest
- sed -i s:@bindir@:${bindir}:g ${D}${PTEST_PATH}/run-ptest
-
- # This test fails on the host as well, using both 3.15 and git master (as of Jan 24 2020)
- # https://bugs.kde.org/show_bug.cgi?id=402833
- rm ${D}${PTEST_PATH}/memcheck/tests/overlap.vgtest
-
- # This is known failure see https://bugs.kde.org/show_bug.cgi?id=435732
- rm ${D}${PTEST_PATH}/memcheck/tests/leak_cpp_interior.vgtest
-
- # https://bugs.kde.org/show_bug.cgi?id=445743
- rm ${D}${PTEST_PATH}/drd/tests/pth_mutex_signal
-
- # As the binary isn't stripped or debug-splitted, the source file isn't fetched
- # via dwarfsrcfiles either, so it needs to be installed manually.
- mkdir -p ${D}/usr/src/debug/${PN}/${EXTENDPE}${PV}-${PR}/${BP}/none/tests/
- install ${S}/none/tests/tls.c ${D}/usr/src/debug/${PN}/${EXTENDPE}${PV}-${PR}/${BP}/none/tests/
-}
-
-# avoid stripping some generated binaries otherwise some of the tests will fail
-# run-strip-reloc.sh, run-strip-strmerge.sh and so on will fail
-INHIBIT_PACKAGE_STRIP_FILES += "\
- ${PKGD}${PTEST_PATH}/none/tests/tls \
- ${PKGD}${PTEST_PATH}/none/tests/tls.so \
- ${PKGD}${PTEST_PATH}/none/tests/tls2.so \
- ${PKGD}${PTEST_PATH}/helgrind/tests/tc09_bad_unlock \
- ${PKGD}${PTEST_PATH}/memcheck/tests/manuel1 \
- ${PKGD}${PTEST_PATH}/drd/tests/pth_detached3 \
-"
diff --git a/meta/recipes-devtools/valgrind/valgrind_3.22.0.bb b/meta/recipes-devtools/valgrind/valgrind_3.22.0.bb
new file mode 100644
index 0000000000..563d99f0e2
--- /dev/null
+++ b/meta/recipes-devtools/valgrind/valgrind_3.22.0.bb
@@ -0,0 +1,276 @@
+SUMMARY = "Valgrind memory debugger and instrumentation framework"
+HOMEPAGE = "http://valgrind.org/"
+DESCRIPTION = "Valgrind is an instrumentation framework for building dynamic analysis tools. There are Valgrind tools that can automatically detect many memory management and threading bugs, and profile your programs in detail."
+BUGTRACKER = "http://valgrind.org/support/bug_reports.html"
+LICENSE = "GPL-2.0-only & GPL-2.0-or-later & BSD-3-Clause"
+LIC_FILES_CHKSUM = "file://COPYING;md5=b234ee4d69f5fce4486a80fdaf4a4263 \
+ file://include/pub_tool_basics.h;beginline=6;endline=29;md5=41c410e8d3f305aee7aaa666b2e4f366 \
+ file://include/valgrind.h;beginline=1;endline=56;md5=ad3b317f3286b6b704575d9efe6ca5df \
+ file://COPYING.DOCS;md5=24ea4c7092233849b4394699333b5c56"
+
+DEPENDS = " \
+ ${@bb.utils.contains('DISTRO_FEATURES', 'ptest', 'boost', '', d)} \
+ "
+
+SRC_URI = "https://sourceware.org/pub/valgrind/valgrind-${PV}.tar.bz2 \
+ file://fixed-perl-path.patch \
+ file://Added-support-for-PPC-instructions-mfatbu-mfatbl.patch \
+ file://run-ptest \
+ file://remove-for-aarch64 \
+ file://remove-for-all \
+ file://taskset_nondeterministic_tests \
+ file://0005-Modify-vg_test-wrapper-to-support-PTEST-formats.patch \
+ file://use-appropriate-march-mcpu-mfpu-for-ARM-test-apps.patch \
+ file://avoid-neon-for-targets-which-don-t-support-it.patch \
+ file://valgrind-make-ld-XXX.so-strlen-intercept-optional.patch \
+ file://0001-makefiles-Drop-setting-mcpu-to-cortex-a8-on-arm-arch.patch \
+ file://0001-sigqueue-Rename-_sifields-to-__si_fields-on-musl.patch \
+ file://0003-correct-include-directive-path-for-config.h.patch \
+ file://0001-Return-a-valid-exit_code-from-vg_regtest.patch \
+ file://0001-valgrind-filter_xml_frames-do-not-filter-usr.patch \
+ file://0001-memcheck-vgtests-remove-fullpath-after-flags.patch \
+ file://s390x_vec_op_t.patch \
+ file://0001-none-tests-fdleak_cmsg.stderr.exp-adjust-tmp-paths.patch \
+ file://0001-memcheck-tests-Fix-timerfd-syscall-test.patch \
+ file://0001-docs-Disable-manual-validation.patch \
+ file://0001-valgrind-monitor.py-regular-expressions-should-use-r.patch \
+ file://0002-Bug-476548-valgrind-3.22.0-fails-on-assertion-when-l.patch \
+ file://0003-Add-fchmodat2-syscall-on-linux.patch \
+ file://0004-Bug-478624-Valgrind-incompatibility-with-binutils-2..patch \
+ "
+SRC_URI[sha256sum] = "c811db5add2c5f729944caf47c4e7a65dcaabb9461e472b578765dd7bf6d2d4c"
+UPSTREAM_CHECK_REGEX = "valgrind-(?P<pver>\d+(\.\d+)+)\.tar"
+
+COMPATIBLE_HOST = '(i.86|x86_64|arm|aarch64|mips|powerpc|powerpc64).*-linux'
+
+# patch 0001-memcheck-vgtests-remove-fullpath-after-flags.patch removes relative path
+# argument. Change expected stderr files accordingly.
+do_patch:append() {
+ bb.build.exec_func('do_sed_paths', d)
+}
+
+do_sed_paths() {
+ sed -i -e 's|tests/||' ${S}/memcheck/tests/badfree3.stderr.exp
+ sed -i -e 's|tests/||' ${S}/memcheck/tests/varinfo5.stderr.exp
+}
+
+# valgrind supports armv7 and above
+COMPATIBLE_HOST:armv4 = 'null'
+COMPATIBLE_HOST:armv5 = 'null'
+COMPATIBLE_HOST:armv6 = 'null'
+
+# valgrind fails with powerpc soft-float
+COMPATIBLE_HOST:powerpc = "${@bb.utils.contains('TARGET_FPU', 'soft', 'null', '.*-linux', d)}"
+
+# X32 isn't supported by valgrind at this time
+COMPATIBLE_HOST:linux-gnux32 = 'null'
+COMPATIBLE_HOST:linux-muslx32 = 'null'
+
+# Disable for some MIPS variants
+COMPATIBLE_HOST:mipsarchr6 = 'null'
+COMPATIBLE_HOST:linux-gnun32 = 'null'
+
+# Disable for powerpc64 with musl
+COMPATIBLE_HOST:libc-musl:powerpc64 = 'null'
+
+# brokenseip is unfortunately required by ptests to pass
+inherit autotools-brokensep ptest multilib_header
+
+EXTRA_OECONF = "--enable-tls --without-mpicc"
+EXTRA_OECONF += "${@['--enable-only32bit','--enable-only64bit'][d.getVar('SITEINFO_BITS') != '32']}"
+
+# valgrind checks host_cpu "armv7*)", so we need to over-ride the autotools.bbclass default --host option
+EXTRA_OECONF:append:arm = " --host=armv7${HOST_VENDOR}-${HOST_OS}"
+
+EXTRA_OEMAKE = "-w"
+
+CACHED_CONFIGUREVARS += "ac_cv_path_PERL='/usr/bin/env perl'"
+
+# valgrind likes to control its own optimisation flags. It generally defaults
+# to -O2 but uses -O0 for some specific test apps etc. Passing our own flags
+# (via CFLAGS) means we interfere with that. Only pass DEBUG_FLAGS to it
+# which fixes build path issue in DWARF.
+SELECTED_OPTIMIZATION = "${DEBUG_FLAGS}"
+
+# Split out various helper scripts to separate packages to avoid the
+# main package depending on perl and python.
+PACKAGES =+ "${PN}-cachegrind ${PN}-massif ${PN}-callgrind"
+
+FILES:${PN}-cachegrind = "${bindir}/cg_*"
+FILES:${PN}-massif = "${bindir}/ms_*"
+FILES:${PN}-callgrind = "${bindir}/callgrind_*"
+
+RDEPENDS:${PN}-cachegrind = "${PN} python3-core"
+RDEPENDS:${PN}-massif = "${PN} perl"
+RDEPENDS:${PN}-callgrind = "${PN} perl"
+
+do_configure:prepend () {
+ rm -rf ${S}/config.h
+ sed -i -e 's:$(abs_top_builddir):$(pkglibdir)/ptest:g' ${S}/none/tests/Makefile.am
+ sed -i -e 's:$(top_builddir):$(pkglibdir)/ptest:g' ${S}/memcheck/tests/Makefile.am
+}
+
+do_install:append () {
+ install -m 644 ${B}/default.supp ${D}/${libexecdir}/valgrind/
+ oe_multilib_header valgrind/config.h
+}
+
+VALGRINDARCH ?= "${TARGET_ARCH}"
+VALGRINDARCH:aarch64 = "arm64"
+VALGRINDARCH:x86-64 = "amd64"
+VALGRINDARCH:x86 = "x86"
+VALGRINDARCH:mips = "mips32"
+VALGRINDARCH:mipsel = "mips32"
+VALGRINDARCH:mips64el = "mips64"
+VALGRINDARCH:powerpc = "ppc"
+VALGRINDARCH:powerpc64 = "ppc64"
+VALGRINDARCH:powerpc64le = "ppc64le"
+
+INHIBIT_PACKAGE_STRIP_FILES = "${PKGD}${libexecdir}/valgrind/vgpreload_memcheck-${VALGRINDARCH}-linux.so"
+
+# valgrind needs debug information for ld.so at runtime in order to
+# redirect functions like strlen.
+RRECOMMENDS:${PN} += "${TCLIBC}-dbg"
+
+RDEPENDS:${PN}-ptest += " bash coreutils curl file \
+ gdb \
+ ${TCLIBC}-src gcc-runtime-dbg \
+ libgomp \
+ perl \
+ perl-module-file-basename perl-module-file-glob perl-module-getopt-long \
+ perl-module-overloading perl-module-cwd perl-module-ipc-open3 \
+ perl-module-carp perl-module-symbol \
+ procps \
+ python3-compile \
+ sed \
+ util-linux-taskset \
+ ${PN}-dbg ${PN}-src \
+ ${PN}-cachegrind ${PN}-massif ${PN}-callgrind \
+"
+RDEPENDS:${PN}-ptest:append:libc-glibc = " glibc-utils glibc-gconv-utf-32"
+
+# One of the tests contains a bogus interpreter path on purpose.
+# Skip file dependency check
+SKIP_FILEDEPS:${PN}-ptest = '1'
+INSANE_SKIP:${PN}-ptest = "debug-deps"
+
+do_compile_ptest() {
+ oe_runmake check
+}
+
+
+do_install_ptest() {
+ chmod +x ${B}/tests/vg_regtest
+
+ # The test application binaries are not automatically installed.
+ # Grab them from the build directory.
+ #
+ # The regression tests require scripts and data files that are not
+ # copied to the build directory. They must be copied from the
+ # source directory.
+ saved_dir=$PWD
+ for parent_dir in ${S} ${B} ; do
+ cd $parent_dir
+
+ subdirs=" \
+ .in_place \
+ cachegrind/tests \
+ callgrind/tests \
+ dhat/tests \
+ drd/tests \
+ gdbserver_tests \
+ helgrind/tests \
+ lackey/tests \
+ massif/tests \
+ memcheck/tests \
+ none/tests \
+ tests \
+ exp-bbv/tests \
+ "
+ # Get the vg test scripts, filters, and expected files
+ for dir in $subdirs ; do
+ find $dir | cpio -pvdu ${D}${PTEST_PATH}
+ done
+ cd $saved_dir
+ done
+
+ # The scripts reference config.h so add it to the top ptest dir.
+ cp ${B}/config.h ${D}${PTEST_PATH}
+ install -D ${WORKDIR}/remove-for-aarch64 ${D}${PTEST_PATH}
+ install -D ${WORKDIR}/remove-for-all ${D}${PTEST_PATH}
+ install -D ${WORKDIR}/taskset_nondeterministic_tests ${D}${PTEST_PATH}
+
+ # Add an executable need by none/tests/bigcode
+ mkdir ${D}${PTEST_PATH}/perf
+ cp ${B}/perf/bigcode ${D}${PTEST_PATH}/perf
+
+ # Add an executable needed by memcheck/tests/vcpu_bz2
+ cp ${B}/perf/bz2 ${D}${PTEST_PATH}/perf
+
+ # Make the ptest dir look like the top level valgrind src dir
+ # This is checked by the gdbserver_tests/make_local_links script
+ mkdir ${D}${PTEST_PATH}/coregrind
+ cp ${B}/coregrind/vgdb ${D}${PTEST_PATH}/coregrind
+
+ # Add an executable needed by massif tests
+ cp ${B}/massif/ms_print ${D}${PTEST_PATH}/massif/ms_print
+
+ find ${D}${PTEST_PATH} \
+ \( \
+ -name "Makefile*" \
+ -o -name "*.o" \
+ \) \
+ -exec rm {} \;
+
+ sed -i s:\.\./\.\./callgrind/callgrind_annotate:${bindir}/callgrind_annotate: ${D}${PTEST_PATH}/callgrind/tests/ann1.vgtest
+ sed -i s:\.\./\.\./callgrind/callgrind_annotate:${bindir}/callgrind_annotate: ${D}${PTEST_PATH}/callgrind/tests/ann2.vgtest
+
+ # point the expanded @abs_top_builddir@ of the host to PTEST_PATH
+ sed -i s:${S}:${PTEST_PATH}:g \
+ ${D}${PTEST_PATH}/memcheck/tests/linux/debuginfod-check.vgtest
+
+ # handle multilib
+ sed -i s:@libdir@:${libdir}:g ${D}${PTEST_PATH}/run-ptest
+ sed -i s:@libexecdir@:${libexecdir}:g ${D}${PTEST_PATH}/run-ptest
+ sed -i s:@bindir@:${bindir}:g ${D}${PTEST_PATH}/run-ptest
+
+ # enable cachegrind ptests
+ ln -s ${bindir}/cg_annotate ${D}/${PTEST_PATH}/cachegrind/cg_annotate
+ ln -s ${bindir}/cg_diff ${D}/${PTEST_PATH}/cachegrind/cg_diff
+ ln -s ${bindir}/cg_merge ${D}/${PTEST_PATH}/cachegrind/cg_merge
+
+ # This test fails on the host as well, using both 3.15 and git master (as of Jan 24 2020)
+ # https://bugs.kde.org/show_bug.cgi?id=402833
+ rm ${D}${PTEST_PATH}/memcheck/tests/overlap.vgtest
+
+ # This is known failure see https://bugs.kde.org/show_bug.cgi?id=435732
+ rm ${D}${PTEST_PATH}/memcheck/tests/leak_cpp_interior.vgtest
+
+ # https://bugs.kde.org/show_bug.cgi?id=445743
+ rm ${D}${PTEST_PATH}/drd/tests/pth_mutex_signal
+
+ # As the binary isn't stripped or debug-splitted, the source file isn't fetched
+ # via dwarfsrcfiles either, so it needs to be installed manually.
+ mkdir -p ${D}${TARGET_DBGSRC_DIR}/none/tests/
+ install ${S}/none/tests/tls.c ${D}${TARGET_DBGSRC_DIR}/none/tests/
+}
+
+do_install_ptest:append:x86-64 () {
+ # https://bugs.kde.org/show_bug.cgi?id=463456
+ rm ${D}${PTEST_PATH}/memcheck/tests/origin6-fp.vgtest
+ # https://bugs.kde.org/show_bug.cgi?id=463458
+ rm ${D}${PTEST_PATH}/memcheck/tests/vcpu_fnfns.vgtest
+ # https://bugs.kde.org/show_bug.cgi?id=463463
+ rm ${D}${PTEST_PATH}/none/tests/amd64/fma.vgtest
+}
+
+# avoid stripping some generated binaries otherwise some of the tests will fail
+# run-strip-reloc.sh, run-strip-strmerge.sh and so on will fail
+INHIBIT_PACKAGE_STRIP_FILES += "\
+ ${PKGD}${PTEST_PATH}/none/tests/tls \
+ ${PKGD}${PTEST_PATH}/none/tests/tls.so \
+ ${PKGD}${PTEST_PATH}/none/tests/tls2.so \
+ ${PKGD}${PTEST_PATH}/helgrind/tests/tc09_bad_unlock \
+ ${PKGD}${PTEST_PATH}/memcheck/tests/manuel1 \
+ ${PKGD}${PTEST_PATH}/drd/tests/pth_detached3 \
+"
diff --git a/meta/recipes-devtools/xmlto/xmlto-0.0.28/configure.in-drop-the-test-of-xmllint-and-xsltproc.patch b/meta/recipes-devtools/xmlto/xmlto-0.0.28/configure.in-drop-the-test-of-xmllint-and-xsltproc.patch
deleted file mode 100644
index 6d547a6c99..0000000000
--- a/meta/recipes-devtools/xmlto/xmlto-0.0.28/configure.in-drop-the-test-of-xmllint-and-xsltproc.patch
+++ /dev/null
@@ -1,30 +0,0 @@
-configure.in: drop the test of xmllint and xsltproc
-
-The test is unnecessary, the xmllint and xsltproc were explicitly
-added to RDEPENDS.
-
-Upstream-Status: Inappropriate
-Signed-off-by: Hongxu Jia <hongxu.jia@windriver.com>
----
- configure.in | 4 ++--
- 1 file changed, 2 insertions(+), 2 deletions(-)
-
-diff --git a/configure.in b/configure.in
---- a/configure.in
-+++ b/configure.in
-@@ -42,10 +42,10 @@ AC_ARG_VAR([LOCALE], [Name and path of the `locale' program.])
- AC_PATH_PROG([LOCALE], [locale], [locale])
-
- AC_ARG_VAR([XMLLINT], [Name and path of the `xmllint' program.])
--AC_PATH_PROG([XMLLINT], [xmllint], [xmllint])
-+dnl AC_PATH_PROG([XMLLINT], [xmllint], [xmllint])
-
- AC_ARG_VAR([XSLTPROC], [Name and path of the `xsltproc' program.])
--AC_PATH_PROG([XSLTPROC], [xsltproc], [xsltproc])
-+dnl AC_PATH_PROG([XSLTPROC], [xsltproc], [xsltproc])
-
- dnl
- dnl toolchains
---
-1.8.1.2
-
diff --git a/meta/recipes-devtools/xmlto/xmlto_0.0.28.bb b/meta/recipes-devtools/xmlto/xmlto_0.0.28.bb
index 5cb9a4c57b..d5a0e69849 100644
--- a/meta/recipes-devtools/xmlto/xmlto_0.0.28.bb
+++ b/meta/recipes-devtools/xmlto/xmlto_0.0.28.bb
@@ -6,17 +6,18 @@ LICENSE = "GPL-2.0-only"
LIC_FILES_CHKSUM = "file://COPYING;md5=59530bdf33659b29e73d4adb9f9f6552"
-SRC_URI = "https://releases.pagure.org/xmlto/xmlto-${PV}.tar.gz \
- file://configure.in-drop-the-test-of-xmllint-and-xsltproc.patch \
-"
-SRC_URI[md5sum] = "a1fefad9d83499a15576768f60f847c6"
-SRC_URI[sha256sum] = "2f986b7c9a0e9ac6728147668e776d405465284e13c74d4146c9cbc51fd8aad3"
+SRCREV = "6fa6a0e07644f20abf2596f78a60112713e11cbe"
+UPSTREAM_CHECK_COMMITS = "1"
+SRC_URI = "git://pagure.io/xmlto.git;protocol=https;branch=master"
+S = "${WORKDIR}/git"
+
+PV .= "+0.0.29+git"
inherit autotools
CLEANBROKEN = "1"
-DEPENDS = "libxml2-native"
+DEPENDS = "libxml2-native libxslt-native flex-native docbook-xml-dtd4-native docbook-xsl-stylesheets-native"
RDEPENDS:${PN} = "docbook-xml-dtd4 \
docbook-xsl-stylesheets \
@@ -30,11 +31,15 @@ RDEPENDS:${PN}:append:class-target = " \
libxslt-bin \
coreutils \
"
-CACHED_CONFIGUREVARS += "ac_cv_path_TAIL=tail ac_cv_path_GREP=grep"
+CACHED_CONFIGUREVARS += "ac_cv_path_TAIL=tail ac_cv_path_GREP=grep ac_cv_path_XMLLINT=xmllint ac_cv_path_XSLTPROC=xsltproc"
BBCLASSEXTEND = "native"
-EXTRA_OECONF:append = " BASH=/bin/bash GCP=/bin/cp XMLLINT=xmllint XSLTPROC=xsltproc"
+EXTRA_OECONF:append = " BASH=/bin/bash GCP=/bin/cp"
+
+do_configure:prepend() {
+ (cd ${S} && flex -o xmlif/xmlif.c xmlif/xmlif.l)
+}
do_install:append:class-native() {
create_wrapper ${D}${bindir}/xmlto XML_CATALOG_FILES=${sysconfdir}/xml/catalog
diff --git a/meta/recipes-example/rust-hello-world/rust-hello-world/0001-enable-LTO.patch b/meta/recipes-example/rust-hello-world/rust-hello-world/0001-enable-LTO.patch
deleted file mode 100644
index f319545ee1..0000000000
--- a/meta/recipes-example/rust-hello-world/rust-hello-world/0001-enable-LTO.patch
+++ /dev/null
@@ -1,24 +0,0 @@
-From fa40b874f6470ec11a8fd7b0c9909d0cdd2d6feb Mon Sep 17 00:00:00 2001
-From: Dan Callaghan <dan.callaghan@opengear.com>
-Date: Fri, 5 Feb 2021 08:56:34 +1000
-Subject: [PATCH] enable LTO
-
-Upstream-Status: Pending
----
- Cargo.toml | 3 +++
- 1 file changed, 3 insertions(+)
-
-diff --git a/Cargo.toml b/Cargo.toml
-index 7a2f6c8..cdb6b5d 100644
---- a/Cargo.toml
-+++ b/Cargo.toml
-@@ -3,3 +3,6 @@
- name = "rust-hello-world"
- version = "0.0.1"
- authors = ["Cody P Schafer <dev@codyps.com>"]
-+
-+[profile.release]
-+lto = true
---
-2.28.0
-
diff --git a/meta/recipes-example/rust-hello-world/rust-hello-world_git.bb b/meta/recipes-example/rust-hello-world/rust-hello-world_git.bb
deleted file mode 100644
index 1d91109b51..0000000000
--- a/meta/recipes-example/rust-hello-world/rust-hello-world_git.bb
+++ /dev/null
@@ -1,19 +0,0 @@
-inherit cargo
-
-SRC_URI = "git://github.com/meta-rust/rust-hello-world.git;protocol=https;branch=master"
-SRCREV="e0fa23f1a3cb1eb1407165bd2fc36d2f6e6ad728"
-LIC_FILES_CHKSUM="file://COPYRIGHT;md5=e6b2207ac3740d2d01141c49208c2147"
-
-SRC_URI += "\
- file://0001-enable-LTO.patch \
- "
-
-UPSTREAM_CHECK_COMMITS = "1"
-
-SUMMARY = "Hello World by Cargo for Rust"
-HOMEPAGE = "https://github.com/meta-rust/rust-hello-world"
-LICENSE = "MIT | Apache-2.0"
-
-S = "${WORKDIR}/git"
-
-BBCLASSEXTEND = "native"
diff --git a/meta/recipes-extended/acpica/acpica_20220331.bb b/meta/recipes-extended/acpica/acpica_20220331.bb
deleted file mode 100644
index 2c554f863a..0000000000
--- a/meta/recipes-extended/acpica/acpica_20220331.bb
+++ /dev/null
@@ -1,51 +0,0 @@
-SUMMARY = "ACPICA tools for the development and debug of ACPI tables"
-DESCRIPTION = "The ACPI Component Architecture (ACPICA) project provides an \
-OS-independent reference implementation of the Advanced Configuration and \
-Power Interface Specification (ACPI). ACPICA code contains those portions of \
-ACPI meant to be directly integrated into the host OS as a kernel-resident \
-subsystem, and a small set of tools to assist in developing and debugging \
-ACPI tables."
-
-HOMEPAGE = "http://www.acpica.org/"
-SECTION = "console/tools"
-
-LICENSE = "Intel | BSD-3-Clause | GPL-2.0-only"
-LIC_FILES_CHKSUM = "file://source/compiler/aslcompile.c;beginline=7;endline=150;md5=41a76b4b1f816240f090cf010fefebf0"
-
-COMPATIBLE_HOST = "(i.86|x86_64|arm|aarch64).*-linux"
-
-DEPENDS = "m4-native flex-native bison-native"
-
-SRC_URI = "https://acpica.org/sites/acpica/files/acpica-unix-${PV}.tar.gz"
-SRC_URI[sha256sum] = "acaff68b14f1e0804ebbfc4b97268a4ccbefcfa053b02ed9924f2b14d8a98e21"
-
-UPSTREAM_CHECK_URI = "https://acpica.org/downloads"
-
-S = "${WORKDIR}/acpica-unix-${PV}"
-
-inherit update-alternatives
-
-ALTERNATIVE_PRIORITY = "100"
-ALTERNATIVE:${PN} = "acpixtract acpidump"
-
-EXTRA_OEMAKE = "CC='${CC}' \
- OPT_CFLAGS=-Wall \
- DESTDIR=${D} \
- PREFIX=${prefix} \
- INSTALLDIR=${bindir} \
- INSTALLFLAGS= \
- YACC=bison \
- YFLAGS='-y --file-prefix-map=${WORKDIR}=/usr/src/debug/${PN}/${EXTENDPE}${PV}-${PR}' \
- "
-
-do_install() {
- oe_runmake install
-}
-
-# iasl*.bb is a subset of this recipe, so RREPLACE it
-PROVIDES = "iasl"
-RPROVIDES:${PN} += "iasl"
-RREPLACES:${PN} += "iasl"
-RCONFLICTS:${PN} += "iasl"
-
-BBCLASSEXTEND = "native"
diff --git a/meta/recipes-extended/acpica/acpica_20240322.bb b/meta/recipes-extended/acpica/acpica_20240322.bb
new file mode 100644
index 0000000000..90e3599d32
--- /dev/null
+++ b/meta/recipes-extended/acpica/acpica_20240322.bb
@@ -0,0 +1,49 @@
+SUMMARY = "ACPICA tools for the development and debug of ACPI tables"
+DESCRIPTION = "The ACPI Component Architecture (ACPICA) project provides an \
+OS-independent reference implementation of the Advanced Configuration and \
+Power Interface Specification (ACPI). ACPICA code contains those portions of \
+ACPI meant to be directly integrated into the host OS as a kernel-resident \
+subsystem, and a small set of tools to assist in developing and debugging \
+ACPI tables."
+
+HOMEPAGE = "https://www.intel.com/content/www/us/en/developer/topic-technology/open/acpica/overview.html"
+SECTION = "console/tools"
+
+LICENSE = "Intel | BSD-3-Clause | GPL-2.0-only"
+LIC_FILES_CHKSUM = "file://source/compiler/aslcompile.c;beginline=7;endline=150;md5=05eb845b15a27440410f456adc2ed082"
+
+COMPATIBLE_HOST = "(i.86|x86_64|arm|aarch64).*-linux"
+
+DEPENDS = "m4-native flex-native bison-native"
+
+SRC_URI = "git://github.com/acpica/acpica;protocol=https;branch=master"
+SRCREV = "170fc3076a86777077637f10b05c32ac21ac13aa"
+
+S = "${WORKDIR}/git"
+
+inherit update-alternatives
+
+ALTERNATIVE_PRIORITY = "100"
+ALTERNATIVE:${PN} = "acpixtract acpidump"
+
+EXTRA_OEMAKE = "CC='${CC}' \
+ OPT_CFLAGS=-Wall \
+ DESTDIR=${D} \
+ PREFIX=${prefix} \
+ INSTALLDIR=${bindir} \
+ INSTALLFLAGS= \
+ YACC=bison \
+ YFLAGS='-y --file-prefix-map=${WORKDIR}=${TARGET_DBGSRC_DIR}' \
+ "
+
+do_install() {
+ oe_runmake install
+}
+
+# iasl*.bb is a subset of this recipe, so RREPLACE it
+PROVIDES = "iasl"
+RPROVIDES:${PN} += "iasl"
+RREPLACES:${PN} += "iasl"
+RCONFLICTS:${PN} += "iasl"
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-extended/asciidoc/asciidoc_10.1.4.bb b/meta/recipes-extended/asciidoc/asciidoc_10.1.4.bb
deleted file mode 100644
index 4ab9edbce4..0000000000
--- a/meta/recipes-extended/asciidoc/asciidoc_10.1.4.bb
+++ /dev/null
@@ -1,28 +0,0 @@
-SUMMARY = "Tool for creating HTML, PDF, EPUB, man pages"
-DESCRIPTION = "AsciiDoc is a text document format for writing short documents, \
-articles, books and UNIX man pages."
-
-HOMEPAGE = "http://asciidoc.org/"
-
-LICENSE = "GPL-2.0-only"
-LIC_FILES_CHKSUM = "file://COPYRIGHT;md5=aaee33adce0fc7cc40fee23f82f7f101 \
- file://LICENSE;md5=b234ee4d69f5fce4486a80fdaf4a4263 \
- "
-
-SRC_URI = "git://github.com/asciidoc/asciidoc-py;protocol=https;branch=main"
-SRCREV = "c724bbf95b1840b04bb298a61a72b6a5ea94c2ef"
-
-DEPENDS = "libxml2-native libxslt-native docbook-xml-dtd4-native docbook-xsl-stylesheets-native"
-
-S = "${WORKDIR}/git"
-
-# Tell xmllint where to find the DocBook XML catalogue, because right now it
-# opens /etc/xml/catalog on the host. Depends on auto-catalogs.patch
-export SGML_CATALOG_FILES="file://${STAGING_ETCDIR_NATIVE}/xml/catalog"
-
-inherit setuptools3
-CLEANBROKEN = "1"
-
-BBCLASSEXTEND = "native nativesdk"
-
-UPSTREAM_CHECK_GITTAGREGEX = "(?P<pver>(\d+(\.\d+)+))$"
diff --git a/meta/recipes-extended/asciidoc/asciidoc_10.2.0.bb b/meta/recipes-extended/asciidoc/asciidoc_10.2.0.bb
new file mode 100644
index 0000000000..e112eb513d
--- /dev/null
+++ b/meta/recipes-extended/asciidoc/asciidoc_10.2.0.bb
@@ -0,0 +1,28 @@
+SUMMARY = "Tool for creating HTML, PDF, EPUB, man pages"
+DESCRIPTION = "AsciiDoc is a text document format for writing short documents, \
+articles, books and UNIX man pages."
+
+HOMEPAGE = "http://asciidoc.org/"
+
+LICENSE = "GPL-2.0-only"
+LIC_FILES_CHKSUM = "file://COPYRIGHT;md5=aaee33adce0fc7cc40fee23f82f7f101 \
+ file://LICENSE;md5=b234ee4d69f5fce4486a80fdaf4a4263 \
+ "
+
+SRC_URI = "git://github.com/asciidoc/asciidoc-py;protocol=https;branch=main"
+SRCREV = "545b79b8d7dae70d12bf0657359bdd36de0c5c26"
+
+DEPENDS = "libxml2-native libxslt-native docbook-xml-dtd4-native docbook-xsl-stylesheets-native"
+
+S = "${WORKDIR}/git"
+
+# Tell xmllint where to find the DocBook XML catalogue, because right now it
+# opens /etc/xml/catalog on the host. Depends on auto-catalogs.patch
+export SGML_CATALOG_FILES="file://${STAGING_ETCDIR_NATIVE}/xml/catalog"
+
+inherit setuptools3
+CLEANBROKEN = "1"
+
+BBCLASSEXTEND = "native nativesdk"
+
+UPSTREAM_CHECK_GITTAGREGEX = "(?P<pver>(\d+(\.\d+)+))$"
diff --git a/meta/recipes-extended/at/at_3.2.5.bb b/meta/recipes-extended/at/at_3.2.5.bb
index 6769eb364b..c0c876a644 100644
--- a/meta/recipes-extended/at/at_3.2.5.bb
+++ b/meta/recipes-extended/at/at_3.2.5.bb
@@ -22,7 +22,7 @@ PAM_DEPS = "libpam libpam-runtime pam-plugin-env pam-plugin-limits"
RCONFLICTS:${PN} = "atd"
RREPLACES:${PN} = "atd"
-SRC_URI = "http://software.calhariz.com/at/${BPN}_${PV}.orig.tar.gz \
+SRC_URI = "${DEBIAN_MIRROR}/main/a/at/${BPN}_${PV}.orig.tar.gz \
file://posixtm.c \
file://posixtm.h \
file://file_replacement_with_gplv2.patch \
@@ -52,8 +52,10 @@ INITSCRIPT_PARAMS = "defaults"
SYSTEMD_SERVICE:${PN} = "atd.service"
-do_configure:prepend() {
- cp -f ${WORKDIR}/posixtm.[ch] ${S}
+do_patch[postfuncs] += "copy_posix_files"
+
+copy_posix_files() {
+ cp -f ${WORKDIR}/posixtm.[ch] ${S}
}
do_install () {
diff --git a/meta/recipes-extended/baremetal-example/baremetal-helloworld_git.bb b/meta/recipes-extended/baremetal-example/baremetal-helloworld_git.bb
new file mode 100644
index 0000000000..6832ccc541
--- /dev/null
+++ b/meta/recipes-extended/baremetal-example/baremetal-helloworld_git.bb
@@ -0,0 +1,72 @@
+SUMMARY = "Baremetal examples to work with the several QEMU architectures supported on OpenEmbedded"
+HOMEPAGE = "https://github.com/aehs29/baremetal-helloqemu"
+DESCRIPTION = "These are introductory examples to showcase the use of QEMU to run baremetal applications."
+LICENSE = "MIT"
+LIC_FILES_CHKSUM = "file://LICENSE;md5=39346640a23c701e4f459e05f56f4449"
+
+SRCREV = "db2bf750eaef7fc0832e13ada8291343bbcc3afe"
+PV = "0.1+git"
+
+SRC_URI = "git://github.com/ahcbb6/baremetal-helloqemu.git;protocol=https;branch=master"
+UPSTREAM_VERSION_UNKNOWN="1"
+
+S = "${WORKDIR}/git"
+
+# The following variables should be set to accomodate each application
+BAREMETAL_BINNAME ?= "hello_baremetal_${MACHINE}"
+IMAGE_LINK_NAME ?= "baremetal-helloworld-image-${MACHINE}"
+IMAGE_NAME_SUFFIX ?= ""
+
+# Baremetal-Image creates the proper wiring, assumes the output is provided in
+# binary and ELF format, installed on ${base_libdir}/firmware/ , we want a
+# package to be created since we might have some way of updating the baremetal
+# firmware from Linux
+inherit baremetal-image
+
+
+# startup code for x86 uses NASM syntax
+DEPENDS:qemux86:append = " nasm-native"
+
+# These parameters are app specific for this example
+# This will be translated automatically to the architecture and
+# machine that QEMU uses on OE, e.g. -machine virt -cpu cortex-a57
+# but the examples can also be run on other architectures/machines
+# such as vexpress-a15 by overriding the setting on the machine.conf
+COMPATIBLE_MACHINE = "qemuarmv5|qemuarm|qemuarm64|qemuriscv64|qemuriscv32|qemux86|qemux86-64"
+
+BAREMETAL_QEMUARCH ?= ""
+BAREMETAL_QEMUARCH:qemuarmv5 = "versatile"
+BAREMETAL_QEMUARCH:qemuarm = "arm"
+BAREMETAL_QEMUARCH:qemuarm64 = "aarch64"
+BAREMETAL_QEMUARCH:qemuriscv64 = "riscv64"
+BAREMETAL_QEMUARCH:qemuriscv32 = "riscv32"
+BAREMETAL_QEMUARCH:qemux86 = "x86"
+BAREMETAL_QEMUARCH:qemux86-64 = "x86-64"
+
+EXTRA_OEMAKE:append = " QEMUARCH=${BAREMETAL_QEMUARCH} V=1"
+
+# qemux86-64 uses a different Makefile
+do_compile:prepend:qemux86-64(){
+ cd x86-64
+}
+
+# Install binaries on the proper location for baremetal-image to fetch and deploy
+do_install(){
+ install -d ${D}/${base_libdir}/firmware
+ install -m 755 ${B}/build/hello_baremetal_${BAREMETAL_QEMUARCH}.bin ${D}/${base_libdir}/firmware/${BAREMETAL_BINNAME}.bin
+ install -m 755 ${B}/build/hello_baremetal_${BAREMETAL_QEMUARCH}.elf ${D}/${base_libdir}/firmware/${BAREMETAL_BINNAME}.elf
+}
+
+FILES:${PN} += " \
+ ${base_libdir}/firmware/${BAREMETAL_BINNAME}.bin \
+ ${base_libdir}/firmware/${BAREMETAL_BINNAME}.elf \
+"
+
+# qemux86-64 boots from iso rather than -kernel, create image to boot from
+do_image:append:qemux86-64(){
+ dd if=/dev/zero of=${B}/build/img.iso bs=1M count=10 status=none
+ dd if=${B}/build/stage1.bin of=${B}/build/img.iso bs=512 count=1 conv=notrunc
+ dd if=${B}/build/stage2.bin of=${B}/build/img.iso bs=512 seek=1 count=64 conv=notrunc
+ dd if=${B}/build/hello_baremetal_x86-64.bin of=${B}/build/img.iso bs=512 seek=65 conv=notrunc
+ install ${B}/build/img.iso ${IMGDEPLOYDIR}/${IMAGE_LINK_NAME}.iso
+}
diff --git a/meta/recipes-extended/bash/bash.inc b/meta/recipes-extended/bash/bash.inc
index 18874a0e24..e541161c75 100644
--- a/meta/recipes-extended/bash/bash.inc
+++ b/meta/recipes-extended/bash/bash.inc
@@ -74,7 +74,8 @@ do_install:append () {
mv ${D}${bindir}/bash ${D}${base_bindir}
fi
}
-do_install:append:class-target () {
+
+fix_absolute_paths () {
# Clean buildhost references in bashbug
sed -i -e "s,--sysroot=${STAGING_DIR_TARGET},,g" \
-e "s,-I${WORKDIR}/\S* ,,g" \
@@ -93,6 +94,14 @@ do_install:append:class-target () {
${D}${libdir}/bash/Makefile.inc
}
+do_install:append:class-target () {
+ fix_absolute_paths
+}
+
+do_install:append:class-nativesdk () {
+ fix_absolute_paths
+}
+
do_install_ptest () {
make INSTALL_TEST_DIR=${D}${PTEST_PATH}/tests install-test
cp ${B}/Makefile ${D}${PTEST_PATH}
@@ -102,6 +111,7 @@ do_install_ptest () {
install -D ${WORKDIR}/run-bash-ptests ${D}${PTEST_PATH}/run-bash-ptests
sed -i -e 's/^Makefile/_Makefile/' -e "s,--sysroot=${STAGING_DIR_TARGET},,g" \
-e 's|${DEBUG_PREFIX_MAP}||g' \
+ -e 's|${BUILD_LDFLAGS}||g' \
-e "s,${S},,g" -e "s,${B},,g" -e "s,${STAGING_DIR_NATIVE},,g" \
-e 's:${HOSTTOOLS_DIR}/::g' \
-e 's:${UNINATIVE_LOADER}:${base_bindir}/false:g' \
@@ -128,4 +138,6 @@ PACKAGE_BEFORE_PN += "${PN}-loadable"
RDEPENDS:${PN}-loadable += "${PN}"
FILES:${PN}-loadable += "${libdir}/bash/*"
-RPROVIDES:${PN} += "${@bb.utils.contains('DISTRO_FEATURES', 'usrmerge', '/bin/sh /bin/bash', '', d)}"
+# Limit the RPROVIDES here to class target so that if usrmerge is enabled for nativesdk, it does not
+# include host system paths in /bin/
+RPROVIDES:${PN}:append:class-target = " ${@bb.utils.contains('DISTRO_FEATURES', 'usrmerge', '/bin/sh /bin/bash', '', d)}"
diff --git a/meta/recipes-extended/bash/bash/0001-changes-to-SIGINT-handler-while-waiting-for-a-child-.patch b/meta/recipes-extended/bash/bash/0001-changes-to-SIGINT-handler-while-waiting-for-a-child-.patch
new file mode 100644
index 0000000000..77d770b364
--- /dev/null
+++ b/meta/recipes-extended/bash/bash/0001-changes-to-SIGINT-handler-while-waiting-for-a-child-.patch
@@ -0,0 +1,226 @@
+From 721d5be99eb37d31e48bd66d61808a66a4c5ab84 Mon Sep 17 00:00:00 2001
+From: Chet Ramey <chet.ramey@case.edu>
+Date: Mon, 30 Oct 2023 12:16:07 -0400
+Subject: [PATCH] changes to SIGINT handler while waiting for a child; skip
+ vertical whitespace after translating an integer
+
+Upstream-Status: Backport from
+[https://git.savannah.gnu.org/cgit/bash.git/commit/?h=devel&id=fe24a6a55e8850298b496c5b9d82f1866eba190e]
+
+[Adjust and drop some codes to be applicable the tree]
+
+Signed-off-by: Xiangyu Chen <xiangyu.chen@windriver.com>
+---
+ general.c | 5 +++--
+ jobs.c | 24 ++++++++++++++++--------
+ tests/redir.right | 4 ++--
+ tests/redir11.sub | 2 ++
+ tests/type.right | 16 ++++++++--------
+ tests/type.tests | 24 ++++++++++++------------
+ 6 files changed, 43 insertions(+), 32 deletions(-)
+
+diff --git a/general.c b/general.c
+index 85c5a8b6..65e2ee06 100644
+--- a/general.c
++++ b/general.c
+@@ -262,8 +262,9 @@ legal_number (string, result)
+ if (errno || ep == string)
+ return 0; /* errno is set on overflow or underflow */
+
+- /* Skip any trailing whitespace, since strtoimax does not. */
+- while (whitespace (*ep))
++ /* Skip any trailing whitespace, since strtoimax does not, using the same
++ test that strtoimax uses for leading whitespace. */
++ while (isspace ((unsigned char) *ep))
+ ep++;
+
+ /* If *string is not '\0' but *ep is '\0' on return, the entire string
+diff --git a/jobs.c b/jobs.c
+index 6b986ed7..262d78de 100644
+--- a/jobs.c
++++ b/jobs.c
+@@ -2718,6 +2718,10 @@ wait_for_background_pids (ps)
+ #define INVALID_SIGNAL_HANDLER (SigHandler *)wait_for_background_pids
+ static SigHandler *old_sigint_handler = INVALID_SIGNAL_HANDLER;
+
++/* The current SIGINT handler as set by restore_sigint_handler. Only valid
++ immediately after restore_sigint_handler, used for continuations. */
++static SigHandler *cur_sigint_handler = INVALID_SIGNAL_HANDLER;
++
+ static int wait_sigint_received;
+ static int child_caught_sigint;
+
+@@ -2735,6 +2739,7 @@ wait_sigint_cleanup ()
+ static void
+ restore_sigint_handler ()
+ {
++ cur_sigint_handler = old_sigint_handler;
+ if (old_sigint_handler != INVALID_SIGNAL_HANDLER)
+ {
+ set_signal_handler (SIGINT, old_sigint_handler);
+@@ -2758,8 +2763,7 @@ wait_sigint_handler (sig)
+ restore_sigint_handler ();
+ /* If we got a SIGINT while in `wait', and SIGINT is trapped, do
+ what POSIX.2 says (see builtins/wait.def for more info). */
+- if (this_shell_builtin && this_shell_builtin == wait_builtin &&
+- signal_is_trapped (SIGINT) &&
++ if (signal_is_trapped (SIGINT) &&
+ ((sigint_handler = trap_to_sighandler (SIGINT)) == trap_handler))
+ {
+ trap_handler (SIGINT); /* set pending_traps[SIGINT] */
+@@ -2782,6 +2786,8 @@ wait_sigint_handler (sig)
+ {
+ set_exit_status (128+SIGINT);
+ restore_sigint_handler ();
++ if (cur_sigint_handler == INVALID_SIGNAL_HANDLER)
++ set_sigint_handler (); /* XXX - only do this in one place */
+ kill (getpid (), SIGINT);
+ }
+
+@@ -2926,11 +2932,13 @@ wait_for (pid, flags)
+ {
+ SigHandler *temp_sigint_handler;
+
+- temp_sigint_handler = set_signal_handler (SIGINT, wait_sigint_handler);
+- if (temp_sigint_handler == wait_sigint_handler)
+- internal_debug ("wait_for: recursively setting old_sigint_handler to wait_sigint_handler: running_trap = %d", running_trap);
+- else
+- old_sigint_handler = temp_sigint_handler;
++ temp_sigint_handler = old_sigint_handler;
++ old_sigint_handler = set_signal_handler (SIGINT, wait_sigint_handler);
++ if (old_sigint_handler == wait_sigint_handler)
++ {
++ internal_debug ("wait_for: recursively setting old_sigint_handler to wait_sigint_handler: running_trap = %d", running_trap);
++ old_sigint_handler = temp_sigint_handler;
++ }
+ waiting_for_child = 0;
+ if (old_sigint_handler == SIG_IGN)
+ set_signal_handler (SIGINT, old_sigint_handler);
+@@ -4136,7 +4144,7 @@ set_job_status_and_cleanup (job)
+ SIGINT (if we reset the sighandler to the default).
+ In this case, we have to fix things up. What a crock. */
+ if (temp_handler == trap_handler && signal_is_trapped (SIGINT) == 0)
+- temp_handler = trap_to_sighandler (SIGINT);
++ temp_handler = trap_to_sighandler (SIGINT);
+ restore_sigint_handler ();
+ if (temp_handler == SIG_DFL)
+ termsig_handler (SIGINT); /* XXX */
+diff --git a/tests/redir.right b/tests/redir.right
+index 8db10414..9e1403c8 100644
+--- a/tests/redir.right
++++ b/tests/redir.right
+@@ -154,10 +154,10 @@ foo
+ 1
+ 7
+ after: 42
+-./redir11.sub: line 53: $(ss= declare -i ss): ambiguous redirect
++./redir11.sub: line 55: $(ss= declare -i ss): ambiguous redirect
+ after: 42
+ a+=3
+ foo
+ foo
+-./redir11.sub: line 75: 42: No such file or directory
++./redir11.sub: line 77: 42: No such file or directory
+ 42
+diff --git a/tests/redir11.sub b/tests/redir11.sub
+index d417cdb6..ca9854cd 100644
+--- a/tests/redir11.sub
++++ b/tests/redir11.sub
+@@ -34,6 +34,8 @@ a=4 b=7 ss=4 declare -i ss
+ a=4 b=7 foo
+ echo after: $a
+
++exec 7>&- 4>&-
++
+ unset a
+ a=4 echo foo 2>&1 >&$(foo) | { grep -q 'Bad file' || echo 'redir11 bad 3'; }
+ a=1 echo foo 2>&1 >&$(foo) | { grep -q 'Bad file' || echo 'redir11 bad 4'; }
+diff --git a/tests/type.right b/tests/type.right
+index bbc228e8..e0a66745 100644
+--- a/tests/type.right
++++ b/tests/type.right
+@@ -24,15 +24,15 @@ func ()
+ }
+ while
+ while is a shell keyword
+-./type.tests: line 56: type: m: not found
+-alias m='more'
+-alias m='more'
+-m is aliased to `more'
++./type.tests: line 56: type: morealias: not found
++alias morealias='more'
++alias morealias='more'
++morealias is aliased to `more'
+ alias
+-alias m='more'
+-alias m='more'
+-alias m='more'
+-m is aliased to `more'
++alias morealias='more'
++alias morealias='more'
++alias morealias='more'
++morealias is aliased to `more'
+ builtin
+ builtin is a shell builtin
+ /bin/sh
+diff --git a/tests/type.tests b/tests/type.tests
+index fd39c18a..ddc15407 100644
+--- a/tests/type.tests
++++ b/tests/type.tests
+@@ -25,8 +25,6 @@ type -r ${THIS_SH}
+ type notthere
+ command -v notthere
+
+-alias m=more
+-
+ unset -f func 2>/dev/null
+ func() { echo this is func; }
+
+@@ -49,24 +47,26 @@ command -V func
+ command -v while
+ command -V while
+
++alias morealias=more
++
+ # the following two lines should produce the same output
+ # post-3.0 patch makes command -v silent, as posix specifies
+ # first test with alias expansion off (should all fail or produce no output)
+-type -t m
+-type m
+-command -v m
++type -t morealias
++type morealias
++command -v morealias
+ alias -p
+-alias m
++alias morealias
+
+ # then test with alias expansion on
+ shopt -s expand_aliases
+-type m
+-type -t m
+-command -v m
++type morealias
++type -t morealias
++command -v morealias
+ alias -p
+-alias m
++alias morealias
+
+-command -V m
++command -V morealias
+ shopt -u expand_aliases
+
+ command -v builtin
+@@ -76,7 +76,7 @@ command -V /bin/sh
+
+ unset -f func
+ type func
+-unalias m
++unalias morealias
+ type m
+
+ hash -r
+--
+2.35.5
+
diff --git a/meta/recipes-extended/bash/bash/execute_cmd.patch b/meta/recipes-extended/bash/bash/execute_cmd.patch
deleted file mode 100644
index 7a9e9a902f..0000000000
--- a/meta/recipes-extended/bash/bash/execute_cmd.patch
+++ /dev/null
@@ -1,28 +0,0 @@
-Upstream-Status: Inappropriate [embedded specific]
-
-Rebase to 5.0
-Signed-off-by: Hongxu Jia <hongxu.jia@windriver.com>
----
- execute_cmd.c | 6 +++++-
- 1 file changed, 5 insertions(+), 1 deletion(-)
-
-diff --git a/execute_cmd.c b/execute_cmd.c
-index f1d74bf..31674b4 100644
---- a/execute_cmd.c
-+++ b/execute_cmd.c
-@@ -2567,7 +2567,11 @@ execute_pipeline (command, asynchronous, pipe_in, pipe_out, fds_to_close)
- /* If the `lastpipe' option is set with shopt, and job control is not
- enabled, execute the last element of non-async pipelines in the
- current shell environment. */
-- if (lastpipe_opt && job_control == 0 && asynchronous == 0 && pipe_out == NO_PIPE && prev > 0)
-+ if (lastpipe_opt &&
-+#if defined(JOB_CONTROL)
-+ job_control == 0 &&
-+#endif
-+ asynchronous == 0 && pipe_out == NO_PIPE && prev > 0)
- {
- lstdin = move_to_high_fd (0, 1, -1);
- if (lstdin > 0)
---
-2.7.4
-
diff --git a/meta/recipes-extended/bash/bash/makerace.patch b/meta/recipes-extended/bash/bash/makerace.patch
deleted file mode 100644
index 9bd7c280fe..0000000000
--- a/meta/recipes-extended/bash/bash/makerace.patch
+++ /dev/null
@@ -1,52 +0,0 @@
-We're seeing pipesize.h being created in parallel:
-
-/bin/sh ../../bash-5.1/builtins/psize.sh > pipesize.h
-/bin/sh ../../bash-5.1/builtins/psize.sh > pipesize.h
-
-./mkbuiltins -D ../../bash-5.1/builtins ../../bash-5.1/builtins/ulimit.def
-x86_64-pokysdk-linux-gcc --sysroot=/home/pokybuild/yocto-worker/multilib/build/build/tmp/work/x86_64-nativesdk-pokysdk-linux/nativesdk-bash/5.1-r0/recipe-sysroot -c -DHAVE_CONFIG_H -DSHELL -I. -I.. -I../../bash-5.1 -I../../bash-5.1/include -I../../bash-5.1/lib -I../../bash-5.1/builtins -O2 -pipe -fmacro-prefix-map=/home/pokybuild/yocto-worker/multilib/build/build/tmp/work/x86_64-nativesdk-pokysdk-linux/nativesdk-bash/5.1-r0=/usr/src/debug/nativesdk-bash/5.1-r0 -fdebug-prefix-map=/home/pokybuild/yocto-worker/multilib/build/build/tmp/work/x86_64-nativesdk-pokysdk-linux/nativesdk-bash/5.1-r0=/usr/src/debug/nativesdk-bash/5.1-r0 -fdebug-prefix-map=/home/pokybuild/yocto-worker/multilib/build/build/tmp/work/x86_64-nativesdk-pokysdk-linux/nativesdk-bash/5.1-r0/recipe-sysroot= -fdebug-prefix-map=/home/pokybuild/yocto-worker/multilib/build/build/tmp/work/x86_64-nativesdk-pokysdk-linux/nativesdk-bash/5.1-r0/recipe-sysroot-native= ulimit.c || ( rm -f ulimit.c ; exit 1 )
-make[1]: Leaving directory '/home/pokybuild/yocto-worker/multilib/build/build/tmp/work/x86_64-nativesdk-pokysdk-linux/nativesdk-bash/5.1-r0/build/builtins'
-rm -f redir.o
-x86_64-pokysdk-linux-gcc --sysroot=/home/pokybuild/yocto-worker/multilib/build/build/tmp/work/x86_64-nativesdk-pokysdk-linux/nativesdk-bash/5.1-r0/recipe-sysroot -DPROGRAM='"bash"' -DCONF_HOSTTYPE='"x86_64"' -DCONF_OSTYPE='"linux-gnu"' -DCONF_MACHTYPE='"x86_64-pokysdk-linux-gnu"' -DCONF_VENDOR='"pokysdk"' -DLOCALEDIR='"/opt/poky/3.2+snapshot/sysroots/x86_64-pokysdk-linux/usr/share/locale"' -DPACKAGE='"bash"' -DSHELL -DHAVE_CONFIG_H -I. -I../bash-5.1 -I../bash-5.1/include -I../bash-5.1/lib -O2 -pipe -fmacro-prefix-map=/home/pokybuild/yocto-worker/multilib/build/build/tmp/work/x86_64-nativesdk-pokysdk-linux/nativesdk-bash/5.1-r0=/usr/src/debug/nativesdk-bash/5.1-r0 -fdebug-prefix-map=/home/pokybuild/yocto-worker/multilib/build/build/tmp/work/x86_64-nativesdk-pokysdk-linux/nativesdk-bash/5.1-r0=/usr/src/debug/nativesdk-bash/5.1-r0 -fdebug-prefix-map=/home/pokybuild/yocto-worker/multilib/build/build/tmp/work/x86_64-nativesdk-pokysdk-linux/nativesdk-bash/5.1-r0/recipe-sysroot= -fdebug-prefix-map=/home/pokybuild/yocto-worker/multilib/build/build/tmp/work/x86_64-nativesdk-pokysdk-linux/nativesdk-bash/5.1-r0/recipe-sysroot-native= -c ../bash-5.1/redir.c
-In file included from ../../bash-5.1/builtins/../../bash-5.1/builtins/ulimit.def:95:
-pipesize.h:9:5: error: expected '=', ',', ';', 'asm' or '__attribute__' before '-' token
- 9 | bash-5.1/builtins/psize.sh: 37: ../../bash-5.1/builtins/psize.sh: ./psize.aux: Text file busy
- | ^
-make[1]: *** [Makefile:119: ulimit.o] Error 1
-make[1]: Leaving directory '/home/pokybuild/yocto-worker/multilib/build/build/tmp/work/x86_64-nativesdk-pokysdk-linux/nativesdk-bash/5.1-r0/build/builtins'
-make: *** [Makefile:737: builtins/libbuiltins.a] Error 1
-make: *** Waiting for unfinished jobs....
-In file included from ../bash-5.1/redir.c:61:
-./builtins/pipesize.h:9:5: error: expected '=', ',', ';', 'asm' or '__attribute__' before '-' token
- 9 | bash-5.1/builtins/psize.sh: 37: ../../bash-5.1/builtins/psize.sh: ./psize.aux: Text file busy
- | ^
-make: *** [Makefile:101: redir.o] Error 1
-WARNING: exit code 1 from a shell command.
-
-which happens since builtins/ulimit.o depends on pipesize.h as well as a top
-level dependency. This means:
-
- @(cd $(DEFDIR) && $(MAKE) $(MFLAGS) pipesize.h ) || exit 1
-
-races with:
-
- @(cd $(DEFDIR) && $(MAKE) $(MFLAGS) DEBUG=${DEBUG} targets ) || exit 1
-
-Hack around this by forcing BUILTINS_LIBRARY onto pipesize.h as a dependency.
-
-Upstream-Status: Submitted [https://lists.gnu.org/archive/html/bug-bash/2021-01/msg00152.html]
-Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
-
-Index: bash-5.1/Makefile.in
-===================================================================
---- bash-5.1.orig/Makefile.in
-+++ bash-5.1/Makefile.in
-@@ -746,7 +746,7 @@ ${DEFDIR}/bashgetopt.o: $(BUILTIN_SRCDIR
- ${DEFDIR}/builtext.h: $(BUILTIN_DEFS)
- @(cd $(DEFDIR) && $(MAKE) $(MFLAGS) builtext.h ) || exit 1
-
--${DEFDIR}/pipesize.h:
-+${DEFDIR}/pipesize.h: $(BUILTINS_LIBRARY)
- @(cd $(DEFDIR) && $(MAKE) $(MFLAGS) pipesize.h ) || exit 1
-
- $(SDIR)/man2html$(EXEEXT): ${SUPPORT_SRC}/man2html.c
diff --git a/meta/recipes-extended/bash/bash/makerace2.patch b/meta/recipes-extended/bash/bash/makerace2.patch
deleted file mode 100644
index 43cdd04157..0000000000
--- a/meta/recipes-extended/bash/bash/makerace2.patch
+++ /dev/null
@@ -1,98 +0,0 @@
-The main makefile can call mkbuiltins from multiple different codepaths in parallel.
-When called, it moves the existing files out the way and creates new ones, then
-compares which will break the build if timing is unlucky.
-
-The root of the problem is mkbuiltins.c creating a file but also referencing that
-file under the same name. By modifing it to allow the final name and the temp name
-to be specified, we can avoid the original reason for the moving of files around.
-This allows them to be created under a new name and then replaced if changed,
-removing any race windows around accessing the files whilst they've been
-moved or are being rewritten.
-
-See [YOCTO #14227]
-
-Upstream-Status: Pending
-Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
-
-Index: bash-5.1.8/builtins/Makefile.in
-===================================================================
---- bash-5.1.8.orig/builtins/Makefile.in
-+++ bash-5.1.8/builtins/Makefile.in
-@@ -185,19 +185,17 @@ gen-helpfiles: tmpbuiltins.o gen-helpfil
- $(CC_FOR_BUILD) ${CCFLAGS_FOR_BUILD} $(LDFLAGS_FOR_BUILD) -o $@ gen-helpfiles.o tmpbuiltins.o $(LIBS_FOR_BUILD)
-
- builtext.h builtins.c: $(MKBUILTINS) $(DEFSRC)
-- @-if test -f builtins.c; then mv -f builtins.c old-builtins.c; fi
-- @-if test -f builtext.h; then mv -f builtext.h old-builtext.h; fi
-- ./$(MKBUILTINS) -externfile builtext.h -structfile builtins.c \
-+ ./$(MKBUILTINS) -externfile builtext-new.h -externfinalfile builtext.h -structfile builtins-new.c \
- -noproduction $(DIRECTDEFINE) $(HELPDIRDEFINE) $(HELPSTRINGS) $(DEFSRC)
-- @-if cmp -s old-builtext.h builtext.h 2>/dev/null; then \
-- mv old-builtext.h builtext.h; \
-+ @-if ! cmp -s builtext.h builtext-new.h 2>/dev/null; then \
-+ mv builtext-new.h builtext.h; \
- else \
-- $(RM) old-builtext.h; \
-+ $(RM) builtext-new.h; \
- fi
-- @-if cmp -s old-builtins.c builtins.c 2>/dev/null; then \
-- mv old-builtins.c builtins.c; \
-+ @-if ! cmp -s builtins.c builtins-new.c 2>/dev/null; then \
-+ mv builtins-new.c builtins.c; \
- else \
-- $(RM) old-builtins.c; \
-+ $(RM) builtins-new.c; \
- fi
-
- helpdoc: gen-helpfiles
-Index: bash-5.1.8/builtins/mkbuiltins.c
-===================================================================
---- bash-5.1.8.orig/builtins/mkbuiltins.c
-+++ bash-5.1.8/builtins/mkbuiltins.c
-@@ -113,6 +113,9 @@ char *struct_filename = (char *)NULL;
- /* The name of the external declaration file. */
- char *extern_filename = (char *)NULL;
-
-+/* The final name of the external declaration file. */
-+char *extern_final_filename = (char *)NULL;
-+
- /* Here is a structure for manipulating arrays of data. */
- typedef struct {
- int size; /* Number of slots allocated to array. */
-@@ -230,6 +233,8 @@ main (argc, argv)
-
- if (strcmp (arg, "-externfile") == 0)
- extern_filename = argv[arg_index++];
-+ else if (strcmp (arg, "-externfinalfile") == 0)
-+ extern_final_filename = argv[arg_index++];
- else if (strcmp (arg, "-structfile") == 0)
- struct_filename = argv[arg_index++];
- else if (strcmp (arg, "-noproduction") == 0)
-@@ -273,6 +278,9 @@ main (argc, argv)
- }
- }
-
-+ if (!extern_final_filename)
-+ extern_final_filename = extern_filename;
-+
- /* If there are no files to process, just quit now. */
- if (arg_index == argc)
- exit (0);
-@@ -1174,7 +1182,7 @@ write_file_headers (structfile, externfi
- fprintf (structfile, "%s\n", structfile_header[i]);
-
- fprintf (structfile, "#include \"%s\"\n",
-- extern_filename ? extern_filename : "builtext.h");
-+ extern_final_filename ? extern_final_filename : "builtext.h");
-
- fprintf (structfile, "#include \"bashintl.h\"\n");
-
-@@ -1184,7 +1192,7 @@ write_file_headers (structfile, externfi
- if (externfile)
- fprintf (externfile,
- "/* %s - The list of builtins found in libbuiltins.a. */\n",
-- extern_filename ? extern_filename : "builtext.h");
-+ extern_final_filename ? extern_final_filename : "builtext.h");
- }
-
- /* Write out any necessary closing information for
diff --git a/meta/recipes-extended/bash/bash/use_aclocal.patch b/meta/recipes-extended/bash/bash/use_aclocal.patch
index bebaa08bfe..bd6870b386 100644
--- a/meta/recipes-extended/bash/bash/use_aclocal.patch
+++ b/meta/recipes-extended/bash/bash/use_aclocal.patch
@@ -1,3 +1,8 @@
+From d1bf23817afffd5917b74da6946e0c3b7e63e336 Mon Sep 17 00:00:00 2001
+From: Alexander Kanavin <alex.kanavin@gmail.com>
+Date: Mon, 28 Dec 2020 21:04:27 +0100
+Subject: [PATCH] bash: update 5.0 -> 5.1
+
Including m4 files directly like this confuses autotools.bbclass, remove
the references and rely upon aclocal to collect the m4 files together
as needed instead making it work like other autotools based projects.
@@ -5,17 +10,23 @@ as needed instead making it work like other autotools based projects.
Upstream-Status: Inappropriate [OE configuration specific]
RP 2021/1/20
-Index: bash-5.1/configure.ac
-===================================================================
---- bash-5.1.orig/configure.ac
-+++ bash-5.1/configure.ac
-@@ -688,47 +688,6 @@ if test x$SIZE = x; then
+---
+ configure.ac | 43 -------------------------------------------
+ 1 file changed, 43 deletions(-)
+
+diff --git a/configure.ac b/configure.ac
+index 50a6e20..a3b5bd7 100644
+--- a/configure.ac
++++ b/configure.ac
+@@ -710,49 +710,6 @@ if test x$SIZE = x; then
fi
AC_SUBST(SIZE)
-m4_include([m4/stat-time.m4])
-m4_include([m4/timespec.m4])
-
+-m4_include([m4/strtoimax.m4])
+-
-dnl include files for gettext
-
-m4_include([m4/codeset.m4])
@@ -54,6 +65,6 @@ Index: bash-5.1/configure.ac
-m4_include([m4/wint_t.m4])
-m4_include([m4/xsize.m4])
-
- dnl Turn on any extensions available in the GNU C library.
- AC_DEFINE(_GNU_SOURCE, 1)
-
+ dnl C compiler characteristics
+ AC_C_CONST
+ AC_C_INLINE
diff --git a/meta/recipes-extended/bash/bash_5.1.16.bb b/meta/recipes-extended/bash/bash_5.1.16.bb
deleted file mode 100644
index d046faa4e5..0000000000
--- a/meta/recipes-extended/bash/bash_5.1.16.bb
+++ /dev/null
@@ -1,25 +0,0 @@
-require bash.inc
-
-# GPL-2.0-or-later (< 4.0), GPL-3.0-or-later (>= 4.0)
-LICENSE = "GPL-3.0-or-later"
-LIC_FILES_CHKSUM = "file://COPYING;md5=d32239bcb673463ab874e80d47fae504"
-
-SRC_URI = "${GNU_MIRROR}/bash/${BP}.tar.gz;name=tarball \
- file://execute_cmd.patch \
- file://mkbuiltins_have_stringize.patch \
- file://build-tests.patch \
- file://test-output.patch \
- file://run-ptest \
- file://run-bash-ptests \
- file://fix-run-builtins.patch \
- file://use_aclocal.patch \
- file://makerace.patch \
- file://makerace2.patch \
- "
-
-SRC_URI[tarball.sha256sum] = "5bac17218d3911834520dad13cd1f85ab944e1c09ae1aba55906be1f8192f558"
-
-DEBUG_OPTIMIZATION:append:armv4 = " ${@bb.utils.contains('TUNE_CCARGS', '-mthumb', '-fomit-frame-pointer', '', d)}"
-DEBUG_OPTIMIZATION:append:armv5 = " ${@bb.utils.contains('TUNE_CCARGS', '-mthumb', '-fomit-frame-pointer', '', d)}"
-
-BBCLASSEXTEND = "nativesdk"
diff --git a/meta/recipes-extended/bash/bash_5.2.21.bb b/meta/recipes-extended/bash/bash_5.2.21.bb
new file mode 100644
index 0000000000..46d921bbe6
--- /dev/null
+++ b/meta/recipes-extended/bash/bash_5.2.21.bb
@@ -0,0 +1,23 @@
+require bash.inc
+
+# GPL-2.0-or-later (< 4.0), GPL-3.0-or-later (>= 4.0)
+LICENSE = "GPL-3.0-or-later"
+LIC_FILES_CHKSUM = "file://COPYING;md5=d32239bcb673463ab874e80d47fae504"
+
+SRC_URI = "${GNU_MIRROR}/bash/${BP}.tar.gz;name=tarball \
+ file://mkbuiltins_have_stringize.patch \
+ file://build-tests.patch \
+ file://test-output.patch \
+ file://run-ptest \
+ file://run-bash-ptests \
+ file://fix-run-builtins.patch \
+ file://use_aclocal.patch \
+ file://0001-changes-to-SIGINT-handler-while-waiting-for-a-child-.patch \
+ "
+
+SRC_URI[tarball.sha256sum] = "c8e31bdc59b69aaffc5b36509905ba3e5cbb12747091d27b4b977f078560d5b8"
+
+DEBUG_OPTIMIZATION:append:armv4 = " ${@bb.utils.contains('TUNE_CCARGS', '-mthumb', '-fomit-frame-pointer', '', d)}"
+DEBUG_OPTIMIZATION:append:armv5 = " ${@bb.utils.contains('TUNE_CCARGS', '-mthumb', '-fomit-frame-pointer', '', d)}"
+
+BBCLASSEXTEND = "nativesdk"
diff --git a/meta/recipes-extended/bc/bc/run-ptest b/meta/recipes-extended/bc/bc/run-ptest
new file mode 100644
index 0000000000..ba5abe6805
--- /dev/null
+++ b/meta/recipes-extended/bc/bc/run-ptest
@@ -0,0 +1,9 @@
+#! /bin/sh
+
+for TEST in *.b; do
+ if bc -l $TEST </dev/null; then
+ echo "PASS: bc/$TEST"
+ else
+ echo "FAIL: bc/$TEST"
+ fi
+done
diff --git a/meta/recipes-extended/bc/bc_1.07.1.bb b/meta/recipes-extended/bc/bc_1.07.1.bb
index 1bec76bb2a..819da6cda6 100644
--- a/meta/recipes-extended/bc/bc_1.07.1.bb
+++ b/meta/recipes-extended/bc/bc_1.07.1.bb
@@ -15,11 +15,12 @@ DEPENDS = "flex-native"
SRC_URI = "${GNU_MIRROR}/${BPN}/${BP}.tar.gz \
file://no-gen-libmath.patch \
file://libmath.h \
- file://0001-dc-fix-exit-code-of-q-command.patch"
+ file://0001-dc-fix-exit-code-of-q-command.patch \
+ file://run-ptest"
SRC_URI[md5sum] = "cda93857418655ea43590736fc3ca9fc"
SRC_URI[sha256sum] = "62adfca89b0a1c0164c2cdca59ca210c1d44c3ffc46daf9931cf4942664cb02a"
-inherit autotools texinfo update-alternatives
+inherit autotools texinfo update-alternatives ptest
PACKAGECONFIG ??= "readline"
PACKAGECONFIG[readline] = "--with-readline,--without-readline,readline"
@@ -29,7 +30,11 @@ do_compile:prepend() {
cp -f ${WORKDIR}/libmath.h ${B}/bc/libmath.h
}
+do_install_ptest() {
+ install ${S}/Test/*.b ${D}${PTEST_PATH}
+}
+
ALTERNATIVE:${PN} = "bc dc"
ALTERNATIVE_PRIORITY = "100"
-BBCLASSEXTEND = "native"
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-extended/bzip2/bzip2_1.0.8.bb b/meta/recipes-extended/bzip2/bzip2_1.0.8.bb
index 78138d1543..4e3a06f240 100644
--- a/meta/recipes-extended/bzip2/bzip2_1.0.8.bb
+++ b/meta/recipes-extended/bzip2/bzip2_1.0.8.bb
@@ -22,7 +22,7 @@ LIC_FILES_CHKSUM = "file://LICENSE;beginline=4;endline=37;md5=600af43c50f1fcb82e
"
SRC_URI = "https://sourceware.org/pub/${BPN}/${BPN}-${PV}.tar.gz \
- git://sourceware.org/git/bzip2-tests.git;name=bzip2-tests;branch=master \
+ git://sourceware.org/git/bzip2-tests.git;name=bzip2-tests;branch=master;protocol=https \
file://configure.ac;subdir=${BP} \
file://Makefile.am;subdir=${BP} \
file://run-ptest \
diff --git a/meta/recipes-extended/cpio/cpio-2.13/0001-Unset-need_charset_alias-when-building-for-musl.patch b/meta/recipes-extended/cpio/cpio-2.13/0001-Unset-need_charset_alias-when-building-for-musl.patch
deleted file mode 100644
index 6ae213942c..0000000000
--- a/meta/recipes-extended/cpio/cpio-2.13/0001-Unset-need_charset_alias-when-building-for-musl.patch
+++ /dev/null
@@ -1,30 +0,0 @@
-From b9565dc2fe0c4f7daaec91b7e83bc7313dee2f4a Mon Sep 17 00:00:00 2001
-From: Khem Raj <raj.khem@gmail.com>
-Date: Mon, 13 Apr 2015 17:02:13 -0700
-Subject: [PATCH] Unset need_charset_alias when building for musl
-
-localcharset uses ac_cv_gnu_library_2_1 from glibc21.m4
-which actually shoudl be fixed in gnulib and then all downstream
-projects will get it eventually. For now we apply the fix to
-coreutils
-
-Upstream-Status: Pending
-
-Signed-off-by: Khem Raj <raj.khem@gmail.com>
----
- lib/gnulib.mk | 2 +-
- 1 file changed, 1 insertion(+), 1 deletion(-)
-
-Index: cpio-2.11/gnu/Makefile.am
-===================================================================
---- cpio-2.11.orig/gnu/Makefile.am
-+++ cpio-2.11/gnu/Makefile.am
-@@ -734,7 +734,7 @@ install-exec-localcharset: all-local
- case '$(host_os)' in \
- darwin[56]*) \
- need_charset_alias=true ;; \
-- darwin* | cygwin* | mingw* | pw32* | cegcc*) \
-+ darwin* | cygwin* | mingw* | pw32* | cegcc* | linux-musl*) \
- need_charset_alias=false ;; \
- *) \
- need_charset_alias=true ;; \
diff --git a/meta/recipes-extended/cpio/cpio-2.13/0002-src-global.c-Remove-superfluous-declaration-of-progr.patch b/meta/recipes-extended/cpio/cpio-2.13/0002-src-global.c-Remove-superfluous-declaration-of-progr.patch
deleted file mode 100644
index 478324c1c4..0000000000
--- a/meta/recipes-extended/cpio/cpio-2.13/0002-src-global.c-Remove-superfluous-declaration-of-progr.patch
+++ /dev/null
@@ -1,28 +0,0 @@
-From 33e6cb5a28fab3d99bd6818f8c01e6f33805390f Mon Sep 17 00:00:00 2001
-From: Sergey Poznyakoff <gray@gnu.org>
-Date: Mon, 20 Jan 2020 07:45:39 +0200
-Subject: [PATCH] src/global.c: Remove superfluous declaration of program_name
-
-Upstream-Status: Backport (commit 641d3f4)
-Signed-off-by: Richard Leitner <richard.leitner@skidata.com>
----
- src/global.c | 3 ---
- 1 file changed, 3 deletions(-)
-
-diff --git a/src/global.c b/src/global.c
-index fb3abe9..acf92bc 100644
---- a/src/global.c
-+++ b/src/global.c
-@@ -184,9 +184,6 @@ unsigned int warn_option = 0;
- /* Extract to standard output? */
- bool to_stdout_option = false;
-
--/* The name this program was run with. */
--char *program_name;
--
- /* A pointer to either lstat or stat, depending on whether
- dereferencing of symlinks is done for input files. */
- int (*xstat) ();
---
-2.26.2
-
diff --git a/meta/recipes-extended/cpio/cpio-2.13/CVE-2021-38185.patch b/meta/recipes-extended/cpio/cpio-2.13/CVE-2021-38185.patch
deleted file mode 100644
index 6ceafeee49..0000000000
--- a/meta/recipes-extended/cpio/cpio-2.13/CVE-2021-38185.patch
+++ /dev/null
@@ -1,581 +0,0 @@
-GNU cpio through 2.13 allows attackers to execute arbitrary code via a crafted
-pattern file, because of a dstring.c ds_fgetstr integer overflow that triggers
-an out-of-bounds heap write.
-
-CVE: CVE-2021-38185
-Upstream-Status: Backport
-Signed-off-by: Ross Burton <ross.burton@arm.com>
-
-From e494c68a3a0951b1eaba77e2db93f71a890e15d8 Mon Sep 17 00:00:00 2001
-From: Sergey Poznyakoff <gray@gnu.org>
-Date: Sat, 7 Aug 2021 12:52:21 +0300
-Subject: [PATCH 1/3] Rewrite dynamic string support.
-
-* src/dstring.c (ds_init): Take a single argument.
-(ds_free): New function.
-(ds_resize): Take a single argument. Use x2nrealloc to expand
-the storage.
-(ds_reset,ds_append,ds_concat,ds_endswith): New function.
-(ds_fgetstr): Rewrite. In particular, this fixes integer overflow.
-* src/dstring.h (dynamic_string): Keep both the allocated length
-(ds_size) and index of the next free byte in the string (ds_idx).
-(ds_init,ds_resize): Change signature.
-(ds_len): New macro.
-(ds_free,ds_reset,ds_append,ds_concat,ds_endswith): New protos.
-* src/copyin.c: Use new ds_ functions.
-* src/copyout.c: Likewise.
-* src/copypass.c: Likewise.
-* src/util.c: Likewise.
----
- src/copyin.c | 40 +++++++++++------------
- src/copyout.c | 16 ++++-----
- src/copypass.c | 34 +++++++++----------
- src/dstring.c | 88 ++++++++++++++++++++++++++++++++++++--------------
- src/dstring.h | 31 +++++++++---------
- src/util.c | 6 ++--
- 6 files changed, 123 insertions(+), 92 deletions(-)
-
-diff --git a/src/copyin.c b/src/copyin.c
-index b29f348..37e503a 100644
---- a/src/copyin.c
-+++ b/src/copyin.c
-@@ -55,11 +55,12 @@ query_rename(struct cpio_file_stat* file_hdr, FILE *tty_in, FILE *tty_out,
- char *str_res; /* Result for string function. */
- static dynamic_string new_name; /* New file name for rename option. */
- static int initialized_new_name = false;
-+
- if (!initialized_new_name)
-- {
-- ds_init (&new_name, 128);
-- initialized_new_name = true;
-- }
-+ {
-+ ds_init (&new_name);
-+ initialized_new_name = true;
-+ }
-
- if (rename_flag)
- {
-@@ -779,37 +780,36 @@ long_format (struct cpio_file_stat *file_hdr, char const *link_name)
- already in `save_patterns' (from the command line) are preserved. */
-
- static void
--read_pattern_file ()
-+read_pattern_file (void)
- {
-- int max_new_patterns;
-- char **new_save_patterns;
-- int new_num_patterns;
-+ char **new_save_patterns = NULL;
-+ size_t max_new_patterns;
-+ size_t new_num_patterns;
- int i;
-- dynamic_string pattern_name;
-+ dynamic_string pattern_name = DYNAMIC_STRING_INITIALIZER;
- FILE *pattern_fp;
-
- if (num_patterns < 0)
- num_patterns = 0;
-- max_new_patterns = 1 + num_patterns;
-- new_save_patterns = (char **) xmalloc (max_new_patterns * sizeof (char *));
- new_num_patterns = num_patterns;
-- ds_init (&pattern_name, 128);
-+ max_new_patterns = num_patterns;
-+ new_save_patterns = xcalloc (max_new_patterns, sizeof (new_save_patterns[0]));
-
- pattern_fp = fopen (pattern_file_name, "r");
- if (pattern_fp == NULL)
- open_fatal (pattern_file_name);
- while (ds_fgetstr (pattern_fp, &pattern_name, '\n') != NULL)
- {
-- if (new_num_patterns >= max_new_patterns)
-- {
-- max_new_patterns += 1;
-- new_save_patterns = (char **)
-- xrealloc ((char *) new_save_patterns,
-- max_new_patterns * sizeof (char *));
-- }
-+ if (new_num_patterns == max_new_patterns)
-+ new_save_patterns = x2nrealloc (new_save_patterns,
-+ &max_new_patterns,
-+ sizeof (new_save_patterns[0]));
- new_save_patterns[new_num_patterns] = xstrdup (pattern_name.ds_string);
- ++new_num_patterns;
- }
-+
-+ ds_free (&pattern_name);
-+
- if (ferror (pattern_fp) || fclose (pattern_fp) == EOF)
- close_error (pattern_file_name);
-
-@@ -1196,7 +1196,7 @@ swab_array (char *ptr, int count)
- in the file system. */
-
- void
--process_copy_in ()
-+process_copy_in (void)
- {
- char done = false; /* True if trailer reached. */
- FILE *tty_in = NULL; /* Interactive file for rename option. */
-diff --git a/src/copyout.c b/src/copyout.c
-index 8b0beb6..26e3dda 100644
---- a/src/copyout.c
-+++ b/src/copyout.c
-@@ -594,9 +594,10 @@ assign_string (char **pvar, char *value)
- The format of the header depends on the compatibility (-c) flag. */
-
- void
--process_copy_out ()
-+process_copy_out (void)
- {
-- dynamic_string input_name; /* Name of file read from stdin. */
-+ dynamic_string input_name = DYNAMIC_STRING_INITIALIZER;
-+ /* Name of file read from stdin. */
- struct stat file_stat; /* Stat record for file. */
- struct cpio_file_stat file_hdr = CPIO_FILE_STAT_INITIALIZER;
- /* Output header information. */
-@@ -605,7 +606,6 @@ process_copy_out ()
- char *orig_file_name = NULL;
-
- /* Initialize the copy out. */
-- ds_init (&input_name, 128);
- file_hdr.c_magic = 070707;
-
- /* Check whether the output file might be a tape. */
-@@ -657,14 +657,9 @@ process_copy_out ()
- {
- if (file_hdr.c_mode & CP_IFDIR)
- {
-- int len = strlen (input_name.ds_string);
- /* Make sure the name ends with a slash */
-- if (input_name.ds_string[len-1] != '/')
-- {
-- ds_resize (&input_name, len + 2);
-- input_name.ds_string[len] = '/';
-- input_name.ds_string[len+1] = 0;
-- }
-+ if (!ds_endswith (&input_name, '/'))
-+ ds_append (&input_name, '/');
- }
- }
-
-@@ -875,6 +870,7 @@ process_copy_out ()
- (unsigned long) blocks), (unsigned long) blocks);
- }
- cpio_file_stat_free (&file_hdr);
-+ ds_free (&input_name);
- }
-
-
-diff --git a/src/copypass.c b/src/copypass.c
-index dc13b5b..62f31c6 100644
---- a/src/copypass.c
-+++ b/src/copypass.c
-@@ -48,10 +48,12 @@ set_copypass_perms (int fd, const char *name, struct stat *st)
- If `link_flag', link instead of copying. */
-
- void
--process_copy_pass ()
-+process_copy_pass (void)
- {
-- dynamic_string input_name; /* Name of file from stdin. */
-- dynamic_string output_name; /* Name of new file. */
-+ dynamic_string input_name = DYNAMIC_STRING_INITIALIZER;
-+ /* Name of file from stdin. */
-+ dynamic_string output_name = DYNAMIC_STRING_INITIALIZER;
-+ /* Name of new file. */
- size_t dirname_len; /* Length of `directory_name'. */
- int res; /* Result of functions. */
- char *slash; /* For moving past slashes in input name. */
-@@ -65,25 +67,18 @@ process_copy_pass ()
- created files */
-
- /* Initialize the copy pass. */
-- ds_init (&input_name, 128);
-
- dirname_len = strlen (directory_name);
- if (change_directory_option && !ISSLASH (directory_name[0]))
- {
- char *pwd = xgetcwd ();
--
-- dirname_len += strlen (pwd) + 1;
-- ds_init (&output_name, dirname_len + 2);
-- strcpy (output_name.ds_string, pwd);
-- strcat (output_name.ds_string, "/");
-- strcat (output_name.ds_string, directory_name);
-+
-+ ds_concat (&output_name, pwd);
-+ ds_append (&output_name, '/');
- }
-- else
-- {
-- ds_init (&output_name, dirname_len + 2);
-- strcpy (output_name.ds_string, directory_name);
-- }
-- output_name.ds_string[dirname_len] = '/';
-+ ds_concat (&output_name, directory_name);
-+ ds_append (&output_name, '/');
-+ dirname_len = ds_len (&output_name);
- output_is_seekable = true;
-
- change_dir ();
-@@ -116,8 +111,8 @@ process_copy_pass ()
- /* Make the name of the new file. */
- for (slash = input_name.ds_string; *slash == '/'; ++slash)
- ;
-- ds_resize (&output_name, dirname_len + strlen (slash) + 2);
-- strcpy (output_name.ds_string + dirname_len + 1, slash);
-+ ds_reset (&output_name, dirname_len);
-+ ds_concat (&output_name, slash);
-
- existing_dir = false;
- if (lstat (output_name.ds_string, &out_file_stat) == 0)
-@@ -333,6 +328,9 @@ process_copy_pass ()
- (unsigned long) blocks),
- (unsigned long) blocks);
- }
-+
-+ ds_free (&input_name);
-+ ds_free (&output_name);
- }
-
- /* Try and create a hard link from FILE_NAME to another file
-diff --git a/src/dstring.c b/src/dstring.c
-index e9c063f..358f356 100644
---- a/src/dstring.c
-+++ b/src/dstring.c
-@@ -20,8 +20,8 @@
- #if defined(HAVE_CONFIG_H)
- # include <config.h>
- #endif
--
- #include <stdio.h>
-+#include <stdlib.h>
- #if defined(HAVE_STRING_H) || defined(STDC_HEADERS)
- #include <string.h>
- #else
-@@ -33,24 +33,41 @@
- /* Initialiaze dynamic string STRING with space for SIZE characters. */
-
- void
--ds_init (dynamic_string *string, int size)
-+ds_init (dynamic_string *string)
-+{
-+ memset (string, 0, sizeof *string);
-+}
-+
-+/* Free the dynamic string storage. */
-+
-+void
-+ds_free (dynamic_string *string)
- {
-- string->ds_length = size;
-- string->ds_string = (char *) xmalloc (size);
-+ free (string->ds_string);
- }
-
--/* Expand dynamic string STRING, if necessary, to hold SIZE characters. */
-+/* Expand dynamic string STRING, if necessary. */
-
- void
--ds_resize (dynamic_string *string, int size)
-+ds_resize (dynamic_string *string)
- {
-- if (size > string->ds_length)
-+ if (string->ds_idx == string->ds_size)
- {
-- string->ds_length = size;
-- string->ds_string = (char *) xrealloc ((char *) string->ds_string, size);
-+ string->ds_string = x2nrealloc (string->ds_string, &string->ds_size,
-+ 1);
- }
- }
-
-+/* Reset the index of the dynamic string S to LEN. */
-+
-+void
-+ds_reset (dynamic_string *s, size_t len)
-+{
-+ while (len > s->ds_size)
-+ ds_resize (s);
-+ s->ds_idx = len;
-+}
-+
- /* Dynamic string S gets a string terminated by the EOS character
- (which is removed) from file F. S will increase
- in size during the function if the string from F is longer than
-@@ -61,34 +78,50 @@ ds_resize (dynamic_string *string, int size)
- char *
- ds_fgetstr (FILE *f, dynamic_string *s, char eos)
- {
-- int insize; /* Amount needed for line. */
-- int strsize; /* Amount allocated for S. */
- int next_ch;
-
- /* Initialize. */
-- insize = 0;
-- strsize = s->ds_length;
-+ s->ds_idx = 0;
-
- /* Read the input string. */
-- next_ch = getc (f);
-- while (next_ch != eos && next_ch != EOF)
-+ while ((next_ch = getc (f)) != eos && next_ch != EOF)
- {
-- if (insize >= strsize - 1)
-- {
-- ds_resize (s, strsize * 2 + 2);
-- strsize = s->ds_length;
-- }
-- s->ds_string[insize++] = next_ch;
-- next_ch = getc (f);
-+ ds_resize (s);
-+ s->ds_string[s->ds_idx++] = next_ch;
- }
-- s->ds_string[insize++] = '\0';
-+ ds_resize (s);
-+ s->ds_string[s->ds_idx] = '\0';
-
-- if (insize == 1 && next_ch == EOF)
-+ if (s->ds_idx == 0 && next_ch == EOF)
- return NULL;
- else
- return s->ds_string;
- }
-
-+void
-+ds_append (dynamic_string *s, int c)
-+{
-+ ds_resize (s);
-+ s->ds_string[s->ds_idx] = c;
-+ if (c)
-+ {
-+ s->ds_idx++;
-+ ds_resize (s);
-+ s->ds_string[s->ds_idx] = 0;
-+ }
-+}
-+
-+void
-+ds_concat (dynamic_string *s, char const *str)
-+{
-+ size_t len = strlen (str);
-+ while (len + 1 > s->ds_size)
-+ ds_resize (s);
-+ memcpy (s->ds_string + s->ds_idx, str, len);
-+ s->ds_idx += len;
-+ s->ds_string[s->ds_idx] = 0;
-+}
-+
- char *
- ds_fgets (FILE *f, dynamic_string *s)
- {
-@@ -100,3 +133,10 @@ ds_fgetname (FILE *f, dynamic_string *s)
- {
- return ds_fgetstr (f, s, '\0');
- }
-+
-+/* Return true if the dynamic string S ends with character C. */
-+int
-+ds_endswith (dynamic_string *s, int c)
-+{
-+ return (s->ds_idx > 0 && s->ds_string[s->ds_idx - 1] == c);
-+}
-diff --git a/src/dstring.h b/src/dstring.h
-index b5135fe..f5b04ef 100644
---- a/src/dstring.h
-+++ b/src/dstring.h
-@@ -17,10 +17,6 @@
- Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
- Boston, MA 02110-1301 USA. */
-
--#ifndef NULL
--#define NULL 0
--#endif
--
- /* A dynamic string consists of record that records the size of an
- allocated string and the pointer to that string. The actual string
- is a normal zero byte terminated string that can be used with the
-@@ -30,22 +26,25 @@
-
- typedef struct
- {
-- int ds_length; /* Actual amount of storage allocated. */
-- char *ds_string; /* String. */
-+ size_t ds_size; /* Actual amount of storage allocated. */
-+ size_t ds_idx; /* Index of the next free byte in the string. */
-+ char *ds_string; /* String storage. */
- } dynamic_string;
-
-+#define DYNAMIC_STRING_INITIALIZER { 0, 0, NULL }
-
--/* Macros that look similar to the original string functions.
-- WARNING: These macros work only on pointers to dynamic string records.
-- If used with a real record, an "&" must be used to get the pointer. */
--#define ds_strlen(s) strlen ((s)->ds_string)
--#define ds_strcmp(s1, s2) strcmp ((s1)->ds_string, (s2)->ds_string)
--#define ds_strncmp(s1, s2, n) strncmp ((s1)->ds_string, (s2)->ds_string, n)
--#define ds_index(s, c) index ((s)->ds_string, c)
--#define ds_rindex(s, c) rindex ((s)->ds_string, c)
-+void ds_init (dynamic_string *string);
-+void ds_free (dynamic_string *string);
-+void ds_reset (dynamic_string *s, size_t len);
-
--void ds_init (dynamic_string *string, int size);
--void ds_resize (dynamic_string *string, int size);
-+/* All functions below guarantee that s->ds_string[s->ds_idx] == '\0' */
- char *ds_fgetname (FILE *f, dynamic_string *s);
- char *ds_fgets (FILE *f, dynamic_string *s);
- char *ds_fgetstr (FILE *f, dynamic_string *s, char eos);
-+void ds_append (dynamic_string *s, int c);
-+void ds_concat (dynamic_string *s, char const *str);
-+
-+#define ds_len(s) ((s)->ds_idx)
-+
-+int ds_endswith (dynamic_string *s, int c);
-+
-diff --git a/src/util.c b/src/util.c
-index 4421b20..6d6bbaa 100644
---- a/src/util.c
-+++ b/src/util.c
-@@ -846,11 +846,9 @@ get_next_reel (int tape_des)
- FILE *tty_out; /* File for interacting with user. */
- int old_tape_des;
- char *next_archive_name;
-- dynamic_string new_name;
-+ dynamic_string new_name = DYNAMIC_STRING_INITIALIZER;
- char *str_res;
-
-- ds_init (&new_name, 128);
--
- /* Open files for interactive communication. */
- tty_in = fopen (TTY_NAME, "r");
- if (tty_in == NULL)
-@@ -925,7 +923,7 @@ get_next_reel (int tape_des)
- error (PAXEXIT_FAILURE, 0, _("internal error: tape descriptor changed from %d to %d"),
- old_tape_des, tape_des);
-
-- free (new_name.ds_string);
-+ ds_free (&new_name);
- fclose (tty_in);
- fclose (tty_out);
- }
---
-2.25.1
-
-
-From fb7a51bf85b8e6f045cacb4fb783db4a414741bf Mon Sep 17 00:00:00 2001
-From: Sergey Poznyakoff <gray@gnu.org>
-Date: Wed, 11 Aug 2021 18:10:38 +0300
-Subject: [PATCH 2/3] Fix previous commit
-
-* src/dstring.c (ds_reset,ds_concat): Don't call ds_resize in a
-loop.
----
- src/dstring.c | 4 ++--
- 1 file changed, 2 insertions(+), 2 deletions(-)
-
-diff --git a/src/dstring.c b/src/dstring.c
-index 358f356..90c691c 100644
---- a/src/dstring.c
-+++ b/src/dstring.c
-@@ -64,7 +64,7 @@ void
- ds_reset (dynamic_string *s, size_t len)
- {
- while (len > s->ds_size)
-- ds_resize (s);
-+ s->ds_string = x2nrealloc (s->ds_string, &s->ds_size, 1);
- s->ds_idx = len;
- }
-
-@@ -116,7 +116,7 @@ ds_concat (dynamic_string *s, char const *str)
- {
- size_t len = strlen (str);
- while (len + 1 > s->ds_size)
-- ds_resize (s);
-+ s->ds_string = x2nrealloc (s->ds_string, &s->ds_size, 1);
- memcpy (s->ds_string + s->ds_idx, str, len);
- s->ds_idx += len;
- s->ds_string[s->ds_idx] = 0;
---
-2.25.1
-
-
-From 86b37d74b15f9bb5fe62fd1642cc126d3ace0189 Mon Sep 17 00:00:00 2001
-From: Sergey Poznyakoff <gray@gnu.org>
-Date: Wed, 18 Aug 2021 09:41:39 +0300
-Subject: [PATCH 3/3] Fix dynamic string reallocations
-
-* src/dstring.c (ds_resize): Take additional argument: number of
-bytes to leave available after ds_idx. All uses changed.
----
- src/dstring.c | 18 ++++++++----------
- 1 file changed, 8 insertions(+), 10 deletions(-)
-
-diff --git a/src/dstring.c b/src/dstring.c
-index 90c691c..0f597cc 100644
---- a/src/dstring.c
-+++ b/src/dstring.c
-@@ -49,9 +49,9 @@ ds_free (dynamic_string *string)
- /* Expand dynamic string STRING, if necessary. */
-
- void
--ds_resize (dynamic_string *string)
-+ds_resize (dynamic_string *string, size_t len)
- {
-- if (string->ds_idx == string->ds_size)
-+ while (len + string->ds_idx >= string->ds_size)
- {
- string->ds_string = x2nrealloc (string->ds_string, &string->ds_size,
- 1);
-@@ -63,8 +63,7 @@ ds_resize (dynamic_string *string)
- void
- ds_reset (dynamic_string *s, size_t len)
- {
-- while (len > s->ds_size)
-- s->ds_string = x2nrealloc (s->ds_string, &s->ds_size, 1);
-+ ds_resize (s, len);
- s->ds_idx = len;
- }
-
-@@ -86,10 +85,10 @@ ds_fgetstr (FILE *f, dynamic_string *s, char eos)
- /* Read the input string. */
- while ((next_ch = getc (f)) != eos && next_ch != EOF)
- {
-- ds_resize (s);
-+ ds_resize (s, 0);
- s->ds_string[s->ds_idx++] = next_ch;
- }
-- ds_resize (s);
-+ ds_resize (s, 0);
- s->ds_string[s->ds_idx] = '\0';
-
- if (s->ds_idx == 0 && next_ch == EOF)
-@@ -101,12 +100,12 @@ ds_fgetstr (FILE *f, dynamic_string *s, char eos)
- void
- ds_append (dynamic_string *s, int c)
- {
-- ds_resize (s);
-+ ds_resize (s, 0);
- s->ds_string[s->ds_idx] = c;
- if (c)
- {
- s->ds_idx++;
-- ds_resize (s);
-+ ds_resize (s, 0);
- s->ds_string[s->ds_idx] = 0;
- }
- }
-@@ -115,8 +114,7 @@ void
- ds_concat (dynamic_string *s, char const *str)
- {
- size_t len = strlen (str);
-- while (len + 1 > s->ds_size)
-- s->ds_string = x2nrealloc (s->ds_string, &s->ds_size, 1);
-+ ds_resize (s, len);
- memcpy (s->ds_string + s->ds_idx, str, len);
- s->ds_idx += len;
- s->ds_string[s->ds_idx] = 0;
---
-2.25.1
-
diff --git a/meta/recipes-extended/cpio/cpio_2.13.bb b/meta/recipes-extended/cpio/cpio_2.13.bb
deleted file mode 100644
index e72a114de9..0000000000
--- a/meta/recipes-extended/cpio/cpio_2.13.bb
+++ /dev/null
@@ -1,55 +0,0 @@
-SUMMARY = "GNU cpio is a program to manage archives of files"
-DESCRIPTION = "GNU cpio is a tool for creating and extracting archives, or copying files from one place to \
-another. It handles a number of cpio formats as well as reading and writing tar files."
-HOMEPAGE = "http://www.gnu.org/software/cpio/"
-SECTION = "base"
-LICENSE = "GPL-3.0-only"
-LIC_FILES_CHKSUM = "file://COPYING;md5=f27defe1e96c2e1ecd4e0c9be8967949"
-
-SRC_URI = "${GNU_MIRROR}/cpio/cpio-${PV}.tar.gz \
- file://0001-Unset-need_charset_alias-when-building-for-musl.patch \
- file://0002-src-global.c-Remove-superfluous-declaration-of-progr.patch \
- file://CVE-2021-38185.patch \
- "
-
-SRC_URI[md5sum] = "389c5452d667c23b5eceb206f5000810"
-SRC_URI[sha256sum] = "e87470d9c984317f658567c03bfefb6b0c829ff17dbf6b0de48d71a4c8f3db88"
-
-inherit autotools gettext texinfo
-
-# Issue applies to use of cpio in SUSE/OBS, doesn't apply to us
-CVE_CHECK_IGNORE += "CVE-2010-4226"
-
-EXTRA_OECONF += "DEFAULT_RMT_DIR=${sbindir}"
-
-do_install () {
- autotools_do_install
- if [ "${base_bindir}" != "${bindir}" ]; then
- install -d ${D}${base_bindir}/
- mv "${D}${bindir}/cpio" "${D}${base_bindir}/cpio"
- if [ "${sbindir}" != "${bindir}" ]; then
- rmdir ${D}${bindir}/
- fi
- fi
-
- # Avoid conflicts with the version from tar
- mv "${D}${mandir}/man8/rmt.8" "${D}${mandir}/man8/rmt-cpio.8"
-}
-
-PACKAGES =+ "${PN}-rmt"
-
-FILES:${PN}-rmt = "${sbindir}/rmt*"
-
-inherit update-alternatives
-
-ALTERNATIVE_PRIORITY = "100"
-
-ALTERNATIVE:${PN} = "cpio"
-ALTERNATIVE:${PN}-rmt = "rmt"
-
-ALTERNATIVE_LINK_NAME[cpio] = "${base_bindir}/cpio"
-
-ALTERNATIVE_PRIORITY[rmt] = "50"
-ALTERNATIVE_LINK_NAME[rmt] = "${sbindir}/rmt"
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-extended/cpio/cpio_2.15.bb b/meta/recipes-extended/cpio/cpio_2.15.bb
new file mode 100644
index 0000000000..52070f59a2
--- /dev/null
+++ b/meta/recipes-extended/cpio/cpio_2.15.bb
@@ -0,0 +1,87 @@
+SUMMARY = "GNU cpio is a program to manage archives of files"
+DESCRIPTION = "GNU cpio is a tool for creating and extracting archives, or copying files from one place to \
+another. It handles a number of cpio formats as well as reading and writing tar files."
+HOMEPAGE = "http://www.gnu.org/software/cpio/"
+SECTION = "base"
+LICENSE = "GPL-3.0-only"
+LIC_FILES_CHKSUM = "file://COPYING;md5=f27defe1e96c2e1ecd4e0c9be8967949"
+
+SRC_URI = "${GNU_MIRROR}/cpio/cpio-${PV}.tar.gz \
+ file://run-ptest \
+ file://test.sh \
+ "
+
+SRC_URI[sha256sum] = "efa50ef983137eefc0a02fdb51509d624b5e3295c980aa127ceee4183455499e"
+
+inherit autotools gettext texinfo ptest
+
+CVE_STATUS[CVE-2010-4226] = "not-applicable-platform: Issue applies to use of cpio in SUSE/OBS"
+
+EXTRA_OECONF += "DEFAULT_RMT_DIR=${sbindir}"
+
+do_install () {
+ autotools_do_install
+ if [ "${base_bindir}" != "${bindir}" ]; then
+ install -d ${D}${base_bindir}/
+ mv "${D}${bindir}/cpio" "${D}${base_bindir}/cpio"
+ if [ "${sbindir}" != "${bindir}" ]; then
+ rmdir ${D}${bindir}/
+ fi
+ fi
+
+ # Avoid conflicts with the version from tar
+ mv "${D}${mandir}/man8/rmt.8" "${D}${mandir}/man8/rmt-cpio.8"
+}
+
+do_compile_ptest() {
+ oe_runmake -C ${B}/gnu/ check
+ oe_runmake -C ${B}/lib/ check
+ oe_runmake -C ${B}/rmt/ check
+ oe_runmake -C ${B}/src/ check
+ oe_runmake -C ${B}/tests/ genfile
+}
+
+do_install_ptest() {
+ install -d ${D}${PTEST_PATH}/tests/
+ sed -i "/abs_/d" ${B}/tests/atconfig
+ install --mode=755 ${B}/tests/atconfig ${D}${PTEST_PATH}/tests/
+ sed -i "s%${B}/tests:%%g" ${B}/tests/atlocal
+ sed -i "s%${B}/src:%%g" ${B}/tests/atlocal
+ install --mode=755 ${B}/tests/atlocal ${D}${PTEST_PATH}/tests/
+ install --mode=755 ${B}/tests/genfile ${D}${PTEST_PATH}/tests/
+ install --mode=755 ${S}/tests/testsuite ${D}${PTEST_PATH}/tests/
+ install --mode=755 ${WORKDIR}/test.sh ${D}${PTEST_PATH}/test.sh
+ sed -i "s#@PTEST_PATH@#${PTEST_PATH}#g" ${D}${PTEST_PATH}/test.sh
+}
+
+# ptest.bbclass currently chowns the ptest directory explicitly, so we need to
+# change permission after that has happened so the ptest user can write a
+# temporary directory.
+do_install_ptest_base:append() {
+ chgrp -R ptest ${D}${PTEST_PATH}/
+ chmod -R g+w ${D}${PTEST_PATH}/
+}
+
+# The tests need to run as a non-root user, so pull in the ptest user
+DEPENDS:append:class-target = "${@bb.utils.contains('PTEST_ENABLED', '1', ' ptest-runner', '', d)}"
+PACKAGE_WRITE_DEPS += "ptest-runner"
+
+RDEPENDS:${PN}-ptest += "ptest-runner"
+
+PACKAGES =+ "${PN}-rmt"
+
+FILES:${PN}-rmt = "${sbindir}/rmt*"
+
+inherit update-alternatives
+
+ALTERNATIVE_PRIORITY = "100"
+
+ALTERNATIVE:${PN} = "cpio"
+ALTERNATIVE:${PN}-rmt = "rmt"
+
+ALTERNATIVE_LINK_NAME[cpio] = "${base_bindir}/cpio"
+
+ALTERNATIVE_PRIORITY[rmt] = "50"
+ALTERNATIVE_LINK_NAME[rmt] = "${sbindir}/rmt"
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-extended/cpio/files/run-ptest b/meta/recipes-extended/cpio/files/run-ptest
new file mode 100755
index 0000000000..f35a756d6b
--- /dev/null
+++ b/meta/recipes-extended/cpio/files/run-ptest
@@ -0,0 +1,3 @@
+#!/bin/sh
+
+su -c ./test.sh ptest
diff --git a/meta/recipes-extended/cpio/files/test.sh b/meta/recipes-extended/cpio/files/test.sh
new file mode 100644
index 0000000000..f027574e86
--- /dev/null
+++ b/meta/recipes-extended/cpio/files/test.sh
@@ -0,0 +1,10 @@
+#!/bin/sh
+
+# Define cpio test work dir
+WORKDIR=@PTEST_PATH@/tests/
+
+# Run test
+cd ${WORKDIR}
+./atconfig ./atlocal ./testsuite
+
+./testsuite 2>&1 | grep -E '[0-9]{1,3}: ' | sed -e 's/^.....//' -e '/[ok]$/s/^/PASS: /;/FAILED (.*)/s/^/FAIL: /;/skipped (.*)/s/^/SKIP: /;/expected failure/ s/^/PASS: /;/UNEXPECTED PASS/s/^/FAIL: /' -e 's/ok$//g' -e 's/FAILED.*//g' -e 's/skipped.*//g' -e 's/expected failure.*//g' -e 's/UNEXPECTED PASS.*//g'
diff --git a/meta/recipes-extended/cracklib/cracklib/0001-packlib.c-support-dictionary-byte-order-dependent.patch b/meta/recipes-extended/cracklib/cracklib/0001-packlib.c-support-dictionary-byte-order-dependent.patch
index 8fb512a224..20572b55c4 100644
--- a/meta/recipes-extended/cracklib/cracklib/0001-packlib.c-support-dictionary-byte-order-dependent.patch
+++ b/meta/recipes-extended/cracklib/cracklib/0001-packlib.c-support-dictionary-byte-order-dependent.patch
@@ -26,7 +26,7 @@ Signed-off-by: Lei Maohui <leimaohui@cn.fujitsu.com>
1 file changed, 210 insertions(+), 4 deletions(-)
diff --git a/lib/packlib.c b/lib/packlib.c
-index 8acb7be..a9d8750 100644
+index 9396e1d..d0bb181 100644
--- a/lib/packlib.c
+++ b/lib/packlib.c
@@ -16,6 +16,12 @@
@@ -41,8 +41,8 @@ index 8acb7be..a9d8750 100644
+#include <byteswap.h>
#include "packer.h"
- static const char vers_id[] = "packlib.c : v2.3p2 Alec Muffett 18 May 1993";
-@@ -45,6 +51,185 @@ typedef struct
+ #define DEBUG 0
+@@ -43,6 +49,185 @@ typedef struct
char data_get[NUMWORDS][MAXWORDLEN];
} PWDICT64;
@@ -228,7 +228,7 @@ index 8acb7be..a9d8750 100644
static int
_PWIsBroken64(FILE *ifp)
-@@ -57,6 +242,7 @@ _PWIsBroken64(FILE *ifp)
+@@ -55,6 +240,7 @@ _PWIsBroken64(FILE *ifp)
return 0;
}
@@ -236,7 +236,7 @@ index 8acb7be..a9d8750 100644
return (pdesc64.header.pih_magic == PIH_MAGIC);
}
-@@ -149,7 +335,11 @@ PWOpen(prefix, mode)
+@@ -147,7 +333,11 @@ PWOpen(prefix, mode)
pdesc.header.pih_blocklen = NUMWORDS;
pdesc.header.pih_numwords = 0;
@@ -249,7 +249,7 @@ index 8acb7be..a9d8750 100644
} else
{
pdesc.flags &= ~PFOR_WRITE;
-@@ -173,6 +363,7 @@ PWOpen(prefix, mode)
+@@ -171,6 +361,7 @@ PWOpen(prefix, mode)
return NULL;
}
@@ -257,7 +257,7 @@ index 8acb7be..a9d8750 100644
if ((pdesc.header.pih_magic == 0) || (pdesc.header.pih_numwords == 0))
{
/* uh-oh. either a broken "64-bit" file or a garbage file. */
-@@ -195,6 +386,7 @@ PWOpen(prefix, mode)
+@@ -193,6 +384,7 @@ PWOpen(prefix, mode)
}
return NULL;
}
@@ -265,7 +265,7 @@ index 8acb7be..a9d8750 100644
if (pdesc64.header.pih_magic != PIH_MAGIC)
{
/* nope, not "64-bit" after all */
-@@ -290,6 +482,7 @@ PWOpen(prefix, mode)
+@@ -288,6 +480,7 @@ PWOpen(prefix, mode)
{
pdesc.flags &= ~PFOR_USEHWMS;
}
@@ -273,7 +273,7 @@ index 8acb7be..a9d8750 100644
for (i = 0; i < sizeof(pdesc.hwms) / sizeof(pdesc.hwms[0]); i++)
{
pdesc.hwms[i] = pdesc64.hwms[i];
-@@ -299,6 +492,7 @@ PWOpen(prefix, mode)
+@@ -297,6 +490,7 @@ PWOpen(prefix, mode)
{
pdesc.flags &= ~PFOR_USEHWMS;
}
@@ -281,7 +281,7 @@ index 8acb7be..a9d8750 100644
#if DEBUG
for (i=1; i<=0xff; i++)
{
-@@ -332,7 +526,11 @@ PWClose(pwp)
+@@ -330,7 +524,11 @@ PWClose(pwp)
return (-1);
}
@@ -294,7 +294,7 @@ index 8acb7be..a9d8750 100644
{
fprintf(stderr, "index magic fwrite failed\n");
return (-1);
-@@ -351,7 +549,12 @@ PWClose(pwp)
+@@ -349,7 +547,12 @@ PWClose(pwp)
printf("hwm[%02x] = %d\n", i, pwp->hwms[i]);
#endif
}
@@ -308,7 +308,7 @@ index 8acb7be..a9d8750 100644
}
}
-@@ -405,7 +608,8 @@ PutPW(pwp, string)
+@@ -403,7 +606,8 @@ PutPW(pwp, string)
datum = (uint32_t) ftell(pwp->dfp);
@@ -318,7 +318,7 @@ index 8acb7be..a9d8750 100644
fputs(pwp->data_put[0], pwp->dfp);
putc(0, (FILE*) pwp->dfp);
-@@ -464,6 +668,7 @@ GetPW(pwp, number)
+@@ -462,6 +666,7 @@ GetPW(pwp, number)
perror("(index fread failed)");
return NULL;
}
@@ -326,7 +326,7 @@ index 8acb7be..a9d8750 100644
datum = datum64;
} else {
if (fseek(pwp->ifp, sizeof(struct pi_header) + (thisblock * sizeof(uint32_t)), 0))
-@@ -477,6 +682,7 @@ GetPW(pwp, number)
+@@ -475,6 +680,7 @@ GetPW(pwp, number)
perror("(index fread failed)");
return NULL;
}
diff --git a/meta/recipes-extended/cracklib/cracklib/0002-craklib-fix-testnum-and-teststr-failed.patch b/meta/recipes-extended/cracklib/cracklib/0002-craklib-fix-testnum-and-teststr-failed.patch
deleted file mode 100644
index 1ee97357d0..0000000000
--- a/meta/recipes-extended/cracklib/cracklib/0002-craklib-fix-testnum-and-teststr-failed.patch
+++ /dev/null
@@ -1,53 +0,0 @@
-From 7250328d7f77069726603ef7132826c9260d3c92 Mon Sep 17 00:00:00 2001
-From: Hongxu Jia <hongxu.jia@windriver.com>
-Date: Sat, 27 Apr 2013 16:02:30 +0800
-Subject: [PATCH 2/2] craklib:fix testnum and teststr failed
-
-Error log:
-...
-$ ./testnum
-(null).pwd.gz: No such file or directory
-PWOpen: No such file or directory
-
-$ ./util/teststr
-(null).pwd.gz: No such file or directory
-PWOpen: No such file or directory
-...
-Set DEFAULT_CRACKLIB_DICT as the path of PWOpen
-
-Signed-off-by: Hongxu Jia <hongxu.jia@windriver.com>
-Upstream-Status: Submitted [https://github.com/cracklib/cracklib/pull/42]
----
- util/testnum.c | 2 +-
- util/teststr.c | 2 +-
- 2 files changed, 2 insertions(+), 2 deletions(-)
-
-diff --git a/util/testnum.c b/util/testnum.c
-index ae2246d..ca210ff 100644
---- a/util/testnum.c
-+++ b/util/testnum.c
-@@ -20,7 +20,7 @@ main ()
- PWDICT *pwp;
- char buffer[STRINGSIZE];
-
-- if (!(pwp = PWOpen (NULL, "r")))
-+ if (!(pwp = PWOpen (DEFAULT_CRACKLIB_DICT, "r")))
- {
- perror ("PWOpen");
- return (-1);
-diff --git a/util/teststr.c b/util/teststr.c
-index 2a31fa4..9fb9cda 100644
---- a/util/teststr.c
-+++ b/util/teststr.c
-@@ -15,7 +15,7 @@ main ()
- PWDICT *pwp;
- char buffer[STRINGSIZE];
-
-- if (!(pwp = PWOpen (NULL, "r")))
-+ if (!(pwp = PWOpen (DEFAULT_CRACKLIB_DICT, "r")))
- {
- perror ("PWOpen");
- return (-1);
---
-2.20.1
-
diff --git a/meta/recipes-extended/cracklib/cracklib_2.9.11.bb b/meta/recipes-extended/cracklib/cracklib_2.9.11.bb
new file mode 100644
index 0000000000..34ef2b65a1
--- /dev/null
+++ b/meta/recipes-extended/cracklib/cracklib_2.9.11.bb
@@ -0,0 +1,33 @@
+SUMMARY = "Password strength checker library"
+HOMEPAGE = "https://github.com/cracklib/cracklib"
+DESCRIPTION = "${SUMMARY}"
+
+LICENSE = "LGPL-2.1-or-later"
+LIC_FILES_CHKSUM = "file://COPYING.LIB;md5=e3eda01d9815f8d24aae2dbd89b68b06"
+
+DEPENDS = "cracklib-native zlib"
+
+EXTRA_OECONF = "--without-python --libdir=${base_libdir}"
+
+SRC_URI = "git://github.com/cracklib/cracklib;protocol=https;branch=main \
+ file://0001-packlib.c-support-dictionary-byte-order-dependent.patch \
+ "
+
+SRCREV = "4cf5125250c6325ef0a2dc085eabff875227edc3"
+S = "${WORKDIR}/git/src"
+
+inherit autotools gettext
+
+# This is custom stuff from upstream's autogen.sh
+do_configure:prepend() {
+ mkdir -p ${S}/m4
+ echo EXTRA_DIST = *.m4 > ${S}/m4/Makefile.am
+ touch ${S}/ABOUT-NLS
+}
+
+do_install:append:class-target() {
+ create-cracklib-dict -o ${D}${datadir}/cracklib/pw_dict ${D}${datadir}/cracklib/cracklib-small
+}
+
+BBCLASSEXTEND = "native nativesdk"
+
diff --git a/meta/recipes-extended/cracklib/cracklib_2.9.7.bb b/meta/recipes-extended/cracklib/cracklib_2.9.7.bb
deleted file mode 100644
index 629069e844..0000000000
--- a/meta/recipes-extended/cracklib/cracklib_2.9.7.bb
+++ /dev/null
@@ -1,33 +0,0 @@
-SUMMARY = "Password strength checker library"
-HOMEPAGE = "https://github.com/cracklib/cracklib"
-DESCRIPTION = "${SUMMARY}"
-
-LICENSE = "LGPL-2.1-or-later"
-LIC_FILES_CHKSUM = "file://COPYING.LIB;md5=e3eda01d9815f8d24aae2dbd89b68b06"
-
-DEPENDS = "cracklib-native zlib"
-
-EXTRA_OECONF = "--without-python --libdir=${base_libdir}"
-
-SRC_URI = "git://github.com/cracklib/cracklib;protocol=https;branch=master \
- file://0001-packlib.c-support-dictionary-byte-order-dependent.patch \
- file://0002-craklib-fix-testnum-and-teststr-failed.patch"
-
-SRCREV = "f83934cf3cced0c9600c7d81332f4169f122a2cf"
-S = "${WORKDIR}/git/src"
-
-inherit autotools gettext
-
-# This is custom stuff from upstream's autogen.sh
-do_configure:prepend() {
- mkdir -p ${S}/m4
- echo EXTRA_DIST = *.m4 > ${S}/m4/Makefile.am
- touch ${S}/ABOUT-NLS
-}
-
-do_install:append:class-target() {
- create-cracklib-dict -o ${D}${datadir}/cracklib/pw_dict ${D}${datadir}/cracklib/cracklib-small
-}
-
-BBCLASSEXTEND = "native nativesdk"
-
diff --git a/meta/recipes-extended/cronie/cronie/crond_pam_config.patch b/meta/recipes-extended/cronie/cronie/crond_pam_config.patch
index c374790d1d..464d1470e3 100644
--- a/meta/recipes-extended/cronie/cronie/crond_pam_config.patch
+++ b/meta/recipes-extended/cronie/cronie/crond_pam_config.patch
@@ -1,9 +1,19 @@
+From f5b325cba73018e5be984570fd4e680e59e7865d Mon Sep 17 00:00:00 2001
+From: Wenzong Fan <wenzong.fan@windriver.com>
+Date: Wed, 20 Jul 2011 02:42:28 +0000
+Subject: [PATCH] cronie: enable PAM support for cronie
+
password-auth is the Fedora's common pam configure file, use oe common pam
configure files instead.
Upstream-Status: Pending
Signed-off-by: Wenzong Fan <wenzong.fan@windriver.com>
+
+---
+ pam/crond | 6 +++---
+ 1 file changed, 3 insertions(+), 3 deletions(-)
+
diff --git a/pam/crond b/pam/crond
index 560529d..95a6457 100644
--- a/pam/crond
diff --git a/meta/recipes-extended/cronie/cronie_1.6.1.bb b/meta/recipes-extended/cronie/cronie_1.6.1.bb
deleted file mode 100644
index 6d150dd3c7..0000000000
--- a/meta/recipes-extended/cronie/cronie_1.6.1.bb
+++ /dev/null
@@ -1,83 +0,0 @@
-SUMMARY = "Cron daemon for executing programs at set times"
-DESCRIPTION = "Cronie contains the standard UNIX daemon crond that runs \
-specified programs at scheduled times and related tools. It is based on the \
-original cron and has security and configuration enhancements like the \
-ability to use pam and SELinux."
-HOMEPAGE = "https://github.com/cronie-crond/cronie/"
-BUGTRACKER = "https://bugzilla.redhat.com"
-
-# Internet Systems Consortium License
-LICENSE = "ISC & BSD-3-Clause & BSD-2-Clause & GPL-2.0-or-later"
-LIC_FILES_CHKSUM = "file://COPYING;md5=dd2a592170760e1386c769e1043b3722 \
- file://src/cron.c;endline=20;md5=b425c334265026177128353a142633b4 \
- file://src/popen.c;beginline=3;endline=31;md5=edd50742d8def712e9472dba353668a9"
-
-SECTION = "utils"
-
-UPSTREAM_CHECK_URI = "https://github.com/cronie-crond/${BPN}/releases/"
-
-SRC_URI = "https://github.com/cronie-crond/cronie/releases/download/cronie-${PV}/cronie-${PV}.tar.gz \
- file://crond.init \
- file://crontab \
- file://crond.service \
- ${@bb.utils.contains('DISTRO_FEATURES', 'pam', '${PAM_SRC_URI}', '', d)}"
-
-PAM_SRC_URI = "file://crond_pam_config.patch"
-PAM_DEPS = "libpam libpam-runtime pam-plugin-access pam-plugin-loginuid"
-
-SRC_URI[sha256sum] = "2cd0f0dd1680e6b9c39bf1e3a5e7ad6df76aa940de1ee90a453633aa59984e62"
-
-inherit autotools update-rc.d useradd systemd
-
-PACKAGECONFIG ?= "${@bb.utils.filter('DISTRO_FEATURES', 'pam', d)}"
-
-PACKAGECONFIG[audit] = "--with-audit,--without-audit,audit,"
-PACKAGECONFIG[pam] = "--with-pam,--without-pam,libpam,${PAM_DEPS}"
-PACKAGECONFIG[anacron] = "--enable-anacron,--disable-anacron,anacron"
-
-INITSCRIPT_NAME = "crond"
-INITSCRIPT_PARAMS = "start 90 2 3 4 5 . stop 60 0 1 6 ."
-
-USERADD_PACKAGES = "${PN}"
-GROUPADD_PARAM:${PN} = "--system crontab"
-
-SYSTEMD_SERVICE:${PN} = "crond.service"
-
-do_install:append () {
- install -d ${D}${sysconfdir}/sysconfig/
- install -d ${D}${sysconfdir}/init.d/
- install -m 0644 ${S}/crond.sysconfig ${D}${sysconfdir}/sysconfig/crond
- install -m 0755 ${WORKDIR}/crond.init ${D}${sysconfdir}/init.d/crond
-
- # install systemd unit files
- install -d ${D}${systemd_system_unitdir}
- install -m 0644 ${WORKDIR}/crond.service ${D}${systemd_system_unitdir}
- sed -i -e 's,@BASE_BINDIR@,${base_bindir},g' \
- -e 's,@SBINDIR@,${sbindir},g' \
- ${D}${systemd_system_unitdir}/crond.service
-
- # below are necessary for a complete cron environment
- install -d ${D}${localstatedir}/spool/cron
- install -m 0755 ${WORKDIR}/crontab ${D}${sysconfdir}/
- mkdir -p ${D}${sysconfdir}/cron.d
- mkdir -p ${D}${sysconfdir}/cron.hourly
- mkdir -p ${D}${sysconfdir}/cron.daily
- mkdir -p ${D}${sysconfdir}/cron.weekly
- mkdir -p ${D}${sysconfdir}/cron.monthly
- touch ${D}${sysconfdir}/cron.deny
-
- # below setting is necessary to allow normal user using crontab
-
- # setgid for crontab binary
- chown root:crontab ${D}${bindir}/crontab
- chmod 2755 ${D}${bindir}/crontab
-
- # allow 'crontab' group write to /var/spool/cron
- chown root:crontab ${D}${localstatedir}/spool/cron
- chmod 770 ${D}${localstatedir}/spool/cron
-
- chmod 600 ${D}${sysconfdir}/crontab
-}
-
-FILES:${PN} += "${sysconfdir}/cron*"
-CONFFILES:${PN} += "${sysconfdir}/crontab"
diff --git a/meta/recipes-extended/cronie/cronie_1.7.1.bb b/meta/recipes-extended/cronie/cronie_1.7.1.bb
new file mode 100644
index 0000000000..854b68163c
--- /dev/null
+++ b/meta/recipes-extended/cronie/cronie_1.7.1.bb
@@ -0,0 +1,85 @@
+SUMMARY = "Cron daemon for executing programs at set times"
+DESCRIPTION = "Cronie contains the standard UNIX daemon crond that runs \
+specified programs at scheduled times and related tools. It is based on the \
+original cron and has security and configuration enhancements like the \
+ability to use pam and SELinux."
+HOMEPAGE = "https://github.com/cronie-crond/cronie/"
+BUGTRACKER = "https://bugzilla.redhat.com"
+
+# Internet Systems Consortium License
+LICENSE = "ISC & BSD-3-Clause & BSD-2-Clause & GPL-2.0-or-later"
+LIC_FILES_CHKSUM = "file://COPYING;md5=dd2a592170760e1386c769e1043b3722 \
+ file://src/cron.c;endline=20;md5=b425c334265026177128353a142633b4 \
+ file://src/popen.c;beginline=3;endline=31;md5=edd50742d8def712e9472dba353668a9"
+
+SECTION = "utils"
+
+GITHUB_BASE_URI = "https://github.com/cronie-crond/${BPN}/releases/"
+
+SRC_URI = "${GITHUB_BASE_URI}/download/cronie-${PV}/cronie-${PV}.tar.gz \
+ file://crond.init \
+ file://crontab \
+ file://crond.service \
+ ${@bb.utils.contains('DISTRO_FEATURES', 'pam', '${PAM_SRC_URI}', '', d)}"
+
+PAM_SRC_URI = "file://crond_pam_config.patch"
+PAM_DEPS = "libpam libpam-runtime pam-plugin-access pam-plugin-loginuid"
+
+SRC_URI[sha256sum] = "78033100c24413f0c40f93e6138774d6a4f55bc31050567b90db45a2f9f1b954"
+
+inherit autotools update-rc.d useradd systemd github-releases
+UPSTREAM_CHECK_REGEX = "releases/tag/cronie-(?P<pver>\d+(\.\d+)+)"
+
+PACKAGECONFIG ?= "${@bb.utils.filter('DISTRO_FEATURES', 'pam', d)}"
+
+PACKAGECONFIG[audit] = "--with-audit,--without-audit,audit,"
+PACKAGECONFIG[pam] = "--with-pam,--without-pam,libpam,${PAM_DEPS}"
+PACKAGECONFIG[anacron] = "--enable-anacron,--disable-anacron,anacron"
+PACKAGECONFIG[selinux] = "--with-selinux,--without-selinux,libselinux"
+
+INITSCRIPT_NAME = "crond"
+INITSCRIPT_PARAMS = "start 90 2 3 4 5 . stop 60 0 1 6 ."
+
+USERADD_PACKAGES = "${PN}"
+GROUPADD_PARAM:${PN} = "--system crontab"
+
+SYSTEMD_SERVICE:${PN} = "crond.service"
+
+do_install:append () {
+ install -d ${D}${sysconfdir}/sysconfig/
+ install -d ${D}${sysconfdir}/init.d/
+ install -m 0644 ${S}/crond.sysconfig ${D}${sysconfdir}/sysconfig/crond
+ install -m 0755 ${WORKDIR}/crond.init ${D}${sysconfdir}/init.d/crond
+
+ # install systemd unit files
+ install -d ${D}${systemd_system_unitdir}
+ install -m 0644 ${WORKDIR}/crond.service ${D}${systemd_system_unitdir}
+ sed -i -e 's,@BASE_BINDIR@,${base_bindir},g' \
+ -e 's,@SBINDIR@,${sbindir},g' \
+ ${D}${systemd_system_unitdir}/crond.service
+
+ # below are necessary for a complete cron environment
+ install -d ${D}${localstatedir}/spool/cron
+ install -m 0755 ${WORKDIR}/crontab ${D}${sysconfdir}/
+ mkdir -p ${D}${sysconfdir}/cron.d
+ mkdir -p ${D}${sysconfdir}/cron.hourly
+ mkdir -p ${D}${sysconfdir}/cron.daily
+ mkdir -p ${D}${sysconfdir}/cron.weekly
+ mkdir -p ${D}${sysconfdir}/cron.monthly
+ touch ${D}${sysconfdir}/cron.deny
+
+ # below setting is necessary to allow normal user using crontab
+
+ # setgid for crontab binary
+ chown root:crontab ${D}${bindir}/crontab
+ chmod 2755 ${D}${bindir}/crontab
+
+ # allow 'crontab' group write to /var/spool/cron
+ chown root:crontab ${D}${localstatedir}/spool/cron
+ chmod 770 ${D}${localstatedir}/spool/cron
+
+ chmod 600 ${D}${sysconfdir}/crontab
+}
+
+FILES:${PN} += "${sysconfdir}/cron*"
+CONFFILES:${PN} += "${sysconfdir}/crontab"
diff --git a/meta/recipes-extended/cups/cups.inc b/meta/recipes-extended/cups/cups.inc
index 8f2ad8a009..b70ba3ae58 100644
--- a/meta/recipes-extended/cups/cups.inc
+++ b/meta/recipes-extended/cups/cups.inc
@@ -8,7 +8,7 @@ SECTION = "console/utils"
LICENSE = "Apache-2.0"
DEPENDS = "libpng jpeg dbus zlib libusb1"
-SRC_URI = "https://github.com/OpenPrinting/cups/releases/download/v${PV}/cups-${PV}-source.tar.gz \
+SRC_URI = "${GITHUB_BASE_URI}/download/v${PV}/cups-${PV}-source.tar.gz \
file://0001-use-echo-only-in-init.patch \
file://0002-don-t-try-to-run-generated-binaries.patch \
file://libexecdir.patch \
@@ -17,21 +17,19 @@ SRC_URI = "https://github.com/OpenPrinting/cups/releases/download/v${PV}/cups-${
file://cups-volatiles.conf \
"
-UPSTREAM_CHECK_URI = "https://github.com/OpenPrinting/cups/releases"
-UPSTREAM_CHECK_REGEX = "cups-(?P<pver>(?!.+\d(b|rc)\d.+).+)-source.tar"
+GITHUB_BASE_URI = "https://github.com/OpenPrinting/cups/releases"
-# Issue only applies to MacOS
-CVE_CHECK_IGNORE += "CVE-2008-1033"
-# Issue affects pdfdistiller plugin used with but not part of cups
-CVE_CHECK_IGNORE += "CVE-2009-0032"
-# This is an Ubuntu only issue.
-CVE_CHECK_IGNORE += "CVE-2018-6553"
+CVE_STATUS[CVE-2008-1033] = "not-applicable-platform: Issue only applies to MacOS"
+CVE_STATUS[CVE-2009-0032] = "cpe-incorrect: Issue affects pdfdistiller plugin used with but not part of cups"
+CVE_STATUS[CVE-2018-6553] = "not-applicable-platform: This is an Ubuntu only issue"
+CVE_STATUS[CVE-2022-26691] = "fixed-version: This is fixed in 2.4.2 but the cve-check class still reports it"
+CVE_STATUS[CVE-2021-25317] = "not-applicable-config: This concerns /var/log/cups having lp ownership, our /var/log/cups is root:root, so this doesn't apply."
LEAD_SONAME = "libcupsdriver.so"
CLEANBROKEN = "1"
-inherit autotools-brokensep binconfig useradd systemd pkgconfig multilib_script
+inherit autotools-brokensep binconfig useradd systemd pkgconfig multilib_script github-releases
USERADD_PACKAGES = "${PN}"
GROUPADD_PARAM:${PN} = "--system lpadmin"
@@ -39,13 +37,17 @@ GROUPADD_PARAM:${PN} = "--system lpadmin"
SYSTEMD_SERVICE:${PN} = "cups.socket cups.path cups.service cups-lpd.socket"
PACKAGECONFIG ??= "${@bb.utils.contains('DISTRO_FEATURES', 'zeroconf', 'avahi', '', d)} \
- ${@bb.utils.filter('DISTRO_FEATURES', 'pam systemd', d)}"
+ ${@bb.utils.filter('DISTRO_FEATURES', 'pam systemd', d)} \
+ openssl \
+"
PACKAGECONFIG[avahi] = "--with-dnssd=avahi,--with-dnssd=no,avahi"
PACKAGECONFIG[acl] = "--enable-acl,--disable-acl,acl"
-PACKAGECONFIG[gnutls] = "--with-tls=gnutls,--with-tls=no,gnutls"
+PACKAGECONFIG[gnutls] = "--with-tls=gnutls,,gnutls,,,openssl"
+PACKAGECONFIG[openssl] = "--with-tls=openssl,,openssl,,,gnutls"
PACKAGECONFIG[pam] = "--enable-pam --with-pam-module=unix, --disable-pam, libpam"
PACKAGECONFIG[systemd] = "--with-systemd=${systemd_system_unitdir},--without-systemd,systemd"
PACKAGECONFIG[xinetd] = "--with-xinetd=${sysconfdir}/xinetd.d,--without-xinetd,xinetd"
+PACKAGECONFIG[webif] = "--enable-webif,--disable-webif"
EXTRA_OECONF = " \
--enable-dbus \
@@ -55,7 +57,7 @@ EXTRA_OECONF = " \
--enable-debug \
--disable-relro \
--enable-libusb \
- --with-system-groups=lpadmin \
+ --with-system-groups=lpadmin,root,sys,wheel \
--with-cups-group=lp \
--with-domainsocket=/run/cups/cups.sock \
--with-pkgconfpath=${libdir}/pkgconfig \
@@ -65,7 +67,7 @@ EXTRA_OECONF = " \
EXTRA_AUTORECONF += "--exclude=autoheader"
do_install () {
- oe_runmake "DESTDIR=${D}" install
+ oe_runmake "BUILDROOT=${D}" install
# Remove /var/run from package as cupsd will populate it on startup
rm -fr ${D}/${localstatedir}/run
@@ -73,7 +75,7 @@ do_install () {
rmdir ${D}/${libexecdir}/${BPN}/driver
# Fix the pam configuration file permissions
- if ${@bb.utils.contains('DISTRO_FEATURES', 'pam', 'true', 'false', d)}; then
+ if ${@bb.utils.contains('PACKAGECONFIG', 'pam', 'true', 'false', d)}; then
chmod 0644 ${D}${sysconfdir}/pam.d/cups
fi
@@ -91,7 +93,7 @@ do_install () {
fi
}
-PACKAGES =+ "${PN}-lib ${PN}-libimage"
+PACKAGES =+ "${PN}-lib ${PN}-libimage ${PN}-webif"
RDEPENDS:${PN} += "${@bb.utils.contains('DISTRO_FEATURES', 'sysvinit', 'procps', '', d)}"
FILES:${PN} += "${libexecdir}/cups/"
@@ -100,22 +102,17 @@ FILES:${PN}-lib = "${libdir}/libcups.so.*"
FILES:${PN}-libimage = "${libdir}/libcupsimage.so.*"
-#package the html for the webgui inside the main packages (~1MB uncompressed)
+# put the html for the web interface into its own PACKAGE
+FILES:${PN}-webif += "${datadir}/doc/cups/ ${datadir}/icons/"
+RRECOMMENDS:${PN} += "${@bb.utils.contains('PACKAGECONFIG', 'webif', '${PN}-webif', '', d)}"
-FILES:${PN} += "${datadir}/doc/cups/images \
- ${datadir}/doc/cups/*html \
- ${datadir}/doc/cups/*.css \
- ${datadir}/icons/ \
- "
CONFFILES:${PN} += "${sysconfdir}/cups/cupsd.conf"
MULTILIB_SCRIPTS = "${PN}-dev:${bindir}/cups-config"
+LOCALE_PATHS += "${datadir}/cups/templates"
+
SYSROOT_PREPROCESS_FUNCS += "cups_sysroot_preprocess"
cups_sysroot_preprocess () {
sed -i ${SYSROOT_DESTDIR}${bindir_crossscripts}/cups-config -e 's:cups_datadir=.*:cups_datadir=${datadir}/cups:' -e 's:cups_serverbin=.*:cups_serverbin=${libexecdir}/cups:'
}
-
-# -25317 concerns /var/log/cups having lp ownership. Our /var/log/cups is
-# root:root, so this doesn't apply.
-CVE_CHECK_IGNORE += "CVE-2021-25317"
diff --git a/meta/recipes-extended/cups/cups_2.4.1.bb b/meta/recipes-extended/cups/cups_2.4.1.bb
deleted file mode 100644
index 27c88f82c7..0000000000
--- a/meta/recipes-extended/cups/cups_2.4.1.bb
+++ /dev/null
@@ -1,5 +0,0 @@
-require cups.inc
-
-LIC_FILES_CHKSUM = "file://LICENSE;md5=3b83ef96387f14655fc854ddc3c6bd57"
-
-SRC_URI[sha256sum] = "c7339f75f8d4f2dec50c673341a45fc06b6885bb6d4366d6bf59a4e6c10ae178"
diff --git a/meta/recipes-extended/cups/cups_2.4.7.bb b/meta/recipes-extended/cups/cups_2.4.7.bb
new file mode 100644
index 0000000000..f4b0282e4c
--- /dev/null
+++ b/meta/recipes-extended/cups/cups_2.4.7.bb
@@ -0,0 +1,5 @@
+require cups.inc
+
+LIC_FILES_CHKSUM = "file://LICENSE;md5=3b83ef96387f14655fc854ddc3c6bd57"
+
+SRC_URI[sha256sum] = "dd54228dd903526428ce7e37961afaed230ad310788141da75cebaa08362cf6c"
diff --git a/meta/recipes-extended/diffutils/diffutils/0001-Skip-strip-trailing-cr-test-case.patch b/meta/recipes-extended/diffutils/diffutils/0001-Skip-strip-trailing-cr-test-case.patch
index aac1c43465..32793233f9 100644
--- a/meta/recipes-extended/diffutils/diffutils/0001-Skip-strip-trailing-cr-test-case.patch
+++ b/meta/recipes-extended/diffutils/diffutils/0001-Skip-strip-trailing-cr-test-case.patch
@@ -1,4 +1,4 @@
-From bd7fb8be2ae2d75347cf7733302d5093046ffa85 Mon Sep 17 00:00:00 2001
+From f31395c931bc633206eccfcfaaaa5d15021a3e86 Mon Sep 17 00:00:00 2001
From: Peiran Hong <peiran.hong@windriver.com>
Date: Thu, 5 Sep 2019 15:42:22 -0400
Subject: [PATCH] Skip strip-trailing-cr test case
@@ -10,26 +10,20 @@ package.
Upstream-Status: Inappropriate [embedded specific]
Signed-off-by: Peiran Hong <peiran.hong@windriver.com>
+
---
- tests/Makefile.am | 4 +++-
- 1 file changed, 3 insertions(+), 1 deletion(-)
+ tests/Makefile.am | 1 -
+ 1 file changed, 1 deletion(-)
diff --git a/tests/Makefile.am b/tests/Makefile.am
-index 83a7c9d..04d51b5 100644
+index 79bacfb..4adb4d7 100644
--- a/tests/Makefile.am
+++ b/tests/Makefile.am
-@@ -21,8 +21,10 @@ TESTS = \
+@@ -22,7 +22,6 @@ TESTS = \
stdin \
strcoll-0-names \
filename-quoting \
- strip-trailing-cr \
- colors
-+# Skipping this test since it requires valgrind
-+# and thus is too heavy for diffutils package
-+# strip-trailing-cr
-
- XFAIL_TESTS = large-subopt
-
---
-2.21.0
-
+ timezone \
+ colors \
+ y2038-vs-32bit
diff --git a/meta/recipes-extended/diffutils/diffutils/0001-mcontext-is-not-a-standard-layout-so-glibc-and-musl-.patch b/meta/recipes-extended/diffutils/diffutils/0001-mcontext-is-not-a-standard-layout-so-glibc-and-musl-.patch
deleted file mode 100644
index 4928e1eaff..0000000000
--- a/meta/recipes-extended/diffutils/diffutils/0001-mcontext-is-not-a-standard-layout-so-glibc-and-musl-.patch
+++ /dev/null
@@ -1,33 +0,0 @@
-From f385ad6639380eb6dfa8b8eb4a5ba65dd12db744 Mon Sep 17 00:00:00 2001
-From: Khem Raj <raj.khem@gmail.com>
-Date: Fri, 25 Mar 2022 13:43:19 -0700
-Subject: [PATCH] mcontext is not a standard layout so glibc and musl differ
-
-This is already applied to libsigsegv upstream, hopefully next version
-of grep will update its internal copy and we can drop this patch
-
-Upstream-Status: Backport [https://git.savannah.gnu.org/gitweb/?p=libsigsegv.git;a=commitdiff;h=a6ff69873110c0a8ba6f7fd90532dbc11224828c]
-
-Signed-off-by: Khem Raj <raj.khem@gmail.com>
----
- lib/sigsegv.c | 4 ++--
- 1 file changed, 2 insertions(+), 2 deletions(-)
-
-diff --git a/lib/sigsegv.c b/lib/sigsegv.c
-index 998c827..b6f4841 100644
---- a/lib/sigsegv.c
-+++ b/lib/sigsegv.c
-@@ -219,8 +219,8 @@ int libsigsegv_version = LIBSIGSEGV_VERSION;
- # define SIGSEGV_FAULT_STACKPOINTER ((ucontext_t *) ucp)->uc_mcontext.gp_regs[1]
- # else /* 32-bit */
- /* both should be equivalent */
--# if 0
--# define SIGSEGV_FAULT_STACKPOINTER ((ucontext_t *) ucp)->uc_mcontext.regs->gpr[1]
-+# if ! defined __GLIBC__
-+# define SIGSEGV_FAULT_STACKPOINTER ((ucontext_t *) ucp)->uc_regs->gregs[1]
- # else
- # define SIGSEGV_FAULT_STACKPOINTER ((ucontext_t *) ucp)->uc_mcontext.uc_regs->gregs[1]
- # endif
---
-2.35.1
-
diff --git a/meta/recipes-extended/diffutils/diffutils_3.10.bb b/meta/recipes-extended/diffutils/diffutils_3.10.bb
new file mode 100644
index 0000000000..08e8305612
--- /dev/null
+++ b/meta/recipes-extended/diffutils/diffutils_3.10.bb
@@ -0,0 +1,43 @@
+LICENSE = "GPL-3.0-or-later"
+LIC_FILES_CHKSUM = "file://COPYING;md5=d32239bcb673463ab874e80d47fae504"
+
+require diffutils.inc
+
+SRC_URI = "${GNU_MIRROR}/diffutils/diffutils-${PV}.tar.xz \
+ file://run-ptest \
+ file://0001-Skip-strip-trailing-cr-test-case.patch \
+ "
+
+SRC_URI[sha256sum] = "90e5e93cc724e4ebe12ede80df1634063c7a855692685919bfe60b556c9bd09e"
+
+EXTRA_OECONF += "ac_cv_path_PR_PROGRAM=${bindir}/pr --without-libsigsegv-prefix"
+
+# latest gnulib is no longer able to handle this - I dare not try to fix that maze of abstractions and generators
+CFLAGS:mingw32 = " -DSA_RESTART=0"
+
+# Fix "Argument list too long" error when len(TMPDIR) = 410
+acpaths = "-I ./m4"
+
+EXTRA_OEMAKE:append:mingw32 = " LIBS='-lbcrypt'"
+inherit ptest
+
+RDEPENDS:${PN}-ptest += "make perl"
+
+do_install_ptest() {
+ t=${D}${PTEST_PATH}
+ install -D ${S}/build-aux/test-driver $t/build-aux/test-driver
+ cp -r ${S}/tests $t/
+ install ${B}/tests/Makefile $t/tests/
+ sed -e 's,--sysroot=${STAGING_DIR_TARGET},,g' \
+ -e 's|${DEBUG_PREFIX_MAP}||g' \
+ -e 's:${HOSTTOOLS_DIR}/::g' \
+ -e 's:${RECIPE_SYSROOT_NATIVE}::g' \
+ -e 's:${BASE_WORKDIR}/${MULTIMACH_TARGET_SYS}::g' \
+ -e 's|^Makefile:|_Makefile:|' \
+ -e 's|bash|sh|' \
+ -e 's|^top_srcdir = \(.*\)|top_srcdir = ..\/|' \
+ -e 's|^srcdir = \(.*\)|srcdir = .|' \
+ -e 's|"`$(built_programs)`"|diff|' \
+ -e 's|gawk|awk|g' \
+ -i $t/tests/Makefile
+}
diff --git a/meta/recipes-extended/diffutils/diffutils_3.8.bb b/meta/recipes-extended/diffutils/diffutils_3.8.bb
deleted file mode 100644
index 8889c83ee2..0000000000
--- a/meta/recipes-extended/diffutils/diffutils_3.8.bb
+++ /dev/null
@@ -1,44 +0,0 @@
-LICENSE = "GPL-3.0-or-later"
-LIC_FILES_CHKSUM = "file://COPYING;md5=d32239bcb673463ab874e80d47fae504"
-
-require diffutils.inc
-
-SRC_URI = "${GNU_MIRROR}/diffutils/diffutils-${PV}.tar.xz \
- file://run-ptest \
- file://0001-Skip-strip-trailing-cr-test-case.patch \
- file://0001-mcontext-is-not-a-standard-layout-so-glibc-and-musl-.patch \
- "
-
-SRC_URI[sha256sum] = "a6bdd7d1b31266d11c4f4de6c1b748d4607ab0231af5188fc2533d0ae2438fec"
-
-EXTRA_OECONF += "ac_cv_path_PR_PROGRAM=${bindir}/pr --without-libsigsegv-prefix"
-
-# latest gnulib is no longer able to handle this - I dare not try to fix that maze of abstractions and generators
-CFLAGS:mingw32 = " -DSA_RESTART=0"
-
-# Fix "Argument list too long" error when len(TMPDIR) = 410
-acpaths = "-I ./m4"
-
-EXTRA_OEMAKE:append:mingw32 = " LIBS='-lbcrypt'"
-inherit ptest
-
-RDEPENDS:${PN}-ptest += "make perl"
-
-do_install_ptest() {
- t=${D}${PTEST_PATH}
- install -D ${S}/build-aux/test-driver $t/build-aux/test-driver
- cp -r ${S}/tests $t/
- install ${B}/tests/Makefile $t/tests/
- sed -e 's,--sysroot=${STAGING_DIR_TARGET},,g' \
- -e 's|${DEBUG_PREFIX_MAP}||g' \
- -e 's:${HOSTTOOLS_DIR}/::g' \
- -e 's:${RECIPE_SYSROOT_NATIVE}::g' \
- -e 's:${BASE_WORKDIR}/${MULTIMACH_TARGET_SYS}::g' \
- -e 's|^Makefile:|_Makefile:|' \
- -e 's|bash|sh|' \
- -e 's|^top_srcdir = \(.*\)|top_srcdir = ..\/|' \
- -e 's|^srcdir = \(.*\)|srcdir = .|' \
- -e 's|"`$(built_programs)`"|diff|' \
- -e 's|gawk|awk|g' \
- -i $t/tests/Makefile
-}
diff --git a/meta/recipes-extended/ed/ed_1.18.bb b/meta/recipes-extended/ed/ed_1.18.bb
deleted file mode 100644
index e1548df15f..0000000000
--- a/meta/recipes-extended/ed/ed_1.18.bb
+++ /dev/null
@@ -1,38 +0,0 @@
-SUMMARY = "Line-oriented text editor"
-HOMEPAGE = "http://www.gnu.org/software/ed/"
-DESCRIPTION = "GNU ed is a line-oriented text editor. It is used to create, display, modify and otherwise manipulate text files, both interactively and via shell scripts. A restricted version of ed, red, can only edit files in the current directory and cannot execute shell commands."
-
-LICENSE = "GPL-2.0-only"
-LIC_FILES_CHKSUM = "file://COPYING;md5=76d6e300ffd8fb9d18bd9b136a9bba13 \
- file://ed.h;endline=20;md5=6e6a818b0593f937fc63ba08d5e314bf \
- file://main.c;endline=17;md5=8419a08bb12936f32384a0d1c0f9e74c \
- "
-
-SECTION = "base"
-
-CVE_PRODUCT = "gnu:ed"
-
-# LSB states that ed should be in /bin/
-bindir = "${base_bindir}"
-
-# Upstream regularly removes previous releases from https://ftp.gnu.org/gnu/ed/
-SRC_URI = "${GNU_MIRROR}/ed/${BP}.tar.lz"
-UPSTREAM_CHECK_URI = "${GNU_MIRROR}/ed/"
-
-SRC_URI[sha256sum] = "aca8efad9800c587724a20b97aa8fc47e6b5a47df81606feaba831b074462b4f"
-
-EXTRA_OEMAKE = "-e MAKEFLAGS="
-
-inherit texinfo
-
-do_configure() {
- ${S}/configure
-}
-
-do_install() {
- oe_runmake 'DESTDIR=${D}' install
- # Info dir listing isn't interesting at this point so remove it if it exists.
- if [ -e "${D}${infodir}/dir" ]; then
- rm -f ${D}${infodir}/dir
- fi
-}
diff --git a/meta/recipes-extended/ed/ed_1.20.1.bb b/meta/recipes-extended/ed/ed_1.20.1.bb
new file mode 100644
index 0000000000..9ae53002c3
--- /dev/null
+++ b/meta/recipes-extended/ed/ed_1.20.1.bb
@@ -0,0 +1,38 @@
+SUMMARY = "Line-oriented text editor"
+HOMEPAGE = "http://www.gnu.org/software/ed/"
+DESCRIPTION = "GNU ed is a line-oriented text editor. It is used to create, display, modify and otherwise manipulate text files, both interactively and via shell scripts. A restricted version of ed, red, can only edit files in the current directory and cannot execute shell commands."
+
+LICENSE = "GPL-2.0-only"
+LIC_FILES_CHKSUM = "file://COPYING;md5=76d6e300ffd8fb9d18bd9b136a9bba13 \
+ file://ed.h;endline=20;md5=c3212b6c53b09668107420af9368c0ef \
+ file://main.c;endline=17;md5=e5d2ae5ddd1ecb87dc71702c06dd06dc \
+ "
+
+SECTION = "base"
+
+CVE_PRODUCT = "gnu:ed"
+
+# LSB states that ed should be in /bin/
+bindir = "${base_bindir}"
+
+# Upstream regularly removes previous releases from https://ftp.gnu.org/gnu/ed/
+SRC_URI = "${GNU_MIRROR}/ed/${BP}.tar.lz"
+UPSTREAM_CHECK_URI = "${GNU_MIRROR}/ed/"
+
+SRC_URI[sha256sum] = "b1a463b297a141f9876c4b1fcd01477f645cded92168090e9a35db2af4babbca"
+
+EXTRA_OEMAKE = "-e MAKEFLAGS="
+
+inherit texinfo
+
+do_configure() {
+ ${S}/configure
+}
+
+do_install() {
+ oe_runmake 'DESTDIR=${D}' install
+ # Info dir listing isn't interesting at this point so remove it if it exists.
+ if [ -e "${D}${infodir}/dir" ]; then
+ rm -f ${D}${infodir}/dir
+ fi
+}
diff --git a/meta/recipes-extended/ethtool/ethtool/avoid_parallel_tests.patch b/meta/recipes-extended/ethtool/ethtool/avoid_parallel_tests.patch
index 52d2a93449..69df8632f2 100644
--- a/meta/recipes-extended/ethtool/ethtool/avoid_parallel_tests.patch
+++ b/meta/recipes-extended/ethtool/ethtool/avoid_parallel_tests.patch
@@ -1,4 +1,4 @@
-From ef2ad68becea77dfdbd5a344e30b3700cc440c0f Mon Sep 17 00:00:00 2001
+From 758fde7186730ee331a0ee6149276cd412766ee0 Mon Sep 17 00:00:00 2001
From: Tudor Florea <tudor.florea@enea.com>
Date: Wed, 28 May 2014 18:59:54 +0200
Subject: [PATCH] ethtool: use serial-tests config needed by ptest.
@@ -15,12 +15,12 @@ Upstream-Status: Inappropriate
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/configure.ac b/configure.ac
-index 7a9b5b9..dc8ae35 100644
+index c871558..8ea1eaa 100644
--- a/configure.ac
+++ b/configure.ac
-@@ -2,7 +2,7 @@ dnl Process this file with autoconf to produce a configure script.
- AC_INIT(ethtool, 5.17, netdev@vger.kernel.org)
+@@ -3,7 +3,7 @@ AC_INIT(ethtool, 6.7, netdev@vger.kernel.org)
AC_PREREQ(2.52)
+ AC_CONFIG_MACRO_DIR([m4])
AC_CONFIG_SRCDIR([ethtool.c])
-AM_INIT_AUTOMAKE([gnu subdir-objects])
+AM_INIT_AUTOMAKE([gnu subdir-objects serial-tests])
diff --git a/meta/recipes-extended/ethtool/ethtool_5.17.bb b/meta/recipes-extended/ethtool/ethtool_5.17.bb
deleted file mode 100644
index 61032d4836..0000000000
--- a/meta/recipes-extended/ethtool/ethtool_5.17.bb
+++ /dev/null
@@ -1,37 +0,0 @@
-SUMMARY = "Display or change ethernet card settings"
-DESCRIPTION = "A small utility for examining and tuning the settings of your ethernet-based network interfaces."
-HOMEPAGE = "http://www.kernel.org/pub/software/network/ethtool/"
-SECTION = "console/network"
-LICENSE = "GPL-2.0-or-later"
-LIC_FILES_CHKSUM = "file://COPYING;md5=b234ee4d69f5fce4486a80fdaf4a4263 \
- file://ethtool.c;beginline=4;endline=17;md5=c19b30548c582577fc6b443626fc1216"
-
-SRC_URI = "${KERNELORG_MIRROR}/software/network/ethtool/ethtool-${PV}.tar.gz \
- file://run-ptest \
- file://avoid_parallel_tests.patch \
- "
-
-SRC_URI[sha256sum] = "3e32735d13aa19e2be32d3528ef8a135fc99782950ab3fa602198e72992e9450"
-
-UPSTREAM_CHECK_URI = "https://www.kernel.org/pub/software/network/ethtool/"
-
-inherit autotools ptest bash-completion pkgconfig
-
-RDEPENDS:${PN}-ptest += "make"
-
-PACKAGECONFIG ?= "netlink"
-PACKAGECONFIG[netlink] = "--enable-netlink,--disable-netlink,libmnl,"
-
-do_compile_ptest() {
- oe_runmake buildtest-TESTS
-}
-
-do_install_ptest () {
- cp ${B}/Makefile ${D}${PTEST_PATH}
- install ${B}/test-cmdline ${D}${PTEST_PATH}
- if ${@bb.utils.contains('PACKAGECONFIG', 'netlink', 'false', 'true', d)}; then
- install ${B}/test-features ${D}${PTEST_PATH}
- fi
- install ${B}/ethtool ${D}${PTEST_PATH}/ethtool
- sed -i 's/^Makefile/_Makefile/' ${D}${PTEST_PATH}/Makefile
-}
diff --git a/meta/recipes-extended/ethtool/ethtool_6.7.bb b/meta/recipes-extended/ethtool/ethtool_6.7.bb
new file mode 100644
index 0000000000..33df0e0e80
--- /dev/null
+++ b/meta/recipes-extended/ethtool/ethtool_6.7.bb
@@ -0,0 +1,37 @@
+SUMMARY = "Display or change ethernet card settings"
+DESCRIPTION = "A small utility for examining and tuning the settings of your ethernet-based network interfaces."
+HOMEPAGE = "http://www.kernel.org/pub/software/network/ethtool/"
+SECTION = "console/network"
+LICENSE = "GPL-2.0-or-later"
+LIC_FILES_CHKSUM = "file://COPYING;md5=b234ee4d69f5fce4486a80fdaf4a4263 \
+ file://ethtool.c;beginline=4;endline=17;md5=c19b30548c582577fc6b443626fc1216"
+
+SRC_URI = "${KERNELORG_MIRROR}/software/network/ethtool/ethtool-${PV}.tar.gz \
+ file://run-ptest \
+ file://avoid_parallel_tests.patch \
+ "
+
+SRC_URI[sha256sum] = "ace0e95a03b38208af4ebacb415244568ace44c5d5a279ea434844f900179c75"
+
+UPSTREAM_CHECK_URI = "https://www.kernel.org/pub/software/network/ethtool/"
+
+inherit autotools ptest bash-completion pkgconfig
+
+RDEPENDS:${PN}-ptest += "make bash"
+
+PACKAGECONFIG ?= "netlink"
+PACKAGECONFIG[netlink] = "--enable-netlink,--disable-netlink,libmnl,"
+
+do_compile_ptest() {
+ oe_runmake buildtest-TESTS
+}
+
+do_install_ptest () {
+ cp ${B}/Makefile ${D}${PTEST_PATH}
+ install ${B}/test-cmdline ${D}${PTEST_PATH}
+ if ${@bb.utils.contains('PACKAGECONFIG', 'netlink', 'false', 'true', d)}; then
+ install ${B}/test-features ${D}${PTEST_PATH}
+ fi
+ install ${B}/ethtool ${D}${PTEST_PATH}/ethtool
+ sed -i 's/^Makefile/_Makefile/' ${D}${PTEST_PATH}/Makefile
+}
diff --git a/meta/recipes-extended/findutils/findutils.inc b/meta/recipes-extended/findutils/findutils.inc
index ddcc05750b..03cf2a8d72 100644
--- a/meta/recipes-extended/findutils/findutils.inc
+++ b/meta/recipes-extended/findutils/findutils.inc
@@ -7,6 +7,7 @@ BUGTRACKER = "http://savannah.gnu.org/bugs/?group=findutils"
SECTION = "console/utils"
SRC_URI = "${GNU_MIRROR}/${BPN}/${BP}.tar.xz \
+ file://autoconf-2.73.patch \
file://run-ptest \
"
diff --git a/meta/recipes-extended/findutils/findutils/autoconf-2.73.patch b/meta/recipes-extended/findutils/findutils/autoconf-2.73.patch
new file mode 100644
index 0000000000..63728b5f91
--- /dev/null
+++ b/meta/recipes-extended/findutils/findutils/autoconf-2.73.patch
@@ -0,0 +1,24 @@
+The gnulib largefile macro needs updating to work with autoconf 2.73. Rather
+than the full code:
+
+https://git.savannah.gnu.org/cgit/gnulib.git/commit/m4/largefile.m4?id=f91f633858cf132e50924224c50d6264a92caabb
+
+Just tweak the exiting code to work with 2.73. The next findutils upgrade should
+update to new gnulib
+
+Upstream-Status: Inappropriate
+Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
+
+Index: findutils-4.9.0/gl/m4/largefile.m4
+===================================================================
+--- findutils-4.9.0.orig/gl/m4/largefile.m4
++++ findutils-4.9.0/gl/m4/largefile.m4
+@@ -26,7 +26,7 @@ AC_DEFUN([gl_SET_LARGEFILE_SOURCE],
+ # with _TIME_BITS. Also, work around a problem in autoconf <= 2.69:
+ # AC_SYS_LARGEFILE does not configure for large inodes on Mac OS X 10.5,
+ # or configures them incorrectly in some cases.
+-m4_version_prereq([2.70], [], [
++m4_version_prereq([2.73], [], [
+
+ # _AC_SYS_LARGEFILE_TEST_INCLUDES
+ # -------------------------------
diff --git a/meta/recipes-extended/findutils/findutils_4.9.0.bb b/meta/recipes-extended/findutils/findutils_4.9.0.bb
index e4ce8ee4eb..074817df27 100644
--- a/meta/recipes-extended/findutils/findutils_4.9.0.bb
+++ b/meta/recipes-extended/findutils/findutils_4.9.0.bb
@@ -14,7 +14,8 @@ CACHED_CONFIGUREVARS += "gl_cv_func_wcwidth_works=yes"
EXTRA_OECONF += "ac_cv_path_SORT=${bindir}/sort"
-RDEPENDS:${PN}-ptest += "bash sed grep"
+# need od from coreutils for -t option
+RDEPENDS:${PN}-ptest += "bash sed grep coreutils"
do_install_ptest:class-target() {
mkdir -p ${D}${PTEST_PATH}/tests/
diff --git a/meta/recipes-extended/gawk/gawk/remove-sensitive-tests.patch b/meta/recipes-extended/gawk/gawk/remove-sensitive-tests.patch
deleted file mode 100644
index 167c0787ee..0000000000
--- a/meta/recipes-extended/gawk/gawk/remove-sensitive-tests.patch
+++ /dev/null
@@ -1,24 +0,0 @@
-These tests require an unloaded host as otherwise timing sensitive tests can fail
-https://bugzilla.yoctoproject.org/show_bug.cgi?id=14371
-
-Upstream-Status: Inappropriate
-Signed-off-by: Ross Burton <ross.burton@arm.com>
-
---- a/test/Maketests~
-+++ b/test/Maketests
-@@ -2069,7 +2069,2 @@
-
--timeout:
-- @echo $@ $(ZOS_FAIL)
-- @AWKPATH="$(srcdir)" $(AWK) -f $@.awk >_$@ 2>&1 || echo EXIT CODE: $$? >>_$@
-- @-$(CMP) "$(srcdir)"/$@.ok _$@ && rm -f _$@
--
- typedregex1:
-@@ -2297,7 +2292,2 @@
- @-$(CMP) "$(srcdir)"/$@.ok _$@ && rm -f _$@
--
--time:
-- @echo $@
-- @AWKPATH="$(srcdir)" $(AWK) -f $@.awk >_$@ 2>&1 || echo EXIT CODE: $$? >>_$@
-- @-$(CMP) "$(srcdir)"/$@.ok _$@ && rm -f _$@
-
diff --git a/meta/recipes-extended/gawk/gawk/run-ptest b/meta/recipes-extended/gawk/gawk/run-ptest
index f67a95874f..f4ef3e7bd4 100644
--- a/meta/recipes-extended/gawk/gawk/run-ptest
+++ b/meta/recipes-extended/gawk/gawk/run-ptest
@@ -2,7 +2,12 @@
cd test
for i in `grep -E "^[a-z0-9_-]*:$" Maketests |awk -F: '{print $1}'`; do
- #LC_ALL=${GAWKLOCALE:-C} LANG=${GAWKLOCALE:-C}
+ unset LANG
+ grep -q "^$i$" skipped.txt
+ if [ $? -eq 0 ]; then
+ echo "SKIP: $i"
+ continue
+ fi
srcdir=`pwd` AWKPROG=gawk AWK=gawk CMP=cmp make -f Maketests $i >$i.tmp 2>&1
if [ -e _$i ]; then
cat _$i
diff --git a/meta/recipes-extended/gawk/gawk_5.1.1.bb b/meta/recipes-extended/gawk/gawk_5.1.1.bb
deleted file mode 100644
index fe339805d0..0000000000
--- a/meta/recipes-extended/gawk/gawk_5.1.1.bb
+++ /dev/null
@@ -1,64 +0,0 @@
-SUMMARY = "GNU awk text processing utility"
-DESCRIPTION = "The GNU version of awk, a text processing utility. \
-Awk interprets a special-purpose programming language to do \
-quick and easy text pattern matching and reformatting jobs."
-HOMEPAGE = "https://www.gnu.org/software/gawk/"
-BUGTRACKER = "bug-gawk@gnu.org"
-SECTION = "console/utils"
-
-# gawk <= 3.1.5: GPL-2.0-only
-# gawk >= 3.1.6: GPL-3.0-only
-LICENSE = "GPL-3.0-only"
-LIC_FILES_CHKSUM = "file://COPYING;md5=d32239bcb673463ab874e80d47fae504"
-
-PACKAGECONFIG ??= "readline"
-PACKAGECONFIG[readline] = "--with-readline,--without-readline,readline"
-PACKAGECONFIG[mpfr] = "--with-mpfr,--without-mpfr, mpfr"
-
-SRC_URI = "${GNU_MIRROR}/gawk/gawk-${PV}.tar.gz \
- file://remove-sensitive-tests.patch \
- file://run-ptest \
- "
-
-SRC_URI[sha256sum] = "6168d8d1dc8f74bd17d9dc22fa9634c49070f232343b744901da15fb4f06bffd"
-
-inherit autotools gettext texinfo update-alternatives
-
-FILES:${PN} += "${datadir}/awk"
-FILES:${PN}-dev += "${libdir}/${BPN}/*.la"
-
-ALTERNATIVE:${PN} = "awk"
-ALTERNATIVE_TARGET[awk] = "${bindir}/gawk"
-ALTERNATIVE_PRIORITY = "100"
-
-do_install:append() {
- # remove the link since we don't package it
- rm ${D}${bindir}/awk
-}
-
-inherit ptest
-
-do_install_ptest() {
- mkdir ${D}${PTEST_PATH}/test
- ln -s ${bindir}/gawk ${D}${PTEST_PATH}/gawk
- # The list of tests is all targets in Maketests, apart from the dummy Gt-dummy
- TESTS=$(awk -F: '$1 == "Gt-dummy" { next } /[[:alnum:]]+:$/ { print $1 }' ${S}/test/Maketests)
- for i in $TESTS Maketests inclib.awk; do
- cp ${S}/test/$i* ${D}${PTEST_PATH}/test
- done
- sed -i -e 's|/usr/local/bin|${bindir}|g' \
- -e 's|#!${base_bindir}/awk|#!${bindir}/awk|g' ${D}${PTEST_PATH}/test/*.awk
-
- sed -i -e "s|GAWKLOCALE|LANG|g" ${D}${PTEST_PATH}/test/Maketests
-
- # These tests require an unloaded host as otherwise timing sensitive tests can fail
- # https://bugzilla.yoctoproject.org/show_bug.cgi?id=14371
- rm -f ${D}${PTEST_PATH}/test/time.*
- rm -f ${D}${PTEST_PATH}/test/timeout.*
-}
-
-RDEPENDS:${PN}-ptest += "make"
-
-RDEPENDS:${PN}-ptest:append:libc-glibc = " locale-base-en-us.iso-8859-1"
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-extended/gawk/gawk_5.3.0.bb b/meta/recipes-extended/gawk/gawk_5.3.0.bb
new file mode 100644
index 0000000000..d7a0fc616d
--- /dev/null
+++ b/meta/recipes-extended/gawk/gawk_5.3.0.bb
@@ -0,0 +1,87 @@
+SUMMARY = "GNU awk text processing utility"
+DESCRIPTION = "The GNU version of awk, a text processing utility. \
+Awk interprets a special-purpose programming language to do \
+quick and easy text pattern matching and reformatting jobs."
+HOMEPAGE = "https://www.gnu.org/software/gawk/"
+BUGTRACKER = "bug-gawk@gnu.org"
+SECTION = "console/utils"
+
+# gawk <= 3.1.5: GPL-2.0-only
+# gawk >= 3.1.6: GPL-3.0-only
+LICENSE = "GPL-3.0-only"
+LIC_FILES_CHKSUM = "file://COPYING;md5=d32239bcb673463ab874e80d47fae504"
+
+PACKAGECONFIG ??= "readline"
+PACKAGECONFIG[readline] = "--with-readline,--without-readline,readline"
+PACKAGECONFIG[mpfr] = "--with-mpfr,--without-mpfr, mpfr"
+
+SRC_URI = "${GNU_MIRROR}/gawk/gawk-${PV}.tar.gz \
+ file://run-ptest \
+ "
+
+SRC_URI[sha256sum] = "378f8864ec21cfceaa048f7e1869ac9b4597b449087caf1eb55e440d30273336"
+
+inherit autotools gettext texinfo update-alternatives
+
+FILES:${PN} += "${datadir}/awk"
+FILES:${PN}-dev += "${libdir}/${BPN}/*.la"
+
+PACKAGES =+ "${PN}-gawkbug"
+FILES:${PN}-gawkbug += "${bindir}/gawkbug"
+
+ALTERNATIVE:${PN} = "awk"
+ALTERNATIVE_TARGET[awk] = "${bindir}/gawk"
+ALTERNATIVE_PRIORITY = "100"
+
+do_install:append() {
+ # remove the link since we don't package it
+ rm ${D}${bindir}/awk
+ # Strip non-reproducible build flags (containing build paths)
+ sed -i -e 's|^CC.*|CC=""|g' -e 's|^CFLAGS.*|CFLAGS=""|g' ${D}${bindir}/gawkbug
+}
+
+inherit ptest
+
+do_install_ptest() {
+ mkdir ${D}${PTEST_PATH}/test
+ ln -s ${bindir}/gawk ${D}${PTEST_PATH}/gawk
+ # The list of tests is all targets in Maketests, apart from the dummy Gt-dummy
+ TESTS=$(awk -F: '$1 == "Gt-dummy" { next } /[[:alnum:]]+:$/ { print $1 }' ${S}/test/Maketests)
+ for i in $TESTS Maketests inclib.awk; do
+ cp ${S}/test/$i* ${D}${PTEST_PATH}/test
+ done
+ sed -i \
+ -e 's|#!${base_bindir}/awk|#!${bindir}/awk|g' ${D}${PTEST_PATH}/test/*.awk
+
+ sed -i -e "s|GAWKLOCALE|LANG|g" ${D}${PTEST_PATH}/test/Maketests
+
+ # These tests require an unloaded host as otherwise timing sensitive tests can fail
+ # https://bugzilla.yoctoproject.org/show_bug.cgi?id=14371
+ rm -f ${D}${PTEST_PATH}/test/time.*
+ rm -f ${D}${PTEST_PATH}/test/timeout.*
+ for t in time timeout; do
+ echo $t >> ${D}${PTEST_PATH}/test/skipped.txt
+ done
+}
+
+do_install_ptest:append:libc-musl() {
+ # Reported https://lists.gnu.org/archive/html/bug-gawk/2021-02/msg00005.html
+ rm -f ${D}${PTEST_PATH}/test/clos1way6.*
+ # Needs en_US.UTF-8 but then does not work with musl
+ rm -f ${D}${PTEST_PATH}/test/backsmalls1.*
+ # Needs en_US.UTF-8 but then does not work with musl
+ rm -f ${D}${PTEST_PATH}/test/commas.*
+ # The below two need LANG=C inside the make rule for musl
+ rm -f ${D}${PTEST_PATH}/test/rebt8b1.*
+ rm -f ${D}${PTEST_PATH}/test/regx8bit.*
+ for t in clos1way6 backsmalls1 commas rebt8b1 regx8bit; do
+ echo $t >> ${D}${PTEST_PATH}/test/skipped.txt
+ done
+}
+
+RDEPENDS:${PN}-ptest += "make locale-base-en-us coreutils"
+
+RDEPENDS:${PN}-ptest:append:libc-glibc = " locale-base-en-us.iso-8859-1"
+RDEPENDS:${PN}-ptest:append:libc-musl = " musl-locales"
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-extended/ghostscript/files/do-not-check-local-libpng-source.patch b/meta/recipes-extended/ghostscript/files/do-not-check-local-libpng-source.patch
deleted file mode 100644
index a9afb9948c..0000000000
--- a/meta/recipes-extended/ghostscript/files/do-not-check-local-libpng-source.patch
+++ /dev/null
@@ -1,31 +0,0 @@
-From 2adaa7366064a8f18af864eda74e52877a89620c Mon Sep 17 00:00:00 2001
-From: Hongxu Jia <hongxu.jia@windriver.com>
-Date: Mon, 18 Jan 2016 01:00:30 -0500
-Subject: [PATCH] configure.ac: do not check local png source
-
-In oe-core, it did not need to compile local libpng
-source in ghostscript, so do not check local png
-source, and directly check the existance of shared
-libpng library.
-
-Upstream-Status: Inappropriate [OE-Core specific]
-
-Signed-off-by: Hongxu Jia <hongxu.jia@windriver.com>
-
----
- configure.ac | 2 +-
- 1 file changed, 1 insertion(+), 1 deletion(-)
-
-diff --git a/configure.ac b/configure.ac
-index 698abd3..e65ac8b 100644
---- a/configure.ac
-+++ b/configure.ac
-@@ -1241,7 +1241,7 @@ else
- PNGDEVS=''
- PNGDEVS_ALL='png48 png16m pnggray pngmono pngmonod png256 png16 pngalpha'
- AC_MSG_CHECKING([for local png library source])
-- if test -f $srcdir/libpng/pngread.c; then
-+ if false; then
- AC_MSG_RESULT([yes])
- SHARE_LIBPNG=0
- LIBPNGDIR=$srcdir/libpng
diff --git a/meta/recipes-extended/ghostscript/ghostscript/avoid-host-contamination.patch b/meta/recipes-extended/ghostscript/ghostscript/avoid-host-contamination.patch
index e8cb16c36b..67f14bd368 100644
--- a/meta/recipes-extended/ghostscript/ghostscript/avoid-host-contamination.patch
+++ b/meta/recipes-extended/ghostscript/ghostscript/avoid-host-contamination.patch
@@ -1,12 +1,11 @@
-From 0ccbaa134093bf6afc79f2d20d061bca5a8754ed Mon Sep 17 00:00:00 2001
+From b36713c8f1ba0e5755b78845a433354a63663b1a Mon Sep 17 00:00:00 2001
From: Kai Kang <kai.kang@windriver.com>
Date: Thu, 29 Mar 2018 16:02:05 +0800
-Subject: [PATCH 04/10] avoid host contamination
+Subject: [PATCH] avoid host contamination
Remove hardcode path refer to host to avoid host contamination.
-Upstream-Status: Inappropriate [embedded specific]
-
+Upstream-Status: Pending
Signed-off-by: Kai Kang <kai.kang@windriver.com>
Rebase to 9.23
@@ -16,10 +15,10 @@ Signed-off-by: Hongxu Jia <hongxu.jia@windriver.com>
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/devices/devs.mak b/devices/devs.mak
-index 846aa50..9570182 100644
+index 186f704..88ab8c9 100644
--- a/devices/devs.mak
+++ b/devices/devs.mak
-@@ -393,7 +393,7 @@ $(DEVOBJ)gdevxalt.$(OBJ) : $(DEVSRC)gdevxalt.c $(GDEVX) $(math__h) $(memory__h)\
+@@ -397,7 +397,7 @@ $(DEVOBJ)gdevxalt.$(OBJ) : $(DEVSRC)gdevxalt.c $(GDEVX) $(math__h) $(memory__h)\
### NON PORTABLE, ONLY UNIX WITH GCC SUPPORT
$(DEVOBJ)X11.so : $(x11alt_) $(x11_) $(DEVS_MAK) $(MAKEDIRS)
@@ -28,6 +27,3 @@ index 846aa50..9570182 100644
###### --------------- Memory-buffered printer devices --------------- ######
---
-1.8.3.1
-
diff --git a/meta/recipes-extended/ghostscript/ghostscript/base-genht.c-add-a-preprocessor-define-to-allow-fope.patch b/meta/recipes-extended/ghostscript/ghostscript/base-genht.c-add-a-preprocessor-define-to-allow-fope.patch
deleted file mode 100644
index 7d80066a80..0000000000
--- a/meta/recipes-extended/ghostscript/ghostscript/base-genht.c-add-a-preprocessor-define-to-allow-fope.patch
+++ /dev/null
@@ -1,42 +0,0 @@
-From 9ca6f795409b988d38dd98bc2a6ecb68a9392312 Mon Sep 17 00:00:00 2001
-From: Hongxu Jia <hongxu.jia@windriver.com>
-Date: Thu, 29 Mar 2018 16:37:40 +0800
-Subject: [PATCH 10/10] base/genht.c: add a preprocessor define to allow fopen
- calling
-
-The commit in upstream:
-http://git.ghostscript.com/?p=ghostpdl.git;a=commitdiff;h=773c69e46e70bdd5482676437dafd2ca83397643
-
-Replace all fopen calls with gp_fopen and add a preprocessor define so
-that any unintential calls directly to fopen will cause an error.
-
-Only exceptions are those in the platform specific code, and mkromfs.c.
-This patch add a preprocessor define to allow fopen calling in
-base/genht.c.
-
-Upstream-Status: Pending
-
-Rebase to 9.23
-Signed-off-by: Hongxu Jia <hongxu.jia@windriver.com>
----
- base/genht.c | 4 ++++
- 1 file changed, 4 insertions(+)
-
-diff --git a/base/genht.c b/base/genht.c
-index e597e72..e96bfb5 100644
---- a/base/genht.c
-+++ b/base/genht.c
-@@ -16,6 +16,10 @@
-
- /* Generate C code for compiling halftones into ROM. */
- #include "malloc_.h"
-+
-+/* prevent gp.h from defining fopen */
-+#define fopen fopen
-+
- #include "stdio_.h"
- #include "string_.h"
- #include "gscdefs.h"
---
-1.8.3.1
-
diff --git a/meta/recipes-extended/ghostscript/ghostscript/cups-no-gcrypt.patch b/meta/recipes-extended/ghostscript/ghostscript/cups-no-gcrypt.patch
deleted file mode 100644
index 4c9bb22fa2..0000000000
--- a/meta/recipes-extended/ghostscript/ghostscript/cups-no-gcrypt.patch
+++ /dev/null
@@ -1,37 +0,0 @@
-From 9129eb7fa9dc160d64a7d9df9279a3b1dae4d793 Mon Sep 17 00:00:00 2001
-From: Jackie Huang <jackie.huang@windriver.com>
-Date: Thu, 29 Mar 2018 16:16:18 +0800
-Subject: [PATCH 08/10] cups no gcrypt
-
-Don't build-depend on libgcrypt, as nothing is used from it
-
-Backported from
-http://www.cups.org/strfiles.php/3308/cups-no-gcrypt.patch
-
-This addresses the cryto dependency seen during build.
-
-Upstream-Status: Backport
-
-Signed-off-by: Jackie Huang <jackie.huang@windriver.com>
-
-Rebase to 9.23
-Signed-off-by: Hongxu Jia <hongxu.jia@windriver.com>
----
- cups/libs/cups/http-private.h | 1 -
- 1 file changed, 1 deletion(-)
-
-diff --git a/cups/libs/cups/http-private.h b/cups/libs/cups/http-private.h
-index 99a85c3..a674852 100644
---- a/cups/libs/cups/http-private.h
-+++ b/cups/libs/cups/http-private.h
-@@ -80,7 +80,6 @@ typedef int socklen_t;
- # elif defined HAVE_GNUTLS
- # include <gnutls/gnutls.h>
- # include <gnutls/x509.h>
--# include <gcrypt.h>
- # elif defined(HAVE_CDSASSL)
- # include <CoreFoundation/CoreFoundation.h>
- # include <Security/Security.h>
---
-1.8.3.1
-
diff --git a/meta/recipes-extended/ghostscript/ghostscript/ghostscript-9.15-parallel-make.patch b/meta/recipes-extended/ghostscript/ghostscript/ghostscript-9.15-parallel-make.patch
deleted file mode 100644
index 5b57da2a97..0000000000
--- a/meta/recipes-extended/ghostscript/ghostscript/ghostscript-9.15-parallel-make.patch
+++ /dev/null
@@ -1,28 +0,0 @@
-From 04a86a613e0f9bfbbad99874f72217f75e8c53a3 Mon Sep 17 00:00:00 2001
-From: Robert Yang <liezhi.yang@windriver.com>
-Date: Thu, 29 Mar 2018 15:59:05 +0800
-Subject: [PATCH] contrib.mak: fix for parallel build
-
-Upstream-Status: Pending
-
-Signed-off-by: Robert Yang <liezhi.yang@windriver.com>
-
-Rebase to 9.23
-Signed-off-by: Hongxu Jia <hongxu.jia@windriver.com>
-
----
- contrib/contrib.mak | 1 +
- 1 file changed, 1 insertion(+)
-
-diff --git a/contrib/contrib.mak b/contrib/contrib.mak
-index 2edee7a..c9100e8 100644
---- a/contrib/contrib.mak
-+++ b/contrib/contrib.mak
-@@ -1241,6 +1241,7 @@ $(DEVOBJ)gdevalps.$(OBJ) : $(JAPSRC)gdevalps.c $(PDEVH) \
- ### ----------------- Additional .upp files ---------------- ###
-
- extra-upp-install: install-libdata
-+ mkdir -p $(DESTDIR)$(gsdatadir)$(D)lib
- for f in $(CONTRIBSRC)uniprint$(D)*.upp; do \
- $(INSTALL_DATA) $$f $(DESTDIR)$(gsdatadir)$(D)lib || exit 1; \
- done
diff --git a/meta/recipes-extended/ghostscript/ghostscript/ghostscript-9.21-native-fix-disable-system-libtiff.patch b/meta/recipes-extended/ghostscript/ghostscript/ghostscript-9.21-native-fix-disable-system-libtiff.patch
deleted file mode 100644
index a382c7f891..0000000000
--- a/meta/recipes-extended/ghostscript/ghostscript/ghostscript-9.21-native-fix-disable-system-libtiff.patch
+++ /dev/null
@@ -1,47 +0,0 @@
-From 0124b1c29b9cfe46d73ae82ce023dd7c5b055744 Mon Sep 17 00:00:00 2001
-From: Hongxu Jia <hongxu.jia@windriver.com>
-Date: Thu, 29 Mar 2018 16:36:12 +0800
-Subject: [PATCH 09/10] ghostscript-native:fix disable-system-libtiff
-
-Modify configure to add the check to make sure
-ghostscrip could work while system-libtiff is
-disabled.
-
-Upstream-Status: Pending
-
-Signed-off-by: Hongxu Jia <hongxu.jia@windriver.com>
-
-Rebase to ghostscript 9.25.
-
-Signed-off-by: Hongxu Jia <hongxu.jia@windriver.com>
-Signed-off-by: Jagadeesh Krishnanjanappa <jkrishnanjanappa@mvista.com>
----
- configure.ac | 5 +++++
- 1 file changed, 5 insertions(+)
-
-diff --git a/configure.ac b/configure.ac
-index 80a60b1..f3e9efb 100644
---- a/configure.ac
-+++ b/configure.ac
-@@ -1319,6 +1319,7 @@ AC_TRY_COMPILE([], [return 0;],
- CFLAGS=$CGLAGS_STORE
-
- if test x"$SHARE_LIBTIFF" = x"0" ; then
-+ if test -e $LIBTIFFDIR/configure; then
- echo "Running libtiff configure script..."
- olddir=`pwd`
- if ! test -d "$LIBTIFFCONFDIR" ; then
-@@ -1337,6 +1338,10 @@ if test x"$SHARE_LIBTIFF" = x"0" ; then
-
- echo
- echo "Continuing with Ghostscript configuration..."
-+ else
-+ AC_MSG_NOTICE([Could not find local copy of libtiff.
-+Disabling tiff output devices.])
-+ fi
- fi
-
- AC_SUBST(SHARE_LIBTIFF)
---
-1.8.3.1
-
diff --git a/meta/recipes-extended/ghostscript/ghostscript/ghostscript-9.21-prevent_recompiling.patch b/meta/recipes-extended/ghostscript/ghostscript/ghostscript-9.21-prevent_recompiling.patch
deleted file mode 100644
index c76915fb81..0000000000
--- a/meta/recipes-extended/ghostscript/ghostscript/ghostscript-9.21-prevent_recompiling.patch
+++ /dev/null
@@ -1,78 +0,0 @@
-From 239d681306a8d97ed10954788d32ba2f4b55f77c Mon Sep 17 00:00:00 2001
-From: Kang Kai <kai.kang@windriver.com>
-Date: Thu, 29 Mar 2018 16:10:16 +0800
-Subject: [PATCH 06/10] prevent recompiling
-
-Just use commands provided by ghostscript-native, preventing recompile
-them when compile ghostscript. Way to enable cross compile.
-
-Upstream-Status: Pending
-
-Signed-off-by: Kang Kai <kai.kang@windriver.com>
-Signed-off-by: Wenzong Fan <wenzong.fan@windriver.com>
-
-Rebase to 9.25
-Signed-off-by: Hongxu Jia <hongxu.jia@windriver.com>
-Signed-off-by: Jagadeesh Krishnanjanappa <jkrishnanjanappa@mvista.com>
----
- base/unix-aux.mak | 44 --------------------------------------------
- 1 file changed, 44 deletions(-)
-
-diff --git a/base/unix-aux.mak b/base/unix-aux.mak
-index 5bf72e9..9cb39d7 100644
---- a/base/unix-aux.mak
-+++ b/base/unix-aux.mak
-@@ -54,50 +54,6 @@ $(AUX)gp_stdia.$(OBJ): $(GLSRC)gp_stdia.
- $(stdio__h) $(time__h) $(unistd__h) $(gx_h) $(gp_h) $(UNIX_AUX_MAK) $(MAKEDIRS)
- $(GLCCAUX) $(AUXO_)gp_stdia.$(OBJ) $(C_) $(GLSRC)gp_stdia.c
-
--# -------------------------- Auxiliary programs --------------------------- #
--
--$(ECHOGS_XE): $(GLSRC)echogs.c $(AK) $(stdpre_h) $(UNIX_AUX_MAK) $(MAKEDIRS)
-- $(CCAUX_) $(I_)$(GLSRCDIR)$(_I) $(O_)$(ECHOGS_XE) $(GLSRC)echogs.c $(AUXEXTRALIBS)
--
--$(PACKPS_XE): $(GLSRC)pack_ps.c $(stdpre_h) $(UNIX_AUX_MAK) $(MAKEDIRS)
-- $(CCAUX_) $(I_)$(GLSRCDIR)$(_I) $(O_)$(PACKPS_XE) $(GLSRC)pack_ps.c $(AUXEXTRALIBS)
--
--# On the RS/6000 (at least), compiling genarch.c with gcc with -O
--# produces a buggy executable.
--$(GENARCH_XE): $(GLSRC)genarch.c $(AK) $(GENARCH_DEPS) $(UNIX_AUX_MAK) $(MAKEDIRS)
-- $(CCAUX_) $(I_)$(GLSRCDIR)$(_I) $(O_)$(GENARCH_XE) $(GLSRC)genarch.c $(AUXEXTRALIBS)
--
--$(GENCONF_XE): $(GLSRC)genconf.c $(AK) $(GENCONF_DEPS) $(UNIX_AUX_MAK) $(MAKEDIRS)
-- $(CCAUX_) $(I_)$(GLSRCDIR)$(_I) $(O_)$(GENCONF_XE) $(GLSRC)genconf.c $(AUXEXTRALIBS)
--
--$(GENDEV_XE): $(GLSRC)gendev.c $(AK) $(GENDEV_DEPS) $(UNIX_AUX_MAK) $(MAKEDIRS)
-- $(CCAUX_) $(I_)$(GLSRCDIR)$(_I) $(O_)$(GENDEV_XE) $(GLSRC)gendev.c $(AUXEXTRALIBS)
--
--$(GENHT_XE): $(GLSRC)genht.c $(AK) $(GENHT_DEPS) $(UNIX_AUX_MAK) $(MAKEDIRS)
-- $(CCAUX_) $(GENHT_CFLAGS) $(O_)$(GENHT_XE) $(GLSRC)genht.c $(AUXEXTRALIBS)
--
--# To get GS to use the system zlib, you remove/hide the gs/zlib directory
--# which means that the mkromfs build can't find the zlib source it needs.
--# So it's split into two targets, one using the zlib source directly.....
--MKROMFS_OBJS_0=$(MKROMFS_ZLIB_OBJS) $(AUX)gpmisc.$(OBJ) $(AUX)gp_getnv.$(OBJ) \
-- $(AUX)gscdefs.$(OBJ) $(AUX)gp_unix.$(OBJ) $(AUX)gp_unifs.$(OBJ) $(AUX)gp_unifn.$(OBJ) \
-- $(AUX)gp_stdia.$(OBJ) $(AUX)gsutil.$(OBJ) $(AUX)memento.$(OBJ)
--
--$(MKROMFS_XE)_0: $(GLSRC)mkromfs.c $(MKROMFS_COMMON_DEPS) $(MKROMFS_OBJS_0) $(UNIX_AUX_MAK) $(MAKEDIRS)
-- $(CCAUX_) $(GENOPTAUX) $(I_)$(GLSRCDIR)$(_I) $(I_)$(GLOBJ)$(_I) $(I_)$(ZSRCDIR)$(_I) $(GLSRC)mkromfs.c $(O_)$(MKROMFS_XE)_0 $(MKROMFS_OBJS_0) $(AUXEXTRALIBS)
--
--# .... and one using the zlib library linked via the command line
--MKROMFS_OBJS_1=$(AUX)gscdefs.$(OBJ) \
-- $(AUX)gpmisc.$(OBJ) $(AUX)gp_getnv.$(OBJ) \
-- $(AUX)gp_unix.$(OBJ) $(AUX)gp_unifs.$(OBJ) $(AUX)gp_unifn.$(OBJ) \
-- $(AUX)gp_stdia.$(OBJ) $(AUX)gsutil.$(OBJ)
--
--$(MKROMFS_XE)_1: $(GLSRC)mkromfs.c $(MKROMFS_COMMON_DEPS) $(MKROMFS_OBJS_1) $(UNIX_AUX_MAK) $(MAKEDIRS)
-- $(CCAUX_) $(GENOPTAUX) $(I_)$(GLSRCDIR)$(_I) $(I_)$(GLOBJ)$(_I) $(I_)$(ZSRCDIR)$(_I) $(GLSRC)mkromfs.c $(O_)$(MKROMFS_XE)_1 $(MKROMFS_OBJS_1) $(AUXEXTRALIBS)
--
--$(MKROMFS_XE): $(MKROMFS_XE)_$(SHARE_ZLIB) $(UNIX_AUX_MAK) $(MAKEDIRS)
-- $(CP_) $(MKROMFS_XE)_$(SHARE_ZLIB) $(MKROMFS_XE)
--
- # Query the environment to construct gconfig_.h.
- # These are all defined conditionally (except the JasPER one), so that
- # they can be overridden by settings from the configure script.
---
-1.8.3.1
-
diff --git a/meta/recipes-extended/ghostscript/ghostscript/mkdir-p.patch b/meta/recipes-extended/ghostscript/ghostscript/mkdir-p.patch
deleted file mode 100644
index 3e6d3e3c48..0000000000
--- a/meta/recipes-extended/ghostscript/ghostscript/mkdir-p.patch
+++ /dev/null
@@ -1,50 +0,0 @@
-From 2b23026f8e2a352417fb1c4da94bf69b19bef267 Mon Sep 17 00:00:00 2001
-From: Joe Slater <joe.slater@windriver.com>
-Date: Thu, 29 Mar 2018 16:04:32 +0800
-Subject: [PATCH 05/10] ghostscript: allow directories to be created more than
- once
-
-When doing parallel builds, we might try to create directories
-more than once. This should not cause an error.
-
-Upstream-Status: Pending
-
-Signed-off-by: Joe Slater <joe.slater@windriver.com>
-
-Rebase to 9.23
-Signed-off-by: Hongxu Jia <hongxu.jia@windriver.com>
----
- base/unix-end.mak | 17 ++++++++---------
- 1 file changed, 8 insertions(+), 9 deletions(-)
-
-diff --git a/base/unix-end.mak b/base/unix-end.mak
-index 9ce599a..feff5a6 100644
---- a/base/unix-end.mak
-+++ b/base/unix-end.mak
-@@ -17,15 +17,14 @@
- UNIX_END_MAK=$(GLSRC)unix-end.mak $(TOP_MAKEFILES)
- # Define the rule for building standard configurations.
- directories: $(UNIX_END_MAK)
-- @if test "$(BINDIR)" != "" -a ! -d $(BINDIR); then mkdir $(BINDIR); fi
-- @if test "$(GLGENDIR)" != "" -a ! -d $(GLGENDIR); then mkdir $(GLGENDIR); fi
-- @if test "$(GLOBJDIR)" != "" -a ! -d $(GLOBJDIR); then mkdir $(GLOBJDIR); fi
-- @if test "$(DEVGENDIR)" != "" -a ! -d $(DEVGENDIR); then mkdir $(DEVGENDIR); fi
-- @if test "$(DEVOBJDIR)" != "" -a ! -d $(DEVOBJDIR); then mkdir $(DEVOBJDIR); fi
-- @if test "$(AUXDIR)" != "" -a ! -d $(AUXDIR); then mkdir $(AUXDIR); fi
-- @if test "$(PSGENDIR)" != "" -a ! -d $(PSGENDIR); then mkdir $(PSGENDIR); fi
-- @if test "$(PSGENDIR)" != "" -a ! -d $(PSGENDIR)/cups; then mkdir $(PSGENDIR)/cups; fi
-- @if test "$(PSOBJDIR)" != "" -a ! -d $(PSOBJDIR); then mkdir $(PSOBJDIR); fi
-+ @if test "$(BINDIR)" != "" -a ! -d $(BINDIR); then mkdir -p $(BINDIR); fi
-+ @if test "$(GLGENDIR)" != "" -a ! -d $(GLGENDIR); then mkdir -p $(GLGENDIR); fi
-+ @if test "$(GLOBJDIR)" != "" -a ! -d $(GLOBJDIR); then mkdir -p $(GLOBJDIR); fi
-+ @if test "$(DEVGENDIR)" != "" -a ! -d $(DEVGENDIR); then mkdir -p $(DEVGENDIR); fi
-+ @if test "$(DEVOBJDIR)" != "" -a ! -d $(DEVOBJDIR); then mkdir -p $(DEVOBJDIR); fi
-+ @if test "$(AUXDIR)" != "" -a ! -d $(AUXDIR); then mkdir -p $(AUXDIR); fi
-+ @if test "$(PSGENDIR)" != "" -a ! -d $(PSGENDIR)/cups; then mkdir -p $(PSGENDIR)/cups; fi
-+ @if test "$(PSOBJDIR)" != "" -a ! -d $(PSOBJDIR); then mkdir -p $(PSOBJDIR); fi
-
-
- gs: .gssubtarget $(UNIX_END_MAK)
---
-1.8.3.1
-
diff --git a/meta/recipes-extended/ghostscript/ghostscript_10.03.0.bb b/meta/recipes-extended/ghostscript/ghostscript_10.03.0.bb
new file mode 100644
index 0000000000..ff7d38676e
--- /dev/null
+++ b/meta/recipes-extended/ghostscript/ghostscript_10.03.0.bb
@@ -0,0 +1,75 @@
+SUMMARY = "The GPL Ghostscript PostScript/PDF interpreter"
+DESCRIPTION = "Ghostscript is used for PostScript/PDF preview and printing. Usually as \
+a back-end to a program such as ghostview, it can display PostScript and PDF \
+documents in an X11 environment. \
+\
+Furthermore, it can render PostScript and PDF files as graphics to be printed \
+on non-PostScript printers. Supported printers include common \
+dot-matrix, inkjet and laser models. \
+"
+HOMEPAGE = "http://www.ghostscript.com"
+SECTION = "console/utils"
+
+LICENSE = "AGPL-3.0-or-later"
+LIC_FILES_CHKSUM = "file://LICENSE;md5=f98ffa763e50cded76f49bce73aade16"
+
+DEPENDS = "tiff jpeg fontconfig cups libpng freetype zlib"
+
+UPSTREAM_CHECK_URI = "https://github.com/ArtifexSoftware/ghostpdl-downloads/releases"
+UPSTREAM_CHECK_REGEX = "(?P<pver>\d+(\.\d+)+)\.tar"
+
+def gs_verdir(v):
+ return "".join(v.split("."))
+
+
+SRC_URI = "https://github.com/ArtifexSoftware/ghostpdl-downloads/releases/download/gs${@gs_verdir("${PV}")}/${BPN}-${PV}.tar.gz \
+ file://ghostscript-9.16-Werror-return-type.patch \
+ file://avoid-host-contamination.patch \
+ "
+
+SRC_URI[sha256sum] = "6f2bc61023469fcf7c7c2d7f1bdd75b75f2b41836aa1d5e641396246d4abbb59"
+
+PACKAGECONFIG ??= ""
+PACKAGECONFIG[gtk] = "--enable-gtk,--disable-gtk,gtk+3"
+PACKAGECONFIG[libidn] = "--with-libidn,--without-libidn,libidn"
+PACKAGECONFIG[libpaper] = "--with-libpaper,--without-libpaper,libpaper"
+PACKAGECONFIG[x11] = "--with-x --x-includes=${STAGING_INCDIR} --x-libraries=${STAGING_LIBDIR}, \
+ --without-x, virtual/libx11 libxext libxt"
+
+EXTRA_OECONF = "--with-jbig2dec \
+ --with-fontpath=${datadir}/fonts \
+ CUPSCONFIG="${STAGING_BINDIR_CROSS}/cups-config" \
+ PKGCONFIG=pkg-config \
+ "
+
+EXTRA_OECONF:append:mipsarcho32 = " --with-large_color_index=0"
+
+EXTRA_OECONF:append:armv7a = "${@bb.utils.contains('TUNE_FEATURES','neon','',' --disable-neon',d)}"
+EXTRA_OECONF:append:armv7ve = "${@bb.utils.contains('TUNE_FEATURES','neon','',' --disable-neon',d)}"
+
+# Uses autoconf but not automake, can't do out-of-tree
+inherit autotools-brokensep pkgconfig
+
+# Prune the source tree of libraries that we're using our packaging of, so that
+# ghostscript can't link to them. Can't prune zlib as that's needed for the
+# native tools.
+prune_sources() {
+ rm -rf ${S}/jpeg/ ${S}/libpng/ ${S}/tiff/ ${S}/expat/ ${S}/freetype/ ${S}/cups/lib
+}
+do_unpack[postfuncs] += "prune_sources"
+
+do_install:append () {
+ oe_runmake DESTDIR=${D} install-so
+ oe_runmake DESTDIR=${D} install-data
+ cp -r ${S}/Resource ${D}${datadir}/ghostscript/${PV}/
+ cp -r ${S}/iccprofiles ${D}${datadir}/ghostscript/${PV}/
+}
+
+# ghostscript does not supports "arc"
+COMPATIBLE_HOST = "^(?!arc).*"
+
+# some entries in NVD uses gpl_ghostscript
+CVE_PRODUCT = "ghostscript gpl_ghostscript"
+
+CVE_STATUS[CVE-2023-38560] = "not-applicable-config: PCL isn't part of the Ghostscript release"
+CVE_STATUS[CVE-2023-38559] = "cpe-incorrect: Issue only appears in versions before 10.02.0"
diff --git a/meta/recipes-extended/ghostscript/ghostscript_9.56.1.bb b/meta/recipes-extended/ghostscript/ghostscript_9.56.1.bb
deleted file mode 100644
index b2e741b44f..0000000000
--- a/meta/recipes-extended/ghostscript/ghostscript_9.56.1.bb
+++ /dev/null
@@ -1,129 +0,0 @@
-SUMMARY = "The GPL Ghostscript PostScript/PDF interpreter"
-DESCRIPTION = "Ghostscript is used for PostScript/PDF preview and printing. Usually as \
-a back-end to a program such as ghostview, it can display PostScript and PDF \
-documents in an X11 environment. \
-\
-Furthermore, it can render PostScript and PDF files as graphics to be printed \
-on non-PostScript printers. Supported printers include common \
-dot-matrix, inkjet and laser models. \
-"
-HOMEPAGE = "http://www.ghostscript.com"
-SECTION = "console/utils"
-
-LICENSE = "GPL-3.0-only"
-LIC_FILES_CHKSUM = "file://LICENSE;md5=f98ffa763e50cded76f49bce73aade16"
-
-DEPENDS = "ghostscript-native tiff jpeg fontconfig cups libpng"
-DEPENDS:class-native = "libpng-native"
-
-UPSTREAM_CHECK_URI = "https://github.com/ArtifexSoftware/ghostpdl-downloads/releases"
-UPSTREAM_CHECK_REGEX = "(?P<pver>\d+(\.\d+)+)\.tar"
-
-# As of ghostscript 9.54.0 the jpeg issue in the CVE is present in the gs jpeg sources
-# however we use an external jpeg which doesn't have the issue.
-CVE_CHECK_IGNORE += "CVE-2013-6629"
-
-def gs_verdir(v):
- return "".join(v.split("."))
-
-
-SRC_URI_BASE = "https://github.com/ArtifexSoftware/ghostpdl-downloads/releases/download/gs${@gs_verdir("${PV}")}/${BPN}-${PV}.tar.gz \
- file://ghostscript-9.15-parallel-make.patch \
- file://ghostscript-9.16-Werror-return-type.patch \
- file://do-not-check-local-libpng-source.patch \
- file://avoid-host-contamination.patch \
- file://mkdir-p.patch \
-"
-
-SRC_URI = "${SRC_URI_BASE} \
- file://ghostscript-9.21-prevent_recompiling.patch \
- file://cups-no-gcrypt.patch \
- "
-
-SRC_URI:class-native = "${SRC_URI_BASE} \
- file://ghostscript-9.21-native-fix-disable-system-libtiff.patch \
- file://base-genht.c-add-a-preprocessor-define-to-allow-fope.patch \
- "
-
-SRC_URI[sha256sum] = "1598b9a38659cce8448d42a73054b2f9cbfcc40a9b97eeec5f22d4d6cd1de8e6"
-
-# Put something like
-#
-# PACKAGECONFIG:append:pn-ghostscript = " x11"
-#
-# in local.conf to enable building with X11. Be careful. The order
-# of the overrides matters!
-#
-#PACKAGECONFIG ??= "${@bb.utils.contains('DISTRO_FEATURES', 'x11', 'x11', '', d)}"
-PACKAGECONFIG:class-native = ""
-
-PACKAGECONFIG[x11] = "--with-x --x-includes=${STAGING_INCDIR} --x-libraries=${STAGING_LIBDIR}, \
- --without-x, virtual/libx11 libxext libxt gtk+3\
- "
-
-EXTRA_OECONF = "--without-libpaper --with-system-libtiff --with-jbig2dec \
- --with-fontpath=${datadir}/fonts \
- --without-libidn --with-cups-serverbin=${exec_prefix}/lib/cups \
- --with-cups-datadir=${datadir}/cups \
- CUPSCONFIG="${STAGING_BINDIR_CROSS}/cups-config" \
- "
-
-EXTRA_OECONF:append:mipsarcho32 = " --with-large_color_index=0"
-
-# Explicity disable libtiff, fontconfig,
-# freetype, cups for ghostscript-native
-EXTRA_OECONF:class-native = "--without-x --with-system-libtiff=no \
- --without-jbig2dec --without-libpaper \
- --with-fontpath=${datadir}/fonts \
- --without-libidn --disable-fontconfig \
- --enable-freetype --disable-cups "
-
-# This has been fixed upstream but for now we need to subvert the check for time.h
-# http://bugs.ghostscript.com/show_bug.cgi?id=692443
-# http://bugs.ghostscript.com/show_bug.cgi?id=692426
-CFLAGS += "-DHAVE_SYS_TIME_H=1"
-BUILD_CFLAGS += "-DHAVE_SYS_TIME_H=1"
-
-inherit autotools-brokensep
-
-do_configure:prepend:class-target () {
- rm -rf ${S}/jpeg/
-}
-
-do_configure:append () {
- # copy tools from the native ghostscript build
- if [ "${PN}" != "ghostscript-native" ]; then
- mkdir -p obj/aux soobj
- for i in genarch genconf mkromfs echogs gendev genht packps; do
- cp ${STAGING_BINDIR_NATIVE}/ghostscript-${PV}/$i obj/aux/$i
- done
- fi
-}
-
-do_install:append () {
- mkdir -p ${D}${datadir}/ghostscript/${PV}/
- cp -r ${S}/Resource ${D}${datadir}/ghostscript/${PV}/
- cp -r ${S}/iccprofiles ${D}${datadir}/ghostscript/${PV}/
-}
-
-do_compile:class-native () {
- mkdir -p obj
- for i in genarch genconf mkromfs echogs gendev genht packps; do
- oe_runmake obj/aux/$i
- done
-}
-
-do_install:class-native () {
- install -d ${D}${bindir}/ghostscript-${PV}
- for i in genarch genconf mkromfs echogs gendev genht packps; do
- install -m 755 obj/aux/$i ${D}${bindir}/ghostscript-${PV}/$i
- done
-}
-
-BBCLASSEXTEND = "native"
-
-# ghostscript does not supports "arc"
-COMPATIBLE_HOST = "^(?!arc).*"
-
-# some entries in NVD uses gpl_ghostscript
-CVE_PRODUCT = "ghostscript gpl_ghostscript"
diff --git a/meta/recipes-extended/go-examples/go-helloworld_0.1.bb b/meta/recipes-extended/go-examples/go-helloworld_0.1.bb
index d0de035921..74f3520eae 100644
--- a/meta/recipes-extended/go-examples/go-helloworld_0.1.bb
+++ b/meta/recipes-extended/go-examples/go-helloworld_0.1.bb
@@ -1,4 +1,4 @@
-DESCRIPTION = "This is a simple example recipe that cross-compiles a Go program."
+SUMMARY = "This is a simple example recipe that cross-compiles a Go program."
SECTION = "examples"
HOMEPAGE = "https://golang.org/"
@@ -6,7 +6,7 @@ LICENSE = "MIT"
LIC_FILES_CHKSUM = "file://${COMMON_LICENSE_DIR}/MIT;md5=0835ade698e0bcf8506ecda2f7b4f302"
SRC_URI = "git://go.googlesource.com/example;branch=master;protocol=https"
-SRCREV = "2e68773dfca072cb81f219fc3b97ad34fe9d9f94"
+SRCREV = "32022caedd6a177a7717aa8680cbe179e1045935"
UPSTREAM_CHECK_COMMITS = "1"
GO_IMPORT = "golang.org/x/example"
diff --git a/meta/recipes-extended/gperf/gperf/0001-Make-the-code-C-17-compliant.patch b/meta/recipes-extended/gperf/gperf/0001-Make-the-code-C-17-compliant.patch
new file mode 100644
index 0000000000..96e70cdb36
--- /dev/null
+++ b/meta/recipes-extended/gperf/gperf/0001-Make-the-code-C-17-compliant.patch
@@ -0,0 +1,29 @@
+From 6194f0027045433598a61965758b4531a3d06d1f Mon Sep 17 00:00:00 2001
+From: Bruno Haible <bruno@clisp.org>
+Date: Sun, 30 Aug 2020 12:36:15 +0200
+Subject: [PATCH] Make the code C++17 compliant.
+
+* lib/getline.cc (getstr): Don't use the 'register' keyword.
+
+Upstream-Status: Backport [https://git.savannah.gnu.org/gitweb/?p=gperf.git;a=commit;h=a63b830554920476881837eeacd4a6b507632b19]
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+---
+ lib/getline.cc | 2 +-
+ 1 file changed, 1 insertion(+), 1 deletion(-)
+
+diff --git a/lib/getline.cc b/lib/getline.cc
+index c57c633..0984a7c 100644
+--- a/lib/getline.cc
++++ b/lib/getline.cc
+@@ -55,7 +55,7 @@ getstr (char **lineptr, size_t *n, FILE *stream, char terminator, size_t offset)
+
+ for (;;)
+ {
+- register int c = getc (stream);
++ int c = getc (stream);
+
+ /* We always want at least one char left in the buffer, since we
+ always (unless we get an error while reading the first char)
+--
+2.39.0
+
diff --git a/meta/recipes-extended/gperf/gperf/1862c6e57a308a05889c80c048dbc58bdc378dcb.patch b/meta/recipes-extended/gperf/gperf/1862c6e57a308a05889c80c048dbc58bdc378dcb.patch
new file mode 100644
index 0000000000..98959db0a8
--- /dev/null
+++ b/meta/recipes-extended/gperf/gperf/1862c6e57a308a05889c80c048dbc58bdc378dcb.patch
@@ -0,0 +1,181 @@
+From 1862c6e57a308a05889c80c048dbc58bdc378dcb Mon Sep 17 00:00:00 2001
+From: Bruno Haible <bruno@clisp.org>
+Date: Tue, 5 Jul 2022 07:51:46 +0200
+Subject: [PATCH] Add support for reproducible builds.
+
+Suggested by Richard Purdie <richard.purdie@linuxfoundation.org> in
+<https://lists.gnu.org/archive/html/bug-gperf/2022-07/msg00000.html>.
+
+* autogen.sh: Import also lib/filename.h.
+* Makefile.in (IMPORTED_FILES): Add lib/filename.h.
+* src/options.cc: Include filename.h.
+(Options::print_options): Print only the base name of the program name.
+* tests/*.exp: Update.
+
+Upstream-Status: Backport
+
+Index: gperf-3.1/ChangeLog
+===================================================================
+--- gperf-3.1.orig/ChangeLog
++++ gperf-3.1/ChangeLog
+@@ -1,3 +1,14 @@
++2022-07-05 Bruno Haible <bruno@clisp.org>
++
++ Add support for reproducible builds.
++ Suggested by Richard Purdie <richard.purdie@linuxfoundation.org> in
++ <https://lists.gnu.org/archive/html/bug-gperf/2022-07/msg00000.html>.
++ * autogen.sh: Import also lib/filename.h.
++ * Makefile.in (IMPORTED_FILES): Add lib/filename.h.
++ * src/options.cc: Include filename.h.
++ (Options::print_options): Print only the base name of the program name.
++ * tests/*.exp: Update.
++
+ 2017-01-02 Marcel Schaible <marcel.schaible@studium.fernuni-hagen.de>
+
+ * gperf-3.1 released.
+Index: gperf-3.1/src/options.cc
+===================================================================
+--- gperf-3.1.orig/src/options.cc
++++ gperf-3.1/src/options.cc
+@@ -26,6 +26,7 @@
+ #include <string.h> /* declares strcmp() */
+ #include <ctype.h> /* declares isdigit() */
+ #include <limits.h> /* defines CHAR_MAX */
++#include "filename.h"
+ #include "getopt.h"
+ #include "version.h"
+
+@@ -280,6 +281,16 @@ Options::print_options () const
+ {
+ const char *arg = _argument_vector[i];
+
++ if (i == 0)
++ {
++ /* _argument_vector[0] is the program name. Print only its base name.
++ This is useful for reproducible builds. */
++ const char *p = arg + strlen (arg);
++ while (p > arg && ! ISSLASH (p[-1]))
++ p--;
++ arg = p;
++ }
++
+ /* Escape arg if it contains shell metacharacters. */
+ if (*arg == '-')
+ {
+Index: gperf-3.1/lib/filename.h
+===================================================================
+--- /dev/null
++++ gperf-3.1/lib/filename.h
+@@ -0,0 +1,112 @@
++/* Basic filename support macros.
++ Copyright (C) 2001-2022 Free Software Foundation, Inc.
++ This file is part of the GNU C Library.
++
++ The GNU C Library is free software; you can redistribute it and/or
++ modify it under the terms of the GNU Lesser General Public
++ License as published by the Free Software Foundation; either
++ version 2.1 of the License, or (at your option) any later version.
++
++ The GNU C Library is distributed in the hope that it will be useful,
++ but WITHOUT ANY WARRANTY; without even the implied warranty of
++ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
++ Lesser General Public License for more details.
++
++ You should have received a copy of the GNU Lesser General Public
++ License along with the GNU C Library; if not, see
++ <https://www.gnu.org/licenses/>. */
++
++/* From Paul Eggert and Jim Meyering. */
++
++#ifndef _FILENAME_H
++#define _FILENAME_H
++
++#include <string.h>
++
++#ifdef __cplusplus
++extern "C" {
++#endif
++
++
++/* Filename support.
++ ISSLASH(C) tests whether C is a directory separator
++ character.
++ HAS_DEVICE(Filename) tests whether Filename contains a device
++ specification.
++ FILE_SYSTEM_PREFIX_LEN(Filename) length of the device specification
++ at the beginning of Filename,
++ index of the part consisting of
++ alternating components and slashes.
++ FILE_SYSTEM_DRIVE_PREFIX_CAN_BE_RELATIVE
++ 1 when a non-empty device specification
++ can be followed by an empty or relative
++ part,
++ 0 when a non-empty device specification
++ must be followed by a slash,
++ 0 when device specification don't exist.
++ IS_ABSOLUTE_FILE_NAME(Filename)
++ tests whether Filename is independent of
++ any notion of "current directory".
++ IS_RELATIVE_FILE_NAME(Filename)
++ tests whether Filename may be concatenated
++ to a directory filename.
++ Note: On native Windows, OS/2, DOS, "c:" is neither an absolute nor a
++ relative file name!
++ IS_FILE_NAME_WITH_DIR(Filename) tests whether Filename contains a device
++ or directory specification.
++ */
++#if defined _WIN32 || defined __CYGWIN__ \
++ || defined __EMX__ || defined __MSDOS__ || defined __DJGPP__
++ /* Native Windows, Cygwin, OS/2, DOS */
++# define ISSLASH(C) ((C) == '/' || (C) == '\\')
++ /* Internal macro: Tests whether a character is a drive letter. */
++# define _IS_DRIVE_LETTER(C) \
++ (((C) >= 'A' && (C) <= 'Z') || ((C) >= 'a' && (C) <= 'z'))
++ /* Help the compiler optimizing it. This assumes ASCII. */
++# undef _IS_DRIVE_LETTER
++# define _IS_DRIVE_LETTER(C) \
++ (((unsigned int) (C) | ('a' - 'A')) - 'a' <= 'z' - 'a')
++# define HAS_DEVICE(Filename) \
++ (_IS_DRIVE_LETTER ((Filename)[0]) && (Filename)[1] == ':')
++# define FILE_SYSTEM_PREFIX_LEN(Filename) (HAS_DEVICE (Filename) ? 2 : 0)
++# ifdef __CYGWIN__
++# define FILE_SYSTEM_DRIVE_PREFIX_CAN_BE_RELATIVE 0
++# else
++ /* On native Windows, OS/2, DOS, the system has the notion of a
++ "current directory" on each drive. */
++# define FILE_SYSTEM_DRIVE_PREFIX_CAN_BE_RELATIVE 1
++# endif
++# if FILE_SYSTEM_DRIVE_PREFIX_CAN_BE_RELATIVE
++# define IS_ABSOLUTE_FILE_NAME(Filename) \
++ ISSLASH ((Filename)[FILE_SYSTEM_PREFIX_LEN (Filename)])
++# else
++# define IS_ABSOLUTE_FILE_NAME(Filename) \
++ (ISSLASH ((Filename)[0]) || HAS_DEVICE (Filename))
++# endif
++# define IS_RELATIVE_FILE_NAME(Filename) \
++ (! (ISSLASH ((Filename)[0]) || HAS_DEVICE (Filename)))
++# define IS_FILE_NAME_WITH_DIR(Filename) \
++ (strchr ((Filename), '/') != NULL || strchr ((Filename), '\\') != NULL \
++ || HAS_DEVICE (Filename))
++#else
++ /* Unix */
++# define ISSLASH(C) ((C) == '/')
++# define HAS_DEVICE(Filename) ((void) (Filename), 0)
++# define FILE_SYSTEM_PREFIX_LEN(Filename) ((void) (Filename), 0)
++# define FILE_SYSTEM_DRIVE_PREFIX_CAN_BE_RELATIVE 0
++# define IS_ABSOLUTE_FILE_NAME(Filename) ISSLASH ((Filename)[0])
++# define IS_RELATIVE_FILE_NAME(Filename) (! ISSLASH ((Filename)[0]))
++# define IS_FILE_NAME_WITH_DIR(Filename) (strchr ((Filename), '/') != NULL)
++#endif
++
++/* Deprecated macros. For backward compatibility with old users of the
++ 'filename' module. */
++#define IS_ABSOLUTE_PATH IS_ABSOLUTE_FILE_NAME
++#define IS_PATH_WITH_DIR IS_FILE_NAME_WITH_DIR
++
++
++#ifdef __cplusplus
++}
++#endif
++
++#endif /* _FILENAME_H */
diff --git a/meta/recipes-extended/gperf/gperf_3.1.bb b/meta/recipes-extended/gperf/gperf_3.1.bb
index 82750fca05..c57ade9fe3 100644
--- a/meta/recipes-extended/gperf/gperf_3.1.bb
+++ b/meta/recipes-extended/gperf/gperf_3.1.bb
@@ -5,14 +5,17 @@ LICENSE = "GPL-3.0-or-later"
LIC_FILES_CHKSUM = "file://COPYING;md5=d32239bcb673463ab874e80d47fae504 \
file://src/main.cc;beginline=8;endline=19;md5=dec8f611845d047387ed56b5b85fa99b"
-SRC_URI = "${GNU_MIRROR}/${BPN}/${BP}.tar.gz"
+SRC_URI = "${GNU_MIRROR}/${BPN}/${BP}.tar.gz \
+ file://0001-Make-the-code-C-17-compliant.patch"
SRC_URI[md5sum] = "9e251c0a618ad0824b51117d5d9db87e"
SRC_URI[sha256sum] = "588546b945bba4b70b6a3a616e80b4ab466e3f33024a352fc2198112cdbb3ae2"
+SRC_URI += "file://1862c6e57a308a05889c80c048dbc58bdc378dcb.patch"
+
inherit autotools
# The nested configures don't find the parent aclocal.m4 out of the box, so tell
# it where to look explicitly (mirroring the behaviour of upstream's Makefile.devel).
EXTRA_AUTORECONF += " -I ${S} --exclude=aclocal"
-BBCLASSEXTEND = "native"
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-extended/grep/grep/0001-mcontext-is-not-a-standard-layout-so-glibc-and-musl-.patch b/meta/recipes-extended/grep/grep/0001-mcontext-is-not-a-standard-layout-so-glibc-and-musl-.patch
deleted file mode 100644
index d8283d31b1..0000000000
--- a/meta/recipes-extended/grep/grep/0001-mcontext-is-not-a-standard-layout-so-glibc-and-musl-.patch
+++ /dev/null
@@ -1,32 +0,0 @@
-From 981385237834bdf3e468421147eece68d9bfc7e7 Mon Sep 17 00:00:00 2001
-From: Khem Raj <raj.khem@gmail.com>
-Date: Fri, 25 Mar 2022 13:34:26 -0700
-Subject: [PATCH] mcontext is not a standard layout so glibc and musl differ
-
-This is already applied to libsigsegv upstream, hopefully next version
-of grep will update its internal copy and we can drop this patch
-
-Upstream-Status: Backport [https://git.savannah.gnu.org/gitweb/?p=libsigsegv.git;a=commitdiff;h=a6ff69873110c0a8ba6f7fd90532dbc11224828c]
-Signed-off-by: Khem Raj <raj.khem@gmail.com>
----
- lib/sigsegv.c | 4 ++--
- 1 file changed, 2 insertions(+), 2 deletions(-)
-
-diff --git a/lib/sigsegv.c b/lib/sigsegv.c
-index 998c827..b6f4841 100644
---- a/lib/sigsegv.c
-+++ b/lib/sigsegv.c
-@@ -219,8 +219,8 @@ int libsigsegv_version = LIBSIGSEGV_VERSION;
- # define SIGSEGV_FAULT_STACKPOINTER ((ucontext_t *) ucp)->uc_mcontext.gp_regs[1]
- # else /* 32-bit */
- /* both should be equivalent */
--# if 0
--# define SIGSEGV_FAULT_STACKPOINTER ((ucontext_t *) ucp)->uc_mcontext.regs->gpr[1]
-+# if ! defined __GLIBC__
-+# define SIGSEGV_FAULT_STACKPOINTER ((ucontext_t *) ucp)->uc_regs->gregs[1]
- # else
- # define SIGSEGV_FAULT_STACKPOINTER ((ucontext_t *) ucp)->uc_mcontext.uc_regs->gregs[1]
- # endif
---
-2.35.1
-
diff --git a/meta/recipes-extended/grep/grep_3.11.bb b/meta/recipes-extended/grep/grep_3.11.bb
new file mode 100644
index 0000000000..cb3105de04
--- /dev/null
+++ b/meta/recipes-extended/grep/grep_3.11.bb
@@ -0,0 +1,46 @@
+SUMMARY = "GNU grep utility"
+HOMEPAGE = "http://savannah.gnu.org/projects/grep/"
+DESCRIPTION = "Grep searches one or more input files for lines containing a match to a specified pattern. By default, grep prints the matching lines."
+BUGTRACKER = "http://savannah.gnu.org/bugs/?group=grep"
+SECTION = "console/utils"
+LICENSE = "GPL-3.0-only"
+LIC_FILES_CHKSUM = "file://COPYING;md5=1ebbd3e34237af26da5dc08a4e440464"
+
+SRC_URI = "${GNU_MIRROR}/grep/grep-${PV}.tar.xz"
+
+SRC_URI[sha256sum] = "1db2aedde89d0dea42b16d9528f894c8d15dae4e190b59aecc78f5a951276eab"
+
+inherit autotools gettext texinfo pkgconfig
+
+# Fix "Argument list too long" error when len(TMPDIR) = 410
+acpaths = "-I ./m4"
+
+do_configure:prepend () {
+ sed -i -e '1s,#!@SHELL@,#!/bin/sh,' ${S}/src/egrep.sh
+ rm -f ${S}/m4/init.m4
+}
+
+do_install () {
+ autotools_do_install
+ if [ "${base_bindir}" != "${bindir}" ]; then
+ install -d ${D}${base_bindir}
+ mv ${D}${bindir}/grep ${D}${base_bindir}/grep
+ mv ${D}${bindir}/egrep ${D}${base_bindir}/egrep
+ mv ${D}${bindir}/fgrep ${D}${base_bindir}/fgrep
+ rmdir ${D}${bindir}/
+ fi
+}
+
+inherit update-alternatives
+
+PACKAGECONFIG ??= "pcre"
+PACKAGECONFIG[pcre] = "--enable-perl-regexp,--disable-perl-regexp,libpcre2"
+
+ALTERNATIVE_PRIORITY = "100"
+
+ALTERNATIVE:${PN} = "grep egrep fgrep"
+ALTERNATIVE_LINK_NAME[grep] = "${base_bindir}/grep"
+ALTERNATIVE_LINK_NAME[egrep] = "${base_bindir}/egrep"
+ALTERNATIVE_LINK_NAME[fgrep] = "${base_bindir}/fgrep"
+
+BBCLASSEXTEND = "nativesdk"
diff --git a/meta/recipes-extended/grep/grep_3.7.bb b/meta/recipes-extended/grep/grep_3.7.bb
deleted file mode 100644
index 7005cbe164..0000000000
--- a/meta/recipes-extended/grep/grep_3.7.bb
+++ /dev/null
@@ -1,48 +0,0 @@
-SUMMARY = "GNU grep utility"
-HOMEPAGE = "http://savannah.gnu.org/projects/grep/"
-DESCRIPTION = "Grep searches one or more input files for lines containing a match to a specified pattern. By default, grep prints the matching lines."
-BUGTRACKER = "http://savannah.gnu.org/bugs/?group=grep"
-SECTION = "console/utils"
-LICENSE = "GPL-3.0-only"
-LIC_FILES_CHKSUM = "file://COPYING;md5=1ebbd3e34237af26da5dc08a4e440464"
-
-SRC_URI = "${GNU_MIRROR}/grep/grep-${PV}.tar.xz \
- file://0001-mcontext-is-not-a-standard-layout-so-glibc-and-musl-.patch \
- "
-
-SRC_URI[sha256sum] = "5c10da312460aec721984d5d83246d24520ec438dd48d7ab5a05dbc0d6d6823c"
-
-inherit autotools gettext texinfo pkgconfig
-
-# Fix "Argument list too long" error when len(TMPDIR) = 410
-acpaths = "-I ./m4"
-
-do_configure:prepend () {
- sed -i -e '1s,#!@SHELL@,#!/bin/sh,' ${S}/src/egrep.sh
- rm -f ${S}/m4/init.m4
-}
-
-do_install () {
- autotools_do_install
- if [ "${base_bindir}" != "${bindir}" ]; then
- install -d ${D}${base_bindir}
- mv ${D}${bindir}/grep ${D}${base_bindir}/grep
- mv ${D}${bindir}/egrep ${D}${base_bindir}/egrep
- mv ${D}${bindir}/fgrep ${D}${base_bindir}/fgrep
- rmdir ${D}${bindir}/
- fi
-}
-
-inherit update-alternatives
-
-PACKAGECONFIG ??= "pcre"
-PACKAGECONFIG[pcre] = "--enable-perl-regexp,--disable-perl-regexp,libpcre"
-
-ALTERNATIVE_PRIORITY = "100"
-
-ALTERNATIVE:${PN} = "grep egrep fgrep"
-ALTERNATIVE_LINK_NAME[grep] = "${base_bindir}/grep"
-ALTERNATIVE_LINK_NAME[egrep] = "${base_bindir}/egrep"
-ALTERNATIVE_LINK_NAME[fgrep] = "${base_bindir}/fgrep"
-
-BBCLASSEXTEND = "nativesdk"
diff --git a/meta/recipes-extended/groff/files/0001-Include-config.h.patch b/meta/recipes-extended/groff/files/0001-Include-config.h.patch
deleted file mode 100644
index 99f590bef3..0000000000
--- a/meta/recipes-extended/groff/files/0001-Include-config.h.patch
+++ /dev/null
@@ -1,212 +0,0 @@
-From 6cfa9f8126c1d6ec26f120d273e714fb19108873 Mon Sep 17 00:00:00 2001
-From: Khem Raj <raj.khem@gmail.com>
-Date: Sun, 4 Aug 2019 16:32:41 -0700
-Subject: [PATCH] Include config.h
-
-This helps avoid the include conflicts where <stdlib.h> is including
-<math.h> and since -I./lib is used and a local math.h wrapper is
-residing in there, the build breaks since stdlib.h really wants the
-standard system math.h to be included, this ensures that right macros
-are predefined and included before stdlib.h is included
-
-fixes
-In file included from src/libs/libgroff/assert.cpp:20:
-In file included from TOPDIR/build/tmp/work/aarch64-yoe-linux-musl/groff/1.22.4-r0/recipe-sysroot/usr/include/c++/v1/stdlib.h:100:
-./lib/math.h:38:3: error: "Please include config.h first."
- #error "Please include config.h first."
- ^
-./lib/math.h:40:1: error: unknown type name '_GL_INLINE_HEADER_BEGIN'
-
-We delete eqn.cpp and qen.hpp in do_configure
-to ensure they're regenerated and deterministic.
-
-Issue is fixed upstream with similar patches:
-https://git.savannah.gnu.org/cgit/groff.git/commit/?id=979f3f4266151c7681a68a40d2c4913842a7271d
-https://git.savannah.gnu.org/cgit/groff.git/commit/?id=fe121eeacd53c96105f23209b2c205f436f97359
-
-Upstream-Status: Backport [see links above]
-Signed-off-by: Khem Raj <raj.khem@gmail.com>
----
- src/libs/libgroff/assert.cpp | 4 +
- src/libs/libgroff/curtime.cpp | 4 +
- src/libs/libgroff/device.cpp | 4 +
- src/libs/libgroff/error.cpp | 4 +
- src/libs/libgroff/fatal.cpp | 4 +
- src/libs/libgroff/string.cpp | 4 +
- src/libs/libgroff/strsave.cpp | 4 +
- src/preproc/eqn/eqn.cpp | 450 ++++++++++++++++++----------------
- src/preproc/eqn/eqn.hpp | 12 +-
- src/preproc/eqn/eqn.ypp | 4 +
- src/preproc/eqn/other.cpp | 4 +
- src/preproc/eqn/text.cpp | 4 +
- src/preproc/pic/object.cpp | 4 +
- 13 files changed, 285 insertions(+), 221 deletions(-)
-
-diff --git a/src/libs/libgroff/assert.cpp b/src/libs/libgroff/assert.cpp
-index aceed05..97780d6 100644
---- a/src/libs/libgroff/assert.cpp
-+++ b/src/libs/libgroff/assert.cpp
-@@ -16,6 +16,10 @@ for more details.
- You should have received a copy of the GNU General Public License
- along with this program. If not, see <http://www.gnu.org/licenses/>. */
-
-+#if HAVE_CONFIG_H
-+# include <config.h>
-+#endif
-+
- #include <stdio.h>
- #include <stdlib.h>
- #include "assert.h"
-diff --git a/src/libs/libgroff/curtime.cpp b/src/libs/libgroff/curtime.cpp
-index 72fe067..9ddba08 100644
---- a/src/libs/libgroff/curtime.cpp
-+++ b/src/libs/libgroff/curtime.cpp
-@@ -15,6 +15,10 @@ for more details.
- The GNU General Public License version 2 (GPL2) is available in the
- internet at <http://www.gnu.org/licenses/gpl-2.0.txt>. */
-
-+#if HAVE_CONFIG_H
-+# include <config.h>
-+#endif
-+
- #include <errno.h>
- #include <limits.h>
- #include <stdlib.h>
-diff --git a/src/libs/libgroff/device.cpp b/src/libs/libgroff/device.cpp
-index 0d28b85..c211f85 100644
---- a/src/libs/libgroff/device.cpp
-+++ b/src/libs/libgroff/device.cpp
-@@ -17,6 +17,10 @@ for more details.
- You should have received a copy of the GNU General Public License
- along with this program. If not, see <http://www.gnu.org/licenses/>. */
-
-+#if HAVE_CONFIG_H
-+# include <config.h>
-+#endif
-+
- #include <stdlib.h>
- #include "device.h"
- #include "defs.h"
-diff --git a/src/libs/libgroff/error.cpp b/src/libs/libgroff/error.cpp
-index 9a18803..7b63d3d 100644
---- a/src/libs/libgroff/error.cpp
-+++ b/src/libs/libgroff/error.cpp
-@@ -17,6 +17,10 @@ for more details.
- You should have received a copy of the GNU General Public License
- along with this program. If not, see <http://www.gnu.org/licenses/>. */
-
-+#if HAVE_CONFIG_H
-+# include <config.h>
-+#endif
-+
- #include <stdio.h>
- #include <stdlib.h>
- #include <string.h>
-diff --git a/src/libs/libgroff/fatal.cpp b/src/libs/libgroff/fatal.cpp
-index c0dcb35..fd6003e 100644
---- a/src/libs/libgroff/fatal.cpp
-+++ b/src/libs/libgroff/fatal.cpp
-@@ -16,6 +16,10 @@ for more details.
- You should have received a copy of the GNU General Public License
- along with this program. If not, see <http://www.gnu.org/licenses/>. */
-
-+#if HAVE_CONFIG_H
-+# include <config.h>
-+#endif
-+
- #include <stdlib.h>
-
- #define FATAL_ERROR_EXIT_CODE 3
-diff --git a/src/libs/libgroff/string.cpp b/src/libs/libgroff/string.cpp
-index 46c015c..449f3a6 100644
---- a/src/libs/libgroff/string.cpp
-+++ b/src/libs/libgroff/string.cpp
-@@ -17,6 +17,10 @@ for more details.
- You should have received a copy of the GNU General Public License
- along with this program. If not, see <http://www.gnu.org/licenses/>. */
-
-+#if HAVE_CONFIG_H
-+# include <config.h>
-+#endif
-+
- #include <stdlib.h>
-
- #include "lib.h"
-diff --git a/src/libs/libgroff/strsave.cpp b/src/libs/libgroff/strsave.cpp
-index f95c05e..d875045 100644
---- a/src/libs/libgroff/strsave.cpp
-+++ b/src/libs/libgroff/strsave.cpp
-@@ -17,6 +17,10 @@ for more details.
- You should have received a copy of the GNU General Public License
- along with this program. If not, see <http://www.gnu.org/licenses/>. */
-
-+#if HAVE_CONFIG_H
-+# include <config.h>
-+#endif
-+
- #include <string.h>
- #include <stdlib.h>
-
-diff --git a/src/preproc/eqn/eqn.ypp b/src/preproc/eqn/eqn.ypp
-index fb318c3..b7b647e 100644
---- a/src/preproc/eqn/eqn.ypp
-+++ b/src/preproc/eqn/eqn.ypp
-@@ -16,6 +16,10 @@ for more details.
- You should have received a copy of the GNU General Public License
- along with this program. If not, see <http://www.gnu.org/licenses/>. */
- %{
-+#if HAVE_CONFIG_H
-+# include <config.h>
-+#endif
-+
- #include <stdio.h>
- #include <string.h>
- #include <stdlib.h>
-diff --git a/src/preproc/eqn/other.cpp b/src/preproc/eqn/other.cpp
-index 8db993f..38db396 100644
---- a/src/preproc/eqn/other.cpp
-+++ b/src/preproc/eqn/other.cpp
-@@ -17,6 +17,10 @@ for more details.
- You should have received a copy of the GNU General Public License
- along with this program. If not, see <http://www.gnu.org/licenses/>. */
-
-+#if HAVE_CONFIG_H
-+# include <config.h>
-+#endif
-+
- #include <stdlib.h>
-
- #include "eqn.h"
-diff --git a/src/preproc/eqn/text.cpp b/src/preproc/eqn/text.cpp
-index f3d06f9..3b244d5 100644
---- a/src/preproc/eqn/text.cpp
-+++ b/src/preproc/eqn/text.cpp
-@@ -17,6 +17,10 @@ for more details.
- You should have received a copy of the GNU General Public License
- along with this program. If not, see <http://www.gnu.org/licenses/>. */
-
-+#if HAVE_CONFIG_H
-+# include <config.h>
-+#endif
-+
- #include <ctype.h>
- #include <stdlib.h>
- #include "eqn.h"
-diff --git a/src/preproc/pic/object.cpp b/src/preproc/pic/object.cpp
-index d8ba610..f26a831 100644
---- a/src/preproc/pic/object.cpp
-+++ b/src/preproc/pic/object.cpp
-@@ -17,6 +17,10 @@ for more details.
- You should have received a copy of the GNU General Public License
- along with this program. If not, see <http://www.gnu.org/licenses/>. */
-
-+#if HAVE_CONFIG_H
-+# include <config.h>
-+#endif
-+
- #include <stdlib.h>
-
- #include "pic.h"
---
-2.22.0
-
diff --git a/meta/recipes-extended/groff/files/0001-Make-manpages-mulitlib-identical.patch b/meta/recipes-extended/groff/files/0001-Make-manpages-mulitlib-identical.patch
index 9105da6457..6dc84c9062 100644
--- a/meta/recipes-extended/groff/files/0001-Make-manpages-mulitlib-identical.patch
+++ b/meta/recipes-extended/groff/files/0001-Make-manpages-mulitlib-identical.patch
@@ -1,27 +1,25 @@
-From e738f9185ba90f2083c846ade3551234bb5a7cbc Mon Sep 17 00:00:00 2001
+From aa1f37f1e0ff0dc0eeb199b52959e0deb275721e Mon Sep 17 00:00:00 2001
From: Jeremy Puhlman <jpuhlman@mvista.com>
Date: Sat, 7 Mar 2020 00:59:13 +0000
Subject: [PATCH] Make manpages mulitlib identical
-Upstream-Status: Pending
+Upstream-Status: Submitted [by email to g.branden.robinson@gmail.com]
Signed-off-by: Jeremy Puhlman <jpuhlman@mvista.com>
+
---
Makefile.am | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/Makefile.am b/Makefile.am
-index d18c49b..6175fe9 100644
+index f7ab410..7e1f3fb 100644
--- a/Makefile.am
+++ b/Makefile.am
-@@ -917,7 +917,7 @@ SUFFIXES += .man
- -e "s|[@]MDATE[@]|`$(PERL) $(top_srcdir)/mdate.pl $<`|g" \
- -e "s|[@]OLDFONTDIR[@]|`echo $(oldfontdir) | sed -f $(makevarescape)`|g" \
+@@ -891,7 +891,7 @@ SUFFIXES += .man
+ -e "s|[@]PAGE[@]|\\\\%$(PAGE)|g" \
-e "s|[@]PDFDOCDIR[@]|`echo $(pdfdocdir) | sed -f $(makevarescape)`|g" \
+ -e "s|[@]PSPRINT[@]|`echo $(PSPRINT) | sed -f $(makevarescape)`|g" \
- -e "s|[@]SYSTEMMACRODIR[@]|`echo $(systemtmacdir) | sed -f $(makevarescape)`|g" \
+ -e "s|[@]SYSTEMMACRODIR[@]|`echo $(systemtmacdir) | sed -e 's,$(libdir),$(prefix)/lib*,' | sed -f $(makevarescape)`|g" \
- -e "s|[@]TMAC_AN_PREFIX[@]|$(tmac_an_prefix)|g" \
- -e "s|[@]TMAC_M_PREFIX[@]|$(tmac_m_prefix)|g" \
- -e "s|[@]TMAC_MDIR[@]|$(tmacdir)/mm|g" \
---
-2.23.0
-
+ -e "s|[@]TMAC_AN_PREFIX[@]|\\\\%$(tmac_an_prefix)|g" \
+ -e "s|[@]TMAC_M_PREFIX[@]|\\\\%$(tmac_m_prefix)|g" \
+ -e "s|[@]TMAC_MDIR[@]|`echo $(tmacdir) | sed -f $(makevarescape)`/mm|g" \
diff --git a/meta/recipes-extended/groff/files/0001-build-Fix-Savannah-64681-webpage.ps-deps.patch b/meta/recipes-extended/groff/files/0001-build-Fix-Savannah-64681-webpage.ps-deps.patch
new file mode 100644
index 0000000000..eae5dc9998
--- /dev/null
+++ b/meta/recipes-extended/groff/files/0001-build-Fix-Savannah-64681-webpage.ps-deps.patch
@@ -0,0 +1,51 @@
+From c75965053124149381ada3c394da74be078076cf Mon Sep 17 00:00:00 2001
+From: "G. Branden Robinson" <g.branden.robinson@gmail.com>
+Date: Sat, 16 Sep 2023 16:28:00 -0500
+Subject: [PATCH] [build]: Fix Savannah #64681 (webpage.ps deps).
+
+* doc/doc.am (doc/webpage.ps, doc/webpage.html): Update and parallelize
+ target dependencies. Resolve race by requiring "grn" and "soelim" to
+ be built first. Also add dependency on `$(TMAC_PACKAGE_MS)`.
+
+Fixes <https://savannah.gnu.org/bugs/?64681>. Thanks to Alexander
+Kanavin for the report.
+
+ANNOUNCE: Acknowledge Alexander.
+
+Upstream-Status: Backport
+Signed-off-by: Alexander Kanavin <alex@linutronix.de>
+---
+ doc/doc.am | 12 ++++++++----
+ 1 file changed, 8 insertions(+), 4 deletions(-)
+
+diff --git a/doc/doc.am b/doc/doc.am
+index cddc51907..d3c9ab6b7 100644
+--- a/doc/doc.am
++++ b/doc/doc.am
+@@ -346,6 +346,9 @@ doc/pic.ps: $(doc_srcdir)/pic.ms eqn pic tbl
+ $(GROFF_V)$(MKDIR_P) `dirname $@` \
+ && $(DOC_GROFF) -pet -Tps -ms $(doc_srcdir)/pic.ms >$@
+
++# groff(1)'s `-I` implies `-g` and `-s`, so we must depend on grn and
++# soelim even though the document doesn't require them.
++doc/webpage.ps: grn soelim
+ doc/webpage.ps: $(DOC_GNU_EPS) tmac/www.tmac tbl
+ doc/webpage.ps: $(doc_srcdir)/webpage.ms
+ $(GROFF_V)$(MKDIR_P) `dirname $@` \
+@@ -365,11 +368,12 @@ doc/pic.html: $(doc_srcdir)/pic.ms
+ && $(DOC_GROFF) -pet -P-Ipic -P-Dimg -P-jpic -Thtml -ms \
+ $(doc_srcdir)/pic.ms > pic.html
+
+-doc/webpage.html: tbl
+-doc/webpage.html: tmac/www.tmac
+-doc/webpage.html: $(DOC_GNU_EPS)
++# groff(1)'s `-I` implies `-g` and `-s`, so we must depend on grn and
++# soelim even though the document doesn't require them.
++doc/webpage.html: grn soelim
++doc/webpage.html: $(DOC_GNU_EPS) tmac/www.tmac tbl
+ doc/webpage.html: $(doc_srcdir)/groff.css
+-doc/webpage.html: $(doc_srcdir)/webpage.ms
++doc/webpage.html: $(doc_srcdir)/webpage.ms $(TMAC_PACKAGE_MS)
+ $(GROFF_V)$(MKDIR_P) $(doc_builddir) \
+ && cd $(doc_builddir) \
+ && $(DOC_GROFF) -t -I $(doc_srcdir) -P-jwebpage -P-nrb \
diff --git a/meta/recipes-extended/groff/files/0001-build-meintro_fr.ps-depends-on-tbl.patch b/meta/recipes-extended/groff/files/0001-build-meintro_fr.ps-depends-on-tbl.patch
new file mode 100644
index 0000000000..3e81b86ac8
--- /dev/null
+++ b/meta/recipes-extended/groff/files/0001-build-meintro_fr.ps-depends-on-tbl.patch
@@ -0,0 +1,31 @@
+From f21e9f13beb57a1e0666edf9693d7c83f2189897 Mon Sep 17 00:00:00 2001
+From: "G. Branden Robinson" <g.branden.robinson@gmail.com>
+Date: Fri, 22 Sep 2023 01:27:57 -0500
+Subject: [PATCH] [build]: meintro_fr.ps depends on tbl.
+
+* doc/doc.am (doc/meintro_fr.ps): Depend on tbl, resolving race in
+ sufficiently parallelized builds. Overlooked in commit 92349ae223,
+ 2022-05-30.
+
+Fixes <https://savannah.gnu.org/bugs/?64695>. Thanks to Alexander
+Kanavin for the report.
+
+Upstream-Status: Backport
+Signed-off-by: Alexander Kanavin <alex@linutronix.de>
+---
+ doc/doc.am | 2 +-
+ 1 file changed, 1 insertion(+), 1 deletion(-)
+
+diff --git a/doc/doc.am b/doc/doc.am
+index d3c9ab6b7..0f95c7774 100644
+--- a/doc/doc.am
++++ b/doc/doc.am
+@@ -334,7 +334,7 @@ SUFFIXES += .me.in .me
+
+ # Use '-K utf8', not '-k', in case 'configure' didn't find uchardet.
+ # The French translation uses tbl; its English counterpart does not.
+-doc/meintro_fr.ps: doc/meintro_fr.me preconv
++doc/meintro_fr.ps: doc/meintro_fr.me preconv tbl
+ $(GROFF_V)$(MKDIR_P) `dirname $@` \
+ && $(DOC_GROFF) -K utf8 -t -Tps -me -mfr $< >$@
+
diff --git a/meta/recipes-extended/groff/files/0001-replace-perl-w-with-use-warnings.patch b/meta/recipes-extended/groff/files/0001-replace-perl-w-with-use-warnings.patch
deleted file mode 100644
index eda6a40f51..0000000000
--- a/meta/recipes-extended/groff/files/0001-replace-perl-w-with-use-warnings.patch
+++ /dev/null
@@ -1,106 +0,0 @@
-From 6821a23e6cf34df37c351b45be413a8da9115f9f Mon Sep 17 00:00:00 2001
-From: Robert Yang <liezhi.yang@windriver.com>
-Date: Sat, 11 May 2019 17:03:03 +0800
-Subject: [PATCH 1/2] replace "perl -w" with "use warnings"
-
-The shebang's max length is usually 128 as defined in
-/usr/include/linux/binfmts.h:
- #define BINPRM_BUF_SIZE 128
-
-There would be errors when @PERL@ is longer than 128, use
-'/usr/bin/env perl' can fix the problem, but '/usr/bin/env perl -w'
-doesn't work:
-
-/usr/bin/env: perl -w: No such file or directory
-
-So replace "perl -w" with "use warnings" to make it work.
-
-Upstream-Status: Pending
-
-Signed-off-by: Robert Yang <liezhi.yang@windriver.com>
-
-Rebase to 1.22.4.
-
-Signed-off-by: Hongxu Jia <hongxu.jia@windriver.com>
----
- font/devpdf/util/BuildFoundries.pl | 3 ++-
- src/devices/gropdf/gropdf.pl | 3 ++-
- src/devices/gropdf/pdfmom.pl | 3 ++-
- src/utils/afmtodit/afmtodit.pl | 3 ++-
- 4 files changed, 8 insertions(+), 4 deletions(-)
-
-diff --git a/font/devpdf/util/BuildFoundries.pl b/font/devpdf/util/BuildFoundries.pl
-index f8af826..9584e28 100644
---- a/font/devpdf/util/BuildFoundries.pl
-+++ b/font/devpdf/util/BuildFoundries.pl
-@@ -1,4 +1,4 @@
--#!/usr/bin/perl -w
-+#!/usr/bin/perl
- #
- # BuildFoundries : Given a Foundry file generate groff and download files
- # Deri James : Monday 07 Feb 2011
-@@ -22,6 +22,7 @@
- # along with this program. If not, see <http://www.gnu.org/licenses/>.
-
- use strict;
-+use warnings;
-
- (my $progname = $0) =~s @.*/@@;
- my $where=shift||'';
-diff --git a/src/devices/gropdf/gropdf.pl b/src/devices/gropdf/gropdf.pl
-index 2ec52d0..ce5a06f 100644
---- a/src/devices/gropdf/gropdf.pl
-+++ b/src/devices/gropdf/gropdf.pl
-@@ -1,4 +1,4 @@
--#!@PERL@ -w
-+#!@PERL@
- #
- # gropdf : PDF post processor for groff
- #
-@@ -21,6 +21,7 @@
- # along with this program. If not, see <http://www.gnu.org/licenses/>.
-
- use strict;
-+use warnings;
- use Getopt::Long qw(:config bundling);
-
- use constant
-diff --git a/src/devices/gropdf/pdfmom.pl b/src/devices/gropdf/pdfmom.pl
-index c9b08b2..61124f3 100644
---- a/src/devices/gropdf/pdfmom.pl
-+++ b/src/devices/gropdf/pdfmom.pl
-@@ -1,4 +1,4 @@
--#!@PERL@ -w
-+#!@PERL@
- #
- # pdfmom : Frontend to run groff -mom to produce PDFs
- # Deri James : Friday 16 Mar 2012
-@@ -23,6 +23,7 @@
- # along with this program. If not, see <http://www.gnu.org/licenses/>.
-
- use strict;
-+use warnings;
- use File::Temp qw/tempfile/;
- my @cmd;
- my $dev='pdf';
-diff --git a/src/utils/afmtodit/afmtodit.pl b/src/utils/afmtodit/afmtodit.pl
-index 954c58e..81a6c97 100644
---- a/src/utils/afmtodit/afmtodit.pl
-+++ b/src/utils/afmtodit/afmtodit.pl
-@@ -1,4 +1,4 @@
--#! /usr/bin/perl -w
-+#! /usr/bin/perl
- # -*- Perl -*-
- # Copyright (C) 1989-2018 Free Software Foundation, Inc.
- # Written by James Clark (jjc@jclark.com)
-@@ -19,6 +19,7 @@
- # along with this program. If not, see <http://www.gnu.org/licenses/>.
-
- use strict;
-+use warnings;
-
- @afmtodit.tables@
-
---
-2.7.4
-
diff --git a/meta/recipes-extended/groff/files/0001-support-musl.patch b/meta/recipes-extended/groff/files/0001-support-musl.patch
deleted file mode 100644
index a837b11b1b..0000000000
--- a/meta/recipes-extended/groff/files/0001-support-musl.patch
+++ /dev/null
@@ -1,41 +0,0 @@
-From 695965c27be74acb5968f19d51af86065c4b71a9 Mon Sep 17 00:00:00 2001
-From: Hongxu Jia <hongxu.jia@windriver.com>
-Date: Mon, 13 May 2019 09:48:14 +0800
-Subject: [PATCH] support musl
-
-...
-|./lib/math.h:2877:1: error: 'int signbit(float)' conflicts with a previous declaration
-| _GL_MATH_CXX_REAL_FLOATING_DECL_2 (signbit)
-| ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-|In file included from recipe-sysroot/usr/include/c++/8.3.0/math.h:36,
-| from ./lib/math.h:27,
-| from ./src/include/driver.h:27,
-| from src/devices/grodvi/dvi.cpp:20:
-|recipe-sysroot/usr/include/c++/8.3.0/cmath:661:3: note: previous declaration 'constexpr bool std::signbit(float)'
-| signbit(float __x)
-| ^~~~~~~
-...
-
-Upstream-Status: Backport [http://git.savannah.gnu.org/gitweb/?p=gnulib.git;a=commit;h=453ff940449bbbde9ec00f0bbf82a359c5598fc7]
-
-Signed-off-by: Hongxu Jia <hongxu.jia@windriver.com>
----
- gnulib_m4/signbit.m4 | 2 ++
- 1 file changed, 2 insertions(+)
-
-diff --git a/gnulib_m4/signbit.m4 b/gnulib_m4/signbit.m4
-index 9e7884d..8b9c70c 100644
---- a/gnulib_m4/signbit.m4
-+++ b/gnulib_m4/signbit.m4
-@@ -31,6 +31,8 @@ AC_DEFUN([gl_SIGNBIT],
- [case "$host_os" in
- # Guess yes on glibc systems.
- *-gnu* | gnu*) gl_cv_func_signbit="guessing yes" ;;
-+ # Guess yes on musl systems.
-+ *-musl*) gl_cv_func_signbit="guessing yes" ;;
- # Guess yes on native Windows.
- mingw*) gl_cv_func_signbit="guessing yes" ;;
- # If we don't know, assume the worst.
---
-2.7.4
-
diff --git a/meta/recipes-extended/groff/files/groff-not-search-fonts-on-build-host.patch b/meta/recipes-extended/groff/files/groff-not-search-fonts-on-build-host.patch
index c80a2a5c38..23992576f9 100644
--- a/meta/recipes-extended/groff/files/groff-not-search-fonts-on-build-host.patch
+++ b/meta/recipes-extended/groff/files/groff-not-search-fonts-on-build-host.patch
@@ -1,7 +1,7 @@
-From 75761ae7adc88412de4379d1cf5484b055cd5f18 Mon Sep 17 00:00:00 2001
+From eb16276c3e2e34aa2e57f6a0e68554657b90cd28 Mon Sep 17 00:00:00 2001
From: Hongxu Jia <hongxu.jia@windriver.com>
Date: Sat, 11 May 2019 17:06:29 +0800
-Subject: [PATCH 2/2] groff searchs fonts which are provided by ghostscript on
+Subject: [PATCH] groff searchs fonts which are provided by ghostscript on
build host. It causes non-determinism issue. So not search font dirs on host.
Upstream-Status: Inappropriate [cross build specific]
@@ -10,23 +10,30 @@ Signed-off-by: Kai Kang <kai.kang@windriver.com>
Rebase to 1.22.4
Signed-off-by: Hongxu Jia <hongxu.jia@windriver.com>
+
---
- font/devpdf/Foundry.in | 2 +-
- 1 file changed, 1 insertion(+), 1 deletion(-)
+ font/devpdf/Foundry.in | 4 ++--
+ 1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/font/devpdf/Foundry.in b/font/devpdf/Foundry.in
-index 93e9b66..235b23b 100644
+index e5aba65..5441734 100644
--- a/font/devpdf/Foundry.in
+++ b/font/devpdf/Foundry.in
-@@ -65,7 +65,7 @@ ZD|Y||||Dingbats!d050000l.pfb
- #======================================================================
+@@ -20,7 +20,7 @@
- #Foundry|Name|Searchpath
--foundry|U|(gs):@urwfontsdir@ :/usr/share/fonts/type1/gsfonts :/opt/local/share/fonts/urw-fonts # the URW fonts delivered with ghostscript (may be different)
-+foundry|U|(gs) # the URW fonts delivered with ghostscript (may be different)
- #Define Flags for afmtodit
+ #=======================================================================
+ #Foundry|Name|Search path
+-foundry||@urwfontsdir@:(gs):/usr/share/fonts/type1/gsfonts:/usr/share/fonts/default/Type1:/usr/share/fonts/default/Type1/adobestd35:/usr/share/fonts/type1/urw-base35:/opt/local/share/fonts/urw-fonts:/usr/local/share/fonts/ghostscript
++foundry||(gs)
+
+ # Enable the font description files for grops (generated from Adobe
+ # foundry font files) to be used with gropdf. afmtodit must not be
+@@ -72,7 +72,7 @@ EURO|N||||*../devps/freeeuro.pfa
+ # URW fonts are typically shipped with Ghostscript, but can be replaced.
+
+ #Foundry|Name|Search path
+-foundry|U|@urwfontsdir@:/usr/share/fonts/type1/gsfonts:/usr/share/fonts/default/Type1:/usr/share/fonts/default/Type1/adobestd35:/usr/share/fonts/type1/urw-base35:/opt/local/share/fonts/urw-fonts:/usr/local/share/fonts/ghostscript:(gs)
++foundry|U|(gs)
+
+ # Define flags for afmtodit.
- r=-i 0 -m
---
-2.7.4
-
diff --git a/meta/recipes-extended/groff/groff_1.22.4.bb b/meta/recipes-extended/groff/groff_1.22.4.bb
deleted file mode 100644
index b281544aab..0000000000
--- a/meta/recipes-extended/groff/groff_1.22.4.bb
+++ /dev/null
@@ -1,80 +0,0 @@
-SUMMARY = "GNU Troff software"
-DESCRIPTION = "The groff (GNU troff) software is a typesetting package which reads plain text mixed with \
-formatting commands and produces formatted output."
-SECTION = "base"
-HOMEPAGE = "http://www.gnu.org/software/groff/"
-LICENSE = "GPL-3.0-only"
-
-LIC_FILES_CHKSUM = "file://COPYING;md5=d32239bcb673463ab874e80d47fae504"
-
-SRC_URI = "${GNU_MIRROR}/groff/groff-${PV}.tar.gz \
- file://0001-replace-perl-w-with-use-warnings.patch \
- file://groff-not-search-fonts-on-build-host.patch \
- file://0001-support-musl.patch \
- file://0001-Include-config.h.patch \
- file://0001-Make-manpages-mulitlib-identical.patch \
-"
-
-SRC_URI[md5sum] = "08fb04335e2f5e73f23ea4c3adbf0c5f"
-SRC_URI[sha256sum] = "e78e7b4cb7dec310849004fa88847c44701e8d133b5d4c13057d876c1bad0293"
-
-# Remove at the next upgrade
-PR = "r1"
-
-DEPENDS = "bison-native"
-RDEPENDS:${PN} += "perl sed"
-
-inherit autotools-brokensep texinfo multilib_script pkgconfig
-
-MULTILIB_SCRIPTS = "${PN}:${bindir}/gpinyin ${PN}:${bindir}/groffer ${PN}:${bindir}/grog"
-
-EXTRA_OECONF = "--without-x --without-doc"
-PARALLEL_MAKE = ""
-
-CACHED_CONFIGUREVARS += "ac_cv_path_PERL='/usr/bin/env perl' ac_cv_path_BASH_PROG='no' PAGE=A4"
-
-# Delete these generated files since we depend on bison-native
-# and regenerate them. Do it deterministically (always).
-do_configure:prepend() {
- rm -f ${S}/src/preproc/eqn/eqn.cpp
- rm -f ${S}/src/preproc/eqn/eqn.hpp
-}
-
-do_install:append() {
- # Some distros have both /bin/perl and /usr/bin/perl, but we set perl location
- # for target as /usr/bin/perl, so fix it to /usr/bin/perl.
- for i in afmtodit mmroff gropdf pdfmom grog; do
- if [ -f ${D}${bindir}/$i ]; then
- sed -i -e '1s,#!.*perl,#! ${USRBINPATH}/env perl,' ${D}${bindir}/$i
- fi
- done
- if [ -e ${D}${libdir}/charset.alias ]; then
- rm -rf ${D}${libdir}/charset.alias
- fi
-
- # awk is located at /usr/bin/, not /bin/
- SPECIAL_AWK=`find ${D} -name special.awk`
- if [ -f ${SPECIAL_AWK} ]; then
- sed -i -e 's:#!.*awk:#! ${USRBINPATH}/awk:' ${SPECIAL_AWK}
- fi
-
- # not ship /usr/bin/glilypond and its releated files in embedded target system
- rm -rf ${D}${bindir}/glilypond
- rm -rf ${D}${libdir}/groff/glilypond
- rm -rf ${D}${mandir}/man1/glilypond*
-
- # not ship /usr/bin/grap2graph and its releated man files
- rm -rf ${D}${bindir}/grap2graph
- rm -rf ${D}${mandir}/man1/grap2graph*
-}
-
-do_install:append:class-native() {
- create_cmdline_wrapper ${D}/${bindir}/groff \
- -F${STAGING_DIR_NATIVE}${datadir_native}/groff/${PV}/font \
- -M${STAGING_DIR_NATIVE}${datadir_native}/groff/${PV}/tmac
-}
-
-FILES:${PN} += "${libdir}/${BPN}/site-tmac \
- ${libdir}/${BPN}/groffer/"
-
-BBCLASSEXTEND = "native"
diff --git a/meta/recipes-extended/groff/groff_1.23.0.bb b/meta/recipes-extended/groff/groff_1.23.0.bb
new file mode 100644
index 0000000000..0fc4f831d8
--- /dev/null
+++ b/meta/recipes-extended/groff/groff_1.23.0.bb
@@ -0,0 +1,78 @@
+SUMMARY = "GNU Troff software"
+DESCRIPTION = "The groff (GNU troff) software is a typesetting package which reads plain text mixed with \
+formatting commands and produces formatted output."
+SECTION = "base"
+HOMEPAGE = "http://www.gnu.org/software/groff/"
+LICENSE = "GPL-3.0-only"
+
+LIC_FILES_CHKSUM = "file://COPYING;md5=d32239bcb673463ab874e80d47fae504"
+
+SRC_URI = "${GNU_MIRROR}/groff/groff-${PV}.tar.gz \
+ file://groff-not-search-fonts-on-build-host.patch \
+ file://0001-Make-manpages-mulitlib-identical.patch \
+ file://0001-build-Fix-Savannah-64681-webpage.ps-deps.patch \
+ file://0001-build-meintro_fr.ps-depends-on-tbl.patch \
+ "
+
+SRC_URI[sha256sum] = "6b9757f592b7518b4902eb6af7e54570bdccba37a871fddb2d30ae3863511c13"
+
+DEPENDS = "bison-native groff-native"
+RDEPENDS:${PN} += "perl sed"
+
+inherit autotools-brokensep texinfo multilib_script pkgconfig
+
+MULTILIB_SCRIPTS = "${PN}:${bindir}/gpinyin ${PN}:${bindir}/grog"
+
+EXTRA_OECONF = "--without-x --with-urw-fonts-dir=/completely/bogus/dir/"
+EXTRA_OEMAKE:class-target = "GROFFBIN=groff GROFF_BIN_PATH=${STAGING_BINDIR_NATIVE}"
+
+CACHED_CONFIGUREVARS += "ac_cv_path_PERL='/usr/bin/env perl' ac_cv_path_BASH_PROG='no' PAGE=A4"
+
+# Delete these generated files since we depend on bison-native
+# and regenerate them. Do it deterministically (always).
+do_configure:prepend() {
+ rm -f ${S}/src/preproc/eqn/eqn.cpp
+ rm -f ${S}/src/preproc/eqn/eqn.hpp
+}
+
+do_install:append() {
+ # Some distros have both /bin/perl and /usr/bin/perl, but we set perl location
+ # for target as /usr/bin/perl, so fix it to /usr/bin/perl.
+ for i in afmtodit mmroff gropdf pdfmom grog; do
+ if [ -f ${D}${bindir}/$i ]; then
+ sed -i -e '1s,#!.*perl,#! ${USRBINPATH}/env perl,' ${D}${bindir}/$i
+ fi
+ done
+ if [ -e ${D}${libdir}/charset.alias ]; then
+ rm -rf ${D}${libdir}/charset.alias
+ fi
+
+ # awk is located at /usr/bin/, not /bin/
+ SPECIAL_AWK=`find ${D} -name special.awk`
+ if [ -f ${SPECIAL_AWK} ]; then
+ sed -i -e 's:#!.*awk:#! ${USRBINPATH}/awk:' ${SPECIAL_AWK}
+ fi
+
+ # not ship /usr/bin/glilypond and its releated files in embedded target system
+ rm -rf ${D}${bindir}/glilypond
+ rm -rf ${D}${libdir}/groff/glilypond
+ rm -rf ${D}${mandir}/man1/glilypond*
+
+ # not ship /usr/bin/grap2graph and its releated man files
+ rm -rf ${D}${bindir}/grap2graph
+ rm -rf ${D}${mandir}/man1/grap2graph*
+
+ # strip hosttool path out of generated files
+ sed -i -e 's:${HOSTTOOLS_DIR}/::g' ${D}${docdir}/${BP}/examples/hdtbl/*.roff
+}
+
+do_install:append:class-native() {
+ create_cmdline_wrapper ${D}/${bindir}/groff \
+ -F${STAGING_DIR_NATIVE}${datadir_native}/groff/${PV}/font \
+ -M${STAGING_DIR_NATIVE}${datadir_native}/groff/${PV}/tmac
+}
+
+FILES:${PN} += "${libdir}/${BPN}/site-tmac \
+ ${libdir}/${BPN}/groffer/"
+
+BBCLASSEXTEND = "native"
diff --git a/meta/recipes-extended/gzip/gzip-1.12/wrong-path-fix.patch b/meta/recipes-extended/gzip/gzip-1.13/wrong-path-fix.patch
index 7f9e249de8..7f9e249de8 100644
--- a/meta/recipes-extended/gzip/gzip-1.12/wrong-path-fix.patch
+++ b/meta/recipes-extended/gzip/gzip-1.13/wrong-path-fix.patch
diff --git a/meta/recipes-extended/gzip/gzip.inc b/meta/recipes-extended/gzip/gzip.inc
index b32584033b..62b3e2f4f9 100644
--- a/meta/recipes-extended/gzip/gzip.inc
+++ b/meta/recipes-extended/gzip/gzip.inc
@@ -8,6 +8,7 @@ inherit autotools texinfo
export DEFS="NO_ASM"
EXTRA_OEMAKE:class-target = "GREP=${base_bindir}/grep"
+EXTRA_OEMAKE:append:class-nativesdk = " GREP=grep"
EXTRA_OECONF:append:libc-musl = " gl_cv_func_fflush_stdin=yes "
do_install:append () {
diff --git a/meta/recipes-extended/gzip/gzip_1.12.bb b/meta/recipes-extended/gzip/gzip_1.12.bb
deleted file mode 100644
index 14de50b230..0000000000
--- a/meta/recipes-extended/gzip/gzip_1.12.bb
+++ /dev/null
@@ -1,41 +0,0 @@
-require gzip.inc
-
-# change to GPL-3.0-or-later in 2007/07. Previous GPL-2.0-or-later version is
-# 1.3.12
-LICENSE = "GPL-3.0-or-later"
-
-SRC_URI = "${GNU_MIRROR}/gzip/${BP}.tar.gz \
- file://run-ptest \
- "
-SRC_URI:append:class-target = " file://wrong-path-fix.patch"
-
-LIC_FILES_CHKSUM = "file://COPYING;md5=d32239bcb673463ab874e80d47fae504 \
- file://gzip.h;beginline=8;endline=20;md5=6e47caaa630e0c8bf9f1bc8d94a8ed0e"
-
-PROVIDES:append:class-native = " gzip-replacement-native"
-
-RDEPENDS:${PN}-ptest += "make perl grep diffutils"
-
-BBCLASSEXTEND = "native nativesdk"
-
-inherit ptest
-
-do_install_ptest() {
- mkdir -p ${D}${PTEST_PATH}/src/build-aux
- cp ${S}/build-aux/test-driver ${D}${PTEST_PATH}/src/build-aux/
- mkdir -p ${D}${PTEST_PATH}/src/tests
- cp -r ${S}/tests/* ${D}${PTEST_PATH}/src/tests
- sed -e 's/^abs_srcdir = ..*/abs_srcdir = \.\./' \
- -e 's/^top_srcdir = ..*/top_srcdir = \.\./' \
- -e 's/^GREP = ..*/GREP = grep/' \
- -e 's/^AWK = ..*/AWK = awk/' \
- -e 's/^srcdir = ..*/srcdir = \./' \
- -e 's/^Makefile: ..*/Makefile: /' \
- -e 's,--sysroot=${STAGING_DIR_TARGET},,g' \
- -e 's|${DEBUG_PREFIX_MAP}||g' \
- -e 's:${HOSTTOOLS_DIR}/::g' \
- -e 's:${BASE_WORKDIR}/${MULTIMACH_TARGET_SYS}::g' \
- ${B}/tests/Makefile > ${D}${PTEST_PATH}/src/tests/Makefile
-}
-
-SRC_URI[sha256sum] = "5b4fb14d38314e09f2fc8a1c510e7cd540a3ea0e3eb9b0420046b82c3bf41085"
diff --git a/meta/recipes-extended/gzip/gzip_1.13.bb b/meta/recipes-extended/gzip/gzip_1.13.bb
new file mode 100644
index 0000000000..fd846b30a5
--- /dev/null
+++ b/meta/recipes-extended/gzip/gzip_1.13.bb
@@ -0,0 +1,41 @@
+require gzip.inc
+
+# change to GPL-3.0-or-later in 2007/07. Previous GPL-2.0-or-later version is
+# 1.3.12
+LICENSE = "GPL-3.0-or-later"
+
+SRC_URI = "${GNU_MIRROR}/gzip/${BP}.tar.gz \
+ file://run-ptest \
+ "
+SRC_URI:append:class-target = " file://wrong-path-fix.patch"
+
+LIC_FILES_CHKSUM = "file://COPYING;md5=1ebbd3e34237af26da5dc08a4e440464 \
+ file://gzip.h;beginline=8;endline=20;md5=6e47caaa630e0c8bf9f1bc8d94a8ed0e"
+
+PROVIDES:append:class-native = " gzip-replacement-native"
+
+RDEPENDS:${PN}-ptest += "make perl grep diffutils"
+
+BBCLASSEXTEND = "native nativesdk"
+
+inherit ptest
+
+do_install_ptest() {
+ mkdir -p ${D}${PTEST_PATH}/src/build-aux
+ cp ${S}/build-aux/test-driver ${D}${PTEST_PATH}/src/build-aux/
+ mkdir -p ${D}${PTEST_PATH}/src/tests
+ cp -r ${S}/tests/* ${D}${PTEST_PATH}/src/tests
+ sed -e 's/^abs_srcdir = ..*/abs_srcdir = \.\./' \
+ -e 's/^top_srcdir = ..*/top_srcdir = \.\./' \
+ -e 's/^GREP = ..*/GREP = grep/' \
+ -e 's/^AWK = ..*/AWK = awk/' \
+ -e 's/^srcdir = ..*/srcdir = \./' \
+ -e 's/^Makefile: ..*/Makefile: /' \
+ -e 's,--sysroot=${STAGING_DIR_TARGET},,g' \
+ -e 's|${DEBUG_PREFIX_MAP}||g' \
+ -e 's:${HOSTTOOLS_DIR}/::g' \
+ -e 's:${BASE_WORKDIR}/${MULTIMACH_TARGET_SYS}::g' \
+ ${B}/tests/Makefile > ${D}${PTEST_PATH}/src/tests/Makefile
+}
+
+SRC_URI[sha256sum] = "20fc818aeebae87cdbf209d35141ad9d3cf312b35a5e6be61bfcfbf9eddd212a"
diff --git a/meta/recipes-extended/hdparm/hdparm_9.63.bb b/meta/recipes-extended/hdparm/hdparm_9.63.bb
deleted file mode 100644
index 7900a310a3..0000000000
--- a/meta/recipes-extended/hdparm/hdparm_9.63.bb
+++ /dev/null
@@ -1,43 +0,0 @@
-SUMMARY = "Utility for viewing/manipulating IDE disk drive/driver parameters"
-HOMEPAGE = "http://sourceforge.net/projects/hdparm/"
-DESCRIPTION = "hdparm is a Linux shell utility for viewing \
-and manipulating various IDE drive and driver parameters."
-SECTION = "console/utils"
-
-LICENSE = "BSD-2-Clause & GPL-2.0-only & hdparm"
-LICENSE:${PN} = "BSD-2-Clause & hdparm"
-LICENSE:${PN}-dbg = "BSD-2-Clause & hdparm"
-LICENSE:wiper = "GPL-2.0-only"
-NO_GENERIC_LICENSE[hdparm] = "LICENSE.TXT"
-
-LIC_FILES_CHKSUM = "file://LICENSE.TXT;md5=495d03e50dc6c89d6a30107ab0df5b03 \
- file://debian/copyright;md5=a82d7ba3ade9e8ec902749db98c592f3 \
- file://wiper/GPLv2.txt;md5=fcb02dc552a041dee27e4b85c7396067 \
- file://wiper/wiper.sh;beginline=7;endline=31;md5=b7bc642addc152ea307505bf1a296f09"
-
-
-PACKAGES =+ "wiper"
-
-FILES:wiper = "${bindir}/wiper.sh"
-
-RDEPENDS:wiper = "bash gawk coreutils"
-
-SRC_URI = "${SOURCEFORGE_MIRROR}/hdparm/${BP}.tar.gz \
- file://wiper.sh-fix-stat-path.patch \
- "
-
-SRC_URI[sha256sum] = "70785deaebba5877a89c123568b41dee990da55fc51420f13f609a1072899691"
-
-EXTRA_OEMAKE = 'STRIP="echo" LDFLAGS="${LDFLAGS}"'
-
-inherit update-alternatives
-
-ALTERNATIVE:${PN} = "hdparm"
-ALTERNATIVE_LINK_NAME[hdparm] = "${base_sbindir}/hdparm"
-ALTERNATIVE_PRIORITY = "100"
-
-do_install () {
- install -d ${D}/${base_sbindir} ${D}/${mandir}/man8 ${D}/${bindir}
- oe_runmake 'DESTDIR=${D}' 'sbindir=${base_sbindir}' install
- cp ${S}/wiper/wiper.sh ${D}/${bindir}
-}
diff --git a/meta/recipes-extended/hdparm/hdparm_9.65.bb b/meta/recipes-extended/hdparm/hdparm_9.65.bb
new file mode 100644
index 0000000000..d5e6ab6065
--- /dev/null
+++ b/meta/recipes-extended/hdparm/hdparm_9.65.bb
@@ -0,0 +1,43 @@
+SUMMARY = "Utility for viewing/manipulating IDE disk drive/driver parameters"
+HOMEPAGE = "http://sourceforge.net/projects/hdparm/"
+DESCRIPTION = "hdparm is a Linux shell utility for viewing \
+and manipulating various IDE drive and driver parameters."
+SECTION = "console/utils"
+
+LICENSE = "BSD-2-Clause & GPL-2.0-only & hdparm"
+LICENSE:${PN} = "BSD-2-Clause & hdparm"
+LICENSE:${PN}-dbg = "BSD-2-Clause & hdparm"
+LICENSE:wiper = "GPL-2.0-only"
+NO_GENERIC_LICENSE[hdparm] = "LICENSE.TXT"
+
+LIC_FILES_CHKSUM = "file://LICENSE.TXT;md5=495d03e50dc6c89d6a30107ab0df5b03 \
+ file://debian/copyright;md5=a82d7ba3ade9e8ec902749db98c592f3 \
+ file://wiper/GPLv2.txt;md5=fcb02dc552a041dee27e4b85c7396067 \
+ file://wiper/wiper.sh;beginline=7;endline=31;md5=b7bc642addc152ea307505bf1a296f09"
+
+
+PACKAGES =+ "wiper"
+
+FILES:wiper = "${bindir}/wiper.sh"
+
+RDEPENDS:wiper = "bash gawk coreutils"
+
+SRC_URI = "${SOURCEFORGE_MIRROR}/hdparm/${BP}.tar.gz \
+ file://wiper.sh-fix-stat-path.patch \
+ "
+
+SRC_URI[sha256sum] = "d14929f910d060932e717e9382425d47c2e7144235a53713d55a94f7de535a4b"
+
+EXTRA_OEMAKE = 'STRIP="echo" LDFLAGS="${LDFLAGS}"'
+
+inherit update-alternatives
+
+ALTERNATIVE:${PN} = "hdparm"
+ALTERNATIVE_LINK_NAME[hdparm] = "${base_sbindir}/hdparm"
+ALTERNATIVE_PRIORITY = "100"
+
+do_install () {
+ install -d ${D}/${base_sbindir} ${D}/${mandir}/man8 ${D}/${bindir}
+ oe_runmake 'DESTDIR=${D}' 'sbindir=${base_sbindir}' install
+ cp ${S}/wiper/wiper.sh ${D}/${bindir}
+}
diff --git a/meta/recipes-extended/images/core-image-full-cmdline.bb b/meta/recipes-extended/images/core-image-full-cmdline.bb
index 4d69073d9d..b034cd0aeb 100644
--- a/meta/recipes-extended/images/core-image-full-cmdline.bb
+++ b/meta/recipes-extended/images/core-image-full-cmdline.bb
@@ -1,7 +1,7 @@
-DESCRIPTION = "A console-only image with more full-featured Linux system \
+SUMMARY = "A console-only image with more full-featured Linux system \
functionality installed."
-IMAGE_FEATURES += "splash ssh-server-openssh"
+IMAGE_FEATURES += "splash ssh-server-openssh package-management"
IMAGE_INSTALL = "\
packagegroup-core-boot \
diff --git a/meta/recipes-extended/images/core-image-testcontroller-initramfs.bb b/meta/recipes-extended/images/core-image-testcontroller-initramfs.bb
index 2bc035a538..3b81afd7c0 100644
--- a/meta/recipes-extended/images/core-image-testcontroller-initramfs.bb
+++ b/meta/recipes-extended/images/core-image-testcontroller-initramfs.bb
@@ -1,4 +1,4 @@
-DESCRIPTION = "Small image capable of booting a device with custom install scripts, \
+SUMMARY = "Small image capable of booting a device with custom install scripts, \
adding a second rootfs, used for testing."
# use -testfs live-install scripts
@@ -7,7 +7,6 @@ PACKAGE_INSTALL = "initramfs-live-boot initramfs-live-install-testfs initramfs-l
# Do not pollute the initrd image with rootfs features
IMAGE_FEATURES = ""
-export IMAGE_BASENAME = "core-image-testcontroller-initramfs"
IMAGE_NAME_SUFFIX ?= ""
IMAGE_LINGUAS = ""
diff --git a/meta/recipes-extended/images/core-image-testcontroller.bb b/meta/recipes-extended/images/core-image-testcontroller.bb
index 20edbd5630..ff79bde0c5 100644
--- a/meta/recipes-extended/images/core-image-testcontroller.bb
+++ b/meta/recipes-extended/images/core-image-testcontroller.bb
@@ -1,4 +1,4 @@
-DESCRIPTION = "A test controller image to be deployed on a target useful for testing other images using the OEQA runtime tests"
+SUMMARY = "A test controller image to be deployed on a target useful for testing other images using the OEQA runtime tests"
IMAGE_FEATURES += "ssh-server-openssh package-management"
diff --git a/meta/recipes-extended/iptables/iptables/0001-Makefile.am-do-not-install-etc-ethertypes.patch b/meta/recipes-extended/iptables/iptables/0001-Makefile.am-do-not-install-etc-ethertypes.patch
deleted file mode 100644
index aa51265822..0000000000
--- a/meta/recipes-extended/iptables/iptables/0001-Makefile.am-do-not-install-etc-ethertypes.patch
+++ /dev/null
@@ -1,40 +0,0 @@
-From a4ed9fc8da720585f853d2ca6ffd30e2fa4d1247 Mon Sep 17 00:00:00 2001
-From: Trevor Gamblin <trevor.gamblin@windriver.com>
-Date: Wed, 9 Mar 2022 12:50:39 -0500
-Subject: [PATCH] Makefile.am: do not install /etc/ethertypes
-
-The /etc/ethertypes is provided by netbase since 6.0[1].
-Do not instal the file in ebtables, otherwise there would be a conflict:
-Error: Transaction test error:
- file /etc/ethertypes conflicts between attempted installs of
-netbase-1:6.2-r0.corei7_64 and iptables-1.8.7-r0.corei7_64
-
-[1]
-https://salsa.debian.org/md/netbase/-/commit/316680c6a2c3641b6abc76b3eebf88781f609d35)
-
-This patch is based off of the same change made for the ebtables recipe:
-
-http://cgit.openembedded.org/meta-openembedded/tree/meta-networking/recipes-filter/ebtables/ebtables-2.0.11/0001-Makefile.am-do-not-install-etc-ethertypes.patch?h=master
-
-Upstream-Status: Inappropriate [embedded specific]
-
-Signed-off-by: Trevor Gamblin <trevor.gamblin@windriver.com>
----
- Makefile.am | 1 -
- 1 file changed, 1 deletion(-)
-
-diff --git a/Makefile.am b/Makefile.am
-index 799bf8b8..2eb1843f 100644
---- a/Makefile.am
-+++ b/Makefile.am
-@@ -18,7 +18,6 @@ SUBDIRS += iptables
-
- if ENABLE_NFTABLES
- confdir = $(sysconfdir)
--dist_conf_DATA = etc/ethertypes
- endif
-
- .PHONY: tarball
---
-2.35.1
-
diff --git a/meta/recipes-extended/iptables/iptables/0001-configure-Add-option-to-enable-disable-libnfnetlink.patch b/meta/recipes-extended/iptables/iptables/0001-configure-Add-option-to-enable-disable-libnfnetlink.patch
index 03c36ccbc2..8824bf2af7 100644
--- a/meta/recipes-extended/iptables/iptables/0001-configure-Add-option-to-enable-disable-libnfnetlink.patch
+++ b/meta/recipes-extended/iptables/iptables/0001-configure-Add-option-to-enable-disable-libnfnetlink.patch
@@ -1,4 +1,4 @@
-From c46db7c2e1f63ec525835553587e70c635565310 Mon Sep 17 00:00:00 2001
+From 0096c854d5015918ed154dccb3ad472fd06c1010 Mon Sep 17 00:00:00 2001
From: "Maxin B. John" <maxin.john@intel.com>
Date: Tue, 21 Feb 2017 11:16:31 +0200
Subject: [PATCH] configure: Add option to enable/disable libnfnetlink
@@ -10,12 +10,13 @@ Upstream-Status: Pending
Signed-off-by: Khem Raj <raj.khem@gmail.com>
Signed-off-by: Maxin B. John <maxin.john@intel.com>
+
---
configure.ac | 10 +++++++---
1 file changed, 7 insertions(+), 3 deletions(-)
diff --git a/configure.ac b/configure.ac
-index eda7871..03ddc50 100644
+index d99fa3b..d607772 100644
--- a/configure.ac
+++ b/configure.ac
@@ -63,6 +63,9 @@ AC_ARG_WITH([pkgconfigdir], AS_HELP_STRING([--with-pkgconfigdir=PATH],
@@ -28,9 +29,9 @@ index eda7871..03ddc50 100644
AC_ARG_ENABLE([connlabel],
AS_HELP_STRING([--disable-connlabel],
[Do not build libnetfilter_conntrack]),
-@@ -115,9 +118,10 @@ if test "x$enable_bpfc" = "xyes" || test "x$enable_nfsynproxy" = "xyes"; then
- AC_CHECK_LIB(pcap, pcap_compile,, AC_MSG_ERROR(missing libpcap library required by bpf compiler or nfsynproxy tool))
- fi
+@@ -113,9 +116,10 @@ AM_CONDITIONAL([ENABLE_SYNCONF], [test "$enable_nfsynproxy" = "yes"])
+ AM_CONDITIONAL([ENABLE_NFTABLES], [test "$enable_nftables" = "yes"])
+ AM_CONDITIONAL([ENABLE_CONNLABEL], [test "$enable_connlabel" = "yes"])
-PKG_CHECK_MODULES([libnfnetlink], [libnfnetlink >= 1.0],
- [nfnetlink=1], [nfnetlink=0])
@@ -40,8 +41,5 @@ index eda7871..03ddc50 100644
+ ])
+AM_CONDITIONAL([HAVE_LIBNFNETLINK], [test "x$enable_libnfnetlink" = "xyes"])
- if test "x$enable_nftables" = "xyes"; then
- PKG_CHECK_MODULES([libmnl], [libmnl >= 1.0], [mnl=1], [mnl=0])
---
-2.4.0
-
+ if test "x$enable_bpfc" = "xyes" || test "x$enable_nfsynproxy" = "xyes"; then
+ PKG_CHECK_MODULES([libpcap], [libpcap], [], [
diff --git a/meta/recipes-extended/iptables/iptables/0001-iptables-xshared.h-add-missing-sys.types.h-include.patch b/meta/recipes-extended/iptables/iptables/0001-iptables-xshared.h-add-missing-sys.types.h-include.patch
deleted file mode 100644
index 17dd032434..0000000000
--- a/meta/recipes-extended/iptables/iptables/0001-iptables-xshared.h-add-missing-sys.types.h-include.patch
+++ /dev/null
@@ -1,30 +0,0 @@
-From 796b8f6fc1e584c27c42ba302f623fd1c5aa0667 Mon Sep 17 00:00:00 2001
-From: Alexander Kanavin <alex@linutronix.de>
-Date: Tue, 17 May 2022 10:56:59 +0200
-Subject: [PATCH] iptables/xshared.h: add missing sys.types.h include
-
-This resolves the build error under musl:
-
-| ../../../../../../../workspace/sources/iptables/iptables/xshared.h:83:56: error: unknown type name 'u_int16_t'; did you mean 'uint16_t'?
-| 83 | set_option(unsigned int *options, unsigned int option, u_int16_t *invflg,
-| | ^~~~~~~~~
-| | uint16_t
-
-Upstream-Status: Submitted [via email to phil@nwl.cc]
-Signed-off-by: Alexander Kanavin <alex@linutronix.de>
----
- iptables/xshared.h | 1 +
- 1 file changed, 1 insertion(+)
-
-diff --git a/iptables/xshared.h b/iptables/xshared.h
-index 14568bb..73b1017 100644
---- a/iptables/xshared.h
-+++ b/iptables/xshared.h
-@@ -6,6 +6,7 @@
- #include <stdint.h>
- #include <netinet/in.h>
- #include <net/if.h>
-+#include <sys/types.h>
- #include <linux/netfilter_arp/arp_tables.h>
- #include <linux/netfilter_ipv4/ip_tables.h>
- #include <linux/netfilter_ipv6/ip6_tables.h>
diff --git a/meta/recipes-extended/iptables/iptables/0002-configure.ac-only-check-conntrack-when-libnfnetlink-enabled.patch b/meta/recipes-extended/iptables/iptables/0002-configure.ac-only-check-conntrack-when-libnfnetlink-enabled.patch
deleted file mode 100644
index 7842c6408a..0000000000
--- a/meta/recipes-extended/iptables/iptables/0002-configure.ac-only-check-conntrack-when-libnfnetlink-enabled.patch
+++ /dev/null
@@ -1,51 +0,0 @@
-From 26090b3dbcdf6a11e60535da949b726a6e86426d Mon Sep 17 00:00:00 2001
-From: "Maxin B. John" <maxin.john@intel.com>
-Date: Tue, 21 Feb 2017 11:49:07 +0200
-Subject: [PATCH] configure.ac:
- only-check-conntrack-when-libnfnetlink-enabled.patch
-
-Package libnetfilter-conntrack depends on package libnfnetlink. iptables
-checks package libnetfilter-conntrack whatever its package config
-libnfnetlink is enabled or not. When libnfnetlink is disabled but
-package libnetfilter-conntrack exists, it fails randomly with:
-
-In file included from
-.../iptables/1.4.21-r0/iptables-1.4.21/extensions/libxt_connlabel.c:8:0:
-
-.../tmp/sysroots/qemumips/usr/include/libnetfilter_conntrack/libnetfilter_conntrack.h:14:42:
-fatal error: libnfnetlink/linux_nfnetlink.h: No such file or directory
-
-compilation terminated.
-GNUmakefile:96: recipe for target 'libxt_connlabel.oo' failed
-Only check libnetfilter-conntrack when libnfnetlink is enabled to fix it.
-
-Upstream-Status: Pending
-
-Signed-off-by: Kai Kang <kai.kang@windriver.com>
-Signed-off-by: Maxin B. John <maxin.john@intel.com>
----
- configure.ac | 6 ++++--
- 1 file changed, 4 insertions(+), 2 deletions(-)
-
-diff --git a/configure.ac b/configure.ac
-index 03ddc50..523caea 100644
---- a/configure.ac
-+++ b/configure.ac
-@@ -172,10 +172,12 @@ if test "$nftables" != 1; then
- fi
-
- if test "x$enable_connlabel" = "xyes"; then
-- PKG_CHECK_MODULES([libnetfilter_conntrack],
-+ nfconntrack=0
-+ AS_IF([test "x$enable_libnfnetlink" = "xyes"], [
-+ PKG_CHECK_MODULES([libnetfilter_conntrack],
- [libnetfilter_conntrack >= 1.0.6],
- [nfconntrack=1], [nfconntrack=0])
--
-+ ])
- if test "$nfconntrack" -ne 1; then
- blacklist_modules="$blacklist_modules connlabel";
- echo "WARNING: libnetfilter_conntrack not found, connlabel match will not be built";
---
-2.4.0
-
diff --git a/meta/recipes-extended/iptables/iptables/0002-iptables-xshared.h-add-missing-sys.types.h-include.patch b/meta/recipes-extended/iptables/iptables/0002-iptables-xshared.h-add-missing-sys.types.h-include.patch
new file mode 100644
index 0000000000..a190c7e8ae
--- /dev/null
+++ b/meta/recipes-extended/iptables/iptables/0002-iptables-xshared.h-add-missing-sys.types.h-include.patch
@@ -0,0 +1,31 @@
+From 465e3ef77f1763d225adc76220e43ee9bd73b178 Mon Sep 17 00:00:00 2001
+From: Alexander Kanavin <alex@linutronix.de>
+Date: Tue, 17 May 2022 10:56:59 +0200
+Subject: [PATCH] iptables/xshared.h: add missing sys.types.h include
+
+This resolves the build error under musl:
+
+| ../../../../../../../workspace/sources/iptables/iptables/xshared.h:83:56: error: unknown type name 'u_int16_t'; did you mean 'uint16_t'?
+| 83 | set_option(unsigned int *options, unsigned int option, u_int16_t *invflg,
+| | ^~~~~~~~~
+| | uint16_t
+
+Upstream-Status: Submitted [via email to phil@nwl.cc]
+Signed-off-by: Alexander Kanavin <alex@linutronix.de>
+
+---
+ iptables/xshared.h | 1 +
+ 1 file changed, 1 insertion(+)
+
+diff --git a/iptables/xshared.h b/iptables/xshared.h
+index a200e0d..f543dbf 100644
+--- a/iptables/xshared.h
++++ b/iptables/xshared.h
+@@ -6,6 +6,7 @@
+ #include <stdint.h>
+ #include <netinet/in.h>
+ #include <net/if.h>
++#include <sys/types.h>
+ #include <linux/netfilter_arp/arp_tables.h>
+ #include <linux/netfilter_ipv4/ip_tables.h>
+ #include <linux/netfilter_ipv6/ip6_tables.h>
diff --git a/meta/recipes-extended/iptables/iptables/0004-configure.ac-only-check-conntrack-when-libnfnetlink-.patch b/meta/recipes-extended/iptables/iptables/0004-configure.ac-only-check-conntrack-when-libnfnetlink-.patch
new file mode 100644
index 0000000000..5a022ebc8c
--- /dev/null
+++ b/meta/recipes-extended/iptables/iptables/0004-configure.ac-only-check-conntrack-when-libnfnetlink-.patch
@@ -0,0 +1,49 @@
+From 6832501bbb90a3dab977a4625d0391804c0e795c Mon Sep 17 00:00:00 2001
+From: "Maxin B. John" <maxin.john@intel.com>
+Date: Tue, 21 Feb 2017 11:49:07 +0200
+Subject: [PATCH] configure.ac:
+ only-check-conntrack-when-libnfnetlink-enabled.patch
+
+Package libnetfilter-conntrack depends on package libnfnetlink. iptables
+checks package libnetfilter-conntrack whatever its package config
+libnfnetlink is enabled or not. When libnfnetlink is disabled but
+package libnetfilter-conntrack exists, it fails randomly with:
+
+In file included from
+.../iptables/1.4.21-r0/iptables-1.4.21/extensions/libxt_connlabel.c:8:0:
+
+.../tmp/sysroots/qemumips/usr/include/libnetfilter_conntrack/libnetfilter_conntrack.h:14:42:
+fatal error: libnfnetlink/linux_nfnetlink.h: No such file or directory
+
+compilation terminated.
+GNUmakefile:96: recipe for target 'libxt_connlabel.oo' failed
+Only check libnetfilter-conntrack when libnfnetlink is enabled to fix it.
+
+Upstream-Status: Pending
+
+Signed-off-by: Kai Kang <kai.kang@windriver.com>
+Signed-off-by: Maxin B. John <maxin.john@intel.com>
+
+---
+ configure.ac | 6 ++++--
+ 1 file changed, 4 insertions(+), 2 deletions(-)
+
+diff --git a/configure.ac b/configure.ac
+index d607772..25a8e75 100644
+--- a/configure.ac
++++ b/configure.ac
+@@ -159,10 +159,12 @@ if test "$nftables" != 1; then
+ fi
+
+ if test "x$enable_connlabel" = "xyes"; then
+- PKG_CHECK_MODULES([libnetfilter_conntrack],
++ nfconntrack=0
++ AS_IF([test "x$enable_libnfnetlink" = "xyes"], [
++ PKG_CHECK_MODULES([libnetfilter_conntrack],
+ [libnetfilter_conntrack >= 1.0.6],
+ [nfconntrack=1], [nfconntrack=0])
+-
++ ])
+ if test "$nfconntrack" -ne 1; then
+ blacklist_modules="$blacklist_modules connlabel";
+ echo "WARNING: libnetfilter_conntrack not found, connlabel match will not be built";
diff --git a/meta/recipes-extended/iptables/iptables/format-security.patch b/meta/recipes-extended/iptables/iptables/format-security.patch
deleted file mode 100644
index be1e077b49..0000000000
--- a/meta/recipes-extended/iptables/iptables/format-security.patch
+++ /dev/null
@@ -1,30 +0,0 @@
-From b72eb12ea5a61df0655ad99d5048994e916be83a Mon Sep 17 00:00:00 2001
-From: Phil Sutter <phil@nwl.cc>
-Date: Fri, 13 May 2022 16:51:58 +0200
-Subject: xshared: Fix build for -Werror=format-security
-
-Gcc complains about the omitted format string.
-
-Signed-off-by: Phil Sutter <phil@nwl.cc>
-Upstream-Status: Backport
-Signed-off-by: Alexander Kanavin <alex@linutronix.de>
----
- iptables/xshared.c | 2 +-
- 1 file changed, 1 insertion(+), 1 deletion(-)
-
-diff --git a/iptables/xshared.c b/iptables/xshared.c
-index fae5ddd5..a8512d38 100644
---- a/iptables/xshared.c
-+++ b/iptables/xshared.c
-@@ -1307,7 +1307,7 @@ static void check_empty_interface(struct xtables_args *args, const char *arg)
- return;
-
- if (args->family != NFPROTO_ARP)
-- xtables_error(PARAMETER_PROBLEM, msg);
-+ xtables_error(PARAMETER_PROBLEM, "%s", msg);
-
- fprintf(stderr, "%s", msg);
- }
---
-cgit v1.2.3
-
diff --git a/meta/recipes-extended/iptables/iptables_1.8.10.bb b/meta/recipes-extended/iptables/iptables_1.8.10.bb
new file mode 100644
index 0000000000..cd2f3bce0b
--- /dev/null
+++ b/meta/recipes-extended/iptables/iptables_1.8.10.bb
@@ -0,0 +1,123 @@
+SUMMARY = "Tools for managing kernel packet filtering capabilities"
+DESCRIPTION = "iptables is the userspace command line program used to configure and control network packet \
+filtering code in Linux."
+HOMEPAGE = "http://www.netfilter.org/"
+BUGTRACKER = "http://bugzilla.netfilter.org/"
+LICENSE = "GPL-2.0-or-later"
+LIC_FILES_CHKSUM = "file://COPYING;md5=b234ee4d69f5fce4486a80fdaf4a4263 \
+ file://iptables/iptables.c;beginline=13;endline=25;md5=c5cffd09974558cf27d0f763df2a12dc \
+"
+
+SRC_URI = "http://netfilter.org/projects/iptables/files/iptables-${PV}.tar.xz \
+ file://iptables.service \
+ file://iptables.rules \
+ file://ip6tables.service \
+ file://ip6tables.rules \
+ file://0001-configure-Add-option-to-enable-disable-libnfnetlink.patch \
+ file://0002-iptables-xshared.h-add-missing-sys.types.h-include.patch \
+ file://0004-configure.ac-only-check-conntrack-when-libnfnetlink-.patch \
+ "
+SRC_URI[sha256sum] = "5cc255c189356e317d070755ce9371eb63a1b783c34498fb8c30264f3cc59c9c"
+
+SYSTEMD_SERVICE:${PN} = "\
+ iptables.service \
+ ${@bb.utils.contains('PACKAGECONFIG', 'ipv6', 'ip6tables.service', '', d)} \
+"
+
+inherit autotools pkgconfig systemd
+
+EXTRA_OECONF = "--with-kernel=${STAGING_INCDIR}"
+
+CFLAGS:append:libc-musl = " -D__UAPI_DEF_ETHHDR=0"
+
+PACKAGECONFIG ?= "${@bb.utils.filter('DISTRO_FEATURES', 'ipv6', d)}"
+PACKAGECONFIG[ipv6] = "--enable-ipv6,--disable-ipv6,"
+
+# libnfnetlink recipe is in meta-networking layer
+PACKAGECONFIG[libnfnetlink] = "--enable-libnfnetlink,--disable-libnfnetlink,libnfnetlink libnetfilter-conntrack"
+
+# libnftnl recipe is in meta-networking layer(previously known as libnftables)
+PACKAGECONFIG[libnftnl] = "--enable-nftables,--disable-nftables,libnftnl"
+
+do_configure:prepend() {
+ # Remove some libtool m4 files
+ # Keep ax_check_linker_flags.m4 which belongs to autoconf-archive.
+ rm -f libtool.m4 lt~obsolete.m4 ltoptions.m4 ltsugar.m4 ltversion.m4
+
+ # Copy a header to fix out of tree builds
+ cp -f ${S}/libiptc/linux_list.h ${S}/include/libiptc/
+}
+
+IPTABLES_RULES_DIR ?= "${sysconfdir}/${BPN}"
+
+do_install:append() {
+ install -d ${D}${IPTABLES_RULES_DIR}
+ install -m 0644 ${WORKDIR}/iptables.rules ${D}${IPTABLES_RULES_DIR}
+
+ install -d ${D}${systemd_system_unitdir}
+ install -m 0644 ${WORKDIR}/iptables.service ${D}${systemd_system_unitdir}
+
+ sed -i \
+ -e 's,@SBINDIR@,${sbindir},g' \
+ -e 's,@RULESDIR@,${IPTABLES_RULES_DIR},g' \
+ ${D}${systemd_system_unitdir}/iptables.service
+
+ if ${@bb.utils.contains('PACKAGECONFIG', 'ipv6', 'true', 'false', d)} ; then
+ install -m 0644 ${WORKDIR}/ip6tables.rules ${D}${IPTABLES_RULES_DIR}
+ install -m 0644 ${WORKDIR}/ip6tables.service ${D}${systemd_system_unitdir}
+
+ sed -i \
+ -e 's,@SBINDIR@,${sbindir},g' \
+ -e 's,@RULESDIR@,${IPTABLES_RULES_DIR},g' \
+ ${D}${systemd_system_unitdir}/ip6tables.service
+ fi
+
+ # if libnftnl is included, make the iptables symlink point to the nft-based binary by default
+ if ${@bb.utils.contains('PACKAGECONFIG', 'libnftnl', 'true', 'false', d)} ; then
+ ln -sf ${sbindir}/xtables-nft-multi ${D}${sbindir}/iptables
+ fi
+}
+
+PACKAGES =+ "${PN}-modules ${PN}-apply"
+PACKAGES_DYNAMIC += "^${PN}-module-.*"
+
+python populate_packages:prepend() {
+ modules = do_split_packages(d, '${libdir}/xtables', r'lib(.*)\.so$', '${PN}-module-%s', '${PN} module %s', extra_depends='')
+ if modules:
+ metapkg = d.getVar('PN') + '-modules'
+ d.appendVar('RDEPENDS:' + metapkg, ' ' + ' '.join(modules))
+}
+
+RDEPENDS:${PN} = "${PN}-module-xt-standard"
+RRECOMMENDS:${PN} = " \
+ ${PN}-modules \
+ kernel-module-x-tables \
+ kernel-module-ip-tables \
+ kernel-module-iptable-filter \
+ kernel-module-iptable-nat \
+ kernel-module-nf-defrag-ipv4 \
+ kernel-module-nf-conntrack \
+ kernel-module-nf-conntrack-ipv4 \
+ kernel-module-nf-nat \
+ kernel-module-ipt-masquerade \
+ ${@bb.utils.contains('PACKAGECONFIG', 'ipv6', '\
+ kernel-module-ip6table-filter \
+ kernel-module-ip6-tables \
+ ', '', d)} \
+"
+
+FILES:${PN} += "${datadir}/xtables"
+
+FILES:${PN}-apply = "${sbindir}/ip*-apply"
+RDEPENDS:${PN}-apply = "${PN} bash"
+
+# Include the symlinks as well in respective packages
+FILES:${PN}-module-xt-conntrack += "${libdir}/xtables/libxt_state.so"
+FILES:${PN}-module-xt-ct += "${libdir}/xtables/libxt_NOTRACK.so ${libdir}/xtables/libxt_REDIRECT.so"
+FILES:${PN}-module-xt-nat += "${libdir}/xtables/libxt_SNAT.so ${libdir}/xtables/libxt_DNAT.so ${libdir}/xtables/libxt_MASQUERADE.so"
+
+ALLOW_EMPTY:${PN}-modules = "1"
+
+INSANE_SKIP:${PN}-module-xt-conntrack = "dev-so"
+INSANE_SKIP:${PN}-module-xt-ct = "dev-so"
+INSANE_SKIP:${PN}-module-xt-nat = "dev-so"
diff --git a/meta/recipes-extended/iptables/iptables_1.8.8.bb b/meta/recipes-extended/iptables/iptables_1.8.8.bb
deleted file mode 100644
index 54d027220b..0000000000
--- a/meta/recipes-extended/iptables/iptables_1.8.8.bb
+++ /dev/null
@@ -1,123 +0,0 @@
-SUMMARY = "Tools for managing kernel packet filtering capabilities"
-DESCRIPTION = "iptables is the userspace command line program used to configure and control network packet \
-filtering code in Linux."
-HOMEPAGE = "http://www.netfilter.org/"
-BUGTRACKER = "http://bugzilla.netfilter.org/"
-LICENSE = "GPL-2.0-or-later"
-LIC_FILES_CHKSUM = "file://COPYING;md5=b234ee4d69f5fce4486a80fdaf4a4263 \
- file://iptables/iptables.c;beginline=13;endline=25;md5=c5cffd09974558cf27d0f763df2a12dc \
-"
-
-SRC_URI = "http://netfilter.org/projects/iptables/files/iptables-${PV}.tar.bz2 \
- file://0001-configure-Add-option-to-enable-disable-libnfnetlink.patch \
- file://0001-Makefile.am-do-not-install-etc-ethertypes.patch \
- file://0002-configure.ac-only-check-conntrack-when-libnfnetlink-enabled.patch \
- file://format-security.patch \
- file://iptables.service \
- file://iptables.rules \
- file://ip6tables.service \
- file://ip6tables.rules \
- file://0001-iptables-xshared.h-add-missing-sys.types.h-include.patch \
- "
-SRC_URI[sha256sum] = "71c75889dc710676631553eb1511da0177bbaaf1b551265b912d236c3f51859f"
-
-SYSTEMD_SERVICE:${PN} = "\
- iptables.service \
- ${@bb.utils.contains('PACKAGECONFIG', 'ipv6', 'ip6tables.service', '', d)} \
-"
-
-inherit autotools pkgconfig systemd
-
-EXTRA_OECONF = "--with-kernel=${STAGING_INCDIR}"
-
-CFLAGS:append:libc-musl = " -D__UAPI_DEF_ETHHDR=0"
-
-PACKAGECONFIG ?= "${@bb.utils.filter('DISTRO_FEATURES', 'ipv6', d)}"
-PACKAGECONFIG[ipv6] = "--enable-ipv6,--disable-ipv6,"
-
-# libnfnetlink recipe is in meta-networking layer
-PACKAGECONFIG[libnfnetlink] = "--enable-libnfnetlink,--disable-libnfnetlink,libnfnetlink libnetfilter-conntrack"
-
-# libnftnl recipe is in meta-networking layer(previously known as libnftables)
-PACKAGECONFIG[libnftnl] = "--enable-nftables,--disable-nftables,libnftnl"
-
-do_configure:prepend() {
- # Remove some libtool m4 files
- # Keep ax_check_linker_flags.m4 which belongs to autoconf-archive.
- rm -f libtool.m4 lt~obsolete.m4 ltoptions.m4 ltsugar.m4 ltversion.m4
-
- # Copy a header to fix out of tree builds
- cp -f ${S}/libiptc/linux_list.h ${S}/include/libiptc/
-}
-
-IPTABLES_RULES_DIR ?= "${sysconfdir}/${BPN}"
-
-do_install:append() {
- install -d ${D}${IPTABLES_RULES_DIR}
- install -m 0644 ${WORKDIR}/iptables.rules ${D}${IPTABLES_RULES_DIR}
-
- install -d ${D}${systemd_system_unitdir}
- install -m 0644 ${WORKDIR}/iptables.service ${D}${systemd_system_unitdir}
-
- sed -i \
- -e 's,@SBINDIR@,${sbindir},g' \
- -e 's,@RULESDIR@,${IPTABLES_RULES_DIR},g' \
- ${D}${systemd_system_unitdir}/iptables.service
-
- if ${@bb.utils.contains('PACKAGECONFIG', 'ipv6', 'true', 'false', d)} ; then
- install -m 0644 ${WORKDIR}/ip6tables.rules ${D}${IPTABLES_RULES_DIR}
- install -m 0644 ${WORKDIR}/ip6tables.service ${D}${systemd_system_unitdir}
-
- sed -i \
- -e 's,@SBINDIR@,${sbindir},g' \
- -e 's,@RULESDIR@,${IPTABLES_RULES_DIR},g' \
- ${D}${systemd_system_unitdir}/ip6tables.service
- fi
-
- # if libnftnl is included, make the iptables symlink point to the nft-based binary by default
- if ${@bb.utils.contains('PACKAGECONFIG', 'libnftnl', 'true', 'false', d)} ; then
- ln -sf ${sbindir}/xtables-nft-multi ${D}${sbindir}/iptables
- fi
-}
-
-PACKAGES =+ "${PN}-modules ${PN}-apply"
-PACKAGES_DYNAMIC += "^${PN}-module-.*"
-
-python populate_packages:prepend() {
- modules = do_split_packages(d, '${libdir}/xtables', r'lib(.*)\.so$', '${PN}-module-%s', '${PN} module %s', extra_depends='')
- if modules:
- metapkg = d.getVar('PN') + '-modules'
- d.appendVar('RDEPENDS:' + metapkg, ' ' + ' '.join(modules))
-}
-
-RDEPENDS:${PN} = "${PN}-module-xt-standard"
-RRECOMMENDS:${PN} = " \
- ${PN}-modules \
- kernel-module-x-tables \
- kernel-module-ip-tables \
- kernel-module-iptable-filter \
- kernel-module-iptable-nat \
- kernel-module-nf-defrag-ipv4 \
- kernel-module-nf-conntrack \
- kernel-module-nf-conntrack-ipv4 \
- kernel-module-nf-nat \
- kernel-module-ipt-masquerade \
- ${@bb.utils.contains('PACKAGECONFIG', 'ipv6', '\
- kernel-module-ip6table-filter \
- kernel-module-ip6-tables \
- ', '', d)} \
-"
-
-FILES:${PN} += "${datadir}/xtables"
-
-FILES:${PN}-apply = "${sbindir}/ip*-apply"
-RDEPENDS:${PN}-apply = "${PN} bash"
-
-# Include the symlinks as well in respective packages
-FILES:${PN}-module-xt-conntrack += "${libdir}/xtables/libxt_state.so"
-FILES:${PN}-module-xt-ct += "${libdir}/xtables/libxt_NOTRACK.so ${libdir}/xtables/libxt_REDIRECT.so"
-
-ALLOW_EMPTY:${PN}-modules = "1"
-
-INSANE_SKIP:${PN}-module-xt-conntrack = "dev-so"
-INSANE_SKIP:${PN}-module-xt-ct = "dev-so"
diff --git a/meta/recipes-extended/iputils/iputils/0001-rarpd-rdisc-Drop-PrivateUsers.patch b/meta/recipes-extended/iputils/iputils/0001-rarpd-rdisc-Drop-PrivateUsers.patch
deleted file mode 100644
index c61e39dc80..0000000000
--- a/meta/recipes-extended/iputils/iputils/0001-rarpd-rdisc-Drop-PrivateUsers.patch
+++ /dev/null
@@ -1,27 +0,0 @@
-From dfeeb3f1328d09f516edeb6349bd63e3c87f9397 Mon Sep 17 00:00:00 2001
-From: Alex Kiernan <alex.kiernan@gmail.com>
-Date: Thu, 13 Feb 2020 06:08:45 +0000
-Subject: [PATCH] rarpd:Drop PrivateUsers
-
-rarpd cannot gain the necessary capabilities with
-PrivateUsers enabled.
-
-Upstream-Status: Pending
-Signed-off-by: Alex Kiernan <alex.kiernan@gmail.com>
-
----
- systemd/rarpd.service.in | 1 -
- 1 file changed, 1 deletion(-)
-
-diff --git a/systemd/rarpd.service.in b/systemd/rarpd.service.in
-index e600c10..f5d7621 100644
---- a/systemd/rarpd.service.in
-+++ b/systemd/rarpd.service.in
-@@ -12,7 +12,6 @@ AmbientCapabilities=CAP_NET_RAW
- DynamicUser=yes
- PrivateTmp=yes
- PrivateDevices=yes
--PrivateUsers=yes
- ProtectSystem=strict
- ProtectHome=yes
- ProtectControlGroups=yes
diff --git a/meta/recipes-extended/iputils/iputils_20211215.bb b/meta/recipes-extended/iputils/iputils_20211215.bb
deleted file mode 100644
index 3ddce0be54..0000000000
--- a/meta/recipes-extended/iputils/iputils_20211215.bb
+++ /dev/null
@@ -1,66 +0,0 @@
-SUMMARY = "Network monitoring tools"
-DESCRIPTION = "Utilities for the IP protocol, including \
-tracepath, tracepath6, ping, ping6 and arping."
-HOMEPAGE = "https://github.com/iputils/iputils"
-SECTION = "console/network"
-
-LICENSE = "BSD-3-Clause & GPL-2.0-or-later"
-
-LIC_FILES_CHKSUM = "file://LICENSE;md5=bb64c89bb0e23b72930d2380894c47a1"
-
-DEPENDS = "gnutls"
-
-SRC_URI = "git://github.com/iputils/iputils;branch=master;protocol=https \
- file://0001-rarpd-rdisc-Drop-PrivateUsers.patch \
- "
-SRCREV = "1d1e7c43210d8af316a41cb2c53d612a4c16f34d"
-
-S = "${WORKDIR}/git"
-
-UPSTREAM_CHECK_GITTAGREGEX = "(?P<pver>20\d+)"
-
-# Fixed in 2000-10-10, but the versioning of iputils
-# breaks the version order.
-CVE_CHECK_IGNORE += "CVE-2000-1213 CVE-2000-1214"
-
-PACKAGECONFIG ??= "libcap rarpd \
- ${@bb.utils.contains('DISTRO_FEATURES', 'ipv6', 'ninfod', '', d)} \
- ${@bb.utils.filter('DISTRO_FEATURES', 'systemd', d)}"
-PACKAGECONFIG[libcap] = "-DUSE_CAP=true, -DUSE_CAP=false -DNO_SETCAP_OR_SUID=true, libcap libcap-native"
-PACKAGECONFIG[libidn] = "-DUSE_IDN=true, -DUSE_IDN=false, libidn2"
-PACKAGECONFIG[gettext] = "-DUSE_GETTEXT=true, -DUSE_GETTEXT=false, gettext"
-PACKAGECONFIG[ninfod] = "-DBUILD_NINFOD=true,-DBUILD_NINFOD=false,"
-PACKAGECONFIG[rarpd] = "-DBUILD_RARPD=true,-DBUILD_RARPD=false,"
-PACKAGECONFIG[systemd] = "-Dsystemdunitdir=${systemd_system_unitdir},,systemd"
-PACKAGECONFIG[docs] = "-DBUILD_HTML_MANS=true -DBUILD_MANS=true,-DBUILD_HTML_MANS=false -DBUILD_MANS=false, libxslt"
-
-inherit meson systemd update-alternatives pkgconfig
-
-EXTRA_OEMESON += "--prefix=${root_prefix}/ -DSKIP_TESTS=true"
-
-ALTERNATIVE_PRIORITY = "100"
-
-ALTERNATIVE:${PN}-ping = "ping"
-ALTERNATIVE_LINK_NAME[ping] = "${base_bindir}/ping"
-
-SPLITPKGS = "${PN}-ping ${PN}-arping ${PN}-tracepath ${PN}-clockdiff ${PN}-rdisc \
- ${@bb.utils.contains('PACKAGECONFIG', 'rarpd', '${PN}-rarpd', '', d)} \
- ${@bb.utils.contains('DISTRO_FEATURES', 'ipv6', '${PN}-ninfod', '', d)}"
-PACKAGES += "${SPLITPKGS}"
-
-ALLOW_EMPTY:${PN} = "1"
-RDEPENDS:${PN} += "${SPLITPKGS}"
-
-FILES:${PN} = ""
-FILES:${PN}-ping = "${base_bindir}/ping.${BPN}"
-FILES:${PN}-arping = "${base_bindir}/arping"
-FILES:${PN}-tracepath = "${base_bindir}/tracepath"
-FILES:${PN}-clockdiff = "${base_bindir}/clockdiff"
-FILES:${PN}-rarpd = "${base_sbindir}/rarpd ${systemd_system_unitdir}/rarpd@.service"
-FILES:${PN}-rdisc = "${base_sbindir}/rdisc"
-FILES:${PN}-ninfod = "${base_sbindir}/ninfod ${sysconfdir}/init.d/ninfod.sh"
-
-SYSTEMD_PACKAGES = "${@bb.utils.contains('DISTRO_FEATURES', 'ipv6', '${PN}-ninfod', '', d)} \
- ${PN}-rdisc"
-SYSTEMD_SERVICE:${PN}-ninfod = "ninfod.service"
-SYSTEMD_SERVICE:${PN}-rdisc = "rdisc.service"
diff --git a/meta/recipes-extended/iputils/iputils_20240117.bb b/meta/recipes-extended/iputils/iputils_20240117.bb
new file mode 100644
index 0000000000..5a5e15528e
--- /dev/null
+++ b/meta/recipes-extended/iputils/iputils_20240117.bb
@@ -0,0 +1,48 @@
+SUMMARY = "Network monitoring tools"
+DESCRIPTION = "Utilities for the IP protocol, including \
+tracepath, tracepath6, ping, ping6 and arping."
+HOMEPAGE = "https://github.com/iputils/iputils"
+SECTION = "console/network"
+
+LICENSE = "BSD-3-Clause & GPL-2.0-or-later"
+
+LIC_FILES_CHKSUM = "file://LICENSE;md5=627cc07ec86a45951d43e30658bbd819"
+
+DEPENDS = "gnutls"
+
+SRC_URI = "git://github.com/iputils/iputils;branch=master;protocol=https"
+SRCREV = "8372f355bdf7a9b0c79338dd8ef8464c00a5c4e2"
+
+S = "${WORKDIR}/git"
+
+UPSTREAM_CHECK_GITTAGREGEX = "(?P<pver>20\d+)"
+
+CVE_STATUS[CVE-2000-1213] = "fixed-version: Fixed in 2000-10-10, but the versioning of iputils breaks the version order."
+CVE_STATUS[CVE-2000-1214] = "fixed-version: Fixed in 2000-10-10, but the versioning of iputils breaks the version order."
+
+PACKAGECONFIG ??= "libcap"
+PACKAGECONFIG[libcap] = "-DUSE_CAP=true, -DUSE_CAP=false -DNO_SETCAP_OR_SUID=true, libcap libcap-native"
+PACKAGECONFIG[libidn] = "-DUSE_IDN=true, -DUSE_IDN=false, libidn2"
+PACKAGECONFIG[gettext] = "-DUSE_GETTEXT=true, -DUSE_GETTEXT=false, gettext"
+PACKAGECONFIG[docs] = "-DBUILD_HTML_MANS=true -DBUILD_MANS=true,-DBUILD_HTML_MANS=false -DBUILD_MANS=false, libxslt"
+
+inherit meson update-alternatives pkgconfig
+
+EXTRA_OEMESON += "--prefix=${root_prefix}/ -DSKIP_TESTS=true"
+
+ALTERNATIVE_PRIORITY = "100"
+
+ALTERNATIVE:${PN}-ping = "ping"
+ALTERNATIVE_LINK_NAME[ping] = "${base_bindir}/ping"
+
+SPLITPKGS = "${PN}-ping ${PN}-arping ${PN}-tracepath ${PN}-clockdiff"
+PACKAGES += "${SPLITPKGS}"
+
+ALLOW_EMPTY:${PN} = "1"
+RDEPENDS:${PN} += "${SPLITPKGS}"
+
+FILES:${PN} = ""
+FILES:${PN}-ping = "${base_bindir}/ping.${BPN}"
+FILES:${PN}-arping = "${base_bindir}/arping"
+FILES:${PN}-tracepath = "${base_bindir}/tracepath"
+FILES:${PN}-clockdiff = "${base_bindir}/clockdiff"
diff --git a/meta/recipes-extended/less/files/run-ptest b/meta/recipes-extended/less/files/run-ptest
new file mode 100644
index 0000000000..42a2869c84
--- /dev/null
+++ b/meta/recipes-extended/less/files/run-ptest
@@ -0,0 +1,3 @@
+#! /bin/sh
+
+./runtest -l /usr/bin/less -t lesstest -O d lt/*
diff --git a/meta/recipes-extended/less/less_600.bb b/meta/recipes-extended/less/less_600.bb
deleted file mode 100644
index 9ebe39daab..0000000000
--- a/meta/recipes-extended/less/less_600.bb
+++ /dev/null
@@ -1,42 +0,0 @@
-SUMMARY = "Text file viewer similar to more"
-DESCRIPTION = "Less is a program similar to more, i.e. a terminal \
-based program for viewing text files and the output from other \
-programs. Less offers many features beyond those that more does."
-HOMEPAGE = "http://www.greenwoodsoftware.com/"
-SECTION = "console/utils"
-
-# (GPL-2.0-or-later (<< 418), GPL-3.0-or-later (>= 418)) | less
-# Including email author giving permissing to use BSD
-#
-# From: Mark Nudelman <markn@greenwoodsoftware.com>
-# To: Elizabeth Flanagan <elizabeth.flanagan@intel.com
-# Date: 12/19/11
-#
-# Hi Elizabeth,
-# Using a generic BSD license for less is fine with me.
-# Thanks,
-#
-# --Mark
-#
-
-LICENSE = "GPL-3.0-or-later | BSD-2-Clause"
-LIC_FILES_CHKSUM = "file://COPYING;md5=1ebbd3e34237af26da5dc08a4e440464 \
- file://LICENSE;md5=38fc26d78ca8d284a2a5a4bbc263d29b \
- "
-DEPENDS = "ncurses"
-
-SRC_URI = "http://www.greenwoodsoftware.com/${BPN}/${BPN}-${PV}.tar.gz \
- "
-
-SRC_URI[sha256sum] = "6633d6aa2b3cc717afb2c205778c7c42c4620f63b1d682f3d12c98af0be74d20"
-
-UPSTREAM_CHECK_URI = "http://www.greenwoodsoftware.com/less/download.html"
-
-inherit autotools update-alternatives
-
-do_install () {
- oe_runmake 'bindir=${D}${bindir}' 'mandir=${D}${mandir}' install
-}
-
-ALTERNATIVE:${PN} = "less"
-ALTERNATIVE_PRIORITY = "100"
diff --git a/meta/recipes-extended/less/less_643.bb b/meta/recipes-extended/less/less_643.bb
new file mode 100644
index 0000000000..67834bdd58
--- /dev/null
+++ b/meta/recipes-extended/less/less_643.bb
@@ -0,0 +1,61 @@
+SUMMARY = "Text file viewer similar to more"
+DESCRIPTION = "Less is a program similar to more, i.e. a terminal \
+based program for viewing text files and the output from other \
+programs. Less offers many features beyond those that more does."
+HOMEPAGE = "http://www.greenwoodsoftware.com/"
+SECTION = "console/utils"
+
+# (GPL-2.0-or-later (<< 418), GPL-3.0-or-later (>= 418)) | less
+# Including email author giving permissing to use BSD
+#
+# From: Mark Nudelman <markn@greenwoodsoftware.com>
+# To: Elizabeth Flanagan <elizabeth.flanagan@intel.com
+# Date: 12/19/11
+#
+# Hi Elizabeth,
+# Using a generic BSD license for less is fine with me.
+# Thanks,
+#
+# --Mark
+#
+
+LICENSE = "GPL-3.0-or-later | BSD-2-Clause"
+LIC_FILES_CHKSUM = "file://COPYING;md5=1ebbd3e34237af26da5dc08a4e440464 \
+ file://LICENSE;md5=1b2446f5c8632bf63a97d7a49750e1c6 \
+ "
+DEPENDS = "ncurses"
+
+SRC_URI = "http://www.greenwoodsoftware.com/${BPN}/${BPN}-${PV}.tar.gz \
+ file://run-ptest \
+ "
+
+SRC_URI[sha256sum] = "2911b5432c836fa084c8a2e68f6cd6312372c026a58faaa98862731c8b6052e8"
+
+UPSTREAM_CHECK_URI = "http://www.greenwoodsoftware.com/less/download.html"
+
+inherit autotools ptest update-alternatives
+
+EXTRA_OEMAKE += " LESSTEST=1"
+
+inherit autotools update-alternatives
+
+do_compile_ptest () {
+ cd ${S}/lesstest
+ oe_runmake
+}
+
+do_install () {
+ oe_runmake 'bindir=${D}${bindir}' 'mandir=${D}${mandir}' install
+}
+
+do_install_ptest () {
+ cp ${S}/lesstest/lesstest ${D}${PTEST_PATH}
+ cp ${S}/lesstest/runtest ${D}${PTEST_PATH}
+ cp ${S}/lesstest/lt_screen ${D}${PTEST_PATH}
+ cp -r ${S}/lesstest/lt ${D}${PTEST_PATH}
+}
+
+RDEPENDS:${PN}-ptest:append = " perl-module-getopt-std perl-module-cwd locale-base-en-us"
+
+ALTERNATIVE:${PN} = "less"
+ALTERNATIVE_PRIORITY = "100"
diff --git a/meta/recipes-extended/libarchive/libarchive/configurehack.patch b/meta/recipes-extended/libarchive/libarchive/configurehack.patch
new file mode 100644
index 0000000000..f3989d99eb
--- /dev/null
+++ b/meta/recipes-extended/libarchive/libarchive/configurehack.patch
@@ -0,0 +1,49 @@
+To work with autoconf 2.73, tweak the macro ordering in configure.in.
+
+Upstream-Status: Pending
+Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
+
+Index: libarchive-3.6.2/configure.ac
+===================================================================
+--- libarchive-3.6.2.orig/configure.ac
++++ libarchive-3.6.2/configure.ac
+@@ -357,6 +357,19 @@ if test "x$with_bz2lib" != "xno"; then
+ esac
+ fi
+
++# Checks for typedefs, structures, and compiler characteristics.
++AC_C_CONST
++# la_TYPE_UID_T defaults to "int", which is incorrect for MinGW
++# and MSVC. Use a customized version.
++la_TYPE_UID_T
++AC_TYPE_MODE_T
++# AC_TYPE_OFF_T defaults to "long", which limits us to 4GB files on
++# most systems... default to "long long" instead.
++AC_CHECK_TYPE(off_t, [long long])
++AC_TYPE_SIZE_T
++AC_CHECK_TYPE(id_t, [unsigned long])
++AC_CHECK_TYPE(uintptr_t, [unsigned int])
++
+ AC_ARG_WITH([libb2],
+ AS_HELP_STRING([--without-libb2], [Don't build support for BLAKE2 through libb2]))
+
+@@ -558,19 +571,6 @@ LDFLAGS=$save_LDFLAGS
+
+ AC_SUBST(GC_SECTIONS)
+
+-# Checks for typedefs, structures, and compiler characteristics.
+-AC_C_CONST
+-# la_TYPE_UID_T defaults to "int", which is incorrect for MinGW
+-# and MSVC. Use a customized version.
+-la_TYPE_UID_T
+-AC_TYPE_MODE_T
+-# AC_TYPE_OFF_T defaults to "long", which limits us to 4GB files on
+-# most systems... default to "long long" instead.
+-AC_CHECK_TYPE(off_t, [long long])
+-AC_TYPE_SIZE_T
+-AC_CHECK_TYPE(id_t, [unsigned long])
+-AC_CHECK_TYPE(uintptr_t, [unsigned int])
+-
+ # Check for tm_gmtoff in struct tm
+ AC_CHECK_MEMBERS([struct tm.tm_gmtoff, struct tm.__tm_gmtoff],,,
+ [
diff --git a/meta/recipes-extended/libarchive/libarchive_3.6.1.bb b/meta/recipes-extended/libarchive/libarchive_3.6.1.bb
deleted file mode 100644
index c795b41628..0000000000
--- a/meta/recipes-extended/libarchive/libarchive_3.6.1.bb
+++ /dev/null
@@ -1,67 +0,0 @@
-SUMMARY = "Support for reading various archive formats"
-DESCRIPTION = "C library and command-line tools for reading and writing tar, cpio, zip, ISO, and other archive formats"
-HOMEPAGE = "http://www.libarchive.org/"
-SECTION = "devel"
-LICENSE = "BSD-2-Clause"
-LIC_FILES_CHKSUM = "file://COPYING;md5=d499814247adaee08d88080841cb5665"
-
-DEPENDS = "e2fsprogs-native"
-
-PACKAGECONFIG ?= "zlib bz2 xz lzo zstd"
-
-PACKAGECONFIG:append:class-target = "\
- ${@bb.utils.filter('DISTRO_FEATURES', 'acl xattr', d)} \
-"
-
-DEPENDS_BZIP2 = "bzip2-replacement-native"
-DEPENDS_BZIP2:class-target = "bzip2"
-
-PACKAGECONFIG[acl] = "--enable-acl,--disable-acl,acl,"
-PACKAGECONFIG[xattr] = "--enable-xattr,--disable-xattr,attr,"
-PACKAGECONFIG[zlib] = "--with-zlib,--without-zlib,zlib,"
-PACKAGECONFIG[bz2] = "--with-bz2lib,--without-bz2lib,${DEPENDS_BZIP2},"
-PACKAGECONFIG[xz] = "--with-lzma,--without-lzma,xz,"
-PACKAGECONFIG[openssl] = "--with-openssl,--without-openssl,openssl,"
-PACKAGECONFIG[libxml2] = "--with-xml2,--without-xml2,libxml2,"
-PACKAGECONFIG[expat] = "--with-expat,--without-expat,expat,"
-PACKAGECONFIG[lzo] = "--with-lzo2,--without-lzo2,lzo,"
-PACKAGECONFIG[nettle] = "--with-nettle,--without-nettle,nettle,"
-PACKAGECONFIG[lz4] = "--with-lz4,--without-lz4,lz4,"
-PACKAGECONFIG[mbedtls] = "--with-mbedtls,--without-mbedtls,mbedtls,"
-PACKAGECONFIG[zstd] = "--with-zstd,--without-zstd,zstd,"
-
-EXTRA_OECONF += "--enable-largefile"
-
-SRC_URI = "http://libarchive.org/downloads/libarchive-${PV}.tar.gz"
-UPSTREAM_CHECK_URI = "http://libarchive.org/"
-
-SRC_URI[sha256sum] = "c676146577d989189940f1959d9e3980d28513d74eedfbc6b7f15ea45fe54ee2"
-
-inherit autotools update-alternatives pkgconfig
-
-CPPFLAGS += "-I${WORKDIR}/extra-includes"
-
-do_configure[cleandirs] += "${WORKDIR}/extra-includes"
-do_configure:prepend() {
- # We just need the headers for some type constants, so no need to
- # build all of e2fsprogs for the target
- cp -R ${STAGING_INCDIR_NATIVE}/ext2fs ${WORKDIR}/extra-includes/
-}
-
-ALTERNATIVE_PRIORITY = "80"
-
-PACKAGES =+ "bsdtar"
-FILES:bsdtar = "${bindir}/bsdtar"
-
-ALTERNATIVE:bsdtar = "tar"
-ALTERNATIVE_LINK_NAME[tar] = "${base_bindir}/tar"
-ALTERNATIVE_TARGET[tar] = "${bindir}/bsdtar"
-
-PACKAGES =+ "bsdcpio"
-FILES:bsdcpio = "${bindir}/bsdcpio"
-
-ALTERNATIVE:bsdcpio = "cpio"
-ALTERNATIVE_LINK_NAME[cpio] = "${base_bindir}/cpio"
-ALTERNATIVE_TARGET[cpio] = "${bindir}/bsdcpio"
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-extended/libarchive/libarchive_3.7.2.bb b/meta/recipes-extended/libarchive/libarchive_3.7.2.bb
new file mode 100644
index 0000000000..91f521fa4d
--- /dev/null
+++ b/meta/recipes-extended/libarchive/libarchive_3.7.2.bb
@@ -0,0 +1,67 @@
+SUMMARY = "Support for reading various archive formats"
+DESCRIPTION = "C library and command-line tools for reading and writing tar, cpio, zip, ISO, and other archive formats"
+HOMEPAGE = "http://www.libarchive.org/"
+SECTION = "devel"
+LICENSE = "BSD-2-Clause"
+LIC_FILES_CHKSUM = "file://COPYING;md5=d499814247adaee08d88080841cb5665"
+
+DEPENDS = "e2fsprogs-native"
+
+PACKAGECONFIG ?= "zlib bz2 xz zstd ${@bb.utils.filter('DISTRO_FEATURES', 'acl xattr', d)}"
+
+DEPENDS_BZIP2 = "bzip2-replacement-native"
+DEPENDS_BZIP2:class-target = "bzip2"
+
+PACKAGECONFIG[acl] = "--enable-acl,--disable-acl,acl,"
+PACKAGECONFIG[xattr] = "--enable-xattr,--disable-xattr,attr,"
+PACKAGECONFIG[zlib] = "--with-zlib,--without-zlib,zlib,"
+PACKAGECONFIG[bz2] = "--with-bz2lib,--without-bz2lib,${DEPENDS_BZIP2},"
+PACKAGECONFIG[xz] = "--with-lzma,--without-lzma,xz,"
+PACKAGECONFIG[openssl] = "--with-openssl,--without-openssl,openssl,"
+PACKAGECONFIG[libb2] = "--with-libb2,--without-libb2,libb2,"
+PACKAGECONFIG[libxml2] = "--with-xml2,--without-xml2,libxml2,"
+PACKAGECONFIG[expat] = "--with-expat,--without-expat,expat,"
+PACKAGECONFIG[lzo] = "--with-lzo2,--without-lzo2,lzo,"
+PACKAGECONFIG[nettle] = "--with-nettle,--without-nettle,nettle,"
+PACKAGECONFIG[lz4] = "--with-lz4,--without-lz4,lz4,"
+PACKAGECONFIG[mbedtls] = "--with-mbedtls,--without-mbedtls,mbedtls,"
+PACKAGECONFIG[zstd] = "--with-zstd,--without-zstd,zstd,"
+
+EXTRA_OECONF += "--enable-largefile --without-iconv"
+
+SRC_URI = "http://libarchive.org/downloads/libarchive-${PV}.tar.gz"
+SRC_URI += "file://configurehack.patch"
+UPSTREAM_CHECK_URI = "http://libarchive.org/"
+
+SRC_URI[sha256sum] = "df404eb7222cf30b4f8f93828677890a2986b66ff8bf39dac32a804e96ddf104"
+
+CVE_STATUS[CVE-2023-30571] = "upstream-wontfix: upstream has documented that reported function is not thread-safe"
+
+inherit autotools update-alternatives pkgconfig
+
+CPPFLAGS += "-I${WORKDIR}/extra-includes"
+
+do_configure[cleandirs] += "${WORKDIR}/extra-includes"
+do_configure:prepend() {
+ # We just need the headers for some type constants, so no need to
+ # build all of e2fsprogs for the target
+ cp -R ${STAGING_INCDIR_NATIVE}/ext2fs ${WORKDIR}/extra-includes/
+}
+
+ALTERNATIVE_PRIORITY = "80"
+
+PACKAGES =+ "bsdtar"
+FILES:bsdtar = "${bindir}/bsdtar"
+
+ALTERNATIVE:bsdtar = "tar"
+ALTERNATIVE_LINK_NAME[tar] = "${base_bindir}/tar"
+ALTERNATIVE_TARGET[tar] = "${bindir}/bsdtar"
+
+PACKAGES =+ "bsdcpio"
+FILES:bsdcpio = "${bindir}/bsdcpio"
+
+ALTERNATIVE:bsdcpio = "cpio"
+ALTERNATIVE_LINK_NAME[cpio] = "${base_bindir}/cpio"
+ALTERNATIVE_TARGET[cpio] = "${bindir}/bsdcpio"
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-extended/libidn/libidn2_2.3.2.bb b/meta/recipes-extended/libidn/libidn2_2.3.2.bb
deleted file mode 100644
index e1b25b4b8c..0000000000
--- a/meta/recipes-extended/libidn/libidn2_2.3.2.bb
+++ /dev/null
@@ -1,32 +0,0 @@
-SUMMARY = "Internationalized Domain Name support library"
-DESCRIPTION = "Implementation of the Stringprep, Punycode and IDNA specifications defined by the IETF Internationalized Domain Names (IDN) working group."
-HOMEPAGE = "http://www.gnu.org/software/libidn/"
-SECTION = "libs"
-LICENSE = "(GPL-2.0-or-later | LGPL-3.0-only) & GPL-3.0-or-later & Unicode-DFS-2016"
-LIC_FILES_CHKSUM = "file://COPYING;md5=2d834ea7d480438ada04e5d846152395 \
- file://COPYING.LESSERv3;md5=e6a600fd5e1d9cbde2d983680233ad02 \
- file://COPYINGv2;md5=b234ee4d69f5fce4486a80fdaf4a4263 \
- file://COPYING.unicode;md5=684cf5f7e3fded3546679424528261a9 \
- file://src/idn2.c;endline=16;md5=e4b6d628a84a55f1fd8ae4c76c5f6509 \
- file://lib/idn2.h.in;endline=27;md5=d0fc8ec628be130a1d5b889107e92477"
-
-SRC_URI = "${GNU_MIRROR}/libidn/${BPN}-${PV}.tar.gz"
-SRC_URI[sha256sum] = "76940cd4e778e8093579a9d195b25fff5e936e9dc6242068528b437a76764f91"
-
-DEPENDS = "virtual/libiconv libunistring"
-
-inherit pkgconfig autotools gettext texinfo gtk-doc lib_package
-
-EXTRA_OECONF += "--disable-rpath \
- --with-libunistring-prefix=${STAGING_EXECPREFIXDIR} \
- "
-
-do_install:append() {
- # Need to remove any duplicate whitespace too for reproducibility
- sed -i -e 's|-L${STAGING_LIBDIR}||' -e 's/ */ /g' ${D}${libdir}/pkgconfig/libidn2.pc
-}
-
-LICENSE:${PN} = "(GPL-2.0-or-later | LGPL-3.0-only) & Unicode-DFS-2016"
-LICENSE:${PN}-bin = "GPL-3.0-or-later"
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-extended/libidn/libidn2_2.3.7.bb b/meta/recipes-extended/libidn/libidn2_2.3.7.bb
new file mode 100644
index 0000000000..7ae933e047
--- /dev/null
+++ b/meta/recipes-extended/libidn/libidn2_2.3.7.bb
@@ -0,0 +1,33 @@
+SUMMARY = "Internationalized Domain Name support library"
+DESCRIPTION = "Implementation of the Stringprep, Punycode and IDNA specifications defined by the IETF Internationalized Domain Names (IDN) working group."
+HOMEPAGE = "http://www.gnu.org/software/libidn/"
+SECTION = "libs"
+LICENSE = "(GPL-2.0-or-later | LGPL-3.0-only) & GPL-3.0-or-later & Unicode-DFS-2016"
+LIC_FILES_CHKSUM = "file://COPYING;md5=2d834ea7d480438ada04e5d846152395 \
+ file://COPYING.LESSERv3;md5=e6a600fd5e1d9cbde2d983680233ad02 \
+ file://COPYINGv2;md5=b234ee4d69f5fce4486a80fdaf4a4263 \
+ file://COPYING.unicode;md5=684cf5f7e3fded3546679424528261a9 \
+ file://src/idn2.c;endline=16;md5=afc1531bda991ba6338e33a7eff758a0 \
+ file://lib/idn2.h.in;endline=27;md5=f88d218005a5c45b68a83cecb5bd7f26 \
+ "
+
+SRC_URI = "${GNU_MIRROR}/libidn/${BPN}-${PV}.tar.gz"
+SRC_URI[sha256sum] = "4c21a791b610b9519b9d0e12b8097bf2f359b12f8dd92647611a929e6bfd7d64"
+
+DEPENDS = "virtual/libiconv libunistring"
+
+inherit pkgconfig autotools gettext texinfo gtk-doc lib_package
+
+EXTRA_OECONF += "--disable-rpath \
+ --with-libunistring-prefix=${STAGING_EXECPREFIXDIR} \
+ "
+
+do_install:append() {
+ # Need to remove any duplicate whitespace too for reproducibility
+ sed -i -e 's|-L${STAGING_LIBDIR}||' -e 's/ */ /g' ${D}${libdir}/pkgconfig/libidn2.pc
+}
+
+LICENSE:${PN} = "(GPL-2.0-or-later | LGPL-3.0-only) & Unicode-DFS-2016"
+LICENSE:${PN}-bin = "GPL-3.0-or-later"
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-extended/libmnl/libmnl_1.0.5.bb b/meta/recipes-extended/libmnl/libmnl_1.0.5.bb
index 3c5bde3319..748326c0a0 100644
--- a/meta/recipes-extended/libmnl/libmnl_1.0.5.bb
+++ b/meta/recipes-extended/libmnl/libmnl_1.0.5.bb
@@ -6,8 +6,8 @@ SECTION = "libs"
LICENSE = "LGPL-2.1-or-later"
LIC_FILES_CHKSUM = "file://COPYING;md5=4fbd65380cdd255951079008b364516c"
-SRC_URI = "https://netfilter.org/projects/libmnl/files/libmnl-${PV}.tar.bz2;name=tar"
-SRC_URI[tar.sha256sum] = "274b9b919ef3152bfb3da3a13c950dd60d6e2bcd54230ffeca298d03b40d0525"
+SRC_URI = "https://netfilter.org/projects/libmnl/files/libmnl-${PV}.tar.bz2"
+SRC_URI[sha256sum] = "274b9b919ef3152bfb3da3a13c950dd60d6e2bcd54230ffeca298d03b40d0525"
inherit autotools pkgconfig
diff --git a/meta/recipes-extended/libnsl/libnsl2_git.bb b/meta/recipes-extended/libnsl/libnsl2_git.bb
index 7919ef9b24..8cc1f7cec3 100644
--- a/meta/recipes-extended/libnsl/libnsl2_git.bb
+++ b/meta/recipes-extended/libnsl/libnsl2_git.bb
@@ -10,9 +10,9 @@ LIC_FILES_CHKSUM = "file://COPYING;md5=4fbd65380cdd255951079008b364516c"
SECTION = "libs"
DEPENDS = "libtirpc"
-PV = "2.0.0"
+PV = "2.0.1"
-SRCREV = "82245c0c58add79a8e34ab0917358217a70e5100"
+SRCREV = "d4b22e54b5e6637a69b26eab5faad2a326c9b182"
SRC_URI = "git://github.com/thkukuk/libnsl;branch=master;protocol=https \
"
diff --git a/meta/recipes-extended/libnss-nis/libnss-nis.bb b/meta/recipes-extended/libnss-nis/libnss-nis.bb
index d0afb3ca0a..f0e687c330 100644
--- a/meta/recipes-extended/libnss-nis/libnss-nis.bb
+++ b/meta/recipes-extended/libnss-nis/libnss-nis.bb
@@ -13,9 +13,9 @@ LIC_FILES_CHKSUM = "file://COPYING;md5=4fbd65380cdd255951079008b364516c"
SECTION = "libs"
DEPENDS += "libtirpc libnsl2"
-PV = "3.1+git${SRCPV}"
+PV = "3.2"
-SRCREV = "062f31999b35393abf7595cb89dfc9590d5a42ad"
+SRCREV = "cd0d391af9535b56e612ed227c1b89be269f3d59"
SRC_URI = "git://github.com/thkukuk/libnss_nis;branch=master;protocol=https \
"
diff --git a/meta/recipes-extended/libpipeline/libpipeline/autoconf-2.73.patch b/meta/recipes-extended/libpipeline/libpipeline/autoconf-2.73.patch
new file mode 100644
index 0000000000..e61e5aa869
--- /dev/null
+++ b/meta/recipes-extended/libpipeline/libpipeline/autoconf-2.73.patch
@@ -0,0 +1,24 @@
+The gnulib largefile macro needs updating to work with autoconf 2.73. Rather
+than the full code:
+
+https://git.savannah.gnu.org/cgit/gnulib.git/commit/m4/largefile.m4?id=f91f633858cf132e50924224c50d6264a92caabb
+
+Just tweak the exiting code to work with 2.73. The next libpipeline upgrade should
+update to new gnulib
+
+Upstream-Status: Inappropriate
+Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
+
+Index: findutils-4.9.0/gl/m4/largefile.m4
+===================================================================
+--- findutils-4.9.0.orig/gl/m4/largefile.m4
++++ findutils-4.9.0/gl/m4/largefile.m4
+@@ -26,7 +26,7 @@ AC_DEFUN([gl_SET_LARGEFILE_SOURCE],
+ # with _TIME_BITS. Also, work around a problem in autoconf <= 2.69:
+ # AC_SYS_LARGEFILE does not configure for large inodes on Mac OS X 10.5,
+ # or configures them incorrectly in some cases.
+-m4_version_prereq([2.70], [], [
++m4_version_prereq([2.73], [], [
+
+ # _AC_SYS_LARGEFILE_TEST_INCLUDES
+ # -------------------------------
diff --git a/meta/recipes-extended/libpipeline/libpipeline_1.5.6.bb b/meta/recipes-extended/libpipeline/libpipeline_1.5.6.bb
deleted file mode 100644
index 7007608a39..0000000000
--- a/meta/recipes-extended/libpipeline/libpipeline_1.5.6.bb
+++ /dev/null
@@ -1,14 +0,0 @@
-SUMMARY = "pipeline manipulation library"
-DESCRIPTION = "This is a C library for setting up and running pipelines of processes, \
-without needing to involve shell command-line parsing which is often \
-error-prone and insecure."
-HOMEPAGE = "http://libpipeline.nongnu.org/"
-LICENSE = "GPL-3.0-only"
-LIC_FILES_CHKSUM = "file://COPYING;md5=1ebbd3e34237af26da5dc08a4e440464"
-
-SRC_URI = "${SAVANNAH_GNU_MIRROR}/libpipeline/libpipeline-${PV}.tar.gz"
-SRC_URI[sha256sum] = "60fbb9e7dc398528e5f3a776af57bb28ca3fe5d9f0cd8a961ac6cebfe6e9b797"
-
-inherit pkgconfig autotools
-
-acpaths = "-I ${S}/gl/m4 -I ${S}/m4"
diff --git a/meta/recipes-extended/libpipeline/libpipeline_1.5.7.bb b/meta/recipes-extended/libpipeline/libpipeline_1.5.7.bb
new file mode 100644
index 0000000000..7e3f13ebde
--- /dev/null
+++ b/meta/recipes-extended/libpipeline/libpipeline_1.5.7.bb
@@ -0,0 +1,15 @@
+SUMMARY = "pipeline manipulation library"
+DESCRIPTION = "This is a C library for setting up and running pipelines of processes, \
+without needing to involve shell command-line parsing which is often \
+error-prone and insecure."
+HOMEPAGE = "http://libpipeline.nongnu.org/"
+LICENSE = "GPL-3.0-only"
+LIC_FILES_CHKSUM = "file://COPYING;md5=1ebbd3e34237af26da5dc08a4e440464"
+
+SRC_URI = "${SAVANNAH_GNU_MIRROR}/libpipeline/libpipeline-${PV}.tar.gz"
+SRC_URI += "file://autoconf-2.73.patch"
+SRC_URI[sha256sum] = "b8b45194989022a79ec1317f64a2a75b1551b2a55bea06f67704cb2a2e4690b0"
+
+inherit pkgconfig autotools
+
+acpaths = "-I ${S}/gl/m4 -I ${S}/m4"
diff --git a/meta/recipes-extended/libsolv/libsolv_0.7.22.bb b/meta/recipes-extended/libsolv/libsolv_0.7.22.bb
deleted file mode 100644
index c27bf9c67e..0000000000
--- a/meta/recipes-extended/libsolv/libsolv_0.7.22.bb
+++ /dev/null
@@ -1,33 +0,0 @@
-SUMMARY = "Library for solving packages and reading repositories"
-DESCRIPTION = "This is libsolv, a free package dependency solver using a satisfiability algorithm for solving packages and reading repositories"
-HOMEPAGE = "https://github.com/openSUSE/libsolv"
-BUGTRACKER = "https://github.com/openSUSE/libsolv/issues"
-SECTION = "devel"
-LICENSE = "BSD-3-Clause"
-LIC_FILES_CHKSUM = "file://LICENSE.BSD;md5=62272bd11c97396d4aaf1c41bc11f7d8"
-
-DEPENDS = "expat zlib"
-
-SRC_URI = "git://github.com/openSUSE/libsolv.git;branch=master;protocol=https \
- file://0001-utils-Conside-musl-when-wrapping-qsort_r.patch \
-"
-
-SRCREV = "ea114b257b78bda6ee3e4238118ed93bc57c5984"
-
-UPSTREAM_CHECK_GITTAGREGEX = "(?P<pver>\d+(\.\d+)+)"
-
-S = "${WORKDIR}/git"
-
-inherit cmake
-
-PACKAGECONFIG ??= "${@bb.utils.contains('PACKAGE_CLASSES','package_rpm','rpm','',d)}"
-PACKAGECONFIG[rpm] = "-DENABLE_RPMMD=ON -DENABLE_RPMDB=ON,,rpm"
-
-EXTRA_OECMAKE = "-DMULTI_SEMANTICS=ON -DENABLE_COMPLEX_DEPS=ON"
-
-PACKAGES =+ "${PN}-tools ${PN}ext"
-
-FILES:${PN}-tools = "${bindir}/*"
-FILES:${PN}ext = "${libdir}/${PN}ext.so.*"
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-extended/libsolv/libsolv_0.7.28.bb b/meta/recipes-extended/libsolv/libsolv_0.7.28.bb
new file mode 100644
index 0000000000..201059323a
--- /dev/null
+++ b/meta/recipes-extended/libsolv/libsolv_0.7.28.bb
@@ -0,0 +1,33 @@
+SUMMARY = "Library for solving packages and reading repositories"
+DESCRIPTION = "This is libsolv, a free package dependency solver using a satisfiability algorithm for solving packages and reading repositories"
+HOMEPAGE = "https://github.com/openSUSE/libsolv"
+BUGTRACKER = "https://github.com/openSUSE/libsolv/issues"
+SECTION = "devel"
+LICENSE = "BSD-3-Clause"
+LIC_FILES_CHKSUM = "file://LICENSE.BSD;md5=62272bd11c97396d4aaf1c41bc11f7d8"
+
+DEPENDS = "expat zlib zstd"
+
+SRC_URI = "git://github.com/openSUSE/libsolv.git;branch=master;protocol=https \
+ file://0001-utils-Conside-musl-when-wrapping-qsort_r.patch \
+"
+
+SRCREV = "c8dbb3a77c86600ce09d4f80a504cf4e78a3c359"
+
+UPSTREAM_CHECK_GITTAGREGEX = "(?P<pver>\d+(\.\d+)+)"
+
+S = "${WORKDIR}/git"
+
+inherit cmake
+
+PACKAGECONFIG ??= "${@bb.utils.contains('PACKAGE_CLASSES','package_rpm','rpm','',d)}"
+PACKAGECONFIG[rpm] = "-DENABLE_RPMMD=ON -DENABLE_RPMDB=ON,,rpm"
+
+EXTRA_OECMAKE = "-DMULTI_SEMANTICS=ON -DENABLE_COMPLEX_DEPS=ON -DENABLE_ZSTD_COMPRESSION=ON"
+
+PACKAGES =+ "${PN}-tools ${PN}ext"
+
+FILES:${PN}-tools = "${bindir}/*"
+FILES:${PN}ext = "${libdir}/${PN}ext.so.*"
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-extended/libtirpc/libtirpc/ipv6.patch b/meta/recipes-extended/libtirpc/libtirpc/ipv6.patch
new file mode 100644
index 0000000000..f746f986f4
--- /dev/null
+++ b/meta/recipes-extended/libtirpc/libtirpc/ipv6.patch
@@ -0,0 +1,52 @@
+From 077bbd32e8b7474dc5f153997732e1e6aec7fad6 Mon Sep 17 00:00:00 2001
+Message-Id: <077bbd32e8b7474dc5f153997732e1e6aec7fad6.1697120796.git.joerg.sommer@navimatix.de>
+From: =?UTF-8?q?J=C3=B6rg=20Sommer?= <joerg.sommer@navimatix.de>
+Date: Thu, 12 Oct 2023 16:22:59 +0200
+Subject: [PATCH] netconfig: remove tcp6, udp6 on --disable-ipv6
+MIME-Version: 1.0
+Content-Type: text/plain; charset=UTF-8
+Content-Transfer-Encoding: 8bit
+
+If the configuration for IPv6 is disabled, the netconfig should not contain
+settings for tcp6 and udp6.
+
+The test for the configure option didn't work, because it check the wrong
+variable.
+
+Signed-off-by: Jörg Sommer <joerg.sommer@navimatix.de>
+Upstream-Status: Submitted [libtirpc-devel@lists.sourceforge.net]
+Upstream-Status: Submitted [linux-nfs@vger.kernel.org]
+---
+ configure.ac | 2 +-
+ doc/Makefile.am | 5 +++++
+ 2 files changed, 6 insertions(+), 1 deletion(-)
+
+diff --git a/configure.ac b/configure.ac
+index fe6c517..b687f8d 100644
+--- a/configure.ac
++++ b/configure.ac
+@@ -64,7 +64,7 @@ fi
+ AC_ARG_ENABLE(ipv6,
+ [AC_HELP_STRING([--disable-ipv6], [Disable IPv6 support @<:@default=no@:>@])],
+ [],[enable_ipv6=yes])
+-AM_CONDITIONAL(INET6, test "x$disable_ipv6" != xno)
++AM_CONDITIONAL(INET6, test "x$enable_ipv6" != xno)
+ if test "x$enable_ipv6" != xno; then
+ AC_DEFINE(INET6, 1, [Define to 1 if IPv6 is available])
+ fi
+diff --git a/doc/Makefile.am b/doc/Makefile.am
+index d42ab90..b9678f6 100644
+--- a/doc/Makefile.am
++++ b/doc/Makefile.am
+@@ -2,3 +2,8 @@ dist_sysconf_DATA = netconfig bindresvport.blacklist
+
+ CLEANFILES = cscope.* *~
+ DISTCLEANFILES = Makefile.in
++
++if ! INET6
++install-exec-hook:
++ $(SED) -i '/^tcp6\|^udp6/d' "$(DESTDIR)$(sysconfdir)"/netconfig
++endif
+--
+2.34.1
+
diff --git a/meta/recipes-extended/libtirpc/libtirpc_1.3.2.bb b/meta/recipes-extended/libtirpc/libtirpc_1.3.2.bb
deleted file mode 100644
index 45b3d2befc..0000000000
--- a/meta/recipes-extended/libtirpc/libtirpc_1.3.2.bb
+++ /dev/null
@@ -1,25 +0,0 @@
-SUMMARY = "Transport-Independent RPC library"
-DESCRIPTION = "Libtirpc is a port of Suns Transport-Independent RPC library to Linux"
-SECTION = "libs/network"
-HOMEPAGE = "http://sourceforge.net/projects/libtirpc/"
-BUGTRACKER = "http://sourceforge.net/tracker/?group_id=183075&atid=903784"
-LICENSE = "BSD-3-Clause"
-LIC_FILES_CHKSUM = "file://COPYING;md5=f835cce8852481e4b2bbbdd23b5e47f3 \
- file://src/netname.c;beginline=1;endline=27;md5=f8a8cd2cb25ac5aa16767364fb0e3c24"
-
-PROVIDES = "virtual/librpc"
-
-SRC_URI = "${SOURCEFORGE_MIRROR}/${BPN}/${BP}.tar.bz2"
-UPSTREAM_CHECK_URI = "https://sourceforge.net/projects/libtirpc/files/libtirpc/"
-UPSTREAM_CHECK_REGEX = "(?P<pver>\d+(\.\d+)+)/"
-SRC_URI[sha256sum] = "e24eb88b8ce7db3b7ca6eb80115dd1284abc5ec32a8deccfed2224fc2532b9fd"
-
-inherit autotools pkgconfig
-
-EXTRA_OECONF = "--disable-gssapi"
-
-do_install:append() {
- chown root:root ${D}${sysconfdir}/netconfig
-}
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-extended/libtirpc/libtirpc_1.3.4.bb b/meta/recipes-extended/libtirpc/libtirpc_1.3.4.bb
new file mode 100644
index 0000000000..aa526e9e1c
--- /dev/null
+++ b/meta/recipes-extended/libtirpc/libtirpc_1.3.4.bb
@@ -0,0 +1,33 @@
+SUMMARY = "Transport-Independent RPC library"
+DESCRIPTION = "Libtirpc is a port of Suns Transport-Independent RPC library to Linux"
+SECTION = "libs/network"
+HOMEPAGE = "http://sourceforge.net/projects/libtirpc/"
+BUGTRACKER = "http://sourceforge.net/tracker/?group_id=183075&atid=903784"
+LICENSE = "BSD-3-Clause"
+LIC_FILES_CHKSUM = "file://COPYING;md5=f835cce8852481e4b2bbbdd23b5e47f3 \
+ file://src/netname.c;beginline=1;endline=27;md5=f8a8cd2cb25ac5aa16767364fb0e3c24"
+
+PROVIDES = "virtual/librpc"
+
+SRC_URI = "${SOURCEFORGE_MIRROR}/${BPN}/${BP}.tar.bz2 \
+ file://ipv6.patch \
+"
+UPSTREAM_CHECK_URI = "https://sourceforge.net/projects/libtirpc/files/libtirpc/"
+UPSTREAM_CHECK_REGEX = "(?P<pver>\d+(\.\d+)+)/"
+SRC_URI[sha256sum] = "1e0b0c7231c5fa122e06c0609a76723664d068b0dba3b8219b63e6340b347860"
+
+CVE_STATUS[CVE-2021-46828] = "fixed-version: fixed in 1.3.3rc1 so not present in 1.3.3"
+
+inherit autotools pkgconfig
+
+PACKAGECONFIG ??= "\
+ ${@bb.utils.filter('DISTRO_FEATURES', 'ipv6', d)} \
+"
+PACKAGECONFIG[ipv6] = "--enable-ipv6,--disable-ipv6"
+PACKAGECONFIG[gssapi] = "--enable-gssapi,--disable-gssapi,krb5"
+
+do_install:append() {
+ test -e ${D}${sysconfdir}/netconfig && chown root:root ${D}${sysconfdir}/netconfig
+}
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-extended/lighttpd/lighttpd/lighttpd b/meta/recipes-extended/lighttpd/lighttpd/lighttpd
index 82fbaa523b..f369dce42c 100644
--- a/meta/recipes-extended/lighttpd/lighttpd/lighttpd
+++ b/meta/recipes-extended/lighttpd/lighttpd/lighttpd
@@ -6,9 +6,14 @@ NAME=lighttpd
DESC="Lighttpd Web Server"
OPTS="-f /etc/lighttpd/lighttpd.conf"
+configtest() {
+ "$DAEMON" $OPTS -tt || exit 1
+}
+
case "$1" in
start)
echo -n "Starting $DESC: "
+ configtest
start-stop-daemon --start -x "$DAEMON" -- $OPTS
echo "$NAME."
;;
@@ -17,16 +22,26 @@ case "$1" in
start-stop-daemon --stop -x "$DAEMON"
echo "$NAME."
;;
- restart|force-reload)
+ restart)
echo -n "Restarting $DESC: "
+ configtest
start-stop-daemon --stop -x "$DAEMON"
sleep 1
start-stop-daemon --start -x "$DAEMON" -- $OPTS
echo "$NAME."
;;
+ reload|force-reload)
+ echo -n "Reloading $DESC: "
+ configtest
+ killall -USR1 "${DAEMON##*/}"
+ echo "$NAME."
+ ;;
+ configtest)
+ configtest
+ ;;
*)
N=/etc/init.d/$NAME
- echo "Usage: $N {start|stop|restart|force-reload}" >&2
+ echo "Usage: $N {start|stop|restart|reload|force-reload|configtest}" >&2
exit 1
;;
esac
diff --git a/meta/recipes-extended/lighttpd/lighttpd/lighttpd.conf b/meta/recipes-extended/lighttpd/lighttpd/lighttpd.conf
index 6e8402d242..47a6c93349 100644
--- a/meta/recipes-extended/lighttpd/lighttpd/lighttpd.conf
+++ b/meta/recipes-extended/lighttpd/lighttpd/lighttpd.conf
@@ -16,8 +16,6 @@ server.modules = (
# "mod_redirect",
# "mod_alias",
"mod_access",
-# "mod_cml",
-# "mod_trigger_b4_dl",
# "mod_auth",
# "mod_status",
# "mod_setenv",
@@ -27,11 +25,9 @@ server.modules = (
# "mod_evhost",
# "mod_userdir",
# "mod_cgi",
-# "mod_compress",
# "mod_ssi",
-# "mod_usertrack",
# "mod_expire",
-# "mod_secdownload",
+# "mod_deflate",
# "mod_rrdtool",
# "mod_webdav",
"mod_accesslog" )
@@ -47,9 +43,6 @@ server.errorlog = "/www/logs/lighttpd.error.log"
index-file.names = ( "index.php", "index.html",
"index.htm", "default.htm" )
-## set the event-handler (read the performance section in the manual)
-# server.event-handler = "freebsd-kqueue" # needed on OS X
-
# mimetype mapping
mimetype.assign = (
".pdf" => "application/pdf",
@@ -115,7 +108,6 @@ mimetype.assign = (
#### accesslog module
accesslog.filename = "/www/logs/access.log"
-debug.log-request-handling = "enable"
@@ -127,10 +119,6 @@ debug.log-request-handling = "enable"
# of the document-root
url.access-deny = ( "~", ".inc" )
-$HTTP["url"] =~ "\.pdf$" {
- server.range-requests = "disable"
-}
-
##
# which extensions should not be handle via static-file transfer
#
@@ -177,6 +165,7 @@ static-file.exclude-extensions = ( ".php", ".pl", ".fcgi" )
#dir-listing.activate = "enable"
## enable debugging
+#debug.log-request-header-on-error = "enable"
#debug.log-request-header = "enable"
#debug.log-response-header = "enable"
#debug.log-request-handling = "enable"
@@ -194,8 +183,9 @@ static-file.exclude-extensions = ( ".php", ".pl", ".fcgi" )
#server.groupname = "wwwrun"
#### compress module
-#compress.cache-dir = "/tmp/lighttpd/cache/compress/"
-#compress.filetype = ("text/plain", "text/html")
+#deflate.cache-dir = "/tmp/lighttpd/cache/compress/"
+#deflate.mimetypes = ("text/plain", "text/html")
+#deflate.allowed-encodings = ("gzip")
#### proxy module
## read proxy.txt for more info
@@ -227,7 +217,8 @@ static-file.exclude-extensions = ( ".php", ".pl", ".fcgi" )
#### SSL engine
#ssl.engine = "enable"
-#ssl.pemfile = "server.pem"
+#ssl.pemfile = "/path/to/fullchain.pem"
+#ssl.privkey = "/path/to/privkey.pem"
#### status module
#status.status-url = "/server-status"
@@ -291,19 +282,6 @@ static-file.exclude-extensions = ( ".php", ".pl", ".fcgi" )
#setenv.add-request-header = ( "TRAV_ENV" => "mysql://user@host/db" )
#setenv.add-response-header = ( "X-Secret-Message" => "42" )
-## for mod_trigger_b4_dl
-# trigger-before-download.gdbm-filename = "/home/weigon/testbase/trigger.db"
-# trigger-before-download.memcache-hosts = ( "127.0.0.1:11211" )
-# trigger-before-download.trigger-url = "^/trigger/"
-# trigger-before-download.download-url = "^/download/"
-# trigger-before-download.deny-url = "http://127.0.0.1/index.html"
-# trigger-before-download.trigger-timeout = 10
-
-## for mod_cml
-## don't forget to add index.cml to server.indexfiles
-# cml.extension = ".cml"
-# cml.memcache-hosts = ( "127.0.0.1:11211" )
-
#### variable usage:
## variable name without "." is auto prefixed by "var." and becomes "var.bar"
#bar = 1
@@ -328,4 +306,4 @@ static-file.exclude-extensions = ( ".php", ".pl", ".fcgi" )
#var.a=1
# include other config file fragments from lighttpd.d subdir
-include_shell "find /etc/lighttpd.d -maxdepth 1 -name '*.conf' -exec cat {} \;"
+include "/etc/lighttpd.d/*.conf"
diff --git a/meta/recipes-extended/lighttpd/lighttpd_1.4.64.bb b/meta/recipes-extended/lighttpd/lighttpd_1.4.64.bb
deleted file mode 100644
index 8d2e77e011..0000000000
--- a/meta/recipes-extended/lighttpd/lighttpd_1.4.64.bb
+++ /dev/null
@@ -1,79 +0,0 @@
-SUMMARY = "Lightweight high-performance web server"
-HOMEPAGE = "http://www.lighttpd.net/"
-DESCRIPTION = "Lightweight high-performance web server is designed and optimized for high performance environments. With a small memory footprint compared to other web-servers, effective management of the cpu-load, and advanced feature set (FastCGI, SCGI, Auth, Output-Compression, URL-Rewriting and many more)"
-BUGTRACKER = "http://redmine.lighttpd.net/projects/lighttpd/issues"
-
-LICENSE = "BSD-3-Clause"
-LIC_FILES_CHKSUM = "file://COPYING;md5=e4dac5c6ab169aa212feb5028853a579"
-
-SECTION = "net"
-RDEPENDS:${PN} = "lighttpd-module-dirlisting \
- lighttpd-module-indexfile \
- lighttpd-module-staticfile"
-RRECOMMENDS:${PN} = "lighttpd-module-access \
- lighttpd-module-accesslog"
-
-SRC_URI = "http://download.lighttpd.net/lighttpd/releases-1.4.x/lighttpd-${PV}.tar.xz \
- file://index.html.lighttpd \
- file://lighttpd.conf \
- file://lighttpd \
- "
-
-SRC_URI[sha256sum] = "e1489d9fa7496fbf2e071c338b593b2300d38c23f1e5967e52c9ef482e1b0e26"
-
-DEPENDS = "virtual/crypt"
-
-PACKAGECONFIG ??= "openssl pcre zlib \
- ${@bb.utils.contains('DISTRO_FEATURES', 'xattr', 'attr', '', d)} \
-"
-
-PACKAGECONFIG[libev] = "-Dwith_libev=true,-Dwith_libev=false,libev"
-PACKAGECONFIG[mysql] = "-Dwith_mysql=true,-Dwith_mysql=false,mariadb"
-PACKAGECONFIG[ldap] = "-Dwith_ldap=true,-Dwith_ldap=false,openldap"
-PACKAGECONFIG[attr] = "-Dwith_xattr=true,-Dwith_xattr=false,attr"
-PACKAGECONFIG[openssl] = "-Dwith_openssl=true,-Dwith_openssl=false,openssl"
-PACKAGECONFIG[krb5] = "-Dwith_krb5=true,-Dwith_krb5=false,krb5"
-PACKAGECONFIG[pcre] = "-Dwith_pcre=true,-Dwith_pcre=false,libpcre"
-PACKAGECONFIG[zlib] = "-Dwith_zlib=true,-Dwith_zlib=false,zlib"
-PACKAGECONFIG[bzip2] = "-Dwith_bzip=true,-Dwith_bzip=false,bzip2"
-PACKAGECONFIG[webdav-props] = "-Dwith_webdav_props=true,-Dwith_webdav_props=false,libxml2 sqlite3"
-PACKAGECONFIG[webdav-locks] = "-Dwith_webdav_locks=true,-Dwith_webdav_locks=false,util-linux"
-PACKAGECONFIG[lua] = "-Dwith_lua=true,-Dwith_lua=false,lua"
-PACKAGECONFIG[zstd] = "-Dwith_zstd=true,-Dwith_zstd=false,zstd"
-
-inherit meson pkgconfig update-rc.d gettext systemd
-
-INITSCRIPT_NAME = "lighttpd"
-INITSCRIPT_PARAMS = "defaults 70"
-
-SYSTEMD_SERVICE:${PN} = "lighttpd.service"
-
-do_install:append() {
- install -d ${D}${sysconfdir}/init.d ${D}${sysconfdir}/lighttpd ${D}${sysconfdir}/lighttpd.d ${D}/www/pages/dav
- install -m 0755 ${WORKDIR}/lighttpd ${D}${sysconfdir}/init.d
- install -m 0644 ${WORKDIR}/lighttpd.conf ${D}${sysconfdir}/lighttpd
- install -m 0644 ${WORKDIR}/index.html.lighttpd ${D}/www/pages/index.html
-
- install -d ${D}${systemd_system_unitdir}
- install -m 0644 ${S}/doc/systemd/lighttpd.service ${D}${systemd_system_unitdir}
- sed -i -e 's,@SBINDIR@,${sbindir},g' \
- -e 's,@SYSCONFDIR@,${sysconfdir},g' \
- -e 's,@BASE_BINDIR@,${base_bindir},g' \
- ${D}${systemd_system_unitdir}/lighttpd.service
- #For FHS compliance, create symbolic links to /var/log and /var/tmp for logs and temporary data
- ln -sf ${localstatedir}/log ${D}/www/logs
- ln -sf ${localstatedir}/tmp ${D}/www/var
-}
-
-# bitbake.conf sets ${libdir}/${BPN}/* in FILES, which messes up the module split.
-# So we re-do the variable.
-FILES:${PN} = "${sysconfdir} /www ${sbindir}"
-
-CONFFILES:${PN} = "${sysconfdir}/lighttpd/lighttpd.conf"
-
-PACKAGES_DYNAMIC += "^lighttpd-module-.*"
-
-python populate_packages:prepend () {
- lighttpd_libdir = d.expand('${prefix}/lib/lighttpd')
- do_split_packages(d, lighttpd_libdir, r'^mod_(.*)\.so$', 'lighttpd-module-%s', 'Lighttpd module for %s', extra_depends='')
-}
diff --git a/meta/recipes-extended/lighttpd/lighttpd_1.4.75.bb b/meta/recipes-extended/lighttpd/lighttpd_1.4.75.bb
new file mode 100644
index 0000000000..fc3b7e005d
--- /dev/null
+++ b/meta/recipes-extended/lighttpd/lighttpd_1.4.75.bb
@@ -0,0 +1,76 @@
+SUMMARY = "Lightweight high-performance web server"
+HOMEPAGE = "http://www.lighttpd.net/"
+DESCRIPTION = "Lightweight high-performance web server is designed and optimized for high performance environments. With a small memory footprint compared to other web-servers, effective management of the cpu-load, and advanced feature set (FastCGI, SCGI, Auth, Output-Compression, URL-Rewriting and many more)"
+BUGTRACKER = "http://redmine.lighttpd.net/projects/lighttpd/issues"
+
+LICENSE = "BSD-3-Clause"
+LIC_FILES_CHKSUM = "file://COPYING;md5=e4dac5c6ab169aa212feb5028853a579"
+
+SECTION = "net"
+RDEPENDS:${PN} = "lighttpd-module-dirlisting"
+RRECOMMENDS:${PN} = "lighttpd-module-accesslog"
+
+SRC_URI = "http://download.lighttpd.net/lighttpd/releases-1.4.x/lighttpd-${PV}.tar.xz \
+ file://index.html.lighttpd \
+ file://lighttpd.conf \
+ file://lighttpd \
+ "
+
+SRC_URI[sha256sum] = "8b721ca939d312afaa6ef31dcbd6afb5161ed385ac828e6fccd4c5b76be189d6"
+
+DEPENDS = "virtual/crypt"
+
+PACKAGECONFIG ??= "openssl pcre zlib \
+ ${@bb.utils.contains('DISTRO_FEATURES', 'xattr', 'attr', '', d)} \
+"
+
+PACKAGECONFIG[libev] = "-Dwith_libev=enabled,-Dwith_libev=disabled,libev"
+PACKAGECONFIG[mysql] = "-Dwith_mysql=enabled,-Dwith_mysql=disabled,mariadb"
+PACKAGECONFIG[ldap] = "-Dwith_ldap=enabled,-Dwith_ldap=disabled,openldap"
+PACKAGECONFIG[attr] = "-Dwith_xattr=true,-Dwith_xattr=false,attr"
+PACKAGECONFIG[openssl] = "-Dwith_openssl=true,-Dwith_openssl=false,openssl"
+PACKAGECONFIG[krb5] = "-Dwith_krb5=enabled,-Dwith_krb5=disabled,krb5"
+PACKAGECONFIG[pcre] = "-Dwith_pcre=pcre2,-Dwith_pcre=disabled,libpcre2"
+PACKAGECONFIG[zlib] = "-Dwith_zlib=enabled,-Dwith_zlib=disabled,zlib"
+PACKAGECONFIG[bzip2] = "-Dwith_bzip=enabled,-Dwith_bzip=disabled,bzip2"
+PACKAGECONFIG[webdav-props] = "-Dwith_webdav_props=enabled,-Dwith_webdav_props=disabled,libxml2 sqlite3"
+PACKAGECONFIG[webdav-locks] = "-Dwith_webdav_locks=enabled,-Dwith_webdav_locks=disabled,util-linux"
+PACKAGECONFIG[lua] = "-Dwith_lua=true,-Dwith_lua=false,lua"
+PACKAGECONFIG[zstd] = "-Dwith_zstd=enabled,-Dwith_zstd=disabled,zstd"
+
+inherit meson pkgconfig update-rc.d gettext systemd
+
+INITSCRIPT_NAME = "lighttpd"
+INITSCRIPT_PARAMS = "defaults 70"
+
+SYSTEMD_SERVICE:${PN} = "lighttpd.service"
+
+do_install:append() {
+ install -d ${D}${sysconfdir}/init.d ${D}${sysconfdir}/lighttpd ${D}${sysconfdir}/lighttpd.d ${D}/www/pages/dav
+ install -m 0755 ${WORKDIR}/lighttpd ${D}${sysconfdir}/init.d
+ install -m 0644 ${WORKDIR}/lighttpd.conf ${D}${sysconfdir}/lighttpd
+ install -m 0644 ${WORKDIR}/index.html.lighttpd ${D}/www/pages/index.html
+
+ install -d ${D}${systemd_system_unitdir}
+ install -m 0644 ${S}/doc/systemd/lighttpd.service ${D}${systemd_system_unitdir}
+ sed -i -e 's,@SBINDIR@,${sbindir},g' \
+ -e 's,@SYSCONFDIR@,${sysconfdir},g' \
+ -e 's,@BASE_BINDIR@,${base_bindir},g' \
+ ${D}${systemd_system_unitdir}/lighttpd.service
+ #For FHS compliance, create symbolic links to /var/log and /var/tmp for logs and temporary data
+ ln -sf ${localstatedir}/log ${D}/www/logs
+ ln -sf ${localstatedir}/tmp ${D}/www/var
+}
+
+# bitbake.conf sets ${libdir}/${BPN}/* in FILES, which messes up the module split.
+# So we re-do the variable.
+FILES:${PN} = "${sysconfdir} /www ${sbindir}"
+
+CONFFILES:${PN} = "${sysconfdir}/lighttpd/lighttpd.conf"
+
+PACKAGES_DYNAMIC += "^lighttpd-module-.*"
+
+python populate_packages:prepend () {
+ lighttpd_libdir = d.expand('${prefix}/lib/lighttpd')
+ do_split_packages(d, lighttpd_libdir, r'^mod_(.*)\.so$', 'lighttpd-module-%s', 'Lighttpd module for %s', extra_depends='')
+}
diff --git a/meta/recipes-extended/logrotate/logrotate/run-ptest b/meta/recipes-extended/logrotate/logrotate/run-ptest
new file mode 100755
index 0000000000..b272def65f
--- /dev/null
+++ b/meta/recipes-extended/logrotate/logrotate/run-ptest
@@ -0,0 +1,5 @@
+#!/bin/sh
+
+set -u
+
+make -k check
diff --git a/meta/recipes-extended/logrotate/logrotate_3.19.0.bb b/meta/recipes-extended/logrotate/logrotate_3.19.0.bb
deleted file mode 100644
index 2a60d9b31f..0000000000
--- a/meta/recipes-extended/logrotate/logrotate_3.19.0.bb
+++ /dev/null
@@ -1,91 +0,0 @@
-SUMMARY = "Rotates, compresses, removes and mails system log files"
-SECTION = "console/utils"
-HOMEPAGE = "https://github.com/logrotate/logrotate/"
-DESCRIPTION = "The logrotate utility is designed to simplify the administration of log files on a system which generates a lot of log files."
-LICENSE = "GPL-2.0-only"
-
-# TODO: Document coreutils dependency. Why not RDEPENDS? Why not busybox?
-
-DEPENDS="coreutils popt"
-
-LIC_FILES_CHKSUM = "file://COPYING;md5=b234ee4d69f5fce4486a80fdaf4a4263"
-
-UPSTREAM_CHECK_URI = "https://github.com/${BPN}/${BPN}/releases"
-UPSTREAM_CHECK_REGEX = "logrotate-(?P<pver>\d+(\.\d+)+).tar"
-
-SRC_URI = "https://github.com/${BPN}/${BPN}/releases/download/${PV}/${BP}.tar.xz"
-
-SRC_URI[sha256sum] = "ddd5274d684c5c99ca724e8069329f343ebe376e07493d537d9effdc501214ba"
-
-# These CVEs are debian, gentoo or SUSE specific on the way logrotate was installed/used
-CVE_CHECK_IGNORE += "CVE-2011-1548 CVE-2011-1549 CVE-2011-1550"
-
-PACKAGECONFIG ?= "${@bb.utils.filter('DISTRO_FEATURES', 'acl selinux', d)}"
-
-PACKAGECONFIG[acl] = ",,acl"
-PACKAGECONFIG[selinux] = ",,libselinux"
-
-CONFFILES:${PN} += "${localstatedir}/lib/logrotate.status \
- ${sysconfdir}/logrotate.conf \
- ${sysconfdir}/logrotate.d/btmp \
- ${sysconfdir}/logrotate.d/wtmp"
-
-# If RPM_OPT_FLAGS is unset, it adds -g itself rather than obeying our
-# optimization variables, so use it rather than EXTRA_CFLAGS.
-EXTRA_OEMAKE = "\
- LFS= \
- OS_NAME='${OS_NAME}' \
- 'CC=${CC}' \
- 'RPM_OPT_FLAGS=${CFLAGS}' \
- 'EXTRA_LDFLAGS=${LDFLAGS}' \
- ${@bb.utils.contains('PACKAGECONFIG', 'acl', 'WITH_ACL=yes', '', d)} \
- ${@bb.utils.contains('PACKAGECONFIG', 'selinux', 'WITH_SELINUX=yes', '', d)} \
-"
-
-# OS_NAME in the makefile defaults to `uname -s`. The behavior for
-# freebsd/netbsd is questionable, so leave it as Linux, which only sets
-# INSTALL=install and BASEDIR=/usr.
-OS_NAME = "Linux"
-
-inherit autotools systemd
-
-SYSTEMD_SERVICE:${PN} = "\
- ${BPN}.service \
- ${BPN}.timer \
-"
-
-LOGROTATE_OPTIONS ?= ""
-
-LOGROTATE_SYSTEMD_TIMER_BASIS ?= "daily"
-LOGROTATE_SYSTEMD_TIMER_ACCURACY ?= "12h"
-LOGROTATE_SYSTEMD_TIMER_PERSISTENT ?= "true"
-
-do_install(){
- oe_runmake install DESTDIR=${D} PREFIX=${D} MANDIR=${mandir}
- mkdir -p ${D}${sysconfdir}/logrotate.d
- mkdir -p ${D}${localstatedir}/lib
- install -p -m 644 ${S}/examples/logrotate.conf ${D}${sysconfdir}/logrotate.conf
- install -p -m 644 ${S}/examples/btmp ${D}${sysconfdir}/logrotate.d/btmp
- install -p -m 644 ${S}/examples/wtmp ${D}${sysconfdir}/logrotate.d/wtmp
- touch ${D}${localstatedir}/lib/logrotate.status
-
- if ${@bb.utils.contains('DISTRO_FEATURES', 'systemd', 'true', 'false', d)}; then
- install -d ${D}${systemd_system_unitdir}
- install -m 0644 ${S}/examples/logrotate.service ${D}${systemd_system_unitdir}/logrotate.service
- install -m 0644 ${S}/examples/logrotate.timer ${D}${systemd_system_unitdir}/logrotate.timer
- [ -z "${LOGROTATE_OPTIONS}" ] ||
- sed -ri \
- -e 's|(ExecStart=.*/logrotate.*)$|\1 ${LOGROTATE_OPTIONS}|g' \
- ${D}${systemd_system_unitdir}/logrotate.service
- sed -ri \
- -e 's|(OnCalendar=).*$|\1${LOGROTATE_SYSTEMD_TIMER_BASIS}|g' \
- -e 's|(AccuracySec=).*$|\1${LOGROTATE_SYSTEMD_TIMER_ACCURACY}|g' \
- -e 's|(Persistent=).*$|\1${LOGROTATE_SYSTEMD_TIMER_PERSISTENT}|g' \
- ${D}${systemd_system_unitdir}/logrotate.timer
- fi
-
- if ${@bb.utils.contains('DISTRO_FEATURES', 'sysvinit', 'true', 'false', d)}; then
- mkdir -p ${D}${sysconfdir}/cron.daily
- install -p -m 0755 ${S}/examples/logrotate.cron ${D}${sysconfdir}/cron.daily/logrotate
- fi
-}
diff --git a/meta/recipes-extended/logrotate/logrotate_3.21.0.bb b/meta/recipes-extended/logrotate/logrotate_3.21.0.bb
new file mode 100644
index 0000000000..10a6149abc
--- /dev/null
+++ b/meta/recipes-extended/logrotate/logrotate_3.21.0.bb
@@ -0,0 +1,114 @@
+SUMMARY = "Rotates, compresses, removes and mails system log files"
+SECTION = "console/utils"
+HOMEPAGE = "https://github.com/logrotate/logrotate/"
+DESCRIPTION = "The logrotate utility is designed to simplify the administration of log files on a system which generates a lot of log files."
+LICENSE = "GPL-2.0-only"
+
+# TODO: Document coreutils dependency. Why not RDEPENDS? Why not busybox?
+
+DEPENDS="coreutils popt"
+
+LIC_FILES_CHKSUM = "file://COPYING;md5=b234ee4d69f5fce4486a80fdaf4a4263"
+
+SRC_URI = "${GITHUB_BASE_URI}/download/${PV}/${BP}.tar.xz \
+ file://run-ptest \
+ "
+
+SRC_URI[sha256sum] = "8fa12015e3b8415c121fc9c0ca53aa872f7b0702f543afda7e32b6c4900f6516"
+
+CVE_STATUS_GROUPS = "CVE_STATUS_RECIPE"
+CVE_STATUS_RECIPE = "CVE-2011-1548 CVE-2011-1549 CVE-2011-1550"
+CVE_STATUS_RECIPE[status] = "not-applicable-platform: CVE is debian, gentoo or SUSE specific on the way logrotate was installed/used"
+
+PACKAGECONFIG ?= "${@bb.utils.filter('DISTRO_FEATURES', 'acl selinux', d)}"
+
+PACKAGECONFIG[acl] = ",,acl"
+PACKAGECONFIG[selinux] = ",,libselinux"
+
+CONFFILES:${PN} += "${localstatedir}/lib/logrotate.status \
+ ${sysconfdir}/logrotate.conf \
+ ${sysconfdir}/logrotate.d/btmp \
+ ${sysconfdir}/logrotate.d/wtmp"
+
+# If RPM_OPT_FLAGS is unset, it adds -g itself rather than obeying our
+# optimization variables, so use it rather than EXTRA_CFLAGS.
+EXTRA_OEMAKE = "\
+ LFS= \
+ OS_NAME='${OS_NAME}' \
+ 'CC=${CC}' \
+ 'RPM_OPT_FLAGS=${CFLAGS}' \
+ 'EXTRA_LDFLAGS=${LDFLAGS}' \
+ ${@bb.utils.contains('PACKAGECONFIG', 'acl', 'WITH_ACL=yes', '', d)} \
+ ${@bb.utils.contains('PACKAGECONFIG', 'selinux', 'WITH_SELINUX=yes', '', d)} \
+"
+
+# OS_NAME in the makefile defaults to `uname -s`. The behavior for
+# freebsd/netbsd is questionable, so leave it as Linux, which only sets
+# INSTALL=install and BASEDIR=/usr.
+OS_NAME = "Linux"
+
+inherit autotools systemd github-releases ptest
+
+SYSTEMD_SERVICE:${PN} = "\
+ ${BPN}.service \
+ ${BPN}.timer \
+"
+
+LOGROTATE_OPTIONS ?= ""
+
+LOGROTATE_SYSTEMD_TIMER_BASIS ?= "daily"
+LOGROTATE_SYSTEMD_TIMER_ACCURACY ?= "12h"
+LOGROTATE_SYSTEMD_TIMER_PERSISTENT ?= "true"
+
+do_install(){
+ oe_runmake install DESTDIR=${D} PREFIX=${D} MANDIR=${mandir}
+ mkdir -p ${D}${sysconfdir}/logrotate.d
+ mkdir -p ${D}${localstatedir}/lib
+ install -p -m 644 ${S}/examples/logrotate.conf ${D}${sysconfdir}/logrotate.conf
+ install -p -m 644 ${S}/examples/btmp ${D}${sysconfdir}/logrotate.d/btmp
+ install -p -m 644 ${S}/examples/wtmp ${D}${sysconfdir}/logrotate.d/wtmp
+
+ if ${@bb.utils.contains('DISTRO_FEATURES', 'systemd', 'true', 'false', d)}; then
+ install -d ${D}${systemd_system_unitdir}
+ install -m 0644 ${S}/examples/logrotate.service ${D}${systemd_system_unitdir}/logrotate.service
+ install -m 0644 ${S}/examples/logrotate.timer ${D}${systemd_system_unitdir}/logrotate.timer
+ [ -z "${LOGROTATE_OPTIONS}" ] ||
+ sed -ri \
+ -e 's|(ExecStart=.*/logrotate.*)$|\1 ${LOGROTATE_OPTIONS}|g' \
+ ${D}${systemd_system_unitdir}/logrotate.service
+ sed -ri \
+ -e 's|(OnCalendar=).*$|\1${LOGROTATE_SYSTEMD_TIMER_BASIS}|g' \
+ -e 's|(AccuracySec=).*$|\1${LOGROTATE_SYSTEMD_TIMER_ACCURACY}|g' \
+ -e 's|(Persistent=).*$|\1${LOGROTATE_SYSTEMD_TIMER_PERSISTENT}|g' \
+ ${D}${systemd_system_unitdir}/logrotate.timer
+ fi
+
+ if ${@bb.utils.contains('DISTRO_FEATURES', 'sysvinit', 'true', 'false', d)}; then
+ mkdir -p ${D}${sysconfdir}/cron.daily
+ install -p -m 0755 ${S}/examples/logrotate.cron ${D}${sysconfdir}/cron.daily/logrotate
+ fi
+}
+
+do_install_ptest() {
+ cp -r ${S}/test/* ${D}${PTEST_PATH}
+ cp ${S}/test-driver ${D}${PTEST_PATH}
+ cp ${B}/test/Makefile ${D}${PTEST_PATH}
+
+ # Do not rebuild Makefile
+ sed -i 's/^Makefile:/_Makefile:/' ${D}${PTEST_PATH}/Makefile
+
+ # Fix top_builddir and top_srcdir
+ sed -e 's/^top_builddir = \(.*\)/top_builddir = ./' \
+ -e 's/^top_srcdir = \(.*\)/top_srcdir = ./' \
+ -i ${D}${PTEST_PATH}/Makefile
+
+ # Replace bash with sh
+ sed -i 's,/bin/bash,/bin/sh,' ${D}${PTEST_PATH}/Makefile
+
+ # Replace gawk with awk
+ sed -i 's/gawk/awk/' ${D}${PTEST_PATH}/Makefile
+ ln -s ${sbindir}/logrotate ${D}${PTEST_PATH}
+}
+
+# coreutils is needed to have "readlink"
+RDEPENDS:${PN}-ptest += "make coreutils"
diff --git a/meta/recipes-extended/lsb/lsb-release_1.4.bb b/meta/recipes-extended/lsb/lsb-release_1.4.bb
index ad16554e98..00d8183a4f 100644
--- a/meta/recipes-extended/lsb/lsb-release_1.4.bb
+++ b/meta/recipes-extended/lsb/lsb-release_1.4.bb
@@ -14,10 +14,9 @@ SRC_URI = "${SOURCEFORGE_MIRROR}/project/lsb/lsb_release/1.4/lsb-release-1.4.tar
file://help2man-reproducibility.patch \
"
-SRC_URI[md5sum] = "30537ef5a01e0ca94b7b8eb6a36bb1e4"
SRC_URI[sha256sum] = "99321288f8d62e7a1d485b7c6bdccf06766fb8ca603c6195806e4457fdf17172"
-UPSTREAM_CHECK_URI = "http://sourceforge.net/projects/lsb/files/lsb_release/"
+UPSTREAM_CHECK_URI = "https://sourceforge.net/projects/lsb/files/lsb_release/"
UPSTREAM_CHECK_REGEX = "/lsb_release/(?P<pver>(\d+[\.\-_]*)+)/"
CLEANBROKEN = "1"
diff --git a/meta/recipes-extended/lsof/files/lsof-remove-host-information.patch b/meta/recipes-extended/lsof/files/lsof-remove-host-information.patch
deleted file mode 100644
index 08f083cf83..0000000000
--- a/meta/recipes-extended/lsof/files/lsof-remove-host-information.patch
+++ /dev/null
@@ -1,75 +0,0 @@
-From 4233b5ac1629c225a7a80f33efc0eff527a95851 Mon Sep 17 00:00:00 2001
-From: Li Wang <li.wang@windriver.com>
-Date: Wed, 30 Aug 2017 15:05:16 +0800
-Subject: [PATCH] Remove host information from version.h
-
-make lsof not include host information
-
-Upstream-Status: Inappropriate [embedded specific]
-
-Signed-off-by: Li Wang <li.wang@windriver.com>
-
----
- dialects/linux/Makefile | 48 +++++++----------------------------------
- 1 file changed, 8 insertions(+), 40 deletions(-)
-
-diff --git a/dialects/linux/Makefile b/dialects/linux/Makefile
-index 46c83c2..50f03cc 100644
---- a/dialects/linux/Makefile
-+++ b/dialects/linux/Makefile
-@@ -84,47 +84,15 @@ version.h: FRC
- @echo Constructing version.h
- @rm -f version.h
- @echo '#define LSOF_BLDCMT "${LSOF_BLDCMT}"' > version.h;
-- @echo '#define LSOF_CC "${CC}"' >> version.h
-- @echo '#define LSOF_CCV "${CCV}"' >> version.h
-- @echo '#define LSOF_CCFLAGS "'`echo ${CFLAGS} | sed 's/\\\\(/\\(/g' | sed 's/\\\\)/\\)/g' | sed 's/"/\\\\"/g'`'"' >> version.h
-+ @echo '#define LSOF_CC ""' >> version.h
-+ @echo '#define LSOF_CCV ""' >> version.h
-+ @echo '#define LSOF_CCFLAGS ""' >> version.h
- @echo '#define LSOF_CINFO "${CINFO}"' >> version.h
-- @if [ "X${LSOF_HOST}" = "X" ]; then \
-- echo '#define LSOF_HOST "'`uname -n`'"' >> version.h; \
-- else \
-- if [ "${LSOF_HOST}" = "none" ]; then \
-- echo '#define LSOF_HOST ""' >> version.h; \
-- else \
-- echo '#define LSOF_HOST "${LSOF_HOST}"' >> version.h; \
-- fi \
-- fi
-- @echo '#define LSOF_LDFLAGS "${CFGL}"' >> version.h
-- @if [ "X${LSOF_LOGNAME}" = "X" ]; then \
-- echo '#define LSOF_LOGNAME "${LOGNAME}"' >> version.h; \
-- else \
-- if [ "${LSOF_LOGNAME}" = "none" ]; then \
-- echo '#define LSOF_LOGNAME ""' >> version.h; \
-- else \
-- echo '#define LSOF_LOGNAME "${LSOF_LOGNAME}"' >> version.h; \
-- fi; \
-- fi
-- @if [ "X${LSOF_SYSINFO}" = "X" ]; then \
-- echo '#define LSOF_SYSINFO "'`uname -a`'"' >> version.h; \
-- else \
-- if [ "${LSOF_SYSINFO}" = "none" ]; then \
-- echo '#define LSOF_SYSINFO ""' >> version.h; \
-- else \
-- echo '#define LSOF_SYSINFO "${LSOF_SYSINFO}"' >> version.h; \
-- fi \
-- fi
-- @if [ "X${LSOF_USER}" = "X" ]; then \
-- echo '#define LSOF_USER "${USER}"' >> version.h; \
-- else \
-- if [ "${LSOF_USER}" = "none" ]; then \
-- echo '#define LSOF_USER ""' >> version.h; \
-- else \
-- echo '#define LSOF_USER "${LSOF_USER}"' >> version.h; \
-- fi \
-- fi
-+ @echo '#define LSOF_HOST ""' >> version.h;
-+ @echo '#define LSOF_LDFLAGS ""' >> version.h
-+ @echo '#define LSOF_LOGNAME ""' >> version.h;
-+ @echo '#define LSOF_SYSINFO ""' >> version.h;
-+ @echo '#define LSOF_USER ""' >> version.h;
- @sed '/VN/s/.ds VN \(.*\)/#define LSOF_VERSION "\1"/' < version >> version.h
-
- FRC:
diff --git a/meta/recipes-extended/lsof/files/remove-host-information.patch b/meta/recipes-extended/lsof/files/remove-host-information.patch
new file mode 100644
index 0000000000..1f2b4afa69
--- /dev/null
+++ b/meta/recipes-extended/lsof/files/remove-host-information.patch
@@ -0,0 +1,123 @@
+From 4f05492ac07a7bbaf06dd8265b9cba329e1b53e9 Mon Sep 17 00:00:00 2001
+From: Ross Burton <ross.burton@arm.com>
+Date: Wed, 30 Aug 2017 15:05:16 +0800
+Subject: [PATCH] lsof: remove host information from version.h
+
+lsof doesn't embed the username or hostname in the build if SOURCE_DATE_EPOCH is
+defined, but this still embeds build paths. Delete all of the host details to
+ensure that no host information is leaked into the binary.
+
+Upstream-Status: Inappropriate
+Signed-off-by: Ross Burton <ross.burton@arm.com>
+Signed-off-by: Alexander Kanavin <alex@linutronix.de>
+---
+ autotools/version.h.in | 16 ++++-----
+ lib/dialects/linux/Makefile | 69 ++++++-------------------------------
+ 2 files changed, 19 insertions(+), 66 deletions(-)
+
+diff --git a/autotools/version.h.in b/autotools/version.h.in
+index aac0b80..2e635a5 100644
+--- a/autotools/version.h.in
++++ b/autotools/version.h.in
+@@ -37,13 +37,13 @@
+
+ #include "config.h"
+ #define LSOF_VERSION PACKAGE_VERSION
+-#define LSOF_HOST "@host@"
+-#define LSOF_LOGNAME "@logname@"
+-#define LSOF_USER "@user@"
+-#define LSOF_CC "@cc@"
+-#define LSOF_CCV "@ccv@"
+-#define LSOF_CCFLAGS "@ccflags@"
+-#define LSOF_LDFLAGS "@ldflags@"
+-#define LSOF_SYSINFO "@sysinfo@"
++#define LSOF_HOST ""
++#define LSOF_LOGNAME ""
++#define LSOF_USER ""
++#define LSOF_CC ""
++#define LSOF_CCV ""
++#define LSOF_CCFLAGS ""
++#define LSOF_LDFLAGS ""
++#define LSOF_SYSINFO ""
+
+ #endif
+diff --git a/lib/dialects/linux/Makefile b/lib/dialects/linux/Makefile
+index f8adaa6..7a79ca7 100644
+--- a/lib/dialects/linux/Makefile
++++ b/lib/dialects/linux/Makefile
+@@ -83,64 +83,17 @@ ${LIB}: FRC
+ version.h: FRC
+ @echo Constructing version.h
+ @rm -f version.h
+- @echo '#define LSOF_BLDCMT "${LSOF_BLDCMT}"' > version.h;
+- @echo '#define LSOF_CC "${CC}"' >> version.h
+- @echo '#define LSOF_CCV "${CCV}"' >> version.h
+- @echo '#define LSOF_CCFLAGS "'`echo ${CFLAGS} | sed 's/\\\\(/\\(/g' | sed 's/\\\\)/\\)/g' | sed 's/"/\\\\"/g'`'"' >> version.h
+- @echo '#define LSOF_CINFO "${CINFO}"' >> version.h
+- @if [ "X${LSOF_HOST}" = "X" ]; then \
+- if [ "X${SOURCE_DATE_EPOCH}" = "X" ]; then \
+- echo '#define LSOF_HOST "'`uname -n`'"' >> version.h; \
+- else \
+- echo '#define LSOF_HOST ""' >> version.h; \
+- fi \
+- else \
+- if [ "${LSOF_HOST}" = "none" ]; then \
+- echo '#define LSOF_HOST ""' >> version.h; \
+- else \
+- echo '#define LSOF_HOST "${LSOF_HOST}"' >> version.h; \
+- fi \
+- fi
+- @echo '#define LSOF_LDFLAGS "${CFGL}"' >> version.h
+- @if [ "X${LSOF_LOGNAME}" = "X" ]; then \
+- if [ "X${SOURCE_DATE_EPOCH}" = "X" ]; then \
+- echo '#define LSOF_LOGNAME "${LOGNAME}"' >> version.h; \
+- else \
+- echo '#define LSOF_LOGNAME ""' >> version.h; \
+- fi \
+- else \
+- if [ "${LSOF_LOGNAME}" = "none" ]; then \
+- echo '#define LSOF_LOGNAME ""' >> version.h; \
+- else \
+- echo '#define LSOF_LOGNAME "${LSOF_LOGNAME}"' >> version.h; \
+- fi; \
+- fi
+- @if [ "X${LSOF_SYSINFO}" = "X" ]; then \
+- if [ "X${SOURCE_DATE_EPOCH}" = "X" ]; then \
+- echo '#define LSOF_SYSINFO "'`uname -a`'"' >> version.h; \
+- else \
+- echo '#define LSOF_SYSINFO ""' >> version.h; \
+- fi \
+- else \
+- if [ "${LSOF_SYSINFO}" = "none" ]; then \
+- echo '#define LSOF_SYSINFO ""' >> version.h; \
+- else \
+- echo '#define LSOF_SYSINFO "${LSOF_SYSINFO}"' >> version.h; \
+- fi \
+- fi
+- @if [ "X${LSOF_USER}" = "X" ]; then \
+- if [ "X${SOURCE_DATE_EPOCH}" = "X" ]; then \
+- echo '#define LSOF_USER "${USER}"' >> version.h; \
+- else \
+- echo '#define LSOF_USER ""' >> version.h; \
+- fi \
+- else \
+- if [ "${LSOF_USER}" = "none" ]; then \
+- echo '#define LSOF_USER ""' >> version.h; \
+- else \
+- echo '#define LSOF_USER "${LSOF_USER}"' >> version.h; \
+- fi \
+- fi
++
++ @echo '#define LSOF_BLDCMT ""' > version.h;
++ @echo '#define LSOF_CC ""' >> version.h
++ @echo '#define LSOF_CCV ""' >> version.h
++ @echo '#define LSOF_CCFLAGS ""' >> version.h
++ @echo '#define LSOF_CINFO ""' >> version.h
++ @echo '#define LSOF_HOST ""' >> version.h
++ @echo '#define LSOF_LDFLAGS ""' >> version.h
++ @echo '#define LSOF_LOGNAME ""' >> version.h
++ @echo '#define LSOF_SYSINFO ""' >> version.h
++ @echo '#define LSOF_USER ""' >> version.h
+ @sed '/VN/s/.ds VN \(.*\)/#define LSOF_VERSION "\1"/' < version >> version.h
+
+ FRC:
diff --git a/meta/recipes-extended/lsof/lsof_4.95.0.bb b/meta/recipes-extended/lsof/lsof_4.95.0.bb
deleted file mode 100644
index f380de0b6b..0000000000
--- a/meta/recipes-extended/lsof/lsof_4.95.0.bb
+++ /dev/null
@@ -1,46 +0,0 @@
-SUMMARY = "LiSt Open Files tool"
-DESCRIPTION = "Lsof is a Unix-specific diagnostic tool. \
-Its name stands for LiSt Open Files, and it does just that."
-HOMEPAGE = "http://people.freebsd.org/~abe/"
-SECTION = "devel"
-LICENSE = "Spencer-94"
-LIC_FILES_CHKSUM = "file://00README;beginline=645;endline=679;md5=964df275d26429ba3b39dbb9f205172a"
-
-# Upstream lsof releases are hosted on an ftp server which times out download
-# attempts from hosts for which it can not perform a DNS reverse-lookup (See:
-# https://people.freebsd.org/~abe/ ). http://www.mirrorservice.org seems to be
-# the most commonly used alternative.
-
-SRC_URI = "git://github.com/lsof-org/lsof;branch=master;protocol=https \
- file://lsof-remove-host-information.patch \
- "
-
-SRCREV = "67d8c828e7bdc01ba93f8ff79765dd424da0c9d7"
-
-S = "${WORKDIR}/git"
-
-export LSOF_INCLUDE = "${STAGING_INCDIR}"
-
-do_configure () {
- export LSOF_AR="${AR} cr"
- export LSOF_RANLIB="${RANLIB}"
- if [ "x${GLIBCVERSION}" != "x" ]; then
- LINUX_CLIB=`echo ${GLIBCVERSION} |sed -e 's,\.,,g'`
- LINUX_CLIB="-DGLIBCV=${LINUX_CLIB}"
- export LINUX_CLIB
- fi
- yes | ./Configure linux
-}
-
-export I = "${STAGING_INCDIR}"
-export L = "${STAGING_INCDIR}"
-
-do_compile () {
- oe_runmake 'CC=${CC}' 'CFGL=${LDFLAGS} -L./lib -llsof' 'DEBUG=' 'INCL=${CFLAGS}'
-}
-
-do_install () {
- install -d ${D}${sbindir} ${D}${mandir}/man8
- install -m 0755 lsof ${D}${sbindir}/lsof
- install -m 0644 Lsof.8 ${D}${mandir}/man8/lsof.8
-}
diff --git a/meta/recipes-extended/lsof/lsof_4.99.3.bb b/meta/recipes-extended/lsof/lsof_4.99.3.bb
new file mode 100644
index 0000000000..93e91fb427
--- /dev/null
+++ b/meta/recipes-extended/lsof/lsof_4.99.3.bb
@@ -0,0 +1,23 @@
+SUMMARY = "LiSt Open Files tool"
+DESCRIPTION = "Lsof is a Unix-specific diagnostic tool. \
+Its name stands for LiSt Open Files, and it does just that."
+HOMEPAGE = "http://people.freebsd.org/~abe/"
+SECTION = "devel"
+LICENSE = "Spencer-94"
+LIC_FILES_CHKSUM = "file://COPYING;md5=a48ac97a8550eff12395a2c0d6151510"
+
+SRC_URI = "git://github.com/lsof-org/lsof;branch=master;protocol=https \
+ file://remove-host-information.patch"
+SRCREV = "2e4c7a1a9bc7258dc5b6a3ab28ebca44174279a8"
+
+S = "${WORKDIR}/git"
+
+inherit update-alternatives autotools pkgconfig manpages
+PACKAGECONFIG[manpages] = ""
+
+DEPENDS += "groff-native"
+
+ALTERNATIVE:${PN} = "lsof"
+ALTERNATIVE_LINK_NAME[lsof] = "${bindir}/lsof"
+# Make our priority higher than busybox
+ALTERNATIVE_PRIORITY = "100"
diff --git a/meta/recipes-extended/ltp/ltp/0001-Remove-OOM-tests-from-runtest-mm.patch b/meta/recipes-extended/ltp/ltp/0001-Remove-OOM-tests-from-runtest-mm.patch
index 6d945700be..cc98079651 100644
--- a/meta/recipes-extended/ltp/ltp/0001-Remove-OOM-tests-from-runtest-mm.patch
+++ b/meta/recipes-extended/ltp/ltp/0001-Remove-OOM-tests-from-runtest-mm.patch
@@ -14,10 +14,10 @@ Signed-off-by: Petr Vorel <petr.vorel@gmail.com>
1 file changed, 6 deletions(-)
diff --git a/runtest/mm b/runtest/mm
-index 6537666a9..e5a091a5a 100644
+index d859b331c..3c2962f44 100644
--- a/runtest/mm
+++ b/runtest/mm
-@@ -73,12 +73,6 @@ ksm06_2 ksm06 -n 8000
+@@ -74,12 +74,6 @@ ksm07 ksm07
cpuset01 cpuset01
@@ -31,5 +31,5 @@ index 6537666a9..e5a091a5a 100644
thp01 thp01 -I 120
--
-2.33.0
+2.43.0
diff --git a/meta/recipes-extended/ltp/ltp/0001-metadata-parse.sh-sort-filelist-for-reproducibility.patch b/meta/recipes-extended/ltp/ltp/0001-metadata-parse.sh-sort-filelist-for-reproducibility.patch
deleted file mode 100644
index e8d9f212a9..0000000000
--- a/meta/recipes-extended/ltp/ltp/0001-metadata-parse.sh-sort-filelist-for-reproducibility.patch
+++ /dev/null
@@ -1,28 +0,0 @@
-From 4aad23f208cc7725cd61bbe5aaadb9994c794cd0 Mon Sep 17 00:00:00 2001
-From: Alexander Kanavin <alex@linutronix.de>
-Date: Wed, 26 Jan 2022 20:58:46 +0100
-Subject: [PATCH] metadata/parse.sh: sort filelist for reproducibility
-
-find does not guarantee the order of the files.
-
-Upstream-Status: Submitted [https://github.com/linux-test-project/ltp/pull/907]
-Signed-off-by: Alexander Kanavin <alex@linutronix.de>
----
- metadata/parse.sh | 2 +-
-
-diff --git a/metadata/parse.sh b/metadata/parse.sh
-index b43d024c68..1811665bfe 100755
---- a/metadata/parse.sh
-+++ b/metadata/parse.sh
-@@ -29,7 +29,7 @@ echo ' "tests": {'
-
- first=1
-
--for test in `find testcases/ -name '*.c'`; do
-+for test in `find testcases/ -name '*.c'|sort`; do
- a=$($top_builddir/metadata/metaparse -Iinclude -Itestcases/kernel/syscalls/utils/ "$test")
- if [ -n "$a" ]; then
- if [ -z "$first" ]; then
---
-2.20.1
-
diff --git a/meta/recipes-extended/ltp/ltp/0001-scenario_groups-default-remove-connectors.patch b/meta/recipes-extended/ltp/ltp/0001-scenario_groups-default-remove-connectors.patch
new file mode 100644
index 0000000000..4a50d1ae38
--- /dev/null
+++ b/meta/recipes-extended/ltp/ltp/0001-scenario_groups-default-remove-connectors.patch
@@ -0,0 +1,34 @@
+From 14c710cae38aa96bd8d681e891b6a1d691f99988 Mon Sep 17 00:00:00 2001
+From: Xiangyu Chen <xiangyu.chen@windriver.com>
+Date: Thu, 14 Mar 2024 09:47:10 +0800
+Subject: [PATCH] scenario_groups/default: remove connectors
+
+runtest/connectors was removed in 9b642d89c, thus update scenario_groups/default.
+
+Fixes: 9b642d89c ("runtest: Merge runtest/connectors to kernel_misc")
+Closes: https://github.com/linux-test-project/ltp/pull/1144
+
+Upstream-Status: Backport from
+[https://github.com/linux-test-project/ltp/commit/14c710ca]
+
+Reviewed-by: Petr Vorel <pvorel@suse.cz>
+Signed-off-by: Xiangyu Chen <xiangyu.chen@windriver.com>
+---
+ scenario_groups/default | 1 -
+ 1 file changed, 1 deletion(-)
+
+diff --git a/scenario_groups/default b/scenario_groups/default
+index f17b2061a..ec77d8fb8 100644
+--- a/scenario_groups/default
++++ b/scenario_groups/default
+@@ -16,7 +16,6 @@ controllers
+ filecaps
+ cap_bounds
+ fcntl-locktests
+-connectors
+ power_management_tests
+ hugetlb
+ commands
+--
+2.34.1
+
diff --git a/meta/recipes-extended/ltp/ltp/disable_hanging_tests.patch b/meta/recipes-extended/ltp/ltp/disable_hanging_tests.patch
deleted file mode 100644
index f25a692dc0..0000000000
--- a/meta/recipes-extended/ltp/ltp/disable_hanging_tests.patch
+++ /dev/null
@@ -1,27 +0,0 @@
-This patch disables tests which we've found "hang" on our infrastructure.
-
-proc01:
-https://autobuilder.yoctoproject.org/typhoon/#/builders/96/builds/1748 (arm)
-https://autobuilder.yoctoproject.org/typhoon/#/builders/96/builds/1781 (arm)
-https://autobuilder.yoctoproject.org/typhoon/#/builders/96/builds/2782 (arm)
-(was trying to read /proc/kmsg, looks like a horrible test anyway)
-
-Upstream-Status: Inappropriate [OE Configuration]
-Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
-
-Index: git/runtest/fs
-===================================================================
---- git.orig/runtest/fs
-+++ git/runtest/fs
-@@ -64,11 +64,6 @@ writetest01 writetest
- #Also run the fs_di (Data Integrity tests)
- fs_di fs_di -d $TMPDIR
-
--# Read every file in /proc. Not likely to crash, but does enough
--# to disturb the kernel. A good kernel latency killer too.
--# Was not sure why it should reside in runtest/crashme and won't get tested ever
--proc01 proc01 -m 128
--
- read_all_dev read_all -d /dev -p -q -r 3
- read_all_proc read_all -d /proc -q -r 3
- read_all_sys read_all -d /sys -q -r 3
diff --git a/meta/recipes-extended/ltp/ltp_20220121.bb b/meta/recipes-extended/ltp/ltp_20220121.bb
deleted file mode 100644
index 8a13dcf9d0..0000000000
--- a/meta/recipes-extended/ltp/ltp_20220121.bb
+++ /dev/null
@@ -1,138 +0,0 @@
-SUMMARY = "Linux Test Project"
-DESCRIPTION = "The Linux Test Project is a joint project with SGI, IBM, OSDL, and Bull with a goal to deliver test suites to the open source community that validate the reliability, robustness, and stability of Linux. The Linux Test Project is a collection of tools for testing the Linux kernel and related features."
-HOMEPAGE = "https://linux-test-project.github.io/"
-SECTION = "console/utils"
-LICENSE = "GPL-2.0-only & GPL-2.0-or-later & LGPL-2.0-or-later & LGPL-2.1-or-later & BSD-2-Clause"
-LIC_FILES_CHKSUM = "\
- file://COPYING;md5=b234ee4d69f5fce4486a80fdaf4a4263 \
- file://testcases/open_posix_testsuite/COPYING;md5=48b1c5ec633e3e30ec2cf884ae699947 \
- file://testcases/network/can/filter-tests/COPYING;md5=5b155ea7d7f86eae8e8832955d8b70bc \
-"
-
-DEPENDS = "attr libaio libcap acl openssl zip-native"
-DEPENDS:append:libc-musl = " fts "
-EXTRA_OEMAKE:append:libc-musl = " LIBC=musl "
-EXTRA_OECONF:append:libc-musl = " LIBS=-lfts "
-
-# since ltp contains x86-64 assembler which uses the frame-pointer register,
-# set -fomit-frame-pointer x86-64 to handle cases where optimisation
-# is set to -O0 or frame pointers have been enabled by -fno-omit-frame-pointer
-# earlier in CFLAGS, etc.
-CFLAGS:append:x86-64 = " -fomit-frame-pointer"
-
-CFLAGS:append:powerpc64 = " -D__SANE_USERSPACE_TYPES__"
-CFLAGS:append:mipsarchn64 = " -D__SANE_USERSPACE_TYPES__"
-SRCREV = "b0561ad8d9ee9fe1244b5385e941eb65a21e91a1"
-
-SRC_URI = "git://github.com/linux-test-project/ltp.git;branch=master;protocol=https \
- file://0001-Remove-OOM-tests-from-runtest-mm.patch \
- file://0001-metadata-parse.sh-sort-filelist-for-reproducibility.patch \
- file://disable_hanging_tests.patch \
- "
-
-S = "${WORKDIR}/git"
-
-inherit autotools-brokensep pkgconfig
-
-TARGET_CC_ARCH += "${LDFLAGS}"
-
-export prefix = "/opt/${PN}"
-export exec_prefix = "/opt/${PN}"
-
-PACKAGECONFIG[numa] = "--with-numa, --without-numa, numactl,"
-EXTRA_AUTORECONF += "-I ${S}/testcases/realtime/m4"
-EXTRA_OECONF = " --with-realtime-testsuite --with-open-posix-testsuite "
-# ltp network/rpc test cases ftbfs when libtirpc is found
-EXTRA_OECONF += " --without-tirpc "
-
-do_compile() {
- oe_runmake HOSTCC="${CC_FOR_BUILD}" HOST_CFLAGS="${CFLAGS_FOR_BUILD}" HOST_LDFLAGS="${LDFLAGS_FOR_BUILD}"
-}
-
-do_install(){
- install -d ${D}${prefix}/
- oe_runmake DESTDIR=${D} SKIP_IDCHECK=1 install include-install
-
- # fixup not deploy STPfailure_report.pl to avoid confusing about it fails to run
- # as it lacks dependency on some perl moudle such as LWP::Simple
- # And this script previously works as a tool for analyzing failures from LTP
- # runs on the OSDL's Scaleable Test Platform (STP) and it mainly accesses
- # http://khack.osdl.org to retrieve ltp test results run on
- # OSDL's Scaleable Test Platform, but now http://khack.osdl.org unaccessible
- rm -rf ${D}${prefix}/bin/STPfailure_report.pl
-
- # Copy POSIX test suite into ${D}${prefix}/testcases by manual
- cp -r testcases/open_posix_testsuite ${D}${prefix}/testcases
-
- # Makefile were configured in the build system
- find ${D}${prefix} -name Makefile | xargs -n 1 sed -i \
- -e 's@[^ ]*-fdebug-prefix-map=[^ "]*@@g' \
- -e 's@[^ ]*-fmacro-prefix-map=[^ "]*@@g' \
- -e 's@[^ ]*-ffile-prefix-map=[^ "]*@@g' \
- -e 's@[^ ]*--sysroot=[^ "]*@@g'
-
- # The controllers memcg_stree test seems to cause us hangs and takes 900s
- # (maybe we expect more regular output?), anyhow, skip it
- sed -e '/^memcg_stress/d' -i ${D}${prefix}/runtest/controllers
-}
-
-RDEPENDS:${PN} = "\
- attr \
- bash \
- bc \
- coreutils \
- cpio \
- cronie \
- curl \
- e2fsprogs \
- e2fsprogs-mke2fs \
- expect \
- file \
- gawk \
- gdb \
- gzip \
- iproute2 \
- ldd \
- libaio \
- logrotate \
- net-tools \
- perl \
- python3-core \
- procps \
- quota \
- unzip \
- util-linux \
- which \
- tar \
-"
-
-FILES:${PN} += "${prefix}/* ${prefix}/runtest/* ${prefix}/scenario_groups/* ${prefix}/testcases/bin/* ${prefix}/testcases/bin/*/bin/* ${prefix}/testscripts/* ${prefix}/testcases/open_posix_testsuite/* ${prefix}/testcases/open_posix_testsuite/conformance/* ${prefix}/testcases/open_posix_testsuite/Documentation/* ${prefix}/testcases/open_posix_testsuite/functional/* ${prefix}/testcases/open_posix_testsuite/include/* ${prefix}/testcases/open_posix_testsuite/scripts/* ${prefix}/testcases/open_posix_testsuite/stress/* ${prefix}/testcases/open_posix_testsuite/tools/* ${prefix}/testcases/data/nm01/lib.a ${prefix}/lib/libmem.a"
-
-# Avoid stripping some generated binaries otherwise some of the ltp tests such as ldd01 & nm01 fail
-INHIBIT_PACKAGE_STRIP_FILES = "${prefix}/testcases/bin/nm01 ${prefix}/testcases/bin/ldd01"
-INSANE_SKIP:${PN} += "already-stripped staticdev"
-
-remove_broken_musl_sources() {
- [ "${TCLIBC}" = "musl" ] || return 0
-
- cd ${S}
- echo "WARNING: remove unsupported tests (until they're fixed)"
-
- # sync with upstream
- # https://github.com/linux-test-project/ltp/blob/master/ci/alpine.sh#L33
- rm -rfv \
- testcases/kernel/syscalls/confstr/confstr01.c \
- testcases/kernel/syscalls/fmtmsg/fmtmsg01.c \
- testcases/kernel/syscalls/getcontext/getcontext01.c \
- testcases/kernel/syscalls/rt_tgsigqueueinfo/rt_tgsigqueueinfo01.c \
- testcases/kernel/syscalls/timer_create/timer_create01.c \
- testcases/kernel/syscalls/timer_create/timer_create03.c \
- utils/benchmark/ebizzy-0.3
-}
-do_patch[postfuncs] += "remove_broken_musl_sources"
-
-# Avoid file dependency scans, as LTP checks for things that may or may not
-# exist on the running system. For instance it has specific checks for
-# csh and ksh which are not typically part of OpenEmbedded systems (but
-# can be added via additional layers.)
-SKIP_FILEDEPS:${PN} = '1'
diff --git a/meta/recipes-extended/ltp/ltp_20240129.bb b/meta/recipes-extended/ltp/ltp_20240129.bb
new file mode 100644
index 0000000000..3e896957d1
--- /dev/null
+++ b/meta/recipes-extended/ltp/ltp_20240129.bb
@@ -0,0 +1,144 @@
+SUMMARY = "Linux Test Project"
+DESCRIPTION = "The Linux Test Project is a joint project with SGI, IBM, OSDL, and Bull with a goal to deliver test suites to the open source community that validate the reliability, robustness, and stability of Linux. The Linux Test Project is a collection of tools for testing the Linux kernel and related features."
+HOMEPAGE = "https://linux-test-project.github.io/"
+SECTION = "console/utils"
+LICENSE = "GPL-2.0-only & GPL-2.0-or-later & LGPL-2.0-or-later & LGPL-2.1-or-later & BSD-2-Clause"
+LIC_FILES_CHKSUM = "\
+ file://COPYING;md5=b234ee4d69f5fce4486a80fdaf4a4263 \
+ file://testcases/open_posix_testsuite/COPYING;md5=48b1c5ec633e3e30ec2cf884ae699947 \
+ file://testcases/network/can/filter-tests/COPYING;md5=5b155ea7d7f86eae8e8832955d8b70bc \
+"
+
+DEPENDS = "attr libaio libcap acl openssl zip-native"
+DEPENDS:append:libc-musl = " fts "
+EXTRA_OEMAKE:append:libc-musl = " LIBC=musl "
+EXTRA_OECONF:append:libc-musl = " LIBS=-lfts "
+
+# since ltp contains x86-64 assembler which uses the frame-pointer register,
+# set -fomit-frame-pointer x86-64 to handle cases where optimisation
+# is set to -O0 or frame pointers have been enabled by -fno-omit-frame-pointer
+# earlier in CFLAGS, etc.
+CFLAGS:append:x86-64 = " -fomit-frame-pointer"
+TUNE_CCARGS:remove:x86 = "-mfpmath=sse"
+TUNE_CCARGS:remove:x86-64 = "-mfpmath=sse"
+
+CFLAGS:append:powerpc64 = " -D__SANE_USERSPACE_TYPES__"
+CFLAGS:append:mipsarchn64 = " -D__SANE_USERSPACE_TYPES__"
+SRCREV = "68737d20556d37364c95776044b1119c0912a36a"
+
+SRC_URI = "git://github.com/linux-test-project/ltp.git;branch=master;protocol=https \
+ file://0001-Remove-OOM-tests-from-runtest-mm.patch \
+ file://0001-scenario_groups-default-remove-connectors.patch \
+ "
+
+S = "${WORKDIR}/git"
+
+inherit autotools-brokensep pkgconfig
+
+# Version 20220527 added KVM test infrastructure which currently fails to build with gold due to
+# SORT_NONE in linker script which isn't supported by gold:
+# https://sourceware.org/bugzilla/show_bug.cgi?id=18097
+# https://github.com/linux-test-project/ltp/commit/3fce2064b54843218d085aae326c8f7ecf3a8c41#diff-39268f0855c634ca48c8993fcd2c95b12a65b79e8d9fa5ccd6b0f5a8785c0dd6R36
+LDFLAGS:append = "${@bb.utils.contains('DISTRO_FEATURES', 'ld-is-gold', ' -fuse-ld=bfd', '', d)}"
+LDFLAGS:append = "${@bb.utils.contains('DISTRO_FEATURES', 'ld-is-lld', ' -fuse-ld=bfd', '', d)}"
+
+# After 0002-kvm-use-LD-instead-of-hardcoding-ld.patch
+# https://github.com/linux-test-project/ltp/commit/f94e0ef3b7280f886384703ef9019aaf2f2dfebb
+# it fails with gold also a bit later when trying to use *-payload.bin
+# http://errors.yoctoproject.org/Errors/Details/663094/
+# work around this by forcing .bfd linked in LD when ld-is-gold is in DISTRO_FEATURES
+KVM_LD = "${@bb.utils.contains_any('DISTRO_FEATURES', 'ld-is-gold ld-is-lld', '${HOST_PREFIX}ld.bfd${TOOLCHAIN_OPTIONS} ${HOST_LD_ARCH}', '${LD}', d)}"
+
+TARGET_CC_ARCH += "${LDFLAGS}"
+
+export prefix = "/opt/${PN}"
+export exec_prefix = "/opt/${PN}"
+
+PACKAGECONFIG[numa] = "--with-numa, --without-numa, numactl,"
+EXTRA_AUTORECONF += "-I ${S}/testcases/realtime/m4"
+EXTRA_OECONF = " --with-realtime-testsuite --with-open-posix-testsuite "
+# ltp network/rpc test cases ftbfs when libtirpc is found
+EXTRA_OECONF += " --without-tirpc "
+
+do_compile() {
+ oe_runmake HOSTCC="${CC_FOR_BUILD}" HOST_CFLAGS="${CFLAGS_FOR_BUILD}" HOST_LDFLAGS="${LDFLAGS_FOR_BUILD}" KVM_LD="${KVM_LD}"
+}
+
+do_install(){
+ install -d ${D}${prefix}/
+ oe_runmake DESTDIR=${D} SKIP_IDCHECK=1 install include-install
+
+ # fixup not deploy STPfailure_report.pl to avoid confusing about it fails to run
+ # as it lacks dependency on some perl moudle such as LWP::Simple
+ # And this script previously works as a tool for analyzing failures from LTP
+ # runs on the OSDL's Scaleable Test Platform (STP) and it mainly accesses
+ # http://khack.osdl.org to retrieve ltp test results run on
+ # OSDL's Scaleable Test Platform, but now http://khack.osdl.org unaccessible
+ rm -rf ${D}${prefix}/bin/STPfailure_report.pl
+
+ # The controllers memcg_stree test seems to cause us hangs and takes 900s
+ # (maybe we expect more regular output?), anyhow, skip it
+ sed -e '/^memcg_stress/d' -i ${D}${prefix}/runtest/controllers
+}
+
+RDEPENDS:${PN} = "\
+ attr \
+ bash \
+ bc \
+ coreutils \
+ cpio \
+ cronie \
+ curl \
+ e2fsprogs \
+ e2fsprogs-mke2fs \
+ expect \
+ file \
+ findutils \
+ gawk \
+ gdb \
+ gzip \
+ iproute2 \
+ ldd \
+ libaio \
+ logrotate \
+ net-tools \
+ perl \
+ python3-core \
+ procps \
+ quota \
+ unzip \
+ util-linux \
+ which \
+ tar \
+"
+
+RRECOMMENDS:${PN} += "kernel-module-loop"
+
+FILES:${PN} += "${prefix}/* ${prefix}/runtest/* ${prefix}/scenario_groups/* ${prefix}/testcases/bin/* ${prefix}/testcases/bin/*/bin/* ${prefix}/testscripts/* ${prefix}/testcases/open_posix_testsuite/* ${prefix}/testcases/open_posix_testsuite/conformance/* ${prefix}/testcases/open_posix_testsuite/Documentation/* ${prefix}/testcases/open_posix_testsuite/functional/* ${prefix}/testcases/open_posix_testsuite/include/* ${prefix}/testcases/open_posix_testsuite/scripts/* ${prefix}/testcases/open_posix_testsuite/stress/* ${prefix}/testcases/open_posix_testsuite/tools/* ${prefix}/testcases/data/nm01/lib.a ${prefix}/lib/libmem.a"
+
+# Avoid stripping some generated binaries otherwise some of the ltp tests such as ldd01 & nm01 fail
+INHIBIT_PACKAGE_STRIP_FILES = "${prefix}/testcases/bin/nm01 ${prefix}/testcases/bin/ldd01"
+INSANE_SKIP:${PN} += "already-stripped staticdev"
+
+remove_broken_musl_sources() {
+ [ "${TCLIBC}" = "musl" ] || return 0
+
+ cd ${S}
+ echo "WARNING: remove unsupported tests (until they're fixed)"
+
+ # sync with upstream
+ # https://github.com/linux-test-project/ltp/blob/master/ci/alpine.sh#L33
+ rm -rfv \
+ testcases/kernel/syscalls/fmtmsg/fmtmsg01.c \
+ testcases/kernel/syscalls/getcontext/getcontext01.c \
+ testcases/kernel/syscalls/rt_tgsigqueueinfo/rt_tgsigqueueinfo01.c \
+ testcases/kernel/syscalls/timer_create/timer_create01.c \
+ testcases/kernel/syscalls/timer_create/timer_create03.c
+}
+do_patch[postfuncs] += "remove_broken_musl_sources"
+
+# Avoid file dependency scans, as LTP checks for things that may or may not
+# exist on the running system. For instance it has specific checks for
+# csh and ksh which are not typically part of OpenEmbedded systems (but
+# can be added via additional layers.)
+SKIP_FILEDEPS:${PN} = '1'
diff --git a/meta/recipes-extended/lzip/lzip_1.23.bb b/meta/recipes-extended/lzip/lzip_1.23.bb
deleted file mode 100644
index 12e8fa6c58..0000000000
--- a/meta/recipes-extended/lzip/lzip_1.23.bb
+++ /dev/null
@@ -1,42 +0,0 @@
-SUMMARY = "Lossless data compressor based on the LZMA algorithm"
-HOMEPAGE = "http://lzip.nongnu.org/lzip.html"
-DESCRIPTION = "Lzip is a lossless data compressor with a user interface similar to the one of gzip or bzip2. Lzip uses a simplified form of the Lempel-Ziv-Markov chain-Algorithm (LZMA) stream format, chosen to maximize safety and interoperability."
-SECTION = "console/utils"
-LICENSE = "GPL-2.0-or-later"
-LIC_FILES_CHKSUM = "file://COPYING;md5=76d6e300ffd8fb9d18bd9b136a9bba13 \
- file://decoder.cc;beginline=3;endline=16;md5=18c279060cd0be128188404b45837f88 \
- "
-
-SRC_URI = "${SAVANNAH_GNU_MIRROR}/lzip/lzip-${PV}.tar.gz"
-SRC_URI[sha256sum] = "4792c047ddf15ef29d55ba8e68a1a21e0cb7692d87ecdf7204419864582f280d"
-
-B = "${WORKDIR}/build"
-do_configure[cleandirs] = "${B}"
-
-CONFIGUREOPTS = "\
- '--srcdir=${S}' \
- '--prefix=${prefix}' \
- '--exec-prefix=${exec_prefix}' \
- '--bindir=${bindir}' \
- '--datadir=${datadir}' \
- '--infodir=${infodir}' \
- '--sysconfdir=${sysconfdir}' \
- 'CXX=${CXX}' \
- 'CPPFLAGS=${CPPFLAGS}' \
- 'CXXFLAGS=${CXXFLAGS}' \
- 'LDFLAGS=${LDFLAGS}' \
-"
-
-do_configure () {
- ${S}/configure ${CONFIGUREOPTS}
-}
-
-do_install () {
- oe_runmake 'DESTDIR=${D}' install
- # Info dir listing isn't interesting at this point so remove it if it exists.
- if [ -e "${D}${infodir}/dir" ]; then
- rm -f ${D}${infodir}/dir
- fi
-}
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-extended/lzip/lzip_1.24.1.bb b/meta/recipes-extended/lzip/lzip_1.24.1.bb
new file mode 100644
index 0000000000..6eda012734
--- /dev/null
+++ b/meta/recipes-extended/lzip/lzip_1.24.1.bb
@@ -0,0 +1,42 @@
+SUMMARY = "Lossless data compressor based on the LZMA algorithm"
+HOMEPAGE = "http://lzip.nongnu.org/lzip.html"
+DESCRIPTION = "Lzip is a lossless data compressor with a user interface similar to the one of gzip or bzip2. Lzip uses a simplified form of the Lempel-Ziv-Markov chain-Algorithm (LZMA) stream format, chosen to maximize safety and interoperability."
+SECTION = "console/utils"
+LICENSE = "GPL-2.0-or-later"
+LIC_FILES_CHKSUM = "file://COPYING;md5=76d6e300ffd8fb9d18bd9b136a9bba13 \
+ file://decoder.cc;beginline=3;endline=16;md5=18c279060cd0be128188404b45837f88 \
+ "
+
+SRC_URI = "${SAVANNAH_GNU_MIRROR}/lzip/lzip-${PV}.tar.gz"
+SRC_URI[sha256sum] = "30c9cb6a0605f479c496c376eb629a48b0a1696d167e3c1e090c5defa481b162"
+
+B = "${WORKDIR}/build"
+do_configure[cleandirs] = "${B}"
+
+CONFIGUREOPTS = "\
+ '--srcdir=${S}' \
+ '--prefix=${prefix}' \
+ '--exec-prefix=${exec_prefix}' \
+ '--bindir=${bindir}' \
+ '--datadir=${datadir}' \
+ '--infodir=${infodir}' \
+ '--sysconfdir=${sysconfdir}' \
+ 'CXX=${CXX}' \
+ 'CPPFLAGS=${CPPFLAGS}' \
+ 'CXXFLAGS=${CXXFLAGS}' \
+ 'LDFLAGS=${LDFLAGS}' \
+"
+
+do_configure () {
+ ${S}/configure ${CONFIGUREOPTS}
+}
+
+do_install () {
+ oe_runmake 'DESTDIR=${D}' install
+ # Info dir listing isn't interesting at this point so remove it if it exists.
+ if [ -e "${D}${infodir}/dir" ]; then
+ rm -f ${D}${infodir}/dir
+ fi
+}
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-extended/lzip/lzlib_1.14.bb b/meta/recipes-extended/lzip/lzlib_1.14.bb
new file mode 100644
index 0000000000..a6010bbf27
--- /dev/null
+++ b/meta/recipes-extended/lzip/lzlib_1.14.bb
@@ -0,0 +1,39 @@
+SUMMARY = "Data compression library providing in-memory LZMA compression and decompression functions"
+HOMEPAGE = "https://www.nongnu.org/lzip/lzlib.html"
+DESCRIPTION = "Lzlib is a data compression library providing in-memory LZMA compression and decompression functions, including integrity checking of the decompressed data. The compressed data format used by the library is the lzip format. Lzlib is written in C. "
+LICENSE = "BSD-2-Clause"
+LIC_FILES_CHKSUM = "file://COPYING;md5=04d943636aa1482e0a97d924d9f4f68f \
+ "
+
+SRC_URI = "${SAVANNAH_GNU_MIRROR}/lzip/lzlib/lzlib-${PV}.tar.gz"
+SRC_URI[sha256sum] = "5acac8714ed4f306020bae660dddce706e5f8a795863679037da9fe6bf4dcf6f"
+
+B = "${WORKDIR}/build"
+do_configure[cleandirs] = "${B}"
+
+CONFIGUREOPTS = "\
+ '--srcdir=${S}' \
+ '--prefix=${prefix}' \
+ '--exec-prefix=${exec_prefix}' \
+ '--bindir=${bindir}' \
+ '--datadir=${datadir}' \
+ '--infodir=${infodir}' \
+ '--libdir=${libdir}' \
+ '--sysconfdir=${sysconfdir}' \
+ '--enable-shared' \
+ '--disable-static' \
+ 'CC=${CC}' \
+ 'CPPFLAGS=${CPPFLAGS}' \
+ 'CXXFLAGS=${CXXFLAGS}' \
+ 'LDFLAGS=${LDFLAGS}' \
+"
+
+do_configure () {
+ ${S}/configure ${CONFIGUREOPTS}
+}
+
+do_install () {
+ oe_runmake 'DESTDIR=${D}' install
+}
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-extended/man-db/files/0001-man-Move-local-variable-declaration-to-function-scop.patch b/meta/recipes-extended/man-db/files/0001-man-Move-local-variable-declaration-to-function-scop.patch
index dc6966629c..57ecd48b1f 100644
--- a/meta/recipes-extended/man-db/files/0001-man-Move-local-variable-declaration-to-function-scop.patch
+++ b/meta/recipes-extended/man-db/files/0001-man-Move-local-variable-declaration-to-function-scop.patch
@@ -1,4 +1,4 @@
-From 126dfefb5fddf411ad0a1316209e9c1b47abfcd2 Mon Sep 17 00:00:00 2001
+From e4125223631f0d555fc327da6d8705bcc8ee5ba5 Mon Sep 17 00:00:00 2001
From: Khem Raj <raj.khem@gmail.com>
Date: Wed, 9 Feb 2022 17:30:16 -0800
Subject: [PATCH] man: Move local variable declaration to function scope
@@ -10,16 +10,15 @@ code without changing the logic, until its fixed in clang
Upstream-Status: Inappropriate [Inappropriate: Clang bug]
Signed-off-by: Khem Raj <raj.khem@gmail.com>
-
---
src/man.c | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/src/man.c b/src/man.c
-index f16fae8..333df03 100644
+index 195d35d..6870989 100644
--- a/src/man.c
+++ b/src/man.c
-@@ -352,7 +352,7 @@ static void init_html_pager (void)
+@@ -379,7 +379,7 @@ static void init_html_pager (void)
static error_t parse_opt (int key, char *arg, struct argp_state *state)
{
static bool apropos, whatis; /* retain values between calls */
@@ -28,7 +27,7 @@ index f16fae8..333df03 100644
/* Please keep these keys in the same order as in options above. */
switch (key) {
case 'C':
-@@ -384,7 +384,7 @@ static error_t parse_opt (int key, char *arg, struct argp_state *state)
+@@ -411,7 +411,7 @@ static error_t parse_opt (int key, char *arg, struct argp_state *state)
case OPT_WARNINGS:
#ifdef NROFF_WARNINGS
{
diff --git a/meta/recipes-extended/man-db/files/man_db.conf-avoid-multilib-install-file-conflict.patch b/meta/recipes-extended/man-db/files/man_db.conf-avoid-multilib-install-file-conflict.patch
deleted file mode 100644
index 03273d3b49..0000000000
--- a/meta/recipes-extended/man-db/files/man_db.conf-avoid-multilib-install-file-conflict.patch
+++ /dev/null
@@ -1,16 +0,0 @@
-The first line of man_db.conf will be replaced by package name. And it causes
-multilib install file conflict. Remove the line to avoid the issue.
-
-Upstream-Status: Pending
-
-Signed-off-by: Kai Kang <kai.kang@windriver.com>
-
-diff --git a/src/man_db.conf.in b/src/man_db.conf.in
-index bc8bb7e..04cfb31 100644
---- a/src/man_db.conf.in
-+++ b/src/man_db.conf.in
-@@ -1,4 +1,3 @@
--# @config_file_basename@
- #
- # This file is used by the man-db package to configure the man and cat paths.
- # It is also used to provide a manpath for those without one by examining
diff --git a/meta/recipes-extended/man-db/man-db_2.10.2.bb b/meta/recipes-extended/man-db/man-db_2.10.2.bb
deleted file mode 100644
index a41e2dd4d8..0000000000
--- a/meta/recipes-extended/man-db/man-db_2.10.2.bb
+++ /dev/null
@@ -1,68 +0,0 @@
-SUMMARY = "An implementation of the standard Unix documentation system accessed using the man command"
-HOMEPAGE = "http://man-db.nongnu.org/"
-DESCRIPTION = "man-db is an implementation of the standard Unix documentation system accessed using the man command. It uses a Berkeley DB database in place of the traditional flat-text whatis databases."
-LICENSE = "LGPL-2.1-only & GPL-2.0-only"
-LIC_FILES_CHKSUM = "file://COPYING.LIB;md5=4fbd65380cdd255951079008b364516c \
- file://COPYING;md5=b234ee4d69f5fce4486a80fdaf4a4263"
-
-SRC_URI = "${SAVANNAH_NONGNU_MIRROR}/man-db/man-db-${PV}.tar.xz \
- file://99_mandb \
- file://0001-man-Move-local-variable-declaration-to-function-scop.patch \
- file://man_db.conf-avoid-multilib-install-file-conflict.patch"
-SRC_URI[sha256sum] = "ee97954d492a13731903c9d0727b9b01e5089edbd695f0cdb58d405a5af5514d"
-
-DEPENDS = "libpipeline gdbm groff-native base-passwd"
-RDEPENDS:${PN} += "base-passwd"
-PACKAGE_WRITE_DEPS += "base-passwd"
-
-# | /usr/src/debug/man-db/2.8.0-r0/man-db-2.8.0/src/whatis.c:939: undefined reference to `_nl_msg_cat_cntr'
-USE_NLS:libc-musl = "no"
-
-inherit gettext pkgconfig autotools systemd
-
-EXTRA_OECONF = "--with-pager=less --with-systemdsystemunitdir=${systemd_system_unitdir}"
-EXTRA_AUTORECONF += "-I ${S}/gl/m4"
-
-PACKAGECONFIG[bzip2] = "--with-bzip2=bzip2,ac_cv_prog_have_bzip2='',bzip2"
-PACKAGECONFIG[gzip] = "--with-gzip=gzip,ac_cv_prog_have_gzip='',gzip"
-PACKAGECONFIG[lzip] = "--with-lzip=lzip,ac_cv_prog_have_lzip='',lzip"
-PACKAGECONFIG[lzma] = "--with-lzma=lzma,ac_cv_prog_have_lzma='',xz"
-PACKAGECONFIG[zstd] = "--with-zstd=zstd,ac_cv_prog_have_zstd='',zstd"
-PACKAGECONFIG[xz] = "--with-xz=xz,ac_cv_prog_have_xz='',xz"
-
-do_install() {
- autotools_do_install
-
- if ${@bb.utils.contains('DISTRO_FEATURES', 'sysvinit', 'true', 'false', d)}; then
- install -d ${D}/etc/default/volatiles
- install -m 0644 ${WORKDIR}/99_mandb ${D}/etc/default/volatiles
- fi
-}
-
-do_install:append:libc-musl() {
- rm -f ${D}${libdir}/charset.alias
-}
-
-FILES:${PN} += "${prefix}/lib/tmpfiles.d"
-
-FILES:${PN}-dev += "${libdir}/man-db/libman.so ${libdir}/${BPN}/libmandb.so"
-
-RDEPENDS:${PN} += "groff"
-RRECOMMENDS:${PN} += "less"
-RPROVIDES:${PN} += " man"
-
-def compress_pkg(d):
- if bb.utils.contains("INHERIT", "compress_doc", True, False, d):
- compress = d.getVar("DOC_COMPRESS")
- if compress == "gz":
- return "gzip"
- elif compress == "bz2":
- return "bzip2"
- elif compress == "xz":
- return "xz"
- return ""
-
-RDEPENDS:${PN} += "${@compress_pkg(d)}"
-
-SYSTEMD_SERVICE:${PN} = "man-db.timer man-db.service"
-SYSTEMD_AUTO_ENABLE ?= "disable"
diff --git a/meta/recipes-extended/man-db/man-db_2.12.1.bb b/meta/recipes-extended/man-db/man-db_2.12.1.bb
new file mode 100644
index 0000000000..27b47a7f47
--- /dev/null
+++ b/meta/recipes-extended/man-db/man-db_2.12.1.bb
@@ -0,0 +1,72 @@
+SUMMARY = "An implementation of the standard Unix documentation system accessed using the man command"
+HOMEPAGE = "http://man-db.nongnu.org/"
+DESCRIPTION = "man-db is an implementation of the standard Unix documentation system accessed using the man command. It uses a Berkeley DB database in place of the traditional flat-text whatis databases."
+LICENSE = "LGPL-2.1-or-later & GPL-2.0-or-later & GPL-3.0-or-later"
+LIC_FILES_CHKSUM = "file://COPYING;md5=1ebbd3e34237af26da5dc08a4e440464 \
+ file://docs/COPYING.GPLv2;md5=b234ee4d69f5fce4486a80fdaf4a4263 \
+ file://docs/COPYING.LIB;md5=4fbd65380cdd255951079008b364516c \
+ "
+
+SRC_URI = "${SAVANNAH_NONGNU_MIRROR}/man-db/man-db-${PV}.tar.xz \
+ file://99_mandb \
+ file://0001-man-Move-local-variable-declaration-to-function-scop.patch \
+ "
+SRC_URI[sha256sum] = "ddee249daeb78cf92bab794ccd069cc8b575992265ea20e239e887156e880265"
+
+DEPENDS = "libpipeline gdbm groff-native base-passwd"
+RDEPENDS:${PN} += "base-passwd"
+PACKAGE_WRITE_DEPS += "base-passwd"
+
+# | /usr/src/debug/man-db/2.8.0-r0/man-db-2.8.0/src/whatis.c:939: undefined reference to `_nl_msg_cat_cntr'
+USE_NLS:libc-musl = "no"
+
+inherit gettext pkgconfig autotools systemd
+
+EXTRA_OECONF = "--with-pager=less --with-systemdsystemunitdir=${systemd_system_unitdir}"
+EXTRA_AUTORECONF += "-I ${S}/gl/m4"
+
+PACKAGECONFIG[bzip2] = "--with-bzip2=bzip2,ac_cv_prog_have_bzip2='',bzip2"
+PACKAGECONFIG[gzip] = "--with-gzip=gzip,ac_cv_prog_have_gzip='',gzip"
+PACKAGECONFIG[lzip] = "--with-lzip=lzip,ac_cv_prog_have_lzip='',lzip"
+PACKAGECONFIG[lzma] = "--with-lzma=lzma,ac_cv_prog_have_lzma='',xz"
+PACKAGECONFIG[zstd] = "--with-zstd=zstd,ac_cv_prog_have_zstd='',zstd"
+PACKAGECONFIG[xz] = "--with-xz=xz,ac_cv_prog_have_xz='',xz"
+
+do_install() {
+ autotools_do_install
+
+ if ${@bb.utils.contains('DISTRO_FEATURES', 'sysvinit', 'true', 'false', d)}; then
+ install -d ${D}/etc/default/volatiles
+ install -m 0644 ${WORKDIR}/99_mandb ${D}/etc/default/volatiles
+ fi
+}
+
+do_install:append:libc-musl() {
+ rm -f ${D}${libdir}/charset.alias
+}
+
+FILES:${PN} += "${prefix}/lib/tmpfiles.d"
+
+FILES:${PN}-dev += "${libdir}/man-db/libman.so ${libdir}/${BPN}/libmandb.so"
+
+RDEPENDS:${PN} += "groff"
+RRECOMMENDS:${PN} += "less"
+# iconv from glibc-utils can be used to transform encoding
+RRECOMMENDS:${PN}:append:libc-glibc = " glibc-utils"
+RPROVIDES:${PN} += "man"
+
+def compress_pkg(d):
+ if bb.utils.contains("INHERIT", "compress_doc", True, False, d):
+ compress = d.getVar("DOC_COMPRESS")
+ if compress == "gz":
+ return "gzip"
+ elif compress == "bz2":
+ return "bzip2"
+ elif compress == "xz":
+ return "xz"
+ return ""
+
+RDEPENDS:${PN} += "${@compress_pkg(d)}"
+
+SYSTEMD_SERVICE:${PN} = "man-db.timer man-db.service"
+SYSTEMD_AUTO_ENABLE ?= "disable"
diff --git a/meta/recipes-extended/man-pages/man-pages/0001-GNUmakefile-use-env-from-PATH.patch b/meta/recipes-extended/man-pages/man-pages/0001-GNUmakefile-use-env-from-PATH.patch
new file mode 100644
index 0000000000..a644d24328
--- /dev/null
+++ b/meta/recipes-extended/man-pages/man-pages/0001-GNUmakefile-use-env-from-PATH.patch
@@ -0,0 +1,31 @@
+From fbf0544138af02be4d6dd3085e7ecf98ad060bb4 Mon Sep 17 00:00:00 2001
+From: Alexander Kanavin <alex@linutronix.de>
+Date: Wed, 13 Mar 2024 17:30:02 +0100
+Subject: [PATCH] GNUmakefile: use env from PATH
+
+This allows using env from coreutils-native (rather than host env),
+which resolves the problem of missing -S option on distributions
+with much older coreutils such as Ubuntu 18.04.
+
+Once all autobuilder distros are newer than that, this patch
+can be dropped, together with corutils-native dependency.
+
+Upstream-Status: Inappropriate [made for obsolete host distros]
+Signed-off-by: Alexander Kanavin <alex@linutronix.de>
+---
+ GNUmakefile | 2 +-
+ 1 file changed, 1 insertion(+), 1 deletion(-)
+
+diff --git a/GNUmakefile b/GNUmakefile
+index 1234194..65ba327 100644
+--- a/GNUmakefile
++++ b/GNUmakefile
+@@ -21,7 +21,7 @@
+ ########################################################################
+
+
+-SHELL := /usr/bin/env
++SHELL := env
+ .SHELLFLAGS := -S bash -Eeuo pipefail -c
+
+
diff --git a/meta/recipes-extended/man-pages/man-pages/0001-man.ml-do-not-use-dev-stdin.patch b/meta/recipes-extended/man-pages/man-pages/0001-man.ml-do-not-use-dev-stdin.patch
new file mode 100644
index 0000000000..ec2d0600c7
--- /dev/null
+++ b/meta/recipes-extended/man-pages/man-pages/0001-man.ml-do-not-use-dev-stdin.patch
@@ -0,0 +1,33 @@
+From f16cd8c4986b7f250e8465dfbe4bda8d366e05dd Mon Sep 17 00:00:00 2001
+From: Alexander Kanavin <alex@linutronix.de>
+Date: Mon, 4 Mar 2024 14:35:16 +0100
+Subject: [PATCH] man.ml: do not use /dev/stdin
+
+This was introduced in https://git.kernel.org/pub/scm/docs/man-pages/man-pages.git/commit/?id=30c38a8bf8ae8f4a6e71d3b8ac4abf0a40778f1e
+(presumably to build a 'sed | install' pipeline instead of previous 'install; sed' sequence),
+but it doesn't work under pseudo where /dev/stdin is absent.
+
+Upstream-Status: Inappropriate [yocto specific]
+Signed-off-by: Alexander Kanavin <alex@linutronix.de>
+---
+ share/mk/install/man.mk | 6 +++---
+ 1 file changed, 3 insertions(+), 3 deletions(-)
+
+diff --git a/share/mk/install/man.mk b/share/mk/install/man.mk
+index b9865b9..06880f0 100644
+--- a/share/mk/install/man.mk
++++ b/share/mk/install/man.mk
+@@ -60,10 +60,10 @@ $(foreach s, $(MANSECTIONS), \
+
+ $(_manpages):
+ $(info INSTALL $@)
+- <$< \
+- $(SED) $(foreach s, $(MANSECTIONS), \
++ $(INSTALL_DATA) -T $< $@
++ $(SED) -i $(foreach s, $(MANSECTIONS), \
+ -e '/^\.so /s, man$(s)/\(.*\)\.$(s)$$, $(notdir $(man$(s)dir))/\1$(man$(s)ext)$(Z),') \
+- | $(INSTALL_DATA) -T /dev/stdin $@
++ $@
+ ifeq ($(LINK_PAGES),symlink)
+ if $(GREP) '^\.so ' <$@ >/dev/null; then \
+ $(GREP) '^\.so ' <$@ \
diff --git a/meta/recipes-extended/man-pages/man-pages_5.13.bb b/meta/recipes-extended/man-pages/man-pages_5.13.bb
deleted file mode 100644
index f8602c699e..0000000000
--- a/meta/recipes-extended/man-pages/man-pages_5.13.bb
+++ /dev/null
@@ -1,37 +0,0 @@
-SUMMARY = "Linux man-pages"
-DESCRIPTION = "The Linux man-pages project documents the Linux kernel and C library interfaces that are employed by user programs"
-SECTION = "console/utils"
-HOMEPAGE = "http://www.kernel.org/pub/linux/docs/man-pages"
-LICENSE = "GPL-2.0-or-later"
-
-LIC_FILES_CHKSUM = "file://README;md5=92cd5ee2e0b35d782817e7e277b6ce4b"
-SRC_URI = "${KERNELORG_MIRROR}/linux/docs/${BPN}/${BP}.tar.gz"
-
-SRC_URI[sha256sum] = "ed615e9a31978833f59e7316667b9aeffbbdb0a92d1391f9c66fadc6e77d0da8"
-
-inherit manpages
-
-MAN_PKG = "${PN}"
-
-PACKAGECONFIG ??= ""
-PACKAGECONFIG[manpages] = ""
-
-do_configure[noexec] = "1"
-do_compile[noexec] = "1"
-
-do_install() {
- oe_runmake install prefix=${prefix} DESTDIR=${D}
-}
-
-# Only deliveres man-pages so FILES:${PN} gets everything
-FILES:${PN}-doc = ""
-FILES:${PN} = "${mandir}/*"
-
-inherit update-alternatives
-
-ALTERNATIVE_PRIORITY = "100"
-ALTERNATIVE:${PN} = "crypt.3 crypt_r.3 getspnam.3 passwd.5"
-ALTERNATIVE_LINK_NAME[crypt.3] = "${mandir}/man3/crypt.3"
-ALTERNATIVE_LINK_NAME[crypt_r.3] = "${mandir}/man3/crypt_r.3"
-ALTERNATIVE_LINK_NAME[getspnam.3] = "${mandir}/man3/getspnam.3"
-ALTERNATIVE_LINK_NAME[passwd.5] = "${mandir}/man5/passwd.5"
diff --git a/meta/recipes-extended/man-pages/man-pages_6.06.bb b/meta/recipes-extended/man-pages/man-pages_6.06.bb
new file mode 100644
index 0000000000..0f091bbd7d
--- /dev/null
+++ b/meta/recipes-extended/man-pages/man-pages_6.06.bb
@@ -0,0 +1,50 @@
+SUMMARY = "Linux man-pages"
+DESCRIPTION = "The Linux man-pages project documents the Linux kernel and C library interfaces that are employed by user programs"
+SECTION = "console/utils"
+HOMEPAGE = "http://www.kernel.org/pub/linux/docs/man-pages"
+LICENSE = "GPL-2.0-or-later & GPL-2.0-only & GPL-1.0-or-later & BSD-2-Clause & BSD-3-Clause & BSD-4-Clause & MIT"
+
+LIC_FILES_CHKSUM = "file://README;md5=72cff06b7954222c24d38bc2c41b234e \
+ file://LICENSES/BSD-2-Clause.txt;md5=9e16594a228301089d759b4f178db91f \
+ file://LICENSES/BSD-3-Clause.txt;md5=407426fcc1a243b7b2eff6e35c56aca9 \
+ file://LICENSES/BSD-4-Clause-UC.txt;md5=1da3cf8ad50cd8d5d1de3cfc53196d01 \
+ file://LICENSES/GPL-1.0-or-later.txt;md5=e5b7c80002ef72ab868b43ce47b65125 \
+ file://LICENSES/GPL-2.0-only.txt;md5=3d26203303a722dedc6bf909d95ba815 \
+ file://LICENSES/GPL-2.0-or-later.txt;md5=3d26203303a722dedc6bf909d95ba815 \
+ file://LICENSES/Linux-man-pages-1-para.txt;md5=97ab07585ce6700273bc66461bf46bf2 \
+ file://LICENSES/Linux-man-pages-copyleft-2-para.txt;md5=1cafc230857da5e43f3d509c425d3c64 \
+ file://LICENSES/Linux-man-pages-copyleft.txt;md5=173b960c686ff2d26f043ddaeb63f6ce \
+ file://LICENSES/Linux-man-pages-copyleft-var.txt;md5=d33708712c5918521f47f23b0c4e0d20 \
+ file://LICENSES/MIT.txt;md5=7dda4e90ded66ab88b86f76169f28663 \
+ "
+SRC_URI = "${KERNELORG_MIRROR}/linux/docs/${BPN}/${BP}.tar.gz \
+ file://0001-man.ml-do-not-use-dev-stdin.patch \
+ file://0001-GNUmakefile-use-env-from-PATH.patch \
+ "
+
+SRC_URI[sha256sum] = "006906e7be81a71c2d347809597bcb91485fa7fa488acdaa79e681ddfa894568"
+
+inherit manpages
+
+# can be dropped when ubuntu 18.04 is not in use anymore
+DEPENDS += "coreutils-native"
+
+MAN_PKG = "${PN}"
+
+PACKAGECONFIG ??= ""
+PACKAGECONFIG[manpages] = ""
+
+do_configure[noexec] = "1"
+do_compile[noexec] = "1"
+
+do_install() {
+ oe_runmake install prefix=${prefix} DESTDIR=${D}
+ rm -rf ${D}${mandir}/man3/crypt.3
+ rm -rf ${D}${mandir}/man3/crypt_r.3
+ rm -rf ${D}${mandir}/man3/getspnam.3
+ rm -rf ${D}${mandir}/man5/passwd.5
+}
+
+# Only deliveres man-pages so FILES:${PN} gets everything
+FILES:${PN}-doc = ""
+FILES:${PN} = "${mandir}/*"
diff --git a/meta/recipes-extended/mc/files/0001-mc-replace-perl-w-with-use-warnings.patch b/meta/recipes-extended/mc/files/0001-mc-replace-perl-w-with-use-warnings.patch
deleted file mode 100644
index bf8037cd28..0000000000
--- a/meta/recipes-extended/mc/files/0001-mc-replace-perl-w-with-use-warnings.patch
+++ /dev/null
@@ -1,129 +0,0 @@
-From cdc7c278212ae836eecb4cc9d42c29443cc128a0 Mon Sep 17 00:00:00 2001
-From: Robert Yang <liezhi.yang@windriver.com>
-Date: Thu, 6 Apr 2017 02:24:28 -0700
-Subject: [PATCH] mc: replace "perl -w" with "use warnings"
-
-The shebang's max length is usually 128 as defined in
-/usr/include/linux/binfmts.h:
- #define BINPRM_BUF_SIZE 128
-
-There would be errors when @PERL@ is longer than 128, use
-'/usr/bin/env perl' can fix the problem, but '/usr/bin/env perl -w'
-doesn't work:
-
-/usr/bin/env: perl -w: No such file or directory
-
-So replace "perl -w" with "use warnings" to make it work.
-
-The man2hlp.in already has "use warnings;", so just remove '-w' is OK.
-
-Upstream-Status: Pending
-
-Signed-off-by: Robert Yang <liezhi.yang@windriver.com>
----
- src/man2hlp/man2hlp.in | 2 +-
- src/vfs/extfs/helpers/a+.in | 4 +++-
- src/vfs/extfs/helpers/mailfs.in | 3 ++-
- src/vfs/extfs/helpers/patchfs.in | 3 ++-
- src/vfs/extfs/helpers/ulib.in | 4 +++-
- src/vfs/extfs/helpers/uzip.in | 3 ++-
- 6 files changed, 13 insertions(+), 6 deletions(-)
-
-diff --git a/src/man2hlp/man2hlp.in b/src/man2hlp/man2hlp.in
-index f095830..558a674 100644
---- a/src/man2hlp/man2hlp.in
-+++ b/src/man2hlp/man2hlp.in
-@@ -1,4 +1,4 @@
--#! @PERL@ -w
-+#! @PERL@
- #
- # Man page to help file converter
- # Copyright (C) 1994, 1995, 1998, 2000, 2001, 2002, 2003, 2004, 2005,
-diff --git a/src/vfs/extfs/helpers/a+.in b/src/vfs/extfs/helpers/a+.in
-index 579441c..fe446f4 100644
---- a/src/vfs/extfs/helpers/a+.in
-+++ b/src/vfs/extfs/helpers/a+.in
-@@ -1,4 +1,4 @@
--#! @PERL@ -w
-+#! @PERL@
- #
- # External filesystem for mc, using mtools
- # Written Ludek Brukner <lubr@barco.cz>, 1997
-@@ -9,6 +9,8 @@
-
- # These mtools components must be in PATH for this to work
-
-+use warnings;
-+
- sub quote {
- $_ = shift(@_);
- s/([^\w\/.+-])/\\$1/g;
-diff --git a/src/vfs/extfs/helpers/mailfs.in b/src/vfs/extfs/helpers/mailfs.in
-index e9455be..059f41f 100644
---- a/src/vfs/extfs/helpers/mailfs.in
-+++ b/src/vfs/extfs/helpers/mailfs.in
-@@ -1,6 +1,7 @@
--#! @PERL@ -w
-+#! @PERL@
-
- use bytes;
-+use warnings;
-
- # MC extfs for (possibly compressed) Berkeley style mailbox files
- # Peter Daum <gator@cs.tu-berlin.de> (Jan 1998, mc-4.1.24)
-diff --git a/src/vfs/extfs/helpers/patchfs.in b/src/vfs/extfs/helpers/patchfs.in
-index ef407de..3ad4b53 100644
---- a/src/vfs/extfs/helpers/patchfs.in
-+++ b/src/vfs/extfs/helpers/patchfs.in
-@@ -1,4 +1,4 @@
--#! @PERL@ -w
-+#! @PERL@
- #
- # Written by Adam Byrtek <alpha@debian.org>, 2002
- # Rewritten by David Sterba <dave@jikos.cz>, 2009
-@@ -9,6 +9,7 @@
-
- use bytes;
- use strict;
-+use warnings;
- use POSIX;
- use File::Temp 'tempfile';
-
-diff --git a/src/vfs/extfs/helpers/ulib.in b/src/vfs/extfs/helpers/ulib.in
-index 418611f..82c7ccf 100644
---- a/src/vfs/extfs/helpers/ulib.in
-+++ b/src/vfs/extfs/helpers/ulib.in
-@@ -1,9 +1,11 @@
--#! @PERL@ -w
-+#! @PERL@
- #
- # VFS to manage the gputils archives.
- # Written by Molnár Károly (proton7@freemail.hu) 2012
- #
-
-+use warnings;
-+
- my %month = ('jan' => '01', 'feb' => '02', 'mar' => '03',
- 'apr' => '04', 'may' => '05', 'jun' => '06',
- 'jul' => '07', 'aug' => '08', 'sep' => '09',
-diff --git a/src/vfs/extfs/helpers/uzip.in b/src/vfs/extfs/helpers/uzip.in
-index b1c4f90..c8eb335 100644
---- a/src/vfs/extfs/helpers/uzip.in
-+++ b/src/vfs/extfs/helpers/uzip.in
-@@ -1,4 +1,4 @@
--#! @PERL@ -w
-+#! @PERL@
- #
- # zip file archive Virtual File System for Midnight Commander
- # Version 1.4.0 (2001-08-07).
-@@ -9,6 +9,7 @@
- use POSIX;
- use File::Basename;
- use strict;
-+use warnings;
-
- #
- # Configuration options
---
-2.10.2
-
diff --git a/meta/recipes-extended/mc/files/nomandate.patch b/meta/recipes-extended/mc/files/nomandate.patch
index c01d065b0e..92fa443865 100644
--- a/meta/recipes-extended/mc/files/nomandate.patch
+++ b/meta/recipes-extended/mc/files/nomandate.patch
@@ -1,4 +1,4 @@
-From f02010965af21db018c4e108b2193c872406a314 Mon Sep 17 00:00:00 2001
+From 78c5fd90a052f95157b3914c708a08b2eeab0154 Mon Sep 17 00:00:00 2001
From: Richard Purdie <richard.purdie@linuxfoundation.org>
Date: Tue, 4 Feb 2020 18:12:15 +0000
Subject: [PATCH] mc: Fix manpage date indeterminism
@@ -15,13 +15,12 @@ RP 2020/2/4
Upstream-Status: Inappropriate [OE specficic reproducibility workaround]
Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
-
---
doc/man/date-of-man-include.am | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/doc/man/date-of-man-include.am b/doc/man/date-of-man-include.am
-index 3a4e599..f11e9b3 100644
+index 96f9f10..d734c5d 100644
--- a/doc/man/date-of-man-include.am
+++ b/doc/man/date-of-man-include.am
@@ -1,5 +1,5 @@
@@ -29,5 +28,5 @@ index 3a4e599..f11e9b3 100644
- -e "s/%DATE_OF_MAN_PAGE%/$${MAN_DATE}/g" \
+ -e "s/%DATE_OF_MAN_PAGE%//g" \
-e "s/%MAN_VERSION%/@MAN_VERSION@/g" \
- -e "s{%prefix%{@prefix@{g" \
-e "s{%sysconfdir%{@sysconfdir@{g" \
+ -e "s{%libexecdir%{@libexecdir@{g" \
diff --git a/meta/recipes-extended/mc/mc_4.8.28.bb b/meta/recipes-extended/mc/mc_4.8.28.bb
deleted file mode 100644
index 9a950a8cd0..0000000000
--- a/meta/recipes-extended/mc/mc_4.8.28.bb
+++ /dev/null
@@ -1,60 +0,0 @@
-SUMMARY = "Midnight Commander is an ncurses based file manager"
-HOMEPAGE = "http://www.midnight-commander.org/"
-DESCRIPTION = "GNU Midnight Commander is a visual file manager, licensed under GNU General Public License and therefore qualifies as Free Software. It's a feature rich full-screen text mode application that allows you to copy, move and delete files and whole directory trees, search for files and run commands in the subshell. Internal viewer and editor are included."
-LICENSE = "GPL-3.0-only"
-LIC_FILES_CHKSUM = "file://COPYING;md5=270bbafe360e73f9840bd7981621f9c2"
-SECTION = "console/utils"
-DEPENDS = "ncurses glib-2.0 util-linux file-replacement-native"
-RDEPENDS:${PN} = "ncurses-terminfo-base"
-RRECOMMENDS:${PN} = "ncurses-terminfo"
-
-SRC_URI = "http://www.midnight-commander.org/downloads/${BPN}-${PV}.tar.bz2 \
- file://0001-mc-replace-perl-w-with-use-warnings.patch \
- file://nomandate.patch \
- "
-SRC_URI[sha256sum] = "6bb47533d7a55bb21e46292d2f94786c9037bd7a70bf02b6a3c48adb0c9ce20c"
-
-# remove at next version upgrade or when output changes
-HASHEQUIV_HASH_VERSION .= ".2"
-
-inherit autotools gettext pkgconfig
-
-#
-# Both Samba (smb) and sftp require package delivered from meta-openembedded
-#
-PACKAGECONFIG ??= ""
-PACKAGECONFIG[sftp] = "--enable-vfs-sftp,--disable-vfs-sftp,libssh2,"
-
-# enable NCURSES_WIDECHAR=1 only if ENABLE_WIDEC has not been explicitly disabled (e.g. by the distro config).
-# When compiling against the ncurses library, NCURSES_WIDECHAR needs to explicitly set to 0 in this case.
-CFLAGS:append:libc-musl = "${@' -DNCURSES_WIDECHAR=1' if bb.utils.to_boolean((d.getVar('ENABLE_WIDEC') or 'True')) else ' -DNCURSES_WIDECHAR=0'}"
-EXTRA_OECONF = "--with-screen=ncurses --without-gpm-mouse --without-x --disable-configure-args"
-EXTRANATIVEPATH += "file-native"
-
-CACHED_CONFIGUREVARS += "ac_cv_path_PERL='/usr/bin/env perl'"
-CACHED_CONFIGUREVARS += "ac_cv_path_PYTHON='/usr/bin/env python'"
-CACHED_CONFIGUREVARS += "ac_cv_path_GREP='/usr/bin/env grep'"
-CACHED_CONFIGUREVARS += "mc_cv_have_zipinfo=yes"
-
-do_install:append () {
- sed -i -e '1s,#!.*perl,#!${bindir}/env perl,' ${D}${libexecdir}/mc/extfs.d/*
-
- rm ${D}${libexecdir}/mc/extfs.d/s3+ ${D}${libexecdir}/mc/extfs.d/uc1541
-}
-
-PACKAGES =+ "${BPN}-helpers-perl ${BPN}-helpers ${BPN}-fish"
-
-SUMMARY:${BPN}-helpers-perl = "Midnight Commander Perl-based helper scripts"
-FILES:${BPN}-helpers-perl = "${libexecdir}/mc/extfs.d/a+ ${libexecdir}/mc/extfs.d/apt+ \
- ${libexecdir}/mc/extfs.d/deb ${libexecdir}/mc/extfs.d/deba \
- ${libexecdir}/mc/extfs.d/debd ${libexecdir}/mc/extfs.d/dpkg+ \
- ${libexecdir}/mc/extfs.d/mailfs ${libexecdir}/mc/extfs.d/patchfs \
- ${libexecdir}/mc/extfs.d/rpms+ ${libexecdir}/mc/extfs.d/ulib \
- ${libexecdir}/mc/extfs.d/uzip"
-RDEPENDS:${BPN}-helpers-perl = "perl"
-
-SUMMARY:${BPN}-helpers = "Midnight Commander shell helper scripts"
-FILES:${BPN}-helpers = "${libexecdir}/mc/extfs.d/* ${libexecdir}/mc/ext.d/*"
-
-SUMMARY:${BPN}-fish = "Midnight Commander Fish scripts"
-FILES:${BPN}-fish = "${libexecdir}/mc/fish"
diff --git a/meta/recipes-extended/mc/mc_4.8.31.bb b/meta/recipes-extended/mc/mc_4.8.31.bb
new file mode 100644
index 0000000000..69c32887a2
--- /dev/null
+++ b/meta/recipes-extended/mc/mc_4.8.31.bb
@@ -0,0 +1,57 @@
+SUMMARY = "Midnight Commander is an ncurses based file manager"
+HOMEPAGE = "http://www.midnight-commander.org/"
+DESCRIPTION = "GNU Midnight Commander is a visual file manager, licensed under GNU General Public License and therefore qualifies as Free Software. It's a feature rich full-screen text mode application that allows you to copy, move and delete files and whole directory trees, search for files and run commands in the subshell. Internal viewer and editor are included."
+LICENSE = "GPL-3.0-only"
+LIC_FILES_CHKSUM = "file://COPYING;md5=270bbafe360e73f9840bd7981621f9c2"
+SECTION = "console/utils"
+DEPENDS = "ncurses glib-2.0 util-linux file-replacement-native"
+RDEPENDS:${PN} = "ncurses-terminfo-base"
+RRECOMMENDS:${PN} = "ncurses-terminfo"
+
+SRC_URI = "http://www.midnight-commander.org/downloads/${BPN}-${PV}.tar.bz2 \
+ file://nomandate.patch \
+ "
+SRC_URI[sha256sum] = "f42f4114ed42f6cf9995f1d896fa6c797ccb36dac57760dda8dd9f78ac462841"
+
+inherit autotools gettext pkgconfig
+
+#
+# Both Samba (smb) and sftp require package delivered from meta-openembedded
+#
+PACKAGECONFIG ??= ""
+PACKAGECONFIG[sftp] = "--enable-vfs-sftp,--disable-vfs-sftp,libssh2,"
+
+# enable NCURSES_WIDECHAR=1 only if ENABLE_WIDEC has not been explicitly disabled (e.g. by the distro config).
+# When compiling against the ncurses library, NCURSES_WIDECHAR needs to explicitly set to 0 in this case.
+CFLAGS:append:libc-musl = "${@' -DNCURSES_WIDECHAR=1' if bb.utils.to_boolean((d.getVar('ENABLE_WIDEC') or 'True')) else ' -DNCURSES_WIDECHAR=0'}"
+EXTRA_OECONF = "--with-screen=ncurses --without-gpm-mouse --without-x --disable-configure-args"
+EXTRANATIVEPATH += "file-native"
+
+CACHED_CONFIGUREVARS += "ac_cv_path_PERL='/usr/bin/env perl'"
+CACHED_CONFIGUREVARS += "ac_cv_path_PERL_FOR_BUILD='/usr/bin/env perl'"
+CACHED_CONFIGUREVARS += "ac_cv_path_PYTHON='/usr/bin/env python'"
+CACHED_CONFIGUREVARS += "ac_cv_path_GREP='/usr/bin/env grep'"
+CACHED_CONFIGUREVARS += "mc_cv_have_zipinfo=yes"
+
+do_install:append () {
+ sed -i -e '1s,#!.*perl,#!${bindir}/env perl,' ${D}${libexecdir}/mc/extfs.d/*
+
+ rm ${D}${libexecdir}/mc/extfs.d/s3+ ${D}${libexecdir}/mc/extfs.d/uc1541
+}
+
+PACKAGES =+ "${BPN}-helpers-perl ${BPN}-helpers ${BPN}-shell"
+
+SUMMARY:${BPN}-helpers-perl = "Midnight Commander Perl-based helper scripts"
+FILES:${BPN}-helpers-perl = "${libexecdir}/mc/extfs.d/a+ ${libexecdir}/mc/extfs.d/apt+ \
+ ${libexecdir}/mc/extfs.d/deb ${libexecdir}/mc/extfs.d/deba \
+ ${libexecdir}/mc/extfs.d/debd ${libexecdir}/mc/extfs.d/dpkg+ \
+ ${libexecdir}/mc/extfs.d/mailfs ${libexecdir}/mc/extfs.d/patchfs \
+ ${libexecdir}/mc/extfs.d/rpms+ ${libexecdir}/mc/extfs.d/ulib \
+ ${libexecdir}/mc/extfs.d/uzip"
+RDEPENDS:${BPN}-helpers-perl = "perl"
+
+SUMMARY:${BPN}-helpers = "Midnight Commander shell helper scripts"
+FILES:${BPN}-helpers = "${libexecdir}/mc/extfs.d/* ${libexecdir}/mc/ext.d/*"
+
+SUMMARY:${BPN}-shell = "Midnight Commander Shell scripts"
+FILES:${BPN}-shell = "${libexecdir}/mc/shell"
diff --git a/meta/recipes-extended/mdadm/files/0001-Fix-parsing-of-r-in-monitor-manager-mode.patch b/meta/recipes-extended/mdadm/files/0001-Fix-parsing-of-r-in-monitor-manager-mode.patch
deleted file mode 100644
index 3fb46cc60a..0000000000
--- a/meta/recipes-extended/mdadm/files/0001-Fix-parsing-of-r-in-monitor-manager-mode.patch
+++ /dev/null
@@ -1,74 +0,0 @@
-From 969fbb35e40100f599d4a9781911251f21792698 Mon Sep 17 00:00:00 2001
-From: Changqing Li <changqing.li@windriver.com>
-Date: Thu, 27 Jan 2022 17:53:01 +0800
-Subject: [PATCH] Fix parsing of "-r" in monitor/manager mode
-
-This revert commit 546047688e1 [mdadm: fix coredump of mdadm --monitor
--r], and fix the coredump issue of 'mdadm --monitor -r'.
-
-commit 546047688e1 make -r not work in manager mode, and testcase
-00multipath failed.
-
-Upstream-Status: Submitted [send to maintainer jsorensen@fb.com]
-
-Signed-off-by: Changqing Li <changqing.li@windriver.com>
-
----
- ReadMe.c | 8 +++++---
- mdadm.c | 2 ++
- mdadm.h | 1 +
- 3 files changed, 8 insertions(+), 3 deletions(-)
-
-diff --git a/ReadMe.c b/ReadMe.c
-index 8139976..070eea5 100644
---- a/ReadMe.c
-+++ b/ReadMe.c
-@@ -81,11 +81,13 @@ char Version[] = "mdadm - v" VERSION " - " VERS_DATE EXTRAVERSION "\n";
- * found, it is started.
- */
-
--char short_options[]="-ABCDEFGIQhVXYWZ:vqbc:i:l:p:m:r:n:x:u:c:d:z:U:N:safRSow1tye:k";
-+char short_options[]="-ABCDEFGIQhVXYWZ:vqbc:i:l:p:m:n:x:u:c:d:z:U:N:sarfRSow1tye:k:";
- char short_bitmap_options[]=
-- "-ABCDEFGIQhVXYWZ:vqb:c:i:l:p:m:r:n:x:u:c:d:z:U:N:sarfRSow1tye:k:";
-+ "-ABCDEFGIQhVXYWZ:vqb:c:i:l:p:m:n:x:u:c:d:z:U:N:sarfRSow1tye:k:";
- char short_bitmap_auto_options[]=
-- "-ABCDEFGIQhVXYWZ:vqb:c:i:l:p:m:r:n:x:u:c:d:z:U:N:sa:rfRSow1tye:k:";
-+ "-ABCDEFGIQhVXYWZ:vqb:c:i:l:p:m:n:x:u:c:d:z:U:N:sa:rfRSow1tye:k:";
-+char short_increment_options[]=
-+ "-ABCDEFGIQhVXYWZ:vqbc:i:l:r:p:m:n:x:u:c:d:z:U:N:safRSow1tye:k:";
-
- struct option long_options[] = {
- {"manage", 0, 0, ManageOpt},
-diff --git a/mdadm.c b/mdadm.c
-index 26299b2..2a3b2ee 100644
---- a/mdadm.c
-+++ b/mdadm.c
-@@ -227,6 +227,7 @@ int main(int argc, char *argv[])
- shortopt = short_bitmap_auto_options;
- break;
- case 'F': newmode = MONITOR;
-+ shortopt = short_increment_options;
- break;
- case 'G': newmode = GROW;
- shortopt = short_bitmap_options;
-@@ -268,6 +269,7 @@ int main(int argc, char *argv[])
-
- case NoSharing:
- newmode = MONITOR;
-+ shortopt = short_increment_options;
- break;
- }
- if (mode && newmode == mode) {
-diff --git a/mdadm.h b/mdadm.h
-index ecfc137..42148dd 100644
---- a/mdadm.h
-+++ b/mdadm.h
-@@ -421,6 +421,7 @@ enum mode {
- extern char short_options[];
- extern char short_bitmap_options[];
- extern char short_bitmap_auto_options[];
-+extern char short_increment_options[];
- extern struct option long_options[];
- extern char Version[], Usage[], Help[], OptionHelp[],
- *mode_help[],
diff --git a/meta/recipes-extended/mdadm/files/0001-Fix-the-path-of-corosync-and-dlm-header-files-check.patch b/meta/recipes-extended/mdadm/files/0001-Fix-the-path-of-corosync-and-dlm-header-files-check.patch
index 298f276cd6..fa1f0aa520 100644
--- a/meta/recipes-extended/mdadm/files/0001-Fix-the-path-of-corosync-and-dlm-header-files-check.patch
+++ b/meta/recipes-extended/mdadm/files/0001-Fix-the-path-of-corosync-and-dlm-header-files-check.patch
@@ -1,4 +1,4 @@
-From a9166bf422da1001bac9cc819386bf39b7cd1b73 Mon Sep 17 00:00:00 2001
+From 76856a34a4e339e4a53b09d028f89fcc520e3127 Mon Sep 17 00:00:00 2001
From: "Maxin B. John" <maxin.john@intel.com>
Date: Tue, 9 Feb 2016 11:44:01 +0200
Subject: [PATCH] Fix the path of corosync and dlm header files check
@@ -9,16 +9,15 @@ Fix it.
Upstream-Status: Inappropriate [Yocto specific]
Signed-off-by: Maxin B. John <maxin.john@intel.com>
-
---
Makefile | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/Makefile b/Makefile
-index 2767ac6..46bf57b 100644
+index cbdba49..7bfd336 100644
--- a/Makefile
+++ b/Makefile
-@@ -91,8 +91,8 @@ FAILED_SLOTS_DIR = $(RUN_DIR)/failed-slots
+@@ -109,8 +109,8 @@ FAILED_SLOTS_DIR = $(RUN_DIR)/failed-slots
SYSTEMD_DIR=/lib/systemd/system
LIB_DIR=/usr/libexec/mdadm
diff --git a/meta/recipes-extended/mdadm/files/0001-Makefile-install-mdcheck.patch b/meta/recipes-extended/mdadm/files/0001-Makefile-install-mdcheck.patch
index 3f76ef54d8..a4be1aa8a1 100644
--- a/meta/recipes-extended/mdadm/files/0001-Makefile-install-mdcheck.patch
+++ b/meta/recipes-extended/mdadm/files/0001-Makefile-install-mdcheck.patch
@@ -1,4 +1,4 @@
-From 97e776724ab9763c5bca9816370bb1635b7a8232 Mon Sep 17 00:00:00 2001
+From 0be066d57a7dd1aead5488d0a095863608f2e559 Mon Sep 17 00:00:00 2001
From: Chen Qi <Qi.Chen@windriver.com>
Date: Tue, 25 Jan 2022 16:25:01 +0800
Subject: [PATCH] Makefile: install mdcheck
@@ -14,10 +14,10 @@ Signed-off-by: Chen Qi <Qi.Chen@windriver.com>
1 file changed, 1 insertion(+)
diff --git a/Makefile b/Makefile
-index 2a51d813..db40c7fd 100644
+index 1141971..f4059e2 100644
--- a/Makefile
+++ b/Makefile
-@@ -303,6 +303,7 @@ install-systemd: systemd/mdmon@.service
+@@ -325,6 +325,7 @@ install-systemd: systemd/mdmon@.service
install-bin: mdadm mdmon
$(INSTALL) -D $(STRIP) -m 755 mdadm $(DESTDIR)$(BINDIR)/mdadm
$(INSTALL) -D $(STRIP) -m 755 mdmon $(DESTDIR)$(BINDIR)/mdmon
@@ -25,6 +25,3 @@ index 2a51d813..db40c7fd 100644
uninstall:
rm -f $(DESTDIR)$(MAN8DIR)/mdadm.8 $(DESTDIR)$(MAN8DIR)/mdmon.8 $(DESTDIR)$(MAN4DIR)/md.4 $(DESTDIR)$(MAN5DIR)/mdadm.conf.5 $(DESTDIR)$(BINDIR)/mdadm
---
-2.17.1
-
diff --git a/meta/recipes-extended/mdadm/files/0001-Revert-tests-wait-for-complete-rebuild-in-integrity-.patch b/meta/recipes-extended/mdadm/files/0001-Revert-tests-wait-for-complete-rebuild-in-integrity-.patch
deleted file mode 100644
index fb4bc165fb..0000000000
--- a/meta/recipes-extended/mdadm/files/0001-Revert-tests-wait-for-complete-rebuild-in-integrity-.patch
+++ /dev/null
@@ -1,53 +0,0 @@
-From 02a41c3fd560fb5250186dd6b3cff6b21daa2e2b Mon Sep 17 00:00:00 2001
-From: Mingli Yu <Mingli.Yu@windriver.com>
-Date: Mon, 15 Jul 2019 14:12:24 +0800
-Subject: [PATCH] Revert "tests: wait for complete rebuild in integrity checks"
-
-This reverts commit e2a8e9dcf67a28bc722fa5ab2c49b0bc452d4d74
-as the logic "check state 'U*'" will make the test enters
-infinite loop especially in qemu env, so revert it to
-use the previous logic "check wait" which also used
-commonly by other tests such as tests/02r5grow, tests/07revert-grow
-and etc.
-
-Upstream-Status: Submitted [https://marc.info/?l=linux-raid&m=156317157314030&w=2]
-
-Signed-off-by: Mingli Yu <Mingli.Yu@windriver.com>
----
- tests/01r5integ | 2 +-
- tests/01raid6integ | 4 ++--
- 2 files changed, 3 insertions(+), 3 deletions(-)
-
-diff --git a/tests/01r5integ b/tests/01r5integ
-index 48676a2..ffb30ce 100644
---- a/tests/01r5integ
-+++ b/tests/01r5integ
-@@ -27,7 +27,7 @@ do
- exit 1
- fi
- mdadm $md0 -a $i
-- while ! (check state 'U*'); do check wait; sleep 0.2; done
-+ check wait
- done
- mdadm -S $md0
- done
-diff --git a/tests/01raid6integ b/tests/01raid6integ
-index 12f4d81..c6fcdae 100644
---- a/tests/01raid6integ
-+++ b/tests/01raid6integ
-@@ -47,10 +47,10 @@ do
- exit 1
- fi
- mdadm $md0 -a $first
-- while ! (check state 'U*_U*'); do check wait; sleep 0.2; done
-+ check wait
- done
- mdadm $md0 -a $second
-- while ! (check state 'U*'); do check wait; sleep 0.2; done
-+ check wait
- totest="$totest $second"
- done
- mdadm -S $md0
---
-2.7.4
-
diff --git a/meta/recipes-extended/mdadm/files/0001-Use-CC-to-check-for-implicit-fallthrough-warning-sup.patch b/meta/recipes-extended/mdadm/files/0001-Use-CC-to-check-for-implicit-fallthrough-warning-sup.patch
index 12bf6a5920..16fdefbbd1 100644
--- a/meta/recipes-extended/mdadm/files/0001-Use-CC-to-check-for-implicit-fallthrough-warning-sup.patch
+++ b/meta/recipes-extended/mdadm/files/0001-Use-CC-to-check-for-implicit-fallthrough-warning-sup.patch
@@ -1,4 +1,4 @@
-From 37c35f94d9d95dbd2b5f8a919f5478be51453590 Mon Sep 17 00:00:00 2001
+From c29d086714b49a6d76ccca83b4a6fa2f139bad6e Mon Sep 17 00:00:00 2001
From: Khem Raj <raj.khem@gmail.com>
Date: Fri, 13 Oct 2017 10:27:34 -0700
Subject: [PATCH] Use CC to check for implicit-fallthrough warning support
@@ -10,19 +10,35 @@ cross compile used for compiling mdadm is < version 7
Signed-off-by: Khem Raj <raj.khem@gmail.com>
Upstream-Status: Pending
---
- Makefile | 2 +-
- 1 file changed, 1 insertion(+), 1 deletion(-)
+ Makefile | 6 +++---
+ 1 file changed, 3 insertions(+), 3 deletions(-)
diff --git a/Makefile b/Makefile
-index 46bf57b..a075912 100644
+index 7bfd336..9ab6a65 100644
--- a/Makefile
+++ b/Makefile
-@@ -53,7 +53,7 @@ ifdef WARN_UNUSED
- CWFLAGS += -Wp,-D_FORTIFY_SOURCE=2 -O3
+@@ -56,21 +56,21 @@ CWFLAGS += -Wp -O3
endif
--FALLTHROUGH := $(shell gcc -v --help 2>&1 | grep "implicit-fallthrough" | wc -l)
-+FALLTHROUGH := $(shell ${CC} -v --help 2>&1 | grep "implicit-fallthrough" | wc -l)
- ifneq "$(FALLTHROUGH)" "0"
- CWFLAGS += -Wimplicit-fallthrough=0
+ ifeq ($(origin FALLTHROUGH), undefined)
+- FALLTHROUGH := $(shell gcc -Q --help=warnings 2>&1 | grep "implicit-fallthrough" | wc -l)
++ FALLTHROUGH := $(shell ${CC} -Q --help=warnings 2>&1 | grep "implicit-fallthrough" | wc -l)
+ ifneq "$(FALLTHROUGH)" "0"
+ CWFLAGS += -Wimplicit-fallthrough=0
+ endif
endif
+
+ ifeq ($(origin FORMATOVERFLOW), undefined)
+- FORMATOVERFLOW := $(shell gcc -Q --help=warnings 2>&1 | grep "format-overflow" | wc -l)
++ FORMATOVERFLOW := $(shell ${CC} -Q --help=warnings 2>&1 | grep "format-overflow" | wc -l)
+ ifneq "$(FORMATOVERFLOW)" "0"
+ CWFLAGS += -Wformat-overflow
+ endif
+ endif
+
+ ifeq ($(origin STRINGOPOVERFLOW), undefined)
+- STRINGOPOVERFLOW := $(shell gcc -Q --help=warnings 2>&1 | grep "stringop-overflow" | wc -l)
++ STRINGOPOVERFLOW := $(shell ${CC} -Q --help=warnings 2>&1 | grep "stringop-overflow" | wc -l)
+ ifneq "$(STRINGOPOVERFLOW)" "0"
+ CWFLAGS += -Wstringop-overflow
+ endif
diff --git a/meta/recipes-extended/mdadm/files/0001-fix-gcc-8-format-truncation-warning.patch b/meta/recipes-extended/mdadm/files/0001-fix-gcc-8-format-truncation-warning.patch
index fa9c8cc835..3cf295106f 100644
--- a/meta/recipes-extended/mdadm/files/0001-fix-gcc-8-format-truncation-warning.patch
+++ b/meta/recipes-extended/mdadm/files/0001-fix-gcc-8-format-truncation-warning.patch
@@ -1,4 +1,4 @@
-From 3158d3788c2e0fb75ace2c89840bd8a977fb4cb0 Mon Sep 17 00:00:00 2001
+From 78e5bb08971a5644a56af60d51ef35e13522e811 Mon Sep 17 00:00:00 2001
From: Hongxu Jia <hongxu.jia@windriver.com>
Date: Fri, 14 Dec 2018 15:12:31 +0800
Subject: [PATCH] fix gcc-8 format-truncation warning
@@ -22,7 +22,7 @@ Signed-off-by: Hongxu Jia <hongxu.jia@windriver.com>
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/super0.c b/super0.c
-index 756cab5..12c28da 100644
+index a7c5f81..a79b6bd 100644
--- a/super0.c
+++ b/super0.c
@@ -229,7 +229,7 @@ static void examine_super0(struct supertype *st, char *homehost)
@@ -34,6 +34,3 @@ index 756cab5..12c28da 100644
int wonly, failfast;
if (d>=0) dp = &sb->disks[d];
else dp = &sb->this_disk;
---
-2.7.4
-
diff --git a/meta/recipes-extended/mdadm/files/0001-include-libgen.h-for-basename-API.patch b/meta/recipes-extended/mdadm/files/0001-include-libgen.h-for-basename-API.patch
new file mode 100644
index 0000000000..70be43c15e
--- /dev/null
+++ b/meta/recipes-extended/mdadm/files/0001-include-libgen.h-for-basename-API.patch
@@ -0,0 +1,56 @@
+From 7759ceda978aba38861d4846d0c1657465b72f04 Mon Sep 17 00:00:00 2001
+From: Khem Raj <raj.khem@gmail.com>
+Date: Sun, 24 Mar 2024 23:13:32 -0700
+Subject: [PATCH] include libgen.h for basename API
+
+Musl does no more provide it via string.h therefore builds with newer
+compilers e.g. clang-18 fails due to missing prototype for basename
+therefore add libgen.h to included headers list
+
+Upstream-Status: Submitted [https://lore.kernel.org/linux-raid/20240325061537.275811-1-raj.khem@gmail.com/T/#u]
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+---
+ Monitor.c | 1 +
+ platform-intel.c | 1 +
+ super-intel.c | 1 +
+ 3 files changed, 3 insertions(+)
+
+diff --git a/Monitor.c b/Monitor.c
+index 824a69f..e3942e1 100644
+--- a/Monitor.c
++++ b/Monitor.c
+@@ -26,6 +26,7 @@
+ #include "udev.h"
+ #include "md_p.h"
+ #include "md_u.h"
++#include <libgen.h>
+ #include <sys/wait.h>
+ #include <limits.h>
+ #include <syslog.h>
+diff --git a/platform-intel.c b/platform-intel.c
+index ac282bc..5d6687d 100644
+--- a/platform-intel.c
++++ b/platform-intel.c
+@@ -19,6 +19,7 @@
+ #include "mdadm.h"
+ #include "platform-intel.h"
+ #include "probe_roms.h"
++#include <libgen.h>
+ #include <stdio.h>
+ #include <stdlib.h>
+ #include <string.h>
+diff --git a/super-intel.c b/super-intel.c
+index dbea235..881dbda 100644
+--- a/super-intel.c
++++ b/super-intel.c
+@@ -23,6 +23,7 @@
+ #include "dlink.h"
+ #include "sha1.h"
+ #include "platform-intel.h"
++#include <libgen.h>
+ #include <values.h>
+ #include <scsi/sg.h>
+ #include <ctype.h>
+--
+2.44.0
+
diff --git a/meta/recipes-extended/mdadm/files/0001-mdadm-add-option-y-for-use-syslog-to-recive-event-re.patch b/meta/recipes-extended/mdadm/files/0001-mdadm-add-option-y-for-use-syslog-to-recive-event-re.patch
index e00287cab1..f224d0008d 100644
--- a/meta/recipes-extended/mdadm/files/0001-mdadm-add-option-y-for-use-syslog-to-recive-event-re.patch
+++ b/meta/recipes-extended/mdadm/files/0001-mdadm-add-option-y-for-use-syslog-to-recive-event-re.patch
@@ -1,4 +1,4 @@
-From 5fdc0173cb4fcf8656f0889ad364d2549795607f Mon Sep 17 00:00:00 2001
+From c27e128fdc062ec3fcdf7b48a8c5078615c538df Mon Sep 17 00:00:00 2001
From: Changqing Li <changqing.li@windriver.com>
Date: Mon, 1 Jul 2019 11:34:49 +0800
Subject: [PATCH] mdadm: add option -y for use syslog to recive event report
@@ -14,15 +14,12 @@ Signed-off-by: Changqing Li <changqing.li@windriver.com>
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/systemd/mdmonitor.service b/systemd/mdmonitor.service
-index 46f7b88..3fc4687 100644
+index 9c36478..d289846 100644
--- a/systemd/mdmonitor.service
+++ b/systemd/mdmonitor.service
-@@ -13,4 +13,4 @@ DefaultDependencies=no
+@@ -14,4 +14,4 @@ Documentation=man:mdadm(8)
Environment= MDADM_MONITOR_ARGS=--scan
EnvironmentFile=-/run/sysconfig/mdadm
ExecStartPre=-/usr/lib/mdadm/mdadm_env.sh
-ExecStart=BINDIR/mdadm --monitor $MDADM_MONITOR_ARGS
+ExecStart=BINDIR/mdadm --monitor -y $MDADM_MONITOR_ARGS
---
-2.7.4
-
diff --git a/meta/recipes-extended/mdadm/files/0001-mdadm-skip-test-11spare-migration.patch b/meta/recipes-extended/mdadm/files/0001-mdadm-skip-test-11spare-migration.patch
deleted file mode 100644
index 84517caade..0000000000
--- a/meta/recipes-extended/mdadm/files/0001-mdadm-skip-test-11spare-migration.patch
+++ /dev/null
@@ -1,43 +0,0 @@
-From 1b83afa7c3121f819e72ea74883f8b6d61d6548e Mon Sep 17 00:00:00 2001
-From: Changqing Li <changqing.li@windriver.com>
-Date: Fri, 6 Sep 2019 10:59:02 +0800
-Subject: [PATCH] mdadm: skip test 11spare-migration
-
-11spare-migration is a test series to check mdadm Monitor migrates spares
-according to rules in /etc/mdadm.conf defined by POLICY lines.
-
-[snip]
-for scan in no yes; do
- for platform in 1.2 imsm; do
- try
- done
-done
-[snip]
-
-"try" includes near 20 sub testcase, so there are nearly 80 subcases need to run,
-so it will take long time than ptest-runner timeout limit, skip it as workaround.
-
-Upstream-Status: Inappropriate [oe-specific]
-
-Signed-off-by: Changqing Li <changqing.li@windriver.com>
----
- test | 3 +++
- 1 file changed, 3 insertions(+)
-
-diff --git a/test b/test
-index 711a3c7..880dd1d 100755
---- a/test
-+++ b/test
-@@ -272,6 +272,9 @@ main() {
- else
- for script in $testdir/$prefix $testdir/$prefix*[^~]
- do
-+ if [ $script == "$testdir/11spare-migration" ];then
-+ continue
-+ fi
- do_test $script
- done
- fi
---
-2.7.4
-
diff --git a/meta/recipes-extended/mdadm/files/0001-mdadm.h-Undefine-dprintf-before-redefining.patch b/meta/recipes-extended/mdadm/files/0001-mdadm.h-Undefine-dprintf-before-redefining.patch
index a1e7e59323..52daea2a49 100644
--- a/meta/recipes-extended/mdadm/files/0001-mdadm.h-Undefine-dprintf-before-redefining.patch
+++ b/meta/recipes-extended/mdadm/files/0001-mdadm.h-Undefine-dprintf-before-redefining.patch
@@ -1,4 +1,4 @@
-From b431cb4e1ed060122fa300dc0008f74080d38f73 Mon Sep 17 00:00:00 2001
+From 4dd5c2659722b44409cee28b4cea68cdeaa1f987 Mon Sep 17 00:00:00 2001
From: Khem Raj <raj.khem@gmail.com>
Date: Mon, 9 May 2016 22:03:57 +0000
Subject: [PATCH] mdadm.h: Undefine dprintf before redefining
@@ -20,10 +20,10 @@ Upstream-Status: Pending
1 file changed, 2 insertions(+)
diff --git a/mdadm.h b/mdadm.h
-index 387e681..bb943bf 100644
+index 1f28b3e..04996e2 100644
--- a/mdadm.h
+++ b/mdadm.h
-@@ -1649,11 +1649,13 @@ static inline char *to_subarray(struct mdstat_ent *ent, char *container)
+@@ -1869,11 +1869,13 @@ static inline sighandler_t signal_s(int sig, sighandler_t handler)
}
#ifdef DEBUG
diff --git a/meta/recipes-extended/mdadm/files/0001-restripe.c-Use-_FILE_OFFSET_BITS-to-enable-largefile.patch b/meta/recipes-extended/mdadm/files/0001-restripe.c-Use-_FILE_OFFSET_BITS-to-enable-largefile.patch
new file mode 100644
index 0000000000..13435ee418
--- /dev/null
+++ b/meta/recipes-extended/mdadm/files/0001-restripe.c-Use-_FILE_OFFSET_BITS-to-enable-largefile.patch
@@ -0,0 +1,155 @@
+From aa86de05cd6a75222b38e0789ac96fe00f705430 Mon Sep 17 00:00:00 2001
+From: Khem Raj <raj.khem@gmail.com>
+Date: Thu, 10 Nov 2022 12:31:22 -0800
+Subject: [PATCH] restripe.c: Use _FILE_OFFSET_BITS to enable largefile support
+
+Instead of using the lseek64 and friends, its better to enable it via
+the feature macro _FILE_OFFSET_BITS = 64 and let the C library deal with
+the width of types
+
+Upstream-Status: Submitted [https://lore.kernel.org/linux-raid/20221110225546.337164-1-raj.khem@gmail.com/]
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+---
+ raid6check.c | 11 +++++++----
+ restripe.c | 13 ++++++++-----
+ swap_super.c | 13 +++++++------
+ 3 files changed, 22 insertions(+), 15 deletions(-)
+
+diff --git a/raid6check.c b/raid6check.c
+index 9947776..8e7f142 100644
+--- a/raid6check.c
++++ b/raid6check.c
+@@ -22,6 +22,9 @@
+ * Based on "restripe.c" from "mdadm" codebase
+ */
+
++/* Enable largefile support */
++#define _FILE_OFFSET_BITS 64
++
+ #include "mdadm.h"
+ #include <stdint.h>
+ #include <sys/mman.h>
+@@ -284,9 +287,9 @@ int manual_repair(int chunk_size, int syndrome_disks,
+ }
+
+ int write_res1, write_res2;
+- off64_t seek_res;
++ off_t seek_res;
+
+- seek_res = lseek64(source[fd1],
++ seek_res = lseek(source[fd1],
+ offsets[fd1] + start * chunk_size, SEEK_SET);
+ if (seek_res < 0) {
+ fprintf(stderr, "lseek failed for failed_disk1\n");
+@@ -294,7 +297,7 @@ int manual_repair(int chunk_size, int syndrome_disks,
+ }
+ write_res1 = write(source[fd1], blocks[failed_slot1], chunk_size);
+
+- seek_res = lseek64(source[fd2],
++ seek_res = lseek(source[fd2],
+ offsets[fd2] + start * chunk_size, SEEK_SET);
+ if (seek_res < 0) {
+ fprintf(stderr, "lseek failed for failed_disk2\n");
+@@ -379,7 +382,7 @@ int check_stripes(struct mdinfo *info, int *source, unsigned long long *offsets,
+ goto exitCheck;
+ }
+ for (i = 0 ; i < raid_disks ; i++) {
+- off64_t seek_res = lseek64(source[i], offsets[i] + start * chunk_size,
++ off_t seek_res = lseek(source[i], offsets[i] + start * chunk_size,
+ SEEK_SET);
+ if (seek_res < 0) {
+ fprintf(stderr, "lseek to source %d failed\n", i);
+diff --git a/restripe.c b/restripe.c
+index a7a7229..1c03577 100644
+--- a/restripe.c
++++ b/restripe.c
+@@ -22,6 +22,9 @@
+ * Email: <neilb@suse.de>
+ */
+
++/* Enable largefile support */
++#define _FILE_OFFSET_BITS 64
++
+ #include "mdadm.h"
+ #include <stdint.h>
+
+@@ -581,7 +584,7 @@ int save_stripes(int *source, unsigned long long *offsets,
+ raid_disks, level, layout);
+ if (dnum < 0) abort();
+ if (source[dnum] < 0 ||
+- lseek64(source[dnum],
++ lseek(source[dnum],
+ offsets[dnum] + offset, 0) < 0 ||
+ read(source[dnum], buf+disk * chunk_size,
+ chunk_size) != chunk_size) {
+@@ -754,8 +757,8 @@ int restore_stripes(int *dest, unsigned long long *offsets,
+ raid_disks, level, layout);
+ if (src_buf == NULL) {
+ /* read from file */
+- if (lseek64(source, read_offset, 0) !=
+- (off64_t)read_offset) {
++ if (lseek(source, read_offset, 0) !=
++ (off_t)read_offset) {
+ rv = -1;
+ goto abort;
+ }
+@@ -816,7 +819,7 @@ int restore_stripes(int *dest, unsigned long long *offsets,
+ }
+ for (i=0; i < raid_disks ; i++)
+ if (dest[i] >= 0) {
+- if (lseek64(dest[i],
++ if (lseek(dest[i],
+ offsets[i]+offset, 0) < 0) {
+ rv = -1;
+ goto abort;
+@@ -866,7 +869,7 @@ int test_stripes(int *source, unsigned long long *offsets,
+ int disk;
+
+ for (i = 0 ; i < raid_disks ; i++) {
+- if ((lseek64(source[i], offsets[i]+start, 0) < 0) ||
++ if ((lseek(source[i], offsets[i]+start, 0) < 0) ||
+ (read(source[i], stripes[i], chunk_size) !=
+ chunk_size)) {
+ free(q);
+diff --git a/swap_super.c b/swap_super.c
+index b6db574..18c89e2 100644
+--- a/swap_super.c
++++ b/swap_super.c
+@@ -1,3 +1,6 @@
++/* Enable largefile support */
++#define _FILE_OFFSET_BITS 64
++
+ #include <unistd.h>
+ #include <stdlib.h>
+ #include <fcntl.h>
+@@ -16,8 +19,6 @@
+
+ #define MD_NEW_SIZE_SECTORS(x) ((x & ~(MD_RESERVED_SECTORS - 1)) - MD_RESERVED_SECTORS)
+
+-extern long long lseek64(int, long long, int);
+-
+ int main(int argc, char *argv[])
+ {
+ int fd, i;
+@@ -38,8 +39,8 @@ int main(int argc, char *argv[])
+ exit(1);
+ }
+ offset = MD_NEW_SIZE_SECTORS(size) * 512LL;
+- if (lseek64(fd, offset, 0) < 0LL) {
+- perror("lseek64");
++ if (lseek(fd, offset, 0) < 0LL) {
++ perror("lseek");
+ exit(1);
+ }
+ if (read(fd, super, 4096) != 4096) {
+@@ -68,8 +69,8 @@ int main(int argc, char *argv[])
+ super[32*4+10*4 +i] = t;
+ }
+
+- if (lseek64(fd, offset, 0) < 0LL) {
+- perror("lseek64");
++ if (lseek(fd, offset, 0) < 0LL) {
++ perror("lseek");
+ exit(1);
+ }
+ if (write(fd, super, 4096) != 4096) {
diff --git a/meta/recipes-extended/mdadm/files/0001-util.c-add-limits.h-include-for-NAME_MAX-definition.patch b/meta/recipes-extended/mdadm/files/0001-util.c-add-limits.h-include-for-NAME_MAX-definition.patch
new file mode 100644
index 0000000000..0916efafdf
--- /dev/null
+++ b/meta/recipes-extended/mdadm/files/0001-util.c-add-limits.h-include-for-NAME_MAX-definition.patch
@@ -0,0 +1,24 @@
+From 8fa7d3cb96e8833743b635fb198675ad6c020b6e Mon Sep 17 00:00:00 2001
+From: Alexander Kanavin <alex@linutronix.de>
+Date: Tue, 12 Mar 2024 10:51:51 +0100
+Subject: [PATCH] util.c: add limits.h include for NAME_MAX definition
+
+Upstream-Status: Submitted [mariusz.tkaczyk@linux.intel.com,linux-raid@vger.kernel.org]
+Signed-off-by: Alexander Kanavin <alex@linutronix.de>
+---
+ util.c | 2 +-
+ 1 file changed, 1 insertion(+), 1 deletion(-)
+
+diff --git a/util.c b/util.c
+index b145447..a9cb6c4 100644
+--- a/util.c
++++ b/util.c
+@@ -36,7 +36,7 @@
+ #include <ctype.h>
+ #include <dirent.h>
+ #include <dlfcn.h>
+-
++#include <limits.h>
+
+ /*
+ * following taken from linux/blkpg.h because they aren't
diff --git a/meta/recipes-extended/mdadm/files/0002-Create.c-include-linux-falloc.h-for-FALLOC_FL_ZERO_R.patch b/meta/recipes-extended/mdadm/files/0002-Create.c-include-linux-falloc.h-for-FALLOC_FL_ZERO_R.patch
new file mode 100644
index 0000000000..145c65477a
--- /dev/null
+++ b/meta/recipes-extended/mdadm/files/0002-Create.c-include-linux-falloc.h-for-FALLOC_FL_ZERO_R.patch
@@ -0,0 +1,27 @@
+From a22b2345b9773d362acd85dd4c4a6a3cda9100d4 Mon Sep 17 00:00:00 2001
+From: Alexander Kanavin <alex@linutronix.de>
+Date: Tue, 12 Mar 2024 10:54:08 +0100
+Subject: [PATCH] Create.c: include linux/falloc.h for FALLOC_FL_ZERO_RANGE
+ definition
+
+glibc provides this through fcntl.h but musl does not - should
+be reported and fixed there.
+
+Upstream-Status: Inappropriate [musl-specific issue]
+Signed-off-by: Alexander Kanavin <alex@linutronix.de>
+---
+ Create.c | 1 +
+ 1 file changed, 1 insertion(+)
+
+diff --git a/Create.c b/Create.c
+index 8082f54..7071f0a 100644
+--- a/Create.c
++++ b/Create.c
+@@ -31,6 +31,7 @@
+ #include <signal.h>
+ #include <sys/signalfd.h>
+ #include <sys/wait.h>
++#include <linux/falloc.h>
+
+ static int round_size_and_verify(unsigned long long *size, int chunk)
+ {
diff --git a/meta/recipes-extended/mdadm/files/debian-no-Werror.patch b/meta/recipes-extended/mdadm/files/debian-no-Werror.patch
index fa90647489..b758fcd0ab 100644
--- a/meta/recipes-extended/mdadm/files/debian-no-Werror.patch
+++ b/meta/recipes-extended/mdadm/files/debian-no-Werror.patch
@@ -1,4 +1,4 @@
-From adb75f0bdec97dbe4aa15cc988d349775f7995ff Mon Sep 17 00:00:00 2001
+From 319b3191f088cea7b0fb6038ab7625d5e049dcf7 Mon Sep 17 00:00:00 2001
From: "martin f. krafft" <madduck@debian.org>
Date: Mon, 3 Jan 2022 19:14:12 +0000
Subject: [PATCH] Remove -Werror from compiler flags
@@ -10,21 +10,20 @@ use it to beautify the code, but remove it for out builds.
Signed-off-by: martin f. krafft <madduck@debian.org>
Upstream-Status: Pending
-
---
Makefile | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/Makefile b/Makefile
-index 716c97c..40354ea 100644
+index 9ab6a65..1141971 100644
--- a/Makefile
+++ b/Makefile
@@ -50,7 +50,7 @@ ifeq ($(origin CC),default)
CC := $(CROSS_COMPILE)gcc
endif
CXFLAGS ?= -ggdb
--CWFLAGS = -Wall -Werror -Wstrict-prototypes -Wextra -Wno-unused-parameter
-+CWFLAGS = -Wall -Wstrict-prototypes -Wextra -Wno-unused-parameter
+-CWFLAGS ?= -Wall -Werror -Wstrict-prototypes -Wextra -Wno-unused-parameter -Wformat -Wformat-security -Werror=format-security -fstack-protector-strong -fPIE -Warray-bounds
++CWFLAGS ?= -Wall -Wstrict-prototypes -Wextra -Wno-unused-parameter -Wformat -Wformat-security -Werror=format-security -fstack-protector-strong -fPIE -Warray-bounds
ifdef WARN_UNUSED
- CWFLAGS += -Wp,-D_FORTIFY_SOURCE=2 -O3
+ CWFLAGS += -Wp -O3
endif
diff --git a/meta/recipes-extended/mdadm/files/include_sysmacros.patch b/meta/recipes-extended/mdadm/files/include_sysmacros.patch
deleted file mode 100644
index 8a1d8342d8..0000000000
--- a/meta/recipes-extended/mdadm/files/include_sysmacros.patch
+++ /dev/null
@@ -1,14 +0,0 @@
-include sys/sysmacros.h for major/minor macro definitions
-
-Upstream-Status: Pending
-Signed-off-by: Khem Raj <raj.khem@gmail.com>
---- a/mdadm.h
-+++ b/mdadm.h
-@@ -35,6 +35,7 @@ extern __off64_t lseek64 __P ((int __fd,
-
- #include <sys/types.h>
- #include <sys/stat.h>
-+#include <sys/sysmacros.h>
- #include <stdint.h>
- #include <stdlib.h>
- #include <time.h>
diff --git a/meta/recipes-extended/mdadm/files/mdadm-3.3.2_x32_abi_time_t.patch b/meta/recipes-extended/mdadm/files/mdadm-3.3.2_x32_abi_time_t.patch
index 7a2c888701..ecd1f037d0 100644
--- a/meta/recipes-extended/mdadm/files/mdadm-3.3.2_x32_abi_time_t.patch
+++ b/meta/recipes-extended/mdadm/files/mdadm-3.3.2_x32_abi_time_t.patch
@@ -1,4 +1,4 @@
-From e37f7f6a0f1ef1b594574d11a8b90b8c861d047b Mon Sep 17 00:00:00 2001
+From ca91d9fc07943f209988411f2596e4b69828f208 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?An=C3=ADbal=20Lim=C3=B3n?= <anibal.limon@linux.intel.com>
Date: Sun, 15 Mar 2015 09:02:14 +0000
Subject: [PATCH] mdadm: Fix build in x32 ABI
@@ -12,16 +12,15 @@ data type in x32 ABI is long long int.
Upstream-Status: Pending
Signed-off-by: Aníbal Limón <anibal.limon@linux.intel.com>
-
---
monitor.c | 5 ++++-
1 file changed, 4 insertions(+), 1 deletion(-)
diff --git a/monitor.c b/monitor.c
-index 81537ed..7c33382 100644
+index 4acec67..8dcdfd6 100644
--- a/monitor.c
+++ b/monitor.c
-@@ -445,9 +445,12 @@ static int read_and_act(struct active_array *a, fd_set *fds)
+@@ -447,9 +447,12 @@ static int read_and_act(struct active_array *a, fd_set *fds)
if (FD_ISSET(mdi->bb_fd, fds))
check_for_cleared_bb(a, mdi);
}
diff --git a/meta/recipes-extended/mdadm/files/run-ptest b/meta/recipes-extended/mdadm/files/run-ptest
index fae8071d43..c3f201491e 100644
--- a/meta/recipes-extended/mdadm/files/run-ptest
+++ b/meta/recipes-extended/mdadm/files/run-ptest
@@ -1,7 +1,10 @@
#!/bin/sh
-mkdir -p /mdadm-testing-dir
-# make the test continue to execute even one fail
-dir=. ./test --keep-going --disable-integrity
+logdir=$PWD/logs
+targetdir=/mdadm-testing-dir
+
+rm -rf $targetdir $logdir
+mkdir -p $targetdir $logdir
-rm -rf /mdadm-testing-dir/*
+# make the test continue to execute even one fail
+./test --keep-going --disable-integrity --skip-broken --save-logs --logdir=$logdir
diff --git a/meta/recipes-extended/mdadm/mdadm_4.2.bb b/meta/recipes-extended/mdadm/mdadm_4.2.bb
deleted file mode 100644
index 19035caaec..0000000000
--- a/meta/recipes-extended/mdadm/mdadm_4.2.bb
+++ /dev/null
@@ -1,108 +0,0 @@
-SUMMARY = "Tool for managing software RAID under Linux"
-HOMEPAGE = "http://www.kernel.org/pub/linux/utils/raid/mdadm/"
-DESCRIPTION = "mdadm is a Linux utility used to manage and monitor software RAID devices."
-
-# Some files are GPL-2.0-only while others are GPL-2.0-or-later.
-LICENSE = "GPL-2.0-only & GPL-2.0-or-later"
-LIC_FILES_CHKSUM = "file://COPYING;md5=b234ee4d69f5fce4486a80fdaf4a4263 \
- file://mdmon.c;beginline=4;endline=18;md5=af7d8444d9c4d3e5c7caac0d9d34039d \
- file://mdadm.h;beglinlne=4;endline=22;md5=462bc9936ac0d3da110191a3f9994161"
-
-
-SRC_URI = "${KERNELORG_MIRROR}/linux/utils/raid/mdadm/${BPN}-${PV}.tar.xz \
- file://run-ptest \
- file://mdadm-3.3.2_x32_abi_time_t.patch \
- file://0001-mdadm.h-Undefine-dprintf-before-redefining.patch \
- file://0001-Fix-the-path-of-corosync-and-dlm-header-files-check.patch \
- file://0001-Use-CC-to-check-for-implicit-fallthrough-warning-sup.patch \
- file://0001-fix-gcc-8-format-truncation-warning.patch \
- file://debian-no-Werror.patch \
- file://0001-Revert-tests-wait-for-complete-rebuild-in-integrity-.patch \
- file://mdadm.init \
- file://0001-mdadm-add-option-y-for-use-syslog-to-recive-event-re.patch \
- file://include_sysmacros.patch \
- file://0001-mdadm-skip-test-11spare-migration.patch \
- file://0001-Fix-parsing-of-r-in-monitor-manager-mode.patch \
- file://0001-Makefile-install-mdcheck.patch \
- "
-
-SRC_URI[sha256sum] = "461c215670864bb74a4d1a3620684aa2b2f8296dffa06743f26dda5557acf01d"
-
-inherit autotools-brokensep ptest systemd
-
-DEPENDS = "udev"
-
-SYSTEMD_SERVICE:${PN} = "mdmonitor.service"
-SYSTEMD_AUTO_ENABLE = "disable"
-
-# PPC64 and MIPS64 uses long long for u64 in the kernel, but powerpc's asm/types.h
-# prevents 64-bit userland from seeing this definition, instead defaulting
-# to u64 == long in userspace. Define __SANE_USERSPACE_TYPES__ to get
-# int-ll64.h included
-CFLAGS:append:powerpc64 = ' -D__SANE_USERSPACE_TYPES__'
-CFLAGS:append:mipsarchn64 = ' -D__SANE_USERSPACE_TYPES__'
-CFLAGS:append:mipsarchn32 = ' -D__SANE_USERSPACE_TYPES__'
-
-EXTRA_OEMAKE = 'CHECK_RUN_DIR=0 CXFLAGS="${CFLAGS}" SYSTEMD_DIR=${systemd_system_unitdir} \
- BINDIR="${base_sbindir}" UDEVDIR="${nonarch_base_libdir}/udev"'
-
-DEBUG_OPTIMIZATION:append = " -Wno-error"
-
-do_compile() {
- oe_runmake SYSROOT="${STAGING_DIR_TARGET}"
-}
-
-do_install() {
- export STRIP=""
- autotools_do_install
-}
-
-do_install:append() {
- install -d ${D}/${sysconfdir}/
- install -m 644 ${S}/mdadm.conf-example ${D}${sysconfdir}/mdadm.conf
- install -d ${D}/${sysconfdir}/init.d
- install -m 755 ${WORKDIR}/mdadm.init ${D}${sysconfdir}/init.d/mdmonitor
-}
-
-do_install:append() {
- oe_runmake install-systemd DESTDIR=${D}
-}
-
-do_compile_ptest() {
- oe_runmake test
-}
-
-do_install_ptest() {
- cp -R --no-dereference --preserve=mode,links -v ${S}/tests ${D}${PTEST_PATH}/tests
- cp ${S}/test ${D}${PTEST_PATH}
- sed -e 's!sleep 0.*!sleep 1!g; s!/var/tmp!/mdadm-testing-dir!g' -i ${D}${PTEST_PATH}/test
- sed -e 's!/var/tmp!/mdadm-testing-dir!g' -i ${D}${PTEST_PATH}/tests/*
- sed -i -e '/echo -ne "$_script... "/d' \
- -e 's/echo "succeeded"/echo -e "PASS: $_script"/g' \
- -e '/save_log fail/N; /_fail=1/i\\t\t\techo -ne "FAIL: $_script"' \
- -e '/die "dmesg prints errors when testing $_basename!"/i\\t\t\t\techo -ne "FAIL: $_script" &&' \
- ${D}${PTEST_PATH}/test
-
- chmod +x ${D}${PTEST_PATH}/test
-
- ln -s ${base_sbindir}/mdadm ${D}${PTEST_PATH}/mdadm
- for prg in test_stripe swap_super raid6check
- do
- install -D -m 755 $prg ${D}${PTEST_PATH}/
- done
-}
-
-RDEPENDS:${PN} += "bash"
-RDEPENDS:${PN}-ptest += "bash e2fsprogs-mke2fs"
-RRECOMMENDS:${PN}-ptest += " \
- coreutils \
- util-linux \
- kernel-module-loop \
- kernel-module-linear \
- kernel-module-raid0 \
- kernel-module-raid1 \
- kernel-module-raid10 \
- kernel-module-raid456 \
-"
-
-FILES:${PN} += "${systemd_unitdir}/*"
diff --git a/meta/recipes-extended/mdadm/mdadm_4.3.bb b/meta/recipes-extended/mdadm/mdadm_4.3.bb
new file mode 100644
index 0000000000..228fc6f84e
--- /dev/null
+++ b/meta/recipes-extended/mdadm/mdadm_4.3.bb
@@ -0,0 +1,127 @@
+SUMMARY = "Tool for managing software RAID under Linux"
+HOMEPAGE = "http://www.kernel.org/pub/linux/utils/raid/mdadm/"
+DESCRIPTION = "mdadm is a Linux utility used to manage and monitor software RAID devices."
+
+# Some files are GPL-2.0-only while others are GPL-2.0-or-later.
+LICENSE = "GPL-2.0-only & GPL-2.0-or-later"
+LIC_FILES_CHKSUM = "file://COPYING;md5=b234ee4d69f5fce4486a80fdaf4a4263 \
+ file://mdmon.c;beginline=4;endline=18;md5=af7d8444d9c4d3e5c7caac0d9d34039d \
+ file://mdadm.h;beglinlne=4;endline=22;md5=462bc9936ac0d3da110191a3f9994161"
+
+
+SRC_URI = "${KERNELORG_MIRROR}/linux/utils/raid/mdadm/${BPN}-${PV}.tar.xz \
+ file://run-ptest \
+ file://mdadm-3.3.2_x32_abi_time_t.patch \
+ file://0001-mdadm.h-Undefine-dprintf-before-redefining.patch \
+ file://0001-Fix-the-path-of-corosync-and-dlm-header-files-check.patch \
+ file://0001-Use-CC-to-check-for-implicit-fallthrough-warning-sup.patch \
+ file://0001-fix-gcc-8-format-truncation-warning.patch \
+ file://debian-no-Werror.patch \
+ file://mdadm.init \
+ file://0001-mdadm-add-option-y-for-use-syslog-to-recive-event-re.patch \
+ file://0001-Makefile-install-mdcheck.patch \
+ file://0001-restripe.c-Use-_FILE_OFFSET_BITS-to-enable-largefile.patch \
+ file://0002-Create.c-include-linux-falloc.h-for-FALLOC_FL_ZERO_R.patch \
+ file://0001-util.c-add-limits.h-include-for-NAME_MAX-definition.patch \
+ file://0001-include-libgen.h-for-basename-API.patch \
+ "
+
+SRC_URI[sha256sum] = "416727ae1f1080ea6e3090cea36dd076826fc369151e36ab736557ba92196f9f"
+
+inherit autotools-brokensep ptest systemd
+
+DEPENDS = "udev"
+
+SYSTEMD_SERVICE:${PN} = "mdmonitor.service"
+SYSTEMD_AUTO_ENABLE = "disable"
+
+# PPC64 and MIPS64 uses long long for u64 in the kernel, but powerpc's asm/types.h
+# prevents 64-bit userland from seeing this definition, instead defaulting
+# to u64 == long in userspace. Define __SANE_USERSPACE_TYPES__ to get
+# int-ll64.h included
+CFLAGS:append:powerpc64 = ' -D__SANE_USERSPACE_TYPES__'
+CFLAGS:append:mipsarchn64 = ' -D__SANE_USERSPACE_TYPES__'
+CFLAGS:append:mipsarchn32 = ' -D__SANE_USERSPACE_TYPES__'
+
+EXTRA_OEMAKE = 'CHECK_RUN_DIR=0 CXFLAGS="${CFLAGS}" SYSTEMD_DIR=${systemd_system_unitdir} \
+ BINDIR="${base_sbindir}" UDEVDIR="${nonarch_base_libdir}/udev" LDFLAGS="${LDFLAGS}"'
+
+DEBUG_OPTIMIZATION:append = " -Wno-error"
+
+do_compile() {
+ oe_runmake SYSROOT="${STAGING_DIR_TARGET}"
+}
+
+do_install() {
+ export STRIP=""
+ autotools_do_install
+}
+
+do_install:append() {
+ install -d ${D}/${sysconfdir}/
+ install -m 644 ${S}/mdadm.conf-example ${D}${sysconfdir}/mdadm.conf
+ install -d ${D}/${sysconfdir}/init.d
+ install -m 755 ${WORKDIR}/mdadm.init ${D}${sysconfdir}/init.d/mdmonitor
+}
+
+do_install:append() {
+ oe_runmake install-systemd DESTDIR=${D}
+}
+
+do_compile_ptest() {
+ oe_runmake test
+}
+
+do_install_ptest() {
+ cp -R --no-dereference --preserve=mode,links -v ${S}/tests ${D}${PTEST_PATH}/tests
+ cp ${S}/test ${D}${PTEST_PATH}
+ sed -e 's!sleep 0.*!sleep 1!g; s!/var/tmp!/mdadm-testing-dir!g' -i ${D}${PTEST_PATH}/test
+ sed -i -e '/echo -ne "$_script... "/d' \
+ -e 's/echo "succeeded"/echo -e "PASS: $_script"/g' \
+ -e '/save_log fail/N; /_fail=1/i\\t\t\techo -ne "FAIL: $_script"' \
+ -e '/die "dmesg prints errors when testing $_basename!"/i\\t\t\t\techo -ne "FAIL: $_script" &&' \
+ ${D}${PTEST_PATH}/test
+
+ chmod +x ${D}${PTEST_PATH}/test
+
+ ln -s ${base_sbindir}/mdadm ${D}${PTEST_PATH}/mdadm
+ for prg in test_stripe swap_super raid6check
+ do
+ install -D -m 755 $prg ${D}${PTEST_PATH}/
+ done
+
+ # Disable tests causing intermittent autobuilder failures
+ echo "intermittent failure on autobuilder" > ${D}${PTEST_PATH}/tests/19raid6check.broken
+ echo "intermittent failure on autobuilder" > ${D}${PTEST_PATH}/tests/20raid5journal.broken
+ echo "intermittent failure on autobuilder" > ${D}${PTEST_PATH}/tests/21raid5cache.broken
+ echo "intermittent failure on autobuilder" > ${D}${PTEST_PATH}/tests/10ddf-fail-spare.broken
+ echo "intermittent failure on autobuilder" > ${D}${PTEST_PATH}/tests/10ddf-fail-stop-readd.broken
+}
+
+RDEPENDS:${PN} += "bash"
+RDEPENDS:${PN}-ptest += " \
+ bash \
+ e2fsprogs-mke2fs \
+ util-linux-lsblk \
+ util-linux-losetup \
+ util-linux-blockdev \
+ strace \
+"
+RRECOMMENDS:${PN}-ptest += " \
+ coreutils \
+ kernel-module-loop \
+ kernel-module-linear \
+ kernel-module-raid0 \
+ kernel-module-raid1 \
+ kernel-module-raid10 \
+ kernel-module-raid456 \
+"
+
+FILES:${PN} += "${systemd_unitdir}/*"
+
+# strace is not yet ported to rv32
+RDEPENDS:${PN}-ptest:remove:riscv32 = "strace"
+do_install_ptest:append:riscv32 () {
+ echo "disabled, no strace" > ${D}${PTEST_PATH}/tests/07revert-grow.broken
+ echo "disabled, no strace" > ${D}${PTEST_PATH}/tests/07revert-inplace.broken
+}
diff --git a/meta/recipes-extended/mingetty/mingetty_1.08.bb b/meta/recipes-extended/mingetty/mingetty_1.08.bb
index 7a16c65299..aa5a989df2 100644
--- a/meta/recipes-extended/mingetty/mingetty_1.08.bb
+++ b/meta/recipes-extended/mingetty/mingetty_1.08.bb
@@ -3,7 +3,6 @@ SECTION = "console/utils"
HOMEPAGE = "http://sourceforge.net/projects/mingetty/"
DESCRIPTION = "This is a small Linux console getty that is started on the Linux text console, asks for a login name and then tranfers over to login directory. Is extended to allow automatic login and starting any app."
LICENSE = "GPL-2.0-only"
-PR = "r3"
LIC_FILES_CHKSUM = "file://COPYING;md5=0c56db0143f4f80c369ee3af7425af6e"
SRC_URI = "${SOURCEFORGE_MIRROR}/${BPN}/${BP}.tar.gz"
diff --git a/meta/recipes-extended/minicom/minicom/0001-Drop-superfluous-global-variable-definitions.patch b/meta/recipes-extended/minicom/minicom/0001-Drop-superfluous-global-variable-definitions.patch
deleted file mode 100644
index 01b23898e7..0000000000
--- a/meta/recipes-extended/minicom/minicom/0001-Drop-superfluous-global-variable-definitions.patch
+++ /dev/null
@@ -1,35 +0,0 @@
-From b65152ebc03832972115e6d98e50cb6190d01793 Mon Sep 17 00:00:00 2001
-From: =?UTF-8?q?Ond=C5=99ej=20Lyson=C4=9Bk?= <olysonek@redhat.com>
-Date: Mon, 3 Feb 2020 13:18:13 +0100
-Subject: [PATCH 1/3] Drop superfluous global variable definitions
-
-The file minicom.c, by including the minicom.h header, already defines
-the global variables 'dial_user' and 'dial_pass'. The object file
-minicom.o is always linked to dial.o. Thus the definitions in dial.c
-can be dropped.
-
-This fixes linking with gcc 10 which uses -fno-common by default,
-disallowing multiple global variable definitions.
-
-Upstream-Status: Backport [https://salsa.debian.org/minicom-team/minicom/-/commit/db269bba2a68fde03f5df45ac8372a8f1248ca96]
-Signed-off-by: Khem Raj <raj.khem@gmail.com>
----
- src/dial.c | 2 --
- 1 file changed, 2 deletions(-)
-
-diff --git a/src/dial.c b/src/dial.c
-index eada5ee..d9d481f 100644
---- a/src/dial.c
-+++ b/src/dial.c
-@@ -146,8 +146,6 @@ static int newtype;
- /* Access to ".dialdir" denied? */
- static int dendd = 0;
- static char *tagged;
--char *dial_user;
--char *dial_pass;
-
- /* Change the baud rate. Treat all characters in the given array as if
- * they were key presses within the comm parameters dialog (C-A P) and
---
-2.24.1
-
diff --git a/meta/recipes-extended/minicom/minicom/0001-fix-minicom-h-v-return-value-is-not-0.patch b/meta/recipes-extended/minicom/minicom/0001-fix-minicom-h-v-return-value-is-not-0.patch
deleted file mode 100644
index 9e67126f3e..0000000000
--- a/meta/recipes-extended/minicom/minicom/0001-fix-minicom-h-v-return-value-is-not-0.patch
+++ /dev/null
@@ -1,33 +0,0 @@
-Exit normally for help/verison options
-
-If -v or -h is used for the help/version information, it is a normal exit situation,
-not an error condition. Sometimes these are used as a simple operation test of the
-resulting binary so the exit code does matter.
-
-Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
-Upstream-Status: Submitted [https://salsa.debian.org/minicom-team/minicom/-/merge_requests/14]
----
- src/minicom.c | 6 ++----
- 1 file changed, 2 insertions(+), 4 deletions(-)
-
-Index: minicom-2.8/src/minicom.c
-===================================================================
---- minicom-2.8.orig/src/minicom.c
-+++ minicom-2.8/src/minicom.c
-@@ -1257,14 +1257,14 @@ int main(int argc, char **argv)
- "modify it under the terms of the GNU General Public License\n"
- "as published by the Free Software Foundation; either version\n"
- "2 of the License, or (at your option) any later version.\n\n"));
-- exit(1);
-+ exit(0);
- break;
- case 's': /* setup mode */
- dosetup = 1;
- break;
- case 'h':
- helpthem();
-- exit(1);
-+ exit(0);
- break;
- case 'p': /* Pseudo terminal to use. */
- if (strncmp(optarg, "/dev/", 5) == 0)
diff --git a/meta/recipes-extended/minicom/minicom/0002-Drop-superfluous-global-variable-definitions.patch b/meta/recipes-extended/minicom/minicom/0002-Drop-superfluous-global-variable-definitions.patch
deleted file mode 100644
index e86b470b7e..0000000000
--- a/meta/recipes-extended/minicom/minicom/0002-Drop-superfluous-global-variable-definitions.patch
+++ /dev/null
@@ -1,37 +0,0 @@
-From 924bd2da3a00e030e29d82b74ef82900bd50b475 Mon Sep 17 00:00:00 2001
-From: =?UTF-8?q?Ond=C5=99ej=20Lyson=C4=9Bk?= <olysonek@redhat.com>
-Date: Mon, 3 Feb 2020 13:18:33 +0100
-Subject: [PATCH 2/3] Drop superfluous global variable definitions
-
-The only place where the EXTERN macro mechanism is used to define the
-global variables 'vt_outmap' and 'vt_inmap' is minicom.c (by defining
-an empty EXTERN macro and including the minicom.h header). The file
-vt100.c already defines these variables. The vt100.o object file is
-always linked to minicom.o. Thus it is safe not to define the
-variables in minicom.c and only declare them in the minicom.h header.
-
-This fixes linking with gcc 10 which uses -fno-common by default,
-disallowing multiple global variable definitions.
-
-Upstream-Status: Backport [https://salsa.debian.org/minicom-team/minicom/-/commit/c69cad5b5dda85d361a3a0c1fddc65e933f26d11]
-Signed-off-by: Khem Raj <raj.khem@gmail.com>
----
- src/minicom.h | 2 +-
- 1 file changed, 1 insertion(+), 1 deletion(-)
-
-diff --git a/src/minicom.h b/src/minicom.h
-index 061c013..0f9693b 100644
---- a/src/minicom.h
-+++ b/src/minicom.h
-@@ -141,7 +141,7 @@ EXTERN int sbcolor; /* Status Bar Background Color */
- EXTERN int st_attr; /* Status Bar attributes. */
-
- /* jl 04.09.97 conversion tables */
--EXTERN unsigned char vt_outmap[256], vt_inmap[256];
-+extern unsigned char vt_outmap[256], vt_inmap[256];
-
- /* MARK updated 02/17/95 - history buffer */
- EXTERN int num_hist_lines; /* History buffer size */
---
-2.24.1
-
diff --git a/meta/recipes-extended/minicom/minicom/0003-Drop-superfluous-global-variable-definitions.patch b/meta/recipes-extended/minicom/minicom/0003-Drop-superfluous-global-variable-definitions.patch
deleted file mode 100644
index 3225a0c32a..0000000000
--- a/meta/recipes-extended/minicom/minicom/0003-Drop-superfluous-global-variable-definitions.patch
+++ /dev/null
@@ -1,42 +0,0 @@
-From a4fc603b3641d2efe31479116eb7ba66932901c7 Mon Sep 17 00:00:00 2001
-From: =?UTF-8?q?Ond=C5=99ej=20Lyson=C4=9Bk?= <olysonek@redhat.com>
-Date: Mon, 3 Feb 2020 13:21:41 +0100
-Subject: [PATCH 3/3] Drop superfluous global variable definitions
-
-The only place where the EXTERN macro mechanism is used to define the
-global variables 'portfd_is_socket', 'portfd_is_connected' and
-'portfd_sock_addr' is minicom.c (by defining an empty EXTERN macro and
-including the minicom.h header). The source file sysdep1_s.c already
-defines these variables. The sysdep1_s.o object file is always linked
-to minicom.o. Thus it is safe to drop the definitions from minicom.c
-and only declare the variables in the minicom.h header.
-
-This fixes linking with gcc 10 which uses -fno-common by default,
-disallowing multiple global variable definitions.
-
-Upstream-Status: Backport [https://salsa.debian.org/minicom-team/minicom/-/commit/c8382374c5d340aa4115d527aed76e876ee5456b]
-Signed-off-by: Khem Raj <raj.khem@gmail.com>
----
- src/minicom.h | 6 +++---
- 1 file changed, 3 insertions(+), 3 deletions(-)
-
-diff --git a/src/minicom.h b/src/minicom.h
-index 0f9693b..1e7cb8c 100644
---- a/src/minicom.h
-+++ b/src/minicom.h
-@@ -113,9 +113,9 @@ EXTERN char *dial_user; /* Our username there */
- EXTERN char *dial_pass; /* Our password */
-
- #ifdef USE_SOCKET
--EXTERN int portfd_is_socket; /* File descriptor is a unix socket */
--EXTERN int portfd_is_connected; /* 1 if the socket is connected */
--EXTERN struct sockaddr_un portfd_sock_addr; /* the unix socket address */
-+extern int portfd_is_socket; /* File descriptor is a unix socket */
-+extern int portfd_is_connected; /* 1 if the socket is connected */
-+extern struct sockaddr_un portfd_sock_addr; /* the unix socket address */
- #define portfd_connected ((portfd_is_socket && !portfd_is_connected) \
- ? -1 : portfd)
- #else
---
-2.24.1
-
diff --git a/meta/recipes-extended/minicom/minicom/allow.to.disable.lockdev.patch b/meta/recipes-extended/minicom/minicom/allow.to.disable.lockdev.patch
deleted file mode 100644
index 39dc5c0492..0000000000
--- a/meta/recipes-extended/minicom/minicom/allow.to.disable.lockdev.patch
+++ /dev/null
@@ -1,30 +0,0 @@
-configure: Allow lockdev to be disabled
-
-When the pkgconfig dependencies may be present, it is useful to be
-able to explictly disable the lockdev dependency. This adds such an
-option.
-
-Signed-off-by: Martin Jansa <Martin.Jansa@gmail.com>
-Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
-
-Upstream-Status: Submitted [https://salsa.debian.org/minicom-team/minicom/-/merge_requests/14]
-
-Index: minicom-2.8/configure.ac
-===================================================================
---- minicom-2.8.orig/configure.ac
-+++ minicom-2.8/configure.ac
-@@ -43,7 +43,13 @@ if test "x$enable_socket" = xyes; then
- fi
-
- PKG_PROG_PKG_CONFIG
--if test -n "$PKG_CONFIG"; then
-+
-+AC_ARG_ENABLE([lockdev],
-+ AS_HELP_STRING([--enable-lockdev],
-+ [Enable lockdev support (def: enabled)]),
-+ [], [enable_lockdev="yes"])
-+
-+if test -n "$PKG_CONFIG" && test "x$enable_lockdev" = xyes; then
- PKG_CHECK_MODULES([LOCKDEV], [lockdev], AC_DEFINE([HAVE_LOCKDEV],[1],[Define if you have lockdev]),[:])
- fi
-
diff --git a/meta/recipes-extended/minicom/minicom_2.8.bb b/meta/recipes-extended/minicom/minicom_2.8.bb
deleted file mode 100644
index 2640c6b057..0000000000
--- a/meta/recipes-extended/minicom/minicom_2.8.bb
+++ /dev/null
@@ -1,28 +0,0 @@
-SUMMARY = "Text-based modem control and terminal emulation program"
-HOMEPAGE = "https://salsa.debian.org/minicom-team/minicom"
-DESCRIPTION = "Minicom is a text-based modem control and terminal emulation program for Unix-like operating systems"
-SECTION = "console/network"
-DEPENDS = "ncurses virtual/libiconv"
-LICENSE = "GPL-2.0-or-later"
-LIC_FILES_CHKSUM = "file://COPYING;md5=420477abc567404debca0a2a1cb6b645 \
- file://src/minicom.h;beginline=1;endline=12;md5=a58838cb709f0db517f4e42730c49e81"
-
-SRC_URI = "${DEBIAN_MIRROR}/main/m/${BPN}/${BPN}_${PV}.orig.tar.bz2 \
- file://allow.to.disable.lockdev.patch \
- file://0001-fix-minicom-h-v-return-value-is-not-0.patch \
-"
-
-SRC_URI[sha256sum] = "38cea30913a20349326ff3f1763ee1512b7b41601c24f065f365e18e9db0beba"
-
-PACKAGECONFIG ??= ""
-PACKAGECONFIG[lockdev] = "--enable-lockdev,--disable-lockdev,lockdev"
-
-inherit autotools gettext pkgconfig
-
-do_install() {
- for d in doc extras man lib src; do make -C $d DESTDIR=${D} install; done
-}
-
-RRECOMMENDS:${PN} += "lrzsz"
-
-RDEPENDS:${PN} += "ncurses-terminfo-base"
diff --git a/meta/recipes-extended/minicom/minicom_2.9.bb b/meta/recipes-extended/minicom/minicom_2.9.bb
new file mode 100644
index 0000000000..907d5c79c9
--- /dev/null
+++ b/meta/recipes-extended/minicom/minicom_2.9.bb
@@ -0,0 +1,25 @@
+SUMMARY = "Text-based modem control and terminal emulation program"
+HOMEPAGE = "https://salsa.debian.org/minicom-team/minicom"
+DESCRIPTION = "Minicom is a text-based modem control and terminal emulation program for Unix-like operating systems"
+SECTION = "console/network"
+DEPENDS = "ncurses virtual/libiconv"
+LICENSE = "GPL-2.0-or-later"
+LIC_FILES_CHKSUM = "file://COPYING;md5=420477abc567404debca0a2a1cb6b645 \
+ file://src/minicom.h;beginline=1;endline=12;md5=a58838cb709f0db517f4e42730c49e81"
+
+SRC_URI = "${DEBIAN_MIRROR}/main/m/${BPN}/${BPN}_${PV}.orig.tar.bz2"
+
+SRC_URI[sha256sum] = "9efbb6458140e5a0de445613f0e76bcf12cbf7a9892b2f53e075c2e7beaba86c"
+
+PACKAGECONFIG ??= ""
+PACKAGECONFIG[lockdev] = "--enable-lockdev,--disable-lockdev,lockdev"
+
+inherit autotools gettext pkgconfig
+
+do_install() {
+ for d in doc extras man lib src; do make -C $d DESTDIR=${D} install; done
+}
+
+RRECOMMENDS:${PN} += "lrzsz"
+
+RDEPENDS:${PN} += "ncurses-terminfo-base"
diff --git a/meta/recipes-extended/msmtp/msmtp_1.8.20.bb b/meta/recipes-extended/msmtp/msmtp_1.8.20.bb
deleted file mode 100644
index da3f70a163..0000000000
--- a/meta/recipes-extended/msmtp/msmtp_1.8.20.bb
+++ /dev/null
@@ -1,27 +0,0 @@
-SUMMARY = "msmtp is an SMTP client"
-DESCRIPTION = "A sendmail replacement for use in MTAs like mutt"
-HOMEPAGE = "https://marlam.de/msmtp/"
-SECTION = "console/network"
-
-LICENSE = "GPL-3.0-only"
-DEPENDS = "zlib gnutls"
-
-LIC_FILES_CHKSUM = "file://COPYING;md5=d32239bcb673463ab874e80d47fae504"
-
-UPSTREAM_CHECK_URI = "https://marlam.de/msmtp/download/"
-
-SRC_URI = "https://marlam.de/${BPN}/releases/${BP}.tar.xz"
-SRC_URI[sha256sum] = "d93ae2aafc0f48af7dc9d0b394df1bb800588b8b4e8d096d8b3cf225344eb111"
-
-inherit gettext autotools update-alternatives pkgconfig
-
-EXTRA_OECONF += "--without-libsecret --without-libgsasl --without-libidn"
-
-ALTERNATIVE:${PN} = "sendmail"
-# /usr/lib/sendmial is required by LSB core test
-ALTERNATIVE:${PN}:linuxstdbase = "sendmail usr-lib-sendmail"
-ALTERNATIVE_TARGET[sendmail] = "${bindir}/msmtp"
-ALTERNATIVE_LINK_NAME[sendmail] = "${sbindir}/sendmail"
-ALTERNATIVE_TARGET[usr-lib-sendmail] = "${bindir}/msmtp"
-ALTERNATIVE_LINK_NAME[usr-lib-sendmail] = "/usr/lib/sendmail"
-ALTERNATIVE_PRIORITY = "100"
diff --git a/meta/recipes-extended/msmtp/msmtp_1.8.25.bb b/meta/recipes-extended/msmtp/msmtp_1.8.25.bb
new file mode 100644
index 0000000000..b575fad5e1
--- /dev/null
+++ b/meta/recipes-extended/msmtp/msmtp_1.8.25.bb
@@ -0,0 +1,27 @@
+SUMMARY = "msmtp is an SMTP client"
+DESCRIPTION = "A sendmail replacement for use in MTAs like mutt"
+HOMEPAGE = "https://marlam.de/msmtp/"
+SECTION = "console/network"
+
+LICENSE = "GPL-3.0-only"
+DEPENDS = "zlib gnutls"
+
+LIC_FILES_CHKSUM = "file://COPYING;md5=d32239bcb673463ab874e80d47fae504"
+
+UPSTREAM_CHECK_URI = "https://marlam.de/msmtp/download/"
+
+SRC_URI = "https://marlam.de/${BPN}/releases/${BP}.tar.xz"
+SRC_URI[sha256sum] = "2dfe1dbbb397d26fe0b0b6b2e9cd2efdf9d72dd42d18e70d7f363ada2652d738"
+
+inherit gettext autotools update-alternatives pkgconfig
+
+EXTRA_OECONF += "--without-libsecret --without-libgsasl --without-libidn"
+
+ALTERNATIVE:${PN} = "sendmail"
+# /usr/lib/sendmial is required by LSB core test
+ALTERNATIVE:${PN}:linuxstdbase = "sendmail usr-lib-sendmail"
+ALTERNATIVE_TARGET[sendmail] = "${bindir}/msmtp"
+ALTERNATIVE_LINK_NAME[sendmail] = "${sbindir}/sendmail"
+ALTERNATIVE_TARGET[usr-lib-sendmail] = "${bindir}/msmtp"
+ALTERNATIVE_LINK_NAME[usr-lib-sendmail] = "/usr/lib/sendmail"
+ALTERNATIVE_PRIORITY = "100"
diff --git a/meta/recipes-extended/newt/files/0001-detect-gold-as-GNU-linker-too.patch b/meta/recipes-extended/newt/files/0001-detect-gold-as-GNU-linker-too.patch
index a4b3afd959..090ed5c1c9 100644
--- a/meta/recipes-extended/newt/files/0001-detect-gold-as-GNU-linker-too.patch
+++ b/meta/recipes-extended/newt/files/0001-detect-gold-as-GNU-linker-too.patch
@@ -1,4 +1,4 @@
-From 58245b859ffbcb1780575bf1b0a018d55e74e434 Mon Sep 17 00:00:00 2001
+From 08ba909500412611953aea0fa2fe0d8fe76b6e24 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Andreas=20M=C3=BCller?= <schnitzeltony@googlemail.com>
Date: Wed, 21 Sep 2016 21:14:40 +0200
Subject: [PATCH] detect gold as GNU linker too
@@ -9,23 +9,21 @@ Content-Transfer-Encoding: 8bit
Upstream-Status: Pending
Signed-off-by: Andreas Müller <schnitzeltony@googlemail.com>
+
---
configure.ac | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/configure.ac b/configure.ac
-index 03e8bda..c2fce51 100644
+index 468c718..cd93f30 100644
--- a/configure.ac
+++ b/configure.ac
@@ -28,7 +28,7 @@ AC_CHECK_SIZEOF([void *])
AC_MSG_CHECKING([for GNU ld])
- LD=`$CC -print-prog-name=ld 2>&5`
+ LD=$($CC -print-prog-name=ld 2>&5)
--if test `$LD -v 2>&1 | $ac_cv_path_GREP -c "GNU ld"` = 0; then
-+if test `$LD -v 2>&1 | $ac_cv_path_GREP -c "GNU "` = 0; then
+-if test $($LD -v 2>&1 | $ac_cv_path_GREP -c "GNU ld") = 0; then
++if test $($LD -v 2>&1 | $ac_cv_path_GREP -c "GNU ") = 0; then
# Not
GNU_LD=""
AC_MSG_RESULT([no])
---
-2.5.5
-
diff --git a/meta/recipes-extended/newt/files/0002-don-t-ignore-CFLAGS-when-building-snack.patch b/meta/recipes-extended/newt/files/0002-don-t-ignore-CFLAGS-when-building-snack.patch
deleted file mode 100644
index ca235d5108..0000000000
--- a/meta/recipes-extended/newt/files/0002-don-t-ignore-CFLAGS-when-building-snack.patch
+++ /dev/null
@@ -1,29 +0,0 @@
-From f60dc1063607ca1f201ba4cbda467d8af3f78f64 Mon Sep 17 00:00:00 2001
-From: Miroslav Lichvar <mlichvar@redhat.com>
-Date: Tue, 1 Oct 2019 16:37:55 +0200
-Subject: [PATCH] don't ignore CFLAGS when building snack
-
-In addition to the flags returned by python-config --cflags, use the
-user-specified CFLAGS when building the snack object.
-
-Upstream-Status: Backport from master
-Signed-off-by: Joshua Watt <JPEWhacker@gmail.com>
----
- Makefile.in | 4 ++--
- 1 file changed, 2 insertions(+), 2 deletions(-)
-
-diff --git a/Makefile.in b/Makefile.in
-index be5f87b..6facd5e 100644
---- a/Makefile.in
-+++ b/Makefile.in
-@@ -96,8 +96,8 @@ _snack.$(SOEXT): snack.c $(LIBNEWTSH)
- PIFLAGS=`$$pyconfig --includes`; \
- PLDFLAGS=`$$pyconfig --ldflags`; \
- PLFLAGS=`$$pyconfig --libs`; \
-- echo $(CC) $(SHCFLAGS) $(CPPFLAGS) $$PIFLAGS $$PCFLAGS -c -o $$ver/snack.o snack.c; \
-- $(CC) $(SHCFLAGS) $(CPPFLAGS) $$PIFLAGS $$PCFLAGS -c -o $$ver/snack.o snack.c; \
-+ echo $(CC) $(SHCFLAGS) $(CFLAGS) $(CPPFLAGS) $$PIFLAGS $$PCFLAGS -c -o $$ver/snack.o snack.c; \
-+ $(CC) $(SHCFLAGS) $(CFLAGS) $(CPPFLAGS) $$PIFLAGS $$PCFLAGS -c -o $$ver/snack.o snack.c; \
- echo $(CC) --shared $$PLDFLAGS $$PLFLAGS $(LDFLAGS) -o $$ver/_snack.$(SOEXT) $$ver/snack.o -L. -lnewt $(LIBS); \
- $(CC) --shared $$PLDFLAGS $$PLFLAGS $(LDFLAGS) -o $$ver/_snack.$(SOEXT) $$ver/snack.o -L. -lnewt $(LIBS); \
- done || :
diff --git a/meta/recipes-extended/newt/libnewt_0.52.21.bb b/meta/recipes-extended/newt/libnewt_0.52.21.bb
deleted file mode 100644
index 430e481b36..0000000000
--- a/meta/recipes-extended/newt/libnewt_0.52.21.bb
+++ /dev/null
@@ -1,58 +0,0 @@
-SUMMARY = "A library for text mode user interfaces"
-
-DESCRIPTION = "Newt is a programming library for color text mode, widget based user \
-interfaces. Newt can be used to add stacked windows, entry widgets, \
-checkboxes, radio buttons, labels, plain text fields, scrollbars, \
-etc., to text mode user interfaces. This package also contains the \
-shared library needed by programs built with newt, as well as a \
-/usr/bin/dialog replacement called whiptail. Newt is based on the \
-slang library."
-
-HOMEPAGE = "https://releases.pagure.org/newt/"
-SECTION = "libs"
-
-LICENSE = "LGPL-2.0-only"
-LIC_FILES_CHKSUM = "file://COPYING;md5=5f30f0716dfdd0d91eb439ebec522ec2"
-
-# slang needs to be >= 2.2
-DEPENDS = "slang popt python3"
-
-SRC_URI = "https://releases.pagure.org/newt/newt-${PV}.tar.gz \
- file://cross_ar.patch \
- file://Makefile.in-Add-tinfo-library-to-the-linking-librari.patch \
- file://0001-detect-gold-as-GNU-linker-too.patch \
- file://0002-don-t-ignore-CFLAGS-when-building-snack.patch \
- "
-
-SRC_URI[md5sum] = "a0a5fd6b53bb167a65e15996b249ebb5"
-SRC_URI[sha256sum] = "265eb46b55d7eaeb887fca7a1d51fe115658882dfe148164b6c49fccac5abb31"
-
-S = "${WORKDIR}/newt-${PV}"
-
-inherit autotools-brokensep python3native python3-dir python3targetconfig
-
-EXTRA_OECONF = "--without-tcl --with-python"
-
-EXTRA_OEMAKE += "PYTHONVERS=${PYTHON_DIR}"
-
-CLEANBROKEN = "1"
-
-export CPPFLAGS
-
-PACKAGES:prepend = "whiptail ${PN}-python "
-
-RDEPENDS:${PN}-python += "python3-core"
-FILES:${PN}-python = "${PYTHON_SITEPACKAGES_DIR}/*"
-
-do_configure:prepend() {
- sh autogen.sh
-}
-
-do_compile:prepend() {
- # Make sure the recompile is OK
- rm -f ${B}/.depend
-}
-
-FILES:whiptail = "${bindir}/whiptail"
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-extended/newt/libnewt_0.52.24.bb b/meta/recipes-extended/newt/libnewt_0.52.24.bb
new file mode 100644
index 0000000000..1e39a1c5ca
--- /dev/null
+++ b/meta/recipes-extended/newt/libnewt_0.52.24.bb
@@ -0,0 +1,56 @@
+SUMMARY = "A library for text mode user interfaces"
+
+DESCRIPTION = "Newt is a programming library for color text mode, widget based user \
+interfaces. Newt can be used to add stacked windows, entry widgets, \
+checkboxes, radio buttons, labels, plain text fields, scrollbars, \
+etc., to text mode user interfaces. This package also contains the \
+shared library needed by programs built with newt, as well as a \
+/usr/bin/dialog replacement called whiptail. Newt is based on the \
+slang library."
+
+HOMEPAGE = "https://releases.pagure.org/newt/"
+SECTION = "libs"
+
+LICENSE = "LGPL-2.0-only"
+LIC_FILES_CHKSUM = "file://COPYING;md5=5f30f0716dfdd0d91eb439ebec522ec2"
+
+# slang needs to be >= 2.2
+DEPENDS = "slang popt python3"
+
+SRC_URI = "https://releases.pagure.org/newt/newt-${PV}.tar.gz \
+ file://cross_ar.patch \
+ file://Makefile.in-Add-tinfo-library-to-the-linking-librari.patch \
+ file://0001-detect-gold-as-GNU-linker-too.patch \
+ "
+
+SRC_URI[sha256sum] = "5ded7e221f85f642521c49b1826c8de19845aa372baf5d630a51774b544fbdbb"
+
+S = "${WORKDIR}/newt-${PV}"
+
+inherit autotools-brokensep python3native python3-dir python3targetconfig
+
+EXTRA_OECONF = "--without-tcl --with-python"
+
+EXTRA_OEMAKE += "PYTHONVERS=${PYTHON_DIR}"
+
+CLEANBROKEN = "1"
+
+export CPPFLAGS
+
+PACKAGES:prepend = "whiptail ${PN}-python "
+
+RDEPENDS:${PN}-python += "python3-core"
+FILES:${PN}-python = "${PYTHON_SITEPACKAGES_DIR}/*"
+
+do_configure:prepend() {
+ sh autogen.sh
+}
+
+do_compile:prepend() {
+ # Make sure the recompile is OK
+ rm -f ${B}/.depend
+}
+
+FILES:whiptail = "${bindir}/whiptail"
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-extended/packagegroups/packagegroup-core-full-cmdline.bb b/meta/recipes-extended/packagegroups/packagegroup-core-full-cmdline.bb
index b66617fbf6..8dc62954a2 100644
--- a/meta/recipes-extended/packagegroups/packagegroup-core-full-cmdline.bb
+++ b/meta/recipes-extended/packagegroups/packagegroup-core-full-cmdline.bb
@@ -4,7 +4,6 @@
SUMMARY = "Standard full-featured Linux system"
DESCRIPTION = "Package group bringing in packages needed for a more traditional full-featured Linux system"
-PR = "r6"
inherit packagegroup
@@ -43,7 +42,7 @@ RDEPENDS:packagegroup-core-full-cmdline-utils = "\
less \
makedevs \
mc \
- mc-fish \
+ mc-shell \
mc-helpers \
mc-helpers-perl \
ncurses \
diff --git a/meta/recipes-extended/pam/libpam/0001-pam_namespace-include-stdint-h.patch b/meta/recipes-extended/pam/libpam/0001-pam_namespace-include-stdint-h.patch
new file mode 100644
index 0000000000..124e5f1c3c
--- /dev/null
+++ b/meta/recipes-extended/pam/libpam/0001-pam_namespace-include-stdint-h.patch
@@ -0,0 +1,42 @@
+From cc9d40b7cdbd3e15ccaa324a0dda1680ef9dea13 Mon Sep 17 00:00:00 2001
+From: Jacob Heider <jacob@pkgx.dev>
+Date: Wed, 17 Jan 2024 11:49:26 -0500
+Subject: [PATCH] pam_namespace: include stdint.h
+
+pam_namespace.c makes use of SIZE_MAX but doesn't include stdint.h,
+resulting in the following build failures on 1.6.0:
+
+ pam_namespace.c: In function 'process_line':
+ pam_namespace.c:649:41: error: 'SIZE_MAX' undeclared (first use in this function)
+ 649 | if (count > UINT_MAX || count > SIZE_MAX / sizeof(uid_t)) {
+ | ^~~~~~~~
+ pam_namespace.c:41:1: note: 'SIZE_MAX' is defined in header '<stdint.h>'; did you forget to '#include <stdint.h>'?
+ 40 | #include "argv_parse.h"
+ +++ |+#include <stdint.h>
+ 41 |
+ pam_namespace.c:649:41: note: each undeclared identifier is reported only once for each function it appears in
+ 649 | if (count > UINT_MAX || count > SIZE_MAX / sizeof(uid_t)) {
+ | ^~~~~~~~
+
+Fixes: v1.6.0~100 ("pam_namespace: validate amount of uids in config")
+Resolves: https://github.com/linux-pam/linux-pam/issues/733
+
+Upstream-Status: Backport [https://github.com/linux-pam/linux-pam/commit/cc9d40b7cdbd3e15ccaa324a0dda1680ef9dea13]
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+---
+ modules/pam_namespace/pam_namespace.c | 2 ++
+ 1 file changed, 2 insertions(+)
+
+diff --git a/modules/pam_namespace/pam_namespace.c b/modules/pam_namespace/pam_namespace.c
+index f72d67189..b16731c22 100644
+--- a/modules/pam_namespace/pam_namespace.c
++++ b/modules/pam_namespace/pam_namespace.c
+@@ -34,6 +34,8 @@
+
+ #define _ATFILE_SOURCE
+
++#include "config.h"
++#include <stdint.h>
+ #include "pam_cc_compat.h"
+ #include "pam_inline.h"
+ #include "pam_namespace.h"
diff --git a/meta/recipes-extended/pam/libpam/0001-run-xtests.sh-check-whether-files-exist.patch b/meta/recipes-extended/pam/libpam/0001-run-xtests.sh-check-whether-files-exist.patch
deleted file mode 100644
index 40040a873a..0000000000
--- a/meta/recipes-extended/pam/libpam/0001-run-xtests.sh-check-whether-files-exist.patch
+++ /dev/null
@@ -1,65 +0,0 @@
-From e8e8ccfd57e0274b431bc5717bf37c488285b07b Mon Sep 17 00:00:00 2001
-From: Mingli Yu <mingli.yu@windriver.com>
-Date: Wed, 27 Oct 2021 10:30:46 +0800
-Subject: [PATCH] run-xtests.sh: check whether files exist
-
-Fixes:
- # ./run-xtests.sh . tst-pam_access1
- mv: cannot stat '/etc/security/opasswd': No such file or directory
- PASS: tst-pam_access1
- mv: cannot stat '/etc/security/opasswd-pam-xtests': No such file or directory
- ==================
- 1 tests passed
- 0 tests not run
- ==================
-
-Upstream-Status: Backport [https://github.com/linux-pam/linux-pam/commit/e8e8ccfd57e0274b431bc5717bf37c488285b07b]
-
-Signed-off-by: Mingli Yu <mingli.yu@windriver.com>
----
- xtests/run-xtests.sh | 20 +++++++++++++-------
- 1 file changed, 13 insertions(+), 7 deletions(-)
-
-diff --git a/xtests/run-xtests.sh b/xtests/run-xtests.sh
-index 14f585d9..ff9a4dc1 100755
---- a/xtests/run-xtests.sh
-+++ b/xtests/run-xtests.sh
-@@ -18,10 +18,12 @@ all=0
-
- mkdir -p /etc/security
- for config in access.conf group.conf time.conf limits.conf ; do
-- cp /etc/security/$config /etc/security/$config-pam-xtests
-+ [ -f "/etc/security/$config" ] &&
-+ mv /etc/security/$config /etc/security/$config-pam-xtests
- install -m 644 "${SRCDIR}"/$config /etc/security/$config
- done
--mv /etc/security/opasswd /etc/security/opasswd-pam-xtests
-+[ -f /etc/security/opasswd ] &&
-+ mv /etc/security/opasswd /etc/security/opasswd-pam-xtests
-
- for testname in $XTESTS ; do
- for cfg in "${SRCDIR}"/$testname*.pamd ; do
-@@ -47,11 +49,15 @@ for testname in $XTESTS ; do
- all=`expr $all + 1`
- rm -f /etc/pam.d/$testname*
- done
--mv /etc/security/access.conf-pam-xtests /etc/security/access.conf
--mv /etc/security/group.conf-pam-xtests /etc/security/group.conf
--mv /etc/security/time.conf-pam-xtests /etc/security/time.conf
--mv /etc/security/limits.conf-pam-xtests /etc/security/limits.conf
--mv /etc/security/opasswd-pam-xtests /etc/security/opasswd
-+
-+for config in access.conf group.conf time.conf limits.conf opasswd ; do
-+ if [ -f "/etc/security/$config-pam-xtests" ]; then
-+ mv /etc/security/$config-pam-xtests /etc/security/$config
-+ else
-+ rm -f /etc/security/$config
-+ fi
-+done
-+
- if test "$failed" -ne 0; then
- echo "==================="
- echo "$failed of $all tests failed"
---
-2.32.0
-
diff --git a/meta/recipes-extended/pam/libpam/99_pam b/meta/recipes-extended/pam/libpam/99_pam
index 97e990d10b..a88247be13 100644
--- a/meta/recipes-extended/pam/libpam/99_pam
+++ b/meta/recipes-extended/pam/libpam/99_pam
@@ -1 +1 @@
-d root root 0755 /var/run/sepermit none
+d root root 0755 /run/sepermit none
diff --git a/meta/recipes-extended/pam/libpam/libpam-xtests.patch b/meta/recipes-extended/pam/libpam/libpam-xtests.patch
index ea145899b4..f2dafa72a5 100644
--- a/meta/recipes-extended/pam/libpam/libpam-xtests.patch
+++ b/meta/recipes-extended/pam/libpam/libpam-xtests.patch
@@ -1,13 +1,21 @@
-This patch is used to create a new sub package libpam-xtests to do more checks.
+From 060726f7e60c8ecb5bf50fd776910b290d9a0a69 Mon Sep 17 00:00:00 2001
+From: Kang Kai <kai.kang@windriver.com>
+Date: Tue, 19 Jul 2011 17:08:31 +0800
+Subject: [PATCH] This patch is used to create a new sub package libpam-xtests
+ to do more checks.
Upstream-Status: Pending
Signed-off-by: Kang Kai <kai.kang@windriver.com>
-Index: Linux-PAM-1.3.0/xtests/Makefile.am
-===================================================================
---- Linux-PAM-1.3.0.orig/xtests/Makefile.am
-+++ Linux-PAM-1.3.0/xtests/Makefile.am
-@@ -7,7 +7,7 @@ AM_CFLAGS = -DLIBPAM_COMPILE -I$(top_src
+---
+ xtests/Makefile.am | 17 ++++++++++++++++-
+ 1 file changed, 16 insertions(+), 1 deletion(-)
+
+diff --git a/xtests/Makefile.am b/xtests/Makefile.am
+index acf9746..9826c9f 100644
+--- a/xtests/Makefile.am
++++ b/xtests/Makefile.am
+@@ -8,7 +8,7 @@ AM_CFLAGS = -DLIBPAM_COMPILE -I$(top_srcdir)/libpam/include \
LDADD = $(top_builddir)/libpam/libpam.la \
$(top_builddir)/libpam_misc/libpam_misc.la
@@ -16,7 +24,7 @@ Index: Linux-PAM-1.3.0/xtests/Makefile.am
EXTRA_DIST = run-xtests.sh tst-pam_dispatch1.pamd tst-pam_dispatch2.pamd \
tst-pam_dispatch3.pamd tst-pam_dispatch4.pamd \
-@@ -51,3 +51,18 @@ EXTRA_PROGRAMS = $(XTESTS)
+@@ -55,3 +55,18 @@ EXTRA_PROGRAMS = $(XTESTS)
xtests: $(XTESTS) run-xtests.sh
"$(srcdir)"/run-xtests.sh "$(srcdir)" ${XTESTS} ${NOSRCTESTS}
diff --git a/meta/recipes-extended/pam/libpam_1.5.2.bb b/meta/recipes-extended/pam/libpam_1.5.2.bb
deleted file mode 100644
index 081986ef43..0000000000
--- a/meta/recipes-extended/pam/libpam_1.5.2.bb
+++ /dev/null
@@ -1,185 +0,0 @@
-DISABLE_STATIC = ""
-SUMMARY = "Linux-PAM (Pluggable Authentication Modules)"
-DESCRIPTION = "Linux-PAM (Pluggable Authentication Modules for Linux), a flexible mechanism for authenticating users"
-HOMEPAGE = "https://fedorahosted.org/linux-pam/"
-BUGTRACKER = "https://fedorahosted.org/linux-pam/newticket"
-SECTION = "base"
-# PAM is dual licensed under GPL and BSD.
-# /etc/pam.d comes from Debian libpam-runtime in 2009-11 (at that time
-# libpam-runtime-1.0.1 is GPL-2.0-or-later), by openembedded
-LICENSE = "GPL-2.0-or-later | BSD-3-Clause"
-LIC_FILES_CHKSUM = "file://COPYING;md5=7eb5c1bf854e8881005d673599ee74d3 \
- file://libpamc/License;md5=a4da476a14c093fdc73be3c3c9ba8fb3 \
- "
-
-SRC_URI = "https://github.com/linux-pam/linux-pam/releases/download/v${PV}/Linux-PAM-${PV}.tar.xz \
- file://99_pam \
- file://pam.d/common-account \
- file://pam.d/common-auth \
- file://pam.d/common-password \
- file://pam.d/common-session \
- file://pam.d/common-session-noninteractive \
- file://pam.d/other \
- file://libpam-xtests.patch \
- file://0001-run-xtests.sh-check-whether-files-exist.patch \
- file://run-ptest \
- file://pam-volatiles.conf \
- "
-
-SRC_URI[sha256sum] = "e4ec7131a91da44512574268f493c6d8ca105c87091691b8e9b56ca685d4f94d"
-
-DEPENDS = "bison-native flex flex-native cracklib libxml2-native virtual/crypt"
-
-EXTRA_OECONF = "--includedir=${includedir}/security \
- --libdir=${base_libdir} \
- --with-systemdunitdir=${systemd_system_unitdir} \
- --disable-nis \
- --disable-regenerate-docu \
- --disable-doc \
- --disable-prelude"
-
-CFLAGS:append = " -fPIC "
-
-S = "${WORKDIR}/Linux-PAM-${PV}"
-
-inherit autotools gettext pkgconfig systemd ptest
-
-PACKAGECONFIG ??= ""
-PACKAGECONFIG[audit] = "--enable-audit,--disable-audit,audit,"
-PACKAGECONFIG[userdb] = "--enable-db=db,--enable-db=no,db,"
-
-PACKAGES += "${PN}-runtime ${PN}-xtests"
-FILES:${PN} = "${base_libdir}/lib*${SOLIBS}"
-FILES:${PN}-dev += "${base_libdir}/security/*.la ${base_libdir}/*.la ${base_libdir}/lib*${SOLIBSDEV}"
-FILES:${PN}-runtime = "${sysconfdir} ${sbindir} ${systemd_system_unitdir}"
-FILES:${PN}-xtests = "${datadir}/Linux-PAM/xtests"
-
-PACKAGES_DYNAMIC += "^${MLPREFIX}pam-plugin-.*"
-
-def get_multilib_bit(d):
- baselib = d.getVar('baselib') or ''
- return baselib.replace('lib', '')
-
-libpam_suffix = "suffix${@get_multilib_bit(d)}"
-
-RPROVIDES:${PN} += "${PN}-${libpam_suffix}"
-RPROVIDES:${PN}-runtime += "${PN}-runtime-${libpam_suffix}"
-
-RDEPENDS:${PN}-runtime = "${PN}-${libpam_suffix} \
- ${MLPREFIX}pam-plugin-deny-${libpam_suffix} \
- ${MLPREFIX}pam-plugin-permit-${libpam_suffix} \
- ${MLPREFIX}pam-plugin-warn-${libpam_suffix} \
- ${MLPREFIX}pam-plugin-unix-${libpam_suffix} \
- "
-RDEPENDS:${PN}-xtests = "${PN}-${libpam_suffix} \
- ${MLPREFIX}pam-plugin-access-${libpam_suffix} \
- ${MLPREFIX}pam-plugin-debug-${libpam_suffix} \
- ${MLPREFIX}pam-plugin-pwhistory-${libpam_suffix} \
- ${MLPREFIX}pam-plugin-succeed-if-${libpam_suffix} \
- ${MLPREFIX}pam-plugin-time-${libpam_suffix} \
- bash coreutils"
-
-# FIXME: Native suffix breaks here, disable it for now
-RRECOMMENDS:${PN} = "${PN}-runtime-${libpam_suffix}"
-RRECOMMENDS:${PN}:class-native = ""
-
-python populate_packages:prepend () {
- def pam_plugin_hook(file, pkg, pattern, format, basename):
- pn = d.getVar('PN')
- libpam_suffix = d.getVar('libpam_suffix')
-
- rdeps = d.getVar('RDEPENDS:' + pkg)
- if rdeps:
- rdeps = rdeps + " " + pn + "-" + libpam_suffix
- else:
- rdeps = pn + "-" + libpam_suffix
- d.setVar('RDEPENDS:' + pkg, rdeps)
-
- provides = d.getVar('RPROVIDES:' + pkg)
- if provides:
- provides = provides + " " + pkg + "-" + libpam_suffix
- else:
- provides = pkg + "-" + libpam_suffix
- d.setVar('RPROVIDES:' + pkg, provides)
-
- mlprefix = d.getVar('MLPREFIX') or ''
- dvar = d.expand('${WORKDIR}/package')
- pam_libdir = d.expand('${base_libdir}/security')
- pam_sbindir = d.expand('${sbindir}')
- pam_filterdir = d.expand('${base_libdir}/security/pam_filter')
- pam_pkgname = mlprefix + 'pam-plugin%s'
-
- do_split_packages(d, pam_libdir, r'^pam(.*)\.so$', pam_pkgname,
- 'PAM plugin for %s', hook=pam_plugin_hook, extra_depends='')
- do_split_packages(d, pam_filterdir, r'^(.*)$', 'pam-filter-%s', 'PAM filter for %s', extra_depends='')
-}
-
-do_compile_ptest() {
- cd tests
- sed -i -e 's/$(MAKE) $(AM_MAKEFLAGS) check-TESTS//' Makefile
- oe_runmake check-am
- cd -
-}
-
-do_install() {
- autotools_do_install
-
- # don't install /var/run when populating rootfs. Do it through volatile
- rm -rf ${D}${localstatedir}
-
- if ${@bb.utils.contains('DISTRO_FEATURES','sysvinit','false','true',d)}; then
- rm -rf ${D}${sysconfdir}/init.d/
- rm -rf ${D}${sysconfdir}/rc*
- install -d ${D}${sysconfdir}/tmpfiles.d
- install -m 0644 ${WORKDIR}/pam-volatiles.conf \
- ${D}${sysconfdir}/tmpfiles.d/pam.conf
- else
- install -d ${D}${sysconfdir}/default/volatiles
- install -m 0644 ${WORKDIR}/99_pam \
- ${D}${sysconfdir}/default/volatiles/
- fi
-
- install -d ${D}${sysconfdir}/pam.d/
- install -m 0644 ${WORKDIR}/pam.d/* ${D}${sysconfdir}/pam.d/
-
- # The lsb requires unix_chkpwd has setuid permission
- chmod 4755 ${D}${sbindir}/unix_chkpwd
-
- if ${@bb.utils.contains('DISTRO_FEATURES','systemd','true','false',d)}; then
- echo "session optional pam_systemd.so" >> ${D}${sysconfdir}/pam.d/common-session
- fi
- if ${@bb.utils.contains('DISTRO_FEATURES','usrmerge','false','true',d)}; then
- install -d ${D}/${libdir}/
- mv ${D}/${base_libdir}/pkgconfig ${D}/${libdir}/
- fi
-}
-
-do_install_ptest() {
- if [ ${PTEST_ENABLED} = "1" ]; then
- mkdir -p ${D}${PTEST_PATH}/tests
- install -m 0755 ${B}/tests/.libs/* ${D}${PTEST_PATH}/tests
- install -m 0644 ${S}/tests/confdir ${D}${PTEST_PATH}/tests
- fi
-}
-
-pkg_postinst:${PN}() {
- if [ -z "$D" ] && [ -e /etc/init.d/populate-volatile.sh ] ; then
- /etc/init.d/populate-volatile.sh update
- fi
-}
-
-inherit features_check
-REQUIRED_DISTRO_FEATURES = "pam"
-
-BBCLASSEXTEND = "nativesdk native"
-
-CONFFILES:${PN}-runtime += "${sysconfdir}/pam.d/common-session"
-CONFFILES:${PN}-runtime += "${sysconfdir}/pam.d/common-auth"
-CONFFILES:${PN}-runtime += "${sysconfdir}/pam.d/common-password"
-CONFFILES:${PN}-runtime += "${sysconfdir}/pam.d/common-session-noninteractive"
-CONFFILES:${PN}-runtime += "${sysconfdir}/pam.d/common-account"
-CONFFILES:${PN}-runtime += "${sysconfdir}/security/limits.conf"
-
-UPSTREAM_CHECK_URI = "https://github.com/linux-pam/linux-pam/releases"
-
-CVE_PRODUCT = "linux-pam"
diff --git a/meta/recipes-extended/pam/libpam_1.6.0.bb b/meta/recipes-extended/pam/libpam_1.6.0.bb
new file mode 100644
index 0000000000..e1ed940d1e
--- /dev/null
+++ b/meta/recipes-extended/pam/libpam_1.6.0.bb
@@ -0,0 +1,191 @@
+DISABLE_STATIC = ""
+SUMMARY = "Linux-PAM (Pluggable Authentication Modules)"
+DESCRIPTION = "Linux-PAM (Pluggable Authentication Modules for Linux), a flexible mechanism for authenticating users"
+HOMEPAGE = "https://fedorahosted.org/linux-pam/"
+BUGTRACKER = "https://fedorahosted.org/linux-pam/newticket"
+SECTION = "base"
+# PAM is dual licensed under GPL and BSD.
+# /etc/pam.d comes from Debian libpam-runtime in 2009-11 (at that time
+# libpam-runtime-1.0.1 is GPL-2.0-or-later), by openembedded
+LICENSE = "GPL-2.0-or-later | BSD-3-Clause"
+LIC_FILES_CHKSUM = "file://COPYING;md5=7eb5c1bf854e8881005d673599ee74d3 \
+ file://libpamc/License;md5=a4da476a14c093fdc73be3c3c9ba8fb3 \
+ "
+
+SRC_URI = "${GITHUB_BASE_URI}/download/v${PV}/Linux-PAM-${PV}.tar.xz \
+ file://99_pam \
+ file://pam.d/common-account \
+ file://pam.d/common-auth \
+ file://pam.d/common-password \
+ file://pam.d/common-session \
+ file://pam.d/common-session-noninteractive \
+ file://pam.d/other \
+ file://libpam-xtests.patch \
+ file://run-ptest \
+ file://pam-volatiles.conf \
+ file://0001-pam_namespace-include-stdint-h.patch \
+ "
+
+SRC_URI[sha256sum] = "fff4a34e5bbee77e2e8f1992f27631e2329bcbf8a0563ddeb5c3389b4e3169ad"
+
+DEPENDS = "bison-native flex-native cracklib libxml2-native virtual/crypt"
+
+EXTRA_OECONF = "--includedir=${includedir}/security \
+ --libdir=${base_libdir} \
+ --with-systemdunitdir=${systemd_system_unitdir} \
+ --disable-nis \
+ --disable-regenerate-docu \
+ --disable-doc \
+ --disable-prelude"
+
+CFLAGS:append = " -fPIC "
+
+S = "${WORKDIR}/Linux-PAM-${PV}"
+
+inherit autotools gettext pkgconfig systemd ptest github-releases
+
+PACKAGECONFIG ??= ""
+PACKAGECONFIG[audit] = "--enable-audit,--disable-audit,audit,"
+PACKAGECONFIG[userdb] = "--enable-db=db,--enable-db=no,db,"
+
+PACKAGES += "${PN}-runtime ${PN}-xtests"
+FILES:${PN} = " \
+ ${base_libdir}/lib*${SOLIBS} \
+ ${nonarch_libdir}/tmpfiles.d/*.conf \
+"
+FILES:${PN}-dev += "${base_libdir}/security/*.la ${base_libdir}/*.la ${base_libdir}/lib*${SOLIBSDEV}"
+FILES:${PN}-runtime = "${sysconfdir} ${sbindir} ${systemd_system_unitdir}"
+FILES:${PN}-xtests = "${datadir}/Linux-PAM/xtests"
+
+# libpam installs /etc/environment for use with the pam_env plugin. Make sure it is
+# packaged with the pam-plugin-env package to avoid breaking installations which
+# install that file via other packages
+FILES:pam-plugin-env = "${sysconfdir}/environment"
+
+PACKAGES_DYNAMIC += "^${MLPREFIX}pam-plugin-.*"
+
+def get_multilib_bit(d):
+ baselib = d.getVar('baselib') or ''
+ return baselib.replace('lib', '')
+
+libpam_suffix = "suffix${@get_multilib_bit(d)}"
+
+RPROVIDES:${PN} += "${PN}-${libpam_suffix}"
+RPROVIDES:${PN}-runtime += "${PN}-runtime-${libpam_suffix}"
+
+RDEPENDS:${PN}-runtime = "${PN}-${libpam_suffix} \
+ ${MLPREFIX}pam-plugin-deny-${libpam_suffix} \
+ ${MLPREFIX}pam-plugin-permit-${libpam_suffix} \
+ ${MLPREFIX}pam-plugin-warn-${libpam_suffix} \
+ ${MLPREFIX}pam-plugin-unix-${libpam_suffix} \
+ "
+RDEPENDS:${PN}-xtests = "${PN}-${libpam_suffix} \
+ ${MLPREFIX}pam-plugin-access-${libpam_suffix} \
+ ${MLPREFIX}pam-plugin-debug-${libpam_suffix} \
+ ${MLPREFIX}pam-plugin-pwhistory-${libpam_suffix} \
+ ${MLPREFIX}pam-plugin-succeed-if-${libpam_suffix} \
+ ${MLPREFIX}pam-plugin-time-${libpam_suffix} \
+ bash coreutils"
+
+# FIXME: Native suffix breaks here, disable it for now
+RRECOMMENDS:${PN} = "${PN}-runtime-${libpam_suffix}"
+RRECOMMENDS:${PN}:class-native = ""
+
+python populate_packages:prepend () {
+ def pam_plugin_hook(file, pkg, pattern, format, basename):
+ pn = d.getVar('PN')
+ libpam_suffix = d.getVar('libpam_suffix')
+
+ rdeps = d.getVar('RDEPENDS:' + pkg)
+ if rdeps:
+ rdeps = rdeps + " " + pn + "-" + libpam_suffix
+ else:
+ rdeps = pn + "-" + libpam_suffix
+ d.setVar('RDEPENDS:' + pkg, rdeps)
+
+ provides = d.getVar('RPROVIDES:' + pkg)
+ if provides:
+ provides = provides + " " + pkg + "-" + libpam_suffix
+ else:
+ provides = pkg + "-" + libpam_suffix
+ d.setVar('RPROVIDES:' + pkg, provides)
+
+ mlprefix = d.getVar('MLPREFIX') or ''
+ dvar = d.expand('${WORKDIR}/package')
+ pam_libdir = d.expand('${base_libdir}/security')
+ pam_sbindir = d.expand('${sbindir}')
+ pam_filterdir = d.expand('${base_libdir}/security/pam_filter')
+ pam_pkgname = mlprefix + 'pam-plugin%s'
+
+ do_split_packages(d, pam_libdir, r'^pam(.*)\.so$', pam_pkgname,
+ 'PAM plugin for %s', hook=pam_plugin_hook, extra_depends='', prepend=True)
+ do_split_packages(d, pam_filterdir, r'^(.*)$', 'pam-filter-%s', 'PAM filter for %s', extra_depends='')
+}
+
+do_compile_ptest() {
+ cd tests
+ sed -i -e 's/$(MAKE) $(AM_MAKEFLAGS) check-TESTS//' Makefile
+ oe_runmake check-am
+ cd -
+}
+
+do_install() {
+ autotools_do_install
+
+ # don't install /var/run when populating rootfs. Do it through volatile
+ rm -rf ${D}${localstatedir}
+
+ if ${@bb.utils.contains('DISTRO_FEATURES','sysvinit','false','true',d)}; then
+ rm -rf ${D}${sysconfdir}/init.d/
+ rm -rf ${D}${sysconfdir}/rc*
+ install -d ${D}${nonarch_libdir}/tmpfiles.d
+ install -m 0644 ${WORKDIR}/pam-volatiles.conf \
+ ${D}${nonarch_libdir}/tmpfiles.d/pam.conf
+ else
+ install -d ${D}${sysconfdir}/default/volatiles
+ install -m 0644 ${WORKDIR}/99_pam \
+ ${D}${sysconfdir}/default/volatiles/
+ fi
+
+ install -d ${D}${sysconfdir}/pam.d/
+ install -m 0644 ${WORKDIR}/pam.d/* ${D}${sysconfdir}/pam.d/
+
+ # The lsb requires unix_chkpwd has setuid permission
+ chmod 4755 ${D}${sbindir}/unix_chkpwd
+
+ if ${@bb.utils.contains('DISTRO_FEATURES','systemd','true','false',d)}; then
+ echo "session optional pam_systemd.so" >> ${D}${sysconfdir}/pam.d/common-session
+ fi
+ if ${@bb.utils.contains('DISTRO_FEATURES','usrmerge','false','true',d)}; then
+ install -d ${D}/${libdir}/
+ mv ${D}/${base_libdir}/pkgconfig ${D}/${libdir}/
+ fi
+}
+
+do_install_ptest() {
+ mkdir -p ${D}${PTEST_PATH}/tests
+ install -m 0755 ${B}/tests/.libs/* ${D}${PTEST_PATH}/tests
+ install -m 0644 ${S}/tests/confdir ${D}${PTEST_PATH}/tests
+}
+
+pkg_postinst:${PN}() {
+ if [ -z "$D" ] && [ -e /etc/init.d/populate-volatile.sh ] ; then
+ /etc/init.d/populate-volatile.sh update
+ fi
+}
+
+inherit features_check
+ANY_OF_DISTRO_FEATURES = "pam systemd"
+
+BBCLASSEXTEND = "nativesdk native"
+
+CONFFILES:${PN}-runtime += "${sysconfdir}/pam.d/common-session"
+CONFFILES:${PN}-runtime += "${sysconfdir}/pam.d/common-auth"
+CONFFILES:${PN}-runtime += "${sysconfdir}/pam.d/common-password"
+CONFFILES:${PN}-runtime += "${sysconfdir}/pam.d/common-session-noninteractive"
+CONFFILES:${PN}-runtime += "${sysconfdir}/pam.d/common-account"
+CONFFILES:${PN}-runtime += "${sysconfdir}/security/limits.conf"
+
+GITHUB_BASE_URI = "https://github.com/linux-pam/linux-pam/releases"
+
+CVE_PRODUCT = "linux-pam"
diff --git a/meta/recipes-extended/parted/files/0001-fs-Add-libuuid-to-linker-flags-for-libparted-fs-resi.patch b/meta/recipes-extended/parted/files/0001-fs-Add-libuuid-to-linker-flags-for-libparted-fs-resi.patch
new file mode 100644
index 0000000000..10354f1ed9
--- /dev/null
+++ b/meta/recipes-extended/parted/files/0001-fs-Add-libuuid-to-linker-flags-for-libparted-fs-resi.patch
@@ -0,0 +1,34 @@
+From 1fc88332f7e906294fd889287b9e84cefc7f1586 Mon Sep 17 00:00:00 2001
+From: Khem Raj <raj.khem@gmail.com>
+Date: Mon, 12 Jun 2023 10:40:07 -0700
+Subject: [PATCH] fs: Add libuuid to linker flags for libparted-fs-resize
+ library
+
+This library uses uuid_generate function which comes from libuuid and
+hence it should be mentioned on linker cmdline
+
+fixes
+| aarch64-yoe-linux-ld.lld: error: undefined reference due to --no-allow-shlib-undefined: uuid_generate
+| >>> referenced by /mnt/b/yoe/master/build/tmp/work/cortexa72-cortexa53-crypto-yoe-linux/fatresize/1.1.0-r0/recipe-sysroot/usr/lib/libparted-fs-resize.so
+
+Upstream-Status: Submitted [https://alioth-lists.debian.net/pipermail/parted-devel/2023-June/005873.html]
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+---
+ libparted/fs/Makefile.am | 1 +
+ 1 file changed, 1 insertion(+)
+
+diff --git a/libparted/fs/Makefile.am b/libparted/fs/Makefile.am
+index 2f345f3..a8970eb 100644
+--- a/libparted/fs/Makefile.am
++++ b/libparted/fs/Makefile.am
+@@ -75,6 +75,7 @@ libparted_fs_resize_la_LDFLAGS = \
+ EXTRA_DIST += fsresize.sym
+ libparted_fs_resize_la_DEPENDENCIES = $(sym_file)
+
++libparted_fs_resize_la_LIBADD = $(UUID_LIBS)
+ libparted_fs_resize_la_SOURCES = \
+ r/filesys.c \
+ r/fat/bootsector.c \
+--
+2.41.0
+
diff --git a/meta/recipes-extended/parted/files/autoconf-2.73.patch b/meta/recipes-extended/parted/files/autoconf-2.73.patch
new file mode 100644
index 0000000000..63dea88bfc
--- /dev/null
+++ b/meta/recipes-extended/parted/files/autoconf-2.73.patch
@@ -0,0 +1,22 @@
+The gnulib largefile macro needs updating to work with autoconf 2.73. Rather
+than the full code:
+
+https://git.savannah.gnu.org/cgit/gnulib.git/commit/m4/largefile.m4?id=f91f633858cf132e50924224c50d6264a92caabb
+
+Just tweak the exiting code to work with 2.73. The next parted upgrade should
+update to new gnulib
+
+Upstream-Status: Inappropriate
+Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
+
+--- a/m4/largefile.m4
++++ b/m4/largefile.m4
+@@ -27,7 +27,7 @@ AC_DEFUN([gl_SET_LARGEFILE_SOURCE],
+ # Work around a problem in autoconf <= 2.69:
+ # AC_SYS_LARGEFILE does not configure for large inodes on Mac OS X 10.5,
+ # or configures them incorrectly in some cases.
+-m4_version_prereq([2.70], [], [
++m4_version_prereq([2.73], [], [
+
+ # _AC_SYS_LARGEFILE_TEST_INCLUDES
+ # -------------------------------
diff --git a/meta/recipes-extended/parted/parted_3.5.bb b/meta/recipes-extended/parted/parted_3.5.bb
deleted file mode 100644
index ea2b68bbd8..0000000000
--- a/meta/recipes-extended/parted/parted_3.5.bb
+++ /dev/null
@@ -1,60 +0,0 @@
-SUMMARY = "Disk partition editing/resizing utility"
-HOMEPAGE = "http://www.gnu.org/software/parted/parted.html"
-DESCRIPTION = "GNU Parted manipulates partition tables. This is useful for creating space for new operating systems, reorganizing disk usage, copying data on hard disks and disk imaging."
-LICENSE = "GPL-3.0-or-later"
-LIC_FILES_CHKSUM = "file://COPYING;md5=2f31b266d3440dd7ee50f92cf67d8e6c"
-SECTION = "console/tools"
-DEPENDS = "ncurses util-linux virtual/libiconv"
-
-SRC_URI = "${GNU_MIRROR}/parted/parted-${PV}.tar.xz \
- file://fix-doc-mandir.patch \
- file://run-ptest \
- "
-
-SRC_URI[sha256sum] = "4938dd5c1c125f6c78b1f4b3e297526f18ee74aa43d45c248578b1d2470c05a2"
-
-inherit autotools pkgconfig gettext texinfo ptest
-
-PACKAGECONFIG ?= "readline"
-PACKAGECONFIG[device-mapper] = "--enable-device-mapper,--disable-device-mapper,libdevmapper lvm2"
-PACKAGECONFIG[readline] = "--with-readline,--without-readline,readline"
-
-BBCLASSEXTEND = "native nativesdk"
-
-do_compile_ptest() {
- oe_runmake -C tests print-align print-max dup-clobber duplicate fs-resize print-flags
-}
-
-do_install_ptest() {
- t=${D}${PTEST_PATH}
- mkdir $t/build-aux
- cp ${S}/build-aux/test-driver $t/build-aux/
- cp -r ${S}/tests $t
- cp ${B}/tests/Makefile $t/tests/
- mkdir $t/lib
- cp ${B}/lib/config.h $t/lib
- sed -i "s|^VERSION.*|VERSION = ${PV}|g" $t/tests/Makefile
- sed -i "s|^srcdir =.*|srcdir = \.|g" $t/tests/Makefile
- sed -i "s|^abs_srcdir =.*|abs_srcdir = \.|g" $t/tests/Makefile
- sed -i "s|^abs_top_srcdir =.*|abs_top_srcdir = "${PTEST_PATH}"|g" $t/tests/Makefile
- sed -i "s|^abs_top_builddir =.*|abs_top_builddir = "${PTEST_PATH}"|g" $t/tests/Makefile
- sed -i "s|^Makefile:.*|Makefile:|g" $t/tests/Makefile
- sed -i "/^BUILDINFO.*$/d" $t/tests/Makefile
- for i in print-align print-max print-flags dup-clobber duplicate fs-resize; \
- do cp ${B}/tests/.libs/$i $t/tests/; \
- done
- sed -e 's| ../parted||' -i $t/tests/*.sh
-}
-
-RDEPENDS:${PN}-ptest = "bash coreutils perl util-linux-losetup util-linux-mkswap python3 make gawk e2fsprogs-mke2fs e2fsprogs-tune2fs python3-core dosfstools"
-RRECOMMENDS:${PN}-ptest += "kernel-module-scsi-debug kernel-module-loop kernel-module-vfat"
-RDEPENDS:${PN}-ptest:append:libc-glibc = "\
- glibc-utils \
- locale-base-en-us \
- "
-
-inherit update-alternatives
-
-ALTERNATIVE_PRIORITY = "100"
-ALTERNATIVE:${PN} = "partprobe"
-ALTERNATIVE_LINK_NAME[partprobe] = "${sbindir}/partprobe"
diff --git a/meta/recipes-extended/parted/parted_3.6.bb b/meta/recipes-extended/parted/parted_3.6.bb
new file mode 100644
index 0000000000..a537ef74db
--- /dev/null
+++ b/meta/recipes-extended/parted/parted_3.6.bb
@@ -0,0 +1,62 @@
+SUMMARY = "Disk partition editing/resizing utility"
+HOMEPAGE = "http://www.gnu.org/software/parted/parted.html"
+DESCRIPTION = "GNU Parted manipulates partition tables. This is useful for creating space for new operating systems, reorganizing disk usage, copying data on hard disks and disk imaging."
+LICENSE = "GPL-3.0-or-later"
+LIC_FILES_CHKSUM = "file://COPYING;md5=2f31b266d3440dd7ee50f92cf67d8e6c"
+SECTION = "console/tools"
+DEPENDS = "ncurses util-linux virtual/libiconv"
+
+SRC_URI = "${GNU_MIRROR}/parted/parted-${PV}.tar.xz \
+ file://fix-doc-mandir.patch \
+ file://0001-fs-Add-libuuid-to-linker-flags-for-libparted-fs-resi.patch \
+ file://autoconf-2.73.patch \
+ file://run-ptest \
+ "
+
+SRC_URI[sha256sum] = "3b43dbe33cca0f9a18601ebab56b7852b128ec1a3df3a9b30ccde5e73359e612"
+
+inherit autotools pkgconfig gettext texinfo ptest
+
+PACKAGECONFIG ?= "readline"
+PACKAGECONFIG[device-mapper] = "--enable-device-mapper,--disable-device-mapper,libdevmapper lvm2"
+PACKAGECONFIG[readline] = "--with-readline,--without-readline,readline"
+
+BBCLASSEXTEND = "native nativesdk"
+
+do_compile_ptest() {
+ oe_runmake -C tests print-align print-max dup-clobber duplicate fs-resize print-flags
+}
+
+do_install_ptest() {
+ t=${D}${PTEST_PATH}
+ mkdir $t/build-aux
+ cp ${S}/build-aux/test-driver $t/build-aux/
+ cp -r ${S}/tests $t
+ cp ${B}/tests/Makefile $t/tests/
+ mkdir $t/lib
+ cp ${B}/lib/config.h $t/lib
+ sed -i "s|^VERSION.*|VERSION = ${PV}|g" $t/tests/Makefile
+ sed -i "s|^srcdir =.*|srcdir = \.|g" $t/tests/Makefile
+ sed -i "s|^abs_srcdir =.*|abs_srcdir = \.|g" $t/tests/Makefile
+ sed -i "s|^abs_top_srcdir =.*|abs_top_srcdir = "${PTEST_PATH}"|g" $t/tests/Makefile
+ sed -i "s|^abs_top_builddir =.*|abs_top_builddir = "${PTEST_PATH}"|g" $t/tests/Makefile
+ sed -i "s|^Makefile:.*|Makefile:|g" $t/tests/Makefile
+ sed -i "/^BUILDINFO.*$/d" $t/tests/Makefile
+ for i in print-align print-max print-flags dup-clobber duplicate fs-resize; \
+ do cp ${B}/tests/.libs/$i $t/tests/; \
+ done
+ sed -e 's| ../parted||' -i $t/tests/*.sh
+}
+
+RDEPENDS:${PN}-ptest = "bash coreutils perl util-linux-losetup util-linux-mkswap python3 make gawk e2fsprogs-mke2fs e2fsprogs-tune2fs python3-core dosfstools"
+RRECOMMENDS:${PN}-ptest += "kernel-module-scsi-debug kernel-module-loop kernel-module-vfat"
+RDEPENDS:${PN}-ptest:append:libc-glibc = "\
+ glibc-utils \
+ locale-base-en-us \
+ "
+
+inherit update-alternatives
+
+ALTERNATIVE_PRIORITY = "100"
+ALTERNATIVE:${PN} = "partprobe"
+ALTERNATIVE_LINK_NAME[partprobe] = "${sbindir}/partprobe"
diff --git a/meta/recipes-extended/perl/libconvert-asn1-perl_0.33.bb b/meta/recipes-extended/perl/libconvert-asn1-perl_0.33.bb
deleted file mode 100644
index 2714582957..0000000000
--- a/meta/recipes-extended/perl/libconvert-asn1-perl_0.33.bb
+++ /dev/null
@@ -1,21 +0,0 @@
-SUMMARY = "Convert::ASN1 - Perl ASN.1 Encode/Decode library"
-SECTION = "libs"
-HOMEPAGE = "http://search.cpan.org/dist/Convert-ASN1/"
-DESCRIPTION = "Convert::ASN1 is a perl library for encoding/decoding data using ASN.1 definitions."
-LICENSE = "Artistic-1.0 | GPL-1.0-or-later"
-LIC_FILES_CHKSUM = "file://README.md;beginline=91;endline=97;md5=ceff7fd286eb6d8e8e0d3d23e096a63f"
-
-SRC_URI = "https://cpan.metacpan.org/authors/id/T/TI/TIMLEGGE/Convert-ASN1-${PV}.tar.gz"
-
-SRC_URI[sha256sum] = "1fdf004520c79e3a244cf9688616293516c11793d746c761f367496eb3d06076"
-
-S = "${WORKDIR}/Convert-ASN1-${PV}"
-
-inherit cpan ptest-perl
-
-EXTRA_PERLFLAGS = "-I ${PERLHOSTLIB}"
-
-RDEPENDS:${PN} += "perl-module-exporter perl-module-constant perl-module-encode perl-module-encode-encoding perl-module-utf8 perl-module-socket perl-module-time-local perl-module-posix"
-RDEPENDS:${PN}-ptest += "perl-module-math-bigint perl-module-io-socket perl-module-data-dumper perl-module-math-bigint-calc"
-
-BBCLASSEXTEND = "native"
diff --git a/meta/recipes-extended/perl/libconvert-asn1-perl_0.34.bb b/meta/recipes-extended/perl/libconvert-asn1-perl_0.34.bb
new file mode 100644
index 0000000000..6e9f881773
--- /dev/null
+++ b/meta/recipes-extended/perl/libconvert-asn1-perl_0.34.bb
@@ -0,0 +1,22 @@
+SUMMARY = "Convert::ASN1 - Perl ASN.1 Encode/Decode library"
+SECTION = "libs"
+HOMEPAGE = "http://search.cpan.org/dist/Convert-ASN1/"
+DESCRIPTION = "Convert::ASN1 is a perl library for encoding/decoding data using ASN.1 definitions."
+LICENSE = "Artistic-1.0 | GPL-1.0-or-later"
+LIC_FILES_CHKSUM = "file://README.md;beginline=91;endline=97;md5=ceff7fd286eb6d8e8e0d3d23e096a63f"
+
+SRC_URI = "https://cpan.metacpan.org/authors/id/T/TI/TIMLEGGE/Convert-ASN1-${PV}.tar.gz"
+
+SRC_URI[sha256sum] = "a628d7c9d390568fb76359975fa03f626ce57f10dc17980e8e3587d7713e4ee7"
+
+S = "${WORKDIR}/Convert-ASN1-${PV}"
+
+inherit cpan ptest-perl
+
+EXTRA_PERLFLAGS = "-I ${PERLHOSTLIB}"
+
+RDEPENDS:${PN} += "perl-module-exporter perl-module-constant perl-module-encode perl-module-encode-encoding perl-module-utf8 \
+ perl-module-socket perl-module-time-local perl-module-posix perl-module-scalar-util perl-module-test-more"
+RDEPENDS:${PN}-ptest += "perl-module-math-bigint perl-module-io-socket perl-module-data-dumper perl-module-math-bigint-calc"
+
+BBCLASSEXTEND = "native"
diff --git a/meta/recipes-extended/perl/libxml-sax-perl_1.02.bb b/meta/recipes-extended/perl/libxml-sax-perl_1.02.bb
index bc2aa0821e..b5b111d227 100644
--- a/meta/recipes-extended/perl/libxml-sax-perl_1.02.bb
+++ b/meta/recipes-extended/perl/libxml-sax-perl_1.02.bb
@@ -29,7 +29,7 @@ do_install_ptest() {
chown -R root:root ${D}${PTEST_PATH}/testfiles
}
-RDEPENDS:${PN} += "perl-module-encode perl-module-perlio"
+RDEPENDS:${PN} += "perl-module-encode perl-module-perlio perl-module-encode-encoding"
RDEPENDS:${PN}-ptest += " \
perl-module-base \
diff --git a/meta/recipes-extended/pigz/pigz_2.7.bb b/meta/recipes-extended/pigz/pigz_2.7.bb
deleted file mode 100644
index 9a1c591b27..0000000000
--- a/meta/recipes-extended/pigz/pigz_2.7.bb
+++ /dev/null
@@ -1,48 +0,0 @@
-SUMMARY = "A parallel implementation of gzip"
-DESCRIPTION = "pigz, which stands for parallel implementation of gzip, is a \
-fully functional replacement for gzip that exploits multiple processors and \
-multiple cores to the hilt when compressing data. pigz was written by Mark \
-Adler, and uses the zlib and pthread libraries."
-HOMEPAGE = "http://zlib.net/pigz/"
-SECTION = "console/utils"
-LICENSE = "Zlib & Apache-2.0"
-LIC_FILES_CHKSUM = "file://pigz.c;md5=9ae6dee8ceba9610596ed0ada493d142;beginline=7;endline=21"
-
-SRC_URI = "http://zlib.net/${BPN}/fossils/${BP}.tar.gz"
-SRC_URI[sha256sum] = "b4c9e60344a08d5db37ca7ad00a5b2c76ccb9556354b722d56d55ca7e8b1c707"
-PROVIDES:class-native += "gzip-native"
-
-# Point this at the homepage in case /fossils/ isn't updated
-UPSTREAM_CHECK_URI = "http://zlib.net/${BPN}/"
-UPSTREAM_CHECK_REGEX = "pigz-(?P<pver>.*)\.tar"
-
-DEPENDS = "zlib"
-
-EXTRA_OEMAKE = "-e MAKEFLAGS="
-
-inherit update-alternatives
-
-do_install() {
- # Install files into /bin (FHS), which is typical place for gzip
- install -d ${D}${base_bindir}
- install ${B}/pigz ${D}${base_bindir}/pigz
- ln -nsf pigz ${D}${base_bindir}/unpigz
- ln -nsf pigz ${D}${base_bindir}/pigzcat
-}
-
-do_install:append:class-native() {
- install -d ${D}${bindir}
- install ${B}/pigz ${D}${bindir}/gzip
- ln -nsf gzip ${D}${bindir}/gunzip
- ln -nsf gzip ${D}${bindir}/zcat
-}
-
-ALTERNATIVE_PRIORITY = "110"
-ALTERNATIVE:${PN} = "gunzip gzip zcat"
-ALTERNATIVE:${PN}:class-nativesdk = ""
-ALTERNATIVE_LINK_NAME[gunzip] = "${base_bindir}/gunzip"
-ALTERNATIVE_LINK_NAME[gzip] = "${base_bindir}/gzip"
-ALTERNATIVE_LINK_NAME[zcat] = "${base_bindir}/zcat"
-ALTERNATIVE_TARGET = "${base_bindir}/pigz"
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-extended/pigz/pigz_2.8.bb b/meta/recipes-extended/pigz/pigz_2.8.bb
new file mode 100644
index 0000000000..fcf0c93e41
--- /dev/null
+++ b/meta/recipes-extended/pigz/pigz_2.8.bb
@@ -0,0 +1,48 @@
+SUMMARY = "A parallel implementation of gzip"
+DESCRIPTION = "pigz, which stands for parallel implementation of gzip, is a \
+fully functional replacement for gzip that exploits multiple processors and \
+multiple cores to the hilt when compressing data. pigz was written by Mark \
+Adler, and uses the zlib and pthread libraries."
+HOMEPAGE = "http://zlib.net/pigz/"
+SECTION = "console/utils"
+LICENSE = "Zlib & Apache-2.0"
+LIC_FILES_CHKSUM = "file://pigz.c;md5=9ae6dee8ceba9610596ed0ada493d142;beginline=7;endline=21"
+
+SRC_URI = "http://zlib.net/${BPN}/fossils/${BP}.tar.gz"
+SRC_URI[sha256sum] = "eb872b4f0e1f0ebe59c9f7bd8c506c4204893ba6a8492de31df416f0d5170fd0"
+PROVIDES:class-native += "gzip-native"
+
+# Point this at the homepage in case /fossils/ isn't updated
+UPSTREAM_CHECK_URI = "http://zlib.net/${BPN}/"
+UPSTREAM_CHECK_REGEX = "pigz-(?P<pver>.*)\.tar"
+
+DEPENDS = "zlib"
+
+EXTRA_OEMAKE = "-e MAKEFLAGS="
+
+inherit update-alternatives
+
+do_install() {
+ # Install files into /bin (FHS), which is typical place for gzip
+ install -d ${D}${base_bindir}
+ install ${B}/pigz ${D}${base_bindir}/pigz
+ ln -nsf pigz ${D}${base_bindir}/unpigz
+ ln -nsf pigz ${D}${base_bindir}/pigzcat
+}
+
+do_install:append:class-native() {
+ install -d ${D}${bindir}
+ install ${B}/pigz ${D}${bindir}/gzip
+ ln -nsf gzip ${D}${bindir}/gunzip
+ ln -nsf gzip ${D}${bindir}/zcat
+}
+
+ALTERNATIVE_PRIORITY = "110"
+ALTERNATIVE:${PN} = "gunzip gzip zcat"
+ALTERNATIVE:${PN}:class-nativesdk = ""
+ALTERNATIVE_LINK_NAME[gunzip] = "${base_bindir}/gunzip"
+ALTERNATIVE_LINK_NAME[gzip] = "${base_bindir}/gzip"
+ALTERNATIVE_LINK_NAME[zcat] = "${base_bindir}/zcat"
+ALTERNATIVE_TARGET = "${base_bindir}/pigz"
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-extended/procps/procps/0001-w.c-correct-musl-builds.patch b/meta/recipes-extended/procps/procps/0001-w.c-correct-musl-builds.patch
deleted file mode 100644
index c92ad28e4f..0000000000
--- a/meta/recipes-extended/procps/procps/0001-w.c-correct-musl-builds.patch
+++ /dev/null
@@ -1,44 +0,0 @@
-From 22f8d25567b8d64bdbab0fb0b4915b4362561d9b Mon Sep 17 00:00:00 2001
-From: Alexander Kanavin <alex.kanavin@gmail.com>
-Date: Wed, 24 Feb 2021 21:14:31 +0000
-Subject: [PATCH] w.c: correct musl builds
-
-No need to redefine UT_ stuff to something that does not exist.
-
-UT_ is already provided in musl but via utmp.h header, so include
-it always.
-
-Upstream-Status: Submitted [https://gitlab.com/procps-ng/procps/-/merge_requests/126]
-Signed-off-by: Alexander Kanavin <alex.kanavin@gmail.com>
----
- w.c | 9 +--------
- 1 file changed, 1 insertion(+), 8 deletions(-)
-
-diff --git a/w.c b/w.c
-index 9d07ac9..d10639b 100644
---- a/w.c
-+++ b/w.c
-@@ -57,9 +57,8 @@
- #include <unistd.h>
- #ifdef HAVE_UTMPX_H
- # include <utmpx.h>
--#else
--# include <utmp.h>
- #endif
-+#include <utmp.h>
- #include <arpa/inet.h>
-
- static int ignoreuser = 0; /* for '-u' */
-@@ -72,12 +71,6 @@ typedef struct utmpx utmp_t;
- typedef struct utmp utmp_t;
- #endif
-
--#if !defined(UT_HOSTSIZE) || defined(__UT_HOSTSIZE)
--# define UT_HOSTSIZE __UT_HOSTSIZE
--# define UT_LINESIZE __UT_LINESIZE
--# define UT_NAMESIZE __UT_NAMESIZE
--#endif
--
- #ifdef W_SHOWFROM
- # define FROM_STRING "on"
- #else
diff --git a/meta/recipes-extended/procps/procps/0002-proc-escape.c-add-missing-include.patch b/meta/recipes-extended/procps/procps/0002-proc-escape.c-add-missing-include.patch
deleted file mode 100644
index 5fa1ac9d78..0000000000
--- a/meta/recipes-extended/procps/procps/0002-proc-escape.c-add-missing-include.patch
+++ /dev/null
@@ -1,23 +0,0 @@
-From 4f964821398dff7ab21fec63da15e1e00b2e9277 Mon Sep 17 00:00:00 2001
-From: Alexander Kanavin <alex.kanavin@gmail.com>
-Date: Wed, 24 Feb 2021 21:16:14 +0000
-Subject: [PATCH] proc/escape.c: add missing include
-
-Upstream-Status: Submitted [https://gitlab.com/procps-ng/procps/-/merge_requests/126]
-Signed-off-by: Alexander Kanavin <alex.kanavin@gmail.com>
----
- proc/escape.c | 1 +
- 1 file changed, 1 insertion(+)
-
-diff --git a/proc/escape.c b/proc/escape.c
-index 2e8fb7d..e1f4612 100644
---- a/proc/escape.c
-+++ b/proc/escape.c
-@@ -21,6 +21,7 @@
- #include <sys/types.h>
- #include <string.h>
- #include <limits.h>
-+#include <langinfo.h>
- #include "procps.h"
- #include "escape.h"
- #include "readproc.h"
diff --git a/meta/recipes-extended/procps/procps_3.3.17.bb b/meta/recipes-extended/procps/procps_3.3.17.bb
deleted file mode 100644
index 0f5575c9ab..0000000000
--- a/meta/recipes-extended/procps/procps_3.3.17.bb
+++ /dev/null
@@ -1,103 +0,0 @@
-SUMMARY = "System and process monitoring utilities"
-DESCRIPTION = "Procps contains a set of system utilities that provide system information about processes using \
-the /proc filesystem. The package includes the programs ps, top, vmstat, w, kill, and skill."
-HOMEPAGE = "https://gitlab.com/procps-ng/procps"
-SECTION = "base"
-LICENSE = "GPL-2.0-or-later & LGPL-2.0-or-later"
-LIC_FILES_CHKSUM = "file://COPYING;md5=b234ee4d69f5fce4486a80fdaf4a4263 \
- file://COPYING.LIB;md5=4cf66a4984120007c9881cc871cf49db \
- "
-
-DEPENDS = "ncurses"
-
-inherit autotools gettext pkgconfig update-alternatives
-
-SRC_URI = "git://gitlab.com/procps-ng/procps.git;protocol=https;branch=master \
- file://sysctl.conf \
- file://0001-w.c-correct-musl-builds.patch \
- file://0002-proc-escape.c-add-missing-include.patch \
- "
-SRCREV = "19a508ea121c0c4ac6d0224575a036de745eaaf8"
-
-S = "${WORKDIR}/git"
-
-# Upstream has a custom autogen.sh which invokes po/update-potfiles as they
-# don't ship a po/POTFILES.in (which is silly). Without that file gettext
-# doesn't believe po/ is a gettext directory and won't generate po/Makefile.
-do_configure:prepend() {
- ( cd ${S} && po/update-potfiles )
-}
-
-EXTRA_OECONF = "--enable-skill --disable-modern-top"
-
-PACKAGECONFIG ??= "${@bb.utils.filter('DISTRO_FEATURES', 'systemd', d)}"
-PACKAGECONFIG[systemd] = "--with-systemd,--without-systemd,systemd"
-
-do_install:append () {
- install -d ${D}${base_bindir}
- [ "${bindir}" != "${base_bindir}" ] && for i in ${base_bindir_progs}; do mv ${D}${bindir}/$i ${D}${base_bindir}/$i; done
- install -d ${D}${base_sbindir}
- [ "${sbindir}" != "${base_sbindir}" ] && for i in ${base_sbindir_progs}; do mv ${D}${sbindir}/$i ${D}${base_sbindir}/$i; done
- if [ "${base_sbindir}" != "${sbindir}" ]; then
- rmdir ${D}${sbindir}
- fi
-
- install -d ${D}${sysconfdir}
- install -m 0644 ${WORKDIR}/sysctl.conf ${D}${sysconfdir}/sysctl.conf
- if ${@bb.utils.contains('DISTRO_FEATURES','systemd','true','false',d)}; then
- install -d ${D}${sysconfdir}/sysctl.d
- ln -sf ../sysctl.conf ${D}${sysconfdir}/sysctl.d/99-sysctl.conf
- fi
-}
-
-CONFFILES:${PN} = "${sysconfdir}/sysctl.conf"
-
-bindir_progs = "free pkill pmap pgrep pwdx skill snice top uptime w"
-base_bindir_progs += "kill pidof ps watch"
-base_sbindir_progs += "sysctl"
-
-ALTERNATIVE_PRIORITY = "200"
-ALTERNATIVE_PRIORITY[pidof] = "150"
-
-ALTERNATIVE:${PN} = "${bindir_progs} ${base_bindir_progs} ${base_sbindir_progs}"
-
-ALTERNATIVE:${PN}-doc = "kill.1 uptime.1"
-ALTERNATIVE_LINK_NAME[kill.1] = "${mandir}/man1/kill.1"
-ALTERNATIVE_LINK_NAME[uptime.1] = "${mandir}/man1/uptime.1"
-
-python __anonymous() {
- for prog in d.getVar('base_bindir_progs').split():
- d.setVarFlag('ALTERNATIVE_LINK_NAME', prog, '%s/%s' % (d.getVar('base_bindir'), prog))
-
- for prog in d.getVar('base_sbindir_progs').split():
- d.setVarFlag('ALTERNATIVE_LINK_NAME', prog, '%s/%s' % (d.getVar('base_sbindir'), prog))
-}
-
-# 'ps' isn't suitable for use as a security tool so whitelist this CVE.
-# https://bugzilla.redhat.com/show_bug.cgi?id=1575473#c3
-CVE_CHECK_IGNORE += "CVE-2018-1121"
-
-PROCPS_PACKAGES = "${PN}-lib \
- ${PN}-ps \
- ${PN}-sysctl"
-
-PACKAGE_BEFORE_PN = "${PROCPS_PACKAGES}"
-RDEPENDS:${PN} += "${PROCPS_PACKAGES}"
-
-RDEPENDS:${PN}-ps += "${PN}-lib"
-RDEPENDS:${PN}-sysctl += "${PN}-lib"
-
-FILES:${PN}-lib = "${libdir}"
-FILES:${PN}-ps = "${base_bindir}/ps.${BPN}"
-FILES:${PN}-sysctl = "${base_sbindir}/sysctl.${BPN} ${sysconfdir}/sysctl.conf ${sysconfdir}/sysctl.d"
-
-ALTERNATIVE:${PN}:remove = "ps"
-ALTERNATIVE:${PN}:remove = "sysctl"
-
-ALTERNATIVE:${PN}-ps = "ps"
-ALTERNATIVE_TARGET[ps] = "${base_bindir}/ps"
-ALTERNATIVE_LINK_NAME[ps] = "${base_bindir}/ps"
-
-ALTERNATIVE:${PN}-sysctl = "sysctl"
-ALTERNATIVE_TARGET[sysctl] = "${base_sbindir}/sysctl"
-ALTERNATIVE_LINK_NAME[sysctl] = "${base_sbindir}/sysctl" \ No newline at end of file
diff --git a/meta/recipes-extended/procps/procps_4.0.4.bb b/meta/recipes-extended/procps/procps_4.0.4.bb
new file mode 100644
index 0000000000..800384f22f
--- /dev/null
+++ b/meta/recipes-extended/procps/procps_4.0.4.bb
@@ -0,0 +1,97 @@
+SUMMARY = "System and process monitoring utilities"
+DESCRIPTION = "Procps contains a set of system utilities that provide system information about processes using \
+the /proc filesystem. The package includes the programs ps, top, vmstat, w, kill, and skill."
+HOMEPAGE = "https://gitlab.com/procps-ng/procps"
+SECTION = "base"
+LICENSE = "GPL-2.0-or-later & LGPL-2.0-or-later"
+LIC_FILES_CHKSUM = "file://COPYING;md5=b234ee4d69f5fce4486a80fdaf4a4263 \
+ file://COPYING.LIB;md5=4cf66a4984120007c9881cc871cf49db \
+ "
+
+DEPENDS = "ncurses"
+
+inherit autotools gettext pkgconfig update-alternatives
+
+SRC_URI = "git://gitlab.com/procps-ng/procps.git;protocol=https;branch=master \
+ file://sysctl.conf \
+ "
+SRCREV = "4ddcef2fd843170c8e2d59a83042978f41037a2b"
+
+S = "${WORKDIR}/git"
+
+# Upstream has a custom autogen.sh which invokes po/update-potfiles as they
+# don't ship a po/POTFILES.in (which is silly). Without that file gettext
+# doesn't believe po/ is a gettext directory and won't generate po/Makefile.
+do_configure:prepend() {
+ ( cd ${S} && po/update-potfiles )
+}
+
+EXTRA_OECONF = "--enable-skill --disable-modern-top"
+
+PACKAGECONFIG ??= "${@bb.utils.filter('DISTRO_FEATURES', 'systemd', d)}"
+PACKAGECONFIG[systemd] = "--with-systemd,--without-systemd,systemd"
+
+do_install:append () {
+ install -d ${D}${base_bindir}
+ [ "${bindir}" != "${base_bindir}" ] && for i in ${base_bindir_progs}; do mv ${D}${bindir}/$i ${D}${base_bindir}/$i; done
+ install -d ${D}${base_sbindir}
+ [ "${sbindir}" != "${base_sbindir}" ] && for i in ${base_sbindir_progs}; do mv ${D}${sbindir}/$i ${D}${base_sbindir}/$i; done
+ if [ "${base_sbindir}" != "${sbindir}" ]; then
+ rmdir ${D}${sbindir}
+ fi
+
+ install -d ${D}${sysconfdir}
+ install -m 0644 ${WORKDIR}/sysctl.conf ${D}${sysconfdir}/sysctl.conf
+ if ${@bb.utils.contains('DISTRO_FEATURES','systemd','true','false',d)}; then
+ install -d ${D}${sysconfdir}/sysctl.d
+ ln -sf ../sysctl.conf ${D}${sysconfdir}/sysctl.d/99-sysctl.conf
+ fi
+}
+
+CONFFILES:${PN} = "${sysconfdir}/sysctl.conf"
+
+bindir_progs = "free pkill pmap pgrep pwdx skill snice top uptime w"
+base_bindir_progs += "kill pidof ps watch"
+base_sbindir_progs += "sysctl"
+
+ALTERNATIVE_PRIORITY = "200"
+ALTERNATIVE_PRIORITY[pidof] = "150"
+
+ALTERNATIVE:${PN} = "${bindir_progs} ${base_bindir_progs} ${base_sbindir_progs}"
+
+ALTERNATIVE:${PN}-doc = "kill.1 uptime.1"
+ALTERNATIVE_LINK_NAME[kill.1] = "${mandir}/man1/kill.1"
+ALTERNATIVE_LINK_NAME[uptime.1] = "${mandir}/man1/uptime.1"
+
+python __anonymous() {
+ for prog in d.getVar('base_bindir_progs').split():
+ d.setVarFlag('ALTERNATIVE_LINK_NAME', prog, '%s/%s' % (d.getVar('base_bindir'), prog))
+
+ for prog in d.getVar('base_sbindir_progs').split():
+ d.setVarFlag('ALTERNATIVE_LINK_NAME', prog, '%s/%s' % (d.getVar('base_sbindir'), prog))
+}
+
+PROCPS_PACKAGES = "${PN}-lib \
+ ${PN}-ps \
+ ${PN}-sysctl"
+
+PACKAGE_BEFORE_PN = "${PROCPS_PACKAGES}"
+RDEPENDS:${PN} += "${PROCPS_PACKAGES}"
+
+RDEPENDS:${PN}-ps += "${PN}-lib"
+RDEPENDS:${PN}-sysctl += "${PN}-lib"
+
+FILES:${PN}-lib = "${libdir}"
+FILES:${PN}-ps = "${base_bindir}/ps.${BPN}"
+FILES:${PN}-sysctl = "${base_sbindir}/sysctl.${BPN} ${sysconfdir}/sysctl.conf ${sysconfdir}/sysctl.d"
+
+ALTERNATIVE:${PN}:remove = "ps"
+ALTERNATIVE:${PN}:remove = "sysctl"
+
+ALTERNATIVE:${PN}-ps = "ps"
+ALTERNATIVE_TARGET[ps] = "${base_bindir}/ps"
+ALTERNATIVE_LINK_NAME[ps] = "${base_bindir}/ps"
+
+ALTERNATIVE:${PN}-sysctl = "sysctl"
+ALTERNATIVE_TARGET[sysctl] = "${base_sbindir}/sysctl"
+ALTERNATIVE_LINK_NAME[sysctl] = "${base_sbindir}/sysctl"
diff --git a/meta/recipes-extended/psmisc/psmisc.inc b/meta/recipes-extended/psmisc/psmisc.inc
index 12539dad53..23e98d21be 100644
--- a/meta/recipes-extended/psmisc/psmisc.inc
+++ b/meta/recipes-extended/psmisc/psmisc.inc
@@ -26,6 +26,7 @@ do_configure:prepend() {
PACKAGECONFIG ??= "${@bb.utils.filter('DISTRO_FEATURES', 'ipv6', d)}"
PACKAGECONFIG[ipv6] = "--enable-ipv6,--disable-ipv6,"
+PACKAGECONFIG[selinux] = "--enable-selinux,--disable-selinux,libselinux"
ALLOW_EMPTY:${PN} = "1"
@@ -54,3 +55,5 @@ ALTERNATIVE_PRIORITY = "90"
ALTERNATIVE:killall = "killall"
ALTERNATIVE:fuser = "fuser"
+
+ALTERNATIVE:pstree = "pstree"
diff --git a/meta/recipes-extended/psmisc/psmisc/0001-Use-UINTPTR_MAX-instead-of-__WORDSIZE.patch b/meta/recipes-extended/psmisc/psmisc/0001-Use-UINTPTR_MAX-instead-of-__WORDSIZE.patch
index ca13bdbd15..01335bdf40 100644
--- a/meta/recipes-extended/psmisc/psmisc/0001-Use-UINTPTR_MAX-instead-of-__WORDSIZE.patch
+++ b/meta/recipes-extended/psmisc/psmisc/0001-Use-UINTPTR_MAX-instead-of-__WORDSIZE.patch
@@ -1,4 +1,4 @@
-From 115fcf1daff18aa2f2e130d63704f04031878db0 Mon Sep 17 00:00:00 2001
+From 338d2d46d1c20ebadf317938af98d0532a62f8d4 Mon Sep 17 00:00:00 2001
From: Khem Raj <raj.khem@gmail.com>
Date: Thu, 24 Mar 2016 15:46:14 +0000
Subject: [PATCH] Use UINTPTR_MAX instead of __WORDSIZE
@@ -13,7 +13,7 @@ Upstream-Status: Pending
1 file changed, 8 insertions(+), 5 deletions(-)
diff --git a/src/peekfd.c b/src/peekfd.c
-index 5aa990a..7e8e3fc 100644
+index 36dff04..2b4b1dc 100644
--- a/src/peekfd.c
+++ b/src/peekfd.c
@@ -30,8 +30,11 @@
@@ -28,7 +28,7 @@ index 5aa990a..7e8e3fc 100644
#include <getopt.h>
#include <ctype.h>
#include <dirent.h>
-@@ -266,11 +269,11 @@ int main(int argc, char **argv)
+@@ -341,11 +344,11 @@ int main(int argc, char **argv)
if (WIFSTOPPED(status)) {
#ifdef PPC
struct pt_regs regs;
diff --git a/meta/recipes-extended/psmisc/psmisc_23.5.bb b/meta/recipes-extended/psmisc/psmisc_23.5.bb
deleted file mode 100644
index 1c390f3615..0000000000
--- a/meta/recipes-extended/psmisc/psmisc_23.5.bb
+++ /dev/null
@@ -1,9 +0,0 @@
-require psmisc.inc
-LICENSE = "GPL-2.0-only"
-LIC_FILES_CHKSUM = "file://COPYING;md5=0636e73ff0215e8d672dc4c32c317bb3"
-
-SRC_URI = "git://gitlab.com/psmisc/psmisc.git;protocol=https;branch=master \
- file://0001-Use-UINTPTR_MAX-instead-of-__WORDSIZE.patch \
- "
-SRCREV = "17353f3fddef6b360bc47f7834f2cd011bea2b87"
-S = "${WORKDIR}/git"
diff --git a/meta/recipes-extended/psmisc/psmisc_23.7.bb b/meta/recipes-extended/psmisc/psmisc_23.7.bb
new file mode 100644
index 0000000000..ea272cd92d
--- /dev/null
+++ b/meta/recipes-extended/psmisc/psmisc_23.7.bb
@@ -0,0 +1,9 @@
+require psmisc.inc
+LICENSE = "GPL-2.0-only"
+LIC_FILES_CHKSUM = "file://COPYING;md5=0636e73ff0215e8d672dc4c32c317bb3"
+
+SRC_URI = "git://gitlab.com/psmisc/psmisc.git;protocol=https;branch=master \
+ file://0001-Use-UINTPTR_MAX-instead-of-__WORDSIZE.patch \
+ "
+SRCREV = "9091d6dbcce3d8fb87adf9249a2eb346d25a562c"
+S = "${WORKDIR}/git"
diff --git a/meta/recipes-extended/quota/quota/0001-quota-Use-realloc-3-instead-of-reallocarray-3.patch b/meta/recipes-extended/quota/quota/0001-quota-Use-realloc-3-instead-of-reallocarray-3.patch
deleted file mode 100644
index 34ded2d857..0000000000
--- a/meta/recipes-extended/quota/quota/0001-quota-Use-realloc-3-instead-of-reallocarray-3.patch
+++ /dev/null
@@ -1,34 +0,0 @@
-From 02b222a335527f1031cc9495d8c5ebc1bc5b1d4e Mon Sep 17 00:00:00 2001
-From: Fabrice Fontaine <fontaine.fabrice@gmail.com>
-Date: Wed, 11 Nov 2020 15:00:47 +0100
-Subject: [PATCH] quota: Use realloc(3) instead of reallocarray(3)
-
-reallocarray(3) has been added to glibc relatively recently (version
-2.26, from 2017) and apparently not all users run new enough glibc. Just
-use realloc(3) for now since in this case there's no real risk of
-overflow.
-
-Signed-off-by: Fabrice Fontaine <fontaine.fabrice@gmail.com>
-Signed-off-by: Jan Kara <jack@suse.cz>
-Upstream-Status: Backport
-Signed-off-by: Alexander Kanavin <alex.kanavin@gmail.com>
----
- quota.c | 2 +-
- 1 file changed, 1 insertion(+), 1 deletion(-)
-
-diff --git a/quota.c b/quota.c
-index a6ed61f..a60de12 100644
---- a/quota.c
-+++ b/quota.c
-@@ -385,7 +385,7 @@ int main(int argc, char **argv)
- break;
- case 259:
- fscount++;
-- fsnames = reallocarray(fsnames, fscount, sizeof(char *));
-+ fsnames = realloc(fsnames, fscount * sizeof(char *));
- if (!fsnames)
- die(1, _("Not enough memory for filesystem names"));
- fsnames[fscount - 1] = optarg;
---
-2.17.1
-
diff --git a/meta/recipes-extended/quota/quota/fcntl.patch b/meta/recipes-extended/quota/quota/fcntl.patch
index 51a770ce6e..09a0c687b6 100644
--- a/meta/recipes-extended/quota/quota/fcntl.patch
+++ b/meta/recipes-extended/quota/quota/fcntl.patch
@@ -1,3 +1,8 @@
+From 00a456145531d194d3993c9f4cd404d5ca16c9df Mon Sep 17 00:00:00 2001
+From: Khem Raj <raj.khem@gmail.com>
+Date: Mon, 6 Apr 2015 17:36:44 +0000
+Subject: [PATCH] quota: Fix build with musl
+
Include fcntl.h to pacify compiler errors on musl
like
@@ -9,20 +14,14 @@ Upstream-Status: Pending
Signed-off-by: Khem Raj <raj.khem@gmail.com>
-diff --git a/quota.h b/quota.h
-index 4c21411..d20c217 100644
---- a/quota.h
-+++ b/quota.h
-@@ -182,6 +182,6 @@ enum {
- #endif
- #endif
-
--long quotactl __P((int, const char *, qid_t, caddr_t));
-+long quotactl (int, const char *, qid_t, caddr_t);
-
- #endif /* _QUOTA_ */
+---
+ quotacheck.c | 1 +
+ quotaio.c | 1 +
+ rquota_client.c | 4 ++++
+ 3 files changed, 6 insertions(+)
+
diff --git a/quotacheck.c b/quotacheck.c
-index 2cdf475..07c18a7 100644
+index bd62d9a..772a27d 100644
--- a/quotacheck.c
+++ b/quotacheck.c
@@ -19,6 +19,7 @@
@@ -46,7 +45,7 @@ index 94ae458..d57fc1a 100644
#include <sys/stat.h>
#include <sys/file.h>
diff --git a/rquota_client.c b/rquota_client.c
-index a3a4ae3..0ffe7a9 100644
+index 7f8e821..d48505a 100644
--- a/rquota_client.c
+++ b/rquota_client.c
@@ -19,7 +19,9 @@
diff --git a/meta/recipes-extended/quota/quota_4.06.bb b/meta/recipes-extended/quota/quota_4.06.bb
deleted file mode 100644
index 0535d14c20..0000000000
--- a/meta/recipes-extended/quota/quota_4.06.bb
+++ /dev/null
@@ -1,35 +0,0 @@
-SUMMARY = "Tools for monitoring & limiting user disk usage per filesystem"
-SECTION = "base"
-HOMEPAGE = "http://sourceforge.net/projects/linuxquota/"
-DESCRIPTION = "Tools and patches for the Linux Diskquota system as part of the Linux kernel"
-BUGTRACKER = "http://sourceforge.net/tracker/?group_id=18136&atid=118136"
-LICENSE = "GPL-2.0-or-later & LGPL-2.1-or-later"
-LIC_FILES_CHKSUM = "file://rquota_server.c;beginline=1;endline=20;md5=fe7e0d7e11c6f820f8fa62a5af71230f \
- file://svc_socket.c;beginline=1;endline=17;md5=24d5a8792da45910786eeac750be8ceb"
-
-SRC_URI = "${SOURCEFORGE_MIRROR}/project/linuxquota/quota-tools/${PV}/quota-${PV}.tar.gz \
- file://fcntl.patch \
- file://0001-quota-Use-realloc-3-instead-of-reallocarray-3.patch \
- "
-SRC_URI[sha256sum] = "2f3e03039f378d4f0d97acdb49daf581dcaad64d2e1ddf129495fd579fbd268d"
-
-CVE_PRODUCT = "linux_diskquota"
-
-UPSTREAM_CHECK_URI = "http://sourceforge.net/projects/linuxquota/files/quota-tools/"
-UPSTREAM_CHECK_REGEX = "/quota-tools/(?P<pver>(\d+[\.\-_]*)+)/"
-
-DEPENDS = "gettext-native e2fsprogs libnl dbus"
-
-inherit autotools-brokensep gettext pkgconfig
-
-CFLAGS += "${@bb.utils.contains('PACKAGECONFIG', 'rpc', '-I${STAGING_INCDIR}/tirpc', '', d)}"
-LDFLAGS += "${@bb.utils.contains('PACKAGECONFIG', 'rpc', '-ltirpc', '', d)}"
-ASNEEDED = ""
-
-PACKAGECONFIG ??= "tcp-wrappers rpc bsd"
-PACKAGECONFIG:libc-musl = "tcp-wrappers rpc"
-
-PACKAGECONFIG[tcp-wrappers] = "--enable-libwrap,--disable-libwrap,tcp-wrappers"
-PACKAGECONFIG[rpc] = "--enable-rpc,--disable-rpc,libtirpc"
-PACKAGECONFIG[bsd] = "--enable-bsd_behaviour=yes,--enable-bsd_behaviour=no,"
-PACKAGECONFIG[ldapmail] = "--enable-ldapmail,--disable-ldapmail,openldap"
diff --git a/meta/recipes-extended/quota/quota_4.09.bb b/meta/recipes-extended/quota/quota_4.09.bb
new file mode 100644
index 0000000000..b779657dfc
--- /dev/null
+++ b/meta/recipes-extended/quota/quota_4.09.bb
@@ -0,0 +1,34 @@
+SUMMARY = "Tools for monitoring & limiting user disk usage per filesystem"
+SECTION = "base"
+HOMEPAGE = "http://sourceforge.net/projects/linuxquota/"
+DESCRIPTION = "Tools and patches for the Linux Diskquota system as part of the Linux kernel"
+BUGTRACKER = "http://sourceforge.net/tracker/?group_id=18136&atid=118136"
+LICENSE = "GPL-2.0-or-later & LGPL-2.1-or-later"
+LIC_FILES_CHKSUM = "file://rquota_server.c;beginline=1;endline=20;md5=fe7e0d7e11c6f820f8fa62a5af71230f \
+ file://svc_socket.c;beginline=1;endline=17;md5=24d5a8792da45910786eeac750be8ceb"
+
+SRC_URI = "${SOURCEFORGE_MIRROR}/project/linuxquota/quota-tools/${PV}/quota-${PV}.tar.gz \
+ file://fcntl.patch \
+ "
+SRC_URI[sha256sum] = "9cdaca154bc92afc3117f0e5f5b3208dd5f84583af1cf061c39baa0a2bb142f9"
+
+CVE_PRODUCT = "linux_diskquota"
+
+UPSTREAM_CHECK_URI = "http://sourceforge.net/projects/linuxquota/files/quota-tools/"
+UPSTREAM_CHECK_REGEX = "/quota-tools/(?P<pver>(\d+[\.\-_]*)+)/"
+
+DEPENDS = "gettext-native e2fsprogs libnl dbus"
+
+inherit autotools-brokensep gettext pkgconfig
+
+CFLAGS += "${@bb.utils.contains('PACKAGECONFIG', 'rpc', '-I${STAGING_INCDIR}/tirpc', '', d)}"
+LDFLAGS += "${@bb.utils.contains('PACKAGECONFIG', 'rpc', '-ltirpc', '', d)}"
+ASNEEDED = ""
+
+PACKAGECONFIG ??= "tcp-wrappers rpc bsd"
+PACKAGECONFIG:libc-musl = "tcp-wrappers rpc"
+
+PACKAGECONFIG[tcp-wrappers] = "--enable-libwrap,--disable-libwrap,tcp-wrappers"
+PACKAGECONFIG[rpc] = "--enable-rpc,--disable-rpc,libtirpc"
+PACKAGECONFIG[bsd] = "--enable-bsd_behaviour=yes,--enable-bsd_behaviour=no,"
+PACKAGECONFIG[ldapmail] = "--enable-ldapmail,--disable-ldapmail,openldap"
diff --git a/meta/recipes-extended/rpcbind/rpcbind_1.2.6.bb b/meta/recipes-extended/rpcbind/rpcbind_1.2.6.bb
index dd89726afc..dbd4d32e0a 100644
--- a/meta/recipes-extended/rpcbind/rpcbind_1.2.6.bb
+++ b/meta/recipes-extended/rpcbind/rpcbind_1.2.6.bb
@@ -40,7 +40,7 @@ PACKAGECONFIG[systemd] = "--with-systemdsystemunitdir=${systemd_system_unitdir}/
systemd \
"
-EXTRA_OECONF += " --enable-warmstarts --with-rpcuser=rpc"
+EXTRA_OECONF += " --enable-warmstarts --with-rpcuser=rpc --with-statedir=${runtimedir}/rpcbind"
do_install:append () {
install -d ${D}${sysconfdir}/init.d
diff --git a/meta/recipes-extended/rpcsvc-proto/rpcsvc-proto.bb b/meta/recipes-extended/rpcsvc-proto/rpcsvc-proto.bb
index dd7bd2b1be..20933153a3 100644
--- a/meta/recipes-extended/rpcsvc-proto/rpcsvc-proto.bb
+++ b/meta/recipes-extended/rpcsvc-proto/rpcsvc-proto.bb
@@ -15,13 +15,13 @@ LIC_FILES_CHKSUM = "file://COPYING;md5=0daaf958d5531ab86169ec6e275e1517"
SECTION = "libs"
DEPENDS += "rpcsvc-proto-native"
-PV = "1.4.3"
+PV = "1.4.4"
-SRCREV = "71e0a12c04d130a78674ac6309eefffa6ecee612"
+SRCREV = "c65926005e50da02a4da3e26abc42eded36cd19d"
SRC_URI = "git://github.com/thkukuk/${BPN};branch=master;protocol=https \
file://0001-Use-cross-compiled-rpcgen.patch \
- "
+ "
S = "${WORKDIR}/git"
diff --git a/meta/recipes-extended/rpcsvc-proto/rpcsvc-proto/0001-Use-cross-compiled-rpcgen.patch b/meta/recipes-extended/rpcsvc-proto/rpcsvc-proto/0001-Use-cross-compiled-rpcgen.patch
index 208974004b..8e459b5634 100644
--- a/meta/recipes-extended/rpcsvc-proto/rpcsvc-proto/0001-Use-cross-compiled-rpcgen.patch
+++ b/meta/recipes-extended/rpcsvc-proto/rpcsvc-proto/0001-Use-cross-compiled-rpcgen.patch
@@ -10,14 +10,11 @@ Signed-off-by: Khem Raj <raj.khem@gmail.com>
rpcsvc/Makefile.am | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
-Index: git/rpcsvc/Makefile.am
-===================================================================
---- git.orig/rpcsvc/Makefile.am
-+++ git/rpcsvc/Makefile.am
-@@ -12,5 +12,5 @@ nodist_rpcsvc_HEADERS = klm_prot.h nlm_p
+--- a/rpcsvc/Makefile.am
++++ b/rpcsvc/Makefile.am
+@@ -12,4 +12,4 @@ nodist_rpcsvc_HEADERS = klm_prot.h nlm_p
nfs_prot.h rquota.h sm_inter.h
- %.h: %.x
+ .x.h:
- $(top_builddir)/rpcgen/rpcgen -h -o $@ $<
+ rpcgen -h -o $@ $<
-
diff --git a/meta/recipes-extended/screen/screen_4.9.0.bb b/meta/recipes-extended/screen/screen_4.9.0.bb
deleted file mode 100644
index b36173b8de..0000000000
--- a/meta/recipes-extended/screen/screen_4.9.0.bb
+++ /dev/null
@@ -1,49 +0,0 @@
-SUMMARY = "Multiplexing terminal manager"
-DESCRIPTION = "Screen is a full-screen window manager \
-that multiplexes a physical terminal between several \
-processes, typically interactive shells."
-HOMEPAGE = "http://www.gnu.org/software/screen/"
-BUGTRACKER = "https://savannah.gnu.org/bugs/?func=additem&group=screen"
-
-SECTION = "console/utils"
-
-LICENSE = "GPL-3.0-or-later"
-LIC_FILES_CHKSUM = "file://COPYING;md5=d32239bcb673463ab874e80d47fae504 \
- file://screen.h;endline=26;md5=b8dc717c9a3dba842ae6c44ca0f73f52 \
- "
-
-DEPENDS = "ncurses virtual/crypt \
- ${@bb.utils.contains('DISTRO_FEATURES', 'pam', 'libpam', '', d)}"
-RDEPENDS:${PN} = "base-files"
-
-SRC_URI = "${GNU_MIRROR}/screen/screen-${PV}.tar.gz \
- ${@bb.utils.contains('DISTRO_FEATURES', 'pam', 'file://screen.pam', '', d)} \
- file://0002-comm.h-now-depends-on-term.h.patch \
- file://0001-fix-for-multijob-build.patch \
- file://0001-Remove-more-compatibility-stuff.patch \
- "
-
-SRC_URI[sha256sum] = "f9335281bb4d1538ed078df78a20c2f39d3af9a4e91c57d084271e0289c730f4"
-
-inherit autotools texinfo
-
-PACKAGECONFIG ??= ""
-PACKAGECONFIG[utempter] = "ac_cv_header_utempter_h=yes,ac_cv_header_utempter_h=no,libutempter,"
-
-EXTRA_OECONF = "--with-pty-mode=0620 --with-pty-group=5 --with-sys-screenrc=${sysconfdir}/screenrc \
- ${@bb.utils.contains('DISTRO_FEATURES', 'pam', '--enable-pam', '--disable-pam', d)}"
-
-do_install:append () {
- install -D -m 644 ${S}/etc/etcscreenrc ${D}/${sysconfdir}/screenrc
- if [ "${@bb.utils.filter('DISTRO_FEATURES', 'pam', d)}" ]; then
- install -D -m 644 ${WORKDIR}/screen.pam ${D}/${sysconfdir}/pam.d/screen
- fi
-}
-
-pkg_postinst:${PN} () {
- grep -q "^${bindir}/screen$" $D${sysconfdir}/shells || echo ${bindir}/screen >> $D${sysconfdir}/shells
-}
-
-pkg_postrm:${PN} () {
- printf "$(grep -v "^${bindir}/screen$" $D${sysconfdir}/shells)\n" > $D${sysconfdir}/shells
-}
diff --git a/meta/recipes-extended/screen/screen_4.9.1.bb b/meta/recipes-extended/screen/screen_4.9.1.bb
new file mode 100644
index 0000000000..7b040e6b57
--- /dev/null
+++ b/meta/recipes-extended/screen/screen_4.9.1.bb
@@ -0,0 +1,49 @@
+SUMMARY = "Multiplexing terminal manager"
+DESCRIPTION = "Screen is a full-screen window manager \
+that multiplexes a physical terminal between several \
+processes, typically interactive shells."
+HOMEPAGE = "http://www.gnu.org/software/screen/"
+BUGTRACKER = "https://savannah.gnu.org/bugs/?func=additem&group=screen"
+
+SECTION = "console/utils"
+
+LICENSE = "GPL-3.0-or-later"
+LIC_FILES_CHKSUM = "file://COPYING;md5=d32239bcb673463ab874e80d47fae504 \
+ file://screen.h;endline=26;md5=b8dc717c9a3dba842ae6c44ca0f73f52 \
+ "
+
+DEPENDS = "ncurses virtual/crypt \
+ ${@bb.utils.contains('DISTRO_FEATURES', 'pam', 'libpam', '', d)}"
+RDEPENDS:${PN} = "base-files"
+
+SRC_URI = "${GNU_MIRROR}/screen/screen-${PV}.tar.gz \
+ ${@bb.utils.contains('DISTRO_FEATURES', 'pam', 'file://screen.pam', '', d)} \
+ file://0002-comm.h-now-depends-on-term.h.patch \
+ file://0001-fix-for-multijob-build.patch \
+ file://0001-Remove-more-compatibility-stuff.patch \
+ "
+
+SRC_URI[sha256sum] = "26cef3e3c42571c0d484ad6faf110c5c15091fbf872b06fa7aa4766c7405ac69"
+
+inherit autotools texinfo
+
+PACKAGECONFIG ??= ""
+PACKAGECONFIG[utempter] = "ac_cv_header_utempter_h=yes,ac_cv_header_utempter_h=no,libutempter,"
+
+EXTRA_OECONF = "--with-pty-mode=0620 --with-pty-group=5 --with-sys-screenrc=${sysconfdir}/screenrc \
+ ${@bb.utils.contains('DISTRO_FEATURES', 'pam', '--enable-pam', '--disable-pam', d)}"
+
+do_install:append () {
+ install -D -m 644 ${S}/etc/etcscreenrc ${D}/${sysconfdir}/screenrc
+ if [ "${@bb.utils.filter('DISTRO_FEATURES', 'pam', d)}" ]; then
+ install -D -m 644 ${WORKDIR}/screen.pam ${D}/${sysconfdir}/pam.d/screen
+ fi
+}
+
+pkg_postinst:${PN} () {
+ grep -q "^${bindir}/screen$" $D${sysconfdir}/shells || echo ${bindir}/screen >> $D${sysconfdir}/shells
+}
+
+pkg_postrm:${PN} () {
+ printf "$(grep -v "^${bindir}/screen$" $D${sysconfdir}/shells)\n" > $D${sysconfdir}/shells
+}
diff --git a/meta/recipes-extended/sed/sed_4.8.bb b/meta/recipes-extended/sed/sed_4.8.bb
deleted file mode 100644
index 31e971aeac..0000000000
--- a/meta/recipes-extended/sed/sed_4.8.bb
+++ /dev/null
@@ -1,68 +0,0 @@
-SUMMARY = "Stream EDitor (text filtering utility)"
-HOMEPAGE = "http://www.gnu.org/software/sed/"
-DESCRIPTION = "sed (stream editor) is a non-interactive command-line text editor."
-LICENSE = "GPL-3.0-or-later"
-LIC_FILES_CHKSUM = "file://COPYING;md5=c678957b0c8e964aa6c70fd77641a71e \
- file://sed/sed.h;beginline=1;endline=15;md5=fb3c7e6fbca6f66943859153d4be8efe \
- "
-SECTION = "console/utils"
-
-SRC_URI = "${GNU_MIRROR}/sed/sed-${PV}.tar.xz \
- file://run-ptest \
-"
-
-SRC_URI[md5sum] = "6d906edfdb3202304059233f51f9a71d"
-SRC_URI[sha256sum] = "f79b0cfea71b37a8eeec8490db6c5f7ae7719c35587f21edb0617f370eeff633"
-
-inherit autotools texinfo update-alternatives gettext ptest
-
-PACKAGECONFIG[selinux] = "--with-selinux,--without-selinux,libselinux"
-
-RDEPENDS:${PN}-ptest += "make gawk perl perl-module-filehandle perl-module-file-compare perl-module-file-find perl-module-file-temp perl-module-file-stat"
-RRECOMMENDS:${PN}-ptest:append:libc-glibc = " locale-base-ru-ru locale-base-en-us locale-base-el-gr.iso-8859-7"
-
-EXTRA_OECONF = "--disable-acl \
- "
-
-do_install () {
- autotools_do_install
- install -d ${D}${base_bindir}
- if [ ! ${D}${bindir} -ef ${D}${base_bindir} ]; then
- mv ${D}${bindir}/sed ${D}${base_bindir}/sed
- rmdir ${D}${bindir}/
- fi
-}
-
-ALTERNATIVE:${PN} = "sed"
-ALTERNATIVE_LINK_NAME[sed] = "${base_bindir}/sed"
-ALTERNATIVE_PRIORITY = "100"
-
-do_compile_ptest() {
- oe_runmake testsuite/get-mb-cur-max testsuite/test-mbrtowc
-}
-
-do_install_ptest() {
- cp -rf ${S}/testsuite/ ${D}${PTEST_PATH}
- cp -rf ${B}/testsuite/* ${D}${PTEST_PATH}/testsuite/
- cp -rf ${S}/build-aux/ ${D}${PTEST_PATH}/
- cp ${B}/Makefile ${D}${PTEST_PATH}
- cp ${S}/init.cfg ${D}${PTEST_PATH}
-
- sed -e 's/^Makefile:/_Makefile:/' -e 's/^srcdir = \(.*\)/srcdir = ./' -e 's/bash/sh/' -i ${D}${PTEST_PATH}/Makefile
- for i in `grep -rl "sed/sed" ${D}${PTEST_PATH}`; do sed -e 's/..\/sed\/sed/sed/' -i $i; done
-
- sed -e 's,--sysroot=${STAGING_DIR_TARGET},,g' \
- -e 's|${DEBUG_PREFIX_MAP}||g' \
- -e 's:${HOSTTOOLS_DIR}/::g' \
- -e 's:${RECIPE_SYSROOT_NATIVE}::g' \
- -e 's:abs_top_builddir =.*:abs_top_builddir = ..:g' \
- -e 's:abs_top_srcdir =.*:abs_top_srcdir = ..:g' \
- -e 's:abs_srcdir =.*:abs_srcdir = ..:g' \
- -e 's:top_srcdir =.*:top_srcdir = ..:g' \
- -e 's:${BASE_WORKDIR}/${MULTIMACH_TARGET_SYS}::g' \
- -i ${D}${PTEST_PATH}/Makefile
-}
-
-RPROVIDES:${PN} += "${@bb.utils.contains('DISTRO_FEATURES', 'usrmerge', '/bin/sed', '', d)}"
-
-BBCLASSEXTEND = "nativesdk"
diff --git a/meta/recipes-extended/sed/sed_4.9.bb b/meta/recipes-extended/sed/sed_4.9.bb
new file mode 100644
index 0000000000..c4d89c34b2
--- /dev/null
+++ b/meta/recipes-extended/sed/sed_4.9.bb
@@ -0,0 +1,67 @@
+SUMMARY = "Stream EDitor (text filtering utility)"
+HOMEPAGE = "http://www.gnu.org/software/sed/"
+DESCRIPTION = "sed (stream editor) is a non-interactive command-line text editor."
+LICENSE = "GPL-3.0-or-later"
+LIC_FILES_CHKSUM = "file://COPYING;md5=1ebbd3e34237af26da5dc08a4e440464 \
+ file://sed/sed.h;beginline=1;endline=15;md5=4e8e0f77bc4c1c2c02c2b90d3d24c670 \
+ "
+SECTION = "console/utils"
+
+SRC_URI = "${GNU_MIRROR}/sed/sed-${PV}.tar.xz \
+ file://run-ptest \
+"
+
+SRC_URI[sha256sum] = "6e226b732e1cd739464ad6862bd1a1aba42d7982922da7a53519631d24975181"
+
+inherit autotools texinfo update-alternatives gettext ptest
+
+PACKAGECONFIG[selinux] = "--with-selinux,--without-selinux,libselinux"
+
+RDEPENDS:${PN}-ptest += "make gawk perl perl-module-filehandle perl-module-file-compare perl-module-file-find perl-module-file-temp perl-module-file-stat"
+RRECOMMENDS:${PN}-ptest:append:libc-glibc = " locale-base-ru-ru locale-base-en-us locale-base-el-gr.iso-8859-7"
+
+EXTRA_OECONF = "--disable-acl \
+ "
+
+do_install () {
+ autotools_do_install
+ install -d ${D}${base_bindir}
+ if [ ! ${D}${bindir} -ef ${D}${base_bindir} ]; then
+ mv ${D}${bindir}/sed ${D}${base_bindir}/sed
+ rmdir ${D}${bindir}/
+ fi
+}
+
+ALTERNATIVE:${PN} = "sed"
+ALTERNATIVE_LINK_NAME[sed] = "${base_bindir}/sed"
+ALTERNATIVE_PRIORITY = "100"
+
+do_compile_ptest() {
+ oe_runmake testsuite/get-mb-cur-max testsuite/test-mbrtowc
+}
+
+do_install_ptest() {
+ cp -rf ${S}/testsuite/ ${D}${PTEST_PATH}
+ cp -rf ${B}/testsuite/* ${D}${PTEST_PATH}/testsuite/
+ cp -rf ${S}/build-aux/ ${D}${PTEST_PATH}/
+ cp ${B}/Makefile ${D}${PTEST_PATH}
+ cp ${S}/init.cfg ${D}${PTEST_PATH}
+
+ sed -e 's/^Makefile:/_Makefile:/' -e 's/^srcdir = \(.*\)/srcdir = ./' -e 's/bash/sh/' -i ${D}${PTEST_PATH}/Makefile
+ for i in `grep -rl "sed/sed" ${D}${PTEST_PATH}`; do sed -e 's/..\/sed\/sed/sed/' -i $i; done
+
+ sed -e 's,--sysroot=${STAGING_DIR_TARGET},,g' \
+ -e 's|${DEBUG_PREFIX_MAP}||g' \
+ -e 's:${HOSTTOOLS_DIR}/::g' \
+ -e 's:${RECIPE_SYSROOT_NATIVE}::g' \
+ -e 's:abs_top_builddir =.*:abs_top_builddir = ..:g' \
+ -e 's:abs_top_srcdir =.*:abs_top_srcdir = ..:g' \
+ -e 's:abs_srcdir =.*:abs_srcdir = ..:g' \
+ -e 's:top_srcdir =.*:top_srcdir = ..:g' \
+ -e 's:${BASE_WORKDIR}/${MULTIMACH_TARGET_SYS}::g' \
+ -i ${D}${PTEST_PATH}/Makefile
+}
+
+RPROVIDES:${PN} += "${@bb.utils.contains('DISTRO_FEATURES', 'usrmerge', '/bin/sed', '', d)}"
+
+BBCLASSEXTEND = "nativesdk"
diff --git a/meta/recipes-extended/shadow/files/0001-Disable-use-of-syslog-for-sysroot.patch b/meta/recipes-extended/shadow/files/0001-Disable-use-of-syslog-for-sysroot.patch
deleted file mode 100644
index 628db42136..0000000000
--- a/meta/recipes-extended/shadow/files/0001-Disable-use-of-syslog-for-sysroot.patch
+++ /dev/null
@@ -1,52 +0,0 @@
-From 8b845fff891798a03bdf21354b52e4487c2c0200 Mon Sep 17 00:00:00 2001
-From: Richard Purdie <richard.purdie@linuxfoundation.org>
-Date: Thu, 14 Apr 2022 23:11:53 +0000
-Subject: [PATCH] Disable use of syslog for shadow-native tools
-
-Disable use of syslog to prevent sysroot user and group additions from
-writing entries to the host's syslog. This patch should only be used
-with the shadow-native recipe.
-
-Upstream-Status: Inappropriate [OE specific configuration]
-Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
-Signed-off-by: Peter Kjellerstedt <peter.kjellerstedt@axis.com>
-
----
- configure.ac | 2 +-
- src/login_nopam.c | 3 ++-
- 2 files changed, 3 insertions(+), 2 deletions(-)
-
-diff --git a/configure.ac b/configure.ac
-index 5dcae19..b2c58f5 100644
---- a/configure.ac
-+++ b/configure.ac
-@@ -204,7 +204,7 @@ AC_DEFINE_UNQUOTED(PASSWD_PROGRAM, "$shadow_cv_passwd_dir/passwd",
- [Path to passwd program.])
-
- dnl XXX - quick hack, should disappear before anyone notices :).
--AC_DEFINE(USE_SYSLOG, 1, [Define to use syslog().])
-+#AC_DEFINE(USE_SYSLOG, 1, [Define to use syslog().])
- if test "$ac_cv_func_ruserok" = "yes"; then
- AC_DEFINE(RLOGIN, 1, [Define if login should support the -r flag for rlogind.])
- AC_DEFINE(RUSEROK, 0, [Define to the ruserok() "success" return value (0 or 1).])
-diff --git a/src/login_nopam.c b/src/login_nopam.c
-index df6ba88..fc24e13 100644
---- a/src/login_nopam.c
-+++ b/src/login_nopam.c
-@@ -29,7 +29,6 @@
- #ifndef USE_PAM
- #ident "$Id$"
-
--#include "prototypes.h"
- /*
- * This module implements a simple but effective form of login access
- * control based on login names and on host (or domain) names, internet
-@@ -57,6 +56,8 @@
- #include <netinet/in.h>
- #include <arpa/inet.h> /* for inet_ntoa() */
-
-+#include "prototypes.h"
-+
- #if !defined(MAXHOSTNAMELEN) || (MAXHOSTNAMELEN < 64)
- #undef MAXHOSTNAMELEN
- #define MAXHOSTNAMELEN 256
diff --git a/meta/recipes-extended/shadow/files/0001-lib-copydir-copy_entry-use-temporary-stat-buffer.patch b/meta/recipes-extended/shadow/files/0001-lib-copydir-copy_entry-use-temporary-stat-buffer.patch
new file mode 100644
index 0000000000..2e5503bfd4
--- /dev/null
+++ b/meta/recipes-extended/shadow/files/0001-lib-copydir-copy_entry-use-temporary-stat-buffer.patch
@@ -0,0 +1,37 @@
+From af4b8cb780587aa736692a3baa76b60474f19c5d Mon Sep 17 00:00:00 2001
+From: Enrico Scholz <enrico.scholz@sigma-chemnitz.de>
+Date: Mon, 18 Mar 2024 12:14:21 +0100
+Subject: [PATCH] lib/copydir:copy_entry(): use temporary stat buffer
+
+There are no guarantees that fstatat() does not clobber the stat
+buffer on errors.
+
+Use a temporary buffer so that the following code sees correct
+attributes of the source entry.
+
+Upstream-Status: Submitted [https://github.com/shadow-maint/shadow/pull/974]
+
+Signed-off-by: Enrico Scholz <enrico.scholz@sigma-chemnitz.de>
+---
+ lib/copydir.c | 3 ++-
+ 1 file changed, 2 insertions(+), 1 deletion(-)
+
+--- a/lib/copydir.c
++++ b/lib/copydir.c
+@@ -400,6 +400,7 @@ static int copy_entry (const struct path
+ {
+ int err = 0;
+ struct stat sb;
++ struct stat tmp_sb;
+ struct link_name *lp;
+ struct timespec mt[2];
+
+@@ -423,7 +424,7 @@ static int copy_entry (const struct path
+ * If the destination already exists do nothing.
+ * This is after the copy_dir above to still iterate into subdirectories.
+ */
+- if (fstatat(dst->dirfd, dst->name, &sb, AT_SYMLINK_NOFOLLOW) != -1) {
++ if (fstatat(dst->dirfd, dst->name, &tmp_sb, AT_SYMLINK_NOFOLLOW) != -1) {
+ return err;
+ }
+
diff --git a/meta/recipes-extended/shadow/files/commonio.c-fix-unexpected-open-failure-in-chroot-env.patch b/meta/recipes-extended/shadow/files/commonio.c-fix-unexpected-open-failure-in-chroot-env.patch
index 173e8a937d..cd99aad135 100644
--- a/meta/recipes-extended/shadow/files/commonio.c-fix-unexpected-open-failure-in-chroot-env.patch
+++ b/meta/recipes-extended/shadow/files/commonio.c-fix-unexpected-open-failure-in-chroot-env.patch
@@ -1,4 +1,4 @@
-From d767f776e631f1493fd7b266f2026d630ecf70fe Mon Sep 17 00:00:00 2001
+From f512071dd3a4c29d4bf048c5a89c4ba9160e37b1 Mon Sep 17 00:00:00 2001
From: Chen Qi <Qi.Chen@windriver.com>
Date: Thu, 17 Jul 2014 15:53:34 +0800
Subject: [PATCH] commonio.c-fix-unexpected-open-failure-in-chroot-env
@@ -15,32 +15,31 @@ Note that this patch doesn't change the logic in the code, it just expands
the codes.
Signed-off-by: Chen Qi <Qi.Chen@windriver.com>
-
---
lib/commonio.c | 16 ++++++++++++----
1 file changed, 12 insertions(+), 4 deletions(-)
diff --git a/lib/commonio.c b/lib/commonio.c
-index 9e0fde6..7c3a1da 100644
+index 01a26c9..82b2868 100644
--- a/lib/commonio.c
+++ b/lib/commonio.c
-@@ -624,10 +624,18 @@ int commonio_open (struct commonio_db *db, int mode)
+@@ -601,10 +601,18 @@ int commonio_open (struct commonio_db *db, int mode)
db->cursor = NULL;
db->changed = false;
- fd = open (db->filename,
- (db->readonly ? O_RDONLY : O_RDWR)
-- | O_NOCTTY | O_NONBLOCK | O_NOFOLLOW);
+- | O_NOCTTY | O_NONBLOCK | O_NOFOLLOW | O_CLOEXEC);
- saved_errno = errno;
+ if (db->readonly) {
+ fd = open (db->filename,
+ (true ? O_RDONLY : O_RDWR)
-+ | O_NOCTTY | O_NONBLOCK | O_NOFOLLOW);
++ | O_NOCTTY | O_NONBLOCK | O_NOFOLLOW | O_CLOEXEC);
+ saved_errno = errno;
+ } else {
+ fd = open (db->filename,
+ (false ? O_RDONLY : O_RDWR)
-+ | O_NOCTTY | O_NONBLOCK | O_NOFOLLOW);
++ | O_NOCTTY | O_NONBLOCK | O_NOFOLLOW| O_CLOEXEC);
+ saved_errno = errno;
+ }
+
diff --git a/meta/recipes-extended/shadow/files/login.defs_shadow-sysroot b/meta/recipes-extended/shadow/files/login.defs_shadow-sysroot
index 8a68dd341a..09df77d2e7 100644
--- a/meta/recipes-extended/shadow/files/login.defs_shadow-sysroot
+++ b/meta/recipes-extended/shadow/files/login.defs_shadow-sysroot
@@ -1,3 +1,4 @@
+# SPDX-License-Identifier: BSD-3-Clause OR Artistic-1.0
#
# /etc/login.defs - Configuration control definitions for the shadow package.
#
diff --git a/meta/recipes-extended/shadow/files/pam.d/login b/meta/recipes-extended/shadow/files/pam.d/login
index b340058539..d39e09b1ea 100644
--- a/meta/recipes-extended/shadow/files/pam.d/login
+++ b/meta/recipes-extended/shadow/files/pam.d/login
@@ -57,10 +57,6 @@ auth optional pam_group.so
# (Replaces the use of /etc/limits in old login)
session required pam_limits.so
-# Prints the last login info upon succesful login
-# (Replaces the `LASTLOG_ENAB' option from login.defs)
-session optional pam_lastlog.so
-
# Prints the motd upon succesful login
# (Replaces the `MOTD_FILE' option in login.defs)
session optional pam_motd.so
diff --git a/meta/recipes-extended/shadow/files/securetty b/meta/recipes-extended/shadow/files/securetty
index 2be341a216..820728faa6 100644
--- a/meta/recipes-extended/shadow/files/securetty
+++ b/meta/recipes-extended/shadow/files/securetty
@@ -7,6 +7,7 @@ ttyS0
ttyS1
ttyS2
ttyS3
+ttyS4
# ARM AMBA SoCs
ttyAM0
diff --git a/meta/recipes-extended/shadow/files/shadow-4.1.3-dots-in-usernames.patch b/meta/recipes-extended/shadow/files/shadow-4.1.3-dots-in-usernames.patch
deleted file mode 100644
index a7bb0a9290..0000000000
--- a/meta/recipes-extended/shadow/files/shadow-4.1.3-dots-in-usernames.patch
+++ /dev/null
@@ -1,27 +0,0 @@
-# commit message copied from openembedded:
-# commit 246c80637b135f3a113d319b163422f98174ee6c
-# Author: Khem Raj <raj.khem@gmail.com>
-# Date: Wed Jun 9 13:37:03 2010 -0700
-#
-# shadow-4.1.4.2: Add patches to support dots in login id.
-#
-# Signed-off-by: Khem Raj <raj.khem@gmail.com>
-#
-# comment added by Kevin Tian <kevin.tian@intel.com>, 2010-08-11
-
-Upstream-Status: Pending
-
-Signed-off-by: Scott Garman <scott.a.garman@intel.com>
-
-Index: shadow-4.1.4.2/libmisc/chkname.c
-===================================================================
---- shadow-4.1.4.2.orig/libmisc/chkname.c 2009-04-28 12:14:04.000000000 -0700
-+++ shadow-4.1.4.2/libmisc/chkname.c 2010-06-03 17:43:20.638973857 -0700
-@@ -61,6 +61,7 @@ static bool is_valid_name (const char *n
- ( ('0' <= *name) && ('9' >= *name) ) ||
- ('_' == *name) ||
- ('-' == *name) ||
-+ ('.' == *name) ||
- ( ('$' == *name) && ('\0' == *(name + 1)) )
- )) {
- return false;
diff --git a/meta/recipes-extended/shadow/files/shadow-relaxed-usernames.patch b/meta/recipes-extended/shadow/files/shadow-relaxed-usernames.patch
deleted file mode 100644
index cc833362e9..0000000000
--- a/meta/recipes-extended/shadow/files/shadow-relaxed-usernames.patch
+++ /dev/null
@@ -1,111 +0,0 @@
-From ca472d6866e545aaa70a70020e3226f236a8aafc Mon Sep 17 00:00:00 2001
-From: Shan Hai <shan.hai@windriver.com>
-Date: Tue, 13 Sep 2016 13:45:46 +0800
-Subject: [PATCH] shadow: use relaxed usernames
-
-The groupadd from shadow does not allow upper case group names, the
-same is true for the upstream shadow. But distributions like
-Debian/Ubuntu/CentOS has their own way to cope with this problem,
-this patch is picked up from CentOS release 7.0 to relax the usernames
-restrictions to allow the upper case group names, and the relaxation is
-POSIX compliant because POSIX indicate that usernames are composed of
-characters from the portable filename character set [A-Za-z0-9._-].
-
-Upstream-Status: Pending
-
-Signed-off-by: Shan Hai <shan.hai@windriver.com>
-
----
- libmisc/chkname.c | 30 ++++++++++++++++++------------
- man/groupadd.8.xml | 6 ------
- man/useradd.8.xml | 8 +-------
- 3 files changed, 19 insertions(+), 25 deletions(-)
-
-diff --git a/libmisc/chkname.c b/libmisc/chkname.c
-index 90f185c..65762b4 100644
---- a/libmisc/chkname.c
-+++ b/libmisc/chkname.c
-@@ -55,22 +55,28 @@ static bool is_valid_name (const char *name)
- }
-
- /*
-- * User/group names must match [a-z_][a-z0-9_-]*[$]
-- */
--
-- if (('\0' == *name) ||
-- !((('a' <= *name) && ('z' >= *name)) || ('_' == *name))) {
-+ * User/group names must match gnu e-regex:
-+ * [a-zA-Z0-9_.][a-zA-Z0-9_.-]{0,30}[a-zA-Z0-9_.$-]?
-+ *
-+ * as a non-POSIX, extension, allow "$" as the last char for
-+ * sake of Samba 3.x "add machine script"
-+ */
-+ if ( ('\0' == *name) ||
-+ !((*name >= 'a' && *name <= 'z') ||
-+ (*name >= 'A' && *name <= 'Z') ||
-+ (*name >= '0' && *name <= '9') ||
-+ (*name == '_') || (*name == '.')
-+ )) {
- return false;
- }
-
- while ('\0' != *++name) {
-- if (!(( ('a' <= *name) && ('z' >= *name) ) ||
-- ( ('0' <= *name) && ('9' >= *name) ) ||
-- ('_' == *name) ||
-- ('-' == *name) ||
-- ('.' == *name) ||
-- ( ('$' == *name) && ('\0' == *(name + 1)) )
-- )) {
-+ if (!( (*name >= 'a' && *name <= 'z') ||
-+ (*name >= 'A' && *name <= 'Z') ||
-+ (*name >= '0' && *name <= '9') ||
-+ (*name == '_') || (*name == '.') || (*name == '-') ||
-+ (*name == '$' && *(name + 1) == '\0')
-+ )) {
- return false;
- }
- }
-diff --git a/man/groupadd.8.xml b/man/groupadd.8.xml
-index 1e58f09..d804b61 100644
---- a/man/groupadd.8.xml
-+++ b/man/groupadd.8.xml
-@@ -272,12 +272,6 @@
-
- <refsect1 id='caveats'>
- <title>CAVEATS</title>
-- <para>
-- Groupnames must start with a lower case letter or an underscore,
-- followed by lower case letters, digits, underscores, or dashes.
-- They can end with a dollar sign.
-- In regular expression terms: [a-z_][a-z0-9_-]*[$]?
-- </para>
- <para>
- Groupnames may only be up to &GROUP_NAME_MAX_LENGTH; characters long.
- </para>
-diff --git a/man/useradd.8.xml b/man/useradd.8.xml
-index a16d730..c0bd777 100644
---- a/man/useradd.8.xml
-+++ b/man/useradd.8.xml
-@@ -366,7 +366,7 @@
- </term>
- <listitem>
- <para>
-- Do no create the user's home directory, even if the system
-+ Do not create the user's home directory, even if the system
- wide setting from <filename>/etc/login.defs</filename>
- (<option>CREATE_HOME</option>) is set to
- <replaceable>yes</replaceable>.
-@@ -660,12 +660,6 @@
- the user account creation request.
- </para>
-
-- <para>
-- Usernames must start with a lower case letter or an underscore,
-- followed by lower case letters, digits, underscores, or dashes.
-- They can end with a dollar sign.
-- In regular expression terms: [a-z_][a-z0-9_-]*[$]?
-- </para>
- <para>
- Usernames may only be up to 32 characters long.
- </para>
diff --git a/meta/recipes-extended/shadow/files/shadow-update-pam-conf.patch b/meta/recipes-extended/shadow/files/shadow-update-pam-conf.patch
index 15f8044fa2..1eacb8a53f 100644
--- a/meta/recipes-extended/shadow/files/shadow-update-pam-conf.patch
+++ b/meta/recipes-extended/shadow/files/shadow-update-pam-conf.patch
@@ -1,88 +1,115 @@
+From 38882ab288fd4d2cc2e45dff222ae3412c8fe357 Mon Sep 17 00:00:00 2001
+From: Kang Kai <kai.kang@windriver.com>
+Date: Wed, 20 Jul 2011 19:18:14 +0800
+Subject: [PATCH] shadow: update pam related configure files
+
The system-auth in the configure files is from Fedora which put all the 4 pam type rules
in one file.
In yocto it obey the way with Debian/Ubuntu, and the names are common-auth, common-account,
common-password and common-session.
So update them with oe way.
-Upstream-Status: Pending
+See meta/recipes-extended/pam/libpam/pam.d/common-password
+
+Upstream-Status: Inappropriate [oe-core specific]
Signed-off-by: Kang Kai <kai.kang@windriver.com>
+---
+ etc/pam.d/chage | 2 +-
+ etc/pam.d/chgpasswd | 2 +-
+ etc/pam.d/groupadd | 2 +-
+ etc/pam.d/groupdel | 2 +-
+ etc/pam.d/groupmems | 2 +-
+ etc/pam.d/groupmod | 2 +-
+ etc/pam.d/useradd | 2 +-
+ etc/pam.d/userdel | 2 +-
+ etc/pam.d/usermod | 2 +-
+ 9 files changed, 9 insertions(+), 9 deletions(-)
-diff -Nur shadow-4.1.4.3/etc/pam.d.orig/chage shadow-4.1.4.3/etc/pam.d/chage
---- shadow-4.1.4.3/etc/pam.d.orig/chage 2011-07-20 19:02:27.384844958 +0800
-+++ shadow-4.1.4.3/etc/pam.d/chage 2011-07-20 19:03:08.964844958 +0800
+diff --git a/etc/pam.d/chage b/etc/pam.d/chage
+index 8f49f5c..b1f365d 100644
+--- a/etc/pam.d/chage
++++ b/etc/pam.d/chage
@@ -1,4 +1,4 @@
#%PAM-1.0
auth sufficient pam_rootok.so
account required pam_permit.so
-password include system-auth
+password include common-password
-diff -Nur shadow-4.1.4.3/etc/pam.d.orig/chgpasswd shadow-4.1.4.3/etc/pam.d/chgpasswd
---- shadow-4.1.4.3/etc/pam.d.orig/chgpasswd 2011-07-20 19:02:27.384844958 +0800
-+++ shadow-4.1.4.3/etc/pam.d/chgpasswd 2011-07-20 19:03:26.544844958 +0800
+diff --git a/etc/pam.d/chgpasswd b/etc/pam.d/chgpasswd
+index 8f49f5c..b1f365d 100644
+--- a/etc/pam.d/chgpasswd
++++ b/etc/pam.d/chgpasswd
@@ -1,4 +1,4 @@
#%PAM-1.0
auth sufficient pam_rootok.so
account required pam_permit.so
-password include system-auth
+password include common-password
-diff -Nur shadow-4.1.4.3/etc/pam.d.orig/groupadd shadow-4.1.4.3/etc/pam.d/groupadd
---- shadow-4.1.4.3/etc/pam.d.orig/groupadd 2011-07-20 19:02:27.384844958 +0800
-+++ shadow-4.1.4.3/etc/pam.d/groupadd 2011-07-20 19:04:08.124844958 +0800
+diff --git a/etc/pam.d/groupadd b/etc/pam.d/groupadd
+index 8f49f5c..b1f365d 100644
+--- a/etc/pam.d/groupadd
++++ b/etc/pam.d/groupadd
@@ -1,4 +1,4 @@
#%PAM-1.0
auth sufficient pam_rootok.so
account required pam_permit.so
-password include system-auth
+password include common-password
-diff -Nur shadow-4.1.4.3/etc/pam.d.orig/groupdel shadow-4.1.4.3/etc/pam.d/groupdel
---- shadow-4.1.4.3/etc/pam.d.orig/groupdel 2011-07-20 19:02:27.384844958 +0800
-+++ shadow-4.1.4.3/etc/pam.d/groupdel 2011-07-20 19:04:26.114844958 +0800
+diff --git a/etc/pam.d/groupdel b/etc/pam.d/groupdel
+index 8f49f5c..b1f365d 100644
+--- a/etc/pam.d/groupdel
++++ b/etc/pam.d/groupdel
@@ -1,4 +1,4 @@
#%PAM-1.0
auth sufficient pam_rootok.so
account required pam_permit.so
-password include system-auth
+password include common-password
-diff -Nur shadow-4.1.4.3/etc/pam.d.orig/groupmems shadow-4.1.4.3/etc/pam.d/groupmems
---- shadow-4.1.4.3/etc/pam.d.orig/groupmems 2011-07-20 19:02:27.384844958 +0800
-+++ shadow-4.1.4.3/etc/pam.d/groupmems 2011-07-20 19:04:35.074844958 +0800
+diff --git a/etc/pam.d/groupmems b/etc/pam.d/groupmems
+index 8f49f5c..b1f365d 100644
+--- a/etc/pam.d/groupmems
++++ b/etc/pam.d/groupmems
@@ -1,4 +1,4 @@
#%PAM-1.0
auth sufficient pam_rootok.so
account required pam_permit.so
-password include system-auth
+password include common-password
-diff -Nur shadow-4.1.4.3/etc/pam.d.orig/groupmod shadow-4.1.4.3/etc/pam.d/groupmod
---- shadow-4.1.4.3/etc/pam.d.orig/groupmod 2011-07-20 19:02:27.384844958 +0800
-+++ shadow-4.1.4.3/etc/pam.d/groupmod 2011-07-20 19:04:44.864844958 +0800
+diff --git a/etc/pam.d/groupmod b/etc/pam.d/groupmod
+index 8f49f5c..b1f365d 100644
+--- a/etc/pam.d/groupmod
++++ b/etc/pam.d/groupmod
@@ -1,4 +1,4 @@
#%PAM-1.0
auth sufficient pam_rootok.so
account required pam_permit.so
-password include system-auth
+password include common-password
-diff -Nur shadow-4.1.4.3/etc/pam.d.orig/useradd shadow-4.1.4.3/etc/pam.d/useradd
---- shadow-4.1.4.3/etc/pam.d.orig/useradd 2011-07-20 19:02:27.384844958 +0800
-+++ shadow-4.1.4.3/etc/pam.d/useradd 2011-07-20 19:07:26.244844958 +0800
+diff --git a/etc/pam.d/useradd b/etc/pam.d/useradd
+index 8f49f5c..b1f365d 100644
+--- a/etc/pam.d/useradd
++++ b/etc/pam.d/useradd
@@ -1,4 +1,4 @@
#%PAM-1.0
auth sufficient pam_rootok.so
account required pam_permit.so
-password include system-auth
+password include common-password
-diff -Nur shadow-4.1.4.3/etc/pam.d.orig/userdel shadow-4.1.4.3/etc/pam.d/userdel
---- shadow-4.1.4.3/etc/pam.d.orig/userdel 2011-07-20 19:02:27.384844958 +0800
-+++ shadow-4.1.4.3/etc/pam.d/userdel 2011-07-20 19:07:35.734844958 +0800
+diff --git a/etc/pam.d/userdel b/etc/pam.d/userdel
+index 8f49f5c..b1f365d 100644
+--- a/etc/pam.d/userdel
++++ b/etc/pam.d/userdel
@@ -1,4 +1,4 @@
#%PAM-1.0
auth sufficient pam_rootok.so
account required pam_permit.so
-password include system-auth
+password include common-password
-diff -Nur shadow-4.1.4.3/etc/pam.d.orig/usermod shadow-4.1.4.3/etc/pam.d/usermod
---- shadow-4.1.4.3/etc/pam.d.orig/usermod 2011-07-20 19:02:27.384844958 +0800
-+++ shadow-4.1.4.3/etc/pam.d/usermod 2011-07-20 19:07:42.024844958 +0800
+diff --git a/etc/pam.d/usermod b/etc/pam.d/usermod
+index 8f49f5c..b1f365d 100644
+--- a/etc/pam.d/usermod
++++ b/etc/pam.d/usermod
@@ -1,4 +1,4 @@
#%PAM-1.0
auth sufficient pam_rootok.so
diff --git a/meta/recipes-extended/shadow/shadow-securetty_4.6.bb b/meta/recipes-extended/shadow/shadow-securetty_4.6.bb
index c78f888cf4..fe51ea1874 100644
--- a/meta/recipes-extended/shadow/shadow-securetty_4.6.bb
+++ b/meta/recipes-extended/shadow/shadow-securetty_4.6.bb
@@ -5,7 +5,6 @@ LIC_FILES_CHKSUM = "file://${COREBASE}/meta/COPYING.MIT;md5=3da9cfbcb788c80a0384
INHIBIT_DEFAULT_DEPS = "1"
-PR = "r3"
SRC_URI = "file://securetty"
diff --git a/meta/recipes-extended/shadow/shadow-sysroot_4.6.bb b/meta/recipes-extended/shadow/shadow-sysroot_4.6.bb
index e05fa237a2..00ab58b38c 100644
--- a/meta/recipes-extended/shadow/shadow-sysroot_4.6.bb
+++ b/meta/recipes-extended/shadow/shadow-sysroot_4.6.bb
@@ -3,11 +3,10 @@ HOMEPAGE = "http://github.com/shadow-maint/shadow"
BUGTRACKER = "http://github.com/shadow-maint/shadow/issues"
SECTION = "base utils"
LICENSE = "BSD-3-Clause | Artistic-1.0"
-LIC_FILES_CHKSUM = "file://login.defs_shadow-sysroot;md5=25e2f2de4dfc8f966ac5cdfce45cd7d5"
+LIC_FILES_CHKSUM = "file://login.defs_shadow-sysroot;endline=1;md5=ceddfb61608e4db87012499555184aed"
DEPENDS = "base-passwd"
-PR = "r3"
# The sole purpose of this recipe is to provide the /etc/login.defs
# file for the target sysroot - needed so the shadow-native utilities
diff --git a/meta/recipes-extended/shadow/shadow.inc b/meta/recipes-extended/shadow/shadow.inc
index f5fdf436f7..7b9763d6db 100644
--- a/meta/recipes-extended/shadow/shadow.inc
+++ b/meta/recipes-extended/shadow/shadow.inc
@@ -5,16 +5,15 @@ BUGTRACKER = "http://github.com/shadow-maint/shadow/issues"
SECTION = "base/utils"
LICENSE = "BSD-3-Clause"
LIC_FILES_CHKSUM = "file://COPYING;md5=c9a450b7be84eac23e6353efecb60b5b \
- file://src/passwd.c;beginline=2;endline=30;md5=758c26751513b6795395275969dd3be1 \
+ file://src/passwd.c;beginline=2;endline=7;md5=67bcf314687820b2f010d4863fce3fc5 \
"
DEPENDS = "virtual/crypt"
-UPSTREAM_CHECK_URI = "https://github.com/shadow-maint/shadow/releases"
-SRC_URI = "https://github.com/shadow-maint/shadow/releases/download/v${PV}/${BP}.tar.gz \
- file://shadow-4.1.3-dots-in-usernames.patch \
+GITHUB_BASE_URI = "https://github.com/shadow-maint/shadow/releases"
+SRC_URI = "${GITHUB_BASE_URI}/download/${PV}/${BP}.tar.gz \
+ file://0001-lib-copydir-copy_entry-use-temporary-stat-buffer.patch \
${@bb.utils.contains('PACKAGECONFIG', 'pam', '${PAM_SRC_URI}', '', d)} \
- file://shadow-relaxed-usernames.patch \
file://useradd \
"
@@ -24,14 +23,9 @@ SRC_URI:append:class-target = " \
"
SRC_URI:append:class-native = " \
- file://0001-Disable-use-of-syslog-for-sysroot.patch \
file://commonio.c-fix-unexpected-open-failure-in-chroot-env.patch \
"
-SRC_URI:append:class-nativesdk = " \
- file://0001-Disable-use-of-syslog-for-sysroot.patch \
- "
-
-SRC_URI[sha256sum] = "f262089be6a1011d50ec7849e14571b7b2e788334368f3dccb718513f17935ed"
+SRC_URI[sha256sum] = "377fe0d7c1a0aa5e3514c08fdf5ddc70c9dcbb391678c2134445ed97326bcc26"
# Additional Policy files for PAM
PAM_SRC_URI = "file://pam.d/chfn \
@@ -42,16 +36,18 @@ PAM_SRC_URI = "file://pam.d/chfn \
file://pam.d/passwd \
file://pam.d/su"
-inherit autotools gettext
+inherit autotools gettext github-releases pkgconfig
export CONFIG_SHELL="/bin/sh"
-EXTRA_OECONF += "--without-libcrack \
+EXTRA_OECONF += " \
--with-group-name-max-length=24 \
--enable-subordinate-ids=yes \
--without-sssd \
${NSCDOPT}"
+CFLAGS:append:libc-musl = " -DLIBBSD_OVERLAY"
+
NSCDOPT = ""
NSCDOPT:class-native = "--without-nscd"
NSCDOPT:class-nativesdk = "--without-nscd"
@@ -64,23 +60,22 @@ PAM_PLUGINS = "libpam-runtime \
pam-plugin-env \
pam-plugin-group \
pam-plugin-limits \
- pam-plugin-lastlog \
pam-plugin-motd \
pam-plugin-mail \
pam-plugin-shells \
pam-plugin-rootok"
-PAM_PLUGINS:remove:libc-musl = "pam-plugin-lastlog"
-
PACKAGECONFIG ??= "${@bb.utils.filter('DISTRO_FEATURES', 'pam', d)} \
${@bb.utils.contains('DISTRO_FEATURES', 'xattr', 'attr', '', d)}"
-PACKAGECONFIG:class-native ??= "${@bb.utils.contains('DISTRO_FEATURES', 'xattr', 'attr', '', d)}"
+PACKAGECONFIG:class-native ??= "${@bb.utils.contains('DISTRO_FEATURES', 'xattr', 'attr', '', d)} libbsd"
PACKAGECONFIG:class-nativesdk = ""
PACKAGECONFIG[pam] = "--with-libpam,--without-libpam,libpam,${PAM_PLUGINS}"
PACKAGECONFIG[attr] = "--with-attr,--without-attr,attr"
PACKAGECONFIG[acl] = "--with-acl,--without-acl,acl"
PACKAGECONFIG[audit] = "--with-audit,--without-audit,audit"
PACKAGECONFIG[selinux] = "--with-selinux,--without-selinux,libselinux libsemanage"
+PACKAGECONFIG[libbsd] = "--with-libbsd,--without-libbsd,libbsd"
+PACKAGECONFIG[logind] = "--enable-logind,--disable-logind,systemd"
RDEPENDS:${PN} = "shadow-securetty \
base-passwd \
@@ -149,8 +144,30 @@ do_install:append() {
# Handle link properly after rename, otherwise missing files would
# lead rpm failed dependencies.
ln -sf newgrp.${BPN} ${D}${bindir}/sg
+
+ # usermod requires the subuid/subgid files to be in place before being
+ # able to use the -v/-V flags otherwise it fails:
+ # usermod: /etc/subuid does not exist, you cannot use the flags -v or -V
+ install -d ${D}${sysconfdir}
+ touch ${D}${sysconfdir}/subuid
+ touch ${D}${sysconfdir}/subgid
+}
+
+# Make executables look for dynamically linked libraries in a custom location, and install
+# the needed libraries there. That way we can use them from sstate
+# in setscene tasks without worrying about the dependency libraries being available.
+do_install:append:class-native() {
+ binaries=$(find ${D}${base_bindir}/ ${D}${base_sbindir}/ ${D}${bindir}/ ${D}${sbindir}/ -executable -type f)
+ chrpath -k -r ${STAGING_DIR_NATIVE}/lib-shadow-deps $binaries
+ mkdir -p ${D}${STAGING_DIR_NATIVE}/lib-shadow-deps/
+ libattr=${@bb.utils.contains('DISTRO_FEATURES', 'xattr', "${STAGING_LIBDIR_NATIVE}/libattr.so.*", '', d)}
+ install $libattr ${STAGING_LIBDIR_NATIVE}/libbsd.so.* ${STAGING_LIBDIR_NATIVE}/libmd.so.* ${D}${STAGING_DIR_NATIVE}/lib-shadow-deps/
+ install ${D}${libdir}/*.so.* ${D}${STAGING_DIR_NATIVE}/lib-shadow-deps/
}
+SYSROOT_DIRS:append:class-native = " ${STAGING_DIR_NATIVE}/lib-shadow-deps/"
+INSANE_SKIP:${PN}:class-native = "already-stripped"
+
PACKAGES =+ "${PN}-base"
FILES:${PN}-base = "\
${base_bindir}/login.shadow \
diff --git a/meta/recipes-extended/shadow/shadow_4.11.1.bb b/meta/recipes-extended/shadow/shadow_4.11.1.bb
deleted file mode 100644
index 40b11345c9..0000000000
--- a/meta/recipes-extended/shadow/shadow_4.11.1.bb
+++ /dev/null
@@ -1,11 +0,0 @@
-require shadow.inc
-
-# Build falsely assumes that if --enable-libpam is set, we don't need to link against
-# libcrypt. This breaks chsh.
-BUILD_LDFLAGS:append:class-target = " ${@bb.utils.contains('DISTRO_FEATURES', 'pam', '-lcrypt', '', d)}"
-
-BBCLASSEXTEND = "native nativesdk"
-
-# Severity is low and marked as closed and won't fix.
-# https://bugzilla.redhat.com/show_bug.cgi?id=884658
-CVE_CHECK_IGNORE += "CVE-2013-4235"
diff --git a/meta/recipes-extended/shadow/shadow_4.15.0.bb b/meta/recipes-extended/shadow/shadow_4.15.0.bb
new file mode 100644
index 0000000000..e57676c1da
--- /dev/null
+++ b/meta/recipes-extended/shadow/shadow_4.15.0.bb
@@ -0,0 +1,10 @@
+require shadow.inc
+
+# Build falsely assumes that if --enable-libpam is set, we don't need to link against
+# libcrypt. This breaks chsh.
+BUILD_LDFLAGS:append:class-target = " ${@bb.utils.contains('DISTRO_FEATURES', 'pam', '-lcrypt', '', d)}"
+
+BBCLASSEXTEND = "native nativesdk"
+
+# https://bugzilla.redhat.com/show_bug.cgi?id=884658
+CVE_STATUS[CVE-2013-4235] = "upstream-wontfix: Severity is low and marked as closed and won't fix."
diff --git a/meta/recipes-extended/slang/slang/dont-link-to-host.patch b/meta/recipes-extended/slang/slang/dont-link-to-host.patch
index 42dba0fae4..4b02068991 100644
--- a/meta/recipes-extended/slang/slang/dont-link-to-host.patch
+++ b/meta/recipes-extended/slang/slang/dont-link-to-host.patch
@@ -1,3 +1,8 @@
+From b4a6e3c8309cff0f2311cd959c5091213b633851 Mon Sep 17 00:00:00 2001
+From: Ross Burton <ross.burton@intel.com>
+Date: Tue, 7 Feb 2017 14:35:43 +0000
+Subject: [PATCH] slang: rewrite recipe to run autoconf
+
SLANG_INST_LIB is the location of where slang will end up, but when building for
packaging this doesn't have DESTDIR appended so can potentially link to the host
for cross builds and will trigger QA errors.
@@ -7,10 +12,20 @@ As this is obviously wrong, delete it.
Upstream-Status: Pending
Signed-off-by: Ross Burton <ross.burton@intel.com>
+---
+ slsh/Makefile.in | 2 +-
+ 1 file changed, 1 insertion(+), 1 deletion(-)
+
diff --git a/slsh/Makefile.in b/slsh/Makefile.in
-index cba9d81..4c1c370 100644
+index addd343..63a5c9b 100644
--- a/slsh/Makefile.in
+++ b/slsh/Makefile.in
-@@ -80 +80 @@ SHELL = /bin/sh
--INST_LIBS = $(DEST_LIB_DIR) $(RPATH) $(SLANG_INST_LIB) -lslang $(READLINE_LIB) $(DYNAMIC_LIBS)
-+INST_LIBS = $(DEST_LIB_DIR) $(RPATH) -lslang $(READLINE_LIB) $(DYNAMIC_LIBS)
+@@ -77,7 +77,7 @@ SLSYSWRAP_LIB = @LIB_SLSYSWRAP@
+ #----------------------------------------------------------------------------
+ @SET_MAKE@
+ SHELL = /bin/sh
+-INST_LIBS = $(DEST_LIB_DIR) $(RPATH) $(SLANG_INST_LIB) -lslang $(LDFLAGS) $(READLINE_LIB) $(DYNAMIC_LIBS)
++INST_LIBS = $(DEST_LIB_DIR) $(RPATH) -lslang $(LDFLAGS) $(READLINE_LIB) $(DYNAMIC_LIBS)
+ DEFS = -DSLSH_CONF_DIR='"$(SLSH_CONF_DIR)"' -DSLSH_PATH='"$(SLSH_LIB_DIR)"' \
+ -DSLSH_CONF_DIR_ENV='$(SLSH_CONF_DIR_ENV)' -DSLSH_LIB_DIR_ENV='$(SLSH_LIB_DIR_ENV)' \
+ -DSLSH_PATH_ENV='$(SLSH_PATH_ENV)' $(SLSYSWRAP_DEF)
diff --git a/meta/recipes-extended/slang/slang/terminfo_fixes.patch b/meta/recipes-extended/slang/slang/terminfo_fixes.patch
index 3ca20a8cab..331b7f02e4 100644
--- a/meta/recipes-extended/slang/slang/terminfo_fixes.patch
+++ b/meta/recipes-extended/slang/slang/terminfo_fixes.patch
@@ -1,3 +1,8 @@
+From 2a75095638002d37a2f9c7aeb0ec54f271b0a1c4 Mon Sep 17 00:00:00 2001
+From: Joe Slater <joe.slater@windriver.com>
+Date: Tue, 1 Aug 2017 12:36:53 -0700
+Subject: [PATCH] slang: fix terminfo related problems
+
Do not use the JD_TERMCAP macro since we cannot get the terminfo from
ncurses pkg-config, but fix the macro to not reference host directories.
Also add src/test/Makefile.in so that we can use -ltermcap if we want to.
@@ -8,10 +13,18 @@ Upstream-Status: Inappropriate [see above]
Signed-off-by: Joe Slater <joe.slater@windriver.com>
+---
+ autoconf/aclocal.m4 | 8 +---
+ autoconf/configure.ac | 11 +++++-
+ src/test/Makefile.in | 90 +++++++++++++++++++++++++++++++++++++++++++
+ 3 files changed, 100 insertions(+), 9 deletions(-)
+ create mode 100644 src/test/Makefile.in
+diff --git a/autoconf/aclocal.m4 b/autoconf/aclocal.m4
+index b2dfcd3..5f94ed3 100644
--- a/autoconf/aclocal.m4
+++ b/autoconf/aclocal.m4
-@@ -506,14 +506,10 @@ then
+@@ -509,15 +509,9 @@ then
else
MISC_TERMINFO_DIRS=""
fi
@@ -19,8 +32,8 @@ Signed-off-by: Joe Slater <joe.slater@windriver.com>
- /usr/lib/terminfo \
- /usr/share/terminfo \
- /usr/share/lib/terminfo \
-- /usr/local/lib/terminfo"
-+
+- /usr/local/lib/terminfo \
+- /etc/terminfo /lib/terminfo"
TERMCAP=-ltermcap
-for terminfo_dir in $JD_Terminfo_Dirs
@@ -28,9 +41,11 @@ Signed-off-by: Joe Slater <joe.slater@windriver.com>
do
if test -d $terminfo_dir
then
+diff --git a/autoconf/configure.ac b/autoconf/configure.ac
+index 8e11e13..9e6402c 100644
--- a/autoconf/configure.ac
+++ b/autoconf/configure.ac
-@@ -249,7 +249,14 @@ AC_CHECK_SIZEOF(size_t)
+@@ -250,7 +250,14 @@ AC_CHECK_SIZEOF(size_t)
JD_CHECK_LONG_LONG
JD_LARGE_FILE_SUPPORT
@@ -46,7 +61,7 @@ Signed-off-by: Joe Slater <joe.slater@windriver.com>
JD_GCC_WARNINGS
JD_SET_OBJ_SRC_DIR(src)
-@@ -364,7 +371,7 @@ AC_CONFIG_HEADER(src/sysconf.h:src/confi
+@@ -365,7 +372,7 @@ AC_CONFIG_HEADER(src/sysconf.h:src/config.hin)
dnl AC_CONFIG_SUBDIRS(demo)
AC_OUTPUT(Makefile:autoconf/Makefile.in \
@@ -55,6 +70,9 @@ Signed-off-by: Joe Slater <joe.slater@windriver.com>
slang.pc:autoconf/slangpc.in \
)
+diff --git a/src/test/Makefile.in b/src/test/Makefile.in
+new file mode 100644
+index 0000000..4b7307f
--- /dev/null
+++ b/src/test/Makefile.in
@@ -0,0 +1,90 @@
diff --git a/meta/recipes-extended/slang/slang_2.3.2.bb b/meta/recipes-extended/slang/slang_2.3.2.bb
deleted file mode 100644
index 08cc9670dc..0000000000
--- a/meta/recipes-extended/slang/slang_2.3.2.bb
+++ /dev/null
@@ -1,84 +0,0 @@
-SUMMARY = "The shared library for the S-Lang extension language"
-
-DESCRIPTION = "S-Lang is an interpreted language and a programming library. The \
-S-Lang language was designed so that it can be easily embedded into \
-a program to provide the program with a powerful extension language. \
-The S-Lang library, provided in this package, provides the S-Lang \
-extension language. S-Lang's syntax resembles C, which makes it easy \
-to recode S-Lang procedures in C if you need to."
-
-HOMEPAGE = "http://www.jedsoft.org/slang/"
-SECTION = "libs"
-DEPENDS = "ncurses virtual/libiconv"
-
-LICENSE = "GPL-2.0-only"
-LIC_FILES_CHKSUM = "file://COPYING;md5=a52a18a472d4f7e45479b06563717c02"
-
-SRC_URI = "http://www.jedsoft.org/releases/${BPN}/${BP}.tar.bz2 \
- file://no-x.patch \
- file://dont-link-to-host.patch \
- file://test-add-output-in-the-format-result-testname.patch \
- file://terminfo_fixes.patch \
- file://array_test.patch \
- file://run-ptest \
- "
-
-SRC_URI[md5sum] = "c2d5a7aa0246627da490be4e399c87cb"
-SRC_URI[sha256sum] = "fc9e3b0fc4f67c3c1f6d43c90c16a5c42d117b8e28457c5b46831b8b5d3ae31a"
-
-UPSTREAM_CHECK_URI = "http://www.jedsoft.org/releases/slang/"
-PREMIRRORS:append = " http://www.jedsoft.org/releases/slang/.* http://www.jedsoft.org/releases/slang/old/"
-
-inherit autotools-brokensep ptest
-CLEANBROKEN = "1"
-
-EXTRA_OECONF = "--without-onig"
-# There's no way to turn off rpaths and slang will -rpath to the default search
-# path. Unset RPATH to stop this.
-EXTRA_OEMAKE = "RPATH=''"
-
-PACKAGECONFIG ??= "pcre"
-PACKAGECONFIG[pcre] = "--with-pcre=${STAGING_DIR_HOST}${prefix},--without-pcre,pcre"
-PACKAGECONFIG[png] = "--with-png=${STAGING_DIR_HOST}${prefix},--without-png,libpng"
-PACKAGECONFIG[zlib] = "--with-z=${STAGING_DIR_HOST}${prefix},--without-z,zlib"
-
-do_configure:prepend() {
- cd ${S}/autoconf
- # slang keeps configure.ac and rest of autoconf files in autoconf/ directory
- # we have to go there to be able to run gnu-configize cause it expects configure.{in,ac}
- # to be present. Resulting files land in autoconf/autoconf/ so we need to move them.
- gnu-configize --force && mv autoconf/config.* .
- # For the same reason we also need to run autoconf manually.
- autoconf && mv configure ..
- cd ${B}
-}
-
-do_compile_ptest() {
- oe_runmake -C src static
- oe_runmake -C src/test sltest
-}
-
-do_install_ptest() {
- mkdir ${D}${PTEST_PATH}/test
- for f in Makefile sltest runtests.sh *.sl *.inc; do
- cp ${S}/src/test/$f ${D}${PTEST_PATH}/test/
- done
- sed -e 's/\ \$(TEST_PGM)\.c\ assoc\.c\ list\.c\ \$(SLANGLIB)\/libslang\.a//' \
- -e '/\$(CC).*(TEST_PGM)/d' \
- -i ${D}${PTEST_PATH}/test/Makefile
-
- cp ${S}/slsh/lib/require.sl ${D}${PTEST_PATH}/test/
- sed -i 's/\.\.\/\.\.\/slsh\/lib\/require\.sl/require\.sl/' ${D}${PTEST_PATH}/test/req.sl
-
- cp ${S}/doc/text/slangfun.txt ${D}${PTEST_PATH}/test/
- sed -i 's/\.\.\/\.\.\/doc\/text\/slangfun\.txt/slangfun\.txt/' ${D}${PTEST_PATH}/test/docfun.sl
-}
-
-FILES:${PN} += "${libdir}/${BPN}/v2/modules/ ${datadir}/slsh/"
-
-RDEPENDS:${PN}-ptest += "make"
-
-PARALLEL_MAKE = ""
-PARALLEL_MAKEINST = ""
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-extended/slang/slang_2.3.3.bb b/meta/recipes-extended/slang/slang_2.3.3.bb
new file mode 100644
index 0000000000..05b8aff0f2
--- /dev/null
+++ b/meta/recipes-extended/slang/slang_2.3.3.bb
@@ -0,0 +1,83 @@
+SUMMARY = "The shared library for the S-Lang extension language"
+
+DESCRIPTION = "S-Lang is an interpreted language and a programming library. The \
+S-Lang language was designed so that it can be easily embedded into \
+a program to provide the program with a powerful extension language. \
+The S-Lang library, provided in this package, provides the S-Lang \
+extension language. S-Lang's syntax resembles C, which makes it easy \
+to recode S-Lang procedures in C if you need to."
+
+HOMEPAGE = "http://www.jedsoft.org/slang/"
+SECTION = "libs"
+DEPENDS = "ncurses virtual/libiconv"
+
+LICENSE = "GPL-2.0-only"
+LIC_FILES_CHKSUM = "file://COPYING;md5=a52a18a472d4f7e45479b06563717c02"
+
+SRC_URI = "http://www.jedsoft.org/releases/${BPN}/${BP}.tar.bz2 \
+ file://no-x.patch \
+ file://dont-link-to-host.patch \
+ file://test-add-output-in-the-format-result-testname.patch \
+ file://terminfo_fixes.patch \
+ file://array_test.patch \
+ file://run-ptest \
+ "
+
+SRC_URI[sha256sum] = "f9145054ae131973c61208ea82486d5dd10e3c5cdad23b7c4a0617743c8f5a18"
+
+UPSTREAM_CHECK_URI = "http://www.jedsoft.org/releases/slang/"
+PREMIRRORS:append = " http://www.jedsoft.org/releases/slang/.* http://www.jedsoft.org/releases/slang/old/"
+
+inherit autotools-brokensep ptest
+CLEANBROKEN = "1"
+
+EXTRA_OECONF = "--without-onig"
+# There's no way to turn off rpaths and slang will -rpath to the default search
+# path. Unset RPATH to stop this.
+EXTRA_OEMAKE = "RPATH=''"
+
+PACKAGECONFIG ??= "pcre"
+PACKAGECONFIG[pcre] = "--with-pcre=${STAGING_DIR_HOST}${prefix},--without-pcre,pcre"
+PACKAGECONFIG[png] = "--with-png=${STAGING_DIR_HOST}${prefix},--without-png,libpng"
+PACKAGECONFIG[zlib] = "--with-z=${STAGING_DIR_HOST}${prefix},--without-z,zlib"
+
+do_configure:prepend() {
+ cd ${S}/autoconf
+ # slang keeps configure.ac and rest of autoconf files in autoconf/ directory
+ # we have to go there to be able to run gnu-configize cause it expects configure.{in,ac}
+ # to be present. Resulting files land in autoconf/autoconf/ so we need to move them.
+ gnu-configize --force && mv autoconf/config.* .
+ # For the same reason we also need to run autoconf manually.
+ autoconf && mv configure ..
+ cd ${B}
+}
+
+do_compile_ptest() {
+ oe_runmake -C src static
+ oe_runmake -C src/test sltest
+}
+
+do_install_ptest() {
+ mkdir ${D}${PTEST_PATH}/test
+ for f in Makefile sltest runtests.sh *.sl *.inc; do
+ cp ${S}/src/test/$f ${D}${PTEST_PATH}/test/
+ done
+ sed -e 's/\ \$(TEST_PGM)\.c\ assoc\.c\ list\.c\ \$(SLANGLIB)\/libslang\.a//' \
+ -e '/\$(CC).*(TEST_PGM)/d' \
+ -i ${D}${PTEST_PATH}/test/Makefile
+
+ cp ${S}/slsh/lib/require.sl ${D}${PTEST_PATH}/test/
+ sed -i 's/\.\.\/\.\.\/slsh\/lib\/require\.sl/require\.sl/' ${D}${PTEST_PATH}/test/req.sl
+
+ cp ${S}/doc/text/slangfun.txt ${D}${PTEST_PATH}/test/
+ sed -i 's/\.\.\/\.\.\/doc\/text\/slangfun\.txt/slangfun\.txt/' ${D}${PTEST_PATH}/test/docfun.sl
+}
+
+FILES:${PN} += "${libdir}/${BPN}/v2/modules/ ${datadir}/slsh/"
+
+RDEPENDS:${PN}-ptest += "make"
+
+PARALLEL_MAKE = ""
+PARALLEL_MAKEINST = ""
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-extended/stress-ng/stress-ng_0.14.01.bb b/meta/recipes-extended/stress-ng/stress-ng_0.14.01.bb
deleted file mode 100644
index cba15e947e..0000000000
--- a/meta/recipes-extended/stress-ng/stress-ng_0.14.01.bb
+++ /dev/null
@@ -1,29 +0,0 @@
-SUMMARY = "System load testing utility"
-DESCRIPTION = "Deliberately simple workload generator for POSIX systems. It \
-imposes a configurable amount of CPU, memory, I/O, and disk stress on the system."
-HOMEPAGE = "https://github.com/ColinIanKing/stress-ng#readme"
-LICENSE = "GPL-2.0-only"
-LIC_FILES_CHKSUM = "file://COPYING;md5=b234ee4d69f5fce4486a80fdaf4a4263"
-
-SRC_URI = "git://github.com/ColinIanKing/stress-ng.git;protocol=https;branch=master"
-SRCREV = "597da6154263c9317291f2dd0ed71a8ff1e8b2dc"
-S = "${WORKDIR}/git"
-
-DEPENDS = "coreutils-native"
-
-PROVIDES = "stress"
-RPROVIDES:${PN} = "stress"
-RREPLACES:${PN} = "stress"
-RCONFLICTS:${PN} = "stress"
-
-inherit bash-completion
-
-do_compile:prepend() {
- mkdir -p configs
- touch configs/HAVE_APPARMOR
-}
-
-do_install() {
- oe_runmake DESTDIR=${D} install
- ln -s stress-ng ${D}${bindir}/stress
-}
diff --git a/meta/recipes-extended/stress-ng/stress-ng_0.17.06.bb b/meta/recipes-extended/stress-ng/stress-ng_0.17.06.bb
new file mode 100644
index 0000000000..a52b70d22f
--- /dev/null
+++ b/meta/recipes-extended/stress-ng/stress-ng_0.17.06.bb
@@ -0,0 +1,36 @@
+SUMMARY = "System load testing utility"
+DESCRIPTION = "Deliberately simple workload generator for POSIX systems. It \
+imposes a configurable amount of CPU, memory, I/O, and disk stress on the system."
+HOMEPAGE = "https://github.com/ColinIanKing/stress-ng#readme"
+LICENSE = "GPL-2.0-only"
+LIC_FILES_CHKSUM = "file://COPYING;md5=b234ee4d69f5fce4486a80fdaf4a4263"
+
+SRC_URI = "git://github.com/ColinIanKing/stress-ng.git;protocol=https;branch=master \
+ "
+SRCREV = "e6bda983cb48a201b6af173204372c7b37d6411f"
+S = "${WORKDIR}/git"
+
+DEPENDS = "coreutils-native libbsd"
+
+PROVIDES = "stress"
+RPROVIDES:${PN} = "stress"
+RREPLACES:${PN} = "stress"
+RCONFLICTS:${PN} = "stress"
+
+inherit bash-completion
+
+EXTRA_OEMAKE = "VERBOSE=1"
+
+do_configure() {
+ mkdir -p configs
+ touch configs/HAVE_APPARMOR
+ oe_runmake makeconfig
+}
+
+do_install() {
+ oe_runmake DESTDIR=${D} BINDIR=${bindir} install
+ ln -s stress-ng ${D}${bindir}/stress
+}
+
+# upstream issue: https://github.com/ColinIanKing/stress-ng/issues/315
+DEBUG_BUILD = "0"
diff --git a/meta/recipes-extended/sudo/files/0001-lib-util-mksigname.c-correctly-include-header-for-ou.patch b/meta/recipes-extended/sudo/files/0001-lib-util-mksigname.c-correctly-include-header-for-ou.patch
deleted file mode 100644
index f63ed553be..0000000000
--- a/meta/recipes-extended/sudo/files/0001-lib-util-mksigname.c-correctly-include-header-for-ou.patch
+++ /dev/null
@@ -1,25 +0,0 @@
-From f993c5c88faacc43971899aae2168ffb3e34dc80 Mon Sep 17 00:00:00 2001
-From: Alexander Kanavin <alex@linutronix.de>
-Date: Fri, 24 Sep 2021 13:36:24 +0200
-Subject: [PATCH] lib/util/mksigname.c: correctly include header for out of
- tree builds
-
-Upstream-Status: Submitted [https://github.com/sudo-project/sudo/pull/123]
-Signed-off-by: Alexander Kanavin <alex@linutronix.de>
----
- lib/util/mksigname.c | 2 +-
- 1 file changed, 1 insertion(+), 1 deletion(-)
-
-diff --git a/lib/util/mksigname.c b/lib/util/mksigname.c
-index de8b1ad..0a69e7e 100644
---- a/lib/util/mksigname.c
-+++ b/lib/util/mksigname.c
-@@ -36,7 +36,7 @@ main(int argc, char *argv[])
- {
- unsigned int i;
-
--#include "mksigname.h"
-+#include "lib/util/mksigname.h"
-
- printf("const char *const sudo_sys_signame[] = {\n");
- for (i = 0; i < nitems(sudo_sys_signame); i++) {
diff --git a/meta/recipes-extended/sudo/files/0001-sudo.conf.in-fix-conflict-with-multilib.patch b/meta/recipes-extended/sudo/files/0001-sudo.conf.in-fix-conflict-with-multilib.patch
index f4fc376bb8..041c717e00 100644
--- a/meta/recipes-extended/sudo/files/0001-sudo.conf.in-fix-conflict-with-multilib.patch
+++ b/meta/recipes-extended/sudo/files/0001-sudo.conf.in-fix-conflict-with-multilib.patch
@@ -1,4 +1,7 @@
-sudo.conf.in: fix conflict with multilib
+From 6e835350b7413210c410d3578cfab804186b7a4f Mon Sep 17 00:00:00 2001
+From: Kai Kang <kai.kang@windriver.com>
+Date: Tue, 17 Nov 2020 11:13:40 +0800
+Subject: [PATCH] sudo.conf.in: fix conflict with multilib
When pass ${libdir} to --libexecdir of sudo, it fails to install sudo
and lib32-sudo at same time:
@@ -12,12 +15,13 @@ Update the comments in sudo.conf.in to avoid the conflict.
Signed-off-by: Kai Kang <kai.kang@windriver.com>
Upstream-Status: Inappropriate [OE configuration specific]
+
---
examples/sudo.conf.in | 8 ++++----
1 file changed, 4 insertions(+), 4 deletions(-)
diff --git a/examples/sudo.conf.in b/examples/sudo.conf.in
-index 6535d3a..50afc8f 100644
+index 2187457..0908d24 100644
--- a/examples/sudo.conf.in
+++ b/examples/sudo.conf.in
@@ -4,7 +4,7 @@
@@ -33,8 +37,8 @@ index 6535d3a..50afc8f 100644
# The compiled-in value is usually sufficient and should only be changed
# if you rename or move the sudo_intercept.so file.
#
--#Path intercept @plugindir@/sudo_intercept.so
-+#Path intercept $plugindir/sudo_intercept.so
+-#Path intercept @intercept_file@
++#Path intercept $intercept_file
#
# Sudo noexec:
@@ -42,8 +46,8 @@ index 6535d3a..50afc8f 100644
# The compiled-in value is usually sufficient and should only be changed
# if you rename or move the sudo_noexec.so file.
#
--#Path noexec @plugindir@/sudo_noexec.so
-+#Path noexec $plugindir/sudo_noexec.so
+-#Path noexec @noexec_file@
++#Path noexec $noexec_file
#
# Sudo plugin directory:
@@ -55,7 +59,4 @@ index 6535d3a..50afc8f 100644
+#Path plugin_dir $plugindir
#
- # Sudo developer mode:
---
-2.17.1
-
+ # Core dumps:
diff --git a/meta/recipes-extended/sudo/sudo.inc b/meta/recipes-extended/sudo/sudo.inc
index 8947c46129..feb1cf35a7 100644
--- a/meta/recipes-extended/sudo/sudo.inc
+++ b/meta/recipes-extended/sudo/sudo.inc
@@ -4,11 +4,10 @@ HOMEPAGE = "http://www.sudo.ws"
BUGTRACKER = "http://www.sudo.ws/bugs/"
SECTION = "admin"
LICENSE = "ISC & BSD-3-Clause & BSD-2-Clause & Zlib"
-LIC_FILES_CHKSUM = "file://LICENSE.md;md5=16cf60b466f3a0606427a7b624a3a670 \
+LIC_FILES_CHKSUM = "file://LICENSE.md;md5=5100e20d35f9015f9eef6bdb27ba194f \
file://plugins/sudoers/redblack.c;beginline=1;endline=46;md5=03e35317699ba00b496251e0dfe9f109 \
file://lib/util/reallocarray.c;beginline=3;endline=15;md5=397dd45c7683e90b9f8bf24638cf03bf \
file://lib/util/fnmatch.c;beginline=3;endline=27;md5=004d7d2866ba1f5b41174906849d2e0f \
- file://lib/util/getcwd.c;beginline=2;endline=27;md5=50f8d9667750e18dea4e84a935c12009 \
file://lib/util/glob.c;beginline=2;endline=31;md5=2852f68687544e3eb8a0a61665506f0e \
file://lib/util/snprintf.c;beginline=3;endline=33;md5=b70df6179969e38fcf68da91b53b8029 \
file://include/sudo_queue.h;beginline=2;endline=27;md5=ad578e9664d17a010b63e4bc0576ee8d \
@@ -23,18 +22,20 @@ inherit autotools
PACKAGECONFIG ??= ""
PACKAGECONFIG[zlib] = "--enable-zlib,--disable-zlib,zlib"
PACKAGECONFIG[pam-wheel] = ",,,pam-plugin-wheel"
+PACKAGECONFIG[audit] = "--with-linux-audit,--without-linux-audit,audit"
+PACKAGECONFIG[selinux] = "--with-selinux,--without-selinux,libselinux"
EXTRA_OECONF = "--with-editor=${base_bindir}/vi --with-env-editor"
EXTRA_OECONF:append:libc-musl = " --disable-hardening "
do_compile:prepend () {
- # Remove build host references from sudo_usage.h
+ # Remove build host references from config.h
sed -i \
-e 's,--with-libtool-sysroot=${STAGING_DIR_TARGET},,g' \
-e 's,--build=${BUILD_SYS},,g' \
-e 's,--host=${HOST_SYS},,g' \
- ${B}/src/sudo_usage.h
+ ${B}/config.h
}
# Explicitly create ${localstatedir}/lib before do_install to ensure
diff --git a/meta/recipes-extended/sudo/sudo_1.9.10.bb b/meta/recipes-extended/sudo/sudo_1.9.10.bb
deleted file mode 100644
index aa0d814ed7..0000000000
--- a/meta/recipes-extended/sudo/sudo_1.9.10.bb
+++ /dev/null
@@ -1,62 +0,0 @@
-require sudo.inc
-
-SRC_URI = "https://www.sudo.ws/dist/sudo-${PV}.tar.gz \
- ${@bb.utils.contains('DISTRO_FEATURES', 'pam', '${PAM_SRC_URI}', '', d)} \
- file://0001-sudo.conf.in-fix-conflict-with-multilib.patch \
- file://0001-lib-util-mksigname.c-correctly-include-header-for-ou.patch \
- "
-
-PAM_SRC_URI = "file://sudo.pam"
-
-SRC_URI[sha256sum] = "44a1461098e7c7b8e6ac597499c24fb2e43748c0c139a8b4944e57d1349a64f4"
-
-DEPENDS += " virtual/crypt ${@bb.utils.contains('DISTRO_FEATURES', 'pam', 'libpam', '', d)}"
-RDEPENDS:${PN} += " ${@bb.utils.contains('DISTRO_FEATURES', 'pam', 'pam-plugin-limits pam-plugin-keyinit', '', d)}"
-
-CACHED_CONFIGUREVARS = " \
- ac_cv_type_rsize_t=no \
- ac_cv_path_MVPROG=${base_bindir}/mv \
- ac_cv_path_BSHELLPROG=${base_bindir}/sh \
- ac_cv_path_SENDMAILPROG=${sbindir}/sendmail \
- ac_cv_path_VIPROG=${base_bindir}/vi \
- "
-
-EXTRA_OECONF += " \
- ${@bb.utils.contains('DISTRO_FEATURES', 'pam', '--with-pam', '--without-pam', d)} \
- ${@bb.utils.contains('DISTRO_FEATURES', 'systemd', '--enable-tmpfiles.d=${nonarch_libdir}/tmpfiles.d', '--disable-tmpfiles.d', d)} \
- --with-rundir=/run/sudo \
- --with-vardir=/var/lib/sudo \
- --libexecdir=${libdir} \
- "
-
-do_install:append () {
- if [ "${@bb.utils.filter('DISTRO_FEATURES', 'pam', d)}" ]; then
- install -D -m 644 ${WORKDIR}/sudo.pam ${D}/${sysconfdir}/pam.d/sudo
- if ${@bb.utils.contains('PACKAGECONFIG', 'pam-wheel', 'true', 'false', d)} ; then
- echo 'auth required pam_wheel.so use_uid' >>${D}${sysconfdir}/pam.d/sudo
- sed -i 's/# \(%wheel ALL=(ALL) ALL\)/\1/' ${D}${sysconfdir}/sudoers
- fi
- fi
-
- chmod 4111 ${D}${bindir}/sudo
- chmod 0440 ${D}${sysconfdir}/sudoers
-
- # Explicitly remove the /sudo directory to avoid QA error
- rmdir -p --ignore-fail-on-non-empty ${D}/run/sudo
-}
-
-FILES:${PN}-dev += "${libdir}/${BPN}/lib*${SOLIBSDEV} ${libdir}/${BPN}/*.la \
- ${libdir}/lib*${SOLIBSDEV} ${libdir}/*.la"
-
-CONFFILES:${PN}-lib = "${sysconfdir}/sudoers"
-
-SUDO_PACKAGES = "${PN}-sudo\
- ${PN}-lib"
-
-PACKAGE_BEFORE_PN = "${SUDO_PACKAGES}"
-
-RDEPENDS:${PN}-sudo = "${PN}-lib"
-RDEPENDS:${PN} += "${SUDO_PACKAGES}"
-
-FILES:${PN}-sudo = "${bindir}/sudo ${bindir}/sudoedit"
-FILES:${PN}-lib = "${localstatedir} ${libexecdir} ${sysconfdir} ${libdir} ${nonarch_libdir}"
diff --git a/meta/recipes-extended/sudo/sudo_1.9.15p5.bb b/meta/recipes-extended/sudo/sudo_1.9.15p5.bb
new file mode 100644
index 0000000000..8e542015ad
--- /dev/null
+++ b/meta/recipes-extended/sudo/sudo_1.9.15p5.bb
@@ -0,0 +1,61 @@
+require sudo.inc
+
+SRC_URI = "https://www.sudo.ws/dist/sudo-${PV}.tar.gz \
+ ${@bb.utils.contains('DISTRO_FEATURES', 'pam', '${PAM_SRC_URI}', '', d)} \
+ file://0001-sudo.conf.in-fix-conflict-with-multilib.patch \
+ "
+
+PAM_SRC_URI = "file://sudo.pam"
+
+SRC_URI[sha256sum] = "558d10b9a1991fb3b9fa7fa7b07ec4405b7aefb5b3cb0b0871dbc81e3a88e558"
+
+DEPENDS += " virtual/crypt ${@bb.utils.contains('DISTRO_FEATURES', 'pam', 'libpam', '', d)}"
+RDEPENDS:${PN} += " ${@bb.utils.contains('DISTRO_FEATURES', 'pam', 'pam-plugin-limits pam-plugin-keyinit', '', d)}"
+
+CACHED_CONFIGUREVARS = " \
+ ac_cv_type_rsize_t=no \
+ ac_cv_path_MVPROG=${base_bindir}/mv \
+ ac_cv_path_BSHELLPROG=${base_bindir}/sh \
+ ac_cv_path_SENDMAILPROG=${sbindir}/sendmail \
+ ac_cv_path_VIPROG=${base_bindir}/vi \
+ "
+
+EXTRA_OECONF += " \
+ ${@bb.utils.contains('DISTRO_FEATURES', 'pam', '--with-pam', '--without-pam', d)} \
+ ${@bb.utils.contains('DISTRO_FEATURES', 'systemd', '--enable-tmpfiles.d=${nonarch_libdir}/tmpfiles.d', '--disable-tmpfiles.d', d)} \
+ --with-rundir=/run/sudo \
+ --with-vardir=/var/lib/sudo \
+ --libexecdir=${libdir} \
+ "
+
+do_install:append () {
+ if [ "${@bb.utils.filter('DISTRO_FEATURES', 'pam', d)}" ]; then
+ install -D -m 644 ${WORKDIR}/sudo.pam ${D}/${sysconfdir}/pam.d/sudo
+ if ${@bb.utils.contains('PACKAGECONFIG', 'pam-wheel', 'true', 'false', d)} ; then
+ echo 'auth required pam_wheel.so use_uid' >>${D}${sysconfdir}/pam.d/sudo
+ sed -i 's/# \(%wheel ALL=(ALL) ALL\)/\1/' ${D}${sysconfdir}/sudoers
+ fi
+ fi
+
+ chmod 4111 ${D}${bindir}/sudo
+ chmod 0440 ${D}${sysconfdir}/sudoers
+
+ # Explicitly remove the /sudo directory to avoid QA error
+ rmdir -p --ignore-fail-on-non-empty ${D}/run/sudo
+}
+
+FILES:${PN}-dev += "${libdir}/${BPN}/lib*${SOLIBSDEV} ${libdir}/${BPN}/*.la \
+ ${libdir}/lib*${SOLIBSDEV} ${libdir}/*.la"
+
+CONFFILES:${PN}-lib = "${sysconfdir}/sudoers"
+
+SUDO_PACKAGES = "${PN}-sudo\
+ ${PN}-lib"
+
+PACKAGE_BEFORE_PN = "${SUDO_PACKAGES}"
+
+RDEPENDS:${PN}-sudo = "${PN}-lib"
+RDEPENDS:${PN} += "${SUDO_PACKAGES}"
+
+FILES:${PN}-sudo = "${bindir}/sudo ${bindir}/sudoedit"
+FILES:${PN}-lib = "${localstatedir} ${libexecdir} ${sysconfdir} ${libdir} ${nonarch_libdir}"
diff --git a/meta/recipes-extended/sysklogd/sysklogd_2.3.0.bb b/meta/recipes-extended/sysklogd/sysklogd_2.3.0.bb
deleted file mode 100644
index 7043f3d391..0000000000
--- a/meta/recipes-extended/sysklogd/sysklogd_2.3.0.bb
+++ /dev/null
@@ -1,56 +0,0 @@
-SUMMARY = "System Log Daemons"
-DESCRIPTION = "The sysklogd package implements system log daemons: syslogd"
-HOMEPAGE = "http://www.infodrom.org/projects/sysklogd/"
-SECTION = "base"
-
-LICENSE = "BSD-3-Clause"
-LIC_FILES_CHKSUM = "file://LICENSE;md5=5b4be4b2549338526758ef479c040943 \
- file://src/syslogd.c;beginline=2;endline=15;md5=a880fecbc04503f071c494a9c0dd4f97 \
- "
-
-inherit update-rc.d update-alternatives systemd autotools
-
-SRC_URI = "git://github.com/troglobit/sysklogd.git;branch=master;protocol=https \
- file://sysklogd \
- "
-
-SRCREV = "03c2c9c68d5d02675326527774e7e9cba3490ba0"
-
-S = "${WORKDIR}/git"
-
-EXTRA_OECONF = "--with-systemd=${systemd_system_unitdir} --without-logger"
-
-do_install:append () {
- install -d ${D}${sysconfdir}
- install -m 644 ${S}/syslog.conf ${D}${sysconfdir}/syslog.conf
- install -d ${D}${sysconfdir}/init.d
- install -m 755 ${WORKDIR}/sysklogd ${D}${sysconfdir}/init.d/syslog
-}
-
-SYSTEMD_PACKAGES = "${PN}"
-SYSTEMD_SERVICE:${PN} = "syslogd.service"
-SYSTEMD_AUTO_ENABLE = "enable"
-
-INITSCRIPT_NAME = "syslog"
-CONFFILES:${PN} = "${sysconfdir}/syslog.conf"
-RCONFLICTS:${PN} = "rsyslog busybox-syslog syslog-ng"
-
-FILES:${PN} += "${@bb.utils.contains('DISTRO_FEATURES','systemd','${exec_prefix}/lib/tmpfiles.d/sysklogd.conf', '', d)}"
-
-ALTERNATIVE_PRIORITY = "100"
-
-ALTERNATIVE:${PN}-doc = "syslogd.8"
-ALTERNATIVE_LINK_NAME[syslogd.8] = "${mandir}/man8/syslogd.8"
-
-pkg_prerm:${PN} () {
- if test "x$D" = "x"; then
- if test "$1" = "upgrade" -o "$1" = "remove"; then
- /etc/init.d/syslog stop || :
- fi
- fi
-}
-
-python () {
- if not bb.utils.contains('DISTRO_FEATURES', 'sysvinit', True, False, d):
- d.setVar("INHIBIT_UPDATERCD_BBCLASS", "1")
-}
diff --git a/meta/recipes-extended/sysklogd/sysklogd_2.5.2.bb b/meta/recipes-extended/sysklogd/sysklogd_2.5.2.bb
new file mode 100644
index 0000000000..c9c9055142
--- /dev/null
+++ b/meta/recipes-extended/sysklogd/sysklogd_2.5.2.bb
@@ -0,0 +1,56 @@
+SUMMARY = "System Log Daemons"
+DESCRIPTION = "The sysklogd package implements system log daemons: syslogd"
+HOMEPAGE = "http://www.infodrom.org/projects/sysklogd/"
+SECTION = "base"
+
+LICENSE = "BSD-3-Clause"
+LIC_FILES_CHKSUM = "file://LICENSE;md5=5b4be4b2549338526758ef479c040943 \
+ file://src/syslogd.c;beginline=2;endline=15;md5=a880fecbc04503f071c494a9c0dd4f97 \
+ "
+
+inherit update-rc.d update-alternatives systemd autotools
+
+SRC_URI = "git://github.com/troglobit/sysklogd.git;branch=master;protocol=https \
+ file://sysklogd \
+ "
+
+SRCREV = "3332c550f1a68393daec5d64cd81f3b7674c0af5"
+
+S = "${WORKDIR}/git"
+
+EXTRA_OECONF = "--with-systemd=${systemd_system_unitdir} --without-logger"
+
+do_install:append () {
+ install -d ${D}${sysconfdir}
+ install -m 644 ${S}/syslog.conf ${D}${sysconfdir}/syslog.conf
+ install -d ${D}${sysconfdir}/init.d
+ install -m 755 ${WORKDIR}/sysklogd ${D}${sysconfdir}/init.d/syslog
+}
+
+SYSTEMD_PACKAGES = "${PN}"
+SYSTEMD_SERVICE:${PN} = "syslogd.service"
+SYSTEMD_AUTO_ENABLE = "enable"
+
+INITSCRIPT_NAME = "syslog"
+CONFFILES:${PN} = "${sysconfdir}/syslog.conf"
+RCONFLICTS:${PN} = "rsyslog busybox-syslog syslog-ng"
+
+FILES:${PN} += "${@bb.utils.contains('DISTRO_FEATURES','systemd','${exec_prefix}/lib/tmpfiles.d/sysklogd.conf', '', d)}"
+
+ALTERNATIVE_PRIORITY = "100"
+
+ALTERNATIVE:${PN}-doc = "syslogd.8"
+ALTERNATIVE_LINK_NAME[syslogd.8] = "${mandir}/man8/syslogd.8"
+
+pkg_prerm:${PN} () {
+ if test "x$D" = "x"; then
+ if test "$1" = "upgrade" -o "$1" = "remove"; then
+ /etc/init.d/syslog stop || :
+ fi
+ fi
+}
+
+python () {
+ if not bb.utils.contains('DISTRO_FEATURES', 'sysvinit', True, False, d):
+ d.setVar("INHIBIT_UPDATERCD_BBCLASS", "1")
+}
diff --git a/meta/recipes-extended/sysstat/sysstat.inc b/meta/recipes-extended/sysstat/sysstat.inc
deleted file mode 100644
index 77337722ea..0000000000
--- a/meta/recipes-extended/sysstat/sysstat.inc
+++ /dev/null
@@ -1,71 +0,0 @@
-SUMMARY = "System performance tools"
-DESCRIPTION = "The sysstat utilities are a collection of performance monitoring tools for Linux."
-HOMEPAGE = "http://sebastien.godard.pagesperso-orange.fr/"
-LICENSE = "GPL-2.0-or-later"
-SECTION = "console/utils"
-
-SRC_URI = "http://pagesperso-orange.fr/sebastien.godard/${BP}.tar.xz \
- file://99_sysstat \
- file://sysstat.service \
- "
-
-UPSTREAM_CHECK_URI = "http://sebastien.godard.pagesperso-orange.fr/download.html"
-
-DEPENDS += "base-passwd"
-
-# autotools-brokensep as this package doesn't use automake
-inherit autotools-brokensep gettext systemd upstream-version-is-even
-
-PACKAGECONFIG ??= "${@bb.utils.filter('DISTRO_FEATURES', 'systemd', d)}"
-PACKAGECONFIG[lm-sensors] = "--enable-sensors,--disable-sensors,lmsensors,lmsensors-libsensors"
-PACKAGECONFIG[cron] = "--enable-install-cron --enable-copy-only,--disable-install-cron --disable-copy-only"
-PACKAGECONFIG[systemd] = "--with-systemdsystemunitdir=${systemd_system_unitdir}"
-
-EXTRA_OECONF += "--disable-stripping"
-
-SYSTEMD_PACKAGES = "${PN}"
-SYSTEMD_SERVICE:${PN} = "sysstat.service"
-SYSTEMD_AUTO_ENABLE = "enable"
-
-do_configure:prepend() {
- export sa_lib_dir=${libexecdir}/sa
-}
-
-do_install() {
- autotools_do_install
-
- # Don't version the documentation
- mv ${D}${docdir}/${BP} ${D}${docdir}/${BPN}
-
- # don't install /var/log/sa when populating rootfs. Do it through volatile
- rm -rf ${D}/var
- if ${@bb.utils.contains('DISTRO_FEATURES', 'sysvinit', 'true', 'false', d)}; then
- install -d ${D}/etc/default/volatiles
- install -m 0644 ${WORKDIR}/99_sysstat ${D}/etc/default/volatiles
- fi
- if ${@bb.utils.contains('DISTRO_FEATURES', 'systemd', 'true', 'false', d)}; then
- install -d ${D}${sysconfdir}/tmpfiles.d
- echo "d ${localstatedir}/log/sa - - - -" \
- > ${D}${sysconfdir}/tmpfiles.d/sysstat.conf
-
- # Unless both cron and systemd are enabled, install our own
- # systemd unit file. Otherwise the package will install one.
- if ${@bb.utils.contains('PACKAGECONFIG', 'cron systemd', 'false', 'true', d)}; then
- install -d ${D}${systemd_system_unitdir}
- install -m 0644 ${WORKDIR}/sysstat.service ${D}${systemd_system_unitdir}
- sed -i -e 's#@LIBEXECDIR@#${libexecdir}#g' ${D}${systemd_system_unitdir}/sysstat.service
- fi
- fi
-}
-
-pkg_postinst:${PN} () {
- if [ ! -n "$D" ]; then
- if [ -e /etc/init.d/populate-volatile.sh ]; then
- /etc/init.d/populate-volatile.sh update
- fi
- fi
-}
-
-FILES:${PN} += "${systemd_system_unitdir} ${nonarch_base_libdir}/systemd"
-
-TARGET_CC_ARCH += "${LDFLAGS}"
diff --git a/meta/recipes-extended/sysstat/sysstat/0001-configure.in-remove-check-for-chkconfig.patch b/meta/recipes-extended/sysstat/sysstat/0001-configure.in-remove-check-for-chkconfig.patch
index 4067bb983b..84383f955f 100644
--- a/meta/recipes-extended/sysstat/sysstat/0001-configure.in-remove-check-for-chkconfig.patch
+++ b/meta/recipes-extended/sysstat/sysstat/0001-configure.in-remove-check-for-chkconfig.patch
@@ -1,7 +1,7 @@
From 1590cc614aaf0fb81cd804414d6c9d5a9227352c Mon Sep 17 00:00:00 2001
From: Wenlin Kang <wenlin.kang@windriver.com>
Date: Tue, 5 Nov 2019 16:16:44 +0800
-Subject: [PATCH] configure.in: remove check for chkconfig
+Subject: [PATCH] configure.ac: remove check for chkconfig
chkconfig can't work on cross-platform, so should remove check for it.
@@ -9,13 +9,13 @@ Upstream-Status: Inappropriate [ embedded specific ]
Signed-off-by: Wenlin Kang <wenlin.kang@windriver.com>
---
- configure.in | 3 ++-
+ configure.ac | 3 ++-
1 file changed, 2 insertions(+), 1 deletion(-)
-diff --git a/configure.in b/configure.in
+diff --git a/configure.ac b/configure.ac
index 48b9a31..cedeb43 100644
---- a/configure.in
-+++ b/configure.in
+--- a/configure.ac
++++ b/configure.ac
@@ -42,7 +42,8 @@ AC_SUBST(VER_JSON)
AC_SUBST(VER_XML)
diff --git a/meta/recipes-extended/sysstat/sysstat_12.4.5.bb b/meta/recipes-extended/sysstat/sysstat_12.4.5.bb
deleted file mode 100644
index fe3db4d8a5..0000000000
--- a/meta/recipes-extended/sysstat/sysstat_12.4.5.bb
+++ /dev/null
@@ -1,7 +0,0 @@
-require sysstat.inc
-
-LIC_FILES_CHKSUM = "file://COPYING;md5=a23a74b3f4caf9616230789d94217acb"
-
-SRC_URI += "file://0001-configure.in-remove-check-for-chkconfig.patch"
-
-SRC_URI[sha256sum] = "ef445acea301bbb996e410842f6290a8d049e884d4868cfef7e85dc04b7eee5b"
diff --git a/meta/recipes-extended/sysstat/sysstat_12.7.5.bb b/meta/recipes-extended/sysstat/sysstat_12.7.5.bb
new file mode 100644
index 0000000000..150f4932d8
--- /dev/null
+++ b/meta/recipes-extended/sysstat/sysstat_12.7.5.bb
@@ -0,0 +1,80 @@
+SUMMARY = "System performance tools"
+DESCRIPTION = "The sysstat utilities are a collection of performance monitoring tools for Linux."
+HOMEPAGE = "https://sysstat.github.io/"
+LICENSE = "GPL-2.0-or-later"
+SECTION = "console/utils"
+
+SRC_URI = "git://github.com/sysstat/sysstat.git;protocol=https;branch=master \
+ file://99_sysstat \
+ file://sysstat.service \
+ file://0001-configure.in-remove-check-for-chkconfig.patch \
+ "
+
+LIC_FILES_CHKSUM = "file://COPYING;md5=a23a74b3f4caf9616230789d94217acb"
+
+SRCREV = "2d7682f26f42cef9127b123e319349b330c4ab8f"
+S = "${WORKDIR}/git"
+
+DEPENDS += "base-passwd"
+
+# autotools-brokensep as this package doesn't use automake
+inherit autotools-brokensep gettext systemd
+
+PACKAGECONFIG ??= "${@bb.utils.filter('DISTRO_FEATURES', 'systemd', d)}"
+PACKAGECONFIG[lm-sensors] = "--enable-sensors,--disable-sensors,lmsensors,lmsensors-libsensors"
+PACKAGECONFIG[cron] = "--enable-install-cron --enable-copy-only,--disable-install-cron --disable-copy-only"
+PACKAGECONFIG[systemd] = "--with-systemdsystemunitdir=${systemd_system_unitdir}"
+
+EXTRA_OECONF += "--disable-stripping"
+
+SYSTEMD_PACKAGES = "${PN}"
+SYSTEMD_SERVICE:${PN} = "sysstat.service"
+SYSTEMD_AUTO_ENABLE = "enable"
+
+do_configure:prepend() {
+ export sa_lib_dir=${libexecdir}/sa
+}
+
+do_install() {
+ autotools_do_install
+
+ # Don't version the documentation
+ mv ${D}${docdir}/${BP} ${D}${docdir}/${BPN}
+
+ # don't install /var/log/sa when populating rootfs. Do it through volatile
+ rm -rf ${D}/var
+ if ${@bb.utils.contains('DISTRO_FEATURES', 'sysvinit', 'true', 'false', d)}; then
+ install -d ${D}/etc/default/volatiles
+ install -m 0644 ${WORKDIR}/99_sysstat ${D}/etc/default/volatiles
+ fi
+ if ${@bb.utils.contains('DISTRO_FEATURES', 'systemd', 'true', 'false', d)}; then
+ install -d ${D}${nonarch_libdir}/tmpfiles.d
+ echo "d ${localstatedir}/log/sa - - - -" \
+ > ${D}${nonarch_libdir}/tmpfiles.d/sysstat.conf
+
+ # Unless both cron and systemd are enabled, install our own
+ # systemd unit file. Otherwise the package will install one.
+ if ${@bb.utils.contains('PACKAGECONFIG', 'cron systemd', 'false', 'true', d)}; then
+ install -d ${D}${systemd_system_unitdir}
+ install -m 0644 ${WORKDIR}/sysstat.service ${D}${systemd_system_unitdir}
+ sed -i -e 's#@LIBEXECDIR@#${libexecdir}#g' ${D}${systemd_system_unitdir}/sysstat.service
+ fi
+ fi
+}
+
+pkg_postinst:${PN} () {
+ if [ ! -n "$D" ]; then
+ if [ -e /etc/init.d/populate-volatile.sh ]; then
+ /etc/init.d/populate-volatile.sh update
+ fi
+ fi
+}
+
+FILES:${PN} += " \
+ ${systemd_system_unitdir} \
+ ${nonarch_base_libdir}/systemd \
+ ${nonarch_libdir}/tmpfiles.d \
+"
+
+TARGET_CC_ARCH += "${LDFLAGS}"
+
diff --git a/meta/recipes-extended/tar/tar/0001-tests-fix-TESTSUITE_AT.patch b/meta/recipes-extended/tar/tar/0001-tests-fix-TESTSUITE_AT.patch
new file mode 100644
index 0000000000..27d4d9aebf
--- /dev/null
+++ b/meta/recipes-extended/tar/tar/0001-tests-fix-TESTSUITE_AT.patch
@@ -0,0 +1,228 @@
+From 39849e9d91f477d3fb839f93cd0815d0cb3273e9 Mon Sep 17 00:00:00 2001
+From: Paul Eggert <eggert@cs.ucla.edu>
+Date: Tue, 18 Jul 2023 09:15:03 -0700
+Subject: tests: fix TESTSUITE_AT
+
+Problem reported by Lukas Javorsky <ljavorsk@redhat.com> in:
+https://lists.gnu.org/r/bug-tar/2023-07/msg00002.html
+* tests/Makefile.am (TESTSUITE_AT): Add exclude17.at, exclude18.at.
+Remove compress.m4; all uses changed. Add a comment saying how
+to rederive this. Sort.
+
+Upstream-Status: Backport [https://git.savannah.gnu.org/cgit/tar.git/commit/?id=39849e9d91f477d3fb839f93cd0815d0cb3273e9]
+---
+ tests/Makefile.am | 93 ++++++++++++++++++++++++++++---------------------------
+ 1 file changed, 48 insertions(+), 45 deletions(-)
+
+diff --git a/tests/Makefile.am b/tests/Makefile.am
+index 4a8f501..1884b72 100644
+--- a/tests/Makefile.am
++++ b/tests/Makefile.am
+@@ -45,21 +45,24 @@ $(srcdir)/package.m4: $(top_srcdir)/configure.ac
+ ## Test suite. ##
+ ## ------------ ##
+
++# You can generate the body of this macro with the following shell command:
++# LC_ALL=C ls *.at */*.at | sed -e 's/^/ /' -e '$!s/$/\\/'
+ TESTSUITE_AT = \
+- testsuite.at\
+- compress.m4\
+ T-cd.at\
+ T-dir00.at\
+ T-dir01.at\
+ T-empty.at\
++ T-mult.at\
++ T-nest.at\
++ T-nonl.at\
+ T-null.at\
+ T-null2.at\
+ T-rec.at\
+ T-recurse.at\
+ T-zfile.at\
+- T-nonl.at\
+- T-mult.at\
+- T-nest.at\
++ acls01.at\
++ acls02.at\
++ acls03.at\
+ add-file.at\
+ append.at\
+ append01.at\
+@@ -68,14 +71,15 @@ TESTSUITE_AT = \
+ append04.at\
+ append05.at\
+ backup01.at\
+- chtype.at\
+- comprec.at\
+- comperr.at\
++ capabs_raw01.at\
+ checkpoint/defaults.at\
+- checkpoint/interval.at\
+- checkpoint/dot.at\
+ checkpoint/dot-compat.at\
+ checkpoint/dot-int.at\
++ checkpoint/dot.at\
++ checkpoint/interval.at\
++ chtype.at\
++ comperr.at\
++ comprec.at\
+ delete01.at\
+ delete02.at\
+ delete03.at\
+@@ -83,6 +87,8 @@ TESTSUITE_AT = \
+ delete05.at\
+ delete06.at\
+ difflink.at\
++ dirrem01.at\
++ dirrem02.at\
+ exclude.at\
+ exclude01.at\
+ exclude02.at\
+@@ -100,6 +106,8 @@ TESTSUITE_AT = \
+ exclude14.at\
+ exclude15.at\
+ exclude16.at\
++ exclude17.at\
++ exclude18.at\
+ extrac01.at\
+ extrac02.at\
+ extrac03.at\
+@@ -127,11 +135,9 @@ TESTSUITE_AT = \
+ extrac25.at\
+ filerem01.at\
+ filerem02.at\
+- dirrem01.at\
+- dirrem02.at\
+- gzip.at\
+ grow.at\
+- incremental.at\
++ gzip.at\
++ ignfail.at\
+ incr01.at\
+ incr02.at\
+ incr03.at\
+@@ -143,8 +149,8 @@ TESTSUITE_AT = \
+ incr09.at\
+ incr10.at\
+ incr11.at\
++ incremental.at\
+ indexfile.at\
+- ignfail.at\
+ label01.at\
+ label02.at\
+ label03.at\
+@@ -188,22 +194,16 @@ TESTSUITE_AT = \
+ opcomp04.at\
+ opcomp05.at\
+ opcomp06.at\
+- positional01.at\
+- positional02.at\
+- positional03.at\
+ options.at\
+ options02.at\
+ options03.at\
+ owner.at\
+ pipe.at\
+- recurse.at\
++ positional01.at\
++ positional02.at\
++ positional03.at\
+ recurs02.at\
+- rename01.at\
+- rename02.at\
+- rename03.at\
+- rename04.at\
+- rename05.at\
+- rename06.at\
++ recurse.at\
+ remfiles01.at\
+ remfiles02.at\
+ remfiles03.at\
+@@ -226,11 +226,19 @@ TESTSUITE_AT = \
+ remfiles09b.at\
+ remfiles09c.at\
+ remfiles10.at\
++ rename01.at\
++ rename02.at\
++ rename03.at\
++ rename04.at\
++ rename05.at\
++ rename06.at\
+ same-order01.at\
+ same-order02.at\
++ selacl01.at\
++ selnx01.at\
+ shortfile.at\
+- shortupd.at\
+ shortrec.at\
++ shortupd.at\
+ sigpipe.at\
+ sparse01.at\
+ sparse02.at\
+@@ -247,6 +255,13 @@ TESTSUITE_AT = \
+ sptrcreat.at\
+ sptrdiff00.at\
+ sptrdiff01.at\
++ star/gtarfail.at\
++ star/gtarfail2.at\
++ star/multi-fail.at\
++ star/pax-big-10g.at\
++ star/ustar-big-2g.at\
++ star/ustar-big-8g.at\
++ testsuite.at\
+ time01.at\
+ time02.at\
+ truncate.at\
+@@ -255,21 +270,11 @@ TESTSUITE_AT = \
+ update02.at\
+ update03.at\
+ update04.at\
+- volsize.at\
+- volume.at\
+ verbose.at\
+ verify.at\
+ version.at\
+- xform-h.at\
+- xform01.at\
+- xform02.at\
+- xform03.at\
+- star/gtarfail.at\
+- star/gtarfail2.at\
+- star/multi-fail.at\
+- star/ustar-big-2g.at\
+- star/ustar-big-8g.at\
+- star/pax-big-10g.at\
++ volsize.at\
++ volume.at\
+ xattr01.at\
+ xattr02.at\
+ xattr03.at\
+@@ -278,12 +283,10 @@ TESTSUITE_AT = \
+ xattr06.at\
+ xattr07.at\
+ xattr08.at\
+- acls01.at\
+- acls02.at\
+- acls03.at\
+- selnx01.at\
+- selacl01.at\
+- capabs_raw01.at
++ xform-h.at\
++ xform01.at\
++ xform02.at\
++ xform03.at
+
+ distclean-local:
+ -rm -rf download
+@@ -291,7 +294,7 @@ distclean-local:
+ TESTSUITE = $(srcdir)/testsuite
+
+ AUTOTEST = $(AUTOM4TE) --language=autotest
+-$(TESTSUITE): package.m4 $(TESTSUITE_AT)
++$(TESTSUITE): compress.m4 package.m4 $(TESTSUITE_AT)
+ $(AUTOTEST) -I $(srcdir) testsuite.at -o $@.tmp
+ mv $@.tmp $@
+
+--
+cgit v1.1
+
diff --git a/meta/recipes-extended/tar/tar/0002-tests-check-for-recently-fixed-bug.patch b/meta/recipes-extended/tar/tar/0002-tests-check-for-recently-fixed-bug.patch
new file mode 100644
index 0000000000..6cd8c5510f
--- /dev/null
+++ b/meta/recipes-extended/tar/tar/0002-tests-check-for-recently-fixed-bug.patch
@@ -0,0 +1,60 @@
+From 0f0722df45ec520d0dac7c9ad7e69165e9140931 Mon Sep 17 00:00:00 2001
+From: Paul Eggert <eggert@cs.ucla.edu>
+Date: Fri, 7 Oct 2022 15:22:07 -0700
+Subject: tests: check for recently-fixed bug
+
+* tests/exclude17.at: New file.
+
+Upstream-Status: Backport [https://git.savannah.gnu.org/cgit/tar.git/commit/?id=0f0722df45ec520d0dac7c9ad7e69165e9140931]
+
+Signed-off-by: Qiu Tingting <qiutt@fujitsu.com>
+
+---
+ tests/exclude17.at | 35 +++++++++++++++++++++++++++++++++++
+ 1 files changed, 35 insertions(+)
+ create mode 100644 tests/exclude17.at
+
+diff --git a/tests/exclude17.at b/tests/exclude17.at
+new file mode 100644
+index 0000000..4162b2b
+--- /dev/null
++++ b/tests/exclude17.at
+@@ -0,0 +1,35 @@
++# Process this file with autom4te to create testsuite. -*- Autotest -*-
++#
++# Test suite for GNU tar.
++# Copyright 2013-2022 Free Software Foundation, Inc.
++
++# This file is part of GNU tar.
++
++# GNU tar is free software; you can redistribute it and/or modify
++# it under the terms of the GNU General Public License as published by
++# the Free Software Foundation; either version 3 of the License, or
++# (at your option) any later version.
++
++# GNU tar is distributed in the hope that it will be useful,
++# but WITHOUT ANY WARRANTY; without even the implied warranty of
++# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
++# GNU General Public License for more details.
++
++# You should have received a copy of the GNU General Public License
++# along with this program. If not, see <http://www.gnu.org/licenses/>.
++
++AT_SETUP([--exclude-vcs-ignores memory allocation])
++AT_KEYWORDS([exclude exclude17])
++
++AT_TAR_CHECK([
++mkdir dir
++cd dir
++echo '*.o' >.cvsignore
++tar -cf - --exclude-vcs-ignores . | tar -tf -
++],
++[0],
++[./
++./.cvsignore
++])
++
++AT_CLEANUP
+--
+cgit v1.1
+
diff --git a/meta/recipes-extended/tar/tar/0003-Exclude-VCS-directory-with-writing-from-an-archive.patch b/meta/recipes-extended/tar/tar/0003-Exclude-VCS-directory-with-writing-from-an-archive.patch
new file mode 100644
index 0000000000..577a9ba997
--- /dev/null
+++ b/meta/recipes-extended/tar/tar/0003-Exclude-VCS-directory-with-writing-from-an-archive.patch
@@ -0,0 +1,112 @@
+From 4f814e0e4c673f86dc65a557f7e55f6b5efd1529 Mon Sep 17 00:00:00 2001
+From: Anton Makrushin <makrusan@gmail.com>
+Date: Mon, 20 Mar 2023 20:05:42 +0530
+Subject: Exclude VCS directory with writing from an archive
+
+See https://savannah.gnu.org/bugs/?62859
+
+Upstream-Status: Backport [https://git.savannah.gnu.org/cgit/tar.git/commit/?id=4f814e0e4c673f86dc65a557f7e55f6b5efd1529]
+
+Signed-off-by: Qiu Tingting <qiutt@fujitsu.com>
+
+---
+ tests/exclude18.at | 87 ++++++++++++++++++++++++++++++++++++++++++++++++++++++
+ 1 files changed, 87 insertions(+)
+ create mode 100644 tests/exclude18.at
+
+diff --git a/tests/exclude18.at b/tests/exclude18.at
+new file mode 100644
+index 0000000..64aaa52
+--- /dev/null
++++ b/tests/exclude18.at
+@@ -0,0 +1,87 @@
++# Process this file with autom4te to create testsuite. -*- Autotest -*-
++
++# Test suite for GNU tar.
++# Copyright 2004-2023 Free Software Foundation, Inc.
++
++# This file is part of GNU tar.
++
++# GNU tar is free software; you can redistribute it and/or modify
++# it under the terms of the GNU General Public License as published by
++# the Free Software Foundation; either version 3 of the License, or
++# (at your option) any later version.
++
++# GNU tar is distributed in the hope that it will be useful,
++# but WITHOUT ANY WARRANTY; without even the implied warranty of
++# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
++# GNU General Public License for more details.
++
++# You should have received a copy of the GNU General Public License
++# along with this program. If not, see <http://www.gnu.org/licenses/>.
++
++# Test --exclude-vcs option with subcommands: EXTRACT, LIST, DIFF.
++# Check VCS directory with files, and empty.
++#
++# Ref: https://savannah.gnu.org/bugs/?62859
++# Wed 03 Aug 2022 04:06:28 PM UTC, original submission: Quote
++# Mohamed Akram <mohdakram>
++# > The --exclude-vcs flag seems to exclude .gitignore but not .git when
++# extracting.
++
++AT_SETUP([--exclude-vcs extract list compare])
++AT_KEYWORDS([exclude-vcs extract list compare exclude18])
++
++AT_TAR_CHECK([
++AT_SORT_PREREQ
++mkdir gitrepo
++cd gitrepo
++
++# Make an empty VCS directory:
++mkdir .svn
++
++# Make a VCS directory with a file:
++mkdir .git
++touch .git/_A
++
++# Make a VCS file:
++touch .gitignore
++
++# Make non-VCS files:
++touch .git_B
++touch _C
++
++# Create an archive, include VCS:
++cd ..
++tar -cf gitrepo.tar gitrepo
++rm -r gitrepo
++
++echo Extract:
++tar -xvf gitrepo.tar --exclude-vcs | sort
++
++echo
++echo List:
++tar -tf gitrepo.tar --exclude-vcs | sort
++
++echo
++echo Diff:
++tar -dvf gitrepo.tar --exclude-vcs gitrepo | sort
++
++],
++[0],
++[Extract:
++gitrepo/
++gitrepo/.git_B
++gitrepo/_C
++
++List:
++gitrepo/
++gitrepo/.git_B
++gitrepo/_C
++
++Diff:
++gitrepo/
++gitrepo/.git_B
++gitrepo/_C
++],
++[])
++
++AT_CLEANUP
+--
+cgit v1.1
+
diff --git a/meta/recipes-extended/tar/tar/run-ptest b/meta/recipes-extended/tar/tar/run-ptest
new file mode 100644
index 0000000000..185b33d61a
--- /dev/null
+++ b/meta/recipes-extended/tar/tar/run-ptest
@@ -0,0 +1,14 @@
+#!/bin/sh
+
+# Define tar test work dir
+WORKDIR=@PTEST_PATH@/tests/
+
+# Run test
+cd ${WORKDIR}
+./atconfig ./atlocal ./testsuite
+
+# clear log
+rm -rf testsuite.dir
+rm -rf testsuite.log
+
+./testsuite --am-fmt
diff --git a/meta/recipes-extended/tar/tar_1.34.bb b/meta/recipes-extended/tar/tar_1.34.bb
deleted file mode 100644
index 7307cd57a2..0000000000
--- a/meta/recipes-extended/tar/tar_1.34.bb
+++ /dev/null
@@ -1,68 +0,0 @@
-SUMMARY = "GNU file archiving program"
-DESCRIPTION = "GNU tar saves many files together into a single tape \
-or disk archive, and can restore individual files from the archive."
-HOMEPAGE = "http://www.gnu.org/software/tar/"
-SECTION = "base"
-LICENSE = "GPL-3.0-only"
-LIC_FILES_CHKSUM = "file://COPYING;md5=d32239bcb673463ab874e80d47fae504"
-
-SRC_URI = "${GNU_MIRROR}/tar/tar-${PV}.tar.bz2"
-
-SRC_URI[sha256sum] = "b44cc67f8a1f6b0250b7c860e952b37e8ed932a90bd9b1862a511079255646ff"
-
-inherit autotools gettext texinfo
-
-PACKAGECONFIG ??= ""
-PACKAGECONFIG:append:class-target = " ${@bb.utils.filter('DISTRO_FEATURES', 'acl', d)}"
-
-PACKAGECONFIG[acl] = "--with-posix-acls,--without-posix-acls,acl"
-PACKAGECONFIG[selinux] = "--with-selinux,--without-selinux,libselinux"
-
-EXTRA_OECONF += "DEFAULT_RMT_DIR=${sbindir}"
-
-CACHED_CONFIGUREVARS += "tar_cv_path_RSH=no"
-
-# Let aclocal use the relative path for the m4 file rather than the
-# absolute since tar has a lot of m4 files, otherwise there might
-# be an "Argument list too long" error when it is built in a long/deep
-# directory.
-acpaths = "-I ./m4"
-
-do_install () {
- autotools_do_install
- ln -s tar ${D}${bindir}/gtar
-}
-
-do_install:append:class-target() {
- if [ "${base_bindir}" != "${bindir}" ]; then
- install -d ${D}${base_bindir}
- mv ${D}${bindir}/tar ${D}${base_bindir}/tar
- mv ${D}${bindir}/gtar ${D}${base_bindir}/gtar
- rmdir ${D}${bindir}/
- fi
-}
-
-PACKAGES =+ "${PN}-rmt"
-
-FILES:${PN}-rmt = "${sbindir}/rmt*"
-
-inherit update-alternatives
-
-ALTERNATIVE_PRIORITY = "100"
-
-ALTERNATIVE:${PN} = "tar"
-ALTERNATIVE:${PN}-rmt = "rmt"
-ALTERNATIVE:${PN}:class-nativesdk = ""
-ALTERNATIVE:${PN}-rmt:class-nativesdk = ""
-
-ALTERNATIVE_LINK_NAME[tar] = "${base_bindir}/tar"
-ALTERNATIVE_LINK_NAME[rmt] = "${sbindir}/rmt"
-
-PROVIDES:append:class-native = " tar-replacement-native"
-NATIVE_PACKAGE_PATH_SUFFIX = "/${PN}"
-
-BBCLASSEXTEND = "native nativesdk"
-
-# Avoid false positives from CVEs in node-tar package
-# For example CVE-2021-{32803,32804,37701,37712,37713}
-CVE_PRODUCT = "gnu:tar"
diff --git a/meta/recipes-extended/tar/tar_1.35.bb b/meta/recipes-extended/tar/tar_1.35.bb
new file mode 100644
index 0000000000..c7bd1d195e
--- /dev/null
+++ b/meta/recipes-extended/tar/tar_1.35.bb
@@ -0,0 +1,102 @@
+SUMMARY = "GNU file archiving program"
+DESCRIPTION = "GNU tar saves many files together into a single tape \
+or disk archive, and can restore individual files from the archive."
+HOMEPAGE = "http://www.gnu.org/software/tar/"
+SECTION = "base"
+LICENSE = "GPL-3.0-only"
+LIC_FILES_CHKSUM = "file://COPYING;md5=1ebbd3e34237af26da5dc08a4e440464"
+
+SRC_URI = "${GNU_MIRROR}/tar/tar-${PV}.tar.bz2"
+
+SRC_URI[sha256sum] = "7edb8886a3dc69420a1446e1e2d061922b642f1cf632d2cd0f9ee7e690775985"
+
+inherit autotools gettext texinfo
+
+PACKAGECONFIG ??= ""
+PACKAGECONFIG:append:class-target = " ${@bb.utils.filter('DISTRO_FEATURES', 'acl', d)}"
+
+PACKAGECONFIG[acl] = "--with-posix-acls,--without-posix-acls,acl"
+PACKAGECONFIG[selinux] = "--with-selinux,--without-selinux,libselinux"
+
+EXTRA_OECONF += "DEFAULT_RMT_DIR=${sbindir}"
+
+CACHED_CONFIGUREVARS += "tar_cv_path_RSH=no"
+
+# Let aclocal use the relative path for the m4 file rather than the
+# absolute since tar has a lot of m4 files, otherwise there might
+# be an "Argument list too long" error when it is built in a long/deep
+# directory.
+acpaths = "-I ./m4"
+
+do_install () {
+ autotools_do_install
+ ln -s tar ${D}${bindir}/gtar
+}
+
+do_install:append:class-target() {
+ if [ "${base_bindir}" != "${bindir}" ]; then
+ install -d ${D}${base_bindir}
+ mv ${D}${bindir}/tar ${D}${base_bindir}/tar
+ mv ${D}${bindir}/gtar ${D}${base_bindir}/gtar
+ rmdir ${D}${bindir}/
+ fi
+}
+
+# add for ptest support
+SRC_URI += " \
+ file://run-ptest \
+ file://0001-tests-fix-TESTSUITE_AT.patch \
+ file://0002-tests-check-for-recently-fixed-bug.patch \
+ file://0003-Exclude-VCS-directory-with-writing-from-an-archive.patch \
+"
+
+inherit ptest
+
+do_compile_ptest() {
+ oe_runmake -C ${B}/gnu/ check
+ oe_runmake -C ${B}/lib/ check
+ oe_runmake -C ${B}/rmt/ check
+ oe_runmake -C ${B}/src/ check
+ rm -rf ${S}/tests/testsuite
+ oe_runmake -C ${B}/tests/ testsuite
+ oe_runmake -C ${B}/tests/ genfile checkseekhole ckmtime
+}
+
+do_install_ptest() {
+ install -d ${D}${PTEST_PATH}/tests/
+ install --mode=755 ${B}/tests/atconfig ${D}${PTEST_PATH}/tests/
+ sed -i "/abs_/d" ${D}${PTEST_PATH}/tests/atconfig
+ echo "abs_builddir=${PTEST_PATH}/tests/" >> ${D}${PTEST_PATH}/tests/atconfig
+ install --mode=755 ${B}/tests/atlocal ${D}${PTEST_PATH}/tests/
+ sed -i "/PATH=/d" ${D}${PTEST_PATH}/tests/atlocal
+ install --mode=755 ${B}/tests/genfile ${D}${PTEST_PATH}/tests/
+ install --mode=755 ${B}/tests/checkseekhole ${D}${PTEST_PATH}/tests/
+ install --mode=755 ${B}/tests/ckmtime ${D}${PTEST_PATH}/tests/
+ install --mode=755 ${S}/tests/testsuite ${D}${PTEST_PATH}/tests/
+ sed -i "s#@PTEST_PATH@#${PTEST_PATH}#g" ${D}${PTEST_PATH}/run-ptest
+}
+
+PACKAGES =+ "${PN}-rmt"
+
+FILES:${PN}-rmt = "${sbindir}/rmt*"
+
+inherit update-alternatives
+
+ALTERNATIVE_PRIORITY = "100"
+
+ALTERNATIVE:${PN} = "tar"
+ALTERNATIVE:${PN}-rmt = "rmt"
+ALTERNATIVE:${PN}:class-nativesdk = ""
+ALTERNATIVE:${PN}-rmt:class-nativesdk = ""
+
+ALTERNATIVE_LINK_NAME[tar] = "${base_bindir}/tar"
+ALTERNATIVE_LINK_NAME[rmt] = "${sbindir}/rmt"
+
+PROVIDES:append:class-native = " tar-replacement-native"
+NATIVE_PACKAGE_PATH_SUFFIX = "/${PN}"
+
+BBCLASSEXTEND = "native nativesdk"
+
+# Avoid false positives from CVEs in node-tar package
+# For example CVE-2021-{32803,32804,37701,37712,37713}
+CVE_PRODUCT = "gnu:tar"
diff --git a/meta/recipes-extended/tcp-wrappers/tcp-wrappers-7.6/0001-Fix-implicit-function-declaration-warnings.patch b/meta/recipes-extended/tcp-wrappers/tcp-wrappers-7.6/0001-Fix-implicit-function-declaration-warnings.patch
new file mode 100644
index 0000000000..474703885d
--- /dev/null
+++ b/meta/recipes-extended/tcp-wrappers/tcp-wrappers-7.6/0001-Fix-implicit-function-declaration-warnings.patch
@@ -0,0 +1,114 @@
+From 9c97b5db237a793e0d1b6b0241570bdc6e35ee24 Mon Sep 17 00:00:00 2001
+From: Khem Raj <raj.khem@gmail.com>
+Date: Sun, 7 Aug 2022 17:42:24 -0700
+Subject: [PATCH] Fix implicit-function-declaration warnings
+
+These are seen with clang-15+
+
+Upstream-Status: Inappropriate [upstream is dead]
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+---
+ hosts_access.c | 3 +++
+ safe_finger.c | 1 +
+ shell_cmd.c | 3 +++
+ tcpd.c | 2 +-
+ tcpdchk.c | 1 +
+ workarounds.c | 1 +
+ 6 files changed, 10 insertions(+), 1 deletion(-)
+
+diff --git a/hosts_access.c b/hosts_access.c
+index 0133e5e..58697ea 100644
+--- a/hosts_access.c
++++ b/hosts_access.c
+@@ -33,6 +33,12 @@ static char sccsid[] = "@(#) hosts_access.c 1.21 97/02/12 02:13:22";
+ #endif
+ #include <netinet/in.h>
+ #include <arpa/inet.h>
++#ifdef USE_GETDOMAIN
++/* defined in workarounds.c */
++extern int yp_get_default_domain(char **ptr);
++#else
++# include <rpcsvc/ypclnt.h>
++#endif /* USE_GETDOMAIN */
+ #include <stdio.h>
+ #include <stdlib.h>
+ #include <syslog.h>
+@@ -45,6 +46,8 @@ static char sccsid[] = "@(#) hosts_access.c 1.21 97/02/12 02:13:22";
+ #endif
+
+ extern int errno;
++extern int match_pattern_ylo(const char *s, const char *pattern);
++extern unsigned long cidr_mask_addr(char* str);
+
+ #ifndef INADDR_NONE
+ #define INADDR_NONE (-1) /* XXX should be 0xffffffff */
+diff --git a/safe_finger.c b/safe_finger.c
+index 23afab1..a6458fb 100644
+--- a/safe_finger.c
++++ b/safe_finger.c
+@@ -34,6 +34,7 @@ static char sccsid[] = "@(#) safe_finger.c 1.4 94/12/28 17:42:41";
+ #include <syslog.h>
+
+ extern void exit();
++extern int pipe_stdin(char **argv);
+
+ /* Local stuff */
+
+diff --git a/shell_cmd.c b/shell_cmd.c
+index 62d31bc..a566092 100644
+--- a/shell_cmd.c
++++ b/shell_cmd.c
+@@ -16,10 +16,13 @@ static char sccsid[] = "@(#) shell_cmd.c 1.5 94/12/28 17:42:44";
+
+ #include <sys/types.h>
+ #include <sys/param.h>
++#include <sys/wait.h>
++#include <fcntl.h>
+ #include <signal.h>
+ #include <stdio.h>
+ #include <syslog.h>
+ #include <string.h>
++#include <unistd.h>
+
+ extern void exit();
+
+diff --git a/tcpd.c b/tcpd.c
+index dc9ff17..4353caa 100644
+--- a/tcpd.c
++++ b/tcpd.c
+@@ -46,7 +46,7 @@ void fix_options(struct request_info *);
+ int allow_severity = SEVERITY; /* run-time adjustable */
+ int deny_severity = LOG_WARNING; /* ditto */
+
+-main(argc, argv)
++void main(argc, argv)
+ int argc;
+ char **argv;
+ {
+diff --git a/tcpdchk.c b/tcpdchk.c
+index 5dca8bd..67c12ce 100644
+--- a/tcpdchk.c
++++ b/tcpdchk.c
+@@ -38,6 +38,7 @@ static char sccsid[] = "@(#) tcpdchk.c 1.8 97/02/12 02:13:25";
+
+ extern int errno;
+ extern void exit();
++extern unsigned long cidr_mask_addr(char* str);
+ extern int optind;
+ extern char *optarg;
+
+diff --git a/workarounds.c b/workarounds.c
+index b22b378..6335049 100644
+--- a/workarounds.c
++++ b/workarounds.c
+@@ -21,6 +21,7 @@ char sccsid[] = "@(#) workarounds.c 1.6 96/03/19 16:22:25";
+ #include <stdio.h>
+ #include <syslog.h>
+ #include <string.h>
++#include <unistd.h>
+
+ extern int errno;
+
+--
+2.37.1
+
diff --git a/meta/recipes-extended/tcp-wrappers/tcp-wrappers_7.6.bb b/meta/recipes-extended/tcp-wrappers/tcp-wrappers_7.6.bb
index 814d7fd913..bcd1d6f792 100644
--- a/meta/recipes-extended/tcp-wrappers/tcp-wrappers_7.6.bb
+++ b/meta/recipes-extended/tcp-wrappers/tcp-wrappers_7.6.bb
@@ -6,9 +6,6 @@ SECTION = "console/network"
LICENSE = "BSD-1-Clause"
LIC_FILES_CHKSUM = "file://DISCLAIMER;md5=071bd69cb78b18888ea5e3da5c3127fa"
-PR ="r10"
-
-DEPENDS += "libnsl2"
PACKAGES = "${PN}-dbg libwrap libwrap-doc libwrap-dev libwrap-staticdev ${PN} ${PN}-doc"
FILES:libwrap = "${base_libdir}/lib*${SOLIBS}"
@@ -50,6 +47,7 @@ SRC_URI = "http://ftp.porcupine.org/pub/security/tcp_wrappers_${PV}.tar.gz \
file://fix_warnings.patch \
file://fix_warnings2.patch \
file://0001-Remove-fgets-extern-declaration.patch \
+ file://0001-Fix-implicit-function-declaration-warnings.patch \
"
SRC_URI[md5sum] = "e6fa25f71226d090f34de3f6b122fb5a"
diff --git a/meta/recipes-extended/texinfo/texinfo/0001-gnulib-Update.patch b/meta/recipes-extended/texinfo/texinfo/0001-gnulib-Update.patch
deleted file mode 100644
index 470212cabd..0000000000
--- a/meta/recipes-extended/texinfo/texinfo/0001-gnulib-Update.patch
+++ /dev/null
@@ -1,11765 +0,0 @@
-From 4908050c39dbcdcbd59955ea23d692f25f342307 Mon Sep 17 00:00:00 2001
-From: Khem Raj <raj.khem@gmail.com>
-Date: Wed, 7 Jul 2021 13:42:35 -0700
-Subject: [PATCH] gnulib: Update
-
-Upstream-Status: Pending
-Signed-off-by: Khem Raj <raj.khem@gmail.com>
----
- gnulib/lib/Makefile.am | 782 +++++++++++--------
- gnulib/lib/_Noreturn.h | 8 +-
- gnulib/lib/alloca.in.h | 20 +-
- gnulib/lib/arg-nonnull.h | 8 +-
- gnulib/lib/argz.c | 16 +-
- gnulib/lib/argz.in.h | 14 +-
- gnulib/lib/asnprintf.c | 16 +-
- gnulib/lib/asprintf.c | 16 +-
- gnulib/lib/attribute.h | 16 +-
- gnulib/lib/basename-lgpl.c | 14 +-
- gnulib/lib/basename-lgpl.h | 26 +-
- gnulib/lib/btowc.c | 14 +-
- gnulib/lib/c++defs.h | 8 +-
- gnulib/lib/calloc.c | 55 ++
- gnulib/lib/cdefs.h | 26 +-
- gnulib/lib/cloexec.c | 18 +-
- gnulib/lib/cloexec.h | 18 +-
- gnulib/lib/close.c | 14 +-
- gnulib/lib/dup2.c | 14 +-
- gnulib/lib/dynarray.h | 24 +-
- gnulib/lib/errno.in.h | 16 +-
- gnulib/lib/error.c | 14 +-
- gnulib/lib/error.h | 14 +-
- gnulib/lib/exitfail.c | 14 +-
- gnulib/lib/exitfail.h | 14 +-
- gnulib/lib/fcntl.c | 14 +-
- gnulib/lib/fcntl.in.h | 14 +-
- gnulib/lib/fd-hook.c | 16 +-
- gnulib/lib/fd-hook.h | 16 +-
- gnulib/lib/filename.h | 8 +-
- gnulib/lib/float+.h | 16 +-
- gnulib/lib/float.c | 14 +-
- gnulib/lib/float.in.h | 14 +-
- gnulib/lib/free.c | 53 ++
- gnulib/lib/fstat.c | 14 +-
- gnulib/lib/getdtablesize.c | 14 +-
- gnulib/lib/getopt-cdefs.in.h | 21 +-
- gnulib/lib/getopt-core.h | 8 +-
- gnulib/lib/getopt-ext.h | 8 +-
- gnulib/lib/getopt-pfx-core.h | 21 +-
- gnulib/lib/getopt-pfx-ext.h | 21 +-
- gnulib/lib/getopt.c | 8 +-
- gnulib/lib/getopt.in.h | 24 +-
- gnulib/lib/getopt1.c | 8 +-
- gnulib/lib/getopt_int.h | 8 +-
- gnulib/lib/getprogname.c | 18 +-
- gnulib/lib/getprogname.h | 8 +-
- gnulib/lib/gettext.h | 16 +-
- gnulib/lib/glthread/lock.c | 16 +-
- gnulib/lib/glthread/lock.h | 16 +-
- gnulib/lib/glthread/threadlib.c | 16 +-
- gnulib/lib/hard-locale.c | 14 +-
- gnulib/lib/hard-locale.h | 14 +-
- gnulib/lib/ialloc.c | 21 +
- gnulib/lib/ialloc.h | 94 +++
- gnulib/lib/idx.h | 114 +++
- gnulib/lib/intprops.h | 26 +-
- gnulib/lib/inttypes.in.h | 14 +-
- gnulib/lib/iswblank.c | 16 +-
- gnulib/lib/iswdigit.c | 16 +-
- gnulib/lib/iswxdigit.c | 16 +-
- gnulib/lib/itold.c | 14 +-
- gnulib/lib/langinfo.in.h | 16 +-
- gnulib/lib/lc-charset-dispatch.c | 14 +-
- gnulib/lib/lc-charset-dispatch.h | 14 +-
- gnulib/lib/libc-config.h | 25 +-
- gnulib/lib/limits.in.h | 16 +-
- gnulib/lib/localcharset.c | 16 +-
- gnulib/lib/localcharset.h | 16 +-
- gnulib/lib/locale.in.h | 14 +-
- gnulib/lib/localeconv.c | 14 +-
- gnulib/lib/malloc.c | 51 +-
- gnulib/lib/malloc/dynarray-skeleton.c | 8 +-
- gnulib/lib/malloc/dynarray.h | 8 +-
- gnulib/lib/malloc/dynarray_at_failure.c | 8 +-
- gnulib/lib/malloc/dynarray_emplace_enlarge.c | 8 +-
- gnulib/lib/malloc/dynarray_finalize.c | 8 +-
- gnulib/lib/malloc/dynarray_resize.c | 8 +-
- gnulib/lib/malloc/dynarray_resize_clear.c | 8 +-
- gnulib/lib/malloca.c | 24 +-
- gnulib/lib/malloca.h | 21 +-
- gnulib/lib/mbchar.c | 14 +-
- gnulib/lib/mbchar.h | 14 +-
- gnulib/lib/mbiter.c | 18 +
- gnulib/lib/mbiter.h | 14 +-
- gnulib/lib/mbrtowc-impl-utf8.h | 16 +-
- gnulib/lib/mbrtowc-impl.h | 14 +-
- gnulib/lib/mbrtowc.c | 14 +-
- gnulib/lib/mbscasecmp.c | 14 +-
- gnulib/lib/mbschr.c | 14 +-
- gnulib/lib/mbsinit.c | 14 +-
- gnulib/lib/mbslen.c | 14 +-
- gnulib/lib/mbsncasecmp.c | 14 +-
- gnulib/lib/mbsstr.c | 14 +-
- gnulib/lib/mbtowc-impl.h | 14 +-
- gnulib/lib/mbtowc-lock.c | 14 +-
- gnulib/lib/mbtowc-lock.h | 14 +-
- gnulib/lib/mbtowc.c | 14 +-
- gnulib/lib/mbuiter.c | 17 +
- gnulib/lib/mbuiter.h | 14 +-
- gnulib/lib/memchr.c | 24 +-
- gnulib/lib/memchr.valgrind | 14 +-
- gnulib/lib/mempcpy.c | 16 +-
- gnulib/lib/memrchr.c | 14 +-
- gnulib/lib/minmax.h | 60 ++
- gnulib/lib/msvc-inval.c | 16 +-
- gnulib/lib/msvc-inval.h | 16 +-
- gnulib/lib/msvc-nothrow.c | 16 +-
- gnulib/lib/msvc-nothrow.h | 16 +-
- gnulib/lib/nl_langinfo-lock.c | 14 +-
- gnulib/lib/nl_langinfo.c | 14 +-
- gnulib/lib/open.c | 14 +-
- gnulib/lib/pathmax.h | 16 +-
- gnulib/lib/printf-args.c | 16 +-
- gnulib/lib/printf-args.h | 16 +-
- gnulib/lib/printf-parse.c | 27 +-
- gnulib/lib/printf-parse.h | 16 +-
- gnulib/lib/realloc.c | 63 ++
- gnulib/lib/reallocarray.c | 39 +
- gnulib/lib/regcomp.c | 8 +-
- gnulib/lib/regex.c | 8 +-
- gnulib/lib/regex.h | 8 +-
- gnulib/lib/regex_internal.c | 8 +-
- gnulib/lib/regex_internal.h | 8 +-
- gnulib/lib/regexec.c | 20 +-
- gnulib/lib/setlocale-lock.c | 14 +-
- gnulib/lib/setlocale_null.c | 14 +-
- gnulib/lib/setlocale_null.h | 14 +-
- gnulib/lib/size_max.h | 16 +-
- gnulib/lib/stat-time.c | 18 +
- gnulib/lib/stat-time.h | 14 +-
- gnulib/lib/stat-w32.c | 14 +-
- gnulib/lib/stat-w32.h | 14 +-
- gnulib/lib/stat.c | 14 +-
- gnulib/lib/stdarg.in.h | 16 +-
- gnulib/lib/stdbool.in.h | 16 +-
- gnulib/lib/stddef.in.h | 16 +-
- gnulib/lib/stdint.in.h | 20 +-
- gnulib/lib/stdio.in.h | 24 +-
- gnulib/lib/stdlib.in.h | 146 +++-
- gnulib/lib/stpcpy.c | 14 +-
- gnulib/lib/str-kmp.h | 26 +-
- gnulib/lib/str-two-way.h | 16 +-
- gnulib/lib/strcasecmp.c | 16 +-
- gnulib/lib/strcasestr.c | 16 +-
- gnulib/lib/strdup.c | 16 +-
- gnulib/lib/streq.h | 16 +-
- gnulib/lib/strerror-override.c | 100 +--
- gnulib/lib/strerror-override.h | 15 +-
- gnulib/lib/strerror.c | 14 +-
- gnulib/lib/string.in.h | 18 +-
- gnulib/lib/strings.in.h | 16 +-
- gnulib/lib/strncasecmp.c | 16 +-
- gnulib/lib/strndup.c | 16 +-
- gnulib/lib/strnlen.c | 16 +-
- gnulib/lib/strnlen1.c | 14 +-
- gnulib/lib/strnlen1.h | 14 +-
- gnulib/lib/strstr.c | 16 +-
- gnulib/lib/sys_stat.in.h | 16 +-
- gnulib/lib/sys_types.in.h | 16 +-
- gnulib/lib/time.in.h | 58 +-
- gnulib/lib/unistd.c | 18 +
- gnulib/lib/unistd.in.h | 29 +-
- gnulib/lib/unitypes.in.h | 16 +-
- gnulib/lib/uniwidth.in.h | 16 +-
- gnulib/lib/uniwidth/cjk.h | 16 +-
- gnulib/lib/uniwidth/width.c | 16 +-
- gnulib/lib/vasnprintf.c | 71 +-
- gnulib/lib/vasnprintf.h | 16 +-
- gnulib/lib/vasprintf.c | 16 +-
- gnulib/lib/verify.h | 14 +-
- gnulib/lib/warn-on-use.h | 8 +-
- gnulib/lib/wchar.in.h | 18 +-
- gnulib/lib/wcrtomb.c | 14 +-
- gnulib/lib/wctype-h.c | 19 +
- gnulib/lib/wctype.in.h | 26 +-
- gnulib/lib/wcwidth.c | 14 +-
- gnulib/lib/windows-initguard.h | 16 +-
- gnulib/lib/windows-mutex.c | 16 +-
- gnulib/lib/windows-mutex.h | 16 +-
- gnulib/lib/windows-once.c | 16 +-
- gnulib/lib/windows-once.h | 16 +-
- gnulib/lib/windows-recmutex.c | 16 +-
- gnulib/lib/windows-recmutex.h | 16 +-
- gnulib/lib/windows-rwlock.c | 16 +-
- gnulib/lib/windows-rwlock.h | 16 +-
- gnulib/lib/xalloc-oversized.h | 53 +-
- gnulib/lib/xalloc.h | 143 +---
- gnulib/lib/xmalloc.c | 293 +++++--
- gnulib/lib/xsize.c | 18 +
- gnulib/lib/xsize.h | 16 +-
- gnulib/m4/calloc.m4 | 82 ++
- gnulib/m4/fcntl_h.m4 | 39 +-
- gnulib/m4/free.m4 | 52 ++
- gnulib/m4/fstat.m4 | 4 +-
- gnulib/m4/gnulib-common.m4 | 84 +-
- gnulib/m4/gnulib-comp.m4 | 192 ++++-
- gnulib/m4/inttypes.m4 | 31 +-
- gnulib/m4/iswdigit.m4 | 6 +-
- gnulib/m4/iswxdigit.m4 | 4 +-
- gnulib/m4/langinfo_h.m4 | 25 +-
- gnulib/m4/largefile.m4 | 28 +-
- gnulib/m4/locale_h.m4 | 37 +-
- gnulib/m4/malloc.m4 | 152 +++-
- gnulib/m4/math_h.m4 | 227 +++---
- gnulib/m4/mbslen.m4 | 4 +-
- gnulib/m4/memchr.m4 | 4 +-
- gnulib/m4/mempcpy.m4 | 4 +-
- gnulib/m4/memrchr.m4 | 4 +-
- gnulib/m4/minmax.m4 | 44 ++
- gnulib/m4/printf.m4 | 5 +-
- gnulib/m4/realloc.m4 | 63 ++
- gnulib/m4/reallocarray.m4 | 23 +
- gnulib/m4/stat.m4 | 4 +-
- gnulib/m4/stddef_h.m4 | 23 +-
- gnulib/m4/stdint.m4 | 6 +-
- gnulib/m4/stdio_h.m4 | 168 ++--
- gnulib/m4/stdlib_h.m4 | 122 +--
- gnulib/m4/stpcpy.m4 | 4 +-
- gnulib/m4/strcase.m4 | 6 +-
- gnulib/m4/strcasestr.m4 | 4 +-
- gnulib/m4/strdup.m4 | 6 +-
- gnulib/m4/strerror.m4 | 4 +-
- gnulib/m4/string_h.m4 | 124 +--
- gnulib/m4/strings_h.m4 | 38 +-
- gnulib/m4/strndup.m4 | 4 +-
- gnulib/m4/strnlen.m4 | 4 +-
- gnulib/m4/strstr.m4 | 4 +-
- gnulib/m4/sys_socket_h.m4 | 53 +-
- gnulib/m4/sys_stat_h.m4 | 65 +-
- gnulib/m4/sys_types_h.m4 | 16 +-
- gnulib/m4/time_h.m4 | 62 +-
- gnulib/m4/unistd_h.m4 | 194 ++---
- gnulib/m4/visibility.m4 | 6 +-
- gnulib/m4/wchar_h.m4 | 109 +--
- gnulib/m4/wctype_h.m4 | 39 +-
- gnulib/m4/wint_t.m4 | 10 +-
- gnulib/m4/year2038.m4 | 112 +++
- 238 files changed, 4521 insertions(+), 2636 deletions(-)
- create mode 100644 gnulib/lib/calloc.c
- create mode 100644 gnulib/lib/free.c
- create mode 100644 gnulib/lib/ialloc.c
- create mode 100644 gnulib/lib/ialloc.h
- create mode 100644 gnulib/lib/idx.h
- create mode 100644 gnulib/lib/minmax.h
- create mode 100644 gnulib/lib/realloc.c
- create mode 100644 gnulib/lib/reallocarray.c
- create mode 100644 gnulib/m4/calloc.m4
- create mode 100644 gnulib/m4/free.m4
- create mode 100644 gnulib/m4/minmax.m4
- create mode 100644 gnulib/m4/realloc.m4
- create mode 100644 gnulib/m4/reallocarray.m4
- create mode 100644 gnulib/m4/year2038.m4
-
---- a/gnulib/lib/Makefile.am
-+++ b/gnulib/lib/Makefile.am
-@@ -165,6 +165,24 @@ EXTRA_libgnu_a_SOURCES += btowc.c
-
- ## end gnulib module btowc
-
-+## begin gnulib module calloc-gnu
-+
-+
-+EXTRA_DIST += calloc.c
-+
-+EXTRA_libgnu_a_SOURCES += calloc.c
-+
-+## end gnulib module calloc-gnu
-+
-+## begin gnulib module calloc-posix
-+
-+
-+EXTRA_DIST += calloc.c
-+
-+EXTRA_libgnu_a_SOURCES += calloc.c
-+
-+## end gnulib module calloc-posix
-+
- ## begin gnulib module cloexec
-
- if gl_GNULIB_ENABLED_cloexec
-@@ -200,6 +218,32 @@ EXTRA_libgnu_a_SOURCES += dup2.c
- ## begin gnulib module dynarray
-
- if gl_GNULIB_ENABLED_dynarray
-+BUILT_SOURCES += malloc/dynarray.gl.h malloc/dynarray-skeleton.gl.h
-+
-+malloc/dynarray.gl.h: malloc/dynarray.h
-+ $(AM_V_at)$(MKDIR_P) malloc
-+ $(AM_V_GEN)rm -f $@-t $@ && \
-+ { echo '/* DO NOT EDIT! GENERATED AUTOMATICALLY! */'; \
-+ sed -e '/libc_hidden_proto/d' < $(srcdir)/malloc/dynarray.h; \
-+ } > $@-t && \
-+ mv $@-t $@
-+MOSTLYCLEANFILES += malloc/dynarray.gl.h malloc/dynarray.gl.h-t
-+
-+malloc/dynarray-skeleton.gl.h: malloc/dynarray-skeleton.c
-+ $(AM_V_at)$(MKDIR_P) malloc
-+ $(AM_V_GEN)rm -f $@-t $@ && \
-+ { echo '/* DO NOT EDIT! GENERATED AUTOMATICALLY! */'; \
-+ sed -e 's|<malloc/dynarray\.h>|<malloc/dynarray.gl.h>|g' \
-+ -e 's|__attribute_maybe_unused__|_GL_ATTRIBUTE_MAYBE_UNUSED|g' \
-+ -e 's|__attribute_nonnull__|_GL_ATTRIBUTE_NONNULL|g' \
-+ -e 's|__attribute_warn_unused_result__|_GL_ATTRIBUTE_NODISCARD|g' \
-+ -e 's|__glibc_likely|_GL_LIKELY|g' \
-+ -e 's|__glibc_unlikely|_GL_UNLIKELY|g' \
-+ < $(srcdir)/malloc/dynarray-skeleton.c; \
-+ } > $@-t && \
-+ mv $@-t $@
-+MOSTLYCLEANFILES += malloc/dynarray-skeleton.gl.h malloc/dynarray-skeleton.gl.h-t
-+
- libgnu_a_SOURCES += malloc/dynarray_at_failure.c malloc/dynarray_emplace_enlarge.c malloc/dynarray_finalize.c malloc/dynarray_resize.c malloc/dynarray_resize_clear.c
-
- endif
-@@ -285,13 +329,13 @@ fcntl.h: fcntl.in.h $(top_builddir)/conf
- -e 's|@''PRAGMA_SYSTEM_HEADER''@|@PRAGMA_SYSTEM_HEADER@|g' \
- -e 's|@''PRAGMA_COLUMNS''@|@PRAGMA_COLUMNS@|g' \
- -e 's|@''NEXT_FCNTL_H''@|$(NEXT_FCNTL_H)|g' \
-- -e 's/@''GNULIB_CREAT''@/$(GNULIB_CREAT)/g' \
-- -e 's/@''GNULIB_FCNTL''@/$(GNULIB_FCNTL)/g' \
-- -e 's/@''GNULIB_NONBLOCKING''@/$(GNULIB_NONBLOCKING)/g' \
-- -e 's/@''GNULIB_OPEN''@/$(GNULIB_OPEN)/g' \
-- -e 's/@''GNULIB_OPENAT''@/$(GNULIB_OPENAT)/g' \
-- -e 's/@''GNULIB_MDA_CREAT''@/$(GNULIB_MDA_CREAT)/g' \
-- -e 's/@''GNULIB_MDA_OPEN''@/$(GNULIB_MDA_OPEN)/g' \
-+ -e 's/@''GNULIB_CREAT''@/$(GL_GNULIB_CREAT)/g' \
-+ -e 's/@''GNULIB_FCNTL''@/$(GL_GNULIB_FCNTL)/g' \
-+ -e 's/@''GNULIB_NONBLOCKING''@/$(GL_GNULIB_NONBLOCKING)/g' \
-+ -e 's/@''GNULIB_OPEN''@/$(GL_GNULIB_OPEN)/g' \
-+ -e 's/@''GNULIB_OPENAT''@/$(GL_GNULIB_OPENAT)/g' \
-+ -e 's/@''GNULIB_MDA_CREAT''@/$(GL_GNULIB_MDA_CREAT)/g' \
-+ -e 's/@''GNULIB_MDA_OPEN''@/$(GL_GNULIB_MDA_OPEN)/g' \
- -e 's|@''HAVE_FCNTL''@|$(HAVE_FCNTL)|g' \
- -e 's|@''HAVE_OPENAT''@|$(HAVE_OPENAT)|g' \
- -e 's|@''REPLACE_CREAT''@|$(REPLACE_CREAT)|g' \
-@@ -358,6 +402,17 @@ EXTRA_libgnu_a_SOURCES += float.c itold.
-
- ## end gnulib module float
-
-+## begin gnulib module free-posix
-+
-+if gl_GNULIB_ENABLED_ef07dc4b3077c11ea9cef586db4e5955
-+
-+endif
-+EXTRA_DIST += free.c
-+
-+EXTRA_libgnu_a_SOURCES += free.c
-+
-+## end gnulib module free-posix
-+
- ## begin gnulib module fstat
-
- if gl_GNULIB_ENABLED_fstat
-@@ -445,11 +500,23 @@ EXTRA_DIST += $(top_srcdir)/build-aux/co
-
- ## end gnulib module havelib
-
-+## begin gnulib module ialloc
-+
-+libgnu_a_SOURCES += ialloc.c
-+
-+EXTRA_DIST += ialloc.h
-+
-+## end gnulib module ialloc
-+
-+## begin gnulib module idx
-+
-+libgnu_a_SOURCES += idx.h
-+
-+## end gnulib module idx
-+
- ## begin gnulib module intprops
-
--if gl_GNULIB_ENABLED_intprops
-
--endif
- EXTRA_DIST += intprops.h
-
- ## end gnulib module intprops
-@@ -470,10 +537,10 @@ inttypes.h: inttypes.in.h $(top_builddir
- -e 's|@''NEXT_INTTYPES_H''@|$(NEXT_INTTYPES_H)|g' \
- -e 's/@''APPLE_UNIVERSAL_BUILD''@/$(APPLE_UNIVERSAL_BUILD)/g' \
- -e 's/@''PRIPTR_PREFIX''@/$(PRIPTR_PREFIX)/g' \
-- -e 's/@''GNULIB_IMAXABS''@/$(GNULIB_IMAXABS)/g' \
-- -e 's/@''GNULIB_IMAXDIV''@/$(GNULIB_IMAXDIV)/g' \
-- -e 's/@''GNULIB_STRTOIMAX''@/$(GNULIB_STRTOIMAX)/g' \
-- -e 's/@''GNULIB_STRTOUMAX''@/$(GNULIB_STRTOUMAX)/g' \
-+ -e 's/@''GNULIB_IMAXABS''@/$(GL_GNULIB_IMAXABS)/g' \
-+ -e 's/@''GNULIB_IMAXDIV''@/$(GL_GNULIB_IMAXDIV)/g' \
-+ -e 's/@''GNULIB_STRTOIMAX''@/$(GL_GNULIB_STRTOIMAX)/g' \
-+ -e 's/@''GNULIB_STRTOUMAX''@/$(GL_GNULIB_STRTOUMAX)/g' \
- -e 's/@''HAVE_DECL_IMAXABS''@/$(HAVE_DECL_IMAXABS)/g' \
- -e 's/@''HAVE_DECL_IMAXDIV''@/$(HAVE_DECL_IMAXDIV)/g' \
- -e 's/@''HAVE_DECL_STRTOIMAX''@/$(HAVE_DECL_STRTOIMAX)/g' \
-@@ -540,7 +607,7 @@ langinfo.h: langinfo.in.h $(top_builddir
- -e 's|@''PRAGMA_SYSTEM_HEADER''@|@PRAGMA_SYSTEM_HEADER@|g' \
- -e 's|@''PRAGMA_COLUMNS''@|@PRAGMA_COLUMNS@|g' \
- -e 's|@''NEXT_LANGINFO_H''@|$(NEXT_LANGINFO_H)|g' \
-- -e 's/@''GNULIB_NL_LANGINFO''@/$(GNULIB_NL_LANGINFO)/g' \
-+ -e 's/@''GNULIB_NL_LANGINFO''@/$(GL_GNULIB_NL_LANGINFO)/g' \
- -e 's|@''HAVE_LANGINFO_CODESET''@|$(HAVE_LANGINFO_CODESET)|g' \
- -e 's|@''HAVE_LANGINFO_T_FMT_AMPM''@|$(HAVE_LANGINFO_T_FMT_AMPM)|g' \
- -e 's|@''HAVE_LANGINFO_ALTMON''@|$(HAVE_LANGINFO_ALTMON)|g' \
-@@ -622,11 +689,11 @@ locale.h: locale.in.h $(top_builddir)/co
- -e 's|@''PRAGMA_SYSTEM_HEADER''@|@PRAGMA_SYSTEM_HEADER@|g' \
- -e 's|@''PRAGMA_COLUMNS''@|@PRAGMA_COLUMNS@|g' \
- -e 's|@''NEXT_LOCALE_H''@|$(NEXT_LOCALE_H)|g' \
-- -e 's/@''GNULIB_LOCALECONV''@/$(GNULIB_LOCALECONV)/g' \
-- -e 's/@''GNULIB_SETLOCALE''@/$(GNULIB_SETLOCALE)/g' \
-- -e 's/@''GNULIB_SETLOCALE_NULL''@/$(GNULIB_SETLOCALE_NULL)/g' \
-- -e 's/@''GNULIB_DUPLOCALE''@/$(GNULIB_DUPLOCALE)/g' \
-- -e 's/@''GNULIB_LOCALENAME''@/$(GNULIB_LOCALENAME)/g' \
-+ -e 's/@''GNULIB_LOCALECONV''@/$(GL_GNULIB_LOCALECONV)/g' \
-+ -e 's/@''GNULIB_SETLOCALE''@/$(GL_GNULIB_SETLOCALE)/g' \
-+ -e 's/@''GNULIB_SETLOCALE_NULL''@/$(GL_GNULIB_SETLOCALE_NULL)/g' \
-+ -e 's/@''GNULIB_DUPLOCALE''@/$(GL_GNULIB_DUPLOCALE)/g' \
-+ -e 's/@''GNULIB_LOCALENAME''@/$(GL_GNULIB_LOCALENAME)/g' \
- -e 's|@''HAVE_NEWLOCALE''@|$(HAVE_NEWLOCALE)|g' \
- -e 's|@''HAVE_DUPLOCALE''@|$(HAVE_DUPLOCALE)|g' \
- -e 's|@''HAVE_FREELOCALE''@|$(HAVE_FREELOCALE)|g' \
-@@ -670,11 +737,18 @@ libgnu_a_SOURCES += glthread/lock.h glth
- endif
- ## end gnulib module lock
-
-+## begin gnulib module malloc-gnu
-+
-+
-+EXTRA_DIST += malloc.c
-+
-+EXTRA_libgnu_a_SOURCES += malloc.c
-+
-+## end gnulib module malloc-gnu
-+
- ## begin gnulib module malloc-posix
-
--if gl_GNULIB_ENABLED_ef455225c00f5049c808c2eda3e76866
-
--endif
- EXTRA_DIST += malloc.c
-
- EXTRA_libgnu_a_SOURCES += malloc.c
-@@ -805,6 +879,12 @@ EXTRA_libgnu_a_SOURCES += memrchr.c
-
- ## end gnulib module memrchr
-
-+## begin gnulib module minmax
-+
-+libgnu_a_SOURCES += minmax.h
-+
-+## end gnulib module minmax
-+
- ## begin gnulib module msvc-inval
-
- if gl_GNULIB_ENABLED_f691f076f650964c9f5598c3ee487616
-@@ -856,6 +936,33 @@ EXTRA_DIST += pathmax.h
-
- ## end gnulib module pathmax
-
-+## begin gnulib module realloc-gnu
-+
-+
-+EXTRA_DIST += realloc.c
-+
-+EXTRA_libgnu_a_SOURCES += realloc.c
-+
-+## end gnulib module realloc-gnu
-+
-+## begin gnulib module realloc-posix
-+
-+
-+EXTRA_DIST += realloc.c
-+
-+EXTRA_libgnu_a_SOURCES += realloc.c
-+
-+## end gnulib module realloc-posix
-+
-+## begin gnulib module reallocarray
-+
-+
-+EXTRA_DIST += reallocarray.c
-+
-+EXTRA_libgnu_a_SOURCES += reallocarray.c
-+
-+## end gnulib module reallocarray
-+
- ## begin gnulib module regex
-
-
-@@ -1072,7 +1179,7 @@ stdint.h: stdint.in.h $(top_builddir)/co
- -e 's/@''BITSIZEOF_WINT_T''@/$(BITSIZEOF_WINT_T)/g' \
- -e 's/@''HAVE_SIGNED_WINT_T''@/$(HAVE_SIGNED_WINT_T)/g' \
- -e 's/@''WINT_T_SUFFIX''@/$(WINT_T_SUFFIX)/g' \
-- -e 's/@''GNULIB_OVERRIDES_WINT_T''@/$(GNULIB_OVERRIDES_WINT_T)/g' \
-+ -e 's/@''GNULIBHEADERS_OVERRIDE_WINT_T''@/$(GNULIBHEADERS_OVERRIDE_WINT_T)/g' \
- < $(srcdir)/stdint.in.h; \
- } > $@-t && \
- mv $@-t $@
-@@ -1100,65 +1207,65 @@ stdio.h: stdio.in.h $(top_builddir)/conf
- -e 's|@''PRAGMA_SYSTEM_HEADER''@|@PRAGMA_SYSTEM_HEADER@|g' \
- -e 's|@''PRAGMA_COLUMNS''@|@PRAGMA_COLUMNS@|g' \
- -e 's|@''NEXT_STDIO_H''@|$(NEXT_STDIO_H)|g' \
-- -e 's/@''GNULIB_DPRINTF''@/$(GNULIB_DPRINTF)/g' \
-- -e 's/@''GNULIB_FCLOSE''@/$(GNULIB_FCLOSE)/g' \
-- -e 's/@''GNULIB_FDOPEN''@/$(GNULIB_FDOPEN)/g' \
-- -e 's/@''GNULIB_FFLUSH''@/$(GNULIB_FFLUSH)/g' \
-- -e 's/@''GNULIB_FGETC''@/$(GNULIB_FGETC)/g' \
-- -e 's/@''GNULIB_FGETS''@/$(GNULIB_FGETS)/g' \
-- -e 's/@''GNULIB_FOPEN''@/$(GNULIB_FOPEN)/g' \
-- -e 's/@''GNULIB_FPRINTF''@/$(GNULIB_FPRINTF)/g' \
-- -e 's/@''GNULIB_FPRINTF_POSIX''@/$(GNULIB_FPRINTF_POSIX)/g' \
-- -e 's/@''GNULIB_FPURGE''@/$(GNULIB_FPURGE)/g' \
-- -e 's/@''GNULIB_FPUTC''@/$(GNULIB_FPUTC)/g' \
-- -e 's/@''GNULIB_FPUTS''@/$(GNULIB_FPUTS)/g' \
-- -e 's/@''GNULIB_FREAD''@/$(GNULIB_FREAD)/g' \
-- -e 's/@''GNULIB_FREOPEN''@/$(GNULIB_FREOPEN)/g' \
-- -e 's/@''GNULIB_FSCANF''@/$(GNULIB_FSCANF)/g' \
-- -e 's/@''GNULIB_FSEEK''@/$(GNULIB_FSEEK)/g' \
-- -e 's/@''GNULIB_FSEEKO''@/$(GNULIB_FSEEKO)/g' \
-- -e 's/@''GNULIB_FTELL''@/$(GNULIB_FTELL)/g' \
-- -e 's/@''GNULIB_FTELLO''@/$(GNULIB_FTELLO)/g' \
-- -e 's/@''GNULIB_FWRITE''@/$(GNULIB_FWRITE)/g' \
-- -e 's/@''GNULIB_GETC''@/$(GNULIB_GETC)/g' \
-- -e 's/@''GNULIB_GETCHAR''@/$(GNULIB_GETCHAR)/g' \
-- -e 's/@''GNULIB_GETDELIM''@/$(GNULIB_GETDELIM)/g' \
-- -e 's/@''GNULIB_GETLINE''@/$(GNULIB_GETLINE)/g' \
-- -e 's/@''GNULIB_OBSTACK_PRINTF''@/$(GNULIB_OBSTACK_PRINTF)/g' \
-- -e 's/@''GNULIB_OBSTACK_PRINTF_POSIX''@/$(GNULIB_OBSTACK_PRINTF_POSIX)/g' \
-- -e 's/@''GNULIB_PCLOSE''@/$(GNULIB_PCLOSE)/g' \
-- -e 's/@''GNULIB_PERROR''@/$(GNULIB_PERROR)/g' \
-- -e 's/@''GNULIB_POPEN''@/$(GNULIB_POPEN)/g' \
-- -e 's/@''GNULIB_PRINTF''@/$(GNULIB_PRINTF)/g' \
-- -e 's/@''GNULIB_PRINTF_POSIX''@/$(GNULIB_PRINTF_POSIX)/g' \
-- -e 's/@''GNULIB_PUTC''@/$(GNULIB_PUTC)/g' \
-- -e 's/@''GNULIB_PUTCHAR''@/$(GNULIB_PUTCHAR)/g' \
-- -e 's/@''GNULIB_PUTS''@/$(GNULIB_PUTS)/g' \
-- -e 's/@''GNULIB_REMOVE''@/$(GNULIB_REMOVE)/g' \
-- -e 's/@''GNULIB_RENAME''@/$(GNULIB_RENAME)/g' \
-- -e 's/@''GNULIB_RENAMEAT''@/$(GNULIB_RENAMEAT)/g' \
-- -e 's/@''GNULIB_SCANF''@/$(GNULIB_SCANF)/g' \
-- -e 's/@''GNULIB_SNPRINTF''@/$(GNULIB_SNPRINTF)/g' \
-- -e 's/@''GNULIB_SPRINTF_POSIX''@/$(GNULIB_SPRINTF_POSIX)/g' \
-- -e 's/@''GNULIB_STDIO_H_NONBLOCKING''@/$(GNULIB_STDIO_H_NONBLOCKING)/g' \
-- -e 's/@''GNULIB_STDIO_H_SIGPIPE''@/$(GNULIB_STDIO_H_SIGPIPE)/g' \
-- -e 's/@''GNULIB_TMPFILE''@/$(GNULIB_TMPFILE)/g' \
-- -e 's/@''GNULIB_VASPRINTF''@/$(GNULIB_VASPRINTF)/g' \
-- -e 's/@''GNULIB_VDPRINTF''@/$(GNULIB_VDPRINTF)/g' \
-- -e 's/@''GNULIB_VFPRINTF''@/$(GNULIB_VFPRINTF)/g' \
-- -e 's/@''GNULIB_VFPRINTF_POSIX''@/$(GNULIB_VFPRINTF_POSIX)/g' \
-- -e 's/@''GNULIB_VFSCANF''@/$(GNULIB_VFSCANF)/g' \
-- -e 's/@''GNULIB_VSCANF''@/$(GNULIB_VSCANF)/g' \
-- -e 's/@''GNULIB_VPRINTF''@/$(GNULIB_VPRINTF)/g' \
-- -e 's/@''GNULIB_VPRINTF_POSIX''@/$(GNULIB_VPRINTF_POSIX)/g' \
-- -e 's/@''GNULIB_VSNPRINTF''@/$(GNULIB_VSNPRINTF)/g' \
-- -e 's/@''GNULIB_VSPRINTF_POSIX''@/$(GNULIB_VSPRINTF_POSIX)/g' \
-- -e 's/@''GNULIB_MDA_FCLOSEALL''@/$(GNULIB_MDA_FCLOSEALL)/g' \
-- -e 's/@''GNULIB_MDA_FDOPEN''@/$(GNULIB_MDA_FDOPEN)/g' \
-- -e 's/@''GNULIB_MDA_FILENO''@/$(GNULIB_MDA_FILENO)/g' \
-- -e 's/@''GNULIB_MDA_GETW''@/$(GNULIB_MDA_GETW)/g' \
-- -e 's/@''GNULIB_MDA_PUTW''@/$(GNULIB_MDA_PUTW)/g' \
-- -e 's/@''GNULIB_MDA_TEMPNAM''@/$(GNULIB_MDA_TEMPNAM)/g' \
-+ -e 's/@''GNULIB_DPRINTF''@/$(GL_GNULIB_DPRINTF)/g' \
-+ -e 's/@''GNULIB_FCLOSE''@/$(GL_GNULIB_FCLOSE)/g' \
-+ -e 's/@''GNULIB_FDOPEN''@/$(GL_GNULIB_FDOPEN)/g' \
-+ -e 's/@''GNULIB_FFLUSH''@/$(GL_GNULIB_FFLUSH)/g' \
-+ -e 's/@''GNULIB_FGETC''@/$(GL_GNULIB_FGETC)/g' \
-+ -e 's/@''GNULIB_FGETS''@/$(GL_GNULIB_FGETS)/g' \
-+ -e 's/@''GNULIB_FOPEN''@/$(GL_GNULIB_FOPEN)/g' \
-+ -e 's/@''GNULIB_FPRINTF''@/$(GL_GNULIB_FPRINTF)/g' \
-+ -e 's/@''GNULIB_FPRINTF_POSIX''@/$(GL_GNULIB_FPRINTF_POSIX)/g' \
-+ -e 's/@''GNULIB_FPURGE''@/$(GL_GNULIB_FPURGE)/g' \
-+ -e 's/@''GNULIB_FPUTC''@/$(GL_GNULIB_FPUTC)/g' \
-+ -e 's/@''GNULIB_FPUTS''@/$(GL_GNULIB_FPUTS)/g' \
-+ -e 's/@''GNULIB_FREAD''@/$(GL_GNULIB_FREAD)/g' \
-+ -e 's/@''GNULIB_FREOPEN''@/$(GL_GNULIB_FREOPEN)/g' \
-+ -e 's/@''GNULIB_FSCANF''@/$(GL_GNULIB_FSCANF)/g' \
-+ -e 's/@''GNULIB_FSEEK''@/$(GL_GNULIB_FSEEK)/g' \
-+ -e 's/@''GNULIB_FSEEKO''@/$(GL_GNULIB_FSEEKO)/g' \
-+ -e 's/@''GNULIB_FTELL''@/$(GL_GNULIB_FTELL)/g' \
-+ -e 's/@''GNULIB_FTELLO''@/$(GL_GNULIB_FTELLO)/g' \
-+ -e 's/@''GNULIB_FWRITE''@/$(GL_GNULIB_FWRITE)/g' \
-+ -e 's/@''GNULIB_GETC''@/$(GL_GNULIB_GETC)/g' \
-+ -e 's/@''GNULIB_GETCHAR''@/$(GL_GNULIB_GETCHAR)/g' \
-+ -e 's/@''GNULIB_GETDELIM''@/$(GL_GNULIB_GETDELIM)/g' \
-+ -e 's/@''GNULIB_GETLINE''@/$(GL_GNULIB_GETLINE)/g' \
-+ -e 's/@''GNULIB_OBSTACK_PRINTF''@/$(GL_GNULIB_OBSTACK_PRINTF)/g' \
-+ -e 's/@''GNULIB_OBSTACK_PRINTF_POSIX''@/$(GL_GNULIB_OBSTACK_PRINTF_POSIX)/g' \
-+ -e 's/@''GNULIB_PCLOSE''@/$(GL_GNULIB_PCLOSE)/g' \
-+ -e 's/@''GNULIB_PERROR''@/$(GL_GNULIB_PERROR)/g' \
-+ -e 's/@''GNULIB_POPEN''@/$(GL_GNULIB_POPEN)/g' \
-+ -e 's/@''GNULIB_PRINTF''@/$(GL_GNULIB_PRINTF)/g' \
-+ -e 's/@''GNULIB_PRINTF_POSIX''@/$(GL_GNULIB_PRINTF_POSIX)/g' \
-+ -e 's/@''GNULIB_PUTC''@/$(GL_GNULIB_PUTC)/g' \
-+ -e 's/@''GNULIB_PUTCHAR''@/$(GL_GNULIB_PUTCHAR)/g' \
-+ -e 's/@''GNULIB_PUTS''@/$(GL_GNULIB_PUTS)/g' \
-+ -e 's/@''GNULIB_REMOVE''@/$(GL_GNULIB_REMOVE)/g' \
-+ -e 's/@''GNULIB_RENAME''@/$(GL_GNULIB_RENAME)/g' \
-+ -e 's/@''GNULIB_RENAMEAT''@/$(GL_GNULIB_RENAMEAT)/g' \
-+ -e 's/@''GNULIB_SCANF''@/$(GL_GNULIB_SCANF)/g' \
-+ -e 's/@''GNULIB_SNPRINTF''@/$(GL_GNULIB_SNPRINTF)/g' \
-+ -e 's/@''GNULIB_SPRINTF_POSIX''@/$(GL_GNULIB_SPRINTF_POSIX)/g' \
-+ -e 's/@''GNULIB_STDIO_H_NONBLOCKING''@/$(GL_GNULIB_STDIO_H_NONBLOCKING)/g' \
-+ -e 's/@''GNULIB_STDIO_H_SIGPIPE''@/$(GL_GNULIB_STDIO_H_SIGPIPE)/g' \
-+ -e 's/@''GNULIB_TMPFILE''@/$(GL_GNULIB_TMPFILE)/g' \
-+ -e 's/@''GNULIB_VASPRINTF''@/$(GL_GNULIB_VASPRINTF)/g' \
-+ -e 's/@''GNULIB_VDPRINTF''@/$(GL_GNULIB_VDPRINTF)/g' \
-+ -e 's/@''GNULIB_VFPRINTF''@/$(GL_GNULIB_VFPRINTF)/g' \
-+ -e 's/@''GNULIB_VFPRINTF_POSIX''@/$(GL_GNULIB_VFPRINTF_POSIX)/g' \
-+ -e 's/@''GNULIB_VFSCANF''@/$(GL_GNULIB_VFSCANF)/g' \
-+ -e 's/@''GNULIB_VSCANF''@/$(GL_GNULIB_VSCANF)/g' \
-+ -e 's/@''GNULIB_VPRINTF''@/$(GL_GNULIB_VPRINTF)/g' \
-+ -e 's/@''GNULIB_VPRINTF_POSIX''@/$(GL_GNULIB_VPRINTF_POSIX)/g' \
-+ -e 's/@''GNULIB_VSNPRINTF''@/$(GL_GNULIB_VSNPRINTF)/g' \
-+ -e 's/@''GNULIB_VSPRINTF_POSIX''@/$(GL_GNULIB_VSPRINTF_POSIX)/g' \
-+ -e 's/@''GNULIB_MDA_FCLOSEALL''@/$(GL_GNULIB_MDA_FCLOSEALL)/g' \
-+ -e 's/@''GNULIB_MDA_FDOPEN''@/$(GL_GNULIB_MDA_FDOPEN)/g' \
-+ -e 's/@''GNULIB_MDA_FILENO''@/$(GL_GNULIB_MDA_FILENO)/g' \
-+ -e 's/@''GNULIB_MDA_GETW''@/$(GL_GNULIB_MDA_GETW)/g' \
-+ -e 's/@''GNULIB_MDA_PUTW''@/$(GL_GNULIB_MDA_PUTW)/g' \
-+ -e 's/@''GNULIB_MDA_TEMPNAM''@/$(GL_GNULIB_MDA_TEMPNAM)/g' \
- < $(srcdir)/stdio.in.h | \
- sed -e 's|@''HAVE_DECL_FCLOSEALL''@|$(HAVE_DECL_FCLOSEALL)|g' \
- -e 's|@''HAVE_DECL_FPURGE''@|$(HAVE_DECL_FPURGE)|g' \
-@@ -1236,49 +1343,51 @@ stdlib.h: stdlib.in.h $(top_builddir)/co
- -e 's|@''PRAGMA_SYSTEM_HEADER''@|@PRAGMA_SYSTEM_HEADER@|g' \
- -e 's|@''PRAGMA_COLUMNS''@|@PRAGMA_COLUMNS@|g' \
- -e 's|@''NEXT_STDLIB_H''@|$(NEXT_STDLIB_H)|g' \
-- -e 's/@''GNULIB__EXIT''@/$(GNULIB__EXIT)/g' \
-- -e 's/@''GNULIB_ALIGNED_ALLOC''@/$(GNULIB_ALIGNED_ALLOC)/g' \
-- -e 's/@''GNULIB_ATOLL''@/$(GNULIB_ATOLL)/g' \
-- -e 's/@''GNULIB_CALLOC_POSIX''@/$(GNULIB_CALLOC_POSIX)/g' \
-- -e 's/@''GNULIB_CANONICALIZE_FILE_NAME''@/$(GNULIB_CANONICALIZE_FILE_NAME)/g' \
-- -e 's/@''GNULIB_FREE_POSIX''@/$(GNULIB_FREE_POSIX)/g' \
-- -e 's/@''GNULIB_GETLOADAVG''@/$(GNULIB_GETLOADAVG)/g' \
-- -e 's/@''GNULIB_GETSUBOPT''@/$(GNULIB_GETSUBOPT)/g' \
-- -e 's/@''GNULIB_GRANTPT''@/$(GNULIB_GRANTPT)/g' \
-- -e 's/@''GNULIB_MALLOC_POSIX''@/$(GNULIB_MALLOC_POSIX)/g' \
-- -e 's/@''GNULIB_MBTOWC''@/$(GNULIB_MBTOWC)/g' \
-- -e 's/@''GNULIB_MKDTEMP''@/$(GNULIB_MKDTEMP)/g' \
-- -e 's/@''GNULIB_MKOSTEMP''@/$(GNULIB_MKOSTEMP)/g' \
-- -e 's/@''GNULIB_MKOSTEMPS''@/$(GNULIB_MKOSTEMPS)/g' \
-- -e 's/@''GNULIB_MKSTEMP''@/$(GNULIB_MKSTEMP)/g' \
-- -e 's/@''GNULIB_MKSTEMPS''@/$(GNULIB_MKSTEMPS)/g' \
-- -e 's/@''GNULIB_POSIX_MEMALIGN''@/$(GNULIB_POSIX_MEMALIGN)/g' \
-- -e 's/@''GNULIB_POSIX_OPENPT''@/$(GNULIB_POSIX_OPENPT)/g' \
-- -e 's/@''GNULIB_PTSNAME''@/$(GNULIB_PTSNAME)/g' \
-- -e 's/@''GNULIB_PTSNAME_R''@/$(GNULIB_PTSNAME_R)/g' \
-- -e 's/@''GNULIB_PUTENV''@/$(GNULIB_PUTENV)/g' \
-- -e 's/@''GNULIB_QSORT_R''@/$(GNULIB_QSORT_R)/g' \
-- -e 's/@''GNULIB_RANDOM''@/$(GNULIB_RANDOM)/g' \
-- -e 's/@''GNULIB_RANDOM_R''@/$(GNULIB_RANDOM_R)/g' \
-- -e 's/@''GNULIB_REALLOC_POSIX''@/$(GNULIB_REALLOC_POSIX)/g' \
-- -e 's/@''GNULIB_REALLOCARRAY''@/$(GNULIB_REALLOCARRAY)/g' \
-- -e 's/@''GNULIB_REALPATH''@/$(GNULIB_REALPATH)/g' \
-- -e 's/@''GNULIB_RPMATCH''@/$(GNULIB_RPMATCH)/g' \
-- -e 's/@''GNULIB_SECURE_GETENV''@/$(GNULIB_SECURE_GETENV)/g' \
-- -e 's/@''GNULIB_SETENV''@/$(GNULIB_SETENV)/g' \
-- -e 's/@''GNULIB_STRTOD''@/$(GNULIB_STRTOD)/g' \
-- -e 's/@''GNULIB_STRTOLD''@/$(GNULIB_STRTOLD)/g' \
-- -e 's/@''GNULIB_STRTOLL''@/$(GNULIB_STRTOLL)/g' \
-- -e 's/@''GNULIB_STRTOULL''@/$(GNULIB_STRTOULL)/g' \
-- -e 's/@''GNULIB_SYSTEM_POSIX''@/$(GNULIB_SYSTEM_POSIX)/g' \
-- -e 's/@''GNULIB_UNLOCKPT''@/$(GNULIB_UNLOCKPT)/g' \
-- -e 's/@''GNULIB_UNSETENV''@/$(GNULIB_UNSETENV)/g' \
-- -e 's/@''GNULIB_WCTOMB''@/$(GNULIB_WCTOMB)/g' \
-- -e 's/@''GNULIB_MDA_ECVT''@/$(GNULIB_MDA_ECVT)/g' \
-- -e 's/@''GNULIB_MDA_FCVT''@/$(GNULIB_MDA_FCVT)/g' \
-- -e 's/@''GNULIB_MDA_GCVT''@/$(GNULIB_MDA_GCVT)/g' \
-- -e 's/@''GNULIB_MDA_MKTEMP''@/$(GNULIB_MDA_MKTEMP)/g' \
-- -e 's/@''GNULIB_MDA_PUTENV''@/$(GNULIB_MDA_PUTENV)/g' \
-+ -e 's/@''GNULIB__EXIT''@/$(GL_GNULIB__EXIT)/g' \
-+ -e 's/@''GNULIB_ALIGNED_ALLOC''@/$(GL_GNULIB_ALIGNED_ALLOC)/g' \
-+ -e 's/@''GNULIB_ATOLL''@/$(GL_GNULIB_ATOLL)/g' \
-+ -e 's/@''GNULIB_CALLOC_POSIX''@/$(GL_GNULIB_CALLOC_POSIX)/g' \
-+ -e 's/@''GNULIB_CANONICALIZE_FILE_NAME''@/$(GL_GNULIB_CANONICALIZE_FILE_NAME)/g' \
-+ -e 's/@''GNULIB_FREE_POSIX''@/$(GL_GNULIB_FREE_POSIX)/g' \
-+ -e 's/@''GNULIB_GETLOADAVG''@/$(GL_GNULIB_GETLOADAVG)/g' \
-+ -e 's/@''GNULIB_GETSUBOPT''@/$(GL_GNULIB_GETSUBOPT)/g' \
-+ -e 's/@''GNULIB_GRANTPT''@/$(GL_GNULIB_GRANTPT)/g' \
-+ -e 's/@''GNULIB_MALLOC_POSIX''@/$(GL_GNULIB_MALLOC_POSIX)/g' \
-+ -e 's/@''GNULIB_MBTOWC''@/$(GL_GNULIB_MBTOWC)/g' \
-+ -e 's/@''GNULIB_MKDTEMP''@/$(GL_GNULIB_MKDTEMP)/g' \
-+ -e 's/@''GNULIB_MKOSTEMP''@/$(GL_GNULIB_MKOSTEMP)/g' \
-+ -e 's/@''GNULIB_MKOSTEMPS''@/$(GL_GNULIB_MKOSTEMPS)/g' \
-+ -e 's/@''GNULIB_MKSTEMP''@/$(GL_GNULIB_MKSTEMP)/g' \
-+ -e 's/@''GNULIB_MKSTEMPS''@/$(GL_GNULIB_MKSTEMPS)/g' \
-+ -e 's/@''GNULIB_POSIX_MEMALIGN''@/$(GL_GNULIB_POSIX_MEMALIGN)/g' \
-+ -e 's/@''GNULIB_POSIX_OPENPT''@/$(GL_GNULIB_POSIX_OPENPT)/g' \
-+ -e 's/@''GNULIB_PTSNAME''@/$(GL_GNULIB_PTSNAME)/g' \
-+ -e 's/@''GNULIB_PTSNAME_R''@/$(GL_GNULIB_PTSNAME_R)/g' \
-+ -e 's/@''GNULIB_PUTENV''@/$(GL_GNULIB_PUTENV)/g' \
-+ -e 's/@''GNULIB_QSORT_R''@/$(GL_GNULIB_QSORT_R)/g' \
-+ -e 's/@''GNULIB_RANDOM''@/$(GL_GNULIB_RANDOM)/g' \
-+ -e 's/@''GNULIB_RANDOM_R''@/$(GL_GNULIB_RANDOM_R)/g' \
-+ -e 's/@''GNULIB_REALLOC_POSIX''@/$(GL_GNULIB_REALLOC_POSIX)/g' \
-+ -e 's/@''GNULIB_REALLOCARRAY''@/$(GL_GNULIB_REALLOCARRAY)/g' \
-+ -e 's/@''GNULIB_REALPATH''@/$(GL_GNULIB_REALPATH)/g' \
-+ -e 's/@''GNULIB_RPMATCH''@/$(GL_GNULIB_RPMATCH)/g' \
-+ -e 's/@''GNULIB_SECURE_GETENV''@/$(GL_GNULIB_SECURE_GETENV)/g' \
-+ -e 's/@''GNULIB_SETENV''@/$(GL_GNULIB_SETENV)/g' \
-+ -e 's/@''GNULIB_STRTOD''@/$(GL_GNULIB_STRTOD)/g' \
-+ -e 's/@''GNULIB_STRTOL''@/$(GL_GNULIB_STRTOL)/g' \
-+ -e 's/@''GNULIB_STRTOLD''@/$(GL_GNULIB_STRTOLD)/g' \
-+ -e 's/@''GNULIB_STRTOLL''@/$(GL_GNULIB_STRTOLL)/g' \
-+ -e 's/@''GNULIB_STRTOUL''@/$(GL_GNULIB_STRTOUL)/g' \
-+ -e 's/@''GNULIB_STRTOULL''@/$(GL_GNULIB_STRTOULL)/g' \
-+ -e 's/@''GNULIB_SYSTEM_POSIX''@/$(GL_GNULIB_SYSTEM_POSIX)/g' \
-+ -e 's/@''GNULIB_UNLOCKPT''@/$(GL_GNULIB_UNLOCKPT)/g' \
-+ -e 's/@''GNULIB_UNSETENV''@/$(GL_GNULIB_UNSETENV)/g' \
-+ -e 's/@''GNULIB_WCTOMB''@/$(GL_GNULIB_WCTOMB)/g' \
-+ -e 's/@''GNULIB_MDA_ECVT''@/$(GL_GNULIB_MDA_ECVT)/g' \
-+ -e 's/@''GNULIB_MDA_FCVT''@/$(GL_GNULIB_MDA_FCVT)/g' \
-+ -e 's/@''GNULIB_MDA_GCVT''@/$(GL_GNULIB_MDA_GCVT)/g' \
-+ -e 's/@''GNULIB_MDA_MKTEMP''@/$(GL_GNULIB_MDA_MKTEMP)/g' \
-+ -e 's/@''GNULIB_MDA_PUTENV''@/$(GL_GNULIB_MDA_PUTENV)/g' \
- < $(srcdir)/stdlib.in.h | \
- sed -e 's|@''HAVE__EXIT''@|$(HAVE__EXIT)|g' \
- -e 's|@''HAVE_ALIGNED_ALLOC''@|$(HAVE_ALIGNED_ALLOC)|g' \
-@@ -1314,8 +1423,10 @@ stdlib.h: stdlib.in.h $(top_builddir)/co
- -e 's|@''HAVE_SETSTATE''@|$(HAVE_SETSTATE)|g' \
- -e 's|@''HAVE_DECL_SETSTATE''@|$(HAVE_DECL_SETSTATE)|g' \
- -e 's|@''HAVE_STRTOD''@|$(HAVE_STRTOD)|g' \
-+ -e 's|@''HAVE_STRTOL''@|$(HAVE_STRTOL)|g' \
- -e 's|@''HAVE_STRTOLD''@|$(HAVE_STRTOLD)|g' \
- -e 's|@''HAVE_STRTOLL''@|$(HAVE_STRTOLL)|g' \
-+ -e 's|@''HAVE_STRTOUL''@|$(HAVE_STRTOUL)|g' \
- -e 's|@''HAVE_STRTOULL''@|$(HAVE_STRTOULL)|g' \
- -e 's|@''HAVE_STRUCT_RANDOM_DATA''@|$(HAVE_STRUCT_RANDOM_DATA)|g' \
- -e 's|@''HAVE_SYS_LOADAVG_H''@|$(HAVE_SYS_LOADAVG_H)|g' \
-@@ -1337,11 +1448,16 @@ stdlib.h: stdlib.in.h $(top_builddir)/co
- -e 's|@''REPLACE_RANDOM''@|$(REPLACE_RANDOM)|g' \
- -e 's|@''REPLACE_RANDOM_R''@|$(REPLACE_RANDOM_R)|g' \
- -e 's|@''REPLACE_REALLOC''@|$(REPLACE_REALLOC)|g' \
-+ -e 's|@''REPLACE_REALLOCARRAY''@|$(REPLACE_REALLOCARRAY)|g' \
- -e 's|@''REPLACE_REALPATH''@|$(REPLACE_REALPATH)|g' \
- -e 's|@''REPLACE_SETENV''@|$(REPLACE_SETENV)|g' \
- -e 's|@''REPLACE_SETSTATE''@|$(REPLACE_SETSTATE)|g' \
- -e 's|@''REPLACE_STRTOD''@|$(REPLACE_STRTOD)|g' \
-+ -e 's|@''REPLACE_STRTOL''@|$(REPLACE_STRTOL)|g' \
- -e 's|@''REPLACE_STRTOLD''@|$(REPLACE_STRTOLD)|g' \
-+ -e 's|@''REPLACE_STRTOLL''@|$(REPLACE_STRTOLL)|g' \
-+ -e 's|@''REPLACE_STRTOUL''@|$(REPLACE_STRTOUL)|g' \
-+ -e 's|@''REPLACE_STRTOULL''@|$(REPLACE_STRTOULL)|g' \
- -e 's|@''REPLACE_UNSETENV''@|$(REPLACE_UNSETENV)|g' \
- -e 's|@''REPLACE_WCTOMB''@|$(REPLACE_WCTOMB)|g' \
- -e '/definitions of _GL_FUNCDECL_RPL/r $(CXXDEFS_H)' \
-@@ -1444,49 +1560,49 @@ string.h: string.in.h $(top_builddir)/co
- -e 's|@''PRAGMA_SYSTEM_HEADER''@|@PRAGMA_SYSTEM_HEADER@|g' \
- -e 's|@''PRAGMA_COLUMNS''@|@PRAGMA_COLUMNS@|g' \
- -e 's|@''NEXT_STRING_H''@|$(NEXT_STRING_H)|g' \
-- -e 's/@''GNULIB_EXPLICIT_BZERO''@/$(GNULIB_EXPLICIT_BZERO)/g' \
-- -e 's/@''GNULIB_FFSL''@/$(GNULIB_FFSL)/g' \
-- -e 's/@''GNULIB_FFSLL''@/$(GNULIB_FFSLL)/g' \
-- -e 's/@''GNULIB_MBSLEN''@/$(GNULIB_MBSLEN)/g' \
-- -e 's/@''GNULIB_MBSNLEN''@/$(GNULIB_MBSNLEN)/g' \
-- -e 's/@''GNULIB_MBSCHR''@/$(GNULIB_MBSCHR)/g' \
-- -e 's/@''GNULIB_MBSRCHR''@/$(GNULIB_MBSRCHR)/g' \
-- -e 's/@''GNULIB_MBSSTR''@/$(GNULIB_MBSSTR)/g' \
-- -e 's/@''GNULIB_MBSCASECMP''@/$(GNULIB_MBSCASECMP)/g' \
-- -e 's/@''GNULIB_MBSNCASECMP''@/$(GNULIB_MBSNCASECMP)/g' \
-- -e 's/@''GNULIB_MBSPCASECMP''@/$(GNULIB_MBSPCASECMP)/g' \
-- -e 's/@''GNULIB_MBSCASESTR''@/$(GNULIB_MBSCASESTR)/g' \
-- -e 's/@''GNULIB_MBSCSPN''@/$(GNULIB_MBSCSPN)/g' \
-- -e 's/@''GNULIB_MBSPBRK''@/$(GNULIB_MBSPBRK)/g' \
-- -e 's/@''GNULIB_MBSSPN''@/$(GNULIB_MBSSPN)/g' \
-- -e 's/@''GNULIB_MBSSEP''@/$(GNULIB_MBSSEP)/g' \
-- -e 's/@''GNULIB_MBSTOK_R''@/$(GNULIB_MBSTOK_R)/g' \
-- -e 's/@''GNULIB_MEMCHR''@/$(GNULIB_MEMCHR)/g' \
-- -e 's/@''GNULIB_MEMMEM''@/$(GNULIB_MEMMEM)/g' \
-- -e 's/@''GNULIB_MEMPCPY''@/$(GNULIB_MEMPCPY)/g' \
-- -e 's/@''GNULIB_MEMRCHR''@/$(GNULIB_MEMRCHR)/g' \
-- -e 's/@''GNULIB_RAWMEMCHR''@/$(GNULIB_RAWMEMCHR)/g' \
-- -e 's/@''GNULIB_STPCPY''@/$(GNULIB_STPCPY)/g' \
-- -e 's/@''GNULIB_STPNCPY''@/$(GNULIB_STPNCPY)/g' \
-- -e 's/@''GNULIB_STRCHRNUL''@/$(GNULIB_STRCHRNUL)/g' \
-- -e 's/@''GNULIB_STRDUP''@/$(GNULIB_STRDUP)/g' \
-- -e 's/@''GNULIB_STRNCAT''@/$(GNULIB_STRNCAT)/g' \
-- -e 's/@''GNULIB_STRNDUP''@/$(GNULIB_STRNDUP)/g' \
-- -e 's/@''GNULIB_STRNLEN''@/$(GNULIB_STRNLEN)/g' \
-- -e 's/@''GNULIB_STRPBRK''@/$(GNULIB_STRPBRK)/g' \
-- -e 's/@''GNULIB_STRSEP''@/$(GNULIB_STRSEP)/g' \
-- -e 's/@''GNULIB_STRSTR''@/$(GNULIB_STRSTR)/g' \
-- -e 's/@''GNULIB_STRCASESTR''@/$(GNULIB_STRCASESTR)/g' \
-- -e 's/@''GNULIB_STRTOK_R''@/$(GNULIB_STRTOK_R)/g' \
-- -e 's/@''GNULIB_STRERROR''@/$(GNULIB_STRERROR)/g' \
-- -e 's/@''GNULIB_STRERROR_R''@/$(GNULIB_STRERROR_R)/g' \
-- -e 's/@''GNULIB_STRERRORNAME_NP''@/$(GNULIB_STRERRORNAME_NP)/g' \
-- -e 's/@''GNULIB_SIGABBREV_NP''@/$(GNULIB_SIGABBREV_NP)/g' \
-- -e 's/@''GNULIB_SIGDESCR_NP''@/$(GNULIB_SIGDESCR_NP)/g' \
-- -e 's/@''GNULIB_STRSIGNAL''@/$(GNULIB_STRSIGNAL)/g' \
-- -e 's/@''GNULIB_STRVERSCMP''@/$(GNULIB_STRVERSCMP)/g' \
-- -e 's/@''GNULIB_MDA_MEMCCPY''@/$(GNULIB_MDA_MEMCCPY)/g' \
-- -e 's/@''GNULIB_MDA_STRDUP''@/$(GNULIB_MDA_STRDUP)/g' \
-+ -e 's/@''GNULIB_EXPLICIT_BZERO''@/$(GL_GNULIB_EXPLICIT_BZERO)/g' \
-+ -e 's/@''GNULIB_FFSL''@/$(GL_GNULIB_FFSL)/g' \
-+ -e 's/@''GNULIB_FFSLL''@/$(GL_GNULIB_FFSLL)/g' \
-+ -e 's/@''GNULIB_MBSLEN''@/$(GL_GNULIB_MBSLEN)/g' \
-+ -e 's/@''GNULIB_MBSNLEN''@/$(GL_GNULIB_MBSNLEN)/g' \
-+ -e 's/@''GNULIB_MBSCHR''@/$(GL_GNULIB_MBSCHR)/g' \
-+ -e 's/@''GNULIB_MBSRCHR''@/$(GL_GNULIB_MBSRCHR)/g' \
-+ -e 's/@''GNULIB_MBSSTR''@/$(GL_GNULIB_MBSSTR)/g' \
-+ -e 's/@''GNULIB_MBSCASECMP''@/$(GL_GNULIB_MBSCASECMP)/g' \
-+ -e 's/@''GNULIB_MBSNCASECMP''@/$(GL_GNULIB_MBSNCASECMP)/g' \
-+ -e 's/@''GNULIB_MBSPCASECMP''@/$(GL_GNULIB_MBSPCASECMP)/g' \
-+ -e 's/@''GNULIB_MBSCASESTR''@/$(GL_GNULIB_MBSCASESTR)/g' \
-+ -e 's/@''GNULIB_MBSCSPN''@/$(GL_GNULIB_MBSCSPN)/g' \
-+ -e 's/@''GNULIB_MBSPBRK''@/$(GL_GNULIB_MBSPBRK)/g' \
-+ -e 's/@''GNULIB_MBSSPN''@/$(GL_GNULIB_MBSSPN)/g' \
-+ -e 's/@''GNULIB_MBSSEP''@/$(GL_GNULIB_MBSSEP)/g' \
-+ -e 's/@''GNULIB_MBSTOK_R''@/$(GL_GNULIB_MBSTOK_R)/g' \
-+ -e 's/@''GNULIB_MEMCHR''@/$(GL_GNULIB_MEMCHR)/g' \
-+ -e 's/@''GNULIB_MEMMEM''@/$(GL_GNULIB_MEMMEM)/g' \
-+ -e 's/@''GNULIB_MEMPCPY''@/$(GL_GNULIB_MEMPCPY)/g' \
-+ -e 's/@''GNULIB_MEMRCHR''@/$(GL_GNULIB_MEMRCHR)/g' \
-+ -e 's/@''GNULIB_RAWMEMCHR''@/$(GL_GNULIB_RAWMEMCHR)/g' \
-+ -e 's/@''GNULIB_STPCPY''@/$(GL_GNULIB_STPCPY)/g' \
-+ -e 's/@''GNULIB_STPNCPY''@/$(GL_GNULIB_STPNCPY)/g' \
-+ -e 's/@''GNULIB_STRCHRNUL''@/$(GL_GNULIB_STRCHRNUL)/g' \
-+ -e 's/@''GNULIB_STRDUP''@/$(GL_GNULIB_STRDUP)/g' \
-+ -e 's/@''GNULIB_STRNCAT''@/$(GL_GNULIB_STRNCAT)/g' \
-+ -e 's/@''GNULIB_STRNDUP''@/$(GL_GNULIB_STRNDUP)/g' \
-+ -e 's/@''GNULIB_STRNLEN''@/$(GL_GNULIB_STRNLEN)/g' \
-+ -e 's/@''GNULIB_STRPBRK''@/$(GL_GNULIB_STRPBRK)/g' \
-+ -e 's/@''GNULIB_STRSEP''@/$(GL_GNULIB_STRSEP)/g' \
-+ -e 's/@''GNULIB_STRSTR''@/$(GL_GNULIB_STRSTR)/g' \
-+ -e 's/@''GNULIB_STRCASESTR''@/$(GL_GNULIB_STRCASESTR)/g' \
-+ -e 's/@''GNULIB_STRTOK_R''@/$(GL_GNULIB_STRTOK_R)/g' \
-+ -e 's/@''GNULIB_STRERROR''@/$(GL_GNULIB_STRERROR)/g' \
-+ -e 's/@''GNULIB_STRERROR_R''@/$(GL_GNULIB_STRERROR_R)/g' \
-+ -e 's/@''GNULIB_STRERRORNAME_NP''@/$(GL_GNULIB_STRERRORNAME_NP)/g' \
-+ -e 's/@''GNULIB_SIGABBREV_NP''@/$(GL_GNULIB_SIGABBREV_NP)/g' \
-+ -e 's/@''GNULIB_SIGDESCR_NP''@/$(GL_GNULIB_SIGDESCR_NP)/g' \
-+ -e 's/@''GNULIB_STRSIGNAL''@/$(GL_GNULIB_STRSIGNAL)/g' \
-+ -e 's/@''GNULIB_STRVERSCMP''@/$(GL_GNULIB_STRVERSCMP)/g' \
-+ -e 's/@''GNULIB_MDA_MEMCCPY''@/$(GL_GNULIB_MDA_MEMCCPY)/g' \
-+ -e 's/@''GNULIB_MDA_STRDUP''@/$(GL_GNULIB_MDA_STRDUP)/g' \
- < $(srcdir)/string.in.h | \
- sed -e 's|@''HAVE_EXPLICIT_BZERO''@|$(HAVE_EXPLICIT_BZERO)|g' \
- -e 's|@''HAVE_FFSL''@|$(HAVE_FFSL)|g' \
-@@ -1556,7 +1672,7 @@ strings.h: strings.in.h $(top_builddir)/
- -e 's|@''PRAGMA_SYSTEM_HEADER''@|@PRAGMA_SYSTEM_HEADER@|g' \
- -e 's|@''PRAGMA_COLUMNS''@|@PRAGMA_COLUMNS@|g' \
- -e 's|@''NEXT_STRINGS_H''@|$(NEXT_STRINGS_H)|g' \
-- -e 's/@''GNULIB_FFS''@/$(GNULIB_FFS)/g' \
-+ -e 's/@''GNULIB_FFS''@/$(GL_GNULIB_FFS)/g' \
- -e 's|@''HAVE_FFS''@|$(HAVE_FFS)|g' \
- -e 's|@''HAVE_STRCASECMP''@|$(HAVE_STRCASECMP)|g' \
- -e 's|@''HAVE_DECL_STRNCASECMP''@|$(HAVE_DECL_STRNCASECMP)|g' \
-@@ -1638,25 +1754,25 @@ sys/stat.h: sys_stat.in.h $(top_builddir
- -e 's|@''NEXT_SYS_STAT_H''@|$(NEXT_SYS_STAT_H)|g' \
- -e 's|@''WINDOWS_64_BIT_ST_SIZE''@|$(WINDOWS_64_BIT_ST_SIZE)|g' \
- -e 's|@''WINDOWS_STAT_TIMESPEC''@|$(WINDOWS_STAT_TIMESPEC)|g' \
-- -e 's/@''GNULIB_FCHMODAT''@/$(GNULIB_FCHMODAT)/g' \
-- -e 's/@''GNULIB_FSTAT''@/$(GNULIB_FSTAT)/g' \
-- -e 's/@''GNULIB_FSTATAT''@/$(GNULIB_FSTATAT)/g' \
-- -e 's/@''GNULIB_FUTIMENS''@/$(GNULIB_FUTIMENS)/g' \
-- -e 's/@''GNULIB_GETUMASK''@/$(GNULIB_GETUMASK)/g' \
-- -e 's/@''GNULIB_LCHMOD''@/$(GNULIB_LCHMOD)/g' \
-- -e 's/@''GNULIB_LSTAT''@/$(GNULIB_LSTAT)/g' \
-- -e 's/@''GNULIB_MKDIR''@/$(GNULIB_MKDIR)/g' \
-- -e 's/@''GNULIB_MKDIRAT''@/$(GNULIB_MKDIRAT)/g' \
-- -e 's/@''GNULIB_MKFIFO''@/$(GNULIB_MKFIFO)/g' \
-- -e 's/@''GNULIB_MKFIFOAT''@/$(GNULIB_MKFIFOAT)/g' \
-- -e 's/@''GNULIB_MKNOD''@/$(GNULIB_MKNOD)/g' \
-- -e 's/@''GNULIB_MKNODAT''@/$(GNULIB_MKNODAT)/g' \
-- -e 's/@''GNULIB_STAT''@/$(GNULIB_STAT)/g' \
-- -e 's/@''GNULIB_UTIMENSAT''@/$(GNULIB_UTIMENSAT)/g' \
-- -e 's/@''GNULIB_OVERRIDES_STRUCT_STAT''@/$(GNULIB_OVERRIDES_STRUCT_STAT)/g' \
-- -e 's/@''GNULIB_MDA_CHMOD''@/$(GNULIB_MDA_CHMOD)/g' \
-- -e 's/@''GNULIB_MDA_MKDIR''@/$(GNULIB_MDA_MKDIR)/g' \
-- -e 's/@''GNULIB_MDA_UMASK''@/$(GNULIB_MDA_UMASK)/g' \
-+ -e 's/@''GNULIB_FCHMODAT''@/$(GL_GNULIB_FCHMODAT)/g' \
-+ -e 's/@''GNULIB_FSTAT''@/$(GL_GNULIB_FSTAT)/g' \
-+ -e 's/@''GNULIB_FSTATAT''@/$(GL_GNULIB_FSTATAT)/g' \
-+ -e 's/@''GNULIB_FUTIMENS''@/$(GL_GNULIB_FUTIMENS)/g' \
-+ -e 's/@''GNULIB_GETUMASK''@/$(GL_GNULIB_GETUMASK)/g' \
-+ -e 's/@''GNULIB_LCHMOD''@/$(GL_GNULIB_LCHMOD)/g' \
-+ -e 's/@''GNULIB_LSTAT''@/$(GL_GNULIB_LSTAT)/g' \
-+ -e 's/@''GNULIB_MKDIR''@/$(GL_GNULIB_MKDIR)/g' \
-+ -e 's/@''GNULIB_MKDIRAT''@/$(GL_GNULIB_MKDIRAT)/g' \
-+ -e 's/@''GNULIB_MKFIFO''@/$(GL_GNULIB_MKFIFO)/g' \
-+ -e 's/@''GNULIB_MKFIFOAT''@/$(GL_GNULIB_MKFIFOAT)/g' \
-+ -e 's/@''GNULIB_MKNOD''@/$(GL_GNULIB_MKNOD)/g' \
-+ -e 's/@''GNULIB_MKNODAT''@/$(GL_GNULIB_MKNODAT)/g' \
-+ -e 's/@''GNULIB_STAT''@/$(GL_GNULIB_STAT)/g' \
-+ -e 's/@''GNULIB_UTIMENSAT''@/$(GL_GNULIB_UTIMENSAT)/g' \
-+ -e 's/@''GNULIB_OVERRIDES_STRUCT_STAT''@/$(GL_GNULIB_OVERRIDES_STRUCT_STAT)/g' \
-+ -e 's/@''GNULIB_MDA_CHMOD''@/$(GL_GNULIB_MDA_CHMOD)/g' \
-+ -e 's/@''GNULIB_MDA_MKDIR''@/$(GL_GNULIB_MDA_MKDIR)/g' \
-+ -e 's/@''GNULIB_MDA_UMASK''@/$(GL_GNULIB_MDA_UMASK)/g' \
- -e 's|@''HAVE_FCHMODAT''@|$(HAVE_FCHMODAT)|g' \
- -e 's|@''HAVE_FSTATAT''@|$(HAVE_FSTATAT)|g' \
- -e 's|@''HAVE_FUTIMENS''@|$(HAVE_FUTIMENS)|g' \
-@@ -1744,18 +1860,18 @@ time.h: time.in.h $(top_builddir)/config
- -e 's|@''PRAGMA_SYSTEM_HEADER''@|@PRAGMA_SYSTEM_HEADER@|g' \
- -e 's|@''PRAGMA_COLUMNS''@|@PRAGMA_COLUMNS@|g' \
- -e 's|@''NEXT_TIME_H''@|$(NEXT_TIME_H)|g' \
-- -e 's/@''GNULIB_CTIME''@/$(GNULIB_CTIME)/g' \
-- -e 's/@''GNULIB_LOCALTIME''@/$(GNULIB_LOCALTIME)/g' \
-- -e 's/@''GNULIB_MKTIME''@/$(GNULIB_MKTIME)/g' \
-- -e 's/@''GNULIB_NANOSLEEP''@/$(GNULIB_NANOSLEEP)/g' \
-- -e 's/@''GNULIB_STRFTIME''@/$(GNULIB_STRFTIME)/g' \
-- -e 's/@''GNULIB_STRPTIME''@/$(GNULIB_STRPTIME)/g' \
-- -e 's/@''GNULIB_TIMEGM''@/$(GNULIB_TIMEGM)/g' \
-- -e 's/@''GNULIB_TIMESPEC_GET''@/$(GNULIB_TIMESPEC_GET)/g' \
-- -e 's/@''GNULIB_TIME_R''@/$(GNULIB_TIME_R)/g' \
-- -e 's/@''GNULIB_TIME_RZ''@/$(GNULIB_TIME_RZ)/g' \
-- -e 's/@''GNULIB_TZSET''@/$(GNULIB_TZSET)/g' \
-- -e 's/@''GNULIB_MDA_TZSET''@/$(GNULIB_MDA_TZSET)/g' \
-+ -e 's/@''GNULIB_CTIME''@/$(GL_GNULIB_CTIME)/g' \
-+ -e 's/@''GNULIB_LOCALTIME''@/$(GL_GNULIB_LOCALTIME)/g' \
-+ -e 's/@''GNULIB_MKTIME''@/$(GL_GNULIB_MKTIME)/g' \
-+ -e 's/@''GNULIB_NANOSLEEP''@/$(GL_GNULIB_NANOSLEEP)/g' \
-+ -e 's/@''GNULIB_STRFTIME''@/$(GL_GNULIB_STRFTIME)/g' \
-+ -e 's/@''GNULIB_STRPTIME''@/$(GL_GNULIB_STRPTIME)/g' \
-+ -e 's/@''GNULIB_TIMEGM''@/$(GL_GNULIB_TIMEGM)/g' \
-+ -e 's/@''GNULIB_TIMESPEC_GET''@/$(GL_GNULIB_TIMESPEC_GET)/g' \
-+ -e 's/@''GNULIB_TIME_R''@/$(GL_GNULIB_TIME_R)/g' \
-+ -e 's/@''GNULIB_TIME_RZ''@/$(GL_GNULIB_TIME_RZ)/g' \
-+ -e 's/@''GNULIB_TZSET''@/$(GL_GNULIB_TZSET)/g' \
-+ -e 's/@''GNULIB_MDA_TZSET''@/$(GL_GNULIB_MDA_TZSET)/g' \
- -e 's|@''HAVE_DECL_LOCALTIME_R''@|$(HAVE_DECL_LOCALTIME_R)|g' \
- -e 's|@''HAVE_NANOSLEEP''@|$(HAVE_NANOSLEEP)|g' \
- -e 's|@''HAVE_STRPTIME''@|$(HAVE_STRPTIME)|g' \
-@@ -1806,89 +1922,89 @@ unistd.h: unistd.in.h $(top_builddir)/co
- -e 's|@''PRAGMA_COLUMNS''@|@PRAGMA_COLUMNS@|g' \
- -e 's|@''NEXT_UNISTD_H''@|$(NEXT_UNISTD_H)|g' \
- -e 's|@''WINDOWS_64_BIT_OFF_T''@|$(WINDOWS_64_BIT_OFF_T)|g' \
-- -e 's/@''GNULIB_ACCESS''@/$(GNULIB_ACCESS)/g' \
-- -e 's/@''GNULIB_CHDIR''@/$(GNULIB_CHDIR)/g' \
-- -e 's/@''GNULIB_CHOWN''@/$(GNULIB_CHOWN)/g' \
-- -e 's/@''GNULIB_CLOSE''@/$(GNULIB_CLOSE)/g' \
-- -e 's/@''GNULIB_COPY_FILE_RANGE''@/$(GNULIB_COPY_FILE_RANGE)/g' \
-- -e 's/@''GNULIB_DUP''@/$(GNULIB_DUP)/g' \
-- -e 's/@''GNULIB_DUP2''@/$(GNULIB_DUP2)/g' \
-- -e 's/@''GNULIB_DUP3''@/$(GNULIB_DUP3)/g' \
-- -e 's/@''GNULIB_ENVIRON''@/$(GNULIB_ENVIRON)/g' \
-- -e 's/@''GNULIB_EUIDACCESS''@/$(GNULIB_EUIDACCESS)/g' \
-- -e 's/@''GNULIB_EXECL''@/$(GNULIB_EXECL)/g' \
-- -e 's/@''GNULIB_EXECLE''@/$(GNULIB_EXECLE)/g' \
-- -e 's/@''GNULIB_EXECLP''@/$(GNULIB_EXECLP)/g' \
-- -e 's/@''GNULIB_EXECV''@/$(GNULIB_EXECV)/g' \
-- -e 's/@''GNULIB_EXECVE''@/$(GNULIB_EXECVE)/g' \
-- -e 's/@''GNULIB_EXECVP''@/$(GNULIB_EXECVP)/g' \
-- -e 's/@''GNULIB_EXECVPE''@/$(GNULIB_EXECVPE)/g' \
-- -e 's/@''GNULIB_FACCESSAT''@/$(GNULIB_FACCESSAT)/g' \
-- -e 's/@''GNULIB_FCHDIR''@/$(GNULIB_FCHDIR)/g' \
-- -e 's/@''GNULIB_FCHOWNAT''@/$(GNULIB_FCHOWNAT)/g' \
-- -e 's/@''GNULIB_FDATASYNC''@/$(GNULIB_FDATASYNC)/g' \
-- -e 's/@''GNULIB_FSYNC''@/$(GNULIB_FSYNC)/g' \
-- -e 's/@''GNULIB_FTRUNCATE''@/$(GNULIB_FTRUNCATE)/g' \
-- -e 's/@''GNULIB_GETCWD''@/$(GNULIB_GETCWD)/g' \
-- -e 's/@''GNULIB_GETDOMAINNAME''@/$(GNULIB_GETDOMAINNAME)/g' \
-- -e 's/@''GNULIB_GETDTABLESIZE''@/$(GNULIB_GETDTABLESIZE)/g' \
-- -e 's/@''GNULIB_GETENTROPY''@/$(GNULIB_GETENTROPY)/g' \
-- -e 's/@''GNULIB_GETGROUPS''@/$(GNULIB_GETGROUPS)/g' \
-- -e 's/@''GNULIB_GETHOSTNAME''@/$(GNULIB_GETHOSTNAME)/g' \
-- -e 's/@''GNULIB_GETLOGIN''@/$(GNULIB_GETLOGIN)/g' \
-- -e 's/@''GNULIB_GETLOGIN_R''@/$(GNULIB_GETLOGIN_R)/g' \
-- -e 's/@''GNULIB_GETOPT_POSIX''@/$(GNULIB_GETOPT_POSIX)/g' \
-- -e 's/@''GNULIB_GETPAGESIZE''@/$(GNULIB_GETPAGESIZE)/g' \
-- -e 's/@''GNULIB_GETPASS''@/$(GNULIB_GETPASS)/g' \
-- -e 's/@''GNULIB_GETUSERSHELL''@/$(GNULIB_GETUSERSHELL)/g' \
-- -e 's/@''GNULIB_GROUP_MEMBER''@/$(GNULIB_GROUP_MEMBER)/g' \
-- -e 's/@''GNULIB_ISATTY''@/$(GNULIB_ISATTY)/g' \
-- -e 's/@''GNULIB_LCHOWN''@/$(GNULIB_LCHOWN)/g' \
-- -e 's/@''GNULIB_LINK''@/$(GNULIB_LINK)/g' \
-- -e 's/@''GNULIB_LINKAT''@/$(GNULIB_LINKAT)/g' \
-- -e 's/@''GNULIB_LSEEK''@/$(GNULIB_LSEEK)/g' \
-- -e 's/@''GNULIB_PIPE''@/$(GNULIB_PIPE)/g' \
-- -e 's/@''GNULIB_PIPE2''@/$(GNULIB_PIPE2)/g' \
-- -e 's/@''GNULIB_PREAD''@/$(GNULIB_PREAD)/g' \
-- -e 's/@''GNULIB_PWRITE''@/$(GNULIB_PWRITE)/g' \
-- -e 's/@''GNULIB_READ''@/$(GNULIB_READ)/g' \
-- -e 's/@''GNULIB_READLINK''@/$(GNULIB_READLINK)/g' \
-- -e 's/@''GNULIB_READLINKAT''@/$(GNULIB_READLINKAT)/g' \
-- -e 's/@''GNULIB_RMDIR''@/$(GNULIB_RMDIR)/g' \
-- -e 's/@''GNULIB_SETHOSTNAME''@/$(GNULIB_SETHOSTNAME)/g' \
-- -e 's/@''GNULIB_SLEEP''@/$(GNULIB_SLEEP)/g' \
-- -e 's/@''GNULIB_SYMLINK''@/$(GNULIB_SYMLINK)/g' \
-- -e 's/@''GNULIB_SYMLINKAT''@/$(GNULIB_SYMLINKAT)/g' \
-- -e 's/@''GNULIB_TRUNCATE''@/$(GNULIB_TRUNCATE)/g' \
-- -e 's/@''GNULIB_TTYNAME_R''@/$(GNULIB_TTYNAME_R)/g' \
-- -e 's/@''GNULIB_UNISTD_H_GETOPT''@/0$(GNULIB_GL_UNISTD_H_GETOPT)/g' \
-- -e 's/@''GNULIB_UNISTD_H_NONBLOCKING''@/$(GNULIB_UNISTD_H_NONBLOCKING)/g' \
-- -e 's/@''GNULIB_UNISTD_H_SIGPIPE''@/$(GNULIB_UNISTD_H_SIGPIPE)/g' \
-- -e 's/@''GNULIB_UNLINK''@/$(GNULIB_UNLINK)/g' \
-- -e 's/@''GNULIB_UNLINKAT''@/$(GNULIB_UNLINKAT)/g' \
-- -e 's/@''GNULIB_USLEEP''@/$(GNULIB_USLEEP)/g' \
-- -e 's/@''GNULIB_WRITE''@/$(GNULIB_WRITE)/g' \
-- -e 's/@''GNULIB_MDA_ACCESS''@/$(GNULIB_MDA_ACCESS)/g' \
-- -e 's/@''GNULIB_MDA_CHDIR''@/$(GNULIB_MDA_CHDIR)/g' \
-- -e 's/@''GNULIB_MDA_CLOSE''@/$(GNULIB_MDA_CLOSE)/g' \
-- -e 's/@''GNULIB_MDA_DUP''@/$(GNULIB_MDA_DUP)/g' \
-- -e 's/@''GNULIB_MDA_DUP2''@/$(GNULIB_MDA_DUP2)/g' \
-- -e 's/@''GNULIB_MDA_EXECL''@/$(GNULIB_MDA_EXECL)/g' \
-- -e 's/@''GNULIB_MDA_EXECLE''@/$(GNULIB_MDA_EXECLE)/g' \
-- -e 's/@''GNULIB_MDA_EXECLP''@/$(GNULIB_MDA_EXECLP)/g' \
-- -e 's/@''GNULIB_MDA_EXECV''@/$(GNULIB_MDA_EXECV)/g' \
-- -e 's/@''GNULIB_MDA_EXECVE''@/$(GNULIB_MDA_EXECVE)/g' \
-- -e 's/@''GNULIB_MDA_EXECVP''@/$(GNULIB_MDA_EXECVP)/g' \
-- -e 's/@''GNULIB_MDA_EXECVPE''@/$(GNULIB_MDA_EXECVPE)/g' \
-- -e 's/@''GNULIB_MDA_GETCWD''@/$(GNULIB_MDA_GETCWD)/g' \
-- -e 's/@''GNULIB_MDA_GETPID''@/$(GNULIB_MDA_GETPID)/g' \
-- -e 's/@''GNULIB_MDA_ISATTY''@/$(GNULIB_MDA_ISATTY)/g' \
-- -e 's/@''GNULIB_MDA_LSEEK''@/$(GNULIB_MDA_LSEEK)/g' \
-- -e 's/@''GNULIB_MDA_READ''@/$(GNULIB_MDA_READ)/g' \
-- -e 's/@''GNULIB_MDA_RMDIR''@/$(GNULIB_MDA_RMDIR)/g' \
-- -e 's/@''GNULIB_MDA_SWAB''@/$(GNULIB_MDA_SWAB)/g' \
-- -e 's/@''GNULIB_MDA_UNLINK''@/$(GNULIB_MDA_UNLINK)/g' \
-- -e 's/@''GNULIB_MDA_WRITE''@/$(GNULIB_MDA_WRITE)/g' \
-+ -e 's/@''GNULIB_ACCESS''@/$(GL_GNULIB_ACCESS)/g' \
-+ -e 's/@''GNULIB_CHDIR''@/$(GL_GNULIB_CHDIR)/g' \
-+ -e 's/@''GNULIB_CHOWN''@/$(GL_GNULIB_CHOWN)/g' \
-+ -e 's/@''GNULIB_CLOSE''@/$(GL_GNULIB_CLOSE)/g' \
-+ -e 's/@''GNULIB_COPY_FILE_RANGE''@/$(GL_GNULIB_COPY_FILE_RANGE)/g' \
-+ -e 's/@''GNULIB_DUP''@/$(GL_GNULIB_DUP)/g' \
-+ -e 's/@''GNULIB_DUP2''@/$(GL_GNULIB_DUP2)/g' \
-+ -e 's/@''GNULIB_DUP3''@/$(GL_GNULIB_DUP3)/g' \
-+ -e 's/@''GNULIB_ENVIRON''@/$(GL_GNULIB_ENVIRON)/g' \
-+ -e 's/@''GNULIB_EUIDACCESS''@/$(GL_GNULIB_EUIDACCESS)/g' \
-+ -e 's/@''GNULIB_EXECL''@/$(GL_GNULIB_EXECL)/g' \
-+ -e 's/@''GNULIB_EXECLE''@/$(GL_GNULIB_EXECLE)/g' \
-+ -e 's/@''GNULIB_EXECLP''@/$(GL_GNULIB_EXECLP)/g' \
-+ -e 's/@''GNULIB_EXECV''@/$(GL_GNULIB_EXECV)/g' \
-+ -e 's/@''GNULIB_EXECVE''@/$(GL_GNULIB_EXECVE)/g' \
-+ -e 's/@''GNULIB_EXECVP''@/$(GL_GNULIB_EXECVP)/g' \
-+ -e 's/@''GNULIB_EXECVPE''@/$(GL_GNULIB_EXECVPE)/g' \
-+ -e 's/@''GNULIB_FACCESSAT''@/$(GL_GNULIB_FACCESSAT)/g' \
-+ -e 's/@''GNULIB_FCHDIR''@/$(GL_GNULIB_FCHDIR)/g' \
-+ -e 's/@''GNULIB_FCHOWNAT''@/$(GL_GNULIB_FCHOWNAT)/g' \
-+ -e 's/@''GNULIB_FDATASYNC''@/$(GL_GNULIB_FDATASYNC)/g' \
-+ -e 's/@''GNULIB_FSYNC''@/$(GL_GNULIB_FSYNC)/g' \
-+ -e 's/@''GNULIB_FTRUNCATE''@/$(GL_GNULIB_FTRUNCATE)/g' \
-+ -e 's/@''GNULIB_GETCWD''@/$(GL_GNULIB_GETCWD)/g' \
-+ -e 's/@''GNULIB_GETDOMAINNAME''@/$(GL_GNULIB_GETDOMAINNAME)/g' \
-+ -e 's/@''GNULIB_GETDTABLESIZE''@/$(GL_GNULIB_GETDTABLESIZE)/g' \
-+ -e 's/@''GNULIB_GETENTROPY''@/$(GL_GNULIB_GETENTROPY)/g' \
-+ -e 's/@''GNULIB_GETGROUPS''@/$(GL_GNULIB_GETGROUPS)/g' \
-+ -e 's/@''GNULIB_GETHOSTNAME''@/$(GL_GNULIB_GETHOSTNAME)/g' \
-+ -e 's/@''GNULIB_GETLOGIN''@/$(GL_GNULIB_GETLOGIN)/g' \
-+ -e 's/@''GNULIB_GETLOGIN_R''@/$(GL_GNULIB_GETLOGIN_R)/g' \
-+ -e 's/@''GNULIB_GETOPT_POSIX''@/$(GL_GNULIB_GETOPT_POSIX)/g' \
-+ -e 's/@''GNULIB_GETPAGESIZE''@/$(GL_GNULIB_GETPAGESIZE)/g' \
-+ -e 's/@''GNULIB_GETPASS''@/$(GL_GNULIB_GETPASS)/g' \
-+ -e 's/@''GNULIB_GETUSERSHELL''@/$(GL_GNULIB_GETUSERSHELL)/g' \
-+ -e 's/@''GNULIB_GROUP_MEMBER''@/$(GL_GNULIB_GROUP_MEMBER)/g' \
-+ -e 's/@''GNULIB_ISATTY''@/$(GL_GNULIB_ISATTY)/g' \
-+ -e 's/@''GNULIB_LCHOWN''@/$(GL_GNULIB_LCHOWN)/g' \
-+ -e 's/@''GNULIB_LINK''@/$(GL_GNULIB_LINK)/g' \
-+ -e 's/@''GNULIB_LINKAT''@/$(GL_GNULIB_LINKAT)/g' \
-+ -e 's/@''GNULIB_LSEEK''@/$(GL_GNULIB_LSEEK)/g' \
-+ -e 's/@''GNULIB_PIPE''@/$(GL_GNULIB_PIPE)/g' \
-+ -e 's/@''GNULIB_PIPE2''@/$(GL_GNULIB_PIPE2)/g' \
-+ -e 's/@''GNULIB_PREAD''@/$(GL_GNULIB_PREAD)/g' \
-+ -e 's/@''GNULIB_PWRITE''@/$(GL_GNULIB_PWRITE)/g' \
-+ -e 's/@''GNULIB_READ''@/$(GL_GNULIB_READ)/g' \
-+ -e 's/@''GNULIB_READLINK''@/$(GL_GNULIB_READLINK)/g' \
-+ -e 's/@''GNULIB_READLINKAT''@/$(GL_GNULIB_READLINKAT)/g' \
-+ -e 's/@''GNULIB_RMDIR''@/$(GL_GNULIB_RMDIR)/g' \
-+ -e 's/@''GNULIB_SETHOSTNAME''@/$(GL_GNULIB_SETHOSTNAME)/g' \
-+ -e 's/@''GNULIB_SLEEP''@/$(GL_GNULIB_SLEEP)/g' \
-+ -e 's/@''GNULIB_SYMLINK''@/$(GL_GNULIB_SYMLINK)/g' \
-+ -e 's/@''GNULIB_SYMLINKAT''@/$(GL_GNULIB_SYMLINKAT)/g' \
-+ -e 's/@''GNULIB_TRUNCATE''@/$(GL_GNULIB_TRUNCATE)/g' \
-+ -e 's/@''GNULIB_TTYNAME_R''@/$(GL_GNULIB_TTYNAME_R)/g' \
-+ -e 's/@''GNULIB_UNISTD_H_GETOPT''@/0$(GL_GNULIB_UNISTD_H_GETOPT)/g' \
-+ -e 's/@''GNULIB_UNISTD_H_NONBLOCKING''@/$(GL_GNULIB_UNISTD_H_NONBLOCKING)/g' \
-+ -e 's/@''GNULIB_UNISTD_H_SIGPIPE''@/$(GL_GNULIB_UNISTD_H_SIGPIPE)/g' \
-+ -e 's/@''GNULIB_UNLINK''@/$(GL_GNULIB_UNLINK)/g' \
-+ -e 's/@''GNULIB_UNLINKAT''@/$(GL_GNULIB_UNLINKAT)/g' \
-+ -e 's/@''GNULIB_USLEEP''@/$(GL_GNULIB_USLEEP)/g' \
-+ -e 's/@''GNULIB_WRITE''@/$(GL_GNULIB_WRITE)/g' \
-+ -e 's/@''GNULIB_MDA_ACCESS''@/$(GL_GNULIB_MDA_ACCESS)/g' \
-+ -e 's/@''GNULIB_MDA_CHDIR''@/$(GL_GNULIB_MDA_CHDIR)/g' \
-+ -e 's/@''GNULIB_MDA_CLOSE''@/$(GL_GNULIB_MDA_CLOSE)/g' \
-+ -e 's/@''GNULIB_MDA_DUP''@/$(GL_GNULIB_MDA_DUP)/g' \
-+ -e 's/@''GNULIB_MDA_DUP2''@/$(GL_GNULIB_MDA_DUP2)/g' \
-+ -e 's/@''GNULIB_MDA_EXECL''@/$(GL_GNULIB_MDA_EXECL)/g' \
-+ -e 's/@''GNULIB_MDA_EXECLE''@/$(GL_GNULIB_MDA_EXECLE)/g' \
-+ -e 's/@''GNULIB_MDA_EXECLP''@/$(GL_GNULIB_MDA_EXECLP)/g' \
-+ -e 's/@''GNULIB_MDA_EXECV''@/$(GL_GNULIB_MDA_EXECV)/g' \
-+ -e 's/@''GNULIB_MDA_EXECVE''@/$(GL_GNULIB_MDA_EXECVE)/g' \
-+ -e 's/@''GNULIB_MDA_EXECVP''@/$(GL_GNULIB_MDA_EXECVP)/g' \
-+ -e 's/@''GNULIB_MDA_EXECVPE''@/$(GL_GNULIB_MDA_EXECVPE)/g' \
-+ -e 's/@''GNULIB_MDA_GETCWD''@/$(GL_GNULIB_MDA_GETCWD)/g' \
-+ -e 's/@''GNULIB_MDA_GETPID''@/$(GL_GNULIB_MDA_GETPID)/g' \
-+ -e 's/@''GNULIB_MDA_ISATTY''@/$(GL_GNULIB_MDA_ISATTY)/g' \
-+ -e 's/@''GNULIB_MDA_LSEEK''@/$(GL_GNULIB_MDA_LSEEK)/g' \
-+ -e 's/@''GNULIB_MDA_READ''@/$(GL_GNULIB_MDA_READ)/g' \
-+ -e 's/@''GNULIB_MDA_RMDIR''@/$(GL_GNULIB_MDA_RMDIR)/g' \
-+ -e 's/@''GNULIB_MDA_SWAB''@/$(GL_GNULIB_MDA_SWAB)/g' \
-+ -e 's/@''GNULIB_MDA_UNLINK''@/$(GL_GNULIB_MDA_UNLINK)/g' \
-+ -e 's/@''GNULIB_MDA_WRITE''@/$(GL_GNULIB_MDA_WRITE)/g' \
- < $(srcdir)/unistd.in.h | \
- sed -e 's|@''HAVE_CHOWN''@|$(HAVE_CHOWN)|g' \
- -e 's|@''HAVE_COPY_FILE_RANGE''@|$(HAVE_COPY_FILE_RANGE)|g' \
-@@ -2080,49 +2196,49 @@ wchar.h: wchar.in.h $(top_builddir)/conf
- -e 's|@''NEXT_WCHAR_H''@|$(NEXT_WCHAR_H)|g' \
- -e 's|@''HAVE_WCHAR_H''@|$(HAVE_WCHAR_H)|g' \
- -e 's/@''HAVE_CRTDEFS_H''@/$(HAVE_CRTDEFS_H)/g' \
-- -e 's/@''GNULIB_OVERRIDES_WINT_T''@/$(GNULIB_OVERRIDES_WINT_T)/g' \
-- -e 's/@''GNULIB_BTOWC''@/$(GNULIB_BTOWC)/g' \
-- -e 's/@''GNULIB_WCTOB''@/$(GNULIB_WCTOB)/g' \
-- -e 's/@''GNULIB_MBSINIT''@/$(GNULIB_MBSINIT)/g' \
-- -e 's/@''GNULIB_MBRTOWC''@/$(GNULIB_MBRTOWC)/g' \
-- -e 's/@''GNULIB_MBRLEN''@/$(GNULIB_MBRLEN)/g' \
-- -e 's/@''GNULIB_MBSRTOWCS''@/$(GNULIB_MBSRTOWCS)/g' \
-- -e 's/@''GNULIB_MBSNRTOWCS''@/$(GNULIB_MBSNRTOWCS)/g' \
-- -e 's/@''GNULIB_WCRTOMB''@/$(GNULIB_WCRTOMB)/g' \
-- -e 's/@''GNULIB_WCSRTOMBS''@/$(GNULIB_WCSRTOMBS)/g' \
-- -e 's/@''GNULIB_WCSNRTOMBS''@/$(GNULIB_WCSNRTOMBS)/g' \
-- -e 's/@''GNULIB_WCWIDTH''@/$(GNULIB_WCWIDTH)/g' \
-- -e 's/@''GNULIB_WMEMCHR''@/$(GNULIB_WMEMCHR)/g' \
-- -e 's/@''GNULIB_WMEMCMP''@/$(GNULIB_WMEMCMP)/g' \
-- -e 's/@''GNULIB_WMEMCPY''@/$(GNULIB_WMEMCPY)/g' \
-- -e 's/@''GNULIB_WMEMMOVE''@/$(GNULIB_WMEMMOVE)/g' \
-- -e 's/@''GNULIB_WMEMPCPY''@/$(GNULIB_WMEMPCPY)/g' \
-- -e 's/@''GNULIB_WMEMSET''@/$(GNULIB_WMEMSET)/g' \
-- -e 's/@''GNULIB_WCSLEN''@/$(GNULIB_WCSLEN)/g' \
-- -e 's/@''GNULIB_WCSNLEN''@/$(GNULIB_WCSNLEN)/g' \
-- -e 's/@''GNULIB_WCSCPY''@/$(GNULIB_WCSCPY)/g' \
-- -e 's/@''GNULIB_WCPCPY''@/$(GNULIB_WCPCPY)/g' \
-- -e 's/@''GNULIB_WCSNCPY''@/$(GNULIB_WCSNCPY)/g' \
-- -e 's/@''GNULIB_WCPNCPY''@/$(GNULIB_WCPNCPY)/g' \
-- -e 's/@''GNULIB_WCSCAT''@/$(GNULIB_WCSCAT)/g' \
-- -e 's/@''GNULIB_WCSNCAT''@/$(GNULIB_WCSNCAT)/g' \
-- -e 's/@''GNULIB_WCSCMP''@/$(GNULIB_WCSCMP)/g' \
-- -e 's/@''GNULIB_WCSNCMP''@/$(GNULIB_WCSNCMP)/g' \
-- -e 's/@''GNULIB_WCSCASECMP''@/$(GNULIB_WCSCASECMP)/g' \
-- -e 's/@''GNULIB_WCSNCASECMP''@/$(GNULIB_WCSNCASECMP)/g' \
-- -e 's/@''GNULIB_WCSCOLL''@/$(GNULIB_WCSCOLL)/g' \
-- -e 's/@''GNULIB_WCSXFRM''@/$(GNULIB_WCSXFRM)/g' \
-- -e 's/@''GNULIB_WCSDUP''@/$(GNULIB_WCSDUP)/g' \
-- -e 's/@''GNULIB_WCSCHR''@/$(GNULIB_WCSCHR)/g' \
-- -e 's/@''GNULIB_WCSRCHR''@/$(GNULIB_WCSRCHR)/g' \
-- -e 's/@''GNULIB_WCSCSPN''@/$(GNULIB_WCSCSPN)/g' \
-- -e 's/@''GNULIB_WCSSPN''@/$(GNULIB_WCSSPN)/g' \
-- -e 's/@''GNULIB_WCSPBRK''@/$(GNULIB_WCSPBRK)/g' \
-- -e 's/@''GNULIB_WCSSTR''@/$(GNULIB_WCSSTR)/g' \
-- -e 's/@''GNULIB_WCSTOK''@/$(GNULIB_WCSTOK)/g' \
-- -e 's/@''GNULIB_WCSWIDTH''@/$(GNULIB_WCSWIDTH)/g' \
-- -e 's/@''GNULIB_WCSFTIME''@/$(GNULIB_WCSFTIME)/g' \
-- -e 's/@''GNULIB_MDA_WCSDUP''@/$(GNULIB_MDA_WCSDUP)/g' \
-+ -e 's/@''GNULIBHEADERS_OVERRIDE_WINT_T''@/$(GNULIBHEADERS_OVERRIDE_WINT_T)/g' \
-+ -e 's/@''GNULIB_BTOWC''@/$(GL_GNULIB_BTOWC)/g' \
-+ -e 's/@''GNULIB_WCTOB''@/$(GL_GNULIB_WCTOB)/g' \
-+ -e 's/@''GNULIB_MBSINIT''@/$(GL_GNULIB_MBSINIT)/g' \
-+ -e 's/@''GNULIB_MBRTOWC''@/$(GL_GNULIB_MBRTOWC)/g' \
-+ -e 's/@''GNULIB_MBRLEN''@/$(GL_GNULIB_MBRLEN)/g' \
-+ -e 's/@''GNULIB_MBSRTOWCS''@/$(GL_GNULIB_MBSRTOWCS)/g' \
-+ -e 's/@''GNULIB_MBSNRTOWCS''@/$(GL_GNULIB_MBSNRTOWCS)/g' \
-+ -e 's/@''GNULIB_WCRTOMB''@/$(GL_GNULIB_WCRTOMB)/g' \
-+ -e 's/@''GNULIB_WCSRTOMBS''@/$(GL_GNULIB_WCSRTOMBS)/g' \
-+ -e 's/@''GNULIB_WCSNRTOMBS''@/$(GL_GNULIB_WCSNRTOMBS)/g' \
-+ -e 's/@''GNULIB_WCWIDTH''@/$(GL_GNULIB_WCWIDTH)/g' \
-+ -e 's/@''GNULIB_WMEMCHR''@/$(GL_GNULIB_WMEMCHR)/g' \
-+ -e 's/@''GNULIB_WMEMCMP''@/$(GL_GNULIB_WMEMCMP)/g' \
-+ -e 's/@''GNULIB_WMEMCPY''@/$(GL_GNULIB_WMEMCPY)/g' \
-+ -e 's/@''GNULIB_WMEMMOVE''@/$(GL_GNULIB_WMEMMOVE)/g' \
-+ -e 's/@''GNULIB_WMEMPCPY''@/$(GL_GNULIB_WMEMPCPY)/g' \
-+ -e 's/@''GNULIB_WMEMSET''@/$(GL_GNULIB_WMEMSET)/g' \
-+ -e 's/@''GNULIB_WCSLEN''@/$(GL_GNULIB_WCSLEN)/g' \
-+ -e 's/@''GNULIB_WCSNLEN''@/$(GL_GNULIB_WCSNLEN)/g' \
-+ -e 's/@''GNULIB_WCSCPY''@/$(GL_GNULIB_WCSCPY)/g' \
-+ -e 's/@''GNULIB_WCPCPY''@/$(GL_GNULIB_WCPCPY)/g' \
-+ -e 's/@''GNULIB_WCSNCPY''@/$(GL_GNULIB_WCSNCPY)/g' \
-+ -e 's/@''GNULIB_WCPNCPY''@/$(GL_GNULIB_WCPNCPY)/g' \
-+ -e 's/@''GNULIB_WCSCAT''@/$(GL_GNULIB_WCSCAT)/g' \
-+ -e 's/@''GNULIB_WCSNCAT''@/$(GL_GNULIB_WCSNCAT)/g' \
-+ -e 's/@''GNULIB_WCSCMP''@/$(GL_GNULIB_WCSCMP)/g' \
-+ -e 's/@''GNULIB_WCSNCMP''@/$(GL_GNULIB_WCSNCMP)/g' \
-+ -e 's/@''GNULIB_WCSCASECMP''@/$(GL_GNULIB_WCSCASECMP)/g' \
-+ -e 's/@''GNULIB_WCSNCASECMP''@/$(GL_GNULIB_WCSNCASECMP)/g' \
-+ -e 's/@''GNULIB_WCSCOLL''@/$(GL_GNULIB_WCSCOLL)/g' \
-+ -e 's/@''GNULIB_WCSXFRM''@/$(GL_GNULIB_WCSXFRM)/g' \
-+ -e 's/@''GNULIB_WCSDUP''@/$(GL_GNULIB_WCSDUP)/g' \
-+ -e 's/@''GNULIB_WCSCHR''@/$(GL_GNULIB_WCSCHR)/g' \
-+ -e 's/@''GNULIB_WCSRCHR''@/$(GL_GNULIB_WCSRCHR)/g' \
-+ -e 's/@''GNULIB_WCSCSPN''@/$(GL_GNULIB_WCSCSPN)/g' \
-+ -e 's/@''GNULIB_WCSSPN''@/$(GL_GNULIB_WCSSPN)/g' \
-+ -e 's/@''GNULIB_WCSPBRK''@/$(GL_GNULIB_WCSPBRK)/g' \
-+ -e 's/@''GNULIB_WCSSTR''@/$(GL_GNULIB_WCSSTR)/g' \
-+ -e 's/@''GNULIB_WCSTOK''@/$(GL_GNULIB_WCSTOK)/g' \
-+ -e 's/@''GNULIB_WCSWIDTH''@/$(GL_GNULIB_WCSWIDTH)/g' \
-+ -e 's/@''GNULIB_WCSFTIME''@/$(GL_GNULIB_WCSFTIME)/g' \
-+ -e 's/@''GNULIB_MDA_WCSDUP''@/$(GL_GNULIB_MDA_WCSDUP)/g' \
- < $(srcdir)/wchar.in.h | \
- sed -e 's|@''HAVE_WINT_T''@|$(HAVE_WINT_T)|g' \
- -e 's|@''HAVE_BTOWC''@|$(HAVE_BTOWC)|g' \
-@@ -2222,14 +2338,14 @@ wctype.h: wctype.in.h $(top_builddir)/co
- -e 's|@''PRAGMA_COLUMNS''@|@PRAGMA_COLUMNS@|g' \
- -e 's|@''NEXT_WCTYPE_H''@|$(NEXT_WCTYPE_H)|g' \
- -e 's/@''HAVE_CRTDEFS_H''@/$(HAVE_CRTDEFS_H)/g' \
-- -e 's/@''GNULIB_OVERRIDES_WINT_T''@/$(GNULIB_OVERRIDES_WINT_T)/g' \
-- -e 's/@''GNULIB_ISWBLANK''@/$(GNULIB_ISWBLANK)/g' \
-- -e 's/@''GNULIB_ISWDIGIT''@/$(GNULIB_ISWDIGIT)/g' \
-- -e 's/@''GNULIB_ISWXDIGIT''@/$(GNULIB_ISWXDIGIT)/g' \
-- -e 's/@''GNULIB_WCTYPE''@/$(GNULIB_WCTYPE)/g' \
-- -e 's/@''GNULIB_ISWCTYPE''@/$(GNULIB_ISWCTYPE)/g' \
-- -e 's/@''GNULIB_WCTRANS''@/$(GNULIB_WCTRANS)/g' \
-- -e 's/@''GNULIB_TOWCTRANS''@/$(GNULIB_TOWCTRANS)/g' \
-+ -e 's/@''GNULIBHEADERS_OVERRIDE_WINT_T''@/$(GNULIBHEADERS_OVERRIDE_WINT_T)/g' \
-+ -e 's/@''GNULIB_ISWBLANK''@/$(GL_GNULIB_ISWBLANK)/g' \
-+ -e 's/@''GNULIB_ISWDIGIT''@/$(GL_GNULIB_ISWDIGIT)/g' \
-+ -e 's/@''GNULIB_ISWXDIGIT''@/$(GL_GNULIB_ISWXDIGIT)/g' \
-+ -e 's/@''GNULIB_WCTYPE''@/$(GL_GNULIB_WCTYPE)/g' \
-+ -e 's/@''GNULIB_ISWCTYPE''@/$(GL_GNULIB_ISWCTYPE)/g' \
-+ -e 's/@''GNULIB_WCTRANS''@/$(GL_GNULIB_WCTRANS)/g' \
-+ -e 's/@''GNULIB_TOWCTRANS''@/$(GL_GNULIB_TOWCTRANS)/g' \
- -e 's/@''HAVE_ISWBLANK''@/$(HAVE_ISWBLANK)/g' \
- -e 's/@''HAVE_ISWCNTRL''@/$(HAVE_ISWCNTRL)/g' \
- -e 's/@''HAVE_WCTYPE_T''@/$(HAVE_WCTYPE_T)/g' \
---- a/gnulib/lib/_Noreturn.h
-+++ b/gnulib/lib/_Noreturn.h
-@@ -2,16 +2,16 @@
- Copyright (C) 2011-2021 Free Software Foundation, Inc.
-
- This program is free software: you can redistribute it and/or modify it
-- under the terms of the GNU General Public License as published
-- by the Free Software Foundation; either version 3 of the License, or
-+ under the terms of the GNU Lesser General Public License as published
-+ by the Free Software Foundation; either version 2 of the License, or
- (at your option) any later version.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-- General Public License for more details.
-+ Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License
-+ You should have received a copy of the GNU Lesser General Public License
- along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- #ifndef _Noreturn
---- a/gnulib/lib/alloca.in.h
-+++ b/gnulib/lib/alloca.in.h
-@@ -3,20 +3,18 @@
- Copyright (C) 1995, 1999, 2001-2004, 2006-2021 Free Software Foundation,
- Inc.
-
-- This program is free software; you can redistribute it and/or modify it
-- under the terms of the GNU General Public License as published
-- by the Free Software Foundation; either version 3, or (at your option)
-- any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-- General Public License for more details.
-+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public
-- License along with this program; if not, see
-- <https://www.gnu.org/licenses/>.
-- */
-+ You should have received a copy of the GNU Lesser General Public License
-+ along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- /* Avoid using the symbol _ALLOCA_H here, as Bison assumes _ALLOCA_H
- means there is a real alloca function. */
---- a/gnulib/lib/arg-nonnull.h
-+++ b/gnulib/lib/arg-nonnull.h
-@@ -2,16 +2,16 @@
- Copyright (C) 2009-2021 Free Software Foundation, Inc.
-
- This program is free software: you can redistribute it and/or modify it
-- under the terms of the GNU General Public License as published
-- by the Free Software Foundation; either version 3 of the License, or
-+ under the terms of the GNU Lesser General Public License as published
-+ by the Free Software Foundation; either version 2 of the License, or
- (at your option) any later version.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-- General Public License for more details.
-+ Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License
-+ You should have received a copy of the GNU Lesser General Public License
- along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- /* _GL_ARG_NONNULL((n,...,m)) tells the compiler and static analyzer tools
---- a/gnulib/lib/argz.c
-+++ b/gnulib/lib/argz.c
-@@ -3,18 +3,18 @@
- Foundation, Inc.
- This file is part of the GNU C Library.
-
-- This program is free software; you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 3, or (at your option)
-- any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License along
-- with this program; if not, see <https://www.gnu.org/licenses/>. */
-+ You should have received a copy of the GNU Lesser General Public License
-+ along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- #include <config.h>
-
---- a/gnulib/lib/argz.in.h
-+++ b/gnulib/lib/argz.in.h
-@@ -3,17 +3,17 @@
- Inc.
- This file is part of the GNU C Library.
-
-- This program is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 3 of the License, or
-- (at your option) any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License
-+ You should have received a copy of the GNU Lesser General Public License
- along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- #ifndef _ARGZ_H
---- a/gnulib/lib/asnprintf.c
-+++ b/gnulib/lib/asnprintf.c
-@@ -1,18 +1,18 @@
- /* Formatted output to strings.
- Copyright (C) 1999, 2002, 2006, 2009-2021 Free Software Foundation, Inc.
-
-- This program is free software; you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 3, or (at your option)
-- any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License along
-- with this program; if not, see <https://www.gnu.org/licenses/>. */
-+ You should have received a copy of the GNU Lesser General Public License
-+ along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- #include <config.h>
-
---- a/gnulib/lib/asprintf.c
-+++ b/gnulib/lib/asprintf.c
-@@ -2,18 +2,18 @@
- Copyright (C) 1999, 2002, 2006-2007, 2009-2021 Free Software Foundation,
- Inc.
-
-- This program is free software; you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 3, or (at your option)
-- any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License along
-- with this program; if not, see <https://www.gnu.org/licenses/>. */
-+ You should have received a copy of the GNU Lesser General Public License
-+ along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- #include <config.h>
-
---- a/gnulib/lib/attribute.h
-+++ b/gnulib/lib/attribute.h
-@@ -2,17 +2,17 @@
-
- Copyright 2020-2021 Free Software Foundation, Inc.
-
-- This program is free software: you can redistribute it and/or modify it
-- under the terms of the GNU General Public License as published
-- by the Free Software Foundation; either version 3 of the License, or
-- (at your option) any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-- General Public License for more details.
-+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License
-+ You should have received a copy of the GNU Lesser General Public License
- along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- /* Written by Paul Eggert. */
---- a/gnulib/lib/basename-lgpl.c
-+++ b/gnulib/lib/basename-lgpl.c
-@@ -3,17 +3,17 @@
- Copyright (C) 1990, 1998-2001, 2003-2006, 2009-2021 Free Software
- Foundation, Inc.
-
-- This program is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 3 of the License, or
-- (at your option) any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License
-+ You should have received a copy of the GNU Lesser General Public License
- along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- #include <config.h>
---- a/gnulib/lib/basename-lgpl.h
-+++ b/gnulib/lib/basename-lgpl.h
-@@ -1,20 +1,20 @@
--/* Extract the last component (base name) of a file name.
-+/* Extract the last component (base name) of a file name.
-
-- Copyright (C) 1998, 2001, 2003-2006, 2009-2021 Free Software Foundation,
-- Inc.
-+ Copyright (C) 1998, 2001, 2003-2006, 2009-2021 Free Software Foundation,
-+ Inc.
-
-- This program is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 3 of the License, or
-- (at your option) any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-+ This file is distributed in the hope that it will be useful,
-+ but WITHOUT ANY WARRANTY; without even the implied warranty of
-+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License
-- along with this program. If not, see <https://www.gnu.org/licenses/>. */
-+ You should have received a copy of the GNU Lesser General Public License
-+ along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- #ifndef _BASENAME_LGPL_H
- #define _BASENAME_LGPL_H
---- a/gnulib/lib/btowc.c
-+++ b/gnulib/lib/btowc.c
-@@ -2,17 +2,17 @@
- Copyright (C) 2008, 2010-2021 Free Software Foundation, Inc.
- Written by Bruno Haible <bruno@clisp.org>, 2008.
-
-- This program is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 3 of the License, or
-- (at your option) any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License
-+ You should have received a copy of the GNU Lesser General Public License
- along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- #include <config.h>
---- a/gnulib/lib/c++defs.h
-+++ b/gnulib/lib/c++defs.h
-@@ -2,16 +2,16 @@
- Copyright (C) 2010-2021 Free Software Foundation, Inc.
-
- This program is free software: you can redistribute it and/or modify it
-- under the terms of the GNU General Public License as published
-- by the Free Software Foundation; either version 3 of the License, or
-+ under the terms of the GNU Lesser General Public License as published
-+ by the Free Software Foundation; either version 2 of the License, or
- (at your option) any later version.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-- General Public License for more details.
-+ Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License
-+ You should have received a copy of the GNU Lesser General Public License
- along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- #ifndef _GL_CXXDEFS_H
---- /dev/null
-+++ b/gnulib/lib/calloc.c
-@@ -0,0 +1,55 @@
-+/* calloc() function that is glibc compatible.
-+ This wrapper function is required at least on Tru64 UNIX 5.1 and mingw.
-+ Copyright (C) 2004-2007, 2009-2021 Free Software Foundation, Inc.
-+
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-+
-+ This file is distributed in the hope that it will be useful,
-+ but WITHOUT ANY WARRANTY; without even the implied warranty of
-+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-+ GNU Lesser General Public License for more details.
-+
-+ You should have received a copy of the GNU Lesser General Public License
-+ along with this program. If not, see <https://www.gnu.org/licenses/>. */
-+
-+/* written by Jim Meyering and Bruno Haible */
-+
-+#include <config.h>
-+
-+/* Specification. */
-+#include <stdlib.h>
-+
-+#include <errno.h>
-+
-+#include "xalloc-oversized.h"
-+
-+/* Call the system's calloc below. */
-+#undef calloc
-+
-+/* Allocate and zero-fill an NxS-byte block of memory from the heap,
-+ even if N or S is zero. */
-+
-+void *
-+rpl_calloc (size_t n, size_t s)
-+{
-+ if (n == 0 || s == 0)
-+ n = s = 1;
-+
-+ if (xalloc_oversized (n, s))
-+ {
-+ errno = ENOMEM;
-+ return NULL;
-+ }
-+
-+ void *result = calloc (n, s);
-+
-+#if !HAVE_MALLOC_POSIX
-+ if (result == NULL)
-+ errno = ENOMEM;
-+#endif
-+
-+ return result;
-+}
---- a/gnulib/lib/cdefs.h
-+++ b/gnulib/lib/cdefs.h
-@@ -2,16 +2,16 @@
- This file is part of the GNU C Library.
-
- The GNU C Library is free software; you can redistribute it and/or
-- modify it under the terms of the GNU General Public
-+ modify it under the terms of the GNU Lesser General Public
- License as published by the Free Software Foundation; either
-- version 3 of the License, or (at your option) any later version.
-+ version 2.1 of the License, or (at your option) any later version.
-
- The GNU C Library is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-- General Public License for more details.
-+ Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public
-+ You should have received a copy of the GNU Lesser General Public
- License along with the GNU C Library; if not, see
- <https://www.gnu.org/licenses/>. */
-
-@@ -259,10 +259,12 @@
- # define __attribute_const__ /* Ignore */
- #endif
-
--#if defined __STDC_VERSION__ && 201710L < __STDC_VERSION__
--# define __attribute_maybe_unused__ [[__maybe_unused__]]
--#elif __GNUC_PREREQ (2,7) || __glibc_has_attribute (__unused__)
-+#if __GNUC_PREREQ (2,7) || __glibc_has_attribute (__unused__)
- # define __attribute_maybe_unused__ __attribute__ ((__unused__))
-+/* Once the next version of the C standard comes out, we can
-+ do something like the following here:
-+ #elif defined __STDC_VERSION__ && 202???L <= __STDC_VERSION__
-+ # define __attribute_maybe_unused__ [[__maybe_unused__]] */
- #else
- # define __attribute_maybe_unused__ /* Ignore */
- #endif
-@@ -320,7 +322,9 @@
- #endif
-
- /* The nonnull function attribute marks pointer parameters that
-- must not be NULL. */
-+ must not be NULL. This has the name __nonnull in glibc,
-+ and __attribute_nonnull__ in files shared with Gnulib to avoid
-+ collision with a different __nonnull in DragonFlyBSD 5.9. */
- #ifndef __attribute_nonnull__
- # if __GNUC_PREREQ (3,3) || __glibc_has_attribute (__nonnull__)
- # define __attribute_nonnull__(params) __attribute__ ((__nonnull__ params))
-@@ -485,9 +489,9 @@
- [!!sizeof (struct { int __error_if_negative: (expr) ? 2 : -1; })]
- #endif
-
--/* The #ifndef lets Gnulib avoid including these on non-glibc
-- platforms, where the includes typically do not exist. */
--#ifndef __WORDSIZE
-+/* Gnulib avoids including these, as they don't work on non-glibc or
-+ older glibc platforms. */
-+#ifndef __GNULIB_CDEFS
- # include <bits/wordsize.h>
- # include <bits/long-double.h>
- #endif
---- a/gnulib/lib/cloexec.c
-+++ b/gnulib/lib/cloexec.c
-@@ -2,20 +2,20 @@
-
- Copyright (C) 1991, 2004-2006, 2009-2021 Free Software Foundation, Inc.
-
-- This program is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 3 of the License, or
-- (at your option) any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License
-- along with this program. If not, see <https://www.gnu.org/licenses/>.
-+ You should have received a copy of the GNU Lesser General Public License
-+ along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
-- The code is taken from glibc/manual/llio.texi */
-+/* The code is taken from glibc/manual/llio.texi */
-
- #include <config.h>
-
---- a/gnulib/lib/cloexec.h
-+++ b/gnulib/lib/cloexec.h
-@@ -2,20 +2,18 @@
-
- Copyright (C) 2004, 2009-2021 Free Software Foundation, Inc.
-
-- This program is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 3 of the License, or
-- (at your option) any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License
-- along with this program. If not, see <https://www.gnu.org/licenses/>.
--
--*/
-+ You should have received a copy of the GNU Lesser General Public License
-+ along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- #include <stdbool.h>
-
---- a/gnulib/lib/close.c
-+++ b/gnulib/lib/close.c
-@@ -1,17 +1,17 @@
- /* close replacement.
- Copyright (C) 2008-2021 Free Software Foundation, Inc.
-
-- This program is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 3 of the License, or
-- (at your option) any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License
-+ You should have received a copy of the GNU Lesser General Public License
- along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- #include <config.h>
---- a/gnulib/lib/dup2.c
-+++ b/gnulib/lib/dup2.c
-@@ -2,17 +2,17 @@
-
- Copyright (C) 1999, 2004-2007, 2009-2021 Free Software Foundation, Inc.
-
-- This program is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 3 of the License, or
-- (at your option) any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License
-+ You should have received a copy of the GNU Lesser General Public License
- along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- /* written by Paul Eggert */
---- a/gnulib/lib/dynarray.h
-+++ b/gnulib/lib/dynarray.h
-@@ -1,17 +1,17 @@
- /* Type-safe arrays which grow dynamically.
- Copyright 2021 Free Software Foundation, Inc.
-
-- This program is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 3 of the License, or
-- (at your option) any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License
-+ You should have received a copy of the GNU Lesser General Public License
- along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- /* Written by Paul Eggert and Bruno Haible, 2021. */
-@@ -257,18 +257,22 @@ static DYNARRAY_ELEMENT *
-
- #if defined DYNARRAY_STRUCT || defined DYNARRAY_ELEMENT || defined DYNARRAY_PREFIX
-
--# include <libc-config.h>
-+# ifndef _GL_LIKELY
-+/* Rely on __builtin_expect, as provided by the module 'builtin-expect'. */
-+# define _GL_LIKELY(cond) __builtin_expect ((cond), 1)
-+# define _GL_UNLIKELY(cond) __builtin_expect ((cond), 0)
-+# endif
-
- /* Define auxiliary structs and declare auxiliary functions, common to all
- instantiations of dynarray. */
--# include <malloc/dynarray.h>
-+# include <malloc/dynarray.gl.h>
-
- /* Define the instantiation, specified through
- DYNARRAY_STRUCT
- DYNARRAY_ELEMENT
- DYNARRAY_PREFIX
- etc. */
--# include <malloc/dynarray-skeleton.c>
-+# include <malloc/dynarray-skeleton.gl.h>
-
- #else
-
---- a/gnulib/lib/errno.in.h
-+++ b/gnulib/lib/errno.in.h
-@@ -2,18 +2,18 @@
-
- Copyright (C) 2008-2021 Free Software Foundation, Inc.
-
-- This program is free software; you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 3, or (at your option)
-- any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License
-- along with this program; if not, see <https://www.gnu.org/licenses/>. */
-+ You should have received a copy of the GNU Lesser General Public License
-+ along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- #ifndef _@GUARD_PREFIX@_ERRNO_H
-
---- a/gnulib/lib/error.c
-+++ b/gnulib/lib/error.c
-@@ -2,17 +2,17 @@
- Copyright (C) 1990-1998, 2000-2007, 2009-2021 Free Software Foundation, Inc.
- This file is part of the GNU C Library.
-
-- This program is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 3 of the License, or
-- (at your option) any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License
-+ You should have received a copy of the GNU Lesser General Public License
- along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- /* Written by David MacKenzie <djm@gnu.ai.mit.edu>. */
---- a/gnulib/lib/error.h
-+++ b/gnulib/lib/error.h
-@@ -3,17 +3,17 @@
- Inc.
- This file is part of the GNU C Library.
-
-- This program is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 3 of the License, or
-- (at your option) any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License
-+ You should have received a copy of the GNU Lesser General Public License
- along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- #ifndef _ERROR_H
---- a/gnulib/lib/exitfail.c
-+++ b/gnulib/lib/exitfail.c
-@@ -2,17 +2,17 @@
-
- Copyright (C) 2002-2003, 2005-2007, 2009-2021 Free Software Foundation, Inc.
-
-- This program is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 3 of the License, or
-- (at your option) any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License
-+ You should have received a copy of the GNU Lesser General Public License
- along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- #include <config.h>
---- a/gnulib/lib/exitfail.h
-+++ b/gnulib/lib/exitfail.h
-@@ -2,17 +2,17 @@
-
- Copyright (C) 2002, 2009-2021 Free Software Foundation, Inc.
-
-- This program is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 3 of the License, or
-- (at your option) any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License
-+ You should have received a copy of the GNU Lesser General Public License
- along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- extern int volatile exit_failure;
---- a/gnulib/lib/fcntl.c
-+++ b/gnulib/lib/fcntl.c
-@@ -2,17 +2,17 @@
-
- Copyright (C) 2009-2021 Free Software Foundation, Inc.
-
-- This program is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 3 of the License, or
-- (at your option) any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License
-+ You should have received a copy of the GNU Lesser General Public License
- along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- /* Written by Eric Blake <ebb9@byu.net>. */
---- a/gnulib/lib/fcntl.in.h
-+++ b/gnulib/lib/fcntl.in.h
-@@ -2,17 +2,17 @@
-
- Copyright (C) 2006-2021 Free Software Foundation, Inc.
-
-- This program is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 3 of the License, or
-- (at your option) any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License
-+ You should have received a copy of the GNU Lesser General Public License
- along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- /* written by Paul Eggert */
---- a/gnulib/lib/fd-hook.c
-+++ b/gnulib/lib/fd-hook.c
-@@ -2,17 +2,17 @@
- Copyright (C) 2009-2021 Free Software Foundation, Inc.
- Written by Bruno Haible <bruno@clisp.org>, 2009.
-
-- This program is free software: you can redistribute it and/or modify it
-- under the terms of the GNU General Public License as published
-- by the Free Software Foundation; either version 3 of the License, or
-- (at your option) any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-- General Public License for more details.
-+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License
-+ You should have received a copy of the GNU Lesser General Public License
- along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- #include <config.h>
---- a/gnulib/lib/fd-hook.h
-+++ b/gnulib/lib/fd-hook.h
-@@ -1,17 +1,17 @@
- /* Hook for making file descriptor functions close(), ioctl() extensible.
- Copyright (C) 2009-2021 Free Software Foundation, Inc.
-
-- This program is free software: you can redistribute it and/or modify it
-- under the terms of the GNU General Public License as published
-- by the Free Software Foundation; either version 3 of the License, or
-- (at your option) any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-- General Public License for more details.
-+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License
-+ You should have received a copy of the GNU Lesser General Public License
- along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
-
---- a/gnulib/lib/filename.h
-+++ b/gnulib/lib/filename.h
-@@ -3,16 +3,16 @@
- This file is part of the GNU C Library.
-
- The GNU C Library is free software; you can redistribute it and/or
-- modify it under the terms of the GNU General Public
-+ modify it under the terms of the GNU Lesser General Public
- License as published by the Free Software Foundation; either
-- version 3 of the License, or (at your option) any later version.
-+ version 2.1 of the License, or (at your option) any later version.
-
- The GNU C Library is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-- General Public License for more details.
-+ Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public
-+ You should have received a copy of the GNU Lesser General Public
- License along with the GNU C Library; if not, see
- <https://www.gnu.org/licenses/>. */
-
---- a/gnulib/lib/float+.h
-+++ b/gnulib/lib/float+.h
-@@ -2,18 +2,18 @@
- Copyright (C) 2007, 2009-2021 Free Software Foundation, Inc.
- Written by Bruno Haible <bruno@clisp.org>, 2007.
-
-- This program is free software; you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 3, or (at your option)
-- any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License
-- along with this program; if not, see <https://www.gnu.org/licenses/>. */
-+ You should have received a copy of the GNU Lesser General Public License
-+ along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- #ifndef _FLOATPLUS_H
- #define _FLOATPLUS_H
---- a/gnulib/lib/float.c
-+++ b/gnulib/lib/float.c
-@@ -2,17 +2,17 @@
- Copyright (C) 2011-2021 Free Software Foundation, Inc.
- Written by Bruno Haible <bruno@clisp.org>, 2011.
-
-- This program is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 3 of the License, or
-- (at your option) any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License
-+ You should have received a copy of the GNU Lesser General Public License
- along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- #include <config.h>
---- a/gnulib/lib/float.in.h
-+++ b/gnulib/lib/float.in.h
-@@ -2,17 +2,17 @@
-
- Copyright (C) 2007-2021 Free Software Foundation, Inc.
-
-- This program is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 3 of the License, or
-- (at your option) any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License
-+ You should have received a copy of the GNU Lesser General Public License
- along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- #ifndef _@GUARD_PREFIX@_FLOAT_H
---- /dev/null
-+++ b/gnulib/lib/free.c
-@@ -0,0 +1,53 @@
-+/* Make free() preserve errno.
-+
-+ Copyright (C) 2003, 2006, 2009-2021 Free Software Foundation, Inc.
-+
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-+
-+ This file is distributed in the hope that it will be useful,
-+ but WITHOUT ANY WARRANTY; without even the implied warranty of
-+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-+ GNU Lesser General Public License for more details.
-+
-+ You should have received a copy of the GNU Lesser General Public License
-+ along with this program. If not, see <https://www.gnu.org/licenses/>. */
-+
-+/* written by Paul Eggert */
-+
-+#include <config.h>
-+
-+/* Specification. */
-+#include <stdlib.h>
-+
-+/* A function definition is only needed if HAVE_FREE_POSIX is not defined. */
-+#if !HAVE_FREE_POSIX
-+
-+# include <errno.h>
-+
-+void
-+rpl_free (void *p)
-+# undef free
-+{
-+# if defined __GNUC__ && !defined __clang__
-+ /* An invalid GCC optimization
-+ <https://gcc.gnu.org/bugzilla/show_bug.cgi?id=98396>
-+ would optimize away the assignments in the code below, when link-time
-+ optimization (LTO) is enabled. Make the code more complicated, so that
-+ GCC does not grok how to optimize it. */
-+ int err[2];
-+ err[0] = errno;
-+ err[1] = errno;
-+ errno = 0;
-+ free (p);
-+ errno = err[errno == 0];
-+# else
-+ int err = errno;
-+ free (p);
-+ errno = err;
-+# endif
-+}
-+
-+#endif
---- a/gnulib/lib/fstat.c
-+++ b/gnulib/lib/fstat.c
-@@ -1,17 +1,17 @@
- /* fstat() replacement.
- Copyright (C) 2011-2021 Free Software Foundation, Inc.
-
-- This program is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 3 of the License, or
-- (at your option) any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License
-+ You should have received a copy of the GNU Lesser General Public License
- along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- /* If the user's config.h happens to include <sys/stat.h>, let it include only
---- a/gnulib/lib/getdtablesize.c
-+++ b/gnulib/lib/getdtablesize.c
-@@ -2,17 +2,17 @@
- Copyright (C) 2008-2021 Free Software Foundation, Inc.
- Written by Bruno Haible <bruno@clisp.org>, 2008.
-
-- This program is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 3 of the License, or
-- (at your option) any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License
-+ You should have received a copy of the GNU Lesser General Public License
- along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- #include <config.h>
---- a/gnulib/lib/getopt-cdefs.in.h
-+++ b/gnulib/lib/getopt-cdefs.in.h
-@@ -4,19 +4,18 @@
- Unlike most of the getopt implementation, it is NOT shared
- with the GNU C Library.
-
-- This file is free software; you can redistribute it and/or modify it
-- under the terms of the GNU General Public License as
-- published by the Free Software Foundation; either version 3 of
-- the License, or (at your option) any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This file is distributed in the hope that it will be useful, but
-- WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-- General Public License for more details.
-+ This file is distributed in the hope that it will be useful,
-+ but WITHOUT ANY WARRANTY; without even the implied warranty of
-+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public
-- License along with gnulib; if not, see
-- <https://www.gnu.org/licenses/>. */
-+ You should have received a copy of the GNU Lesser General Public License
-+ along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- #ifndef _GETOPT_CDEFS_H
- #define _GETOPT_CDEFS_H 1
---- a/gnulib/lib/getopt-core.h
-+++ b/gnulib/lib/getopt-core.h
-@@ -4,16 +4,16 @@
- Patches to this file should be submitted to both projects.
-
- The GNU C Library is free software; you can redistribute it and/or
-- modify it under the terms of the GNU General Public
-+ modify it under the terms of the GNU Lesser General Public
- License as published by the Free Software Foundation; either
-- version 3 of the License, or (at your option) any later version.
-+ version 2.1 of the License, or (at your option) any later version.
-
- The GNU C Library is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-- General Public License for more details.
-+ Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public
-+ You should have received a copy of the GNU Lesser General Public
- License along with the GNU C Library; if not, see
- <https://www.gnu.org/licenses/>. */
-
---- a/gnulib/lib/getopt-ext.h
-+++ b/gnulib/lib/getopt-ext.h
-@@ -4,16 +4,16 @@
- Patches to this file should be submitted to both projects.
-
- The GNU C Library is free software; you can redistribute it and/or
-- modify it under the terms of the GNU General Public
-+ modify it under the terms of the GNU Lesser General Public
- License as published by the Free Software Foundation; either
-- version 3 of the License, or (at your option) any later version.
-+ version 2.1 of the License, or (at your option) any later version.
-
- The GNU C Library is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-- General Public License for more details.
-+ Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public
-+ You should have received a copy of the GNU Lesser General Public
- License along with the GNU C Library; if not, see
- <https://www.gnu.org/licenses/>. */
-
---- a/gnulib/lib/getopt-pfx-core.h
-+++ b/gnulib/lib/getopt-pfx-core.h
-@@ -4,19 +4,18 @@
- Unlike most of the getopt implementation, it is NOT shared
- with the GNU C Library.
-
-- This file is free software; you can redistribute it and/or modify it
-- under the terms of the GNU General Public License as
-- published by the Free Software Foundation; either version 3 of
-- the License, or (at your option) any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This file is distributed in the hope that it will be useful, but
-- WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-- General Public License for more details.
-+ This file is distributed in the hope that it will be useful,
-+ but WITHOUT ANY WARRANTY; without even the implied warranty of
-+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public
-- License along with gnulib; if not, see
-- <https://www.gnu.org/licenses/>. */
-+ You should have received a copy of the GNU Lesser General Public License
-+ along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- #ifndef _GETOPT_PFX_CORE_H
- #define _GETOPT_PFX_CORE_H 1
---- a/gnulib/lib/getopt-pfx-ext.h
-+++ b/gnulib/lib/getopt-pfx-ext.h
-@@ -4,19 +4,18 @@
- Unlike most of the getopt implementation, it is NOT shared
- with the GNU C Library.
-
-- This file is free software; you can redistribute it and/or modify it
-- under the terms of the GNU General Public License as
-- published by the Free Software Foundation; either version 3 of
-- the License, or (at your option) any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This file is distributed in the hope that it will be useful, but
-- WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-- General Public License for more details.
-+ This file is distributed in the hope that it will be useful,
-+ but WITHOUT ANY WARRANTY; without even the implied warranty of
-+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public
-- License along with gnulib; if not, see
-- <https://www.gnu.org/licenses/>. */
-+ You should have received a copy of the GNU Lesser General Public License
-+ along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- #ifndef _GETOPT_PFX_EXT_H
- #define _GETOPT_PFX_EXT_H 1
---- a/gnulib/lib/getopt.c
-+++ b/gnulib/lib/getopt.c
-@@ -4,16 +4,16 @@
- Patches to this file should be submitted to both projects.
-
- The GNU C Library is free software; you can redistribute it and/or
-- modify it under the terms of the GNU General Public
-+ modify it under the terms of the GNU Lesser General Public
- License as published by the Free Software Foundation; either
-- version 3 of the License, or (at your option) any later version.
-+ version 2.1 of the License, or (at your option) any later version.
-
- The GNU C Library is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-- General Public License for more details.
-+ Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public
-+ You should have received a copy of the GNU Lesser General Public
- License along with the GNU C Library; if not, see
- <https://www.gnu.org/licenses/>. */
-
---- a/gnulib/lib/getopt.in.h
-+++ b/gnulib/lib/getopt.in.h
-@@ -5,18 +5,18 @@
- with the GNU C Library, which supplies a different version of
- this file.
-
-- This file is free software; you can redistribute it and/or modify it
-- under the terms of the GNU General Public License as
-- published by the Free Software Foundation; either version 3 of
-- the License, or (at your option) any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This file is distributed in the hope that it will be useful, but
-- WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-- General Public License for more details.
-+ This file is distributed in the hope that it will be useful,
-+ but WITHOUT ANY WARRANTY; without even the implied warranty of
-+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public
-- License along with gnulib; if not, see <https://www.gnu.org/licenses/>. */
-+ You should have received a copy of the GNU Lesser General Public License
-+ along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- #ifndef _@GUARD_PREFIX@_GETOPT_H
-
---- a/gnulib/lib/getopt1.c
-+++ b/gnulib/lib/getopt1.c
-@@ -4,16 +4,16 @@
- Patches to this file should be submitted to both projects.
-
- The GNU C Library is free software; you can redistribute it and/or
-- modify it under the terms of the GNU General Public
-+ modify it under the terms of the GNU Lesser General Public
- License as published by the Free Software Foundation; either
-- version 3 of the License, or (at your option) any later version.
-+ version 2.1 of the License, or (at your option) any later version.
-
- The GNU C Library is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-- General Public License for more details.
-+ Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public
-+ You should have received a copy of the GNU Lesser General Public
- License along with the GNU C Library; if not, see
- <https://www.gnu.org/licenses/>. */
-
---- a/gnulib/lib/getopt_int.h
-+++ b/gnulib/lib/getopt_int.h
-@@ -4,16 +4,16 @@
- Patches to this file should be submitted to both projects.
-
- The GNU C Library is free software; you can redistribute it and/or
-- modify it under the terms of the GNU General Public
-+ modify it under the terms of the GNU Lesser General Public
- License as published by the Free Software Foundation; either
-- version 3 of the License, or (at your option) any later version.
-+ version 2.1 of the License, or (at your option) any later version.
-
- The GNU C Library is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-- General Public License for more details.
-+ Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public
-+ You should have received a copy of the GNU Lesser General Public
- License along with the GNU C Library; if not, see
- <https://www.gnu.org/licenses/>. */
-
---- a/gnulib/lib/getprogname.c
-+++ b/gnulib/lib/getprogname.c
-@@ -2,16 +2,16 @@
- Copyright (C) 2016-2021 Free Software Foundation, Inc.
-
- This program is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 3 of the License, or
-+ it under the terms of the GNU Lesser General Public License as published by
-+ the Free Software Foundation; either version 2.1 of the License, or
- (at your option) any later version.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License
-+ You should have received a copy of the GNU Lesser General Public License
- along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- #include <config.h>
-@@ -43,7 +43,7 @@
- # include <string.h>
- #endif
-
--#ifdef __sgi
-+#if defined __sgi || defined __osf__
- # include <string.h>
- # include <unistd.h>
- # include <stdio.h>
-@@ -224,11 +224,15 @@ getprogname (void)
- free (buf.ps_pathptr);
- }
- return p;
--# elif defined __sgi /* IRIX */
-+# elif defined __sgi || defined __osf__ /* IRIX or Tru64 */
- char filename[50];
- int fd;
-
-- sprintf (filename, "/proc/pinfo/%d", (int) getpid ());
-+ # if defined __sgi
-+ sprintf (filename, "/proc/pinfo/%d", (int) getpid ());
-+ # else
-+ sprintf (filename, "/proc/%d", (int) getpid ());
-+ # endif
- fd = open (filename, O_RDONLY | O_CLOEXEC);
- if (0 <= fd)
- {
---- a/gnulib/lib/getprogname.h
-+++ b/gnulib/lib/getprogname.h
-@@ -2,16 +2,16 @@
- Copyright (C) 2016-2021 Free Software Foundation, Inc.
-
- This program is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 3 of the License, or
-+ it under the terms of the GNU Lesser General Public License as published by
-+ the Free Software Foundation; either version 2.1 of the License, or
- (at your option) any later version.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License
-+ You should have received a copy of the GNU Lesser General Public License
- along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- #ifndef _GL_GETPROGNAME_H
---- a/gnulib/lib/gettext.h
-+++ b/gnulib/lib/gettext.h
-@@ -2,18 +2,18 @@
- Copyright (C) 1995-1998, 2000-2002, 2004-2006, 2009-2021 Free Software
- Foundation, Inc.
-
-- This program is free software; you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 3, or (at your option)
-- any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License along
-- with this program; if not, see <https://www.gnu.org/licenses/>. */
-+ You should have received a copy of the GNU Lesser General Public License
-+ along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- #ifndef _LIBGETTEXT_H
- #define _LIBGETTEXT_H 1
---- a/gnulib/lib/glthread/lock.c
-+++ b/gnulib/lib/glthread/lock.c
-@@ -1,18 +1,18 @@
- /* Locking in multithreaded situations.
- Copyright (C) 2005-2021 Free Software Foundation, Inc.
-
-- This program is free software; you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 3, or (at your option)
-- any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License
-- along with this program; if not, see <https://www.gnu.org/licenses/>. */
-+ You should have received a copy of the GNU Lesser General Public License
-+ along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- /* Written by Bruno Haible <bruno@clisp.org>, 2005.
- Based on GCC's gthr-posix.h, gthr-posix95.h. */
---- a/gnulib/lib/glthread/lock.h
-+++ b/gnulib/lib/glthread/lock.h
-@@ -1,18 +1,18 @@
- /* Locking in multithreaded situations.
- Copyright (C) 2005-2021 Free Software Foundation, Inc.
-
-- This program is free software; you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 3, or (at your option)
-- any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License
-- along with this program; if not, see <https://www.gnu.org/licenses/>. */
-+ You should have received a copy of the GNU Lesser General Public License
-+ along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- /* Written by Bruno Haible <bruno@clisp.org>, 2005.
- Based on GCC's gthr-posix.h, gthr-posix95.h, gthr-win32.h. */
---- a/gnulib/lib/glthread/threadlib.c
-+++ b/gnulib/lib/glthread/threadlib.c
-@@ -1,18 +1,18 @@
- /* Multithreading primitives.
- Copyright (C) 2005-2021 Free Software Foundation, Inc.
-
-- This program is free software; you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 3, or (at your option)
-- any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License
-- along with this program; if not, see <https://www.gnu.org/licenses/>. */
-+ You should have received a copy of the GNU Lesser General Public License
-+ along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- /* Written by Bruno Haible <bruno@clisp.org>, 2005. */
-
---- a/gnulib/lib/hard-locale.c
-+++ b/gnulib/lib/hard-locale.c
-@@ -3,17 +3,17 @@
- Copyright (C) 1997-1999, 2002-2004, 2006-2007, 2009-2021 Free Software
- Foundation, Inc.
-
-- This program is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 3 of the License, or
-- (at your option) any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License
-+ You should have received a copy of the GNU Lesser General Public License
- along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- #include <config.h>
---- a/gnulib/lib/hard-locale.h
-+++ b/gnulib/lib/hard-locale.h
-@@ -2,17 +2,17 @@
-
- Copyright (C) 1999, 2003-2004, 2009-2021 Free Software Foundation, Inc.
-
-- This program is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 3 of the License, or
-- (at your option) any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License
-+ You should have received a copy of the GNU Lesser General Public License
- along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- #ifndef HARD_LOCALE_H_
---- /dev/null
-+++ b/gnulib/lib/ialloc.c
-@@ -0,0 +1,21 @@
-+/* malloc with idx_t rather than size_t
-+
-+ Copyright 2021 Free Software Foundation, Inc.
-+
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 3 of the
-+ License, or (at your option) any later version.
-+
-+ This file is distributed in the hope that it will be useful,
-+ but WITHOUT ANY WARRANTY; without even the implied warranty of
-+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-+ GNU Lesser General Public License for more details.
-+
-+ You should have received a copy of the GNU Lesser General Public License
-+ along with this program. If not, see <https://www.gnu.org/licenses/>. */
-+
-+#include <config.h>
-+
-+#define IALLOC_INLINE _GL_EXTERN_INLINE
-+#include "ialloc.h"
---- /dev/null
-+++ b/gnulib/lib/ialloc.h
-@@ -0,0 +1,94 @@
-+/* ialloc.h -- malloc with idx_t rather than size_t
-+
-+ Copyright 2021 Free Software Foundation, Inc.
-+
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 3 of the
-+ License, or (at your option) any later version.
-+
-+ This file is distributed in the hope that it will be useful,
-+ but WITHOUT ANY WARRANTY; without even the implied warranty of
-+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-+ GNU Lesser General Public License for more details.
-+
-+ You should have received a copy of the GNU Lesser General Public License
-+ along with this program. If not, see <https://www.gnu.org/licenses/>. */
-+
-+#ifndef IALLOC_H_
-+#define IALLOC_H_
-+
-+#include "idx.h"
-+
-+#include <errno.h>
-+#include <stdint.h>
-+#include <stdlib.h>
-+
-+#ifndef _GL_INLINE_HEADER_BEGIN
-+ #error "Please include config.h first."
-+#endif
-+_GL_INLINE_HEADER_BEGIN
-+#ifndef IALLOC_INLINE
-+# define IALLOC_INLINE _GL_INLINE
-+#endif
-+
-+#ifdef __cplusplus
-+extern "C" {
-+#endif
-+
-+IALLOC_INLINE void * _GL_ATTRIBUTE_COLD
-+_gl_alloc_nomem (void)
-+{
-+ errno = ENOMEM;
-+ return NULL;
-+}
-+
-+IALLOC_INLINE void *
-+imalloc (idx_t s)
-+{
-+ return s <= SIZE_MAX ? malloc (s) : _gl_alloc_nomem ();
-+}
-+
-+IALLOC_INLINE void *
-+irealloc (void *p, idx_t s)
-+{
-+ /* Work around GNU realloc glitch by treating a zero size as if it
-+ were 1, so that returning NULL is equivalent to failing. */
-+ return s <= SIZE_MAX ? realloc (p, s | !s) : _gl_alloc_nomem ();
-+}
-+
-+IALLOC_INLINE void *
-+icalloc (idx_t n, idx_t s)
-+{
-+ if (SIZE_MAX < n)
-+ {
-+ if (s != 0)
-+ return _gl_alloc_nomem ();
-+ n = 0;
-+ }
-+ if (SIZE_MAX < s)
-+ {
-+ if (n != 0)
-+ return _gl_alloc_nomem ();
-+ s = 0;
-+ }
-+ return calloc (n, s);
-+}
-+
-+IALLOC_INLINE void *
-+ireallocarray (void *p, idx_t n, idx_t s)
-+{
-+ /* Work around GNU reallocarray glitch by treating a zero size as if
-+ it were 1, so that returning NULL is equivalent to failing. */
-+ if (n == 0 || s == 0)
-+ n = s = 1;
-+ return (n <= SIZE_MAX && s <= SIZE_MAX
-+ ? reallocarray (p, n, s)
-+ : _gl_alloc_nomem ());
-+}
-+
-+#ifdef __cplusplus
-+}
-+#endif
-+
-+#endif
---- /dev/null
-+++ b/gnulib/lib/idx.h
-@@ -0,0 +1,114 @@
-+/* A type for indices and sizes.
-+ Copyright (C) 2020-2021 Free Software Foundation, Inc.
-+ This file is part of the GNU C Library.
-+
-+ The GNU C Library is free software; you can redistribute it and/or
-+ modify it under the terms of the GNU Lesser General Public
-+ License as published by the Free Software Foundation; either
-+ version 2.1 of the License, or (at your option) any later version.
-+
-+ The GNU C Library is distributed in the hope that it will be useful,
-+ but WITHOUT ANY WARRANTY; without even the implied warranty of
-+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-+ Lesser General Public License for more details.
-+
-+ You should have received a copy of the GNU Lesser General Public
-+ License along with the GNU C Library; if not, see
-+ <https://www.gnu.org/licenses/>. */
-+
-+#ifndef _IDX_H
-+#define _IDX_H
-+
-+/* Get ptrdiff_t. */
-+#include <stddef.h>
-+
-+/* Get PTRDIFF_MAX. */
-+#include <stdint.h>
-+
-+/* The type 'idx_t' holds an (array) index or an (object) size.
-+ Its implementation promotes to a signed integer type,
-+ which can hold the values
-+ 0..2^63-1 (on 64-bit platforms) or
-+ 0..2^31-1 (on 32-bit platforms).
-+
-+ Why a signed integer type?
-+
-+ * Security: Signed types can be checked for overflow via
-+ '-fsanitize=undefined', but unsigned types cannot.
-+
-+ * Comparisons without surprises: ISO C99 § 6.3.1.8 specifies a few
-+ surprising results for comparisons, such as
-+
-+ (int) -3 < (unsigned long) 7 => false
-+ (int) -3 < (unsigned int) 7 => false
-+ and on 32-bit machines:
-+ (long) -3 < (unsigned int) 7 => false
-+
-+ This is surprising because the natural comparison order is by
-+ value in the realm of infinite-precision signed integers (ℤ).
-+
-+ The best way to get rid of such surprises is to use signed types
-+ for numerical integer values, and use unsigned types only for
-+ bit masks and enums.
-+
-+ Why not use 'size_t' directly?
-+
-+ * Because 'size_t' is an unsigned type, and a signed type is better.
-+ See above.
-+
-+ Why not use 'ptrdiff_t' directly?
-+
-+ * Maintainability: When reading and modifying code, it helps to know that
-+ a certain variable cannot have negative values. For example, when you
-+ have a loop
-+
-+ int n = ...;
-+ for (int i = 0; i < n; i++) ...
-+
-+ or
-+
-+ ptrdiff_t n = ...;
-+ for (ptrdiff_t i = 0; i < n; i++) ...
-+
-+ you have to ask yourself "what if n < 0?". Whereas in
-+
-+ idx_t n = ...;
-+ for (idx_t i = 0; i < n; i++) ...
-+
-+ you know that this case cannot happen.
-+
-+ Similarly, when a programmer writes
-+
-+ idx_t = ptr2 - ptr1;
-+
-+ there is an implied assertion that ptr1 and ptr2 point into the same
-+ object and that ptr1 <= ptr2.
-+
-+ * Being future-proof: In the future, range types (integers which are
-+ constrained to a certain range of values) may be added to C compilers
-+ or to the C standard. Several programming languages (Ada, Haskell,
-+ Common Lisp, Pascal) already have range types. Such range types may
-+ help producing good code and good warnings. The type 'idx_t' could
-+ then be typedef'ed to a range type that is signed after promotion. */
-+
-+/* In the future, idx_t could be typedef'ed to a signed range type.
-+ The clang "extended integer types", supported in Clang 11 or newer
-+ <https://clang.llvm.org/docs/LanguageExtensions.html#extended-integer-types>,
-+ are a special case of range types. However, these types don't support binary
-+ operators with plain integer types (e.g. expressions such as x > 1).
-+ Therefore, they don't behave like signed types (and not like unsigned types
-+ either). So, we cannot use them here. */
-+
-+/* Use the signed type 'ptrdiff_t'. */
-+/* Note: ISO C does not mandate that 'size_t' and 'ptrdiff_t' have the same
-+ size, but it is so on all platforms we have seen since 1990. */
-+typedef ptrdiff_t idx_t;
-+
-+/* IDX_MAX is the maximum value of an idx_t. */
-+#define IDX_MAX PTRDIFF_MAX
-+
-+/* So far no need has been found for an IDX_WIDTH macro.
-+ Perhaps there should be another macro IDX_VALUE_BITS that does not
-+ count the sign bit and is therefore one less than PTRDIFF_WIDTH. */
-+
-+#endif /* _IDX_H */
---- a/gnulib/lib/intprops.h
-+++ b/gnulib/lib/intprops.h
-@@ -3,16 +3,16 @@
- Copyright (C) 2001-2021 Free Software Foundation, Inc.
-
- This program is free software: you can redistribute it and/or modify it
-- under the terms of the GNU General Public License as published
-- by the Free Software Foundation; either version 3 of the License, or
-+ under the terms of the GNU Lesser General Public License as published
-+ by the Free Software Foundation; either version 2.1 of the License, or
- (at your option) any later version.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License
-+ You should have received a copy of the GNU Lesser General Public License
- along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- /* Written by Paul Eggert. */
-@@ -133,7 +133,8 @@
- operators might not yield numerically correct answers due to
- arithmetic overflow. They do not rely on undefined or
- implementation-defined behavior. Their implementations are simple
-- and straightforward, but they are a bit harder to use than the
-+ and straightforward, but they are harder to use and may be less
-+ efficient than the INT_<op>_WRAPV, INT_<op>_OK, and
- INT_<op>_OVERFLOW macros described below.
-
- Example usage:
-@@ -158,6 +159,9 @@
- must have minimum value MIN and maximum MAX. Unsigned types should
- use a zero MIN of the proper type.
-
-+ Because all arguments are subject to integer promotions, these
-+ macros typically do not work on types narrower than 'int'.
-+
- These macros are tuned for constant MIN and MAX. For commutative
- operations such as A + B, they are also tuned for constant B. */
-
-@@ -339,9 +343,15 @@
- arguments should not have side effects.
-
- The WRAPV macros are not constant expressions. They support only
-- +, binary -, and *. Because the WRAPV macros convert the result,
-- they report overflow in different circumstances than the OVERFLOW
-- macros do.
-+ +, binary -, and *.
-+
-+ Because the WRAPV macros convert the result, they report overflow
-+ in different circumstances than the OVERFLOW macros do. For
-+ example, in the typical case with 16-bit 'short' and 32-bit 'int',
-+ if A, B and R are all of type 'short' then INT_ADD_OVERFLOW (A, B)
-+ returns false because the addition cannot overflow after A and B
-+ are converted to 'int', whereas INT_ADD_WRAPV (A, B, &R) returns
-+ true or false depending on whether the sum fits into 'short'.
-
- These macros are tuned for their last input argument being a constant.
-
---- a/gnulib/lib/inttypes.in.h
-+++ b/gnulib/lib/inttypes.in.h
-@@ -2,17 +2,17 @@
- Written by Paul Eggert, Bruno Haible, Derek Price.
- This file is part of gnulib.
-
-- This program is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 3 of the License, or
-- (at your option) any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License
-+ You should have received a copy of the GNU Lesser General Public License
- along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- /*
---- a/gnulib/lib/iswblank.c
-+++ b/gnulib/lib/iswblank.c
-@@ -1,18 +1,18 @@
- /* Test wide character for being blank.
- Copyright (C) 2008-2021 Free Software Foundation, Inc.
-
-- This program is free software; you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 3, or (at your option)
-- any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License along
-- with this program; if not, see <https://www.gnu.org/licenses/>. */
-+ You should have received a copy of the GNU Lesser General Public License
-+ along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- #include <config.h>
-
---- a/gnulib/lib/iswdigit.c
-+++ b/gnulib/lib/iswdigit.c
-@@ -1,18 +1,18 @@
- /* Test wide character for being a digit.
- Copyright (C) 2020-2021 Free Software Foundation, Inc.
-
-- This program is free software; you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 3, or (at your option)
-- any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License along
-- with this program; if not, see <https://www.gnu.org/licenses/>. */
-+ You should have received a copy of the GNU Lesser General Public License
-+ along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- #include <config.h>
-
---- a/gnulib/lib/iswxdigit.c
-+++ b/gnulib/lib/iswxdigit.c
-@@ -1,18 +1,18 @@
- /* Test wide character for being a hexadecimal digit.
- Copyright (C) 2020-2021 Free Software Foundation, Inc.
-
-- This program is free software; you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 3, or (at your option)
-- any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License along
-- with this program; if not, see <https://www.gnu.org/licenses/>. */
-+ You should have received a copy of the GNU Lesser General Public License
-+ along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- #include <config.h>
-
---- a/gnulib/lib/itold.c
-+++ b/gnulib/lib/itold.c
-@@ -2,17 +2,17 @@
- Copyright (C) 2011-2021 Free Software Foundation, Inc.
- Written by Bruno Haible <bruno@clisp.org>, 2011.
-
-- This program is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 3 of the License, or
-- (at your option) any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License
-+ You should have received a copy of the GNU Lesser General Public License
- along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- #include <config.h>
---- a/gnulib/lib/langinfo.in.h
-+++ b/gnulib/lib/langinfo.in.h
-@@ -1,18 +1,18 @@
- /* Substitute for and wrapper around <langinfo.h>.
- Copyright (C) 2009-2021 Free Software Foundation, Inc.
-
-- This program is free software; you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 3, or (at your option)
-- any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License
-- along with this program; if not, see <https://www.gnu.org/licenses/>. */
-+ You should have received a copy of the GNU Lesser General Public License
-+ along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- /*
- * POSIX <langinfo.h> for platforms that lack it or have an incomplete one.
---- a/gnulib/lib/lc-charset-dispatch.c
-+++ b/gnulib/lib/lc-charset-dispatch.c
-@@ -1,17 +1,17 @@
- /* Dispatching based on the current locale's character encoding.
- Copyright (C) 2018-2021 Free Software Foundation, Inc.
-
-- This program is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 3 of the License, or
-- (at your option) any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License
-+ You should have received a copy of the GNU Lesser General Public License
- along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- /* Written by Bruno Haible <bruno@clisp.org>, 2018. */
---- a/gnulib/lib/lc-charset-dispatch.h
-+++ b/gnulib/lib/lc-charset-dispatch.h
-@@ -1,17 +1,17 @@
- /* Dispatching based on the current locale's character encoding.
- Copyright (C) 2018-2021 Free Software Foundation, Inc.
-
-- This program is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 3 of the License, or
-- (at your option) any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License
-+ You should have received a copy of the GNU Lesser General Public License
- along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- /* Written by Bruno Haible <bruno@clisp.org>, 2018. */
---- a/gnulib/lib/libc-config.h
-+++ b/gnulib/lib/libc-config.h
-@@ -3,16 +3,16 @@
- Copyright 2017-2021 Free Software Foundation, Inc.
-
- This program is free software; you can redistribute it and/or
-- modify it under the terms of the GNU General Public
-+ modify it under the terms of the GNU Lesser General Public
- License as published by the Free Software Foundation; either
-- version 3 of the License, or (at your option) any later version.
-+ version 2.1 of the License, or (at your option) any later version.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-- General Public License for more details.
-+ Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public
-+ You should have received a copy of the GNU Lesser General Public
- License along with this program; if not, see
- <https://www.gnu.org/licenses/>. */
-
-@@ -28,7 +28,10 @@
-
- When compiled as part of glibc this is a no-op; when compiled as
- part of Gnulib this includes Gnulib's <config.h> and defines macros
-- that glibc library code would normally assume. */
-+ that glibc library code would normally assume.
-+
-+ Note: This header file MUST NOT be included by public header files
-+ of Gnulib. */
-
- #include <config.h>
-
-@@ -71,7 +74,7 @@
- # endif
- #endif
-
--#ifndef __attribute_maybe_unused__
-+#ifndef __attribute_nonnull__
- /* <sys/cdefs.h> either does not exist, or is too old for Gnulib.
- Prepare to include <cdefs.h>, which is Gnulib's version of a
- more-recent glibc <sys/cdefs.h>. */
-@@ -80,13 +83,9 @@
- # ifndef _FEATURES_H
- # define _FEATURES_H 1
- # endif
--/* Define __WORDSIZE so that <cdefs.h> does not attempt to include
-- nonexistent files. Make it a syntax error, since Gnulib does not
-- use __WORDSIZE now, and if Gnulib uses it later the syntax error
-- will let us know that __WORDSIZE needs configuring. */
--# ifndef __WORDSIZE
--# define __WORDSIZE %%%
--# endif
-+/* Define __GNULIB_CDEFS so that <cdefs.h> does not attempt to include
-+ nonexistent files. */
-+# define __GNULIB_CDEFS
- /* Undef the macros unconditionally defined by our copy of glibc
- <sys/cdefs.h>, so that they do not clash with any system-defined
- versions. */
---- a/gnulib/lib/limits.in.h
-+++ b/gnulib/lib/limits.in.h
-@@ -2,18 +2,18 @@
-
- Copyright 2016-2021 Free Software Foundation, Inc.
-
-- This program is free software; you can redistribute it and/or
-- modify it under the terms of the GNU General Public License
-- as published by the Free Software Foundation; either version 3, or
-- (at your option) any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License
-- along with this program; if not, see <https://www.gnu.org/licenses/>. */
-+ You should have received a copy of the GNU Lesser General Public License
-+ along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- #if __GNUC__ >= 3
- @PRAGMA_SYSTEM_HEADER@
---- a/gnulib/lib/localcharset.c
-+++ b/gnulib/lib/localcharset.c
-@@ -2,18 +2,18 @@
-
- Copyright (C) 2000-2006, 2008-2021 Free Software Foundation, Inc.
-
-- This program is free software; you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 3, or (at your option)
-- any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License along
-- with this program; if not, see <https://www.gnu.org/licenses/>. */
-+ You should have received a copy of the GNU Lesser General Public License
-+ along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- /* Written by Bruno Haible <bruno@clisp.org>. */
-
---- a/gnulib/lib/localcharset.h
-+++ b/gnulib/lib/localcharset.h
-@@ -2,18 +2,18 @@
- Copyright (C) 2000-2003, 2009-2021 Free Software Foundation, Inc.
- This file is part of the GNU CHARSET Library.
-
-- This program is free software; you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 3, or (at your option)
-- any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License along
-- with this program; if not, see <https://www.gnu.org/licenses/>. */
-+ You should have received a copy of the GNU Lesser General Public License
-+ along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- #ifndef _LOCALCHARSET_H
- #define _LOCALCHARSET_H
---- a/gnulib/lib/locale.in.h
-+++ b/gnulib/lib/locale.in.h
-@@ -1,17 +1,17 @@
- /* A POSIX <locale.h>.
- Copyright (C) 2007-2021 Free Software Foundation, Inc.
-
-- This program is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 3 of the License, or
-- (at your option) any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License
-+ You should have received a copy of the GNU Lesser General Public License
- along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- #if __GNUC__ >= 3
---- a/gnulib/lib/localeconv.c
-+++ b/gnulib/lib/localeconv.c
-@@ -1,17 +1,17 @@
- /* Query locale dependent information for formatting numbers.
- Copyright (C) 2012-2021 Free Software Foundation, Inc.
-
-- This program is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 3 of the License, or
-- (at your option) any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License
-+ You should have received a copy of the GNU Lesser General Public License
- along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- #include <config.h>
---- a/gnulib/lib/malloc.c
-+++ b/gnulib/lib/malloc.c
-@@ -2,61 +2,50 @@
-
- Copyright (C) 1997-1998, 2006-2007, 2009-2021 Free Software Foundation, Inc.
-
-- This program is free software; you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 3, or (at your option)
-- any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License
-- along with this program; if not, see <https://www.gnu.org/licenses/>. */
-+ You should have received a copy of the GNU Lesser General Public License
-+ along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- /* written by Jim Meyering and Bruno Haible */
-
- #define _GL_USE_STDLIB_ALLOC 1
- #include <config.h>
--/* Only the AC_FUNC_MALLOC macro defines 'malloc' already in config.h. */
--#ifdef malloc
--# define NEED_MALLOC_GNU 1
--# undef malloc
--/* Whereas the gnulib module 'malloc-gnu' defines HAVE_MALLOC_GNU. */
--#elif GNULIB_MALLOC_GNU && !HAVE_MALLOC_GNU
--# define NEED_MALLOC_GNU 1
--#endif
-
- #include <stdlib.h>
-
--/* A function definition is only needed if NEED_MALLOC_GNU is defined above
-- or if the module 'malloc-posix' requests it. */
--#if NEED_MALLOC_GNU || (GNULIB_MALLOC_POSIX && !HAVE_MALLOC_POSIX)
-+#include <errno.h>
-
--# include <errno.h>
-+#include "xalloc-oversized.h"
-
--/* Allocate an N-byte block of memory from the heap.
-- If N is zero, allocate a 1-byte block. */
-+/* Allocate an N-byte block of memory from the heap, even if N is 0. */
-
- void *
- rpl_malloc (size_t n)
- {
-- void *result;
--
--# if NEED_MALLOC_GNU
- if (n == 0)
- n = 1;
--# endif
-
-- result = malloc (n);
-+ if (xalloc_oversized (n, 1))
-+ {
-+ errno = ENOMEM;
-+ return NULL;
-+ }
-+
-+ void *result = malloc (n);
-
--# if !HAVE_MALLOC_POSIX
-+#if !HAVE_MALLOC_POSIX
- if (result == NULL)
- errno = ENOMEM;
--# endif
-+#endif
-
- return result;
- }
--
--#endif
---- a/gnulib/lib/malloc/dynarray-skeleton.c
-+++ b/gnulib/lib/malloc/dynarray-skeleton.c
-@@ -3,16 +3,16 @@
- This file is part of the GNU C Library.
-
- The GNU C Library is free software; you can redistribute it and/or
-- modify it under the terms of the GNU General Public
-+ modify it under the terms of the GNU Lesser General Public
- License as published by the Free Software Foundation; either
-- version 3 of the License, or (at your option) any later version.
-+ version 2.1 of the License, or (at your option) any later version.
-
- The GNU C Library is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-- General Public License for more details.
-+ Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public
-+ You should have received a copy of the GNU Lesser General Public
- License along with the GNU C Library; if not, see
- <https://www.gnu.org/licenses/>. */
-
---- a/gnulib/lib/malloc/dynarray.h
-+++ b/gnulib/lib/malloc/dynarray.h
-@@ -3,16 +3,16 @@
- This file is part of the GNU C Library.
-
- The GNU C Library is free software; you can redistribute it and/or
-- modify it under the terms of the GNU General Public
-+ modify it under the terms of the GNU Lesser General Public
- License as published by the Free Software Foundation; either
-- version 3 of the License, or (at your option) any later version.
-+ version 2.1 of the License, or (at your option) any later version.
-
- The GNU C Library is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-- General Public License for more details.
-+ Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public
-+ You should have received a copy of the GNU Lesser General Public
- License along with the GNU C Library; if not, see
- <https://www.gnu.org/licenses/>. */
-
---- a/gnulib/lib/malloc/dynarray_at_failure.c
-+++ b/gnulib/lib/malloc/dynarray_at_failure.c
-@@ -3,16 +3,16 @@
- This file is part of the GNU C Library.
-
- The GNU C Library is free software; you can redistribute it and/or
-- modify it under the terms of the GNU General Public
-+ modify it under the terms of the GNU Lesser General Public
- License as published by the Free Software Foundation; either
-- version 3 of the License, or (at your option) any later version.
-+ version 2.1 of the License, or (at your option) any later version.
-
- The GNU C Library is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-- General Public License for more details.
-+ Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public
-+ You should have received a copy of the GNU Lesser General Public
- License along with the GNU C Library; if not, see
- <https://www.gnu.org/licenses/>. */
-
---- a/gnulib/lib/malloc/dynarray_emplace_enlarge.c
-+++ b/gnulib/lib/malloc/dynarray_emplace_enlarge.c
-@@ -3,16 +3,16 @@
- This file is part of the GNU C Library.
-
- The GNU C Library is free software; you can redistribute it and/or
-- modify it under the terms of the GNU General Public
-+ modify it under the terms of the GNU Lesser General Public
- License as published by the Free Software Foundation; either
-- version 3 of the License, or (at your option) any later version.
-+ version 2.1 of the License, or (at your option) any later version.
-
- The GNU C Library is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-- General Public License for more details.
-+ Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public
-+ You should have received a copy of the GNU Lesser General Public
- License along with the GNU C Library; if not, see
- <https://www.gnu.org/licenses/>. */
-
---- a/gnulib/lib/malloc/dynarray_finalize.c
-+++ b/gnulib/lib/malloc/dynarray_finalize.c
-@@ -3,16 +3,16 @@
- This file is part of the GNU C Library.
-
- The GNU C Library is free software; you can redistribute it and/or
-- modify it under the terms of the GNU General Public
-+ modify it under the terms of the GNU Lesser General Public
- License as published by the Free Software Foundation; either
-- version 3 of the License, or (at your option) any later version.
-+ version 2.1 of the License, or (at your option) any later version.
-
- The GNU C Library is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-- General Public License for more details.
-+ Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public
-+ You should have received a copy of the GNU Lesser General Public
- License along with the GNU C Library; if not, see
- <https://www.gnu.org/licenses/>. */
-
---- a/gnulib/lib/malloc/dynarray_resize.c
-+++ b/gnulib/lib/malloc/dynarray_resize.c
-@@ -3,16 +3,16 @@
- This file is part of the GNU C Library.
-
- The GNU C Library is free software; you can redistribute it and/or
-- modify it under the terms of the GNU General Public
-+ modify it under the terms of the GNU Lesser General Public
- License as published by the Free Software Foundation; either
-- version 3 of the License, or (at your option) any later version.
-+ version 2.1 of the License, or (at your option) any later version.
-
- The GNU C Library is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-- General Public License for more details.
-+ Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public
-+ You should have received a copy of the GNU Lesser General Public
- License along with the GNU C Library; if not, see
- <https://www.gnu.org/licenses/>. */
-
---- a/gnulib/lib/malloc/dynarray_resize_clear.c
-+++ b/gnulib/lib/malloc/dynarray_resize_clear.c
-@@ -3,16 +3,16 @@
- This file is part of the GNU C Library.
-
- The GNU C Library is free software; you can redistribute it and/or
-- modify it under the terms of the GNU General Public
-+ modify it under the terms of the GNU Lesser General Public
- License as published by the Free Software Foundation; either
-- version 3 of the License, or (at your option) any later version.
-+ version 2.1 of the License, or (at your option) any later version.
-
- The GNU C Library is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-- General Public License for more details.
-+ Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public
-+ You should have received a copy of the GNU Lesser General Public
- License along with the GNU C Library; if not, see
- <https://www.gnu.org/licenses/>. */
-
---- a/gnulib/lib/malloca.c
-+++ b/gnulib/lib/malloca.c
-@@ -2,18 +2,18 @@
- Copyright (C) 2003, 2006-2007, 2009-2021 Free Software Foundation, Inc.
- Written by Bruno Haible <bruno@clisp.org>, 2003, 2018.
-
-- This program is free software; you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 3, or (at your option)
-- any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License
-- along with this program; if not, see <https://www.gnu.org/licenses/>. */
-+ You should have received a copy of the GNU Lesser General Public License
-+ along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- #define _GL_USE_STDLIB_ALLOC 1
- #include <config.h>
-@@ -21,6 +21,8 @@
- /* Specification. */
- #include "malloca.h"
-
-+#include "idx.h"
-+#include "intprops.h"
- #include "verify.h"
-
- /* The speed critical point in this file is freea() applied to an alloca()
-@@ -45,9 +47,9 @@ mmalloca (size_t n)
- #if HAVE_ALLOCA
- /* Allocate one more word, used to determine the address to pass to freea(),
- and room for the alignment ≡ sa_alignment_max mod 2*sa_alignment_max. */
-- size_t nplus = n + sizeof (small_t) + 2 * sa_alignment_max - 1;
--
-- if (nplus >= n)
-+ int plus = sizeof (small_t) + 2 * sa_alignment_max - 1;
-+ idx_t nplus;
-+ if (!INT_ADD_WRAPV (n, plus, &nplus) && !xalloc_oversized (nplus, 1))
- {
- char *mem = (char *) malloc (nplus);
-
---- a/gnulib/lib/malloca.h
-+++ b/gnulib/lib/malloca.h
-@@ -2,18 +2,18 @@
- Copyright (C) 2003-2007, 2009-2021 Free Software Foundation, Inc.
- Written by Bruno Haible <bruno@clisp.org>, 2003.
-
-- This program is free software; you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 3, or (at your option)
-- any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License
-- along with this program; if not, see <https://www.gnu.org/licenses/>. */
-+ You should have received a copy of the GNU Lesser General Public License
-+ along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- #ifndef _MALLOCA_H
- #define _MALLOCA_H
-@@ -76,9 +76,10 @@ extern void freea (void *p);
-
- /* nmalloca(N,S) is an overflow-safe variant of malloca (N * S).
- It allocates an array of N objects, each with S bytes of memory,
-- on the stack. S must be positive and N must be nonnegative.
-+ on the stack. N and S should be nonnegative and free of side effects.
- The array must be freed using freea() before the function returns. */
--#define nmalloca(n, s) (xalloc_oversized (n, s) ? NULL : malloca ((n) * (s)))
-+#define nmalloca(n, s) \
-+ (xalloc_oversized (n, s) ? NULL : malloca ((n) * (size_t) (s)))
-
-
- #ifdef __cplusplus
---- a/gnulib/lib/mbchar.c
-+++ b/gnulib/lib/mbchar.c
-@@ -1,16 +1,16 @@
- /* Copyright (C) 2001, 2006, 2009-2021 Free Software Foundation, Inc.
-
-- This program is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 3 of the License, or
-- (at your option) any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License
-+ You should have received a copy of the GNU Lesser General Public License
- along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
-
---- a/gnulib/lib/mbchar.h
-+++ b/gnulib/lib/mbchar.h
-@@ -1,17 +1,17 @@
- /* Multibyte character data type.
- Copyright (C) 2001, 2005-2007, 2009-2021 Free Software Foundation, Inc.
-
-- This program is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 3 of the License, or
-- (at your option) any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License
-+ You should have received a copy of the GNU Lesser General Public License
- along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- /* Written by Bruno Haible <bruno@clisp.org>. */
---- a/gnulib/lib/mbiter.c
-+++ b/gnulib/lib/mbiter.c
-@@ -1,3 +1,21 @@
-+/* Iterating through multibyte strings: macros for multi-byte encodings.
-+
-+ Copyright (C) 2012-2021 Free Software Foundation, Inc.
-+
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-+
-+ This file is distributed in the hope that it will be useful,
-+ but WITHOUT ANY WARRANTY; without even the implied warranty of
-+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-+ GNU Lesser General Public License for more details.
-+
-+ You should have received a copy of the GNU Lesser General Public License
-+ along with this program. If not, see <https://www.gnu.org/licenses/>. */
-+
- #include <config.h>
-+
- #define MBITER_INLINE _GL_EXTERN_INLINE
- #include "mbiter.h"
---- a/gnulib/lib/mbiter.h
-+++ b/gnulib/lib/mbiter.h
-@@ -1,17 +1,17 @@
- /* Iterating through multibyte strings: macros for multi-byte encodings.
- Copyright (C) 2001, 2005, 2007, 2009-2021 Free Software Foundation, Inc.
-
-- This program is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 3 of the License, or
-- (at your option) any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License
-+ You should have received a copy of the GNU Lesser General Public License
- along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- /* Written by Bruno Haible <bruno@clisp.org>. */
---- a/gnulib/lib/mbrtowc-impl-utf8.h
-+++ b/gnulib/lib/mbrtowc-impl-utf8.h
-@@ -1,17 +1,17 @@
- /* Convert multibyte character to wide character.
- Copyright (C) 1999-2002, 2005-2021 Free Software Foundation, Inc.
-
-- This program is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 3 of the License, or
-- (at your option) any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License
-+ You should have received a copy of the GNU Lesser General Public License
- along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- /* Written by Bruno Haible <bruno@clisp.org>, 2008. */
-@@ -96,7 +96,7 @@
-
- if ((c2 ^ 0x80) < 0x40
- && (c >= 0xf1 || c2 >= 0x90)
-- && (c < 0xf4 || (c == 0xf4 && c2 < 0x90)))
-+ && (c < 0xf4 || (/* c == 0xf4 && */ c2 < 0x90)))
- {
- if (m == 2)
- goto incomplete;
---- a/gnulib/lib/mbrtowc-impl.h
-+++ b/gnulib/lib/mbrtowc-impl.h
-@@ -1,17 +1,17 @@
- /* Convert multibyte character to wide character.
- Copyright (C) 1999-2002, 2005-2021 Free Software Foundation, Inc.
-
-- This program is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 3 of the License, or
-- (at your option) any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License
-+ You should have received a copy of the GNU Lesser General Public License
- along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- /* Written by Bruno Haible <bruno@clisp.org>, 2008. */
---- a/gnulib/lib/mbrtowc.c
-+++ b/gnulib/lib/mbrtowc.c
-@@ -2,17 +2,17 @@
- Copyright (C) 1999-2002, 2005-2021 Free Software Foundation, Inc.
- Written by Bruno Haible <bruno@clisp.org>, 2008.
-
-- This program is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 3 of the License, or
-- (at your option) any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License
-+ You should have received a copy of the GNU Lesser General Public License
- along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- #include <config.h>
---- a/gnulib/lib/mbscasecmp.c
-+++ b/gnulib/lib/mbscasecmp.c
-@@ -3,17 +3,17 @@
- Written by Bruno Haible <bruno@clisp.org>, 2005,
- based on earlier glibc code.
-
-- This program is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 3 of the License, or
-- (at your option) any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 3 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License
-+ You should have received a copy of the GNU Lesser General Public License
- along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- #include <config.h>
---- a/gnulib/lib/mbschr.c
-+++ b/gnulib/lib/mbschr.c
-@@ -2,17 +2,17 @@
- Copyright (C) 2007-2021 Free Software Foundation, Inc.
- Written by Bruno Haible <bruno@clisp.org>, 2007.
-
-- This program is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 3 of the License, or
-- (at your option) any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 3 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License
-+ You should have received a copy of the GNU Lesser General Public License
- along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- #include <config.h>
---- a/gnulib/lib/mbsinit.c
-+++ b/gnulib/lib/mbsinit.c
-@@ -2,17 +2,17 @@
- Copyright (C) 2008-2021 Free Software Foundation, Inc.
- Written by Bruno Haible <bruno@clisp.org>, 2008.
-
-- This program is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 3 of the License, or
-- (at your option) any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License
-+ You should have received a copy of the GNU Lesser General Public License
- along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- #include <config.h>
---- a/gnulib/lib/mbslen.c
-+++ b/gnulib/lib/mbslen.c
-@@ -2,17 +2,17 @@
- Copyright (C) 2007-2021 Free Software Foundation, Inc.
- Written by Bruno Haible <bruno@clisp.org>, 2007.
-
-- This program is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 3 of the License, or
-- (at your option) any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 3 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License
-+ You should have received a copy of the GNU Lesser General Public License
- along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- #include <config.h>
---- a/gnulib/lib/mbsncasecmp.c
-+++ b/gnulib/lib/mbsncasecmp.c
-@@ -3,17 +3,17 @@
- Written by Bruno Haible <bruno@clisp.org>, 2005,
- based on earlier glibc code.
-
-- This program is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 3 of the License, or
-- (at your option) any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 3 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License
-+ You should have received a copy of the GNU Lesser General Public License
- along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- #include <config.h>
---- a/gnulib/lib/mbsstr.c
-+++ b/gnulib/lib/mbsstr.c
-@@ -2,17 +2,17 @@
- Copyright (C) 2005-2021 Free Software Foundation, Inc.
- Written by Bruno Haible <bruno@clisp.org>, 2005.
-
-- This program is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 3 of the License, or
-- (at your option) any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 3 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License
-+ You should have received a copy of the GNU Lesser General Public License
- along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- #include <config.h>
---- a/gnulib/lib/mbtowc-impl.h
-+++ b/gnulib/lib/mbtowc-impl.h
-@@ -2,17 +2,17 @@
- Copyright (C) 2011-2021 Free Software Foundation, Inc.
- Written by Bruno Haible <bruno@clisp.org>, 2011.
-
-- This program is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 3 of the License, or
-- (at your option) any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License
-+ You should have received a copy of the GNU Lesser General Public License
- along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- /* We don't need a static internal state, because the encoding is not state
---- a/gnulib/lib/mbtowc-lock.c
-+++ b/gnulib/lib/mbtowc-lock.c
-@@ -1,17 +1,17 @@
- /* Return the internal lock used by mbrtowc and mbrtoc32.
- Copyright (C) 2019-2021 Free Software Foundation, Inc.
-
-- This program is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 3 of the License, or
-- (at your option) any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License
-+ You should have received a copy of the GNU Lesser General Public License
- along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- /* Written by Bruno Haible <bruno@clisp.org>, 2019-2020. */
---- a/gnulib/lib/mbtowc-lock.h
-+++ b/gnulib/lib/mbtowc-lock.h
-@@ -1,17 +1,17 @@
- /* Use the internal lock used by mbrtowc and mbrtoc32.
- Copyright (C) 2019-2021 Free Software Foundation, Inc.
-
-- This program is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 3 of the License, or
-- (at your option) any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License
-+ You should have received a copy of the GNU Lesser General Public License
- along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- /* Written by Bruno Haible <bruno@clisp.org>, 2019-2020. */
---- a/gnulib/lib/mbtowc.c
-+++ b/gnulib/lib/mbtowc.c
-@@ -2,17 +2,17 @@
- Copyright (C) 2011-2021 Free Software Foundation, Inc.
- Written by Bruno Haible <bruno@clisp.org>, 2011.
-
-- This program is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 3 of the License, or
-- (at your option) any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License
-+ You should have received a copy of the GNU Lesser General Public License
- along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- #include <config.h>
---- a/gnulib/lib/mbuiter.c
-+++ b/gnulib/lib/mbuiter.c
-@@ -1,3 +1,20 @@
-+/* Iterating through multibyte strings: macros for multi-byte encodings.
-+ Copyright (C) 2012-2021 Free Software Foundation, Inc.
-+
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 3 of the
-+ License, or (at your option) any later version.
-+
-+ This file is distributed in the hope that it will be useful,
-+ but WITHOUT ANY WARRANTY; without even the implied warranty of
-+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-+ GNU Lesser General Public License for more details.
-+
-+ You should have received a copy of the GNU Lesser General Public License
-+ along with this program. If not, see <https://www.gnu.org/licenses/>. */
-+
- #include <config.h>
-+
- #define MBUITER_INLINE _GL_EXTERN_INLINE
- #include "mbuiter.h"
---- a/gnulib/lib/mbuiter.h
-+++ b/gnulib/lib/mbuiter.h
-@@ -1,17 +1,17 @@
- /* Iterating through multibyte strings: macros for multi-byte encodings.
- Copyright (C) 2001, 2005, 2007, 2009-2021 Free Software Foundation, Inc.
-
-- This program is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 3 of the License, or
-- (at your option) any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 3 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License
-+ You should have received a copy of the GNU Lesser General Public License
- along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- /* Written by Bruno Haible <bruno@clisp.org>. */
---- a/gnulib/lib/memchr.c
-+++ b/gnulib/lib/memchr.c
-@@ -7,21 +7,21 @@
- adaptation to memchr suggested by Dick Karpinski (dick@cca.ucsf.edu),
- and implemented by Roland McGrath (roland@ai.mit.edu).
-
--NOTE: The canonical source of this file is maintained with the GNU C Library.
--Bugs can be reported to bug-glibc@prep.ai.mit.edu.
-+ NOTE: The canonical source of this file is maintained with the GNU C Library.
-+ Bugs can be reported to bug-glibc@prep.ai.mit.edu.
-
--This program is free software: you can redistribute it and/or modify it
--under the terms of the GNU General Public License as published by the
--Free Software Foundation; either version 3 of the License, or any
--later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
--This program is distributed in the hope that it will be useful,
--but WITHOUT ANY WARRANTY; without even the implied warranty of
--MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
--GNU General Public License for more details.
-+ This file is distributed in the hope that it will be useful,
-+ but WITHOUT ANY WARRANTY; without even the implied warranty of
-+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-+ GNU Lesser General Public License for more details.
-
--You should have received a copy of the GNU General Public License
--along with this program. If not, see <https://www.gnu.org/licenses/>. */
-+ You should have received a copy of the GNU Lesser General Public License
-+ along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- #ifndef _LIBC
- # include <config.h>
---- a/gnulib/lib/memchr.valgrind
-+++ b/gnulib/lib/memchr.valgrind
-@@ -2,17 +2,17 @@
-
- # Copyright (C) 2009-2021 Free Software Foundation, Inc.
- #
--# This program is free software: you can redistribute it and/or modify
--# it under the terms of the GNU General Public License as published by
--# the Free Software Foundation; either version 3 of the License, or
--# (at your option) any later version.
-+# This file is free software: you can redistribute it and/or modify
-+# it under the terms of the GNU Lesser General Public License as
-+# published by the Free Software Foundation; either version 2.1 of the
-+# License, or (at your option) any later version.
- #
--# This program is distributed in the hope that it will be useful,
-+# This file is distributed in the hope that it will be useful,
- # but WITHOUT ANY WARRANTY; without even the implied warranty of
- # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
--# GNU General Public License for more details.
-+# GNU Lesser General Public License for more details.
- #
--# You should have received a copy of the GNU General Public License
-+# You should have received a copy of the GNU Lesser General Public License
- # along with this program. If not, see <https://www.gnu.org/licenses/>.
-
- # POSIX states that when the character is found, memchr must not read extra
---- a/gnulib/lib/mempcpy.c
-+++ b/gnulib/lib/mempcpy.c
-@@ -1,18 +1,18 @@
- /* Copy memory area and return pointer after last written byte.
- Copyright (C) 2003, 2007, 2009-2021 Free Software Foundation, Inc.
-
-- This program is free software; you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 3, or (at your option)
-- any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License
-- along with this program; if not, see <https://www.gnu.org/licenses/>. */
-+ You should have received a copy of the GNU Lesser General Public License
-+ along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- #include <config.h>
-
---- a/gnulib/lib/memrchr.c
-+++ b/gnulib/lib/memrchr.c
-@@ -9,17 +9,17 @@
- adaptation to memchr suggested by Dick Karpinski (dick@cca.ucsf.edu),
- and implemented by Roland McGrath (roland@ai.mit.edu).
-
-- This program is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 3 of the License, or
-- (at your option) any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 3 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License
-+ You should have received a copy of the GNU Lesser General Public License
- along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- #if defined _LIBC
---- /dev/null
-+++ b/gnulib/lib/minmax.h
-@@ -0,0 +1,60 @@
-+/* MIN, MAX macros.
-+ Copyright (C) 1995, 1998, 2001, 2003, 2005, 2009-2021 Free Software
-+ Foundation, Inc.
-+
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-+
-+ This file is distributed in the hope that it will be useful,
-+ but WITHOUT ANY WARRANTY; without even the implied warranty of
-+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-+ GNU Lesser General Public License for more details.
-+
-+ You should have received a copy of the GNU Lesser General Public License
-+ along with this program. If not, see <https://www.gnu.org/licenses/>. */
-+
-+#ifndef _MINMAX_H
-+#define _MINMAX_H
-+
-+/* Note: MIN, MAX are also defined in <sys/param.h> on some systems
-+ (glibc, IRIX, HP-UX, OSF/1). Therefore you might get warnings about
-+ MIN, MAX macro redefinitions on some systems; the workaround is to
-+ #include this file as the last one among the #include list. */
-+
-+/* Before we define the following symbols we get the <limits.h> file
-+ since otherwise we get redefinitions on some systems if <limits.h> is
-+ included after this file. Likewise for <sys/param.h>.
-+ If more than one of these system headers define MIN and MAX, pick just
-+ one of the headers (because the definitions most likely are the same). */
-+#if HAVE_MINMAX_IN_LIMITS_H
-+# include <limits.h>
-+#elif HAVE_MINMAX_IN_SYS_PARAM_H
-+# include <sys/param.h>
-+#endif
-+
-+/* Note: MIN and MAX should be used with two arguments of the
-+ same type. They might not return the minimum and maximum of their two
-+ arguments, if the arguments have different types or have unusual
-+ floating-point values. For example, on a typical host with 32-bit 'int',
-+ 64-bit 'long long', and 64-bit IEEE 754 'double' types:
-+
-+ MAX (-1, 2147483648) returns 4294967295.
-+ MAX (9007199254740992.0, 9007199254740993) returns 9007199254740992.0.
-+ MAX (NaN, 0.0) returns 0.0.
-+ MAX (+0.0, -0.0) returns -0.0.
-+
-+ and in each case the answer is in some sense bogus. */
-+
-+/* MAX(a,b) returns the maximum of A and B. */
-+#ifndef MAX
-+# define MAX(a,b) ((a) > (b) ? (a) : (b))
-+#endif
-+
-+/* MIN(a,b) returns the minimum of A and B. */
-+#ifndef MIN
-+# define MIN(a,b) ((a) < (b) ? (a) : (b))
-+#endif
-+
-+#endif /* _MINMAX_H */
---- a/gnulib/lib/msvc-inval.c
-+++ b/gnulib/lib/msvc-inval.c
-@@ -1,18 +1,18 @@
- /* Invalid parameter handler for MSVC runtime libraries.
- Copyright (C) 2011-2021 Free Software Foundation, Inc.
-
-- This program is free software; you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 3, or (at your option)
-- any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License along
-- with this program; if not, see <https://www.gnu.org/licenses/>. */
-+ You should have received a copy of the GNU Lesser General Public License
-+ along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- #include <config.h>
-
---- a/gnulib/lib/msvc-inval.h
-+++ b/gnulib/lib/msvc-inval.h
-@@ -1,18 +1,18 @@
- /* Invalid parameter handler for MSVC runtime libraries.
- Copyright (C) 2011-2021 Free Software Foundation, Inc.
-
-- This program is free software; you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 3, or (at your option)
-- any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License along
-- with this program; if not, see <https://www.gnu.org/licenses/>. */
-+ You should have received a copy of the GNU Lesser General Public License
-+ along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- #ifndef _MSVC_INVAL_H
- #define _MSVC_INVAL_H
---- a/gnulib/lib/msvc-nothrow.c
-+++ b/gnulib/lib/msvc-nothrow.c
-@@ -2,18 +2,18 @@
- with MSVC runtime libraries.
- Copyright (C) 2011-2021 Free Software Foundation, Inc.
-
-- This program is free software; you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 3, or (at your option)
-- any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License along
-- with this program; if not, see <https://www.gnu.org/licenses/>. */
-+ You should have received a copy of the GNU Lesser General Public License
-+ along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- #include <config.h>
-
---- a/gnulib/lib/msvc-nothrow.h
-+++ b/gnulib/lib/msvc-nothrow.h
-@@ -2,18 +2,18 @@
- with MSVC runtime libraries.
- Copyright (C) 2011-2021 Free Software Foundation, Inc.
-
-- This program is free software; you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 3, or (at your option)
-- any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License along
-- with this program; if not, see <https://www.gnu.org/licenses/>. */
-+ You should have received a copy of the GNU Lesser General Public License
-+ along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- #ifndef _MSVC_NOTHROW_H
- #define _MSVC_NOTHROW_H
---- a/gnulib/lib/nl_langinfo-lock.c
-+++ b/gnulib/lib/nl_langinfo-lock.c
-@@ -1,17 +1,17 @@
- /* Return the internal lock used by nl_langinfo.
- Copyright (C) 2019-2021 Free Software Foundation, Inc.
-
-- This program is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 3 of the License, or
-- (at your option) any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License
-+ You should have received a copy of the GNU Lesser General Public License
- along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- /* Written by Bruno Haible <bruno@clisp.org>, 2019-2020. */
---- a/gnulib/lib/nl_langinfo.c
-+++ b/gnulib/lib/nl_langinfo.c
-@@ -2,17 +2,17 @@
-
- Copyright (C) 2007-2021 Free Software Foundation, Inc.
-
-- This program is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 3 of the License, or
-- (at your option) any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License
-+ You should have received a copy of the GNU Lesser General Public License
- along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- #include <config.h>
---- a/gnulib/lib/open.c
-+++ b/gnulib/lib/open.c
-@@ -1,17 +1,17 @@
- /* Open a descriptor to a file.
- Copyright (C) 2007-2021 Free Software Foundation, Inc.
-
-- This program is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 3 of the License, or
-- (at your option) any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License
-+ You should have received a copy of the GNU Lesser General Public License
- along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- /* Written by Bruno Haible <bruno@clisp.org>, 2007. */
---- a/gnulib/lib/pathmax.h
-+++ b/gnulib/lib/pathmax.h
-@@ -2,18 +2,18 @@
- Copyright (C) 1992, 1999, 2001, 2003, 2005, 2009-2021 Free Software
- Foundation, Inc.
-
-- This program is free software; you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 3, or (at your option)
-- any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License
-- along with this program; if not, see <https://www.gnu.org/licenses/>. */
-+ You should have received a copy of the GNU Lesser General Public License
-+ along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- #ifndef _PATHMAX_H
- # define _PATHMAX_H
---- a/gnulib/lib/printf-args.c
-+++ b/gnulib/lib/printf-args.c
-@@ -2,18 +2,18 @@
- Copyright (C) 1999, 2002-2003, 2005-2007, 2009-2021 Free Software
- Foundation, Inc.
-
-- This program is free software; you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 3, or (at your option)
-- any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License along
-- with this program; if not, see <https://www.gnu.org/licenses/>. */
-+ You should have received a copy of the GNU Lesser General Public License
-+ along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- /* This file can be parametrized with the following macros:
- ENABLE_UNISTDIO Set to 1 to enable the unistdio extensions.
---- a/gnulib/lib/printf-args.h
-+++ b/gnulib/lib/printf-args.h
-@@ -2,18 +2,18 @@
- Copyright (C) 1999, 2002-2003, 2006-2007, 2011-2021 Free Software
- Foundation, Inc.
-
-- This program is free software; you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 3, or (at your option)
-- any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License along
-- with this program; if not, see <https://www.gnu.org/licenses/>. */
-+ You should have received a copy of the GNU Lesser General Public License
-+ along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- #ifndef _PRINTF_ARGS_H
- #define _PRINTF_ARGS_H
---- a/gnulib/lib/printf-parse.c
-+++ b/gnulib/lib/printf-parse.c
-@@ -1,18 +1,18 @@
- /* Formatted output to strings.
- Copyright (C) 1999-2000, 2002-2003, 2006-2021 Free Software Foundation, Inc.
-
-- This program is free software; you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 3, or (at your option)
-- any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License along
-- with this program; if not, see <https://www.gnu.org/licenses/>. */
-+ You should have received a copy of the GNU Lesser General Public License
-+ along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- /* This file can be parametrized with the following macros:
- CHAR_T The element type of the format string.
-@@ -48,16 +48,7 @@
- #include <stddef.h>
-
- /* Get intmax_t. */
--#if defined IN_LIBINTL || defined IN_LIBASPRINTF
--# if HAVE_STDINT_H_WITH_UINTMAX
--# include <stdint.h>
--# endif
--# if HAVE_INTTYPES_H_WITH_UINTMAX
--# include <inttypes.h>
--# endif
--#else
--# include <stdint.h>
--#endif
-+#include <stdint.h>
-
- /* malloc(), realloc(), free(). */
- #include <stdlib.h>
---- a/gnulib/lib/printf-parse.h
-+++ b/gnulib/lib/printf-parse.h
-@@ -2,18 +2,18 @@
- Copyright (C) 1999, 2002-2003, 2005, 2007, 2010-2021 Free Software
- Foundation, Inc.
-
-- This program is free software; you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 3, or (at your option)
-- any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License along
-- with this program; if not, see <https://www.gnu.org/licenses/>. */
-+ You should have received a copy of the GNU Lesser General Public License
-+ along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- #ifndef _PRINTF_PARSE_H
- #define _PRINTF_PARSE_H
---- /dev/null
-+++ b/gnulib/lib/realloc.c
-@@ -0,0 +1,63 @@
-+/* realloc() function that is glibc compatible.
-+
-+ Copyright (C) 1997, 2003-2004, 2006-2007, 2009-2021 Free Software
-+ Foundation, Inc.
-+
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-+
-+ This file is distributed in the hope that it will be useful,
-+ but WITHOUT ANY WARRANTY; without even the implied warranty of
-+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-+ GNU Lesser General Public License for more details.
-+
-+ You should have received a copy of the GNU Lesser General Public License
-+ along with this program. If not, see <https://www.gnu.org/licenses/>. */
-+
-+/* written by Jim Meyering and Bruno Haible */
-+
-+#include <config.h>
-+
-+#include <stdlib.h>
-+
-+#include <errno.h>
-+
-+#include "xalloc-oversized.h"
-+
-+/* Call the system's realloc below. This file does not define
-+ _GL_USE_STDLIB_ALLOC because it needs Gnulib's malloc if present. */
-+#undef realloc
-+
-+/* Change the size of an allocated block of memory P to N bytes,
-+ with error checking. If P is NULL, use malloc. Otherwise if N is zero,
-+ free P and return NULL. */
-+
-+void *
-+rpl_realloc (void *p, size_t n)
-+{
-+ if (p == NULL)
-+ return malloc (n);
-+
-+ if (n == 0)
-+ {
-+ free (p);
-+ return NULL;
-+ }
-+
-+ if (xalloc_oversized (n, 1))
-+ {
-+ errno = ENOMEM;
-+ return NULL;
-+ }
-+
-+ void *result = realloc (p, n);
-+
-+#if !HAVE_MALLOC_POSIX
-+ if (result == NULL)
-+ errno = ENOMEM;
-+#endif
-+
-+ return result;
-+}
---- /dev/null
-+++ b/gnulib/lib/reallocarray.c
-@@ -0,0 +1,39 @@
-+/* reallocarray function that is glibc compatible.
-+
-+ Copyright (C) 2017-2021 Free Software Foundation, Inc.
-+
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-+
-+ This file is distributed in the hope that it will be useful,
-+ but WITHOUT ANY WARRANTY; without even the implied warranty of
-+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-+ GNU Lesser General Public License for more details.
-+
-+ You should have received a copy of the GNU Lesser General Public License
-+ along with this program. If not, see <https://www.gnu.org/licenses/>. */
-+
-+/* written by Darshit Shah */
-+
-+#include <config.h>
-+
-+#include <stdlib.h>
-+#include <errno.h>
-+
-+#include "intprops.h"
-+
-+void *
-+reallocarray (void *ptr, size_t nmemb, size_t size)
-+{
-+ size_t nbytes;
-+ if (INT_MULTIPLY_WRAPV (nmemb, size, &nbytes))
-+ {
-+ errno = ENOMEM;
-+ return NULL;
-+ }
-+
-+ /* Rely on the semantics of GNU realloc. */
-+ return realloc (ptr, nbytes);
-+}
---- a/gnulib/lib/regcomp.c
-+++ b/gnulib/lib/regcomp.c
-@@ -4,16 +4,16 @@
- Contributed by Isamu Hasegawa <isamu@yamato.ibm.com>.
-
- The GNU C Library is free software; you can redistribute it and/or
-- modify it under the terms of the GNU General Public
-+ modify it under the terms of the GNU Lesser General Public
- License as published by the Free Software Foundation; either
-- version 3 of the License, or (at your option) any later version.
-+ version 2.1 of the License, or (at your option) any later version.
-
- The GNU C Library is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-- General Public License for more details.
-+ Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public
-+ You should have received a copy of the GNU Lesser General Public
- License along with the GNU C Library; if not, see
- <https://www.gnu.org/licenses/>. */
-
---- a/gnulib/lib/regex.c
-+++ b/gnulib/lib/regex.c
-@@ -4,16 +4,16 @@
- Contributed by Isamu Hasegawa <isamu@yamato.ibm.com>.
-
- The GNU C Library is free software; you can redistribute it and/or
-- modify it under the terms of the GNU General Public
-+ modify it under the terms of the GNU Lesser General Public
- License as published by the Free Software Foundation; either
-- version 3 of the License, or (at your option) any later version.
-+ version 2.1 of the License, or (at your option) any later version.
-
- The GNU C Library is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-- General Public License for more details.
-+ Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public
-+ You should have received a copy of the GNU Lesser General Public
- License along with the GNU C Library; if not, see
- <https://www.gnu.org/licenses/>. */
-
---- a/gnulib/lib/regex.h
-+++ b/gnulib/lib/regex.h
-@@ -4,16 +4,16 @@
- This file is part of the GNU C Library.
-
- The GNU C Library is free software; you can redistribute it and/or
-- modify it under the terms of the GNU General Public
-+ modify it under the terms of the GNU Lesser General Public
- License as published by the Free Software Foundation; either
-- version 3 of the License, or (at your option) any later version.
-+ version 2.1 of the License, or (at your option) any later version.
-
- The GNU C Library is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-- General Public License for more details.
-+ Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public
-+ You should have received a copy of the GNU Lesser General Public
- License along with the GNU C Library; if not, see
- <https://www.gnu.org/licenses/>. */
-
---- a/gnulib/lib/regex_internal.c
-+++ b/gnulib/lib/regex_internal.c
-@@ -4,16 +4,16 @@
- Contributed by Isamu Hasegawa <isamu@yamato.ibm.com>.
-
- The GNU C Library is free software; you can redistribute it and/or
-- modify it under the terms of the GNU General Public
-+ modify it under the terms of the GNU Lesser General Public
- License as published by the Free Software Foundation; either
-- version 3 of the License, or (at your option) any later version.
-+ version 2.1 of the License, or (at your option) any later version.
-
- The GNU C Library is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-- General Public License for more details.
-+ Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public
-+ You should have received a copy of the GNU Lesser General Public
- License along with the GNU C Library; if not, see
- <https://www.gnu.org/licenses/>. */
-
---- a/gnulib/lib/regex_internal.h
-+++ b/gnulib/lib/regex_internal.h
-@@ -4,16 +4,16 @@
- Contributed by Isamu Hasegawa <isamu@yamato.ibm.com>.
-
- The GNU C Library is free software; you can redistribute it and/or
-- modify it under the terms of the GNU General Public
-+ modify it under the terms of the GNU Lesser General Public
- License as published by the Free Software Foundation; either
-- version 3 of the License, or (at your option) any later version.
-+ version 2.1 of the License, or (at your option) any later version.
-
- The GNU C Library is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-- General Public License for more details.
-+ Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public
-+ You should have received a copy of the GNU Lesser General Public
- License along with the GNU C Library; if not, see
- <https://www.gnu.org/licenses/>. */
-
---- a/gnulib/lib/regexec.c
-+++ b/gnulib/lib/regexec.c
-@@ -4,16 +4,16 @@
- Contributed by Isamu Hasegawa <isamu@yamato.ibm.com>.
-
- The GNU C Library is free software; you can redistribute it and/or
-- modify it under the terms of the GNU General Public
-+ modify it under the terms of the GNU Lesser General Public
- License as published by the Free Software Foundation; either
-- version 3 of the License, or (at your option) any later version.
-+ version 2.1 of the License, or (at your option) any later version.
-
- The GNU C Library is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-- General Public License for more details.
-+ Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public
-+ You should have received a copy of the GNU Lesser General Public
- License along with the GNU C Library; if not, see
- <https://www.gnu.org/licenses/>. */
-
-@@ -1220,9 +1220,13 @@ proceed_next_node (const re_match_contex
- {
- re_node_set *cur_nodes = &mctx->state_log[*pidx]->nodes;
- re_node_set *edests = &dfa->edests[node];
-- bool ok = re_node_set_insert (eps_via_nodes, node);
-- if (__glibc_unlikely (! ok))
-- return -2;
-+
-+ if (! re_node_set_contains (eps_via_nodes, node))
-+ {
-+ bool ok = re_node_set_insert (eps_via_nodes, node);
-+ if (__glibc_unlikely (! ok))
-+ return -2;
-+ }
-
- /* Pick a valid destination, or return -1 if none is found. */
- Idx dest_node = -1;
-@@ -1414,7 +1418,7 @@ set_regs (const regex_t *preg, const re_
- update_regs (dfa, pmatch, prev_idx_match, cur_node, idx, nmatch);
-
- if ((idx == pmatch[0].rm_eo && cur_node == mctx->last_node)
-- || re_node_set_contains (&eps_via_nodes, cur_node))
-+ || (fs && re_node_set_contains (&eps_via_nodes, cur_node)))
- {
- Idx reg_idx;
- cur_node = -1;
---- a/gnulib/lib/setlocale-lock.c
-+++ b/gnulib/lib/setlocale-lock.c
-@@ -1,17 +1,17 @@
- /* Return the internal lock used by setlocale_null_r.
- Copyright (C) 2019-2021 Free Software Foundation, Inc.
-
-- This program is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 3 of the License, or
-- (at your option) any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License
-+ You should have received a copy of the GNU Lesser General Public License
- along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- /* Written by Bruno Haible <bruno@clisp.org>, 2019. */
---- a/gnulib/lib/setlocale_null.c
-+++ b/gnulib/lib/setlocale_null.c
-@@ -1,17 +1,17 @@
- /* Query the name of the current global locale.
- Copyright (C) 2019-2021 Free Software Foundation, Inc.
-
-- This program is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 3 of the License, or
-- (at your option) any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License
-+ You should have received a copy of the GNU Lesser General Public License
- along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- /* Written by Bruno Haible <bruno@clisp.org>, 2019. */
---- a/gnulib/lib/setlocale_null.h
-+++ b/gnulib/lib/setlocale_null.h
-@@ -1,17 +1,17 @@
- /* Query the name of the current global locale.
- Copyright (C) 2019-2021 Free Software Foundation, Inc.
-
-- This program is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 3 of the License, or
-- (at your option) any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License
-+ You should have received a copy of the GNU Lesser General Public License
- along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- /* Written by Bruno Haible <bruno@clisp.org>, 2019. */
---- a/gnulib/lib/size_max.h
-+++ b/gnulib/lib/size_max.h
-@@ -2,18 +2,18 @@
- Copyright (C) 2005-2006, 2009-2021 Free Software Foundation, Inc.
- Written by Simon Josefsson.
-
-- This program is free software; you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 3, or (at your option)
-- any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License
-- along with this program; if not, see <https://www.gnu.org/licenses/>. */
-+ You should have received a copy of the GNU Lesser General Public License
-+ along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- #ifndef GNULIB_SIZE_MAX_H
- #define GNULIB_SIZE_MAX_H
---- a/gnulib/lib/stat-time.c
-+++ b/gnulib/lib/stat-time.c
-@@ -1,3 +1,21 @@
-+/* stat-related time functions.
-+
-+ Copyright (C) 2012-2021 Free Software Foundation, Inc.
-+
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-+
-+ This file is distributed in the hope that it will be useful,
-+ but WITHOUT ANY WARRANTY; without even the implied warranty of
-+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-+ GNU Lesser General Public License for more details.
-+
-+ You should have received a copy of the GNU Lesser General Public License
-+ along with this program. If not, see <https://www.gnu.org/licenses/>. */
-+
- #include <config.h>
-+
- #define _GL_STAT_TIME_INLINE _GL_EXTERN_INLINE
- #include "stat-time.h"
---- a/gnulib/lib/stat-time.h
-+++ b/gnulib/lib/stat-time.h
-@@ -2,17 +2,17 @@
-
- Copyright (C) 2005, 2007, 2009-2021 Free Software Foundation, Inc.
-
-- This program is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 3 of the License, or
-- (at your option) any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License
-+ You should have received a copy of the GNU Lesser General Public License
- along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- /* Written by Paul Eggert. */
---- a/gnulib/lib/stat-w32.c
-+++ b/gnulib/lib/stat-w32.c
-@@ -1,17 +1,17 @@
- /* Core of implementation of fstat and stat for native Windows.
- Copyright (C) 2017-2021 Free Software Foundation, Inc.
-
-- This program is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 3 of the License, or
-- (at your option) any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License
-+ You should have received a copy of the GNU Lesser General Public License
- along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- /* Written by Bruno Haible. */
---- a/gnulib/lib/stat-w32.h
-+++ b/gnulib/lib/stat-w32.h
-@@ -1,17 +1,17 @@
- /* Core of implementation of fstat and stat for native Windows.
- Copyright (C) 2017-2021 Free Software Foundation, Inc.
-
-- This program is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 3 of the License, or
-- (at your option) any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License
-+ You should have received a copy of the GNU Lesser General Public License
- along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- #ifndef _STAT_W32_H
---- a/gnulib/lib/stat.c
-+++ b/gnulib/lib/stat.c
-@@ -1,17 +1,17 @@
- /* Work around platform bugs in stat.
- Copyright (C) 2009-2021 Free Software Foundation, Inc.
-
-- This program is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 3 of the License, or
-- (at your option) any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License
-+ You should have received a copy of the GNU Lesser General Public License
- along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- /* Written by Eric Blake and Bruno Haible. */
---- a/gnulib/lib/stdarg.in.h
-+++ b/gnulib/lib/stdarg.in.h
-@@ -1,18 +1,18 @@
- /* Substitute for and wrapper around <stdarg.h>.
- Copyright (C) 2008-2021 Free Software Foundation, Inc.
-
-- This program is free software; you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 3, or (at your option)
-- any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License
-- along with this program; if not, see <https://www.gnu.org/licenses/>. */
-+ You should have received a copy of the GNU Lesser General Public License
-+ along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- #ifndef _@GUARD_PREFIX@_STDARG_H
-
---- a/gnulib/lib/stdbool.in.h
-+++ b/gnulib/lib/stdbool.in.h
-@@ -1,18 +1,18 @@
- /* Copyright (C) 2001-2003, 2006-2021 Free Software Foundation, Inc.
- Written by Bruno Haible <haible@clisp.cons.org>, 2001.
-
-- This program is free software; you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 3, or (at your option)
-- any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License
-- along with this program; if not, see <https://www.gnu.org/licenses/>. */
-+ You should have received a copy of the GNU Lesser General Public License
-+ along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- #ifndef _GL_STDBOOL_H
- #define _GL_STDBOOL_H
---- a/gnulib/lib/stddef.in.h
-+++ b/gnulib/lib/stddef.in.h
-@@ -2,18 +2,18 @@
-
- Copyright (C) 2009-2021 Free Software Foundation, Inc.
-
-- This program is free software; you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 3, or (at your option)
-- any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License
-- along with this program; if not, see <https://www.gnu.org/licenses/>. */
-+ You should have received a copy of the GNU Lesser General Public License
-+ along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- /* Written by Eric Blake. */
-
---- a/gnulib/lib/stdint.in.h
-+++ b/gnulib/lib/stdint.in.h
-@@ -2,18 +2,18 @@
- Written by Paul Eggert, Bruno Haible, Sam Steingold, Peter Burwood.
- This file is part of gnulib.
-
-- This program is free software; you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 3, or (at your option)
-- any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License
-- along with this program; if not, see <https://www.gnu.org/licenses/>. */
-+ You should have received a copy of the GNU Lesser General Public License
-+ along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- /*
- * ISO C 99 <stdint.h> for platforms that lack it.
-@@ -85,7 +85,7 @@
-
- /* Override WINT_MIN and WINT_MAX if gnulib's <wchar.h> or <wctype.h> overrides
- wint_t. */
--#if @GNULIB_OVERRIDES_WINT_T@
-+#if @GNULIBHEADERS_OVERRIDE_WINT_T@
- # undef WINT_MIN
- # undef WINT_MAX
- # define WINT_MIN 0x0U
-@@ -598,7 +598,7 @@ typedef int _verify_intmax_size[sizeof (
- /* wint_t limits */
- /* If gnulib's <wchar.h> or <wctype.h> overrides wint_t, @WINT_T_SUFFIX@ is not
- accurate, therefore use the definitions from above. */
--# if !@GNULIB_OVERRIDES_WINT_T@
-+# if !@GNULIBHEADERS_OVERRIDE_WINT_T@
- # undef WINT_MIN
- # undef WINT_MAX
- # if @HAVE_SIGNED_WINT_T@
---- a/gnulib/lib/stdio.in.h
-+++ b/gnulib/lib/stdio.in.h
-@@ -2,18 +2,18 @@
-
- Copyright (C) 2004, 2007-2021 Free Software Foundation, Inc.
-
-- This program is free software; you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 3, or (at your option)
-- any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License
-- along with this program; if not, see <https://www.gnu.org/licenses/>. */
-+ You should have received a copy of the GNU Lesser General Public License
-+ along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- #if __GNUC__ >= 3
- @PRAGMA_SYSTEM_HEADER@
-@@ -242,7 +242,7 @@ _GL_WARN_ON_USE (fclose, "fclose is not
- _GL_CXXALIAS_MDA (fcloseall, int, (void));
- # else
- # if @HAVE_DECL_FCLOSEALL@
--# if defined __FreeBSD__
-+# if defined __FreeBSD__ || defined __DragonFly__
- _GL_CXXALIAS_SYS (fcloseall, void, (void));
- # else
- _GL_CXXALIAS_SYS (fcloseall, int, (void));
-@@ -1257,6 +1257,7 @@ _GL_CXXALIASWARN (scanf);
- # if !(defined __cplusplus && defined GNULIB_NAMESPACE)
- # define snprintf rpl_snprintf
- # endif
-+# define GNULIB_overrides_snprintf 1
- _GL_FUNCDECL_RPL (snprintf, int,
- (char *restrict str, size_t size,
- const char *restrict format, ...)
-@@ -1302,6 +1303,7 @@ _GL_WARN_ON_USE (snprintf, "snprintf is
- # if !(defined __cplusplus && defined GNULIB_NAMESPACE)
- # define sprintf rpl_sprintf
- # endif
-+# define GNULIB_overrides_sprintf 1
- _GL_FUNCDECL_RPL (sprintf, int,
- (char *restrict str, const char *restrict format, ...)
- _GL_ATTRIBUTE_FORMAT_PRINTF_STANDARD (2, 3)
-@@ -1369,6 +1371,7 @@ _GL_WARN_ON_USE (tmpfile, "tmpfile is no
- # if !(defined __cplusplus && defined GNULIB_NAMESPACE)
- # define asprintf rpl_asprintf
- # endif
-+# define GNULIB_overrides_asprintf
- _GL_FUNCDECL_RPL (asprintf, int,
- (char **result, const char *format, ...)
- _GL_ATTRIBUTE_FORMAT_PRINTF_STANDARD (2, 3)
-@@ -1390,6 +1393,7 @@ _GL_CXXALIASWARN (asprintf);
- # if !(defined __cplusplus && defined GNULIB_NAMESPACE)
- # define vasprintf rpl_vasprintf
- # endif
-+# define GNULIB_overrides_vasprintf 1
- _GL_FUNCDECL_RPL (vasprintf, int,
- (char **result, const char *format, va_list args)
- _GL_ATTRIBUTE_FORMAT_PRINTF_STANDARD (2, 0)
-@@ -1573,6 +1577,7 @@ _GL_CXXALIASWARN (vscanf);
- # if !(defined __cplusplus && defined GNULIB_NAMESPACE)
- # define vsnprintf rpl_vsnprintf
- # endif
-+# define GNULIB_overrides_vsnprintf 1
- _GL_FUNCDECL_RPL (vsnprintf, int,
- (char *restrict str, size_t size,
- const char *restrict format, va_list args)
-@@ -1609,6 +1614,7 @@ _GL_WARN_ON_USE (vsnprintf, "vsnprintf i
- # if !(defined __cplusplus && defined GNULIB_NAMESPACE)
- # define vsprintf rpl_vsprintf
- # endif
-+# define GNULIB_overrides_vsprintf 1
- _GL_FUNCDECL_RPL (vsprintf, int,
- (char *restrict str,
- const char *restrict format, va_list args)
---- a/gnulib/lib/stdlib.in.h
-+++ b/gnulib/lib/stdlib.in.h
-@@ -2,17 +2,17 @@
-
- Copyright (C) 1995, 2001-2004, 2006-2021 Free Software Foundation, Inc.
-
-- This program is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 3 of the License, or
-- (at your option) any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License
-+ You should have received a copy of the GNU Lesser General Public License
- along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- #if __GNUC__ >= 3
-@@ -1032,12 +1032,23 @@ _GL_WARN_ON_USE (realloc, "realloc is no
-
-
- #if @GNULIB_REALLOCARRAY@
--# if ! @HAVE_REALLOCARRAY@
-+# if @REPLACE_REALLOCARRAY@
-+# if !(defined __cplusplus && defined GNULIB_NAMESPACE)
-+# undef reallocarray
-+# define reallocarray rpl_reallocarray
-+# endif
-+_GL_FUNCDECL_RPL (reallocarray, void *,
-+ (void *ptr, size_t nmemb, size_t size));
-+_GL_CXXALIAS_RPL (reallocarray, void *,
-+ (void *ptr, size_t nmemb, size_t size));
-+# else
-+# if ! @HAVE_REALLOCARRAY@
- _GL_FUNCDECL_SYS (reallocarray, void *,
- (void *ptr, size_t nmemb, size_t size));
--# endif
-+# endif
- _GL_CXXALIAS_SYS (reallocarray, void *,
- (void *ptr, size_t nmemb, size_t size));
-+# endif
- _GL_CXXALIASWARN (reallocarray);
- #elif defined GNULIB_POSIXCHECK
- # undef reallocarray
-@@ -1202,6 +1213,47 @@ _GL_WARN_ON_USE (strtold, "strtold is un
- # endif
- #endif
-
-+#if @GNULIB_STRTOL@
-+/* Parse a signed integer whose textual representation starts at STRING.
-+ The integer is expected to be in base BASE (2 <= BASE <= 36); if BASE == 0,
-+ it may be decimal or octal (with prefix "0") or hexadecimal (with prefix
-+ "0x").
-+ If ENDPTR is not NULL, the address of the first byte after the integer is
-+ stored in *ENDPTR.
-+ Upon overflow, the return value is LONG_MAX or LONG_MIN, and errno is set
-+ to ERANGE. */
-+# if @REPLACE_STRTOL@
-+# if !(defined __cplusplus && defined GNULIB_NAMESPACE)
-+# define strtol rpl_strtol
-+# endif
-+# define GNULIB_defined_strtol_function 1
-+_GL_FUNCDECL_RPL (strtol, long,
-+ (const char *restrict string, char **restrict endptr,
-+ int base)
-+ _GL_ARG_NONNULL ((1)));
-+_GL_CXXALIAS_RPL (strtol, long,
-+ (const char *restrict string, char **restrict endptr,
-+ int base));
-+# else
-+# if !@HAVE_STRTOL@
-+_GL_FUNCDECL_SYS (strtol, long,
-+ (const char *restrict string, char **restrict endptr,
-+ int base)
-+ _GL_ARG_NONNULL ((1)));
-+# endif
-+_GL_CXXALIAS_SYS (strtol, long,
-+ (const char *restrict string, char **restrict endptr,
-+ int base));
-+# endif
-+_GL_CXXALIASWARN (strtol);
-+#elif defined GNULIB_POSIXCHECK
-+# undef strtol
-+# if HAVE_RAW_DECL_STRTOL
-+_GL_WARN_ON_USE (strtol, "strtol is unportable - "
-+ "use gnulib module strtol for portability");
-+# endif
-+#endif
-+
- #if @GNULIB_STRTOLL@
- /* Parse a signed integer whose textual representation starts at STRING.
- The integer is expected to be in base BASE (2 <= BASE <= 36); if BASE == 0,
-@@ -1211,15 +1263,29 @@ _GL_WARN_ON_USE (strtold, "strtold is un
- stored in *ENDPTR.
- Upon overflow, the return value is LLONG_MAX or LLONG_MIN, and errno is set
- to ERANGE. */
--# if !@HAVE_STRTOLL@
-+# if @REPLACE_STRTOLL@
-+# if !(defined __cplusplus && defined GNULIB_NAMESPACE)
-+# define strtoll rpl_strtoll
-+# endif
-+# define GNULIB_defined_strtoll_function 1
-+_GL_FUNCDECL_RPL (strtoll, long long,
-+ (const char *restrict string, char **restrict endptr,
-+ int base)
-+ _GL_ARG_NONNULL ((1)));
-+_GL_CXXALIAS_RPL (strtoll, long long,
-+ (const char *restrict string, char **restrict endptr,
-+ int base));
-+# else
-+# if !@HAVE_STRTOLL@
- _GL_FUNCDECL_SYS (strtoll, long long,
- (const char *restrict string, char **restrict endptr,
- int base)
- _GL_ARG_NONNULL ((1)));
--# endif
-+# endif
- _GL_CXXALIAS_SYS (strtoll, long long,
- (const char *restrict string, char **restrict endptr,
- int base));
-+# endif
- _GL_CXXALIASWARN (strtoll);
- #elif defined GNULIB_POSIXCHECK
- # undef strtoll
-@@ -1229,6 +1295,46 @@ _GL_WARN_ON_USE (strtoll, "strtoll is un
- # endif
- #endif
-
-+#if @GNULIB_STRTOUL@
-+/* Parse an unsigned integer whose textual representation starts at STRING.
-+ The integer is expected to be in base BASE (2 <= BASE <= 36); if BASE == 0,
-+ it may be decimal or octal (with prefix "0") or hexadecimal (with prefix
-+ "0x").
-+ If ENDPTR is not NULL, the address of the first byte after the integer is
-+ stored in *ENDPTR.
-+ Upon overflow, the return value is ULONG_MAX, and errno is set to ERANGE. */
-+# if @REPLACE_STRTOUL@
-+# if !(defined __cplusplus && defined GNULIB_NAMESPACE)
-+# define strtoul rpl_strtoul
-+# endif
-+# define GNULIB_defined_strtoul_function 1
-+_GL_FUNCDECL_RPL (strtoul, unsigned long,
-+ (const char *restrict string, char **restrict endptr,
-+ int base)
-+ _GL_ARG_NONNULL ((1)));
-+_GL_CXXALIAS_RPL (strtoul, unsigned long,
-+ (const char *restrict string, char **restrict endptr,
-+ int base));
-+# else
-+# if !@HAVE_STRTOUL@
-+_GL_FUNCDECL_SYS (strtoul, unsigned long,
-+ (const char *restrict string, char **restrict endptr,
-+ int base)
-+ _GL_ARG_NONNULL ((1)));
-+# endif
-+_GL_CXXALIAS_SYS (strtoul, unsigned long,
-+ (const char *restrict string, char **restrict endptr,
-+ int base));
-+# endif
-+_GL_CXXALIASWARN (strtoul);
-+#elif defined GNULIB_POSIXCHECK
-+# undef strtoul
-+# if HAVE_RAW_DECL_STRTOUL
-+_GL_WARN_ON_USE (strtoul, "strtoul is unportable - "
-+ "use gnulib module strtoul for portability");
-+# endif
-+#endif
-+
- #if @GNULIB_STRTOULL@
- /* Parse an unsigned integer whose textual representation starts at STRING.
- The integer is expected to be in base BASE (2 <= BASE <= 36); if BASE == 0,
-@@ -1238,15 +1344,29 @@ _GL_WARN_ON_USE (strtoll, "strtoll is un
- stored in *ENDPTR.
- Upon overflow, the return value is ULLONG_MAX, and errno is set to
- ERANGE. */
--# if !@HAVE_STRTOULL@
-+# if @REPLACE_STRTOULL@
-+# if !(defined __cplusplus && defined GNULIB_NAMESPACE)
-+# define strtoull rpl_strtoull
-+# endif
-+# define GNULIB_defined_strtoull_function 1
-+_GL_FUNCDECL_RPL (strtoull, unsigned long long,
-+ (const char *restrict string, char **restrict endptr,
-+ int base)
-+ _GL_ARG_NONNULL ((1)));
-+_GL_CXXALIAS_RPL (strtoull, unsigned long long,
-+ (const char *restrict string, char **restrict endptr,
-+ int base));
-+# else
-+# if !@HAVE_STRTOULL@
- _GL_FUNCDECL_SYS (strtoull, unsigned long long,
- (const char *restrict string, char **restrict endptr,
- int base)
- _GL_ARG_NONNULL ((1)));
--# endif
-+# endif
- _GL_CXXALIAS_SYS (strtoull, unsigned long long,
- (const char *restrict string, char **restrict endptr,
- int base));
-+# endif
- _GL_CXXALIASWARN (strtoull);
- #elif defined GNULIB_POSIXCHECK
- # undef strtoull
---- a/gnulib/lib/stpcpy.c
-+++ b/gnulib/lib/stpcpy.c
-@@ -5,17 +5,17 @@
- NOTE: The canonical source of this file is maintained with the GNU C Library.
- Bugs can be reported to bug-glibc@prep.ai.mit.edu.
-
-- This program is free software: you can redistribute it and/or modify it
-- under the terms of the GNU General Public License as published by the
-- Free Software Foundation; either version 3 of the License, or any
-- later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License
-+ You should have received a copy of the GNU Lesser General Public License
- along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- #include <config.h>
---- a/gnulib/lib/str-kmp.h
-+++ b/gnulib/lib/str-kmp.h
-@@ -3,18 +3,26 @@
- Copyright (C) 2005-2021 Free Software Foundation, Inc.
- Written by Bruno Haible <bruno@clisp.org>, 2005.
-
-- This program is free software; you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 3, or (at your option)
-- any later version.
-+ This file is free software.
-+ It is dual-licensed under "the GNU LGPLv3+ or the GNU GPLv2+".
-+ You can redistribute it and/or modify it under either
-+ - the terms of the GNU Lesser General Public License as published
-+ by the Free Software Foundation; either version 3, or (at your
-+ option) any later version, or
-+ - the terms of the GNU General Public License as published by the
-+ Free Software Foundation; either version 2, or (at your option)
-+ any later version, or
-+ - the same dual license "the GNU LGPLv3+ or the GNU GPLv2+".
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-+ Lesser General Public License and the GNU General Public License
-+ for more details.
-
-- You should have received a copy of the GNU General Public License
-- along with this program; if not, see <https://www.gnu.org/licenses/>. */
-+ You should have received a copy of the GNU Lesser General Public
-+ License and of the GNU General Public License along with this
-+ program. If not, see <https://www.gnu.org/licenses/>. */
-
- /* Before including this file, you need to define:
- UNIT The element type of the needle and haystack.
---- a/gnulib/lib/str-two-way.h
-+++ b/gnulib/lib/str-two-way.h
-@@ -3,18 +3,18 @@
- This file is part of the GNU C Library.
- Written by Eric Blake <ebb9@byu.net>, 2008.
-
-- This program is free software; you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 3, or (at your option)
-- any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License along
-- with this program; if not, see <https://www.gnu.org/licenses/>. */
-+ You should have received a copy of the GNU Lesser General Public License
-+ along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- /* Before including this file, you need to include <config.h> and
- <string.h>, and define:
---- a/gnulib/lib/strcasecmp.c
-+++ b/gnulib/lib/strcasecmp.c
-@@ -1,18 +1,18 @@
- /* Case-insensitive string comparison function.
- Copyright (C) 1998-1999, 2005-2007, 2009-2021 Free Software Foundation, Inc.
-
-- This program is free software; you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 3, or (at your option)
-- any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License
-- along with this program; if not, see <https://www.gnu.org/licenses/>. */
-+ You should have received a copy of the GNU Lesser General Public License
-+ along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- #include <config.h>
-
---- a/gnulib/lib/strcasestr.c
-+++ b/gnulib/lib/strcasestr.c
-@@ -2,18 +2,18 @@
- Copyright (C) 2005-2021 Free Software Foundation, Inc.
- Written by Bruno Haible <bruno@clisp.org>, 2005.
-
-- This program is free software; you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 3, or (at your option)
-- any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License
-- along with this program; if not, see <https://www.gnu.org/licenses/>. */
-+ You should have received a copy of the GNU Lesser General Public License
-+ along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- #include <config.h>
-
---- a/gnulib/lib/strdup.c
-+++ b/gnulib/lib/strdup.c
-@@ -3,18 +3,18 @@
-
- This file is part of the GNU C Library.
-
-- This program is free software; you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 3, or (at your option)
-- any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License along
-- with this program; if not, see <https://www.gnu.org/licenses/>. */
-+ You should have received a copy of the GNU Lesser General Public License
-+ along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- #ifndef _LIBC
- # include <config.h>
---- a/gnulib/lib/streq.h
-+++ b/gnulib/lib/streq.h
-@@ -1,17 +1,17 @@
- /* Optimized string comparison.
- Copyright (C) 2001-2002, 2007, 2009-2021 Free Software Foundation, Inc.
-
-- This program is free software: you can redistribute it and/or modify it
-- under the terms of the GNU General Public License as published
-- by the Free Software Foundation; either version 3 of the License, or
-- (at your option) any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-- General Public License for more details.
-+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License
-+ You should have received a copy of the GNU Lesser General Public License
- along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- /* Written by Bruno Haible <bruno@clisp.org>. */
---- a/gnulib/lib/strerror-override.c
-+++ b/gnulib/lib/strerror-override.c
-@@ -2,17 +2,17 @@
-
- Copyright (C) 2010-2021 Free Software Foundation, Inc.
-
-- This program is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 3 of the License, or
-- (at your option) any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License
-+ You should have received a copy of the GNU Lesser General Public License
- along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- /* Written by Bruno Haible <bruno@clisp.org>, 2010. */
-@@ -29,6 +29,8 @@
- # endif
- #endif
-
-+#if !GNULIB_defined_strerror_override_macro
-+
- /* If ERRNUM maps to an errno value defined by gnulib, return a string
- describing the error. Otherwise return NULL. */
- const char *
-@@ -37,12 +39,12 @@ strerror_override (int errnum)
- /* These error messages are taken from glibc/sysdeps/gnu/errlist.c. */
- switch (errnum)
- {
--#if REPLACE_STRERROR_0
-+# if REPLACE_STRERROR_0
- case 0:
- return "Success";
--#endif
-+# endif
-
--#if GNULIB_defined_ESOCK /* native Windows platforms with older <errno.h> */
-+# if GNULIB_defined_ESOCK /* native Windows platforms with older <errno.h> */
- case EINPROGRESS:
- return "Operation now in progress";
- case EALREADY:
-@@ -89,8 +91,8 @@ strerror_override (int errnum)
- return "No route to host";
- case EWOULDBLOCK:
- return "Operation would block";
--#endif
--#if GNULIB_defined_ESTREAMS /* native Windows platforms with older <errno.h> */
-+# endif
-+# if GNULIB_defined_ESTREAMS /* native Windows platforms with older <errno.h> */
- case ETXTBSY:
- return "Text file busy";
- case ENODATA:
-@@ -103,8 +105,8 @@ strerror_override (int errnum)
- return "Timer expired";
- case EOTHER:
- return "Other error";
--#endif
--#if GNULIB_defined_EWINSOCK /* native Windows platforms */
-+# endif
-+# if GNULIB_defined_EWINSOCK /* native Windows platforms */
- case ESOCKTNOSUPPORT:
- return "Socket type not supported";
- case EPFNOSUPPORT:
-@@ -125,7 +127,7 @@ strerror_override (int errnum)
- return "Stale NFS file handle";
- case EREMOTE:
- return "Object is remote";
--# if HAVE_WINSOCK2_H
-+# if HAVE_WINSOCK2_H
- /* WSA_INVALID_HANDLE maps to EBADF */
- /* WSA_NOT_ENOUGH_MEMORY maps to ENOMEM */
- /* WSA_INVALID_PARAMETER maps to EINVAL */
-@@ -213,90 +215,92 @@ strerror_override (int errnum)
- case WSANO_DATA:
- return "Valid name, no data record of requested type";
- /* WSA_QOS_* omitted */
-+# endif
- # endif
--#endif
-
--#if GNULIB_defined_ENOMSG
-+# if GNULIB_defined_ENOMSG
- case ENOMSG:
- return "No message of desired type";
--#endif
-+# endif
-
--#if GNULIB_defined_EIDRM
-+# if GNULIB_defined_EIDRM
- case EIDRM:
- return "Identifier removed";
--#endif
-+# endif
-
--#if GNULIB_defined_ENOLINK
-+# if GNULIB_defined_ENOLINK
- case ENOLINK:
- return "Link has been severed";
--#endif
-+# endif
-
--#if GNULIB_defined_EPROTO
-+# if GNULIB_defined_EPROTO
- case EPROTO:
- return "Protocol error";
--#endif
-+# endif
-
--#if GNULIB_defined_EMULTIHOP
-+# if GNULIB_defined_EMULTIHOP
- case EMULTIHOP:
- return "Multihop attempted";
--#endif
-+# endif
-
--#if GNULIB_defined_EBADMSG
-+# if GNULIB_defined_EBADMSG
- case EBADMSG:
- return "Bad message";
--#endif
-+# endif
-
--#if GNULIB_defined_EOVERFLOW
-+# if GNULIB_defined_EOVERFLOW
- case EOVERFLOW:
- return "Value too large for defined data type";
--#endif
-+# endif
-
--#if GNULIB_defined_ENOTSUP
-+# if GNULIB_defined_ENOTSUP
- case ENOTSUP:
- return "Not supported";
--#endif
-+# endif
-
--#if GNULIB_defined_ENETRESET
-+# if GNULIB_defined_ENETRESET
- case ENETRESET:
- return "Network dropped connection on reset";
--#endif
-+# endif
-
--#if GNULIB_defined_ECONNABORTED
-+# if GNULIB_defined_ECONNABORTED
- case ECONNABORTED:
- return "Software caused connection abort";
--#endif
-+# endif
-
--#if GNULIB_defined_ESTALE
-+# if GNULIB_defined_ESTALE
- case ESTALE:
- return "Stale NFS file handle";
--#endif
-+# endif
-
--#if GNULIB_defined_EDQUOT
-+# if GNULIB_defined_EDQUOT
- case EDQUOT:
- return "Disk quota exceeded";
--#endif
-+# endif
-
--#if GNULIB_defined_ECANCELED
-+# if GNULIB_defined_ECANCELED
- case ECANCELED:
- return "Operation canceled";
--#endif
-+# endif
-
--#if GNULIB_defined_EOWNERDEAD
-+# if GNULIB_defined_EOWNERDEAD
- case EOWNERDEAD:
- return "Owner died";
--#endif
-+# endif
-
--#if GNULIB_defined_ENOTRECOVERABLE
-+# if GNULIB_defined_ENOTRECOVERABLE
- case ENOTRECOVERABLE:
- return "State not recoverable";
--#endif
-+# endif
-
--#if GNULIB_defined_EILSEQ
-+# if GNULIB_defined_EILSEQ
- case EILSEQ:
- return "Invalid or incomplete multibyte or wide character";
--#endif
-+# endif
-
- default:
- return NULL;
- }
- }
-+
-+#endif
---- a/gnulib/lib/strerror-override.h
-+++ b/gnulib/lib/strerror-override.h
-@@ -2,17 +2,17 @@
-
- Copyright (C) 2010-2021 Free Software Foundation, Inc.
-
-- This program is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 3 of the License, or
-- (at your option) any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License
-+ You should have received a copy of the GNU Lesser General Public License
- along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- #ifndef _GL_STRERROR_OVERRIDE_H
-@@ -51,6 +51,7 @@
- extern const char *strerror_override (int errnum) _GL_ATTRIBUTE_CONST;
- # else
- # define strerror_override(ignored) NULL
-+# define GNULIB_defined_strerror_override_macro 1
- # endif
-
- #endif /* _GL_STRERROR_OVERRIDE_H */
---- a/gnulib/lib/strerror.c
-+++ b/gnulib/lib/strerror.c
-@@ -2,17 +2,17 @@
-
- Copyright (C) 2007-2021 Free Software Foundation, Inc.
-
-- This program is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 3 of the License, or
-- (at your option) any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License
-+ You should have received a copy of the GNU Lesser General Public License
- along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- #include <config.h>
---- a/gnulib/lib/string.in.h
-+++ b/gnulib/lib/string.in.h
-@@ -2,18 +2,18 @@
-
- Copyright (C) 1995-1996, 2001-2021 Free Software Foundation, Inc.
-
-- This program is free software; you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 3, or (at your option)
-- any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License
-- along with this program; if not, see <https://www.gnu.org/licenses/>. */
-+ You should have received a copy of the GNU Lesser General Public License
-+ along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- #if __GNUC__ >= 3
- @PRAGMA_SYSTEM_HEADER@
-@@ -446,7 +446,7 @@ _GL_WARN_ON_USE (strdup, "strdup is unpo
- #elif @GNULIB_MDA_STRDUP@
- /* On native Windows, map 'creat' to '_creat', so that -loldnames is not
- required. In C++ with GNULIB_NAMESPACE, avoid differences between
-- platforms by defining GNULIB_NAMESPACE::creat always. */
-+ platforms by defining GNULIB_NAMESPACE::strdup always. */
- # if defined _WIN32 && !defined __CYGWIN__
- # if !(defined __cplusplus && defined GNULIB_NAMESPACE)
- # undef strdup
---- a/gnulib/lib/strings.in.h
-+++ b/gnulib/lib/strings.in.h
-@@ -2,18 +2,18 @@
-
- Copyright (C) 2007-2021 Free Software Foundation, Inc.
-
-- This program is free software; you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 3, or (at your option)
-- any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License
-- along with this program; if not, see <https://www.gnu.org/licenses/>. */
-+ You should have received a copy of the GNU Lesser General Public License
-+ along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- #ifndef _@GUARD_PREFIX@_STRINGS_H
-
---- a/gnulib/lib/strncasecmp.c
-+++ b/gnulib/lib/strncasecmp.c
-@@ -1,18 +1,18 @@
- /* strncasecmp.c -- case insensitive string comparator
- Copyright (C) 1998-1999, 2005-2007, 2009-2021 Free Software Foundation, Inc.
-
-- This program is free software; you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 3, or (at your option)
-- any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License
-- along with this program; if not, see <https://www.gnu.org/licenses/>. */
-+ You should have received a copy of the GNU Lesser General Public License
-+ along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- #include <config.h>
-
---- a/gnulib/lib/strndup.c
-+++ b/gnulib/lib/strndup.c
-@@ -3,18 +3,18 @@
- Copyright (C) 1996-1998, 2001-2003, 2005-2007, 2009-2021 Free Software
- Foundation, Inc.
-
-- This program is free software; you can redistribute it and/or modify it
-- under the terms of the GNU General Public License as published by the
-- Free Software Foundation; either version 3, or (at your option) any
-- later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License
-- along with this program; if not, see <https://www.gnu.org/licenses/>. */
-+ You should have received a copy of the GNU Lesser General Public License
-+ along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- #include <config.h>
-
---- a/gnulib/lib/strnlen.c
-+++ b/gnulib/lib/strnlen.c
-@@ -2,18 +2,18 @@
- Copyright (C) 2005-2007, 2009-2021 Free Software Foundation, Inc.
- Written by Simon Josefsson.
-
-- This program is free software; you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 3, or (at your option)
-- any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License
-- along with this program; if not, see <https://www.gnu.org/licenses/>. */
-+ You should have received a copy of the GNU Lesser General Public License
-+ along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- #include <config.h>
-
---- a/gnulib/lib/strnlen1.c
-+++ b/gnulib/lib/strnlen1.c
-@@ -1,17 +1,17 @@
- /* Find the length of STRING + 1, but scan at most MAXLEN bytes.
- Copyright (C) 2005-2006, 2009-2021 Free Software Foundation, Inc.
-
-- This program is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 3 of the License, or
-- (at your option) any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License
-+ You should have received a copy of the GNU Lesser General Public License
- along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- #include <config.h>
---- a/gnulib/lib/strnlen1.h
-+++ b/gnulib/lib/strnlen1.h
-@@ -1,17 +1,17 @@
- /* Find the length of STRING + 1, but scan at most MAXLEN bytes.
- Copyright (C) 2005, 2009-2021 Free Software Foundation, Inc.
-
-- This program is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 3 of the License, or
-- (at your option) any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License
-+ You should have received a copy of the GNU Lesser General Public License
- along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- #ifndef _STRNLEN1_H
---- a/gnulib/lib/strstr.c
-+++ b/gnulib/lib/strstr.c
-@@ -2,18 +2,18 @@
- Foundation, Inc.
- This file is part of the GNU C Library.
-
-- This program is free software; you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 3, or (at your option)
-- any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License along
-- with this program; if not, see <https://www.gnu.org/licenses/>. */
-+ You should have received a copy of the GNU Lesser General Public License
-+ along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- /* This particular implementation was written by Eric Blake, 2008. */
-
---- a/gnulib/lib/sys_stat.in.h
-+++ b/gnulib/lib/sys_stat.in.h
-@@ -1,18 +1,18 @@
- /* Provide a more complete sys/stat.h header file.
- Copyright (C) 2005-2021 Free Software Foundation, Inc.
-
-- This program is free software; you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 3, or (at your option)
-- any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License
-- along with this program; if not, see <https://www.gnu.org/licenses/>. */
-+ You should have received a copy of the GNU Lesser General Public License
-+ along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- /* Written by Eric Blake, Paul Eggert, and Jim Meyering. */
-
---- a/gnulib/lib/sys_types.in.h
-+++ b/gnulib/lib/sys_types.in.h
-@@ -2,18 +2,18 @@
-
- Copyright (C) 2011-2021 Free Software Foundation, Inc.
-
-- This program is free software; you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 3, or (at your option)
-- any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License
-- along with this program; if not, see <https://www.gnu.org/licenses/>. */
-+ You should have received a copy of the GNU Lesser General Public License
-+ along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- #if __GNUC__ >= 3
- @PRAGMA_SYSTEM_HEADER@
---- a/gnulib/lib/time.in.h
-+++ b/gnulib/lib/time.in.h
-@@ -2,18 +2,18 @@
-
- Copyright (C) 2007-2021 Free Software Foundation, Inc.
-
-- This program is free software; you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 3, or (at your option)
-- any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License
-- along with this program; if not, see <https://www.gnu.org/licenses/>. */
-+ You should have received a copy of the GNU Lesser General Public License
-+ along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- #if __GNUC__ >= 3
- @PRAGMA_SYSTEM_HEADER@
-@@ -340,22 +340,60 @@ _GL_CXXALIASWARN (strftime);
- # endif
-
- # if defined _GNU_SOURCE && @GNULIB_TIME_RZ@ && ! @HAVE_TIMEZONE_T@
-+/* Functions that use a first-class time zone data type, instead of
-+ relying on an implicit global time zone.
-+ Inspired by NetBSD. */
-+
-+/* Represents a time zone.
-+ (timezone_t) NULL stands for UTC. */
- typedef struct tm_zone *timezone_t;
-+
-+/* tzalloc (name)
-+ Returns a time zone object for the given time zone NAME. This object
-+ represents the time zone that other functions would use it the TZ
-+ environment variable was set to NAME.
-+ If NAME is NULL, the result represents the time zone that other functions
-+ would use it the TZ environment variable was unset.
-+ May return NULL if NAME is invalid (this is platform dependent) or
-+ upon memory allocation failure. */
- _GL_FUNCDECL_SYS (tzalloc, timezone_t, (char const *__name));
- _GL_CXXALIAS_SYS (tzalloc, timezone_t, (char const *__name));
-+
-+/* tzfree (tz)
-+ Frees a time zone object.
-+ The argument must have been returned by tzalloc(). */
- _GL_FUNCDECL_SYS (tzfree, void, (timezone_t __tz));
- _GL_CXXALIAS_SYS (tzfree, void, (timezone_t __tz));
-+
-+/* localtime_rz (tz, &t, &result)
-+ Converts an absolute time T to a broken-down time RESULT, assuming the
-+ time zone TZ.
-+ This function is like 'localtime_r', but relies on the argument TZ instead
-+ of an implicit global time zone. */
- _GL_FUNCDECL_SYS (localtime_rz, struct tm *,
- (timezone_t __tz, time_t const *restrict __timer,
- struct tm *restrict __result) _GL_ARG_NONNULL ((2, 3)));
- _GL_CXXALIAS_SYS (localtime_rz, struct tm *,
- (timezone_t __tz, time_t const *restrict __timer,
- struct tm *restrict __result));
-+
-+/* mktime_z (tz, &tm)
-+ Normalizes the broken-down time TM and converts it to an absolute time,
-+ assuming the time zone TZ. Returns the absolute time.
-+ This function is like 'mktime', but relies on the argument TZ instead
-+ of an implicit global time zone. */
- _GL_FUNCDECL_SYS (mktime_z, time_t,
-- (timezone_t __tz, struct tm *restrict __result)
-+ (timezone_t __tz, struct tm *restrict __tm)
- _GL_ARG_NONNULL ((2)));
- _GL_CXXALIAS_SYS (mktime_z, time_t,
-- (timezone_t __tz, struct tm *restrict __result));
-+ (timezone_t __tz, struct tm *restrict __tm));
-+
-+/* Time zone abbreviation strings (returned by 'localtime_rz' or 'mktime_z'
-+ in the 'tm_zone' member of 'struct tm') are valid as long as
-+ - the 'struct tm' argument is not destroyed or overwritten,
-+ and
-+ - the 'timezone_t' argument is not freed through tzfree(). */
-+
- # endif
-
- /* Convert TM to a time_t value, assuming UTC. */
---- a/gnulib/lib/unistd.c
-+++ b/gnulib/lib/unistd.c
-@@ -1,4 +1,22 @@
-+/* Inline functions for <unistd.h>.
-+
-+ Copyright (C) 2012-2021 Free Software Foundation, Inc.
-+
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-+
-+ This file is distributed in the hope that it will be useful,
-+ but WITHOUT ANY WARRANTY; without even the implied warranty of
-+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-+ GNU Lesser General Public License for more details.
-+
-+ You should have received a copy of the GNU Lesser General Public License
-+ along with this program. If not, see <https://www.gnu.org/licenses/>. */
-+
- #include <config.h>
-+
- #define _GL_UNISTD_INLINE _GL_EXTERN_INLINE
- #include "unistd.h"
- typedef int dummy;
---- a/gnulib/lib/unistd.in.h
-+++ b/gnulib/lib/unistd.in.h
-@@ -1,18 +1,18 @@
- /* Substitute for and wrapper around <unistd.h>.
- Copyright (C) 2003-2021 Free Software Foundation, Inc.
-
-- This program is free software; you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 3, or (at your option)
-- any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License
-- along with this program; if not, see <https://www.gnu.org/licenses/>. */
-+ You should have received a copy of the GNU Lesser General Public License
-+ along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- #ifndef _@GUARD_PREFIX@_UNISTD_H
-
-@@ -1521,6 +1521,7 @@ _GL_WARN_ON_USE (group_member, "group_me
- # undef isatty
- # define isatty rpl_isatty
- # endif
-+# define GNULIB_defined_isatty 1
- _GL_FUNCDECL_RPL (isatty, int, (int fd));
- _GL_CXXALIAS_RPL (isatty, int, (int fd));
- # elif defined _WIN32 && !defined __CYGWIN__
-@@ -2027,15 +2028,23 @@ _GL_WARN_ON_USE (sleep, "sleep is unport
- #if @GNULIB_MDA_SWAB@
- /* On native Windows, map 'swab' to '_swab', so that -loldnames is not
- required. In C++ with GNULIB_NAMESPACE, avoid differences between
-- platforms by defining GNULIB_NAMESPACE::creat always. */
-+ platforms by defining GNULIB_NAMESPACE::swab always. */
- # if defined _WIN32 && !defined __CYGWIN__
- # if !(defined __cplusplus && defined GNULIB_NAMESPACE)
- # undef swab
- # define swab _swab
- # endif
--_GL_CXXALIAS_MDA (swab, void, (char *from, char *to, int n));
--# else
-+/* Need to cast, because in old mingw the arguments are
-+ (const char *from, char *to, size_t n). */
-+_GL_CXXALIAS_MDA_CAST (swab, void, (char *from, char *to, int n));
-+# else
-+# if defined __hpux /* HP-UX */
-+_GL_CXXALIAS_SYS (swab, void, (const char *from, char *to, int n));
-+# elif defined __sun && !defined _XPG4 /* Solaris */
-+_GL_CXXALIAS_SYS (swab, void, (const char *from, char *to, ssize_t n));
-+# else
- _GL_CXXALIAS_SYS (swab, void, (const void *from, void *to, ssize_t n));
-+# endif
- # endif
- _GL_CXXALIASWARN (swab);
- #endif
---- a/gnulib/lib/unitypes.in.h
-+++ b/gnulib/lib/unitypes.in.h
-@@ -1,17 +1,17 @@
- /* Elementary types and macros for the GNU UniString library.
- Copyright (C) 2002, 2005-2006, 2009-2021 Free Software Foundation, Inc.
-
-- This program is free software: you can redistribute it and/or modify it
-- under the terms of the GNU General Public License as published
-- by the Free Software Foundation; either version 3 of the License, or
-- (at your option) any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-- General Public License for more details.
-+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License
-+ You should have received a copy of the GNU Lesser General Public License
- along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- #ifndef _UNITYPES_H
---- a/gnulib/lib/uniwidth.in.h
-+++ b/gnulib/lib/uniwidth.in.h
-@@ -2,17 +2,17 @@
- Copyright (C) 2001-2002, 2005, 2007, 2009-2021 Free Software Foundation,
- Inc.
-
-- This program is free software: you can redistribute it and/or modify it
-- under the terms of the GNU General Public License as published
-- by the Free Software Foundation; either version 3 of the License, or
-- (at your option) any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-- General Public License for more details.
-+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License
-+ You should have received a copy of the GNU Lesser General Public License
- along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- #ifndef _UNIWIDTH_H
---- a/gnulib/lib/uniwidth/cjk.h
-+++ b/gnulib/lib/uniwidth/cjk.h
-@@ -2,17 +2,17 @@
- Copyright (C) 2001-2002, 2005-2007, 2009-2021 Free Software Foundation, Inc.
- Written by Bruno Haible <bruno@clisp.org>, 2002.
-
-- This program is free software: you can redistribute it and/or modify it
-- under the terms of the GNU General Public License as published
-- by the Free Software Foundation; either version 3 of the License, or
-- (at your option) any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-- General Public License for more details.
-+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License
-+ You should have received a copy of the GNU Lesser General Public License
- along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- #include "streq.h"
---- a/gnulib/lib/uniwidth/width.c
-+++ b/gnulib/lib/uniwidth/width.c
-@@ -2,17 +2,17 @@
- Copyright (C) 2001-2002, 2006-2021 Free Software Foundation, Inc.
- Written by Bruno Haible <bruno@clisp.org>, 2002.
-
-- This program is free software: you can redistribute it and/or modify it
-- under the terms of the GNU General Public License as published
-- by the Free Software Foundation; either version 3 of the License, or
-- (at your option) any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-- General Public License for more details.
-+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License
-+ You should have received a copy of the GNU Lesser General Public License
- along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- #include <config.h>
---- a/gnulib/lib/vasnprintf.c
-+++ b/gnulib/lib/vasnprintf.c
-@@ -1,18 +1,18 @@
- /* vsprintf with automatic memory allocation.
- Copyright (C) 1999, 2002-2021 Free Software Foundation, Inc.
-
-- This program is free software; you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 3, or (at your option)
-- any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License along
-- with this program; if not, see <https://www.gnu.org/licenses/>. */
-+ You should have received a copy of the GNU Lesser General Public License
-+ along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- /* This file can be parametrized with the following macros:
- VASNPRINTF The name of the function being defined.
-@@ -60,9 +60,7 @@
- #ifndef VASNPRINTF
- # include <config.h>
- #endif
--#ifndef IN_LIBINTL
--# include <alloca.h>
--#endif
-+#include <alloca.h>
-
- /* Specification. */
- #ifndef VASNPRINTF
-@@ -1859,6 +1857,7 @@ VASNPRINTF (DCHAR_T *resultbuf, size_t *
- /* errno is already set. */
- return NULL;
-
-+ /* Frees the memory allocated by this function. Preserves errno. */
- #define CLEANUP() \
- if (d.dir != d.direct_alloc_dir) \
- free (d.dir); \
-@@ -1923,7 +1922,7 @@ VASNPRINTF (DCHAR_T *resultbuf, size_t *
-
- /* Ensures that allocated >= needed. Aborts through a jump to
- out_of_memory if needed is SIZE_MAX or otherwise too big. */
--#define ENSURE_ALLOCATION(needed) \
-+#define ENSURE_ALLOCATION_ELSE(needed, oom_statement) \
- if ((needed) > allocated) \
- { \
- size_t memory_size; \
-@@ -1934,17 +1933,19 @@ VASNPRINTF (DCHAR_T *resultbuf, size_t *
- allocated = (needed); \
- memory_size = xtimes (allocated, sizeof (DCHAR_T)); \
- if (size_overflow_p (memory_size)) \
-- goto out_of_memory; \
-+ oom_statement \
- if (result == resultbuf || result == NULL) \
- memory = (DCHAR_T *) malloc (memory_size); \
- else \
- memory = (DCHAR_T *) realloc (result, memory_size); \
- if (memory == NULL) \
-- goto out_of_memory; \
-+ oom_statement \
- if (result == resultbuf && length > 0) \
- DCHAR_CPY (memory, result, length); \
- result = memory; \
- }
-+#define ENSURE_ALLOCATION(needed) \
-+ ENSURE_ALLOCATION_ELSE((needed), goto out_of_memory; )
-
- for (cp = format, i = 0, dp = &d.dir[0]; ; cp = dp->dir_end, i++, dp++)
- {
-@@ -2183,18 +2184,17 @@ VASNPRINTF (DCHAR_T *resultbuf, size_t *
- # endif
- if (converted == NULL)
- {
-- int saved_errno = errno;
- if (!(result == resultbuf || result == NULL))
- free (result);
- if (buf_malloced != NULL)
- free (buf_malloced);
- CLEANUP ();
-- errno = saved_errno;
- return NULL;
- }
- if (converted != result + length)
- {
-- ENSURE_ALLOCATION (xsum (length, converted_len));
-+ ENSURE_ALLOCATION_ELSE (xsum (length, converted_len),
-+ { free (converted); goto out_of_memory; });
- DCHAR_CPY (result + length, converted, converted_len);
- free (converted);
- }
-@@ -2309,18 +2309,17 @@ VASNPRINTF (DCHAR_T *resultbuf, size_t *
- # endif
- if (converted == NULL)
- {
-- int saved_errno = errno;
- if (!(result == resultbuf || result == NULL))
- free (result);
- if (buf_malloced != NULL)
- free (buf_malloced);
- CLEANUP ();
-- errno = saved_errno;
- return NULL;
- }
- if (converted != result + length)
- {
-- ENSURE_ALLOCATION (xsum (length, converted_len));
-+ ENSURE_ALLOCATION_ELSE (xsum (length, converted_len),
-+ { free (converted); goto out_of_memory; });
- DCHAR_CPY (result + length, converted, converted_len);
- free (converted);
- }
-@@ -2435,18 +2434,17 @@ VASNPRINTF (DCHAR_T *resultbuf, size_t *
- # endif
- if (converted == NULL)
- {
-- int saved_errno = errno;
- if (!(result == resultbuf || result == NULL))
- free (result);
- if (buf_malloced != NULL)
- free (buf_malloced);
- CLEANUP ();
-- errno = saved_errno;
- return NULL;
- }
- if (converted != result + length)
- {
-- ENSURE_ALLOCATION (xsum (length, converted_len));
-+ ENSURE_ALLOCATION_ELSE (xsum (length, converted_len),
-+ { free (converted); goto out_of_memory; });
- DCHAR_CPY (result + length, converted, converted_len);
- free (converted);
- }
-@@ -2852,14 +2850,12 @@ VASNPRINTF (DCHAR_T *resultbuf, size_t *
- NULL, &tmpdst_len);
- if (tmpdst == NULL)
- {
-- int saved_errno = errno;
- free (tmpsrc);
- if (!(result == resultbuf || result == NULL))
- free (result);
- if (buf_malloced != NULL)
- free (buf_malloced);
- CLEANUP ();
-- errno = saved_errno;
- return NULL;
- }
- free (tmpsrc);
-@@ -2951,7 +2947,8 @@ VASNPRINTF (DCHAR_T *resultbuf, size_t *
- }
- }
- # else
-- ENSURE_ALLOCATION (xsum (length, tmpdst_len));
-+ ENSURE_ALLOCATION_ELSE (xsum (length, tmpdst_len),
-+ { free (tmpdst); goto out_of_memory; });
- DCHAR_CPY (result + length, tmpdst, tmpdst_len);
- free (tmpdst);
- length += tmpdst_len;
-@@ -3079,13 +3076,11 @@ VASNPRINTF (DCHAR_T *resultbuf, size_t *
- NULL, &tmpdst_len);
- if (tmpdst == NULL)
- {
-- int saved_errno = errno;
- if (!(result == resultbuf || result == NULL))
- free (result);
- if (buf_malloced != NULL)
- free (buf_malloced);
- CLEANUP ();
-- errno = saved_errno;
- return NULL;
- }
- # endif
-@@ -3156,7 +3151,8 @@ VASNPRINTF (DCHAR_T *resultbuf, size_t *
- }
- }
- # else
-- ENSURE_ALLOCATION (xsum (length, tmpdst_len));
-+ ENSURE_ALLOCATION_ELSE (xsum (length, tmpdst_len),
-+ { free (tmpdst); goto out_of_memory; });
- DCHAR_CPY (result + length, tmpdst, tmpdst_len);
- free (tmpdst);
- length += tmpdst_len;
-@@ -5449,15 +5445,14 @@ VASNPRINTF (DCHAR_T *resultbuf, size_t *
- /* Attempt to handle failure. */
- if (count < 0)
- {
-- /* SNPRINTF or sprintf failed. Save and use the errno
-- that it has set, if any. */
-- int saved_errno = errno;
-- if (saved_errno == 0)
-+ /* SNPRINTF or sprintf failed. Use the errno that it
-+ has set, if any. */
-+ if (errno == 0)
- {
- if (dp->conversion == 'c' || dp->conversion == 's')
-- saved_errno = EILSEQ;
-+ errno = EILSEQ;
- else
-- saved_errno = EINVAL;
-+ errno = EINVAL;
- }
-
- if (!(result == resultbuf || result == NULL))
-@@ -5466,7 +5461,6 @@ VASNPRINTF (DCHAR_T *resultbuf, size_t *
- free (buf_malloced);
- CLEANUP ();
-
-- errno = saved_errno;
- return NULL;
- }
-
-@@ -5602,16 +5596,15 @@ VASNPRINTF (DCHAR_T *resultbuf, size_t *
- NULL, &tmpdst_len);
- if (tmpdst == NULL)
- {
-- int saved_errno = errno;
- if (!(result == resultbuf || result == NULL))
- free (result);
- if (buf_malloced != NULL)
- free (buf_malloced);
- CLEANUP ();
-- errno = saved_errno;
- return NULL;
- }
-- ENSURE_ALLOCATION (xsum (length, tmpdst_len));
-+ ENSURE_ALLOCATION_ELSE (xsum (length, tmpdst_len),
-+ { free (tmpdst); goto out_of_memory; });
- DCHAR_CPY (result + length, tmpdst, tmpdst_len);
- free (tmpdst);
- count = tmpdst_len;
---- a/gnulib/lib/vasnprintf.h
-+++ b/gnulib/lib/vasnprintf.h
-@@ -1,18 +1,18 @@
- /* vsprintf with automatic memory allocation.
- Copyright (C) 2002-2004, 2007-2021 Free Software Foundation, Inc.
-
-- This program is free software; you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 3, or (at your option)
-- any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License along
-- with this program; if not, see <https://www.gnu.org/licenses/>. */
-+ You should have received a copy of the GNU Lesser General Public License
-+ along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- #ifndef _VASNPRINTF_H
- #define _VASNPRINTF_H
---- a/gnulib/lib/vasprintf.c
-+++ b/gnulib/lib/vasprintf.c
-@@ -1,18 +1,18 @@
- /* Formatted output to strings.
- Copyright (C) 1999, 2002, 2006-2021 Free Software Foundation, Inc.
-
-- This program is free software; you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 3, or (at your option)
-- any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License along
-- with this program; if not, see <https://www.gnu.org/licenses/>. */
-+ You should have received a copy of the GNU Lesser General Public License
-+ along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- #include <config.h>
-
---- a/gnulib/lib/verify.h
-+++ b/gnulib/lib/verify.h
-@@ -2,17 +2,17 @@
-
- Copyright (C) 2005-2006, 2009-2021 Free Software Foundation, Inc.
-
-- This program is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 3 of the License, or
-- (at your option) any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License
-+ You should have received a copy of the GNU Lesser General Public License
- along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- /* Written by Paul Eggert, Bruno Haible, and Jim Meyering. */
---- a/gnulib/lib/warn-on-use.h
-+++ b/gnulib/lib/warn-on-use.h
-@@ -2,16 +2,16 @@
- Copyright (C) 2010-2021 Free Software Foundation, Inc.
-
- This program is free software: you can redistribute it and/or modify it
-- under the terms of the GNU General Public License as published
-- by the Free Software Foundation; either version 3 of the License, or
-+ under the terms of the GNU Lesser General Public License as published
-+ by the Free Software Foundation; either version 2 of the License, or
- (at your option) any later version.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-- General Public License for more details.
-+ Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License
-+ You should have received a copy of the GNU Lesser General Public License
- along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- /* _GL_WARN_ON_USE (function, "literal string") issues a declaration
---- a/gnulib/lib/wchar.in.h
-+++ b/gnulib/lib/wchar.in.h
-@@ -2,18 +2,18 @@
-
- Copyright (C) 2007-2021 Free Software Foundation, Inc.
-
-- This program is free software; you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 3, or (at your option)
-- any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License
-- along with this program; if not, see <https://www.gnu.org/licenses/>. */
-+ You should have received a copy of the GNU Lesser General Public License
-+ along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- /* Written by Eric Blake. */
-
-@@ -111,7 +111,7 @@
- /* mingw and MSVC define wint_t as 'unsigned short' in <crtdefs.h> or
- <stddef.h>. This is too small: ISO C 99 section 7.24.1.(2) says that
- wint_t must be "unchanged by default argument promotions". Override it. */
--# if @GNULIB_OVERRIDES_WINT_T@
-+# if @GNULIBHEADERS_OVERRIDE_WINT_T@
- # if !GNULIB_defined_wint_t
- # if @HAVE_CRTDEFS_H@
- # include <crtdefs.h>
---- a/gnulib/lib/wcrtomb.c
-+++ b/gnulib/lib/wcrtomb.c
-@@ -2,17 +2,17 @@
- Copyright (C) 2008-2021 Free Software Foundation, Inc.
- Written by Bruno Haible <bruno@clisp.org>, 2008.
-
-- This program is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 3 of the License, or
-- (at your option) any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License
-+ You should have received a copy of the GNU Lesser General Public License
- along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- #include <config.h>
---- a/gnulib/lib/wctype-h.c
-+++ b/gnulib/lib/wctype-h.c
-@@ -1,4 +1,23 @@
-+/* Inline functions for <wctype.h>.
-+
-+ Copyright (C) 2012-2021 Free Software Foundation, Inc.
-+
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-+
-+ This file is distributed in the hope that it will be useful,
-+ but WITHOUT ANY WARRANTY; without even the implied warranty of
-+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-+ GNU Lesser General Public License for more details.
-+
-+ You should have received a copy of the GNU Lesser General Public License
-+ along with this program. If not, see <https://www.gnu.org/licenses/>. */
-+
- /* Normally this would be wctype.c, but that name's already taken. */
-+
- #include <config.h>
-+
- #define _GL_WCTYPE_INLINE _GL_EXTERN_INLINE
- #include "wctype.h"
---- a/gnulib/lib/wctype.in.h
-+++ b/gnulib/lib/wctype.in.h
-@@ -2,18 +2,18 @@
-
- Copyright (C) 2006-2021 Free Software Foundation, Inc.
-
-- This program is free software; you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 3, or (at your option)
-- any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License
-- along with this program; if not, see <https://www.gnu.org/licenses/>. */
-+ You should have received a copy of the GNU Lesser General Public License
-+ along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- /* Written by Bruno Haible and Paul Eggert. */
-
-@@ -103,7 +103,7 @@ _GL_INLINE_HEADER_BEGIN
- /* mingw and MSVC define wint_t as 'unsigned short' in <crtdefs.h> or
- <stddef.h>. This is too small: ISO C 99 section 7.24.1.(2) says that
- wint_t must be "unchanged by default argument promotions". Override it. */
--# if @GNULIB_OVERRIDES_WINT_T@
-+# if @GNULIBHEADERS_OVERRIDE_WINT_T@
- # if !GNULIB_defined_wint_t
- # if @HAVE_CRTDEFS_H@
- # include <crtdefs.h>
-@@ -132,7 +132,7 @@ typedef unsigned int rpl_wint_t;
- same way, or not at all. */
- # if ! @HAVE_ISWCNTRL@ || @REPLACE_ISWCNTRL@
-
--# if @GNULIB_OVERRIDES_WINT_T@ /* implies @REPLACE_ISWCNTRL@ */
-+# if @GNULIBHEADERS_OVERRIDE_WINT_T@ /* implies @REPLACE_ISWCNTRL@ */
-
- _GL_WCTYPE_INLINE int
- rpl_iswalnum (wint_t wc)
-@@ -496,7 +496,7 @@ _GL_FUNCDECL_RPL (iswxdigit, int, (wint_
-
- # endif
-
--# if defined __MINGW32__ && !@GNULIB_OVERRIDES_WINT_T@
-+# if defined __MINGW32__ && !@GNULIBHEADERS_OVERRIDE_WINT_T@
-
- /* On native Windows, wchar_t is uint16_t, and wint_t is uint32_t.
- The functions towlower and towupper are implemented in the MSVCRT library
-@@ -529,7 +529,7 @@ rpl_towupper (wint_t wc)
- # define towupper rpl_towupper
- # endif
-
--# endif /* __MINGW32__ && !@GNULIB_OVERRIDES_WINT_T@ */
-+# endif /* __MINGW32__ && !@GNULIBHEADERS_OVERRIDE_WINT_T@ */
-
- # define GNULIB_defined_wctype_functions 1
- #endif
-@@ -646,7 +646,7 @@ _GL_WARN_ON_USE (wctype, "wctype is unpo
- The argument WC must be either a wchar_t value or WEOF.
- The argument DESC must have been returned by the wctype() function. */
- #if @GNULIB_ISWCTYPE@
--# if @GNULIB_OVERRIDES_WINT_T@
-+# if @GNULIBHEADERS_OVERRIDE_WINT_T@
- # if !(defined __cplusplus && defined GNULIB_NAMESPACE)
- # undef iswctype
- # define iswctype rpl_iswctype
---- a/gnulib/lib/wcwidth.c
-+++ b/gnulib/lib/wcwidth.c
-@@ -1,17 +1,17 @@
- /* Determine the number of screen columns needed for a character.
- Copyright (C) 2006-2007, 2010-2021 Free Software Foundation, Inc.
-
-- This program is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 3 of the License, or
-- (at your option) any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License
-+ You should have received a copy of the GNU Lesser General Public License
- along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- #include <config.h>
---- a/gnulib/lib/windows-initguard.h
-+++ b/gnulib/lib/windows-initguard.h
-@@ -1,18 +1,18 @@
- /* Init guards, somewhat like spinlocks (native Windows implementation).
- Copyright (C) 2005-2021 Free Software Foundation, Inc.
-
-- This program is free software; you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 3, or (at your option)
-- any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License
-- along with this program; if not, see <https://www.gnu.org/licenses/>. */
-+ You should have received a copy of the GNU Lesser General Public License
-+ along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- /* Written by Bruno Haible <bruno@clisp.org>, 2005.
- Based on GCC's gthr-win32.h. */
---- a/gnulib/lib/windows-mutex.c
-+++ b/gnulib/lib/windows-mutex.c
-@@ -1,18 +1,18 @@
- /* Plain mutexes (native Windows implementation).
- Copyright (C) 2005-2021 Free Software Foundation, Inc.
-
-- This program is free software; you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 3, or (at your option)
-- any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License
-- along with this program; if not, see <https://www.gnu.org/licenses/>. */
-+ You should have received a copy of the GNU Lesser General Public License
-+ along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- /* Written by Bruno Haible <bruno@clisp.org>, 2005.
- Based on GCC's gthr-win32.h. */
---- a/gnulib/lib/windows-mutex.h
-+++ b/gnulib/lib/windows-mutex.h
-@@ -1,18 +1,18 @@
- /* Plain mutexes (native Windows implementation).
- Copyright (C) 2005-2021 Free Software Foundation, Inc.
-
-- This program is free software; you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 3, or (at your option)
-- any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License
-- along with this program; if not, see <https://www.gnu.org/licenses/>. */
-+ You should have received a copy of the GNU Lesser General Public License
-+ along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- /* Written by Bruno Haible <bruno@clisp.org>, 2005.
- Based on GCC's gthr-win32.h. */
---- a/gnulib/lib/windows-once.c
-+++ b/gnulib/lib/windows-once.c
-@@ -1,18 +1,18 @@
- /* Once-only control (native Windows implementation).
- Copyright (C) 2005-2021 Free Software Foundation, Inc.
-
-- This program is free software; you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 3, or (at your option)
-- any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License
-- along with this program; if not, see <https://www.gnu.org/licenses/>. */
-+ You should have received a copy of the GNU Lesser General Public License
-+ along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- /* Written by Bruno Haible <bruno@clisp.org>, 2005.
- Based on GCC's gthr-win32.h. */
---- a/gnulib/lib/windows-once.h
-+++ b/gnulib/lib/windows-once.h
-@@ -1,18 +1,18 @@
- /* Once-only control (native Windows implementation).
- Copyright (C) 2005-2021 Free Software Foundation, Inc.
-
-- This program is free software; you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 3, or (at your option)
-- any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License
-- along with this program; if not, see <https://www.gnu.org/licenses/>. */
-+ You should have received a copy of the GNU Lesser General Public License
-+ along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- /* Written by Bruno Haible <bruno@clisp.org>, 2005.
- Based on GCC's gthr-win32.h. */
---- a/gnulib/lib/windows-recmutex.c
-+++ b/gnulib/lib/windows-recmutex.c
-@@ -1,18 +1,18 @@
- /* Plain recursive mutexes (native Windows implementation).
- Copyright (C) 2005-2021 Free Software Foundation, Inc.
-
-- This program is free software; you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 3, or (at your option)
-- any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License
-- along with this program; if not, see <https://www.gnu.org/licenses/>. */
-+ You should have received a copy of the GNU Lesser General Public License
-+ along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- /* Written by Bruno Haible <bruno@clisp.org>, 2005.
- Based on GCC's gthr-win32.h. */
---- a/gnulib/lib/windows-recmutex.h
-+++ b/gnulib/lib/windows-recmutex.h
-@@ -1,18 +1,18 @@
- /* Plain recursive mutexes (native Windows implementation).
- Copyright (C) 2005-2021 Free Software Foundation, Inc.
-
-- This program is free software; you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 3, or (at your option)
-- any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License
-- along with this program; if not, see <https://www.gnu.org/licenses/>. */
-+ You should have received a copy of the GNU Lesser General Public License
-+ along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- /* Written by Bruno Haible <bruno@clisp.org>, 2005.
- Based on GCC's gthr-win32.h. */
---- a/gnulib/lib/windows-rwlock.c
-+++ b/gnulib/lib/windows-rwlock.c
-@@ -1,18 +1,18 @@
- /* Read-write locks (native Windows implementation).
- Copyright (C) 2005-2021 Free Software Foundation, Inc.
-
-- This program is free software; you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 3, or (at your option)
-- any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License
-- along with this program; if not, see <https://www.gnu.org/licenses/>. */
-+ You should have received a copy of the GNU Lesser General Public License
-+ along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- /* Written by Bruno Haible <bruno@clisp.org>, 2005.
- Based on GCC's gthr-win32.h. */
---- a/gnulib/lib/windows-rwlock.h
-+++ b/gnulib/lib/windows-rwlock.h
-@@ -1,18 +1,18 @@
- /* Read-write locks (native Windows implementation).
- Copyright (C) 2005-2021 Free Software Foundation, Inc.
-
-- This program is free software; you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 3, or (at your option)
-- any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License
-- along with this program; if not, see <https://www.gnu.org/licenses/>. */
-+ You should have received a copy of the GNU Lesser General Public License
-+ along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- /* Written by Bruno Haible <bruno@clisp.org>, 2005.
- Based on GCC's gthr-win32.h. */
---- a/gnulib/lib/xalloc-oversized.h
-+++ b/gnulib/lib/xalloc-oversized.h
-@@ -2,17 +2,17 @@
-
- Copyright (C) 1990-2000, 2003-2004, 2006-2021 Free Software Foundation, Inc.
-
-- This program is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 3 of the License, or
-- (at your option) any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License
-+ You should have received a copy of the GNU Lesser General Public License
- along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- #ifndef XALLOC_OVERSIZED_H_
-@@ -21,34 +21,39 @@
- #include <stddef.h>
- #include <stdint.h>
-
--/* True if N * S would overflow in a size_t calculation,
-- or would generate a value larger than PTRDIFF_MAX.
-+/* True if N * S does not fit into both ptrdiff_t and size_t.
-+ N and S should be nonnegative and free of side effects.
- This expands to a constant expression if N and S are both constants.
-- By gnulib convention, SIZE_MAX represents overflow in size
-+ By gnulib convention, SIZE_MAX represents overflow in size_t
- calculations, so the conservative size_t-based dividend to use here
- is SIZE_MAX - 1. */
- #define __xalloc_oversized(n, s) \
-- ((size_t) (PTRDIFF_MAX < SIZE_MAX ? PTRDIFF_MAX : SIZE_MAX - 1) / (s) < (n))
--
--#if PTRDIFF_MAX < SIZE_MAX
--typedef ptrdiff_t __xalloc_count_type;
--#else
--typedef size_t __xalloc_count_type;
--#endif
--
--/* Return 1 if an array of N objects, each of size S, cannot exist
-- reliably due to size or ptrdiff_t arithmetic overflow. S must be
-- positive and N must be nonnegative. This is a macro, not a
-- function, so that it works correctly even when SIZE_MAX < N. */
--
--#if 7 <= __GNUC__ && !defined __clang__
-+ ((s) != 0 \
-+ && ((size_t) (PTRDIFF_MAX < SIZE_MAX ? PTRDIFF_MAX : SIZE_MAX - 1) / (s) \
-+ < (n)))
-+
-+/* Return 1 if and only if an array of N objects, each of size S,
-+ cannot exist reliably because its total size in bytes would exceed
-+ MIN (PTRDIFF_MAX, SIZE_MAX - 1).
-+
-+ N and S should be nonnegative and free of side effects.
-+
-+ Warning: (xalloc_oversized (N, S) ? NULL : malloc (N * S)) can
-+ misbehave if N and S are both narrower than ptrdiff_t and size_t,
-+ and can be rewritten as (xalloc_oversized (N, S) ? NULL
-+ : malloc (N * (size_t) S)).
-+
-+ This is a macro, not a function, so that it works even if an
-+ argument exceeds MAX (PTRDIFF_MAX, SIZE_MAX). */
-+#if 7 <= __GNUC__ && !defined __clang__ && PTRDIFF_MAX < SIZE_MAX
- # define xalloc_oversized(n, s) \
-- __builtin_mul_overflow_p (n, s, (__xalloc_count_type) 1)
--#elif 5 <= __GNUC__ && !defined __ICC && !__STRICT_ANSI__
-+ __builtin_mul_overflow_p (n, s, (ptrdiff_t) 1)
-+#elif (5 <= __GNUC__ && !defined __ICC && !__STRICT_ANSI__ \
-+ && PTRDIFF_MAX < SIZE_MAX)
- # define xalloc_oversized(n, s) \
- (__builtin_constant_p (n) && __builtin_constant_p (s) \
- ? __xalloc_oversized (n, s) \
-- : ({ __xalloc_count_type __xalloc_count; \
-+ : ({ ptrdiff_t __xalloc_count; \
- __builtin_mul_overflow (n, s, &__xalloc_count); }))
-
- /* Other compilers use integer division; this may be slower but is
---- a/gnulib/lib/xalloc.h
-+++ b/gnulib/lib/xalloc.h
-@@ -21,7 +21,10 @@
- #include <stddef.h>
- #include <stdint.h>
-
--#include "xalloc-oversized.h"
-+#if GNULIB_XALLOC
-+# include "idx.h"
-+# include "intprops.h"
-+#endif
-
- #ifndef _GL_INLINE_HEADER_BEGIN
- #error "Please include config.h first."
-@@ -50,17 +53,26 @@ extern "C" {
-
- #if GNULIB_XALLOC
-
--void *xmalloc (size_t s)
-- _GL_ATTRIBUTE_MALLOC _GL_ATTRIBUTE_ALLOC_SIZE ((1));
--void *xzalloc (size_t s)
-- _GL_ATTRIBUTE_MALLOC _GL_ATTRIBUTE_ALLOC_SIZE ((1));
-+void *xmalloc (size_t s) _GL_ATTRIBUTE_MALLOC _GL_ATTRIBUTE_ALLOC_SIZE ((1));
-+void *ximalloc (idx_t s) _GL_ATTRIBUTE_MALLOC _GL_ATTRIBUTE_ALLOC_SIZE ((1));
-+void *xzalloc (size_t s) _GL_ATTRIBUTE_MALLOC _GL_ATTRIBUTE_ALLOC_SIZE ((1));
-+void *xizalloc (idx_t s) _GL_ATTRIBUTE_MALLOC _GL_ATTRIBUTE_ALLOC_SIZE ((1));
- void *xcalloc (size_t n, size_t s)
-- _GL_ATTRIBUTE_MALLOC _GL_ATTRIBUTE_ALLOC_SIZE ((1, 2));
--void *xrealloc (void *p, size_t s)
-- _GL_ATTRIBUTE_ALLOC_SIZE ((2));
--void *x2realloc (void *p, size_t *pn);
--void *xmemdup (void const *p, size_t s)
-- _GL_ATTRIBUTE_ALLOC_SIZE ((2));
-+ _GL_ATTRIBUTE_MALLOC _GL_ATTRIBUTE_ALLOC_SIZE ((1, 2));
-+void *xicalloc (idx_t n, idx_t s)
-+ _GL_ATTRIBUTE_MALLOC _GL_ATTRIBUTE_ALLOC_SIZE ((1, 2));
-+void *xrealloc (void *p, size_t s) _GL_ATTRIBUTE_ALLOC_SIZE ((2));
-+void *xirealloc (void *p, idx_t s) _GL_ATTRIBUTE_ALLOC_SIZE ((2));
-+void *xreallocarray (void *p, size_t n, size_t s)
-+ _GL_ATTRIBUTE_ALLOC_SIZE ((2, 3));
-+void *xireallocarray (void *p, idx_t n, idx_t s)
-+ _GL_ATTRIBUTE_ALLOC_SIZE ((2, 3));
-+void *x2realloc (void *p, size_t *ps); /* superseded by xpalloc */
-+void *x2nrealloc (void *p, size_t *pn, size_t s); /* superseded by xpalloc */
-+void *xpalloc (void *pa, idx_t *pn, idx_t n_incr_min, ptrdiff_t n_max, idx_t s);
-+void *xmemdup (void const *p, size_t s) _GL_ATTRIBUTE_ALLOC_SIZE ((2));
-+void *ximemdup (void const *p, idx_t s) _GL_ATTRIBUTE_ALLOC_SIZE ((2));
-+char *ximemdup0 (void const *p, idx_t s) _GL_ATTRIBUTE_MALLOC;
- char *xstrdup (char const *str)
- _GL_ATTRIBUTE_MALLOC;
-
-@@ -98,11 +110,10 @@ XALLOC_INLINE void *xnmalloc (size_t n,
- XALLOC_INLINE void *
- xnmalloc (size_t n, size_t s)
- {
-- if (xalloc_oversized (n, s))
-- xalloc_die ();
-- return xmalloc (n * s);
-+ return xreallocarray (NULL, n, s);
- }
-
-+/* FIXME: Deprecate this in favor of xreallocarray? */
- /* Change the size of an allocated block of memory P to an array of N
- objects each of S bytes, with error checking. S must be nonzero. */
-
-@@ -111,100 +122,7 @@ XALLOC_INLINE void *xnrealloc (void *p,
- XALLOC_INLINE void *
- xnrealloc (void *p, size_t n, size_t s)
- {
-- if (xalloc_oversized (n, s))
-- xalloc_die ();
-- return xrealloc (p, n * s);
--}
--
--/* If P is null, allocate a block of at least *PN such objects;
-- otherwise, reallocate P so that it contains more than *PN objects
-- each of S bytes. S must be nonzero. Set *PN to the new number of
-- objects, and return the pointer to the new block. *PN is never set
-- to zero, and the returned pointer is never null.
--
-- Repeated reallocations are guaranteed to make progress, either by
-- allocating an initial block with a nonzero size, or by allocating a
-- larger block.
--
-- In the following implementation, nonzero sizes are increased by a
-- factor of approximately 1.5 so that repeated reallocations have
-- O(N) overall cost rather than O(N**2) cost, but the
-- specification for this function does not guarantee that rate.
--
-- Here is an example of use:
--
-- int *p = NULL;
-- size_t used = 0;
-- size_t allocated = 0;
--
-- void
-- append_int (int value)
-- {
-- if (used == allocated)
-- p = x2nrealloc (p, &allocated, sizeof *p);
-- p[used++] = value;
-- }
--
-- This causes x2nrealloc to allocate a block of some nonzero size the
-- first time it is called.
--
-- To have finer-grained control over the initial size, set *PN to a
-- nonzero value before calling this function with P == NULL. For
-- example:
--
-- int *p = NULL;
-- size_t used = 0;
-- size_t allocated = 0;
-- size_t allocated1 = 1000;
--
-- void
-- append_int (int value)
-- {
-- if (used == allocated)
-- {
-- p = x2nrealloc (p, &allocated1, sizeof *p);
-- allocated = allocated1;
-- }
-- p[used++] = value;
-- }
--
-- */
--
--XALLOC_INLINE void *
--x2nrealloc (void *p, size_t *pn, size_t s)
--{
-- size_t n = *pn;
--
-- if (! p)
-- {
-- if (! n)
-- {
-- /* The approximate size to use for initial small allocation
-- requests, when the invoking code specifies an old size of
-- zero. This is the largest "small" request for the GNU C
-- library malloc. */
-- enum { DEFAULT_MXFAST = 64 * sizeof (size_t) / 4 };
--
-- n = DEFAULT_MXFAST / s;
-- n += !n;
-- }
-- if (xalloc_oversized (n, s))
-- xalloc_die ();
-- }
-- else
-- {
-- /* Set N = floor (1.5 * N) + 1 so that progress is made even if N == 0.
-- Check for overflow, so that N * S stays in both ptrdiff_t and
-- size_t range. The check may be slightly conservative, but an
-- exact check isn't worth the trouble. */
-- if ((PTRDIFF_MAX < SIZE_MAX ? PTRDIFF_MAX : SIZE_MAX) / 3 * 2 / s
-- <= n)
-- xalloc_die ();
-- n += n / 2 + 1;
-- }
--
-- *pn = n;
-- return xrealloc (p, n * s);
-+ return xreallocarray (p, n, s);
- }
-
- /* Return a pointer to a new buffer of N bytes. This is like xmalloc,
-@@ -239,9 +157,16 @@ xrealloc (T *p, size_t s)
- }
-
- template <typename T> inline T *
-+xreallocarray (T *p, size_t n, size_t s)
-+{
-+ return (T *) xreallocarray ((void *) p, n, s);
-+}
-+
-+/* FIXME: Deprecate this in favor of xreallocarray? */
-+template <typename T> inline T *
- xnrealloc (T *p, size_t n, size_t s)
- {
-- return (T *) xnrealloc ((void *) p, n, s);
-+ return xreallocarray (p, n, s);
- }
-
- template <typename T> inline T *
---- a/gnulib/lib/xmalloc.c
-+++ b/gnulib/lib/xmalloc.c
-@@ -21,80 +21,250 @@
-
- #include "xalloc.h"
-
-+#include "ialloc.h"
-+#include "intprops.h"
-+#include "minmax.h"
-+
- #include <stdlib.h>
- #include <string.h>
-
--/* 1 if calloc, malloc and realloc are known to be compatible with GNU.
-- This matters if we are not also using the calloc-gnu, malloc-gnu
-- and realloc-gnu modules, which define HAVE_CALLOC_GNU,
-- HAVE_MALLOC_GNU and HAVE_REALLOC_GNU and support the GNU API even
-- on non-GNU platforms. */
--#if defined HAVE_CALLOC_GNU || (defined __GLIBC__ && !defined __UCLIBC__)
--enum { HAVE_GNU_CALLOC = 1 };
--#else
--enum { HAVE_GNU_CALLOC = 0 };
--#endif
--#if defined HAVE_MALLOC_GNU || (defined __GLIBC__ && !defined __UCLIBC__)
--enum { HAVE_GNU_MALLOC = 1 };
--#else
--enum { HAVE_GNU_MALLOC = 0 };
--#endif
--#if defined HAVE_REALLOC_GNU || (defined __GLIBC__ && !defined __UCLIBC__)
--enum { HAVE_GNU_REALLOC = 1 };
--#else
--enum { HAVE_GNU_REALLOC = 0 };
--#endif
-+static void * _GL_ATTRIBUTE_PURE
-+nonnull (void *p)
-+{
-+ if (!p)
-+ xalloc_die ();
-+ return p;
-+}
-
--/* Allocate N bytes of memory dynamically, with error checking. */
-+/* Allocate S bytes of memory dynamically, with error checking. */
-
- void *
--xmalloc (size_t n)
-+xmalloc (size_t s)
- {
-- void *p = malloc (n);
-- if (!p && (HAVE_GNU_MALLOC || n))
-- xalloc_die ();
-- return p;
-+ return nonnull (malloc (s));
-+}
-+
-+void *
-+ximalloc (idx_t s)
-+{
-+ return nonnull (imalloc (s));
- }
-
--/* Change the size of an allocated block of memory P to N bytes,
-+/* Change the size of an allocated block of memory P to S bytes,
- with error checking. */
-
- void *
--xrealloc (void *p, size_t n)
-+xrealloc (void *p, size_t s)
- {
-- if (!HAVE_GNU_REALLOC && !n && p)
-- {
-- /* The GNU and C99 realloc behaviors disagree here. Act like GNU. */
-- free (p);
-- return NULL;
-- }
-+ void *r = realloc (p, s);
-+ if (!r && (!p || s))
-+ xalloc_die ();
-+ return r;
-+}
-+
-+void *
-+xirealloc (void *p, idx_t s)
-+{
-+ return nonnull (irealloc (p, s));
-+}
-+
-+/* Change the size of an allocated block of memory P to an array of N
-+ objects each of S bytes, with error checking. */
-
-- void *r = realloc (p, n);
-- if (!r && (n || (HAVE_GNU_REALLOC && !p)))
-+void *
-+xreallocarray (void *p, size_t n, size_t s)
-+{
-+ void *r = reallocarray (p, n, s);
-+ if (!r && (!p || (n && s)))
- xalloc_die ();
- return r;
- }
-
--/* If P is null, allocate a block of at least *PN bytes; otherwise,
-- reallocate P so that it contains more than *PN bytes. *PN must be
-- nonzero unless P is null. Set *PN to the new block's size, and
-- return the pointer to the new block. *PN is never set to zero, and
-+void *
-+xireallocarray (void *p, idx_t n, idx_t s)
-+{
-+ return nonnull (ireallocarray (p, n, s));
-+}
-+
-+/* If P is null, allocate a block of at least *PS bytes; otherwise,
-+ reallocate P so that it contains more than *PS bytes. *PS must be
-+ nonzero unless P is null. Set *PS to the new block's size, and
-+ return the pointer to the new block. *PS is never set to zero, and
- the returned pointer is never null. */
-
- void *
--x2realloc (void *p, size_t *pn)
-+x2realloc (void *p, size_t *ps)
-+{
-+ return x2nrealloc (p, ps, 1);
-+}
-+
-+/* If P is null, allocate a block of at least *PN such objects;
-+ otherwise, reallocate P so that it contains more than *PN objects
-+ each of S bytes. S must be nonzero. Set *PN to the new number of
-+ objects, and return the pointer to the new block. *PN is never set
-+ to zero, and the returned pointer is never null.
-+
-+ Repeated reallocations are guaranteed to make progress, either by
-+ allocating an initial block with a nonzero size, or by allocating a
-+ larger block.
-+
-+ In the following implementation, nonzero sizes are increased by a
-+ factor of approximately 1.5 so that repeated reallocations have
-+ O(N) overall cost rather than O(N**2) cost, but the
-+ specification for this function does not guarantee that rate.
-+
-+ Here is an example of use:
-+
-+ int *p = NULL;
-+ size_t used = 0;
-+ size_t allocated = 0;
-+
-+ void
-+ append_int (int value)
-+ {
-+ if (used == allocated)
-+ p = x2nrealloc (p, &allocated, sizeof *p);
-+ p[used++] = value;
-+ }
-+
-+ This causes x2nrealloc to allocate a block of some nonzero size the
-+ first time it is called.
-+
-+ To have finer-grained control over the initial size, set *PN to a
-+ nonzero value before calling this function with P == NULL. For
-+ example:
-+
-+ int *p = NULL;
-+ size_t used = 0;
-+ size_t allocated = 0;
-+ size_t allocated1 = 1000;
-+
-+ void
-+ append_int (int value)
-+ {
-+ if (used == allocated)
-+ {
-+ p = x2nrealloc (p, &allocated1, sizeof *p);
-+ allocated = allocated1;
-+ }
-+ p[used++] = value;
-+ }
-+
-+ */
-+
-+void *
-+x2nrealloc (void *p, size_t *pn, size_t s)
- {
-- return x2nrealloc (p, pn, 1);
-+ size_t n = *pn;
-+
-+ if (! p)
-+ {
-+ if (! n)
-+ {
-+ /* The approximate size to use for initial small allocation
-+ requests, when the invoking code specifies an old size of
-+ zero. This is the largest "small" request for the GNU C
-+ library malloc. */
-+ enum { DEFAULT_MXFAST = 64 * sizeof (size_t) / 4 };
-+
-+ n = DEFAULT_MXFAST / s;
-+ n += !n;
-+ }
-+ }
-+ else
-+ {
-+ /* Set N = floor (1.5 * N) + 1 to make progress even if N == 0. */
-+ if (INT_ADD_WRAPV (n, (n >> 1) + 1, &n))
-+ xalloc_die ();
-+ }
-+
-+ p = xreallocarray (p, n, s);
-+ *pn = n;
-+ return p;
- }
-
--/* Allocate N bytes of zeroed memory dynamically, with error checking.
-+/* Grow PA, which points to an array of *PN items, and return the
-+ location of the reallocated array, updating *PN to reflect its
-+ new size. The new array will contain at least N_INCR_MIN more
-+ items, but will not contain more than N_MAX items total.
-+ S is the size of each item, in bytes.
-+
-+ S and N_INCR_MIN must be positive. *PN must be
-+ nonnegative. If N_MAX is -1, it is treated as if it were
-+ infinity.
-+
-+ If PA is null, then allocate a new array instead of reallocating
-+ the old one.
-+
-+ Thus, to grow an array A without saving its old contents, do
-+ { free (A); A = xpalloc (NULL, &AITEMS, ...); }. */
-+
-+void *
-+xpalloc (void *pa, idx_t *pn, idx_t n_incr_min, ptrdiff_t n_max, idx_t s)
-+{
-+ idx_t n0 = *pn;
-+
-+ /* The approximate size to use for initial small allocation
-+ requests. This is the largest "small" request for the GNU C
-+ library malloc. */
-+ enum { DEFAULT_MXFAST = 64 * sizeof (size_t) / 4 };
-+
-+ /* If the array is tiny, grow it to about (but no greater than)
-+ DEFAULT_MXFAST bytes. Otherwise, grow it by about 50%.
-+ Adjust the growth according to three constraints: N_INCR_MIN,
-+ N_MAX, and what the C language can represent safely. */
-+
-+ idx_t n;
-+ if (INT_ADD_WRAPV (n0, n0 >> 1, &n))
-+ n = IDX_MAX;
-+ if (0 <= n_max && n_max < n)
-+ n = n_max;
-+
-+ /* NBYTES is of a type suitable for holding the count of bytes in an object.
-+ This is typically idx_t, but it should be size_t on (theoretical?)
-+ platforms where SIZE_MAX < IDX_MAX so xpalloc does not pass
-+ values greater than SIZE_MAX to xrealloc. */
-+#if IDX_MAX <= SIZE_MAX
-+ idx_t nbytes;
-+#else
-+ size_t nbytes;
-+#endif
-+ idx_t adjusted_nbytes
-+ = (INT_MULTIPLY_WRAPV (n, s, &nbytes)
-+ ? MIN (IDX_MAX, SIZE_MAX)
-+ : nbytes < DEFAULT_MXFAST ? DEFAULT_MXFAST : 0);
-+ if (adjusted_nbytes)
-+ {
-+ n = adjusted_nbytes / s;
-+ nbytes = adjusted_nbytes - adjusted_nbytes % s;
-+ }
-+
-+ if (! pa)
-+ *pn = 0;
-+ if (n - n0 < n_incr_min
-+ && (INT_ADD_WRAPV (n0, n_incr_min, &n)
-+ || (0 <= n_max && n_max < n)
-+ || INT_MULTIPLY_WRAPV (n, s, &nbytes)))
-+ xalloc_die ();
-+ pa = xrealloc (pa, nbytes);
-+ *pn = n;
-+ return pa;
-+}
-+
-+/* Allocate S bytes of zeroed memory dynamically, with error checking.
- There's no need for xnzalloc (N, S), since it would be equivalent
- to xcalloc (N, S). */
-
- void *
--xzalloc (size_t n)
-+xzalloc (size_t s)
-+{
-+ return xcalloc (s, 1);
-+}
-+
-+void *
-+xizalloc (idx_t s)
- {
-- return xcalloc (n, 1);
-+ return xicalloc (s, 1);
- }
-
- /* Allocate zeroed memory for N elements of S bytes, with error
-@@ -103,15 +273,13 @@ xzalloc (size_t n)
- void *
- xcalloc (size_t n, size_t s)
- {
-- void *p;
-- /* Test for overflow, since objects with size greater than
-- PTRDIFF_MAX cause pointer subtraction to go awry. Omit size-zero
-- tests if HAVE_GNU_CALLOC, since GNU calloc never returns NULL if
-- successful. */
-- if (xalloc_oversized (n, s)
-- || (! (p = calloc (n, s)) && (HAVE_GNU_CALLOC || n != 0)))
-- xalloc_die ();
-- return p;
-+ return nonnull (calloc (n, s));
-+}
-+
-+void *
-+xicalloc (idx_t n, idx_t s)
-+{
-+ return nonnull (icalloc (n, s));
- }
-
- /* Clone an object P of size S, with error checking. There's no need
-@@ -124,6 +292,23 @@ xmemdup (void const *p, size_t s)
- return memcpy (xmalloc (s), p, s);
- }
-
-+void *
-+ximemdup (void const *p, idx_t s)
-+{
-+ return memcpy (ximalloc (s), p, s);
-+}
-+
-+/* Clone an object P of size S, with error checking. Append
-+ a terminating NUL byte. */
-+
-+char *
-+ximemdup0 (void const *p, idx_t s)
-+{
-+ char *result = ximalloc (s + 1);
-+ result[s] = 0;
-+ return memcpy (result, p, s);
-+}
-+
- /* Clone STRING. */
-
- char *
---- a/gnulib/lib/xsize.c
-+++ b/gnulib/lib/xsize.c
-@@ -1,3 +1,21 @@
-+/* Checked size_t computations.
-+
-+ Copyright (C) 2012-2021 Free Software Foundation, Inc.
-+
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-+
-+ This file is distributed in the hope that it will be useful,
-+ but WITHOUT ANY WARRANTY; without even the implied warranty of
-+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-+ GNU Lesser General Public License for more details.
-+
-+ You should have received a copy of the GNU Lesser General Public License
-+ along with this program. If not, see <https://www.gnu.org/licenses/>. */
-+
- #include <config.h>
-+
- #define XSIZE_INLINE _GL_EXTERN_INLINE
- #include "xsize.h"
---- a/gnulib/lib/xsize.h
-+++ b/gnulib/lib/xsize.h
-@@ -2,18 +2,18 @@
-
- Copyright (C) 2003, 2008-2021 Free Software Foundation, Inc.
-
-- This program is free software; you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 3, or (at your option)
-- any later version.
-+ This file is free software: you can redistribute it and/or modify
-+ it under the terms of the GNU Lesser General Public License as
-+ published by the Free Software Foundation; either version 2.1 of the
-+ License, or (at your option) any later version.
-
-- This program is distributed in the hope that it will be useful,
-+ This file is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-+ GNU Lesser General Public License for more details.
-
-- You should have received a copy of the GNU General Public License
-- along with this program; if not, see <https://www.gnu.org/licenses/>. */
-+ You should have received a copy of the GNU Lesser General Public License
-+ along with this program. If not, see <https://www.gnu.org/licenses/>. */
-
- #ifndef _XSIZE_H
- #define _XSIZE_H
---- /dev/null
-+++ b/gnulib/m4/calloc.m4
-@@ -0,0 +1,82 @@
-+# calloc.m4 serial 27
-+
-+# Copyright (C) 2004-2021 Free Software Foundation, Inc.
-+# This file is free software; the Free Software Foundation
-+# gives unlimited permission to copy and/or distribute it,
-+# with or without modifications, as long as this notice is preserved.
-+
-+# Written by Jim Meyering.
-+
-+# Determine whether calloc (N, S) returns non-NULL when N*S is zero,
-+# and returns NULL when N*S overflows.
-+# If so, define HAVE_CALLOC. Otherwise, define calloc to rpl_calloc
-+# and arrange to use a calloc wrapper function that does work in that case.
-+
-+# _AC_FUNC_CALLOC_IF([IF-WORKS], [IF-NOT])
-+# -------------------------------------
-+# If calloc is compatible with GNU calloc, run IF-WORKS, otherwise, IF-NOT.
-+AC_DEFUN([_AC_FUNC_CALLOC_IF],
-+[
-+ AC_REQUIRE([AC_CANONICAL_HOST]) dnl for cross-compiles
-+ AC_CACHE_CHECK([whether calloc (0, n) and calloc (n, 0) return nonnull],
-+ [ac_cv_func_calloc_0_nonnull],
-+ [if test $cross_compiling != yes; then
-+ ac_cv_func_calloc_0_nonnull=yes
-+ AC_RUN_IFELSE(
-+ [AC_LANG_PROGRAM(
-+ [AC_INCLUDES_DEFAULT],
-+ [[int result = 0;
-+ char * volatile p = calloc (0, 0);
-+ if (!p)
-+ result |= 1;
-+ free (p);
-+ return result;
-+ ]])],
-+ [],
-+ [ac_cv_func_calloc_0_nonnull=no])
-+ else
-+ case "$host_os" in
-+ # Guess yes on glibc systems.
-+ *-gnu* | gnu*) ac_cv_func_calloc_0_nonnull="guessing yes" ;;
-+ # Guess yes on musl systems.
-+ *-musl*) ac_cv_func_calloc_0_nonnull="guessing yes" ;;
-+ # Guess yes on native Windows.
-+ mingw*) ac_cv_func_calloc_0_nonnull="guessing yes" ;;
-+ # If we don't know, obey --enable-cross-guesses.
-+ *) ac_cv_func_calloc_0_nonnull="$gl_cross_guess_normal" ;;
-+ esac
-+ fi
-+ ])
-+ AS_CASE([$ac_cv_func_calloc_0_nonnull], [*yes], [$1], [$2])
-+])
-+
-+
-+# gl_FUNC_CALLOC_GNU
-+# ------------------
-+# Replace calloc if it is not compatible with GNU libc.
-+AC_DEFUN([gl_FUNC_CALLOC_GNU],
-+[
-+ AC_REQUIRE([gl_STDLIB_H_DEFAULTS])
-+ AC_REQUIRE([gl_FUNC_CALLOC_POSIX])
-+ if test $REPLACE_CALLOC = 0; then
-+ _AC_FUNC_CALLOC_IF([], [REPLACE_CALLOC=1])
-+ fi
-+])# gl_FUNC_CALLOC_GNU
-+
-+# gl_FUNC_CALLOC_POSIX
-+# --------------------
-+# Test whether 'calloc' is POSIX compliant (sets errno to ENOMEM when it
-+# fails, and doesn't mess up with ptrdiff_t or size_t overflow),
-+# and replace calloc if it is not.
-+AC_DEFUN([gl_FUNC_CALLOC_POSIX],
-+[
-+ AC_REQUIRE([gl_STDLIB_H_DEFAULTS])
-+ AC_REQUIRE([gl_FUNC_MALLOC_POSIX])
-+ if test $REPLACE_MALLOC = 1; then
-+ REPLACE_CALLOC=1
-+ fi
-+ dnl Although in theory we should also test for size_t overflow,
-+ dnl in practice testing for ptrdiff_t overflow suffices
-+ dnl since PTRDIFF_MAX <= SIZE_MAX on all known Gnulib porting targets.
-+ dnl A separate size_t test would slow down 'configure'.
-+])
---- a/gnulib/m4/fcntl_h.m4
-+++ b/gnulib/m4/fcntl_h.m4
-@@ -1,4 +1,4 @@
--# serial 17
-+# serial 20
- # Configure fcntl.h.
- dnl Copyright (C) 2006-2007, 2009-2021 Free Software Foundation, Inc.
- dnl This file is free software; the Free Software Foundation
-@@ -7,7 +7,7 @@ dnl with or without modifications, as lo
-
- dnl Written by Paul Eggert.
-
--AC_DEFUN([gl_FCNTL_H],
-+AC_DEFUN_ONCE([gl_FCNTL_H],
- [
- AC_REQUIRE([gl_FCNTL_H_DEFAULTS])
- AC_REQUIRE([gl_FCNTL_O_FLAGS])
-@@ -26,25 +26,40 @@ AC_DEFUN([gl_FCNTL_H],
- ]], [fcntl openat])
- ])
-
-+# gl_FCNTL_MODULE_INDICATOR([modulename])
-+# sets the shell variable that indicates the presence of the given module
-+# to a C preprocessor expression that will evaluate to 1.
-+# This macro invocation must not occur in macros that are AC_REQUIREd.
- AC_DEFUN([gl_FCNTL_MODULE_INDICATOR],
- [
-- dnl Use AC_REQUIRE here, so that the default settings are expanded once only.
-- AC_REQUIRE([gl_FCNTL_H_DEFAULTS])
-+ dnl Ensure to expand the default settings once only.
-+ gl_FCNTL_H_REQUIRE_DEFAULTS
- gl_MODULE_INDICATOR_SET_VARIABLE([$1])
- dnl Define it also as a C macro, for the benefit of the unit tests.
- gl_MODULE_INDICATOR_FOR_TESTS([$1])
- ])
-
-+# Initializes the default values for AC_SUBSTed shell variables.
-+# This macro must not be AC_REQUIREd. It must only be invoked, and only
-+# outside of macros or in macros that are not AC_REQUIREd.
-+AC_DEFUN([gl_FCNTL_H_REQUIRE_DEFAULTS],
-+[
-+ m4_defun(GL_MODULE_INDICATOR_PREFIX[_FCNTL_H_MODULE_INDICATOR_DEFAULTS], [
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_CREAT])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_FCNTL])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_NONBLOCKING])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_OPEN])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_OPENAT])
-+ dnl Support Microsoft deprecated alias function names by default.
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_MDA_CREAT], [1])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_MDA_OPEN], [1])
-+ ])
-+ m4_require(GL_MODULE_INDICATOR_PREFIX[_FCNTL_H_MODULE_INDICATOR_DEFAULTS])
-+ AC_REQUIRE([gl_FCNTL_H_DEFAULTS])
-+])
-+
- AC_DEFUN([gl_FCNTL_H_DEFAULTS],
- [
-- GNULIB_CREAT=0; AC_SUBST([GNULIB_CREAT])
-- GNULIB_FCNTL=0; AC_SUBST([GNULIB_FCNTL])
-- GNULIB_NONBLOCKING=0; AC_SUBST([GNULIB_NONBLOCKING])
-- GNULIB_OPEN=0; AC_SUBST([GNULIB_OPEN])
-- GNULIB_OPENAT=0; AC_SUBST([GNULIB_OPENAT])
-- dnl Support Microsoft deprecated alias function names by default.
-- GNULIB_MDA_CREAT=1; AC_SUBST([GNULIB_MDA_CREAT])
-- GNULIB_MDA_OPEN=1; AC_SUBST([GNULIB_MDA_OPEN])
- dnl Assume proper GNU behavior unless another module says otherwise.
- HAVE_FCNTL=1; AC_SUBST([HAVE_FCNTL])
- HAVE_OPENAT=1; AC_SUBST([HAVE_OPENAT])
---- /dev/null
-+++ b/gnulib/m4/free.m4
-@@ -0,0 +1,52 @@
-+# free.m4 serial 6
-+# Copyright (C) 2003-2005, 2009-2021 Free Software Foundation, Inc.
-+# This file is free software; the Free Software Foundation
-+# gives unlimited permission to copy and/or distribute it,
-+# with or without modifications, as long as this notice is preserved.
-+
-+# Written by Paul Eggert and Bruno Haible.
-+
-+AC_DEFUN([gl_FUNC_FREE],
-+[
-+ AC_REQUIRE([gl_STDLIB_H_DEFAULTS])
-+
-+ dnl In the next release of POSIX, free must preserve errno.
-+ dnl https://www.austingroupbugs.net/view.php?id=385
-+ dnl https://sourceware.org/bugzilla/show_bug.cgi?id=17924
-+ dnl So far, we know of three platforms that do this:
-+ dnl * glibc >= 2.33, thanks to the fix for this bug:
-+ dnl <https://sourceware.org/bugzilla/show_bug.cgi?id=17924>
-+ dnl * OpenBSD >= 4.5, thanks to this commit:
-+ dnl <https://cvsweb.openbsd.org/cgi-bin/cvsweb/src/lib/libc/stdlib/malloc.c.diff?r1=1.100&r2=1.101&f=h>
-+ dnl * Solaris, because its malloc() implementation is based on brk(),
-+ dnl not mmap(); hence its free() implementation makes no system calls.
-+ dnl For other platforms, you can only be sure if they state it in their
-+ dnl documentation, or by code inspection of the free() implementation in libc.
-+ AC_CACHE_CHECK([whether free is known to preserve errno],
-+ [gl_cv_func_free_preserves_errno],
-+ [AC_COMPILE_IFELSE(
-+ [AC_LANG_PROGRAM(
-+ [[#include <stdlib.h>
-+ ]],
-+ [[#if 2 < __GLIBC__ + (33 <= __GLIBC_MINOR__)
-+ #elif defined __OpenBSD__
-+ #elif defined __sun
-+ #else
-+ #error "'free' is not known to preserve errno"
-+ #endif
-+ ]])],
-+ [gl_cv_func_free_preserves_errno=yes],
-+ [gl_cv_func_free_preserves_errno=no])
-+ ])
-+
-+ case $gl_cv_func_free_preserves_errno in
-+ *yes)
-+ AC_DEFINE([HAVE_FREE_POSIX], [1],
-+ [Define if the 'free' function is guaranteed to preserve errno.])
-+ ;;
-+ *) REPLACE_FREE=1 ;;
-+ esac
-+])
-+
-+# Prerequisites of lib/free.c.
-+AC_DEFUN([gl_PREREQ_FREE], [:])
---- a/gnulib/m4/fstat.m4
-+++ b/gnulib/m4/fstat.m4
-@@ -1,4 +1,4 @@
--# fstat.m4 serial 7
-+# fstat.m4 serial 8
- dnl Copyright (C) 2011-2021 Free Software Foundation, Inc.
- dnl This file is free software; the Free Software Foundation
- dnl gives unlimited permission to copy and/or distribute it,
-@@ -34,7 +34,7 @@ AC_DEFUN([gl_FUNC_FSTAT],
-
- # Prerequisites of lib/fstat.c and lib/stat-w32.c.
- AC_DEFUN([gl_PREREQ_FSTAT], [
-- AC_REQUIRE([gl_HEADER_SYS_STAT_H])
-+ AC_REQUIRE([gl_SYS_STAT_H])
- AC_REQUIRE([gl_PREREQ_STAT_W32])
- :
- ])
---- a/gnulib/m4/gnulib-common.m4
-+++ b/gnulib/m4/gnulib-common.m4
-@@ -1,4 +1,4 @@
--# gnulib-common.m4 serial 63
-+# gnulib-common.m4 serial 66
- dnl Copyright (C) 2007-2021 Free Software Foundation, Inc.
- dnl This file is free software; the Free Software Foundation
- dnl gives unlimited permission to copy and/or distribute it,
-@@ -357,6 +357,16 @@ AC_DEFUN([gl_COMMON_BODY], [
- export LIBC_FATAL_STDERR_
- ])
-
-+# gl_MODULE_INDICATOR_INIT_VARIABLE([variablename])
-+# gl_MODULE_INDICATOR_INIT_VARIABLE([variablename], [initialvalue])
-+# initializes the shell variable that indicates the presence of the given module
-+# as a C preprocessor expression.
-+AC_DEFUN([gl_MODULE_INDICATOR_INIT_VARIABLE],
-+[
-+ GL_MODULE_INDICATOR_PREFIX[]_[$1]=m4_if([$2], , [0], [$2])
-+ AC_SUBST(GL_MODULE_INDICATOR_PREFIX[]_[$1])
-+])
-+
- # gl_MODULE_INDICATOR_CONDITION
- # expands to a C preprocessor expression that evaluates to 1 or 0, depending
- # whether a gnulib module that has been requested shall be considered present
-@@ -369,9 +379,9 @@ m4_define([gl_MODULE_INDICATOR_CONDITION
- AC_DEFUN([gl_MODULE_INDICATOR_SET_VARIABLE],
- [
- gl_MODULE_INDICATOR_SET_VARIABLE_AUX(
-- [GNULIB_[]m4_translit([[$1]],
-- [abcdefghijklmnopqrstuvwxyz./-],
-- [ABCDEFGHIJKLMNOPQRSTUVWXYZ___])],
-+ [GL_MODULE_INDICATOR_PREFIX[]_GNULIB_[]m4_translit([[$1]],
-+ [abcdefghijklmnopqrstuvwxyz./-],
-+ [ABCDEFGHIJKLMNOPQRSTUVWXYZ___])],
- [gl_MODULE_INDICATOR_CONDITION])
- ])
-
-@@ -656,6 +666,72 @@ AC_DEFUN([gl_CACHE_VAL_SILENT],
- ])
- ])
-
-+# gl_CC_ALLOW_WARNINGS
-+# sets and substitutes a variable GL_CFLAG_ALLOW_WARNINGS, to a $(CC) option
-+# that reverts a preceding '-Werror' option, if available.
-+# This is expected to be '-Wno-error' on gcc, clang (except clang/MSVC), xlclang
-+# and empty otherwise.
-+AC_DEFUN([gl_CC_ALLOW_WARNINGS],
-+[
-+ AC_REQUIRE([AC_PROG_CC])
-+ AC_CACHE_CHECK([for C compiler option to allow warnings],
-+ [gl_cv_cc_wallow],
-+ [rm -f conftest*
-+ echo 'int dummy;' > conftest.c
-+ AC_TRY_COMMAND([${CC-cc} $CFLAGS $CPPFLAGS -c conftest.c 2>conftest1.err]) >/dev/null
-+ AC_TRY_COMMAND([${CC-cc} $CFLAGS $CPPFLAGS -Wno-error -c conftest.c 2>conftest2.err]) >/dev/null
-+ dnl Test the number of error output lines, because AIX xlc accepts the
-+ dnl option '-Wno-error', just to produce a warning
-+ dnl "Option -Wno-error was incorrectly specified. The option will be ignored."
-+ dnl afterwards.
-+ if test $? = 0 && test `wc -l < conftest1.err` = `wc -l < conftest2.err`; then
-+ gl_cv_cc_wallow='-Wno-error'
-+ else
-+ gl_cv_cc_wallow=none
-+ fi
-+ rm -f conftest*
-+ ])
-+ case "$gl_cv_cc_wallow" in
-+ none) GL_CFLAG_ALLOW_WARNINGS='' ;;
-+ *) GL_CFLAG_ALLOW_WARNINGS="$gl_cv_cc_wallow" ;;
-+ esac
-+ AC_SUBST([GL_CFLAG_ALLOW_WARNINGS])
-+])
-+
-+# gl_CXX_ALLOW_WARNINGS
-+# sets and substitutes a variable GL_CXXFLAG_ALLOW_WARNINGS, to a $(CC) option
-+# that reverts a preceding '-Werror' option, if available.
-+AC_DEFUN([gl_CXX_ALLOW_WARNINGS],
-+[
-+ dnl Requires AC_PROG_CXX or gl_PROG_ANSI_CXX.
-+ if test -n "$CXX" && test "$CXX" != no; then
-+ AC_CACHE_CHECK([for C++ compiler option to allow warnings],
-+ [gl_cv_cxx_wallow],
-+ [rm -f conftest*
-+ echo 'int dummy;' > conftest.cc
-+ AC_TRY_COMMAND([${CXX-c++} $CXXFLAGS $CPPFLAGS -c conftest.cc 2>conftest1.err]) >/dev/null
-+ AC_TRY_COMMAND([${CXX-c++} $CXXFLAGS $CPPFLAGS -Wno-error -c conftest.cc 2>conftest2.err]) >/dev/null
-+ dnl Test the number of error output lines, because AIX xlC accepts the
-+ dnl option '-Wno-error', just to produce a warning
-+ dnl "Option -Wno-error was incorrectly specified. The option will be ignored."
-+ dnl afterwards.
-+ if test $? = 0 && test `wc -l < conftest1.err` = `wc -l < conftest2.err`; then
-+ gl_cv_cxx_wallow='-Wno-error'
-+ else
-+ gl_cv_cxx_wallow=none
-+ fi
-+ rm -f conftest*
-+ ])
-+ case "$gl_cv_cxx_wallow" in
-+ none) GL_CXXFLAG_ALLOW_WARNINGS='' ;;
-+ *) GL_CXXFLAG_ALLOW_WARNINGS="$gl_cv_cxx_wallow" ;;
-+ esac
-+ else
-+ GL_CXXFLAG_ALLOW_WARNINGS=''
-+ fi
-+ AC_SUBST([GL_CXXFLAG_ALLOW_WARNINGS])
-+])
-+
- dnl Expands to some code for use in .c programs that, on native Windows, defines
- dnl the Microsoft deprecated alias function names to the underscore-prefixed
- dnl actual function names. With this macro, these function names are available
---- a/gnulib/m4/gnulib-comp.m4
-+++ b/gnulib/m4/gnulib-comp.m4
-@@ -51,6 +51,8 @@ AC_DEFUN([gl_EARLY],
- # Code from module btowc:
- # Code from module builtin-expect:
- # Code from module c99:
-+ # Code from module calloc-gnu:
-+ # Code from module calloc-posix:
- # Code from module cloexec:
- # Code from module close:
- # Code from module double-slash-root:
-@@ -66,6 +68,7 @@ AC_DEFUN([gl_EARLY],
- # Code from module fd-hook:
- # Code from module filename:
- # Code from module float:
-+ # Code from module free-posix:
- # Code from module fstat:
- # Code from module getdtablesize:
- # Code from module getopt-gnu:
-@@ -74,7 +77,9 @@ AC_DEFUN([gl_EARLY],
- # Code from module gettext-h:
- # Code from module hard-locale:
- # Code from module havelib:
-+ # Code from module ialloc:
- # Code from module iconv:
-+ # Code from module idx:
- # Code from module include_next:
- # Code from module intprops:
- # Code from module inttypes-incomplete:
-@@ -84,12 +89,14 @@ AC_DEFUN([gl_EARLY],
- # Code from module langinfo:
- # Code from module largefile:
- AC_REQUIRE([AC_SYS_LARGEFILE])
-+ AC_REQUIRE([gl_YEAR2038_EARLY])
- # Code from module libc-config:
- # Code from module limits-h:
- # Code from module localcharset:
- # Code from module locale:
- # Code from module localeconv:
- # Code from module lock:
-+ # Code from module malloc-gnu:
- # Code from module malloc-posix:
- # Code from module malloca:
- # Code from module mbchar:
-@@ -107,6 +114,7 @@ AC_DEFUN([gl_EARLY],
- # Code from module memchr:
- # Code from module mempcpy:
- # Code from module memrchr:
-+ # Code from module minmax:
- # Code from module msvc-inval:
- # Code from module msvc-nothrow:
- # Code from module multiarch:
-@@ -114,6 +122,9 @@ AC_DEFUN([gl_EARLY],
- # Code from module nocrash:
- # Code from module open:
- # Code from module pathmax:
-+ # Code from module realloc-gnu:
-+ # Code from module realloc-posix:
-+ # Code from module reallocarray:
- # Code from module regex:
- # Code from module setlocale-null:
- # Code from module size_max:
-@@ -189,6 +200,8 @@ AC_DEFUN([gl_INIT],
- m4_pushdef([AC_LIBSOURCES], m4_defn([gl_LIBSOURCES]))
- m4_pushdef([gl_LIBSOURCES_LIST], [])
- m4_pushdef([gl_LIBSOURCES_DIR], [])
-+ m4_pushdef([GL_MACRO_PREFIX], [gl])
-+ m4_pushdef([GL_MODULE_INDICATOR_PREFIX], [GL])
- gl_COMMON
- gl_source_base='gnulib/lib'
- gl_FUNC_ALLOCA
-@@ -196,6 +209,15 @@ AC_DEFUN([gl_INIT],
- if test -n "$ARGZ_H"; then
- AC_LIBOBJ([argz])
- fi
-+ gl_FUNC_CALLOC_GNU
-+ if test $REPLACE_CALLOC = 1; then
-+ AC_LIBOBJ([calloc])
-+ fi
-+ gl_FUNC_CALLOC_POSIX
-+ if test $REPLACE_CALLOC = 1; then
-+ AC_LIBOBJ([calloc])
-+ fi
-+ gl_STDLIB_MODULE_INDICATOR([calloc-posix])
- gl_DOUBLE_SLASH_ROOT
- gl_HEADER_ERRNO_H
- gl_ERROR
-@@ -208,6 +230,7 @@ AC_DEFUN([gl_INIT],
- AM_][XGETTEXT_OPTION([--flag=error_at_line:5:c-format])])
- AC_REQUIRE([gl_EXTERN_INLINE])
- gl_FCNTL_H
-+ gl_FCNTL_H_REQUIRE_DEFAULTS
- gl_FLOAT_H
- if test $REPLACE_FLOAT_LDBL = 1; then
- AC_LIBOBJ([float])
-@@ -223,10 +246,10 @@ AC_DEFUN([gl_INIT],
- if test $REPLACE_GETOPT = 1; then
- AC_LIBOBJ([getopt])
- AC_LIBOBJ([getopt1])
-- dnl Arrange for unistd.h to include getopt.h.
-- GNULIB_GL_UNISTD_H_GETOPT=1
-+ dnl Define the substituted variable GNULIB_UNISTD_H_GETOPT to 1.
-+ gl_UNISTD_H_REQUIRE_DEFAULTS
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_UNISTD_H_GETOPT], [1])
- fi
-- AC_SUBST([GNULIB_GL_UNISTD_H_GETOPT])
- gl_UNISTD_MODULE_INDICATOR([getopt-posix])
- gl_FUNC_GETPROGNAME
- AC_SUBST([LIBINTL])
-@@ -236,6 +259,7 @@ AC_DEFUN([gl_INIT],
- m4_ifdef([gl_ICONV_MODULE_INDICATOR],
- [gl_ICONV_MODULE_INDICATOR([iconv])])
- gl_INTTYPES_INCOMPLETE
-+ gl_INTTYPES_H_REQUIRE_DEFAULTS
- gl_FUNC_ISWBLANK
- if test $HAVE_ISWCNTRL = 0 || test $REPLACE_ISWCNTRL = 1; then
- :
-@@ -265,6 +289,15 @@ AC_DEFUN([gl_INIT],
- gl_WCTYPE_MODULE_INDICATOR([iswxdigit])
- AC_REQUIRE([gl_LARGEFILE])
- gl_LIMITS_H
-+ gl_FUNC_MALLOC_GNU
-+ if test $REPLACE_MALLOC = 1; then
-+ AC_LIBOBJ([malloc])
-+ fi
-+ AC_REQUIRE([gl_FUNC_MALLOC_POSIX])
-+ if test $REPLACE_MALLOC = 1; then
-+ AC_LIBOBJ([malloc])
-+ fi
-+ gl_STDLIB_MODULE_INDICATOR([malloc-posix])
- gl_MALLOCA
- gl_MBCHAR
- gl_MBITER
-@@ -305,6 +338,7 @@ AC_DEFUN([gl_INIT],
- gl_PREREQ_MEMRCHR
- fi
- gl_STRING_MODULE_INDICATOR([memrchr])
-+ gl_MINMAX
- gl_MULTIARCH
- gl_FUNC_OPEN
- if test $REPLACE_OPEN = 1; then
-@@ -312,6 +346,22 @@ AC_DEFUN([gl_INIT],
- gl_PREREQ_OPEN
- fi
- gl_FCNTL_MODULE_INDICATOR([open])
-+ gl_FUNC_REALLOC_GNU
-+ if test $REPLACE_REALLOC = 1; then
-+ AC_LIBOBJ([realloc])
-+ fi
-+ gl_FUNC_REALLOC_POSIX
-+ if test $REPLACE_REALLOC = 1; then
-+ AC_LIBOBJ([realloc])
-+ fi
-+ gl_STDLIB_MODULE_INDICATOR([realloc-posix])
-+ gl_FUNC_REALLOCARRAY
-+ if test $HAVE_REALLOCARRAY = 0 || test $REPLACE_REALLOCARRAY = 1; then
-+ AC_LIBOBJ([reallocarray])
-+ gl_PREREQ_REALLOCARRAY
-+ fi
-+ gl_MODULE_INDICATOR([reallocarray])
-+ gl_STDLIB_MODULE_INDICATOR([reallocarray])
- gl_REGEX
- if test $ac_use_included_regex = yes; then
- AC_LIBOBJ([regex])
-@@ -321,9 +371,35 @@ AC_DEFUN([gl_INIT],
- gl_STDARG_H
- AM_STDBOOL_H
- gl_STDDEF_H
-+ gl_STDDEF_H_REQUIRE_DEFAULTS
- gl_STDINT_H
- gl_STDIO_H
-+ gl_STDIO_H_REQUIRE_DEFAULTS
-+ dnl No need to create extra modules for these functions. Everyone who uses
-+ dnl <stdio.h> likely needs them.
-+ gl_STDIO_MODULE_INDICATOR([fscanf])
-+ gl_MODULE_INDICATOR([fscanf])
-+ gl_STDIO_MODULE_INDICATOR([scanf])
-+ gl_MODULE_INDICATOR([scanf])
-+ gl_STDIO_MODULE_INDICATOR([fgetc])
-+ gl_STDIO_MODULE_INDICATOR([getc])
-+ gl_STDIO_MODULE_INDICATOR([getchar])
-+ gl_STDIO_MODULE_INDICATOR([fgets])
-+ gl_STDIO_MODULE_INDICATOR([fread])
-+ dnl No need to create extra modules for these functions. Everyone who uses
-+ dnl <stdio.h> likely needs them.
-+ gl_STDIO_MODULE_INDICATOR([fprintf])
-+ gl_STDIO_MODULE_INDICATOR([printf])
-+ gl_STDIO_MODULE_INDICATOR([vfprintf])
-+ gl_STDIO_MODULE_INDICATOR([vprintf])
-+ gl_STDIO_MODULE_INDICATOR([fputc])
-+ gl_STDIO_MODULE_INDICATOR([putc])
-+ gl_STDIO_MODULE_INDICATOR([putchar])
-+ gl_STDIO_MODULE_INDICATOR([fputs])
-+ gl_STDIO_MODULE_INDICATOR([puts])
-+ gl_STDIO_MODULE_INDICATOR([fwrite])
- gl_STDLIB_H
-+ gl_STDLIB_H_REQUIRE_DEFAULTS
- gl_STRCASE
- if test $HAVE_STRCASECMP = 0; then
- AC_LIBOBJ([strcasecmp])
-@@ -356,8 +432,10 @@ AC_DEFUN([gl_INIT],
- fi
- gl_MODULE_INDICATOR([strerror])
- gl_STRING_MODULE_INDICATOR([strerror])
-- gl_HEADER_STRING_H
-- gl_HEADER_STRINGS_H
-+ gl_STRING_H
-+ gl_STRING_H_REQUIRE_DEFAULTS
-+ gl_STRINGS_H
-+ gl_STRINGS_H_REQUIRE_DEFAULTS
- gl_FUNC_STRNLEN
- if test $HAVE_DECL_STRNLEN = 0 || test $REPLACE_STRNLEN = 1; then
- AC_LIBOBJ([strnlen])
-@@ -365,10 +443,27 @@ AC_DEFUN([gl_INIT],
- fi
- gl_STRING_MODULE_INDICATOR([strnlen])
- gl_SYS_TYPES_H
-+ gl_SYS_TYPES_H_REQUIRE_DEFAULTS
- AC_PROG_MKDIR_P
- gl_UNISTD_H
-- gl_LIBUNISTRING_LIBHEADER([0.9.4], [unitypes.h])
-- gl_LIBUNISTRING_LIBHEADER([0.9.4], [uniwidth.h])
-+ gl_UNISTD_H_REQUIRE_DEFAULTS
-+ gl_LIBUNISTRING_LIBHEADER([0.9.11], [unitypes.h])
-+ AH_VERBATIM([unitypes_restrict], [
-+ /* This definition is a duplicate of the one in unitypes.h.
-+ It is here so that we can cope with an older version of unitypes.h
-+ that does not contain this definition and that is pre-installed among
-+ the public header files. */
-+ # if defined __restrict \
-+ || 2 < __GNUC__ + (95 <= __GNUC_MINOR__) \
-+ || __clang_major__ >= 3
-+ # define _UC_RESTRICT __restrict
-+ # elif 199901L <= __STDC_VERSION__ || defined restrict
-+ # define _UC_RESTRICT restrict
-+ # else
-+ # define _UC_RESTRICT
-+ # endif
-+ ])
-+ gl_LIBUNISTRING_LIBHEADER([0.9.11], [uniwidth.h])
- gl_LIBUNISTRING_MODULE([0.9.8], [uniwidth/width])
- gl_FUNC_VASPRINTF
- gl_STDIO_MODULE_INDICATOR([vasprintf])
-@@ -376,7 +471,9 @@ AC_DEFUN([gl_INIT],
- [AM_][XGETTEXT_OPTION([--flag=asprintf:2:c-format])
- AM_][XGETTEXT_OPTION([--flag=vasprintf:2:c-format])])
- gl_WCHAR_H
-+ gl_WCHAR_H_REQUIRE_DEFAULTS
- gl_WCTYPE_H
-+ gl_WCTYPE_H_REQUIRE_DEFAULTS
- gl_FUNC_WCWIDTH
- if test $HAVE_WCWIDTH = 0 || test $REPLACE_WCWIDTH = 1; then
- AC_LIBOBJ([wcwidth])
-@@ -395,17 +492,16 @@ AC_DEFUN([gl_INIT],
- gl_gnulib_enabled_dynarray=false
- gl_gnulib_enabled_fcntl=false
- gl_gnulib_enabled_43fe87a341d9b4b93c47c3ad819a5239=false
-+ gl_gnulib_enabled_ef07dc4b3077c11ea9cef586db4e5955=false
- gl_gnulib_enabled_fstat=false
- gl_gnulib_enabled_getdtablesize=false
- gl_gnulib_enabled_30838f5439487421042f2225bed3af76=false
-- gl_gnulib_enabled_intprops=false
- gl_gnulib_enabled_langinfo=false
- gl_gnulib_enabled_21ee726a3540c09237a8e70c0baf7467=false
- gl_gnulib_enabled_localcharset=false
- gl_gnulib_enabled_locale=false
- gl_gnulib_enabled_localeconv=false
- gl_gnulib_enabled_lock=false
-- gl_gnulib_enabled_ef455225c00f5049c808c2eda3e76866=false
- gl_gnulib_enabled_mbtowc=false
- gl_gnulib_enabled_mempcpy=false
- gl_gnulib_enabled_f691f076f650964c9f5598c3ee487616=false
-@@ -506,8 +602,9 @@ AC_DEFUN([gl_INIT],
- func_gl_gnulib_m4code_dynarray ()
- {
- if ! $gl_gnulib_enabled_dynarray; then
-+ AC_PROG_MKDIR_P
- gl_gnulib_enabled_dynarray=true
-- func_gl_gnulib_m4code_intprops
-+ func_gl_gnulib_m4code_37f71b604aa9c54446783d80f42fe547
- func_gl_gnulib_m4code_21ee726a3540c09237a8e70c0baf7467
- fi
- }
-@@ -540,6 +637,18 @@ AC_DEFUN([gl_INIT],
- gl_gnulib_enabled_43fe87a341d9b4b93c47c3ad819a5239=true
- fi
- }
-+ func_gl_gnulib_m4code_ef07dc4b3077c11ea9cef586db4e5955 ()
-+ {
-+ if ! $gl_gnulib_enabled_ef07dc4b3077c11ea9cef586db4e5955; then
-+ gl_FUNC_FREE
-+ if test $REPLACE_FREE = 1; then
-+ AC_LIBOBJ([free])
-+ gl_PREREQ_FREE
-+ fi
-+ gl_STDLIB_MODULE_INDICATOR([free-posix])
-+ gl_gnulib_enabled_ef07dc4b3077c11ea9cef586db4e5955=true
-+ fi
-+ }
- func_gl_gnulib_m4code_fstat ()
- {
- if ! $gl_gnulib_enabled_fstat; then
-@@ -595,16 +704,11 @@ AC_DEFUN([gl_INIT],
- func_gl_gnulib_m4code_e7e881d32ca02f1c997b13c737c64bbd
- fi
- }
-- func_gl_gnulib_m4code_intprops ()
-- {
-- if ! $gl_gnulib_enabled_intprops; then
-- gl_gnulib_enabled_intprops=true
-- fi
-- }
- func_gl_gnulib_m4code_langinfo ()
- {
- if ! $gl_gnulib_enabled_langinfo; then
- gl_LANGINFO_H
-+ gl_LANGINFO_H_REQUIRE_DEFAULTS
- gl_gnulib_enabled_langinfo=true
- fi
- }
-@@ -629,6 +733,7 @@ AC_DEFUN([gl_INIT],
- {
- if ! $gl_gnulib_enabled_locale; then
- gl_LOCALE_H
-+ gl_LOCALE_H_REQUIRE_DEFAULTS
- gl_gnulib_enabled_locale=true
- fi
- }
-@@ -666,18 +771,6 @@ AC_DEFUN([gl_INIT],
- fi
- fi
- }
-- func_gl_gnulib_m4code_ef455225c00f5049c808c2eda3e76866 ()
-- {
-- if ! $gl_gnulib_enabled_ef455225c00f5049c808c2eda3e76866; then
-- gl_FUNC_MALLOC_POSIX
-- if test $REPLACE_MALLOC = 1; then
-- AC_LIBOBJ([malloc])
-- fi
-- gl_STDLIB_MODULE_INDICATOR([malloc-posix])
-- gl_MODULE_INDICATOR([malloc-posix])
-- gl_gnulib_enabled_ef455225c00f5049c808c2eda3e76866=true
-- fi
-- }
- func_gl_gnulib_m4code_mbtowc ()
- {
- if ! $gl_gnulib_enabled_mbtowc; then
-@@ -803,7 +896,6 @@ AC_DEFUN([gl_INIT],
- gl_STAT_TIME
- gl_STAT_BIRTHTIME
- gl_gnulib_enabled_0137e3d3638b33e5819d132d0b23165c=true
-- func_gl_gnulib_m4code_intprops
- func_gl_gnulib_m4code_time
- fi
- }
-@@ -870,7 +962,8 @@ AC_DEFUN([gl_INIT],
- func_gl_gnulib_m4code_sys_stat ()
- {
- if ! $gl_gnulib_enabled_sys_stat; then
-- gl_HEADER_SYS_STAT_H
-+ gl_SYS_STAT_H
-+ gl_SYS_STAT_H_REQUIRE_DEFAULTS
- AC_PROG_MKDIR_P
- gl_gnulib_enabled_sys_stat=true
- func_gl_gnulib_m4code_time
-@@ -886,7 +979,8 @@ AC_DEFUN([gl_INIT],
- func_gl_gnulib_m4code_time ()
- {
- if ! $gl_gnulib_enabled_time; then
-- gl_HEADER_TIME_H
-+ gl_TIME_H
-+ gl_TIME_H_REQUIRE_DEFAULTS
- gl_gnulib_enabled_time=true
- fi
- }
-@@ -897,6 +991,7 @@ AC_DEFUN([gl_INIT],
- gl_FUNC_VASNPRINTF
- gl_gnulib_enabled_vasnprintf=true
- func_gl_gnulib_m4code_attribute
-+ func_gl_gnulib_m4code_ef07dc4b3077c11ea9cef586db4e5955
- func_gl_gnulib_m4code_xsize
- fi
- }
-@@ -1002,6 +1097,9 @@ AC_DEFUN([gl_INIT],
- if test $REPLACE_OPEN = 1; then
- func_gl_gnulib_m4code_stat
- fi
-+ if test $REPLACE_REALLOC = 1; then
-+ func_gl_gnulib_m4code_ef07dc4b3077c11ea9cef586db4e5955
-+ fi
- if test $ac_use_included_regex = yes; then
- func_gl_gnulib_m4code_attribute
- fi
-@@ -1015,9 +1113,6 @@ AC_DEFUN([gl_INIT],
- func_gl_gnulib_m4code_dynarray
- fi
- if test $ac_use_included_regex = yes; then
-- func_gl_gnulib_m4code_intprops
-- fi
-- if test $ac_use_included_regex = yes; then
- func_gl_gnulib_m4code_langinfo
- fi
- if test $ac_use_included_regex = yes; then
-@@ -1032,12 +1127,6 @@ AC_DEFUN([gl_INIT],
- if test $ac_use_included_regex = yes; then
- func_gl_gnulib_m4code_wcrtomb
- fi
-- if test $REPLACE_STRDUP = 1; then
-- func_gl_gnulib_m4code_ef455225c00f5049c808c2eda3e76866
-- fi
-- if test $REPLACE_STRERROR = 1; then
-- func_gl_gnulib_m4code_intprops
-- fi
- if test $REPLACE_STRERROR = 1; then
- func_gl_gnulib_m4code_dbb57f49352be8fb86869629a254fb72
- fi
-@@ -1057,17 +1146,16 @@ AC_DEFUN([gl_INIT],
- AM_CONDITIONAL([gl_GNULIB_ENABLED_dynarray], [$gl_gnulib_enabled_dynarray])
- AM_CONDITIONAL([gl_GNULIB_ENABLED_fcntl], [$gl_gnulib_enabled_fcntl])
- AM_CONDITIONAL([gl_GNULIB_ENABLED_43fe87a341d9b4b93c47c3ad819a5239], [$gl_gnulib_enabled_43fe87a341d9b4b93c47c3ad819a5239])
-+ AM_CONDITIONAL([gl_GNULIB_ENABLED_ef07dc4b3077c11ea9cef586db4e5955], [$gl_gnulib_enabled_ef07dc4b3077c11ea9cef586db4e5955])
- AM_CONDITIONAL([gl_GNULIB_ENABLED_fstat], [$gl_gnulib_enabled_fstat])
- AM_CONDITIONAL([gl_GNULIB_ENABLED_getdtablesize], [$gl_gnulib_enabled_getdtablesize])
- AM_CONDITIONAL([gl_GNULIB_ENABLED_30838f5439487421042f2225bed3af76], [$gl_gnulib_enabled_30838f5439487421042f2225bed3af76])
-- AM_CONDITIONAL([gl_GNULIB_ENABLED_intprops], [$gl_gnulib_enabled_intprops])
- AM_CONDITIONAL([gl_GNULIB_ENABLED_langinfo], [$gl_gnulib_enabled_langinfo])
- AM_CONDITIONAL([gl_GNULIB_ENABLED_21ee726a3540c09237a8e70c0baf7467], [$gl_gnulib_enabled_21ee726a3540c09237a8e70c0baf7467])
- AM_CONDITIONAL([gl_GNULIB_ENABLED_localcharset], [$gl_gnulib_enabled_localcharset])
- AM_CONDITIONAL([gl_GNULIB_ENABLED_locale], [$gl_gnulib_enabled_locale])
- AM_CONDITIONAL([gl_GNULIB_ENABLED_localeconv], [$gl_gnulib_enabled_localeconv])
- AM_CONDITIONAL([gl_GNULIB_ENABLED_lock], [$gl_gnulib_enabled_lock])
-- AM_CONDITIONAL([gl_GNULIB_ENABLED_ef455225c00f5049c808c2eda3e76866], [$gl_gnulib_enabled_ef455225c00f5049c808c2eda3e76866])
- AM_CONDITIONAL([gl_GNULIB_ENABLED_mbtowc], [$gl_gnulib_enabled_mbtowc])
- AM_CONDITIONAL([gl_GNULIB_ENABLED_mempcpy], [$gl_gnulib_enabled_mempcpy])
- AM_CONDITIONAL([gl_GNULIB_ENABLED_f691f076f650964c9f5598c3ee487616], [$gl_gnulib_enabled_f691f076f650964c9f5598c3ee487616])
-@@ -1105,6 +1193,8 @@ AC_DEFUN([gl_INIT],
- m4_if(m4_sysval, [0], [],
- [AC_FATAL([expected source file, required through AC_LIBSOURCES, not found])])
- ])
-+ m4_popdef([GL_MODULE_INDICATOR_PREFIX])
-+ m4_popdef([GL_MACRO_PREFIX])
- m4_popdef([gl_LIBSOURCES_DIR])
- m4_popdef([gl_LIBSOURCES_LIST])
- m4_popdef([AC_LIBSOURCES])
-@@ -1131,6 +1221,8 @@ AC_DEFUN([gl_INIT],
- m4_pushdef([AC_LIBSOURCES], m4_defn([gltests_LIBSOURCES]))
- m4_pushdef([gltests_LIBSOURCES_LIST], [])
- m4_pushdef([gltests_LIBSOURCES_DIR], [])
-+ m4_pushdef([GL_MACRO_PREFIX], [gltests])
-+ m4_pushdef([GL_MODULE_INDICATOR_PREFIX], [GL])
- gl_COMMON
- gl_source_base='tests'
- changequote(,)dnl
-@@ -1152,6 +1244,8 @@ changequote([, ])dnl
- m4_if(m4_sysval, [0], [],
- [AC_FATAL([expected source file, required through AC_LIBSOURCES, not found])])
- ])
-+ m4_popdef([GL_MODULE_INDICATOR_PREFIX])
-+ m4_popdef([GL_MACRO_PREFIX])
- m4_popdef([gltests_LIBSOURCES_DIR])
- m4_popdef([gltests_LIBSOURCES_LIST])
- m4_popdef([AC_LIBSOURCES])
-@@ -1247,6 +1341,7 @@ AC_DEFUN([gl_FILE_LIST], [
- lib/basename-lgpl.h
- lib/btowc.c
- lib/c++defs.h
-+ lib/calloc.c
- lib/cdefs.h
- lib/cloexec.c
- lib/cloexec.h
-@@ -1266,6 +1361,7 @@ AC_DEFUN([gl_FILE_LIST], [
- lib/float+.h
- lib/float.c
- lib/float.in.h
-+ lib/free.c
- lib/fstat.c
- lib/getdtablesize.c
- lib/getopt-cdefs.in.h
-@@ -1285,6 +1381,9 @@ AC_DEFUN([gl_FILE_LIST], [
- lib/glthread/threadlib.c
- lib/hard-locale.c
- lib/hard-locale.h
-+ lib/ialloc.c
-+ lib/ialloc.h
-+ lib/idx.h
- lib/intprops.h
- lib/inttypes.in.h
- lib/iswblank.c
-@@ -1335,6 +1434,7 @@ AC_DEFUN([gl_FILE_LIST], [
- lib/memchr.valgrind
- lib/mempcpy.c
- lib/memrchr.c
-+ lib/minmax.h
- lib/msvc-inval.c
- lib/msvc-inval.h
- lib/msvc-nothrow.c
-@@ -1347,6 +1447,8 @@ AC_DEFUN([gl_FILE_LIST], [
- lib/printf-args.h
- lib/printf-parse.c
- lib/printf-parse.h
-+ lib/realloc.c
-+ lib/reallocarray.c
- lib/regcomp.c
- lib/regex.c
- lib/regex.h
-@@ -1427,6 +1529,7 @@ AC_DEFUN([gl_FILE_LIST], [
- m4/argz.m4
- m4/btowc.m4
- m4/builtin-expect.m4
-+ m4/calloc.m4
- m4/close.m4
- m4/codeset.m4
- m4/double-slash-root.m4
-@@ -1441,6 +1544,7 @@ AC_DEFUN([gl_FILE_LIST], [
- m4/fcntl.m4
- m4/fcntl_h.m4
- m4/float_h.m4
-+ m4/free.m4
- m4/fstat.m4
- m4/getdtablesize.m4
- m4/getopt.m4
-@@ -1483,6 +1587,7 @@ AC_DEFUN([gl_FILE_LIST], [
- m4/memchr.m4
- m4/mempcpy.m4
- m4/memrchr.m4
-+ m4/minmax.m4
- m4/mmap-anon.m4
- m4/mode_t.m4
- m4/msvc-inval.m4
-@@ -1498,6 +1603,8 @@ AC_DEFUN([gl_FILE_LIST], [
- m4/pid_t.m4
- m4/printf.m4
- m4/pthread_rwlock_rdlock.m4
-+ m4/realloc.m4
-+ m4/reallocarray.m4
- m4/regex.m4
- m4/setlocale_null.m4
- m4/size_max.m4
-@@ -1540,5 +1647,6 @@ AC_DEFUN([gl_FILE_LIST], [
- m4/wint_t.m4
- m4/xalloc.m4
- m4/xsize.m4
-+ m4/year2038.m4
- m4/zzgnulib.m4
- ])
---- a/gnulib/m4/inttypes.m4
-+++ b/gnulib/m4/inttypes.m4
-@@ -1,4 +1,4 @@
--# inttypes.m4 serial 32
-+# inttypes.m4 serial 35
- dnl Copyright (C) 2006-2021 Free Software Foundation, Inc.
- dnl This file is free software; the Free Software Foundation
- dnl gives unlimited permission to copy and/or distribute it,
-@@ -7,7 +7,7 @@ dnl with or without modifications, as lo
- dnl From Derek Price, Bruno Haible.
- dnl Test whether <inttypes.h> is supported or must be substituted.
-
--AC_DEFUN([gl_INTTYPES_H],
-+AC_DEFUN_ONCE([gl_INTTYPES_H],
- [
- AC_REQUIRE([gl_INTTYPES_INCOMPLETE])
- gl_INTTYPES_PRI_SCN
-@@ -136,19 +136,34 @@ AC_DEFUN([gl_INTTYPES_CHECK_LONG_LONG_IN
- AC_SUBST([$1])
- ])
-
-+# gl_INTTYPES_MODULE_INDICATOR([modulename])
-+# sets the shell variable that indicates the presence of the given module
-+# to a C preprocessor expression that will evaluate to 1.
-+# This macro invocation must not occur in macros that are AC_REQUIREd.
- AC_DEFUN([gl_INTTYPES_MODULE_INDICATOR],
- [
-- dnl Use AC_REQUIRE here, so that the default settings are expanded once only.
-- AC_REQUIRE([gl_INTTYPES_H_DEFAULTS])
-+ dnl Ensure to expand the default settings once only.
-+ gl_INTTYPES_H_REQUIRE_DEFAULTS
- gl_MODULE_INDICATOR_SET_VARIABLE([$1])
- ])
-
-+# Initializes the default values for AC_SUBSTed shell variables.
-+# This macro must not be AC_REQUIREd. It must only be invoked, and only
-+# outside of macros or in macros that are not AC_REQUIREd.
-+AC_DEFUN([gl_INTTYPES_H_REQUIRE_DEFAULTS],
-+[
-+ m4_defun(GL_MODULE_INDICATOR_PREFIX[_INTTYPES_H_MODULE_INDICATOR_DEFAULTS], [
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_IMAXABS])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_IMAXDIV])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_STRTOIMAX])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_STRTOUMAX])
-+ ])
-+ m4_require(GL_MODULE_INDICATOR_PREFIX[_INTTYPES_H_MODULE_INDICATOR_DEFAULTS])
-+ AC_REQUIRE([gl_INTTYPES_H_DEFAULTS])
-+])
-+
- AC_DEFUN([gl_INTTYPES_H_DEFAULTS],
- [
-- GNULIB_IMAXABS=0; AC_SUBST([GNULIB_IMAXABS])
-- GNULIB_IMAXDIV=0; AC_SUBST([GNULIB_IMAXDIV])
-- GNULIB_STRTOIMAX=0; AC_SUBST([GNULIB_STRTOIMAX])
-- GNULIB_STRTOUMAX=0; AC_SUBST([GNULIB_STRTOUMAX])
- dnl Assume proper GNU behavior unless another module says otherwise.
- HAVE_DECL_IMAXABS=1; AC_SUBST([HAVE_DECL_IMAXABS])
- HAVE_DECL_IMAXDIV=1; AC_SUBST([HAVE_DECL_IMAXDIV])
---- a/gnulib/m4/iswdigit.m4
-+++ b/gnulib/m4/iswdigit.m4
-@@ -1,4 +1,4 @@
--# iswdigit.m4 serial 2
-+# iswdigit.m4 serial 3
- dnl Copyright (C) 2020-2021 Free Software Foundation, Inc.
- dnl This file is free software; the Free Software Foundation
- dnl gives unlimited permission to copy and/or distribute it,
-@@ -81,12 +81,12 @@ main (int argc, char *argv[])
- }
- if (setlocale (LC_ALL, "$LOCALE_FR_UTF8") != NULL)
- {
-- /* This fails on FreeBSD 12, NetBSD 8.0, MSVC 14. */
-+ /* This fails on FreeBSD 13.0, NetBSD 8.0, MSVC 14. */
- /* U+0663 ARABIC-INDIC DIGIT THREE */
- is = for_character ("\331\243", 2);
- if (!(is == 0))
- result |= 4;
-- /* This fails on FreeBSD 12, NetBSD 8.0, MSVC 14. */
-+ /* This fails on FreeBSD 13.0, NetBSD 8.0, MSVC 14. */
- /* U+FF11 FULLWIDTH DIGIT ONE */
- is = for_character ("\357\274\221", 3);
- if (!(is == 0))
---- a/gnulib/m4/iswxdigit.m4
-+++ b/gnulib/m4/iswxdigit.m4
-@@ -1,4 +1,4 @@
--# iswxdigit.m4 serial 2
-+# iswxdigit.m4 serial 3
- dnl Copyright (C) 2020-2021 Free Software Foundation, Inc.
- dnl This file is free software; the Free Software Foundation
- dnl gives unlimited permission to copy and/or distribute it,
-@@ -72,7 +72,7 @@ main (int argc, char *argv[])
- }
- if (setlocale (LC_ALL, "$LOCALE_FR_UTF8") != NULL)
- {
-- /* This fails on FreeBSD 12. */
-+ /* This fails on FreeBSD 13.0. */
- /* U+0663 ARABIC-INDIC DIGIT THREE */
- is = for_character ("\331\243", 2);
- if (!(is == 0))
---- a/gnulib/m4/langinfo_h.m4
-+++ b/gnulib/m4/langinfo_h.m4
-@@ -1,10 +1,10 @@
--# langinfo_h.m4 serial 9
-+# langinfo_h.m4 serial 12
- dnl Copyright (C) 2009-2021 Free Software Foundation, Inc.
- dnl This file is free software; the Free Software Foundation
- dnl gives unlimited permission to copy and/or distribute it,
- dnl with or without modifications, as long as this notice is preserved.
-
--AC_DEFUN([gl_LANGINFO_H],
-+AC_DEFUN_ONCE([gl_LANGINFO_H],
- [
- AC_REQUIRE([gl_LANGINFO_H_DEFAULTS])
-
-@@ -104,18 +104,33 @@ int a = YESEXPR;
- ]], [nl_langinfo])
- ])
-
-+# gl_LANGINFO_MODULE_INDICATOR([modulename])
-+# sets the shell variable that indicates the presence of the given module
-+# to a C preprocessor expression that will evaluate to 1.
-+# This macro invocation must not occur in macros that are AC_REQUIREd.
- AC_DEFUN([gl_LANGINFO_MODULE_INDICATOR],
- [
-- dnl Use AC_REQUIRE here, so that the default settings are expanded once only.
-- AC_REQUIRE([gl_LANGINFO_H_DEFAULTS])
-+ dnl Ensure to expand the default settings once only.
-+ gl_LANGINFO_H_REQUIRE_DEFAULTS
- gl_MODULE_INDICATOR_SET_VARIABLE([$1])
- dnl Define it also as a C macro, for the benefit of the unit tests.
- gl_MODULE_INDICATOR_FOR_TESTS([$1])
- ])
-
-+# Initializes the default values for AC_SUBSTed shell variables.
-+# This macro must not be AC_REQUIREd. It must only be invoked, and only
-+# outside of macros or in macros that are not AC_REQUIREd.
-+AC_DEFUN([gl_LANGINFO_H_REQUIRE_DEFAULTS],
-+[
-+ m4_defun(GL_MODULE_INDICATOR_PREFIX[_LANGINFO_H_MODULE_INDICATOR_DEFAULTS], [
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_NL_LANGINFO])
-+ ])
-+ m4_require(GL_MODULE_INDICATOR_PREFIX[_LANGINFO_H_MODULE_INDICATOR_DEFAULTS])
-+ AC_REQUIRE([gl_LANGINFO_H_DEFAULTS])
-+])
-+
- AC_DEFUN([gl_LANGINFO_H_DEFAULTS],
- [
-- GNULIB_NL_LANGINFO=0; AC_SUBST([GNULIB_NL_LANGINFO])
- dnl Assume proper GNU behavior unless another module says otherwise.
- HAVE_NL_LANGINFO=1; AC_SUBST([HAVE_NL_LANGINFO])
- REPLACE_NL_LANGINFO=0; AC_SUBST([REPLACE_NL_LANGINFO])
---- a/gnulib/m4/largefile.m4
-+++ b/gnulib/m4/largefile.m4
-@@ -22,7 +22,8 @@ AC_DEFUN([gl_SET_LARGEFILE_SOURCE],
- esac
- ])
-
--# The following implementation works around a problem in autoconf <= 2.69;
-+# Work around a problem in Autoconf through at least 2.71 on glibc 2.34+
-+# with _TIME_BITS. Also, work around a problem in autoconf <= 2.69:
- # AC_SYS_LARGEFILE does not configure for large inodes on Mac OS X 10.5,
- # or configures them incorrectly in some cases.
- m4_version_prereq([2.70], [], [
-@@ -40,6 +41,7 @@ m4_define([_AC_SYS_LARGEFILE_TEST_INCLUD
- && LARGE_OFF_T % 2147483647 == 1)
- ? 1 : -1]];[]dnl
- ])
-+])# m4_version_prereq 2.70
-
-
- # _AC_SYS_LARGEFILE_MACRO_VALUE(C-MACRO, VALUE,
-@@ -54,7 +56,8 @@ m4_define([_AC_SYS_LARGEFILE_MACRO_VALUE
- [AC_LANG_PROGRAM([$5], [$6])],
- [$3=no; break])
- m4_ifval([$6], [AC_LINK_IFELSE], [AC_COMPILE_IFELSE])(
-- [AC_LANG_PROGRAM([#define $1 $2
-+ [AC_LANG_PROGRAM([#undef $1
-+#define $1 $2
- $5], [$6])],
- [$3=$2; break])
- $3=unknown
-@@ -80,9 +83,8 @@ rm -rf conftest*[]dnl
- AC_DEFUN([AC_SYS_LARGEFILE],
- [AC_ARG_ENABLE(largefile,
- [ --disable-largefile omit support for large files])
--if test "$enable_largefile" != no; then
--
-- AC_CACHE_CHECK([for special C compiler options needed for large files],
-+AS_IF([test "$enable_largefile" != no],
-+ [AC_CACHE_CHECK([for special C compiler options needed for large files],
- ac_cv_sys_largefile_CC,
- [ac_cv_sys_largefile_CC=no
- if test "$GCC" != yes; then
-@@ -107,15 +109,15 @@ if test "$enable_largefile" != no; then
- ac_cv_sys_file_offset_bits,
- [Number of bits in a file offset, on hosts where this is settable.],
- [_AC_SYS_LARGEFILE_TEST_INCLUDES])
-- if test $ac_cv_sys_file_offset_bits = unknown; then
-- _AC_SYS_LARGEFILE_MACRO_VALUE(_LARGE_FILES, 1,
-- ac_cv_sys_large_files,
-- [Define for large files, on AIX-style hosts.],
-- [_AC_SYS_LARGEFILE_TEST_INCLUDES])
-- fi
--fi
-+ AS_CASE([$ac_cv_sys_file_offset_bits],
-+ [unknown],
-+ [_AC_SYS_LARGEFILE_MACRO_VALUE([_LARGE_FILES], [1],
-+ [ac_cv_sys_large_files],
-+ [Define for large files, on AIX-style hosts.],
-+ [_AC_SYS_LARGEFILE_TEST_INCLUDES])],
-+ [64],
-+ [gl_YEAR2038_BODY([])])])
- ])# AC_SYS_LARGEFILE
--])# m4_version_prereq 2.70
-
- # Enable large files on systems where this is implemented by Gnulib, not by the
- # system headers.
---- a/gnulib/m4/locale_h.m4
-+++ b/gnulib/m4/locale_h.m4
-@@ -1,13 +1,13 @@
--# locale_h.m4 serial 25
-+# locale_h.m4 serial 28
- dnl Copyright (C) 2007, 2009-2021 Free Software Foundation, Inc.
- dnl This file is free software; the Free Software Foundation
- dnl gives unlimited permission to copy and/or distribute it,
- dnl with or without modifications, as long as this notice is preserved.
-
--AC_DEFUN([gl_LOCALE_H],
-+AC_DEFUN_ONCE([gl_LOCALE_H],
- [
-- dnl Use AC_REQUIRE here, so that the default behavior below is expanded
-- dnl once only, before all statements that occur in other macros.
-+ dnl Ensure to expand the default settings once only, before all statements
-+ dnl that occur in other macros.
- AC_REQUIRE([gl_LOCALE_H_DEFAULTS])
-
- dnl Persuade glibc <locale.h> to define locale_t and the int_p_*, int_n_*
-@@ -129,22 +129,37 @@ AC_DEFUN([gl_LOCALE_T],
- AC_SUBST([HAVE_XLOCALE_H])
- ])
-
-+# gl_LOCALE_MODULE_INDICATOR([modulename])
-+# sets the shell variable that indicates the presence of the given module
-+# to a C preprocessor expression that will evaluate to 1.
-+# This macro invocation must not occur in macros that are AC_REQUIREd.
- AC_DEFUN([gl_LOCALE_MODULE_INDICATOR],
- [
-- dnl Use AC_REQUIRE here, so that the default settings are expanded once only.
-- AC_REQUIRE([gl_LOCALE_H_DEFAULTS])
-+ dnl Ensure to expand the default settings once only.
-+ gl_LOCALE_H_REQUIRE_DEFAULTS
- gl_MODULE_INDICATOR_SET_VARIABLE([$1])
- dnl Define it also as a C macro, for the benefit of the unit tests.
- gl_MODULE_INDICATOR_FOR_TESTS([$1])
- ])
-
-+# Initializes the default values for AC_SUBSTed shell variables.
-+# This macro must not be AC_REQUIREd. It must only be invoked, and only
-+# outside of macros or in macros that are not AC_REQUIREd.
-+AC_DEFUN([gl_LOCALE_H_REQUIRE_DEFAULTS],
-+[
-+ m4_defun(GL_MODULE_INDICATOR_PREFIX[_LOCALE_H_MODULE_INDICATOR_DEFAULTS], [
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_LOCALECONV])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_SETLOCALE])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_SETLOCALE_NULL])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_DUPLOCALE])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_LOCALENAME])
-+ ])
-+ m4_require(GL_MODULE_INDICATOR_PREFIX[_LOCALE_H_MODULE_INDICATOR_DEFAULTS])
-+ AC_REQUIRE([gl_LOCALE_H_DEFAULTS])
-+])
-+
- AC_DEFUN([gl_LOCALE_H_DEFAULTS],
- [
-- GNULIB_LOCALECONV=0; AC_SUBST([GNULIB_LOCALECONV])
-- GNULIB_SETLOCALE=0; AC_SUBST([GNULIB_SETLOCALE])
-- GNULIB_SETLOCALE_NULL=0; AC_SUBST([GNULIB_SETLOCALE_NULL])
-- GNULIB_DUPLOCALE=0; AC_SUBST([GNULIB_DUPLOCALE])
-- GNULIB_LOCALENAME=0; AC_SUBST([GNULIB_LOCALENAME])
- dnl Assume proper GNU behavior unless another module says otherwise.
- HAVE_NEWLOCALE=1; AC_SUBST([HAVE_NEWLOCALE])
- HAVE_DUPLOCALE=1; AC_SUBST([HAVE_DUPLOCALE])
---- a/gnulib/m4/malloc.m4
-+++ b/gnulib/m4/malloc.m4
-@@ -1,21 +1,21 @@
--# malloc.m4 serial 22
-+# malloc.m4 serial 27
- dnl Copyright (C) 2007, 2009-2021 Free Software Foundation, Inc.
- dnl This file is free software; the Free Software Foundation
- dnl gives unlimited permission to copy and/or distribute it,
- dnl with or without modifications, as long as this notice is preserved.
-
- # This is adapted with modifications from upstream Autoconf here:
--# https://git.savannah.gnu.org/cgit/autoconf.git/commit/?id=04be2b7a29d65d9a08e64e8e56e594c91749598c
-+# https://git.savannah.gnu.org/cgit/autoconf.git/tree/lib/autoconf/functions.m4?id=v2.70#n949
- AC_DEFUN([_AC_FUNC_MALLOC_IF],
- [
- AC_REQUIRE([AC_CANONICAL_HOST])dnl for cross-compiles
-- AC_CACHE_CHECK([for GNU libc compatible malloc],
-+ AC_CACHE_CHECK([whether malloc (0) returns nonnull],
- [ac_cv_func_malloc_0_nonnull],
- [AC_RUN_IFELSE(
- [AC_LANG_PROGRAM(
- [[#include <stdlib.h>
- ]],
-- [[char *p = malloc (0);
-+ [[void *p = malloc (0);
- int result = !p;
- free (p);
- return result;]])
-@@ -24,75 +24,151 @@ AC_DEFUN([_AC_FUNC_MALLOC_IF],
- [ac_cv_func_malloc_0_nonnull=no],
- [case "$host_os" in
- # Guess yes on platforms where we know the result.
-- *-gnu* | gnu* | *-musl* | freebsd* | midnightbsd* | netbsd* | openbsd* \
-- | hpux* | solaris* | cygwin* | mingw*)
-+ *-gnu* | freebsd* | netbsd* | openbsd* | bitrig* \
-+ | gnu* | *-musl* | midnightbsd* \
-+ | hpux* | solaris* | cygwin* | mingw* | msys* )
- ac_cv_func_malloc_0_nonnull="guessing yes" ;;
- # If we don't know, obey --enable-cross-guesses.
- *) ac_cv_func_malloc_0_nonnull="$gl_cross_guess_normal" ;;
- esac
- ])
- ])
-- case "$ac_cv_func_malloc_0_nonnull" in
-- *yes)
-- $1
-- ;;
-- *)
-- $2
-- ;;
-- esac
-+ AS_CASE([$ac_cv_func_malloc_0_nonnull], [*yes], [$1], [$2])
- ])# _AC_FUNC_MALLOC_IF
-
- # gl_FUNC_MALLOC_GNU
- # ------------------
--# Test whether 'malloc (0)' is handled like in GNU libc, and replace malloc if
--# it is not.
-+# Replace malloc if it is not compatible with GNU libc.
- AC_DEFUN([gl_FUNC_MALLOC_GNU],
- [
- AC_REQUIRE([gl_STDLIB_H_DEFAULTS])
-- dnl _AC_FUNC_MALLOC_IF is defined in Autoconf.
-- _AC_FUNC_MALLOC_IF(
-- [AC_DEFINE([HAVE_MALLOC_GNU], [1],
-- [Define to 1 if your system has a GNU libc compatible 'malloc'
-- function, and to 0 otherwise.])],
-- [AC_DEFINE([HAVE_MALLOC_GNU], [0])
-- REPLACE_MALLOC=1
-+ AC_REQUIRE([gl_FUNC_MALLOC_POSIX])
-+ if test $REPLACE_MALLOC = 0; then
-+ _AC_FUNC_MALLOC_IF([], [REPLACE_MALLOC=1])
-+ fi
-+])
-+
-+# gl_FUNC_MALLOC_PTRDIFF
-+# ----------------------
-+# Test whether malloc (N) reliably fails when N exceeds PTRDIFF_MAX,
-+# and replace malloc otherwise.
-+AC_DEFUN([gl_FUNC_MALLOC_PTRDIFF],
-+[
-+ AC_REQUIRE([gl_STDLIB_H_DEFAULTS])
-+ AC_REQUIRE([gl_CHECK_MALLOC_PTRDIFF])
-+ test "$gl_cv_malloc_ptrdiff" = yes || REPLACE_MALLOC=1
-+])
-+
-+# Test whether malloc, realloc, calloc refuse to create objects
-+# larger than what can be expressed in ptrdiff_t.
-+# Set gl_cv_func_malloc_gnu to yes or no accordingly.
-+AC_DEFUN([gl_CHECK_MALLOC_PTRDIFF],
-+[
-+ AC_CACHE_CHECK([whether malloc is ptrdiff_t safe],
-+ [gl_cv_malloc_ptrdiff],
-+ [AC_COMPILE_IFELSE(
-+ [AC_LANG_PROGRAM(
-+ [[#include <stdint.h>
-+ ]],
-+ [[/* 64-bit ptrdiff_t is so wide that no practical platform
-+ can exceed it. */
-+ #define WIDE_PTRDIFF (PTRDIFF_MAX >> 31 >> 31 != 0)
-+
-+ /* On rare machines where size_t fits in ptrdiff_t there
-+ is no problem. */
-+ #define NARROW_SIZE (SIZE_MAX <= PTRDIFF_MAX)
-+
-+ /* glibc 2.30 and later malloc refuses to exceed ptrdiff_t
-+ bounds even on 32-bit platforms. We don't know which
-+ non-glibc systems are safe. */
-+ #define KNOWN_SAFE (2 < __GLIBC__ + (30 <= __GLIBC_MINOR__))
-+
-+ #if WIDE_PTRDIFF || NARROW_SIZE || KNOWN_SAFE
-+ return 0;
-+ #else
-+ #error "malloc might not be ptrdiff_t safe"
-+ syntax error
-+ #endif
-+ ]])],
-+ [gl_cv_malloc_ptrdiff=yes],
-+ [gl_cv_malloc_ptrdiff=no])
- ])
- ])
-
- # gl_FUNC_MALLOC_POSIX
- # --------------------
- # Test whether 'malloc' is POSIX compliant (sets errno to ENOMEM when it
--# fails), and replace malloc if it is not.
-+# fails, and doesn't mess up with ptrdiff_t overflow), and replace
-+# malloc if it is not.
- AC_DEFUN([gl_FUNC_MALLOC_POSIX],
- [
- AC_REQUIRE([gl_STDLIB_H_DEFAULTS])
-+ AC_REQUIRE([gl_FUNC_MALLOC_PTRDIFF])
- AC_REQUIRE([gl_CHECK_MALLOC_POSIX])
-- if test $gl_cv_func_malloc_posix = yes; then
-+ if test "$gl_cv_func_malloc_posix" = yes; then
- AC_DEFINE([HAVE_MALLOC_POSIX], [1],
-- [Define if the 'malloc' function is POSIX compliant.])
-+ [Define if malloc, realloc, and calloc set errno on allocation failure.])
- else
- REPLACE_MALLOC=1
- fi
- ])
-
--# Test whether malloc, realloc, calloc are POSIX compliant,
-+# Test whether malloc, realloc, calloc set errno to ENOMEM on failure.
- # Set gl_cv_func_malloc_posix to yes or no accordingly.
- AC_DEFUN([gl_CHECK_MALLOC_POSIX],
- [
-- AC_CACHE_CHECK([whether malloc, realloc, calloc are POSIX compliant],
-+ AC_REQUIRE([AC_CANONICAL_HOST])
-+ AC_CACHE_CHECK([whether malloc, realloc, calloc set errno on failure],
- [gl_cv_func_malloc_posix],
- [
- dnl It is too dangerous to try to allocate a large amount of memory:
- dnl some systems go to their knees when you do that. So assume that
-- dnl all Unix implementations of the function are POSIX compliant.
-- AC_COMPILE_IFELSE(
-- [AC_LANG_PROGRAM(
-- [[]],
-- [[#if defined _WIN32 && ! defined __CYGWIN__
-- choke me
-- #endif
-- ]])],
-- [gl_cv_func_malloc_posix=yes],
-- [gl_cv_func_malloc_posix=no])
-+ dnl all Unix implementations of the function set errno on failure,
-+ dnl except on those platforms where we have seen 'test-malloc-gnu',
-+ dnl 'test-realloc-gnu', 'test-calloc-gnu' fail.
-+ case "$host_os" in
-+ mingw*)
-+ gl_cv_func_malloc_posix=no ;;
-+ irix* | solaris*)
-+ dnl On IRIX 6.5, the three functions return NULL with errno unset
-+ dnl when the argument is larger than PTRDIFF_MAX.
-+ dnl On Solaris 11.3, the three functions return NULL with errno set
-+ dnl to EAGAIN, not ENOMEM, when the argument is larger than
-+ dnl PTRDIFF_MAX.
-+ dnl Here is a test program:
-+m4_divert_push([KILL])
-+#include <errno.h>
-+#include <stdio.h>
-+#include <stdlib.h>
-+#define ptrdiff_t long
-+#ifndef PTRDIFF_MAX
-+# define PTRDIFF_MAX ((ptrdiff_t) ((1UL << (8 * sizeof (ptrdiff_t) - 1)) - 1))
-+#endif
-+
-+int main ()
-+{
-+ void *p;
-+
-+ fprintf (stderr, "PTRDIFF_MAX = %lu\n", (unsigned long) PTRDIFF_MAX);
-+
-+ errno = 0;
-+ p = malloc ((unsigned long) PTRDIFF_MAX + 1);
-+ fprintf (stderr, "p=%p errno=%d\n", p, errno);
-+
-+ errno = 0;
-+ p = calloc (PTRDIFF_MAX / 2 + 1, 2);
-+ fprintf (stderr, "p=%p errno=%d\n", p, errno);
-+
-+ errno = 0;
-+ p = realloc (NULL, (unsigned long) PTRDIFF_MAX + 1);
-+ fprintf (stderr, "p=%p errno=%d\n", p, errno);
-+
-+ return 0;
-+}
-+m4_divert_pop([KILL])
-+ gl_cv_func_malloc_posix=no ;;
-+ *)
-+ gl_cv_func_malloc_posix=yes ;;
-+ esac
- ])
- ])
---- a/gnulib/m4/mbslen.m4
-+++ b/gnulib/m4/mbslen.m4
-@@ -1,4 +1,4 @@
--# mbslen.m4 serial 2
-+# mbslen.m4 serial 3
- dnl Copyright (C) 2010-2021 Free Software Foundation, Inc.
- dnl This file is free software; the Free Software Foundation
- dnl gives unlimited permission to copy and/or distribute it,
-@@ -6,7 +6,7 @@ dnl with or without modifications, as lo
-
- AC_DEFUN([gl_FUNC_MBSLEN],
- [
-- AC_REQUIRE([gl_HEADER_STRING_H_DEFAULTS])
-+ AC_REQUIRE([gl_STRING_H_DEFAULTS])
- AC_CHECK_FUNCS_ONCE([mbslen])
- if test $ac_cv_func_mbslen = yes; then
- HAVE_MBSLEN=1
---- a/gnulib/m4/memchr.m4
-+++ b/gnulib/m4/memchr.m4
-@@ -1,4 +1,4 @@
--# memchr.m4 serial 17
-+# memchr.m4 serial 18
- dnl Copyright (C) 2002-2004, 2009-2021 Free Software Foundation, Inc.
- dnl This file is free software; the Free Software Foundation
- dnl gives unlimited permission to copy and/or distribute it,
-@@ -13,7 +13,7 @@ AC_DEFUN_ONCE([gl_FUNC_MEMCHR],
- AC_CHECK_HEADERS_ONCE([sys/mman.h])
- AC_CHECK_FUNCS_ONCE([mprotect])
-
-- AC_REQUIRE([gl_HEADER_STRING_H_DEFAULTS])
-+ AC_REQUIRE([gl_STRING_H_DEFAULTS])
- # Detect platform-specific bugs in some versions of glibc:
- # memchr should not dereference anything with length 0
- # https://bugzilla.redhat.com/show_bug.cgi?id=499689
---- a/gnulib/m4/mempcpy.m4
-+++ b/gnulib/m4/mempcpy.m4
-@@ -1,4 +1,4 @@
--# mempcpy.m4 serial 11
-+# mempcpy.m4 serial 12
- dnl Copyright (C) 2003-2004, 2006-2007, 2009-2021 Free Software Foundation,
- dnl Inc.
- dnl This file is free software; the Free Software Foundation
-@@ -13,7 +13,7 @@ AC_DEFUN([gl_FUNC_MEMPCPY],
- dnl The mempcpy() declaration in lib/string.in.h uses 'restrict'.
- AC_REQUIRE([AC_C_RESTRICT])
-
-- AC_REQUIRE([gl_HEADER_STRING_H_DEFAULTS])
-+ AC_REQUIRE([gl_STRING_H_DEFAULTS])
- AC_CHECK_FUNCS([mempcpy])
- if test $ac_cv_func_mempcpy = no; then
- HAVE_MEMPCPY=0
---- a/gnulib/m4/memrchr.m4
-+++ b/gnulib/m4/memrchr.m4
-@@ -1,4 +1,4 @@
--# memrchr.m4 serial 10
-+# memrchr.m4 serial 11
- dnl Copyright (C) 2002-2003, 2005-2007, 2009-2021 Free Software Foundation,
- dnl Inc.
- dnl This file is free software; the Free Software Foundation
-@@ -10,7 +10,7 @@ AC_DEFUN([gl_FUNC_MEMRCHR],
- dnl Persuade glibc <string.h> to declare memrchr().
- AC_REQUIRE([AC_USE_SYSTEM_EXTENSIONS])
-
-- AC_REQUIRE([gl_HEADER_STRING_H_DEFAULTS])
-+ AC_REQUIRE([gl_STRING_H_DEFAULTS])
- AC_CHECK_DECLS_ONCE([memrchr])
- if test $ac_cv_have_decl_memrchr = no; then
- HAVE_DECL_MEMRCHR=0
---- /dev/null
-+++ b/gnulib/m4/minmax.m4
-@@ -0,0 +1,44 @@
-+# minmax.m4 serial 4
-+dnl Copyright (C) 2005, 2009-2021 Free Software Foundation, Inc.
-+dnl This file is free software; the Free Software Foundation
-+dnl gives unlimited permission to copy and/or distribute it,
-+dnl with or without modifications, as long as this notice is preserved.
-+
-+AC_PREREQ([2.53])
-+
-+AC_DEFUN([gl_MINMAX],
-+[
-+ AC_REQUIRE([gl_PREREQ_MINMAX])
-+])
-+
-+# Prerequisites of lib/minmax.h.
-+AC_DEFUN([gl_PREREQ_MINMAX],
-+[
-+ gl_MINMAX_IN_HEADER([limits.h])
-+ gl_MINMAX_IN_HEADER([sys/param.h])
-+])
-+
-+dnl gl_MINMAX_IN_HEADER(HEADER)
-+dnl The parameter has to be a literal header name; it cannot be macro,
-+dnl nor a shell variable. (Because autoheader collects only AC_DEFINE
-+dnl invocations with a literal macro name.)
-+AC_DEFUN([gl_MINMAX_IN_HEADER],
-+[
-+ m4_pushdef([header], AS_TR_SH([$1]))
-+ m4_pushdef([HEADER], AS_TR_CPP([$1]))
-+ AC_CACHE_CHECK([whether <$1> defines MIN and MAX],
-+ [gl_cv_minmax_in_]header,
-+ [AC_COMPILE_IFELSE(
-+ [AC_LANG_PROGRAM(
-+ [[#include <$1>
-+ int x = MIN (42, 17);]],
-+ [[]])],
-+ [gl_cv_minmax_in_]header[=yes],
-+ [gl_cv_minmax_in_]header[=no])])
-+ if test $gl_cv_minmax_in_[]header = yes; then
-+ AC_DEFINE([HAVE_MINMAX_IN_]HEADER, 1,
-+ [Define to 1 if <$1> defines the MIN and MAX macros.])
-+ fi
-+ m4_popdef([HEADER])
-+ m4_popdef([header])
-+])
---- a/gnulib/m4/printf.m4
-+++ b/gnulib/m4/printf.m4
-@@ -1,4 +1,4 @@
--# printf.m4 serial 72
-+# printf.m4 serial 73
- dnl Copyright (C) 2003, 2007-2021 Free Software Foundation, Inc.
- dnl This file is free software; the Free Software Foundation
- dnl gives unlimited permission to copy and/or distribute it,
-@@ -537,7 +537,7 @@ int main ()
- && strcmp (buf, "-0X6.488P-1 33") != 0
- && strcmp (buf, "-0XC.91P-2 33") != 0))
- result |= 2;
-- /* This catches a FreeBSD 6.1 bug: it doesn't round. */
-+ /* This catches a FreeBSD 13.0 bug: it doesn't round. */
- if (sprintf (buf, "%.2a %d", 1.51, 33, 44, 55) < 0
- || (strcmp (buf, "0x1.83p+0 33") != 0
- && strcmp (buf, "0x3.05p-1 33") != 0
-@@ -1690,6 +1690,7 @@ dnl
- dnl 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20
- dnl glibc 2.5 . . . . . . . . . . . . . . . . . . . .
- dnl glibc 2.3.6 . . . . # . . . . . . . . . . . . . . .
-+dnl FreeBSD 13.0 . . . . # . . . . . . . . # . . . . . .
- dnl FreeBSD 5.4, 6.1 . . . . # . . . . . . # . # . . . . . .
- dnl Mac OS X 10.13.5 . . . # # . # . . . . . . . . . . # . .
- dnl Mac OS X 10.5.8 . . . # # . . . . . . # . . . . . . . .
---- /dev/null
-+++ b/gnulib/m4/realloc.m4
-@@ -0,0 +1,63 @@
-+# realloc.m4 serial 24
-+dnl Copyright (C) 2007, 2009-2021 Free Software Foundation, Inc.
-+dnl This file is free software; the Free Software Foundation
-+dnl gives unlimited permission to copy and/or distribute it,
-+dnl with or without modifications, as long as this notice is preserved.
-+
-+# This is adapted with modifications from upstream Autoconf here:
-+# https://git.savannah.gnu.org/cgit/autoconf.git/tree/lib/autoconf/functions.m4?id=v2.70#n1455
-+AC_DEFUN([_AC_FUNC_REALLOC_IF],
-+[
-+ AC_REQUIRE([AC_CANONICAL_HOST])dnl for cross-compiles
-+ AC_CACHE_CHECK([whether realloc (0, 0) returns nonnull],
-+ [ac_cv_func_realloc_0_nonnull],
-+ [AC_RUN_IFELSE(
-+ [AC_LANG_PROGRAM(
-+ [[#include <stdlib.h>
-+ ]],
-+ [[void *p = realloc (0, 0);
-+ int result = !p;
-+ free (p);
-+ return result;]])
-+ ],
-+ [ac_cv_func_realloc_0_nonnull=yes],
-+ [ac_cv_func_realloc_0_nonnull=no],
-+ [case "$host_os" in
-+ # Guess yes on platforms where we know the result.
-+ *-gnu* | freebsd* | netbsd* | openbsd* | bitrig* \
-+ | gnu* | *-musl* | midnightbsd* \
-+ | hpux* | solaris* | cygwin* | mingw* | msys* )
-+ ac_cv_func_realloc_0_nonnull="guessing yes" ;;
-+ # If we don't know, obey --enable-cross-guesses.
-+ *) ac_cv_func_realloc_0_nonnull="$gl_cross_guess_normal" ;;
-+ esac
-+ ])
-+ ])
-+ AS_CASE([$ac_cv_func_realloc_0_nonnull], [*yes], [$1], [$2])
-+])# AC_FUNC_REALLOC
-+
-+# gl_FUNC_REALLOC_GNU
-+# -------------------
-+# Replace realloc if it is not compatible with GNU libc.
-+AC_DEFUN([gl_FUNC_REALLOC_GNU],
-+[
-+ AC_REQUIRE([gl_STDLIB_H_DEFAULTS])
-+ AC_REQUIRE([gl_FUNC_REALLOC_POSIX])
-+ if test $REPLACE_REALLOC = 0; then
-+ _AC_FUNC_REALLOC_IF([], [REPLACE_REALLOC=1])
-+ fi
-+])# gl_FUNC_REALLOC_GNU
-+
-+# gl_FUNC_REALLOC_POSIX
-+# ---------------------
-+# Test whether 'realloc' is POSIX compliant (sets errno to ENOMEM when it
-+# fails, and doesn't mess up with ptrdiff_t overflow),
-+# and replace realloc if it is not.
-+AC_DEFUN([gl_FUNC_REALLOC_POSIX],
-+[
-+ AC_REQUIRE([gl_STDLIB_H_DEFAULTS])
-+ AC_REQUIRE([gl_FUNC_MALLOC_POSIX])
-+ if test $REPLACE_MALLOC = 1; then
-+ REPLACE_REALLOC=1
-+ fi
-+])
---- /dev/null
-+++ b/gnulib/m4/reallocarray.m4
-@@ -0,0 +1,23 @@
-+# reallocarray.m4 serial 3
-+dnl Copyright (C) 2017-2021 Free Software Foundation, Inc.
-+dnl This file is free software; the Free Software Foundation
-+dnl gives unlimited permission to copy and/or distribute it,
-+dnl with or without modifications, as long as this notice is preserved.
-+
-+AC_DEFUN([gl_FUNC_REALLOCARRAY],
-+[
-+ dnl Persuade glibc <stdlib.h> to declare reallocarray.
-+ AC_REQUIRE([gl_USE_SYSTEM_EXTENSIONS])
-+
-+ AC_REQUIRE([gl_STDLIB_H_DEFAULTS])
-+ AC_REQUIRE([gl_CHECK_MALLOC_PTRDIFF])
-+ AC_CHECK_FUNCS([reallocarray])
-+ if test "$ac_cv_func_reallocarray" = no; then
-+ HAVE_REALLOCARRAY=0
-+ elif test "$gl_cv_malloc_ptrdiff" = no; then
-+ REPLACE_REALLOCARRAY=1
-+ fi
-+])
-+
-+# Prerequisites of lib/reallocarray.c.
-+AC_DEFUN([gl_PREREQ_REALLOCARRAY], [:])
---- a/gnulib/m4/stat.m4
-+++ b/gnulib/m4/stat.m4
-@@ -1,4 +1,4 @@
--# serial 17
-+# serial 18
-
- # Copyright (C) 2009-2021 Free Software Foundation, Inc.
- #
-@@ -69,7 +69,7 @@ AC_DEFUN([gl_FUNC_STAT],
-
- # Prerequisites of lib/stat.c and lib/stat-w32.c.
- AC_DEFUN([gl_PREREQ_STAT], [
-- AC_REQUIRE([gl_HEADER_SYS_STAT_H])
-+ AC_REQUIRE([gl_SYS_STAT_H])
- AC_REQUIRE([gl_PREREQ_STAT_W32])
- :
- ])
---- a/gnulib/m4/stddef_h.m4
-+++ b/gnulib/m4/stddef_h.m4
-@@ -1,4 +1,4 @@
--# stddef_h.m4 serial 9
-+# stddef_h.m4 serial 11
- dnl Copyright (C) 2009-2021 Free Software Foundation, Inc.
- dnl This file is free software; the Free Software Foundation
- dnl gives unlimited permission to copy and/or distribute it,
-@@ -6,7 +6,7 @@ dnl with or without modifications, as lo
-
- dnl A placeholder for <stddef.h>, for platforms that have issues.
-
--AC_DEFUN([gl_STDDEF_H],
-+AC_DEFUN_ONCE([gl_STDDEF_H],
- [
- AC_REQUIRE([gl_STDDEF_H_DEFAULTS])
- AC_REQUIRE([gt_TYPE_WCHAR_T])
-@@ -68,13 +68,28 @@ AC_DEFUN([gl_STDDEF_H],
- fi
- ])
-
-+# gl_STDDEF_MODULE_INDICATOR([modulename])
-+# sets the shell variable that indicates the presence of the given module
-+# to a C preprocessor expression that will evaluate to 1.
-+# This macro invocation must not occur in macros that are AC_REQUIREd.
- AC_DEFUN([gl_STDDEF_MODULE_INDICATOR],
- [
-- dnl Use AC_REQUIRE here, so that the default settings are expanded once only.
-- AC_REQUIRE([gl_STDDEF_H_DEFAULTS])
-+ dnl Ensure to expand the default settings once only.
-+ gl_STDDEF_H_REQUIRE_DEFAULTS
- gl_MODULE_INDICATOR_SET_VARIABLE([$1])
- ])
-
-+# Initializes the default values for AC_SUBSTed shell variables.
-+# This macro must not be AC_REQUIREd. It must only be invoked, and only
-+# outside of macros or in macros that are not AC_REQUIREd.
-+AC_DEFUN([gl_STDDEF_H_REQUIRE_DEFAULTS],
-+[
-+ m4_defun(GL_MODULE_INDICATOR_PREFIX[_STDDEF_H_MODULE_INDICATOR_DEFAULTS], [
-+ ])
-+ m4_require(GL_MODULE_INDICATOR_PREFIX[_STDDEF_H_MODULE_INDICATOR_DEFAULTS])
-+ AC_REQUIRE([gl_STDDEF_H_DEFAULTS])
-+])
-+
- AC_DEFUN([gl_STDDEF_H_DEFAULTS],
- [
- dnl Assume proper GNU behavior unless another module says otherwise.
---- a/gnulib/m4/stdint.m4
-+++ b/gnulib/m4/stdint.m4
-@@ -1,4 +1,4 @@
--# stdint.m4 serial 58
-+# stdint.m4 serial 60
- dnl Copyright (C) 2001-2021 Free Software Foundation, Inc.
- dnl This file is free software; the Free Software Foundation
- dnl gives unlimited permission to copy and/or distribute it,
-@@ -170,7 +170,7 @@ struct s {
- PTRDIFF_MIN == TYPE_MINIMUM (ptrdiff_t)
- && PTRDIFF_MAX == TYPE_MAXIMUM (ptrdiff_t)
- ? 1 : -1;
-- /* Detect bug in FreeBSD 6.0 / ia64. */
-+ /* Detect bug in FreeBSD 6.0/ia64 and FreeBSD 13.0/arm64. */
- int check_SIG_ATOMIC:
- SIG_ATOMIC_MIN == TYPE_MINIMUM (sig_atomic_t)
- && SIG_ATOMIC_MAX == TYPE_MAXIMUM (sig_atomic_t)
-@@ -527,7 +527,7 @@ AC_DEFUN([gl_STDINT_TYPE_PROPERTIES],
- dnl requirement that wint_t is "unchanged by default argument promotions".
- dnl In this case gnulib's <wchar.h> and <wctype.h> override wint_t.
- dnl Set the variable BITSIZEOF_WINT_T accordingly.
-- if test $GNULIB_OVERRIDES_WINT_T = 1; then
-+ if test $GNULIBHEADERS_OVERRIDE_WINT_T = 1; then
- BITSIZEOF_WINT_T=32
- fi
- ])
---- a/gnulib/m4/stdio_h.m4
-+++ b/gnulib/m4/stdio_h.m4
-@@ -1,11 +1,12 @@
--# stdio_h.m4 serial 52
-+# stdio_h.m4 serial 56
- dnl Copyright (C) 2007-2021 Free Software Foundation, Inc.
- dnl This file is free software; the Free Software Foundation
- dnl gives unlimited permission to copy and/or distribute it,
- dnl with or without modifications, as long as this notice is preserved.
-
--AC_DEFUN([gl_STDIO_H],
-+AC_DEFUN_ONCE([gl_STDIO_H],
- [
-+ AC_REQUIRE([gl_STDIO_H_DEFAULTS])
- AH_VERBATIM([MINGW_ANSI_STDIO],
- [/* Use GNU style printf and scanf. */
- #ifndef __USE_MINGW_ANSI_STDIO
-@@ -13,7 +14,6 @@ AC_DEFUN([gl_STDIO_H],
- #endif
- ])
- AC_DEFINE([__USE_MINGW_ANSI_STDIO])
-- AC_REQUIRE([gl_STDIO_H_DEFAULTS])
- gl_NEXT_HEADERS([stdio.h])
-
- dnl Determine whether __USE_MINGW_ANSI_STDIO makes printf and
-@@ -40,17 +40,6 @@ AC_DEFUN([gl_STDIO_H],
- attribute "__gnu_printf__" instead of "__printf__"])
- fi
-
-- dnl No need to create extra modules for these functions. Everyone who uses
-- dnl <stdio.h> likely needs them.
-- GNULIB_FSCANF=1
-- gl_MODULE_INDICATOR([fscanf])
-- GNULIB_SCANF=1
-- gl_MODULE_INDICATOR([scanf])
-- GNULIB_FGETC=1
-- GNULIB_GETC=1
-- GNULIB_GETCHAR=1
-- GNULIB_FGETS=1
-- GNULIB_FREAD=1
- dnl This ifdef is necessary to avoid an error "missing file lib/stdio-read.c"
- dnl "expected source file, required through AC_LIBSOURCES, not found". It is
- dnl also an optimization, to avoid performing a configure check whose result
-@@ -64,18 +53,6 @@ AC_DEFUN([gl_STDIO_H],
- fi
- ])
-
-- dnl No need to create extra modules for these functions. Everyone who uses
-- dnl <stdio.h> likely needs them.
-- GNULIB_FPRINTF=1
-- GNULIB_PRINTF=1
-- GNULIB_VFPRINTF=1
-- GNULIB_VPRINTF=1
-- GNULIB_FPUTC=1
-- GNULIB_PUTC=1
-- GNULIB_PUTCHAR=1
-- GNULIB_FPUTS=1
-- GNULIB_PUTS=1
-- GNULIB_FWRITE=1
- dnl This ifdef is necessary to avoid an error "missing file lib/stdio-write.c"
- dnl "expected source file, required through AC_LIBSOURCES, not found". It is
- dnl also an optimization, to avoid performing a configure check whose result
-@@ -116,77 +93,92 @@ AC_DEFUN([gl_STDIO_H],
- fi
- ])
-
-+# gl_STDIO_MODULE_INDICATOR([modulename])
-+# sets the shell variable that indicates the presence of the given module
-+# to a C preprocessor expression that will evaluate to 1.
-+# This macro invocation must not occur in macros that are AC_REQUIREd.
- AC_DEFUN([gl_STDIO_MODULE_INDICATOR],
- [
-- dnl Use AC_REQUIRE here, so that the default settings are expanded once only.
-- AC_REQUIRE([gl_STDIO_H_DEFAULTS])
-+ dnl Ensure to expand the default settings once only.
-+ gl_STDIO_H_REQUIRE_DEFAULTS
- gl_MODULE_INDICATOR_SET_VARIABLE([$1])
- dnl Define it also as a C macro, for the benefit of the unit tests.
- gl_MODULE_INDICATOR_FOR_TESTS([$1])
- ])
-
-+# Initializes the default values for AC_SUBSTed shell variables.
-+# This macro must not be AC_REQUIREd. It must only be invoked, and only
-+# outside of macros or in macros that are not AC_REQUIREd.
-+AC_DEFUN([gl_STDIO_H_REQUIRE_DEFAULTS],
-+[
-+ m4_defun(GL_MODULE_INDICATOR_PREFIX[_STDIO_H_MODULE_INDICATOR_DEFAULTS], [
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_DPRINTF])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_FCLOSE])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_FDOPEN])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_FFLUSH])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_FGETC])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_FGETS])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_FOPEN])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_FPRINTF])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_FPRINTF_POSIX])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_FPURGE])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_FPUTC])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_FPUTS])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_FREAD])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_FREOPEN])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_FSCANF])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_FSEEK])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_FSEEKO])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_FTELL])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_FTELLO])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_FWRITE])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_GETC])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_GETCHAR])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_GETDELIM])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_GETLINE])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_OBSTACK_PRINTF])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_OBSTACK_PRINTF_POSIX])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_PCLOSE])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_PERROR])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_POPEN])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_PRINTF])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_PRINTF_POSIX])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_PUTC])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_PUTCHAR])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_PUTS])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_REMOVE])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_RENAME])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_RENAMEAT])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_SCANF])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_SNPRINTF])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_SPRINTF_POSIX])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_STDIO_H_NONBLOCKING])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_STDIO_H_SIGPIPE])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_TMPFILE])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_VASPRINTF])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_VFSCANF])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_VSCANF])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_VDPRINTF])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_VFPRINTF])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_VFPRINTF_POSIX])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_VPRINTF])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_VPRINTF_POSIX])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_VSNPRINTF])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_VSPRINTF_POSIX])
-+ dnl Support Microsoft deprecated alias function names by default.
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_MDA_FCLOSEALL], [1])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_MDA_FDOPEN], [1])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_MDA_FILENO], [1])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_MDA_GETW], [1])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_MDA_PUTW], [1])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_MDA_TEMPNAM], [1])
-+ ])
-+ m4_require(GL_MODULE_INDICATOR_PREFIX[_STDIO_H_MODULE_INDICATOR_DEFAULTS])
-+ AC_REQUIRE([gl_STDIO_H_DEFAULTS])
-+])
-+
- AC_DEFUN([gl_STDIO_H_DEFAULTS],
- [
-- GNULIB_DPRINTF=0; AC_SUBST([GNULIB_DPRINTF])
-- GNULIB_FCLOSE=0; AC_SUBST([GNULIB_FCLOSE])
-- GNULIB_FDOPEN=0; AC_SUBST([GNULIB_FDOPEN])
-- GNULIB_FFLUSH=0; AC_SUBST([GNULIB_FFLUSH])
-- GNULIB_FGETC=0; AC_SUBST([GNULIB_FGETC])
-- GNULIB_FGETS=0; AC_SUBST([GNULIB_FGETS])
-- GNULIB_FOPEN=0; AC_SUBST([GNULIB_FOPEN])
-- GNULIB_FPRINTF=0; AC_SUBST([GNULIB_FPRINTF])
-- GNULIB_FPRINTF_POSIX=0; AC_SUBST([GNULIB_FPRINTF_POSIX])
-- GNULIB_FPURGE=0; AC_SUBST([GNULIB_FPURGE])
-- GNULIB_FPUTC=0; AC_SUBST([GNULIB_FPUTC])
-- GNULIB_FPUTS=0; AC_SUBST([GNULIB_FPUTS])
-- GNULIB_FREAD=0; AC_SUBST([GNULIB_FREAD])
-- GNULIB_FREOPEN=0; AC_SUBST([GNULIB_FREOPEN])
-- GNULIB_FSCANF=0; AC_SUBST([GNULIB_FSCANF])
-- GNULIB_FSEEK=0; AC_SUBST([GNULIB_FSEEK])
-- GNULIB_FSEEKO=0; AC_SUBST([GNULIB_FSEEKO])
-- GNULIB_FTELL=0; AC_SUBST([GNULIB_FTELL])
-- GNULIB_FTELLO=0; AC_SUBST([GNULIB_FTELLO])
-- GNULIB_FWRITE=0; AC_SUBST([GNULIB_FWRITE])
-- GNULIB_GETC=0; AC_SUBST([GNULIB_GETC])
-- GNULIB_GETCHAR=0; AC_SUBST([GNULIB_GETCHAR])
-- GNULIB_GETDELIM=0; AC_SUBST([GNULIB_GETDELIM])
-- GNULIB_GETLINE=0; AC_SUBST([GNULIB_GETLINE])
-- GNULIB_OBSTACK_PRINTF=0; AC_SUBST([GNULIB_OBSTACK_PRINTF])
-- GNULIB_OBSTACK_PRINTF_POSIX=0; AC_SUBST([GNULIB_OBSTACK_PRINTF_POSIX])
-- GNULIB_PCLOSE=0; AC_SUBST([GNULIB_PCLOSE])
-- GNULIB_PERROR=0; AC_SUBST([GNULIB_PERROR])
-- GNULIB_POPEN=0; AC_SUBST([GNULIB_POPEN])
-- GNULIB_PRINTF=0; AC_SUBST([GNULIB_PRINTF])
-- GNULIB_PRINTF_POSIX=0; AC_SUBST([GNULIB_PRINTF_POSIX])
-- GNULIB_PUTC=0; AC_SUBST([GNULIB_PUTC])
-- GNULIB_PUTCHAR=0; AC_SUBST([GNULIB_PUTCHAR])
-- GNULIB_PUTS=0; AC_SUBST([GNULIB_PUTS])
-- GNULIB_REMOVE=0; AC_SUBST([GNULIB_REMOVE])
-- GNULIB_RENAME=0; AC_SUBST([GNULIB_RENAME])
-- GNULIB_RENAMEAT=0; AC_SUBST([GNULIB_RENAMEAT])
-- GNULIB_SCANF=0; AC_SUBST([GNULIB_SCANF])
-- GNULIB_SNPRINTF=0; AC_SUBST([GNULIB_SNPRINTF])
-- GNULIB_SPRINTF_POSIX=0; AC_SUBST([GNULIB_SPRINTF_POSIX])
-- GNULIB_STDIO_H_NONBLOCKING=0; AC_SUBST([GNULIB_STDIO_H_NONBLOCKING])
-- GNULIB_STDIO_H_SIGPIPE=0; AC_SUBST([GNULIB_STDIO_H_SIGPIPE])
-- GNULIB_TMPFILE=0; AC_SUBST([GNULIB_TMPFILE])
-- GNULIB_VASPRINTF=0; AC_SUBST([GNULIB_VASPRINTF])
-- GNULIB_VFSCANF=0; AC_SUBST([GNULIB_VFSCANF])
-- GNULIB_VSCANF=0; AC_SUBST([GNULIB_VSCANF])
-- GNULIB_VDPRINTF=0; AC_SUBST([GNULIB_VDPRINTF])
-- GNULIB_VFPRINTF=0; AC_SUBST([GNULIB_VFPRINTF])
-- GNULIB_VFPRINTF_POSIX=0; AC_SUBST([GNULIB_VFPRINTF_POSIX])
-- GNULIB_VPRINTF=0; AC_SUBST([GNULIB_VPRINTF])
-- GNULIB_VPRINTF_POSIX=0; AC_SUBST([GNULIB_VPRINTF_POSIX])
-- GNULIB_VSNPRINTF=0; AC_SUBST([GNULIB_VSNPRINTF])
-- GNULIB_VSPRINTF_POSIX=0; AC_SUBST([GNULIB_VSPRINTF_POSIX])
-- dnl Support Microsoft deprecated alias function names by default.
-- GNULIB_MDA_FCLOSEALL=1; AC_SUBST([GNULIB_MDA_FCLOSEALL])
-- GNULIB_MDA_FDOPEN=1; AC_SUBST([GNULIB_MDA_FDOPEN])
-- GNULIB_MDA_FILENO=1; AC_SUBST([GNULIB_MDA_FILENO])
-- GNULIB_MDA_GETW=1; AC_SUBST([GNULIB_MDA_GETW])
-- GNULIB_MDA_PUTW=1; AC_SUBST([GNULIB_MDA_PUTW])
-- GNULIB_MDA_TEMPNAM=1; AC_SUBST([GNULIB_MDA_TEMPNAM])
- dnl Assume proper GNU behavior unless another module says otherwise.
- HAVE_DECL_FCLOSEALL=1; AC_SUBST([HAVE_DECL_FCLOSEALL])
- HAVE_DECL_FPURGE=1; AC_SUBST([HAVE_DECL_FPURGE])
---- a/gnulib/m4/stdlib_h.m4
-+++ b/gnulib/m4/stdlib_h.m4
-@@ -1,10 +1,10 @@
--# stdlib_h.m4 serial 55
-+# stdlib_h.m4 serial 63
- dnl Copyright (C) 2007-2021 Free Software Foundation, Inc.
- dnl This file is free software; the Free Software Foundation
- dnl gives unlimited permission to copy and/or distribute it,
- dnl with or without modifications, as long as this notice is preserved.
-
--AC_DEFUN([gl_STDLIB_H],
-+AC_DEFUN_ONCE([gl_STDLIB_H],
- [
- AC_REQUIRE([gl_STDLIB_H_DEFAULTS])
- gl_NEXT_HEADERS([stdlib.h])
-@@ -28,7 +28,7 @@ AC_DEFUN([gl_STDLIB_H],
- posix_memalign posix_openpt ptsname ptsname_r qsort_r
- random random_r reallocarray realpath rpmatch secure_getenv setenv
- setstate setstate_r srandom srandom_r
-- strtod strtold strtoll strtoull unlockpt unsetenv])
-+ strtod strtol strtold strtoll strtoul strtoull unlockpt unsetenv])
-
- AC_REQUIRE([AC_C_RESTRICT])
-
-@@ -46,61 +46,78 @@ AC_DEFUN([gl_STDLIB_H],
- fi
- ])
-
-+# gl_STDLIB_MODULE_INDICATOR([modulename])
-+# sets the shell variable that indicates the presence of the given module
-+# to a C preprocessor expression that will evaluate to 1.
-+# This macro invocation must not occur in macros that are AC_REQUIREd.
- AC_DEFUN([gl_STDLIB_MODULE_INDICATOR],
- [
-- dnl Use AC_REQUIRE here, so that the default settings are expanded once only.
-- AC_REQUIRE([gl_STDLIB_H_DEFAULTS])
-+ dnl Ensure to expand the default settings once only.
-+ gl_STDLIB_H_REQUIRE_DEFAULTS
- gl_MODULE_INDICATOR_SET_VARIABLE([$1])
- dnl Define it also as a C macro, for the benefit of the unit tests.
- gl_MODULE_INDICATOR_FOR_TESTS([$1])
- ])
-
-+# Initializes the default values for AC_SUBSTed shell variables.
-+# This macro must not be AC_REQUIREd. It must only be invoked, and only
-+# outside of macros or in macros that are not AC_REQUIREd.
-+AC_DEFUN([gl_STDLIB_H_REQUIRE_DEFAULTS],
-+[
-+ m4_defun(GL_MODULE_INDICATOR_PREFIX[_STDLIB_H_MODULE_INDICATOR_DEFAULTS], [
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB__EXIT])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_ALIGNED_ALLOC])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_ATOLL])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_CALLOC_POSIX])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_CANONICALIZE_FILE_NAME])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_FREE_POSIX])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_GETLOADAVG])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_GETSUBOPT])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_GRANTPT])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_MALLOC_POSIX])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_MBTOWC])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_MKDTEMP])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_MKOSTEMP])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_MKOSTEMPS])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_MKSTEMP])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_MKSTEMPS])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_POSIX_MEMALIGN])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_POSIX_OPENPT])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_PTSNAME])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_PTSNAME_R])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_PUTENV])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_QSORT_R])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_RANDOM])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_RANDOM_R])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_REALLOCARRAY])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_REALLOC_POSIX])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_REALPATH])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_RPMATCH])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_SECURE_GETENV])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_SETENV])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_STRTOD])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_STRTOL])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_STRTOLD])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_STRTOLL])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_STRTOUL])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_STRTOULL])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_SYSTEM_POSIX])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_UNLOCKPT])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_UNSETENV])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_WCTOMB])
-+ dnl Support Microsoft deprecated alias function names by default.
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_MDA_ECVT], [1])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_MDA_FCVT], [1])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_MDA_GCVT], [1])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_MDA_MKTEMP], [1])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_MDA_PUTENV], [1])
-+ ])
-+ m4_require(GL_MODULE_INDICATOR_PREFIX[_STDLIB_H_MODULE_INDICATOR_DEFAULTS])
-+ AC_REQUIRE([gl_STDLIB_H_DEFAULTS])
-+])
-+
- AC_DEFUN([gl_STDLIB_H_DEFAULTS],
- [
-- GNULIB__EXIT=0; AC_SUBST([GNULIB__EXIT])
-- GNULIB_ALIGNED_ALLOC=0; AC_SUBST([GNULIB_ALIGNED_ALLOC])
-- GNULIB_ATOLL=0; AC_SUBST([GNULIB_ATOLL])
-- GNULIB_CALLOC_POSIX=0; AC_SUBST([GNULIB_CALLOC_POSIX])
-- GNULIB_CANONICALIZE_FILE_NAME=0; AC_SUBST([GNULIB_CANONICALIZE_FILE_NAME])
-- GNULIB_FREE_POSIX=0; AC_SUBST([GNULIB_FREE_POSIX])
-- GNULIB_GETLOADAVG=0; AC_SUBST([GNULIB_GETLOADAVG])
-- GNULIB_GETSUBOPT=0; AC_SUBST([GNULIB_GETSUBOPT])
-- GNULIB_GRANTPT=0; AC_SUBST([GNULIB_GRANTPT])
-- GNULIB_MALLOC_POSIX=0; AC_SUBST([GNULIB_MALLOC_POSIX])
-- GNULIB_MBTOWC=0; AC_SUBST([GNULIB_MBTOWC])
-- GNULIB_MKDTEMP=0; AC_SUBST([GNULIB_MKDTEMP])
-- GNULIB_MKOSTEMP=0; AC_SUBST([GNULIB_MKOSTEMP])
-- GNULIB_MKOSTEMPS=0; AC_SUBST([GNULIB_MKOSTEMPS])
-- GNULIB_MKSTEMP=0; AC_SUBST([GNULIB_MKSTEMP])
-- GNULIB_MKSTEMPS=0; AC_SUBST([GNULIB_MKSTEMPS])
-- GNULIB_POSIX_MEMALIGN=0;AC_SUBST([GNULIB_POSIX_MEMALIGN])
-- GNULIB_POSIX_OPENPT=0; AC_SUBST([GNULIB_POSIX_OPENPT])
-- GNULIB_PTSNAME=0; AC_SUBST([GNULIB_PTSNAME])
-- GNULIB_PTSNAME_R=0; AC_SUBST([GNULIB_PTSNAME_R])
-- GNULIB_PUTENV=0; AC_SUBST([GNULIB_PUTENV])
-- GNULIB_QSORT_R=0; AC_SUBST([GNULIB_QSORT_R])
-- GNULIB_RANDOM=0; AC_SUBST([GNULIB_RANDOM])
-- GNULIB_RANDOM_R=0; AC_SUBST([GNULIB_RANDOM_R])
-- GNULIB_REALLOCARRAY=0; AC_SUBST([GNULIB_REALLOCARRAY])
-- GNULIB_REALLOC_POSIX=0; AC_SUBST([GNULIB_REALLOC_POSIX])
-- GNULIB_REALPATH=0; AC_SUBST([GNULIB_REALPATH])
-- GNULIB_RPMATCH=0; AC_SUBST([GNULIB_RPMATCH])
-- GNULIB_SECURE_GETENV=0; AC_SUBST([GNULIB_SECURE_GETENV])
-- GNULIB_SETENV=0; AC_SUBST([GNULIB_SETENV])
-- GNULIB_STRTOD=0; AC_SUBST([GNULIB_STRTOD])
-- GNULIB_STRTOLD=0; AC_SUBST([GNULIB_STRTOLD])
-- GNULIB_STRTOLL=0; AC_SUBST([GNULIB_STRTOLL])
-- GNULIB_STRTOULL=0; AC_SUBST([GNULIB_STRTOULL])
-- GNULIB_SYSTEM_POSIX=0; AC_SUBST([GNULIB_SYSTEM_POSIX])
-- GNULIB_UNLOCKPT=0; AC_SUBST([GNULIB_UNLOCKPT])
-- GNULIB_UNSETENV=0; AC_SUBST([GNULIB_UNSETENV])
-- GNULIB_WCTOMB=0; AC_SUBST([GNULIB_WCTOMB])
-- dnl Support Microsoft deprecated alias function names by default.
-- GNULIB_MDA_ECVT=1; AC_SUBST([GNULIB_MDA_ECVT])
-- GNULIB_MDA_FCVT=1; AC_SUBST([GNULIB_MDA_FCVT])
-- GNULIB_MDA_GCVT=1; AC_SUBST([GNULIB_MDA_GCVT])
-- GNULIB_MDA_MKTEMP=1; AC_SUBST([GNULIB_MDA_MKTEMP])
-- GNULIB_MDA_PUTENV=1; AC_SUBST([GNULIB_MDA_PUTENV])
- dnl Assume proper GNU behavior unless another module says otherwise.
- HAVE__EXIT=1; AC_SUBST([HAVE__EXIT])
- HAVE_ALIGNED_ALLOC=1; AC_SUBST([HAVE_ALIGNED_ALLOC])
-@@ -137,8 +154,10 @@ AC_DEFUN([gl_STDLIB_H_DEFAULTS],
- HAVE_SETSTATE=1; AC_SUBST([HAVE_SETSTATE])
- HAVE_DECL_SETSTATE=1; AC_SUBST([HAVE_DECL_SETSTATE])
- HAVE_STRTOD=1; AC_SUBST([HAVE_STRTOD])
-+ HAVE_STRTOL=1; AC_SUBST([HAVE_STRTOL])
- HAVE_STRTOLD=1; AC_SUBST([HAVE_STRTOLD])
- HAVE_STRTOLL=1; AC_SUBST([HAVE_STRTOLL])
-+ HAVE_STRTOUL=1; AC_SUBST([HAVE_STRTOUL])
- HAVE_STRTOULL=1; AC_SUBST([HAVE_STRTOULL])
- HAVE_STRUCT_RANDOM_DATA=1; AC_SUBST([HAVE_STRUCT_RANDOM_DATA])
- HAVE_SYS_LOADAVG_H=0; AC_SUBST([HAVE_SYS_LOADAVG_H])
-@@ -160,11 +179,16 @@ AC_DEFUN([gl_STDLIB_H_DEFAULTS],
- REPLACE_RANDOM=0; AC_SUBST([REPLACE_RANDOM])
- REPLACE_RANDOM_R=0; AC_SUBST([REPLACE_RANDOM_R])
- REPLACE_REALLOC=0; AC_SUBST([REPLACE_REALLOC])
-+ REPLACE_REALLOCARRAY=0; AC_SUBST([REPLACE_REALLOCARRAY])
- REPLACE_REALPATH=0; AC_SUBST([REPLACE_REALPATH])
- REPLACE_SETENV=0; AC_SUBST([REPLACE_SETENV])
- REPLACE_SETSTATE=0; AC_SUBST([REPLACE_SETSTATE])
- REPLACE_STRTOD=0; AC_SUBST([REPLACE_STRTOD])
-+ REPLACE_STRTOL=0; AC_SUBST([REPLACE_STRTOL])
- REPLACE_STRTOLD=0; AC_SUBST([REPLACE_STRTOLD])
-+ REPLACE_STRTOLL=0; AC_SUBST([REPLACE_STRTOLL])
-+ REPLACE_STRTOUL=0; AC_SUBST([REPLACE_STRTOUL])
-+ REPLACE_STRTOULL=0; AC_SUBST([REPLACE_STRTOULL])
- REPLACE_UNSETENV=0; AC_SUBST([REPLACE_UNSETENV])
- REPLACE_WCTOMB=0; AC_SUBST([REPLACE_WCTOMB])
- ])
---- a/gnulib/m4/stpcpy.m4
-+++ b/gnulib/m4/stpcpy.m4
-@@ -1,4 +1,4 @@
--# stpcpy.m4 serial 8
-+# stpcpy.m4 serial 9
- dnl Copyright (C) 2002, 2007, 2009-2021 Free Software Foundation, Inc.
- dnl This file is free software; the Free Software Foundation
- dnl gives unlimited permission to copy and/or distribute it,
-@@ -12,7 +12,7 @@ AC_DEFUN([gl_FUNC_STPCPY],
- dnl The stpcpy() declaration in lib/string.in.h uses 'restrict'.
- AC_REQUIRE([AC_C_RESTRICT])
-
-- AC_REQUIRE([gl_HEADER_STRING_H_DEFAULTS])
-+ AC_REQUIRE([gl_STRING_H_DEFAULTS])
- AC_CHECK_FUNCS([stpcpy])
- if test $ac_cv_func_stpcpy = no; then
- HAVE_STPCPY=0
---- a/gnulib/m4/strcase.m4
-+++ b/gnulib/m4/strcase.m4
-@@ -1,4 +1,4 @@
--# strcase.m4 serial 11
-+# strcase.m4 serial 12
- dnl Copyright (C) 2002, 2005-2021 Free Software Foundation, Inc.
- dnl This file is free software; the Free Software Foundation
- dnl gives unlimited permission to copy and/or distribute it,
-@@ -12,7 +12,7 @@ AC_DEFUN([gl_STRCASE],
-
- AC_DEFUN([gl_FUNC_STRCASECMP],
- [
-- AC_REQUIRE([gl_HEADER_STRINGS_H_DEFAULTS])
-+ AC_REQUIRE([gl_STRINGS_H_DEFAULTS])
- AC_CHECK_FUNCS([strcasecmp])
- if test $ac_cv_func_strcasecmp = no; then
- HAVE_STRCASECMP=0
-@@ -21,7 +21,7 @@ AC_DEFUN([gl_FUNC_STRCASECMP],
-
- AC_DEFUN([gl_FUNC_STRNCASECMP],
- [
-- AC_REQUIRE([gl_HEADER_STRINGS_H_DEFAULTS])
-+ AC_REQUIRE([gl_STRINGS_H_DEFAULTS])
- AC_CHECK_FUNCS([strncasecmp])
- if test $ac_cv_func_strncasecmp = yes; then
- HAVE_STRNCASECMP=1
---- a/gnulib/m4/strcasestr.m4
-+++ b/gnulib/m4/strcasestr.m4
-@@ -1,4 +1,4 @@
--# strcasestr.m4 serial 26
-+# strcasestr.m4 serial 27
- dnl Copyright (C) 2005, 2007-2021 Free Software Foundation, Inc.
- dnl This file is free software; the Free Software Foundation
- dnl gives unlimited permission to copy and/or distribute it,
-@@ -7,7 +7,7 @@ dnl with or without modifications, as lo
- dnl Check that strcasestr is present and works.
- AC_DEFUN([gl_FUNC_STRCASESTR_SIMPLE],
- [
-- AC_REQUIRE([gl_HEADER_STRING_H_DEFAULTS])
-+ AC_REQUIRE([gl_STRING_H_DEFAULTS])
-
- dnl Persuade glibc <string.h> to declare strcasestr().
- AC_REQUIRE([AC_USE_SYSTEM_EXTENSIONS])
---- a/gnulib/m4/strdup.m4
-+++ b/gnulib/m4/strdup.m4
-@@ -1,4 +1,4 @@
--# strdup.m4 serial 14
-+# strdup.m4 serial 15
-
- dnl Copyright (C) 2002-2021 Free Software Foundation, Inc.
-
-@@ -8,7 +8,7 @@ dnl with or without modifications, as lo
-
- AC_DEFUN([gl_FUNC_STRDUP],
- [
-- AC_REQUIRE([gl_HEADER_STRING_H_DEFAULTS])
-+ AC_REQUIRE([gl_STRING_H_DEFAULTS])
- AC_CHECK_DECLS_ONCE([strdup])
- if test $ac_cv_have_decl_strdup = no; then
- HAVE_DECL_STRDUP=0
-@@ -17,7 +17,7 @@ AC_DEFUN([gl_FUNC_STRDUP],
-
- AC_DEFUN([gl_FUNC_STRDUP_POSIX],
- [
-- AC_REQUIRE([gl_HEADER_STRING_H_DEFAULTS])
-+ AC_REQUIRE([gl_STRING_H_DEFAULTS])
- AC_REQUIRE([gl_CHECK_MALLOC_POSIX])
- if test $gl_cv_func_malloc_posix != yes; then
- REPLACE_STRDUP=1
---- a/gnulib/m4/strerror.m4
-+++ b/gnulib/m4/strerror.m4
-@@ -1,4 +1,4 @@
--# strerror.m4 serial 21
-+# strerror.m4 serial 22
- dnl Copyright (C) 2002, 2007-2021 Free Software Foundation, Inc.
- dnl This file is free software; the Free Software Foundation
- dnl gives unlimited permission to copy and/or distribute it,
-@@ -6,7 +6,7 @@ dnl with or without modifications, as lo
-
- AC_DEFUN([gl_FUNC_STRERROR],
- [
-- AC_REQUIRE([gl_HEADER_STRING_H_DEFAULTS])
-+ AC_REQUIRE([gl_STRING_H_DEFAULTS])
- AC_REQUIRE([gl_HEADER_ERRNO_H])
- AC_REQUIRE([gl_FUNC_STRERROR_0])
- AC_REQUIRE([AC_CANONICAL_HOST]) dnl for cross-compiles
---- a/gnulib/m4/string_h.m4
-+++ b/gnulib/m4/string_h.m4
-@@ -5,20 +5,15 @@
- # gives unlimited permission to copy and/or distribute it,
- # with or without modifications, as long as this notice is preserved.
-
--# serial 29
-+# serial 32
-
- # Written by Paul Eggert.
-
--AC_DEFUN([gl_HEADER_STRING_H],
-+AC_DEFUN_ONCE([gl_STRING_H],
- [
-- dnl Use AC_REQUIRE here, so that the default behavior below is expanded
-- dnl once only, before all statements that occur in other macros.
-- AC_REQUIRE([gl_HEADER_STRING_H_BODY])
--])
--
--AC_DEFUN([gl_HEADER_STRING_H_BODY],
--[
-- AC_REQUIRE([gl_HEADER_STRING_H_DEFAULTS])
-+ dnl Ensure to expand the default settings once only, before all statements
-+ dnl that occur in other macros.
-+ AC_REQUIRE([gl_STRING_H_DEFAULTS])
- gl_NEXT_HEADERS([string.h])
-
- dnl Check for declarations of anything we want to poison if the
-@@ -33,62 +28,77 @@ AC_DEFUN([gl_HEADER_STRING_H_BODY],
- AC_REQUIRE([AC_C_RESTRICT])
- ])
-
-+# gl_STRING_MODULE_INDICATOR([modulename])
-+# sets the shell variable that indicates the presence of the given module
-+# to a C preprocessor expression that will evaluate to 1.
-+# This macro invocation must not occur in macros that are AC_REQUIREd.
- AC_DEFUN([gl_STRING_MODULE_INDICATOR],
- [
-- dnl Use AC_REQUIRE here, so that the default settings are expanded once only.
-- AC_REQUIRE([gl_HEADER_STRING_H_DEFAULTS])
-+ dnl Ensure to expand the default settings once only.
-+ gl_STRING_H_REQUIRE_DEFAULTS
- gl_MODULE_INDICATOR_SET_VARIABLE([$1])
- dnl Define it also as a C macro, for the benefit of the unit tests.
- gl_MODULE_INDICATOR_FOR_TESTS([$1])
- ])
-
--AC_DEFUN([gl_HEADER_STRING_H_DEFAULTS],
-+# Initializes the default values for AC_SUBSTed shell variables.
-+# This macro must not be AC_REQUIREd. It must only be invoked, and only
-+# outside of macros or in macros that are not AC_REQUIREd.
-+AC_DEFUN([gl_STRING_H_REQUIRE_DEFAULTS],
-+[
-+ m4_defun(GL_MODULE_INDICATOR_PREFIX[_STRING_H_MODULE_INDICATOR_DEFAULTS], [
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_EXPLICIT_BZERO])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_FFSL])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_FFSLL])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_MEMCHR])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_MEMMEM])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_MEMPCPY])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_MEMRCHR])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_RAWMEMCHR])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_STPCPY])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_STPNCPY])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_STRCHRNUL])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_STRDUP])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_STRNCAT])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_STRNDUP])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_STRNLEN])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_STRPBRK])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_STRSEP])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_STRSTR])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_STRCASESTR])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_STRTOK_R])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_MBSLEN])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_MBSNLEN])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_MBSCHR])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_MBSRCHR])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_MBSSTR])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_MBSCASECMP])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_MBSNCASECMP])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_MBSPCASECMP])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_MBSCASESTR])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_MBSCSPN])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_MBSPBRK])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_MBSSPN])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_MBSSEP])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_MBSTOK_R])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_STRERROR])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_STRERROR_R])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_STRERRORNAME_NP])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_SIGABBREV_NP])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_SIGDESCR_NP])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_STRSIGNAL])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_STRVERSCMP])
-+ dnl Support Microsoft deprecated alias function names by default.
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_MDA_MEMCCPY], [1])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_MDA_STRDUP], [1])
-+ ])
-+ m4_require(GL_MODULE_INDICATOR_PREFIX[_STRING_H_MODULE_INDICATOR_DEFAULTS])
-+ AC_REQUIRE([gl_STRING_H_DEFAULTS])
-+])
-+
-+AC_DEFUN([gl_STRING_H_DEFAULTS],
- [
-- GNULIB_EXPLICIT_BZERO=0; AC_SUBST([GNULIB_EXPLICIT_BZERO])
-- GNULIB_FFSL=0; AC_SUBST([GNULIB_FFSL])
-- GNULIB_FFSLL=0; AC_SUBST([GNULIB_FFSLL])
-- GNULIB_MEMCHR=0; AC_SUBST([GNULIB_MEMCHR])
-- GNULIB_MEMMEM=0; AC_SUBST([GNULIB_MEMMEM])
-- GNULIB_MEMPCPY=0; AC_SUBST([GNULIB_MEMPCPY])
-- GNULIB_MEMRCHR=0; AC_SUBST([GNULIB_MEMRCHR])
-- GNULIB_RAWMEMCHR=0; AC_SUBST([GNULIB_RAWMEMCHR])
-- GNULIB_STPCPY=0; AC_SUBST([GNULIB_STPCPY])
-- GNULIB_STPNCPY=0; AC_SUBST([GNULIB_STPNCPY])
-- GNULIB_STRCHRNUL=0; AC_SUBST([GNULIB_STRCHRNUL])
-- GNULIB_STRDUP=0; AC_SUBST([GNULIB_STRDUP])
-- GNULIB_STRNCAT=0; AC_SUBST([GNULIB_STRNCAT])
-- GNULIB_STRNDUP=0; AC_SUBST([GNULIB_STRNDUP])
-- GNULIB_STRNLEN=0; AC_SUBST([GNULIB_STRNLEN])
-- GNULIB_STRPBRK=0; AC_SUBST([GNULIB_STRPBRK])
-- GNULIB_STRSEP=0; AC_SUBST([GNULIB_STRSEP])
-- GNULIB_STRSTR=0; AC_SUBST([GNULIB_STRSTR])
-- GNULIB_STRCASESTR=0; AC_SUBST([GNULIB_STRCASESTR])
-- GNULIB_STRTOK_R=0; AC_SUBST([GNULIB_STRTOK_R])
-- GNULIB_MBSLEN=0; AC_SUBST([GNULIB_MBSLEN])
-- GNULIB_MBSNLEN=0; AC_SUBST([GNULIB_MBSNLEN])
-- GNULIB_MBSCHR=0; AC_SUBST([GNULIB_MBSCHR])
-- GNULIB_MBSRCHR=0; AC_SUBST([GNULIB_MBSRCHR])
-- GNULIB_MBSSTR=0; AC_SUBST([GNULIB_MBSSTR])
-- GNULIB_MBSCASECMP=0; AC_SUBST([GNULIB_MBSCASECMP])
-- GNULIB_MBSNCASECMP=0; AC_SUBST([GNULIB_MBSNCASECMP])
-- GNULIB_MBSPCASECMP=0; AC_SUBST([GNULIB_MBSPCASECMP])
-- GNULIB_MBSCASESTR=0; AC_SUBST([GNULIB_MBSCASESTR])
-- GNULIB_MBSCSPN=0; AC_SUBST([GNULIB_MBSCSPN])
-- GNULIB_MBSPBRK=0; AC_SUBST([GNULIB_MBSPBRK])
-- GNULIB_MBSSPN=0; AC_SUBST([GNULIB_MBSSPN])
-- GNULIB_MBSSEP=0; AC_SUBST([GNULIB_MBSSEP])
-- GNULIB_MBSTOK_R=0; AC_SUBST([GNULIB_MBSTOK_R])
-- GNULIB_STRERROR=0; AC_SUBST([GNULIB_STRERROR])
-- GNULIB_STRERROR_R=0; AC_SUBST([GNULIB_STRERROR_R])
-- GNULIB_STRERRORNAME_NP=0; AC_SUBST([GNULIB_STRERRORNAME_NP])
-- GNULIB_SIGABBREV_NP=0; AC_SUBST([GNULIB_SIGABBREV_NP])
-- GNULIB_SIGDESCR_NP=0; AC_SUBST([GNULIB_SIGDESCR_NP])
-- GNULIB_STRSIGNAL=0; AC_SUBST([GNULIB_STRSIGNAL])
-- GNULIB_STRVERSCMP=0; AC_SUBST([GNULIB_STRVERSCMP])
- HAVE_MBSLEN=0; AC_SUBST([HAVE_MBSLEN])
-- dnl Support Microsoft deprecated alias function names by default.
-- GNULIB_MDA_MEMCCPY=1; AC_SUBST([GNULIB_MDA_MEMCCPY])
-- GNULIB_MDA_STRDUP=1; AC_SUBST([GNULIB_MDA_STRDUP])
- dnl Assume proper GNU behavior unless another module says otherwise.
- HAVE_EXPLICIT_BZERO=1; AC_SUBST([HAVE_EXPLICIT_BZERO])
- HAVE_FFSL=1; AC_SUBST([HAVE_FFSL])
---- a/gnulib/m4/strings_h.m4
-+++ b/gnulib/m4/strings_h.m4
-@@ -1,21 +1,16 @@
- # Configure a replacement for <strings.h>.
--# serial 6
-+# serial 9
-
- # Copyright (C) 2007, 2009-2021 Free Software Foundation, Inc.
- # This file is free software; the Free Software Foundation
- # gives unlimited permission to copy and/or distribute it,
- # with or without modifications, as long as this notice is preserved.
-
--AC_DEFUN([gl_HEADER_STRINGS_H],
-+AC_DEFUN_ONCE([gl_STRINGS_H],
- [
-- dnl Use AC_REQUIRE here, so that the default behavior below is expanded
-- dnl once only, before all statements that occur in other macros.
-- AC_REQUIRE([gl_HEADER_STRINGS_H_BODY])
--])
--
--AC_DEFUN([gl_HEADER_STRINGS_H_BODY],
--[
-- AC_REQUIRE([gl_HEADER_STRINGS_H_DEFAULTS])
-+ dnl Ensure to expand the default settings once only, before all statements
-+ dnl that occur in other macros.
-+ AC_REQUIRE([gl_STRINGS_H_DEFAULTS])
-
- gl_CHECK_NEXT_HEADERS([strings.h])
- if test $ac_cv_header_strings_h = yes; then
-@@ -35,16 +30,31 @@ AC_DEFUN([gl_HEADER_STRINGS_H_BODY],
- ]], [ffs strcasecmp strncasecmp])
- ])
-
-+# gl_STRINGS_MODULE_INDICATOR([modulename])
-+# sets the shell variable that indicates the presence of the given module
-+# to a C preprocessor expression that will evaluate to 1.
-+# This macro invocation must not occur in macros that are AC_REQUIREd.
- AC_DEFUN([gl_STRINGS_MODULE_INDICATOR],
- [
-- dnl Use AC_REQUIRE here, so that the default settings are expanded once only.
-- AC_REQUIRE([gl_HEADER_STRINGS_H_DEFAULTS])
-+ dnl Ensure to expand the default settings once only.
-+ gl_STRINGS_H_REQUIRE_DEFAULTS
- gl_MODULE_INDICATOR_SET_VARIABLE([$1])
- ])
-
--AC_DEFUN([gl_HEADER_STRINGS_H_DEFAULTS],
-+# Initializes the default values for AC_SUBSTed shell variables.
-+# This macro must not be AC_REQUIREd. It must only be invoked, and only
-+# outside of macros or in macros that are not AC_REQUIREd.
-+AC_DEFUN([gl_STRINGS_H_REQUIRE_DEFAULTS],
-+[
-+ m4_defun(GL_MODULE_INDICATOR_PREFIX[_STRINGS_H_MODULE_INDICATOR_DEFAULTS], [
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_FFS])
-+ ])
-+ m4_require(GL_MODULE_INDICATOR_PREFIX[_STRINGS_H_MODULE_INDICATOR_DEFAULTS])
-+ AC_REQUIRE([gl_STRINGS_H_DEFAULTS])
-+])
-+
-+AC_DEFUN([gl_STRINGS_H_DEFAULTS],
- [
-- GNULIB_FFS=0; AC_SUBST([GNULIB_FFS])
- dnl Assume proper GNU behavior unless another module says otherwise.
- HAVE_FFS=1; AC_SUBST([HAVE_FFS])
- HAVE_STRCASECMP=1; AC_SUBST([HAVE_STRCASECMP])
---- a/gnulib/m4/strndup.m4
-+++ b/gnulib/m4/strndup.m4
-@@ -1,4 +1,4 @@
--# strndup.m4 serial 22
-+# strndup.m4 serial 23
- dnl Copyright (C) 2002-2003, 2005-2021 Free Software Foundation, Inc.
- dnl This file is free software; the Free Software Foundation
- dnl gives unlimited permission to copy and/or distribute it,
-@@ -10,7 +10,7 @@ AC_DEFUN([gl_FUNC_STRNDUP],
- AC_REQUIRE([AC_USE_SYSTEM_EXTENSIONS])
-
- AC_REQUIRE([AC_CANONICAL_HOST]) dnl for cross-compiles
-- AC_REQUIRE([gl_HEADER_STRING_H_DEFAULTS])
-+ AC_REQUIRE([gl_STRING_H_DEFAULTS])
- AC_CHECK_DECLS_ONCE([strndup])
- AC_CHECK_FUNCS_ONCE([strndup])
- if test $ac_cv_have_decl_strndup = no; then
---- a/gnulib/m4/strnlen.m4
-+++ b/gnulib/m4/strnlen.m4
-@@ -1,4 +1,4 @@
--# strnlen.m4 serial 13
-+# strnlen.m4 serial 14
- dnl Copyright (C) 2002-2003, 2005-2007, 2009-2021 Free Software Foundation,
- dnl Inc.
- dnl This file is free software; the Free Software Foundation
-@@ -7,7 +7,7 @@ dnl with or without modifications, as lo
-
- AC_DEFUN([gl_FUNC_STRNLEN],
- [
-- AC_REQUIRE([gl_HEADER_STRING_H_DEFAULTS])
-+ AC_REQUIRE([gl_STRING_H_DEFAULTS])
-
- dnl Persuade glibc <string.h> to declare strnlen().
- AC_REQUIRE([AC_USE_SYSTEM_EXTENSIONS])
---- a/gnulib/m4/strstr.m4
-+++ b/gnulib/m4/strstr.m4
-@@ -1,4 +1,4 @@
--# strstr.m4 serial 22
-+# strstr.m4 serial 23
- dnl Copyright (C) 2008-2021 Free Software Foundation, Inc.
- dnl This file is free software; the Free Software Foundation
- dnl gives unlimited permission to copy and/or distribute it,
-@@ -7,7 +7,7 @@ dnl with or without modifications, as lo
- dnl Check that strstr works.
- AC_DEFUN([gl_FUNC_STRSTR_SIMPLE],
- [
-- AC_REQUIRE([gl_HEADER_STRING_H_DEFAULTS])
-+ AC_REQUIRE([gl_STRING_H_DEFAULTS])
- AC_REQUIRE([gl_FUNC_MEMCHR])
- if test $REPLACE_MEMCHR = 1; then
- REPLACE_STRSTR=1
---- a/gnulib/m4/sys_socket_h.m4
-+++ b/gnulib/m4/sys_socket_h.m4
-@@ -1,4 +1,4 @@
--# sys_socket_h.m4 serial 25
-+# sys_socket_h.m4 serial 28
- dnl Copyright (C) 2005-2021 Free Software Foundation, Inc.
- dnl This file is free software; the Free Software Foundation
- dnl gives unlimited permission to copy and/or distribute it,
-@@ -6,7 +6,7 @@ dnl with or without modifications, as lo
-
- dnl From Simon Josefsson.
-
--AC_DEFUN([gl_HEADER_SYS_SOCKET],
-+AC_DEFUN_ONCE([gl_SYS_SOCKET_H],
- [
- AC_REQUIRE([gl_SYS_SOCKET_H_DEFAULTS])
- AC_REQUIRE([AC_CANONICAL_HOST])
-@@ -156,32 +156,47 @@ AC_DEFUN([gl_PREREQ_SYS_H_WS2TCPIP],
- AC_SUBST([HAVE_WS2TCPIP_H])
- ])
-
-+# gl_SYS_SOCKET_MODULE_INDICATOR([modulename])
-+# sets the shell variable that indicates the presence of the given module
-+# to a C preprocessor expression that will evaluate to 1.
-+# This macro invocation must not occur in macros that are AC_REQUIREd.
- AC_DEFUN([gl_SYS_SOCKET_MODULE_INDICATOR],
- [
-- dnl Use AC_REQUIRE here, so that the default settings are expanded once only.
-- AC_REQUIRE([gl_SYS_SOCKET_H_DEFAULTS])
-+ dnl Ensure to expand the default settings once only.
-+ gl_SYS_SOCKET_H_REQUIRE_DEFAULTS
- gl_MODULE_INDICATOR_SET_VARIABLE([$1])
- dnl Define it also as a C macro, for the benefit of the unit tests.
- gl_MODULE_INDICATOR_FOR_TESTS([$1])
- ])
-
-+# Initializes the default values for AC_SUBSTed shell variables.
-+# This macro must not be AC_REQUIREd. It must only be invoked, and only
-+# outside of macros or in macros that are not AC_REQUIREd.
-+AC_DEFUN([gl_SYS_SOCKET_H_REQUIRE_DEFAULTS],
-+[
-+ m4_defun(GL_MODULE_INDICATOR_PREFIX[_SYS_SOCKET_H_MODULE_INDICATOR_DEFAULTS], [
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_SOCKET])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_CONNECT])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_ACCEPT])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_BIND])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_GETPEERNAME])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_GETSOCKNAME])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_GETSOCKOPT])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_LISTEN])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_RECV])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_SEND])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_RECVFROM])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_SENDTO])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_SETSOCKOPT])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_SHUTDOWN])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_ACCEPT4])
-+ ])
-+ m4_require(GL_MODULE_INDICATOR_PREFIX[_SYS_SOCKET_H_MODULE_INDICATOR_DEFAULTS])
-+ AC_REQUIRE([gl_SYS_SOCKET_H_DEFAULTS])
-+])
-+
- AC_DEFUN([gl_SYS_SOCKET_H_DEFAULTS],
- [
-- GNULIB_SOCKET=0; AC_SUBST([GNULIB_SOCKET])
-- GNULIB_CONNECT=0; AC_SUBST([GNULIB_CONNECT])
-- GNULIB_ACCEPT=0; AC_SUBST([GNULIB_ACCEPT])
-- GNULIB_BIND=0; AC_SUBST([GNULIB_BIND])
-- GNULIB_GETPEERNAME=0; AC_SUBST([GNULIB_GETPEERNAME])
-- GNULIB_GETSOCKNAME=0; AC_SUBST([GNULIB_GETSOCKNAME])
-- GNULIB_GETSOCKOPT=0; AC_SUBST([GNULIB_GETSOCKOPT])
-- GNULIB_LISTEN=0; AC_SUBST([GNULIB_LISTEN])
-- GNULIB_RECV=0; AC_SUBST([GNULIB_RECV])
-- GNULIB_SEND=0; AC_SUBST([GNULIB_SEND])
-- GNULIB_RECVFROM=0; AC_SUBST([GNULIB_RECVFROM])
-- GNULIB_SENDTO=0; AC_SUBST([GNULIB_SENDTO])
-- GNULIB_SETSOCKOPT=0; AC_SUBST([GNULIB_SETSOCKOPT])
-- GNULIB_SHUTDOWN=0; AC_SUBST([GNULIB_SHUTDOWN])
-- GNULIB_ACCEPT4=0; AC_SUBST([GNULIB_ACCEPT4])
- HAVE_STRUCT_SOCKADDR_STORAGE=1; AC_SUBST([HAVE_STRUCT_SOCKADDR_STORAGE])
- HAVE_STRUCT_SOCKADDR_STORAGE_SS_FAMILY=1;
- AC_SUBST([HAVE_STRUCT_SOCKADDR_STORAGE_SS_FAMILY])
---- a/gnulib/m4/sys_stat_h.m4
-+++ b/gnulib/m4/sys_stat_h.m4
-@@ -1,4 +1,4 @@
--# sys_stat_h.m4 serial 38 -*- Autoconf -*-
-+# sys_stat_h.m4 serial 41 -*- Autoconf -*-
- dnl Copyright (C) 2006-2021 Free Software Foundation, Inc.
- dnl This file is free software; the Free Software Foundation
- dnl gives unlimited permission to copy and/or distribute it,
-@@ -7,7 +7,7 @@ dnl with or without modifications, as lo
- dnl From Eric Blake.
- dnl Provide a GNU-like <sys/stat.h>.
-
--AC_DEFUN([gl_HEADER_SYS_STAT_H],
-+AC_DEFUN_ONCE([gl_SYS_STAT_H],
- [
- AC_REQUIRE([gl_SYS_STAT_H_DEFAULTS])
-
-@@ -52,38 +52,53 @@ AC_DEFUN([gl_HEADER_SYS_STAT_H],
- AC_REQUIRE([AC_C_RESTRICT])
- ])
-
-+# gl_SYS_STAT_MODULE_INDICATOR([modulename])
-+# sets the shell variable that indicates the presence of the given module
-+# to a C preprocessor expression that will evaluate to 1.
-+# This macro invocation must not occur in macros that are AC_REQUIREd.
- AC_DEFUN([gl_SYS_STAT_MODULE_INDICATOR],
- [
-- dnl Use AC_REQUIRE here, so that the default settings are expanded once only.
-- AC_REQUIRE([gl_SYS_STAT_H_DEFAULTS])
-+ dnl Ensure to expand the default settings once only.
-+ gl_SYS_STAT_H_REQUIRE_DEFAULTS
- gl_MODULE_INDICATOR_SET_VARIABLE([$1])
- dnl Define it also as a C macro, for the benefit of the unit tests.
- gl_MODULE_INDICATOR_FOR_TESTS([$1])
- ])
-
-+# Initializes the default values for AC_SUBSTed shell variables.
-+# This macro must not be AC_REQUIREd. It must only be invoked, and only
-+# outside of macros or in macros that are not AC_REQUIREd.
-+AC_DEFUN([gl_SYS_STAT_H_REQUIRE_DEFAULTS],
-+[
-+ m4_defun(GL_MODULE_INDICATOR_PREFIX[_SYS_STAT_H_MODULE_INDICATOR_DEFAULTS], [
-+ gl_UNISTD_H_REQUIRE_DEFAULTS dnl for REPLACE_FCHDIR
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_FCHMODAT])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_FSTAT])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_FSTATAT])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_FUTIMENS])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_GETUMASK])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_LCHMOD])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_LSTAT])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_MKDIR])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_MKDIRAT])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_MKFIFO])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_MKFIFOAT])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_MKNOD])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_MKNODAT])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_STAT])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_UTIMENSAT])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_OVERRIDES_STRUCT_STAT])
-+ dnl Support Microsoft deprecated alias function names by default.
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_MDA_CHMOD], [1])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_MDA_MKDIR], [1])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_MDA_UMASK], [1])
-+ ])
-+ m4_require(GL_MODULE_INDICATOR_PREFIX[_SYS_STAT_H_MODULE_INDICATOR_DEFAULTS])
-+ AC_REQUIRE([gl_SYS_STAT_H_DEFAULTS])
-+])
-+
- AC_DEFUN([gl_SYS_STAT_H_DEFAULTS],
- [
-- AC_REQUIRE([gl_UNISTD_H_DEFAULTS]) dnl for REPLACE_FCHDIR
-- GNULIB_FCHMODAT=0; AC_SUBST([GNULIB_FCHMODAT])
-- GNULIB_FSTAT=0; AC_SUBST([GNULIB_FSTAT])
-- GNULIB_FSTATAT=0; AC_SUBST([GNULIB_FSTATAT])
-- GNULIB_FUTIMENS=0; AC_SUBST([GNULIB_FUTIMENS])
-- GNULIB_GETUMASK=0; AC_SUBST([GNULIB_GETUMASK])
-- GNULIB_LCHMOD=0; AC_SUBST([GNULIB_LCHMOD])
-- GNULIB_LSTAT=0; AC_SUBST([GNULIB_LSTAT])
-- GNULIB_MKDIR=0; AC_SUBST([GNULIB_MKDIR])
-- GNULIB_MKDIRAT=0; AC_SUBST([GNULIB_MKDIRAT])
-- GNULIB_MKFIFO=0; AC_SUBST([GNULIB_MKFIFO])
-- GNULIB_MKFIFOAT=0; AC_SUBST([GNULIB_MKFIFOAT])
-- GNULIB_MKNOD=0; AC_SUBST([GNULIB_MKNOD])
-- GNULIB_MKNODAT=0; AC_SUBST([GNULIB_MKNODAT])
-- GNULIB_STAT=0; AC_SUBST([GNULIB_STAT])
-- GNULIB_UTIMENSAT=0; AC_SUBST([GNULIB_UTIMENSAT])
-- GNULIB_OVERRIDES_STRUCT_STAT=0; AC_SUBST([GNULIB_OVERRIDES_STRUCT_STAT])
-- dnl Support Microsoft deprecated alias function names by default.
-- GNULIB_MDA_CHMOD=1; AC_SUBST([GNULIB_MDA_CHMOD])
-- GNULIB_MDA_MKDIR=1; AC_SUBST([GNULIB_MDA_MKDIR])
-- GNULIB_MDA_UMASK=1; AC_SUBST([GNULIB_MDA_UMASK])
- dnl Assume proper GNU behavior unless another module says otherwise.
- HAVE_FCHMODAT=1; AC_SUBST([HAVE_FCHMODAT])
- HAVE_FSTATAT=1; AC_SUBST([HAVE_FSTATAT])
---- a/gnulib/m4/sys_types_h.m4
-+++ b/gnulib/m4/sys_types_h.m4
-@@ -1,4 +1,4 @@
--# sys_types_h.m4 serial 11
-+# sys_types_h.m4 serial 13
- dnl Copyright (C) 2011-2021 Free Software Foundation, Inc.
- dnl This file is free software; the Free Software Foundation
- dnl gives unlimited permission to copy and/or distribute it,
-@@ -6,10 +6,11 @@ dnl with or without modifications, as lo
-
- AC_DEFUN_ONCE([gl_SYS_TYPES_H],
- [
-+ AC_REQUIRE([gl_SYS_TYPES_H_DEFAULTS])
-+
- dnl Use sane struct stat types in OpenVMS 8.2 and later.
- AC_DEFINE([_USE_STD_STAT], 1, [For standard stat data types on VMS.])
-
-- AC_REQUIRE([gl_SYS_TYPES_H_DEFAULTS])
- gl_NEXT_HEADERS([sys/types.h])
-
- dnl Ensure the type pid_t gets defined.
-@@ -30,6 +31,17 @@ AC_DEFUN_ONCE([gl_SYS_TYPES_H],
- AC_SUBST([WINDOWS_STAT_INODES])
- ])
-
-+# Initializes the default values for AC_SUBSTed shell variables.
-+# This macro must not be AC_REQUIREd. It must only be invoked, and only
-+# outside of macros or in macros that are not AC_REQUIREd.
-+AC_DEFUN([gl_SYS_TYPES_H_REQUIRE_DEFAULTS],
-+[
-+ m4_defun(GL_MODULE_INDICATOR_PREFIX[_SYS_TYPE_H_MODULE_INDICATOR_DEFAULTS], [
-+ ])
-+ m4_require(GL_MODULE_INDICATOR_PREFIX[_SYS_TYPE_H_MODULE_INDICATOR_DEFAULTS])
-+ AC_REQUIRE([gl_SYS_TYPES_H_DEFAULTS])
-+])
-+
- AC_DEFUN([gl_SYS_TYPES_H_DEFAULTS],
- [
- ])
---- a/gnulib/m4/time_h.m4
-+++ b/gnulib/m4/time_h.m4
-@@ -2,7 +2,7 @@
-
- # Copyright (C) 2000-2001, 2003-2007, 2009-2021 Free Software Foundation, Inc.
-
--# serial 15
-+# serial 18
-
- # This file is free software; the Free Software Foundation
- # gives unlimited permission to copy and/or distribute it,
-@@ -10,16 +10,11 @@
-
- # Written by Paul Eggert and Jim Meyering.
-
--AC_DEFUN([gl_HEADER_TIME_H],
-+AC_DEFUN_ONCE([gl_TIME_H],
- [
-- dnl Use AC_REQUIRE here, so that the default behavior below is expanded
-- dnl once only, before all statements that occur in other macros.
-- AC_REQUIRE([gl_HEADER_TIME_H_BODY])
--])
--
--AC_DEFUN([gl_HEADER_TIME_H_BODY],
--[
-- AC_REQUIRE([gl_HEADER_TIME_H_DEFAULTS])
-+ dnl Ensure to expand the default settings once only, before all statements
-+ dnl that occur in other macros.
-+ AC_REQUIRE([gl_TIME_H_DEFAULTS])
-
- gl_NEXT_HEADERS([time.h])
- AC_REQUIRE([gl_CHECK_TYPE_STRUCT_TIMESPEC])
-@@ -111,30 +106,45 @@ AC_DEFUN([gl_CHECK_TYPE_STRUCT_TIMESPEC]
- AC_SUBST([UNISTD_H_DEFINES_STRUCT_TIMESPEC])
- ])
-
-+# gl_TIME_MODULE_INDICATOR([modulename])
-+# sets the shell variable that indicates the presence of the given module
-+# to a C preprocessor expression that will evaluate to 1.
-+# This macro invocation must not occur in macros that are AC_REQUIREd.
- AC_DEFUN([gl_TIME_MODULE_INDICATOR],
- [
-- dnl Use AC_REQUIRE here, so that the default settings are expanded once only.
-- AC_REQUIRE([gl_HEADER_TIME_H_DEFAULTS])
-+ dnl Ensure to expand the default settings once only.
-+ gl_TIME_H_REQUIRE_DEFAULTS
- gl_MODULE_INDICATOR_SET_VARIABLE([$1])
- dnl Define it also as a C macro, for the benefit of the unit tests.
- gl_MODULE_INDICATOR_FOR_TESTS([$1])
- ])
-
--AC_DEFUN([gl_HEADER_TIME_H_DEFAULTS],
-+# Initializes the default values for AC_SUBSTed shell variables.
-+# This macro must not be AC_REQUIREd. It must only be invoked, and only
-+# outside of macros or in macros that are not AC_REQUIREd.
-+AC_DEFUN([gl_TIME_H_REQUIRE_DEFAULTS],
-+[
-+ m4_defun(GL_MODULE_INDICATOR_PREFIX[_TIME_H_MODULE_INDICATOR_DEFAULTS], [
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_CTIME])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_MKTIME])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_LOCALTIME])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_NANOSLEEP])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_STRFTIME])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_STRPTIME])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_TIMEGM])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_TIMESPEC_GET])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_TIME_R])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_TIME_RZ])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_TZSET])
-+ dnl Support Microsoft deprecated alias function names by default.
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_MDA_TZSET], [1])
-+ ])
-+ m4_require(GL_MODULE_INDICATOR_PREFIX[_TIME_H_MODULE_INDICATOR_DEFAULTS])
-+ AC_REQUIRE([gl_TIME_H_DEFAULTS])
-+])
-+
-+AC_DEFUN([gl_TIME_H_DEFAULTS],
- [
-- GNULIB_CTIME=0; AC_SUBST([GNULIB_CTIME])
-- GNULIB_MKTIME=0; AC_SUBST([GNULIB_MKTIME])
-- GNULIB_LOCALTIME=0; AC_SUBST([GNULIB_LOCALTIME])
-- GNULIB_NANOSLEEP=0; AC_SUBST([GNULIB_NANOSLEEP])
-- GNULIB_STRFTIME=0; AC_SUBST([GNULIB_STRFTIME])
-- GNULIB_STRPTIME=0; AC_SUBST([GNULIB_STRPTIME])
-- GNULIB_TIMEGM=0; AC_SUBST([GNULIB_TIMEGM])
-- GNULIB_TIMESPEC_GET=0; AC_SUBST([GNULIB_TIMESPEC_GET])
-- GNULIB_TIME_R=0; AC_SUBST([GNULIB_TIME_R])
-- GNULIB_TIME_RZ=0; AC_SUBST([GNULIB_TIME_RZ])
-- GNULIB_TZSET=0; AC_SUBST([GNULIB_TZSET])
-- dnl Support Microsoft deprecated alias function names by default.
-- GNULIB_MDA_TZSET=1; AC_SUBST([GNULIB_MDA_TZSET])
- dnl Assume proper GNU behavior unless another module says otherwise.
- HAVE_DECL_LOCALTIME_R=1; AC_SUBST([HAVE_DECL_LOCALTIME_R])
- HAVE_NANOSLEEP=1; AC_SUBST([HAVE_NANOSLEEP])
---- a/gnulib/m4/unistd_h.m4
-+++ b/gnulib/m4/unistd_h.m4
-@@ -1,4 +1,4 @@
--# unistd_h.m4 serial 85
-+# unistd_h.m4 serial 89
- dnl Copyright (C) 2006-2021 Free Software Foundation, Inc.
- dnl This file is free software; the Free Software Foundation
- dnl gives unlimited permission to copy and/or distribute it,
-@@ -6,10 +6,10 @@ dnl with or without modifications, as lo
-
- dnl Written by Simon Josefsson, Bruno Haible.
-
--AC_DEFUN([gl_UNISTD_H],
-+AC_DEFUN_ONCE([gl_UNISTD_H],
- [
-- dnl Use AC_REQUIRE here, so that the default behavior below is expanded
-- dnl once only, before all statements that occur in other macros.
-+ dnl Ensure to expand the default settings once only, before all statements
-+ dnl that occur in other macros.
- AC_REQUIRE([gl_UNISTD_H_DEFAULTS])
-
- gl_CHECK_NEXT_HEADERS([unistd.h])
-@@ -59,100 +59,116 @@ AC_DEFUN([gl_UNISTD_H],
- fi
- ])
-
-+# gl_UNISTD_MODULE_INDICATOR([modulename])
-+# sets the shell variable that indicates the presence of the given module
-+# to a C preprocessor expression that will evaluate to 1.
-+# This macro invocation must not occur in macros that are AC_REQUIREd.
- AC_DEFUN([gl_UNISTD_MODULE_INDICATOR],
- [
-- dnl Use AC_REQUIRE here, so that the default settings are expanded once only.
-- AC_REQUIRE([gl_UNISTD_H_DEFAULTS])
-+ dnl Ensure to expand the default settings once only.
-+ gl_UNISTD_H_REQUIRE_DEFAULTS
- gl_MODULE_INDICATOR_SET_VARIABLE([$1])
- dnl Define it also as a C macro, for the benefit of the unit tests.
- gl_MODULE_INDICATOR_FOR_TESTS([$1])
- ])
-
-+# Initializes the default values for AC_SUBSTed shell variables.
-+# This macro must not be AC_REQUIREd. It must only be invoked, and only
-+# outside of macros or in macros that are not AC_REQUIREd.
-+AC_DEFUN([gl_UNISTD_H_REQUIRE_DEFAULTS],
-+[
-+ m4_defun(GL_MODULE_INDICATOR_PREFIX[_UNISTD_H_MODULE_INDICATOR_DEFAULTS], [
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_ACCESS])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_CHDIR])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_CHOWN])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_CLOSE])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_COPY_FILE_RANGE])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_DUP])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_DUP2])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_DUP3])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_ENVIRON])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_EUIDACCESS])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_EXECL])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_EXECLE])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_EXECLP])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_EXECV])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_EXECVE])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_EXECVP])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_EXECVPE])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_FACCESSAT])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_FCHDIR])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_FCHOWNAT])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_FDATASYNC])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_FSYNC])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_FTRUNCATE])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_GETCWD])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_GETDOMAINNAME])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_GETDTABLESIZE])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_GETENTROPY])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_GETGROUPS])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_GETHOSTNAME])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_GETLOGIN])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_GETLOGIN_R])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_GETOPT_POSIX])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_GETPAGESIZE])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_GETPASS])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_GETUSERSHELL])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_GROUP_MEMBER])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_ISATTY])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_LCHOWN])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_LINK])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_LINKAT])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_LSEEK])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_PIPE])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_PIPE2])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_PREAD])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_PWRITE])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_READ])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_READLINK])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_READLINKAT])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_RMDIR])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_SETHOSTNAME])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_SLEEP])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_SYMLINK])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_SYMLINKAT])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_TRUNCATE])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_TTYNAME_R])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_UNISTD_H_GETOPT])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_UNISTD_H_NONBLOCKING])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_UNISTD_H_SIGPIPE])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_UNLINK])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_UNLINKAT])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_USLEEP])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_WRITE])
-+ dnl Support Microsoft deprecated alias function names by default.
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_MDA_ACCESS], [1])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_MDA_CHDIR], [1])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_MDA_CLOSE], [1])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_MDA_DUP], [1])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_MDA_DUP2], [1])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_MDA_EXECL], [1])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_MDA_EXECLE], [1])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_MDA_EXECLP], [1])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_MDA_EXECV], [1])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_MDA_EXECVE], [1])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_MDA_EXECVP], [1])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_MDA_EXECVPE], [1])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_MDA_GETCWD], [1])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_MDA_GETPID], [1])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_MDA_ISATTY], [1])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_MDA_LSEEK], [1])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_MDA_READ], [1])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_MDA_RMDIR], [1])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_MDA_SWAB], [1])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_MDA_UNLINK], [1])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_MDA_WRITE], [1])
-+ ])
-+ m4_require(GL_MODULE_INDICATOR_PREFIX[_UNISTD_H_MODULE_INDICATOR_DEFAULTS])
-+ AC_REQUIRE([gl_UNISTD_H_DEFAULTS])
-+])
-+
- AC_DEFUN([gl_UNISTD_H_DEFAULTS],
- [
-- GNULIB_ACCESS=0; AC_SUBST([GNULIB_ACCESS])
-- GNULIB_CHDIR=0; AC_SUBST([GNULIB_CHDIR])
-- GNULIB_CHOWN=0; AC_SUBST([GNULIB_CHOWN])
-- GNULIB_CLOSE=0; AC_SUBST([GNULIB_CLOSE])
-- GNULIB_COPY_FILE_RANGE=0; AC_SUBST([GNULIB_COPY_FILE_RANGE])
-- GNULIB_DUP=0; AC_SUBST([GNULIB_DUP])
-- GNULIB_DUP2=0; AC_SUBST([GNULIB_DUP2])
-- GNULIB_DUP3=0; AC_SUBST([GNULIB_DUP3])
-- GNULIB_ENVIRON=0; AC_SUBST([GNULIB_ENVIRON])
-- GNULIB_EUIDACCESS=0; AC_SUBST([GNULIB_EUIDACCESS])
-- GNULIB_EXECL=0; AC_SUBST([GNULIB_EXECL])
-- GNULIB_EXECLE=0; AC_SUBST([GNULIB_EXECLE])
-- GNULIB_EXECLP=0; AC_SUBST([GNULIB_EXECLP])
-- GNULIB_EXECV=0; AC_SUBST([GNULIB_EXECV])
-- GNULIB_EXECVE=0; AC_SUBST([GNULIB_EXECVE])
-- GNULIB_EXECVP=0; AC_SUBST([GNULIB_EXECVP])
-- GNULIB_EXECVPE=0; AC_SUBST([GNULIB_EXECVPE])
-- GNULIB_FACCESSAT=0; AC_SUBST([GNULIB_FACCESSAT])
-- GNULIB_FCHDIR=0; AC_SUBST([GNULIB_FCHDIR])
-- GNULIB_FCHOWNAT=0; AC_SUBST([GNULIB_FCHOWNAT])
-- GNULIB_FDATASYNC=0; AC_SUBST([GNULIB_FDATASYNC])
-- GNULIB_FSYNC=0; AC_SUBST([GNULIB_FSYNC])
-- GNULIB_FTRUNCATE=0; AC_SUBST([GNULIB_FTRUNCATE])
-- GNULIB_GETCWD=0; AC_SUBST([GNULIB_GETCWD])
-- GNULIB_GETDOMAINNAME=0; AC_SUBST([GNULIB_GETDOMAINNAME])
-- GNULIB_GETDTABLESIZE=0; AC_SUBST([GNULIB_GETDTABLESIZE])
-- GNULIB_GETENTROPY=0; AC_SUBST([GNULIB_GETENTROPY])
-- GNULIB_GETGROUPS=0; AC_SUBST([GNULIB_GETGROUPS])
-- GNULIB_GETHOSTNAME=0; AC_SUBST([GNULIB_GETHOSTNAME])
-- GNULIB_GETLOGIN=0; AC_SUBST([GNULIB_GETLOGIN])
-- GNULIB_GETLOGIN_R=0; AC_SUBST([GNULIB_GETLOGIN_R])
-- GNULIB_GETOPT_POSIX=0; AC_SUBST([GNULIB_GETOPT_POSIX])
-- GNULIB_GETPAGESIZE=0; AC_SUBST([GNULIB_GETPAGESIZE])
-- GNULIB_GETPASS=0; AC_SUBST([GNULIB_GETPASS])
-- GNULIB_GETUSERSHELL=0; AC_SUBST([GNULIB_GETUSERSHELL])
-- GNULIB_GROUP_MEMBER=0; AC_SUBST([GNULIB_GROUP_MEMBER])
-- GNULIB_ISATTY=0; AC_SUBST([GNULIB_ISATTY])
-- GNULIB_LCHOWN=0; AC_SUBST([GNULIB_LCHOWN])
-- GNULIB_LINK=0; AC_SUBST([GNULIB_LINK])
-- GNULIB_LINKAT=0; AC_SUBST([GNULIB_LINKAT])
-- GNULIB_LSEEK=0; AC_SUBST([GNULIB_LSEEK])
-- GNULIB_PIPE=0; AC_SUBST([GNULIB_PIPE])
-- GNULIB_PIPE2=0; AC_SUBST([GNULIB_PIPE2])
-- GNULIB_PREAD=0; AC_SUBST([GNULIB_PREAD])
-- GNULIB_PWRITE=0; AC_SUBST([GNULIB_PWRITE])
-- GNULIB_READ=0; AC_SUBST([GNULIB_READ])
-- GNULIB_READLINK=0; AC_SUBST([GNULIB_READLINK])
-- GNULIB_READLINKAT=0; AC_SUBST([GNULIB_READLINKAT])
-- GNULIB_RMDIR=0; AC_SUBST([GNULIB_RMDIR])
-- GNULIB_SETHOSTNAME=0; AC_SUBST([GNULIB_SETHOSTNAME])
-- GNULIB_SLEEP=0; AC_SUBST([GNULIB_SLEEP])
-- GNULIB_SYMLINK=0; AC_SUBST([GNULIB_SYMLINK])
-- GNULIB_SYMLINKAT=0; AC_SUBST([GNULIB_SYMLINKAT])
-- GNULIB_TRUNCATE=0; AC_SUBST([GNULIB_TRUNCATE])
-- GNULIB_TTYNAME_R=0; AC_SUBST([GNULIB_TTYNAME_R])
-- GNULIB_UNISTD_H_NONBLOCKING=0; AC_SUBST([GNULIB_UNISTD_H_NONBLOCKING])
-- GNULIB_UNISTD_H_SIGPIPE=0; AC_SUBST([GNULIB_UNISTD_H_SIGPIPE])
-- GNULIB_UNLINK=0; AC_SUBST([GNULIB_UNLINK])
-- GNULIB_UNLINKAT=0; AC_SUBST([GNULIB_UNLINKAT])
-- GNULIB_USLEEP=0; AC_SUBST([GNULIB_USLEEP])
-- GNULIB_WRITE=0; AC_SUBST([GNULIB_WRITE])
-- dnl Support Microsoft deprecated alias function names by default.
-- GNULIB_MDA_ACCESS=1; AC_SUBST([GNULIB_MDA_ACCESS])
-- GNULIB_MDA_CHDIR=1; AC_SUBST([GNULIB_MDA_CHDIR])
-- GNULIB_MDA_CLOSE=1; AC_SUBST([GNULIB_MDA_CLOSE])
-- GNULIB_MDA_DUP=1; AC_SUBST([GNULIB_MDA_DUP])
-- GNULIB_MDA_DUP2=1; AC_SUBST([GNULIB_MDA_DUP2])
-- GNULIB_MDA_EXECL=1; AC_SUBST([GNULIB_MDA_EXECL])
-- GNULIB_MDA_EXECLE=1; AC_SUBST([GNULIB_MDA_EXECLE])
-- GNULIB_MDA_EXECLP=1; AC_SUBST([GNULIB_MDA_EXECLP])
-- GNULIB_MDA_EXECV=1; AC_SUBST([GNULIB_MDA_EXECV])
-- GNULIB_MDA_EXECVE=1; AC_SUBST([GNULIB_MDA_EXECVE])
-- GNULIB_MDA_EXECVP=1; AC_SUBST([GNULIB_MDA_EXECVP])
-- GNULIB_MDA_EXECVPE=1; AC_SUBST([GNULIB_MDA_EXECVPE])
-- GNULIB_MDA_GETCWD=1; AC_SUBST([GNULIB_MDA_GETCWD])
-- GNULIB_MDA_GETPID=1; AC_SUBST([GNULIB_MDA_GETPID])
-- GNULIB_MDA_ISATTY=1; AC_SUBST([GNULIB_MDA_ISATTY])
-- GNULIB_MDA_LSEEK=1; AC_SUBST([GNULIB_MDA_LSEEK])
-- GNULIB_MDA_READ=1; AC_SUBST([GNULIB_MDA_READ])
-- GNULIB_MDA_RMDIR=1; AC_SUBST([GNULIB_MDA_RMDIR])
-- GNULIB_MDA_SWAB=1; AC_SUBST([GNULIB_MDA_SWAB])
-- GNULIB_MDA_UNLINK=1; AC_SUBST([GNULIB_MDA_UNLINK])
-- GNULIB_MDA_WRITE=1; AC_SUBST([GNULIB_MDA_WRITE])
- dnl Assume proper GNU behavior unless another module says otherwise.
- HAVE_CHOWN=1; AC_SUBST([HAVE_CHOWN])
- HAVE_COPY_FILE_RANGE=1; AC_SUBST([HAVE_COPY_FILE_RANGE])
---- a/gnulib/m4/visibility.m4
-+++ b/gnulib/m4/visibility.m4
-@@ -1,4 +1,4 @@
--# visibility.m4 serial 7
-+# visibility.m4 serial 8
- dnl Copyright (C) 2005, 2008, 2010-2021 Free Software Foundation, Inc.
- dnl This file is free software; the Free Software Foundation
- dnl gives unlimited permission to copy and/or distribute it,
-@@ -59,6 +59,10 @@ AC_DEFUN([gl_VISIBILITY],
- extern __attribute__((__visibility__("hidden"))) int hiddenfunc (void);
- extern __attribute__((__visibility__("default"))) int exportedfunc (void);
- void dummyfunc (void);
-+ int hiddenvar;
-+ int exportedvar;
-+ int hiddenfunc (void) { return 51; }
-+ int exportedfunc (void) { return 1225736919; }
- void dummyfunc (void) {}
- ]],
- [[]])],
---- a/gnulib/m4/wchar_h.m4
-+++ b/gnulib/m4/wchar_h.m4
-@@ -7,9 +7,9 @@ dnl with or without modifications, as lo
-
- dnl Written by Eric Blake.
-
--# wchar_h.m4 serial 50
-+# wchar_h.m4 serial 53
-
--AC_DEFUN([gl_WCHAR_H],
-+AC_DEFUN_ONCE([gl_WCHAR_H],
- [
- AC_REQUIRE([gl_WCHAR_H_DEFAULTS])
- AC_REQUIRE([gl_WCHAR_H_INLINE_OK])
-@@ -125,60 +125,75 @@ Configuration aborted.])
- fi
- ])
-
-+# gl_WCHAR_MODULE_INDICATOR([modulename])
-+# sets the shell variable that indicates the presence of the given module
-+# to a C preprocessor expression that will evaluate to 1.
-+# This macro invocation must not occur in macros that are AC_REQUIREd.
- AC_DEFUN([gl_WCHAR_MODULE_INDICATOR],
- [
-- dnl Use AC_REQUIRE here, so that the default settings are expanded once only.
-- AC_REQUIRE([gl_WCHAR_H_DEFAULTS])
-+ dnl Ensure to expand the default settings once only.
-+ gl_WCHAR_H_REQUIRE_DEFAULTS
- gl_MODULE_INDICATOR_SET_VARIABLE([$1])
- dnl Define it also as a C macro, for the benefit of the unit tests.
- gl_MODULE_INDICATOR_FOR_TESTS([$1])
- ])
-
-+# Initializes the default values for AC_SUBSTed shell variables.
-+# This macro must not be AC_REQUIREd. It must only be invoked, and only
-+# outside of macros or in macros that are not AC_REQUIREd.
-+AC_DEFUN([gl_WCHAR_H_REQUIRE_DEFAULTS],
-+[
-+ m4_defun(GL_MODULE_INDICATOR_PREFIX[_WCHAR_H_MODULE_INDICATOR_DEFAULTS], [
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_BTOWC])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_WCTOB])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_MBSINIT])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_MBRTOWC])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_MBRLEN])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_MBSRTOWCS])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_MBSNRTOWCS])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_WCRTOMB])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_WCSRTOMBS])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_WCSNRTOMBS])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_WCWIDTH])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_WMEMCHR])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_WMEMCMP])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_WMEMCPY])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_WMEMMOVE])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_WMEMPCPY])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_WMEMSET])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_WCSLEN])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_WCSNLEN])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_WCSCPY])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_WCPCPY])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_WCSNCPY])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_WCPNCPY])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_WCSCAT])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_WCSNCAT])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_WCSCMP])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_WCSNCMP])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_WCSCASECMP])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_WCSNCASECMP])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_WCSCOLL])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_WCSXFRM])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_WCSDUP])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_WCSCHR])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_WCSRCHR])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_WCSCSPN])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_WCSSPN])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_WCSPBRK])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_WCSSTR])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_WCSTOK])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_WCSWIDTH])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_WCSFTIME])
-+ dnl Support Microsoft deprecated alias function names by default.
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_MDA_WCSDUP], [1])
-+ ])
-+ m4_require(GL_MODULE_INDICATOR_PREFIX[_WCHAR_H_MODULE_INDICATOR_DEFAULTS])
-+ AC_REQUIRE([gl_WCHAR_H_DEFAULTS])
-+])
-+
- AC_DEFUN([gl_WCHAR_H_DEFAULTS],
- [
-- GNULIB_BTOWC=0; AC_SUBST([GNULIB_BTOWC])
-- GNULIB_WCTOB=0; AC_SUBST([GNULIB_WCTOB])
-- GNULIB_MBSINIT=0; AC_SUBST([GNULIB_MBSINIT])
-- GNULIB_MBRTOWC=0; AC_SUBST([GNULIB_MBRTOWC])
-- GNULIB_MBRLEN=0; AC_SUBST([GNULIB_MBRLEN])
-- GNULIB_MBSRTOWCS=0; AC_SUBST([GNULIB_MBSRTOWCS])
-- GNULIB_MBSNRTOWCS=0; AC_SUBST([GNULIB_MBSNRTOWCS])
-- GNULIB_WCRTOMB=0; AC_SUBST([GNULIB_WCRTOMB])
-- GNULIB_WCSRTOMBS=0; AC_SUBST([GNULIB_WCSRTOMBS])
-- GNULIB_WCSNRTOMBS=0; AC_SUBST([GNULIB_WCSNRTOMBS])
-- GNULIB_WCWIDTH=0; AC_SUBST([GNULIB_WCWIDTH])
-- GNULIB_WMEMCHR=0; AC_SUBST([GNULIB_WMEMCHR])
-- GNULIB_WMEMCMP=0; AC_SUBST([GNULIB_WMEMCMP])
-- GNULIB_WMEMCPY=0; AC_SUBST([GNULIB_WMEMCPY])
-- GNULIB_WMEMMOVE=0; AC_SUBST([GNULIB_WMEMMOVE])
-- GNULIB_WMEMPCPY=0; AC_SUBST([GNULIB_WMEMPCPY])
-- GNULIB_WMEMSET=0; AC_SUBST([GNULIB_WMEMSET])
-- GNULIB_WCSLEN=0; AC_SUBST([GNULIB_WCSLEN])
-- GNULIB_WCSNLEN=0; AC_SUBST([GNULIB_WCSNLEN])
-- GNULIB_WCSCPY=0; AC_SUBST([GNULIB_WCSCPY])
-- GNULIB_WCPCPY=0; AC_SUBST([GNULIB_WCPCPY])
-- GNULIB_WCSNCPY=0; AC_SUBST([GNULIB_WCSNCPY])
-- GNULIB_WCPNCPY=0; AC_SUBST([GNULIB_WCPNCPY])
-- GNULIB_WCSCAT=0; AC_SUBST([GNULIB_WCSCAT])
-- GNULIB_WCSNCAT=0; AC_SUBST([GNULIB_WCSNCAT])
-- GNULIB_WCSCMP=0; AC_SUBST([GNULIB_WCSCMP])
-- GNULIB_WCSNCMP=0; AC_SUBST([GNULIB_WCSNCMP])
-- GNULIB_WCSCASECMP=0; AC_SUBST([GNULIB_WCSCASECMP])
-- GNULIB_WCSNCASECMP=0; AC_SUBST([GNULIB_WCSNCASECMP])
-- GNULIB_WCSCOLL=0; AC_SUBST([GNULIB_WCSCOLL])
-- GNULIB_WCSXFRM=0; AC_SUBST([GNULIB_WCSXFRM])
-- GNULIB_WCSDUP=0; AC_SUBST([GNULIB_WCSDUP])
-- GNULIB_WCSCHR=0; AC_SUBST([GNULIB_WCSCHR])
-- GNULIB_WCSRCHR=0; AC_SUBST([GNULIB_WCSRCHR])
-- GNULIB_WCSCSPN=0; AC_SUBST([GNULIB_WCSCSPN])
-- GNULIB_WCSSPN=0; AC_SUBST([GNULIB_WCSSPN])
-- GNULIB_WCSPBRK=0; AC_SUBST([GNULIB_WCSPBRK])
-- GNULIB_WCSSTR=0; AC_SUBST([GNULIB_WCSSTR])
-- GNULIB_WCSTOK=0; AC_SUBST([GNULIB_WCSTOK])
-- GNULIB_WCSWIDTH=0; AC_SUBST([GNULIB_WCSWIDTH])
-- GNULIB_WCSFTIME=0; AC_SUBST([GNULIB_WCSFTIME])
-- dnl Support Microsoft deprecated alias function names by default.
-- GNULIB_MDA_WCSDUP=1; AC_SUBST([GNULIB_MDA_WCSDUP])
- dnl Assume proper GNU behavior unless another module says otherwise.
- HAVE_BTOWC=1; AC_SUBST([HAVE_BTOWC])
- HAVE_MBSINIT=1; AC_SUBST([HAVE_MBSINIT])
---- a/gnulib/m4/wctype_h.m4
-+++ b/gnulib/m4/wctype_h.m4
-@@ -1,4 +1,4 @@
--# wctype_h.m4 serial 26
-+# wctype_h.m4 serial 30
-
- dnl A placeholder for ISO C99 <wctype.h>, for platforms that lack it.
-
-@@ -9,7 +9,7 @@ dnl with or without modifications, as lo
-
- dnl Written by Paul Eggert.
-
--AC_DEFUN([gl_WCTYPE_H],
-+AC_DEFUN_ONCE([gl_WCTYPE_H],
- [
- AC_REQUIRE([gl_WCTYPE_H_DEFAULTS])
- AC_REQUIRE([AC_PROG_CC])
-@@ -62,7 +62,7 @@ AC_DEFUN([gl_WCTYPE_H],
- fi
- AC_SUBST([HAVE_WCTYPE_H])
-
-- if test $GNULIB_OVERRIDES_WINT_T = 1; then
-+ if test $GNULIBHEADERS_OVERRIDE_WINT_T = 1; then
- REPLACE_ISWCNTRL=1
- else
- case "$gl_cv_func_iswcntrl_works" in
-@@ -157,24 +157,39 @@ AC_DEFUN([gl_WCTYPE_H],
- ])
- ])
-
-+# gl_WCTYPE_MODULE_INDICATOR([modulename])
-+# sets the shell variable that indicates the presence of the given module
-+# to a C preprocessor expression that will evaluate to 1.
-+# This macro invocation must not occur in macros that are AC_REQUIREd.
- AC_DEFUN([gl_WCTYPE_MODULE_INDICATOR],
- [
-- dnl Use AC_REQUIRE here, so that the default settings are expanded once only.
-- AC_REQUIRE([gl_WCTYPE_H_DEFAULTS])
-+ dnl Ensure to expand the default settings once only.
-+ gl_WCTYPE_H_REQUIRE_DEFAULTS
- gl_MODULE_INDICATOR_SET_VARIABLE([$1])
- dnl Define it also as a C macro, for the benefit of the unit tests.
- gl_MODULE_INDICATOR_FOR_TESTS([$1])
- ])
-
-+# Initializes the default values for AC_SUBSTed shell variables.
-+# This macro must not be AC_REQUIREd. It must only be invoked, and only
-+# outside of macros or in macros that are not AC_REQUIREd.
-+AC_DEFUN([gl_WCTYPE_H_REQUIRE_DEFAULTS],
-+[
-+ m4_defun(GL_MODULE_INDICATOR_PREFIX[_WCTYPE_H_MODULE_INDICATOR_DEFAULTS], [
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_ISWBLANK])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_ISWDIGIT])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_ISWXDIGIT])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_WCTYPE])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_ISWCTYPE])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_WCTRANS])
-+ gl_MODULE_INDICATOR_INIT_VARIABLE([GNULIB_TOWCTRANS])
-+ ])
-+ m4_require(GL_MODULE_INDICATOR_PREFIX[_WCTYPE_H_MODULE_INDICATOR_DEFAULTS])
-+ AC_REQUIRE([gl_WCTYPE_H_DEFAULTS])
-+])
-+
- AC_DEFUN([gl_WCTYPE_H_DEFAULTS],
- [
-- GNULIB_ISWBLANK=0; AC_SUBST([GNULIB_ISWBLANK])
-- GNULIB_ISWDIGIT=0; AC_SUBST([GNULIB_ISWDIGIT])
-- GNULIB_ISWXDIGIT=0; AC_SUBST([GNULIB_ISWXDIGIT])
-- GNULIB_WCTYPE=0; AC_SUBST([GNULIB_WCTYPE])
-- GNULIB_ISWCTYPE=0; AC_SUBST([GNULIB_ISWCTYPE])
-- GNULIB_WCTRANS=0; AC_SUBST([GNULIB_WCTRANS])
-- GNULIB_TOWCTRANS=0; AC_SUBST([GNULIB_TOWCTRANS])
- dnl Assume proper GNU behavior unless another module says otherwise.
- HAVE_ISWBLANK=1; AC_SUBST([HAVE_ISWBLANK])
- HAVE_WCTYPE_T=1; AC_SUBST([HAVE_WCTYPE_T])
---- a/gnulib/m4/wint_t.m4
-+++ b/gnulib/m4/wint_t.m4
-@@ -1,4 +1,4 @@
--# wint_t.m4 serial 10
-+# wint_t.m4 serial 11
- dnl Copyright (C) 2003, 2007-2021 Free Software Foundation, Inc.
- dnl This file is free software; the Free Software Foundation
- dnl gives unlimited permission to copy and/or distribute it,
-@@ -34,14 +34,14 @@ AC_DEFUN([gt_TYPE_WINT_T],
- [gl_cv_type_wint_t_large_enough=yes],
- [gl_cv_type_wint_t_large_enough=no])])
- if test $gl_cv_type_wint_t_large_enough = no; then
-- GNULIB_OVERRIDES_WINT_T=1
-+ GNULIBHEADERS_OVERRIDE_WINT_T=1
- else
-- GNULIB_OVERRIDES_WINT_T=0
-+ GNULIBHEADERS_OVERRIDE_WINT_T=0
- fi
- else
-- GNULIB_OVERRIDES_WINT_T=0
-+ GNULIBHEADERS_OVERRIDE_WINT_T=0
- fi
-- AC_SUBST([GNULIB_OVERRIDES_WINT_T])
-+ AC_SUBST([GNULIBHEADERS_OVERRIDE_WINT_T])
- ])
-
- dnl Prerequisites of the 'wint_t' override.
---- /dev/null
-+++ b/gnulib/m4/year2038.m4
-@@ -0,0 +1,112 @@
-+# year2038.m4 serial 5
-+dnl Copyright (C) 2017-2021 Free Software Foundation, Inc.
-+dnl This file is free software; the Free Software Foundation
-+dnl gives unlimited permission to copy and/or distribute it,
-+dnl with or without modifications, as long as this notice is preserved.
-+
-+dnl Attempt to ensure that 'time_t' is a 64-bit type
-+dnl and that the functions time(), stat(), etc. return 64-bit times.
-+
-+AC_DEFUN([gl_YEAR2038_EARLY],
-+[
-+ AC_REQUIRE([AC_CANONICAL_HOST])
-+ case "$host_os" in
-+ mingw*)
-+ AC_DEFINE([__MINGW_USE_VC2005_COMPAT], [1],
-+ [For 64-bit time_t on 32-bit mingw.])
-+ ;;
-+ esac
-+])
-+
-+# gl_YEAR2038_TEST_INCLUDES
-+# -------------------------
-+AC_DEFUN([gl_YEAR2038_TEST_INCLUDES],
-+[[
-+ #include <time.h>
-+ /* Check that time_t can represent 2**63 - 1 correctly.
-+ We can't simply define LARGE_TIME_T to be 9223372036854775807,
-+ since some C++ compilers masquerading as C compilers
-+ incorrectly reject 9223372036854775807. */
-+ #define LARGE_TIME_T (((time_t) 1 << 31 << 31) - 1 + ((time_t) 1 << 31 << 31))
-+ int verify_time_t_range[(LARGE_TIME_T % 2147483629 == 721
-+ && LARGE_TIME_T % 2147483647 == 1)
-+ ? 1 : -1];
-+]])
-+
-+# gl_YEAR2038_BODY(REQUIRE-64-BIT)
-+----------------------------------
-+AC_DEFUN([gl_YEAR2038_BODY],
-+[
-+ AC_ARG_ENABLE([year2038],
-+ [ --disable-year2038 omit support for timestamps past the year 2038])
-+ AS_IF([test "$enable_year2038" != no],
-+ [
-+ dnl On many systems, time_t is already a 64-bit type.
-+ dnl On those systems where time_t is still 32-bit, it requires kernel
-+ dnl and libc support to make it 64-bit. For glibc 2.34 and later on Linux,
-+ dnl defining _TIME_BITS=64 and _FILE_OFFSET_BITS=64 is needed on x86 and ARM.
-+ dnl
-+ dnl On native Windows, the system include files define types __time32_t
-+ dnl and __time64_t. By default, time_t is an alias of
-+ dnl - __time32_t on 32-bit mingw,
-+ dnl - __time64_t on 64-bit mingw and on MSVC (since MSVC 8).
-+ dnl But when compiling with -D__MINGW_USE_VC2005_COMPAT, time_t is an
-+ dnl alias of __time64_t.
-+ dnl And when compiling with -D_USE_32BIT_TIME_T, time_t is an alias of
-+ dnl __time32_t.
-+ AC_CACHE_CHECK([for 64-bit time_t], [gl_cv_type_time_t_64],
-+ [AC_COMPILE_IFELSE(
-+ [AC_LANG_SOURCE([gl_YEAR2038_TEST_INCLUDES])],
-+ [gl_cv_type_time_t_64=yes], [gl_cv_type_time_t_64=no])
-+ ])
-+ if test "$gl_cv_type_time_t_64" = no; then
-+ AC_CACHE_CHECK([for 64-bit time_t with _TIME_BITS=64],
-+ [gl_cv_type_time_t_bits_macro],
-+ [AC_COMPILE_IFELSE(
-+ [AC_LANG_SOURCE([[#define _TIME_BITS 64
-+ #define _FILE_OFFSET_BITS 64
-+ ]gl_YEAR2038_TEST_INCLUDES])],
-+ [gl_cv_type_time_t_bits_macro=yes],
-+ [gl_cv_type_time_t_bits_macro=no])
-+ ])
-+ if test "$gl_cv_type_time_t_bits_macro" = yes; then
-+ AC_DEFINE([_TIME_BITS], [64],
-+ [Number of bits in a timestamp, on hosts where this is settable.])
-+ dnl AC_SYS_LARGFILE also defines this; it's OK if we do too.
-+ AC_DEFINE([_FILE_OFFSET_BITS], [64],
-+ [Number of bits in a file offset, on hosts where this is settable.])
-+ gl_cv_type_time_t_64=yes
-+ fi
-+ fi
-+ if test $gl_cv_type_time_t_64 = no; then
-+ AC_COMPILE_IFELSE(
-+ [AC_LANG_SOURCE(
-+ [[#ifdef _USE_32BIT_TIME_T
-+ int ok;
-+ #else
-+ error fail
-+ #endif
-+ ]])],
-+ [AC_MSG_FAILURE([This package requires a 64-bit 'time_t' type. Remove _USE_32BIT_TIME_T from the compiler flags.])],
-+ [# If not cross-compiling and $1 says we should check,
-+ # and 'touch' works with a large timestamp, then evidently 64-bit time_t
-+ # is desired and supported, so fail and ask the builder to fix the
-+ # problem. Otherwise, just warn the builder.
-+ m4_ifval([$1],
-+ [if test $cross_compiling = no \
-+ && TZ=UTC0 touch -t 210602070628.16 conftest.time 2>/dev/null; then
-+ rm -f conftest.time
-+ AC_MSG_FAILURE([This package requires a 64-bit 'time_t' type, which your system appears to support. You might try configuring with 'CPPFLAGS="-m64" LDFLAGS="-m64"'. To build with a 32-bit time_t anyway (not recommended), configure with '--disable-year2038'.])
-+ fi])
-+ if test "$gl_warned_about_64_bit_time_t" != yes; then
-+ AC_MSG_WARN([This package requires a 64-bit 'time_t' type if there is any way to access timestamps outside the year range 1901-2038 on your platform. Perhaps you should configure with 'CPPFLAGS="-m64" LDFLAGS="-m64"'?])
-+ gl_warned_about_64_bit_time_t=yes
-+ fi
-+ ])
-+ fi])
-+])
-+
-+AC_DEFUN([gl_YEAR2038],
-+[
-+ gl_YEAR2038_BODY([require-64-bit])
-+])
diff --git a/meta/recipes-extended/texinfo/texinfo/0001-texinfo-several-changes-to-build-without-zlib-and-nc.patch b/meta/recipes-extended/texinfo/texinfo/0001-texinfo-several-changes-to-build-without-zlib-and-nc.patch
new file mode 100644
index 0000000000..b43a115b23
--- /dev/null
+++ b/meta/recipes-extended/texinfo/texinfo/0001-texinfo-several-changes-to-build-without-zlib-and-nc.patch
@@ -0,0 +1,55 @@
+From ee9d23373b488c4a499c561d71e6b6ba7ca1bd31 Mon Sep 17 00:00:00 2001
+From: Joshua Lock <josh@linux.intel.com>
+Date: Fri, 16 Sep 2011 15:35:48 -0700
+Subject: [PATCH 1/3] texinfo: several changes to build without zlib and
+ ncurses
+
+We already DEPEND on the native texinfo being present before building so
+there isn't any need to try and build the required native texinfo binaries
+before cross-compiling. This simplifies the recipe somewhat!
+
+Upstream-Status: Inappropriate oe specific
+
+Signed-off-by: Joshua Lock <josh@linux.intel.com>
+---
+ configure.ac | 24 +-----------------------
+ 1 file changed, 1 insertion(+), 23 deletions(-)
+
+diff --git a/configure.ac b/configure.ac
+index 8094498..5b72fc1 100644
+--- a/configure.ac
++++ b/configure.ac
+@@ -247,29 +247,7 @@ AC_CANONICAL_BUILD
+ # $native_tools is also added to SUBDIRS in the main Makefile.am,
+ # so that make compiles the native tools first.
+ #
+-if test "$cross_compiling" = no; then
+- native_tools=
+-else
+- native_tools=tools
+- test -d "$native_tools" || mkdir "$native_tools"
+- confdir=`(cd "$srcdir";pwd)`
+- # Make sure the secondary configure won't fail with
+- # "error: source directory already configured".
+- rm -f config.status
+- AC_MSG_NOTICE([[Doing configure of native tools (${build}).]])
+- cd "$native_tools" || exit 1
+- # Run secondary configure in alternate environment or
+- # it gets the wrong CC etc.
+- # env -i gives this build host configure a clean environment;
+- # consequently, we have to re-initialize $PATH.
+- env -i CC="$BUILD_CC" AR="$BUILD_AR" RANLIB="$BUILD_RANLIB" \
+- PATH="$PATH" \
+- tools_only=1 \
+- ${confdir}/configure --build=${build} --host=${build} \
+- --disable-rpath --disable-nls
+- cd .. || exit 1
+- AC_MSG_NOTICE([[Continuing with main configure (${host}).]])
+-fi
++native_tools=
+ AC_SUBST(native_tools)
+ AM_CONDITIONAL(TOOLS_ONLY, [[test "x$tools_only" = x1]])
+
+--
+2.39.2
+
diff --git a/meta/recipes-extended/texinfo/texinfo/0002-dont-depend-on-help2man.patch b/meta/recipes-extended/texinfo/texinfo/0002-dont-depend-on-help2man.patch
new file mode 100644
index 0000000000..f3b6827d58
--- /dev/null
+++ b/meta/recipes-extended/texinfo/texinfo/0002-dont-depend-on-help2man.patch
@@ -0,0 +1,68 @@
+From e02be81fa68ddc7f939abd99de4e42759a0d5d8c Mon Sep 17 00:00:00 2001
+From: Edwin Plauchu <edwin.plauchu.camacho@intel.com>
+Date: Tue, 29 Nov 2016 13:43:24 -0600
+Subject: [PATCH 2/3] dont-depend-on-help2man
+
+Upstream-Status: Inappropriate
+
+Signed-off-by: Marko Lindqvist <cazfi74@gmail.com>
+Signed-off-by: Edwin Plauchu <edwin.plauchu.camacho@intel.com>
+---
+ doc/Makefile.am | 2 +-
+ man/Makefile.am | 12 ++++++------
+ 2 files changed, 7 insertions(+), 7 deletions(-)
+
+diff --git a/doc/Makefile.am b/doc/Makefile.am
+index e9e6298..f1b9895 100644
+--- a/doc/Makefile.am
++++ b/doc/Makefile.am
+@@ -63,7 +63,7 @@ refcard/txirefcard.pdf refcard/txirefcard-a4.pdf: refcard/txirefcard.tex
+ # Include our texinfo.tex, not Automake's.
+ EXTRA_DIST = epsf.tex texinfo.tex \
+ fdl.texi \
+- $(man_MANS) $(TXI_XLATE) \
++ $(TXI_XLATE) \
+ $(refcard_files) \
+ texinfo-tex-test.texi texinfo-tex-test.WIDOWs \
+ texinfo-ja.tex short-sample-ja.texi \
+diff --git a/man/Makefile.am b/man/Makefile.am
+index f2c703f..61caeeb 100644
+--- a/man/Makefile.am
++++ b/man/Makefile.am
+@@ -11,27 +11,27 @@
+ # implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+
+ # These are generated using help2man.
+-man_MANS = install-info.1 makeinfo.1 texindex.1 texi2dvi.1
++#man_MANS = install-info.1 makeinfo.1 texindex.1 texi2dvi.1
+
+ # These require the build in info/, thus can't do if we failed to find a
+ # terminal library.
+ if HAVE_TERMLIBS
+-man_MANS += info.1
++#man_MANS += info.1
+ endif
+
+ # These are hand-written.
+-man_MANS += info.5 texinfo.5
++#man_MANS += info.5 texinfo.5
+
+-man_MANS += pod2texi.1
++#man_MANS += pod2texi.1
+
+ pod2texi.1: $(top_srcdir)/Pod-Simple-Texinfo/pod2texi.pl
+ $(POD2MAN) $(top_srcdir)/Pod-Simple-Texinfo/pod2texi.pl >"$@"
+
+
+ # These are just .so's to the common program.
+-man_MANS += texi2any.1 texi2pdf.1 pdftexi2dvi.1
++#man_MANS += texi2any.1 texi2pdf.1 pdftexi2dvi.1
+
+-EXTRA_DIST = $(man_MANS) ginfo.h2m
++EXTRA_DIST = ginfo.h2m
+
+ # Maintainers should be able to regenerate.
+ MAINTAINERCLEANFILES = $(man_MANS)
+--
+2.39.2
+
diff --git a/meta/recipes-extended/texinfo/texinfo/0003-texinfo-Update-to-5.1.patch b/meta/recipes-extended/texinfo/texinfo/0003-texinfo-Update-to-5.1.patch
new file mode 100644
index 0000000000..f99f8b87d5
--- /dev/null
+++ b/meta/recipes-extended/texinfo/texinfo/0003-texinfo-Update-to-5.1.patch
@@ -0,0 +1,28 @@
+From 33b85a3928895b812b37dc759c6de711802db45f Mon Sep 17 00:00:00 2001
+From: Saul Wold <sgw@linux.intel.com>
+Date: Mon, 29 Jul 2013 15:02:34 -0700
+Subject: [PATCH 3/3] texinfo: Update to 5.1
+
+Upstream-Status: Inappropriate [cross build specific]
+
+Signed-off-by: Saul Wold <sgw@linux.intel.com>
+---
+ info/Makefile.am | 2 +-
+ 1 file changed, 1 insertion(+), 1 deletion(-)
+
+diff --git a/info/Makefile.am b/info/Makefile.am
+index f57b341..a019aa7 100644
+--- a/info/Makefile.am
++++ b/info/Makefile.am
+@@ -77,7 +77,7 @@ cmd_sources = $(srcdir)/session.c $(srcdir)/echo-area.c $(srcdir)/infodoc.c \
+ # more than once.
+ funs.h: makedoc$(EXEEXT) $(cmd_sources)
+ rm -f $(generated_sources)
+- $(top_builddir)/$(native_tools)/info/makedoc $(cmd_sources)
++ makedoc $(cmd_sources)
+
+ # The following hack is necessary to hint make before the automatic
+ # dependencies are built.
+--
+2.39.2
+
diff --git a/meta/recipes-extended/texinfo/texinfo/disable-native-tools.patch b/meta/recipes-extended/texinfo/texinfo/disable-native-tools.patch
deleted file mode 100644
index ab6f1658aa..0000000000
--- a/meta/recipes-extended/texinfo/texinfo/disable-native-tools.patch
+++ /dev/null
@@ -1,43 +0,0 @@
-We already DEPEND on the native texinfo being present before building so
-there isn't any need to try and build the required native texinfo binaries
-before cross-compiling. This simplifies the recipe somewhat!
-
-Upstream-Status: Inappropriate oe specific
-
-Signed-off-by: Joshua Lock <josh@linux.intel.com>
-
-Index: texinfo-4.13/configure.ac
-===================================================================
---- texinfo-4.13.orig/configure.ac
-+++ texinfo-4.13/configure.ac
-@@ -100,29 +100,7 @@ AC_CANONICAL_BUILD
- # $native_tools is also added to SUBDIRS in the main Makefile.am,
- # so that make compiles the native tools first.
- #
--if test "$cross_compiling" = no; then
-- native_tools=
--else
-- native_tools=tools
-- test -d "$native_tools" || mkdir "$native_tools"
-- confdir=`(cd "$srcdir";pwd)`
-- # Make sure the secondary configure won't fail with
-- # "error: source directory already configured".
-- rm -f config.status
-- AC_MSG_NOTICE([[Doing configure of native tools (${build}).]])
-- cd "$native_tools" || exit 1
-- # Run secondary configure in alternate environment or
-- # it gets the wrong CC etc.
-- # env -i gives this build host configure a clean environment;
-- # consequently, we have to re-initialize $PATH.
-- env -i CC="$BUILD_CC" AR="$BUILD_AR" RANLIB="$BUILD_RANLIB" \
-- PATH="$PATH" \
-- tools_only=1 \
-- ${confdir}/configure --build=${build} --host=${build} \
-- --disable-rpath --disable-nls
-- cd .. || exit 1
-- AC_MSG_NOTICE([[Continuing with main configure (${host}).]])
--fi
-+native_tools=
- AC_SUBST(native_tools)
- AM_CONDITIONAL(TOOLS_ONLY, [[test "x$tools_only" = x1]])
-
diff --git a/meta/recipes-extended/texinfo/texinfo/dont-depend-on-help2man.patch b/meta/recipes-extended/texinfo/texinfo/dont-depend-on-help2man.patch
deleted file mode 100644
index 0d6bbafadd..0000000000
--- a/meta/recipes-extended/texinfo/texinfo/dont-depend-on-help2man.patch
+++ /dev/null
@@ -1,66 +0,0 @@
-From 63a803ead3656353329a801846a9a3beb7210c46 Mon Sep 17 00:00:00 2001
-From: Edwin Plauchu <edwin.plauchu.camacho@intel.com>
-Date: Tue, 29 Nov 2016 13:43:24 -0600
-Subject: [PATCH] dont-depend-on-help2man
-
-Upstream-Status: Inappropriate
-
-Signed-off-by: Marko Lindqvist <cazfi74@gmail.com>
-Signed-off-by: Edwin Plauchu <edwin.plauchu.camacho@intel.com>
-
----
- doc/Makefile.am | 2 +-
- man/Makefile.am | 12 ++++++------
- 2 files changed, 7 insertions(+), 7 deletions(-)
-
-diff --git a/doc/Makefile.am b/doc/Makefile.am
-index 732833e..041cb9b 100644
---- a/doc/Makefile.am
-+++ b/doc/Makefile.am
-@@ -37,7 +37,7 @@ refcard_files = refcard/Makefile refcard/txicmdcheck \
- # Include our texinfo.tex, not Automake's.
- EXTRA_DIST = epsf.tex texinfo.tex \
- fdl.texi \
-- $(man_MANS) $(TXI_XLATE) \
-+ $(TXI_XLATE) \
- $(refcard_files) \
- texinfo-tex-test.texi texinfo-tex-test.WIDOWs \
- texinfo-ja.tex short-sample-ja.texi
-diff --git a/man/Makefile.am b/man/Makefile.am
-index d0cd72c..a19e52a 100644
---- a/man/Makefile.am
-+++ b/man/Makefile.am
-@@ -11,27 +11,27 @@
- # implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
-
- # These are generated using help2man.
--man_MANS = install-info.1 makeinfo.1 texindex.1 texi2dvi.1
-+#man_MANS = install-info.1 makeinfo.1 texindex.1 texi2dvi.1
-
- # These require the build in info/, thus can't do if we failed to find a
- # terminal library.
- if HAVE_TERMLIBS
--man_MANS += info.1
-+#man_MANS += info.1
- endif
-
- # These are hand-written.
--man_MANS += info.5 texinfo.5
-+#man_MANS += info.5 texinfo.5
-
--man_MANS += pod2texi.1
-+#man_MANS += pod2texi.1
-
- pod2texi.1: $(top_srcdir)/Pod-Simple-Texinfo/pod2texi.pl
- $(POD2MAN) $(top_srcdir)/Pod-Simple-Texinfo/pod2texi.pl >"$@"
-
-
- # These are just .so's to the common program.
--man_MANS += texi2any.1 texi2pdf.1 pdftexi2dvi.1
-+#man_MANS += texi2any.1 texi2pdf.1 pdftexi2dvi.1
-
--EXTRA_DIST = $(man_MANS) ginfo.h2m
-+EXTRA_DIST = ginfo.h2m
-
- # Maintainers should be able to regenerate.
- MAINTAINERCLEANFILES = $(man_MANS)
diff --git a/meta/recipes-extended/texinfo/texinfo/link-zip.patch b/meta/recipes-extended/texinfo/texinfo/link-zip.patch
deleted file mode 100644
index 15c030c0c6..0000000000
--- a/meta/recipes-extended/texinfo/texinfo/link-zip.patch
+++ /dev/null
@@ -1,23 +0,0 @@
-From c1002138033fcccb2d1abfc41f5c2e46c836f7a0 Mon Sep 17 00:00:00 2001
-From: Joshua Lock <josh@linux.intel.com>
-Date: Mon, 29 Jul 2013 15:02:34 -0700
-Subject: [PATCH] install-info uses symbols from zlib so must link against it.
-
-Upstream-Status: Pending
-
-Signed-off-by: Joshua Lock <josh@linux.intel.com>
-
----
- install-info/Makefile.am | 2 +-
- 1 file changed, 1 insertion(+), 1 deletion(-)
-
-diff --git a/install-info/Makefile.am b/install-info/Makefile.am
-index 9bcff71..68247f9 100644
---- a/install-info/Makefile.am
-+++ b/install-info/Makefile.am
-@@ -33,4 +33,4 @@ AM_CPPFLAGS = \
- -I$(top_srcdir)/gnulib/lib \
- -I$(top_builddir)/gnulib/lib \
- -DLOCALEDIR=\"$(localedir)\"
--LDADD = $(top_builddir)/gnulib/lib/libgnu.a $(LIBINTL) $(LIBTHREAD)
-+LDADD = $(top_builddir)/gnulib/lib/libgnu.a $(LIBINTL) $(LIBTHREAD) -lz
diff --git a/meta/recipes-extended/texinfo/texinfo/use_host_makedoc.patch b/meta/recipes-extended/texinfo/texinfo/use_host_makedoc.patch
deleted file mode 100644
index 5b7f32d9a9..0000000000
--- a/meta/recipes-extended/texinfo/texinfo/use_host_makedoc.patch
+++ /dev/null
@@ -1,17 +0,0 @@
-Upstream-Status: Inappropriate [cross build specific]
-
-Signed-off-by: Saul Wold <sgw@linux.intel.com>
-
-Index: texinfo-5.1/info/Makefile.am
-===================================================================
---- texinfo-5.1.orig/info/Makefile.am
-+++ texinfo-5.1/info/Makefile.am
-@@ -76,7 +76,7 @@ cmd_sources = $(srcdir)/session.c $(srcd
- # more than once.
- funs.h: makedoc$(EXEEXT) $(cmd_sources)
- rm -f $(generated_sources)
-- $(top_builddir)/$(native_tools)/info/makedoc $(cmd_sources)
-+ makedoc $(cmd_sources)
-
- # The following hack is necessary to hint make before the automatic
- # dependencies are built.
diff --git a/meta/recipes-extended/texinfo/texinfo_6.8.bb b/meta/recipes-extended/texinfo/texinfo_6.8.bb
deleted file mode 100644
index 33dcd943b4..0000000000
--- a/meta/recipes-extended/texinfo/texinfo_6.8.bb
+++ /dev/null
@@ -1,90 +0,0 @@
-SUMMARY = "Documentation system for on-line information and printed output"
-DESCRIPTION = "Texinfo is a documentation system that can produce both \
-online information and printed output from a single source file. The \
-GNU Project uses the Texinfo file format for most of its documentation."
-HOMEPAGE = "http://www.gnu.org/software/texinfo/"
-SECTION = "console/utils"
-LICENSE = "GPL-3.0-or-later"
-LIC_FILES_CHKSUM = "file://COPYING;md5=1ebbd3e34237af26da5dc08a4e440464"
-
-PROVIDES:append:class-native = " texinfo-replacement-native"
-
-def compress_pkg(d):
- if bb.data.inherits_class('compress_doc', d):
- compress = d.getVar("DOC_COMPRESS")
- if compress == "gz":
- return "gzip"
- elif compress == "bz2":
- return "bzip2"
- elif compress == "xz":
- return "xz"
- return ""
-
-RDEPENDS:info += "${@compress_pkg(d)}"
-
-DEPENDS = "zlib ncurses texinfo-replacement-native"
-DEPENDS:class-native = "zlib-native ncurses-native"
-
-TARGET_PATCH = "file://use_host_makedoc.patch"
-TARGET_PATCH:class-native = ""
-
-SRC_URI = "${GNU_MIRROR}/texinfo/${BP}.tar.gz \
- file://0001-gnulib-Update.patch \
- file://disable-native-tools.patch \
- file://link-zip.patch \
- file://dont-depend-on-help2man.patch \
- ${TARGET_PATCH} \
- "
-
-SRC_URI[sha256sum] = "8e09cf753ad1833695d2bac0f57dc3bd6bcbbfbf279450e1ba3bc2d7fb297d08"
-
-tex_texinfo = "texmf/tex/texinfo"
-
-inherit gettext autotools multilib_script
-
-MULTILIB_SCRIPTS = "${PN}:${bindir}/texi2any"
-
-EXTRA_AUTORECONF += "-I ${S}/gnulib/m4"
-
-do_configure:prepend () {
- # autotools_do_configure updates po/Makefile.in.in, we also need
- # update po_document.
- cp -f ${STAGING_DATADIR_NATIVE}/gettext/po/Makefile.in.in ${S}/po_document/
-}
-
-do_compile:prepend() {
- if [ -d tools ];then
- oe_runmake -C tools/gnulib/lib
- fi
-}
-
-do_install:append() {
- mkdir -p ${D}${datadir}/${tex_texinfo}
- install -p -m644 ${S}/doc/texinfo.tex ${S}/doc/txi-??.tex ${D}${datadir}/${tex_texinfo}
- sed -i -e '1s,#!.*perl,#! ${USRBINPATH}/env perl,' ${D}${bindir}/texi2any ${D}${bindir}/pod2texi
-}
-
-do_install:append:class-native() {
- install -m 755 info/makedoc ${D}${bindir}
-}
-
-PACKAGES += "info info-doc"
-
-FILES:info = "${bindir}/info ${bindir}/infokey ${bindir}/install-info"
-FILES:info-doc = "${infodir}/info.info* ${infodir}/dir ${infodir}/info-*.info* \
- ${mandir}/man1/info.1* ${mandir}/man5/info.5* \
- ${mandir}/man1/infokey.1* ${mandir}/man1/install-info.1*"
-
-FILES:${PN} = "${bindir}/makeinfo ${bindir}/texi* ${bindir}/pdftexi2dvi ${bindir}/pod2texi ${datadir}/texinfo"
-RDEPENDS:${PN} = "perl"
-FILES:${PN}-doc = "${infodir}/texinfo* \
- ${datadir}/${tex_texinfo} \
- ${mandir}/man1 ${mandir}/man5"
-
-# Lie about providing the Locale::gettext_xs module. It is not actually built,
-# but the code will test for it and if not found use Locale::gettext_pp instead.
-# However, this causes a file dependency on perl(Locale::gettext_xs) to be
-# generated, which must be satisfied.
-RPROVIDES:${PN} += "perl(Locale::gettext_xs)"
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-extended/texinfo/texinfo_7.1.bb b/meta/recipes-extended/texinfo/texinfo_7.1.bb
new file mode 100644
index 0000000000..65d8252fb9
--- /dev/null
+++ b/meta/recipes-extended/texinfo/texinfo_7.1.bb
@@ -0,0 +1,90 @@
+SUMMARY = "Documentation system for on-line information and printed output"
+DESCRIPTION = "Texinfo is a documentation system that can produce both \
+online information and printed output from a single source file. The \
+GNU Project uses the Texinfo file format for most of its documentation."
+HOMEPAGE = "http://www.gnu.org/software/texinfo/"
+SECTION = "console/utils"
+LICENSE = "GPL-3.0-or-later"
+LIC_FILES_CHKSUM = "file://COPYING;md5=1ebbd3e34237af26da5dc08a4e440464"
+
+PROVIDES:append:class-native = " texinfo-replacement-native"
+
+def compress_pkg(d):
+ if bb.data.inherits_class('compress_doc', d):
+ compress = d.getVar("DOC_COMPRESS")
+ if compress == "gz":
+ return "gzip"
+ elif compress == "bz2":
+ return "bzip2"
+ elif compress == "xz":
+ return "xz"
+ return ""
+
+RDEPENDS:info += "${@compress_pkg(d)}"
+
+DEPENDS = "zlib ncurses texinfo-replacement-native"
+DEPENDS:class-native = "zlib-native ncurses-native"
+
+TARGET_PATCH = "file://0003-texinfo-Update-to-5.1.patch"
+TARGET_PATCH:class-native = ""
+
+SRC_URI = "${GNU_MIRROR}/texinfo/${BP}.tar.gz \
+ file://0001-texinfo-several-changes-to-build-without-zlib-and-nc.patch \
+ file://0002-dont-depend-on-help2man.patch \
+ ${TARGET_PATCH} \
+ "
+
+SRC_URI[sha256sum] = "dd5710b3a53ac002644677a06145748e260592a35be182dc830ebebb79c5d5a0"
+
+tex_texinfo = "texmf/tex/texinfo"
+
+inherit gettext autotools multilib_script
+
+MULTILIB_SCRIPTS = "${PN}:${bindir}/texi2any"
+
+EXTRA_AUTORECONF += "-I ${S}/gnulib/m4"
+CACHED_CONFIGUREVARS += "texinfo_cv_sys_iconv_converts_euc_cn=yes"
+
+do_configure:prepend () {
+ # autotools_do_configure updates po/Makefile.in.in, we also need
+ # update po_document.
+ cp -f ${STAGING_DATADIR_NATIVE}/gettext/po/Makefile.in.in ${S}/po_document/
+}
+
+do_compile:prepend() {
+ if [ -d tools ];then
+ oe_runmake -C tools/gnulib/lib
+ fi
+}
+
+do_install:append() {
+ mkdir -p ${D}${datadir}/${tex_texinfo}
+ install -p -m644 ${S}/doc/texinfo.tex ${S}/doc/txi-??.tex ${D}${datadir}/${tex_texinfo}
+ sed -i -e '1s,#!.*perl,#! ${USRBINPATH}/env perl,' ${D}${bindir}/texi2any ${D}${bindir}/pod2texi
+ sed -i -e 's,${HOSTTOOLS_DIR},,' ${D}${bindir}/texindex
+}
+
+do_install:append:class-native() {
+ install -m 755 info/makedoc ${D}${bindir}
+}
+
+PACKAGES += "info info-doc"
+
+FILES:info = "${bindir}/info ${bindir}/infokey ${bindir}/install-info"
+FILES:info-doc = "${infodir}/info.info* ${infodir}/dir ${infodir}/info-*.info* \
+ ${mandir}/man1/info.1* ${mandir}/man5/info.5* \
+ ${mandir}/man1/infokey.1* ${mandir}/man1/install-info.1*"
+
+FILES:${PN} = "${bindir}/makeinfo ${bindir}/texi* ${bindir}/pdftexi2dvi ${bindir}/pod2texi ${datadir}/texinfo"
+RDEPENDS:${PN} = "perl"
+FILES:${PN}-doc = "${infodir}/texi* \
+ ${datadir}/${tex_texinfo} \
+ ${mandir}/man1 ${mandir}/man5"
+
+# Lie about providing the Locale::gettext_xs module. It is not actually built,
+# but the code will test for it and if not found use Locale::gettext_pp instead.
+# However, this causes a file dependency on perl(Locale::gettext_xs) to be
+# generated, which must be satisfied.
+RPROVIDES:${PN} += "perl(Locale::gettext_xs)"
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-extended/time/time/0001-include-string.h-for-memset.patch b/meta/recipes-extended/time/time/0001-include-string.h-for-memset.patch
new file mode 100644
index 0000000000..f6ea212667
--- /dev/null
+++ b/meta/recipes-extended/time/time/0001-include-string.h-for-memset.patch
@@ -0,0 +1,27 @@
+From c8deae54f92d636878097063b411af9fb5262ad3 Mon Sep 17 00:00:00 2001
+From: Khem Raj <raj.khem@gmail.com>
+Date: Mon, 15 Aug 2022 07:24:24 -0700
+Subject: [PATCH] include string.h for memset()
+
+Fixes implicit function declaration warning e.g.
+
+resuse.c:103:3: error: call to undeclared library function 'memset' with type 'void *(void *, int, unsigned long)'
+
+Upstream-Status: Submitted [https://lists.gnu.org/archive/html/bug-time/2022-08/msg00001.html]
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+---
+ src/resuse.c | 1 +
+ 1 file changed, 1 insertion(+)
+
+diff --git a/src/resuse.c b/src/resuse.c
+index cf5a08c..9d3d18a 100644
+--- a/src/resuse.c
++++ b/src/resuse.c
+@@ -22,6 +22,7 @@
+ */
+
+ #include "config.h"
++#include <string.h>
+ #include <sys/time.h>
+ #include <sys/wait.h>
+ #include <sys/resource.h>
diff --git a/meta/recipes-extended/time/time_1.9.bb b/meta/recipes-extended/time/time_1.9.bb
index 706605fe06..8364210e61 100644
--- a/meta/recipes-extended/time/time_1.9.bb
+++ b/meta/recipes-extended/time/time_1.9.bb
@@ -13,7 +13,9 @@ ALTERNATIVE_PRIORITY = "100"
BBCLASSEXTEND = "native nativesdk"
-SRC_URI = "${GNU_MIRROR}/time/time-${PV}.tar.gz"
+SRC_URI = "${GNU_MIRROR}/time/time-${PV}.tar.gz \
+ file://0001-include-string.h-for-memset.patch \
+ "
SRC_URI[md5sum] = "d2356e0fe1c0b85285d83c6b2ad51b5f"
SRC_URI[sha256sum] = "fbacf0c81e62429df3e33bda4cee38756604f18e01d977338e23306a3e3b521e"
diff --git a/meta/recipes-extended/timezone/timezone.inc b/meta/recipes-extended/timezone/timezone.inc
index cdd1a2ac3c..4734adcc08 100644
--- a/meta/recipes-extended/timezone/timezone.inc
+++ b/meta/recipes-extended/timezone/timezone.inc
@@ -6,14 +6,15 @@ SECTION = "base"
LICENSE = "PD & BSD-3-Clause"
LIC_FILES_CHKSUM = "file://LICENSE;md5=c679c9d6b02bc2757b3eaf8f53c43fba"
-PV = "2022a"
+PV = "2024a"
-SRC_URI =" http://www.iana.org/time-zones/repository/releases/tzcode${PV}.tar.gz;name=tzcode \
- http://www.iana.org/time-zones/repository/releases/tzdata${PV}.tar.gz;name=tzdata \
+SRC_URI =" http://www.iana.org/time-zones/repository/releases/tzcode${PV}.tar.gz;name=tzcode;subdir=tz \
+ http://www.iana.org/time-zones/repository/releases/tzdata${PV}.tar.gz;name=tzdata;subdir=tz \
"
-UPSTREAM_CHECK_URI = "http://www.iana.org/time-zones"
+S = "${WORKDIR}/tz"
-SRC_URI[tzcode.sha256sum] = "f8575e7e33be9ee265df2081092526b81c80abac3f4a04399ae9d4d91cdadac7"
-SRC_URI[tzdata.sha256sum] = "ef7fffd9f4f50f4f58328b35022a32a5a056b245c5cb3d6791dddb342f871664"
+UPSTREAM_CHECK_URI = "http://www.iana.org/time-zones"
+SRC_URI[tzcode.sha256sum] = "80072894adff5a458f1d143e16e4ca1d8b2a122c9c5399da482cb68cba6a1ff8"
+SRC_URI[tzdata.sha256sum] = "0d0434459acbd2059a7a8da1f3304a84a86591f6ed69c6248fffa502b6edffe3"
diff --git a/meta/recipes-extended/timezone/tzcode-native.bb b/meta/recipes-extended/timezone/tzcode-native.bb
index e3582ba674..d0b23a9d80 100644
--- a/meta/recipes-extended/timezone/tzcode-native.bb
+++ b/meta/recipes-extended/timezone/tzcode-native.bb
@@ -1,10 +1,7 @@
require timezone.inc
-#
SUMMARY = "tzcode, timezone zoneinfo utils -- zic, zdump, tzselect"
-S = "${WORKDIR}"
-
inherit native
EXTRA_OEMAKE += "cc='${CC}'"
diff --git a/meta/recipes-extended/timezone/tzdata.bb b/meta/recipes-extended/timezone/tzdata.bb
index 7f4322d867..dd1960ffa7 100644
--- a/meta/recipes-extended/timezone/tzdata.bb
+++ b/meta/recipes-extended/timezone/tzdata.bb
@@ -4,8 +4,6 @@ DEPENDS = "tzcode-native"
inherit allarch
-S = "${WORKDIR}"
-
DEFAULT_TIMEZONE ?= "Universal"
INSTALL_TIMEZONE_FILE ?= "1"
@@ -18,17 +16,21 @@ TZONES = " \
# "fat" is needed by e.g. MariaDB's mysql_tzinfo_to_sql
ZIC_FMT ?= "slim"
+do_configure[cleandirs] = "${B}"
+B = "${WORKDIR}/build"
+
do_compile() {
for zone in ${TZONES}; do
- ${STAGING_BINDIR_NATIVE}/zic -b ${ZIC_FMT} -d ${WORKDIR}${datadir}/zoneinfo -L /dev/null ${S}/${zone}
- ${STAGING_BINDIR_NATIVE}/zic -b ${ZIC_FMT} -d ${WORKDIR}${datadir}/zoneinfo/posix -L /dev/null ${S}/${zone}
- ${STAGING_BINDIR_NATIVE}/zic -b ${ZIC_FMT} -d ${WORKDIR}${datadir}/zoneinfo/right -L ${S}/leapseconds ${S}/${zone}
+ ${STAGING_BINDIR_NATIVE}/zic -b ${ZIC_FMT} -d ${B}/zoneinfo -L /dev/null ${S}/${zone}
+ ${STAGING_BINDIR_NATIVE}/zic -b ${ZIC_FMT} -d ${B}/zoneinfo/posix -L /dev/null ${S}/${zone}
+ ${STAGING_BINDIR_NATIVE}/zic -b ${ZIC_FMT} -d ${B}/zoneinfo/right -L ${S}/leapseconds ${S}/${zone}
done
}
do_install() {
- install -d ${D}$exec_prefix ${D}${datadir}/zoneinfo
- cp -pPR ${WORKDIR}$exec_prefix ${D}${base_prefix}
+ install -d ${D}${datadir}/zoneinfo
+ cp -pPR ${B}/zoneinfo/* ${D}${datadir}/zoneinfo
+
# libc is removing zoneinfo files from package
cp -pP "${S}/zone.tab" ${D}${datadir}/zoneinfo
cp -pP "${S}/zone1970.tab" ${D}${datadir}/zoneinfo
diff --git a/meta/recipes-extended/unzip/unzip/0001-configure-Add-correct-system-headers-and-prototypes-.patch b/meta/recipes-extended/unzip/unzip/0001-configure-Add-correct-system-headers-and-prototypes-.patch
new file mode 100644
index 0000000000..f7e0854cd9
--- /dev/null
+++ b/meta/recipes-extended/unzip/unzip/0001-configure-Add-correct-system-headers-and-prototypes-.patch
@@ -0,0 +1,112 @@
+From 5ac5885d35257888d0e4a9dda903405314f9fc84 Mon Sep 17 00:00:00 2001
+From: Khem Raj <raj.khem@gmail.com>
+Date: Wed, 10 Aug 2022 17:53:13 -0700
+Subject: [PATCH] configure: Add correct system headers and prototypes to tests
+
+Newer compilers e.g. clang-15+ have turned stricter towards these
+warnings and turned them into errors which results in subtle failures
+during build, therefore make the testcases use the needed headers and
+modern C
+
+Upstream-Status: Inactive-Upstream
+
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+---
+ unix/configure | 51 +++++++++++++++++++++++++++++++++++++++-----------
+ 1 file changed, 40 insertions(+), 11 deletions(-)
+
+diff --git a/unix/configure b/unix/configure
+index 49579f3..8fd82dd 100755
+--- a/unix/configure
++++ b/unix/configure
+@@ -379,14 +379,37 @@ $CC $CFLAGS -c conftest.c >/dev/null 2>/dev/null
+
+ # Check for missing functions
+ # add NO_'function_name' to flags if missing
+-for func in fchmod fchown lchown nl_langinfo
+-do
+- echo Check for $func
+- echo "int main(){ $func(); return 0; }" > conftest.c
+- $CC $BFLAG $LDFLAGS -o conftest conftest.c >/dev/null 2>/dev/null
+- [ $? -ne 0 ] && CFLAGSR="${CFLAGSR} -DNO_`echo $func | tr '[a-z]' '[A-Z]'`"
+-done
++echo Check for fchmod
++cat > conftest.c << _EOF_
++#include <sys/stat.h>
++int main(){ fchmod(0,0); return 0; }
++_EOF_
++$CC $BFLAG $LDFLAGS -o conftest conftest.c >/dev/null 2>/dev/null
++[ $? -ne 0 ] && CFLAGSR="${CFLAGSR} -DNO_FCHMOD"
+
++echo Check for fchown
++cat > conftest.c << _EOF_
++#include <unistd.h>
++int main(){ fchown(0,0,0); return 0; }
++_EOF_
++$CC $BFLAG $LDFLAGS -o conftest conftest.c >/dev/null 2>/dev/null
++[ $? -ne 0 ] && CFLAGSR="${CFLAGSR} -DNO_FCHOWN"
++
++echo Check for lchown
++cat > conftest.c << _EOF_
++#include <unistd.h>
++int main(){ lchown(NULL,0,0); return 0; }
++_EOF_
++$CC $BFLAG $LDFLAGS -o conftest conftest.c >/dev/null 2>/dev/null
++[ $? -ne 0 ] && CFLAGSR="${CFLAGSR} -DNO_LCHOWN"
++
++echo Check for nl_langinfo
++cat > conftest.c << _EOF_
++#include <langinfo.h>
++int main(){ nl_langinfo(0); return 0; }
++_EOF_
++$CC $BFLAG $LDFLAGS -o conftest conftest.c >/dev/null 2>/dev/null
++[ $? -ne 0 ] && CFLAGSR="${CFLAGSR} -DNO_NL_LANGINFO"
+ # Check (seriously) for a working lchmod.
+ echo 'Check for lchmod'
+ temp_file="/tmp/unzip_test_$$"
+@@ -401,14 +424,17 @@ ln -s "${temp_link}" "${temp_file}" && \
+ rm -f "${temp_file}"
+
+ echo Check for memset
+-echo "int main(){ char k; memset(&k,0,0); return 0; }" > conftest.c
++cat > conftest.c << _EOF_
++#include <string.h>
++int main(){ char k; memset(&k,0,0); return 0; }
++_EOF_
+ $CC $CFLAGS $LDFLAGS -o conftest conftest.c >/dev/null 2>/dev/null
+ [ $? -ne 0 ] && CFLAGSR="${CFLAGSR} -DZMEM"
+
+ echo Check for errno declaration
+ cat > conftest.c << _EOF_
+ #include <errno.h>
+-main()
++int main()
+ {
+ errno = 0;
+ return 0;
+@@ -419,6 +445,8 @@ $CC $CFLAGS -c conftest.c >/dev/null 2>/dev/null
+
+ echo Check for directory libraries
+ cat > conftest.c << _EOF_
++#include <sys/types.h>
++#include <dirent.h>
+ int main() { return closedir(opendir(".")); }
+ _EOF_
+
+@@ -523,10 +551,11 @@ fi
+ # needed for AIX (and others ?) when mmap is used
+ echo Check for valloc
+ cat > conftest.c << _EOF_
+-main()
++#include <stdlib.h>
++int main()
+ {
+ #ifdef MMAP
+- valloc();
++ valloc(0);
+ #endif
+ }
+ _EOF_
+--
+2.37.1
+
diff --git a/meta/recipes-extended/unzip/unzip/0001-configure-Pass-LDFLAGS-to-tests-doing-link-step.patch b/meta/recipes-extended/unzip/unzip/0001-configure-Pass-LDFLAGS-to-tests-doing-link-step.patch
index 716766de29..5a6d1946f6 100644
--- a/meta/recipes-extended/unzip/unzip/0001-configure-Pass-LDFLAGS-to-tests-doing-link-step.patch
+++ b/meta/recipes-extended/unzip/unzip/0001-configure-Pass-LDFLAGS-to-tests-doing-link-step.patch
@@ -6,7 +6,7 @@ Subject: [PATCH] configure: Pass LDFLAGS to tests doing link step
Ensures that right flags from recipes are honored, otherwise tests fail
which otherwise should not.
-Upstream-Status: Pending
+Upstream-Status: Inactive-Upstream
Signed-off-by: Khem Raj <raj.khem@gmail.com>
---
unix/configure | 28 ++++++++++++++--------------
diff --git a/meta/recipes-extended/unzip/unzip/0001-unix-configure-fix-detection-for-cross-compilation.patch b/meta/recipes-extended/unzip/unzip/0001-unix-configure-fix-detection-for-cross-compilation.patch
new file mode 100644
index 0000000000..2fa7f481b7
--- /dev/null
+++ b/meta/recipes-extended/unzip/unzip/0001-unix-configure-fix-detection-for-cross-compilation.patch
@@ -0,0 +1,103 @@
+From 5cbf901b5c3b6a7d1d0ed91b6df4194bb6d25a40 Mon Sep 17 00:00:00 2001
+From: Chen Qi <Qi.Chen@windriver.com>
+Date: Thu, 15 Jun 2023 07:14:17 -0700
+Subject: [PATCH] unix/configure: fix detection for cross compilation
+
+We're doing cross compilation, running a cross-compiled problem
+on host to detemine feature is not correct. So we change runtime
+check into compile-time check to detect the features.
+
+Upstream-Status: Inactive-Upstream
+
+Signed-off-by: Chen Qi <Qi.Chen@windriver.com>
+---
+ unix/configure | 44 +++++++++++++++-----------------------------
+ 1 file changed, 15 insertions(+), 29 deletions(-)
+
+diff --git a/unix/configure b/unix/configure
+index 8fd82dd..68dee98 100755
+--- a/unix/configure
++++ b/unix/configure
+@@ -259,6 +259,10 @@ cat > conftest.c << _EOF_
+ #include <sys/stat.h>
+ #include <unistd.h>
+ #include <stdio.h>
++
++_Static_assert(sizeof(off_t) < 8, "sizeof off_t < 8 failed");
++_Static_assert(sizeof((struct stat){0}.st_size) < 8, "sizeof st_size < 8 failed");
++
+ int main()
+ {
+ off_t offset;
+@@ -278,21 +282,10 @@ _EOF_
+ # compile it
+ $CC $CFLAGS $LDFLAGS -o conftest conftest.c >/dev/null 2>/dev/null
+ if [ $? -ne 0 ]; then
+- echo -- no Large File Support
++ echo -- yes we have Large File Support!
++ CFLAGSR="${CFLAGSR} -DLARGE_FILE_SUPPORT"
+ else
+-# run it
+- ./conftest
+- r=$?
+- if [ $r -eq 1 ]; then
+- echo -- no Large File Support - no 64-bit off_t
+- elif [ $r -eq 2 ]; then
+- echo -- no Large File Support - no 64-bit stat
+- elif [ $r -eq 3 ]; then
+- echo -- yes we have Large File Support!
+- CFLAGSR="${CFLAGSR} -DLARGE_FILE_SUPPORT"
+- else
+- echo -- no Large File Support - conftest returned $r
+- fi
++ echo -- no Large File Support
+ fi
+
+ # Added 11/24/2005 EG
+@@ -302,6 +295,11 @@ cat > conftest.c << _EOF_
+ #include <stdlib.h>
+ #include <stdio.h>
+ #include <wchar.h>
++
++#ifndef __STDC_ISO_10646__
++#error "__STDC_ISO_10646__ not defined
++#endif
++
+ int main()
+ {
+ size_t wsize;
+@@ -327,19 +325,8 @@ if [ $? -ne 0 ]; then
+ echo "-- no Unicode (wchar_t) support"
+ else
+ # have wide char support
+-# run it
+- ./conftest
+- r=$?
+- if [ $r -eq 0 ]; then
+- echo -- no Unicode wchar_t support - wchar_t allocation error
+- elif [ $r -eq 1 ]; then
+- echo -- no Unicode support - wchar_t encoding unspecified
+- elif [ $r -eq 2 ]; then
+- echo -- have wchar_t with known UCS encoding - enabling Unicode support!
+- CFLAGSR="${CFLAGSR} -DUNICODE_SUPPORT -DUNICODE_WCHAR"
+- else
+- echo "-- no Unicode (wchar_t) support - conftest returned $r"
+- fi
++ echo -- have wchar_t with known UCS encoding - enabling Unicode support!
++ CFLAGSR="${CFLAGSR} -DUNICODE_SUPPORT -DUNICODE_WCHAR"
+ fi
+
+ echo "Check for setlocale support (needed for UNICODE Native check)"
+@@ -418,8 +405,7 @@ temp_link="link_$$"
+ echo "int main() { lchmod(\"${temp_file}\", 0666); }" \
+ ) > conftest.c
+ ln -s "${temp_link}" "${temp_file}" && \
+- $CC $BFLAG $LDFLAGS -o conftest conftest.c >/dev/null 2>/dev/null && \
+- ./conftest
++ $CC -Werror=implicit-function-declaration $BFLAG $LDFLAGS -o conftest conftest.c >/dev/null
+ [ $? -ne 0 ] && CFLAGSR="${CFLAGSR} -DNO_LCHMOD"
+ rm -f "${temp_file}"
+
+--
+2.34.1
+
diff --git a/meta/recipes-extended/unzip/unzip/CVE-2021-4217.patch b/meta/recipes-extended/unzip/unzip/CVE-2021-4217.patch
index 6ba2b879a3..c0103444fc 100644
--- a/meta/recipes-extended/unzip/unzip/CVE-2021-4217.patch
+++ b/meta/recipes-extended/unzip/unzip/CVE-2021-4217.patch
@@ -20,7 +20,7 @@ Regenerated to apply without offsets.
CVE: CVE-2021-4217
-Upstream-Status: Pending [infozip upstream inactive]
+Upstream-Status: Inactive-Upstream [infozip upstream inactive]
Signed-off-by: Joe Slater <joe.slater@windriver.com>
diff --git a/meta/recipes-extended/unzip/unzip/CVE-2022-0529.patch b/meta/recipes-extended/unzip/unzip/CVE-2022-0529.patch
new file mode 100644
index 0000000000..1c1e120deb
--- /dev/null
+++ b/meta/recipes-extended/unzip/unzip/CVE-2022-0529.patch
@@ -0,0 +1,39 @@
+https://bugs.debian.org/cgi-bin/bugreport.cgi?bug=1010355
+
+CVE: CVE-2022-0529
+Upstream-Status: Inactive-Upstream [need a new release]
+
+diff --git a/process.c b/process.c
+index d2a846e..99b9c7b 100644
+--- a/process.c
++++ b/process.c
+@@ -2507,13 +2507,15 @@ char *wide_to_local_string(wide_string, escape_all)
+ char buf[9];
+ char *buffer = NULL;
+ char *local_string = NULL;
++ size_t buffer_size;
+
+ for (wsize = 0; wide_string[wsize]; wsize++) ;
+
+ if (max_bytes < MAX_ESCAPE_BYTES)
+ max_bytes = MAX_ESCAPE_BYTES;
+
+- if ((buffer = (char *)malloc(wsize * max_bytes + 1)) == NULL) {
++ buffer_size = wsize * max_bytes + 1;
++ if ((buffer = (char *)malloc(buffer_size)) == NULL) {
+ return NULL;
+ }
+
+@@ -2552,7 +2554,11 @@ char *wide_to_local_string(wide_string, escape_all)
+ /* no MB for this wide */
+ /* use escape for wide character */
+ char *escape_string = wide_to_escape_string(wide_string[i]);
+- strcat(buffer, escape_string);
++ size_t buffer_len = strlen(buffer);
++ size_t escape_string_len = strlen(escape_string);
++ if (buffer_len + escape_string_len + 1 > buffer_size)
++ escape_string_len = buffer_size - buffer_len - 1;
++ strncat(buffer, escape_string, escape_string_len);
+ free(escape_string);
+ }
+ }
diff --git a/meta/recipes-extended/unzip/unzip/CVE-2022-0530.patch b/meta/recipes-extended/unzip/unzip/CVE-2022-0530.patch
new file mode 100644
index 0000000000..363dafddc9
--- /dev/null
+++ b/meta/recipes-extended/unzip/unzip/CVE-2022-0530.patch
@@ -0,0 +1,33 @@
+https://bugs.debian.org/cgi-bin/bugreport.cgi?bug=1010355
+
+CVE: CVE-2022-0530
+Upstream-Status: Inactive-Upstream [need a new release]
+
+diff --git a/fileio.c b/fileio.c
+index 6290824..77e4b5f 100644
+--- a/fileio.c
++++ b/fileio.c
+@@ -2361,6 +2361,9 @@ int do_string(__G__ length, option) /* return PK-type error code */
+ /* convert UTF-8 to local character set */
+ fn = utf8_to_local_string(G.unipath_filename,
+ G.unicode_escape_all);
++ if (fn == NULL)
++ return PK_ERR;
++
+ /* make sure filename is short enough */
+ if (strlen(fn) >= FILNAMSIZ) {
+ fn[FILNAMSIZ - 1] = '\0';
+diff --git a/process.c b/process.c
+index d2a846e..715bc0f 100644
+--- a/process.c
++++ b/process.c
+@@ -2605,6 +2605,8 @@ char *utf8_to_local_string(utf8_string, escape_all)
+ int escape_all;
+ {
+ zwchar *wide = utf8_to_wide_string(utf8_string);
++ if (wide == NULL)
++ return NULL;
+ char *loc = wide_to_local_string(wide, escape_all);
+ free(wide);
+ return loc;
+
diff --git a/meta/recipes-extended/unzip/unzip/avoid-strip.patch b/meta/recipes-extended/unzip/unzip/avoid-strip.patch
index e0c89d81b7..70bedc8381 100644
--- a/meta/recipes-extended/unzip/unzip/avoid-strip.patch
+++ b/meta/recipes-extended/unzip/unzip/avoid-strip.patch
@@ -1,4 +1,4 @@
-Upstream-Status: Inappropriate [need a new release]
+Upstream-Status: Inactive-Upstream [need a new release]
unix/Makefile: remove hard coded strip commands
diff --git a/meta/recipes-extended/unzip/unzip/define-ldflags.patch b/meta/recipes-extended/unzip/unzip/define-ldflags.patch
index dc554c32cb..dd01c01400 100644
--- a/meta/recipes-extended/unzip/unzip/define-ldflags.patch
+++ b/meta/recipes-extended/unzip/unzip/define-ldflags.patch
@@ -1,6 +1,6 @@
Pass LDFLAGS to the linker
-Upstream-Status: Inappropriate [need a new release]
+Upstream-Status: Inactive-Upstream [need a new release]
Signed-off-by: Mikhail Durnev <Mikhail_Durnev@mentor.com>
diff --git a/meta/recipes-extended/unzip/unzip/fix-security-format.patch b/meta/recipes-extended/unzip/unzip/fix-security-format.patch
index ba6ead3b5e..2889c652d4 100644
--- a/meta/recipes-extended/unzip/unzip/fix-security-format.patch
+++ b/meta/recipes-extended/unzip/unzip/fix-security-format.patch
@@ -5,7 +5,7 @@ Fix security formatting issues related to sprintf parameters expeted.
[YOCTO #9551]
[https://bugzilla.yoctoproject.org/show_bug.cgi?id=9551]
-Upstream-Status: Inappropriate [need a new release]
+Upstream-Status: Inactive-Upstream [need a new release]
Signed-off-by: Edwin Plauchu <edwin.plauchu.camacho@intel.com>
diff --git a/meta/recipes-extended/unzip/unzip/symlink.patch b/meta/recipes-extended/unzip/unzip/symlink.patch
index c1d82ac187..26f1c8ba86 100644
--- a/meta/recipes-extended/unzip/unzip/symlink.patch
+++ b/meta/recipes-extended/unzip/unzip/symlink.patch
@@ -6,7 +6,7 @@ a symlink entry."
This patch is taken from Fedora (https://bugzilla.redhat.com/show_bug.cgi?id=972427)
-Upstream-Status: Inappropriate [need a new release]
+Upstream-Status: Inactive-Upstream [need a new release]
Signed-off-by: Ross Burton <ross.burton@intel.com>
--- unzip60/process.c.sav 2013-06-09 12:08:57.070392264 +0200
diff --git a/meta/recipes-extended/unzip/unzip_6.0.bb b/meta/recipes-extended/unzip/unzip_6.0.bb
index c222a684b4..27076d5d9b 100644
--- a/meta/recipes-extended/unzip/unzip_6.0.bb
+++ b/meta/recipes-extended/unzip/unzip_6.0.bb
@@ -5,7 +5,6 @@ SECTION = "console/utils"
LICENSE = "BSD-3-Clause"
LIC_FILES_CHKSUM = "file://LICENSE;md5=94caec5a51ef55ef711ee4e8b1c69e29"
PE = "1"
-PR = "r5"
SRC_URI = "${SOURCEFORGE_MIRROR}/infozip/UnZip%206.x%20%28latest%29/UnZip%206.0/unzip60.tar.gz \
file://avoid-strip.patch \
@@ -29,14 +28,17 @@ SRC_URI = "${SOURCEFORGE_MIRROR}/infozip/UnZip%206.x%20%28latest%29/UnZip%206.0/
file://unzip_optimization.patch \
file://0001-configure-Pass-LDFLAGS-to-tests-doing-link-step.patch \
file://CVE-2021-4217.patch \
+ file://CVE-2022-0529.patch \
+ file://CVE-2022-0530.patch \
+ file://0001-configure-Add-correct-system-headers-and-prototypes-.patch \
+ file://0001-unix-configure-fix-detection-for-cross-compilation.patch \
"
UPSTREAM_VERSION_UNKNOWN = "1"
SRC_URI[md5sum] = "62b490407489521db863b523a7f86375"
SRC_URI[sha256sum] = "036d96991646d0449ed0aa952e4fbe21b476ce994abc276e49d30e686708bd37"
-# Patch from https://bugzilla.redhat.com/attachment.cgi?id=293893&action=diff applied to 6.0 source
-CVE_CHECK_IGNORE += "CVE-2008-0888"
+CVE_STATUS[CVE-2008-0888] = "fixed-version: Patch from https://bugzilla.redhat.com/attachment.cgi?id=293893&action=diff applied to 6.0 source"
# exclude version 5.5.2 which triggers a false positive
UPSTREAM_CHECK_REGEX = "unzip(?P<pver>(?!552).+)\.tgz"
diff --git a/meta/recipes-extended/watchdog/watchdog-config.bb b/meta/recipes-extended/watchdog/watchdog-config.bb
index a28d28033b..f138952128 100644
--- a/meta/recipes-extended/watchdog/watchdog-config.bb
+++ b/meta/recipes-extended/watchdog/watchdog-config.bb
@@ -13,8 +13,15 @@ SRC_URI = " \
file://watchdog.conf \
"
+# The default value is 60 seconds when null.
+WATCHDOG_TIMEOUT ??= ""
+
do_install() {
install -Dm 0644 ${WORKDIR}/watchdog.default ${D}${sysconfdir}/default/watchdog
install -Dm 0644 ${WORKDIR}/watchdog.conf ${D}${sysconfdir}/watchdog.conf
+
+ if [ -n "${WATCHDOG_TIMEOUT}" ]; then
+ echo "watchdog-timeout = ${WATCHDOG_TIMEOUT}" >> ${D}/etc/watchdog.conf
+ fi
}
diff --git a/meta/recipes-extended/watchdog/watchdog/0001-shutdown-Do-not-guard-sys-quota.h-sys-swap.h-and-sys.patch b/meta/recipes-extended/watchdog/watchdog/0001-shutdown-Do-not-guard-sys-quota.h-sys-swap.h-and-sys.patch
new file mode 100644
index 0000000000..8c419e1d11
--- /dev/null
+++ b/meta/recipes-extended/watchdog/watchdog/0001-shutdown-Do-not-guard-sys-quota.h-sys-swap.h-and-sys.patch
@@ -0,0 +1,37 @@
+From ca1d379fa13c4055d42d2ff3a647b4397768efcd Mon Sep 17 00:00:00 2001
+From: Khem Raj <raj.khem@gmail.com>
+Date: Tue, 23 Aug 2022 19:23:26 -0700
+Subject: [PATCH] shutdown: Do not guard sys/quota.h sys/swap.h and
+ sys/reboot.h with __GLIBC__
+
+These headers are provided by uclibc/musl/glibc and bionic so we can
+assume they are not needed to be glibc specific includes. This also
+ensures that we get proper declaration of reboot() API
+
+Upstream-Status: Submitted [https://sourceforge.net/p/watchdog/patches/12/]
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+---
+ src/shutdown.c | 4 ----
+ 1 file changed, 4 deletions(-)
+
+diff --git a/src/shutdown.c b/src/shutdown.c
+index 1d9a857..6aea0d0 100644
+--- a/src/shutdown.c
++++ b/src/shutdown.c
+@@ -29,13 +29,9 @@
+ #include "extern.h"
+ #include "ext2_mnt.h"
+
+-#if defined __GLIBC__
+ #include <sys/quota.h>
+ #include <sys/swap.h>
+ #include <sys/reboot.h>
+-#else /* __GLIBC__ */
+-#include <linux/quota.h>
+-#endif /* __GLIBC__ */
+
+ #include <unistd.h>
+
+--
+2.37.2
+
diff --git a/meta/recipes-extended/watchdog/watchdog_5.16.bb b/meta/recipes-extended/watchdog/watchdog_5.16.bb
index 1163846ed8..5325ccafda 100644
--- a/meta/recipes-extended/watchdog/watchdog_5.16.bb
+++ b/meta/recipes-extended/watchdog/watchdog_5.16.bb
@@ -13,6 +13,7 @@ SRC_URI = "${SOURCEFORGE_MIRROR}/watchdog/watchdog-${PV}.tar.gz \
file://watchdog.init \
file://wd_keepalive.init \
file://0001-wd_keepalive.service-use-run-instead-of-var-run.patch \
+ file://0001-shutdown-Do-not-guard-sys-quota.h-sys-swap.h-and-sys.patch \
"
SRC_URI[md5sum] = "1b4f51cabc64d1bee2fce7cdd626831f"
@@ -20,17 +21,12 @@ SRC_URI[sha256sum] = "b8e7c070e1b72aee2663bdc13b5cc39f76c9232669cfbb1ac0adc7275a
# Can be dropped when the output next changes, avoids failures after
# reproducibility issues
-PR = "r1"
UPSTREAM_CHECK_URI = "http://sourceforge.net/projects/watchdog/files/watchdog/"
UPSTREAM_CHECK_REGEX = "/watchdog/(?P<pver>(\d+[\.\-_]*)+)/"
inherit autotools update-rc.d systemd pkgconfig
-DEPENDS += "libtirpc"
-CFLAGS += "-I${STAGING_INCDIR}/tirpc"
-LDFLAGS += "-ltirpc"
-
EXTRA_OECONF += " --disable-nfs "
CACHED_CONFIGUREVARS += "ac_cv_path_PATH_SENDMAIL=${sbindir}/sendmail"
diff --git a/meta/recipes-extended/wget/wget.inc b/meta/recipes-extended/wget/wget.inc
index 58cb5ca73d..51926e7296 100644
--- a/meta/recipes-extended/wget/wget.inc
+++ b/meta/recipes-extended/wget/wget.inc
@@ -7,11 +7,11 @@ FTP sites"
HOMEPAGE = "https://www.gnu.org/software/wget/"
SECTION = "console/network"
LICENSE = "GPL-3.0-only"
-LIC_FILES_CHKSUM = "file://COPYING;md5=c678957b0c8e964aa6c70fd77641a71e"
+LIC_FILES_CHKSUM = "file://COPYING;md5=6f65012d1daf98cb09b386cfb68df26b"
inherit autotools gettext texinfo update-alternatives pkgconfig
-DEPENDS += "autoconf-archive"
+DEPENDS += "autoconf-archive-native"
EXTRA_OECONF = "--without-libgnutls-prefix --without-libssl-prefix \
--disable-rpath"
diff --git a/meta/recipes-extended/wget/wget/0002-improve-reproducibility.patch b/meta/recipes-extended/wget/wget/0002-improve-reproducibility.patch
index 050fc2c7e1..5438bafdcb 100644
--- a/meta/recipes-extended/wget/wget/0002-improve-reproducibility.patch
+++ b/meta/recipes-extended/wget/wget/0002-improve-reproducibility.patch
@@ -1,4 +1,4 @@
-From 7f1357529d23b356b45fbb0dd7388588162e4cb8 Mon Sep 17 00:00:00 2001
+From b86e57b68363d108fe77c6fd588a275d2696cabe Mon Sep 17 00:00:00 2001
From: Hongxu Jia <hongxu.jia@windriver.com>
Date: Wed, 10 Jan 2018 14:43:20 +0800
Subject: [PATCH] src/Makefile.am: improve reproducibility
@@ -44,10 +44,10 @@ Signed-off-by: Joe Slater <jslater@windriver.com>
1 file changed, 4 insertions(+)
diff --git a/src/Makefile.am b/src/Makefile.am
-index 28c0be2..44084a3 100644
+index 18ec622..38d252d 100644
--- a/src/Makefile.am
+++ b/src/Makefile.am
-@@ -87,9 +87,13 @@ version.c: $(wget_SOURCES) ../lib/libgnu.a
+@@ -108,9 +108,13 @@ version.c: $(wget_SOURCES) ../lib/libgnu.a
echo '#include "version.h"' >> $@
echo 'const char *version_string = "@VERSION@";' >> $@
echo 'const char *compilation_string = "'$(COMPILE)'";' \
@@ -61,6 +61,3 @@ index 28c0be2..44084a3 100644
| $(ESCAPEQUOTE) >> $@
css.c: $(srcdir)/css.l
---
-1.8.3.1
-
diff --git a/meta/recipes-extended/wget/wget_1.21.3.bb b/meta/recipes-extended/wget/wget_1.21.3.bb
deleted file mode 100644
index f176a1546c..0000000000
--- a/meta/recipes-extended/wget/wget_1.21.3.bb
+++ /dev/null
@@ -1,7 +0,0 @@
-SRC_URI = "${GNU_MIRROR}/wget/wget-${PV}.tar.gz \
- file://0002-improve-reproducibility.patch \
- "
-
-SRC_URI[sha256sum] = "5726bb8bc5ca0f6dc7110f6416e4bb7019e2d2ff5bf93d1ca2ffcc6656f220e5"
-
-require wget.inc
diff --git a/meta/recipes-extended/wget/wget_1.24.5.bb b/meta/recipes-extended/wget/wget_1.24.5.bb
new file mode 100644
index 0000000000..64e6ee80af
--- /dev/null
+++ b/meta/recipes-extended/wget/wget_1.24.5.bb
@@ -0,0 +1,7 @@
+SRC_URI = "${GNU_MIRROR}/wget/wget-${PV}.tar.gz \
+ file://0002-improve-reproducibility.patch \
+ "
+
+SRC_URI[sha256sum] = "fa2dc35bab5184ecbc46a9ef83def2aaaa3f4c9f3c97d4bd19dcb07d4da637de"
+
+require wget.inc
diff --git a/meta/recipes-extended/which/which_2.21.bb b/meta/recipes-extended/which/which_2.21.bb
index c8a50735b1..77861370e5 100644
--- a/meta/recipes-extended/which/which_2.21.bb
+++ b/meta/recipes-extended/which/which_2.21.bb
@@ -13,7 +13,6 @@ DEPENDS = "cwautomacros-native"
inherit autotools texinfo update-alternatives
-PR = "r3"
EXTRA_OECONF = "--disable-iberty"
diff --git a/meta/recipes-extended/xdg-utils/xdg-utils/CVE-2022-4055.patch b/meta/recipes-extended/xdg-utils/xdg-utils/CVE-2022-4055.patch
new file mode 100644
index 0000000000..b236030108
--- /dev/null
+++ b/meta/recipes-extended/xdg-utils/xdg-utils/CVE-2022-4055.patch
@@ -0,0 +1,145 @@
+xdg-email does not parse mailto uris properly for thunderbird
+
+When using thunderbird as mailto handler xdg-email translates mailto uris into an 'thunderbird -compose' argument. While to, cc and bcc values are properly enclosed in single quotes this is not the case for subject or body. This breaks functionality and allows to use all thunderbird -compose arguments within a mailto uri, e.g.
+
+xdg-email 'mailto:test@example.com?subject=Test,attachment=~/.thunderbird/profiles.ini,message=/home/test/test.txt'
+
+translates into
+
+thunderbird -compose to='test@example.com,',subject=Test,attachment=~/.thunderbird/profiles.ini,message=/home/test/test.txt
+
+with working attachment and message. (And, yes, ~ expands to the home directory.)
+
+Upstream-Status: Submitted [https://gitlab.freedesktop.org/xdg/xdg-utils/-/issues/205]
+
+Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
+
+CVE: CVE-2022-4055
+
+
+Index: xdg-utils-1.1.3/scripts/xdg-email.in
+===================================================================
+--- xdg-utils-1.1.3.orig/scripts/xdg-email.in
++++ xdg-utils-1.1.3/scripts/xdg-email.in
+@@ -30,53 +30,6 @@ _USAGE
+
+ #@xdg-utils-common@
+
+-run_thunderbird()
+-{
+- local THUNDERBIRD MAILTO NEWMAILTO TO CC BCC SUBJECT BODY
+- THUNDERBIRD="$1"
+- MAILTO=$(echo "$2" | sed 's/^mailto://')
+- echo "$MAILTO" | grep -qs "^?"
+- if [ "$?" = "0" ] ; then
+- MAILTO=$(echo "$MAILTO" | sed 's/^?//')
+- else
+- MAILTO=$(echo "$MAILTO" | sed 's/^/to=/' | sed 's/?/\&/')
+- fi
+-
+- MAILTO=$(echo "$MAILTO" | sed 's/&/\n/g')
+- TO=$(/bin/echo -e $(echo "$MAILTO" | grep '^to=' | sed 's/^to=//;s/%\(..\)/\\x\1/g' | awk '{ printf "%s,",$0 }'))
+- CC=$(/bin/echo -e $(echo "$MAILTO" | grep '^cc=' | sed 's/^cc=//;s/%\(..\)/\\x\1/g' | awk '{ printf "%s,",$0 }'))
+- BCC=$(/bin/echo -e $(echo "$MAILTO" | grep '^bcc=' | sed 's/^bcc=//;s/%\(..\)/\\x\1/g' | awk '{ printf "%s,",$0 }'))
+- SUBJECT=$(echo "$MAILTO" | grep '^subject=' | tail -n 1)
+- BODY=$(echo "$MAILTO" | grep '^body=' | tail -n 1)
+-
+- if [ -z "$TO" ] ; then
+- NEWMAILTO=
+- else
+- NEWMAILTO="to='$TO'"
+- fi
+- if [ -n "$CC" ] ; then
+- NEWMAILTO="${NEWMAILTO},cc='$CC'"
+- fi
+- if [ -n "$BCC" ] ; then
+- NEWMAILTO="${NEWMAILTO},bcc='$BCC'"
+- fi
+- if [ -n "$SUBJECT" ] ; then
+- NEWMAILTO="${NEWMAILTO},$SUBJECT"
+- fi
+- if [ -n "$BODY" ] ; then
+- NEWMAILTO="${NEWMAILTO},$BODY"
+- fi
+-
+- NEWMAILTO=$(echo "$NEWMAILTO" | sed 's/^,//')
+- DEBUG 1 "Running $THUNDERBIRD -compose \"$NEWMAILTO\""
+- "$THUNDERBIRD" -compose "$NEWMAILTO"
+- if [ $? -eq 0 ]; then
+- exit_success
+- else
+- exit_failure_operation_failed
+- fi
+-}
+-
+ open_kde()
+ {
+ if [ -n "$KDE_SESSION_VERSION" ] && [ "$KDE_SESSION_VERSION" -ge 5 ]; then
+@@ -130,15 +83,6 @@ open_kde()
+
+ open_gnome3()
+ {
+- local client
+- local desktop
+- desktop=`xdg-mime query default "x-scheme-handler/mailto"`
+- client=`desktop_file_to_binary "$desktop"`
+- echo $client | grep -E 'thunderbird|icedove' > /dev/null 2>&1
+- if [ $? -eq 0 ] ; then
+- run_thunderbird "$client" "$1"
+- fi
+-
+ if gio help open 2>/dev/null 1>&2; then
+ DEBUG 1 "Running gio open \"$1\""
+ gio open "$1"
+@@ -159,13 +103,6 @@ open_gnome3()
+
+ open_gnome()
+ {
+- local client
+- client=`gconftool-2 --get /desktop/gnome/url-handlers/mailto/command | cut -d ' ' -f 1` || ""
+- echo $client | grep -E 'thunderbird|icedove' > /dev/null 2>&1
+- if [ $? -eq 0 ] ; then
+- run_thunderbird "$client" "$1"
+- fi
+-
+ if gio help open 2>/dev/null 1>&2; then
+ DEBUG 1 "Running gio open \"$1\""
+ gio open "$1"
+@@ -231,15 +168,6 @@ open_flatpak()
+
+ open_generic()
+ {
+- local client
+- local desktop
+- desktop=`xdg-mime query default "x-scheme-handler/mailto"`
+- client=`desktop_file_to_binary "$desktop"`
+- echo $client | grep -E 'thunderbird|icedove' > /dev/null 2>&1
+- if [ $? -eq 0 ] ; then
+- run_thunderbird "$client" "$1"
+- fi
+-
+ xdg-open "$1"
+ local ret=$?
+
+@@ -364,21 +292,6 @@ while [ $# -gt 0 ] ; do
+ shift
+ ;;
+
+- --attach)
+- if [ -z "$1" ] ; then
+- exit_failure_syntax "file argument missing for --attach option"
+- fi
+- check_input_file "$1"
+- file=`readlink -f "$1"` # Normalize path
+- if [ -z "$file" ] || [ ! -f "$file" ] ; then
+- exit_failure_file_missing "file '$1' does not exist"
+- fi
+-
+- url_encode "$file"
+- options="${options}attach=${result}&"
+- shift
+- ;;
+-
+ -*)
+ exit_failure_syntax "unexpected option '$parm'"
+ ;;
diff --git a/meta/recipes-extended/xdg-utils/xdg-utils_1.1.3.bb b/meta/recipes-extended/xdg-utils/xdg-utils_1.1.3.bb
index 73acf6b744..4d93180535 100644
--- a/meta/recipes-extended/xdg-utils/xdg-utils_1.1.3.bb
+++ b/meta/recipes-extended/xdg-utils/xdg-utils_1.1.3.bb
@@ -21,6 +21,7 @@ SRC_URI = "https://portland.freedesktop.org/download/${BPN}-${PV}.tar.gz \
file://0001-Reinstate-xdg-terminal.patch \
file://0001-Don-t-build-the-in-script-manual.patch \
file://1f199813e0eb0246f63b54e9e154970e609575af.patch \
+ file://CVE-2022-4055.patch \
"
SRC_URI[md5sum] = "902042508b626027a3709d105f0b63ff"
diff --git a/meta/recipes-extended/xinetd/xinetd_2.3.15.4.bb b/meta/recipes-extended/xinetd/xinetd_2.3.15.4.bb
index 62ee70d244..72eb1ae067 100644
--- a/meta/recipes-extended/xinetd/xinetd_2.3.15.4.bb
+++ b/meta/recipes-extended/xinetd/xinetd_2.3.15.4.bb
@@ -18,7 +18,7 @@ SRCREV = "6a4af7786630ce48747d9687e2f18f45ea6684c4"
S = "${WORKDIR}/git"
# https://github.com/xinetd-org/xinetd/pull/10 is merged into this git tree revision
-CVE_CHECK_IGNORE += "CVE-2013-4342"
+CVE_STATUS[CVE-2013-4342] = "fixed-version: Fixed directly in git tree revision"
inherit autotools update-rc.d systemd pkgconfig
@@ -30,6 +30,8 @@ INITSCRIPT_PARAMS = "defaults"
PACKAGECONFIG ??= "tcp-wrappers"
PACKAGECONFIG[tcp-wrappers] = "--with-libwrap,,tcp-wrappers"
+CFLAGS += "-D_GNU_SOURCE"
+
CONFFILES:${PN} = "${sysconfdir}/xinetd.conf"
do_install:append() {
@@ -46,4 +48,7 @@ do_install:append() {
${D}${systemd_system_unitdir}/xinetd.service
}
-RDEPENDS:${PN} += "perl"
+# Script for converting inetd.conf files into xinetd.conf files
+PACKAGES =+ "${PN}-xconv"
+FILES:${PN}-xconv = "${bindir}/xconv.pl"
+RDEPENDS:${PN}-xconv += "perl"
diff --git a/meta/recipes-extended/xz/xz/CVE-2022-1271.patch b/meta/recipes-extended/xz/xz/CVE-2022-1271.patch
deleted file mode 100644
index e43e73cf12..0000000000
--- a/meta/recipes-extended/xz/xz/CVE-2022-1271.patch
+++ /dev/null
@@ -1,96 +0,0 @@
-From dc932a1e9c0d9f1db71be11a9b82496e3a72f112 Mon Sep 17 00:00:00 2001
-From: Lasse Collin <lasse.collin@tukaani.org>
-Date: Tue, 29 Mar 2022 19:19:12 +0300
-Subject: [PATCH] xzgrep: Fix escaping of malicious filenames (ZDI-CAN-16587).
-
-Malicious filenames can make xzgrep to write to arbitrary files
-or (with a GNU sed extension) lead to arbitrary code execution.
-
-xzgrep from XZ Utils versions up to and including 5.2.5 are
-affected. 5.3.1alpha and 5.3.2alpha are affected as well.
-This patch works for all of them.
-
-This bug was inherited from gzip's zgrep. gzip 1.12 includes
-a fix for zgrep.
-
-The issue with the old sed script is that with multiple newlines,
-the N-command will read the second line of input, then the
-s-commands will be skipped because it's not the end of the
-file yet, then a new sed cycle starts and the pattern space
-is printed and emptied. So only the last line or two get escaped.
-
-One way to fix this would be to read all lines into the pattern
-space first. However, the included fix is even simpler: All lines
-except the last line get a backslash appended at the end. To ensure
-that shell command substitution doesn't eat a possible trailing
-newline, a colon is appended to the filename before escaping.
-The colon is later used to separate the filename from the grep
-output so it is fine to add it here instead of a few lines later.
-
-The old code also wasn't POSIX compliant as it used \n in the
-replacement section of the s-command. Using \<newline> is the
-POSIX compatible method.
-
-LC_ALL=C was added to the two critical sed commands. POSIX sed
-manual recommends it when using sed to manipulate pathnames
-because in other locales invalid multibyte sequences might
-cause issues with some sed implementations. In case of GNU sed,
-these particular sed scripts wouldn't have such problems but some
-other scripts could have, see:
-
- info '(sed)Locale Considerations'
-
-This vulnerability was discovered by:
-cleemy desu wayo working with Trend Micro Zero Day Initiative
-
-Thanks to Jim Meyering and Paul Eggert discussing the different
-ways to fix this and for coordinating the patch release schedule
-with gzip.
-
-Upstream-Status: Backport [https://tukaani.org/xz/xzgrep-ZDI-CAN-16587.patch]
-CVE: CVE-2022-1271
-
-Signed-off-by: Ralph Siemsen <ralph.siemsen@linaro.org>
----
- src/scripts/xzgrep.in | 20 ++++++++++++--------
- 1 file changed, 12 insertions(+), 8 deletions(-)
-
-diff --git a/src/scripts/xzgrep.in b/src/scripts/xzgrep.in
-index 9db5c3a..f64dddb 100644
---- a/src/scripts/xzgrep.in
-+++ b/src/scripts/xzgrep.in
-@@ -179,22 +179,26 @@ for i; do
- { test $# -eq 1 || test $no_filename -eq 1; }; then
- eval "$grep"
- else
-+ # Append a colon so that the last character will never be a newline
-+ # which would otherwise get lost in shell command substitution.
-+ i="$i:"
-+
-+ # Escape & \ | and newlines only if such characters are present
-+ # (speed optimization).
- case $i in
- (*'
- '* | *'&'* | *'\'* | *'|'*)
-- i=$(printf '%s\n' "$i" |
-- sed '
-- $!N
-- $s/[&\|]/\\&/g
-- $s/\n/\\n/g
-- ');;
-+ i=$(printf '%s\n' "$i" | LC_ALL=C sed 's/[&\|]/\\&/g; $!s/$/\\/');;
- esac
-- sed_script="s|^|$i:|"
-+
-+ # $i already ends with a colon so don't add it here.
-+ sed_script="s|^|$i|"
-
- # Fail if grep or sed fails.
- r=$(
- exec 4>&1
-- (eval "$grep" 4>&-; echo $? >&4) 3>&- | sed "$sed_script" >&3 4>&-
-+ (eval "$grep" 4>&-; echo $? >&4) 3>&- |
-+ LC_ALL=C sed "$sed_script" >&3 4>&-
- ) || r=2
- exit $r
- fi >&3 5>&-
diff --git a/meta/recipes-extended/xz/xz/run-ptest b/meta/recipes-extended/xz/xz/run-ptest
new file mode 100644
index 0000000000..cc8ba92016
--- /dev/null
+++ b/meta/recipes-extended/xz/xz/run-ptest
@@ -0,0 +1,26 @@
+#!/bin/bash
+
+ptestdir=$(dirname "$(readlink -f "$0")")
+cd "$ptestdir"/tests || exit
+
+# executables test cases
+tests_exec=$(file $(ls test_*) | grep "ELF" | awk -F: '{print $1}')
+for i in ${tests_exec}; do
+ ./$i
+done
+
+# POSIX shell script test cases
+export srcdir=${ptestdir}/tests
+test_shell="test_files.sh test_scripts.sh test_compress.sh"
+for j in ${test_shell}; do
+ if [ $j == "test_compress.sh" ]; then
+ ./$j create_compress_files
+ else
+ ./$j
+ fi
+ if [ $? -eq 0 ]; then
+ echo "PASS: $j"
+ else
+ echo "FAIL: $j"
+ fi
+done
diff --git a/meta/recipes-extended/xz/xz_5.2.5.bb b/meta/recipes-extended/xz/xz_5.2.5.bb
deleted file mode 100644
index 720e070f4a..0000000000
--- a/meta/recipes-extended/xz/xz_5.2.5.bb
+++ /dev/null
@@ -1,47 +0,0 @@
-SUMMARY = "Utilities for managing LZMA compressed files"
-HOMEPAGE = "https://tukaani.org/xz/"
-DESCRIPTION = "XZ Utils is free general-purpose data compression software with a high compression ratio. XZ Utils were written for POSIX-like systems, but also work on some not-so-POSIX systems. XZ Utils are the successor to LZMA Utils."
-SECTION = "base"
-
-# The source includes bits of PD, GPL-2.0, GPL-3.0, LGPL-2.1-or-later, but the
-# only file which is GPL-3.0 is an m4 macro which isn't shipped in any of our
-# packages, and the LGPL bits are under lib/, which appears to be used for
-# libgnu, which appears to be used for DOS builds. So we're left with
-# GPL-2.0-or-later and PD.
-LICENSE = "GPL-2.0-or-later & GPL-3.0-with-autoconf-exception & LGPL-2.1-or-later & PD"
-LICENSE:${PN} = "GPL-2.0-or-later"
-LICENSE:${PN}-dev = "GPL-2.0-or-later"
-LICENSE:${PN}-staticdev = "GPL-2.0-or-later"
-LICENSE:${PN}-doc = "GPL-2.0-or-later"
-LICENSE:${PN}-dbg = "GPL-2.0-or-later"
-LICENSE:${PN}-locale = "GPL-2.0-or-later"
-LICENSE:liblzma = "PD"
-
-LIC_FILES_CHKSUM = "file://COPYING;md5=97d554a32881fee0aa283d96e47cb24a \
- file://COPYING.GPLv2;md5=b234ee4d69f5fce4486a80fdaf4a4263 \
- file://COPYING.GPLv3;md5=d32239bcb673463ab874e80d47fae504 \
- file://COPYING.LGPLv2.1;md5=4fbd65380cdd255951079008b364516c \
- file://lib/getopt.c;endline=23;md5=2069b0ee710572c03bb3114e4532cd84 \
- "
-
-SRC_URI = "https://tukaani.org/xz/xz-${PV}.tar.gz \
- file://CVE-2022-1271.patch \
- "
-SRC_URI[md5sum] = "0d270c997aff29708c74d53f599ef717"
-SRC_URI[sha256sum] = "f6f4910fd033078738bd82bfba4f49219d03b17eb0794eb91efbae419f4aba10"
-UPSTREAM_CHECK_REGEX = "xz-(?P<pver>\d+(\.\d+)+)\.tar"
-
-CACHED_CONFIGUREVARS += "gl_cv_posix_shell=/bin/sh"
-
-inherit autotools gettext
-
-PACKAGES =+ "liblzma"
-
-FILES:liblzma = "${libdir}/liblzma*${SOLIBS}"
-
-inherit update-alternatives
-ALTERNATIVE_PRIORITY = "100"
-ALTERNATIVE:${PN} = "xz xzcat unxz \
- lzma lzcat unlzma"
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-extended/xz/xz_5.4.6.bb b/meta/recipes-extended/xz/xz_5.4.6.bb
new file mode 100644
index 0000000000..da3b75a10b
--- /dev/null
+++ b/meta/recipes-extended/xz/xz_5.4.6.bb
@@ -0,0 +1,69 @@
+SUMMARY = "Utilities for managing LZMA compressed files"
+HOMEPAGE = "https://tukaani.org/xz/"
+DESCRIPTION = "XZ Utils is free general-purpose data compression software with a high compression ratio. XZ Utils were written for POSIX-like systems, but also work on some not-so-POSIX systems. XZ Utils are the successor to LZMA Utils."
+SECTION = "base"
+
+# The source includes bits of PD, GPL-2.0, GPL-3.0, LGPL-2.1-or-later, but the
+# only file which is GPL-3.0 is an m4 macro which isn't shipped in any of our
+# packages, and the LGPL bits are under lib/, which appears to be used for
+# libgnu, which appears to be used for DOS builds. So we're left with
+# GPL-2.0-or-later and PD.
+LICENSE = "GPL-2.0-or-later & GPL-3.0-with-autoconf-exception & LGPL-2.1-or-later & PD"
+LICENSE:${PN} = "GPL-2.0-or-later"
+LICENSE:${PN}-dev = "GPL-2.0-or-later"
+LICENSE:${PN}-staticdev = "GPL-2.0-or-later"
+LICENSE:${PN}-doc = "GPL-2.0-or-later"
+LICENSE:${PN}-dbg = "GPL-2.0-or-later"
+LICENSE:${PN}-locale = "GPL-2.0-or-later"
+LICENSE:liblzma = "PD"
+
+LIC_FILES_CHKSUM = "file://COPYING;md5=d4378ea9d5d1fc9ab0ae10d7948827d9 \
+ file://COPYING.GPLv2;md5=b234ee4d69f5fce4486a80fdaf4a4263 \
+ file://COPYING.GPLv3;md5=1ebbd3e34237af26da5dc08a4e440464 \
+ file://COPYING.LGPLv2.1;md5=4fbd65380cdd255951079008b364516c \
+ file://lib/getopt.c;endline=23;md5=2069b0ee710572c03bb3114e4532cd84 \
+ "
+
+SRC_URI = "https://github.com/tukaani-project/xz/releases/download/v${PV}/xz-${PV}.tar.gz \
+ file://run-ptest \
+ "
+SRC_URI[sha256sum] = "aeba3e03bf8140ddedf62a0a367158340520f6b384f75ca6045ccc6c0d43fd5c"
+UPSTREAM_CHECK_REGEX = "releases/tag/v(?P<pver>\d+(\.\d+)+)"
+UPSTREAM_CHECK_URI = "https://github.com/tukaani-project/xz/releases/"
+
+CACHED_CONFIGUREVARS += "gl_cv_posix_shell=/bin/sh"
+
+inherit autotools gettext ptest
+
+PACKAGES =+ "liblzma"
+
+FILES:liblzma = "${libdir}/liblzma*${SOLIBS}"
+
+inherit update-alternatives
+ALTERNATIVE_PRIORITY = "100"
+ALTERNATIVE:${PN} = "xz xzcat unxz \
+ lzma lzcat unlzma"
+
+BBCLASSEXTEND = "native nativesdk"
+
+RDEPENDS:${PN}-ptest += "bash file"
+
+do_compile_ptest() {
+ oe_runmake check TESTS=
+}
+
+do_install_ptest () {
+ install -d ${D}${PTEST_PATH}/tests
+ find ${B}/tests/.libs -type f -executable -exec cp {} ${D}${PTEST_PATH}/tests \;
+ cp ${B}/config.h ${D}${PTEST_PATH}
+ for i in files xzgrep_expected_output test_files.sh test_scripts.sh test_compress.sh; do
+ cp -r ${S}/tests/$i ${D}${PTEST_PATH}/tests
+ done
+ mkdir -p ${D}${PTEST_PATH}/src/xz
+ ln -s ${bindir}/xz ${D}${PTEST_PATH}/src/xz/xz
+ mkdir -p ${D}${PTEST_PATH}/src/xzdec
+ ln -s ${bindir}/xzdec ${D}${PTEST_PATH}/src/xzdec/xzdec
+ mkdir -p ${D}${PTEST_PATH}/src/scripts
+ ln -s ${bindir}/xzdiff ${D}${PTEST_PATH}/src/scripts/xzdiff
+ ln -s ${bindir}/xzgrep ${D}${PTEST_PATH}/src/scripts/xzgrep
+}
diff --git a/meta/recipes-extended/zip/zip-3.0/0001-configure-Specify-correct-function-signatures-and-de.patch b/meta/recipes-extended/zip/zip-3.0/0001-configure-Specify-correct-function-signatures-and-de.patch
new file mode 100644
index 0000000000..a4f8382625
--- /dev/null
+++ b/meta/recipes-extended/zip/zip-3.0/0001-configure-Specify-correct-function-signatures-and-de.patch
@@ -0,0 +1,134 @@
+From 8810f2643c9372a8083272dc1fc157427646d961 Mon Sep 17 00:00:00 2001
+From: Khem Raj <raj.khem@gmail.com>
+Date: Wed, 10 Aug 2022 17:16:23 -0700
+Subject: [PATCH 1/2] configure: Specify correct function signatures and
+ declarations
+
+Include needed system headers in configure tests, this is needed because
+newer compilers are getting stricter about the C99 specs and turning
+-Wimplicit-function-declaration into hard error e.g. clang-15+
+
+Upstream-Status: Inactive-Upstream
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+---
+ unix/configure | 79 +++++++++++++++++++++++++++++++++++++++++---------
+ 1 file changed, 66 insertions(+), 13 deletions(-)
+
+diff --git a/unix/configure b/unix/configure
+index 1d9a9bb..f2b3d02 100644
+--- a/unix/configure
++++ b/unix/configure
+@@ -513,21 +513,70 @@ $CC $CFLAGS -c conftest.c >/dev/null 2>/dev/null
+ # Check for missing functions
+ # add NO_'function_name' to flags if missing
+
+-for func in rmdir strchr strrchr rename mktemp mktime mkstemp
+-do
+- echo Check for $func
+- echo "int main(){ $func(); return 0; }" > conftest.c
+- $CC $CFLAGS $LDFLAGS $BFLAG -o conftest conftest.c >/dev/null 2>/dev/null
+- [ $? -ne 0 ] && CFLAGS="${CFLAGS} -DNO_`echo $func | tr '[a-z]' '[A-Z]'`"
+-done
++echo Check for rmdir
++cat > conftest.c << _EOF_
++#include <unistd.h>
++int main(){ rmdir(NULL); return 0; }
++_EOF_
++$CC $CFLAGS $LDFLAGS -o conftest conftest.c >/dev/null 2>/dev/null
++[ $? -ne 0 ] && CFLAGS="${CFLAGS} -DNO_RMDIR"
++
++echo Check for strchr
++cat > conftest.c << _EOF_
++#include <string.h>
++int main(){ strchr(NULL,0); return 0; }
++_EOF_
++$CC $CFLAGS $LDFLAGS -o conftest conftest.c >/dev/null 2>/dev/null
++[ $? -ne 0 ] && CFLAGS="${CFLAGS} -DNO_STRCHR"
+
++echo Check for strrchr
++cat > conftest.c << _EOF_
++#include <string.h>
++int main(){ strrchr(NULL,0); return 0; }
++_EOF_
++$CC $CFLAGS $LDFLAGS -o conftest conftest.c >/dev/null 2>/dev/null
++[ $? -ne 0 ] && CFLAGS="${CFLAGS} -DNO_STRRCHR"
++
++echo Check for rename
++cat > conftest.c << _EOF_
++#include <stdio.h>
++int main(){ rename(NULL,NULL); return 0; }
++_EOF_
++$CC $CFLAGS $LDFLAGS -o conftest conftest.c >/dev/null 2>/dev/null
++[ $? -ne 0 ] && CFLAGS="${CFLAGS} -DNO_RENAME"
++
++echo Check for mktemp
++cat > conftest.c << _EOF_
++#include <stdlib.h>
++int main(){ mktemp(NULL); return 0; }
++_EOF_
++$CC $CFLAGS $LDFLAGS -o conftest conftest.c >/dev/null 2>/dev/null
++[ $? -ne 0 ] && CFLAGS="${CFLAGS} -DNO_MKTEMP"
++
++echo Check for mktime
++cat > conftest.c << _EOF_
++#include <time.h>
++int main(){ mktime(NULL); return 0; }
++_EOF_
++$CC $CFLAGS $LDFLAGS -o conftest conftest.c >/dev/null 2>/dev/null
++[ $? -ne 0 ] && CFLAGS="${CFLAGS} -DNO_MKTIME"
++
++echo Check for mkstemp
++cat > conftest.c << _EOF_
++#include <stdlib.h>
++int main(){ return mkstemp(NULL); }
++_EOF_
++$CC $CFLAGS $LDFLAGS -o conftest conftest.c >/dev/null 2>/dev/null
++[ $? -ne 0 ] && CFLAGS="${CFLAGS} -DNO_MKSTEMP"
+
+ echo Check for memset
+-echo "int main(){ char k; memset(&k,0,0); return 0; }" > conftest.c
++cat > conftest.c << _EOF_
++#include <string.h>
++int main(){ char k; memset(&k,0,0); return 0; }
++_EOF_
+ $CC $CFLAGS $LDFLAGS -o conftest conftest.c >/dev/null 2>/dev/null
+ [ $? -ne 0 ] && CFLAGS="${CFLAGS} -DZMEM"
+
+-
+ echo Check for memmove
+ cat > conftest.c << _EOF_
+ #include <string.h>
+@@ -548,7 +597,7 @@ $CC $CFLAGS $LDFLAGS -o conftest conftest.c >/dev/null 2>/dev/null
+ echo Check for errno declaration
+ cat > conftest.c << _EOF_
+ #include <errno.h>
+-main()
++int main()
+ {
+ errno = 0;
+ return 0;
+@@ -625,14 +674,18 @@ CFLAGS="${CFLAGS} ${OPT}"
+
+ echo Check for valloc
+ cat > conftest.c << _EOF_
+-main()
++#include <stdlib.h>
++int main()
+ {
+ #ifdef MMAP
+- valloc();
++ valloc(0);
+ #endif
++ return 0;
+ }
+ _EOF_
+-$CC ${CFLAGS} -c conftest.c > /dev/null 2>/dev/null
++#$CC ${CFLAGS} -c conftest.c > /dev/null 2>/dev/null
++$CC ${CFLAGS} -c conftest.c
++echo "==========================================="
+ [ $? -ne 0 ] && CFLAGS="${CFLAGS} -DNO_VALLOC"
+
+
+--
+2.37.1
+
diff --git a/meta/recipes-extended/zip/zip-3.0/0001-configure-Use-CFLAGS-and-LDFLAGS-when-doing-link-tes.patch b/meta/recipes-extended/zip/zip-3.0/0001-configure-Use-CFLAGS-and-LDFLAGS-when-doing-link-tes.patch
index 475a653f28..92d0d5db58 100644
--- a/meta/recipes-extended/zip/zip-3.0/0001-configure-Use-CFLAGS-and-LDFLAGS-when-doing-link-tes.patch
+++ b/meta/recipes-extended/zip/zip-3.0/0001-configure-Use-CFLAGS-and-LDFLAGS-when-doing-link-tes.patch
@@ -8,7 +8,7 @@ linking, link fails otherwise without them, which can result in
configure detection go wrong, ensure these flags are used along with CC
when tests involve linking
-Upstream-Status: Pending
+Upstream-Status: Inactive-Upstream
Signed-off-by: Khem Raj <raj.khem@gmail.com>
---
unix/configure | 16 ++++++++--------
diff --git a/meta/recipes-extended/zip/zip-3.0/0001-unix-configure-use-_Static_assert-to-do-correct-dete.patch b/meta/recipes-extended/zip/zip-3.0/0001-unix-configure-use-_Static_assert-to-do-correct-dete.patch
new file mode 100644
index 0000000000..106f246a7c
--- /dev/null
+++ b/meta/recipes-extended/zip/zip-3.0/0001-unix-configure-use-_Static_assert-to-do-correct-dete.patch
@@ -0,0 +1,96 @@
+From 9916fc6f1f93f3e092e3c6937c30dc8137c26d34 Mon Sep 17 00:00:00 2001
+From: Chen Qi <Qi.Chen@windriver.com>
+Date: Thu, 15 Jun 2023 18:31:26 +0800
+Subject: [PATCH] unix/configure: use _Static_assert to do correct detection
+
+We're doing cross compilation, running a cross-compiled problem
+on host to detemine feature is not correct. Use _Static_assert
+to do the detection correctly.
+
+Upstream-Status: Inactive-Upstream
+
+Signed-off-by: Chen Qi <Qi.Chen@windriver.com>
+---
+ unix/configure | 42 ++++++++++++------------------------------
+ 1 file changed, 12 insertions(+), 30 deletions(-)
+
+diff --git a/unix/configure b/unix/configure
+index f2b3d02..f917086 100644
+--- a/unix/configure
++++ b/unix/configure
+@@ -361,6 +361,10 @@ cat > conftest.c << _EOF_
+ #include <sys/stat.h>
+ #include <unistd.h>
+ #include <stdio.h>
++
++_Static_assert(sizeof((struct stat){0}.st_uid) == 2, "sizeof st_uid is not 16 bit");
++_Static_assert(sizeof((struct stat){0}.st_gid) == 2, "sizeof st_gid is not 16 bit");
++
+ int main()
+ {
+ struct stat s;
+@@ -385,21 +389,7 @@ if [ $? -ne 0 ]; then
+ echo -- UID/GID test failed on compile - disabling old 16-bit UID/GID support
+ CFLAGS="${CFLAGS} -DUIDGID_NOT_16BIT"
+ else
+-# run it
+- ./conftest
+- r=$?
+- if [ $r -eq 1 ]; then
+- echo -- UID not 2 bytes - disabling old 16-bit UID/GID support
+- CFLAGS="${CFLAGS} -DUIDGID_NOT_16BIT"
+- elif [ $r -eq 2 ]; then
+- echo -- GID not 2 bytes - disabling old 16-bit UID/GID support
+- CFLAGS="${CFLAGS} -DUIDGID_NOT_16BIT"
+- elif [ $r -eq 3 ]; then
+- echo -- 16-bit UIDs and GIDs - keeping old 16-bit UID/GID support
+- else
+- echo -- test failed - conftest returned $r - disabling old 16-bit UID/GID support
+- CFLAGS="${CFLAGS} -DUIDGID_NOT_16BIT"
+- fi
++ echo -- 16-bit UIDs and GIDs - keeping old 16-bit UID/GID support
+ fi
+
+
+@@ -417,6 +407,10 @@ cat > conftest.c << _EOF_
+ #include <sys/stat.h>
+ #include <unistd.h>
+ #include <stdio.h>
++
++_Static_assert(sizeof(off_t) < 8, "sizeof off_t < 8 failed");
++_Static_assert(sizeof((struct stat){0}.st_size) < 8, "sizeof st_size < 8 failed");
++
+ int main()
+ {
+ off_t offset;
+@@ -436,24 +430,12 @@ _EOF_
+ # compile it
+ $CC -o conftest conftest.c >/dev/null 2>/dev/null
+ if [ $? -ne 0 ]; then
+- echo -- no Large File Support
++ echo -- yes we have Large File Support!
++ CFLAGS="${CFLAGS} -DLARGE_FILE_SUPPORT"
+ else
+-# run it
+- ./conftest
+- r=$?
+- if [ $r -eq 1 ]; then
+- echo -- no Large File Support - no 64-bit off_t
+- elif [ $r -eq 2 ]; then
+- echo -- no Large File Support - no 64-bit stat
+- elif [ $r -eq 3 ]; then
+- echo -- yes we have Large File Support!
+- CFLAGS="${CFLAGS} -DLARGE_FILE_SUPPORT"
+- else
+- echo -- no Large File Support - conftest returned $r
+- fi
++ echo -- no Large File Support
+ fi
+
+-
+ # Check for wide char for Unicode support
+ # Added 11/24/2005 EG
+
+--
+2.34.1
+
diff --git a/meta/recipes-extended/zip/zip-3.0/0002-unix.c-Do-not-redefine-DIR-as-FILE.patch b/meta/recipes-extended/zip/zip-3.0/0002-unix.c-Do-not-redefine-DIR-as-FILE.patch
new file mode 100644
index 0000000000..a86e03e620
--- /dev/null
+++ b/meta/recipes-extended/zip/zip-3.0/0002-unix.c-Do-not-redefine-DIR-as-FILE.patch
@@ -0,0 +1,35 @@
+From 76f5bf3546d826dcbc03acbefcf0b10b972bf136 Mon Sep 17 00:00:00 2001
+From: Khem Raj <raj.khem@gmail.com>
+Date: Wed, 10 Aug 2022 17:19:38 -0700
+Subject: [PATCH 2/2] unix.c: Do not redefine DIR as FILE
+
+DIR is already provided on Linux via
+/usr/include/dirent.h system header
+
+Upstream-Status: Inactive-Upstream
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+---
+ unix/unix.c | 2 --
+ 1 file changed, 2 deletions(-)
+
+diff --git a/unix/unix.c b/unix/unix.c
+index ba87614..6e6f4d2 100644
+--- a/unix/unix.c
++++ b/unix/unix.c
+@@ -61,13 +61,11 @@ local time_t label_utim = 0;
+ /* Local functions */
+ local char *readd OF((DIR *));
+
+-
+ #ifdef NO_DIR /* for AT&T 3B1 */
+ #include <sys/dir.h>
+ #ifndef dirent
+ # define dirent direct
+ #endif
+-typedef FILE DIR;
+ /*
+ ** Apparently originally by Rich Salz.
+ ** Cleaned up and modified by James W. Birdsall.
+--
+2.37.1
+
diff --git a/meta/recipes-extended/zip/zip-3.0/10-remove-build-date.patch b/meta/recipes-extended/zip/zip-3.0/10-remove-build-date.patch
index 244ddea363..6fd04df1c6 100644
--- a/meta/recipes-extended/zip/zip-3.0/10-remove-build-date.patch
+++ b/meta/recipes-extended/zip/zip-3.0/10-remove-build-date.patch
@@ -2,7 +2,7 @@ From: Santiago Vila <sanvila@debian.org>
Subject: Remove (optional) build date to make the build reproducible
Bug-Debian: http://bugs.debian.org/779042
-Upstream-Status: Inappropriate [no upstream]
+Upstream-Status: Inactive-Upstream [no upstream]
Signed-off-by: Joshua Watt <JPEWhacker@gmail.com>
diff --git a/meta/recipes-extended/zip/zip-3.0/fix-security-format.patch b/meta/recipes-extended/zip/zip-3.0/fix-security-format.patch
index 5cdbf22a54..f85fddbc60 100644
--- a/meta/recipes-extended/zip/zip-3.0/fix-security-format.patch
+++ b/meta/recipes-extended/zip/zip-3.0/fix-security-format.patch
@@ -14,7 +14,7 @@ zip.c:1228:5: error: format not a string literal and no format arguments [-Werro
[YOCTO #9552]
[https://bugzilla.yoctoproject.org/show_bug.cgi?id=9552]
-Upstream-Status: Inappropriate [need a new release]
+Upstream-Status: Inactive-Upstream [need a new release]
Signed-off-by: Edwin Plauchu <edwin.plauchu.camacho@intel.com>
diff --git a/meta/recipes-extended/zip/zip-3.0/zipnote-crashes-with-segfault.patch b/meta/recipes-extended/zip/zip-3.0/zipnote-crashes-with-segfault.patch
index ce6caff83e..77ade40a04 100644
--- a/meta/recipes-extended/zip/zip-3.0/zipnote-crashes-with-segfault.patch
+++ b/meta/recipes-extended/zip/zip-3.0/zipnote-crashes-with-segfault.patch
@@ -4,7 +4,7 @@ https://bugs.archlinux.org/task/47713
Signed-off-by: Jate Sujjavanich <jatedev@gmail.com>
-Upstream-Status: Inappropriate [no upstream]
+Upstream-Status: Inactive-Upstream [no upstream]
diff --git a/zipnote.c b/zipnote.c
index 5e02cb6..996f012 100644
diff --git a/meta/recipes-extended/zip/zip_3.0.bb b/meta/recipes-extended/zip/zip_3.0.bb
index 07a67b9634..70df5ab872 100644
--- a/meta/recipes-extended/zip/zip_3.0.bb
+++ b/meta/recipes-extended/zip/zip_3.0.bb
@@ -6,7 +6,6 @@ SECTION = "console/utils"
LICENSE = "BSD-3-Clause"
LIC_FILES_CHKSUM = "file://LICENSE;md5=04d43c5d70b496c032308106e26ae17d"
-PR = "r2"
S = "${WORKDIR}/zip30"
@@ -17,17 +16,17 @@ SRC_URI = "${SOURCEFORGE_MIRROR}/infozip/Zip%203.x%20%28latest%29/3.0/zip30.tar.
file://0001-configure-use-correct-CPP.patch \
file://0002-configure-support-PIC-code-build.patch \
file://0001-configure-Use-CFLAGS-and-LDFLAGS-when-doing-link-tes.patch \
+ file://0001-configure-Specify-correct-function-signatures-and-de.patch \
+ file://0002-unix.c-Do-not-redefine-DIR-as-FILE.patch \
+ file://0001-unix-configure-use-_Static_assert-to-do-correct-dete.patch \
"
UPSTREAM_VERSION_UNKNOWN = "1"
SRC_URI[md5sum] = "7b74551e63f8ee6aab6fbc86676c0d37"
SRC_URI[sha256sum] = "f0e8bb1f9b7eb0b01285495a2699df3a4b766784c1765a8f1aeedf63c0806369"
-# Disputed and also Debian doesn't consider a vulnerability
-CVE_CHECK_IGNORE += "CVE-2018-13410"
-
-# Not for zip but for smart contract implementation for it
-CVE_CHECK_IGNORE += "CVE-2018-13684"
+CVE_STATUS[CVE-2018-13410] = "disputed: Disputed and also Debian doesn't consider a vulnerability"
+CVE_STATUS[CVE-2018-13684] = "cpe-incorrect: Not for zip but for smart contract implementation for it"
# zip.inc sets CFLAGS, but what Makefile actually uses is
# CFLAGS_NOOPT. It will also force -O3 optimization, overriding
diff --git a/meta/recipes-extended/zstd/zstd/0001-pzstd-use-directly-for-the-test-c-snippet.patch b/meta/recipes-extended/zstd/zstd/0001-pzstd-use-directly-for-the-test-c-snippet.patch
new file mode 100644
index 0000000000..847a641691
--- /dev/null
+++ b/meta/recipes-extended/zstd/zstd/0001-pzstd-use-directly-for-the-test-c-snippet.patch
@@ -0,0 +1,39 @@
+From 121ef5253a49065dea6a89536ca7bd3dabd40e25 Mon Sep 17 00:00:00 2001
+From: Alexander Kanavin <alex@linutronix.de>
+Date: Mon, 19 Jun 2023 17:10:09 +0200
+Subject: [PATCH] pzstd: use c++14 without conditions
+
+Doing this check with a direct c++ snippet is prone to portability problems:
+
+- \043 is not portable between shells: dash expands it to #,
+bash does not;
+
+- using # directly works with make 4.3 but does not with make 4.2.
+
+Let's just use the c++ version that covers both the code and the gtest.
+
+Upstream-Status: Submitted [https://github.com/facebook/zstd/pull/3682]
+Signed-off-by: Alexander Kanavin <alex@linutronix.de>
+
+---
+ contrib/pzstd/Makefile | 7 ++-----
+ 1 file changed, 2 insertions(+), 5 deletions(-)
+
+diff --git a/contrib/pzstd/Makefile b/contrib/pzstd/Makefile
+index e62f8e87..58fb82a1 100644
+--- a/contrib/pzstd/Makefile
++++ b/contrib/pzstd/Makefile
+@@ -37,11 +37,8 @@ CFLAGS += -Wno-deprecated-declarations
+ PZSTD_INC = -I$(ZSTDDIR) -I$(ZSTDDIR)/common -I$(PROGDIR) -I.
+ GTEST_INC = -isystem googletest/googletest/include
+
+-# If default C++ version is older than C++11, explicitly set C++11, which is the
+-# minimum required by the code.
+-ifeq ($(shell echo "\043if __cplusplus < 201103L\n\043error\n\043endif" | $(CXX) -x c++ -Werror -c - -o /dev/null 2>/dev/null && echo 1 || echo 0),0)
+-PZSTD_CXX_STD := -std=c++11
+-endif
++# Set the minimum required by gtest
++PZSTD_CXX_STD := -std=c++14
+
+ PZSTD_CPPFLAGS = $(PZSTD_INC)
+ PZSTD_CCXXFLAGS =
diff --git a/meta/recipes-extended/zstd/zstd_1.5.2.bb b/meta/recipes-extended/zstd/zstd_1.5.2.bb
deleted file mode 100644
index 3887f31ecf..0000000000
--- a/meta/recipes-extended/zstd/zstd_1.5.2.bb
+++ /dev/null
@@ -1,43 +0,0 @@
-SUMMARY = "Zstandard - Fast real-time compression algorithm"
-DESCRIPTION = "Zstandard is a fast lossless compression algorithm, targeting \
-real-time compression scenarios at zlib-level and better compression ratios. \
-It's backed by a very fast entropy stage, provided by Huff0 and FSE library."
-HOMEPAGE = "http://www.zstd.net/"
-SECTION = "console/utils"
-
-LICENSE = "BSD-3-Clause & GPL-2.0-only"
-LIC_FILES_CHKSUM = "file://LICENSE;md5=c7f0b161edbe52f5f345a3d1311d0b32 \
- file://COPYING;md5=39bba7d2cf0ba1036f2a6e2be52fe3f0"
-
-SRC_URI = "git://github.com/facebook/zstd.git;branch=release;protocol=https"
-
-SRCREV = "e47e674cd09583ff0503f0f6defd6d23d8b718d3"
-UPSTREAM_CHECK_GITTAGREGEX = "v(?P<pver>\d+(\.\d+)+)"
-
-CVE_PRODUCT = "zstandard"
-
-S = "${WORKDIR}/git"
-
-PACKAGECONFIG ??= ""
-PACKAGECONFIG[lz4] = "HAVE_LZ4=1,HAVE_LZ4=0,lz4"
-PACKAGECONFIG[lzma] = "HAVE_LZMA=1,HAVE_LZMA=0,xz"
-PACKAGECONFIG[zlib] = "HAVE_ZLIB=1,HAVE_ZLIB=0,zlib"
-
-# See programs/README.md for how to use this
-ZSTD_LEGACY_SUPPORT ??= "4"
-
-do_compile () {
- oe_runmake ${PACKAGECONFIG_CONFARGS} ZSTD_LEGACY_SUPPORT=${ZSTD_LEGACY_SUPPORT}
- oe_runmake ${PACKAGECONFIG_CONFARGS} ZSTD_LEGACY_SUPPORT=${ZSTD_LEGACY_SUPPORT} -C contrib/pzstd
-}
-
-do_install () {
- oe_runmake install 'DESTDIR=${D}'
- oe_runmake install 'DESTDIR=${D}' PREFIX=${prefix} -C contrib/pzstd
-}
-
-PACKAGE_BEFORE_PN = "libzstd"
-
-FILES:libzstd = "${libdir}/libzstd${SOLIBS}"
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-extended/zstd/zstd_1.5.5.bb b/meta/recipes-extended/zstd/zstd_1.5.5.bb
new file mode 100644
index 0000000000..2d72af50a4
--- /dev/null
+++ b/meta/recipes-extended/zstd/zstd_1.5.5.bb
@@ -0,0 +1,47 @@
+SUMMARY = "Zstandard - Fast real-time compression algorithm"
+DESCRIPTION = "Zstandard is a fast lossless compression algorithm, targeting \
+real-time compression scenarios at zlib-level and better compression ratios. \
+It's backed by a very fast entropy stage, provided by Huff0 and FSE library."
+HOMEPAGE = "http://www.zstd.net/"
+SECTION = "console/utils"
+
+LICENSE = "BSD-3-Clause | GPL-2.0-only"
+LIC_FILES_CHKSUM = "file://LICENSE;md5=0822a32f7acdbe013606746641746ee8 \
+ file://COPYING;md5=39bba7d2cf0ba1036f2a6e2be52fe3f0 \
+ "
+
+SRC_URI = "git://github.com/facebook/zstd.git;branch=release;protocol=https \
+ file://0001-pzstd-use-directly-for-the-test-c-snippet.patch"
+
+SRCREV = "63779c798237346c2b245c546c40b72a5a5913fe"
+UPSTREAM_CHECK_GITTAGREGEX = "v(?P<pver>\d+(\.\d+)+)"
+
+CVE_PRODUCT = "zstandard"
+
+S = "${WORKDIR}/git"
+
+PACKAGECONFIG ??= ""
+PACKAGECONFIG[lz4] = "HAVE_LZ4=1,HAVE_LZ4=0,lz4"
+PACKAGECONFIG[lzma] = "HAVE_LZMA=1,HAVE_LZMA=0,xz"
+PACKAGECONFIG[zlib] = "HAVE_ZLIB=1,HAVE_ZLIB=0,zlib"
+
+# See programs/README.md for how to use this
+ZSTD_LEGACY_SUPPORT ??= "4"
+
+EXTRA_OEMAKE += "V=1"
+
+do_compile () {
+ oe_runmake ${PACKAGECONFIG_CONFARGS} ZSTD_LEGACY_SUPPORT=${ZSTD_LEGACY_SUPPORT}
+ oe_runmake ${PACKAGECONFIG_CONFARGS} ZSTD_LEGACY_SUPPORT=${ZSTD_LEGACY_SUPPORT} -C contrib/pzstd
+}
+
+do_install () {
+ oe_runmake install 'DESTDIR=${D}'
+ oe_runmake install 'DESTDIR=${D}' PREFIX=${prefix} -C contrib/pzstd
+}
+
+PACKAGE_BEFORE_PN = "libzstd"
+
+FILES:libzstd = "${libdir}/libzstd${SOLIBS}"
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-gnome/epiphany/epiphany_42.2.bb b/meta/recipes-gnome/epiphany/epiphany_42.2.bb
deleted file mode 100644
index dc1b34ac92..0000000000
--- a/meta/recipes-gnome/epiphany/epiphany_42.2.bb
+++ /dev/null
@@ -1,43 +0,0 @@
-SUMMARY = "WebKit based web browser for GNOME"
-DESCRIPTION = "Epiphany is an open source web browser for the Linux desktop environment. \
-It provides a simple and easy-to-use internet browsing experience."
-HOMEPAGE = "https://wiki.gnome.org/Apps/Web"
-BUGTRACKER = "https://gitlab.gnome.org/GNOME/epiphany"
-LICENSE = "GPL-3.0-or-later"
-LIC_FILES_CHKSUM = "file://COPYING;md5=d32239bcb673463ab874e80d47fae504"
-
-DEPENDS = " \
- webkitgtk \
- gcr \
- gsettings-desktop-schemas \
- nettle \
- json-glib \
- libarchive \
- libdazzle \
- libhandy \
- glib-2.0-native \
- coreutils-native \
- "
-
-GNOMEBASEBUILDCLASS = "meson"
-inherit gnomebase gsettings features_check gettext mime-xdg
-REQUIRED_DISTRO_FEATURES = "x11 opengl"
-
-SRC_URI = "${GNOME_MIRROR}/${GNOMEBN}/${@oe.utils.trim_version("${PV}", 1)}/${GNOMEBN}-${PV}.tar.${GNOME_COMPRESS_TYPE};name=archive \
- file://0002-help-meson.build-disable-the-use-of-yelp.patch \
- file://migrator.patch \
- file://distributor.patch \
- "
-SRC_URI[archive.sha256sum] = "92c02cf886d10d2ccff5de658e1a420eab31d20bb50e746d430e9535b485192d"
-
-PACKAGECONFIG_SOUP ?= "soup2"
-PACKAGECONFIG ??= "${PACKAGECONFIG_SOUP}"
-
-# Developer mode enables debugging
-PACKAGECONFIG[developer-mode] = "-Ddeveloper_mode=true,-Ddeveloper_mode=false"
-PACKAGECONFIG[soup2] = "-Dsoup2=enabled,-Dsoup2=disabled,libsoup-2.4,,,soup3"
-PACKAGECONFIG[soup3] = ",,libsoup,,,soup2"
-PACKAGECONFIG[libportal] = "-Dlibportal=enabled,-Dlibportal=disabled,libportal"
-
-FILES:${PN} += "${datadir}/dbus-1 ${datadir}/gnome-shell/search-providers ${datadir}/metainfo"
-RDEPENDS:${PN} = "iso-codes adwaita-icon-theme gsettings-desktop-schemas"
diff --git a/meta/recipes-gnome/epiphany/epiphany_46.0.bb b/meta/recipes-gnome/epiphany/epiphany_46.0.bb
new file mode 100644
index 0000000000..9052fe170b
--- /dev/null
+++ b/meta/recipes-gnome/epiphany/epiphany_46.0.bb
@@ -0,0 +1,43 @@
+SUMMARY = "WebKit based web browser for GNOME"
+DESCRIPTION = "Epiphany is an open source web browser for the Linux desktop environment. \
+It provides a simple and easy-to-use internet browsing experience."
+HOMEPAGE = "https://wiki.gnome.org/Apps/Web"
+BUGTRACKER = "https://gitlab.gnome.org/GNOME/epiphany"
+LICENSE = "GPL-3.0-or-later"
+LIC_FILES_CHKSUM = "file://COPYING;md5=d32239bcb673463ab874e80d47fae504"
+
+DEPENDS = " \
+ webkitgtk \
+ gcr \
+ gsettings-desktop-schemas \
+ nettle \
+ json-glib \
+ libadwaita \
+ libarchive \
+ libdazzle \
+ libhandy \
+ libportal \
+ libsoup \
+ glib-2.0-native \
+ coreutils-native \
+ desktop-file-utils-native \
+ "
+
+inherit gnomebase gsettings features_check gettext mime-xdg gtk-icon-cache
+REQUIRED_DISTRO_FEATURES = "x11 opengl"
+
+SRC_URI = "${GNOME_MIRROR}/${GNOMEBN}/${@oe.utils.trim_version("${PV}", 1)}/${GNOMEBN}-${PV}.tar.${GNOME_COMPRESS_TYPE};name=archive \
+ file://0002-help-meson.build-disable-the-use-of-yelp.patch \
+ file://migrator.patch \
+ file://distributor.patch \
+ "
+SRC_URI[archive.sha256sum] = "f4348f2cf51c07c0c106d130172d4d23f2cd4068771e1de007b758ca2ade5660"
+
+# Developer mode enables debugging
+PACKAGECONFIG[developer-mode] = "-Ddeveloper_mode=true,-Ddeveloper_mode=false"
+
+FILES:${PN} += "${datadir}/dbus-1 ${datadir}/gnome-shell/search-providers ${datadir}/metainfo"
+RDEPENDS:${PN} = "iso-codes adwaita-icon-theme gsettings-desktop-schemas"
+
+# ANGLE requires SSE support as of webkit 2.40.x on 32 bit x86
+COMPATIBLE_HOST:x86 = "${@bb.utils.contains_any('TUNE_FEATURES', 'core2 corei7', '.*', 'null', d)}"
diff --git a/meta/recipes-gnome/epiphany/files/0002-help-meson.build-disable-the-use-of-yelp.patch b/meta/recipes-gnome/epiphany/files/0002-help-meson.build-disable-the-use-of-yelp.patch
index a6c4f92c86..e505466764 100644
--- a/meta/recipes-gnome/epiphany/files/0002-help-meson.build-disable-the-use-of-yelp.patch
+++ b/meta/recipes-gnome/epiphany/files/0002-help-meson.build-disable-the-use-of-yelp.patch
@@ -1,4 +1,4 @@
-From 77c9f87dc2b3ad0854a678e234e22dfb31902b82 Mon Sep 17 00:00:00 2001
+From bd45c75fe691e275d7a7d7e641ac66b153369a1c Mon Sep 17 00:00:00 2001
From: Alexander Kanavin <alex.kanavin@gmail.com>
Date: Wed, 31 Jan 2018 15:50:38 +0200
Subject: [PATCH] help/meson.build: disable the use of yelp
@@ -7,7 +7,6 @@ In particular this avoids calling itstool which oe-core doesn't provide.
Upstream-Status: Inappropriate [oe-core specific]
Signed-off-by: Alexander Kanavin <alex.kanavin@gmail.com>
-
---
help/meson.build | 8 ++++----
1 file changed, 4 insertions(+), 4 deletions(-)
diff --git a/meta/recipes-gnome/epiphany/files/distributor.patch b/meta/recipes-gnome/epiphany/files/distributor.patch
index b09c9b38d2..a2800eb62d 100644
--- a/meta/recipes-gnome/epiphany/files/distributor.patch
+++ b/meta/recipes-gnome/epiphany/files/distributor.patch
@@ -1,12 +1,19 @@
-Don't encode the distro from /etc/os-release into the binaries.
+From c297f5b89d7c8e98ac98e1d9a7506df6db6fc025 Mon Sep 17 00:00:00 2001
+From: Richard Purdie <richard.purdie@linuxfoundation.org>
+Date: Fri, 26 Feb 2021 10:17:52 +0000
+Subject: [PATCH] Don't encode the distro from /etc/os-release into the
+ binaries.
Upstream-Status: Pending
RP 2021/2/26
+---
+ meson.build | 1 +
+ 1 file changed, 1 insertion(+)
-Index: epiphany-3.38.2/meson.build
-===================================================================
---- epiphany-3.38.2.orig/meson.build
-+++ epiphany-3.38.2/meson.build
+diff --git a/meson.build b/meson.build
+index e416cc7..ee5f7b1 100644
+--- a/meson.build
++++ b/meson.build
@@ -15,6 +15,7 @@ if r.returncode() == 0
else
distributor_name = 'GNOME Web'
diff --git a/meta/recipes-gnome/epiphany/files/migrator.patch b/meta/recipes-gnome/epiphany/files/migrator.patch
index a9a650a64a..1b4404c8ca 100644
--- a/meta/recipes-gnome/epiphany/files/migrator.patch
+++ b/meta/recipes-gnome/epiphany/files/migrator.patch
@@ -1,15 +1,23 @@
+From b38a4ceaeec05efb1b822d52c43590e7fd518113 Mon Sep 17 00:00:00 2001
+From: Richard Purdie <richard.purdie@linuxfoundation.org>
+Date: Thu, 25 Feb 2021 14:52:32 +0000
+Subject: [PATCH] epiphany: Fix reproducibility issue
+
We don't want to encide BUILD_ROOT into target packages. This is used
for build time tests but in our case those would be on target anyway
do use the target paths.
Upstream-Status: Pending
RP 2021/2/25
+---
+ lib/ephy-profile-utils.c | 4 ++--
+ 1 file changed, 2 insertions(+), 2 deletions(-)
-Index: epiphany-3.38.2/lib/ephy-profile-utils.c
-===================================================================
---- epiphany-3.38.2.orig/lib/ephy-profile-utils.c
-+++ epiphany-3.38.2/lib/ephy-profile-utils.c
-@@ -130,10 +130,10 @@ ephy_profile_utils_do_migration (const c
+diff --git a/lib/ephy-profile-utils.c b/lib/ephy-profile-utils.c
+index a2f6181..5218077 100644
+--- a/lib/ephy-profile-utils.c
++++ b/lib/ephy-profile-utils.c
+@@ -130,10 +130,10 @@ ephy_profile_utils_do_migration (const char *profile_directory,
argv[i++] = NULL;
#if DEVELOPER_MODE
diff --git a/meta/recipes-gnome/gcr/gcr/0001-gcr-meson.build-fix-one-parallel-build-failure.patch b/meta/recipes-gnome/gcr/gcr/0001-gcr-meson.build-fix-one-parallel-build-failure.patch
deleted file mode 100644
index 81c4bdbbcc..0000000000
--- a/meta/recipes-gnome/gcr/gcr/0001-gcr-meson.build-fix-one-parallel-build-failure.patch
+++ /dev/null
@@ -1,37 +0,0 @@
-From cb3708bad88e713e4ccf705cb8c14c5996cd9d06 Mon Sep 17 00:00:00 2001
-From: Changqing Li <changqing.li@windriver.com>
-Date: Fri, 23 Apr 2021 16:32:38 +0800
-Subject: [PATCH] gcr/meson.build: fix one parallel build failure
-
-ui/gcr-live-search.c includes gcr/gcr-marshal.h. Because missing
-dependency, following error occurred intermittently during doing parallel
-build:
-
- -o ui/libgcr-ui-3.so.1.0.0.p/gcr-live-search.c.o -c ../gcr-3.38.1/ui/gcr-live-search.c
-../gcr-3.38.1/ui/gcr-live-search.c:32:10: fatal error: gcr/gcr-marshal.h: No such file or directory
- 32 | #include "gcr/gcr-marshal.h"
- | ^~~~~~~~~~~~~~~~~~~
-compilation terminated.
-
-Upstream-Status: Submitted [https://gitlab.gnome.org/GNOME/gcr/-/merge_requests/68/diffs]
-
-Signed-off-by: Changqing Li <changqing.li@windriver.com>
----
- gcr/meson.build | 1 +
- 1 file changed, 1 insertion(+)
-
-diff --git a/gcr/meson.build b/gcr/meson.build
-index 06c3a63..d9d4b8f 100644
---- a/gcr/meson.build
-+++ b/gcr/meson.build
-@@ -181,6 +181,7 @@ gcr_base_dep = declare_dependency(
- sources: [
- gcr_enums_gen[1],
- gcr_oids[1],
-+ gcr_marshal_gen[1],
- ],
- )
-
---
-2.17.1
-
diff --git a/meta/recipes-gnome/gcr/gcr/b3ca1d02bb0148ca787ac4aead164d7c8ce2c4d8.patch b/meta/recipes-gnome/gcr/gcr/b3ca1d02bb0148ca787ac4aead164d7c8ce2c4d8.patch
deleted file mode 100644
index ae4e2ac1eb..0000000000
--- a/meta/recipes-gnome/gcr/gcr/b3ca1d02bb0148ca787ac4aead164d7c8ce2c4d8.patch
+++ /dev/null
@@ -1,61 +0,0 @@
-From b3ca1d02bb0148ca787ac4aead164d7c8ce2c4d8 Mon Sep 17 00:00:00 2001
-From: Jakub Jirutka <jakub@jirutka.cz>
-Date: Wed, 12 Jan 2022 00:24:20 +0100
-Subject: [PATCH] meson: Fix unknown kw argument in gnome.generate_gir
-
-This argument has been removed in Meson 0.61.0:
-
- gck/meson.build:130:2: ERROR: gnome.generate_gir got unknown keyword arguments "packages"
-
-https://github.com/mesonbuild/meson/commit/f8fc5cb860465718fe7c79a1bf1fe00659f138de:
-
-> The packages argument to gnome.generate_gir was allowed, but never did anything, so stop passing it.
-
-Fixes #89
-Upstream-Status: Backport
-Signed-off-by: Alexander Kanavin <alex.kanavin@gmail.com>
----
- gck/meson.build | 1 -
- gcr/meson.build | 1 -
- ui/meson.build | 1 -
- 3 files changed, 3 deletions(-)
-
-diff --git a/gck/meson.build b/gck/meson.build
-index 756b486..a21a1e9 100644
---- a/gck/meson.build
-+++ b/gck/meson.build
-@@ -131,7 +131,6 @@ if get_option('introspection')
- sources: gck_gir_sources,
- namespace: 'Gck',
- nsversion: '@0@'.format(gck_major_version),
-- packages: gck_deps,
- export_packages: 'gck-@0@'.format(gck_major_version),
- includes: [ 'GObject-2.0', 'Gio-2.0' ],
- header: 'gck/gck.h',
-diff --git a/gcr/meson.build b/gcr/meson.build
-index 2233a44..c83641b 100644
---- a/gcr/meson.build
-+++ b/gcr/meson.build
-@@ -190,7 +190,6 @@ if get_option('introspection')
- sources: [ gcr_base_public_sources, gcr_base_headers ],
- namespace: 'Gcr',
- nsversion: '@0@'.format(gcr_major_version),
-- packages: gcr_base_deps,
- export_packages: 'gcr-base-@0@'.format(gcr_major_version),
- includes: [
- 'GObject-2.0',
-diff --git a/ui/meson.build b/ui/meson.build
-index e656ea2..32ee057 100644
---- a/ui/meson.build
-+++ b/ui/meson.build
-@@ -152,7 +152,6 @@ if get_option('introspection')
- export_packages: 'gcr-ui-@0@'.format(gcr_major_version),
- identifier_prefix: 'Gcr',
- symbol_prefix: 'gcr',
-- packages: gcr_ui_deps,
- includes: [
- 'GObject-2.0',
- 'Gio-2.0',
---
-GitLab
-
diff --git a/meta/recipes-gnome/gcr/gcr_3.40.0.bb b/meta/recipes-gnome/gcr/gcr_3.40.0.bb
deleted file mode 100644
index 717c31c325..0000000000
--- a/meta/recipes-gnome/gcr/gcr_3.40.0.bb
+++ /dev/null
@@ -1,44 +0,0 @@
-SUMMARY = "A library for bits of crypto UI and parsing etc"
-DESCRIPTION = "GCR is a library for displaying certificates, and crypto UI, \
-accessing key stores. It also provides the viewer for crypto files on the \
-GNOME desktop."
-HOMEPAGE = "https://gitlab.gnome.org/GNOME/gcr"
-BUGTRACKER = "https://gitlab.gnome.org/GNOME/gcr/issues"
-
-LICENSE = "GPL-2.0-only"
-LIC_FILES_CHKSUM = "file://COPYING;md5=55ca817ccb7d5b5b66355690e9abc605"
-
-DEPENDS = "p11-kit glib-2.0 libgcrypt gnupg-native \
- ${@bb.utils.contains('GI_DATA_ENABLED', 'True', 'libxslt-native', '', d)}"
-
-CACHED_CONFIGUREVARS += "ac_cv_path_GPG='gpg2'"
-
-GNOMEBASEBUILDCLASS = "meson"
-GTKDOC_MESON_OPTION = "gtk_doc"
-inherit gnomebase gtk-icon-cache gtk-doc features_check upstream-version-is-even vala gobject-introspection gettext mime mime-xdg
-
-SRC_URI += "file://0001-gcr-meson.build-fix-one-parallel-build-failure.patch \
- file://b3ca1d02bb0148ca787ac4aead164d7c8ce2c4d8.patch"
-
-SRC_URI[archive.sha256sum] = "b9d3645a5fd953a54285cc64d4fc046736463dbd4dcc25caf5c7b59bed3027f5"
-
-PACKAGECONFIG ??= "${@bb.utils.contains('DISTRO_FEATURES', 'x11', 'gtk', '', d)}"
-PACKAGECONFIG[gtk] = "-Dgtk=true,-Dgtk=false,gtk+3"
-
-FILES:${PN} += " \
- ${datadir}/dbus-1 \
- ${datadir}/gcr-3 \
-"
-
-# http://errors.yoctoproject.org/Errors/Details/20229/
-ARM_INSTRUCTION_SET:armv4 = "arm"
-ARM_INSTRUCTION_SET:armv5 = "arm"
-ARM_INSTRUCTION_SET:armv6 = "arm"
-
-EXTRA_OEMESON += "--cross-file ${WORKDIR}/meson-${PN}.cross"
-do_write_config:append() {
- cat >${WORKDIR}/meson-${PN}.cross <<EOF
-[binaries]
-gpg2 = '${bindir}/gpg2'
-EOF
-}
diff --git a/meta/recipes-gnome/gcr/gcr_4.2.1.bb b/meta/recipes-gnome/gcr/gcr_4.2.1.bb
new file mode 100644
index 0000000000..26dc1d1bc6
--- /dev/null
+++ b/meta/recipes-gnome/gcr/gcr_4.2.1.bb
@@ -0,0 +1,58 @@
+SUMMARY = "A library for bits of crypto UI and parsing etc"
+DESCRIPTION = "GCR is a library for displaying certificates, and crypto UI, \
+accessing key stores. It also provides the viewer for crypto files on the \
+GNOME desktop."
+HOMEPAGE = "https://gitlab.gnome.org/GNOME/gcr"
+BUGTRACKER = "https://gitlab.gnome.org/GNOME/gcr/issues"
+
+LICENSE = "GPL-2.0-only"
+LIC_FILES_CHKSUM = "file://COPYING;md5=55ca817ccb7d5b5b66355690e9abc605"
+
+DEPENDS = "p11-kit glib-2.0 libgcrypt gnupg-native \
+ ${@bb.utils.contains('GI_DATA_ENABLED', 'True', 'libxslt-native', '', d)}"
+
+CACHED_CONFIGUREVARS += "ac_cv_path_GPG='gpg2'"
+
+CFLAGS += "-D_GNU_SOURCE"
+
+GTKDOC_MESON_OPTION = "gtk_doc"
+inherit gnomebase gtk-icon-cache gi-docgen features_check vala gobject-introspection gettext mime mime-xdg
+
+REQUIRED_DISTRO_FEATURES = "${@bb.utils.contains('DISTRO_FEATURES', 'x11', 'opengl', '', d)}"
+
+SRC_URI[archive.sha256sum] = "ed783b5c80373cd058c02ea9e3e2a64e558599ca190a5abd598122e479967de5"
+
+PACKAGECONFIG ??= " \
+ ${@bb.utils.filter('DISTRO_FEATURES', 'systemd', d)} \
+ ${@bb.utils.contains('DISTRO_FEATURES', 'x11', 'gtk', '', d)} \
+ ${@bb.utils.contains('DISTRO_FEATURES', 'wayland', 'gtk', '', d)} \
+ ${@bb.utils.contains('GI_DATA_ENABLED', 'True', 'vapi', '', d)} \
+"
+PACKAGECONFIG[gtk] = "-Dgtk4=true,-Dgtk4=false,gtk4"
+PACKAGECONFIG[ssh_agent] = "-Dssh_agent=true,-Dssh_agent=false,libsecret,openssh"
+#'Use systemd socket activation for server programs'
+PACKAGECONFIG[systemd] = "-Dsystemd=enabled,-Dsystemd=disabled,systemd"
+PACKAGECONFIG[vapi] = "-Dvapi=true,-Dvapi=false,"
+
+FILES:${PN} += " \
+ ${datadir}/dbus-1 \
+ ${datadir}/gcr-4 \
+ ${systemd_user_unitdir}/gcr-ssh-agent.socket \
+ ${systemd_user_unitdir}/gcr-ssh-agent.service \
+"
+
+# http://errors.yoctoproject.org/Errors/Details/20229/
+ARM_INSTRUCTION_SET:armv4 = "arm"
+ARM_INSTRUCTION_SET:armv5 = "arm"
+ARM_INSTRUCTION_SET:armv6 = "arm"
+
+EXTRA_OEMESON += "--cross-file=${WORKDIR}/meson-${PN}.cross"
+
+do_write_config:append() {
+ cat >${WORKDIR}/meson-${PN}.cross <<EOF
+[binaries]
+gpg2 = '${bindir}/gpg2'
+ssh-add = '${bindir}/ssh-add'
+ssh-agent = '${bindir}/ssh-agent'
+EOF
+}
diff --git a/meta/recipes-gnome/gdk-pixbuf/gdk-pixbuf/0001-Add-use_prebuilt_tools-option.patch b/meta/recipes-gnome/gdk-pixbuf/gdk-pixbuf/0001-Add-use_prebuilt_tools-option.patch
deleted file mode 100644
index a8206a4507..0000000000
--- a/meta/recipes-gnome/gdk-pixbuf/gdk-pixbuf/0001-Add-use_prebuilt_tools-option.patch
+++ /dev/null
@@ -1,171 +0,0 @@
-From ba73bb0f3d2023839bc3b681c49b7ec1192cceb4 Mon Sep 17 00:00:00 2001
-From: Alexander Kanavin <alex.kanavin@gmail.com>
-Date: Sat, 8 May 2021 21:58:54 +0200
-Subject: [PATCH] Add use_prebuilt_tools option
-
-This allows using the gdk-pixbuf tools from the host to
-build and install tests in a cross-compile scenarion.
-
-Upstream-Status: Submitted [https://gitlab.gnome.org/GNOME/gdk-pixbuf/-/merge_requests/119]
-Signed-off-by: Alexander Kanavin <alex.kanavin@gmail.com>
-
----
- gdk-pixbuf/meson.build | 11 +++++++++--
- meson.build | 6 +++---
- meson_options.txt | 4 ++++
- tests/meson.build | 16 ++++++++--------
- thumbnailer/meson.build | 24 ++++++++++++++++++------
- 5 files changed, 42 insertions(+), 19 deletions(-)
-
-diff --git a/gdk-pixbuf/meson.build b/gdk-pixbuf/meson.build
-index 8b0590b..7331491 100644
---- a/gdk-pixbuf/meson.build
-+++ b/gdk-pixbuf/meson.build
-@@ -342,13 +342,20 @@ foreach bin: gdkpixbuf_bin
- include_directories: [ root_inc, gdk_pixbuf_inc ],
- c_args: common_cflags + gdk_pixbuf_cflags,
- install: true)
-- meson.override_find_program(bin_name, bin)
-+ if not get_option('use_prebuilt_tools')
-+ meson.override_find_program(bin_name, bin)
-+ endif
-
- # Used in tests
- set_variable(bin_name.underscorify(), bin)
- endforeach
-
--if not meson.is_cross_build()
-+if get_option('use_prebuilt_tools')
-+ gdk_pixbuf_query_loaders = find_program('gdk-pixbuf-query-loaders', required: true)
-+ gdk_pixbuf_pixdata = find_program('gdk-pixbuf-pixdata', required: true)
-+endif
-+
-+if not meson.is_cross_build() or get_option('use_prebuilt_tools')
- # The 'loaders.cache' used for testing, so we don't accidentally
- # load the installed cache; we always build it by default
- loaders_cache = custom_target('loaders.cache',
-diff --git a/meson.build b/meson.build
-index 7a1409b..0bc73eb 100644
---- a/meson.build
-+++ b/meson.build
-@@ -403,16 +403,16 @@ subdir('gdk-pixbuf')
- # i18n
- subdir('po')
-
--if not meson.is_cross_build()
-+if not meson.is_cross_build() or get_option('use_prebuilt_tools')
- subdir('tests')
-- subdir('thumbnailer')
- endif
-+subdir('thumbnailer')
-
- # Documentation
- build_docs = get_option('gtk_doc') or get_option('docs')
- subdir('docs')
-
--if not meson.is_cross_build()
-+if not meson.is_cross_build() or get_option('use_prebuilt_tools')
- meson.add_install_script('build-aux/post-install.py',
- gdk_pixbuf_bindir,
- gdk_pixbuf_libdir,
-diff --git a/meson_options.txt b/meson_options.txt
-index 0ee6718..cc29855 100644
---- a/meson_options.txt
-+++ b/meson_options.txt
-@@ -49,4 +49,8 @@ option('gio_sniffing',
- description: 'Perform file type detection using GIO (Unused on MacOS and Windows)',
- type: 'boolean',
- value: true)
-+option('use_prebuilt_tools',
-+ description: 'Use prebuilt gdk-pixbuf tools from the host for cross-compilation',
-+ type: 'boolean',
-+ value: false)
-
-diff --git a/tests/meson.build b/tests/meson.build
-index 7c6cb11..1029e6a 100644
---- a/tests/meson.build
-+++ b/tests/meson.build
-@@ -5,6 +5,12 @@
- # $PATH. Ideally we should use gnome.compile_resources() and let Meson deal with
- # this problem: See https://github.com/mesonbuild/meson/issues/8266.
- if enabled_loaders.contains('png') and host_system != 'windows'
-+
-+ resources_deps = [loaders_cache,]
-+ if not get_option('use_prebuilt_tools')
-+ resources_deps += [gdk_pixbuf_pixdata,]
-+ endif
-+
- # Resources; we cannot use gnome.compile_resources() here, because we need to
- # override the environment in order to use the utilities we just built instead
- # of the system ones
-@@ -21,10 +27,7 @@ if enabled_loaders.contains('png') and host_system != 'windows'
- '@INPUT@',
- '@OUTPUT@',
- ],
-- depends: [
-- gdk_pixbuf_pixdata,
-- loaders_cache,
-- ],
-+ depends: resources_deps,
- )
-
- resources_h = custom_target('resources.h',
-@@ -40,10 +43,7 @@ if enabled_loaders.contains('png') and host_system != 'windows'
- '@INPUT@',
- '@OUTPUT@',
- ],
-- depends: [
-- gdk_pixbuf_pixdata,
-- loaders_cache,
-- ],
-+ depends: resources_deps,
- )
- no_resources = false
- else
-diff --git a/thumbnailer/meson.build b/thumbnailer/meson.build
-index b6a206d..9336c21 100644
---- a/thumbnailer/meson.build
-+++ b/thumbnailer/meson.build
-@@ -6,13 +6,29 @@ bin = executable('gdk-pixbuf-thumbnailer',
- ],
- dependencies: gdk_pixbuf_deps + [ gdkpixbuf_dep ],
- install: true)
--meson.override_find_program('gdk-pixbuf-thumbnailer', bin)
-+if not get_option('use_prebuilt_tools')
-+ meson.override_find_program('gdk-pixbuf-thumbnailer', bin)
-+endif
-
- gdk_pixbuf_print_mime_types = executable('gdk-pixbuf-print-mime-types',
- 'gdk-pixbuf-print-mime-types.c',
-+ install: true,
- c_args: common_cflags,
- dependencies: gdk_pixbuf_deps + [ gdkpixbuf_dep ])
-
-+if get_option('use_prebuilt_tools')
-+ gdk_pixbuf_print_mime_types = find_program('gdk-pixbuf-print-mime-types', required: true)
-+endif
-+
-+thumbnailer_deps = [loaders_cache,]
-+
-+if not get_option('use_prebuilt_tools')
-+ thumbnailer_deps += [
-+ gdk_pixbuf_print_mime_types,
-+ gdk_pixbuf_pixdata,
-+ ]
-+endif
-+
- custom_target('thumbnailer',
- input: 'gdk-pixbuf-thumbnailer.thumbnailer.in',
- output: 'gdk-pixbuf-thumbnailer.thumbnailer',
-@@ -25,10 +41,6 @@ custom_target('thumbnailer',
- '@INPUT@',
- '@OUTPUT@',
- ],
-- depends: [
-- gdk_pixbuf_print_mime_types,
-- gdk_pixbuf_pixdata,
-- loaders_cache,
-- ],
-+ depends: thumbnailer_deps,
- install: true,
- install_dir: join_paths(gdk_pixbuf_datadir, 'thumbnailers'))
diff --git a/meta/recipes-gnome/gdk-pixbuf/gdk-pixbuf/0001-meson.build-allow-a-subset-of-tests-in-cross-compile.patch b/meta/recipes-gnome/gdk-pixbuf/gdk-pixbuf/0001-meson.build-allow-a-subset-of-tests-in-cross-compile.patch
new file mode 100644
index 0000000000..7250fa3f62
--- /dev/null
+++ b/meta/recipes-gnome/gdk-pixbuf/gdk-pixbuf/0001-meson.build-allow-a-subset-of-tests-in-cross-compile.patch
@@ -0,0 +1,66 @@
+From 9d3b374e75692da3d1d05344a1693c85a3098f47 Mon Sep 17 00:00:00 2001
+From: Alexander Kanavin <alex@linutronix.de>
+Date: Thu, 26 Jan 2023 20:29:46 +0100
+Subject: [PATCH] meson.build: allow (a subset of) tests in cross compile
+ settings
+
+There is no need to completely disable tests: most of them
+do not require running target executables at build time,
+and so can be built and installed.
+
+This requires inserting a couple of specific guards around
+items that do require running target executables.
+
+Upstream-Status: Submitted [https://gitlab.gnome.org/GNOME/gdk-pixbuf/-/merge_requests/150]
+Signed-off-by: Alexander Kanavin <alex@linutronix.de>
+---
+ meson.build | 6 +++---
+ tests/meson.build | 10 ++++++----
+ 2 files changed, 9 insertions(+), 7 deletions(-)
+
+diff --git a/meson.build b/meson.build
+index 8a16c8f..7c8b20f 100644
+--- a/meson.build
++++ b/meson.build
+@@ -369,10 +369,10 @@ subdir('gdk-pixbuf')
+ # i18n
+ subdir('po')
+
++if get_option('tests')
++ subdir('tests')
++endif
+ if not meson.is_cross_build()
+- if get_option('tests')
+- subdir('tests')
+- endif
+ subdir('thumbnailer')
+ endif
+
+diff --git a/tests/meson.build b/tests/meson.build
+index 28c2525..c45e765 100644
+--- a/tests/meson.build
++++ b/tests/meson.build
+@@ -4,7 +4,7 @@
+ # gdk-pixbuf-pixdata from build directory because it needs all DLL locations in
+ # $PATH. Ideally we should use gnome.compile_resources() and let Meson deal with
+ # this problem: See https://github.com/mesonbuild/meson/issues/8266.
+-if enabled_loaders.contains('png') and host_system != 'windows'
++if enabled_loaders.contains('png') and host_system != 'windows' and not meson.is_cross_build()
+ # Resources; we cannot use gnome.compile_resources() here, because we need to
+ # override the environment in order to use the utilities we just built instead
+ # of the system ones
+@@ -166,9 +166,11 @@ endif
+ test_deps = gdk_pixbuf_deps + [ gdkpixbuf_dep, ]
+ test_args = [ '-k' ]
+ test_env = environment()
+-test_env.set('G_TEST_SRCDIR', meson.current_source_dir())
+-test_env.set('G_TEST_BUILDDIR', meson.current_build_dir())
+-test_env.set('GDK_PIXBUF_MODULE_FILE', loaders_cache.full_path())
++if not meson.is_cross_build()
++ test_env.set('G_TEST_SRCDIR', meson.current_source_dir())
++ test_env.set('G_TEST_BUILDDIR', meson.current_build_dir())
++ test_env.set('GDK_PIXBUF_MODULE_FILE', loaders_cache.full_path())
++endif
+
+ foreach test_name, test_data: installed_tests
+ test_sources = [ test_name + '.c', 'test-common.c' ]
diff --git a/meta/recipes-gnome/gdk-pixbuf/gdk-pixbuf/fatal-loader.patch b/meta/recipes-gnome/gdk-pixbuf/gdk-pixbuf/fatal-loader.patch
index 25410b11ea..23c68a0923 100644
--- a/meta/recipes-gnome/gdk-pixbuf/gdk-pixbuf/fatal-loader.patch
+++ b/meta/recipes-gnome/gdk-pixbuf/gdk-pixbuf/fatal-loader.patch
@@ -1,4 +1,4 @@
-From f00603d58d844422363b896ea7d07aaf48ddaa66 Mon Sep 17 00:00:00 2001
+From b511bd1efb43ffc49c753e309717a242ec686ef1 Mon Sep 17 00:00:00 2001
From: Ross Burton <ross.burton@intel.com>
Date: Tue, 1 Apr 2014 17:23:36 +0100
Subject: [PATCH] gdk-pixbuf: add an option so that loader errors are fatal
@@ -6,7 +6,7 @@ Subject: [PATCH] gdk-pixbuf: add an option so that loader errors are fatal
If an environment variable is specified set the return value from main() to
non-zero if the loader had errors (missing libraries, generally).
-Upstream-Status: Pending
+Upstream-Status: Submitted [https://gitlab.gnome.org/GNOME/gdk-pixbuf/-/merge_requests/144]
Signed-off-by: Ross Burton <ross.burton@intel.com>
---
@@ -14,10 +14,10 @@ Signed-off-by: Ross Burton <ross.burton@intel.com>
1 file changed, 15 insertions(+), 4 deletions(-)
diff --git a/gdk-pixbuf/queryloaders.c b/gdk-pixbuf/queryloaders.c
-index 312aa78..b813d99 100644
+index 1d39b44..2b00815 100644
--- a/gdk-pixbuf/queryloaders.c
+++ b/gdk-pixbuf/queryloaders.c
-@@ -212,7 +212,7 @@ write_loader_info (GString *contents, const char *path, GdkPixbufFormat *info)
+@@ -216,7 +216,7 @@ write_loader_info (GString *contents, const char *path, GdkPixbufFormat *info)
g_string_append_c (contents, '\n');
}
@@ -26,7 +26,7 @@ index 312aa78..b813d99 100644
query_module (GString *contents, const char *dir, const char *file)
{
char *path;
-@@ -221,6 +221,7 @@ query_module (GString *contents, const char *dir, const char *file)
+@@ -225,6 +225,7 @@ query_module (GString *contents, const char *dir, const char *file)
void (*fill_vtable) (GdkPixbufModule *module);
gpointer fill_info_ptr;
gpointer fill_vtable_ptr;
@@ -34,7 +34,7 @@ index 312aa78..b813d99 100644
if (g_path_is_absolute (file))
path = g_strdup (file);
-@@ -270,10 +271,13 @@ query_module (GString *contents, const char *dir, const char *file)
+@@ -274,10 +275,13 @@ query_module (GString *contents, const char *dir, const char *file)
g_module_error());
else
g_fprintf (stderr, "Cannot load loader %s\n", path);
@@ -47,8 +47,8 @@ index 312aa78..b813d99 100644
+ return ret;
}
- #ifdef G_OS_WIN32
-@@ -314,6 +318,7 @@ int main (int argc, char **argv)
+ #if defined(G_OS_WIN32) && defined(GDK_PIXBUF_RELOCATABLE)
+@@ -318,6 +322,7 @@ int main (int argc, char **argv)
gint first_file = 1;
GFile *pixbuf_libdir_file;
gchar *pixbuf_libdir;
@@ -56,7 +56,7 @@ index 312aa78..b813d99 100644
#ifdef G_OS_WIN32
gchar *libdir;
-@@ -452,7 +457,9 @@ int main (int argc, char **argv)
+@@ -456,7 +461,9 @@ int main (int argc, char **argv)
}
modules = g_list_sort (modules, (GCompareFunc)strcmp);
for (l = modules; l != NULL; l = l->next)
@@ -67,7 +67,7 @@ index 312aa78..b813d99 100644
g_list_free_full (modules, g_free);
g_free (moduledir);
#else
-@@ -468,7 +475,8 @@ int main (int argc, char **argv)
+@@ -472,7 +479,8 @@ int main (int argc, char **argv)
infilename = g_locale_to_utf8 (infilename,
-1, NULL, NULL, NULL);
#endif
@@ -77,7 +77,7 @@ index 312aa78..b813d99 100644
}
g_free (cwd);
}
-@@ -486,5 +494,8 @@ int main (int argc, char **argv)
+@@ -490,5 +498,8 @@ int main (int argc, char **argv)
g_free (pixbuf_libdir);
diff --git a/meta/recipes-gnome/gdk-pixbuf/gdk-pixbuf_2.42.10.bb b/meta/recipes-gnome/gdk-pixbuf/gdk-pixbuf_2.42.10.bb
new file mode 100644
index 0000000000..cca89a9059
--- /dev/null
+++ b/meta/recipes-gnome/gdk-pixbuf/gdk-pixbuf_2.42.10.bb
@@ -0,0 +1,119 @@
+SUMMARY = "Image loading library for GTK+"
+DESCRIPTION = "The GDK Pixbuf library provides: Image loading and saving \
+facilities, fast scaling and compositing of pixbufs and Simple animation \
+loading (ie. animated GIFs)"
+HOMEPAGE = "https://wiki.gnome.org/Projects/GdkPixbuf"
+BUGTRACKER = "https://gitlab.gnome.org/GNOME/gdk-pixbuf/issues"
+
+LICENSE = "LGPL-2.1-or-later"
+LIC_FILES_CHKSUM = "file://COPYING;md5=4fbd65380cdd255951079008b364516c \
+ file://gdk-pixbuf/gdk-pixbuf.h;endline=26;md5=72b39da7cbdde2e665329fef618e1d6b \
+ "
+
+SECTION = "libs"
+
+DEPENDS = "glib-2.0 shared-mime-info"
+
+MAJ_VER = "${@oe.utils.trim_version("${PV}", 2)}"
+
+SRC_URI = "${GNOME_MIRROR}/${BPN}/${MAJ_VER}/${BPN}-${PV}.tar.xz \
+ file://run-ptest \
+ file://fatal-loader.patch \
+ file://0001-meson.build-allow-a-subset-of-tests-in-cross-compile.patch \
+ "
+
+SRC_URI[sha256sum] = "ee9b6c75d13ba096907a2e3c6b27b61bcd17f5c7ebeab5a5b439d2f2e39fe44b"
+
+inherit meson pkgconfig gettext pixbufcache ptest-gnome upstream-version-is-even gobject-introspection gi-docgen lib_package
+
+GIR_MESON_OPTION = 'introspection'
+GIR_MESON_ENABLE_FLAG = "enabled"
+GIR_MESON_DISABLE_FLAG = "disabled"
+
+LIBV = "2.10.0"
+
+GDK_PIXBUF_LOADERS ?= "png jpeg"
+
+PACKAGECONFIG = "${GDK_PIXBUF_LOADERS} \
+ ${@bb.utils.contains('PTEST_ENABLED', '1', 'tests', '', d)}"
+PACKAGECONFIG:class-native = "${GDK_PIXBUF_LOADERS}"
+
+PACKAGECONFIG[png] = "-Dpng=enabled,-Dpng=disabled,libpng"
+PACKAGECONFIG[jpeg] = "-Djpeg=enabled,-Djpeg=disabled,jpeg"
+PACKAGECONFIG[tiff] = "-Dtiff=enabled,-Dtiff=disabled,tiff"
+PACKAGECONFIG[tests] = "-Dinstalled_tests=true,-Dinstalled_tests=false"
+
+EXTRA_OEMESON = "-Dman=false"
+
+PACKAGES =+ "${PN}-xlib"
+
+# For GIO image type sniffing
+RDEPENDS:${PN} = "shared-mime-info"
+
+FILES:${PN}-xlib = "${libdir}/*pixbuf_xlib*${SOLIBS}"
+ALLOW_EMPTY:${PN}-xlib = "1"
+
+FILES:${PN} += "${libdir}/gdk-pixbuf-2.0/gdk-pixbuf-query-loaders"
+
+FILES:${PN}-bin += "${datadir}/thumbnailers/gdk-pixbuf-thumbnailer.thumbnailer"
+
+FILES:${PN}-dev += " \
+ ${bindir}/gdk-pixbuf-csource \
+ ${bindir}/gdk-pixbuf-pixdata \
+ ${bindir}/gdk-pixbuf-print-mime-types \
+ ${includedir}/* \
+ ${libdir}/gdk-pixbuf-2.0/${LIBV}/loaders/*.la \
+"
+
+PACKAGES_DYNAMIC += "^gdk-pixbuf-loader-.*"
+PACKAGES_DYNAMIC:class-native = ""
+
+python populate_packages:prepend () {
+ postinst_pixbufloader = d.getVar("postinst_pixbufloader")
+
+ loaders_root = d.expand('${libdir}/gdk-pixbuf-2.0/${LIBV}/loaders')
+
+ packages = ' '.join(do_split_packages(d, loaders_root, r'^libpixbufloader-(.*)\.so$', 'gdk-pixbuf-loader-%s', 'GDK pixbuf loader for %s'))
+ d.setVar('PIXBUF_PACKAGES', packages)
+
+ # The test suite exercises all the loaders, so ensure they are all
+ # dependencies of the ptest package.
+ d.appendVar("RDEPENDS:%s-ptest" % d.getVar('PN'), " " + packages)
+}
+
+do_install:append() {
+ # Copy gdk-pixbuf-query-loaders into libdir so it is always available
+ # in multilib builds.
+ cp ${D}/${bindir}/gdk-pixbuf-query-loaders ${D}/${libdir}/gdk-pixbuf-2.0/
+
+}
+
+do_install_ptest() {
+ # Remove a bad fuzzing attempt that sporadically fails without a way to reproduce
+ rm ${D}/${datadir}/installed-tests/gdk-pixbuf/pixbuf-randomly-modified.test
+ # https://gitlab.gnome.org/GNOME/gdk-pixbuf/-/issues/215
+ rm ${D}/${datadir}/installed-tests/gdk-pixbuf/pixbuf-jpeg.test
+}
+
+do_install:append:class-native() {
+ find ${D}${libdir} -name "libpixbufloader-*.la" -exec rm \{\} \;
+
+ create_wrapper ${D}/${bindir}/gdk-pixbuf-csource \
+ XDG_DATA_DIRS=${STAGING_DATADIR} \
+ GDK_PIXBUF_MODULE_FILE=${STAGING_LIBDIR_NATIVE}/gdk-pixbuf-2.0/${LIBV}/loaders.cache
+
+ create_wrapper ${D}/${bindir}/gdk-pixbuf-pixdata \
+ XDG_DATA_DIRS=${STAGING_DATADIR} \
+ GDK_PIXBUF_MODULE_FILE=${STAGING_LIBDIR_NATIVE}/gdk-pixbuf-2.0/${LIBV}/loaders.cache
+
+ create_wrapper ${D}/${libdir}/gdk-pixbuf-2.0/gdk-pixbuf-query-loaders \
+ XDG_DATA_DIRS=${STAGING_DATADIR} \
+ GDK_PIXBUF_MODULE_FILE=${STAGING_LIBDIR_NATIVE}/gdk-pixbuf-2.0/${LIBV}/loaders.cache \
+ GDK_PIXBUF_MODULEDIR=${STAGING_LIBDIR_NATIVE}/gdk-pixbuf-2.0/${LIBV}/loaders
+
+ create_wrapper ${D}/${bindir}/gdk-pixbuf-query-loaders \
+ XDG_DATA_DIRS=${STAGING_DATADIR} \
+ GDK_PIXBUF_MODULE_FILE=${STAGING_LIBDIR_NATIVE}/gdk-pixbuf-2.0/${LIBV}/loaders.cache \
+ GDK_PIXBUF_MODULEDIR=${STAGING_LIBDIR_NATIVE}/gdk-pixbuf-2.0/${LIBV}/loaders
+}
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-gnome/gdk-pixbuf/gdk-pixbuf_2.42.8.bb b/meta/recipes-gnome/gdk-pixbuf/gdk-pixbuf_2.42.8.bb
deleted file mode 100644
index fb6829a7d1..0000000000
--- a/meta/recipes-gnome/gdk-pixbuf/gdk-pixbuf_2.42.8.bb
+++ /dev/null
@@ -1,128 +0,0 @@
-SUMMARY = "Image loading library for GTK+"
-DESCRIPTION = "The GDK Pixbuf library provides: Image loading and saving \
-facilities, fast scaling and compositing of pixbufs and Simple animation \
-loading (ie. animated GIFs)"
-HOMEPAGE = "https://wiki.gnome.org/Projects/GdkPixbuf"
-BUGTRACKER = "https://gitlab.gnome.org/GNOME/gdk-pixbuf/issues"
-
-LICENSE = "LGPL-2.1-or-later"
-LIC_FILES_CHKSUM = "file://COPYING;md5=4fbd65380cdd255951079008b364516c \
- file://gdk-pixbuf/gdk-pixbuf.h;endline=26;md5=72b39da7cbdde2e665329fef618e1d6b \
- "
-
-SECTION = "libs"
-
-DEPENDS = "glib-2.0 gdk-pixbuf-native shared-mime-info"
-DEPENDS:remove:class-native = "gdk-pixbuf-native"
-
-MAJ_VER = "${@oe.utils.trim_version("${PV}", 2)}"
-
-SRC_URI = "${GNOME_MIRROR}/${BPN}/${MAJ_VER}/${BPN}-${PV}.tar.xz \
- file://run-ptest \
- file://fatal-loader.patch \
- file://0001-Add-use_prebuilt_tools-option.patch \
- "
-
-SRC_URI[sha256sum] = "84acea3acb2411b29134b32015a5b1aaa62844b19c4b1ef8b8971c6b0759f4c6"
-
-inherit meson pkgconfig gettext pixbufcache ptest-gnome upstream-version-is-even gobject-introspection gi-docgen lib_package
-
-GIR_MESON_OPTION = 'introspection'
-GIR_MESON_ENABLE_FLAG = "enabled"
-GIR_MESON_DISABLE_FLAG = "disabled"
-
-LIBV = "2.10.0"
-
-GDK_PIXBUF_LOADERS ?= "png jpeg"
-
-PACKAGECONFIG = "${GDK_PIXBUF_LOADERS} \
- ${@bb.utils.contains('PTEST_ENABLED', '1', 'tests', '', d)}"
-PACKAGECONFIG:class-native = "${GDK_PIXBUF_LOADERS}"
-
-PACKAGECONFIG[png] = "-Dpng=enabled,-Dpng=disabled,libpng"
-PACKAGECONFIG[jpeg] = "-Djpeg=enabled,-Djpeg=disabled,jpeg"
-PACKAGECONFIG[tiff] = "-Dtiff=enabled,-Dtiff=disabled,tiff"
-PACKAGECONFIG[tests] = "-Dinstalled_tests=true,-Dinstalled_tests=false"
-
-EXTRA_OEMESON:class-target = " \
- -Duse_prebuilt_tools=true \
-"
-
-EXTRA_OEMESON:class-nativesdk = " \
- -Duse_prebuilt_tools=true \
-"
-
-PACKAGES =+ "${PN}-xlib"
-
-# For GIO image type sniffing
-RDEPENDS:${PN} = "shared-mime-info"
-
-FILES:${PN}-xlib = "${libdir}/*pixbuf_xlib*${SOLIBS}"
-ALLOW_EMPTY:${PN}-xlib = "1"
-
-FILES:${PN} += "${libdir}/gdk-pixbuf-2.0/gdk-pixbuf-query-loaders"
-
-FILES:${PN}-bin += "${datadir}/thumbnailers/gdk-pixbuf-thumbnailer.thumbnailer"
-
-FILES:${PN}-dev += " \
- ${bindir}/gdk-pixbuf-csource \
- ${bindir}/gdk-pixbuf-pixdata \
- ${bindir}/gdk-pixbuf-print-mime-types \
- ${includedir}/* \
- ${libdir}/gdk-pixbuf-2.0/${LIBV}/loaders/*.la \
-"
-
-PACKAGES_DYNAMIC += "^gdk-pixbuf-loader-.*"
-PACKAGES_DYNAMIC:class-native = ""
-
-python populate_packages:prepend () {
- postinst_pixbufloader = d.getVar("postinst_pixbufloader")
-
- loaders_root = d.expand('${libdir}/gdk-pixbuf-2.0/${LIBV}/loaders')
-
- packages = ' '.join(do_split_packages(d, loaders_root, r'^libpixbufloader-(.*)\.so$', 'gdk-pixbuf-loader-%s', 'GDK pixbuf loader for %s'))
- d.setVar('PIXBUF_PACKAGES', packages)
-
- # The test suite exercises all the loaders, so ensure they are all
- # dependencies of the ptest package.
- d.appendVar("RDEPENDS:%s-ptest" % d.getVar('PN'), " " + packages)
-}
-
-do_install:append() {
- # Copy gdk-pixbuf-query-loaders into libdir so it is always available
- # in multilib builds.
- cp ${D}/${bindir}/gdk-pixbuf-query-loaders ${D}/${libdir}/gdk-pixbuf-2.0/
-
-}
-
-# Remove a bad fuzzing attempt that sporadically fails without a way to reproduce
-do_install_ptest() {
- rm ${D}/${datadir}/installed-tests/gdk-pixbuf/pixbuf-randomly-modified.test
-}
-
-do_install:append:class-native() {
- find ${D}${libdir} -name "libpixbufloader-*.la" -exec rm \{\} \;
-
- create_wrapper ${D}/${bindir}/gdk-pixbuf-csource \
- XDG_DATA_DIRS=${STAGING_DATADIR} \
- GDK_PIXBUF_MODULE_FILE=${STAGING_LIBDIR_NATIVE}/gdk-pixbuf-2.0/${LIBV}/loaders.cache
-
- create_wrapper ${D}/${bindir}/gdk-pixbuf-pixdata \
- XDG_DATA_DIRS=${STAGING_DATADIR} \
- GDK_PIXBUF_MODULE_FILE=${STAGING_LIBDIR_NATIVE}/gdk-pixbuf-2.0/${LIBV}/loaders.cache
-
- create_wrapper ${D}/${bindir}/gdk-pixbuf-print-mime-types \
- XDG_DATA_DIRS=${STAGING_DATADIR} \
- GDK_PIXBUF_MODULE_FILE=${STAGING_LIBDIR_NATIVE}/gdk-pixbuf-2.0/${LIBV}/loaders.cache
-
- create_wrapper ${D}/${libdir}/gdk-pixbuf-2.0/gdk-pixbuf-query-loaders \
- XDG_DATA_DIRS=${STAGING_DATADIR} \
- GDK_PIXBUF_MODULE_FILE=${STAGING_LIBDIR_NATIVE}/gdk-pixbuf-2.0/${LIBV}/loaders.cache \
- GDK_PIXBUF_MODULEDIR=${STAGING_LIBDIR_NATIVE}/gdk-pixbuf-2.0/${LIBV}/loaders
-
- create_wrapper ${D}/${bindir}/gdk-pixbuf-query-loaders \
- XDG_DATA_DIRS=${STAGING_DATADIR} \
- GDK_PIXBUF_MODULE_FILE=${STAGING_LIBDIR_NATIVE}/gdk-pixbuf-2.0/${LIBV}/loaders.cache \
- GDK_PIXBUF_MODULEDIR=${STAGING_LIBDIR_NATIVE}/gdk-pixbuf-2.0/${LIBV}/loaders
-}
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-gnome/gi-docgen/gi-docgen_2023.3.bb b/meta/recipes-gnome/gi-docgen/gi-docgen_2023.3.bb
new file mode 100644
index 0000000000..54d7ef7513
--- /dev/null
+++ b/meta/recipes-gnome/gi-docgen/gi-docgen_2023.3.bb
@@ -0,0 +1,21 @@
+SUMMARY = "Documentation tool for GObject-based libraries"
+DESCRIPTION = "GI-DocGen is a document generator for GObject-based libraries. GObject is \
+the base type system of the GNOME project. GI-Docgen reuses the \
+introspection data generated by GObject-based libraries to generate the API \
+reference of these libraries, as well as other ancillary documentation."
+HOMEPAGE = "https://gnome.pages.gitlab.gnome.org/gi-docgen/"
+
+LICENSE = "GPL-3.0-or-later & Apache-2.0"
+LIC_FILES_CHKSUM = "file://gi-docgen.py;beginline=1;endline=5;md5=2dc0f1f01202478cfe813c0e7f80b326"
+
+SRC_URI = "git://gitlab.gnome.org/GNOME/gi-docgen.git;protocol=https;branch=main"
+
+SRCREV = "96f2e9b93e1d8a5338eb05b87fd879856ab7b3cc"
+
+S = "${WORKDIR}/git"
+
+inherit setuptools3
+
+RDEPENDS:${PN} += "python3-asyncio python3-core python3-jinja2 python3-json python3-markdown python3-markupsafe python3-pygments python3-toml python3-typogrify python3-xml"
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-gnome/gi-docgen/gi-docgen_git.bb b/meta/recipes-gnome/gi-docgen/gi-docgen_git.bb
deleted file mode 100644
index 6a7124c3fe..0000000000
--- a/meta/recipes-gnome/gi-docgen/gi-docgen_git.bb
+++ /dev/null
@@ -1,22 +0,0 @@
-SUMMARY = "Documentation tool for GObject-based libraries"
-DESCRIPTION = "GI-DocGen is a document generator for GObject-based libraries. GObject is \
-the base type system of the GNOME project. GI-Docgen reuses the \
-introspection data generated by GObject-based libraries to generate the API \
-reference of these libraries, as well as other ancillary documentation."
-HOMEPAGE = "https://gnome.pages.gitlab.gnome.org/gi-docgen/"
-
-LICENSE = "GPL-3.0-or-later & Apache-2.0"
-LIC_FILES_CHKSUM = "file://gi-docgen.py;beginline=1;endline=5;md5=2dc0f1f01202478cfe813c0e7f80b326"
-
-SRC_URI = "git://gitlab.gnome.org/GNOME/gi-docgen.git;protocol=https;branch=main"
-
-PV = "2022.1"
-SRCREV = "37b04455ff58cb2ec3f58917d0737c435344f2fb"
-
-S = "${WORKDIR}/git"
-
-inherit setuptools3
-
-RDEPENDS:${PN} += "python3-asyncio python3-core python3-jinja2 python3-json python3-markdown python3-markupsafe python3-pygments python3-toml python3-typogrify python3-xml"
-
-BBCLASSEXTEND = "native"
diff --git a/meta/recipes-gnome/gnome/adwaita-icon-theme/0001-Don-t-use-AC_CANONICAL_HOST.patch b/meta/recipes-gnome/gnome/adwaita-icon-theme/0001-Don-t-use-AC_CANONICAL_HOST.patch
deleted file mode 100644
index 5afba3c644..0000000000
--- a/meta/recipes-gnome/gnome/adwaita-icon-theme/0001-Don-t-use-AC_CANONICAL_HOST.patch
+++ /dev/null
@@ -1,27 +0,0 @@
-From 63d97fefdbc90f5c68f67bdc30844776d9a1b720 Mon Sep 17 00:00:00 2001
-From: Jussi Kukkonen <jussi.kukkonen@intel.com>
-Date: Tue, 30 May 2017 14:55:49 +0300
-Subject: [PATCH] Don't use AC_CANONICAL_HOST
-
-This won't work when building allarch (and is only used to find out if
-target is windows).
-
-Upstream-Status: Inappropriate [embedded specific]
-Signed-off-by: Jussi Kukkonen <jussi.kukkonen@intel.com>
-
----
- configure.ac | 1 -
- 1 file changed, 1 deletion(-)
-
-diff --git a/configure.ac b/configure.ac
-index f2659a5..1e8b016 100644
---- a/configure.ac
-+++ b/configure.ac
-@@ -3,7 +3,6 @@ AC_PREREQ(2.53)
-
- AC_INIT([adwaita-icon-theme], [41.0],
- [http://bugzilla.gnome.org/enter_bug.cgi?product=adwaita-icon-theme])
--AC_CANONICAL_HOST
- AC_CONFIG_MACRO_DIR([m4])
- AC_CONFIG_SRCDIR([index.theme.in])
-
diff --git a/meta/recipes-gnome/gnome/adwaita-icon-theme/0001-Run-installation-commands-as-shell-jobs.patch b/meta/recipes-gnome/gnome/adwaita-icon-theme/0001-Run-installation-commands-as-shell-jobs.patch
deleted file mode 100644
index a1d39cf558..0000000000
--- a/meta/recipes-gnome/gnome/adwaita-icon-theme/0001-Run-installation-commands-as-shell-jobs.patch
+++ /dev/null
@@ -1,84 +0,0 @@
-From 79da031e9811f3eef34b14cce419be93fea34319 Mon Sep 17 00:00:00 2001
-From: Alexander Kanavin <alex.kanavin@gmail.com>
-Date: Tue, 13 Jun 2017 18:10:06 +0300
-Subject: [PATCH] Run installation commands as shell jobs
-
-This greatly speeds up installation time on multi-core systems.
-
-Upstream-Status: Submitted [https://gitlab.gnome.org/GNOME/adwaita-icon-theme/-/merge_requests/39]
-Signed-off-by: Alexander Kanavin <alex.kanavin@gmail.com>
-
----
- src/fullcolor/Makefile.am | 5 +++--
- src/spinner/Makefile.am | 7 ++++---
- src/symbolic/Makefile.am | 11 ++++++-----
- 3 files changed, 13 insertions(+), 10 deletions(-)
-
-diff --git a/src/fullcolor/Makefile.am b/src/fullcolor/Makefile.am
-index d73529b..b7d0808 100644
---- a/src/fullcolor/Makefile.am
-+++ b/src/fullcolor/Makefile.am
-@@ -9,9 +9,10 @@ install-data-local:
- for file in `cd $(top_srcdir)/$(SVGOUTDIR)/$$size && find . -name "*.png"`; do \
- context="`dirname $$file`"; \
- $(mkdir_p) $(DESTDIR)$(themedir)/$$size/$$context; \
-- $(install_sh_DATA) $(top_srcdir)/$(SVGOUTDIR)/$$size/$$file $(DESTDIR)$(themedir)/$$size/$$file; \
-+ $(install_sh_DATA) $(top_srcdir)/$(SVGOUTDIR)/$$size/$$file $(DESTDIR)$(themedir)/$$size/$$file & \
- done; \
-- done;
-+ done; \
-+ wait
-
- ## FIXME we should add a way to remove links generated by icon mapping
- uninstall-local:
-diff --git a/src/spinner/Makefile.am b/src/spinner/Makefile.am
-index c14caf6..322dc0d 100644
---- a/src/spinner/Makefile.am
-+++ b/src/spinner/Makefile.am
-@@ -24,13 +24,14 @@ install-data-local:
- for file in `cd $(top_srcdir)/$(SVGOUTDIR)/$$size; find . -name "*.png"`; do \
- context="`dirname $$file`"; \
- $(mkdir_p) $(DESTDIR)$(themedir)/$$size/$$context; \
-- $(install_sh_DATA) $(top_srcdir)/$(SVGOUTDIR)/$$size/$$file $(DESTDIR)$(themedir)/$$size/$$file; \
-+ $(install_sh_DATA) $(top_srcdir)/$(SVGOUTDIR)/$$size/$$file $(DESTDIR)$(themedir)/$$size/$$file & \
- done; \
- for file in `cd $(top_srcdir)/$(SVGOUTDIR)/scalable-up-to-32; find . -name "*.svg"`; do \
- context="`dirname $$file`"; \
- $(mkdir_p) $(DESTDIR)$(themedir)/scalable-up-to-32/$$context; \
-- $(install_sh_DATA) $(top_srcdir)/$(SVGOUTDIR)/scalable-up-to-32/$$file $(DESTDIR)$(themedir)/scalable-up-to-32/$$file; \
-- done
-+ $(install_sh_DATA) $(top_srcdir)/$(SVGOUTDIR)/scalable-up-to-32/$$file $(DESTDIR)$(themedir)/scalable-up-to-32/$$file & \
-+ done; \
-+ wait
-
- uninstall-local:
- for file in `cd $(top_srcdir)/$(SVGOUTDIR)/scalable-up-to-32; find . -name "*.svg"`; do \
-diff --git a/src/symbolic/Makefile.am b/src/symbolic/Makefile.am
-index 957c0ee..e1f8818 100644
---- a/src/symbolic/Makefile.am
-+++ b/src/symbolic/Makefile.am
-@@ -34,18 +34,19 @@ install-data-local:
- for file in `cd $(top_srcdir)/$(SVGOUTDIR)/$$size; find . -name "*.png"`; do \
- context="`dirname $$file`"; \
- $(mkdir_p) $(DESTDIR)$(themedir)/$$size/$$context; \
-- $(install_sh_DATA) $(top_srcdir)/$(SVGOUTDIR)/$$size/$$file $(DESTDIR)$(themedir)/$$size/$$file; \
-+ $(install_sh_DATA) $(top_srcdir)/$(SVGOUTDIR)/$$size/$$file $(DESTDIR)$(themedir)/$$size/$$file & \
- done; \
-- done
-+ done; \
- for file in `cd $(top_srcdir)/$(SVGOUTDIR)/scalable; find . -name "*.svg"`; do \
- context="`dirname $$file`"; \
- $(mkdir_p) $(DESTDIR)$(themedir)/scalable/$$context; \
-- $(install_sh_DATA) $(top_srcdir)/$(SVGOUTDIR)/scalable/$$file $(DESTDIR)$(themedir)/scalable/$$file; \
-+ $(install_sh_DATA) $(top_srcdir)/$(SVGOUTDIR)/scalable/$$file $(DESTDIR)$(themedir)/scalable/$$file & \
- for size in $(symbolic_encode_sizes); do \
- $(mkdir_p) $(DESTDIR)$(themedir)/$$size/$$context; \
-- $(GTK_ENCODE_SYMBOLIC_SVG) $(top_srcdir)/$(SVGOUTDIR)/scalable/$$file $$size -o $(DESTDIR)$(themedir)/$$size/$$context; \
-+ $(GTK_ENCODE_SYMBOLIC_SVG) $(top_srcdir)/$(SVGOUTDIR)/scalable/$$file $$size -o $(DESTDIR)$(themedir)/$$size/$$context & \
- done \
-- done
-+ done; \
-+ wait
-
- uninstall-local:
- for file in `cd $(top_srcdir)/$(SVGOUTDIR)/scalable; find . -name "*.svg"`; do \
diff --git a/meta/recipes-gnome/gnome/adwaita-icon-theme_41.0.bb b/meta/recipes-gnome/gnome/adwaita-icon-theme_41.0.bb
deleted file mode 100644
index 46f9f4772c..0000000000
--- a/meta/recipes-gnome/gnome/adwaita-icon-theme_41.0.bb
+++ /dev/null
@@ -1,43 +0,0 @@
-SUMMARY = "GTK+ icon theme"
-DESCRIPTION = "The Adwaita icon theme is the default icon theme of the GNOME desktop \
-This package package contains an icon theme for Gtk+ 3 applications."
-HOMEPAGE = "https://gitlab.gnome.org/GNOME/adwaita-icon-theme"
-BUGTRACKER = "https://gitlab.gnome.org/GNOME/adwaita-icon-theme/issues"
-SECTION = "x11/gnome"
-
-LICENSE = "LGPL-3.0-only | CC-BY-SA-3.0"
-LIC_FILES_CHKSUM = "file://COPYING;md5=c84cac88e46fc07647ea07e6c24eeb7c \
- file://COPYING_CCBYSA3;md5=96143d33de3a79321b1006c4e8ed07e7 \
- file://COPYING_LGPL;md5=e6a600fd5e1d9cbde2d983680233ad02"
-
-inherit allarch autotools pkgconfig gettext gtk-icon-cache gnomebase
-
-SRC_URI += " \
- file://0001-Don-t-use-AC_CANONICAL_HOST.patch \
- file://0001-Run-installation-commands-as-shell-jobs.patch \
- "
-
-SRC_URI[archive.sha256sum] = "ef5339d8c35fcad5d10481b70480803f0fa20b3d3cbc339238fcaceeaee01eba"
-
-DEPENDS += "librsvg-native"
-
-PACKAGES = "${PN}-cursors ${PN}-symbolic-hires ${PN}-symbolic ${PN}-hires ${PN}"
-
-RREPLACES:${PN} = "gnome-icon-theme"
-RCONFLICTS:${PN} = "gnome-icon-theme"
-RPROVIDES:${PN} = "gnome-icon-theme"
-
-FILES:${PN}-cursors = "${prefix}/share/icons/Adwaita/cursors/"
-FILES:${PN}-symbolic-hires = "${prefix}/share/icons/Adwaita/96x96/*/*.symbolic.png \
- ${prefix}/share/icons/Adwaita/64x64/*/*.symbolic.png \
- ${prefix}/share/icons/Adwaita/48x48/*/*.symbolic.png \
- ${prefix}/share/icons/Adwaita/32x32/*/*.symbolic.png"
-FILES:${PN}-symbolic = "${prefix}/share/icons/Adwaita/16x16/*/*.symbolic.png \
- ${prefix}/share/icons/Adwaita/24x24/*/*.symbolic.png \
- ${prefix}/share/icons/Adwaita/scalable/*/*-symbolic*.svg"
-FILES:${PN}-hires = "${prefix}/share/icons/Adwaita/256x256/ \
- ${prefix}/share/icons/Adwaita/512x512/"
-FILES:${PN} = "${prefix}/share/icons/Adwaita/ \
- ${prefix}/share/pkgconfig/adwaita-icon-theme.pc"
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-gnome/gnome/adwaita-icon-theme_46.0.bb b/meta/recipes-gnome/gnome/adwaita-icon-theme_46.0.bb
new file mode 100644
index 0000000000..2f3e4e7b85
--- /dev/null
+++ b/meta/recipes-gnome/gnome/adwaita-icon-theme_46.0.bb
@@ -0,0 +1,29 @@
+SUMMARY = "GTK+ icon theme"
+DESCRIPTION = "The Adwaita icon theme is the default icon theme of the GNOME desktop \
+This package package contains an icon theme for Gtk+ 3 applications."
+HOMEPAGE = "https://gitlab.gnome.org/GNOME/adwaita-icon-theme"
+BUGTRACKER = "https://gitlab.gnome.org/GNOME/adwaita-icon-theme/issues"
+SECTION = "x11/gnome"
+
+LICENSE = "LGPL-3.0-only | CC-BY-SA-3.0"
+LIC_FILES_CHKSUM = "file://COPYING;md5=c84cac88e46fc07647ea07e6c24eeb7c \
+ file://COPYING_CCBYSA3;md5=96143d33de3a79321b1006c4e8ed07e7 \
+ file://COPYING_LGPL;md5=e6a600fd5e1d9cbde2d983680233ad02"
+
+inherit gnomebase allarch gtk-icon-cache
+
+SRC_URI[archive.sha256sum] = "4bcb539bd75d64da385d6fa08cbaa9ddeaceb6ac8e82b85ba6c41117bf5ba64e"
+
+DEPENDS += "librsvg-native"
+
+PACKAGES =+ "${PN}-cursors ${PN}-symbolic"
+
+RREPLACES:${PN} = "gnome-icon-theme"
+RCONFLICTS:${PN} = "gnome-icon-theme"
+RPROVIDES:${PN} = "gnome-icon-theme"
+
+FILES:${PN}-cursors = "${datadir}/icons/Adwaita/cursors/"
+FILES:${PN}-symbolic = "${datadir}/icons/Adwaita/symbolic*/"
+FILES:${PN}-doc += "${datadir}/licenses/adwaita-icon-theme"
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-gnome/gnome/gconf_3.2.6.bb b/meta/recipes-gnome/gnome/gconf_3.2.6.bb
index 776f6091aa..1e29bd9bb0 100644
--- a/meta/recipes-gnome/gnome/gconf_3.2.6.bb
+++ b/meta/recipes-gnome/gnome/gconf_3.2.6.bb
@@ -7,8 +7,9 @@ HOMEPAGE = "https://gitlab.gnome.org/Archive/gconf"
LICENSE = "LGPL-2.0-or-later"
LIC_FILES_CHKSUM = "file://COPYING;md5=55ca817ccb7d5b5b66355690e9abc605"
-DEPENDS = "glib-2.0 dbus dbus-glib libxml2 intltool-native"
+DEPENDS = "glib-2.0 glib-2.0-native dbus dbus-glib libxml2 intltool-native"
+GNOMEBASEBUILDCLASS = "autotools"
inherit gnomebase gtk-doc gettext gobject-introspection gio-module-cache
SRC_URI = "${GNOME_MIRROR}/GConf/${@gnome_verdir("${PV}")}/GConf-${PV}.tar.xz;name=archive \
diff --git a/meta/recipes-gnome/gobject-introspection/gobject-introspection/0001-Relocate-the-repository-directory-for-native-builds.patch b/meta/recipes-gnome/gobject-introspection/gobject-introspection/0001-Relocate-the-repository-directory-for-native-builds.patch
index ba965092e1..c9e1afffd0 100644
--- a/meta/recipes-gnome/gobject-introspection/gobject-introspection/0001-Relocate-the-repository-directory-for-native-builds.patch
+++ b/meta/recipes-gnome/gobject-introspection/gobject-introspection/0001-Relocate-the-repository-directory-for-native-builds.patch
@@ -1,4 +1,4 @@
-From 74a0fee892235c722ac60ddea6ee79bc3d7a93f5 Mon Sep 17 00:00:00 2001
+From aeb5532f8be42d42f4e8725ca42e239b36983a4d Mon Sep 17 00:00:00 2001
From: Sascha Silbe <x-yo17@se-silbe.de>
Date: Fri, 8 Jun 2018 13:55:10 +0200
Subject: [PATCH] Relocate the repository directory for native builds
@@ -21,7 +21,7 @@ Signed-off-by: Sascha Silbe <x-yo17@se-silbe.de>
2 files changed, 14 insertions(+), 3 deletions(-)
diff --git a/girepository/girepository.c b/girepository/girepository.c
-index 7d03485..20f4813 100644
+index a0754f4..2d456f9 100644
--- a/girepository/girepository.c
+++ b/girepository/girepository.c
@@ -21,6 +21,8 @@
diff --git a/meta/recipes-gnome/gobject-introspection/gobject-introspection/0001-g-ir-tool-template.in-fix-girdir-path.patch b/meta/recipes-gnome/gobject-introspection/gobject-introspection/0001-g-ir-tool-template.in-fix-girdir-path.patch
deleted file mode 100644
index 5c78649de4..0000000000
--- a/meta/recipes-gnome/gobject-introspection/gobject-introspection/0001-g-ir-tool-template.in-fix-girdir-path.patch
+++ /dev/null
@@ -1,33 +0,0 @@
-From b01b448613b76f9acefdfd89ee01686dc7a67df4 Mon Sep 17 00:00:00 2001
-From: Chen Qi <Qi.Chen@windriver.com>
-Date: Tue, 13 Jul 2021 02:05:11 -0700
-Subject: [PATCH] g-ir-tool-template.in: fix girdir path
-
-In case gir_dir_prefix is set, it's possible that g-ir-scanner
-cannot find the .gir files. This is because that the girdir
-is set to gir_dir_prefix, which is wrong. It's not a prefix,
-it the actual gir dir.
-
-Upstream-Status: Submitted [https://gitlab.gnome.org/GNOME/gobject-introspection/-/merge_requests/329]
-
-Signed-off-by: Chen Qi <Qi.Chen@windriver.com>
----
- tools/g-ir-tool-template.in | 2 +-
- 1 file changed, 1 insertion(+), 1 deletion(-)
-
-diff --git a/tools/g-ir-tool-template.in b/tools/g-ir-tool-template.in
-index 6e98f52e..62c07c31 100755
---- a/tools/g-ir-tool-template.in
-+++ b/tools/g-ir-tool-template.in
-@@ -55,7 +55,7 @@ builtins.__dict__['DATADIR'] = datadir
-
- # Respect gir_dir_prefix
- girdir = ''
--girdir = os.path.abspath(os.path.join(filedir, '..', '@gir_dir_prefix@'))
-+girdir = os.path.abspath(os.path.join(filedir, '..', '@gir_dir_prefix@', 'gir-1.0'))
- builtins.__dict__['GIRDIR'] = [girdir]
-
- # Again, relative paths first so that the installation prefix is relocatable
---
-2.30.2
-
diff --git a/meta/recipes-gnome/gobject-introspection/gobject-introspection_1.72.0.bb b/meta/recipes-gnome/gobject-introspection/gobject-introspection_1.72.0.bb
deleted file mode 100644
index 355e77d107..0000000000
--- a/meta/recipes-gnome/gobject-introspection/gobject-introspection_1.72.0.bb
+++ /dev/null
@@ -1,197 +0,0 @@
-SUMMARY = "Middleware layer between GObject-using C libraries and language bindings"
-DESCRIPTION = "GObject Introspection is a project for providing machine \
-readable introspection data of the API of C libraries. This introspection \
-data can be used in several different use cases, for example automatic code \
-generation for bindings, API verification and documentation generation."
-HOMEPAGE = "https://wiki.gnome.org/action/show/Projects/GObjectIntrospection"
-BUGTRACKER = "https://gitlab.gnome.org/GNOME/gobject-introspection/issues"
-SECTION = "libs"
-LICENSE = "LGPL-2.0-or-later & GPL-2.0-or-later & MIT"
-LIC_FILES_CHKSUM = "file://COPYING;md5=c434e8128a68bedd59b80b2ac1eb1c4a \
- file://tools/compiler.c;endline=20;md5=fc5007fc20022720e6c0b0cdde41fabd \
- file://giscanner/sourcescanner.c;endline=22;md5=194d6e0c1d00662f32d030ce44de8d39 \
- file://girepository/giregisteredtypeinfo.c;endline=21;md5=661847611ae6979465415f31a759ba27 \
- "
-
-SRC_URI = "${GNOME_MIRROR}/${BPN}/${@oe.utils.trim_version("${PV}", 2)}/${BPN}-${PV}.tar.xz \
- file://0001-g-ir-tool-template.in-fix-girdir-path.patch \
- "
-
-SRC_URI[sha256sum] = "02fe8e590861d88f83060dd39cda5ccaa60b2da1d21d0f95499301b186beaabc"
-
-SRC_URI:append:class-native = " file://0001-Relocate-the-repository-directory-for-native-builds.patch"
-
-inherit meson pkgconfig gtk-doc python3native qemu gobject-introspection-data upstream-version-is-even multilib_script
-
-GTKDOC_MESON_OPTION = "gtk_doc"
-
-MULTILIB_SCRIPTS = "${PN}:${bindir}/g-ir-annotation-tool ${PN}:${bindir}/g-ir-scanner"
-
-DEPENDS += " libffi zlib glib-2.0 python3 flex-native bison-native autoconf-archive"
-
-# target build needs qemu to run temporary introspection binaries created
-# on the fly by g-ir-scanner and a native version of itself to run
-# native versions of its own tools during build.
-DEPENDS:append:class-target = " gobject-introspection-native qemu-native"
-
-# needed for writing out the qemu wrapper script
-export STAGING_DIR_HOST
-export B
-
-PACKAGECONFIG ?= ""
-PACKAGECONFIG[doctool] = "-Ddoctool=enabled,-Ddoctool=disabled,python3-mako,"
-
-# Configure target build to use native tools of itself and to use a qemu wrapper
-# and optionally to generate introspection data
-EXTRA_OEMESON:class-target = " \
- -Dgi_cross_use_prebuilt_gi=true \
- -Dgi_cross_binary_wrapper=${B}/g-ir-scanner-qemuwrapper \
- -Dgi_cross_ldd_wrapper=${B}/g-ir-scanner-lddwrapper \
- -Dgi_cross_pkgconfig_sysroot_path=${PKG_CONFIG_SYSROOT_DIR} \
- ${@bb.utils.contains('GI_DATA_ENABLED', 'True', '-Dbuild_introspection_data=true', '-Dbuild_introspection_data=false', d)} \
- ${@'-Dgir_dir_prefix=${libdir}' if d.getVar('MULTILIBS') else ''} \
-"
-
-do_configure:prepend:class-native() {
- # Tweak the native python scripts so that they don't refer to the
- # full path of native python binary (the solution is taken from glib-2.0 recipe)
- # This removes the risk of exceeding Linux kernel's shebang line limit (128 bytes)
- sed -i -e '1s,#!.*,#!${USRBINPATH}/env python3,' ${S}/tools/g-ir-tool-template.in
-}
-
-do_configure:prepend:class-target() {
- # Write out a qemu wrapper that will be given to gi-scanner so that it
- # can run target helper binaries through that.
- qemu_binary="${@qemu_wrapper_cmdline(d, '$STAGING_DIR_HOST', ['\\$GIR_EXTRA_LIBS_PATH','.libs','$STAGING_DIR_HOST/${libdir}','$STAGING_DIR_HOST/${base_libdir}'])}"
- cat > ${B}/g-ir-scanner-qemuwrapper << EOF
-#!/bin/sh
-# Use a modules directory which doesn't exist so we don't load random things
-# which may then get deleted (or their dependencies) and potentially segfault
-export GIO_MODULE_DIR=${STAGING_LIBDIR}/gio/modules-dummy
-
-$qemu_binary "\$@"
-if [ \$? -ne 0 ]; then
- echo "If the above error message is about missing .so libraries, then setting up GIR_EXTRA_LIBS_PATH in the recipe should help."
- echo "(typically like this: GIR_EXTRA_LIBS_PATH=\"$""{B}/something/.libs\" )"
- exit 1
-fi
-EOF
- chmod +x ${B}/g-ir-scanner-qemuwrapper
-
- # Write out a wrapper for g-ir-scanner itself, which will be used when building introspection files
- # for glib-based packages. This wrapper calls the native version of the scanner, and tells it to use
- # a qemu wrapper for running transient target binaries produced by the scanner, and an include directory
- # from the target sysroot.
- cat > ${B}/g-ir-scanner-wrapper << EOF
-#!/bin/sh
-# This prevents g-ir-scanner from writing cache data to user's HOME dir
-export GI_SCANNER_DISABLE_CACHE=1
-
-g-ir-scanner --lib-dirs-envvar=GIR_EXTRA_LIBS_PATH --use-binary-wrapper=${STAGING_BINDIR}/g-ir-scanner-qemuwrapper --use-ldd-wrapper=${STAGING_BINDIR}/g-ir-scanner-lddwrapper --add-include-path=${STAGING_DATADIR}/gir-1.0 --add-include-path=${STAGING_LIBDIR}/gir-1.0 "\$@"
-EOF
- chmod +x ${B}/g-ir-scanner-wrapper
-
- # Write out a wrapper for g-ir-compiler, which runs the target version of it through qemu.
- # g-ir-compiler writes out the raw content of a C struct to disk, and therefore is architecture dependent.
- cat > ${B}/g-ir-compiler-wrapper << EOF
-#!/bin/sh
-${STAGING_BINDIR}/g-ir-scanner-qemuwrapper ${STAGING_BINDIR}/g-ir-compiler "\$@"
-EOF
- chmod +x ${B}/g-ir-compiler-wrapper
-
- # Write out a wrapper to use instead of ldd, which does not work when a binary is built
- # for a different architecture
- cat > ${B}/g-ir-scanner-lddwrapper << EOF
-#!/bin/sh
-$OBJDUMP -p "\$@"
-EOF
- chmod +x ${B}/g-ir-scanner-lddwrapper
-
- # Also tweak the target python scripts so that they don't refer to the
- # native version of python binary (the solution is taken from glib-2.0 recipe)
- sed -i -e '1s,#!.*,#!${USRBINPATH}/env python3,' ${S}/tools/g-ir-tool-template.in
-}
-
-do_compile:prepend() {
- # This prevents g-ir-scanner from writing cache data to $HOME
- export GI_SCANNER_DISABLE_CACHE=1
-
- # Needed to run g-ir unit tests, which won't be able to find the built libraries otherwise
- export GIR_EXTRA_LIBS_PATH=$B/.libs
-}
-
-do_install:prepend() {
- # This prevents g-ir-scanner from writing cache data to $HOME
- export GI_SCANNER_DISABLE_CACHE=1
-}
-
-# Our wrappers need to be available system-wide, because they will be used
-# to build introspection files for all other gobject-based packages
-do_install:append:class-target() {
- install -d ${D}${bindir}/
- install ${B}/g-ir-scanner-qemuwrapper ${D}${bindir}/
- install ${B}/g-ir-scanner-wrapper ${D}${bindir}/
- install ${B}/g-ir-compiler-wrapper ${D}${bindir}/
- install ${B}/g-ir-scanner-lddwrapper ${D}${bindir}/
-}
-
-# we need target versions of introspection tools in sysroot so that they can be run via qemu
-# when building introspection files in other packages
-SYSROOT_DIRS:append:class-target = " ${bindir}"
-
-SYSROOT_PREPROCESS_FUNCS:append:class-target = " gi_binaries_sysroot_preprocess"
-gi_binaries_sysroot_preprocess() {
- # Tweak the binary names in the introspection pkgconfig file, so that it
- # picks up our wrappers which do the cross-compile and qemu magic.
- sed -i \
- -e "s|g_ir_scanner=.*|g_ir_scanner=${bindir}/g-ir-scanner-wrapper|" \
- -e "s|g_ir_compiler=.*|g_ir_compiler=${bindir}/g-ir-compiler-wrapper|" \
- ${SYSROOT_DESTDIR}${libdir}/pkgconfig/gobject-introspection-1.0.pc
-}
-
-SYSROOT_PREPROCESS_FUNCS:append = " gi_ldsoconf_sysroot_preprocess"
-gi_ldsoconf_sysroot_preprocess () {
- mkdir -p ${SYSROOT_DESTDIR}${bindir}
- dest=${SYSROOT_DESTDIR}${bindir}/postinst-ldsoconf-${PN}
- echo "#!/bin/sh" > $dest
- echo "mkdir -p ${STAGING_DIR_TARGET}${sysconfdir}" >> $dest
- echo "echo ${base_libdir} >> ${STAGING_DIR_TARGET}${sysconfdir}/ld.so.conf" >> $dest
- echo "echo ${libdir} >> ${STAGING_DIR_TARGET}${sysconfdir}/ld.so.conf" >> $dest
- chmod 755 $dest
-}
-
-# Remove wrapper files from the package, only used for cross-compiling
-PACKAGE_PREPROCESS_FUNCS += "gi_package_preprocess"
-gi_package_preprocess() {
- rm -f ${PKGD}${bindir}/g-ir-scanner-qemuwrapper
- rm -f ${PKGD}${bindir}/g-ir-scanner-wrapper
- rm -f ${PKGD}${bindir}/g-ir-compiler-wrapper
- rm -f ${PKGD}${bindir}/g-ir-scanner-lddwrapper
-}
-
-SSTATE_SCAN_FILES += "g-ir-scanner-qemuwrapper g-ir-scanner-wrapper g-ir-compiler-wrapper g-ir-scanner-lddwrapper Gio-2.0.gir postinst-ldsoconf-${PN}"
-
-# .typelib files are needed at runtime and so they go to the main package
-FILES:${PN}:append = " ${libdir}/girepository-*/*.typelib"
-
-# .gir files go to dev package, as they're needed for developing (but not for running)
-# things that depends on introspection.
-FILES:${PN}-dev:append = " ${datadir}/gir-*/*.gir ${libdir}/gir-*/*.gir"
-FILES:${PN}-dev:append = " ${datadir}/gir-*/*.rnc"
-
-# These are used by gobject-based packages
-# to generate transient introspection binaries
-FILES:${PN}-dev:append = " ${datadir}/gobject-introspection-1.0/gdump.c \
- ${datadir}/gobject-introspection-1.0/Makefile.introspection"
-
-# These are used by dependent packages (e.g. pygobject) to build their
-# testsuites.
-FILES:${PN}-dev:append = " ${datadir}/gobject-introspection-1.0/tests/*.c \
- ${datadir}/gobject-introspection-1.0/tests/*.h"
-
-FILES:${PN}-dbg += "${libdir}/gobject-introspection/giscanner/.debug/"
-FILES:${PN}-staticdev += "${libdir}/gobject-introspection/giscanner/*.a"
-
-RDEPENDS:${PN} = "python3-pickle python3-xml"
-
-BBCLASSEXTEND = "native"
diff --git a/meta/recipes-gnome/gobject-introspection/gobject-introspection_1.78.1.bb b/meta/recipes-gnome/gobject-introspection/gobject-introspection_1.78.1.bb
new file mode 100644
index 0000000000..05a08a50e0
--- /dev/null
+++ b/meta/recipes-gnome/gobject-introspection/gobject-introspection_1.78.1.bb
@@ -0,0 +1,194 @@
+SUMMARY = "Middleware layer between GObject-using C libraries and language bindings"
+DESCRIPTION = "GObject Introspection is a project for providing machine \
+readable introspection data of the API of C libraries. This introspection \
+data can be used in several different use cases, for example automatic code \
+generation for bindings, API verification and documentation generation."
+HOMEPAGE = "https://wiki.gnome.org/action/show/Projects/GObjectIntrospection"
+BUGTRACKER = "https://gitlab.gnome.org/GNOME/gobject-introspection/issues"
+SECTION = "libs"
+LICENSE = "LGPL-2.0-or-later & GPL-2.0-or-later & MIT"
+LIC_FILES_CHKSUM = "file://COPYING;md5=c434e8128a68bedd59b80b2ac1eb1c4a \
+ file://tools/compiler.c;endline=20;md5=fc5007fc20022720e6c0b0cdde41fabd \
+ file://giscanner/sourcescanner.c;endline=22;md5=194d6e0c1d00662f32d030ce44de8d39 \
+ file://girepository/giregisteredtypeinfo.c;endline=21;md5=661847611ae6979465415f31a759ba27 \
+ "
+
+SRC_URI = "${GNOME_MIRROR}/${BPN}/${@oe.utils.trim_version("${PV}", 2)}/${BPN}-${PV}.tar.xz \
+ "
+
+SRC_URI[sha256sum] = "bd7babd99af7258e76819e45ba4a6bc399608fe762d83fde3cac033c50841bb4"
+
+SRC_URI:append:class-native = " file://0001-Relocate-the-repository-directory-for-native-builds.patch"
+
+inherit meson pkgconfig gtk-doc python3targetconfig qemu gobject-introspection-data upstream-version-is-even multilib_script
+
+GTKDOC_MESON_OPTION = "gtk_doc"
+
+MULTILIB_SCRIPTS = "${PN}:${bindir}/g-ir-annotation-tool ${PN}:${bindir}/g-ir-scanner"
+
+DEPENDS += " libffi zlib glib-2.0 python3 flex-native bison-native"
+
+# target build needs qemu to run temporary introspection binaries created
+# on the fly by g-ir-scanner and a native version of itself to run
+# native versions of its own tools during build.
+DEPENDS:append:class-target = " gobject-introspection-native qemu-native"
+
+# needed for writing out the qemu wrapper script
+export STAGING_DIR_HOST
+export B
+
+PACKAGECONFIG ?= ""
+PACKAGECONFIG[doctool] = "-Ddoctool=enabled,-Ddoctool=disabled,python3-mako,"
+
+# Configure target build to use native tools of itself and to use a qemu wrapper
+# and optionally to generate introspection data
+EXTRA_OEMESON:class-target = " \
+ -Dgi_cross_use_prebuilt_gi=true \
+ -Dgi_cross_binary_wrapper=${B}/g-ir-scanner-qemuwrapper \
+ -Dgi_cross_ldd_wrapper=${B}/g-ir-scanner-lddwrapper \
+ -Dgi_cross_pkgconfig_sysroot_path=${PKG_CONFIG_SYSROOT_DIR} \
+ ${@bb.utils.contains('GI_DATA_ENABLED', 'True', '-Dbuild_introspection_data=true', '-Dbuild_introspection_data=false', d)} \
+ ${@'-Dgir_dir_prefix=${libdir}' if d.getVar('MULTILIBS') else ''} \
+"
+
+do_configure:prepend:class-native() {
+ # Tweak the native python scripts so that they don't refer to the
+ # full path of native python binary (the solution is taken from glib-2.0 recipe)
+ # This removes the risk of exceeding Linux kernel's shebang line limit (128 bytes)
+ sed -i -e '1s,#!.*,#!${USRBINPATH}/env python3,' ${S}/tools/g-ir-tool-template.in
+}
+
+do_configure:prepend:class-target() {
+ # Write out a qemu wrapper that will be given to gi-scanner so that it
+ # can run target helper binaries through that.
+ qemu_binary="${@qemu_wrapper_cmdline(d, '$STAGING_DIR_HOST', ['\\$GIR_EXTRA_LIBS_PATH','.libs','$STAGING_DIR_HOST/${libdir}','$STAGING_DIR_HOST/${base_libdir}'])}"
+ cat > ${B}/g-ir-scanner-qemuwrapper << EOF
+#!/bin/sh
+# Use a modules directory which doesn't exist so we don't load random things
+# which may then get deleted (or their dependencies) and potentially segfault
+export GIO_MODULE_DIR=${STAGING_LIBDIR}/gio/modules-dummy
+
+$qemu_binary "\$@"
+if [ \$? -ne 0 ]; then
+ echo "If the above error message is about missing .so libraries, then setting up GIR_EXTRA_LIBS_PATH in the recipe should help."
+ echo "(typically like this: GIR_EXTRA_LIBS_PATH=\"$""{B}/something/.libs\" )"
+ exit 1
+fi
+EOF
+ chmod +x ${B}/g-ir-scanner-qemuwrapper
+
+ # Write out a wrapper for g-ir-scanner itself, which will be used when building introspection files
+ # for glib-based packages. This wrapper calls the native version of the scanner, and tells it to use
+ # a qemu wrapper for running transient target binaries produced by the scanner, and an include directory
+ # from the target sysroot.
+ cat > ${B}/g-ir-scanner-wrapper << EOF
+#!/bin/sh
+# This prevents g-ir-scanner from writing cache data to user's HOME dir
+export GI_SCANNER_DISABLE_CACHE=1
+
+g-ir-scanner --lib-dirs-envvar=GIR_EXTRA_LIBS_PATH --use-binary-wrapper=${STAGING_BINDIR}/g-ir-scanner-qemuwrapper --use-ldd-wrapper=${STAGING_BINDIR}/g-ir-scanner-lddwrapper --add-include-path=${STAGING_DATADIR}/gir-1.0 --add-include-path=${STAGING_LIBDIR}/gir-1.0 "\$@"
+EOF
+ chmod +x ${B}/g-ir-scanner-wrapper
+
+ # Write out a wrapper for g-ir-compiler, which runs the target version of it through qemu.
+ # g-ir-compiler writes out the raw content of a C struct to disk, and therefore is architecture dependent.
+ cat > ${B}/g-ir-compiler-wrapper << EOF
+#!/bin/sh
+${STAGING_BINDIR}/g-ir-scanner-qemuwrapper ${STAGING_BINDIR}/g-ir-compiler "\$@"
+EOF
+ chmod +x ${B}/g-ir-compiler-wrapper
+
+ # Write out a wrapper to use instead of ldd, which does not work when a binary is built
+ # for a different architecture
+ cat > ${B}/g-ir-scanner-lddwrapper << EOF
+#!/bin/sh
+\$OBJDUMP -p "\$@"
+EOF
+ chmod +x ${B}/g-ir-scanner-lddwrapper
+
+ # Also tweak the target python scripts so that they don't refer to the
+ # native version of python binary (the solution is taken from glib-2.0 recipe)
+ sed -i -e '1s,#!.*,#!${USRBINPATH}/env python3,' ${S}/tools/g-ir-tool-template.in
+}
+
+do_compile:prepend() {
+ # Needed to run g-ir unit tests, which won't be able to find the built libraries otherwise
+ export GIR_EXTRA_LIBS_PATH=$B/.libs
+}
+
+do_install:prepend() {
+ # This prevents g-ir-scanner from writing cache data to $HOME
+ export GI_SCANNER_DISABLE_CACHE=1
+}
+
+# Our wrappers need to be available system-wide, because they will be used
+# to build introspection files for all other gobject-based packages
+do_install:append:class-target() {
+ install -d ${D}${bindir}/
+ install ${B}/g-ir-scanner-qemuwrapper ${D}${bindir}/
+ install ${B}/g-ir-scanner-wrapper ${D}${bindir}/
+ install ${B}/g-ir-compiler-wrapper ${D}${bindir}/
+ install ${B}/g-ir-scanner-lddwrapper ${D}${bindir}/
+}
+
+# we need target versions of introspection tools in sysroot so that they can be run via qemu
+# when building introspection files in other packages
+SYSROOT_DIRS:append:class-target = " ${bindir}"
+
+SYSROOT_PREPROCESS_FUNCS:append:class-target = " gi_binaries_sysroot_preprocess"
+gi_binaries_sysroot_preprocess() {
+ # Tweak the binary names in the introspection pkgconfig file, so that it
+ # picks up our wrappers which do the cross-compile and qemu magic.
+ sed -i \
+ -e "s|g_ir_scanner=.*|g_ir_scanner=${bindir}/g-ir-scanner-wrapper|" \
+ -e "s|g_ir_compiler=.*|g_ir_compiler=${bindir}/g-ir-compiler-wrapper|" \
+ ${SYSROOT_DESTDIR}${libdir}/pkgconfig/gobject-introspection-1.0.pc
+}
+
+SYSROOT_PREPROCESS_FUNCS:append = " gi_ldsoconf_sysroot_preprocess"
+gi_ldsoconf_sysroot_preprocess () {
+ mkdir -p ${SYSROOT_DESTDIR}${bindir}
+ dest=${SYSROOT_DESTDIR}${bindir}/postinst-ldsoconf-${PN}
+ echo "#!/bin/sh" > $dest
+ echo "mkdir -p ${STAGING_DIR_TARGET}${sysconfdir}" >> $dest
+ echo "echo ${base_libdir} >> ${STAGING_DIR_TARGET}${sysconfdir}/ld.so.conf" >> $dest
+ echo "echo ${libdir} >> ${STAGING_DIR_TARGET}${sysconfdir}/ld.so.conf" >> $dest
+ chmod 755 $dest
+}
+
+# Remove wrapper files from the package, only used for cross-compiling
+PACKAGE_PREPROCESS_FUNCS += "gi_package_preprocess"
+gi_package_preprocess() {
+ rm -f ${PKGD}${bindir}/g-ir-scanner-qemuwrapper
+ rm -f ${PKGD}${bindir}/g-ir-scanner-wrapper
+ rm -f ${PKGD}${bindir}/g-ir-compiler-wrapper
+ rm -f ${PKGD}${bindir}/g-ir-scanner-lddwrapper
+}
+
+SSTATE_SCAN_FILES += "g-ir-scanner-qemuwrapper g-ir-scanner-wrapper g-ir-compiler-wrapper g-ir-scanner-lddwrapper Gio-2.0.gir postinst-ldsoconf-${PN}"
+
+# .typelib files are needed at runtime and so they go to the main package
+FILES:${PN}:append = " ${libdir}/girepository-*/*.typelib"
+
+# .gir files go to dev package, as they're needed for developing (but not for running)
+# things that depends on introspection.
+FILES:${PN}-dev:append = " ${datadir}/gir-*/*.gir ${libdir}/gir-*/*.gir"
+FILES:${PN}-dev:append = " ${datadir}/gir-*/*.rnc"
+
+# These are used by gobject-based packages
+# to generate transient introspection binaries
+FILES:${PN}-dev:append = " ${datadir}/gobject-introspection-1.0/gdump.c \
+ ${datadir}/gobject-introspection-1.0/Makefile.introspection"
+
+# These are used by dependent packages (e.g. pygobject) to build their
+# testsuites.
+FILES:${PN}-dev:append = " ${datadir}/gobject-introspection-1.0/tests/*.c \
+ ${datadir}/gobject-introspection-1.0/tests/*.h"
+
+FILES:${PN}-dbg += "${libdir}/gobject-introspection/giscanner/.debug/"
+FILES:${PN}-staticdev += "${libdir}/gobject-introspection/giscanner/*.a"
+
+# setuptools can be removed when upstream removes all uses of distutils
+RDEPENDS:${PN} = "python3-pickle python3-xml python3-setuptools"
+
+BBCLASSEXTEND = "native"
diff --git a/meta/recipes-gnome/gsettings-desktop-schemas/gsettings-desktop-schemas_42.0.bb b/meta/recipes-gnome/gsettings-desktop-schemas/gsettings-desktop-schemas_42.0.bb
deleted file mode 100644
index b8ae15c229..0000000000
--- a/meta/recipes-gnome/gsettings-desktop-schemas/gsettings-desktop-schemas_42.0.bb
+++ /dev/null
@@ -1,16 +0,0 @@
-SUMMARY = "GNOME desktop-wide GSettings schemas"
-DESCRIPTION = "GSettings desktop-wide schemas contains a collection of \
-GSettings schemas for settings shared by various components of a desktop."
-HOMEPAGE = "https://gitlab.gnome.org/GNOME/gsettings-desktop-schemas"
-BUGTRACKER = "https://gitlab.gnome.org/GNOME/gsettings-desktop-schemas/issues"
-
-LICENSE = "LGPL-2.1-only"
-LIC_FILES_CHKSUM = "file://COPYING;md5=4fbd65380cdd255951079008b364516c"
-
-DEPENDS = "glib-2.0"
-
-GNOMEBASEBUILDCLASS = "meson"
-
-inherit gnomebase gsettings gobject-introspection gettext
-
-SRC_URI[archive.sha256sum] = "6686335a9ed623f7ae2276fefa50a410d4e71d4231880824714070cb317323d2"
diff --git a/meta/recipes-gnome/gsettings-desktop-schemas/gsettings-desktop-schemas_46.0.bb b/meta/recipes-gnome/gsettings-desktop-schemas/gsettings-desktop-schemas_46.0.bb
new file mode 100644
index 0000000000..8260c3d357
--- /dev/null
+++ b/meta/recipes-gnome/gsettings-desktop-schemas/gsettings-desktop-schemas_46.0.bb
@@ -0,0 +1,15 @@
+SUMMARY = "GNOME desktop-wide GSettings schemas"
+DESCRIPTION = "GSettings desktop-wide schemas contains a collection of \
+GSettings schemas for settings shared by various components of a desktop."
+HOMEPAGE = "https://gitlab.gnome.org/GNOME/gsettings-desktop-schemas"
+BUGTRACKER = "https://gitlab.gnome.org/GNOME/gsettings-desktop-schemas/issues"
+
+LICENSE = "LGPL-2.1-only"
+LIC_FILES_CHKSUM = "file://COPYING;md5=4fbd65380cdd255951079008b364516c"
+
+DEPENDS = "glib-2.0"
+
+
+inherit gnomebase gsettings gobject-introspection gettext
+
+SRC_URI[archive.sha256sum] = "493a46a1161b6388d57aa72f632a79ce96c42d5ffbd1d0b00f496ec5876f8575"
diff --git a/meta/recipes-gnome/gtk+/gtk+3.inc b/meta/recipes-gnome/gtk+/gtk+3.inc
index 7d50d7bd2e..e1603b43fc 100644
--- a/meta/recipes-gnome/gtk+/gtk+3.inc
+++ b/meta/recipes-gnome/gtk+/gtk+3.inc
@@ -6,12 +6,11 @@ HOMEPAGE = "http://www.gtk.org"
BUGTRACKER = "https://bugzilla.gnome.org/"
SECTION = "libs"
-DEPENDS = "glib-2.0 cairo pango atk jpeg libpng gdk-pixbuf \
- gdk-pixbuf-native"
+DEPENDS = "glib-2.0 cairo pango atk jpeg libpng gdk-pixbuf gdk-pixbuf-native"
LICENSE = "LGPL-2.0-only & LGPL-2.0-or-later & LGPL-2.1-or-later"
-inherit autotools gettext pkgconfig gtk-doc update-alternatives gtk-immodules-cache gsettings features_check gobject-introspection
+inherit meson gettext pkgconfig gtk-doc update-alternatives gtk-immodules-cache gsettings features_check gobject-introspection
BBCLASSEXTEND = "native nativesdk"
@@ -22,43 +21,33 @@ UPSTREAM_CHECK_REGEX = "[^\d\.](?P<pver>3\.([1-8]?[02468])+(\.\d+)+)\.tar"
ANY_OF_DISTRO_FEATURES = "${GTK3DISTROFEATURES}"
-# This should be in autotools.bbclass, but until something elses uses it putting
-# it here avoids rebuilding everything.
-export PKG_CONFIG_FOR_BUILD = "${STAGING_BINDIR_NATIVE}/pkg-config-native"
-
do_configure:prepend() {
- #delete a file that will get confused with generated one in ${B}
- rm -f ${S}/gtk/gtktypefuncs.c
-
# These files are generated by wayland-scanner but will race over modification
# time between the copies in the sysroot from wayland-protocols and the copy
# in the source tree. Solve the race by deleting so they need to be regenerated.
- # 3.24.22 will not be shipping these files so this can be deleted then:
- # https://gitlab.gnome.org/GNOME/gtk/-/merge_requests/2183
- rm -f ${S}/modules/input/text-input-unstable-v3*.[ch]
+ rm -f ${S}/modules/input/*-text-input-*.[ch]
}
-EXTRA_OECONF += " \
- --disable-glibtest \
- --disable-xinerama \
- --enable-modules \
- ${@bb.utils.contains("DISTRO_FEATURES", "x11", "", "--disable-gtk-doc", d)} \
- "
+GTKDOC_MESON_OPTION = 'gtk_doc'
-do_compile:prepend() {
- export GIR_EXTRA_LIBS_PATH="${B}/gdk/.libs"
-}
+EXTRA_OEMESON = "-Dxinerama=no -Dtests=false"
+EXTRA_OEMESON:append:class-native = " -Ddemos=false -Dexamples=false"
-PACKAGECONFIG ??= "${@bb.utils.filter('DISTRO_FEATURES', 'opengl wayland x11', d)}"
+PACKAGECONFIG ??= " \
+ ${@bb.utils.filter('DISTRO_FEATURES', 'opengl x11', d)} \
+ ${@bb.utils.contains('DISTRO_FEATURES', 'opengl wayland', 'wayland', '', d)} \
+"
PACKAGECONFIG:class-native = "${@bb.utils.filter('DISTRO_FEATURES', 'x11', d)}"
PACKAGECONFIG:class-nativesdk = "${@bb.utils.filter('DISTRO_FEATURES', 'x11', d)}"
-PACKAGECONFIG[x11] = "--enable-x11-backend,--disable-x11-backend,at-spi2-atk fontconfig libx11 libxext libxcursor libxi libxdamage libxrandr libxrender libxcomposite libxfixes"
+PACKAGECONFIG[x11] = "-Dx11_backend=true,-Dx11_backend=false,at-spi2-atk fontconfig libx11 libxext libxcursor libxi libxdamage libxrandr libxrender libxcomposite libxfixes"
# this is provided by oe-core patch that removes epoxy/gl dependency from a X11 build
-PACKAGECONFIG[opengl] = "--enable-opengl,--disable-opengl,libepoxy"
-PACKAGECONFIG[wayland] = "--enable-wayland-backend,--disable-wayland-backend,wayland wayland-protocols libxkbcommon virtual/egl virtual/libgles2 wayland-native"
-PACKAGECONFIG[cups] = "--enable-cups,--disable-cups,cups"
-PACKAGECONFIG[colord] = "--enable-colord,--disable-colord,colord"
+PACKAGECONFIG[opengl] = "-Dopengl=true,-Dopengl=false,libepoxy"
+PACKAGECONFIG[wayland] = "-Dwayland_backend=true,-Dwayland_backend=false,wayland wayland-protocols libxkbcommon virtual/egl virtual/libgles2 wayland-native"
+PACKAGECONFIG[cups] = ",,cups,cups gtk3-printbackend-cups"
+PACKAGECONFIG[colord] = "-Dcolord=yes,-Dcolord=no,colord"
+PACKAGECONFIG[cloudproviders] = "-Dcloudproviders=true,-Dcloudproviders=false,libcloudproviders"
+PACKAGECONFIG[tracker3] = "-Dtracker3=true,-Dtracker3=false,tracker,tracker-miners"
prepare_gtk_scripts() {
mv ${D}${bindir}/gtk-update-icon-cache ${D}${bindir}/gtk-update-icon-cache-3.0
@@ -101,7 +90,8 @@ FILES:${PN}-demo = "${bindir}/gtk3-demo \
FILES:${PN}:append = " ${bindir}/gtk-update-icon-cache-3.0 \
${bindir}/gtk-query-immodules-3.0 \
${bindir}/gtk-launch \
- ${datadir}/themes ${sysconfdir} ${datadir}/glib-2.0/schemas/ \
+ ${datadir}/themes ${datadir}/gtk-3.0/emoji \
+ ${sysconfdir} ${datadir}/glib-2.0/schemas/ \
${libdir}/gtk-3.0/${LIBV}/engines/libpixmap.so \
${libdir}/gtk-3.0/modules/*.so"
diff --git a/meta/recipes-gnome/gtk+/gtk+3/0002-Do-not-try-to-initialize-GL-without-libGL.patch b/meta/recipes-gnome/gtk+/gtk+3/0002-Do-not-try-to-initialize-GL-without-libGL.patch
deleted file mode 100644
index 80dc2d7a05..0000000000
--- a/meta/recipes-gnome/gtk+/gtk+3/0002-Do-not-try-to-initialize-GL-without-libGL.patch
+++ /dev/null
@@ -1,57 +0,0 @@
-From 6575ab0f0e8c1bba033ad1616511e37a2ec995ff Mon Sep 17 00:00:00 2001
-From: Jussi Kukkonen <jussi.kukkonen@intel.com>
-Date: Fri, 16 Oct 2015 16:35:16 +0300
-Subject: [PATCH] Do not try to initialize GL without libGL
-
-_gdk_x11_screen_update_visuals_for_gl() will end up calling epoxys
-GLX api which will exit() if libGL.so.1 is not present. We do not
-want that to happen and we don't want every app to have to set
-"GDK_GL=disabled" environment variable: so use #ifdef set based on
-opengl distro feature.
-
-Upstream is not interested in the fix as it is: Either epoxy should be
-fixed (to not exit) or GTK+ possibly could do some additional probing
-before calling epoxy APIs.
-
-Upstream-Status: Denied
-Signed-off-by: Jussi Kukkonen <jussi.kukkonen@intel.com>
-
----
- configure.ac | 6 ++++++
- gdk/x11/gdkvisual-x11.c | 5 +++++
- 2 files changed, 11 insertions(+)
-
-diff --git a/configure.ac b/configure.ac
-index e9f5583..bd651bb 100644
---- a/configure.ac
-+++ b/configure.ac
-@@ -346,6 +346,12 @@ AC_ARG_ENABLE(cloudproviders,
- [AS_HELP_STRING([--enable-cloudproviders],
- [enable libcloudproviders integration])],
- [cloudproviders_set=yes])
-+AC_ARG_ENABLE(glx,
-+ [AS_HELP_STRING([--enable-glx],
-+ [When enabled Gdk will try to initialize GLX])])
-+AS_IF([test "x$enable_glx" != "xno"], [
-+ AC_DEFINE([HAVE_GLX], [], [GLX will be available at runtime])
-+])
-
- AC_ARG_ENABLE(profiler,
- [AS_HELP_STRING([--enable-profiler],
-diff --git a/gdk/x11/gdkvisual-x11.c b/gdk/x11/gdkvisual-x11.c
-index 81479d8..3c8c5c0 100644
---- a/gdk/x11/gdkvisual-x11.c
-+++ b/gdk/x11/gdkvisual-x11.c
-@@ -306,7 +306,12 @@ _gdk_x11_screen_init_visuals (GdkScreen *screen)
- /* If GL is available we want to pick better default/rgba visuals,
- as we care about glx details such as alpha/depth/stencil depth,
- stereo and double buffering */
-+ /* update_visuals_for_gl() will end up calling epoxy GLX api which
-+ will exit if libgl is not there: so only do this if we know GL
-+ is available */
-+#ifdef HAVE_GLX
- _gdk_x11_screen_update_visuals_for_gl (screen);
-+#endif
- }
-
- gint
diff --git a/meta/recipes-gnome/gtk+/gtk+3/0003-Add-disable-opengl-configure-option.patch b/meta/recipes-gnome/gtk+/gtk+3/0003-Add-disable-opengl-configure-option.patch
deleted file mode 100644
index 35cbab8761..0000000000
--- a/meta/recipes-gnome/gtk+/gtk+3/0003-Add-disable-opengl-configure-option.patch
+++ /dev/null
@@ -1,872 +0,0 @@
-From eef50c94587fc30cd624adb5eb213eb9fa663dc1 Mon Sep 17 00:00:00 2001
-From: Jussi Kukkonen <jussi.kukkonen@intel.com>
-Date: Tue, 21 Jun 2016 15:11:39 +0300
-Subject: [PATCH] Add --disable-opengl configure option
-
---disable-opengl will remove the dependency on libepoxy and on the
-OpenGL APIs. This is useful for those who want to keep using gtk+3
-without the "opengl" distro feature.
-
-GtkGLArea is still part of the API (it just doesn't work) even when
-OpenGL is disabled. GdkX11GLContext was removed from the Gtk API
-completely: that object exposes GL API elements so it had to be at
-the very least modified.
-
-The patch is _not_ great from a maintenance point of view and
-modifying the library API is also a fairly nasty thing to do.
-Next long term release (4.0) will require alternative solutions
-as it actually will depend on OpenGL.
-
-Upstream-Status: Inappropriate [Evil eye expected from upstream]
-Signed-off-by: Jussi Kukkonen <jussi.kukkonen@intel.com>
-
----
- configure.ac | 13 ++++-
- demos/gtk-demo/glarea.c | 14 ++++++
- docs/tools/Makefile.am | 9 +++-
- docs/tools/widgets.c | 4 +-
- gdk/Makefile.am | 8 ++-
- gdk/gdkdisplay.c | 4 +-
- gdk/gdkgl.c | 10 ++++
- gdk/gdkglcontext.c | 6 +++
- gdk/gdkwindow.c | 13 +++++
- gdk/x11/Makefile.am | 30 +++++++++--
- gdk/x11/gdkdisplay-x11.c | 6 ++-
- gdk/x11/gdkscreen-x11.c | 5 ++
- gdk/x11/gdkwindow-x11.c | 4 ++
- gdk/x11/gdkx-autocleanups.h | 2 +
- gdk/x11/{gdkx.h => gdkx-with-gl-context.h} | 1 -
- gdk/x11/gdkx-without-gl-context.h | 58 ++++++++++++++++++++++
- gtk/Makefile.am | 2 +-
- gtk/gtkglarea.c | 20 +++++++-
- gtk/inspector/general.c | 6 +++
- tests/Makefile.am | 10 ++--
- testsuite/gtk/objects-finalize.c | 2 +
- 21 files changed, 208 insertions(+), 19 deletions(-)
- rename gdk/x11/{gdkx.h => gdkx-with-gl-context.h} (98%)
- create mode 100644 gdk/x11/gdkx-without-gl-context.h
-
-diff --git a/configure.ac b/configure.ac
-index 851bcbf..6cbf6a2 100644
---- a/configure.ac
-+++ b/configure.ac
-@@ -346,6 +346,15 @@ AC_ARG_ENABLE(cloudproviders,
- [AS_HELP_STRING([--enable-cloudproviders],
- [enable libcloudproviders integration])],
- [cloudproviders_set=yes])
-+AC_ARG_ENABLE(opengl,
-+ [AS_HELP_STRING([--enable-opengl],
-+ [When enabled, Gtk+ will use libepoxy and exposes GtkGLArea widget ])])
-+AS_IF([test "x$enable_opengl" != "xno"], [
-+ AC_DEFINE([HAVE_OPENGL], [1], [libepoxy and opengl APIs are available at buildtime])
-+ EPOXY_PACKAGES="epoxy >= epoxy_required_version"
-+])
-+AM_CONDITIONAL([HAVE_OPENGL],[test "x$enable_opengl" != "xno"])
-+
- AC_ARG_ENABLE(glx,
- [AS_HELP_STRING([--enable-glx],
- [When enabled Gdk will try to initialize GLX])])
-@@ -1345,7 +1354,7 @@ CFLAGS="$saved_cflags"
- LDFLAGS="$saved_ldflags"
-
- GDK_PACKAGES="$PANGO_PACKAGES gdk-pixbuf-2.0 >= gdk_pixbuf_required_version cairo >= cairo_required_version cairo-gobject >= cairo_required_version"
--GDK_PRIVATE_PACKAGES="$GDK_GIO_PACKAGE $X_PACKAGES $WAYLAND_PACKAGES $cairo_backends epoxy >= epoxy_required_version $CLOUDPROVIDER_PACKAGES $PROFILER_PACKAGES fribidi >= fribidi_required_version"
-+GDK_PRIVATE_PACKAGES="$GDK_GIO_PACKAGE $X_PACKAGES $WAYLAND_PACKAGES $cairo_backends $EPOXY_PACKAGES $CLOUDPROVIDER_PACKAGES $PROFILER_PACKAGES fribidi >= fribidi_required_version"
-
- PKG_CHECK_MODULES(GDK_DEP, $GDK_PACKAGES $GDK_PRIVATE_PACKAGES)
- GDK_DEP_LIBS="$GDK_EXTRA_LIBS $GDK_DEP_LIBS $MATH_LIB"
-@@ -1379,7 +1388,7 @@ fi
- PKG_CHECK_MODULES(ATK, $ATK_PACKAGES)
-
- GTK_PACKAGES="atk >= atk_required_version cairo >= cairo_required_version cairo-gobject >= cairo_required_version gdk-pixbuf-2.0 >= gdk_pixbuf_required_version gio-2.0 >= glib_required_version"
--GTK_PRIVATE_PACKAGES="$ATK_PACKAGES $WAYLAND_PACKAGES epoxy >= epoxy_required_version fribidi >= fribidi_required_version"
-+GTK_PRIVATE_PACKAGES="$ATK_PACKAGES $WAYLAND_PACKAGES $EPOXY_PACKAGES fribidi >= fribidi_required_version"
- if test "x$enable_x11_backend" = xyes -o "x$enable_wayland_backend" = xyes; then
- GTK_PRIVATE_PACKAGES="$GTK_PRIVATE_PACKAGES pangoft2"
- fi
-diff --git a/demos/gtk-demo/glarea.c b/demos/gtk-demo/glarea.c
-index b51e4ae..82409c7 100644
---- a/demos/gtk-demo/glarea.c
-+++ b/demos/gtk-demo/glarea.c
-@@ -3,9 +3,12 @@
- * GtkGLArea is a widget that allows custom drawing using OpenGL calls.
- */
-
-+#include "config.h"
- #include <math.h>
- #include <gtk/gtk.h>
-+#if HAVE_OPENGL
- #include <epoxy/gl.h>
-+#endif
-
- static GtkWidget *demo_window = NULL;
-
-@@ -23,6 +26,8 @@ enum {
- /* Rotation angles on each axis */
- static float rotation_angles[N_AXIS] = { 0.0 };
-
-+#ifdef HAVE_OPENGL
-+
- /* The object we are drawing */
- static const GLfloat vertex_data[] = {
- 0.f, 0.5f, 0.f, 1.f,
-@@ -215,6 +220,7 @@ compute_mvp (float *res,
- static GLuint position_buffer;
- static GLuint program;
- static GLuint mvp_location;
-+#endif
-
- /* We need to set up our state when we realize the GtkGLArea widget */
- static void
-@@ -241,8 +247,10 @@ realize (GtkWidget *widget)
- fragment_path = "/glarea/glarea-gl.fs.glsl";
- }
-
-+#ifdef HAVE_OPENGL
- init_buffers (&position_buffer, NULL);
- init_shaders (vertex_path, fragment_path, &program, &mvp_location);
-+#endif
- }
-
- /* We should tear down the state when unrealizing */
-@@ -254,10 +262,13 @@ unrealize (GtkWidget *widget)
- if (gtk_gl_area_get_error (GTK_GL_AREA (widget)) != NULL)
- return;
-
-+#ifdef HAVE_OPENGL
- glDeleteBuffers (1, &position_buffer);
- glDeleteProgram (program);
-+#endif
- }
-
-+#ifdef HAVE_OPENGL
- static void
- draw_triangle (void)
- {
-@@ -290,6 +301,7 @@ draw_triangle (void)
- glBindBuffer (GL_ARRAY_BUFFER, 0);
- glUseProgram (0);
- }
-+#endif
-
- static gboolean
- render (GtkGLArea *area,
-@@ -298,6 +310,7 @@ render (GtkGLArea *area,
- if (gtk_gl_area_get_error (area) != NULL)
- return FALSE;
-
-+#ifdef HAVE_OPENGL
- /* Clear the viewport */
- glClearColor (0.5, 0.5, 0.5, 1.0);
- glClear (GL_COLOR_BUFFER_BIT);
-@@ -307,6 +320,7 @@ render (GtkGLArea *area,
-
- /* Flush the contents of the pipeline */
- glFlush ();
-+#endif
-
- return TRUE;
- }
-diff --git a/docs/tools/Makefile.am b/docs/tools/Makefile.am
-index bec43e3..189e8fc 100644
---- a/docs/tools/Makefile.am
-+++ b/docs/tools/Makefile.am
-@@ -9,13 +9,18 @@ AM_CPPFLAGS = \
- $(GTK_DEBUG_FLAGS) \
- $(GTK_DEP_CFLAGS)
-
-+if HAVE_OPENGL
-+GEARS_LDADD = $(top_builddir)/tests/gtkgears.o
-+endif
-+
- DEPS = \
-- $(top_builddir)/gtk/libgtk-3.la
-+ $(top_builddir)/gtk/libgtk-3.la \
-+ $(GEARS_LDADD)
-
- LDADDS = \
- $(top_builddir)/gtk/libgtk-3.la \
- $(top_builddir)/gdk/libgdk-3.la \
-- $(top_builddir)/tests/gtkgears.o \
-+ $(GEARS_LDADD) \
- $(GTK_DEP_LIBS) \
- $(GDK_DEP_LIBS) \
- -lm
-diff --git a/docs/tools/widgets.c b/docs/tools/widgets.c
-index 932daf1..54239d6 100644
---- a/docs/tools/widgets.c
-+++ b/docs/tools/widgets.c
-@@ -1526,9 +1526,11 @@ create_gl_area (void)
- widget = gtk_frame_new (NULL);
- gtk_frame_set_shadow_type (GTK_FRAME (widget), GTK_SHADOW_IN);
-
-+#ifdef HAVE_OPENGL
- gears = gtk_gears_new ();
- gtk_container_add (GTK_CONTAINER (widget), gears);
--
-+#endif
-+
- info = new_widget_info ("glarea", widget, MEDIUM);
-
- return info;
-diff --git a/gdk/Makefile.am b/gdk/Makefile.am
-index 710a548..b45f631 100644
---- a/gdk/Makefile.am
-+++ b/gdk/Makefile.am
-@@ -274,7 +274,6 @@ x11_introspection_files = \
- x11/gdkeventsource.c \
- x11/gdkeventtranslator.c \
- x11/gdkgeometry-x11.c \
-- x11/gdkglcontext-x11.c \
- x11/gdkkeys-x11.c \
- x11/gdkmain-x11.c \
- x11/gdkmonitor-x11.c \
-@@ -300,7 +299,6 @@ x11_introspection_files = \
- x11/gdkx11display.h \
- x11/gdkx11displaymanager.h \
- x11/gdkx11dnd.h \
-- x11/gdkx11glcontext.h \
- x11/gdkx11keys.h \
- x11/gdkx11monitor.h \
- x11/gdkx11property.h \
-@@ -310,6 +308,12 @@ x11_introspection_files = \
- x11/gdkx11visual.h \
- x11/gdkx11window.h
-
-+if HAVE_OPENGL
-+x11_introspection_files += \
-+ x11/gdkglcontext-x11.c \
-+ x11/gdkx11glcontext.h
-+endif
-+
- GdkX11-3.0.gir: libgdk-3.la Gdk-3.0.gir Makefile
- GdkX11_3_0_gir_SCANNERFLAGS = \
- --identifier-prefix=Gdk \
-diff --git a/gdk/gdkdisplay.c b/gdk/gdkdisplay.c
-index 748f548..911ab2a 100644
---- a/gdk/gdkdisplay.c
-+++ b/gdk/gdkdisplay.c
-@@ -2420,7 +2420,9 @@ gboolean
- gdk_display_make_gl_context_current (GdkDisplay *display,
- GdkGLContext *context)
- {
-- return GDK_DISPLAY_GET_CLASS (display)->make_gl_context_current (display, context);
-+ if (GDK_DISPLAY_GET_CLASS (display)->make_gl_context_current)
-+ return GDK_DISPLAY_GET_CLASS (display)->make_gl_context_current (display, context);
-+ return FALSE;
- }
-
- GdkRenderingMode
-diff --git a/gdk/gdkgl.c b/gdk/gdkgl.c
-index 9690077..55f85ef 100644
---- a/gdk/gdkgl.c
-+++ b/gdk/gdkgl.c
-@@ -26,7 +26,9 @@
- # include "win32/gdkwin32.h"
- #endif
-
-+#ifdef HAVE_OPENGL
- #include <epoxy/gl.h>
-+#endif
- #include <math.h>
- #include <string.h>
-
-@@ -40,6 +42,7 @@ gdk_cairo_surface_mark_as_direct (cairo_surface_t *surface,
- g_object_ref (window), g_object_unref);
- }
-
-+#ifdef HAVE_OPENGL
- static const char *
- get_vertex_type_name (int type)
- {
-@@ -212,6 +215,7 @@ use_texture_rect_program (GdkGLContextPaintData *paint_data)
- glUseProgram (paint_data->current_program->program);
- }
- }
-+#endif
-
- void
- gdk_gl_texture_quads (GdkGLContext *paint_context,
-@@ -220,6 +224,7 @@ gdk_gl_texture_quads (GdkGLContext *paint_context,
- GdkTexturedQuad *quads,
- gboolean flip_colors)
- {
-+#ifdef HAVE_OPENGL
- GdkGLContextPaintData *paint_data = gdk_gl_context_get_paint_data (paint_context);
- GdkGLContextProgram *program;
- GdkWindow *window = gdk_gl_context_get_window (paint_context);
-@@ -293,6 +298,7 @@ gdk_gl_texture_quads (GdkGLContext *paint_context,
-
- glDisableVertexAttribArray (program->position_location);
- glDisableVertexAttribArray (program->uv_location);
-+#endif
- }
-
- /* x,y,width,height describes a rectangle in the gl render buffer
-@@ -341,6 +347,7 @@ gdk_cairo_draw_from_gl (cairo_t *cr,
- int width,
- int height)
- {
-+#ifdef HAVE_OPENGL
- GdkGLContext *paint_context;
- cairo_surface_t *image;
- cairo_matrix_t matrix;
-@@ -718,6 +725,7 @@ out:
- if (clip_region)
- cairo_region_destroy (clip_region);
-
-+#endif
- }
-
- /* This is always called with the paint context current */
-@@ -725,6 +733,7 @@ void
- gdk_gl_texture_from_surface (cairo_surface_t *surface,
- cairo_region_t *region)
- {
-+#ifdef HAVE_OPENGL
- GdkGLContext *paint_context;
- cairo_surface_t *image;
- double device_x_offset, device_y_offset;
-@@ -825,4 +834,5 @@ gdk_gl_texture_from_surface (cairo_surface_t *surface,
-
- glDisable (GL_SCISSOR_TEST);
- glDeleteTextures (1, &texture_id);
-+#endif
- }
-diff --git a/gdk/gdkglcontext.c b/gdk/gdkglcontext.c
-index 3b23639..1f04f8e 100644
---- a/gdk/gdkglcontext.c
-+++ b/gdk/gdkglcontext.c
-@@ -85,7 +85,9 @@
- #include "gdkintl.h"
- #include "gdk-private.h"
-
-+#ifdef HAVE_OPENGL
- #include <epoxy/gl.h>
-+#endif
-
- typedef struct {
- GdkDisplay *display;
-@@ -243,6 +245,7 @@ gdk_gl_context_upload_texture (GdkGLContext *context,
- int height,
- guint texture_target)
- {
-+#ifdef HAVE_OPENGL
- GdkGLContextPrivate *priv = gdk_gl_context_get_instance_private (context);
-
- g_return_if_fail (GDK_IS_GL_CONTEXT (context));
-@@ -286,6 +289,7 @@ gdk_gl_context_upload_texture (GdkGLContext *context,
- glTexSubImage2D (texture_target, 0, 0, i, width, 1, GL_BGRA, GL_UNSIGNED_INT_8_8_8_8_REV, (unsigned char*) data + (i * stride));
- }
- }
-+#endif
- }
-
- static gboolean
-@@ -774,6 +778,7 @@ gdk_gl_context_realize (GdkGLContext *context,
- static void
- gdk_gl_context_check_extensions (GdkGLContext *context)
- {
-+#ifdef HAVE_OPENGL
- GdkGLContextPrivate *priv = gdk_gl_context_get_instance_private (context);
- gboolean has_npot, has_texture_rectangle;
-
-@@ -853,6 +858,7 @@ gdk_gl_context_check_extensions (GdkGLContext *context)
- priv->use_texture_rectangle ? "yes" : "no"));
-
- priv->extensions_checked = TRUE;
-+#endif
- }
-
- /**
-diff --git a/gdk/gdkwindow.c b/gdk/gdkwindow.c
-index 2de8ba4..1883a79 100644
---- a/gdk/gdkwindow.c
-+++ b/gdk/gdkwindow.c
-@@ -45,7 +45,9 @@
-
- #include <math.h>
-
-+#ifdef HAVE_OPENGL
- #include <epoxy/gl.h>
-+#endif
-
- /* for the use of round() */
- #include "fallback-c89.c"
-@@ -2844,6 +2846,13 @@ gdk_window_get_paint_gl_context (GdkWindow *window,
- {
- GError *internal_error = NULL;
-
-+#ifndef HAVE_OPENGL
-+ g_set_error_literal (error, GDK_GL_ERROR,
-+ GDK_GL_ERROR_NOT_AVAILABLE,
-+ _("GL support disabled with --disable-opengl"));
-+ return NULL;
-+#endif
-+
- if (_gdk_gl_flags & GDK_GL_DISABLE)
- {
- g_set_error_literal (error, GDK_GL_ERROR,
-@@ -2979,6 +2988,7 @@ gdk_window_begin_paint_internal (GdkWindow *window,
- }
- else
- {
-+#ifdef HAVE_OPENGL
- gdk_gl_context_make_current (context);
- /* With gl we always need a surface to combine the gl
- drawing with the native drawing. */
-@@ -2993,6 +3003,7 @@ gdk_window_begin_paint_internal (GdkWindow *window,
- glBlendFunc (GL_ONE, GL_ONE_MINUS_SRC_ALPHA);
-
- glViewport (0, 0, ww, wh);
-+#endif
- }
- }
-
-@@ -3056,6 +3067,7 @@ gdk_window_end_paint_internal (GdkWindow *window)
-
- gdk_gl_context_make_current (window->gl_paint_context);
-
-+#ifdef HAVE_OPENGL
- if (!cairo_region_is_empty (opaque_region))
- gdk_gl_texture_from_surface (window->current_paint.surface,
- opaque_region);
-@@ -3066,6 +3078,7 @@ gdk_window_end_paint_internal (GdkWindow *window)
- window->current_paint.need_blend_region);
- glDisable(GL_BLEND);
- }
-+#endif
-
- cairo_region_destroy (opaque_region);
-
-diff --git a/gdk/x11/Makefile.am b/gdk/x11/Makefile.am
-index 32b1f24..6352313 100644
---- a/gdk/x11/Makefile.am
-+++ b/gdk/x11/Makefile.am
-@@ -40,8 +40,6 @@ libgdk_x11_la_SOURCES = \
- gdkeventtranslator.c \
- gdkeventtranslator.h \
- gdkgeometry-x11.c \
-- gdkglcontext-x11.c \
-- gdkglcontext-x11.h \
- gdkkeys-x11.c \
- gdkmain-x11.c \
- gdkmonitor-x11.c \
-@@ -56,14 +54,32 @@ libgdk_x11_la_SOURCES = \
- gdkwindow-x11.h \
- gdkxftdefaults.c \
- gdkxid.c \
-- gdkx.h \
- gdkprivate-x11.h \
- xsettings-client.h \
- xsettings-client.c
-
-+if HAVE_OPENGL
-+libgdk_x11_la_SOURCES += \
-+ gdkglcontext-x11.c \
-+ gdkglcontext-x11.h
-+endif
-+
- libgdkinclude_HEADERS = \
- gdkx.h
-
-+if HAVE_OPENGL
-+GDKX_HEADER = gdkx-with-gl-context.h
-+else
-+GDKX_HEADER = gdkx-without-gl-context.h
-+endif
-+
-+BUILT_SOURCES = gdkx.h
-+
-+.PHONY: gdkx.h
-+gdkx.h:
-+ $(AM_V_GEN) cd $(srcdir) \
-+ && (cmp -s $(GDKX_HEADER) gdkx.h || cp $(GDKX_HEADER) gdkx.h )
-+
- libgdkx11include_HEADERS = \
- gdkx-autocleanups.h \
- gdkx11applaunchcontext.h \
-@@ -77,7 +93,6 @@ libgdkx11include_HEADERS = \
- gdkx11display.h \
- gdkx11displaymanager.h \
- gdkx11dnd.h \
-- gdkx11glcontext.h \
- gdkx11keys.h \
- gdkx11monitor.h \
- gdkx11property.h \
-@@ -87,10 +102,17 @@ libgdkx11include_HEADERS = \
- gdkx11visual.h \
- gdkx11window.h
-
-+if HAVE_OPENGL
-+libgdkx11include_HEADERS += gdkx11glcontext.h
-+endif
-+
- # We need to include all these C files here since the conditionals
- # don't seem to be correctly expanded for the dist files.
- EXTRA_DIST += \
-+ gdkx.h \
- gdksettings.c \
- meson.build
-
-+MAINTAINERCLEANFILES = gdkx.h
-+
- -include $(top_srcdir)/git.mk
-diff --git a/gdk/x11/gdkdisplay-x11.c b/gdk/x11/gdkdisplay-x11.c
-index 7e08f47..30fd7b6 100644
---- a/gdk/x11/gdkdisplay-x11.c
-+++ b/gdk/x11/gdkdisplay-x11.c
-@@ -37,7 +37,9 @@
- #include "gdkdisplay-x11.h"
- #include "gdkprivate-x11.h"
- #include "gdkscreen-x11.h"
-+#ifdef HAVE_OPENGL
- #include "gdkglcontext-x11.h"
-+#endif
- #include "gdk-private.h"
- #include "gdkprofilerprivate.h"
-
-@@ -3191,7 +3193,9 @@ gdk_x11_display_class_init (GdkX11DisplayClass * class)
- display_class->text_property_to_utf8_list = _gdk_x11_display_text_property_to_utf8_list;
- display_class->utf8_to_string_target = _gdk_x11_display_utf8_to_string_target;
-
-- display_class->make_gl_context_current = gdk_x11_display_make_gl_context_current;
-+#ifdef HAVE_OPENGL
-+ display_class->make_gl_context_current = gdk_x11_display_make_gl_context_current;
-+#endif
-
- display_class->get_default_seat = gdk_x11_display_get_default_seat;
-
-diff --git a/gdk/x11/gdkscreen-x11.c b/gdk/x11/gdkscreen-x11.c
-index bb4df05..46f5349 100644
---- a/gdk/x11/gdkscreen-x11.c
-+++ b/gdk/x11/gdkscreen-x11.c
-@@ -1827,3 +1827,8 @@ gdk_x11_screen_get_current_desktop (GdkScreen *screen)
- {
- return get_netwm_cardinal_property (screen, "_NET_CURRENT_DESKTOP");
- }
-+
-+#ifndef HAVE_OPENGL
-+/* Function from in gdk/x11/gdkglcontext-x11.c */
-+void _gdk_x11_screen_update_visuals_for_gl (GdkScreen *screen) {}
-+#endif
-diff --git a/gdk/x11/gdkwindow-x11.c b/gdk/x11/gdkwindow-x11.c
-index 721d9bb..8e87acc 100644
---- a/gdk/x11/gdkwindow-x11.c
-+++ b/gdk/x11/gdkwindow-x11.c
-@@ -36,7 +36,9 @@
- #include "gdkasync.h"
- #include "gdkeventsource.h"
- #include "gdkdisplay-x11.h"
-+#ifdef HAVE_OPENGL
- #include "gdkglcontext-x11.h"
-+#endif
- #include "gdkprivate-x11.h"
- #include "gdk-private.h"
-
-@@ -5881,7 +5883,9 @@ gdk_window_impl_x11_class_init (GdkWindowImplX11Class *klass)
- impl_class->set_opaque_region = gdk_x11_window_set_opaque_region;
- impl_class->set_shadow_width = gdk_x11_window_set_shadow_width;
- impl_class->show_window_menu = gdk_x11_window_show_window_menu;
-+#ifdef HAVE_OPENGL
- impl_class->create_gl_context = gdk_x11_window_create_gl_context;
- impl_class->invalidate_for_new_frame = gdk_x11_window_invalidate_for_new_frame;
-+#endif
- impl_class->get_unscaled_size = gdk_x11_window_get_unscaled_size;
- }
-diff --git a/gdk/x11/gdkx-autocleanups.h b/gdk/x11/gdkx-autocleanups.h
-index edb0ea7..a317d61 100644
---- a/gdk/x11/gdkx-autocleanups.h
-+++ b/gdk/x11/gdkx-autocleanups.h
-@@ -30,7 +30,9 @@ G_DEFINE_AUTOPTR_CLEANUP_FUNC(GdkX11DeviceXI2, g_object_unref)
- G_DEFINE_AUTOPTR_CLEANUP_FUNC(GdkX11Display, g_object_unref)
- G_DEFINE_AUTOPTR_CLEANUP_FUNC(GdkX11DisplayManager, g_object_unref)
- G_DEFINE_AUTOPTR_CLEANUP_FUNC(GdkX11DragContext, g_object_unref)
-+#ifdef HAVE_OPENGL
- G_DEFINE_AUTOPTR_CLEANUP_FUNC(GdkX11GLContext, g_object_unref)
-+#endif
- G_DEFINE_AUTOPTR_CLEANUP_FUNC(GdkX11Keymap, g_object_unref)
- G_DEFINE_AUTOPTR_CLEANUP_FUNC(GdkX11Screen, g_object_unref)
- G_DEFINE_AUTOPTR_CLEANUP_FUNC(GdkX11Visual, g_object_unref)
-diff --git a/gdk/x11/gdkx.h b/gdk/x11/gdkx-with-gl-context.h
-similarity index 98%
-rename from gdk/x11/gdkx.h
-rename to gdk/x11/gdkx-with-gl-context.h
-index 1f64bcc..ae05fa6 100644
---- a/gdk/x11/gdkx.h
-+++ b/gdk/x11/gdkx-with-gl-context.h
-@@ -45,7 +45,6 @@
- #include <gdk/x11/gdkx11dnd.h>
- #include <gdk/x11/gdkx11glcontext.h>
- #include <gdk/x11/gdkx11keys.h>
--#include <gdk/x11/gdkx11monitor.h>
- #include <gdk/x11/gdkx11property.h>
- #include <gdk/x11/gdkx11screen.h>
- #include <gdk/x11/gdkx11selection.h>
-diff --git a/gdk/x11/gdkx-without-gl-context.h b/gdk/x11/gdkx-without-gl-context.h
-new file mode 100644
-index 0000000..c9e2617
---- /dev/null
-+++ b/gdk/x11/gdkx-without-gl-context.h
-@@ -0,0 +1,58 @@
-+/* GDK - The GIMP Drawing Kit
-+ * Copyright (C) 1995-1997 Peter Mattis, Spencer Kimball and Josh MacDonald
-+ *
-+ * This library is free software; you can redistribute it and/or
-+ * modify it under the terms of the GNU Lesser General Public
-+ * License as published by the Free Software Foundation; either
-+ * version 2 of the License, or (at your option) any later version.
-+ *
-+ * This library is distributed in the hope that it will be useful,
-+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
-+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-+ * Lesser General Public License for more details.
-+ *
-+ * You should have received a copy of the GNU Lesser General Public
-+ * License along with this library. If not, see <http://www.gnu.org/licenses/>.
-+ */
-+
-+/*
-+ * Modified by the GTK+ Team and others 1997-2000. See the AUTHORS
-+ * file for a list of people on the GTK+ Team. See the ChangeLog
-+ * files for a list of changes. These files are distributed with
-+ * GTK+ at ftp://ftp.gtk.org/pub/gtk/.
-+ */
-+
-+#ifndef __GDK_X_H__
-+#define __GDK_X_H__
-+
-+#include <gdk/gdk.h>
-+
-+#include <X11/Xlib.h>
-+#include <X11/Xutil.h>
-+
-+#define __GDKX_H_INSIDE__
-+
-+#include <gdk/x11/gdkx11applaunchcontext.h>
-+#include <gdk/x11/gdkx11cursor.h>
-+#include <gdk/x11/gdkx11device.h>
-+#include <gdk/x11/gdkx11device-core.h>
-+#include <gdk/x11/gdkx11device-xi2.h>
-+#include <gdk/x11/gdkx11devicemanager.h>
-+#include <gdk/x11/gdkx11devicemanager-core.h>
-+#include <gdk/x11/gdkx11devicemanager-xi2.h>
-+#include <gdk/x11/gdkx11display.h>
-+#include <gdk/x11/gdkx11displaymanager.h>
-+#include <gdk/x11/gdkx11dnd.h>
-+#include <gdk/x11/gdkx11keys.h>
-+#include <gdk/x11/gdkx11property.h>
-+#include <gdk/x11/gdkx11screen.h>
-+#include <gdk/x11/gdkx11selection.h>
-+#include <gdk/x11/gdkx11utils.h>
-+#include <gdk/x11/gdkx11visual.h>
-+#include <gdk/x11/gdkx11window.h>
-+
-+#include <gdk/x11/gdkx-autocleanups.h>
-+
-+#undef __GDKX_H_INSIDE__
-+
-+#endif /* __GDK_X_H__ */
-diff --git a/gtk/Makefile.am b/gtk/Makefile.am
-index 074fb35..4fa9eb6 100644
---- a/gtk/Makefile.am
-+++ b/gtk/Makefile.am
-@@ -1457,7 +1457,7 @@ gtktypefuncs.inc: stamp-gtktypebuiltins.h stamp-gtkprivatetypebuiltins.h $(top_s
- ${CPP} $(DEFS) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) xgen-gtfsrc.c | \
- $(GREP) -o '\bg[td]k_[a-zA-Z0-9_]*_get_type\b' | \
- sort | uniq | \
-- $(SED) '{ s/^/*tp++ = /; s/$$/();/; s/^.*\(gdk_x11\|gtk_plug_\|gtk_socket_\).*$$/#ifdef GDK_WINDOWING_X11\n&\n#endif/; }' >> xgen-gtf \
-+ $(SED) '{ s/^/*tp++ = /; s/$$/();/; s/^.*\(gdk_x11\|gtk_plug_\|gtk_socket_\).*$$/#ifdef GDK_WINDOWING_X11\n&\n#endif/; s/^.*gdk_x11_gl.*$$/#ifdef HAVE_OPENGL\n&\n#endif/; }' >> xgen-gtf \
- && cp xgen-gtf $@ && rm -f xgen-gtf
- $(srcdir)/gtktestutils.c: gtktypefuncs.inc
-
-diff --git a/gtk/gtkglarea.c b/gtk/gtkglarea.c
-index 802303e..33001cf 100644
---- a/gtk/gtkglarea.c
-+++ b/gtk/gtkglarea.c
-@@ -29,7 +29,9 @@
- #include "gtkprivate.h"
- #include "gtkrender.h"
-
-+#ifdef HAVE_OPENGL
- #include <epoxy/gl.h>
-+#endif
-
- /**
- * SECTION:gtkglarea
-@@ -369,9 +371,12 @@ gtk_gl_area_real_create_context (GtkGLArea *area)
- static void
- gtk_gl_area_resize (GtkGLArea *area, int width, int height)
- {
-+#ifdef HAVE_OPENGL
- glViewport (0, 0, width, height);
-+#endif
- }
-
-+#ifdef HAVE_OPENGL
- /*
- * Creates all the buffer objects needed for rendering the scene
- */
-@@ -483,6 +488,7 @@ gtk_gl_area_allocate_buffers (GtkGLArea *area)
-
- priv->needs_render = TRUE;
- }
-+#endif
-
- /**
- * gtk_gl_area_attach_buffers:
-@@ -501,6 +507,7 @@ gtk_gl_area_allocate_buffers (GtkGLArea *area)
- void
- gtk_gl_area_attach_buffers (GtkGLArea *area)
- {
-+#ifdef HAVE_OPENGL
- GtkGLAreaPrivate *priv = gtk_gl_area_get_instance_private (area);
-
- g_return_if_fail (GTK_IS_GL_AREA (area));
-@@ -533,11 +540,13 @@ gtk_gl_area_attach_buffers (GtkGLArea *area)
- glFramebufferRenderbuffer (GL_FRAMEBUFFER, GL_STENCIL_ATTACHMENT,
- GL_RENDERBUFFER, priv->depth_stencil_buffer);
- }
-+#endif
- }
-
- static void
- gtk_gl_area_delete_buffers (GtkGLArea *area)
- {
-+#ifdef HAVE_OPENGL
- GtkGLAreaPrivate *priv = gtk_gl_area_get_instance_private (area);
-
- if (priv->context == NULL)
-@@ -569,6 +578,7 @@ gtk_gl_area_delete_buffers (GtkGLArea *area)
- glDeleteFramebuffers (1, &priv->frame_buffer);
- priv->frame_buffer = 0;
- }
-+#endif
- }
-
- static void
-@@ -679,6 +689,7 @@ gtk_gl_area_draw (GtkWidget *widget,
- GtkGLArea *area = GTK_GL_AREA (widget);
- GtkGLAreaPrivate *priv = gtk_gl_area_get_instance_private (area);
- gboolean unused;
-+#ifdef HAVE_OPENGL
- int w, h, scale;
- GLenum status;
-
-@@ -690,7 +701,6 @@ gtk_gl_area_draw (GtkWidget *widget,
- gtk_widget_get_allocated_height (widget));
- return FALSE;
- }
--
- if (priv->context == NULL)
- return FALSE;
-
-@@ -736,6 +746,14 @@ gtk_gl_area_draw (GtkWidget *widget,
- }
-
- return TRUE;
-+#else
-+ if (priv->error != NULL)
-+ gtk_gl_area_draw_error_screen (area,
-+ cr,
-+ gtk_widget_get_allocated_width (widget),
-+ gtk_widget_get_allocated_height (widget));
-+ return FALSE;
-+#endif
- }
-
- static gboolean
-diff --git a/gtk/inspector/general.c b/gtk/inspector/general.c
-index 48237d1..1f9b9be 100644
---- a/gtk/inspector/general.c
-+++ b/gtk/inspector/general.c
-@@ -33,8 +33,10 @@
-
- #ifdef GDK_WINDOWING_X11
- #include "x11/gdkx.h"
-+#ifdef HAVE_OPENGL
- #include <epoxy/glx.h>
- #endif
-+#endif
-
- #ifdef GDK_WINDOWING_WIN32
- #include "win32/gdkwin32.h"
-@@ -196,6 +198,7 @@ add_label_row (GtkInspectorGeneral *gen,
- gtk_size_group_add_widget (GTK_SIZE_GROUP (gen->priv->labels), label);
- }
-
-+#ifdef HAVE_OPENGL
- #ifdef GDK_WINDOWING_X11
- static void
- append_glx_extension_row (GtkInspectorGeneral *gen,
-@@ -205,6 +208,7 @@ append_glx_extension_row (GtkInspectorGeneral *gen,
- add_check_row (gen, GTK_LIST_BOX (gen->priv->gl_box), ext, epoxy_has_glx_extension (dpy, 0, ext), 0);
- }
- #endif
-+#endif
-
- #ifdef GDK_WINDOWING_WAYLAND
- static void
-@@ -254,6 +258,7 @@ wayland_get_display (struct wl_display *wl_display)
- static void
- init_gl (GtkInspectorGeneral *gen)
- {
-+#ifdef HAVE_OPENGL
- #ifdef GDK_WINDOWING_X11
- if (GDK_IS_X11_DISPLAY (gdk_display_get_default ()))
- {
-@@ -280,6 +285,7 @@ init_gl (GtkInspectorGeneral *gen)
- }
- else
- #endif
-+#endif
- #ifdef GDK_WINDOWING_WAYLAND
- if (GDK_IS_WAYLAND_DISPLAY (gdk_display_get_default ()))
- {
-diff --git a/tests/Makefile.am b/tests/Makefile.am
-index f283e89..5e7180e 100644
---- a/tests/Makefile.am
-+++ b/tests/Makefile.am
-@@ -80,8 +80,6 @@ noinst_PROGRAMS = $(TEST_PROGS) \
- testfullscreen \
- testgeometry \
- testgiconpixbuf \
-- testglarea \
-- testglblending \
- testgrid \
- testgtk \
- testheaderbar \
-@@ -172,12 +170,18 @@ noinst_PROGRAMS = $(TEST_PROGS) \
- testactionbar \
- testwindowsize \
- testpopover \
-- gdkgears \
- listmodel \
- testpopup \
- testpopupat \
- $(NULL)
-
-+if HAVE_OPENGL
-+noinst_PROGRAMS +=
-+ testglarea \
-+ testglblending \
-+ gdkgears
-+endif
-+
- if USE_WAYLAND
- noinst_PROGRAMS += testforeign
- endif
-diff --git a/testsuite/gtk/objects-finalize.c b/testsuite/gtk/objects-finalize.c
-index 24540e3..e0f863a 100644
---- a/testsuite/gtk/objects-finalize.c
-+++ b/testsuite/gtk/objects-finalize.c
-@@ -116,7 +116,9 @@ main (int argc, char **argv)
- all_types[i] != GDK_TYPE_X11_DEVICE_MANAGER_CORE &&
- all_types[i] != GDK_TYPE_X11_DEVICE_MANAGER_XI2 &&
- all_types[i] != GDK_TYPE_X11_DISPLAY_MANAGER &&
-+#ifdef HAVE_OPENGL
- all_types[i] != GDK_TYPE_X11_GL_CONTEXT &&
-+#endif
- #endif
- /* Not allowed to finalize a GdkPixbufLoader without calling gdk_pixbuf_loader_close() */
- all_types[i] != GDK_TYPE_PIXBUF_LOADER &&
diff --git a/meta/recipes-gnome/gtk+/gtk+3/link_fribidi.patch b/meta/recipes-gnome/gtk+/gtk+3/link_fribidi.patch
deleted file mode 100644
index e4bbd799f1..0000000000
--- a/meta/recipes-gnome/gtk+/gtk+3/link_fribidi.patch
+++ /dev/null
@@ -1,19 +0,0 @@
-Link with libfribidi, this is to avoid under linking where these functions are
-used but the library is not linked in, and they are marked undefined by BFD linker
-but gold linker refuses to link
-
-| ./.libs/libgdk-3.so: error: undefined reference to 'fribidi_get_bidi_type'
-
-Signed-off-by: Khem Raj <raj.khem@gmail.com>
-Upstream-Status: Pending
-
---- a/gdk/Makefile.am
-+++ b/gdk/Makefile.am
-@@ -55,6 +55,7 @@ LDADD = \
- -version-info $(LT_VERSION_INFO) \
- -export-dynamic \
- -rpath $(libdir) \
-+ -lfribidi \
- $(no_undefined)
-
- #
diff --git a/meta/recipes-gnome/gtk+/gtk+3/opengl.patch b/meta/recipes-gnome/gtk+/gtk+3/opengl.patch
new file mode 100644
index 0000000000..8f2feb0ee7
--- /dev/null
+++ b/meta/recipes-gnome/gtk+/gtk+3/opengl.patch
@@ -0,0 +1,738 @@
+From 4a0716f04fb25b51b08e994bd5a900b2e7f7fed5 Mon Sep 17 00:00:00 2001
+From: Jussi Kukkonen <jussi.kukkonen@intel.com>
+Date: Fri, 16 Oct 2015 16:35:16 +0300
+Subject: [PATCH] Do not try to initialize GL without libGL
+
+_gdk_x11_screen_update_visuals_for_gl() will end up calling epoxys
+GLX api which will exit() if libGL.so.1 is not present. We do not
+want that to happen and we don't want every app to have to set
+"GDK_GL=disabled" environment variable: so use #ifdef set based on
+opengl distro feature.
+
+Upstream is not interested in the fix as it is: Either epoxy should be
+fixed (to not exit) or GTK+ possibly could do some additional probing
+before calling epoxy APIs.
+
+Upstream-Status: Denied
+Signed-off-by: Jussi Kukkonen <jussi.kukkonen@intel.com>
+---
+ demos/gtk-demo/meson.build | 5 ++++-
+ docs/tools/meson.build | 7 +++++--
+ docs/tools/widgets.c | 6 +++++-
+ gdk/gdkconfig.h.meson | 1 +
+ gdk/gdkdisplay.c | 4 ++++
+ gdk/gdkgl.c | 10 ++++++++++
+ gdk/gdkglcontext.c | 6 ++++++
+ gdk/gdkwindow.c | 13 +++++++++++++
+ gdk/meson.build | 8 +++++++-
+ gdk/x11/gdkdisplay-x11.c | 6 +++++-
+ gdk/x11/gdkvisual-x11.c | 5 +++++
+ gdk/x11/gdkwindow-x11.c | 4 ++++
+ gdk/x11/gdkx-autocleanups.h | 2 ++
+ gdk/x11/gdkx.h | 2 ++
+ gdk/x11/meson.build | 7 +++++--
+ gtk/gtkglarea.c | 19 +++++++++++++++++++
+ gtk/inspector/general.c | 6 ++++++
+ meson.build | 17 ++++++++++++++---
+ meson_options.txt | 2 ++
+ tests/meson.build | 9 +++++++--
+ testsuite/gtk/objects-finalize.c | 2 ++
+ 21 files changed, 128 insertions(+), 13 deletions(-)
+
+diff --git a/demos/gtk-demo/meson.build b/demos/gtk-demo/meson.build
+index 252da16d05..4b57cff6ac 100644
+--- a/demos/gtk-demo/meson.build
++++ b/demos/gtk-demo/meson.build
+@@ -28,7 +28,6 @@ demos = files([
+ 'fishbowl.c',
+ 'foreigndrawing.c',
+ 'gestures.c',
+- 'glarea.c',
+ 'headerbar.c',
+ 'hypertext.c',
+ 'iconview.c',
+@@ -87,6 +86,10 @@ elif harfbuzz_dep.found() and pangoft_dep.found()
+ gtkdemo_deps += [harfbuzz_dep, pangoft_dep]
+ endif
+
++if opengl_enabled
++ demos += files('glarea.c')
++endif
++
+ if os_unix
+ demos += files('pagesetup.c')
+ endif
+diff --git a/docs/tools/meson.build b/docs/tools/meson.build
+index 05621ee7ed..3d0a333b32 100644
+--- a/docs/tools/meson.build
++++ b/docs/tools/meson.build
+@@ -2,10 +2,13 @@ if x11_enabled
+ doc_shooter_sources = [
+ 'shadow.c',
+ 'shooter.c',
+- 'widgets.c',
+- '../../tests/gtkgears.c',
++ 'widgets.c'
+ ]
+
++ if opengl_enabled
++ doc_shooter_sources += ['../../tests/gtkgears.c']
++ endif
++
+ doc_shooter = executable('doc-shooter', doc_shooter_sources,
+ include_directories: [ confinc, gdkinc, gtkinc, testinc, ],
+ dependencies: libgtk_dep)
+diff --git a/docs/tools/widgets.c b/docs/tools/widgets.c
+index 932daf1746..348807e133 100644
+--- a/docs/tools/widgets.c
++++ b/docs/tools/widgets.c
+@@ -8,7 +8,9 @@
+ #include <X11/Xatom.h>
+ #include <gdkx.h>
+ #include "widgets.h"
++#ifdef HAVE_OPENGL
+ #include "gtkgears.h"
++#endif
+
+ #define SMALL_WIDTH 240
+ #define SMALL_HEIGHT 75
+@@ -1526,9 +1528,11 @@ create_gl_area (void)
+ widget = gtk_frame_new (NULL);
+ gtk_frame_set_shadow_type (GTK_FRAME (widget), GTK_SHADOW_IN);
+
++#ifdef HAVE_OPENGL
+ gears = gtk_gears_new ();
+ gtk_container_add (GTK_CONTAINER (widget), gears);
+-
++#endif
++
+ info = new_widget_info ("glarea", widget, MEDIUM);
+
+ return info;
+diff --git a/gdk/gdkconfig.h.meson b/gdk/gdkconfig.h.meson
+index 7db19e0470..088651bafa 100644
+--- a/gdk/gdkconfig.h.meson
++++ b/gdk/gdkconfig.h.meson
+@@ -15,6 +15,7 @@ G_BEGIN_DECLS
+ #mesondefine GDK_WINDOWING_WAYLAND
+ #mesondefine GDK_WINDOWING_WIN32
+ #mesondefine GDK_WINDOWING_QUARTZ
++#mesondefine GDK_WITH_OPENGL
+
+ G_END_DECLS
+
+diff --git a/gdk/gdkdisplay.c b/gdk/gdkdisplay.c
+index 748f54860c..04ef2c09d4 100644
+--- a/gdk/gdkdisplay.c
++++ b/gdk/gdkdisplay.c
+@@ -2420,7 +2420,11 @@ gboolean
+ gdk_display_make_gl_context_current (GdkDisplay *display,
+ GdkGLContext *context)
+ {
++#ifdef HAVE_OPENGL
+ return GDK_DISPLAY_GET_CLASS (display)->make_gl_context_current (display, context);
++#else
++ return FALSE;
++#endif
+ }
+
+ GdkRenderingMode
+diff --git a/gdk/gdkgl.c b/gdk/gdkgl.c
+index 9690077cc2..55f85ef605 100644
+--- a/gdk/gdkgl.c
++++ b/gdk/gdkgl.c
+@@ -21,8 +21,9 @@
+ #include "gdkglcontextprivate.h"
+
+ #include "gdkinternals.h"
+-
++#ifdef HAVE_OPENGL
+ #include <epoxy/gl.h>
++#endif
+ #include <math.h>
+ #include <string.h>
+
+@@ -36,6 +37,7 @@
+ g_object_ref (window), g_object_unref);
+ }
+
++#ifdef HAVE_OPENGL
+ static const char *
+ get_vertex_type_name (int type)
+ {
+@@ -208,6 +210,7 @@
+ glUseProgram (paint_data->current_program->program);
+ }
+ }
++#endif
+
+ void
+ gdk_gl_texture_quads (GdkGLContext *paint_context,
+@@ -216,6 +219,7 @@
+ GdkTexturedQuad *quads,
+ gboolean flip_colors)
+ {
++#ifdef HAVE_OPENGL
+ GdkGLContextPaintData *paint_data = gdk_gl_context_get_paint_data (paint_context);
+ GdkGLContextProgram *program;
+ GdkWindow *window = gdk_gl_context_get_window (paint_context);
+@@ -289,6 +293,7 @@
+
+ glDisableVertexAttribArray (program->position_location);
+ glDisableVertexAttribArray (program->uv_location);
++#endif
+ }
+
+ /* x,y,width,height describes a rectangle in the gl render buffer
+@@ -337,6 +342,7 @@
+ int width,
+ int height)
+ {
++#ifdef HAVE_OPENGL
+ GdkGLContext *paint_context, *current_context;
+ cairo_surface_t *image;
+ cairo_matrix_t matrix;
+@@ -703,6 +709,7 @@
+ if (clip_region)
+ cairo_region_destroy (clip_region);
+
++#endif
+ }
+
+ /* This is always called with the paint context current */
+@@ -710,6 +717,7 @@
+ gdk_gl_texture_from_surface (cairo_surface_t *surface,
+ cairo_region_t *region)
+ {
++#ifdef HAVE_OPENGL
+ GdkGLContext *paint_context;
+ cairo_surface_t *image;
+ double device_x_offset, device_y_offset;
+@@ -810,4 +818,5 @@
+
+ glDisable (GL_SCISSOR_TEST);
+ glDeleteTextures (1, &texture_id);
++#endif
+ }
+diff --git a/gdk/gdkglcontext.c b/gdk/gdkglcontext.c
+index 3b23639e1c..1f04f8e0b2 100644
+--- a/gdk/gdkglcontext.c
++++ b/gdk/gdkglcontext.c
+@@ -85,7 +85,9 @@
+ #include "gdkintl.h"
+ #include "gdk-private.h"
+
++#ifdef HAVE_OPENGL
+ #include <epoxy/gl.h>
++#endif
+
+ typedef struct {
+ GdkDisplay *display;
+@@ -243,6 +245,7 @@ gdk_gl_context_upload_texture (GdkGLContext *context,
+ int height,
+ guint texture_target)
+ {
++#ifdef HAVE_OPENGL
+ GdkGLContextPrivate *priv = gdk_gl_context_get_instance_private (context);
+
+ g_return_if_fail (GDK_IS_GL_CONTEXT (context));
+@@ -286,6 +289,7 @@ gdk_gl_context_upload_texture (GdkGLContext *context,
+ glTexSubImage2D (texture_target, 0, 0, i, width, 1, GL_BGRA, GL_UNSIGNED_INT_8_8_8_8_REV, (unsigned char*) data + (i * stride));
+ }
+ }
++#endif
+ }
+
+ static gboolean
+@@ -774,6 +778,7 @@ gdk_gl_context_realize (GdkGLContext *context,
+ static void
+ gdk_gl_context_check_extensions (GdkGLContext *context)
+ {
++#ifdef HAVE_OPENGL
+ GdkGLContextPrivate *priv = gdk_gl_context_get_instance_private (context);
+ gboolean has_npot, has_texture_rectangle;
+
+@@ -853,6 +858,7 @@ gdk_gl_context_check_extensions (GdkGLContext *context)
+ priv->use_texture_rectangle ? "yes" : "no"));
+
+ priv->extensions_checked = TRUE;
++#endif
+ }
+
+ /**
+diff --git a/gdk/gdkwindow.c b/gdk/gdkwindow.c
+index 727b0cf1f4..d4d91b0d16 100644
+--- a/gdk/gdkwindow.c
++++ b/gdk/gdkwindow.c
+@@ -45,7 +45,9 @@
+
+ #include <math.h>
+
++#ifdef HAVE_OPENGL
+ #include <epoxy/gl.h>
++#endif
+
+ /* for the use of round() */
+ #include "fallback-c89.c"
+@@ -2844,6 +2846,13 @@ gdk_window_get_paint_gl_context (GdkWindow *window,
+ {
+ GError *internal_error = NULL;
+
++#ifndef HAVE_OPENGL
++ g_set_error_literal (error, GDK_GL_ERROR,
++ GDK_GL_ERROR_NOT_AVAILABLE,
++ _("GL support disabled with --disable-opengl"));
++ return NULL;
++#endif
++
+ if (_gdk_gl_flags & GDK_GL_DISABLE)
+ {
+ g_set_error_literal (error, GDK_GL_ERROR,
+@@ -2979,6 +2988,7 @@ gdk_window_begin_paint_internal (GdkWindow *window,
+ }
+ else
+ {
++#ifdef HAVE_OPENGL
+ gdk_gl_context_make_current (context);
+ /* With gl we always need a surface to combine the gl
+ drawing with the native drawing. */
+@@ -2993,6 +3003,7 @@ gdk_window_begin_paint_internal (GdkWindow *window,
+ glBlendFunc (GL_ONE, GL_ONE_MINUS_SRC_ALPHA);
+
+ glViewport (0, 0, ww, wh);
++#endif
+ }
+ }
+
+@@ -3056,6 +3067,7 @@ gdk_window_end_paint_internal (GdkWindow *window)
+
+ gdk_gl_context_make_current (window->gl_paint_context);
+
++#ifdef HAVE_OPENGL
+ if (!cairo_region_is_empty (opaque_region))
+ gdk_gl_texture_from_surface (window->current_paint.surface,
+ opaque_region);
+@@ -3066,6 +3078,7 @@ gdk_window_end_paint_internal (GdkWindow *window)
+ window->current_paint.need_blend_region);
+ glDisable(GL_BLEND);
+ }
++#endif
+
+ cairo_region_destroy (opaque_region);
+
+diff --git a/gdk/meson.build b/gdk/meson.build
+index 4bb1bf2b6c..64172b8d3e 100644
+--- a/gdk/meson.build
++++ b/gdk/meson.build
+@@ -56,7 +56,6 @@ gdk_gir_public_headers = files(
+ 'gdkdrawingcontext.h',
+ 'gdkevents.h',
+ 'gdkframetimings.h',
+- 'gdkglcontext.h',
+ 'gdkkeys.h',
+ 'gdkkeysyms.h',
+ 'gdkmain.h',
+@@ -78,6 +77,12 @@ gdk_gir_public_headers = files(
+ 'gdkwindow.h',
+ )
+ gdk_nogir_public_headers = [files('gdkkeysyms-compat.h')]
++if opengl_enabled
++gdk_gir_public_headers += files('gdkglcontext.h')
++else
++gdk_nogir_public_headers += files('gdkglcontext.h')
++endif
++
+ gdk_public_headers = gdk_gir_public_headers + gdk_nogir_public_headers
+ install_headers(gdk_public_headers, subdir : 'gtk-3.0/gdk')
+
+@@ -166,6 +171,7 @@ gdkconfig_cdata.set('GDK_WINDOWING_WAYLAND', wayland_enabled)
+ gdkconfig_cdata.set('GDK_WINDOWING_WIN32', win32_enabled)
+ gdkconfig_cdata.set('GDK_WINDOWING_BROADWAY', broadway_enabled)
+ gdkconfig_cdata.set('GDK_WINDOWING_QUARTZ', quartz_enabled)
++gdkconfig_cdata.set('GDK_WITH_OPENGL', opengl_enabled)
+
+ gdkconfig = configure_file(
+ input : 'gdkconfig.h.meson',
+diff --git a/gdk/x11/gdkdisplay-x11.c b/gdk/x11/gdkdisplay-x11.c
+index 7e08f472cc..30fd7b6089 100644
+--- a/gdk/x11/gdkdisplay-x11.c
++++ b/gdk/x11/gdkdisplay-x11.c
+@@ -37,7 +37,9 @@
+ #include "gdkdisplay-x11.h"
+ #include "gdkprivate-x11.h"
+ #include "gdkscreen-x11.h"
++#ifdef HAVE_OPENGL
+ #include "gdkglcontext-x11.h"
++#endif
+ #include "gdk-private.h"
+ #include "gdkprofilerprivate.h"
+
+@@ -3191,7 +3193,9 @@ gdk_x11_display_class_init (GdkX11DisplayClass * class)
+ display_class->text_property_to_utf8_list = _gdk_x11_display_text_property_to_utf8_list;
+ display_class->utf8_to_string_target = _gdk_x11_display_utf8_to_string_target;
+
+- display_class->make_gl_context_current = gdk_x11_display_make_gl_context_current;
++#ifdef HAVE_OPENGL
++ display_class->make_gl_context_current = gdk_x11_display_make_gl_context_current;
++#endif
+
+ display_class->get_default_seat = gdk_x11_display_get_default_seat;
+
+diff --git a/gdk/x11/gdkvisual-x11.c b/gdk/x11/gdkvisual-x11.c
+index 81479d81f4..3c8c5c02ff 100644
+--- a/gdk/x11/gdkvisual-x11.c
++++ b/gdk/x11/gdkvisual-x11.c
+@@ -306,7 +306,12 @@ _gdk_x11_screen_init_visuals (GdkScreen *screen)
+ /* If GL is available we want to pick better default/rgba visuals,
+ as we care about glx details such as alpha/depth/stencil depth,
+ stereo and double buffering */
++ /* update_visuals_for_gl() will end up calling epoxy GLX api which
++ will exit if libgl is not there: so only do this if we know GL
++ is available */
++#ifdef HAVE_GLX
+ _gdk_x11_screen_update_visuals_for_gl (screen);
++#endif
+ }
+
+ gint
+diff --git a/gdk/x11/gdkwindow-x11.c b/gdk/x11/gdkwindow-x11.c
+index 194bc82e29..0302bb68d4 100644
+--- a/gdk/x11/gdkwindow-x11.c
++++ b/gdk/x11/gdkwindow-x11.c
+@@ -36,7 +36,9 @@
+ #include "gdkasync.h"
+ #include "gdkeventsource.h"
+ #include "gdkdisplay-x11.h"
++#ifdef HAVE_OPENGL
+ #include "gdkglcontext-x11.h"
++#endif
+ #include "gdkprivate-x11.h"
+ #include "gdk-private.h"
+
+@@ -5888,7 +5890,9 @@ gdk_window_impl_x11_class_init (GdkWindowImplX11Class *klass)
+ impl_class->set_opaque_region = gdk_x11_window_set_opaque_region;
+ impl_class->set_shadow_width = gdk_x11_window_set_shadow_width;
+ impl_class->show_window_menu = gdk_x11_window_show_window_menu;
++#ifdef HAVE_OPENGL
+ impl_class->create_gl_context = gdk_x11_window_create_gl_context;
+ impl_class->invalidate_for_new_frame = gdk_x11_window_invalidate_for_new_frame;
++#endif
+ impl_class->get_unscaled_size = gdk_x11_window_get_unscaled_size;
+ }
+diff --git a/gdk/x11/gdkx-autocleanups.h b/gdk/x11/gdkx-autocleanups.h
+index edb0ea7dbf..a317d61cca 100644
+--- a/gdk/x11/gdkx-autocleanups.h
++++ b/gdk/x11/gdkx-autocleanups.h
+@@ -30,7 +30,9 @@ G_DEFINE_AUTOPTR_CLEANUP_FUNC(GdkX11DeviceXI2, g_object_unref)
+ G_DEFINE_AUTOPTR_CLEANUP_FUNC(GdkX11Display, g_object_unref)
+ G_DEFINE_AUTOPTR_CLEANUP_FUNC(GdkX11DisplayManager, g_object_unref)
+ G_DEFINE_AUTOPTR_CLEANUP_FUNC(GdkX11DragContext, g_object_unref)
++#ifdef HAVE_OPENGL
+ G_DEFINE_AUTOPTR_CLEANUP_FUNC(GdkX11GLContext, g_object_unref)
++#endif
+ G_DEFINE_AUTOPTR_CLEANUP_FUNC(GdkX11Keymap, g_object_unref)
+ G_DEFINE_AUTOPTR_CLEANUP_FUNC(GdkX11Screen, g_object_unref)
+ G_DEFINE_AUTOPTR_CLEANUP_FUNC(GdkX11Visual, g_object_unref)
+diff --git a/gdk/x11/gdkx.h b/gdk/x11/gdkx.h
+index 1f64bccb6d..4db6c18351 100644
+--- a/gdk/x11/gdkx.h
++++ b/gdk/x11/gdkx.h
+@@ -43,7 +43,9 @@
+ #include <gdk/x11/gdkx11display.h>
+ #include <gdk/x11/gdkx11displaymanager.h>
+ #include <gdk/x11/gdkx11dnd.h>
++#ifdef GDK_WITH_OPENGL
+ #include <gdk/x11/gdkx11glcontext.h>
++#endif
+ #include <gdk/x11/gdkx11keys.h>
+ #include <gdk/x11/gdkx11monitor.h>
+ #include <gdk/x11/gdkx11property.h>
+diff --git a/gdk/x11/meson.build b/gdk/x11/meson.build
+index 754ae0a615..0318c83877 100644
+--- a/gdk/x11/meson.build
++++ b/gdk/x11/meson.build
+@@ -14,7 +14,6 @@ gdk_x11_sources = files(
+ 'gdkeventsource.c',
+ 'gdkeventtranslator.c',
+ 'gdkgeometry-x11.c',
+- 'gdkglcontext-x11.c',
+ 'gdkkeys-x11.c',
+ 'gdkmain-x11.c',
+ 'gdkproperty-x11.c',
+@@ -42,7 +41,6 @@ gdk_x11_public_headers = files(
+ 'gdkx11display.h',
+ 'gdkx11displaymanager.h',
+ 'gdkx11dnd.h',
+- 'gdkx11glcontext.h',
+ 'gdkx11keys.h',
+ 'gdkx11monitor.h',
+ 'gdkx11property.h',
+@@ -53,6 +51,11 @@ gdk_x11_public_headers = files(
+ 'gdkx11window.h',
+ )
+
++if opengl_enabled
++ gdk_x11_sources += files('gdkglcontext-x11.c')
++ gdk_x11_public_headers += files('gdkx11glcontext.h')
++endif
++
+ install_headers(gdk_x11_public_headers, subdir: 'gtk-3.0/gdk/x11/')
+ install_headers('gdkx.h', subdir: 'gtk-3.0/gdk/')
+
+diff --git a/gtk/gtkglarea.c b/gtk/gtkglarea.c
+index 802303ea9f..6439d7745d 100644
+--- a/gtk/gtkglarea.c
++++ b/gtk/gtkglarea.c
+@@ -29,7 +29,9 @@
+ #include "gtkprivate.h"
+ #include "gtkrender.h"
+
++#ifdef HAVE_OPENGL
+ #include <epoxy/gl.h>
++#endif
+
+ /**
+ * SECTION:gtkglarea
+@@ -369,9 +371,12 @@ gtk_gl_area_real_create_context (GtkGLArea *area)
+ static void
+ gtk_gl_area_resize (GtkGLArea *area, int width, int height)
+ {
++#ifdef HAVE_OPENGL
+ glViewport (0, 0, width, height);
++#endif
+ }
+
++#ifdef HAVE_OPENGL
+ /*
+ * Creates all the buffer objects needed for rendering the scene
+ */
+@@ -483,6 +488,7 @@ gtk_gl_area_allocate_buffers (GtkGLArea *area)
+
+ priv->needs_render = TRUE;
+ }
++#endif
+
+ /**
+ * gtk_gl_area_attach_buffers:
+@@ -501,6 +507,7 @@ gtk_gl_area_allocate_buffers (GtkGLArea *area)
+ void
+ gtk_gl_area_attach_buffers (GtkGLArea *area)
+ {
++#ifdef HAVE_OPENGL
+ GtkGLAreaPrivate *priv = gtk_gl_area_get_instance_private (area);
+
+ g_return_if_fail (GTK_IS_GL_AREA (area));
+@@ -533,11 +540,13 @@ gtk_gl_area_attach_buffers (GtkGLArea *area)
+ glFramebufferRenderbuffer (GL_FRAMEBUFFER, GL_STENCIL_ATTACHMENT,
+ GL_RENDERBUFFER, priv->depth_stencil_buffer);
+ }
++#endif
+ }
+
+ static void
+ gtk_gl_area_delete_buffers (GtkGLArea *area)
+ {
++#ifdef HAVE_OPENGL
+ GtkGLAreaPrivate *priv = gtk_gl_area_get_instance_private (area);
+
+ if (priv->context == NULL)
+@@ -569,6 +578,7 @@ gtk_gl_area_delete_buffers (GtkGLArea *area)
+ glDeleteFramebuffers (1, &priv->frame_buffer);
+ priv->frame_buffer = 0;
+ }
++#endif
+ }
+
+ static void
+@@ -679,6 +689,7 @@ gtk_gl_area_draw (GtkWidget *widget,
+ GtkGLArea *area = GTK_GL_AREA (widget);
+ GtkGLAreaPrivate *priv = gtk_gl_area_get_instance_private (area);
+ gboolean unused;
++#ifdef HAVE_OPENGL
+ int w, h, scale;
+ GLenum status;
+
+@@ -736,6 +747,14 @@ gtk_gl_area_draw (GtkWidget *widget,
+ }
+
+ return TRUE;
++#else
++ if (priv->error != NULL)
++ gtk_gl_area_draw_error_screen (area,
++ cr,
++ gtk_widget_get_allocated_width (widget),
++ gtk_widget_get_allocated_height (widget));
++ return FALSE;
++#endif
+ }
+
+ static gboolean
+diff --git a/gtk/inspector/general.c b/gtk/inspector/general.c
+index 4fd0c3039c..a8e59ed077 100644
+--- a/gtk/inspector/general.c
++++ b/gtk/inspector/general.c
+@@ -33,8 +33,10 @@
+
+ #ifdef GDK_WINDOWING_X11
+ #include "x11/gdkx.h"
++#ifdef HAVE_OPENGL
+ #include <epoxy/glx.h>
+ #endif
++#endif
+
+ #ifdef GDK_WINDOWING_WIN32
+ #include "win32/gdkwin32.h"
+@@ -217,6 +219,7 @@ add_label_row (GtkInspectorGeneral *gen,
+ gtk_size_group_add_widget (GTK_SIZE_GROUP (gen->priv->labels), label);
+ }
+
++#ifdef HAVE_OPENGL
+ #ifdef GDK_WINDOWING_X11
+ static void
+ append_glx_extension_row (GtkInspectorGeneral *gen,
+@@ -226,6 +229,7 @@ append_glx_extension_row (GtkInspectorGeneral *gen,
+ add_check_row (gen, GTK_LIST_BOX (gen->priv->gl_box), ext, epoxy_has_glx_extension (dpy, 0, ext), 0);
+ }
+ #endif
++#endif
+
+ #ifdef GDK_WINDOWING_WAYLAND
+ static void
+@@ -275,6 +279,7 @@ wayland_get_display (struct wl_display *wl_display)
+ static void
+ init_gl (GtkInspectorGeneral *gen)
+ {
++#ifdef HAVE_OPENGL
+ #ifdef GDK_WINDOWING_X11
+ if (GDK_IS_X11_DISPLAY (gdk_display_get_default ()))
+ {
+@@ -301,6 +306,7 @@ init_gl (GtkInspectorGeneral *gen)
+ }
+ else
+ #endif
++#endif
+ #ifdef GDK_WINDOWING_WAYLAND
+ if (GDK_IS_WAYLAND_DISPLAY (gdk_display_get_default ()))
+ {
+diff --git a/meson.build b/meson.build
+index aed48fc3f6..bfc33af0f6 100644
+--- a/meson.build
++++ b/meson.build
+@@ -137,6 +137,7 @@ wayland_enabled = get_option('wayland_backend')
+ broadway_enabled = get_option('broadway_backend')
+ quartz_enabled = get_option('quartz_backend')
+ win32_enabled = get_option('win32_backend')
++opengl_enabled = get_option('opengl')
+
+ os_unix = false
+ os_linux = false
+@@ -430,7 +431,7 @@ pangocairo_dep = dependency('pangocairo', version: cairo_req,
+ fallback : ['pango', 'libpangocairo_dep'])
+ pixbuf_dep = dependency('gdk-pixbuf-2.0', version: gdk_pixbuf_req,
+ fallback : ['gdk-pixbuf', 'gdkpixbuf_dep'])
+-epoxy_dep = dependency('epoxy', version: epoxy_req,
++epoxy_dep = dependency('epoxy', version: epoxy_req, required: opengl_enabled,
+ fallback: ['libepoxy', 'libepoxy_dep'])
+ atk_dep = dependency('atk', version: atk_req,
+ fallback : ['atk', 'libatk_dep'])
+@@ -476,6 +477,10 @@ if tracker3_enabled
+ endif
+ endif
+
++if opengl_enabled
++ cdata.set('HAVE_OPENGL', 1)
++endif
++
+ if iso_codes_dep.found()
+ cdata.set_quoted('ISO_CODES_PREFIX', iso_codes_dep.get_variable(pkgconfig: 'prefix'))
+ else
+@@ -912,9 +917,15 @@ else
+ gio_packages = ['gio-2.0', glib_req]
+ endif
+
++if opengl_enabled
++ epoxy_packages = ['epoxy', epoxy_req]
++else
++ epoxy_packages = []
++endif
++
+ pkgconf.set('GDK_PRIVATE_PACKAGES',
+ ' '.join(gio_packages + x11_pkgs + wayland_pkgs + cairo_backends +
+- ['epoxy', epoxy_req] + cloudproviders_packages +
++ epoxy_packages + cloudproviders_packages +
+ ['fribidi', fribidi_req]))
+
+ gtk_packages = ' '.join([
+@@ -928,7 +939,7 @@ pkgconf.set('GTK_PACKAGES', gtk_packages)
+ # Requires.private
+ pc_gdk_extra_libs += cairo_libs
+
+-gtk_private_packages = atk_pkgs + wayland_pkgs + ['epoxy', epoxy_req, 'fribidi', fribidi_req]
++gtk_private_packages = atk_pkgs + wayland_pkgs + epoxy_packages + ['fribidi', fribidi_req]
+ if wayland_enabled or x11_enabled
+ gtk_private_packages += ['pangoft2']
+ endif
+diff --git a/meson_options.txt b/meson_options.txt
+index 94099aa01e..8bd096896d 100644
+--- a/meson_options.txt
++++ b/meson_options.txt
+@@ -19,6 +19,8 @@ option('profiler', type: 'boolean', value: false,
+ description : 'Enable profiler support')
+ option('tracker3', type: 'boolean', value: false,
+ description : 'Enable Tracker3 filechooser search')
++option('opengl', type: 'boolean', value: true,
++ description : 'Enable use of GL')
+
+ # Print backends
+ option('print_backends', type : 'string', value : 'auto',
+diff --git a/tests/meson.build b/tests/meson.build
+index 586fe2f45e..6ecf317dde 100644
+--- a/tests/meson.build
++++ b/tests/meson.build
+@@ -5,7 +5,6 @@ gtk_tests = [
+ ['scrolling-performance', ['frame-stats.c', 'variable.c']],
+ ['blur-performance', ['../gtk/gtkcairoblur.c']],
+ ['flicker'],
+- ['gdkgears', ['gtkgears.c']],
+ ['listmodel'],
+ ['motion-compression'],
+ ['styleexamples'],
+@@ -54,7 +53,6 @@ gtk_tests = [
+ ['testfullscreen'],
+ ['testgeometry'],
+ ['testgiconpixbuf'],
+- ['testglblending', ['gtkgears.c']],
+ ['testgmenu'],
+ ['testgrid'],
+ ['testgrouping'],
+@@ -137,6 +135,13 @@ if x11_enabled
+ ]
+ endif
+
++if opengl_enabled
++ gtk_tests += [
++ ['gdkgears', ['gtkgears.c']],
++ ['testglblending', ['gtkgears.c']],
++ ]
++endif
++
+ if os_linux
+ gtk_tests += [['testfontchooserdialog']]
+ endif
+diff --git a/testsuite/gtk/objects-finalize.c b/testsuite/gtk/objects-finalize.c
+index 24540e313f..e0f863ab6a 100644
+--- a/testsuite/gtk/objects-finalize.c
++++ b/testsuite/gtk/objects-finalize.c
+@@ -116,7 +116,9 @@ main (int argc, char **argv)
+ all_types[i] != GDK_TYPE_X11_DEVICE_MANAGER_CORE &&
+ all_types[i] != GDK_TYPE_X11_DEVICE_MANAGER_XI2 &&
+ all_types[i] != GDK_TYPE_X11_DISPLAY_MANAGER &&
++#ifdef HAVE_OPENGL
+ all_types[i] != GDK_TYPE_X11_GL_CONTEXT &&
++#endif
+ #endif
+ /* Not allowed to finalize a GdkPixbufLoader without calling gdk_pixbuf_loader_close() */
+ all_types[i] != GDK_TYPE_PIXBUF_LOADER &&
+--
+2.30.2
+
diff --git a/meta/recipes-gnome/gtk+/gtk+3_3.24.33.bb b/meta/recipes-gnome/gtk+/gtk+3_3.24.33.bb
deleted file mode 100644
index fb9e29dd8c..0000000000
--- a/meta/recipes-gnome/gtk+/gtk+3_3.24.33.bb
+++ /dev/null
@@ -1,17 +0,0 @@
-require gtk+3.inc
-
-MAJ_VER = "${@oe.utils.trim_version("${PV}", 2)}"
-
-SRC_URI = "http://ftp.gnome.org/pub/gnome/sources/gtk+/${MAJ_VER}/gtk+-${PV}.tar.xz \
- file://0002-Do-not-try-to-initialize-GL-without-libGL.patch \
- file://0003-Add-disable-opengl-configure-option.patch \
- file://link_fribidi.patch \
- "
-SRC_URI[sha256sum] = "588b06522e25d1579e989b6f9d8a1bdbf2fe13cde01a04e904ff346a225e7801"
-
-S = "${WORKDIR}/gtk+-${PV}"
-
-LIC_FILES_CHKSUM = "file://COPYING;md5=5f30f0716dfdd0d91eb439ebec522ec2 \
- file://gtk/gtk.h;endline=25;md5=1d8dc0fccdbfa26287a271dce88af737 \
- file://gdk/gdk.h;endline=25;md5=c920ce39dc88c6f06d3e7c50e08086f2 \
- file://tests/testgtk.c;endline=25;md5=cb732daee1d82af7a2bf953cf3cf26f1"
diff --git a/meta/recipes-gnome/gtk+/gtk+3_3.24.41.bb b/meta/recipes-gnome/gtk+/gtk+3_3.24.41.bb
new file mode 100644
index 0000000000..17e90c59f0
--- /dev/null
+++ b/meta/recipes-gnome/gtk+/gtk+3_3.24.41.bb
@@ -0,0 +1,17 @@
+require gtk+3.inc
+
+MAJ_VER = "${@oe.utils.trim_version("${PV}", 2)}"
+
+SRC_URI = "http://ftp.gnome.org/pub/gnome/sources/gtk+/${MAJ_VER}/gtk+-${PV}.tar.xz \
+ file://opengl.patch \
+ "
+SRC_URI[sha256sum] = "47da61487af3087a94bc49296fd025ca0bc02f96ef06c556e7c8988bd651b6fa"
+
+S = "${WORKDIR}/gtk+-${PV}"
+
+LIC_FILES_CHKSUM = "file://COPYING;md5=5f30f0716dfdd0d91eb439ebec522ec2 \
+ file://gtk/gtk.h;endline=25;md5=1d8dc0fccdbfa26287a271dce88af737 \
+ file://gdk/gdk.h;endline=25;md5=c920ce39dc88c6f06d3e7c50e08086f2 \
+ file://tests/testgtk.c;endline=25;md5=cb732daee1d82af7a2bf953cf3cf26f1"
+
+CVE_PRODUCT = "gnome:gtk"
diff --git a/meta/recipes-gnome/gtk+/gtk4_4.14.1.bb b/meta/recipes-gnome/gtk+/gtk4_4.14.1.bb
new file mode 100644
index 0000000000..ce733769a5
--- /dev/null
+++ b/meta/recipes-gnome/gtk+/gtk4_4.14.1.bb
@@ -0,0 +1,130 @@
+SUMMARY = "Multi-platform toolkit for creating GUIs"
+DESCRIPTION = "GTK is a multi-platform toolkit for creating graphical user interfaces. Offering a complete \
+set of widgets, GTK is suitable for projects ranging from small one-off projects to complete application suites."
+HOMEPAGE = "http://www.gtk.org"
+BUGTRACKER = "https://bugzilla.gnome.org/"
+SECTION = "libs"
+
+DEPENDS = " \
+ atk \
+ cairo \
+ fribidi \
+ gdk-pixbuf \
+ gdk-pixbuf-native \
+ gi-docgen \
+ glib-2.0 \
+ graphene \
+ harfbuzz \
+ jpeg \
+ libepoxy \
+ libpng \
+ librsvg \
+ libxkbcommon \
+ pango \
+ tiff \
+"
+
+LICENSE = "LGPL-2.0-only & LGPL-2.0-or-later & LGPL-2.1-or-later"
+LIC_FILES_CHKSUM = " \
+ file://COPYING;md5=5f30f0716dfdd0d91eb439ebec522ec2 \
+ file://gtk/gtk.h;endline=25;md5=61900d77e8d5bc67cf15ad93de9a3490 \
+ file://gdk/gdk.h;endline=25;md5=a0fb26c1f6b94e66d148279e192c333f \
+ file://tests/testgtk.c;endline=25;md5=49d06770681b8322466b52ed19d29fb2 \
+"
+
+MAJ_VER = "${@oe.utils.trim_version("${PV}", 2)}"
+
+UPSTREAM_CHECK_REGEX = "gtk-(?P<pver>\d+\.(\d*[02468])+(\.\d+)+)\.tar.xz"
+
+SRC_URI = "http://ftp.gnome.org/pub/gnome/sources/gtk/${MAJ_VER}/gtk-${PV}.tar.xz"
+SRC_URI[sha256sum] = "fcefb3f132f8cc4711a9efa5b353c9ae9bb5eeff0246fa74dbc2f2f839b9e308"
+
+S = "${WORKDIR}/gtk-${PV}"
+
+CVE_PRODUCT = "gnome:gtk"
+
+inherit meson gettext pkgconfig gi-docgen update-alternatives gsettings features_check gobject-introspection
+
+# TBD: nativesdk
+# gobject-introspection.bbclass pins introspection off for nativesk. As long as
+# we do not remove this wisdom or hack gtk4, it is not possible to build
+# nativesdk-gtk4
+BBCLASSEXTEND = "native"
+
+GSETTINGS_PACKAGE:class-native = ""
+
+ANY_OF_DISTRO_FEATURES = "${GTK3DISTROFEATURES}"
+REQUIRED_DISTRO_FEATURES = "opengl"
+GTKDOC_MESON_OPTION = "documentation"
+GIR_MESON_ENABLE_FLAG = 'enabled'
+GIR_MESON_DISABLE_FLAG = 'disabled'
+
+EXTRA_OEMESON = " -Dbuild-tests=false -Dbuild-testsuite=false -Dbuild-demos=false"
+
+PACKAGECONFIG ??= "gstreamer ${@bb.utils.filter('DISTRO_FEATURES', 'wayland x11 vulkan', d)}"
+PACKAGECONFIG:class-native = "${@bb.utils.filter('DISTRO_FEATURES', 'x11', d)}"
+PACKAGECONFIG:class-nativesdk = "${@bb.utils.filter('DISTRO_FEATURES', 'x11', d)}"
+
+PACKAGECONFIG[x11] = "-Dx11-backend=true,-Dx11-backend=false,at-spi2-atk fontconfig libx11 libxext libxcursor libxi libxdamage libxrandr libxrender libxcomposite libxfixes xinerama"
+PACKAGECONFIG[wayland] = "-Dwayland-backend=true,-Dwayland-backend=false,wayland wayland-protocols virtual/egl virtual/libgles2 wayland-native"
+PACKAGECONFIG[cloudproviders] = "-Dcloudproviders=enabled,-Dcloudproviders=disabled,libcloudproviders"
+PACKAGECONFIG[cups] = "-Dprint-cups=enabled,-Dprint-cups=disabled,cups,cups gtk4-printbackend-cups"
+PACKAGECONFIG[colord] = "-Dcolord=enabled,-Dcolord=disabled,colord"
+PACKAGECONFIG[iso-codes] = ",,iso-codes,iso-codes"
+# gtk4 wants gstreamer-player-1.0 -> gstreamer1.0-plugins-bad
+PACKAGECONFIG[gstreamer] = "-Dmedia-gstreamer=enabled,-Dmedia-gstreamer=disabled,gstreamer1.0-plugins-bad"
+PACKAGECONFIG[tracker] = "-Dtracker=enabled,-Dtracker=disabled,tracker,tracker-miners"
+PACKAGECONFIG[vulkan] = "-Dvulkan=enabled,-Dvulkan=disabled, vulkan-loader vulkan-headers shaderc-native"
+
+LIBV = "4.0.0"
+
+FILES:${PN}:append = " \
+ ${datadir}/glib-2.0/schemas/ \
+ ${datadir}/gtk-4.0/emoji/ \
+ ${datadir}/metainfo/ \
+ ${datadir}/icons/hicolor/*/apps/org.gtk.PrintEditor4*.* \
+ ${libdir}/gtk-4.0/${LIBV}/media \
+ ${bindir}/gtk4-update-icon-cache \
+ ${bindir}/gtk4-launch \
+"
+
+FILES:${PN}-dev += " \
+ ${datadir}/gtk-4.0/gtk4builder.rng \
+ ${datadir}/gtk-4.0/include \
+ ${datadir}/gtk-4.0/valgrind \
+ ${datadir}/gettext/its \
+ ${bindir}/gtk4-builder-tool \
+ ${bindir}/gtk4-encode-symbolic-svg \
+ ${bindir}/gtk4-query-settings \
+"
+
+GTKBASE_RRECOMMENDS ?= " \
+ liberation-fonts \
+ gdk-pixbuf-loader-png \
+ gdk-pixbuf-loader-jpeg \
+ gdk-pixbuf-loader-gif \
+ gdk-pixbuf-loader-xpm \
+ shared-mime-info \
+ adwaita-icon-theme-symbolic \
+"
+
+GTKBASE_RRECOMMENDS:class-native ?= ""
+
+GTKGLIBC_RRECOMMENDS ?= "${GTKBASE_RRECOMMENDS} glibc-gconv-iso8859-1"
+
+RRECOMMENDS:${PN} = "${GTKBASE_RRECOMMENDS}"
+RRECOMMENDS:${PN}:libc-glibc = "${GTKGLIBC_RRECOMMENDS}"
+RDEPENDS:${PN}-dev += "${@bb.utils.contains("PACKAGECONFIG", "wayland", "wayland-protocols", "", d)}"
+
+PACKAGES_DYNAMIC += "^gtk4-printbackend-.*"
+python populate_packages:prepend () {
+ import os.path
+
+ gtk_libdir = d.expand('${libdir}/gtk-4.0/${LIBV}')
+ printmodules_root = os.path.join(gtk_libdir, 'printbackends');
+
+ do_split_packages(d, printmodules_root, r'^libprintbackend-(.*)\.so$', 'gtk4-printbackend-%s', 'GTK printbackend module for %s')
+
+ if (d.getVar('DEBIAN_NAMES')):
+ d.setVar(d.expand('PKG:${PN}'), '${MLPREFIX}libgtk-4.0')
+}
diff --git a/meta/recipes-gnome/gtk-doc/files/0001-Don-t-use-docdir-from-environment.patch b/meta/recipes-gnome/gtk-doc/files/0001-Don-t-use-docdir-from-environment.patch
new file mode 100644
index 0000000000..f40124877c
--- /dev/null
+++ b/meta/recipes-gnome/gtk-doc/files/0001-Don-t-use-docdir-from-environment.patch
@@ -0,0 +1,24 @@
+From 72dfeec0e49478b0bfb471c4155044391bad8e6c Mon Sep 17 00:00:00 2001
+From: Ross Burton <ross.burton@arm.com>
+Date: Fri, 8 Dec 2023 10:35:25 +0000
+Subject: [PATCH] Don't use docdir from environment
+
+Upstream-Status: Submitted [https://gitlab.gnome.org/GNOME/gtk-doc/-/merge_requests/73]
+Signed-off-by: Ross Burton <ross.burton@arm.com>
+---
+ buildsystems/autotools/gtkdocize.in | 2 +-
+ 1 file changed, 1 insertion(+), 1 deletion(-)
+
+diff --git a/buildsystems/autotools/gtkdocize.in b/buildsystems/autotools/gtkdocize.in
+index 83127bf..76dcbfd 100755
+--- a/buildsystems/autotools/gtkdocize.in
++++ b/buildsystems/autotools/gtkdocize.in
+@@ -39,7 +39,7 @@ set - $args
+
+ # assume working directory if srcdir is not set
+ test "$srcdir" || srcdir=.
+-test "$docdir" || docdir="$srcdir"
++docdir="$srcdir"
+
+ # detect configure script
+ no_configure_found=0
diff --git a/meta/recipes-gnome/gtk-doc/gtk-doc_1.33.2.bb b/meta/recipes-gnome/gtk-doc/gtk-doc_1.33.2.bb
index 150d2c0b23..4fd5a6e925 100644
--- a/meta/recipes-gnome/gtk-doc/gtk-doc_1.33.2.bb
+++ b/meta/recipes-gnome/gtk-doc/gtk-doc_1.33.2.bb
@@ -5,6 +5,7 @@ HOMEPAGE = "https://www.gtk.org/docs/"
LICENSE = "GPL-2.0-only"
LIC_FILES_CHKSUM = "file://COPYING;md5=94d55d512a9ba36caa9b7df079bae19f"
+GNOMEBASEBUILDCLASS = "autotools"
inherit gnomebase
# Configure the scripts correctly (and build their dependencies) only if they are actually
@@ -15,14 +16,17 @@ PACKAGECONFIG ??= "${@bb.utils.contains("DISTRO_FEATURES", "api-documentation",
# into its scripts. This means that target gtk-doc package is broken;
# hopefully no one minds because its scripts are not used for anything during build
# and shouldn't be used on targets.
-PACKAGECONFIG[working-scripts] = ",,libxslt-native xmlto-native python3-six python3-pygments"
+PACKAGECONFIG[working-scripts] = ",,libxslt-native docbook-xml-dtd4-native docbook-xsl-stylesheets python3-pygments"
PACKAGECONFIG[tests] = "--enable-tests,--disable-tests,glib-2.0"
+CACHED_CONFIGUREVARS += "ac_cv_path_XSLTPROC=xsltproc"
+
SRC_URI[archive.sha256sum] = "cc1b709a20eb030a278a1f9842a362e00402b7f834ae1df4c1998a723152bf43"
SRC_URI += "file://0001-Do-not-hardocode-paths-to-perl-python-in-scripts.patch \
file://0001-Do-not-error-out-if-xsltproc-is-not-found.patch \
file://conditionaltests.patch \
file://no-clobber.patch \
+ file://0001-Don-t-use-docdir-from-environment.patch \
"
SRC_URI:append:class-native = " file://pkg-config-native.patch"
@@ -43,6 +47,7 @@ do_install:append () {
${datadir}/gtk-doc/python/gtkdoc/config.py; do
sed -e 's,${RECIPE_SYSROOT_NATIVE}/usr/bin/pkg-config,${bindir}/pkg-config,' \
-e 's,${HOSTTOOLS_DIR}/python3,${bindir}/python3,' \
+ -e '1s|^#!.*|#!/usr/bin/env python3|' \
-i ${D}$fn
done
}
diff --git a/meta/recipes-gnome/json-glib/json-glib_1.6.6.bb b/meta/recipes-gnome/json-glib/json-glib_1.6.6.bb
deleted file mode 100644
index 627f389536..0000000000
--- a/meta/recipes-gnome/json-glib/json-glib_1.6.6.bb
+++ /dev/null
@@ -1,32 +0,0 @@
-SUMMARY = "JSON-GLib implements a full JSON parser using GLib and GObject"
-DESCRIPTION = "Use JSON-GLib it is possible to parse and generate valid JSON\
- data structures, using a DOM-like API. JSON-GLib also offers GObject \
-integration, providing the ability to serialize and deserialize GObject \
-instances to and from JSON data types."
-HOMEPAGE = "https://wiki.gnome.org/Projects/JsonGlib"
-BUGTRACKER = "https://gitlab.gnome.org/GNOME/json-glib/issues"
-
-LICENSE = "LGPL-2.1-only"
-LIC_FILES_CHKSUM = "file://COPYING;md5=7fbc338309ac38fefcd64b04bb903e34"
-
-DEPENDS = "glib-2.0"
-
-GNOMEBASEBUILDCLASS = "meson"
-inherit gnomebase lib_package gobject-introspection gi-docgen gettext ptest-gnome manpages upstream-version-is-even
-GIR_MESON_ENABLE_FLAG = 'enabled'
-GIR_MESON_DISABLE_FLAG = 'disabled'
-GIDOCGEN_MESON_ENABLE_FLAG = 'enabled'
-GIDOCGEN_MESON_DISABLE_FLAG = 'disabled'
-
-SRC_URI += " file://run-ptest"
-SRC_URI[archive.sha256sum] = "96ec98be7a91f6dde33636720e3da2ff6ecbb90e76ccaa49497f31a6855a490e"
-
-PACKAGECONFIG ??= "${@bb.utils.contains('PTEST_ENABLED', '1', 'tests', '', d)}"
-PACKAGECONFIG[manpages] = "-Dman=true,-Dman=false,libxslt-native xmlto-native"
-PACKAGECONFIG[tests] = "-Dtests=true,-Dtests=false"
-
-BBCLASSEXTEND = "native nativesdk"
-
-# Currently it's not possible to disable gettext in Meson, so we need to force
-# this back on.
-USE_NLS:class-native = "yes"
diff --git a/meta/recipes-gnome/json-glib/json-glib_1.8.0.bb b/meta/recipes-gnome/json-glib/json-glib_1.8.0.bb
new file mode 100644
index 0000000000..2094ebe3e7
--- /dev/null
+++ b/meta/recipes-gnome/json-glib/json-glib_1.8.0.bb
@@ -0,0 +1,31 @@
+SUMMARY = "JSON-GLib implements a full JSON parser using GLib and GObject"
+DESCRIPTION = "Use JSON-GLib it is possible to parse and generate valid JSON\
+ data structures, using a DOM-like API. JSON-GLib also offers GObject \
+integration, providing the ability to serialize and deserialize GObject \
+instances to and from JSON data types."
+HOMEPAGE = "https://wiki.gnome.org/Projects/JsonGlib"
+BUGTRACKER = "https://gitlab.gnome.org/GNOME/json-glib/issues"
+
+LICENSE = "LGPL-2.1-only"
+LIC_FILES_CHKSUM = "file://COPYING;md5=7fbc338309ac38fefcd64b04bb903e34"
+
+DEPENDS = "glib-2.0 glib-2.0-native"
+
+inherit gnomebase lib_package gobject-introspection gi-docgen gettext ptest-gnome manpages upstream-version-is-even
+GIR_MESON_ENABLE_FLAG = 'enabled'
+GIR_MESON_DISABLE_FLAG = 'disabled'
+GIDOCGEN_MESON_ENABLE_FLAG = 'enabled'
+GIDOCGEN_MESON_DISABLE_FLAG = 'disabled'
+
+SRC_URI += " file://run-ptest"
+SRC_URI[archive.sha256sum] = "97ef5eb92ca811039ad50a65f06633f1aae64792789307be7170795d8b319454"
+
+PACKAGECONFIG ??= "${@bb.utils.contains('PTEST_ENABLED', '1', 'tests', '', d)}"
+PACKAGECONFIG[manpages] = "-Dman=true,-Dman=false,libxslt-native xmlto-native"
+PACKAGECONFIG[tests] = "-Dtests=true,-Dtests=false"
+
+BBCLASSEXTEND = "native nativesdk"
+
+# Currently it's not possible to disable gettext in Meson, so we need to force
+# this back on.
+USE_NLS:class-native = "yes"
diff --git a/meta/recipes-gnome/libadwaita/libadwaita_1.5.0.bb b/meta/recipes-gnome/libadwaita/libadwaita_1.5.0.bb
new file mode 100644
index 0000000000..b0b1e4502a
--- /dev/null
+++ b/meta/recipes-gnome/libadwaita/libadwaita_1.5.0.bb
@@ -0,0 +1,27 @@
+SUMMARY = "Building blocks for modern GNOME applications"
+HOMEPAGE = "https://gitlab.gnome.org/GNOME/libadwaita"
+LICENSE="LGPL-2.1-or-later"
+LIC_FILES_CHKSUM = "file://COPYING;md5=4fbd65380cdd255951079008b364516c"
+
+
+DEPENDS = " \
+ gtk4 \
+ appstream \
+"
+
+inherit gnomebase gobject-introspection gi-docgen vala features_check
+
+SRC_URI[archive.sha256sum] = "fd92287df9bb95c963654fb6e70d3e082e2bcb37b147e0e3c905567167993783"
+
+ANY_OF_DISTRO_FEATURES = "${GTK3DISTROFEATURES}"
+REQUIRED_DISTRO_FEATURES = "opengl"
+
+GIR_MESON_ENABLE_FLAG = 'enabled'
+GIR_MESON_DISABLE_FLAG = 'disabled'
+GTKDOC_MESON_OPTION = 'gtk_doc'
+
+PACKAGECONFIG[examples] = "-Dexamples=true,-Dexamples=false"
+
+FILES:${PN} += "${datadir}/metainfo"
+
+EXTRA_OEMESON += "${@bb.utils.contains('GI_DATA_ENABLED', 'True', '-Dvapi=true', '-Dvapi=false', d)}"
diff --git a/meta/recipes-gnome/libdazzle/libdazzle_3.44.0.bb b/meta/recipes-gnome/libdazzle/libdazzle_3.44.0.bb
index 49ebc818b0..9bca7e9dce 100644
--- a/meta/recipes-gnome/libdazzle/libdazzle_3.44.0.bb
+++ b/meta/recipes-gnome/libdazzle/libdazzle_3.44.0.bb
@@ -7,7 +7,6 @@ HOMEPAGE = "https://gitlab.gnome.org/GNOME/libdazzle"
BUGTRACKER = "https://gitlab.gnome.org/GNOME/libdazzle/issues"
LIC_FILES_CHKSUM = "file://COPYING;md5=8f0e2cd40e05189ec81232da84bd6e1a"
-GNOMEBASEBUILDCLASS = "meson"
inherit gnomebase upstream-version-is-even vala features_check gobject-introspection
DEPENDS = "glib-2.0-native glib-2.0 gtk+3"
diff --git a/meta/recipes-gnome/libgudev/libgudev/0001-meson-Pass-export-dynamic-option-to-linker.patch b/meta/recipes-gnome/libgudev/libgudev/0001-meson-Pass-export-dynamic-option-to-linker.patch
new file mode 100644
index 0000000000..8a06d244e4
--- /dev/null
+++ b/meta/recipes-gnome/libgudev/libgudev/0001-meson-Pass-export-dynamic-option-to-linker.patch
@@ -0,0 +1,38 @@
+From dc4fcfb1e1e2326a412b252314af3e9424a31457 Mon Sep 17 00:00:00 2001
+From: Khem Raj <raj.khem@gmail.com>
+Date: Tue, 16 Jan 2024 12:02:46 -0800
+Subject: [PATCH] meson: Pass --export-dynamic option to linker
+
+Bypass the compiler driver trying to comprehend and translate it for
+linker, since its not clear what the right behavior should be, gcc seems
+to translate it into --export-dynamic but clang 18+ rejects it
+
+| x86_64-yoe-linux-clang: error: unknown argument: '-export-dynamic'
+
+also see [1]
+
+This makes it work as intended across gcc and clang
+
+Upstream-Status: Submitted [https://gitlab.gnome.org/GNOME/libgudev/-/merge_requests/30]
+[1] https://discourse.llvm.org/t/clang-option-export-dynamic-parse-to-e-xport-dynamic-error/72454
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+---
+ gudev/meson.build | 2 +-
+ 1 file changed, 1 insertion(+), 1 deletion(-)
+
+diff --git a/gudev/meson.build b/gudev/meson.build
+index e904203..3ed580b 100644
+--- a/gudev/meson.build
++++ b/gudev/meson.build
+@@ -33,7 +33,7 @@ libgudev_c_args = [
+ ]
+
+ libgudev_link_args = [
+- '-export-dynamic',
++ '-Wl,--export-dynamic',
+ '-Wl,--version-script,@0@/libgudev-1.0.sym'.format(top_srcdir),
+ ]
+
+--
+2.43.0
+
diff --git a/meta/recipes-gnome/libgudev/libgudev_237.bb b/meta/recipes-gnome/libgudev/libgudev_237.bb
deleted file mode 100644
index 9ce43ce34b..0000000000
--- a/meta/recipes-gnome/libgudev/libgudev_237.bb
+++ /dev/null
@@ -1,31 +0,0 @@
-SUMMARY = "GObject wrapper for libudev"
-DESCRIPTION = "This library makes it much simpler to use libudev from programs \
-already using GObject. It also makes it possible to easily use libudev from \
-other programming languages, such as Javascript, because of GObject \
-introspection support."
-HOMEPAGE = "https://wiki.gnome.org/Projects/libgudev"
-BUGTRACKER = "https://gitlab.gnome.org/GNOME/libgudev/issues"
-SRC_URI[archive.sha256sum] = "0d06b21170d20c93e4f0534dbb9b0a8b4f1119ffb00b4031aaeb5b9148b686aa"
-
-DEPENDS = "glib-2.0 udev"
-
-RCONFLICTS:${PN} = "systemd (<= 220)"
-
-LICENSE = "LGPL-2.1-only"
-LIC_FILES_CHKSUM = "file://COPYING;md5=4fbd65380cdd255951079008b364516c"
-
-GNOMEBASEBUILDCLASS = "meson"
-inherit gnomebase gobject-introspection gtk-doc
-
-GIR_MESON_ENABLE_FLAG = 'enabled'
-GIR_MESON_DISABLE_FLAG = 'disabled'
-
-GTKDOC_MESON_OPTION = "gtk_doc"
-
-UPSTREAM_CHECK_URI = "http://ftp.gnome.org/pub/GNOME/sources/libgudev/"
-UPSTREAM_CHECK_REGEX = "(?P<pver>(\d+))"
-
-# This isn't a GNOME-style version do gnome_verdir fails. Just return the
-# version as that is how the directory is structured.
-def gnome_verdir(v):
- return v
diff --git a/meta/recipes-gnome/libgudev/libgudev_238.bb b/meta/recipes-gnome/libgudev/libgudev_238.bb
new file mode 100644
index 0000000000..f197f6421d
--- /dev/null
+++ b/meta/recipes-gnome/libgudev/libgudev_238.bb
@@ -0,0 +1,34 @@
+SUMMARY = "GObject wrapper for libudev"
+DESCRIPTION = "This library makes it much simpler to use libudev from programs \
+already using GObject. It also makes it possible to easily use libudev from \
+other programming languages, such as Javascript, because of GObject \
+introspection support."
+HOMEPAGE = "https://wiki.gnome.org/Projects/libgudev"
+BUGTRACKER = "https://gitlab.gnome.org/GNOME/libgudev/issues"
+SRC_URI[archive.sha256sum] = "61266ab1afc9d73dbc60a8b2af73e99d2fdff47d99544d085760e4fa667b5dd1"
+
+DEPENDS = "glib-2.0 glib-2.0-native udev"
+
+RCONFLICTS:${PN} = "systemd (<= 220)"
+
+LICENSE = "LGPL-2.1-only"
+LIC_FILES_CHKSUM = "file://COPYING;md5=4fbd65380cdd255951079008b364516c"
+
+inherit gnomebase gobject-introspection gtk-doc
+
+SRC_URI += "file://0001-meson-Pass-export-dynamic-option-to-linker.patch"
+
+GIR_MESON_ENABLE_FLAG = 'enabled'
+GIR_MESON_DISABLE_FLAG = 'disabled'
+
+GTKDOC_MESON_OPTION = "gtk_doc"
+
+UPSTREAM_CHECK_URI = "http://ftp.gnome.org/pub/GNOME/sources/libgudev/"
+UPSTREAM_CHECK_REGEX = "(?P<pver>(\d+))"
+
+EXTRA_OEMESON += "-Dtests=disabled -Dvapi=disabled"
+
+# This isn't a GNOME-style version do gnome_verdir fails. Just return the
+# version as that is how the directory is structured.
+def gnome_verdir(v):
+ return v
diff --git a/meta/recipes-gnome/libhandy/libhandy_1.6.2.bb b/meta/recipes-gnome/libhandy/libhandy_1.6.2.bb
deleted file mode 100644
index 8d9904637f..0000000000
--- a/meta/recipes-gnome/libhandy/libhandy_1.6.2.bb
+++ /dev/null
@@ -1,27 +0,0 @@
-SUMMARY = "A library full of GTK+ widgets for mobile phones"
-DESCRIPTION = "Library with GTK widgets for mobile phones. Libhandy provides \
-GTK widgets and GObjects to ease developing applications for mobile phones. \
-It was developed by Purism (and used by several official GNOME projects) \
-to extend Gtk by providing mobile-friendly widgets and make the creation of \
-responsive apps easier."
-HOMEPAGE = "https://gitlab.gnome.org/GNOME/libhandy"
-BUGTRACKER = "https://gitlab.gnome.org/GNOME/libhandy/-/issues"
-LICENSE = "LGPL-2.1-only"
-LIC_FILES_CHKSUM = "file://COPYING;md5=4fbd65380cdd255951079008b364516c"
-
-SRC_URI = "git://gitlab.gnome.org/GNOME/libhandy.git;protocol=https;branch=libhandy-1-6"
-SRCREV = "f050453109db05621b9a47b9931fe1b571905e67"
-S = "${WORKDIR}/git"
-
-UPSTREAM_CHECK_GITTAGREGEX = "(?P<pver>\d+\.(\d*[02468])+(\.\d+))"
-GIR_MESON_ENABLE_FLAG = 'enabled'
-GIR_MESON_DISABLE_FLAG = 'disabled'
-
-inherit meson gobject-introspection vala gettext gi-docgen features_check pkgconfig
-
-ANY_OF_DISTRO_FEATURES = "${GTK3DISTROFEATURES}"
-
-DEPENDS += "gtk+3"
-
-PACKAGES =+ "${PN}-examples"
-FILES:${PN}-examples = "${bindir}"
diff --git a/meta/recipes-gnome/libhandy/libhandy_1.8.3.bb b/meta/recipes-gnome/libhandy/libhandy_1.8.3.bb
new file mode 100644
index 0000000000..7f5f02a7ba
--- /dev/null
+++ b/meta/recipes-gnome/libhandy/libhandy_1.8.3.bb
@@ -0,0 +1,27 @@
+SUMMARY = "A library full of GTK+ widgets for mobile phones"
+DESCRIPTION = "Library with GTK widgets for mobile phones. Libhandy provides \
+GTK widgets and GObjects to ease developing applications for mobile phones. \
+It was developed by Purism (and used by several official GNOME projects) \
+to extend Gtk by providing mobile-friendly widgets and make the creation of \
+responsive apps easier."
+HOMEPAGE = "https://gitlab.gnome.org/GNOME/libhandy"
+BUGTRACKER = "https://gitlab.gnome.org/GNOME/libhandy/-/issues"
+LICENSE = "LGPL-2.1-only"
+LIC_FILES_CHKSUM = "file://COPYING;md5=4fbd65380cdd255951079008b364516c"
+
+SRC_URI = "git://gitlab.gnome.org/GNOME/libhandy.git;protocol=https;branch=libhandy-1-8"
+SRCREV = "9b0071408ce86a3ef843806fddd723a85f6f2416"
+S = "${WORKDIR}/git"
+
+UPSTREAM_CHECK_GITTAGREGEX = "(?P<pver>\d+\.(\d*[02468])+(\.\d+))"
+GIR_MESON_ENABLE_FLAG = 'enabled'
+GIR_MESON_DISABLE_FLAG = 'disabled'
+
+inherit meson gobject-introspection vala gettext gi-docgen features_check pkgconfig
+
+ANY_OF_DISTRO_FEATURES = "${GTK3DISTROFEATURES}"
+
+DEPENDS += "gtk+3"
+
+PACKAGES =+ "${PN}-examples"
+FILES:${PN}-examples = "${bindir}"
diff --git a/meta/recipes-gnome/libnotify/libnotify_0.7.12.bb b/meta/recipes-gnome/libnotify/libnotify_0.7.12.bb
deleted file mode 100644
index 21e737fd45..0000000000
--- a/meta/recipes-gnome/libnotify/libnotify_0.7.12.bb
+++ /dev/null
@@ -1,37 +0,0 @@
-SUMMARY = "Library for sending desktop notifications to a notification daemon"
-DESCRIPTION = "It sends desktop notifications to a notification daemon, as defined \
-in the Desktop Notifications spec. These notifications can be used to inform \
-the user about an event or display some form of information without getting \
-in the user's way."
-HOMEPAGE = "https://gitlab.gnome.org/GNOME/libnotify"
-BUGTRACKER = "https://gitlab.gnome.org/GNOME/libnotify/issues"
-SECTION = "libs"
-LICENSE = "LGPL-2.1-only"
-LIC_FILES_CHKSUM = "file://COPYING;md5=7fbc338309ac38fefcd64b04bb903e34"
-
-DEPENDS = "dbus glib-2.0 gdk-pixbuf"
-
-PACKAGECONFIG ?= ""
-PACKAGECONFIG[tests] = "-Dtests=true,-Dtests=false,gtk+3"
-
-GNOMEBASEBUILDCLASS = "meson"
-GTKDOC_MESON_OPTION = "gtk_doc"
-GIR_MESON_ENABLE_FLAG = "enabled"
-GIR_MESON_DISABLE_FLAG = "disabled"
-inherit gnomebase gtk-doc features_check gobject-introspection
-# depends on gtk+3 if tests are enabled
-ANY_OF_DISTRO_FEATURES = "${@bb.utils.contains('PACKAGECONFIG', 'tests', '${GTK3DISTROFEATURES}', '', d)}"
-
-SRC_URI[archive.sha256sum] = "744b2b37508135f8261b755a9debe6e09add421adc75bde930f6e198b70ab46e"
-
-EXTRA_OEMESON = "-Dman=false"
-
-# there were times, we had two versions of libnotify (oe-core libnotify:0.6.x /
-# meta-gnome libnotify3: 0.7.x)
-PROVIDES += "libnotify3"
-RPROVIDES:${PN} += "libnotify3"
-RCONFLICTS:${PN} += "libnotify3"
-RREPLACES:${PN} += "libnotify3"
-
-# -7381 is specific to the NodeJS bindings
-CVE_CHECK_IGNORE += "CVE-2013-7381"
diff --git a/meta/recipes-gnome/libnotify/libnotify_0.8.3.bb b/meta/recipes-gnome/libnotify/libnotify_0.8.3.bb
new file mode 100644
index 0000000000..1e606b84b2
--- /dev/null
+++ b/meta/recipes-gnome/libnotify/libnotify_0.8.3.bb
@@ -0,0 +1,35 @@
+SUMMARY = "Library for sending desktop notifications to a notification daemon"
+DESCRIPTION = "It sends desktop notifications to a notification daemon, as defined \
+in the Desktop Notifications spec. These notifications can be used to inform \
+the user about an event or display some form of information without getting \
+in the user's way."
+HOMEPAGE = "https://gitlab.gnome.org/GNOME/libnotify"
+BUGTRACKER = "https://gitlab.gnome.org/GNOME/libnotify/issues"
+SECTION = "libs"
+LICENSE = "LGPL-2.1-only"
+LIC_FILES_CHKSUM = "file://COPYING;md5=7fbc338309ac38fefcd64b04bb903e34"
+
+DEPENDS = "glib-2.0 glib-2.0-native gdk-pixbuf"
+
+PACKAGECONFIG ?= ""
+PACKAGECONFIG[tests] = "-Dtests=true,-Dtests=false,gtk+3"
+
+GIR_MESON_ENABLE_FLAG = "enabled"
+GIR_MESON_DISABLE_FLAG = "disabled"
+inherit gnomebase gi-docgen features_check gobject-introspection
+# depends on gtk+3 if tests are enabled
+ANY_OF_DISTRO_FEATURES = "${@bb.utils.contains('PACKAGECONFIG', 'tests', '${GTK3DISTROFEATURES}', '', d)}"
+
+SRC_URI[archive.sha256sum] = "ee8f3ef946156ad3406fdf45feedbdcd932dbd211ab4f16f75eba4f36fb2f6c0"
+
+EXTRA_OEMESON = "-Dman=false"
+
+# there were times, we had two versions of libnotify (oe-core libnotify:0.6.x /
+# meta-gnome libnotify3: 0.7.x)
+PROVIDES += "libnotify3"
+RPROVIDES:${PN} += "libnotify3"
+RCONFLICTS:${PN} += "libnotify3"
+RREPLACES:${PN} += "libnotify3"
+
+# -7381 is specific to the NodeJS bindings
+CVE_STATUS[CVE-2013-7381] = "cpe-incorrect: The issue is specific to the NodeJS bindings"
diff --git a/meta/recipes-gnome/libportal/libportal_0.7.1.bb b/meta/recipes-gnome/libportal/libportal_0.7.1.bb
new file mode 100644
index 0000000000..22e45559c9
--- /dev/null
+++ b/meta/recipes-gnome/libportal/libportal_0.7.1.bb
@@ -0,0 +1,20 @@
+SUMMARY = "libportal provides GIO-style async APIs for most Flatpak portals."
+DESCRIPTION = "It provides simple asynchronous wrappers for most Flatpak portals \
+with a familiar GObject API along side the D-Bus API"
+HOMEPAGE = "https://github.com/flatpak/libportal"
+BUGTRACKER = "https://github.com/flatpak/libportal/issues"
+LICENSE = "LGPL-3.0-only"
+LIC_FILES_CHKSUM = "file://COPYING;md5=3000208d539ec061b899bce1d9ce9404"
+
+SRC_URI = "git://github.com/flatpak/${BPN}.git;protocol=https;branch=main"
+SRCREV = "e9ed3a50cdde321eaf42361212480a66eb94a57a"
+S = "${WORKDIR}/git"
+
+inherit meson gi-docgen gobject-introspection vala features_check pkgconfig
+GIDOCGEN_MESON_OPTION = 'docs'
+
+ANY_OF_DISTRO_FEATURES = "${GTK3DISTROFEATURES}"
+
+DEPENDS += "glib-2.0 glib-2.0-native gtk+3 ${@bb.utils.contains('DISTRO_FEATURES', 'opengl', 'gtk4', '', d)}"
+
+EXTRA_OEMESON = "${@bb.utils.contains('GI_DATA_ENABLED', 'True', '-Dvapi=true', '-Dvapi=false', d)} -Dbackend-qt5=disabled"
diff --git a/meta/recipes-gnome/librsvg/librsvg-crates.inc b/meta/recipes-gnome/librsvg/librsvg-crates.inc
new file mode 100644
index 0000000000..94eba30948
--- /dev/null
+++ b/meta/recipes-gnome/librsvg/librsvg-crates.inc
@@ -0,0 +1,546 @@
+# Autogenerated with 'bitbake -c update_crates librsvg'
+
+# from Cargo.lock
+SRC_URI += " \
+ crate://crates.io/adler/1.0.2 \
+ crate://crates.io/aho-corasick/1.1.2 \
+ crate://crates.io/android-tzdata/0.1.1 \
+ crate://crates.io/android_system_properties/0.1.5 \
+ crate://crates.io/anes/0.1.6 \
+ crate://crates.io/anstream/0.6.5 \
+ crate://crates.io/anstyle/1.0.4 \
+ crate://crates.io/anstyle-parse/0.2.3 \
+ crate://crates.io/anstyle-query/1.0.2 \
+ crate://crates.io/anstyle-wincon/3.0.2 \
+ crate://crates.io/anyhow/1.0.75 \
+ crate://crates.io/approx/0.5.1 \
+ crate://crates.io/assert_cmd/2.0.12 \
+ crate://crates.io/autocfg/1.1.0 \
+ crate://crates.io/bit-set/0.5.3 \
+ crate://crates.io/bit-vec/0.6.3 \
+ crate://crates.io/bitflags/1.3.2 \
+ crate://crates.io/bitflags/2.4.1 \
+ crate://crates.io/block/0.1.6 \
+ crate://crates.io/bstr/1.8.0 \
+ crate://crates.io/bumpalo/3.14.0 \
+ crate://crates.io/bytemuck/1.14.0 \
+ crate://crates.io/byteorder/1.5.0 \
+ crate://crates.io/cairo-rs/0.18.3 \
+ crate://crates.io/cairo-sys-rs/0.18.2 \
+ crate://crates.io/cast/0.3.0 \
+ crate://crates.io/cc/1.0.83 \
+ crate://crates.io/cfg-expr/0.15.5 \
+ crate://crates.io/cfg-if/1.0.0 \
+ crate://crates.io/chrono/0.4.31 \
+ crate://crates.io/ciborium/0.2.1 \
+ crate://crates.io/ciborium-io/0.2.1 \
+ crate://crates.io/ciborium-ll/0.2.1 \
+ crate://crates.io/clap/4.4.11 \
+ crate://crates.io/clap_builder/4.4.11 \
+ crate://crates.io/clap_complete/4.4.4 \
+ crate://crates.io/clap_derive/4.4.7 \
+ crate://crates.io/clap_lex/0.6.0 \
+ crate://crates.io/colorchoice/1.0.0 \
+ crate://crates.io/const-cstr/0.3.0 \
+ crate://crates.io/core-foundation-sys/0.8.6 \
+ crate://crates.io/crc32fast/1.3.2 \
+ crate://crates.io/criterion/0.5.1 \
+ crate://crates.io/criterion-plot/0.5.0 \
+ crate://crates.io/crossbeam-deque/0.8.4 \
+ crate://crates.io/crossbeam-epoch/0.9.16 \
+ crate://crates.io/crossbeam-utils/0.8.17 \
+ crate://crates.io/cssparser/0.31.2 \
+ crate://crates.io/cssparser-macros/0.6.1 \
+ crate://crates.io/cstr/0.2.11 \
+ crate://crates.io/data-url/0.3.1 \
+ crate://crates.io/deranged/0.3.10 \
+ crate://crates.io/derive_more/0.99.17 \
+ crate://crates.io/difflib/0.4.0 \
+ crate://crates.io/dlib/0.5.2 \
+ crate://crates.io/doc-comment/0.3.3 \
+ crate://crates.io/dtoa/1.0.9 \
+ crate://crates.io/dtoa-short/0.3.4 \
+ crate://crates.io/either/1.9.0 \
+ crate://crates.io/encoding_rs/0.8.33 \
+ crate://crates.io/equivalent/1.0.1 \
+ crate://crates.io/errno/0.3.8 \
+ crate://crates.io/fastrand/2.0.1 \
+ crate://crates.io/fdeflate/0.3.1 \
+ crate://crates.io/flate2/1.0.28 \
+ crate://crates.io/float-cmp/0.9.0 \
+ crate://crates.io/fnv/1.0.7 \
+ crate://crates.io/form_urlencoded/1.2.1 \
+ crate://crates.io/futf/0.1.5 \
+ crate://crates.io/futures-channel/0.3.29 \
+ crate://crates.io/futures-core/0.3.29 \
+ crate://crates.io/futures-executor/0.3.29 \
+ crate://crates.io/futures-io/0.3.29 \
+ crate://crates.io/futures-macro/0.3.29 \
+ crate://crates.io/futures-task/0.3.29 \
+ crate://crates.io/futures-util/0.3.29 \
+ crate://crates.io/fxhash/0.2.1 \
+ crate://crates.io/gdk-pixbuf/0.18.3 \
+ crate://crates.io/gdk-pixbuf-sys/0.18.0 \
+ crate://crates.io/getrandom/0.2.11 \
+ crate://crates.io/gio/0.18.4 \
+ crate://crates.io/gio-sys/0.18.1 \
+ crate://crates.io/glib/0.18.4 \
+ crate://crates.io/glib-macros/0.18.3 \
+ crate://crates.io/glib-sys/0.18.1 \
+ crate://crates.io/gobject-sys/0.18.0 \
+ crate://crates.io/half/1.8.2 \
+ crate://crates.io/hashbrown/0.14.3 \
+ crate://crates.io/heck/0.4.1 \
+ crate://crates.io/hermit-abi/0.3.3 \
+ crate://crates.io/iana-time-zone/0.1.58 \
+ crate://crates.io/iana-time-zone-haiku/0.1.2 \
+ crate://crates.io/idna/0.5.0 \
+ crate://crates.io/indexmap/2.1.0 \
+ crate://crates.io/is-terminal/0.4.9 \
+ crate://crates.io/itertools/0.10.5 \
+ crate://crates.io/itertools/0.11.0 \
+ crate://crates.io/itoa/1.0.10 \
+ crate://crates.io/js-sys/0.3.66 \
+ crate://crates.io/language-tags/0.3.2 \
+ crate://crates.io/lazy_static/1.4.0 \
+ crate://crates.io/libc/0.2.151 \
+ crate://crates.io/libloading/0.8.1 \
+ crate://crates.io/libm/0.2.8 \
+ crate://crates.io/linked-hash-map/0.5.6 \
+ crate://crates.io/linux-raw-sys/0.4.12 \
+ crate://crates.io/locale_config/0.3.0 \
+ crate://crates.io/lock_api/0.4.11 \
+ crate://crates.io/log/0.4.20 \
+ crate://crates.io/lopdf/0.31.0 \
+ crate://crates.io/mac/0.1.1 \
+ crate://crates.io/malloc_buf/0.0.6 \
+ crate://crates.io/markup5ever/0.11.0 \
+ crate://crates.io/matches/0.1.10 \
+ crate://crates.io/matrixmultiply/0.3.8 \
+ crate://crates.io/md5/0.7.0 \
+ crate://crates.io/memchr/2.6.4 \
+ crate://crates.io/memoffset/0.9.0 \
+ crate://crates.io/minimal-lexical/0.2.1 \
+ crate://crates.io/miniz_oxide/0.7.1 \
+ crate://crates.io/nalgebra/0.32.3 \
+ crate://crates.io/nalgebra-macros/0.2.1 \
+ crate://crates.io/new_debug_unreachable/1.0.4 \
+ crate://crates.io/nom/7.1.3 \
+ crate://crates.io/normalize-line-endings/0.3.0 \
+ crate://crates.io/num-complex/0.4.4 \
+ crate://crates.io/num-integer/0.1.45 \
+ crate://crates.io/num-rational/0.4.1 \
+ crate://crates.io/num-traits/0.2.17 \
+ crate://crates.io/objc/0.2.7 \
+ crate://crates.io/objc-foundation/0.1.1 \
+ crate://crates.io/objc_id/0.1.1 \
+ crate://crates.io/once_cell/1.19.0 \
+ crate://crates.io/oorandom/11.1.3 \
+ crate://crates.io/pango/0.18.3 \
+ crate://crates.io/pango-sys/0.18.0 \
+ crate://crates.io/pangocairo/0.18.0 \
+ crate://crates.io/pangocairo-sys/0.18.0 \
+ crate://crates.io/parking_lot/0.12.1 \
+ crate://crates.io/parking_lot_core/0.9.9 \
+ crate://crates.io/paste/1.0.14 \
+ crate://crates.io/percent-encoding/2.3.1 \
+ crate://crates.io/phf/0.10.1 \
+ crate://crates.io/phf/0.11.2 \
+ crate://crates.io/phf_codegen/0.10.0 \
+ crate://crates.io/phf_generator/0.10.0 \
+ crate://crates.io/phf_generator/0.11.2 \
+ crate://crates.io/phf_macros/0.11.2 \
+ crate://crates.io/phf_shared/0.10.0 \
+ crate://crates.io/phf_shared/0.11.2 \
+ crate://crates.io/pin-project-lite/0.2.13 \
+ crate://crates.io/pin-utils/0.1.0 \
+ crate://crates.io/pkg-config/0.3.27 \
+ crate://crates.io/plotters/0.3.5 \
+ crate://crates.io/plotters-backend/0.3.5 \
+ crate://crates.io/plotters-svg/0.3.5 \
+ crate://crates.io/png/0.17.10 \
+ crate://crates.io/powerfmt/0.2.0 \
+ crate://crates.io/ppv-lite86/0.2.17 \
+ crate://crates.io/precomputed-hash/0.1.1 \
+ crate://crates.io/predicates/3.0.4 \
+ crate://crates.io/predicates-core/1.0.6 \
+ crate://crates.io/predicates-tree/1.0.9 \
+ crate://crates.io/proc-macro-crate/2.0.1 \
+ crate://crates.io/proc-macro-error/1.0.4 \
+ crate://crates.io/proc-macro-error-attr/1.0.4 \
+ crate://crates.io/proc-macro2/1.0.70 \
+ crate://crates.io/proptest/1.4.0 \
+ crate://crates.io/quick-error/1.2.3 \
+ crate://crates.io/quick-error/2.0.1 \
+ crate://crates.io/quote/1.0.33 \
+ crate://crates.io/rand/0.8.5 \
+ crate://crates.io/rand_chacha/0.3.1 \
+ crate://crates.io/rand_core/0.6.4 \
+ crate://crates.io/rand_xorshift/0.3.0 \
+ crate://crates.io/rawpointer/0.2.1 \
+ crate://crates.io/rayon/1.8.0 \
+ crate://crates.io/rayon-core/1.12.0 \
+ crate://crates.io/rctree/0.5.0 \
+ crate://crates.io/redox_syscall/0.4.1 \
+ crate://crates.io/regex/1.10.2 \
+ crate://crates.io/regex-automata/0.4.3 \
+ crate://crates.io/regex-syntax/0.8.2 \
+ crate://crates.io/rgb/0.8.37 \
+ crate://crates.io/rustix/0.38.28 \
+ crate://crates.io/rusty-fork/0.3.0 \
+ crate://crates.io/ryu/1.0.16 \
+ crate://crates.io/safe_arch/0.7.1 \
+ crate://crates.io/same-file/1.0.6 \
+ crate://crates.io/scopeguard/1.2.0 \
+ crate://crates.io/selectors/0.25.0 \
+ crate://crates.io/serde/1.0.193 \
+ crate://crates.io/serde_derive/1.0.193 \
+ crate://crates.io/serde_json/1.0.108 \
+ crate://crates.io/serde_spanned/0.6.4 \
+ crate://crates.io/servo_arc/0.3.0 \
+ crate://crates.io/simba/0.8.1 \
+ crate://crates.io/simd-adler32/0.3.7 \
+ crate://crates.io/siphasher/0.3.11 \
+ crate://crates.io/slab/0.4.9 \
+ crate://crates.io/smallvec/1.11.2 \
+ crate://crates.io/stable_deref_trait/1.2.0 \
+ crate://crates.io/string_cache/0.8.7 \
+ crate://crates.io/string_cache_codegen/0.5.2 \
+ crate://crates.io/strsim/0.10.0 \
+ crate://crates.io/syn/1.0.109 \
+ crate://crates.io/syn/2.0.41 \
+ crate://crates.io/system-deps/6.2.0 \
+ crate://crates.io/target-lexicon/0.12.12 \
+ crate://crates.io/tempfile/3.8.1 \
+ crate://crates.io/tendril/0.4.3 \
+ crate://crates.io/termtree/0.4.1 \
+ crate://crates.io/thiserror/1.0.50 \
+ crate://crates.io/thiserror-impl/1.0.50 \
+ crate://crates.io/time/0.3.30 \
+ crate://crates.io/time-core/0.1.2 \
+ crate://crates.io/time-macros/0.2.15 \
+ crate://crates.io/tinytemplate/1.2.1 \
+ crate://crates.io/tinyvec/1.6.0 \
+ crate://crates.io/tinyvec_macros/0.1.1 \
+ crate://crates.io/toml/0.8.2 \
+ crate://crates.io/toml_datetime/0.6.3 \
+ crate://crates.io/toml_edit/0.20.2 \
+ crate://crates.io/typenum/1.17.0 \
+ crate://crates.io/unarray/0.1.4 \
+ crate://crates.io/unicode-bidi/0.3.14 \
+ crate://crates.io/unicode-ident/1.0.12 \
+ crate://crates.io/unicode-normalization/0.1.22 \
+ crate://crates.io/url/2.5.0 \
+ crate://crates.io/utf-8/0.7.6 \
+ crate://crates.io/utf8parse/0.2.1 \
+ crate://crates.io/version-compare/0.1.1 \
+ crate://crates.io/version_check/0.9.4 \
+ crate://crates.io/wait-timeout/0.2.0 \
+ crate://crates.io/walkdir/2.4.0 \
+ crate://crates.io/wasi/0.11.0+wasi-snapshot-preview1 \
+ crate://crates.io/wasm-bindgen/0.2.89 \
+ crate://crates.io/wasm-bindgen-backend/0.2.89 \
+ crate://crates.io/wasm-bindgen-macro/0.2.89 \
+ crate://crates.io/wasm-bindgen-macro-support/0.2.89 \
+ crate://crates.io/wasm-bindgen-shared/0.2.89 \
+ crate://crates.io/web-sys/0.3.66 \
+ crate://crates.io/weezl/0.1.7 \
+ crate://crates.io/wide/0.7.13 \
+ crate://crates.io/winapi/0.3.9 \
+ crate://crates.io/winapi-i686-pc-windows-gnu/0.4.0 \
+ crate://crates.io/winapi-util/0.1.6 \
+ crate://crates.io/winapi-x86_64-pc-windows-gnu/0.4.0 \
+ crate://crates.io/windows-core/0.51.1 \
+ crate://crates.io/windows-sys/0.48.0 \
+ crate://crates.io/windows-sys/0.52.0 \
+ crate://crates.io/windows-targets/0.48.5 \
+ crate://crates.io/windows-targets/0.52.0 \
+ crate://crates.io/windows_aarch64_gnullvm/0.48.5 \
+ crate://crates.io/windows_aarch64_gnullvm/0.52.0 \
+ crate://crates.io/windows_aarch64_msvc/0.48.5 \
+ crate://crates.io/windows_aarch64_msvc/0.52.0 \
+ crate://crates.io/windows_i686_gnu/0.48.5 \
+ crate://crates.io/windows_i686_gnu/0.52.0 \
+ crate://crates.io/windows_i686_msvc/0.48.5 \
+ crate://crates.io/windows_i686_msvc/0.52.0 \
+ crate://crates.io/windows_x86_64_gnu/0.48.5 \
+ crate://crates.io/windows_x86_64_gnu/0.52.0 \
+ crate://crates.io/windows_x86_64_gnullvm/0.48.5 \
+ crate://crates.io/windows_x86_64_gnullvm/0.52.0 \
+ crate://crates.io/windows_x86_64_msvc/0.48.5 \
+ crate://crates.io/windows_x86_64_msvc/0.52.0 \
+ crate://crates.io/winnow/0.5.28 \
+ crate://crates.io/xml5ever/0.17.0 \
+ crate://crates.io/yeslogic-fontconfig-sys/4.0.1 \
+"
+
+SRC_URI[adler-1.0.2.sha256sum] = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe"
+SRC_URI[aho-corasick-1.1.2.sha256sum] = "b2969dcb958b36655471fc61f7e416fa76033bdd4bfed0678d8fee1e2d07a1f0"
+SRC_URI[android-tzdata-0.1.1.sha256sum] = "e999941b234f3131b00bc13c22d06e8c5ff726d1b6318ac7eb276997bbb4fef0"
+SRC_URI[android_system_properties-0.1.5.sha256sum] = "819e7219dbd41043ac279b19830f2efc897156490d7fd6ea916720117ee66311"
+SRC_URI[anes-0.1.6.sha256sum] = "4b46cbb362ab8752921c97e041f5e366ee6297bd428a31275b9fcf1e380f7299"
+SRC_URI[anstream-0.6.5.sha256sum] = "d664a92ecae85fd0a7392615844904654d1d5f5514837f471ddef4a057aba1b6"
+SRC_URI[anstyle-1.0.4.sha256sum] = "7079075b41f533b8c61d2a4d073c4676e1f8b249ff94a393b0595db304e0dd87"
+SRC_URI[anstyle-parse-0.2.3.sha256sum] = "c75ac65da39e5fe5ab759307499ddad880d724eed2f6ce5b5e8a26f4f387928c"
+SRC_URI[anstyle-query-1.0.2.sha256sum] = "e28923312444cdd728e4738b3f9c9cac739500909bb3d3c94b43551b16517648"
+SRC_URI[anstyle-wincon-3.0.2.sha256sum] = "1cd54b81ec8d6180e24654d0b371ad22fc3dd083b6ff8ba325b72e00c87660a7"
+SRC_URI[anyhow-1.0.75.sha256sum] = "a4668cab20f66d8d020e1fbc0ebe47217433c1b6c8f2040faf858554e394ace6"
+SRC_URI[approx-0.5.1.sha256sum] = "cab112f0a86d568ea0e627cc1d6be74a1e9cd55214684db5561995f6dad897c6"
+SRC_URI[assert_cmd-2.0.12.sha256sum] = "88903cb14723e4d4003335bb7f8a14f27691649105346a0f0957466c096adfe6"
+SRC_URI[autocfg-1.1.0.sha256sum] = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa"
+SRC_URI[bit-set-0.5.3.sha256sum] = "0700ddab506f33b20a03b13996eccd309a48e5ff77d0d95926aa0210fb4e95f1"
+SRC_URI[bit-vec-0.6.3.sha256sum] = "349f9b6a179ed607305526ca489b34ad0a41aed5f7980fa90eb03160b69598fb"
+SRC_URI[bitflags-1.3.2.sha256sum] = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a"
+SRC_URI[bitflags-2.4.1.sha256sum] = "327762f6e5a765692301e5bb513e0d9fef63be86bbc14528052b1cd3e6f03e07"
+SRC_URI[block-0.1.6.sha256sum] = "0d8c1fef690941d3e7788d328517591fecc684c084084702d6ff1641e993699a"
+SRC_URI[bstr-1.8.0.sha256sum] = "542f33a8835a0884b006a0c3df3dadd99c0c3f296ed26c2fdc8028e01ad6230c"
+SRC_URI[bumpalo-3.14.0.sha256sum] = "7f30e7476521f6f8af1a1c4c0b8cc94f0bee37d91763d0ca2665f299b6cd8aec"
+SRC_URI[bytemuck-1.14.0.sha256sum] = "374d28ec25809ee0e23827c2ab573d729e293f281dfe393500e7ad618baa61c6"
+SRC_URI[byteorder-1.5.0.sha256sum] = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b"
+SRC_URI[cairo-rs-0.18.3.sha256sum] = "f33613627f0dea6a731b0605101fad59ba4f193a52c96c4687728d822605a8a1"
+SRC_URI[cairo-sys-rs-0.18.2.sha256sum] = "685c9fa8e590b8b3d678873528d83411db17242a73fccaed827770ea0fedda51"
+SRC_URI[cast-0.3.0.sha256sum] = "37b2a672a2cb129a2e41c10b1224bb368f9f37a2b16b612598138befd7b37eb5"
+SRC_URI[cc-1.0.83.sha256sum] = "f1174fb0b6ec23863f8b971027804a42614e347eafb0a95bf0b12cdae21fc4d0"
+SRC_URI[cfg-expr-0.15.5.sha256sum] = "03915af431787e6ffdcc74c645077518c6b6e01f80b761e0fbbfa288536311b3"
+SRC_URI[cfg-if-1.0.0.sha256sum] = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
+SRC_URI[chrono-0.4.31.sha256sum] = "7f2c685bad3eb3d45a01354cedb7d5faa66194d1d58ba6e267a8de788f79db38"
+SRC_URI[ciborium-0.2.1.sha256sum] = "effd91f6c78e5a4ace8a5d3c0b6bfaec9e2baaef55f3efc00e45fb2e477ee926"
+SRC_URI[ciborium-io-0.2.1.sha256sum] = "cdf919175532b369853f5d5e20b26b43112613fd6fe7aee757e35f7a44642656"
+SRC_URI[ciborium-ll-0.2.1.sha256sum] = "defaa24ecc093c77630e6c15e17c51f5e187bf35ee514f4e2d67baaa96dae22b"
+SRC_URI[clap-4.4.11.sha256sum] = "bfaff671f6b22ca62406885ece523383b9b64022e341e53e009a62ebc47a45f2"
+SRC_URI[clap_builder-4.4.11.sha256sum] = "a216b506622bb1d316cd51328dce24e07bdff4a6128a47c7e7fad11878d5adbb"
+SRC_URI[clap_complete-4.4.4.sha256sum] = "bffe91f06a11b4b9420f62103854e90867812cd5d01557f853c5ee8e791b12ae"
+SRC_URI[clap_derive-4.4.7.sha256sum] = "cf9804afaaf59a91e75b022a30fb7229a7901f60c755489cc61c9b423b836442"
+SRC_URI[clap_lex-0.6.0.sha256sum] = "702fc72eb24e5a1e48ce58027a675bc24edd52096d5397d4aea7c6dd9eca0bd1"
+SRC_URI[colorchoice-1.0.0.sha256sum] = "acbf1af155f9b9ef647e42cdc158db4b64a1b61f743629225fde6f3e0be2a7c7"
+SRC_URI[const-cstr-0.3.0.sha256sum] = "ed3d0b5ff30645a68f35ece8cea4556ca14ef8a1651455f789a099a0513532a6"
+SRC_URI[core-foundation-sys-0.8.6.sha256sum] = "06ea2b9bc92be3c2baa9334a323ebca2d6f074ff852cd1d7b11064035cd3868f"
+SRC_URI[crc32fast-1.3.2.sha256sum] = "b540bd8bc810d3885c6ea91e2018302f68baba2129ab3e88f32389ee9370880d"
+SRC_URI[criterion-0.5.1.sha256sum] = "f2b12d017a929603d80db1831cd3a24082f8137ce19c69e6447f54f5fc8d692f"
+SRC_URI[criterion-plot-0.5.0.sha256sum] = "6b50826342786a51a89e2da3a28f1c32b06e387201bc2d19791f622c673706b1"
+SRC_URI[crossbeam-deque-0.8.4.sha256sum] = "fca89a0e215bab21874660c67903c5f143333cab1da83d041c7ded6053774751"
+SRC_URI[crossbeam-epoch-0.9.16.sha256sum] = "2d2fe95351b870527a5d09bf563ed3c97c0cffb87cf1c78a591bf48bb218d9aa"
+SRC_URI[crossbeam-utils-0.8.17.sha256sum] = "c06d96137f14f244c37f989d9fff8f95e6c18b918e71f36638f8c49112e4c78f"
+SRC_URI[cssparser-0.31.2.sha256sum] = "5b3df4f93e5fbbe73ec01ec8d3f68bba73107993a5b1e7519273c32db9b0d5be"
+SRC_URI[cssparser-macros-0.6.1.sha256sum] = "13b588ba4ac1a99f7f2964d24b3d896ddc6bf847ee3855dbd4366f058cfcd331"
+SRC_URI[cstr-0.2.11.sha256sum] = "8aa998c33a6d3271e3678950a22134cd7dd27cef86dee1b611b5b14207d1d90b"
+SRC_URI[data-url-0.3.1.sha256sum] = "5c297a1c74b71ae29df00c3e22dd9534821d60eb9af5a0192823fa2acea70c2a"
+SRC_URI[deranged-0.3.10.sha256sum] = "8eb30d70a07a3b04884d2677f06bec33509dc67ca60d92949e5535352d3191dc"
+SRC_URI[derive_more-0.99.17.sha256sum] = "4fb810d30a7c1953f91334de7244731fc3f3c10d7fe163338a35b9f640960321"
+SRC_URI[difflib-0.4.0.sha256sum] = "6184e33543162437515c2e2b48714794e37845ec9851711914eec9d308f6ebe8"
+SRC_URI[dlib-0.5.2.sha256sum] = "330c60081dcc4c72131f8eb70510f1ac07223e5d4163db481a04a0befcffa412"
+SRC_URI[doc-comment-0.3.3.sha256sum] = "fea41bba32d969b513997752735605054bc0dfa92b4c56bf1189f2e174be7a10"
+SRC_URI[dtoa-1.0.9.sha256sum] = "dcbb2bf8e87535c23f7a8a321e364ce21462d0ff10cb6407820e8e96dfff6653"
+SRC_URI[dtoa-short-0.3.4.sha256sum] = "dbaceec3c6e4211c79e7b1800fb9680527106beb2f9c51904a3210c03a448c74"
+SRC_URI[either-1.9.0.sha256sum] = "a26ae43d7bcc3b814de94796a5e736d4029efb0ee900c12e2d54c993ad1a1e07"
+SRC_URI[encoding_rs-0.8.33.sha256sum] = "7268b386296a025e474d5140678f75d6de9493ae55a5d709eeb9dd08149945e1"
+SRC_URI[equivalent-1.0.1.sha256sum] = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5"
+SRC_URI[errno-0.3.8.sha256sum] = "a258e46cdc063eb8519c00b9fc845fc47bcfca4130e2f08e88665ceda8474245"
+SRC_URI[fastrand-2.0.1.sha256sum] = "25cbce373ec4653f1a01a31e8a5e5ec0c622dc27ff9c4e6606eefef5cbbed4a5"
+SRC_URI[fdeflate-0.3.1.sha256sum] = "64d6dafc854908ff5da46ff3f8f473c6984119a2876a383a860246dd7841a868"
+SRC_URI[flate2-1.0.28.sha256sum] = "46303f565772937ffe1d394a4fac6f411c6013172fadde9dcdb1e147a086940e"
+SRC_URI[float-cmp-0.9.0.sha256sum] = "98de4bbd547a563b716d8dfa9aad1cb19bfab00f4fa09a6a4ed21dbcf44ce9c4"
+SRC_URI[fnv-1.0.7.sha256sum] = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1"
+SRC_URI[form_urlencoded-1.2.1.sha256sum] = "e13624c2627564efccf4934284bdd98cbaa14e79b0b5a141218e507b3a823456"
+SRC_URI[futf-0.1.5.sha256sum] = "df420e2e84819663797d1ec6544b13c5be84629e7bb00dc960d6917db2987843"
+SRC_URI[futures-channel-0.3.29.sha256sum] = "ff4dd66668b557604244583e3e1e1eada8c5c2e96a6d0d6653ede395b78bbacb"
+SRC_URI[futures-core-0.3.29.sha256sum] = "eb1d22c66e66d9d72e1758f0bd7d4fd0bee04cad842ee34587d68c07e45d088c"
+SRC_URI[futures-executor-0.3.29.sha256sum] = "0f4fb8693db0cf099eadcca0efe2a5a22e4550f98ed16aba6c48700da29597bc"
+SRC_URI[futures-io-0.3.29.sha256sum] = "8bf34a163b5c4c52d0478a4d757da8fb65cabef42ba90515efee0f6f9fa45aaa"
+SRC_URI[futures-macro-0.3.29.sha256sum] = "53b153fd91e4b0147f4aced87be237c98248656bb01050b96bf3ee89220a8ddb"
+SRC_URI[futures-task-0.3.29.sha256sum] = "efd193069b0ddadc69c46389b740bbccdd97203899b48d09c5f7969591d6bae2"
+SRC_URI[futures-util-0.3.29.sha256sum] = "a19526d624e703a3179b3d322efec918b6246ea0fa51d41124525f00f1cc8104"
+SRC_URI[fxhash-0.2.1.sha256sum] = "c31b6d751ae2c7f11320402d34e41349dd1016f8d5d45e48c4312bc8625af50c"
+SRC_URI[gdk-pixbuf-0.18.3.sha256sum] = "446f32b74d22c33b7b258d4af4ffde53c2bf96ca2e29abdf1a785fe59bd6c82c"
+SRC_URI[gdk-pixbuf-sys-0.18.0.sha256sum] = "3f9839ea644ed9c97a34d129ad56d38a25e6756f99f3a88e15cd39c20629caf7"
+SRC_URI[getrandom-0.2.11.sha256sum] = "fe9006bed769170c11f845cf00c7c1e9092aeb3f268e007c3e760ac68008070f"
+SRC_URI[gio-0.18.4.sha256sum] = "d4fc8f532f87b79cbc51a79748f16a6828fb784be93145a322fa14d06d354c73"
+SRC_URI[gio-sys-0.18.1.sha256sum] = "37566df850baf5e4cb0dfb78af2e4b9898d817ed9263d1090a2df958c64737d2"
+SRC_URI[glib-0.18.4.sha256sum] = "951bbd7fdc5c044ede9f05170f05a3ae9479239c3afdfe2d22d537a3add15c4e"
+SRC_URI[glib-macros-0.18.3.sha256sum] = "72793962ceece3863c2965d7f10c8786323b17c7adea75a515809fa20ab799a5"
+SRC_URI[glib-sys-0.18.1.sha256sum] = "063ce2eb6a8d0ea93d2bf8ba1957e78dbab6be1c2220dd3daca57d5a9d869898"
+SRC_URI[gobject-sys-0.18.0.sha256sum] = "0850127b514d1c4a4654ead6dedadb18198999985908e6ffe4436f53c785ce44"
+SRC_URI[half-1.8.2.sha256sum] = "eabb4a44450da02c90444cf74558da904edde8fb4e9035a9a6a4e15445af0bd7"
+SRC_URI[hashbrown-0.14.3.sha256sum] = "290f1a1d9242c78d09ce40a5e87e7554ee637af1351968159f4952f028f75604"
+SRC_URI[heck-0.4.1.sha256sum] = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8"
+SRC_URI[hermit-abi-0.3.3.sha256sum] = "d77f7ec81a6d05a3abb01ab6eb7590f6083d08449fe5a1c8b1e620283546ccb7"
+SRC_URI[iana-time-zone-0.1.58.sha256sum] = "8326b86b6cff230b97d0d312a6c40a60726df3332e721f72a1b035f451663b20"
+SRC_URI[iana-time-zone-haiku-0.1.2.sha256sum] = "f31827a206f56af32e590ba56d5d2d085f558508192593743f16b2306495269f"
+SRC_URI[idna-0.5.0.sha256sum] = "634d9b1461af396cad843f47fdba5597a4f9e6ddd4bfb6ff5d85028c25cb12f6"
+SRC_URI[indexmap-2.1.0.sha256sum] = "d530e1a18b1cb4c484e6e34556a0d948706958449fca0cab753d649f2bce3d1f"
+SRC_URI[is-terminal-0.4.9.sha256sum] = "cb0889898416213fab133e1d33a0e5858a48177452750691bde3666d0fdbaf8b"
+SRC_URI[itertools-0.10.5.sha256sum] = "b0fd2260e829bddf4cb6ea802289de2f86d6a7a690192fbe91b3f46e0f2c8473"
+SRC_URI[itertools-0.11.0.sha256sum] = "b1c173a5686ce8bfa551b3563d0c2170bf24ca44da99c7ca4bfdab5418c3fe57"
+SRC_URI[itoa-1.0.10.sha256sum] = "b1a46d1a171d865aa5f83f92695765caa047a9b4cbae2cbf37dbd613a793fd4c"
+SRC_URI[js-sys-0.3.66.sha256sum] = "cee9c64da59eae3b50095c18d3e74f8b73c0b86d2792824ff01bbce68ba229ca"
+SRC_URI[language-tags-0.3.2.sha256sum] = "d4345964bb142484797b161f473a503a434de77149dd8c7427788c6e13379388"
+SRC_URI[lazy_static-1.4.0.sha256sum] = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
+SRC_URI[libc-0.2.151.sha256sum] = "302d7ab3130588088d277783b1e2d2e10c9e9e4a16dd9050e6ec93fb3e7048f4"
+SRC_URI[libloading-0.8.1.sha256sum] = "c571b676ddfc9a8c12f1f3d3085a7b163966a8fd8098a90640953ce5f6170161"
+SRC_URI[libm-0.2.8.sha256sum] = "4ec2a862134d2a7d32d7983ddcdd1c4923530833c9f2ea1a44fc5fa473989058"
+SRC_URI[linked-hash-map-0.5.6.sha256sum] = "0717cef1bc8b636c6e1c1bbdefc09e6322da8a9321966e8928ef80d20f7f770f"
+SRC_URI[linux-raw-sys-0.4.12.sha256sum] = "c4cd1a83af159aa67994778be9070f0ae1bd732942279cabb14f86f986a21456"
+SRC_URI[locale_config-0.3.0.sha256sum] = "08d2c35b16f4483f6c26f0e4e9550717a2f6575bcd6f12a53ff0c490a94a6934"
+SRC_URI[lock_api-0.4.11.sha256sum] = "3c168f8615b12bc01f9c17e2eb0cc07dcae1940121185446edc3744920e8ef45"
+SRC_URI[log-0.4.20.sha256sum] = "b5e6163cb8c49088c2c36f57875e58ccd8c87c7427f7fbd50ea6710b2f3f2e8f"
+SRC_URI[lopdf-0.31.0.sha256sum] = "07c8e1b6184b1b32ea5f72f572ebdc40e5da1d2921fa469947ff7c480ad1f85a"
+SRC_URI[mac-0.1.1.sha256sum] = "c41e0c4fef86961ac6d6f8a82609f55f31b05e4fce149ac5710e439df7619ba4"
+SRC_URI[malloc_buf-0.0.6.sha256sum] = "62bb907fe88d54d8d9ce32a3cceab4218ed2f6b7d35617cafe9adf84e43919cb"
+SRC_URI[markup5ever-0.11.0.sha256sum] = "7a2629bb1404f3d34c2e921f21fd34ba00b206124c81f65c50b43b6aaefeb016"
+SRC_URI[matches-0.1.10.sha256sum] = "2532096657941c2fea9c289d370a250971c689d4f143798ff67113ec042024a5"
+SRC_URI[matrixmultiply-0.3.8.sha256sum] = "7574c1cf36da4798ab73da5b215bbf444f50718207754cb522201d78d1cd0ff2"
+SRC_URI[md5-0.7.0.sha256sum] = "490cc448043f947bae3cbee9c203358d62dbee0db12107a74be5c30ccfd09771"
+SRC_URI[memchr-2.6.4.sha256sum] = "f665ee40bc4a3c5590afb1e9677db74a508659dfd71e126420da8274909a0167"
+SRC_URI[memoffset-0.9.0.sha256sum] = "5a634b1c61a95585bd15607c6ab0c4e5b226e695ff2800ba0cdccddf208c406c"
+SRC_URI[minimal-lexical-0.2.1.sha256sum] = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a"
+SRC_URI[miniz_oxide-0.7.1.sha256sum] = "e7810e0be55b428ada41041c41f32c9f1a42817901b4ccf45fa3d4b6561e74c7"
+SRC_URI[nalgebra-0.32.3.sha256sum] = "307ed9b18cc2423f29e83f84fd23a8e73628727990181f18641a8b5dc2ab1caa"
+SRC_URI[nalgebra-macros-0.2.1.sha256sum] = "91761aed67d03ad966ef783ae962ef9bbaca728d2dd7ceb7939ec110fffad998"
+SRC_URI[new_debug_unreachable-1.0.4.sha256sum] = "e4a24736216ec316047a1fc4252e27dabb04218aa4a3f37c6e7ddbf1f9782b54"
+SRC_URI[nom-7.1.3.sha256sum] = "d273983c5a657a70a3e8f2a01329822f3b8c8172b73826411a55751e404a0a4a"
+SRC_URI[normalize-line-endings-0.3.0.sha256sum] = "61807f77802ff30975e01f4f071c8ba10c022052f98b3294119f3e615d13e5be"
+SRC_URI[num-complex-0.4.4.sha256sum] = "1ba157ca0885411de85d6ca030ba7e2a83a28636056c7c699b07c8b6f7383214"
+SRC_URI[num-integer-0.1.45.sha256sum] = "225d3389fb3509a24c93f5c29eb6bde2586b98d9f016636dff58d7c6f7569cd9"
+SRC_URI[num-rational-0.4.1.sha256sum] = "0638a1c9d0a3c0914158145bc76cff373a75a627e6ecbfb71cbe6f453a5a19b0"
+SRC_URI[num-traits-0.2.17.sha256sum] = "39e3200413f237f41ab11ad6d161bc7239c84dcb631773ccd7de3dfe4b5c267c"
+SRC_URI[objc-0.2.7.sha256sum] = "915b1b472bc21c53464d6c8461c9d3af805ba1ef837e1cac254428f4a77177b1"
+SRC_URI[objc-foundation-0.1.1.sha256sum] = "1add1b659e36c9607c7aab864a76c7a4c2760cd0cd2e120f3fb8b952c7e22bf9"
+SRC_URI[objc_id-0.1.1.sha256sum] = "c92d4ddb4bd7b50d730c215ff871754d0da6b2178849f8a2a2ab69712d0c073b"
+SRC_URI[once_cell-1.19.0.sha256sum] = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92"
+SRC_URI[oorandom-11.1.3.sha256sum] = "0ab1bc2a289d34bd04a330323ac98a1b4bc82c9d9fcb1e66b63caa84da26b575"
+SRC_URI[pango-0.18.3.sha256sum] = "7ca27ec1eb0457ab26f3036ea52229edbdb74dee1edd29063f5b9b010e7ebee4"
+SRC_URI[pango-sys-0.18.0.sha256sum] = "436737e391a843e5933d6d9aa102cb126d501e815b83601365a948a518555dc5"
+SRC_URI[pangocairo-0.18.0.sha256sum] = "57036589a9cfcacf83f9e606d15813fc6bf03f0e9e69aa2b5e3bb85af86b38a5"
+SRC_URI[pangocairo-sys-0.18.0.sha256sum] = "fc3c8ff676a37e7a72ec1d5fc029f91c407278083d2752784ff9f5188c108833"
+SRC_URI[parking_lot-0.12.1.sha256sum] = "3742b2c103b9f06bc9fff0a37ff4912935851bee6d36f3c02bcc755bcfec228f"
+SRC_URI[parking_lot_core-0.9.9.sha256sum] = "4c42a9226546d68acdd9c0a280d17ce19bfe27a46bf68784e4066115788d008e"
+SRC_URI[paste-1.0.14.sha256sum] = "de3145af08024dea9fa9914f381a17b8fc6034dfb00f3a84013f7ff43f29ed4c"
+SRC_URI[percent-encoding-2.3.1.sha256sum] = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e"
+SRC_URI[phf-0.10.1.sha256sum] = "fabbf1ead8a5bcbc20f5f8b939ee3f5b0f6f281b6ad3468b84656b658b455259"
+SRC_URI[phf-0.11.2.sha256sum] = "ade2d8b8f33c7333b51bcf0428d37e217e9f32192ae4772156f65063b8ce03dc"
+SRC_URI[phf_codegen-0.10.0.sha256sum] = "4fb1c3a8bc4dd4e5cfce29b44ffc14bedd2ee294559a294e2a4d4c9e9a6a13cd"
+SRC_URI[phf_generator-0.10.0.sha256sum] = "5d5285893bb5eb82e6aaf5d59ee909a06a16737a8970984dd7746ba9283498d6"
+SRC_URI[phf_generator-0.11.2.sha256sum] = "48e4cc64c2ad9ebe670cb8fd69dd50ae301650392e81c05f9bfcb2d5bdbc24b0"
+SRC_URI[phf_macros-0.11.2.sha256sum] = "3444646e286606587e49f3bcf1679b8cef1dc2c5ecc29ddacaffc305180d464b"
+SRC_URI[phf_shared-0.10.0.sha256sum] = "b6796ad771acdc0123d2a88dc428b5e38ef24456743ddb1744ed628f9815c096"
+SRC_URI[phf_shared-0.11.2.sha256sum] = "90fcb95eef784c2ac79119d1dd819e162b5da872ce6f3c3abe1e8ca1c082f72b"
+SRC_URI[pin-project-lite-0.2.13.sha256sum] = "8afb450f006bf6385ca15ef45d71d2288452bc3683ce2e2cacc0d18e4be60b58"
+SRC_URI[pin-utils-0.1.0.sha256sum] = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184"
+SRC_URI[pkg-config-0.3.27.sha256sum] = "26072860ba924cbfa98ea39c8c19b4dd6a4a25423dbdf219c1eca91aa0cf6964"
+SRC_URI[plotters-0.3.5.sha256sum] = "d2c224ba00d7cadd4d5c660deaf2098e5e80e07846537c51f9cfa4be50c1fd45"
+SRC_URI[plotters-backend-0.3.5.sha256sum] = "9e76628b4d3a7581389a35d5b6e2139607ad7c75b17aed325f210aa91f4a9609"
+SRC_URI[plotters-svg-0.3.5.sha256sum] = "38f6d39893cca0701371e3c27294f09797214b86f1fb951b89ade8ec04e2abab"
+SRC_URI[png-0.17.10.sha256sum] = "dd75bf2d8dd3702b9707cdbc56a5b9ef42cec752eb8b3bafc01234558442aa64"
+SRC_URI[powerfmt-0.2.0.sha256sum] = "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391"
+SRC_URI[ppv-lite86-0.2.17.sha256sum] = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de"
+SRC_URI[precomputed-hash-0.1.1.sha256sum] = "925383efa346730478fb4838dbe9137d2a47675ad789c546d150a6e1dd4ab31c"
+SRC_URI[predicates-3.0.4.sha256sum] = "6dfc28575c2e3f19cb3c73b93af36460ae898d426eba6fc15b9bd2a5220758a0"
+SRC_URI[predicates-core-1.0.6.sha256sum] = "b794032607612e7abeb4db69adb4e33590fa6cf1149e95fd7cb00e634b92f174"
+SRC_URI[predicates-tree-1.0.9.sha256sum] = "368ba315fb8c5052ab692e68a0eefec6ec57b23a36959c14496f0b0df2c0cecf"
+SRC_URI[proc-macro-crate-2.0.1.sha256sum] = "97dc5fea232fc28d2f597b37c4876b348a40e33f3b02cc975c8d006d78d94b1a"
+SRC_URI[proc-macro-error-1.0.4.sha256sum] = "da25490ff9892aab3fcf7c36f08cfb902dd3e71ca0f9f9517bea02a73a5ce38c"
+SRC_URI[proc-macro-error-attr-1.0.4.sha256sum] = "a1be40180e52ecc98ad80b184934baf3d0d29f979574e439af5a55274b35f869"
+SRC_URI[proc-macro2-1.0.70.sha256sum] = "39278fbbf5fb4f646ce651690877f89d1c5811a3d4acb27700c1cb3cdb78fd3b"
+SRC_URI[proptest-1.4.0.sha256sum] = "31b476131c3c86cb68032fdc5cb6d5a1045e3e42d96b69fa599fd77701e1f5bf"
+SRC_URI[quick-error-1.2.3.sha256sum] = "a1d01941d82fa2ab50be1e79e6714289dd7cde78eba4c074bc5a4374f650dfe0"
+SRC_URI[quick-error-2.0.1.sha256sum] = "a993555f31e5a609f617c12db6250dedcac1b0a85076912c436e6fc9b2c8e6a3"
+SRC_URI[quote-1.0.33.sha256sum] = "5267fca4496028628a95160fc423a33e8b2e6af8a5302579e322e4b520293cae"
+SRC_URI[rand-0.8.5.sha256sum] = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404"
+SRC_URI[rand_chacha-0.3.1.sha256sum] = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88"
+SRC_URI[rand_core-0.6.4.sha256sum] = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c"
+SRC_URI[rand_xorshift-0.3.0.sha256sum] = "d25bf25ec5ae4a3f1b92f929810509a2f53d7dca2f50b794ff57e3face536c8f"
+SRC_URI[rawpointer-0.2.1.sha256sum] = "60a357793950651c4ed0f3f52338f53b2f809f32d83a07f72909fa13e4c6c1e3"
+SRC_URI[rayon-1.8.0.sha256sum] = "9c27db03db7734835b3f53954b534c91069375ce6ccaa2e065441e07d9b6cdb1"
+SRC_URI[rayon-core-1.12.0.sha256sum] = "5ce3fb6ad83f861aac485e76e1985cd109d9a3713802152be56c3b1f0e0658ed"
+SRC_URI[rctree-0.5.0.sha256sum] = "3b42e27ef78c35d3998403c1d26f3efd9e135d3e5121b0a4845cc5cc27547f4f"
+SRC_URI[redox_syscall-0.4.1.sha256sum] = "4722d768eff46b75989dd134e5c353f0d6296e5aaa3132e776cbdb56be7731aa"
+SRC_URI[regex-1.10.2.sha256sum] = "380b951a9c5e80ddfd6136919eef32310721aa4aacd4889a8d39124b026ab343"
+SRC_URI[regex-automata-0.4.3.sha256sum] = "5f804c7828047e88b2d32e2d7fe5a105da8ee3264f01902f796c8e067dc2483f"
+SRC_URI[regex-syntax-0.8.2.sha256sum] = "c08c74e62047bb2de4ff487b251e4a92e24f48745648451635cec7d591162d9f"
+SRC_URI[rgb-0.8.37.sha256sum] = "05aaa8004b64fd573fc9d002f4e632d51ad4f026c2b5ba95fcb6c2f32c2c47d8"
+SRC_URI[rustix-0.38.28.sha256sum] = "72e572a5e8ca657d7366229cdde4bd14c4eb5499a9573d4d366fe1b599daa316"
+SRC_URI[rusty-fork-0.3.0.sha256sum] = "cb3dcc6e454c328bb824492db107ab7c0ae8fcffe4ad210136ef014458c1bc4f"
+SRC_URI[ryu-1.0.16.sha256sum] = "f98d2aa92eebf49b69786be48e4477826b256916e84a57ff2a4f21923b48eb4c"
+SRC_URI[safe_arch-0.7.1.sha256sum] = "f398075ce1e6a179b46f51bd88d0598b92b00d3551f1a2d4ac49e771b56ac354"
+SRC_URI[same-file-1.0.6.sha256sum] = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502"
+SRC_URI[scopeguard-1.2.0.sha256sum] = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49"
+SRC_URI[selectors-0.25.0.sha256sum] = "4eb30575f3638fc8f6815f448d50cb1a2e255b0897985c8c59f4d37b72a07b06"
+SRC_URI[serde-1.0.193.sha256sum] = "25dd9975e68d0cb5aa1120c288333fc98731bd1dd12f561e468ea4728c042b89"
+SRC_URI[serde_derive-1.0.193.sha256sum] = "43576ca501357b9b071ac53cdc7da8ef0cbd9493d8df094cd821777ea6e894d3"
+SRC_URI[serde_json-1.0.108.sha256sum] = "3d1c7e3eac408d115102c4c24ad393e0821bb3a5df4d506a80f85f7a742a526b"
+SRC_URI[serde_spanned-0.6.4.sha256sum] = "12022b835073e5b11e90a14f86838ceb1c8fb0325b72416845c487ac0fa95e80"
+SRC_URI[servo_arc-0.3.0.sha256sum] = "d036d71a959e00c77a63538b90a6c2390969f9772b096ea837205c6bd0491a44"
+SRC_URI[simba-0.8.1.sha256sum] = "061507c94fc6ab4ba1c9a0305018408e312e17c041eb63bef8aa726fa33aceae"
+SRC_URI[simd-adler32-0.3.7.sha256sum] = "d66dc143e6b11c1eddc06d5c423cfc97062865baf299914ab64caa38182078fe"
+SRC_URI[siphasher-0.3.11.sha256sum] = "38b58827f4464d87d377d175e90bf58eb00fd8716ff0a62f80356b5e61555d0d"
+SRC_URI[slab-0.4.9.sha256sum] = "8f92a496fb766b417c996b9c5e57daf2f7ad3b0bebe1ccfca4856390e3d3bb67"
+SRC_URI[smallvec-1.11.2.sha256sum] = "4dccd0940a2dcdf68d092b8cbab7dc0ad8fa938bf95787e1b916b0e3d0e8e970"
+SRC_URI[stable_deref_trait-1.2.0.sha256sum] = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3"
+SRC_URI[string_cache-0.8.7.sha256sum] = "f91138e76242f575eb1d3b38b4f1362f10d3a43f47d182a5b359af488a02293b"
+SRC_URI[string_cache_codegen-0.5.2.sha256sum] = "6bb30289b722be4ff74a408c3cc27edeaad656e06cb1fe8fa9231fa59c728988"
+SRC_URI[strsim-0.10.0.sha256sum] = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623"
+SRC_URI[syn-1.0.109.sha256sum] = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237"
+SRC_URI[syn-2.0.41.sha256sum] = "44c8b28c477cc3bf0e7966561e3460130e1255f7a1cf71931075f1c5e7a7e269"
+SRC_URI[system-deps-6.2.0.sha256sum] = "2a2d580ff6a20c55dfb86be5f9c238f67835d0e81cbdea8bf5680e0897320331"
+SRC_URI[target-lexicon-0.12.12.sha256sum] = "14c39fd04924ca3a864207c66fc2cd7d22d7c016007f9ce846cbb9326331930a"
+SRC_URI[tempfile-3.8.1.sha256sum] = "7ef1adac450ad7f4b3c28589471ade84f25f731a7a0fe30d71dfa9f60fd808e5"
+SRC_URI[tendril-0.4.3.sha256sum] = "d24a120c5fc464a3458240ee02c299ebcb9d67b5249c8848b09d639dca8d7bb0"
+SRC_URI[termtree-0.4.1.sha256sum] = "3369f5ac52d5eb6ab48c6b4ffdc8efbcad6b89c765749064ba298f2c68a16a76"
+SRC_URI[thiserror-1.0.50.sha256sum] = "f9a7210f5c9a7156bb50aa36aed4c95afb51df0df00713949448cf9e97d382d2"
+SRC_URI[thiserror-impl-1.0.50.sha256sum] = "266b2e40bc00e5a6c09c3584011e08b06f123c00362c92b975ba9843aaaa14b8"
+SRC_URI[time-0.3.30.sha256sum] = "c4a34ab300f2dee6e562c10a046fc05e358b29f9bf92277f30c3c8d82275f6f5"
+SRC_URI[time-core-0.1.2.sha256sum] = "ef927ca75afb808a4d64dd374f00a2adf8d0fcff8e7b184af886c3c87ec4a3f3"
+SRC_URI[time-macros-0.2.15.sha256sum] = "4ad70d68dba9e1f8aceda7aa6711965dfec1cac869f311a51bd08b3a2ccbce20"
+SRC_URI[tinytemplate-1.2.1.sha256sum] = "be4d6b5f19ff7664e8c98d03e2139cb510db9b0a60b55f8e8709b689d939b6bc"
+SRC_URI[tinyvec-1.6.0.sha256sum] = "87cc5ceb3875bb20c2890005a4e226a4651264a5c75edb2421b52861a0a0cb50"
+SRC_URI[tinyvec_macros-0.1.1.sha256sum] = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20"
+SRC_URI[toml-0.8.2.sha256sum] = "185d8ab0dfbb35cf1399a6344d8484209c088f75f8f68230da55d48d95d43e3d"
+SRC_URI[toml_datetime-0.6.3.sha256sum] = "7cda73e2f1397b1262d6dfdcef8aafae14d1de7748d66822d3bfeeb6d03e5e4b"
+SRC_URI[toml_edit-0.20.2.sha256sum] = "396e4d48bbb2b7554c944bde63101b5ae446cff6ec4a24227428f15eb72ef338"
+SRC_URI[typenum-1.17.0.sha256sum] = "42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825"
+SRC_URI[unarray-0.1.4.sha256sum] = "eaea85b334db583fe3274d12b4cd1880032beab409c0d774be044d4480ab9a94"
+SRC_URI[unicode-bidi-0.3.14.sha256sum] = "6f2528f27a9eb2b21e69c95319b30bd0efd85d09c379741b0f78ea1d86be2416"
+SRC_URI[unicode-ident-1.0.12.sha256sum] = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b"
+SRC_URI[unicode-normalization-0.1.22.sha256sum] = "5c5713f0fc4b5db668a2ac63cdb7bb4469d8c9fed047b1d0292cc7b0ce2ba921"
+SRC_URI[url-2.5.0.sha256sum] = "31e6302e3bb753d46e83516cae55ae196fc0c309407cf11ab35cc51a4c2a4633"
+SRC_URI[utf-8-0.7.6.sha256sum] = "09cc8ee72d2a9becf2f2febe0205bbed8fc6615b7cb429ad062dc7b7ddd036a9"
+SRC_URI[utf8parse-0.2.1.sha256sum] = "711b9620af191e0cdc7468a8d14e709c3dcdb115b36f838e601583af800a370a"
+SRC_URI[version-compare-0.1.1.sha256sum] = "579a42fc0b8e0c63b76519a339be31bed574929511fa53c1a3acae26eb258f29"
+SRC_URI[version_check-0.9.4.sha256sum] = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f"
+SRC_URI[wait-timeout-0.2.0.sha256sum] = "9f200f5b12eb75f8c1ed65abd4b2db8a6e1b138a20de009dacee265a2498f3f6"
+SRC_URI[walkdir-2.4.0.sha256sum] = "d71d857dc86794ca4c280d616f7da00d2dbfd8cd788846559a6813e6aa4b54ee"
+SRC_URI[wasi-0.11.0+wasi-snapshot-preview1.sha256sum] = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423"
+SRC_URI[wasm-bindgen-0.2.89.sha256sum] = "0ed0d4f68a3015cc185aff4db9506a015f4b96f95303897bfa23f846db54064e"
+SRC_URI[wasm-bindgen-backend-0.2.89.sha256sum] = "1b56f625e64f3a1084ded111c4d5f477df9f8c92df113852fa5a374dbda78826"
+SRC_URI[wasm-bindgen-macro-0.2.89.sha256sum] = "0162dbf37223cd2afce98f3d0785506dcb8d266223983e4b5b525859e6e182b2"
+SRC_URI[wasm-bindgen-macro-support-0.2.89.sha256sum] = "f0eb82fcb7930ae6219a7ecfd55b217f5f0893484b7a13022ebb2b2bf20b5283"
+SRC_URI[wasm-bindgen-shared-0.2.89.sha256sum] = "7ab9b36309365056cd639da3134bf87fa8f3d86008abf99e612384a6eecd459f"
+SRC_URI[web-sys-0.3.66.sha256sum] = "50c24a44ec86bb68fbecd1b3efed7e85ea5621b39b35ef2766b66cd984f8010f"
+SRC_URI[weezl-0.1.7.sha256sum] = "9193164d4de03a926d909d3bc7c30543cecb35400c02114792c2cae20d5e2dbb"
+SRC_URI[wide-0.7.13.sha256sum] = "c68938b57b33da363195412cfc5fc37c9ed49aa9cfe2156fde64b8d2c9498242"
+SRC_URI[winapi-0.3.9.sha256sum] = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419"
+SRC_URI[winapi-i686-pc-windows-gnu-0.4.0.sha256sum] = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"
+SRC_URI[winapi-util-0.1.6.sha256sum] = "f29e6f9198ba0d26b4c9f07dbe6f9ed633e1f3d5b8b414090084349e46a52596"
+SRC_URI[winapi-x86_64-pc-windows-gnu-0.4.0.sha256sum] = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
+SRC_URI[windows-core-0.51.1.sha256sum] = "f1f8cf84f35d2db49a46868f947758c7a1138116f7fac3bc844f43ade1292e64"
+SRC_URI[windows-sys-0.48.0.sha256sum] = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9"
+SRC_URI[windows-sys-0.52.0.sha256sum] = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d"
+SRC_URI[windows-targets-0.48.5.sha256sum] = "9a2fa6e2155d7247be68c096456083145c183cbbbc2764150dda45a87197940c"
+SRC_URI[windows-targets-0.52.0.sha256sum] = "8a18201040b24831fbb9e4eb208f8892e1f50a37feb53cc7ff887feb8f50e7cd"
+SRC_URI[windows_aarch64_gnullvm-0.48.5.sha256sum] = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8"
+SRC_URI[windows_aarch64_gnullvm-0.52.0.sha256sum] = "cb7764e35d4db8a7921e09562a0304bf2f93e0a51bfccee0bd0bb0b666b015ea"
+SRC_URI[windows_aarch64_msvc-0.48.5.sha256sum] = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc"
+SRC_URI[windows_aarch64_msvc-0.52.0.sha256sum] = "bbaa0368d4f1d2aaefc55b6fcfee13f41544ddf36801e793edbbfd7d7df075ef"
+SRC_URI[windows_i686_gnu-0.48.5.sha256sum] = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e"
+SRC_URI[windows_i686_gnu-0.52.0.sha256sum] = "a28637cb1fa3560a16915793afb20081aba2c92ee8af57b4d5f28e4b3e7df313"
+SRC_URI[windows_i686_msvc-0.48.5.sha256sum] = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406"
+SRC_URI[windows_i686_msvc-0.52.0.sha256sum] = "ffe5e8e31046ce6230cc7215707b816e339ff4d4d67c65dffa206fd0f7aa7b9a"
+SRC_URI[windows_x86_64_gnu-0.48.5.sha256sum] = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e"
+SRC_URI[windows_x86_64_gnu-0.52.0.sha256sum] = "3d6fa32db2bc4a2f5abeacf2b69f7992cd09dca97498da74a151a3132c26befd"
+SRC_URI[windows_x86_64_gnullvm-0.48.5.sha256sum] = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc"
+SRC_URI[windows_x86_64_gnullvm-0.52.0.sha256sum] = "1a657e1e9d3f514745a572a6846d3c7aa7dbe1658c056ed9c3344c4109a6949e"
+SRC_URI[windows_x86_64_msvc-0.48.5.sha256sum] = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538"
+SRC_URI[windows_x86_64_msvc-0.52.0.sha256sum] = "dff9641d1cd4be8d1a070daf9e3773c5f67e78b4d9d42263020c057706765c04"
+SRC_URI[winnow-0.5.28.sha256sum] = "6c830786f7720c2fd27a1a0e27a709dbd3c4d009b56d098fc742d4f4eab91fe2"
+SRC_URI[xml5ever-0.17.0.sha256sum] = "4034e1d05af98b51ad7214527730626f019682d797ba38b51689212118d8e650"
+SRC_URI[yeslogic-fontconfig-sys-4.0.1.sha256sum] = "ec657fd32bbcbeaef5c7bc8e10b3db95b143fab8db0a50079773dbf936fd4f73"
diff --git a/meta/recipes-gnome/librsvg/librsvg/0001-Makefile.am-pass-rust-target-to-cargo-also-when-not-.patch b/meta/recipes-gnome/librsvg/librsvg/0001-Makefile.am-pass-rust-target-to-cargo-also-when-not-.patch
index d7487c234c..c34a610522 100644
--- a/meta/recipes-gnome/librsvg/librsvg/0001-Makefile.am-pass-rust-target-to-cargo-also-when-not-.patch
+++ b/meta/recipes-gnome/librsvg/librsvg/0001-Makefile.am-pass-rust-target-to-cargo-also-when-not-.patch
@@ -1,4 +1,4 @@
-From 95b2cd0169cb1b4694c2bce67169b1aa1d5e2be0 Mon Sep 17 00:00:00 2001
+From d0f4f5b3ae900b9d092407e29b5372d93910c18c Mon Sep 17 00:00:00 2001
From: Alexander Kanavin <alex@linutronix.de>
Date: Tue, 21 Sep 2021 16:27:14 +0200
Subject: [PATCH] Makefile.am: pass rust target to cargo also when not cross
@@ -14,10 +14,10 @@ Signed-off-by: Alexander Kanavin <alex@linutronix.de>
2 files changed, 1 insertion(+), 5 deletions(-)
diff --git a/Makefile.am b/Makefile.am
-index e93714e..effe5d3 100644
+index 2a89954..4e296d3 100644
--- a/Makefile.am
+++ b/Makefile.am
-@@ -137,9 +137,7 @@ else
+@@ -173,9 +173,7 @@ else
CARGO_RELEASE_ARGS=--release
endif
@@ -28,10 +28,10 @@ index e93714e..effe5d3 100644
CARGO_VERBOSE = $(cargo_verbose_$(V))
cargo_verbose_ = $(cargo_verbose_$(AM_DEFAULT_VERBOSITY))
diff --git a/configure.ac b/configure.ac
-index 41590ca..84cd56b 100644
+index 42215cf..2499c96 100644
--- a/configure.ac
+++ b/configure.ac
-@@ -255,9 +255,7 @@ AC_ARG_VAR(RUST_TARGET, [Set rust target (default=$host)])
+@@ -292,9 +292,7 @@ AC_ARG_VAR(RUST_TARGET, [Set rust target (default=$host)])
: ${RUST_TARGET:=$host}
AM_CONDITIONAL([CROSS_COMPILING], [test "x$cross_compiling" = xyes])
@@ -42,6 +42,3 @@ index 41590ca..84cd56b 100644
AC_SUBST([RUST_TARGET_SUBDIR])
dnl ===========================================================================
---
-2.20.1
-
diff --git a/meta/recipes-gnome/librsvg/librsvg/0001-system-deps-src-lib.rs-do-not-probe-into-harcoded-li.patch b/meta/recipes-gnome/librsvg/librsvg/0001-system-deps-src-lib.rs-do-not-probe-into-harcoded-li.patch
deleted file mode 100644
index 992ff8dee7..0000000000
--- a/meta/recipes-gnome/librsvg/librsvg/0001-system-deps-src-lib.rs-do-not-probe-into-harcoded-li.patch
+++ /dev/null
@@ -1,51 +0,0 @@
-From c0b0ef51b3c997a1c20ef9381ba2201ed477f609 Mon Sep 17 00:00:00 2001
-From: Alexander Kanavin <alex@linutronix.de>
-Date: Tue, 21 Sep 2021 16:54:23 +0200
-Subject: [PATCH] system-deps/src/lib.rs: do not probe into harcoded list of
- targets
-
-Oe-core defines custom targets, and this probe fails.
-
-Upstream-Status: Inappropriate [oe-core specific]
-Signed-off-by: Alexander Kanavin <alex@linutronix.de>
-
----
- vendor/system-deps/.cargo-checksum.json | 2 +-
- vendor/system-deps/src/lib.rs | 16 +---------------
- 2 files changed, 2 insertions(+), 16 deletions(-)
-
-diff --git a/vendor/system-deps/.cargo-checksum.json b/vendor/system-deps/.cargo-checksum.json
-index 862df8b..931748e 100644
---- a/vendor/system-deps/.cargo-checksum.json
-+++ b/vendor/system-deps/.cargo-checksum.json
-@@ -1 +1 @@
--{"files":{"Cargo.toml":"927df7476ebf5f5983169cfd973f4c95b84da17caeb20d33cccf50e326af5316","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"23f18e03dc49df91622fe2a76176497404e46ced8a715d9d2b67a7446571cca3","README.md":"3fe7396637bf9233908f41c6001cfcb00a379225e06e36e508c8b3d7264a8aae","src/lib.rs":"c38fd96ca3233ebee3bb7e37ca8f8a7a2685cdd7fcccf0210eaa879aa91dc684","src/metadata.rs":"657bc1b77e949e4800f9dd808790ffa535820e1658d412121a1da548e0cdd02c","src/test.rs":"9d5f8c1fb7a821352d6ba75cb005ead3e36b4f359e61feb605ab8a36d4fd31f8","src/tests/lib/libteststatic.a":"e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855","src/tests/testanotherlib.pc":"bb4fd942324e6d49ce3becd827aa5c948d1924ca6681904a3695c19b1424eb3c","src/tests/testdata.pc":"43f481e989c03674fed5ef78c6420b3f8d36a2ce001928d86c418d1844acd5e7","src/tests/testlib-2.0.pc":"152eb0c70c14c3d948118408f3d1fd3bb7531b02aa792db85bd957f7db90b45b","src/tests/testlib-3.0.pc":"cd39c2ef88f6828c9291150cc4b624e769abef484674eaebaa4f67979501315f","src/tests/testlib.pc":"75c0d8a5345f65794f583c83e1cf0dbf3385af6e6abea1d61bb86eef707a52db","src/tests/teststaticlib.pc":"77df23f6c7c1d47aff18453b47c87e53ec8a96017546e0a55c8c4d1e13b70134","src/tests/toml-missing-file/no-cargo-toml-here":"6ab4da4b56f15315df6538610cfcd2ba3d0f9a7a8414678ff00ab5a78f7d41fa"},"package":"a1a45a1c4c9015217e12347f2a411b57ce2c4fc543913b14b6fe40483328e709"}
-\ No newline at end of file
-+{"files":{"Cargo.toml":"927df7476ebf5f5983169cfd973f4c95b84da17caeb20d33cccf50e326af5316","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"23f18e03dc49df91622fe2a76176497404e46ced8a715d9d2b67a7446571cca3","README.md":"3fe7396637bf9233908f41c6001cfcb00a379225e06e36e508c8b3d7264a8aae","src/lib.rs":"534efc2f977d8c67d5be3a4c3e81093a6c00ab2de1102404166b1573ad96dbd2","src/metadata.rs":"657bc1b77e949e4800f9dd808790ffa535820e1658d412121a1da548e0cdd02c","src/test.rs":"9d5f8c1fb7a821352d6ba75cb005ead3e36b4f359e61feb605ab8a36d4fd31f8","src/tests/lib/libteststatic.a":"e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855","src/tests/testanotherlib.pc":"bb4fd942324e6d49ce3becd827aa5c948d1924ca6681904a3695c19b1424eb3c","src/tests/testdata.pc":"43f481e989c03674fed5ef78c6420b3f8d36a2ce001928d86c418d1844acd5e7","src/tests/testlib-2.0.pc":"152eb0c70c14c3d948118408f3d1fd3bb7531b02aa792db85bd957f7db90b45b","src/tests/testlib-3.0.pc":"cd39c2ef88f6828c9291150cc4b624e769abef484674eaebaa4f67979501315f","src/tests/testlib.pc":"75c0d8a5345f65794f583c83e1cf0dbf3385af6e6abea1d61bb86eef707a52db","src/tests/teststaticlib.pc":"77df23f6c7c1d47aff18453b47c87e53ec8a96017546e0a55c8c4d1e13b70134","src/tests/toml-missing-file/no-cargo-toml-here":"6ab4da4b56f15315df6538610cfcd2ba3d0f9a7a8414678ff00ab5a78f7d41fa"},"package":"a1a45a1c4c9015217e12347f2a411b57ce2c4fc543913b14b6fe40483328e709"}
-diff --git a/vendor/system-deps/src/lib.rs b/vendor/system-deps/src/lib.rs
-index 45ab1ce..f87d1ec 100644
---- a/vendor/system-deps/src/lib.rs
-+++ b/vendor/system-deps/src/lib.rs
-@@ -800,21 +800,7 @@ impl Config {
- }
-
- fn check_cfg(&self, cfg: &cfg_expr::Expression) -> Result<bool, Error> {
-- use cfg_expr::{targets::get_builtin_target_by_triple, Predicate};
--
-- let target = self
-- .env
-- .get("TARGET")
-- .expect("no TARGET env variable defined");
-- let target = get_builtin_target_by_triple(&target)
-- .unwrap_or_else(|| panic!("Invalid TARGET: {}", target));
--
-- let res = cfg.eval(|pred| match pred {
-- Predicate::Target(tp) => Some(tp.matches(target)),
-- _ => None,
-- });
--
-- res.ok_or_else(|| Error::UnsupportedCfg(cfg.original().to_string()))
-+ Ok(true)
- }
- }
-
diff --git a/meta/recipes-gnome/librsvg/librsvg_2.54.3.bb b/meta/recipes-gnome/librsvg/librsvg_2.54.3.bb
deleted file mode 100644
index 3578898692..0000000000
--- a/meta/recipes-gnome/librsvg/librsvg_2.54.3.bb
+++ /dev/null
@@ -1,75 +0,0 @@
-SUMMARY = "Library for rendering SVG files"
-DESCRIPTION = "A small library to render Scalable Vector Graphics (SVG), \
-associated with the GNOME Project. It renders SVG files to Cairo surfaces. \
-Cairo is the 2D, antialiased drawing library that GNOME uses to draw things to \
-the screen or to generate output for printing."
-HOMEPAGE = "https://gitlab.gnome.org/GNOME/librsvg"
-BUGTRACKER = "https://gitlab.gnome.org/GNOME/librsvg/issues"
-
-LICENSE = "LGPL-2.1-or-later"
-LIC_FILES_CHKSUM = "file://COPYING.LIB;md5=4fbd65380cdd255951079008b364516c \
- "
-
-SECTION = "x11/utils"
-DEPENDS = "cairo gdk-pixbuf glib-2.0 libxml2 pango python3-docutils-native"
-BBCLASSEXTEND = "native nativesdk"
-
-inherit gnomebase pixbufcache upstream-version-is-even gobject-introspection rust vala gi-docgen
-
-SRC_URI += "file://0001-Makefile.am-pass-rust-target-to-cargo-also-when-not-.patch \
- file://0001-system-deps-src-lib.rs-do-not-probe-into-harcoded-li.patch \
- "
-
-SRC_URI[archive.sha256sum] = "66158f2ef46dde260026846c4da102e4a9dd4e5293010f30949c6cc26dd6efe8"
-
-# librsvg is still autotools-based, but is calling cargo from its automake-driven makefiles
-# so we cannot use cargo class directly, but still need bits and pieces from it
-# for cargo to be happy
-BASEDEPENDS:append = " cargo-native"
-
-export RUST_BACKTRACE = "full"
-export RUSTFLAGS
-export RUST_TARGET_PATH
-
-export RUST_TARGET = "${HOST_SYS}"
-
-RUSTFLAGS:append:mips = " --cfg crossbeam_no_atomic_64"
-RUSTFLAGS:append:mipsel = " --cfg crossbeam_no_atomic_64"
-RUSTFLAGS:append:powerpc = " --cfg crossbeam_no_atomic_64"
-RUSTFLAGS:append:riscv32 = " --cfg crossbeam_no_atomic_64"
-
-# rust-cross writes the target linker binary into target json definition without any flags.
-# This breaks here because the linker isn't going to work without at least knowing where
-# the sysroot is. So copy the json to workdir, and patch in the path to wrapper from rust class
-# which supplies the needed flags.
-do_compile:prepend() {
- cp ${STAGING_LIBDIR_NATIVE}/rustlib/${HOST_SYS}.json ${WORKDIR}
- cp ${STAGING_LIBDIR_NATIVE}/rustlib/${BUILD_SYS}.json ${WORKDIR}
- sed -ie 's,"linker": ".*","linker": "${RUST_TARGET_CC}",g' ${WORKDIR}/${HOST_SYS}.json
- RUST_TARGET_PATH="${WORKDIR}"
- export RUST_TARGET_PATH
-}
-
-# Issue only on windows
-CVE_CHECK_IGNORE += "CVE-2018-1000041"
-
-CACHED_CONFIGUREVARS = "ac_cv_path_GDK_PIXBUF_QUERYLOADERS=${STAGING_LIBDIR_NATIVE}/gdk-pixbuf-2.0/gdk-pixbuf-query-loaders"
-
-PACKAGECONFIG ??= "gdkpixbuf"
-# The gdk-pixbuf loader
-PACKAGECONFIG[gdkpixbuf] = "--enable-pixbuf-loader,--disable-pixbuf-loader,gdk-pixbuf-native"
-
-do_install:append() {
- # Loadable modules don't need .a or .la on Linux
- rm -f ${D}${libdir}/gdk-pixbuf-2.0/*/loaders/*.a ${D}${libdir}/gdk-pixbuf-2.0/*/loaders/*.la
-}
-
-PACKAGES =+ "librsvg-gtk rsvg"
-FILES:rsvg = "${bindir}/rsvg* \
- ${datadir}/pixmaps/svg-viewer.svg \
- ${datadir}/themes"
-FILES:librsvg-gtk = "${libdir}/gdk-pixbuf-2.0/*/*/*.so \
- ${datadir}/thumbnailers/librsvg.thumbnailer"
-RRECOMMENDS:librsvg-gtk = "gdk-pixbuf-bin"
-
-PIXBUF_PACKAGES = "librsvg-gtk"
diff --git a/meta/recipes-gnome/librsvg/librsvg_2.57.1.bb b/meta/recipes-gnome/librsvg/librsvg_2.57.1.bb
new file mode 100644
index 0000000000..4b52d4062b
--- /dev/null
+++ b/meta/recipes-gnome/librsvg/librsvg_2.57.1.bb
@@ -0,0 +1,79 @@
+SUMMARY = "Library for rendering SVG files"
+DESCRIPTION = "A small library to render Scalable Vector Graphics (SVG), \
+associated with the GNOME Project. It renders SVG files to Cairo surfaces. \
+Cairo is the 2D, antialiased drawing library that GNOME uses to draw things to \
+the screen or to generate output for printing."
+HOMEPAGE = "https://gitlab.gnome.org/GNOME/librsvg"
+BUGTRACKER = "https://gitlab.gnome.org/GNOME/librsvg/issues"
+
+LICENSE = "LGPL-2.1-or-later"
+LIC_FILES_CHKSUM = "file://COPYING.LIB;md5=4fbd65380cdd255951079008b364516c \
+ "
+
+SECTION = "x11/utils"
+DEPENDS = "cairo gdk-pixbuf glib-2.0 libxml2 pango python3-docutils-native"
+BBCLASSEXTEND = "native nativesdk"
+
+GNOMEBASEBUILDCLASS = "autotools"
+inherit cargo_common gnomebase pixbufcache gobject-introspection rust vala gi-docgen cargo-update-recipe-crates
+
+require ${BPN}-crates.inc
+
+SRC_URI += " \
+ file://0001-Makefile.am-pass-rust-target-to-cargo-also-when-not-.patch \
+ "
+
+SRC_URI[archive.sha256sum] = "074671a3ed6fbcd67cae2a40e539107f4f097ca8a4ab1a894c05e2524ff340ef"
+
+UPSTREAM_CHECK_REGEX = "librsvg-(?P<pver>\d+\.\d+\.(?!9\d+)\d+)"
+
+# librsvg is still autotools-based, but is calling cargo from its automake-driven makefiles
+# so we cannot use cargo class directly, but still need bits and pieces from it
+# for cargo to be happy
+BASEDEPENDS:append = " cargo-native"
+
+export RUST_BACKTRACE = "full"
+export RUSTFLAGS
+
+export RUST_TARGET = "${RUST_HOST_SYS}"
+
+RUSTFLAGS:append:mips = " --cfg crossbeam_no_atomic_64"
+RUSTFLAGS:append:mipsel = " --cfg crossbeam_no_atomic_64"
+RUSTFLAGS:append:powerpc = " --cfg crossbeam_no_atomic_64"
+
+do_configure[postfuncs] += "cargo_common_do_configure"
+
+inherit rust-target-config
+
+# rust-cross writes the target linker binary into target json definition without any flags.
+# This breaks here because the linker isn't going to work without at least knowing where
+# the sysroot is. So copy the json to workdir, and patch in the path to wrapper from rust class
+# which supplies the needed flags.
+do_compile:prepend() {
+ sed -ie 's,"linker": ".*","linker": "${RUST_TARGET_CC}",g' ${RUST_TARGETS_DIR}/${RUST_HOST_SYS}.json
+}
+
+CVE_STATUS[CVE-2018-1000041] = "not-applicable-platform: Issue only applies on Windows"
+
+CACHED_CONFIGUREVARS = "ac_cv_path_GDK_PIXBUF_QUERYLOADERS=${STAGING_LIBDIR_NATIVE}/gdk-pixbuf-2.0/gdk-pixbuf-query-loaders"
+
+PACKAGECONFIG ??= "gdkpixbuf"
+PACKAGECONFIG:append:class-target = " ${@bb.utils.contains('GI_DATA_ENABLED', 'True', 'vala', '', d)}"
+# The gdk-pixbuf loader
+PACKAGECONFIG[gdkpixbuf] = "--enable-pixbuf-loader,--disable-pixbuf-loader,gdk-pixbuf-native"
+PACKAGECONFIG[vala] = "--enable-vala,--disable-vala"
+
+do_install:append() {
+ # Loadable modules don't need .a or .la on Linux
+ rm -f ${D}${libdir}/gdk-pixbuf-2.0/*/loaders/*.a ${D}${libdir}/gdk-pixbuf-2.0/*/loaders/*.la
+}
+
+PACKAGES =+ "librsvg-gtk rsvg"
+FILES:rsvg = "${bindir}/rsvg* \
+ ${datadir}/pixmaps/svg-viewer.svg \
+ ${datadir}/themes"
+FILES:librsvg-gtk = "${libdir}/gdk-pixbuf-2.0/*/*/*.so \
+ ${datadir}/thumbnailers/librsvg.thumbnailer"
+RRECOMMENDS:librsvg-gtk = "gdk-pixbuf-bin"
+
+PIXBUF_PACKAGES = "librsvg-gtk"
diff --git a/meta/recipes-gnome/libsecret/libsecret_0.20.5.bb b/meta/recipes-gnome/libsecret/libsecret_0.20.5.bb
deleted file mode 100644
index 9b09c3bee4..0000000000
--- a/meta/recipes-gnome/libsecret/libsecret_0.20.5.bb
+++ /dev/null
@@ -1,26 +0,0 @@
-SUMMARY = "libsecret is a library for storing and retrieving passwords and other secrets"
-DESCRIPTION = "A GObject-based library for accessing the Secret Service API of \
-the freedesktop.org project, a cross-desktop effort to access passwords, \
-tokens and other types of secrets. libsecret provides a convenient wrapper \
-for these methods so consumers do not have to call the low-level DBus methods."
-LICENSE = "LGPL-2.1-only"
-HOMEPAGE = "https://github.com/GNOME/libsecret"
-BUGTRACKER = "https://gitlab.gnome.org/GNOME/libsecret/issues"
-LIC_FILES_CHKSUM = "file://COPYING;md5=23c2a5e0106b99d75238986559bb5fc6"
-
-GNOMEBASEBUILDCLASS = "meson"
-
-inherit gnomebase gi-docgen vala gobject-introspection manpages
-
-DEPENDS += "glib-2.0 libgcrypt gettext-native"
-
-SRC_URI[archive.sha256sum] = "3fb3ce340fcd7db54d87c893e69bfc2b1f6e4d4b279065ffe66dac9f0fd12b4d"
-
-GTKDOC_MESON_OPTION = 'gtk_doc'
-
-PACKAGECONFIG[manpages] = "-Dmanpage=true,-Dmanpage=false,libxslt-native xmlto-native"
-
-# http://errors.yoctoproject.org/Errors/Details/20228/
-ARM_INSTRUCTION_SET:armv4 = "arm"
-ARM_INSTRUCTION_SET:armv5 = "arm"
-ARM_INSTRUCTION_SET:armv6 = "arm"
diff --git a/meta/recipes-gnome/libsecret/libsecret_0.21.4.bb b/meta/recipes-gnome/libsecret/libsecret_0.21.4.bb
new file mode 100644
index 0000000000..88c3c73510
--- /dev/null
+++ b/meta/recipes-gnome/libsecret/libsecret_0.21.4.bb
@@ -0,0 +1,25 @@
+SUMMARY = "libsecret is a library for storing and retrieving passwords and other secrets"
+DESCRIPTION = "A GObject-based library for accessing the Secret Service API of \
+the freedesktop.org project, a cross-desktop effort to access passwords, \
+tokens and other types of secrets. libsecret provides a convenient wrapper \
+for these methods so consumers do not have to call the low-level DBus methods."
+LICENSE = "LGPL-2.1-only"
+HOMEPAGE = "https://github.com/GNOME/libsecret"
+BUGTRACKER = "https://gitlab.gnome.org/GNOME/libsecret/issues"
+LIC_FILES_CHKSUM = "file://COPYING;md5=23c2a5e0106b99d75238986559bb5fc6"
+
+
+inherit gnomebase gi-docgen vala gobject-introspection manpages
+
+DEPENDS += "glib-2.0 libgcrypt gettext-native"
+
+SRC_URI[archive.sha256sum] = "163d08d783be6d4ab9a979ceb5a4fecbc1d9660d3c34168c581301cd53912b20"
+
+GTKDOC_MESON_OPTION = 'gtk_doc'
+
+PACKAGECONFIG[manpages] = "-Dmanpage=true,-Dmanpage=false,libxslt-native xmlto-native"
+
+# http://errors.yoctoproject.org/Errors/Details/20228/
+ARM_INSTRUCTION_SET:armv4 = "arm"
+ARM_INSTRUCTION_SET:armv5 = "arm"
+ARM_INSTRUCTION_SET:armv6 = "arm"
diff --git a/meta/recipes-gnome/libxmlb/libxmlb/0001-xb-selftest.c-hardcode-G_TEST_SRCDIR.patch b/meta/recipes-gnome/libxmlb/libxmlb/0001-xb-selftest.c-hardcode-G_TEST_SRCDIR.patch
new file mode 100644
index 0000000000..27081d8749
--- /dev/null
+++ b/meta/recipes-gnome/libxmlb/libxmlb/0001-xb-selftest.c-hardcode-G_TEST_SRCDIR.patch
@@ -0,0 +1,32 @@
+From 8a1aa4c318b8dbe4c0c2b1c4968f867ea6641b32 Mon Sep 17 00:00:00 2001
+From: Markus Volk <f_l_k@t-online.de>
+Date: Sat, 16 Sep 2023 14:02:57 +0200
+Subject: [PATCH] xb-self-test.c: hardcode G_TEST_SRCDIR
+
+This avoids:
+ libxmlb-0.3.14-r0 do_package_qa: QA Issue: File
+ /usr/libexec/installed-tests/libxmlb/xb-self-test in package libxmlb-ptest
+ contains reference to TMPDIR [buildpaths]
+
+and also fixes the runtime for the ptest.
+
+Upstream-Status: Inappropriate [oe-specific]
+
+Signed-off-by: Markus Volk <f_l_k@t-online.de>
+---
+ src/xb-self-test.c | 2 +-
+ 1 file changed, 1 insertion(+), 1 deletion(-)
+
+diff --git a/src/xb-self-test.c b/src/xb-self-test.c
+index 1daca18..2e5b251 100644
+--- a/src/xb-self-test.c
++++ b/src/xb-self-test.c
+@@ -2876,7 +2876,7 @@ xb_speed_func(void)
+ int
+ main(int argc, char **argv)
+ {
+- g_setenv("G_TEST_SRCDIR", SRCDIR, FALSE);
++ g_setenv("G_TEST_SRCDIR", "/usr/libexec/installed-tests/libxmlb", FALSE);
+
+ g_test_init(&argc, &argv, NULL);
+
diff --git a/meta/recipes-gnome/libxmlb/libxmlb/run-ptest b/meta/recipes-gnome/libxmlb/libxmlb/run-ptest
new file mode 100644
index 0000000000..6d0bb95d3a
--- /dev/null
+++ b/meta/recipes-gnome/libxmlb/libxmlb/run-ptest
@@ -0,0 +1,3 @@
+#! /bin/sh
+
+gnome-desktop-testing-runner libxmlb
diff --git a/meta/recipes-gnome/libxmlb/libxmlb_0.3.17.bb b/meta/recipes-gnome/libxmlb/libxmlb_0.3.17.bb
new file mode 100644
index 0000000000..f403857fea
--- /dev/null
+++ b/meta/recipes-gnome/libxmlb/libxmlb_0.3.17.bb
@@ -0,0 +1,25 @@
+SUMMARY = "A library to help create and query binary XML blobs"
+HOMEPAGE = "https://github.com/hughsie/libxmlb"
+LICENSE = "LGPL-2.1-only"
+LIC_FILES_CHKSUM = "file://LICENSE;md5=1803fa9c2c3ce8cb06b4861d75310742"
+
+SRC_URI = " \
+ git://github.com/hughsie/libxmlb.git;branch=main;protocol=https \
+ file://0001-xb-selftest.c-hardcode-G_TEST_SRCDIR.patch \
+ file://run-ptest \
+"
+SRCREV = "db54f1b3254334e59b29b01b6cb666f444746594"
+S = "${WORKDIR}/git"
+
+DEPENDS = "glib-2.0 xz zstd"
+
+inherit gobject-introspection gtk-doc meson ptest-gnome lib_package pkgconfig
+
+PACKAGECONFIG ??= "${@bb.utils.contains('PTEST_ENABLED', '1', 'tests', '', d)}"
+PACKAGECONFIG[tests] = "-Dtests=true,-Dtests=false"
+
+GTKDOC_MESON_OPTION = "gtkdoc"
+
+FILES:${PN} += "${datadir}"
+
+BBCLASSEXTEND = "native"
diff --git a/meta/recipes-graphics/builder/builder_0.1.bb b/meta/recipes-graphics/builder/builder_0.1.bb
index 39be3bd63f..52c9351f93 100644
--- a/meta/recipes-graphics/builder/builder_0.1.bb
+++ b/meta/recipes-graphics/builder/builder_0.1.bb
@@ -1,7 +1,6 @@
SUMMARY = "New user to do specific job"
DESCRIPTION = "This recipe create a new user named ${PN}, who is used for specific jobs like building. The task can be auto started via mini X"
SECTION = "x11"
-PR = "r6"
LICENSE = "MIT"
LIC_FILES_CHKSUM = "file://builder_session.sh;endline=5;md5=84796c3c41785d86100fdabcbdade00e"
@@ -29,5 +28,4 @@ do_install () {
chown builder.builder ${D}${sysconfdir}/mini_x/session.d/builder_session.sh
}
-# -4178 is an unrelated 'builder'
-CVE_CHECK_IGNORE = "CVE-2008-4178"
+CVE_STATUS[CVE-2008-4178] = "cpe-incorrect: This CVE is for an unrelated builder"
diff --git a/meta/recipes-graphics/cairo/cairo/CVE-2018-19876.patch b/meta/recipes-graphics/cairo/cairo/CVE-2018-19876.patch
deleted file mode 100644
index 4252a5663b..0000000000
--- a/meta/recipes-graphics/cairo/cairo/CVE-2018-19876.patch
+++ /dev/null
@@ -1,34 +0,0 @@
-CVE: CVE-2018-19876
-Upstream-Status: Backport
-Signed-off-by: Ross Burton <ross.burton@intel.com>
-
-From 90e85c2493fdfa3551f202ff10282463f1e36645 Mon Sep 17 00:00:00 2001
-From: Carlos Garcia Campos <cgarcia@igalia.com>
-Date: Mon, 19 Nov 2018 12:33:07 +0100
-Subject: [PATCH] ft: Use FT_Done_MM_Var instead of free when available in
- cairo_ft_apply_variations
-
-Fixes a crash when using freetype >= 2.9
----
- src/cairo-ft-font.c | 4 ++++
- 1 file changed, 4 insertions(+)
-
-diff --git a/src/cairo-ft-font.c b/src/cairo-ft-font.c
-index 325dd61b4..981973f78 100644
---- a/src/cairo-ft-font.c
-+++ b/src/cairo-ft-font.c
-@@ -2393,7 +2393,11 @@ skip:
- done:
- free (coords);
- free (current_coords);
-+#if HAVE_FT_DONE_MM_VAR
-+ FT_Done_MM_Var (face->glyph->library, ft_mm_var);
-+#else
- free (ft_mm_var);
-+#endif
- }
- }
-
---
-2.11.0
-
diff --git a/meta/recipes-graphics/cairo/cairo/CVE-2019-6461.patch b/meta/recipes-graphics/cairo/cairo/CVE-2019-6461.patch
deleted file mode 100644
index 5232cf70c6..0000000000
--- a/meta/recipes-graphics/cairo/cairo/CVE-2019-6461.patch
+++ /dev/null
@@ -1,19 +0,0 @@
-There is a potential infinite-loop in function _arc_error_normalized().
-
-CVE: CVE-2019-6461
-Upstream-Status: Pending
-Signed-off-by: Ross Burton <ross.burton@intel.com>
-
-diff --git a/src/cairo-arc.c b/src/cairo-arc.c
-index 390397bae..f9249dbeb 100644
---- a/src/cairo-arc.c
-+++ b/src/cairo-arc.c
-@@ -99,7 +99,7 @@ _arc_max_angle_for_tolerance_normalized (double tolerance)
- do {
- angle = M_PI / i++;
- error = _arc_error_normalized (angle);
-- } while (error > tolerance);
-+ } while (error > tolerance && error > __DBL_EPSILON__);
-
- return angle;
- }
diff --git a/meta/recipes-graphics/cairo/cairo/CVE-2019-6462.patch b/meta/recipes-graphics/cairo/cairo/CVE-2019-6462.patch
deleted file mode 100644
index 4e4598c5b5..0000000000
--- a/meta/recipes-graphics/cairo/cairo/CVE-2019-6462.patch
+++ /dev/null
@@ -1,20 +0,0 @@
-There is an assertion in function _cairo_arc_in_direction().
-
-CVE: CVE-2019-6462
-Upstream-Status: Pending
-Signed-off-by: Ross Burton <ross.burton@intel.com>
-
-diff --git a/src/cairo-arc.c b/src/cairo-arc.c
-index 390397bae..1bde774a4 100644
---- a/src/cairo-arc.c
-+++ b/src/cairo-arc.c
-@@ -186,7 +186,8 @@ _cairo_arc_in_direction (cairo_t *cr,
- if (cairo_status (cr))
- return;
-
-- assert (angle_max >= angle_min);
-+ if (angle_max < angle_min)
-+ return;
-
- if (angle_max - angle_min > 2 * M_PI * MAX_FULL_CIRCLES) {
- angle_max = fmod (angle_max - angle_min, 2 * M_PI);
diff --git a/meta/recipes-graphics/cairo/cairo/CVE-2020-35492.patch b/meta/recipes-graphics/cairo/cairo/CVE-2020-35492.patch
deleted file mode 100644
index fb6ce5cfdf..0000000000
--- a/meta/recipes-graphics/cairo/cairo/CVE-2020-35492.patch
+++ /dev/null
@@ -1,60 +0,0 @@
-Fix stack buffer overflow.
-
-CVE: CVE-2020-35492
-Upstream-Status: Backport
-Signed-off-by: Ross Burton <ross.burton@arm.com>
-
-From 03a820b173ed1fdef6ff14b4468f5dbc02ff59be Mon Sep 17 00:00:00 2001
-From: Heiko Lewin <heiko.lewin@worldiety.de>
-Date: Tue, 15 Dec 2020 16:48:19 +0100
-Subject: [PATCH] Fix mask usage in image-compositor
-
----
- src/cairo-image-compositor.c | 8 ++--
- test/Makefile.sources | 1 +
- test/bug-image-compositor.c | 39 ++++++++++++++++++++
- test/reference/bug-image-compositor.ref.png | Bin 0 -> 185 bytes
- 4 files changed, 44 insertions(+), 4 deletions(-)
- create mode 100644 test/bug-image-compositor.c
- create mode 100644 test/reference/bug-image-compositor.ref.png
-
-diff --git a/src/cairo-image-compositor.c b/src/cairo-image-compositor.c
-index 79ad69f68..4f8aaed99 100644
---- a/src/cairo-image-compositor.c
-+++ b/src/cairo-image-compositor.c
-@@ -2601,14 +2601,14 @@ _inplace_src_spans (void *abstract_renderer, int y, int h,
- unsigned num_spans)
- {
- cairo_image_span_renderer_t *r = abstract_renderer;
-- uint8_t *m;
-+ uint8_t *m, *base = (uint8_t*)pixman_image_get_data(r->mask);
- int x0;
-
- if (num_spans == 0)
- return CAIRO_STATUS_SUCCESS;
-
- x0 = spans[0].x;
-- m = r->_buf;
-+ m = base;
- do {
- int len = spans[1].x - spans[0].x;
- if (len >= r->u.composite.run_length && spans[0].coverage == 0xff) {
-@@ -2655,7 +2655,7 @@ _inplace_src_spans (void *abstract_renderer, int y, int h,
- spans[0].x, y,
- spans[1].x - spans[0].x, h);
-
-- m = r->_buf;
-+ m = base;
- x0 = spans[1].x;
- } else if (spans[0].coverage == 0x0) {
- if (spans[0].x != x0) {
-@@ -2684,7 +2684,7 @@ _inplace_src_spans (void *abstract_renderer, int y, int h,
- #endif
- }
-
-- m = r->_buf;
-+ m = base;
- x0 = spans[1].x;
- } else {
- *m++ = spans[0].coverage;
---
diff --git a/meta/recipes-graphics/cairo/cairo/cairo-get_bitmap_surface-bsc1036789-CVE-2017-7475.diff b/meta/recipes-graphics/cairo/cairo/cairo-get_bitmap_surface-bsc1036789-CVE-2017-7475.diff
index 7aaad2eedd..6c761bf2a7 100644
--- a/meta/recipes-graphics/cairo/cairo/cairo-get_bitmap_surface-bsc1036789-CVE-2017-7475.diff
+++ b/meta/recipes-graphics/cairo/cairo/cairo-get_bitmap_surface-bsc1036789-CVE-2017-7475.diff
@@ -3,7 +3,7 @@ Cairo: Fix Denial-of-Service Attack due to Logical Problem in Program
https://bugs.freedesktop.org/show_bug.cgi?id=100763
CVE: CVE-2017-7475
-Upstream-Status: Submitted
+Upstream-Status: Submitted [https://gitlab.freedesktop.org/cairo/cairo/-/issues/80]
Signed-off-by: Fan Xin <fan.xin@jp.fujitsu.com>
diff --git a/meta/recipes-graphics/cairo/cairo_1.16.0.bb b/meta/recipes-graphics/cairo/cairo_1.16.0.bb
deleted file mode 100644
index 67081bb8cb..0000000000
--- a/meta/recipes-graphics/cairo/cairo_1.16.0.bb
+++ /dev/null
@@ -1,102 +0,0 @@
-SUMMARY = "The Cairo 2D vector graphics library"
-DESCRIPTION = "Cairo is a multi-platform library providing anti-aliased \
-vector-based rendering for multiple target backends. Paths consist \
-of line segments and cubic splines and can be rendered at any width \
-with various join and cap styles. All colors may be specified with \
-optional translucence (opacity/alpha) and combined using the \
-extended Porter/Duff compositing algebra as found in the X Render \
-Extension."
-HOMEPAGE = "http://cairographics.org"
-BUGTRACKER = "http://bugs.freedesktop.org"
-SECTION = "libs"
-
-LICENSE = "(MPL-1.1 | LGPL-2.1-only) & GPL-3.0-or-later"
-LICENSE:${PN} = "MPL-1.1 | LGPL-2.1-only"
-LICENSE:${PN}-dev = "MPL-1.1 | LGPL-2.1-only"
-LICENSE:${PN}-doc = "MPL-1.1 | LGPL-2.1-only"
-LICENSE:${PN}-gobject = "MPL-1.1 | LGPL-2.1-only"
-LICENSE:${PN}-script-interpreter = "MPL-1.1 | LGPL-2.1-only"
-LICENSE:${PN}-perf-utils = "GPL-3.0-or-later"
-
-LIC_FILES_CHKSUM = "file://COPYING;md5=e73e999e0c72b5ac9012424fa157ad77 \
- file://util/cairo-trace/COPYING-GPL-3;md5=d32239bcb673463ab874e80d47fae504"
-
-
-DEPENDS = "fontconfig glib-2.0 libpng pixman zlib"
-
-SRC_URI = "http://cairographics.org/releases/cairo-${PV}.tar.xz \
- file://cairo-get_bitmap_surface-bsc1036789-CVE-2017-7475.diff \
- file://CVE-2018-19876.patch \
- file://CVE-2019-6461.patch \
- file://CVE-2019-6462.patch \
- file://CVE-2020-35492.patch \
- "
-
-SRC_URI[md5sum] = "f19e0353828269c22bd72e271243a552"
-SRC_URI[sha256sum] = "5e7b29b3f113ef870d1e3ecf8adf21f923396401604bda16d44be45e66052331"
-
-inherit autotools pkgconfig upstream-version-is-even gtk-doc multilib_script
-
-MULTILIB_SCRIPTS = "${PN}-perf-utils:${bindir}/cairo-trace"
-
-X11DEPENDS = "virtual/libx11 libsm libxrender libxext"
-
-PACKAGECONFIG ??= "${@bb.utils.filter('DISTRO_FEATURES', 'directfb', d)} \
- ${@bb.utils.contains('DISTRO_FEATURES', 'x11', 'x11 xcb', '', d)} \
- ${@bb.utils.contains('DISTRO_FEATURES', 'x11 opengl', 'opengl', '', d)} \
- trace"
-PACKAGECONFIG:class-native = "${@bb.utils.contains('DISTRO_FEATURES', 'x11', 'x11 xcb', '', d)}"
-PACKAGECONFIG:class-nativesdk = "${@bb.utils.contains('DISTRO_FEATURES', 'x11', 'x11 xcb', '', d)}"
-
-PACKAGECONFIG[x11] = "--with-x=yes -enable-xlib,--with-x=no --disable-xlib,${X11DEPENDS}"
-PACKAGECONFIG[xcb] = "--enable-xcb,--disable-xcb,libxcb"
-PACKAGECONFIG[directfb] = "--enable-directfb=yes,,directfb"
-PACKAGECONFIG[valgrind] = "--enable-valgrind=yes,--disable-valgrind,valgrind"
-PACKAGECONFIG[egl] = "--enable-egl=yes,--disable-egl,virtual/egl"
-PACKAGECONFIG[glesv2] = "--enable-glesv2,--disable-glesv2,virtual/libgles2"
-PACKAGECONFIG[opengl] = "--enable-gl,--disable-gl,virtual/libgl"
-# trace is under GPLv3
-PACKAGECONFIG[trace] = "--enable-trace,--disable-trace"
-
-EXTRA_OECONF += " \
- ${@bb.utils.contains('TARGET_FPU', 'soft', '--disable-some-floating-point', '', d)} \
- --enable-tee \
-"
-
-# We don't depend on binutils so we need to disable this
-export ac_cv_lib_bfd_bfd_openr="no"
-# Ensure we don't depend on LZO
-export ac_cv_lib_lzo2_lzo2a_decompress="no"
-
-do_install:append () {
- rm -rf ${D}${bindir}/cairo-sphinx
- rm -rf ${D}${libdir}/cairo/cairo-fdr*
- rm -rf ${D}${libdir}/cairo/cairo-sphinx*
- rm -rf ${D}${libdir}/cairo/.debug/cairo-fdr*
- rm -rf ${D}${libdir}/cairo/.debug/cairo-sphinx*
- [ ! -d ${D}${bindir} ] ||
- rmdir -p --ignore-fail-on-non-empty ${D}${bindir}
- [ ! -d ${D}${libdir}/cairo ] ||
- rmdir -p --ignore-fail-on-non-empty ${D}${libdir}/cairo
-}
-
-PACKAGES =+ "cairo-gobject cairo-script-interpreter cairo-perf-utils"
-
-SUMMARY:cairo-gobject = "The Cairo library GObject wrapper library"
-DESCRIPTION:cairo-gobject = "A GObject wrapper library for the Cairo API."
-
-SUMMARY:cairo-script-interpreter = "The Cairo library script interpreter"
-DESCRIPTION:cairo-script-interpreter = "The Cairo script interpreter implements \
-CairoScript. CairoScript is used by tracing utilities to enable the ability \
-to replay rendering."
-
-DESCRIPTION:cairo-perf-utils = "The Cairo library performance utilities"
-
-FILES:${PN} = "${libdir}/libcairo.so.*"
-FILES:${PN}-gobject = "${libdir}/libcairo-gobject.so.*"
-FILES:${PN}-script-interpreter = "${libdir}/libcairo-script-interpreter.so.*"
-FILES:${PN}-perf-utils = "${bindir}/cairo-trace* ${libdir}/cairo/*.la ${libdir}/cairo/libcairo-trace.so"
-
-BBCLASSEXTEND = "native nativesdk"
-
-UPSTREAM_CHECK_REGEX = "cairo-(?P<pver>\d+(\.\d+)+).tar.xz"
diff --git a/meta/recipes-graphics/cairo/cairo_1.18.0.bb b/meta/recipes-graphics/cairo/cairo_1.18.0.bb
new file mode 100644
index 0000000000..4c97e973d0
--- /dev/null
+++ b/meta/recipes-graphics/cairo/cairo_1.18.0.bb
@@ -0,0 +1,86 @@
+SUMMARY = "The Cairo 2D vector graphics library"
+DESCRIPTION = "Cairo is a multi-platform library providing anti-aliased \
+vector-based rendering for multiple target backends. Paths consist \
+of line segments and cubic splines and can be rendered at any width \
+with various join and cap styles. All colors may be specified with \
+optional translucence (opacity/alpha) and combined using the \
+extended Porter/Duff compositing algebra as found in the X Render \
+Extension."
+HOMEPAGE = "http://cairographics.org"
+BUGTRACKER = "https://gitlab.freedesktop.org/cairo/cairo/-/issues"
+SECTION = "libs"
+
+LICENSE = "(MPL-1.1 | LGPL-2.1-only) & GPL-3.0-or-later"
+LICENSE:${PN} = "MPL-1.1 | LGPL-2.1-only"
+LICENSE:${PN}-dev = "MPL-1.1 | LGPL-2.1-only"
+LICENSE:${PN}-doc = "MPL-1.1 | LGPL-2.1-only"
+LICENSE:${PN}-gobject = "MPL-1.1 | LGPL-2.1-only"
+LICENSE:${PN}-script-interpreter = "MPL-1.1 | LGPL-2.1-only"
+LICENSE:${PN}-perf-utils = "GPL-3.0-or-later"
+# Adapt the licenses for cairo-dbg and cairo-src depending on whether
+# cairo-trace is being built.
+LICENSE:${PN}-dbg = "(MPL-1.1 | LGPL-2.1-only)${@bb.utils.contains('PACKAGECONFIG', 'trace', ' & GPL-3.0-or-later', '', d)}"
+LICENSE:${PN}-src = "(MPL-1.1 | LGPL-2.1-only)${@bb.utils.contains('PACKAGECONFIG', 'trace', ' & GPL-3.0-or-later', '', d)}"
+
+LIC_FILES_CHKSUM = "file://COPYING;md5=e73e999e0c72b5ac9012424fa157ad77 \
+ ${@bb.utils.contains('PACKAGECONFIG', 'trace', 'file://util/cairo-trace/COPYING-GPL-3;md5=d32239bcb673463ab874e80d47fae504', '', d)}"
+
+
+DEPENDS = "fontconfig freetype glib-2.0 libpng pixman zlib"
+
+SRC_URI = "http://cairographics.org/releases/cairo-${PV}.tar.xz \
+ file://cairo-get_bitmap_surface-bsc1036789-CVE-2017-7475.diff \
+ "
+
+SRC_URI[sha256sum] = "243a0736b978a33dee29f9cca7521733b78a65b5418206fef7bd1c3d4cf10b64"
+
+inherit meson pkgconfig upstream-version-is-even gtk-doc multilib_script
+
+# if qemu usermode isn't available, this value needs to be set statically
+# (otherwise it's determinted by running a small target executable with qemu)
+do_write_config:append() {
+ cat >${WORKDIR}/cairo.cross <<EOF
+[properties]
+ipc_rmid_deferred_release = 'true'
+EOF
+}
+EXTRA_OEMESON:append:class-nativesdk = "${@' --cross-file ${WORKDIR}/cairo.cross' if d.getVar('EXEWRAPPER_ENABLED') == 'False' else ''}"
+EXTRA_OEMESON:append:class-target = "${@' --cross-file ${WORKDIR}/cairo.cross' if d.getVar('EXEWRAPPER_ENABLED') == 'False' else ''}"
+
+GTKDOC_MESON_OPTION = "gtk_doc"
+
+MULTILIB_SCRIPTS = "${PN}-perf-utils:${bindir}/cairo-trace"
+
+PACKAGECONFIG ??= "${@bb.utils.contains('DISTRO_FEATURES', 'x11', 'xlib xcb', '', d)} trace"
+PACKAGECONFIG[xlib] = "-Dxlib=enabled,-Dxlib=disabled,virtual/libx11 libxrender libxext"
+PACKAGECONFIG[xcb] = "-Dxcb=enabled,-Dxcb=disabled,libxcb"
+# cairo-trace is GPLv3 so add an option to remove it
+PACKAGECONFIG[trace] = ""
+
+do_install:append () {
+ if ! ${@bb.utils.contains('PACKAGECONFIG', 'trace', 'true', 'false', d)}; then
+ rm ${D}${bindir}/cairo-trace ${D}${libdir}/cairo/libcairo-trace.so
+ rmdir --ignore-fail-on-non-empty ${D}${bindir} ${D}${libdir}/cairo
+ fi
+}
+
+PACKAGES =+ "cairo-gobject cairo-script-interpreter cairo-perf-utils"
+
+SUMMARY:cairo-gobject = "The Cairo library GObject wrapper library"
+DESCRIPTION:cairo-gobject = "A GObject wrapper library for the Cairo API."
+
+SUMMARY:cairo-script-interpreter = "The Cairo library script interpreter"
+DESCRIPTION:cairo-script-interpreter = "The Cairo script interpreter implements \
+CairoScript. CairoScript is used by tracing utilities to enable the ability \
+to replay rendering."
+
+DESCRIPTION:cairo-perf-utils = "The Cairo library performance utilities"
+
+FILES:${PN} = "${libdir}/libcairo.so.*"
+FILES:${PN}-gobject = "${libdir}/libcairo-gobject.so.*"
+FILES:${PN}-script-interpreter = "${libdir}/libcairo-script-interpreter.so.*"
+FILES:${PN}-perf-utils = "${bindir}/cairo-* ${libdir}/cairo/libcairo-trace.so ${libdir}/cairo/libcairo-fdr.so"
+
+BBCLASSEXTEND = "native nativesdk"
+
+UPSTREAM_CHECK_REGEX = "cairo-(?P<pver>\d+(\.\d+)+).tar.xz"
diff --git a/meta/recipes-graphics/drm/libdrm_2.4.110.bb b/meta/recipes-graphics/drm/libdrm_2.4.110.bb
deleted file mode 100644
index dd775939a5..0000000000
--- a/meta/recipes-graphics/drm/libdrm_2.4.110.bb
+++ /dev/null
@@ -1,60 +0,0 @@
-SUMMARY = "Userspace interface to the kernel DRM services"
-DESCRIPTION = "The runtime library for accessing the kernel DRM services. DRM \
-stands for \"Direct Rendering Manager\", which is the kernel portion of the \
-\"Direct Rendering Infrastructure\" (DRI). DRI is required for many hardware \
-accelerated OpenGL drivers."
-HOMEPAGE = "http://dri.freedesktop.org"
-SECTION = "x11/base"
-LICENSE = "MIT"
-LIC_FILES_CHKSUM = "file://xf86drm.c;beginline=9;endline=32;md5=c8a3b961af7667c530816761e949dc71"
-PROVIDES = "drm"
-DEPENDS = "libpthread-stubs"
-
-SRC_URI = "http://dri.freedesktop.org/libdrm/${BP}.tar.xz \
- "
-
-SRC_URI[sha256sum] = "eecee4c4b47ed6d6ce1a9be3d6d92102548ea35e442282216d47d05293cf9737"
-
-inherit meson pkgconfig manpages
-
-PACKAGECONFIG ??= "libkms intel radeon amdgpu nouveau vmwgfx omap freedreno vc4 etnaviv install-test-programs"
-PACKAGECONFIG[libkms] = "-Dlibkms=true,-Dlibkms=false"
-PACKAGECONFIG[intel] = "-Dintel=true,-Dintel=false,libpciaccess"
-PACKAGECONFIG[radeon] = "-Dradeon=true,-Dradeon=false"
-PACKAGECONFIG[amdgpu] = "-Damdgpu=true,-Damdgpu=false"
-PACKAGECONFIG[nouveau] = "-Dnouveau=true,-Dnouveau=false"
-PACKAGECONFIG[vmwgfx] = "-Dvmwgfx=true,-Dvmwgfx=false"
-PACKAGECONFIG[omap] = "-Domap=true,-Domap=false"
-PACKAGECONFIG[exynos] = "-Dexynos=true,-Dexynos=false"
-PACKAGECONFIG[freedreno] = "-Dfreedreno=true,-Dfreedreno=false"
-PACKAGECONFIG[tegra] = "-Dtegra=true,-Dtegra=false"
-PACKAGECONFIG[vc4] = "-Dvc4=true,-Dvc4=false"
-PACKAGECONFIG[etnaviv] = "-Detnaviv=true,-Detnaviv=false"
-PACKAGECONFIG[freedreno-kgsl] = "-Dfreedreno-kgsl=true,-Dfreedreno-kgsl=false"
-PACKAGECONFIG[valgrind] = "-Dvalgrind=true,-Dvalgrind=false,valgrind"
-PACKAGECONFIG[install-test-programs] = "-Dinstall-test-programs=true,-Dinstall-test-programs=false"
-PACKAGECONFIG[cairo-tests] = "-Dcairo-tests=true,-Dcairo-tests=false"
-PACKAGECONFIG[udev] = "-Dudev=true,-Dudev=false,udev"
-PACKAGECONFIG[manpages] = "-Dman-pages=true,-Dman-pages=false,libxslt-native xmlto-native python3-docutils-native"
-
-ALLOW_EMPTY:${PN}-drivers = "1"
-PACKAGES =+ "${PN}-tests ${PN}-drivers ${PN}-radeon ${PN}-nouveau ${PN}-omap \
- ${PN}-intel ${PN}-exynos ${PN}-kms ${PN}-freedreno ${PN}-amdgpu \
- ${PN}-etnaviv"
-
-RRECOMMENDS:${PN}-drivers = "${PN}-radeon ${PN}-nouveau ${PN}-omap ${PN}-intel \
- ${PN}-exynos ${PN}-freedreno ${PN}-amdgpu \
- ${PN}-etnaviv"
-
-FILES:${PN}-tests = "${bindir}/*"
-FILES:${PN}-radeon = "${libdir}/libdrm_radeon.so.*"
-FILES:${PN}-nouveau = "${libdir}/libdrm_nouveau.so.*"
-FILES:${PN}-omap = "${libdir}/libdrm_omap.so.*"
-FILES:${PN}-intel = "${libdir}/libdrm_intel.so.*"
-FILES:${PN}-exynos = "${libdir}/libdrm_exynos.so.*"
-FILES:${PN}-kms = "${libdir}/libkms*.so.*"
-FILES:${PN}-freedreno = "${libdir}/libdrm_freedreno.so.*"
-FILES:${PN}-amdgpu = "${libdir}/libdrm_amdgpu.so.* ${datadir}/${PN}/amdgpu.ids"
-FILES:${PN}-etnaviv = "${libdir}/libdrm_etnaviv.so.*"
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-graphics/drm/libdrm_2.4.120.bb b/meta/recipes-graphics/drm/libdrm_2.4.120.bb
new file mode 100644
index 0000000000..18b3256843
--- /dev/null
+++ b/meta/recipes-graphics/drm/libdrm_2.4.120.bb
@@ -0,0 +1,59 @@
+SUMMARY = "Userspace interface to the kernel DRM services"
+DESCRIPTION = "The runtime library for accessing the kernel DRM services. DRM \
+stands for \"Direct Rendering Manager\", which is the kernel portion of the \
+\"Direct Rendering Infrastructure\" (DRI). DRI is required for many hardware \
+accelerated OpenGL drivers."
+HOMEPAGE = "http://dri.freedesktop.org"
+SECTION = "x11/base"
+LICENSE = "MIT"
+LIC_FILES_CHKSUM = "file://xf86drm.c;beginline=9;endline=32;md5=c8a3b961af7667c530816761e949dc71"
+PROVIDES = "drm"
+DEPENDS = "libpthread-stubs"
+
+SRC_URI = "http://dri.freedesktop.org/libdrm/${BP}.tar.xz \
+ "
+
+SRC_URI[sha256sum] = "3bf55363f76c7250946441ab51d3a6cc0ae518055c0ff017324ab76cdefb327a"
+
+inherit meson pkgconfig manpages
+
+PACKAGECONFIG ??= "intel radeon amdgpu nouveau vmwgfx omap freedreno vc4 etnaviv tests install-test-programs"
+PACKAGECONFIG[intel] = "-Dintel=enabled,-Dintel=disabled,libpciaccess"
+PACKAGECONFIG[radeon] = "-Dradeon=enabled,-Dradeon=disabled"
+PACKAGECONFIG[amdgpu] = "-Damdgpu=enabled,-Damdgpu=disabled"
+PACKAGECONFIG[nouveau] = "-Dnouveau=enabled,-Dnouveau=disabled"
+PACKAGECONFIG[vmwgfx] = "-Dvmwgfx=enabled,-Dvmwgfx=disabled"
+PACKAGECONFIG[omap] = "-Domap=enabled,-Domap=disabled"
+PACKAGECONFIG[exynos] = "-Dexynos=enabled,-Dexynos=disabled"
+PACKAGECONFIG[freedreno] = "-Dfreedreno=enabled,-Dfreedreno=disabled"
+PACKAGECONFIG[tegra] = "-Dtegra=enabled,-Dtegra=disabled"
+PACKAGECONFIG[vc4] = "-Dvc4=enabled,-Dvc4=disabled"
+PACKAGECONFIG[etnaviv] = "-Detnaviv=enabled,-Detnaviv=disabled"
+PACKAGECONFIG[freedreno-kgsl] = "-Dfreedreno-kgsl=true,-Dfreedreno-kgsl=false"
+PACKAGECONFIG[valgrind] = "-Dvalgrind=enabled,-Dvalgrind=disabled,valgrind"
+PACKAGECONFIG[install-test-programs] = "-Dinstall-test-programs=true,-Dinstall-test-programs=false"
+PACKAGECONFIG[cairo-tests] = "-Dcairo-tests=enabled,-Dcairo-tests=disabled"
+PACKAGECONFIG[tests] = "-Dtests=true,-Dtests=false"
+PACKAGECONFIG[udev] = "-Dudev=true,-Dudev=false,udev"
+PACKAGECONFIG[manpages] = "-Dman-pages=enabled,-Dman-pages=disabled,libxslt-native xmlto-native python3-docutils-native"
+
+ALLOW_EMPTY:${PN}-drivers = "1"
+PACKAGES =+ "${PN}-tests ${PN}-drivers ${PN}-radeon ${PN}-nouveau ${PN}-omap \
+ ${PN}-intel ${PN}-exynos ${PN}-freedreno ${PN}-amdgpu \
+ ${PN}-etnaviv"
+
+RRECOMMENDS:${PN}-drivers = "${PN}-radeon ${PN}-nouveau ${PN}-omap ${PN}-intel \
+ ${PN}-exynos ${PN}-freedreno ${PN}-amdgpu \
+ ${PN}-etnaviv"
+
+FILES:${PN}-tests = "${bindir}/*"
+FILES:${PN}-radeon = "${libdir}/libdrm_radeon.so.*"
+FILES:${PN}-nouveau = "${libdir}/libdrm_nouveau.so.*"
+FILES:${PN}-omap = "${libdir}/libdrm_omap.so.*"
+FILES:${PN}-intel = "${libdir}/libdrm_intel.so.*"
+FILES:${PN}-exynos = "${libdir}/libdrm_exynos.so.*"
+FILES:${PN}-freedreno = "${libdir}/libdrm_freedreno.so.*"
+FILES:${PN}-amdgpu = "${libdir}/libdrm_amdgpu.so.* ${datadir}/${PN}/amdgpu.ids"
+FILES:${PN}-etnaviv = "${libdir}/libdrm_etnaviv.so.*"
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-graphics/fontconfig/fontconfig_2.14.0.bb b/meta/recipes-graphics/fontconfig/fontconfig_2.14.0.bb
deleted file mode 100644
index 80f952a024..0000000000
--- a/meta/recipes-graphics/fontconfig/fontconfig_2.14.0.bb
+++ /dev/null
@@ -1,70 +0,0 @@
-SUMMARY = "Generic font configuration library"
-DESCRIPTION = "Fontconfig is a font configuration and customization library, which \
-does not depend on the X Window System. It is designed to locate \
-fonts within the system and select them according to requirements \
-specified by applications. \
-Fontconfig is not a rasterization library, nor does it impose a \
-particular rasterization library on the application. The X-specific \
-library 'Xft' uses fontconfig along with freetype to specify and \
-rasterize fonts."
-
-HOMEPAGE = "http://www.fontconfig.org"
-BUGTRACKER = "https://bugs.freedesktop.org/enter_bug.cgi?product=fontconfig"
-
-LICENSE = "MIT & MIT & PD"
-LIC_FILES_CHKSUM = "file://COPYING;md5=00252fd272bf2e722925613ad74cb6c7 \
- file://src/fcfreetype.c;endline=45;md5=ce976b310a013a6ace6b60afa71851c1 \
- file://src/fccache.c;beginline=1671;endline=1686;md5=906c2f04b0c79a1bcc84ecfca4de5619 \
- "
-
-SECTION = "libs"
-
-DEPENDS = "expat freetype zlib gperf-native util-linux"
-
-SRC_URI = "http://fontconfig.org/release/fontconfig-${PV}.tar.gz \
- file://revert-static-pkgconfig.patch \
- "
-
-SRC_URI[sha256sum] = "b8f607d556e8257da2f3616b4d704be30fd73bd71e367355ca78963f9a7f0434"
-
-UPSTREAM_CHECK_REGEX = "fontconfig-(?P<pver>\d+\.\d+\.(?!9\d+)\d+)"
-
-do_configure:prepend() {
- # work around https://bugs.freedesktop.org/show_bug.cgi?id=101280
- rm -f ${S}/src/fcobjshash.h ${S}/src/fcobjshash.gperf
-}
-
-do_install:append:class-target() {
- # duplicate fc-cache for postinstall script
- mkdir -p ${D}${libexecdir}
- ln ${D}${bindir}/fc-cache ${D}${libexecdir}/${MLPREFIX}fc-cache
-}
-
-do_install:append:class-nativesdk() {
- # duplicate fc-cache for postinstall script
- mkdir -p ${D}${libexecdir}
- ln ${D}${bindir}/fc-cache ${D}${libexecdir}/${MLPREFIX}fc-cache
-}
-
-PACKAGES =+ "fontconfig-utils"
-FILES:${PN} =+ "${datadir}/xml/*"
-FILES:${PN}-dev += "${datadir}/gettext/*"
-FILES:fontconfig-utils = "${bindir}/* ${libexecdir}/*"
-
-# Work around past breakage in debian.bbclass
-RPROVIDES:fontconfig-utils = "libfontconfig-utils"
-RREPLACES:fontconfig-utils = "libfontconfig-utils"
-RCONFLICTS:fontconfig-utils = "libfontconfig-utils"
-DEBIAN_NOAUTONAME:fontconfig-utils = "1"
-
-inherit autotools pkgconfig relative_symlinks gettext
-
-FONTCONFIG_CACHE_DIR ?= "${localstatedir}/cache/fontconfig"
-
-# comma separated list of additional directories
-# /usr/share/fonts is already included by default (you can change it with --with-default-fonts)
-FONTCONFIG_FONT_DIRS ?= "no"
-
-EXTRA_OECONF = " --disable-docs --with-default-fonts=${datadir}/fonts --with-cache-dir=${FONTCONFIG_CACHE_DIR} --with-add-fonts=${FONTCONFIG_FONT_DIRS}"
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-graphics/fontconfig/fontconfig_2.15.0.bb b/meta/recipes-graphics/fontconfig/fontconfig_2.15.0.bb
new file mode 100644
index 0000000000..b737447e47
--- /dev/null
+++ b/meta/recipes-graphics/fontconfig/fontconfig_2.15.0.bb
@@ -0,0 +1,69 @@
+SUMMARY = "Generic font configuration library"
+DESCRIPTION = "Fontconfig is a font configuration and customization library, which \
+does not depend on the X Window System. It is designed to locate \
+fonts within the system and select them according to requirements \
+specified by applications. \
+Fontconfig is not a rasterization library, nor does it impose a \
+particular rasterization library on the application. The X-specific \
+library 'Xft' uses fontconfig along with freetype to specify and \
+rasterize fonts."
+
+HOMEPAGE = "http://www.fontconfig.org"
+BUGTRACKER = "https://bugs.freedesktop.org/enter_bug.cgi?product=fontconfig"
+
+LICENSE = "MIT & MIT & PD"
+LIC_FILES_CHKSUM = "file://COPYING;md5=00252fd272bf2e722925613ad74cb6c7 \
+ file://src/fcfreetype.c;endline=45;md5=ef8702fbf3dc506715be8a9d69cb0252 \
+ "
+
+SECTION = "libs"
+
+DEPENDS = "expat freetype zlib gperf-native util-linux"
+
+SRC_URI = "http://fontconfig.org/release/fontconfig-${PV}.tar.gz \
+ file://revert-static-pkgconfig.patch \
+ "
+
+SRC_URI[sha256sum] = "f5f359d6332861bd497570848fcb42520964a9e83d5e3abe397b6b6db9bcaaf4"
+
+UPSTREAM_CHECK_REGEX = "fontconfig-(?P<pver>\d+\.\d+\.(?!9\d+)\d+)"
+
+do_configure:prepend() {
+ # work around https://bugs.freedesktop.org/show_bug.cgi?id=101280
+ rm -f ${S}/src/fcobjshash.h ${S}/src/fcobjshash.gperf
+}
+
+do_install:append:class-target() {
+ # duplicate fc-cache for postinstall script
+ mkdir -p ${D}${libexecdir}
+ ln ${D}${bindir}/fc-cache ${D}${libexecdir}/${MLPREFIX}fc-cache
+}
+
+do_install:append:class-nativesdk() {
+ # duplicate fc-cache for postinstall script
+ mkdir -p ${D}${libexecdir}
+ ln ${D}${bindir}/fc-cache ${D}${libexecdir}/${MLPREFIX}fc-cache
+}
+
+PACKAGES =+ "fontconfig-utils"
+FILES:${PN} =+ "${datadir}/xml/*"
+FILES:${PN}-dev += "${datadir}/gettext/*"
+FILES:fontconfig-utils = "${bindir}/* ${libexecdir}/*"
+
+# Work around past breakage in debian.bbclass
+RPROVIDES:fontconfig-utils = "libfontconfig-utils"
+RREPLACES:fontconfig-utils = "libfontconfig-utils"
+RCONFLICTS:fontconfig-utils = "libfontconfig-utils"
+DEBIAN_NOAUTONAME:fontconfig-utils = "1"
+
+inherit autotools pkgconfig relative_symlinks gettext
+
+FONTCONFIG_CACHE_DIR ?= "${localstatedir}/cache/fontconfig"
+
+# comma separated list of additional directories
+# /usr/share/fonts is already included by default (you can change it with --with-default-fonts)
+FONTCONFIG_FONT_DIRS ?= "no"
+
+EXTRA_OECONF = " --disable-docs --with-default-fonts=${datadir}/fonts --with-cache-dir=${FONTCONFIG_CACHE_DIR} --with-add-fonts=${FONTCONFIG_FONT_DIRS}"
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-graphics/freetype/freetype_2.12.1.bb b/meta/recipes-graphics/freetype/freetype_2.12.1.bb
deleted file mode 100644
index 46c6182630..0000000000
--- a/meta/recipes-graphics/freetype/freetype_2.12.1.bb
+++ /dev/null
@@ -1,44 +0,0 @@
-SUMMARY = "Freetype font rendering library"
-DESCRIPTION = "FreeType is a software font engine that is designed to be small, efficient, \
-highly customizable, and portable while capable of producing high-quality output (glyph \
-images). It can be used in graphics libraries, display servers, font conversion tools, text \
-image generation tools, and many other products as well."
-HOMEPAGE = "http://www.freetype.org/"
-BUGTRACKER = "https://savannah.nongnu.org/bugs/?group=freetype"
-SECTION = "libs"
-
-LICENSE = "FTL | GPL-2.0-or-later"
-LIC_FILES_CHKSUM = "file://LICENSE.TXT;md5=a5927784d823d443c6cae55701d01553 \
- file://docs/FTL.TXT;md5=9f37b4e6afa3fef9dba8932b16bd3f97 \
- file://docs/GPLv2.TXT;md5=8ef380476f642c20ebf40fecb0add2ec"
-
-SRC_URI = "${SAVANNAH_NONGNU_MIRROR}/${BPN}/${BP}.tar.xz"
-SRC_URI[sha256sum] = "4766f20157cc4cf0cd292f80bf917f92d1c439b243ac3018debf6b9140c41a7f"
-
-UPSTREAM_CHECK_REGEX = "freetype-(?P<pver>\d+(\.\d+)+)"
-
-inherit autotools pkgconfig multilib_header
-
-# Adapt autotools to work with the minimal autoconf usage in freetype
-AUTOTOOLS_SCRIPT_PATH = "${S}/builds/unix"
-CONFIGURE_SCRIPT = "${S}/configure"
-EXTRA_AUTORECONF += "--exclude=autoheader --exclude=automake"
-
-PACKAGECONFIG ??= "zlib pixmap"
-
-PACKAGECONFIG[bzip2] = "--with-bzip2,--without-bzip2,bzip2"
-# harfbuzz results in a circular dependency so enabling is non-trivial
-PACKAGECONFIG[harfbuzz] = "--with-harfbuzz,--without-harfbuzz,harfbuzz"
-PACKAGECONFIG[pixmap] = "--with-png,--without-png,libpng"
-PACKAGECONFIG[zlib] = "--with-zlib,--without-zlib,zlib"
-PACKAGECONFIG[freetypeconfig] = "--enable-freetype-config=yes,--enable-freetype-config=no,"
-
-EXTRA_OECONF = "CC_BUILD='${BUILD_CC}'"
-
-TARGET_CPPFLAGS += "-D_FILE_OFFSET_BITS=64"
-
-do_install:append() {
- oe_multilib_header freetype2/freetype/config/ftconfig.h
-}
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-graphics/freetype/freetype_2.13.2.bb b/meta/recipes-graphics/freetype/freetype_2.13.2.bb
new file mode 100644
index 0000000000..4e7a0ad160
--- /dev/null
+++ b/meta/recipes-graphics/freetype/freetype_2.13.2.bb
@@ -0,0 +1,45 @@
+SUMMARY = "Freetype font rendering library"
+DESCRIPTION = "FreeType is a software font engine that is designed to be small, efficient, \
+highly customizable, and portable while capable of producing high-quality output (glyph \
+images). It can be used in graphics libraries, display servers, font conversion tools, text \
+image generation tools, and many other products as well."
+HOMEPAGE = "http://www.freetype.org/"
+BUGTRACKER = "https://savannah.nongnu.org/bugs/?group=freetype"
+SECTION = "libs"
+
+LICENSE = "(FTL | GPL-2.0-or-later) & MIT"
+LIC_FILES_CHKSUM = "file://LICENSE.TXT;md5=843b6efc16f6b1652ec97f89d5a516c0 \
+ file://docs/FTL.TXT;md5=9f37b4e6afa3fef9dba8932b16bd3f97 \
+ file://docs/GPLv2.TXT;md5=8ef380476f642c20ebf40fecb0add2ec \
+ "
+
+SRC_URI = "${SAVANNAH_NONGNU_MIRROR}/${BPN}/${BP}.tar.xz"
+SRC_URI[sha256sum] = "12991c4e55c506dd7f9b765933e62fd2be2e06d421505d7950a132e4f1bb484d"
+
+UPSTREAM_CHECK_REGEX = "freetype-(?P<pver>\d+(\.\d+)+)"
+
+inherit autotools pkgconfig multilib_header
+
+# Adapt autotools to work with the minimal autoconf usage in freetype
+AUTOTOOLS_SCRIPT_PATH = "${S}/builds/unix"
+CONFIGURE_SCRIPT = "${S}/configure"
+EXTRA_AUTORECONF += "--exclude=autoheader --exclude=automake"
+
+PACKAGECONFIG ??= "zlib pixmap"
+
+PACKAGECONFIG[bzip2] = "--with-bzip2,--without-bzip2,bzip2"
+# harfbuzz results in a circular dependency so enabling is non-trivial
+PACKAGECONFIG[harfbuzz] = "--with-harfbuzz,--without-harfbuzz,harfbuzz"
+PACKAGECONFIG[pixmap] = "--with-png,--without-png,libpng"
+PACKAGECONFIG[zlib] = "--with-zlib,--without-zlib,zlib"
+PACKAGECONFIG[freetypeconfig] = "--enable-freetype-config=yes,--enable-freetype-config=no,"
+
+EXTRA_OECONF = "CC_BUILD='${BUILD_CC}'"
+
+TARGET_CPPFLAGS += "-D_FILE_OFFSET_BITS=64"
+
+do_install:append() {
+ oe_multilib_header freetype2/freetype/config/ftconfig.h
+}
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-graphics/glslang/glslang/0001-generate-glslang-pkg-config.patch b/meta/recipes-graphics/glslang/glslang/0001-generate-glslang-pkg-config.patch
index cddd330971..e6bb6ec8e3 100644
--- a/meta/recipes-graphics/glslang/glslang/0001-generate-glslang-pkg-config.patch
+++ b/meta/recipes-graphics/glslang/glslang/0001-generate-glslang-pkg-config.patch
@@ -1,4 +1,4 @@
-From 6692611126b40d499c94bbcc8e29604560262072 Mon Sep 17 00:00:00 2001
+From fc33f1cf032a15c07044ef932bc991c346d62d62 Mon Sep 17 00:00:00 2001
From: Jose Quaresma <quaresma.jose@gmail.com>
Date: Sun, 7 Feb 2021 01:30:39 +0000
Subject: [PATCH] generate glslang pkg-config
@@ -8,7 +8,6 @@ Based on https://src.fedoraproject.org/rpms/glslang/blob/main/f/0001-pkg-config-
Upstream-Status: Pending
Signed-off-by: Jose Quaresma <quaresma.jose@gmail.com>
-
---
glslang/CMakeLists.txt | 2 ++
glslang/glslang.pc.cmake.in | 11 +++++++++++
@@ -16,21 +15,21 @@ Signed-off-by: Jose Quaresma <quaresma.jose@gmail.com>
create mode 100644 glslang/glslang.pc.cmake.in
diff --git a/glslang/CMakeLists.txt b/glslang/CMakeLists.txt
-index 1c7d22a2..10c1e841 100644
+index e4690f09..8e660bc5 100644
--- a/glslang/CMakeLists.txt
+++ b/glslang/CMakeLists.txt
-@@ -209,6 +209,8 @@ if(ENABLE_GLSLANG_INSTALL)
- EXPORT glslangTargets
- ARCHIVE DESTINATION ${CMAKE_INSTALL_LIBDIR})
+@@ -233,6 +233,8 @@ if(GLSLANG_ENABLE_INSTALL)
+ install(TARGETS MachineIndependent EXPORT glslang-targets)
+ install(TARGETS GenericCodeGen EXPORT glslang-targets)
endif()
+ configure_file(${CMAKE_CURRENT_SOURCE_DIR}/glslang.pc.cmake.in ${CMAKE_CURRENT_BINARY_DIR}/pkgconfig/glslang.pc @ONLY)
+ install(FILES ${CMAKE_CURRENT_BINARY_DIR}/pkgconfig/glslang.pc DESTINATION ${CMAKE_INSTALL_LIBDIR}/pkgconfig)
- install(EXPORT glslangTargets DESTINATION ${CMAKE_INSTALL_LIBDIR}/cmake)
-
+ set(PUBLIC_HEADERS
+ Public/ResourceLimits.h
diff --git a/glslang/glslang.pc.cmake.in b/glslang/glslang.pc.cmake.in
new file mode 100644
-index 00000000..64b6882d
+index 00000000..43104e68
--- /dev/null
+++ b/glslang/glslang.pc.cmake.in
@@ -0,0 +1,11 @@
@@ -43,5 +42,5 @@ index 00000000..64b6882d
+Description: OpenGL and OpenGL ES shader front end and validator
+Requires:
+Version: @GLSLANG_VERSION@
-+Libs: -L${libdir} -lglslang -lOSDependent -lHLSL -lOGLCompiler -lSPVRemapper
++Libs: -L${libdir} -lglslang -lHLSL -lSPVRemapper
+Cflags: -I${includedir}
diff --git a/meta/recipes-graphics/glslang/glslang_1.3.211.0.bb b/meta/recipes-graphics/glslang/glslang_1.3.211.0.bb
deleted file mode 100644
index 37dea07a8a..0000000000
--- a/meta/recipes-graphics/glslang/glslang_1.3.211.0.bb
+++ /dev/null
@@ -1,32 +0,0 @@
-SUMMARY = "OpenGL / OpenGL ES Reference Compiler"
-DESCRIPTION = "Glslang is the official reference compiler front end for the \
-OpenGL ES and OpenGL shading languages. It implements a strict interpretation \
-of the specifications for these languages. It is open and free for anyone to use, \
-either from a command line or programmatically."
-SECTION = "graphics"
-HOMEPAGE = "https://www.khronos.org/opengles/sdk/tools/Reference-Compiler"
-LICENSE = "BSD-3-Clause & BSD-2-Clause & MIT & Apache-2.0 & GPL-3-with-bison-exception"
-LIC_FILES_CHKSUM = "file://LICENSE.txt;md5=2a2b5acd7bc4844964cfda45fe807dc3"
-
-SRCREV = "9bb8cfffb0eed010e07132282c41d73064a7a609"
-SRC_URI = "git://github.com/KhronosGroup/glslang.git;protocol=https;branch=master \
- file://0001-generate-glslang-pkg-config.patch"
-PE = "1"
-UPSTREAM_CHECK_GITTAGREGEX = "sdk-(?P<pver>\d+(\.\d+)+)"
-S = "${WORKDIR}/git"
-
-inherit cmake python3native
-
-EXTRA_OECMAKE = " \
- -DCMAKE_BUILD_TYPE=Release \
- -DBUILD_SHARED_LIBS=ON \
- -DENABLE_PCH=OFF \
- -DENABLE_CTEST=OFF \
- -DBUILD_EXTERNAL=OFF \
-"
-
-SOLIBSDEV = "glslang.so"
-# all the other libraries are unversioned, so pack it on PN
-FILES:${PN} += "${libdir}/*.so"
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-graphics/glslang/glslang_1.3.280.0.bb b/meta/recipes-graphics/glslang/glslang_1.3.280.0.bb
new file mode 100644
index 0000000000..637082c719
--- /dev/null
+++ b/meta/recipes-graphics/glslang/glslang_1.3.280.0.bb
@@ -0,0 +1,37 @@
+SUMMARY = "OpenGL / OpenGL ES Reference Compiler"
+DESCRIPTION = "Glslang is the official reference compiler front end for the \
+OpenGL ES and OpenGL shading languages. It implements a strict interpretation \
+of the specifications for these languages. It is open and free for anyone to use, \
+either from a command line or programmatically."
+SECTION = "graphics"
+HOMEPAGE = "https://www.khronos.org/opengles/sdk/tools/Reference-Compiler"
+LICENSE = "BSD-3-Clause & BSD-2-Clause & MIT & Apache-2.0 & GPL-3-with-bison-exception"
+LIC_FILES_CHKSUM = "file://LICENSE.txt;md5=2a2b5acd7bc4844964cfda45fe807dc3"
+
+SRCREV = "ee2f5d09eaf8f4e8d0d598bd2172fce290d4ca60"
+SRC_URI = "git://github.com/KhronosGroup/glslang.git;protocol=https;branch=main \
+ file://0001-generate-glslang-pkg-config.patch \
+ "
+PE = "1"
+# These recipes need to be updated in lockstep with each other:
+# glslang, vulkan-headers, vulkan-loader, vulkan-tools, spirv-headers, spirv-tools
+# vulkan-validation-layers, vulkan-utility-libraries.
+# The tags versions should always be sdk-x.y.z, as this is what
+# upstream considers a release.
+UPSTREAM_CHECK_GITTAGREGEX = "sdk-(?P<pver>\d+(\.\d+)+)"
+S = "${WORKDIR}/git"
+
+inherit cmake python3native
+
+DEPENDS = "spirv-tools"
+
+EXTRA_OECMAKE = " \
+ -DCMAKE_BUILD_TYPE=Release \
+ -DBUILD_SHARED_LIBS=ON \
+ -DENABLE_PCH=OFF \
+ -DENABLE_CTEST=OFF \
+ -DBUILD_EXTERNAL=OFF \
+ -DALLOW_EXTERNAL_SPIRV_TOOLS=ON \
+"
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-graphics/graphene/files/float-div.patch b/meta/recipes-graphics/graphene/files/float-div.patch
new file mode 100644
index 0000000000..bf74101b1c
--- /dev/null
+++ b/meta/recipes-graphics/graphene/files/float-div.patch
@@ -0,0 +1,28 @@
+From c19d1f4a7e44e071df3a2612ae2eb20c84e831a6 Mon Sep 17 00:00:00 2001
+From: Emmanuele Bassi <ebassi@gnome.org>
+Date: Thu, 10 Aug 2023 12:44:49 +0100
+Subject: [PATCH] build: Allow host builds when cross-compiling
+
+Environments that set up execution wrappers when cross-compiling should
+be allowed to run code. We only fall back on external properties if we
+really can't run any native code on the host machine.
+
+Upstream-Status: Backport
+Signed-off-by: Ross Burton <ross.burton@arm.com>
+---
+ meson.build | 2 +-
+ 1 file changed, 1 insertion(+), 1 deletion(-)
+
+diff --git a/meson.build b/meson.build
+index 48f22d7..7dcb9e6 100644
+--- a/meson.build
++++ b/meson.build
+@@ -270,7 +270,7 @@ int main() {
+ return 0;
+ }
+ '''
+-if meson.is_cross_build()
++if not meson.can_run_host_binaries()
+ ieee754_float_div = meson.get_external_property('ieee754_float_div', cc.get_id() in ['gcc', 'clang'])
+ message('Cross-building, assuming IEEE 754 division:', ieee754_float_div)
+ else
diff --git a/meta/recipes-graphics/graphene/graphene_1.10.8.bb b/meta/recipes-graphics/graphene/graphene_1.10.8.bb
new file mode 100644
index 0000000000..e2e82b32aa
--- /dev/null
+++ b/meta/recipes-graphics/graphene/graphene_1.10.8.bb
@@ -0,0 +1,29 @@
+SUMMARY = "A thin layer of graphic data types"
+HOMEPAGE = "http://ebassi.github.io/graphene/"
+LICENSE = "MIT"
+LIC_FILES_CHKSUM = "file://LICENSE.txt;md5=a7d871d9e23c450c421a85bb2819f648"
+
+
+inherit gnomebase gobject-introspection gtk-doc
+
+SRC_URI += "file://float-div.patch"
+
+SRC_URI[archive.sha256sum] = "a37bb0e78a419dcbeaa9c7027bcff52f5ec2367c25ec859da31dfde2928f279a"
+
+# Disable neon support by default on ARM-32 platforms because of the
+# following upstream bug: https://github.com/ebassi/graphene/issues/215
+PACKAGECONFIG ?= "gobject-types ${@bb.utils.contains('TUNE_FEATURES', 'aarch64', 'neon', '', d)}"
+
+PACKAGECONFIG[gobject-types] = "-Dgobject_types=true,-Dgobject_types=false,glib-2.0"
+PACKAGECONFIG[neon] = "-Darm_neon=true,-Darm_neon=false,"
+
+GIR_MESON_ENABLE_FLAG = 'enabled'
+GIR_MESON_DISABLE_FLAG = 'disabled'
+
+GTKDOC_MESON_OPTION = "gtk_doc"
+
+EXTRA_OEMESON = "-Dinstalled_tests=false"
+
+FILES:${PN} += "${libdir}/graphene-1.0"
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-graphics/harfbuzz/harfbuzz_4.2.1.bb b/meta/recipes-graphics/harfbuzz/harfbuzz_4.2.1.bb
deleted file mode 100644
index 05d5aefd87..0000000000
--- a/meta/recipes-graphics/harfbuzz/harfbuzz_4.2.1.bb
+++ /dev/null
@@ -1,48 +0,0 @@
-SUMMARY = "Text shaping library"
-DESCRIPTION = "HarfBuzz is an OpenType text shaping engine."
-HOMEPAGE = "http://www.freedesktop.org/wiki/Software/HarfBuzz"
-BUGTRACKER = "https://bugs.freedesktop.org/enter_bug.cgi?product=HarfBuzz"
-SECTION = "libs"
-LICENSE = "MIT"
-LIC_FILES_CHKSUM = "file://COPYING;md5=6ee0f16281694fb6aa689cca1e0fb3da \
- file://src/hb-ucd.cc;beginline=1;endline=15;md5=29d4dcb6410429195df67efe3382d8bc \
- "
-
-UPSTREAM_CHECK_URI = "https://github.com/${BPN}/${BPN}/releases"
-UPSTREAM_CHECK_REGEX = "harfbuzz-(?P<pver>\d+(\.\d+)+).tar"
-
-SRC_URI = "https://github.com/${BPN}/${BPN}/releases/download/${PV}/${BPN}-${PV}.tar.xz"
-SRC_URI[sha256sum] = "bd17916513829aeff961359a5ccebba6de2f4bf37a91faee3ac29c120e3d7ee1"
-
-inherit meson pkgconfig lib_package gtk-doc gobject-introspection
-
-GIR_MESON_ENABLE_FLAG = 'enabled'
-GIR_MESON_DISABLE_FLAG = 'disabled'
-GTKDOC_MESON_ENABLE_FLAG = 'enabled'
-GTKDOC_MESON_DISABLE_FLAG = 'disabled'
-
-PACKAGECONFIG ??= "cairo freetype glib icu"
-PACKAGECONFIG[cairo] = "-Dcairo=enabled,-Dcairo=disabled,cairo"
-PACKAGECONFIG[freetype] = "-Dfreetype=enabled,-Dfreetype=disabled,freetype"
-PACKAGECONFIG[glib] = "-Dglib=enabled,-Dglib=disabled,glib-2.0"
-PACKAGECONFIG[graphite] = "-Dgraphite=enabled,-Dgraphite=disabled,graphite2"
-PACKAGECONFIG[icu] = "-Dicu=enabled,-Dicu=disabled,icu"
-
-PACKAGES =+ "${PN}-icu ${PN}-icu-dev ${PN}-subset"
-
-LEAD_SONAME = "libharfbuzz.so"
-
-do_install:append() {
- # If no tools are installed due to PACKAGECONFIG then this directory is
- #still installed, so remove it to stop packaging wanings.
- rmdir --ignore-fail-on-non-empty ${D}${bindir}
-}
-
-FILES:${PN}-icu = "${libdir}/libharfbuzz-icu.so.*"
-FILES:${PN}-icu-dev = "${libdir}/libharfbuzz-icu.la \
- ${libdir}/libharfbuzz-icu.so \
- ${libdir}/pkgconfig/harfbuzz-icu.pc \
-"
-FILES:${PN}-subset = "${libdir}/libharfbuzz-subset.so.*"
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-graphics/harfbuzz/harfbuzz_8.3.1.bb b/meta/recipes-graphics/harfbuzz/harfbuzz_8.3.1.bb
new file mode 100644
index 0000000000..fd4dcc2338
--- /dev/null
+++ b/meta/recipes-graphics/harfbuzz/harfbuzz_8.3.1.bb
@@ -0,0 +1,48 @@
+SUMMARY = "Text shaping library"
+DESCRIPTION = "HarfBuzz is an OpenType text shaping engine."
+HOMEPAGE = "http://www.freedesktop.org/wiki/Software/HarfBuzz"
+BUGTRACKER = "https://bugs.freedesktop.org/enter_bug.cgi?product=HarfBuzz"
+SECTION = "libs"
+LICENSE = "MIT"
+LIC_FILES_CHKSUM = "file://COPYING;md5=b98429b8e8e3c2a67cfef01e99e4893d \
+ file://src/hb-ucd.cc;beginline=1;endline=15;md5=29d4dcb6410429195df67efe3382d8bc \
+ "
+
+SRC_URI = "${GITHUB_BASE_URI}/download/${PV}/${BPN}-${PV}.tar.xz"
+SRC_URI[sha256sum] = "f73e1eacd7e2ffae687bc3f056bb0c705b7a05aee86337686e09da8fc1c2030c"
+
+DEPENDS += "glib-2.0-native"
+
+inherit meson pkgconfig lib_package gtk-doc gobject-introspection github-releases
+
+GIR_MESON_ENABLE_FLAG = 'enabled'
+GIR_MESON_DISABLE_FLAG = 'disabled'
+GTKDOC_MESON_ENABLE_FLAG = 'enabled'
+GTKDOC_MESON_DISABLE_FLAG = 'disabled'
+
+PACKAGECONFIG ??= "cairo freetype glib icu"
+PACKAGECONFIG[cairo] = "-Dcairo=enabled,-Dcairo=disabled,cairo"
+PACKAGECONFIG[chafa] = "-Dchafa=enabled,-Dchafa=disabled,chafa"
+PACKAGECONFIG[freetype] = "-Dfreetype=enabled,-Dfreetype=disabled,freetype"
+PACKAGECONFIG[glib] = "-Dglib=enabled,-Dglib=disabled,glib-2.0"
+PACKAGECONFIG[graphite] = "-Dgraphite2=enabled,-Dgraphite2=disabled,graphite2"
+PACKAGECONFIG[icu] = "-Dicu=enabled,-Dicu=disabled,icu"
+
+PACKAGES =+ "${PN}-icu ${PN}-icu-dev ${PN}-subset"
+
+LEAD_SONAME = "libharfbuzz.so"
+
+do_install:append() {
+ # If no tools are installed due to PACKAGECONFIG then this directory might
+ # still be installed, so remove it to stop packaging warnings.
+ [ ! -d ${D}${bindir} ] || rmdir --ignore-fail-on-non-empty ${D}${bindir}
+}
+
+FILES:${PN}-icu = "${libdir}/libharfbuzz-icu.so.*"
+FILES:${PN}-icu-dev = "${libdir}/libharfbuzz-icu.la \
+ ${libdir}/libharfbuzz-icu.so \
+ ${libdir}/pkgconfig/harfbuzz-icu.pc \
+"
+FILES:${PN}-subset = "${libdir}/libharfbuzz-subset.so.*"
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-graphics/igt-gpu-tools/igt-gpu-tools/0001-lib-meson.build-fix-meson-0.60-compatibility.patch b/meta/recipes-graphics/igt-gpu-tools/igt-gpu-tools/0001-lib-meson.build-fix-meson-0.60-compatibility.patch
deleted file mode 100644
index e7c782055b..0000000000
--- a/meta/recipes-graphics/igt-gpu-tools/igt-gpu-tools/0001-lib-meson.build-fix-meson-0.60-compatibility.patch
+++ /dev/null
@@ -1,24 +0,0 @@
-From b9a19aa4bdb21751c04c0e3ba8a9e2e35a10c953 Mon Sep 17 00:00:00 2001
-From: Alexander Kanavin <alex@linutronix.de>
-Date: Mon, 25 Oct 2021 18:18:15 +0200
-Subject: [PATCH] lib/meson.build: fix meson 0.60 compatibility
-
-Upstream-Status: Backport [https://gitlab.freedesktop.org/drm/igt-gpu-tools/-/commit/963917a3565466832a3b2fc22e9285d34a0bf944]
-Signed-off-by: Alexander Kanavin <alex@linutronix.de>
----
- lib/meson.build | 2 +-
- 1 file changed, 1 insertion(+), 1 deletion(-)
-
-diff --git a/lib/meson.build b/lib/meson.build
-index 9929520e..1a525587 100644
---- a/lib/meson.build
-+++ b/lib/meson.build
-@@ -152,7 +152,7 @@ lib_version = vcs_tag(input : 'version.h.in', output : 'version.h',
-
- lib_intermediates = []
- foreach f: lib_sources
-- name = f.underscorify(f)
-+ name = f.underscorify()
- lib = static_library('igt-' + name,
- [ f, lib_version ],
- include_directories: inc,
diff --git a/meta/recipes-graphics/igt-gpu-tools/igt-gpu-tools_git.bb b/meta/recipes-graphics/igt-gpu-tools/igt-gpu-tools_git.bb
index bb2f9564d2..90cd09ea70 100644
--- a/meta/recipes-graphics/igt-gpu-tools/igt-gpu-tools_git.bb
+++ b/meta/recipes-graphics/igt-gpu-tools/igt-gpu-tools_git.bb
@@ -9,24 +9,22 @@ LICENSE = "MIT"
inherit meson pkgconfig
-SRCREV = "203def046b466fb2da67f9f15552d84e1c0b41f2"
-PV = "1.26"
+SRCREV = "31ec677ca24e7ed86e35f367f40a29d3d9f51c06"
+PV = "1.28"
-SRC_URI = "git://gitlab.freedesktop.org/drm/igt-gpu-tools.git;protocol=https;branch=master \
- file://0001-lib-meson.build-fix-meson-0.60-compatibility.patch \
- "
+SRC_URI = "git://gitlab.freedesktop.org/drm/igt-gpu-tools.git;protocol=https;branch=master"
S = "${WORKDIR}/git"
DEPENDS += "libdrm libpciaccess cairo udev glib-2.0 procps libunwind kmod openssl elfutils alsa-lib json-c bison-native"
-RDEPENDS:${PN} += "bash"
+RDEPENDS:${PN} += "bash perl"
RDEPENDS:${PN}-tests += "bash"
PACKAGE_BEFORE_PN = "${PN}-benchmarks ${PN}-tests"
PACKAGECONFIG[chamelium] = "-Dchamelium=enabled,-Dchamelium=disabled,gsl xmlrpc-c"
-EXTRA_OEMESON = "-Ddocs=disabled -Drunner=enabled -Dsrcdir=/usr/src/debug/${PN}/${PV}-${PR}/git/"
+EXTRA_OEMESON = "-Ddocs=disabled -Drunner=enabled -Dsrcdir=${TARGET_DBGSRC_DIR}/git/ -Dversion_hash=${PV}"
COMPATIBLE_HOST = "(x86_64.*|i.86.*|arm.*|aarch64).*-linux"
COMPATIBLE_HOST:libc-musl:class-target = "null"
SECURITY_LDFLAGS = "${SECURITY_X_LDFLAGS}"
diff --git a/meta/recipes-graphics/jpeg/files/0001-libjpeg-turbo-fix-package_qa-error.patch b/meta/recipes-graphics/jpeg/files/0001-libjpeg-turbo-fix-package_qa-error.patch
deleted file mode 100644
index fab5109f83..0000000000
--- a/meta/recipes-graphics/jpeg/files/0001-libjpeg-turbo-fix-package_qa-error.patch
+++ /dev/null
@@ -1,32 +0,0 @@
-From 5cf847b5bef8dc3f9f89bd09dd5af4e6603f393c Mon Sep 17 00:00:00 2001
-From: Changqing Li <changqing.li@windriver.com>
-Date: Mon, 27 Aug 2018 16:10:55 +0800
-Subject: [PATCH] libjpeg-turbo: fix package_qa error
-
-Fix package qa errors like below:
-libjpeg.so.62.3.0 contains probably-redundant RPATH /usr/lib [useless-rpaths]
-usr/bin/cjpeg contains probably-redundant RPATH /usr/lib
-
-Upstream-Status: Inappropriate [oe-specific]
-
-Signed-off-by: Changqing Li <changqing.li@windriver.com>
-Signed-off-by: Anuj Mittal <anuj.mittal@intel.com>
----
- CMakeLists.txt | 4 ----
- 1 file changed, 4 deletions(-)
-
-diff --git a/CMakeLists.txt b/CMakeLists.txt
-index 2bc3458..ea3041e 100644
---- a/CMakeLists.txt
-+++ b/CMakeLists.txt
-@@ -191,10 +191,6 @@ endif()
- report_option(ENABLE_SHARED "Shared libraries")
- report_option(ENABLE_STATIC "Static libraries")
-
--if(ENABLE_SHARED)
-- set(CMAKE_INSTALL_RPATH ${CMAKE_INSTALL_FULL_LIBDIR})
--endif()
--
- if(WITH_JPEG8 OR WITH_JPEG7)
- set(WITH_ARITH_ENC 1)
- set(WITH_ARITH_DEC 1)
diff --git a/meta/recipes-graphics/jpeg/libjpeg-turbo_2.1.3.bb b/meta/recipes-graphics/jpeg/libjpeg-turbo_2.1.3.bb
deleted file mode 100644
index fdc035d5f7..0000000000
--- a/meta/recipes-graphics/jpeg/libjpeg-turbo_2.1.3.bb
+++ /dev/null
@@ -1,62 +0,0 @@
-SUMMARY = "Hardware accelerated JPEG compression/decompression library"
-DESCRIPTION = "libjpeg-turbo is a derivative of libjpeg that uses SIMD instructions (MMX, SSE2, NEON) to accelerate baseline JPEG compression and decompression"
-HOMEPAGE = "http://libjpeg-turbo.org/"
-
-LICENSE = "BSD-3-Clause"
-LIC_FILES_CHKSUM = "file://cdjpeg.h;endline=13;md5=8a61af33cc1c681cd5cc297150bbb5bd \
- file://jpeglib.h;endline=16;md5=52b5eaade8d5b6a452a7693dfe52c084 \
- file://djpeg.c;endline=11;md5=510b386442ab6a27ee241fc5669bc5ea \
- "
-DEPENDS:append:x86-64:class-target = " nasm-native"
-DEPENDS:append:x86:class-target = " nasm-native"
-
-SRC_URI = "${SOURCEFORGE_MIRROR}/${BPN}/${BPN}-${PV}.tar.gz \
- file://0001-libjpeg-turbo-fix-package_qa-error.patch \
- "
-
-SRC_URI[sha256sum] = "467b310903832b033fe56cd37720d1b73a6a3bd0171dbf6ff0b620385f4f76d0"
-UPSTREAM_CHECK_URI = "http://sourceforge.net/projects/libjpeg-turbo/files/"
-UPSTREAM_CHECK_REGEX = "/libjpeg-turbo/files/(?P<pver>(\d+[\.\-_]*)+)/"
-
-PE = "1"
-
-# Drop-in replacement for jpeg
-PROVIDES = "jpeg"
-RPROVIDES:${PN} += "jpeg"
-RREPLACES:${PN} += "jpeg"
-RCONFLICTS:${PN} += "jpeg"
-
-inherit cmake pkgconfig
-
-export NASMENV = "--reproducible --debug-prefix-map=${WORKDIR}=/usr/src/debug/${PN}/${EXTENDPE}${PV}-${PR}"
-
-# Add nasm-native dependency consistently for all build arches is hard
-EXTRA_OECMAKE:append:class-native = " -DWITH_SIMD=False"
-EXTRA_OECMAKE:append:class-nativesdk = " -DWITH_SIMD=False"
-
-# Work around missing x32 ABI support
-EXTRA_OECMAKE:append:class-target = " ${@bb.utils.contains("TUNE_FEATURES", "mx32", "-DWITH_SIMD=False", "", d)}"
-
-# Work around missing non-floating point ABI support in MIPS
-EXTRA_OECMAKE:append:class-target = " ${@bb.utils.contains("MIPSPKGSFX_FPU", "-nf", "-DWITH_SIMD=False", "", d)}"
-
-EXTRA_OECMAKE:append:class-target:arm = " ${@bb.utils.contains("TUNE_FEATURES", "neon", "", "-DWITH_SIMD=False", d)}"
-EXTRA_OECMAKE:append:class-target:armeb = " ${@bb.utils.contains("TUNE_FEATURES", "neon", "", "-DWITH_SIMD=False", d)}"
-
-# Provide a workaround if Altivec unit is not present in PPC
-EXTRA_OECMAKE:append:class-target:powerpc = " ${@bb.utils.contains("TUNE_FEATURES", "altivec", "", "-DWITH_SIMD=False", d)}"
-EXTRA_OECMAKE:append:class-target:powerpc64 = " ${@bb.utils.contains("TUNE_FEATURES", "altivec", "", "-DWITH_SIMD=False", d)}"
-EXTRA_OECMAKE:append:class-target:powerpc64le = " ${@bb.utils.contains("TUNE_FEATURES", "altivec", "", "-DWITH_SIMD=False", d)}"
-
-DEBUG_OPTIMIZATION:append:armv4 = " ${@bb.utils.contains('TUNE_CCARGS', '-mthumb', '-fomit-frame-pointer', '', d)}"
-DEBUG_OPTIMIZATION:append:armv5 = " ${@bb.utils.contains('TUNE_CCARGS', '-mthumb', '-fomit-frame-pointer', '', d)}"
-
-PACKAGES =+ "jpeg-tools libturbojpeg"
-
-DESCRIPTION:jpeg-tools = "The jpeg-tools package includes client programs to access libjpeg functionality. These tools allow for the compression, decompression, transformation and display of JPEG files and benchmarking of the libjpeg library."
-FILES:jpeg-tools = "${bindir}/*"
-
-DESCRIPTION:libturbojpeg = "A SIMD-accelerated JPEG codec which provides only TurboJPEG APIs"
-FILES:libturbojpeg = "${libdir}/libturbojpeg.so.*"
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-graphics/jpeg/libjpeg-turbo_3.0.1.bb b/meta/recipes-graphics/jpeg/libjpeg-turbo_3.0.1.bb
new file mode 100644
index 0000000000..132d4b375a
--- /dev/null
+++ b/meta/recipes-graphics/jpeg/libjpeg-turbo_3.0.1.bb
@@ -0,0 +1,58 @@
+SUMMARY = "Hardware accelerated JPEG compression/decompression library"
+DESCRIPTION = "libjpeg-turbo is a derivative of libjpeg that uses SIMD instructions (MMX, SSE2, NEON) to accelerate baseline JPEG compression and decompression"
+HOMEPAGE = "http://libjpeg-turbo.org/"
+
+LICENSE = "IJG & BSD-3-Clause & Zlib"
+LIC_FILES_CHKSUM = "file://LICENSE.md;md5=2a8e0d8226a102f07ab63ed7fd6ce155"
+
+DEPENDS:append:x86-64:class-target = " nasm-native"
+DEPENDS:append:x86:class-target = " nasm-native"
+
+SRC_URI = "${SOURCEFORGE_MIRROR}/${BPN}/${BPN}-${PV}.tar.gz"
+
+SRC_URI[sha256sum] = "22429507714ae147b3acacd299e82099fce5d9f456882fc28e252e4579ba2a75"
+UPSTREAM_CHECK_URI = "http://sourceforge.net/projects/libjpeg-turbo/files/"
+UPSTREAM_CHECK_REGEX = "/libjpeg-turbo/files/(?P<pver>(\d+[\.\-_]*)+)/"
+
+PE = "1"
+
+# Drop-in replacement for jpeg
+PROVIDES = "jpeg"
+RPROVIDES:${PN} += "jpeg"
+RREPLACES:${PN} += "jpeg"
+RCONFLICTS:${PN} += "jpeg"
+
+inherit cmake pkgconfig
+
+export NASMENV = "--reproducible --debug-prefix-map=${WORKDIR}=${TARGET_DBGSRC_DIR}"
+
+# Add nasm-native dependency consistently for all build arches is hard
+EXTRA_OECMAKE:append:class-native = " -DWITH_SIMD=False"
+EXTRA_OECMAKE:append:class-nativesdk = " -DWITH_SIMD=False"
+
+# Work around missing x32 ABI support
+EXTRA_OECMAKE:append:class-target = " ${@bb.utils.contains("TUNE_FEATURES", "mx32", "-DWITH_SIMD=False", "", d)}"
+
+# Work around missing non-floating point ABI support in MIPS
+EXTRA_OECMAKE:append:class-target = " ${@bb.utils.contains("MIPSPKGSFX_FPU", "-nf", "-DWITH_SIMD=False", "", d)}"
+
+EXTRA_OECMAKE:append:class-target:arm = " ${@bb.utils.contains("TUNE_FEATURES", "neon", "", "-DWITH_SIMD=False", d)}"
+EXTRA_OECMAKE:append:class-target:armeb = " ${@bb.utils.contains("TUNE_FEATURES", "neon", "", "-DWITH_SIMD=False", d)}"
+
+# Provide a workaround if Altivec unit is not present in PPC
+EXTRA_OECMAKE:append:class-target:powerpc = " ${@bb.utils.contains("TUNE_FEATURES", "altivec", "", "-DWITH_SIMD=False", d)}"
+EXTRA_OECMAKE:append:class-target:powerpc64 = " ${@bb.utils.contains("TUNE_FEATURES", "altivec", "", "-DWITH_SIMD=False", d)}"
+EXTRA_OECMAKE:append:class-target:powerpc64le = " ${@bb.utils.contains("TUNE_FEATURES", "altivec", "", "-DWITH_SIMD=False", d)}"
+
+DEBUG_OPTIMIZATION:append:armv4 = " ${@bb.utils.contains('TUNE_CCARGS', '-mthumb', '-fomit-frame-pointer', '', d)}"
+DEBUG_OPTIMIZATION:append:armv5 = " ${@bb.utils.contains('TUNE_CCARGS', '-mthumb', '-fomit-frame-pointer', '', d)}"
+
+PACKAGES =+ "jpeg-tools libturbojpeg"
+
+DESCRIPTION:jpeg-tools = "The jpeg-tools package includes client programs to access libjpeg functionality. These tools allow for the compression, decompression, transformation and display of JPEG files and benchmarking of the libjpeg library."
+FILES:jpeg-tools = "${bindir}/*"
+
+DESCRIPTION:libturbojpeg = "A SIMD-accelerated JPEG codec which provides only TurboJPEG APIs"
+FILES:libturbojpeg = "${libdir}/libturbojpeg.so.*"
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-graphics/kmscube/kmscube/0001-cube-gears-Change-header-file-to-GLES3-gl3.h.patch b/meta/recipes-graphics/kmscube/kmscube/0001-cube-gears-Change-header-file-to-GLES3-gl3.h.patch
new file mode 100644
index 0000000000..f8608d4676
--- /dev/null
+++ b/meta/recipes-graphics/kmscube/kmscube/0001-cube-gears-Change-header-file-to-GLES3-gl3.h.patch
@@ -0,0 +1,31 @@
+From 5f54f067da7ee24a61d6cb41e0636383729e2bad Mon Sep 17 00:00:00 2001
+From: Fabio Estevam <festevam@gmail.com>
+Date: Mon, 8 Jan 2024 15:00:01 -0300
+Subject: [PATCH] cube-gears: Change header file to <GLES3/gl3.h>
+
+Since commit 96d63eb59e34 ("kmscube: Add gears mode"), kmscube fails
+to build on platforms without <GL/gl.h>.
+
+Fix it by changing the header file to <GLES3/gl3.h>.
+
+Reported-by: Martin Jansa <martin.jansa@gmail.com>
+Suggested-by: Martin Jansa <martin.jansa@gmail.com>
+Signed-off-by: Fabio Estevam <festevam@gmail.com>
+Upstream-Status: Submitted [https://gitlab.freedesktop.org/mesa/kmscube/-/merge_requests/51]
+---
+ cube-gears.c | 2 +-
+ 1 file changed, 1 insertion(+), 1 deletion(-)
+
+diff --git a/cube-gears.c b/cube-gears.c
+index d5b7a5f..cb538ec 100644
+--- a/cube-gears.c
++++ b/cube-gears.c
+@@ -31,7 +31,7 @@
+ #include <sys/time.h>
+ #include <math.h>
+
+-#include <GL/gl.h>
++#include <GLES3/gl3.h>
+
+ #include "common.h"
+ #include "esUtil.h"
diff --git a/meta/recipes-graphics/kmscube/kmscube/0001-texturator-Use-correct-GL-extension-header.patch b/meta/recipes-graphics/kmscube/kmscube/0001-texturator-Use-correct-GL-extension-header.patch
deleted file mode 100644
index 5965782de7..0000000000
--- a/meta/recipes-graphics/kmscube/kmscube/0001-texturator-Use-correct-GL-extension-header.patch
+++ /dev/null
@@ -1,33 +0,0 @@
-From 2b74e0e32235f6ab2e3e42d53dea985a7ba6227f Mon Sep 17 00:00:00 2001
-From: Damian Hobson-Garcia <dhobsong@igel.co.jp>
-Date: Wed, 16 Dec 2020 11:08:25 +0900
-Subject: [PATCH] texturator: Use correct GL extension header
-
-gl2ext.h is the extenstion header for OpenGL ES 2.0 and all later
-versions according to the Khronos documentation [1]. gl3ext.h is either
-an empty stub, or may not even exist on some platforms.
-
-[1]: https://www.khronos.org/registry/OpenGL/index_es.php#headers
-
-Upstream-Status: Submitted [https://gitlab.freedesktop.org/mesa/kmscube/-/merge_requests/26]
-Signed-off-by: Quentin Schulz <quentin.schulz@theobroma-systems.com>
----
- texturator.c | 2 +-
- 1 file changed, 1 insertion(+), 1 deletion(-)
-
-diff --git a/texturator.c b/texturator.c
-index d9335d7..6d97856 100644
---- a/texturator.c
-+++ b/texturator.c
-@@ -30,7 +30,7 @@
- #include <math.h>
-
- #include <GLES3/gl3.h>
--#include <GLES3/gl3ext.h>
-+#include <GLES2/gl2ext.h>
-
- #ifdef HAVE_LIBPNG
- #include <png.h>
---
-2.33.1
-
diff --git a/meta/recipes-graphics/kmscube/kmscube_git.bb b/meta/recipes-graphics/kmscube/kmscube_git.bb
index 58ce26a3d5..19bbcd195d 100644
--- a/meta/recipes-graphics/kmscube/kmscube_git.bb
+++ b/meta/recipes-graphics/kmscube/kmscube_git.bb
@@ -6,21 +6,25 @@ OpenGL or OpenGL ES."
HOMEPAGE = "https://cgit.freedesktop.org/mesa/kmscube/"
LICENSE = "MIT"
SECTION = "graphics"
-DEPENDS = "virtual/libgles3 virtual/libgles2 virtual/egl libdrm"
+DEPENDS = "virtual/libgles3 virtual/libgles2 virtual/egl libdrm virtual/libgbm"
LIC_FILES_CHKSUM = "file://kmscube.c;beginline=1;endline=23;md5=8b309d4ee67b7315ff7381270dd631fb"
-SRCREV = "9f63f359fab1b5d8e862508e4e51c9dfe339ccb0"
-SRC_URI = "git://gitlab.freedesktop.org/mesa/kmscube;branch=master;protocol=https"
-SRC_URI += "file://0001-texturator-Use-correct-GL-extension-header.patch"
+SRCREV = "6ab022fdfcfedd28f4b5dbd8d3299414a367746f"
+SRC_URI = "git://gitlab.freedesktop.org/mesa/kmscube;branch=master;protocol=https \
+ file://0001-cube-gears-Change-header-file-to-GLES3-gl3.h.patch \
+ "
+
UPSTREAM_CHECK_COMMITS = "1"
S = "${WORKDIR}/git"
+BASEPV = "0.0.1"
+PV = "${BASEPV}+git"
+
inherit meson pkgconfig features_check
REQUIRED_DISTRO_FEATURES = "opengl"
-DEPENDS = "virtual/libgbm"
PACKAGECONFIG ??= ""
PACKAGECONFIG[gstreamer] = "-Dgstreamer=enabled,-Dgstreamer=disabled,gstreamer1.0 gstreamer1.0-plugins-base"
diff --git a/meta/recipes-graphics/libepoxy/files/0001-dispatch_common.h-define-also-EGL_NO_X11.patch b/meta/recipes-graphics/libepoxy/files/0001-dispatch_common.h-define-also-EGL_NO_X11.patch
deleted file mode 100644
index 971a3f54e0..0000000000
--- a/meta/recipes-graphics/libepoxy/files/0001-dispatch_common.h-define-also-EGL_NO_X11.patch
+++ /dev/null
@@ -1,27 +0,0 @@
-From 7211120d1e2f059d900f3379b9790484dbcf7761 Mon Sep 17 00:00:00 2001
-From: Martin Jansa <Martin.Jansa@gmail.com>
-Date: Fri, 25 Oct 2019 11:09:34 +0000
-Subject: [PATCH] dispatch_common.h: define also EGL_NO_X11
-
-MESA_EGL_NO_X11_HEADERS was renamed to EGL_NO_X11 in:
-https://github.com/mesa3d/mesa/commit/6202a13b71e18dc31ba7e2f4ea915b67eacc1ddb
-
-Signed-off-by: Martin Jansa <Martin.Jansa@gmail.com>
-Upstream-Status: Pending
-
----
- src/dispatch_common.h | 1 +
- 1 file changed, 1 insertion(+)
-
-diff --git a/src/dispatch_common.h b/src/dispatch_common.h
-index a136943..448c9b1 100644
---- a/src/dispatch_common.h
-+++ b/src/dispatch_common.h
-@@ -55,6 +55,7 @@
- * as EGL_NO_X11
- */
- # define MESA_EGL_NO_X11_HEADERS 1
-+# define EGL_NO_X11 1
- # endif
- #include "epoxy/egl.h"
- #endif
diff --git a/meta/recipes-graphics/libepoxy/libepoxy_1.5.10.bb b/meta/recipes-graphics/libepoxy/libepoxy_1.5.10.bb
new file mode 100644
index 0000000000..384afa6907
--- /dev/null
+++ b/meta/recipes-graphics/libepoxy/libepoxy_1.5.10.bb
@@ -0,0 +1,30 @@
+SUMMARY = "OpenGL function pointer management library"
+DESCRIPTION = "It hides the complexity of dlopen(), dlsym(), \
+glXGetProcAddress(), eglGetProcAddress(), etc. from the app developer, with \
+very little knowledge needed on their part. They get to read GL specs and \
+write code using undecorated function names like glCompileShader()."
+HOMEPAGE = "https://github.com/anholt/libepoxy/"
+SECTION = "libs"
+
+LICENSE = "MIT"
+LIC_FILES_CHKSUM = "file://COPYING;md5=58ef4c80d401e07bd9ee8b6b58cf464b"
+
+SRC_URI = "git://github.com/anholt/libepoxy;branch=master;protocol=https"
+SRCREV = "c84bc9459357a40e46e2fec0408d04fbdde2c973"
+S = "${WORKDIR}/git"
+
+inherit meson pkgconfig features_check github-releases
+
+REQUIRED_DISTRO_FEATURES = "opengl"
+
+PACKAGECONFIG[egl] = "-Degl=yes, -Degl=no, virtual/egl"
+PACKAGECONFIG[x11] = "-Dglx=yes, -Dglx=no -Dx11=false, virtual/libx11 virtual/libgl"
+PACKAGECONFIG ??= "${@bb.utils.filter('DISTRO_FEATURES', 'x11', d)} egl"
+
+EXTRA_OEMESON += "-Dtests=false"
+
+PACKAGECONFIG:class-native = "egl x11"
+PACKAGECONFIG:class-nativesdk = "egl x11"
+
+BBCLASSEXTEND = "native nativesdk"
+
diff --git a/meta/recipes-graphics/libepoxy/libepoxy_1.5.9.bb b/meta/recipes-graphics/libepoxy/libepoxy_1.5.9.bb
deleted file mode 100644
index 487fc00360..0000000000
--- a/meta/recipes-graphics/libepoxy/libepoxy_1.5.9.bb
+++ /dev/null
@@ -1,32 +0,0 @@
-SUMMARY = "OpenGL function pointer management library"
-DESCRIPTION = "It hides the complexity of dlopen(), dlsym(), \
-glXGetProcAddress(), eglGetProcAddress(), etc. from the app developer, with \
-very little knowledge needed on their part. They get to read GL specs and \
-write code using undecorated function names like glCompileShader()."
-HOMEPAGE = "https://github.com/anholt/libepoxy/"
-SECTION = "libs"
-
-LICENSE = "MIT"
-LIC_FILES_CHKSUM = "file://COPYING;md5=58ef4c80d401e07bd9ee8b6b58cf464b"
-
-SRC_URI = "https://github.com/anholt/${BPN}/releases/download/${PV}/${BP}.tar.xz \
- file://0001-dispatch_common.h-define-also-EGL_NO_X11.patch \
- "
-SRC_URI[sha256sum] = "d168a19a6edfdd9977fef1308ccf516079856a4275cf876de688fb7927e365e4"
-UPSTREAM_CHECK_URI = "https://github.com/anholt/libepoxy/releases"
-
-inherit meson pkgconfig features_check
-
-REQUIRED_DISTRO_FEATURES = "opengl"
-
-PACKAGECONFIG[egl] = "-Degl=yes, -Degl=no, virtual/egl"
-PACKAGECONFIG[x11] = "-Dglx=yes, -Dglx=no -Dx11=false, virtual/libx11 virtual/libgl"
-PACKAGECONFIG ??= "${@bb.utils.filter('DISTRO_FEATURES', 'x11', d)} egl"
-
-EXTRA_OEMESON += "-Dtests=false"
-
-PACKAGECONFIG:class-native = "egl x11"
-PACKAGECONFIG:class-nativesdk = "egl x11"
-
-BBCLASSEXTEND = "native nativesdk"
-
diff --git a/meta/recipes-graphics/libfakekey/libfakekey_git.bb b/meta/recipes-graphics/libfakekey/libfakekey_git.bb
index 5d94be52b4..5f902d3a67 100644
--- a/meta/recipes-graphics/libfakekey/libfakekey_git.bb
+++ b/meta/recipes-graphics/libfakekey/libfakekey_git.bb
@@ -11,9 +11,9 @@ DEPENDS = "libxtst"
SECTION = "x11/wm"
SRCREV = "7ad885912efb2131e80914e964d5e635b0d07b40"
-PV = "0.3+git${SRCPV}"
+PV = "0.3+git"
-SRC_URI = "git://git.yoctoproject.org/${BPN};branch=master"
+SRC_URI = "git://git.yoctoproject.org/${BPN};branch=master;protocol=https"
S = "${WORKDIR}/git"
diff --git a/meta/recipes-graphics/libmatchbox/libmatchbox_1.12.bb b/meta/recipes-graphics/libmatchbox/libmatchbox_1.12.bb
index b01b332a55..bf3f5c005d 100644
--- a/meta/recipes-graphics/libmatchbox/libmatchbox_1.12.bb
+++ b/meta/recipes-graphics/libmatchbox/libmatchbox_1.12.bb
@@ -17,7 +17,7 @@ DEPENDS = "virtual/libx11 libxext"
#SRCREV for 1.12
SRCREV = "e846ee434f8e23d9db38af13c523f791495e0e87"
-SRC_URI = "git://git.yoctoproject.org/${BPN};branch=master"
+SRC_URI = "git://git.yoctoproject.org/${BPN};branch=master;protocol=https"
S = "${WORKDIR}/git"
diff --git a/meta/recipes-graphics/libsdl2/libsdl2/0001-Disable-libunwind-in-native-OE-builds-by-not-looking.patch b/meta/recipes-graphics/libsdl2/libsdl2/0001-Disable-libunwind-in-native-OE-builds-by-not-looking.patch
deleted file mode 100644
index 57bc522393..0000000000
--- a/meta/recipes-graphics/libsdl2/libsdl2/0001-Disable-libunwind-in-native-OE-builds-by-not-looking.patch
+++ /dev/null
@@ -1,36 +0,0 @@
-From 0234c546d86174fafe9ab280cf5f44de50b73676 Mon Sep 17 00:00:00 2001
-From: Carlos Rafael Giani <crg7475@mailbox.org>
-Date: Fri, 18 Mar 2022 12:06:23 +0100
-Subject: [PATCH] Disable libunwind in native OE builds by not looking for
- libunwind
-
-This is a workaround for this build error:
-
-| -- Checking for one of the modules 'libunwind'
-| CMake Error at [...]/build/tmp/work/x86_64-linux/libsdl2-native/2.0.20-r0/recipe-sysroot-native/usr/share/cmake-3.22/Modules/FindPkgConfig.cmake:890 (message):
-| None of the required 'libunwind' found
-| Call Stack (most recent call first):
-| CMakeLists.txt:1367 (pkg_search_module)
-
-By not looking for the libunwind header, the rest of the libunwind
-specific bits in the CMake build script are disabled.
-
-Upstream-Status: Inappropriate [OE specific]
-
----
- CMakeLists.txt | 2 +-
- 1 file changed, 1 insertion(+), 1 deletion(-)
-
-diff --git a/CMakeLists.txt b/CMakeLists.txt
-index 644715a..bbf2e28 100644
---- a/CMakeLists.txt
-+++ b/CMakeLists.txt
-@@ -869,7 +869,7 @@ if(SDL_LIBC)
- check_include_file(sys/types.h HAVE_SYS_TYPES_H)
- foreach(_HEADER
- stdio.h stdlib.h stddef.h stdarg.h malloc.h memory.h string.h limits.h float.h
-- strings.h wchar.h inttypes.h stdint.h ctype.h math.h iconv.h signal.h libunwind.h)
-+ strings.h wchar.h inttypes.h stdint.h ctype.h math.h iconv.h signal.h)
- string(TOUPPER "HAVE_${_HEADER}" _UPPER)
- string(REPLACE "." "_" _HAVE_H ${_UPPER})
- check_include_file("${_HEADER}" ${_HAVE_H})
diff --git a/meta/recipes-graphics/libsdl2/libsdl2/0001-video-restore-ability-to-disable-fb-accel-via-hint.patch b/meta/recipes-graphics/libsdl2/libsdl2/0001-video-restore-ability-to-disable-fb-accel-via-hint.patch
deleted file mode 100644
index fc74d30556..0000000000
--- a/meta/recipes-graphics/libsdl2/libsdl2/0001-video-restore-ability-to-disable-fb-accel-via-hint.patch
+++ /dev/null
@@ -1,36 +0,0 @@
-From 14ad91658fd296e34bb9e833281e72c871bfb189 Mon Sep 17 00:00:00 2001
-From: Alexander Kanavin <alex@linutronix.de>
-Date: Tue, 3 May 2022 12:31:50 +0200
-Subject: [PATCH] video: restore ability to disable fb accel via hint
-
-Somewhere in code refactoring between .20 and .22 this check
-was lost, and so the hint had no effect anymore.
-
-Upstream-Status: Submitted [https://github.com/libsdl-org/SDL/pull/5611]
-Signed-off-by: Alexander Kanavin <alex@linutronix.de>
----
- src/video/SDL_video.c | 8 ++++++++
- 1 file changed, 8 insertions(+)
-
-diff --git a/src/video/SDL_video.c b/src/video/SDL_video.c
-index 93c803e..cbe7616 100644
---- a/src/video/SDL_video.c
-+++ b/src/video/SDL_video.c
-@@ -2503,6 +2503,14 @@ SDL_CreateWindowFramebuffer(SDL_Window * window)
- if (!_this->checked_texture_framebuffer) {
- SDL_bool attempt_texture_framebuffer = SDL_TRUE;
-
-+ /* See if the user or application wants to specifically disable the framebuffer */
-+ const char *hint = SDL_GetHint(SDL_HINT_FRAMEBUFFER_ACCELERATION);
-+ if (hint) {
-+ if (*hint == '0' || SDL_strcasecmp(hint, "false") == 0) {
-+ attempt_texture_framebuffer = SDL_FALSE;
-+ }
-+ }
-+
- if (_this->is_dummy) { /* dummy driver never has GPU support, of course. */
- attempt_texture_framebuffer = SDL_FALSE;
- }
---
-2.30.2
-
diff --git a/meta/recipes-graphics/libsdl2/libsdl2_2.0.22.bb b/meta/recipes-graphics/libsdl2/libsdl2_2.0.22.bb
deleted file mode 100644
index b112d63784..0000000000
--- a/meta/recipes-graphics/libsdl2/libsdl2_2.0.22.bb
+++ /dev/null
@@ -1,83 +0,0 @@
-SUMMARY = "Simple DirectMedia Layer"
-DESCRIPTION = "Simple DirectMedia Layer is a cross-platform multimedia \
-library designed to provide low level access to audio, keyboard, mouse, \
-joystick, 3D hardware via OpenGL, and 2D video framebuffer."
-HOMEPAGE = "http://www.libsdl.org"
-BUGTRACKER = "http://bugzilla.libsdl.org/"
-
-SECTION = "libs"
-
-LICENSE = "Zlib & BSD-2-Clause"
-LIC_FILES_CHKSUM = "\
- file://LICENSE.txt;md5=68a088513da90254b2fbe664f42af315 \
- file://src/hidapi/LICENSE.txt;md5=7c3949a631240cb6c31c50f3eb696077 \
- file://src/hidapi/LICENSE-bsd.txt;md5=b5fa085ce0926bb50d0621620a82361f \
- file://src/video/yuv2rgb/LICENSE;md5=79f8f3418d91531e05f0fc94ca67e071 \
-"
-
-# arm-neon adds MIT license
-LICENSE:append = " ${@bb.utils.contains('PACKAGECONFIG', 'arm-neon', '& MIT', '', d)}"
-LIC_FILES_CHKSUM:append = " ${@bb.utils.contains('PACKAGECONFIG', 'arm-neon', 'file://src/video/arm/pixman-arm-neon-asm.h;md5=9a9cc1e51abbf1da58f4d9528ec9d49b;beginline=1;endline=24', '', d)}"
-
-PROVIDES = "virtual/libsdl2"
-
-SRC_URI = "http://www.libsdl.org/release/SDL2-${PV}.tar.gz \
- file://0001-video-restore-ability-to-disable-fb-accel-via-hint.patch \
- "
-SRC_URI:append:class-native = " file://0001-Disable-libunwind-in-native-OE-builds-by-not-looking.patch"
-
-S = "${WORKDIR}/SDL2-${PV}"
-
-SRC_URI[sha256sum] = "fe7cbf3127882e3fc7259a75a0cb585620272c51745d3852ab9dd87960697f2e"
-
-inherit cmake lib_package binconfig-disabled pkgconfig
-
-BINCONFIG = "${bindir}/sdl2-config"
-
-CVE_PRODUCT = "simple_directmedia_layer sdl"
-
-EXTRA_OECMAKE = "-DSDL_OSS=OFF -DSDL_ESD=OFF -DSDL_ARTS=OFF \
- -DSDL_DISKAUDIO=OFF -DSDL_NAS=OFF -DSDL_ESD_SHARED=OFF \
- -DSDL_DUMMYVIDEO=OFF \
- -DSDL_RPI=OFF \
- -DSDL_PTHREADS=ON \
- -DSDL_RPATH=OFF \
- -DSDL_SNDIO=OFF \
- -DSDL_X11_XCURSOR=OFF \
- -DSDL_X11_XDBE=OFF \
- -DSDL_X11_XFIXES=OFF \
- -DSDL_X11_XINPUT=OFF \
- -DSDL_X11_XRANDR=OFF \
- -DSDL_X11_XSCRNSAVER=OFF \
- -DSDL_X11_XSHAPE=OFF \
-"
-
-# opengl packageconfig factored out to make it easy for distros
-# and BSP layers to pick either (desktop) opengl, gles2, or no GL
-PACKAGECONFIG_GL ?= "${@bb.utils.filter('DISTRO_FEATURES', 'opengl', d)}"
-
-PACKAGECONFIG:class-native = "x11 ${PACKAGECONFIG_GL}"
-PACKAGECONFIG:class-nativesdk = "${@bb.utils.filter('DISTRO_FEATURES', 'x11', d)} ${PACKAGECONFIG_GL}"
-PACKAGECONFIG ??= " \
- ${PACKAGECONFIG_GL} \
- ${@bb.utils.filter('DISTRO_FEATURES', 'alsa directfb pulseaudio x11', d)} \
- ${@bb.utils.contains('DISTRO_FEATURES', 'wayland', 'wayland gles2', '', d)} \
- ${@bb.utils.contains("TUNE_FEATURES", "neon","arm-neon","",d)} \
-"
-PACKAGECONFIG[alsa] = "-DSDL_ALSA=ON,-DSDL_ALSA=OFF,alsa-lib,"
-PACKAGECONFIG[arm-neon] = "-DSDL_ARMNEON=ON,-DSDL_ARMNEON=OFF"
-PACKAGECONFIG[directfb] = "-DSDL_DIRECTFB=ON,-DSDL_DIRECTFB=OFF,directfb,directfb"
-PACKAGECONFIG[gles2] = "-DSDL_OPENGLES=ON,-DSDL_OPENGLES=OFF,virtual/libgles2"
-PACKAGECONFIG[jack] = "-DSDL_JACK=ON,-DSDL_JACK=OFF,jack"
-PACKAGECONFIG[kmsdrm] = "-DSDL_KMSDRM=ON,-DSDL_KMSDRM=OFF,libdrm virtual/libgbm"
-# The hidraw support doesn't catch Xbox, PS4 and Nintendo controllers,
-# so we'll just use libusb when it's available.
-PACKAGECONFIG[libusb] = ",,libusb1"
-PACKAGECONFIG[opengl] = "-DSDL_OPENGL=ON,-DSDL_OPENGL=OFF,virtual/egl"
-PACKAGECONFIG[pulseaudio] = "-DSDL_PULSEAUDIO=ON,-DSDL_PULSEAUDIO=OFF,pulseaudio"
-PACKAGECONFIG[wayland] = "-DSDL_WAYLAND=ON,-DSDL_WAYLAND=OFF,wayland-native wayland wayland-protocols libxkbcommon"
-PACKAGECONFIG[x11] = "-DSDL_X11=ON,-DSDL_X11=OFF,virtual/libx11 libxext libxrandr libxrender"
-
-CFLAGS:append:class-native = " -DNO_SHARED_MEMORY"
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-graphics/libsdl2/libsdl2_2.30.1.bb b/meta/recipes-graphics/libsdl2/libsdl2_2.30.1.bb
new file mode 100644
index 0000000000..891e91190a
--- /dev/null
+++ b/meta/recipes-graphics/libsdl2/libsdl2_2.30.1.bb
@@ -0,0 +1,85 @@
+SUMMARY = "Simple DirectMedia Layer"
+DESCRIPTION = "Simple DirectMedia Layer is a cross-platform multimedia \
+library designed to provide low level access to audio, keyboard, mouse, \
+joystick, 3D hardware via OpenGL, and 2D video framebuffer."
+HOMEPAGE = "http://www.libsdl.org"
+BUGTRACKER = "http://bugzilla.libsdl.org/"
+
+SECTION = "libs"
+
+LICENSE = "Zlib & BSD-2-Clause"
+LIC_FILES_CHKSUM = "\
+ file://LICENSE.txt;md5=25231a5b96ccdd8f39eb53c07717be64 \
+ file://src/hidapi/LICENSE.txt;md5=7c3949a631240cb6c31c50f3eb696077 \
+ file://src/hidapi/LICENSE-bsd.txt;md5=b5fa085ce0926bb50d0621620a82361f \
+ file://src/video/yuv2rgb/LICENSE;md5=79f8f3418d91531e05f0fc94ca67e071 \
+"
+
+# arm-neon adds MIT license
+LICENSE:append = " ${@bb.utils.contains('PACKAGECONFIG', 'arm-neon', '& MIT', '', d)}"
+LIC_FILES_CHKSUM:append = " ${@bb.utils.contains('PACKAGECONFIG', 'arm-neon', 'file://src/video/arm/pixman-arm-neon-asm.h;md5=9a9cc1e51abbf1da58f4d9528ec9d49b;beginline=1;endline=24', '', d)}"
+
+PROVIDES = "virtual/libsdl2"
+
+SRC_URI = "http://www.libsdl.org/release/SDL2-${PV}.tar.gz"
+
+S = "${WORKDIR}/SDL2-${PV}"
+
+SRC_URI[sha256sum] = "01215ffbc8cfc4ad165ba7573750f15ddda1f971d5a66e9dcaffd37c587f473a"
+
+inherit cmake lib_package binconfig-disabled pkgconfig upstream-version-is-even
+
+BINCONFIG = "${bindir}/sdl2-config"
+
+CVE_PRODUCT = "simple_directmedia_layer sdl"
+
+EXTRA_OECMAKE = "-DSDL_OSS=OFF -DSDL_ESD=OFF -DSDL_ARTS=OFF \
+ -DSDL_DISKAUDIO=OFF -DSDL_NAS=OFF -DSDL_ESD_SHARED=OFF \
+ -DSDL_DUMMYVIDEO=OFF \
+ -DSDL_RPI=OFF \
+ -DSDL_PTHREADS=ON \
+ -DSDL_RPATH=OFF \
+ -DSDL_SNDIO=OFF \
+ -DSDL_X11_XCURSOR=OFF \
+ -DSDL_X11_XDBE=OFF \
+ -DSDL_X11_XFIXES=OFF \
+ -DSDL_X11_XINPUT=OFF \
+ -DSDL_X11_XRANDR=OFF \
+ -DSDL_X11_XSCRNSAVER=OFF \
+ -DSDL_X11_XSHAPE=OFF \
+"
+
+# opengl packageconfig factored out to make it easy for distros
+# and BSP layers to pick either (desktop) opengl, gles2, or no GL
+PACKAGECONFIG_GL ?= "${@bb.utils.filter('DISTRO_FEATURES', 'opengl', d)}"
+
+PACKAGECONFIG:class-native = "x11 ${PACKAGECONFIG_GL}"
+PACKAGECONFIG:class-nativesdk = "${@bb.utils.filter('DISTRO_FEATURES', 'x11', d)} ${PACKAGECONFIG_GL}"
+PACKAGECONFIG ??= " \
+ ${PACKAGECONFIG_GL} \
+ ${@bb.utils.filter('DISTRO_FEATURES', 'alsa directfb pulseaudio pipewire x11 vulkan', d)} \
+ ${@bb.utils.contains('DISTRO_FEATURES', 'wayland', 'wayland gles2', '', d)} \
+ ${@bb.utils.contains("TUNE_FEATURES", "neon","arm-neon","",d)} \
+"
+PACKAGECONFIG[alsa] = "-DSDL_ALSA=ON,-DSDL_ALSA=OFF,alsa-lib,"
+PACKAGECONFIG[arm-neon] = "-DSDL_ARMNEON=ON,-DSDL_ARMNEON=OFF"
+PACKAGECONFIG[directfb] = "-DSDL_DIRECTFB=ON,-DSDL_DIRECTFB=OFF,directfb,directfb"
+PACKAGECONFIG[gles2] = "-DSDL_OPENGLES=ON,-DSDL_OPENGLES=OFF,virtual/libgles2"
+PACKAGECONFIG[jack] = "-DSDL_JACK=ON,-DSDL_JACK=OFF,jack"
+PACKAGECONFIG[kmsdrm] = "-DSDL_KMSDRM=ON,-DSDL_KMSDRM=OFF,libdrm virtual/libgbm"
+# The hidraw support doesn't catch Xbox, PS4 and Nintendo controllers,
+# so we'll just use libusb when it's available.
+PACKAGECONFIG[libusb] = ",,libusb1"
+PACKAGECONFIG[libdecor] = "-DSDL_WAYLAND_LIBDECOR=ON,-DSDL_WAYLAND_LIBDECOR=OFF,libdecor,libdecor"
+PACKAGECONFIG[opengl] = "-DSDL_OPENGL=ON,-DSDL_OPENGL=OFF,virtual/egl"
+PACKAGECONFIG[pipewire] = "-DSDL_PIPEWIRE_SHARED=ON,-DSDL_PIPEWIRE_SHARED=OFF,pipewire"
+PACKAGECONFIG[pulseaudio] = "-DSDL_PULSEAUDIO=ON,-DSDL_PULSEAUDIO=OFF,pulseaudio"
+PACKAGECONFIG[vulkan] = "-DSDL_VULKAN=ON,-DSDL_VULKAN=OFF"
+PACKAGECONFIG[wayland] = "-DSDL_WAYLAND=ON,-DSDL_WAYLAND=OFF,wayland-native wayland wayland-protocols libxkbcommon"
+PACKAGECONFIG[x11] = "-DSDL_X11=ON,-DSDL_X11=OFF,virtual/libx11 libxext libxrandr libxrender"
+
+CFLAGS:append:class-native = " -DNO_SHARED_MEMORY"
+
+FILES:${PN} += "${datadir}/licenses/SDL2/LICENSE.txt"
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-graphics/libva/libva-initial_2.14.0.bb b/meta/recipes-graphics/libva/libva-initial_2.20.0.bb
index 453096822f..453096822f 100644
--- a/meta/recipes-graphics/libva/libva-initial_2.14.0.bb
+++ b/meta/recipes-graphics/libva/libva-initial_2.20.0.bb
diff --git a/meta/recipes-graphics/libva/libva-utils_2.14.0.bb b/meta/recipes-graphics/libva/libva-utils_2.14.0.bb
deleted file mode 100644
index b816b5e96d..0000000000
--- a/meta/recipes-graphics/libva/libva-utils_2.14.0.bb
+++ /dev/null
@@ -1,32 +0,0 @@
-SUMMARY = "libva-utils is a collection of utilities from libva project"
-
-DESCRIPTION = "libva-utils is a collection of utilities \
-and examples to exercise VA-API in accordance with the libva \
-project.VA-API is an open-source library and API specification, \
-which provides access to graphics hardware acceleration capabilities \
-for video processing. It consists of a main library and driver-specific \
-acceleration backends for each supported hardware vendor"
-
-HOMEPAGE = "https://01.org/linuxmedia/vaapi"
-BUGTRACKER = "https://github.com/intel/libva-utils/issues"
-
-SECTION = "x11"
-LICENSE = "MIT"
-LIC_FILES_CHKSUM = "file://COPYING;md5=b148fc8adf19dc9aec17cf9cd29a9a5e"
-
-SRC_URI = "git://github.com/intel/libva-utils.git;branch=v2.14-branch;protocol=https"
-SRCREV = "9cf21686ad9cacef53ee6b3f13c02b830cf0c7a3"
-S = "${WORKDIR}/git"
-
-UPSTREAM_CHECK_GITTAGREGEX = "(?P<pver>(\d+(\.\d+)+))$"
-
-DEPENDS = "libva"
-
-inherit meson pkgconfig features_check
-
-# depends on libva which requires opengl
-REQUIRED_DISTRO_FEATURES = "opengl"
-
-PACKAGECONFIG ??= "${@bb.utils.filter('DISTRO_FEATURES', 'wayland x11', d)}"
-PACKAGECONFIG[x11] = "-Dx11=true, -Dx11=false,virtual/libx11 libxext libxfixes"
-PACKAGECONFIG[wayland] = "-Dwayland=true, -Dwayland=false,wayland-native wayland"
diff --git a/meta/recipes-graphics/libva/libva-utils_2.20.1.bb b/meta/recipes-graphics/libva/libva-utils_2.20.1.bb
new file mode 100644
index 0000000000..3bce9a1e32
--- /dev/null
+++ b/meta/recipes-graphics/libva/libva-utils_2.20.1.bb
@@ -0,0 +1,32 @@
+SUMMARY = "libva-utils is a collection of utilities from libva project"
+
+DESCRIPTION = "libva-utils is a collection of utilities \
+and examples to exercise VA-API in accordance with the libva \
+project.VA-API is an open-source library and API specification, \
+which provides access to graphics hardware acceleration capabilities \
+for video processing. It consists of a main library and driver-specific \
+acceleration backends for each supported hardware vendor"
+
+HOMEPAGE = "https://01.org/linuxmedia/vaapi"
+BUGTRACKER = "https://github.com/intel/libva-utils/issues"
+
+SECTION = "x11"
+LICENSE = "MIT"
+LIC_FILES_CHKSUM = "file://COPYING;md5=b148fc8adf19dc9aec17cf9cd29a9a5e"
+
+SRC_URI = "git://github.com/intel/libva-utils.git;branch=v2.20-branch;protocol=https"
+SRCREV = "2ad888bb463dc9bfb3deb512ec9faf78f1d3bfa8"
+S = "${WORKDIR}/git"
+
+UPSTREAM_CHECK_GITTAGREGEX = "(?P<pver>(\d+(\.\d+)+))$"
+
+DEPENDS = "libva"
+
+inherit meson pkgconfig features_check
+
+# depends on libva which requires opengl
+REQUIRED_DISTRO_FEATURES = "opengl"
+
+PACKAGECONFIG ??= "${@bb.utils.filter('DISTRO_FEATURES', 'wayland x11', d)}"
+PACKAGECONFIG[x11] = "-Dx11=true, -Dx11=false,virtual/libx11 libxext libxfixes"
+PACKAGECONFIG[wayland] = "-Dwayland=true, -Dwayland=false,wayland-native wayland"
diff --git a/meta/recipes-graphics/libva/libva.inc b/meta/recipes-graphics/libva/libva.inc
index 148bb03b5f..486291bced 100644
--- a/meta/recipes-graphics/libva/libva.inc
+++ b/meta/recipes-graphics/libva/libva.inc
@@ -16,17 +16,17 @@ BUGTRACKER = "https://github.com/intel/libva/issues"
SECTION = "x11"
LICENSE = "MIT"
-SRC_URI = "https://github.com/intel/libva/releases/download/${PV}/libva-${PV}.tar.bz2"
+SRC_URI = "${GITHUB_BASE_URI}/download/${PV}/libva-${PV}.tar.bz2"
LIC_FILES_CHKSUM = "file://COPYING;md5=2e48940f94acb0af582e5ef03537800f"
-SRC_URI[sha256sum] = "511e9a74d1ccfb548bc192290603082c11074ac59b17aadfd5d56a3f93e58454"
+SRC_URI[sha256sum] = "f72bdb4f48dfe71ad01f1cbefe069672a2c949a6abd51cf3c4d4784210badc49"
S = "${WORKDIR}/libva-${PV}"
-UPSTREAM_CHECK_URI = "https://github.com/intel/libva/releases"
+GITHUB_BASE_URI = "https://github.com/intel/libva/releases"
DEPENDS = "libdrm"
-inherit meson pkgconfig
+inherit meson pkgconfig github-releases
PACKAGECONFIG[x11] = "-Dwith_x11=yes,-Dwith_x11=no,virtual/libx11 libxext libxfixes"
PACKAGECONFIG[glx] = "-Dwith_glx=yes,-Dwith_glx=no,virtual/mesa"
diff --git a/meta/recipes-graphics/libva/libva_2.14.0.bb b/meta/recipes-graphics/libva/libva_2.20.0.bb
index 63dc5af8f2..63dc5af8f2 100644
--- a/meta/recipes-graphics/libva/libva_2.14.0.bb
+++ b/meta/recipes-graphics/libva/libva_2.20.0.bb
diff --git a/meta/recipes-graphics/matchbox-session/matchbox-session_0.1.bb b/meta/recipes-graphics/matchbox-session/matchbox-session_0.1.bb
index 1a16ed4374..d1f0a67d84 100644
--- a/meta/recipes-graphics/matchbox-session/matchbox-session_0.1.bb
+++ b/meta/recipes-graphics/matchbox-session/matchbox-session_0.1.bb
@@ -12,7 +12,6 @@ RCONFLICTS:${PN} = "matchbox-common"
SRC_URI = "file://matchbox-session"
S = "${WORKDIR}"
-PR = "r4"
inherit update-alternatives
diff --git a/meta/recipes-graphics/matchbox-wm/matchbox-wm/0001-Fix-build-with-gcc-10.patch b/meta/recipes-graphics/matchbox-wm/matchbox-wm/0001-Fix-build-with-gcc-10.patch
deleted file mode 100644
index 541b5c9c84..0000000000
--- a/meta/recipes-graphics/matchbox-wm/matchbox-wm/0001-Fix-build-with-gcc-10.patch
+++ /dev/null
@@ -1,41 +0,0 @@
-From 8a8f8446e803cad04d7bbceaab78ee45d9778c3c Mon Sep 17 00:00:00 2001
-From: Adrian Bunk <bunk@stusta.de>
-Date: Tue, 12 May 2020 09:44:05 +0300
-Subject: Fix build with gcc 10
-
-Upstream-Status: Pending
-Signed-off-by: Adrian Bunk <bunk@stusta.de>
----
- src/mbtheme.h | 2 +-
- src/structs.h | 2 +-
- 2 files changed, 2 insertions(+), 2 deletions(-)
-
-diff --git a/src/mbtheme.h b/src/mbtheme.h
-index aa9a7c5..ad03bde 100644
---- a/src/mbtheme.h
-+++ b/src/mbtheme.h
-@@ -46,7 +46,7 @@ typedef struct _mb_theme_param
-
- } MBThemeParam;
-
--enum {
-+typedef enum {
- LAYER_GRADIENT_HORIZ = 1,
- LAYER_GRADIENT_VERT,
- LAYER_LABEL,
-diff --git a/src/structs.h b/src/structs.h
-index 24985e7..8f53e72 100644
---- a/src/structs.h
-+++ b/src/structs.h
-@@ -148,7 +148,7 @@
-
- /* Atoms, if you change these check ewmh_init() first */
-
--enum {
-+typedef enum {
- WM_STATE = 0,
- WM_CHANGE_STATE,
- WM_PROTOCOLS,
---
-2.17.1
-
diff --git a/meta/recipes-graphics/matchbox-wm/matchbox-wm_1.2.2.bb b/meta/recipes-graphics/matchbox-wm/matchbox-wm_1.2.2.bb
deleted file mode 100644
index 81704beac4..0000000000
--- a/meta/recipes-graphics/matchbox-wm/matchbox-wm_1.2.2.bb
+++ /dev/null
@@ -1,40 +0,0 @@
-SUMMARY = "Matchbox lightweight window manager"
-HOMEPAGE = "http://matchbox-project.org"
-BUGTRACKER = "http://bugzilla.yoctoproject.org/"
-
-LICENSE = "GPL-2.0-or-later"
-LIC_FILES_CHKSUM = "file://src/wm.h;endline=21;md5=a7e844465edbcf79c282369f93caa835 \
- file://src/main.c;endline=21;md5=3e5d9f832b527b0d72dbe8e3c4c60b95 \
- file://src/wm.c;endline=21;md5=8dc9d24477d87ef5dfbc2e4927146aab"
-
-SECTION = "x11/wm"
-DEPENDS = "libmatchbox virtual/libx11 libxext libxrender startup-notification expat gconf libxcursor libxfixes"
-
-# SRCREV tagged 1.2.2
-SRCREV = "27da947e7fbdf9659f7e5bd1e92af92af6c03970"
-SRC_URI = "git://git.yoctoproject.org/matchbox-window-manager;branch=master \
- file://0001-Fix-build-with-gcc-10.patch \
- file://kbdconfig"
-
-S = "${WORKDIR}/git"
-
-inherit autotools pkgconfig features_check
-# depends on virtual/libx11
-REQUIRED_DISTRO_FEATURES = "x11"
-
-FILES:${PN} = "${bindir}/* \
- ${datadir}/matchbox \
- ${sysconfdir}/matchbox \
- ${datadir}/themes/blondie/matchbox \
- ${datadir}/themes/Default/matchbox \
- ${datadir}/themes/MBOpus/matchbox"
-
-EXTRA_OECONF = " --enable-startup-notification \
- --disable-xrm \
- --enable-expat \
- --with-expat-lib=${STAGING_LIBDIR} \
- --with-expat-includes=${STAGING_INCDIR}"
-
-do_install:prepend() {
- install ${WORKDIR}/kbdconfig ${S}/data/kbdconfig
-}
diff --git a/meta/recipes-graphics/matchbox-wm/matchbox-wm_1.2.3.bb b/meta/recipes-graphics/matchbox-wm/matchbox-wm_1.2.3.bb
new file mode 100644
index 0000000000..880d4ed666
--- /dev/null
+++ b/meta/recipes-graphics/matchbox-wm/matchbox-wm_1.2.3.bb
@@ -0,0 +1,38 @@
+SUMMARY = "Matchbox lightweight window manager"
+HOMEPAGE = "http://matchbox-project.org"
+BUGTRACKER = "http://bugzilla.yoctoproject.org/"
+
+LICENSE = "GPL-2.0-or-later"
+LIC_FILES_CHKSUM = "file://src/wm.h;endline=21;md5=ce20617ac10f26045cc57b8d977ab552 \
+ file://src/main.c;endline=21;md5=508f280276140250ce483e0a44f7a9ec \
+ file://src/wm.c;endline=21;md5=f54584fb0d48cfc2e6876e0f0e272e6c"
+
+SECTION = "x11/wm"
+DEPENDS = "libmatchbox virtual/libx11 libxext libxrender startup-notification expat gconf libxcursor libxfixes"
+
+SRCREV = "ce8c1053270d960a7235ab5c3435f707541810a4"
+SRC_URI = "git://git.yoctoproject.org/matchbox-window-manager;branch=master;protocol=https \
+ file://kbdconfig"
+
+S = "${WORKDIR}/git"
+
+inherit autotools pkgconfig features_check
+# depends on virtual/libx11
+REQUIRED_DISTRO_FEATURES = "x11"
+
+FILES:${PN} = "${bindir}/* \
+ ${datadir}/matchbox \
+ ${sysconfdir}/matchbox \
+ ${datadir}/themes/blondie/matchbox \
+ ${datadir}/themes/Default/matchbox \
+ ${datadir}/themes/MBOpus/matchbox"
+
+EXTRA_OECONF = " --enable-startup-notification \
+ --disable-xrm \
+ --enable-expat \
+ --with-expat-lib=${STAGING_LIBDIR} \
+ --with-expat-includes=${STAGING_INCDIR}"
+
+do_install:prepend() {
+ install ${WORKDIR}/kbdconfig ${S}/data/kbdconfig
+}
diff --git a/meta/recipes-graphics/mesa/files/0001-Revert-meson-do-not-pull-in-clc-for-clover.patch b/meta/recipes-graphics/mesa/files/0001-Revert-meson-do-not-pull-in-clc-for-clover.patch
new file mode 100644
index 0000000000..1711e22585
--- /dev/null
+++ b/meta/recipes-graphics/mesa/files/0001-Revert-meson-do-not-pull-in-clc-for-clover.patch
@@ -0,0 +1,53 @@
+From 051f41beda540f0ae77b341db01a6de83c9e938a Mon Sep 17 00:00:00 2001
+From: Markus Volk <f_l_k@t-online.de>
+Date: Fri, 8 Mar 2024 15:53:11 +0100
+Subject: [PATCH] Revert "meson: do not pull in clc for clover"
+
+This reverts commit 815a6647eb1383e9dc704ffcc266d85f3b13338a.
+Upstream-Status: Inappropriate [https://gitlab.freedesktop.org/mesa/mesa/-/merge_requests/27783/diffs?commit_id=a976f2c9f0c07f9e06cc9affd9124b45bc60c2bd]
+
+Once the merge request above was added, it will only be possible to provide opencl spir-v with gallium-rusticl=true. This is not yet supported in the mesa recipe.
+For now reverting this commit allows to still use clover with opencl-spirv, which would otherwise be broken starting from mesa 24.0.2.
+
+After it was merged, this patch needs to be removed and rusticl support will be required
+
+Signed-off-by: Markus Volk <f_l_k@t-online.de>
+---
+ meson.build | 3 ++-
+ src/compiler/meson.build | 2 +-
+ 2 files changed, 3 insertions(+), 2 deletions(-)
+
+diff --git a/meson.build b/meson.build
+index 2db6185..741b5d1 100644
+--- a/meson.build
++++ b/meson.build
+@@ -813,6 +813,7 @@ if _opencl != 'disabled'
+ error('The Clover OpenCL state tracker requires rtti')
+ endif
+
++ with_clc = true
+ with_gallium_opencl = true
+ with_opencl_icd = _opencl == 'icd'
+ else
+@@ -837,7 +838,7 @@ if with_gallium_rusticl
+ endif
+
+ dep_clc = null_dep
+-if with_gallium_opencl or with_clc
++if with_clc
+ dep_clc = dependency('libclc')
+ endif
+
+diff --git a/src/compiler/meson.build b/src/compiler/meson.build
+index 8d73544..1dae56d 100644
+--- a/src/compiler/meson.build
++++ b/src/compiler/meson.build
+@@ -79,7 +79,7 @@ subdir('nir')
+
+ subdir('spirv')
+
+-if with_clc
++if with_opencl_spirv
+ subdir('clc')
+ endif
+ if with_gallium
diff --git a/meta/recipes-graphics/mesa/files/0001-drisw-fix-build-without-dri3.patch b/meta/recipes-graphics/mesa/files/0001-drisw-fix-build-without-dri3.patch
new file mode 100644
index 0000000000..ab16152090
--- /dev/null
+++ b/meta/recipes-graphics/mesa/files/0001-drisw-fix-build-without-dri3.patch
@@ -0,0 +1,58 @@
+From 4bd15a419e892da843489c374c58c5b29c40b5d6 Mon Sep 17 00:00:00 2001
+From: Romain Naour <romain.naour@smile.fr>
+Date: Tue, 6 Feb 2024 09:47:09 +0100
+Subject: [PATCH 1/2] drisw: fix build without dri3
+
+commit 1887368df41 ("glx/sw: check for modifier support in the kopper path")
+added dri3_priv.h header and dri3_check_multibuffer() function in drisw that
+can be build without dri3.
+
+ i686-buildroot-linux-gnu/bin/ld: src/glx/libglx.a.p/drisw_glx.c.o: in function `driswCreateScreenDriver':
+ drisw_glx.c:(.text.driswCreateScreenDriver+0x3a0): undefined reference to `dri3_check_multibuffer'
+ collect2: error: ld returned 1 exit status
+
+Add HAVE_DRI3 guard around dri3_priv.h header and the zink code using
+dri3_check_multibuffer().
+
+Fixes: 1887368df41 ("glx/sw: check for modifier support in the kopper path")
+
+Upstream-Status: Submitted [https://gitlab.freedesktop.org/mesa/mesa/-/merge_requests/27478]
+Signed-off-by: Romain Naour <romain.naour@smile.fr>
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+---
+ src/glx/drisw_glx.c | 4 ++++
+ 1 file changed, 4 insertions(+)
+
+diff --git a/src/glx/drisw_glx.c b/src/glx/drisw_glx.c
+index 3d3f752..4b19e2d 100644
+--- a/src/glx/drisw_glx.c
++++ b/src/glx/drisw_glx.c
+@@ -32,7 +32,9 @@
+ #include <dlfcn.h>
+ #include "dri_common.h"
+ #include "drisw_priv.h"
++#ifdef HAVE_DRI3
+ #include "dri3_priv.h"
++#endif
+ #include <X11/extensions/shmproto.h>
+ #include <assert.h>
+ #include <vulkan/vulkan_core.h>
+@@ -995,6 +997,7 @@ driswCreateScreenDriver(int screen, struct glx_display *priv,
+ goto handle_error;
+ }
+
++#ifdef HAVE_DRI3
+ if (pdpyp->zink) {
+ bool err;
+ psc->has_multibuffer = dri3_check_multibuffer(priv->dpy, &err);
+@@ -1005,6 +1008,7 @@ driswCreateScreenDriver(int screen, struct glx_display *priv,
+ goto handle_error;
+ }
+ }
++#endif
+
+ glx_config_destroy_list(psc->base.configs);
+ psc->base.configs = configs;
+--
+2.44.0
+
diff --git a/meta/recipes-graphics/mesa/files/0001-futex.h-Define-__NR_futex-if-it-does-not-exist.patch b/meta/recipes-graphics/mesa/files/0001-futex.h-Define-__NR_futex-if-it-does-not-exist.patch
deleted file mode 100644
index 3b0bfa323b..0000000000
--- a/meta/recipes-graphics/mesa/files/0001-futex.h-Define-__NR_futex-if-it-does-not-exist.patch
+++ /dev/null
@@ -1,34 +0,0 @@
-From 253b042d2bf10e9abfa9cc508e0782aefd834145 Mon Sep 17 00:00:00 2001
-From: Khem Raj <raj.khem@gmail.com>
-Date: Fri, 16 Oct 2020 11:03:47 -0700
-Subject: [PATCH] futex.h: Define __NR_futex if it does not exist
-
-__NR_futex is not defines by newer architectures e.g. arc, riscv32 as
-they only have 64bit variant of time_t. Glibc defines SYS_futex interface based on
-__NR_futex, since this is used in applications, such applications start
-to fail to build for these newer architectures. This patch defines a
-fallback to alias __NR_futex to __NR_futex_tim64 so SYS_futex keeps
-working
-
-Upstream-Status: Pending
-Signed-off-by: Khem Raj <raj.khem@gmail.com>
-
----
- src/util/futex.h | 4 ++++
- 1 file changed, 4 insertions(+)
-
-diff --git a/src/util/futex.h b/src/util/futex.h
-index 43097f4..941b0ec 100644
---- a/src/util/futex.h
-+++ b/src/util/futex.h
-@@ -34,6 +34,10 @@
- #include <sys/syscall.h>
- #include <sys/time.h>
-
-+#if !defined(SYS_futex) && defined(SYS_futex_time64)
-+# define SYS_futex SYS_futex_time64
-+#endif
-+
- static inline long sys_futex(void *addr1, int op, int val1, const struct timespec *timeout, void *addr2, int val3)
- {
- return syscall(SYS_futex, addr1, op, val1, timeout, addr2, val3);
diff --git a/meta/recipes-graphics/mesa/files/0001-meson-misdetects-64bit-atomics-on-mips-clang.patch b/meta/recipes-graphics/mesa/files/0001-meson-misdetects-64bit-atomics-on-mips-clang.patch
index b08e4d86c2..baa98a0d46 100644
--- a/meta/recipes-graphics/mesa/files/0001-meson-misdetects-64bit-atomics-on-mips-clang.patch
+++ b/meta/recipes-graphics/mesa/files/0001-meson-misdetects-64bit-atomics-on-mips-clang.patch
@@ -1,11 +1,10 @@
-From d34bdbd80e5a1f309d2ba280cdc66ff0ee0e5c43 Mon Sep 17 00:00:00 2001
+From 02cc21800fe29f566add525e63f619c0536d6e7b Mon Sep 17 00:00:00 2001
From: Khem Raj <raj.khem@gmail.com>
Date: Mon, 13 Jan 2020 15:23:47 -0800
Subject: [PATCH] meson misdetects 64bit atomics on mips/clang
Upstream-Status: Pending
Signed-off-by: Khem Raj <raj.khem@gmail.com>
-
---
src/util/u_atomic.c | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/meta/recipes-graphics/mesa/files/0001-meson.build-check-for-all-linux-host_os-combinations.patch b/meta/recipes-graphics/mesa/files/0001-meson.build-check-for-all-linux-host_os-combinations.patch
index aea23d0ec2..7be7d81eeb 100644
--- a/meta/recipes-graphics/mesa/files/0001-meson.build-check-for-all-linux-host_os-combinations.patch
+++ b/meta/recipes-graphics/mesa/files/0001-meson.build-check-for-all-linux-host_os-combinations.patch
@@ -1,4 +1,4 @@
-From f9c597a2c517eb85c23cbeeb2e95c55794c74cda Mon Sep 17 00:00:00 2001
+From e8ec6b1cc5e401ba719095722d8b317d755ae613 Mon Sep 17 00:00:00 2001
From: Alistair Francis <alistair@alistair23.me>
Date: Thu, 14 Nov 2019 13:04:49 -0800
Subject: [PATCH] meson.build: check for all linux host_os combinations
@@ -14,30 +14,29 @@ Signed-off-by: Anuj Mittal <anuj.mittal@intel.com>
Signed-off-by: Fabio Berton <fabio.berton@ossystems.com.br>
Signed-off-by: Otavio Salvador <otavio@ossystems.com.br>
Signed-off-by: Alistair Francis <alistair@alistair23.me>
-
---
meson.build | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/meson.build b/meson.build
-index bca6b1f..70d06c0 100644
+index 133fd9a..817861e 100644
--- a/meson.build
+++ b/meson.build
-@@ -172,7 +172,7 @@ with_any_opengl = with_opengl or with_gles1 or with_gles2
+@@ -128,7 +128,7 @@ with_any_opengl = with_opengl or with_gles1 or with_gles2
# Only build shared_glapi if at least one OpenGL API is enabled
with_shared_glapi = with_shared_glapi and with_any_opengl
--system_has_kms_drm = ['openbsd', 'netbsd', 'freebsd', 'gnu/kfreebsd', 'dragonfly', 'linux', 'sunos'].contains(host_machine.system())
-+system_has_kms_drm = ['openbsd', 'netbsd', 'freebsd', 'dragonfly'].contains(host_machine.system()) or host_machine.system().startswith('linux')
+-system_has_kms_drm = ['openbsd', 'netbsd', 'freebsd', 'gnu/kfreebsd', 'dragonfly', 'linux', 'sunos', 'android', 'managarm'].contains(host_machine.system())
++system_has_kms_drm = ['openbsd', 'netbsd', 'freebsd', 'gnu/kfreebsd', 'dragonfly', 'linux', 'sunos', 'android', 'managarm'].contains(host_machine.system()) or host_machine.system().startswith('linux')
- dri_drivers = get_option('dri-drivers')
- if dri_drivers.length() != 0
-@@ -1074,7 +1074,7 @@ if cc.compiles('__uint128_t foo(void) { return 0; }',
+ gallium_drivers = get_option('gallium-drivers')
+ if gallium_drivers.contains('auto')
+@@ -997,7 +997,7 @@ if cc.has_function('fmemopen')
endif
# TODO: this is very incomplete
--if ['linux', 'cygwin', 'gnu', 'freebsd', 'gnu/kfreebsd', 'haiku'].contains(host_machine.system())
-+if ['linux', 'cygwin', 'gnu', 'freebsd', 'gnu/kfreebsd', 'haiku'].contains(host_machine.system()) or host_machine.system().startswith('linux')
+-if ['linux', 'cygwin', 'gnu', 'freebsd', 'gnu/kfreebsd', 'haiku', 'android', 'managarm'].contains(host_machine.system())
++if ['linux', 'cygwin', 'gnu', 'freebsd', 'gnu/kfreebsd', 'haiku', 'android', 'managarm'].contains(host_machine.system()) or host_machine.system().startswith('linux')
pre_args += '-D_GNU_SOURCE'
elif host_machine.system() == 'sunos'
pre_args += '-D__EXTENSIONS__'
diff --git a/meta/recipes-graphics/mesa/files/0001-util-format-Check-for-NEON-before-using-it.patch b/meta/recipes-graphics/mesa/files/0001-util-format-Check-for-NEON-before-using-it.patch
deleted file mode 100644
index 5c6165c281..0000000000
--- a/meta/recipes-graphics/mesa/files/0001-util-format-Check-for-NEON-before-using-it.patch
+++ /dev/null
@@ -1,47 +0,0 @@
-From fdb2face4eeac3c20eedcca7520f4e7014225fb4 Mon Sep 17 00:00:00 2001
-From: Khem Raj <raj.khem@gmail.com>
-Date: Thu, 2 Dec 2021 19:57:42 -0800
-Subject: [PATCH] util/format: Check for NEON before using it
-
-This fixes build on rpi0-w and any other machine which does not have
-neon unit and is not used as FPU unit
-
-Fixes errors e.g.
-
-In file included from ../mesa-21.3.0/src/util/format/u_format_unpack_neon.c:35:
-/mnt/b/yoe/master/build/tmp/work/arm1176jzfshf-vfp-yoe-linux-gnueabi/mesa/2_21.3.0-r0/recipe-sysroot-native/usr/lib/clang/13.0.1/include/arm_neon.h:32:2: error: "NEON support not enabled"
-
-Upstream-Status: Submitted [https://gitlab.freedesktop.org/mesa/mesa/-/merge_requests/14032]
-Signed-off-by: Khem Raj <raj.khem@gmail.com>
-
----
- src/util/format/u_format.c | 2 +-
- src/util/format/u_format_unpack_neon.c | 2 +-
- 2 files changed, 2 insertions(+), 2 deletions(-)
-
-diff --git a/src/util/format/u_format.c b/src/util/format/u_format.c
-index 36c5e52..f0a0097 100644
---- a/src/util/format/u_format.c
-+++ b/src/util/format/u_format.c
-@@ -1138,7 +1138,7 @@ static void
- util_format_unpack_table_init(void)
- {
- for (enum pipe_format format = PIPE_FORMAT_NONE; format < PIPE_FORMAT_COUNT; format++) {
--#if (defined(PIPE_ARCH_AARCH64) || defined(PIPE_ARCH_ARM)) && !defined(NO_FORMAT_ASM) && !defined(__SOFTFP__)
-+#if (defined(PIPE_ARCH_AARCH64) || (defined(__ARM_NEON) && defined(PIPE_ARCH_ARM))) && !defined(NO_FORMAT_ASM)
- const struct util_format_unpack_description *unpack = util_format_unpack_description_neon(format);
- if (unpack) {
- util_format_unpack_table[format] = unpack;
-diff --git a/src/util/format/u_format_unpack_neon.c b/src/util/format/u_format_unpack_neon.c
-index a4a5cb1..1e4f794 100644
---- a/src/util/format/u_format_unpack_neon.c
-+++ b/src/util/format/u_format_unpack_neon.c
-@@ -23,7 +23,7 @@
-
- #include <u_format.h>
-
--#if (defined(PIPE_ARCH_AARCH64) || defined(PIPE_ARCH_ARM)) && !defined(NO_FORMAT_ASM) && !defined(__SOFTFP__)
-+#if (defined(PIPE_ARCH_AARCH64) || (defined(__ARM_NEON) && defined(PIPE_ARCH_ARM))) && !defined(NO_FORMAT_ASM)
-
- /* armhf builds default to vfp, not neon, and refuses to compile neon intrinsics
- * unless you tell it "no really".
diff --git a/meta/recipes-graphics/mesa/files/0002-glxext-don-t-try-zink-if-not-enabled-in-mesa.patch b/meta/recipes-graphics/mesa/files/0002-glxext-don-t-try-zink-if-not-enabled-in-mesa.patch
new file mode 100644
index 0000000000..036a0b4945
--- /dev/null
+++ b/meta/recipes-graphics/mesa/files/0002-glxext-don-t-try-zink-if-not-enabled-in-mesa.patch
@@ -0,0 +1,42 @@
+From 62495ebb977866c52d5bed8499a547c49f0d9bc1 Mon Sep 17 00:00:00 2001
+From: Romain Naour <romain.naour@smile.fr>
+Date: Tue, 6 Feb 2024 09:47:10 +0100
+Subject: [PATCH 2/2] glxext: don't try zink if not enabled in mesa
+
+Commit 7d9ea77b459 ("glx: add automatic zink fallback loading between hw and sw drivers")
+added an automatic zink fallback even when the zink gallium is not
+enabled at build time.
+
+It leads to unexpected error log while loading drisw driver and
+zink is not installed on the rootfs:
+
+ MESA-LOADER: failed to open zink: /usr/lib/dri/zink_dri.so
+
+Fixes: 7d9ea77b459 ("glx: add automatic zink fallback loading between hw and sw drivers")
+
+Upstream-Status: Submitted [https://gitlab.freedesktop.org/mesa/mesa/-/merge_requests/27478]
+Signed-off-by: Romain Naour <romain.naour@smile.fr>
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+---
+ src/glx/glxext.c | 2 ++
+ 1 file changed, 2 insertions(+)
+
+diff --git a/src/glx/glxext.c b/src/glx/glxext.c
+index 05c825a..7a06aa9 100644
+--- a/src/glx/glxext.c
++++ b/src/glx/glxext.c
+@@ -908,9 +908,11 @@ __glXInitialize(Display * dpy)
+ #endif /* HAVE_DRI3 */
+ if (!debug_get_bool_option("LIBGL_DRI2_DISABLE", false))
+ dpyPriv->dri2Display = dri2CreateDisplay(dpy);
++#if defined(HAVE_ZINK)
+ if (!dpyPriv->dri3Display && !dpyPriv->dri2Display)
+ try_zink = !debug_get_bool_option("LIBGL_KOPPER_DISABLE", false) &&
+ !getenv("GALLIUM_DRIVER");
++#endif /* HAVE_ZINK */
+ }
+ #endif /* GLX_USE_DRM */
+ if (glx_direct)
+--
+2.44.0
+
diff --git a/meta/recipes-graphics/mesa/files/0002-meson.build-make-TLS-ELF-optional.patch b/meta/recipes-graphics/mesa/files/0002-meson.build-make-TLS-ELF-optional.patch
deleted file mode 100644
index af11baee86..0000000000
--- a/meta/recipes-graphics/mesa/files/0002-meson.build-make-TLS-ELF-optional.patch
+++ /dev/null
@@ -1,61 +0,0 @@
-From bf41fa026ae3d378e62fd83d03a6f5933b52ca04 Mon Sep 17 00:00:00 2001
-From: Alistair Francis <alistair@alistair23.me>
-Date: Thu, 14 Nov 2019 13:08:31 -0800
-Subject: [PATCH] meson.build: make TLS ELF optional
-
-USE_ELF_TLS has replaced GLX_USE_TLS so this patch is the original "make
-TLS GLX optional again" patch updated to the latest mesa.
-
-For details, see:
-https://gitlab.freedesktop.org/mesa/mesa/-/issues/966
-
-This prevents runtime segfault on musl:
-
-Traceback (most recent call last):
- File "/home/pokybuild/yocto-worker/musl-qemux86/build/meta/lib/oeqa/core/decorator/__init__.py", line 36, in wrapped_f
- return func(*args, **kwargs)
- File "/home/pokybuild/yocto-worker/musl-qemux86/build/meta/lib/oeqa/runtime/cases/parselogs.py", line 378, in test_parselogs
- self.assertEqual(errcount, 0, msg=self.msg)
-AssertionError: 1 != 0 : Log: /home/pokybuild/yocto-worker/musl-qemux86/build/build/tmp/work/qemux86-poky-linux-musl/core-image-sato-sdk/1.0-r0/target_logs/Xorg.0.log
-
-Upstream-Status: Inappropriate [configuration]
----
- meson.build | 7 +++++--
- meson_options.txt | 6 ++++++
- 2 files changed, 11 insertions(+), 2 deletions(-)
-
-diff --git a/meson.build b/meson.build
-index 70d06c0..1441611 100644
---- a/meson.build
-+++ b/meson.build
-@@ -490,8 +490,11 @@ foreach platform : _platforms
- pre_args += '-DHAVE_@0@_PLATFORM'.format(platform.to_upper())
- endforeach
-
--use_elf_tls = true
--pre_args += '-DUSE_ELF_TLS'
-+use_elf_tls = false
-+if get_option('elf-tls')
-+ use_elf_tls = true
-+ pre_args += '-DUSE_ELF_TLS'
-+endif
-
- if with_platform_android and get_option('platform-sdk-version') >= 29
- # By default the NDK compiler, at least, emits emutls references instead of
-diff --git a/meson_options.txt b/meson_options.txt
-index 1f6ef38..99cc5cb 100644
---- a/meson_options.txt
-+++ b/meson_options.txt
-@@ -440,6 +440,12 @@ option(
- value : true,
- description : 'Enable direct rendering in GLX and EGL for DRI',
- )
-+option(
-+ 'elf-tls',
-+ type : 'boolean',
-+ value : true,
-+ description : 'Enable TLS support in ELF',
-+)
- option('egl-lib-suffix',
- type : 'string',
- value : '',
diff --git a/meta/recipes-graphics/mesa/libglu_9.0.2.bb b/meta/recipes-graphics/mesa/libglu_9.0.2.bb
deleted file mode 100644
index 64fa82e5a8..0000000000
--- a/meta/recipes-graphics/mesa/libglu_9.0.2.bb
+++ /dev/null
@@ -1,28 +0,0 @@
-SUMMARY = "The OpenGL utility toolkit"
-DESCRIPTION = "GLU is a utility toolkit used with OpenGL implementations"
-
-HOMEPAGE = "http://mesa3d.org"
-BUGTRACKER = "https://bugs.freedesktop.org"
-SECTION = "x11"
-LICENSE = "MIT"
-LIC_FILES_CHKSUM = "file://include/GL/glu.h;endline=29;md5=6b79c570f644363b356456e7d44471d9 \
- file://src/libtess/tess.c;endline=29;md5=6b79c570f644363b356456e7d44471d9"
-
-# Epoch as this used to be part of mesa
-PE = "2"
-
-SRC_URI = "https://mesa.freedesktop.org/archive/glu/glu-${PV}.tar.gz"
-
-SRC_URI[sha256sum] = "24effdfb952453cc00e275e1c82ca9787506aba0282145fff054498e60e19a65"
-
-S = "${WORKDIR}/glu-${PV}"
-
-DEPENDS = "virtual/libgl"
-
-inherit autotools pkgconfig features_check
-
-# Requires libGL.so which is provided by mesa when x11 in DISTRO_FEATURES
-REQUIRED_DISTRO_FEATURES = "x11 opengl"
-
-# Remove the mesa-glu dependency in mesa-glu-dev, as mesa-glu is empty
-RDEPENDS:${PN}-dev = ""
diff --git a/meta/recipes-graphics/mesa/libglu_9.0.3.bb b/meta/recipes-graphics/mesa/libglu_9.0.3.bb
new file mode 100644
index 0000000000..8151727c52
--- /dev/null
+++ b/meta/recipes-graphics/mesa/libglu_9.0.3.bb
@@ -0,0 +1,31 @@
+SUMMARY = "The OpenGL utility toolkit"
+DESCRIPTION = "GLU is a utility toolkit used with OpenGL implementations"
+
+HOMEPAGE = "http://mesa3d.org"
+BUGTRACKER = "https://bugs.freedesktop.org"
+SECTION = "x11"
+LICENSE = "MIT"
+LIC_FILES_CHKSUM = "file://include/GL/glu.h;endline=29;md5=6b79c570f644363b356456e7d44471d9 \
+ file://src/libtess/tess.c;endline=29;md5=6b79c570f644363b356456e7d44471d9"
+
+# Epoch as this used to be part of mesa
+PE = "2"
+
+SRC_URI = "https://mesa.freedesktop.org/archive/glu/glu-${PV}.tar.xz \
+ "
+
+SRC_URI[sha256sum] = "bd43fe12f374b1192eb15fe20e45ff456b9bc26ab57f0eee919f96ca0f8a330f"
+
+S = "${WORKDIR}/glu-${PV}"
+
+DEPENDS = "virtual/libgl"
+
+inherit meson pkgconfig features_check
+
+EXTRA_OEMESON = "-Dgl_provider=gl"
+
+# Requires libGL.so which is provided by mesa when x11 in DISTRO_FEATURES
+REQUIRED_DISTRO_FEATURES = "x11 opengl"
+
+# Remove the mesa-glu dependency in mesa-glu-dev, as mesa-glu is empty
+DEV_PKG_DEPENDENCY = ""
diff --git a/meta/recipes-graphics/mesa/mesa-demos/0003-configure-Allow-to-disable-demos-which-require-GLEW-.patch b/meta/recipes-graphics/mesa/mesa-demos/0003-configure-Allow-to-disable-demos-which-require-GLEW-.patch
deleted file mode 100644
index e31827d4a3..0000000000
--- a/meta/recipes-graphics/mesa/mesa-demos/0003-configure-Allow-to-disable-demos-which-require-GLEW-.patch
+++ /dev/null
@@ -1,382 +0,0 @@
-From 779438770bedf3d53e6ad8f7cd6889b7f50daf3b Mon Sep 17 00:00:00 2001
-From: Martin Jansa <Martin.Jansa@gmail.com>
-Date: Wed, 9 Jul 2014 14:23:41 +0200
-Subject: [PATCH] configure: Allow to disable demos which require GLEW or GLU
-
-* in some systems without X11 support we don't have GLEW, but
- mesa-demos are still useful
-
-This isn't currently appropriate for upstream submission as glew has
-been replaced with glad there; glu situation would need to be re-assesed
-when upstream makes a new release, requested here:
-https://gitlab.freedesktop.org/mesa/demos/-/issues/22
-
-Upstream-Status: Inappropriate [see above]
-
-Signed-off-by: Martin Jansa <Martin.Jansa@gmail.com>
-
-Port to 8.3.0
-Signed-off-by: Jussi Kukkonen <jussi.kukkonen@intel.com>
----
- configure.ac | 49 ++++++++++++++++++++---------
- src/Makefile.am | 18 ++++++++---
- src/demos/Makefile.am | 73 ++++++++++++++++++++++++-------------------
- src/egl/Makefile.am | 8 +++--
- src/egl/opengles1/Makefile.am | 10 ++++--
- src/egl/opengles2/Makefile.am | 29 ++++++++---------
- 6 files changed, 117 insertions(+), 70 deletions(-)
-
-diff --git a/configure.ac b/configure.ac
-index 0525b09..28834cd 100644
---- a/configure.ac
-+++ b/configure.ac
-@@ -93,25 +93,44 @@ AC_EGREP_HEADER([glutInitContextProfile],
- [AC_DEFINE(HAVE_FREEGLUT)],
- [])
-
--dnl Check for GLEW
--PKG_CHECK_MODULES(GLEW, [glew >= 1.5.4])
--DEMO_CFLAGS="$DEMO_CFLAGS $GLEW_CFLAGS"
--DEMO_LIBS="$DEMO_LIBS $GLEW_LIBS"
-+AC_ARG_ENABLE([glew],
-+ [AS_HELP_STRING([--enable-glew],
-+ [build demos which require glew @<:@default=yes@:>@])],
-+ [enable_glew="$enableval"],
-+ [enable_glew=yes]
-+)
-+
-+if test "x$enable_glew" = xyes; then
-+ dnl Check for GLEW
-+ PKG_CHECK_MODULES(GLEW, [glew >= 1.5.4], [glew_enabled=yes], [glew_enabled=no])
-+ DEMO_CFLAGS="$DEMO_CFLAGS $GLEW_CFLAGS"
-+ DEMO_LIBS="$DEMO_LIBS $GLEW_LIBS"
-+fi
-
- # LIBS was set by AC_CHECK_LIB above
- LIBS=""
-
--PKG_CHECK_MODULES(GLU, [glu], [],
-- [AC_CHECK_HEADER([GL/glu.h],
-- [],
-- AC_MSG_ERROR([GLU not found]))
-- AC_CHECK_LIB([GLU],
-- [gluBeginCurve],
-- [GLU_LIBS=-lGLU],
-- AC_MSG_ERROR([GLU required])) ])
-+AC_ARG_ENABLE([glu],
-+ [AS_HELP_STRING([--enable-glu],
-+ [build demos which require glu @<:@default=yes@:>@])],
-+ [enable_glu="$enableval"],
-+ [enable_glu=yes]
-+)
-
--DEMO_CFLAGS="$DEMO_CFLAGS $GLU_CFLAGS"
--DEMO_LIBS="$DEMO_LIBS $GLU_LIBS"
-+if test "x$enable_glu" = xyes; then
-+ PKG_CHECK_MODULES(GLU, [glu], [glu_enabled=yes],
-+ [AC_CHECK_HEADER([GL/glu.h],
-+ [],
-+ AC_MSG_ERROR([GLU not found]))
-+ AC_CHECK_LIB([GLU],
-+ [gluBeginCurve],
-+ [GLU_LIBS=-lGLU
-+ glu_enabled=yes],
-+ AC_MSG_ERROR([GLU required])) ])
-+
-+ DEMO_CFLAGS="$DEMO_CFLAGS $GLU_CFLAGS"
-+ DEMO_LIBS="$DEMO_LIBS $GLU_LIBS"
-+fi
-
- AC_ARG_ENABLE([egl],
- [AS_HELP_STRING([--enable-egl],
-@@ -304,6 +323,8 @@ AC_SUBST([WAYLAND_CFLAGS])
- AC_SUBST([WAYLAND_LIBS])
-
-
-+AM_CONDITIONAL(HAVE_GLU, test "x$glu_enabled" = "xyes")
-+AM_CONDITIONAL(HAVE_GLEW, test "x$glew_enabled" = "xyes")
- AM_CONDITIONAL(HAVE_EGL, test "x$egl_enabled" = "xyes")
- AM_CONDITIONAL(HAVE_GLESV1, test "x$glesv1_enabled" = "xyes")
- AM_CONDITIONAL(HAVE_GLESV2, test "x$glesv2_enabled" = "xyes")
-diff --git a/src/Makefile.am b/src/Makefile.am
-index 1647d64..8b89dee 100644
---- a/src/Makefile.am
-+++ b/src/Makefile.am
-@@ -22,15 +22,19 @@
- # Authors:
- # Eric Anholt <eric@anholt.net>
-
-+if HAVE_GLEW
-+UTIL = util
-+endif
-+
- SUBDIRS = \
-- util \
-+ $(UTIL) \
- data \
- demos \
- egl \
- fp \
- fpglsl \
- glsl \
-- gs \
-+ gs \
- objviewer \
- osdemos \
- perf \
-@@ -40,8 +44,12 @@ SUBDIRS = \
- slang \
- tests \
- tools \
-- trivial \
-- vp \
-- vpglsl \
- wgl \
- xdemos
-+
-+if HAVE_GLEW
-+SUBDIRS += \
-+ vp \
-+ vpglsl \
-+ trivial
-+endif
-diff --git a/src/demos/Makefile.am b/src/demos/Makefile.am
-index 41603fa..ab1e3ab 100644
---- a/src/demos/Makefile.am
-+++ b/src/demos/Makefile.am
-@@ -30,91 +30,100 @@ AM_LDFLAGS = \
- $(DEMO_LIBS) \
- $(GLUT_LIBS)
-
-+bin_PROGRAMS =
-+
- if HAVE_GLUT
--bin_PROGRAMS = \
-+if HAVE_GLEW
-+bin_PROGRAMS += \
- arbfplight \
- arbfslight \
- arbocclude \
- arbocclude2 \
-- bounce \
-- clearspd \
- copypix \
- cubemap \
- cuberender \
- dinoshade \
-- dissolve \
-- drawpix \
- engine \
- fbo_firecube \
- fbotexture \
-- fire \
- fogcoord \
- fplight \
- fslight \
-+ gloss \
-+ isosurf \
-+ multiarb \
-+ paltex \
-+ pointblast \
-+ projtex \
-+ shadowtex \
-+ spriteblast \
-+ stex3d \
-+ textures \
-+ vao_demo \
-+ winpos
-+
-+copypix_LDADD = ../util/libutil.la
-+cubemap_LDADD = ../util/libutil.la
-+cuberender_LDADD = ../util/libutil.la
-+engine_LDADD = ../util/libutil.la
-+fbo_firecube_LDADD = ../util/libutil.la
-+gloss_LDADD = ../util/libutil.la
-+isosurf_LDADD = ../util/libutil.la
-+multiarb_LDADD = ../util/libutil.la
-+projtex_LDADD = ../util/libutil.la
-+textures_LDADD = ../util/libutil.la
-+winpos_LDADD = ../util/libutil.la
-+endif
-+
-+if HAVE_GLU
-+bin_PROGRAMS += \
-+ bounce \
-+ clearspd \
-+ dissolve \
-+ drawpix \
-+ fire \
- gamma \
- gearbox \
- gears \
- geartrain \
- glinfo \
-- gloss \
- gltestperf \
- ipers \
-- isosurf \
- lodbias \
- morph3d \
-- multiarb \
-- paltex \
- pixeltest \
-- pointblast \
-- projtex \
- ray \
- readpix \
- reflect \
- renormal \
-- shadowtex \
- singlebuffer \
- spectex \
-- spriteblast \
-- stex3d \
- teapot \
- terrain \
- tessdemo \
- texcyl \
- texenv \
-- textures \
- trispd \
- tunnel2 \
-- tunnel \
-- vao_demo \
-- winpos
--endif
-+ tunnel
-
- tunnel_SOURCES = \
- tunnel.c \
- tunneldat.h
-
--copypix_LDADD = ../util/libutil.la
--cubemap_LDADD = ../util/libutil.la
--cuberender_LDADD = ../util/libutil.la
--drawpix_LDADD = ../util/libutil.la
- dissolve_LDADD = ../util/libutil.la
--engine_LDADD = ../util/libutil.la
--fbo_firecube_LDADD = ../util/libutil.la
-+drawpix_LDADD = ../util/libutil.la
- fire_LDADD = ../util/libutil.la
--gloss_LDADD = ../util/libutil.la
- ipers_LDADD = ../util/libutil.la
--isosurf_LDADD = ../util/libutil.la
- lodbias_LDADD = ../util/libutil.la
--multiarb_LDADD = ../util/libutil.la
--projtex_LDADD = ../util/libutil.la
- readpix_LDADD = ../util/libutil.la
- reflect_LDADD = ../util/libutil.la
- teapot_LDADD = ../util/libutil.la
- texcyl_LDADD = ../util/libutil.la
--textures_LDADD = ../util/libutil.la
- tunnel_LDADD = ../util/libutil.la
- tunnel2_LDADD = ../util/libutil.la
--winpos_LDADD = ../util/libutil.la
-+endif
-+endif
-
- EXTRA_DIST = \
- README
-diff --git a/src/egl/Makefile.am b/src/egl/Makefile.am
-index d64a49e..4fe1ca8 100644
---- a/src/egl/Makefile.am
-+++ b/src/egl/Makefile.am
-@@ -24,8 +24,12 @@
-
- SUBDIRS = \
- eglut \
-- opengl \
-- openvg \
- opengles1 \
- opengles2 \
- oes_vg
-+
-+if HAVE_GLU
-+SUBDIRS += \
-+ opengl \
-+ openvg
-+endif
-diff --git a/src/egl/opengles1/Makefile.am b/src/egl/opengles1/Makefile.am
-index fa397c2..21853e8 100644
---- a/src/egl/opengles1/Makefile.am
-+++ b/src/egl/opengles1/Makefile.am
-@@ -36,9 +36,12 @@ AM_LDFLAGS = \
- $(EGL_LIBS) \
- -lm
-
-+noinst_PROGRAMS =
-+
- if HAVE_EGL
- if HAVE_GLESV1
--noinst_PROGRAMS = \
-+if HAVE_X11
-+bin_PROGRAMS = \
- bindtex \
- clear \
- drawtex_x11 \
-@@ -52,8 +55,6 @@ noinst_PROGRAMS = \
- torus_x11 \
- tri_x11 \
- two_win
--endif
--endif
-
- bindtex_LDADD = $(X11_LIBS)
- es1_info_LDADD = $(X11_LIBS)
-@@ -76,3 +77,6 @@ drawtex_x11_LDADD = ../eglut/libeglut_x11.la
- gears_x11_LDADD = ../eglut/libeglut_x11.la
- torus_x11_LDADD = ../eglut/libeglut_x11.la
- tri_x11_LDADD = ../eglut/libeglut_x11.la
-+endif
-+endif
-+endif
-diff --git a/src/egl/opengles2/Makefile.am b/src/egl/opengles2/Makefile.am
-index b80ba50..17f8d49 100644
---- a/src/egl/opengles2/Makefile.am
-+++ b/src/egl/opengles2/Makefile.am
-@@ -33,27 +33,28 @@ AM_LDFLAGS = \
- $(EGL_LIBS) \
- -lm
-
-+bin_PROGRAMS =
-+
- if HAVE_EGL
- if HAVE_GLESV2
--bin_PROGRAMS =
--if HAVE_X11
--bin_PROGRAMS += \
-- es2_info \
-- es2gears_x11 \
-- es2tri
--endif
- if HAVE_WAYLAND
- bin_PROGRAMS += es2gears_wayland
--endif
--endif
-+
-+es2gears_wayland_SOURCES = es2gears.c
-+es2gears_wayland_LDADD = ../eglut/libeglut_wayland.la
- endif
-
--es2_info_LDADD = $(X11_LIBS)
--es2tri_LDADD = $(X11_LIBS)
-+if HAVE_X11
-+bin_PROGRAMS += \
-+ es2tri \
-+ es2_info \
-+ es2gears_x11
-
-+es2_info_LDADD = $(X11_LIBS)
- es2gears_x11_SOURCES = es2gears.c
--
- es2gears_x11_LDADD = ../eglut/libeglut_x11.la
-+es2tri_LDADD = $(X11_LIBS)
-+endif
-+endif
-+endif
-
--es2gears_wayland_SOURCES = es2gears.c
--es2gears_wayland_LDADD = ../eglut/libeglut_wayland.la
---
-2.1.4
-
diff --git a/meta/recipes-graphics/mesa/mesa-demos/0007-Install-few-more-test-programs.patch b/meta/recipes-graphics/mesa/mesa-demos/0007-Install-few-more-test-programs.patch
deleted file mode 100644
index b27d9eafa5..0000000000
--- a/meta/recipes-graphics/mesa/mesa-demos/0007-Install-few-more-test-programs.patch
+++ /dev/null
@@ -1,43 +0,0 @@
-From 2e0367a941445a862ab99c54ec85d1357d0f73c0 Mon Sep 17 00:00:00 2001
-From: Martin Jansa <Martin.Jansa@gmail.com>
-Date: Thu, 10 Jul 2014 14:30:52 +0200
-Subject: [PATCH] Install few more test programs
-
-Upstream-Status: Pending
-
-Signed-off-by: Martin Jansa <Martin.Jansa@gmail.com>
-
-Signed-off-by: Jussi Kukkonen <jussi.kukkonen@intel.com>
-
----
- src/egl/opengl/Makefile.am | 3 +--
- src/egl/openvg/Makefile.am | 2 +-
- 2 files changed, 2 insertions(+), 3 deletions(-)
-
-diff --git a/src/egl/opengl/Makefile.am b/src/egl/opengl/Makefile.am
-index 6d184ff6..ab09d028 100644
---- a/src/egl/opengl/Makefile.am
-+++ b/src/egl/opengl/Makefile.am
-@@ -57,8 +57,7 @@ endif
-
- if HAVE_EGL
- bin_PROGRAMS = \
-- eglinfo
--noinst_PROGRAMS = \
-+ eglinfo \
- peglgears \
- $(EGL_DRM_DEMOS) \
- $(EGL_X11_DEMOS) \
-diff --git a/src/egl/openvg/Makefile.am b/src/egl/openvg/Makefile.am
-index b0f1212f..5fd1cf83 100644
---- a/src/egl/openvg/Makefile.am
-+++ b/src/egl/openvg/Makefile.am
-@@ -49,7 +49,7 @@ endif
-
- if HAVE_EGL
- if HAVE_VG
--noinst_PROGRAMS = \
-+bin_PROGRAMS = \
- $(EGL_X11_DEMOS)
- endif
- endif
diff --git a/meta/recipes-graphics/mesa/mesa-demos/0008-glsl-perf-Add-few-missing-.glsl-.vert-.frag-files-to.patch b/meta/recipes-graphics/mesa/mesa-demos/0008-glsl-perf-Add-few-missing-.glsl-.vert-.frag-files-to.patch
deleted file mode 100644
index a6d168175f..0000000000
--- a/meta/recipes-graphics/mesa/mesa-demos/0008-glsl-perf-Add-few-missing-.glsl-.vert-.frag-files-to.patch
+++ /dev/null
@@ -1,99 +0,0 @@
-From 894add34c2b5e6b4ccc78996bf681d7ec7bc9e36 Mon Sep 17 00:00:00 2001
-From: Martin Jansa <Martin.Jansa@gmail.com>
-Date: Thu, 10 Jul 2014 14:29:27 +0200
-Subject: [PATCH] glsl, perf: Add few missing .glsl, .vert, .frag files to
- EXTRA_DATA
-
-Upstream-Status: Pending
-
-Signed-off-by: Martin Jansa <Martin.Jansa@gmail.com>
-
----
- src/fpglsl/Makefile.am | 2 ++
- src/glsl/Makefile.am | 10 ++++++++--
- src/perf/Makefile.am | 6 ++++++
- src/vpglsl/Makefile.am | 1 +
- 4 files changed, 17 insertions(+), 2 deletions(-)
-
-diff --git a/src/fpglsl/Makefile.am b/src/fpglsl/Makefile.am
-index 47c1039f..fd43c919 100644
---- a/src/fpglsl/Makefile.am
-+++ b/src/fpglsl/Makefile.am
-@@ -39,10 +39,12 @@ noinst_PROGRAMS = \
- endif
-
- EXTRA_DIST = \
-+ depth-read.glsl \
- dowhile2.glsl \
- dowhile.glsl \
- forbreak.glsl \
- for.glsl \
-+ infinite-loop.glsl \
- mov.glsl \
- mov-imm.glsl \
- simpleif.glsl \
-diff --git a/src/glsl/Makefile.am b/src/glsl/Makefile.am
-index 4faa8dbf..079a29d8 100644
---- a/src/glsl/Makefile.am
-+++ b/src/glsl/Makefile.am
-@@ -37,7 +37,7 @@ AM_LDFLAGS = \
- if HAVE_GLUT
- bin_PROGRAMS = \
- array \
-- bezier \
-+ bezier \
- bitmap \
- brick \
- bump \
-@@ -123,12 +123,16 @@ EXTRA_DIST = \
- CH06-brick.vert \
- CH11-bumpmap.frag \
- CH11-bumpmap.vert \
-+ CH11-bumpmaptex.frag \
- CH11-toyball.frag \
- CH11-toyball.vert \
- CH18-mandel.frag \
- CH18-mandel.vert \
-- bezier.geom \
-+ bezier.geom \
- brick.shtest \
-+ blinking-teapot.frag \
-+ blinking-teapot.vert \
-+ convolution.frag \
- convolution.vert \
- cubemap.frag \
- mandelbrot.shtest \
-@@ -138,5 +142,7 @@ EXTRA_DIST = \
- reflect.vert \
- shadowtex.frag \
- simple.vert \
-+ simplex-noise.glsl \
- skinning.frag \
-+ skinning.vert \
- toyball.shtest
-diff --git a/src/perf/Makefile.am b/src/perf/Makefile.am
-index f0031fea..60069396 100644
---- a/src/perf/Makefile.am
-+++ b/src/perf/Makefile.am
-@@ -59,3 +59,9 @@ endif
-
- glslstateschange_LDADD = libperf.la ../util/libutil.la
- glsl_compile_time_LDADD = ../util/libutil.la
-+
-+EXTRA_DIST = \
-+ glslstateschange1.frag \
-+ glslstateschange1.vert \
-+ glslstateschange2.frag \
-+ glslstateschange2.vert
-diff --git a/src/vpglsl/Makefile.am b/src/vpglsl/Makefile.am
-index 4a85ed40..48b08f48 100644
---- a/src/vpglsl/Makefile.am
-+++ b/src/vpglsl/Makefile.am
-@@ -44,6 +44,7 @@ EXTRA_DIST = \
- func2.glsl \
- ifelse.glsl \
- if.glsl \
-+ infinite-loop.glsl \
- mov.glsl \
- nestedifs.glsl \
- nestedswizzle.glsl \
diff --git a/meta/recipes-graphics/mesa/mesa-demos/0009-glsl-perf-Install-.glsl-.vert-.frag-files.patch b/meta/recipes-graphics/mesa/mesa-demos/0009-glsl-perf-Install-.glsl-.vert-.frag-files.patch
deleted file mode 100644
index 8a98ba60d1..0000000000
--- a/meta/recipes-graphics/mesa/mesa-demos/0009-glsl-perf-Install-.glsl-.vert-.frag-files.patch
+++ /dev/null
@@ -1,71 +0,0 @@
-From 477ab6d90a17d8e4d3935be6ce8b8e154db0e3e5 Mon Sep 17 00:00:00 2001
-From: Martin Jansa <Martin.Jansa@gmail.com>
-Date: Thu, 10 Jul 2014 14:48:12 +0200
-Subject: [PATCH] glsl, perf: Install .glsl, .vert, .frag files
-
-Upstream-Status: Pending
-Signed-off-by: Martin Jansa <Martin.Jansa@gmail.com>
-
----
- src/fpglsl/Makefile.am | 3 ++-
- src/glsl/Makefile.am | 3 ++-
- src/perf/Makefile.am | 3 ++-
- src/vpglsl/Makefile.am | 3 ++-
- 4 files changed, 8 insertions(+), 4 deletions(-)
-
-diff --git a/src/fpglsl/Makefile.am b/src/fpglsl/Makefile.am
-index fd43c919..2bf51de4 100644
---- a/src/fpglsl/Makefile.am
-+++ b/src/fpglsl/Makefile.am
-@@ -38,7 +38,8 @@ noinst_PROGRAMS = \
- fp-tri
- endif
-
--EXTRA_DIST = \
-+demosdatadir=$(datadir)/$(PACKAGE)/
-+dist_demosdata_DATA= \
- depth-read.glsl \
- dowhile2.glsl \
- dowhile.glsl \
-diff --git a/src/glsl/Makefile.am b/src/glsl/Makefile.am
-index 079a29d8..f66ec299 100644
---- a/src/glsl/Makefile.am
-+++ b/src/glsl/Makefile.am
-@@ -118,7 +118,8 @@ vert_or_frag_only_LDADD = ../util/libutil.la
- vert_tex_LDADD = ../util/libutil.la
- vsraytrace_LDADD = ../util/libutil.la
-
--EXTRA_DIST = \
-+demosdatadir=$(datadir)/$(PACKAGE)/
-+dist_demosdata_DATA= \
- CH06-brick.frag \
- CH06-brick.vert \
- CH11-bumpmap.frag \
-diff --git a/src/perf/Makefile.am b/src/perf/Makefile.am
-index 60069396..469bdf45 100644
---- a/src/perf/Makefile.am
-+++ b/src/perf/Makefile.am
-@@ -60,7 +60,8 @@ endif
- glslstateschange_LDADD = libperf.la ../util/libutil.la
- glsl_compile_time_LDADD = ../util/libutil.la
-
--EXTRA_DIST = \
-+demosdatadir=$(datadir)/$(PACKAGE)/
-+dist_demosdata_DATA= \
- glslstateschange1.frag \
- glslstateschange1.vert \
- glslstateschange2.frag \
-diff --git a/src/vpglsl/Makefile.am b/src/vpglsl/Makefile.am
-index 48b08f48..55268675 100644
---- a/src/vpglsl/Makefile.am
-+++ b/src/vpglsl/Makefile.am
-@@ -38,7 +38,8 @@ noinst_PROGRAMS = \
- vp-tris
- endif
-
--EXTRA_DIST = \
-+demosdatadir=$(datadir)/$(PACKAGE)/
-+dist_demosdata_DATA= \
- for.glsl \
- func.glsl \
- func2.glsl \
diff --git a/meta/recipes-graphics/mesa/mesa-demos/0012-mesa-demos-OpenVG-demos-with-single-frame-need-eglSw.patch b/meta/recipes-graphics/mesa/mesa-demos/0012-mesa-demos-OpenVG-demos-with-single-frame-need-eglSw.patch
deleted file mode 100644
index c687647407..0000000000
--- a/meta/recipes-graphics/mesa/mesa-demos/0012-mesa-demos-OpenVG-demos-with-single-frame-need-eglSw.patch
+++ /dev/null
@@ -1,44 +0,0 @@
-From 3aa84c47e88a4c38446ce1323abf6f2c77389104 Mon Sep 17 00:00:00 2001
-From: Prabhu <prabhu.sundararaj@freescale.com>
-Date: Mon, 16 Nov 2015 17:09:32 -0600
-Subject: [PATCH] mesa-demos: OpenVG demos with single frame need eglSwapBuffer
-
-sp and text demos rendering single frame. to display the
-single frame rendered needed a eglSwapBuffer to diplay to window.
-Hence added eglutPostRedisplay to display the frame
-
-Upstream-Status: Pending
-
-Signed-off-by: Prabhu <prabhu.sundararaj@freescale.com>
----
- src/egl/openvg/sp.c | 1 +
- src/egl/openvg/text.c | 1 +
- 2 files changed, 2 insertions(+)
-
-diff --git a/src/egl/openvg/sp.c b/src/egl/openvg/sp.c
-index a20c0a3..468e91e 100644
---- a/src/egl/openvg/sp.c
-+++ b/src/egl/openvg/sp.c
-@@ -500,6 +500,7 @@ draw(void)
- }
-
- vgFlush();
-+ eglutPostRedisplay();
- }
-
-
-diff --git a/src/egl/openvg/text.c b/src/egl/openvg/text.c
-index f5c6de8..492581c 100644
---- a/src/egl/openvg/text.c
-+++ b/src/egl/openvg/text.c
-@@ -360,6 +360,7 @@ display(void)
- {
- vgClear(0, 0, width, height);
- glyph_string_draw(10.0, 10.0);
-+ eglutPostRedisplay();
- }
-
-
---
-2.5.1
-
diff --git a/meta/recipes-graphics/mesa/mesa-demos_8.4.0.bb b/meta/recipes-graphics/mesa/mesa-demos_8.4.0.bb
deleted file mode 100644
index 01e5b35d0e..0000000000
--- a/meta/recipes-graphics/mesa/mesa-demos_8.4.0.bb
+++ /dev/null
@@ -1,57 +0,0 @@
-SUMMARY = "Mesa demo applications"
-DESCRIPTION = "This package includes the demonstration application, such as glxgears. \
-These applications can be used for Mesa validation and benchmarking."
-HOMEPAGE = "http://mesa3d.org"
-BUGTRACKER = "https://bugs.freedesktop.org"
-SECTION = "x11"
-
-LICENSE = "MIT & PD"
-LIC_FILES_CHKSUM = "file://src/xdemos/glxgears.c;beginline=1;endline=20;md5=914225785450eff644a86c871d3ae00e \
- file://src/xdemos/glxdemo.c;beginline=1;endline=8;md5=b01d5ab1aee94d35b7efaa2ef48e1a06"
-
-SRC_URI = "https://mesa.freedesktop.org/archive/demos/${BPN}-${PV}.tar.bz2 \
- file://0001-mesa-demos-Add-missing-data-files.patch \
- file://0003-configure-Allow-to-disable-demos-which-require-GLEW-.patch \
- file://0004-Use-DEMOS_DATA_DIR-to-locate-data-files.patch \
- file://0007-Install-few-more-test-programs.patch \
- file://0008-glsl-perf-Add-few-missing-.glsl-.vert-.frag-files-to.patch \
- file://0009-glsl-perf-Install-.glsl-.vert-.frag-files.patch \
- file://0012-mesa-demos-OpenVG-demos-with-single-frame-need-eglSw.patch \
- "
-SRC_URI[md5sum] = "6b65a02622765522176d00f553086fa3"
-SRC_URI[sha256sum] = "01e99c94a0184e63e796728af89bfac559795fb2a0d6f506fa900455ca5fff7d"
-
-inherit autotools pkgconfig features_check
-# depends on virtual/egl, virtual/libgl ...
-REQUIRED_DISTRO_FEATURES = "opengl x11"
-
-PACKAGECONFIG ?= "drm osmesa freetype2 gbm egl gles1 gles2 \
- x11 glew glu"
-
-# The Wayland code doesn't work with Wayland 1.0, so disable it for now
-#${@bb.utils.filter('DISTRO_FEATURES', 'wayland', d)}"
-
-EXTRA_OECONF = "--with-system-data-files"
-
-PACKAGECONFIG[drm] = "--enable-libdrm,--disable-libdrm,libdrm"
-PACKAGECONFIG[egl] = "--enable-egl,--disable-egl,virtual/egl"
-PACKAGECONFIG[freetype2] = "--enable-freetype2,--disable-freetype2,freetype"
-PACKAGECONFIG[gbm] = "--enable-gbm,--disable-gbm,virtual/libgl"
-PACKAGECONFIG[gles1] = "--enable-gles1,--disable-gles1,virtual/libgles1"
-PACKAGECONFIG[gles2] = "--enable-gles2,--disable-gles2,virtual/libgles2"
-PACKAGECONFIG[glut] = "--with-glut=${STAGING_EXECPREFIXDIR},--without-glut,freeglut"
-PACKAGECONFIG[osmesa] = "--enable-osmesa,--disable-osmesa,"
-PACKAGECONFIG[vg] = "--enable-vg,--disable-vg,virtual/libopenvg"
-PACKAGECONFIG[wayland] = "--enable-wayland,--disable-wayland,virtual/libgl wayland"
-PACKAGECONFIG[x11] = "--enable-x11,--disable-x11,virtual/libx11"
-PACKAGECONFIG[glew] = "--enable-glew,--disable-glew,glew"
-PACKAGECONFIG[glu] = "--enable-glu,--disable-glu,virtual/libgl"
-
-do_install:append() {
- # it can be completely empty when all PACKAGECONFIG options are disabled
- rmdir --ignore-fail-on-non-empty ${D}${bindir}
-
- if [ -f ${D}${bindir}/clear ]; then
- mv ${D}${bindir}/clear ${D}${bindir}/clear.mesa-demos
- fi
-}
diff --git a/meta/recipes-graphics/mesa/mesa-demos_8.5.0.bb b/meta/recipes-graphics/mesa/mesa-demos_8.5.0.bb
new file mode 100644
index 0000000000..12f41d75a5
--- /dev/null
+++ b/meta/recipes-graphics/mesa/mesa-demos_8.5.0.bb
@@ -0,0 +1,43 @@
+SUMMARY = "Mesa demo applications"
+DESCRIPTION = "This package includes the demonstration application, such as glxgears. \
+These applications can be used for Mesa validation and benchmarking."
+HOMEPAGE = "http://mesa3d.org"
+BUGTRACKER = "https://bugs.freedesktop.org"
+SECTION = "x11"
+
+LICENSE = "MIT & PD"
+LIC_FILES_CHKSUM = "file://src/xdemos/glxgears.c;beginline=1;endline=20;md5=914225785450eff644a86c871d3ae00e \
+ file://src/xdemos/glxdemo.c;beginline=1;endline=8;md5=b01d5ab1aee94d35b7efaa2ef48e1a06"
+
+SRC_URI = "https://mesa.freedesktop.org/archive/demos/${PV}/${BPN}-${PV}.tar.bz2 \
+ file://0001-mesa-demos-Add-missing-data-files.patch \
+ file://0004-Use-DEMOS_DATA_DIR-to-locate-data-files.patch \
+ "
+SRC_URI[sha256sum] = "cea2df0a80f09a30f635c4eb1a672bf90c5ddee0b8e77f4d70041668ef71aac1"
+
+inherit meson pkgconfig features_check
+# depends on virtual/egl, virtual/libgl ...
+REQUIRED_DISTRO_FEATURES = "opengl x11"
+
+EXTRA_OEMESON = "-Dwith-system-data-files=true"
+
+PACKAGECONFIG ?= "drm egl gles1 gles2 \
+ ${@bb.utils.filter('DISTRO_FEATURES', 'x11 wayland', d)}"
+
+PACKAGECONFIG[drm] = "-Dlibdrm=enabled,-Dlibdrm=disabled,libdrm"
+PACKAGECONFIG[egl] = "-Degl=enabled,-Degl=disabled,virtual/egl"
+PACKAGECONFIG[gles1] = "-Dgles1=enabled,-Dgles1=disabled,virtual/libgles1"
+PACKAGECONFIG[gles2] = "-Dgles2=enabled,-Dgles2=disabled,virtual/libgles2"
+PACKAGECONFIG[glut] = "-Dwith-glut=${STAGING_EXECPREFIXDIR},,freeglut"
+PACKAGECONFIG[osmesa] = "-Dosmesa=enabled,-Dosmesa=disabled,"
+PACKAGECONFIG[wayland] = "-Dwayland=enabled,-Dwayland=disabled,virtual/libgl wayland wayland-native wayland-protocols"
+PACKAGECONFIG[x11] = "-Dx11=enabled,-Dx11=disabled,virtual/libx11 libglu"
+
+do_install:append() {
+ # it can be completely empty when all PACKAGECONFIG options are disabled
+ rmdir --ignore-fail-on-non-empty ${D}${bindir}
+
+ if [ -f ${D}${bindir}/clear ]; then
+ mv ${D}${bindir}/clear ${D}${bindir}/clear.mesa-demos
+ fi
+}
diff --git a/meta/recipes-graphics/mesa/mesa-gl_22.0.3.bb b/meta/recipes-graphics/mesa/mesa-gl_22.0.3.bb
deleted file mode 100644
index f2bc8f6b5b..0000000000
--- a/meta/recipes-graphics/mesa/mesa-gl_22.0.3.bb
+++ /dev/null
@@ -1,13 +0,0 @@
-require mesa.inc
-
-SUMMARY += " (OpenGL only, no EGL/GLES)"
-
-PROVIDES = "virtual/libgl virtual/mesa"
-
-S = "${WORKDIR}/mesa-${PV}"
-
-# At least one DRI rendering engine is required to build mesa.
-# When no X11 is available, use osmesa for the rendering engine.
-PACKAGECONFIG ??= "opengl ${@bb.utils.contains('DISTRO_FEATURES', 'x11', 'x11', 'osmesa gallium', d)}"
-PACKAGECONFIG:class-target = "opengl ${@bb.utils.contains('DISTRO_FEATURES', 'x11', 'x11', 'osmesa gallium', d)}"
-
diff --git a/meta/recipes-graphics/mesa/mesa-gl_24.0.3.bb b/meta/recipes-graphics/mesa/mesa-gl_24.0.3.bb
new file mode 100644
index 0000000000..ca160f1bfc
--- /dev/null
+++ b/meta/recipes-graphics/mesa/mesa-gl_24.0.3.bb
@@ -0,0 +1,15 @@
+require mesa.inc
+
+SUMMARY += " (OpenGL only, no EGL/GLES)"
+
+PROVIDES = "virtual/libgl virtual/mesa"
+
+S = "${WORKDIR}/mesa-${PV}"
+
+TARGET_CFLAGS = "-I${STAGING_INCDIR}/drm"
+
+# At least one DRI rendering engine is required to build mesa.
+# When no X11 is available, use osmesa for the rendering engine.
+PACKAGECONFIG ??= "opengl gallium ${@bb.utils.contains('DISTRO_FEATURES', 'x11', 'x11', 'osmesa', d)}"
+PACKAGECONFIG:class-target = "opengl gallium ${@bb.utils.contains('DISTRO_FEATURES', 'x11', 'x11', 'osmesa', d)}"
+
diff --git a/meta/recipes-graphics/mesa/mesa.inc b/meta/recipes-graphics/mesa/mesa.inc
index a5de6f9dbf..1c9fa66c72 100644
--- a/meta/recipes-graphics/mesa/mesa.inc
+++ b/meta/recipes-graphics/mesa/mesa.inc
@@ -10,34 +10,38 @@ HOMEPAGE = "http://mesa3d.org"
BUGTRACKER = "https://bugs.freedesktop.org"
SECTION = "x11"
LICENSE = "MIT"
-LIC_FILES_CHKSUM = "file://docs/license.rst;md5=9a383ee9f65a4e939d6630e9b067ff58"
+LIC_FILES_CHKSUM = "file://docs/license.rst;md5=63779ec98d78d823a9dc533a0735ef10"
PE = "2"
SRC_URI = "https://mesa.freedesktop.org/archive/mesa-${PV}.tar.xz \
file://0001-meson.build-check-for-all-linux-host_os-combinations.patch \
- file://0002-meson.build-make-TLS-ELF-optional.patch \
file://0001-meson-misdetects-64bit-atomics-on-mips-clang.patch \
- file://0001-futex.h-Define-__NR_futex-if-it-does-not-exist.patch \
- file://0001-util-format-Check-for-NEON-before-using-it.patch \
- "
+ file://0001-drisw-fix-build-without-dri3.patch \
+ file://0002-glxext-don-t-try-zink-if-not-enabled-in-mesa.patch \
+ file://0001-Revert-meson-do-not-pull-in-clc-for-clover.patch \
+"
-SRC_URI[sha256sum] = "9f2b30f5276a9abaf71aafc6979685e2636189de1a87aea2c9e69744a6d0ebb9"
+SRC_URI[sha256sum] = "77aec9a2a37b7d3596ea1640b3cc53d0b5d9b3b52abed89de07e3717e91bfdbe"
UPSTREAM_CHECK_GITTAGREGEX = "mesa-(?P<pver>\d+(\.\d+)+)"
#because we cannot rely on the fact that all apps will use pkgconfig,
#make eglplatform.h independent of MESA_EGL_NO_X11_HEADER
do_install:append() {
- if ${@bb.utils.contains('PACKAGECONFIG', 'egl', 'true', 'false', d)}; then
- sed -i -e 's/^#elif defined(__unix__) && defined(EGL_NO_X11)$/#elif defined(__unix__) \&\& defined(EGL_NO_X11) || ${@bb.utils.contains('PACKAGECONFIG', 'x11', '0', '1', d)}/' ${D}${includedir}/EGL/eglplatform.h
- fi
+ # sed can't find EGL/eglplatform.h as it doesn't get installed when glvnd enabled.
+ # So, check if EGL/eglplatform.h exists before running sed.
+ if ${@bb.utils.contains('PACKAGECONFIG', 'egl', 'true', 'false', d)} && [ -f ${D}${includedir}/EGL/eglplatform.h ]; then
+ sed -i -e 's/^#elif defined(__unix__) && defined(EGL_NO_X11)$/#elif defined(__unix__) \&\& defined(EGL_NO_X11) || ${@bb.utils.contains('PACKAGECONFIG', 'x11', '0', '1', d)}/' ${D}${includedir}/EGL/eglplatform.h
+ fi
}
DEPENDS = "expat makedepend-native flex-native bison-native libxml2-native zlib chrpath-replacement-native python3-mako-native gettext-native"
+DEPENDS:append:class-target = " ${@bb.utils.contains('PACKAGECONFIG', 'opencl', 'mesa-native', '', d)}"
EXTRANATIVEPATH += "chrpath-native"
PROVIDES = " \
${@bb.utils.contains('PACKAGECONFIG', 'opengl', 'virtual/libgl', '', d)} \
+ ${@bb.utils.contains('PACKAGECONFIG', 'glvnd', 'virtual/libglx', '', d)} \
${@bb.utils.contains('PACKAGECONFIG', 'gles', 'virtual/libgles1 virtual/libgles2 virtual/libgles3', '', d)} \
${@bb.utils.contains('PACKAGECONFIG', 'egl', 'virtual/egl', '', d)} \
${@bb.utils.contains('PACKAGECONFIG', 'gbm', 'virtual/libgbm', '', d)} \
@@ -48,17 +52,10 @@ inherit meson pkgconfig python3native gettext features_check
BBCLASSEXTEND = "native nativesdk"
-ANY_OF_DISTRO_FEATURES:class-target = "opengl vulkan"
+ANY_OF_DISTRO_FEATURES = "opengl vulkan"
PLATFORMS ??= "${@bb.utils.filter('PACKAGECONFIG', 'x11 wayland', d)}"
-export YOCTO_ALTERNATE_EXE_PATH = "${STAGING_LIBDIR}/llvm-config"
-export YOCTO_ALTERNATE_MULTILIB_NAME = "${base_libdir}"
-export LLVM_CONFIG = "${STAGING_BINDIR_NATIVE}/llvm-config${MESA_LLVM_RELEASE}"
-export WANT_LLVM_RELEASE = "${MESA_LLVM_RELEASE}"
-
-MESA_LLVM_RELEASE ?= "${LLVMVERSION}"
-
# set the MESA_BUILD_TYPE to either 'release' (default) or 'debug'
# by default the upstream mesa sources build a debug release
# here we assume the user will want a release build by default
@@ -78,26 +75,30 @@ EXTRA_OEMESON = " \
-Dplatforms='${@",".join("${PLATFORMS}".split())}' \
"
+EXTRA_OEMESON:append:class-target = " ${@bb.utils.contains('PACKAGECONFIG', 'opencl', '-Dintel-clc=system', '', d)}"
+EXTRA_OEMESON:append:class-native = " ${@bb.utils.contains('PACKAGECONFIG', 'opencl', '-Dintel-clc=enabled', '', d)}"
+
def strip_comma(s):
return s.strip(',')
PACKAGECONFIG = " \
gallium \
+ video-codecs \
${@bb.utils.filter('DISTRO_FEATURES', 'x11 vulkan wayland', d)} \
${@bb.utils.contains('DISTRO_FEATURES', 'opengl', 'opengl egl gles gbm virgl', '', d)} \
${@bb.utils.contains('DISTRO_FEATURES', 'x11 opengl', 'dri3', '', d)} \
${@bb.utils.contains('DISTRO_FEATURES', 'x11 vulkan', 'dri3', '', d)} \
- ${@bb.utils.contains('TCLIBC', 'glibc', 'elf-tls', '', d)} \
+ ${@bb.utils.contains('DISTRO_FEATURES', 'vulkan', 'zink', '', d)} \
"
+PACKAGECONFIG:append:class-native = "gallium-llvm r600"
+
# "gbm" requires "opengl"
PACKAGECONFIG[gbm] = "-Dgbm=enabled,-Dgbm=disabled"
X11_DEPS = "xorgproto virtual/libx11 libxext libxxf86vm libxdamage libxfixes xrandr"
# "x11" requires "opengl"
PACKAGECONFIG[x11] = ",-Dglx=disabled,${X11_DEPS}"
-PACKAGECONFIG[elf-tls] = "-Delf-tls=true, -Delf-tls=false"
-PACKAGECONFIG[xvmc] = "-Dgallium-xvmc=enabled,-Dgallium-xvmc=disabled,libxvmc"
PACKAGECONFIG[wayland] = ",,wayland-native wayland libdrm wayland-protocols"
PACKAGECONFIG[dri3] = "-Ddri3=enabled, -Ddri3=disabled, xorgproto libxshmfence"
@@ -105,13 +106,36 @@ PACKAGECONFIG[dri3] = "-Ddri3=enabled, -Ddri3=disabled, xorgproto libxshmfence"
# Vulkan drivers need dri3 enabled
# amd could be enabled as well but requires gallium-llvm with llvm >= 3.9
VULKAN_DRIVERS = ""
-VULKAN_DRIVERS:append:x86:class-target = ",intel"
-VULKAN_DRIVERS:append:x86-64:class-target = ",intel"
+VULKAN_DRIVERS:append:x86 = ",intel,amd"
+VULKAN_DRIVERS:append:x86-64 = ",intel,amd"
+# i686 is a 32 bit override for mesa-native
+VULKAN_DRIVERS:append:i686 = ",intel,amd"
VULKAN_DRIVERS:append ="${@bb.utils.contains('PACKAGECONFIG', 'freedreno', ',freedreno', '', d)}"
VULKAN_DRIVERS:append ="${@bb.utils.contains('PACKAGECONFIG', 'broadcom', ',broadcom', '', d)}"
-PACKAGECONFIG[vulkan] = "-Dvulkan-drivers=${@strip_comma('${VULKAN_DRIVERS}')}, -Dvulkan-drivers='',"
+VULKAN_DRIVERS:append ="${@bb.utils.contains('PACKAGECONFIG', 'gallium-llvm', ',swrast', '', d)}"
+VULKAN_DRIVERS:append ="${@bb.utils.contains('PACKAGECONFIG', 'imagination', ',imagination-experimental', '', d)}"
+PACKAGECONFIG[vulkan] = "-Dvulkan-drivers=${@strip_comma('${VULKAN_DRIVERS}')}, -Dvulkan-drivers='',glslang-native vulkan-loader vulkan-headers"
+
+# mesa development and testing tools support, per driver
+TOOLS = ""
+TOOLS_DEPS = ""
+TOOLS:append = "${@bb.utils.contains('PACKAGECONFIG', 'etnaviv', ',etnaviv', '', d)}"
+TOOLS:append = "${@bb.utils.contains('PACKAGECONFIG', 'freedreno', ',freedreno', '', d)}"
+TOOLS:append = "${@bb.utils.contains('PACKAGECONFIG', 'lima', ',lima', '', d)}"
+TOOLS:append = "${@bb.utils.contains('PACKAGECONFIG', 'panfrost', ',panfrost', '', d)}"
+TOOLS:append = "${@bb.utils.contains('PACKAGECONFIG', 'imagination', ',imagination', '', d)}"
+
+# dependencies for tools.
+TOOLS_DEPS:append = "${@bb.utils.contains('PACKAGECONFIG', 'freedreno', ' ncurses libxml2 ', '', d)}"
+
+# the fdperf tool requires libconfig (a part of meta-oe) so it needs special
+# treatment in addition to the usual 'freedreno tools'.
+PACKAGECONFIG[freedreno-fdperf] = ",,libconfig"
+
+PACKAGECONFIG[tools] = "-Dtools=${@strip_comma('${TOOLS}')}, -Dtools='', ${TOOLS_DEPS}"
PACKAGECONFIG[opengl] = "-Dopengl=true, -Dopengl=false"
+PACKAGECONFIG[glvnd] = "-Dglvnd=true, -Dglvnd=false, libglvnd"
# "gles" requires "opengl"
PACKAGECONFIG[gles] = "-Dgles1=enabled -Dgles2=enabled, -Dgles1=disabled -Dgles2=disabled"
@@ -120,7 +144,8 @@ PACKAGECONFIG[gles] = "-Dgles1=enabled -Dgles2=enabled, -Dgles1=disabled -Dgles2
PACKAGECONFIG[egl] = "-Degl=enabled, -Degl=disabled"
# "opencl" requires libclc from meta-clang and spirv-tools from OE-Core
-PACKAGECONFIG[opencl] = "-Dgallium-opencl=icd -Dopencl-spirv=true,-Dgallium-opencl=disabled -Dopencl-spirv=false,libclc spirv-tools"
+OPENCL_NATIVE = "${@bb.utils.contains('PACKAGECONFIG', 'freedreno', '-Dopencl-native=true', '', d)}"
+PACKAGECONFIG[opencl] = "-Dgallium-opencl=icd -Dopencl-spirv=true ${OPENCL_NATIVE},-Dgallium-opencl=disabled -Dopencl-spirv=false,libclc spirv-tools python3-ply-native"
PACKAGECONFIG[broadcom] = ""
PACKAGECONFIG[etnaviv] = ""
@@ -128,52 +153,65 @@ PACKAGECONFIG[freedreno] = ""
PACKAGECONFIG[kmsro] = ""
PACKAGECONFIG[vc4] = ""
PACKAGECONFIG[v3d] = ""
+PACKAGECONFIG[zink] = ""
GALLIUMDRIVERS = "swrast"
# gallium swrast was found to crash Xorg on startup in x32 qemu
GALLIUMDRIVERS:x86-x32 = ""
-GALLIUMDRIVERS:append:x86:class-target = ",i915,iris,crocus"
-GALLIUMDRIVERS:append:x86-64:class-target = ",i915,iris,crocus"
+GALLIUMDRIVERS:append:x86 = ",i915,iris,crocus"
+GALLIUMDRIVERS:append:x86-64 = ",i915,iris,crocus"
+# i686 is a 32 bit override for mesa-native
+GALLIUMDRIVERS:append:i686 = ",i915,iris,crocus"
GALLIUMDRIVERS:append ="${@bb.utils.contains('PACKAGECONFIG', 'etnaviv', ',etnaviv', '', d)}"
GALLIUMDRIVERS:append ="${@bb.utils.contains('PACKAGECONFIG', 'freedreno', ',freedreno', '', d)}"
GALLIUMDRIVERS:append ="${@bb.utils.contains('PACKAGECONFIG', 'kmsro', ',kmsro', '', d)}"
GALLIUMDRIVERS:append ="${@bb.utils.contains('PACKAGECONFIG', 'vc4', ',vc4', '', d)}"
GALLIUMDRIVERS:append ="${@bb.utils.contains('PACKAGECONFIG', 'v3d', ',v3d', '', d)}"
+GALLIUMDRIVERS:append ="${@bb.utils.contains('PACKAGECONFIG', 'zink', ',zink', '', d)}"
# radeonsi requires LLVM
GALLIUMDRIVERS_RADEONSI = "${@bb.utils.contains('PACKAGECONFIG', 'r600', ',radeonsi', '', d)}"
-GALLIUMDRIVERS_LLVM = "r300,nouveau${GALLIUMDRIVERS_RADEONSI}"
-GALLIUMDRIVERS_LLVM:append:x86:class-target = ",svga"
-GALLIUMDRIVERS_LLVM:append:x86-64:class-target = ",svga"
+GALLIUMDRIVERS_LLVM = ",r300,nouveau${GALLIUMDRIVERS_RADEONSI}"
+GALLIUMDRIVERS_LLVM:append:x86 = ",svga"
+GALLIUMDRIVERS_LLVM:append:x86-64 = ",svga"
+# i686 is a 32 bit override for mesa-native
+GALLIUMDRIVERS_LLVM:append:i686 = ",svga"
PACKAGECONFIG[r600] = ""
PACKAGECONFIG[virgl] = ""
-GALLIUMDRIVERS:append = "${@bb.utils.contains('PACKAGECONFIG', 'gallium-llvm', ',${GALLIUMDRIVERS_LLVM}', '', d)}"
+GALLIUMDRIVERS:append = "${@bb.utils.contains('PACKAGECONFIG', 'gallium-llvm', '${GALLIUMDRIVERS_LLVM}', '', d)}"
GALLIUMDRIVERS:append = "${@bb.utils.contains('PACKAGECONFIG', 'r600', ',r600', '', d)}"
GALLIUMDRIVERS:append = "${@bb.utils.contains('PACKAGECONFIG', 'virgl', ',virgl', '', d)}"
PACKAGECONFIG[gallium] = "-Dgallium-drivers=${@strip_comma('${GALLIUMDRIVERS}')}, -Dgallium-drivers='', libdrm"
-PACKAGECONFIG[gallium-llvm] = "-Dllvm=enabled -Dshared-llvm=enabled, -Dllvm=disabled, llvm${MESA_LLVM_RELEASE} llvm-native \
- elfutils"
+PACKAGECONFIG[gallium-llvm] = "-Dllvm=enabled -Dshared-llvm=enabled, -Dllvm=disabled, llvm llvm-native elfutils"
PACKAGECONFIG[xa] = "-Dgallium-xa=enabled, -Dgallium-xa=disabled"
PACKAGECONFIG[va] = "-Dgallium-va=enabled,-Dgallium-va=disabled,libva-initial"
-
PACKAGECONFIG[vdpau] = "-Dgallium-vdpau=enabled,-Dgallium-vdpau=disabled,libvdpau"
+PACKAGECONFIG[imagination] = "-Dimagination-srv=true,-Dimagination-srv=false"
+
PACKAGECONFIG[lima] = ""
GALLIUMDRIVERS:append ="${@bb.utils.contains('PACKAGECONFIG', 'lima', ',lima', '', d)}"
PACKAGECONFIG[panfrost] = ""
GALLIUMDRIVERS:append ="${@bb.utils.contains('PACKAGECONFIG', 'panfrost', ',panfrost', '', d)}"
+PACKAGECONFIG[vulkan-beta] = "-Dvulkan-beta=true,-Dvulkan-beta=false"
+
PACKAGECONFIG[osmesa] = "-Dosmesa=true,-Dosmesa=false"
+PACKAGECONFIG[perfetto] = "-Dperfetto=true,-Dperfetto=false,libperfetto"
+
PACKAGECONFIG[unwind] = "-Dlibunwind=enabled,-Dlibunwind=disabled,libunwind"
PACKAGECONFIG[lmsensors] = "-Dlmsensors=enabled,-Dlmsensors=disabled,lmsensors"
+VIDEO_CODECS ?= "${@bb.utils.contains('LICENSE_FLAGS_ACCEPTED', 'commercial', 'all', 'all_free', d)}"
+PACKAGECONFIG[video-codecs] = "-Dvideo-codecs=${VIDEO_CODECS}, -Dvideo-codecs=''"
+
# llvmpipe is slow if compiled with -fomit-frame-pointer (e.g. -O2)
FULL_OPTIMIZATION:append = " -fno-omit-frame-pointer"
@@ -181,7 +219,7 @@ CFLAGS:append:armv5 = " -DMISSING_64BIT_ATOMICS"
CFLAGS:append:armv6 = " -DMISSING_64BIT_ATOMICS"
# Remove the mesa dependency on mesa-dev, as mesa is empty
-RDEPENDS:${PN}-dev = ""
+DEV_PKG_DEPENDENCY = ""
# Khronos documentation says that include/GLES2/gl2ext.h can be used for
# OpenGL ES 3 specification as well as for OpenGL ES 2.
@@ -195,6 +233,7 @@ RDEPENDS:libopencl-mesa += "${@bb.utils.contains('PACKAGECONFIG', 'opencl', 'lib
PACKAGES =+ "libegl-mesa libegl-mesa-dev \
libosmesa libosmesa-dev \
libgl-mesa libgl-mesa-dev \
+ libglx-mesa libglx-mesa-dev \
libglapi libglapi-dev \
libgbm libgbm-dev \
libgles1-mesa libgles1-mesa-dev \
@@ -203,7 +242,7 @@ PACKAGES =+ "libegl-mesa libegl-mesa-dev \
libopencl-mesa libopencl-mesa-dev \
libxatracker libxatracker-dev \
mesa-megadriver mesa-vulkan-drivers \
- mesa-vdpau-drivers \
+ mesa-vdpau-drivers mesa-tools \
"
do_install:append () {
@@ -213,14 +252,18 @@ do_install:append () {
rm -f ${D}${libdir}/gallium-pipe/*.la
rm -f ${D}${libdir}/gbm/*.la
- # it was packaged in libdricore9.1.3-1 and preventing upgrades when debian.bbclass was used
- chrpath --delete ${D}${libdir}/dri/*_dri.so || true
-
# libwayland-egl has been moved to wayland 1.15+
rm -f ${D}${libdir}/libwayland-egl*
rm -f ${D}${libdir}/pkgconfig/wayland-egl.pc
}
+do_install:append:class-native () {
+ if ${@bb.utils.contains('PACKAGECONFIG', 'opencl', 'true', 'false', d)}; then
+ install -d ${D}${bindir}
+ install -m0755 ${B}/src/intel/compiler/intel_clc ${D}${bindir}
+ fi
+}
+
# For the packages that make up the OpenGL interfaces, inject variables so that
# they don't get Debian-renamed (which would remove the -mesa suffix), and
# RPROVIDEs/RCONFLICTs on the generic libgl name.
@@ -231,6 +274,7 @@ python __anonymous() {
suffix = "-native"
for p in (("egl", "libegl", "libegl1"),
("opengl", "libgl", "libgl1"),
+ ("glvnd", "libglx",),
("gles", "libgles1", "libglesv1-cm1"),
("gles", "libgles2", "libglesv2-2"),
("gles", "libgles3",),
@@ -281,28 +325,30 @@ python mesa_populate_packages() {
do_split_packages(d, pipe_drivers_root, r'^pipe_(.*)\.so$', 'mesa-driver-pipe-%s', 'Mesa %s pipe driver', extra_depends='')
}
-PACKAGESPLITFUNCS:prepend = "mesa_populate_packages "
+PACKAGESPLITFUNCS =+ "mesa_populate_packages"
PACKAGES_DYNAMIC += "^mesa-driver-.*"
PACKAGES_DYNAMIC:class-native = "^mesa-driver-.*-native"
FILES:mesa-megadriver = "${libdir}/dri/* ${datadir}/drirc.d"
-FILES:mesa-vulkan-drivers = "${libdir}/libvulkan_*.so ${datadir}/vulkan"
+FILES:mesa-vulkan-drivers = "${libdir}/libvulkan_*.so ${libdir}/libpowervr_rogue.so ${datadir}/vulkan"
FILES:${PN}-vdpau-drivers = "${libdir}/vdpau/*.so.*"
-FILES:libegl-mesa = "${libdir}/libEGL.so.*"
+FILES:libegl-mesa = "${libdir}/libEGL*.so.* ${datadir}/glvnd/egl_vendor.d"
FILES:libgbm = "${libdir}/libgbm.so.*"
FILES:libgles1-mesa = "${libdir}/libGLESv1*.so.*"
FILES:libgles2-mesa = "${libdir}/libGLESv2.so.*"
FILES:libgl-mesa = "${libdir}/libGL.so.*"
-FILES:libopencl-mesa = "${libdir}/libMesaOpenCL.so.* ${sysconfdir}/OpenCL/vendors/mesa.icd"
+FILES:libglx-mesa = "${libdir}/libGLX*.so.*"
+FILES:libopencl-mesa = "${libdir}/libMesaOpenCL.so.* ${libdir}/gallium-pipe/*.so ${sysconfdir}/OpenCL/vendors/mesa.icd"
FILES:libglapi = "${libdir}/libglapi.so.*"
FILES:libosmesa = "${libdir}/libOSMesa.so.*"
FILES:libxatracker = "${libdir}/libxatracker.so.*"
FILES:${PN}-dev = "${libdir}/pkgconfig/dri.pc ${includedir}/vulkan ${libdir}/vdpau/*.so"
-FILES:libegl-mesa-dev = "${libdir}/libEGL.* ${includedir}/EGL ${includedir}/KHR ${libdir}/pkgconfig/egl.pc"
+FILES:libegl-mesa-dev = "${libdir}/libEGL*.* ${includedir}/EGL ${includedir}/KHR ${libdir}/pkgconfig/egl.pc"
FILES:libgbm-dev = "${libdir}/libgbm.* ${libdir}/pkgconfig/gbm.pc ${includedir}/gbm.h"
FILES:libgl-mesa-dev = "${libdir}/libGL.* ${includedir}/GL ${libdir}/pkgconfig/gl.pc"
+FILES:libglx-mesa-dev = "${libdir}/libGLX*.*"
FILES:libglapi-dev = "${libdir}/libglapi.*"
FILES:libgles1-mesa-dev = "${libdir}/libGLESv1*.* ${includedir}/GLES ${libdir}/pkgconfig/glesv1*.pc"
FILES:libgles2-mesa-dev = "${libdir}/libGLESv2.* ${includedir}/GLES2 ${libdir}/pkgconfig/glesv2.pc"
@@ -312,6 +358,9 @@ FILES:libosmesa-dev = "${libdir}/libOSMesa.* ${includedir}/GL/osmesa.h ${libdir}
FILES:libxatracker-dev = "${libdir}/libxatracker.so ${libdir}/libxatracker.la \
${includedir}/xa_tracker.h ${includedir}/xa_composite.h ${includedir}/xa_context.h \
${libdir}/pkgconfig/xatracker.pc"
+# catch all to get all the tools and data
+FILES:${PN}-tools = "${bindir} ${datadir}"
+ALLOW_EMPTY:${PN}-tools = "1"
# Fix upgrade path from mesa to mesa-megadriver
RREPLACES:mesa-megadriver = "mesa"
diff --git a/meta/recipes-graphics/mesa/mesa_22.0.3.bb b/meta/recipes-graphics/mesa/mesa_24.0.3.bb
index 96e8aa38d6..96e8aa38d6 100644
--- a/meta/recipes-graphics/mesa/mesa_22.0.3.bb
+++ b/meta/recipes-graphics/mesa/mesa_24.0.3.bb
diff --git a/meta/recipes-graphics/mini-x-session/mini-x-session_0.1.bb b/meta/recipes-graphics/mini-x-session/mini-x-session_0.1.bb
index 12c72edb79..ba81d0b17a 100644
--- a/meta/recipes-graphics/mini-x-session/mini-x-session_0.1.bb
+++ b/meta/recipes-graphics/mini-x-session/mini-x-session_0.1.bb
@@ -3,7 +3,6 @@ DESCRIPTION = "Simple session manager for X, that provides just the right boiler
HOMEPAGE = "http://www.yoctoproject.org"
BUGTRACKER = "http://bugzilla.pokylinux.org"
-PR = "r4"
LICENSE = "GPL-2.0-only"
LIC_FILES_CHKSUM = "file://mini-x-session;endline=5;md5=b6430bffbcf05f9760e72938826b7487"
diff --git a/meta/recipes-graphics/packagegroups/packagegroup-core-weston.bb b/meta/recipes-graphics/packagegroups/packagegroup-core-weston.bb
index 0ce91ca940..dd302c89e7 100644
--- a/meta/recipes-graphics/packagegroups/packagegroup-core-weston.bb
+++ b/meta/recipes-graphics/packagegroups/packagegroup-core-weston.bb
@@ -1,6 +1,5 @@
SUMMARY = "Basic Weston compositor setup"
DESCRIPTION = "Packages required to set up a basic working Weston session"
-PR = "r1"
inherit packagegroup features_check
diff --git a/meta/recipes-graphics/packagegroups/packagegroup-core-x11-base.bb b/meta/recipes-graphics/packagegroups/packagegroup-core-x11-base.bb
index 0185c93354..db6242af33 100644
--- a/meta/recipes-graphics/packagegroups/packagegroup-core-x11-base.bb
+++ b/meta/recipes-graphics/packagegroups/packagegroup-core-x11-base.bb
@@ -1,6 +1,5 @@
SUMMARY = "Basic X11 session"
DESCRIPTION = "Packages required to set up a basic working X11 session"
-PR = "r1"
inherit packagegroup features_check
# rdepends on matchbox-wm
diff --git a/meta/recipes-graphics/packagegroups/packagegroup-core-x11-xserver.bb b/meta/recipes-graphics/packagegroups/packagegroup-core-x11-xserver.bb
index 3bb308fbba..ebc1330055 100644
--- a/meta/recipes-graphics/packagegroups/packagegroup-core-x11-xserver.bb
+++ b/meta/recipes-graphics/packagegroups/packagegroup-core-x11-xserver.bb
@@ -3,7 +3,6 @@
#
SUMMARY = "X11 display server"
-PR = "r40"
PACKAGE_ARCH = "${MACHINE_ARCH}"
@@ -11,7 +10,10 @@ inherit packagegroup features_check
# rdepends on XSERVER
REQUIRED_DISTRO_FEATURES = "x11"
-XSERVER ?= "xserver-xorg xf86-video-fbdev"
+XSERVER ?= "xserver-xorg \
+ xf86-video-fbdev \
+ xf86-video-modesetting \
+ "
XSERVERCODECS ?= ""
RDEPENDS:${PN} = "\
diff --git a/meta/recipes-graphics/packagegroups/packagegroup-core-x11.bb b/meta/recipes-graphics/packagegroups/packagegroup-core-x11.bb
index 9ca058b38b..91db7ed33b 100644
--- a/meta/recipes-graphics/packagegroups/packagegroup-core-x11.bb
+++ b/meta/recipes-graphics/packagegroups/packagegroup-core-x11.bb
@@ -2,7 +2,6 @@
# Copyright (C) 2011 Intel Corporation
#
-PR = "r40"
inherit packagegroup features_check
REQUIRED_DISTRO_FEATURES = "x11"
diff --git a/meta/recipes-graphics/pango/pango_1.50.7.bb b/meta/recipes-graphics/pango/pango_1.50.7.bb
deleted file mode 100644
index 727c170288..0000000000
--- a/meta/recipes-graphics/pango/pango_1.50.7.bb
+++ /dev/null
@@ -1,52 +0,0 @@
-SUMMARY = "Framework for layout and rendering of internationalized text"
-DESCRIPTION = "Pango is a library for laying out and rendering of text, \
-with an emphasis on internationalization. Pango can be used anywhere \
-that text layout is needed, though most of the work on Pango so far has \
-been done in the context of the GTK+ widget toolkit. Pango forms the \
-core of text and font handling for GTK+-2.x."
-HOMEPAGE = "http://www.pango.org/"
-BUGTRACKER = "http://bugzilla.gnome.org"
-SECTION = "libs"
-LICENSE = "LGPL-2.0-or-later"
-
-LIC_FILES_CHKSUM = "file://COPYING;md5=3bf50002aefd002f49e7bb854063f7e7"
-
-GNOMEBASEBUILDCLASS = "meson"
-
-inherit gnomebase gi-docgen ptest-gnome upstream-version-is-even gobject-introspection
-
-GIR_MESON_ENABLE_FLAG = "enabled"
-GIR_MESON_DISABLE_FLAG = "disabled"
-
-SRC_URI += "file://run-ptest \
- file://0001-Skip-running-test-layout-test.patch \
-"
-
-SRC_URI[archive.sha256sum] = "0477f369a3d4c695df7299a6989dc004756a7f4de27eecac405c6790b7e3ad33"
-
-DEPENDS = "glib-2.0 glib-2.0-native fontconfig freetype virtual/libiconv cairo harfbuzz fribidi"
-
-PACKAGECONFIG ??= "${@bb.utils.filter('DISTRO_FEATURES', 'x11', d)} \
- ${@bb.utils.contains('PTEST_ENABLED', '1', 'tests', '', d)}"
-
-PACKAGECONFIG[x11] = ",,virtual/libx11 libxft"
-PACKAGECONFIG[tests] = "-Dinstall-tests=true, -Dinstall-tests=false"
-PACKAGECONFIG[thai] = ",,libthai"
-
-GIR_MESON_OPTION = 'introspection'
-
-do_configure:prepend() {
- chmod +x ${S}/tests/*.py
-}
-
-LEAD_SONAME = "libpango-1.0*"
-
-FILES:${PN} = "${bindir}/* ${libdir}/libpango*${SOLIBS}"
-
-RDEPENDS:${PN}-ptest += "cantarell-fonts"
-RDEPENDS:${PN}-ptest:append:libc-glibc = " locale-base-en-us"
-
-RPROVIDES:${PN} += "pango-modules pango-module-indic-lang \
- pango-module-basic-fc pango-module-arabic-lang"
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-graphics/pango/pango_1.52.1.bb b/meta/recipes-graphics/pango/pango_1.52.1.bb
new file mode 100644
index 0000000000..a67c3791a2
--- /dev/null
+++ b/meta/recipes-graphics/pango/pango_1.52.1.bb
@@ -0,0 +1,53 @@
+SUMMARY = "Framework for layout and rendering of internationalized text"
+DESCRIPTION = "Pango is a library for laying out and rendering of text, \
+with an emphasis on internationalization. Pango can be used anywhere \
+that text layout is needed, though most of the work on Pango so far has \
+been done in the context of the GTK+ widget toolkit. Pango forms the \
+core of text and font handling for GTK+-2.x."
+HOMEPAGE = "http://www.pango.org/"
+BUGTRACKER = "http://bugzilla.gnome.org"
+SECTION = "libs"
+LICENSE = "LGPL-2.0-or-later"
+
+LIC_FILES_CHKSUM = "file://COPYING;md5=3bf50002aefd002f49e7bb854063f7e7"
+
+
+inherit gnomebase gi-docgen ptest-gnome upstream-version-is-even gobject-introspection
+
+UPSTREAM_CHECK_REGEX = "pango-(?P<pver>\d+\.(?!9\d+)\d+\.\d+)"
+
+GIR_MESON_ENABLE_FLAG = "enabled"
+GIR_MESON_DISABLE_FLAG = "disabled"
+
+SRC_URI += "file://run-ptest \
+ file://0001-Skip-running-test-layout-test.patch \
+ "
+
+SRC_URI[archive.sha256sum] = "58728a0a2d86f60761208df9493033d18ecb2497abac80ee1a274ad0c6e55f0f"
+
+DEPENDS = "glib-2.0 glib-2.0-native fontconfig freetype virtual/libiconv cairo harfbuzz fribidi"
+
+PACKAGECONFIG ??= "${@bb.utils.filter('DISTRO_FEATURES', 'x11', d)} \
+ ${@bb.utils.contains('PTEST_ENABLED', '1', 'tests', '', d)}"
+
+PACKAGECONFIG[x11] = ",,virtual/libx11 libxft"
+PACKAGECONFIG[tests] = "-Dinstall-tests=true, -Dinstall-tests=false"
+PACKAGECONFIG[thai] = "-Dlibthai=enabled,-Dlibthai=disabled,libthai"
+
+GIR_MESON_OPTION = 'introspection'
+
+do_configure:prepend() {
+ chmod +x ${S}/tests/*.py
+}
+
+LEAD_SONAME = "libpango-1.0*"
+
+FILES:${PN} = "${bindir}/* ${libdir}/libpango*${SOLIBS}"
+
+RDEPENDS:${PN}-ptest += "cantarell-fonts"
+RDEPENDS:${PN}-ptest:append:libc-glibc = " locale-base-en-us"
+
+RPROVIDES:${PN} += "pango-modules pango-module-indic-lang \
+ pango-module-basic-fc pango-module-arabic-lang"
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-graphics/piglit/piglit/0001-CMakeLists.txt-add-missing-endian.h-check.patch b/meta/recipes-graphics/piglit/piglit/0001-CMakeLists.txt-add-missing-endian.h-check.patch
deleted file mode 100644
index 68f783ebe4..0000000000
--- a/meta/recipes-graphics/piglit/piglit/0001-CMakeLists.txt-add-missing-endian.h-check.patch
+++ /dev/null
@@ -1,25 +0,0 @@
-From a8ea135125b284a8a9c965d8091b3d1d9a31849c Mon Sep 17 00:00:00 2001
-From: Alexander Kanavin <alex@linutronix.de>
-Date: Tue, 5 Apr 2022 16:51:34 +0200
-Subject: [PATCH] CMakeLists.txt: add missing endian.h check
-
-This is needed to actually have HAVE_ENDIAN_H defined properly in config.h.
-
-Upstream-Status: Submitted [https://gitlab.freedesktop.org/mesa/piglit/-/merge_requests/648]
-Signed-off-by: Alexander Kanavin <alex@linutronix.de>
----
- CMakeLists.txt | 1 +
- 1 file changed, 1 insertion(+)
-
-diff --git a/CMakeLists.txt b/CMakeLists.txt
-index 9fb0508da..158fac4bc 100644
---- a/CMakeLists.txt
-+++ b/CMakeLists.txt
-@@ -481,6 +481,7 @@ check_include_file(sys/stat.h HAVE_SYS_STAT_H)
- check_include_file(unistd.h HAVE_UNISTD_H)
- check_include_file(fcntl.h HAVE_FCNTL_H)
- check_include_file(linux/sync_file.h HAVE_LINUX_SYNC_FILE_H)
-+check_include_file(endian.h HAVE_ENDIAN_H)
-
- if(DEFINED PIGLIT_INSTALL_VERSION)
- set(PIGLIT_INSTALL_VERSION_SUFFIX
diff --git a/meta/recipes-graphics/piglit/piglit/0001-cmake-install-bash-completions-in-the-right-place.patch b/meta/recipes-graphics/piglit/piglit/0001-cmake-install-bash-completions-in-the-right-place.patch
deleted file mode 100644
index e07e810a73..0000000000
--- a/meta/recipes-graphics/piglit/piglit/0001-cmake-install-bash-completions-in-the-right-place.patch
+++ /dev/null
@@ -1,35 +0,0 @@
-From 26faa2c157a27a18a9f767976730fe0c115e3af4 Mon Sep 17 00:00:00 2001
-From: Jussi Kukkonen <jussi.kukkonen@intel.com>
-Date: Wed, 13 Jul 2016 19:19:02 +0300
-Subject: [PATCH] cmake: install bash-completions in the right place
-
-The completionsdir variable is a full path and should not be
-prefixed.
-
-This does mean the files may be installed outside of
-CMAKE_INSTALL_PREFIX -- the alternative is more difficult and
-means that bash completion files may be installed where
-bash-completion can't find them.
-
-Signed-off-by: Jussi Kukkonen <jussi.kukkonen@intel.com>
-Upstream-Status: Submitted [mailing list]
----
- CMakeLists.txt | 2 +-
- 1 file changed, 1 insertion(+), 1 deletion(-)
-
-diff --git a/CMakeLists.txt b/CMakeLists.txt
-index 8e2abba..784a8f9 100644
---- a/CMakeLists.txt
-+++ b/CMakeLists.txt
-@@ -532,7 +532,7 @@ install (
- if (BASH_COMPLETION_FOUND)
- install(
- FILES completions/bash/piglit
-- DESTINATION ${CMAKE_INSTALL_PREFIX}/${BASH_COMPLETION_COMPLETIONSDIR}/
-+ DESTINATION ${BASH_COMPLETION_COMPLETIONSDIR}/
- )
- endif (BASH_COMPLETION_FOUND)
-
---
-2.8.1
-
diff --git a/meta/recipes-graphics/piglit/piglit/0001-cmake-use-proper-WAYLAND_INCLUDE_DIRS-variable.patch b/meta/recipes-graphics/piglit/piglit/0001-cmake-use-proper-WAYLAND_INCLUDE_DIRS-variable.patch
deleted file mode 100644
index 5d6ec368ba..0000000000
--- a/meta/recipes-graphics/piglit/piglit/0001-cmake-use-proper-WAYLAND_INCLUDE_DIRS-variable.patch
+++ /dev/null
@@ -1,32 +0,0 @@
-From 3bf1beee1ddd19bc536ff2856e04ac269d43daa2 Mon Sep 17 00:00:00 2001
-From: Pascal Bach <pascal.bach@siemens.com>
-Date: Thu, 4 Oct 2018 14:43:17 +0200
-Subject: [PATCH] cmake: use proper WAYLAND_INCLUDE_DIRS variable
-
-WAYLAND_wayland-client_INCLUDEDIR is an internal variable and is not correctly
-set when cross compiling. WAYLAND_INCLUDE_DIRS includes the correct path even
-when cross compiling.
-
-Signed-off-by: Pascal Bach <pascal.bach@siemens.com>
-
-Upstream-Status: Submitted [piglit@lists.freedesktop.org]
----
- tests/util/CMakeLists.txt | 2 +-
- 1 file changed, 1 insertion(+), 1 deletion(-)
-
-diff --git a/tests/util/CMakeLists.txt b/tests/util/CMakeLists.txt
-index a5f080156..a303a9f58 100644
---- a/tests/util/CMakeLists.txt
-+++ b/tests/util/CMakeLists.txt
-@@ -97,7 +97,7 @@ if(PIGLIT_USE_WAFFLE)
- piglit-framework-gl/piglit_wl_framework.c
- )
- list(APPEND UTIL_GL_INCLUDES
-- ${WAYLAND_wayland-client_INCLUDEDIR}
-+ ${WAYLAND_INCLUDE_DIRS}
- )
- endif()
- if(PIGLIT_HAS_X11)
---
-2.11.0
-
diff --git a/meta/recipes-graphics/piglit/piglit/0001-tests-Fix-narrowing-errors-seen-with-clang.patch b/meta/recipes-graphics/piglit/piglit/0001-tests-Fix-narrowing-errors-seen-with-clang.patch
new file mode 100644
index 0000000000..08e709b1d0
--- /dev/null
+++ b/meta/recipes-graphics/piglit/piglit/0001-tests-Fix-narrowing-errors-seen-with-clang.patch
@@ -0,0 +1,50 @@
+From 77f327909e4a99c64261290cd76e234e10cc64d2 Mon Sep 17 00:00:00 2001
+From: Khem Raj <raj.khem@gmail.com>
+Date: Wed, 3 May 2023 21:59:43 -0700
+Subject: [PATCH] tests: Fix narrowing errors seen with clang
+
+Fixes
+piglit-test-pattern.cpp:656:26: error: type 'float' cannot be narrowed to 'int' in initiali
+zer list [-Wc++11-narrowing]
+
+Upstream-Status: Submitted [https://gitlab.freedesktop.org/mesa/piglit/-/merge_requests/807]
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+---
+ .../spec/ext_framebuffer_multisample/draw-buffers-common.cpp | 4 ++--
+ tests/util/piglit-test-pattern.cpp | 4 ++--
+ 2 files changed, 4 insertions(+), 4 deletions(-)
+
+diff --git a/tests/spec/ext_framebuffer_multisample/draw-buffers-common.cpp b/tests/spec/ext_framebuffer_multisample/draw-buffers-common.cpp
+index 48e1ad4a5..b36830c45 100644
+--- a/tests/spec/ext_framebuffer_multisample/draw-buffers-common.cpp
++++ b/tests/spec/ext_framebuffer_multisample/draw-buffers-common.cpp
+@@ -353,8 +353,8 @@ draw_pattern(bool sample_alpha_to_coverage,
+ float vertices[4][2] = {
+ { 0.0f, 0.0f + i * (pattern_height / num_rects) },
+ { 0.0f, (i + 1.0f) * (pattern_height / num_rects) },
+- { pattern_width, (i + 1.0f) * (pattern_height / num_rects) },
+- { pattern_width, 0.0f + i * (pattern_height / num_rects) } };
++ { static_cast<float>(pattern_width), (i + 1.0f) * (pattern_height / num_rects) },
++ { static_cast<float>(pattern_width), 0.0f + i * (pattern_height / num_rects) } };
+
+ glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE,
+ sizeof(vertices[0]),
+diff --git a/tests/util/piglit-test-pattern.cpp b/tests/util/piglit-test-pattern.cpp
+index 43d451d6a..52ee94457 100644
+--- a/tests/util/piglit-test-pattern.cpp
++++ b/tests/util/piglit-test-pattern.cpp
+@@ -653,12 +653,12 @@ ColorGradientSunburst::draw_with_scale_and_offset(const float (*proj)[4],
+ {
+ switch (out_type) {
+ case GL_INT: {
+- int clear_color[4] = { offset, offset, offset, offset };
++ int clear_color[4] = { static_cast<int>(offset), static_cast<int>(offset), static_cast<int>(offset), static_cast<int>(offset) };
+ glClearBufferiv(GL_COLOR, 0, clear_color);
+ break;
+ }
+ case GL_UNSIGNED_INT: {
+- unsigned clear_color[4] = { offset, offset, offset, offset };
++ unsigned clear_color[4] = { static_cast<unsigned>(offset), static_cast<unsigned>(offset), static_cast<unsigned>(offset), static_cast<unsigned>(offset) };
+ glClearBufferuiv(GL_COLOR, 0, clear_color);
+ break;
+ }
diff --git a/meta/recipes-graphics/piglit/piglit/0001-utils-Include-libgen.h-on-musl-linux-systems.patch b/meta/recipes-graphics/piglit/piglit/0001-utils-Include-libgen.h-on-musl-linux-systems.patch
new file mode 100644
index 0000000000..2875dd52ec
--- /dev/null
+++ b/meta/recipes-graphics/piglit/piglit/0001-utils-Include-libgen.h-on-musl-linux-systems.patch
@@ -0,0 +1,83 @@
+From 6c852e6ac292008137a6f3a8aa908090bb5b4b11 Mon Sep 17 00:00:00 2001
+From: Erik Faye-Lund <erik.faye-lund@collabora.com>
+Date: Tue, 19 Mar 2024 16:34:02 +0100
+Subject: [PATCH] properly check for libgen.h
+
+Some users are reporting that basename is not available when building on
+Linux with musl libc. And since the POSIX spec[1] says that basename is
+defined in libgen.h, we should include that when available.
+
+So let's properly detect the header, and include it if it exists. This
+should hopefully make things a bit more robust.
+
+Since we're also including this from the CL program-tester, let's
+rearrange the includes a bit so we know that config.h has been included.
+
+Upstream-Status: Submitted [https://gitlab.freedesktop.org/mesa/piglit/-/merge_requests/888]
+Reviewed-by: David Heidelberg <david.heidelberg@collabora.com>
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+---
+ CMakeLists.txt | 1 +
+ tests/cl/program/program-tester.c | 5 ++++-
+ tests/util/config.h.in | 1 +
+ tests/util/piglit-util.h | 4 ++--
+ 4 files changed, 8 insertions(+), 3 deletions(-)
+
+diff --git a/CMakeLists.txt b/CMakeLists.txt
+index dd2bf67125..5563fe0e20 100644
+--- a/CMakeLists.txt
++++ b/CMakeLists.txt
+@@ -492,6 +492,7 @@ check_include_file(unistd.h HAVE_UNISTD_H)
+ check_include_file(fcntl.h HAVE_FCNTL_H)
+ check_include_file(linux/sync_file.h HAVE_LINUX_SYNC_FILE_H)
+ check_include_file(endian.h HAVE_ENDIAN_H)
++check_include_file(libgen.h HAVE_LIBGEN_H)
+
+ if(DEFINED PIGLIT_INSTALL_VERSION)
+ set(PIGLIT_INSTALL_VERSION_SUFFIX
+diff --git a/tests/cl/program/program-tester.c b/tests/cl/program/program-tester.c
+index 97fe64906d..e47fb5aacc 100644
+--- a/tests/cl/program/program-tester.c
++++ b/tests/cl/program/program-tester.c
+@@ -31,10 +31,13 @@
+ #include <inttypes.h>
+ #include <math.h>
+ #include <regex.h>
+-#include <libgen.h>
+
+ #include "piglit-framework-cl-program.h"
+
++#ifdef HAVE_LIBGEN_H
++#include <libgen.h>
++#endif
++
+ /* Regexes */
+
+ /*
+diff --git a/tests/util/config.h.in b/tests/util/config.h.in
+index 8ed5af1709..437eb91418 100644
+--- a/tests/util/config.h.in
++++ b/tests/util/config.h.in
+@@ -16,3 +16,4 @@
+ #cmakedefine HAVE_SYS_RESOURCE_H 1
+ #cmakedefine HAVE_UNISTD_H 1
+ #cmakedefine HAVE_ENDIAN_H 1
++#cmakedefine HAVE_LIBGEN_H 1
+diff --git a/tests/util/piglit-util.h b/tests/util/piglit-util.h
+index 4d3606c708..de999980b6 100644
+--- a/tests/util/piglit-util.h
++++ b/tests/util/piglit-util.h
+@@ -52,8 +52,8 @@ extern "C" {
+ #include <math.h>
+ #include <float.h>
+
+-#if defined(__APPLE__) || defined(__MINGW32__)
+-# include "libgen.h" // for basename
++#ifdef HAVE_LIBGEN_H
++# include <libgen.h> // for basename
+ #elif defined(_MSC_VER)
+
+ static inline char *
+--
+GitLab
+
diff --git a/meta/recipes-graphics/piglit/piglit/0002-cmake-use-proper-WAYLAND_INCLUDE_DIRS-variable.patch b/meta/recipes-graphics/piglit/piglit/0002-cmake-use-proper-WAYLAND_INCLUDE_DIRS-variable.patch
new file mode 100644
index 0000000000..70bb55053d
--- /dev/null
+++ b/meta/recipes-graphics/piglit/piglit/0002-cmake-use-proper-WAYLAND_INCLUDE_DIRS-variable.patch
@@ -0,0 +1,29 @@
+From cb8e4b99fcfe81399e3e6d922156db4a48a39a8e Mon Sep 17 00:00:00 2001
+From: Pascal Bach <pascal.bach@siemens.com>
+Date: Thu, 4 Oct 2018 14:43:17 +0200
+Subject: [PATCH] cmake: use proper WAYLAND_INCLUDE_DIRS variable
+
+WAYLAND_wayland-client_INCLUDEDIR is an internal variable and is not correctly
+set when cross compiling. WAYLAND_INCLUDE_DIRS includes the correct path even
+when cross compiling.
+
+Signed-off-by: Pascal Bach <pascal.bach@siemens.com>
+
+Upstream-Status: Submitted [piglit@lists.freedesktop.org]
+---
+ tests/util/CMakeLists.txt | 2 +-
+ 1 file changed, 1 insertion(+), 1 deletion(-)
+
+diff --git a/tests/util/CMakeLists.txt b/tests/util/CMakeLists.txt
+index 1714ab41f..3b67aa7da 100644
+--- a/tests/util/CMakeLists.txt
++++ b/tests/util/CMakeLists.txt
+@@ -97,7 +97,7 @@ if(PIGLIT_USE_WAFFLE)
+ piglit-framework-gl/piglit_wl_framework.c
+ )
+ list(APPEND UTIL_GL_INCLUDES
+- ${WAYLAND_wayland-client_INCLUDEDIR}
++ ${WAYLAND_INCLUDE_DIRS}
+ )
+ endif()
+ if(PIGLIT_HAS_X11)
diff --git a/meta/recipes-graphics/piglit/piglit/0002-tests-util-piglit-shader.c-do-not-hardcode-build-pat.patch b/meta/recipes-graphics/piglit/piglit/0002-tests-util-piglit-shader.c-do-not-hardcode-build-pat.patch
deleted file mode 100644
index 16c7c5c803..0000000000
--- a/meta/recipes-graphics/piglit/piglit/0002-tests-util-piglit-shader.c-do-not-hardcode-build-pat.patch
+++ /dev/null
@@ -1,30 +0,0 @@
-From 1c67250308a92d4991ed05d9d240090ab84accae Mon Sep 17 00:00:00 2001
-From: Alexander Kanavin <alex.kanavin@gmail.com>
-Date: Tue, 10 Nov 2020 17:13:50 +0000
-Subject: [PATCH 2/2] tests/util/piglit-shader.c: do not hardcode build path
- into target binary
-
-This helps reproducibilty.
-
-Upstream-Status: Inappropriate [oe-core specific]
-Signed-off-by: Alexander Kanavin <alex.kanavin@gmail.com>
----
- tests/util/piglit-shader.c | 2 +-
- 1 file changed, 1 insertion(+), 1 deletion(-)
-
-diff --git a/tests/util/piglit-shader.c b/tests/util/piglit-shader.c
-index 4fd68d21e..c9ea8295e 100644
---- a/tests/util/piglit-shader.c
-+++ b/tests/util/piglit-shader.c
-@@ -73,7 +73,7 @@ piglit_compile_shader(GLenum target, const char *filename)
-
- source_dir = getenv("PIGLIT_SOURCE_DIR");
- if (source_dir == NULL) {
-- source_dir = SOURCE_DIR;
-+ source_dir = ".";
- }
-
- snprintf(filename_with_path, FILENAME_MAX - 1,
---
-2.17.1
-
diff --git a/meta/recipes-graphics/piglit/piglit/0003-tests-util-piglit-shader.c-do-not-hardcode-build-pat.patch b/meta/recipes-graphics/piglit/piglit/0003-tests-util-piglit-shader.c-do-not-hardcode-build-pat.patch
new file mode 100644
index 0000000000..ce5cbd3ea9
--- /dev/null
+++ b/meta/recipes-graphics/piglit/piglit/0003-tests-util-piglit-shader.c-do-not-hardcode-build-pat.patch
@@ -0,0 +1,27 @@
+From 7d2d23125f1946a7b74f9a427388d469500fcd8d Mon Sep 17 00:00:00 2001
+From: Alexander Kanavin <alex.kanavin@gmail.com>
+Date: Tue, 10 Nov 2020 17:13:50 +0000
+Subject: [PATCH] tests/util/piglit-shader.c: do not hardcode build path into
+ target binary
+
+This helps reproducibilty.
+
+Upstream-Status: Inappropriate [oe-core specific]
+Signed-off-by: Alexander Kanavin <alex.kanavin@gmail.com>
+---
+ tests/util/piglit-shader.c | 2 +-
+ 1 file changed, 1 insertion(+), 1 deletion(-)
+
+diff --git a/tests/util/piglit-shader.c b/tests/util/piglit-shader.c
+index 1787eb180..9e74704b1 100644
+--- a/tests/util/piglit-shader.c
++++ b/tests/util/piglit-shader.c
+@@ -73,7 +73,7 @@ piglit_compile_shader(GLenum target, const char *filename)
+
+ source_dir = getenv("PIGLIT_SOURCE_DIR");
+ if (source_dir == NULL) {
+- source_dir = SOURCE_DIR;
++ source_dir = ".";
+ }
+
+ snprintf(filename_with_path, FILENAME_MAX - 1,
diff --git a/meta/recipes-graphics/piglit/piglit_git.bb b/meta/recipes-graphics/piglit/piglit_git.bb
index 28f76fed37..99d75e64f8 100644
--- a/meta/recipes-graphics/piglit/piglit_git.bb
+++ b/meta/recipes-graphics/piglit/piglit_git.bb
@@ -7,16 +7,16 @@ LICENSE = "MIT & LGPL-2.0-or-later & GPL-3.0-only & GPL-2.0-or-later & BSD-3-Cla
LIC_FILES_CHKSUM = "file://COPYING;md5=b2beded7103a3d8a442a2a0391d607b0"
SRC_URI = "git://gitlab.freedesktop.org/mesa/piglit.git;protocol=https;branch=main \
- file://0001-cmake-install-bash-completions-in-the-right-place.patch \
- file://0001-cmake-use-proper-WAYLAND_INCLUDE_DIRS-variable.patch \
- file://0002-tests-util-piglit-shader.c-do-not-hardcode-build-pat.patch \
- file://0001-CMakeLists.txt-add-missing-endian.h-check.patch \
+ file://0002-cmake-use-proper-WAYLAND_INCLUDE_DIRS-variable.patch \
+ file://0003-tests-util-piglit-shader.c-do-not-hardcode-build-pat.patch \
+ file://0001-tests-Fix-narrowing-errors-seen-with-clang.patch \
+ file://0001-utils-Include-libgen.h-on-musl-linux-systems.patch \
"
UPSTREAM_CHECK_COMMITS = "1"
-SRCREV = "65892137809f7d0d4d7d65dd84af59902b3e7f1e"
+SRCREV = "22eaf6a91cfd57f7bb3df4e5068c2ac1472d4ec1"
# (when PV goes above 1.0 remove the trailing r)
-PV = "1.0+gitr${SRCPV}"
+PV = "1.0+gitr"
S = "${WORKDIR}/git"
@@ -36,10 +36,12 @@ REQUIRED_DISTRO_FEATURES += "opengl"
export TEMP = "${B}/temp/"
do_compile[dirs] =+ "${B}/temp/"
-PACKAGECONFIG ??= "${@bb.utils.filter('DISTRO_FEATURES', 'x11', d)}"
+PACKAGECONFIG ??= "${@bb.utils.contains('DISTRO_FEATURES', 'x11', 'x11 glx', '', d)}"
PACKAGECONFIG[freeglut] = "-DPIGLIT_USE_GLUT=1,-DPIGLIT_USE_GLUT=0,freeglut,"
+PACKAGECONFIG[glx] = "-DPIGLIT_BUILD_GLX_TESTS=ON,-DPIGLIT_BUILD_GLX_TESTS=OFF"
+PACKAGECONFIG[opencl] = "-DPIGLIT_BUILD_CL_TESTS=ON,-DPIGLIT_BUILD_CL_TESTS=OFF,virtual/opencl-icd"
PACKAGECONFIG[x11] = "-DPIGLIT_BUILD_GL_TESTS=ON,-DPIGLIT_BUILD_GL_TESTS=OFF,${X11_DEPS}, ${X11_RDEPS}"
-PACKAGECONFIG[vulkan] = "-DPIGLIT_BUILD_VK_TESTS=ON,-DPIGLIT_BUILD_VK_TESTS=OFF,vulkan-loader"
+PACKAGECONFIG[vulkan] = "-DPIGLIT_BUILD_VK_TESTS=ON,-DPIGLIT_BUILD_VK_TESTS=OFF,glslang-native vulkan-loader,glslang"
export PIGLIT_BUILD_DIR = "../../../../git"
@@ -50,8 +52,10 @@ do_configure:prepend() {
fi
}
-# Forcibly strip because Piglit is *huge*
+# Forcibly strip because Piglit is *huge*, and don't bother trying to split/strip the result.
OECMAKE_TARGET_INSTALL = "install/strip"
+INHIBIT_PACKAGE_STRIP = "1"
+INHIBIT_PACKAGE_DEBUG_SPLIT = "1"
RDEPENDS:${PN} = "waffle waffle-bin python3 python3-mako python3-json \
python3-misc \
diff --git a/meta/recipes-graphics/shaderc/files/0001-cmake-disable-building-external-dependencies.patch b/meta/recipes-graphics/shaderc/files/0001-cmake-disable-building-external-dependencies.patch
index cc9a400028..5c49aa7fd5 100644
--- a/meta/recipes-graphics/shaderc/files/0001-cmake-disable-building-external-dependencies.patch
+++ b/meta/recipes-graphics/shaderc/files/0001-cmake-disable-building-external-dependencies.patch
@@ -1,7 +1,7 @@
-From 071a9d71bea91bbefcf15e061fc87e53568f3188 Mon Sep 17 00:00:00 2001
+From 941f5f5831e7a52c26168f81f25d0470860ca6f1 Mon Sep 17 00:00:00 2001
From: Jose Quaresma <quaresma.jose@gmail.com>
Date: Sat, 13 Feb 2021 00:45:56 +0000
-Subject: [PATCH 1/3] cmake: disable building external dependencies
+Subject: [PATCH] cmake: disable building external dependencies
- add cmake option to disable the build of the third_party dependencies
- change the update_build_version.py to use pkg-config when third_party dependencies not found
@@ -15,18 +15,18 @@ Signed-off-by: Jose Quaresma <quaresma.jose@gmail.com>
2 files changed, 25 insertions(+), 10 deletions(-)
diff --git a/CMakeLists.txt b/CMakeLists.txt
-index 5c74cd8..b358f6b 100644
+index ffcb54b..cce715e 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
-@@ -41,6 +41,7 @@ else()
+@@ -62,6 +62,7 @@ else()
endif()
option(SHADERC_ENABLE_WERROR_COMPILE "Enable passing -Werror to compiler, if available" ON)
+option(BUILD_EXTERNAL "Build external dependencies in /third_party" ON)
- set (CMAKE_CXX_STANDARD 11)
+ set (CMAKE_CXX_STANDARD 17)
-@@ -101,8 +102,14 @@ endif(MSVC)
+@@ -119,8 +120,14 @@ endif(MSVC)
# Configure subdirectories.
@@ -43,9 +43,9 @@ index 5c74cd8..b358f6b 100644
add_subdirectory(libshaderc_util)
add_subdirectory(libshaderc)
-@@ -112,7 +119,7 @@ add_subdirectory(examples)
+@@ -132,7 +139,7 @@ endif()
add_custom_target(build-version
- ${PYTHON_EXECUTABLE}
+ ${Python_EXECUTABLE}
${CMAKE_CURRENT_SOURCE_DIR}/utils/update_build_version.py
- ${shaderc_SOURCE_DIR} ${spirv-tools_SOURCE_DIR} ${glslang_SOURCE_DIR} ${CMAKE_CURRENT_BINARY_DIR}/build-version.inc
+ ${CMAKE_CURRENT_BINARY_DIR}/build-version.inc ${shaderc_SOURCE_DIR} ${spirv-tools_SOURCE_DIR} ${glslang_SOURCE_DIR}
@@ -53,7 +53,7 @@ index 5c74cd8..b358f6b 100644
function(define_pkg_config_file NAME LIBS)
diff --git a/utils/update_build_version.py b/utils/update_build_version.py
-index 5785390..f72b762 100755
+index b7ce5b8..95b34c5 100755
--- a/utils/update_build_version.py
+++ b/utils/update_build_version.py
@@ -30,6 +30,7 @@ import re
@@ -104,6 +104,3 @@ index 5785390..f72b762 100755
mkdir_p(os.path.dirname(output_file))
if os.path.isfile(output_file):
---
-2.30.1
-
diff --git a/meta/recipes-graphics/shaderc/files/0002-libshaderc_util-fix-glslang-header-file-location.patch b/meta/recipes-graphics/shaderc/files/0002-libshaderc_util-fix-glslang-header-file-location.patch
index 0e8ad7e4d2..39f982b7f3 100644
--- a/meta/recipes-graphics/shaderc/files/0002-libshaderc_util-fix-glslang-header-file-location.patch
+++ b/meta/recipes-graphics/shaderc/files/0002-libshaderc_util-fix-glslang-header-file-location.patch
@@ -1,4 +1,4 @@
-From d02ad48d5c9b48af70ddea2e6998081347ef82f3 Mon Sep 17 00:00:00 2001
+From ec2442940e1d5338971861bb81537bae3a6c19e2 Mon Sep 17 00:00:00 2001
From: Jose Quaresma <quaresma.jose@gmail.com>
Date: Sat, 13 Feb 2021 00:45:56 +0000
Subject: [PATCH] libshaderc_util: fix glslang header file location
@@ -6,13 +6,12 @@ Subject: [PATCH] libshaderc_util: fix glslang header file location
Upstream-Status: Pending
Signed-off-by: Jose Quaresma <quaresma.jose@gmail.com>
-
---
libshaderc_util/src/compiler.cc | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/libshaderc_util/src/compiler.cc b/libshaderc_util/src/compiler.cc
-index 051558b..9c4476c 100644
+index e5f5d10..5fd6d3c 100644
--- a/libshaderc_util/src/compiler.cc
+++ b/libshaderc_util/src/compiler.cc
@@ -20,7 +20,7 @@
diff --git a/meta/recipes-graphics/shaderc/shaderc_2022.1.bb b/meta/recipes-graphics/shaderc/shaderc_2022.1.bb
deleted file mode 100644
index fe9d94094c..0000000000
--- a/meta/recipes-graphics/shaderc/shaderc_2022.1.bb
+++ /dev/null
@@ -1,29 +0,0 @@
-SUMMARY = "A collection of tools, libraries and tests for shader compilation"
-DESCRIPTION = "The Shaderc library provides an API for compiling GLSL/HLSL \
-source code to SPIRV modules. It has been shipping in the Android NDK since version r12b."
-SECTION = "graphics"
-HOMEPAGE = "https://github.com/google/shaderc"
-LICENSE = "Apache-2.0"
-LIC_FILES_CHKSUM = "file://LICENSE;md5=86d3f3a95c324c9479bd8986968f4327"
-
-SRCREV = "e4722b0ad49ee60c143d43baae8390f75ba27d2d"
-SRC_URI = "git://github.com/google/shaderc.git;protocol=https;branch=main \
- file://0001-cmake-disable-building-external-dependencies.patch \
- file://0002-libshaderc_util-fix-glslang-header-file-location.patch \
- "
-UPSTREAM_CHECK_GITTAGREGEX = "^v(?P<pver>\d+(\.\d+)+)$"
-S = "${WORKDIR}/git"
-
-inherit cmake python3native pkgconfig
-
-DEPENDS = "spirv-headers spirv-tools glslang"
-
-EXTRA_OECMAKE = " \
- -DCMAKE_BUILD_TYPE=Release \
- -DBUILD_EXTERNAL=OFF \
- -DSHADERC_SKIP_TESTS=ON \
- -DSHADERC_SKIP_EXAMPLES=ON \
- -DSHADERC_SKIP_COPYRIGHT_CHECK=ON \
-"
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-graphics/shaderc/shaderc_2024.0.bb b/meta/recipes-graphics/shaderc/shaderc_2024.0.bb
new file mode 100644
index 0000000000..9975c608ac
--- /dev/null
+++ b/meta/recipes-graphics/shaderc/shaderc_2024.0.bb
@@ -0,0 +1,29 @@
+SUMMARY = "A collection of tools, libraries and tests for shader compilation"
+DESCRIPTION = "The Shaderc library provides an API for compiling GLSL/HLSL \
+source code to SPIRV modules. It has been shipping in the Android NDK since version r12b."
+SECTION = "graphics"
+HOMEPAGE = "https://github.com/google/shaderc"
+LICENSE = "Apache-2.0"
+LIC_FILES_CHKSUM = "file://LICENSE;md5=86d3f3a95c324c9479bd8986968f4327"
+
+SRCREV = "9f56ca620c07d6c4d119c65c1c1f3f1c584c9985"
+SRC_URI = "git://github.com/google/shaderc.git;protocol=https;branch=main \
+ file://0001-cmake-disable-building-external-dependencies.patch \
+ file://0002-libshaderc_util-fix-glslang-header-file-location.patch \
+ "
+UPSTREAM_CHECK_GITTAGREGEX = "^v(?P<pver>\d+(\.\d+)+)$"
+S = "${WORKDIR}/git"
+
+inherit cmake python3native pkgconfig
+
+DEPENDS = "spirv-headers spirv-tools glslang"
+
+EXTRA_OECMAKE = " \
+ -DCMAKE_BUILD_TYPE=Release \
+ -DBUILD_EXTERNAL=OFF \
+ -DSHADERC_SKIP_TESTS=ON \
+ -DSHADERC_SKIP_EXAMPLES=ON \
+ -DSHADERC_SKIP_COPYRIGHT_CHECK=ON \
+"
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-graphics/spir/spirv-headers_1.3.211.0.bb b/meta/recipes-graphics/spir/spirv-headers_1.3.211.0.bb
deleted file mode 100644
index 910ba668af..0000000000
--- a/meta/recipes-graphics/spir/spirv-headers_1.3.211.0.bb
+++ /dev/null
@@ -1,18 +0,0 @@
-SUMMARY = "Machine-readable files for the SPIR-V Registry"
-DESCRIPTION = "Headers are provided in the include directory, with up-to-date \
-headers in the unified1 subdirectory. Older headers are provided according to \
-their version."
-SECTION = "graphics"
-HOMEPAGE = "https://www.khronos.org/registry/spir-v"
-LICENSE = "MIT"
-LIC_FILES_CHKSUM = "file://LICENSE;md5=c938b85bceb8fb26c1a807f28a52ae2d"
-
-SRCREV = "4995a2f2723c401eb0ea3e10c81298906bf1422b"
-SRC_URI = "git://github.com/KhronosGroup/SPIRV-Headers;protocol=https;branch=master"
-PE = "1"
-UPSTREAM_CHECK_GITTAGREGEX = "sdk-(?P<pver>\d+(\.\d+)+)"
-S = "${WORKDIR}/git"
-
-inherit cmake
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-graphics/spir/spirv-headers_1.3.280.0.bb b/meta/recipes-graphics/spir/spirv-headers_1.3.280.0.bb
new file mode 100644
index 0000000000..26bfd9c4fa
--- /dev/null
+++ b/meta/recipes-graphics/spir/spirv-headers_1.3.280.0.bb
@@ -0,0 +1,20 @@
+SUMMARY = "Machine-readable files for the SPIR-V Registry"
+SECTION = "graphics"
+HOMEPAGE = "https://www.khronos.org/registry/spir-v"
+LICENSE = "MIT"
+LIC_FILES_CHKSUM = "file://LICENSE;md5=d14ee3b13f42e9c9674acc5925c3d741"
+
+SRCREV = "8b246ff75c6615ba4532fe4fde20f1be090c3764"
+SRC_URI = "git://github.com/KhronosGroup/SPIRV-Headers;protocol=https;branch=main"
+PE = "1"
+# These recipes need to be updated in lockstep with each other:
+# glslang, vulkan-headers, vulkan-loader, vulkan-tools, spirv-headers, spirv-tools
+# vulkan-validation-layers, vulkan-utility-libraries.
+# The tags versions should always be sdk-x.y.z, as this is what
+# upstream considers a release.
+UPSTREAM_CHECK_GITTAGREGEX = "sdk-(?P<pver>\d+(\.\d+)+)"
+S = "${WORKDIR}/git"
+
+inherit cmake
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-graphics/spir/spirv-tools_1.3.211.0.bb b/meta/recipes-graphics/spir/spirv-tools_1.3.211.0.bb
deleted file mode 100644
index 9804febbcd..0000000000
--- a/meta/recipes-graphics/spir/spirv-tools_1.3.211.0.bb
+++ /dev/null
@@ -1,41 +0,0 @@
-SUMMARY = "The SPIR-V Tools project provides an API and commands for \
-processing SPIR-V modules"
-DESCRIPTION = "The project includes an assembler, binary module parser, \
-disassembler, validator, and optimizer for SPIR-V."
-HOMEPAGE = "https://github.com/KhronosGroup/SPIRV-Tools"
-SECTION = "graphics"
-LICENSE = "Apache-2.0"
-LIC_FILES_CHKSUM = "file://LICENSE;md5=3b83ef96387f14655fc854ddc3c6bd57"
-
-SRCREV = "7826e1941eab1aa66fbe84c48b95921bff402a96"
-SRC_URI = "git://github.com/KhronosGroup/SPIRV-Tools.git;branch=master;protocol=https"
-PE = "1"
-UPSTREAM_CHECK_GITTAGREGEX = "sdk-(?P<pver>\d+(\.\d+)+)"
-S = "${WORKDIR}/git"
-
-inherit cmake python3native
-
-DEPENDS = "spirv-headers"
-
-EXTRA_OECMAKE += "\
- -DSPIRV-Headers_SOURCE_DIR=${STAGING_EXECPREFIXDIR} \
- -DSPIRV_TOOLS_BUILD_STATIC=OFF \
- -DBUILD_SHARED_LIBS=ON \
- -DSPIRV_SKIP_TESTS=ON \
-"
-
-do_install:append:class-target() {
- # reproducibility: remove build host path
- sed -i ${D}${libdir}/cmake/SPIRV-Tools/SPIRV-ToolsTarget.cmake \
- -e 's:${STAGING_DIR_HOST}::g'
-}
-
-# all the libraries are unversioned, so don't pack it on PN-dev
-SOLIBS = ".so"
-FILES_SOLIBSDEV = ""
-
-PACKAGES =+ "${PN}-lesspipe"
-FILES:${PN}-lesspipe = "${base_bindir}/spirv-lesspipe.sh"
-RDEPENDS:${PN}-lesspipe += "${PN} bash"
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-graphics/spir/spirv-tools_1.3.280.0.bb b/meta/recipes-graphics/spir/spirv-tools_1.3.280.0.bb
new file mode 100644
index 0000000000..d2b6acf946
--- /dev/null
+++ b/meta/recipes-graphics/spir/spirv-tools_1.3.280.0.bb
@@ -0,0 +1,47 @@
+SUMMARY = "The SPIR-V Tools project provides an API and commands for \
+processing SPIR-V modules"
+DESCRIPTION = "The project includes an assembler, binary module parser, \
+disassembler, validator, and optimizer for SPIR-V."
+HOMEPAGE = "https://github.com/KhronosGroup/SPIRV-Tools"
+SECTION = "graphics"
+LICENSE = "Apache-2.0"
+LIC_FILES_CHKSUM = "file://LICENSE;md5=3b83ef96387f14655fc854ddc3c6bd57"
+
+SRCREV = "04896c462d9f3f504c99a4698605b6524af813c1"
+SRC_URI = "git://github.com/KhronosGroup/SPIRV-Tools.git;branch=main;protocol=https"
+PE = "1"
+# These recipes need to be updated in lockstep with each other:
+# glslang, vulkan-headers, vulkan-loader, vulkan-tools, spirv-headers, spirv-tools
+# vulkan-validation-layers, vulkan-utility-libraries.
+# The tags versions should always be sdk-x.y.z, as this is what
+# upstream considers a release.
+UPSTREAM_CHECK_GITTAGREGEX = "sdk-(?P<pver>\d+(\.\d+)+)"
+S = "${WORKDIR}/git"
+
+inherit cmake
+
+DEPENDS = "spirv-headers"
+
+EXTRA_OECMAKE += "\
+ -DSPIRV-Headers_SOURCE_DIR=${STAGING_EXECPREFIXDIR} \
+ -DSPIRV_TOOLS_BUILD_STATIC=OFF \
+ -DBUILD_SHARED_LIBS=ON \
+ -DSPIRV_SKIP_TESTS=ON \
+"
+
+do_install:append:class-target() {
+ # Properly set _IMPORT_PREFIX in INTERFACE_LINK_LIBRARIES so that dependent
+ # tools can find the right library
+ sed -i ${D}${libdir}/cmake/SPIRV-Tools/SPIRV-ToolsTarget.cmake \
+ -e 's:INTERFACE_LINK_LIBRARIES.*$:INTERFACE_LINK_LIBRARIES "\$\{_IMPORT_PREFIX\}/${baselib}":'
+}
+
+# all the libraries are unversioned, so don't pack it on PN-dev
+SOLIBS = ".so"
+FILES_SOLIBSDEV = ""
+
+PACKAGES =+ "${PN}-lesspipe"
+FILES:${PN}-lesspipe = "${base_bindir}/spirv-lesspipe.sh"
+RDEPENDS:${PN}-lesspipe += "${PN} bash"
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-graphics/startup-notification/startup-notification_0.12.bb b/meta/recipes-graphics/startup-notification/startup-notification_0.12.bb
index efd426f8c4..aa18881a12 100644
--- a/meta/recipes-graphics/startup-notification/startup-notification_0.12.bb
+++ b/meta/recipes-graphics/startup-notification/startup-notification_0.12.bb
@@ -13,7 +13,6 @@ LIC_FILES_CHKSUM = "file://COPYING;md5=a2ae2cd47d6d2f238410f5364dfbc0f2 \
file://libsn/sn-common.h;endline=23;md5=6d05bc0ebdcf5513a6e77cb26e8cd7e2 \
file://test/test-boilerplate.h;endline=23;md5=923e706b2a70586176eead261cc5bb98"
-PR = "r2"
SECTION = "libs"
diff --git a/meta/recipes-graphics/ttf-fonts/liberation-fonts_2.1.5.bb b/meta/recipes-graphics/ttf-fonts/liberation-fonts_2.1.5.bb
index 32247b09c8..b7fdc0d399 100644
--- a/meta/recipes-graphics/ttf-fonts/liberation-fonts_2.1.5.bb
+++ b/meta/recipes-graphics/ttf-fonts/liberation-fonts_2.1.5.bb
@@ -13,11 +13,11 @@ PE = "1"
SRC_URI = "https://github.com/liberationfonts/liberation-fonts/files/7261482/liberation-fonts-ttf-${PV}.tar.gz \
file://30-liberation-aliases.conf"
SRC_URI[sha256sum] = "7191c669bf38899f73a2094ed00f7b800553364f90e2637010a69c0e268f25d0"
-UPSTREAM_CHECK_URI = "https://github.com/liberationfonts/liberation-fonts/releases"
+GITHUB_BASE_URI = "https://github.com/liberationfonts/liberation-fonts/releases"
S = "${WORKDIR}/liberation-fonts-ttf-${PV}"
-inherit allarch fontcache
+inherit allarch fontcache github-releases
do_install () {
install -d ${D}${datadir}/fonts/ttf/
diff --git a/meta/recipes-graphics/ttf-fonts/ttf-bitstream-vera_1.10.bb b/meta/recipes-graphics/ttf-fonts/ttf-bitstream-vera_1.10.bb
index 1f1ad83e0b..0fc075a468 100644
--- a/meta/recipes-graphics/ttf-fonts/ttf-bitstream-vera_1.10.bb
+++ b/meta/recipes-graphics/ttf-fonts/ttf-bitstream-vera_1.10.bb
@@ -8,13 +8,9 @@ but is visibly different than normal and bold, and reasonably pleasing."
SECTION = "x11/fonts"
LICENSE = "BitstreamVera"
LIC_FILES_CHKSUM = "file://COPYRIGHT.TXT;md5=27d7484b1e18d0ee4ce538644a3f04be"
-PR = "r8"
inherit allarch fontcache
-# remove at next version upgrade or when output changes
-HASHEQUIV_HASH_VERSION .= ".1"
-
FONT_PACKAGES = "${PN}"
SRC_URI = "${GNOME_MIRROR}/ttf-bitstream-vera/1.10/ttf-bitstream-vera-${PV}.tar.bz2"
diff --git a/meta/recipes-graphics/virglrenderer/virglrenderer/0001-meson.build-use-python3-directly-for-python.patch b/meta/recipes-graphics/virglrenderer/virglrenderer/0001-meson.build-use-python3-directly-for-python.patch
index 0fd1d511d6..3e12495970 100644
--- a/meta/recipes-graphics/virglrenderer/virglrenderer/0001-meson.build-use-python3-directly-for-python.patch
+++ b/meta/recipes-graphics/virglrenderer/virglrenderer/0001-meson.build-use-python3-directly-for-python.patch
@@ -1,4 +1,4 @@
-From 63788c63ed39a3ce9994f4315d8997e1a9300d4d Mon Sep 17 00:00:00 2001
+From dec8cca59124d7f8796b54902482ceb295a71b51 Mon Sep 17 00:00:00 2001
From: Alexander Kanavin <alex.kanavin@gmail.com>
Date: Mon, 6 Jan 2020 12:44:42 +0100
Subject: [PATCH] meson.build: use 'python3' directly for python
@@ -8,20 +8,21 @@ its configuration).
Upstream-Status: Inappropriate [oe-core specific]
Signed-off-by: Alexander Kanavin <alex.kanavin@gmail.com>
+
---
meson.build | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/meson.build b/meson.build
-index 682d7c8..19d2eae 100644
+index 9f91ff2f..2391f89d 100644
--- a/meson.build
+++ b/meson.build
-@@ -60,7 +60,7 @@ foreach w : warnings
- endif
- endforeach
+@@ -74,7 +74,7 @@ flags = [
+
+ add_project_arguments(cc.get_supported_arguments(flags), language : 'c')
-prog_python = import('python').find_installation('python3')
+prog_python = 'python3'
- libdrm_dep = dependency('libdrm', version : '>=2.4.50')
- thread_dep = dependency('threads')
+ not_found = dependency('', required: false)
+ libdrm_dep = dependency('libdrm', version : '>=2.4.50', required: get_option('drm').enabled() or get_option('venus'))
diff --git a/meta/recipes-graphics/virglrenderer/virglrenderer/cve-2022-0135.patch b/meta/recipes-graphics/virglrenderer/virglrenderer/cve-2022-0135.patch
deleted file mode 100644
index ae42dc8f6c..0000000000
--- a/meta/recipes-graphics/virglrenderer/virglrenderer/cve-2022-0135.patch
+++ /dev/null
@@ -1,117 +0,0 @@
-From 63aee871365f9c9e7fa9125672302a0fb250d34d Mon Sep 17 00:00:00 2001
-From: Gert Wollny <gert.wollny@collabora.com>
-Date: Tue, 30 Nov 2021 09:16:24 +0100
-Subject: [PATCH 2/2] vrend: propperly check whether the shader image range is
- correct
-
-Also add a test to check the integer underflow.
-
-Closes: #251
-Signed-off-by: Gert Wollny <gert.wollny@collabora.com>
-Reviewed-by: Chia-I Wu <olvaffe@gmail.com>
-
-cherry-pick from anongit.freedesktop.org/virglrenderer
-commit 2aed5d4...
-
-CVE: CVE-2022-0135
-Upstream-Status: Backport
-Signed-off-by: Joe Slater <joe.slater@windriver.com>
-
----
- src/vrend_decode.c | 3 +-
- tests/test_fuzzer_formats.c | 57 +++++++++++++++++++++++++++++++++++++
- 2 files changed, 59 insertions(+), 1 deletion(-)
-
-diff --git a/src/vrend_decode.c b/src/vrend_decode.c
-index 91f5f24..6771b10 100644
---- a/src/vrend_decode.c
-+++ b/src/vrend_decode.c
-@@ -1249,8 +1249,9 @@ static int vrend_decode_set_shader_images(struct vrend_context *ctx, const uint3
- if (num_images < 1) {
- return 0;
- }
-+
- if (start_slot > PIPE_MAX_SHADER_IMAGES ||
-- start_slot > PIPE_MAX_SHADER_IMAGES - num_images)
-+ start_slot + num_images > PIPE_MAX_SHADER_IMAGES)
- return EINVAL;
-
- for (uint32_t i = 0; i < num_images; i++) {
-diff --git a/tests/test_fuzzer_formats.c b/tests/test_fuzzer_formats.c
-index 154a2e5..e32caf0 100644
---- a/tests/test_fuzzer_formats.c
-+++ b/tests/test_fuzzer_formats.c
-@@ -958,6 +958,61 @@ static void test_vrend_set_signle_abo_heap_overflow() {
- virgl_renderer_submit_cmd((void *) cmd, ctx_id, 0xde);
- }
-
-+static void test_vrend_set_shader_images_overflow()
-+{
-+ uint32_t num_shaders = PIPE_MAX_SHADER_IMAGES + 1;
-+ uint32_t size = num_shaders * VIRGL_SET_SHADER_IMAGE_ELEMENT_SIZE + 3;
-+ uint32_t cmd[size];
-+ int i = 0;
-+ cmd[i++] = ((size - 1)<< 16) | 0 << 8 | VIRGL_CCMD_SET_SHADER_IMAGES;
-+ cmd[i++] = PIPE_SHADER_FRAGMENT;
-+ memset(&cmd[i], 0, size - i);
-+
-+ virgl_renderer_submit_cmd((void *) cmd, ctx_id, size);
-+}
-+
-+/* Test adapted from yaojun8558363@gmail.com:
-+ * https://gitlab.freedesktop.org/virgl/virglrenderer/-/issues/250
-+*/
-+static void test_vrend_3d_resource_overflow() {
-+
-+ struct virgl_renderer_resource_create_args resource;
-+ resource.handle = 0x4c474572;
-+ resource.target = PIPE_TEXTURE_2D_ARRAY;
-+ resource.format = VIRGL_FORMAT_Z24X8_UNORM;
-+ resource.nr_samples = 2;
-+ resource.last_level = 0;
-+ resource.array_size = 3;
-+ resource.bind = VIRGL_BIND_SAMPLER_VIEW;
-+ resource.depth = 1;
-+ resource.width = 8;
-+ resource.height = 4;
-+ resource.flags = 0;
-+
-+ virgl_renderer_resource_create(&resource, NULL, 0);
-+ virgl_renderer_ctx_attach_resource(ctx_id, resource.handle);
-+
-+ uint32_t size = 0x400;
-+ uint32_t cmd[size];
-+ int i = 0;
-+ cmd[i++] = (size - 1) << 16 | 0 << 8 | VIRGL_CCMD_RESOURCE_INLINE_WRITE;
-+ cmd[i++] = resource.handle;
-+ cmd[i++] = 0; // level
-+ cmd[i++] = 0; // usage
-+ cmd[i++] = 0; // stride
-+ cmd[i++] = 0; // layer_stride
-+ cmd[i++] = 0; // x
-+ cmd[i++] = 0; // y
-+ cmd[i++] = 0; // z
-+ cmd[i++] = 8; // w
-+ cmd[i++] = 4; // h
-+ cmd[i++] = 3; // d
-+ memset(&cmd[i], 0, size - i);
-+
-+ virgl_renderer_submit_cmd((void *) cmd, ctx_id, size);
-+}
-+
-+
- int main()
- {
- initialize_environment();
-@@ -980,6 +1035,8 @@ int main()
- test_cs_nullpointer_deference();
- test_vrend_set_signle_abo_heap_overflow();
-
-+ test_vrend_set_shader_images_overflow();
-+ test_vrend_3d_resource_overflow();
-
- virgl_renderer_context_destroy(ctx_id);
- virgl_renderer_cleanup(&cookie);
---
-2.25.1
-
diff --git a/meta/recipes-graphics/virglrenderer/virglrenderer/cve-2022-0175.patch b/meta/recipes-graphics/virglrenderer/virglrenderer/cve-2022-0175.patch
deleted file mode 100644
index 7fbab75091..0000000000
--- a/meta/recipes-graphics/virglrenderer/virglrenderer/cve-2022-0175.patch
+++ /dev/null
@@ -1,107 +0,0 @@
-From 5ca7aca001092c557f0b6fc1ba3db7dcdab860b7 Mon Sep 17 00:00:00 2001
-From: Gert Wollny <gert.wollny@collabora.com>
-Date: Tue, 30 Nov 2021 09:29:42 +0100
-Subject: [PATCH 1/2] vrend: clear memory when allocating a host-backed memory
- resource
-
-Closes: #249
-Signed-off-by: Gert Wollny <gert.wollny@collabora.com>
-Reviewed-by: Chia-I Wu <olvaffe@gmail.com>
-
-cherry-pick from anongit.freedesktop.org/virglrenderer
-commit b05bb61...
-
-CVE: CVE-2022-0175
-Upstream-Status: Backport
-Signed-off-by: Joe Slater <joe.slater@windriver.com>
-
----
- src/vrend_renderer.c | 2 +-
- tests/test_virgl_transfer.c | 51 +++++++++++++++++++++++++++++++++++++
- 2 files changed, 52 insertions(+), 1 deletion(-)
-
-diff --git a/src/vrend_renderer.c b/src/vrend_renderer.c
-index b8b2a36..2650cf2 100644
---- a/src/vrend_renderer.c
-+++ b/src/vrend_renderer.c
-@@ -6788,7 +6788,7 @@ vrend_resource_alloc_buffer(struct vrend_resource *gr, uint32_t flags)
- if (bind == VIRGL_BIND_CUSTOM) {
- /* use iovec directly when attached */
- gr->storage_bits |= VREND_STORAGE_HOST_SYSTEM_MEMORY;
-- gr->ptr = malloc(size);
-+ gr->ptr = calloc(1, size);
- if (!gr->ptr)
- return -ENOMEM;
- } else if (bind == VIRGL_BIND_STAGING) {
-diff --git a/tests/test_virgl_transfer.c b/tests/test_virgl_transfer.c
-index bf7f438..3c53c3d 100644
---- a/tests/test_virgl_transfer.c
-+++ b/tests/test_virgl_transfer.c
-@@ -952,6 +952,56 @@ START_TEST(virgl_test_transfer_near_res_bounds_with_stride_succeeds)
- }
- END_TEST
-
-+START_TEST(test_vrend_host_backed_memory_no_data_leak)
-+{
-+ struct iovec iovs[1];
-+ int niovs = 1;
-+
-+ struct virgl_context ctx = {0};
-+
-+ int ret = testvirgl_init_ctx_cmdbuf(&ctx);
-+
-+ struct virgl_renderer_resource_create_args res;
-+ res.handle = 0x400;
-+ res.target = PIPE_BUFFER;
-+ res.format = VIRGL_FORMAT_R8_UNORM;
-+ res.nr_samples = 0;
-+ res.last_level = 0;
-+ res.array_size = 1;
-+ res.bind = VIRGL_BIND_CUSTOM;
-+ res.depth = 1;
-+ res.width = 32;
-+ res.height = 1;
-+ res.flags = 0;
-+
-+ uint32_t size = 32;
-+ uint8_t* data = calloc(1, size);
-+ memset(data, 1, 32);
-+ iovs[0].iov_base = data;
-+ iovs[0].iov_len = size;
-+
-+ struct pipe_box box = {0,0,0, size, 1,1};
-+
-+ virgl_renderer_resource_create(&res, NULL, 0);
-+ virgl_renderer_ctx_attach_resource(ctx.ctx_id, res.handle);
-+
-+ ret = virgl_renderer_transfer_read_iov(res.handle, ctx.ctx_id, 0, 0, 0,
-+ (struct virgl_box *)&box, 0, iovs, niovs);
-+
-+ ck_assert_int_eq(ret, 0);
-+
-+ for (int i = 0; i < 32; ++i)
-+ ck_assert_int_eq(data[i], 0);
-+
-+ virgl_renderer_ctx_detach_resource(1, res.handle);
-+
-+ virgl_renderer_resource_unref(res.handle);
-+ free(data);
-+
-+}
-+END_TEST
-+
-+
- static Suite *virgl_init_suite(void)
- {
- Suite *s;
-@@ -981,6 +1031,7 @@ static Suite *virgl_init_suite(void)
- tcase_add_test(tc_core, virgl_test_transfer_buffer_bad_strides);
- tcase_add_test(tc_core, virgl_test_transfer_2d_array_bad_layer_stride);
- tcase_add_test(tc_core, virgl_test_transfer_2d_bad_level);
-+ tcase_add_test(tc_core, test_vrend_host_backed_memory_no_data_leak);
-
- tcase_add_loop_test(tc_core, virgl_test_transfer_res_read_valid, 0, PIPE_MAX_TEXTURE_TYPES);
- tcase_add_loop_test(tc_core, virgl_test_transfer_res_write_valid, 0, PIPE_MAX_TEXTURE_TYPES);
---
-2.25.1
-
diff --git a/meta/recipes-graphics/virglrenderer/virglrenderer_0.9.1.bb b/meta/recipes-graphics/virglrenderer/virglrenderer_0.9.1.bb
deleted file mode 100644
index ad3688e300..0000000000
--- a/meta/recipes-graphics/virglrenderer/virglrenderer_0.9.1.bb
+++ /dev/null
@@ -1,25 +0,0 @@
-SUMMARY = "VirGL virtual OpenGL renderer"
-DESCRIPTION = "Virgil is a research project to investigate the possibility of \
-creating a virtual 3D GPU for use inside qemu virtual machines, that allows \
-the guest operating system to use the capabilities of the host GPU to \
-accelerate 3D rendering."
-HOMEPAGE = "https://virgil3d.github.io/"
-
-LICENSE = "MIT"
-LIC_FILES_CHKSUM = "file://COPYING;md5=c81c08eeefd9418fca8f88309a76db10"
-
-DEPENDS = "libdrm virtual/egl virtual/libgbm libepoxy"
-SRCREV = "363915595e05fb252e70d6514be2f0c0b5ca312b"
-SRC_URI = "git://anongit.freedesktop.org/git/virglrenderer;branch=branch-0.9.1 \
- file://0001-meson.build-use-python3-directly-for-python.patch \
- file://cve-2022-0135.patch \
- file://cve-2022-0175.patch \
- "
-
-S = "${WORKDIR}/git"
-
-inherit meson pkgconfig features_check
-
-BBCLASSEXTEND = "native nativesdk"
-
-REQUIRED_DISTRO_FEATURES = "opengl"
diff --git a/meta/recipes-graphics/virglrenderer/virglrenderer_1.0.1.bb b/meta/recipes-graphics/virglrenderer/virglrenderer_1.0.1.bb
new file mode 100644
index 0000000000..0501b0c59c
--- /dev/null
+++ b/meta/recipes-graphics/virglrenderer/virglrenderer_1.0.1.bb
@@ -0,0 +1,33 @@
+SUMMARY = "VirGL virtual OpenGL renderer"
+DESCRIPTION = "Virgil is a research project to investigate the possibility of \
+creating a virtual 3D GPU for use inside qemu virtual machines, that allows \
+the guest operating system to use the capabilities of the host GPU to \
+accelerate 3D rendering."
+HOMEPAGE = "https://virgil3d.github.io/"
+
+LICENSE = "MIT"
+LIC_FILES_CHKSUM = "file://COPYING;md5=c81c08eeefd9418fca8f88309a76db10"
+
+DEPENDS = "libdrm libepoxy virtual/egl virtual/libgbm"
+SRCREV = "690680e5f0f952e22424fca1538c1b24457a0868"
+SRC_URI = "git://gitlab.freedesktop.org/virgl/virglrenderer.git;branch=main;protocol=https \
+ file://0001-meson.build-use-python3-directly-for-python.patch \
+ "
+
+S = "${WORKDIR}/git"
+
+inherit meson pkgconfig features_check
+
+PACKAGECONFIG ?= "${@bb.utils.contains('DISTRO_FEATURES', 'vulkan', 'venus', '', d)}"
+
+PACKAGECONFIG[venus] = "-Dvenus=true,-Dvenus=false,vulkan-loader vulkan-headers"
+PACKAGECONFIG[va] = "-Dvideo=true,-Dvideo=false,libva"
+PACKAGECONFIG[render-server] = "-Drender-server=true,-Drender-server=false"
+PACKAGECONFIG[drm-msm-experimental] = "-Ddrm-msm-experimental=true,-Ddrm-msm-experimental=false"
+PACKAGECONFIG[minigbm_allocation] = "-Dminigbm_allocation=true,-Dminigbm_allocation=false"
+PACKAGECONFIG[venus-validate] = "-Dvenus-validate=true,-Dvenus-validate=false"
+PACKAGECONFIG[tests] = "-Dtests=true,-Dtests=false,libcheck"
+
+BBCLASSEXTEND = "native nativesdk"
+
+REQUIRED_DISTRO_FEATURES = "opengl"
diff --git a/meta/recipes-graphics/vulkan/vulkan-headers_1.3.211.0.bb b/meta/recipes-graphics/vulkan/vulkan-headers_1.3.211.0.bb
deleted file mode 100644
index c74eb4cf5b..0000000000
--- a/meta/recipes-graphics/vulkan/vulkan-headers_1.3.211.0.bb
+++ /dev/null
@@ -1,22 +0,0 @@
-SUMMARY = "Vulkan Header files and API registry"
-DESCRIPTION = "Vulkan is a 3D graphics and compute API providing cross-platform access \
-to modern GPUs with low overhead and targeting realtime graphics applications such as \
-games and interactive media. This package contains the development headers \
-for packages wanting to make use of Vulkan."
-HOMEPAGE = "https://www.khronos.org/vulkan/"
-BUGTRACKER = "https://github.com/KhronosGroup/Vulkan-Headers"
-SECTION = "libs"
-
-LICENSE = "Apache-2.0"
-LIC_FILES_CHKSUM = "file://LICENSE.txt;md5=3b83ef96387f14655fc854ddc3c6bd57"
-SRC_URI = "git://github.com/KhronosGroup/Vulkan-Headers.git;branch=main;protocol=https"
-
-SRCREV = "76f00ef6cbb1886eb1162d1fa39bee8b51e22ee8"
-
-S = "${WORKDIR}/git"
-
-inherit cmake
-
-FILES:${PN} += "${datadir}/vulkan"
-
-UPSTREAM_CHECK_GITTAGREGEX = "sdk-(?P<pver>\d+(\.\d+)+)"
diff --git a/meta/recipes-graphics/vulkan/vulkan-headers_1.3.280.0.bb b/meta/recipes-graphics/vulkan/vulkan-headers_1.3.280.0.bb
new file mode 100644
index 0000000000..371cc7304d
--- /dev/null
+++ b/meta/recipes-graphics/vulkan/vulkan-headers_1.3.280.0.bb
@@ -0,0 +1,28 @@
+SUMMARY = "Vulkan Header files and API registry"
+DESCRIPTION = "Vulkan is a 3D graphics and compute API providing cross-platform access \
+to modern GPUs with low overhead and targeting realtime graphics applications such as \
+games and interactive media. This package contains the development headers \
+for packages wanting to make use of Vulkan."
+HOMEPAGE = "https://www.khronos.org/vulkan/"
+BUGTRACKER = "https://github.com/KhronosGroup/Vulkan-Headers"
+SECTION = "libs"
+
+LICENSE = "Apache-2.0 & MIT"
+LIC_FILES_CHKSUM = "file://LICENSE.md;md5=1bc355d8c4196f774c8b87ed1a8dd625"
+SRC_URI = "git://github.com/KhronosGroup/Vulkan-Headers.git;branch=main;protocol=https"
+
+SRCREV = "577baa05033cf1d9236b3d078ca4b3269ed87a2b"
+
+S = "${WORKDIR}/git"
+
+inherit cmake
+
+FILES:${PN} += "${datadir}/vulkan"
+RDEPENDS:${PN} += "python3-core"
+
+# These recipes need to be updated in lockstep with each other:
+# glslang, vulkan-headers, vulkan-loader, vulkan-tools, spirv-headers, spirv-tools,
+# vulkan-validation-layers, vulkan-utility-libraries.
+# The tags versions should always be sdk-x.y.z, as this is what
+# upstream considers a release.
+UPSTREAM_CHECK_GITTAGREGEX = "sdk-(?P<pver>\d+(\.\d+)+)"
diff --git a/meta/recipes-graphics/vulkan/vulkan-loader_1.3.211.0.bb b/meta/recipes-graphics/vulkan/vulkan-loader_1.3.211.0.bb
deleted file mode 100644
index cb4932ef79..0000000000
--- a/meta/recipes-graphics/vulkan/vulkan-loader_1.3.211.0.bb
+++ /dev/null
@@ -1,40 +0,0 @@
-SUMMARY = "3D graphics and compute API common loader"
-DESCRIPTION = "Vulkan is a new generation graphics and compute API \
-that provides efficient access to modern GPUs. These packages \
-provide only the common vendor-agnostic library loader, headers and \
-the vulkaninfo utility."
-HOMEPAGE = "https://www.khronos.org/vulkan/"
-BUGTRACKER = "https://github.com/KhronosGroup/Vulkan-Loader"
-SECTION = "libs"
-
-LICENSE = "Apache-2.0"
-LIC_FILES_CHKSUM = "file://LICENSE.txt;md5=7dbefed23242760aa3475ee42801c5ac"
-SRC_URI = "git://github.com/KhronosGroup/Vulkan-Loader.git;branch=sdk-1.3.211;protocol=https"
-SRCREV = "bfb419161602361626e40a7a3af0a63e06bcf204"
-
-S = "${WORKDIR}/git"
-
-REQUIRED_DISTRO_FEATURES = "vulkan"
-
-inherit cmake features_check pkgconfig
-ANY_OF_DISTRO_FEATURES = "x11 wayland"
-
-DEPENDS += "vulkan-headers"
-
-EXTRA_OECMAKE = "\
- -DBUILD_TESTS=OFF \
- -DPYTHON_EXECUTABLE=${HOSTTOOLS_DIR}/python3 \
- -DASSEMBLER_WORKS=FALSE \
- -DVulkanHeaders_INCLUDE_DIR=${STAGING_INCDIR} \
- -DVulkanRegistry_DIR=${RECIPE_SYSROOT}/${datadir} \
- "
-
-# must choose x11 or wayland or both
-PACKAGECONFIG ??= "${@bb.utils.filter('DISTRO_FEATURES', 'wayland x11', d)}"
-
-PACKAGECONFIG[x11] = "-DBUILD_WSI_XLIB_SUPPORT=ON -DBUILD_WSI_XCB_SUPPORT=ON, -DBUILD_WSI_XLIB_SUPPORT=OFF -DBUILD_WSI_XCB_SUPPORT=OFF, libxcb libx11 libxrandr"
-PACKAGECONFIG[wayland] = "-DBUILD_WSI_WAYLAND_SUPPORT=ON, -DBUILD_WSI_WAYLAND_SUPPORT=OFF, wayland"
-
-RRECOMMENDS:${PN} = "mesa-vulkan-drivers"
-
-UPSTREAM_CHECK_GITTAGREGEX = "sdk-(?P<pver>\d+(\.\d+)+)"
diff --git a/meta/recipes-graphics/vulkan/vulkan-loader_1.3.280.0.bb b/meta/recipes-graphics/vulkan/vulkan-loader_1.3.280.0.bb
new file mode 100644
index 0000000000..b738771801
--- /dev/null
+++ b/meta/recipes-graphics/vulkan/vulkan-loader_1.3.280.0.bb
@@ -0,0 +1,43 @@
+SUMMARY = "3D graphics and compute API common loader"
+DESCRIPTION = "Vulkan is a new generation graphics and compute API \
+that provides efficient access to modern GPUs. These packages \
+provide only the common vendor-agnostic library loader, headers and \
+the vulkaninfo utility."
+HOMEPAGE = "https://www.khronos.org/vulkan/"
+BUGTRACKER = "https://github.com/KhronosGroup/Vulkan-Loader"
+SECTION = "libs"
+
+LICENSE = "Apache-2.0"
+LIC_FILES_CHKSUM = "file://LICENSE.txt;md5=7dbefed23242760aa3475ee42801c5ac"
+SRC_URI = "git://github.com/KhronosGroup/Vulkan-Loader.git;branch=vulkan-sdk-1.3.280;protocol=https"
+SRCREV = "61a9c50248e09f3a0e0be7ce6f8bb1663855f979"
+
+S = "${WORKDIR}/git"
+
+REQUIRED_DISTRO_FEATURES = "vulkan"
+
+inherit cmake features_check pkgconfig
+
+DEPENDS += "vulkan-headers"
+
+EXTRA_OECMAKE = "\
+ -DBUILD_TESTS=OFF \
+ -DPYTHON_EXECUTABLE=${HOSTTOOLS_DIR}/python3 \
+ -DASSEMBLER_WORKS=FALSE \
+ -DVulkanHeaders_INCLUDE_DIR=${STAGING_INCDIR} \
+ -DVulkanRegistry_DIR=${RECIPE_SYSROOT}/${datadir} \
+ "
+
+PACKAGECONFIG ??= "${@bb.utils.filter('DISTRO_FEATURES', 'wayland x11', d)}"
+
+PACKAGECONFIG[x11] = "-DBUILD_WSI_XLIB_SUPPORT=ON -DBUILD_WSI_XCB_SUPPORT=ON, -DBUILD_WSI_XLIB_SUPPORT=OFF -DBUILD_WSI_XCB_SUPPORT=OFF, libxcb libx11 libxrandr"
+PACKAGECONFIG[wayland] = "-DBUILD_WSI_WAYLAND_SUPPORT=ON, -DBUILD_WSI_WAYLAND_SUPPORT=OFF, wayland"
+
+RRECOMMENDS:${PN} = "mesa-vulkan-drivers"
+
+# These recipes need to be updated in lockstep with each other:
+# glslang, vulkan-headers, vulkan-loader, vulkan-tools, spirv-headers, spirv-tools,
+# vulkan-validation-layers, vulkan-utility-libraries.
+# The tags versions should always be sdk-x.y.z, as this is what
+# upstream considers a release.
+UPSTREAM_CHECK_GITTAGREGEX = "sdk-(?P<pver>\d+(\.\d+)+)"
diff --git a/meta/recipes-graphics/vulkan/vulkan-samples/0001-CMakeLists.txt-do-not-hardcode-lib-as-installation-t.patch b/meta/recipes-graphics/vulkan/vulkan-samples/0001-CMakeLists.txt-do-not-hardcode-lib-as-installation-t.patch
deleted file mode 100644
index 90fe277a41..0000000000
--- a/meta/recipes-graphics/vulkan/vulkan-samples/0001-CMakeLists.txt-do-not-hardcode-lib-as-installation-t.patch
+++ /dev/null
@@ -1,29 +0,0 @@
-From 5fb216d35b6846074196e80421f3162df3b9c8cd Mon Sep 17 00:00:00 2001
-From: Alexander Kanavin <alex.kanavin@gmail.com>
-Date: Sun, 1 Nov 2020 23:19:22 +0000
-Subject: [PATCH] CMakeLists.txt: do not hardcode 'lib' as installation target
-
-Upstream-Status: Inappropriate [already fixed in newer versions]
-Signed-off-by: Alexander Kanavin <alex.kanavin@gmail.com>
----
- CMakeLists.txt | 4 ++--
- 1 file changed, 2 insertions(+), 2 deletions(-)
-
-diff --git a/CMakeLists.txt b/CMakeLists.txt
-index e00f8c77..f9896eed 100644
---- a/third_party/spirv-cross/CMakeLists.txt
-+++ b/third_party/spirv-cross/CMakeLists.txt
-@@ -67,8 +67,8 @@ macro(spirv_cross_add_library name config_name)
- install(TARGETS ${name}
- EXPORT ${config_name}Config
- RUNTIME DESTINATION bin
-- LIBRARY DESTINATION lib
-- ARCHIVE DESTINATION lib
-+ LIBRARY DESTINATION ${CMAKE_INSTALL_LIBDIR}
-+ ARCHIVE DESTINATION ${CMAKE_INSTALL_LIBDIR}
- PUBLIC_HEADER DESTINATION include/spirv_cross)
- install(FILES ${hdrs} DESTINATION include/spirv_cross)
- install(EXPORT ${config_name}Config DESTINATION share/${config_name}/cmake)
---
-2.17.1
-
diff --git a/meta/recipes-graphics/vulkan/vulkan-samples/0001-Deprecate-u8string_view.patch b/meta/recipes-graphics/vulkan/vulkan-samples/0001-Deprecate-u8string_view.patch
new file mode 100644
index 0000000000..c2304bdd48
--- /dev/null
+++ b/meta/recipes-graphics/vulkan/vulkan-samples/0001-Deprecate-u8string_view.patch
@@ -0,0 +1,59 @@
+From 93987b1ce7d6f91387202495aac61026070597df Mon Sep 17 00:00:00 2001
+From: Khem Raj <raj.khem@gmail.com>
+Date: Sun, 15 Jan 2023 21:37:52 -0800
+Subject: [PATCH] Deprecate u8string_view
+
+Use basic_string_view instead
+
+Upstream-Status: Backport [https://github.com/fmtlib/fmt/commit/dea7fde8b7d649923dd41b0766bdf076033c62a2]
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+---
+ include/spdlog/fmt/bundled/core.h | 3 ++-
+ include/spdlog/fmt/bundled/format.h | 15 ++-------------
+ 2 files changed, 4 insertions(+), 14 deletions(-)
+
+diff --git a/include/spdlog/fmt/bundled/core.h b/include/spdlog/fmt/bundled/core.h
+index 50b79351..e8b029ef 100644
+--- a/include/spdlog/fmt/bundled/core.h
++++ b/include/spdlog/fmt/bundled/core.h
+@@ -1484,7 +1484,8 @@ FMT_API void vprint(wstring_view format_str, wformat_args args);
+
+ /**
+ \rst
+- Prints formatted data to ``stdout``.
++ Formats ``args`` according to specifications in ``format_str`` and writes the
++ output to ``stdout``.
+
+ **Example**::
+
+diff --git a/include/spdlog/fmt/bundled/format.h b/include/spdlog/fmt/bundled/format.h
+index 1bb24a52..39426361 100644
+--- a/include/spdlog/fmt/bundled/format.h
++++ b/include/spdlog/fmt/bundled/format.h
+@@ -407,21 +407,10 @@ void basic_buffer<T>::append(const U *begin, const U *end) {
+ enum char8_t: unsigned char {};
+ #endif
+
+-// A UTF-8 string view.
+-class u8string_view : public basic_string_view<char8_t> {
+- public:
+- typedef char8_t char_type;
+-
+- u8string_view(const char *s):
+- basic_string_view<char8_t>(reinterpret_cast<const char8_t*>(s)) {}
+- u8string_view(const char *s, size_t count) FMT_NOEXCEPT:
+- basic_string_view<char8_t>(reinterpret_cast<const char8_t*>(s), count) {}
+-};
+-
+ #if FMT_USE_USER_DEFINED_LITERALS
+ inline namespace literals {
+-inline u8string_view operator"" _u(const char *s, std::size_t n) {
+- return {s, n};
++inline basic_string_view<char8_t> operator"" _u(const char* s, std::size_t n) {
++ return {reinterpret_cast<const char8_t*>(s), n};
+ }
+ }
+ #endif
+--
+2.39.0
+
diff --git a/meta/recipes-graphics/vulkan/vulkan-samples/0001-Do-not-use-LFS64-functions-on-linux-musl.patch b/meta/recipes-graphics/vulkan/vulkan-samples/0001-Do-not-use-LFS64-functions-on-linux-musl.patch
new file mode 100644
index 0000000000..f2bd7e510b
--- /dev/null
+++ b/meta/recipes-graphics/vulkan/vulkan-samples/0001-Do-not-use-LFS64-functions-on-linux-musl.patch
@@ -0,0 +1,37 @@
+From ce7a593e74c8e0c2ece15c73e7614d4f13a19a53 Mon Sep 17 00:00:00 2001
+From: Khem Raj <raj.khem@gmail.com>
+Date: Fri, 30 Dec 2022 13:04:08 -0800
+Subject: [PATCH] Do not use LFS64 functions on linux/musl
+
+On musl, off_t is 64bit always ( even on 32bit platforms ), therefore using
+LFS64 funcitons is not needed on such platforms. Moreover, musl has stopped
+providing aliases for these functions [1] which means it wont compile on
+newer musl systems. Therefore only use it on 32bit glibc/linux platforms
+and exclude musl like cygwin or OSX
+
+[1] https://git.musl-libc.org/cgit/musl/commit/?id=246f1c811448f37a44b41cd8df8d0ef9736d95f4
+
+Upstream-Status: Submitted [https://github.com/gabime/spdlog/pull/2589]
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+---
+ include/spdlog/details/os.h | 4 +++-
+ 1 file changed, 3 insertions(+), 1 deletion(-)
+
+diff --git a/include/spdlog/details/os.h b/include/spdlog/details/os.h
+index 8e8476f0..be0a67b8 100644
+--- a/include/spdlog/details/os.h
++++ b/include/spdlog/details/os.h
+@@ -227,7 +227,9 @@ inline size_t filesize(FILE *f)
+ #else // unix
+ int fd = fileno(f);
+ // 64 bits(but not in osx or cygwin, where fstat64 is deprecated)
+-#if !defined(__FreeBSD__) && !defined(__APPLE__) && (defined(__x86_64__) || defined(__ppc64__)) && !defined(__CYGWIN__)
++#if !defined(__FreeBSD__) && !defined(__APPLE__) && \
++ (defined(__linux__) && defined(__GLIBC__)) && \
++ (defined(__x86_64__) || defined(__ppc64__)) && !defined(__CYGWIN__)
+ struct stat64 st;
+ if (::fstat64(fd, &st) == 0)
+ {
+--
+2.39.0
+
diff --git a/meta/recipes-graphics/vulkan/vulkan-samples/0001-vulkan-samples-Fix-reproducibility-issue.patch b/meta/recipes-graphics/vulkan/vulkan-samples/0001-vulkan-samples-Fix-reproducibility-issue.patch
new file mode 100644
index 0000000000..c16e05112f
--- /dev/null
+++ b/meta/recipes-graphics/vulkan/vulkan-samples/0001-vulkan-samples-Fix-reproducibility-issue.patch
@@ -0,0 +1,43 @@
+From d998c753254649c7cf7c64e3fed78e41c11ad7ed Mon Sep 17 00:00:00 2001
+From: Richard Purdie <richard.purdie@linuxfoundation.org>
+Date: Wed, 23 Aug 2023 09:38:37 +0200
+Subject: [PATCH] vulkan-samples: Fix reproducibility issue
+
+There is code to remove the prefix CMAKE_SOURCE_DIR from __FILENAME__ paths
+used for logging with LOGE() in the code. We need to make this match the value we use
+in the debug source remapping from CFLAGS
+
+We export the right path to use in the recipe with:
+
+EXTRA_OECMAKE = "-DCMAKE_DEBUG_SRCDIR=${TARGET_DBGSRC_DIR}/"
+
+and we then patch this into the code instead of the broken use
+of CMAKE_SOURCE_DIR since __FILENAME__ will match our path prefix
+changes.
+
+This also breaks reproducibility since the path length of the build directory
+will currently change the output!
+
+Upstream-Status: Pending [needs to be discussed upstream]
+Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
+
+Signed-off-by: Julien Stephan <jstephan@baylibre.com>
+---
+ bldsys/cmake/global_options.cmake | 2 +-
+ 1 file changed, 1 insertion(+), 1 deletion(-)
+
+diff --git a/bldsys/cmake/global_options.cmake b/bldsys/cmake/global_options.cmake
+index b15c2da..d8952e5 100644
+--- a/bldsys/cmake/global_options.cmake
++++ b/bldsys/cmake/global_options.cmake
+@@ -62,7 +62,7 @@ set(CMAKE_CXX_STANDARD 14)
+ set(CMAKE_DISABLE_SOURCE_CHANGES ON)
+ set(CMAKE_DISABLE_IN_SOURCE_BUILD ON)
+
+-string(LENGTH "${CMAKE_SOURCE_DIR}/" ROOT_PATH_SIZE)
++string(LENGTH "${CMAKE_DEBUG_SRCDIR}/" ROOT_PATH_SIZE)
+ add_definitions(-DROOT_PATH_SIZE=${ROOT_PATH_SIZE})
+
+ set(CMAKE_C_FLAGS_DEBUG "-DDEBUG=0 ${CMAKE_C_FLAGS_DEBUG}")
+--
+2.41.0
diff --git a/meta/recipes-graphics/vulkan/vulkan-samples/32bit.patch b/meta/recipes-graphics/vulkan/vulkan-samples/32bit.patch
new file mode 100644
index 0000000000..644c3b6167
--- /dev/null
+++ b/meta/recipes-graphics/vulkan/vulkan-samples/32bit.patch
@@ -0,0 +1,101 @@
+From 49761ca63797014223d8e3ff6fb2c0235803c19c Mon Sep 17 00:00:00 2001
+From: asuessenbach <asuessenbach@nvidia.com>
+Date: Wed, 3 May 2023 09:50:08 +0200
+Subject: [PATCH] Resolve some Vulkan-Hpp-related issues on Win32.
+
+This patch fixes vulkan-samples compilation on 32-bit hosts.
+
+Upstream-Status: Backport
+Signed-off-by: Ross Burton <ross.burton@arm.com>
+
+---
+ framework/common/hpp_vk_common.h | 4 ++--
+ framework/core/hpp_buffer.cpp | 4 ++--
+ framework/core/hpp_buffer.h | 2 +-
+ framework/core/hpp_image.cpp | 2 +-
+ samples/api/hpp_texture_loading/hpp_texture_loading.cpp | 2 +-
+ 5 files changed, 7 insertions(+), 7 deletions(-)
+
+diff --git a/framework/common/hpp_vk_common.h b/framework/common/hpp_vk_common.h
+index 39ed3dcde..0cbbe479e 100644
+--- a/framework/common/hpp_vk_common.h
++++ b/framework/common/hpp_vk_common.h
+@@ -92,7 +92,7 @@ inline bool is_dynamic_buffer_descriptor_type(vk::DescriptorType descriptor_type
+
+ inline vk::ShaderModule load_shader(const std::string &filename, vk::Device device, vk::ShaderStageFlagBits stage)
+ {
+- return vkb::load_shader(filename, device, static_cast<VkShaderStageFlagBits>(stage));
++ return static_cast<vk::ShaderModule>(vkb::load_shader(filename, device, static_cast<VkShaderStageFlagBits>(stage)));
+ }
+
+ inline void set_image_layout(vk::CommandBuffer command_buffer,
+@@ -104,7 +104,7 @@ inline void set_image_layout(vk::CommandBuffer command_buffer,
+ vk::PipelineStageFlags dst_mask = vk::PipelineStageFlagBits::eAllCommands)
+ {
+ vkb::set_image_layout(command_buffer,
+- image,
++ static_cast<VkImage>(image),
+ static_cast<VkImageLayout>(old_layout),
+ static_cast<VkImageLayout>(new_layout),
+ static_cast<VkImageSubresourceRange>(subresource_range),
+diff --git a/framework/core/hpp_buffer.cpp b/framework/core/hpp_buffer.cpp
+index 8da265acb..e6509b9f4 100644
+--- a/framework/core/hpp_buffer.cpp
++++ b/framework/core/hpp_buffer.cpp
+@@ -84,7 +84,7 @@ HPPBuffer::~HPPBuffer()
+ if (get_handle() && (allocation != VK_NULL_HANDLE))
+ {
+ unmap();
+- vmaDestroyBuffer(get_device().get_memory_allocator(), get_handle(), allocation);
++ vmaDestroyBuffer(get_device().get_memory_allocator(), static_cast<VkBuffer>(get_handle()), allocation);
+ }
+ }
+
+@@ -93,7 +93,7 @@ VmaAllocation HPPBuffer::get_allocation() const
+ return allocation;
+ }
+
+-VkDeviceMemory HPPBuffer::get_memory() const
++vk::DeviceMemory HPPBuffer::get_memory() const
+ {
+ return memory;
+ }
+diff --git a/framework/core/hpp_buffer.h b/framework/core/hpp_buffer.h
+index 7a243c265..bad47406d 100644
+--- a/framework/core/hpp_buffer.h
++++ b/framework/core/hpp_buffer.h
+@@ -55,7 +55,7 @@ class HPPBuffer : public vkb::core::HPPVulkanResource<vk::Buffer>
+
+ VmaAllocation get_allocation() const;
+ const uint8_t *get_data() const;
+- VkDeviceMemory get_memory() const;
++ vk::DeviceMemory get_memory() const;
+
+ /**
+ * @return Return the buffer's device address (note: requires that the buffer has been created with the VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT usage fla)
+diff --git a/framework/core/hpp_image.cpp b/framework/core/hpp_image.cpp
+index 00fa89ba7..5e6f27363 100644
+--- a/framework/core/hpp_image.cpp
++++ b/framework/core/hpp_image.cpp
+@@ -138,7 +138,7 @@ HPPImage::~HPPImage()
+ if (get_handle() && memory)
+ {
+ unmap();
+- vmaDestroyImage(get_device().get_memory_allocator(), get_handle(), memory);
++ vmaDestroyImage(get_device().get_memory_allocator(), static_cast<VkImage>(get_handle()), memory);
+ }
+ }
+
+diff --git a/samples/api/hpp_texture_loading/hpp_texture_loading.cpp b/samples/api/hpp_texture_loading/hpp_texture_loading.cpp
+index 11a1f24c1..cbdd22773 100644
+--- a/samples/api/hpp_texture_loading/hpp_texture_loading.cpp
++++ b/samples/api/hpp_texture_loading/hpp_texture_loading.cpp
+@@ -170,7 +170,7 @@ void HPPTextureLoading::load_texture()
+ memory_allocate_info = {memory_requirements.size,
+ get_device()->get_gpu().get_memory_type(memory_requirements.memoryTypeBits, vk::MemoryPropertyFlagBits::eDeviceLocal)};
+ texture.device_memory = get_device()->get_handle().allocateMemory(memory_allocate_info);
+- VK_CHECK(vkBindImageMemory(get_device()->get_handle(), texture.image, texture.device_memory, 0));
++ get_device()->get_handle().bindImageMemory(texture.image, texture.device_memory, 0);
+
+ vk::CommandBuffer copy_command = get_device()->create_command_buffer(vk::CommandBufferLevel::ePrimary, true);
+
diff --git a/meta/recipes-graphics/vulkan/vulkan-samples/debugfix.patch b/meta/recipes-graphics/vulkan/vulkan-samples/debugfix.patch
deleted file mode 100644
index d723fcc19a..0000000000
--- a/meta/recipes-graphics/vulkan/vulkan-samples/debugfix.patch
+++ /dev/null
@@ -1,31 +0,0 @@
-There is code to remove the prefix CMAKE_SOURCE_DIR from __FILENAME__ paths
-used for logging with LOGE() in the code. We need to make this match the value we use
-in the debug source remapping from CFLAGS
-
-We export the right path to use in the recipe with:
-
-EXTRA_OECMAKE = "-DCMAKE_DEBUG_SRCDIR=/usr/src/debug/${PN}/${EXTENDPE}${PV}-${PR}/"
-
-and we then patch this into the code instead of the broken use
-of CMAKE_SOURCE_DIR since __FILENAME__ will match our path prefix
-changes.
-
-This also breaks reproducibility since the path length of the build directory
-will currently change the output!
-
-Upstream-Status: Pending [needs to be discussed upstream]
-Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
-
-Index: git/bldsys/cmake/global_options.cmake
-===================================================================
---- git.orig/bldsys/cmake/global_options.cmake
-+++ git/bldsys/cmake/global_options.cmake
-@@ -47,7 +47,7 @@ set(CMAKE_CXX_STANDARD 14)
- set(CMAKE_DISABLE_SOURCE_CHANGES ON)
- set(CMAKE_DISABLE_IN_SOURCE_BUILD ON)
-
--string(LENGTH "${CMAKE_SOURCE_DIR}/" ROOT_PATH_SIZE)
-+string(LENGTH "${CMAKE_DEBUG_SRCDIR}/" ROOT_PATH_SIZE)
- add_definitions(-DROOT_PATH_SIZE=${ROOT_PATH_SIZE})
-
- set(CMAKE_C_FLAGS_DEBUG "-DDEBUG=0 ${CMAKE_C_FLAGS_DEBUG}")
diff --git a/meta/recipes-graphics/vulkan/vulkan-samples_git.bb b/meta/recipes-graphics/vulkan/vulkan-samples_git.bb
index 57d3bd8fe5..d60c0f3190 100644
--- a/meta/recipes-graphics/vulkan/vulkan-samples_git.bb
+++ b/meta/recipes-graphics/vulkan/vulkan-samples_git.bb
@@ -1,23 +1,24 @@
-DESCRIPTION = "The Vulkan Samples is collection of resources to help develop optimized Vulkan applications."
+SUMMARY = "The Vulkan Samples is collection of resources to help develop optimized Vulkan applications."
HOMEPAGE = "https://www.khronos.org/vulkan/"
BUGTRACKER = "https://github.com/KhronosGroup/Vulkan-Samples/issues"
LICENSE = "Apache-2.0"
LIC_FILES_CHKSUM = "file://LICENSE;md5=48aa35cefb768436223a6e7f18dc2a2a"
-SRC_URI = "gitsm://github.com/KhronosGroup/Vulkan-Samples.git;branch=master;protocol=https \
- file://0001-CMakeLists.txt-do-not-hardcode-lib-as-installation-t.patch \
- file://debugfix.patch \
+SRC_URI = "gitsm://github.com/KhronosGroup/Vulkan-Samples.git;branch=main;protocol=https;lfs=0 \
+ file://0001-vulkan-samples-Fix-reproducibility-issue.patch \
+ file://0001-Do-not-use-LFS64-functions-on-linux-musl.patch;patchdir=third_party/spdlog \
+ file://0001-Deprecate-u8string_view.patch;patchdir=third_party/spdlog \
+ file://32bit.patch \
"
UPSTREAM_CHECK_COMMITS = "1"
-SRCREV = "0cc04a63583769bd13bcc51229a5a263d59343df"
+SRCREV = "2307c3eb5608cb1205fa3514b3a31dbfb857d00c"
UPSTREAM_CHECK_GITTAGREGEX = "These are not the releases you're looking for"
S = "${WORKDIR}/git"
REQUIRED_DISTRO_FEATURES = 'vulkan'
-COMPATIBLE_HOST = "(x86_64|aarch64|mips64|powerpc64|riscv64).*-linux"
inherit cmake features_check
@@ -28,7 +29,7 @@ FILES:${PN} += "${datadir}"
# used for logging with LOGE in the code. We need to make this match the value we use
# in the debug source remapping from CFLAGS
#
-EXTRA_OECMAKE += "-DCMAKE_DEBUG_SRCDIR=/usr/src/debug/${PN}/${EXTENDPE}${PV}-${PR}/"
+EXTRA_OECMAKE += "-DCMAKE_DEBUG_SRCDIR=${TARGET_DBGSRC_DIR}/"
# Binaries built with PCH enabled don't appear reproducible, differing results were seen
# from some builds depending on the point the PCH was compiled. Disable it to be
# deterministic
diff --git a/meta/recipes-graphics/vulkan/vulkan-tools_1.3.211.0.bb b/meta/recipes-graphics/vulkan/vulkan-tools_1.3.211.0.bb
deleted file mode 100644
index 3cb4ccc23f..0000000000
--- a/meta/recipes-graphics/vulkan/vulkan-tools_1.3.211.0.bb
+++ /dev/null
@@ -1,32 +0,0 @@
-SUMMARY = "Vulkan Utilities and Tools"
-DESCRIPTION = "Assist development by enabling developers to verify their applications correct use of the Vulkan API."
-HOMEPAGE = "https://www.khronos.org/vulkan/"
-BUGTRACKER = "https://github.com/KhronosGroup/Vulkan-Tools"
-SECTION = "libs"
-
-LICENSE = "Apache-2.0"
-LIC_FILES_CHKSUM = "file://LICENSE.txt;md5=3b83ef96387f14655fc854ddc3c6bd57"
-SRC_URI = "git://github.com/KhronosGroup/Vulkan-Tools.git;branch=sdk-1.3.211;protocol=https"
-SRCREV = "4fdfd2b1187ab44f062d091ba3113c5e3eab5a1b"
-
-S = "${WORKDIR}/git"
-
-inherit cmake features_check
-ANY_OF_DISTRO_FEATURES = "x11 wayland"
-REQUIRED_DISTRO_FEATURES = "vulkan"
-
-DEPENDS += "vulkan-headers vulkan-loader"
-
-EXTRA_OECMAKE = "\
- -DBUILD_TESTS=OFF \
- -DBUILD_CUBE=OFF \
- -DPYTHON_EXECUTABLE=${HOSTTOOLS_DIR}/python3 \
- "
-
-# must choose x11 or wayland or both
-PACKAGECONFIG ??= "${@bb.utils.filter('DISTRO_FEATURES', 'wayland x11', d)}"
-
-PACKAGECONFIG[x11] = "-DBUILD_WSI_XLIB_SUPPORT=ON -DBUILD_WSI_XCB_SUPPORT=ON, -DBUILD_WSI_XLIB_SUPPORT=OFF -DBUILD_WSI_XCB_SUPPORT=OFF, libxcb libx11 libxrandr"
-PACKAGECONFIG[wayland] = "-DBUILD_WSI_WAYLAND_SUPPORT=ON, -DBUILD_WSI_WAYLAND_SUPPORT=OFF, wayland"
-
-UPSTREAM_CHECK_GITTAGREGEX = "sdk-(?P<pver>\d+(\.\d+)+)"
diff --git a/meta/recipes-graphics/vulkan/vulkan-tools_1.3.280.0.bb b/meta/recipes-graphics/vulkan/vulkan-tools_1.3.280.0.bb
new file mode 100644
index 0000000000..a7e4a67aaa
--- /dev/null
+++ b/meta/recipes-graphics/vulkan/vulkan-tools_1.3.280.0.bb
@@ -0,0 +1,37 @@
+SUMMARY = "Vulkan Utilities and Tools"
+DESCRIPTION = "Assist development by enabling developers to verify their applications correct use of the Vulkan API."
+HOMEPAGE = "https://www.khronos.org/vulkan/"
+BUGTRACKER = "https://github.com/KhronosGroup/Vulkan-Tools"
+SECTION = "libs"
+
+LICENSE = "Apache-2.0"
+LIC_FILES_CHKSUM = "file://LICENSE.txt;md5=3b83ef96387f14655fc854ddc3c6bd57"
+SRC_URI = "git://github.com/KhronosGroup/Vulkan-Tools.git;branch=vulkan-sdk-1.3.280;protocol=https"
+SRCREV = "136976082d0b14dad8b9687982b2a80cc6e6a633"
+
+S = "${WORKDIR}/git"
+
+inherit cmake features_check pkgconfig
+ANY_OF_DISTRO_FEATURES = "x11 wayland"
+REQUIRED_DISTRO_FEATURES = "vulkan"
+
+DEPENDS += "vulkan-headers vulkan-loader vulkan-volk"
+
+EXTRA_OECMAKE = "\
+ -DBUILD_TESTS=OFF \
+ -DBUILD_CUBE=OFF \
+ -DPYTHON_EXECUTABLE=${HOSTTOOLS_DIR}/python3 \
+ "
+
+# must choose x11 or wayland or both
+PACKAGECONFIG ??= "${@bb.utils.filter('DISTRO_FEATURES', 'wayland x11', d)}"
+
+PACKAGECONFIG[x11] = "-DBUILD_WSI_XLIB_SUPPORT=ON -DBUILD_WSI_XCB_SUPPORT=ON, -DBUILD_WSI_XLIB_SUPPORT=OFF -DBUILD_WSI_XCB_SUPPORT=OFF, libxcb libx11 libxrandr"
+PACKAGECONFIG[wayland] = "-DBUILD_WSI_WAYLAND_SUPPORT=ON, -DBUILD_WSI_WAYLAND_SUPPORT=OFF, wayland"
+
+# These recipes need to be updated in lockstep with each other:
+# glslang, vulkan-headers, vulkan-loader, vulkan-tools, spirv-headers, spirv-tools
+# vulkan-validation-layers, vulkan-utility-libraries.
+# The tags versions should always be sdk-x.y.z, as this is what
+# upstream considers a release.
+UPSTREAM_CHECK_GITTAGREGEX = "sdk-(?P<pver>\d+(\.\d+)+)"
diff --git a/meta/recipes-graphics/vulkan/vulkan-utility-libraries_1.3.280.0.bb b/meta/recipes-graphics/vulkan/vulkan-utility-libraries_1.3.280.0.bb
new file mode 100644
index 0000000000..3ab31af96a
--- /dev/null
+++ b/meta/recipes-graphics/vulkan/vulkan-utility-libraries_1.3.280.0.bb
@@ -0,0 +1,33 @@
+SUMMARY = "Vulkan Utility Libraries"
+DESCRIPTION = "Common libraries created to share code across various \
+Vulkan repositories, solving long standing issues for Vulkan SDK \
+developers and users."
+HOMEPAGE = "https://www.khronos.org/vulkan/"
+BUGTRACKER = "https://github.com/KhronosGroup/Vulkan-Utility-Libraries"
+SECTION = "libs"
+
+LICENSE = "Apache-2.0"
+LIC_FILES_CHKSUM = "file://LICENSE.md;md5=4ca2d6799091aaa98a8520f1b793939b"
+
+SRC_URI = "git://github.com/KhronosGroup/Vulkan-Utility-Libraries.git;branch=main;protocol=https"
+SRCREV = "a4140c5fd47dcf3a030726a60b293db61cfb54a3"
+
+S = "${WORKDIR}/git"
+
+REQUIRED_DISTRO_FEATURES = "vulkan"
+
+DEPENDS = "vulkan-headers"
+
+EXTRA_OECMAKE = "\
+ -DBUILD_TESTS=OFF \
+ "
+
+inherit cmake features_check pkgconfig
+
+# These recipes need to be updated in lockstep with each other:
+# glslang, vulkan-headers, vulkan-loader, vulkan-tools,
+# vulkan-validation-layers, spirv-headers, spirv-tools,
+# vulkan-utility-libraries.
+# The tags versions should always be sdk-x.y.z, as this is what
+# upstream considers a release.
+UPSTREAM_CHECK_GITTAGREGEX = "sdk-(?P<pver>\d+(\.\d+)+)"
diff --git a/meta/recipes-graphics/vulkan/vulkan-validation-layers_1.3.280.0.bb b/meta/recipes-graphics/vulkan/vulkan-validation-layers_1.3.280.0.bb
new file mode 100644
index 0000000000..c488309c91
--- /dev/null
+++ b/meta/recipes-graphics/vulkan/vulkan-validation-layers_1.3.280.0.bb
@@ -0,0 +1,49 @@
+SUMMARY = "Vulkan Validation layers"
+DESCRIPTION = "Khronos official Vulkan validation layers to assist developers \
+in verifying that their applications correctly use the Vulkan API"
+HOMEPAGE = "https://www.khronos.org/vulkan/"
+BUGTRACKER = "https://github.com/KhronosGroup/Vulkan-ValidationLayers"
+SECTION = "libs"
+
+LICENSE = "Apache-2.0 & MIT"
+LIC_FILES_CHKSUM = "file://LICENSE.txt;md5=cd3c0bc366cd9b6a906e22f0bcb5910f"
+
+SRC_URI = "git://github.com/KhronosGroup/Vulkan-ValidationLayers.git;branch=vulkan-sdk-1.3.280;protocol=https"
+SRCREV = "8506077b9a25a00684e8be24b779733ae1405a54"
+
+S = "${WORKDIR}/git"
+
+REQUIRED_DISTRO_FEATURES = "vulkan"
+
+DEPENDS = "vulkan-headers vulkan-loader spirv-headers spirv-tools glslang vulkan-utility-libraries"
+
+# BUILD_TESTS - Not required for OE builds
+# USE_ROBIN_HOOD_HASHING - Provides substantial performance improvements on all platforms.
+# Yocto project doesn't contain a recipe for package so disabled it.
+EXTRA_OECMAKE = "\
+ -DBUILD_TESTS=OFF \
+ -DUSE_ROBIN_HOOD_HASHING=OFF \
+ -DGLSLANG_INSTALL_DIR=${STAGING_LIBDIR} \
+ -DVULKAN_HEADERS_INSTALL_DIR=${STAGING_EXECPREFIXDIR} \
+ -DSPIRV_HEADERS_INSTALL_DIR=${STAGING_EXECPREFIXDIR} \
+ "
+
+PACKAGECONFIG[x11] = "-DBUILD_WSI_XLIB_SUPPORT=ON -DBUILD_WSI_XCB_SUPPORT=ON, -DBUILD_WSI_XLIB_SUPPORT=OFF -DBUILD_WSI_XCB_SUPPORT=OFF, libxcb libx11 libxrandr"
+PACKAGECONFIG[wayland] = "-DBUILD_WSI_WAYLAND_SUPPORT=ON, -DBUILD_WSI_WAYLAND_SUPPORT=OFF, wayland"
+
+PACKAGECONFIG ?= "${@bb.utils.filter('DISTRO_FEATURES', 'x11 wayland', d)}"
+
+inherit cmake features_check pkgconfig
+
+FILES:${PN} += "${datadir}/vulkan"
+
+SOLIBS = ".so"
+FILES_SOLIBSDEV = ""
+
+# These recipes need to be updated in lockstep with each other:
+# glslang, vulkan-headers, vulkan-loader, vulkan-tools,
+# vulkan-validation-layers, spirv-headers, spirv-tools,
+# vulkan-utility-libraries.
+# The tags versions should always be sdk-x.y.z, as this is what
+# upstream considers a release.
+UPSTREAM_CHECK_GITTAGREGEX = "sdk-(?P<pver>\d+(\.\d+)+)"
diff --git a/meta/recipes-graphics/vulkan/vulkan-volk_1.3.280.0.bb b/meta/recipes-graphics/vulkan/vulkan-volk_1.3.280.0.bb
new file mode 100644
index 0000000000..2ef12fedf8
--- /dev/null
+++ b/meta/recipes-graphics/vulkan/vulkan-volk_1.3.280.0.bb
@@ -0,0 +1,37 @@
+SUMMARY = "A meta-loader for Vulkan"
+DESCRIPTION = "Volk allows one to dynamically load entrypoints required \
+to use Vulkan without linking to vulkan-1.dll or statically linking Vulkan loader. \
+"
+HOMEPAGE = "https://www.khronos.org/vulkan/"
+BUGTRACKER = "https://github.com/zeux/volk"
+SECTION = "libs"
+
+LICENSE = "MIT"
+LIC_FILES_CHKSUM = "file://LICENSE.md;md5=12e6af3a0e2a5e5dbf7796aa82b64626"
+
+SRC_URI = "git://github.com/zeux/volk.git;branch=master;protocol=https"
+SRCREV = "01986ac85fa2e5c70df09aeae9c907e27c5d50b2"
+
+S = "${WORKDIR}/git"
+
+REQUIRED_DISTRO_FEATURES = "vulkan"
+
+DEPENDS = "vulkan-headers"
+
+EXTRA_OECMAKE = "\
+ -DVOLK_INSTALL=ON \
+ "
+
+inherit cmake features_check pkgconfig
+
+# These recipes need to be updated in lockstep with each other:
+# glslang, vulkan-headers, vulkan-loader, vulkan-tools,
+# vulkan-validation-layers, spirv-headers, spirv-tools,
+# vulkan-utility-libraries.
+# The tags versions should always be sdk-x.y.z, as this is what
+# upstream considers a release.
+UPSTREAM_CHECK_GITTAGREGEX = "sdk-(?P<pver>\d+(\.\d+)+)"
+
+do_install:append() {
+ sed -i -e 's,${STAGING_DIR_TARGET},,g' ${D}${libdir}/cmake/volk/volkTargets.cmake
+}
diff --git a/meta/recipes-graphics/waffle/waffle/0001-waffle-do-not-make-core-protocol-into-the-library.patch b/meta/recipes-graphics/waffle/waffle/0001-waffle-do-not-make-core-protocol-into-the-library.patch
index 24b2de5d9c..31ac3e0dd1 100644
--- a/meta/recipes-graphics/waffle/waffle/0001-waffle-do-not-make-core-protocol-into-the-library.patch
+++ b/meta/recipes-graphics/waffle/waffle/0001-waffle-do-not-make-core-protocol-into-the-library.patch
@@ -1,4 +1,4 @@
-From 7610ec4b572d3a54d30fca6798f0c406f3fd8a46 Mon Sep 17 00:00:00 2001
+From 79b9e4338f803d79449e53a40b1ecc0a5a5889e4 Mon Sep 17 00:00:00 2001
From: Alexander Kanavin <alex@linutronix.de>
Date: Tue, 26 Oct 2021 08:52:17 +0200
Subject: [PATCH] waffle: do not make core protocol into the library
@@ -10,30 +10,14 @@ wayland.xml from the host.
Upstream-Status: Inappropriate [oe-core specific]
Signed-off-by: Alexander Kanavin <alex@linutronix.de>
---
- meson.build | 4 ----
src/waffle/meson.build | 7 -------
- 2 files changed, 11 deletions(-)
+ 1 file changed, 7 deletions(-)
-diff --git a/meson.build b/meson.build
-index ffc02ff..0bb6128 100644
---- a/meson.build
-+++ b/meson.build
-@@ -104,10 +104,6 @@ else
- dep_wayland_client = dependency(
- 'wayland-client', version : '>= 1.10', required : get_option('wayland'),
- )
-- if dep_wayland_client.found()
-- wayland_core_xml = join_paths(dep_wayland_client.get_pkgconfig_variable('pkgdatadir'),
-- 'wayland.xml')
-- endif
- dep_wayland_egl = dependency(
- 'wayland-egl', version : '>= 9.1', required : get_option('wayland'),
- )
diff --git a/src/waffle/meson.build b/src/waffle/meson.build
-index 01898c8..6245868 100644
+index 1cc99f6..022745a 100644
--- a/src/waffle/meson.build
+++ b/src/waffle/meson.build
-@@ -88,12 +88,6 @@ if build_surfaceless
+@@ -89,12 +89,6 @@ if build_surfaceless
endif
if build_wayland
@@ -46,7 +30,7 @@ index 01898c8..6245868 100644
wl_xdg_shell_proto_c = custom_target(
'wl-xdg-shell-proto.c',
input: wayland_xdg_shell_xml,
-@@ -114,7 +108,6 @@ if build_wayland
+@@ -115,7 +109,6 @@ if build_wayland
'wayland/wayland_wrapper.c',
)
files_libwaffle += [
diff --git a/meta/recipes-graphics/waffle/waffle_1.7.0.bb b/meta/recipes-graphics/waffle/waffle_1.7.0.bb
deleted file mode 100644
index f1fd9e7630..0000000000
--- a/meta/recipes-graphics/waffle/waffle_1.7.0.bb
+++ /dev/null
@@ -1,50 +0,0 @@
-SUMMARY = "A C library for selecting an OpenGL API and window system at runtime"
-DESCRIPTION = "A cross-platform C library that allows one to defer selection \
-of an OpenGL API and window system until runtime. For example, on Linux, Waffle \
-enables an application to select X11/EGL with an OpenGL 3.3 core profile, \
-Wayland with OpenGL ES2, and other window system / API combinations."
-HOMEPAGE = "https://gitlab.freedesktop.org/mesa/waffle"
-BUGTRACKER = "https://gitlab.freedesktop.org/mesa/waffle"
-LICENSE = "BSD-2-Clause"
-LIC_FILES_CHKSUM = "file://LICENSE.txt;md5=4c5154407c2490750dd461c50ad94797 \
- file://include/waffle-1/waffle.h;endline=24;md5=61dbf8697f61c78645e75a93c585b1bf"
-
-SRC_URI = "git://gitlab.freedesktop.org/mesa/waffle.git;protocol=https;branch=master \
- file://0001-waffle-do-not-make-core-protocol-into-the-library.patch \
- "
-SRCREV = "905c6c10f2483adf0cbfa024e2d3c2ed541fb300"
-S = "${WORKDIR}/git"
-
-inherit meson features_check lib_package bash-completion pkgconfig
-
-DEPENDS:append = " python3 cmake-native"
-
-# This should be overridden per-machine to reflect the capabilities of the GL
-# stack.
-PACKAGECONFIG ??= "${@bb.utils.contains('DISTRO_FEATURES', 'x11', 'glx x11-egl', '', d)} \
- ${@bb.utils.contains('DISTRO_FEATURES', 'wayland', 'wayland', '', d)} \
- ${@bb.utils.contains('DISTRO_FEATURES', 'opengl', 'gbm surfaceless-egl', '', d)} \
-"
-
-# virtual/libgl requires opengl in DISTRO_FEATURES.
-REQUIRED_DISTRO_FEATURES += "${@bb.utils.contains('DEPENDS', 'virtual/${MLPREFIX}libgl', 'opengl', '', d)}"
-
-# I say virtual/libgl, actually wants gl.pc
-PACKAGECONFIG[glx] = "-Dglx=enabled,-Dglx=disabled,virtual/${MLPREFIX}libgl libx11"
-
-# wants wayland-egl.pc, egl.pc, and the wayland
-# DISTRO_FEATURE.
-PACKAGECONFIG[wayland] = "-Dwayland=enabled,-Dwayland=disabled,virtual/${MLPREFIX}egl wayland wayland-native wayland-protocols"
-
-# wants gbm.pc egl.pc
-PACKAGECONFIG[gbm] = "-Dgbm=enabled,-Dgbm=disabled,virtual/${MLPREFIX}egl virtual/${MLPREFIX}libgbm udev"
-
-# wants egl.pc
-PACKAGECONFIG[x11-egl] = "-Dx11_egl=enabled,-Dx11_egl=disabled,virtual/${MLPREFIX}egl libxcb"
-PACKAGECONFIG[surfaceless-egl] = "-Dsurfaceless_egl=enabled,-Dsurfaceless_egl=disabled,virtual/${MLPREFIX}egl"
-
-# TODO: optionally build manpages and examples
-
-do_install:append() {
- sed -i -e "s,${WORKDIR},,g" ${D}/${libdir}/cmake/Waffle/WaffleConfig.cmake
-}
diff --git a/meta/recipes-graphics/waffle/waffle_1.8.0.bb b/meta/recipes-graphics/waffle/waffle_1.8.0.bb
new file mode 100644
index 0000000000..12b31dcff1
--- /dev/null
+++ b/meta/recipes-graphics/waffle/waffle_1.8.0.bb
@@ -0,0 +1,50 @@
+SUMMARY = "A C library for selecting an OpenGL API and window system at runtime"
+DESCRIPTION = "A cross-platform C library that allows one to defer selection \
+of an OpenGL API and window system until runtime. For example, on Linux, Waffle \
+enables an application to select X11/EGL with an OpenGL 3.3 core profile, \
+Wayland with OpenGL ES2, and other window system / API combinations."
+HOMEPAGE = "https://gitlab.freedesktop.org/mesa/waffle"
+BUGTRACKER = "https://gitlab.freedesktop.org/mesa/waffle"
+LICENSE = "BSD-2-Clause"
+LIC_FILES_CHKSUM = "file://LICENSE.txt;md5=4c5154407c2490750dd461c50ad94797 \
+ file://include/waffle-1/waffle.h;endline=24;md5=61dbf8697f61c78645e75a93c585b1bf"
+
+SRC_URI = "git://gitlab.freedesktop.org/mesa/waffle.git;protocol=https;branch=master \
+ file://0001-waffle-do-not-make-core-protocol-into-the-library.patch \
+ "
+SRCREV = "580b912a30085528886603942c100c7b309b3bdb"
+S = "${WORKDIR}/git"
+
+inherit meson features_check lib_package bash-completion pkgconfig
+
+DEPENDS:append = " python3"
+
+# This should be overridden per-machine to reflect the capabilities of the GL
+# stack.
+PACKAGECONFIG ??= "${@bb.utils.contains('DISTRO_FEATURES', 'x11', 'glx x11-egl', '', d)} \
+ ${@bb.utils.contains('DISTRO_FEATURES', 'wayland', 'wayland', '', d)} \
+ ${@bb.utils.contains('DISTRO_FEATURES', 'opengl', 'gbm surfaceless-egl', '', d)} \
+"
+
+# virtual/libgl requires opengl in DISTRO_FEATURES.
+REQUIRED_DISTRO_FEATURES += "${@bb.utils.contains('DEPENDS', 'virtual/${MLPREFIX}libgl', 'opengl', '', d)}"
+
+# I say virtual/libgl, actually wants gl.pc
+PACKAGECONFIG[glx] = "-Dglx=enabled,-Dglx=disabled,virtual/${MLPREFIX}libgl libx11"
+
+# wants wayland-egl.pc, egl.pc, and the wayland
+# DISTRO_FEATURE.
+PACKAGECONFIG[wayland] = "-Dwayland=enabled,-Dwayland=disabled,virtual/${MLPREFIX}egl wayland wayland-native wayland-protocols"
+
+# wants gbm.pc egl.pc
+PACKAGECONFIG[gbm] = "-Dgbm=enabled,-Dgbm=disabled,virtual/${MLPREFIX}egl virtual/${MLPREFIX}libgbm udev"
+
+# wants egl.pc
+PACKAGECONFIG[x11-egl] = "-Dx11_egl=enabled,-Dx11_egl=disabled,virtual/${MLPREFIX}egl libxcb"
+PACKAGECONFIG[surfaceless-egl] = "-Dsurfaceless_egl=enabled,-Dsurfaceless_egl=disabled,virtual/${MLPREFIX}egl"
+
+# TODO: optionally build manpages and examples
+
+do_install:append() {
+ rm -rf ${D}${datadir}/zsh
+}
diff --git a/meta/recipes-graphics/wayland/libinput/determinism.patch b/meta/recipes-graphics/wayland/libinput/determinism.patch
deleted file mode 100644
index cb554030cf..0000000000
--- a/meta/recipes-graphics/wayland/libinput/determinism.patch
+++ /dev/null
@@ -1,21 +0,0 @@
-This finds our outer git tree and that version information breaks
-determinism of this recipe. Disable it.
-
-RP 2020/2/6
-
-Upstream-Status: Pending
-Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
-
-Index: libinput-1.14.3/meson.build
-===================================================================
---- libinput-1.14.3.orig/meson.build
-+++ libinput-1.14.3/meson.build
-@@ -387,7 +387,7 @@ pkgconfig.generate(
- libraries : lib_libinput
- )
-
--git_version_h = vcs_tag(command : ['git', 'describe'],
-+git_version_h = vcs_tag(command : ['false'],
- fallback : 'unknown',
- input : 'src/libinput-git-version.h.in',
- output :'libinput-git-version.h')
diff --git a/meta/recipes-graphics/wayland/libinput_1.19.4.bb b/meta/recipes-graphics/wayland/libinput_1.19.4.bb
deleted file mode 100644
index a7d0c4be69..0000000000
--- a/meta/recipes-graphics/wayland/libinput_1.19.4.bb
+++ /dev/null
@@ -1,49 +0,0 @@
-SUMMARY = "Library to handle input devices in Wayland compositors"
-DESCRIPTION = "libinput is a library to handle input devices in Wayland \
-compositors and to provide a generic X.Org input driver. It provides \
-device detection, device handling, input device event processing and \
-abstraction so minimize the amount of custom input code compositors need to \
-provide the common set of functionality that users expect."
-HOMEPAGE = "http://www.freedesktop.org/wiki/Software/libinput/"
-SECTION = "libs"
-
-LICENSE = "MIT"
-LIC_FILES_CHKSUM = "file://COPYING;md5=bab4ac7dc1c10bc0fb037dc76c46ef8a"
-
-DEPENDS = "libevdev udev mtdev libcheck"
-
-SRC_URI = "http://www.freedesktop.org/software/${BPN}/${BP}.tar.xz \
- file://run-ptest \
- file://determinism.patch \
- "
-SRC_URI[sha256sum] = "ff33a570b5a936c81e6c08389a8581c2665311d026ce3d225c88d09c49f9b440"
-
-UPSTREAM_CHECK_REGEX = "libinput-(?P<pver>\d+\.\d+\.(?!9\d+)\d+)"
-
-inherit meson pkgconfig lib_package ptest
-
-# Patch out build directory, otherwise it leaks into ptest binary
-do_configure:append() {
- sed -i -e "s,${WORKDIR},,g" config.h
- if [ -e "litest-config.h" ]; then
- sed -i -e "s,${WORKDIR},,g" litest-config.h
- fi
-}
-
-PACKAGECONFIG ??= ""
-PACKAGECONFIG[libwacom] = "-Dlibwacom=true,-Dlibwacom=false,libwacom"
-PACKAGECONFIG[gui] = "-Ddebug-gui=true,-Ddebug-gui=false,cairo gtk+3"
-
-UDEVDIR = "`pkg-config --variable=udevdir udev`"
-
-EXTRA_OEMESON += "-Dudev-dir=${UDEVDIR} \
- -Ddocumentation=false \
- ${@bb.utils.contains('PTEST_ENABLED', '1', '-Dtests=true -Dinstall-tests=true', '-Dtests=false -Dinstall-tests=false', d)} \
- -Dzshcompletiondir=no"
-
-# package name changed in 1.8.1 upgrade: make sure package upgrades work
-RPROVIDES:${PN} = "libinput"
-RREPLACES:${PN} = "libinput"
-RCONFLICTS:${PN} = "libinput"
-
-FILES:${PN}-ptest += "${libexecdir}/libinput/libinput-test-suite"
diff --git a/meta/recipes-graphics/wayland/libinput_1.25.0.bb b/meta/recipes-graphics/wayland/libinput_1.25.0.bb
new file mode 100644
index 0000000000..517b247fed
--- /dev/null
+++ b/meta/recipes-graphics/wayland/libinput_1.25.0.bb
@@ -0,0 +1,49 @@
+SUMMARY = "Library to handle input devices in Wayland compositors"
+DESCRIPTION = "libinput is a library to handle input devices in Wayland \
+compositors and to provide a generic X.Org input driver. It provides \
+device detection, device handling, input device event processing and \
+abstraction so minimize the amount of custom input code compositors need to \
+provide the common set of functionality that users expect."
+HOMEPAGE = "http://www.freedesktop.org/wiki/Software/libinput/"
+SECTION = "libs"
+
+LICENSE = "MIT"
+LIC_FILES_CHKSUM = "file://COPYING;md5=bab4ac7dc1c10bc0fb037dc76c46ef8a"
+
+DEPENDS = "libevdev udev mtdev"
+
+SRC_URI = "git://gitlab.freedesktop.org/libinput/libinput.git;protocol=https;branch=main \
+ file://run-ptest \
+ "
+SRCREV = "3fd38d89276b679ac3565efd7c2150fd047902cb"
+S = "${WORKDIR}/git"
+
+UPSTREAM_CHECK_REGEX = "libinput-(?P<pver>\d+\.\d+\.(?!9\d+)\d+)"
+
+inherit meson pkgconfig lib_package ptest
+
+# Patch out build directory, otherwise it leaks into ptest binary
+do_configure:append() {
+ sed -i -e "s,${WORKDIR},,g" config.h
+ if [ -e "litest-config.h" ]; then
+ sed -i -e "s,${WORKDIR},,g" litest-config.h
+ fi
+}
+
+PACKAGECONFIG ??= "${@bb.utils.contains('PTEST_ENABLED', '1', 'tests', '', d)}"
+PACKAGECONFIG[libwacom] = "-Dlibwacom=true,-Dlibwacom=false,libwacom"
+PACKAGECONFIG[gui] = "-Ddebug-gui=true,-Ddebug-gui=false,cairo gtk+3"
+PACKAGECONFIG[tests] = "-Dtests=true -Dinstall-tests=true,-Dtests=false -Dinstall-tests=false,libcheck"
+
+UDEVDIR = "`pkg-config --variable=udevdir udev`"
+
+EXTRA_OEMESON += "-Dudev-dir=${UDEVDIR} \
+ -Ddocumentation=false \
+ -Dzshcompletiondir=no"
+
+# package name changed in 1.8.1 upgrade: make sure package upgrades work
+RPROVIDES:${PN} = "libinput"
+RREPLACES:${PN} = "libinput"
+RCONFLICTS:${PN} = "libinput"
+
+FILES:${PN}-ptest += "${libexecdir}/libinput/libinput-test-suite"
diff --git a/meta/recipes-graphics/wayland/mtdev_1.1.6.bb b/meta/recipes-graphics/wayland/mtdev_1.1.6.bb
deleted file mode 100644
index 7c1cb5e4ec..0000000000
--- a/meta/recipes-graphics/wayland/mtdev_1.1.6.bb
+++ /dev/null
@@ -1,18 +0,0 @@
-SUMMARY = "Multitouch Protocol Translation Library"
-
-DESCRIPTION = "mtdev is a library which transforms all variants of kernel \
-multitouch events to the slotted type B protocol. The events put into mtdev may \
-be from any MT device, specifically type A without contact tracking, type A with \
-contact tracking, or type B with contact tracking"
-
-HOMEPAGE = "http://bitmath.org/code/mtdev/"
-SECTION = "libs"
-
-LICENSE = "MIT"
-LIC_FILES_CHKSUM = "file://COPYING;md5=ea6bd0268bb0fcd6b27698616ceee5d6"
-
-SRC_URI = "http://bitmath.org/code/${BPN}/${BP}.tar.bz2"
-SRC_URI[md5sum] = "bf8ef2482e84a00b5db8fbd3ce00e249"
-SRC_URI[sha256sum] = "15d7b28da8ac71d8bc8c9287c2045fd174267bc740bec10cfda332dc1204e0e0"
-
-inherit autotools pkgconfig
diff --git a/meta/recipes-graphics/wayland/mtdev_1.1.7.bb b/meta/recipes-graphics/wayland/mtdev_1.1.7.bb
new file mode 100644
index 0000000000..24803c4238
--- /dev/null
+++ b/meta/recipes-graphics/wayland/mtdev_1.1.7.bb
@@ -0,0 +1,17 @@
+SUMMARY = "Multitouch Protocol Translation Library"
+
+DESCRIPTION = "mtdev is a library which transforms all variants of kernel \
+multitouch events to the slotted type B protocol. The events put into mtdev may \
+be from any MT device, specifically type A without contact tracking, type A with \
+contact tracking, or type B with contact tracking"
+
+HOMEPAGE = "http://bitmath.org/code/mtdev/"
+SECTION = "libs"
+
+LICENSE = "MIT"
+LIC_FILES_CHKSUM = "file://COPYING;md5=ea6bd0268bb0fcd6b27698616ceee5d6"
+
+SRC_URI = "http://bitmath.org/code/${BPN}/${BP}.tar.bz2"
+SRC_URI[sha256sum] = "a107adad2101fecac54ac7f9f0e0a0dd155d954193da55c2340c97f2ff1d814e"
+
+inherit autotools pkgconfig
diff --git a/meta/recipes-graphics/wayland/wayland-protocols_1.25.bb b/meta/recipes-graphics/wayland/wayland-protocols_1.25.bb
deleted file mode 100644
index 92ba296165..0000000000
--- a/meta/recipes-graphics/wayland/wayland-protocols_1.25.bb
+++ /dev/null
@@ -1,26 +0,0 @@
-SUMMARY = "Collection of additional Wayland protocols"
-DESCRIPTION = "Wayland protocols that add functionality not \
-available in the Wayland core protocol. Such protocols either add \
-completely new functionality, or extend the functionality of some other \
-protocol either in Wayland core, or some other protocol in \
-wayland-protocols."
-HOMEPAGE = "http://wayland.freedesktop.org"
-LICENSE = "MIT"
-LIC_FILES_CHKSUM = "file://COPYING;md5=c7b12b6702da38ca028ace54aae3d484 \
- file://stable/presentation-time/presentation-time.xml;endline=26;md5=4646cd7d9edc9fa55db941f2d3a7dc53"
-
-SRC_URI = "https://wayland.freedesktop.org/releases/${BPN}-${PV}.tar.xz \
- "
-SRC_URI[sha256sum] = "f1ff0f7199d0a0da337217dd8c99979967808dc37731a1e759e822b75b571460"
-
-UPSTREAM_CHECK_URI = "https://wayland.freedesktop.org/releases.html"
-
-inherit meson pkgconfig allarch
-
-EXTRA_OEMESON += "-Dtests=false"
-
-PACKAGES = "${PN}"
-FILES:${PN} += "${datadir}/pkgconfig/wayland-protocols.pc"
-
-BBCLASSEXTEND = "native nativesdk"
-
diff --git a/meta/recipes-graphics/wayland/wayland-protocols_1.34.bb b/meta/recipes-graphics/wayland/wayland-protocols_1.34.bb
new file mode 100644
index 0000000000..8c12985714
--- /dev/null
+++ b/meta/recipes-graphics/wayland/wayland-protocols_1.34.bb
@@ -0,0 +1,25 @@
+SUMMARY = "Collection of additional Wayland protocols"
+DESCRIPTION = "Wayland protocols that add functionality not \
+available in the Wayland core protocol. Such protocols either add \
+completely new functionality, or extend the functionality of some other \
+protocol either in Wayland core, or some other protocol in \
+wayland-protocols."
+HOMEPAGE = "http://wayland.freedesktop.org"
+LICENSE = "MIT"
+LIC_FILES_CHKSUM = "file://COPYING;md5=c7b12b6702da38ca028ace54aae3d484 \
+ file://stable/presentation-time/presentation-time.xml;endline=26;md5=4646cd7d9edc9fa55db941f2d3a7dc53"
+
+SRC_URI = "https://gitlab.freedesktop.org/wayland/wayland-protocols/-/releases/${PV}/downloads/wayland-protocols-${PV}.tar.xz"
+SRC_URI[sha256sum] = "c59b27cacd85f60baf4ee5f80df5c0d15760ead6a2432b00ab7e2e0574dcafeb"
+
+UPSTREAM_CHECK_URI = "https://gitlab.freedesktop.org/wayland/wayland-protocols/-/tags"
+
+inherit meson pkgconfig allarch
+
+EXTRA_OEMESON += "-Dtests=false"
+
+PACKAGES = "${PN}"
+FILES:${PN} += "${datadir}/pkgconfig/wayland-protocols.pc"
+
+BBCLASSEXTEND = "native nativesdk"
+
diff --git a/meta/recipes-graphics/wayland/wayland-utils_1.0.0.bb b/meta/recipes-graphics/wayland/wayland-utils_1.0.0.bb
deleted file mode 100644
index 4c51f7e8c5..0000000000
--- a/meta/recipes-graphics/wayland/wayland-utils_1.0.0.bb
+++ /dev/null
@@ -1,20 +0,0 @@
-SUMMARY = "Wayland utilities"
-DESCRIPTION = "Wayland-utils contains (for now) \
-wayland-info, a utility for displaying information about the Wayland \
-protocols supported by a Wayland compositor. \
-wayland-info is basically a standalone version of weston-info as found \
-in weston repository. "
-HOMEPAGE = "http://wayland.freedesktop.org"
-LICENSE = "MIT"
-LIC_FILES_CHKSUM = "file://COPYING;md5=548a66038a77415e1df51118625e832f \
- "
-
-SRC_URI = "https://wayland.freedesktop.org/releases/${BPN}-${PV}.tar.xz \
- "
-SRC_URI[sha256sum] = "64fecc4c58e87ae9b302901abe10c2e8af69c7503c221a96ecd0700e0aa268c0"
-
-UPSTREAM_CHECK_URI = "https://wayland.freedesktop.org/releases.html"
-
-inherit meson pkgconfig
-
-DEPENDS += "wayland wayland-native wayland-protocols"
diff --git a/meta/recipes-graphics/wayland/wayland-utils_1.2.0.bb b/meta/recipes-graphics/wayland/wayland-utils_1.2.0.bb
new file mode 100644
index 0000000000..878c7a267e
--- /dev/null
+++ b/meta/recipes-graphics/wayland/wayland-utils_1.2.0.bb
@@ -0,0 +1,22 @@
+SUMMARY = "Wayland utilities"
+DESCRIPTION = "Wayland-utils contains (for now) \
+wayland-info, a utility for displaying information about the Wayland \
+protocols supported by a Wayland compositor. \
+wayland-info is basically a standalone version of weston-info as found \
+in weston repository. "
+HOMEPAGE = "http://wayland.freedesktop.org"
+LICENSE = "MIT"
+LIC_FILES_CHKSUM = "file://COPYING;md5=548a66038a77415e1df51118625e832f \
+ "
+
+SRC_URI = "https://gitlab.freedesktop.org/wayland/wayland-utils/-/releases/${PV}/downloads/${BPN}-${PV}.tar.xz"
+SRC_URI[sha256sum] = "d9278c22554586881802540751bcc42569262bf80cd9ac9b0fd12ff4bd09a9e4"
+
+UPSTREAM_CHECK_URI = "https://gitlab.freedesktop.org/wayland/wayland-utils/-/tags"
+
+inherit meson pkgconfig
+
+DEPENDS += "wayland wayland-native wayland-protocols"
+
+PACKAGECONFIG ??= "drm"
+PACKAGECONFIG[drm] = "-Ddrm=enabled,-Ddrm=disabled,libdrm"
diff --git a/meta/recipes-graphics/wayland/wayland/0002-Consider-pkgconfig-sysroot-for-pkgdatadir.patch b/meta/recipes-graphics/wayland/wayland/0002-Consider-pkgconfig-sysroot-for-pkgdatadir.patch
new file mode 100644
index 0000000000..4573bb635a
--- /dev/null
+++ b/meta/recipes-graphics/wayland/wayland/0002-Consider-pkgconfig-sysroot-for-pkgdatadir.patch
@@ -0,0 +1,46 @@
+From ff8ecbe8891d592e645927659318720f9e190054 Mon Sep 17 00:00:00 2001
+From: Andreas Cord-Landwehr <cordlandwehr@kde.org>
+Date: Sun, 6 Feb 2022 17:23:46 +0100
+Subject: [PATCH] Consider pkgconfig sysroot for pkgdatadir
+
+For libs/cflags this is done automatically, but not for manually accessed
+variables. This matches what wayland-protocols does.
+
+Upstream-Status: Submitted [https://gitlab.freedesktop.org/wayland/wayland/-/merge_requests/272]
+---
+ src/meson.build | 6 +++---
+ 1 file changed, 3 insertions(+), 3 deletions(-)
+
+diff --git a/src/meson.build b/src/meson.build
+index a8a1d2b..721e151 100644
+--- a/src/meson.build
++++ b/src/meson.build
+@@ -65,7 +65,7 @@ if get_option('scanner')
+ version: meson.project_version(),
+ variables: [
+ 'datarootdir=' + join_paths('${prefix}', get_option('datadir')),
+- 'pkgdatadir=' + join_paths('${datarootdir}', meson.project_name()),
++ 'pkgdatadir=' + join_paths('${pc_sysrootdir}${datarootdir}', meson.project_name()),
+ 'bindir=' + join_paths('${prefix}', get_option('bindir')),
+ 'wayland_scanner=${bindir}/wayland-scanner'
+ ],
+@@ -211,7 +211,7 @@ if get_option('libraries')
+ filebase: 'wayland-server',
+ variables: [
+ 'datarootdir=' + join_paths('${prefix}', get_option('datadir')),
+- 'pkgdatadir=' + join_paths('${datarootdir}', meson.project_name())
++ 'pkgdatadir=' + join_paths('${pc_sysrootdir}${datarootdir}', meson.project_name())
+ ]
+ )
+
+@@ -250,7 +250,7 @@ if get_option('libraries')
+ filebase: 'wayland-client',
+ variables: [
+ 'datarootdir=' + join_paths('${prefix}', get_option('datadir')),
+- 'pkgdatadir=' + join_paths('${datarootdir}', meson.project_name())
++ 'pkgdatadir=' + join_paths('${pc_sysrootdir}${datarootdir}', meson.project_name())
+ ]
+ )
+
+--
+2.37.2
diff --git a/meta/recipes-graphics/wayland/wayland/0002-Do-not-hardcode-the-path-to-wayland-scanner.patch b/meta/recipes-graphics/wayland/wayland/0002-Do-not-hardcode-the-path-to-wayland-scanner.patch
deleted file mode 100644
index e3e71925b8..0000000000
--- a/meta/recipes-graphics/wayland/wayland/0002-Do-not-hardcode-the-path-to-wayland-scanner.patch
+++ /dev/null
@@ -1,27 +0,0 @@
-From 3e7cd56611aeec274e48a4816bc7c21f74f15be0 Mon Sep 17 00:00:00 2001
-From: Alexander Kanavin <alex.kanavin@gmail.com>
-Date: Mon, 17 Feb 2020 21:46:18 +0100
-Subject: [PATCH] Do not hardcode the path to wayland-scanner
-
-This results in host contamination during builds.
-
-Upstream-Status: Inappropriate [oe-core specific]
-Signed-off-by: Alexander Kanavin <alex.kanavin@gmail.com>
-
----
- src/meson.build | 2 +-
- 1 file changed, 1 insertion(+), 1 deletion(-)
-
-diff --git a/src/meson.build b/src/meson.build
-index 15730a3..61edbc8 100644
---- a/src/meson.build
-+++ b/src/meson.build
-@@ -52,7 +52,7 @@ if get_option('scanner')
- 'datarootdir=' + join_paths('${prefix}', get_option('datadir')),
- 'pkgdatadir=' + join_paths('${datarootdir}', meson.project_name()),
- 'bindir=' + join_paths('${prefix}', get_option('bindir')),
-- 'wayland_scanner=${bindir}/wayland-scanner'
-+ 'wayland_scanner=wayland-scanner'
- ],
- filebase: 'wayland-scanner'
- )
diff --git a/meta/recipes-graphics/wayland/wayland_1.20.0.bb b/meta/recipes-graphics/wayland/wayland_1.20.0.bb
deleted file mode 100644
index bd437767b2..0000000000
--- a/meta/recipes-graphics/wayland/wayland_1.20.0.bb
+++ /dev/null
@@ -1,61 +0,0 @@
-SUMMARY = "Wayland, a protocol between a compositor and clients"
-DESCRIPTION = "Wayland is a protocol for a compositor to talk to its clients \
-as well as a C library implementation of that protocol. The compositor can be \
-a standalone display server running on Linux kernel modesetting and evdev \
-input devices, an X application, or a wayland client itself. The clients can \
-be traditional applications, X servers (rootless or fullscreen) or other \
-display servers."
-HOMEPAGE = "http://wayland.freedesktop.org"
-LICENSE = "MIT"
-LIC_FILES_CHKSUM = "file://COPYING;md5=b31d8f53b6aaf2b4985d7dd7810a70d1 \
- file://src/wayland-server.c;endline=24;md5=b8e046164a766bb1ede8ba38e9dcd7ce"
-
-DEPENDS = "expat libffi wayland-native"
-
-SRC_URI = "https://wayland.freedesktop.org/releases/${BPN}-${PV}.tar.xz \
- file://run-ptest \
- file://0002-Do-not-hardcode-the-path-to-wayland-scanner.patch \
- file://0001-build-Fix-strndup-detection-on-MinGW.patch \
- "
-SRC_URI[sha256sum] = "b8a034154c7059772e0fdbd27dbfcda6c732df29cae56a82274f6ec5d7cd8725"
-
-UPSTREAM_CHECK_URI = "https://wayland.freedesktop.org/releases.html"
-
-inherit meson pkgconfig ptest
-
-PACKAGECONFIG ??= "dtd-validation"
-PACKAGECONFIG[dtd-validation] = "-Ddtd_validation=true,-Ddtd_validation=false,libxml2,,"
-
-EXTRA_OEMESON = "-Ddocumentation=false"
-EXTRA_OEMESON:class-native = "-Ddocumentation=false"
-
-# Wayland installs a M4 macro for other projects to use, which uses the target
-# pkg-config to find files. Replace pkg-config with pkg-config-native.
-do_install:append:class-native() {
- sed -e 's,PKG_CHECK_MODULES(.*),,g' \
- -e 's,$PKG_CONFIG,pkg-config-native,g' \
- -i ${D}/${datadir}/aclocal/wayland-scanner.m4
-}
-
-do_install_ptest() {
- mkdir -p ${D}${PTEST_PATH}/tests/data
- cp -rf ${B}/tests/*-test ${B}/tests/*-checker ${D}${PTEST_PATH}/tests
- cp -rf ${B}/tests/*-checker ${D}${PTEST_PATH}
- cp -rf ${S}/tests/scanner-test.sh ${D}${PTEST_PATH}/tests
- cp -rf ${S}/tests/data/* ${D}${PTEST_PATH}/tests/data/
- cp -rf ${S}/egl/wayland-egl-symbols-check ${D}${PTEST_PATH}/tests/
-}
-
-sysroot_stage_all:append:class-target () {
- rm ${SYSROOT_DESTDIR}/${datadir}/aclocal/wayland-scanner.m4
- cp ${STAGING_DATADIR_NATIVE}/aclocal/wayland-scanner.m4 ${SYSROOT_DESTDIR}/${datadir}/aclocal/
-}
-
-PACKAGES =+ "${PN}-tools"
-
-FILES:${PN}-tools = "${bindir}/wayland-scanner"
-FILES:${PN}-dev += "${datadir}/${BPN}/wayland-scanner.mk"
-
-BBCLASSEXTEND = "native nativesdk"
-
-RDEPENDS:${PN}-ptest += "binutils sed ${PN}-tools"
diff --git a/meta/recipes-graphics/wayland/wayland_1.22.0.bb b/meta/recipes-graphics/wayland/wayland_1.22.0.bb
new file mode 100644
index 0000000000..6aa76063ea
--- /dev/null
+++ b/meta/recipes-graphics/wayland/wayland_1.22.0.bb
@@ -0,0 +1,62 @@
+SUMMARY = "Wayland, a protocol between a compositor and clients"
+DESCRIPTION = "Wayland is a protocol for a compositor to talk to its clients \
+as well as a C library implementation of that protocol. The compositor can be \
+a standalone display server running on Linux kernel modesetting and evdev \
+input devices, an X application, or a wayland client itself. The clients can \
+be traditional applications, X servers (rootless or fullscreen) or other \
+display servers."
+HOMEPAGE = "http://wayland.freedesktop.org"
+LICENSE = "MIT"
+LIC_FILES_CHKSUM = "file://COPYING;md5=b31d8f53b6aaf2b4985d7dd7810a70d1 \
+ file://src/wayland-server.c;endline=24;md5=b8e046164a766bb1ede8ba38e9dcd7ce"
+
+DEPENDS = "expat libffi wayland-native"
+
+SRC_URI = "https://gitlab.freedesktop.org/wayland/wayland/-/releases/${PV}/downloads/${BPN}-${PV}.tar.xz \
+ file://run-ptest \
+ file://0001-build-Fix-strndup-detection-on-MinGW.patch \
+ file://0002-Consider-pkgconfig-sysroot-for-pkgdatadir.patch \
+ "
+SRC_URI[sha256sum] = "1540af1ea698a471c2d8e9d288332c7e0fd360c8f1d12936ebb7e7cbc2425842"
+
+UPSTREAM_CHECK_URI = "https://gitlab.freedesktop.org/wayland/wayland/-/tags"
+UPSTREAM_CHECK_REGEX = "wayland-(?P<pver>\d+\.\d+\.(?!9\d+)\d+)"
+
+inherit meson pkgconfig ptest
+
+PACKAGECONFIG ??= "dtd-validation"
+PACKAGECONFIG[dtd-validation] = "-Ddtd_validation=true,-Ddtd_validation=false,libxml2,,"
+
+EXTRA_OEMESON = "-Ddocumentation=false"
+EXTRA_OEMESON:class-native = "-Ddocumentation=false"
+
+# Wayland installs a M4 macro for other projects to use, which uses the target
+# pkg-config to find files. Replace pkg-config with pkg-config-native.
+do_install:append:class-native() {
+ sed -e 's,PKG_CHECK_MODULES(.*),,g' \
+ -e 's,$PKG_CONFIG,pkg-config-native,g' \
+ -i ${D}/${datadir}/aclocal/wayland-scanner.m4
+}
+
+do_install_ptest() {
+ mkdir -p ${D}${PTEST_PATH}/tests/data
+ cp -rf ${B}/tests/*-test ${B}/tests/*-checker ${D}${PTEST_PATH}/tests
+ cp -rf ${B}/tests/*-checker ${D}${PTEST_PATH}
+ cp -rf ${S}/tests/scanner-test.sh ${D}${PTEST_PATH}/tests
+ cp -rf ${S}/tests/data/* ${D}${PTEST_PATH}/tests/data/
+ cp -rf ${S}/egl/wayland-egl-symbols-check ${D}${PTEST_PATH}/tests/
+}
+
+sysroot_stage_all:append:class-target () {
+ rm ${SYSROOT_DESTDIR}/${datadir}/aclocal/wayland-scanner.m4
+ cp ${STAGING_DATADIR_NATIVE}/aclocal/wayland-scanner.m4 ${SYSROOT_DESTDIR}/${datadir}/aclocal/
+}
+
+PACKAGES =+ "${PN}-tools"
+
+FILES:${PN}-tools = "${bindir}/wayland-scanner"
+FILES:${PN}-dev += "${datadir}/${BPN}/wayland-scanner.mk"
+
+BBCLASSEXTEND = "native nativesdk"
+
+RDEPENDS:${PN}-ptest += "binutils sed ${PN}-tools"
diff --git a/meta/recipes-graphics/wayland/weston-init.bb b/meta/recipes-graphics/wayland/weston-init.bb
index 497d860203..024e400665 100644
--- a/meta/recipes-graphics/wayland/weston-init.bb
+++ b/meta/recipes-graphics/wayland/weston-init.bb
@@ -9,15 +9,17 @@ SRC_URI = "file://init \
file://weston.ini \
file://weston.service \
file://weston.socket \
+ file://weston-socket.sh \
file://weston-autologin \
file://weston-start"
S = "${WORKDIR}"
-PACKAGECONFIG ??= ""
+PACKAGECONFIG ??= "${@bb.utils.contains('DISTRO_FEATURES', 'x11', 'xwayland', '', d)}"
PACKAGECONFIG:append:qemuriscv64 = " use-pixman"
PACKAGECONFIG:append:qemuppc64 = " use-pixman"
+PACKAGECONFIG[xwayland] = ",,"
PACKAGECONFIG[no-idle-timeout] = ",,"
PACKAGECONFIG[use-pixman] = ",,"
@@ -25,31 +27,41 @@ DEFAULTBACKEND ??= ""
DEFAULTBACKEND:qemuall ?= "drm"
do_install() {
- if [ "${VIRTUAL-RUNTIME_init_manager}" != "systemd" ]; then
+ # Install weston-start script
+ if [ "${VIRTUAL-RUNTIME_init_manager}" != "systemd" ]; then
+ install -Dm755 ${WORKDIR}/weston-start ${D}${bindir}/weston-start
+ sed -i 's,@DATADIR@,${datadir},g' ${D}${bindir}/weston-start
+ sed -i 's,@LOCALSTATEDIR@,${localstatedir},g' ${D}${bindir}/weston-start
install -Dm755 ${WORKDIR}/init ${D}/${sysconfdir}/init.d/weston
sed -i 's#ROOTHOME#${ROOT_HOME}#' ${D}/${sysconfdir}/init.d/weston
- fi
+ fi
+
+ # Install Weston systemd service
+ if ${@bb.utils.contains('DISTRO_FEATURES','systemd','true','false',d)}; then
+ install -D -p -m0644 ${WORKDIR}/weston.service ${D}${systemd_system_unitdir}/weston.service
+ install -D -p -m0644 ${WORKDIR}/weston.socket ${D}${systemd_system_unitdir}/weston.socket
+ install -D -p -m0644 ${WORKDIR}/weston-socket.sh ${D}${sysconfdir}/profile.d/weston-socket.sh
+ sed -i -e s:/etc:${sysconfdir}:g \
+ -e s:/usr/bin:${bindir}:g \
+ -e s:/var:${localstatedir}:g \
+ ${D}${systemd_system_unitdir}/weston.service
+ fi
+
+ if [ "${@bb.utils.filter('DISTRO_FEATURES', 'pam', d)}" ]; then
+ install -D -p -m0644 ${WORKDIR}/weston-autologin ${D}${sysconfdir}/pam.d/weston-autologin
+ fi
+
install -D -p -m0644 ${WORKDIR}/weston.ini ${D}${sysconfdir}/xdg/weston/weston.ini
install -Dm644 ${WORKDIR}/weston.env ${D}${sysconfdir}/default/weston
- # Install Weston systemd service and accompanying udev rule
- install -D -p -m0644 ${WORKDIR}/weston.service ${D}${systemd_system_unitdir}/weston.service
- install -D -p -m0644 ${WORKDIR}/weston.socket ${D}${systemd_system_unitdir}/weston.socket
- if [ "${@bb.utils.filter('DISTRO_FEATURES', 'pam', d)}" ]; then
- install -D -p -m0644 ${WORKDIR}/weston-autologin ${D}${sysconfdir}/pam.d/weston-autologin
- fi
- sed -i -e s:/etc:${sysconfdir}:g \
- -e s:/usr/bin:${bindir}:g \
- -e s:/var:${localstatedir}:g \
- ${D}${systemd_system_unitdir}/weston.service
- # Install weston-start script
- install -Dm755 ${WORKDIR}/weston-start ${D}${bindir}/weston-start
- sed -i 's,@DATADIR@,${datadir},g' ${D}${bindir}/weston-start
- sed -i 's,@LOCALSTATEDIR@,${localstatedir},g' ${D}${bindir}/weston-start
- if [ -n "${DEFAULTBACKEND}" ]; then
+ if [ -n "${DEFAULTBACKEND}" ]; then
sed -i -e "/^\[core\]/a backend=${DEFAULTBACKEND}-backend.so" ${D}${sysconfdir}/xdg/weston/weston.ini
fi
+ if [ "${@bb.utils.contains('PACKAGECONFIG', 'xwayland', 'yes', 'no', d)}" = "yes" ]; then
+ sed -i -e "/^\[core\]/a xwayland=true" ${D}${sysconfdir}/xdg/weston/weston.ini
+ fi
+
if [ "${@bb.utils.contains('PACKAGECONFIG', 'no-idle-timeout', 'yes', 'no', d)}" = "yes" ]; then
sed -i -e "/^\[core\]/a idle-time=0" ${D}${sysconfdir}/xdg/weston/weston.ini
fi
@@ -78,6 +90,7 @@ INITSCRIPT_PARAMS = "start 9 5 2 . stop 20 0 1 6 ."
FILES:${PN} += "\
${sysconfdir}/xdg/weston/weston.ini \
+ ${sysconfdir}/profile.d/weston-socket.sh \
${systemd_system_unitdir}/weston.service \
${systemd_system_unitdir}/weston.socket \
${sysconfdir}/default/weston \
@@ -88,6 +101,6 @@ FILES:${PN} += "\
CONFFILES:${PN} += "${sysconfdir}/xdg/weston/weston.ini ${sysconfdir}/default/weston"
SYSTEMD_SERVICE:${PN} = "weston.service weston.socket"
-USERADD_PARAM:${PN} = "--home /home/weston --shell /bin/sh --user-group -G video,input weston"
-GROUPADD_PARAM:${PN} = "-r wayland"
+USERADD_PARAM:${PN} = "--home /home/weston --shell /bin/sh --user-group -G video,input,render,wayland weston"
+GROUPADD_PARAM:${PN} = "-r wayland; -r render"
diff --git a/meta/recipes-graphics/wayland/weston-init/init b/meta/recipes-graphics/wayland/weston-init/init
index d3b0d1873e..a5c54e001e 100644
--- a/meta/recipes-graphics/wayland/weston-init/init
+++ b/meta/recipes-graphics/wayland/weston-init/init
@@ -30,7 +30,7 @@ done
case "$1" in
start)
. /etc/profile
- export HOME=ROOTHOME
+ export HOME=ROOTHOME
WESTON_USER=weston weston-start $OPTARGS &
;;
diff --git a/meta/recipes-graphics/wayland/weston-init/weston-socket.sh b/meta/recipes-graphics/wayland/weston-init/weston-socket.sh
new file mode 100755
index 0000000000..86389d63a3
--- /dev/null
+++ b/meta/recipes-graphics/wayland/weston-init/weston-socket.sh
@@ -0,0 +1,20 @@
+#!/bin/sh
+
+# set weston variables for use with global weston socket
+global_socket="/run/wayland-0"
+if [ -e "$global_socket" ]; then
+ weston_group=$(stat -c "%G" "$global_socket")
+ if [ "$(id -u)" = "0" ]; then
+ export WAYLAND_DISPLAY="$global_socket"
+ else
+ case "$(groups "$USER")" in
+ *"$weston_group"*)
+ export WAYLAND_DISPLAY="$global_socket"
+ ;;
+ *)
+ ;;
+ esac
+ fi
+ unset weston_group
+fi
+unset global_socket
diff --git a/meta/recipes-graphics/wayland/weston/0001-libweston-tools-Include-libgen.h-for-basename-signat.patch b/meta/recipes-graphics/wayland/weston/0001-libweston-tools-Include-libgen.h-for-basename-signat.patch
new file mode 100644
index 0000000000..1d281fa832
--- /dev/null
+++ b/meta/recipes-graphics/wayland/weston/0001-libweston-tools-Include-libgen.h-for-basename-signat.patch
@@ -0,0 +1,48 @@
+From 2b53236ac637dfa7fb0f438f7391a73f6ef92a06 Mon Sep 17 00:00:00 2001
+From: Khem Raj <raj.khem@gmail.com>
+Date: Thu, 14 Dec 2023 09:13:54 -0800
+Subject: [PATCH] libweston,tools: Include libgen.h for basename signature
+
+Latest musl has removed the declaration from string.h [1] as it only
+implements POSIX version alone and string.h in glibc implements GNU
+version of basename. This now results in compile errors on musl.
+
+This might be a warning with older compilers but it is error with
+Clang-17+ as it treats -Wimplicit-function-declaration as error
+
+[1] https://git.musl-libc.org/cgit/musl/commit/?id=725e17ed6dff4d0cd22487bb64470881e86a92e7
+
+Upstream-Status: Submitted [https://gitlab.freedesktop.org/wayland/weston/-/merge_requests/1420]
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+---
+ libweston/backend-drm/libbacklight.c | 1 +
+ tools/zunitc/src/zunitc_impl.c | 1 +
+ 2 files changed, 2 insertions(+)
+
+diff --git a/libweston/backend-drm/libbacklight.c b/libweston/backend-drm/libbacklight.c
+index ca7f2d68..74690fa7 100644
+--- a/libweston/backend-drm/libbacklight.c
++++ b/libweston/backend-drm/libbacklight.c
+@@ -41,6 +41,7 @@
+ #include <drm.h>
+ #include <fcntl.h>
+ #include <malloc.h>
++#include <libgen.h>
+ #include <string.h>
+ #include <errno.h>
+
+diff --git a/tools/zunitc/src/zunitc_impl.c b/tools/zunitc/src/zunitc_impl.c
+index 18f03015..9b460fa0 100644
+--- a/tools/zunitc/src/zunitc_impl.c
++++ b/tools/zunitc/src/zunitc_impl.c
+@@ -27,6 +27,7 @@
+
+ #include <errno.h>
+ #include <fcntl.h>
++#include <libgen.h>
+ #include <stdarg.h>
+ #include <stdbool.h>
+ #include <stdio.h>
+--
+2.43.0
+
diff --git a/meta/recipes-graphics/wayland/weston/dont-use-plane-add-prop.patch b/meta/recipes-graphics/wayland/weston/dont-use-plane-add-prop.patch
deleted file mode 100644
index 1ac0695222..0000000000
--- a/meta/recipes-graphics/wayland/weston/dont-use-plane-add-prop.patch
+++ /dev/null
@@ -1,32 +0,0 @@
-From ece4c3d261aeec230869c0304ed1011ff6837c16 Mon Sep 17 00:00:00 2001
-From: Khem Raj <raj.khem@gmail.com>
-Date: Sat, 12 Sep 2020 14:04:04 -0700
-Subject: [PATCH] Fix atomic modesetting with musl
-
-atomic modesetting seems to fail with drm weston backend and this patch fixes
-it, below errors are seen before weston exits
-
-atomic: couldn't commit new state: Invalid argument
-
-Upstream-Status: Submitted [https://gitlab.freedesktop.org/wayland/weston/-/issues/158]
-Signed-off-by: Khem Raj <raj.khem@gmail.com>
-
----
- libweston/backend-drm/kms.c | 4 ++--
- 1 file changed, 2 insertions(+), 2 deletions(-)
-
-diff --git a/libweston/backend-drm/kms.c b/libweston/backend-drm/kms.c
-index 780d007..9994da1 100644
---- a/libweston/backend-drm/kms.c
-+++ b/libweston/backend-drm/kms.c
-@@ -1142,8 +1142,8 @@ drm_pending_state_apply_atomic(struct drm_pending_state *pending_state,
- wl_list_for_each(plane, &b->plane_list, link) {
- drm_debug(b, "\t\t[atomic] starting with plane %lu disabled\n",
- (unsigned long) plane->plane_id);
-- plane_add_prop(req, plane, WDRM_PLANE_CRTC_ID, 0);
-- plane_add_prop(req, plane, WDRM_PLANE_FB_ID, 0);
-+ //plane_add_prop(req, plane, WDRM_PLANE_CRTC_ID, 0);
-+ //plane_add_prop(req, plane, WDRM_PLANE_FB_ID, 0);
- }
-
- flags |= DRM_MODE_ATOMIC_ALLOW_MODESET;
diff --git a/meta/recipes-graphics/wayland/weston/xwayland.weston-start b/meta/recipes-graphics/wayland/weston/xwayland.weston-start
index db384b1a53..342ac8d129 100644
--- a/meta/recipes-graphics/wayland/weston/xwayland.weston-start
+++ b/meta/recipes-graphics/wayland/weston/xwayland.weston-start
@@ -1,5 +1,6 @@
#!/bin/sh
if type Xwayland >/dev/null 2>/dev/null; then
- mkdir -p /tmp/.X11-unix
+ mkdir -m 775 -p /tmp/.X11-unix
+ chown root:video /tmp/.X11-unix
fi
diff --git a/meta/recipes-graphics/wayland/weston_10.0.0.bb b/meta/recipes-graphics/wayland/weston_10.0.0.bb
deleted file mode 100644
index 93f7b59659..0000000000
--- a/meta/recipes-graphics/wayland/weston_10.0.0.bb
+++ /dev/null
@@ -1,144 +0,0 @@
-SUMMARY = "Weston, a Wayland compositor"
-DESCRIPTION = "Weston is the reference implementation of a Wayland compositor"
-HOMEPAGE = "http://wayland.freedesktop.org"
-LICENSE = "MIT"
-LIC_FILES_CHKSUM = "file://COPYING;md5=d79ee9e66bb0f95d3386a7acae780b70 \
- file://libweston/compositor.c;endline=27;md5=eb6d5297798cabe2ddc65e2af519bcf0 \
- "
-
-SRC_URI = "https://wayland.freedesktop.org/releases/${BPN}-${PV}.tar.xz \
- file://weston.png \
- file://weston.desktop \
- file://xwayland.weston-start \
- file://systemd-notify.weston-start \
- "
-
-SRC_URI:append:libc-musl = " file://dont-use-plane-add-prop.patch "
-
-SRC_URI[sha256sum] = "5c23964112b90238bed39e5dd1e41cd71a79398813cdc3bbb15a9fdc94e547ae"
-
-UPSTREAM_CHECK_URI = "https://wayland.freedesktop.org/releases.html"
-
-inherit meson pkgconfig useradd
-
-# depends on virtual/egl
-#
-require ${THISDIR}/required-distro-features.inc
-
-DEPENDS = "libxkbcommon gdk-pixbuf pixman cairo glib-2.0"
-DEPENDS += "wayland wayland-protocols libinput virtual/egl pango wayland-native"
-
-LDFLAGS += "${@bb.utils.contains('DISTRO_FEATURES', 'lto', '-Wl,-z,undefs', '', d)}"
-
-WESTON_MAJOR_VERSION = "${@'.'.join(d.getVar('PV').split('.')[0:1])}"
-
-EXTRA_OEMESON += "-Dpipewire=false"
-
-PACKAGECONFIG ??= "${@bb.utils.contains('DISTRO_FEATURES', 'wayland', 'kms wayland egl clients', '', d)} \
- ${@bb.utils.contains('DISTRO_FEATURES', 'x11 wayland', 'xwayland', '', d)} \
- ${@bb.utils.filter('DISTRO_FEATURES', 'systemd x11', d)} \
- ${@bb.utils.contains_any('DISTRO_FEATURES', 'wayland x11', '', 'headless', d)} \
- ${@oe.utils.conditional('VIRTUAL-RUNTIME_init_manager', 'sysvinit', 'launcher-libseat', '', d)} \
- image-jpeg \
- screenshare \
- shell-desktop \
- shell-fullscreen \
- shell-ivi"
-
-# Can be 'damage', 'im', 'egl', 'shm', 'touch', 'dmabuf-feedback', 'dmabuf-v4l', 'dmabuf-egl' or 'all'
-SIMPLECLIENTS ?= "all"
-
-#
-# Compositor choices
-#
-# Weston on KMS
-PACKAGECONFIG[kms] = "-Dbackend-drm=true,-Dbackend-drm=false,drm udev virtual/egl virtual/libgles2 virtual/libgbm mtdev"
-# Weston on Wayland (nested Weston)
-PACKAGECONFIG[wayland] = "-Dbackend-wayland=true,-Dbackend-wayland=false,virtual/egl virtual/libgles2"
-# Weston on X11
-PACKAGECONFIG[x11] = "-Dbackend-x11=true,-Dbackend-x11=false,virtual/libx11 libxcb libxcb libxcursor cairo"
-# Headless Weston
-PACKAGECONFIG[headless] = "-Dbackend-headless=true,-Dbackend-headless=false"
-# Weston on framebuffer
-PACKAGECONFIG[fbdev] = "-Ddeprecated-backend-fbdev=true,-Ddeprecated-backend-fbdev=false,udev mtdev"
-# Weston on RDP
-PACKAGECONFIG[rdp] = "-Dbackend-rdp=true,-Dbackend-rdp=false,freerdp"
-# weston-launch
-PACKAGECONFIG[launch] = "-Ddeprecated-weston-launch=true,-Ddeprecated-weston-launch=false,drm"
-# VA-API desktop recorder
-PACKAGECONFIG[vaapi] = "-Dbackend-drm-screencast-vaapi=true,-Dbackend-drm-screencast-vaapi=false,libva"
-# Weston with EGL support
-PACKAGECONFIG[egl] = "-Drenderer-gl=true,-Drenderer-gl=false,virtual/egl"
-# Weston with lcms support
-PACKAGECONFIG[lcms] = "-Dcolor-management-lcms=true,-Dcolor-management-lcms=false,lcms"
-# Weston with webp support
-PACKAGECONFIG[webp] = "-Dimage-webp=true,-Dimage-webp=false,libwebp"
-# Weston with systemd-login support
-PACKAGECONFIG[systemd] = "-Dsystemd=true -Dlauncher-logind=true,-Dsystemd=false -Dlauncher-logind=false,systemd dbus"
-# Weston with Xwayland support (requires X11 and Wayland)
-PACKAGECONFIG[xwayland] = "-Dxwayland=true,-Dxwayland=false"
-# colord CMS support
-PACKAGECONFIG[colord] = "-Dcolor-management-colord=true,-Dcolor-management-colord=false,colord"
-# Clients support
-PACKAGECONFIG[clients] = "-Dsimple-clients=${SIMPLECLIENTS} -Ddemo-clients=true,-Dsimple-clients= -Ddemo-clients=false"
-# Virtual remote output with GStreamer on DRM backend
-PACKAGECONFIG[remoting] = "-Dremoting=true,-Dremoting=false,gstreamer1.0 gstreamer1.0-plugins-base"
-# Weston with screen-share support
-PACKAGECONFIG[screenshare] = "-Dscreenshare=true,-Dscreenshare=false"
-# Traditional desktop shell
-PACKAGECONFIG[shell-desktop] = "-Dshell-desktop=true,-Dshell-desktop=false"
-# Fullscreen shell
-PACKAGECONFIG[shell-fullscreen] = "-Dshell-fullscreen=true,-Dshell-fullscreen=false"
-# In-Vehicle Infotainment (IVI) shell
-PACKAGECONFIG[shell-ivi] = "-Dshell-ivi=true,-Dshell-ivi=false"
-# JPEG image loading support
-PACKAGECONFIG[image-jpeg] = "-Dimage-jpeg=true,-Dimage-jpeg=false, jpeg"
-# support libseat based launch
-PACKAGECONFIG[launcher-libseat] = "-Dlauncher-libseat=true,-Dlauncher-libseat=false,seatd"
-
-do_install:append() {
- # Weston doesn't need the .la files to load modules, so wipe them
- rm -f ${D}/${libdir}/libweston-${WESTON_MAJOR_VERSION}/*.la
-
- # If X11, ship a desktop file to launch it
- if [ "${@bb.utils.filter('DISTRO_FEATURES', 'x11', d)}" ]; then
- install -d ${D}${datadir}/applications
- install ${WORKDIR}/weston.desktop ${D}${datadir}/applications
-
- install -d ${D}${datadir}/icons/hicolor/48x48/apps
- install ${WORKDIR}/weston.png ${D}${datadir}/icons/hicolor/48x48/apps
- fi
-
- if [ "${@bb.utils.contains('PACKAGECONFIG', 'xwayland', 'yes', 'no', d)}" = "yes" ]; then
- install -Dm 644 ${WORKDIR}/xwayland.weston-start ${D}${datadir}/weston-start/xwayland
- fi
-
- if [ "${@bb.utils.contains('PACKAGECONFIG', 'systemd', 'yes', 'no', d)}" = "yes" ]; then
- install -Dm 644 ${WORKDIR}/systemd-notify.weston-start ${D}${datadir}/weston-start/systemd-notify
- fi
-
- if [ "${@bb.utils.contains('PACKAGECONFIG', 'launch', 'yes', 'no', d)}" = "yes" ]; then
- chmod u+s ${D}${bindir}/weston-launch
- fi
-}
-
-PACKAGES += "${@bb.utils.contains('PACKAGECONFIG', 'xwayland', '${PN}-xwayland', '', d)} \
- libweston-${WESTON_MAJOR_VERSION} ${PN}-examples"
-
-FILES:${PN}-dev += "${libdir}/${BPN}/libexec_weston.so"
-FILES:${PN} = "${bindir}/weston ${bindir}/weston-terminal ${bindir}/weston-info ${bindir}/weston-launch ${bindir}/wcap-decode ${libexecdir} ${libdir}/${BPN}/*.so* ${datadir}"
-
-FILES:libweston-${WESTON_MAJOR_VERSION} = "${libdir}/lib*${SOLIBS} ${libdir}/libweston-${WESTON_MAJOR_VERSION}/*.so"
-SUMMARY:libweston-${WESTON_MAJOR_VERSION} = "Helper library for implementing 'wayland window managers'."
-
-FILES:${PN}-examples = "${bindir}/*"
-
-FILES:${PN}-xwayland = "${libdir}/libweston-${WESTON_MAJOR_VERSION}/xwayland.so"
-RDEPENDS:${PN}-xwayland += "xwayland"
-
-RDEPENDS:${PN} += "xkeyboard-config"
-RRECOMMENDS:${PN} = "weston-init liberation-fonts"
-RRECOMMENDS:${PN}-dev += "wayland-protocols"
-
-USERADD_PACKAGES = "${PN}"
-GROUPADD_PARAM:${PN} = "--system weston-launch"
diff --git a/meta/recipes-graphics/wayland/weston_13.0.0.bb b/meta/recipes-graphics/wayland/weston_13.0.0.bb
new file mode 100644
index 0000000000..b728bd0ef3
--- /dev/null
+++ b/meta/recipes-graphics/wayland/weston_13.0.0.bb
@@ -0,0 +1,146 @@
+SUMMARY = "Weston, a Wayland compositor"
+DESCRIPTION = "Weston is the reference implementation of a Wayland compositor"
+HOMEPAGE = "http://wayland.freedesktop.org"
+LICENSE = "MIT"
+LIC_FILES_CHKSUM = "file://COPYING;md5=d79ee9e66bb0f95d3386a7acae780b70 \
+ file://libweston/compositor.c;endline=27;md5=eb6d5297798cabe2ddc65e2af519bcf0 \
+ "
+
+SRC_URI = "https://gitlab.freedesktop.org/wayland/weston/-/releases/${PV}/downloads/${BPN}-${PV}.tar.xz \
+ file://0001-libweston-tools-Include-libgen.h-for-basename-signat.patch \
+ file://weston.png \
+ file://weston.desktop \
+ file://xwayland.weston-start \
+ file://systemd-notify.weston-start \
+ "
+
+SRC_URI[sha256sum] = "52ff1d4aa2394a2e416c85a338b627ce97fa71d43eb762fd4aaf145d36fc795a"
+
+UPSTREAM_CHECK_URI = "https://gitlab.freedesktop.org/wayland/weston/-/tags"
+UPSTREAM_CHECK_REGEX = "weston-(?P<pver>\d+\.\d+\.(?!9\d+)\d+)"
+
+inherit meson pkgconfig useradd
+
+# depends on virtual/egl
+#
+require ${THISDIR}/required-distro-features.inc
+
+DEPENDS = "libxkbcommon gdk-pixbuf pixman cairo glib-2.0"
+DEPENDS += "wayland wayland-protocols libinput virtual/egl pango wayland-native"
+
+LDFLAGS += "${@bb.utils.contains('DISTRO_FEATURES', 'lto', '-Wl,-z,undefs', '', d)}"
+
+WESTON_MAJOR_VERSION = "${@'.'.join(d.getVar('PV').split('.')[0:1])}"
+
+EXTRA_OEMESON += "-Dpipewire=false"
+
+PACKAGECONFIG ??= "${@bb.utils.contains('DISTRO_FEATURES', 'wayland', 'kms wayland egl clients', '', d)} \
+ ${@bb.utils.contains('DISTRO_FEATURES', 'x11 wayland', 'xwayland', '', d)} \
+ ${@bb.utils.filter('DISTRO_FEATURES', 'systemd x11', d)} \
+ ${@bb.utils.contains_any('DISTRO_FEATURES', 'wayland x11', '', 'headless', d)} \
+ image-jpeg \
+ screenshare \
+ shell-desktop \
+ shell-fullscreen \
+ shell-ivi \
+ shell-kiosk \
+ "
+
+# Can be 'damage', 'im', 'egl', 'shm', 'touch', 'dmabuf-feedback', 'dmabuf-v4l', 'dmabuf-egl' or 'all'
+SIMPLECLIENTS ?= "all"
+
+#
+# Compositor choices
+#
+# Weston on KMS
+PACKAGECONFIG[kms] = "-Dbackend-drm=true,-Dbackend-drm=false,drm udev seatd virtual/egl virtual/libgles2 virtual/libgbm mtdev"
+# Weston on Wayland (nested Weston)
+PACKAGECONFIG[wayland] = "-Dbackend-wayland=true,-Dbackend-wayland=false,virtual/egl virtual/libgles2"
+# Weston on X11
+PACKAGECONFIG[x11] = "-Dbackend-x11=true,-Dbackend-x11=false,virtual/libx11 libxcb libxcursor"
+# Headless Weston
+PACKAGECONFIG[headless] = "-Dbackend-headless=true,-Dbackend-headless=false"
+# Weston on RDP
+PACKAGECONFIG[rdp] = "-Dbackend-rdp=true,-Dbackend-rdp=false,freerdp"
+# VA-API desktop recorder
+PACKAGECONFIG[vaapi] = "-Dbackend-drm-screencast-vaapi=true,-Dbackend-drm-screencast-vaapi=false,libva"
+# Weston with EGL support
+PACKAGECONFIG[egl] = "-Drenderer-gl=true,-Drenderer-gl=false,virtual/egl"
+# Weston with lcms support
+PACKAGECONFIG[lcms] = "-Dcolor-management-lcms=true,-Dcolor-management-lcms=false,lcms"
+# Weston with webp support
+PACKAGECONFIG[webp] = "-Dimage-webp=true,-Dimage-webp=false,libwebp"
+# Weston with systemd support
+PACKAGECONFIG[systemd] = "-Dsystemd=true,-Dsystemd=false,systemd dbus"
+# Weston with Xwayland support (requires X11 and Wayland)
+PACKAGECONFIG[xwayland] = "-Dxwayland=true,-Dxwayland=false,libxcb libxcursor xcb-util-cursor xwayland"
+# colord CMS support
+PACKAGECONFIG[colord] = "-Ddeprecated-color-management-colord=true,-Ddeprecated-color-management-colord=false,colord"
+# Clients support
+PACKAGECONFIG[clients] = "-Dsimple-clients=${SIMPLECLIENTS} -Ddemo-clients=true,-Dsimple-clients= -Ddemo-clients=false"
+# Virtual remote output with GStreamer on DRM backend
+PACKAGECONFIG[remoting] = "-Dremoting=true,-Dremoting=false,gstreamer1.0 gstreamer1.0-plugins-base"
+# Weston with screen-share support
+PACKAGECONFIG[screenshare] = "-Dscreenshare=true,-Dscreenshare=false"
+# Traditional desktop shell
+PACKAGECONFIG[shell-desktop] = "-Dshell-desktop=true,-Dshell-desktop=false"
+# Fullscreen shell
+PACKAGECONFIG[shell-fullscreen] = "-Dshell-fullscreen=true,-Dshell-fullscreen=false"
+# In-Vehicle Infotainment (IVI) shell
+PACKAGECONFIG[shell-ivi] = "-Dshell-ivi=true,-Dshell-ivi=false"
+# Kiosk shell
+PACKAGECONFIG[shell-kiosk] = "-Dshell-kiosk=true,-Dshell-kiosk=false"
+# JPEG image loading support
+PACKAGECONFIG[image-jpeg] = "-Dimage-jpeg=true,-Dimage-jpeg=false, jpeg"
+# screencasting via PipeWire
+PACKAGECONFIG[pipewire] = "-Dbackend-pipewire=true,-Dbackend-pipewire=false,pipewire"
+# VNC remote screensharing
+PACKAGECONFIG[vnc] = "-Dbackend-vnc=true,-Dbackend-vnc=false,neatvnc"
+
+
+do_install:append() {
+ # Weston doesn't need the .la files to load modules, so wipe them
+ rm -f ${D}/${libdir}/libweston-${WESTON_MAJOR_VERSION}/*.la
+
+ # If X11, ship a desktop file to launch it
+ if [ "${@bb.utils.filter('DISTRO_FEATURES', 'x11', d)}" ]; then
+ install -d ${D}${datadir}/applications
+ install ${WORKDIR}/weston.desktop ${D}${datadir}/applications
+
+ install -d ${D}${datadir}/icons/hicolor/48x48/apps
+ install ${WORKDIR}/weston.png ${D}${datadir}/icons/hicolor/48x48/apps
+ fi
+
+ if [ "${@bb.utils.contains('PACKAGECONFIG', 'xwayland', 'yes', 'no', d)}" = "yes" ]; then
+ install -Dm 644 ${WORKDIR}/xwayland.weston-start ${D}${datadir}/weston-start/xwayland
+ fi
+
+ if [ "${@bb.utils.contains('PACKAGECONFIG', 'systemd', 'yes', 'no', d)}" = "yes" ]; then
+ install -Dm 644 ${WORKDIR}/systemd-notify.weston-start ${D}${datadir}/weston-start/systemd-notify
+ fi
+
+ if [ "${@bb.utils.contains('PACKAGECONFIG', 'launch', 'yes', 'no', d)}" = "yes" ]; then
+ chmod u+s ${D}${bindir}/weston-launch
+ fi
+}
+
+PACKAGES += "${@bb.utils.contains('PACKAGECONFIG', 'xwayland', '${PN}-xwayland', '', d)} \
+ libweston-${WESTON_MAJOR_VERSION} ${PN}-examples"
+
+FILES:${PN}-dev += "${libdir}/${BPN}/libexec_weston.so"
+FILES:${PN} = "${sysconfdir} ${bindir}/weston ${bindir}/weston-terminal ${bindir}/weston-info ${bindir}/weston-launch ${bindir}/wcap-decode ${libexecdir} ${libdir}/${BPN}/*.so* ${datadir}"
+
+FILES:libweston-${WESTON_MAJOR_VERSION} = "${libdir}/lib*${SOLIBS} ${libdir}/libweston-${WESTON_MAJOR_VERSION}/*.so"
+SUMMARY:libweston-${WESTON_MAJOR_VERSION} = "Helper library for implementing 'wayland window managers'."
+
+FILES:${PN}-examples = "${bindir}/*"
+
+FILES:${PN}-xwayland = "${libdir}/libweston-${WESTON_MAJOR_VERSION}/xwayland.so"
+RDEPENDS:${PN}-xwayland += "xwayland"
+
+RDEPENDS:${PN} += "xkeyboard-config"
+RRECOMMENDS:${PN} = "weston-init liberation-fonts"
+RRECOMMENDS:${PN}-dev += "wayland-protocols"
+
+USERADD_PACKAGES = "${PN}"
+GROUPADD_PARAM:${PN} = "--system weston-launch"
diff --git a/meta/recipes-graphics/x11-common/xserver-nodm-init_3.0.bb b/meta/recipes-graphics/x11-common/xserver-nodm-init_3.0.bb
index 3cc8e9ed4d..671c0a290e 100644
--- a/meta/recipes-graphics/x11-common/xserver-nodm-init_3.0.bb
+++ b/meta/recipes-graphics/x11-common/xserver-nodm-init_3.0.bb
@@ -2,7 +2,6 @@ SUMMARY = "Simple Xserver Init Script (no dm)"
LICENSE = "GPL-2.0-only"
LIC_FILES_CHKSUM = "file://COPYING;md5=751419260aa954499f7abaabaa882bbe"
SECTION = "x11"
-PR = "r31"
SRC_URI = "file://xserver-nodm \
file://Xserver \
diff --git a/meta/recipes-graphics/xcursor-transparent-theme/xcursor-transparent-theme_git.bb b/meta/recipes-graphics/xcursor-transparent-theme/xcursor-transparent-theme_git.bb
index 09fbc9b798..40b77422bf 100644
--- a/meta/recipes-graphics/xcursor-transparent-theme/xcursor-transparent-theme_git.bb
+++ b/meta/recipes-graphics/xcursor-transparent-theme/xcursor-transparent-theme_git.bb
@@ -8,9 +8,9 @@ LIC_FILES_CHKSUM = "file://COPYING;md5=94d55d512a9ba36caa9b7df079bae19f"
SECTION = "x11/base"
SRCREV = "23c8af5ba4a1b7efbaf0bbca59a65ff7e10a1a06"
-PV = "0.1.1+git${SRCPV}"
+PV = "0.1.1+git"
-SRC_URI = "git://git.yoctoproject.org/${BPN};branch=master"
+SRC_URI = "git://git.yoctoproject.org/${BPN};branch=master;protocol=https"
UPSTREAM_CHECK_COMMITS = "1"
S = "${WORKDIR}/git"
diff --git a/meta/recipes-graphics/xinput-calibrator/pointercal-xinput_0.0.bb b/meta/recipes-graphics/xinput-calibrator/pointercal-xinput_0.0.bb
index b77c940dde..5c9742fb20 100644
--- a/meta/recipes-graphics/xinput-calibrator/pointercal-xinput_0.0.bb
+++ b/meta/recipes-graphics/xinput-calibrator/pointercal-xinput_0.0.bb
@@ -5,7 +5,6 @@ BUGTRACKER = "https://github.com/tias/xinput_calibrator/issues"
LICENSE = "MIT"
LIC_FILES_CHKSUM = "file://${COREBASE}/meta/COPYING.MIT;md5=3da9cfbcb788c80a0384361b4de20420"
-PR = "r7"
SRC_URI = "file://pointercal.xinput"
S = "${WORKDIR}"
diff --git a/meta/recipes-graphics/xinput-calibrator/xinput-calibrator_git.bb b/meta/recipes-graphics/xinput-calibrator/xinput-calibrator_git.bb
index 3f49df6258..7b49668f92 100644
--- a/meta/recipes-graphics/xinput-calibrator/xinput-calibrator_git.bb
+++ b/meta/recipes-graphics/xinput-calibrator/xinput-calibrator_git.bb
@@ -4,8 +4,7 @@ LICENSE = "MIT"
LIC_FILES_CHKSUM = "file://src/calibrator.cpp;endline=22;md5=1bcba08f67cdb56f34021557898e4b5a"
DEPENDS = "virtual/libx11 libxi libxrandr"
-PV = "0.7.5+git${SRCPV}"
-PR = "r6"
+PV = "0.7.5+git"
inherit autotools pkgconfig features_check
# depends on virtual/libx11
diff --git a/meta/recipes-graphics/xorg-app/mkfontscale_1.2.2.bb b/meta/recipes-graphics/xorg-app/mkfontscale_1.2.2.bb
deleted file mode 100644
index cd658ab219..0000000000
--- a/meta/recipes-graphics/xorg-app/mkfontscale_1.2.2.bb
+++ /dev/null
@@ -1,21 +0,0 @@
-require xorg-app-common.inc
-
-SUMMARY = "A program to create an index of scalable font files for X"
-
-DESCRIPTION = "For each directory argument, mkfontscale reads all of the \
-scalable font files in the directory. For every font file found, an X11 \
-font name (XLFD) is generated, and is written together with the file \
-name to a file fonts.scale in the directory. The resulting fonts.scale \
-is used by the mkfontdir program."
-
-DEPENDS = "util-macros-native zlib libfontenc freetype xorgproto"
-
-PROVIDES += "mkfontdir"
-RPROVIDES:${PN} += "mkfontdir"
-
-BBCLASSEXTEND = "native"
-
-LIC_FILES_CHKSUM = "file://COPYING;md5=99b1e1269aba5179139b9e4380fc0934"
-
-SRC_URI_EXT = "xz"
-SRC_URI[sha256sum] = "8ae3fb5b1fe7436e1f565060acaa3e2918fe745b0e4979b5593968914fe2d5c4"
diff --git a/meta/recipes-graphics/xorg-app/mkfontscale_1.2.3.bb b/meta/recipes-graphics/xorg-app/mkfontscale_1.2.3.bb
new file mode 100644
index 0000000000..aa79902a34
--- /dev/null
+++ b/meta/recipes-graphics/xorg-app/mkfontscale_1.2.3.bb
@@ -0,0 +1,21 @@
+require xorg-app-common.inc
+
+SUMMARY = "A program to create an index of scalable font files for X"
+
+DESCRIPTION = "For each directory argument, mkfontscale reads all of the \
+scalable font files in the directory. For every font file found, an X11 \
+font name (XLFD) is generated, and is written together with the file \
+name to a file fonts.scale in the directory. The resulting fonts.scale \
+is used by the mkfontdir program."
+
+DEPENDS = "util-macros-native zlib libfontenc freetype xorgproto"
+
+PROVIDES += "mkfontdir"
+RPROVIDES:${PN} += "mkfontdir"
+
+BBCLASSEXTEND = "native"
+
+LIC_FILES_CHKSUM = "file://COPYING;md5=5a60c596d1b5f3dee9f005b703b3180d"
+
+SRC_URI_EXT = "xz"
+SRC_URI[sha256sum] = "2921cdc344f1acee04bcd6ea1e29565c1308263006e134a9ee38cf9c9d6fe75e"
diff --git a/meta/recipes-graphics/xorg-app/rgb_1.0.6.bb b/meta/recipes-graphics/xorg-app/rgb_1.0.6.bb
deleted file mode 100644
index db287a5b57..0000000000
--- a/meta/recipes-graphics/xorg-app/rgb_1.0.6.bb
+++ /dev/null
@@ -1,16 +0,0 @@
-require xorg-app-common.inc
-
-SUMMARY = "X11 color name database"
-DESCRIPTION = "This package includes both the list mapping X color names \
-to RGB values (rgb.txt) and, if configured to use a database for color \
-lookup, the rgb program to convert the text file into the binary database \
-format."
-
-DEPENDS += " xorgproto util-macros"
-LIC_FILES_CHKSUM = "file://COPYING;md5=ef598adbe241bd0b0b9113831f6e249a"
-PE = "1"
-
-SRC_URI[md5sum] = "eab5bbd7642e5c784429307ec210d198"
-SRC_URI[sha256sum] = "bbca7c6aa59939b9f6a0fb9fff15dfd62176420ffd4ae30c8d92a6a125fbe6b0"
-
-FILES:${PN} += "${datadir}/X11"
diff --git a/meta/recipes-graphics/xorg-app/rgb_1.1.0.bb b/meta/recipes-graphics/xorg-app/rgb_1.1.0.bb
new file mode 100644
index 0000000000..f14da7fd21
--- /dev/null
+++ b/meta/recipes-graphics/xorg-app/rgb_1.1.0.bb
@@ -0,0 +1,16 @@
+require xorg-app-common.inc
+
+SUMMARY = "X11 color name database"
+DESCRIPTION = "This package includes both the list mapping X color names \
+to RGB values (rgb.txt) and, if configured to use a database for color \
+lookup, the rgb program to convert the text file into the binary database \
+format."
+
+DEPENDS += " xorgproto util-macros"
+LIC_FILES_CHKSUM = "file://COPYING;md5=ef598adbe241bd0b0b9113831f6e249a"
+PE = "1"
+
+SRC_URI_EXT = "xz"
+SRC_URI[sha256sum] = "fc03d7f56e5b2a617668167f8927948cce54f93097e7ccd9f056077f479ed37b"
+
+FILES:${PN} += "${datadir}/X11"
diff --git a/meta/recipes-graphics/xorg-app/xauth_1.1.2.bb b/meta/recipes-graphics/xorg-app/xauth_1.1.2.bb
deleted file mode 100644
index 1ad18ef63b..0000000000
--- a/meta/recipes-graphics/xorg-app/xauth_1.1.2.bb
+++ /dev/null
@@ -1,15 +0,0 @@
-require xorg-app-common.inc
-SUMMARY = "X authority utilities"
-DESCRIPTION = "X application to edit and display the authorization \
-information used in connecting to the X server."
-
-LIC_FILES_CHKSUM = "file://COPYING;md5=5ec74dd7ea4d10c4715a7c44f159a40b"
-
-DEPENDS += "libxau libxext libxmu"
-PE = "1"
-
-SRC_URI_EXT = "xz"
-SRC_URI[sha256sum] = "78ba6afd19536ced1dddb3276cba6e9555a211b468a06f95f6a97c62ff8ee200"
-
-PACKAGECONFIG ??= "${@bb.utils.filter('DISTRO_FEATURES', 'ipv6', d)}"
-PACKAGECONFIG[ipv6] = "--enable-ipv6,--disable-ipv6,"
diff --git a/meta/recipes-graphics/xorg-app/xauth_1.1.3.bb b/meta/recipes-graphics/xorg-app/xauth_1.1.3.bb
new file mode 100644
index 0000000000..b0ca33bf4b
--- /dev/null
+++ b/meta/recipes-graphics/xorg-app/xauth_1.1.3.bb
@@ -0,0 +1,15 @@
+require xorg-app-common.inc
+SUMMARY = "X authority utilities"
+DESCRIPTION = "X application to edit and display the authorization \
+information used in connecting to the X server."
+
+LIC_FILES_CHKSUM = "file://COPYING;md5=5ec74dd7ea4d10c4715a7c44f159a40b"
+
+DEPENDS += "libxau libxext libxmu"
+PE = "1"
+
+SRC_URI_EXT = "xz"
+SRC_URI[sha256sum] = "e7075498bae332f917f01d660f9b940c0752b2556a8da61ccb62a44d0ffe9d33"
+
+PACKAGECONFIG ??= "${@bb.utils.filter('DISTRO_FEATURES', 'ipv6', d)}"
+PACKAGECONFIG[ipv6] = "--enable-ipv6,--disable-ipv6,"
diff --git a/meta/recipes-graphics/xorg-app/xdpyinfo_1.3.3.bb b/meta/recipes-graphics/xorg-app/xdpyinfo_1.3.3.bb
deleted file mode 100644
index e75a840b7d..0000000000
--- a/meta/recipes-graphics/xorg-app/xdpyinfo_1.3.3.bb
+++ /dev/null
@@ -1,20 +0,0 @@
-require xorg-app-common.inc
-
-SUMMARY = "Display information utility for X"
-
-DESCRIPTION = "Xdpyinfo is a utility for displaying information about an \
-X server. It is used to examine the capabilities of a server, the \
-predefined values for various parameters used in communicating between \
-clients and the server, and the different types of screens and visuals \
-that are available."
-
-LIC_FILES_CHKSUM = "file://COPYING;md5=f3d09e6b9e203a1af489e16c708f4fb3"
-DEPENDS += "libxtst libxext libxxf86vm libxi libxrender libxinerama libdmx libxau libxcomposite"
-PE = "1"
-
-SRC_URI += "file://disable-xkb.patch"
-
-SRC_URI_EXT = "xz"
-SRC_URI[sha256sum] = "356d5fd62f3e98ee36d6becf1b32d4ab6112d618339fb4b592ccffbd9e0fc206"
-
-EXTRA_OECONF = "--disable-xkb"
diff --git a/meta/recipes-graphics/xorg-app/xdpyinfo_1.3.4.bb b/meta/recipes-graphics/xorg-app/xdpyinfo_1.3.4.bb
new file mode 100644
index 0000000000..3becd40281
--- /dev/null
+++ b/meta/recipes-graphics/xorg-app/xdpyinfo_1.3.4.bb
@@ -0,0 +1,20 @@
+require xorg-app-common.inc
+
+SUMMARY = "Display information utility for X"
+
+DESCRIPTION = "Xdpyinfo is a utility for displaying information about an \
+X server. It is used to examine the capabilities of a server, the \
+predefined values for various parameters used in communicating between \
+clients and the server, and the different types of screens and visuals \
+that are available."
+
+LIC_FILES_CHKSUM = "file://COPYING;md5=f3d09e6b9e203a1af489e16c708f4fb3"
+DEPENDS += "libxtst libxext libxxf86vm libxi libxrender libxinerama libxau libxcomposite"
+PE = "1"
+
+SRC_URI += "file://disable-xkb.patch"
+
+SRC_URI_EXT = "xz"
+SRC_URI[sha256sum] = "a8ada581dbd7266440d7c3794fa89edf6b99b8857fc2e8c31042684f3af4822b"
+
+EXTRA_OECONF = "--disable-xkb"
diff --git a/meta/recipes-graphics/xorg-app/xev/diet-x11.patch b/meta/recipes-graphics/xorg-app/xev/diet-x11.patch
deleted file mode 100644
index 361369b291..0000000000
--- a/meta/recipes-graphics/xorg-app/xev/diet-x11.patch
+++ /dev/null
@@ -1,114 +0,0 @@
-From b9b2b8d1af283a13cdccea55562cf332de48dcb9 Mon Sep 17 00:00:00 2001
-From: Ross Burton <ross@openedhand.com>
-Date: Wed, 28 Mar 2007 16:10:50 +0000
-Subject: [PATCH] Add xev
-
-Upstream-Status: Inappropriate [disable feature]
-
----
- xev.c | 64 +----------------------------------------------------------
- 1 file changed, 1 insertion(+), 63 deletions(-)
-
-diff --git a/xev.c b/xev.c
-index ea69234..6d5eb30 100644
---- a/xev.c
-+++ b/xev.c
-@@ -175,17 +175,6 @@ do_KeyPress(XEvent *eventp)
- nbytes = XLookupString(e, str, 256, &ks, NULL);
-
- /* not supposed to call XmbLookupString on a key release event */
-- if (e->type == KeyPress && xic) {
-- do {
-- nmbbytes = XmbLookupString(xic, e, buf, bsize - 1, &ks, &status);
-- buf[nmbbytes] = '\0';
--
-- if (status == XBufferOverflow) {
-- bsize = nmbbytes + 1;
-- buf = realloc(buf, bsize);
-- }
-- } while (status == XBufferOverflow);
-- }
-
- if (ks == NoSymbol)
- ksname = "NoSymbol";
-@@ -220,16 +209,6 @@ do_KeyPress(XEvent *eventp)
- }
-
- /* not supposed to call XmbLookupString on a key release event */
-- if (e->type == KeyPress && xic) {
-- output(Indent, "XmbLookupString gives %d bytes: ", nmbbytes);
-- if (nmbbytes > 0) {
-- dump(buf, nmbbytes);
-- output(NewLine, " \"%s\"", buf);
-- }
-- else {
-- output_new_line();
-- }
-- }
-
- output(Indent | NewLine, "XFilterEvent returns: %s",
- XFilterEvent(eventp, e->window) ? "True" : "False");
-@@ -1211,7 +1190,7 @@ parse_event_mask(const char *s, long event_masks[])
- if (s)
- return True;
- }
-- }
-+ }
-
- if (s != NULL)
- fprintf(stderr, "%s: unrecognized event mask '%s'\n", ProgramName, s);
-@@ -1361,37 +1340,6 @@ main(int argc, char **argv)
- fprintf(stderr, "%s: XSetLocaleModifiers failed\n", ProgramName);
- }
-
-- xim = XOpenIM(dpy, NULL, NULL, NULL);
-- if (xim == NULL) {
-- fprintf(stderr, "%s: XOpenIM failed\n", ProgramName);
-- }
--
-- if (xim) {
-- imvalret = XGetIMValues(xim, XNQueryInputStyle, &xim_styles, NULL);
-- if (imvalret != NULL || xim_styles == NULL) {
-- fprintf(stderr, "%s: input method doesn't support any styles\n",
-- ProgramName);
-- }
--
-- if (xim_styles) {
-- xim_style = 0;
-- for (i = 0; i < xim_styles->count_styles; i++) {
-- if (xim_styles->supported_styles[i] ==
-- (XIMPreeditNothing | XIMStatusNothing)) {
-- xim_style = xim_styles->supported_styles[i];
-- break;
-- }
-- }
--
-- if (xim_style == 0) {
-- fprintf(stderr,
-- "%s: input method doesn't support the style we support\n",
-- ProgramName);
-- }
-- XFree(xim_styles);
-- }
-- }
--
- screen = DefaultScreen(dpy);
-
- attr.event_mask = event_masks[EVENT_MASK_INDEX_CORE];
-@@ -1446,16 +1394,6 @@ main(int argc, char **argv)
- printf("Outer window is 0x%lx, inner window is 0x%lx\n", w, subw);
- }
-
-- if (xim && xim_style) {
-- xic = XCreateIC(xim,
-- XNInputStyle, xim_style,
-- XNClientWindow, w, XNFocusWindow, w, NULL);
--
-- if (xic == NULL) {
-- fprintf(stderr, "XCreateIC failed\n");
-- }
-- }
--
- have_rr = XRRQueryExtension(dpy, &rr_event_base, &rr_error_base);
- if (have_rr) {
- int rr_major, rr_minor;
diff --git a/meta/recipes-graphics/xorg-app/xev_1.2.4.bb b/meta/recipes-graphics/xorg-app/xev_1.2.4.bb
deleted file mode 100644
index 9407fa65f1..0000000000
--- a/meta/recipes-graphics/xorg-app/xev_1.2.4.bb
+++ /dev/null
@@ -1,17 +0,0 @@
-require xorg-app-common.inc
-
-SUMMARY = "X Event Viewer"
-DESCRIPTION = "Xev creates a window and then asks the X server to send it events \
-whenever anything happens to the window (such as it being moved, resized, \
-typed in, clicked in, etc.). You can also attach it to an existing window."
-
-LIC_FILES_CHKSUM = "file://xev.c;beginline=1;endline=33;md5=577c99421f1803b891d2c79097ae4682"
-LICENSE = "MIT"
-
-PE = "1"
-
-DEPENDS += "libxrandr xorgproto"
-
-SRC_URI += "file://diet-x11.patch"
-
-SRC_URI[sha256sum] = "d700e08bfe751ed2dbf802baa204b056d0e49348b6eb3c6f9cb035d8ae4885e2"
diff --git a/meta/recipes-graphics/xorg-app/xev_1.2.6.bb b/meta/recipes-graphics/xorg-app/xev_1.2.6.bb
new file mode 100644
index 0000000000..182aeff1b7
--- /dev/null
+++ b/meta/recipes-graphics/xorg-app/xev_1.2.6.bb
@@ -0,0 +1,17 @@
+require xorg-app-common.inc
+
+SUMMARY = "X Event Viewer"
+DESCRIPTION = "Xev creates a window and then asks the X server to send it events \
+whenever anything happens to the window (such as it being moved, resized, \
+typed in, clicked in, etc.). You can also attach it to an existing window."
+
+LIC_FILES_CHKSUM = "file://xev.c;beginline=1;endline=33;md5=577c99421f1803b891d2c79097ae4682"
+LICENSE = "MIT"
+
+PE = "1"
+
+DEPENDS += "libxrandr xorgproto"
+
+SRC_URI[sha256sum] = "61e1c5e008ac9973aca7cdddf36e9df7410e77083b030eb04f4dc737c51807d7"
+
+SRC_URI_EXT = "xz"
diff --git a/meta/recipes-graphics/xorg-app/xeyes_1.2.0.bb b/meta/recipes-graphics/xorg-app/xeyes_1.2.0.bb
deleted file mode 100644
index 73d09f058d..0000000000
--- a/meta/recipes-graphics/xorg-app/xeyes_1.2.0.bb
+++ /dev/null
@@ -1,13 +0,0 @@
-require xorg-app-common.inc
-
-SUMMARY = "X11 eyes that follow the mouse cursor demo"
-DESCRIPTION = "Xeyes is a small X11 application that shows a pair of eyes that move to \
-follow the location of the mouse cursor around the screen."
-
-PE = "1"
-
-LIC_FILES_CHKSUM = "file://COPYING;md5=3ea51b365051ac32d1813a7dbaa4bfc6"
-
-SRC_URI[sha256sum] = "f8a17e23146bef1ab345a1e303c6749e42aaa7bcf4f25428afad41770721b6db"
-
-DEPENDS += "libxau libxt libxext libxmu libxrender libxi"
diff --git a/meta/recipes-graphics/xorg-app/xeyes_1.3.0.bb b/meta/recipes-graphics/xorg-app/xeyes_1.3.0.bb
new file mode 100644
index 0000000000..3d1a7063ea
--- /dev/null
+++ b/meta/recipes-graphics/xorg-app/xeyes_1.3.0.bb
@@ -0,0 +1,14 @@
+require xorg-app-common.inc
+
+SUMMARY = "X11 eyes that follow the mouse cursor demo"
+DESCRIPTION = "Xeyes is a small X11 application that shows a pair of eyes that move to \
+follow the location of the mouse cursor around the screen."
+
+PE = "1"
+
+LIC_FILES_CHKSUM = "file://COPYING;md5=3ea51b365051ac32d1813a7dbaa4bfc6"
+
+SRC_URI_EXT = "xz"
+SRC_URI[sha256sum] = "0950c600bf33447e169a539ee6655ef9f36d6cebf2c1be67f7ab55dacb753023"
+
+DEPENDS += "libxau libxt libxext libxmu libxrender libxi"
diff --git a/meta/recipes-graphics/xorg-app/xhost_1.0.8.bb b/meta/recipes-graphics/xorg-app/xhost_1.0.8.bb
deleted file mode 100644
index 26bec3f0dd..0000000000
--- a/meta/recipes-graphics/xorg-app/xhost_1.0.8.bb
+++ /dev/null
@@ -1,20 +0,0 @@
-require xorg-app-common.inc
-
-SUMMARY = "Server access control program for X"
-
-DESCRIPTION = "The xhost program is used to add and delete host names or \
-user names to the list allowed to make connections to the X server. In \
-the case of hosts, this provides a rudimentary form of privacy control \
-and security. Environments which require more sophisticated measures \
-should implement the user-based mechanism or use the hooks in the \
-protocol for passing other authentication data to the server."
-
-LIC_FILES_CHKSUM = "file://COPYING;md5=8fbed71dddf48541818cef8079124199"
-DEPENDS += "libxmu libxau"
-PE = "1"
-
-SRC_URI[md5sum] = "a48c72954ae6665e0616f6653636da8c"
-SRC_URI[sha256sum] = "a2dc3c579e13674947395ef8ccc1b3763f89012a216c2cc6277096489aadc396"
-
-PACKAGECONFIG ??= "${@bb.utils.filter('DISTRO_FEATURES', 'ipv6', d)}"
-PACKAGECONFIG[ipv6] = "--enable-ipv6,--disable-ipv6,"
diff --git a/meta/recipes-graphics/xorg-app/xhost_1.0.9.bb b/meta/recipes-graphics/xorg-app/xhost_1.0.9.bb
new file mode 100644
index 0000000000..d54e6cae65
--- /dev/null
+++ b/meta/recipes-graphics/xorg-app/xhost_1.0.9.bb
@@ -0,0 +1,21 @@
+require xorg-app-common.inc
+
+SUMMARY = "Server access control program for X"
+
+DESCRIPTION = "The xhost program is used to add and delete host names or \
+user names to the list allowed to make connections to the X server. In \
+the case of hosts, this provides a rudimentary form of privacy control \
+and security. Environments which require more sophisticated measures \
+should implement the user-based mechanism or use the hooks in the \
+protocol for passing other authentication data to the server."
+
+LIC_FILES_CHKSUM = "file://COPYING;md5=b1f81049109f21bb3c365d9f42f79f3b"
+DEPENDS += "libxmu libxau gettext-native"
+PE = "1"
+
+SRC_URI_EXT = "xz"
+
+SRC_URI[sha256sum] = "ea86b531462035b19a2e5e01ef3d9a35cca7d984086645e2fc844d8f0e346645"
+
+PACKAGECONFIG ??= "${@bb.utils.filter('DISTRO_FEATURES', 'ipv6', d)}"
+PACKAGECONFIG[ipv6] = "--enable-ipv6,--disable-ipv6,"
diff --git a/meta/recipes-graphics/xorg-app/xinit_1.4.1.bb b/meta/recipes-graphics/xorg-app/xinit_1.4.1.bb
deleted file mode 100644
index e6ebf273e8..0000000000
--- a/meta/recipes-graphics/xorg-app/xinit_1.4.1.bb
+++ /dev/null
@@ -1,25 +0,0 @@
-require xorg-app-common.inc
-
-SUMMARY = "X Window System initializer"
-
-DESCRIPTION = "The xinit program is used to start the X Window System \
-server and a first client program on systems that cannot start X \
-directly from /etc/init or in environments that use multiple window \
-systems. When this first client exits, xinit will kill the X server and \
-then terminate."
-
-LIC_FILES_CHKSUM = "file://COPYING;md5=18f01e7b39807bebe2b8df101a039b68"
-
-PE = "1"
-
-SRC_URI += "file://0001-Make-manpage-multilib-identical.patch"
-
-SRC_URI[md5sum] = "6d506ab2efc17a08e87778654e099d37"
-SRC_URI[sha256sum] = "de9b8f617b68a70f6caf87da01fcf0ebd2b75690cdcba9c921d0ef54fa54abb9"
-
-EXTRA_OECONF = "ac_cv_path_MCOOKIE=${bindir}/mcookie"
-
-PACKAGECONFIG ??= "rxvt"
-PACKAGECONFIG[rxvt] = "--with-xterm=rxvt,,,rxvt-unicode"
-
-RDEPENDS:${PN} += "util-linux-mcookie"
diff --git a/meta/recipes-graphics/xorg-app/xinit_1.4.2.bb b/meta/recipes-graphics/xorg-app/xinit_1.4.2.bb
new file mode 100644
index 0000000000..82c0b0d833
--- /dev/null
+++ b/meta/recipes-graphics/xorg-app/xinit_1.4.2.bb
@@ -0,0 +1,26 @@
+require xorg-app-common.inc
+
+SUMMARY = "X Window System initializer"
+
+DESCRIPTION = "The xinit program is used to start the X Window System \
+server and a first client program on systems that cannot start X \
+directly from /etc/init or in environments that use multiple window \
+systems. When this first client exits, xinit will kill the X server and \
+then terminate."
+
+LIC_FILES_CHKSUM = "file://COPYING;md5=18f01e7b39807bebe2b8df101a039b68"
+
+PE = "1"
+
+SRC_URI += "file://0001-Make-manpage-multilib-identical.patch"
+
+SRC_URI_EXT = "xz"
+
+SRC_URI[sha256sum] = "b7d8dc8d22ef9f15985a10b606ee4f2aad6828befa437359934647e88d331f23"
+
+EXTRA_OECONF = "ac_cv_path_MCOOKIE=${bindir}/mcookie"
+
+PACKAGECONFIG ??= "rxvt"
+PACKAGECONFIG[rxvt] = "--with-xterm=rxvt,,,rxvt-unicode"
+
+RDEPENDS:${PN} += "util-linux-mcookie"
diff --git a/meta/recipes-graphics/xorg-app/xinput_1.6.3.bb b/meta/recipes-graphics/xorg-app/xinput_1.6.3.bb
deleted file mode 100644
index ff1f1c9148..0000000000
--- a/meta/recipes-graphics/xorg-app/xinput_1.6.3.bb
+++ /dev/null
@@ -1,12 +0,0 @@
-require xorg-app-common.inc
-
-SUMMARY = "Runtime configuration and test of XInput devices"
-
-DESCRIPTION = "Xinput is an utility for configuring and testing XInput devices"
-
-LIC_FILES_CHKSUM = "file://COPYING;md5=881525f89f99cad39c9832bcb72e6fa5"
-
-DEPENDS += " libxi libxrandr libxinerama"
-
-SRC_URI[md5sum] = "ac6b7432726008b2f50eba82b0e2dbe4"
-SRC_URI[sha256sum] = "35a281dd3b9b22ea85e39869bb7670ba78955d5fec17c6ef7165d61e5aeb66ed"
diff --git a/meta/recipes-graphics/xorg-app/xinput_1.6.4.bb b/meta/recipes-graphics/xorg-app/xinput_1.6.4.bb
new file mode 100644
index 0000000000..f5e3f5da12
--- /dev/null
+++ b/meta/recipes-graphics/xorg-app/xinput_1.6.4.bb
@@ -0,0 +1,13 @@
+require xorg-app-common.inc
+
+SUMMARY = "Runtime configuration and test of XInput devices"
+
+DESCRIPTION = "Xinput is an utility for configuring and testing XInput devices"
+
+LIC_FILES_CHKSUM = "file://COPYING;md5=881525f89f99cad39c9832bcb72e6fa5"
+
+DEPENDS += " libxi libxrandr libxinerama"
+
+SRC_URI_EXT = "xz"
+SRC_URI[md5sum] = "8e4d14823b7cbefe1581c398c6ab0035"
+SRC_URI[sha256sum] = "ad04d00d656884d133110eeddc34e9c69e626ebebbbab04dc95791c2907057c8"
diff --git a/meta/recipes-graphics/xorg-app/xkbcomp_1.4.5.bb b/meta/recipes-graphics/xorg-app/xkbcomp_1.4.5.bb
deleted file mode 100644
index 657ec7580d..0000000000
--- a/meta/recipes-graphics/xorg-app/xkbcomp_1.4.5.bb
+++ /dev/null
@@ -1,18 +0,0 @@
-require xorg-app-common.inc
-
-SUMMARY = "A program to compile XKB keyboard description"
-
-DESCRIPTION = "The xkbcomp keymap compiler converts a description of an \
-XKB keymap into one of several output formats. The most common use for \
-xkbcomp is to create a compiled keymap file (.xkm extension) which can \
-be read directly by XKB-capable X servers or utilities."
-
-LIC_FILES_CHKSUM = "file://COPYING;md5=be5e191f04d3f2cd80aa83b928ba984c"
-
-DEPENDS += "libxkbfile"
-
-BBCLASSEXTEND = "native"
-
-EXTRA_OECONF += "--disable-selective-werror"
-
-SRC_URI[sha256sum] = "6851086c4244b6fd0cc562880d8ff193fb2bbf1e141c73632e10731b31d4b05e"
diff --git a/meta/recipes-graphics/xorg-app/xkbcomp_1.4.7.bb b/meta/recipes-graphics/xorg-app/xkbcomp_1.4.7.bb
new file mode 100644
index 0000000000..f259f33dd8
--- /dev/null
+++ b/meta/recipes-graphics/xorg-app/xkbcomp_1.4.7.bb
@@ -0,0 +1,19 @@
+require xorg-app-common.inc
+
+SUMMARY = "A program to compile XKB keyboard description"
+
+DESCRIPTION = "The xkbcomp keymap compiler converts a description of an \
+XKB keymap into one of several output formats. The most common use for \
+xkbcomp is to create a compiled keymap file (.xkm extension) which can \
+be read directly by XKB-capable X servers or utilities."
+
+LIC_FILES_CHKSUM = "file://COPYING;md5=be5e191f04d3f2cd80aa83b928ba984c"
+
+DEPENDS += "libxkbfile"
+
+BBCLASSEXTEND = "native"
+
+EXTRA_OECONF += "--disable-selective-werror"
+
+SRC_URI_EXT = "xz"
+SRC_URI[sha256sum] = "0a288114e5f44e31987042c79aecff1ffad53a8154b8ec971c24a69a80f81f77"
diff --git a/meta/recipes-graphics/xorg-app/xmodmap_1.0.10.bb b/meta/recipes-graphics/xorg-app/xmodmap_1.0.10.bb
deleted file mode 100644
index 7dedb03a2b..0000000000
--- a/meta/recipes-graphics/xorg-app/xmodmap_1.0.10.bb
+++ /dev/null
@@ -1,16 +0,0 @@
-require xorg-app-common.inc
-
-SUMMARY = "Utility for modifying keymaps and pointer button mappings in X"
-
-DESCRIPTION = "The xmodmap program is used to edit and display the \
-keyboard modifier map and keymap table that are used by client \
-applications to convert event keycodes into keysyms. It is usually run \
-from the user's session startup script to configure the keyboard \
-according to personal tastes."
-
-LICENSE = "MIT"
-LIC_FILES_CHKSUM = "file://COPYING;md5=272c17e96370e1e74773fa22d9989621"
-
-PE = "1"
-SRC_URI[md5sum] = "51f1d30a525e9903280ffeea2744b1f6"
-SRC_URI[sha256sum] = "473f0941d7439d501bb895ff358832b936ec34c749b9704c37a15e11c318487c"
diff --git a/meta/recipes-graphics/xorg-app/xmodmap_1.0.11.bb b/meta/recipes-graphics/xorg-app/xmodmap_1.0.11.bb
new file mode 100644
index 0000000000..dc955aa977
--- /dev/null
+++ b/meta/recipes-graphics/xorg-app/xmodmap_1.0.11.bb
@@ -0,0 +1,17 @@
+require xorg-app-common.inc
+
+SUMMARY = "Utility for modifying keymaps and pointer button mappings in X"
+
+DESCRIPTION = "The xmodmap program is used to edit and display the \
+keyboard modifier map and keymap table that are used by client \
+applications to convert event keycodes into keysyms. It is usually run \
+from the user's session startup script to configure the keyboard \
+according to personal tastes."
+
+LICENSE = "MIT"
+LIC_FILES_CHKSUM = "file://COPYING;md5=272c17e96370e1e74773fa22d9989621"
+
+PE = "1"
+SRC_URI[sha256sum] = "9a2f8168f7b0bc382828847403902cb6bf175e17658b36189eac87edda877e81"
+
+SRC_URI_EXT = "xz"
diff --git a/meta/recipes-graphics/xorg-app/xprop_1.2.5.bb b/meta/recipes-graphics/xorg-app/xprop_1.2.5.bb
deleted file mode 100644
index b1589a8d75..0000000000
--- a/meta/recipes-graphics/xorg-app/xprop_1.2.5.bb
+++ /dev/null
@@ -1,17 +0,0 @@
-require xorg-app-common.inc
-
-SUMMARY = "Utility to display window and font properties of an X server"
-
-DESCRIPTION = "The xprop utility is for displaying window and font \
-properties in an X server. One window or font is selected using the \
-command line arguments or possibly in the case of a window, by clicking \
-on the desired window. A list of properties is then given, possibly with \
-formatting information."
-
-LIC_FILES_CHKSUM = "file://COPYING;md5=e226ab8db88ac0bc0391673be40c9f91"
-
-DEPENDS += "libxmu"
-
-PE = "1"
-
-SRC_URI[sha256sum] = "9b92ed0316bf2486121d8bac88bd1878f16b43bd335f18009b1f941f1eca93a1"
diff --git a/meta/recipes-graphics/xorg-app/xprop_1.2.7.bb b/meta/recipes-graphics/xorg-app/xprop_1.2.7.bb
new file mode 100644
index 0000000000..96e364b2b3
--- /dev/null
+++ b/meta/recipes-graphics/xorg-app/xprop_1.2.7.bb
@@ -0,0 +1,18 @@
+require xorg-app-common.inc
+
+SUMMARY = "Utility to display window and font properties of an X server"
+
+DESCRIPTION = "The xprop utility is for displaying window and font \
+properties in an X server. One window or font is selected using the \
+command line arguments or possibly in the case of a window, by clicking \
+on the desired window. A list of properties is then given, possibly with \
+formatting information."
+
+LIC_FILES_CHKSUM = "file://COPYING;md5=e226ab8db88ac0bc0391673be40c9f91"
+
+DEPENDS += "libxmu"
+
+PE = "1"
+
+SRC_URI_EXT = "xz"
+SRC_URI[sha256sum] = "4436e3148bb91a162406230d9f736a49ca8b50b74790015dc15d78d6ce8e825f"
diff --git a/meta/recipes-graphics/xorg-app/xrandr_1.5.1.bb b/meta/recipes-graphics/xorg-app/xrandr_1.5.1.bb
deleted file mode 100644
index 0e0347f768..0000000000
--- a/meta/recipes-graphics/xorg-app/xrandr_1.5.1.bb
+++ /dev/null
@@ -1,18 +0,0 @@
-require xorg-app-common.inc
-
-SUMMARY = "XRandR: X Resize, Rotate and Reflect extension command"
-
-DESCRIPTION = "Xrandr is used to set the size, orientation and/or \
-reflection of the outputs for a screen. It can also set the screen \
-size."
-
-LICENSE = "MIT"
-LIC_FILES_CHKSUM = "file://COPYING;md5=fe1608bdb33cf8c62a4438f7d34679b3"
-DEPENDS += "libxrandr libxrender"
-PE = "1"
-
-SRC_URI_EXT = "xz"
-SRC_URI[md5sum] = "fe40f7a4fd39dd3a02248d3e0b1972e4"
-SRC_URI[sha256sum] = "7bc76daf9d72f8aff885efad04ce06b90488a1a169d118dea8a2b661832e8762"
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-graphics/xorg-app/xrandr_1.5.2.bb b/meta/recipes-graphics/xorg-app/xrandr_1.5.2.bb
new file mode 100644
index 0000000000..1e29031849
--- /dev/null
+++ b/meta/recipes-graphics/xorg-app/xrandr_1.5.2.bb
@@ -0,0 +1,17 @@
+require xorg-app-common.inc
+
+SUMMARY = "XRandR: X Resize, Rotate and Reflect extension command"
+
+DESCRIPTION = "Xrandr is used to set the size, orientation and/or \
+reflection of the outputs for a screen. It can also set the screen \
+size."
+
+LICENSE = "MIT"
+LIC_FILES_CHKSUM = "file://COPYING;md5=fe1608bdb33cf8c62a4438f7d34679b3"
+DEPENDS += "libxrandr libxrender"
+PE = "1"
+
+SRC_URI_EXT = "xz"
+SRC_URI[sha256sum] = "c8bee4790d9058bacc4b6246456c58021db58a87ddda1a9d0139bf5f18f1f240"
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-graphics/xorg-app/xset/disable-xkb.patch b/meta/recipes-graphics/xorg-app/xset/disable-xkb.patch
index a97c7b72fa..0bb05a3b88 100644
--- a/meta/recipes-graphics/xorg-app/xset/disable-xkb.patch
+++ b/meta/recipes-graphics/xorg-app/xset/disable-xkb.patch
@@ -1,14 +1,21 @@
-add "-disable-xkb" option
+From 3090dcafac7c8080091b5e3e3720209c65251a1a Mon Sep 17 00:00:00 2001
+From: Richard Purdie <richard@openedhand.com>
+Date: Mon, 22 May 2006 22:15:57 +0000
+Subject: [PATCH] add "-disable-xkb" option
Upstream-Status: Pending
Rebase for 1.2.1 by: Yu Ke <ke.yu@intel.com>
-Index: xset-1.2.3/configure.ac
-===================================================================
---- xset-1.2.3.orig/configure.ac
-+++ xset-1.2.3/configure.ac
-@@ -53,11 +53,15 @@ PKG_CHECK_MODULES(SET_XEXT, xext,
+---
+ configure.ac | 4 ++++
+ 1 file changed, 4 insertions(+)
+
+diff --git a/configure.ac b/configure.ac
+index e7c2bdf..8e1ed5e 100644
+--- a/configure.ac
++++ b/configure.ac
+@@ -52,11 +52,15 @@ PKG_CHECK_MODULES(SET_XEXT, xext,
AC_CHECK_HEADERS([X11/extensions/dpms.h X11/extensions/MITMisc.h],,,[#include <X11/Xlib.h>])
CPPFLAGS="$SAVE_CPPFLAGS"],[echo "not found"])
@@ -22,5 +29,5 @@ Index: xset-1.2.3/configure.ac
CPPFLAGS="$SAVE_CPPFLAGS"],[echo "not found"])
+fi
- AC_ARG_WITH(xf86misc, AS_HELP_STRING([--without-xf86misc],[Disable xf86misc support.]),
- [USE_XF86MISC="$withval"], [USE_XF86MISC="yes"])
+ AC_ARG_WITH(xf86misc, AS_HELP_STRING([--with-xf86misc],[Enable xf86misc support.]),
+ [USE_XF86MISC="$withval"], [USE_XF86MISC="no"])
diff --git a/meta/recipes-graphics/xorg-app/xset_1.2.4.bb b/meta/recipes-graphics/xorg-app/xset_1.2.4.bb
deleted file mode 100644
index e21120e788..0000000000
--- a/meta/recipes-graphics/xorg-app/xset_1.2.4.bb
+++ /dev/null
@@ -1,19 +0,0 @@
-require xorg-app-common.inc
-
-SUMMARY = "Utility for setting various user preference options of the display"
-
-DESCRIPTION = "xset is a utility that is used to set various user \
-preference options of the display."
-
-LICENSE = "MIT"
-LIC_FILES_CHKSUM = "file://COPYING;md5=bea81cc9827cdf1af0e12c2b8228cf8d"
-DEPENDS += "libxext libxmu libxau"
-PE = "1"
-
-SRC_URI += "file://disable-xkb.patch"
-
-SRC_URI[md5sum] = "70ea7bc7bacf1a124b1692605883f620"
-SRC_URI[sha256sum] = "e4fd95280df52a88e9b0abc1fee11dcf0f34fc24041b9f45a247e52df941c957"
-
-CFLAGS += "-D_GNU_SOURCE"
-EXTRA_OECONF = "--disable-xkb --without-fontcache"
diff --git a/meta/recipes-graphics/xorg-app/xset_1.2.5.bb b/meta/recipes-graphics/xorg-app/xset_1.2.5.bb
new file mode 100644
index 0000000000..398530f342
--- /dev/null
+++ b/meta/recipes-graphics/xorg-app/xset_1.2.5.bb
@@ -0,0 +1,20 @@
+require xorg-app-common.inc
+
+SUMMARY = "Utility for setting various user preference options of the display"
+
+DESCRIPTION = "xset is a utility that is used to set various user \
+preference options of the display."
+
+LICENSE = "MIT"
+LIC_FILES_CHKSUM = "file://COPYING;md5=bea81cc9827cdf1af0e12c2b8228cf8d"
+DEPENDS += "libxext libxmu libxau"
+PE = "1"
+
+SRC_URI += "file://disable-xkb.patch"
+
+SRC_URI_EXT = "xz"
+
+SRC_URI[sha256sum] = "9f692d55635b3862cd63633b1222a87680ec283c7a8e8ed6dd698a3147f75e2f"
+
+CFLAGS += "-D_GNU_SOURCE"
+EXTRA_OECONF = "--disable-xkb --without-fontcache"
diff --git a/meta/recipes-graphics/xorg-app/xvinfo_1.1.4.bb b/meta/recipes-graphics/xorg-app/xvinfo_1.1.4.bb
deleted file mode 100644
index a913ae11f3..0000000000
--- a/meta/recipes-graphics/xorg-app/xvinfo_1.1.4.bb
+++ /dev/null
@@ -1,14 +0,0 @@
-require xorg-app-common.inc
-
-SUMMARY = "Print out X-Video extension adaptor information"
-
-DESCRIPTION = "xvinfo prints out the capabilities of any video adaptors \
-associated with the display that are accessible through the X-Video \
-extension."
-
-LIC_FILES_CHKSUM = "file://COPYING;md5=b664101ad7a1dc758a4c4109bf978e68"
-DEPENDS += " libxv"
-PE = "1"
-
-SRC_URI[md5sum] = "b13afec137b9b331814a9824ab03ec80"
-SRC_URI[sha256sum] = "0353220d6606077ba42363db65f50410759f9815352f77adc799e2adfa76e73f"
diff --git a/meta/recipes-graphics/xorg-app/xvinfo_1.1.5.bb b/meta/recipes-graphics/xorg-app/xvinfo_1.1.5.bb
new file mode 100644
index 0000000000..d7e5d917ef
--- /dev/null
+++ b/meta/recipes-graphics/xorg-app/xvinfo_1.1.5.bb
@@ -0,0 +1,14 @@
+require xorg-app-common.inc
+
+SUMMARY = "Print out X-Video extension adaptor information"
+
+DESCRIPTION = "xvinfo prints out the capabilities of any video adaptors \
+associated with the display that are accessible through the X-Video \
+extension."
+
+LIC_FILES_CHKSUM = "file://COPYING;md5=b664101ad7a1dc758a4c4109bf978e68"
+DEPENDS += " libxv"
+PE = "1"
+
+SRC_URI_EXT = "xz"
+SRC_URI[sha256sum] = "3ede71ecb26d9614ccbc6916720285e95a2c7e0c5e19b8570eaaf72ad7c5c404"
diff --git a/meta/recipes-graphics/xorg-app/xwininfo_1.1.5.bb b/meta/recipes-graphics/xorg-app/xwininfo_1.1.5.bb
deleted file mode 100644
index ec9f08268e..0000000000
--- a/meta/recipes-graphics/xorg-app/xwininfo_1.1.5.bb
+++ /dev/null
@@ -1,15 +0,0 @@
-require xorg-app-common.inc
-
-SUMMARY = "Window information utility for X"
-
-DESCRIPTION = "Xwininfo is a utility for displaying information about \
-windows. Information may include window position, size, color depth, \
-and a number of other items."
-
-LIC_FILES_CHKSUM = "file://COPYING;md5=78976cd3115f6faf615accc4e094d90e"
-DEPENDS += "libxext libxmu gettext-native"
-
-PE = "0"
-
-SRC_URI[md5sum] = "26d46f7ef0588d3392da3ad5802be420"
-SRC_URI[sha256sum] = "7a405441dfc476666c744f5fcd1bc8a75abf8b5b1d85db7b88b370982365080e"
diff --git a/meta/recipes-graphics/xorg-app/xwininfo_1.1.6.bb b/meta/recipes-graphics/xorg-app/xwininfo_1.1.6.bb
new file mode 100644
index 0000000000..39ec76724c
--- /dev/null
+++ b/meta/recipes-graphics/xorg-app/xwininfo_1.1.6.bb
@@ -0,0 +1,16 @@
+require xorg-app-common.inc
+
+SUMMARY = "Window information utility for X"
+
+DESCRIPTION = "Xwininfo is a utility for displaying information about \
+windows. Information may include window position, size, color depth, \
+and a number of other items."
+
+LIC_FILES_CHKSUM = "file://COPYING;md5=a1b9559d7b7997a6e9588012ebf8769a"
+DEPENDS += "libxext libxmu gettext-native"
+
+PE = "0"
+
+SRC_URI_EXT = "xz"
+SRC_URI[md5sum] = "c91201bc1eb5e7b38933be8d0f7f16a8"
+SRC_URI[sha256sum] = "3518897c17448df9ba99ad6d9bb1ca0f17bc0ed7c0fd61281b34ceed29a9253f"
diff --git a/meta/recipes-graphics/xorg-driver/xf86-input-keyboard_1.9.0.bb b/meta/recipes-graphics/xorg-driver/xf86-input-keyboard_1.9.0.bb
deleted file mode 100644
index 1896fff9f9..0000000000
--- a/meta/recipes-graphics/xorg-driver/xf86-input-keyboard_1.9.0.bb
+++ /dev/null
@@ -1,13 +0,0 @@
-require xorg-driver-input.inc
-
-SUMMARY = "X.Org X server -- keyboard input driver"
-
-DESCRIPTION = "keyboard is an Xorg input driver for keyboards. The \
-driver supports the standard OS-provided keyboard interface. The driver \
-functions as a keyboard input device, and may be used as the X server's \
-core keyboard."
-
-LIC_FILES_CHKSUM = "file://COPYING;md5=ea2099d24ac9e316a6d4b9f20b3d4e10"
-
-SRC_URI[md5sum] = "a3a3f0dd32361dcdbd406e894dafe090"
-SRC_URI[sha256sum] = "f7c900f21752683402992b288d5a2826de7a6c0c0abac2aadd7e8a409e170388"
diff --git a/meta/recipes-graphics/xorg-driver/xf86-input-libinput_1.2.1.bb b/meta/recipes-graphics/xorg-driver/xf86-input-libinput_1.2.1.bb
deleted file mode 100644
index e1c47aa5e7..0000000000
--- a/meta/recipes-graphics/xorg-driver/xf86-input-libinput_1.2.1.bb
+++ /dev/null
@@ -1,12 +0,0 @@
-require xorg-driver-input.inc
-
-SUMMARY = "Generic input driver for the X.Org server based on libinput"
-LICENSE = "MIT"
-LIC_FILES_CHKSUM = "file://COPYING;md5=a22925127bd3c827c384cedd23ed2309"
-
-DEPENDS += "libinput"
-
-XORG_DRIVER_COMPRESSOR = ".tar.xz"
-SRC_URI[sha256sum] = "8151db5b9ddb317c0ce92dcb62da9a8db5079e5b8a95b60abc854da21e7e971b"
-
-FILES:${PN} += "${datadir}/X11/xorg.conf.d"
diff --git a/meta/recipes-graphics/xorg-driver/xf86-input-libinput_1.4.0.bb b/meta/recipes-graphics/xorg-driver/xf86-input-libinput_1.4.0.bb
new file mode 100644
index 0000000000..92272c3f2a
--- /dev/null
+++ b/meta/recipes-graphics/xorg-driver/xf86-input-libinput_1.4.0.bb
@@ -0,0 +1,12 @@
+require xorg-driver-input.inc
+
+SUMMARY = "Generic input driver for the X.Org server based on libinput"
+LICENSE = "MIT"
+LIC_FILES_CHKSUM = "file://COPYING;md5=a22925127bd3c827c384cedd23ed2309"
+
+DEPENDS += "libinput"
+
+XORG_DRIVER_COMPRESSOR = ".tar.xz"
+SRC_URI[sha256sum] = "3a3d14cd895dc75b59ae2783b888031956a0bac7a1eff16d240dbb9d5df3e398"
+
+FILES:${PN} += "${datadir}/X11/xorg.conf.d"
diff --git a/meta/recipes-graphics/xorg-driver/xf86-input-mouse_1.9.3.bb b/meta/recipes-graphics/xorg-driver/xf86-input-mouse_1.9.3.bb
deleted file mode 100644
index c38dab505b..0000000000
--- a/meta/recipes-graphics/xorg-driver/xf86-input-mouse_1.9.3.bb
+++ /dev/null
@@ -1,14 +0,0 @@
-require xorg-driver-input.inc
-
-SUMMARY = "X.Org X server -- mouse input driver"
-
-DESCRIPTION = "mouse is an Xorg input driver for mice. The driver \
-supports most available mouse types and interfaces. The mouse driver \
-functions as a pointer input device, and may be used as the X server's \
-core pointer. Multiple mice are supported by multiple instances of this \
-driver."
-
-LIC_FILES_CHKSUM = "file://COPYING;md5=90ea9f90d72b6d9327dede5ffdb2a510"
-
-SRC_URI[md5sum] = "a2104693bbcfe1413397f7905eecd3dc"
-SRC_URI[sha256sum] = "93ecb350604d05be98b7d4e5db3b8155a44890069a7d8d6b800c0bea79c85cc5"
diff --git a/meta/recipes-graphics/xorg-driver/xf86-input-mouse_1.9.5.bb b/meta/recipes-graphics/xorg-driver/xf86-input-mouse_1.9.5.bb
new file mode 100644
index 0000000000..92e54d9701
--- /dev/null
+++ b/meta/recipes-graphics/xorg-driver/xf86-input-mouse_1.9.5.bb
@@ -0,0 +1,14 @@
+require xorg-driver-input.inc
+
+SUMMARY = "X.Org X server -- mouse input driver"
+
+DESCRIPTION = "mouse is an Xorg input driver for mice. The driver \
+supports most available mouse types and interfaces. The mouse driver \
+functions as a pointer input device, and may be used as the X server's \
+core pointer. Multiple mice are supported by multiple instances of this \
+driver."
+
+LIC_FILES_CHKSUM = "file://COPYING;md5=d213a69053dffe9bcab94abf60013d33"
+
+SRC_URI[sha256sum] = "4fde8ae9b44352e2a208584c36528ee3ed13cf5fe4417208a9785daccefd9968"
+XORG_DRIVER_COMPRESSOR = ".tar.xz"
diff --git a/meta/recipes-graphics/xorg-driver/xf86-input-synaptics/64bit_time_t_support.patch b/meta/recipes-graphics/xorg-driver/xf86-input-synaptics/64bit_time_t_support.patch
deleted file mode 100644
index 4bb7fb3e23..0000000000
--- a/meta/recipes-graphics/xorg-driver/xf86-input-synaptics/64bit_time_t_support.patch
+++ /dev/null
@@ -1,51 +0,0 @@
-This patch avoids using time field of input_event structure which is not available
-on 32bit arches supporting 64bit time_t structs, Patch makes it compatible with new
-and keeps old input.h implementation functional as well.
-
-See https://sourceware.org/glibc/wiki/Y2038ProofnessDesign
-
-Upstream-Status: Pending
-Signed-off-by: Khem Raj <raj.khem@gmail.com>
-
---- a/src/eventcomm.c
-+++ b/src/eventcomm.c
-@@ -575,10 +575,12 @@ SynapticsReadEvent(InputInfoPtr pInfo, s
- ev->type = EV_SYN;
- ev->code = SYN_REPORT;
- ev->value = 0;
-- ev->time = last_event_time;
-- } else if (ev->type == EV_SYN)
-- last_event_time = ev->time;
--
-+ ev->input_event_sec = last_event_time.tv_sec;
-+ ev->input_event_usec = last_event_time.tv_usec;
-+ } else if (ev->type == EV_SYN) {
-+ last_event_time.tv_sec = ev->input_event_sec;
-+ last_event_time.tv_usec = ev->input_event_usec;
-+ }
- return TRUE;
- }
-
-@@ -725,7 +727,7 @@ EventReadHwState(InputInfoPtr pInfo,
- case SYN_REPORT:
- hw->numFingers = count_fingers(pInfo, comm);
- if (proto_data->have_monotonic_clock)
-- hw->millis = 1000 * ev.time.tv_sec + ev.time.tv_usec / 1000;
-+ hw->millis = 1000 * ev.input_event_sec + ev.input_event_usec / 1000;
- else
- hw->millis = GetTimeInMillis();
- SynapticsCopyHwState(hwRet, hw);
---- a/src/eventcomm.h
-+++ b/src/eventcomm.h
-@@ -34,6 +34,11 @@
- #include <xf86Xinput.h>
- #include "synproto.h"
-
-+#ifndef input_event_sec
-+#define input_event_sec time.tv_sec
-+#define input_event_usec time.tv_usec
-+#endif
-+
- /* for auto-dev: */
- #define DEV_INPUT_EVENT "/dev/input"
- #define EVENT_DEV_NAME "event"
diff --git a/meta/recipes-graphics/xorg-driver/xf86-input-synaptics_1.9.1.bb b/meta/recipes-graphics/xorg-driver/xf86-input-synaptics_1.9.1.bb
deleted file mode 100644
index 388350c96e..0000000000
--- a/meta/recipes-graphics/xorg-driver/xf86-input-synaptics_1.9.1.bb
+++ /dev/null
@@ -1,18 +0,0 @@
-require xorg-driver-input.inc
-
-SUMMARY = "X.Org X server -- synaptics touchpad input driver"
-
-DESCRIPTION = "synaptics is an Xorg input driver for the touchpads from \
-Synaptics Incorporated. Even though these touchpads (by default, \
-operating in a compatibility mode emulating a standard mouse) can be \
-handled by the normal evdev or mouse drivers, this driver allows more \
-advanced features of the touchpad to become available."
-
-LIC_FILES_CHKSUM = "file://COPYING;md5=55aacd3535a741824955c5eb8f061398"
-
-SRC_URI += "file://64bit_time_t_support.patch"
-
-SRC_URI[md5sum] = "cfb79d3c975151f9bbf30b727c260cb9"
-SRC_URI[sha256sum] = "7af83526eff1c76e8b9e1553b34245c203d029028d8044dd9dcf71eef1001576"
-
-DEPENDS += "libxi mtdev libxtst libevdev"
diff --git a/meta/recipes-graphics/xorg-driver/xf86-input-synaptics_1.9.2.bb b/meta/recipes-graphics/xorg-driver/xf86-input-synaptics_1.9.2.bb
new file mode 100644
index 0000000000..8e446290b2
--- /dev/null
+++ b/meta/recipes-graphics/xorg-driver/xf86-input-synaptics_1.9.2.bb
@@ -0,0 +1,17 @@
+require xorg-driver-input.inc
+
+SUMMARY = "X.Org X server -- synaptics touchpad input driver"
+
+DESCRIPTION = "synaptics is an Xorg input driver for the touchpads from \
+Synaptics Incorporated. Even though these touchpads (by default, \
+operating in a compatibility mode emulating a standard mouse) can be \
+handled by the normal evdev or mouse drivers, this driver allows more \
+advanced features of the touchpad to become available."
+
+LIC_FILES_CHKSUM = "file://COPYING;md5=55aacd3535a741824955c5eb8f061398"
+
+SRC_URI[sha256sum] = "b8fa4aab913fc63754bbd6439e020658c412743a055201ddf212760593962c38"
+
+DEPENDS += "libxi mtdev libxtst libevdev"
+
+XORG_DRIVER_COMPRESSOR = ".tar.xz"
diff --git a/meta/recipes-graphics/xorg-driver/xf86-input-vmmouse_13.1.0.bb b/meta/recipes-graphics/xorg-driver/xf86-input-vmmouse_13.1.0.bb
deleted file mode 100644
index 5e740037d9..0000000000
--- a/meta/recipes-graphics/xorg-driver/xf86-input-vmmouse_13.1.0.bb
+++ /dev/null
@@ -1,26 +0,0 @@
-require xorg-driver-input.inc
-
-SUMMARY = "X.Org X server -- VMWare mouse input driver"
-DESCRIPTION = "The vmmouse driver enables support for the special VMMouse \
-protocol that is provided by VMware virtual machines to give absolute \
-pointer positioning. The vmmouse driver is capable of falling back to the \
-standard 'mouse' driver if a VMware virtual machine is not detected."
-
-SRC_URI[md5sum] = "85e2e464b7219c495ad3a16465c226ed"
-SRC_URI[sha256sum] = "0af558957ac1be1b2863712c2475de8f4d7f14921fd01ded2e2fde4921b19319"
-
-RDEPENDS:${PN} += "xf86-input-mouse"
-
-LIC_FILES_CHKSUM = "file://COPYING;md5=622841c068a9d7625fbfe7acffb1a8fc"
-
-COMPATIBLE_HOST = '(i.86|x86_64).*-linux'
-
-do_install:append () {
- # We don't care about hal
- rm -rf ${D}${datadir}/hal/
- rm -rf ${D}${libdir}/hal/
-}
-
-EXTRA_OECONF = "--with-udev-rules-dir=${nonarch_base_libdir}/udev/rules.d"
-
-FILES:${PN} += "${datadir}/X11/xorg.conf.d"
diff --git a/meta/recipes-graphics/xorg-driver/xf86-input-vmmouse_13.2.0.bb b/meta/recipes-graphics/xorg-driver/xf86-input-vmmouse_13.2.0.bb
new file mode 100644
index 0000000000..7567b1ef2b
--- /dev/null
+++ b/meta/recipes-graphics/xorg-driver/xf86-input-vmmouse_13.2.0.bb
@@ -0,0 +1,27 @@
+require xorg-driver-input.inc
+
+SUMMARY = "X.Org X server -- VMWare mouse input driver"
+DESCRIPTION = "The vmmouse driver enables support for the special VMMouse \
+protocol that is provided by VMware virtual machines to give absolute \
+pointer positioning. The vmmouse driver is capable of falling back to the \
+standard 'mouse' driver if a VMware virtual machine is not detected."
+
+XORG_DRIVER_COMPRESSOR = ".tar.xz"
+
+SRC_URI[sha256sum] = "56f077580ab8f02e2f40358c5c46b0ae3e1828fc77744526b24adf1ceea339b8"
+
+RDEPENDS:${PN} += "xf86-input-mouse"
+
+LIC_FILES_CHKSUM = "file://COPYING;md5=622841c068a9d7625fbfe7acffb1a8fc"
+
+COMPATIBLE_HOST = '(i.86|x86_64).*-linux'
+
+do_install:append () {
+ # We don't care about hal
+ rm -rf ${D}${datadir}/hal/
+ rm -rf ${D}${libdir}/hal/
+}
+
+EXTRA_OECONF = "--with-udev-rules-dir=${nonarch_base_libdir}/udev/rules.d"
+
+FILES:${PN} += "${datadir}/X11/xorg.conf.d"
diff --git a/meta/recipes-graphics/xorg-driver/xf86-video-cirrus_1.5.3.bb b/meta/recipes-graphics/xorg-driver/xf86-video-cirrus_1.5.3.bb
deleted file mode 100644
index c498477604..0000000000
--- a/meta/recipes-graphics/xorg-driver/xf86-video-cirrus_1.5.3.bb
+++ /dev/null
@@ -1,13 +0,0 @@
-require xorg-driver-video.inc
-
-SUMMARY = "X.Org X server -- cirrus display driver"
-DESCRIPTION = "cirrus is an Xorg driver for Cirrus Logic VGA adapters. These \
-devices are not so common in the wild anymore, but QEMU can emulate one, so \
-the driver is still useful."
-
-LIC_FILES_CHKSUM = "file://COPYING;md5=6ddc7ca860dc5fd014e7f160ea699295"
-
-SRC_URI[md5sum] = "7d7dfd4cdc42aa8b6e923510fa00ad2b"
-SRC_URI[sha256sum] = "edc87b20a55259126b5239b5c1ef913419eab7ded0ed12ae9ae989460d7351ab"
-
-DEPENDS += "libpciaccess"
diff --git a/meta/recipes-graphics/xorg-driver/xf86-video-cirrus_1.6.0.bb b/meta/recipes-graphics/xorg-driver/xf86-video-cirrus_1.6.0.bb
new file mode 100644
index 0000000000..32eb8a4816
--- /dev/null
+++ b/meta/recipes-graphics/xorg-driver/xf86-video-cirrus_1.6.0.bb
@@ -0,0 +1,14 @@
+require xorg-driver-video.inc
+
+SUMMARY = "X.Org X server -- cirrus display driver"
+DESCRIPTION = "cirrus is an Xorg driver for Cirrus Logic VGA adapters. These \
+devices are not so common in the wild anymore, but QEMU can emulate one, so \
+the driver is still useful."
+
+LIC_FILES_CHKSUM = "file://COPYING;md5=6ddc7ca860dc5fd014e7f160ea699295"
+
+SRC_URI[sha256sum] = "0ef3fa0083de3d9a040f11b3af38598d8405f1165b908fdd6712c30038326401"
+
+DEPENDS += "libpciaccess"
+
+XORG_DRIVER_COMPRESSOR = ".tar.xz"
diff --git a/meta/recipes-graphics/xorg-driver/xf86-video-intel_git.bb b/meta/recipes-graphics/xorg-driver/xf86-video-intel_git.bb
index 74ec57aa66..2e526bd799 100644
--- a/meta/recipes-graphics/xorg-driver/xf86-video-intel_git.bb
+++ b/meta/recipes-graphics/xorg-driver/xf86-video-intel_git.bb
@@ -10,11 +10,10 @@ Infrastructure (DRI)."
LIC_FILES_CHKSUM = "file://COPYING;md5=8730ad58d11c7bbad9a7066d69f7808e"
SRCREV = "31486f40f8e8f8923ca0799aea84b58799754564"
-PV = "2.99.917+git${SRCPV}"
+PV = "2.99.917+git"
S = "${WORKDIR}/git"
-SRC_URI = "git://anongit.freedesktop.org/xorg/driver/xf86-video-intel;branch=master \
-"
+SRC_URI = "git://anongit.freedesktop.org/git/xorg/driver/xf86-video-intel;protocol=https;branch=master"
UPSTREAM_CHECK_GITTAGREGEX = "(?P<pver>\d+(\.\d+)+)"
diff --git a/meta/recipes-graphics/xorg-driver/xf86-video-vesa_2.5.0.bb b/meta/recipes-graphics/xorg-driver/xf86-video-vesa_2.5.0.bb
deleted file mode 100644
index 05bb905b35..0000000000
--- a/meta/recipes-graphics/xorg-driver/xf86-video-vesa_2.5.0.bb
+++ /dev/null
@@ -1,19 +0,0 @@
-require xorg-driver-video.inc
-
-LIC_FILES_CHKSUM = "file://COPYING;md5=a1f0610ebdc6f314a9fa5102a8c5c1b0"
-
-SUMMARY = "X.Org X server -- Generic Vesa video driver"
-
-DESCRIPTION = "vesa is an Xorg driver for generic VESA video cards. It \
-can drive most VESA-compatible video cards, but only makes use of the \
-basic standard VESA core that is common to these cards. The driver \
-supports depths 8, 15 16 and 24."
-
-DEPENDS += "virtual/libx11 xorgproto libpciaccess"
-
-COMPATIBLE_HOST = '(i.86|x86_64).*-linux'
-
-RRECOMMENDS:${PN} += "xserver-xorg-module-libint10"
-
-SRC_URI[sha256sum] = "1f1624f3c73906801ad1bc98335a2cb5676a7a4d18e5374d9a1d18464e54c659"
-
diff --git a/meta/recipes-graphics/xorg-driver/xf86-video-vesa_2.6.0.bb b/meta/recipes-graphics/xorg-driver/xf86-video-vesa_2.6.0.bb
new file mode 100644
index 0000000000..33c2035f6b
--- /dev/null
+++ b/meta/recipes-graphics/xorg-driver/xf86-video-vesa_2.6.0.bb
@@ -0,0 +1,20 @@
+require xorg-driver-video.inc
+
+LIC_FILES_CHKSUM = "file://COPYING;md5=a1f0610ebdc6f314a9fa5102a8c5c1b0"
+
+SUMMARY = "X.Org X server -- Generic Vesa video driver"
+
+DESCRIPTION = "vesa is an Xorg driver for generic VESA video cards. It \
+can drive most VESA-compatible video cards, but only makes use of the \
+basic standard VESA core that is common to these cards. The driver \
+supports depths 8, 15 16 and 24."
+
+DEPENDS += "virtual/libx11 xorgproto libpciaccess"
+
+COMPATIBLE_HOST = '(i.86|x86_64).*-linux'
+
+RRECOMMENDS:${PN} += "xserver-xorg-module-libint10"
+
+XORG_DRIVER_COMPRESSOR = ".tar.xz"
+SRC_URI[sha256sum] = "10082771b1a8f659c60b3d9b70456a4b59948a75b3cade2dfa33b8b05e549bb1"
+
diff --git a/meta/recipes-graphics/xorg-driver/xf86-video-vmware/0002-add-option-for-vmwgfx.patch b/meta/recipes-graphics/xorg-driver/xf86-video-vmware/0002-add-option-for-vmwgfx.patch
deleted file mode 100644
index b37f697aa4..0000000000
--- a/meta/recipes-graphics/xorg-driver/xf86-video-vmware/0002-add-option-for-vmwgfx.patch
+++ /dev/null
@@ -1,103 +0,0 @@
-From da6612659a279bcb02f70622b53a3894fbc11052 Mon Sep 17 00:00:00 2001
-From: Jussi Kukkonen <jussi.kukkonen@intel.com>
-Date: Fri, 9 Dec 2016 14:35:45 +0200
-Subject: [PATCH] Add option for vmwgfx
-
-Upstream-Status: Submitted
-
-Signed-off-by: Ross Burton <ross.burton@intel.com>
-Signed-off-by: Jussi Kukkonen <jussi.kukkonen@intel.com>
----
- configure.ac | 51 +++++++++++++++++++++------------------------------
- 1 file changed, 21 insertions(+), 30 deletions(-)
-
-diff --git a/configure.ac b/configure.ac
-index 9f259ce..f3fec57 100644
---- a/configure.ac
-+++ b/configure.ac
-@@ -64,6 +64,13 @@ AC_ARG_ENABLE(vmwarectrl-client,
- [VMWARECTRL=$enableval], [VMWARECTRL=no])
- AM_CONDITIONAL(BUILD_VMWARECTRL, [test "x$VMWARECTRL" = xyes])
-
-+# Define a configure option to build the vmwgfx driver
-+AC_ARG_ENABLE(vmwgfx,
-+ AS_HELP_STRING([--disable-vmwgfx],
-+ [Disable vmwgfx driver (KMS/3D) (default: enabled)]),
-+ [VMWGFX="$enableval"], [VMWGFX="yes"])
-+AM_CONDITIONAL(BUILD_VMWGFX, test "x$VMWGFX" = xyes)
-+
- # Store the list of server defined optional extensions in REQUIRED_MODULES
- XORG_DRIVER_CHECK_EXT(RANDR, randrproto)
- XORG_DRIVER_CHECK_EXT(RENDER, renderproto)
-@@ -85,11 +92,6 @@ PKG_CHECK_EXISTS([xorg-server >= 1.4.99],
- [AC_DEFINE([HAVE_XORG_SERVER_1_5_0], 1,
- [Has version 1.5.0 or greater of the Xserver])])
-
--PKG_CHECK_EXISTS([xorg-server >= 1.7.0],
-- [AC_DEFINE([HAVE_XORG_SERVER_1_7_0], 1,
-- [Has version 1.7.0 or greater of the Xserver])
-- BUILD_VMWGFX=yes],[BUILD_VMWGFX=no])
--
- PKG_CHECK_EXISTS([xorg-server >= 1.12.0],
- [AC_DEFINE([HAVE_XORG_SERVER_1_12_0], 1,
- [Has version 1.12.0 or greater of the Xserver])])
-@@ -114,34 +116,22 @@ AM_CONDITIONAL(XSERVER_LIBPCIACCESS, test "x$XSERVER_LIBPCIACCESS" = xyes)
-
- AC_SUBST([moduledir])
-
--if test x$BUILD_VMWGFX = xyes; then
-- PKG_CHECK_MODULES([LIBDRM], [libdrm],[],[BUILD_VMWGFX=no])
--fi
--if test x$BUILD_VMWGFX = xyes; then
--#
--# Early versions of mesa 10 forgot to bump the XA major version number in
--# the xa_tracker.h header
--#
-- PKG_CHECK_MODULES([XATRACKER], [xatracker >= 0.4.0],
-- [PKG_CHECK_EXISTS([xatracker = 2.0.0],
-- [AC_DEFINE([HAVE_XA_2], 1,
-- [Has version 2 of XA])])],
-- [BUILD_VMWGFX=no])
--#
--# Check for prime.
--#
-+AC_MSG_CHECKING([whether to build Kernel Mode Setting and 3D])
-+if test x$VMWGFX = xyes; then
-+ AC_MSG_RESULT([yes])
-+ PKG_CHECK_MODULES([LIBDRM], [libdrm])
-+ # Check for prime.
- PKG_CHECK_EXISTS([libdrm >= 2.4.38],
- [AC_DEFINE([HAVE_LIBDRM_2_4_38], 1,
- [Has version 2.4.38 or greater of libdrm])])
--fi
-
--DRIVER_NAME=vmware
--AC_SUBST([DRIVER_NAME])
--
--AC_MSG_CHECKING([whether to build Kernel Mode Setting and 3D])
--if test x$BUILD_VMWGFX = xyes; then
-- AC_MSG_RESULT([yes])
-- AC_DEFINE([BUILD_VMWGFX], 1, [Building the vmwgfx driver path])
-+ # Early versions of mesa 10 forgot to bump the XA major version number
-+ # in the xa_tracker.h header
-+ PKG_CHECK_MODULES([XATRACKER], [xatracker >= 0.4.0],
-+ [PKG_CHECK_EXISTS([xatracker = 2.0.0],
-+ [AC_DEFINE([HAVE_XA_2], 1,
-+ [Has version 2 of XA])])])
-+ AC_DEFINE([BUILD_VMWGFX], 1, [Building the vmwgfx driver path])
- libudev_check=yes
- AC_ARG_WITH([libudev],
- [AS_HELP_STRING([--without-libudev],
-@@ -158,7 +148,8 @@ else
- AC_MSG_RESULT([no])
- fi
-
--AM_CONDITIONAL(BUILD_VMWGFX, test "x$BUILD_VMWGFX" = xyes)
-+DRIVER_NAME=vmware
-+AC_SUBST([DRIVER_NAME])
-
- AC_CONFIG_FILES([
- Makefile
---
-2.1.4
-
diff --git a/meta/recipes-graphics/xorg-driver/xf86-video-vmware_13.3.0.bb b/meta/recipes-graphics/xorg-driver/xf86-video-vmware_13.3.0.bb
deleted file mode 100644
index 41612b7f07..0000000000
--- a/meta/recipes-graphics/xorg-driver/xf86-video-vmware_13.3.0.bb
+++ /dev/null
@@ -1,19 +0,0 @@
-require xorg-driver-video.inc
-
-SUMMARY = "X.Org X server -- VMware SVGA display driver"
-
-DESCRIPTION = "vmware is an Xorg driver for VMware virtual video cards."
-
-LIC_FILES_CHKSUM = "file://COPYING;md5=5fcd7d437a959a15fbee8707747c6b53"
-
-DEPENDS += "virtual/libx11 xorgproto libpciaccess"
-
-SRC_URI += "file://0002-add-option-for-vmwgfx.patch"
-
-SRC_URI[md5sum] = "08d66d062055080ff699ab4869726ea2"
-SRC_URI[sha256sum] = "47971924659e51666a757269ad941a059ef5afe7a47b5101c174a6022ac4066c"
-
-COMPATIBLE_HOST = '(i.86.*-linux|x86_64.*-linux)'
-
-PACKAGECONFIG ?= ""
-PACKAGECONFIG[vmwgfx] = "--enable-vmwgfx, --disable-vmwgfx, libdrm virtual/mesa"
diff --git a/meta/recipes-graphics/xorg-driver/xf86-video-vmware_13.4.0.bb b/meta/recipes-graphics/xorg-driver/xf86-video-vmware_13.4.0.bb
new file mode 100644
index 0000000000..d7ba2b9e86
--- /dev/null
+++ b/meta/recipes-graphics/xorg-driver/xf86-video-vmware_13.4.0.bb
@@ -0,0 +1,14 @@
+require xorg-driver-video.inc
+
+SUMMARY = "X.Org X server -- VMware SVGA display driver"
+
+DESCRIPTION = "vmware is an Xorg driver for VMware virtual video cards."
+
+LIC_FILES_CHKSUM = "file://COPYING;md5=5fcd7d437a959a15fbee8707747c6b53"
+
+DEPENDS += "virtual/libx11 xorgproto libpciaccess"
+XORG_DRIVER_COMPRESSOR = ".tar.xz"
+
+SRC_URI[sha256sum] = "aed31ee5ed5ecc6e2226705383e7ad06f7602c1376a295305f376b17af3eb81a"
+
+COMPATIBLE_HOST = '(i.86.*-linux|x86_64.*-linux)'
diff --git a/meta/recipes-graphics/xorg-driver/xorg-driver-common.inc b/meta/recipes-graphics/xorg-driver/xorg-driver-common.inc
index 8b3f19426b..c61cdd05b3 100644
--- a/meta/recipes-graphics/xorg-driver/xorg-driver-common.inc
+++ b/meta/recipes-graphics/xorg-driver/xorg-driver-common.inc
@@ -13,8 +13,9 @@ SRC_URI = "${XORG_MIRROR}/individual/driver/${BPN}-${PV}${XORG_DRIVER_COMPRESSOR
FILES:${PN} += " ${libdir}/xorg/modules/drivers/*.so"
+inherit pkgconfig features_check
XORGBUILDCLASS ??= "autotools"
-inherit ${XORGBUILDCLASS} pkgconfig features_check
+inherit_defer ${XORGBUILDCLASS}
# depends on virtual/xserver
REQUIRED_DISTRO_FEATURES = "x11"
diff --git a/meta/recipes-graphics/xorg-font/encodings/nocompiler.patch b/meta/recipes-graphics/xorg-font/encodings/nocompiler.patch
deleted file mode 100644
index ec7c7d80c1..0000000000
--- a/meta/recipes-graphics/xorg-font/encodings/nocompiler.patch
+++ /dev/null
@@ -1,42 +0,0 @@
-From b08c43a0842076e0a94e88ad6456a9326cd7ffc9 Mon Sep 17 00:00:00 2001
-From: Richard Purdie <richard.purdie@linuxfoundation.org>
-Date: Tue, 17 May 2011 23:03:02 +0000
-Subject: [PATCH] Improve handling of 'all' architecture recipes and their
-
-Upstream-Status: Inappropriate [configuration]
-
-XORG_DEFAULT_OPTIONS pulls in the following dependency chains:
-
-XORG_CWARNFLAGS -> AC_PROG_CC_C99
-XORG_STRICT_OPTION -> AC_PROG_CC_C99, XORG_CWARNFLAGS
-XORG_MANPAGE_SECTIONS -> AC_CANONICAL_HOST -> Checks host
-
-each of which triggers the use of the host compiler. As an "all"
-architecture package, it shouldn't need a compiler (and doesn't).
-
-RP 17/5/2011
-
----
- configure.ac | 6 +++---
- 1 file changed, 3 insertions(+), 3 deletions(-)
-
-diff --git a/configure.ac b/configure.ac
-index 622c27b..5ee84ed 100644
---- a/configure.ac
-+++ b/configure.ac
-@@ -3,12 +3,12 @@ AC_INIT([encodings], [1.0.5],
- [https://gitlab.freedesktop.org/xorg/font/encodings/issues])
- AM_INIT_AUTOMAKE([foreign dist-bzip2])
-
--# Require xorg-macros: XORG_DEFAULT_OPTIONS
- m4_ifndef([XORG_MACROS_VERSION],
- [m4_fatal([must install xorg-macros 1.3 or later before running autoconf/autogen])])
- XORG_MACROS_VERSION(1.3)
--XORG_DEFAULT_OPTIONS
--
-+XORG_RELEASE_VERSION
-+XORG_CHANGELOG
-+XORG_INSTALL
- AC_PROG_INSTALL
-
- # Require X.Org's font util macros 1.2 or later
diff --git a/meta/recipes-graphics/xorg-font/encodings_1.0.5.bb b/meta/recipes-graphics/xorg-font/encodings_1.0.5.bb
deleted file mode 100644
index 8ddbaf24dd..0000000000
--- a/meta/recipes-graphics/xorg-font/encodings_1.0.5.bb
+++ /dev/null
@@ -1,25 +0,0 @@
-SUMMARY = "The Xorg font encoding files"
-
-DESCRIPTION = "The encodings that map to specific characters for a \
-number of Xorg and common fonts."
-
-require xorg-font-common.inc
-LICENSE = "PD"
-LIC_FILES_CHKSUM = "file://COPYING;md5=9da93f2daf2d5572faa2bfaf0dbd9e76"
-PE = "1"
-PR = "r3"
-
-DEPENDS = "mkfontscale-native mkfontdir-native font-util-native"
-RDEPENDS:${PN} = ""
-
-SRC_URI += "file://nocompiler.patch"
-SRC_URI[md5sum] = "bbae4f247b88ccde0e85ed6a403da22a"
-SRC_URI[sha256sum] = "bd96e16143a044b19e87f217cf6a3763a70c561d1076aad6f6d862ec41774a31"
-
-inherit allarch
-
-EXTRA_OECONF += "--with-encodingsdir=${datadir}/fonts/X11/encodings"
-
-# postinst from .inc doesn't apply to this recipe
-pkg_postinst:${PN} () {
-}
diff --git a/meta/recipes-graphics/xorg-font/encodings_1.1.0.bb b/meta/recipes-graphics/xorg-font/encodings_1.1.0.bb
new file mode 100644
index 0000000000..7432c08ec0
--- /dev/null
+++ b/meta/recipes-graphics/xorg-font/encodings_1.1.0.bb
@@ -0,0 +1,24 @@
+SUMMARY = "The Xorg font encoding files"
+
+DESCRIPTION = "The encodings that map to specific characters for a \
+number of Xorg and common fonts."
+
+require xorg-font-common.inc
+LICENSE = "PD"
+LIC_FILES_CHKSUM = "file://COPYING;md5=9da93f2daf2d5572faa2bfaf0dbd9e76"
+PE = "1"
+
+DEPENDS = "mkfontscale-native mkfontdir-native font-util-native"
+RDEPENDS:${PN} = ""
+
+SRC_URI[sha256sum] = "9ff13c621756cfa12e95f32ba48a5b23839e8f577d0048beda66c67dab4de975"
+
+SRC_URI_EXT = "xz"
+
+inherit allarch
+
+EXTRA_OECONF += "--with-encodingsdir=${datadir}/fonts/X11/encodings"
+
+# postinst from .inc doesn't apply to this recipe
+pkg_postinst:${PN} () {
+}
diff --git a/meta/recipes-graphics/xorg-font/font-alias-1.0.4/nocompiler.patch b/meta/recipes-graphics/xorg-font/font-alias-1.0.4/nocompiler.patch
deleted file mode 100644
index e54eee4ea9..0000000000
--- a/meta/recipes-graphics/xorg-font/font-alias-1.0.4/nocompiler.patch
+++ /dev/null
@@ -1,42 +0,0 @@
-From fa2bbd48a55d54bd2dae30edf7936e3ab7587c96 Mon Sep 17 00:00:00 2001
-From: Richard Purdie <richard.purdie@linuxfoundation.org>
-Date: Tue, 17 May 2011 23:03:02 +0000
-Subject: [PATCH] Improve handling of 'all' architecture recipes and their
-
-Upstream-Status: Inappropriate [configuration]
-
-XORG_DEFAULT_OPTIONS pulls in the following dependency chains:
-
-XORG_CWARNFLAGS -> AC_PROG_CC_C99
-XORG_STRICT_OPTION -> AC_PROG_CC_C99, XORG_CWARNFLAGS
-XORG_MANPAGE_SECTIONS -> AC_CANONICAL_HOST -> Checks host
-
-each of which triggers the use of the host compiler. As an "all"
-architecture package, it shouldn't need a compiler (and doesn't).
-
-RP 17/5/2011
-
----
- configure.ac | 6 +++---
- 1 file changed, 3 insertions(+), 3 deletions(-)
-
-diff --git a/configure.ac b/configure.ac
-index 3407c69..9fe1f89 100644
---- a/configure.ac
-+++ b/configure.ac
-@@ -27,12 +27,12 @@ AC_INIT([font-alias], [1.0.4],
- [font-alias])
- AM_INIT_AUTOMAKE([foreign dist-bzip2])
-
--# Require xorg-macros: XORG_DEFAULT_OPTIONS
- m4_ifndef([XORG_MACROS_VERSION],
- [m4_fatal([must install xorg-macros 1.3 or later before running autoconf/autogen])])
- XORG_MACROS_VERSION(1.3)
--XORG_DEFAULT_OPTIONS
--
-+XORG_RELEASE_VERSION
-+XORG_CHANGELOG
-+XORG_INSTALL
- AC_PROG_INSTALL
-
- # Require X.Org's font util macros 1.2 or later
diff --git a/meta/recipes-graphics/xorg-font/font-alias_1.0.4.bb b/meta/recipes-graphics/xorg-font/font-alias_1.0.4.bb
deleted file mode 100644
index d80ecbeb8f..0000000000
--- a/meta/recipes-graphics/xorg-font/font-alias_1.0.4.bb
+++ /dev/null
@@ -1,23 +0,0 @@
-SUMMARY = "X font aliases"
-
-require xorg-font-common.inc
-
-LICENSE = "MIT"
-LIC_FILES_CHKSUM = "file://COPYING;md5=bf0158b89be493d523d69d9f29265038 \
- file://cyrillic/fonts.alias;md5=d27bc65a2655cacdbc2644b51c064c20 \
- file://75dpi/fonts.alias;md5=6bc48023f2ae7f3bfc105db7b0ee6b49 \
- file://misc/fonts.alias;md5=1bdafa7c31aa54f87f3531f2ef8ed5a6 \
- file://100dpi/fonts.alias;md5=85bebd6ca213aa656c301a72eb4397cb \
- "
-
-SRC_URI += "file://nocompiler.patch"
-
-DEPENDS = "util-macros-native font-util-native"
-RDEPENDS:${PN} = "encodings font-util"
-RDEPENDS:${PN}:class-native = "font-util-native"
-
-inherit allarch
-
-PE = "1"
-
-SRC_URI[sha256sum] = "f3111ae8bf2e980f5f56af400e8eefe5fc9f4207f4a412ea79637fd66c945276"
diff --git a/meta/recipes-graphics/xorg-font/font-alias_1.0.5.bb b/meta/recipes-graphics/xorg-font/font-alias_1.0.5.bb
new file mode 100644
index 0000000000..03c39a4334
--- /dev/null
+++ b/meta/recipes-graphics/xorg-font/font-alias_1.0.5.bb
@@ -0,0 +1,22 @@
+SUMMARY = "X font aliases"
+
+require xorg-font-common.inc
+
+LICENSE = "MIT"
+LIC_FILES_CHKSUM = "file://COPYING;md5=bf0158b89be493d523d69d9f29265038 \
+ file://cyrillic/fonts.alias;md5=d27bc65a2655cacdbc2644b51c064c20 \
+ file://75dpi/fonts.alias;md5=6bc48023f2ae7f3bfc105db7b0ee6b49 \
+ file://misc/fonts.alias;md5=1bdafa7c31aa54f87f3531f2ef8ed5a6 \
+ file://100dpi/fonts.alias;md5=85bebd6ca213aa656c301a72eb4397cb \
+ "
+SRC_URI_EXT = "xz"
+
+DEPENDS = "util-macros-native font-util-native"
+RDEPENDS:${PN} = "encodings font-util"
+RDEPENDS:${PN}:class-native = "font-util-native"
+
+inherit allarch
+
+PE = "1"
+
+SRC_URI[sha256sum] = "9f89e217bb73e0e3636a0a493fbf8b7c995156e0c53d9a0476d201b67c2d6b6e"
diff --git a/meta/recipes-graphics/xorg-font/font-util_1.3.2.bb b/meta/recipes-graphics/xorg-font/font-util_1.3.2.bb
deleted file mode 100644
index b3e832756b..0000000000
--- a/meta/recipes-graphics/xorg-font/font-util_1.3.2.bb
+++ /dev/null
@@ -1,22 +0,0 @@
-SUMMARY = "X.Org font package creation/installation utilities"
-
-require xorg-font-common.inc
-
-#Unicode is MIT
-LICENSE = "MIT & MIT & BSD-4-Clause & BSD-2-Clause"
-LIC_FILES_CHKSUM = "file://COPYING;md5=5df208ec65eb84ce5bb8d82d8f3b9675 \
- file://ucs2any.c;endline=28;md5=8357dc567fc628bd12696f15b2a33bcb \
- file://bdftruncate.c;endline=26;md5=4f82ffc101a1b165eae9c6998abff937 \
- file://map-ISO8859-1;beginline=9;endline=23;md5=1cecb984063248f29ffe5c46f5c04f34"
-
-DEPENDS = "encodings util-macros"
-DEPENDS:class-native = "util-macros-native"
-RDEPENDS:${PN} = "mkfontdir mkfontscale encodings"
-RDEPENDS:${PN}:class-native = ""
-
-BBCLASSEXTEND = "native"
-
-SRC_URI[md5sum] = "3d6adb76fdd072db8c8fae41b40855e8"
-SRC_URI[sha256sum] = "3ad880444123ac06a7238546fa38a2a6ad7f7e0cc3614de7e103863616522282"
-
-SYSROOT_DIRS_IGNORE:remove = "${datadir}/fonts"
diff --git a/meta/recipes-graphics/xorg-font/font-util_1.4.1.bb b/meta/recipes-graphics/xorg-font/font-util_1.4.1.bb
new file mode 100644
index 0000000000..0c65927fb4
--- /dev/null
+++ b/meta/recipes-graphics/xorg-font/font-util_1.4.1.bb
@@ -0,0 +1,23 @@
+SUMMARY = "X.Org font package creation/installation utilities"
+
+require xorg-font-common.inc
+
+LICENSE = "Unicode-TOU & BSD-4-Clause & BSD-2-Clause"
+LIC_FILES_CHKSUM = "file://COPYING;md5=2a9e705c00e463c8d294f90486852e06 \
+ file://ucs2any.c;endline=28;md5=8357dc567fc628bd12696f15b2a33bcb \
+ file://bdftruncate.c;endline=26;md5=4f82ffc101a1b165eae9c6998abff937 \
+ file://map-ISO8859-1;beginline=1;endline=4;md5=9c9c1d525d29c0e82b5c99edbb8e71c1 \
+ "
+
+DEPENDS = "encodings util-macros"
+DEPENDS:class-native = "util-macros-native"
+RDEPENDS:${PN} = "mkfontdir mkfontscale encodings"
+RDEPENDS:${PN}:class-native = ""
+
+BBCLASSEXTEND = "native"
+
+SRC_URI[sha256sum] = "5c9f64123c194b150fee89049991687386e6ff36ef2af7b80ba53efaf368cc95"
+
+SYSROOT_DIRS_IGNORE:remove = "${datadir}/fonts"
+
+SRC_URI_EXT = "xz"
diff --git a/meta/recipes-graphics/xorg-font/xorg-font-common.inc b/meta/recipes-graphics/xorg-font/xorg-font-common.inc
index 2df23efed4..edf7cf7642 100644
--- a/meta/recipes-graphics/xorg-font/xorg-font-common.inc
+++ b/meta/recipes-graphics/xorg-font/xorg-font-common.inc
@@ -9,7 +9,8 @@ RDEPENDS:${PN} = "encodings font-util font-alias"
XORG_PN = "${BPN}"
-SRC_URI = "${XORG_MIRROR}/individual/font/${XORG_PN}-${PV}.tar.bz2"
+SRC_URI_EXT = "bz2"
+SRC_URI = "${XORG_MIRROR}/individual/font/${XORG_PN}-${PV}.tar.${SRC_URI_EXT}"
S = "${WORKDIR}/${XORG_PN}-${PV}"
inherit autotools pkgconfig features_check
diff --git a/meta/recipes-graphics/xorg-font/xorg-minimal-fonts.bb b/meta/recipes-graphics/xorg-font/xorg-minimal-fonts.bb
index 88f534ccf7..d32ef77447 100644
--- a/meta/recipes-graphics/xorg-font/xorg-minimal-fonts.bb
+++ b/meta/recipes-graphics/xorg-font/xorg-minimal-fonts.bb
@@ -13,9 +13,6 @@ SRC_URI = "file://misc"
SOURCE_DATE_EPOCH = "1613559011"
PE = "1"
-PR = "r4"
-# remove at next version upgrade or when output changes
-HASHEQUIV_HASH_VERSION .= ".1"
inherit allarch features_check
diff --git a/meta/recipes-graphics/xorg-lib/libdmx_1.1.4.bb b/meta/recipes-graphics/xorg-lib/libdmx_1.1.4.bb
deleted file mode 100644
index 66172cb30c..0000000000
--- a/meta/recipes-graphics/xorg-lib/libdmx_1.1.4.bb
+++ /dev/null
@@ -1,20 +0,0 @@
-require xorg-lib-common.inc
-
-SUMMARY = "DMX: Distributed Multihead X extension library"
-
-DESCRIPTION = "The DMX extension provides support for communication with \
-and control of Xdmx(1) server. Attributes of the Xdmx(1) server and of \
-the back-end screens attached to the server can be queried and modified \
-via this protocol."
-
-LICENSE = "MIT"
-LIC_FILES_CHKSUM = "file://COPYING;md5=a3c3499231a8035efd0e004cfbd3b72a \
- file://src/dmx.c;endline=33;md5=c43f19af03c7c8619cadc9724ed9afe1"
-
-DEPENDS += "libxext xorgproto"
-
-PE = "1"
-
-SRC_URI[md5sum] = "d2f1f0ec68ac3932dd7f1d9aa0a7a11c"
-SRC_URI[sha256sum] = "253f90005d134fa7a209fbcbc5a3024335367c930adf0f3203e754cf32747243"
-
diff --git a/meta/recipes-graphics/xorg-lib/libfontenc_1.1.4.bb b/meta/recipes-graphics/xorg-lib/libfontenc_1.1.4.bb
deleted file mode 100644
index 1fc74b1995..0000000000
--- a/meta/recipes-graphics/xorg-lib/libfontenc_1.1.4.bb
+++ /dev/null
@@ -1,17 +0,0 @@
-SUMMARY = "X font encoding library"
-
-DESCRIPTION = "libfontenc is a library which helps font libraries \
-portably determine and deal with different encodings of fonts."
-
-require xorg-lib-common.inc
-
-LICENSE = "MIT"
-LIC_FILES_CHKSUM = "file://COPYING;md5=96254c20ab81c63e65b26f0dbcd4a1c1"
-
-DEPENDS += "zlib xorgproto font-util"
-PE = "1"
-
-SRC_URI[md5sum] = "6447db6a689fb530c218f0f8328c3abc"
-SRC_URI[sha256sum] = "2cfcce810ddd48f2e5dc658d28c1808e86dcf303eaff16728b9aa3dbc0092079"
-
-BBCLASSEXTEND = "native"
diff --git a/meta/recipes-graphics/xorg-lib/libfontenc_1.1.8.bb b/meta/recipes-graphics/xorg-lib/libfontenc_1.1.8.bb
new file mode 100644
index 0000000000..0fc117fc58
--- /dev/null
+++ b/meta/recipes-graphics/xorg-lib/libfontenc_1.1.8.bb
@@ -0,0 +1,16 @@
+SUMMARY = "X font encoding library"
+
+DESCRIPTION = "libfontenc is a library which helps font libraries \
+portably determine and deal with different encodings of fonts."
+
+require xorg-lib-common.inc
+
+LICENSE = "MIT"
+LIC_FILES_CHKSUM = "file://COPYING;md5=96254c20ab81c63e65b26f0dbcd4a1c1"
+
+DEPENDS += "zlib xorgproto font-util"
+PE = "1"
+
+SRC_URI[sha256sum] = "7b02c3d405236e0d86806b1de9d6868fe60c313628b38350b032914aa4fd14c6"
+
+BBCLASSEXTEND = "native"
diff --git a/meta/recipes-graphics/xorg-lib/libice_1.0.10.bb b/meta/recipes-graphics/xorg-lib/libice_1.0.10.bb
deleted file mode 100644
index 44339eb27a..0000000000
--- a/meta/recipes-graphics/xorg-lib/libice_1.0.10.bb
+++ /dev/null
@@ -1,28 +0,0 @@
-SUMMARY = "ICE: Inter-Client Exchange library"
-
-DESCRIPTION = "The Inter-Client Exchange (ICE) protocol provides a \
-generic framework for building protocols on top of reliable, byte-stream \
-transport connections. It provides basic mechanisms for setting up and \
-shutting down connections, for performing authentication, for \
-negotiating versions, and for reporting errors. "
-
-require xorg-lib-common.inc
-
-LICENSE = "MIT"
-LIC_FILES_CHKSUM = "file://COPYING;md5=d162b1b3c6fa812da9d804dcf8584a93"
-
-DEPENDS += "xorgproto xtrans"
-PROVIDES = "ice"
-
-PE = "1"
-
-XORG_PN = "libICE"
-
-BBCLASSEXTEND = "native nativesdk"
-
-SRC_URI[md5sum] = "76d77499ee7120a56566891ca2c0dbcf"
-SRC_URI[sha256sum] = "6f86dce12cf4bcaf5c37dddd8b1b64ed2ddf1ef7b218f22b9942595fb747c348"
-
-PACKAGECONFIG ??= "${@bb.utils.filter('DISTRO_FEATURES', 'ipv6', d)}"
-PACKAGECONFIG[arc4] = "ac_cv_lib_bsd_arc4random_buf=yes,ac_cv_lib_bsd_arc4random_buf=no,libbsd"
-PACKAGECONFIG[ipv6] = "--enable-ipv6,--disable-ipv6,"
diff --git a/meta/recipes-graphics/xorg-lib/libice_1.1.1.bb b/meta/recipes-graphics/xorg-lib/libice_1.1.1.bb
new file mode 100644
index 0000000000..d5e3d28e06
--- /dev/null
+++ b/meta/recipes-graphics/xorg-lib/libice_1.1.1.bb
@@ -0,0 +1,27 @@
+SUMMARY = "ICE: Inter-Client Exchange library"
+
+DESCRIPTION = "The Inter-Client Exchange (ICE) protocol provides a \
+generic framework for building protocols on top of reliable, byte-stream \
+transport connections. It provides basic mechanisms for setting up and \
+shutting down connections, for performing authentication, for \
+negotiating versions, and for reporting errors. "
+
+require xorg-lib-common.inc
+
+LICENSE = "MIT"
+LIC_FILES_CHKSUM = "file://COPYING;md5=d162b1b3c6fa812da9d804dcf8584a93"
+
+DEPENDS += "xorgproto xtrans"
+PROVIDES = "ice"
+
+PE = "1"
+
+XORG_PN = "libICE"
+
+BBCLASSEXTEND = "native nativesdk"
+
+SRC_URI[sha256sum] = "03e77afaf72942c7ac02ccebb19034e6e20f456dcf8dddadfeb572aa5ad3e451"
+
+PACKAGECONFIG ??= "${@bb.utils.filter('DISTRO_FEATURES', 'ipv6', d)}"
+PACKAGECONFIG[arc4] = "ac_cv_lib_bsd_arc4random_buf=yes,ac_cv_lib_bsd_arc4random_buf=no,libbsd"
+PACKAGECONFIG[ipv6] = "--enable-ipv6,--disable-ipv6,"
diff --git a/meta/recipes-graphics/xorg-lib/libpciaccess_0.16.bb b/meta/recipes-graphics/xorg-lib/libpciaccess_0.16.bb
deleted file mode 100644
index d55315efc2..0000000000
--- a/meta/recipes-graphics/xorg-lib/libpciaccess_0.16.bb
+++ /dev/null
@@ -1,19 +0,0 @@
-SUMMARY = "Generic PCI access library for X"
-
-DESCRIPTION = "libpciaccess provides functionality for X to access the \
-PCI bus and devices in a platform-independent way."
-
-require xorg-lib-common.inc
-
-SRC_URI += "\
-"
-
-SRC_URI[md5sum] = "b34e2cbdd6aa8f9cc3fa613fd401a6d6"
-SRC_URI[sha256sum] = "214c9d0d884fdd7375ec8da8dcb91a8d3169f263294c9a90c575bf1938b9f489"
-
-LICENSE = "MIT & MIT"
-LIC_FILES_CHKSUM = "file://COPYING;md5=277aada5222b9a22fbf3471ff3687068"
-
-REQUIRED_DISTRO_FEATURES = ""
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-graphics/xorg-lib/libpciaccess_0.18.1.bb b/meta/recipes-graphics/xorg-lib/libpciaccess_0.18.1.bb
new file mode 100644
index 0000000000..d311fd95e0
--- /dev/null
+++ b/meta/recipes-graphics/xorg-lib/libpciaccess_0.18.1.bb
@@ -0,0 +1,26 @@
+SUMMARY = "Generic PCI access library for X"
+
+DESCRIPTION = "libpciaccess provides functionality for X to access the \
+PCI bus and devices in a platform-independent way."
+
+HOMEPAGE = "http://www.x.org"
+BUGTRACKER = "https://bugs.freedesktop.org/enter_bug.cgi?product=xorg"
+
+SECTION = "x11/libs"
+LICENSE = "MIT"
+DEPENDS = "util-macros"
+
+SRC_URI = "${XORG_MIRROR}/individual/lib/${BP}.tar.xz"
+
+inherit features_check pkgconfig meson
+
+REQUIRED_DISTRO_FEATURES ?= "x11"
+
+SRC_URI[sha256sum] = "4af43444b38adb5545d0ed1c2ce46d9608cc47b31c2387fc5181656765a6fa76"
+
+LICENSE = "MIT & MIT"
+LIC_FILES_CHKSUM = "file://COPYING;md5=54c978968e565218eea36cf03ef24352"
+
+REQUIRED_DISTRO_FEATURES = ""
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-graphics/xorg-lib/libpthread-stubs_0.4.bb b/meta/recipes-graphics/xorg-lib/libpthread-stubs_0.4.bb
deleted file mode 100644
index b398e8b626..0000000000
--- a/meta/recipes-graphics/xorg-lib/libpthread-stubs_0.4.bb
+++ /dev/null
@@ -1,19 +0,0 @@
-SUMMARY = "Library that provides weak aliases for pthread functions"
-DESCRIPTION = "This library provides weak aliases for pthread functions \
-not provided in libc or otherwise available by default."
-HOMEPAGE = "http://xcb.freedesktop.org"
-BUGTRACKER = "http://bugs.freedesktop.org/buglist.cgi?product=XCB"
-SECTION = "x11/libs"
-LICENSE = "MIT"
-LIC_FILES_CHKSUM = "file://COPYING;md5=6edc1fea03d959f0c2d743fe5ca746ad"
-
-SRC_URI = "http://xcb.freedesktop.org/dist/${BP}.tar.bz2"
-SRC_URI[md5sum] = "48c1544854a94db0e51499cc3afd797f"
-SRC_URI[sha256sum] = "e4d05911a3165d3b18321cc067fdd2f023f06436e391c6a28dff618a78d2e733"
-
-inherit autotools
-
-RDEPENDS:${PN}-dev = ""
-RRECOMMENDS:${PN}-dbg = "${PN}-dev (= ${EXTENDPKGV})"
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-graphics/xorg-lib/libpthread-stubs_0.5.bb b/meta/recipes-graphics/xorg-lib/libpthread-stubs_0.5.bb
new file mode 100644
index 0000000000..62384e6d2e
--- /dev/null
+++ b/meta/recipes-graphics/xorg-lib/libpthread-stubs_0.5.bb
@@ -0,0 +1,18 @@
+SUMMARY = "Library that provides weak aliases for pthread functions"
+DESCRIPTION = "This library provides weak aliases for pthread functions \
+not provided in libc or otherwise available by default."
+HOMEPAGE = "http://xcb.freedesktop.org"
+BUGTRACKER = "http://bugs.freedesktop.org/buglist.cgi?product=XCB"
+SECTION = "x11/libs"
+LICENSE = "MIT"
+LIC_FILES_CHKSUM = "file://COPYING;md5=6edc1fea03d959f0c2d743fe5ca746ad"
+
+SRC_URI = "http://xcb.freedesktop.org/dist/${BP}.tar.xz"
+SRC_URI[sha256sum] = "59da566decceba7c2a7970a4a03b48d9905f1262ff94410a649224e33d2442bc"
+
+inherit autotools
+
+DEV_PKG_DEPENDENCY = ""
+RRECOMMENDS:${PN}-dbg = "${PN}-dev (= ${EXTENDPKGV})"
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-graphics/xorg-lib/libsm_1.2.3.bb b/meta/recipes-graphics/xorg-lib/libsm_1.2.3.bb
deleted file mode 100644
index 90917ef437..0000000000
--- a/meta/recipes-graphics/xorg-lib/libsm_1.2.3.bb
+++ /dev/null
@@ -1,31 +0,0 @@
-SUMMARY = "SM: Session Management library"
-
-DESCRIPTION = "The Session Management Library (SMlib) is a low-level \"C\" \
-language interface to XSMP. The purpose of the X Session Management \
-Protocol (XSMP) is to provide a uniform mechanism for users to save and \
-restore their sessions. A session is a group of clients, each of which \
-has a particular state."
-
-require xorg-lib-common.inc
-
-LICENSE = "MIT"
-LIC_FILES_CHKSUM = "file://COPYING;md5=c0fb37f44e02bdbde80546024400728d"
-
-DEPENDS += "libice xorgproto xtrans"
-
-# libSM can work without libuuid, we explicitly disable it to break the following circular dependency
-# when DISTRO_FEATURES contains 'systemd' and 'x11'.
-# systemd -> dbus -> libsm -> util-linux -> systemd
-EXTRA_OECONF += "--without-libuuid"
-
-PE = "1"
-
-XORG_PN = "libSM"
-
-SRC_URI[md5sum] = "87c7fad1c1813517979184c8ccd76628"
-SRC_URI[sha256sum] = "2d264499dcb05f56438dee12a1b4b71d76736ce7ba7aa6efbf15ebb113769cbb"
-
-PACKAGECONFIG ??= "${@bb.utils.filter('DISTRO_FEATURES', 'ipv6', d)}"
-PACKAGECONFIG[ipv6] = "--enable-ipv6,--disable-ipv6,"
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-graphics/xorg-lib/libsm_1.2.4.bb b/meta/recipes-graphics/xorg-lib/libsm_1.2.4.bb
new file mode 100644
index 0000000000..ab030723c6
--- /dev/null
+++ b/meta/recipes-graphics/xorg-lib/libsm_1.2.4.bb
@@ -0,0 +1,30 @@
+SUMMARY = "SM: Session Management library"
+
+DESCRIPTION = "The Session Management Library (SMlib) is a low-level \"C\" \
+language interface to XSMP. The purpose of the X Session Management \
+Protocol (XSMP) is to provide a uniform mechanism for users to save and \
+restore their sessions. A session is a group of clients, each of which \
+has a particular state."
+
+require xorg-lib-common.inc
+
+LICENSE = "MIT"
+LIC_FILES_CHKSUM = "file://COPYING;md5=c0fb37f44e02bdbde80546024400728d"
+
+DEPENDS += "libice xorgproto xtrans"
+
+# libSM can work without libuuid, we explicitly disable it to break the following circular dependency
+# when DISTRO_FEATURES contains 'systemd' and 'x11'.
+# systemd -> dbus -> libsm -> util-linux -> systemd
+EXTRA_OECONF += "--without-libuuid"
+
+PE = "1"
+
+XORG_PN = "libSM"
+
+SRC_URI[sha256sum] = "fdcbe51e4d1276b1183da77a8a4e74a137ca203e0bcfb20972dd5f3347e97b84"
+
+PACKAGECONFIG ??= "${@bb.utils.filter('DISTRO_FEATURES', 'ipv6', d)}"
+PACKAGECONFIG[ipv6] = "--enable-ipv6,--disable-ipv6,"
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-graphics/xorg-lib/libx11-compose-data/0001-Drop-x11-dependencies.patch b/meta/recipes-graphics/xorg-lib/libx11-compose-data/0001-Drop-x11-dependencies.patch
index 46a53a17f2..61af98d95e 100644
--- a/meta/recipes-graphics/xorg-lib/libx11-compose-data/0001-Drop-x11-dependencies.patch
+++ b/meta/recipes-graphics/xorg-lib/libx11-compose-data/0001-Drop-x11-dependencies.patch
@@ -6,12 +6,16 @@ Subject: [PATCH] Drop x11 dependencies
Upstream-Status: Inappropriate [OE-Specific]
Signed-off-by: Ming Liu <ming.liu@toradex.com>
+
+Rebase for version 1.8.3.
+
+Signed-off-by: Kai Kang <kai.kang@windriver.com>
---
configure.ac | 44 --------------------------------------------
1 file changed, 44 deletions(-)
diff --git a/configure.ac b/configure.ac
-index a5fd706..b42488c 100644
+index 33d0102..50e2d93 100644
--- a/configure.ac
+++ b/configure.ac
@@ -76,27 +76,8 @@ fi
@@ -20,7 +24,7 @@ index a5fd706..b42488c 100644
# Checks for pkg-config packages
-
-# Always required
--X11_REQUIRES='xproto >= 7.0.17 xextproto xtrans xcb >= 1.11.1'
+-X11_REQUIRES='xproto >= 7.0.25 xextproto xtrans xcb >= 1.11.1'
-X11_EXTRA_DEPS="xcb >= 1.11.1"
-
PKG_PROG_PKG_CONFIG()
@@ -42,7 +46,7 @@ index a5fd706..b42488c 100644
# Check for dlopen
AC_MSG_CHECKING([if run-time linking is supported])
AC_SEARCH_LIBS(dlopen,[dl svld])
-@@ -237,30 +218,6 @@ AC_SUBST(XTHREAD_CFLAGS)
+@@ -255,30 +236,6 @@ AC_SUBST(USE_THREAD_LIBS)
AC_CHECK_FUNC(poll, [AC_DEFINE(USE_POLL, 1, [poll() function is available])], )
@@ -73,7 +77,7 @@ index a5fd706..b42488c 100644
AC_ARG_ENABLE(xcms,
AS_HELP_STRING([--disable-xcms],
[Disable Xlib support for CMS *EXPERIMENTAL*]),
-@@ -340,7 +297,6 @@ X11_LIBDIR="${libdir}/X11"
+@@ -358,7 +315,6 @@ X11_LIBDIR="${libdir}/X11"
AX_DEFINE_DIR(X11_LIBDIR, X11_LIBDIR, [Location of libX11 library data])
AC_SUBST(X11_LIBDIR)
diff --git a/meta/recipes-graphics/xorg-lib/libx11-compose-data_1.6.8.bb b/meta/recipes-graphics/xorg-lib/libx11-compose-data_1.6.8.bb
deleted file mode 100644
index b231f8b34d..0000000000
--- a/meta/recipes-graphics/xorg-lib/libx11-compose-data_1.6.8.bb
+++ /dev/null
@@ -1,36 +0,0 @@
-SUMMARY = "Xlib: Compose data files for libx11"
-DESCRIPTION = "This package provides the compose data files for libx11."
-
-python () {
- if bb.utils.contains('DISTRO_FEATURES', 'x11', True, False, d):
- raise bb.parse.SkipRecipe("libx11-compose-data is incompatible with x11 distro feature, use libx11 instead.")
-}
-
-require xorg-lib-common.inc
-
-LICENSE = "MIT & MIT & BSD-1-Clause & HPND & HPND-sell-variant"
-LIC_FILES_CHKSUM = "file://COPYING;md5=172255dee66bb0151435b2d5d709fcf7"
-
-SRC_URI[md5sum] = "c5fa5a86a20e3591bed6c046498d4b8f"
-SRC_URI[sha256sum] = "b289a845c189e251e0e884cc0f9269bbe97c238df3741e854ec4c17c21e473d5"
-
-SRC_URI += "file://0001-Drop-x11-dependencies.patch \
- "
-
-XORG_PN = "libX11"
-
-EXTRA_OECONF += "--disable-xkb"
-
-PACKAGES = "${PN}"
-
-FILES:${PN} = "${datadir}/X11/locale ${libdir}/X11/locale"
-
-do_compile() {
- oe_runmake -C nls
-}
-
-do_install() {
- oe_runmake DESTDIR=${D} -C nls install
-}
-
-REQUIRED_DISTRO_FEATURES = ""
diff --git a/meta/recipes-graphics/xorg-lib/libx11-compose-data_1.8.4.bb b/meta/recipes-graphics/xorg-lib/libx11-compose-data_1.8.4.bb
new file mode 100644
index 0000000000..2131f46213
--- /dev/null
+++ b/meta/recipes-graphics/xorg-lib/libx11-compose-data_1.8.4.bb
@@ -0,0 +1,35 @@
+SUMMARY = "Xlib: Compose data files for libx11"
+DESCRIPTION = "This package provides the compose data files for libx11."
+
+python () {
+ if bb.utils.contains('DISTRO_FEATURES', 'x11', True, False, d):
+ raise bb.parse.SkipRecipe("libx11-compose-data is incompatible with x11 distro feature, use libx11 instead.")
+}
+
+require xorg-lib-common.inc
+
+LICENSE = "MIT & BSD-1-Clause & HPND & HPND-sell-variant & ISC"
+LIC_FILES_CHKSUM = "file://COPYING;md5=1d49cdd2b386c5db11ec636d680b7116"
+
+XORG_PN = "libX11"
+
+SRC_URI += "file://0001-Drop-x11-dependencies.patch \
+ "
+
+SRC_URI[sha256sum] = "c9a287a5aefa9804ce3cfafcf516fe96ed3f7e8e45c0e2ee59e84c86757df518"
+
+REQUIRED_DISTRO_FEATURES = ""
+
+EXTRA_OECONF += "--disable-xkb"
+
+do_compile() {
+ oe_runmake -C nls
+}
+
+do_install() {
+ oe_runmake DESTDIR=${D} -C nls install
+}
+
+PACKAGES = "${PN}"
+
+FILES:${PN} = "${datadir}/X11/locale ${libdir}/X11/locale"
diff --git a/meta/recipes-graphics/xorg-lib/libx11_1.8.9.bb b/meta/recipes-graphics/xorg-lib/libx11_1.8.9.bb
new file mode 100644
index 0000000000..5dcb063473
--- /dev/null
+++ b/meta/recipes-graphics/xorg-lib/libx11_1.8.9.bb
@@ -0,0 +1,43 @@
+SUMMARY = "Xlib: C Language X Interface library"
+
+DESCRIPTION = "This package provides a client interface to the X Window \
+System, otherwise known as 'Xlib'. It provides a complete API for the \
+basic functions of the window system."
+
+require xorg-lib-common.inc
+
+LICENSE = "MIT & BSD-1-Clause & HPND & HPND-sell-variant & ISC"
+LIC_FILES_CHKSUM = "file://COPYING;md5=1d49cdd2b386c5db11ec636d680b7116"
+
+DEPENDS += "xorgproto \
+ xtrans \
+ libxcb \
+ xorgproto-native \
+ autoconf-archive-native \
+ "
+
+PROVIDES = "virtual/libx11"
+
+PE = "1"
+
+XORG_PN = "libX11"
+
+SRC_URI += "file://disable_tests.patch"
+
+SRC_URI[sha256sum] = "779d8f111d144ef93e2daa5f23a762ce9555affc99592844e71c4243d3bd3262"
+
+inherit gettext
+
+EXTRA_OECONF += "--with-keysymdefdir=${STAGING_INCDIR}/X11/ --disable-xf86bigfont"
+EXTRA_OEMAKE += 'CWARNFLAGS=""'
+
+PACKAGECONFIG ??= "xcms"
+PACKAGECONFIG[xcms] = "--enable-xcms,--disable-xcms"
+
+PACKAGES =+ "${PN}-xcb"
+
+FILES:${PN} += "${datadir}/X11/XKeysymDB ${datadir}/X11/XErrorDB ${datadir}/X11/Xcms.txt"
+FILES:${PN}-xcb += "${libdir}/libX11-xcb.so.*"
+FILES:${PN}-locale += "${datadir}/X11/locale ${libdir}/X11/locale"
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-graphics/xorg-lib/libx11_1.8.bb b/meta/recipes-graphics/xorg-lib/libx11_1.8.bb
deleted file mode 100644
index 6d5a8626d6..0000000000
--- a/meta/recipes-graphics/xorg-lib/libx11_1.8.bb
+++ /dev/null
@@ -1,48 +0,0 @@
-SUMMARY = "Xlib: C Language X Interface library"
-
-DESCRIPTION = "This package provides a client interface to the X Window \
-System, otherwise known as 'Xlib'. It provides a complete API for the \
-basic functions of the window system."
-
-require xorg-lib-common.inc
-
-FILESEXTRAPATHS =. "${FILE_DIRNAME}/libx11:"
-
-PE = "1"
-
-# temporarily override SRC_URI which is hard-coded in xorg-lib-common.inc
-# since new versions of packages use a new compression format - .tar.gz
-SRC_URI = "${XORG_MIRROR}/individual/lib/${XORG_PN}-${PV}.tar.xz"
-
-SRC_URI += "file://disable_tests.patch \
- "
-SRC_URI[sha256sum] = "081bf42ebab023aa92cfdb20c7af8c5ae13d13e88a5e22f90f4453ef80bbdde4"
-
-PROVIDES = "virtual/libx11"
-
-XORG_PN = "libX11"
-LICENSE = "MIT & MIT & BSD-1-Clause & HPND & HPND-sell-variant"
-LIC_FILES_CHKSUM = "file://COPYING;md5=172255dee66bb0151435b2d5d709fcf7"
-
-DEPENDS += "xorgproto \
- xtrans \
- libxcb \
- xorgproto-native \
- autoconf-archive \
- "
-
-EXTRA_OECONF += "--with-keysymdefdir=${STAGING_INCDIR}/X11/ --disable-xf86bigfont"
-EXTRA_OEMAKE += 'CWARNFLAGS=""'
-
-PACKAGECONFIG ??= "xcms"
-PACKAGECONFIG[xcms] = "--enable-xcms,--disable-xcms"
-
-PACKAGES =+ "${PN}-xcb"
-
-inherit gettext
-
-FILES:${PN} += "${datadir}/X11/XKeysymDB ${datadir}/X11/XErrorDB ${datadir}/X11/Xcms.txt"
-FILES:${PN}-xcb += "${libdir}/libX11-xcb.so.*"
-FILES:${PN}-locale += "${datadir}/X11/locale ${libdir}/X11/locale"
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-graphics/xorg-lib/libxau_1.0.11.bb b/meta/recipes-graphics/xorg-lib/libxau_1.0.11.bb
new file mode 100644
index 0000000000..f1b492c39f
--- /dev/null
+++ b/meta/recipes-graphics/xorg-lib/libxau_1.0.11.bb
@@ -0,0 +1,23 @@
+SUMMARY = "Xau: X Authority Database library"
+
+DESCRIPTION = "libxau provides the main interfaces to the X11 \
+authorisation handling, which controls authorisation for X connections, \
+both client-side and server-side."
+
+require xorg-lib-common.inc
+
+inherit gettext
+
+LICENSE = "MIT"
+LIC_FILES_CHKSUM = "file://COPYING;md5=7908e342491198401321cec1956807ec"
+
+DEPENDS += " xorgproto"
+PROVIDES = "xau"
+
+PE = "1"
+
+XORG_PN = "libXau"
+
+BBCLASSEXTEND = "native nativesdk"
+
+SRC_URI[sha256sum] = "f3fa3282f5570c3f6bd620244438dbfbdd580fc80f02f549587a0f8ab329bbeb"
diff --git a/meta/recipes-graphics/xorg-lib/libxau_1.0.9.bb b/meta/recipes-graphics/xorg-lib/libxau_1.0.9.bb
deleted file mode 100644
index 3fbc3a5b44..0000000000
--- a/meta/recipes-graphics/xorg-lib/libxau_1.0.9.bb
+++ /dev/null
@@ -1,24 +0,0 @@
-SUMMARY = "Xau: X Authority Database library"
-
-DESCRIPTION = "libxau provides the main interfaces to the X11 \
-authorisation handling, which controls authorisation for X connections, \
-both client-side and server-side."
-
-require xorg-lib-common.inc
-
-inherit gettext
-
-LICENSE = "MIT"
-LIC_FILES_CHKSUM = "file://COPYING;md5=7908e342491198401321cec1956807ec"
-
-DEPENDS += " xorgproto"
-PROVIDES = "xau"
-
-PE = "1"
-
-XORG_PN = "libXau"
-
-BBCLASSEXTEND = "native nativesdk"
-
-SRC_URI[md5sum] = "c5f16288f2da9f071b29111d68797480"
-SRC_URI[sha256sum] = "ccf8cbf0dbf676faa2ea0a6d64bcc3b6746064722b606c8c52917ed00dcb73ec"
diff --git a/meta/recipes-graphics/xorg-lib/libxcb/0001-use-_Alignof-to-avoid-UB-in-ALIGNOF.patch b/meta/recipes-graphics/xorg-lib/libxcb/0001-use-_Alignof-to-avoid-UB-in-ALIGNOF.patch
new file mode 100644
index 0000000000..5b159d646d
--- /dev/null
+++ b/meta/recipes-graphics/xorg-lib/libxcb/0001-use-_Alignof-to-avoid-UB-in-ALIGNOF.patch
@@ -0,0 +1,45 @@
+From d55b6b1fa87700f3eae3a29522972d2e7be7d53e Mon Sep 17 00:00:00 2001
+From: Khem Raj <raj.khem@gmail.com>
+Date: Sat, 14 Jan 2023 10:11:35 -0800
+Subject: [PATCH] use _Alignof to avoid UB in ALIGNOF
+
+WG14 N2350 clearly says that it is an UB having type definitions
+within "offsetof" [1]. Clang 16+ has started diagnosing it [2].
+This patch changes the implementation of macro
+"ALIGNOF" to builtin "_Alignof" to avoid undefined behavior.
+
+_Alignof() return the ABI required minimum alignment.
+
+[1] https://www.open-std.org/jtc1/sc22/wg14/www/docs/n2350.htm
+[2] https://reviews.llvm.org/D133574
+
+Upstream-Status: Submitted [https://gitlab.freedesktop.org/xorg/lib/libxcb/-/merge_requests/42]
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+---
+ src/c_client.py | 3 +--
+ 1 file changed, 1 insertion(+), 2 deletions(-)
+
+diff --git a/src/c_client.py b/src/c_client.py
+index fd256f0..4e48f13 100644
+--- a/src/c_client.py
++++ b/src/c_client.py
+@@ -288,7 +288,6 @@ def c_open(self):
+ _c('#include "%s.h"', _ns.header)
+
+ _c('')
+- _c('#define ALIGNOF(type) offsetof(struct { char dummy; type member; }, member)')
+
+ if _ns.is_ext:
+ for (n, h) in self.direct_imports:
+@@ -1266,7 +1265,7 @@ def _c_serialize_helper_fields(context, self,
+ count += 1
+
+ code_lines.append(
+- '%s xcb_align_to = ALIGNOF(%s);'
++ '%s xcb_align_to = _Alignof(%s);'
+ % (space,
+ 'char'
+ if field.c_field_type == 'void' or field.type.is_switch
+--
+2.39.0
+
diff --git a/meta/recipes-graphics/xorg-lib/libxcb_1.15.bb b/meta/recipes-graphics/xorg-lib/libxcb_1.15.bb
deleted file mode 100644
index 839577326e..0000000000
--- a/meta/recipes-graphics/xorg-lib/libxcb_1.15.bb
+++ /dev/null
@@ -1,37 +0,0 @@
-SUMMARY = "XCB: The X protocol C binding library"
-DESCRIPTION = "The X protocol C-language Binding (XCB) is a replacement \
-for Xlib featuring a small footprint, latency hiding, direct access to \
-the protocol, improved threading support, and extensibility."
-HOMEPAGE = "http://xcb.freedesktop.org"
-BUGTRACKER = "https://bugs.freedesktop.org/enter_bug.cgi?product=XCB"
-SECTION = "x11/libs"
-
-LICENSE = "MIT"
-LIC_FILES_CHKSUM = "file://COPYING;md5=d763b081cb10c223435b01e00dc0aba7"
-
-SRC_URI = "http://xcb.freedesktop.org/dist/libxcb-${PV}.tar.xz"
-
-SRC_URI[sha256sum] = "cc38744f817cf6814c847e2df37fcb8997357d72fa4bcbc228ae0fe47219a059"
-
-BBCLASSEXTEND = "native nativesdk"
-
-DEPENDS = "xcb-proto xorgproto libxau libpthread-stubs libxdmcp"
-
-PACKAGES_DYNAMIC = "^${PN}-.*"
-
-FILES:${PN} = "${libdir}/libxcb.so.*"
-
-inherit autotools pkgconfig features_check
-
-# The libxau and others requires x11 in DISTRO_FEATURES
-REQUIRED_DISTRO_FEATURES = "x11"
-
-export PYTHON = "python3"
-
-do_install:append () {
- chown root.root ${D}${datadir}/doc/${BPN}/tutorial -R
-}
-
-python populate_packages:prepend () {
- do_split_packages(d, '${libdir}', r'^libxcb-(.*)\.so\..*$', '${PN}-%s', 'XCB library module for %s', allow_links=True)
-}
diff --git a/meta/recipes-graphics/xorg-lib/libxcb_1.16.1.bb b/meta/recipes-graphics/xorg-lib/libxcb_1.16.1.bb
new file mode 100644
index 0000000000..de3290aa9f
--- /dev/null
+++ b/meta/recipes-graphics/xorg-lib/libxcb_1.16.1.bb
@@ -0,0 +1,38 @@
+SUMMARY = "XCB: The X protocol C binding library"
+DESCRIPTION = "The X protocol C-language Binding (XCB) is a replacement \
+for Xlib featuring a small footprint, latency hiding, direct access to \
+the protocol, improved threading support, and extensibility."
+HOMEPAGE = "http://xcb.freedesktop.org"
+BUGTRACKER = "https://bugs.freedesktop.org/enter_bug.cgi?product=XCB"
+SECTION = "x11/libs"
+
+LICENSE = "MIT"
+LIC_FILES_CHKSUM = "file://COPYING;md5=d763b081cb10c223435b01e00dc0aba7"
+
+SRC_URI = "http://xcb.freedesktop.org/dist/libxcb-${PV}.tar.xz \
+ file://0001-use-_Alignof-to-avoid-UB-in-ALIGNOF.patch"
+
+SRC_URI[sha256sum] = "f24d187154c8e027b358fc7cb6588e35e33e6a92f11c668fe77396a7ae66e311"
+
+BBCLASSEXTEND = "native nativesdk"
+
+DEPENDS = "xcb-proto xorgproto libxau libpthread-stubs libxdmcp"
+
+PACKAGES_DYNAMIC = "^${PN}-.*"
+
+FILES:${PN} = "${libdir}/libxcb.so.*"
+
+inherit autotools pkgconfig features_check
+
+# The libxau and others requires x11 in DISTRO_FEATURES
+REQUIRED_DISTRO_FEATURES = "x11"
+
+export PYTHON = "python3"
+
+do_install:append () {
+ chown root.root ${D}${datadir}/doc/${BPN}/tutorial -R
+}
+
+python populate_packages:prepend () {
+ do_split_packages(d, '${libdir}', r'^libxcb-(.*)\.so\..*$', '${PN}-%s', 'XCB library module for %s', allow_links=True)
+}
diff --git a/meta/recipes-graphics/xorg-lib/libxcomposite/change-include-order.patch b/meta/recipes-graphics/xorg-lib/libxcomposite/change-include-order.patch
deleted file mode 100644
index 60331f6e78..0000000000
--- a/meta/recipes-graphics/xorg-lib/libxcomposite/change-include-order.patch
+++ /dev/null
@@ -1,18 +0,0 @@
-Upstream-Status: Inappropriate [configuration]
-
-This patch makes the build use its own Xcomposite.h over rather than an
-older Xcomposite.h that might already be installed in the staging dir.
-
-Index: libXcomposite-0.4.3/src/Makefile.am
-===================================================================
---- libXcomposite-0.4.3.orig/src/Makefile.am 2010-06-30 22:42:53.000000000 -0700
-+++ libXcomposite-0.4.3/src/Makefile.am 2010-11-23 23:09:34.544322930 -0800
-@@ -19,7 +19,7 @@
- # TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
- # PERFORMANCE OF THIS SOFTWARE.
-
--AM_CFLAGS = $(CWARNFLAGS) $(XCOMPOSITE_CFLAGS) $(XFIXES_CFLAGS)
-+AM_CFLAGS = -I$(top_srcdir)/include $(CWARNFLAGS) $(XCOMPOSITE_CFLAGS) $(XFIXES_CFLAGS)
- AM_CPPFLAGS = -I$(top_srcdir)/include
-
- lib_LTLIBRARIES = libXcomposite.la
diff --git a/meta/recipes-graphics/xorg-lib/libxcomposite_0.4.5.bb b/meta/recipes-graphics/xorg-lib/libxcomposite_0.4.5.bb
deleted file mode 100644
index c07f31b92d..0000000000
--- a/meta/recipes-graphics/xorg-lib/libxcomposite_0.4.5.bb
+++ /dev/null
@@ -1,28 +0,0 @@
-SUMMARY = "Xcomposite: X Composite extension library"
-
-DESCRIPTION = "The composite extension provides three related \
-mechanisms: per-hierarchy storage, automatic shadow update, and external \
-parent. In per-hierarchy storage, the rendering of an entire hierarchy \
-of windows is redirected to off-screen storage. In automatic shadow \
-update, when a hierarchy is rendered off-screen, the X server provides \
-an automatic mechanism for presenting those contents within the parent \
-window. In external parent, a mechanism for providing redirection of \
-compositing transformations through a client."
-
-require xorg-lib-common.inc
-
-LICENSE = "MIT"
-LIC_FILES_CHKSUM = "file://COPYING;md5=3f2907aad541f6f226fbc58cc1b3cdf1"
-
-DEPENDS += " xorgproto virtual/libx11 libxfixes libxext"
-PROVIDES = "xcomposite"
-BBCLASSEXTEND = "native nativesdk"
-
-PE = "1"
-
-XORG_PN = "libXcomposite"
-
-SRC_URI += " file://change-include-order.patch"
-
-SRC_URI[md5sum] = "3fa0841ea89024719b20cd702a9b54e0"
-SRC_URI[sha256sum] = "b3218a2c15bab8035d16810df5b8251ffc7132ff3aa70651a1fba0bfe9634e8f"
diff --git a/meta/recipes-graphics/xorg-lib/libxcomposite_0.4.6.bb b/meta/recipes-graphics/xorg-lib/libxcomposite_0.4.6.bb
new file mode 100644
index 0000000000..881f579695
--- /dev/null
+++ b/meta/recipes-graphics/xorg-lib/libxcomposite_0.4.6.bb
@@ -0,0 +1,25 @@
+SUMMARY = "Xcomposite: X Composite extension library"
+
+DESCRIPTION = "The composite extension provides three related \
+mechanisms: per-hierarchy storage, automatic shadow update, and external \
+parent. In per-hierarchy storage, the rendering of an entire hierarchy \
+of windows is redirected to off-screen storage. In automatic shadow \
+update, when a hierarchy is rendered off-screen, the X server provides \
+an automatic mechanism for presenting those contents within the parent \
+window. In external parent, a mechanism for providing redirection of \
+compositing transformations through a client."
+
+require xorg-lib-common.inc
+
+LICENSE = "MIT"
+LIC_FILES_CHKSUM = "file://COPYING;md5=3f2907aad541f6f226fbc58cc1b3cdf1"
+
+DEPENDS += " xorgproto virtual/libx11 libxfixes libxext"
+PROVIDES = "xcomposite"
+BBCLASSEXTEND = "native nativesdk"
+
+PE = "1"
+
+XORG_PN = "libXcomposite"
+
+SRC_URI[sha256sum] = "fe40bcf0ae1a09070eba24088a5eb9810efe57453779ec1e20a55080c6dc2c87"
diff --git a/meta/recipes-graphics/xorg-lib/libxcursor_1.2.1.bb b/meta/recipes-graphics/xorg-lib/libxcursor_1.2.1.bb
deleted file mode 100644
index 4fd7e949c4..0000000000
--- a/meta/recipes-graphics/xorg-lib/libxcursor_1.2.1.bb
+++ /dev/null
@@ -1,23 +0,0 @@
-SUMMARY = "Xcursor: X Cursor management library"
-
-DESCRIPTION = "Xcursor is a simple library designed to help locate and \
-load cursors. Cursors can be loaded from files or memory. A library of \
-common cursors exists which map to the standard X cursor names. Cursors \
-can exist in several sizes and the library automatically picks the best \
-size."
-
-require xorg-lib-common.inc
-SRC_URI = "${XORG_MIRROR}/individual/lib/${XORG_PN}-${PV}.tar.xz"
-
-LICENSE = "MIT"
-LIC_FILES_CHKSUM = "file://COPYING;md5=dbf3bd0f2348eeccd0f217146ba26250"
-
-DEPENDS += "libxrender libxfixes"
-
-PE = "1"
-
-XORG_PN = "libXcursor"
-
-SRC_URI[sha256sum] = "46c143731610bafd2070159a844571b287ac26192537d047a39df06155492104"
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-graphics/xorg-lib/libxcursor_1.2.2.bb b/meta/recipes-graphics/xorg-lib/libxcursor_1.2.2.bb
new file mode 100644
index 0000000000..159ac4f7fa
--- /dev/null
+++ b/meta/recipes-graphics/xorg-lib/libxcursor_1.2.2.bb
@@ -0,0 +1,23 @@
+SUMMARY = "Xcursor: X Cursor management library"
+
+DESCRIPTION = "Xcursor is a simple library designed to help locate and \
+load cursors. Cursors can be loaded from files or memory. A library of \
+common cursors exists which map to the standard X cursor names. Cursors \
+can exist in several sizes and the library automatically picks the best \
+size."
+
+require xorg-lib-common.inc
+SRC_URI = "${XORG_MIRROR}/individual/lib/${XORG_PN}-${PV}.tar.xz"
+
+LICENSE = "MIT"
+LIC_FILES_CHKSUM = "file://COPYING;md5=dbf3bd0f2348eeccd0f217146ba26250"
+
+DEPENDS += "libxrender libxfixes"
+
+PE = "1"
+
+XORG_PN = "libXcursor"
+
+SRC_URI[sha256sum] = "53d071bd2cc56e517a30998d5e685c8a74556ddada43c6985d14da9a023a88ee"
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-graphics/xorg-lib/libxcvt_0.1.1.bb b/meta/recipes-graphics/xorg-lib/libxcvt_0.1.1.bb
deleted file mode 100644
index 134c40acd1..0000000000
--- a/meta/recipes-graphics/xorg-lib/libxcvt_0.1.1.bb
+++ /dev/null
@@ -1,19 +0,0 @@
-SUMMARY = "Library providing a standalone version of the X server \
-implementation of the VESA CVT standard timing modelines generator"
-HOMEPAGE = "https://gitlab.freedesktop.org/xorg/lib/libxcvt"
-BUGTRACKER = "https://gitlab.freedesktop.org/xorg/lib/libxcvt/-/issues"
-LICENSE = "MIT"
-LIC_FILES_CHKSUM = "file://COPYING;md5=129947a06984d6faa6f9a9788fa2a03f"
-SECTION = "x11/libs"
-
-SRC_URI = "git://gitlab.freedesktop.org/xorg/lib/libxcvt.git;protocol=https;branch=master"
-SRCREV = "6fe840b9295cfdc41bd734586c5b8756f6af6f9b"
-
-S = "${WORKDIR}/git"
-
-inherit meson
-
-FILES:${PN} = " \
- ${libdir}/libxcvt.so.0* \
- ${bindir}/cvt \
-"
diff --git a/meta/recipes-graphics/xorg-lib/libxcvt_0.1.2.bb b/meta/recipes-graphics/xorg-lib/libxcvt_0.1.2.bb
new file mode 100644
index 0000000000..e62fabdf58
--- /dev/null
+++ b/meta/recipes-graphics/xorg-lib/libxcvt_0.1.2.bb
@@ -0,0 +1,19 @@
+SUMMARY = "Library providing a standalone version of the X server \
+implementation of the VESA CVT standard timing modelines generator"
+HOMEPAGE = "https://gitlab.freedesktop.org/xorg/lib/libxcvt"
+BUGTRACKER = "https://gitlab.freedesktop.org/xorg/lib/libxcvt/-/issues"
+LICENSE = "MIT"
+LIC_FILES_CHKSUM = "file://COPYING;md5=129947a06984d6faa6f9a9788fa2a03f"
+SECTION = "x11/libs"
+
+SRC_URI = "git://gitlab.freedesktop.org/xorg/lib/libxcvt.git;protocol=https;branch=master"
+SRCREV = "d9ca87eea9eecddaccc3a77227bcb3acf84e89df"
+
+S = "${WORKDIR}/git"
+
+inherit meson
+
+FILES:${PN} = " \
+ ${libdir}/libxcvt.so.0* \
+ ${bindir}/cvt \
+"
diff --git a/meta/recipes-graphics/xorg-lib/libxdamage_1.1.5.bb b/meta/recipes-graphics/xorg-lib/libxdamage_1.1.5.bb
deleted file mode 100644
index b201124d86..0000000000
--- a/meta/recipes-graphics/xorg-lib/libxdamage_1.1.5.bb
+++ /dev/null
@@ -1,29 +0,0 @@
-SUMMARY = "Xdamage: X Damage extension library"
-
-DESCRIPTION = "'Damage' is a term that describes changes make to pixel \
-contents of windows and pixmaps. Damage accumulates as drawing occurs \
-in the drawable. Each drawing operation 'damages' one or more \
-rectangular areas within the drawable. The rectangles are guaranteed to \
-include the set of pixels modified by each operation, but may include \
-significantly more than just those pixels. The DAMAGE extension allows \
-applications to either receive the raw rectangles as a stream of events, \
-or to have them partially processed within the X server to reduce the \
-amount of data transmitted as well as reduce the processing latency once \
-the repaint operation has started."
-
-require xorg-lib-common.inc
-
-LICENSE = "MIT"
-LIC_FILES_CHKSUM = "file://COPYING;md5=9fe101f30dd24134cf43146863241868"
-
-DEPENDS += "virtual/libx11 xorgproto libxfixes"
-PROVIDES = "xdamage"
-
-PE = "1"
-
-XORG_PN = "libXdamage"
-
-SRC_URI[md5sum] = "e3f554267a7a04b042dc1f6352bd6d99"
-SRC_URI[sha256sum] = "b734068643cac3b5f3d2c8279dd366b5bf28c7219d9e9d8717e1383995e0ea45"
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-graphics/xorg-lib/libxdamage_1.1.6.bb b/meta/recipes-graphics/xorg-lib/libxdamage_1.1.6.bb
new file mode 100644
index 0000000000..279d6c33dc
--- /dev/null
+++ b/meta/recipes-graphics/xorg-lib/libxdamage_1.1.6.bb
@@ -0,0 +1,28 @@
+SUMMARY = "Xdamage: X Damage extension library"
+
+DESCRIPTION = "'Damage' is a term that describes changes make to pixel \
+contents of windows and pixmaps. Damage accumulates as drawing occurs \
+in the drawable. Each drawing operation 'damages' one or more \
+rectangular areas within the drawable. The rectangles are guaranteed to \
+include the set of pixels modified by each operation, but may include \
+significantly more than just those pixels. The DAMAGE extension allows \
+applications to either receive the raw rectangles as a stream of events, \
+or to have them partially processed within the X server to reduce the \
+amount of data transmitted as well as reduce the processing latency once \
+the repaint operation has started."
+
+require xorg-lib-common.inc
+
+LICENSE = "MIT"
+LIC_FILES_CHKSUM = "file://COPYING;md5=9fe101f30dd24134cf43146863241868"
+
+DEPENDS += "virtual/libx11 xorgproto libxfixes"
+PROVIDES = "xdamage"
+
+PE = "1"
+
+XORG_PN = "libXdamage"
+
+SRC_URI[sha256sum] = "52733c1f5262fca35f64e7d5060c6fcd81a880ba8e1e65c9621cf0727afb5d11"
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-graphics/xorg-lib/libxdmcp_1.1.3.bb b/meta/recipes-graphics/xorg-lib/libxdmcp_1.1.3.bb
deleted file mode 100644
index 92ec954df2..0000000000
--- a/meta/recipes-graphics/xorg-lib/libxdmcp_1.1.3.bb
+++ /dev/null
@@ -1,29 +0,0 @@
-SUMMARY = "XDMCP: X Display Manager Control Protocol library"
-
-DESCRIPTION = "The purpose of the X Display Manager Control Protocol \
-(XDMCP) is to provide a uniform mechanism for an autonomous display to \
-request login service from a remote host. An X terminal (screen, \
-keyboard, mouse, processor, network interface) is a prime example of an \
-autonomous display."
-
-require xorg-lib-common.inc
-
-inherit gettext
-
-LICENSE = "MIT"
-LIC_FILES_CHKSUM = "file://COPYING;md5=d559fb26e129626022e052a5e6e0e123"
-
-DEPENDS += "xorgproto"
-PROVIDES = "xdmcp"
-
-PE = "1"
-
-XORG_PN = "libXdmcp"
-
-BBCLASSEXTEND = "native nativesdk"
-
-SRC_URI[md5sum] = "115c5c12ecce0e749cd91d999a5fd160"
-SRC_URI[sha256sum] = "20523b44aaa513e17c009e873ad7bbc301507a3224c232610ce2e099011c6529"
-
-PACKAGECONFIG ??= ""
-PACKAGECONFIG[arc4] = "ac_cv_lib_bsd_arc4random_buf=yes,ac_cv_lib_bsd_arc4random_buf=no,libbsd"
diff --git a/meta/recipes-graphics/xorg-lib/libxdmcp_1.1.5.bb b/meta/recipes-graphics/xorg-lib/libxdmcp_1.1.5.bb
new file mode 100644
index 0000000000..d1a1a2e7f9
--- /dev/null
+++ b/meta/recipes-graphics/xorg-lib/libxdmcp_1.1.5.bb
@@ -0,0 +1,28 @@
+SUMMARY = "XDMCP: X Display Manager Control Protocol library"
+
+DESCRIPTION = "The purpose of the X Display Manager Control Protocol \
+(XDMCP) is to provide a uniform mechanism for an autonomous display to \
+request login service from a remote host. An X terminal (screen, \
+keyboard, mouse, processor, network interface) is a prime example of an \
+autonomous display."
+
+require xorg-lib-common.inc
+
+inherit gettext
+
+LICENSE = "MIT"
+LIC_FILES_CHKSUM = "file://COPYING;md5=d559fb26e129626022e052a5e6e0e123"
+
+DEPENDS += "xorgproto"
+PROVIDES = "xdmcp"
+
+PE = "1"
+
+XORG_PN = "libXdmcp"
+
+BBCLASSEXTEND = "native nativesdk"
+
+SRC_URI[sha256sum] = "d8a5222828c3adab70adf69a5583f1d32eb5ece04304f7f8392b6a353aa2228c"
+
+PACKAGECONFIG ??= ""
+PACKAGECONFIG[arc4] = "ac_cv_lib_bsd_arc4random_buf=yes,ac_cv_lib_bsd_arc4random_buf=no,libbsd"
diff --git a/meta/recipes-graphics/xorg-lib/libxext_1.3.4.bb b/meta/recipes-graphics/xorg-lib/libxext_1.3.4.bb
deleted file mode 100644
index a0ba54d184..0000000000
--- a/meta/recipes-graphics/xorg-lib/libxext_1.3.4.bb
+++ /dev/null
@@ -1,26 +0,0 @@
-SUMMARY = "XExt: X Extension library"
-
-DESCRIPTION = "libXext provides an X Window System client interface to \
-several extensions to the X protocol. The supported protocol extensions \
-are DOUBLE-BUFFER, DPMS, Extended-Visual-Information, LBX, MIT_SHM, \
-MIT_SUNDRY-NONSTANDARD, Multi-Buffering, SECURITY, SHAPE, SYNC, TOG-CUP, \
-XC-APPGROUP, XC-MISC, XTEST. libXext also provides a small set of \
-utility functions to aid authors of client APIs for X protocol \
-extensions."
-
-require xorg-lib-common.inc
-
-LICENSE = "MIT"
-LIC_FILES_CHKSUM = "file://COPYING;md5=879ce266785414bd1cbc3bc2f4d9d7c8"
-
-DEPENDS += "xorgproto virtual/libx11"
-PROVIDES = "xext"
-
-PE = "1"
-
-XORG_PN = "libXext"
-
-BBCLASSEXTEND = "native nativesdk"
-
-SRC_URI[md5sum] = "f5b48bb76ba327cd2a8dc7a383532a95"
-SRC_URI[sha256sum] = "59ad6fcce98deaecc14d39a672cf218ca37aba617c9a0f691cac3bcd28edf82b"
diff --git a/meta/recipes-graphics/xorg-lib/libxext_1.3.6.bb b/meta/recipes-graphics/xorg-lib/libxext_1.3.6.bb
new file mode 100644
index 0000000000..275926a885
--- /dev/null
+++ b/meta/recipes-graphics/xorg-lib/libxext_1.3.6.bb
@@ -0,0 +1,24 @@
+SUMMARY = "XExt: X Extension library"
+
+DESCRIPTION = "libXext provides an X Window System client interface to \
+several extensions to the X protocol. The supported protocol extensions \
+are DOUBLE-BUFFER, DPMS, Extended-Visual-Information, LBX, MIT_SHM, \
+MIT_SUNDRY-NONSTANDARD, Multi-Buffering, SECURITY, SHAPE, SYNC, TOG-CUP, \
+XC-APPGROUP, XC-MISC, XTEST. libXext also provides a small set of \
+utility functions to aid authors of client APIs for X protocol \
+extensions."
+
+require xorg-lib-common.inc
+
+LICENSE = "MIT"
+LIC_FILES_CHKSUM = "file://COPYING;md5=4234bb3b2f1521ea101e4e9db7c33c69"
+
+DEPENDS += "xorgproto virtual/libx11"
+PROVIDES = "xext"
+
+PE = "1"
+
+XORG_PN = "libXext"
+BBCLASSEXTEND = "native nativesdk"
+
+SRC_URI[sha256sum] = "edb59fa23994e405fdc5b400afdf5820ae6160b94f35e3dc3da4457a16e89753"
diff --git a/meta/recipes-graphics/xorg-lib/libxfixes_6.0.0.bb b/meta/recipes-graphics/xorg-lib/libxfixes_6.0.0.bb
deleted file mode 100644
index 3a8543171e..0000000000
--- a/meta/recipes-graphics/xorg-lib/libxfixes_6.0.0.bb
+++ /dev/null
@@ -1,20 +0,0 @@
-SUMMARY = "XFixes: X Fixes extension library"
-
-DESCRIPTION = "X applications have often needed to work around various \
-shortcomings in the core X window system. This extension is designed to \
-provide the minimal server-side support necessary to eliminate problems \
-caused by these workarounds."
-
-require xorg-lib-common.inc
-SRC_URI[sha256sum] = "a7c1a24da53e0b46cac5aea79094b4b2257321c621b258729bc3139149245b4c"
-
-LICENSE = "MIT"
-LIC_FILES_CHKSUM = "file://COPYING;md5=3c1ce42c334a6f5cccb0277556a053e0"
-
-DEPENDS += "virtual/libx11 xorgproto"
-
-PE = "1"
-
-XORG_PN = "libXfixes"
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-graphics/xorg-lib/libxfixes_6.0.1.bb b/meta/recipes-graphics/xorg-lib/libxfixes_6.0.1.bb
new file mode 100644
index 0000000000..28df7bb805
--- /dev/null
+++ b/meta/recipes-graphics/xorg-lib/libxfixes_6.0.1.bb
@@ -0,0 +1,21 @@
+SUMMARY = "XFixes: X Fixes extension library"
+
+DESCRIPTION = "X applications have often needed to work around various \
+shortcomings in the core X window system. This extension is designed to \
+provide the minimal server-side support necessary to eliminate problems \
+caused by these workarounds."
+
+require xorg-lib-common.inc
+SRC_URI[sha256sum] = "b695f93cd2499421ab02d22744458e650ccc88c1d4c8130d60200213abc02d58"
+
+LICENSE = "MIT"
+LIC_FILES_CHKSUM = "file://COPYING;md5=a5a9755c8921cc7dc08a5cfe4267d0ff"
+
+DEPENDS += "virtual/libx11 xorgproto"
+
+PE = "1"
+
+XORG_PN = "libXfixes"
+XORG_EXT = "tar.xz"
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-graphics/xorg-lib/libxfont2_2.0.5.bb b/meta/recipes-graphics/xorg-lib/libxfont2_2.0.5.bb
deleted file mode 100644
index 10e44c39ba..0000000000
--- a/meta/recipes-graphics/xorg-lib/libxfont2_2.0.5.bb
+++ /dev/null
@@ -1,23 +0,0 @@
-SUMMARY = "XFont2: X Font rasterisation library"
-
-DESCRIPTION = "libXfont2 provides various services for X servers, most \
-notably font selection and rasterisation (through external libraries \
-such as freetype)."
-
-require xorg-lib-common.inc
-
-LICENSE = "MIT & MIT & BSD-4-Clause & BSD-2-Clause"
-LIC_FILES_CHKSUM = "file://COPYING;md5=a46c8040f2f737bcd0c435feb2ab1c2c"
-
-DEPENDS += "freetype xtrans xorgproto libfontenc zlib"
-
-XORG_PN = "libXfont2"
-
-BBCLASSEXTEND = "native"
-
-SRC_URI[sha256sum] = "aa7c6f211cf7215c0ab4819ed893dc98034363d7b930b844bb43603c2e10b53e"
-
-PACKAGECONFIG ??= "${@bb.utils.filter('DISTRO_FEATURES', 'ipv6', d)}"
-PACKAGECONFIG[ipv6] = "--enable-ipv6,--disable-ipv6,"
-
-CVE_PRODUCT = "libxfont libxfont2"
diff --git a/meta/recipes-graphics/xorg-lib/libxfont2_2.0.6.bb b/meta/recipes-graphics/xorg-lib/libxfont2_2.0.6.bb
new file mode 100644
index 0000000000..535e7f629e
--- /dev/null
+++ b/meta/recipes-graphics/xorg-lib/libxfont2_2.0.6.bb
@@ -0,0 +1,23 @@
+SUMMARY = "XFont2: X Font rasterisation library"
+
+DESCRIPTION = "libXfont2 provides various services for X servers, most \
+notably font selection and rasterisation (through external libraries \
+such as freetype)."
+
+require xorg-lib-common.inc
+
+LICENSE = "MIT & MIT & BSD-4-Clause & BSD-2-Clause"
+LIC_FILES_CHKSUM = "file://COPYING;md5=a46c8040f2f737bcd0c435feb2ab1c2c"
+
+DEPENDS += "freetype xtrans xorgproto libfontenc zlib"
+
+XORG_PN = "libXfont2"
+
+BBCLASSEXTEND = "native"
+
+SRC_URI[sha256sum] = "74ca20017eb0fb3f56d8d5e60685f560fc85e5ff3d84c61c4cb891e40c27aef4"
+
+PACKAGECONFIG ??= "${@bb.utils.filter('DISTRO_FEATURES', 'ipv6', d)}"
+PACKAGECONFIG[ipv6] = "--enable-ipv6,--disable-ipv6,"
+
+CVE_PRODUCT = "libxfont libxfont2"
diff --git a/meta/recipes-graphics/xorg-lib/libxfont_1.5.4.bb b/meta/recipes-graphics/xorg-lib/libxfont_1.5.4.bb
index 27e686b087..fa6585d6dc 100644
--- a/meta/recipes-graphics/xorg-lib/libxfont_1.5.4.bb
+++ b/meta/recipes-graphics/xorg-lib/libxfont_1.5.4.bb
@@ -15,6 +15,7 @@ PROVIDES = "xfont"
PE = "1"
XORG_PN = "libXfont"
+XORG_EXT = "tar.bz2"
BBCLASSEXTEND = "native"
diff --git a/meta/recipes-graphics/xorg-lib/libxft_2.3.4.bb b/meta/recipes-graphics/xorg-lib/libxft_2.3.4.bb
deleted file mode 100644
index 12636914dd..0000000000
--- a/meta/recipes-graphics/xorg-lib/libxft_2.3.4.bb
+++ /dev/null
@@ -1,32 +0,0 @@
-SUMMARY = "XFt: X FreeType libary"
-
-DESCRIPTION = "Xft was designed to provide good support for scalable \
-fonts, and to do so efficiently. Unlike the core fonts system, it \
-supports features such as anti-aliasing and sub-pixel rasterisation. \
-Perhaps more importantly, it gives applications full control over the \
-way glyphs are rendered, making fine typesetting and WYSIWIG display \
-possible. Finally, it allows applications to use fonts that are not \
-installed system-wide for displaying documents with embedded fonts. Xft \
-is not compatible with the core fonts system: usage of Xft requires \
-fairly extensive changes to toolkits (user-interface libraries)."
-
-require xorg-lib-common.inc
-
-LICENSE = "MIT"
-LIC_FILES_CHKSUM = "file://COPYING;md5=90b90b60eb30f65fc9c2673d7cf59e24"
-
-DEPENDS += "virtual/libx11 libxrender freetype fontconfig"
-PROVIDES = "xft"
-
-PE = "1"
-
-SRC_URI[sha256sum] = "57dedaab20914002146bdae0cb0c769ba3f75214c4c91bd2613d6ef79fc9abdd"
-
-XORG_PN = "libXft"
-
-BBCLASSEXTEND = "native nativesdk"
-
-python populate_packages:prepend () {
- if d.getVar('DEBIAN_NAMES'):
- d.setVar('PKG:${PN}', '${MLPREFIX}libxft2')
-}
diff --git a/meta/recipes-graphics/xorg-lib/libxft_2.3.8.bb b/meta/recipes-graphics/xorg-lib/libxft_2.3.8.bb
new file mode 100644
index 0000000000..2699c1dfd7
--- /dev/null
+++ b/meta/recipes-graphics/xorg-lib/libxft_2.3.8.bb
@@ -0,0 +1,32 @@
+SUMMARY = "XFt: X FreeType libary"
+
+DESCRIPTION = "Xft was designed to provide good support for scalable \
+fonts, and to do so efficiently. Unlike the core fonts system, it \
+supports features such as anti-aliasing and sub-pixel rasterisation. \
+Perhaps more importantly, it gives applications full control over the \
+way glyphs are rendered, making fine typesetting and WYSIWIG display \
+possible. Finally, it allows applications to use fonts that are not \
+installed system-wide for displaying documents with embedded fonts. Xft \
+is not compatible with the core fonts system: usage of Xft requires \
+fairly extensive changes to toolkits (user-interface libraries)."
+
+require xorg-lib-common.inc
+
+LICENSE = "MIT"
+LIC_FILES_CHKSUM = "file://COPYING;md5=90b90b60eb30f65fc9c2673d7cf59e24"
+
+DEPENDS += "virtual/libx11 libxrender freetype fontconfig"
+PROVIDES = "xft"
+
+PE = "1"
+
+SRC_URI[sha256sum] = "5e8c3c4bc2d4c0a40aef6b4b38ed2fb74301640da29f6528154b5009b1c6dd49"
+
+XORG_PN = "libXft"
+
+BBCLASSEXTEND = "native nativesdk"
+
+python populate_packages:prepend () {
+ if d.getVar('DEBIAN_NAMES'):
+ d.setVar('PKG:${PN}', '${MLPREFIX}libxft2')
+}
diff --git a/meta/recipes-graphics/xorg-lib/libxi_1.8.1.bb b/meta/recipes-graphics/xorg-lib/libxi_1.8.1.bb
new file mode 100644
index 0000000000..00bd46ef13
--- /dev/null
+++ b/meta/recipes-graphics/xorg-lib/libxi_1.8.1.bb
@@ -0,0 +1,23 @@
+require xorg-lib-common.inc
+
+SUMMARY = "XI: X Input extension library"
+
+DESCRIPTION = "libxi is an extension to the X11 protocol to support \
+input devices other than the core X keyboard and pointer. It allows \
+client programs to select input from these devices independently from \
+each other and independently from the core devices."
+
+LICENSE = "MIT & MIT"
+LIC_FILES_CHKSUM = "file://COPYING;md5=17b064789fab936a1c58c4e13d965b0f \
+ file://src/XIGetDevFocus.c;endline=23;md5=cdfb0d435a33ec57ea0d1e8e395b729f"
+
+DEPENDS += "libxext xorgproto libxfixes"
+
+PE = "1"
+
+XORG_PN = "libXi"
+XORG_EXT = "tar.xz"
+
+SRC_URI[sha256sum] = "89bfc0e814f288f784202e6e5f9b362b788ccecdeb078670145eacd8749656a7"
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-graphics/xorg-lib/libxi_1.8.bb b/meta/recipes-graphics/xorg-lib/libxi_1.8.bb
deleted file mode 100644
index 2aa75148d1..0000000000
--- a/meta/recipes-graphics/xorg-lib/libxi_1.8.bb
+++ /dev/null
@@ -1,22 +0,0 @@
-require xorg-lib-common.inc
-
-SUMMARY = "XI: X Input extension library"
-
-DESCRIPTION = "libxi is an extension to the X11 protocol to support \
-input devices other than the core X keyboard and pointer. It allows \
-client programs to select input from these devices independently from \
-each other and independently from the core devices."
-
-LICENSE = "MIT & MIT"
-LIC_FILES_CHKSUM = "file://COPYING;md5=17b064789fab936a1c58c4e13d965b0f \
- file://src/XIGetDevFocus.c;endline=23;md5=cdfb0d435a33ec57ea0d1e8e395b729f"
-
-DEPENDS += "libxext xorgproto libxfixes"
-
-PE = "1"
-
-XORG_PN = "libXi"
-
-SRC_URI[sha256sum] = "2ed181446a61c7337576467870bc5336fc9e222a281122d96c4d39a3298bba00"
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-graphics/xorg-lib/libxinerama_1.1.4.bb b/meta/recipes-graphics/xorg-lib/libxinerama_1.1.4.bb
deleted file mode 100644
index edce35da5d..0000000000
--- a/meta/recipes-graphics/xorg-lib/libxinerama_1.1.4.bb
+++ /dev/null
@@ -1,22 +0,0 @@
-require xorg-lib-common.inc
-
-SUMMARY = "Xinerama: Xinerama extension library"
-
-DESCRIPTION = "Xinerama is a simple library designed to interface the \
-Xinerama Extension for retrieving information about physical output \
-devices which may be combined into a single logical X screen."
-
-LICENSE = "MIT"
-LIC_FILES_CHKSUM = "file://COPYING;md5=6f4f634d1643a2e638bba3fcd19c2536 \
- file://src/Xinerama.c;beginline=2;endline=25;md5=fcef273bfb66339256411dd06ea79c02"
-
-DEPENDS += "libxext xorgproto"
-PROVIDES = "xinerama"
-PE = "1"
-
-XORG_PN = "libXinerama"
-
-SRC_URI[md5sum] = "0d5f826a197dae74da67af4a9ef35885"
-SRC_URI[sha256sum] = "0008dbd7ecf717e1e507eed1856ab0d9cf946d03201b85d5dcf61489bb02d720"
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-graphics/xorg-lib/libxinerama_1.1.5.bb b/meta/recipes-graphics/xorg-lib/libxinerama_1.1.5.bb
new file mode 100644
index 0000000000..421b64e68d
--- /dev/null
+++ b/meta/recipes-graphics/xorg-lib/libxinerama_1.1.5.bb
@@ -0,0 +1,20 @@
+require xorg-lib-common.inc
+
+SUMMARY = "Xinerama: Xinerama extension library"
+
+DESCRIPTION = "Xinerama is a simple library designed to interface the \
+Xinerama Extension for retrieving information about physical output \
+devices which may be combined into a single logical X screen."
+
+LICENSE = "MIT"
+LIC_FILES_CHKSUM = "file://COPYING;md5=6f4f634d1643a2e638bba3fcd19c2536 \
+ file://src/Xinerama.c;beginline=2;endline=25;md5=fcef273bfb66339256411dd06ea79c02"
+
+DEPENDS += "libxext xorgproto"
+PROVIDES = "xinerama"
+PE = "1"
+
+XORG_PN = "libXinerama"
+SRC_URI[sha256sum] = "5094d1f0fcc1828cb1696d0d39d9e866ae32520c54d01f618f1a3c1e30c2085c"
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-graphics/xorg-lib/libxkbcommon_1.4.0.bb b/meta/recipes-graphics/xorg-lib/libxkbcommon_1.4.0.bb
deleted file mode 100644
index 34652e9c4e..0000000000
--- a/meta/recipes-graphics/xorg-lib/libxkbcommon_1.4.0.bb
+++ /dev/null
@@ -1,36 +0,0 @@
-SUMMARY = "Generic XKB keymap library"
-DESCRIPTION = "libxkbcommon is a keymap compiler and support library which \
-processes a reduced subset of keymaps as defined by the XKB specification."
-HOMEPAGE = "http://www.xkbcommon.org"
-LIC_FILES_CHKSUM = "file://LICENSE;md5=e525ed9809e1f8a07cf4bce8b09e8b87"
-LICENSE = "MIT & MIT"
-
-DEPENDS = "flex-native bison-native"
-
-SRC_URI = "http://xkbcommon.org/download/${BPN}-${PV}.tar.xz"
-
-SRC_URI[sha256sum] = "106cec5263f9100a7e79b5f7220f889bc78e7d7ffc55d2b6fdb1efefb8024031"
-
-UPSTREAM_CHECK_URI = "http://xkbcommon.org/"
-
-inherit meson pkgconfig
-
-PACKAGECONFIG ?= "${@bb.utils.filter('DISTRO_FEATURES', 'x11 wayland', d)} xkbregistry"
-
-PACKAGECONFIG[docs] = "-Denable-docs=true,-Denable-docs=false,doxygen-native"
-PACKAGECONFIG[wayland] = "-Denable-wayland=true,-Denable-wayland=false,wayland-native wayland wayland-protocols,"
-PACKAGECONFIG[x11] = "-Denable-x11=true,-Denable-x11=false,libxcb xkeyboard-config,"
-PACKAGECONFIG[xkbregistry] = "-Denable-xkbregistry=true,-Denable-xkbregistry=false,libxml2"
-
-PACKAGE_BEFORE_PN += "xkbcli"
-FILES:${PN} = ""
-FILES:xkbcli = "${bindir}/xkbcli ${libexecdir}/xkbcommon/xkbcli-*"
-
-python populate_packages:prepend () {
- # Put the libraries into separate packages to avoid dependency creep
- do_split_packages(d, d.expand('${libdir}'), r'^(lib.*)\.so\.*', '%s', '%s library', extra_depends='', allow_links=True)
-}
-
-# Fix a following runtime error:
-# xkbcommon: ERROR: couldn't find a Compose file for locale "C"
-RDEPENDS:${PN} = "${@bb.utils.contains('DISTRO_FEATURES', 'x11', 'libx11-locale', 'libx11-compose-data', d)}"
diff --git a/meta/recipes-graphics/xorg-lib/libxkbcommon_1.7.0.bb b/meta/recipes-graphics/xorg-lib/libxkbcommon_1.7.0.bb
new file mode 100644
index 0000000000..40cf616f0b
--- /dev/null
+++ b/meta/recipes-graphics/xorg-lib/libxkbcommon_1.7.0.bb
@@ -0,0 +1,40 @@
+SUMMARY = "Generic XKB keymap library"
+DESCRIPTION = "libxkbcommon is a keymap compiler and support library which \
+processes a reduced subset of keymaps as defined by the XKB specification."
+HOMEPAGE = "http://www.xkbcommon.org"
+LIC_FILES_CHKSUM = "file://LICENSE;md5=e525ed9809e1f8a07cf4bce8b09e8b87"
+LICENSE = "MIT & MIT"
+
+DEPENDS = "flex-native bison-native"
+
+SRC_URI = "http://xkbcommon.org/download/${BPN}-${PV}.tar.xz"
+
+SRC_URI[sha256sum] = "65782f0a10a4b455af9c6baab7040e2f537520caa2ec2092805cdfd36863b247"
+
+UPSTREAM_CHECK_URI = "http://xkbcommon.org/"
+
+inherit meson pkgconfig bash-completion
+
+PACKAGECONFIG ?= "${@bb.utils.filter('DISTRO_FEATURES', 'x11 wayland', d)} xkbregistry"
+
+PACKAGECONFIG[docs] = "-Denable-docs=true,-Denable-docs=false,doxygen-native"
+PACKAGECONFIG[wayland] = "-Denable-wayland=true,-Denable-wayland=false,wayland-native wayland wayland-protocols,"
+PACKAGECONFIG[x11] = "-Denable-x11=true,-Denable-x11=false,libxcb xkeyboard-config,"
+PACKAGECONFIG[xkbregistry] = "-Denable-xkbregistry=true,-Denable-xkbregistry=false,libxml2"
+
+PACKAGE_BEFORE_PN += "xkbcli"
+FILES:${PN} = ""
+FILES:xkbcli = "${bindir}/xkbcli ${libexecdir}/xkbcommon/xkbcli-*"
+
+python populate_packages:prepend () {
+ # Put the libraries into separate packages to avoid dependency creep
+ do_split_packages(d, d.expand('${libdir}'), r'^(lib.*)\.so\.*', '%s', '%s library', extra_depends='', allow_links=True)
+}
+
+# Fix a following runtime error:
+# xkbcommon: ERROR: couldn't find a Compose file for locale "C"
+RDEPENDS:${PN} = "${@bb.utils.contains('DISTRO_FEATURES', 'x11', 'libx11-locale', 'libx11-compose-data', d)}"
+
+BBCLASSEXTEND += "native"
+
+CVE_PRODUCT += "xkbcommon:libxkbcommon"
diff --git a/meta/recipes-graphics/xorg-lib/libxkbfile_1.1.0.bb b/meta/recipes-graphics/xorg-lib/libxkbfile_1.1.0.bb
deleted file mode 100644
index 0fd0e3f47f..0000000000
--- a/meta/recipes-graphics/xorg-lib/libxkbfile_1.1.0.bb
+++ /dev/null
@@ -1,18 +0,0 @@
-SUMMARY = "XKB: X Keyboard File manipulation library"
-
-DESCRIPTION = "libxkbfile provides an interface to read and manipulate \
-description files for XKB, the X11 keyboard configuration extension."
-
-require xorg-lib-common.inc
-
-LICENSE = "MIT"
-LIC_FILES_CHKSUM = "file://COPYING;md5=8be7367f7e5d605a426f76bb37d4d61f"
-
-DEPENDS += "virtual/libx11 xorgproto"
-
-PE = "1"
-
-BBCLASSEXTEND = "native"
-
-SRC_URI[md5sum] = "dd7e1e946def674e78c0efbc5c7d5b3b"
-SRC_URI[sha256sum] = "758dbdaa20add2db4902df0b1b7c936564b7376c02a0acd1f2a331bd334b38c7"
diff --git a/meta/recipes-graphics/xorg-lib/libxkbfile_1.1.3.bb b/meta/recipes-graphics/xorg-lib/libxkbfile_1.1.3.bb
new file mode 100644
index 0000000000..aab6284384
--- /dev/null
+++ b/meta/recipes-graphics/xorg-lib/libxkbfile_1.1.3.bb
@@ -0,0 +1,16 @@
+SUMMARY = "XKB: X Keyboard File manipulation library"
+
+DESCRIPTION = "libxkbfile provides an interface to read and manipulate \
+description files for XKB, the X11 keyboard configuration extension."
+
+require xorg-lib-common.inc
+
+LICENSE = "MIT"
+LIC_FILES_CHKSUM = "file://COPYING;md5=8be7367f7e5d605a426f76bb37d4d61f"
+
+DEPENDS += "virtual/libx11 xorgproto"
+
+PE = "1"
+
+BBCLASSEXTEND = "native"
+SRC_URI[sha256sum] = "a9b63eea997abb9ee6a8b4fbb515831c841f471af845a09de443b28003874bec"
diff --git a/meta/recipes-graphics/xorg-lib/libxmu_1.1.3.bb b/meta/recipes-graphics/xorg-lib/libxmu_1.1.3.bb
deleted file mode 100644
index eab3f07512..0000000000
--- a/meta/recipes-graphics/xorg-lib/libxmu_1.1.3.bb
+++ /dev/null
@@ -1,34 +0,0 @@
-SUMMARY = "Xmu and Xmuu: X Miscellaneous Utility libraries"
-
-DESCRIPTION = "The Xmu Library is a collection of miscellaneous (some \
-might say random) utility functions that have been useful in building \
-various applications and widgets. This library is required by the Athena \
-Widgets. A subset of the functions that do not rely on the Athena \
-Widgets (libXaw) or X Toolkit Instrinsics (libXt) are provided in a \
-second library, libXmuu."
-
-require xorg-lib-common.inc
-
-LICENSE = "MIT & MIT"
-LIC_FILES_CHKSUM = "file://COPYING;md5=def3d8e4e9c42004f1941fa22f01dc18"
-
-DEPENDS += "libxt libxext"
-PROVIDES = "xmu"
-
-PE = "1"
-
-XORG_PN = "libXmu"
-
-LEAD_SONAME = "libXmu"
-
-PACKAGES =+ "libxmuu"
-
-FILES:libxmuu = "${libdir}/libXmuu.so.*"
-
-BBCLASSEXTEND = "native"
-
-SRC_URI[md5sum] = "ac774cff8b493f566088a255dbf91201"
-SRC_URI[sha256sum] = "9c343225e7c3dc0904f2122b562278da5fed639b1b5e880d25111561bac5b731"
-
-PACKAGECONFIG ??= "${@bb.utils.filter('DISTRO_FEATURES', 'ipv6', d)}"
-PACKAGECONFIG[ipv6] = "--enable-ipv6,--disable-ipv6,"
diff --git a/meta/recipes-graphics/xorg-lib/libxmu_1.2.0.bb b/meta/recipes-graphics/xorg-lib/libxmu_1.2.0.bb
new file mode 100644
index 0000000000..d97bc7a3dd
--- /dev/null
+++ b/meta/recipes-graphics/xorg-lib/libxmu_1.2.0.bb
@@ -0,0 +1,30 @@
+SUMMARY = "Xmu and Xmuu: X Miscellaneous Utility libraries"
+
+DESCRIPTION = "The Xmu Library is a collection of miscellaneous (some \
+might say random) utility functions that have been useful in building \
+various applications and widgets. This library is required by the Athena \
+Widgets. A subset of the functions that do not rely on the Athena \
+Widgets (libXaw) or X Toolkit Instrinsics (libXt) are provided in a \
+second library, libXmuu."
+
+require xorg-lib-common.inc
+
+LICENSE = "MIT & MIT"
+LIC_FILES_CHKSUM = "file://COPYING;md5=e79ad4fcc53b9bfe0fc38507a56446b9"
+
+DEPENDS += "libxt libxext"
+PROVIDES = "xmu"
+
+PE = "1"
+
+XORG_PN = "libXmu"
+
+LEAD_SONAME = "libXmu"
+
+PACKAGES =+ "libxmuu"
+
+FILES:libxmuu = "${libdir}/libXmuu.so.*"
+
+BBCLASSEXTEND = "native"
+
+SRC_URI[sha256sum] = "072026fe305889538e5b0c5f9cbcd623d2c27d2b85dcd37ca369ab21590b6963"
diff --git a/meta/recipes-graphics/xorg-lib/libxpm_3.5.13.bb b/meta/recipes-graphics/xorg-lib/libxpm_3.5.13.bb
deleted file mode 100644
index 4f0a5d7ba0..0000000000
--- a/meta/recipes-graphics/xorg-lib/libxpm_3.5.13.bb
+++ /dev/null
@@ -1,27 +0,0 @@
-require xorg-lib-common.inc
-
-# libxpm requires xgettext to build
-inherit gettext
-
-SUMMARY = "Xpm: X Pixmap extension library"
-
-DESCRIPTION = "libXpm provides support and common operation for the XPM \
-pixmap format, which is commonly used in legacy X applications. XPM is \
-an extension of the monochrome XBM bitmap specificied in the X \
-protocol."
-
-LICENSE = "MIT"
-LIC_FILES_CHKSUM = "file://COPYING;md5=51f4270b012ecd4ab1a164f5f4ed6cf7"
-DEPENDS += "libxext libsm libxt gettext-native"
-PE = "1"
-
-XORG_PN = "libXpm"
-
-PACKAGES =+ "sxpm cxpm"
-FILES:cxpm = "${bindir}/cxpm"
-FILES:sxpm = "${bindir}/sxpm"
-
-SRC_URI[md5sum] = "6f0ecf8d103d528cfc803aa475137afa"
-SRC_URI[sha256sum] = "9cd1da57588b6cb71450eff2273ef6b657537a9ac4d02d0014228845b935ac25"
-
-BBCLASSEXTEND = "native"
diff --git a/meta/recipes-graphics/xorg-lib/libxpm_3.5.17.bb b/meta/recipes-graphics/xorg-lib/libxpm_3.5.17.bb
new file mode 100644
index 0000000000..8e15ecc0d4
--- /dev/null
+++ b/meta/recipes-graphics/xorg-lib/libxpm_3.5.17.bb
@@ -0,0 +1,27 @@
+require xorg-lib-common.inc
+
+# libxpm requires xgettext to build
+inherit gettext
+
+SUMMARY = "Xpm: X Pixmap extension library"
+
+DESCRIPTION = "libXpm provides support and common operation for the XPM \
+pixmap format, which is commonly used in legacy X applications. XPM is \
+an extension of the monochrome XBM bitmap specificied in the X \
+protocol."
+
+LICENSE = "MIT"
+LIC_FILES_CHKSUM = "file://COPYING;md5=903942ebc9d807dfb68540f40bae5aff"
+DEPENDS += "libxext libsm libxt gettext-native"
+PE = "1"
+
+XORG_PN = "libXpm"
+EXTRA_OECONF += "--disable-open-zfile"
+
+PACKAGES =+ "sxpm cxpm"
+FILES:cxpm = "${bindir}/cxpm"
+FILES:sxpm = "${bindir}/sxpm"
+
+SRC_URI[sha256sum] = "64b31f81019e7d388c822b0b28af8d51c4622b83f1f0cb6fa3fc95e271226e43"
+
+BBCLASSEXTEND = "native"
diff --git a/meta/recipes-graphics/xorg-lib/libxrandr_1.5.2.bb b/meta/recipes-graphics/xorg-lib/libxrandr_1.5.2.bb
deleted file mode 100644
index 482017d2ae..0000000000
--- a/meta/recipes-graphics/xorg-lib/libxrandr_1.5.2.bb
+++ /dev/null
@@ -1,23 +0,0 @@
-SUMMARY = "XRandR: X Resize, Rotate and Reflect extension library"
-
-DESCRIPTION = "The X Resize, Rotate and Reflect Extension, called RandR \
-for short, brings the ability to resize, rotate and reflect the root \
-window of a screen. It is based on the X Resize and Rotate Extension as \
-specified in the Proceedings of the 2001 Usenix Technical Conference \
-[RANDR]."
-
-require xorg-lib-common.inc
-
-LICENSE = "MIT"
-LIC_FILES_CHKSUM = "file://COPYING;md5=c9d1a2118a6cd5727521db8e7a2fee69"
-
-DEPENDS += "virtual/libx11 xorgproto libxrender libxext"
-
-PE = "1"
-
-XORG_PN = "libXrandr"
-
-BBCLASSEXTEND = "native nativesdk"
-
-SRC_URI[md5sum] = "18f3b20d522f45e4dadd34afb5bea048"
-SRC_URI[sha256sum] = "8aea0ebe403d62330bb741ed595b53741acf45033d3bda1792f1d4cc3daee023"
diff --git a/meta/recipes-graphics/xorg-lib/libxrandr_1.5.4.bb b/meta/recipes-graphics/xorg-lib/libxrandr_1.5.4.bb
new file mode 100644
index 0000000000..3e2825b916
--- /dev/null
+++ b/meta/recipes-graphics/xorg-lib/libxrandr_1.5.4.bb
@@ -0,0 +1,22 @@
+SUMMARY = "XRandR: X Resize, Rotate and Reflect extension library"
+
+DESCRIPTION = "The X Resize, Rotate and Reflect Extension, called RandR \
+for short, brings the ability to resize, rotate and reflect the root \
+window of a screen. It is based on the X Resize and Rotate Extension as \
+specified in the Proceedings of the 2001 Usenix Technical Conference \
+[RANDR]."
+
+require xorg-lib-common.inc
+
+LICENSE = "MIT"
+LIC_FILES_CHKSUM = "file://COPYING;md5=c9d1a2118a6cd5727521db8e7a2fee69"
+
+DEPENDS += "virtual/libx11 xorgproto libxrender libxext"
+
+PE = "1"
+
+XORG_PN = "libXrandr"
+
+BBCLASSEXTEND = "native nativesdk"
+
+SRC_URI[sha256sum] = "1ad5b065375f4a85915aa60611cc6407c060492a214d7f9daf214be752c3b4d3"
diff --git a/meta/recipes-graphics/xorg-lib/libxrender_0.9.10.bb b/meta/recipes-graphics/xorg-lib/libxrender_0.9.10.bb
deleted file mode 100644
index 35763a5a26..0000000000
--- a/meta/recipes-graphics/xorg-lib/libxrender_0.9.10.bb
+++ /dev/null
@@ -1,24 +0,0 @@
-SUMMARY = "XRender: X Rendering Extension library"
-
-DESCRIPTION = "The X Rendering Extension (Render) introduces digital \
-image composition as the foundation of a new rendering model within the \
-X Window System. Rendering geometric figures is accomplished by \
-client-side tessellation into either triangles or trapezoids. Text is \
-drawn by loading glyphs into the server and rendering sets of them."
-
-require xorg-lib-common.inc
-
-LICENSE = "MIT"
-LIC_FILES_CHKSUM = "file://COPYING;md5=d8bc71986d3b9b3639f6dfd6fac8f196"
-
-DEPENDS += "virtual/libx11 xorgproto"
-
-PE = "1"
-
-XORG_PN = "libXrender"
-
-BBCLASSEXTEND = "native nativesdk"
-
-SRC_URI[md5sum] = "802179a76bded0b658f4e9ec5e1830a4"
-SRC_URI[sha256sum] = "c06d5979f86e64cabbde57c223938db0b939dff49fdb5a793a1d3d0396650949"
-
diff --git a/meta/recipes-graphics/xorg-lib/libxrender_0.9.11.bb b/meta/recipes-graphics/xorg-lib/libxrender_0.9.11.bb
new file mode 100644
index 0000000000..e138764dff
--- /dev/null
+++ b/meta/recipes-graphics/xorg-lib/libxrender_0.9.11.bb
@@ -0,0 +1,22 @@
+SUMMARY = "XRender: X Rendering Extension library"
+
+DESCRIPTION = "The X Rendering Extension (Render) introduces digital \
+image composition as the foundation of a new rendering model within the \
+X Window System. Rendering geometric figures is accomplished by \
+client-side tessellation into either triangles or trapezoids. Text is \
+drawn by loading glyphs into the server and rendering sets of them."
+
+require xorg-lib-common.inc
+
+LICENSE = "MIT"
+LIC_FILES_CHKSUM = "file://COPYING;md5=d8bc71986d3b9b3639f6dfd6fac8f196"
+
+DEPENDS += "virtual/libx11 xorgproto"
+
+PE = "1"
+
+XORG_PN = "libXrender"
+
+BBCLASSEXTEND = "native nativesdk"
+SRC_URI[sha256sum] = "bc53759a3a83d1ff702fb59641b3d2f7c56e05051fa0cfa93501166fa782dc24"
+
diff --git a/meta/recipes-graphics/xorg-lib/libxres_1.2.1.bb b/meta/recipes-graphics/xorg-lib/libxres_1.2.1.bb
deleted file mode 100644
index 29a799a328..0000000000
--- a/meta/recipes-graphics/xorg-lib/libxres_1.2.1.bb
+++ /dev/null
@@ -1,19 +0,0 @@
-SUMMARY = "XRes: X Resource extension library"
-
-DESCRIPTION = "libXRes provides an X Window System client interface to \
-the Resource extension to the X protocol. The Resource extension allows \
-for X clients to see and monitor the X resource usage of various clients \
-(pixmaps, et al)."
-
-require xorg-lib-common.inc
-
-LICENSE = "MIT"
-LIC_FILES_CHKSUM = "file://COPYING;md5=8c89441a8df261bdc56587465e13c7fa"
-
-DEPENDS += "libxext xorgproto"
-
-PE = "1"
-
-XORG_PN = "libXres"
-
-SRC_URI[sha256sum] = "b6e6fb1ebb61610e56017edd928fb89a5f53b3f4f990078309877468663b2b11"
diff --git a/meta/recipes-graphics/xorg-lib/libxres_1.2.2.bb b/meta/recipes-graphics/xorg-lib/libxres_1.2.2.bb
new file mode 100644
index 0000000000..72b28d9107
--- /dev/null
+++ b/meta/recipes-graphics/xorg-lib/libxres_1.2.2.bb
@@ -0,0 +1,19 @@
+SUMMARY = "XRes: X Resource extension library"
+
+DESCRIPTION = "libXRes provides an X Window System client interface to \
+the Resource extension to the X protocol. The Resource extension allows \
+for X clients to see and monitor the X resource usage of various clients \
+(pixmaps, et al)."
+
+require xorg-lib-common.inc
+
+LICENSE = "MIT"
+LIC_FILES_CHKSUM = "file://COPYING;md5=8c89441a8df261bdc56587465e13c7fa"
+
+DEPENDS += "libxext xorgproto"
+
+PE = "1"
+
+XORG_PN = "libXres"
+
+SRC_URI[sha256sum] = "9a7446f3484b9b7538ac5ee30d2c1ce9e5b7fbbaf1440e02f6cca186a1fa745f"
diff --git a/meta/recipes-graphics/xorg-lib/libxscrnsaver_1.2.3.bb b/meta/recipes-graphics/xorg-lib/libxscrnsaver_1.2.3.bb
deleted file mode 100644
index b52fd00f1a..0000000000
--- a/meta/recipes-graphics/xorg-lib/libxscrnsaver_1.2.3.bb
+++ /dev/null
@@ -1,23 +0,0 @@
-require xorg-lib-common.inc
-
-SUMMARY = "Xscrnsaver: X Screen Saver extension library"
-
-DESCRIPTION = "The X Window System provides support for changing the \
-image on a display screen after a user-settable period of inactivity to \
-avoid burning the cathode ray tube phosphors. However, no interfaces are \
-provided for the user to control the image that is drawn. This extension \
-allows an external \"screen saver\" client to detect when the alternate \
-image is to be displayed and to provide the graphics."
-
-LICENSE = "MIT"
-LIC_FILES_CHKSUM = "file://COPYING;md5=21fd154ee757813632ada871a34113fb"
-
-DEPENDS += "libxext xorgproto"
-PROVIDES = "libxss"
-RREPLACES:${PN} = "libxss"
-PE = "1"
-
-XORG_PN = "libXScrnSaver"
-
-SRC_URI[md5sum] = "eeea9d5af3e6c143d0ea1721d27a5e49"
-SRC_URI[sha256sum] = "f917075a1b7b5a38d67a8b0238eaab14acd2557679835b154cf2bca576e89bf8"
diff --git a/meta/recipes-graphics/xorg-lib/libxscrnsaver_1.2.4.bb b/meta/recipes-graphics/xorg-lib/libxscrnsaver_1.2.4.bb
new file mode 100644
index 0000000000..1b407f52f0
--- /dev/null
+++ b/meta/recipes-graphics/xorg-lib/libxscrnsaver_1.2.4.bb
@@ -0,0 +1,22 @@
+require xorg-lib-common.inc
+
+SUMMARY = "Xscrnsaver: X Screen Saver extension library"
+
+DESCRIPTION = "The X Window System provides support for changing the \
+image on a display screen after a user-settable period of inactivity to \
+avoid burning the cathode ray tube phosphors. However, no interfaces are \
+provided for the user to control the image that is drawn. This extension \
+allows an external \"screen saver\" client to detect when the alternate \
+image is to be displayed and to provide the graphics."
+
+LICENSE = "MIT"
+LIC_FILES_CHKSUM = "file://COPYING;md5=21fd154ee757813632ada871a34113fb"
+
+DEPENDS += "libxext xorgproto"
+PROVIDES = "libxss"
+RREPLACES:${PN} = "libxss"
+PE = "1"
+
+XORG_PN = "libXScrnSaver"
+
+SRC_URI[sha256sum] = "75cd2859f38e207a090cac980d76bc71e9da99d48d09703584e00585abc920fe"
diff --git a/meta/recipes-graphics/xorg-lib/libxshmfence_1.3.2.bb b/meta/recipes-graphics/xorg-lib/libxshmfence_1.3.2.bb
new file mode 100644
index 0000000000..0a7da938e1
--- /dev/null
+++ b/meta/recipes-graphics/xorg-lib/libxshmfence_1.3.2.bb
@@ -0,0 +1,20 @@
+SUMMARY = "Shared memory 'SyncFence' synchronization primitive"
+
+DESCRIPTION = "This library offers a CPU-based synchronization primitive compatible \
+with the X SyncFence objects that can be shared between processes \
+using file descriptor passing."
+
+require xorg-lib-common.inc
+
+LICENSE = "HPND"
+LIC_FILES_CHKSUM = "file://COPYING;md5=47e508ca280fde97906eacb77892c3ac"
+
+DEPENDS += "virtual/libx11"
+
+EXTRA_OECONF += "--with-shared-memory-dir=/dev/shm"
+
+SRC_URI += "file://0001-xshmfence_futex.h-Define-SYS_futex-if-it-does-not-ex.patch"
+
+SRC_URI[sha256sum] = "870df257bc40b126d91b5a8f1da6ca8a524555268c50b59c0acd1a27f361606f"
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-graphics/xorg-lib/libxshmfence_1.3.bb b/meta/recipes-graphics/xorg-lib/libxshmfence_1.3.bb
deleted file mode 100644
index b0b6cb6b56..0000000000
--- a/meta/recipes-graphics/xorg-lib/libxshmfence_1.3.bb
+++ /dev/null
@@ -1,21 +0,0 @@
-SUMMARY = "Shared memory 'SyncFence' synchronization primitive"
-
-DESCRIPTION = "This library offers a CPU-based synchronization primitive compatible \
-with the X SyncFence objects that can be shared between processes \
-using file descriptor passing."
-
-require xorg-lib-common.inc
-
-LICENSE = "HPND"
-LIC_FILES_CHKSUM = "file://COPYING;md5=47e508ca280fde97906eacb77892c3ac"
-
-DEPENDS += "virtual/libx11"
-
-EXTRA_OECONF += "--with-shared-memory-dir=/dev/shm"
-
-SRC_URI += "file://0001-xshmfence_futex.h-Define-SYS_futex-if-it-does-not-ex.patch"
-
-SRC_URI[md5sum] = "42dda8016943dc12aff2c03a036e0937"
-SRC_URI[sha256sum] = "b884300d26a14961a076fbebc762a39831cb75f92bed5ccf9836345b459220c7"
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-graphics/xorg-lib/libxt_1.2.1.bb b/meta/recipes-graphics/xorg-lib/libxt_1.2.1.bb
deleted file mode 100644
index eef561dbba..0000000000
--- a/meta/recipes-graphics/xorg-lib/libxt_1.2.1.bb
+++ /dev/null
@@ -1,31 +0,0 @@
-SUMMARY = "Xt: X Toolkit Intrinsics library"
-
-DESCRIPTION = "The Intrinsics are a programming library tailored to the \
-special requirements of user interface construction within a network \
-window system, specifically the X Window System. The Intrinsics and a \
-widget set make up an X Toolkit. The Intrinsics provide the base \
-mechanism necessary to build a wide variety of interoperating widget \
-sets and application environments. The Intrinsics are a layer on top of \
-Xlib, the C Library X Interface. They extend the fundamental \
-abstractions provided by the X Window System while still remaining \
-independent of any particular user interface policy or style."
-
-require xorg-lib-common.inc
-
-LICENSE = "MIT & MIT"
-LIC_FILES_CHKSUM = "file://COPYING;md5=73d55cea4d27ca1a09a5d23378b3ecf8"
-
-DEPENDS += "util-linux libxcb libsm virtual/libx11 xorgproto libxdmcp"
-PROVIDES = "xt"
-
-PE = "1"
-
-XORG_PN = "libXt"
-
-SRC_URI += "file://libxt_fix_for_x32.patch"
-
-SRC_URI[sha256sum] = "679cc08f1646dbd27f5e48ffe8dd49406102937109130caab02ca32c083a3d60"
-
-BBCLASSEXTEND = "native nativesdk"
-
-EXTRA_OECONF += "--disable-xkb"
diff --git a/meta/recipes-graphics/xorg-lib/libxt_1.3.0.bb b/meta/recipes-graphics/xorg-lib/libxt_1.3.0.bb
new file mode 100644
index 0000000000..1805dae772
--- /dev/null
+++ b/meta/recipes-graphics/xorg-lib/libxt_1.3.0.bb
@@ -0,0 +1,32 @@
+SUMMARY = "Xt: X Toolkit Intrinsics library"
+
+DESCRIPTION = "The Intrinsics are a programming library tailored to the \
+special requirements of user interface construction within a network \
+window system, specifically the X Window System. The Intrinsics and a \
+widget set make up an X Toolkit. The Intrinsics provide the base \
+mechanism necessary to build a wide variety of interoperating widget \
+sets and application environments. The Intrinsics are a layer on top of \
+Xlib, the C Library X Interface. They extend the fundamental \
+abstractions provided by the X Window System while still remaining \
+independent of any particular user interface policy or style."
+
+require xorg-lib-common.inc
+
+LICENSE = "MIT & MIT"
+LIC_FILES_CHKSUM = "file://COPYING;md5=d6e9ca2c4b2276625afe025b0a2a4d8c"
+
+DEPENDS += "util-linux libxcb libsm virtual/libx11 xorgproto libxdmcp"
+PROVIDES = "xt"
+
+PE = "1"
+
+XORG_PN = "libXt"
+XORG_EXT = "tar.xz"
+
+SRC_URI += "file://libxt_fix_for_x32.patch"
+
+SRC_URI[sha256sum] = "52820b3cdb827d08dc90bdfd1b0022a3ad8919b57a39808b12591973b331bf91"
+
+BBCLASSEXTEND = "native nativesdk"
+
+EXTRA_OECONF += "--disable-xkb"
diff --git a/meta/recipes-graphics/xorg-lib/libxtst_1.2.3.bb b/meta/recipes-graphics/xorg-lib/libxtst_1.2.3.bb
deleted file mode 100644
index ebe9be5b9a..0000000000
--- a/meta/recipes-graphics/xorg-lib/libxtst_1.2.3.bb
+++ /dev/null
@@ -1,22 +0,0 @@
-require xorg-lib-common.inc
-
-SUMMARY = "XTest: X Test extension library"
-
-DESCRIPTION = "This extension is a minimal set of client and server \
-extensions required to completely test the X11 server with no user \
-intervention."
-
-LICENSE = "MIT"
-LIC_FILES_CHKSUM = "file://COPYING;md5=bb4f89972c3869f617f61c1a79ad1952 \
- file://src/XTest.c;beginline=2;endline=32;md5=b1c8c9dff842b4d5b89ca5fa32c40e99"
-
-DEPENDS += "libxext xorgproto libxi"
-PROVIDES = "xtst"
-PE = "1"
-
-XORG_PN = "libXtst"
-
-SRC_URI[md5sum] = "ef8c2c1d16a00bd95b9fdcef63b8a2ca"
-SRC_URI[sha256sum] = "4655498a1b8e844e3d6f21f3b2c4e2b571effb5fd83199d428a6ba7ea4bf5204"
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-graphics/xorg-lib/libxtst_1.2.4.bb b/meta/recipes-graphics/xorg-lib/libxtst_1.2.4.bb
new file mode 100644
index 0000000000..db0621ba2e
--- /dev/null
+++ b/meta/recipes-graphics/xorg-lib/libxtst_1.2.4.bb
@@ -0,0 +1,20 @@
+require xorg-lib-common.inc
+
+SUMMARY = "XTest: X Test extension library"
+
+DESCRIPTION = "This extension is a minimal set of client and server \
+extensions required to completely test the X11 server with no user \
+intervention."
+
+LICENSE = "MIT"
+LIC_FILES_CHKSUM = "file://COPYING;md5=bb4f89972c3869f617f61c1a79ad1952 \
+ file://src/XTest.c;beginline=2;endline=32;md5=b1c8c9dff842b4d5b89ca5fa32c40e99"
+
+DEPENDS += "libxext xorgproto libxi"
+PROVIDES = "xtst"
+PE = "1"
+
+XORG_PN = "libXtst"
+SRC_URI[sha256sum] = "84f5f30b9254b4ffee14b5b0940e2622153b0d3aed8286a3c5b7eeb340ca33c8"
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-graphics/xorg-lib/libxv_1.0.11.bb b/meta/recipes-graphics/xorg-lib/libxv_1.0.11.bb
deleted file mode 100644
index 3c8bf08153..0000000000
--- a/meta/recipes-graphics/xorg-lib/libxv_1.0.11.bb
+++ /dev/null
@@ -1,19 +0,0 @@
-SUMMARY = "Xv: X Video extension library"
-
-DESCRIPTION = "libXv provides an X Window System client interface to the \
-X Video extension to the X protocol. The X Video extension allows for \
-accelerated drawing of videos. Hardware adaptors are exposed to \
-clients, which may draw in a number of colourspaces, including YUV."
-
-require xorg-lib-common.inc
-
-LICENSE = "MIT"
-LIC_FILES_CHKSUM = "file://COPYING;md5=827da9afab1f727f2a66574629e0f39c"
-
-DEPENDS += "libxext xorgproto"
-
-XORG_PN = "libXv"
-
-SRC_URI[md5sum] = "210b6ef30dda2256d54763136faa37b9"
-SRC_URI[sha256sum] = "d26c13eac99ac4504c532e8e76a1c8e4bd526471eb8a0a4ff2a88db60cb0b088"
-
diff --git a/meta/recipes-graphics/xorg-lib/libxv_1.0.12.bb b/meta/recipes-graphics/xorg-lib/libxv_1.0.12.bb
new file mode 100644
index 0000000000..7f9c5e5cbc
--- /dev/null
+++ b/meta/recipes-graphics/xorg-lib/libxv_1.0.12.bb
@@ -0,0 +1,18 @@
+SUMMARY = "Xv: X Video extension library"
+
+DESCRIPTION = "libXv provides an X Window System client interface to the \
+X Video extension to the X protocol. The X Video extension allows for \
+accelerated drawing of videos. Hardware adaptors are exposed to \
+clients, which may draw in a number of colourspaces, including YUV."
+
+require xorg-lib-common.inc
+
+LICENSE = "MIT"
+LIC_FILES_CHKSUM = "file://COPYING;md5=827da9afab1f727f2a66574629e0f39c"
+
+DEPENDS += "libxext xorgproto"
+
+XORG_PN = "libXv"
+
+SRC_URI[sha256sum] = "aaf7fa09f689f7a2000fe493c0d64d1487a1210db154053e9e2336b860c63848"
+
diff --git a/meta/recipes-graphics/xorg-lib/libxvmc_1.0.13.bb b/meta/recipes-graphics/xorg-lib/libxvmc_1.0.13.bb
deleted file mode 100644
index d6d009831a..0000000000
--- a/meta/recipes-graphics/xorg-lib/libxvmc_1.0.13.bb
+++ /dev/null
@@ -1,19 +0,0 @@
-SUMMARY = "XvMC: X Video Motion Compensation extension library"
-
-DESCRIPTION = "XvMC extends the X Video extension (Xv) and enables \
-hardware rendered motion compensation support."
-
-require xorg-lib-common.inc
-SRC_URI = "${XORG_MIRROR}/individual/lib/${XORG_PN}-${PV}.tar.xz"
-
-LICENSE = "MIT"
-LIC_FILES_CHKSUM = "file://COPYING;md5=0a207f08d4961489c55046c9a5e500da \
- file://wrapper/XvMCWrapper.c;endline=26;md5=5151daa8172a3f1bb0cb0e0ff157d9de"
-
-DEPENDS += "libxext libxv xorgproto"
-
-PE = "1"
-
-XORG_PN = "libXvMC"
-
-SRC_URI[sha256sum] = "0a9ebe6dea7888a747e5aca1b891d53cd7d3a5f141a9645f77d9b6a12cee657c"
diff --git a/meta/recipes-graphics/xorg-lib/libxvmc_1.0.14.bb b/meta/recipes-graphics/xorg-lib/libxvmc_1.0.14.bb
new file mode 100644
index 0000000000..af18afc62d
--- /dev/null
+++ b/meta/recipes-graphics/xorg-lib/libxvmc_1.0.14.bb
@@ -0,0 +1,19 @@
+SUMMARY = "XvMC: X Video Motion Compensation extension library"
+
+DESCRIPTION = "XvMC extends the X Video extension (Xv) and enables \
+hardware rendered motion compensation support."
+
+require xorg-lib-common.inc
+SRC_URI = "${XORG_MIRROR}/individual/lib/${XORG_PN}-${PV}.tar.xz"
+
+LICENSE = "MIT"
+LIC_FILES_CHKSUM = "file://COPYING;md5=0a207f08d4961489c55046c9a5e500da \
+ file://wrapper/XvMCWrapper.c;endline=26;md5=5151daa8172a3f1bb0cb0e0ff157d9de"
+
+DEPENDS += "libxext libxv xorgproto"
+
+PE = "1"
+
+XORG_PN = "libXvMC"
+
+SRC_URI[sha256sum] = "e4be9eb6b6bafdbbf81f47f7163047215376e45e2dc786d0ea6181c930725ed9"
diff --git a/meta/recipes-graphics/xorg-lib/libxxf86vm_1.1.4.bb b/meta/recipes-graphics/xorg-lib/libxxf86vm_1.1.4.bb
deleted file mode 100644
index 1612a50f19..0000000000
--- a/meta/recipes-graphics/xorg-lib/libxxf86vm_1.1.4.bb
+++ /dev/null
@@ -1,22 +0,0 @@
-SUMMARY = "XFree86-VM: XFree86 video mode extension library"
-
-DESCRIPTION = "libXxf86vm provides an interface to the \
-XFree86-VidModeExtension extension, which allows client applications to \
-get and set video mode timings in extensive detail. It is used by the \
-xvidtune program in particular."
-
-require xorg-lib-common.inc
-
-LICENSE = "MIT"
-LIC_FILES_CHKSUM = "file://COPYING;md5=fa0b9c462d8f2f13eba26492d42ea63d"
-
-DEPENDS += "libxext xorgproto"
-
-PE = "1"
-
-XORG_PN = "libXxf86vm"
-
-SRC_URI[md5sum] = "298b8fff82df17304dfdb5fe4066fe3a"
-SRC_URI[sha256sum] = "afee27f93c5f31c0ad582852c0fb36d50e4de7cd585fcf655e278a633d85cd57"
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-graphics/xorg-lib/libxxf86vm_1.1.5.bb b/meta/recipes-graphics/xorg-lib/libxxf86vm_1.1.5.bb
new file mode 100644
index 0000000000..16fd29ebcd
--- /dev/null
+++ b/meta/recipes-graphics/xorg-lib/libxxf86vm_1.1.5.bb
@@ -0,0 +1,20 @@
+SUMMARY = "XFree86-VM: XFree86 video mode extension library"
+
+DESCRIPTION = "libXxf86vm provides an interface to the \
+XFree86-VidModeExtension extension, which allows client applications to \
+get and set video mode timings in extensive detail. It is used by the \
+xvidtune program in particular."
+
+require xorg-lib-common.inc
+
+LICENSE = "MIT"
+LIC_FILES_CHKSUM = "file://COPYING;md5=fa0b9c462d8f2f13eba26492d42ea63d"
+
+DEPENDS += "libxext xorgproto"
+
+PE = "1"
+
+XORG_PN = "libXxf86vm"
+SRC_URI[sha256sum] = "247fef48b3e0e7e67129e41f1e789e8d006ba47dba1c0cdce684b9b703f888e7"
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-graphics/xorg-lib/pixman_0.40.0.bb b/meta/recipes-graphics/xorg-lib/pixman_0.40.0.bb
deleted file mode 100644
index ccfe277746..0000000000
--- a/meta/recipes-graphics/xorg-lib/pixman_0.40.0.bb
+++ /dev/null
@@ -1,41 +0,0 @@
-SUMMARY = "Pixman: Pixel Manipulation library"
-DESCRIPTION = "Pixman provides a library for manipulating pixel regions \
--- a set of Y-X banded rectangles, image compositing using the \
-Porter/Duff model and implicit mask generation for geometric primitives \
-including trapezoids, triangles, and rectangles."
-HOMEPAGE = "http://www.pixman.org"
-SECTION = "x11/libs"
-DEPENDS = "zlib"
-
-SRC_URI = "https://www.cairographics.org/releases/${BP}.tar.gz \
- file://0001-ARM-qemu-related-workarounds-in-cpu-features-detecti.patch \
- "
-SRC_URI[md5sum] = "73858c0862dd9896fb5f62ae267084a4"
-SRC_URI[sha256sum] = "6d200dec3740d9ec4ec8d1180e25779c00bc749f94278c8b9021f5534db223fc"
-
-# see http://cairographics.org/releases/ - only even minor versions are stable
-UPSTREAM_CHECK_REGEX = "pixman-(?P<pver>\d+\.(\d*[02468])+(\.\d+)+)"
-
-PE = "1"
-
-LICENSE = "MIT & MIT & PD"
-LIC_FILES_CHKSUM = "file://COPYING;md5=14096c769ae0cbb5fcb94ec468be11b3 \
- file://pixman/pixman-matrix.c;endline=21;md5=4a018dff3e4e25302724c88ff95c2456 \
- file://pixman/pixman-arm-neon-asm.h;endline=24;md5=9a9cc1e51abbf1da58f4d9528ec9d49b \
- "
-
-inherit meson pkgconfig
-
-# These are for the tests and demos, which we don't install
-EXTRA_OEMESON = "-Dgtk=disabled -Dlibpng=disabled"
-# ld: pixman/libpixman-mmx.a(pixman-mmx.c.o):
-# linking mips:loongson_2f module with previous mips:isa64 modules
-EXTRA_OEMESON += "-Dloongson-mmi=disabled"
-# disable iwmmxt due to compile fails on most arm platforms.
-EXTRA_OEMESON += "-Diwmmxt=disabled"
-
-EXTRA_OEMESON:append:class-target:powerpc = " ${@bb.utils.contains("TUNE_FEATURES", "altivec", "-Dvmx=enabled", "-Dvmx=disabled", d)}"
-EXTRA_OEMESON:append:class-target:powerpc64 = " ${@bb.utils.contains("TUNE_FEATURES", "altivec", "-Dvmx=enabled", "-Dvmx=disabled", d)}"
-EXTRA_OEMESON:append:class-target:powerpc64le = " ${@bb.utils.contains("TUNE_FEATURES", "altivec", "-Dvmx=enabled", "-Dvmx=disabled", d)}"
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-graphics/xorg-lib/pixman_0.42.2.bb b/meta/recipes-graphics/xorg-lib/pixman_0.42.2.bb
new file mode 100644
index 0000000000..23ae0cbb27
--- /dev/null
+++ b/meta/recipes-graphics/xorg-lib/pixman_0.42.2.bb
@@ -0,0 +1,47 @@
+SUMMARY = "Pixman: Pixel Manipulation library"
+DESCRIPTION = "Pixman provides a library for manipulating pixel regions \
+-- a set of Y-X banded rectangles, image compositing using the \
+Porter/Duff model and implicit mask generation for geometric primitives \
+including trapezoids, triangles, and rectangles."
+HOMEPAGE = "http://www.pixman.org"
+SECTION = "x11/libs"
+DEPENDS = "zlib"
+
+SRC_URI = "https://www.cairographics.org/releases/${BP}.tar.gz \
+ file://0001-ARM-qemu-related-workarounds-in-cpu-features-detecti.patch \
+ "
+SRC_URI[sha256sum] = "ea1480efada2fd948bc75366f7c349e1c96d3297d09a3fe62626e38e234a625e"
+
+# see http://cairographics.org/releases/ - only even minor versions are stable
+UPSTREAM_CHECK_REGEX = "pixman-(?P<pver>\d+\.(\d*[02468])+(\.\d+)+)"
+
+PE = "1"
+
+LICENSE = "MIT & PD"
+LIC_FILES_CHKSUM = "file://COPYING;md5=14096c769ae0cbb5fcb94ec468be11b3 \
+ file://pixman/pixman-matrix.c;endline=21;md5=4a018dff3e4e25302724c88ff95c2456 \
+ file://pixman/pixman-arm-neon-asm.h;endline=24;md5=9a9cc1e51abbf1da58f4d9528ec9d49b \
+ "
+
+inherit meson pkgconfig
+
+# These are for the tests and demos, which we don't install
+EXTRA_OEMESON = "-Dgtk=disabled -Dlibpng=disabled"
+# ld: pixman/libpixman-mmx.a(pixman-mmx.c.o):
+# linking mips:loongson_2f module with previous mips:isa64 modules
+EXTRA_OEMESON += "-Dloongson-mmi=disabled"
+# disable iwmmxt due to compile fails on most arm platforms.
+EXTRA_OEMESON += "-Diwmmxt=disabled"
+
+EXTRA_OEMESON:append:class-target:powerpc = " ${@bb.utils.contains("TUNE_FEATURES", "altivec", "-Dvmx=enabled", "-Dvmx=disabled", d)}"
+EXTRA_OEMESON:append:class-target:powerpc64 = " ${@bb.utils.contains("TUNE_FEATURES", "altivec", "-Dvmx=enabled", "-Dvmx=disabled", d)}"
+EXTRA_OEMESON:append:class-target:powerpc64le = " ${@bb.utils.contains("TUNE_FEATURES", "altivec", "-Dvmx=enabled", "-Dvmx=disabled", d)}"
+
+EXTRA_OEMESON:append:armv7a = "${@bb.utils.contains("TUNE_FEATURES","neon",""," -Dneon=disabled",d)}"
+EXTRA_OEMESON:append:armv7ve = "${@bb.utils.contains("TUNE_FEATURES","neon",""," -Dneon=disabled",d)}"
+
+EXTRA_OEMESON:append:class-native = " -Dopenmp=disabled"
+
+BBCLASSEXTEND = "native nativesdk"
+
+CVE_STATUS[CVE-2023-37769] = "not-applicable-config: stress-test is an uninstalled test"
diff --git a/meta/recipes-graphics/xorg-lib/xcb-util-cursor_0.1.4.bb b/meta/recipes-graphics/xorg-lib/xcb-util-cursor_0.1.4.bb
new file mode 100644
index 0000000000..047697845c
--- /dev/null
+++ b/meta/recipes-graphics/xorg-lib/xcb-util-cursor_0.1.4.bb
@@ -0,0 +1,10 @@
+require recipes-graphics/xorg-lib/xcb-util.inc
+
+SUMMARY = "XCB port of libXcursor"
+
+DEPENDS += "xcb-util xcb-util-renderutil xcb-util-image"
+
+LICENSE = "MIT"
+LIC_FILES_CHKSUM = "file://COPYING;md5=ce469b61c70ff8d7cce0547476891974"
+
+SRC_URI[sha256sum] = "28dcfe90bcab7b3561abe0dd58eb6832aa9cc77cfe42fcdfa4ebe20d605231fb"
diff --git a/meta/recipes-graphics/xorg-lib/xcb-util-image_0.4.0.bb b/meta/recipes-graphics/xorg-lib/xcb-util-image_0.4.0.bb
deleted file mode 100644
index c6bb5a61cd..0000000000
--- a/meta/recipes-graphics/xorg-lib/xcb-util-image_0.4.0.bb
+++ /dev/null
@@ -1,13 +0,0 @@
-require xcb-util.inc
-
-DEPENDS += "xcb-util"
-
-LICENSE = "MIT"
-LIC_FILES_CHKSUM = "file://image/xcb_image.c;endline=24;md5=eafdf965cfb89955fdedf75054223fb4 \
- file://image/xcb_image.h;beginline=4;endline=27;md5=48cd25ae55e7de525fe1e1a3a7672e1c"
-
-SRC_URI += "file://clang.patch \
-"
-
-SRC_URI[md5sum] = "08fe8ffecc8d4e37c0ade7906b3f4c87"
-SRC_URI[sha256sum] = "2db96a37d78831d643538dd1b595d7d712e04bdccf8896a5e18ce0f398ea2ffc"
diff --git a/meta/recipes-graphics/xorg-lib/xcb-util-image_0.4.1.bb b/meta/recipes-graphics/xorg-lib/xcb-util-image_0.4.1.bb
new file mode 100644
index 0000000000..018ac913bf
--- /dev/null
+++ b/meta/recipes-graphics/xorg-lib/xcb-util-image_0.4.1.bb
@@ -0,0 +1,12 @@
+require xcb-util.inc
+
+DEPENDS += "xcb-util"
+
+LICENSE = "MIT"
+LIC_FILES_CHKSUM = "file://image/xcb_image.c;endline=24;md5=eafdf965cfb89955fdedf75054223fb4 \
+ file://image/xcb_image.h;beginline=4;endline=27;md5=48cd25ae55e7de525fe1e1a3a7672e1c"
+
+SRC_URI += "file://clang.patch \
+"
+
+SRC_URI[sha256sum] = "ccad8ee5dadb1271fd4727ad14d9bd77a64e505608766c4e98267d9aede40d3d"
diff --git a/meta/recipes-graphics/xorg-lib/xcb-util-keysyms_0.4.0.bb b/meta/recipes-graphics/xorg-lib/xcb-util-keysyms_0.4.0.bb
deleted file mode 100644
index bca64ed61f..0000000000
--- a/meta/recipes-graphics/xorg-lib/xcb-util-keysyms_0.4.0.bb
+++ /dev/null
@@ -1,9 +0,0 @@
-require xcb-util.inc
-
-LICENSE = "MIT"
-LIC_FILES_CHKSUM = "file://keysyms/keysyms.c;endline=30;md5=2f8de023ed823bb92f0b47900574ea9e \
- "
-SRC_URI[md5sum] = "1022293083eec9e62d5659261c29e367"
-SRC_URI[sha256sum] = "0ef8490ff1dede52b7de533158547f8b454b241aa3e4dcca369507f66f216dd9"
-
-
diff --git a/meta/recipes-graphics/xorg-lib/xcb-util-keysyms_0.4.1.bb b/meta/recipes-graphics/xorg-lib/xcb-util-keysyms_0.4.1.bb
new file mode 100644
index 0000000000..f1a4c3e500
--- /dev/null
+++ b/meta/recipes-graphics/xorg-lib/xcb-util-keysyms_0.4.1.bb
@@ -0,0 +1,8 @@
+require xcb-util.inc
+
+LICENSE = "MIT"
+LIC_FILES_CHKSUM = "file://keysyms/keysyms.c;endline=30;md5=2f8de023ed823bb92f0b47900574ea9e \
+ "
+SRC_URI[sha256sum] = "7c260a5294412aed429df1da2f8afd3bd07b7cba3fec772fba15a613a6d5c638"
+
+
diff --git a/meta/recipes-graphics/xorg-lib/xcb-util-renderutil_0.3.10.bb b/meta/recipes-graphics/xorg-lib/xcb-util-renderutil_0.3.10.bb
new file mode 100644
index 0000000000..6590fa3301
--- /dev/null
+++ b/meta/recipes-graphics/xorg-lib/xcb-util-renderutil_0.3.10.bb
@@ -0,0 +1,9 @@
+require xcb-util.inc
+
+LICENSE = "MIT"
+LIC_FILES_CHKSUM = "file://renderutil/glyph.c;endline=24;md5=c517c483b8d726234ec94f9169236661 \
+ file://renderutil/util.c;endline=20;md5=6e0bfc44fb13298c0f4694eb70dc80d4 \
+ file://renderutil/xcb_renderutil.h;endline=24;md5=d0ddab3052dd4949c93cfcb0891c96df \
+ "
+
+SRC_URI[sha256sum] = "3e15d4f0e22d8ddbfbb9f5d77db43eacd7a304029bf25a6166cc63caa96d04ba"
diff --git a/meta/recipes-graphics/xorg-lib/xcb-util-renderutil_0.3.9.bb b/meta/recipes-graphics/xorg-lib/xcb-util-renderutil_0.3.9.bb
deleted file mode 100644
index 25c22e5fa7..0000000000
--- a/meta/recipes-graphics/xorg-lib/xcb-util-renderutil_0.3.9.bb
+++ /dev/null
@@ -1,10 +0,0 @@
-require xcb-util.inc
-
-LICENSE = "MIT"
-LIC_FILES_CHKSUM = "file://renderutil/glyph.c;endline=24;md5=c517c483b8d726234ec94f9169236661 \
- file://renderutil/util.c;endline=20;md5=6e0bfc44fb13298c0f4694eb70dc80d4 \
- file://renderutil/xcb_renderutil.h;endline=24;md5=d0ddab3052dd4949c93cfcb0891c96df \
- "
-
-SRC_URI[md5sum] = "468b119c94da910e1291f3ffab91019a"
-SRC_URI[sha256sum] = "c6e97e48fb1286d6394dddb1c1732f00227c70bd1bedb7d1acabefdd340bea5b"
diff --git a/meta/recipes-graphics/xorg-lib/xcb-util-wm_0.4.1.bb b/meta/recipes-graphics/xorg-lib/xcb-util-wm_0.4.1.bb
deleted file mode 100644
index 39f1383299..0000000000
--- a/meta/recipes-graphics/xorg-lib/xcb-util-wm_0.4.1.bb
+++ /dev/null
@@ -1,11 +0,0 @@
-require xcb-util.inc
-
-LICENSE = "MIT"
-LIC_FILES_CHKSUM = "file://ewmh/ewmh.c.m4;endline=27;md5=63fd80552d7dc886f4eb267610d5bb9d \
- file://ewmh/xcb_ewmh.h.m4;beginline=4;endline=30;md5=73b589f90ef90b4a43998955daad5cd8 \
- file://icccm/icccm.c;endline=28;md5=e2b3240a0c197e8977adde2a5cf18d50 \
- file://icccm/xcb_icccm.h;beginline=4;endline=31;md5=393772b7b07b9868d479d538238c1d8a \
- "
-
-SRC_URI[md5sum] = "87b19a1cd7bfcb65a24e36c300e03129"
-SRC_URI[sha256sum] = "28bf8179640eaa89276d2b0f1ce4285103d136be6c98262b6151aaee1d3c2a3f"
diff --git a/meta/recipes-graphics/xorg-lib/xcb-util-wm_0.4.2.bb b/meta/recipes-graphics/xorg-lib/xcb-util-wm_0.4.2.bb
new file mode 100644
index 0000000000..65d340f88c
--- /dev/null
+++ b/meta/recipes-graphics/xorg-lib/xcb-util-wm_0.4.2.bb
@@ -0,0 +1,10 @@
+require xcb-util.inc
+
+LICENSE = "MIT"
+LIC_FILES_CHKSUM = "file://ewmh/ewmh.c.m4;endline=27;md5=63fd80552d7dc886f4eb267610d5bb9d \
+ file://ewmh/xcb_ewmh.h.m4;beginline=4;endline=30;md5=73b589f90ef90b4a43998955daad5cd8 \
+ file://icccm/icccm.c;endline=28;md5=e2b3240a0c197e8977adde2a5cf18d50 \
+ file://icccm/xcb_icccm.h;beginline=4;endline=31;md5=393772b7b07b9868d479d538238c1d8a \
+ "
+
+SRC_URI[sha256sum] = "62c34e21d06264687faea7edbf63632c9f04d55e72114aa4a57bb95e4f888a0b"
diff --git a/meta/recipes-graphics/xorg-lib/xcb-util.inc b/meta/recipes-graphics/xorg-lib/xcb-util.inc
index 0e5ab70b2d..9898a6a13e 100644
--- a/meta/recipes-graphics/xorg-lib/xcb-util.inc
+++ b/meta/recipes-graphics/xorg-lib/xcb-util.inc
@@ -16,7 +16,7 @@ SECTION = "x11/libs"
DEPENDS = "libxcb"
DEPENDS += "gperf-native"
-SRC_URI = "http://xcb.freedesktop.org/dist/${BPN}-${PV}.tar.bz2"
+SRC_URI = "http://xcb.freedesktop.org/dist/${BPN}-${PV}.tar.xz"
inherit autotools pkgconfig features_check
diff --git a/meta/recipes-graphics/xorg-lib/xcb-util_0.4.0.bb b/meta/recipes-graphics/xorg-lib/xcb-util_0.4.0.bb
deleted file mode 100644
index 206f07a65c..0000000000
--- a/meta/recipes-graphics/xorg-lib/xcb-util_0.4.0.bb
+++ /dev/null
@@ -1,8 +0,0 @@
-require xcb-util.inc
-
-LICENSE = "MIT"
-LIC_FILES_CHKSUM = "file://src/xcb_aux.c;endline=30;md5=ae305b9c2a38f9ba27060191046a6460 \
- file://src/xcb_event.h;endline=27;md5=627be355aee59e1b8ade80d5bd90fad9"
-
-SRC_URI[md5sum] = "2e97feed81919465a04ccc71e4073313"
-SRC_URI[sha256sum] = "46e49469cb3b594af1d33176cd7565def2be3fa8be4371d62271fabb5eae50e9"
diff --git a/meta/recipes-graphics/xorg-lib/xcb-util_0.4.1.bb b/meta/recipes-graphics/xorg-lib/xcb-util_0.4.1.bb
new file mode 100644
index 0000000000..0da004f907
--- /dev/null
+++ b/meta/recipes-graphics/xorg-lib/xcb-util_0.4.1.bb
@@ -0,0 +1,9 @@
+require xcb-util.inc
+
+SRC_URI = "http://xcb.freedesktop.org/dist/${BPN}-${PV}.tar.xz"
+
+LICENSE = "MIT"
+LIC_FILES_CHKSUM = "file://src/xcb_aux.c;endline=30;md5=ae305b9c2a38f9ba27060191046a6460 \
+ file://src/xcb_event.h;endline=27;md5=627be355aee59e1b8ade80d5bd90fad9"
+
+SRC_URI[sha256sum] = "5abe3bbbd8e54f0fa3ec945291b7e8fa8cfd3cccc43718f8758430f94126e512"
diff --git a/meta/recipes-graphics/xorg-lib/xkeyboard-config_2.35.1.bb b/meta/recipes-graphics/xorg-lib/xkeyboard-config_2.35.1.bb
deleted file mode 100644
index 5215131e3e..0000000000
--- a/meta/recipes-graphics/xorg-lib/xkeyboard-config_2.35.1.bb
+++ /dev/null
@@ -1,30 +0,0 @@
-SUMMARY = "Keyboard configuration database for X Window"
-
-DESCRIPTION = "The non-arch keyboard configuration database for X \
-Window. The goal is to provide the consistent, well-structured, \
-frequently released open source of X keyboard configuration data for X \
-Window System implementations. The project is targeted to XKB-based \
-systems."
-
-HOMEPAGE = "http://freedesktop.org/wiki/Software/XKeyboardConfig"
-BUGTRACKER = "https://bugs.freedesktop.org/enter_bug.cgi?product=xkeyboard-config"
-
-LICENSE = "MIT & MIT"
-LIC_FILES_CHKSUM = "file://COPYING;md5=0e7f21ca7db975c63467d2e7624a12f9"
-
-SRC_URI = "${XORG_MIRROR}/individual/data/xkeyboard-config/${BPN}-${PV}.tar.xz"
-SRC_URI[sha256sum] = "18ce50ff0c74ae6093062bce1aeab3d363913ea35162fe271f8a0ce399de85cc"
-
-SECTION = "x11/libs"
-DEPENDS = "util-macros libxslt-native"
-
-EXTRA_OECONF = "--with-xkb-rules-symlink=xorg --disable-runtime-deps"
-
-FILES:${PN} += "${datadir}/X11/xkb"
-
-inherit meson pkgconfig gettext python3native
-
-do_install:append () {
- install -d ${D}${datadir}/X11/xkb/compiled
- cd ${D}${datadir}/X11/xkb/rules && ln -sf base xorg
-}
diff --git a/meta/recipes-graphics/xorg-lib/xkeyboard-config_2.41.bb b/meta/recipes-graphics/xorg-lib/xkeyboard-config_2.41.bb
new file mode 100644
index 0000000000..79ce7dea35
--- /dev/null
+++ b/meta/recipes-graphics/xorg-lib/xkeyboard-config_2.41.bb
@@ -0,0 +1,32 @@
+SUMMARY = "Keyboard configuration database for X Window"
+
+DESCRIPTION = "The non-arch keyboard configuration database for X \
+Window. The goal is to provide the consistent, well-structured, \
+frequently released open source of X keyboard configuration data for X \
+Window System implementations. The project is targeted to XKB-based \
+systems."
+
+HOMEPAGE = "http://freedesktop.org/wiki/Software/XKeyboardConfig"
+BUGTRACKER = "https://bugs.freedesktop.org/enter_bug.cgi?product=xkeyboard-config"
+
+LICENSE = "MIT & MIT"
+LIC_FILES_CHKSUM = "file://COPYING;md5=8fc8ae699974c360e2e2e883a63ce264"
+
+SRC_URI = "${XORG_MIRROR}/individual/data/xkeyboard-config/${BPN}-${PV}.tar.xz"
+SRC_URI[sha256sum] = "f02cd6b957295e0d50236a3db15825256c92f67ef1f73bf1c77a4b179edf728f"
+
+SECTION = "x11/libs"
+DEPENDS = "util-macros libxslt-native"
+
+EXTRA_OECONF = "--with-xkb-rules-symlink=xorg --disable-runtime-deps"
+
+FILES:${PN} += "${datadir}/X11/xkb"
+
+inherit meson pkgconfig gettext python3native
+
+do_install:append () {
+ install -d ${D}${datadir}/X11/xkb/compiled
+ cd ${D}${datadir}/X11/xkb/rules && ln -sf base xorg
+}
+
+BBCLASSEXTEND += "native"
diff --git a/meta/recipes-graphics/xorg-lib/xorg-lib-common.inc b/meta/recipes-graphics/xorg-lib/xorg-lib-common.inc
index 60bc8c76fa..15c7ecf782 100644
--- a/meta/recipes-graphics/xorg-lib/xorg-lib-common.inc
+++ b/meta/recipes-graphics/xorg-lib/xorg-lib-common.inc
@@ -6,8 +6,9 @@ LICENSE = "MIT"
DEPENDS = "util-macros"
XORG_PN = "${BPN}"
+XORG_EXT ?= "tar.xz"
-SRC_URI = "${XORG_MIRROR}/individual/lib/${XORG_PN}-${PV}.tar.bz2"
+SRC_URI = "${XORG_MIRROR}/individual/lib/${XORG_PN}-${PV}.${XORG_EXT}"
S = "${WORKDIR}/${XORG_PN}-${PV}"
diff --git a/meta/recipes-graphics/xorg-lib/xtrans_1.4.0.bb b/meta/recipes-graphics/xorg-lib/xtrans_1.4.0.bb
deleted file mode 100644
index 08773c8ccc..0000000000
--- a/meta/recipes-graphics/xorg-lib/xtrans_1.4.0.bb
+++ /dev/null
@@ -1,26 +0,0 @@
-SUMMARY = "XTrans: X Transport library"
-
-DESCRIPTION = "The X Transport Interface is intended to combine all \
-system and transport specific code into a single place. This API should \
-be used by all libraries, clients and servers of the X Window System. \
-Use of this API should allow the addition of new types of transports and \
-support for new platforms without making any changes to the source \
-except in the X Transport Interface code."
-
-require xorg-lib-common.inc
-
-LICENSE = "MIT & MIT"
-LIC_FILES_CHKSUM = "file://COPYING;md5=49347921d4d5268021a999f250edc9ca"
-
-SRC_URI += "file://multilibfix.patch"
-
-PE = "1"
-
-RDEPENDS:${PN}-dev = ""
-
-inherit gettext
-
-BBCLASSEXTEND = "native nativesdk"
-
-SRC_URI[md5sum] = "ce2fb8100c6647ee81451ebe388b17ad"
-SRC_URI[sha256sum] = "377c4491593c417946efcd2c7600d1e62639f7a8bbca391887e2c4679807d773"
diff --git a/meta/recipes-graphics/xorg-lib/xtrans_1.5.0.bb b/meta/recipes-graphics/xorg-lib/xtrans_1.5.0.bb
new file mode 100644
index 0000000000..781382e516
--- /dev/null
+++ b/meta/recipes-graphics/xorg-lib/xtrans_1.5.0.bb
@@ -0,0 +1,25 @@
+SUMMARY = "XTrans: X Transport library"
+
+DESCRIPTION = "The X Transport Interface is intended to combine all \
+system and transport specific code into a single place. This API should \
+be used by all libraries, clients and servers of the X Window System. \
+Use of this API should allow the addition of new types of transports and \
+support for new platforms without making any changes to the source \
+except in the X Transport Interface code."
+
+require xorg-lib-common.inc
+
+LICENSE = "MIT"
+LIC_FILES_CHKSUM = "file://COPYING;md5=bc875e1c864f4f62b29f7d8651f627fa"
+
+SRC_URI += "file://multilibfix.patch"
+
+PE = "1"
+
+DEV_PKG_DEPENDENCY = ""
+
+inherit gettext
+
+BBCLASSEXTEND = "native nativesdk"
+
+SRC_URI[sha256sum] = "1ba4b703696bfddbf40bacf25bce4e3efb2a0088878f017a50e9884b0c8fb1bd"
diff --git a/meta/recipes-graphics/xorg-proto/xcb-proto/0001-Fix-install-conflict-when-enable-multilib.patch b/meta/recipes-graphics/xorg-proto/xcb-proto/0001-Fix-install-conflict-when-enable-multilib.patch
new file mode 100644
index 0000000000..4209139da8
--- /dev/null
+++ b/meta/recipes-graphics/xorg-proto/xcb-proto/0001-Fix-install-conflict-when-enable-multilib.patch
@@ -0,0 +1,32 @@
+From fc28149b6b198042c8d29e0931415adad7ed3231 Mon Sep 17 00:00:00 2001
+From: Wang Mingyu <wangmy@fujitsu.com>
+Date: Thu, 16 Mar 2023 08:03:47 +0000
+Subject: [PATCH] Fix install conflict when enable multilib.
+
+Automake defines pythondir in terms of libdir (rather than hardcode 'lib' or query it from python as automake upstream does)
+https://git.yoctoproject.org/poky/tree/meta/recipes-devtools/automake/automake/0001-automake-Update-for-python.m4-to-respect-libdir.patch
+
+So libdir needs to be defined when pythondir is defined.
+
+Upstream-Status: Inappropriate
+
+Signed-off-by: Wang Mingyu <wangmy@fujitsu.com>
+---
+ Makefile.am | 2 +-
+ 1 file changed, 1 insertion(+), 1 deletion(-)
+
+diff --git a/Makefile.am b/Makefile.am
+index 8b57a83..580f5bc 100644
+--- a/Makefile.am
++++ b/Makefile.am
+@@ -1,6 +1,6 @@
+ SUBDIRS = src xcbgen
+
+-pkgconfigdir = $(datarootdir)/pkgconfig
++pkgconfigdir = $(libdir)/pkgconfig
+ pkgconfig_DATA = xcb-proto.pc
+
+ EXTRA_DIST=doc xcb-proto.pc.in autogen.sh README.md
+--
+2.34.1
+
diff --git a/meta/recipes-graphics/xorg-proto/xcb-proto/0001-xcb-proto.pc.in-reinstate-libdir.patch b/meta/recipes-graphics/xorg-proto/xcb-proto/0001-xcb-proto.pc.in-reinstate-libdir.patch
new file mode 100644
index 0000000000..fe0a96c207
--- /dev/null
+++ b/meta/recipes-graphics/xorg-proto/xcb-proto/0001-xcb-proto.pc.in-reinstate-libdir.patch
@@ -0,0 +1,29 @@
+From ed499cffabe64fc1dc5898cb69ffa7e62c19e9d8 Mon Sep 17 00:00:00 2001
+From: Alexander Kanavin <alex@linutronix.de>
+Date: Tue, 5 Jul 2022 21:19:20 +0200
+Subject: [PATCH] xcb-proto.pc.in: reinstate libdir
+
+Otherwise the following occurs when building libxcb:
+| checking for xcb-proto >= 1.14... no
+| configure: error: Package requirements (xcb-proto >= 1.14) were not met:
+|
+| Variable 'libdir' not defined in '/srv/work/alex/poky/build-64-alt/tmp/work/core2-64-poky-linux/libxcb/1.15-r0/recipe-sysroot/usr/share/pkgconfig/xcb-proto.pc'
+
+Upstream-Status: Submitted [https://gitlab.freedesktop.org/xorg/proto/xcbproto/-/merge_requests/31]
+Signed-off-by: Alexander Kanavin <alex@linutronix.de>
+---
+ xcb-proto.pc.in | 1 +
+ 1 file changed, 1 insertion(+)
+
+diff --git a/xcb-proto.pc.in b/xcb-proto.pc.in
+index 0822b18..c7c8b47 100644
+--- a/xcb-proto.pc.in
++++ b/xcb-proto.pc.in
+@@ -2,6 +2,7 @@ prefix=@prefix@
+ exec_prefix=@exec_prefix@
+ datarootdir=@datarootdir@
+ datadir=@datadir@
++libdir=@libdir@
+ xcbincludedir=${pc_sysrootdir}@xcbincludedir@
+ PYTHON_PREFIX=@PYTHON_PREFIX@
+ pythondir=${pc_sysrootdir}@pythondir@
diff --git a/meta/recipes-graphics/xorg-proto/xcb-proto_1.15.bb b/meta/recipes-graphics/xorg-proto/xcb-proto_1.15.bb
deleted file mode 100644
index f050ed366c..0000000000
--- a/meta/recipes-graphics/xorg-proto/xcb-proto_1.15.bb
+++ /dev/null
@@ -1,28 +0,0 @@
-SUMMARY = "XCB: The X protocol C binding headers"
-DESCRIPTION = "Function prototypes for the X protocol C-language Binding \
-(XCB). XCB is a replacement for Xlib featuring a small footprint, \
-latency hiding, direct access to the protocol, improved threading \
-support, and extensibility."
-HOMEPAGE = "http://xcb.freedesktop.org"
-BUGTRACKER = "https://bugs.freedesktop.org/enter_bug.cgi?product=XCB"
-
-SECTION = "x11/libs"
-LICENSE = "MIT"
-LIC_FILES_CHKSUM = "file://COPYING;md5=d763b081cb10c223435b01e00dc0aba7 \
- file://src/dri2.xml;beginline=2;endline=28;md5=f8763b13ff432e8597e0d610cf598e65"
-
-SRC_URI = "https://xorg.freedesktop.org/archive/individual/proto/${BP}.tar.xz"
-SRC_URI[sha256sum] = "d34c3b264e8365d16fa9db49179cfa3e9952baaf9275badda0f413966b65955f"
-
-inherit autotools pkgconfig python3native
-
-PACKAGES += "python-xcbgen"
-
-FILES:${PN} = ""
-FILES:${PN}-dev += "${datadir}/xcb/*.xml ${datadir}/xcb/*.xsd"
-FILES:python-xcbgen = "${PYTHON_SITEPACKAGES_DIR}"
-
-RDEPENDS:${PN}-dev = ""
-RRECOMMENDS:${PN}-dbg = "${PN}-dev (= ${EXTENDPKGV})"
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-graphics/xorg-proto/xcb-proto_1.16.0.bb b/meta/recipes-graphics/xorg-proto/xcb-proto_1.16.0.bb
new file mode 100644
index 0000000000..67c1e8d97d
--- /dev/null
+++ b/meta/recipes-graphics/xorg-proto/xcb-proto_1.16.0.bb
@@ -0,0 +1,31 @@
+SUMMARY = "XCB: The X protocol C binding headers"
+DESCRIPTION = "Function prototypes for the X protocol C-language Binding \
+(XCB). XCB is a replacement for Xlib featuring a small footprint, \
+latency hiding, direct access to the protocol, improved threading \
+support, and extensibility."
+HOMEPAGE = "http://xcb.freedesktop.org"
+BUGTRACKER = "https://bugs.freedesktop.org/enter_bug.cgi?product=XCB"
+
+SECTION = "x11/libs"
+LICENSE = "MIT"
+LIC_FILES_CHKSUM = "file://COPYING;md5=d763b081cb10c223435b01e00dc0aba7 \
+ file://src/dri2.xml;beginline=2;endline=28;md5=f8763b13ff432e8597e0d610cf598e65"
+
+SRC_URI = "https://xorg.freedesktop.org/archive/individual/proto/${BP}.tar.xz \
+ file://0001-xcb-proto.pc.in-reinstate-libdir.patch \
+ file://0001-Fix-install-conflict-when-enable-multilib.patch \
+ "
+SRC_URI[sha256sum] = "a75a1848ad2a89a82d841a51be56ce988ff3c63a8d6bf4383ae3219d8d915119"
+
+inherit autotools pkgconfig python3native
+
+PACKAGES += "python-xcbgen"
+
+FILES:${PN} = ""
+FILES:${PN}-dev += "${datadir}/xcb/*.xml ${datadir}/xcb/*.xsd"
+FILES:python-xcbgen = "${PYTHON_SITEPACKAGES_DIR}"
+
+DEV_PKG_DEPENDENCY = ""
+RRECOMMENDS:${PN}-dbg = "${PN}-dev (= ${EXTENDPKGV})"
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-graphics/xorg-proto/xorgproto_2022.1.bb b/meta/recipes-graphics/xorg-proto/xorgproto_2022.1.bb
deleted file mode 100644
index 7786318476..0000000000
--- a/meta/recipes-graphics/xorg-proto/xorgproto_2022.1.bb
+++ /dev/null
@@ -1,25 +0,0 @@
-SUMMARY = "X Window System unified protocol definitions"
-DESCRIPTION = "This package provides the headers and specification documents defining \
-the core protocol and (many) extensions for the X Window System"
-HOMEPAGE = "http://www.x.org"
-BUGTRACKER = "https://bugs.freedesktop.org/enter_bug.cgi?product=xorg"
-
-SECTION = "x11/libs"
-LICENSE = "MIT"
-LIC_FILES_CHKSUM = "file://COPYING-x11proto;md5=dfc4bd2b0568b31725b85b0604e69b56"
-
-SRC_URI = "${XORG_MIRROR}/individual/proto/${BP}.tar.bz2"
-SRC_URI[sha256sum] = "1d2dcc66963f234d2c1e1f8d98a0d3e8725149cdac0a263df4097593c48bc2a6"
-
-inherit meson
-
-PACKAGECONFIG ??= ""
-PACKAGECONFIG[legacy] = "-Dlegacy=true,-Dlegacy=false"
-
-# Datadir only used to install pc files, $datadir/pkgconfig
-datadir="${libdir}"
-# ${PN} is empty so we need to tweak -dev and -dbg package dependencies
-RDEPENDS:${PN}-dev = ""
-RRECOMMENDS:${PN}-dbg = "${PN}-dev (= ${EXTENDPKGV})"
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-graphics/xorg-proto/xorgproto_2024.1.bb b/meta/recipes-graphics/xorg-proto/xorgproto_2024.1.bb
new file mode 100644
index 0000000000..3f56e21a55
--- /dev/null
+++ b/meta/recipes-graphics/xorg-proto/xorgproto_2024.1.bb
@@ -0,0 +1,25 @@
+SUMMARY = "X Window System unified protocol definitions"
+DESCRIPTION = "This package provides the headers and specification documents defining \
+the core protocol and (many) extensions for the X Window System"
+HOMEPAGE = "http://www.x.org"
+BUGTRACKER = "https://bugs.freedesktop.org/enter_bug.cgi?product=xorg"
+
+SECTION = "x11/libs"
+LICENSE = "MIT"
+LIC_FILES_CHKSUM = "file://COPYING-x11proto;md5=0b9fe3db4015bcbe920e7c67a39ee3f1"
+
+SRC_URI = "${XORG_MIRROR}/individual/proto/${BP}.tar.xz"
+SRC_URI[sha256sum] = "372225fd40815b8423547f5d890c5debc72e88b91088fbfb13158c20495ccb59"
+
+inherit meson
+
+PACKAGECONFIG ??= ""
+PACKAGECONFIG[legacy] = "-Dlegacy=true,-Dlegacy=false"
+
+# Datadir only used to install pc files, $datadir/pkgconfig
+datadir="${libdir}"
+# ${PN} is empty so we need to tweak -dev and -dbg package dependencies
+DEV_PKG_DEPENDENCY = ""
+RRECOMMENDS:${PN}-dbg = "${PN}-dev (= ${EXTENDPKGV})"
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-graphics/xorg-util/makedepend_1.0.6.bb b/meta/recipes-graphics/xorg-util/makedepend_1.0.6.bb
deleted file mode 100644
index 2760edd173..0000000000
--- a/meta/recipes-graphics/xorg-util/makedepend_1.0.6.bb
+++ /dev/null
@@ -1,21 +0,0 @@
-require xorg-util-common.inc
-
-SUMMARY = "create dependencies in makefiles"
-
-DESCRIPTION = "The makedepend program reads each sourcefile in sequence \
-and parses it like a C-preprocessor, processing \
-all #include, #define, #undef, #ifdef, #ifndef, #endif, #if, #elif \
-and #else directives so that it can correctly tell which #include, \
-directives would be used in a compilation. Any #include, directives \
-can reference files having other #include directives, and parsing will \
-occur in these files as well."
-
-DEPENDS = "xorgproto util-macros"
-PE = "1"
-
-BBCLASSEXTEND = "native"
-
-LIC_FILES_CHKSUM = "file://COPYING;md5=43a6eda34b48ee821b3b66f4f753ce4f"
-
-SRC_URI[md5sum] = "8a583055c84914060c35b6c2f963fc07"
-SRC_URI[sha256sum] = "845f6708fc850bf53f5b1d0fb4352c4feab3949f140b26f71b22faba354c3365"
diff --git a/meta/recipes-graphics/xorg-util/makedepend_1.0.9.bb b/meta/recipes-graphics/xorg-util/makedepend_1.0.9.bb
new file mode 100644
index 0000000000..6db7d04079
--- /dev/null
+++ b/meta/recipes-graphics/xorg-util/makedepend_1.0.9.bb
@@ -0,0 +1,20 @@
+require xorg-util-common.inc
+
+SUMMARY = "create dependencies in makefiles"
+
+DESCRIPTION = "The makedepend program reads each sourcefile in sequence \
+and parses it like a C-preprocessor, processing \
+all #include, #define, #undef, #ifdef, #ifndef, #endif, #if, #elif \
+and #else directives so that it can correctly tell which #include, \
+directives would be used in a compilation. Any #include, directives \
+can reference files having other #include directives, and parsing will \
+occur in these files as well."
+
+DEPENDS = "xorgproto util-macros"
+PE = "1"
+
+BBCLASSEXTEND = "native"
+
+LIC_FILES_CHKSUM = "file://COPYING;md5=43a6eda34b48ee821b3b66f4f753ce4f"
+
+SRC_URI[sha256sum] = "bc94ffda6cd4671603a69c39dbe8f96b317707b9185b2aaa3b54b5d134b41884"
diff --git a/meta/recipes-graphics/xorg-util/util-macros/0001-xorg-macros.m4.in-do-not-run-AC_CANONICAL_HOST-in-ma.patch b/meta/recipes-graphics/xorg-util/util-macros/0001-xorg-macros.m4.in-do-not-run-AC_CANONICAL_HOST-in-ma.patch
new file mode 100644
index 0000000000..e08b586bf0
--- /dev/null
+++ b/meta/recipes-graphics/xorg-util/util-macros/0001-xorg-macros.m4.in-do-not-run-AC_CANONICAL_HOST-in-ma.patch
@@ -0,0 +1,28 @@
+From 6afaaf164ab9370204856961a92ad8ee523a8293 Mon Sep 17 00:00:00 2001
+From: Alexander Kanavin <alex@linutronix.de>
+Date: Fri, 3 Mar 2023 13:24:58 +0100
+Subject: [PATCH] xorg-macros.m4.in: do not run AC_CANONICAL_HOST in manpage
+ section macro
+
+This doesn't work when building allarch items, as it calls into
+config.sub with the host triplet, and config.sub can't match
+it against any architeture it knows.
+
+Upstream-Status: Inappropriate [oe specific]
+Signed-off-by: Alexander Kanavin <alex@linutronix.de>
+---
+ xorg-macros.m4.in | 1 -
+ 1 file changed, 1 deletion(-)
+
+diff --git a/xorg-macros.m4.in b/xorg-macros.m4.in
+index 3b8b8e9..6541236 100644
+--- a/xorg-macros.m4.in
++++ b/xorg-macros.m4.in
+@@ -111,7 +111,6 @@ AC_SUBST(TRADITIONALCPPFLAGS)
+ # Added AC_PROG_SED in version 1.8
+
+ AC_DEFUN([XORG_MANPAGE_SECTIONS],[
+-AC_REQUIRE([AC_CANONICAL_HOST])
+ AC_REQUIRE([AC_PROG_SED])
+
+ case $host_os in
diff --git a/meta/recipes-graphics/xorg-util/util-macros_1.19.3.bb b/meta/recipes-graphics/xorg-util/util-macros_1.19.3.bb
deleted file mode 100644
index 0164256eb4..0000000000
--- a/meta/recipes-graphics/xorg-util/util-macros_1.19.3.bb
+++ /dev/null
@@ -1,19 +0,0 @@
-SUMMARY = "X autotools macros"
-
-DESCRIPTION = "M4 autotools macros used by various X.org programs."
-
-require xorg-util-common.inc
-
-LICENSE = "MIT & MIT"
-LIC_FILES_CHKSUM = "file://COPYING;md5=1970511fddd439b07a6ba789d28ff662"
-
-PE = "1"
-
-SRC_URI[md5sum] = "66cb74d4a0120a06e32c3b01c29417d8"
-SRC_URI[sha256sum] = "624bb6c3a4613d18114a7e3849a3d70f2d7af9dc6eabeaba98060d87e3aef35b"
-
-# ${PN} is empty so we need to tweak -dev and -dbg package dependencies
-RDEPENDS:${PN}-dev = ""
-RRECOMMENDS:${PN}-dbg = "${PN}-dev (= ${EXTENDPKGV})"
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-graphics/xorg-util/util-macros_1.20.0.bb b/meta/recipes-graphics/xorg-util/util-macros_1.20.0.bb
new file mode 100644
index 0000000000..cd4bc387a2
--- /dev/null
+++ b/meta/recipes-graphics/xorg-util/util-macros_1.20.0.bb
@@ -0,0 +1,20 @@
+SUMMARY = "X autotools macros"
+
+DESCRIPTION = "M4 autotools macros used by various X.org programs."
+
+require xorg-util-common.inc
+
+LICENSE = "MIT & MIT"
+LIC_FILES_CHKSUM = "file://COPYING;md5=42ba50748cb7ccf8739424e5e2072b02"
+
+PE = "1"
+
+SRC_URI += "file://0001-xorg-macros.m4.in-do-not-run-AC_CANONICAL_HOST-in-ma.patch"
+
+SRC_URI[sha256sum] = "8daf36913d551a90fd1013cb078401375dabae021cb4713b9b256a70f00eeb74"
+
+# ${PN} is empty so we need to tweak -dev and -dbg package dependencies
+DEV_PKG_DEPENDENCY = ""
+RRECOMMENDS:${PN}-dbg = "${PN}-dev (= ${EXTENDPKGV})"
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-graphics/xorg-xserver/xserver-xf86-config/qemuarm/xorg.conf b/meta/recipes-graphics/xorg-xserver/xserver-xf86-config/qemuarm/xorg.conf
index f4cd139e8d..3eb380a0a4 100644
--- a/meta/recipes-graphics/xorg-xserver/xserver-xf86-config/qemuarm/xorg.conf
+++ b/meta/recipes-graphics/xorg-xserver/xserver-xf86-config/qemuarm/xorg.conf
@@ -35,4 +35,8 @@ Section "ServerLayout"
Identifier "Default Layout"
Screen "Default Screen"
Option "AllowEmptyInput" "no"
+ Option "BlankTime" "0"
+ Option "StandbyTime" "0"
+ Option "SuspendTime" "0"
+ Option "OffTime" "0"
EndSection
diff --git a/meta/recipes-graphics/xorg-xserver/xserver-xf86-config/qemuppc/xorg.conf b/meta/recipes-graphics/xorg-xserver/xserver-xf86-config/qemuppc/xorg.conf
index f4cd139e8d..3eb380a0a4 100644
--- a/meta/recipes-graphics/xorg-xserver/xserver-xf86-config/qemuppc/xorg.conf
+++ b/meta/recipes-graphics/xorg-xserver/xserver-xf86-config/qemuppc/xorg.conf
@@ -35,4 +35,8 @@ Section "ServerLayout"
Identifier "Default Layout"
Screen "Default Screen"
Option "AllowEmptyInput" "no"
+ Option "BlankTime" "0"
+ Option "StandbyTime" "0"
+ Option "SuspendTime" "0"
+ Option "OffTime" "0"
EndSection
diff --git a/meta/recipes-graphics/xorg-xserver/xserver-xf86-config/qemush4/xorg.conf b/meta/recipes-graphics/xorg-xserver/xserver-xf86-config/qemush4/xorg.conf
index f4cd139e8d..3eb380a0a4 100644
--- a/meta/recipes-graphics/xorg-xserver/xserver-xf86-config/qemush4/xorg.conf
+++ b/meta/recipes-graphics/xorg-xserver/xserver-xf86-config/qemush4/xorg.conf
@@ -35,4 +35,8 @@ Section "ServerLayout"
Identifier "Default Layout"
Screen "Default Screen"
Option "AllowEmptyInput" "no"
+ Option "BlankTime" "0"
+ Option "StandbyTime" "0"
+ Option "SuspendTime" "0"
+ Option "OffTime" "0"
EndSection
diff --git a/meta/recipes-graphics/xorg-xserver/xserver-xf86-config/qemux86-64/xorg.conf b/meta/recipes-graphics/xorg-xserver/xserver-xf86-config/qemux86-64/xorg.conf
index c12d92c201..c01c3331c5 100644
--- a/meta/recipes-graphics/xorg-xserver/xserver-xf86-config/qemux86-64/xorg.conf
+++ b/meta/recipes-graphics/xorg-xserver/xserver-xf86-config/qemux86-64/xorg.conf
@@ -30,4 +30,8 @@ Section "ServerLayout"
Identifier "Default Layout"
Screen "Default Screen"
Option "AllowEmptyInput" "no"
+ Option "BlankTime" "0"
+ Option "StandbyTime" "0"
+ Option "SuspendTime" "0"
+ Option "OffTime" "0"
EndSection
diff --git a/meta/recipes-graphics/xorg-xserver/xserver-xf86-config/qemux86/xorg.conf b/meta/recipes-graphics/xorg-xserver/xserver-xf86-config/qemux86/xorg.conf
index c12d92c201..c01c3331c5 100644
--- a/meta/recipes-graphics/xorg-xserver/xserver-xf86-config/qemux86/xorg.conf
+++ b/meta/recipes-graphics/xorg-xserver/xserver-xf86-config/qemux86/xorg.conf
@@ -30,4 +30,8 @@ Section "ServerLayout"
Identifier "Default Layout"
Screen "Default Screen"
Option "AllowEmptyInput" "no"
+ Option "BlankTime" "0"
+ Option "StandbyTime" "0"
+ Option "SuspendTime" "0"
+ Option "OffTime" "0"
EndSection
diff --git a/meta/recipes-graphics/xorg-xserver/xserver-xf86-config_0.1.bb b/meta/recipes-graphics/xorg-xserver/xserver-xf86-config_0.1.bb
index f82a6960e6..03f14cef2b 100644
--- a/meta/recipes-graphics/xorg-xserver/xserver-xf86-config_0.1.bb
+++ b/meta/recipes-graphics/xorg-xserver/xserver-xf86-config_0.1.bb
@@ -3,7 +3,6 @@ HOMEPAGE = "http://www.x.org"
SECTION = "x11/base"
LICENSE = "MIT"
LIC_FILES_CHKSUM = "file://${COREBASE}/meta/COPYING.MIT;md5=3da9cfbcb788c80a0384361b4de20420"
-PR = "r33"
SRC_URI = "file://xorg.conf"
diff --git a/meta/recipes-graphics/xorg-xserver/xserver-xorg.inc b/meta/recipes-graphics/xorg-xserver/xserver-xorg.inc
index 057a1ba6ad..22f7d9a8ad 100644
--- a/meta/recipes-graphics/xorg-xserver/xserver-xorg.inc
+++ b/meta/recipes-graphics/xorg-xserver/xserver-xorg.inc
@@ -20,14 +20,15 @@ SRC_URI = "${XORG_MIRROR}/individual/xserver/${XORG_PN}-${PV}.tar.xz"
UPSTREAM_CHECK_REGEX = "xorg-server-(?P<pver>\d+(\.(?!99)\d+)+)\.tar"
CVE_PRODUCT = "xorg-server x_server"
-# This is specific to Debian's xserver-wrapper.c
-CVE_CHECK_IGNORE += "CVE-2011-4613"
-# As per upstream, exploiting this flaw is non-trivial and it requires exact
-# timing on the behalf of the attacker. Many graphical applications exit if their
-# connection to the X server is lost, so a typical desktop session is either
-# impossible or difficult to exploit. There is currently no upstream patch
-# available for this flaw.
-CVE_CHECK_IGNORE += "CVE-2020-25697"
+
+CVE_STATUS[CVE-2011-4613] = "not-applicable-platform: This is specific to Debian's xserver-wrapper.c"
+CVE_STATUS[CVE-2020-25697] = "upstream-wontfix: \
+As per upstream, exploiting this flaw is non-trivial and it requires exact \
+timing on the behalf of the attacker. Many graphical applications exit if their \
+connection to the X server is lost, so a typical desktop session is either \
+impossible or difficult to exploit. There is currently no upstream patch \
+available for this flaw."
+CVE_STATUS[CVE-2022-3553] = "cpe-incorrect: This is specific to XQuartz, which is the macOS X server port"
S = "${WORKDIR}/${XORG_PN}-${PV}"
@@ -80,9 +81,9 @@ PACKAGES =+ "${PN}-sdl \
SUMMARY:xf86-video-modesetting = "X.Org X server -- modesetting display driver"
INSANE_SKIP:${MLPREFIX}xf86-video-modesetting = "xorg-driver-abi"
-XSERVER_RRECOMMENDS = "xkeyboard-config rgb xserver-xf86-config xkbcomp xf86-input-libinput"
-RRECOMMENDS:${PN} += "${XSERVER_RRECOMMENDS}"
-RRECOMMENDS:${PN}-xwayland += "${XSERVER_RRECOMMENDS}"
+XSERVER_RDEPENDS = "xkeyboard-config rgb xserver-xf86-config xkbcomp xf86-input-libinput"
+RDEPENDS:${PN} += "${XSERVER_RDEPENDS}"
+RDEPENDS:${PN}-xwayland += "${XSERVER_RDEPENDS}"
RDEPENDS:${PN}-xvfb += "xkeyboard-config"
RDEPENDS:${PN}-module-exa = "${PN} (= ${EXTENDPKGV})"
@@ -115,7 +116,6 @@ FILES:xf86-video-modesetting = "${libdir}/xorg/modules/drivers/modesetting_drv.s
EXTRA_OEMESON += " \
-Dxnest=false \
- -Dxvfb=true \
-Ddtrace=false \
-Dint10=x86emu \
-Dxkb_output_dir=/var/lib/xkb \
@@ -137,6 +137,7 @@ PACKAGECONFIG[glamor] = "-Dglamor=true,-Dglamor=false,libepoxy virtual/libgbm,li
PACKAGECONFIG[unwind] = "-Dlibunwind=true,-Dlibunwind=false,libunwind"
PACKAGECONFIG[systemd-logind] = "-Dsystemd_logind=true,-Dsystemd_logind=false,dbus,"
PACKAGECONFIG[xinerama] = "-Dxinerama=true,-Dxinerama=false"
+PACKAGECONFIG[xvfb] = "-Dxvfb=true,-Dxvfb=false"
# Xorg requires a SHA1 implementation, pick one
XORG_CRYPTO ??= "openssl"
@@ -174,3 +175,5 @@ python populate_packages:prepend() {
d.appendVar("RPROVIDES:" + pn, " " + get_abi("input"))
d.appendVar("RPROVIDES:" + pn, " " + get_abi("video"))
}
+
+CVE_STATUS[CVE-2023-5574] = "${@bb.utils.contains('PACKAGECONFIG', 'xvfb', '', 'not-applicable-config: specific to Xvfb', d)}"
diff --git a/meta/recipes-graphics/xorg-xserver/xserver-xorg/0001-render-Fix-build-with-gcc-12.patch b/meta/recipes-graphics/xorg-xserver/xserver-xorg/0001-render-Fix-build-with-gcc-12.patch
deleted file mode 100644
index df9332fae7..0000000000
--- a/meta/recipes-graphics/xorg-xserver/xserver-xorg/0001-render-Fix-build-with-gcc-12.patch
+++ /dev/null
@@ -1,90 +0,0 @@
-From 12041ad0610f1345d6b9994c32943fd4dd01f65d Mon Sep 17 00:00:00 2001
-From: Olivier Fourdan <ofourdan@redhat.com>
-Date: Thu, 20 Jan 2022 10:20:38 +0100
-Subject: [PATCH] render: Fix build with gcc 12
-MIME-Version: 1.0
-Content-Type: text/plain; charset=UTF-8
-Content-Transfer-Encoding: 8bit
-
-The xserver fails to compile with the latest gcc 12:
-
- render/picture.c: In function ‘CreateSolidPicture’:
- render/picture.c:874:26: error: array subscript ‘union _SourcePict[0]’ is partly outside array bounds of ‘unsigned char[16]’ [-Werror=array-bounds]
- 874 | pPicture->pSourcePict->type = SourcePictTypeSolidFill;
- | ^~
- render/picture.c:868:45: note: object of size 16 allocated by ‘malloc’
- 868 | pPicture->pSourcePict = (SourcePictPtr) malloc(sizeof(PictSolidFill));
- | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~
- render/picture.c: In function ‘CreateLinearGradientPicture’:
- render/picture.c:906:26: error: array subscript ‘union _SourcePict[0]’ is partly outside array bounds of ‘unsigned char[32]’ [-Werror=array-bounds]
- 906 | pPicture->pSourcePict->linear.type = SourcePictTypeLinear;
- | ^~
- render/picture.c:899:45: note: object of size 32 allocated by ‘malloc’
- 899 | pPicture->pSourcePict = (SourcePictPtr) malloc(sizeof(PictLinearGradient));
- | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
- render/picture.c: In function ‘CreateConicalGradientPicture’:
- render/picture.c:989:26: error: array subscript ‘union _SourcePict[0]’ is partly outside array bounds of ‘unsigned char[32]’ [-Werror=array-bounds]
- 989 | pPicture->pSourcePict->conical.type = SourcePictTypeConical;
- | ^~
- render/picture.c:982:45: note: object of size 32 allocated by ‘malloc’
- 982 | pPicture->pSourcePict = (SourcePictPtr) malloc(sizeof(PictConicalGradient));
- | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
- cc1: some warnings being treated as errors
- ninja: build stopped: subcommand failed.
-
-This is because gcc 12 has become stricter and raises a warning now.
-
-Fix the warning/error by allocating enough memory to store the union
-struct.
-
-Upstream-Status: Backport [https://gitlab.freedesktop.org/xorg/xserver/-/commit/c6b0dcb82d4db07a2f32c09a8c09c85a5f57248e]
-Signed-off-by: Olivier Fourdan <ofourdan@redhat.com>
-Acked-by: Michel Dänzer <mdaenzer@redhat.com>
-Closes: https://gitlab.freedesktop.org/xorg/xserver/-/issues/1256
----
- render/picture.c | 8 ++++----
- 1 file changed, 4 insertions(+), 4 deletions(-)
-
-diff --git a/render/picture.c b/render/picture.c
-index afa0d25..2be4b19 100644
---- a/render/picture.c
-+++ b/render/picture.c
-@@ -865,7 +865,7 @@ CreateSolidPicture(Picture pid, xRenderColor * color, int *error)
- }
-
- pPicture->id = pid;
-- pPicture->pSourcePict = (SourcePictPtr) malloc(sizeof(PictSolidFill));
-+ pPicture->pSourcePict = (SourcePictPtr) malloc(sizeof(SourcePict));
- if (!pPicture->pSourcePict) {
- *error = BadAlloc;
- free(pPicture);
-@@ -896,7 +896,7 @@ CreateLinearGradientPicture(Picture pid, xPointFixed * p1, xPointFixed * p2,
- }
-
- pPicture->id = pid;
-- pPicture->pSourcePict = (SourcePictPtr) malloc(sizeof(PictLinearGradient));
-+ pPicture->pSourcePict = (SourcePictPtr) malloc(sizeof(SourcePict));
- if (!pPicture->pSourcePict) {
- *error = BadAlloc;
- free(pPicture);
-@@ -936,7 +936,7 @@ CreateRadialGradientPicture(Picture pid, xPointFixed * inner,
- }
-
- pPicture->id = pid;
-- pPicture->pSourcePict = (SourcePictPtr) malloc(sizeof(PictRadialGradient));
-+ pPicture->pSourcePict = (SourcePictPtr) malloc(sizeof(SourcePict));
- if (!pPicture->pSourcePict) {
- *error = BadAlloc;
- free(pPicture);
-@@ -979,7 +979,7 @@ CreateConicalGradientPicture(Picture pid, xPointFixed * center, xFixed angle,
- }
-
- pPicture->id = pid;
-- pPicture->pSourcePict = (SourcePictPtr) malloc(sizeof(PictConicalGradient));
-+ pPicture->pSourcePict = (SourcePictPtr) malloc(sizeof(SourcePict));
- if (!pPicture->pSourcePict) {
- *error = BadAlloc;
- free(pPicture);
---
-2.35.1
-
diff --git a/meta/recipes-graphics/xorg-xserver/xserver-xorg_21.1.11.bb b/meta/recipes-graphics/xorg-xserver/xserver-xorg_21.1.11.bb
new file mode 100644
index 0000000000..6506d775ca
--- /dev/null
+++ b/meta/recipes-graphics/xorg-xserver/xserver-xorg_21.1.11.bb
@@ -0,0 +1,28 @@
+require xserver-xorg.inc
+
+SRC_URI += "file://0001-xf86pciBus.c-use-Intel-ddx-only-for-pre-gen4-hardwar.patch \
+ file://0001-Avoid-duplicate-definitions-of-IOPortBase.patch \
+ "
+SRC_URI[sha256sum] = "1d3dadbd57fb86b16a018e9f5f957aeeadf744f56c0553f55737628d06d326ef"
+
+# These extensions are now integrated into the server, so declare the migration
+# path for in-place upgrades.
+
+RREPLACES:${PN} = "${PN}-extension-dri \
+ ${PN}-extension-dri2 \
+ ${PN}-extension-record \
+ ${PN}-extension-extmod \
+ ${PN}-extension-dbe \
+ "
+RPROVIDES:${PN} = "${PN}-extension-dri \
+ ${PN}-extension-dri2 \
+ ${PN}-extension-record \
+ ${PN}-extension-extmod \
+ ${PN}-extension-dbe \
+ "
+RCONFLICTS:${PN} = "${PN}-extension-dri \
+ ${PN}-extension-dri2 \
+ ${PN}-extension-record \
+ ${PN}-extension-extmod \
+ ${PN}-extension-dbe \
+ "
diff --git a/meta/recipes-graphics/xorg-xserver/xserver-xorg_21.1.3.bb b/meta/recipes-graphics/xorg-xserver/xserver-xorg_21.1.3.bb
deleted file mode 100644
index 1f53ab5177..0000000000
--- a/meta/recipes-graphics/xorg-xserver/xserver-xorg_21.1.3.bb
+++ /dev/null
@@ -1,29 +0,0 @@
-require xserver-xorg.inc
-
-SRC_URI += "file://0001-xf86pciBus.c-use-Intel-ddx-only-for-pre-gen4-hardwar.patch \
- file://0001-Avoid-duplicate-definitions-of-IOPortBase.patch \
- file://0001-render-Fix-build-with-gcc-12.patch \
- "
-SRC_URI[sha256sum] = "61d6aad5b6b47a116b960bd7f0cba4ee7e6da95d6bb0b127bde75d7d1acdebe5"
-
-# These extensions are now integrated into the server, so declare the migration
-# path for in-place upgrades.
-
-RREPLACES:${PN} = "${PN}-extension-dri \
- ${PN}-extension-dri2 \
- ${PN}-extension-record \
- ${PN}-extension-extmod \
- ${PN}-extension-dbe \
- "
-RPROVIDES:${PN} = "${PN}-extension-dri \
- ${PN}-extension-dri2 \
- ${PN}-extension-record \
- ${PN}-extension-extmod \
- ${PN}-extension-dbe \
- "
-RCONFLICTS:${PN} = "${PN}-extension-dri \
- ${PN}-extension-dri2 \
- ${PN}-extension-record \
- ${PN}-extension-extmod \
- ${PN}-extension-dbe \
- "
diff --git a/meta/recipes-graphics/xrestop/xrestop_0.4.bb b/meta/recipes-graphics/xrestop/xrestop_0.4.bb
index 0b5ab506e3..a6b6c4238d 100644
--- a/meta/recipes-graphics/xrestop/xrestop_0.4.bb
+++ b/meta/recipes-graphics/xrestop/xrestop_0.4.bb
@@ -10,7 +10,6 @@ LIC_FILES_CHKSUM = "file://COPYING;md5=94d55d512a9ba36caa9b7df079bae19f \
file://xrestop.c;endline=18;md5=730876c30f0d8a928676bcd1242a3b35"
SECTION = "x11/utils"
-PR = "r3"
DEPENDS = "libxres libxext virtual/libx11 ncurses"
diff --git a/meta/recipes-graphics/xwayland/xwayland_22.1.1.bb b/meta/recipes-graphics/xwayland/xwayland_22.1.1.bb
deleted file mode 100644
index b512b9932d..0000000000
--- a/meta/recipes-graphics/xwayland/xwayland_22.1.1.bb
+++ /dev/null
@@ -1,45 +0,0 @@
-SUMMARY = "XWayland is an X Server that runs under Wayland."
-DESCRIPTION = "XWayland is an X Server running as a Wayland client, \
-and thus is capable of displaying native X11 client applications in a \
-Wayland compositor environment. The goal of XWayland is to facilitate \
-the transition from X Window System to Wayland environments, providing \
-a way to run unported applications in the meantime."
-HOMEPAGE = "https://fedoraproject.org/wiki/Changes/XwaylandStandalone"
-
-LICENSE = "MIT"
-LIC_FILES_CHKSUM = "file://COPYING;md5=5df87950af51ac2c5822094553ea1880"
-
-SRC_URI = "https://www.x.org/archive/individual/xserver/xwayland-${PV}.tar.xz"
-SRC_URI[sha256sum] = "f5d0e0ba37e19bb87c62f61da5970bd204939f2120620964bed4cc8495baa657"
-
-UPSTREAM_CHECK_REGEX = "xwayland-(?P<pver>\d+(\.(?!90\d)\d+)+)\.tar"
-
-inherit meson features_check pkgconfig
-REQUIRED_DISTRO_FEATURES = "x11 opengl"
-
-DEPENDS += "xorgproto xtrans pixman libxkbfile libxfont2 wayland wayland-native wayland-protocols libdrm libepoxy libxcvt"
-
-OPENGL_PKGCONFIGS = "glx glamor dri3"
-PACKAGECONFIG ??= "${XORG_CRYPTO} \
- ${@bb.utils.contains('DISTRO_FEATURES', 'opengl', '${OPENGL_PKGCONFIGS}', '', d)} \
-"
-PACKAGECONFIG[dri3] = "-Ddri3=true,-Ddri3=false"
-PACKAGECONFIG[glx] = "-Dglx=true,-Dglx=false,virtual/libgl virtual/libx11"
-PACKAGECONFIG[glamor] = "-Dglamor=true,-Dglamor=false,libepoxy virtual/libgbm,libegl"
-PACKAGECONFIG[unwind] = "-Dlibunwind=true,-Dlibunwind=false,libunwind"
-PACKAGECONFIG[xinerama] = "-Dxinerama=true,-Dxinerama=false"
-
-# Xorg requires a SHA1 implementation, pick one
-XORG_CRYPTO ??= "openssl"
-PACKAGECONFIG[openssl] = "-Dsha1=libcrypto,,openssl"
-PACKAGECONFIG[nettle] = "-Dsha1=libnettle,,nettle"
-PACKAGECONFIG[gcrypt] = "-Dsha1=libgcrypt,,libgcrypt"
-
-do_install:append() {
- # remove files not needed and clashing with xserver-xorg
- rm -rf ${D}/${libdir}/xorg/
-}
-
-FILES:${PN} += "${libdir}/xorg/protocol.txt"
-
-RDEPENDS:${PN} += "xkbcomp"
diff --git a/meta/recipes-graphics/xwayland/xwayland_23.2.5.bb b/meta/recipes-graphics/xwayland/xwayland_23.2.5.bb
new file mode 100644
index 0000000000..b934a873d1
--- /dev/null
+++ b/meta/recipes-graphics/xwayland/xwayland_23.2.5.bb
@@ -0,0 +1,45 @@
+SUMMARY = "XWayland is an X Server that runs under Wayland."
+DESCRIPTION = "XWayland is an X Server running as a Wayland client, \
+and thus is capable of displaying native X11 client applications in a \
+Wayland compositor environment. The goal of XWayland is to facilitate \
+the transition from X Window System to Wayland environments, providing \
+a way to run unported applications in the meantime."
+HOMEPAGE = "https://fedoraproject.org/wiki/Changes/XwaylandStandalone"
+
+LICENSE = "MIT"
+LIC_FILES_CHKSUM = "file://COPYING;md5=5df87950af51ac2c5822094553ea1880"
+
+SRC_URI = "https://www.x.org/archive/individual/xserver/xwayland-${PV}.tar.xz"
+SRC_URI[sha256sum] = "33ec7ff2687a59faaa52b9b09aa8caf118e7ecb6aed8953f526a625ff9f4bd90"
+
+UPSTREAM_CHECK_REGEX = "xwayland-(?P<pver>\d+(\.(?!90\d)\d+)+)\.tar"
+
+inherit meson features_check pkgconfig
+REQUIRED_DISTRO_FEATURES = "x11 opengl"
+
+DEPENDS += "xorgproto xtrans pixman libxkbfile libxfont2 wayland wayland-native wayland-protocols libdrm libepoxy libxcvt"
+
+OPENGL_PKGCONFIGS = "glx glamor dri3"
+PACKAGECONFIG ??= "${XORG_CRYPTO} \
+ ${@bb.utils.contains('DISTRO_FEATURES', 'opengl', '${OPENGL_PKGCONFIGS}', '', d)} \
+"
+PACKAGECONFIG[dri3] = "-Ddri3=true,-Ddri3=false,libxshmfence"
+PACKAGECONFIG[glx] = "-Dglx=true,-Dglx=false,virtual/libgl virtual/libx11"
+PACKAGECONFIG[glamor] = "-Dglamor=true,-Dglamor=false,libepoxy virtual/libgbm,libegl"
+PACKAGECONFIG[unwind] = "-Dlibunwind=true,-Dlibunwind=false,libunwind"
+PACKAGECONFIG[xinerama] = "-Dxinerama=true,-Dxinerama=false"
+
+# Xorg requires a SHA1 implementation, pick one
+XORG_CRYPTO ??= "openssl"
+PACKAGECONFIG[openssl] = "-Dsha1=libcrypto,,openssl"
+PACKAGECONFIG[nettle] = "-Dsha1=libnettle,,nettle"
+PACKAGECONFIG[gcrypt] = "-Dsha1=libgcrypt,,libgcrypt"
+
+do_install:append() {
+ # remove files not needed and clashing with xserver-xorg
+ rm -rf ${D}/${libdir}/xorg/
+}
+
+FILES:${PN} += "${libdir}/xorg/protocol.txt"
+
+RDEPENDS:${PN} += "xkbcomp"
diff --git a/meta/recipes-kernel/blktrace/blktrace_git.bb b/meta/recipes-kernel/blktrace/blktrace_git.bb
index bba5e04504..d4f5bac84f 100644
--- a/meta/recipes-kernel/blktrace/blktrace_git.bb
+++ b/meta/recipes-kernel/blktrace/blktrace_git.bb
@@ -12,9 +12,9 @@ DEPENDS = "libaio"
SRCREV = "366d30b9cdb20345c5d064af850d686da79b89eb"
-PV = "1.3.0+git${SRCPV}"
+PV = "1.3.0+git"
-SRC_URI = "git://git.kernel.dk/blktrace.git;branch=master"
+SRC_URI = "git://git.kernel.dk/blktrace.git;branch=master;protocol=https"
S = "${WORKDIR}/git"
diff --git a/meta/recipes-kernel/cryptodev/cryptodev-linux_1.12.bb b/meta/recipes-kernel/cryptodev/cryptodev-linux_1.13.bb
index d5ea9d8529..d5ea9d8529 100644
--- a/meta/recipes-kernel/cryptodev/cryptodev-linux_1.12.bb
+++ b/meta/recipes-kernel/cryptodev/cryptodev-linux_1.13.bb
diff --git a/meta/recipes-kernel/cryptodev/cryptodev-module_1.12.bb b/meta/recipes-kernel/cryptodev/cryptodev-module_1.13.bb
index 5192cf03ed..5192cf03ed 100644
--- a/meta/recipes-kernel/cryptodev/cryptodev-module_1.12.bb
+++ b/meta/recipes-kernel/cryptodev/cryptodev-module_1.13.bb
diff --git a/meta/recipes-kernel/cryptodev/cryptodev-tests_1.12.bb b/meta/recipes-kernel/cryptodev/cryptodev-tests_1.12.bb
deleted file mode 100644
index c541478796..0000000000
--- a/meta/recipes-kernel/cryptodev/cryptodev-tests_1.12.bb
+++ /dev/null
@@ -1,22 +0,0 @@
-require cryptodev.inc
-
-SUMMARY = "A test suite for /dev/crypto device driver"
-
-DEPENDS += "openssl"
-
-SRC_URI += " \
-file://0001-Add-the-compile-and-install-rules-for-cryptodev-test.patch \
-file://0001-tests-Makefile-do-not-use-Werror.patch \
-"
-
-EXTRA_OEMAKE='KERNEL_DIR="${STAGING_EXECPREFIXDIR}" PREFIX="${D}"'
-
-do_compile() {
- oe_runmake testprogs
-}
-
-do_install() {
- oe_runmake install_tests
-}
-
-FILES:${PN} = "${bindir}/*"
diff --git a/meta/recipes-kernel/cryptodev/cryptodev-tests_1.13.bb b/meta/recipes-kernel/cryptodev/cryptodev-tests_1.13.bb
new file mode 100644
index 0000000000..458ad8ecf2
--- /dev/null
+++ b/meta/recipes-kernel/cryptodev/cryptodev-tests_1.13.bb
@@ -0,0 +1,21 @@
+require cryptodev.inc
+
+SUMMARY = "A test suite for /dev/crypto device driver"
+
+DEPENDS += "openssl"
+
+SRC_URI += " \
+ file://0001-tests-Makefile-do-not-use-Werror.patch \
+ "
+
+EXTRA_OEMAKE='KERNEL_DIR="${STAGING_EXECPREFIXDIR}" PREFIX="${D}"'
+
+do_compile() {
+ oe_runmake tests
+}
+
+do_install() {
+ oe_runmake install_tests
+}
+
+FILES:${PN} = "${bindir}/*"
diff --git a/meta/recipes-kernel/cryptodev/cryptodev.inc b/meta/recipes-kernel/cryptodev/cryptodev.inc
index ef342a157c..64a9c2926b 100644
--- a/meta/recipes-kernel/cryptodev/cryptodev.inc
+++ b/meta/recipes-kernel/cryptodev/cryptodev.inc
@@ -10,7 +10,8 @@ LIC_FILES_CHKSUM = "file://COPYING;md5=b234ee4d69f5fce4486a80fdaf4a4263"
SRC_URI = "git://github.com/cryptodev-linux/cryptodev-linux;branch=master;protocol=https \
"
-SRCREV = "e0c25e289d6baf1d83c2b9cb523d3bc237d0c0c9"
+SRCREV = "bb8bc7cf60d2c0b097c8b3b0e807f805b577a53f"
+PV = "1.13+git${SRCPV}"
S = "${WORKDIR}/git"
diff --git a/meta/recipes-kernel/cryptodev/files/0001-Add-the-compile-and-install-rules-for-cryptodev-test.patch b/meta/recipes-kernel/cryptodev/files/0001-Add-the-compile-and-install-rules-for-cryptodev-test.patch
deleted file mode 100644
index 84fd27e681..0000000000
--- a/meta/recipes-kernel/cryptodev/files/0001-Add-the-compile-and-install-rules-for-cryptodev-test.patch
+++ /dev/null
@@ -1,66 +0,0 @@
-From 1980a8f4779a955e73285e7a0d86549b69bea5c8 Mon Sep 17 00:00:00 2001
-From: Yu Zongchun <b40527@freescale.com>
-Date: Sun, 28 Apr 2013 14:39:22 +0800
-Subject: [PATCH] Add the compile and install rules for cryptodev tests folder
-
-This is required to install the cryptodev tests folder to rootfs
-
-Signed-off-by: Yu Zongchun <b40527@freescale.com>
-
-Upstream-Status: Pending
-
----
- Makefile | 6 ++++++
- tests/Makefile | 8 ++++++++
- 2 files changed, 14 insertions(+), 0 deletions(-)
-
-Index: git/Makefile
-===================================================================
---- git.orig/Makefile
-+++ git/Makefile
-@@ -35,6 +35,9 @@ modules_install:
- $(MAKE) $(KERNEL_MAKE_OPTS) modules_install
- install -m 644 -D crypto/cryptodev.h $(DESTDIR)/$(includedir)/crypto/cryptodev.h
-
-+install_tests:
-+ make -C tests install DESTDIR=$(PREFIX)
-+
- clean:
- $(MAKE) $(KERNEL_MAKE_OPTS) clean
- rm -f $(hostprogs) *~
-@@ -43,6 +46,9 @@ clean:
- check:
- CFLAGS=$(CRYPTODEV_CFLAGS) KERNEL_DIR=$(KERNEL_DIR) $(MAKE) -C tests check
-
-+testprogs:
-+ KERNEL_DIR=$(KERNEL_DIR) make -C tests testprogs
-+
- CPOPTS =
- ifneq ($(SHOW_TYPES),)
- CPOPTS += --show-types
-Index: git/tests/Makefile
-===================================================================
---- git.orig/tests/Makefile
-+++ git/tests/Makefile
-@@ -23,6 +23,12 @@ bindir = $(execprefix)/bin
-
- all: $(hostprogs)
-
-+install:
-+ install -d $(DESTDIR)/usr/bin/tests_cryptodev
-+ for bin in $(hostprogs); do \
-+ install -m 755 $${bin} $(DESTDIR)/usr/bin/tests_cryptodev/; \
-+ done
-+
- check: $(hostprogs)
- ./cipher
- ./hmac
-@@ -38,6 +44,8 @@ install:
- install -m 755 $$prog $(DESTDIR)/$(bindir); \
- done
-
-+testprogs: $(hostprogs)
-+
- clean:
- rm -f *.o *~ $(hostprogs)
-
diff --git a/meta/recipes-kernel/cryptodev/files/0001-Disable-installing-header-file-provided-by-another-p.patch b/meta/recipes-kernel/cryptodev/files/0001-Disable-installing-header-file-provided-by-another-p.patch
index 885b5823e4..c7fdef4da4 100644
--- a/meta/recipes-kernel/cryptodev/files/0001-Disable-installing-header-file-provided-by-another-p.patch
+++ b/meta/recipes-kernel/cryptodev/files/0001-Disable-installing-header-file-provided-by-another-p.patch
@@ -1,4 +1,4 @@
-From 8a884f55bd1527baa82fab68c186ba546273860c Mon Sep 17 00:00:00 2001
+From 66d85d3f26e21cf7c38b27de0dcc42376f5d853e Mon Sep 17 00:00:00 2001
From: Denys Dmytriyenko <denys@ti.com>
Date: Sun, 6 Apr 2014 19:51:39 -0400
Subject: [PATCH] Disable installing header file provided by another package
@@ -6,19 +6,20 @@ Subject: [PATCH] Disable installing header file provided by another package
Signed-off-by: Denys Dmytriyenko <denys@ti.com>
Upstream-Status: Inappropriate [ OE specific ]
+
---
Makefile | 1 -
1 file changed, 1 deletion(-)
diff --git a/Makefile b/Makefile
-index 5a080e0..bf02396 100644
+index d83aee6..c8d8ae5 100644
--- a/Makefile
+++ b/Makefile
-@@ -33,7 +33,6 @@ install: modules_install
+@@ -36,7 +36,6 @@ install: modules_install
modules_install:
$(MAKE) $(KERNEL_MAKE_OPTS) modules_install
- install -m 644 -D crypto/cryptodev.h $(DESTDIR)/$(includedir)/crypto/cryptodev.h
- clean:
- $(MAKE) $(KERNEL_MAKE_OPTS) clean
+ install_tests: tests
+ $(MAKE) -C tests install DESTDIR=$(PREFIX)
diff --git a/meta/recipes-kernel/cryptodev/files/0001-tests-Makefile-do-not-use-Werror.patch b/meta/recipes-kernel/cryptodev/files/0001-tests-Makefile-do-not-use-Werror.patch
index 347a4aef0f..3285548a57 100644
--- a/meta/recipes-kernel/cryptodev/files/0001-tests-Makefile-do-not-use-Werror.patch
+++ b/meta/recipes-kernel/cryptodev/files/0001-tests-Makefile-do-not-use-Werror.patch
@@ -1,4 +1,4 @@
-From e2c2895d52761ddc6384a31364236dd13f677c34 Mon Sep 17 00:00:00 2001
+From 47438e53e1156db0916c0f4683a24fe4d82152f2 Mon Sep 17 00:00:00 2001
From: Alexander Kanavin <alex@linutronix.de>
Date: Fri, 10 Sep 2021 10:44:42 +0200
Subject: [PATCH] tests/Makefile: do not use -Werror
@@ -8,6 +8,7 @@ Reported at https://github.com/cryptodev-linux/cryptodev-linux/issues/67
Upstream-Status: Inappropriate [upstream needs to update the code]
Signed-off-by: Alexander Kanavin <alex@linutronix.de>
+
---
tests/Makefile | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/meta/recipes-kernel/dtc/dtc/0001-fdtdump-fix-Werror-int-to-pointer-cast.patch b/meta/recipes-kernel/dtc/dtc/0001-fdtdump-fix-Werror-int-to-pointer-cast.patch
deleted file mode 100644
index 4c3e34b1ff..0000000000
--- a/meta/recipes-kernel/dtc/dtc/0001-fdtdump-fix-Werror-int-to-pointer-cast.patch
+++ /dev/null
@@ -1,40 +0,0 @@
-Fix the build of fdtdump with mingw.
-
-Upstream-Status: Backport
-Signed-off-by: Ross Burton <ross.burton@arm.com>
-
-From ae0ce1fa7f4d679b5f8df1fc0e797246e43547fe Mon Sep 17 00:00:00 2001
-From: =?UTF-8?q?Marc-Andr=C3=A9=20Lureau?= <marcandre.lureau@redhat.com>
-Date: Wed, 25 Aug 2021 16:13:50 +0400
-Subject: [PATCH] fdtdump: fix -Werror=int-to-pointer-cast
-MIME-Version: 1.0
-Content-Type: text/plain; charset=UTF-8
-Content-Transfer-Encoding: 8bit
-
-With mingw64-gcc, the compiler complains with various warnings:
-error: cast from pointer to integer of different size [-Werror=pointer-to-int-cast]
-
-Signed-off-by: Marc-André Lureau <marcandre.lureau@redhat.com>
-Message-Id: <20210825121350.213551-1-marcandre.lureau@redhat.com>
-Acked-by: Rob Herring <robh@kernel.org>
-Signed-off-by: David Gibson <david@gibson.dropbear.id.au>
----
- fdtdump.c | 2 +-
- 1 file changed, 1 insertion(+), 1 deletion(-)
-
-diff --git a/fdtdump.c b/fdtdump.c
-index d9fb374..483f367 100644
---- a/fdtdump.c
-+++ b/fdtdump.c
-@@ -21,7 +21,7 @@
- #define MAX_VERSION 17
-
- #define ALIGN(x, a) (((x) + ((a) - 1)) & ~((a) - 1))
--#define PALIGN(p, a) ((void *)(ALIGN((unsigned long)(p), (a))))
-+#define PALIGN(p, a) ((void *)(ALIGN((uintptr_t)(p), (a))))
- #define GET_CELL(p) (p += 4, *((const fdt32_t *)(p-4)))
-
- static const char *tagname(uint32_t tag)
---
-2.25.1
-
diff --git a/meta/recipes-kernel/dtc/dtc/0001-meson.build-bump-version-to-1.7.0.patch b/meta/recipes-kernel/dtc/dtc/0001-meson.build-bump-version-to-1.7.0.patch
new file mode 100644
index 0000000000..79a3b92b44
--- /dev/null
+++ b/meta/recipes-kernel/dtc/dtc/0001-meson.build-bump-version-to-1.7.0.patch
@@ -0,0 +1,29 @@
+From 9153522103bd4ed7e3299c4d073f66bb37cb2d42 Mon Sep 17 00:00:00 2001
+From: Nikolay Letov <letov.nikolay@gmail.com>
+Date: Wed, 22 Feb 2023 13:36:07 +0300
+Subject: [PATCH 1/2] meson.build: bump version to 1.7.0
+
+[This was botched in the actual 1.7.0 release :( - David Gibson]
+
+Upstream-Status: Backport [https://git.kernel.org/pub/scm/utils/dtc/dtc.git/commit/?id=64a907f08b9bedd89833c1eee674148cff2343c6]
+
+Signed-off-by: Nikolay Letov <letov.nikolay@gmail.com>
+Signed-off-by: Peter Marko <peter.marko@siemens.com>
+---
+ meson.build | 2 +-
+ 1 file changed, 1 insertion(+), 1 deletion(-)
+
+diff --git a/meson.build b/meson.build
+index 78251eb..d88cd9f 100644
+--- a/meson.build
++++ b/meson.build
+@@ -1,5 +1,5 @@
+ project('dtc', 'c',
+- version: '1.6.0',
++ version: '1.7.0',
+ license: ['GPL2+', 'BSD-2'],
+ default_options: 'werror=true',
+ )
+--
+2.30.2
+
diff --git a/meta/recipes-kernel/dtc/dtc/0002-meson-allow-building-from-shallow-clones.patch b/meta/recipes-kernel/dtc/dtc/0002-meson-allow-building-from-shallow-clones.patch
new file mode 100644
index 0000000000..0284905913
--- /dev/null
+++ b/meta/recipes-kernel/dtc/dtc/0002-meson-allow-building-from-shallow-clones.patch
@@ -0,0 +1,38 @@
+From 4415b0baece3c4351a6d3637c2754abbefd4795d Mon Sep 17 00:00:00 2001
+From: Peter Marko <peter.marko@siemens.com>
+Date: Sat, 16 Dec 2023 18:58:31 +0100
+Subject: [PATCH 2/2] meson: allow building from shallow clones
+
+When building from shallow clone, tag is not available
+and version defaults to git hash.
+Problem is that some builds check DTC version and fail the comparison.
+Example is https://git.trustedfirmware.org/TF-A/trusted-firmware-a.git
+Which fails to build with following error:
+dtc version too old (039a994), you need at least version 1.4.4
+
+Drop --always from git describe command, see
+https://github.com/mesonbuild/meson/blob/1.3.0/mesonbuild/utils/universal.py#L773
+This will make it more closer to build via Makefile.
+
+Upstream-Status: Submitted [https://github.com/dgibson/dtc/pull/122]
+
+Signed-off-by: Peter Marko <peter.marko@siemens.com>
+---
+ meson.build | 1 +
+ 1 file changed, 1 insertion(+)
+
+diff --git a/meson.build b/meson.build
+index 78251eb..fc0c92a 100644
+--- a/meson.build
++++ b/meson.build
+@@ -56,6 +56,7 @@ py = py.find_installation(required: get_option('python'))
+ swig = find_program('swig', required: get_option('python'))
+
+ version_gen_h = vcs_tag(
++ command: ['git', 'describe', '--dirty=+'],
+ input: 'version_gen.h.in',
+ output: 'version_gen.h',
+ )
+--
+2.30.2
+
diff --git a/meta/recipes-kernel/dtc/dtc_1.6.1.bb b/meta/recipes-kernel/dtc/dtc_1.6.1.bb
deleted file mode 100644
index 2a6ac089a3..0000000000
--- a/meta/recipes-kernel/dtc/dtc_1.6.1.bb
+++ /dev/null
@@ -1,30 +0,0 @@
-SUMMARY = "Device Tree Compiler"
-HOMEPAGE = "https://devicetree.org/"
-DESCRIPTION = "The Device Tree Compiler is a tool used to manipulate the Open-Firmware-like device tree used by PowerPC kernels."
-SECTION = "bootloader"
-LICENSE = "GPL-2.0-only | BSD-2-Clause"
-
-LIC_FILES_CHKSUM = "file://GPL;md5=b234ee4d69f5fce4486a80fdaf4a4263 \
- file://libfdt/libfdt.h;beginline=4;endline=7;md5=05bb357cfb75cae7d2b01d2ee8d76407"
-
-SRC_URI = "git://git.kernel.org/pub/scm/utils/dtc/dtc.git;branch=master \
- file://0001-fdtdump-fix-Werror-int-to-pointer-cast.patch"
-SRCREV = "b6910bec11614980a21e46fbccc35934b671bd81"
-
-UPSTREAM_CHECK_GITTAGREGEX = "v(?P<pver>\d+(\.\d+)+)"
-
-S = "${WORKDIR}/git"
-
-inherit meson pkgconfig
-
-EXTRA_OEMESON = "-Dpython=disabled -Dvalgrind=disabled"
-
-PACKAGECONFIG ??= "tools"
-PACKAGECONFIG[tools] = "-Dtools=true,-Dtools=false,flex-native bison-native"
-PACKAGECONFIG[yaml] = "-Dyaml=enabled,-Dyaml=disabled,libyaml"
-
-PACKAGES =+ "${PN}-misc"
-FILES:${PN}-misc = "${bindir}/convert-dtsv0 ${bindir}/ftdump ${bindir}/dtdiff"
-RDEPENDS:${PN}-misc += "${@bb.utils.contains('PACKAGECONFIG', 'tools', 'bash diffutils', '', d)}"
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-kernel/dtc/dtc_1.7.0.bb b/meta/recipes-kernel/dtc/dtc_1.7.0.bb
new file mode 100644
index 0000000000..0702fc16df
--- /dev/null
+++ b/meta/recipes-kernel/dtc/dtc_1.7.0.bb
@@ -0,0 +1,34 @@
+SUMMARY = "Device Tree Compiler"
+HOMEPAGE = "https://devicetree.org/"
+DESCRIPTION = "The Device Tree Compiler is a toolchain for working with device tree source and binary files."
+SECTION = "bootloader"
+LICENSE = "GPL-2.0-only | BSD-2-Clause"
+
+LIC_FILES_CHKSUM = "file://GPL;md5=b234ee4d69f5fce4486a80fdaf4a4263 \
+ file://BSD-2-Clause;md5=5d6306d1b08f8df623178dfd81880927 \
+ file://README.license;md5=a1eb22e37f09df5b5511b8a278992d0e"
+
+SRC_URI = " \
+ git://git.kernel.org/pub/scm/utils/dtc/dtc.git;branch=main;protocol=https \
+ file://0001-meson.build-bump-version-to-1.7.0.patch \
+ file://0002-meson-allow-building-from-shallow-clones.patch \
+"
+SRCREV = "039a99414e778332d8f9c04cbd3072e1dcc62798"
+
+UPSTREAM_CHECK_GITTAGREGEX = "v(?P<pver>\d+(\.\d+)+)"
+
+S = "${WORKDIR}/git"
+
+inherit meson pkgconfig
+
+EXTRA_OEMESON = "-Dpython=disabled -Dvalgrind=disabled"
+
+PACKAGECONFIG ??= "tools"
+PACKAGECONFIG[tools] = "-Dtools=true,-Dtools=false,flex-native bison-native"
+PACKAGECONFIG[yaml] = "-Dyaml=enabled,-Dyaml=disabled,libyaml"
+
+PACKAGES =+ "${PN}-misc"
+FILES:${PN}-misc = "${bindir}/convert-dtsv0 ${bindir}/ftdump ${bindir}/dtdiff"
+RDEPENDS:${PN}-misc += "${@bb.utils.contains('PACKAGECONFIG', 'tools', 'bash diffutils', '', d)}"
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-kernel/dtc/python3-dtschema-wrapper_2021.10.bb b/meta/recipes-kernel/dtc/python3-dtschema-wrapper_2021.10.bb
index c869274d09..974fcba876 100644
--- a/meta/recipes-kernel/dtc/python3-dtschema-wrapper_2021.10.bb
+++ b/meta/recipes-kernel/dtc/python3-dtschema-wrapper_2021.10.bb
@@ -1,4 +1,4 @@
-DESCRIPTION = "Wrapper for tooling for devicetree validation using YAML and jsonschema"
+SUMMARY = "Wrapper for tooling for devicetree validation using YAML and jsonschema"
HOMEPAGE = "https://yoctoproject.org"
LICENSE = "MIT"
LIC_FILES_CHKSUM = "file://${COREBASE}/meta/COPYING.MIT;md5=3da9cfbcb788c80a0384361b4de20420"
diff --git a/meta/recipes-kernel/kern-tools/kern-tools-native_git.bb b/meta/recipes-kernel/kern-tools/kern-tools-native_git.bb
index 11613ab3b6..8eff00821a 100644
--- a/meta/recipes-kernel/kern-tools/kern-tools-native_git.bb
+++ b/meta/recipes-kernel/kern-tools/kern-tools-native_git.bb
@@ -9,14 +9,14 @@ LIC_FILES_CHKSUM = "\
file://Kconfiglib/LICENSE.txt;md5=712177a72a3937909543eda3ad1bfb7c \
"
-DEPENDS = "git-native"
+DEPENDS += "git-replacement-native"
-SRCREV = "f70b1d52f4706a263ae22e2c61039ccd875e97b6"
-PV = "0.3+git${SRCPV}"
+SRCREV = "7160ebe8b865dd6028aef278efa219433db93f7e"
+PV = "0.3+git"
inherit native
-SRC_URI = "git://git.yoctoproject.org/yocto-kernel-tools.git;branch=master"
+SRC_URI = "git://git.yoctoproject.org/yocto-kernel-tools.git;branch=master;protocol=https"
S = "${WORKDIR}/git"
do_configure() {
diff --git a/meta/recipes-kernel/kexec/kexec-tools/0002-purgatory-Pass-r-directly-to-linker.patch b/meta/recipes-kernel/kexec/kexec-tools/0002-purgatory-Pass-r-directly-to-linker.patch
index 363d5da4ae..a537ac2f0b 100644
--- a/meta/recipes-kernel/kexec/kexec-tools/0002-purgatory-Pass-r-directly-to-linker.patch
+++ b/meta/recipes-kernel/kexec/kexec-tools/0002-purgatory-Pass-r-directly-to-linker.patch
@@ -1,4 +1,4 @@
-From a04bcf8f683c1a5a7d015920124457ad56fb7cf0 Mon Sep 17 00:00:00 2001
+From e5bc9fbd6029057a4e3815a5326af5bd83a450e6 Mon Sep 17 00:00:00 2001
From: Khem Raj <raj.khem@gmail.com>
Date: Mon, 7 Sep 2015 07:59:45 +0000
Subject: [PATCH] purgatory: Pass -r directly to linker
@@ -10,15 +10,16 @@ unfiltered
Signed-off-by: Khem Raj <raj.khem@gmail.com>
Upstream-Status: Pending
+
---
purgatory/Makefile | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/purgatory/Makefile b/purgatory/Makefile
-index 2dd6c47..416e6b9 100644
+index 4d2d071..6673423 100644
--- a/purgatory/Makefile
+++ b/purgatory/Makefile
-@@ -60,7 +60,7 @@ $(PURGATORY): CPPFLAGS=$($(ARCH)_PURGATORY_EXTRA_CFLAGS) \
+@@ -61,7 +61,7 @@ $(PURGATORY): CPPFLAGS=$($(ARCH)_PURGATORY_EXTRA_CFLAGS) \
-I$(shell $(CC) -print-file-name=include)
$(PURGATORY): LDFLAGS=$($(ARCH)_PURGATORY_EXTRA_CFLAGS)\
-Wl,--no-undefined -nostartfiles -nostdlib \
diff --git a/meta/recipes-kernel/kexec/kexec-tools/0003-kexec-ARM-Fix-add_buffer_phys_virt-align-issue.patch b/meta/recipes-kernel/kexec/kexec-tools/0003-kexec-ARM-Fix-add_buffer_phys_virt-align-issue.patch
index 832fe67716..e874a8b4f1 100644
--- a/meta/recipes-kernel/kexec/kexec-tools/0003-kexec-ARM-Fix-add_buffer_phys_virt-align-issue.patch
+++ b/meta/recipes-kernel/kexec/kexec-tools/0003-kexec-ARM-Fix-add_buffer_phys_virt-align-issue.patch
@@ -1,4 +1,4 @@
-From 55e583d20651e829afbbc8dba0f8ec3017cda2d5 Mon Sep 17 00:00:00 2001
+From b62c1da8f8e641397add10367ee9c4cfdedb1cc0 Mon Sep 17 00:00:00 2001
From: Haiqing Bai <Haiqing.Bai@windriver.com>
Date: Mon, 9 Jan 2017 15:26:29 +0800
Subject: [PATCH] kexec: ARM: Fix add_buffer_phys_virt() align issue
@@ -12,15 +12,16 @@ Upstream-Status: Pending
Suggested-By:fredrik.markstrom@gmail.com
Signed-off-by: Haiqing Bai <Haiqing.Bai@windriver.com>
+
---
kexec/arch/arm/crashdump-arm.c | 5 ++++-
1 file changed, 4 insertions(+), 1 deletion(-)
diff --git a/kexec/arch/arm/crashdump-arm.c b/kexec/arch/arm/crashdump-arm.c
-index daa4788..3f72b38 100644
+index 1ec1826..cc20f63 100644
--- a/kexec/arch/arm/crashdump-arm.c
+++ b/kexec/arch/arm/crashdump-arm.c
-@@ -240,6 +240,7 @@ int load_crashdump_segments(struct kexec_info *info, char *mod_cmdline)
+@@ -242,6 +242,7 @@ int load_crashdump_segments(struct kexec_info *info, char *mod_cmdline)
void *buf;
int err;
int last_ranges;
@@ -28,7 +29,7 @@ index daa4788..3f72b38 100644
/*
* First fetch all the memory (RAM) ranges that we are going to pass to
-@@ -281,6 +282,7 @@ int load_crashdump_segments(struct kexec_info *info, char *mod_cmdline)
+@@ -283,6 +284,7 @@ int load_crashdump_segments(struct kexec_info *info, char *mod_cmdline)
/* for support LPAE enabled kernel*/
elf_info.class = ELFCLASS64;
@@ -36,7 +37,7 @@ index daa4788..3f72b38 100644
err = crash_create_elf64_headers(info, &elf_info,
usablemem_rgns.ranges,
-@@ -302,8 +304,9 @@ int load_crashdump_segments(struct kexec_info *info, char *mod_cmdline)
+@@ -304,8 +306,9 @@ int load_crashdump_segments(struct kexec_info *info, char *mod_cmdline)
* 1MB) so that available memory passed in kernel command line will be
* aligned to 1MB. This is because kernel create_mapping() wants memory
* regions to be aligned to SECTION_SIZE.
diff --git a/meta/recipes-kernel/kexec/kexec-tools/0005-Disable-PIE-during-link.patch b/meta/recipes-kernel/kexec/kexec-tools/0005-Disable-PIE-during-link.patch
index 7a4b8548ca..6a21744ac1 100644
--- a/meta/recipes-kernel/kexec/kexec-tools/0005-Disable-PIE-during-link.patch
+++ b/meta/recipes-kernel/kexec/kexec-tools/0005-Disable-PIE-during-link.patch
@@ -1,4 +1,4 @@
-From c54488ad5fd657e0f154d76d7456d9080be24836 Mon Sep 17 00:00:00 2001
+From 494888bcc3bbf070dfce1b2686ee34c8619aa33d Mon Sep 17 00:00:00 2001
From: Khem Raj <raj.khem@gmail.com>
Date: Sat, 10 Jun 2017 11:18:49 -0700
Subject: [PATCH] Disable PIE during link
@@ -9,15 +9,16 @@ just need to match it with linker flags
Upstream-Status: Pending
Signed-off-by: Khem Raj <raj.khem@gmail.com>
+
---
purgatory/Makefile | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/purgatory/Makefile b/purgatory/Makefile
-index 416e6b9..f00edb4 100644
+index 6673423..a7405ea 100644
--- a/purgatory/Makefile
+++ b/purgatory/Makefile
-@@ -59,7 +59,7 @@ $(PURGATORY): CPPFLAGS=$($(ARCH)_PURGATORY_EXTRA_CFLAGS) \
+@@ -60,7 +60,7 @@ $(PURGATORY): CPPFLAGS=$($(ARCH)_PURGATORY_EXTRA_CFLAGS) \
-Iinclude \
-I$(shell $(CC) -print-file-name=include)
$(PURGATORY): LDFLAGS=$($(ARCH)_PURGATORY_EXTRA_CFLAGS)\
diff --git a/meta/recipes-kernel/kexec/kexec-tools/Fix-building-on-x86_64-with-binutils-2.41.patch b/meta/recipes-kernel/kexec/kexec-tools/Fix-building-on-x86_64-with-binutils-2.41.patch
new file mode 100644
index 0000000000..4894f044fc
--- /dev/null
+++ b/meta/recipes-kernel/kexec/kexec-tools/Fix-building-on-x86_64-with-binutils-2.41.patch
@@ -0,0 +1,95 @@
+From 0000000000000000000000000000000000000000 Mon Sep 17 00:00:00 2001
+From: Michel Lind <salimma@fedoraproject.org>
+Date: Tue, 30 Jan 2024 04:14:31 -0600
+Subject: [PATCH] Fix building on x86_64 with binutils 2.41
+
+Newer versions of the GNU assembler (observed with binutils 2.41) will
+complain about the ".arch i386" in files assembled with "as --64",
+with the message "Error: 64bit mode not supported on 'i386'".
+
+Fix by moving ".arch i386" below the relevant ".code32" directive, so
+that the assembler is no longer expecting 64-bit instructions to be used
+by the time that the ".arch i386" directive is encountered.
+
+Based on similar iPXE fix:
+https://github.com/ipxe/ipxe/commit/6ca597eee
+
+Signed-off-by: Michel Lind <michel@michel-slm.name>
+Signed-off-by: Simon Horman <horms@kernel.org>
+
+Upstream-Status: Backport [https://git.kernel.org/pub/scm/utils/kernel/kexec/kexec-tools.git/commit/?h=main&id=328de8e00e298f00d7ba6b25dc3950147e9642e6]
+Signed-off-by: Yoann Congal <yoann.congal@smile.fr>
+---
+ purgatory/arch/i386/entry32-16-debug.S | 2 +-
+ purgatory/arch/i386/entry32-16.S | 2 +-
+ purgatory/arch/i386/entry32.S | 2 +-
+ purgatory/arch/i386/setup-x86.S | 2 +-
+ 4 files changed, 4 insertions(+), 4 deletions(-)
+
+diff --git a/purgatory/arch/i386/entry32-16-debug.S b/purgatory/arch/i386/entry32-16-debug.S
+index 5167944..12e1164 100644
+--- a/purgatory/arch/i386/entry32-16-debug.S
++++ b/purgatory/arch/i386/entry32-16-debug.S
+@@ -25,10 +25,10 @@
+ .globl entry16_debug_pre32
+ .globl entry16_debug_first32
+ .globl entry16_debug_old_first32
+- .arch i386
+ .balign 16
+ entry16_debug:
+ .code32
++ .arch i386
+ /* Compute where I am running at (assumes esp valid) */
+ call 1f
+ 1: popl %ebx
+diff --git a/purgatory/arch/i386/entry32-16.S b/purgatory/arch/i386/entry32-16.S
+index c051aab..eace095 100644
+--- a/purgatory/arch/i386/entry32-16.S
++++ b/purgatory/arch/i386/entry32-16.S
+@@ -20,10 +20,10 @@
+ #undef i386
+ .text
+ .globl entry16, entry16_regs
+- .arch i386
+ .balign 16
+ entry16:
+ .code32
++ .arch i386
+ /* Compute where I am running at (assumes esp valid) */
+ call 1f
+ 1: popl %ebx
+diff --git a/purgatory/arch/i386/entry32.S b/purgatory/arch/i386/entry32.S
+index f7a494f..8ce9e31 100644
+--- a/purgatory/arch/i386/entry32.S
++++ b/purgatory/arch/i386/entry32.S
+@@ -20,10 +20,10 @@
+ #undef i386
+
+ .text
+- .arch i386
+ .globl entry32, entry32_regs
+ entry32:
+ .code32
++ .arch i386
+
+ /* Setup a gdt that should that is generally usefully */
+ lgdt %cs:gdt
+diff --git a/purgatory/arch/i386/setup-x86.S b/purgatory/arch/i386/setup-x86.S
+index 201bb2c..a212eed 100644
+--- a/purgatory/arch/i386/setup-x86.S
++++ b/purgatory/arch/i386/setup-x86.S
+@@ -21,10 +21,10 @@
+ #undef i386
+
+ .text
+- .arch i386
+ .globl purgatory_start
+ purgatory_start:
+ .code32
++ .arch i386
+
+ /* Load a gdt so I know what the segment registers are */
+ lgdt %cs:gdt
+--
+2.39.2
+
diff --git a/meta/recipes-kernel/kexec/kexec-tools_2.0.24.bb b/meta/recipes-kernel/kexec/kexec-tools_2.0.24.bb
deleted file mode 100644
index fdad40ed6a..0000000000
--- a/meta/recipes-kernel/kexec/kexec-tools_2.0.24.bb
+++ /dev/null
@@ -1,86 +0,0 @@
-
-SUMMARY = "Kexec fast reboot tools"
-DESCRIPTION = "Kexec is a fast reboot feature that lets you reboot to a new Linux kernel"
-AUTHOR = "Eric Biederman"
-HOMEPAGE = "http://kernel.org/pub/linux/utils/kernel/kexec/"
-SECTION = "kernel/userland"
-LICENSE = "GPL-2.0-only"
-LIC_FILES_CHKSUM = "file://COPYING;md5=ea5bed2f60d357618ca161ad539f7c0a \
- file://kexec/kexec.c;beginline=1;endline=20;md5=af10f6ae4a8715965e648aa687ad3e09"
-DEPENDS = "zlib xz"
-
-SRC_URI = "${KERNELORG_MIRROR}/linux/utils/kernel/kexec/kexec-tools-${PV}.tar.gz \
- file://kdump \
- file://kdump.conf \
- file://kdump.service \
- file://0001-powerpc-change-the-memory-size-limit.patch \
- file://0002-purgatory-Pass-r-directly-to-linker.patch \
- file://0003-kexec-ARM-Fix-add_buffer_phys_virt-align-issue.patch \
- file://0005-Disable-PIE-during-link.patch \
- file://0001-arm64-kexec-disabled-check-if-kaslr-seed-dtb-propert.patch \
- "
-
-SRC_URI[sha256sum] = "1ff9137327aeac3b2ab922a71bc6eb4655571a0ff77c071cb92783a9a59d4d26"
-
-inherit autotools update-rc.d systemd
-
-export LDFLAGS = "-L${STAGING_LIBDIR}"
-EXTRA_OECONF = " --with-zlib=yes"
-
-do_compile:prepend() {
- # Remove the prepackaged config.h from the source tree as it overrides
- # the same file generated by configure and placed in the build tree
- rm -f ${S}/include/config.h
-
- # Remove the '*.d' file to make sure the recompile is OK
- for dep in `find ${B} -type f -name '*.d'`; do
- dep_no_d="`echo $dep | sed 's#.d$##'`"
- # Remove file.d when there is a file.o
- if [ -f "$dep_no_d.o" ]; then
- rm -f $dep
- fi
- done
-}
-
-do_install:append () {
- install -d ${D}${sysconfdir}/sysconfig
- install -m 0644 ${WORKDIR}/kdump.conf ${D}${sysconfdir}/sysconfig
-
- if ${@bb.utils.contains('DISTRO_FEATURES', 'sysvinit', 'true', 'false', d)}; then
- install -D -m 0755 ${WORKDIR}/kdump ${D}${sysconfdir}/init.d/kdump
- fi
-
- if ${@bb.utils.contains('DISTRO_FEATURES', 'systemd', 'true', 'false', d)}; then
- install -D -m 0755 ${WORKDIR}/kdump ${D}${libexecdir}/kdump-helper
- install -D -m 0644 ${WORKDIR}/kdump.service ${D}${systemd_system_unitdir}/kdump.service
- sed -i -e 's,@LIBEXECDIR@,${libexecdir},g' ${D}${systemd_system_unitdir}/kdump.service
- fi
-}
-
-PACKAGES =+ "kexec kdump vmcore-dmesg"
-
-ALLOW_EMPTY:${PN} = "1"
-RRECOMMENDS:${PN} = "kexec kdump vmcore-dmesg"
-
-FILES:kexec = "${sbindir}/kexec"
-FILES:kdump = "${sbindir}/kdump \
- ${sysconfdir}/sysconfig/kdump.conf \
- ${sysconfdir}/init.d/kdump \
- ${libexecdir}/kdump-helper \
- ${systemd_system_unitdir}/kdump.service \
-"
-
-FILES:vmcore-dmesg = "${sbindir}/vmcore-dmesg"
-
-INITSCRIPT_PACKAGES = "kdump"
-INITSCRIPT_NAME:kdump = "kdump"
-INITSCRIPT_PARAMS:kdump = "start 56 2 3 4 5 . stop 56 0 1 6 ."
-
-SYSTEMD_PACKAGES = "kdump"
-SYSTEMD_SERVICE:kdump = "kdump.service"
-
-SECURITY_PIE_CFLAGS:remove = "-fPIE -pie"
-
-COMPATIBLE_HOST = '(x86_64.*|i.86.*|arm.*|aarch64.*|powerpc.*|mips.*)-(linux|freebsd.*)'
-
-INSANE_SKIP:${PN} = "arch"
diff --git a/meta/recipes-kernel/kexec/kexec-tools_2.0.28.bb b/meta/recipes-kernel/kexec/kexec-tools_2.0.28.bb
new file mode 100644
index 0000000000..dec821ea88
--- /dev/null
+++ b/meta/recipes-kernel/kexec/kexec-tools_2.0.28.bb
@@ -0,0 +1,86 @@
+
+SUMMARY = "Kexec fast reboot tools"
+DESCRIPTION = "Kexec is a fast reboot feature that lets you reboot to a new Linux kernel"
+HOMEPAGE = "http://kernel.org/pub/linux/utils/kernel/kexec/"
+SECTION = "kernel/userland"
+LICENSE = "GPL-2.0-only"
+LIC_FILES_CHKSUM = "file://COPYING;md5=ea5bed2f60d357618ca161ad539f7c0a \
+ file://kexec/kexec.c;beginline=1;endline=20;md5=af10f6ae4a8715965e648aa687ad3e09"
+DEPENDS = "zlib xz"
+
+SRC_URI = "${KERNELORG_MIRROR}/linux/utils/kernel/kexec/kexec-tools-${PV}.tar.gz \
+ file://kdump \
+ file://kdump.conf \
+ file://kdump.service \
+ file://0001-powerpc-change-the-memory-size-limit.patch \
+ file://0002-purgatory-Pass-r-directly-to-linker.patch \
+ file://0003-kexec-ARM-Fix-add_buffer_phys_virt-align-issue.patch \
+ file://0005-Disable-PIE-during-link.patch \
+ file://0001-arm64-kexec-disabled-check-if-kaslr-seed-dtb-propert.patch \
+ file://Fix-building-on-x86_64-with-binutils-2.41.patch \
+ "
+
+SRC_URI[sha256sum] = "f33d2660b3e38d25a127e87097978e0f7a9a73ab5151a29eb80974d169ff6a29"
+
+inherit autotools update-rc.d systemd
+
+export LDFLAGS = "-L${STAGING_LIBDIR}"
+EXTRA_OECONF = " --with-zlib=yes"
+
+do_compile:prepend() {
+ # Remove the prepackaged config.h from the source tree as it overrides
+ # the same file generated by configure and placed in the build tree
+ rm -f ${S}/include/config.h
+
+ # Remove the '*.d' file to make sure the recompile is OK
+ for dep in `find ${B} -type f -name '*.d'`; do
+ dep_no_d="`echo $dep | sed 's#.d$##'`"
+ # Remove file.d when there is a file.o
+ if [ -f "$dep_no_d.o" ]; then
+ rm -f $dep
+ fi
+ done
+}
+
+do_install:append () {
+ install -d ${D}${sysconfdir}/sysconfig
+ install -m 0644 ${WORKDIR}/kdump.conf ${D}${sysconfdir}/sysconfig
+
+ if ${@bb.utils.contains('DISTRO_FEATURES', 'sysvinit', 'true', 'false', d)}; then
+ install -D -m 0755 ${WORKDIR}/kdump ${D}${sysconfdir}/init.d/kdump
+ fi
+
+ if ${@bb.utils.contains('DISTRO_FEATURES', 'systemd', 'true', 'false', d)}; then
+ install -D -m 0755 ${WORKDIR}/kdump ${D}${libexecdir}/kdump-helper
+ install -D -m 0644 ${WORKDIR}/kdump.service ${D}${systemd_system_unitdir}/kdump.service
+ sed -i -e 's,@LIBEXECDIR@,${libexecdir},g' ${D}${systemd_system_unitdir}/kdump.service
+ fi
+}
+
+PACKAGES =+ "kexec kdump vmcore-dmesg"
+
+ALLOW_EMPTY:${PN} = "1"
+RRECOMMENDS:${PN} = "kexec kdump vmcore-dmesg"
+
+FILES:kexec = "${sbindir}/kexec"
+FILES:kdump = "${sbindir}/kdump \
+ ${sysconfdir}/sysconfig/kdump.conf \
+ ${sysconfdir}/init.d/kdump \
+ ${libexecdir}/kdump-helper \
+ ${systemd_system_unitdir}/kdump.service \
+"
+
+FILES:vmcore-dmesg = "${sbindir}/vmcore-dmesg"
+
+INITSCRIPT_PACKAGES = "kdump"
+INITSCRIPT_NAME:kdump = "kdump"
+INITSCRIPT_PARAMS:kdump = "start 56 2 3 4 5 . stop 56 0 1 6 ."
+
+SYSTEMD_PACKAGES = "kdump"
+SYSTEMD_SERVICE:kdump = "kdump.service"
+
+SECURITY_PIE_CFLAGS:remove = "-fPIE -pie"
+
+COMPATIBLE_HOST = '(x86_64.*|i.86.*|arm.*|aarch64.*|powerpc.*|mips.*)-(linux|freebsd.*)'
+
+INSANE_SKIP:${PN} = "arch"
diff --git a/meta/recipes-kernel/kmod/depmodwrapper-cross_1.0.bb b/meta/recipes-kernel/kmod/depmodwrapper-cross_1.0.bb
index 303026ad78..6c0739d64f 100644
--- a/meta/recipes-kernel/kmod/depmodwrapper-cross_1.0.bb
+++ b/meta/recipes-kernel/kmod/depmodwrapper-cross_1.0.bb
@@ -21,13 +21,17 @@ do_install() {
#!/bin/sh
# Expected to be called as: depmodwrapper -a KERNEL_VERSION
if [ "\$1" != "-a" -o "\$2" != "-b" ]; then
- echo "Usage: depmodwrapper -a -b rootfs KERNEL_VERSION" >&2
+ echo "Usage: depmodwrapper -a -b rootfs KERNEL_VERSION [KERNEL_PACKAGE_NAME]" >&2
exit 1
fi
+kernelpkgname="kernel"
+# If no KERNEL_PACKAGE_NAME, assume "kernel".
+[ -z "\$5" ] || kernelpkgname="\$5"
+
kernelabi=""
-if [ -r "${PKGDATA_DIR}/kernel-depmod/kernel-abiversion" ]; then
- kernelabi=\$(cat "${PKGDATA_DIR}/kernel-depmod/kernel-abiversion")
+if [ -r "${PKGDATA_DIR}/\${kernelpkgname}-depmod/\${kernelpkgname}-abiversion" ]; then
+ kernelabi=\$(cat "${PKGDATA_DIR}/\${kernelpkgname}-depmod/\${kernelpkgname}-abiversion")
fi
if [ ! -e "\$3${nonarch_base_libdir}/depmod.d/exclude.conf" ]; then
@@ -35,11 +39,11 @@ if [ ! -e "\$3${nonarch_base_libdir}/depmod.d/exclude.conf" ]; then
echo "exclude .debug" > "\$3${nonarch_base_libdir}/depmod.d/exclude.conf"
fi
-if [ ! -r ${PKGDATA_DIR}/kernel-depmod/System.map-\$4 ] || [ "\$kernelabi" != "\$4" ]; then
- echo "Unable to read: ${PKGDATA_DIR}/kernel-depmod/System.map-\$4" >&2
+if [ ! -r ${PKGDATA_DIR}/\${kernelpkgname}-depmod/System.map-\$4 ] || [ "\$kernelabi" != "\$4" ]; then
+ echo "Unable to read: ${PKGDATA_DIR}/\${kernelpkgname}-depmod/System.map-\$4" >&2
exec env depmod -C "\$3${nonarch_base_libdir}/depmod.d" "\$1" "\$2" "\$3" "\$4"
else
- exec env depmod -C "\$3${nonarch_base_libdir}/depmod.d" "\$1" "\$2" "\$3" -F "${PKGDATA_DIR}/kernel-depmod/System.map-\$4" "\$4"
+ exec env depmod -C "\$3${nonarch_base_libdir}/depmod.d" "\$1" "\$2" "\$3" -F "${PKGDATA_DIR}/\${kernelpkgname}-depmod/System.map-\$4" "\$4"
fi
EOF
chmod +x ${D}${bindir_crossscripts}/depmodwrapper
diff --git a/meta/recipes-kernel/kmod/kmod/0001-Use-portable-implementation-for-basename-API.patch b/meta/recipes-kernel/kmod/kmod/0001-Use-portable-implementation-for-basename-API.patch
new file mode 100644
index 0000000000..6a7f9ded4f
--- /dev/null
+++ b/meta/recipes-kernel/kmod/kmod/0001-Use-portable-implementation-for-basename-API.patch
@@ -0,0 +1,136 @@
+From 721ed6040c7aa47070faf6378c433089e178bd43 Mon Sep 17 00:00:00 2001
+From: Khem Raj <raj.khem@gmail.com>
+Date: Sat, 9 Dec 2023 17:35:59 -0800
+Subject: [PATCH] Use portable implementation for basename API
+
+musl has removed the non-prototype declaration of basename from
+string.h [1] which now results in build errors with clang-17+ compiler
+
+Implement GNU basename behavior using strchr which is portable across libcs
+
+Fixes
+../git/tools/kmod.c:71:19: error: call to undeclared function 'basename'; ISO C99 and later do not support implicit function declarations [-Wimplicit-function-declaration]
+71 | "Commands:\n", basename(argv[0]));
+| ^
+
+[1] https://git.musl-libc.org/cgit/musl/commit/?id=725e17ed6dff4d0cd22487bb64470881e86a92e7
+
+Upstream-Status: Submitted [https://github.com/kmod-project/kmod/pull/32]
+
+Suggested-by: Rich Felker
+
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+---
+ libkmod/libkmod-config.c | 2 +-
+ shared/util.c | 4 ++--
+ shared/util.h | 7 +++++++
+ testsuite/testsuite.c | 2 +-
+ tools/depmod.c | 2 +-
+ tools/kmod.c | 4 ++--
+ 6 files changed, 14 insertions(+), 7 deletions(-)
+
+diff --git a/libkmod/libkmod-config.c b/libkmod/libkmod-config.c
+index e83621b..8aa555a 100644
+--- a/libkmod/libkmod-config.c
++++ b/libkmod/libkmod-config.c
+@@ -794,7 +794,7 @@ static int conf_files_insert_sorted(struct kmod_ctx *ctx,
+ bool is_single = false;
+
+ if (name == NULL) {
+- name = basename(path);
++ name = gnu_basename(path);
+ is_single = true;
+ }
+
+diff --git a/shared/util.c b/shared/util.c
+index e2bab83..0e16670 100644
+--- a/shared/util.c
++++ b/shared/util.c
+@@ -172,9 +172,9 @@ char *modname_normalize(const char *modname, char buf[static PATH_MAX], size_t *
+
+ char *path_to_modname(const char *path, char buf[static PATH_MAX], size_t *len)
+ {
+- char *modname;
++ const char *modname;
+
+- modname = basename(path);
++ modname = gnu_basename(path);
+ if (modname == NULL || modname[0] == '\0')
+ return NULL;
+
+diff --git a/shared/util.h b/shared/util.h
+index c4a3916..073dc5a 100644
+--- a/shared/util.h
++++ b/shared/util.h
+@@ -5,6 +5,7 @@
+ #include <stdbool.h>
+ #include <stdlib.h>
+ #include <stdio.h>
++#include <string.h>
+ #include <sys/types.h>
+ #include <sys/stat.h>
+ #include <time.h>
+@@ -76,6 +77,12 @@ do { \
+ __p->__v = (val); \
+ } while(0)
+
++static _always_inline_ const char *gnu_basename(const char *s)
++{
++ const char *p = strrchr(s, '/');
++ return p ? p+1 : s;
++}
++
+ static _always_inline_ unsigned int ALIGN_POWER2(unsigned int u)
+ {
+ return 1 << ((sizeof(u) * 8) - __builtin_clz(u - 1));
+diff --git a/testsuite/testsuite.c b/testsuite/testsuite.c
+index 318343a..aafc987 100644
+--- a/testsuite/testsuite.c
++++ b/testsuite/testsuite.c
+@@ -70,7 +70,7 @@ static void help(void)
+
+ printf("Usage:\n"
+ "\t%s [options] <test>\n"
+- "Options:\n", basename(progname));
++ "Options:\n", gnu_basename(progname));
+
+ for (itr = options, itr_short = options_short;
+ itr->name != NULL; itr++, itr_short++)
+diff --git a/tools/depmod.c b/tools/depmod.c
+index 43fc354..cfb15b1 100644
+--- a/tools/depmod.c
++++ b/tools/depmod.c
+@@ -762,7 +762,7 @@ static int cfg_files_insert_sorted(struct cfg_file ***p_files, size_t *p_n_files
+ if (name != NULL)
+ namelen = strlen(name);
+ else {
+- name = basename(dir);
++ name = gnu_basename(dir);
+ namelen = strlen(name);
+ dirlen -= namelen + 1;
+ }
+diff --git a/tools/kmod.c b/tools/kmod.c
+index 55689c0..df91e5c 100644
+--- a/tools/kmod.c
++++ b/tools/kmod.c
+@@ -68,7 +68,7 @@ static int kmod_help(int argc, char *argv[])
+ "Options:\n"
+ "\t-V, --version show version\n"
+ "\t-h, --help show this help\n\n"
+- "Commands:\n", basename(argv[0]));
++ "Commands:\n", gnu_basename(argv[0]));
+
+ for (i = 0; i < ARRAY_SIZE(kmod_cmds); i++) {
+ if (kmod_cmds[i]->help != NULL) {
+@@ -156,7 +156,7 @@ static int handle_kmod_compat_commands(int argc, char *argv[])
+ const char *cmd;
+ size_t i;
+
+- cmd = basename(argv[0]);
++ cmd = gnu_basename(argv[0]);
+
+ for (i = 0; i < ARRAY_SIZE(kmod_compat_cmds); i++) {
+ if (streq(kmod_compat_cmds[i]->name, cmd))
+--
+2.43.0
+
diff --git a/meta/recipes-kernel/kmod/kmod/0001-depmod-Add-support-for-excluding-a-directory.patch b/meta/recipes-kernel/kmod/kmod/0001-depmod-Add-support-for-excluding-a-directory.patch
deleted file mode 100644
index ea0570af2b..0000000000
--- a/meta/recipes-kernel/kmod/kmod/0001-depmod-Add-support-for-excluding-a-directory.patch
+++ /dev/null
@@ -1,172 +0,0 @@
-From f50e2d67575ac5f256fb853ca9d29aeac92d9a57 Mon Sep 17 00:00:00 2001
-From: Saul Wold <saul.wold@windriver.com>
-Date: Thu, 31 Mar 2022 14:56:28 -0700
-Subject: [PATCH] depmod: Add support for excluding a directory
-
-This adds support to depmod to enable a new exclude directive in
-the depmod.d/*.conf configuration file. Currently depmod
-already excludes directories named source or build. This change
-will allow additional directories like .debug to be excluded also
-via a new exclude directive.
-
-depmod.d/exclude.conf example:
-exclude .debug
-
-Upstream-Status: Accepted
-
-Signed-off-by: Saul Wold <saul.wold@windriver.com>
-[ Fix warnings and make should_exclude_dir() return bool ]
-Signed-off-by: Lucas De Marchi <lucas.demarchi@intel.com>
----
- man/depmod.d.xml | 14 ++++++++++
- tools/depmod.c | 66 +++++++++++++++++++++++++++++++++++++++++++++---
- 2 files changed, 76 insertions(+), 4 deletions(-)
-
-diff --git a/man/depmod.d.xml b/man/depmod.d.xml
-index b315e93..76548e9 100644
---- a/man/depmod.d.xml
-+++ b/man/depmod.d.xml
-@@ -131,6 +131,20 @@
- </para>
- </listitem>
- </varlistentry>
-+ <varlistentry>
-+ <term>exclude <replaceable>excludedir</replaceable>
-+ </term>
-+ <listitem>
-+ <para>
-+ This specifies the trailing directories that will be excluded
-+ during the search for kernel modules.
-+ </para>
-+ <para>
-+ The <replaceable>excludedir</replaceable> is the trailing directory
-+ to exclude
-+ </para>
-+ </listitem>
-+ </varlistentry>
- </variablelist>
- </refsect1>
-
-diff --git a/tools/depmod.c b/tools/depmod.c
-index 07a35ba..4117dd1 100644
---- a/tools/depmod.c
-+++ b/tools/depmod.c
-@@ -458,6 +458,11 @@ struct cfg_external {
- char path[];
- };
-
-+struct cfg_exclude {
-+ struct cfg_exclude *next;
-+ char exclude_dir[];
-+};
-+
- struct cfg {
- const char *kversion;
- char dirname[PATH_MAX];
-@@ -469,6 +474,7 @@ struct cfg {
- struct cfg_override *overrides;
- struct cfg_search *searches;
- struct cfg_external *externals;
-+ struct cfg_exclude *excludes;
- };
-
- static enum search_type cfg_define_search_type(const char *path)
-@@ -580,6 +586,30 @@ static void cfg_external_free(struct cfg_external *ext)
- free(ext);
- }
-
-+static int cfg_exclude_add(struct cfg *cfg, const char *path)
-+{
-+ struct cfg_exclude *exc;
-+ size_t len = strlen(path);
-+
-+ exc = malloc(sizeof(struct cfg_exclude) + len + 1);
-+ if (exc == NULL) {
-+ ERR("exclude add: out of memory\n");
-+ return -ENOMEM;
-+ }
-+ memcpy(exc->exclude_dir, path, len + 1);
-+
-+ DBG("exclude add: %s\n", path);
-+
-+ exc->next = cfg->excludes;
-+ cfg->excludes = exc;
-+ return 0;
-+}
-+
-+static void cfg_exclude_free(struct cfg_exclude *exc)
-+{
-+ free(exc);
-+}
-+
- static int cfg_kernel_matches(const struct cfg *cfg, const char *pattern)
- {
- regex_t re;
-@@ -657,6 +687,11 @@ static int cfg_file_parse(struct cfg *cfg, const char *filename)
- }
-
- cfg_external_add(cfg, dir);
-+ } else if (streq(cmd, "exclude")) {
-+ const char *sp;
-+ while ((sp = strtok_r(NULL, "\t ", &saveptr)) != NULL) {
-+ cfg_exclude_add(cfg, sp);
-+ }
- } else if (streq(cmd, "include")
- || streq(cmd, "make_map_files")) {
- INF("%s:%u: command %s not implemented yet\n",
-@@ -857,6 +892,12 @@ static void cfg_free(struct cfg *cfg)
- cfg->externals = cfg->externals->next;
- cfg_external_free(tmp);
- }
-+
-+ while (cfg->excludes) {
-+ struct cfg_exclude *tmp = cfg->excludes;
-+ cfg->excludes = cfg->excludes->next;
-+ cfg_exclude_free(tmp);
-+ }
- }
-
-
-@@ -1229,6 +1270,25 @@ add:
- return 0;
- }
-
-+static bool should_exclude_dir(const struct cfg *cfg, const char *name)
-+{
-+ struct cfg_exclude *exc;
-+
-+ if (name[0] == '.' && (name[1] == '\0' ||
-+ (name[1] == '.' && name[2] == '\0')))
-+ return true;
-+
-+ if (streq(name, "build") || streq(name, "source"))
-+ return true;
-+
-+ for (exc = cfg->excludes; exc != NULL; exc = exc->next) {
-+ if (streq(name, exc->exclude_dir))
-+ return true;
-+ }
-+
-+ return false;
-+}
-+
- static int depmod_modules_search_dir(struct depmod *depmod, DIR *d, size_t baselen, struct scratchbuf *s_path)
- {
- struct dirent *de;
-@@ -1240,11 +1300,9 @@ static int depmod_modules_search_dir(struct depmod *depmod, DIR *d, size_t basel
- size_t namelen;
- uint8_t is_dir;
-
-- if (name[0] == '.' && (name[1] == '\0' ||
-- (name[1] == '.' && name[2] == '\0')))
-- continue;
-- if (streq(name, "build") || streq(name, "source"))
-+ if (should_exclude_dir(depmod->cfg, name))
- continue;
-+
- namelen = strlen(name);
- if (scratchbuf_alloc(s_path, baselen + namelen + 2) < 0) {
- err = -ENOMEM;
---
-2.31.1
-
diff --git a/meta/recipes-kernel/kmod/kmod/gtkdocdir.patch b/meta/recipes-kernel/kmod/kmod/gtkdocdir.patch
new file mode 100644
index 0000000000..a34ea466e8
--- /dev/null
+++ b/meta/recipes-kernel/kmod/kmod/gtkdocdir.patch
@@ -0,0 +1,33 @@
+From dd59095f70f774f6d1e767010e25b35ef6db4c4b Mon Sep 17 00:00:00 2001
+From: Ross Burton <ross.burton@arm.com>
+Date: Fri, 8 Dec 2023 22:35:45 +0000
+Subject: [PATCH] configure: set docdir in GTK_DOC_CHECK
+
+By passing --docdir in the GTK_DOC_CHECK arguments (to match
+autogen.sh) autoreconf will work out of the box.
+
+Without this autoreconf fails due to the documentation not being in
+./docs, the default location.
+
+Upstream-Status: Submitted [https://lore.kernel.org/linux-modules/20231208224511.1363066-1-ross.burton@arm.com/T/#u]
+Signed-off-by: Ross Burton <ross.burton@arm.com>
+---
+ configure.ac | 2 +-
+ 1 file changed, 1 insertion(+), 1 deletion(-)
+
+diff --git a/configure.ac b/configure.ac
+index de01e08..67696c4 100644
+--- a/configure.ac
++++ b/configure.ac
+@@ -255,7 +255,7 @@ AS_IF([test "x$enable_coverage" = "xyes"], [
+ AM_CONDITIONAL([ENABLE_COVERAGE], [test "x$enable_coverage" = "xyes"])
+
+ m4_ifdef([GTK_DOC_CHECK], [
+-GTK_DOC_CHECK([1.14],[--flavour no-tmpl-flat])
++GTK_DOC_CHECK([1.14],[--flavour no-tmpl-flat --docdir libkmod/docs])
+ ], [
+ AM_CONDITIONAL([ENABLE_GTK_DOC], false)])
+
+--
+2.34.1
+
diff --git a/meta/recipes-kernel/kmod/kmod/ptest.patch b/meta/recipes-kernel/kmod/kmod/ptest.patch
deleted file mode 100644
index 831dbcb909..0000000000
--- a/meta/recipes-kernel/kmod/kmod/ptest.patch
+++ /dev/null
@@ -1,25 +0,0 @@
-Add 'install-ptest' rule.
-
-Signed-off-by: Tudor Florea <tudor.florea@enea.com>
-Upstream-Status: Pending
-
-diff -ruN a/Makefile.am b/Makefile.am
---- a/Makefile.am 2013-07-12 17:11:05.278331557 +0200
-+++ b/Makefile.am 2013-07-12 17:14:27.033788016 +0200
-@@ -204,6 +204,16 @@
-
- distclean-local: $(DISTCLEAN_LOCAL_HOOKS)
-
-+install-ptest:
-+ @$(MKDIR_P) $(DESTDIR)/testsuite
-+ @for file in $(TESTSUITE); do \
-+ install $$file $(DESTDIR)/testsuite; \
-+ done;
-+ @sed -e 's/^Makefile/_Makefile/' < Makefile > $(DESTDIR)/Makefile
-+ @$(MKDIR_P) $(DESTDIR)/tools
-+ @cp $(noinst_SCRIPTS) $(noinst_PROGRAMS) $(DESTDIR)/tools
-+ @cp -r testsuite/rootfs testsuite/.libs $(DESTDIR)/testsuite
-+
- # ------------------------------------------------------------------------------
- # custom release helpers
- # ------------------------------------------------------------------------------
diff --git a/meta/recipes-kernel/kmod/kmod_29.bb b/meta/recipes-kernel/kmod/kmod_29.bb
deleted file mode 100644
index 32dc49c126..0000000000
--- a/meta/recipes-kernel/kmod/kmod_29.bb
+++ /dev/null
@@ -1,90 +0,0 @@
-# Copyright (C) 2012 Khem Raj <raj.khem@gmail.com>
-# Released under the MIT license (see COPYING.MIT for the terms)
-
-SUMMARY = "Tools for managing Linux kernel modules"
-DESCRIPTION = "kmod is a set of tools to handle common tasks with Linux kernel modules like \
- insert, remove, list, check properties, resolve dependencies and aliases."
-HOMEPAGE = "http://kernel.org/pub/linux/utils/kernel/kmod/"
-LICENSE = "GPL-2.0-or-later & LGPL-2.1-or-later"
-LICENSE:libkmod = "LGPL-2.1-or-later"
-SECTION = "base"
-
-LIC_FILES_CHKSUM = "file://COPYING;md5=a6f89e2100d9b6cdffcea4f398e37343 \
- file://libkmod/COPYING;md5=a6f89e2100d9b6cdffcea4f398e37343 \
- file://tools/COPYING;md5=751419260aa954499f7abaabaa882bbe \
- "
-inherit autotools bash-completion gtk-doc pkgconfig manpages update-alternatives
-
-SRCREV = "b6ecfc916a17eab8f93be5b09f4e4f845aabd3d1"
-
-SRC_URI = "git://git.kernel.org/pub/scm/utils/kernel/kmod/kmod.git;branch=master \
- file://depmod-search.conf \
- file://avoid_parallel_tests.patch \
- file://0001-depmod-Add-support-for-excluding-a-directory.patch \
- "
-
-S = "${WORKDIR}/git"
-
-EXTRA_OECONF += "--enable-tools"
-
-PACKAGECONFIG ??= "zlib xz"
-PACKAGECONFIG[debug] = "--enable-debug,--disable-debug"
-PACKAGECONFIG[logging] = " --enable-logging,--disable-logging"
-PACKAGECONFIG[manpages] = "--enable-manpages, --disable-manpages, libxslt-native xmlto-native"
-PACKAGECONFIG[openssl] = "--with-openssl,--without-openssl,openssl"
-PACKAGECONFIG[xz] = "--with-xz,--without-xz,xz"
-PACKAGECONFIG[zlib] = "--with-zlib,--without-zlib,zlib"
-PACKAGECONFIG[zstd] = "--with-zstd,--without-zstd,zstd"
-
-GTKDOC_DOCDIR = "${S}/libkmod/docs"
-
-PROVIDES += "module-init-tools-insmod-static module-init-tools-depmod module-init-tools"
-RPROVIDES:${PN} += "module-init-tools-insmod-static module-init-tools-depmod module-init-tools"
-RCONFLICTS:${PN} += "module-init-tools-insmod-static module-init-tools-depmod module-init-tools"
-RREPLACES:${PN} += "module-init-tools-insmod-static module-init-tools-depmod module-init-tools"
-
-# to force user to remove old module-init-tools and replace them with kmod variants
-RCONFLICTS:libkmod2 += "module-init-tools-insmod-static module-init-tools-depmod module-init-tools"
-
-# autotools set prefix to /usr, however we want them in /bin and /sbin
-EXTRA_OECONF += "--bindir=${base_bindir} --sbindir=${base_sbindir}"
-
-do_install:append () {
- install -dm755 ${D}${base_bindir}
- install -dm755 ${D}${base_sbindir}
- # add symlinks to kmod
- ln -rs ${D}${base_bindir}/kmod ${D}${base_bindir}/lsmod
- for tool in insmod rmmod depmod modinfo modprobe; do
- ln -rs ${D}${base_bindir}/kmod ${D}${base_sbindir}/${tool}
- done
- # configuration directories
- install -dm755 ${D}${nonarch_base_libdir}/depmod.d
- install -dm755 ${D}${nonarch_base_libdir}/modprobe.d
- install -dm755 ${D}${sysconfdir}/depmod.d
- install -dm755 ${D}${sysconfdir}/modprobe.d
-
- # install depmod.d file for search/ dir
- install -Dm644 "${WORKDIR}/depmod-search.conf" "${D}${nonarch_base_libdir}/depmod.d/search.conf"
-
- # Add .debug to the exclude path for depmod
- echo "exclude .debug" > ${D}${nonarch_base_libdir}/depmod.d/exclude.conf
-}
-
-ALTERNATIVE_PRIORITY = "70"
-
-ALTERNATIVE:kmod = "insmod modprobe rmmod modinfo bin-lsmod lsmod depmod"
-
-ALTERNATIVE_LINK_NAME[depmod] = "${base_sbindir}/depmod"
-ALTERNATIVE_LINK_NAME[insmod] = "${base_sbindir}/insmod"
-ALTERNATIVE_LINK_NAME[modprobe] = "${base_sbindir}/modprobe"
-ALTERNATIVE_LINK_NAME[rmmod] = "${base_sbindir}/rmmod"
-ALTERNATIVE_LINK_NAME[modinfo] = "${base_sbindir}/modinfo"
-ALTERNATIVE_LINK_NAME[bin-lsmod] = "${base_bindir}/lsmod"
-ALTERNATIVE_LINK_NAME[lsmod] = "${base_sbindir}/lsmod"
-ALTERNATIVE_TARGET[lsmod] = "${base_bindir}/lsmod.${BPN}"
-
-PACKAGES =+ "libkmod"
-FILES:libkmod = "${base_libdir}/libkmod*${SOLIBS} ${libdir}/libkmod*${SOLIBS}"
-FILES:${PN} += "${nonarch_base_libdir}/depmod.d ${nonarch_base_libdir}/modprobe.d"
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-kernel/kmod/kmod_31.bb b/meta/recipes-kernel/kmod/kmod_31.bb
new file mode 100644
index 0000000000..6ae83a7c66
--- /dev/null
+++ b/meta/recipes-kernel/kmod/kmod_31.bb
@@ -0,0 +1,89 @@
+# Copyright (C) 2012 Khem Raj <raj.khem@gmail.com>
+# Released under the MIT license (see COPYING.MIT for the terms)
+
+SUMMARY = "Tools for managing Linux kernel modules"
+DESCRIPTION = "kmod is a set of tools to handle common tasks with Linux kernel modules like \
+ insert, remove, list, check properties, resolve dependencies and aliases."
+HOMEPAGE = "http://kernel.org/pub/linux/utils/kernel/kmod/"
+LICENSE = "GPL-2.0-or-later & LGPL-2.1-or-later"
+LICENSE:libkmod = "LGPL-2.1-or-later"
+SECTION = "base"
+
+LIC_FILES_CHKSUM = "file://COPYING;md5=a6f89e2100d9b6cdffcea4f398e37343 \
+ file://libkmod/COPYING;md5=a6f89e2100d9b6cdffcea4f398e37343 \
+ file://tools/COPYING;md5=751419260aa954499f7abaabaa882bbe \
+ "
+inherit autotools bash-completion gtk-doc pkgconfig manpages update-alternatives
+
+SRCREV = "aff617ea871d0568cc491bd116c0be1e857463bb"
+
+SRC_URI = "git://git.kernel.org/pub/scm/utils/kernel/kmod/kmod.git;branch=master;protocol=https \
+ file://depmod-search.conf \
+ file://avoid_parallel_tests.patch \
+ file://0001-Use-portable-implementation-for-basename-API.patch \
+ file://gtkdocdir.patch \
+ "
+
+S = "${WORKDIR}/git"
+
+EXTRA_OECONF += "--enable-tools"
+
+PACKAGECONFIG ??= "zlib xz openssl"
+PACKAGECONFIG[debug] = "--enable-debug,--disable-debug"
+PACKAGECONFIG[logging] = " --enable-logging,--disable-logging"
+PACKAGECONFIG[manpages] = "--enable-manpages, --disable-manpages, libxslt-native xmlto-native"
+PACKAGECONFIG[openssl] = "--with-openssl,--without-openssl,openssl"
+PACKAGECONFIG[xz] = "--with-xz,--without-xz,xz"
+PACKAGECONFIG[zlib] = "--with-zlib,--without-zlib,zlib"
+PACKAGECONFIG[zstd] = "--with-zstd,--without-zstd,zstd"
+
+PROVIDES += "module-init-tools-insmod-static module-init-tools-depmod module-init-tools"
+RPROVIDES:${PN} += "module-init-tools-insmod-static module-init-tools-depmod module-init-tools"
+RCONFLICTS:${PN} += "module-init-tools-insmod-static module-init-tools-depmod module-init-tools"
+RREPLACES:${PN} += "module-init-tools-insmod-static module-init-tools-depmod module-init-tools"
+
+# to force user to remove old module-init-tools and replace them with kmod variants
+RCONFLICTS:libkmod2 += "module-init-tools-insmod-static module-init-tools-depmod module-init-tools"
+
+# autotools set prefix to /usr, however we want them in /bin and /sbin
+EXTRA_OECONF += "--bindir=${base_bindir} --sbindir=${base_sbindir}"
+
+do_install:append () {
+ install -dm755 ${D}${base_bindir}
+ install -dm755 ${D}${base_sbindir}
+ # add symlinks to kmod
+ ln -rs ${D}${base_bindir}/kmod ${D}${base_bindir}/lsmod
+ for tool in insmod rmmod depmod modinfo modprobe; do
+ ln -rs ${D}${base_bindir}/kmod ${D}${base_sbindir}/${tool}
+ done
+ # configuration directories
+ install -dm755 ${D}${nonarch_base_libdir}/depmod.d
+ install -dm755 ${D}${nonarch_base_libdir}/modprobe.d
+ install -dm755 ${D}${sysconfdir}/depmod.d
+ install -dm755 ${D}${sysconfdir}/modprobe.d
+
+ # install depmod.d file for search/ dir
+ install -Dm644 "${WORKDIR}/depmod-search.conf" "${D}${nonarch_base_libdir}/depmod.d/search.conf"
+
+ # Add .debug to the exclude path for depmod
+ echo "exclude .debug" > ${D}${nonarch_base_libdir}/depmod.d/exclude.conf
+}
+
+ALTERNATIVE_PRIORITY = "70"
+
+ALTERNATIVE:kmod = "insmod modprobe rmmod modinfo bin-lsmod lsmod depmod"
+
+ALTERNATIVE_LINK_NAME[depmod] = "${base_sbindir}/depmod"
+ALTERNATIVE_LINK_NAME[insmod] = "${base_sbindir}/insmod"
+ALTERNATIVE_LINK_NAME[modprobe] = "${base_sbindir}/modprobe"
+ALTERNATIVE_LINK_NAME[rmmod] = "${base_sbindir}/rmmod"
+ALTERNATIVE_LINK_NAME[modinfo] = "${base_sbindir}/modinfo"
+ALTERNATIVE_LINK_NAME[bin-lsmod] = "${base_bindir}/lsmod"
+ALTERNATIVE_LINK_NAME[lsmod] = "${base_sbindir}/lsmod"
+ALTERNATIVE_TARGET[lsmod] = "${base_bindir}/lsmod.${BPN}"
+
+PACKAGES =+ "libkmod"
+FILES:libkmod = "${base_libdir}/libkmod*${SOLIBS} ${libdir}/libkmod*${SOLIBS}"
+FILES:${PN} += "${nonarch_base_libdir}/depmod.d ${nonarch_base_libdir}/modprobe.d"
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-kernel/libtraceevent/libtraceevent/meson.patch b/meta/recipes-kernel/libtraceevent/libtraceevent/meson.patch
new file mode 100644
index 0000000000..0c21b2347a
--- /dev/null
+++ b/meta/recipes-kernel/libtraceevent/libtraceevent/meson.patch
@@ -0,0 +1,45 @@
+From 7f88c9ba5f27276e844252500a9f0ba2b350b919 Mon Sep 17 00:00:00 2001
+From: Ross Burton <ross.burton@arm.com>
+Date: Sun, 27 Aug 2023 20:57:44 +0100
+Subject: [PATCH] Fixes for the Meson build of libtraceevent:
+
+- Make the plugin directory the same as the Makefiles
+- Install the plugins as modules not static and versioned shared libraries
+
+Upstream-Status: Pending
+Signed-off-by: Ross Burton <ross.burton@arm.com>
+---
+ meson.build | 2 +-
+ plugins/meson.build | 3 +--
+ 2 files changed, 2 insertions(+), 3 deletions(-)
+
+diff --git a/meson.build b/meson.build
+index 40ce38c..1eb7912 100644
+--- a/meson.build
++++ b/meson.build
+@@ -25,7 +25,7 @@ htmldir = join_paths(prefixdir, get_option('htmldir'))
+ libdir = join_paths(prefixdir, get_option('libdir'))
+ plugindir = get_option('plugindir')
+ if plugindir == ''
+- plugindir = join_paths(libdir, 'libtraceevent/plugins')
++ plugindir = join_paths(libdir, 'traceevent/plugins')
+ endif
+
+ add_project_arguments(
+diff --git a/plugins/meson.build b/plugins/meson.build
+index 74ad664..4919be4 100644
+--- a/plugins/meson.build
++++ b/plugins/meson.build
+@@ -19,11 +19,10 @@ plugins = [
+
+ pdeps = []
+ foreach plugin : plugins
+- pdeps += library(
++ pdeps += shared_module(
+ plugin.replace('.c', ''),
+ plugin,
+ name_prefix: '',
+- version: library_version,
+ dependencies: [libtraceevent_dep],
+ include_directories: [incdir],
+ install: true,
diff --git a/meta/recipes-kernel/libtraceevent/libtraceevent_1.8.2.bb b/meta/recipes-kernel/libtraceevent/libtraceevent_1.8.2.bb
new file mode 100644
index 0000000000..d4ace54884
--- /dev/null
+++ b/meta/recipes-kernel/libtraceevent/libtraceevent_1.8.2.bb
@@ -0,0 +1,23 @@
+# Copyright (C) 2022 Khem Raj <raj.khem@gmail.com>
+# Released under the MIT license (see COPYING.MIT for the terms)
+
+SUMMARY = "API to access the kernel tracefs directory"
+HOMEPAGE = "https://git.kernel.org/pub/scm/libs/libtrace/libtracefs.git/"
+LICENSE = "GPL-2.0-or-later & LGPL-2.1-or-later"
+LIC_FILES_CHKSUM = "file://LICENSES/GPL-2.0;md5=e6a75371ba4d16749254a51215d13f97 \
+ file://LICENSES/LGPL-2.1;md5=b370887980db5dd40659b50909238dbd"
+SECTION = "libs"
+
+SRCREV = "6f6d5802f31992e7527a4c32b43a32fda6bf6bdf"
+SRC_URI = "git://git.kernel.org/pub/scm/libs/libtrace/libtraceevent.git;branch=${BPN};protocol=https \
+ file://meson.patch"
+
+S = "${WORKDIR}/git"
+
+inherit meson pkgconfig
+
+EXTRA_OEMESON = "-Ddoc=false"
+
+PACKAGES += "${PN}-plugins"
+
+FILES:${PN}-plugins += "${libdir}/traceevent/plugins"
diff --git a/meta/recipes-kernel/linux-firmware/files/0001-Makefile-replace-mkdir-by-install.patch b/meta/recipes-kernel/linux-firmware/files/0001-Makefile-replace-mkdir-by-install.patch
deleted file mode 100644
index b1ac5a16ab..0000000000
--- a/meta/recipes-kernel/linux-firmware/files/0001-Makefile-replace-mkdir-by-install.patch
+++ /dev/null
@@ -1,84 +0,0 @@
-From 71514e74f35f2b51ca24062573d6d913525b30db Mon Sep 17 00:00:00 2001
-From: Konrad Weihmann <kweihmann@outlook.com>
-Date: Mon, 9 May 2022 12:57:57 +0200
-Subject: [PATCH] Makefile: replace mkdir by install
-
-mkdir -p creates paths that are bound to user's settings and therefore
-can lead to different file mode bits of the base paths accross different
-machines.
-Use install instead, as this tool is not prone to such behavior.
-
-Signed-off-by: Konrad Weihmann <kweihmann@outlook.com>
-Upstream-Status: Submitted [https://lore.kernel.org/linux-firmware/PR2PR09MB310088EA719E6D7CA5C268F1A8C69@PR2PR09MB3100.eurprd09.prod.outlook.com/]
----
- Makefile | 2 +-
- carl9170fw/toolchain/Makefile | 4 ++--
- copy-firmware.sh | 6 +++---
- 3 files changed, 6 insertions(+), 6 deletions(-)
-
-diff --git a/Makefile b/Makefile
-index e1c362f..83a0ec6 100644
---- a/Makefile
-+++ b/Makefile
-@@ -9,5 +9,5 @@ check:
- @./check_whence.py
-
- install:
-- mkdir -p $(DESTDIR)$(FIRMWAREDIR)
-+ install -d $(DESTDIR)$(FIRMWAREDIR)
- ./copy-firmware.sh $(DESTDIR)$(FIRMWAREDIR)
-diff --git a/carl9170fw/toolchain/Makefile b/carl9170fw/toolchain/Makefile
-index 2b25ffe..aaea8e8 100644
---- a/carl9170fw/toolchain/Makefile
-+++ b/carl9170fw/toolchain/Makefile
-@@ -46,14 +46,14 @@ src/gcc-$(GCC_VER): src/$(GCC_TAR) src/newlib-$(NEWLIB_VER)
- ln -s $(BASEDIR)/src/newlib-$(NEWLIB_VER)/libgloss $@
-
- binutils: src/binutils-$(BINUTILS_VER)
-- mkdir -p build/binutils
-+ install -d build/binutils
- cd build/binutils; \
- $(BASEDIR)/$</configure --target=sh-elf --prefix=$(BASEDIR)/inst; \
- $(MAKE) -j3; \
- $(MAKE) install
-
- gcc: src/gcc-$(GCC_VER) binutils
-- mkdir -p build/gcc
-+ install -d build/gcc
- cd build/gcc; \
- $(BASEDIR)/$</configure --target=sh-elf --prefix=$(BASEDIR)/inst -enable-languages=c --without-pkgversion --with-newlib; \
- $(MAKE) -j3; \
-diff --git a/copy-firmware.sh b/copy-firmware.sh
-index 9b46b63..bbacb92 100755
---- a/copy-firmware.sh
-+++ b/copy-firmware.sh
-@@ -34,7 +34,7 @@ done
- grep '^File:' WHENCE | sed -e's/^File: *//g' -e's/"//g' | while read f; do
- test -f "$f" || continue
- $verbose "copying file $f"
-- mkdir -p $destdir/$(dirname "$f")
-+ install -d $destdir/$(dirname "$f")
- cp -d "$f" $destdir/"$f"
- done
-
-@@ -42,7 +42,7 @@ grep -E '^Link:' WHENCE | sed -e's/^Link: *//g' -e's/-> //g' | while read f d; d
- if test -L "$f"; then
- test -f "$destdir/$f" && continue
- $verbose "copying link $f"
-- mkdir -p $destdir/$(dirname "$f")
-+ install -d $destdir/$(dirname "$f")
- cp -d "$f" $destdir/"$f"
-
- if test "x$d" != "x"; then
-@@ -63,7 +63,7 @@ grep -E '^Link:' WHENCE | sed -e's/^Link: *//g' -e's/-> //g' | while read f d; d
- fi
- else
- $verbose "creating link $f -> $d"
-- mkdir -p $destdir/$(dirname "$f")
-+ install -d $destdir/$(dirname "$f")
- ln -sf "$d" "$destdir/$f"
- fi
- done
---
-2.25.1
-
diff --git a/meta/recipes-kernel/linux-firmware/linux-firmware_20220509.bb b/meta/recipes-kernel/linux-firmware/linux-firmware_20220509.bb
deleted file mode 100644
index 6e328e5726..0000000000
--- a/meta/recipes-kernel/linux-firmware/linux-firmware_20220509.bb
+++ /dev/null
@@ -1,1084 +0,0 @@
-SUMMARY = "Firmware files for use with Linux kernel"
-HOMEPAGE = "https://www.kernel.org/"
-DESCRIPTION = "Linux firmware is a package distributed alongside the Linux kernel \
-that contains firmware binary blobs necessary for partial or full functionality \
-of certain hardware devices."
-SECTION = "kernel"
-
-LICENSE = "\
- Firmware-Abilis \
- & Firmware-adsp_sst \
- & Firmware-agere \
- & Firmware-amdgpu \
- & Firmware-amd-ucode \
- & Firmware-amlogic_vdec \
- & Firmware-atheros_firmware \
- & Firmware-atmel \
- & Firmware-broadcom_bcm43xx \
- & Firmware-ca0132 \
- & Firmware-cavium \
- & Firmware-chelsio_firmware \
- & Firmware-cw1200 \
- & Firmware-cypress \
- & Firmware-dib0700 \
- & Firmware-e100 \
- & Firmware-ene_firmware \
- & Firmware-fw_sst_0f28 \
- & Firmware-go7007 \
- & Firmware-GPLv2 \
- & Firmware-hfi1_firmware \
- & Firmware-i2400m \
- & Firmware-i915 \
- & Firmware-ibt_firmware \
- & Firmware-ice \
- & Firmware-it913x \
- & Firmware-iwlwifi_firmware \
- & Firmware-IntcSST2 \
- & Firmware-kaweth \
- & Firmware-Lontium \
- & Firmware-Marvell \
- & Firmware-moxa \
- & Firmware-myri10ge_firmware \
- & Firmware-netronome \
- & Firmware-nvidia \
- & Firmware-OLPC \
- & Firmware-ath9k-htc \
- & Firmware-phanfw \
- & Firmware-qat \
- & Firmware-qcom \
- & Firmware-qla1280 \
- & Firmware-qla2xxx \
- & Firmware-qualcommAthos_ar3k \
- & Firmware-qualcommAthos_ath10k \
- & Firmware-r8a779x_usb3 \
- & Firmware-radeon \
- & Firmware-ralink_a_mediatek_company_firmware \
- & Firmware-ralink-firmware \
- & Firmware-rtlwifi_firmware \
- & Firmware-imx-sdma_firmware \
- & Firmware-siano \
- & Firmware-tda7706-firmware \
- & Firmware-ti-connectivity \
- & Firmware-ti-keystone \
- & Firmware-ueagle-atm4-firmware \
- & Firmware-via_vt6656 \
- & Firmware-wl1251 \
- & Firmware-xc4000 \
- & Firmware-xc5000 \
- & Firmware-xc5000c \
- & WHENCE \
-"
-
-LIC_FILES_CHKSUM = "file://LICENCE.Abilis;md5=b5ee3f410780e56711ad48eadc22b8bc \
- file://LICENCE.adsp_sst;md5=615c45b91a5a4a9fe046d6ab9a2df728 \
- file://LICENCE.agere;md5=af0133de6b4a9b2522defd5f188afd31 \
- file://LICENSE.amdgpu;md5=44c1166d052226cb2d6c8d7400090203 \
- file://LICENSE.amd-ucode;md5=3c5399dc9148d7f0e1f41e34b69cf14f \
- file://LICENSE.amlogic_vdec;md5=dc44f59bf64a81643e500ad3f39a468a \
- file://LICENCE.atheros_firmware;md5=30a14c7823beedac9fa39c64fdd01a13 \
- file://LICENSE.atmel;md5=aa74ac0c60595dee4d4e239107ea77a3 \
- file://LICENCE.broadcom_bcm43xx;md5=3160c14df7228891b868060e1951dfbc \
- file://LICENCE.ca0132;md5=209b33e66ee5be0461f13d31da392198 \
- file://LICENCE.cadence;md5=009f46816f6956cfb75ede13d3e1cee0 \
- file://LICENCE.cavium;md5=c37aaffb1ebe5939b2580d073a95daea \
- file://LICENCE.chelsio_firmware;md5=819aa8c3fa453f1b258ed8d168a9d903 \
- file://LICENCE.cw1200;md5=f0f770864e7a8444a5c5aa9d12a3a7ed \
- file://LICENCE.cypress;md5=48cd9436c763bf873961f9ed7b5c147b \
- file://LICENSE.dib0700;md5=f7411825c8a555a1a3e5eab9ca773431 \
- file://LICENCE.e100;md5=ec0f84136766df159a3ae6d02acdf5a8 \
- file://LICENCE.ene_firmware;md5=ed67f0f62f8f798130c296720b7d3921 \
- file://LICENCE.fw_sst_0f28;md5=6353931c988ad52818ae733ac61cd293 \
- file://LICENCE.go7007;md5=c0bb9f6aaaba55b0529ee9b30aa66beb \
- file://GPL-2;md5=b234ee4d69f5fce4486a80fdaf4a4263 \
- file://LICENSE.hfi1_firmware;md5=5e7b6e586ce7339d12689e49931ad444 \
- file://LICENCE.i2400m;md5=14b901969e23c41881327c0d9e4b7d36 \
- file://LICENSE.i915;md5=2b0b2e0d20984affd4490ba2cba02570 \
- file://LICENCE.ibt_firmware;md5=fdbee1ddfe0fb7ab0b2fcd6b454a366b \
- file://LICENSE.ice;md5=742ab4850f2670792940e6d15c974b2f \
- file://LICENCE.IntcSST2;md5=9e7d8bea77612d7cc7d9e9b54b623062 \
- file://LICENCE.it913x;md5=1fbf727bfb6a949810c4dbfa7e6ce4f8 \
- file://LICENCE.iwlwifi_firmware;md5=2ce6786e0fc11ac6e36b54bb9b799f1b \
- file://LICENCE.kaweth;md5=b1d876e562f4b3b8d391ad8395dfe03f \
- file://LICENSE.Lontium;md5=4ec8dc582ff7295f39e2ca6a7b0be2b6 \
- file://LICENCE.Marvell;md5=28b6ed8bd04ba105af6e4dcd6e997772 \
- file://LICENCE.mediatek;md5=7c1976b63217d76ce47d0a11d8a79cf2 \
- file://LICENCE.moxa;md5=1086614767d8ccf744a923289d3d4261 \
- file://LICENCE.myri10ge_firmware;md5=42e32fb89f6b959ca222e25ac8df8fed \
- file://LICENCE.Netronome;md5=4add08f2577086d44447996503cddf5f \
- file://LICENCE.nvidia;md5=4428a922ed3ba2ceec95f076a488ce07 \
- file://LICENCE.NXP;md5=58bb8ba632cd729b9ba6183bc6aed36f \
- file://LICENCE.OLPC;md5=5b917f9d8c061991be4f6f5f108719cd \
- file://LICENCE.open-ath9k-htc-firmware;md5=1b33c9f4d17bc4d457bdb23727046837 \
- file://LICENCE.phanfw;md5=954dcec0e051f9409812b561ea743bfa \
- file://LICENCE.qat_firmware;md5=9e7d8bea77612d7cc7d9e9b54b623062 \
- file://LICENSE.qcom;md5=164e3362a538eb11d3ac51e8e134294b \
- file://LICENCE.qla1280;md5=d6895732e622d950609093223a2c4f5d \
- file://LICENCE.qla2xxx;md5=505855e921b75f1be4a437ad9b79dff0 \
- file://LICENSE.QualcommAtheros_ar3k;md5=b5fe244fb2b532311de1472a3bc06da5 \
- file://LICENSE.QualcommAtheros_ath10k;md5=cb42b686ee5f5cb890275e4321db60a8 \
- file://LICENCE.r8a779x_usb3;md5=4c1671656153025d7076105a5da7e498 \
- file://LICENSE.radeon;md5=68ec28bacb3613200bca44f404c69b16 \
- file://LICENCE.ralink_a_mediatek_company_firmware;md5=728f1a85fd53fd67fa8d7afb080bc435 \
- file://LICENCE.ralink-firmware.txt;md5=ab2c269277c45476fb449673911a2dfd \
- file://LICENCE.rtlwifi_firmware.txt;md5=00d06cfd3eddd5a2698948ead2ad54a5 \
- file://LICENSE.sdma_firmware;md5=51e8c19ecc2270f4b8ea30341ad63ce9 \
- file://LICENCE.siano;md5=4556c1bf830067f12ca151ad953ec2a5 \
- file://LICENCE.tda7706-firmware.txt;md5=835997cf5e3c131d0dddd695c7d9103e \
- file://LICENCE.ti-connectivity;md5=c5e02be633f1499c109d1652514d85ec \
- file://LICENCE.ti-keystone;md5=3a86335d32864b0bef996bee26cc0f2c \
- file://LICENCE.ueagle-atm4-firmware;md5=4ed7ea6b507ccc583b9d594417714118 \
- file://LICENCE.via_vt6656;md5=e4159694cba42d4377a912e78a6e850f \
- file://LICENCE.wl1251;md5=ad3f81922bb9e197014bb187289d3b5b \
- file://LICENCE.xc4000;md5=0ff51d2dc49fce04814c9155081092f0 \
- file://LICENCE.xc5000;md5=1e170c13175323c32c7f4d0998d53f66 \
- file://LICENCE.xc5000c;md5=12b02efa3049db65d524aeb418dd87ca \
- file://WHENCE;md5=d3eb82686904888f8bbbe8d865371404 \
- "
-
-# These are not common licenses, set NO_GENERIC_LICENSE for them
-# so that the license files will be copied from fetched source
-NO_GENERIC_LICENSE[Firmware-Abilis] = "LICENCE.Abilis"
-NO_GENERIC_LICENSE[Firmware-adsp_sst] = "LICENCE.adsp_sst"
-NO_GENERIC_LICENSE[Firmware-agere] = "LICENCE.agere"
-NO_GENERIC_LICENSE[Firmware-amdgpu] = "LICENSE.amdgpu"
-NO_GENERIC_LICENSE[Firmware-amd-ucode] = "LICENSE.amd-ucode"
-NO_GENERIC_LICENSE[Firmware-amlogic_vdec] = "LICENSE.amlogic_vdec"
-NO_GENERIC_LICENSE[Firmware-atheros_firmware] = "LICENCE.atheros_firmware"
-NO_GENERIC_LICENSE[Firmware-atmel] = "LICENSE.atmel"
-NO_GENERIC_LICENSE[Firmware-broadcom_bcm43xx] = "LICENCE.broadcom_bcm43xx"
-NO_GENERIC_LICENSE[Firmware-ca0132] = "LICENCE.ca0132"
-NO_GENERIC_LICENSE[Firmware-cadence] = "LICENCE.cadence"
-NO_GENERIC_LICENSE[Firmware-cavium] = "LICENCE.cavium"
-NO_GENERIC_LICENSE[Firmware-chelsio_firmware] = "LICENCE.chelsio_firmware"
-NO_GENERIC_LICENSE[Firmware-cw1200] = "LICENCE.cw1200"
-NO_GENERIC_LICENSE[Firmware-cypress] = "LICENCE.cypress"
-NO_GENERIC_LICENSE[Firmware-dib0700] = "LICENSE.dib0700"
-NO_GENERIC_LICENSE[Firmware-e100] = "LICENCE.e100"
-NO_GENERIC_LICENSE[Firmware-ene_firmware] = "LICENCE.ene_firmware"
-NO_GENERIC_LICENSE[Firmware-fw_sst_0f28] = "LICENCE.fw_sst_0f28"
-NO_GENERIC_LICENSE[Firmware-go7007] = "LICENCE.go7007"
-NO_GENERIC_LICENSE[Firmware-GPLv2] = "GPL-2"
-NO_GENERIC_LICENSE[Firmware-hfi1_firmware] = "LICENSE.hfi1_firmware"
-NO_GENERIC_LICENSE[Firmware-i2400m] = "LICENCE.i2400m"
-NO_GENERIC_LICENSE[Firmware-i915] = "LICENSE.i915"
-NO_GENERIC_LICENSE[Firmware-ibt_firmware] = "LICENCE.ibt_firmware"
-NO_GENERIC_LICENSE[Firmware-ice] = "LICENSE.ice"
-NO_GENERIC_LICENSE[Firmware-IntcSST2] = "LICENCE.IntcSST2"
-NO_GENERIC_LICENSE[Firmware-it913x] = "LICENCE.it913x"
-NO_GENERIC_LICENSE[Firmware-iwlwifi_firmware] = "LICENCE.iwlwifi_firmware"
-NO_GENERIC_LICENSE[Firmware-kaweth] = "LICENCE.kaweth"
-NO_GENERIC_LICENSE[Firmware-Lontium] = "LICENSE.Lontium"
-NO_GENERIC_LICENSE[Firmware-Marvell] = "LICENCE.Marvell"
-NO_GENERIC_LICENSE[Firmware-mediatek] = "LICENCE.mediatek"
-NO_GENERIC_LICENSE[Firmware-moxa] = "LICENCE.moxa"
-NO_GENERIC_LICENSE[Firmware-myri10ge_firmware] = "LICENCE.myri10ge_firmware"
-NO_GENERIC_LICENSE[Firmware-netronome] = "LICENCE.Netronome"
-NO_GENERIC_LICENSE[Firmware-nvidia] = "LICENCE.nvidia"
-NO_GENERIC_LICENSE[Firmware-OLPC] = "LICENCE.OLPC"
-NO_GENERIC_LICENSE[Firmware-ath9k-htc] = "LICENCE.open-ath9k-htc-firmware"
-NO_GENERIC_LICENSE[Firmware-phanfw] = "LICENCE.phanfw"
-NO_GENERIC_LICENSE[Firmware-qat] = "LICENCE.qat_firmware"
-NO_GENERIC_LICENSE[Firmware-qcom] = "LICENSE.qcom"
-NO_GENERIC_LICENSE[Firmware-qla1280] = "LICENCE.qla1280"
-NO_GENERIC_LICENSE[Firmware-qla2xxx] = "LICENCE.qla2xxx"
-NO_GENERIC_LICENSE[Firmware-qualcommAthos_ar3k] = "LICENSE.QualcommAtheros_ar3k"
-NO_GENERIC_LICENSE[Firmware-qualcommAthos_ath10k] = "LICENSE.QualcommAtheros_ath10k"
-NO_GENERIC_LICENSE[Firmware-r8a779x_usb3] = "LICENCE.r8a779x_usb3"
-NO_GENERIC_LICENSE[Firmware-radeon] = "LICENSE.radeon"
-NO_GENERIC_LICENSE[Firmware-ralink_a_mediatek_company_firmware] = "LICENCE.ralink_a_mediatek_company_firmware"
-NO_GENERIC_LICENSE[Firmware-ralink-firmware] = "LICENCE.ralink-firmware.txt"
-NO_GENERIC_LICENSE[Firmware-rtlwifi_firmware] = "LICENCE.rtlwifi_firmware.txt"
-NO_GENERIC_LICENSE[Firmware-siano] = "LICENCE.siano"
-NO_GENERIC_LICENSE[Firmware-imx-sdma_firmware] = "LICENSE.sdma_firmware"
-NO_GENERIC_LICENSE[Firmware-tda7706-firmware] = "LICENCE.tda7706-firmware.txt"
-NO_GENERIC_LICENSE[Firmware-ti-connectivity] = "LICENCE.ti-connectivity"
-NO_GENERIC_LICENSE[Firmware-ti-keystone] = "LICENCE.ti-keystone"
-NO_GENERIC_LICENSE[Firmware-ueagle-atm4-firmware] = "LICENCE.ueagle-atm4-firmware"
-NO_GENERIC_LICENSE[Firmware-via_vt6656] = "LICENCE.via_vt6656"
-NO_GENERIC_LICENSE[Firmware-wl1251] = "LICENCE.wl1251"
-NO_GENERIC_LICENSE[Firmware-xc4000] = "LICENCE.xc4000"
-NO_GENERIC_LICENSE[Firmware-xc5000] = "LICENCE.xc5000"
-NO_GENERIC_LICENSE[Firmware-xc5000c] = "LICENCE.xc5000c"
-NO_GENERIC_LICENSE[WHENCE] = "WHENCE"
-
-PE = "1"
-
-SRC_URI = "\
- ${KERNELORG_MIRROR}/linux/kernel/firmware/${BPN}-${PV}.tar.xz \
- file://0001-Makefile-replace-mkdir-by-install.patch \
-"
-
-SRC_URI[sha256sum] = "376e0b3d7b4f8aaa2abf7f5ab74803dcf14b06b94e3d841b1467cd9a2848255e"
-
-inherit allarch
-
-CLEANBROKEN = "1"
-
-do_compile() {
- :
-}
-
-do_install() {
- oe_runmake 'DESTDIR=${D}' 'FIRMWAREDIR=${nonarch_base_libdir}/firmware' install
- cp GPL-2 LICEN[CS]E.* WHENCE ${D}${nonarch_base_libdir}/firmware/
-}
-
-
-PACKAGES =+ "${PN}-ralink-license ${PN}-ralink \
- ${PN}-mt7601u-license ${PN}-mt7601u \
- ${PN}-radeon-license ${PN}-radeon \
- ${PN}-marvell-license ${PN}-pcie8897 ${PN}-pcie8997 \
- ${PN}-sd8686 ${PN}-sd8688 ${PN}-sd8787 ${PN}-sd8797 ${PN}-sd8801 \
- ${PN}-sd8887 ${PN}-sd8897 ${PN}-sd8997 ${PN}-usb8997 \
- ${PN}-ti-connectivity-license ${PN}-wlcommon ${PN}-wl12xx ${PN}-wl18xx \
- ${PN}-vt6656-license ${PN}-vt6656 \
- ${PN}-rs9113 ${PN}-rs9116 \
- ${PN}-rtl-license ${PN}-rtl8188 ${PN}-rtl8192cu ${PN}-rtl8192ce ${PN}-rtl8192su ${PN}-rtl8723 ${PN}-rtl8821 \
- ${PN}-rtl8168 \
- ${PN}-cypress-license \
- ${PN}-broadcom-license \
- ${PN}-bcm-0bb4-0306 \
- ${PN}-bcm43143 \
- ${PN}-bcm43236b \
- ${PN}-bcm43241b0 \
- ${PN}-bcm43241b4 \
- ${PN}-bcm43241b5 \
- ${PN}-bcm43242a \
- ${PN}-bcm4329 \
- ${PN}-bcm4329-fullmac \
- ${PN}-bcm4330 \
- ${PN}-bcm4334 \
- ${PN}-bcm43340 \
- ${PN}-bcm4335 \
- ${PN}-bcm43362 \
- ${PN}-bcm4339 \
- ${PN}-bcm43430 \
- ${PN}-bcm43430a0 \
- ${PN}-bcm43455 \
- ${PN}-bcm4350 \
- ${PN}-bcm4350c2 \
- ${PN}-bcm4354 \
- ${PN}-bcm4356 \
- ${PN}-bcm4356-pcie \
- ${PN}-bcm43569 \
- ${PN}-bcm43570 \
- ${PN}-bcm4358 \
- ${PN}-bcm43602 \
- ${PN}-bcm4366b \
- ${PN}-bcm4366c \
- ${PN}-bcm4371 \
- ${PN}-bcm4373 \
- ${PN}-bcm43xx \
- ${PN}-bcm43xx-hdr \
- ${PN}-atheros-license ${PN}-ar9170 ${PN}-ath6k ${PN}-ath9k ${PN}-ath3k \
- ${PN}-gplv2-license ${PN}-carl9170 \
- ${PN}-ar3k-license ${PN}-ar3k ${PN}-ath10k-license ${PN}-ath10k ${PN}-ath11k ${PN}-qca \
- \
- ${PN}-imx-sdma-license ${PN}-imx-sdma-imx6q ${PN}-imx-sdma-imx7d \
- \
- ${PN}-iwlwifi-license ${PN}-iwlwifi \
- ${PN}-iwlwifi-135-6 \
- ${PN}-iwlwifi-3160-7 ${PN}-iwlwifi-3160-8 ${PN}-iwlwifi-3160-9 \
- ${PN}-iwlwifi-3160-10 ${PN}-iwlwifi-3160-12 ${PN}-iwlwifi-3160-13 \
- ${PN}-iwlwifi-3160-16 ${PN}-iwlwifi-3160-17 \
- ${PN}-iwlwifi-6000-4 ${PN}-iwlwifi-6000g2a-5 ${PN}-iwlwifi-6000g2a-6 \
- ${PN}-iwlwifi-6000g2b-5 ${PN}-iwlwifi-6000g2b-6 \
- ${PN}-iwlwifi-6050-4 ${PN}-iwlwifi-6050-5 \
- ${PN}-iwlwifi-7260 \
- ${PN}-iwlwifi-7265 \
- ${PN}-iwlwifi-7265d ${PN}-iwlwifi-8000c ${PN}-iwlwifi-8265 \
- ${PN}-iwlwifi-9000 \
- ${PN}-iwlwifi-misc \
- ${PN}-ibt-license ${PN}-ibt \
- ${PN}-ibt-11-5 ${PN}-ibt-12-16 ${PN}-ibt-hw-37-7 ${PN}-ibt-hw-37-8 \
- ${PN}-ibt-17 \
- ${PN}-ibt-20 \
- ${PN}-ibt-misc \
- ${PN}-i915-license ${PN}-i915 \
- ${PN}-ice-license ${PN}-ice \
- ${PN}-adsp-sst-license ${PN}-adsp-sst \
- ${PN}-bnx2-mips \
- ${PN}-liquidio \
- ${PN}-nvidia-license \
- ${PN}-nvidia-tegra-k1 ${PN}-nvidia-tegra \
- ${PN}-nvidia-gpu \
- ${PN}-netronome-license ${PN}-netronome \
- ${PN}-qat ${PN}-qat-license \
- ${PN}-qcom-license \
- ${PN}-qcom-venus-1.8 ${PN}-qcom-venus-4.2 ${PN}-qcom-venus-5.2 ${PN}-qcom-venus-5.4 \
- ${PN}-qcom-vpu-1.0 ${PN}-qcom-vpu-2.0 \
- ${PN}-qcom-adreno-a2xx ${PN}-qcom-adreno-a3xx ${PN}-qcom-adreno-a4xx ${PN}-qcom-adreno-a530 \
- ${PN}-qcom-adreno-a630 ${PN}-qcom-adreno-a650 ${PN}-qcom-adreno-a660 \
- ${PN}-qcom-apq8096-audio ${PN}-qcom-apq8096-modem \
- ${PN}-qcom-sdm845-audio ${PN}-qcom-sdm845-compute ${PN}-qcom-sdm845-modem \
- ${PN}-qcom-sm8250-audio ${PN}-qcom-sm8250-compute \
- ${PN}-amlogic-vdec-license ${PN}-amlogic-vdec \
- ${PN}-lt9611uxc ${PN}-lontium-license \
- ${PN}-whence-license \
- ${PN}-license \
- "
-
-# For atheros
-LICENSE:${PN}-ar9170 = "Firmware-atheros_firmware"
-LICENSE:${PN}-ath3k = "Firmware-atheros_firmware"
-LICENSE:${PN}-ath6k = "Firmware-atheros_firmware"
-LICENSE:${PN}-ath9k = "Firmware-atheros_firmware"
-LICENSE:${PN}-atheros-license = "Firmware-atheros_firmware"
-
-FILES:${PN}-atheros-license = "${nonarch_base_libdir}/firmware/LICENCE.atheros_firmware"
-FILES:${PN}-ar9170 = " \
- ${nonarch_base_libdir}/firmware/ar9170*.fw \
-"
-FILES:${PN}-ath3k = " \
- ${nonarch_base_libdir}/firmware/ath3k*fw \
-"
-FILES:${PN}-ath6k = " \
- ${nonarch_base_libdir}/firmware/ath6k \
-"
-FILES:${PN}-ath9k = " \
- ${nonarch_base_libdir}/firmware/ar9271.fw \
- ${nonarch_base_libdir}/firmware/ar7010*.fw \
- ${nonarch_base_libdir}/firmware/htc_9271.fw \
- ${nonarch_base_libdir}/firmware/htc_7010.fw \
- ${nonarch_base_libdir}/firmware/ath9k_htc/htc_7010-1.4.0.fw \
- ${nonarch_base_libdir}/firmware/ath9k_htc/htc_9271-1.4.0.fw \
-"
-
-RDEPENDS:${PN}-ar9170 += "${PN}-atheros-license"
-RDEPENDS:${PN}-ath6k += "${PN}-atheros-license"
-RDEPENDS:${PN}-ath9k += "${PN}-atheros-license"
-
-# For carl9170
-LICENSE:${PN}-carl9170 = "Firmware-GPLv2"
-LICENSE:${PN}-gplv2-license = "Firmware-GPLv2"
-
-FILES:${PN}-gplv2-license = "${nonarch_base_libdir}/firmware/GPL-2"
-FILES:${PN}-carl9170 = " \
- ${nonarch_base_libdir}/firmware/carl9170*.fw \
-"
-
-RDEPENDS:${PN}-carl9170 += "${PN}-gplv2-license"
-
-# For QualCommAthos
-LICENSE:${PN}-ar3k = "Firmware-qualcommAthos_ar3k & Firmware-atheros_firmware"
-LICENSE:${PN}-ar3k-license = "Firmware-qualcommAthos_ar3k"
-LICENSE:${PN}-ath10k = "Firmware-qualcommAthos_ath10k"
-LICENSE:${PN}-ath10k-license = "Firmware-qualcommAthos_ath10k"
-LICENSE:${PN}-qca = "Firmware-qualcommAthos_ath10k"
-
-FILES:${PN}-ar3k-license = "${nonarch_base_libdir}/firmware/LICENSE.QualcommAtheros_ar3k"
-FILES:${PN}-ar3k = " \
- ${nonarch_base_libdir}/firmware/ar3k \
-"
-
-FILES:${PN}-ath10k-license = "${nonarch_base_libdir}/firmware/LICENSE.QualcommAtheros_ath10k"
-FILES:${PN}-ath10k = " \
- ${nonarch_base_libdir}/firmware/ath10k \
-"
-
-FILES:${PN}-ath11k = " \
- ${nonarch_base_libdir}/firmware/ath11k \
-"
-
-FILES:${PN}-qca = " \
- ${nonarch_base_libdir}/firmware/qca \
-"
-
-RDEPENDS:${PN}-ar3k += "${PN}-ar3k-license ${PN}-atheros-license"
-RDEPENDS:${PN}-ath10k += "${PN}-ath10k-license"
-RDEPENDS:${PN}-ath11k += "${PN}-ath10k-license"
-RDEPENDS:${PN}-qca += "${PN}-ath10k-license"
-
-# For ralink
-LICENSE:${PN}-ralink = "Firmware-ralink-firmware"
-LICENSE:${PN}-ralink-license = "Firmware-ralink-firmware"
-
-FILES:${PN}-ralink-license = "${nonarch_base_libdir}/firmware/LICENCE.ralink-firmware.txt"
-FILES:${PN}-ralink = " \
- ${nonarch_base_libdir}/firmware/rt*.bin \
-"
-
-RDEPENDS:${PN}-ralink += "${PN}-ralink-license"
-
-# For mediatek MT7601U
-LICENSE:${PN}-mt7601u = "Firmware-ralink_a_mediatek_company_firmware"
-LICENSE:${PN}-mt7601u-license = "Firmware-ralink_a_mediatek_company_firmware"
-
-FILES:${PN}-mt7601u-license = "${nonarch_base_libdir}/firmware/LICENCE.ralink_a_mediatek_company_firmware"
-FILES:${PN}-mt7601u = " \
- ${nonarch_base_libdir}/firmware/mt7601u.bin \
-"
-
-RDEPENDS:${PN}-mt7601u += "${PN}-mt7601u-license"
-
-# For radeon
-LICENSE:${PN}-radeon = "Firmware-radeon"
-LICENSE:${PN}-radeon-license = "Firmware-radeon"
-
-FILES:${PN}-radeon-license = "${nonarch_base_libdir}/firmware/LICENSE.radeon"
-FILES:${PN}-radeon = " \
- ${nonarch_base_libdir}/firmware/radeon \
-"
-
-RDEPENDS:${PN}-radeon += "${PN}-radeon-license"
-
-# For lontium
-LICENSE:${PN}-lt9611uxc = "Firmware-Lontium"
-
-FILES:${PN}-lontium-license = "${nonarch_base_libdir}/firmware/LICENSE.Lontium"
-FILES:${PN}-lt9611uxc = "${nonarch_base_libdir}/firmware/lt9611uxc_fw.bin"
-
-# For marvell
-LICENSE:${PN}-pcie8897 = "Firmware-Marvell"
-LICENSE:${PN}-pcie8997 = "Firmware-Marvell"
-LICENSE:${PN}-sd8686 = "Firmware-Marvell"
-LICENSE:${PN}-sd8688 = "Firmware-Marvell"
-LICENSE:${PN}-sd8787 = "Firmware-Marvell"
-LICENSE:${PN}-sd8797 = "Firmware-Marvell"
-LICENSE:${PN}-sd8801 = "Firmware-Marvell"
-LICENSE:${PN}-sd8887 = "Firmware-Marvell"
-LICENSE:${PN}-sd8897 = "Firmware-Marvell"
-LICENSE:${PN}-sd8997 = "Firmware-Marvell"
-LICENSE:${PN}-usb8997 = "Firmware-Marvell"
-LICENSE:${PN}-marvell-license = "Firmware-Marvell"
-
-FILES:${PN}-marvell-license = "${nonarch_base_libdir}/firmware/LICENCE.Marvell"
-FILES:${PN}-pcie8897 = " \
- ${nonarch_base_libdir}/firmware/mrvl/pcie8897_uapsta.bin \
-"
-FILES:${PN}-pcie8997 = " \
- ${nonarch_base_libdir}/firmware/mrvl/pcie8997_wlan_v4.bin \
- ${nonarch_base_libdir}/firmware/mrvl/pcieuart8997_combo_v4.bin \
- ${nonarch_base_libdir}/firmware/mrvl/pcieusb8997_combo_v4.bin \
-"
-FILES:${PN}-sd8686 = " \
- ${nonarch_base_libdir}/firmware/libertas/sd8686_v9* \
- ${nonarch_base_libdir}/firmware/sd8686* \
-"
-FILES:${PN}-sd8688 = " \
- ${nonarch_base_libdir}/firmware/libertas/sd8688* \
- ${nonarch_base_libdir}/firmware/mrvl/sd8688* \
-"
-FILES:${PN}-sd8787 = " \
- ${nonarch_base_libdir}/firmware/mrvl/sd8787_uapsta.bin \
-"
-FILES:${PN}-sd8797 = " \
- ${nonarch_base_libdir}/firmware/mrvl/sd8797_uapsta.bin \
-"
-FILES:${PN}-sd8801 = " \
- ${nonarch_base_libdir}/firmware/mrvl/sd8801_uapsta.bin \
-"
-FILES:${PN}-sd8887 = " \
- ${nonarch_base_libdir}/firmware/mrvl/sd8887_uapsta.bin \
-"
-FILES:${PN}-sd8897 = " \
- ${nonarch_base_libdir}/firmware/mrvl/sd8897_uapsta.bin \
-"
-do_install:append() {
- # The kernel 5.6.x driver still uses the old name, provide a symlink for
- # older kernels
- ln -fs sdsd8997_combo_v4.bin ${D}${nonarch_base_libdir}/firmware/mrvl/sd8997_uapsta.bin
-}
-FILES:${PN}-sd8997 = " \
- ${nonarch_base_libdir}/firmware/mrvl/sd8997_uapsta.bin \
- ${nonarch_base_libdir}/firmware/mrvl/sdsd8997_combo_v4.bin \
-"
-FILES:${PN}-usb8997 = " \
- ${nonarch_base_libdir}/firmware/mrvl/usbusb8997_combo_v4.bin \
-"
-
-RDEPENDS:${PN}-sd8686 += "${PN}-marvell-license"
-RDEPENDS:${PN}-sd8688 += "${PN}-marvell-license"
-RDEPENDS:${PN}-sd8787 += "${PN}-marvell-license"
-RDEPENDS:${PN}-sd8797 += "${PN}-marvell-license"
-RDEPENDS:${PN}-sd8801 += "${PN}-marvell-license"
-RDEPENDS:${PN}-sd8887 += "${PN}-marvell-license"
-RDEPENDS:${PN}-sd8897 += "${PN}-marvell-license"
-RDEPENDS:${PN}-sd8997 += "${PN}-marvell-license"
-RDEPENDS:${PN}-usb8997 += "${PN}-marvell-license"
-
-# For netronome
-LICENSE:${PN}-netronome = "Firmware-netronome"
-
-FILES:${PN}-netronome-license = " \
- ${nonarch_base_libdir}/firmware/LICENCE.Netronome \
-"
-FILES:${PN}-netronome = " \
- ${nonarch_base_libdir}/firmware/netronome/nic_AMDA0081*.nffw \
- ${nonarch_base_libdir}/firmware/netronome/nic_AMDA0096*.nffw \
- ${nonarch_base_libdir}/firmware/netronome/nic_AMDA0097*.nffw \
- ${nonarch_base_libdir}/firmware/netronome/nic_AMDA0099*.nffw \
- ${nonarch_base_libdir}/firmware/netronome/nic_AMDA0058-0011_2x40.nffw \
- ${nonarch_base_libdir}/firmware/netronome/nic_AMDA0058-0012_2x40.nffw \
- ${nonarch_base_libdir}/firmware/netronome/nic_AMDA0078-0011_1x100.nffw \
- ${nonarch_base_libdir}/firmware/netronome/bpf \
- ${nonarch_base_libdir}/firmware/netronome/flower \
- ${nonarch_base_libdir}/firmware/netronome/nic \
- ${nonarch_base_libdir}/firmware/netronome/nic-sriov \
-"
-
-RDEPENDS:${PN}-netronome += "${PN}-netronome-license"
-
-# For Nvidia
-LICENSE:${PN}-nvidia-gpu = "Firmware-nvidia"
-LICENSE:${PN}-nvidia-tegra = "Firmware-nvidia"
-LICENSE:${PN}-nvidia-tegra-k1 = "Firmware-nvidia"
-LICENSE:${PN}-nvidia-license = "Firmware-nvidia"
-
-FILES:${PN}-nvidia-gpu = "${nonarch_base_libdir}/firmware/nvidia"
-FILES:${PN}-nvidia-tegra = " \
- ${nonarch_base_libdir}/firmware/nvidia/tegra* \
- ${nonarch_base_libdir}/firmware/nvidia/gm20b \
- ${nonarch_base_libdir}/firmware/nvidia/gp10b \
-"
-FILES:${PN}-nvidia-tegra-k1 = " \
- ${nonarch_base_libdir}/firmware/nvidia/tegra124 \
- ${nonarch_base_libdir}/firmware/nvidia/gk20a \
-"
-FILES:${PN}-nvidia-license = "${nonarch_base_libdir}/firmware/LICENCE.nvidia"
-
-RDEPENDS:${PN}-nvidia-gpu += "${PN}-nvidia-license"
-RDEPENDS:${PN}-nvidia-tegra += "${PN}-nvidia-license"
-RDEPENDS:${PN}-nvidia-tegra-k1 += "${PN}-nvidia-license"
-
-# For RSI RS911x WiFi
-LICENSE:${PN}-rs9113 = "WHENCE"
-LICENSE:${PN}-rs9116 = "WHENCE"
-
-FILES:${PN}-rs9113 = " ${nonarch_base_libdir}/firmware/rsi/rs9113*.rps "
-FILES:${PN}-rs9116 = " ${nonarch_base_libdir}/firmware/rsi/rs9116*.rps "
-
-RDEPENDS:${PN}-rs9113 += "${PN}-whence-license"
-RDEPENDS:${PN}-rs9116 += "${PN}-whence-license"
-
-# For rtl
-LICENSE:${PN}-rtl8188 = "Firmware-rtlwifi_firmware"
-LICENSE:${PN}-rtl8192cu = "Firmware-rtlwifi_firmware"
-LICENSE:${PN}-rtl8192ce = "Firmware-rtlwifi_firmware"
-LICENSE:${PN}-rtl8192su = "Firmware-rtlwifi_firmware"
-LICENSE:${PN}-rtl8723 = "Firmware-rtlwifi_firmware"
-LICENSE:${PN}-rtl8821 = "Firmware-rtlwifi_firmware"
-LICENSE:${PN}-rtl-license = "Firmware-rtlwifi_firmware"
-LICENSE:${PN}-rtl8168 = "WHENCE"
-
-FILES:${PN}-rtl-license = " \
- ${nonarch_base_libdir}/firmware/LICENCE.rtlwifi_firmware.txt \
-"
-FILES:${PN}-rtl8188 = " \
- ${nonarch_base_libdir}/firmware/rtlwifi/rtl8188*.bin \
-"
-FILES:${PN}-rtl8192cu = " \
- ${nonarch_base_libdir}/firmware/rtlwifi/rtl8192cufw*.bin \
-"
-FILES:${PN}-rtl8192ce = " \
- ${nonarch_base_libdir}/firmware/rtlwifi/rtl8192cfw*.bin \
-"
-FILES:${PN}-rtl8192su = " \
- ${nonarch_base_libdir}/firmware/rtlwifi/rtl8712u.bin \
-"
-FILES:${PN}-rtl8723 = " \
- ${nonarch_base_libdir}/firmware/rtlwifi/rtl8723*.bin \
-"
-FILES:${PN}-rtl8821 = " \
- ${nonarch_base_libdir}/firmware/rtlwifi/rtl8821*.bin \
-"
-FILES:${PN}-rtl8168 = " \
- ${nonarch_base_libdir}/firmware/rtl_nic/rtl8168*.fw \
-"
-
-RDEPENDS:${PN}-rtl8188 += "${PN}-rtl-license"
-RDEPENDS:${PN}-rtl8192ce += "${PN}-rtl-license"
-RDEPENDS:${PN}-rtl8192cu += "${PN}-rtl-license"
-RDEPENDS:${PN}-rtl8192su = "${PN}-rtl-license"
-RDEPENDS:${PN}-rtl8723 += "${PN}-rtl-license"
-RDEPENDS:${PN}-rtl8821 += "${PN}-rtl-license"
-RDEPENDS:${PN}-rtl8168 += "${PN}-whence-license"
-
-# For ti-connectivity
-LICENSE:${PN}-wlcommon = "Firmware-ti-connectivity"
-LICENSE:${PN}-wl12xx = "Firmware-ti-connectivity"
-LICENSE:${PN}-wl18xx = "Firmware-ti-connectivity"
-LICENSE:${PN}-ti-connectivity-license = "Firmware-ti-connectivity"
-
-FILES:${PN}-ti-connectivity-license = "${nonarch_base_libdir}/firmware/LICENCE.ti-connectivity"
-# wl18xx optionally needs wl1271-nvs.bin (which itself is a symlink to
-# wl127x-nvs.bin) - see linux/drivers/net/wireless/ti/wlcore/sdio.c
-# and drivers/net/wireless/ti/wlcore/spi.c.
-# While they're optional and actually only used to override the MAC
-# address on wl18xx, driver loading will delay (by udev timout - 60s)
-# if not there. So let's make it available always. Because it's a
-# symlink, both need to go to wlcommon.
-FILES:${PN}-wlcommon = " \
- ${nonarch_base_libdir}/firmware/ti-connectivity/TI* \
- ${nonarch_base_libdir}/firmware/ti-connectivity/wl127x-nvs.bin \
- ${nonarch_base_libdir}/firmware/ti-connectivity/wl1271-nvs.bin \
-"
-FILES:${PN}-wl12xx = " \
- ${nonarch_base_libdir}/firmware/ti-connectivity/wl12* \
-"
-FILES:${PN}-wl18xx = " \
- ${nonarch_base_libdir}/firmware/ti-connectivity/wl18* \
-"
-
-RDEPENDS:${PN}-wl12xx = "${PN}-ti-connectivity-license ${PN}-wlcommon"
-RDEPENDS:${PN}-wl18xx = "${PN}-ti-connectivity-license ${PN}-wlcommon"
-
-# For vt6656
-LICENSE:${PN}-vt6656 = "Firmware-via_vt6656"
-LICENSE:${PN}-vt6656-license = "Firmware-via_vt6656"
-
-FILES:${PN}-vt6656-license = "${nonarch_base_libdir}/firmware/LICENCE.via_vt6656"
-FILES:${PN}-vt6656 = " \
- ${nonarch_base_libdir}/firmware/vntwusb.fw \
-"
-
-RDEPENDS:${PN}-vt6656 = "${PN}-vt6656-license"
-
-# For broadcom
-
-# for i in `grep brcm WHENCE | grep ^File | sed 's/File: brcm.//g'`; do pkg=`echo $i | sed 's/-[sp40].*//g; s/\.bin//g; s/brcmfmac/bcm/g; s/_hdr/-hdr/g; s/BCM/bcm-0bb4-0306/g'`; echo -e " \${PN}-$pkg \\"; done | sort -u
-
-LICENSE:${PN}-broadcom-license = "Firmware-broadcom_bcm43xx"
-FILES:${PN}-broadcom-license = "${nonarch_base_libdir}/firmware/LICENCE.broadcom_bcm43xx"
-
-# for i in `grep brcm WHENCE | grep ^File | sed 's/File: brcm.//g'`; do pkg=`echo $i | sed 's/-[sp40].*//g; s/\.bin//g; s/brcmfmac/bcm/g; s/_hdr/-hdr/g; s/BCM/bcm-0bb4-0306/g'`; echo "$i - $pkg"; echo -e "FILES:\${PN}-$pkg = \"\${nonarch_base_libdir}/firmware/brcm/$i\""; done | grep ^FILES
-
-FILES:${PN}-bcm43xx = "${nonarch_base_libdir}/firmware/brcm/bcm43xx-0.fw"
-FILES:${PN}-bcm43xx-hdr = "${nonarch_base_libdir}/firmware/brcm/bcm43xx_hdr-0.fw"
-FILES:${PN}-bcm4329-fullmac = "${nonarch_base_libdir}/firmware/brcm/bcm4329-fullmac-4.bin"
-FILES:${PN}-bcm43236b = "${nonarch_base_libdir}/firmware/brcm/brcmfmac43236b.bin"
-FILES:${PN}-bcm4329 = "${nonarch_base_libdir}/firmware/brcm/brcmfmac4329-sdio.bin"
-FILES:${PN}-bcm4330 = "${nonarch_base_libdir}/firmware/brcm/brcmfmac4330-sdio.*"
-FILES:${PN}-bcm4334 = "${nonarch_base_libdir}/firmware/brcm/brcmfmac4334-sdio.bin"
-FILES:${PN}-bcm4335 = "${nonarch_base_libdir}/firmware/brcm/brcmfmac4335-sdio.bin"
-FILES:${PN}-bcm4339 = "${nonarch_base_libdir}/firmware/brcm/brcmfmac4339-sdio.bin \
- ${nonarch_base_libdir}/firmware/cypress/cyfmac4339-sdio.bin \
-"
-FILES:${PN}-bcm43241b0 = "${nonarch_base_libdir}/firmware/brcm/brcmfmac43241b0-sdio.bin"
-FILES:${PN}-bcm43241b4 = "${nonarch_base_libdir}/firmware/brcm/brcmfmac43241b4-sdio.bin"
-FILES:${PN}-bcm43241b5 = "${nonarch_base_libdir}/firmware/brcm/brcmfmac43241b5-sdio.bin"
-FILES:${PN}-bcm43242a = "${nonarch_base_libdir}/firmware/brcm/brcmfmac43242a.bin"
-FILES:${PN}-bcm43143 = "${nonarch_base_libdir}/firmware/brcm/brcmfmac43143.bin \
- ${nonarch_base_libdir}/firmware/brcm/brcmfmac43143-sdio.bin \
-"
-FILES:${PN}-bcm43430a0 = "${nonarch_base_libdir}/firmware/brcm/brcmfmac43430a0-sdio.*"
-FILES:${PN}-bcm43455 = "${nonarch_base_libdir}/firmware/brcm/brcmfmac43455-sdio.* \
- ${nonarch_base_libdir}/firmware/cypress/cyfmac43455-sdio.* \
-"
-FILES:${PN}-bcm4350c2 = "${nonarch_base_libdir}/firmware/brcm/brcmfmac4350c2-pcie.bin"
-FILES:${PN}-bcm4350 = "${nonarch_base_libdir}/firmware/brcm/brcmfmac4350-pcie.bin"
-FILES:${PN}-bcm4356 = "${nonarch_base_libdir}/firmware/brcm/brcmfmac4356-sdio.* \
- ${nonarch_base_libdir}/firmware/cypress/cyfmac4356-sdio.* \
-"
-FILES:${PN}-bcm43569 = "${nonarch_base_libdir}/firmware/brcm/brcmfmac43569.bin"
-FILES:${PN}-bcm43570 = "${nonarch_base_libdir}/firmware/brcm/brcmfmac43570-pcie.bin \
- ${nonarch_base_libdir}/firmware/cypress/cyfmac43570-pcie.bin \
-"
-FILES:${PN}-bcm4358 = "${nonarch_base_libdir}/firmware/brcm/brcmfmac4358-pcie.bin"
-FILES:${PN}-bcm43602 = "${nonarch_base_libdir}/firmware/brcm/brcmfmac43602-pcie.bin \
- ${nonarch_base_libdir}/firmware/brcm/brcmfmac43602-pcie.ap.bin \
-"
-FILES:${PN}-bcm4366b = "${nonarch_base_libdir}/firmware/brcm/brcmfmac4366b-pcie.bin"
-FILES:${PN}-bcm4366c = "${nonarch_base_libdir}/firmware/brcm/brcmfmac4366c-pcie.bin"
-FILES:${PN}-bcm4371 = "${nonarch_base_libdir}/firmware/brcm/brcmfmac4371-pcie.bin"
-
-# for i in `grep brcm WHENCE | grep ^File | sed 's/File: brcm.//g'`; do pkg=`echo $i | sed 's/-[sp40].*//g; s/\.bin//g; s/brcmfmac/bcm/g; s/_hdr/-hdr/g; s/BCM/bcm-0bb4-0306/g'`; echo -e "LICENSE:\${PN}-$pkg = \"Firmware-broadcom_bcm43xx\"\nRDEPENDS_\${PN}-$pkg += \"\${PN}-broadcom-license\""; done
-# Currently 1st one and last 6 have cypress LICENSE
-
-LICENSE:${PN}-bcm43xx = "Firmware-broadcom_bcm43xx"
-RDEPENDS:${PN}-bcm43xx += "${PN}-broadcom-license"
-LICENSE:${PN}-bcm43xx-hdr = "Firmware-broadcom_bcm43xx"
-RDEPENDS:${PN}-bcm43xx-hdr += "${PN}-broadcom-license"
-LICENSE:${PN}-bcm4329-fullmac = "Firmware-broadcom_bcm43xx"
-RDEPENDS:${PN}-bcm4329-fullmac += "${PN}-broadcom-license"
-LICENSE:${PN}-bcm43236b = "Firmware-broadcom_bcm43xx"
-RDEPENDS:${PN}-bcm43236b += "${PN}-broadcom-license"
-LICENSE:${PN}-bcm4329 = "Firmware-broadcom_bcm43xx"
-RDEPENDS:${PN}-bcm4329 += "${PN}-broadcom-license"
-LICENSE:${PN}-bcm4330 = "Firmware-broadcom_bcm43xx"
-RDEPENDS:${PN}-bcm4330 += "${PN}-broadcom-license"
-LICENSE:${PN}-bcm4334 = "Firmware-broadcom_bcm43xx"
-RDEPENDS:${PN}-bcm4334 += "${PN}-broadcom-license"
-LICENSE:${PN}-bcm4335 = "Firmware-broadcom_bcm43xx"
-RDEPENDS:${PN}-bcm4335 += "${PN}-broadcom-license"
-LICENSE:${PN}-bcm4339 = "Firmware-broadcom_bcm43xx"
-RDEPENDS:${PN}-bcm4339 += "${PN}-broadcom-license"
-LICENSE:${PN}-bcm43241b0 = "Firmware-broadcom_bcm43xx"
-RDEPENDS:${PN}-bcm43241b0 += "${PN}-broadcom-license"
-LICENSE:${PN}-bcm43241b4 = "Firmware-broadcom_bcm43xx"
-RDEPENDS:${PN}-bcm43241b4 += "${PN}-broadcom-license"
-LICENSE:${PN}-bcm43241b5 = "Firmware-broadcom_bcm43xx"
-RDEPENDS:${PN}-bcm43241b5 += "${PN}-broadcom-license"
-LICENSE:${PN}-bcm43242a = "Firmware-broadcom_bcm43xx"
-RDEPENDS:${PN}-bcm43242a += "${PN}-broadcom-license"
-LICENSE:${PN}-bcm43143 = "Firmware-broadcom_bcm43xx"
-RDEPENDS:${PN}-bcm43143 += "${PN}-broadcom-license"
-LICENSE:${PN}-bcm43430a0 = "Firmware-broadcom_bcm43xx"
-RDEPENDS:${PN}-bcm43430a0 += "${PN}-broadcom-license"
-LICENSE:${PN}-bcm43455 = "Firmware-broadcom_bcm43xx"
-RDEPENDS:${PN}-bcm43455 += "${PN}-broadcom-license"
-LICENSE:${PN}-bcm4350c2 = "Firmware-broadcom_bcm43xx"
-RDEPENDS:${PN}-bcm4350c2 += "${PN}-broadcom-license"
-LICENSE:${PN}-bcm4350 = "Firmware-broadcom_bcm43xx"
-RDEPENDS:${PN}-bcm4350 += "${PN}-broadcom-license"
-LICENSE:${PN}-bcm4356 = "Firmware-broadcom_bcm43xx"
-RDEPENDS:${PN}-bcm4356 += "${PN}-broadcom-license"
-LICENSE:${PN}-bcm43569 = "Firmware-broadcom_bcm43xx"
-RDEPENDS:${PN}-bcm43569 += "${PN}-broadcom-license"
-LICENSE:${PN}-bcm43570 = "Firmware-broadcom_bcm43xx"
-RDEPENDS:${PN}-bcm43570 += "${PN}-broadcom-license"
-LICENSE:${PN}-bcm4358 = "Firmware-broadcom_bcm43xx"
-RDEPENDS:${PN}-bcm4358 += "${PN}-broadcom-license"
-LICENSE:${PN}-bcm43602 = "Firmware-broadcom_bcm43xx"
-RDEPENDS:${PN}-bcm43602 += "${PN}-broadcom-license"
-LICENSE:${PN}-bcm4366b = "Firmware-broadcom_bcm43xx"
-RDEPENDS:${PN}-bcm4366b += "${PN}-broadcom-license"
-LICENSE:${PN}-bcm4366c = "Firmware-broadcom_bcm43xx"
-RDEPENDS:${PN}-bcm4366c += "${PN}-broadcom-license"
-LICENSE:${PN}-bcm4371 = "Firmware-broadcom_bcm43xx"
-RDEPENDS:${PN}-bcm4371 += "${PN}-broadcom-license"
-
-# For broadcom cypress
-
-LICENSE:${PN}-cypress-license = "Firmware-cypress"
-FILES:${PN}-cypress-license = "${nonarch_base_libdir}/firmware/LICENCE.cypress"
-
-FILES:${PN}-bcm-0bb4-0306 = "${nonarch_base_libdir}/firmware/brcm/BCM-0bb4-0306.hcd"
-FILES:${PN}-bcm43340 = "${nonarch_base_libdir}/firmware/brcm/brcmfmac43340-sdio.* \
- ${nonarch_base_libdir}/firmware/cypress/cyfmac43340-sdio.*"
-FILES:${PN}-bcm43362 = "${nonarch_base_libdir}/firmware/brcm/brcmfmac43362-sdio.* \
- ${nonarch_base_libdir}/firmware/cypress/cyfmac43362-sdio.*"
-FILES:${PN}-bcm43430 = "${nonarch_base_libdir}/firmware/brcm/brcmfmac43430-sdio.* \
- ${nonarch_base_libdir}/firmware/cypress/cyfmac43430-sdio.*"
-FILES:${PN}-bcm4354 = "${nonarch_base_libdir}/firmware/brcm/brcmfmac4354-sdio.bin \
- ${nonarch_base_libdir}/firmware/cypress/cyfmac4354-sdio.bin \
-"
-FILES:${PN}-bcm4356-pcie = "${nonarch_base_libdir}/firmware/brcm/brcmfmac4356-pcie.* \
- ${nonarch_base_libdir}/firmware/cypress/cyfmac4356-pcie.* \
-"
-FILES:${PN}-bcm4373 = "${nonarch_base_libdir}/firmware/brcm/brcmfmac4373-sdio.bin \
- ${nonarch_base_libdir}/firmware/brcm/brcmfmac4373.bin \
- ${nonarch_base_libdir}/firmware/cypress/cyfmac4373-sdio.bin \
- ${nonarch_base_libdir}/firmware/brcm/brcmfmac4373-sdio.clm_blob \
-"
-
-LICENSE:${PN}-bcm-0bb4-0306 = "Firmware-cypress"
-RDEPENDS:${PN}-bcm-0bb4-0306 += "${PN}-cypress-license"
-LICENSE:${PN}-bcm43340 = "Firmware-cypress"
-RDEPENDS:${PN}-bcm43340 += "${PN}-cypress-license"
-LICENSE:${PN}-bcm43362 = "Firmware-cypress"
-RDEPENDS:${PN}-bcm43362 += "${PN}-cypress-license"
-LICENSE:${PN}-bcm43430 = "Firmware-cypress"
-RDEPENDS:${PN}-bcm43430 += "${PN}-cypress-license"
-LICENSE:${PN}-bcm4354 = "Firmware-cypress"
-RDEPENDS:${PN}-bcm4354 += "${PN}-cypress-license"
-LICENSE:${PN}-bcm4356-pcie = "Firmware-cypress"
-RDEPENDS:${PN}-bcm4356-pcie += "${PN}-cypress-license"
-LICENSE:${PN}-bcm4373 = "Firmware-cypress"
-RDEPENDS:${PN}-bcm4373 += "${PN}-cypress-license"
-
-# For Broadcom bnx2-mips
-#
-# which is a separate case to the other Broadcom firmwares since its
-# license is contained in the shared WHENCE file.
-
-LICENSE:${PN}-bnx2-mips = "WHENCE"
-LICENSE:${PN}-whence-license = "WHENCE"
-
-FILES:${PN}-bnx2-mips = "${nonarch_base_libdir}/firmware/bnx2/bnx2-mips-09-6.2.1b.fw"
-FILES:${PN}-whence-license = "${nonarch_base_libdir}/firmware/WHENCE"
-
-RDEPENDS:${PN}-bnx2-mips += "${PN}-whence-license"
-
-# For imx-sdma
-LICENSE:${PN}-imx-sdma-imx6q = "Firmware-imx-sdma_firmware"
-LICENSE:${PN}-imx-sdma-imx7d = "Firmware-imx-sdma_firmware"
-LICENSE:${PN}-imx-sdma-license = "Firmware-imx-sdma_firmware"
-
-FILES:${PN}-imx-sdma-imx6q = "${nonarch_base_libdir}/firmware/imx/sdma/sdma-imx6q.bin"
-
-RPROVIDES:${PN}-imx-sdma-imx6q = "firmware-imx-sdma-imx6q"
-RREPLACES:${PN}-imx-sdma-imx6q = "firmware-imx-sdma-imx6q"
-RCONFLICTS:${PN}-imx-sdma-imx6q = "firmware-imx-sdma-imx6q"
-
-FILES:${PN}-imx-sdma-imx7d = "${nonarch_base_libdir}/firmware/imx/sdma/sdma-imx7d.bin"
-
-FILES:${PN}-imx-sdma-license = "${nonarch_base_libdir}/firmware/LICENSE.sdma_firmware"
-
-RDEPENDS:${PN}-imx-sdma-imx6q += "${PN}-imx-sdma-license"
-RDEPENDS:${PN}-imx-sdma-imx7d += "${PN}-imx-sdma-license"
-
-# For iwlwifi
-LICENSE:${PN}-iwlwifi = "Firmware-iwlwifi_firmware"
-LICENSE:${PN}-iwlwifi-135-6 = "Firmware-iwlwifi_firmware"
-LICENSE:${PN}-iwlwifi-3160-7 = "Firmware-iwlwifi_firmware"
-LICENSE:${PN}-iwlwifi-3160-8 = "Firmware-iwlwifi_firmware"
-LICENSE:${PN}-iwlwifi-3160-9 = "Firmware-iwlwifi_firmware"
-LICENSE:${PN}-iwlwifi-3160-10 = "Firmware-iwlwifi_firmware"
-LICENSE:${PN}-iwlwifi-3160-12 = "Firmware-iwlwifi_firmware"
-LICENSE:${PN}-iwlwifi-3160-13 = "Firmware-iwlwifi_firmware"
-LICENSE:${PN}-iwlwifi-3160-16 = "Firmware-iwlwifi_firmware"
-LICENSE:${PN}-iwlwifi-3160-17 = "Firmware-iwlwifi_firmware"
-LICENSE:${PN}-iwlwifi-6000-4 = "Firmware-iwlwifi_firmware"
-LICENSE:${PN}-iwlwifi-6000g2a-5 = "Firmware-iwlwifi_firmware"
-LICENSE:${PN}-iwlwifi-6000g2a-6 = "Firmware-iwlwifi_firmware"
-LICENSE:${PN}-iwlwifi-6000g2b-5 = "Firmware-iwlwifi_firmware"
-LICENSE:${PN}-iwlwifi-6000g2b-6 = "Firmware-iwlwifi_firmware"
-LICENSE:${PN}-iwlwifi-6050-4 = "Firmware-iwlwifi_firmware"
-LICENSE:${PN}-iwlwifi-6050-5 = "Firmware-iwlwifi_firmware"
-LICENSE:${PN}-iwlwifi-7260 = "Firmware-iwlwifi_firmware"
-LICENSE:${PN}-iwlwifi-7265 = "Firmware-iwlwifi_firmware"
-LICENSE:${PN}-iwlwifi-7265d = "Firmware-iwlwifi_firmware"
-LICENSE:${PN}-iwlwifi-8000c = "Firmware-iwlwifi_firmware"
-LICENSE:${PN}-iwlwifi-8265 = "Firmware-iwlwifi_firmware"
-LICENSE:${PN}-iwlwifi-9000 = "Firmware-iwlwifi_firmware"
-LICENSE:${PN}-iwlwifi-misc = "Firmware-iwlwifi_firmware"
-LICENSE:${PN}-iwlwifi-license = "Firmware-iwlwifi_firmware"
-
-
-FILES:${PN}-iwlwifi-license = "${nonarch_base_libdir}/firmware/LICENCE.iwlwifi_firmware"
-FILES:${PN}-iwlwifi-135-6 = "${nonarch_base_libdir}/firmware/iwlwifi-135-6.ucode"
-FILES:${PN}-iwlwifi-3160-7 = "${nonarch_base_libdir}/firmware/iwlwifi-3160-7.ucode"
-FILES:${PN}-iwlwifi-3160-8 = "${nonarch_base_libdir}/firmware/iwlwifi-3160-8.ucode"
-FILES:${PN}-iwlwifi-3160-9 = "${nonarch_base_libdir}/firmware/iwlwifi-3160-9.ucode"
-FILES:${PN}-iwlwifi-3160-10 = "${nonarch_base_libdir}/firmware/iwlwifi-3160-10.ucode"
-FILES:${PN}-iwlwifi-3160-12 = "${nonarch_base_libdir}/firmware/iwlwifi-3160-12.ucode"
-FILES:${PN}-iwlwifi-3160-13 = "${nonarch_base_libdir}/firmware/iwlwifi-3160-13.ucode"
-FILES:${PN}-iwlwifi-3160-16 = "${nonarch_base_libdir}/firmware/iwlwifi-3160-16.ucode"
-FILES:${PN}-iwlwifi-3160-17 = "${nonarch_base_libdir}/firmware/iwlwifi-3160-17.ucode"
-FILES:${PN}-iwlwifi-6000-4 = "${nonarch_base_libdir}/firmware/iwlwifi-6000-4.ucode"
-FILES:${PN}-iwlwifi-6000g2a-5 = "${nonarch_base_libdir}/firmware/iwlwifi-6000g2a-5.ucode"
-FILES:${PN}-iwlwifi-6000g2a-6 = "${nonarch_base_libdir}/firmware/iwlwifi-6000g2a-6.ucode"
-FILES:${PN}-iwlwifi-6000g2b-5 = "${nonarch_base_libdir}/firmware/iwlwifi-6000g2b-5.ucode"
-FILES:${PN}-iwlwifi-6000g2b-6 = "${nonarch_base_libdir}/firmware/iwlwifi-6000g2b-6.ucode"
-FILES:${PN}-iwlwifi-6050-4 = "${nonarch_base_libdir}/firmware/iwlwifi-6050-4.ucode"
-FILES:${PN}-iwlwifi-6050-5 = "${nonarch_base_libdir}/firmware/iwlwifi-6050-5.ucode"
-FILES:${PN}-iwlwifi-7260 = "${nonarch_base_libdir}/firmware/iwlwifi-7260-*.ucode"
-FILES:${PN}-iwlwifi-7265 = "${nonarch_base_libdir}/firmware/iwlwifi-7265-*.ucode"
-FILES:${PN}-iwlwifi-7265d = "${nonarch_base_libdir}/firmware/iwlwifi-7265D-*.ucode"
-FILES:${PN}-iwlwifi-8000c = "${nonarch_base_libdir}/firmware/iwlwifi-8000C-*.ucode"
-FILES:${PN}-iwlwifi-8265 = "${nonarch_base_libdir}/firmware/iwlwifi-8265-*.ucode"
-FILES:${PN}-iwlwifi-9000 = "${nonarch_base_libdir}/firmware/iwlwifi-9000-*.ucode"
-FILES:${PN}-iwlwifi-misc = "${nonarch_base_libdir}/firmware/iwlwifi-*.ucode"
-
-RDEPENDS:${PN}-iwlwifi-135-6 = "${PN}-iwlwifi-license"
-RDEPENDS:${PN}-iwlwifi-3160-7 = "${PN}-iwlwifi-license"
-RDEPENDS:${PN}-iwlwifi-3160-8 = "${PN}-iwlwifi-license"
-RDEPENDS:${PN}-iwlwifi-3160-9 = "${PN}-iwlwifi-license"
-RDEPENDS:${PN}-iwlwifi-3160-10 = "${PN}-iwlwifi-license"
-RDEPENDS:${PN}-iwlwifi-3160-12 = "${PN}-iwlwifi-license"
-RDEPENDS:${PN}-iwlwifi-3160-13 = "${PN}-iwlwifi-license"
-RDEPENDS:${PN}-iwlwifi-3160-16 = "${PN}-iwlwifi-license"
-RDEPENDS:${PN}-iwlwifi-3160-17 = "${PN}-iwlwifi-license"
-RDEPENDS:${PN}-iwlwifi-6000-4 = "${PN}-iwlwifi-license"
-RDEPENDS:${PN}-iwlwifi-6000g2a-5 = "${PN}-iwlwifi-license"
-RDEPENDS:${PN}-iwlwifi-6000g2a-6 = "${PN}-iwlwifi-license"
-RDEPENDS:${PN}-iwlwifi-6000g2b-5 = "${PN}-iwlwifi-license"
-RDEPENDS:${PN}-iwlwifi-6000g2b-6 = "${PN}-iwlwifi-license"
-RDEPENDS:${PN}-iwlwifi-6050-4 = "${PN}-iwlwifi-license"
-RDEPENDS:${PN}-iwlwifi-6050-5 = "${PN}-iwlwifi-license"
-RDEPENDS:${PN}-iwlwifi-7260 = "${PN}-iwlwifi-license"
-RDEPENDS:${PN}-iwlwifi-7265 = "${PN}-iwlwifi-license"
-RDEPENDS:${PN}-iwlwifi-7265d = "${PN}-iwlwifi-license"
-RDEPENDS:${PN}-iwlwifi-8000c = "${PN}-iwlwifi-license"
-RDEPENDS:${PN}-iwlwifi-8265 = "${PN}-iwlwifi-license"
-RDEPENDS:${PN}-iwlwifi-9000 = "${PN}-iwlwifi-license"
-RDEPENDS:${PN}-iwlwifi-misc = "${PN}-iwlwifi-license"
-
-# -iwlwifi-misc is a "catch all" package that includes all the iwlwifi
-# firmwares that are not already included in other -iwlwifi- packages.
-# -iwlwifi is a virtual package that depends upon all iwlwifi packages.
-# These are distinct in order to allow the -misc firmwares to be installed
-# without pulling in every other iwlwifi package.
-ALLOW_EMPTY:${PN}-iwlwifi = "1"
-ALLOW_EMPTY:${PN}-iwlwifi-misc = "1"
-
-# Handle package updating for the newly merged iwlwifi groupings
-RPROVIDES:${PN}-iwlwifi-7265 = "${PN}-iwlwifi-7265-8 ${PN}-iwlwifi-7265-9"
-RREPLACES:${PN}-iwlwifi-7265 = "${PN}-iwlwifi-7265-8 ${PN}-iwlwifi-7265-9"
-RCONFLICTS:${PN}-iwlwifi-7265 = "${PN}-iwlwifi-7265-8 ${PN}-iwlwifi-7265-9"
-
-RPROVIDES:${PN}-iwlwifi-7260 = "${PN}-iwlwifi-7260-7 ${PN}-iwlwifi-7260-8 ${PN}-iwlwifi-7260-9"
-RREPLACES:${PN}-iwlwifi-7260 = "${PN}-iwlwifi-7260-7 ${PN}-iwlwifi-7260-8 ${PN}-iwlwifi-7260-9"
-RCONFLICTS:${PN}-iwlwifi-7260 = "${PN}-iwlwifi-7260-7 ${PN}-iwlwifi-7260-8 ${PN}-iwlwifi-7260-9"
-
-# For ibt
-LICENSE:${PN}-ibt-license = "Firmware-ibt_firmware"
-LICENSE:${PN}-ibt-hw-37-7 = "Firmware-ibt_firmware"
-LICENSE:${PN}-ibt-hw-37-8 = "Firmware-ibt_firmware"
-LICENSE:${PN}-ibt-11-5 = "Firmware-ibt_firmware"
-LICENSE:${PN}-ibt-12-16 = "Firmware-ibt_firmware"
-LICENSE:${PN}-ibt-17 = "Firmware-ibt_firmware"
-LICENSE:${PN}-ibt-20 = "Firmware-ibt_firmware"
-LICENSE:${PN}-ibt-misc = "Firmware-ibt_firmware"
-
-FILES:${PN}-ibt-license = "${nonarch_base_libdir}/firmware/LICENCE.ibt_firmware"
-FILES:${PN}-ibt-hw-37-7 = "${nonarch_base_libdir}/firmware/intel/ibt-hw-37.7*.bseq"
-FILES:${PN}-ibt-hw-37-8 = "${nonarch_base_libdir}/firmware/intel/ibt-hw-37.8*.bseq"
-FILES:${PN}-ibt-11-5 = "${nonarch_base_libdir}/firmware/intel/ibt-11-5.sfi ${nonarch_base_libdir}/firmware/intel/ibt-11-5.ddc"
-FILES:${PN}-ibt-12-16 = "${nonarch_base_libdir}/firmware/intel/ibt-12-16.sfi ${nonarch_base_libdir}/firmware/intel/ibt-12-16.ddc"
-FILES:${PN}-ibt-17 = "${nonarch_base_libdir}/firmware/intel/ibt-17-*.sfi ${nonarch_base_libdir}/firmware/intel/ibt-17-*.ddc"
-FILES:${PN}-ibt-20 = "${nonarch_base_libdir}/firmware/intel/ibt-20-*.sfi ${nonarch_base_libdir}/firmware/intel/ibt-20-*.ddc"
-FILES:${PN}-ibt-misc = "${nonarch_base_libdir}/firmware/intel/ibt-*"
-
-RDEPENDS:${PN}-ibt-hw-37-7 = "${PN}-ibt-license"
-RDEPENDS:${PN}-ibt-hw-37.8 = "${PN}-ibt-license"
-RDEPENDS:${PN}-ibt-11-5 = "${PN}-ibt-license"
-RDEPENDS:${PN}-ibt-12-16 = "${PN}-ibt-license"
-RDEPENDS:${PN}-ibt-17 = "${PN}-ibt-license"
-RDEPENDS:${PN}-ibt-20 = "${PN}-ibt-license"
-RDEPENDS:${PN}-ibt-misc = "${PN}-ibt-license"
-
-ALLOW_EMPTY:${PN}-ibt= "1"
-ALLOW_EMPTY:${PN}-ibt-misc = "1"
-
-LICENSE:${PN}-i915 = "Firmware-i915"
-LICENSE:${PN}-i915-license = "Firmware-i915"
-FILES:${PN}-i915-license = "${nonarch_base_libdir}/firmware/LICENSE.i915"
-FILES:${PN}-i915 = "${nonarch_base_libdir}/firmware/i915"
-RDEPENDS:${PN}-i915 = "${PN}-i915-license"
-
-LICENSE:${PN}-ice = "Firmware-ice"
-LICENSE:${PN}-ice-license = "Firmware-ice"
-FILES:${PN}-ice-license = "${nonarch_base_libdir}/firmware/LICENSE.ice"
-FILES:${PN}-ice = "${nonarch_base_libdir}/firmware/intel/ice"
-RDEPENDS:${PN}-ice = "${PN}-ice-license"
-
-FILES:${PN}-adsp-sst-license = "${nonarch_base_libdir}/firmware/LICENCE.adsp_sst"
-LICENSE:${PN}-adsp-sst = "Firmware-adsp_sst"
-LICENSE:${PN}-adsp-sst-license = "Firmware-adsp_sst"
-FILES:${PN}-adsp-sst = "${nonarch_base_libdir}/firmware/intel/dsp_fw*"
-RDEPENDS:${PN}-adsp-sst = "${PN}-adsp-sst-license"
-
-# For QAT
-LICENSE:${PN}-qat = "Firmware-qat"
-LICENSE:${PN}-qat-license = "Firmware-qat"
-FILES:${PN}-qat-license = "${nonarch_base_libdir}/firmware/LICENCE.qat_firmware"
-FILES:${PN}-qat = "${nonarch_base_libdir}/firmware/qat*.bin"
-RDEPENDS:${PN}-qat = "${PN}-qat-license"
-
-# For QCOM VPU/GPU and SDM845
-LICENSE:${PN}-qcom-license = "Firmware-qcom"
-FILES:${PN}-qcom-license = "${nonarch_base_libdir}/firmware/LICENSE.qcom ${nonarch_base_libdir}/firmware/qcom/NOTICE.txt"
-FILES:${PN}-qcom-venus-1.8 = "${nonarch_base_libdir}/firmware/qcom/venus-1.8/*"
-FILES:${PN}-qcom-venus-4.2 = "${nonarch_base_libdir}/firmware/qcom/venus-4.2/*"
-FILES:${PN}-qcom-venus-5.2 = "${nonarch_base_libdir}/firmware/qcom/venus-5.2/*"
-FILES:${PN}-qcom-venus-5.4 = "${nonarch_base_libdir}/firmware/qcom/venus-5.4/*"
-FILES:${PN}-qcom-vpu-1.0 = "${nonarch_base_libdir}/firmware/qcom/vpu-1.0/*"
-FILES:${PN}-qcom-vpu-2.0 = "${nonarch_base_libdir}/firmware/qcom/vpu-2.0/*"
-FILES:${PN}-qcom-adreno-a2xx = "${nonarch_base_libdir}/firmware/qcom/leia_*.fw"
-FILES:${PN}-qcom-adreno-a3xx = "${nonarch_base_libdir}/firmware/qcom/a3*_*.fw ${nonarch_base_libdir}/firmware/a300_*.fw"
-FILES:${PN}-qcom-adreno-a4xx = "${nonarch_base_libdir}/firmware/qcom/a4*_*.fw"
-FILES:${PN}-qcom-adreno-a530 = "${nonarch_base_libdir}/firmware/qcom/a530*.*"
-FILES:${PN}-qcom-adreno-a630 = "${nonarch_base_libdir}/firmware/qcom/a630*.* ${nonarch_base_libdir}/firmware/qcom/sdm845/a630*.*"
-FILES:${PN}-qcom-adreno-a650 = "${nonarch_base_libdir}/firmware/qcom/a650*.* ${nonarch_base_libdir}/firmware/qcom/sm8250/a650*.*"
-FILES:${PN}-qcom-adreno-a660 = "${nonarch_base_libdir}/firmware/qcom/a660*.*"
-FILES:${PN}-qcom-apq8096-audio = "${nonarch_base_libdir}/firmware/qcom/apq8096/adsp*.*"
-FILES:${PN}-qcom-apq8096-modem = "${nonarch_base_libdir}/firmware/qcom/apq8096/mba.mbn ${nonarch_base_libdir}/firmware/qcom/apq8096/modem*.* ${nonarch_base_libdir}/firmware/qcom/apq8096/wlanmdsp.mbn"
-FILES:${PN}-qcom-sdm845-audio = "${nonarch_base_libdir}/firmware/qcom/sdm845/adsp*.*"
-FILES:${PN}-qcom-sdm845-compute = "${nonarch_base_libdir}/firmware/qcom/sdm845/cdsp*.*"
-FILES:${PN}-qcom-sdm845-modem = "${nonarch_base_libdir}/firmware/qcom/sdm845/mba.mbn ${nonarch_base_libdir}/firmware/qcom/sdm845/modem*.* ${nonarch_base_libdir}/firmware/qcom/sdm845/wlanmdsp.mbn"
-FILES:${PN}-qcom-sm8250-audio = "${nonarch_base_libdir}/firmware/qcom/sm8250/adsp*.*"
-FILES:${PN}-qcom-sm8250-compute = "${nonarch_base_libdir}/firmware/qcom/sm8250/cdsp*.*"
-RDEPENDS:${PN}-qcom-venus-1.8 = "${PN}-qcom-license"
-RDEPENDS:${PN}-qcom-venus-4.2 = "${PN}-qcom-license"
-RDEPENDS:${PN}-qcom-venus-5.2 = "${PN}-qcom-license"
-RDEPENDS:${PN}-qcom-venus-5.4 = "${PN}-qcom-license"
-RDEPENDS:${PN}-qcom-vpu-1.0 = "${PN}-qcom-license"
-RDEPENDS:${PN}-qcom-vpu-2.0 = "${PN}-qcom-license"
-RDEPENDS:${PN}-qcom-adreno-a2xx = "${PN}-qcom-license"
-RDEPENDS:${PN}-qcom-adreno-a3xx = "${PN}-qcom-license"
-RDEPENDS:${PN}-qcom-adreno-a4xx = "${PN}-qcom-license"
-RDEPENDS:${PN}-qcom-adreno-a530 = "${PN}-qcom-license"
-RDEPENDS:${PN}-qcom-adreno-a630 = "${PN}-qcom-license"
-RDEPENDS:${PN}-qcom-adreno-a650 = "${PN}-qcom-license"
-RDEPENDS:${PN}-qcom-adreno-a660 = "${PN}-qcom-license"
-RDEPENDS:${PN}-qcom-apq8096-audio = "${PN}-qcom-license"
-RDEPENDS:${PN}-qcom-apq8096-modem = "${PN}-qcom-license"
-RDEPENDS:${PN}-qcom-sdm845-audio = "${PN}-qcom-license"
-RDEPENDS:${PN}-qcom-sdm845-compute = "${PN}-qcom-license"
-RDEPENDS:${PN}-qcom-sdm845-modem = "${PN}-qcom-license"
-RDEPENDS:${PN}-qcom-sm8250-audio = "${PN}-qcom-license"
-RDEPENDS:${PN}-qcom-sm8250-compute = "${PN}-qcom-license"
-
-FILES:${PN}-liquidio = "${nonarch_base_libdir}/firmware/liquidio"
-
-# For Amlogic VDEC
-LICENSE:${PN}-amlogic-vdec = "Firmware-amlogic_vdec"
-FILES:${PN}-amlogic-vdec-license = "${nonarch_base_libdir}/firmware/LICENSE.amlogic_vdec"
-FILES:${PN}-amlogic-vdec = "${nonarch_base_libdir}/firmware/meson/vdec/*"
-RDEPENDS:${PN}-amlogic-vdec = "${PN}-amlogic-vdec-license"
-
-# For other firmwares
-# Maybe split out to separate packages when needed.
-LICENSE:${PN} = "\
- Firmware-Abilis \
- & Firmware-agere \
- & Firmware-amdgpu \
- & Firmware-amd-ucode \
- & Firmware-amlogic_vdec \
- & Firmware-atmel \
- & Firmware-ca0132 \
- & Firmware-cavium \
- & Firmware-chelsio_firmware \
- & Firmware-cw1200 \
- & Firmware-dib0700 \
- & Firmware-e100 \
- & Firmware-ene_firmware \
- & Firmware-fw_sst_0f28 \
- & Firmware-go7007 \
- & Firmware-hfi1_firmware \
- & Firmware-i2400m \
- & Firmware-ibt_firmware \
- & Firmware-it913x \
- & Firmware-IntcSST2 \
- & Firmware-kaweth \
- & Firmware-moxa \
- & Firmware-myri10ge_firmware \
- & Firmware-nvidia \
- & Firmware-OLPC \
- & Firmware-ath9k-htc \
- & Firmware-phanfw \
- & Firmware-qat \
- & Firmware-qcom \
- & Firmware-qla1280 \
- & Firmware-qla2xxx \
- & Firmware-r8a779x_usb3 \
- & Firmware-radeon \
- & Firmware-ralink_a_mediatek_company_firmware \
- & Firmware-ralink-firmware \
- & Firmware-imx-sdma_firmware \
- & Firmware-siano \
- & Firmware-tda7706-firmware \
- & Firmware-ti-connectivity \
- & Firmware-ti-keystone \
- & Firmware-ueagle-atm4-firmware \
- & Firmware-wl1251 \
- & Firmware-xc4000 \
- & Firmware-xc5000 \
- & Firmware-xc5000c \
- & WHENCE \
-"
-
-FILES:${PN}-license += "${nonarch_base_libdir}/firmware/LICEN*"
-FILES:${PN} += "${nonarch_base_libdir}/firmware/*"
-RDEPENDS:${PN} += "${PN}-license"
-RDEPENDS:${PN} += "${PN}-whence-license"
-
-# Make linux-firmware depend on all of the split-out packages.
-# Make linux-firmware-iwlwifi depend on all of the split-out iwlwifi packages.
-# Make linux-firmware-ibt depend on all of the split-out ibt packages.
-python populate_packages:prepend () {
- firmware_pkgs = oe.utils.packages_filter_out_system(d)
- d.appendVar('RRECOMMENDS:linux-firmware', ' ' + ' '.join(firmware_pkgs))
-
- iwlwifi_pkgs = filter(lambda x: x.find('-iwlwifi-') != -1, firmware_pkgs)
- d.appendVar('RRECOMMENDS:linux-firmware-iwlwifi', ' ' + ' '.join(iwlwifi_pkgs))
-
- ibt_pkgs = filter(lambda x: x.find('-ibt-') != -1, firmware_pkgs)
- d.appendVar('RRECOMMENDS:linux-firmware-ibt', ' ' + ' '.join(ibt_pkgs))
-}
-
-# Firmware files are generally not ran on the CPU, so they can be
-# allarch despite being architecture specific
-INSANE_SKIP = "arch"
diff --git a/meta/recipes-kernel/linux-firmware/linux-firmware_20240312.bb b/meta/recipes-kernel/linux-firmware/linux-firmware_20240312.bb
new file mode 100644
index 0000000000..ff79bb9b33
--- /dev/null
+++ b/meta/recipes-kernel/linux-firmware/linux-firmware_20240312.bb
@@ -0,0 +1,1583 @@
+SUMMARY = "Firmware files for use with Linux kernel"
+HOMEPAGE = "https://www.kernel.org/"
+DESCRIPTION = "Linux firmware is a package distributed alongside the Linux kernel \
+that contains firmware binary blobs necessary for partial or full functionality \
+of certain hardware devices."
+SECTION = "kernel"
+
+LICENSE = "\
+ Firmware-Abilis \
+ & Firmware-adsp_sst \
+ & Firmware-agere \
+ & Firmware-amdgpu \
+ & Firmware-amd-ucode \
+ & Firmware-amlogic_vdec \
+ & Firmware-amphion_vpu \
+ & Firmware-atheros_firmware \
+ & Firmware-atmel \
+ & Firmware-broadcom_bcm43xx \
+ & Firmware-ca0132 \
+ & Firmware-cavium \
+ & Firmware-chelsio_firmware \
+ & Firmware-cirrus \
+ & Firmware-cnm \
+ & Firmware-cw1200 \
+ & Firmware-cypress \
+ & Firmware-dib0700 \
+ & Firmware-e100 \
+ & Firmware-ene_firmware \
+ & Firmware-fw_sst_0f28 \
+ & Firmware-go7007 \
+ & Firmware-hfi1_firmware \
+ & Firmware-i915 \
+ & Firmware-ibt_firmware \
+ & Firmware-ice \
+ & Firmware-ice_enhanced \
+ & Firmware-it913x \
+ & Firmware-iwlwifi_firmware \
+ & Firmware-IntcSST2 \
+ & Firmware-kaweth \
+ & Firmware-linaro \
+ & Firmware-Lontium \
+ & Firmware-Marvell \
+ & Firmware-mediatek \
+ & Firmware-microchip \
+ & Firmware-moxa \
+ & Firmware-myri10ge_firmware \
+ & Firmware-netronome \
+ & Firmware-nvidia \
+ & Firmware-nxp \
+ & Firmware-nxp_mc_firmware \
+ & Firmware-OLPC \
+ & Firmware-ath9k-htc \
+ & Firmware-phanfw \
+ & Firmware-powervr \
+ & Firmware-qat \
+ & Firmware-qcom \
+ & Firmware-qcom-yamato \
+ & Firmware-qla1280 \
+ & Firmware-qla2xxx \
+ & Firmware-qualcommAthos_ar3k \
+ & Firmware-qualcommAthos_ath10k \
+ & Firmware-r8a779x_usb3 \
+ & Firmware-radeon \
+ & Firmware-ralink_a_mediatek_company_firmware \
+ & Firmware-ralink-firmware \
+ & Firmware-rockchip \
+ & Firmware-rtlwifi_firmware \
+ & Firmware-imx-sdma_firmware \
+ & Firmware-siano \
+ & Firmware-ti-connectivity \
+ & Firmware-ti-keystone \
+ & Firmware-ueagle-atm4-firmware \
+ & Firmware-via_vt6656 \
+ & Firmware-wl1251 \
+ & Firmware-xc4000 \
+ & Firmware-xc5000 \
+ & Firmware-xc5000c \
+ & WHENCE \
+ & GPL-2.0-or-later \
+"
+
+LIC_FILES_CHKSUM = "file://LICENCE.Abilis;md5=b5ee3f410780e56711ad48eadc22b8bc \
+ file://LICENCE.adsp_sst;md5=615c45b91a5a4a9fe046d6ab9a2df728 \
+ file://LICENCE.agere;md5=af0133de6b4a9b2522defd5f188afd31 \
+ file://LICENSE.amdgpu;md5=a2589a05ea5b6bd2b7f4f623c7e7a649 \
+ file://LICENSE.amd-ucode;md5=6ca90c57f7b248de1e25c7f68ffc4698 \
+ file://LICENSE.amlogic_vdec;md5=dc44f59bf64a81643e500ad3f39a468a \
+ file://LICENSE.amphion_vpu;md5=2bcdc00527b2d0542bd92b52aaec2b60 \
+ file://LICENCE.atheros_firmware;md5=30a14c7823beedac9fa39c64fdd01a13 \
+ file://LICENSE.atmel;md5=aa74ac0c60595dee4d4e239107ea77a3 \
+ file://LICENCE.broadcom_bcm43xx;md5=3160c14df7228891b868060e1951dfbc \
+ file://LICENCE.ca0132;md5=209b33e66ee5be0461f13d31da392198 \
+ file://LICENCE.cadence;md5=009f46816f6956cfb75ede13d3e1cee0 \
+ file://LICENCE.cavium;md5=c37aaffb1ebe5939b2580d073a95daea \
+ file://LICENCE.chelsio_firmware;md5=819aa8c3fa453f1b258ed8d168a9d903 \
+ file://LICENSE.cirrus;md5=662ea2c1a8888f7d79ed7f27c27472e1 \
+ file://LICENCE.cnm;md5=93b67e6bac7f8fec22b96b8ad0a1a9d0 \
+ file://LICENCE.cw1200;md5=f0f770864e7a8444a5c5aa9d12a3a7ed \
+ file://LICENCE.cypress;md5=48cd9436c763bf873961f9ed7b5c147b \
+ file://LICENSE.dib0700;md5=f7411825c8a555a1a3e5eab9ca773431 \
+ file://LICENCE.e100;md5=ec0f84136766df159a3ae6d02acdf5a8 \
+ file://LICENCE.ene_firmware;md5=ed67f0f62f8f798130c296720b7d3921 \
+ file://LICENCE.fw_sst_0f28;md5=6353931c988ad52818ae733ac61cd293 \
+ file://LICENCE.go7007;md5=c0bb9f6aaaba55b0529ee9b30aa66beb \
+ file://LICENSE.hfi1_firmware;md5=5e7b6e586ce7339d12689e49931ad444 \
+ file://LICENSE.i915;md5=2b0b2e0d20984affd4490ba2cba02570 \
+ file://LICENCE.ibt_firmware;md5=fdbee1ddfe0fb7ab0b2fcd6b454a366b \
+ file://LICENSE.ice;md5=742ab4850f2670792940e6d15c974b2f \
+ file://LICENSE.ice_enhanced;md5=f305cfc31b64f95f774f9edd9df0224d \
+ file://LICENCE.IntcSST2;md5=9e7d8bea77612d7cc7d9e9b54b623062 \
+ file://LICENCE.it913x;md5=1fbf727bfb6a949810c4dbfa7e6ce4f8 \
+ file://LICENCE.iwlwifi_firmware;md5=2ce6786e0fc11ac6e36b54bb9b799f1b \
+ file://LICENCE.kaweth;md5=b1d876e562f4b3b8d391ad8395dfe03f \
+ file://LICENCE.linaro;md5=936d91e71cf9cd30e733db4bf11661cc \
+ file://LICENSE.Lontium;md5=4ec8dc582ff7295f39e2ca6a7b0be2b6 \
+ file://LICENCE.Marvell;md5=28b6ed8bd04ba105af6e4dcd6e997772 \
+ file://LICENCE.mediatek;md5=7c1976b63217d76ce47d0a11d8a79cf2 \
+ file://LICENCE.microchip;md5=db753b00305675dfbf120e3f24a47277 \
+ file://LICENCE.moxa;md5=1086614767d8ccf744a923289d3d4261 \
+ file://LICENCE.myri10ge_firmware;md5=42e32fb89f6b959ca222e25ac8df8fed \
+ file://LICENCE.Netronome;md5=4add08f2577086d44447996503cddf5f \
+ file://LICENCE.nvidia;md5=4428a922ed3ba2ceec95f076a488ce07 \
+ file://LICENCE.NXP;md5=58bb8ba632cd729b9ba6183bc6aed36f \
+ file://LICENSE.nxp;md5=cca321ca1524d6a1e4fed87486cd82dc \
+ file://LICENSE.nxp_mc_firmware;md5=9dc97e4b279b3858cae8879ae2fe5dd7 \
+ file://LICENCE.OLPC;md5=5b917f9d8c061991be4f6f5f108719cd \
+ file://LICENCE.open-ath9k-htc-firmware;md5=1b33c9f4d17bc4d457bdb23727046837 \
+ file://LICENCE.phanfw;md5=954dcec0e051f9409812b561ea743bfa \
+ file://LICENSE.powervr;md5=83045ed2a2cda15b4eaff682c98c9533 \
+ file://LICENCE.qat_firmware;md5=72de83dfd9b87be7685ed099a39fbea4 \
+ file://LICENSE.qcom;md5=164e3362a538eb11d3ac51e8e134294b \
+ file://LICENSE.qcom_yamato;md5=d0de0eeccaf1843a850bf7a6777eec5c \
+ file://LICENCE.qla1280;md5=d6895732e622d950609093223a2c4f5d \
+ file://LICENCE.qla2xxx;md5=505855e921b75f1be4a437ad9b79dff0 \
+ file://LICENSE.QualcommAtheros_ar3k;md5=b5fe244fb2b532311de1472a3bc06da5 \
+ file://LICENSE.QualcommAtheros_ath10k;md5=cb42b686ee5f5cb890275e4321db60a8 \
+ file://LICENCE.r8a779x_usb3;md5=4c1671656153025d7076105a5da7e498 \
+ file://LICENSE.radeon;md5=68ec28bacb3613200bca44f404c69b16 \
+ file://LICENCE.ralink_a_mediatek_company_firmware;md5=728f1a85fd53fd67fa8d7afb080bc435 \
+ file://LICENCE.ralink-firmware.txt;md5=ab2c269277c45476fb449673911a2dfd \
+ file://LICENCE.rockchip;md5=5fd70190c5ed39734baceada8ecced26 \
+ file://LICENCE.rtlwifi_firmware.txt;md5=00d06cfd3eddd5a2698948ead2ad54a5 \
+ file://LICENSE.sdma_firmware;md5=51e8c19ecc2270f4b8ea30341ad63ce9 \
+ file://LICENCE.siano;md5=4556c1bf830067f12ca151ad953ec2a5 \
+ file://LICENCE.ti-connectivity;md5=c5e02be633f1499c109d1652514d85ec \
+ file://LICENCE.ti-keystone;md5=3a86335d32864b0bef996bee26cc0f2c \
+ file://LICENCE.ueagle-atm4-firmware;md5=4ed7ea6b507ccc583b9d594417714118 \
+ file://LICENCE.via_vt6656;md5=e4159694cba42d4377a912e78a6e850f \
+ file://LICENCE.wl1251;md5=ad3f81922bb9e197014bb187289d3b5b \
+ file://LICENCE.xc4000;md5=0ff51d2dc49fce04814c9155081092f0 \
+ file://LICENCE.xc5000;md5=1e170c13175323c32c7f4d0998d53f66 \
+ file://LICENCE.xc5000c;md5=12b02efa3049db65d524aeb418dd87ca \
+ file://WHENCE;md5=${WHENCE_CHKSUM} \
+ "
+# WHENCE checksum is defined separately to ease overriding it if
+# class-devupstream is selected.
+WHENCE_CHKSUM = "514da1cd8b363373030f0c16749feb8d"
+
+# These are not common licenses, set NO_GENERIC_LICENSE for them
+# so that the license files will be copied from fetched source
+NO_GENERIC_LICENSE[Firmware-Abilis] = "LICENCE.Abilis"
+NO_GENERIC_LICENSE[Firmware-adsp_sst] = "LICENCE.adsp_sst"
+NO_GENERIC_LICENSE[Firmware-agere] = "LICENCE.agere"
+NO_GENERIC_LICENSE[Firmware-amdgpu] = "LICENSE.amdgpu"
+NO_GENERIC_LICENSE[Firmware-amd-ucode] = "LICENSE.amd-ucode"
+NO_GENERIC_LICENSE[Firmware-amlogic_vdec] = "LICENSE.amlogic_vdec"
+NO_GENERIC_LICENSE[Firmware-amphion_vpu] = "LICENSE.amphion_vpu"
+NO_GENERIC_LICENSE[Firmware-atheros_firmware] = "LICENCE.atheros_firmware"
+NO_GENERIC_LICENSE[Firmware-atmel] = "LICENSE.atmel"
+NO_GENERIC_LICENSE[Firmware-broadcom_bcm43xx] = "LICENCE.broadcom_bcm43xx"
+NO_GENERIC_LICENSE[Firmware-ca0132] = "LICENCE.ca0132"
+NO_GENERIC_LICENSE[Firmware-cadence] = "LICENCE.cadence"
+NO_GENERIC_LICENSE[Firmware-cavium] = "LICENCE.cavium"
+NO_GENERIC_LICENSE[Firmware-chelsio_firmware] = "LICENCE.chelsio_firmware"
+NO_GENERIC_LICENSE[Firmware-cirrus] = "LICENSE.cirrus"
+NO_GENERIC_LICENSE[Firmware-cnm] = "LICENCE.cnm"
+NO_GENERIC_LICENSE[Firmware-cw1200] = "LICENCE.cw1200"
+NO_GENERIC_LICENSE[Firmware-cypress] = "LICENCE.cypress"
+NO_GENERIC_LICENSE[Firmware-dib0700] = "LICENSE.dib0700"
+NO_GENERIC_LICENSE[Firmware-e100] = "LICENCE.e100"
+NO_GENERIC_LICENSE[Firmware-ene_firmware] = "LICENCE.ene_firmware"
+NO_GENERIC_LICENSE[Firmware-fw_sst_0f28] = "LICENCE.fw_sst_0f28"
+NO_GENERIC_LICENSE[Firmware-go7007] = "LICENCE.go7007"
+NO_GENERIC_LICENSE[Firmware-hfi1_firmware] = "LICENSE.hfi1_firmware"
+NO_GENERIC_LICENSE[Firmware-i915] = "LICENSE.i915"
+NO_GENERIC_LICENSE[Firmware-ibt_firmware] = "LICENCE.ibt_firmware"
+NO_GENERIC_LICENSE[Firmware-ice] = "LICENSE.ice"
+NO_GENERIC_LICENSE[Firmware-ice_enhanced] = "LICENSE.ice_enhanced"
+NO_GENERIC_LICENSE[Firmware-IntcSST2] = "LICENCE.IntcSST2"
+NO_GENERIC_LICENSE[Firmware-it913x] = "LICENCE.it913x"
+NO_GENERIC_LICENSE[Firmware-iwlwifi_firmware] = "LICENCE.iwlwifi_firmware"
+NO_GENERIC_LICENSE[Firmware-kaweth] = "LICENCE.kaweth"
+NO_GENERIC_LICENSE[Firmware-linaro] = "LICENCE.linaro"
+NO_GENERIC_LICENSE[Firmware-Lontium] = "LICENSE.Lontium"
+NO_GENERIC_LICENSE[Firmware-Marvell] = "LICENCE.Marvell"
+NO_GENERIC_LICENSE[Firmware-mediatek] = "LICENCE.mediatek"
+NO_GENERIC_LICENSE[Firmware-microchip] = "LICENCE.microchip"
+NO_GENERIC_LICENSE[Firmware-moxa] = "LICENCE.moxa"
+NO_GENERIC_LICENSE[Firmware-myri10ge_firmware] = "LICENCE.myri10ge_firmware"
+NO_GENERIC_LICENSE[Firmware-netronome] = "LICENCE.Netronome"
+NO_GENERIC_LICENSE[Firmware-nvidia] = "LICENCE.nvidia"
+NO_GENERIC_LICENSE[Firmware-nxp] = "LICENSE.nxp"
+NO_GENERIC_LICENSE[Firmware-nxp_mc_firmware] = "LICENSE.nxp_mc_firmware"
+NO_GENERIC_LICENSE[Firmware-OLPC] = "LICENCE.OLPC"
+NO_GENERIC_LICENSE[Firmware-ath9k-htc] = "LICENCE.open-ath9k-htc-firmware"
+NO_GENERIC_LICENSE[Firmware-phanfw] = "LICENCE.phanfw"
+NO_GENERIC_LICENSE[Firmware-powervr] = "LICENSE.powervr"
+NO_GENERIC_LICENSE[Firmware-qat] = "LICENCE.qat_firmware"
+NO_GENERIC_LICENSE[Firmware-qcom] = "LICENSE.qcom"
+NO_GENERIC_LICENSE[Firmware-qcom-yamato] = "LICENSE.qcom_yamato"
+NO_GENERIC_LICENSE[Firmware-qla1280] = "LICENCE.qla1280"
+NO_GENERIC_LICENSE[Firmware-qla2xxx] = "LICENCE.qla2xxx"
+NO_GENERIC_LICENSE[Firmware-qualcommAthos_ar3k] = "LICENSE.QualcommAtheros_ar3k"
+NO_GENERIC_LICENSE[Firmware-qualcommAthos_ath10k] = "LICENSE.QualcommAtheros_ath10k"
+NO_GENERIC_LICENSE[Firmware-r8a779x_usb3] = "LICENCE.r8a779x_usb3"
+NO_GENERIC_LICENSE[Firmware-radeon] = "LICENSE.radeon"
+NO_GENERIC_LICENSE[Firmware-ralink_a_mediatek_company_firmware] = "LICENCE.ralink_a_mediatek_company_firmware"
+NO_GENERIC_LICENSE[Firmware-ralink-firmware] = "LICENCE.ralink-firmware.txt"
+NO_GENERIC_LICENSE[Firmware-rockchip] = "LICENCE.rockchip"
+NO_GENERIC_LICENSE[Firmware-rtlwifi_firmware] = "LICENCE.rtlwifi_firmware.txt"
+NO_GENERIC_LICENSE[Firmware-siano] = "LICENCE.siano"
+NO_GENERIC_LICENSE[Firmware-imx-sdma_firmware] = "LICENSE.sdma_firmware"
+NO_GENERIC_LICENSE[Firmware-ti-connectivity] = "LICENCE.ti-connectivity"
+NO_GENERIC_LICENSE[Firmware-ti-keystone] = "LICENCE.ti-keystone"
+NO_GENERIC_LICENSE[Firmware-ueagle-atm4-firmware] = "LICENCE.ueagle-atm4-firmware"
+NO_GENERIC_LICENSE[Firmware-via_vt6656] = "LICENCE.via_vt6656"
+NO_GENERIC_LICENSE[Firmware-wl1251] = "LICENCE.wl1251"
+NO_GENERIC_LICENSE[Firmware-xc4000] = "LICENCE.xc4000"
+NO_GENERIC_LICENSE[Firmware-xc5000] = "LICENCE.xc5000"
+NO_GENERIC_LICENSE[Firmware-xc5000c] = "LICENCE.xc5000c"
+NO_GENERIC_LICENSE[WHENCE] = "WHENCE"
+
+PE = "1"
+
+SRC_URI = "\
+ ${KERNELORG_MIRROR}/linux/kernel/firmware/${BPN}-${PV}.tar.xz \
+"
+
+BBCLASSEXTEND = "devupstream:target"
+SRC_URI:class-devupstream = "git://git.kernel.org/pub/scm/linux/kernel/git/firmware/linux-firmware.git;protocol=https;branch=main"
+# Pin this to the 20220509 release, override this in local.conf
+SRCREV:class-devupstream ?= "b19cbdca78ab2adfd210c91be15a22568e8b8cae"
+
+SRC_URI[sha256sum] = "b2327a54ad1897c828008caf63af5ee15469ba723a5016be58f2b44f07bd4b94"
+
+inherit allarch
+
+CLEANBROKEN = "1"
+
+# Use PACKAGECONFIG_CONFARGS to set the Makefile target
+PACKAGECONFIG ??= ""
+# Enabling dedup will turn duplicate firmware files into links
+PACKAGECONFIG[deduplicate] = "install,install-nodedup,rdfind-native"
+
+do_compile() {
+ :
+}
+
+do_install() {
+ oe_runmake 'DESTDIR=${D}' 'FIRMWAREDIR=${nonarch_base_libdir}/firmware' ${PACKAGECONFIG_CONFARGS}
+ cp LICEN[CS]E.* WHENCE ${D}${nonarch_base_libdir}/firmware/
+}
+
+
+PACKAGES =+ "${PN}-amphion-vpu-license ${PN}-amphion-vpu \
+ ${PN}-cw1200-license ${PN}-cw1200 \
+ ${PN}-ralink-license ${PN}-ralink \
+ ${PN}-mt76x-license ${PN}-mt7601u ${PN}-mt7650 ${PN}-mt76x2 \
+ ${PN}-radeon-license ${PN}-radeon \
+ ${PN}-amdgpu-license ${PN}-amdgpu \
+ ${PN}-marvell-license ${PN}-pcie8897 ${PN}-pcie8997 \
+ ${PN}-mediatek-license ${PN}-mediatek \
+ ${PN}-microchip-license ${PN}-microchip \
+ ${PN}-moxa-license ${PN}-moxa \
+ ${PN}-sd8686 ${PN}-sd8688 ${PN}-sd8787 ${PN}-sd8797 ${PN}-sd8801 \
+ ${PN}-sd8887 ${PN}-sd8897 ${PN}-sd8997 ${PN}-usb8997 \
+ ${PN}-ti-connectivity-license ${PN}-wlcommon ${PN}-wl12xx ${PN}-wl18xx \
+ ${PN}-ti-keystone-license ${PN}-ti-keystone \
+ ${PN}-vt6656-license ${PN}-vt6656 \
+ ${PN}-rs9113 ${PN}-rs9116 \
+ ${PN}-rtl-license ${PN}-rtl8188 ${PN}-rtl8192cu ${PN}-rtl8192ce ${PN}-rtl8192su ${PN}-rtl8723 ${PN}-rtl8821 \
+ ${PN}-rtl8761 \
+ ${PN}-rtl8168 \
+ ${PN}-rtl8822 \
+ ${PN}-rtl-nic \
+ ${PN}-cypress-license \
+ ${PN}-broadcom-license \
+ ${PN}-bcm-0bb4-0306 \
+ ${PN}-bcm43143 \
+ ${PN}-bcm43236b \
+ ${PN}-bcm43241b0 \
+ ${PN}-bcm43241b4 \
+ ${PN}-bcm43241b5 \
+ ${PN}-bcm43242a \
+ ${PN}-bcm4329 \
+ ${PN}-bcm4329-fullmac \
+ ${PN}-bcm4330 \
+ ${PN}-bcm4334 \
+ ${PN}-bcm43340 \
+ ${PN}-bcm4335 \
+ ${PN}-bcm43362 \
+ ${PN}-bcm4339 \
+ ${PN}-bcm43430 \
+ ${PN}-bcm43430a0 \
+ ${PN}-bcm43455 \
+ ${PN}-bcm4350 \
+ ${PN}-bcm4350c2 \
+ ${PN}-bcm4354 \
+ ${PN}-bcm4356 \
+ ${PN}-bcm4356-pcie \
+ ${PN}-bcm43569 \
+ ${PN}-bcm43570 \
+ ${PN}-bcm4358 \
+ ${PN}-bcm43602 \
+ ${PN}-bcm4366b \
+ ${PN}-bcm4366c \
+ ${PN}-bcm4371 \
+ ${PN}-bcm4373 \
+ ${PN}-bcm43xx \
+ ${PN}-bcm43xx-hdr \
+ ${PN}-cirrus-license ${PN}-cirrus \
+ ${PN}-cnm-license ${PN}-cnm \
+ ${PN}-atheros-license ${PN}-ar5523 ${PN}-ar9170 ${PN}-ath6k ${PN}-ath9k ${PN}-ath3k \
+ ${PN}-carl9170 \
+ ${PN}-ar3k-license ${PN}-ar3k ${PN}-ath10k-license ${PN}-ath10k ${PN}-ath11k ${PN}-qca \
+ \
+ ${PN}-imx-sdma-license ${PN}-imx-sdma-imx6q ${PN}-imx-sdma-imx7d \
+ \
+ ${PN}-iwlwifi-license ${PN}-iwlwifi \
+ ${PN}-iwlwifi-135-6 \
+ ${PN}-iwlwifi-3160-7 ${PN}-iwlwifi-3160-8 ${PN}-iwlwifi-3160-9 \
+ ${PN}-iwlwifi-3160-10 ${PN}-iwlwifi-3160-12 ${PN}-iwlwifi-3160-13 \
+ ${PN}-iwlwifi-3160-16 ${PN}-iwlwifi-3160-17 \
+ ${PN}-iwlwifi-6000-4 ${PN}-iwlwifi-6000g2a-5 ${PN}-iwlwifi-6000g2a-6 \
+ ${PN}-iwlwifi-6000g2b-5 ${PN}-iwlwifi-6000g2b-6 \
+ ${PN}-iwlwifi-6050-4 ${PN}-iwlwifi-6050-5 \
+ ${PN}-iwlwifi-7260 \
+ ${PN}-iwlwifi-7265 \
+ ${PN}-iwlwifi-7265d ${PN}-iwlwifi-8000c ${PN}-iwlwifi-8265 \
+ ${PN}-iwlwifi-9000 \
+ ${PN}-iwlwifi-9260 \
+ ${PN}-iwlwifi-misc \
+ ${PN}-ibt-license ${PN}-ibt \
+ ${PN}-ibt-11-5 ${PN}-ibt-12-16 ${PN}-ibt-hw-37-7 ${PN}-ibt-hw-37-8 \
+ ${PN}-ibt-17 \
+ ${PN}-ibt-20 \
+ ${PN}-ibt-misc \
+ ${PN}-i915-license ${PN}-i915 \
+ ${PN}-ice-license ${PN}-ice \
+ ${PN}-ice-enhanced-license ${PN}-ice-enhanced \
+ ${PN}-adsp-sst-license ${PN}-adsp-sst \
+ ${PN}-bnx2 \
+ ${PN}-bnx2x \
+ ${PN}-liquidio \
+ ${PN}-mellanox \
+ ${PN}-nvidia-license \
+ ${PN}-nvidia-tegra-k1 ${PN}-nvidia-tegra \
+ ${PN}-nvidia-gpu \
+ ${PN}-nxp-license \
+ ${PN}-nxp8987-sdio \
+ ${PN}-nxp8997-common \
+ ${PN}-nxp8997-pcie \
+ ${PN}-nxp8997-sdio \
+ ${PN}-nxp9098-common \
+ ${PN}-nxp9098-pcie \
+ ${PN}-nxp9098-sdio \
+ ${PN}-nxpiw416-sdio \
+ ${PN}-nxpiw612-sdio \
+ ${PN}-nxp-mc-license ${PN}-nxp-mc \
+ ${PN}-netronome-license ${PN}-netronome \
+ ${PN}-olpc-license ${PN}-olpc \
+ ${PN}-phanfw-license ${PN}-phanfw \
+ ${PN}-powervr-license ${PN}-powervr \
+ ${PN}-prestera \
+ ${PN}-qat ${PN}-qat-license \
+ ${PN}-qed \
+ ${PN}-qcom-license ${PN}-qcom-yamato-license \
+ ${PN}-qcom-venus-1.8 ${PN}-qcom-venus-4.2 ${PN}-qcom-venus-5.2 ${PN}-qcom-venus-5.4 ${PN}-qcom-venus-6.0 \
+ ${PN}-qcom-vpu-1.0 ${PN}-qcom-vpu-2.0 \
+ ${PN}-qcom-adreno-a2xx ${PN}-qcom-adreno-a3xx ${PN}-qcom-adreno-a4xx ${PN}-qcom-adreno-a530 \
+ ${PN}-qcom-adreno-a630 ${PN}-qcom-adreno-a650 ${PN}-qcom-adreno-a660 ${PN}-qcom-adreno-a702 \
+ ${PN}-qcom-apq8016-modem ${PN}-qcom-apq8016-wifi \
+ ${PN}-qcom-apq8096-adreno ${PN}-qcom-apq8096-audio ${PN}-qcom-apq8096-modem \
+ ${PN}-qcom-qcm2290-adreno ${PN}-qcom-qcm2290-audio ${PN}-qcom-qcm2290-modem ${PN}-qcom-qcm2290-wifi \
+ ${PN}-qcom-qrb4210-adreno ${PN}-qcom-qrb4210-audio ${PN}-qcom-qrb4210-compute \
+ ${PN}-qcom-qrb4210-modem ${PN}-qcom-qrb4210-wifi \
+ ${PN}-qcom-sc8280xp-lenovo-x13s-compat \
+ ${PN}-qcom-sc8280xp-lenovo-x13s-audio \
+ ${PN}-qcom-sc8280xp-lenovo-x13s-adreno \
+ ${PN}-qcom-sc8280xp-lenovo-x13s-compute \
+ ${PN}-qcom-sc8280xp-lenovo-x13s-sensors \
+ ${PN}-qcom-sdm845-adreno ${PN}-qcom-sdm845-audio ${PN}-qcom-sdm845-compute ${PN}-qcom-sdm845-modem \
+ ${PN}-qcom-sdm845-thundercomm-db845c-sensors \
+ ${PN}-qcom-sm8250-adreno ${PN}-qcom-sm8250-audio ${PN}-qcom-sm8250-compute \
+ ${PN}-qcom-sm8250-thundercomm-rb5-sensors \
+ ${PN}-qla2xxx ${PN}-qla2xxx-license \
+ ${PN}-rockchip-license ${PN}-rockchip-dptx \
+ ${PN}-amlogic-vdec-license ${PN}-amlogic-vdec \
+ ${PN}-lt9611uxc ${PN}-lontium-license \
+ ${PN}-whence-license \
+ ${PN}-wl1251-license ${PN}-wl1251 \
+ ${PN}-xc4000-license ${PN}-xc4000 \
+ ${PN}-xc5000-license ${PN}-xc5000 \
+ ${PN}-xc5000c-license ${PN}-xc5000c \
+ ${PN}-license \
+ "
+
+# For Amphion VPU
+LICENSE:${PN}-amphion-vpu = "Firmware-amphion_vpu"
+LICENSE:${PN}-amphion-vpu-license = "Firmware-amphion_vpu"
+
+FILES:${PN}-amphion-vpu = "${nonarch_base_libdir}/firmware/amphion/*"
+FILES:${PN}-amphion-vpu-license = " \
+ ${nonarch_base_libdir}/firmware/LICENSE.amphion_vpu \
+"
+RDEPENDS:${PN}-amphion-vpu += "${PN}-amphion-vpu-license"
+
+# For cw1200
+LICENSE:${PN}-cw1200 = "Firmware-cw1200"
+LICENSE:${PN}-cw1200-license = "Firmware-cw1200"
+
+FILES:${PN}-cw1200 = "${nonarch_base_libdir}/firmware/wsm_22.bin"
+FILES:${PN}-cw1200-license = "${nonarch_base_libdir}/firmware/LICENCE.cw1200"
+
+RDEPENDS:${PN}-cw1200 += "${PN}-cw1200-license"
+
+# For atheros
+LICENSE:${PN}-ar5523 = "Firmware-atheros_firmware"
+LICENSE:${PN}-ar9170 = "Firmware-atheros_firmware"
+LICENSE:${PN}-ath3k = "Firmware-atheros_firmware"
+LICENSE:${PN}-ath6k = "Firmware-atheros_firmware"
+LICENSE:${PN}-ath9k = "Firmware-atheros_firmware"
+LICENSE:${PN}-atheros-license = "Firmware-atheros_firmware"
+
+FILES:${PN}-atheros-license = "${nonarch_base_libdir}/firmware/LICENCE.atheros_firmware"
+FILES:${PN}-ar5523 = " \
+ ${nonarch_base_libdir}/firmware/ar5523.bin \
+"
+FILES:${PN}-ar9170 = " \
+ ${nonarch_base_libdir}/firmware/ar9170*.fw \
+"
+FILES:${PN}-ath3k = " \
+ ${nonarch_base_libdir}/firmware/ath3k*fw \
+"
+FILES:${PN}-ath6k = " \
+ ${nonarch_base_libdir}/firmware/ath6k \
+"
+FILES:${PN}-ath9k = " \
+ ${nonarch_base_libdir}/firmware/ar9271.fw \
+ ${nonarch_base_libdir}/firmware/ar7010*.fw \
+ ${nonarch_base_libdir}/firmware/htc_9271.fw \
+ ${nonarch_base_libdir}/firmware/htc_7010.fw \
+ ${nonarch_base_libdir}/firmware/ath9k_htc/htc_7010-1.4.0.fw \
+ ${nonarch_base_libdir}/firmware/ath9k_htc/htc_9271-1.4.0.fw \
+"
+
+RDEPENDS:${PN}-ar5523 += "${PN}-atheros-license"
+RDEPENDS:${PN}-ar9170 += "${PN}-atheros-license"
+RDEPENDS:${PN}-ath6k += "${PN}-atheros-license"
+RDEPENDS:${PN}-ath9k += "${PN}-atheros-license"
+
+# For carl9170
+
+FILES:${PN}-carl9170 = " \
+ ${nonarch_base_libdir}/firmware/carl9170*.fw \
+"
+LICENSE:${PN}-carl9170 = "GPL-2.0-or-later"
+
+# For QualCommAthos
+LICENSE:${PN}-ar3k = "Firmware-qualcommAthos_ar3k & Firmware-atheros_firmware"
+LICENSE:${PN}-ar3k-license = "Firmware-qualcommAthos_ar3k"
+LICENSE:${PN}-ath10k = "Firmware-qualcommAthos_ath10k"
+LICENSE:${PN}-ath10k-license = "Firmware-qualcommAthos_ath10k"
+LICENSE:${PN}-qca = "Firmware-qualcommAthos_ath10k"
+
+FILES:${PN}-ar3k-license = "${nonarch_base_libdir}/firmware/LICENSE.QualcommAtheros_ar3k"
+FILES:${PN}-ar3k = " \
+ ${nonarch_base_libdir}/firmware/ar3k \
+"
+
+FILES:${PN}-ath10k-license = "${nonarch_base_libdir}/firmware/LICENSE.QualcommAtheros_ath10k"
+FILES:${PN}-ath10k = " \
+ ${nonarch_base_libdir}/firmware/ath10k \
+"
+
+FILES:${PN}-ath11k = " \
+ ${nonarch_base_libdir}/firmware/ath11k \
+"
+
+FILES:${PN}-qca = " \
+ ${nonarch_base_libdir}/firmware/qca \
+"
+
+RDEPENDS:${PN}-ar3k += "${PN}-ar3k-license ${PN}-atheros-license"
+RDEPENDS:${PN}-ath10k += "${PN}-ath10k-license"
+RDEPENDS:${PN}-ath11k += "${PN}-ath10k-license"
+RDEPENDS:${PN}-qca += "${PN}-ath10k-license"
+
+# For ralink
+LICENSE:${PN}-ralink = "Firmware-ralink-firmware"
+LICENSE:${PN}-ralink-license = "Firmware-ralink-firmware"
+
+FILES:${PN}-ralink-license = "${nonarch_base_libdir}/firmware/LICENCE.ralink-firmware.txt"
+FILES:${PN}-ralink = " \
+ ${nonarch_base_libdir}/firmware/rt*.bin \
+"
+
+RDEPENDS:${PN}-ralink += "${PN}-ralink-license"
+
+# For mediatek MT7601U
+LICENSE:${PN}-mt76x-license = "Firmware-ralink_a_mediatek_company_firmware"
+FILES:${PN}-mt76x-license = "${nonarch_base_libdir}/firmware/LICENCE.ralink_a_mediatek_company_firmware"
+
+LICENSE:${PN}-mt7601u = "Firmware-ralink_a_mediatek_company_firmware"
+
+FILES:${PN}-mt7601u = " \
+ ${nonarch_base_libdir}/firmware/mediatek/mt7601u.bin \
+ ${nonarch_base_libdir}/firmware/mt7601u.bin \
+"
+RDEPENDS:${PN}-mt7601u += "${PN}-mt76x-license"
+
+# For MediaTek Bluetooth USB driver 7650
+LICENSE:${PN}-mt7650 = "Firmware-ralink_a_mediatek_company_firmware"
+
+FILES:${PN}-mt7650 = " \
+ ${nonarch_base_libdir}/firmware/mediatek/mt7650.bin \
+ ${nonarch_base_libdir}/firmware/mt7650.bin \
+"
+RDEPENDS:${PN}-mt7650 += "${PN}-mt76x-license"
+
+# For MediaTek MT76x2 Wireless MACs
+LICENSE:${PN}-mt76x2 = "Firmware-ralink_a_mediatek_company_firmware"
+
+FILES:${PN}-mt76x2 = " \
+ ${nonarch_base_libdir}/firmware/mediatek/mt7662.bin \
+ ${nonarch_base_libdir}/firmware/mt7662.bin \
+ ${nonarch_base_libdir}/firmware/mediatek/mt7662_rom_patch.bin \
+ ${nonarch_base_libdir}/firmware/mt7662_rom_patch.bin \
+"
+RDEPENDS:${PN}-mt76x2 += "${PN}-mt76x-license"
+
+# For MediaTek
+LICENSE:${PN}-mediatek = "Firmware-mediatek"
+LICENSE:${PN}-mediatek-license = "Firmware-mediatek"
+
+FILES:${PN}-mediatek = " \
+ ${nonarch_base_libdir}/firmware/mediatek/* \
+ ${nonarch_base_libdir}/firmware/vpu_d.bin \
+ ${nonarch_base_libdir}/firmware/vpu_p.bin \
+"
+FILES:${PN}-mediatek-license = " \
+ ${nonarch_base_libdir}/firmware/LICENCE.mediatek \
+"
+RDEPENDS:${PN}-mediatek += "${PN}-mediatek-license"
+
+# For Microchip
+LICENSE:${PN}-microchip = "Firmware-microchip"
+LICENSE:${PN}-microchip-license = "Firmware-microchip"
+
+FILES:${PN}-microchip = "${nonarch_base_libdir}/firmware/microchip/*"
+FILES:${PN}-microchip-license = " \
+ ${nonarch_base_libdir}/firmware/LICENCE.microchip \
+"
+RDEPENDS:${PN}-microchip += "${PN}-microchip-license"
+
+# For MOXA
+LICENSE:${PN}-moxa = "Firmware-moxa"
+LICENSE:${PN}-moxa-license = "Firmware-moxa"
+
+FILES:${PN}-moxa = "${nonarch_base_libdir}/firmware/moxa"
+FILES:${PN}-moxa-license = "${nonarch_base_libdir}/firmware/LICENCE.moxa"
+
+RDEPENDS:${PN}-moxa += "${PN}-moxa-license"
+
+# For radeon
+
+LICENSE:${PN}-radeon = "Firmware-radeon"
+LICENSE:${PN}-radeon-license = "Firmware-radeon"
+
+FILES:${PN}-radeon-license = "${nonarch_base_libdir}/firmware/LICENSE.radeon"
+FILES:${PN}-radeon = " \
+ ${nonarch_base_libdir}/firmware/radeon \
+"
+
+RDEPENDS:${PN}-radeon += "${PN}-radeon-license"
+
+# For amdgpu
+LICENSE:${PN}-amdgpu = "Firmware-amdgpu"
+LICENSE:${PN}-amdgpu-license = "Firmware-amdgpu"
+
+FILES:${PN}-amdgpu-license = "${nonarch_base_libdir}/firmware/LICENSE.amdgpu"
+FILES:${PN}-amdgpu = " \
+ ${nonarch_base_libdir}/firmware/amdgpu \
+"
+
+RDEPENDS:${PN}-amdgpu += "${PN}-amdgpu-license"
+
+# For lontium
+LICENSE:${PN}-lt9611uxc = "Firmware-Lontium"
+
+FILES:${PN}-lontium-license = "${nonarch_base_libdir}/firmware/LICENSE.Lontium"
+FILES:${PN}-lt9611uxc = "${nonarch_base_libdir}/firmware/lt9611uxc_fw.bin"
+
+# For marvell
+LICENSE:${PN}-pcie8897 = "Firmware-Marvell"
+LICENSE:${PN}-pcie8997 = "Firmware-Marvell"
+LICENSE:${PN}-sd8686 = "Firmware-Marvell"
+LICENSE:${PN}-sd8688 = "Firmware-Marvell"
+LICENSE:${PN}-sd8787 = "Firmware-Marvell"
+LICENSE:${PN}-sd8797 = "Firmware-Marvell"
+LICENSE:${PN}-sd8801 = "Firmware-Marvell"
+LICENSE:${PN}-sd8887 = "Firmware-Marvell"
+LICENSE:${PN}-sd8897 = "Firmware-Marvell"
+LICENSE:${PN}-sd8997 = "Firmware-Marvell"
+LICENSE:${PN}-usb8997 = "Firmware-Marvell"
+LICENSE:${PN}-marvell-license = "Firmware-Marvell"
+
+FILES:${PN}-marvell-license = "${nonarch_base_libdir}/firmware/LICENCE.Marvell"
+FILES:${PN}-pcie8897 = " \
+ ${nonarch_base_libdir}/firmware/mrvl/pcie8897_uapsta.bin \
+"
+FILES:${PN}-pcie8997 = " \
+ ${nonarch_base_libdir}/firmware/mrvl/pcie8997_wlan_v4.bin \
+ ${nonarch_base_libdir}/firmware/mrvl/pcieuart8997_combo_v4.bin \
+ ${nonarch_base_libdir}/firmware/mrvl/pcieusb8997_combo_v4.bin \
+"
+FILES:${PN}-sd8686 = " \
+ ${nonarch_base_libdir}/firmware/libertas/sd8686_v9* \
+ ${nonarch_base_libdir}/firmware/sd8686* \
+"
+FILES:${PN}-sd8688 = " \
+ ${nonarch_base_libdir}/firmware/libertas/sd8688* \
+ ${nonarch_base_libdir}/firmware/mrvl/sd8688* \
+"
+FILES:${PN}-sd8787 = " \
+ ${nonarch_base_libdir}/firmware/mrvl/sd8787_uapsta.bin \
+"
+FILES:${PN}-sd8797 = " \
+ ${nonarch_base_libdir}/firmware/mrvl/sd8797_uapsta.bin \
+"
+FILES:${PN}-sd8801 = " \
+ ${nonarch_base_libdir}/firmware/mrvl/sd8801_uapsta.bin \
+"
+FILES:${PN}-sd8887 = " \
+ ${nonarch_base_libdir}/firmware/mrvl/sd8887_uapsta.bin \
+"
+FILES:${PN}-sd8897 = " \
+ ${nonarch_base_libdir}/firmware/mrvl/sd8897_uapsta.bin \
+"
+do_install:append() {
+ # The kernel 5.6.x driver still uses the old name, provide a symlink for
+ # older kernels
+ ln -fs sdsd8997_combo_v4.bin ${D}${nonarch_base_libdir}/firmware/mrvl/sd8997_uapsta.bin
+}
+FILES:${PN}-sd8997 = " \
+ ${nonarch_base_libdir}/firmware/mrvl/sd8997_uapsta.bin \
+ ${nonarch_base_libdir}/firmware/mrvl/sdsd8997_combo_v4.bin \
+"
+FILES:${PN}-usb8997 = " \
+ ${nonarch_base_libdir}/firmware/mrvl/usbusb8997_combo_v4.bin \
+"
+
+RDEPENDS:${PN}-sd8686 += "${PN}-marvell-license"
+RDEPENDS:${PN}-sd8688 += "${PN}-marvell-license"
+RDEPENDS:${PN}-sd8787 += "${PN}-marvell-license"
+RDEPENDS:${PN}-sd8797 += "${PN}-marvell-license"
+RDEPENDS:${PN}-sd8801 += "${PN}-marvell-license"
+RDEPENDS:${PN}-sd8887 += "${PN}-marvell-license"
+RDEPENDS:${PN}-sd8897 += "${PN}-marvell-license"
+RDEPENDS:${PN}-sd8997 += "${PN}-marvell-license"
+RDEPENDS:${PN}-usb8997 += "${PN}-marvell-license"
+
+# For netronome
+LICENSE:${PN}-netronome = "Firmware-netronome"
+
+FILES:${PN}-netronome-license = " \
+ ${nonarch_base_libdir}/firmware/LICENCE.Netronome \
+"
+FILES:${PN}-netronome = " \
+ ${nonarch_base_libdir}/firmware/netronome/nic_AMDA0081*.nffw \
+ ${nonarch_base_libdir}/firmware/netronome/nic_AMDA0096*.nffw \
+ ${nonarch_base_libdir}/firmware/netronome/nic_AMDA0097*.nffw \
+ ${nonarch_base_libdir}/firmware/netronome/nic_AMDA0099*.nffw \
+ ${nonarch_base_libdir}/firmware/netronome/nic_AMDA0058-0011_2x40.nffw \
+ ${nonarch_base_libdir}/firmware/netronome/nic_AMDA0058-0012_2x40.nffw \
+ ${nonarch_base_libdir}/firmware/netronome/nic_AMDA0078-0011_1x100.nffw \
+ ${nonarch_base_libdir}/firmware/netronome/bpf \
+ ${nonarch_base_libdir}/firmware/netronome/flower \
+ ${nonarch_base_libdir}/firmware/netronome/nic \
+ ${nonarch_base_libdir}/firmware/netronome/nic-sriov \
+"
+
+RDEPENDS:${PN}-netronome += "${PN}-netronome-license"
+
+# For NXP
+LICENSE:${PN}-nxp8987-sdio = "Firmware-nxp"
+LICENSE:${PN}-nxp8997-common = "Firmware-nxp"
+LICENSE:${PN}-nxp8997-pcie = "Firmware-nxp"
+LICENSE:${PN}-nxp8997-sdio = "Firmware-nxp"
+LICENSE:${PN}-nxp9098-common = "Firmware-nxp"
+LICENSE:${PN}-nxp9098-pcie = "Firmware-nxp"
+LICENSE:${PN}-nxp9098-sdio = "Firmware-nxp"
+LICENSE:${PN}-nxpiw416-sdio = "Firmware-nxp"
+LICENSE:${PN}-nxpiw612-sdio = "Firmware-nxp"
+LICENSE:${PN}-nxp-license = "Firmware-nxp"
+
+FILES:${PN}-nxp8987-sdio = "${nonarch_base_libdir}/firmware/nxp/*8987*"
+FILES:${PN}-nxp8997-common = " \
+ ${nonarch_base_libdir}/firmware/nxp/uartuart8997_bt_v4.bin \
+ ${nonarch_base_libdir}/firmware/nxp/helper_uart_3000000.bin \
+"
+ALLOW_EMPTY:${PN}-nxp8997-pcie = "1"
+ALLOW_EMPTY:${PN}-nxp8997-sdio = "1"
+FILES:${PN}-nxp9098-common = "${nonarch_base_libdir}/firmware/nxp/uartuart9098_bt_v1.bin"
+ALLOW_EMPTY:${PN}-nxp9098-pcie = "1"
+ALLOW_EMPTY:${PN}-nxp9098-sdio = "1"
+FILES:${PN}-nxpiw416-sdio = "${nonarch_base_libdir}/firmware/nxp/*iw416*"
+FILES:${PN}-nxpiw612-sdio = "${nonarch_base_libdir}/firmware/nxp/uartspi_n61x_v1.bin.se"
+FILES:${PN}-nxp-license = "${nonarch_base_libdir}/firmware/LICENSE.nxp"
+
+RDEPENDS:${PN}-nxp8987-sdio += "${PN}-nxp-license"
+RDEPENDS:${PN}-nxp8997-common += "${PN}-nxp-license"
+RDEPENDS:${PN}-nxp8997-pcie += "${PN}-nxp8997-common"
+RDEPENDS:${PN}-nxp8997-sdio += "${PN}-nxp8997-common"
+RDEPENDS:${PN}-nxp9098-common += "${PN}-nxp-license"
+RDEPENDS:${PN}-nxp9098-pcie += "${PN}-nxp9098-common"
+RDEPENDS:${PN}-nxp9098-sdio += "${PN}-nxp9098-common"
+RDEPENDS:${PN}-nxpiw416-sdio += "${PN}-nxp-license"
+RDEPENDS:${PN}-nxpiw612-sdio += "${PN}-nxp-license"
+
+# For nxp-mc
+LICENSE:${PN}-nxp-mc = "Firmware-nxp_mc_firmware"
+LICENSE:${PN}-nxp-mc-license = "Firmware-nxp_mc_firmware"
+
+FILES:${PN}-nxp-mc= "${nonarch_base_libdir}/firmware/dpaa2/mc/*"
+FILES:${PN}-nxp-mc-license = " \
+ ${nonarch_base_libdir}/firmware/LICENSE.nxp_mc_firmware \
+"
+RDEPENDS:${PN}-nxp-mc += "${PN}-nxp-mc-license"
+
+# For Nvidia
+LICENSE:${PN}-nvidia-gpu = "Firmware-nvidia"
+LICENSE:${PN}-nvidia-tegra = "Firmware-nvidia"
+LICENSE:${PN}-nvidia-tegra-k1 = "Firmware-nvidia"
+LICENSE:${PN}-nvidia-license = "Firmware-nvidia"
+
+FILES:${PN}-nvidia-gpu = "${nonarch_base_libdir}/firmware/nvidia"
+FILES:${PN}-nvidia-tegra = " \
+ ${nonarch_base_libdir}/firmware/nvidia/tegra* \
+ ${nonarch_base_libdir}/firmware/nvidia/gm20b \
+ ${nonarch_base_libdir}/firmware/nvidia/gp10b \
+"
+FILES:${PN}-nvidia-tegra-k1 = " \
+ ${nonarch_base_libdir}/firmware/nvidia/tegra124 \
+ ${nonarch_base_libdir}/firmware/nvidia/gk20a \
+"
+FILES:${PN}-nvidia-license = "${nonarch_base_libdir}/firmware/LICENCE.nvidia"
+
+RDEPENDS:${PN}-nvidia-gpu += "${PN}-nvidia-license"
+RDEPENDS:${PN}-nvidia-tegra += "${PN}-nvidia-license"
+RDEPENDS:${PN}-nvidia-tegra-k1 += "${PN}-nvidia-license"
+
+# For OLPC
+LICENSE:${PN}-olpc = "Firmware-OLPC"
+LICENSE:${PN}-olpc-license = "Firmware-OLPC"
+
+FILES:${PN}-olpc = " \
+ ${nonarch_base_libdir}/firmware/libertas/lbtf_sdio.bin \
+ ${nonarch_base_libdir}/firmware/lbtf_usb.bin \
+ ${nonarch_base_libdir}/firmware/libertas/usb8388_olpc.bin \
+"
+FILES:${PN}-olpc-license = "${nonarch_base_libdir}/firmware/LICENCE.OLPC"
+
+RDEPENDS:${PN}-olpc += "${PN}-olpc-license"
+
+# For phanfw
+LICENSE:${PN}-phanfw = "Firmware-phanfw"
+LICENSE:${PN}-phanfw-license = "Firmware-phanfw"
+
+FILES:${PN}-phanfw = "${nonarch_base_libdir}/firmware/phanfw.bin"
+FILES:${PN}-phanfw-license = "${nonarch_base_libdir}/firmware/LICENCE.phanfw"
+
+RDEPENDS:${PN}-phanfw += "${PN}-phanfw-license"
+
+# For PowerVR
+LICENSE:${PN}-powervr = "Firmware-powervr"
+LICENSE:${PN}-powervr-license = "Firmware-powervr"
+
+FILES:${PN}-powervr = "${nonarch_base_libdir}/firmware/powervr"
+FILES:${PN}-powervr-license = "${nonarch_base_libdir}/firmware/LICENSE.powervr"
+
+RDEPENDS:${PN}-powervr += "${PN}-powervr-license"
+
+# For qla2xxx
+LICENSE:${PN}-qla2xxx = "Firmware-qla2xxx"
+LICENSE:${PN}-qla2xxx-license = "Firmware-qla2xxx"
+
+FILES:${PN}-qla2xxx = "${nonarch_base_libdir}/firmware/ql2*"
+FILES:${PN}-qla2xxx-license = "${nonarch_base_libdir}/firmware/LICENCE.qla2xxx"
+
+RDEPENDS:${PN}-qla2xxx += "${PN}-qla2xxx-license"
+
+# For RSI RS911x WiFi
+LICENSE:${PN}-rs9113 = "WHENCE"
+LICENSE:${PN}-rs9116 = "WHENCE"
+
+FILES:${PN}-rs9113 = " ${nonarch_base_libdir}/firmware/rsi/rs9113*.rps "
+FILES:${PN}-rs9116 = " ${nonarch_base_libdir}/firmware/rsi/rs9116*.rps "
+
+RDEPENDS:${PN}-rs9113 += "${PN}-whence-license"
+RDEPENDS:${PN}-rs9116 += "${PN}-whence-license"
+
+# For rtl
+LICENSE:${PN}-rtl8188 = "Firmware-rtlwifi_firmware"
+LICENSE:${PN}-rtl8192cu = "Firmware-rtlwifi_firmware"
+LICENSE:${PN}-rtl8192ce = "Firmware-rtlwifi_firmware"
+LICENSE:${PN}-rtl8192su = "Firmware-rtlwifi_firmware"
+LICENSE:${PN}-rtl8723 = "Firmware-rtlwifi_firmware"
+LICENSE:${PN}-rtl8761 = "Firmware-rtlwifi_firmware"
+LICENSE:${PN}-rtl8821 = "Firmware-rtlwifi_firmware"
+LICENSE:${PN}-rtl8822 = "Firmware-rtlwifi_firmware"
+LICENSE:${PN}-rtl-license = "Firmware-rtlwifi_firmware"
+LICENSE:${PN}-rtl-nic = "WHENCE"
+LICENSE:${PN}-rtl8168 = "WHENCE"
+
+FILES:${PN}-rtl-license = " \
+ ${nonarch_base_libdir}/firmware/LICENCE.rtlwifi_firmware.txt \
+"
+FILES:${PN}-rtl8188 = " \
+ ${nonarch_base_libdir}/firmware/rtlwifi/rtl8188*.bin \
+"
+FILES:${PN}-rtl8192cu = " \
+ ${nonarch_base_libdir}/firmware/rtlwifi/rtl8192cufw*.bin \
+"
+FILES:${PN}-rtl8192ce = " \
+ ${nonarch_base_libdir}/firmware/rtlwifi/rtl8192cfw*.bin \
+"
+FILES:${PN}-rtl8192su = " \
+ ${nonarch_base_libdir}/firmware/rtlwifi/rtl8712u.bin \
+"
+FILES:${PN}-rtl8723 = " \
+ ${nonarch_base_libdir}/firmware/rtlwifi/rtl8723*.bin \
+"
+FILES:${PN}-rtl8821 = " \
+ ${nonarch_base_libdir}/firmware/rtlwifi/rtl8821*.bin \
+ ${nonarch_base_libdir}/firmware/rtw88/rtw8821*.bin \
+"
+FILES:${PN}-rtl8761 = " \
+ ${nonarch_base_libdir}/firmware/rtl_bt/rtl8761*.bin \
+"
+FILES:${PN}-rtl8168 = " \
+ ${nonarch_base_libdir}/firmware/rtl_nic/rtl8168*.fw \
+"
+FILES:${PN}-rtl8822 = " \
+ ${nonarch_base_libdir}/firmware/rtl_bt/rtl8822*.bin \
+ ${nonarch_base_libdir}/firmware/rtw88/rtw8822*.bin \
+ ${nonarch_base_libdir}/firmware/rtlwifi/rtl8822*.bin \
+"
+FILES:${PN}-rtl-nic = " \
+ ${nonarch_base_libdir}/firmware/rtl_nic/*.fw \
+"
+
+RDEPENDS:${PN}-rtl8188 += "${PN}-rtl-license"
+RDEPENDS:${PN}-rtl8192ce += "${PN}-rtl-license"
+RDEPENDS:${PN}-rtl8192cu += "${PN}-rtl-license"
+RDEPENDS:${PN}-rtl8192su = "${PN}-rtl-license"
+RDEPENDS:${PN}-rtl8723 += "${PN}-rtl-license"
+RDEPENDS:${PN}-rtl8821 += "${PN}-rtl-license"
+RDEPENDS:${PN}-rtl8761 += "${PN}-rtl-license"
+RDEPENDS:${PN}-rtl8822 += "${PN}-rtl-license"
+RDEPENDS:${PN}-rtl8168 += "${PN}-whence-license"
+RDEPENDS:${PN}-rtl-nic += "${PN}-whence-license"
+
+# For TI wl1251
+LICENSE:${PN}-wl1251 = "Firmware-wl1251"
+LICENSE:${PN}-wl1251-license = "Firmware-wl1251"
+
+FILES:${PN}-wl1251 = " \
+ ${nonarch_base_libdir}/firmware/ti-connectivity/wl1251-fw.bin \
+ ${nonarch_base_libdir}/firmware/ti-connectivity/wl1251-nvs.bin \
+"
+FILES:${PN}-wl1251-license = "${nonarch_base_libdir}/firmware/LICENCE.wl1251"
+
+RDEPENDS:${PN}-wl1251 += "${PN}-wl1251-license"
+
+# For ti-connectivity
+LICENSE:${PN}-wlcommon = "Firmware-ti-connectivity"
+LICENSE:${PN}-wl12xx = "Firmware-ti-connectivity"
+LICENSE:${PN}-wl18xx = "Firmware-ti-connectivity"
+LICENSE:${PN}-ti-connectivity-license = "Firmware-ti-connectivity"
+
+FILES:${PN}-ti-connectivity-license = "${nonarch_base_libdir}/firmware/LICENCE.ti-connectivity"
+# wl18xx optionally needs wl1271-nvs.bin (which itself is a symlink to
+# wl127x-nvs.bin) - see linux/drivers/net/wireless/ti/wlcore/sdio.c
+# and drivers/net/wireless/ti/wlcore/spi.c.
+# While they're optional and actually only used to override the MAC
+# address on wl18xx, driver loading will delay (by udev timout - 60s)
+# if not there. So let's make it available always. Because it's a
+# symlink, both need to go to wlcommon.
+FILES:${PN}-wlcommon = " \
+ ${nonarch_base_libdir}/firmware/ti-connectivity/TI* \
+ ${nonarch_base_libdir}/firmware/ti-connectivity/wl127x-nvs.bin \
+ ${nonarch_base_libdir}/firmware/ti-connectivity/wl1271-nvs.bin \
+"
+FILES:${PN}-wl12xx = " \
+ ${nonarch_base_libdir}/firmware/ti-connectivity/wl12* \
+"
+FILES:${PN}-wl18xx = " \
+ ${nonarch_base_libdir}/firmware/ti-connectivity/wl18* \
+"
+
+RDEPENDS:${PN}-wl12xx = "${PN}-ti-connectivity-license ${PN}-wlcommon"
+RDEPENDS:${PN}-wl18xx = "${PN}-ti-connectivity-license ${PN}-wlcommon"
+
+# For ti-keystone
+LICENSE:${PN}-ti-keystone = "Firmware-ti-keystone"
+LICENSE:${PN}-ti-keystone-license = "Firmware-ti-keystone"
+
+FILES:${PN}-ti-keystone = "${nonarch_base_libdir}/firmware/ti-keystone/*"
+FILES:${PN}-ti-keystone-license = " \
+ ${nonarch_base_libdir}/firmware/LICENCE.ti-keystone \
+"
+RDEPENDS:${PN}-ti-keystone += "${PN}-ti-keystone-license"
+
+# For vt6656
+LICENSE:${PN}-vt6656 = "Firmware-via_vt6656"
+LICENSE:${PN}-vt6656-license = "Firmware-via_vt6656"
+
+FILES:${PN}-vt6656-license = "${nonarch_base_libdir}/firmware/LICENCE.via_vt6656"
+FILES:${PN}-vt6656 = " \
+ ${nonarch_base_libdir}/firmware/vntwusb.fw \
+"
+
+RDEPENDS:${PN}-vt6656 = "${PN}-vt6656-license"
+
+# For xc4000
+LICENSE:${PN}-xc4000 = "Firmware-xc4000"
+LICENSE:${PN}-xc4000-license = "Firmware-xc4000"
+
+FILES:${PN}-xc4000 = "${nonarch_base_libdir}/firmware/dvb-fe-xc4000-1.4.1.fw"
+FILES:${PN}-xc4000-license = "${nonarch_base_libdir}/firmware/LICENCE.xc4000"
+
+RDEPENDS:${PN}-xc4000 += "${PN}-xc4000-license"
+
+# For xc5000
+LICENSE:${PN}-xc5000 = "Firmware-xc5000"
+LICENSE:${PN}-xc5000-license = "Firmware-xc5000"
+
+FILES:${PN}-xc5000 = "${nonarch_base_libdir}/firmware/dvb-fe-xc5000-1.6.114.fw"
+FILES:${PN}-xc5000-license = "${nonarch_base_libdir}/firmware/LICENCE.xc5000"
+
+RDEPENDS:${PN}-xc5000 += "${PN}-xc5000-license"
+
+# For xc5000c
+LICENSE:${PN}-xc5000c = "Firmware-xc5000c"
+LICENSE:${PN}-xc5000c-license = "Firmware-xc5000c"
+
+FILES:${PN}-xc5000c = " \
+ ${nonarch_base_libdir}/firmware/dvb-fe-xc5000c-4.1.30.7.fw \
+"
+FILES:${PN}-xc5000c-license = "${nonarch_base_libdir}/firmware/LICENCE.xc5000c"
+
+RDEPENDS:${PN}-xc5000c += "${PN}-xc5000c-license"
+
+# For broadcom
+
+# for i in `grep brcm WHENCE | grep ^File | sed 's/File: brcm.//g'`; do pkg=`echo $i | sed 's/-[sp40].*//g; s/\.bin//g; s/brcmfmac/bcm/g; s/_hdr/-hdr/g; s/BCM/bcm-0bb4-0306/g'`; echo -e " \${PN}-$pkg \\"; done | sort -u
+
+LICENSE:${PN}-broadcom-license = "Firmware-broadcom_bcm43xx"
+FILES:${PN}-broadcom-license = "${nonarch_base_libdir}/firmware/LICENCE.broadcom_bcm43xx"
+
+# for i in `grep brcm WHENCE | grep ^File | sed 's/File: brcm.//g'`; do pkg=`echo $i | sed 's/-[sp40].*//g; s/\.bin//g; s/brcmfmac/bcm/g; s/_hdr/-hdr/g; s/BCM/bcm-0bb4-0306/g'`; echo "$i - $pkg"; echo -e "FILES:\${PN}-$pkg = \"\${nonarch_base_libdir}/firmware/brcm/$i\""; done | grep ^FILES
+
+FILES:${PN}-bcm43xx = "${nonarch_base_libdir}/firmware/brcm/bcm43xx-0.fw"
+FILES:${PN}-bcm43xx-hdr = "${nonarch_base_libdir}/firmware/brcm/bcm43xx_hdr-0.fw"
+FILES:${PN}-bcm4329-fullmac = "${nonarch_base_libdir}/firmware/brcm/bcm4329-fullmac-4.bin"
+FILES:${PN}-bcm43236b = "${nonarch_base_libdir}/firmware/brcm/brcmfmac43236b.bin"
+FILES:${PN}-bcm4329 = "${nonarch_base_libdir}/firmware/brcm/brcmfmac4329-sdio.bin"
+FILES:${PN}-bcm4330 = "${nonarch_base_libdir}/firmware/brcm/brcmfmac4330-sdio.*"
+FILES:${PN}-bcm4334 = "${nonarch_base_libdir}/firmware/brcm/brcmfmac4334-sdio.bin"
+FILES:${PN}-bcm4335 = "${nonarch_base_libdir}/firmware/brcm/brcmfmac4335-sdio.bin"
+FILES:${PN}-bcm4339 = "${nonarch_base_libdir}/firmware/brcm/brcmfmac4339-sdio.bin \
+ ${nonarch_base_libdir}/firmware/cypress/cyfmac4339-sdio.bin \
+"
+FILES:${PN}-bcm43241b0 = "${nonarch_base_libdir}/firmware/brcm/brcmfmac43241b0-sdio.bin"
+FILES:${PN}-bcm43241b4 = "${nonarch_base_libdir}/firmware/brcm/brcmfmac43241b4-sdio.bin"
+FILES:${PN}-bcm43241b5 = "${nonarch_base_libdir}/firmware/brcm/brcmfmac43241b5-sdio.bin"
+FILES:${PN}-bcm43242a = "${nonarch_base_libdir}/firmware/brcm/brcmfmac43242a.bin"
+FILES:${PN}-bcm43143 = "${nonarch_base_libdir}/firmware/brcm/brcmfmac43143.bin \
+ ${nonarch_base_libdir}/firmware/brcm/brcmfmac43143-sdio.bin \
+"
+FILES:${PN}-bcm43430a0 = "${nonarch_base_libdir}/firmware/brcm/brcmfmac43430a0-sdio.*"
+FILES:${PN}-bcm43455 = "${nonarch_base_libdir}/firmware/brcm/brcmfmac43455-sdio.* \
+ ${nonarch_base_libdir}/firmware/cypress/cyfmac43455-sdio.* \
+"
+FILES:${PN}-bcm4350c2 = "${nonarch_base_libdir}/firmware/brcm/brcmfmac4350c2-pcie.bin"
+FILES:${PN}-bcm4350 = "${nonarch_base_libdir}/firmware/brcm/brcmfmac4350-pcie.bin"
+FILES:${PN}-bcm4356 = "${nonarch_base_libdir}/firmware/brcm/brcmfmac4356-sdio.* \
+ ${nonarch_base_libdir}/firmware/cypress/cyfmac4356-sdio.* \
+"
+FILES:${PN}-bcm43569 = "${nonarch_base_libdir}/firmware/brcm/brcmfmac43569.bin"
+FILES:${PN}-bcm43570 = "${nonarch_base_libdir}/firmware/brcm/brcmfmac43570-pcie.bin \
+ ${nonarch_base_libdir}/firmware/cypress/cyfmac43570-pcie.bin \
+"
+FILES:${PN}-bcm4358 = "${nonarch_base_libdir}/firmware/brcm/brcmfmac4358-pcie.bin"
+FILES:${PN}-bcm43602 = "${nonarch_base_libdir}/firmware/brcm/brcmfmac43602-pcie.bin \
+ ${nonarch_base_libdir}/firmware/brcm/brcmfmac43602-pcie.ap.bin \
+"
+FILES:${PN}-bcm4366b = "${nonarch_base_libdir}/firmware/brcm/brcmfmac4366b-pcie.bin"
+FILES:${PN}-bcm4366c = "${nonarch_base_libdir}/firmware/brcm/brcmfmac4366c-pcie.bin"
+FILES:${PN}-bcm4371 = "${nonarch_base_libdir}/firmware/brcm/brcmfmac4371-pcie.bin"
+
+# for i in `grep brcm WHENCE | grep ^File | sed 's/File: brcm.//g'`; do pkg=`echo $i | sed 's/-[sp40].*//g; s/\.bin//g; s/brcmfmac/bcm/g; s/_hdr/-hdr/g; s/BCM/bcm-0bb4-0306/g'`; echo -e "LICENSE:\${PN}-$pkg = \"Firmware-broadcom_bcm43xx\"\nRDEPENDS_\${PN}-$pkg += \"\${PN}-broadcom-license\""; done
+# Currently 1st one and last 6 have cypress LICENSE
+
+LICENSE:${PN}-bcm43xx = "Firmware-broadcom_bcm43xx"
+RDEPENDS:${PN}-bcm43xx += "${PN}-broadcom-license"
+LICENSE:${PN}-bcm43xx-hdr = "Firmware-broadcom_bcm43xx"
+RDEPENDS:${PN}-bcm43xx-hdr += "${PN}-broadcom-license"
+LICENSE:${PN}-bcm4329-fullmac = "Firmware-broadcom_bcm43xx"
+RDEPENDS:${PN}-bcm4329-fullmac += "${PN}-broadcom-license"
+LICENSE:${PN}-bcm43236b = "Firmware-broadcom_bcm43xx"
+RDEPENDS:${PN}-bcm43236b += "${PN}-broadcom-license"
+LICENSE:${PN}-bcm4329 = "Firmware-broadcom_bcm43xx"
+RDEPENDS:${PN}-bcm4329 += "${PN}-broadcom-license"
+LICENSE:${PN}-bcm4330 = "Firmware-broadcom_bcm43xx"
+RDEPENDS:${PN}-bcm4330 += "${PN}-broadcom-license"
+LICENSE:${PN}-bcm4334 = "Firmware-broadcom_bcm43xx"
+RDEPENDS:${PN}-bcm4334 += "${PN}-broadcom-license"
+LICENSE:${PN}-bcm4335 = "Firmware-broadcom_bcm43xx"
+RDEPENDS:${PN}-bcm4335 += "${PN}-broadcom-license"
+LICENSE:${PN}-bcm4339 = "Firmware-broadcom_bcm43xx"
+RDEPENDS:${PN}-bcm4339 += "${PN}-broadcom-license"
+LICENSE:${PN}-bcm43241b0 = "Firmware-broadcom_bcm43xx"
+RDEPENDS:${PN}-bcm43241b0 += "${PN}-broadcom-license"
+LICENSE:${PN}-bcm43241b4 = "Firmware-broadcom_bcm43xx"
+RDEPENDS:${PN}-bcm43241b4 += "${PN}-broadcom-license"
+LICENSE:${PN}-bcm43241b5 = "Firmware-broadcom_bcm43xx"
+RDEPENDS:${PN}-bcm43241b5 += "${PN}-broadcom-license"
+LICENSE:${PN}-bcm43242a = "Firmware-broadcom_bcm43xx"
+RDEPENDS:${PN}-bcm43242a += "${PN}-broadcom-license"
+LICENSE:${PN}-bcm43143 = "Firmware-broadcom_bcm43xx"
+RDEPENDS:${PN}-bcm43143 += "${PN}-broadcom-license"
+LICENSE:${PN}-bcm43430a0 = "Firmware-broadcom_bcm43xx"
+RDEPENDS:${PN}-bcm43430a0 += "${PN}-broadcom-license"
+LICENSE:${PN}-bcm43455 = "Firmware-broadcom_bcm43xx"
+RDEPENDS:${PN}-bcm43455 += "${PN}-broadcom-license"
+LICENSE:${PN}-bcm4350c2 = "Firmware-broadcom_bcm43xx"
+RDEPENDS:${PN}-bcm4350c2 += "${PN}-broadcom-license"
+LICENSE:${PN}-bcm4350 = "Firmware-broadcom_bcm43xx"
+RDEPENDS:${PN}-bcm4350 += "${PN}-broadcom-license"
+LICENSE:${PN}-bcm4356 = "Firmware-broadcom_bcm43xx"
+RDEPENDS:${PN}-bcm4356 += "${PN}-broadcom-license"
+LICENSE:${PN}-bcm43569 = "Firmware-broadcom_bcm43xx"
+RDEPENDS:${PN}-bcm43569 += "${PN}-broadcom-license"
+LICENSE:${PN}-bcm43570 = "Firmware-broadcom_bcm43xx"
+RDEPENDS:${PN}-bcm43570 += "${PN}-broadcom-license"
+LICENSE:${PN}-bcm4358 = "Firmware-broadcom_bcm43xx"
+RDEPENDS:${PN}-bcm4358 += "${PN}-broadcom-license"
+LICENSE:${PN}-bcm43602 = "Firmware-broadcom_bcm43xx"
+RDEPENDS:${PN}-bcm43602 += "${PN}-broadcom-license"
+LICENSE:${PN}-bcm4366b = "Firmware-broadcom_bcm43xx"
+RDEPENDS:${PN}-bcm4366b += "${PN}-broadcom-license"
+LICENSE:${PN}-bcm4366c = "Firmware-broadcom_bcm43xx"
+RDEPENDS:${PN}-bcm4366c += "${PN}-broadcom-license"
+LICENSE:${PN}-bcm4371 = "Firmware-broadcom_bcm43xx"
+RDEPENDS:${PN}-bcm4371 += "${PN}-broadcom-license"
+
+# For broadcom cypress
+
+LICENSE:${PN}-cypress-license = "Firmware-cypress"
+FILES:${PN}-cypress-license = "${nonarch_base_libdir}/firmware/LICENCE.cypress"
+
+FILES:${PN}-bcm-0bb4-0306 = "${nonarch_base_libdir}/firmware/brcm/BCM-0bb4-0306.hcd"
+FILES:${PN}-bcm43340 = "${nonarch_base_libdir}/firmware/brcm/brcmfmac43340-sdio.* \
+ ${nonarch_base_libdir}/firmware/cypress/cyfmac43340-sdio.*"
+FILES:${PN}-bcm43362 = "${nonarch_base_libdir}/firmware/brcm/brcmfmac43362-sdio.* \
+ ${nonarch_base_libdir}/firmware/cypress/cyfmac43362-sdio.*"
+FILES:${PN}-bcm43430 = "${nonarch_base_libdir}/firmware/brcm/brcmfmac43430-sdio.* \
+ ${nonarch_base_libdir}/firmware/cypress/cyfmac43430-sdio.*"
+FILES:${PN}-bcm4354 = "${nonarch_base_libdir}/firmware/brcm/brcmfmac4354-sdio.bin \
+ ${nonarch_base_libdir}/firmware/cypress/cyfmac4354-sdio.bin \
+"
+FILES:${PN}-bcm4356-pcie = "${nonarch_base_libdir}/firmware/brcm/brcmfmac4356-pcie.* \
+ ${nonarch_base_libdir}/firmware/cypress/cyfmac4356-pcie.* \
+"
+FILES:${PN}-bcm4373 = "${nonarch_base_libdir}/firmware/brcm/brcmfmac4373-sdio.bin \
+ ${nonarch_base_libdir}/firmware/brcm/brcmfmac4373.bin \
+ ${nonarch_base_libdir}/firmware/cypress/cyfmac4373-sdio.bin \
+ ${nonarch_base_libdir}/firmware/brcm/brcmfmac4373-sdio.clm_blob \
+ ${nonarch_base_libdir}/firmware/cypress/cyfmac4373-sdio.clm_blob \
+"
+
+LICENSE:${PN}-bcm-0bb4-0306 = "Firmware-cypress"
+RDEPENDS:${PN}-bcm-0bb4-0306 += "${PN}-cypress-license"
+LICENSE:${PN}-bcm43340 = "Firmware-cypress"
+RDEPENDS:${PN}-bcm43340 += "${PN}-cypress-license"
+LICENSE:${PN}-bcm43362 = "Firmware-cypress"
+RDEPENDS:${PN}-bcm43362 += "${PN}-cypress-license"
+LICENSE:${PN}-bcm43430 = "Firmware-cypress"
+RDEPENDS:${PN}-bcm43430 += "${PN}-cypress-license"
+LICENSE:${PN}-bcm4354 = "Firmware-cypress"
+RDEPENDS:${PN}-bcm4354 += "${PN}-cypress-license"
+LICENSE:${PN}-bcm4356-pcie = "Firmware-cypress"
+RDEPENDS:${PN}-bcm4356-pcie += "${PN}-cypress-license"
+LICENSE:${PN}-bcm4373 = "Firmware-cypress"
+RDEPENDS:${PN}-bcm4373 += "${PN}-cypress-license"
+
+# For Broadcom bnx2
+#
+# which is a separate case to the other Broadcom firmwares since its
+# license is contained in the shared WHENCE file.
+
+LICENSE:${PN}-bnx2 = "WHENCE"
+LICENSE:${PN}-whence-license = "WHENCE"
+
+FILES:${PN}-bnx2 = " \
+ ${nonarch_base_libdir}/firmware/bnx2/bnx2-mips*.fw \
+ ${nonarch_base_libdir}/firmware/bnx2/bnx2-rv2p*.fw \
+"
+FILES:${PN}-whence-license = "${nonarch_base_libdir}/firmware/WHENCE"
+
+RDEPENDS:${PN}-bnx2 += "${PN}-whence-license"
+RPROVIDES:${PN}-bnx2 = "${PN}-bnx2-mips"
+
+LICENSE:${PN}-bnx2x = "WHENCE"
+
+FILES:${PN}-bnx2x = "${nonarch_base_libdir}/firmware/bnx2x/bnx2x*.fw"
+
+RDEPENDS:${PN}-bnx2x += "${PN}-whence-license"
+
+# For cirrus
+LICENSE:${PN}-cirrus = "Firmware-cirrus"
+LICENSE:${PN}-cirrus-license = "Firmware-cirrus"
+
+FILES:${PN}-cirrus = "${nonarch_base_libdir}/firmware/cirrus/*"
+FILES:${PN}-cirrus-license = "${nonarch_base_libdir}/firmware/LICENSE.cirrus"
+
+RDEPENDS:${PN}-cirrus += "${PN}-cirrus-license"
+
+# For cnm
+LICENSE:${PN}-cnm = "Firmware-cnm"
+LICENSE:${PN}-cnm-license = "Firmware-cnm"
+
+FILES:${PN}-cnm = "${nonarch_base_libdir}/firmware/cnm/wave521c_k3_codec_fw.bin"
+FILES:${PN}-cnm-license = "${nonarch_base_libdir}/firmware/LICENCE.cnm"
+
+RDEPENDS:${PN}-cnm += "${PN}-cnm-license"
+
+# For imx-sdma
+LICENSE:${PN}-imx-sdma-imx6q = "Firmware-imx-sdma_firmware"
+LICENSE:${PN}-imx-sdma-imx7d = "Firmware-imx-sdma_firmware"
+LICENSE:${PN}-imx-sdma-license = "Firmware-imx-sdma_firmware"
+
+FILES:${PN}-imx-sdma-imx6q = "${nonarch_base_libdir}/firmware/imx/sdma/sdma-imx6q.bin"
+
+RPROVIDES:${PN}-imx-sdma-imx6q = "firmware-imx-sdma-imx6q"
+RREPLACES:${PN}-imx-sdma-imx6q = "firmware-imx-sdma-imx6q"
+RCONFLICTS:${PN}-imx-sdma-imx6q = "firmware-imx-sdma-imx6q"
+
+FILES:${PN}-imx-sdma-imx7d = "${nonarch_base_libdir}/firmware/imx/sdma/sdma-imx7d.bin"
+
+FILES:${PN}-imx-sdma-license = "${nonarch_base_libdir}/firmware/LICENSE.sdma_firmware"
+
+RDEPENDS:${PN}-imx-sdma-imx6q += "${PN}-imx-sdma-license"
+RDEPENDS:${PN}-imx-sdma-imx7d += "${PN}-imx-sdma-license"
+
+# For iwlwifi
+LICENSE:${PN}-iwlwifi = "Firmware-iwlwifi_firmware"
+LICENSE:${PN}-iwlwifi-135-6 = "Firmware-iwlwifi_firmware"
+LICENSE:${PN}-iwlwifi-3160-7 = "Firmware-iwlwifi_firmware"
+LICENSE:${PN}-iwlwifi-3160-8 = "Firmware-iwlwifi_firmware"
+LICENSE:${PN}-iwlwifi-3160-9 = "Firmware-iwlwifi_firmware"
+LICENSE:${PN}-iwlwifi-3160-10 = "Firmware-iwlwifi_firmware"
+LICENSE:${PN}-iwlwifi-3160-12 = "Firmware-iwlwifi_firmware"
+LICENSE:${PN}-iwlwifi-3160-13 = "Firmware-iwlwifi_firmware"
+LICENSE:${PN}-iwlwifi-3160-16 = "Firmware-iwlwifi_firmware"
+LICENSE:${PN}-iwlwifi-3160-17 = "Firmware-iwlwifi_firmware"
+LICENSE:${PN}-iwlwifi-6000-4 = "Firmware-iwlwifi_firmware"
+LICENSE:${PN}-iwlwifi-6000g2a-5 = "Firmware-iwlwifi_firmware"
+LICENSE:${PN}-iwlwifi-6000g2a-6 = "Firmware-iwlwifi_firmware"
+LICENSE:${PN}-iwlwifi-6000g2b-5 = "Firmware-iwlwifi_firmware"
+LICENSE:${PN}-iwlwifi-6000g2b-6 = "Firmware-iwlwifi_firmware"
+LICENSE:${PN}-iwlwifi-6050-4 = "Firmware-iwlwifi_firmware"
+LICENSE:${PN}-iwlwifi-6050-5 = "Firmware-iwlwifi_firmware"
+LICENSE:${PN}-iwlwifi-7260 = "Firmware-iwlwifi_firmware"
+LICENSE:${PN}-iwlwifi-7265 = "Firmware-iwlwifi_firmware"
+LICENSE:${PN}-iwlwifi-7265d = "Firmware-iwlwifi_firmware"
+LICENSE:${PN}-iwlwifi-8000c = "Firmware-iwlwifi_firmware"
+LICENSE:${PN}-iwlwifi-8265 = "Firmware-iwlwifi_firmware"
+LICENSE:${PN}-iwlwifi-9000 = "Firmware-iwlwifi_firmware"
+LICENSE:${PN}-iwlwifi-9260 = "Firmware-iwlwifi_firmware"
+LICENSE:${PN}-iwlwifi-misc = "Firmware-iwlwifi_firmware"
+LICENSE:${PN}-iwlwifi-license = "Firmware-iwlwifi_firmware"
+
+
+FILES:${PN}-iwlwifi-license = "${nonarch_base_libdir}/firmware/LICENCE.iwlwifi_firmware"
+FILES:${PN}-iwlwifi-135-6 = "${nonarch_base_libdir}/firmware/iwlwifi-135-6.ucode"
+FILES:${PN}-iwlwifi-3160-7 = "${nonarch_base_libdir}/firmware/iwlwifi-3160-7.ucode"
+FILES:${PN}-iwlwifi-3160-8 = "${nonarch_base_libdir}/firmware/iwlwifi-3160-8.ucode"
+FILES:${PN}-iwlwifi-3160-9 = "${nonarch_base_libdir}/firmware/iwlwifi-3160-9.ucode"
+FILES:${PN}-iwlwifi-3160-10 = "${nonarch_base_libdir}/firmware/iwlwifi-3160-10.ucode"
+FILES:${PN}-iwlwifi-3160-12 = "${nonarch_base_libdir}/firmware/iwlwifi-3160-12.ucode"
+FILES:${PN}-iwlwifi-3160-13 = "${nonarch_base_libdir}/firmware/iwlwifi-3160-13.ucode"
+FILES:${PN}-iwlwifi-3160-16 = "${nonarch_base_libdir}/firmware/iwlwifi-3160-16.ucode"
+FILES:${PN}-iwlwifi-3160-17 = "${nonarch_base_libdir}/firmware/iwlwifi-3160-17.ucode"
+FILES:${PN}-iwlwifi-6000-4 = "${nonarch_base_libdir}/firmware/iwlwifi-6000-4.ucode"
+FILES:${PN}-iwlwifi-6000g2a-5 = "${nonarch_base_libdir}/firmware/iwlwifi-6000g2a-5.ucode"
+FILES:${PN}-iwlwifi-6000g2a-6 = "${nonarch_base_libdir}/firmware/iwlwifi-6000g2a-6.ucode"
+FILES:${PN}-iwlwifi-6000g2b-5 = "${nonarch_base_libdir}/firmware/iwlwifi-6000g2b-5.ucode"
+FILES:${PN}-iwlwifi-6000g2b-6 = "${nonarch_base_libdir}/firmware/iwlwifi-6000g2b-6.ucode"
+FILES:${PN}-iwlwifi-6050-4 = "${nonarch_base_libdir}/firmware/iwlwifi-6050-4.ucode"
+FILES:${PN}-iwlwifi-6050-5 = "${nonarch_base_libdir}/firmware/iwlwifi-6050-5.ucode"
+FILES:${PN}-iwlwifi-7260 = "${nonarch_base_libdir}/firmware/iwlwifi-7260-*.ucode"
+FILES:${PN}-iwlwifi-7265 = "${nonarch_base_libdir}/firmware/iwlwifi-7265-*.ucode"
+FILES:${PN}-iwlwifi-7265d = "${nonarch_base_libdir}/firmware/iwlwifi-7265D-*.ucode"
+FILES:${PN}-iwlwifi-8000c = "${nonarch_base_libdir}/firmware/iwlwifi-8000C-*.ucode"
+FILES:${PN}-iwlwifi-8265 = "${nonarch_base_libdir}/firmware/iwlwifi-8265-*.ucode"
+FILES:${PN}-iwlwifi-9000 = "${nonarch_base_libdir}/firmware/iwlwifi-9000-*.ucode"
+FILES:${PN}-iwlwifi-9260 = "${nonarch_base_libdir}/firmware/iwlwifi-9260-*.ucode"
+FILES:${PN}-iwlwifi-misc = " \
+ ${nonarch_base_libdir}/firmware/iwlwifi-*.ucode \
+ ${nonarch_base_libdir}/firmware/iwlwifi-*.pnvm \
+"
+
+RDEPENDS:${PN}-iwlwifi-135-6 = "${PN}-iwlwifi-license"
+RDEPENDS:${PN}-iwlwifi-3160-7 = "${PN}-iwlwifi-license"
+RDEPENDS:${PN}-iwlwifi-3160-8 = "${PN}-iwlwifi-license"
+RDEPENDS:${PN}-iwlwifi-3160-9 = "${PN}-iwlwifi-license"
+RDEPENDS:${PN}-iwlwifi-3160-10 = "${PN}-iwlwifi-license"
+RDEPENDS:${PN}-iwlwifi-3160-12 = "${PN}-iwlwifi-license"
+RDEPENDS:${PN}-iwlwifi-3160-13 = "${PN}-iwlwifi-license"
+RDEPENDS:${PN}-iwlwifi-3160-16 = "${PN}-iwlwifi-license"
+RDEPENDS:${PN}-iwlwifi-3160-17 = "${PN}-iwlwifi-license"
+RDEPENDS:${PN}-iwlwifi-6000-4 = "${PN}-iwlwifi-license"
+RDEPENDS:${PN}-iwlwifi-6000g2a-5 = "${PN}-iwlwifi-license"
+RDEPENDS:${PN}-iwlwifi-6000g2a-6 = "${PN}-iwlwifi-license"
+RDEPENDS:${PN}-iwlwifi-6000g2b-5 = "${PN}-iwlwifi-license"
+RDEPENDS:${PN}-iwlwifi-6000g2b-6 = "${PN}-iwlwifi-license"
+RDEPENDS:${PN}-iwlwifi-6050-4 = "${PN}-iwlwifi-license"
+RDEPENDS:${PN}-iwlwifi-6050-5 = "${PN}-iwlwifi-license"
+RDEPENDS:${PN}-iwlwifi-7260 = "${PN}-iwlwifi-license"
+RDEPENDS:${PN}-iwlwifi-7265 = "${PN}-iwlwifi-license"
+RDEPENDS:${PN}-iwlwifi-7265d = "${PN}-iwlwifi-license"
+RDEPENDS:${PN}-iwlwifi-8000c = "${PN}-iwlwifi-license"
+RDEPENDS:${PN}-iwlwifi-8265 = "${PN}-iwlwifi-license"
+RDEPENDS:${PN}-iwlwifi-9000 = "${PN}-iwlwifi-license"
+RDEPENDS:${PN}-iwlwifi-9260 = "${PN}-iwlwifi-license"
+RDEPENDS:${PN}-iwlwifi-misc = "${PN}-iwlwifi-license"
+
+# -iwlwifi-misc is a "catch all" package that includes all the iwlwifi
+# firmwares that are not already included in other -iwlwifi- packages.
+# -iwlwifi is a virtual package that depends upon all iwlwifi packages.
+# These are distinct in order to allow the -misc firmwares to be installed
+# without pulling in every other iwlwifi package.
+ALLOW_EMPTY:${PN}-iwlwifi = "1"
+ALLOW_EMPTY:${PN}-iwlwifi-misc = "1"
+
+# Handle package updating for the newly merged iwlwifi groupings
+RPROVIDES:${PN}-iwlwifi-7265 = "${PN}-iwlwifi-7265-8 ${PN}-iwlwifi-7265-9"
+RREPLACES:${PN}-iwlwifi-7265 = "${PN}-iwlwifi-7265-8 ${PN}-iwlwifi-7265-9"
+RCONFLICTS:${PN}-iwlwifi-7265 = "${PN}-iwlwifi-7265-8 ${PN}-iwlwifi-7265-9"
+
+RPROVIDES:${PN}-iwlwifi-7260 = "${PN}-iwlwifi-7260-7 ${PN}-iwlwifi-7260-8 ${PN}-iwlwifi-7260-9"
+RREPLACES:${PN}-iwlwifi-7260 = "${PN}-iwlwifi-7260-7 ${PN}-iwlwifi-7260-8 ${PN}-iwlwifi-7260-9"
+RCONFLICTS:${PN}-iwlwifi-7260 = "${PN}-iwlwifi-7260-7 ${PN}-iwlwifi-7260-8 ${PN}-iwlwifi-7260-9"
+
+# For ibt
+LICENSE:${PN}-ibt-license = "Firmware-ibt_firmware"
+LICENSE:${PN}-ibt-hw-37-7 = "Firmware-ibt_firmware"
+LICENSE:${PN}-ibt-hw-37-8 = "Firmware-ibt_firmware"
+LICENSE:${PN}-ibt-11-5 = "Firmware-ibt_firmware"
+LICENSE:${PN}-ibt-12-16 = "Firmware-ibt_firmware"
+LICENSE:${PN}-ibt-17 = "Firmware-ibt_firmware"
+LICENSE:${PN}-ibt-20 = "Firmware-ibt_firmware"
+LICENSE:${PN}-ibt-misc = "Firmware-ibt_firmware"
+
+FILES:${PN}-ibt-license = "${nonarch_base_libdir}/firmware/LICENCE.ibt_firmware"
+FILES:${PN}-ibt-hw-37-7 = "${nonarch_base_libdir}/firmware/intel/ibt-hw-37.7*.bseq"
+FILES:${PN}-ibt-hw-37-8 = "${nonarch_base_libdir}/firmware/intel/ibt-hw-37.8*.bseq"
+FILES:${PN}-ibt-11-5 = "${nonarch_base_libdir}/firmware/intel/ibt-11-5.sfi ${nonarch_base_libdir}/firmware/intel/ibt-11-5.ddc"
+FILES:${PN}-ibt-12-16 = "${nonarch_base_libdir}/firmware/intel/ibt-12-16.sfi ${nonarch_base_libdir}/firmware/intel/ibt-12-16.ddc"
+FILES:${PN}-ibt-17 = "${nonarch_base_libdir}/firmware/intel/ibt-17-*.sfi ${nonarch_base_libdir}/firmware/intel/ibt-17-*.ddc"
+FILES:${PN}-ibt-20 = "${nonarch_base_libdir}/firmware/intel/ibt-20-*.sfi ${nonarch_base_libdir}/firmware/intel/ibt-20-*.ddc"
+FILES:${PN}-ibt-misc = "${nonarch_base_libdir}/firmware/intel/ibt-*"
+
+RDEPENDS:${PN}-ibt-hw-37-7 = "${PN}-ibt-license"
+RDEPENDS:${PN}-ibt-hw-37.8 = "${PN}-ibt-license"
+RDEPENDS:${PN}-ibt-11-5 = "${PN}-ibt-license"
+RDEPENDS:${PN}-ibt-12-16 = "${PN}-ibt-license"
+RDEPENDS:${PN}-ibt-17 = "${PN}-ibt-license"
+RDEPENDS:${PN}-ibt-20 = "${PN}-ibt-license"
+RDEPENDS:${PN}-ibt-misc = "${PN}-ibt-license"
+
+ALLOW_EMPTY:${PN}-ibt= "1"
+ALLOW_EMPTY:${PN}-ibt-misc = "1"
+
+LICENSE:${PN}-i915 = "Firmware-i915"
+LICENSE:${PN}-i915-license = "Firmware-i915"
+FILES:${PN}-i915-license = "${nonarch_base_libdir}/firmware/LICENSE.i915"
+FILES:${PN}-i915 = "${nonarch_base_libdir}/firmware/i915"
+RDEPENDS:${PN}-i915 = "${PN}-i915-license"
+
+# For ice-enhanced
+LICENSE:${PN}-ice-enhanced = "Firmware-ice_enhanced"
+LICENSE:${PN}-ice-enhanced-license = "Firmware-ice_enhanced"
+
+FILES:${PN}-ice-enhanced = " \
+ ${nonarch_base_libdir}/firmware/intel/ice/ddp-comms/* \
+ ${nonarch_base_libdir}/firmware/intel/ice/ddp-wireless_edge/* \
+"
+FILES:${PN}-ice-enhanced-license = " \
+ ${nonarch_base_libdir}/firmware/LICENSE.ice_enhanced \
+"
+RDEPENDS:${PN}-ice-enhanced = "${PN}-ice-enhanced-license"
+
+LICENSE:${PN}-ice = "Firmware-ice"
+LICENSE:${PN}-ice-license = "Firmware-ice"
+FILES:${PN}-ice-license = "${nonarch_base_libdir}/firmware/LICENSE.ice"
+FILES:${PN}-ice = " \
+ ${nonarch_base_libdir}/firmware/intel/ice/ddp/* \
+ ${nonarch_base_libdir}/firmware/intel/ice/ddp-lag/* \
+"
+RDEPENDS:${PN}-ice = "${PN}-ice-license"
+
+FILES:${PN}-adsp-sst-license = "${nonarch_base_libdir}/firmware/LICENCE.adsp_sst"
+LICENSE:${PN}-adsp-sst = "Firmware-adsp_sst"
+LICENSE:${PN}-adsp-sst-license = "Firmware-adsp_sst"
+FILES:${PN}-adsp-sst = "${nonarch_base_libdir}/firmware/intel/dsp_fw*"
+RDEPENDS:${PN}-adsp-sst = "${PN}-adsp-sst-license"
+
+# For QAT
+LICENSE:${PN}-qat = "Firmware-qat"
+LICENSE:${PN}-qat-license = "Firmware-qat"
+FILES:${PN}-qat-license = "${nonarch_base_libdir}/firmware/LICENCE.qat_firmware"
+FILES:${PN}-qat = "${nonarch_base_libdir}/firmware/qat*.bin"
+RDEPENDS:${PN}-qat = "${PN}-qat-license"
+
+LICENSE:${PN}-qed = "WHENCE"
+FILES:${PN}-qed = "${nonarch_base_libdir}/firmware/qed/*"
+
+# For QCOM VPU/GPU and SDM845
+LICENSE:${PN}-qcom-license = "Firmware-qcom"
+LICENSE:${PN}-qcom-yamato-license = "Firmware-qcom-yamato"
+LICENSE:${PN}-qcom-venus-1.8 = "Firmware-qcom"
+LICENSE:${PN}-qcom-venus-4.2 = "Firmware-qcom"
+LICENSE:${PN}-qcom-venus-5.2 = "Firmware-qcom"
+LICENSE:${PN}-qcom-venus-5.4 = "Firmware-qcom"
+LICENSE:${PN}-qcom-venus-6.0 = "Firmware-qcom"
+LICENSE:${PN}-qcom-vpu-1.0 = "Firmware-qcom"
+LICENSE:${PN}-qcom-vpu-2.0 = "Firmware-qcom"
+LICENSE:${PN}-qcom-adreno-a2xx = "Firmware-qcom Firmware-qcom-yamato"
+LICENSE:${PN}-qcom-adreno-a3xx = "Firmware-qcom"
+LICENSE:${PN}-qcom-adreno-a4xx = "Firmware-qcom"
+LICENSE:${PN}-qcom-adreno-a530 = "Firmware-qcom"
+LICENSE:${PN}-qcom-adreno-a630 = "Firmware-qcom"
+LICENSE:${PN}-qcom-adreno-a650 = "Firmware-qcom"
+LICENSE:${PN}-qcom-adreno-a660 = "Firmware-qcom"
+LICENSE:${PN}-qcom-adreno-a702 = "Firmware-qcom"
+LICENSE:${PN}-qcom-apq8016-modem = "Firmware-qcom"
+LICENSE:${PN}-qcom-apq8016-wifi = "Firmware-qcom"
+LICENSE:${PN}-qcom-apq8096-audio = "Firmware-qcom"
+LICENSE:${PN}-qcom-apq8096-adreno = "Firmware-qcom"
+LICENSE:${PN}-qcom-apq8096-modem = "Firmware-qcom"
+LICENSE:${PN}-qcom-qcm2290-adreno = "Firmware-qcom"
+LICENSE:${PN}-qcom-qcm2290-audio = "Firmware-qcom"
+LICENSE:${PN}-qcom-qcm2290-modem = "Firmware-qcom"
+LICENSE:${PN}-qcom-qcm2290-wifi = "Firmware-qcom"
+LICENSE:${PN}-qcom-qrb4210-adreno = "Firmware-qcom"
+LICENSE:${PN}-qcom-qrb4210-audio = "Firmware-qcom"
+LICENSE:${PN}-qcom-qrb4210-compute = "Firmware-qcom"
+LICENSE:${PN}-qcom-qrb4210-modem = "Firmware-qcom"
+LICENSE:${PN}-qcom-qrb4210-wifi = "Firmware-qcom"
+LICENSE:${PN}-qcom-sc8280xp-lenovo-x13s-audio = "Firmware-qcom & Firmware-linaro"
+LICENSE:${PN}-qcom-sc8280xp-lenovo-x13s-adreno = "Firmware-qcom"
+LICENSE:${PN}-qcom-sc8280xp-lenovo-x13s-compute = "Firmware-qcom"
+LICENSE:${PN}-qcom-sc8280xp-lenovo-x13s-sensors = "Firmware-qcom"
+LICENSE:${PN}-qcom-sdm845-audio = "Firmware-qcom"
+LICENSE:${PN}-qcom-sdm845-adreno = "Firmware-qcom"
+LICENSE:${PN}-qcom-sdm845-compute = "Firmware-qcom"
+LICENSE:${PN}-qcom-sdm845-modem = "Firmware-qcom"
+LICENSE:${PN}-qcom-sdm845-thundercomm-db845c-sensors = "Firmware-qcom"
+LICENSE:${PN}-qcom-sm8250-audio = "Firmware-qcom"
+LICENSE:${PN}-qcom-sm8250-adreno = "Firmware-qcom"
+LICENSE:${PN}-qcom-sm8250-compute = "Firmware-qcom"
+LICENSE:${PN}-qcom-sm8250-thundercomm-rb5-sensors = "Firmware-qcom"
+
+FILES:${PN}-qcom-license = "${nonarch_base_libdir}/firmware/LICENSE.qcom ${nonarch_base_libdir}/firmware/qcom/NOTICE.txt"
+FILES:${PN}-qcom-yamato-license = "${nonarch_base_libdir}/firmware/LICENSE.qcom_yamato"
+FILES:${PN}-qcom-venus-1.8 = "${nonarch_base_libdir}/firmware/qcom/venus-1.8/*"
+FILES:${PN}-qcom-venus-4.2 = "${nonarch_base_libdir}/firmware/qcom/venus-4.2/*"
+FILES:${PN}-qcom-venus-5.2 = "${nonarch_base_libdir}/firmware/qcom/venus-5.2/*"
+FILES:${PN}-qcom-venus-5.4 = "${nonarch_base_libdir}/firmware/qcom/venus-5.4/*"
+FILES:${PN}-qcom-venus-6.0 = "${nonarch_base_libdir}/firmware/qcom/venus-6.0/*"
+FILES:${PN}-qcom-vpu-1.0 = "${nonarch_base_libdir}/firmware/qcom/vpu-1.0/*"
+FILES:${PN}-qcom-vpu-2.0 = "${nonarch_base_libdir}/firmware/qcom/vpu-2.0/*"
+FILES:${PN}-qcom-adreno-a2xx = "${nonarch_base_libdir}/firmware/qcom/leia_*.fw ${nonarch_base_libdir}/firmware/qcom/yamato_*.fw"
+FILES:${PN}-qcom-adreno-a3xx = "${nonarch_base_libdir}/firmware/qcom/a3*_*.fw ${nonarch_base_libdir}/firmware/a300_*.fw"
+FILES:${PN}-qcom-adreno-a4xx = "${nonarch_base_libdir}/firmware/qcom/a4*_*.fw"
+FILES:${PN}-qcom-adreno-a530 = "${nonarch_base_libdir}/firmware/qcom/a530*.fw*"
+FILES:${PN}-qcom-adreno-a630 = "${nonarch_base_libdir}/firmware/qcom/a630*.*"
+FILES:${PN}-qcom-adreno-a650 = "${nonarch_base_libdir}/firmware/qcom/a650*.*"
+FILES:${PN}-qcom-adreno-a660 = "${nonarch_base_libdir}/firmware/qcom/a660*.*"
+FILES:${PN}-qcom-adreno-a702 = "${nonarch_base_libdir}/firmware/qcom/a702*.*"
+FILES:${PN}-qcom-apq8016-modem = "${nonarch_base_libdir}/firmware/qcom/apq8016/mba.mbn ${nonarch_base_libdir}/firmware/qcom/apq8016/modem.mbn"
+FILES:${PN}-qcom-apq8016-wifi = "${nonarch_base_libdir}/firmware/qcom/apq8016/wcnss.mbn ${nonarch_base_libdir}/firmware/qcom/apq8016/WCNSS*"
+FILES:${PN}-qcom-apq8096-adreno = "${nonarch_base_libdir}/firmware/qcom/apq8096/a530_zap.mbn ${nonarch_base_libdir}/firmware/qcom/a530_zap.mdt"
+FILES:${PN}-qcom-apq8096-audio = "${nonarch_base_libdir}/firmware/qcom/apq8096/adsp*.*"
+FILES:${PN}-qcom-apq8096-modem = "${nonarch_base_libdir}/firmware/qcom/apq8096/mba.mbn ${nonarch_base_libdir}/firmware/qcom/apq8096/modem*.* ${nonarch_base_libdir}/firmware/qcom/apq8096/wlanmdsp.mbn"
+FILES:${PN}-qcom-qcm2290-adreno = "${nonarch_base_libdir}/firmware/qcom/qcm2290/a702_zap.mbn"
+FILES:${PN}-qcom-qcm2290-audio = "${nonarch_base_libdir}/firmware/qcom/qcm2290/adsp*.*"
+FILES:${PN}-qcom-qcm2290-modem = "${nonarch_base_libdir}/firmware/qcom/qcm2290/modem*.*"
+FILES:${PN}-qcom-qcm2290-wifi = "${nonarch_base_libdir}/firmware/qcom/qcm2290/wlanmdsp.mbn"
+FILES:${PN}-qcom-qrb4210-adreno = "${nonarch_base_libdir}/firmware/qcom/qrb4210/a610_zap.mbn"
+FILES:${PN}-qcom-qrb4210-audio = "${nonarch_base_libdir}/firmware/qcom/qrb4210/adsp*.*"
+FILES:${PN}-qcom-qrb4210-compute = "${nonarch_base_libdir}/firmware/qcom/qrb4210/cdsp*.*"
+FILES:${PN}-qcom-qrb4210-modem = "${nonarch_base_libdir}/firmware/qcom/qrb4210/modem*.*"
+FILES:${PN}-qcom-qrb4210-wifi = "${nonarch_base_libdir}/firmware/qcom/qrb4210/wlanmdsp.mbn"
+FILES:${PN}-qcom-sc8280xp-lenovo-x13s-compat = "${nonarch_base_libdir}/firmware/qcom/LENOVO/21BX"
+FILES:${PN}-qcom-sc8280xp-lenovo-x13s-audio = "${nonarch_base_libdir}/firmware/qcom/sc8280xp/LENOVO/21BX/*adsp*.* ${nonarch_base_libdir}/firmware/qcom/sc8280xp/LENOVO/21BX/battmgr.jsn ${nonarch_base_libdir}/firmware/qcom/sc8280xp/LENOVO/21BX/audioreach-tplg.bin ${nonarch_base_libdir}/firmware/qcom/sc8280xp/SC8280XP-LENOVO-X13S-tplg.bin"
+FILES:${PN}-qcom-sc8280xp-lenovo-x13s-adreno = "${nonarch_base_libdir}/firmware/qcom/sc8280xp/LENOVO/21BX/qcdxkmsuc8280.mbn"
+FILES:${PN}-qcom-sc8280xp-lenovo-x13s-compute = "${nonarch_base_libdir}/firmware/qcom/sc8280xp/LENOVO/21BX/*cdsp*.*"
+FILES:${PN}-qcom-sc8280xp-lenovo-x13s-sensors = "${nonarch_base_libdir}/firmware/qcom/sc8280xp/LENOVO/21BX/*slpi*.*"
+FILES:${PN}-qcom-sdm845-adreno = "${nonarch_base_libdir}/firmware/qcom/sdm845/a630*.*"
+FILES:${PN}-qcom-sdm845-audio = "${nonarch_base_libdir}/firmware/qcom/sdm845/adsp*.*"
+FILES:${PN}-qcom-sdm845-compute = "${nonarch_base_libdir}/firmware/qcom/sdm845/cdsp*.*"
+FILES:${PN}-qcom-sdm845-modem = "${nonarch_base_libdir}/firmware/qcom/sdm845/mba.mbn ${nonarch_base_libdir}/firmware/qcom/sdm845/modem*.* ${nonarch_base_libdir}/firmware/qcom/sdm845/wlanmdsp.mbn ${nonarch_base_libdir}/firmware/qcom/sdm845/notice.txt_wlanmdsp"
+FILES:${PN}-qcom-sdm845-thundercomm-db845c-sensors = "${nonarch_base_libdir}/firmware/qcom/sdm845/Thundercomm/db845c/slpi*.*"
+FILES:${PN}-qcom-sm8250-adreno = "${nonarch_base_libdir}/firmware/qcom/sm8250/a650*.*"
+FILES:${PN}-qcom-sm8250-audio = "${nonarch_base_libdir}/firmware/qcom/sm8250/adsp*.*"
+FILES:${PN}-qcom-sm8250-compute = "${nonarch_base_libdir}/firmware/qcom/sm8250/cdsp*.*"
+FILES:${PN}-qcom-sm8250-thundercomm-rb5-sensors = "${nonarch_base_libdir}/firmware/qcom/sm8250/Thundercomm/RB5/slpi*.*"
+
+RDEPENDS:${PN}-qcom-venus-1.8 = "${PN}-qcom-license"
+RDEPENDS:${PN}-qcom-venus-4.2 = "${PN}-qcom-license"
+RDEPENDS:${PN}-qcom-venus-5.2 = "${PN}-qcom-license"
+RDEPENDS:${PN}-qcom-venus-5.4 = "${PN}-qcom-license"
+RDEPENDS:${PN}-qcom-venus-6.0 = "${PN}-qcom-license"
+RDEPENDS:${PN}-qcom-vpu-1.0 = "${PN}-qcom-license"
+RDEPENDS:${PN}-qcom-vpu-2.0 = "${PN}-qcom-license"
+RDEPENDS:${PN}-qcom-adreno-a2xx = "${PN}-qcom-license ${PN}-qcom-yamato-license"
+RDEPENDS:${PN}-qcom-adreno-a3xx = "${PN}-qcom-license"
+RDEPENDS:${PN}-qcom-adreno-a4xx = "${PN}-qcom-license"
+RDEPENDS:${PN}-qcom-adreno-a530 = "${PN}-qcom-license"
+RDEPENDS:${PN}-qcom-adreno-a630 = "${PN}-qcom-license"
+RDEPENDS:${PN}-qcom-adreno-a650 = "${PN}-qcom-license"
+RDEPENDS:${PN}-qcom-adreno-a660 = "${PN}-qcom-license"
+RDEPENDS:${PN}-qcom-adreno-a702 = "${PN}-qcom-license"
+RDEPENDS:${PN}-qcom-apq8016-modem = "${PN}-qcom-license"
+RDEPENDS:${PN}-qcom-apq8016-wifi = "${PN}-qcom-license"
+RDEPENDS:${PN}-qcom-apq8096-adreno = "${PN}-qcom-license"
+RDEPENDS:${PN}-qcom-apq8096-audio = "${PN}-qcom-license"
+RDEPENDS:${PN}-qcom-apq8096-modem = "${PN}-qcom-license"
+RDEPENDS:${PN}-qcom-qcm2290-adreno = "${PN}-qcom-license"
+RDEPENDS:${PN}-qcom-qcm2290-audio = "${PN}-qcom-license"
+RDEPENDS:${PN}-qcom-qcm2290-modem = "${PN}-qcom-license"
+RDEPENDS:${PN}-qcom-qcm2290-wifi = "${PN}-qcom-license"
+RDEPENDS:${PN}-qcom-qrb4210-adreno = "${PN}-qcom-license"
+RDEPENDS:${PN}-qcom-qrb4210-audio = "${PN}-qcom-license"
+RDEPENDS:${PN}-qcom-qrb4210-compute = "${PN}-qcom-license"
+RDEPENDS:${PN}-qcom-qrb4210-modem = "${PN}-qcom-license"
+RDEPENDS:${PN}-qcom-qrb4210-wifi = "${PN}-qcom-license"
+RDEPENDS:${PN}-qcom-sc8280xp-lenovo-x13s-audio = "${PN}-qcom-license"
+RDEPENDS:${PN}-qcom-sc8280xp-lenovo-x13s-adreno = "${PN}-qcom-license"
+RDEPENDS:${PN}-qcom-sc8280xp-lenovo-x13s-compute = "${PN}-qcom-license"
+RDEPENDS:${PN}-qcom-sc8280xp-lenovo-x13s-sensors = "${PN}-qcom-license"
+RDEPENDS:${PN}-qcom-sdm845-adreno = "${PN}-qcom-license"
+RDEPENDS:${PN}-qcom-sdm845-audio = "${PN}-qcom-license"
+RDEPENDS:${PN}-qcom-sdm845-compute = "${PN}-qcom-license"
+RDEPENDS:${PN}-qcom-sdm845-modem = "${PN}-qcom-license"
+RDEPENDS:${PN}-qcom-sdm845-thundercomm-db845c-sensors = "${PN}-qcom-license"
+RDEPENDS:${PN}-qcom-sm8250-adreno = "${PN}-qcom-license"
+RDEPENDS:${PN}-qcom-sm8250-audio = "${PN}-qcom-license"
+RDEPENDS:${PN}-qcom-sm8250-compute = "${PN}-qcom-license"
+RDEPENDS:${PN}-qcom-sm8250-thundercomm-rb5-sensors = "${PN}-qcom-license"
+
+RRECOMMENDS:${PN}-qcom-sc8280xp-lenovo-x13s-audio = "${PN}-qcom-sc8280xp-lenovo-x13s-compat"
+RRECOMMENDS:${PN}-qcom-sc8280xp-lenovo-x13s-adreno = "${PN}-qcom-sc8280xp-lenovo-x13s-compat"
+RRECOMMENDS:${PN}-qcom-sc8280xp-lenovo-x13s-compute = "${PN}-qcom-sc8280xp-lenovo-x13s-compat"
+RRECOMMENDS:${PN}-qcom-sc8280xp-lenovo-x13s-sensors = "${PN}-qcom-sc8280xp-lenovo-x13s-compat"
+
+LICENSE:${PN}-liquidui = "Firmware-cavium_liquidio"
+FILES:${PN}-liquidio = "${nonarch_base_libdir}/firmware/liquidio"
+
+LICENSE:${PN}-mellanox = "WHENCE"
+FILES:${PN}-mellanox = "${nonarch_base_libdir}/firmware/mellanox"
+
+LICENSE:${PN}-prestera = "Firmware-Marvell"
+FILES:${PN}-prestera = "${nonarch_base_libdir}/firmware/mrvl/prestera"
+RDEPENDS:${PN}-prestera = "${PN}-marvell-license"
+
+# For Rockchip
+LICENSE:${PN}-rockchip-dptx = "Firmware-rockchip"
+FILES:${PN}-rockchip-license = "${nonarch_base_libdir}/firmware/LICENCE.rockchip"
+FILES:${PN}-rockchip-dptx = "${nonarch_base_libdir}/firmware/rockchip/dptx.bin"
+RDEPENDS:${PN}-rockchip-dptx = "${PN}-rockchip-license"
+
+# For Amlogic VDEC
+LICENSE:${PN}-amlogic-vdec = "Firmware-amlogic_vdec"
+FILES:${PN}-amlogic-vdec-license = "${nonarch_base_libdir}/firmware/LICENSE.amlogic_vdec"
+FILES:${PN}-amlogic-vdec = "${nonarch_base_libdir}/firmware/meson/vdec/*"
+RDEPENDS:${PN}-amlogic-vdec = "${PN}-amlogic-vdec-license"
+
+# For other firmwares
+# Maybe split out to separate packages when needed.
+LICENSE:${PN} = "\
+ Firmware-Abilis \
+ & Firmware-agere \
+ & Firmware-amdgpu \
+ & Firmware-amd-ucode \
+ & Firmware-amlogic_vdec \
+ & Firmware-atmel \
+ & Firmware-ca0132 \
+ & Firmware-cavium \
+ & Firmware-chelsio_firmware \
+ & Firmware-cirrus \
+ & Firmware-cnm \
+ & Firmware-cw1200 \
+ & Firmware-dib0700 \
+ & Firmware-e100 \
+ & Firmware-ene_firmware \
+ & Firmware-fw_sst_0f28 \
+ & Firmware-go7007 \
+ & Firmware-hfi1_firmware \
+ & Firmware-ibt_firmware \
+ & Firmware-it913x \
+ & Firmware-IntcSST2 \
+ & Firmware-kaweth \
+ & Firmware-moxa \
+ & Firmware-myri10ge_firmware \
+ & Firmware-nvidia \
+ & Firmware-nxp \
+ & Firmware-OLPC \
+ & Firmware-ath9k-htc \
+ & Firmware-phanfw \
+ & Firmware-qat \
+ & Firmware-qcom \
+ & Firmware-qla1280 \
+ & Firmware-qla2xxx \
+ & Firmware-r8a779x_usb3 \
+ & Firmware-radeon \
+ & Firmware-ralink_a_mediatek_company_firmware \
+ & Firmware-ralink-firmware \
+ & Firmware-imx-sdma_firmware \
+ & Firmware-siano \
+ & Firmware-ti-connectivity \
+ & Firmware-ti-keystone \
+ & Firmware-ueagle-atm4-firmware \
+ & Firmware-wl1251 \
+ & Firmware-xc4000 \
+ & Firmware-xc5000 \
+ & Firmware-xc5000c \
+ & WHENCE \
+"
+
+FILES:${PN}-license += "${nonarch_base_libdir}/firmware/LICEN*"
+FILES:${PN} += "${nonarch_base_libdir}/firmware/*"
+RDEPENDS:${PN} += "${PN}-license"
+RDEPENDS:${PN} += "${PN}-whence-license"
+
+# Make linux-firmware depend on all of the split-out packages.
+# Make linux-firmware-iwlwifi depend on all of the split-out iwlwifi packages.
+# Make linux-firmware-ibt depend on all of the split-out ibt packages.
+python populate_packages:prepend () {
+ firmware_pkgs = oe.utils.packages_filter_out_system(d)
+ d.appendVar('RRECOMMENDS:linux-firmware', ' ' + ' '.join(firmware_pkgs))
+
+ iwlwifi_pkgs = filter(lambda x: x.find('-iwlwifi-') != -1, firmware_pkgs)
+ d.appendVar('RRECOMMENDS:linux-firmware-iwlwifi', ' ' + ' '.join(iwlwifi_pkgs))
+
+ ibt_pkgs = filter(lambda x: x.find('-ibt-') != -1, firmware_pkgs)
+ d.appendVar('RRECOMMENDS:linux-firmware-ibt', ' ' + ' '.join(ibt_pkgs))
+}
+
+# Firmware files are generally not ran on the CPU, so they can be
+# allarch despite being architecture specific
+INSANE_SKIP = "arch"
+
+# Don't warn about already stripped files
+INSANE_SKIP:${PN} = "already-stripped"
+
+# No need to put firmware into the sysroot
+SYSROOT_DIRS_IGNORE += "${nonarch_base_libdir}/firmware"
diff --git a/meta/recipes-kernel/linux-libc-headers/linux-libc-headers.inc b/meta/recipes-kernel/linux-libc-headers/linux-libc-headers.inc
index 47f09952de..3a8d32e785 100644
--- a/meta/recipes-kernel/linux-libc-headers/linux-libc-headers.inc
+++ b/meta/recipes-kernel/linux-libc-headers/linux-libc-headers.inc
@@ -44,6 +44,8 @@ python __anonymous () {
d.setVar("HEADER_FETCH_VER", "4.x")
elif major == "5":
d.setVar("HEADER_FETCH_VER", "5.x")
+ elif major == "6":
+ d.setVar("HEADER_FETCH_VER", "6.x")
else:
d.setVar("HEADER_FETCH_VER", "2.6")
}
@@ -103,7 +105,7 @@ do_install_armmultilib () {
BBCLASSEXTEND = "nativesdk"
-RDEPENDS:${PN}-dev = ""
+DEV_PKG_DEPENDENCY = ""
RRECOMMENDS:${PN}-dbg = "${PN}-dev (= ${EXTENDPKGV})"
INHIBIT_DEFAULT_DEPS = "1"
diff --git a/meta/recipes-kernel/linux-libc-headers/linux-libc-headers/0001-include-linux-stddef.h-in-swab.h-uapi-header.patch b/meta/recipes-kernel/linux-libc-headers/linux-libc-headers/0001-include-linux-stddef.h-in-swab.h-uapi-header.patch
deleted file mode 100644
index 5b7c1b6e21..0000000000
--- a/meta/recipes-kernel/linux-libc-headers/linux-libc-headers/0001-include-linux-stddef.h-in-swab.h-uapi-header.patch
+++ /dev/null
@@ -1,42 +0,0 @@
-From dc221138c809125dc1bbff8506c70cb7bd846368 Mon Sep 17 00:00:00 2001
-From: Khem Raj <raj.khem@gmail.com>
-Date: Wed, 12 Sep 2018 17:08:58 -0700
-Subject: [PATCH] include linux/stddef.h in swab.h uapi header
-
-swab.h uses __always_inline without including the header where it is
-defined, this is exposed by musl based distributions where this macro is
-not defined by system C library headers unlike glibc where it is defined
-in sys/cdefs.h and that header gets pulled in indirectly via
-
-features.h -> sys/cdefs.h
-
-and features.h gets pulled in a lot of headers. Therefore it may work in
-cases where features.h is includes but not otherwise.
-
-Adding linux/stddef.h here ensures that __always_inline is always
-defined independent of which C library is used in userspace
-
-Upstream-Status: Submitted [https://lkml.org/lkml/2018/9/13/78]
-
-Signed-off-by: Khem Raj <raj.khem@gmail.com>
-Cc: Philippe Ombredanne <pombredanne@nexb.com>
-Cc: Kate Stewart <kstewart@linuxfoundation.org>
-Cc: Greg Kroah-Hartman <gregkh@linuxfoundation.org>
-Cc: Thomas Gleixner <tglx@linutronix.de>
-
----
- include/uapi/linux/swab.h | 1 +
- 1 file changed, 1 insertion(+)
-
-diff --git a/include/uapi/linux/swab.h b/include/uapi/linux/swab.h
-index 7272f85d6..2912fe463 100644
---- a/include/uapi/linux/swab.h
-+++ b/include/uapi/linux/swab.h
-@@ -3,6 +3,7 @@
- #define _UAPI_LINUX_SWAB_H
-
- #include <linux/types.h>
-+#include <linux/stddef.h>
- #include <linux/compiler.h>
- #include <asm/bitsperlong.h>
- #include <asm/swab.h>
diff --git a/meta/recipes-kernel/linux-libc-headers/linux-libc-headers/0001-scripts-Use-fixed-input-and-output-files-instead-of-.patch b/meta/recipes-kernel/linux-libc-headers/linux-libc-headers/0001-scripts-Use-fixed-input-and-output-files-instead-of-.patch
deleted file mode 100644
index a5ded602e5..0000000000
--- a/meta/recipes-kernel/linux-libc-headers/linux-libc-headers/0001-scripts-Use-fixed-input-and-output-files-instead-of-.patch
+++ /dev/null
@@ -1,67 +0,0 @@
-From 694eba7bb974f6b8bd308804cb24350150108b2b Mon Sep 17 00:00:00 2001
-From: He Zhe <zhe.he@windriver.com>
-Date: Wed, 21 Nov 2018 15:12:43 +0800
-Subject: [PATCH] scripts: Use fixed input and output files instead of pipe for here-doc
-
-There was a bug of "as" in binutils that when it checks if the input file and
-output file are the same one, it would not check if they are on the same block
-device. The check is introduced by the following commit in v2.31.
-
-https://sourceware.org/git/gitweb.cgi?p=binutils-gdb.git;a=commit;h=
-67f846b59b32f3d704c601669409c2584383fea9
-
-The here-doc usage in this script creates temporary file in /tmp. When we run in
-an environment where /tmp has rarely been used, the newly created temporary file
-may have a very low inode number. If the inode number was 6 which is the same as
-/dev/null, the as would wrongly think the input file and the output file are the
-same and report the following error.
-
-*** Compiler lacks asm-goto support.. Stop.
-
-One observed case happened in docker where the /tmp could be so rarely used that
-very low number inode may be allocated and triggers the error.
-
-The fix below for the bug only exists on the master branch of binutils so far
-and has not been released from upstream. As the convict is introduced since
-v2.31, only v2.31 is affected.
-
-https://sourceware.org/git/gitweb.cgi?p=binutils-gdb.git;a=commit;h=
-2a50366ded329bfb39d387253450c9d5302c3503
-
-When building linux-libc-headers we need to use "as" in binutils which does not
-contain the fix for the moment. To work around the error, we create a fixed
-temporary file to contain the program being tested.
-
-This patch also removes ">/dev/null 2>&1" so we will have more direct error
-information in case something else wrong happened.
-
-Upstream-Status: Inappropriate [A work around for binutils v2.31]
-
-Signed-off-by: He Zhe <zhe.he@windriver.com>
----
- scripts/gcc-goto.sh | 5 ++++-
- 1 file changed, 4 insertions(+), 1 deletion(-)
-
-diff --git a/scripts/gcc-goto.sh b/scripts/gcc-goto.sh
-index 8b980fb22..d256a9438 100755
---- a/scripts/gcc-goto.sh
-+++ b/scripts/gcc-goto.sh
-@@ -3,7 +3,7 @@
- # Test for gcc 'asm goto' support
- # Copyright (C) 2010, Jason Baron <jbaron@redhat.com>
-
--cat << "END" | $@ -x c - -fno-PIE -c -o /dev/null
-+cat << "END" > ./input
- int main(void)
- {
- #if defined(__arm__) || defined(__aarch64__)
-@@ -20,3 +20,6 @@ entry:
- return 0;
- }
- END
-+
-+$@ -x c ./input -fno-PIE -c -o ./output
-+rm ./input ./output
---
-2.19.1
-
diff --git a/meta/recipes-kernel/linux-libc-headers/linux-libc-headers_5.16.bb b/meta/recipes-kernel/linux-libc-headers/linux-libc-headers_5.16.bb
deleted file mode 100644
index c64629d094..0000000000
--- a/meta/recipes-kernel/linux-libc-headers/linux-libc-headers_5.16.bb
+++ /dev/null
@@ -1,20 +0,0 @@
-require linux-libc-headers.inc
-
-SRC_URI:append:libc-musl = "\
- file://0001-libc-compat.h-fix-some-issues-arising-from-in6.h.patch \
- file://0003-remove-inclusion-of-sysinfo.h-in-kernel.h.patch \
- file://0001-libc-compat.h-musl-_does_-define-IFF_LOWER_UP-DORMAN.patch \
- file://0001-include-linux-stddef.h-in-swab.h-uapi-header.patch \
- "
-
-SRC_URI:append = "\
- file://0001-scripts-Use-fixed-input-and-output-files-instead-of-.patch \
- file://0001-kbuild-install_headers.sh-Strip-_UAPI-from-if-define.patch \
-"
-
-LIC_FILES_CHKSUM = "file://COPYING;md5=6bc538ed5bd9a7fc9398086aedcd7e46"
-
-SRC_URI[md5sum] = "e6680ce7c989a3efe58b51e3f3f0bf93"
-SRC_URI[sha256sum] = "027d7e8988bb69ac12ee92406c3be1fe13f990b1ca2249e226225cd1573308bb"
-
-
diff --git a/meta/recipes-kernel/linux-libc-headers/linux-libc-headers_6.6.bb b/meta/recipes-kernel/linux-libc-headers/linux-libc-headers_6.6.bb
new file mode 100644
index 0000000000..d68de6ec7e
--- /dev/null
+++ b/meta/recipes-kernel/linux-libc-headers/linux-libc-headers_6.6.bb
@@ -0,0 +1,17 @@
+require linux-libc-headers.inc
+
+SRC_URI:append:libc-musl = "\
+ file://0001-libc-compat.h-fix-some-issues-arising-from-in6.h.patch \
+ file://0003-remove-inclusion-of-sysinfo.h-in-kernel.h.patch \
+ file://0001-libc-compat.h-musl-_does_-define-IFF_LOWER_UP-DORMAN.patch \
+ "
+
+SRC_URI += "\
+ file://0001-kbuild-install_headers.sh-Strip-_UAPI-from-if-define.patch \
+"
+
+LIC_FILES_CHKSUM = "file://COPYING;md5=6bc538ed5bd9a7fc9398086aedcd7e46"
+
+SRC_URI[sha256sum] = "d926a06c63dd8ac7df3f86ee1ffc2ce2a3b81a2d168484e76b5b389aba8e56d0"
+
+
diff --git a/meta/recipes-kernel/linux/cve-exclusion.inc b/meta/recipes-kernel/linux/cve-exclusion.inc
new file mode 100644
index 0000000000..7857633943
--- /dev/null
+++ b/meta/recipes-kernel/linux/cve-exclusion.inc
@@ -0,0 +1,34 @@
+CVE_STATUS[CVE-1999-0656] = "not-applicable-config: specific to ugidd, part of the old user-mode NFS server"
+
+CVE_STATUS[CVE-2006-2932] = "not-applicable-platform: specific to RHEL"
+
+CVE_STATUS[CVE-2007-2764] = "not-applicable-platform: specific to Sun/Brocade SilkWorm switches"
+
+CVE_STATUS[CVE-2007-4998] = "cpe-incorrect: a historic cp bug, no longer an issue as per https://bugzilla.redhat.com/show_bug.cgi?id=356471#c5"
+
+CVE_STATUS[CVE-2008-2544] = "disputed: not an issue as per https://bugzilla.redhat.com/show_bug.cgi?id=449089#c22"
+
+CVE_STATUS[CVE-2010-0298] = "fixed-version: 2.6.34 (1871c6)"
+
+CVE_STATUS[CVE-2014-2648] = "cpe-incorrect: not Linux"
+
+CVE_STATUS[CVE-2016-0774] = "ignored: result of incomplete backport"
+
+CVE_STATUS[CVE-2016-3695] = "not-applicable-platform: specific to RHEL with securelevel patches"
+
+CVE_STATUS[CVE-2016-3699] = "not-applicable-platform: specific to RHEL with securelevel patches"
+
+CVE_STATUS[CVE-2017-6264] = "not-applicable-platform: Android specific"
+
+CVE_STATUS[CVE-2017-1000377] = "not-applicable-platform: GRSecurity specific"
+
+CVE_STATUS[CVE-2018-6559] = "not-applicable-platform: Issue only affects Ubuntu"
+
+CVE_STATUS[CVE-2020-11935] = "not-applicable-config: Issue only affects aufs, which is not in linux-yocto"
+
+# Introduced in version v6.1 7b88bda3761b95856cf97822efe8281c8100067b
+# Patched in kernel since v6.2 4a625ceee8a0ab0273534cb6b432ce6b331db5ee
+# But, the CVE is disputed:
+CVE_STATUS[CVE-2023-23005] = "disputed: There are no realistic cases \
+in which a user can cause the alloc_memory_type error case to be reached. \
+See: https://bugzilla.suse.com/show_bug.cgi?id=1208844#c2"
diff --git a/meta/recipes-kernel/linux/cve-exclusion_6.6.inc b/meta/recipes-kernel/linux/cve-exclusion_6.6.inc
new file mode 100644
index 0000000000..bb9ba49c48
--- /dev/null
+++ b/meta/recipes-kernel/linux/cve-exclusion_6.6.inc
@@ -0,0 +1,5384 @@
+
+# Auto-generated CVE metadata, DO NOT EDIT BY HAND.
+# Generated at 2024-03-28 16:40:04.102652+00:00 for version 6.6.23
+
+python check_kernel_cve_status_version() {
+ this_version = "6.6.23"
+ kernel_version = d.getVar("LINUX_VERSION")
+ if kernel_version != this_version:
+ bb.warn("Kernel CVE status needs updating: generated for %s but kernel is %s" % (this_version, kernel_version))
+}
+do_cve_check[prefuncs] += "check_kernel_cve_status_version"
+
+CVE_STATUS[CVE-2003-1604] = "fixed-version: Fixed from version 2.6.12rc2"
+
+CVE_STATUS[CVE-2004-0230] = "fixed-version: Fixed from version 3.6rc1"
+
+# CVE-2005-3660 has no known resolution
+
+CVE_STATUS[CVE-2006-3635] = "fixed-version: Fixed from version 2.6.26rc5"
+
+CVE_STATUS[CVE-2006-5331] = "fixed-version: Fixed from version 2.6.19rc3"
+
+CVE_STATUS[CVE-2006-6128] = "fixed-version: Fixed from version 2.6.19rc2"
+
+# CVE-2007-3719 has no known resolution
+
+CVE_STATUS[CVE-2007-4774] = "fixed-version: Fixed from version 2.6.12rc2"
+
+CVE_STATUS[CVE-2007-6761] = "fixed-version: Fixed from version 2.6.24rc6"
+
+CVE_STATUS[CVE-2007-6762] = "fixed-version: Fixed from version 2.6.20rc5"
+
+# CVE-2008-2544 has no known resolution
+
+# CVE-2008-4609 has no known resolution
+
+CVE_STATUS[CVE-2008-7316] = "fixed-version: Fixed from version 2.6.25rc1"
+
+CVE_STATUS[CVE-2009-2692] = "fixed-version: Fixed from version 2.6.31rc6"
+
+CVE_STATUS[CVE-2010-0008] = "fixed-version: Fixed from version 2.6.23rc9"
+
+CVE_STATUS[CVE-2010-3432] = "fixed-version: Fixed from version 2.6.36rc5"
+
+# CVE-2010-4563 has no known resolution
+
+CVE_STATUS[CVE-2010-4648] = "fixed-version: Fixed from version 2.6.37rc6"
+
+CVE_STATUS[CVE-2010-5313] = "fixed-version: Fixed from version 2.6.38rc1"
+
+# CVE-2010-5321 has no known resolution
+
+CVE_STATUS[CVE-2010-5328] = "fixed-version: Fixed from version 2.6.35rc1"
+
+CVE_STATUS[CVE-2010-5329] = "fixed-version: Fixed from version 2.6.39rc1"
+
+CVE_STATUS[CVE-2010-5331] = "fixed-version: Fixed from version 2.6.34rc7"
+
+CVE_STATUS[CVE-2010-5332] = "fixed-version: Fixed from version 2.6.37rc1"
+
+CVE_STATUS[CVE-2011-4098] = "fixed-version: Fixed from version 3.2rc1"
+
+CVE_STATUS[CVE-2011-4131] = "fixed-version: Fixed from version 3.3rc1"
+
+CVE_STATUS[CVE-2011-4915] = "fixed-version: Fixed from version 3.2rc1"
+
+# CVE-2011-4916 has no known resolution
+
+# CVE-2011-4917 has no known resolution
+
+CVE_STATUS[CVE-2011-5321] = "fixed-version: Fixed from version 3.2rc1"
+
+CVE_STATUS[CVE-2011-5327] = "fixed-version: Fixed from version 3.1rc1"
+
+CVE_STATUS[CVE-2012-0957] = "fixed-version: Fixed from version 3.7rc2"
+
+CVE_STATUS[CVE-2012-2119] = "fixed-version: Fixed from version 3.5rc1"
+
+CVE_STATUS[CVE-2012-2136] = "fixed-version: Fixed from version 3.5rc1"
+
+CVE_STATUS[CVE-2012-2137] = "fixed-version: Fixed from version 3.5rc2"
+
+CVE_STATUS[CVE-2012-2313] = "fixed-version: Fixed from version 3.4rc6"
+
+CVE_STATUS[CVE-2012-2319] = "fixed-version: Fixed from version 3.4rc6"
+
+CVE_STATUS[CVE-2012-2372] = "fixed-version: Fixed from version 3.13rc4"
+
+CVE_STATUS[CVE-2012-2375] = "fixed-version: Fixed from version 3.4rc1"
+
+CVE_STATUS[CVE-2012-2390] = "fixed-version: Fixed from version 3.5rc1"
+
+CVE_STATUS[CVE-2012-2669] = "fixed-version: Fixed from version 3.5rc4"
+
+CVE_STATUS[CVE-2012-2744] = "fixed-version: Fixed from version 2.6.34rc1"
+
+CVE_STATUS[CVE-2012-2745] = "fixed-version: Fixed from version 3.4rc3"
+
+CVE_STATUS[CVE-2012-3364] = "fixed-version: Fixed from version 3.5rc6"
+
+CVE_STATUS[CVE-2012-3375] = "fixed-version: Fixed from version 3.4rc5"
+
+CVE_STATUS[CVE-2012-3400] = "fixed-version: Fixed from version 3.5rc5"
+
+CVE_STATUS[CVE-2012-3412] = "fixed-version: Fixed from version 3.6rc2"
+
+CVE_STATUS[CVE-2012-3430] = "fixed-version: Fixed from version 3.6rc1"
+
+CVE_STATUS[CVE-2012-3510] = "fixed-version: Fixed from version 2.6.19rc4"
+
+CVE_STATUS[CVE-2012-3511] = "fixed-version: Fixed from version 3.5rc6"
+
+CVE_STATUS[CVE-2012-3520] = "fixed-version: Fixed from version 3.6rc3"
+
+CVE_STATUS[CVE-2012-3552] = "fixed-version: Fixed from version 3.0rc1"
+
+# Skipping CVE-2012-4220, no affected_versions
+
+# Skipping CVE-2012-4221, no affected_versions
+
+# Skipping CVE-2012-4222, no affected_versions
+
+CVE_STATUS[CVE-2012-4398] = "fixed-version: Fixed from version 3.4rc1"
+
+CVE_STATUS[CVE-2012-4444] = "fixed-version: Fixed from version 2.6.36rc4"
+
+CVE_STATUS[CVE-2012-4461] = "fixed-version: Fixed from version 3.7rc6"
+
+CVE_STATUS[CVE-2012-4467] = "fixed-version: Fixed from version 3.6rc5"
+
+CVE_STATUS[CVE-2012-4508] = "fixed-version: Fixed from version 3.7rc3"
+
+CVE_STATUS[CVE-2012-4530] = "fixed-version: Fixed from version 3.8rc1"
+
+# CVE-2012-4542 has no known resolution
+
+CVE_STATUS[CVE-2012-4565] = "fixed-version: Fixed from version 3.7rc4"
+
+CVE_STATUS[CVE-2012-5374] = "fixed-version: Fixed from version 3.8rc1"
+
+CVE_STATUS[CVE-2012-5375] = "fixed-version: Fixed from version 3.8rc1"
+
+CVE_STATUS[CVE-2012-5517] = "fixed-version: Fixed from version 3.6rc1"
+
+CVE_STATUS[CVE-2012-6536] = "fixed-version: Fixed from version 3.6rc7"
+
+CVE_STATUS[CVE-2012-6537] = "fixed-version: Fixed from version 3.6rc7"
+
+CVE_STATUS[CVE-2012-6538] = "fixed-version: Fixed from version 3.6rc7"
+
+CVE_STATUS[CVE-2012-6539] = "fixed-version: Fixed from version 3.6rc3"
+
+CVE_STATUS[CVE-2012-6540] = "fixed-version: Fixed from version 3.6rc3"
+
+CVE_STATUS[CVE-2012-6541] = "fixed-version: Fixed from version 3.6rc3"
+
+CVE_STATUS[CVE-2012-6542] = "fixed-version: Fixed from version 3.6rc3"
+
+CVE_STATUS[CVE-2012-6543] = "fixed-version: Fixed from version 3.6rc3"
+
+CVE_STATUS[CVE-2012-6544] = "fixed-version: Fixed from version 3.6rc3"
+
+CVE_STATUS[CVE-2012-6545] = "fixed-version: Fixed from version 3.6rc3"
+
+CVE_STATUS[CVE-2012-6546] = "fixed-version: Fixed from version 3.6rc3"
+
+CVE_STATUS[CVE-2012-6547] = "fixed-version: Fixed from version 3.6rc1"
+
+CVE_STATUS[CVE-2012-6548] = "fixed-version: Fixed from version 3.6rc1"
+
+CVE_STATUS[CVE-2012-6549] = "fixed-version: Fixed from version 3.6rc1"
+
+CVE_STATUS[CVE-2012-6638] = "fixed-version: Fixed from version 3.3rc1"
+
+CVE_STATUS[CVE-2012-6647] = "fixed-version: Fixed from version 3.6rc2"
+
+CVE_STATUS[CVE-2012-6657] = "fixed-version: Fixed from version 3.6"
+
+CVE_STATUS[CVE-2012-6689] = "fixed-version: Fixed from version 3.6rc5"
+
+CVE_STATUS[CVE-2012-6701] = "fixed-version: Fixed from version 3.5rc1"
+
+CVE_STATUS[CVE-2012-6703] = "fixed-version: Fixed from version 3.7rc1"
+
+CVE_STATUS[CVE-2012-6704] = "fixed-version: Fixed from version 3.5rc1"
+
+CVE_STATUS[CVE-2012-6712] = "fixed-version: Fixed from version 3.4rc1"
+
+CVE_STATUS[CVE-2013-0160] = "fixed-version: Fixed from version 3.9rc1"
+
+CVE_STATUS[CVE-2013-0190] = "fixed-version: Fixed from version 3.8rc5"
+
+CVE_STATUS[CVE-2013-0216] = "fixed-version: Fixed from version 3.8rc7"
+
+CVE_STATUS[CVE-2013-0217] = "fixed-version: Fixed from version 3.8rc7"
+
+CVE_STATUS[CVE-2013-0228] = "fixed-version: Fixed from version 3.8"
+
+CVE_STATUS[CVE-2013-0231] = "fixed-version: Fixed from version 3.8rc7"
+
+CVE_STATUS[CVE-2013-0268] = "fixed-version: Fixed from version 3.8rc6"
+
+CVE_STATUS[CVE-2013-0290] = "fixed-version: Fixed from version 3.8"
+
+CVE_STATUS[CVE-2013-0309] = "fixed-version: Fixed from version 3.7rc1"
+
+CVE_STATUS[CVE-2013-0310] = "fixed-version: Fixed from version 3.5"
+
+CVE_STATUS[CVE-2013-0311] = "fixed-version: Fixed from version 3.7rc8"
+
+CVE_STATUS[CVE-2013-0313] = "fixed-version: Fixed from version 3.8rc5"
+
+CVE_STATUS[CVE-2013-0343] = "fixed-version: Fixed from version 3.11rc7"
+
+CVE_STATUS[CVE-2013-0349] = "fixed-version: Fixed from version 3.8rc6"
+
+CVE_STATUS[CVE-2013-0871] = "fixed-version: Fixed from version 3.8rc5"
+
+CVE_STATUS[CVE-2013-0913] = "fixed-version: Fixed from version 3.9rc4"
+
+CVE_STATUS[CVE-2013-0914] = "fixed-version: Fixed from version 3.9rc3"
+
+CVE_STATUS[CVE-2013-1059] = "fixed-version: Fixed from version 3.11rc1"
+
+CVE_STATUS[CVE-2013-1763] = "fixed-version: Fixed from version 3.9rc1"
+
+CVE_STATUS[CVE-2013-1767] = "fixed-version: Fixed from version 3.9rc1"
+
+CVE_STATUS[CVE-2013-1772] = "fixed-version: Fixed from version 3.5rc1"
+
+CVE_STATUS[CVE-2013-1773] = "fixed-version: Fixed from version 3.3rc1"
+
+CVE_STATUS[CVE-2013-1774] = "fixed-version: Fixed from version 3.8rc5"
+
+CVE_STATUS[CVE-2013-1792] = "fixed-version: Fixed from version 3.9rc3"
+
+CVE_STATUS[CVE-2013-1796] = "fixed-version: Fixed from version 3.9rc4"
+
+CVE_STATUS[CVE-2013-1797] = "fixed-version: Fixed from version 3.9rc4"
+
+CVE_STATUS[CVE-2013-1798] = "fixed-version: Fixed from version 3.9rc4"
+
+CVE_STATUS[CVE-2013-1819] = "fixed-version: Fixed from version 3.8rc6"
+
+CVE_STATUS[CVE-2013-1826] = "fixed-version: Fixed from version 3.6rc7"
+
+CVE_STATUS[CVE-2013-1827] = "fixed-version: Fixed from version 3.6rc3"
+
+CVE_STATUS[CVE-2013-1828] = "fixed-version: Fixed from version 3.9rc2"
+
+CVE_STATUS[CVE-2013-1848] = "fixed-version: Fixed from version 3.9rc3"
+
+CVE_STATUS[CVE-2013-1858] = "fixed-version: Fixed from version 3.9rc3"
+
+CVE_STATUS[CVE-2013-1860] = "fixed-version: Fixed from version 3.9rc3"
+
+CVE_STATUS[CVE-2013-1928] = "fixed-version: Fixed from version 3.7rc3"
+
+CVE_STATUS[CVE-2013-1929] = "fixed-version: Fixed from version 3.9rc6"
+
+# Skipping CVE-2013-1935, no affected_versions
+
+CVE_STATUS[CVE-2013-1943] = "fixed-version: Fixed from version 3.0rc1"
+
+CVE_STATUS[CVE-2013-1956] = "fixed-version: Fixed from version 3.9rc5"
+
+CVE_STATUS[CVE-2013-1957] = "fixed-version: Fixed from version 3.9rc5"
+
+CVE_STATUS[CVE-2013-1958] = "fixed-version: Fixed from version 3.9rc5"
+
+CVE_STATUS[CVE-2013-1959] = "fixed-version: Fixed from version 3.9rc7"
+
+CVE_STATUS[CVE-2013-1979] = "fixed-version: Fixed from version 3.9rc8"
+
+CVE_STATUS[CVE-2013-2015] = "fixed-version: Fixed from version 3.8rc2"
+
+CVE_STATUS[CVE-2013-2017] = "fixed-version: Fixed from version 2.6.34"
+
+CVE_STATUS[CVE-2013-2058] = "fixed-version: Fixed from version 3.8rc4"
+
+CVE_STATUS[CVE-2013-2094] = "fixed-version: Fixed from version 3.9rc8"
+
+CVE_STATUS[CVE-2013-2128] = "fixed-version: Fixed from version 2.6.34rc4"
+
+CVE_STATUS[CVE-2013-2140] = "fixed-version: Fixed from version 3.11rc3"
+
+CVE_STATUS[CVE-2013-2141] = "fixed-version: Fixed from version 3.9rc8"
+
+CVE_STATUS[CVE-2013-2146] = "fixed-version: Fixed from version 3.9rc8"
+
+CVE_STATUS[CVE-2013-2147] = "fixed-version: Fixed from version 3.12rc3"
+
+CVE_STATUS[CVE-2013-2148] = "fixed-version: Fixed from version 3.11rc1"
+
+CVE_STATUS[CVE-2013-2164] = "fixed-version: Fixed from version 3.11rc1"
+
+# Skipping CVE-2013-2188, no affected_versions
+
+CVE_STATUS[CVE-2013-2206] = "fixed-version: Fixed from version 3.9rc4"
+
+# Skipping CVE-2013-2224, no affected_versions
+
+CVE_STATUS[CVE-2013-2232] = "fixed-version: Fixed from version 3.10"
+
+CVE_STATUS[CVE-2013-2234] = "fixed-version: Fixed from version 3.10"
+
+CVE_STATUS[CVE-2013-2237] = "fixed-version: Fixed from version 3.9rc6"
+
+# Skipping CVE-2013-2239, no affected_versions
+
+CVE_STATUS[CVE-2013-2546] = "fixed-version: Fixed from version 3.9rc1"
+
+CVE_STATUS[CVE-2013-2547] = "fixed-version: Fixed from version 3.9rc1"
+
+CVE_STATUS[CVE-2013-2548] = "fixed-version: Fixed from version 3.9rc1"
+
+CVE_STATUS[CVE-2013-2596] = "fixed-version: Fixed from version 3.9rc8"
+
+CVE_STATUS[CVE-2013-2634] = "fixed-version: Fixed from version 3.9rc3"
+
+CVE_STATUS[CVE-2013-2635] = "fixed-version: Fixed from version 3.9rc3"
+
+CVE_STATUS[CVE-2013-2636] = "fixed-version: Fixed from version 3.9rc3"
+
+CVE_STATUS[CVE-2013-2850] = "fixed-version: Fixed from version 3.10rc4"
+
+CVE_STATUS[CVE-2013-2851] = "fixed-version: Fixed from version 3.11rc1"
+
+CVE_STATUS[CVE-2013-2852] = "fixed-version: Fixed from version 3.10rc6"
+
+CVE_STATUS[CVE-2013-2888] = "fixed-version: Fixed from version 3.12rc1"
+
+CVE_STATUS[CVE-2013-2889] = "fixed-version: Fixed from version 3.12rc2"
+
+CVE_STATUS[CVE-2013-2890] = "fixed-version: Fixed from version 3.12rc2"
+
+CVE_STATUS[CVE-2013-2891] = "fixed-version: Fixed from version 3.12rc2"
+
+CVE_STATUS[CVE-2013-2892] = "fixed-version: Fixed from version 3.12rc1"
+
+CVE_STATUS[CVE-2013-2893] = "fixed-version: Fixed from version 3.12rc2"
+
+CVE_STATUS[CVE-2013-2894] = "fixed-version: Fixed from version 3.12rc2"
+
+CVE_STATUS[CVE-2013-2895] = "fixed-version: Fixed from version 3.12rc2"
+
+CVE_STATUS[CVE-2013-2896] = "fixed-version: Fixed from version 3.12rc1"
+
+CVE_STATUS[CVE-2013-2897] = "fixed-version: Fixed from version 3.12rc2"
+
+CVE_STATUS[CVE-2013-2898] = "fixed-version: Fixed from version 3.12rc1"
+
+CVE_STATUS[CVE-2013-2899] = "fixed-version: Fixed from version 3.12rc1"
+
+CVE_STATUS[CVE-2013-2929] = "fixed-version: Fixed from version 3.13rc1"
+
+CVE_STATUS[CVE-2013-2930] = "fixed-version: Fixed from version 3.13rc1"
+
+CVE_STATUS[CVE-2013-3076] = "fixed-version: Fixed from version 3.9"
+
+CVE_STATUS[CVE-2013-3222] = "fixed-version: Fixed from version 3.9rc7"
+
+CVE_STATUS[CVE-2013-3223] = "fixed-version: Fixed from version 3.9rc7"
+
+CVE_STATUS[CVE-2013-3224] = "fixed-version: Fixed from version 3.9rc7"
+
+CVE_STATUS[CVE-2013-3225] = "fixed-version: Fixed from version 3.9rc7"
+
+CVE_STATUS[CVE-2013-3226] = "fixed-version: Fixed from version 3.9rc7"
+
+CVE_STATUS[CVE-2013-3227] = "fixed-version: Fixed from version 3.9rc7"
+
+CVE_STATUS[CVE-2013-3228] = "fixed-version: Fixed from version 3.9rc7"
+
+CVE_STATUS[CVE-2013-3229] = "fixed-version: Fixed from version 3.9rc7"
+
+CVE_STATUS[CVE-2013-3230] = "fixed-version: Fixed from version 3.9rc7"
+
+CVE_STATUS[CVE-2013-3231] = "fixed-version: Fixed from version 3.9rc7"
+
+CVE_STATUS[CVE-2013-3232] = "fixed-version: Fixed from version 3.9rc7"
+
+CVE_STATUS[CVE-2013-3233] = "fixed-version: Fixed from version 3.9rc7"
+
+CVE_STATUS[CVE-2013-3234] = "fixed-version: Fixed from version 3.9rc7"
+
+CVE_STATUS[CVE-2013-3235] = "fixed-version: Fixed from version 3.9rc7"
+
+CVE_STATUS[CVE-2013-3236] = "fixed-version: Fixed from version 3.9rc7"
+
+CVE_STATUS[CVE-2013-3237] = "fixed-version: Fixed from version 3.9rc7"
+
+CVE_STATUS[CVE-2013-3301] = "fixed-version: Fixed from version 3.9rc7"
+
+CVE_STATUS[CVE-2013-3302] = "fixed-version: Fixed from version 3.8rc3"
+
+CVE_STATUS[CVE-2013-4125] = "fixed-version: Fixed from version 3.11rc1"
+
+CVE_STATUS[CVE-2013-4127] = "fixed-version: Fixed from version 3.11rc1"
+
+CVE_STATUS[CVE-2013-4129] = "fixed-version: Fixed from version 3.11rc1"
+
+CVE_STATUS[CVE-2013-4162] = "fixed-version: Fixed from version 3.11rc1"
+
+CVE_STATUS[CVE-2013-4163] = "fixed-version: Fixed from version 3.11rc1"
+
+CVE_STATUS[CVE-2013-4205] = "fixed-version: Fixed from version 3.11rc5"
+
+CVE_STATUS[CVE-2013-4220] = "fixed-version: Fixed from version 3.10rc4"
+
+CVE_STATUS[CVE-2013-4247] = "fixed-version: Fixed from version 3.10rc5"
+
+CVE_STATUS[CVE-2013-4254] = "fixed-version: Fixed from version 3.11rc6"
+
+CVE_STATUS[CVE-2013-4270] = "fixed-version: Fixed from version 3.12rc4"
+
+CVE_STATUS[CVE-2013-4299] = "fixed-version: Fixed from version 3.12rc6"
+
+CVE_STATUS[CVE-2013-4300] = "fixed-version: Fixed from version 3.11"
+
+CVE_STATUS[CVE-2013-4312] = "fixed-version: Fixed from version 4.5rc1"
+
+CVE_STATUS[CVE-2013-4343] = "fixed-version: Fixed from version 3.12rc2"
+
+CVE_STATUS[CVE-2013-4345] = "fixed-version: Fixed from version 3.13rc2"
+
+CVE_STATUS[CVE-2013-4348] = "fixed-version: Fixed from version 3.13rc1"
+
+CVE_STATUS[CVE-2013-4350] = "fixed-version: Fixed from version 3.12rc2"
+
+CVE_STATUS[CVE-2013-4387] = "fixed-version: Fixed from version 3.12rc4"
+
+CVE_STATUS[CVE-2013-4470] = "fixed-version: Fixed from version 3.12rc7"
+
+CVE_STATUS[CVE-2013-4483] = "fixed-version: Fixed from version 3.10rc1"
+
+CVE_STATUS[CVE-2013-4511] = "fixed-version: Fixed from version 3.12"
+
+CVE_STATUS[CVE-2013-4512] = "fixed-version: Fixed from version 3.12"
+
+CVE_STATUS[CVE-2013-4513] = "fixed-version: Fixed from version 3.12"
+
+CVE_STATUS[CVE-2013-4514] = "fixed-version: Fixed from version 3.12"
+
+CVE_STATUS[CVE-2013-4515] = "fixed-version: Fixed from version 3.12"
+
+CVE_STATUS[CVE-2013-4516] = "fixed-version: Fixed from version 3.12"
+
+CVE_STATUS[CVE-2013-4563] = "fixed-version: Fixed from version 3.13rc1"
+
+CVE_STATUS[CVE-2013-4579] = "fixed-version: Fixed from version 3.13rc7"
+
+CVE_STATUS[CVE-2013-4587] = "fixed-version: Fixed from version 3.13rc4"
+
+CVE_STATUS[CVE-2013-4588] = "fixed-version: Fixed from version 2.6.33rc4"
+
+CVE_STATUS[CVE-2013-4591] = "fixed-version: Fixed from version 3.8rc1"
+
+CVE_STATUS[CVE-2013-4592] = "fixed-version: Fixed from version 3.7rc1"
+
+# Skipping CVE-2013-4737, no affected_versions
+
+# Skipping CVE-2013-4738, no affected_versions
+
+# Skipping CVE-2013-4739, no affected_versions
+
+CVE_STATUS[CVE-2013-5634] = "fixed-version: Fixed from version 3.10rc5"
+
+CVE_STATUS[CVE-2013-6282] = "fixed-version: Fixed from version 3.6rc6"
+
+CVE_STATUS[CVE-2013-6367] = "fixed-version: Fixed from version 3.13rc4"
+
+CVE_STATUS[CVE-2013-6368] = "fixed-version: Fixed from version 3.13rc4"
+
+CVE_STATUS[CVE-2013-6376] = "fixed-version: Fixed from version 3.13rc4"
+
+CVE_STATUS[CVE-2013-6378] = "fixed-version: Fixed from version 3.13rc1"
+
+CVE_STATUS[CVE-2013-6380] = "fixed-version: Fixed from version 3.13rc1"
+
+CVE_STATUS[CVE-2013-6381] = "fixed-version: Fixed from version 3.13rc1"
+
+CVE_STATUS[CVE-2013-6382] = "fixed-version: Fixed from version 3.13rc4"
+
+CVE_STATUS[CVE-2013-6383] = "fixed-version: Fixed from version 3.12"
+
+# Skipping CVE-2013-6392, no affected_versions
+
+CVE_STATUS[CVE-2013-6431] = "fixed-version: Fixed from version 3.12rc1"
+
+CVE_STATUS[CVE-2013-6432] = "fixed-version: Fixed from version 3.13rc1"
+
+CVE_STATUS[CVE-2013-6885] = "fixed-version: Fixed from version 3.14rc1"
+
+CVE_STATUS[CVE-2013-7026] = "fixed-version: Fixed from version 3.13rc1"
+
+CVE_STATUS[CVE-2013-7027] = "fixed-version: Fixed from version 3.12rc7"
+
+CVE_STATUS[CVE-2013-7263] = "fixed-version: Fixed from version 3.13rc1"
+
+CVE_STATUS[CVE-2013-7264] = "fixed-version: Fixed from version 3.13rc1"
+
+CVE_STATUS[CVE-2013-7265] = "fixed-version: Fixed from version 3.13rc1"
+
+CVE_STATUS[CVE-2013-7266] = "fixed-version: Fixed from version 3.13rc1"
+
+CVE_STATUS[CVE-2013-7267] = "fixed-version: Fixed from version 3.13rc1"
+
+CVE_STATUS[CVE-2013-7268] = "fixed-version: Fixed from version 3.13rc1"
+
+CVE_STATUS[CVE-2013-7269] = "fixed-version: Fixed from version 3.13rc1"
+
+CVE_STATUS[CVE-2013-7270] = "fixed-version: Fixed from version 3.13rc1"
+
+CVE_STATUS[CVE-2013-7271] = "fixed-version: Fixed from version 3.13rc1"
+
+CVE_STATUS[CVE-2013-7281] = "fixed-version: Fixed from version 3.13rc1"
+
+CVE_STATUS[CVE-2013-7339] = "fixed-version: Fixed from version 3.13rc7"
+
+CVE_STATUS[CVE-2013-7348] = "fixed-version: Fixed from version 3.13rc1"
+
+CVE_STATUS[CVE-2013-7421] = "fixed-version: Fixed from version 3.19rc1"
+
+# CVE-2013-7445 has no known resolution
+
+CVE_STATUS[CVE-2013-7446] = "fixed-version: Fixed from version 4.4rc4"
+
+CVE_STATUS[CVE-2013-7470] = "fixed-version: Fixed from version 3.12rc7"
+
+CVE_STATUS[CVE-2014-0038] = "fixed-version: Fixed from version 3.14rc1"
+
+CVE_STATUS[CVE-2014-0049] = "fixed-version: Fixed from version 3.14rc5"
+
+CVE_STATUS[CVE-2014-0055] = "fixed-version: Fixed from version 3.14"
+
+CVE_STATUS[CVE-2014-0069] = "fixed-version: Fixed from version 3.14rc4"
+
+CVE_STATUS[CVE-2014-0077] = "fixed-version: Fixed from version 3.14"
+
+CVE_STATUS[CVE-2014-0100] = "fixed-version: Fixed from version 3.14rc7"
+
+CVE_STATUS[CVE-2014-0101] = "fixed-version: Fixed from version 3.14rc6"
+
+CVE_STATUS[CVE-2014-0102] = "fixed-version: Fixed from version 3.14rc6"
+
+CVE_STATUS[CVE-2014-0131] = "fixed-version: Fixed from version 3.14rc7"
+
+CVE_STATUS[CVE-2014-0155] = "fixed-version: Fixed from version 3.15rc2"
+
+CVE_STATUS[CVE-2014-0181] = "fixed-version: Fixed from version 3.15rc5"
+
+CVE_STATUS[CVE-2014-0196] = "fixed-version: Fixed from version 3.15rc5"
+
+CVE_STATUS[CVE-2014-0203] = "fixed-version: Fixed from version 2.6.33rc5"
+
+CVE_STATUS[CVE-2014-0205] = "fixed-version: Fixed from version 2.6.37rc1"
+
+CVE_STATUS[CVE-2014-0206] = "fixed-version: Fixed from version 3.16rc3"
+
+# Skipping CVE-2014-0972, no affected_versions
+
+CVE_STATUS[CVE-2014-1438] = "fixed-version: Fixed from version 3.13"
+
+CVE_STATUS[CVE-2014-1444] = "fixed-version: Fixed from version 3.12rc7"
+
+CVE_STATUS[CVE-2014-1445] = "fixed-version: Fixed from version 3.12rc7"
+
+CVE_STATUS[CVE-2014-1446] = "fixed-version: Fixed from version 3.13rc7"
+
+CVE_STATUS[CVE-2014-1690] = "fixed-version: Fixed from version 3.13rc8"
+
+CVE_STATUS[CVE-2014-1737] = "fixed-version: Fixed from version 3.15rc5"
+
+CVE_STATUS[CVE-2014-1738] = "fixed-version: Fixed from version 3.15rc5"
+
+CVE_STATUS[CVE-2014-1739] = "fixed-version: Fixed from version 3.15rc6"
+
+CVE_STATUS[CVE-2014-1874] = "fixed-version: Fixed from version 3.14rc2"
+
+CVE_STATUS[CVE-2014-2038] = "fixed-version: Fixed from version 3.14rc1"
+
+CVE_STATUS[CVE-2014-2039] = "fixed-version: Fixed from version 3.14rc3"
+
+CVE_STATUS[CVE-2014-2309] = "fixed-version: Fixed from version 3.14rc7"
+
+CVE_STATUS[CVE-2014-2523] = "fixed-version: Fixed from version 3.14rc1"
+
+CVE_STATUS[CVE-2014-2568] = "fixed-version: Fixed from version 3.14"
+
+CVE_STATUS[CVE-2014-2580] = "fixed-version: Fixed from version 3.15rc1"
+
+CVE_STATUS[CVE-2014-2672] = "fixed-version: Fixed from version 3.14rc6"
+
+CVE_STATUS[CVE-2014-2673] = "fixed-version: Fixed from version 3.14rc6"
+
+CVE_STATUS[CVE-2014-2678] = "fixed-version: Fixed from version 3.15rc1"
+
+CVE_STATUS[CVE-2014-2706] = "fixed-version: Fixed from version 3.14rc6"
+
+CVE_STATUS[CVE-2014-2739] = "fixed-version: Fixed from version 3.15rc1"
+
+CVE_STATUS[CVE-2014-2851] = "fixed-version: Fixed from version 3.15rc2"
+
+CVE_STATUS[CVE-2014-2889] = "fixed-version: Fixed from version 3.2rc7"
+
+CVE_STATUS[CVE-2014-3122] = "fixed-version: Fixed from version 3.15rc1"
+
+CVE_STATUS[CVE-2014-3144] = "fixed-version: Fixed from version 3.15rc2"
+
+CVE_STATUS[CVE-2014-3145] = "fixed-version: Fixed from version 3.15rc2"
+
+CVE_STATUS[CVE-2014-3153] = "fixed-version: Fixed from version 3.15"
+
+CVE_STATUS[CVE-2014-3180] = "fixed-version: Fixed from version 3.17rc4"
+
+CVE_STATUS[CVE-2014-3181] = "fixed-version: Fixed from version 3.17rc3"
+
+CVE_STATUS[CVE-2014-3182] = "fixed-version: Fixed from version 3.17rc2"
+
+CVE_STATUS[CVE-2014-3183] = "fixed-version: Fixed from version 3.17rc2"
+
+CVE_STATUS[CVE-2014-3184] = "fixed-version: Fixed from version 3.17rc2"
+
+CVE_STATUS[CVE-2014-3185] = "fixed-version: Fixed from version 3.17rc3"
+
+CVE_STATUS[CVE-2014-3186] = "fixed-version: Fixed from version 3.17rc3"
+
+# Skipping CVE-2014-3519, no affected_versions
+
+CVE_STATUS[CVE-2014-3534] = "fixed-version: Fixed from version 3.16rc7"
+
+CVE_STATUS[CVE-2014-3535] = "fixed-version: Fixed from version 2.6.36rc1"
+
+CVE_STATUS[CVE-2014-3601] = "fixed-version: Fixed from version 3.17rc2"
+
+CVE_STATUS[CVE-2014-3610] = "fixed-version: Fixed from version 3.18rc2"
+
+CVE_STATUS[CVE-2014-3611] = "fixed-version: Fixed from version 3.18rc2"
+
+CVE_STATUS[CVE-2014-3631] = "fixed-version: Fixed from version 3.17rc5"
+
+CVE_STATUS[CVE-2014-3645] = "fixed-version: Fixed from version 3.12rc1"
+
+CVE_STATUS[CVE-2014-3646] = "fixed-version: Fixed from version 3.18rc2"
+
+CVE_STATUS[CVE-2014-3647] = "fixed-version: Fixed from version 3.18rc2"
+
+CVE_STATUS[CVE-2014-3673] = "fixed-version: Fixed from version 3.18rc1"
+
+CVE_STATUS[CVE-2014-3687] = "fixed-version: Fixed from version 3.18rc1"
+
+CVE_STATUS[CVE-2014-3688] = "fixed-version: Fixed from version 3.18rc1"
+
+CVE_STATUS[CVE-2014-3690] = "fixed-version: Fixed from version 3.18rc1"
+
+CVE_STATUS[CVE-2014-3917] = "fixed-version: Fixed from version 3.16rc1"
+
+CVE_STATUS[CVE-2014-3940] = "fixed-version: Fixed from version 3.15"
+
+CVE_STATUS[CVE-2014-4014] = "fixed-version: Fixed from version 3.16rc1"
+
+CVE_STATUS[CVE-2014-4027] = "fixed-version: Fixed from version 3.14rc1"
+
+CVE_STATUS[CVE-2014-4157] = "fixed-version: Fixed from version 3.15rc1"
+
+CVE_STATUS[CVE-2014-4171] = "fixed-version: Fixed from version 3.16rc3"
+
+# Skipping CVE-2014-4322, no affected_versions
+
+# Skipping CVE-2014-4323, no affected_versions
+
+CVE_STATUS[CVE-2014-4508] = "fixed-version: Fixed from version 3.16rc3"
+
+CVE_STATUS[CVE-2014-4608] = "fixed-version: Fixed from version 3.18rc1"
+
+CVE_STATUS[CVE-2014-4611] = "fixed-version: Fixed from version 3.16rc3"
+
+CVE_STATUS[CVE-2014-4652] = "fixed-version: Fixed from version 3.16rc2"
+
+CVE_STATUS[CVE-2014-4653] = "fixed-version: Fixed from version 3.16rc2"
+
+CVE_STATUS[CVE-2014-4654] = "fixed-version: Fixed from version 3.16rc2"
+
+CVE_STATUS[CVE-2014-4655] = "fixed-version: Fixed from version 3.16rc2"
+
+CVE_STATUS[CVE-2014-4656] = "fixed-version: Fixed from version 3.16rc2"
+
+CVE_STATUS[CVE-2014-4667] = "fixed-version: Fixed from version 3.16rc1"
+
+CVE_STATUS[CVE-2014-4699] = "fixed-version: Fixed from version 3.16rc4"
+
+CVE_STATUS[CVE-2014-4943] = "fixed-version: Fixed from version 3.16rc6"
+
+CVE_STATUS[CVE-2014-5045] = "fixed-version: Fixed from version 3.16rc7"
+
+CVE_STATUS[CVE-2014-5077] = "fixed-version: Fixed from version 3.16"
+
+CVE_STATUS[CVE-2014-5206] = "fixed-version: Fixed from version 3.17rc1"
+
+CVE_STATUS[CVE-2014-5207] = "fixed-version: Fixed from version 3.17rc1"
+
+# Skipping CVE-2014-5332, no affected_versions
+
+CVE_STATUS[CVE-2014-5471] = "fixed-version: Fixed from version 3.17rc2"
+
+CVE_STATUS[CVE-2014-5472] = "fixed-version: Fixed from version 3.17rc2"
+
+CVE_STATUS[CVE-2014-6410] = "fixed-version: Fixed from version 3.17rc5"
+
+CVE_STATUS[CVE-2014-6416] = "fixed-version: Fixed from version 3.17rc5"
+
+CVE_STATUS[CVE-2014-6417] = "fixed-version: Fixed from version 3.17rc5"
+
+CVE_STATUS[CVE-2014-6418] = "fixed-version: Fixed from version 3.17rc5"
+
+CVE_STATUS[CVE-2014-7145] = "fixed-version: Fixed from version 3.17rc2"
+
+# Skipping CVE-2014-7207, no affected_versions
+
+CVE_STATUS[CVE-2014-7283] = "fixed-version: Fixed from version 3.15rc1"
+
+CVE_STATUS[CVE-2014-7284] = "fixed-version: Fixed from version 3.15rc7"
+
+CVE_STATUS[CVE-2014-7822] = "fixed-version: Fixed from version 3.16rc1"
+
+CVE_STATUS[CVE-2014-7825] = "fixed-version: Fixed from version 3.18rc3"
+
+CVE_STATUS[CVE-2014-7826] = "fixed-version: Fixed from version 3.18rc3"
+
+CVE_STATUS[CVE-2014-7841] = "fixed-version: Fixed from version 3.18rc5"
+
+CVE_STATUS[CVE-2014-7842] = "fixed-version: Fixed from version 3.18rc1"
+
+CVE_STATUS[CVE-2014-7843] = "fixed-version: Fixed from version 3.18rc5"
+
+CVE_STATUS[CVE-2014-7970] = "fixed-version: Fixed from version 3.18rc1"
+
+CVE_STATUS[CVE-2014-7975] = "fixed-version: Fixed from version 3.18rc1"
+
+CVE_STATUS[CVE-2014-8086] = "fixed-version: Fixed from version 3.18rc3"
+
+CVE_STATUS[CVE-2014-8133] = "fixed-version: Fixed from version 3.19rc1"
+
+CVE_STATUS[CVE-2014-8134] = "fixed-version: Fixed from version 3.19rc1"
+
+CVE_STATUS[CVE-2014-8159] = "fixed-version: Fixed from version 4.0rc7"
+
+CVE_STATUS[CVE-2014-8160] = "fixed-version: Fixed from version 3.18rc1"
+
+CVE_STATUS[CVE-2014-8171] = "fixed-version: Fixed from version 3.12rc1"
+
+CVE_STATUS[CVE-2014-8172] = "fixed-version: Fixed from version 3.13rc1"
+
+CVE_STATUS[CVE-2014-8173] = "fixed-version: Fixed from version 3.13rc5"
+
+# Skipping CVE-2014-8181, no affected_versions
+
+CVE_STATUS[CVE-2014-8369] = "fixed-version: Fixed from version 3.18rc2"
+
+CVE_STATUS[CVE-2014-8480] = "fixed-version: Fixed from version 3.18rc2"
+
+CVE_STATUS[CVE-2014-8481] = "fixed-version: Fixed from version 3.18rc2"
+
+CVE_STATUS[CVE-2014-8559] = "fixed-version: Fixed from version 3.19rc1"
+
+CVE_STATUS[CVE-2014-8709] = "fixed-version: Fixed from version 3.14rc3"
+
+CVE_STATUS[CVE-2014-8884] = "fixed-version: Fixed from version 3.18rc1"
+
+CVE_STATUS[CVE-2014-8989] = "fixed-version: Fixed from version 3.19rc1"
+
+CVE_STATUS[CVE-2014-9090] = "fixed-version: Fixed from version 3.18rc6"
+
+CVE_STATUS[CVE-2014-9322] = "fixed-version: Fixed from version 3.18rc6"
+
+CVE_STATUS[CVE-2014-9419] = "fixed-version: Fixed from version 3.19rc1"
+
+CVE_STATUS[CVE-2014-9420] = "fixed-version: Fixed from version 3.19rc1"
+
+CVE_STATUS[CVE-2014-9428] = "fixed-version: Fixed from version 3.19rc3"
+
+CVE_STATUS[CVE-2014-9529] = "fixed-version: Fixed from version 3.19rc4"
+
+CVE_STATUS[CVE-2014-9584] = "fixed-version: Fixed from version 3.19rc3"
+
+CVE_STATUS[CVE-2014-9585] = "fixed-version: Fixed from version 3.19rc4"
+
+CVE_STATUS[CVE-2014-9644] = "fixed-version: Fixed from version 3.19rc1"
+
+CVE_STATUS[CVE-2014-9683] = "fixed-version: Fixed from version 3.19rc1"
+
+CVE_STATUS[CVE-2014-9710] = "fixed-version: Fixed from version 3.19rc1"
+
+CVE_STATUS[CVE-2014-9715] = "fixed-version: Fixed from version 3.15rc1"
+
+CVE_STATUS[CVE-2014-9717] = "fixed-version: Fixed from version 4.1rc1"
+
+CVE_STATUS[CVE-2014-9728] = "fixed-version: Fixed from version 3.19rc3"
+
+CVE_STATUS[CVE-2014-9729] = "fixed-version: Fixed from version 3.19rc3"
+
+CVE_STATUS[CVE-2014-9730] = "fixed-version: Fixed from version 3.19rc3"
+
+CVE_STATUS[CVE-2014-9731] = "fixed-version: Fixed from version 3.19rc3"
+
+# Skipping CVE-2014-9777, no affected_versions
+
+# Skipping CVE-2014-9778, no affected_versions
+
+# Skipping CVE-2014-9779, no affected_versions
+
+# Skipping CVE-2014-9780, no affected_versions
+
+# Skipping CVE-2014-9781, no affected_versions
+
+# Skipping CVE-2014-9782, no affected_versions
+
+# Skipping CVE-2014-9783, no affected_versions
+
+# Skipping CVE-2014-9784, no affected_versions
+
+# Skipping CVE-2014-9785, no affected_versions
+
+# Skipping CVE-2014-9786, no affected_versions
+
+# Skipping CVE-2014-9787, no affected_versions
+
+# Skipping CVE-2014-9788, no affected_versions
+
+# Skipping CVE-2014-9789, no affected_versions
+
+CVE_STATUS[CVE-2014-9803] = "fixed-version: Fixed from version 3.16rc1"
+
+# Skipping CVE-2014-9863, no affected_versions
+
+# Skipping CVE-2014-9864, no affected_versions
+
+# Skipping CVE-2014-9865, no affected_versions
+
+# Skipping CVE-2014-9866, no affected_versions
+
+# Skipping CVE-2014-9867, no affected_versions
+
+# Skipping CVE-2014-9868, no affected_versions
+
+# Skipping CVE-2014-9869, no affected_versions
+
+CVE_STATUS[CVE-2014-9870] = "fixed-version: Fixed from version 3.11rc1"
+
+# Skipping CVE-2014-9871, no affected_versions
+
+# Skipping CVE-2014-9872, no affected_versions
+
+# Skipping CVE-2014-9873, no affected_versions
+
+# Skipping CVE-2014-9874, no affected_versions
+
+# Skipping CVE-2014-9875, no affected_versions
+
+# Skipping CVE-2014-9876, no affected_versions
+
+# Skipping CVE-2014-9877, no affected_versions
+
+# Skipping CVE-2014-9878, no affected_versions
+
+# Skipping CVE-2014-9879, no affected_versions
+
+# Skipping CVE-2014-9880, no affected_versions
+
+# Skipping CVE-2014-9881, no affected_versions
+
+# Skipping CVE-2014-9882, no affected_versions
+
+# Skipping CVE-2014-9883, no affected_versions
+
+# Skipping CVE-2014-9884, no affected_versions
+
+# Skipping CVE-2014-9885, no affected_versions
+
+# Skipping CVE-2014-9886, no affected_versions
+
+# Skipping CVE-2014-9887, no affected_versions
+
+CVE_STATUS[CVE-2014-9888] = "fixed-version: Fixed from version 3.13rc1"
+
+# Skipping CVE-2014-9889, no affected_versions
+
+# Skipping CVE-2014-9890, no affected_versions
+
+# Skipping CVE-2014-9891, no affected_versions
+
+# Skipping CVE-2014-9892, no affected_versions
+
+# Skipping CVE-2014-9893, no affected_versions
+
+# Skipping CVE-2014-9894, no affected_versions
+
+CVE_STATUS[CVE-2014-9895] = "fixed-version: Fixed from version 3.11rc1"
+
+# Skipping CVE-2014-9896, no affected_versions
+
+# Skipping CVE-2014-9897, no affected_versions
+
+# Skipping CVE-2014-9898, no affected_versions
+
+# Skipping CVE-2014-9899, no affected_versions
+
+# Skipping CVE-2014-9900, no affected_versions
+
+CVE_STATUS[CVE-2014-9903] = "fixed-version: Fixed from version 3.14rc4"
+
+CVE_STATUS[CVE-2014-9904] = "fixed-version: Fixed from version 3.17rc1"
+
+CVE_STATUS[CVE-2014-9914] = "fixed-version: Fixed from version 3.16rc1"
+
+CVE_STATUS[CVE-2014-9922] = "fixed-version: Fixed from version 3.18rc2"
+
+CVE_STATUS[CVE-2014-9940] = "fixed-version: Fixed from version 3.19rc1"
+
+CVE_STATUS[CVE-2015-0239] = "fixed-version: Fixed from version 3.19rc6"
+
+CVE_STATUS[CVE-2015-0274] = "fixed-version: Fixed from version 3.15rc5"
+
+CVE_STATUS[CVE-2015-0275] = "fixed-version: Fixed from version 4.1rc1"
+
+# Skipping CVE-2015-0777, no affected_versions
+
+# Skipping CVE-2015-1328, no affected_versions
+
+CVE_STATUS[CVE-2015-1333] = "fixed-version: Fixed from version 4.2rc5"
+
+CVE_STATUS[CVE-2015-1339] = "fixed-version: Fixed from version 4.4rc5"
+
+CVE_STATUS[CVE-2015-1350] = "fixed-version: Fixed from version 4.9rc1"
+
+CVE_STATUS[CVE-2015-1420] = "fixed-version: Fixed from version 4.1rc7"
+
+CVE_STATUS[CVE-2015-1421] = "fixed-version: Fixed from version 3.19rc7"
+
+CVE_STATUS[CVE-2015-1465] = "fixed-version: Fixed from version 3.19rc7"
+
+CVE_STATUS[CVE-2015-1573] = "fixed-version: Fixed from version 3.19rc5"
+
+CVE_STATUS[CVE-2015-1593] = "fixed-version: Fixed from version 4.0rc1"
+
+CVE_STATUS[CVE-2015-1805] = "fixed-version: Fixed from version 3.16rc1"
+
+CVE_STATUS[CVE-2015-2041] = "fixed-version: Fixed from version 3.19rc7"
+
+CVE_STATUS[CVE-2015-2042] = "fixed-version: Fixed from version 3.19"
+
+CVE_STATUS[CVE-2015-2150] = "fixed-version: Fixed from version 4.0rc4"
+
+CVE_STATUS[CVE-2015-2666] = "fixed-version: Fixed from version 4.0rc1"
+
+CVE_STATUS[CVE-2015-2672] = "fixed-version: Fixed from version 4.0rc3"
+
+CVE_STATUS[CVE-2015-2686] = "fixed-version: Fixed from version 4.0rc6"
+
+CVE_STATUS[CVE-2015-2830] = "fixed-version: Fixed from version 4.0rc3"
+
+# CVE-2015-2877 has no known resolution
+
+CVE_STATUS[CVE-2015-2922] = "fixed-version: Fixed from version 4.0rc7"
+
+CVE_STATUS[CVE-2015-2925] = "fixed-version: Fixed from version 4.3rc1"
+
+CVE_STATUS[CVE-2015-3212] = "fixed-version: Fixed from version 4.2rc1"
+
+CVE_STATUS[CVE-2015-3214] = "fixed-version: Fixed from version 2.6.33rc8"
+
+CVE_STATUS[CVE-2015-3288] = "fixed-version: Fixed from version 4.2rc2"
+
+CVE_STATUS[CVE-2015-3290] = "fixed-version: Fixed from version 4.2rc3"
+
+CVE_STATUS[CVE-2015-3291] = "fixed-version: Fixed from version 4.2rc3"
+
+CVE_STATUS[CVE-2015-3331] = "fixed-version: Fixed from version 4.0rc5"
+
+# Skipping CVE-2015-3332, no affected_versions
+
+CVE_STATUS[CVE-2015-3339] = "fixed-version: Fixed from version 4.1rc1"
+
+CVE_STATUS[CVE-2015-3636] = "fixed-version: Fixed from version 4.1rc2"
+
+CVE_STATUS[CVE-2015-4001] = "fixed-version: Fixed from version 4.1rc7"
+
+CVE_STATUS[CVE-2015-4002] = "fixed-version: Fixed from version 4.1rc7"
+
+CVE_STATUS[CVE-2015-4003] = "fixed-version: Fixed from version 4.1rc7"
+
+CVE_STATUS[CVE-2015-4004] = "fixed-version: Fixed from version 4.3rc1"
+
+CVE_STATUS[CVE-2015-4036] = "fixed-version: Fixed from version 4.0rc1"
+
+CVE_STATUS[CVE-2015-4167] = "fixed-version: Fixed from version 4.0rc1"
+
+CVE_STATUS[CVE-2015-4170] = "fixed-version: Fixed from version 3.13rc5"
+
+CVE_STATUS[CVE-2015-4176] = "fixed-version: Fixed from version 4.1rc1"
+
+CVE_STATUS[CVE-2015-4177] = "fixed-version: Fixed from version 4.1rc1"
+
+CVE_STATUS[CVE-2015-4178] = "fixed-version: Fixed from version 4.1rc1"
+
+CVE_STATUS[CVE-2015-4692] = "fixed-version: Fixed from version 4.2rc1"
+
+CVE_STATUS[CVE-2015-4700] = "fixed-version: Fixed from version 4.1rc6"
+
+CVE_STATUS[CVE-2015-5156] = "fixed-version: Fixed from version 4.2rc7"
+
+CVE_STATUS[CVE-2015-5157] = "fixed-version: Fixed from version 4.2rc3"
+
+CVE_STATUS[CVE-2015-5257] = "fixed-version: Fixed from version 4.3rc3"
+
+CVE_STATUS[CVE-2015-5283] = "fixed-version: Fixed from version 4.3rc3"
+
+CVE_STATUS[CVE-2015-5307] = "fixed-version: Fixed from version 4.4rc1"
+
+CVE_STATUS[CVE-2015-5327] = "fixed-version: Fixed from version 4.4rc1"
+
+CVE_STATUS[CVE-2015-5364] = "fixed-version: Fixed from version 4.1rc7"
+
+CVE_STATUS[CVE-2015-5366] = "fixed-version: Fixed from version 4.1rc7"
+
+CVE_STATUS[CVE-2015-5697] = "fixed-version: Fixed from version 4.2rc6"
+
+CVE_STATUS[CVE-2015-5706] = "fixed-version: Fixed from version 4.1rc3"
+
+CVE_STATUS[CVE-2015-5707] = "fixed-version: Fixed from version 4.1rc1"
+
+CVE_STATUS[CVE-2015-6252] = "fixed-version: Fixed from version 4.2rc5"
+
+CVE_STATUS[CVE-2015-6526] = "fixed-version: Fixed from version 4.1rc1"
+
+# CVE-2015-6619 has no known resolution
+
+# CVE-2015-6646 has no known resolution
+
+CVE_STATUS[CVE-2015-6937] = "fixed-version: Fixed from version 4.3rc1"
+
+# Skipping CVE-2015-7312, no affected_versions
+
+CVE_STATUS[CVE-2015-7509] = "fixed-version: Fixed from version 3.7rc1"
+
+CVE_STATUS[CVE-2015-7513] = "fixed-version: Fixed from version 4.4rc7"
+
+CVE_STATUS[CVE-2015-7515] = "fixed-version: Fixed from version 4.4rc6"
+
+CVE_STATUS[CVE-2015-7550] = "fixed-version: Fixed from version 4.4rc8"
+
+# Skipping CVE-2015-7553, no affected_versions
+
+CVE_STATUS[CVE-2015-7566] = "fixed-version: Fixed from version 4.5rc2"
+
+CVE_STATUS[CVE-2015-7613] = "fixed-version: Fixed from version 4.3rc4"
+
+CVE_STATUS[CVE-2015-7799] = "fixed-version: Fixed from version 4.4rc1"
+
+CVE_STATUS[CVE-2015-7833] = "fixed-version: Fixed from version 4.6rc6"
+
+# Skipping CVE-2015-7837, no affected_versions
+
+CVE_STATUS[CVE-2015-7872] = "fixed-version: Fixed from version 4.3rc7"
+
+CVE_STATUS[CVE-2015-7884] = "fixed-version: Fixed from version 4.4rc1"
+
+CVE_STATUS[CVE-2015-7885] = "fixed-version: Fixed from version 4.4rc1"
+
+CVE_STATUS[CVE-2015-7990] = "fixed-version: Fixed from version 4.4rc4"
+
+# Skipping CVE-2015-8019, no affected_versions
+
+CVE_STATUS[CVE-2015-8104] = "fixed-version: Fixed from version 4.4rc1"
+
+CVE_STATUS[CVE-2015-8215] = "fixed-version: Fixed from version 4.0rc3"
+
+CVE_STATUS[CVE-2015-8324] = "fixed-version: Fixed from version 2.6.34rc1"
+
+CVE_STATUS[CVE-2015-8374] = "fixed-version: Fixed from version 4.4rc1"
+
+CVE_STATUS[CVE-2015-8539] = "fixed-version: Fixed from version 4.4rc3"
+
+CVE_STATUS[CVE-2015-8543] = "fixed-version: Fixed from version 4.4rc6"
+
+CVE_STATUS[CVE-2015-8550] = "fixed-version: Fixed from version 4.4rc6"
+
+CVE_STATUS[CVE-2015-8551] = "fixed-version: Fixed from version 4.4rc6"
+
+CVE_STATUS[CVE-2015-8552] = "fixed-version: Fixed from version 4.4rc6"
+
+CVE_STATUS[CVE-2015-8553] = "fixed-version: Fixed from version 4.4rc6"
+
+CVE_STATUS[CVE-2015-8569] = "fixed-version: Fixed from version 4.4rc6"
+
+CVE_STATUS[CVE-2015-8575] = "fixed-version: Fixed from version 4.4rc6"
+
+CVE_STATUS[CVE-2015-8660] = "fixed-version: Fixed from version 4.4rc4"
+
+CVE_STATUS[CVE-2015-8709] = "fixed-version: Fixed from version 4.10rc1"
+
+CVE_STATUS[CVE-2015-8746] = "fixed-version: Fixed from version 4.3rc1"
+
+CVE_STATUS[CVE-2015-8767] = "fixed-version: Fixed from version 4.3rc4"
+
+CVE_STATUS[CVE-2015-8785] = "fixed-version: Fixed from version 4.4rc5"
+
+CVE_STATUS[CVE-2015-8787] = "fixed-version: Fixed from version 4.4rc1"
+
+CVE_STATUS[CVE-2015-8812] = "fixed-version: Fixed from version 4.5rc1"
+
+CVE_STATUS[CVE-2015-8816] = "fixed-version: Fixed from version 4.4rc6"
+
+CVE_STATUS[CVE-2015-8830] = "fixed-version: Fixed from version 4.1rc1"
+
+CVE_STATUS[CVE-2015-8839] = "fixed-version: Fixed from version 4.5rc1"
+
+CVE_STATUS[CVE-2015-8844] = "fixed-version: Fixed from version 4.4rc3"
+
+CVE_STATUS[CVE-2015-8845] = "fixed-version: Fixed from version 4.4rc3"
+
+# Skipping CVE-2015-8937, no affected_versions
+
+# Skipping CVE-2015-8938, no affected_versions
+
+# Skipping CVE-2015-8939, no affected_versions
+
+# Skipping CVE-2015-8940, no affected_versions
+
+# Skipping CVE-2015-8941, no affected_versions
+
+# Skipping CVE-2015-8942, no affected_versions
+
+# Skipping CVE-2015-8943, no affected_versions
+
+# Skipping CVE-2015-8944, no affected_versions
+
+CVE_STATUS[CVE-2015-8950] = "fixed-version: Fixed from version 4.1rc2"
+
+CVE_STATUS[CVE-2015-8952] = "fixed-version: Fixed from version 4.6rc1"
+
+CVE_STATUS[CVE-2015-8953] = "fixed-version: Fixed from version 4.3"
+
+CVE_STATUS[CVE-2015-8955] = "fixed-version: Fixed from version 4.1rc1"
+
+CVE_STATUS[CVE-2015-8956] = "fixed-version: Fixed from version 4.2rc1"
+
+CVE_STATUS[CVE-2015-8961] = "fixed-version: Fixed from version 4.4rc1"
+
+CVE_STATUS[CVE-2015-8962] = "fixed-version: Fixed from version 4.4rc1"
+
+CVE_STATUS[CVE-2015-8963] = "fixed-version: Fixed from version 4.4"
+
+CVE_STATUS[CVE-2015-8964] = "fixed-version: Fixed from version 4.5rc1"
+
+CVE_STATUS[CVE-2015-8966] = "fixed-version: Fixed from version 4.4rc8"
+
+CVE_STATUS[CVE-2015-8967] = "fixed-version: Fixed from version 4.0rc1"
+
+CVE_STATUS[CVE-2015-8970] = "fixed-version: Fixed from version 4.5rc1"
+
+CVE_STATUS[CVE-2015-9004] = "fixed-version: Fixed from version 3.19rc7"
+
+CVE_STATUS[CVE-2015-9016] = "fixed-version: Fixed from version 4.3rc1"
+
+CVE_STATUS[CVE-2015-9289] = "fixed-version: Fixed from version 4.2rc1"
+
+CVE_STATUS[CVE-2016-0617] = "fixed-version: Fixed from version 4.5rc1"
+
+CVE_STATUS[CVE-2016-0723] = "fixed-version: Fixed from version 4.5rc2"
+
+CVE_STATUS[CVE-2016-0728] = "fixed-version: Fixed from version 4.5rc1"
+
+CVE_STATUS[CVE-2016-0758] = "fixed-version: Fixed from version 4.6"
+
+# Skipping CVE-2016-0774, no affected_versions
+
+CVE_STATUS[CVE-2016-0821] = "fixed-version: Fixed from version 4.3rc1"
+
+CVE_STATUS[CVE-2016-0823] = "fixed-version: Fixed from version 4.0rc5"
+
+CVE_STATUS[CVE-2016-10044] = "fixed-version: Fixed from version 4.8rc7"
+
+CVE_STATUS[CVE-2016-10088] = "fixed-version: Fixed from version 4.10rc1"
+
+CVE_STATUS[CVE-2016-10147] = "fixed-version: Fixed from version 4.9"
+
+CVE_STATUS[CVE-2016-10150] = "fixed-version: Fixed from version 4.9rc8"
+
+CVE_STATUS[CVE-2016-10153] = "fixed-version: Fixed from version 4.10rc1"
+
+CVE_STATUS[CVE-2016-10154] = "fixed-version: Fixed from version 4.10rc1"
+
+CVE_STATUS[CVE-2016-10200] = "fixed-version: Fixed from version 4.9rc7"
+
+CVE_STATUS[CVE-2016-10208] = "fixed-version: Fixed from version 4.10rc1"
+
+CVE_STATUS[CVE-2016-10229] = "fixed-version: Fixed from version 4.5rc1"
+
+CVE_STATUS[CVE-2016-10318] = "fixed-version: Fixed from version 4.8rc6"
+
+CVE_STATUS[CVE-2016-10723] = "fixed-version: Fixed from version 4.19rc1"
+
+CVE_STATUS[CVE-2016-10741] = "fixed-version: Fixed from version 4.10rc1"
+
+CVE_STATUS[CVE-2016-10764] = "fixed-version: Fixed from version 4.10rc1"
+
+CVE_STATUS[CVE-2016-10905] = "fixed-version: Fixed from version 4.8rc1"
+
+CVE_STATUS[CVE-2016-10906] = "fixed-version: Fixed from version 4.5rc6"
+
+CVE_STATUS[CVE-2016-10907] = "fixed-version: Fixed from version 4.9rc1"
+
+CVE_STATUS[CVE-2016-1237] = "fixed-version: Fixed from version 4.7rc5"
+
+CVE_STATUS[CVE-2016-1575] = "fixed-version: Fixed from version 4.5rc1"
+
+CVE_STATUS[CVE-2016-1576] = "fixed-version: Fixed from version 4.5rc1"
+
+CVE_STATUS[CVE-2016-1583] = "fixed-version: Fixed from version 4.7rc3"
+
+CVE_STATUS[CVE-2016-2053] = "fixed-version: Fixed from version 4.3rc1"
+
+CVE_STATUS[CVE-2016-2069] = "fixed-version: Fixed from version 4.5rc1"
+
+CVE_STATUS[CVE-2016-2070] = "fixed-version: Fixed from version 4.4"
+
+CVE_STATUS[CVE-2016-2085] = "fixed-version: Fixed from version 4.5rc4"
+
+CVE_STATUS[CVE-2016-2117] = "fixed-version: Fixed from version 4.6rc5"
+
+CVE_STATUS[CVE-2016-2143] = "fixed-version: Fixed from version 4.5"
+
+CVE_STATUS[CVE-2016-2184] = "fixed-version: Fixed from version 4.6rc1"
+
+CVE_STATUS[CVE-2016-2185] = "fixed-version: Fixed from version 4.6rc1"
+
+CVE_STATUS[CVE-2016-2186] = "fixed-version: Fixed from version 4.6rc1"
+
+CVE_STATUS[CVE-2016-2187] = "fixed-version: Fixed from version 4.6rc5"
+
+CVE_STATUS[CVE-2016-2188] = "fixed-version: Fixed from version 4.11rc2"
+
+CVE_STATUS[CVE-2016-2383] = "fixed-version: Fixed from version 4.5rc4"
+
+CVE_STATUS[CVE-2016-2384] = "fixed-version: Fixed from version 4.5rc4"
+
+CVE_STATUS[CVE-2016-2543] = "fixed-version: Fixed from version 4.5rc1"
+
+CVE_STATUS[CVE-2016-2544] = "fixed-version: Fixed from version 4.5rc1"
+
+CVE_STATUS[CVE-2016-2545] = "fixed-version: Fixed from version 4.5rc1"
+
+CVE_STATUS[CVE-2016-2546] = "fixed-version: Fixed from version 4.5rc1"
+
+CVE_STATUS[CVE-2016-2547] = "fixed-version: Fixed from version 4.5rc1"
+
+CVE_STATUS[CVE-2016-2548] = "fixed-version: Fixed from version 4.5rc1"
+
+CVE_STATUS[CVE-2016-2549] = "fixed-version: Fixed from version 4.5rc1"
+
+CVE_STATUS[CVE-2016-2550] = "fixed-version: Fixed from version 4.5rc4"
+
+CVE_STATUS[CVE-2016-2782] = "fixed-version: Fixed from version 4.5rc2"
+
+CVE_STATUS[CVE-2016-2847] = "fixed-version: Fixed from version 4.5rc1"
+
+# Skipping CVE-2016-2853, no affected_versions
+
+# Skipping CVE-2016-2854, no affected_versions
+
+CVE_STATUS[CVE-2016-3044] = "fixed-version: Fixed from version 4.5"
+
+CVE_STATUS[CVE-2016-3070] = "fixed-version: Fixed from version 4.4rc1"
+
+CVE_STATUS[CVE-2016-3134] = "fixed-version: Fixed from version 4.6rc2"
+
+CVE_STATUS[CVE-2016-3135] = "fixed-version: Fixed from version 4.6rc1"
+
+CVE_STATUS[CVE-2016-3136] = "fixed-version: Fixed from version 4.6rc3"
+
+CVE_STATUS[CVE-2016-3137] = "fixed-version: Fixed from version 4.6rc3"
+
+CVE_STATUS[CVE-2016-3138] = "fixed-version: Fixed from version 4.6rc1"
+
+CVE_STATUS[CVE-2016-3139] = "fixed-version: Fixed from version 3.17rc1"
+
+CVE_STATUS[CVE-2016-3140] = "fixed-version: Fixed from version 4.6rc3"
+
+CVE_STATUS[CVE-2016-3156] = "fixed-version: Fixed from version 4.6rc1"
+
+CVE_STATUS[CVE-2016-3157] = "fixed-version: Fixed from version 4.6rc1"
+
+CVE_STATUS[CVE-2016-3672] = "fixed-version: Fixed from version 4.6rc1"
+
+CVE_STATUS[CVE-2016-3689] = "fixed-version: Fixed from version 4.6rc1"
+
+# Skipping CVE-2016-3695, no affected_versions
+
+# Skipping CVE-2016-3699, no affected_versions
+
+# Skipping CVE-2016-3707, no affected_versions
+
+CVE_STATUS[CVE-2016-3713] = "fixed-version: Fixed from version 4.7rc1"
+
+# CVE-2016-3775 has no known resolution
+
+# CVE-2016-3802 has no known resolution
+
+# CVE-2016-3803 has no known resolution
+
+CVE_STATUS[CVE-2016-3841] = "fixed-version: Fixed from version 4.4rc4"
+
+CVE_STATUS[CVE-2016-3857] = "fixed-version: Fixed from version 4.8rc2"
+
+CVE_STATUS[CVE-2016-3951] = "fixed-version: Fixed from version 4.5"
+
+CVE_STATUS[CVE-2016-3955] = "fixed-version: Fixed from version 4.6rc3"
+
+CVE_STATUS[CVE-2016-3961] = "fixed-version: Fixed from version 4.6rc5"
+
+CVE_STATUS[CVE-2016-4440] = "fixed-version: Fixed from version 4.7rc1"
+
+CVE_STATUS[CVE-2016-4470] = "fixed-version: Fixed from version 4.7rc4"
+
+CVE_STATUS[CVE-2016-4482] = "fixed-version: Fixed from version 4.7rc1"
+
+CVE_STATUS[CVE-2016-4485] = "fixed-version: Fixed from version 4.6"
+
+CVE_STATUS[CVE-2016-4486] = "fixed-version: Fixed from version 4.6"
+
+CVE_STATUS[CVE-2016-4557] = "fixed-version: Fixed from version 4.6rc6"
+
+CVE_STATUS[CVE-2016-4558] = "fixed-version: Fixed from version 4.6rc7"
+
+CVE_STATUS[CVE-2016-4565] = "fixed-version: Fixed from version 4.6rc6"
+
+CVE_STATUS[CVE-2016-4568] = "fixed-version: Fixed from version 4.6rc6"
+
+CVE_STATUS[CVE-2016-4569] = "fixed-version: Fixed from version 4.7rc1"
+
+CVE_STATUS[CVE-2016-4578] = "fixed-version: Fixed from version 4.7rc1"
+
+CVE_STATUS[CVE-2016-4580] = "fixed-version: Fixed from version 4.6"
+
+CVE_STATUS[CVE-2016-4581] = "fixed-version: Fixed from version 4.6rc7"
+
+CVE_STATUS[CVE-2016-4794] = "fixed-version: Fixed from version 4.7rc4"
+
+CVE_STATUS[CVE-2016-4805] = "fixed-version: Fixed from version 4.6rc1"
+
+CVE_STATUS[CVE-2016-4913] = "fixed-version: Fixed from version 4.6"
+
+CVE_STATUS[CVE-2016-4951] = "fixed-version: Fixed from version 4.7rc1"
+
+CVE_STATUS[CVE-2016-4997] = "fixed-version: Fixed from version 4.7rc1"
+
+CVE_STATUS[CVE-2016-4998] = "fixed-version: Fixed from version 4.7rc1"
+
+CVE_STATUS[CVE-2016-5195] = "fixed-version: Fixed from version 4.9rc2"
+
+CVE_STATUS[CVE-2016-5243] = "fixed-version: Fixed from version 4.7rc3"
+
+CVE_STATUS[CVE-2016-5244] = "fixed-version: Fixed from version 4.7rc3"
+
+# Skipping CVE-2016-5340, no affected_versions
+
+# Skipping CVE-2016-5342, no affected_versions
+
+# Skipping CVE-2016-5343, no affected_versions
+
+# Skipping CVE-2016-5344, no affected_versions
+
+CVE_STATUS[CVE-2016-5400] = "fixed-version: Fixed from version 4.7"
+
+CVE_STATUS[CVE-2016-5412] = "fixed-version: Fixed from version 4.8rc1"
+
+CVE_STATUS[CVE-2016-5696] = "fixed-version: Fixed from version 4.7"
+
+CVE_STATUS[CVE-2016-5728] = "fixed-version: Fixed from version 4.7rc1"
+
+CVE_STATUS[CVE-2016-5828] = "fixed-version: Fixed from version 4.7rc6"
+
+CVE_STATUS[CVE-2016-5829] = "fixed-version: Fixed from version 4.7rc5"
+
+# CVE-2016-5870 has no known resolution
+
+CVE_STATUS[CVE-2016-6130] = "fixed-version: Fixed from version 4.6rc6"
+
+CVE_STATUS[CVE-2016-6136] = "fixed-version: Fixed from version 4.8rc1"
+
+CVE_STATUS[CVE-2016-6156] = "fixed-version: Fixed from version 4.7rc7"
+
+CVE_STATUS[CVE-2016-6162] = "fixed-version: Fixed from version 4.7"
+
+CVE_STATUS[CVE-2016-6187] = "fixed-version: Fixed from version 4.7rc7"
+
+CVE_STATUS[CVE-2016-6197] = "fixed-version: Fixed from version 4.6rc1"
+
+CVE_STATUS[CVE-2016-6198] = "fixed-version: Fixed from version 4.6"
+
+CVE_STATUS[CVE-2016-6213] = "fixed-version: Fixed from version 4.9rc1"
+
+CVE_STATUS[CVE-2016-6327] = "fixed-version: Fixed from version 4.6rc1"
+
+CVE_STATUS[CVE-2016-6480] = "fixed-version: Fixed from version 4.8rc3"
+
+CVE_STATUS[CVE-2016-6516] = "fixed-version: Fixed from version 4.8rc1"
+
+# Skipping CVE-2016-6753, no affected_versions
+
+CVE_STATUS[CVE-2016-6786] = "fixed-version: Fixed from version 4.0rc1"
+
+CVE_STATUS[CVE-2016-6787] = "fixed-version: Fixed from version 4.0rc1"
+
+CVE_STATUS[CVE-2016-6828] = "fixed-version: Fixed from version 4.8rc5"
+
+CVE_STATUS[CVE-2016-7039] = "fixed-version: Fixed from version 4.9rc4"
+
+CVE_STATUS[CVE-2016-7042] = "fixed-version: Fixed from version 4.9rc3"
+
+CVE_STATUS[CVE-2016-7097] = "fixed-version: Fixed from version 4.9rc1"
+
+CVE_STATUS[CVE-2016-7117] = "fixed-version: Fixed from version 4.6rc1"
+
+# Skipping CVE-2016-7118, no affected_versions
+
+CVE_STATUS[CVE-2016-7425] = "fixed-version: Fixed from version 4.9rc1"
+
+CVE_STATUS[CVE-2016-7910] = "fixed-version: Fixed from version 4.8rc1"
+
+CVE_STATUS[CVE-2016-7911] = "fixed-version: Fixed from version 4.7rc7"
+
+CVE_STATUS[CVE-2016-7912] = "fixed-version: Fixed from version 4.6rc5"
+
+CVE_STATUS[CVE-2016-7913] = "fixed-version: Fixed from version 4.6rc1"
+
+CVE_STATUS[CVE-2016-7914] = "fixed-version: Fixed from version 4.6rc4"
+
+CVE_STATUS[CVE-2016-7915] = "fixed-version: Fixed from version 4.6rc1"
+
+CVE_STATUS[CVE-2016-7916] = "fixed-version: Fixed from version 4.6rc7"
+
+CVE_STATUS[CVE-2016-7917] = "fixed-version: Fixed from version 4.5rc6"
+
+CVE_STATUS[CVE-2016-8399] = "fixed-version: Fixed from version 4.9"
+
+# Skipping CVE-2016-8401, no affected_versions
+
+# Skipping CVE-2016-8402, no affected_versions
+
+# Skipping CVE-2016-8403, no affected_versions
+
+# Skipping CVE-2016-8404, no affected_versions
+
+CVE_STATUS[CVE-2016-8405] = "fixed-version: Fixed from version 4.10rc6"
+
+# Skipping CVE-2016-8406, no affected_versions
+
+# Skipping CVE-2016-8407, no affected_versions
+
+CVE_STATUS[CVE-2016-8630] = "fixed-version: Fixed from version 4.9rc4"
+
+CVE_STATUS[CVE-2016-8632] = "fixed-version: Fixed from version 4.9rc8"
+
+CVE_STATUS[CVE-2016-8633] = "fixed-version: Fixed from version 4.9rc4"
+
+CVE_STATUS[CVE-2016-8636] = "fixed-version: Fixed from version 4.10rc8"
+
+CVE_STATUS[CVE-2016-8645] = "fixed-version: Fixed from version 4.9rc6"
+
+CVE_STATUS[CVE-2016-8646] = "fixed-version: Fixed from version 4.4rc1"
+
+CVE_STATUS[CVE-2016-8650] = "fixed-version: Fixed from version 4.9rc7"
+
+CVE_STATUS[CVE-2016-8655] = "fixed-version: Fixed from version 4.9rc8"
+
+CVE_STATUS[CVE-2016-8658] = "fixed-version: Fixed from version 4.8rc7"
+
+# CVE-2016-8660 has no known resolution
+
+CVE_STATUS[CVE-2016-8666] = "fixed-version: Fixed from version 4.6rc1"
+
+CVE_STATUS[CVE-2016-9083] = "fixed-version: Fixed from version 4.9rc4"
+
+CVE_STATUS[CVE-2016-9084] = "fixed-version: Fixed from version 4.9rc4"
+
+CVE_STATUS[CVE-2016-9120] = "fixed-version: Fixed from version 4.6rc1"
+
+CVE_STATUS[CVE-2016-9178] = "fixed-version: Fixed from version 4.8rc7"
+
+CVE_STATUS[CVE-2016-9191] = "fixed-version: Fixed from version 4.10rc4"
+
+CVE_STATUS[CVE-2016-9313] = "fixed-version: Fixed from version 4.9rc3"
+
+CVE_STATUS[CVE-2016-9555] = "fixed-version: Fixed from version 4.9rc4"
+
+CVE_STATUS[CVE-2016-9576] = "fixed-version: Fixed from version 4.9"
+
+CVE_STATUS[CVE-2016-9588] = "fixed-version: Fixed from version 4.10rc1"
+
+CVE_STATUS[CVE-2016-9604] = "fixed-version: Fixed from version 4.11rc8"
+
+# Skipping CVE-2016-9644, no affected_versions
+
+CVE_STATUS[CVE-2016-9685] = "fixed-version: Fixed from version 4.6rc1"
+
+CVE_STATUS[CVE-2016-9754] = "fixed-version: Fixed from version 4.7rc1"
+
+CVE_STATUS[CVE-2016-9755] = "fixed-version: Fixed from version 4.9rc8"
+
+CVE_STATUS[CVE-2016-9756] = "fixed-version: Fixed from version 4.9rc7"
+
+CVE_STATUS[CVE-2016-9777] = "fixed-version: Fixed from version 4.9rc7"
+
+CVE_STATUS[CVE-2016-9793] = "fixed-version: Fixed from version 4.9rc8"
+
+CVE_STATUS[CVE-2016-9794] = "fixed-version: Fixed from version 4.7rc1"
+
+CVE_STATUS[CVE-2016-9806] = "fixed-version: Fixed from version 4.7rc1"
+
+CVE_STATUS[CVE-2016-9919] = "fixed-version: Fixed from version 4.9rc8"
+
+# Skipping CVE-2017-0403, no affected_versions
+
+# Skipping CVE-2017-0404, no affected_versions
+
+# Skipping CVE-2017-0426, no affected_versions
+
+# Skipping CVE-2017-0427, no affected_versions
+
+# CVE-2017-0507 has no known resolution
+
+# CVE-2017-0508 has no known resolution
+
+# Skipping CVE-2017-0510, no affected_versions
+
+# Skipping CVE-2017-0528, no affected_versions
+
+# Skipping CVE-2017-0537, no affected_versions
+
+# CVE-2017-0564 has no known resolution
+
+CVE_STATUS[CVE-2017-0605] = "fixed-version: Fixed from version 4.12rc1"
+
+CVE_STATUS[CVE-2017-0627] = "fixed-version: Fixed from version 4.14rc1"
+
+# CVE-2017-0630 has no known resolution
+
+# CVE-2017-0749 has no known resolution
+
+CVE_STATUS[CVE-2017-0750] = "fixed-version: Fixed from version 4.5rc1"
+
+CVE_STATUS[CVE-2017-0786] = "fixed-version: Fixed from version 4.14rc4"
+
+CVE_STATUS[CVE-2017-0861] = "fixed-version: Fixed from version 4.15rc3"
+
+CVE_STATUS[CVE-2017-1000] = "fixed-version: Fixed from version 4.13rc5"
+
+CVE_STATUS[CVE-2017-1000111] = "fixed-version: Fixed from version 4.13rc5"
+
+CVE_STATUS[CVE-2017-1000112] = "fixed-version: Fixed from version 4.13rc5"
+
+CVE_STATUS[CVE-2017-1000251] = "fixed-version: Fixed from version 4.14rc1"
+
+CVE_STATUS[CVE-2017-1000252] = "fixed-version: Fixed from version 4.14rc1"
+
+CVE_STATUS[CVE-2017-1000253] = "fixed-version: Fixed from version 4.1rc1"
+
+CVE_STATUS[CVE-2017-1000255] = "fixed-version: Fixed from version 4.14rc5"
+
+CVE_STATUS[CVE-2017-1000363] = "fixed-version: Fixed from version 4.12rc2"
+
+CVE_STATUS[CVE-2017-1000364] = "fixed-version: Fixed from version 4.12rc6"
+
+CVE_STATUS[CVE-2017-1000365] = "fixed-version: Fixed from version 4.12rc7"
+
+CVE_STATUS[CVE-2017-1000370] = "fixed-version: Fixed from version 4.13rc1"
+
+CVE_STATUS[CVE-2017-1000371] = "fixed-version: Fixed from version 4.13rc1"
+
+CVE_STATUS[CVE-2017-1000379] = "fixed-version: Fixed from version 4.12rc6"
+
+CVE_STATUS[CVE-2017-1000380] = "fixed-version: Fixed from version 4.12rc5"
+
+CVE_STATUS[CVE-2017-1000405] = "fixed-version: Fixed from version 4.15rc2"
+
+CVE_STATUS[CVE-2017-1000407] = "fixed-version: Fixed from version 4.15rc3"
+
+CVE_STATUS[CVE-2017-1000410] = "fixed-version: Fixed from version 4.15rc8"
+
+CVE_STATUS[CVE-2017-10661] = "fixed-version: Fixed from version 4.11rc1"
+
+CVE_STATUS[CVE-2017-10662] = "fixed-version: Fixed from version 4.12rc1"
+
+CVE_STATUS[CVE-2017-10663] = "fixed-version: Fixed from version 4.13rc1"
+
+CVE_STATUS[CVE-2017-10810] = "fixed-version: Fixed from version 4.12rc1"
+
+CVE_STATUS[CVE-2017-10911] = "fixed-version: Fixed from version 4.12rc7"
+
+CVE_STATUS[CVE-2017-11089] = "fixed-version: Fixed from version 4.13rc1"
+
+CVE_STATUS[CVE-2017-11176] = "fixed-version: Fixed from version 4.13rc1"
+
+CVE_STATUS[CVE-2017-11472] = "fixed-version: Fixed from version 4.12rc1"
+
+CVE_STATUS[CVE-2017-11473] = "fixed-version: Fixed from version 4.13rc2"
+
+CVE_STATUS[CVE-2017-11600] = "fixed-version: Fixed from version 4.13"
+
+CVE_STATUS[CVE-2017-12134] = "fixed-version: Fixed from version 4.13rc6"
+
+CVE_STATUS[CVE-2017-12146] = "fixed-version: Fixed from version 4.13rc1"
+
+CVE_STATUS[CVE-2017-12153] = "fixed-version: Fixed from version 4.14rc2"
+
+CVE_STATUS[CVE-2017-12154] = "fixed-version: Fixed from version 4.14rc1"
+
+CVE_STATUS[CVE-2017-12168] = "fixed-version: Fixed from version 4.9rc6"
+
+CVE_STATUS[CVE-2017-12188] = "fixed-version: Fixed from version 4.14rc5"
+
+CVE_STATUS[CVE-2017-12190] = "fixed-version: Fixed from version 4.14rc5"
+
+CVE_STATUS[CVE-2017-12192] = "fixed-version: Fixed from version 4.14rc3"
+
+CVE_STATUS[CVE-2017-12193] = "fixed-version: Fixed from version 4.14rc7"
+
+CVE_STATUS[CVE-2017-12762] = "fixed-version: Fixed from version 4.13rc4"
+
+CVE_STATUS[CVE-2017-13080] = "fixed-version: Fixed from version 4.14rc6"
+
+CVE_STATUS[CVE-2017-13166] = "fixed-version: Fixed from version 4.16rc1"
+
+CVE_STATUS[CVE-2017-13167] = "fixed-version: Fixed from version 4.5rc4"
+
+CVE_STATUS[CVE-2017-13168] = "fixed-version: Fixed from version 4.18rc4"
+
+CVE_STATUS[CVE-2017-13215] = "fixed-version: Fixed from version 4.5rc1"
+
+CVE_STATUS[CVE-2017-13216] = "fixed-version: Fixed from version 4.15rc8"
+
+CVE_STATUS[CVE-2017-13220] = "fixed-version: Fixed from version 3.19rc3"
+
+# CVE-2017-13221 has no known resolution
+
+# CVE-2017-13222 has no known resolution
+
+CVE_STATUS[CVE-2017-13305] = "fixed-version: Fixed from version 4.12rc5"
+
+CVE_STATUS[CVE-2017-13686] = "fixed-version: Fixed from version 4.13rc7"
+
+# CVE-2017-13693 has no known resolution
+
+# CVE-2017-13694 has no known resolution
+
+CVE_STATUS[CVE-2017-13695] = "fixed-version: Fixed from version 4.17rc1"
+
+CVE_STATUS[CVE-2017-13715] = "fixed-version: Fixed from version 4.3rc1"
+
+CVE_STATUS[CVE-2017-14051] = "fixed-version: Fixed from version 4.14rc1"
+
+CVE_STATUS[CVE-2017-14106] = "fixed-version: Fixed from version 4.12rc3"
+
+CVE_STATUS[CVE-2017-14140] = "fixed-version: Fixed from version 4.13rc6"
+
+CVE_STATUS[CVE-2017-14156] = "fixed-version: Fixed from version 4.14rc1"
+
+CVE_STATUS[CVE-2017-14340] = "fixed-version: Fixed from version 4.14rc1"
+
+CVE_STATUS[CVE-2017-14489] = "fixed-version: Fixed from version 4.14rc3"
+
+CVE_STATUS[CVE-2017-14497] = "fixed-version: Fixed from version 4.13"
+
+CVE_STATUS[CVE-2017-14954] = "fixed-version: Fixed from version 4.14rc3"
+
+CVE_STATUS[CVE-2017-14991] = "fixed-version: Fixed from version 4.14rc2"
+
+CVE_STATUS[CVE-2017-15102] = "fixed-version: Fixed from version 4.9rc1"
+
+CVE_STATUS[CVE-2017-15115] = "fixed-version: Fixed from version 4.14rc6"
+
+CVE_STATUS[CVE-2017-15116] = "fixed-version: Fixed from version 4.2rc1"
+
+CVE_STATUS[CVE-2017-15121] = "fixed-version: Fixed from version 3.11rc1"
+
+CVE_STATUS[CVE-2017-15126] = "fixed-version: Fixed from version 4.14rc4"
+
+CVE_STATUS[CVE-2017-15127] = "fixed-version: Fixed from version 4.13rc5"
+
+CVE_STATUS[CVE-2017-15128] = "fixed-version: Fixed from version 4.14rc8"
+
+CVE_STATUS[CVE-2017-15129] = "fixed-version: Fixed from version 4.15rc5"
+
+CVE_STATUS[CVE-2017-15265] = "fixed-version: Fixed from version 4.14rc5"
+
+CVE_STATUS[CVE-2017-15274] = "fixed-version: Fixed from version 4.12rc5"
+
+CVE_STATUS[CVE-2017-15299] = "fixed-version: Fixed from version 4.14rc6"
+
+CVE_STATUS[CVE-2017-15306] = "fixed-version: Fixed from version 4.14rc7"
+
+CVE_STATUS[CVE-2017-15537] = "fixed-version: Fixed from version 4.14rc3"
+
+CVE_STATUS[CVE-2017-15649] = "fixed-version: Fixed from version 4.14rc4"
+
+CVE_STATUS[CVE-2017-15868] = "fixed-version: Fixed from version 3.19rc3"
+
+CVE_STATUS[CVE-2017-15951] = "fixed-version: Fixed from version 4.14rc6"
+
+CVE_STATUS[CVE-2017-16525] = "fixed-version: Fixed from version 4.14rc5"
+
+CVE_STATUS[CVE-2017-16526] = "fixed-version: Fixed from version 4.14rc4"
+
+CVE_STATUS[CVE-2017-16527] = "fixed-version: Fixed from version 4.14rc5"
+
+CVE_STATUS[CVE-2017-16528] = "fixed-version: Fixed from version 4.14rc1"
+
+CVE_STATUS[CVE-2017-16529] = "fixed-version: Fixed from version 4.14rc4"
+
+CVE_STATUS[CVE-2017-16530] = "fixed-version: Fixed from version 4.14rc4"
+
+CVE_STATUS[CVE-2017-16531] = "fixed-version: Fixed from version 4.14rc4"
+
+CVE_STATUS[CVE-2017-16532] = "fixed-version: Fixed from version 4.14rc5"
+
+CVE_STATUS[CVE-2017-16533] = "fixed-version: Fixed from version 4.14rc5"
+
+CVE_STATUS[CVE-2017-16534] = "fixed-version: Fixed from version 4.14rc4"
+
+CVE_STATUS[CVE-2017-16535] = "fixed-version: Fixed from version 4.14rc6"
+
+CVE_STATUS[CVE-2017-16536] = "fixed-version: Fixed from version 4.15rc1"
+
+CVE_STATUS[CVE-2017-16537] = "fixed-version: Fixed from version 4.15rc1"
+
+CVE_STATUS[CVE-2017-16538] = "fixed-version: Fixed from version 4.16rc1"
+
+CVE_STATUS[CVE-2017-16643] = "fixed-version: Fixed from version 4.14rc7"
+
+CVE_STATUS[CVE-2017-16644] = "fixed-version: Fixed from version 4.16rc1"
+
+CVE_STATUS[CVE-2017-16645] = "fixed-version: Fixed from version 4.14rc6"
+
+CVE_STATUS[CVE-2017-16646] = "fixed-version: Fixed from version 4.15rc1"
+
+CVE_STATUS[CVE-2017-16647] = "fixed-version: Fixed from version 4.14"
+
+CVE_STATUS[CVE-2017-16648] = "fixed-version: Fixed from version 4.15rc1"
+
+CVE_STATUS[CVE-2017-16649] = "fixed-version: Fixed from version 4.14"
+
+CVE_STATUS[CVE-2017-16650] = "fixed-version: Fixed from version 4.14"
+
+CVE_STATUS[CVE-2017-16911] = "fixed-version: Fixed from version 4.15rc4"
+
+CVE_STATUS[CVE-2017-16912] = "fixed-version: Fixed from version 4.15rc4"
+
+CVE_STATUS[CVE-2017-16913] = "fixed-version: Fixed from version 4.15rc4"
+
+CVE_STATUS[CVE-2017-16914] = "fixed-version: Fixed from version 4.15rc4"
+
+CVE_STATUS[CVE-2017-16939] = "fixed-version: Fixed from version 4.14rc7"
+
+CVE_STATUS[CVE-2017-16994] = "fixed-version: Fixed from version 4.15rc1"
+
+CVE_STATUS[CVE-2017-16995] = "fixed-version: Fixed from version 4.15rc5"
+
+CVE_STATUS[CVE-2017-16996] = "fixed-version: Fixed from version 4.15rc5"
+
+CVE_STATUS[CVE-2017-17052] = "fixed-version: Fixed from version 4.13rc7"
+
+CVE_STATUS[CVE-2017-17053] = "fixed-version: Fixed from version 4.13rc7"
+
+CVE_STATUS[CVE-2017-17448] = "fixed-version: Fixed from version 4.15rc4"
+
+CVE_STATUS[CVE-2017-17449] = "fixed-version: Fixed from version 4.15rc4"
+
+CVE_STATUS[CVE-2017-17450] = "fixed-version: Fixed from version 4.15rc4"
+
+CVE_STATUS[CVE-2017-17558] = "fixed-version: Fixed from version 4.15rc4"
+
+CVE_STATUS[CVE-2017-17712] = "fixed-version: Fixed from version 4.15rc4"
+
+CVE_STATUS[CVE-2017-17741] = "fixed-version: Fixed from version 4.15rc5"
+
+CVE_STATUS[CVE-2017-17805] = "fixed-version: Fixed from version 4.15rc4"
+
+CVE_STATUS[CVE-2017-17806] = "fixed-version: Fixed from version 4.15rc4"
+
+CVE_STATUS[CVE-2017-17807] = "fixed-version: Fixed from version 4.15rc3"
+
+CVE_STATUS[CVE-2017-17852] = "fixed-version: Fixed from version 4.15rc5"
+
+CVE_STATUS[CVE-2017-17853] = "fixed-version: Fixed from version 4.15rc5"
+
+CVE_STATUS[CVE-2017-17854] = "fixed-version: Fixed from version 4.15rc5"
+
+CVE_STATUS[CVE-2017-17855] = "fixed-version: Fixed from version 4.15rc5"
+
+CVE_STATUS[CVE-2017-17856] = "fixed-version: Fixed from version 4.15rc5"
+
+CVE_STATUS[CVE-2017-17857] = "fixed-version: Fixed from version 4.15rc5"
+
+CVE_STATUS[CVE-2017-17862] = "fixed-version: Fixed from version 4.15rc1"
+
+CVE_STATUS[CVE-2017-17863] = "fixed-version: Fixed from version 4.15rc5"
+
+CVE_STATUS[CVE-2017-17864] = "fixed-version: Fixed from version 4.15rc5"
+
+CVE_STATUS[CVE-2017-17975] = "fixed-version: Fixed from version 4.17rc1"
+
+CVE_STATUS[CVE-2017-18017] = "fixed-version: Fixed from version 4.11rc7"
+
+CVE_STATUS[CVE-2017-18075] = "fixed-version: Fixed from version 4.15rc7"
+
+CVE_STATUS[CVE-2017-18079] = "fixed-version: Fixed from version 4.13rc1"
+
+# CVE-2017-18169 has no known resolution
+
+CVE_STATUS[CVE-2017-18174] = "fixed-version: Fixed from version 4.7rc1"
+
+CVE_STATUS[CVE-2017-18193] = "fixed-version: Fixed from version 4.13rc1"
+
+CVE_STATUS[CVE-2017-18200] = "fixed-version: Fixed from version 4.14rc5"
+
+CVE_STATUS[CVE-2017-18202] = "fixed-version: Fixed from version 4.15rc2"
+
+CVE_STATUS[CVE-2017-18203] = "fixed-version: Fixed from version 4.15rc1"
+
+CVE_STATUS[CVE-2017-18204] = "fixed-version: Fixed from version 4.15rc1"
+
+CVE_STATUS[CVE-2017-18208] = "fixed-version: Fixed from version 4.15rc2"
+
+CVE_STATUS[CVE-2017-18216] = "fixed-version: Fixed from version 4.15rc1"
+
+CVE_STATUS[CVE-2017-18218] = "fixed-version: Fixed from version 4.13rc1"
+
+CVE_STATUS[CVE-2017-18221] = "fixed-version: Fixed from version 4.12rc4"
+
+CVE_STATUS[CVE-2017-18222] = "fixed-version: Fixed from version 4.12rc1"
+
+CVE_STATUS[CVE-2017-18224] = "fixed-version: Fixed from version 4.15rc1"
+
+CVE_STATUS[CVE-2017-18232] = "fixed-version: Fixed from version 4.16rc1"
+
+CVE_STATUS[CVE-2017-18241] = "fixed-version: Fixed from version 4.13rc1"
+
+CVE_STATUS[CVE-2017-18249] = "fixed-version: Fixed from version 4.12rc1"
+
+CVE_STATUS[CVE-2017-18255] = "fixed-version: Fixed from version 4.11rc1"
+
+CVE_STATUS[CVE-2017-18257] = "fixed-version: Fixed from version 4.11rc1"
+
+CVE_STATUS[CVE-2017-18261] = "fixed-version: Fixed from version 4.13rc6"
+
+CVE_STATUS[CVE-2017-18270] = "fixed-version: Fixed from version 4.14rc3"
+
+CVE_STATUS[CVE-2017-18344] = "fixed-version: Fixed from version 4.15rc4"
+
+CVE_STATUS[CVE-2017-18360] = "fixed-version: Fixed from version 4.12rc2"
+
+CVE_STATUS[CVE-2017-18379] = "fixed-version: Fixed from version 4.14rc3"
+
+CVE_STATUS[CVE-2017-18509] = "fixed-version: Fixed from version 4.11rc1"
+
+CVE_STATUS[CVE-2017-18549] = "fixed-version: Fixed from version 4.13rc1"
+
+CVE_STATUS[CVE-2017-18550] = "fixed-version: Fixed from version 4.13rc1"
+
+CVE_STATUS[CVE-2017-18551] = "fixed-version: Fixed from version 4.15rc9"
+
+CVE_STATUS[CVE-2017-18552] = "fixed-version: Fixed from version 4.11rc1"
+
+CVE_STATUS[CVE-2017-18595] = "fixed-version: Fixed from version 4.15rc6"
+
+CVE_STATUS[CVE-2017-2583] = "fixed-version: Fixed from version 4.10rc4"
+
+CVE_STATUS[CVE-2017-2584] = "fixed-version: Fixed from version 4.10rc4"
+
+CVE_STATUS[CVE-2017-2596] = "fixed-version: Fixed from version 4.11rc1"
+
+CVE_STATUS[CVE-2017-2618] = "fixed-version: Fixed from version 4.10rc8"
+
+CVE_STATUS[CVE-2017-2634] = "fixed-version: Fixed from version 2.6.25rc1"
+
+CVE_STATUS[CVE-2017-2636] = "fixed-version: Fixed from version 4.11rc2"
+
+CVE_STATUS[CVE-2017-2647] = "fixed-version: Fixed from version 3.18rc1"
+
+CVE_STATUS[CVE-2017-2671] = "fixed-version: Fixed from version 4.11rc6"
+
+CVE_STATUS[CVE-2017-5123] = "fixed-version: Fixed from version 4.14rc5"
+
+CVE_STATUS[CVE-2017-5546] = "fixed-version: Fixed from version 4.10rc4"
+
+CVE_STATUS[CVE-2017-5547] = "fixed-version: Fixed from version 4.10rc5"
+
+CVE_STATUS[CVE-2017-5548] = "fixed-version: Fixed from version 4.10rc5"
+
+CVE_STATUS[CVE-2017-5549] = "fixed-version: Fixed from version 4.10rc4"
+
+CVE_STATUS[CVE-2017-5550] = "fixed-version: Fixed from version 4.10rc4"
+
+CVE_STATUS[CVE-2017-5551] = "fixed-version: Fixed from version 4.10rc4"
+
+CVE_STATUS[CVE-2017-5576] = "fixed-version: Fixed from version 4.10rc6"
+
+CVE_STATUS[CVE-2017-5577] = "fixed-version: Fixed from version 4.10rc6"
+
+CVE_STATUS[CVE-2017-5669] = "fixed-version: Fixed from version 4.11rc1"
+
+CVE_STATUS[CVE-2017-5715] = "fixed-version: Fixed from version 4.15rc8"
+
+CVE_STATUS[CVE-2017-5753] = "fixed-version: Fixed from version 4.15rc8"
+
+CVE_STATUS[CVE-2017-5754] = "fixed-version: Fixed from version 4.16rc1"
+
+CVE_STATUS[CVE-2017-5897] = "fixed-version: Fixed from version 4.10rc8"
+
+CVE_STATUS[CVE-2017-5967] = "fixed-version: Fixed from version 4.11rc1"
+
+CVE_STATUS[CVE-2017-5970] = "fixed-version: Fixed from version 4.10rc8"
+
+CVE_STATUS[CVE-2017-5972] = "fixed-version: Fixed from version 4.4rc1"
+
+CVE_STATUS[CVE-2017-5986] = "fixed-version: Fixed from version 4.10rc8"
+
+CVE_STATUS[CVE-2017-6001] = "fixed-version: Fixed from version 4.10rc4"
+
+CVE_STATUS[CVE-2017-6074] = "fixed-version: Fixed from version 4.10"
+
+CVE_STATUS[CVE-2017-6214] = "fixed-version: Fixed from version 4.10rc8"
+
+CVE_STATUS[CVE-2017-6345] = "fixed-version: Fixed from version 4.10"
+
+CVE_STATUS[CVE-2017-6346] = "fixed-version: Fixed from version 4.10"
+
+CVE_STATUS[CVE-2017-6347] = "fixed-version: Fixed from version 4.11rc1"
+
+CVE_STATUS[CVE-2017-6348] = "fixed-version: Fixed from version 4.10"
+
+CVE_STATUS[CVE-2017-6353] = "fixed-version: Fixed from version 4.11rc1"
+
+CVE_STATUS[CVE-2017-6874] = "fixed-version: Fixed from version 4.11rc2"
+
+CVE_STATUS[CVE-2017-6951] = "fixed-version: Fixed from version 3.18rc1"
+
+CVE_STATUS[CVE-2017-7184] = "fixed-version: Fixed from version 4.11rc5"
+
+CVE_STATUS[CVE-2017-7187] = "fixed-version: Fixed from version 4.11rc5"
+
+CVE_STATUS[CVE-2017-7261] = "fixed-version: Fixed from version 4.11rc6"
+
+CVE_STATUS[CVE-2017-7273] = "fixed-version: Fixed from version 4.10rc4"
+
+CVE_STATUS[CVE-2017-7277] = "fixed-version: Fixed from version 4.11rc4"
+
+CVE_STATUS[CVE-2017-7294] = "fixed-version: Fixed from version 4.11rc6"
+
+CVE_STATUS[CVE-2017-7308] = "fixed-version: Fixed from version 4.11rc6"
+
+CVE_STATUS[CVE-2017-7346] = "fixed-version: Fixed from version 4.12rc5"
+
+# CVE-2017-7369 has no known resolution
+
+CVE_STATUS[CVE-2017-7374] = "fixed-version: Fixed from version 4.11rc4"
+
+CVE_STATUS[CVE-2017-7472] = "fixed-version: Fixed from version 4.11rc8"
+
+CVE_STATUS[CVE-2017-7477] = "fixed-version: Fixed from version 4.11"
+
+CVE_STATUS[CVE-2017-7482] = "fixed-version: Fixed from version 4.12rc7"
+
+CVE_STATUS[CVE-2017-7487] = "fixed-version: Fixed from version 4.12rc1"
+
+CVE_STATUS[CVE-2017-7495] = "fixed-version: Fixed from version 4.7rc1"
+
+CVE_STATUS[CVE-2017-7518] = "fixed-version: Fixed from version 4.12rc7"
+
+CVE_STATUS[CVE-2017-7533] = "fixed-version: Fixed from version 4.13rc1"
+
+CVE_STATUS[CVE-2017-7541] = "fixed-version: Fixed from version 4.13rc1"
+
+CVE_STATUS[CVE-2017-7542] = "fixed-version: Fixed from version 4.13rc2"
+
+CVE_STATUS[CVE-2017-7558] = "fixed-version: Fixed from version 4.13"
+
+CVE_STATUS[CVE-2017-7616] = "fixed-version: Fixed from version 4.11rc6"
+
+CVE_STATUS[CVE-2017-7618] = "fixed-version: Fixed from version 4.11rc8"
+
+CVE_STATUS[CVE-2017-7645] = "fixed-version: Fixed from version 4.11"
+
+CVE_STATUS[CVE-2017-7889] = "fixed-version: Fixed from version 4.11rc7"
+
+CVE_STATUS[CVE-2017-7895] = "fixed-version: Fixed from version 4.11"
+
+CVE_STATUS[CVE-2017-7979] = "fixed-version: Fixed from version 4.11rc8"
+
+CVE_STATUS[CVE-2017-8061] = "fixed-version: Fixed from version 4.11rc4"
+
+CVE_STATUS[CVE-2017-8062] = "fixed-version: Fixed from version 4.11rc2"
+
+CVE_STATUS[CVE-2017-8063] = "fixed-version: Fixed from version 4.11rc1"
+
+CVE_STATUS[CVE-2017-8064] = "fixed-version: Fixed from version 4.11rc1"
+
+CVE_STATUS[CVE-2017-8065] = "fixed-version: Fixed from version 4.11rc1"
+
+CVE_STATUS[CVE-2017-8066] = "fixed-version: Fixed from version 4.11rc1"
+
+CVE_STATUS[CVE-2017-8067] = "fixed-version: Fixed from version 4.11rc1"
+
+CVE_STATUS[CVE-2017-8068] = "fixed-version: Fixed from version 4.10rc8"
+
+CVE_STATUS[CVE-2017-8069] = "fixed-version: Fixed from version 4.10rc8"
+
+CVE_STATUS[CVE-2017-8070] = "fixed-version: Fixed from version 4.10rc8"
+
+CVE_STATUS[CVE-2017-8071] = "fixed-version: Fixed from version 4.10rc7"
+
+CVE_STATUS[CVE-2017-8072] = "fixed-version: Fixed from version 4.10rc7"
+
+CVE_STATUS[CVE-2017-8106] = "fixed-version: Fixed from version 3.16rc1"
+
+CVE_STATUS[CVE-2017-8240] = "fixed-version: Fixed from version 3.19rc6"
+
+# CVE-2017-8242 has no known resolution
+
+# CVE-2017-8244 has no known resolution
+
+# CVE-2017-8245 has no known resolution
+
+# CVE-2017-8246 has no known resolution
+
+CVE_STATUS[CVE-2017-8797] = "fixed-version: Fixed from version 4.12rc1"
+
+CVE_STATUS[CVE-2017-8824] = "fixed-version: Fixed from version 4.15rc3"
+
+CVE_STATUS[CVE-2017-8831] = "fixed-version: Fixed from version 4.13rc1"
+
+CVE_STATUS[CVE-2017-8890] = "fixed-version: Fixed from version 4.12rc1"
+
+CVE_STATUS[CVE-2017-8924] = "fixed-version: Fixed from version 4.11rc2"
+
+CVE_STATUS[CVE-2017-8925] = "fixed-version: Fixed from version 4.11rc2"
+
+CVE_STATUS[CVE-2017-9059] = "fixed-version: Fixed from version 4.12rc1"
+
+CVE_STATUS[CVE-2017-9074] = "fixed-version: Fixed from version 4.12rc2"
+
+CVE_STATUS[CVE-2017-9075] = "fixed-version: Fixed from version 4.12rc2"
+
+CVE_STATUS[CVE-2017-9076] = "fixed-version: Fixed from version 4.12rc2"
+
+CVE_STATUS[CVE-2017-9077] = "fixed-version: Fixed from version 4.12rc2"
+
+CVE_STATUS[CVE-2017-9150] = "fixed-version: Fixed from version 4.12rc1"
+
+CVE_STATUS[CVE-2017-9211] = "fixed-version: Fixed from version 4.12rc3"
+
+CVE_STATUS[CVE-2017-9242] = "fixed-version: Fixed from version 4.12rc3"
+
+CVE_STATUS[CVE-2017-9605] = "fixed-version: Fixed from version 4.12rc5"
+
+CVE_STATUS[CVE-2017-9725] = "fixed-version: Fixed from version 4.3rc7"
+
+CVE_STATUS[CVE-2017-9984] = "fixed-version: Fixed from version 4.13rc1"
+
+CVE_STATUS[CVE-2017-9985] = "fixed-version: Fixed from version 4.13rc1"
+
+CVE_STATUS[CVE-2017-9986] = "fixed-version: Fixed from version 4.15rc1"
+
+CVE_STATUS[CVE-2018-1000004] = "fixed-version: Fixed from version 4.15rc9"
+
+CVE_STATUS[CVE-2018-1000026] = "fixed-version: Fixed from version 4.16rc1"
+
+CVE_STATUS[CVE-2018-1000028] = "fixed-version: Fixed from version 4.15"
+
+CVE_STATUS[CVE-2018-1000199] = "fixed-version: Fixed from version 4.16"
+
+CVE_STATUS[CVE-2018-1000200] = "fixed-version: Fixed from version 4.17rc5"
+
+CVE_STATUS[CVE-2018-1000204] = "fixed-version: Fixed from version 4.17rc7"
+
+CVE_STATUS[CVE-2018-10021] = "fixed-version: Fixed from version 4.16rc7"
+
+CVE_STATUS[CVE-2018-10074] = "fixed-version: Fixed from version 4.16rc7"
+
+CVE_STATUS[CVE-2018-10087] = "fixed-version: Fixed from version 4.13rc1"
+
+CVE_STATUS[CVE-2018-10124] = "fixed-version: Fixed from version 4.13rc1"
+
+CVE_STATUS[CVE-2018-10322] = "fixed-version: Fixed from version 4.17rc4"
+
+CVE_STATUS[CVE-2018-10323] = "fixed-version: Fixed from version 4.17rc4"
+
+CVE_STATUS[CVE-2018-1065] = "fixed-version: Fixed from version 4.16rc3"
+
+CVE_STATUS[CVE-2018-1066] = "fixed-version: Fixed from version 4.11rc1"
+
+CVE_STATUS[CVE-2018-10675] = "fixed-version: Fixed from version 4.13rc6"
+
+CVE_STATUS[CVE-2018-1068] = "fixed-version: Fixed from version 4.16rc5"
+
+CVE_STATUS[CVE-2018-10840] = "fixed-version: Fixed from version 4.18rc1"
+
+CVE_STATUS[CVE-2018-10853] = "fixed-version: Fixed from version 4.18rc1"
+
+CVE_STATUS[CVE-2018-1087] = "fixed-version: Fixed from version 4.16rc7"
+
+# CVE-2018-10872 has no known resolution
+
+CVE_STATUS[CVE-2018-10876] = "fixed-version: Fixed from version 4.18rc4"
+
+CVE_STATUS[CVE-2018-10877] = "fixed-version: Fixed from version 4.18rc4"
+
+CVE_STATUS[CVE-2018-10878] = "fixed-version: Fixed from version 4.18rc4"
+
+CVE_STATUS[CVE-2018-10879] = "fixed-version: Fixed from version 4.18rc4"
+
+CVE_STATUS[CVE-2018-10880] = "fixed-version: Fixed from version 4.18rc4"
+
+CVE_STATUS[CVE-2018-10881] = "fixed-version: Fixed from version 4.18rc4"
+
+CVE_STATUS[CVE-2018-10882] = "fixed-version: Fixed from version 4.18rc4"
+
+CVE_STATUS[CVE-2018-10883] = "fixed-version: Fixed from version 4.18rc4"
+
+CVE_STATUS[CVE-2018-10901] = "fixed-version: Fixed from version 2.6.36rc1"
+
+CVE_STATUS[CVE-2018-10902] = "fixed-version: Fixed from version 4.18rc6"
+
+CVE_STATUS[CVE-2018-1091] = "fixed-version: Fixed from version 4.14rc2"
+
+CVE_STATUS[CVE-2018-1092] = "fixed-version: Fixed from version 4.17rc1"
+
+CVE_STATUS[CVE-2018-1093] = "fixed-version: Fixed from version 4.17rc1"
+
+CVE_STATUS[CVE-2018-10938] = "fixed-version: Fixed from version 4.13rc5"
+
+CVE_STATUS[CVE-2018-1094] = "fixed-version: Fixed from version 4.17rc1"
+
+CVE_STATUS[CVE-2018-10940] = "fixed-version: Fixed from version 4.17rc3"
+
+CVE_STATUS[CVE-2018-1095] = "fixed-version: Fixed from version 4.17rc1"
+
+CVE_STATUS[CVE-2018-1108] = "fixed-version: Fixed from version 4.17rc2"
+
+CVE_STATUS[CVE-2018-1118] = "fixed-version: Fixed from version 4.18rc1"
+
+CVE_STATUS[CVE-2018-1120] = "fixed-version: Fixed from version 4.17rc6"
+
+# CVE-2018-1121 has no known resolution
+
+CVE_STATUS[CVE-2018-11232] = "fixed-version: Fixed from version 4.11rc1"
+
+CVE_STATUS[CVE-2018-1128] = "fixed-version: Fixed from version 4.19rc1"
+
+CVE_STATUS[CVE-2018-1129] = "fixed-version: Fixed from version 4.19rc1"
+
+CVE_STATUS[CVE-2018-1130] = "fixed-version: Fixed from version 4.16rc7"
+
+CVE_STATUS[CVE-2018-11412] = "fixed-version: Fixed from version 4.18rc1"
+
+CVE_STATUS[CVE-2018-11506] = "fixed-version: Fixed from version 4.17rc7"
+
+CVE_STATUS[CVE-2018-11508] = "fixed-version: Fixed from version 4.17rc5"
+
+# CVE-2018-11987 has no known resolution
+
+CVE_STATUS[CVE-2018-12126] = "fixed-version: Fixed from version 5.2rc1"
+
+CVE_STATUS[CVE-2018-12127] = "fixed-version: Fixed from version 5.2rc1"
+
+CVE_STATUS[CVE-2018-12130] = "fixed-version: Fixed from version 5.2rc1"
+
+CVE_STATUS[CVE-2018-12207] = "fixed-version: Fixed from version 5.4rc2"
+
+CVE_STATUS[CVE-2018-12232] = "fixed-version: Fixed from version 4.18rc1"
+
+CVE_STATUS[CVE-2018-12233] = "fixed-version: Fixed from version 4.18rc2"
+
+CVE_STATUS[CVE-2018-12633] = "fixed-version: Fixed from version 4.18rc1"
+
+CVE_STATUS[CVE-2018-12714] = "fixed-version: Fixed from version 4.18rc2"
+
+CVE_STATUS[CVE-2018-12896] = "fixed-version: Fixed from version 4.19rc1"
+
+CVE_STATUS[CVE-2018-12904] = "fixed-version: Fixed from version 4.18rc1"
+
+# CVE-2018-12928 has no known resolution
+
+# CVE-2018-12929 has no known resolution
+
+# CVE-2018-12930 has no known resolution
+
+# CVE-2018-12931 has no known resolution
+
+CVE_STATUS[CVE-2018-13053] = "fixed-version: Fixed from version 4.19rc1"
+
+CVE_STATUS[CVE-2018-13093] = "fixed-version: Fixed from version 4.18rc1"
+
+CVE_STATUS[CVE-2018-13094] = "fixed-version: Fixed from version 4.18rc1"
+
+CVE_STATUS[CVE-2018-13095] = "fixed-version: Fixed from version 4.18rc3"
+
+CVE_STATUS[CVE-2018-13096] = "fixed-version: Fixed from version 4.19rc1"
+
+CVE_STATUS[CVE-2018-13097] = "fixed-version: Fixed from version 4.19rc1"
+
+CVE_STATUS[CVE-2018-13098] = "fixed-version: Fixed from version 4.19rc1"
+
+CVE_STATUS[CVE-2018-13099] = "fixed-version: Fixed from version 4.19rc1"
+
+CVE_STATUS[CVE-2018-13100] = "fixed-version: Fixed from version 4.19rc1"
+
+CVE_STATUS[CVE-2018-13405] = "fixed-version: Fixed from version 4.18rc4"
+
+CVE_STATUS[CVE-2018-13406] = "fixed-version: Fixed from version 4.18rc1"
+
+CVE_STATUS[CVE-2018-14609] = "fixed-version: Fixed from version 4.19rc1"
+
+CVE_STATUS[CVE-2018-14610] = "fixed-version: Fixed from version 4.19rc1"
+
+CVE_STATUS[CVE-2018-14611] = "fixed-version: Fixed from version 4.19rc1"
+
+CVE_STATUS[CVE-2018-14612] = "fixed-version: Fixed from version 4.19rc1"
+
+CVE_STATUS[CVE-2018-14613] = "fixed-version: Fixed from version 4.19rc1"
+
+CVE_STATUS[CVE-2018-14614] = "fixed-version: Fixed from version 4.19rc1"
+
+CVE_STATUS[CVE-2018-14615] = "fixed-version: Fixed from version 4.19rc1"
+
+CVE_STATUS[CVE-2018-14616] = "fixed-version: Fixed from version 4.19rc1"
+
+CVE_STATUS[CVE-2018-14617] = "fixed-version: Fixed from version 4.19rc1"
+
+CVE_STATUS[CVE-2018-14619] = "fixed-version: Fixed from version 4.15rc4"
+
+CVE_STATUS[CVE-2018-14625] = "fixed-version: Fixed from version 4.20rc6"
+
+CVE_STATUS[CVE-2018-14633] = "fixed-version: Fixed from version 4.19rc6"
+
+CVE_STATUS[CVE-2018-14634] = "fixed-version: Fixed from version 4.13rc1"
+
+CVE_STATUS[CVE-2018-14641] = "fixed-version: Fixed from version 4.19rc4"
+
+CVE_STATUS[CVE-2018-14646] = "fixed-version: Fixed from version 4.15rc8"
+
+CVE_STATUS[CVE-2018-14656] = "fixed-version: Fixed from version 4.19rc2"
+
+CVE_STATUS[CVE-2018-14678] = "fixed-version: Fixed from version 4.18rc8"
+
+CVE_STATUS[CVE-2018-14734] = "fixed-version: Fixed from version 4.18rc1"
+
+CVE_STATUS[CVE-2018-15471] = "fixed-version: Fixed from version 4.19rc7"
+
+CVE_STATUS[CVE-2018-15572] = "fixed-version: Fixed from version 4.19rc1"
+
+CVE_STATUS[CVE-2018-15594] = "fixed-version: Fixed from version 4.19rc1"
+
+CVE_STATUS[CVE-2018-16276] = "fixed-version: Fixed from version 4.18rc5"
+
+CVE_STATUS[CVE-2018-16597] = "fixed-version: Fixed from version 4.8rc1"
+
+CVE_STATUS[CVE-2018-16658] = "fixed-version: Fixed from version 4.19rc2"
+
+CVE_STATUS[CVE-2018-16862] = "fixed-version: Fixed from version 4.20rc5"
+
+CVE_STATUS[CVE-2018-16871] = "fixed-version: Fixed from version 4.20rc3"
+
+CVE_STATUS[CVE-2018-16880] = "fixed-version: Fixed from version 5.0rc5"
+
+CVE_STATUS[CVE-2018-16882] = "fixed-version: Fixed from version 4.20"
+
+CVE_STATUS[CVE-2018-16884] = "fixed-version: Fixed from version 5.0rc1"
+
+# CVE-2018-16885 has no known resolution
+
+CVE_STATUS[CVE-2018-17182] = "fixed-version: Fixed from version 4.19rc4"
+
+CVE_STATUS[CVE-2018-17972] = "fixed-version: Fixed from version 4.19rc7"
+
+# CVE-2018-17977 has no known resolution
+
+CVE_STATUS[CVE-2018-18021] = "fixed-version: Fixed from version 4.19rc7"
+
+CVE_STATUS[CVE-2018-18281] = "fixed-version: Fixed from version 4.19"
+
+CVE_STATUS[CVE-2018-18386] = "fixed-version: Fixed from version 4.15rc6"
+
+CVE_STATUS[CVE-2018-18397] = "fixed-version: Fixed from version 4.20rc5"
+
+CVE_STATUS[CVE-2018-18445] = "fixed-version: Fixed from version 4.19rc7"
+
+CVE_STATUS[CVE-2018-18559] = "fixed-version: Fixed from version 4.15rc2"
+
+# CVE-2018-18653 has no known resolution
+
+CVE_STATUS[CVE-2018-18690] = "fixed-version: Fixed from version 4.17rc4"
+
+CVE_STATUS[CVE-2018-18710] = "fixed-version: Fixed from version 4.20rc1"
+
+CVE_STATUS[CVE-2018-18955] = "fixed-version: Fixed from version 4.20rc2"
+
+CVE_STATUS[CVE-2018-19406] = "fixed-version: Fixed from version 4.20rc5"
+
+CVE_STATUS[CVE-2018-19407] = "fixed-version: Fixed from version 4.20rc5"
+
+CVE_STATUS[CVE-2018-19824] = "fixed-version: Fixed from version 4.20rc6"
+
+CVE_STATUS[CVE-2018-19854] = "fixed-version: Fixed from version 4.20rc3"
+
+CVE_STATUS[CVE-2018-19985] = "fixed-version: Fixed from version 4.20"
+
+CVE_STATUS[CVE-2018-20169] = "fixed-version: Fixed from version 4.20rc6"
+
+CVE_STATUS[CVE-2018-20449] = "fixed-version: Fixed from version 4.15rc2"
+
+CVE_STATUS[CVE-2018-20509] = "fixed-version: Fixed from version 4.14rc1"
+
+CVE_STATUS[CVE-2018-20510] = "fixed-version: Fixed from version 4.16rc3"
+
+CVE_STATUS[CVE-2018-20511] = "fixed-version: Fixed from version 4.19rc5"
+
+CVE_STATUS[CVE-2018-20669] = "fixed-version: Fixed from version 5.0rc1"
+
+CVE_STATUS[CVE-2018-20784] = "fixed-version: Fixed from version 5.0rc1"
+
+CVE_STATUS[CVE-2018-20836] = "fixed-version: Fixed from version 4.20rc1"
+
+CVE_STATUS[CVE-2018-20854] = "fixed-version: Fixed from version 4.20rc1"
+
+CVE_STATUS[CVE-2018-20855] = "fixed-version: Fixed from version 4.19rc1"
+
+CVE_STATUS[CVE-2018-20856] = "fixed-version: Fixed from version 4.19rc1"
+
+CVE_STATUS[CVE-2018-20961] = "fixed-version: Fixed from version 4.17rc1"
+
+CVE_STATUS[CVE-2018-20976] = "fixed-version: Fixed from version 4.18rc1"
+
+CVE_STATUS[CVE-2018-21008] = "fixed-version: Fixed from version 4.18rc1"
+
+CVE_STATUS[CVE-2018-25015] = "fixed-version: Fixed from version 4.15rc9"
+
+CVE_STATUS[CVE-2018-25020] = "fixed-version: Fixed from version 4.17rc7"
+
+# CVE-2018-3574 has no known resolution
+
+CVE_STATUS[CVE-2018-3620] = "fixed-version: Fixed from version 4.19rc1"
+
+CVE_STATUS[CVE-2018-3639] = "fixed-version: Fixed from version 4.17rc7"
+
+CVE_STATUS[CVE-2018-3646] = "fixed-version: Fixed from version 4.19rc1"
+
+CVE_STATUS[CVE-2018-3665] = "fixed-version: Fixed from version 3.7rc1"
+
+CVE_STATUS[CVE-2018-3693] = "fixed-version: Fixed from version 4.19rc1"
+
+CVE_STATUS[CVE-2018-5332] = "fixed-version: Fixed from version 4.15rc8"
+
+CVE_STATUS[CVE-2018-5333] = "fixed-version: Fixed from version 4.15rc8"
+
+CVE_STATUS[CVE-2018-5344] = "fixed-version: Fixed from version 4.15rc8"
+
+CVE_STATUS[CVE-2018-5390] = "fixed-version: Fixed from version 4.18rc7"
+
+CVE_STATUS[CVE-2018-5391] = "fixed-version: Fixed from version 4.19rc1"
+
+CVE_STATUS[CVE-2018-5703] = "fixed-version: Fixed from version 4.16rc5"
+
+CVE_STATUS[CVE-2018-5750] = "fixed-version: Fixed from version 4.16rc1"
+
+CVE_STATUS[CVE-2018-5803] = "fixed-version: Fixed from version 4.16rc1"
+
+CVE_STATUS[CVE-2018-5814] = "fixed-version: Fixed from version 4.17rc6"
+
+CVE_STATUS[CVE-2018-5848] = "fixed-version: Fixed from version 4.16rc1"
+
+# Skipping CVE-2018-5856, no affected_versions
+
+CVE_STATUS[CVE-2018-5873] = "fixed-version: Fixed from version 4.11rc8"
+
+CVE_STATUS[CVE-2018-5953] = "fixed-version: Fixed from version 4.15rc2"
+
+CVE_STATUS[CVE-2018-5995] = "fixed-version: Fixed from version 4.15rc2"
+
+CVE_STATUS[CVE-2018-6412] = "fixed-version: Fixed from version 4.16rc5"
+
+CVE_STATUS[CVE-2018-6554] = "fixed-version: Fixed from version 4.17rc1"
+
+CVE_STATUS[CVE-2018-6555] = "fixed-version: Fixed from version 4.17rc1"
+
+# CVE-2018-6559 has no known resolution
+
+CVE_STATUS[CVE-2018-6927] = "fixed-version: Fixed from version 4.15rc9"
+
+CVE_STATUS[CVE-2018-7191] = "fixed-version: Fixed from version 4.14rc6"
+
+CVE_STATUS[CVE-2018-7273] = "fixed-version: Fixed from version 4.15rc2"
+
+CVE_STATUS[CVE-2018-7480] = "fixed-version: Fixed from version 4.11rc1"
+
+CVE_STATUS[CVE-2018-7492] = "fixed-version: Fixed from version 4.15rc3"
+
+CVE_STATUS[CVE-2018-7566] = "fixed-version: Fixed from version 4.16rc2"
+
+CVE_STATUS[CVE-2018-7740] = "fixed-version: Fixed from version 4.16rc7"
+
+CVE_STATUS[CVE-2018-7754] = "fixed-version: Fixed from version 4.15rc2"
+
+CVE_STATUS[CVE-2018-7755] = "fixed-version: Fixed from version 4.19rc5"
+
+CVE_STATUS[CVE-2018-7757] = "fixed-version: Fixed from version 4.16rc1"
+
+CVE_STATUS[CVE-2018-7995] = "fixed-version: Fixed from version 4.16rc5"
+
+CVE_STATUS[CVE-2018-8043] = "fixed-version: Fixed from version 4.16rc1"
+
+CVE_STATUS[CVE-2018-8087] = "fixed-version: Fixed from version 4.16rc1"
+
+CVE_STATUS[CVE-2018-8781] = "fixed-version: Fixed from version 4.16rc7"
+
+CVE_STATUS[CVE-2018-8822] = "fixed-version: Fixed from version 4.16rc7"
+
+CVE_STATUS[CVE-2018-8897] = "fixed-version: Fixed from version 4.16rc7"
+
+CVE_STATUS[CVE-2018-9363] = "fixed-version: Fixed from version 4.19rc1"
+
+CVE_STATUS[CVE-2018-9385] = "fixed-version: Fixed from version 4.17rc3"
+
+CVE_STATUS[CVE-2018-9415] = "fixed-version: Fixed from version 4.17rc3"
+
+CVE_STATUS[CVE-2018-9422] = "fixed-version: Fixed from version 4.6rc1"
+
+CVE_STATUS[CVE-2018-9465] = "fixed-version: Fixed from version 4.15rc6"
+
+CVE_STATUS[CVE-2018-9516] = "fixed-version: Fixed from version 4.18rc5"
+
+CVE_STATUS[CVE-2018-9517] = "fixed-version: Fixed from version 4.14rc1"
+
+CVE_STATUS[CVE-2018-9518] = "fixed-version: Fixed from version 4.16rc3"
+
+CVE_STATUS[CVE-2018-9568] = "fixed-version: Fixed from version 4.14rc4"
+
+CVE_STATUS[CVE-2019-0136] = "fixed-version: Fixed from version 5.2rc6"
+
+CVE_STATUS[CVE-2019-0145] = "fixed-version: Fixed from version 5.2rc1"
+
+CVE_STATUS[CVE-2019-0146] = "fixed-version: Fixed from version 5.2rc1"
+
+CVE_STATUS[CVE-2019-0147] = "fixed-version: Fixed from version 5.2rc1"
+
+CVE_STATUS[CVE-2019-0148] = "fixed-version: Fixed from version 5.2rc1"
+
+CVE_STATUS[CVE-2019-0149] = "fixed-version: Fixed from version 5.3rc1"
+
+CVE_STATUS[CVE-2019-0154] = "fixed-version: Fixed from version 5.4rc8"
+
+CVE_STATUS[CVE-2019-0155] = "fixed-version: Fixed from version 5.4rc8"
+
+CVE_STATUS[CVE-2019-10124] = "fixed-version: Fixed from version 5.1rc1"
+
+CVE_STATUS[CVE-2019-10125] = "fixed-version: Fixed from version 5.1rc1"
+
+CVE_STATUS[CVE-2019-10126] = "fixed-version: Fixed from version 5.2rc6"
+
+# CVE-2019-10140 has no known resolution
+
+CVE_STATUS[CVE-2019-10142] = "fixed-version: Fixed from version 5.2rc1"
+
+CVE_STATUS[CVE-2019-10207] = "fixed-version: Fixed from version 5.3rc3"
+
+CVE_STATUS[CVE-2019-10220] = "fixed-version: Fixed from version 5.4rc2"
+
+CVE_STATUS[CVE-2019-10638] = "fixed-version: Fixed from version 5.2rc1"
+
+CVE_STATUS[CVE-2019-10639] = "fixed-version: Fixed from version 5.1rc4"
+
+CVE_STATUS[CVE-2019-11085] = "fixed-version: Fixed from version 5.0rc3"
+
+CVE_STATUS[CVE-2019-11091] = "fixed-version: Fixed from version 5.2rc1"
+
+CVE_STATUS[CVE-2019-11135] = "fixed-version: Fixed from version 5.4rc8"
+
+CVE_STATUS[CVE-2019-11190] = "fixed-version: Fixed from version 4.8rc5"
+
+CVE_STATUS[CVE-2019-11191] = "fixed-version: Fixed from version 5.1rc1"
+
+CVE_STATUS[CVE-2019-1125] = "fixed-version: Fixed from version 5.3rc4"
+
+CVE_STATUS[CVE-2019-11477] = "fixed-version: Fixed from version 5.2rc6"
+
+CVE_STATUS[CVE-2019-11478] = "fixed-version: Fixed from version 5.2rc6"
+
+CVE_STATUS[CVE-2019-11479] = "fixed-version: Fixed from version 5.2rc6"
+
+CVE_STATUS[CVE-2019-11486] = "fixed-version: Fixed from version 5.1rc4"
+
+CVE_STATUS[CVE-2019-11487] = "fixed-version: Fixed from version 5.1rc5"
+
+CVE_STATUS[CVE-2019-11599] = "fixed-version: Fixed from version 5.1rc6"
+
+CVE_STATUS[CVE-2019-11683] = "fixed-version: Fixed from version 5.1"
+
+CVE_STATUS[CVE-2019-11810] = "fixed-version: Fixed from version 5.1rc1"
+
+CVE_STATUS[CVE-2019-11811] = "fixed-version: Fixed from version 5.1rc1"
+
+CVE_STATUS[CVE-2019-11815] = "fixed-version: Fixed from version 5.1rc4"
+
+CVE_STATUS[CVE-2019-11833] = "fixed-version: Fixed from version 5.2rc1"
+
+CVE_STATUS[CVE-2019-11884] = "fixed-version: Fixed from version 5.2rc1"
+
+CVE_STATUS[CVE-2019-12378] = "fixed-version: Fixed from version 5.2rc3"
+
+CVE_STATUS[CVE-2019-12379] = "fixed-version: Fixed from version 5.3rc1"
+
+CVE_STATUS[CVE-2019-12380] = "fixed-version: Fixed from version 5.2rc3"
+
+CVE_STATUS[CVE-2019-12381] = "fixed-version: Fixed from version 5.2rc3"
+
+CVE_STATUS[CVE-2019-12382] = "fixed-version: Fixed from version 5.3rc1"
+
+CVE_STATUS[CVE-2019-12454] = "fixed-version: Fixed from version 5.3rc1"
+
+CVE_STATUS[CVE-2019-12455] = "fixed-version: Fixed from version 5.3rc1"
+
+# CVE-2019-12456 has no known resolution
+
+CVE_STATUS[CVE-2019-12614] = "fixed-version: Fixed from version 5.3rc1"
+
+CVE_STATUS[CVE-2019-12615] = "fixed-version: Fixed from version 5.2rc4"
+
+CVE_STATUS[CVE-2019-12817] = "fixed-version: Fixed from version 5.2rc7"
+
+CVE_STATUS[CVE-2019-12818] = "fixed-version: Fixed from version 5.0"
+
+CVE_STATUS[CVE-2019-12819] = "fixed-version: Fixed from version 5.0rc8"
+
+CVE_STATUS[CVE-2019-12881] = "fixed-version: Fixed from version 4.18rc1"
+
+CVE_STATUS[CVE-2019-12984] = "fixed-version: Fixed from version 5.2rc6"
+
+CVE_STATUS[CVE-2019-13233] = "fixed-version: Fixed from version 5.2rc4"
+
+CVE_STATUS[CVE-2019-13272] = "fixed-version: Fixed from version 5.2"
+
+CVE_STATUS[CVE-2019-13631] = "fixed-version: Fixed from version 5.3rc1"
+
+CVE_STATUS[CVE-2019-13648] = "fixed-version: Fixed from version 5.3rc2"
+
+CVE_STATUS[CVE-2019-14283] = "fixed-version: Fixed from version 5.3rc1"
+
+CVE_STATUS[CVE-2019-14284] = "fixed-version: Fixed from version 5.3rc1"
+
+CVE_STATUS[CVE-2019-14615] = "fixed-version: Fixed from version 5.5rc7"
+
+CVE_STATUS[CVE-2019-14763] = "fixed-version: Fixed from version 4.17rc1"
+
+CVE_STATUS[CVE-2019-14814] = "fixed-version: Fixed from version 5.3"
+
+CVE_STATUS[CVE-2019-14815] = "fixed-version: Fixed from version 5.3"
+
+CVE_STATUS[CVE-2019-14816] = "fixed-version: Fixed from version 5.3"
+
+CVE_STATUS[CVE-2019-14821] = "fixed-version: Fixed from version 5.4rc1"
+
+CVE_STATUS[CVE-2019-14835] = "fixed-version: Fixed from version 5.3"
+
+CVE_STATUS[CVE-2019-14895] = "fixed-version: Fixed from version 5.5rc3"
+
+CVE_STATUS[CVE-2019-14896] = "fixed-version: Fixed from version 5.5"
+
+CVE_STATUS[CVE-2019-14897] = "fixed-version: Fixed from version 5.5"
+
+# CVE-2019-14898 has no known resolution
+
+CVE_STATUS[CVE-2019-14901] = "fixed-version: Fixed from version 5.5rc3"
+
+CVE_STATUS[CVE-2019-15030] = "fixed-version: Fixed from version 5.3rc8"
+
+CVE_STATUS[CVE-2019-15031] = "fixed-version: Fixed from version 5.3rc8"
+
+CVE_STATUS[CVE-2019-15090] = "fixed-version: Fixed from version 5.2rc2"
+
+CVE_STATUS[CVE-2019-15098] = "fixed-version: Fixed from version 5.4rc1"
+
+CVE_STATUS[CVE-2019-15099] = "fixed-version: Fixed from version 5.5rc1"
+
+CVE_STATUS[CVE-2019-15117] = "fixed-version: Fixed from version 5.3rc5"
+
+CVE_STATUS[CVE-2019-15118] = "fixed-version: Fixed from version 5.3rc5"
+
+CVE_STATUS[CVE-2019-15211] = "fixed-version: Fixed from version 5.3rc1"
+
+CVE_STATUS[CVE-2019-15212] = "fixed-version: Fixed from version 5.2rc3"
+
+CVE_STATUS[CVE-2019-15213] = "fixed-version: Fixed from version 5.3rc1"
+
+CVE_STATUS[CVE-2019-15214] = "fixed-version: Fixed from version 5.1rc6"
+
+CVE_STATUS[CVE-2019-15215] = "fixed-version: Fixed from version 5.3rc1"
+
+CVE_STATUS[CVE-2019-15216] = "fixed-version: Fixed from version 5.1"
+
+CVE_STATUS[CVE-2019-15217] = "fixed-version: Fixed from version 5.3rc1"
+
+CVE_STATUS[CVE-2019-15218] = "fixed-version: Fixed from version 5.2rc3"
+
+CVE_STATUS[CVE-2019-15219] = "fixed-version: Fixed from version 5.2rc3"
+
+CVE_STATUS[CVE-2019-15220] = "fixed-version: Fixed from version 5.3rc1"
+
+CVE_STATUS[CVE-2019-15221] = "fixed-version: Fixed from version 5.2"
+
+CVE_STATUS[CVE-2019-15222] = "fixed-version: Fixed from version 5.3rc3"
+
+CVE_STATUS[CVE-2019-15223] = "fixed-version: Fixed from version 5.2rc3"
+
+# CVE-2019-15239 has no known resolution
+
+# CVE-2019-15290 has no known resolution
+
+CVE_STATUS[CVE-2019-15291] = "fixed-version: Fixed from version 5.5rc1"
+
+CVE_STATUS[CVE-2019-15292] = "fixed-version: Fixed from version 5.1rc1"
+
+CVE_STATUS[CVE-2019-15504] = "fixed-version: Fixed from version 5.3"
+
+CVE_STATUS[CVE-2019-15505] = "fixed-version: Fixed from version 5.4rc1"
+
+CVE_STATUS[CVE-2019-15538] = "fixed-version: Fixed from version 5.3rc6"
+
+CVE_STATUS[CVE-2019-15666] = "fixed-version: Fixed from version 5.1"
+
+# CVE-2019-15791 has no known resolution
+
+# CVE-2019-15792 has no known resolution
+
+# CVE-2019-15793 has no known resolution
+
+CVE_STATUS[CVE-2019-15794] = "fixed-version: Fixed from version 5.12"
+
+CVE_STATUS[CVE-2019-15807] = "fixed-version: Fixed from version 5.2rc3"
+
+# CVE-2019-15902 has no known resolution
+
+CVE_STATUS[CVE-2019-15916] = "fixed-version: Fixed from version 5.1rc1"
+
+CVE_STATUS[CVE-2019-15917] = "fixed-version: Fixed from version 5.1rc1"
+
+CVE_STATUS[CVE-2019-15918] = "fixed-version: Fixed from version 5.1rc6"
+
+CVE_STATUS[CVE-2019-15919] = "fixed-version: Fixed from version 5.1rc6"
+
+CVE_STATUS[CVE-2019-15920] = "fixed-version: Fixed from version 5.1rc6"
+
+CVE_STATUS[CVE-2019-15921] = "fixed-version: Fixed from version 5.1rc3"
+
+CVE_STATUS[CVE-2019-15922] = "fixed-version: Fixed from version 5.1rc4"
+
+CVE_STATUS[CVE-2019-15923] = "fixed-version: Fixed from version 5.1rc4"
+
+CVE_STATUS[CVE-2019-15924] = "fixed-version: Fixed from version 5.1rc4"
+
+CVE_STATUS[CVE-2019-15925] = "fixed-version: Fixed from version 5.3rc1"
+
+CVE_STATUS[CVE-2019-15926] = "fixed-version: Fixed from version 5.3rc1"
+
+CVE_STATUS[CVE-2019-15927] = "fixed-version: Fixed from version 5.0rc2"
+
+# CVE-2019-16089 has no known resolution
+
+CVE_STATUS[CVE-2019-16229] = "fixed-version: Fixed from version 5.5rc1"
+
+CVE_STATUS[CVE-2019-16230] = "fixed-version: Fixed from version 5.5rc1"
+
+CVE_STATUS[CVE-2019-16231] = "fixed-version: Fixed from version 5.4rc6"
+
+CVE_STATUS[CVE-2019-16232] = "fixed-version: Fixed from version 5.5rc1"
+
+CVE_STATUS[CVE-2019-16233] = "fixed-version: Fixed from version 5.4rc5"
+
+CVE_STATUS[CVE-2019-16234] = "fixed-version: Fixed from version 5.4rc4"
+
+CVE_STATUS[CVE-2019-16413] = "fixed-version: Fixed from version 5.1rc1"
+
+CVE_STATUS[CVE-2019-16714] = "fixed-version: Fixed from version 5.3rc7"
+
+CVE_STATUS[CVE-2019-16746] = "fixed-version: Fixed from version 5.4rc2"
+
+CVE_STATUS[CVE-2019-16921] = "fixed-version: Fixed from version 4.17rc1"
+
+CVE_STATUS[CVE-2019-16994] = "fixed-version: Fixed from version 5.0"
+
+CVE_STATUS[CVE-2019-16995] = "fixed-version: Fixed from version 5.1rc1"
+
+CVE_STATUS[CVE-2019-17052] = "fixed-version: Fixed from version 5.4rc1"
+
+CVE_STATUS[CVE-2019-17053] = "fixed-version: Fixed from version 5.4rc1"
+
+CVE_STATUS[CVE-2019-17054] = "fixed-version: Fixed from version 5.4rc1"
+
+CVE_STATUS[CVE-2019-17055] = "fixed-version: Fixed from version 5.4rc1"
+
+CVE_STATUS[CVE-2019-17056] = "fixed-version: Fixed from version 5.4rc1"
+
+CVE_STATUS[CVE-2019-17075] = "fixed-version: Fixed from version 5.4rc3"
+
+CVE_STATUS[CVE-2019-17133] = "fixed-version: Fixed from version 5.4rc4"
+
+CVE_STATUS[CVE-2019-17351] = "fixed-version: Fixed from version 5.3rc1"
+
+CVE_STATUS[CVE-2019-17666] = "fixed-version: Fixed from version 5.4rc6"
+
+CVE_STATUS[CVE-2019-18198] = "fixed-version: Fixed from version 5.4rc1"
+
+CVE_STATUS[CVE-2019-18282] = "fixed-version: Fixed from version 5.4rc6"
+
+CVE_STATUS[CVE-2019-18660] = "fixed-version: Fixed from version 5.5rc1"
+
+CVE_STATUS[CVE-2019-18675] = "fixed-version: Fixed from version 4.17rc5"
+
+# CVE-2019-18680 has no known resolution
+
+CVE_STATUS[CVE-2019-18683] = "fixed-version: Fixed from version 5.5rc1"
+
+CVE_STATUS[CVE-2019-18786] = "fixed-version: Fixed from version 5.5rc1"
+
+CVE_STATUS[CVE-2019-18805] = "fixed-version: Fixed from version 5.1rc7"
+
+CVE_STATUS[CVE-2019-18806] = "fixed-version: Fixed from version 5.4rc2"
+
+CVE_STATUS[CVE-2019-18807] = "fixed-version: Fixed from version 5.4rc2"
+
+CVE_STATUS[CVE-2019-18808] = "fixed-version: Fixed from version 5.5rc1"
+
+CVE_STATUS[CVE-2019-18809] = "fixed-version: Fixed from version 5.5rc1"
+
+CVE_STATUS[CVE-2019-18810] = "fixed-version: Fixed from version 5.4rc2"
+
+CVE_STATUS[CVE-2019-18811] = "fixed-version: Fixed from version 5.4rc7"
+
+CVE_STATUS[CVE-2019-18812] = "fixed-version: Fixed from version 5.4rc7"
+
+CVE_STATUS[CVE-2019-18813] = "fixed-version: Fixed from version 5.4rc6"
+
+CVE_STATUS[CVE-2019-18814] = "fixed-version: Fixed from version 5.7rc7"
+
+CVE_STATUS[CVE-2019-18885] = "fixed-version: Fixed from version 5.1rc1"
+
+CVE_STATUS[CVE-2019-19036] = "fixed-version: Fixed from version 5.4rc1"
+
+CVE_STATUS[CVE-2019-19037] = "fixed-version: Fixed from version 5.5rc3"
+
+CVE_STATUS[CVE-2019-19039] = "fixed-version: Fixed from version 5.7rc1"
+
+CVE_STATUS[CVE-2019-19043] = "fixed-version: Fixed from version 5.5rc1"
+
+CVE_STATUS[CVE-2019-19044] = "fixed-version: Fixed from version 5.4rc6"
+
+CVE_STATUS[CVE-2019-19045] = "fixed-version: Fixed from version 5.4rc6"
+
+CVE_STATUS[CVE-2019-19046] = "fixed-version: Fixed from version 5.5rc1"
+
+CVE_STATUS[CVE-2019-19047] = "fixed-version: Fixed from version 5.4rc6"
+
+CVE_STATUS[CVE-2019-19048] = "fixed-version: Fixed from version 5.4rc3"
+
+CVE_STATUS[CVE-2019-19049] = "fixed-version: Fixed from version 5.4rc5"
+
+CVE_STATUS[CVE-2019-19050] = "fixed-version: Fixed from version 5.5rc1"
+
+CVE_STATUS[CVE-2019-19051] = "fixed-version: Fixed from version 5.4rc6"
+
+CVE_STATUS[CVE-2019-19052] = "fixed-version: Fixed from version 5.4rc7"
+
+CVE_STATUS[CVE-2019-19053] = "fixed-version: Fixed from version 5.5rc1"
+
+CVE_STATUS[CVE-2019-19054] = "fixed-version: Fixed from version 5.5rc1"
+
+CVE_STATUS[CVE-2019-19055] = "fixed-version: Fixed from version 5.4rc4"
+
+CVE_STATUS[CVE-2019-19056] = "fixed-version: Fixed from version 5.5rc1"
+
+CVE_STATUS[CVE-2019-19057] = "fixed-version: Fixed from version 5.5rc1"
+
+CVE_STATUS[CVE-2019-19058] = "fixed-version: Fixed from version 5.4rc4"
+
+CVE_STATUS[CVE-2019-19059] = "fixed-version: Fixed from version 5.4rc4"
+
+CVE_STATUS[CVE-2019-19060] = "fixed-version: Fixed from version 5.4rc3"
+
+CVE_STATUS[CVE-2019-19061] = "fixed-version: Fixed from version 5.4rc3"
+
+CVE_STATUS[CVE-2019-19062] = "fixed-version: Fixed from version 5.5rc1"
+
+CVE_STATUS[CVE-2019-19063] = "fixed-version: Fixed from version 5.5rc1"
+
+CVE_STATUS[CVE-2019-19064] = "fixed-version: Fixed from version 5.5rc1"
+
+CVE_STATUS[CVE-2019-19065] = "fixed-version: Fixed from version 5.4rc3"
+
+CVE_STATUS[CVE-2019-19066] = "fixed-version: Fixed from version 5.5rc1"
+
+CVE_STATUS[CVE-2019-19067] = "fixed-version: Fixed from version 5.4rc2"
+
+CVE_STATUS[CVE-2019-19068] = "fixed-version: Fixed from version 5.5rc1"
+
+CVE_STATUS[CVE-2019-19069] = "fixed-version: Fixed from version 5.4rc3"
+
+CVE_STATUS[CVE-2019-19070] = "fixed-version: Fixed from version 5.5rc1"
+
+CVE_STATUS[CVE-2019-19071] = "fixed-version: Fixed from version 5.5rc1"
+
+CVE_STATUS[CVE-2019-19072] = "fixed-version: Fixed from version 5.4rc1"
+
+CVE_STATUS[CVE-2019-19073] = "fixed-version: Fixed from version 5.4rc1"
+
+CVE_STATUS[CVE-2019-19074] = "fixed-version: Fixed from version 5.4rc1"
+
+CVE_STATUS[CVE-2019-19075] = "fixed-version: Fixed from version 5.4rc2"
+
+CVE_STATUS[CVE-2019-19076] = "fixed-version: Fixed from version 5.4rc1"
+
+CVE_STATUS[CVE-2019-19077] = "fixed-version: Fixed from version 5.4rc1"
+
+CVE_STATUS[CVE-2019-19078] = "fixed-version: Fixed from version 5.5rc1"
+
+CVE_STATUS[CVE-2019-19079] = "fixed-version: Fixed from version 5.3"
+
+CVE_STATUS[CVE-2019-19080] = "fixed-version: Fixed from version 5.4rc1"
+
+CVE_STATUS[CVE-2019-19081] = "fixed-version: Fixed from version 5.4rc1"
+
+CVE_STATUS[CVE-2019-19082] = "fixed-version: Fixed from version 5.4rc1"
+
+CVE_STATUS[CVE-2019-19083] = "fixed-version: Fixed from version 5.4rc2"
+
+CVE_STATUS[CVE-2019-19227] = "fixed-version: Fixed from version 5.1rc3"
+
+CVE_STATUS[CVE-2019-19241] = "fixed-version: Fixed from version 5.5rc1"
+
+CVE_STATUS[CVE-2019-19252] = "fixed-version: Fixed from version 5.5rc1"
+
+CVE_STATUS[CVE-2019-19318] = "fixed-version: Fixed from version 5.4rc1"
+
+CVE_STATUS[CVE-2019-19319] = "fixed-version: Fixed from version 5.2rc1"
+
+CVE_STATUS[CVE-2019-19332] = "fixed-version: Fixed from version 5.5rc1"
+
+CVE_STATUS[CVE-2019-19338] = "fixed-version: Fixed from version 5.5rc1"
+
+CVE_STATUS[CVE-2019-19377] = "fixed-version: Fixed from version 5.7rc1"
+
+# CVE-2019-19378 has no known resolution
+
+CVE_STATUS[CVE-2019-19447] = "fixed-version: Fixed from version 5.5rc1"
+
+CVE_STATUS[CVE-2019-19448] = "fixed-version: Fixed from version 5.9rc1"
+
+CVE_STATUS[CVE-2019-19449] = "fixed-version: Fixed from version 5.10rc1"
+
+CVE_STATUS[CVE-2019-19462] = "fixed-version: Fixed from version 5.8rc1"
+
+CVE_STATUS[CVE-2019-19523] = "fixed-version: Fixed from version 5.4rc3"
+
+CVE_STATUS[CVE-2019-19524] = "fixed-version: Fixed from version 5.4rc8"
+
+CVE_STATUS[CVE-2019-19525] = "fixed-version: Fixed from version 5.4rc2"
+
+CVE_STATUS[CVE-2019-19526] = "fixed-version: Fixed from version 5.4rc4"
+
+CVE_STATUS[CVE-2019-19527] = "fixed-version: Fixed from version 5.3rc4"
+
+CVE_STATUS[CVE-2019-19528] = "fixed-version: Fixed from version 5.4rc3"
+
+CVE_STATUS[CVE-2019-19529] = "fixed-version: Fixed from version 5.4rc7"
+
+CVE_STATUS[CVE-2019-19530] = "fixed-version: Fixed from version 5.3rc5"
+
+CVE_STATUS[CVE-2019-19531] = "fixed-version: Fixed from version 5.3rc4"
+
+CVE_STATUS[CVE-2019-19532] = "fixed-version: Fixed from version 5.4rc6"
+
+CVE_STATUS[CVE-2019-19533] = "fixed-version: Fixed from version 5.4rc1"
+
+CVE_STATUS[CVE-2019-19534] = "fixed-version: Fixed from version 5.4rc7"
+
+CVE_STATUS[CVE-2019-19535] = "fixed-version: Fixed from version 5.3rc4"
+
+CVE_STATUS[CVE-2019-19536] = "fixed-version: Fixed from version 5.3rc4"
+
+CVE_STATUS[CVE-2019-19537] = "fixed-version: Fixed from version 5.3rc5"
+
+CVE_STATUS[CVE-2019-19543] = "fixed-version: Fixed from version 5.2rc1"
+
+CVE_STATUS[CVE-2019-19602] = "fixed-version: Fixed from version 5.5rc1"
+
+CVE_STATUS[CVE-2019-19767] = "fixed-version: Fixed from version 5.5rc1"
+
+CVE_STATUS[CVE-2019-19768] = "fixed-version: Fixed from version 5.6rc4"
+
+CVE_STATUS[CVE-2019-19769] = "fixed-version: Fixed from version 5.6rc5"
+
+CVE_STATUS[CVE-2019-19770] = "fixed-version: Fixed from version 5.9rc1"
+
+CVE_STATUS[CVE-2019-19807] = "fixed-version: Fixed from version 5.4rc7"
+
+CVE_STATUS[CVE-2019-19813] = "fixed-version: Fixed from version 5.2rc1"
+
+# CVE-2019-19814 has no known resolution
+
+CVE_STATUS[CVE-2019-19815] = "fixed-version: Fixed from version 5.3rc1"
+
+CVE_STATUS[CVE-2019-19816] = "fixed-version: Fixed from version 5.2rc1"
+
+CVE_STATUS[CVE-2019-19922] = "fixed-version: Fixed from version 5.4rc1"
+
+CVE_STATUS[CVE-2019-19927] = "fixed-version: Fixed from version 5.1rc6"
+
+CVE_STATUS[CVE-2019-19947] = "fixed-version: Fixed from version 5.5rc3"
+
+CVE_STATUS[CVE-2019-19965] = "fixed-version: Fixed from version 5.5rc2"
+
+CVE_STATUS[CVE-2019-19966] = "fixed-version: Fixed from version 5.2rc1"
+
+CVE_STATUS[CVE-2019-1999] = "fixed-version: Fixed from version 5.1rc3"
+
+CVE_STATUS[CVE-2019-20054] = "fixed-version: Fixed from version 5.1rc3"
+
+CVE_STATUS[CVE-2019-20095] = "fixed-version: Fixed from version 5.2rc1"
+
+CVE_STATUS[CVE-2019-20096] = "fixed-version: Fixed from version 5.1rc4"
+
+CVE_STATUS[CVE-2019-2024] = "fixed-version: Fixed from version 4.16rc1"
+
+CVE_STATUS[CVE-2019-2025] = "fixed-version: Fixed from version 4.20rc5"
+
+CVE_STATUS[CVE-2019-20422] = "fixed-version: Fixed from version 5.4rc1"
+
+CVE_STATUS[CVE-2019-2054] = "fixed-version: Fixed from version 4.8rc1"
+
+CVE_STATUS[CVE-2019-20636] = "fixed-version: Fixed from version 5.5rc6"
+
+# CVE-2019-20794 has no known resolution
+
+CVE_STATUS[CVE-2019-20806] = "fixed-version: Fixed from version 5.2rc1"
+
+CVE_STATUS[CVE-2019-20810] = "fixed-version: Fixed from version 5.6rc1"
+
+CVE_STATUS[CVE-2019-20811] = "fixed-version: Fixed from version 5.1rc3"
+
+CVE_STATUS[CVE-2019-20812] = "fixed-version: Fixed from version 5.5rc3"
+
+CVE_STATUS[CVE-2019-20908] = "fixed-version: Fixed from version 5.4rc1"
+
+CVE_STATUS[CVE-2019-20934] = "fixed-version: Fixed from version 5.3rc2"
+
+CVE_STATUS[CVE-2019-2101] = "fixed-version: Fixed from version 5.1rc1"
+
+CVE_STATUS[CVE-2019-2181] = "fixed-version: Fixed from version 5.2rc1"
+
+CVE_STATUS[CVE-2019-2182] = "fixed-version: Fixed from version 4.16rc3"
+
+CVE_STATUS[CVE-2019-2213] = "fixed-version: Fixed from version 5.2rc6"
+
+CVE_STATUS[CVE-2019-2214] = "fixed-version: Fixed from version 5.3rc2"
+
+CVE_STATUS[CVE-2019-2215] = "fixed-version: Fixed from version 4.16rc1"
+
+CVE_STATUS[CVE-2019-25044] = "fixed-version: Fixed from version 5.2rc4"
+
+CVE_STATUS[CVE-2019-25045] = "fixed-version: Fixed from version 5.1"
+
+CVE_STATUS[CVE-2019-3016] = "fixed-version: Fixed from version 5.6rc1"
+
+CVE_STATUS[CVE-2019-3459] = "fixed-version: Fixed from version 5.1rc1"
+
+CVE_STATUS[CVE-2019-3460] = "fixed-version: Fixed from version 5.1rc1"
+
+CVE_STATUS[CVE-2019-3701] = "fixed-version: Fixed from version 5.0rc3"
+
+CVE_STATUS[CVE-2019-3819] = "fixed-version: Fixed from version 5.0rc6"
+
+CVE_STATUS[CVE-2019-3837] = "fixed-version: Fixed from version 3.18rc1"
+
+CVE_STATUS[CVE-2019-3846] = "fixed-version: Fixed from version 5.2rc6"
+
+CVE_STATUS[CVE-2019-3874] = "fixed-version: Fixed from version 5.2rc1"
+
+CVE_STATUS[CVE-2019-3882] = "fixed-version: Fixed from version 5.1rc4"
+
+CVE_STATUS[CVE-2019-3887] = "fixed-version: Fixed from version 5.1rc4"
+
+CVE_STATUS[CVE-2019-3892] = "fixed-version: Fixed from version 5.1rc6"
+
+CVE_STATUS[CVE-2019-3896] = "fixed-version: Fixed from version 2.6.35rc1"
+
+CVE_STATUS[CVE-2019-3900] = "fixed-version: Fixed from version 5.2rc4"
+
+CVE_STATUS[CVE-2019-3901] = "fixed-version: Fixed from version 4.6rc6"
+
+CVE_STATUS[CVE-2019-5108] = "fixed-version: Fixed from version 5.3"
+
+# Skipping CVE-2019-5489, no affected_versions
+
+CVE_STATUS[CVE-2019-6133] = "fixed-version: Fixed from version 5.0rc2"
+
+CVE_STATUS[CVE-2019-6974] = "fixed-version: Fixed from version 5.0rc6"
+
+CVE_STATUS[CVE-2019-7221] = "fixed-version: Fixed from version 5.0rc6"
+
+CVE_STATUS[CVE-2019-7222] = "fixed-version: Fixed from version 5.0rc6"
+
+CVE_STATUS[CVE-2019-7308] = "fixed-version: Fixed from version 5.0rc3"
+
+CVE_STATUS[CVE-2019-8912] = "fixed-version: Fixed from version 5.0rc8"
+
+CVE_STATUS[CVE-2019-8956] = "fixed-version: Fixed from version 5.0rc6"
+
+CVE_STATUS[CVE-2019-8980] = "fixed-version: Fixed from version 5.1rc1"
+
+CVE_STATUS[CVE-2019-9003] = "fixed-version: Fixed from version 5.0rc4"
+
+CVE_STATUS[CVE-2019-9162] = "fixed-version: Fixed from version 5.0rc7"
+
+CVE_STATUS[CVE-2019-9213] = "fixed-version: Fixed from version 5.0"
+
+CVE_STATUS[CVE-2019-9245] = "fixed-version: Fixed from version 5.0rc1"
+
+CVE_STATUS[CVE-2019-9444] = "fixed-version: Fixed from version 4.15rc2"
+
+CVE_STATUS[CVE-2019-9445] = "fixed-version: Fixed from version 5.1rc1"
+
+CVE_STATUS[CVE-2019-9453] = "fixed-version: Fixed from version 5.2rc1"
+
+CVE_STATUS[CVE-2019-9454] = "fixed-version: Fixed from version 4.15rc9"
+
+CVE_STATUS[CVE-2019-9455] = "fixed-version: Fixed from version 5.0rc1"
+
+CVE_STATUS[CVE-2019-9456] = "fixed-version: Fixed from version 4.16rc6"
+
+CVE_STATUS[CVE-2019-9457] = "fixed-version: Fixed from version 4.13rc1"
+
+CVE_STATUS[CVE-2019-9458] = "fixed-version: Fixed from version 4.19rc7"
+
+CVE_STATUS[CVE-2019-9466] = "fixed-version: Fixed from version 5.1rc1"
+
+CVE_STATUS[CVE-2019-9500] = "fixed-version: Fixed from version 5.1rc1"
+
+CVE_STATUS[CVE-2019-9503] = "fixed-version: Fixed from version 5.1rc1"
+
+CVE_STATUS[CVE-2019-9506] = "fixed-version: Fixed from version 5.2"
+
+CVE_STATUS[CVE-2019-9857] = "fixed-version: Fixed from version 5.1rc2"
+
+CVE_STATUS[CVE-2020-0009] = "fixed-version: Fixed from version 5.6rc3"
+
+CVE_STATUS[CVE-2020-0030] = "fixed-version: Fixed from version 4.16rc3"
+
+CVE_STATUS[CVE-2020-0041] = "fixed-version: Fixed from version 5.5rc2"
+
+CVE_STATUS[CVE-2020-0066] = "fixed-version: Fixed from version 4.3rc7"
+
+CVE_STATUS[CVE-2020-0067] = "fixed-version: Fixed from version 5.5rc1"
+
+CVE_STATUS[CVE-2020-0110] = "fixed-version: Fixed from version 5.6rc2"
+
+CVE_STATUS[CVE-2020-0255] = "fixed-version: Fixed from version 5.7rc4"
+
+CVE_STATUS[CVE-2020-0305] = "fixed-version: Fixed from version 5.5rc6"
+
+# CVE-2020-0347 has no known resolution
+
+CVE_STATUS[CVE-2020-0404] = "fixed-version: Fixed from version 5.6rc1"
+
+CVE_STATUS[CVE-2020-0423] = "fixed-version: Fixed from version 5.10rc1"
+
+CVE_STATUS[CVE-2020-0427] = "fixed-version: Fixed from version 5.5rc1"
+
+CVE_STATUS[CVE-2020-0429] = "fixed-version: Fixed from version 4.14rc4"
+
+CVE_STATUS[CVE-2020-0430] = "fixed-version: Fixed from version 4.18rc1"
+
+CVE_STATUS[CVE-2020-0431] = "fixed-version: Fixed from version 5.5rc6"
+
+CVE_STATUS[CVE-2020-0432] = "fixed-version: Fixed from version 5.6rc1"
+
+CVE_STATUS[CVE-2020-0433] = "fixed-version: Fixed from version 4.19rc1"
+
+CVE_STATUS[CVE-2020-0435] = "fixed-version: Fixed from version 4.19rc1"
+
+CVE_STATUS[CVE-2020-0444] = "fixed-version: Fixed from version 5.6rc4"
+
+CVE_STATUS[CVE-2020-0465] = "fixed-version: Fixed from version 5.9rc4"
+
+CVE_STATUS[CVE-2020-0466] = "fixed-version: Fixed from version 5.9rc2"
+
+CVE_STATUS[CVE-2020-0543] = "fixed-version: Fixed from version 5.8rc1"
+
+CVE_STATUS[CVE-2020-10135] = "fixed-version: Fixed from version 5.8rc1"
+
+CVE_STATUS[CVE-2020-10690] = "fixed-version: Fixed from version 5.5rc5"
+
+# CVE-2020-10708 has no known resolution
+
+CVE_STATUS[CVE-2020-10711] = "fixed-version: Fixed from version 5.7rc6"
+
+CVE_STATUS[CVE-2020-10720] = "fixed-version: Fixed from version 5.2rc3"
+
+CVE_STATUS[CVE-2020-10732] = "fixed-version: Fixed from version 5.7"
+
+CVE_STATUS[CVE-2020-10742] = "fixed-version: Fixed from version 3.16rc1"
+
+CVE_STATUS[CVE-2020-10751] = "fixed-version: Fixed from version 5.7rc4"
+
+CVE_STATUS[CVE-2020-10757] = "fixed-version: Fixed from version 5.8rc1"
+
+CVE_STATUS[CVE-2020-10766] = "fixed-version: Fixed from version 5.8rc1"
+
+CVE_STATUS[CVE-2020-10767] = "fixed-version: Fixed from version 5.8rc1"
+
+CVE_STATUS[CVE-2020-10768] = "fixed-version: Fixed from version 5.8rc1"
+
+CVE_STATUS[CVE-2020-10769] = "fixed-version: Fixed from version 5.0rc3"
+
+CVE_STATUS[CVE-2020-10773] = "fixed-version: Fixed from version 5.4rc6"
+
+# CVE-2020-10774 has no known resolution
+
+CVE_STATUS[CVE-2020-10781] = "fixed-version: Fixed from version 5.8rc6"
+
+CVE_STATUS[CVE-2020-10942] = "fixed-version: Fixed from version 5.6rc4"
+
+CVE_STATUS[CVE-2020-11494] = "fixed-version: Fixed from version 5.7rc1"
+
+CVE_STATUS[CVE-2020-11565] = "fixed-version: Fixed from version 5.7rc1"
+
+CVE_STATUS[CVE-2020-11608] = "fixed-version: Fixed from version 5.7rc1"
+
+CVE_STATUS[CVE-2020-11609] = "fixed-version: Fixed from version 5.7rc1"
+
+CVE_STATUS[CVE-2020-11668] = "fixed-version: Fixed from version 5.7rc1"
+
+CVE_STATUS[CVE-2020-11669] = "fixed-version: Fixed from version 5.2rc1"
+
+# CVE-2020-11725 has no known resolution
+
+CVE_STATUS[CVE-2020-11884] = "fixed-version: Fixed from version 5.7rc4"
+
+# CVE-2020-11935 has no known resolution
+
+CVE_STATUS[CVE-2020-12114] = "fixed-version: Fixed from version 5.3rc1"
+
+CVE_STATUS[CVE-2020-12351] = "fixed-version: Fixed from version 5.10rc1"
+
+CVE_STATUS[CVE-2020-12352] = "fixed-version: Fixed from version 5.10rc1"
+
+CVE_STATUS[CVE-2020-12362] = "fixed-version: Fixed from version 5.11rc1"
+
+CVE_STATUS[CVE-2020-12363] = "fixed-version: Fixed from version 5.11rc1"
+
+CVE_STATUS[CVE-2020-12364] = "fixed-version: Fixed from version 5.11rc1"
+
+CVE_STATUS[CVE-2020-12464] = "fixed-version: Fixed from version 5.7rc3"
+
+CVE_STATUS[CVE-2020-12465] = "fixed-version: Fixed from version 5.6rc6"
+
+CVE_STATUS[CVE-2020-12652] = "fixed-version: Fixed from version 5.5rc7"
+
+CVE_STATUS[CVE-2020-12653] = "fixed-version: Fixed from version 5.6rc1"
+
+CVE_STATUS[CVE-2020-12654] = "fixed-version: Fixed from version 5.6rc1"
+
+CVE_STATUS[CVE-2020-12655] = "fixed-version: Fixed from version 5.7rc1"
+
+CVE_STATUS[CVE-2020-12656] = "fixed-version: Fixed from version 5.8rc1"
+
+CVE_STATUS[CVE-2020-12657] = "fixed-version: Fixed from version 5.7rc1"
+
+CVE_STATUS[CVE-2020-12659] = "fixed-version: Fixed from version 5.7rc2"
+
+CVE_STATUS[CVE-2020-12768] = "fixed-version: Fixed from version 5.6rc4"
+
+CVE_STATUS[CVE-2020-12769] = "fixed-version: Fixed from version 5.5rc6"
+
+CVE_STATUS[CVE-2020-12770] = "fixed-version: Fixed from version 5.7rc3"
+
+CVE_STATUS[CVE-2020-12771] = "fixed-version: Fixed from version 5.8rc2"
+
+CVE_STATUS[CVE-2020-12826] = "fixed-version: Fixed from version 5.7rc1"
+
+CVE_STATUS[CVE-2020-12888] = "fixed-version: Fixed from version 5.8rc1"
+
+CVE_STATUS[CVE-2020-12912] = "fixed-version: Fixed from version 5.10rc4"
+
+CVE_STATUS[CVE-2020-13143] = "fixed-version: Fixed from version 5.7rc6"
+
+CVE_STATUS[CVE-2020-13974] = "fixed-version: Fixed from version 5.8rc1"
+
+# CVE-2020-14304 has no known resolution
+
+CVE_STATUS[CVE-2020-14305] = "fixed-version: Fixed from version 4.12rc1"
+
+CVE_STATUS[CVE-2020-14314] = "fixed-version: Fixed from version 5.9rc2"
+
+CVE_STATUS[CVE-2020-14331] = "fixed-version: Fixed from version 5.9rc1"
+
+CVE_STATUS[CVE-2020-14351] = "fixed-version: Fixed from version 5.10rc1"
+
+CVE_STATUS[CVE-2020-14353] = "fixed-version: Fixed from version 4.14rc3"
+
+CVE_STATUS[CVE-2020-14356] = "fixed-version: Fixed from version 5.8rc5"
+
+CVE_STATUS[CVE-2020-14381] = "fixed-version: Fixed from version 5.6rc6"
+
+CVE_STATUS[CVE-2020-14385] = "fixed-version: Fixed from version 5.9rc4"
+
+CVE_STATUS[CVE-2020-14386] = "fixed-version: Fixed from version 5.9rc4"
+
+CVE_STATUS[CVE-2020-14390] = "fixed-version: Fixed from version 5.9rc6"
+
+CVE_STATUS[CVE-2020-14416] = "fixed-version: Fixed from version 5.5"
+
+CVE_STATUS[CVE-2020-15393] = "fixed-version: Fixed from version 5.8rc3"
+
+CVE_STATUS[CVE-2020-15436] = "fixed-version: Fixed from version 5.8rc2"
+
+CVE_STATUS[CVE-2020-15437] = "fixed-version: Fixed from version 5.8rc7"
+
+CVE_STATUS[CVE-2020-15780] = "fixed-version: Fixed from version 5.8rc3"
+
+# CVE-2020-15802 has no known resolution
+
+CVE_STATUS[CVE-2020-15852] = "fixed-version: Fixed from version 5.8rc6"
+
+CVE_STATUS[CVE-2020-16119] = "fixed-version: Fixed from version 5.15rc2"
+
+CVE_STATUS[CVE-2020-16120] = "fixed-version: Fixed from version 5.8rc1"
+
+CVE_STATUS[CVE-2020-16166] = "fixed-version: Fixed from version 5.8"
+
+CVE_STATUS[CVE-2020-1749] = "fixed-version: Fixed from version 5.5rc1"
+
+CVE_STATUS[CVE-2020-24394] = "fixed-version: Fixed from version 5.8rc4"
+
+CVE_STATUS[CVE-2020-24490] = "fixed-version: Fixed from version 5.8"
+
+# CVE-2020-24502 has no known resolution
+
+# CVE-2020-24503 has no known resolution
+
+CVE_STATUS[CVE-2020-24504] = "fixed-version: Fixed from version 5.12rc1"
+
+CVE_STATUS[CVE-2020-24586] = "fixed-version: Fixed from version 5.13rc4"
+
+CVE_STATUS[CVE-2020-24587] = "fixed-version: Fixed from version 5.13rc4"
+
+CVE_STATUS[CVE-2020-24588] = "fixed-version: Fixed from version 5.13rc4"
+
+CVE_STATUS[CVE-2020-25211] = "fixed-version: Fixed from version 5.9rc7"
+
+CVE_STATUS[CVE-2020-25212] = "fixed-version: Fixed from version 5.9rc1"
+
+# CVE-2020-25220 has no known resolution
+
+CVE_STATUS[CVE-2020-25221] = "fixed-version: Fixed from version 5.9rc4"
+
+CVE_STATUS[CVE-2020-25284] = "fixed-version: Fixed from version 5.9rc5"
+
+CVE_STATUS[CVE-2020-25285] = "fixed-version: Fixed from version 5.9rc4"
+
+CVE_STATUS[CVE-2020-25639] = "fixed-version: Fixed from version 5.12rc1"
+
+CVE_STATUS[CVE-2020-25641] = "fixed-version: Fixed from version 5.9rc4"
+
+CVE_STATUS[CVE-2020-25643] = "fixed-version: Fixed from version 5.9rc7"
+
+CVE_STATUS[CVE-2020-25645] = "fixed-version: Fixed from version 5.9rc7"
+
+CVE_STATUS[CVE-2020-25656] = "fixed-version: Fixed from version 5.10rc2"
+
+# CVE-2020-25661 has no known resolution
+
+# CVE-2020-25662 has no known resolution
+
+CVE_STATUS[CVE-2020-25668] = "fixed-version: Fixed from version 5.10rc3"
+
+CVE_STATUS[CVE-2020-25669] = "fixed-version: Fixed from version 5.10rc5"
+
+CVE_STATUS[CVE-2020-25670] = "fixed-version: Fixed from version 5.12rc7"
+
+CVE_STATUS[CVE-2020-25671] = "fixed-version: Fixed from version 5.12rc7"
+
+CVE_STATUS[CVE-2020-25672] = "fixed-version: Fixed from version 5.12rc7"
+
+CVE_STATUS[CVE-2020-25673] = "fixed-version: Fixed from version 5.12rc7"
+
+CVE_STATUS[CVE-2020-25704] = "fixed-version: Fixed from version 5.10rc3"
+
+CVE_STATUS[CVE-2020-25705] = "fixed-version: Fixed from version 5.10rc1"
+
+CVE_STATUS[CVE-2020-26088] = "fixed-version: Fixed from version 5.9rc1"
+
+CVE_STATUS[CVE-2020-26139] = "fixed-version: Fixed from version 5.13rc4"
+
+# CVE-2020-26140 has no known resolution
+
+CVE_STATUS[CVE-2020-26141] = "fixed-version: Fixed from version 5.13rc4"
+
+# CVE-2020-26142 has no known resolution
+
+# CVE-2020-26143 has no known resolution
+
+CVE_STATUS[CVE-2020-26145] = "fixed-version: Fixed from version 5.13rc4"
+
+CVE_STATUS[CVE-2020-26147] = "fixed-version: Fixed from version 5.13rc4"
+
+CVE_STATUS[CVE-2020-26541] = "fixed-version: Fixed from version 5.13rc1"
+
+CVE_STATUS[CVE-2020-26555] = "fixed-version: Fixed from version 5.13rc1"
+
+# CVE-2020-26556 has no known resolution
+
+# CVE-2020-26557 has no known resolution
+
+CVE_STATUS[CVE-2020-26558] = "fixed-version: Fixed from version 5.13rc1"
+
+# CVE-2020-26559 has no known resolution
+
+# CVE-2020-26560 has no known resolution
+
+CVE_STATUS[CVE-2020-27066] = "fixed-version: Fixed from version 5.6"
+
+CVE_STATUS[CVE-2020-27067] = "fixed-version: Fixed from version 4.14rc4"
+
+CVE_STATUS[CVE-2020-27068] = "fixed-version: Fixed from version 5.6rc2"
+
+CVE_STATUS[CVE-2020-27152] = "fixed-version: Fixed from version 5.10rc1"
+
+CVE_STATUS[CVE-2020-27170] = "fixed-version: Fixed from version 5.12rc5"
+
+CVE_STATUS[CVE-2020-27171] = "fixed-version: Fixed from version 5.12rc5"
+
+CVE_STATUS[CVE-2020-27194] = "fixed-version: Fixed from version 5.9"
+
+CVE_STATUS[CVE-2020-2732] = "fixed-version: Fixed from version 5.6rc4"
+
+CVE_STATUS[CVE-2020-27418] = "fixed-version: Fixed from version 5.6rc5"
+
+CVE_STATUS[CVE-2020-27673] = "fixed-version: Fixed from version 5.10rc1"
+
+CVE_STATUS[CVE-2020-27675] = "fixed-version: Fixed from version 5.10rc1"
+
+CVE_STATUS[CVE-2020-27777] = "fixed-version: Fixed from version 5.10rc1"
+
+CVE_STATUS[CVE-2020-27784] = "fixed-version: Fixed from version 5.10rc1"
+
+CVE_STATUS[CVE-2020-27786] = "fixed-version: Fixed from version 5.7rc6"
+
+CVE_STATUS[CVE-2020-27815] = "fixed-version: Fixed from version 5.11rc1"
+
+CVE_STATUS[CVE-2020-27820] = "fixed-version: Fixed from version 5.16rc1"
+
+CVE_STATUS[CVE-2020-27825] = "fixed-version: Fixed from version 5.10rc1"
+
+CVE_STATUS[CVE-2020-27830] = "fixed-version: Fixed from version 5.10rc7"
+
+CVE_STATUS[CVE-2020-27835] = "fixed-version: Fixed from version 5.10rc6"
+
+CVE_STATUS[CVE-2020-28097] = "fixed-version: Fixed from version 5.9rc6"
+
+CVE_STATUS[CVE-2020-28374] = "fixed-version: Fixed from version 5.11rc4"
+
+CVE_STATUS[CVE-2020-28588] = "fixed-version: Fixed from version 5.10rc7"
+
+CVE_STATUS[CVE-2020-28915] = "fixed-version: Fixed from version 5.9"
+
+CVE_STATUS[CVE-2020-28941] = "fixed-version: Fixed from version 5.10rc5"
+
+CVE_STATUS[CVE-2020-28974] = "fixed-version: Fixed from version 5.10rc3"
+
+CVE_STATUS[CVE-2020-29368] = "fixed-version: Fixed from version 5.8rc1"
+
+CVE_STATUS[CVE-2020-29369] = "fixed-version: Fixed from version 5.8rc7"
+
+CVE_STATUS[CVE-2020-29370] = "fixed-version: Fixed from version 5.6rc7"
+
+CVE_STATUS[CVE-2020-29371] = "fixed-version: Fixed from version 5.9rc2"
+
+CVE_STATUS[CVE-2020-29372] = "fixed-version: Fixed from version 5.7rc3"
+
+CVE_STATUS[CVE-2020-29373] = "fixed-version: Fixed from version 5.6rc2"
+
+CVE_STATUS[CVE-2020-29374] = "fixed-version: Fixed from version 5.8rc1"
+
+CVE_STATUS[CVE-2020-29534] = "fixed-version: Fixed from version 5.10rc1"
+
+CVE_STATUS[CVE-2020-29568] = "fixed-version: Fixed from version 5.11rc1"
+
+CVE_STATUS[CVE-2020-29569] = "fixed-version: Fixed from version 5.11rc1"
+
+CVE_STATUS[CVE-2020-29660] = "fixed-version: Fixed from version 5.10rc7"
+
+CVE_STATUS[CVE-2020-29661] = "fixed-version: Fixed from version 5.10rc7"
+
+CVE_STATUS[CVE-2020-35499] = "fixed-version: Fixed from version 5.11rc1"
+
+# CVE-2020-35501 has no known resolution
+
+CVE_STATUS[CVE-2020-35508] = "fixed-version: Fixed from version 5.10rc3"
+
+CVE_STATUS[CVE-2020-35513] = "fixed-version: Fixed from version 4.17rc1"
+
+CVE_STATUS[CVE-2020-35519] = "fixed-version: Fixed from version 5.10rc7"
+
+CVE_STATUS[CVE-2020-36158] = "fixed-version: Fixed from version 5.11rc1"
+
+CVE_STATUS[CVE-2020-36310] = "fixed-version: Fixed from version 5.8rc1"
+
+CVE_STATUS[CVE-2020-36311] = "fixed-version: Fixed from version 5.9rc5"
+
+CVE_STATUS[CVE-2020-36312] = "fixed-version: Fixed from version 5.9rc5"
+
+CVE_STATUS[CVE-2020-36313] = "fixed-version: Fixed from version 5.7rc1"
+
+CVE_STATUS[CVE-2020-36322] = "fixed-version: Fixed from version 5.11rc1"
+
+CVE_STATUS[CVE-2020-36385] = "fixed-version: Fixed from version 5.10rc1"
+
+CVE_STATUS[CVE-2020-36386] = "fixed-version: Fixed from version 5.9rc1"
+
+CVE_STATUS[CVE-2020-36387] = "fixed-version: Fixed from version 5.9rc1"
+
+CVE_STATUS[CVE-2020-36516] = "fixed-version: Fixed from version 5.17rc2"
+
+CVE_STATUS[CVE-2020-36557] = "fixed-version: Fixed from version 5.7rc1"
+
+CVE_STATUS[CVE-2020-36558] = "fixed-version: Fixed from version 5.6rc3"
+
+CVE_STATUS[CVE-2020-36691] = "fixed-version: Fixed from version 5.8rc1"
+
+CVE_STATUS[CVE-2020-36694] = "fixed-version: Fixed from version 5.10"
+
+CVE_STATUS[CVE-2020-36766] = "fixed-version: Fixed from version 5.9rc1"
+
+CVE_STATUS[CVE-2020-3702] = "fixed-version: Fixed from version 5.12rc1"
+
+CVE_STATUS[CVE-2020-4788] = "fixed-version: Fixed from version 5.10rc5"
+
+CVE_STATUS[CVE-2020-7053] = "fixed-version: Fixed from version 5.2rc1"
+
+CVE_STATUS[CVE-2020-8428] = "fixed-version: Fixed from version 5.5"
+
+CVE_STATUS[CVE-2020-8647] = "fixed-version: Fixed from version 5.6rc5"
+
+CVE_STATUS[CVE-2020-8648] = "fixed-version: Fixed from version 5.6rc3"
+
+CVE_STATUS[CVE-2020-8649] = "fixed-version: Fixed from version 5.6rc5"
+
+CVE_STATUS[CVE-2020-8694] = "fixed-version: Fixed from version 5.10rc4"
+
+# CVE-2020-8832 has no known resolution
+
+CVE_STATUS[CVE-2020-8834] = "fixed-version: Fixed from version 4.18rc1"
+
+CVE_STATUS[CVE-2020-8835] = "fixed-version: Fixed from version 5.7rc1"
+
+CVE_STATUS[CVE-2020-8992] = "fixed-version: Fixed from version 5.6rc2"
+
+CVE_STATUS[CVE-2020-9383] = "fixed-version: Fixed from version 5.6rc4"
+
+CVE_STATUS[CVE-2020-9391] = "fixed-version: Fixed from version 5.6rc3"
+
+CVE_STATUS[CVE-2021-0129] = "fixed-version: Fixed from version 5.13rc1"
+
+CVE_STATUS[CVE-2021-0342] = "fixed-version: Fixed from version 5.8rc1"
+
+# CVE-2021-0399 has no known resolution
+
+CVE_STATUS[CVE-2021-0447] = "fixed-version: Fixed from version 4.15rc1"
+
+CVE_STATUS[CVE-2021-0448] = "fixed-version: Fixed from version 5.9rc7"
+
+CVE_STATUS[CVE-2021-0512] = "fixed-version: Fixed from version 5.12rc1"
+
+CVE_STATUS[CVE-2021-0605] = "fixed-version: Fixed from version 5.8"
+
+# CVE-2021-0606 has no known resolution
+
+# CVE-2021-0695 has no known resolution
+
+CVE_STATUS[CVE-2021-0707] = "fixed-version: Fixed from version 5.11rc3"
+
+CVE_STATUS[CVE-2021-0920] = "fixed-version: Fixed from version 5.14rc4"
+
+# CVE-2021-0924 has no known resolution
+
+CVE_STATUS[CVE-2021-0929] = "fixed-version: Fixed from version 5.6rc1"
+
+CVE_STATUS[CVE-2021-0935] = "fixed-version: Fixed from version 4.16rc7"
+
+# CVE-2021-0936 has no known resolution
+
+CVE_STATUS[CVE-2021-0937] = "fixed-version: Fixed from version 5.12rc8"
+
+CVE_STATUS[CVE-2021-0938] = "fixed-version: Fixed from version 5.10rc4"
+
+CVE_STATUS[CVE-2021-0941] = "fixed-version: Fixed from version 5.12rc1"
+
+# CVE-2021-0961 has no known resolution
+
+CVE_STATUS[CVE-2021-1048] = "fixed-version: Fixed from version 5.9rc4"
+
+CVE_STATUS[CVE-2021-20177] = "fixed-version: Fixed from version 5.5rc1"
+
+CVE_STATUS[CVE-2021-20194] = "fixed-version: Fixed from version 5.10rc1"
+
+# CVE-2021-20219 has no known resolution
+
+CVE_STATUS[CVE-2021-20226] = "fixed-version: Fixed from version 5.10rc1"
+
+CVE_STATUS[CVE-2021-20239] = "fixed-version: Fixed from version 5.9rc1"
+
+CVE_STATUS[CVE-2021-20261] = "fixed-version: Fixed from version 4.5rc5"
+
+CVE_STATUS[CVE-2021-20265] = "fixed-version: Fixed from version 4.5rc3"
+
+CVE_STATUS[CVE-2021-20268] = "fixed-version: Fixed from version 5.11rc5"
+
+CVE_STATUS[CVE-2021-20292] = "fixed-version: Fixed from version 5.9rc1"
+
+CVE_STATUS[CVE-2021-20317] = "fixed-version: Fixed from version 5.4rc1"
+
+CVE_STATUS[CVE-2021-20320] = "fixed-version: Fixed from version 5.15rc3"
+
+CVE_STATUS[CVE-2021-20321] = "fixed-version: Fixed from version 5.15rc5"
+
+CVE_STATUS[CVE-2021-20322] = "fixed-version: Fixed from version 5.15rc1"
+
+CVE_STATUS[CVE-2021-21781] = "fixed-version: Fixed from version 5.11rc7"
+
+CVE_STATUS[CVE-2021-22543] = "fixed-version: Fixed from version 5.13"
+
+CVE_STATUS[CVE-2021-22555] = "fixed-version: Fixed from version 5.12rc8"
+
+CVE_STATUS[CVE-2021-22600] = "fixed-version: Fixed from version 5.16rc6"
+
+CVE_STATUS[CVE-2021-23133] = "fixed-version: Fixed from version 5.12rc8"
+
+CVE_STATUS[CVE-2021-23134] = "fixed-version: Fixed from version 5.13rc1"
+
+CVE_STATUS[CVE-2021-26401] = "fixed-version: Fixed from version 5.17rc8"
+
+CVE_STATUS[CVE-2021-26708] = "fixed-version: Fixed from version 5.11rc7"
+
+CVE_STATUS[CVE-2021-26930] = "fixed-version: Fixed from version 5.12rc1"
+
+CVE_STATUS[CVE-2021-26931] = "fixed-version: Fixed from version 5.12rc1"
+
+CVE_STATUS[CVE-2021-26932] = "fixed-version: Fixed from version 5.12rc1"
+
+# CVE-2021-26934 has no known resolution
+
+CVE_STATUS[CVE-2021-27363] = "fixed-version: Fixed from version 5.12rc2"
+
+CVE_STATUS[CVE-2021-27364] = "fixed-version: Fixed from version 5.12rc2"
+
+CVE_STATUS[CVE-2021-27365] = "fixed-version: Fixed from version 5.12rc2"
+
+CVE_STATUS[CVE-2021-28038] = "fixed-version: Fixed from version 5.12rc2"
+
+CVE_STATUS[CVE-2021-28039] = "fixed-version: Fixed from version 5.12rc2"
+
+CVE_STATUS[CVE-2021-28375] = "fixed-version: Fixed from version 5.12rc3"
+
+CVE_STATUS[CVE-2021-28660] = "fixed-version: Fixed from version 5.12rc3"
+
+CVE_STATUS[CVE-2021-28688] = "fixed-version: Fixed from version 5.12rc6"
+
+CVE_STATUS[CVE-2021-28691] = "fixed-version: Fixed from version 5.13rc6"
+
+CVE_STATUS[CVE-2021-28711] = "fixed-version: Fixed from version 5.16rc7"
+
+CVE_STATUS[CVE-2021-28712] = "fixed-version: Fixed from version 5.16rc7"
+
+CVE_STATUS[CVE-2021-28713] = "fixed-version: Fixed from version 5.16rc7"
+
+CVE_STATUS[CVE-2021-28714] = "fixed-version: Fixed from version 5.16rc7"
+
+CVE_STATUS[CVE-2021-28715] = "fixed-version: Fixed from version 5.16rc7"
+
+CVE_STATUS[CVE-2021-28950] = "fixed-version: Fixed from version 5.12rc4"
+
+CVE_STATUS[CVE-2021-28951] = "fixed-version: Fixed from version 5.12rc2"
+
+CVE_STATUS[CVE-2021-28952] = "fixed-version: Fixed from version 5.12rc4"
+
+CVE_STATUS[CVE-2021-28964] = "fixed-version: Fixed from version 5.12rc4"
+
+CVE_STATUS[CVE-2021-28971] = "fixed-version: Fixed from version 5.12rc4"
+
+CVE_STATUS[CVE-2021-28972] = "fixed-version: Fixed from version 5.12rc4"
+
+CVE_STATUS[CVE-2021-29154] = "fixed-version: Fixed from version 5.12rc7"
+
+CVE_STATUS[CVE-2021-29155] = "fixed-version: Fixed from version 5.12rc8"
+
+CVE_STATUS[CVE-2021-29264] = "fixed-version: Fixed from version 5.12rc3"
+
+CVE_STATUS[CVE-2021-29265] = "fixed-version: Fixed from version 5.12rc3"
+
+CVE_STATUS[CVE-2021-29266] = "fixed-version: Fixed from version 5.12rc4"
+
+CVE_STATUS[CVE-2021-29646] = "fixed-version: Fixed from version 5.12rc5"
+
+CVE_STATUS[CVE-2021-29647] = "fixed-version: Fixed from version 5.12rc5"
+
+CVE_STATUS[CVE-2021-29648] = "fixed-version: Fixed from version 5.12rc5"
+
+CVE_STATUS[CVE-2021-29649] = "fixed-version: Fixed from version 5.12rc5"
+
+CVE_STATUS[CVE-2021-29650] = "fixed-version: Fixed from version 5.12rc5"
+
+CVE_STATUS[CVE-2021-29657] = "fixed-version: Fixed from version 5.12rc6"
+
+CVE_STATUS[CVE-2021-30002] = "fixed-version: Fixed from version 5.12rc1"
+
+CVE_STATUS[CVE-2021-30178] = "fixed-version: Fixed from version 5.12rc2"
+
+CVE_STATUS[CVE-2021-31440] = "fixed-version: Fixed from version 5.13rc1"
+
+CVE_STATUS[CVE-2021-3178] = "fixed-version: Fixed from version 5.11rc5"
+
+CVE_STATUS[CVE-2021-31829] = "fixed-version: Fixed from version 5.13rc1"
+
+CVE_STATUS[CVE-2021-31916] = "fixed-version: Fixed from version 5.12rc5"
+
+CVE_STATUS[CVE-2021-32078] = "fixed-version: Fixed from version 5.13rc1"
+
+CVE_STATUS[CVE-2021-32399] = "fixed-version: Fixed from version 5.13rc1"
+
+CVE_STATUS[CVE-2021-32606] = "fixed-version: Fixed from version 5.13rc4"
+
+CVE_STATUS[CVE-2021-33033] = "fixed-version: Fixed from version 5.12rc3"
+
+CVE_STATUS[CVE-2021-33034] = "fixed-version: Fixed from version 5.13rc1"
+
+CVE_STATUS[CVE-2021-33061] = "fixed-version: Fixed from version 5.18rc1"
+
+CVE_STATUS[CVE-2021-33098] = "fixed-version: Fixed from version 5.13rc4"
+
+CVE_STATUS[CVE-2021-33135] = "fixed-version: Fixed from version 5.17rc8"
+
+CVE_STATUS[CVE-2021-33200] = "fixed-version: Fixed from version 5.13rc4"
+
+CVE_STATUS[CVE-2021-3347] = "fixed-version: Fixed from version 5.11rc6"
+
+CVE_STATUS[CVE-2021-3348] = "fixed-version: Fixed from version 5.11rc6"
+
+CVE_STATUS[CVE-2021-33624] = "fixed-version: Fixed from version 5.13rc7"
+
+CVE_STATUS[CVE-2021-33630] = "fixed-version: Fixed from version 5.4rc1"
+
+CVE_STATUS[CVE-2021-33631] = "fixed-version: Fixed from version 6.2rc1"
+
+CVE_STATUS[CVE-2021-33655] = "fixed-version: Fixed from version 5.19rc6"
+
+CVE_STATUS[CVE-2021-33656] = "fixed-version: Fixed from version 5.12rc1"
+
+CVE_STATUS[CVE-2021-33909] = "fixed-version: Fixed from version 5.14rc3"
+
+CVE_STATUS[CVE-2021-3411] = "fixed-version: Fixed from version 5.10"
+
+CVE_STATUS[CVE-2021-3428] = "fixed-version: Fixed from version 5.9rc2"
+
+CVE_STATUS[CVE-2021-3444] = "fixed-version: Fixed from version 5.12rc1"
+
+CVE_STATUS[CVE-2021-34556] = "fixed-version: Fixed from version 5.14rc4"
+
+CVE_STATUS[CVE-2021-34693] = "fixed-version: Fixed from version 5.13rc7"
+
+CVE_STATUS[CVE-2021-3483] = "fixed-version: Fixed from version 5.12rc6"
+
+CVE_STATUS[CVE-2021-34866] = "fixed-version: Fixed from version 5.14"
+
+CVE_STATUS[CVE-2021-3489] = "fixed-version: Fixed from version 5.13rc4"
+
+CVE_STATUS[CVE-2021-3490] = "fixed-version: Fixed from version 5.13rc4"
+
+CVE_STATUS[CVE-2021-3491] = "fixed-version: Fixed from version 5.13rc1"
+
+# CVE-2021-3492 has no known resolution
+
+CVE_STATUS[CVE-2021-3493] = "fixed-version: Fixed from version 5.11rc1"
+
+CVE_STATUS[CVE-2021-34981] = "fixed-version: Fixed from version 5.14rc1"
+
+CVE_STATUS[CVE-2021-3501] = "fixed-version: Fixed from version 5.12rc8"
+
+CVE_STATUS[CVE-2021-35039] = "fixed-version: Fixed from version 5.13"
+
+CVE_STATUS[CVE-2021-3506] = "fixed-version: Fixed from version 5.13rc1"
+
+# CVE-2021-3542 has no known resolution
+
+CVE_STATUS[CVE-2021-3543] = "fixed-version: Fixed from version 5.13rc1"
+
+CVE_STATUS[CVE-2021-35477] = "fixed-version: Fixed from version 5.14rc4"
+
+CVE_STATUS[CVE-2021-3564] = "fixed-version: Fixed from version 5.13rc5"
+
+CVE_STATUS[CVE-2021-3573] = "fixed-version: Fixed from version 5.13rc5"
+
+CVE_STATUS[CVE-2021-3587] = "fixed-version: Fixed from version 5.13rc5"
+
+CVE_STATUS[CVE-2021-3600] = "fixed-version: Fixed from version 5.11"
+
+CVE_STATUS[CVE-2021-3609] = "fixed-version: Fixed from version 5.14rc1"
+
+CVE_STATUS[CVE-2021-3612] = "fixed-version: Fixed from version 5.12rc1"
+
+CVE_STATUS[CVE-2021-3635] = "fixed-version: Fixed from version 5.5rc7"
+
+CVE_STATUS[CVE-2021-3640] = "fixed-version: Fixed from version 5.16rc1"
+
+CVE_STATUS[CVE-2021-3653] = "fixed-version: Fixed from version 5.14rc7"
+
+CVE_STATUS[CVE-2021-3655] = "fixed-version: Fixed from version 5.14rc1"
+
+CVE_STATUS[CVE-2021-3656] = "fixed-version: Fixed from version 5.14rc7"
+
+CVE_STATUS[CVE-2021-3659] = "fixed-version: Fixed from version 5.12rc7"
+
+CVE_STATUS[CVE-2021-3669] = "fixed-version: Fixed from version 5.15rc1"
+
+CVE_STATUS[CVE-2021-3679] = "fixed-version: Fixed from version 5.14rc3"
+
+# CVE-2021-3714 has no known resolution
+
+CVE_STATUS[CVE-2021-3715] = "fixed-version: Fixed from version 5.6"
+
+CVE_STATUS[CVE-2021-37159] = "fixed-version: Fixed from version 5.14rc3"
+
+CVE_STATUS[CVE-2021-3732] = "fixed-version: Fixed from version 5.14rc6"
+
+CVE_STATUS[CVE-2021-3736] = "fixed-version: Fixed from version 5.15rc1"
+
+CVE_STATUS[CVE-2021-3739] = "fixed-version: Fixed from version 5.15rc1"
+
+CVE_STATUS[CVE-2021-3743] = "fixed-version: Fixed from version 5.13rc7"
+
+CVE_STATUS[CVE-2021-3744] = "fixed-version: Fixed from version 5.15rc4"
+
+CVE_STATUS[CVE-2021-3752] = "fixed-version: Fixed from version 5.16rc1"
+
+CVE_STATUS[CVE-2021-3753] = "fixed-version: Fixed from version 5.15rc1"
+
+CVE_STATUS[CVE-2021-37576] = "fixed-version: Fixed from version 5.14rc3"
+
+CVE_STATUS[CVE-2021-3759] = "fixed-version: Fixed from version 5.15rc1"
+
+CVE_STATUS[CVE-2021-3760] = "fixed-version: Fixed from version 5.15rc6"
+
+CVE_STATUS[CVE-2021-3764] = "fixed-version: Fixed from version 5.15rc4"
+
+CVE_STATUS[CVE-2021-3772] = "fixed-version: Fixed from version 5.15"
+
+CVE_STATUS[CVE-2021-38160] = "fixed-version: Fixed from version 5.14rc1"
+
+CVE_STATUS[CVE-2021-38166] = "fixed-version: Fixed from version 5.14rc6"
+
+CVE_STATUS[CVE-2021-38198] = "fixed-version: Fixed from version 5.13rc6"
+
+CVE_STATUS[CVE-2021-38199] = "fixed-version: Fixed from version 5.14rc1"
+
+CVE_STATUS[CVE-2021-38200] = "fixed-version: Fixed from version 5.13rc7"
+
+CVE_STATUS[CVE-2021-38201] = "fixed-version: Fixed from version 5.14rc1"
+
+CVE_STATUS[CVE-2021-38202] = "fixed-version: Fixed from version 5.14rc1"
+
+CVE_STATUS[CVE-2021-38203] = "fixed-version: Fixed from version 5.14rc2"
+
+CVE_STATUS[CVE-2021-38204] = "fixed-version: Fixed from version 5.14rc3"
+
+CVE_STATUS[CVE-2021-38205] = "fixed-version: Fixed from version 5.14rc1"
+
+CVE_STATUS[CVE-2021-38206] = "fixed-version: Fixed from version 5.13rc7"
+
+CVE_STATUS[CVE-2021-38207] = "fixed-version: Fixed from version 5.13rc7"
+
+CVE_STATUS[CVE-2021-38208] = "fixed-version: Fixed from version 5.13rc5"
+
+CVE_STATUS[CVE-2021-38209] = "fixed-version: Fixed from version 5.13rc1"
+
+CVE_STATUS[CVE-2021-38300] = "fixed-version: Fixed from version 5.15rc4"
+
+# CVE-2021-3847 has no known resolution
+
+# CVE-2021-3864 has no known resolution
+
+# CVE-2021-3892 has no known resolution
+
+CVE_STATUS[CVE-2021-3894] = "fixed-version: Fixed from version 5.15rc6"
+
+CVE_STATUS[CVE-2021-3896] = "fixed-version: Fixed from version 5.15rc6"
+
+CVE_STATUS[CVE-2021-3923] = "fixed-version: Fixed from version 5.16"
+
+CVE_STATUS[CVE-2021-39633] = "fixed-version: Fixed from version 5.14"
+
+CVE_STATUS[CVE-2021-39634] = "fixed-version: Fixed from version 5.9rc8"
+
+CVE_STATUS[CVE-2021-39636] = "fixed-version: Fixed from version 4.16rc1"
+
+CVE_STATUS[CVE-2021-39648] = "fixed-version: Fixed from version 5.11rc3"
+
+CVE_STATUS[CVE-2021-39656] = "fixed-version: Fixed from version 5.12rc3"
+
+CVE_STATUS[CVE-2021-39657] = "fixed-version: Fixed from version 5.11rc4"
+
+CVE_STATUS[CVE-2021-39685] = "fixed-version: Fixed from version 5.16rc5"
+
+CVE_STATUS[CVE-2021-39686] = "fixed-version: Fixed from version 5.16rc1"
+
+CVE_STATUS[CVE-2021-39698] = "fixed-version: Fixed from version 5.16rc5"
+
+CVE_STATUS[CVE-2021-39711] = "fixed-version: Fixed from version 4.18rc6"
+
+CVE_STATUS[CVE-2021-39713] = "fixed-version: Fixed from version 4.20rc1"
+
+CVE_STATUS[CVE-2021-39714] = "fixed-version: Fixed from version 4.12rc1"
+
+# CVE-2021-39800 has no known resolution
+
+# CVE-2021-39801 has no known resolution
+
+# CVE-2021-39802 has no known resolution
+
+CVE_STATUS[CVE-2021-4001] = "fixed-version: Fixed from version 5.16rc2"
+
+CVE_STATUS[CVE-2021-4002] = "fixed-version: Fixed from version 5.16rc3"
+
+CVE_STATUS[CVE-2021-4023] = "fixed-version: Fixed from version 5.15rc1"
+
+CVE_STATUS[CVE-2021-4028] = "fixed-version: Fixed from version 5.15rc4"
+
+CVE_STATUS[CVE-2021-4032] = "fixed-version: Fixed from version 5.15rc7"
+
+CVE_STATUS[CVE-2021-4037] = "fixed-version: Fixed from version 5.12rc1"
+
+CVE_STATUS[CVE-2021-40490] = "fixed-version: Fixed from version 5.15rc1"
+
+CVE_STATUS[CVE-2021-4083] = "fixed-version: Fixed from version 5.16rc4"
+
+CVE_STATUS[CVE-2021-4090] = "fixed-version: Fixed from version 5.16rc2"
+
+CVE_STATUS[CVE-2021-4093] = "fixed-version: Fixed from version 5.15rc7"
+
+CVE_STATUS[CVE-2021-4095] = "fixed-version: Fixed from version 5.17rc1"
+
+CVE_STATUS[CVE-2021-41073] = "fixed-version: Fixed from version 5.15rc2"
+
+CVE_STATUS[CVE-2021-4135] = "fixed-version: Fixed from version 5.16rc6"
+
+CVE_STATUS[CVE-2021-4148] = "fixed-version: Fixed from version 5.15"
+
+CVE_STATUS[CVE-2021-4149] = "fixed-version: Fixed from version 5.15rc6"
+
+CVE_STATUS[CVE-2021-4150] = "fixed-version: Fixed from version 5.15rc7"
+
+CVE_STATUS[CVE-2021-4154] = "fixed-version: Fixed from version 5.14rc2"
+
+CVE_STATUS[CVE-2021-4155] = "fixed-version: Fixed from version 5.16"
+
+CVE_STATUS[CVE-2021-4157] = "fixed-version: Fixed from version 5.13rc1"
+
+CVE_STATUS[CVE-2021-4159] = "fixed-version: Fixed from version 5.7rc1"
+
+CVE_STATUS[CVE-2021-41864] = "fixed-version: Fixed from version 5.15rc5"
+
+CVE_STATUS[CVE-2021-4197] = "fixed-version: Fixed from version 5.16"
+
+CVE_STATUS[CVE-2021-42008] = "fixed-version: Fixed from version 5.14rc7"
+
+CVE_STATUS[CVE-2021-4202] = "fixed-version: Fixed from version 5.16rc2"
+
+CVE_STATUS[CVE-2021-4203] = "fixed-version: Fixed from version 5.15rc4"
+
+CVE_STATUS[CVE-2021-4204] = "fixed-version: Fixed from version 5.17rc1"
+
+CVE_STATUS[CVE-2021-4218] = "fixed-version: Fixed from version 5.8rc1"
+
+CVE_STATUS[CVE-2021-42252] = "fixed-version: Fixed from version 5.15rc1"
+
+CVE_STATUS[CVE-2021-42327] = "fixed-version: Fixed from version 5.15"
+
+CVE_STATUS[CVE-2021-42739] = "fixed-version: Fixed from version 5.16rc1"
+
+CVE_STATUS[CVE-2021-43056] = "fixed-version: Fixed from version 5.15rc6"
+
+CVE_STATUS[CVE-2021-43057] = "fixed-version: Fixed from version 5.15rc3"
+
+CVE_STATUS[CVE-2021-43267] = "fixed-version: Fixed from version 5.15"
+
+CVE_STATUS[CVE-2021-43389] = "fixed-version: Fixed from version 5.15rc6"
+
+CVE_STATUS[CVE-2021-43975] = "fixed-version: Fixed from version 5.16rc2"
+
+CVE_STATUS[CVE-2021-43976] = "fixed-version: Fixed from version 5.17rc1"
+
+CVE_STATUS[CVE-2021-44733] = "fixed-version: Fixed from version 5.16rc7"
+
+CVE_STATUS[CVE-2021-44879] = "fixed-version: Fixed from version 5.17rc1"
+
+CVE_STATUS[CVE-2021-45095] = "fixed-version: Fixed from version 5.16rc6"
+
+CVE_STATUS[CVE-2021-45100] = "fixed-version: Fixed from version 5.16rc7"
+
+CVE_STATUS[CVE-2021-45402] = "fixed-version: Fixed from version 5.16rc6"
+
+CVE_STATUS[CVE-2021-45469] = "fixed-version: Fixed from version 5.17rc1"
+
+CVE_STATUS[CVE-2021-45480] = "fixed-version: Fixed from version 5.16rc6"
+
+CVE_STATUS[CVE-2021-45485] = "fixed-version: Fixed from version 5.14rc1"
+
+CVE_STATUS[CVE-2021-45486] = "fixed-version: Fixed from version 5.13rc1"
+
+CVE_STATUS[CVE-2021-45868] = "fixed-version: Fixed from version 5.16rc1"
+
+CVE_STATUS[CVE-2021-46283] = "fixed-version: Fixed from version 5.13rc7"
+
+CVE_STATUS[CVE-2022-0001] = "fixed-version: Fixed from version 5.17rc8"
+
+CVE_STATUS[CVE-2022-0002] = "fixed-version: Fixed from version 5.17rc8"
+
+CVE_STATUS[CVE-2022-0168] = "fixed-version: Fixed from version 5.18rc1"
+
+CVE_STATUS[CVE-2022-0171] = "fixed-version: Fixed from version 5.18rc4"
+
+CVE_STATUS[CVE-2022-0185] = "fixed-version: Fixed from version 5.17rc1"
+
+CVE_STATUS[CVE-2022-0264] = "fixed-version: Fixed from version 5.16rc6"
+
+CVE_STATUS[CVE-2022-0286] = "fixed-version: Fixed from version 5.14rc2"
+
+CVE_STATUS[CVE-2022-0322] = "fixed-version: Fixed from version 5.15rc6"
+
+CVE_STATUS[CVE-2022-0330] = "fixed-version: Fixed from version 5.17rc2"
+
+CVE_STATUS[CVE-2022-0382] = "fixed-version: Fixed from version 5.16"
+
+# CVE-2022-0400 has no known resolution
+
+CVE_STATUS[CVE-2022-0433] = "fixed-version: Fixed from version 5.17rc1"
+
+CVE_STATUS[CVE-2022-0435] = "fixed-version: Fixed from version 5.17rc4"
+
+CVE_STATUS[CVE-2022-0480] = "fixed-version: Fixed from version 5.15rc1"
+
+CVE_STATUS[CVE-2022-0487] = "fixed-version: Fixed from version 5.17rc4"
+
+CVE_STATUS[CVE-2022-0492] = "fixed-version: Fixed from version 5.17rc3"
+
+CVE_STATUS[CVE-2022-0494] = "fixed-version: Fixed from version 5.17rc5"
+
+CVE_STATUS[CVE-2022-0500] = "fixed-version: Fixed from version 5.17rc1"
+
+CVE_STATUS[CVE-2022-0516] = "fixed-version: Fixed from version 5.17rc4"
+
+CVE_STATUS[CVE-2022-0617] = "fixed-version: Fixed from version 5.17rc2"
+
+CVE_STATUS[CVE-2022-0644] = "fixed-version: Fixed from version 5.15rc7"
+
+CVE_STATUS[CVE-2022-0646] = "fixed-version: Fixed from version 5.17rc5"
+
+CVE_STATUS[CVE-2022-0742] = "fixed-version: Fixed from version 5.17rc7"
+
+CVE_STATUS[CVE-2022-0812] = "fixed-version: Fixed from version 5.8rc6"
+
+CVE_STATUS[CVE-2022-0847] = "fixed-version: Fixed from version 5.17rc6"
+
+CVE_STATUS[CVE-2022-0850] = "fixed-version: Fixed from version 5.14rc1"
+
+CVE_STATUS[CVE-2022-0854] = "fixed-version: Fixed from version 5.17rc8"
+
+CVE_STATUS[CVE-2022-0995] = "fixed-version: Fixed from version 5.17rc8"
+
+CVE_STATUS[CVE-2022-0998] = "fixed-version: Fixed from version 5.17rc1"
+
+CVE_STATUS[CVE-2022-1011] = "fixed-version: Fixed from version 5.17rc8"
+
+CVE_STATUS[CVE-2022-1012] = "fixed-version: Fixed from version 5.18rc6"
+
+CVE_STATUS[CVE-2022-1015] = "fixed-version: Fixed from version 5.18rc1"
+
+CVE_STATUS[CVE-2022-1016] = "fixed-version: Fixed from version 5.18rc1"
+
+CVE_STATUS[CVE-2022-1043] = "fixed-version: Fixed from version 5.14rc7"
+
+CVE_STATUS[CVE-2022-1048] = "fixed-version: Fixed from version 5.18rc1"
+
+CVE_STATUS[CVE-2022-1055] = "fixed-version: Fixed from version 5.17rc3"
+
+# CVE-2022-1116 has no known resolution
+
+CVE_STATUS[CVE-2022-1158] = "fixed-version: Fixed from version 5.18rc1"
+
+CVE_STATUS[CVE-2022-1184] = "fixed-version: Fixed from version 5.19rc1"
+
+CVE_STATUS[CVE-2022-1195] = "fixed-version: Fixed from version 5.16rc7"
+
+CVE_STATUS[CVE-2022-1198] = "fixed-version: Fixed from version 5.17rc6"
+
+CVE_STATUS[CVE-2022-1199] = "fixed-version: Fixed from version 5.17rc8"
+
+CVE_STATUS[CVE-2022-1204] = "fixed-version: Fixed from version 5.18rc1"
+
+CVE_STATUS[CVE-2022-1205] = "fixed-version: Fixed from version 5.18rc1"
+
+# CVE-2022-1247 has no known resolution
+
+CVE_STATUS[CVE-2022-1263] = "fixed-version: Fixed from version 5.18rc3"
+
+CVE_STATUS[CVE-2022-1280] = "fixed-version: Fixed from version 5.15rc1"
+
+CVE_STATUS[CVE-2022-1353] = "fixed-version: Fixed from version 5.17"
+
+CVE_STATUS[CVE-2022-1419] = "fixed-version: Fixed from version 5.6rc2"
+
+CVE_STATUS[CVE-2022-1462] = "fixed-version: Fixed from version 5.19rc7"
+
+CVE_STATUS[CVE-2022-1508] = "fixed-version: Fixed from version 5.15rc1"
+
+CVE_STATUS[CVE-2022-1516] = "fixed-version: Fixed from version 5.18rc1"
+
+CVE_STATUS[CVE-2022-1651] = "fixed-version: Fixed from version 5.18rc1"
+
+CVE_STATUS[CVE-2022-1652] = "fixed-version: Fixed from version 5.18rc6"
+
+CVE_STATUS[CVE-2022-1671] = "fixed-version: Fixed from version 5.18rc1"
+
+CVE_STATUS[CVE-2022-1678] = "fixed-version: Fixed from version 4.20rc1"
+
+CVE_STATUS[CVE-2022-1679] = "fixed-version: Fixed from version 6.0rc1"
+
+CVE_STATUS[CVE-2022-1729] = "fixed-version: Fixed from version 5.18"
+
+CVE_STATUS[CVE-2022-1734] = "fixed-version: Fixed from version 5.18rc6"
+
+CVE_STATUS[CVE-2022-1786] = "fixed-version: Fixed from version 5.12rc1"
+
+CVE_STATUS[CVE-2022-1789] = "fixed-version: Fixed from version 5.18"
+
+CVE_STATUS[CVE-2022-1836] = "fixed-version: Fixed from version 5.18rc5"
+
+CVE_STATUS[CVE-2022-1852] = "fixed-version: Fixed from version 5.19rc1"
+
+CVE_STATUS[CVE-2022-1882] = "fixed-version: Fixed from version 5.19rc8"
+
+CVE_STATUS[CVE-2022-1943] = "fixed-version: Fixed from version 5.18rc7"
+
+CVE_STATUS[CVE-2022-1966] = "fixed-version: Fixed from version 5.19rc1"
+
+CVE_STATUS[CVE-2022-1972] = "fixed-version: Fixed from version 5.19rc1"
+
+CVE_STATUS[CVE-2022-1973] = "fixed-version: Fixed from version 5.19rc1"
+
+CVE_STATUS[CVE-2022-1974] = "fixed-version: Fixed from version 5.18rc6"
+
+CVE_STATUS[CVE-2022-1975] = "fixed-version: Fixed from version 5.18rc6"
+
+CVE_STATUS[CVE-2022-1976] = "fixed-version: Fixed from version 5.19rc1"
+
+CVE_STATUS[CVE-2022-1998] = "fixed-version: Fixed from version 5.17rc3"
+
+CVE_STATUS[CVE-2022-20008] = "fixed-version: Fixed from version 5.17rc5"
+
+CVE_STATUS[CVE-2022-20132] = "fixed-version: Fixed from version 5.16rc5"
+
+CVE_STATUS[CVE-2022-20141] = "fixed-version: Fixed from version 5.15rc1"
+
+CVE_STATUS[CVE-2022-20148] = "fixed-version: Fixed from version 5.16rc1"
+
+CVE_STATUS[CVE-2022-20153] = "fixed-version: Fixed from version 5.13rc1"
+
+CVE_STATUS[CVE-2022-20154] = "fixed-version: Fixed from version 5.16rc8"
+
+CVE_STATUS[CVE-2022-20158] = "fixed-version: Fixed from version 5.17"
+
+CVE_STATUS[CVE-2022-20166] = "fixed-version: Fixed from version 5.10rc1"
+
+CVE_STATUS[CVE-2022-20368] = "fixed-version: Fixed from version 5.17"
+
+CVE_STATUS[CVE-2022-20369] = "fixed-version: Fixed from version 5.18rc1"
+
+CVE_STATUS[CVE-2022-20409] = "fixed-version: Fixed from version 5.12rc1"
+
+CVE_STATUS[CVE-2022-20421] = "fixed-version: Fixed from version 6.0rc4"
+
+CVE_STATUS[CVE-2022-20422] = "fixed-version: Fixed from version 6.0rc1"
+
+CVE_STATUS[CVE-2022-20423] = "fixed-version: Fixed from version 5.17"
+
+CVE_STATUS[CVE-2022-20424] = "fixed-version: Fixed from version 5.12rc1"
+
+CVE_STATUS[CVE-2022-20565] = "fixed-version: Fixed from version 5.9rc4"
+
+CVE_STATUS[CVE-2022-20566] = "fixed-version: Fixed from version 5.19"
+
+CVE_STATUS[CVE-2022-20567] = "fixed-version: Fixed from version 4.16rc5"
+
+CVE_STATUS[CVE-2022-20568] = "fixed-version: Fixed from version 5.12rc1"
+
+CVE_STATUS[CVE-2022-20572] = "fixed-version: Fixed from version 5.19rc1"
+
+CVE_STATUS[CVE-2022-2078] = "fixed-version: Fixed from version 5.19rc1"
+
+CVE_STATUS[CVE-2022-21123] = "fixed-version: Fixed from version 5.19rc3"
+
+CVE_STATUS[CVE-2022-21125] = "fixed-version: Fixed from version 5.19rc3"
+
+CVE_STATUS[CVE-2022-21166] = "fixed-version: Fixed from version 5.19rc3"
+
+CVE_STATUS[CVE-2022-21385] = "fixed-version: Fixed from version 4.20"
+
+CVE_STATUS[CVE-2022-21499] = "fixed-version: Fixed from version 5.19rc1"
+
+CVE_STATUS[CVE-2022-21505] = "fixed-version: Fixed from version 5.19rc8"
+
+CVE_STATUS[CVE-2022-2153] = "fixed-version: Fixed from version 5.18rc1"
+
+CVE_STATUS[CVE-2022-2196] = "fixed-version: Fixed from version 6.2rc1"
+
+# CVE-2022-2209 has no known resolution
+
+CVE_STATUS[CVE-2022-22942] = "fixed-version: Fixed from version 5.17rc2"
+
+CVE_STATUS[CVE-2022-23036] = "fixed-version: Fixed from version 5.17rc8"
+
+CVE_STATUS[CVE-2022-23037] = "fixed-version: Fixed from version 5.17rc8"
+
+CVE_STATUS[CVE-2022-23038] = "fixed-version: Fixed from version 5.17rc8"
+
+CVE_STATUS[CVE-2022-23039] = "fixed-version: Fixed from version 5.17rc8"
+
+CVE_STATUS[CVE-2022-23040] = "fixed-version: Fixed from version 5.17rc8"
+
+CVE_STATUS[CVE-2022-23041] = "fixed-version: Fixed from version 5.17rc8"
+
+CVE_STATUS[CVE-2022-23042] = "fixed-version: Fixed from version 5.17rc8"
+
+CVE_STATUS[CVE-2022-2308] = "fixed-version: Fixed from version 6.0"
+
+CVE_STATUS[CVE-2022-2318] = "fixed-version: Fixed from version 5.19rc5"
+
+CVE_STATUS[CVE-2022-23222] = "fixed-version: Fixed from version 5.17rc1"
+
+CVE_STATUS[CVE-2022-2327] = "fixed-version: Fixed from version 5.12rc1"
+
+CVE_STATUS[CVE-2022-2380] = "fixed-version: Fixed from version 5.18rc1"
+
+CVE_STATUS[CVE-2022-23816] = "fixed-version: Fixed from version 5.19rc7"
+
+# CVE-2022-23825 has no known resolution
+
+CVE_STATUS[CVE-2022-23960] = "fixed-version: Fixed from version 5.17rc8"
+
+CVE_STATUS[CVE-2022-24122] = "fixed-version: Fixed from version 5.17rc2"
+
+CVE_STATUS[CVE-2022-24448] = "fixed-version: Fixed from version 5.17rc2"
+
+CVE_STATUS[CVE-2022-24958] = "fixed-version: Fixed from version 5.17rc1"
+
+CVE_STATUS[CVE-2022-24959] = "fixed-version: Fixed from version 5.17rc2"
+
+CVE_STATUS[CVE-2022-2503] = "fixed-version: Fixed from version 5.19rc1"
+
+CVE_STATUS[CVE-2022-25258] = "fixed-version: Fixed from version 5.17rc4"
+
+# CVE-2022-25265 has no known resolution
+
+CVE_STATUS[CVE-2022-25375] = "fixed-version: Fixed from version 5.17rc4"
+
+CVE_STATUS[CVE-2022-25636] = "fixed-version: Fixed from version 5.17rc6"
+
+CVE_STATUS[CVE-2022-2585] = "fixed-version: Fixed from version 6.0rc1"
+
+CVE_STATUS[CVE-2022-2586] = "fixed-version: Fixed from version 6.0rc1"
+
+CVE_STATUS[CVE-2022-2588] = "fixed-version: Fixed from version 6.0rc1"
+
+CVE_STATUS[CVE-2022-2590] = "fixed-version: Fixed from version 6.0rc3"
+
+CVE_STATUS[CVE-2022-2602] = "fixed-version: Fixed from version 6.1rc1"
+
+CVE_STATUS[CVE-2022-26365] = "fixed-version: Fixed from version 5.19rc6"
+
+CVE_STATUS[CVE-2022-26373] = "fixed-version: Fixed from version 6.0rc1"
+
+CVE_STATUS[CVE-2022-2639] = "fixed-version: Fixed from version 5.18rc4"
+
+CVE_STATUS[CVE-2022-26490] = "fixed-version: Fixed from version 5.17rc1"
+
+CVE_STATUS[CVE-2022-2663] = "fixed-version: Fixed from version 6.0rc5"
+
+# CVE-2022-26878 has no known resolution
+
+CVE_STATUS[CVE-2022-26966] = "fixed-version: Fixed from version 5.17rc6"
+
+CVE_STATUS[CVE-2022-27223] = "fixed-version: Fixed from version 5.17rc6"
+
+CVE_STATUS[CVE-2022-27666] = "fixed-version: Fixed from version 5.17rc8"
+
+CVE_STATUS[CVE-2022-27672] = "fixed-version: Fixed from version 6.2"
+
+CVE_STATUS[CVE-2022-2785] = "fixed-version: Fixed from version 6.0rc1"
+
+CVE_STATUS[CVE-2022-27950] = "fixed-version: Fixed from version 5.17rc5"
+
+CVE_STATUS[CVE-2022-28356] = "fixed-version: Fixed from version 5.18rc1"
+
+CVE_STATUS[CVE-2022-28388] = "fixed-version: Fixed from version 5.18rc1"
+
+CVE_STATUS[CVE-2022-28389] = "fixed-version: Fixed from version 5.18rc1"
+
+CVE_STATUS[CVE-2022-28390] = "fixed-version: Fixed from version 5.18rc1"
+
+CVE_STATUS[CVE-2022-2873] = "fixed-version: Fixed from version 5.19rc1"
+
+CVE_STATUS[CVE-2022-28796] = "fixed-version: Fixed from version 5.18rc1"
+
+CVE_STATUS[CVE-2022-28893] = "fixed-version: Fixed from version 5.18rc2"
+
+CVE_STATUS[CVE-2022-2905] = "fixed-version: Fixed from version 6.0rc4"
+
+CVE_STATUS[CVE-2022-29156] = "fixed-version: Fixed from version 5.17rc6"
+
+CVE_STATUS[CVE-2022-2938] = "fixed-version: Fixed from version 5.17rc2"
+
+CVE_STATUS[CVE-2022-29581] = "fixed-version: Fixed from version 5.18rc4"
+
+CVE_STATUS[CVE-2022-29582] = "fixed-version: Fixed from version 5.18rc2"
+
+CVE_STATUS[CVE-2022-2959] = "fixed-version: Fixed from version 5.19rc1"
+
+# CVE-2022-2961 has no known resolution
+
+CVE_STATUS[CVE-2022-2964] = "fixed-version: Fixed from version 5.17rc4"
+
+CVE_STATUS[CVE-2022-2977] = "fixed-version: Fixed from version 5.18rc1"
+
+CVE_STATUS[CVE-2022-2978] = "fixed-version: Fixed from version 6.1rc1"
+
+CVE_STATUS[CVE-2022-29900] = "fixed-version: Fixed from version 5.19rc7"
+
+CVE_STATUS[CVE-2022-29901] = "fixed-version: Fixed from version 5.19rc7"
+
+CVE_STATUS[CVE-2022-2991] = "fixed-version: Fixed from version 5.15rc1"
+
+CVE_STATUS[CVE-2022-29968] = "fixed-version: Fixed from version 5.18rc5"
+
+CVE_STATUS[CVE-2022-3028] = "fixed-version: Fixed from version 6.0rc3"
+
+CVE_STATUS[CVE-2022-30594] = "fixed-version: Fixed from version 5.18rc1"
+
+CVE_STATUS[CVE-2022-3061] = "fixed-version: Fixed from version 5.18rc5"
+
+CVE_STATUS[CVE-2022-3077] = "fixed-version: Fixed from version 5.19rc1"
+
+CVE_STATUS[CVE-2022-3078] = "fixed-version: Fixed from version 5.18rc1"
+
+CVE_STATUS[CVE-2022-3103] = "fixed-version: Fixed from version 6.0rc3"
+
+CVE_STATUS[CVE-2022-3104] = "fixed-version: Fixed from version 5.19rc1"
+
+CVE_STATUS[CVE-2022-3105] = "fixed-version: Fixed from version 5.16"
+
+CVE_STATUS[CVE-2022-3106] = "fixed-version: Fixed from version 5.16rc6"
+
+CVE_STATUS[CVE-2022-3107] = "fixed-version: Fixed from version 5.17"
+
+CVE_STATUS[CVE-2022-3108] = "fixed-version: Fixed from version 5.17rc1"
+
+CVE_STATUS[CVE-2022-3110] = "fixed-version: Fixed from version 5.19rc1"
+
+CVE_STATUS[CVE-2022-3111] = "fixed-version: Fixed from version 5.18rc1"
+
+CVE_STATUS[CVE-2022-3112] = "fixed-version: Fixed from version 5.18rc1"
+
+CVE_STATUS[CVE-2022-3113] = "fixed-version: Fixed from version 5.18rc1"
+
+CVE_STATUS[CVE-2022-3114] = "fixed-version: Fixed from version 5.19rc1"
+
+CVE_STATUS[CVE-2022-3115] = "fixed-version: Fixed from version 5.19rc1"
+
+CVE_STATUS[CVE-2022-3169] = "fixed-version: Fixed from version 6.1rc1"
+
+CVE_STATUS[CVE-2022-3170] = "fixed-version: Fixed from version 6.0rc4"
+
+CVE_STATUS[CVE-2022-3176] = "fixed-version: Fixed from version 5.17rc1"
+
+CVE_STATUS[CVE-2022-3202] = "fixed-version: Fixed from version 5.18rc1"
+
+CVE_STATUS[CVE-2022-32250] = "fixed-version: Fixed from version 5.19rc1"
+
+CVE_STATUS[CVE-2022-32296] = "fixed-version: Fixed from version 5.18rc6"
+
+# CVE-2022-3238 has no known resolution
+
+CVE_STATUS[CVE-2022-3239] = "fixed-version: Fixed from version 5.18rc1"
+
+CVE_STATUS[CVE-2022-32981] = "fixed-version: Fixed from version 5.19rc2"
+
+CVE_STATUS[CVE-2022-3303] = "fixed-version: Fixed from version 6.0rc5"
+
+CVE_STATUS[CVE-2022-3344] = "fixed-version: Fixed from version 6.1rc7"
+
+CVE_STATUS[CVE-2022-33740] = "fixed-version: Fixed from version 5.19rc6"
+
+CVE_STATUS[CVE-2022-33741] = "fixed-version: Fixed from version 5.19rc6"
+
+CVE_STATUS[CVE-2022-33742] = "fixed-version: Fixed from version 5.19rc6"
+
+CVE_STATUS[CVE-2022-33743] = "fixed-version: Fixed from version 5.19rc6"
+
+CVE_STATUS[CVE-2022-33744] = "fixed-version: Fixed from version 5.19rc6"
+
+CVE_STATUS[CVE-2022-33981] = "fixed-version: Fixed from version 5.18rc5"
+
+CVE_STATUS[CVE-2022-3424] = "fixed-version: Fixed from version 6.2rc1"
+
+CVE_STATUS[CVE-2022-3435] = "fixed-version: Fixed from version 6.1rc1"
+
+CVE_STATUS[CVE-2022-34494] = "fixed-version: Fixed from version 5.19rc1"
+
+CVE_STATUS[CVE-2022-34495] = "fixed-version: Fixed from version 5.19rc1"
+
+CVE_STATUS[CVE-2022-34918] = "fixed-version: Fixed from version 5.19rc6"
+
+CVE_STATUS[CVE-2022-3521] = "fixed-version: Fixed from version 6.1rc1"
+
+CVE_STATUS[CVE-2022-3522] = "fixed-version: Fixed from version 6.1rc1"
+
+CVE_STATUS[CVE-2022-3523] = "fixed-version: Fixed from version 6.1rc1"
+
+CVE_STATUS[CVE-2022-3524] = "fixed-version: Fixed from version 6.1rc1"
+
+CVE_STATUS[CVE-2022-3526] = "fixed-version: Fixed from version 5.18rc3"
+
+CVE_STATUS[CVE-2022-3531] = "fixed-version: Fixed from version 6.2rc1"
+
+CVE_STATUS[CVE-2022-3532] = "fixed-version: Fixed from version 6.2rc1"
+
+# CVE-2022-3533 has no known resolution
+
+CVE_STATUS[CVE-2022-3534] = "fixed-version: Fixed from version 6.2rc1"
+
+CVE_STATUS[CVE-2022-3535] = "fixed-version: Fixed from version 6.1rc1"
+
+CVE_STATUS[CVE-2022-3541] = "fixed-version: Fixed from version 6.1rc1"
+
+CVE_STATUS[CVE-2022-3542] = "fixed-version: Fixed from version 6.1rc1"
+
+CVE_STATUS[CVE-2022-3543] = "fixed-version: Fixed from version 6.1rc1"
+
+# CVE-2022-3544 has no known resolution
+
+CVE_STATUS[CVE-2022-3545] = "fixed-version: Fixed from version 6.0rc1"
+
+CVE_STATUS[CVE-2022-3564] = "fixed-version: Fixed from version 6.1rc4"
+
+CVE_STATUS[CVE-2022-3565] = "fixed-version: Fixed from version 6.1rc1"
+
+CVE_STATUS[CVE-2022-3566] = "fixed-version: Fixed from version 6.1rc1"
+
+CVE_STATUS[CVE-2022-3567] = "fixed-version: Fixed from version 6.1rc1"
+
+CVE_STATUS[CVE-2022-3577] = "fixed-version: Fixed from version 5.19rc1"
+
+CVE_STATUS[CVE-2022-3586] = "fixed-version: Fixed from version 6.0rc5"
+
+CVE_STATUS[CVE-2022-3594] = "fixed-version: Fixed from version 6.1rc1"
+
+CVE_STATUS[CVE-2022-3595] = "fixed-version: Fixed from version 6.1rc1"
+
+# CVE-2022-3606 has no known resolution
+
+CVE_STATUS[CVE-2022-36123] = "fixed-version: Fixed from version 5.19rc6"
+
+CVE_STATUS[CVE-2022-3619] = "fixed-version: Fixed from version 6.1rc4"
+
+CVE_STATUS[CVE-2022-3621] = "fixed-version: Fixed from version 6.1rc1"
+
+CVE_STATUS[CVE-2022-3623] = "fixed-version: Fixed from version 6.1rc1"
+
+CVE_STATUS[CVE-2022-3624] = "fixed-version: Fixed from version 6.0rc1"
+
+CVE_STATUS[CVE-2022-3625] = "fixed-version: Fixed from version 6.0rc1"
+
+CVE_STATUS[CVE-2022-3628] = "fixed-version: Fixed from version 6.1rc5"
+
+CVE_STATUS[CVE-2022-36280] = "fixed-version: Fixed from version 6.2rc1"
+
+CVE_STATUS[CVE-2022-3629] = "fixed-version: Fixed from version 6.0rc1"
+
+CVE_STATUS[CVE-2022-3630] = "fixed-version: Fixed from version 6.0rc1"
+
+CVE_STATUS[CVE-2022-3633] = "fixed-version: Fixed from version 6.0rc1"
+
+CVE_STATUS[CVE-2022-3635] = "fixed-version: Fixed from version 6.0rc1"
+
+CVE_STATUS[CVE-2022-3636] = "fixed-version: Fixed from version 5.19rc1"
+
+CVE_STATUS[CVE-2022-3640] = "fixed-version: Fixed from version 6.1rc4"
+
+CVE_STATUS[CVE-2022-36402] = "fixed-version: Fixed from version 6.5"
+
+# CVE-2022-3642 has no known resolution
+
+CVE_STATUS[CVE-2022-3643] = "fixed-version: Fixed from version 6.1"
+
+CVE_STATUS[CVE-2022-3646] = "fixed-version: Fixed from version 6.1rc1"
+
+CVE_STATUS[CVE-2022-3649] = "fixed-version: Fixed from version 6.1rc1"
+
+CVE_STATUS[CVE-2022-36879] = "fixed-version: Fixed from version 5.19rc8"
+
+CVE_STATUS[CVE-2022-36946] = "fixed-version: Fixed from version 5.19"
+
+CVE_STATUS[CVE-2022-3707] = "fixed-version: Fixed from version 6.2rc3"
+
+# CVE-2022-38096 has no known resolution
+
+CVE_STATUS[CVE-2022-38457] = "fixed-version: Fixed from version 6.2rc4"
+
+CVE_STATUS[CVE-2022-3903] = "fixed-version: Fixed from version 6.1rc2"
+
+CVE_STATUS[CVE-2022-3910] = "fixed-version: Fixed from version 6.0rc6"
+
+CVE_STATUS[CVE-2022-39188] = "fixed-version: Fixed from version 5.19rc8"
+
+CVE_STATUS[CVE-2022-39189] = "fixed-version: Fixed from version 5.19rc2"
+
+CVE_STATUS[CVE-2022-39190] = "fixed-version: Fixed from version 6.0rc3"
+
+CVE_STATUS[CVE-2022-3977] = "fixed-version: Fixed from version 6.1rc1"
+
+CVE_STATUS[CVE-2022-39842] = "fixed-version: Fixed from version 5.19rc4"
+
+CVE_STATUS[CVE-2022-40133] = "fixed-version: Fixed from version 6.2rc4"
+
+CVE_STATUS[CVE-2022-40307] = "fixed-version: Fixed from version 6.0rc5"
+
+CVE_STATUS[CVE-2022-40476] = "fixed-version: Fixed from version 5.19rc4"
+
+CVE_STATUS[CVE-2022-40768] = "fixed-version: Fixed from version 6.1rc1"
+
+CVE_STATUS[CVE-2022-4095] = "fixed-version: Fixed from version 6.0rc4"
+
+CVE_STATUS[CVE-2022-40982] = "fixed-version: Fixed from version 6.5rc6"
+
+CVE_STATUS[CVE-2022-41218] = "fixed-version: Fixed from version 6.2rc1"
+
+CVE_STATUS[CVE-2022-41222] = "fixed-version: Fixed from version 5.14rc1"
+
+CVE_STATUS[CVE-2022-4127] = "fixed-version: Fixed from version 5.19rc6"
+
+CVE_STATUS[CVE-2022-4128] = "fixed-version: Fixed from version 5.19rc7"
+
+CVE_STATUS[CVE-2022-4129] = "fixed-version: Fixed from version 6.1rc6"
+
+CVE_STATUS[CVE-2022-4139] = "fixed-version: Fixed from version 6.1rc8"
+
+CVE_STATUS[CVE-2022-41674] = "fixed-version: Fixed from version 6.1rc1"
+
+# CVE-2022-41848 has no known resolution
+
+CVE_STATUS[CVE-2022-41849] = "fixed-version: Fixed from version 6.1rc1"
+
+CVE_STATUS[CVE-2022-41850] = "fixed-version: Fixed from version 6.1rc1"
+
+CVE_STATUS[CVE-2022-41858] = "fixed-version: Fixed from version 5.18rc2"
+
+CVE_STATUS[CVE-2022-42328] = "fixed-version: Fixed from version 6.1"
+
+CVE_STATUS[CVE-2022-42329] = "fixed-version: Fixed from version 6.1"
+
+CVE_STATUS[CVE-2022-42432] = "fixed-version: Fixed from version 6.0rc7"
+
+CVE_STATUS[CVE-2022-4269] = "fixed-version: Fixed from version 6.3rc1"
+
+CVE_STATUS[CVE-2022-42703] = "fixed-version: Fixed from version 6.0rc4"
+
+CVE_STATUS[CVE-2022-42719] = "fixed-version: Fixed from version 6.1rc1"
+
+CVE_STATUS[CVE-2022-42720] = "fixed-version: Fixed from version 6.1rc1"
+
+CVE_STATUS[CVE-2022-42721] = "fixed-version: Fixed from version 6.1rc1"
+
+CVE_STATUS[CVE-2022-42722] = "fixed-version: Fixed from version 6.1rc1"
+
+CVE_STATUS[CVE-2022-42895] = "fixed-version: Fixed from version 6.1rc4"
+
+CVE_STATUS[CVE-2022-42896] = "fixed-version: Fixed from version 6.1rc4"
+
+CVE_STATUS[CVE-2022-43750] = "fixed-version: Fixed from version 6.1rc1"
+
+CVE_STATUS[CVE-2022-4378] = "fixed-version: Fixed from version 6.1"
+
+CVE_STATUS[CVE-2022-4379] = "fixed-version: Fixed from version 6.2rc1"
+
+CVE_STATUS[CVE-2022-4382] = "fixed-version: Fixed from version 6.2rc5"
+
+CVE_STATUS[CVE-2022-43945] = "fixed-version: Fixed from version 6.1rc1"
+
+CVE_STATUS[CVE-2022-44032] = "fixed-version: Fixed from version 6.4rc1"
+
+CVE_STATUS[CVE-2022-44033] = "fixed-version: Fixed from version 6.4rc1"
+
+CVE_STATUS[CVE-2022-44034] = "fixed-version: Fixed from version 6.4rc1"
+
+# CVE-2022-4543 has no known resolution
+
+CVE_STATUS[CVE-2022-45869] = "fixed-version: Fixed from version 6.1rc7"
+
+# CVE-2022-45884 has no known resolution
+
+# CVE-2022-45885 has no known resolution
+
+CVE_STATUS[CVE-2022-45886] = "fixed-version: Fixed from version 6.4rc3"
+
+CVE_STATUS[CVE-2022-45887] = "fixed-version: Fixed from version 6.4rc3"
+
+CVE_STATUS[CVE-2022-45888] = "fixed-version: Fixed from version 6.2rc1"
+
+CVE_STATUS[CVE-2022-45919] = "fixed-version: Fixed from version 6.4rc3"
+
+CVE_STATUS[CVE-2022-45934] = "fixed-version: Fixed from version 6.1"
+
+CVE_STATUS[CVE-2022-4662] = "fixed-version: Fixed from version 6.0rc4"
+
+CVE_STATUS[CVE-2022-4696] = "fixed-version: Fixed from version 5.12rc1"
+
+CVE_STATUS[CVE-2022-4744] = "fixed-version: Fixed from version 5.16rc7"
+
+CVE_STATUS[CVE-2022-47518] = "fixed-version: Fixed from version 6.1rc8"
+
+CVE_STATUS[CVE-2022-47519] = "fixed-version: Fixed from version 6.1rc8"
+
+CVE_STATUS[CVE-2022-47520] = "fixed-version: Fixed from version 6.1rc8"
+
+CVE_STATUS[CVE-2022-47521] = "fixed-version: Fixed from version 6.1rc8"
+
+CVE_STATUS[CVE-2022-47929] = "fixed-version: Fixed from version 6.2rc4"
+
+CVE_STATUS[CVE-2022-47938] = "fixed-version: Fixed from version 6.0rc1"
+
+CVE_STATUS[CVE-2022-47939] = "fixed-version: Fixed from version 6.0rc1"
+
+CVE_STATUS[CVE-2022-47940] = "fixed-version: Fixed from version 5.19rc1"
+
+CVE_STATUS[CVE-2022-47941] = "fixed-version: Fixed from version 6.0rc1"
+
+CVE_STATUS[CVE-2022-47942] = "fixed-version: Fixed from version 6.0rc1"
+
+CVE_STATUS[CVE-2022-47943] = "fixed-version: Fixed from version 6.0rc1"
+
+CVE_STATUS[CVE-2022-47946] = "fixed-version: Fixed from version 5.12rc2"
+
+CVE_STATUS[CVE-2022-4842] = "fixed-version: Fixed from version 6.2rc1"
+
+CVE_STATUS[CVE-2022-48423] = "fixed-version: Fixed from version 6.2rc1"
+
+CVE_STATUS[CVE-2022-48424] = "fixed-version: Fixed from version 6.2rc1"
+
+CVE_STATUS[CVE-2022-48425] = "fixed-version: Fixed from version 6.4rc1"
+
+CVE_STATUS[CVE-2022-48502] = "fixed-version: Fixed from version 6.2rc1"
+
+CVE_STATUS[CVE-2022-48619] = "fixed-version: Fixed from version 5.18rc1"
+
+CVE_STATUS[CVE-2023-0030] = "fixed-version: Fixed from version 5.0rc1"
+
+CVE_STATUS[CVE-2023-0045] = "fixed-version: Fixed from version 6.2rc3"
+
+CVE_STATUS[CVE-2023-0047] = "fixed-version: Fixed from version 5.16rc1"
+
+CVE_STATUS[CVE-2023-0122] = "fixed-version: Fixed from version 6.0rc4"
+
+CVE_STATUS[CVE-2023-0160] = "fixed-version: Fixed from version 6.4rc1"
+
+CVE_STATUS[CVE-2023-0179] = "fixed-version: Fixed from version 6.2rc5"
+
+CVE_STATUS[CVE-2023-0210] = "fixed-version: Fixed from version 6.2rc4"
+
+CVE_STATUS[CVE-2023-0240] = "fixed-version: Fixed from version 5.10rc1"
+
+CVE_STATUS[CVE-2023-0266] = "fixed-version: Fixed from version 6.2rc4"
+
+CVE_STATUS[CVE-2023-0386] = "fixed-version: Fixed from version 6.2rc6"
+
+CVE_STATUS[CVE-2023-0394] = "fixed-version: Fixed from version 6.2rc4"
+
+CVE_STATUS[CVE-2023-0458] = "fixed-version: Fixed from version 6.2rc5"
+
+CVE_STATUS[CVE-2023-0459] = "fixed-version: Fixed from version 6.3rc1"
+
+CVE_STATUS[CVE-2023-0461] = "fixed-version: Fixed from version 6.2rc3"
+
+CVE_STATUS[CVE-2023-0468] = "fixed-version: Fixed from version 6.1rc7"
+
+CVE_STATUS[CVE-2023-0469] = "fixed-version: Fixed from version 6.1rc7"
+
+CVE_STATUS[CVE-2023-0590] = "fixed-version: Fixed from version 6.1rc2"
+
+CVE_STATUS[CVE-2023-0597] = "fixed-version: Fixed from version 6.2rc1"
+
+CVE_STATUS[CVE-2023-0615] = "fixed-version: Fixed from version 6.1rc3"
+
+CVE_STATUS[CVE-2023-1032] = "fixed-version: Fixed from version 6.3rc2"
+
+CVE_STATUS[CVE-2023-1073] = "fixed-version: Fixed from version 6.2rc5"
+
+CVE_STATUS[CVE-2023-1074] = "fixed-version: Fixed from version 6.2rc6"
+
+CVE_STATUS[CVE-2023-1075] = "fixed-version: Fixed from version 6.2rc7"
+
+CVE_STATUS[CVE-2023-1076] = "fixed-version: Fixed from version 6.3rc1"
+
+CVE_STATUS[CVE-2023-1077] = "fixed-version: Fixed from version 6.3rc1"
+
+CVE_STATUS[CVE-2023-1078] = "fixed-version: Fixed from version 6.2rc8"
+
+CVE_STATUS[CVE-2023-1079] = "fixed-version: Fixed from version 6.3rc1"
+
+CVE_STATUS[CVE-2023-1095] = "fixed-version: Fixed from version 6.0rc1"
+
+CVE_STATUS[CVE-2023-1118] = "fixed-version: Fixed from version 6.3rc1"
+
+CVE_STATUS[CVE-2023-1192] = "fixed-version: Fixed from version 6.4rc1"
+
+CVE_STATUS[CVE-2023-1193] = "fixed-version: Fixed from version 6.3rc6"
+
+CVE_STATUS[CVE-2023-1194] = "fixed-version: Fixed from version 6.4rc6"
+
+CVE_STATUS[CVE-2023-1195] = "fixed-version: Fixed from version 6.1rc3"
+
+CVE_STATUS[CVE-2023-1206] = "fixed-version: Fixed from version 6.5rc4"
+
+CVE_STATUS[CVE-2023-1249] = "fixed-version: Fixed from version 5.18rc1"
+
+CVE_STATUS[CVE-2023-1252] = "fixed-version: Fixed from version 5.16rc1"
+
+CVE_STATUS[CVE-2023-1281] = "fixed-version: Fixed from version 6.2"
+
+CVE_STATUS[CVE-2023-1295] = "fixed-version: Fixed from version 5.12rc1"
+
+CVE_STATUS[CVE-2023-1380] = "fixed-version: Fixed from version 6.4rc1"
+
+CVE_STATUS[CVE-2023-1382] = "fixed-version: Fixed from version 6.1rc7"
+
+CVE_STATUS[CVE-2023-1390] = "fixed-version: Fixed from version 5.11rc4"
+
+# CVE-2023-1476 has no known resolution
+
+CVE_STATUS[CVE-2023-1513] = "fixed-version: Fixed from version 6.2"
+
+CVE_STATUS[CVE-2023-1582] = "fixed-version: Fixed from version 5.17rc4"
+
+CVE_STATUS[CVE-2023-1583] = "fixed-version: Fixed from version 6.3rc4"
+
+CVE_STATUS[CVE-2023-1611] = "fixed-version: Fixed from version 6.3rc5"
+
+CVE_STATUS[CVE-2023-1637] = "fixed-version: Fixed from version 5.18rc2"
+
+CVE_STATUS[CVE-2023-1652] = "fixed-version: Fixed from version 6.2rc5"
+
+CVE_STATUS[CVE-2023-1670] = "fixed-version: Fixed from version 6.3rc4"
+
+CVE_STATUS[CVE-2023-1829] = "fixed-version: Fixed from version 6.3rc1"
+
+CVE_STATUS[CVE-2023-1838] = "fixed-version: Fixed from version 5.18"
+
+CVE_STATUS[CVE-2023-1855] = "fixed-version: Fixed from version 6.3rc3"
+
+CVE_STATUS[CVE-2023-1859] = "fixed-version: Fixed from version 6.3rc7"
+
+CVE_STATUS[CVE-2023-1872] = "fixed-version: Fixed from version 5.18rc2"
+
+CVE_STATUS[CVE-2023-1989] = "fixed-version: Fixed from version 6.3rc4"
+
+CVE_STATUS[CVE-2023-1990] = "fixed-version: Fixed from version 6.3rc3"
+
+CVE_STATUS[CVE-2023-1998] = "fixed-version: Fixed from version 6.3rc1"
+
+CVE_STATUS[CVE-2023-2002] = "fixed-version: Fixed from version 6.4rc1"
+
+CVE_STATUS[CVE-2023-2006] = "fixed-version: Fixed from version 6.1rc7"
+
+CVE_STATUS[CVE-2023-2007] = "fixed-version: Fixed from version 6.0rc1"
+
+CVE_STATUS[CVE-2023-2008] = "fixed-version: Fixed from version 5.19rc4"
+
+CVE_STATUS[CVE-2023-2019] = "fixed-version: Fixed from version 6.0rc1"
+
+CVE_STATUS[CVE-2023-20569] = "fixed-version: Fixed from version 6.5rc6"
+
+CVE_STATUS[CVE-2023-20588] = "fixed-version: Fixed from version 6.5rc6"
+
+CVE_STATUS[CVE-2023-20593] = "fixed-version: Fixed from version 6.5rc4"
+
+CVE_STATUS[CVE-2023-20928] = "fixed-version: Fixed from version 6.0rc1"
+
+# CVE-2023-20937 has no known resolution
+
+CVE_STATUS[CVE-2023-20938] = "fixed-version: Fixed from version 5.18rc5"
+
+# CVE-2023-20941 has no known resolution
+
+CVE_STATUS[CVE-2023-21102] = "fixed-version: Fixed from version 6.2rc4"
+
+CVE_STATUS[CVE-2023-21106] = "fixed-version: Fixed from version 6.2rc5"
+
+CVE_STATUS[CVE-2023-2124] = "fixed-version: Fixed from version 6.4rc1"
+
+CVE_STATUS[CVE-2023-21255] = "fixed-version: Fixed from version 6.4rc4"
+
+CVE_STATUS[CVE-2023-21264] = "fixed-version: Fixed from version 6.4rc5"
+
+# CVE-2023-21400 has no known resolution
+
+CVE_STATUS[CVE-2023-2156] = "fixed-version: Fixed from version 6.3"
+
+CVE_STATUS[CVE-2023-2162] = "fixed-version: Fixed from version 6.2rc6"
+
+CVE_STATUS[CVE-2023-2163] = "fixed-version: Fixed from version 6.3"
+
+CVE_STATUS[CVE-2023-2166] = "fixed-version: Fixed from version 6.1"
+
+CVE_STATUS[CVE-2023-2176] = "fixed-version: Fixed from version 6.3rc1"
+
+CVE_STATUS[CVE-2023-2177] = "fixed-version: Fixed from version 5.19"
+
+CVE_STATUS[CVE-2023-2194] = "fixed-version: Fixed from version 6.3rc4"
+
+CVE_STATUS[CVE-2023-2235] = "fixed-version: Fixed from version 6.3rc3"
+
+CVE_STATUS[CVE-2023-2236] = "fixed-version: Fixed from version 6.1rc7"
+
+CVE_STATUS[CVE-2023-2248] = "fixed-version: Fixed from version 6.3"
+
+CVE_STATUS[CVE-2023-2269] = "fixed-version: Fixed from version 6.4rc1"
+
+CVE_STATUS[CVE-2023-22995] = "fixed-version: Fixed from version 5.17rc1"
+
+CVE_STATUS[CVE-2023-22996] = "fixed-version: Fixed from version 5.18rc1"
+
+CVE_STATUS[CVE-2023-22997] = "fixed-version: Fixed from version 6.2rc1"
+
+CVE_STATUS[CVE-2023-22998] = "fixed-version: Fixed from version 6.0rc1"
+
+CVE_STATUS[CVE-2023-22999] = "fixed-version: Fixed from version 5.17rc1"
+
+CVE_STATUS[CVE-2023-23000] = "fixed-version: Fixed from version 5.17rc1"
+
+CVE_STATUS[CVE-2023-23001] = "fixed-version: Fixed from version 5.17rc1"
+
+CVE_STATUS[CVE-2023-23002] = "fixed-version: Fixed from version 5.17rc1"
+
+CVE_STATUS[CVE-2023-23003] = "fixed-version: Fixed from version 5.16rc6"
+
+CVE_STATUS[CVE-2023-23004] = "fixed-version: Fixed from version 5.19rc1"
+
+CVE_STATUS[CVE-2023-23005] = "fixed-version: Fixed from version 6.2rc1"
+
+CVE_STATUS[CVE-2023-23006] = "fixed-version: Fixed from version 5.16rc8"
+
+# CVE-2023-23039 has no known resolution
+
+CVE_STATUS[CVE-2023-23454] = "fixed-version: Fixed from version 6.2rc3"
+
+CVE_STATUS[CVE-2023-23455] = "fixed-version: Fixed from version 6.2rc3"
+
+CVE_STATUS[CVE-2023-23559] = "fixed-version: Fixed from version 6.2rc5"
+
+CVE_STATUS[CVE-2023-23586] = "fixed-version: Fixed from version 5.12rc1"
+
+CVE_STATUS[CVE-2023-2430] = "fixed-version: Fixed from version 6.2rc5"
+
+CVE_STATUS[CVE-2023-2483] = "fixed-version: Fixed from version 6.3rc4"
+
+CVE_STATUS[CVE-2023-25012] = "fixed-version: Fixed from version 6.3rc1"
+
+CVE_STATUS[CVE-2023-2513] = "fixed-version: Fixed from version 6.0rc1"
+
+CVE_STATUS[CVE-2023-25775] = "fixed-version: Fixed from version 6.6rc1"
+
+CVE_STATUS[CVE-2023-2598] = "fixed-version: Fixed from version 6.4rc1"
+
+# CVE-2023-26242 has no known resolution
+
+# CVE-2023-2640 has no known resolution
+
+CVE_STATUS[CVE-2023-26544] = "fixed-version: Fixed from version 6.2rc1"
+
+CVE_STATUS[CVE-2023-26545] = "fixed-version: Fixed from version 6.2"
+
+CVE_STATUS[CVE-2023-26605] = "fixed-version: Fixed from version 6.1rc7"
+
+CVE_STATUS[CVE-2023-26606] = "fixed-version: Fixed from version 6.2rc1"
+
+CVE_STATUS[CVE-2023-26607] = "fixed-version: Fixed from version 6.1rc1"
+
+CVE_STATUS[CVE-2023-28327] = "fixed-version: Fixed from version 6.1"
+
+CVE_STATUS[CVE-2023-28328] = "fixed-version: Fixed from version 6.2rc1"
+
+CVE_STATUS[CVE-2023-28410] = "fixed-version: Fixed from version 5.19rc1"
+
+CVE_STATUS[CVE-2023-28464] = "fixed-version: Fixed from version 6.3rc7"
+
+CVE_STATUS[CVE-2023-28466] = "fixed-version: Fixed from version 6.3rc2"
+
+CVE_STATUS[CVE-2023-2860] = "fixed-version: Fixed from version 6.0rc5"
+
+CVE_STATUS[CVE-2023-28772] = "fixed-version: Fixed from version 5.14rc1"
+
+CVE_STATUS[CVE-2023-28866] = "fixed-version: Fixed from version 6.3rc4"
+
+CVE_STATUS[CVE-2023-2898] = "fixed-version: Fixed from version 6.5rc1"
+
+CVE_STATUS[CVE-2023-2985] = "fixed-version: Fixed from version 6.3rc1"
+
+CVE_STATUS[CVE-2023-3006] = "fixed-version: Fixed from version 6.1rc1"
+
+# Skipping CVE-2023-3022, no affected_versions
+
+CVE_STATUS[CVE-2023-30456] = "fixed-version: Fixed from version 6.3rc3"
+
+CVE_STATUS[CVE-2023-30772] = "fixed-version: Fixed from version 6.3rc4"
+
+CVE_STATUS[CVE-2023-3090] = "fixed-version: Fixed from version 6.4rc2"
+
+CVE_STATUS[CVE-2023-3106] = "fixed-version: Fixed from version 4.8rc7"
+
+# Skipping CVE-2023-3108, no affected_versions
+
+# CVE-2023-31081 has no known resolution
+
+# CVE-2023-31082 has no known resolution
+
+CVE_STATUS[CVE-2023-31083] = "fixed-version: Fixed from version 6.6rc1"
+
+CVE_STATUS[CVE-2023-31084] = "fixed-version: Fixed from version 6.4rc3"
+
+CVE_STATUS[CVE-2023-31085] = "fixed-version: Fixed from version 6.6rc5"
+
+CVE_STATUS[CVE-2023-3111] = "fixed-version: Fixed from version 6.0rc2"
+
+CVE_STATUS[CVE-2023-3117] = "fixed-version: Fixed from version 6.4rc7"
+
+CVE_STATUS[CVE-2023-31248] = "fixed-version: Fixed from version 6.5rc2"
+
+CVE_STATUS[CVE-2023-3141] = "fixed-version: Fixed from version 6.4rc1"
+
+CVE_STATUS[CVE-2023-31436] = "fixed-version: Fixed from version 6.3"
+
+CVE_STATUS[CVE-2023-3159] = "fixed-version: Fixed from version 5.18rc6"
+
+CVE_STATUS[CVE-2023-3161] = "fixed-version: Fixed from version 6.2rc7"
+
+CVE_STATUS[CVE-2023-3212] = "fixed-version: Fixed from version 6.4rc2"
+
+CVE_STATUS[CVE-2023-3220] = "fixed-version: Fixed from version 6.3rc1"
+
+CVE_STATUS[CVE-2023-32233] = "fixed-version: Fixed from version 6.4rc1"
+
+CVE_STATUS[CVE-2023-32247] = "fixed-version: Fixed from version 6.4rc1"
+
+CVE_STATUS[CVE-2023-32248] = "fixed-version: Fixed from version 6.4rc1"
+
+CVE_STATUS[CVE-2023-32250] = "fixed-version: Fixed from version 6.4rc1"
+
+CVE_STATUS[CVE-2023-32252] = "fixed-version: Fixed from version 6.4rc1"
+
+CVE_STATUS[CVE-2023-32254] = "fixed-version: Fixed from version 6.4rc1"
+
+CVE_STATUS[CVE-2023-32257] = "fixed-version: Fixed from version 6.4rc1"
+
+CVE_STATUS[CVE-2023-32258] = "fixed-version: Fixed from version 6.4rc1"
+
+CVE_STATUS[CVE-2023-32269] = "fixed-version: Fixed from version 6.2rc7"
+
+# CVE-2023-32629 has no known resolution
+
+CVE_STATUS[CVE-2023-3268] = "fixed-version: Fixed from version 6.4rc1"
+
+CVE_STATUS[CVE-2023-3269] = "fixed-version: Fixed from version 6.5rc1"
+
+CVE_STATUS[CVE-2023-3312] = "fixed-version: Fixed from version 6.4rc1"
+
+CVE_STATUS[CVE-2023-3317] = "fixed-version: Fixed from version 6.3rc6"
+
+CVE_STATUS[CVE-2023-33203] = "fixed-version: Fixed from version 6.3rc4"
+
+CVE_STATUS[CVE-2023-33250] = "fixed-version: Fixed from version 6.5rc1"
+
+CVE_STATUS[CVE-2023-33288] = "fixed-version: Fixed from version 6.3rc4"
+
+CVE_STATUS[CVE-2023-3338] = "fixed-version: Fixed from version 6.1rc1"
+
+CVE_STATUS[CVE-2023-3355] = "fixed-version: Fixed from version 6.3rc1"
+
+CVE_STATUS[CVE-2023-3357] = "fixed-version: Fixed from version 6.2rc1"
+
+CVE_STATUS[CVE-2023-3358] = "fixed-version: Fixed from version 6.2rc5"
+
+CVE_STATUS[CVE-2023-3359] = "fixed-version: Fixed from version 6.2rc7"
+
+CVE_STATUS[CVE-2023-3389] = "fixed-version: Fixed from version 6.0rc1"
+
+CVE_STATUS[CVE-2023-3390] = "fixed-version: Fixed from version 6.4rc7"
+
+CVE_STATUS[CVE-2023-33951] = "fixed-version: Fixed from version 6.4rc1"
+
+CVE_STATUS[CVE-2023-33952] = "fixed-version: Fixed from version 6.4rc1"
+
+# CVE-2023-3397 has no known resolution
+
+CVE_STATUS[CVE-2023-34255] = "fixed-version: Fixed from version 6.4rc1"
+
+CVE_STATUS[CVE-2023-34256] = "fixed-version: Fixed from version 6.4rc2"
+
+CVE_STATUS[CVE-2023-34319] = "fixed-version: Fixed from version 6.5rc6"
+
+CVE_STATUS[CVE-2023-34324] = "fixed-version: Fixed from version 6.6rc6"
+
+CVE_STATUS[CVE-2023-3439] = "fixed-version: Fixed from version 5.18rc5"
+
+CVE_STATUS[CVE-2023-35001] = "fixed-version: Fixed from version 6.5rc2"
+
+CVE_STATUS[CVE-2023-3567] = "fixed-version: Fixed from version 6.2rc7"
+
+# CVE-2023-35693 has no known resolution
+
+CVE_STATUS[CVE-2023-35788] = "fixed-version: Fixed from version 6.4rc5"
+
+CVE_STATUS[CVE-2023-35823] = "fixed-version: Fixed from version 6.4rc1"
+
+CVE_STATUS[CVE-2023-35824] = "fixed-version: Fixed from version 6.4rc1"
+
+CVE_STATUS[CVE-2023-35826] = "fixed-version: Fixed from version 6.4rc1"
+
+CVE_STATUS[CVE-2023-35827] = "fixed-version: Fixed from version 6.6rc6"
+
+CVE_STATUS[CVE-2023-35828] = "fixed-version: Fixed from version 6.4rc1"
+
+CVE_STATUS[CVE-2023-35829] = "fixed-version: Fixed from version 6.4rc1"
+
+CVE_STATUS[CVE-2023-3609] = "fixed-version: Fixed from version 6.4rc7"
+
+CVE_STATUS[CVE-2023-3610] = "fixed-version: Fixed from version 6.4"
+
+CVE_STATUS[CVE-2023-3611] = "fixed-version: Fixed from version 6.5rc2"
+
+# CVE-2023-3640 has no known resolution
+
+CVE_STATUS[CVE-2023-37453] = "fixed-version: Fixed from version 6.6rc1"
+
+# CVE-2023-37454 has no known resolution
+
+CVE_STATUS[CVE-2023-3772] = "fixed-version: Fixed from version 6.5rc7"
+
+CVE_STATUS[CVE-2023-3773] = "fixed-version: Fixed from version 6.5rc7"
+
+CVE_STATUS[CVE-2023-3776] = "fixed-version: Fixed from version 6.5rc2"
+
+CVE_STATUS[CVE-2023-3777] = "fixed-version: Fixed from version 6.5rc3"
+
+CVE_STATUS[CVE-2023-3812] = "fixed-version: Fixed from version 6.1rc4"
+
+CVE_STATUS[CVE-2023-38409] = "fixed-version: Fixed from version 6.3rc7"
+
+CVE_STATUS[CVE-2023-38426] = "fixed-version: Fixed from version 6.4rc3"
+
+CVE_STATUS[CVE-2023-38427] = "fixed-version: Fixed from version 6.4rc6"
+
+CVE_STATUS[CVE-2023-38428] = "fixed-version: Fixed from version 6.4rc3"
+
+CVE_STATUS[CVE-2023-38429] = "fixed-version: Fixed from version 6.4rc3"
+
+CVE_STATUS[CVE-2023-38430] = "fixed-version: Fixed from version 6.4rc6"
+
+CVE_STATUS[CVE-2023-38431] = "fixed-version: Fixed from version 6.4rc6"
+
+CVE_STATUS[CVE-2023-38432] = "fixed-version: Fixed from version 6.4"
+
+CVE_STATUS[CVE-2023-3863] = "fixed-version: Fixed from version 6.5rc1"
+
+CVE_STATUS[CVE-2023-3865] = "fixed-version: Fixed from version 6.4"
+
+CVE_STATUS[CVE-2023-3866] = "fixed-version: Fixed from version 6.4"
+
+CVE_STATUS[CVE-2023-3867] = "fixed-version: Fixed from version 6.5rc1"
+
+CVE_STATUS[CVE-2023-39189] = "fixed-version: Fixed from version 6.6rc1"
+
+CVE_STATUS[CVE-2023-39191] = "fixed-version: Fixed from version 6.3rc1"
+
+CVE_STATUS[CVE-2023-39192] = "fixed-version: Fixed from version 6.6rc1"
+
+CVE_STATUS[CVE-2023-39193] = "fixed-version: Fixed from version 6.6rc1"
+
+CVE_STATUS[CVE-2023-39194] = "fixed-version: Fixed from version 6.5rc7"
+
+CVE_STATUS[CVE-2023-39197] = "fixed-version: Fixed from version 6.5rc1"
+
+CVE_STATUS[CVE-2023-39198] = "fixed-version: Fixed from version 6.5rc7"
+
+CVE_STATUS[CVE-2023-4004] = "fixed-version: Fixed from version 6.5rc3"
+
+# CVE-2023-4010 has no known resolution
+
+CVE_STATUS[CVE-2023-4015] = "fixed-version: Fixed from version 6.5rc4"
+
+CVE_STATUS[CVE-2023-40283] = "fixed-version: Fixed from version 6.5rc1"
+
+CVE_STATUS[CVE-2023-40791] = "fixed-version: Fixed from version 6.5rc6"
+
+CVE_STATUS[CVE-2023-4128] = "fixed-version: Fixed from version 6.5rc5"
+
+CVE_STATUS[CVE-2023-4132] = "fixed-version: Fixed from version 6.5rc1"
+
+CVE_STATUS[CVE-2023-4133] = "fixed-version: Fixed from version 6.3"
+
+CVE_STATUS[CVE-2023-4134] = "fixed-version: Fixed from version 6.5rc1"
+
+CVE_STATUS[CVE-2023-4147] = "fixed-version: Fixed from version 6.5rc4"
+
+CVE_STATUS[CVE-2023-4155] = "fixed-version: Fixed from version 6.5rc6"
+
+CVE_STATUS[CVE-2023-4194] = "fixed-version: Fixed from version 6.5rc5"
+
+CVE_STATUS[CVE-2023-4206] = "fixed-version: Fixed from version 6.5rc5"
+
+CVE_STATUS[CVE-2023-4207] = "fixed-version: Fixed from version 6.5rc5"
+
+CVE_STATUS[CVE-2023-4208] = "fixed-version: Fixed from version 6.5rc5"
+
+CVE_STATUS[CVE-2023-4244] = "fixed-version: Fixed from version 6.5rc7"
+
+CVE_STATUS[CVE-2023-4273] = "fixed-version: Fixed from version 6.5rc5"
+
+CVE_STATUS[CVE-2023-42752] = "fixed-version: Fixed from version 6.6rc1"
+
+CVE_STATUS[CVE-2023-42753] = "fixed-version: Fixed from version 6.6rc1"
+
+CVE_STATUS[CVE-2023-42754] = "fixed-version: Fixed from version 6.6rc3"
+
+CVE_STATUS[CVE-2023-42755] = "fixed-version: Fixed from version 6.3rc1"
+
+CVE_STATUS[CVE-2023-42756] = "fixed-version: Fixed from version 6.6rc3"
+
+CVE_STATUS[CVE-2023-4385] = "fixed-version: Fixed from version 5.19rc1"
+
+CVE_STATUS[CVE-2023-4387] = "fixed-version: Fixed from version 5.18"
+
+CVE_STATUS[CVE-2023-4389] = "fixed-version: Fixed from version 5.18rc3"
+
+CVE_STATUS[CVE-2023-4394] = "fixed-version: Fixed from version 6.0rc3"
+
+CVE_STATUS[CVE-2023-44466] = "fixed-version: Fixed from version 6.5rc2"
+
+CVE_STATUS[CVE-2023-4459] = "fixed-version: Fixed from version 5.18"
+
+CVE_STATUS[CVE-2023-4563] = "fixed-version: Fixed from version 6.5rc6"
+
+CVE_STATUS[CVE-2023-4569] = "fixed-version: Fixed from version 6.5rc7"
+
+CVE_STATUS[CVE-2023-45862] = "fixed-version: Fixed from version 6.3rc1"
+
+CVE_STATUS[CVE-2023-45863] = "fixed-version: Fixed from version 6.3rc1"
+
+CVE_STATUS[CVE-2023-45871] = "fixed-version: Fixed from version 6.6rc1"
+
+CVE_STATUS[CVE-2023-45898] = "fixed-version: Fixed from version 6.6rc1"
+
+CVE_STATUS[CVE-2023-4610] = "fixed-version: Fixed from version 6.4"
+
+CVE_STATUS[CVE-2023-4611] = "fixed-version: Fixed from version 6.5rc4"
+
+CVE_STATUS[CVE-2023-4622] = "fixed-version: Fixed from version 6.5rc1"
+
+CVE_STATUS[CVE-2023-4623] = "fixed-version: Fixed from version 6.6rc1"
+
+CVE_STATUS[CVE-2023-46343] = "fixed-version: Fixed from version 6.6rc7"
+
+CVE_STATUS[CVE-2023-46813] = "fixed-version: Fixed from version 6.6rc7"
+
+CVE_STATUS[CVE-2023-46838] = "cpe-stable-backport: Backported in 6.6.14"
+
+CVE_STATUS[CVE-2023-46862] = "fixed-version: Fixed from version 6.6"
+
+# CVE-2023-47233 has no known resolution
+
+CVE_STATUS[CVE-2023-4732] = "fixed-version: Fixed from version 5.14rc1"
+
+CVE_STATUS[CVE-2023-4881] = "fixed-version: Fixed from version 6.6rc1"
+
+CVE_STATUS[CVE-2023-4921] = "fixed-version: Fixed from version 6.6rc1"
+
+CVE_STATUS[CVE-2023-50431] = "cpe-stable-backport: Backported in 6.6.14"
+
+CVE_STATUS[CVE-2023-5090] = "fixed-version: Fixed from version 6.6rc7"
+
+CVE_STATUS[CVE-2023-51042] = "fixed-version: Fixed from version 6.5rc1"
+
+CVE_STATUS[CVE-2023-51043] = "fixed-version: Fixed from version 6.5rc3"
+
+CVE_STATUS[CVE-2023-5158] = "fixed-version: Fixed from version 6.6rc5"
+
+CVE_STATUS[CVE-2023-51779] = "cpe-stable-backport: Backported in 6.6.9"
+
+CVE_STATUS[CVE-2023-5178] = "fixed-version: Fixed from version 6.6rc7"
+
+CVE_STATUS[CVE-2023-51780] = "cpe-stable-backport: Backported in 6.6.8"
+
+CVE_STATUS[CVE-2023-51781] = "cpe-stable-backport: Backported in 6.6.8"
+
+CVE_STATUS[CVE-2023-51782] = "cpe-stable-backport: Backported in 6.6.8"
+
+CVE_STATUS[CVE-2023-5197] = "fixed-version: Fixed from version 6.6rc3"
+
+CVE_STATUS[CVE-2023-52340] = "fixed-version: Fixed from version 6.3rc1"
+
+CVE_STATUS[CVE-2023-52429] = "cpe-stable-backport: Backported in 6.6.18"
+
+CVE_STATUS[CVE-2023-52433] = "fixed-version: Fixed from version 6.6rc1"
+
+CVE_STATUS[CVE-2023-52434] = "cpe-stable-backport: Backported in 6.6.8"
+
+CVE_STATUS[CVE-2023-52435] = "cpe-stable-backport: Backported in 6.6.11"
+
+CVE_STATUS[CVE-2023-52436] = "cpe-stable-backport: Backported in 6.6.13"
+
+CVE_STATUS[CVE-2023-52438] = "cpe-stable-backport: Backported in 6.6.13"
+
+CVE_STATUS[CVE-2023-52439] = "cpe-stable-backport: Backported in 6.6.13"
+
+CVE_STATUS[CVE-2023-52440] = "fixed-version: Fixed from version 6.6rc1"
+
+CVE_STATUS[CVE-2023-52441] = "fixed-version: Fixed from version 6.5rc4"
+
+CVE_STATUS[CVE-2023-52442] = "fixed-version: Fixed from version 6.5rc4"
+
+CVE_STATUS[CVE-2023-52443] = "cpe-stable-backport: Backported in 6.6.14"
+
+CVE_STATUS[CVE-2023-52444] = "cpe-stable-backport: Backported in 6.6.14"
+
+CVE_STATUS[CVE-2023-52445] = "cpe-stable-backport: Backported in 6.6.14"
+
+CVE_STATUS[CVE-2023-52446] = "cpe-stable-backport: Backported in 6.6.14"
+
+CVE_STATUS[CVE-2023-52447] = "cpe-stable-backport: Backported in 6.6.14"
+
+CVE_STATUS[CVE-2023-52448] = "cpe-stable-backport: Backported in 6.6.14"
+
+CVE_STATUS[CVE-2023-52449] = "cpe-stable-backport: Backported in 6.6.14"
+
+CVE_STATUS[CVE-2023-52450] = "cpe-stable-backport: Backported in 6.6.14"
+
+CVE_STATUS[CVE-2023-52451] = "cpe-stable-backport: Backported in 6.6.14"
+
+CVE_STATUS[CVE-2023-52452] = "cpe-stable-backport: Backported in 6.6.14"
+
+CVE_STATUS[CVE-2023-52453] = "cpe-stable-backport: Backported in 6.6.14"
+
+CVE_STATUS[CVE-2023-52454] = "cpe-stable-backport: Backported in 6.6.14"
+
+CVE_STATUS[CVE-2023-52455] = "cpe-stable-backport: Backported in 6.6.14"
+
+CVE_STATUS[CVE-2023-52456] = "cpe-stable-backport: Backported in 6.6.14"
+
+CVE_STATUS[CVE-2023-52457] = "cpe-stable-backport: Backported in 6.6.14"
+
+CVE_STATUS[CVE-2023-52458] = "cpe-stable-backport: Backported in 6.6.14"
+
+CVE_STATUS[CVE-2023-52459] = "cpe-stable-backport: Backported in 6.6.14"
+
+CVE_STATUS[CVE-2023-52460] = "fixed-version: only affects 6.7rc1 onwards"
+
+CVE_STATUS[CVE-2023-52461] = "fixed-version: only affects 6.7rc1 onwards"
+
+CVE_STATUS[CVE-2023-52462] = "cpe-stable-backport: Backported in 6.6.14"
+
+CVE_STATUS[CVE-2023-52463] = "cpe-stable-backport: Backported in 6.6.14"
+
+CVE_STATUS[CVE-2023-52464] = "cpe-stable-backport: Backported in 6.6.14"
+
+CVE_STATUS[CVE-2023-5345] = "fixed-version: Fixed from version 6.6rc4"
+
+CVE_STATUS[CVE-2023-5633] = "fixed-version: Fixed from version 6.6rc6"
+
+CVE_STATUS[CVE-2023-5717] = "fixed-version: Fixed from version 6.6rc7"
+
+CVE_STATUS[CVE-2023-5972] = "fixed-version: Fixed from version 6.6rc7"
+
+CVE_STATUS[CVE-2023-6039] = "fixed-version: Fixed from version 6.5rc5"
+
+CVE_STATUS[CVE-2023-6040] = "fixed-version: Fixed from version 5.18rc1"
+
+CVE_STATUS[CVE-2023-6111] = "cpe-stable-backport: Backported in 6.6.3"
+
+CVE_STATUS[CVE-2023-6121] = "cpe-stable-backport: Backported in 6.6.4"
+
+CVE_STATUS[CVE-2023-6176] = "fixed-version: Fixed from version 6.6rc2"
+
+CVE_STATUS[CVE-2023-6200] = "cpe-stable-backport: Backported in 6.6.9"
+
+# CVE-2023-6238 has no known resolution
+
+# CVE-2023-6240 has no known resolution
+
+# CVE-2023-6270 has no known resolution
+
+# CVE-2023-6356 has no known resolution
+
+CVE_STATUS[CVE-2023-6531] = "cpe-stable-backport: Backported in 6.6.7"
+
+# CVE-2023-6535 has no known resolution
+
+# CVE-2023-6536 has no known resolution
+
+CVE_STATUS[CVE-2023-6546] = "fixed-version: Fixed from version 6.5rc7"
+
+CVE_STATUS[CVE-2023-6560] = "cpe-stable-backport: Backported in 6.6.5"
+
+CVE_STATUS[CVE-2023-6606] = "cpe-stable-backport: Backported in 6.6.9"
+
+CVE_STATUS[CVE-2023-6610] = "cpe-stable-backport: Backported in 6.6.13"
+
+CVE_STATUS[CVE-2023-6622] = "cpe-stable-backport: Backported in 6.6.7"
+
+CVE_STATUS[CVE-2023-6679] = "fixed-version: only affects 6.7rc1 onwards"
+
+CVE_STATUS[CVE-2023-6817] = "cpe-stable-backport: Backported in 6.6.7"
+
+CVE_STATUS[CVE-2023-6915] = "cpe-stable-backport: Backported in 6.6.13"
+
+CVE_STATUS[CVE-2023-6931] = "cpe-stable-backport: Backported in 6.6.7"
+
+CVE_STATUS[CVE-2023-6932] = "cpe-stable-backport: Backported in 6.6.5"
+
+# CVE-2023-7042 has no known resolution
+
+CVE_STATUS[CVE-2023-7192] = "fixed-version: Fixed from version 6.3rc1"
+
+CVE_STATUS[CVE-2024-0193] = "cpe-stable-backport: Backported in 6.6.10"
+
+CVE_STATUS[CVE-2024-0340] = "fixed-version: Fixed from version 6.4rc6"
+
+CVE_STATUS[CVE-2024-0443] = "fixed-version: Fixed from version 6.4rc7"
+
+CVE_STATUS[CVE-2024-0562] = "fixed-version: Fixed from version 6.0rc3"
+
+# CVE-2024-0564 has no known resolution
+
+CVE_STATUS[CVE-2024-0565] = "cpe-stable-backport: Backported in 6.6.8"
+
+CVE_STATUS[CVE-2024-0582] = "cpe-stable-backport: Backported in 6.6.5"
+
+CVE_STATUS[CVE-2024-0584] = "cpe-stable-backport: Backported in 6.6.5"
+
+CVE_STATUS[CVE-2024-0607] = "cpe-stable-backport: Backported in 6.6.3"
+
+CVE_STATUS[CVE-2024-0639] = "fixed-version: Fixed from version 6.5rc1"
+
+CVE_STATUS[CVE-2024-0641] = "fixed-version: Fixed from version 6.6rc5"
+
+CVE_STATUS[CVE-2024-0646] = "cpe-stable-backport: Backported in 6.6.7"
+
+CVE_STATUS[CVE-2024-0775] = "fixed-version: Fixed from version 6.4rc2"
+
+# CVE-2024-0841 has no known resolution
+
+CVE_STATUS[CVE-2024-1085] = "cpe-stable-backport: Backported in 6.6.14"
+
+CVE_STATUS[CVE-2024-1086] = "cpe-stable-backport: Backported in 6.6.15"
+
+CVE_STATUS[CVE-2024-1151] = "cpe-stable-backport: Backported in 6.6.18"
+
+CVE_STATUS[CVE-2024-1312] = "fixed-version: Fixed from version 6.5rc4"
+
+# CVE-2024-21803 has no known resolution
+
+# CVE-2024-22099 has no known resolution
+
+# CVE-2024-22386 has no known resolution
+
+CVE_STATUS[CVE-2024-22705] = "cpe-stable-backport: Backported in 6.6.10"
+
+# CVE-2024-23196 has no known resolution
+
+# CVE-2024-23307 has no known resolution
+
+# CVE-2024-23848 has no known resolution
+
+CVE_STATUS[CVE-2024-23849] = "cpe-stable-backport: Backported in 6.6.15"
+
+CVE_STATUS[CVE-2024-23850] = "cpe-stable-backport: Backported in 6.6.18"
+
+CVE_STATUS[CVE-2024-23851] = "cpe-stable-backport: Backported in 6.6.18"
+
+CVE_STATUS[CVE-2024-24855] = "fixed-version: Fixed from version 6.5rc2"
+
+# CVE-2024-24857 has no known resolution
+
+# CVE-2024-24858 has no known resolution
+
+# CVE-2024-24859 has no known resolution
+
+CVE_STATUS[CVE-2024-24860] = "cpe-stable-backport: Backported in 6.6.14"
+
+# CVE-2024-24861 has no known resolution
+
+# CVE-2024-24864 has no known resolution
+
+# CVE-2024-25739 has no known resolution
+
+# CVE-2024-25740 has no known resolution
+
+# CVE-2024-25741 has no known resolution
+
+CVE_STATUS[CVE-2024-25744] = "cpe-stable-backport: Backported in 6.6.7"
+
+CVE_STATUS[CVE-2024-26581] = "cpe-stable-backport: Backported in 6.6.17"
+
+CVE_STATUS[CVE-2024-26582] = "cpe-stable-backport: Backported in 6.6.18"
+
+CVE_STATUS[CVE-2024-26583] = "cpe-stable-backport: Backported in 6.6.18"
+
+CVE_STATUS[CVE-2024-26584] = "cpe-stable-backport: Backported in 6.6.18"
+
+CVE_STATUS[CVE-2024-26585] = "cpe-stable-backport: Backported in 6.6.18"
+
+CVE_STATUS[CVE-2024-26586] = "cpe-stable-backport: Backported in 6.6.14"
+
+CVE_STATUS[CVE-2024-26587] = "cpe-stable-backport: Backported in 6.6.14"
+
+CVE_STATUS[CVE-2024-26588] = "cpe-stable-backport: Backported in 6.6.14"
+
+CVE_STATUS[CVE-2024-26589] = "cpe-stable-backport: Backported in 6.6.14"
+
+CVE_STATUS[CVE-2024-26590] = "cpe-stable-backport: Backported in 6.6.14"
+
+CVE_STATUS[CVE-2024-26591] = "cpe-stable-backport: Backported in 6.6.14"
+
+CVE_STATUS[CVE-2024-26592] = "cpe-stable-backport: Backported in 6.6.14"
+
+CVE_STATUS[CVE-2024-26593] = "cpe-stable-backport: Backported in 6.6.18"
+
+CVE_STATUS[CVE-2024-26594] = "cpe-stable-backport: Backported in 6.6.14"
+
+CVE_STATUS[CVE-2024-26595] = "cpe-stable-backport: Backported in 6.6.14"
+
+# CVE-2024-26596 needs backporting (fixed from 6.8rc1)
+
+CVE_STATUS[CVE-2024-26597] = "cpe-stable-backport: Backported in 6.6.14"
+
+CVE_STATUS[CVE-2024-26598] = "cpe-stable-backport: Backported in 6.6.14"
+
+CVE_STATUS[CVE-2024-26599] = "cpe-stable-backport: Backported in 6.6.14"
+
diff --git a/meta/recipes-kernel/linux/generate-cve-exclusions.py b/meta/recipes-kernel/linux/generate-cve-exclusions.py
new file mode 100755
index 0000000000..aa9195aab4
--- /dev/null
+++ b/meta/recipes-kernel/linux/generate-cve-exclusions.py
@@ -0,0 +1,98 @@
+#! /usr/bin/env python3
+
+# Generate granular CVE status metadata for a specific version of the kernel
+# using data from linuxkernelcves.com.
+#
+# SPDX-License-Identifier: GPL-2.0-only
+
+import argparse
+import datetime
+import json
+import pathlib
+import re
+
+from packaging.version import Version
+
+
+def parse_version(s):
+ """
+ Parse the version string and either return a packaging.version.Version, or
+ None if the string was unset or "unk".
+ """
+ if s and s != "unk":
+ # packaging.version.Version doesn't approve of versions like v5.12-rc1-dontuse
+ s = s.replace("-dontuse", "")
+ return Version(s)
+ return None
+
+
+def main(argp=None):
+ parser = argparse.ArgumentParser()
+ parser.add_argument("datadir", type=pathlib.Path, help="Path to a clone of https://github.com/nluedtke/linux_kernel_cves")
+ parser.add_argument("version", type=Version, help="Kernel version number to generate data for, such as 6.1.38")
+
+ args = parser.parse_args(argp)
+ datadir = args.datadir
+ version = args.version
+ base_version = f"{version.major}.{version.minor}"
+
+ with open(datadir / "data" / "kernel_cves.json", "r") as f:
+ cve_data = json.load(f)
+
+ with open(datadir / "data" / "stream_fixes.json", "r") as f:
+ stream_data = json.load(f)
+
+ print(f"""
+# Auto-generated CVE metadata, DO NOT EDIT BY HAND.
+# Generated at {datetime.datetime.now(datetime.timezone.utc)} for version {version}
+
+python check_kernel_cve_status_version() {{
+ this_version = "{version}"
+ kernel_version = d.getVar("LINUX_VERSION")
+ if kernel_version != this_version:
+ bb.warn("Kernel CVE status needs updating: generated for %s but kernel is %s" % (this_version, kernel_version))
+}}
+do_cve_check[prefuncs] += "check_kernel_cve_status_version"
+""")
+
+ for cve, data in cve_data.items():
+ if "affected_versions" not in data:
+ print(f"# Skipping {cve}, no affected_versions")
+ print()
+ continue
+
+ affected = data["affected_versions"]
+ first_affected, fixed = re.search(r"(.+) to (.+)", affected).groups()
+ first_affected = parse_version(first_affected)
+ fixed = parse_version(fixed)
+
+ if not fixed:
+ print(f"# {cve} has no known resolution")
+ elif first_affected and version < first_affected:
+ print(f'CVE_STATUS[{cve}] = "fixed-version: only affects {first_affected} onwards"')
+ elif fixed <= version:
+ print(
+ f'CVE_STATUS[{cve}] = "fixed-version: Fixed from version {fixed}"'
+ )
+ else:
+ if cve in stream_data:
+ backport_data = stream_data[cve]
+ if base_version in backport_data:
+ backport_ver = Version(backport_data[base_version]["fixed_version"])
+ if backport_ver <= version:
+ print(
+ f'CVE_STATUS[{cve}] = "cpe-stable-backport: Backported in {backport_ver}"'
+ )
+ else:
+ # TODO print a note that the kernel needs bumping
+ print(f"# {cve} needs backporting (fixed from {backport_ver})")
+ else:
+ print(f"# {cve} needs backporting (fixed from {fixed})")
+ else:
+ print(f"# {cve} needs backporting (fixed from {fixed})")
+
+ print()
+
+
+if __name__ == "__main__":
+ main()
diff --git a/meta/recipes-kernel/linux/kernel-devsrc.bb b/meta/recipes-kernel/linux/kernel-devsrc.bb
index 9654488a54..fafab475f3 100644
--- a/meta/recipes-kernel/linux/kernel-devsrc.bb
+++ b/meta/recipes-kernel/linux/kernel-devsrc.bb
@@ -47,21 +47,21 @@ do_install() {
# create a /usr/src/kernel symlink to /lib/modules/<version>/source
mkdir -p ${D}/usr/src
(
- cd ${D}/usr/src
- ln -rs ${D}${KERNEL_BUILD_ROOT}${KERNEL_VERSION}/source kernel
+ cd ${D}/usr/src
+ ln -rs ${D}${KERNEL_BUILD_ROOT}${KERNEL_VERSION}/source kernel
)
# for on target purposes, we unify build and source
(
- cd $kerneldir
- ln -s build source
+ cd $kerneldir
+ ln -s build source
)
# first copy everything
(
- cd ${S}
- cp --parents $(find -type f -name "Makefile*" -o -name "Kconfig*") $kerneldir/build
- cp --parents $(find -type f -name "Build" -o -name "Build.include") $kerneldir/build
+ cd ${S}
+ cp --parents $(find -type f -name "Makefile*" -o -name "Kconfig*") $kerneldir/build
+ cp --parents $(find -type f -name "Build" -o -name "Build.include") $kerneldir/build
)
# then drop all but the needed Makefiles/Kconfig files
@@ -70,100 +70,113 @@ do_install() {
# now copy in parts from the build that we'll need later
(
- cd ${B}
-
- if [ -s Module.symvers ]; then
- cp Module.symvers $kerneldir/build
- fi
- cp System.map* $kerneldir/build
- if [ -s Module.markers ]; then
- cp Module.markers $kerneldir/build
- fi
-
- cp -a .config $kerneldir/build
-
- # This scripts copy blow up QA, so for now, we require a more
- # complex 'make scripts' to restore these, versus copying them
- # here. Left as a reference to indicate that we know the scripts must
- # be dealt with.
- # cp -a scripts $kerneldir/build
-
- # although module.lds can be regenerated on target via 'make modules_prepare'
- # there are several places where 'makes scripts prepare' is done, and that won't
- # regenerate the file. So we copy it onto the target as a migration to using
- # modules_prepare
- cp -a --parents scripts/module.lds $kerneldir/build/ 2>/dev/null || :
+ cd ${B}
+
+ if [ -s Module.symvers ]; then
+ cp Module.symvers $kerneldir/build
+ fi
+ cp System.map* $kerneldir/build
+ if [ -s Module.markers ]; then
+ cp Module.markers $kerneldir/build
+ fi
+
+ cp -a .config $kerneldir/build
+
+ # This scripts copy blow up QA, so for now, we require a more
+ # complex 'make scripts' to restore these, versus copying them
+ # here. Left as a reference to indicate that we know the scripts must
+ # be dealt with.
+ # cp -a scripts $kerneldir/build
+
+ # although module.lds can be regenerated on target via 'make modules_prepare'
+ # there are several places where 'makes scripts prepare' is done, and that won't
+ # regenerate the file. So we copy it onto the target as a migration to using
+ # modules_prepare
+ cp -a --parents scripts/module.lds $kerneldir/build/ 2>/dev/null || :
if [ -d arch/${ARCH}/scripts ]; then
- cp -a arch/${ARCH}/scripts $kerneldir/build/arch/${ARCH}
- fi
- if [ -f arch/${ARCH}/*lds ]; then
- cp -a arch/${ARCH}/*lds $kerneldir/build/arch/${ARCH}
- fi
-
- rm -f $kerneldir/build/scripts/*.o
- rm -f $kerneldir/build/scripts/*/*.o
-
- if [ "${ARCH}" = "powerpc" ]; then
- if [ -e arch/powerpc/lib/crtsavres.S ] ||
- [ -e arch/powerpc/lib/crtsavres.o ]; then
- cp -a --parents arch/powerpc/lib/crtsavres.[So] $kerneldir/build/
- fi
- fi
-
- if [ "${ARCH}" = "arm64" -o "${ARCH}" = "riscv" ]; then
+ cp -a arch/${ARCH}/scripts $kerneldir/build/arch/${ARCH}
+ fi
+ if [ -f arch/${ARCH}/*lds ]; then
+ cp -a arch/${ARCH}/*lds $kerneldir/build/arch/${ARCH}
+ fi
+
+ rm -f $kerneldir/build/scripts/*.o
+ rm -f $kerneldir/build/scripts/*/*.o
+
+ if [ "${ARCH}" = "powerpc" ]; then
+ if [ -e arch/powerpc/lib/crtsavres.S ] ||
+ [ -e arch/powerpc/lib/crtsavres.o ]; then
+ cp -a --parents arch/powerpc/lib/crtsavres.[So] $kerneldir/build/
+ fi
+ fi
+
+ if [ "${ARCH}" = "arm64" -o "${ARCH}" = "riscv" ]; then
if [ -e arch/${ARCH}/kernel/vdso/vdso.lds ]; then
- cp -a --parents arch/${ARCH}/kernel/vdso/vdso.lds $kerneldir/build/
+ cp -a --parents arch/${ARCH}/kernel/vdso/vdso.lds $kerneldir/build/
fi
- fi
- if [ "${ARCH}" = "powerpc" ]; then
- cp -a --parents arch/powerpc/kernel/vdso32/vdso32.lds $kerneldir/build 2>/dev/null || :
- cp -a --parents arch/powerpc/kernel/vdso64/vdso64.lds $kerneldir/build 2>/dev/null || :
- fi
-
- cp -a include $kerneldir/build/include
-
- # we don't usually copy generated files, since they can be rebuilt on the target,
- # but without this file, we get a forced syncconfig run in v5.8+, which prompts and
- # breaks workflows.
- cp -a --parents include/generated/autoconf.h $kerneldir/build 2>/dev/null || :
-
- if [ -e $kerneldir/include/generated/.vdso-offsets.h.cmd ]; then
- rm $kerneldir/include/generated/.vdso-offsets.h.cmd
- fi
+ fi
+ if [ "${ARCH}" = "powerpc" ]; then
+ cp -a --parents arch/powerpc/kernel/vdso32/vdso32.lds $kerneldir/build 2>/dev/null || :
+ cp -a --parents arch/powerpc/kernel/vdso64/vdso64.lds $kerneldir/build 2>/dev/null || :
+ # v5.19+
+ cp -a --parents arch/powerpc/kernel/vdso/vdso*.lds $kerneldir/build 2>/dev/null || :
+ fi
+
+ cp -a include $kerneldir/build/include
+
+ # we don't usually copy generated files, since they can be rebuilt on the target,
+ # but without this file, we get a forced syncconfig run in v5.8+, which prompts and
+ # breaks workflows.
+ cp -a --parents include/generated/autoconf.h $kerneldir/build 2>/dev/null || :
+
+ rm -f $kerneldir/include/generated/.vdso-offsets.h.cmd
+ rm -f $kerneldir/build/include/generated/.vdso-offsets.h.cmd
+ rm -f $kerneldir/build/include/generated/.compat_vdso-offsets.h.cmd
+ rm -f $kerneldir/build/include/generated/.vdso32-offsets.h.cmd
)
# now grab the chunks from the source tree that we need
(
- cd ${S}
+ cd ${S}
+
+ cp -a scripts $kerneldir/build
- cp -a scripts $kerneldir/build
+ # for v6.1+ (otherwise we are missing multiple default targets)
+ cp -a --parents Kbuild $kerneldir/build 2>/dev/null || :
- # if our build dir had objtool, it will also be rebuilt on target, so
- # we copy what is required for that build
- if [ -f ${B}/tools/objtool/objtool ]; then
- # these are a few files associated with objtool, since we'll need to
- # rebuild it
- cp -a --parents tools/build/Build.include $kerneldir/build/
- cp -a --parents tools/build/Build $kerneldir/build/
- cp -a --parents tools/build/fixdep.c $kerneldir/build/
- cp -a --parents tools/scripts/utilities.mak $kerneldir/build/
+ # For v6.6+ the debian packing is moved out to seperate rules file
+ # Remove as we else would ned to RDEPEND on make
+ rm $kerneldir/build/scripts/package/debian/rules 2>/dev/null || :
- # extra files, just in case
- cp -a --parents tools/objtool/* $kerneldir/build/
- cp -a --parents tools/lib/* $kerneldir/build/
- cp -a --parents tools/lib/subcmd/* $kerneldir/build/
+ # if our build dir had objtool, it will also be rebuilt on target, so
+ # we copy what is required for that build
+ if [ -f ${B}/tools/objtool/objtool ]; then
+ # these are a few files associated with objtool, since we'll need to
+ # rebuild it
+ cp -a --parents tools/build/Build.include $kerneldir/build/
+ cp -a --parents tools/build/Build $kerneldir/build/
+ cp -a --parents tools/build/fixdep.c $kerneldir/build/
+ cp -a --parents tools/scripts/utilities.mak $kerneldir/build/
- cp -a --parents tools/include/* $kerneldir/build/
+ # extra files, just in case
+ cp -a --parents tools/objtool/* $kerneldir/build/
+ cp -a --parents tools/lib/* $kerneldir/build/
+ cp -a --parents tools/lib/subcmd/* $kerneldir/build/
- cp -a --parents $(find tools/arch/${ARCH}/ -type f) $kerneldir/build/
- fi
+ cp -a --parents tools/include/* $kerneldir/build/
- if [ "${ARCH}" = "arm64" ]; then
- # arch/arm64/include/asm/xen references arch/arm
- cp -a --parents arch/arm/include/asm/xen $kerneldir/build/
- # arch/arm64/include/asm/opcodes.h references arch/arm
- cp -a --parents arch/arm/include/asm/opcodes.h $kerneldir/build/
+ cp -a --parents $(find tools/arch/${ARCH}/ -type f) $kerneldir/build/
+ fi
+
+ if [ "${ARCH}" = "arm64" ]; then
+ # arch/arm64/include/asm/xen references arch/arm
+ cp -a --parents arch/arm/include/asm/xen $kerneldir/build/
+ # arch/arm64/include/asm/opcodes.h references arch/arm
+ cp -a --parents arch/arm/include/asm/opcodes.h $kerneldir/build/
+
+ # v6.1+
+ cp -a --parents arch/arm64/kernel/asm-offsets.c $kerneldir/build/
cp -a --parents arch/arm64/kernel/vdso/*gettimeofday.* $kerneldir/build/
cp -a --parents arch/arm64/kernel/vdso/sigreturn.S $kerneldir/build/
@@ -176,119 +189,169 @@ do_install() {
cp -a --parents arch/arm64/tools/gen-cpucaps.awk $kerneldir/build/ 2>/dev/null || :
cp -a --parents arch/arm64/tools/cpucaps $kerneldir/build/ 2>/dev/null || :
+ # 5.19+
+ cp -a --parents arch/arm64/tools/gen-sysreg.awk $kerneldir/build/ 2>/dev/null || :
+ cp -a --parents arch/arm64/tools/sysreg $kerneldir/build/ 2>/dev/null || :
+
if [ -e $kerneldir/build/arch/arm64/tools/gen-cpucaps.awk ]; then
sed -i -e "s,#!.*awk.*,#!${USRBINPATH}/env awk," $kerneldir/build/arch/arm64/tools/gen-cpucaps.awk
fi
- fi
-
- if [ "${ARCH}" = "powerpc" ]; then
- # 5.0 needs these files, but don't error if they aren't present in the source
- cp -a --parents arch/${ARCH}/kernel/syscalls/syscall.tbl $kerneldir/build/ 2>/dev/null || :
- cp -a --parents arch/${ARCH}/kernel/syscalls/syscalltbl.sh $kerneldir/build/ 2>/dev/null || :
- cp -a --parents arch/${ARCH}/kernel/syscalls/syscallhdr.sh $kerneldir/build/ 2>/dev/null || :
- cp -a --parents arch/${ARCH}/kernel/vdso32/* $kerneldir/build/ 2>/dev/null || :
- cp -a --parents arch/${ARCH}/kernel/vdso64/* $kerneldir/build/ 2>/dev/null || :
- fi
- if [ "${ARCH}" = "riscv" ]; then
+ if [ -e $kerneldir/build/arch/arm64/tools/gen-sysreg.awk ]; then
+ sed -i -e "s,#!.*awk.*,#!${USRBINPATH}/env awk," $kerneldir/build/arch/arm64/tools/gen-sysreg.awk
+ fi
+ fi
+
+ if [ "${ARCH}" = "powerpc" ]; then
+ # 5.0 needs these files, but don't error if they aren't present in the source
+ cp -a --parents arch/${ARCH}/kernel/syscalls/syscall.tbl $kerneldir/build/ 2>/dev/null || :
+ cp -a --parents arch/${ARCH}/kernel/syscalls/syscalltbl.sh $kerneldir/build/ 2>/dev/null || :
+ cp -a --parents arch/${ARCH}/kernel/syscalls/syscallhdr.sh $kerneldir/build/ 2>/dev/null || :
+ cp -a --parents arch/${ARCH}/kernel/vdso32/* $kerneldir/build/ 2>/dev/null || :
+ cp -a --parents arch/${ARCH}/kernel/vdso64/* $kerneldir/build/ 2>/dev/null || :
+
+ # v5.19+
+ cp -a --parents arch/powerpc/kernel/vdso/*.S $kerneldir/build 2>/dev/null || :
+ cp -a --parents arch/powerpc/kernel/vdso/*gettimeofday.* $kerneldir/build 2>/dev/null || :
+ cp -a --parents arch/powerpc/kernel/vdso/gen_vdso*_offsets.sh $kerneldir/build/ 2>/dev/null || :
+
+ # v6,1+
+ cp -a --parents arch/powerpc/kernel/asm-offsets.c $kerneldir/build/ 2>/dev/null || :
+ cp -a --parents arch/powerpc/kernel/head_booke.h $kerneldir/build/ 2>/dev/null || :
+ fi
+ if [ "${ARCH}" = "riscv" ]; then
cp -a --parents arch/riscv/kernel/vdso/*gettimeofday.* $kerneldir/build/
cp -a --parents arch/riscv/kernel/vdso/note.S $kerneldir/build/
+ # v6.1+
+ cp -a --parents arch/riscv/kernel/asm-offsets.c $kerneldir/build/
if [ -e arch/riscv/kernel/vdso/gen_vdso_offsets.sh ]; then
cp -a --parents arch/riscv/kernel/vdso/gen_vdso_offsets.sh $kerneldir/build/
fi
- cp -a --parents arch/riscv/kernel/vdso/* $kerneldir/build/ 2>/dev/null || :
- fi
+ cp -a --parents arch/riscv/kernel/vdso/* $kerneldir/build/ 2>/dev/null || :
+ if [ -e arch/riscv/kernel/compat_vdso/gen_compat_vdso_offsets.sh ]; then
+ cp -a --parents arch/riscv/kernel/compat_vdso/gen_compat_vdso_offsets.sh $kerneldir/build/
+ fi
+ cp -a --parents arch/riscv/kernel/compat_vdso/* $kerneldir/build/ 2>/dev/null || :
+ fi
- # include the machine specific headers for ARM variants, if available.
- if [ "${ARCH}" = "arm" ]; then
- cp -a --parents arch/${ARCH}/mach-*/include $kerneldir/build/
+ # include the machine specific headers for ARM variants, if available.
+ if [ "${ARCH}" = "arm" ]; then
+ cp -a --parents arch/${ARCH}/mach-*/include $kerneldir/build/
- # include a few files for 'make prepare'
- cp -a --parents arch/arm/tools/gen-mach-types $kerneldir/build/
- cp -a --parents arch/arm/tools/mach-types $kerneldir/build/
+ # include a few files for 'make prepare'
+ cp -a --parents arch/arm/tools/gen-mach-types $kerneldir/build/
+ cp -a --parents arch/arm/tools/mach-types $kerneldir/build/
- # ARM syscall table tools only exist for kernels v4.10 or later
+ # 5.19+
+ cp -a --parents arch/arm/tools/gen-sysreg.awk $kerneldir/build/ 2>/dev/null || :
+
+ # ARM syscall table tools only exist for kernels v4.10 or later
SYSCALL_TOOLS=$(find arch/arm/tools -name "syscall*")
if [ -n "$SYSCALL_TOOLS" ] ; then
- cp -a --parents $SYSCALL_TOOLS $kerneldir/build/
+ cp -a --parents $SYSCALL_TOOLS $kerneldir/build/
fi
cp -a --parents arch/arm/kernel/module.lds $kerneldir/build/ 2>/dev/null || :
- fi
-
- if [ -d arch/${ARCH}/include ]; then
- cp -a --parents arch/${ARCH}/include $kerneldir/build/
- fi
-
- cp -a include $kerneldir/build
-
- cp -a --parents lib/vdso/* $kerneldir/build/ 2>/dev/null || :
-
- cp -a --parents tools/include/tools/le_byteshift.h $kerneldir/build/
- cp -a --parents tools/include/tools/be_byteshift.h $kerneldir/build/
-
- # required for generate missing syscalls prepare phase
- cp -a --parents $(find arch/x86 -type f -name "syscall_32.tbl") $kerneldir/build
- cp -a --parents $(find arch/arm -type f -name "*.tbl") $kerneldir/build 2>/dev/null || :
-
- if [ "${ARCH}" = "x86" ]; then
- # files for 'make prepare' to succeed with kernel-devel
- cp -a --parents $(find arch/x86 -type f -name "syscall_32.tbl") $kerneldir/build/ 2>/dev/null || :
- cp -a --parents $(find arch/x86 -type f -name "syscalltbl.sh") $kerneldir/build/ 2>/dev/null || :
- cp -a --parents $(find arch/x86 -type f -name "syscallhdr.sh") $kerneldir/build/ 2>/dev/null || :
- cp -a --parents $(find arch/x86 -type f -name "syscall_64.tbl") $kerneldir/build/ 2>/dev/null || :
- cp -a --parents arch/x86/tools/relocs_32.c $kerneldir/build/
- cp -a --parents arch/x86/tools/relocs_64.c $kerneldir/build/
- cp -a --parents arch/x86/tools/relocs.c $kerneldir/build/
- cp -a --parents arch/x86/tools/relocs_common.c $kerneldir/build/
- cp -a --parents arch/x86/tools/relocs.h $kerneldir/build/
- cp -a --parents arch/x86/tools/gen-insn-attr-x86.awk $kerneldir/build/ 2>/dev/null || :
- cp -a --parents arch/x86/purgatory/purgatory.c $kerneldir/build/
-
- # 4.18 + have unified the purgatory files, so we ignore any errors if
- # these files are not present
- cp -a --parents arch/x86/purgatory/sha256.h $kerneldir/build/ 2>/dev/null || :
- cp -a --parents arch/x86/purgatory/sha256.c $kerneldir/build/ 2>/dev/null || :
-
- cp -a --parents arch/x86/purgatory/stack.S $kerneldir/build/
- cp -a --parents arch/x86/purgatory/string.c $kerneldir/build/ 2>/dev/null || :
- cp -a --parents arch/x86/purgatory/setup-x86_64.S $kerneldir/build/
- cp -a --parents arch/x86/purgatory/entry64.S $kerneldir/build/
- cp -a --parents arch/x86/boot/string.h $kerneldir/build/
- cp -a --parents arch/x86/boot/string.c $kerneldir/build/
- cp -a --parents arch/x86/boot/compressed/string.c $kerneldir/build/ 2>/dev/null || :
- cp -a --parents arch/x86/boot/ctype.h $kerneldir/build/
-
- # objtool requires these files
- cp -a --parents arch/x86/lib/inat.c $kerneldir/build/ 2>/dev/null || :
- cp -a --parents arch/x86/lib/insn.c $kerneldir/build/ 2>/dev/null || :
- fi
-
- if [ "${ARCH}" = "mips" ]; then
- cp -a --parents arch/mips/Kbuild.platforms $kerneldir/build/
- cp --parents $(find -type f -name "Platform") $kerneldir/build
- cp --parents arch/mips/boot/tools/relocs* $kerneldir/build
- cp -a --parents arch/mips/kernel/asm-offsets.c $kerneldir/build
- cp -a --parents kernel/time/timeconst.bc $kerneldir/build
- cp -a --parents kernel/bounds.c $kerneldir/build
- cp -a --parents Kbuild $kerneldir/build
- cp -a --parents arch/mips/kernel/syscalls/*.sh $kerneldir/build 2>/dev/null || :
- cp -a --parents arch/mips/kernel/syscalls/*.tbl $kerneldir/build 2>/dev/null || :
- cp -a --parents arch/mips/tools/elf-entry.c $kerneldir/build 2>/dev/null || :
- fi
+ # v6.1+
+ cp -a --parents arch/arm/kernel/asm-offsets.c $kerneldir/build/ 2>/dev/null || :
+ cp -a --parents arch/arm/kernel/signal.h $kerneldir/build/ 2>/dev/null || :
+ fi
+
+ if [ -d arch/${ARCH}/include ]; then
+ cp -a --parents arch/${ARCH}/include $kerneldir/build/
+ fi
+
+ cp -a include $kerneldir/build
+
+ cp -a --parents lib/vdso/* $kerneldir/build/ 2>/dev/null || :
+
+ cp -a --parents tools/include/tools/le_byteshift.h $kerneldir/build/
+ cp -a --parents tools/include/tools/be_byteshift.h $kerneldir/build/
+
+ # required for generate missing syscalls prepare phase
+ cp -a --parents $(find arch/x86 -type f -name "syscall_32.tbl") $kerneldir/build
+ cp -a --parents $(find arch/arm -type f -name "*.tbl") $kerneldir/build 2>/dev/null || :
+
+ if [ "${ARCH}" = "x86" ]; then
+ # files for 'make prepare' to succeed with kernel-devel
+ cp -a --parents $(find arch/x86 -type f -name "syscall_32.tbl") $kerneldir/build/ 2>/dev/null || :
+ cp -a --parents $(find arch/x86 -type f -name "syscalltbl.sh") $kerneldir/build/ 2>/dev/null || :
+ cp -a --parents $(find arch/x86 -type f -name "syscallhdr.sh") $kerneldir/build/ 2>/dev/null || :
+ cp -a --parents $(find arch/x86 -type f -name "syscall_64.tbl") $kerneldir/build/ 2>/dev/null || :
+ cp -a --parents arch/x86/tools/relocs_32.c $kerneldir/build/
+ cp -a --parents arch/x86/tools/relocs_64.c $kerneldir/build/
+ cp -a --parents arch/x86/tools/relocs.c $kerneldir/build/
+ cp -a --parents arch/x86/tools/relocs_common.c $kerneldir/build/
+ cp -a --parents arch/x86/tools/relocs.h $kerneldir/build/
+ cp -a --parents arch/x86/tools/gen-insn-attr-x86.awk $kerneldir/build/ 2>/dev/null || :
+ cp -a --parents arch/x86/purgatory/purgatory.c $kerneldir/build/
+
+ # 4.18 + have unified the purgatory files, so we ignore any errors if
+ # these files are not present
+ cp -a --parents arch/x86/purgatory/sha256.h $kerneldir/build/ 2>/dev/null || :
+ cp -a --parents arch/x86/purgatory/sha256.c $kerneldir/build/ 2>/dev/null || :
+
+ cp -a --parents arch/x86/purgatory/stack.S $kerneldir/build/
+ cp -a --parents arch/x86/purgatory/string.c $kerneldir/build/ 2>/dev/null || :
+ cp -a --parents arch/x86/purgatory/setup-x86_64.S $kerneldir/build/
+ cp -a --parents arch/x86/purgatory/entry64.S $kerneldir/build/
+ cp -a --parents arch/x86/boot/string.h $kerneldir/build/
+ cp -a --parents arch/x86/boot/string.c $kerneldir/build/
+ cp -a --parents arch/x86/boot/compressed/string.c $kerneldir/build/ 2>/dev/null || :
+ cp -a --parents arch/x86/boot/ctype.h $kerneldir/build/
+
+ # objtool requires these files
+ cp -a --parents arch/x86/lib/inat.c $kerneldir/build/ 2>/dev/null || :
+ cp -a --parents arch/x86/lib/insn.c $kerneldir/build/ 2>/dev/null || :
+
+ # v6.1+
+ cp -a --parents arch/x86/kernel/asm-offsets* $kerneldir/build || :
+ # for capabilities.h, vmx.h
+ cp -a --parents arch/x86/kvm/vmx/*.h $kerneldir/build || :
+ # for lapic.h, hyperv.h ....
+ cp -a --parents arch/x86/kvm/*.h $kerneldir/build || :
+ fi
+
+ # moved from arch/mips to all arches for v6.1+
+ cp -a --parents kernel/time/timeconst.bc $kerneldir/build 2>/dev/null || :
+ cp -a --parents kernel/bounds.c $kerneldir/build 2>/dev/null || :
+
+ if [ "${ARCH}" = "mips" ]; then
+ cp -a --parents arch/mips/Kbuild.platforms $kerneldir/build/
+ cp --parents $(find -type f -name "Platform") $kerneldir/build
+ cp --parents arch/mips/boot/tools/relocs* $kerneldir/build
+ cp -a --parents arch/mips/kernel/asm-offsets.c $kerneldir/build
+ cp -a --parents Kbuild $kerneldir/build
+ cp -a --parents arch/mips/kernel/syscalls/*.sh $kerneldir/build 2>/dev/null || :
+ cp -a --parents arch/mips/kernel/syscalls/*.tbl $kerneldir/build 2>/dev/null || :
+ cp -a --parents arch/mips/tools/elf-entry.c $kerneldir/build 2>/dev/null || :
+ fi
+
+ if [ "${ARCH}" = "loongarch" ]; then
+ cp -a --parents arch/loongarch/kernel/asm-offsets.c $kerneldir/build
+ cp -a --parents Kbuild $kerneldir/build
+ cp -a --parents arch/loongarch/vdso/*.S $kerneldir/build 2>/dev/null || :
+ cp -a --parents arch/loongarch/vdso/*gettimeofday.* $kerneldir/build 2>/dev/null || :
+ cp -a --parents arch/loongarch/vdso/*getcpu.* $kerneldir/build 2>/dev/null || :
+ cp -a --parents arch/loongarch/vdso/gen_vdso*_offsets.sh $kerneldir/build/ 2>/dev/null || :
+ fi
# required to build scripts/selinux/genheaders/genheaders
cp -a --parents security/selinux/include/* $kerneldir/build/
- # copy any localversion files
- cp -a localversion* $kerneldir/build/ 2>/dev/null || :
+ # copy any localversion files
+ cp -a localversion* $kerneldir/build/ 2>/dev/null || :
)
# Make sure the Makefile and version.h have a matching timestamp so that
# external modules can be built
touch -r $kerneldir/build/Makefile $kerneldir/build/include/generated/uapi/linux/version.h
- # Copy .config to include/config/auto.conf so "make prepare" is unnecessary.
- cp $kerneldir/build/.config $kerneldir/build/include/config/auto.conf
+ # This fixes a warning that the compilers don't match when building a module
+ # Change: CONFIG_CC_VERSION_TEXT="x86_64-poky-linux-gcc (GCC) 12.2.0" to "gcc (GCC) 12.2.0"
+ # #define CONFIG_CC_VERSION_TEXT "x86_64-poky-linux-gcc (GCC) 12.2.0" to "gcc (GCC) 12.2.0"
+ sed -i 's/CONFIG_CC_VERSION_TEXT=".*\(gcc.*\)"/CONFIG_CC_VERSION_TEXT="\1"/' "$kerneldir/build/.config"
+ sed -i 's/#define CONFIG_CC_VERSION_TEXT ".*\(gcc.*\)"/#define CONFIG_CC_VERSION_TEXT "\1"/' $kerneldir/build/include/generated/autoconf.h
+ sed -i 's/CONFIG_CC_VERSION_TEXT=".*\(gcc.*\)"/CONFIG_CC_VERSION_TEXT="\1"/' $kerneldir/build/include/config/auto.conf
# make sure these are at least as old as the .config, or rebuilds will trigger
touch -r $kerneldir/build/.config $kerneldir/build/include/generated/autoconf.h 2>/dev/null || :
@@ -316,9 +379,9 @@ do_install() {
# left as /usr/bin/python rootfs assembly will fail, since we only have python3
# in the RDEPENDS (and the python3 package does not include /usr/bin/python)
for ss in $(find $kerneldir/build/scripts -type f -name '*'); do
- sed -i 's,/usr/bin/python2,/usr/bin/env python3,' "$ss"
- sed -i 's,/usr/bin/env python2,/usr/bin/env python3,' "$ss"
- sed -i 's,/usr/bin/python,/usr/bin/env python3,' "$ss"
+ sed -i 's,/usr/bin/python2,/usr/bin/env python3,' "$ss"
+ sed -i 's,/usr/bin/env python2,/usr/bin/env python3,' "$ss"
+ sed -i 's,/usr/bin/python,/usr/bin/env python3,' "$ss"
done
chown -R root:root ${D}
@@ -330,14 +393,14 @@ do_install[lockfiles] = "${TMPDIR}/kernel-scripts.lock"
FILES:${PN} = "${KERNEL_BUILD_ROOT} ${KERNEL_SRC_PATH}"
FILES:${PN}-dbg += "${KERNEL_BUILD_ROOT}*/build/scripts/*/.debug/*"
-RDEPENDS:${PN} = "bc python3 flex bison ${TCLIBC}-utils"
+RDEPENDS:${PN} = "bc python3-core flex bison ${TCLIBC}-utils gawk"
# 4.15+ needs these next two RDEPENDS
RDEPENDS:${PN} += "openssl-dev util-linux"
# and x86 needs a bit more for 4.15+
RDEPENDS:${PN} += "${@bb.utils.contains('ARCH', 'x86', 'elfutils-dev', '', d)}"
+# powerpc needs elfutils on 6.3+
+RDEPENDS:${PN} += "${@bb.utils.contains('ARCH', 'powerpc', 'elfutils-dev', '', d)}"
# 5.8+ needs gcc-plugins libmpc-dev
RDEPENDS:${PN} += "gcc-plugins libmpc-dev"
-# 5.13+ needs awk for arm64
-RDEPENDS:${PN}:append:aarch64 = " gawk"
# 5.13+ needs grep for powerpc
RDEPENDS:${PN}:append:powerpc = " grep"
diff --git a/meta/recipes-kernel/linux/linux-dummy.bb b/meta/recipes-kernel/linux/linux-dummy.bb
index 7c46f80d8d..2396f46202 100644
--- a/meta/recipes-kernel/linux/linux-dummy.bb
+++ b/meta/recipes-kernel/linux/linux-dummy.bb
@@ -28,7 +28,6 @@ INHIBIT_DEFAULT_DEPS = "1"
COMPATIBLE_HOST = ".*-linux"
-PR = "r1"
SRC_URI = "file://COPYING.GPL"
S = "${WORKDIR}"
diff --git a/meta/recipes-kernel/linux/linux-yocto-dev.bb b/meta/recipes-kernel/linux/linux-yocto-dev.bb
index d35632071b..d5cba42a5c 100644
--- a/meta/recipes-kernel/linux/linux-yocto-dev.bb
+++ b/meta/recipes-kernel/linux/linux-yocto-dev.bb
@@ -10,17 +10,15 @@
inherit kernel
require recipes-kernel/linux/linux-yocto.inc
-# for ncurses tests
-inherit pkgconfig
# provide this .inc to set specific revisions
include recipes-kernel/linux/linux-yocto-dev-revisions.inc
-KBRANCH = "v5.18/standard/base"
+KBRANCH = "v6.9/standard/base"
KMETA = "kernel-meta"
-SRC_URI = "git://git.yoctoproject.org/linux-yocto-dev.git;branch=${KBRANCH};name=machine \
- git://git.yoctoproject.org/yocto-kernel-cache;type=kmeta;name=meta;branch=master;destsuffix=${KMETA}"
+SRC_URI = "git://git.yoctoproject.org/linux-yocto-dev.git;branch=${KBRANCH};name=machine;protocol=https \
+ git://git.yoctoproject.org/yocto-kernel-cache;type=kmeta;name=meta;branch=master;destsuffix=${KMETA};protocol=https"
# Set default SRCREVs. Both the machine and meta SRCREVs are statically set
# to the korg v3.7 tag, and hence prevent network access during parsing. If
@@ -30,16 +28,12 @@ SRC_URI = "git://git.yoctoproject.org/linux-yocto-dev.git;branch=${KBRANCH};name
SRCREV_machine ?= '${@oe.utils.conditional("PREFERRED_PROVIDER_virtual/kernel", "linux-yocto-dev", "${AUTOREV}", "29594404d7fe73cd80eaa4ee8c43dcc53970c60e", d)}'
SRCREV_meta ?= '${@oe.utils.conditional("PREFERRED_PROVIDER_virtual/kernel", "linux-yocto-dev", "${AUTOREV}", "29594404d7fe73cd80eaa4ee8c43dcc53970c60e", d)}'
-LINUX_VERSION ?= "5.18+"
+LINUX_VERSION ?= "6.9"
LINUX_VERSION_EXTENSION ?= "-yoctodev-${LINUX_KERNEL_TYPE}"
-PV = "${LINUX_VERSION}+git${SRCPV}"
+PV = "${LINUX_VERSION}+git"
LIC_FILES_CHKSUM = "file://COPYING;md5=6bc538ed5bd9a7fc9398086aedcd7e46"
-DEPENDS += "${@bb.utils.contains('ARCH', 'x86', 'elfutils-native', '', d)}"
-DEPENDS += "openssl-native util-linux-native"
-DEPENDS += "gmp-native libmpc-native"
-
# yaml and dtschema are required for 5.16+ device tree validation, libyaml is checked
# via pkgconfig, so must always be present, but we can wrap the others to make them
# conditional
@@ -50,9 +44,9 @@ PACKAGECONFIG[dt-validation] = ",,python3-dtschema-native"
# we need the wrappers if validation isn't in the packageconfig
DEPENDS += "${@bb.utils.contains('PACKAGECONFIG', 'dt-validation', '', 'python3-dtschema-wrapper-native', d)}"
-COMPATIBLE_MACHINE = "(qemuarm|qemux86|qemuppc|qemumips|qemumips64|qemux86-64|qemuriscv32|qemuriscv64)"
+COMPATIBLE_MACHINE = "^(qemuarmv5|qemuarm|qemuarm64|qemux86|qemuppc|qemumips|qemumips64|qemux86-64|qemuriscv32|qemuriscv64|qemuloongarch64)$"
-KERNEL_DEVICETREE:qemuarmv5 = "versatile-pb.dtb"
+KERNEL_DEVICETREE:qemuarmv5 = "arm/versatile-pb.dtb"
# Functionality flags
KERNEL_EXTRA_FEATURES ?= "features/netfilter/netfilter.scc features/taskstats/taskstats.scc"
@@ -62,6 +56,6 @@ KERNEL_FEATURES:append:qemux86=" cfg/sound.scc cfg/paravirt_kvm.scc"
KERNEL_FEATURES:append:qemux86-64=" cfg/sound.scc cfg/paravirt_kvm.scc"
KERNEL_FEATURES:append = " ${@bb.utils.contains("TUNE_FEATURES", "mx32", " cfg/x32.scc", "", d)}"
KERNEL_FEATURES:append = " ${@bb.utils.contains("DISTRO_FEATURES", "ptest", " features/scsi/scsi-debug.scc", "", d)}"
-KERNEL_FEATURES:append = " ${@bb.utils.contains("DISTRO_FEATURES", "ptest", " features/gpio/mockup.scc", "", d)}"
+KERNEL_FEATURES:append = " ${@bb.utils.contains("DISTRO_FEATURES", "ptest", " features/gpio/mockup.scc features/gpio/sim.scc", "", d)}"
KERNEL_VERSION_SANITY_SKIP = "1"
diff --git a/meta/recipes-kernel/linux/linux-yocto-rt_5.10.bb b/meta/recipes-kernel/linux/linux-yocto-rt_5.10.bb
deleted file mode 100644
index ad206e4235..0000000000
--- a/meta/recipes-kernel/linux/linux-yocto-rt_5.10.bb
+++ /dev/null
@@ -1,45 +0,0 @@
-KBRANCH ?= "v5.10/standard/preempt-rt/base"
-
-require recipes-kernel/linux/linux-yocto.inc
-
-# Skip processing of this recipe if it is not explicitly specified as the
-# PREFERRED_PROVIDER for virtual/kernel. This avoids errors when trying
-# to build multiple virtual/kernel providers, e.g. as dependency of
-# core-image-rt-sdk, core-image-rt.
-python () {
- if d.getVar("KERNEL_PACKAGE_NAME") == "kernel" and d.getVar("PREFERRED_PROVIDER_virtual/kernel") != "linux-yocto-rt":
- raise bb.parse.SkipRecipe("Set PREFERRED_PROVIDER_virtual/kernel to linux-yocto-rt to enable it")
-}
-
-SRCREV_machine ?= "a8d19259c63cbf1254fe545d0a1aacf8f82547ac"
-SRCREV_meta ?= "b368b4c1c8ee0e0e7573caa7f1596c3385ad855f"
-
-SRC_URI = "git://git.yoctoproject.org/linux-yocto.git;branch=${KBRANCH};name=machine \
- git://git.yoctoproject.org/yocto-kernel-cache;type=kmeta;name=meta;branch=yocto-5.10;destsuffix=${KMETA}"
-
-LINUX_VERSION ?= "5.10.114"
-
-LIC_FILES_CHKSUM = "file://COPYING;md5=6bc538ed5bd9a7fc9398086aedcd7e46"
-
-DEPENDS += "${@bb.utils.contains('ARCH', 'x86', 'elfutils-native', '', d)}"
-DEPENDS += "openssl-native util-linux-native"
-
-PV = "${LINUX_VERSION}+git${SRCPV}"
-
-KMETA = "kernel-meta"
-KCONF_BSP_AUDIT_LEVEL = "1"
-
-LINUX_KERNEL_TYPE = "preempt-rt"
-
-COMPATIBLE_MACHINE = "(qemux86|qemux86-64|qemuarm|qemuarmv5|qemuarm64|qemuppc|qemumips)"
-
-KERNEL_DEVICETREE:qemuarmv5 = "versatile-pb.dtb"
-
-# Functionality flags
-KERNEL_EXTRA_FEATURES ?= "features/netfilter/netfilter.scc features/taskstats/taskstats.scc"
-KERNEL_FEATURES:append = " ${KERNEL_EXTRA_FEATURES}"
-KERNEL_FEATURES:append:qemuall=" cfg/virtio.scc features/drm-bochs/drm-bochs.scc"
-KERNEL_FEATURES:append:qemux86=" cfg/sound.scc cfg/paravirt_kvm.scc"
-KERNEL_FEATURES:append:qemux86-64=" cfg/sound.scc cfg/paravirt_kvm.scc"
-KERNEL_FEATURES:append = "${@bb.utils.contains("DISTRO_FEATURES", "ptest", " features/scsi/scsi-debug.scc", "", d)}"
-KERNEL_FEATURES:append = "${@bb.utils.contains("DISTRO_FEATURES", "ptest", " features/gpio/mockup.scc", "", d)}"
diff --git a/meta/recipes-kernel/linux/linux-yocto-rt_5.15.bb b/meta/recipes-kernel/linux/linux-yocto-rt_5.15.bb
deleted file mode 100644
index 6bfb8deb1e..0000000000
--- a/meta/recipes-kernel/linux/linux-yocto-rt_5.15.bb
+++ /dev/null
@@ -1,45 +0,0 @@
-KBRANCH ?= "v5.15/standard/preempt-rt/base"
-
-require recipes-kernel/linux/linux-yocto.inc
-
-# Skip processing of this recipe if it is not explicitly specified as the
-# PREFERRED_PROVIDER for virtual/kernel. This avoids errors when trying
-# to build multiple virtual/kernel providers, e.g. as dependency of
-# core-image-rt-sdk, core-image-rt.
-python () {
- if d.getVar("KERNEL_PACKAGE_NAME") == "kernel" and d.getVar("PREFERRED_PROVIDER_virtual/kernel") != "linux-yocto-rt":
- raise bb.parse.SkipRecipe("Set PREFERRED_PROVIDER_virtual/kernel to linux-yocto-rt to enable it")
-}
-
-SRCREV_machine ?= "b028c4baada43d48014d4350dd66debe7e4cf362"
-SRCREV_meta ?= "37891dc371e83a3451781dd81a8a85cccd60084b"
-
-SRC_URI = "git://git.yoctoproject.org/linux-yocto.git;branch=${KBRANCH};name=machine \
- git://git.yoctoproject.org/yocto-kernel-cache;type=kmeta;name=meta;branch=yocto-5.15;destsuffix=${KMETA}"
-
-LINUX_VERSION ?= "5.15.38"
-
-LIC_FILES_CHKSUM = "file://COPYING;md5=6bc538ed5bd9a7fc9398086aedcd7e46"
-
-DEPENDS += "${@bb.utils.contains('ARCH', 'x86', 'elfutils-native', '', d)}"
-DEPENDS += "openssl-native util-linux-native"
-
-PV = "${LINUX_VERSION}+git${SRCPV}"
-
-KMETA = "kernel-meta"
-KCONF_BSP_AUDIT_LEVEL = "1"
-
-LINUX_KERNEL_TYPE = "preempt-rt"
-
-COMPATIBLE_MACHINE = "(qemux86|qemux86-64|qemuarm|qemuarmv5|qemuarm64|qemuppc|qemumips)"
-
-KERNEL_DEVICETREE:qemuarmv5 = "versatile-pb.dtb"
-
-# Functionality flags
-KERNEL_EXTRA_FEATURES ?= "features/netfilter/netfilter.scc features/taskstats/taskstats.scc"
-KERNEL_FEATURES:append = " ${KERNEL_EXTRA_FEATURES}"
-KERNEL_FEATURES:append:qemuall=" cfg/virtio.scc features/drm-bochs/drm-bochs.scc"
-KERNEL_FEATURES:append:qemux86=" cfg/sound.scc cfg/paravirt_kvm.scc"
-KERNEL_FEATURES:append:qemux86-64=" cfg/sound.scc cfg/paravirt_kvm.scc"
-KERNEL_FEATURES:append = "${@bb.utils.contains("DISTRO_FEATURES", "ptest", " features/scsi/scsi-debug.scc", "", d)}"
-KERNEL_FEATURES:append = "${@bb.utils.contains("DISTRO_FEATURES", "ptest", " features/gpio/mockup.scc", "", d)}"
diff --git a/meta/recipes-kernel/linux/linux-yocto-rt_6.6.bb b/meta/recipes-kernel/linux/linux-yocto-rt_6.6.bb
new file mode 100644
index 0000000000..a44a08451a
--- /dev/null
+++ b/meta/recipes-kernel/linux/linux-yocto-rt_6.6.bb
@@ -0,0 +1,48 @@
+KBRANCH ?= "v6.6/standard/preempt-rt/base"
+
+require recipes-kernel/linux/linux-yocto.inc
+
+# CVE exclusions
+include recipes-kernel/linux/cve-exclusion_6.6.inc
+
+# Skip processing of this recipe if it is not explicitly specified as the
+# PREFERRED_PROVIDER for virtual/kernel. This avoids errors when trying
+# to build multiple virtual/kernel providers, e.g. as dependency of
+# core-image-rt-sdk, core-image-rt.
+python () {
+ if d.getVar("KERNEL_PACKAGE_NAME") == "kernel" and d.getVar("PREFERRED_PROVIDER_virtual/kernel") != "linux-yocto-rt":
+ raise bb.parse.SkipRecipe("Set PREFERRED_PROVIDER_virtual/kernel to linux-yocto-rt to enable it")
+}
+
+SRCREV_machine ?= "19813826de57a6425518c7b3daf8dd6a04d2321f"
+SRCREV_meta ?= "f7f00b22efcfcae6489e9ec7db7002685fbc078b"
+
+SRC_URI = "git://git.yoctoproject.org/linux-yocto.git;branch=${KBRANCH};name=machine;protocol=https \
+ git://git.yoctoproject.org/yocto-kernel-cache;type=kmeta;name=meta;branch=yocto-6.6;destsuffix=${KMETA};protocol=https"
+
+LINUX_VERSION ?= "6.6.23"
+
+LIC_FILES_CHKSUM = "file://COPYING;md5=6bc538ed5bd9a7fc9398086aedcd7e46"
+
+DEPENDS += "${@bb.utils.contains('ARCH', 'x86', 'elfutils-native', '', d)}"
+DEPENDS += "openssl-native util-linux-native"
+
+PV = "${LINUX_VERSION}+git"
+
+KMETA = "kernel-meta"
+KCONF_BSP_AUDIT_LEVEL = "1"
+
+LINUX_KERNEL_TYPE = "preempt-rt"
+
+COMPATIBLE_MACHINE = "^(qemux86|qemux86-64|qemuarm|qemuarmv5|qemuarm64|qemuppc|qemumips)$"
+
+KERNEL_DEVICETREE:qemuarmv5 = "arm/versatile-pb.dtb"
+
+# Functionality flags
+KERNEL_EXTRA_FEATURES ?= "features/netfilter/netfilter.scc features/taskstats/taskstats.scc"
+KERNEL_FEATURES:append = " ${KERNEL_EXTRA_FEATURES}"
+KERNEL_FEATURES:append:qemuall=" cfg/virtio.scc features/drm-bochs/drm-bochs.scc"
+KERNEL_FEATURES:append:qemux86=" cfg/sound.scc cfg/paravirt_kvm.scc"
+KERNEL_FEATURES:append:qemux86-64=" cfg/sound.scc cfg/paravirt_kvm.scc"
+KERNEL_FEATURES:append = "${@bb.utils.contains("DISTRO_FEATURES", "ptest", " features/scsi/scsi-debug.scc", "", d)}"
+KERNEL_FEATURES:append = "${@bb.utils.contains("DISTRO_FEATURES", "ptest", " features/gpio/mockup.scc features/gpio/sim.scc", "", d)}"
diff --git a/meta/recipes-kernel/linux/linux-yocto-tiny_5.10.bb b/meta/recipes-kernel/linux/linux-yocto-tiny_5.10.bb
deleted file mode 100644
index 9a8e6ecffc..0000000000
--- a/meta/recipes-kernel/linux/linux-yocto-tiny_5.10.bb
+++ /dev/null
@@ -1,32 +0,0 @@
-KBRANCH ?= "v5.10/standard/tiny/base"
-KBRANCH:qemuarm ?= "v5.10/standard/tiny/arm-versatile-926ejs"
-
-LINUX_KERNEL_TYPE = "tiny"
-KCONFIG_MODE = "--allnoconfig"
-
-require recipes-kernel/linux/linux-yocto.inc
-
-LINUX_VERSION ?= "5.10.114"
-LIC_FILES_CHKSUM = "file://COPYING;md5=6bc538ed5bd9a7fc9398086aedcd7e46"
-
-DEPENDS += "${@bb.utils.contains('ARCH', 'x86', 'elfutils-native', '', d)}"
-DEPENDS += "openssl-native util-linux-native"
-
-KMETA = "kernel-meta"
-KCONF_BSP_AUDIT_LEVEL = "2"
-
-SRCREV_machine:qemuarm ?= "a0664baa09b1c6265bd404e4d3a7ceb14577dcb8"
-SRCREV_machine ?= "63b2e48029f00bfc90371adc378050181ba5d253"
-SRCREV_meta ?= "b368b4c1c8ee0e0e7573caa7f1596c3385ad855f"
-
-PV = "${LINUX_VERSION}+git${SRCPV}"
-
-SRC_URI = "git://git.yoctoproject.org/linux-yocto.git;branch=${KBRANCH};name=machine \
- git://git.yoctoproject.org/yocto-kernel-cache;type=kmeta;name=meta;branch=yocto-5.10;destsuffix=${KMETA}"
-
-COMPATIBLE_MACHINE = "qemux86|qemux86-64|qemuarm|qemuarmv5"
-
-# Functionality flags
-KERNEL_FEATURES = ""
-
-KERNEL_DEVICETREE:qemuarmv5 = "versatile-pb.dtb"
diff --git a/meta/recipes-kernel/linux/linux-yocto-tiny_5.15.bb b/meta/recipes-kernel/linux/linux-yocto-tiny_5.15.bb
deleted file mode 100644
index 0cf2d68d56..0000000000
--- a/meta/recipes-kernel/linux/linux-yocto-tiny_5.15.bb
+++ /dev/null
@@ -1,30 +0,0 @@
-KBRANCH ?= "v5.15/standard/tiny/base"
-
-LINUX_KERNEL_TYPE = "tiny"
-KCONFIG_MODE = "--allnoconfig"
-
-require recipes-kernel/linux/linux-yocto.inc
-
-LINUX_VERSION ?= "5.15.38"
-LIC_FILES_CHKSUM = "file://COPYING;md5=6bc538ed5bd9a7fc9398086aedcd7e46"
-
-DEPENDS += "${@bb.utils.contains('ARCH', 'x86', 'elfutils-native', '', d)}"
-DEPENDS += "openssl-native util-linux-native"
-
-KMETA = "kernel-meta"
-KCONF_BSP_AUDIT_LEVEL = "2"
-
-SRCREV_machine ?= "7b1fe61102fd6efc26373305417cd683687eb7a0"
-SRCREV_meta ?= "37891dc371e83a3451781dd81a8a85cccd60084b"
-
-PV = "${LINUX_VERSION}+git${SRCPV}"
-
-SRC_URI = "git://git.yoctoproject.org/linux-yocto.git;branch=${KBRANCH};name=machine \
- git://git.yoctoproject.org/yocto-kernel-cache;type=kmeta;name=meta;branch=yocto-5.15;destsuffix=${KMETA}"
-
-COMPATIBLE_MACHINE = "qemux86|qemux86-64|qemuarm64|qemuarm|qemuarmv5"
-
-# Functionality flags
-KERNEL_FEATURES = ""
-
-KERNEL_DEVICETREE:qemuarmv5 = "versatile-pb.dtb"
diff --git a/meta/recipes-kernel/linux/linux-yocto-tiny_6.6.bb b/meta/recipes-kernel/linux/linux-yocto-tiny_6.6.bb
new file mode 100644
index 0000000000..db9e252572
--- /dev/null
+++ b/meta/recipes-kernel/linux/linux-yocto-tiny_6.6.bb
@@ -0,0 +1,33 @@
+KBRANCH ?= "v6.6/standard/tiny/base"
+
+LINUX_KERNEL_TYPE = "tiny"
+KCONFIG_MODE = "--allnoconfig"
+
+require recipes-kernel/linux/linux-yocto.inc
+
+# CVE exclusions
+include recipes-kernel/linux/cve-exclusion_6.6.inc
+
+LINUX_VERSION ?= "6.6.23"
+LIC_FILES_CHKSUM = "file://COPYING;md5=6bc538ed5bd9a7fc9398086aedcd7e46"
+
+DEPENDS += "${@bb.utils.contains('ARCH', 'x86', 'elfutils-native', '', d)}"
+DEPENDS += "openssl-native util-linux-native"
+
+KMETA = "kernel-meta"
+KCONF_BSP_AUDIT_LEVEL = "2"
+
+SRCREV_machine ?= "2d01bc1d4eeade12518371139dd24a21438f523c"
+SRCREV_meta ?= "f7f00b22efcfcae6489e9ec7db7002685fbc078b"
+
+PV = "${LINUX_VERSION}+git"
+
+SRC_URI = "git://git.yoctoproject.org/linux-yocto.git;branch=${KBRANCH};name=machine;protocol=https \
+ git://git.yoctoproject.org/yocto-kernel-cache;type=kmeta;name=meta;branch=yocto-6.6;destsuffix=${KMETA};protocol=https"
+
+COMPATIBLE_MACHINE = "^(qemux86|qemux86-64|qemuarm64|qemuarm|qemuarmv5)$"
+
+# Functionality flags
+KERNEL_FEATURES = ""
+
+KERNEL_DEVICETREE:qemuarmv5 = "arm/versatile-pb.dtb"
diff --git a/meta/recipes-kernel/linux/linux-yocto.inc b/meta/recipes-kernel/linux/linux-yocto.inc
index cabc8f4975..0132fcffb3 100644
--- a/meta/recipes-kernel/linux/linux-yocto.inc
+++ b/meta/recipes-kernel/linux/linux-yocto.inc
@@ -31,8 +31,9 @@ DEPENDS:append:arc = " libgcc"
KERNEL_CC:append:arc = " ${TOOLCHAIN_OPTIONS}"
KERNEL_LD:append:arc = " ${TOOLCHAIN_OPTIONS}"
-KERNEL_FEATURES:append:qemuall=" features/debug/printk.scc"
+KERNEL_FEATURES:append:qemuall=" features/debug/printk.scc features/taskstats/taskstats.scc"
+KERNEL_FEATURES:append = " ${@bb.utils.contains('MACHINE_FEATURES', 'efi', 'cfg/efi.scc', '', d)}"
KERNEL_FEATURES:append = " ${@bb.utils.contains('MACHINE_FEATURES', 'numa', 'features/numa/numa.scc', '', d)}"
KERNEL_FEATURES:append = " ${@bb.utils.contains('MACHINE_FEATURES', 'vfat', 'cfg/fs/vfat.scc', '', d)}"
@@ -46,7 +47,6 @@ LINUX_VERSION_EXTENSION ??= "-yocto-${LINUX_KERNEL_TYPE}"
# Pick up shared functions
inherit kernel
inherit kernel-yocto
-inherit pkgconfig
B = "${WORKDIR}/linux-${PACKAGE_ARCH}-${LINUX_KERNEL_TYPE}-build"
@@ -59,8 +59,17 @@ do_install:append(){
# enable kernel-sample for oeqa/runtime/cases's ksample.py test
KERNEL_FEATURES:append:qemuall=" features/kernel-sample/kernel-sample.scc"
-KERNEL_DEBUG_OPTIONS ?= "stack"
-KERNEL_EXTRA_ARGS:append:x86-64 = "${@bb.utils.contains('KERNEL_DEBUG_OPTIONS', 'stack', 'HOST_LIBELF_LIBS="-L${RECIPE_SYSROOT_NATIVE}/usr/lib/pkgconfig/../../../usr/lib/ -lelf"', '', d)}"
+KERNEL_DEBUG ?= ""
+# These used to be version specific, but are now common dependencies. New
+# tools / dependencies will continue to be added in version specific recipes.
+DEPENDS += '${@bb.utils.contains_any("ARCH", [ "x86", "arm64", "powerpc" ], "elfutils-native", "", d)}'
+DEPENDS += "openssl-native util-linux-native"
+DEPENDS += "gmp-native libmpc-native"
+
+# Some options depend on CONFIG_PAHOLE_VERSION, so need to make pahole-native available before do_kernel_configme
+do_kernel_configme[depends] += '${@bb.utils.contains("KERNEL_DEBUG", "True", "pahole-native:do_populate_sysroot", "", d)}'
+
+EXTRA_OEMAKE += '${@bb.utils.contains("KERNEL_DEBUG", "True", "", "PAHOLE=false", d)}'
do_devshell:prepend() {
# setup native pkg-config variables (kconfig scripts call pkg-config directly, cannot generically be overriden to pkg-config-native)
diff --git a/meta/recipes-kernel/linux/linux-yocto_5.10.bb b/meta/recipes-kernel/linux/linux-yocto_5.10.bb
deleted file mode 100644
index ffd9538633..0000000000
--- a/meta/recipes-kernel/linux/linux-yocto_5.10.bb
+++ /dev/null
@@ -1,58 +0,0 @@
-KBRANCH ?= "v5.10/standard/base"
-
-require recipes-kernel/linux/linux-yocto.inc
-
-# board specific branches
-KBRANCH:qemuarm ?= "v5.10/standard/arm-versatile-926ejs"
-KBRANCH:qemuarm64 ?= "v5.10/standard/qemuarm64"
-KBRANCH:qemumips ?= "v5.10/standard/mti-malta32"
-KBRANCH:qemuppc ?= "v5.10/standard/qemuppc"
-KBRANCH:qemuriscv64 ?= "v5.10/standard/base"
-KBRANCH:qemuriscv32 ?= "v5.10/standard/base"
-KBRANCH:qemux86 ?= "v5.10/standard/base"
-KBRANCH:qemux86-64 ?= "v5.10/standard/base"
-KBRANCH:qemumips64 ?= "v5.10/standard/mti-malta64"
-
-SRCREV_machine:qemuarm ?= "49e2af0d85dc824c7829427c18e06ad2b1abbb8d"
-SRCREV_machine:qemuarm64 ?= "2b403ec0a7f60be807890902dbd0d5467bdef545"
-SRCREV_machine:qemumips ?= "793de0c2e4a3aa0e2f3091af8ff06df35f156fb3"
-SRCREV_machine:qemuppc ?= "9875eb3923e3d20893774ee7304142e2ce5ab22b"
-SRCREV_machine:qemuriscv64 ?= "8ce5741d3f6c0aa649aca5f89ee8d1022f530437"
-SRCREV_machine:qemuriscv32 ?= "8ce5741d3f6c0aa649aca5f89ee8d1022f530437"
-SRCREV_machine:qemux86 ?= "8ce5741d3f6c0aa649aca5f89ee8d1022f530437"
-SRCREV_machine:qemux86-64 ?= "8ce5741d3f6c0aa649aca5f89ee8d1022f530437"
-SRCREV_machine:qemumips64 ?= "4c3d57cc8bec91ea6225a7c9f134fd97bc1e8e0d"
-SRCREV_machine ?= "8ce5741d3f6c0aa649aca5f89ee8d1022f530437"
-SRCREV_meta ?= "b368b4c1c8ee0e0e7573caa7f1596c3385ad855f"
-
-SRC_URI = "git://git.yoctoproject.org/linux-yocto.git;name=machine;branch=${KBRANCH}; \
- git://git.yoctoproject.org/yocto-kernel-cache;type=kmeta;name=meta;branch=yocto-5.10;destsuffix=${KMETA}"
-
-LIC_FILES_CHKSUM = "file://COPYING;md5=6bc538ed5bd9a7fc9398086aedcd7e46"
-LINUX_VERSION ?= "5.10.114"
-
-DEPENDS += "${@bb.utils.contains('ARCH', 'x86', 'elfutils-native', '', d)}"
-DEPENDS += "openssl-native util-linux-native"
-DEPENDS += "gmp-native libmpc-native"
-
-PV = "${LINUX_VERSION}+git${SRCPV}"
-
-KMETA = "kernel-meta"
-KCONF_BSP_AUDIT_LEVEL = "1"
-
-KERNEL_DEVICETREE:qemuarmv5 = "versatile-pb.dtb"
-
-COMPATIBLE_MACHINE = "qemuarm|qemuarmv5|qemuarm64|qemux86|qemuppc|qemuppc64|qemumips|qemumips64|qemux86-64|qemuriscv64|qemuriscv32"
-
-# Functionality flags
-KERNEL_EXTRA_FEATURES ?= "features/netfilter/netfilter.scc"
-KERNEL_FEATURES:append = " ${KERNEL_EXTRA_FEATURES}"
-KERNEL_FEATURES:append:qemuall=" cfg/virtio.scc features/drm-bochs/drm-bochs.scc"
-KERNEL_FEATURES:append:qemux86=" cfg/sound.scc cfg/paravirt_kvm.scc"
-KERNEL_FEATURES:append:qemux86-64=" cfg/sound.scc cfg/paravirt_kvm.scc"
-KERNEL_FEATURES:append:powerpc =" arch/powerpc/powerpc-debug.scc"
-KERNEL_FEATURES:append:powerpc64 =" arch/powerpc/powerpc-debug.scc"
-KERNEL_FEATURES:append:powerpc64le =" arch/powerpc/powerpc-debug.scc"
-KERNEL_FEATURES:append = " ${@bb.utils.contains("TUNE_FEATURES", "mx32", " cfg/x32.scc", "", d)}"
-KERNEL_FEATURES:append = " ${@bb.utils.contains("DISTRO_FEATURES", "ptest", " features/scsi/scsi-debug.scc", "", d)}"
-KERNEL_FEATURES:append = " ${@bb.utils.contains("DISTRO_FEATURES", "ptest", " features/gpio/mockup.scc", "", d)}"
diff --git a/meta/recipes-kernel/linux/linux-yocto_5.15.bb b/meta/recipes-kernel/linux/linux-yocto_5.15.bb
deleted file mode 100644
index 206cc0692d..0000000000
--- a/meta/recipes-kernel/linux/linux-yocto_5.15.bb
+++ /dev/null
@@ -1,70 +0,0 @@
-KBRANCH ?= "v5.15/standard/base"
-
-require recipes-kernel/linux/linux-yocto.inc
-
-# board specific branches
-KBRANCH:qemuarm ?= "v5.15/standard/arm-versatile-926ejs"
-KBRANCH:qemuarm64 ?= "v5.15/standard/qemuarm64"
-KBRANCH:qemumips ?= "v5.15/standard/mti-malta32"
-KBRANCH:qemuppc ?= "v5.15/standard/qemuppc"
-KBRANCH:qemuriscv64 ?= "v5.15/standard/base"
-KBRANCH:qemuriscv32 ?= "v5.15/standard/base"
-KBRANCH:qemux86 ?= "v5.15/standard/base"
-KBRANCH:qemux86-64 ?= "v5.15/standard/base"
-KBRANCH:qemumips64 ?= "v5.15/standard/mti-malta64"
-
-SRCREV_machine:qemuarm ?= "5a68f2d15d17f0f3c397e7f8c83f3f664f7037e5"
-SRCREV_machine:qemuarm64 ?= "00e666e6154fcdf52268f2a5a612b96afad073b0"
-SRCREV_machine:qemumips ?= "fb9e75076deade31754b7ad644952d63137e616b"
-SRCREV_machine:qemuppc ?= "49f6567f3b85a843e8b6042a79c58aab0bdbd0c9"
-SRCREV_machine:qemuriscv64 ?= "cc9695f5fd3b520464eb2ded66950734f308525c"
-SRCREV_machine:qemuriscv32 ?= "cc9695f5fd3b520464eb2ded66950734f308525c"
-SRCREV_machine:qemux86 ?= "cc9695f5fd3b520464eb2ded66950734f308525c"
-SRCREV_machine:qemux86-64 ?= "cc9695f5fd3b520464eb2ded66950734f308525c"
-SRCREV_machine:qemumips64 ?= "1ad01ab47ec056d4126798f6d57a33b65b2be49c"
-SRCREV_machine ?= "cc9695f5fd3b520464eb2ded66950734f308525c"
-SRCREV_meta ?= "37891dc371e83a3451781dd81a8a85cccd60084b"
-
-# set your preferred provider of linux-yocto to 'linux-yocto-upstream', and you'll
-# get the <version>/base branch, which is pure upstream -stable, and the same
-# meta SRCREV as the linux-yocto-standard builds. Select your version using the
-# normal PREFERRED_VERSION settings.
-BBCLASSEXTEND = "devupstream:target"
-SRCREV_machine:class-devupstream ?= "3fbf24b73f4a5bc8fd39a6b7a29145451c1039ce"
-PN:class-devupstream = "linux-yocto-upstream"
-KBRANCH:class-devupstream = "v5.15/base"
-
-SRC_URI = "git://git.yoctoproject.org/linux-yocto.git;name=machine;branch=${KBRANCH}; \
- git://git.yoctoproject.org/yocto-kernel-cache;type=kmeta;name=meta;branch=yocto-5.15;destsuffix=${KMETA}"
-
-LIC_FILES_CHKSUM = "file://COPYING;md5=6bc538ed5bd9a7fc9398086aedcd7e46"
-LINUX_VERSION ?= "5.15.38"
-
-DEPENDS += "${@bb.utils.contains('ARCH', 'x86', 'elfutils-native', '', d)}"
-DEPENDS += "openssl-native util-linux-native"
-DEPENDS += "gmp-native libmpc-native"
-
-PV = "${LINUX_VERSION}+git${SRCPV}"
-
-KMETA = "kernel-meta"
-KCONF_BSP_AUDIT_LEVEL = "1"
-
-KERNEL_DEVICETREE:qemuarmv5 = "versatile-pb.dtb"
-
-COMPATIBLE_MACHINE = "qemuarm|qemuarmv5|qemuarm64|qemux86|qemuppc|qemuppc64|qemumips|qemumips64|qemux86-64|qemuriscv64|qemuriscv32"
-
-# Functionality flags
-KERNEL_EXTRA_FEATURES ?= "features/netfilter/netfilter.scc"
-KERNEL_FEATURES:append = " ${KERNEL_EXTRA_FEATURES}"
-KERNEL_FEATURES:append:qemuall=" cfg/virtio.scc features/drm-bochs/drm-bochs.scc"
-KERNEL_FEATURES:append:qemux86=" cfg/sound.scc cfg/paravirt_kvm.scc"
-KERNEL_FEATURES:append:qemux86-64=" cfg/sound.scc cfg/paravirt_kvm.scc"
-KERNEL_FEATURES:append = " ${@bb.utils.contains("TUNE_FEATURES", "mx32", " cfg/x32.scc", "", d)}"
-KERNEL_FEATURES:append = " ${@bb.utils.contains("DISTRO_FEATURES", "ptest", " features/scsi/scsi-debug.scc", "", d)}"
-KERNEL_FEATURES:append = " ${@bb.utils.contains("DISTRO_FEATURES", "ptest", " features/gpio/mockup.scc", "", d)}"
-KERNEL_FEATURES:append:powerpc =" arch/powerpc/powerpc-debug.scc"
-KERNEL_FEATURES:append:powerpc64 =" arch/powerpc/powerpc-debug.scc"
-KERNEL_FEATURES:append:powerpc64le =" arch/powerpc/powerpc-debug.scc"
-
-INSANE_SKIP:kernel-vmlinux:qemuppc64 = "textrel"
-
diff --git a/meta/recipes-kernel/linux/linux-yocto_6.6.bb b/meta/recipes-kernel/linux/linux-yocto_6.6.bb
new file mode 100644
index 0000000000..43696db59b
--- /dev/null
+++ b/meta/recipes-kernel/linux/linux-yocto_6.6.bb
@@ -0,0 +1,72 @@
+KBRANCH ?= "v6.6/standard/base"
+
+require recipes-kernel/linux/linux-yocto.inc
+
+# CVE exclusions
+include recipes-kernel/linux/cve-exclusion.inc
+include recipes-kernel/linux/cve-exclusion_6.6.inc
+
+# board specific branches
+KBRANCH:qemuarm ?= "v6.6/standard/arm-versatile-926ejs"
+KBRANCH:qemuarm64 ?= "v6.6/standard/qemuarm64"
+KBRANCH:qemumips ?= "v6.6/standard/mti-malta32"
+KBRANCH:qemuppc ?= "v6.6/standard/qemuppc"
+KBRANCH:qemuriscv64 ?= "v6.6/standard/base"
+KBRANCH:qemuriscv32 ?= "v6.6/standard/base"
+KBRANCH:qemux86 ?= "v6.6/standard/base"
+KBRANCH:qemux86-64 ?= "v6.6/standard/base"
+KBRANCH:qemuloongarch64 ?= "v6.6/standard/base"
+KBRANCH:qemumips64 ?= "v6.6/standard/mti-malta64"
+
+SRCREV_machine:qemuarm ?= "ceb94a85299b59d8840ed7ed392b1d3e4c727678"
+SRCREV_machine:qemuarm64 ?= "2d01bc1d4eeade12518371139dd24a21438f523c"
+SRCREV_machine:qemuloongarch64 ?= "2d01bc1d4eeade12518371139dd24a21438f523c"
+SRCREV_machine:qemumips ?= "c79ffc89f8909f60de52005ef258db9752634eda"
+SRCREV_machine:qemuppc ?= "2d01bc1d4eeade12518371139dd24a21438f523c"
+SRCREV_machine:qemuriscv64 ?= "2d01bc1d4eeade12518371139dd24a21438f523c"
+SRCREV_machine:qemuriscv32 ?= "2d01bc1d4eeade12518371139dd24a21438f523c"
+SRCREV_machine:qemux86 ?= "2d01bc1d4eeade12518371139dd24a21438f523c"
+SRCREV_machine:qemux86-64 ?= "2d01bc1d4eeade12518371139dd24a21438f523c"
+SRCREV_machine:qemumips64 ?= "b0a73fa83073c8d7d7bc917bcbeac88d296ebe38"
+SRCREV_machine ?= "2d01bc1d4eeade12518371139dd24a21438f523c"
+SRCREV_meta ?= "f7f00b22efcfcae6489e9ec7db7002685fbc078b"
+
+# set your preferred provider of linux-yocto to 'linux-yocto-upstream', and you'll
+# get the <version>/base branch, which is pure upstream -stable, and the same
+# meta SRCREV as the linux-yocto-standard builds. Select your version using the
+# normal PREFERRED_VERSION settings.
+BBCLASSEXTEND = "devupstream:target"
+SRCREV_machine:class-devupstream ?= "5c7587f69194bc9fc714953ab4c7203e6e68885b"
+PN:class-devupstream = "linux-yocto-upstream"
+KBRANCH:class-devupstream = "v6.6/base"
+
+SRC_URI = "git://git.yoctoproject.org/linux-yocto.git;name=machine;branch=${KBRANCH};protocol=https \
+ git://git.yoctoproject.org/yocto-kernel-cache;type=kmeta;name=meta;branch=yocto-6.6;destsuffix=${KMETA};protocol=https"
+
+LIC_FILES_CHKSUM = "file://COPYING;md5=6bc538ed5bd9a7fc9398086aedcd7e46"
+LINUX_VERSION ?= "6.6.23"
+
+PV = "${LINUX_VERSION}+git"
+
+KMETA = "kernel-meta"
+KCONF_BSP_AUDIT_LEVEL = "1"
+
+KERNEL_DEVICETREE:qemuarmv5 = "arm/versatile-pb.dtb"
+
+COMPATIBLE_MACHINE = "^(qemuarm|qemuarmv5|qemuarm64|qemux86|qemuppc|qemuppc64|qemumips|qemumips64|qemux86-64|qemuriscv64|qemuriscv32|qemuloongarch64)$"
+
+# Functionality flags
+KERNEL_EXTRA_FEATURES ?= "features/netfilter/netfilter.scc"
+KERNEL_FEATURES:append = " ${KERNEL_EXTRA_FEATURES}"
+KERNEL_FEATURES:append:qemuall=" cfg/virtio.scc features/drm-bochs/drm-bochs.scc cfg/net/mdio.scc"
+KERNEL_FEATURES:append:qemux86=" cfg/sound.scc cfg/paravirt_kvm.scc"
+KERNEL_FEATURES:append:qemux86-64=" cfg/sound.scc cfg/paravirt_kvm.scc"
+KERNEL_FEATURES:append = " ${@bb.utils.contains("TUNE_FEATURES", "mx32", " cfg/x32.scc", "", d)}"
+KERNEL_FEATURES:append = " ${@bb.utils.contains("DISTRO_FEATURES", "ptest", " features/scsi/scsi-debug.scc features/nf_tables/nft_test.scc", "", d)}"
+KERNEL_FEATURES:append = " ${@bb.utils.contains("DISTRO_FEATURES", "ptest", " features/gpio/mockup.scc features/gpio/sim.scc", "", d)}"
+KERNEL_FEATURES:append:powerpc =" arch/powerpc/powerpc-debug.scc"
+KERNEL_FEATURES:append:powerpc64 =" arch/powerpc/powerpc-debug.scc"
+KERNEL_FEATURES:append:powerpc64le =" arch/powerpc/powerpc-debug.scc"
+
+INSANE_SKIP:kernel-vmlinux:qemuppc64 = "textrel"
+
diff --git a/meta/recipes-kernel/lttng/babeltrace2/run-ptest b/meta/recipes-kernel/lttng/babeltrace2/run-ptest
index 72fe223436..71a9c81bfb 100755
--- a/meta/recipes-kernel/lttng/babeltrace2/run-ptest
+++ b/meta/recipes-kernel/lttng/babeltrace2/run-ptest
@@ -6,4 +6,14 @@
# test plan to raise ERRORs; this is just noise.
makeargs="LOG_DRIVER_FLAGS=--ignore-exit abs_top_srcdir=$PWD abs_top_builddir=$PWD GREP=grep SED=sed PYTHON=python3"
-exec make -C tests -k -s $makeargs $target 2>/dev/null
+exec 2> error.log
+make -C tests -k -s $makeargs $target
+exitcode=$?
+if [ -e error.log ]; then
+ cat error.log
+fi
+if [ -e tests/test-suite.log ]; then
+ cat tests/test-suite.log
+fi
+
+exit $exitcode \ No newline at end of file
diff --git a/meta/recipes-kernel/lttng/babeltrace2_2.0.4.bb b/meta/recipes-kernel/lttng/babeltrace2_2.0.4.bb
deleted file mode 100644
index b48f07ea0d..0000000000
--- a/meta/recipes-kernel/lttng/babeltrace2_2.0.4.bb
+++ /dev/null
@@ -1,93 +0,0 @@
-SUMMARY = "Babeltrace2 - Trace Format Babel Tower"
-DESCRIPTION = "Babeltrace provides trace read and write libraries in host side, as well as a trace converter, which used to convert LTTng 2.0 traces into human-readable log."
-HOMEPAGE = "http://babeltrace.org/"
-BUGTRACKER = "https://bugs.lttng.org/projects/babeltrace"
-LICENSE = "MIT & GPL-2.0-only & LGPL-2.1-only & BSD-2-Clause"
-LIC_FILES_CHKSUM = "file://LICENSE;md5=a6a458c13f18385b7bc5069a6d7b176e"
-
-DEPENDS = "glib-2.0 util-linux popt bison-native flex-native"
-
-SRC_URI = "git://git.efficios.com/babeltrace.git;branch=stable-2.0 \
- file://run-ptest \
- file://0001-tests-do-not-run-test-applications-from-.libs.patch \
- file://0001-Make-manpages-multilib-identical.patch \
- "
-SRCREV = "23e8cf4e6fdc1d0b230e964dafac08a57e6228e6"
-UPSTREAM_CHECK_GITTAGREGEX = "v(?P<pver>2(\.\d+)+)$"
-
-S = "${WORKDIR}/git"
-
-inherit autotools pkgconfig ptest python3targetconfig
-
-EXTRA_OECONF = "--disable-debug-info --disable-Werror"
-
-PACKAGECONFIG ??= "manpages"
-PACKAGECONFIG[manpages] = ", --disable-man-pages, asciidoc-native xmlto-native"
-
-FILES:${PN}-staticdev += "${libdir}/babeltrace2/plugins/*.a"
-FILES:${PN} += "${libdir}/babeltrace2/plugins/*.so"
-
-ASNEEDED = ""
-
-RDEPENDS:${PN}-ptest += "bash gawk python3"
-
-do_compile_ptest () {
- make -C tests all
-}
-
-do_install_ptest () {
- install -d "${D}${PTEST_PATH}/tests"
-
- # Copy required files from source directory
- for d in $(find "${S}/tests" -type d -printf '%P ') ; do
- install -d "${D}${PTEST_PATH}/tests/$d"
- find "${S}/tests/$d" -maxdepth 1 -executable -type f \
- -exec install -t "${D}${PTEST_PATH}/tests/$d" {} +
- find "${S}/tests/$d" -maxdepth 1 -name *.sh \
- -exec install -t "${D}${PTEST_PATH}/tests/$d" {} \;
- find "${S}/tests/$d" -maxdepth 1 -name *.py \
- -exec install -t "${D}${PTEST_PATH}/tests/$d" {} \;
- find "${S}/tests/$d" -maxdepth 1 -name *.expect \
- -exec install -t "${D}${PTEST_PATH}/tests/$d" {} \;
- done
- install -d "${D}${PTEST_PATH}/tests/data/ctf-traces/"
- cp -a ${S}/tests/data/ctf-traces/* ${D}${PTEST_PATH}/tests/data/ctf-traces/
-
- # Copy the tests directory tree and the executables and
- # Makefiles found within.
- install -D "${B}/tests/Makefile" "${D}${PTEST_PATH}/tests/"
- for d in $(find "${B}/tests" -type d -not -name .libs -printf '%P ') ; do
- install -d "${D}${PTEST_PATH}/tests/$d"
- find "${B}/tests/$d" -maxdepth 1 -executable -type f \
- -exec install -t "${D}${PTEST_PATH}/tests/$d" {} +
- test -r "${B}/tests/$d/Makefile" && \
- install -t "${D}${PTEST_PATH}/tests/$d" "${B}/tests/$d/Makefile"
- find "${B}/tests/$d" -maxdepth 1 -name *.sh \
- -exec install -t "${D}${PTEST_PATH}/tests/$d" {} \;
- done
-
- for d in $(find "${B}/tests" -type d -name .libs -printf '%P ') ; do
- for f in $(find "${B}/tests/$d" -maxdepth 1 -executable -type f -printf '%P ') ; do
- cp ${B}/tests/$d/$f ${D}${PTEST_PATH}/tests/`dirname $d`/$f
- done
- done
-
- # Prevent attempts to update Makefiles during test runs, and
- # silence "Making check in $SUBDIR" messages.
- find "${D}${PTEST_PATH}" -name Makefile -type f -exec \
- sed -i \
- -e '/Makefile:/,/^$/d' \
- -e '/%: %.in/,/^$/d' \
- -e '/echo "Making $$target in $$subdir"; \\/d' \
- -e 's/^srcdir = \(.*\)/srcdir = ./' \
- -e 's/^builddir = \(.*\)/builddir = ./' \
- -e 's/^all-am:.*/all-am:/' \
- {} +
-
- # Substitute links to installed binaries.
- install -d "${D}${PTEST_PATH}/src/cli/"
- ln -s "${bindir}/babeltrace2" ${D}${PTEST_PATH}/src/cli/
-
- # Remove architechture specific testfiles
- rm -rf ${D}${PTEST_PATH}/tests/data/plugins/flt.lttng-utils.debug-info/*
-}
diff --git a/meta/recipes-kernel/lttng/babeltrace2_2.0.6.bb b/meta/recipes-kernel/lttng/babeltrace2_2.0.6.bb
new file mode 100644
index 0000000000..d6c75d7580
--- /dev/null
+++ b/meta/recipes-kernel/lttng/babeltrace2_2.0.6.bb
@@ -0,0 +1,95 @@
+SUMMARY = "Babeltrace2 - Trace Format Babel Tower"
+DESCRIPTION = "Babeltrace provides trace read and write libraries in host side, as well as a trace converter, which used to convert LTTng 2.0 traces into human-readable log."
+HOMEPAGE = "http://babeltrace.org/"
+BUGTRACKER = "https://bugs.lttng.org/projects/babeltrace"
+LICENSE = "MIT & GPL-2.0-only & LGPL-2.1-only & BSD-2-Clause"
+LIC_FILES_CHKSUM = "file://LICENSE;md5=a6a458c13f18385b7bc5069a6d7b176e"
+
+DEPENDS = "glib-2.0 util-linux popt bison-native flex-native"
+
+SRC_URI = "git://git.efficios.com/babeltrace.git;branch=stable-2.0;protocol=https \
+ file://run-ptest \
+ file://0001-tests-do-not-run-test-applications-from-.libs.patch \
+ file://0001-Make-manpages-multilib-identical.patch \
+ "
+SRCREV = "0a6632f77801f3218a288604c646f8a39cb0d2c4"
+UPSTREAM_CHECK_GITTAGREGEX = "v(?P<pver>2(\.\d+)+)$"
+
+S = "${WORKDIR}/git"
+
+inherit autotools pkgconfig ptest python3targetconfig
+
+EXTRA_OECONF = "--disable-debug-info --disable-Werror"
+
+PACKAGECONFIG ??= "manpages"
+PACKAGECONFIG[manpages] = ", --disable-man-pages, asciidoc-native xmlto-native"
+
+FILES:${PN}-staticdev += "${libdir}/babeltrace2/plugins/*.a"
+FILES:${PN} += "${libdir}/babeltrace2/plugins/*.so"
+
+ASNEEDED = ""
+LDFLAGS:append = "${@bb.utils.contains('DISTRO_FEATURES', 'ld-is-lld ptest', ' -fuse-ld=bfd ', '', d)}"
+
+# coreutils since we need full mktemp
+RDEPENDS:${PN}-ptest += "bash gawk python3 make grep coreutils findutils"
+
+do_compile_ptest () {
+ make -C tests all
+}
+
+do_install_ptest () {
+ install -d "${D}${PTEST_PATH}/tests"
+
+ # Copy required files from source directory
+ for d in $(find "${S}/tests" -type d -printf '%P ') ; do
+ install -d "${D}${PTEST_PATH}/tests/$d"
+ find "${S}/tests/$d" -maxdepth 1 -executable -type f \
+ -exec install -t "${D}${PTEST_PATH}/tests/$d" {} +
+ find "${S}/tests/$d" -maxdepth 1 -name *.sh \
+ -exec install -t "${D}${PTEST_PATH}/tests/$d" {} \;
+ find "${S}/tests/$d" -maxdepth 1 -name *.py \
+ -exec install -t "${D}${PTEST_PATH}/tests/$d" {} \;
+ find "${S}/tests/$d" -maxdepth 1 -name *.expect \
+ -exec install -t "${D}${PTEST_PATH}/tests/$d" {} \;
+ done
+ install -d "${D}${PTEST_PATH}/tests/data/ctf-traces/"
+ cp -a ${S}/tests/data/ctf-traces/* ${D}${PTEST_PATH}/tests/data/ctf-traces/
+
+ # Copy the tests directory tree and the executables and
+ # Makefiles found within.
+ install -D "${B}/tests/Makefile" "${D}${PTEST_PATH}/tests/"
+ for d in $(find "${B}/tests" -type d -not -name .libs -printf '%P ') ; do
+ install -d "${D}${PTEST_PATH}/tests/$d"
+ find "${B}/tests/$d" -maxdepth 1 -executable -type f \
+ -exec install -t "${D}${PTEST_PATH}/tests/$d" {} +
+ test -r "${B}/tests/$d/Makefile" && \
+ install -t "${D}${PTEST_PATH}/tests/$d" "${B}/tests/$d/Makefile"
+ find "${B}/tests/$d" -maxdepth 1 -name *.sh \
+ -exec install -t "${D}${PTEST_PATH}/tests/$d" {} \;
+ done
+
+ for d in $(find "${B}/tests" -type d -name .libs -printf '%P ') ; do
+ for f in $(find "${B}/tests/$d" -maxdepth 1 -executable -type f -printf '%P ') ; do
+ cp ${B}/tests/$d/$f ${D}${PTEST_PATH}/tests/`dirname $d`/$f
+ done
+ done
+
+ # Prevent attempts to update Makefiles during test runs, and
+ # silence "Making check in $SUBDIR" messages.
+ find "${D}${PTEST_PATH}" -name Makefile -type f -exec \
+ sed -i \
+ -e '/Makefile:/,/^$/d' \
+ -e '/%: %.in/,/^$/d' \
+ -e '/echo "Making $$target in $$subdir"; \\/d' \
+ -e 's/^srcdir = \(.*\)/srcdir = ./' \
+ -e 's/^builddir = \(.*\)/builddir = ./' \
+ -e 's/^all-am:.*/all-am:/' \
+ {} +
+
+ # Substitute links to installed binaries.
+ install -d "${D}${PTEST_PATH}/src/cli/"
+ ln -s "${bindir}/babeltrace2" ${D}${PTEST_PATH}/src/cli/
+
+ # Remove architechture specific testfiles
+ rm -rf ${D}${PTEST_PATH}/tests/data/plugins/flt.lttng-utils.debug-info/*
+}
diff --git a/meta/recipes-kernel/lttng/babeltrace_1.5.11.bb b/meta/recipes-kernel/lttng/babeltrace_1.5.11.bb
new file mode 100644
index 0000000000..f4d9b5e42a
--- /dev/null
+++ b/meta/recipes-kernel/lttng/babeltrace_1.5.11.bb
@@ -0,0 +1,98 @@
+SUMMARY = "Babeltrace - Trace Format Babel Tower"
+DESCRIPTION = "Babeltrace provides trace read and write libraries in host side, as well as a trace converter, which used to convert LTTng 2.0 traces into human-readable log."
+HOMEPAGE = "http://babeltrace.org/"
+BUGTRACKER = "https://bugs.lttng.org/projects/babeltrace"
+LICENSE = "MIT & GPL-2.0-only & LGPL-2.1-only"
+LIC_FILES_CHKSUM = "file://LICENSE;md5=76ba15dd76a248e1dd526bca0e2125fa"
+
+DEPENDS = "glib-2.0 util-linux popt bison-native flex-native"
+
+SRC_URI = "git://git.efficios.com/babeltrace.git;branch=stable-1.5;protocol=https \
+ file://run-ptest \
+ "
+SRCREV = "91c00f70884887ff5c4849a8e3d47e311a22ba9d"
+UPSTREAM_CHECK_GITTAGREGEX = "v(?P<pver>1(\.\d+)+)$"
+
+S = "${WORKDIR}/git"
+
+inherit autotools pkgconfig ptest
+
+EXTRA_OECONF = "--disable-debug-info"
+
+ASNEEDED = ""
+
+RDEPENDS:${PN}-ptest += "bash gawk make"
+
+addtask do_patch_ptest_path after do_patch before do_configure
+do_patch_ptest_path () {
+ for f in $(grep -l -r abs_top_srcdir ${S}/tests); do
+ sed -i 's:\@abs_top_srcdir\@:${PTEST_PATH}:' ${f}
+ done
+
+ for f in $(grep -l -r abs_top_builddir ${S}/tests); do
+ sed -i 's:\@abs_top_builddir\@:${PTEST_PATH}:' ${f}
+ done
+ for f in $(grep -l -r GREP ${S}/tests); do
+ sed -i 's:\@GREP\@:grep:' ${f}
+ done
+
+ for f in $(grep -l -r SED ${S}/tests); do
+ sed -i 's:\@SED\@:sed:' ${f}
+ done
+}
+
+do_compile_ptest () {
+ make -C tests all
+}
+
+do_install_ptest () {
+ # Copy required files from source directory
+ for f in config/tap-driver.sh config/test-driver; do
+ install -D "${S}/$f" "${D}${PTEST_PATH}/$f"
+ done
+ install -d "$f" "${D}${PTEST_PATH}/tests/ctf-traces/"
+ cp -a ${S}/tests/ctf-traces/* ${D}${PTEST_PATH}/tests/ctf-traces/
+
+ # Copy the tests directory tree and the executables and
+ # Makefiles found within.
+ install -D "${B}/tests/Makefile" "${D}${PTEST_PATH}/tests/"
+ for d in $(find "${B}/tests" -type d -not -name .libs -printf '%P ') ; do
+ install -d "${D}${PTEST_PATH}/tests/$d"
+ find "${B}/tests/$d" -maxdepth 1 -executable -type f \
+ -exec install -t "${D}${PTEST_PATH}/tests/$d" {} +
+ test -r "${B}/tests/$d/Makefile" && \
+ install -t "${D}${PTEST_PATH}/tests/$d" "${B}/tests/$d/Makefile"
+ find "${B}/tests/$d" -maxdepth 1 -name *.sh \
+ -exec install -t "${D}${PTEST_PATH}/tests/$d" {} \;
+ done
+
+ for d in $(find "${B}/tests" -type d -name .libs -printf '%P ') ; do
+ for f in $(find "${B}/tests/$d" -maxdepth 1 -executable -type f -printf '%P ') ; do
+ cp ${B}/tests/$d/$f ${D}${PTEST_PATH}/tests/`dirname $d`/$f
+ done
+ done
+
+ install -D ${B}/formats/ctf/metadata/.libs/ctf-parser-test \
+ ${D}${PTEST_PATH}/formats/ctf/metadata/ctf-parser-test
+
+ # Prevent attempts to update Makefiles during test runs, and
+ # silence "Making check in $SUBDIR" messages.
+ find "${D}${PTEST_PATH}" -name Makefile -type f -exec \
+ sed -i \
+ -e '/Makefile:/,/^$/d' \
+ -e '/$(check_SCRIPTS)/s/^/#/' \
+ -e '/%: %.in/,/^$/d' \
+ -e '/echo "Making $$target in $$subdir"; \\/d' \
+ -e 's/^srcdir = \(.*\)/srcdir = ./' \
+ -e 's/^builddir = \(.*\)/builddir = ./' \
+ -e 's/^all-am:.*/all-am:/' \
+ {} +
+
+ # Remove path to babeltrace.
+ for f in $(grep -l -r "^BABELTRACE_BIN" ${D}${PTEST_PATH}); do
+ sed -i 's:^BABELTRACE_BIN.*:BABELTRACE_BIN=/usr/bin/babeltrace:' ${f}
+ done
+ for f in $(grep -l -r "^BTBIN" ${D}${PTEST_PATH}); do
+ sed -i 's:^BTBIN.*:BTBIN=/usr/bin/babeltrace:' ${f}
+ done
+}
diff --git a/meta/recipes-kernel/lttng/babeltrace_1.5.8.bb b/meta/recipes-kernel/lttng/babeltrace_1.5.8.bb
deleted file mode 100644
index 19601e7d1b..0000000000
--- a/meta/recipes-kernel/lttng/babeltrace_1.5.8.bb
+++ /dev/null
@@ -1,98 +0,0 @@
-SUMMARY = "Babeltrace - Trace Format Babel Tower"
-DESCRIPTION = "Babeltrace provides trace read and write libraries in host side, as well as a trace converter, which used to convert LTTng 2.0 traces into human-readable log."
-HOMEPAGE = "http://babeltrace.org/"
-BUGTRACKER = "https://bugs.lttng.org/projects/babeltrace"
-LICENSE = "MIT & GPL-2.0-only & LGPL-2.1-only"
-LIC_FILES_CHKSUM = "file://LICENSE;md5=76ba15dd76a248e1dd526bca0e2125fa"
-
-DEPENDS = "glib-2.0 util-linux popt bison-native flex-native"
-
-SRC_URI = "git://git.efficios.com/babeltrace.git;branch=stable-1.5 \
- file://run-ptest \
- "
-SRCREV = "054a54ae10b01a271afc4f19496c041b10fb414c"
-UPSTREAM_CHECK_GITTAGREGEX = "v(?P<pver>1(\.\d+)+)$"
-
-S = "${WORKDIR}/git"
-
-inherit autotools pkgconfig ptest
-
-EXTRA_OECONF = "--disable-debug-info"
-
-ASNEEDED = ""
-
-RDEPENDS:${PN}-ptest += "bash gawk"
-
-addtask do_patch_ptest_path after do_patch before do_configure
-do_patch_ptest_path () {
- for f in $(grep -l -r abs_top_srcdir ${S}/tests); do
- sed -i 's:\@abs_top_srcdir\@:${PTEST_PATH}:' ${f}
- done
-
- for f in $(grep -l -r abs_top_builddir ${S}/tests); do
- sed -i 's:\@abs_top_builddir\@:${PTEST_PATH}:' ${f}
- done
- for f in $(grep -l -r GREP ${S}/tests); do
- sed -i 's:\@GREP\@:grep:' ${f}
- done
-
- for f in $(grep -l -r SED ${S}/tests); do
- sed -i 's:\@SED\@:sed:' ${f}
- done
-}
-
-do_compile_ptest () {
- make -C tests all
-}
-
-do_install_ptest () {
- # Copy required files from source directory
- for f in config/tap-driver.sh config/test-driver; do
- install -D "${S}/$f" "${D}${PTEST_PATH}/$f"
- done
- install -d "$f" "${D}${PTEST_PATH}/tests/ctf-traces/"
- cp -a ${S}/tests/ctf-traces/* ${D}${PTEST_PATH}/tests/ctf-traces/
-
- # Copy the tests directory tree and the executables and
- # Makefiles found within.
- install -D "${B}/tests/Makefile" "${D}${PTEST_PATH}/tests/"
- for d in $(find "${B}/tests" -type d -not -name .libs -printf '%P ') ; do
- install -d "${D}${PTEST_PATH}/tests/$d"
- find "${B}/tests/$d" -maxdepth 1 -executable -type f \
- -exec install -t "${D}${PTEST_PATH}/tests/$d" {} +
- test -r "${B}/tests/$d/Makefile" && \
- install -t "${D}${PTEST_PATH}/tests/$d" "${B}/tests/$d/Makefile"
- find "${B}/tests/$d" -maxdepth 1 -name *.sh \
- -exec install -t "${D}${PTEST_PATH}/tests/$d" {} \;
- done
-
- for d in $(find "${B}/tests" -type d -name .libs -printf '%P ') ; do
- for f in $(find "${B}/tests/$d" -maxdepth 1 -executable -type f -printf '%P ') ; do
- cp ${B}/tests/$d/$f ${D}${PTEST_PATH}/tests/`dirname $d`/$f
- done
- done
-
- install -D ${B}/formats/ctf/metadata/.libs/ctf-parser-test \
- ${D}${PTEST_PATH}/formats/ctf/metadata/ctf-parser-test
-
- # Prevent attempts to update Makefiles during test runs, and
- # silence "Making check in $SUBDIR" messages.
- find "${D}${PTEST_PATH}" -name Makefile -type f -exec \
- sed -i \
- -e '/Makefile:/,/^$/d' \
- -e '/$(check_SCRIPTS)/s/^/#/' \
- -e '/%: %.in/,/^$/d' \
- -e '/echo "Making $$target in $$subdir"; \\/d' \
- -e 's/^srcdir = \(.*\)/srcdir = ./' \
- -e 's/^builddir = \(.*\)/builddir = ./' \
- -e 's/^all-am:.*/all-am:/' \
- {} +
-
- # Remove path to babeltrace.
- for f in $(grep -l -r "^BABELTRACE_BIN" ${D}${PTEST_PATH}); do
- sed -i 's:^BABELTRACE_BIN.*:BABELTRACE_BIN=/usr/bin/babeltrace:' ${f}
- done
- for f in $(grep -l -r "^BTBIN" ${D}${PTEST_PATH}); do
- sed -i 's:^BTBIN.*:BTBIN=/usr/bin/babeltrace:' ${f}
- done
-}
diff --git a/meta/recipes-kernel/lttng/lttng-modules/0001-Fix-ASoC-snd_doc_dapm-on-linux-6.9-rc1.patch b/meta/recipes-kernel/lttng/lttng-modules/0001-Fix-ASoC-snd_doc_dapm-on-linux-6.9-rc1.patch
new file mode 100644
index 0000000000..9d5fd0de60
--- /dev/null
+++ b/meta/recipes-kernel/lttng/lttng-modules/0001-Fix-ASoC-snd_doc_dapm-on-linux-6.9-rc1.patch
@@ -0,0 +1,93 @@
+From fec007d9630e010062cf5699a08460f71f46b527 Mon Sep 17 00:00:00 2001
+From: Kienan Stewart <kstewart@efficios.com>
+Date: Mon, 25 Mar 2024 08:54:42 -0400
+Subject: [PATCH 1/4] Fix: ASoC snd_doc_dapm on linux 6.9-rc1
+
+See upstream commit:
+
+ commit 7df3eb4cdb6bbfa482f51548b9fd47c2723c68ba
+ Author: Luca Ceresoli <luca.ceresoli@bootlin.com>
+ Date: Wed Mar 6 10:30:01 2024 +0100
+
+ ASoC: trace: add event to snd_soc_dapm trace events
+
+ Add the event value to the snd_soc_dapm_start and snd_soc_dapm_done trace
+ events to make them more informative.
+
+ Trace before:
+
+ aplay-229 [000] 250.140309: snd_soc_dapm_start: card=vscn-2046
+ aplay-229 [000] 250.167531: snd_soc_dapm_done: card=vscn-2046
+ aplay-229 [000] 251.169588: snd_soc_dapm_start: card=vscn-2046
+ aplay-229 [000] 251.195245: snd_soc_dapm_done: card=vscn-2046
+
+ Trace after:
+
+ aplay-214 [000] 693.290612: snd_soc_dapm_start: card=vscn-2046 event=1
+ aplay-214 [000] 693.315508: snd_soc_dapm_done: card=vscn-2046 event=1
+ aplay-214 [000] 694.537349: snd_soc_dapm_start: card=vscn-2046 event=2
+ aplay-214 [000] 694.563241: snd_soc_dapm_done: card=vscn-2046 event=2
+
+Upstream-Status: Backport [88c4e0fe Fix: ASoC snd_doc_dapm on linux 6.9-rc1]
+
+Change-Id: If0d33544b8dd1dfb3d12ca9390892190fc0444b0
+Signed-off-by: Kienan Stewart <kstewart@efficios.com>
+Signed-off-by: Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
+---
+ include/instrumentation/events/asoc.h | 33 +++++++++++++++++++++++++++
+ 1 file changed, 33 insertions(+)
+
+diff --git a/include/instrumentation/events/asoc.h b/include/instrumentation/events/asoc.h
+index 21d13a0f..5126d4c1 100644
+--- a/include/instrumentation/events/asoc.h
++++ b/include/instrumentation/events/asoc.h
+@@ -51,6 +51,38 @@ LTTNG_TRACEPOINT_EVENT_INSTANCE_MAP(asoc_snd_soc_card, snd_soc_bias_level_done,
+
+ )
+
++#if (LTTNG_LINUX_VERSION_CODE >= LTTNG_KERNEL_VERSION(6,9,0))
++LTTNG_TRACEPOINT_EVENT_CLASS(asoc_snd_soc_dapm_basic,
++
++ TP_PROTO(struct snd_soc_card *card, int event),
++
++ TP_ARGS(card, event),
++
++ TP_FIELDS(
++ ctf_string(name, card->name)
++ ctf_integer(int, event, event)
++ )
++)
++LTTNG_TRACEPOINT_EVENT_INSTANCE_MAP(asoc_snd_soc_dapm_basic, snd_soc_dapm_start,
++
++ asoc_snd_soc_dapm_start,
++
++ TP_PROTO(struct snd_soc_card *card, int event),
++
++ TP_ARGS(card, event)
++
++)
++
++LTTNG_TRACEPOINT_EVENT_INSTANCE_MAP(asoc_snd_soc_dapm_basic, snd_soc_dapm_done,
++
++ asoc_snd_soc_dapm_done,
++
++ TP_PROTO(struct snd_soc_card *card, int event),
++
++ TP_ARGS(card, event)
++
++)
++#else
+ LTTNG_TRACEPOINT_EVENT_CLASS(asoc_snd_soc_dapm_basic,
+
+ TP_PROTO(struct snd_soc_card *card),
+@@ -81,6 +113,7 @@ LTTNG_TRACEPOINT_EVENT_INSTANCE_MAP(asoc_snd_soc_dapm_basic, snd_soc_dapm_done,
+ TP_ARGS(card)
+
+ )
++#endif
+
+ LTTNG_TRACEPOINT_EVENT_CLASS(asoc_snd_soc_dapm_widget,
+
+--
+2.39.2
+
diff --git a/meta/recipes-kernel/lttng/lttng-modules/0001-Fix-compaction-migratepages-event-name.patch b/meta/recipes-kernel/lttng/lttng-modules/0001-Fix-compaction-migratepages-event-name.patch
deleted file mode 100644
index e988f7a3d5..0000000000
--- a/meta/recipes-kernel/lttng/lttng-modules/0001-Fix-compaction-migratepages-event-name.patch
+++ /dev/null
@@ -1,37 +0,0 @@
-From c312bda00d2dc10ce5f6c1189acbefee5c6c8c6c Mon Sep 17 00:00:00 2001
-From: Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
-Date: Tue, 29 Mar 2022 16:34:07 -0400
-Subject: [PATCH 01/10] Fix: compaction migratepages event name
-
-The commit "fix: mm: compaction: fix the migration stats in trace_mm_compaction_migratepages() (v5.17)"
-
-Triggers this warning:
-
- LTTng: event provider mismatch: The event name needs to start with provider name + _ + one or more letter, provider: compaction, event name: mm_compaction_migratepages
-
-Upstream-Status: Backport
-
-Signed-off-by: Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
-Change-Id: I01c7485af765084dafb33bf33ae392e60bfbf1e7
----
- include/instrumentation/events/compaction.h | 4 +++-
- 1 file changed, 3 insertions(+), 1 deletion(-)
-
-diff --git a/include/instrumentation/events/compaction.h b/include/instrumentation/events/compaction.h
-index 340e41f5..15964537 100644
---- a/include/instrumentation/events/compaction.h
-+++ b/include/instrumentation/events/compaction.h
-@@ -98,7 +98,9 @@ LTTNG_TRACEPOINT_EVENT_INSTANCE_MAP(compaction_isolate_template,
- #endif /* #else #if LTTNG_LINUX_VERSION_CODE >= LTTNG_KERNEL_VERSION(4,0,0) */
-
- #if (LTTNG_LINUX_VERSION_CODE >= LTTNG_KERNEL_VERSION(5,17,0))
--LTTNG_TRACEPOINT_EVENT(mm_compaction_migratepages,
-+LTTNG_TRACEPOINT_EVENT_MAP(mm_compaction_migratepages,
-+
-+ compaction_migratepages,
-
- TP_PROTO(unsigned long nr_all,
- unsigned int nr_succeeded),
---
-2.19.1
-
diff --git a/meta/recipes-kernel/lttng/lttng-modules/0001-fix-sched-tracing-Append-prev_state-to-tp-args-inste.patch b/meta/recipes-kernel/lttng/lttng-modules/0001-fix-sched-tracing-Append-prev_state-to-tp-args-inste.patch
deleted file mode 100644
index b41053b6bc..0000000000
--- a/meta/recipes-kernel/lttng/lttng-modules/0001-fix-sched-tracing-Append-prev_state-to-tp-args-inste.patch
+++ /dev/null
@@ -1,59 +0,0 @@
-From 9c5b8de32b5745f3ff31079c02da64595e101bee Mon Sep 17 00:00:00 2001
-From: Michael Jeanson <mjeanson@efficios.com>
-Date: Tue, 17 May 2022 11:46:29 -0400
-Subject: [PATCH] fix: sched/tracing: Append prev_state to tp args instead
- (v5.18)
-
-See upstream commit :
-
- commit 9c2136be0878c88c53dea26943ce40bb03ad8d8d
- Author: Delyan Kratunov <delyank@fb.com>
- Date: Wed May 11 18:28:36 2022 +0000
-
- sched/tracing: Append prev_state to tp args instead
-
- Commit fa2c3254d7cf (sched/tracing: Don't re-read p->state when emitting
- sched_switch event, 2022-01-20) added a new prev_state argument to the
- sched_switch tracepoint, before the prev task_struct pointer.
-
- This reordering of arguments broke BPF programs that use the raw
- tracepoint (e.g. tp_btf programs). The type of the second argument has
- changed and existing programs that assume a task_struct* argument
- (e.g. for bpf_task_storage access) will now fail to verify.
-
- If we instead append the new argument to the end, all existing programs
- would continue to work and can conditionally extract the prev_state
- argument on supported kernel versions.
-
-
-Upstream-Status: Backport
-
-Change-Id: Ife2ec88a8bea2743562590cbd357068d7773863f
-Signed-off-by: Michael Jeanson <mjeanson@efficios.com>
-Signed-off-by: Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
----
- include/instrumentation/events/sched.h | 6 +++---
- 1 file changed, 3 insertions(+), 3 deletions(-)
-
-diff --git a/include/instrumentation/events/sched.h b/include/instrumentation/events/sched.h
-index 339bec94..c1c3df15 100644
---- a/include/instrumentation/events/sched.h
-+++ b/include/instrumentation/events/sched.h
-@@ -356,11 +356,11 @@ LTTNG_TRACEPOINT_EVENT_INSTANCE(sched_wakeup_template, sched_wakeup_new,
- LTTNG_TRACEPOINT_EVENT(sched_switch,
-
- TP_PROTO(bool preempt,
-- unsigned int prev_state,
- struct task_struct *prev,
-- struct task_struct *next),
-+ struct task_struct *next,
-+ unsigned int prev_state),
-
-- TP_ARGS(preempt, prev_state, prev, next),
-+ TP_ARGS(preempt, prev, next, prev_state),
-
- TP_FIELDS(
- ctf_array_text(char, prev_comm, prev->comm, TASK_COMM_LEN)
---
-2.19.1
-
diff --git a/meta/recipes-kernel/lttng/lttng-modules/0001-src-Kbuild-change-missing-CONFIG_TRACEPOINTS-to-warn.patch b/meta/recipes-kernel/lttng/lttng-modules/0001-src-Kbuild-change-missing-CONFIG_TRACEPOINTS-to-warn.patch
index 76a5787c37..120528bf9c 100644
--- a/meta/recipes-kernel/lttng/lttng-modules/0001-src-Kbuild-change-missing-CONFIG_TRACEPOINTS-to-warn.patch
+++ b/meta/recipes-kernel/lttng/lttng-modules/0001-src-Kbuild-change-missing-CONFIG_TRACEPOINTS-to-warn.patch
@@ -19,10 +19,10 @@ Signed-off-by: Bruce Ashfield <bruce.ashfield@gmail.com>
src/Kbuild | 7 ++++++-
1 file changed, 6 insertions(+), 1 deletion(-)
-diff --git a/src/Kbuild b/src/Kbuild
-index 7137874..04eb5c9 100644
---- a/src/Kbuild
-+++ b/src/Kbuild
+Index: lttng-modules-2.13.10/src/Kbuild
+===================================================================
+--- lttng-modules-2.13.10.orig/src/Kbuild
++++ lttng-modules-2.13.10/src/Kbuild
@@ -2,10 +2,13 @@
ifdef CONFIG_LOCALVERSION # Check if dot-config is included.
@@ -38,7 +38,7 @@ index 7137874..04eb5c9 100644
TOP_LTTNG_MODULES_DIR := $(shell dirname $(lastword $(MAKEFILE_LIST)))/..
lttng_check_linux_version = $(shell pwd)/include/linux/version.h
-@@ -150,3 +153,5 @@ lttng-statedump-objs := lttng-statedump-impl.o
+@@ -150,3 +153,5 @@ lttng-statedump-objs := lttng-statedump-
obj-$(CONFIG_LTTNG) += probes/
obj-$(CONFIG_LTTNG) += lib/
obj-$(CONFIG_LTTNG) += tests/
diff --git a/meta/recipes-kernel/lttng/lttng-modules/0002-Fix-ASoC-add-component-to-set_bias_level-events-in-l.patch b/meta/recipes-kernel/lttng/lttng-modules/0002-Fix-ASoC-add-component-to-set_bias_level-events-in-l.patch
new file mode 100644
index 0000000000..6ea10ffc91
--- /dev/null
+++ b/meta/recipes-kernel/lttng/lttng-modules/0002-Fix-ASoC-add-component-to-set_bias_level-events-in-l.patch
@@ -0,0 +1,132 @@
+From d8379ec6365a925db33cae94fb6783cdbdb6a922 Mon Sep 17 00:00:00 2001
+From: Kienan Stewart <kstewart@efficios.com>
+Date: Mon, 25 Mar 2024 09:40:29 -0400
+Subject: [PATCH 2/4] Fix: ASoC add component to set_bias_level events in linux
+ 6.9-rc1
+
+See upstream commit:
+
+ commit 6ef46a69ec32fe1cf56de67742fcd01af4bf48af
+ Author: Luca Ceresoli <luca.ceresoli@bootlin.com>
+ Date: Wed Mar 6 10:30:00 2024 +0100
+
+ ASoC: trace: add component to set_bias_level trace events
+
+ The snd_soc_bias_level_start and snd_soc_bias_level_done trace events
+ currently look like:
+
+ aplay-229 [000] 1250.140778: snd_soc_bias_level_start: card=vscn-2046 val=1
+ aplay-229 [000] 1250.140784: snd_soc_bias_level_done: card=vscn-2046 val=1
+ aplay-229 [000] 1250.140786: snd_soc_bias_level_start: card=vscn-2046 val=2
+ aplay-229 [000] 1250.140788: snd_soc_bias_level_done: card=vscn-2046 val=2
+ kworker/u8:1-21 [000] 1250.140871: snd_soc_bias_level_start: card=vscn-2046 val=1
+ kworker/u8:0-11 [000] 1250.140951: snd_soc_bias_level_start: card=vscn-2046 val=1
+ kworker/u8:0-11 [000] 1250.140956: snd_soc_bias_level_done: card=vscn-2046 val=1
+ kworker/u8:0-11 [000] 1250.140959: snd_soc_bias_level_start: card=vscn-2046 val=2
+ kworker/u8:0-11 [000] 1250.140961: snd_soc_bias_level_done: card=vscn-2046 val=2
+ kworker/u8:1-21 [000] 1250.167219: snd_soc_bias_level_done: card=vscn-2046 val=1
+ kworker/u8:1-21 [000] 1250.167222: snd_soc_bias_level_start: card=vscn-2046 val=2
+ kworker/u8:1-21 [000] 1250.167232: snd_soc_bias_level_done: card=vscn-2046 val=2
+ kworker/u8:0-11 [000] 1250.167440: snd_soc_bias_level_start: card=vscn-2046 val=3
+ kworker/u8:0-11 [000] 1250.167444: snd_soc_bias_level_done: card=vscn-2046 val=3
+ kworker/u8:1-21 [000] 1250.167497: snd_soc_bias_level_start: card=vscn-2046 val=3
+ kworker/u8:1-21 [000] 1250.167506: snd_soc_bias_level_done: card=vscn-2046 val=3
+
+ There are clearly multiple calls, one per component, but they cannot be
+ discriminated from each other.
+
+ Change the ftrace events to also print the component name, to make it clear
+ which part of the code is involved. This requires changing the passed value
+ from a struct snd_soc_card, where the DAPM context is not kwown, to a
+ struct snd_soc_dapm_context where it is obviously known but the a card
+ pointer is also available.
+
+ With this change, the resulting trace becomes:
+
+ aplay-247 [000] 1436.357332: snd_soc_bias_level_start: card=vscn-2046 component=(none) val=1
+ aplay-247 [000] 1436.357338: snd_soc_bias_level_done: card=vscn-2046 component=(none) val=1
+ aplay-247 [000] 1436.357340: snd_soc_bias_level_start: card=vscn-2046 component=(none) val=2
+ aplay-247 [000] 1436.357343: snd_soc_bias_level_done: card=vscn-2046 component=(none) val=2
+ kworker/u8:4-215 [000] 1436.357437: snd_soc_bias_level_start: card=vscn-2046 component=ff560000.codec val=1
+ kworker/u8:5-231 [000] 1436.357518: snd_soc_bias_level_start: card=vscn-2046 component=ff320000.i2s val=1
+ kworker/u8:5-231 [000] 1436.357523: snd_soc_bias_level_done: card=vscn-2046 component=ff320000.i2s val=1
+ kworker/u8:5-231 [000] 1436.357526: snd_soc_bias_level_start: card=vscn-2046 component=ff320000.i2s val=2
+ kworker/u8:5-231 [000] 1436.357528: snd_soc_bias_level_done: card=vscn-2046 component=ff320000.i2s val=2
+ kworker/u8:4-215 [000] 1436.383217: snd_soc_bias_level_done: card=vscn-2046 component=ff560000.codec val=1
+ kworker/u8:4-215 [000] 1436.383221: snd_soc_bias_level_start: card=vscn-2046 component=ff560000.codec val=2
+ kworker/u8:4-215 [000] 1436.383231: snd_soc_bias_level_done: card=vscn-2046 component=ff560000.codec val=2
+ kworker/u8:5-231 [000] 1436.383468: snd_soc_bias_level_start: card=vscn-2046 component=ff320000.i2s val=3
+ kworker/u8:5-231 [000] 1436.383472: snd_soc_bias_level_done: card=vscn-2046 component=ff320000.i2s val=3
+ kworker/u8:4-215 [000] 1436.383503: snd_soc_bias_level_start: card=vscn-2046 component=ff560000.codec val=3
+ kworker/u8:4-215 [000] 1436.383513: snd_soc_bias_level_done: card=vscn-2046 component=ff560000.codec val=3
+
+Upstream-Status: Backport [303434ab Fix: ASoC add component to set_bias_level events in linux 6.9-rc1]
+
+Change-Id: I959f1680c002acdf29828b968d3975247f5433d8
+Signed-off-by: Kienan Stewart <kstewart@efficios.com>
+Signed-off-by: Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
+---
+ include/instrumentation/events/asoc.h | 36 +++++++++++++++++++++++++++
+ 1 file changed, 36 insertions(+)
+
+Index: lttng-modules-2.13.12/include/instrumentation/events/asoc.h
+===================================================================
+--- lttng-modules-2.13.12.orig/include/instrumentation/events/asoc.h
++++ lttng-modules-2.13.12/include/instrumentation/events/asoc.h
+@@ -10,6 +10,7 @@
+ #include <lttng/kernel-version.h>
+
+ #define DAPM_DIRECT "(direct)"
++#define DAPM_COMPONENT_NONE "(none)"
+
+ #ifndef _TRACE_ASOC_DEF
+ #define _TRACE_ASOC_DEF
+@@ -119,6 +120,40 @@ LTTNG_TRACEPOINT_EVENT_INSTANCE_MAP(asoc
+ )
+ #endif
+
++#if (LTTNG_LINUX_VERSION_CODE >= LTTNG_KERNEL_VERSION(6,9,0))
++LTTNG_TRACEPOINT_EVENT_CLASS(asoc_snd_soc_dapm_context,
++
++ TP_PROTO(struct snd_soc_dapm_context *dapm, int val),
++
++ TP_ARGS(dapm, val),
++
++ TP_FIELDS(
++ ctf_string(name, dapm->card->name)
++ ctf_string(component, dapm->component ? dapm->component->name : DAPM_COMPONENT_NONE)
++ ctf_integer(int, val, val)
++ )
++)
++
++LTTNG_TRACEPOINT_EVENT_INSTANCE_MAP(asoc_snd_soc_dapm_context, snd_soc_bias_level_start,
++
++ asoc_snd_soc_bias_level_start,
++
++ TP_PROTO(struct snd_soc_dapm_context *dapm, int val),
++
++ TP_ARGS(dapm, val)
++
++)
++
++LTTNG_TRACEPOINT_EVENT_INSTANCE_MAP(asoc_snd_soc_dapm_context, snd_soc_bias_level_done,
++
++ asoc_snd_soc_bias_level_done,
++
++ TP_PROTO(struct snd_soc_dapm_context *dapm, int val),
++
++ TP_ARGS(dapm, val)
++
++)
++#else
+ LTTNG_TRACEPOINT_EVENT_CLASS(asoc_snd_soc_card,
+
+ TP_PROTO(struct snd_soc_card *card, int val),
+@@ -150,6 +185,7 @@ LTTNG_TRACEPOINT_EVENT_INSTANCE_MAP(asoc
+ TP_ARGS(card, val)
+
+ )
++#endif
+
+ #if (LTTNG_LINUX_VERSION_CODE >= LTTNG_KERNEL_VERSION(6,9,0))
+ LTTNG_TRACEPOINT_EVENT_CLASS(asoc_snd_soc_dapm_basic,
diff --git a/meta/recipes-kernel/lttng/lttng-modules/0002-Fix-tracepoint-event-allow-same-provider-and-event-n.patch b/meta/recipes-kernel/lttng/lttng-modules/0002-Fix-tracepoint-event-allow-same-provider-and-event-n.patch
deleted file mode 100644
index 00367eebf8..0000000000
--- a/meta/recipes-kernel/lttng/lttng-modules/0002-Fix-tracepoint-event-allow-same-provider-and-event-n.patch
+++ /dev/null
@@ -1,48 +0,0 @@
-From a7eb2e3d0a4beb1ee80b132927641dd05ef2d542 Mon Sep 17 00:00:00 2001
-From: Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
-Date: Mon, 4 Apr 2022 15:49:32 -0400
-Subject: [PATCH 02/10] Fix: tracepoint event: allow same provider and event
- name
-
-Using the same name for the provider (TRACE_SYSTEM) and event name
-causes a compilation error because the same identifiers are emitted
-twice.
-
-Fix this by prefixing the provider identifier with
-"__provider_event_desc___".
-
-Upstream-Status: Backport
-
-Signed-off-by: Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
-Change-Id: I8cdf8f859e35b8bd5c19737860d12f1ed546dfc2
----
- include/lttng/tracepoint-event-impl.h | 6 +++---
- 1 file changed, 3 insertions(+), 3 deletions(-)
-
-diff --git a/include/lttng/tracepoint-event-impl.h b/include/lttng/tracepoint-event-impl.h
-index 38b1dc43..dcb22247 100644
---- a/include/lttng/tracepoint-event-impl.h
-+++ b/include/lttng/tracepoint-event-impl.h
-@@ -1255,7 +1255,7 @@ static const struct lttng_kernel_event_desc __event_desc___##_map = { \
- #define TP_ID1(_token, _system) _token##_system
- #define TP_ID(_token, _system) TP_ID1(_token, _system)
-
--static const struct lttng_kernel_event_desc * const TP_ID(__event_desc___, TRACE_SYSTEM)[] = {
-+static const struct lttng_kernel_event_desc * const TP_ID(__provider_event_desc___, TRACE_SYSTEM)[] = {
- #include TRACE_INCLUDE(TRACE_INCLUDE_FILE)
- };
-
-@@ -1274,8 +1274,8 @@ static const struct lttng_kernel_event_desc * const TP_ID(__event_desc___, TRACE
- /* non-const because list head will be modified when registered. */
- static __used struct lttng_kernel_probe_desc TP_ID(__probe_desc___, TRACE_SYSTEM) = {
- .provider_name = __stringify(TRACE_SYSTEM),
-- .event_desc = TP_ID(__event_desc___, TRACE_SYSTEM),
-- .nr_events = ARRAY_SIZE(TP_ID(__event_desc___, TRACE_SYSTEM)),
-+ .event_desc = TP_ID(__provider_event_desc___, TRACE_SYSTEM),
-+ .nr_events = ARRAY_SIZE(TP_ID(__provider_event_desc___, TRACE_SYSTEM)),
- .head = { NULL, NULL },
- .lazy_init_head = { NULL, NULL },
- .lazy = 0,
---
-2.19.1
-
diff --git a/meta/recipes-kernel/lttng/lttng-modules/0003-Fix-mm_compaction_migratepages-changed-in-linux-6.9-.patch b/meta/recipes-kernel/lttng/lttng-modules/0003-Fix-mm_compaction_migratepages-changed-in-linux-6.9-.patch
new file mode 100644
index 0000000000..66c48ebe8f
--- /dev/null
+++ b/meta/recipes-kernel/lttng/lttng-modules/0003-Fix-mm_compaction_migratepages-changed-in-linux-6.9-.patch
@@ -0,0 +1,81 @@
+From f4a6415f8d5fa447868d1fdc7119e0a328966379 Mon Sep 17 00:00:00 2001
+From: Kienan Stewart <kstewart@efficios.com>
+Date: Mon, 25 Mar 2024 10:30:32 -0400
+Subject: [PATCH 3/4] Fix: mm_compaction_migratepages changed in linux 6.9-rc1
+
+See upstream commit:
+
+ commit ab755bf4249b992fc2140d615ab0a686d50765b4
+ Author: Baolin Wang <baolin.wang@linux.alibaba.com>
+ Date: Tue Feb 20 14:16:31 2024 +0800
+
+ mm: compaction: update the cc->nr_migratepages when allocating or freeing the freepages
+
+ Currently we will use 'cc->nr_freepages >= cc->nr_migratepages' comparison
+ to ensure that enough freepages are isolated in isolate_freepages(),
+ however it just decreases the cc->nr_freepages without updating
+ cc->nr_migratepages in compaction_alloc(), which will waste more CPU
+ cycles and cause too many freepages to be isolated.
+
+ So we should also update the cc->nr_migratepages when allocating or
+ freeing the freepages to avoid isolating excess freepages. And I can see
+ fewer free pages are scanned and isolated when running thpcompact on my
+ Arm64 server:
+
+ k6.7 k6.7_patched
+ Ops Compaction pages isolated 120692036.00 118160797.00
+ Ops Compaction migrate scanned 131210329.00 154093268.00
+ Ops Compaction free scanned 1090587971.00 1080632536.00
+ Ops Compact scan efficiency 12.03 14.26
+
+ Moreover, I did not see an obvious latency improvements, this is likely
+ because isolating freepages is not the bottleneck in the thpcompact test
+ case.
+
+ k6.7 k6.7_patched
+ Amean fault-both-1 1089.76 ( 0.00%) 1080.16 * 0.88%*
+ Amean fault-both-3 1616.48 ( 0.00%) 1636.65 * -1.25%*
+ Amean fault-both-5 2266.66 ( 0.00%) 2219.20 * 2.09%*
+ Amean fault-both-7 2909.84 ( 0.00%) 2801.90 * 3.71%*
+ Amean fault-both-12 4861.26 ( 0.00%) 4733.25 * 2.63%*
+ Amean fault-both-18 7351.11 ( 0.00%) 6950.51 * 5.45%*
+ Amean fault-both-24 9059.30 ( 0.00%) 9159.99 * -1.11%*
+ Amean fault-both-30 10685.68 ( 0.00%) 11399.02 * -6.68%*
+
+Upstream-Status: Backport [175fe77c Fix: mm_compaction_migratepages changed in linux 6.9-rc1]
+
+Change-Id: I103a43fd1b549360b3fc978fd409b7c17ef3e192
+Signed-off-by: Kienan Stewart <kstewart@efficios.com>
+Signed-off-by: Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
+---
+ include/instrumentation/events/compaction.h | 17 ++++++++++++++++-
+ 1 file changed, 16 insertions(+), 1 deletion(-)
+
+Index: lttng-modules-2.13.12/include/instrumentation/events/compaction.h
+===================================================================
+--- lttng-modules-2.13.12.orig/include/instrumentation/events/compaction.h
++++ lttng-modules-2.13.12/include/instrumentation/events/compaction.h
+@@ -97,7 +97,22 @@ LTTNG_TRACEPOINT_EVENT_INSTANCE_MAP(comp
+
+ #endif /* #else #if LTTNG_LINUX_VERSION_CODE >= LTTNG_KERNEL_VERSION(4,0,0) */
+
+-#if (LTTNG_LINUX_VERSION_CODE >= LTTNG_KERNEL_VERSION(5,18,0) || \
++#if (LTTNG_LINUX_VERSION_CODE >= LTTNG_KERNEL_VERSION(6,9,0))
++LTTNG_TRACEPOINT_EVENT_MAP(mm_compaction_migratepages,
++
++ compaction_migratepages,
++
++ TP_PROTO(unsigned int nr_migratepages,
++ unsigned int nr_succeeded),
++
++ TP_ARGS(nr_migratepages, nr_succeeded),
++
++ TP_FIELDS(
++ ctf_integer(unsigned long, nr_migrated, nr_succeeded)
++ ctf_integer(unsigned long, nr_failed, nr_migratepages - nr_succeeded)
++ )
++)
++#elif (LTTNG_LINUX_VERSION_CODE >= LTTNG_KERNEL_VERSION(5,18,0) || \
+ LTTNG_RHEL_KERNEL_RANGE(5,14,0,163,0,0, 5,15,0,0,0,0))
+ LTTNG_TRACEPOINT_EVENT_MAP(mm_compaction_migratepages,
+
diff --git a/meta/recipes-kernel/lttng/lttng-modules/0003-fix-sched-tracing-Don-t-re-read-p-state-when-emittin.patch b/meta/recipes-kernel/lttng/lttng-modules/0003-fix-sched-tracing-Don-t-re-read-p-state-when-emittin.patch
deleted file mode 100644
index afe514de82..0000000000
--- a/meta/recipes-kernel/lttng/lttng-modules/0003-fix-sched-tracing-Don-t-re-read-p-state-when-emittin.patch
+++ /dev/null
@@ -1,183 +0,0 @@
-From 8e52fd71e693619f7a58de2692e59f0c826e9988 Mon Sep 17 00:00:00 2001
-From: Michael Jeanson <mjeanson@efficios.com>
-Date: Mon, 4 Apr 2022 13:52:57 -0400
-Subject: [PATCH 03/10] fix: sched/tracing: Don't re-read p->state when
- emitting sched_switch event (v5.18)
-
-See upstream commit :
-
- commit fa2c3254d7cfff5f7a916ab928a562d1165f17bb
- Author: Valentin Schneider <valentin.schneider@arm.com>
- Date: Thu Jan 20 16:25:19 2022 +0000
-
- sched/tracing: Don't re-read p->state when emitting sched_switch event
-
- As of commit
-
- c6e7bd7afaeb ("sched/core: Optimize ttwu() spinning on p->on_cpu")
-
- the following sequence becomes possible:
-
- p->__state = TASK_INTERRUPTIBLE;
- __schedule()
- deactivate_task(p);
- ttwu()
- READ !p->on_rq
- p->__state=TASK_WAKING
- trace_sched_switch()
- __trace_sched_switch_state()
- task_state_index()
- return 0;
-
- TASK_WAKING isn't in TASK_REPORT, so the task appears as TASK_RUNNING in
- the trace event.
-
- Prevent this by pushing the value read from __schedule() down the trace
- event.
-
-Upstream-Status: Backport
-
-Change-Id: I46743cd006be4b4d573cae2d77df7d6d16744d04
-Signed-off-by: Michael Jeanson <mjeanson@efficios.com>
-Signed-off-by: Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
----
- include/instrumentation/events/sched.h | 88 +++++++++++++++++++++++---
- 1 file changed, 78 insertions(+), 10 deletions(-)
-
-diff --git a/include/instrumentation/events/sched.h b/include/instrumentation/events/sched.h
-index 91953a6f..339bec94 100644
---- a/include/instrumentation/events/sched.h
-+++ b/include/instrumentation/events/sched.h
-@@ -20,7 +20,37 @@
- #ifndef _TRACE_SCHED_DEF_
- #define _TRACE_SCHED_DEF_
-
--#if (LTTNG_LINUX_VERSION_CODE >= LTTNG_KERNEL_VERSION(4,15,0))
-+#if (LTTNG_LINUX_VERSION_CODE >= LTTNG_KERNEL_VERSION(5,18,0))
-+
-+static inline long __trace_sched_switch_state(bool preempt,
-+ unsigned int prev_state,
-+ struct task_struct *p)
-+{
-+ unsigned int state;
-+
-+#ifdef CONFIG_SCHED_DEBUG
-+ BUG_ON(p != current);
-+#endif /* CONFIG_SCHED_DEBUG */
-+
-+ /*
-+ * Preemption ignores task state, therefore preempted tasks are always
-+ * RUNNING (we will not have dequeued if state != RUNNING).
-+ */
-+ if (preempt)
-+ return TASK_REPORT_MAX;
-+
-+ /*
-+ * task_state_index() uses fls() and returns a value from 0-8 range.
-+ * Decrement it by 1 (except TASK_RUNNING state i.e 0) before using
-+ * it for left shift operation to get the correct task->state
-+ * mapping.
-+ */
-+ state = __task_state_index(prev_state, p->exit_state);
-+
-+ return state ? (1 << (state - 1)) : state;
-+}
-+
-+#elif (LTTNG_LINUX_VERSION_CODE >= LTTNG_KERNEL_VERSION(4,15,0))
-
- static inline long __trace_sched_switch_state(bool preempt, struct task_struct *p)
- {
-@@ -321,43 +351,81 @@ LTTNG_TRACEPOINT_EVENT_INSTANCE(sched_wakeup_template, sched_wakeup_new,
- /*
- * Tracepoint for task switches, performed by the scheduler:
- */
-+
-+#if (LTTNG_LINUX_VERSION_CODE >= LTTNG_KERNEL_VERSION(5,18,0))
- LTTNG_TRACEPOINT_EVENT(sched_switch,
-
--#if (LTTNG_LINUX_VERSION_CODE >= LTTNG_KERNEL_VERSION(4,4,0))
- TP_PROTO(bool preempt,
-- struct task_struct *prev,
-- struct task_struct *next),
-+ unsigned int prev_state,
-+ struct task_struct *prev,
-+ struct task_struct *next),
-
-- TP_ARGS(preempt, prev, next),
-+ TP_ARGS(preempt, prev_state, prev, next),
-+
-+ TP_FIELDS(
-+ ctf_array_text(char, prev_comm, prev->comm, TASK_COMM_LEN)
-+ ctf_integer(pid_t, prev_tid, prev->pid)
-+ ctf_integer(int, prev_prio, prev->prio - MAX_RT_PRIO)
-+#ifdef CONFIG_LTTNG_EXPERIMENTAL_BITWISE_ENUM
-+ ctf_enum(task_state, long, prev_state, __trace_sched_switch_state(preempt, prev_state, prev))
- #else
-- TP_PROTO(struct task_struct *prev,
-+ ctf_integer(long, prev_state, __trace_sched_switch_state(preempt, prev_state, prev))
-+#endif
-+ ctf_array_text(char, next_comm, next->comm, TASK_COMM_LEN)
-+ ctf_integer(pid_t, next_tid, next->pid)
-+ ctf_integer(int, next_prio, next->prio - MAX_RT_PRIO)
-+ )
-+)
-+
-+#elif (LTTNG_LINUX_VERSION_CODE >= LTTNG_KERNEL_VERSION(4,4,0))
-+
-+LTTNG_TRACEPOINT_EVENT(sched_switch,
-+
-+ TP_PROTO(bool preempt,
-+ struct task_struct *prev,
- struct task_struct *next),
-
-- TP_ARGS(prev, next),
--#endif /* #if (LTTNG_LINUX_VERSION_CODE >= LTTNG_KERNEL_VERSION(4,4,0)) */
-+ TP_ARGS(preempt, prev, next),
-
- TP_FIELDS(
- ctf_array_text(char, prev_comm, prev->comm, TASK_COMM_LEN)
- ctf_integer(pid_t, prev_tid, prev->pid)
- ctf_integer(int, prev_prio, prev->prio - MAX_RT_PRIO)
--#if (LTTNG_LINUX_VERSION_CODE >= LTTNG_KERNEL_VERSION(4,4,0))
- #ifdef CONFIG_LTTNG_EXPERIMENTAL_BITWISE_ENUM
- ctf_enum(task_state, long, prev_state, __trace_sched_switch_state(preempt, prev))
- #else
- ctf_integer(long, prev_state, __trace_sched_switch_state(preempt, prev))
- #endif
-+ ctf_array_text(char, next_comm, next->comm, TASK_COMM_LEN)
-+ ctf_integer(pid_t, next_tid, next->pid)
-+ ctf_integer(int, next_prio, next->prio - MAX_RT_PRIO)
-+ )
-+)
-+
- #else
-+
-+LTTNG_TRACEPOINT_EVENT(sched_switch,
-+
-+ TP_PROTO(struct task_struct *prev,
-+ struct task_struct *next),
-+
-+ TP_ARGS(prev, next),
-+
-+ TP_FIELDS(
-+ ctf_array_text(char, prev_comm, prev->comm, TASK_COMM_LEN)
-+ ctf_integer(pid_t, prev_tid, prev->pid)
-+ ctf_integer(int, prev_prio, prev->prio - MAX_RT_PRIO)
- #ifdef CONFIG_LTTNG_EXPERIMENTAL_BITWISE_ENUM
- ctf_enum(task_state, long, prev_state, __trace_sched_switch_state(prev))
- #else
- ctf_integer(long, prev_state, __trace_sched_switch_state(prev))
--#endif
- #endif
- ctf_array_text(char, next_comm, next->comm, TASK_COMM_LEN)
- ctf_integer(pid_t, next_tid, next->pid)
- ctf_integer(int, next_prio, next->prio - MAX_RT_PRIO)
- )
- )
-+#endif
-
- /*
- * Tracepoint for a task being migrated:
---
-2.19.1
-
diff --git a/meta/recipes-kernel/lttng/lttng-modules/0004-Fix-dev_base_lock-removed-in-linux-6.9-rc1.patch b/meta/recipes-kernel/lttng/lttng-modules/0004-Fix-dev_base_lock-removed-in-linux-6.9-rc1.patch
new file mode 100644
index 0000000000..59ef4f9313
--- /dev/null
+++ b/meta/recipes-kernel/lttng/lttng-modules/0004-Fix-dev_base_lock-removed-in-linux-6.9-rc1.patch
@@ -0,0 +1,57 @@
+From 217bc2e4685050dddce9bdd2557b64f6b8c16622 Mon Sep 17 00:00:00 2001
+From: Kienan Stewart <kstewart@efficios.com>
+Date: Mon, 25 Mar 2024 10:53:46 -0400
+Subject: [PATCH 4/4] Fix: dev_base_lock removed in linux 6.9-rc1
+
+See upstream commit:
+
+ commit 1b3ef46cb7f2618cc0b507393220a69810f6da12
+ Author: Eric Dumazet <edumazet@google.com>
+ Date: Tue Feb 13 06:32:45 2024 +0000
+
+ net: remove dev_base_lock
+
+ dev_base_lock is not needed anymore, all remaining users also hold RTNL.
+
+Upstream-Status: Backport [52eb2ee9 Fix: dev_base_lock removed in linux 6.9-rc1]
+
+Change-Id: I6b07e6eed07fd398302ca14d23162ed24d74df15
+Signed-off-by: Kienan Stewart <kstewart@efficios.com>
+Signed-off-by: Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
+---
+ src/lttng-statedump-impl.c | 15 +++++++++++++++
+ 1 file changed, 15 insertions(+)
+
+Index: lttng-modules-2.13.12/src/lttng-statedump-impl.c
+===================================================================
+--- lttng-modules-2.13.12.orig/src/lttng-statedump-impl.c
++++ lttng-modules-2.13.12/src/lttng-statedump-impl.c
+@@ -392,6 +392,20 @@ void lttng_enumerate_device(struct lttng
+ }
+ }
+
++#if (LTTNG_LINUX_VERSION_CODE >= LTTNG_KERNEL_VERSION(6,9,0))
++static
++int lttng_enumerate_network_ip_interface(struct lttng_kernel_session *session)
++{
++ struct net_device *dev;
++
++ rtnl_lock();
++ for_each_netdev(&init_net, dev)
++ lttng_enumerate_device(session, dev);
++ rtnl_unlock();
++
++ return 0;
++}
++#else
+ static
+ int lttng_enumerate_network_ip_interface(struct lttng_kernel_session *session)
+ {
+@@ -404,6 +418,7 @@ int lttng_enumerate_network_ip_interface
+
+ return 0;
+ }
++#endif /* (LTTNG_LINUX_VERSION_CODE >= LTTNG_KERNEL_VERSION(6,9,0)) */
+ #else /* CONFIG_INET */
+ static inline
+ int lttng_enumerate_network_ip_interface(struct lttng_kernel_session *session)
diff --git a/meta/recipes-kernel/lttng/lttng-modules/0004-fix-block-remove-genhd.h-v5.18.patch b/meta/recipes-kernel/lttng/lttng-modules/0004-fix-block-remove-genhd.h-v5.18.patch
deleted file mode 100644
index 9248ffe4ff..0000000000
--- a/meta/recipes-kernel/lttng/lttng-modules/0004-fix-block-remove-genhd.h-v5.18.patch
+++ /dev/null
@@ -1,45 +0,0 @@
-From 868e0b6db59159197c2cec3550fa4ad5e6572bc5 Mon Sep 17 00:00:00 2001
-From: Michael Jeanson <mjeanson@efficios.com>
-Date: Mon, 4 Apr 2022 13:54:59 -0400
-Subject: [PATCH 04/10] fix: block: remove genhd.h (v5.18)
-
-See upstream commit :
-
- commit 322cbb50de711814c42fb088f6d31901502c711a
- Author: Christoph Hellwig <hch@lst.de>
- Date: Mon Jan 24 10:39:13 2022 +0100
-
- block: remove genhd.h
-
- There is no good reason to keep genhd.h separate from the main blkdev.h
- header that includes it. So fold the contents of genhd.h into blkdev.h
- and remove genhd.h entirely.
-
-Upstream-Status: Backport
-
-Change-Id: I7cf2aaa3a4c133320b95f2edde49f790f9515dbd
-Signed-off-by: Michael Jeanson <mjeanson@efficios.com>
-Signed-off-by: Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
----
- include/wrapper/genhd.h | 4 ++++
- 1 file changed, 4 insertions(+)
-
-diff --git a/include/wrapper/genhd.h b/include/wrapper/genhd.h
-index 3c6dbcbe..4a59b68e 100644
---- a/include/wrapper/genhd.h
-+++ b/include/wrapper/genhd.h
-@@ -12,7 +12,11 @@
- #ifndef _LTTNG_WRAPPER_GENHD_H
- #define _LTTNG_WRAPPER_GENHD_H
-
-+#if (LTTNG_LINUX_VERSION_CODE >= LTTNG_KERNEL_VERSION(5,18,0))
-+#include <linux/blkdev.h>
-+#else
- #include <linux/genhd.h>
-+#endif
-
- #if (LTTNG_LINUX_VERSION_CODE >= LTTNG_KERNEL_VERSION(5,17,0))
- #define LTTNG_GENHD_FL_HIDDEN GENHD_FL_HIDDEN
---
-2.19.1
-
diff --git a/meta/recipes-kernel/lttng/lttng-modules/0005-fix-scsi-block-Remove-REQ_OP_WRITE_SAME-support-v5.1.patch b/meta/recipes-kernel/lttng/lttng-modules/0005-fix-scsi-block-Remove-REQ_OP_WRITE_SAME-support-v5.1.patch
deleted file mode 100644
index 0751827613..0000000000
--- a/meta/recipes-kernel/lttng/lttng-modules/0005-fix-scsi-block-Remove-REQ_OP_WRITE_SAME-support-v5.1.patch
+++ /dev/null
@@ -1,79 +0,0 @@
-From 2bc7cb7193124d20aa4e1b5dbad0410bfb97a470 Mon Sep 17 00:00:00 2001
-From: Michael Jeanson <mjeanson@efficios.com>
-Date: Mon, 4 Apr 2022 14:12:13 -0400
-Subject: [PATCH 05/10] fix: scsi: block: Remove REQ_OP_WRITE_SAME support
- (v5.18)
-
-See upstream commit :
-
- commit 73bd66d9c834220579c881a3eb020fd8917075d8
- Author: Christoph Hellwig <hch@lst.de>
- Date: Wed Feb 9 09:28:28 2022 +0100
-
- scsi: block: Remove REQ_OP_WRITE_SAME support
-
- No more users of REQ_OP_WRITE_SAME or drivers implementing it are left,
- so remove the infrastructure.
-
-Upstream-Status: Backport
-
-Change-Id: Ifbff71f79f8b590436fc7cb79f82d90c6e033d84
-Signed-off-by: Michael Jeanson <mjeanson@efficios.com>
-Signed-off-by: Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
----
- include/instrumentation/events/block.h | 32 ++++++++++++++++++++++++++
- 1 file changed, 32 insertions(+)
-
-diff --git a/include/instrumentation/events/block.h b/include/instrumentation/events/block.h
-index 3e1104d7..050a59a2 100644
---- a/include/instrumentation/events/block.h
-+++ b/include/instrumentation/events/block.h
-@@ -66,6 +66,37 @@ LTTNG_TRACEPOINT_ENUM(block_rq_type,
- #define lttng_bio_op(bio) bio_op(bio)
- #define lttng_bio_rw(bio) ((bio)->bi_opf)
-
-+#if (LTTNG_LINUX_VERSION_CODE >= LTTNG_KERNEL_VERSION(5,18,0))
-+#ifdef CONFIG_LTTNG_EXPERIMENTAL_BITWISE_ENUM
-+#define blk_rwbs_ctf_integer(type, rwbs, op, rw, bytes) \
-+ ctf_enum(block_rq_type, type, rwbs, \
-+ ( (op) == REQ_OP_WRITE ? RWBS_FLAG_WRITE : \
-+ ( (op) == REQ_OP_DISCARD ? RWBS_FLAG_DISCARD : \
-+ ( (op) == REQ_OP_SECURE_ERASE ? (RWBS_FLAG_DISCARD | RWBS_FLAG_SECURE) : \
-+ ( (op) == REQ_OP_FLUSH ? RWBS_FLAG_FLUSH : \
-+ ( (op) == REQ_OP_READ ? RWBS_FLAG_READ : \
-+ ( 0 )))))) \
-+ | ((rw) & REQ_RAHEAD ? RWBS_FLAG_RAHEAD : 0) \
-+ | ((rw) & REQ_SYNC ? RWBS_FLAG_SYNC : 0) \
-+ | ((rw) & REQ_META ? RWBS_FLAG_META : 0) \
-+ | ((rw) & REQ_PREFLUSH ? RWBS_FLAG_PREFLUSH : 0) \
-+ | ((rw) & REQ_FUA ? RWBS_FLAG_FUA : 0))
-+#else
-+#define blk_rwbs_ctf_integer(type, rwbs, op, rw, bytes) \
-+ ctf_integer(type, rwbs, \
-+ ( (op) == REQ_OP_WRITE ? RWBS_FLAG_WRITE : \
-+ ( (op) == REQ_OP_DISCARD ? RWBS_FLAG_DISCARD : \
-+ ( (op) == REQ_OP_SECURE_ERASE ? (RWBS_FLAG_DISCARD | RWBS_FLAG_SECURE) : \
-+ ( (op) == REQ_OP_FLUSH ? RWBS_FLAG_FLUSH : \
-+ ( (op) == REQ_OP_READ ? RWBS_FLAG_READ : \
-+ ( 0 )))))) \
-+ | ((rw) & REQ_RAHEAD ? RWBS_FLAG_RAHEAD : 0) \
-+ | ((rw) & REQ_SYNC ? RWBS_FLAG_SYNC : 0) \
-+ | ((rw) & REQ_META ? RWBS_FLAG_META : 0) \
-+ | ((rw) & REQ_PREFLUSH ? RWBS_FLAG_PREFLUSH : 0) \
-+ | ((rw) & REQ_FUA ? RWBS_FLAG_FUA : 0))
-+#endif /* CONFIG_LTTNG_EXPERIMENTAL_BITWISE_ENUM */
-+#else
- #ifdef CONFIG_LTTNG_EXPERIMENTAL_BITWISE_ENUM
- #define blk_rwbs_ctf_integer(type, rwbs, op, rw, bytes) \
- ctf_enum(block_rq_type, type, rwbs, \
-@@ -95,6 +126,7 @@ LTTNG_TRACEPOINT_ENUM(block_rq_type,
- | ((rw) & REQ_PREFLUSH ? RWBS_FLAG_PREFLUSH : 0) \
- | ((rw) & REQ_FUA ? RWBS_FLAG_FUA : 0))
- #endif /* CONFIG_LTTNG_EXPERIMENTAL_BITWISE_ENUM */
-+#endif
-
- #elif (LTTNG_LINUX_VERSION_CODE >= LTTNG_KERNEL_VERSION(3,1,0))
-
---
-2.19.1
-
diff --git a/meta/recipes-kernel/lttng/lttng-modules/0006-fix-random-remove-unused-tracepoints-v5.18.patch b/meta/recipes-kernel/lttng/lttng-modules/0006-fix-random-remove-unused-tracepoints-v5.18.patch
deleted file mode 100644
index 9c2f70d4af..0000000000
--- a/meta/recipes-kernel/lttng/lttng-modules/0006-fix-random-remove-unused-tracepoints-v5.18.patch
+++ /dev/null
@@ -1,47 +0,0 @@
-From 369d82bb1746447514c877088d7c5fd0f39140f8 Mon Sep 17 00:00:00 2001
-From: Michael Jeanson <mjeanson@efficios.com>
-Date: Mon, 4 Apr 2022 14:33:42 -0400
-Subject: [PATCH 06/10] fix: random: remove unused tracepoints (v5.18)
-
-See upstream commit :
-
- commit 14c174633f349cb41ea90c2c0aaddac157012f74
- Author: Jason A. Donenfeld <Jason@zx2c4.com>
- Date: Thu Feb 10 16:40:44 2022 +0100
-
- random: remove unused tracepoints
-
- These explicit tracepoints aren't really used and show sign of aging.
- It's work to keep these up to date, and before I attempted to keep them
- up to date, they weren't up to date, which indicates that they're not
- really used. These days there are better ways of introspecting anyway.
-
-Upstream-Status: Backport
-
-Change-Id: I3b8c3e2732e7efdd76ce63204ac53a48784d0df6
-Signed-off-by: Michael Jeanson <mjeanson@efficios.com>
-Signed-off-by: Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
----
- src/probes/Kbuild | 5 ++++-
- 1 file changed, 4 insertions(+), 1 deletion(-)
-
-diff --git a/src/probes/Kbuild b/src/probes/Kbuild
-index e26b4359..8d6ff0f2 100644
---- a/src/probes/Kbuild
-+++ b/src/probes/Kbuild
-@@ -187,8 +187,11 @@ ifneq ($(CONFIG_FRAME_WARN),0)
- CFLAGS_lttng-probe-printk.o += -Wframe-larger-than=2200
- endif
-
-+# Introduced in v3.6, remove in v5.18
- obj-$(CONFIG_LTTNG) += $(shell \
-- if [ $(VERSION) -ge 4 \
-+ if [ \( ! \( $(VERSION) -ge 6 -o \( $(VERSION) -eq 5 -a $(PATCHLEVEL) -ge 18 \) \) \) \
-+ -a \
-+ $(VERSION) -ge 4 \
- -o \( $(VERSION) -eq 3 -a $(PATCHLEVEL) -ge 6 \) \
- -o \( $(VERSION) -eq 3 -a $(PATCHLEVEL) -eq 5 -a $(SUBLEVEL) -ge 2 \) \
- -o \( $(VERSION) -eq 3 -a $(PATCHLEVEL) -eq 4 -a $(SUBLEVEL) -ge 9 \) \
---
-2.19.1
-
diff --git a/meta/recipes-kernel/lttng/lttng-modules/0007-fix-kprobes-Use-rethook-for-kretprobe-if-possible-v5.patch b/meta/recipes-kernel/lttng/lttng-modules/0007-fix-kprobes-Use-rethook-for-kretprobe-if-possible-v5.patch
deleted file mode 100644
index effd37ffe1..0000000000
--- a/meta/recipes-kernel/lttng/lttng-modules/0007-fix-kprobes-Use-rethook-for-kretprobe-if-possible-v5.patch
+++ /dev/null
@@ -1,72 +0,0 @@
-From 3c46ddc134621dba65030263aa321dd6bdae3ba3 Mon Sep 17 00:00:00 2001
-From: Michael Jeanson <mjeanson@efficios.com>
-Date: Mon, 4 Apr 2022 15:02:10 -0400
-Subject: [PATCH 07/10] fix: kprobes: Use rethook for kretprobe if possible
- (v5.18)
-
-See upstream commit :
-
- commit 73f9b911faa74ac5107879de05c9489c419f41bb
- Author: Masami Hiramatsu <mhiramat@kernel.org>
- Date: Sat Mar 26 11:27:05 2022 +0900
-
- kprobes: Use rethook for kretprobe if possible
-
- Use rethook for kretprobe function return hooking if the arch sets
- CONFIG_HAVE_RETHOOK=y. In this case, CONFIG_KRETPROBE_ON_RETHOOK is
- set to 'y' automatically, and the kretprobe internal data fields
- switches to use rethook. If not, it continues to use kretprobe
- specific function return hooks.
-
-Upstream-Status: Backport
-
-Change-Id: I2b7670dc04e4769c1e3c372582ad2f555f6d7a66
-Signed-off-by: Michael Jeanson <mjeanson@efficios.com>
-Signed-off-by: Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
----
- include/wrapper/kprobes.h | 17 +++++++++++++++++
- src/probes/lttng-kretprobes.c | 2 +-
- 2 files changed, 18 insertions(+), 1 deletion(-)
-
-diff --git a/include/wrapper/kprobes.h b/include/wrapper/kprobes.h
-index b546d615..51d32b7c 100644
---- a/include/wrapper/kprobes.h
-+++ b/include/wrapper/kprobes.h
-@@ -29,4 +29,21 @@ struct kretprobe *lttng_get_kretprobe(struct kretprobe_instance *ri)
-
- #endif /* LTTNG_LINUX_VERSION_CODE >= LTTNG_KERNEL_VERSION(5,11,0) */
-
-+
-+#if (LTTNG_LINUX_VERSION_CODE >= LTTNG_KERNEL_VERSION(5,18,0))
-+static inline
-+unsigned long lttng_get_kretprobe_retaddr(struct kretprobe_instance *ri)
-+{
-+ return get_kretprobe_retaddr(ri);
-+}
-+
-+#else
-+
-+static inline
-+unsigned long lttng_get_kretprobe_retaddr(struct kretprobe_instance *ri)
-+{
-+ return (unsigned long) ri->ret_addr;
-+}
-+#endif
-+
- #endif /* _LTTNG_WRAPPER_KPROBES_H */
-diff --git a/src/probes/lttng-kretprobes.c b/src/probes/lttng-kretprobes.c
-index 5cb2e953..565df739 100644
---- a/src/probes/lttng-kretprobes.c
-+++ b/src/probes/lttng-kretprobes.c
-@@ -81,7 +81,7 @@ int _lttng_kretprobes_handler(struct kretprobe_instance *krpi,
- int ret;
-
- payload.ip = (unsigned long) lttng_get_kretprobe(krpi)->kp.addr;
-- payload.parent_ip = (unsigned long) krpi->ret_addr;
-+ payload.parent_ip = lttng_get_kretprobe_retaddr(krpi);
-
- lib_ring_buffer_ctx_init(&ctx, event_recorder, sizeof(payload),
- lttng_alignof(payload), &lttng_probe_ctx);
---
-2.19.1
-
diff --git a/meta/recipes-kernel/lttng/lttng-modules/0008-fix-scsi-core-Remove-scsi-scsi_request.h-v5.18.patch b/meta/recipes-kernel/lttng/lttng-modules/0008-fix-scsi-core-Remove-scsi-scsi_request.h-v5.18.patch
deleted file mode 100644
index 13c504b859..0000000000
--- a/meta/recipes-kernel/lttng/lttng-modules/0008-fix-scsi-core-Remove-scsi-scsi_request.h-v5.18.patch
+++ /dev/null
@@ -1,44 +0,0 @@
-From e8d2f286b5b208ac8870d0a9c167b170e96169b3 Mon Sep 17 00:00:00 2001
-From: Michael Jeanson <mjeanson@efficios.com>
-Date: Mon, 4 Apr 2022 15:08:48 -0400
-Subject: [PATCH 08/10] fix: scsi: core: Remove <scsi/scsi_request.h> (v5.18)
-
-See upstream commit :
-
- commit 26440303310591e29121964ede0048583cb3126d
- Author: Christoph Hellwig <hch@lst.de>
- Date: Thu Feb 24 18:55:52 2022 +0100
-
- scsi: core: Remove <scsi/scsi_request.h>
-
- This header is empty now except for an include of <linux/blk-mq.h>, so
- remove it.
-
-Upstream-Status: Backport
-
-Change-Id: Ic8ee3352f1e8bddfcd44c31be9b788db82f183aa
-Signed-off-by: Michael Jeanson <mjeanson@efficios.com>
-Signed-off-by: Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
----
- include/instrumentation/events/block.h | 4 ++--
- 1 file changed, 2 insertions(+), 2 deletions(-)
-
-diff --git a/include/instrumentation/events/block.h b/include/instrumentation/events/block.h
-index 050a59a2..882e6e08 100644
---- a/include/instrumentation/events/block.h
-+++ b/include/instrumentation/events/block.h
-@@ -11,9 +11,9 @@
- #include <linux/trace_seq.h>
- #include <lttng/kernel-version.h>
-
--#if (LTTNG_LINUX_VERSION_CODE >= LTTNG_KERNEL_VERSION(4,11,0))
-+#if LTTNG_KERNEL_RANGE(4,11,0, 5,18,0)
- #include <scsi/scsi_request.h>
--#endif /* (LTTNG_LINUX_VERSION_CODE >= LTTNG_KERNEL_VERSION(4,11,0)) */
-+#endif /* LTTNG_KERNEL_RANGE(4,11,0, 5,18,0) */
-
- #ifndef _TRACE_BLOCK_DEF_
- #define _TRACE_BLOCK_DEF_
---
-2.19.1
-
diff --git a/meta/recipes-kernel/lttng/lttng-modules/0009-Rename-genhd-wrapper-to-blkdev.patch b/meta/recipes-kernel/lttng/lttng-modules/0009-Rename-genhd-wrapper-to-blkdev.patch
deleted file mode 100644
index 90fec9dc58..0000000000
--- a/meta/recipes-kernel/lttng/lttng-modules/0009-Rename-genhd-wrapper-to-blkdev.patch
+++ /dev/null
@@ -1,76 +0,0 @@
-From 82fbf9d383ff9069808fb0f5f75c660098dbae52 Mon Sep 17 00:00:00 2001
-From: Michael Jeanson <mjeanson@efficios.com>
-Date: Tue, 5 Apr 2022 14:57:41 -0400
-Subject: [PATCH 09/10] Rename genhd wrapper to blkdev
-
-The genhd.h header was folded into blkdev.h in v5.18, rename our wrapper
-to follow upstream.
-
-Upstream-Status: Backport
-
-Change-Id: I4ec94fb94d11712dd20f0680aea1de77fbfa9d17
-Signed-off-by: Michael Jeanson <mjeanson@efficios.com>
-Signed-off-by: Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
----
- include/wrapper/{genhd.h => blkdev.h} | 10 +++++-----
- src/lttng-statedump-impl.c | 2 +-
- 2 files changed, 6 insertions(+), 6 deletions(-)
- rename include/wrapper/{genhd.h => blkdev.h} (93%)
-
-diff --git a/include/wrapper/genhd.h b/include/wrapper/blkdev.h
-similarity index 93%
-rename from include/wrapper/genhd.h
-rename to include/wrapper/blkdev.h
-index 4a59b68e..0d5ad90f 100644
---- a/include/wrapper/genhd.h
-+++ b/include/wrapper/blkdev.h
-@@ -1,6 +1,6 @@
- /* SPDX-License-Identifier: (GPL-2.0-only or LGPL-2.1-only)
- *
-- * wrapper/genhd.h
-+ * wrapper/blkdev.h
- *
- * wrapper around block layer functions and data structures. Using
- * KALLSYMS to get its address when available, else we need to have a
-@@ -9,8 +9,8 @@
- * Copyright (C) 2011-2014 Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
- */
-
--#ifndef _LTTNG_WRAPPER_GENHD_H
--#define _LTTNG_WRAPPER_GENHD_H
-+#ifndef _LTTNG_WRAPPER_BLKDEV_H
-+#define _LTTNG_WRAPPER_BLKDEV_H
-
- #if (LTTNG_LINUX_VERSION_CODE >= LTTNG_KERNEL_VERSION(5,18,0))
- #include <linux/blkdev.h>
-@@ -45,7 +45,7 @@ struct class *wrapper_get_block_class(void)
- /*
- * Canary function to check for 'block_class' at compile time.
- *
-- * From 'include/linux/genhd.h':
-+ * From 'include/linux/blkdev.h':
- *
- * extern struct class block_class;
- */
-@@ -104,4 +104,4 @@ struct device_type *wrapper_get_disk_type(void)
-
- #endif
-
--#endif /* _LTTNG_WRAPPER_GENHD_H */
-+#endif /* _LTTNG_WRAPPER_BLKDEV_H */
-diff --git a/src/lttng-statedump-impl.c b/src/lttng-statedump-impl.c
-index 4d7b2921..0e753090 100644
---- a/src/lttng-statedump-impl.c
-+++ b/src/lttng-statedump-impl.c
-@@ -41,7 +41,7 @@
- #include <wrapper/namespace.h>
- #include <wrapper/irq.h>
- #include <wrapper/tracepoint.h>
--#include <wrapper/genhd.h>
-+#include <wrapper/blkdev.h>
- #include <wrapper/file.h>
- #include <wrapper/fdtable.h>
- #include <wrapper/sched.h>
---
-2.19.1
-
diff --git a/meta/recipes-kernel/lttng/lttng-modules/0010-fix-mm-compaction-cleanup-the-compaction-trace-event.patch b/meta/recipes-kernel/lttng/lttng-modules/0010-fix-mm-compaction-cleanup-the-compaction-trace-event.patch
deleted file mode 100644
index 892d3f0d23..0000000000
--- a/meta/recipes-kernel/lttng/lttng-modules/0010-fix-mm-compaction-cleanup-the-compaction-trace-event.patch
+++ /dev/null
@@ -1,106 +0,0 @@
-From f9208dc00756dfa0a2f191799722030bdf3f793d Mon Sep 17 00:00:00 2001
-From: Michael Jeanson <mjeanson@efficios.com>
-Date: Mon, 4 Apr 2022 15:14:01 -0400
-Subject: [PATCH 10/10] fix: mm: compaction: cleanup the compaction trace
- events (v5.18)
-
-See upstream commit :
-
- commit abd4349ff9b8d242376b67711254221f64f447c7
- Author: Baolin Wang <baolin.wang@linux.alibaba.com>
- Date: Tue Mar 22 14:45:56 2022 -0700
-
- mm: compaction: cleanup the compaction trace events
-
- As Steven suggested [1], we should access the pointers from the trace
- event to avoid dereferencing them to the tracepoint function when the
- tracepoint is disabled.
-
- [1] https://lkml.org/lkml/2021/11/3/409
-
-Upstream-Status: Backport
-
-Change-Id: I6c08250df8596e8dbc76780ae5d95c899c12e6fe
-Signed-off-by: Michael Jeanson <mjeanson@efficios.com>
-Signed-off-by: Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
----
- include/instrumentation/events/compaction.h | 17 ++++++++++++++++-
- src/probes/Kbuild | 17 ++++++++++++++++-
- src/probes/lttng-probe-compaction.c | 5 +++++
- 3 files changed, 37 insertions(+), 2 deletions(-)
-
-diff --git a/include/instrumentation/events/compaction.h b/include/instrumentation/events/compaction.h
-index 15964537..ecae39a8 100644
---- a/include/instrumentation/events/compaction.h
-+++ b/include/instrumentation/events/compaction.h
-@@ -97,7 +97,22 @@ LTTNG_TRACEPOINT_EVENT_INSTANCE_MAP(compaction_isolate_template,
-
- #endif /* #else #if LTTNG_LINUX_VERSION_CODE >= LTTNG_KERNEL_VERSION(4,0,0) */
-
--#if (LTTNG_LINUX_VERSION_CODE >= LTTNG_KERNEL_VERSION(5,17,0))
-+#if (LTTNG_LINUX_VERSION_CODE >= LTTNG_KERNEL_VERSION(5,18,0))
-+LTTNG_TRACEPOINT_EVENT_MAP(mm_compaction_migratepages,
-+
-+ compaction_migratepages,
-+
-+ TP_PROTO(struct compact_control *cc,
-+ unsigned int nr_succeeded),
-+
-+ TP_ARGS(cc, nr_succeeded),
-+
-+ TP_FIELDS(
-+ ctf_integer(unsigned long, nr_migrated, nr_succeeded)
-+ ctf_integer(unsigned long, nr_failed, cc->nr_migratepages - nr_succeeded)
-+ )
-+)
-+#elif (LTTNG_LINUX_VERSION_CODE >= LTTNG_KERNEL_VERSION(5,17,0))
- LTTNG_TRACEPOINT_EVENT_MAP(mm_compaction_migratepages,
-
- compaction_migratepages,
-diff --git a/src/probes/Kbuild b/src/probes/Kbuild
-index 8d6ff0f2..54784477 100644
---- a/src/probes/Kbuild
-+++ b/src/probes/Kbuild
-@@ -167,7 +167,22 @@ ifneq ($(CONFIG_BTRFS_FS),)
- endif # $(wildcard $(btrfs_dep))
- endif # CONFIG_BTRFS_FS
-
--obj-$(CONFIG_LTTNG) += lttng-probe-compaction.o
-+# A dependency on internal header 'mm/internal.h' was introduced in v5.18
-+compaction_dep = $(srctree)/mm/internal.h
-+compaction_dep_wildcard = $(wildcard $(compaction_dep))
-+compaction_dep_check = $(shell \
-+if [ \( $(VERSION) -ge 6 \
-+ -o \( $(VERSION) -eq 5 -a $(PATCHLEVEL) -ge 18 \) \) -a \
-+ -z "$(compaction_dep_wildcard)" ] ; then \
-+ echo "warn" ; \
-+else \
-+ echo "ok" ; \
-+fi ;)
-+ifeq ($(compaction_dep_check),ok)
-+ obj-$(CONFIG_LTTNG) += lttng-probe-compaction.o
-+else
-+ $(warning Files $(compaction_dep) not found. Probe "compaction" is disabled. Use full kernel source tree to enable it.)
-+endif # $(wildcard $(compaction_dep))
-
- ifneq ($(CONFIG_EXT4_FS),)
- ext4_dep = $(srctree)/fs/ext4/*.h
-diff --git a/src/probes/lttng-probe-compaction.c b/src/probes/lttng-probe-compaction.c
-index f8ddf384..ffaf45f0 100644
---- a/src/probes/lttng-probe-compaction.c
-+++ b/src/probes/lttng-probe-compaction.c
-@@ -10,6 +10,11 @@
-
- #include <linux/module.h>
- #include <lttng/tracer.h>
-+#include <lttng/kernel-version.h>
-+
-+#if (LTTNG_LINUX_VERSION_CODE >= LTTNG_KERNEL_VERSION(5,18,0))
-+#include "../mm/internal.h"
-+#endif
-
- /*
- * Create the tracepoint static inlines from the kernel to validate that our
---
-2.19.1
-
diff --git a/meta/recipes-kernel/lttng/lttng-modules_2.13.12.bb b/meta/recipes-kernel/lttng/lttng-modules_2.13.12.bb
new file mode 100644
index 0000000000..95d5e2d615
--- /dev/null
+++ b/meta/recipes-kernel/lttng/lttng-modules_2.13.12.bb
@@ -0,0 +1,45 @@
+SECTION = "devel"
+SUMMARY = "Linux Trace Toolkit KERNEL MODULE"
+DESCRIPTION = "The lttng-modules 2.0 package contains the kernel tracer modules"
+HOMEPAGE = "https://lttng.org/"
+LICENSE = "LGPL-2.1-only & GPL-2.0-only & MIT"
+LIC_FILES_CHKSUM = "file://LICENSE;md5=0464cff101a009c403cd2ed65d01d4c4"
+
+inherit module
+
+include lttng-platforms.inc
+
+SRC_URI = "https://lttng.org/files/${BPN}/${BPN}-${PV}.tar.bz2 \
+ file://0001-Fix-ASoC-snd_doc_dapm-on-linux-6.9-rc1.patch \
+ file://0002-Fix-ASoC-add-component-to-set_bias_level-events-in-l.patch \
+ file://0003-Fix-mm_compaction_migratepages-changed-in-linux-6.9-.patch \
+ file://0004-Fix-dev_base_lock-removed-in-linux-6.9-rc1.patch \
+ "
+
+# Use :append here so that the patch is applied also when using devupstream
+SRC_URI:append = " file://0001-src-Kbuild-change-missing-CONFIG_TRACEPOINTS-to-warn.patch"
+
+SRC_URI[sha256sum] = "d85fcb66c7bd31003ab8735e8c77700e5e4f417b4c22fe1f20112cf435abad79"
+
+export INSTALL_MOD_DIR="kernel/lttng-modules"
+
+EXTRA_OEMAKE += "KERNELDIR='${STAGING_KERNEL_DIR}'"
+
+MODULES_MODULE_SYMVERS_LOCATION = "src"
+
+do_install:append() {
+ # Delete empty directories to avoid QA failures if no modules were built
+ if [ -d ${D}/${nonarch_base_libdir} ]; then
+ find ${D}/${nonarch_base_libdir} -depth -type d -empty -exec rmdir {} \;
+ fi
+}
+
+python do_package:prepend() {
+ if not os.path.exists(os.path.join(d.getVar('D'), d.getVar('nonarch_base_libdir')[1:], 'modules')):
+ bb.warn("%s: no modules were created; this may be due to CONFIG_TRACEPOINTS not being enabled in your kernel." % d.getVar('PN'))
+}
+
+BBCLASSEXTEND = "devupstream:target"
+SRC_URI:class-devupstream = "git://git.lttng.org/lttng-modules;branch=stable-2.13;protocol=https"
+SRCREV:class-devupstream = "7584cfc04914cb0842a986e9808686858b9c8630"
+SRCREV_FORMAT ?= "lttng_git"
diff --git a/meta/recipes-kernel/lttng/lttng-modules_2.13.3.bb b/meta/recipes-kernel/lttng/lttng-modules_2.13.3.bb
deleted file mode 100644
index a5e6b906d2..0000000000
--- a/meta/recipes-kernel/lttng/lttng-modules_2.13.3.bb
+++ /dev/null
@@ -1,52 +0,0 @@
-SECTION = "devel"
-SUMMARY = "Linux Trace Toolkit KERNEL MODULE"
-DESCRIPTION = "The lttng-modules 2.0 package contains the kernel tracer modules"
-HOMEPAGE = "https://lttng.org/"
-LICENSE = "LGPL-2.1-only & GPL-2.0-only & MIT"
-LIC_FILES_CHKSUM = "file://LICENSE;md5=0464cff101a009c403cd2ed65d01d4c4"
-
-inherit module
-
-include lttng-platforms.inc
-
-SRC_URI = "https://lttng.org/files/${BPN}/${BPN}-${PV}.tar.bz2 \
- file://0001-Fix-compaction-migratepages-event-name.patch \
- file://0002-Fix-tracepoint-event-allow-same-provider-and-event-n.patch \
- file://0003-fix-sched-tracing-Don-t-re-read-p-state-when-emittin.patch \
- file://0004-fix-block-remove-genhd.h-v5.18.patch \
- file://0005-fix-scsi-block-Remove-REQ_OP_WRITE_SAME-support-v5.1.patch \
- file://0006-fix-random-remove-unused-tracepoints-v5.18.patch \
- file://0007-fix-kprobes-Use-rethook-for-kretprobe-if-possible-v5.patch \
- file://0008-fix-scsi-core-Remove-scsi-scsi_request.h-v5.18.patch \
- file://0009-Rename-genhd-wrapper-to-blkdev.patch \
- file://0010-fix-mm-compaction-cleanup-the-compaction-trace-event.patch \
- file://0001-fix-sched-tracing-Append-prev_state-to-tp-args-inste.patch \
- "
-
-# Use :append here so that the patch is applied also when using devupstream
-SRC_URI:append = " file://0001-src-Kbuild-change-missing-CONFIG_TRACEPOINTS-to-warn.patch"
-
-SRC_URI[sha256sum] = "7cf1acbb50b84116acc9b4281b81dcc2643d6018bbd1e8514ad1270239896c4b"
-
-export INSTALL_MOD_DIR="kernel/lttng-modules"
-
-EXTRA_OEMAKE += "KERNELDIR='${STAGING_KERNEL_DIR}'"
-
-MODULES_MODULE_SYMVERS_LOCATION = "src"
-
-do_install:append() {
- # Delete empty directories to avoid QA failures if no modules were built
- if [ -d ${D}/${nonarch_base_libdir} ]; then
- find ${D}/${nonarch_base_libdir} -depth -type d -empty -exec rmdir {} \;
- fi
-}
-
-python do_package:prepend() {
- if not os.path.exists(os.path.join(d.getVar('D'), d.getVar('nonarch_base_libdir')[1:], 'modules')):
- bb.warn("%s: no modules were created; this may be due to CONFIG_TRACEPOINTS not being enabled in your kernel." % d.getVar('PN'))
-}
-
-BBCLASSEXTEND = "devupstream:target"
-SRC_URI:class-devupstream = "git://git.lttng.org/lttng-modules;branch=stable-2.13"
-SRCREV:class-devupstream = "7584cfc04914cb0842a986e9808686858b9c8630"
-SRCREV_FORMAT ?= "lttng_git"
diff --git a/meta/recipes-kernel/lttng/lttng-platforms.inc b/meta/recipes-kernel/lttng/lttng-platforms.inc
index 933c65d85d..900e36df82 100644
--- a/meta/recipes-kernel/lttng/lttng-platforms.inc
+++ b/meta/recipes-kernel/lttng/lttng-platforms.inc
@@ -15,3 +15,7 @@ LTTNGUST:arc = ""
COMPATIBLE_HOST:arc:pn-lttng-ust = "null"
+# Whether the platform supports lttng-tools
+# lttng-tools requires SYS_ppoll and SYS_pselect6 which are not supported on riscv32.
+# It's also turned off for riscv32 in meta-riscv. See https://github.com/riscv/meta-riscv/blob/master/conf/layer.conf
+COMPATIBLE_HOST:riscv32:pn-lttng-tools = "null"
diff --git a/meta/recipes-kernel/lttng/lttng-tools/0001-Fix-rotation-destroy-flush-fix-session-daemon-abort-.patch b/meta/recipes-kernel/lttng/lttng-tools/0001-Fix-rotation-destroy-flush-fix-session-daemon-abort-.patch
new file mode 100644
index 0000000000..3286dfb5c1
--- /dev/null
+++ b/meta/recipes-kernel/lttng/lttng-tools/0001-Fix-rotation-destroy-flush-fix-session-daemon-abort-.patch
@@ -0,0 +1,56 @@
+From 6b45c5f80d20e7bbf3d98c1fa17d2cf8716af3bb Mon Sep 17 00:00:00 2001
+From: Xiangyu Chen <xiangyu.chen@windriver.com>
+Date: Mon, 25 Mar 2024 18:20:14 +0800
+Subject: [PATCH] Fix: rotation-destroy-flush: fix session daemon abort if no
+ kernel module present
+
+Testing rotation-destroy-flush when no lttng kernel modules present, it would
+be failed with error message:
+
+ Error: Unable to load required module lttng-ring-buffer-client-discard
+ not ok 1 - Start session daemon
+ Failed test 'Start session daemon'
+ not ok 2 - Create session rotation_destroy_flush in -o /tmp/tmp.test_rot ...
+ ...
+
+This because test script that sets the LTTNG_ABORT_ON_ERROR environment
+variable. It's this environment variable that causes the sessiond to handle the
+kernel module loading failure as an abort rather than a warning.
+
+Using "check_skip_kernel_test" to detect whether the kernel module fails to
+load is expected or not. If the failure is expected, the script won't set that
+environment variable any more.
+
+Fixes: 3a174400
+("tests:add check_skip_kernel_test to check root user and lttng kernel modules")
+
+Upstream-Status: Submitted [https://review.lttng.org/c/lttng-tools/+/12155]
+
+Change-Id: I371e9ba717613e2940186f710cf3cccd35baed6c
+Signed-off-by: Xiangyu Chen <xiangyu.chen@windriver.com>
+---
+ .../ust/rotation-destroy-flush/test_rotation_destroy_flush | 6 +++---
+ 1 file changed, 3 insertions(+), 3 deletions(-)
+
+diff --git a/tests/regression/ust/rotation-destroy-flush/test_rotation_destroy_flush b/tests/regression/ust/rotation-destroy-flush/test_rotation_destroy_flush
+index 669bcbc43..64161768f 100755
+--- a/tests/regression/ust/rotation-destroy-flush/test_rotation_destroy_flush
++++ b/tests/regression/ust/rotation-destroy-flush/test_rotation_destroy_flush
+@@ -23,11 +23,11 @@ SIZE_LIMIT=$PAGE_SIZE
+ NR_ITER=10
+ NUM_TESTS=$((15*$NR_ITER))
+
+-# Ensure the daemons invoke abort on error.
+-export LTTNG_ABORT_ON_ERROR=1
+-
+ source $TESTDIR/utils/utils.sh
+
++# Ensure the daemons invoke abort on error.
++check_skip_kernel_test || export LTTNG_ABORT_ON_ERROR=1
++
+ # MUST set TESTDIR before calling those functions
+ function run_app()
+ {
+--
+2.25.1
+
diff --git a/meta/recipes-kernel/lttng/lttng-tools/0001-compat-Define-off64_t-as-off_t-on-linux.patch b/meta/recipes-kernel/lttng/lttng-tools/0001-compat-Define-off64_t-as-off_t-on-linux.patch
new file mode 100644
index 0000000000..4e21d1e9f1
--- /dev/null
+++ b/meta/recipes-kernel/lttng/lttng-tools/0001-compat-Define-off64_t-as-off_t-on-linux.patch
@@ -0,0 +1,74 @@
+From 74b3844737b03492756b4f896c938b504b069f14 Mon Sep 17 00:00:00 2001
+From: Jérémie Galarneau <jeremie.galarneau@efficios.com>
+Date: Tue, 17 Jan 2023 16:57:35 -0500
+Subject: [PATCH] compat: off64_t is not defined by musl
+
+This helps compile with latest musl, where off64_t is not defined unless
+_LARGEFILE64_SOURCE is defined. On glibc, _LARGEFILE64_SOURCE is defined
+if _GNU_SOURCE is defined, so the problem is only seen with musl.
+
+Since the project uses AC_SYS_LARGEFILE, which from the autoconf doc:
+"arrange for 64-bit file offsets, known as large-file support."
+
+As such, it is safe to assume off_t is 64-bit wide. This is checked by a
+static_assert to catch any platform where autoconf would let a 32-bit
+off_t slip.
+
+Upstream-Status: Submitted [https://review.lttng.org/c/lttng-tools/+/9268]
+Reported-by: Khem Raj <raj.khem@gmail.com>
+Signed-off-by: Jérémie Galarneau <jeremie.galarneau@efficios.com>
+Change-Id: If2c6007a8c85bc3f3065002af8a7538b882fb4a8
+---
+
+--- a/src/common/compat/compat-fcntl.c
++++ b/src/common/compat/compat-fcntl.c
+@@ -8,14 +8,17 @@
+ #define _LGPL_SOURCE
+ #include <common/compat/fcntl.h>
+ #include <common/macros.h>
++#include <common/bug.h>
++#include <stdint.h>
+ #include <unistd.h>
+
+ #ifdef __linux__
+
+ LTTNG_HIDDEN
+-int compat_sync_file_range(int fd, off64_t offset, off64_t nbytes,
++int compat_sync_file_range(int fd, off_t offset, off_t nbytes,
+ unsigned int flags)
+ {
++ LTTNG_BUILD_BUG_ON(sizeof(off_t) != sizeof(int64_t));
+ #ifdef HAVE_SYNC_FILE_RANGE
+ return sync_file_range(fd, offset, nbytes, flags);
+ #else
+--- a/src/common/compat/fcntl.h
++++ b/src/common/compat/fcntl.h
+@@ -13,16 +13,12 @@
+
+ #include <common/compat/errno.h>
+
+-#if (defined(__CYGWIN__))
+-typedef long long off64_t;
+-#endif
+-
+ #if (defined(__FreeBSD__) || defined(__sun__))
+ typedef off64_t loff_t;
+ #endif
+
+ #ifdef __linux__
+-extern int compat_sync_file_range(int fd, off64_t offset, off64_t nbytes,
++extern int compat_sync_file_range(int fd, off_t offset, off_t nbytes,
+ unsigned int flags);
+ #define lttng_sync_file_range(fd, offset, nbytes, flags) \
+ compat_sync_file_range(fd, offset, nbytes, flags)
+@@ -37,8 +33,8 @@ extern int compat_sync_file_range(int fd
+ #define SYNC_FILE_RANGE_WAIT_BEFORE 0
+ #define SYNC_FILE_RANGE_WRITE 0
+
+-static inline int lttng_sync_file_range(int fd, off64_t offset,
+- off64_t nbytes, unsigned int flags)
++static inline int lttng_sync_file_range(int fd, off_t offset,
++ off_t nbytes, unsigned int flags)
+ {
+ return -ENOSYS;
+ }
diff --git a/meta/recipes-kernel/lttng/lttng-tools/0001-tests-add-check_skip_kernel_test-to-check-root-user-.patch b/meta/recipes-kernel/lttng/lttng-tools/0001-tests-add-check_skip_kernel_test-to-check-root-user-.patch
new file mode 100644
index 0000000000..2671a1908e
--- /dev/null
+++ b/meta/recipes-kernel/lttng/lttng-tools/0001-tests-add-check_skip_kernel_test-to-check-root-user-.patch
@@ -0,0 +1,1246 @@
+From cf558f802b259a33605fe0ede4d74ae2ff6be699 Mon Sep 17 00:00:00 2001
+From: Xiangyu Chen <xiangyu.chen@windriver.com>
+Date: Mon, 12 Feb 2024 09:23:54 -0500
+Subject: [PATCH] tests: add check_skip_kernel_test to check root user and
+ lttng kernel modules
+MIME-Version: 1.0
+Content-Type: text/plain; charset=UTF-8
+Content-Transfer-Encoding: 8bit
+
+The current tests will run both userspace and kernel testing. Some of
+use cases only use lttng for one kind of tracing on an embedded
+device (e.g. userspace), so in this scenario, the kernel modules might
+not install to target rootfs, the test cases would be fail and exit.
+
+Add LTTNG_TOOLS_DISABLE_KERNEL_TESTS to skip the lttng kernel features
+test, this flag can be set via "make":
+
+ make check LTTNG_TOOLS_DISABLE_KERNEL_TESTS=1
+
+When this flag was set, all kernel related testcases would be marked as
+SKIP in result.
+
+Since the the LTTNG_TOOLS_DISABLE_KERNEL_TESTS was checked in function
+check_skip_kernel_test, lots of testcases also need to check root
+permission, so merging the root permission checking into
+check_skip_kernel_test.
+
+Upstream-Status: Backport from
+[https://git.lttng.org/?p=lttng-tools.git;a=commit;h=3a1744008331a0604479d3d7461f77056fad3a64]
+
+Change-Id: I49a1f642a9869c21a69e0186c296fd917bd7b525
+Signed-off-by: Xiangyu Chen <xiangyu.chen@windriver.com>
+Signed-off-by: Michael Jeanson <mjeanson@efficios.com>
+Signed-off-by: Jérémie Galarneau <jeremie.galarneau@efficios.com>
+---
+ tests/destructive/metadata-regeneration | 8 +----
+ tests/perf/test_perf_raw.in | 8 +----
+ tests/regression/kernel/test_all_events | 8 +----
+ tests/regression/kernel/test_callstack | 8 +----
+ tests/regression/kernel/test_channel | 8 +----
+ tests/regression/kernel/test_clock_override | 8 +----
+ tests/regression/kernel/test_event_basic | 8 +----
+ tests/regression/kernel/test_kernel_function | 8 +----
+ tests/regression/kernel/test_lttng_logger | 8 +----
+ tests/regression/kernel/test_ns_contexts | 8 +----
+ .../regression/kernel/test_ns_contexts_change | 9 +----
+ .../kernel/test_rotation_destroy_flush | 8 +----
+ .../regression/kernel/test_select_poll_epoll | 8 +----
+ tests/regression/kernel/test_syscall | 8 +----
+ tests/regression/kernel/test_userspace_probe | 8 +----
+ tests/regression/tools/clear/test_kernel | 8 +----
+ .../tools/filtering/test_invalid_filter | 8 +----
+ .../tools/filtering/test_unsupported_op | 8 +----
+ .../tools/filtering/test_valid_filter | 8 +----
+ tests/regression/tools/health/test_health.sh | 10 ++----
+ tests/regression/tools/health/test_thread_ok | 9 +----
+ tests/regression/tools/live/test_kernel | 10 +++---
+ tests/regression/tools/live/test_lttng_kernel | 8 +----
+ tests/regression/tools/metadata/test_kernel | 8 +----
+ .../test_notification_kernel_buffer_usage | 36 +++++++++----------
+ .../test_notification_kernel_capture | 23 ++++++------
+ .../test_notification_kernel_error | 23 ++++++------
+ .../test_notification_kernel_instrumentation | 23 ++++++------
+ .../test_notification_kernel_syscall | 19 +++++-----
+ .../test_notification_kernel_userspace_probe | 20 +++++------
+ .../notification/test_notification_multi_app | 14 +++-----
+ ...test_notification_notifier_discarded_count | 9 +++--
+ .../tools/regen-metadata/test_kernel | 8 +----
+ .../tools/regen-statedump/test_kernel | 8 +----
+ tests/regression/tools/rotation/test_kernel | 8 +----
+ tests/regression/tools/snapshots/test_kernel | 8 +----
+ .../tools/snapshots/test_kernel_streaming | 8 +----
+ .../streaming/test_high_throughput_limits | 8 +----
+ tests/regression/tools/streaming/test_kernel | 8 +----
+ .../tools/tracker/test_event_tracker | 8 +----
+ .../tools/trigger/test_add_trigger_cli | 12 ++-----
+ .../tools/trigger/test_list_triggers_cli | 26 +++++---------
+ .../tools/wildcard/test_event_wildcard | 8 +----
+ .../test_relayd_working_directory | 4 +--
+ .../ust/namespaces/test_ns_contexts_change | 7 +---
+ tests/regression/ust/test_event_perf | 8 +----
+ tests/utils/utils.sh | 35 ++++++++++++++++++
+ 47 files changed, 166 insertions(+), 363 deletions(-)
+
+diff --git a/tests/destructive/metadata-regeneration b/tests/destructive/metadata-regeneration
+index b81e7af32..36b130d17 100755
+--- a/tests/destructive/metadata-regeneration
++++ b/tests/destructive/metadata-regeneration
+@@ -185,19 +185,13 @@ function test_ust_streaming ()
+ rm -f ${file_sync_before_last}
+ }
+
+-if [ "$(id -u)" == "0" ]; then
+- isroot=1
+-else
+- isroot=0
+-fi
+-
+ if ! destructive_tests_enabled ; then
+ echo 'You need to set the LTTNG_ENABLE_DESTRUCTIVE_TESTS to "will-break-my-system" as argument to run this test'
+ echo 'Moreover, please make sure that ntp is not running while executing this test'
+ exit 0
+ fi
+
+-skip $isroot "Root access is needed. Skipping all tests." $NUM_TESTS ||
++check_skip_kernel_test $NUM_TESTS "Skipping all tests." ||
+ {
+ start_lttng_relayd "-o $TRACE_PATH"
+ start_lttng_sessiond
+diff --git a/tests/perf/test_perf_raw.in b/tests/perf/test_perf_raw.in
+index f293ccd71..d35529a87 100644
+--- a/tests/perf/test_perf_raw.in
++++ b/tests/perf/test_perf_raw.in
+@@ -137,12 +137,6 @@ function test_kernel_raw()
+ rm -rf $TRACE_PATH
+ }
+
+-if [ "$(id -u)" == "0" ]; then
+- isroot=1
+-else
+- isroot=0
+-fi
+-
+ # MUST set TESTDIR before calling those functions
+ plan_tests $NUM_TESTS
+
+@@ -154,7 +148,7 @@ have_libpfm
+
+ test_ust_raw
+
+-skip $isroot "Root access is needed for kernel testing, skipping." 9 ||
++check_skip_kernel_test 9 ||
+ {
+ modprobe lttng-test
+ test_kernel_raw
+diff --git a/tests/regression/kernel/test_all_events b/tests/regression/kernel/test_all_events
+index 2e20888df..044f9b65f 100755
+--- a/tests/regression/kernel/test_all_events
++++ b/tests/regression/kernel/test_all_events
+@@ -43,13 +43,7 @@ plan_tests $NUM_TESTS
+
+ print_test_banner "$TEST_DESC"
+
+-if [ "$(id -u)" == "0" ]; then
+- isroot=1
+-else
+- isroot=0
+-fi
+-
+-skip $isroot "Root access is needed. Skipping all tests." $NUM_TESTS ||
++check_skip_kernel_test "$NUM_TESTS" "Skipping all tests." ||
+ {
+ validate_lttng_modules_present
+ start_lttng_sessiond
+diff --git a/tests/regression/kernel/test_callstack b/tests/regression/kernel/test_callstack
+index a4477fd7c..d8d6b5e0f 100755
+--- a/tests/regression/kernel/test_callstack
++++ b/tests/regression/kernel/test_callstack
+@@ -134,13 +134,7 @@ plan_tests $NUM_TESTS
+
+ print_test_banner "$TEST_DESC"
+
+-if [ "$(id -u)" == "0" ]; then
+- isroot=1
+-else
+- isroot=0
+-fi
+-
+-skip $isroot "Root access is needed. Skipping all tests." "$NUM_TESTS" ||
++check_skip_kernel_test "$NUM_TESTS" "Skipping all tests." ||
+ {
+ validate_lttng_modules_present
+ start_lttng_sessiond
+diff --git a/tests/regression/kernel/test_channel b/tests/regression/kernel/test_channel
+index 9cc74c4e8..4c377bd2a 100755
+--- a/tests/regression/kernel/test_channel
++++ b/tests/regression/kernel/test_channel
+@@ -47,13 +47,7 @@ function test_channel_buffer_too_large()
+ plan_tests $NUM_TESTS
+ print_test_banner "$TEST_DESC"
+
+-if [ "$(id -u)" == "0" ]; then
+- isroot=1
+-else
+- isroot=0
+-fi
+-
+-skip $isroot "Root access is needed. Skipping all tests." $NUM_TESTS ||
++check_skip_kernel_test "$NUM_TESTS" "Skipping all tests." ||
+ {
+ start_lttng_sessiond
+
+diff --git a/tests/regression/kernel/test_clock_override b/tests/regression/kernel/test_clock_override
+index 72892898b..48a3f926a 100755
+--- a/tests/regression/kernel/test_clock_override
++++ b/tests/regression/kernel/test_clock_override
+@@ -172,13 +172,7 @@ TESTS=(
+ TEST_COUNT=${#TESTS[@]}
+ i=0
+
+-if [ "$(id -u)" == "0" ]; then
+- isroot=1
+-else
+- isroot=0
+-fi
+-
+-skip $isroot "Root access is needed. Skipping all tests." $NUM_TESTS ||
++check_skip_kernel_test "$NUM_TESTS" "Skipping all tests." ||
+ {
+ validate_lttng_modules_present
+ trap signal_cleanup SIGTERM SIGINT
+diff --git a/tests/regression/kernel/test_event_basic b/tests/regression/kernel/test_event_basic
+index ac9ec0549..387e2f733 100755
+--- a/tests/regression/kernel/test_event_basic
++++ b/tests/regression/kernel/test_event_basic
+@@ -73,13 +73,7 @@ plan_tests $NUM_TESTS
+
+ print_test_banner "$TEST_DESC"
+
+-if [ "$(id -u)" == "0" ]; then
+- isroot=1
+-else
+- isroot=0
+-fi
+-
+-skip $isroot "Root access is needed. Skipping all tests." $NUM_TESTS ||
++check_skip_kernel_test $NUM_TESTS "Skipping all tests." ||
+ {
+ validate_lttng_modules_present
+ start_lttng_sessiond
+diff --git a/tests/regression/kernel/test_kernel_function b/tests/regression/kernel/test_kernel_function
+index b1d5491fc..ea16cdeef 100755
+--- a/tests/regression/kernel/test_kernel_function
++++ b/tests/regression/kernel/test_kernel_function
+@@ -43,13 +43,7 @@ plan_tests $NUM_TESTS
+
+ print_test_banner "$TEST_DESC"
+
+-if [ "$(id -u)" == "0" ]; then
+- isroot=1
+-else
+- isroot=0
+-fi
+-
+-skip $isroot "Root access is needed. Skipping all tests." $NUM_TESTS ||
++check_skip_kernel_test "$NUM_TESTS" "Skipping all tests." ||
+ {
+ start_lttng_sessiond_notap
+ validate_lttng_modules_present
+diff --git a/tests/regression/kernel/test_lttng_logger b/tests/regression/kernel/test_lttng_logger
+index b8f7ded82..00eaae823 100755
+--- a/tests/regression/kernel/test_lttng_logger
++++ b/tests/regression/kernel/test_lttng_logger
+@@ -110,13 +110,7 @@ plan_tests $NUM_TESTS
+
+ print_test_banner "$TEST_DESC"
+
+-if [ "$(id -u)" == "0" ]; then
+- isroot=1
+-else
+- isroot=0
+-fi
+-
+-skip $isroot "Root access is needed. Skipping all tests." $NUM_TESTS ||
++check_skip_kernel_test "$NUM_TESTS" "Skipping all tests." ||
+ {
+ validate_lttng_modules_present
+ start_lttng_sessiond
+diff --git a/tests/regression/kernel/test_ns_contexts b/tests/regression/kernel/test_ns_contexts
+index 0c8718d78..59e2568f6 100755
+--- a/tests/regression/kernel/test_ns_contexts
++++ b/tests/regression/kernel/test_ns_contexts
+@@ -108,13 +108,7 @@ plan_tests $NUM_TESTS
+ print_test_banner "$TEST_DESC"
+
+
+-isroot=0
+-if [ "$(id -u)" == "0" ]; then
+- isroot=1
+-fi
+-
+-skip $isroot "Root access is needed. Skipping all tests." "$NUM_TESTS" && exit 0
+-
++check_skip_kernel_test "$NUM_TESTS" "Skipping all tests." && exit 0
+
+ system_has_ns=0
+ if [ -d "/proc/$$/ns" ]; then
+diff --git a/tests/regression/kernel/test_ns_contexts_change b/tests/regression/kernel/test_ns_contexts_change
+index 42a61276b..3f5e4eeab 100755
+--- a/tests/regression/kernel/test_ns_contexts_change
++++ b/tests/regression/kernel/test_ns_contexts_change
+@@ -162,14 +162,7 @@ plan_tests $NUM_TESTS
+
+ print_test_banner "$TEST_DESC"
+
+-
+-isroot=0
+-if [ "$(id -u)" == "0" ]; then
+- isroot=1
+-fi
+-
+-skip $isroot "Root access is needed. Skipping all tests." "$NUM_TESTS" && exit 0
+-
++check_skip_kernel_test "$NUM_TESTS" "Skipping all tests." && exit 0
+
+ system_has_ns=0
+ if [ -d "/proc/$$/ns" ]; then
+diff --git a/tests/regression/kernel/test_rotation_destroy_flush b/tests/regression/kernel/test_rotation_destroy_flush
+index cb773d7df..0af514b49 100755
+--- a/tests/regression/kernel/test_rotation_destroy_flush
++++ b/tests/regression/kernel/test_rotation_destroy_flush
+@@ -120,13 +120,7 @@ TESTS=(
+ TEST_COUNT=${#TESTS[@]}
+ i=0
+
+-if [ "$(id -u)" == "0" ]; then
+- isroot=1
+-else
+- isroot=0
+-fi
+-
+-skip $isroot "Root access is needed. Skipping all tests." $NUM_TESTS ||
++check_skip_kernel_test "$NUM_TESTS" "Skipping all tests." ||
+ {
+ validate_lttng_modules_present
+ trap signal_cleanup SIGTERM SIGINT
+diff --git a/tests/regression/kernel/test_select_poll_epoll b/tests/regression/kernel/test_select_poll_epoll
+index d8245a0e7..20f0ef0ae 100755
+--- a/tests/regression/kernel/test_select_poll_epoll
++++ b/tests/regression/kernel/test_select_poll_epoll
+@@ -374,13 +374,7 @@ if test $? != 0; then
+ exit 0
+ fi
+
+-if [ "$(id -u)" == "0" ]; then
+- isroot=1
+-else
+- isroot=0
+-fi
+-
+-skip $isroot "Root access is needed. Skipping all tests." $NUM_TESTS ||
++check_skip_kernel_test "$NUM_TESTS" "Skipping all tests." ||
+ {
+ validate_lttng_modules_present
+
+diff --git a/tests/regression/kernel/test_syscall b/tests/regression/kernel/test_syscall
+index 401a18a8d..219d94703 100755
+--- a/tests/regression/kernel/test_syscall
++++ b/tests/regression/kernel/test_syscall
+@@ -664,13 +664,7 @@ plan_tests $NUM_TESTS
+
+ print_test_banner "$TEST_DESC"
+
+-if [ "$(id -u)" == "0" ]; then
+- isroot=1
+-else
+- isroot=0
+-fi
+-
+-skip $isroot "Root access is needed. Skipping all tests." $NUM_TESTS ||
++check_skip_kernel_test "$NUM_TESTS" "Skipping all tests." ||
+ {
+ validate_lttng_modules_present
+ start_lttng_sessiond
+diff --git a/tests/regression/kernel/test_userspace_probe b/tests/regression/kernel/test_userspace_probe
+index 1091ee65e..5d984d666 100755
+--- a/tests/regression/kernel/test_userspace_probe
++++ b/tests/regression/kernel/test_userspace_probe
+@@ -815,13 +815,7 @@ fi
+ plan_tests $NUM_TESTS
+ print_test_banner "$TEST_DESC"
+
+-if [ "$(id -u)" == "0" ]; then
+- isroot=1
+-else
+- isroot=0
+-fi
+-
+-skip $isroot "Root access is needed. Skipping all tests." $NUM_TESTS ||
++check_skip_kernel_test "$NUM_TESTS" "Skipping all tests." ||
+ {
+ validate_lttng_modules_present
+ start_lttng_sessiond
+diff --git a/tests/regression/tools/clear/test_kernel b/tests/regression/tools/clear/test_kernel
+index 06fb1c368..48250a742 100755
+--- a/tests/regression/tools/clear/test_kernel
++++ b/tests/regression/tools/clear/test_kernel
+@@ -536,12 +536,6 @@ plan_tests $NUM_TESTS
+
+ print_test_banner "$TEST_DESC"
+
+-if [ "$(id -u)" == "0" ]; then
+- isroot=1
+-else
+- isroot=0
+-fi
+-
+ streaming_tests=(test_kernel_streaming
+ test_kernel_streaming_rotate_clear
+ test_kernel_streaming_clear_rotate
+@@ -563,7 +557,7 @@ snapshot_tests=(test_kernel_streaming_snapshot
+ test_kernel_local_snapshot
+ )
+
+-skip $isroot "Root access is needed. Skipping all kernel streaming tests." $NUM_TESTS ||
++check_skip_kernel_test "$NUM_TESTS" "Skipping kernel streaming tests." ||
+ {
+ trap signal_cleanup SIGTERM SIGINT
+
+diff --git a/tests/regression/tools/filtering/test_invalid_filter b/tests/regression/tools/filtering/test_invalid_filter
+index 7d9e524d9..8435e5546 100755
+--- a/tests/regression/tools/filtering/test_invalid_filter
++++ b/tests/regression/tools/filtering/test_invalid_filter
+@@ -168,13 +168,7 @@ done
+
+ test_bytecode_limit -u
+
+-if [ "$(id -u)" == "0" ]; then
+- isroot=1
+-else
+- isroot=0
+-fi
+-
+-skip $isroot "Root access is needed. Skipping all kernel invalid filter tests." $NUM_KERNEL_TESTS ||
++check_skip_kernel_test "$NUM_KERNEL_TESTS" "Skipping kernel invalid filter tests." ||
+ {
+ diag "Test kernel filters"
+ i=0
+diff --git a/tests/regression/tools/filtering/test_unsupported_op b/tests/regression/tools/filtering/test_unsupported_op
+index 299247a3c..91eb86d21 100755
+--- a/tests/regression/tools/filtering/test_unsupported_op
++++ b/tests/regression/tools/filtering/test_unsupported_op
+@@ -103,13 +103,7 @@ while [ "$i" -lt "$OP_COUNT" ]; do
+ let "i++"
+ done
+
+-if [ "$(id -u)" == "0" ]; then
+- isroot=1
+-else
+- isroot=0
+-fi
+-
+-skip $isroot "Root access is needed. Skipping all kernel unsupported filter operations tests." $NUM_KERNEL_TESTS ||
++check_skip_kernel_test $NUM_KERNEL_TESTS "Skipping kernel unsupported filter operations tests." ||
+ {
+ diag "Test kernel unsupported filter operations"
+
+diff --git a/tests/regression/tools/filtering/test_valid_filter b/tests/regression/tools/filtering/test_valid_filter
+index e76ffa25f..1ba7c79bb 100755
+--- a/tests/regression/tools/filtering/test_valid_filter
++++ b/tests/regression/tools/filtering/test_valid_filter
+@@ -1452,13 +1452,7 @@ KERNEL_FILTERS=(
+
+ IFS=$OLDIFS
+
+-if [ "$(id -u)" == "0" ]; then
+- isroot=1
+-else
+- isroot=0
+-fi
+-
+-skip $isroot "Root access is needed. Skipping all kernel valid filter tests." $NUM_KERNEL_TESTS ||
++check_skip_kernel_test "$NUM_KERNEL_TESTS" "Skipping kernel valid filter tests." ||
+ {
+ diag "Test kernel valid filters"
+
+diff --git a/tests/regression/tools/health/test_health.sh b/tests/regression/tools/health/test_health.sh
+index b3d6419d2..68716e6b9 100644
+--- a/tests/regression/tools/health/test_health.sh
++++ b/tests/regression/tools/health/test_health.sh
+@@ -82,7 +82,7 @@ function test_health
+ diag "With UST consumer daemons"
+ enable_ust_lttng_event_ok $SESSION_NAME $UST_EVENT_NAME $CHANNEL_NAME
+
+- skip $isroot "Root access is needed. Skipping kernel consumer health check test." "1" ||
++ check_skip_kernel_test "1" "Skipping kernel consumer health check test." ||
+ {
+ diag "With kernel consumer daemon"
+ lttng_enable_kernel_event $SESSION_NAME $KERNEL_EVENT_NAME $CHANNEL_NAME
+@@ -113,7 +113,7 @@ function test_health
+
+
+ if [ ${test_needs_root} -eq 1 ]; then
+- skip ${isroot} "Root access needed for test \"${test_thread_name}\"." "1" ||
++ check_skip_kernel_test "1" "Skipping \"${test_thread_name}\"." ||
+ {
+ report_errors "${test_thread_error_string}" "${test_relayd}"
+ }
+@@ -276,12 +276,6 @@ STDERR_PATH=$(mktemp --tmpdir tmp.test_health_stderr_path.XXXXXX)
+ TRACE_PATH=$(mktemp --tmpdir -d tmp.test_health_trace_path.XXXXXX)
+ HEALTH_PATH=$(mktemp --tmpdir -d tmp.test_health_trace_path.XXXXXX)
+
+-if [ "$(id -u)" == "0" ]; then
+- isroot=1
+-else
+- isroot=0
+-fi
+-
+ THREAD_COUNT=${#THREAD[@]}
+ i=0
+ while [ "$i" -lt "$THREAD_COUNT" ]; do
+diff --git a/tests/regression/tools/health/test_thread_ok b/tests/regression/tools/health/test_thread_ok
+index e84adb611..e5e23543f 100755
+--- a/tests/regression/tools/health/test_thread_ok
++++ b/tests/regression/tools/health/test_thread_ok
+@@ -67,7 +67,7 @@ function test_thread_ok
+ $CURDIR/$HEALTH_CHECK_BIN > ${STDOUT_PATH} 2> ${STDERR_PATH}
+ report_errors
+
+- skip $isroot "Root access is needed. Skipping kernel consumer health check test." "5" ||
++ check_skip_kernel_test "5" "Skipping kernel consumer health check test." ||
+ {
+ diag "With kernel consumer daemon"
+ create_lttng_session_no_output $SESSION_NAME
+@@ -115,13 +115,6 @@ STDERR_PATH=$(mktemp --tmpdir tmp.test_thread_ok_stderr_path.XXXXXX)
+ TRACE_PATH=$(mktemp --tmpdir -d tmp.test_thread_ok_trace_path.XXXXXX)
+ HEALTH_PATH=$(mktemp --tmpdir -d tmp.test_thread_ok_trace_path.XXXXXX)
+
+-# The manage kernel thread is only spawned if we are root
+-if [ "$(id -u)" == "0" ]; then
+- isroot=1
+-else
+- isroot=0
+-fi
+-
+ test_thread_ok
+
+ rm -rf ${HEALTH_PATH}
+diff --git a/tests/regression/tools/live/test_kernel b/tests/regression/tools/live/test_kernel
+index b622b5214..fdaa09f0d 100755
+--- a/tests/regression/tools/live/test_kernel
++++ b/tests/regression/tools/live/test_kernel
+@@ -39,13 +39,11 @@ function clean_live_tracing()
+ rm -rf $TRACE_PATH
+ }
+
+-# Need root access for kernel tracing.
+-if [ "$(id -u)" == "0" ]; then
+- isroot=1
+-else
+- plan_skip_all "Root access is needed. Skipping all tests."
++check_skip_kernel_test &&
++{
++ plan_skip_all "Skipping all tests."
+ exit 0
+-fi
++}
+
+ modprobe lttng-test
+
+diff --git a/tests/regression/tools/live/test_lttng_kernel b/tests/regression/tools/live/test_lttng_kernel
+index a23d9373a..1b933648f 100755
+--- a/tests/regression/tools/live/test_lttng_kernel
++++ b/tests/regression/tools/live/test_lttng_kernel
+@@ -45,13 +45,7 @@ function clean_live_tracing()
+ }
+
+ # Need root access for kernel tracing.
+-if [ "$(id -u)" == "0" ]; then
+- isroot=1
+-else
+- isroot=0
+-fi
+-
+-skip $isroot "Root access is needed. Skipping all tests." $NUM_TESTS ||
++check_skip_kernel_test "$NUM_TESTS" "Skipping all tests." ||
+ {
+ modprobe lttng-test
+
+diff --git a/tests/regression/tools/metadata/test_kernel b/tests/regression/tools/metadata/test_kernel
+index 57cace6ea..26e95d93a 100755
+--- a/tests/regression/tools/metadata/test_kernel
++++ b/tests/regression/tools/metadata/test_kernel
+@@ -91,13 +91,7 @@ plan_tests $NUM_TESTS
+ print_test_banner "$TEST_DESC"
+
+
+-if [ "$(id -u)" == "0" ]; then
+- isroot=1
+-else
+- isroot=0
+-fi
+-
+-skip $isroot "Root access is needed. Skipping all kernel metadata tests." $NUM_TESTS ||
++check_skip_kernel_test "$NUM_TESTS" "Skipping kernel metadata tests." ||
+ {
+ validate_lttng_modules_present
+ modprobe lttng-test
+diff --git a/tests/regression/tools/notification/test_notification_kernel_buffer_usage b/tests/regression/tools/notification/test_notification_kernel_buffer_usage
+index 76e69a77e..8fdaabb3e 100755
+--- a/tests/regression/tools/notification/test_notification_kernel_buffer_usage
++++ b/tests/regression/tools/notification/test_notification_kernel_buffer_usage
+@@ -60,29 +60,27 @@ function test_buffer_usage_notification
+ wait $APP_PID 2> /dev/null
+ }
+
+-if [ "$(id -u)" == "0" ]; then
+-
+- validate_lttng_modules_present
+-
++check_skip_kernel_test &&
++{
++ plan_skip_all "Skipping all tests."
++ rm -rf "$TEST_TMPDIR"
++ exit 0
++}
+
+- modprobe lttng-test
++validate_lttng_modules_present
+
+- # Used on sessiond launch.
+- LTTNG_SESSIOND_ENV_VARS="LTTNG_TESTPOINT_ENABLE=1 \
+- CONSUMER_PAUSE_PIPE_PATH=${TESTPOINT_PIPE_PATH} \
+- LD_PRELOAD=${TESTPOINT}"
+- start_lttng_sessiond_notap
++modprobe lttng-test
+
+- test_buffer_usage_notification
++# Used on sessiond launch.
++LTTNG_SESSIOND_ENV_VARS="LTTNG_TESTPOINT_ENABLE=1 \
++ CONSUMER_PAUSE_PIPE_PATH=${TESTPOINT_PIPE_PATH} \
++ LD_PRELOAD=${TESTPOINT}"
++start_lttng_sessiond_notap
+
+- stop_lttng_sessiond_notap
+- rmmod lttng-test
++test_buffer_usage_notification
+
+- rm -rf "${consumerd_pipe[@]}" 2> /dev/null
+-else
+- # Kernel tests are skipped.
+- plan_tests $NUM_TESTS
+- skip 0 "Root access is needed. Skipping all kernel notification tests." $NUM_TESTS
+-fi
++stop_lttng_sessiond_notap
++rmmod lttng-test
+
++rm -rf "${consumerd_pipe[@]}" 2> /dev/null
+ rm -rf "$TEST_TMPDIR"
+diff --git a/tests/regression/tools/notification/test_notification_kernel_capture b/tests/regression/tools/notification/test_notification_kernel_capture
+index 88f123d3d..0f8a2bc6f 100755
+--- a/tests/regression/tools/notification/test_notification_kernel_capture
++++ b/tests/regression/tools/notification/test_notification_kernel_capture
+@@ -31,22 +31,21 @@ function test_basic_error_path
+ }
+
+
+-if [ "$(id -u)" == "0" ]; then
+- validate_lttng_modules_present
++check_skip_kernel_test &&
++{
++ plan_skip_all "Skipping all tests."
++ exit 0
++}
+
+- modprobe lttng-test
++validate_lttng_modules_present
+
+- start_lttng_sessiond_notap
++modprobe lttng-test
+
+- test_basic_error_path
++start_lttng_sessiond_notap
+
+- stop_lttng_sessiond_notap
+- rmmod lttng-test
++test_basic_error_path
+
+-else
+- # Kernel tests are skipped.
+- plan_tests $NUM_TESTS
+- skip 0 "Root access is needed. Skipping all kernel notification tests." $NUM_TESTS
+-fi
++stop_lttng_sessiond_notap
++rmmod lttng-test
+
+ rm -f "$TESTAPP_STATE_PATH"
+diff --git a/tests/regression/tools/notification/test_notification_kernel_error b/tests/regression/tools/notification/test_notification_kernel_error
+index 80fe6e5b5..b757ec2b4 100755
+--- a/tests/regression/tools/notification/test_notification_kernel_error
++++ b/tests/regression/tools/notification/test_notification_kernel_error
+@@ -30,23 +30,22 @@ function test_basic_error_path
+ wait $APP_PID 2> /dev/null
+ }
+
++check_skip_kernel_test &&
++{
++ plan_skip_all "Skipping all tests."
++ exit 0
++}
+
+-if [ "$(id -u)" == "0" ]; then
+- validate_lttng_modules_present
++validate_lttng_modules_present
+
+- modprobe lttng-test
++modprobe lttng-test
+
+- start_lttng_sessiond_notap
++start_lttng_sessiond_notap
+
+- test_basic_error_path
++test_basic_error_path
+
+- stop_lttng_sessiond_notap
+- rmmod lttng-test
++stop_lttng_sessiond_notap
++rmmod lttng-test
+
+-else
+- # Kernel tests are skipped.
+- plan_tests $NUM_TESTS
+- skip 0 "Root access is needed. Skipping all kernel notification tests." $NUM_TESTS
+-fi
+
+ rm -f "$TESTAPP_STATE_PATH"
+diff --git a/tests/regression/tools/notification/test_notification_kernel_instrumentation b/tests/regression/tools/notification/test_notification_kernel_instrumentation
+index 90545a541..705f7703d 100755
+--- a/tests/regression/tools/notification/test_notification_kernel_instrumentation
++++ b/tests/regression/tools/notification/test_notification_kernel_instrumentation
+@@ -28,22 +28,21 @@ function test_kernel_instrumentation_notification
+ wait $APP_PID 2> /dev/null
+ }
+
+-if [ "$(id -u)" == "0" ]; then
+- validate_lttng_modules_present
++check_skip_kernel_test &&
++{
++ plan_skip_all "Skipping all tests."
++ exit 0
++}
+
+- modprobe lttng-test
++validate_lttng_modules_present
+
+- start_lttng_sessiond_notap
++modprobe lttng-test
+
+- test_kernel_instrumentation_notification
++start_lttng_sessiond_notap
+
+- stop_lttng_sessiond_notap
+- rmmod lttng-test
++test_kernel_instrumentation_notification
+
+-else
+- # Kernel tests are skipped.
+- plan_tests $NUM_TESTS
+- skip 0 "Root access is needed. Skipping all kernel notification tests." $NUM_TESTS
+-fi
++stop_lttng_sessiond_notap
++rmmod lttng-test
+
+ rm -f "$TESTAPP_STATE_PATH"
+diff --git a/tests/regression/tools/notification/test_notification_kernel_syscall b/tests/regression/tools/notification/test_notification_kernel_syscall
+index d273cb55d..7fa235388 100755
+--- a/tests/regression/tools/notification/test_notification_kernel_syscall
++++ b/tests/regression/tools/notification/test_notification_kernel_syscall
+@@ -31,19 +31,18 @@ function test_kernel_syscall_notification
+ wait $APP_PID 2> /dev/null
+ }
+
+-if [ "$(id -u)" == "0" ]; then
+- validate_lttng_modules_present
++check_skip_kernel_test &&
++{
++ plan_skip_all "Skipping all tests."
++ exit 0
++}
+
+- start_lttng_sessiond_notap
++validate_lttng_modules_present
+
+- test_kernel_syscall_notification
++start_lttng_sessiond_notap
+
+- stop_lttng_sessiond_notap
++test_kernel_syscall_notification
+
+-else
+- # Kernel tests are skipped.
+- plan_tests $NUM_TESTS
+- skip 0 "Root access is needed. Skipping all kernel notification tests." $NUM_TESTS
+-fi
++stop_lttng_sessiond_notap
+
+ rm -f "$TESTAPP_STATE_PATH"
+diff --git a/tests/regression/tools/notification/test_notification_kernel_userspace_probe b/tests/regression/tools/notification/test_notification_kernel_userspace_probe
+index 8ef8d708d..abddd9be7 100755
+--- a/tests/regression/tools/notification/test_notification_kernel_userspace_probe
++++ b/tests/regression/tools/notification/test_notification_kernel_userspace_probe
+@@ -29,18 +29,18 @@ function test_kernel_userspace_probe_notification
+ wait $APP_PID 2> /dev/null
+ }
+
+-if [ "$(id -u)" == "0" ]; then
+- validate_lttng_modules_present
++check_skip_kernel_test &&
++{
++ plan_skip_all "Skipping all tests."
++ exit 0
++}
++
++validate_lttng_modules_present
+
+- start_lttng_sessiond_notap
++start_lttng_sessiond_notap
+
+- test_kernel_userspace_probe_notification
++test_kernel_userspace_probe_notification
+
+- stop_lttng_sessiond_notap
+-else
+- # Kernel tests are skipped.
+- plan_tests $NUM_TESTS
+- skip 0 "Root access is needed. Skipping all kernel notification tests." $NUM_TESTS
+-fi
++stop_lttng_sessiond_notap
+
+ rm -f "$TESTAPP_STATE_PATH"
+diff --git a/tests/regression/tools/notification/test_notification_multi_app b/tests/regression/tools/notification/test_notification_multi_app
+index d8b639225..61891b56f 100755
+--- a/tests/regression/tools/notification/test_notification_multi_app
++++ b/tests/regression/tools/notification/test_notification_multi_app
+@@ -411,22 +411,18 @@ function test_on_register_evaluation ()
+ rm -rf "$output_dir"
+ }
+
+-
+ TESTS=(
+ test_multi_app_ust
+ test_on_register_evaluation_ust
+ )
+
+-if [ "$(id -u)" == "0" ]; then
++check_skip_kernel_test "$NUM_TEST_KERNEL" "Skipping kernel multi-app notification tests." || {
+ validate_lttng_modules_present
+ TESTS+=(
+- test_multi_app_kernel
+- test_on_register_evaluation_kernel
+-)
+-else
+- skip 0 "Root access is needed. Skipping all kernel multi-app notification tests." $NUM_TEST_KERNEL
+-fi
+-
++ test_multi_app_kernel
++ test_on_register_evaluation_kernel
++ )
++}
+
+ for fct_test in ${TESTS[@]};
+ do
+diff --git a/tests/regression/tools/notification/test_notification_notifier_discarded_count b/tests/regression/tools/notification/test_notification_notifier_discarded_count
+index c9235393e..a6c31a728 100755
+--- a/tests/regression/tools/notification/test_notification_notifier_discarded_count
++++ b/tests/regression/tools/notification/test_notification_notifier_discarded_count
+@@ -391,7 +391,8 @@ function test_ust_notifier_discarded_regardless_trigger_owner
+ test_ust_notifier_discarded_count
+ test_ust_notifier_discarded_count_max_bucket
+
+-if [ "$(id -u)" == "0" ]; then
++check_skip_kernel_test "$KERNEL_NUM_TESTS" "Skipping kernel notification tests." ||
++{
+
+ validate_lttng_modules_present
+
+@@ -413,9 +414,7 @@ if [ "$(id -u)" == "0" ]; then
+ modprobe --remove lttng-test
+
+ rm -rf "${sessiond_pipe[@]}" 2> /dev/null
+-else
+- # Kernel tests are skipped.
+- skip 0 "Root access is needed. Skipping all kernel notification tests." $KERNEL_NUM_TESTS
+-fi
++
++}
+
+ rm -rf "$TEST_TMPDIR"
+diff --git a/tests/regression/tools/regen-metadata/test_kernel b/tests/regression/tools/regen-metadata/test_kernel
+index 49eea32a7..555a4e2ce 100755
+--- a/tests/regression/tools/regen-metadata/test_kernel
++++ b/tests/regression/tools/regen-metadata/test_kernel
+@@ -99,13 +99,7 @@ plan_tests $NUM_TESTS
+
+ print_test_banner "$TEST_DESC"
+
+-if [ "$(id -u)" == "0" ]; then
+- isroot=1
+-else
+- isroot=0
+-fi
+-
+-skip $isroot "Root access is needed. Skipping all kernel streaming tests." $NUM_TESTS ||
++check_skip_kernel_test "$NUM_TESTS" "Skipping all tests." ||
+ {
+ validate_lttng_modules_present
+
+diff --git a/tests/regression/tools/regen-statedump/test_kernel b/tests/regression/tools/regen-statedump/test_kernel
+index 8a261355d..bbbac3942 100755
+--- a/tests/regression/tools/regen-statedump/test_kernel
++++ b/tests/regression/tools/regen-statedump/test_kernel
+@@ -39,13 +39,7 @@ plan_tests $NUM_TESTS
+
+ print_test_banner "$TEST_DESC"
+
+-if [ "$(id -u)" == "0" ]; then
+- isroot=1
+-else
+- isroot=0
+-fi
+-
+-skip $isroot "Root access is needed. Skipping all kernel streaming tests." $NUM_TESTS ||
++check_skip_kernel_test "$NUM_TESTS" "Skipping all tests." ||
+ {
+ validate_lttng_modules_present
+ start_lttng_sessiond
+diff --git a/tests/regression/tools/rotation/test_kernel b/tests/regression/tools/rotation/test_kernel
+index f5f1f5553..efe3fd359 100755
+--- a/tests/regression/tools/rotation/test_kernel
++++ b/tests/regression/tools/rotation/test_kernel
+@@ -82,13 +82,7 @@ plan_tests $NUM_TESTS
+
+ print_test_banner "$TEST_DESC"
+
+-if [ "$(id -u)" == "0" ]; then
+- isroot=1
+-else
+- isroot=0
+-fi
+-
+-skip $isroot "Root access is needed. Skipping all kernel streaming tests." $NUM_TESTS ||
++check_skip_kernel_test "$NUM_TESTS" "Skipping all tests." ||
+ {
+ validate_lttng_modules_present
+
+diff --git a/tests/regression/tools/snapshots/test_kernel b/tests/regression/tools/snapshots/test_kernel
+index abb243563..d91876867 100755
+--- a/tests/regression/tools/snapshots/test_kernel
++++ b/tests/regression/tools/snapshots/test_kernel
+@@ -217,13 +217,7 @@ plan_tests $NUM_TESTS
+
+ print_test_banner "$TEST_DESC"
+
+-if [ "$(id -u)" == "0" ]; then
+- isroot=1
+-else
+- isroot=0
+-fi
+-
+-skip $isroot "Root access is needed. Skipping all kernel snapshot tests" $NUM_TESTS ||
++check_skip_kernel_test "$NUM_TESTS" "Skipping all tests." ||
+ {
+
+ validate_lttng_modules_present
+diff --git a/tests/regression/tools/snapshots/test_kernel_streaming b/tests/regression/tools/snapshots/test_kernel_streaming
+index 0c92dc710..dd965afee 100755
+--- a/tests/regression/tools/snapshots/test_kernel_streaming
++++ b/tests/regression/tools/snapshots/test_kernel_streaming
+@@ -145,13 +145,7 @@ plan_tests $NUM_TESTS
+
+ print_test_banner "$TEST_DESC"
+
+-if [ "$(id -u)" == "0" ]; then
+- isroot=1
+-else
+- isroot=0
+-fi
+-
+-skip $isroot "Root access is needed. Skipping all kernel streaming tests" $NUM_TESTS ||
++check_skip_kernel_test $NUM_TESTS "Skipping all tests." ||
+ {
+ validate_lttng_modules_present
+
+diff --git a/tests/regression/tools/streaming/test_high_throughput_limits b/tests/regression/tools/streaming/test_high_throughput_limits
+index 2b9e3ad39..c55d51098 100755
+--- a/tests/regression/tools/streaming/test_high_throughput_limits
++++ b/tests/regression/tools/streaming/test_high_throughput_limits
+@@ -170,13 +170,7 @@ plan_tests $NUM_TESTS
+
+ print_test_banner "$TEST_DESC"
+
+-if [ "$(id -u)" == "0" ]; then
+- isroot=1
+-else
+- isroot=0
+-fi
+-
+-skip $isroot "Root access is needed to set bandwith limits. Skipping all tests." $NUM_TESTS ||
++check_skip_kernel_test "$NUM_TESTS" "Skipping all tests." ||
+ {
+
+ # Catch sigint and try to cleanup limits
+diff --git a/tests/regression/tools/streaming/test_kernel b/tests/regression/tools/streaming/test_kernel
+index 33334229f..113eea7bf 100755
+--- a/tests/regression/tools/streaming/test_kernel
++++ b/tests/regression/tools/streaming/test_kernel
+@@ -47,13 +47,7 @@ plan_tests $NUM_TESTS
+
+ print_test_banner "$TEST_DESC"
+
+-if [ "$(id -u)" == "0" ]; then
+- isroot=1
+-else
+- isroot=0
+-fi
+-
+-skip $isroot "Root access is needed. Skipping all kernel streaming tests." $NUM_TESTS ||
++check_skip_kernel_test "$NUM_TESTS" "Skipping all tests." ||
+ {
+ validate_lttng_modules_present
+
+diff --git a/tests/regression/tools/tracker/test_event_tracker b/tests/regression/tools/tracker/test_event_tracker
+index cc0f698d2..de0c79d36 100755
+--- a/tests/regression/tools/tracker/test_event_tracker
++++ b/tests/regression/tools/tracker/test_event_tracker
+@@ -466,13 +466,7 @@ test_event_track_untrack ust 0 "${EVENT_NAME}" "--pid --all" # backward compat
+ test_event_tracker ust 1 "${EVENT_NAME}" "--pid --all" # backward compat
+ test_event_pid_tracker ust 1 "${EVENT_NAME}"
+
+-if [ "$(id -u)" == "0" ]; then
+- isroot=1
+-else
+- isroot=0
+-fi
+-
+-skip $isroot "Root access is needed. Skipping all kernel tracker tests." $NUM_KERNEL_TESTS ||
++check_skip_kernel_test "$NUM_KERNEL_TESTS" "Skipping kernel tracker tests." ||
+ {
+ diag "Test kernel tracker"
+
+diff --git a/tests/regression/tools/trigger/test_add_trigger_cli b/tests/regression/tools/trigger/test_add_trigger_cli
+index 98ecf6272..d1763aa2e 100755
+--- a/tests/regression/tools/trigger/test_add_trigger_cli
++++ b/tests/regression/tools/trigger/test_add_trigger_cli
+@@ -34,12 +34,6 @@ tmp_stdout=$(mktemp --tmpdir -t test_parse_cli_trigger_stdout.XXXXXX)
+ tmp_stderr=$(mktemp --tmpdir -t test_parse_cli_trigger_stderr.XXXXXX)
+ uprobe_elf_binary="${TESTDIR}/utils/testapp/userspace-probe-elf-binary/.libs/userspace-probe-elf-binary"
+
+-if [ "$(id -u)" == "0" ]; then
+- ist_root=1
+-else
+- ist_root=0
+-fi
+-
+ function test_success ()
+ {
+ local test_name="$1"
+@@ -223,7 +217,7 @@ test_success "--exclude-name two" "trigger5" \
+ --condition event-rule-matches --type=user --name='jean-*' --exclude-name jean-chretien -x jean-charest \
+ --action notify
+
+-skip $ist_root "non-root user: skipping kprobe tests" 18 || {
++check_skip_kernel_test 18 "Skipping kprobe tests." || {
+ i=0
+
+ for type in kprobe kernel:kprobe; do
+@@ -262,7 +256,7 @@ skip $ist_root "non-root user: skipping kprobe tests" 18 || {
+ done
+ }
+
+-skip $ist_root "non-root user: skipping uprobe tests" 6 || {
++check_skip_kernel_test 6 "Skipping uprobe tests." || {
+ test_success "--condition event-rule-matches uprobe" "uprobe-trigger-0" \
+ --name="uprobe-trigger-0" \
+ --condition event-rule-matches --type=kernel:uprobe --location=${uprobe_elf_binary}:test_function --event-name=ma-probe \
+@@ -274,7 +268,7 @@ skip $ist_root "non-root user: skipping uprobe tests" 6 || {
+ --action notify
+ }
+
+-skip $ist_root "non-root user: skipping syscall tests" 30 || {
++check_skip_kernel_test 30 "Skipping syscall tests." || {
+ test_success "--condition event-rule-matches one syscall" "syscall-trigger-0" \
+ --name="syscall-trigger-0" \
+ --condition event-rule-matches --type=syscall --name=open \
+diff --git a/tests/regression/tools/trigger/test_list_triggers_cli b/tests/regression/tools/trigger/test_list_triggers_cli
+index a04018013..02c3de3e9 100755
+--- a/tests/regression/tools/trigger/test_list_triggers_cli
++++ b/tests/regression/tools/trigger/test_list_triggers_cli
+@@ -36,22 +36,12 @@ uprobe_sdt_binary=$(realpath "${TESTDIR}/utils/testapp/userspace-probe-sdt-binar
+ register_some_triggers_bin=$(realpath "${CURDIR}/utils/register-some-triggers")
+
+ uid=$(id --user)
+-gid=$(id --group)
+
+-if [ "$uid" == "0" ]; then
+- ist_root=1
+- ls "$uprobe_sdt_binary" >/dev/null 2>&1
+- if test $? == 0; then
+- hast_sdt_binary=1
+- else
+- hast_sdt_binary=0
+- fi
+-else
+- ist_root=0
+- hast_sdt_binary=0
++sdt_binary_present=0
++if [ -f "$uprobe_sdt_binary" ]; then
++ sdt_binary_present=1
+ fi
+
+-
+ test_top_level_options ()
+ {
+ diag "Listing top level options"
+@@ -2695,10 +2685,12 @@ start_lttng_sessiond_notap
+
+ test_top_level_options
+ test_event_rule_matches_tracepoint
+-skip $ist_root "non-root user: skipping kprobe tests" 13 || test_event_rule_matches_probe
+-skip $ist_root "non-root user: skipping uprobe tests" 9 || test_event_rule_matches_userspace_probe_elf
+-skip $(($ist_root && $hast_sdt_binary)) "skipping userspace probe SDT tests" 9 || test_event_rule_matches_userspace_probe_sdt
+-skip $ist_root "non-root user: skipping syscall tests" 17 || test_event_rule_matches_syscall
++check_skip_kernel_test 48 "Skipping kprobe, uprobe, SDT and syscall tests." || {
++ test_event_rule_matches_probe
++ test_event_rule_matches_userspace_probe_elf
++ skip $sdt_binary_present "No SDT binary. Skipping userspace probe SDT tests" 9 || test_event_rule_matches_userspace_probe_sdt
++ test_event_rule_matches_syscall
++}
+ test_session_consumed_size_condition
+ test_buffer_usage_conditions
+ test_session_rotation_conditions
+diff --git a/tests/regression/tools/wildcard/test_event_wildcard b/tests/regression/tools/wildcard/test_event_wildcard
+index f69baffc4..14d9bb88a 100755
+--- a/tests/regression/tools/wildcard/test_event_wildcard
++++ b/tests/regression/tools/wildcard/test_event_wildcard
+@@ -124,13 +124,7 @@ test_event_wildcard ust 1 'tp*tptest'
+ test_event_wildcard ust 1 'tp**tptest'
+ test_event_wildcard ust 1 'tp*test'
+
+-if [ "$(id -u)" == "0" ]; then
+- isroot=1
+-else
+- isroot=0
+-fi
+-
+-skip $isroot "Root access is needed. Skipping all kernel wildcard tests." $NUM_KERNEL_TESTS ||
++check_skip_kernel_test "$NUM_KERNEL_TESTS" "Skipping kernel wildcard tests." ||
+ {
+ diag "Test kernel wildcards"
+
+diff --git a/tests/regression/tools/working-directory/test_relayd_working_directory b/tests/regression/tools/working-directory/test_relayd_working_directory
+index c7e784cca..6bd1e504a 100755
+--- a/tests/regression/tools/working-directory/test_relayd_working_directory
++++ b/tests/regression/tools/working-directory/test_relayd_working_directory
+@@ -145,9 +145,9 @@ function test_relayd_debug_permission()
+ diag "Test lttng-relayd change working directory on non writable directory"
+
+ if [ "$(id -u)" == "0" ]; then
+- is_user=0
++ is_user=0
+ else
+- is_user=1
++ is_user=1
+ fi
+
+ skip $is_user "Skipping permission debug output test; operation can't fail as root" 6 ||
+diff --git a/tests/regression/ust/namespaces/test_ns_contexts_change b/tests/regression/ust/namespaces/test_ns_contexts_change
+index 8a4b62ce3..622241f7b 100755
+--- a/tests/regression/ust/namespaces/test_ns_contexts_change
++++ b/tests/regression/ust/namespaces/test_ns_contexts_change
+@@ -101,12 +101,7 @@ plan_tests $NUM_TESTS
+
+ print_test_banner "$TEST_DESC"
+
+-isroot=0
+-if [ "$(id -u)" == "0" ]; then
+- isroot=1
+-fi
+-
+-skip $isroot "Root access is needed. Skipping all tests." "$NUM_TESTS" && exit 0
++check_skip_kernel_test "$NUM_TESTS" "Skipping all tests." && exit 0
+
+ system_has_ns=0
+ if [ -d "/proc/$$/ns" ]; then
+diff --git a/tests/regression/ust/test_event_perf b/tests/regression/ust/test_event_perf
+index 33aab2981..178959a19 100755
+--- a/tests/regression/ust/test_event_perf
++++ b/tests/regression/ust/test_event_perf
+@@ -119,17 +119,11 @@ plan_tests $NUM_TESTS
+
+ print_test_banner "$TEST_DESC"
+
+-if [ "$(id -u)" == "0" ]; then
+- isroot=1
+-else
+- isroot=0
+-fi
+-
+ start_lttng_sessiond
+
+ test_parsing_raw
+
+-skip $isroot "Root access is needed. Skipping UST perf tests." 8 ||
++check_skip_kernel_test 8 "Skipping UST perf tests." ||
+ {
+ test_event_basic
+ }
+diff --git a/tests/utils/utils.sh b/tests/utils/utils.sh
+index faa87e783..da0e0569d 100644
+--- a/tests/utils/utils.sh
++++ b/tests/utils/utils.sh
+@@ -318,6 +318,41 @@ function conf_proc_count()
+ echo
+ }
+
++# Usage:
++# check_skip_kernel_test [NB_TESTS] [SKIP_MESSAGE]
++# Return 0 if LTTNG_TOOLS_DISABLE_KERNEL_TESTS was set or the current user is not a root user
++# If NB_TESTS is set, call skip() to skip number of tests.
++# If NB_TESTS is empty, just output a reason with diag.
++# An optional message can be added.
++
++function check_skip_kernel_test ()
++{
++ local num_tests="$1"
++ local skip_message="$2"
++
++ # Check for skip test kernel flag
++ if [ "$LTTNG_TOOLS_DISABLE_KERNEL_TESTS" == "1" ]; then
++ if ! test -z "$num_tests"; then
++ skip 0 "LTTNG_TOOLS_DISABLE_KERNEL_TESTS was set.${skip_message+ }${skip_message}" "$num_tests"
++ else
++ diag "LTTNG_TOOLS_DISABLE_KERNEL_TESTS was set.${skip_message+ }${skip_message}"
++ fi
++ return 0
++ fi
++
++ # Check if we are running as root
++ if [ "$(id -u)" != "0" ]; then
++ if ! test -z "$num_tests"; then
++ skip 0 "Root access is needed for kernel testing.${skip_message+ }${skip_message}" "$num_tests"
++ else
++ diag "Root access is needed for kernel testing.${skip_message+ }${skip_message}"
++ fi
++ return 0
++ fi
++
++ return 1
++}
++
+ # Check if base lttng-modules are present.
+ # Bail out on failure
+ function validate_lttng_modules_present ()
+--
+2.25.1
+
diff --git a/meta/recipes-kernel/lttng/lttng-tools/0001-tests-do-not-strip-a-helper-library.patch b/meta/recipes-kernel/lttng/lttng-tools/0001-tests-do-not-strip-a-helper-library.patch
index 2d08b08879..2f95889c4b 100644
--- a/meta/recipes-kernel/lttng/lttng-tools/0001-tests-do-not-strip-a-helper-library.patch
+++ b/meta/recipes-kernel/lttng/lttng-tools/0001-tests-do-not-strip-a-helper-library.patch
@@ -1,4 +1,4 @@
-From ab238c213fac190972f55e73cf3e0bb1c7846eb8 Mon Sep 17 00:00:00 2001
+From 2237748af00467ad8250a7ccd944200f811db69a Mon Sep 17 00:00:00 2001
From: Alexander Kanavin <alex.kanavin@gmail.com>
Date: Thu, 12 Dec 2019 16:52:07 +0100
Subject: [PATCH] tests: do not strip a helper library
@@ -10,10 +10,10 @@ Signed-off-by: Alexander Kanavin <alex.kanavin@gmail.com>
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/tests/utils/testapp/userspace-probe-elf-binary/Makefile.am b/tests/utils/testapp/userspace-probe-elf-binary/Makefile.am
-index 03f5d5a..d12c343 100644
+index 836f13e..e19a554 100644
--- a/tests/utils/testapp/userspace-probe-elf-binary/Makefile.am
+++ b/tests/utils/testapp/userspace-probe-elf-binary/Makefile.am
-@@ -12,7 +12,7 @@ userspace_probe_elf_binary_LDADD = libfoo.la
+@@ -14,7 +14,7 @@ userspace_probe_elf_binary_LDADD = libfoo.la
libfoo.strip: libfoo.la
$(OBJCOPY) --strip-all .libs/libfoo.so
@@ -22,6 +22,3 @@ index 03f5d5a..d12c343 100644
@if [ x"$(srcdir)" != x"$(builddir)" ]; then \
for script in $(EXTRA_DIST); do \
cp -f $(srcdir)/$$script $(builddir); \
---
-2.17.1
-
diff --git a/meta/recipes-kernel/lttng/lttng-tools/determinism.patch b/meta/recipes-kernel/lttng/lttng-tools/determinism.patch
deleted file mode 100644
index b2ab880bd6..0000000000
--- a/meta/recipes-kernel/lttng/lttng-tools/determinism.patch
+++ /dev/null
@@ -1,64 +0,0 @@
-This is a bit ugly. Specifing abs_builddir as an RPATH is plain wrong when
-cross compiling. Sadly, removing the rpath makes libtool/automake do
-weird things and breaks the build as shared libs are no longer generated.
-
-We already try and delete the RPATH at do_install with chrpath however
-that does leave the path in the string table so it doesn't help us
-with reproducibility.
-
-Instead, hack in a bogus but harmless path, then delete it later in
-our do_install. Ultimately we may want to pass a specific path to use
-to configure if we really do need to set an RPATH at all. It is unclear
-to me whether the tests need that or not.
-
-Fixes reproducibility issues for lttng-tools.
-
-Upstream-Status: Pending [needs discussion with upstream about the correct solution]
-RP 2021/3/1
-
-Index: lttng-tools-2.12.2/tests/regression/ust/ust-dl/Makefile.am
-===================================================================
---- lttng-tools-2.12.2.orig/tests/regression/ust/ust-dl/Makefile.am
-+++ lttng-tools-2.12.2/tests/regression/ust/ust-dl/Makefile.am
-@@ -27,16 +27,16 @@ noinst_LTLIBRARIES = libzzz.la libbar.la
-
- libzzz_la_SOURCES = libzzz.c libzzz.h
- libzzz_la_LDFLAGS = -module -shared -avoid-version \
-- -rpath $(abs_builddir)
-+ -rpath /usr/lib
-
- libbar_la_SOURCES = libbar.c libbar.h
- libbar_la_LDFLAGS = -module -shared -avoid-version \
-- -rpath $(abs_builddir)
-+ -rpath /usr/lib
- libbar_la_LIBADD = libzzz.la
-
- libfoo_la_SOURCES = libfoo.c libfoo.h
- libfoo_la_LDFLAGS = -module -shared -avoid-version \
-- -rpath $(abs_builddir)
-+ -rpath /usr/lib
- libfoo_la_LIBADD = libbar.la
-
- CLEANFILES = libfoo.so libfoo.so.debug libbar.so libbar.so.debug \
-@@ -44,7 +44,7 @@ CLEANFILES = libfoo.so libfoo.so.debug l
-
- libtp_la_SOURCES = libbar-tp.h libbar-tp.c libfoo-tp.h libfoo-tp.c \
- libzzz-tp.h libzzz-tp.c
--libtp_la_LDFLAGS = -module -shared -rpath $(abs_builddir)
-+libtp_la_LDFLAGS = -module -shared -rpath /usr/lib
-
- # Extract debug symbols
- libfoo.so.debug: libfoo.la
-Index: lttng-tools-2.12.2/tests/utils/testapp/userspace-probe-elf-binary/Makefile.am
-===================================================================
---- lttng-tools-2.12.2.orig/tests/utils/testapp/userspace-probe-elf-binary/Makefile.am
-+++ lttng-tools-2.12.2/tests/utils/testapp/userspace-probe-elf-binary/Makefile.am
-@@ -5,7 +5,7 @@ AM_CFLAGS += -O0
- noinst_LTLIBRARIES = libfoo.la
-
- libfoo_la_SOURCES = foo.c foo.h
--libfoo_la_LDFLAGS = -shared -module -avoid-version -rpath $(abs_builddir)/.libs/
-+libfoo_la_LDFLAGS = -shared -module -avoid-version -rpath /usr/lib
-
- noinst_PROGRAMS = userspace-probe-elf-binary
- userspace_probe_elf_binary_SOURCES = userspace-probe-elf-binary.c
diff --git a/meta/recipes-kernel/lttng/lttng-tools/run-ptest b/meta/recipes-kernel/lttng/lttng-tools/run-ptest
index c4dbe50f21..39d93e2bbf 100755
--- a/meta/recipes-kernel/lttng/lttng-tools/run-ptest
+++ b/meta/recipes-kernel/lttng/lttng-tools/run-ptest
@@ -1,7 +1,45 @@
#!/bin/sh
# Without --ignore-exit, the tap harness causes any FAILs within a
# test plan to raise ERRORs; this is just noise.
+
+#Detecting whether current system has lttng kernel modules
+LTTNG_KMOD_PATH=/lib/modules/$(uname -r)/kernel/lttng-modules/lttng-tracer.ko
+function validate_lttng_modules_present()
+{
+ # Check for loadable modules.
+ if [ -f "$LTTNG_KMOD_PATH" ]; then
+ return 0
+ fi
+
+ # Check for builtin modules.
+ ls /proc/lttng > /dev/null 2>&1
+ if [ $? -eq 0 ]; then
+ return 0
+ fi
+
+ return 1
+}
+
export LD_LIBRARY_PATH=FIXMEPTESTPATH/tests/utils/testapp/userspace-probe-elf-binary/.libs
-makeargs="LOG_DRIVER_FLAGS=--ignore-exit top_srcdir=$PWD top_builddir=$PWD"
-make -k -t all >/dev/null 2>&1
-exec make -k -s $makeargs check 2>/dev/null | sed -e 's#/tmp/tmp\...........#/tmp/tmp.XXXXXXXXXX#g'
+makeargs="LOG_DRIVER_FLAGS=--ignore-exit top_srcdir=FIXMEPTESTPATH top_builddir=FIXMEPTESTPATH"
+
+#If current system doesn't have lttng kernel modules, disable lttng kernel related tests.
+validate_lttng_modules_present || {
+ makeargs="$makeargs LTTNG_TOOLS_DISABLE_KERNEL_TESTS=1"
+}
+
+make -k -t all >error.log 2>&1
+# Can specify a test e.g.:
+# -C tests/regression/ check TESTS='kernel/test_callstack'
+make -k -s $makeargs check 2>error.log | sed -e 's#/tmp/tmp\...........#/tmp/tmp.XXXXXXXXXX#g'
+exitcode=$?
+if [ -e error.log ]; then
+ cat error.log
+fi
+if [ -e tests/unit/test-suite.log ]; then
+ cat tests/unit/test-suite.log
+fi
+if [ -e tests/regression/test-suite.log ]; then
+ cat tests/regression/test-suite.log
+fi
+exit $exitcode
diff --git a/meta/recipes-kernel/lttng/lttng-tools_2.13.13.bb b/meta/recipes-kernel/lttng/lttng-tools_2.13.13.bb
new file mode 100644
index 0000000000..174dec036e
--- /dev/null
+++ b/meta/recipes-kernel/lttng/lttng-tools_2.13.13.bb
@@ -0,0 +1,197 @@
+SECTION = "devel"
+SUMMARY = "Linux Trace Toolkit Control"
+DESCRIPTION = "The Linux trace toolkit is a suite of tools designed \
+to extract program execution details from the Linux operating system \
+and interpret them."
+HOMEPAGE = "https://github.com/lttng/lttng-tools"
+
+LICENSE = "GPL-2.0-only & LGPL-2.1-only"
+LIC_FILES_CHKSUM = "file://LICENSE;md5=40ef17463fbd6f377db3c47b1cbaded8 \
+ file://LICENSES/GPL-2.0;md5=e68f69a54b44ba526ad7cb963e18fbce \
+ file://LICENSES/LGPL-2.1;md5=9920968d0f2ff585ce61fae30344dd95"
+
+include lttng-platforms.inc
+
+DEPENDS = "liburcu popt libxml2 util-linux bison-native"
+RDEPENDS:${PN} = "libgcc"
+RRECOMMENDS:${PN} += "${LTTNGMODULES}"
+RDEPENDS:${PN}-ptest += "make perl bash gawk babeltrace procps perl-module-overloading coreutils util-linux kmod ${LTTNGMODULES} sed python3-core grep binutils"
+RDEPENDS:${PN}-ptest:append:libc-glibc = " glibc-utils"
+RDEPENDS:${PN}-ptest:append:libc-musl = " musl-utils"
+# babelstats.pl wants getopt-long
+RDEPENDS:${PN}-ptest += "perl-module-getopt-long"
+
+PYTHON_OPTION = "am_cv_python_pyexecdir='${PYTHON_SITEPACKAGES_DIR}' \
+ am_cv_python_pythondir='${PYTHON_SITEPACKAGES_DIR}' \
+ PYTHON_INCLUDE='-I${STAGING_INCDIR}/python${PYTHON_BASEVERSION}${PYTHON_ABI}' \
+"
+PACKAGECONFIG ??= "${LTTNGUST} kmod"
+PACKAGECONFIG[python] = "--enable-python-bindings ${PYTHON_OPTION},,python3 swig-native"
+PACKAGECONFIG[lttng-ust] = "--with-lttng-ust, --without-lttng-ust, lttng-ust"
+PACKAGECONFIG[kmod] = "--with-kmod, --without-kmod, kmod"
+PACKAGECONFIG[manpages] = "--enable-man-pages, --disable-man-pages, asciidoc-native xmlto-native libxslt-native"
+
+SRC_URI = "https://lttng.org/files/lttng-tools/lttng-tools-${PV}.tar.bz2 \
+ file://0001-tests-do-not-strip-a-helper-library.patch \
+ file://run-ptest \
+ file://lttng-sessiond.service \
+ file://disable-tests.patch \
+ file://0001-compat-Define-off64_t-as-off_t-on-linux.patch \
+ file://0001-tests-add-check_skip_kernel_test-to-check-root-user-.patch \
+ file://0001-Fix-rotation-destroy-flush-fix-session-daemon-abort-.patch \
+ "
+
+SRC_URI[sha256sum] = "ff5f4f00b081dac66092afe8e72b7c790670931cf1c1ee0deaa7f80fbc53883e"
+
+inherit autotools ptest pkgconfig useradd python3-dir manpages systemd
+
+CACHED_CONFIGUREVARS = "PGREP=/usr/bin/pgrep"
+
+SYSTEMD_SERVICE:${PN} = "lttng-sessiond.service"
+SYSTEMD_AUTO_ENABLE = "disable"
+
+USERADD_PACKAGES = "${PN}"
+GROUPADD_PARAM:${PN} = "tracing"
+
+FILES:${PN} += "${libdir}/lttng/libexec/* ${datadir}/xml/lttng \
+ ${PYTHON_SITEPACKAGES_DIR}/*"
+FILES:${PN}-staticdev += "${PYTHON_SITEPACKAGES_DIR}/*.a"
+FILES:${PN}-dev += "${PYTHON_SITEPACKAGES_DIR}/*.la"
+
+# Since files are installed into ${libdir}/lttng/libexec we match
+# the libexec insane test so skip it.
+# Python module needs to keep _lttng.so
+INSANE_SKIP:${PN} = "libexec dev-so"
+INSANE_SKIP:${PN}-dbg = "libexec"
+
+PRIVATE_LIBS:${PN}-ptest = "libfoo.so"
+
+do_install:append () {
+ # install systemd unit file
+ install -d ${D}${systemd_system_unitdir}
+ install -m 0644 ${WORKDIR}/lttng-sessiond.service ${D}${systemd_system_unitdir}
+}
+
+do_install_ptest () {
+ for f in Makefile tests/Makefile tests/utils/utils.sh tests/regression/tools/save-load/*.lttng \
+ tests/regression/tools/save-load/configuration/load-42*.lttng tests/regression/tools/health/test_health.sh \
+ tests/regression/tools/metadata/utils.sh tests/regression/tools/rotation/rotate_utils.sh \
+ tests/regression/tools/notification/util_event_generator.sh \
+ tests/regression/tools/base-path/*.lttng; do
+ install -D "${B}/$f" "${D}${PTEST_PATH}/$f"
+ done
+
+ for f in tests/utils/tap-driver.sh config/test-driver src/common/config/session.xsd src/common/mi-lttng-4.1.xsd; do
+ install -D "${S}/$f" "${D}${PTEST_PATH}/$f"
+ done
+
+ # Patch in the correct path for the custom libraries a helper executable needs
+ sed -i -e 's!FIXMEPTESTPATH!${PTEST_PATH}!g' "${D}${PTEST_PATH}/run-ptest"
+
+ # Prevent 'make check' from recursing into non-test subdirectories.
+ sed -i -e 's!^SUBDIRS = .*!SUBDIRS = tests!' "${D}${PTEST_PATH}/Makefile"
+
+ # We don't need these
+ sed -i -e '/dist_noinst_SCRIPTS = /,/^$/d' "${D}${PTEST_PATH}/tests/Makefile"
+
+ # We shouldn't need to build anything in tests/utils
+ sed -i -e 's!am__append_1 = . utils!am__append_1 = . !' \
+ "${D}${PTEST_PATH}/tests/Makefile"
+
+ # Copy the tests directory tree and the executables and
+ # Makefiles found within.
+ for d in $(find "${B}/tests" -type d -not -name .libs -printf '%P ') ; do
+ install -d "${D}${PTEST_PATH}/tests/$d"
+ find "${B}/tests/$d" -maxdepth 1 -executable -type f \
+ -exec install -t "${D}${PTEST_PATH}/tests/$d" {} +
+ # Take all .py scripts for tests using the python bindings.
+ find "${B}/tests/$d" -maxdepth 1 -type f -name "*.py" \
+ -exec install -t "${D}${PTEST_PATH}/tests/$d" {} +
+ test -r "${B}/tests/$d/Makefile" && \
+ install -t "${D}${PTEST_PATH}/tests/$d" "${B}/tests/$d/Makefile"
+ done
+
+ for d in $(find "${B}/tests" -type d -name .libs -printf '%P ') ; do
+ for f in $(find "${B}/tests/$d" -maxdepth 1 -executable -type f -printf '%P ') ; do
+ cp ${B}/tests/$d/$f ${D}${PTEST_PATH}/tests/`dirname $d`/$f
+ case $f in
+ *.so|userspace-probe-elf-*)
+ install -d ${D}${PTEST_PATH}/tests/$d/
+ ln -s ../$f ${D}${PTEST_PATH}/tests/$d/$f
+ # Remove any rpath/runpath to pass QA check.
+ chrpath --delete ${D}${PTEST_PATH}/tests/$d/$f
+ ;;
+ esac
+ done
+ done
+
+ chrpath --delete ${D}${PTEST_PATH}/tests/utils/testapp/userspace-probe-elf-binary/userspace-probe-elf-binary
+ chrpath --delete ${D}${PTEST_PATH}/tests/utils/testapp/userspace-probe-elf-cxx-binary/userspace-probe-elf-cxx-binary
+ chrpath --delete ${D}${PTEST_PATH}/tests/regression/ust/ust-dl/libbar.so
+ chrpath --delete ${D}${PTEST_PATH}/tests/regression/ust/ust-dl/libfoo.so
+
+ #
+ # Use the versioned libs of liblttng-ust-dl.
+ #
+ ustdl="${D}${PTEST_PATH}/tests/regression/ust/ust-dl/test_ust-dl.py"
+ if [ -e $ustdl ]; then
+ sed -i -e 's!:liblttng-ust-dl.so!:liblttng-ust-dl.so.0!' $ustdl
+ fi
+
+ install ${B}/tests/unit/ini_config/sample.ini ${D}${PTEST_PATH}/tests/unit/ini_config/
+
+ # We shouldn't need to build anything in tests/regression/tools
+ sed -i -e 's!^SUBDIRS = tools !SUBDIRS = !' \
+ "${D}${PTEST_PATH}/tests/regression/Makefile"
+
+ # Prevent attempts to update Makefiles during test runs, and
+ # silence "Making check in $SUBDIR" messages.
+ find "${D}${PTEST_PATH}" -name Makefile -type f -exec \
+ sed -i -e '/Makefile:/,/^$/d' -e '/%: %.in/,/^$/d' \
+ -e '/echo "Making $$target in $$subdir"; \\/d' \
+ -e 's/^srcdir = \(.*\)/srcdir = ./' \
+ -e 's/^builddir = \(.*\)/builddir = ./' \
+ -e 's/^all-am:.*/all-am:/' \
+ {} +
+
+ find "${D}${PTEST_PATH}" -name Makefile -type f -exec \
+ touch -r "${B}/Makefile" {} +
+
+ #
+ # Need to stop generated binaries from rebuilding by removing their source dependencies
+ #
+ sed -e 's#\(^test.*OBJECTS.=\)#disable\1#g' \
+ -e 's#\(^test.*DEPENDENCIES.=\)#disable\1#g' \
+ -e 's#\(^test.*SOURCES.=\)#disable\1#g' \
+ -e 's#\(^test.*LDADD.=\)#disable\1#g' \
+ -i ${D}${PTEST_PATH}/tests/unit/Makefile
+
+ # Fix hardcoded build path
+ sed -e 's#TESTAPP_PATH=.*/tests/regression/#TESTAPP_PATH="${PTEST_PATH}/tests/regression/#' \
+ -i ${D}${PTEST_PATH}/tests/regression/ust/python-logging/test_python_logging
+
+ # Substitute links to installed binaries.
+ for prog in lttng lttng-relayd lttng-sessiond lttng-consumerd lttng-crash; do
+ exedir="${D}${PTEST_PATH}/src/bin/${prog}"
+ install -d "$exedir"
+ case "$prog" in
+ lttng-consumerd)
+ ln -s "${libdir}/lttng/libexec/$prog" "$exedir"
+ ;;
+ *)
+ ln -s "${bindir}/$prog" "$exedir"
+ ;;
+ esac
+ done
+}
+
+INHIBIT_PACKAGE_STRIP_FILES = "\
+ ${PKGD}${PTEST_PATH}/tests/utils/testapp/userspace-probe-elf-binary/userspace-probe-elf-binary \
+ ${PKGD}${PTEST_PATH}/tests/utils/testapp/userspace-probe-elf-binary/.libs/userspace-probe-elf-binary \
+ ${PKGD}${PTEST_PATH}/tests/utils/testapp/userspace-probe-elf-cxx-binary/userspace-probe-elf-cxx-binary \
+ ${PKGD}${PTEST_PATH}/tests/utils/testapp/userspace-probe-elf-cxx-binary/.libs/userspace-probe-elf-cxx-binary \
+ ${PKGD}${PTEST_PATH}/tests/utils/testapp/gen-syscall-events/gen-syscall-events \
+ ${PKGD}${PTEST_PATH}/tests/utils/testapp/gen-syscall-events/.libs/gen-syscall-events \
+ ${PKGD}${PTEST_PATH}/tests/utils/testapp/gen-syscall-events-callstack/gen-syscall-events-callstack \
+ ${PKGD}${PTEST_PATH}/tests/utils/testapp/gen-syscall-events-callstack/.libs/gen-syscall-events-callstack \
+ "
diff --git a/meta/recipes-kernel/lttng/lttng-tools_2.13.7.bb b/meta/recipes-kernel/lttng/lttng-tools_2.13.7.bb
deleted file mode 100644
index 1a972ec836..0000000000
--- a/meta/recipes-kernel/lttng/lttng-tools_2.13.7.bb
+++ /dev/null
@@ -1,195 +0,0 @@
-SECTION = "devel"
-SUMMARY = "Linux Trace Toolkit Control"
-DESCRIPTION = "The Linux trace toolkit is a suite of tools designed \
-to extract program execution details from the Linux operating system \
-and interpret them."
-HOMEPAGE = "https://github.com/lttng/lttng-tools"
-
-LICENSE = "GPL-2.0-only & LGPL-2.1-only"
-LIC_FILES_CHKSUM = "file://LICENSE;md5=40ef17463fbd6f377db3c47b1cbaded8 \
- file://LICENSES/GPL-2.0;md5=e68f69a54b44ba526ad7cb963e18fbce \
- file://LICENSES/LGPL-2.1;md5=9920968d0f2ff585ce61fae30344dd95"
-
-include lttng-platforms.inc
-
-DEPENDS = "liburcu popt libxml2 util-linux bison-native"
-RDEPENDS:${PN} = "libgcc"
-RRECOMMENDS:${PN} += "${LTTNGMODULES}"
-RDEPENDS:${PN}-ptest += "make perl bash gawk babeltrace procps perl-module-overloading coreutils util-linux kmod ${LTTNGMODULES} sed python3-core grep"
-RDEPENDS:${PN}-ptest:append:libc-glibc = " glibc-utils"
-RDEPENDS:${PN}-ptest:append:libc-musl = " musl-utils"
-# babelstats.pl wants getopt-long
-RDEPENDS:${PN}-ptest += "perl-module-getopt-long"
-
-PYTHON_OPTION = "am_cv_python_pyexecdir='${PYTHON_SITEPACKAGES_DIR}' \
- am_cv_python_pythondir='${PYTHON_SITEPACKAGES_DIR}' \
- PYTHON_INCLUDE='-I${STAGING_INCDIR}/python${PYTHON_BASEVERSION}${PYTHON_ABI}' \
-"
-PACKAGECONFIG ??= "${LTTNGUST} kmod"
-PACKAGECONFIG[python] = "--enable-python-bindings ${PYTHON_OPTION},,python3 swig-native"
-PACKAGECONFIG[lttng-ust] = "--with-lttng-ust, --without-lttng-ust, lttng-ust"
-PACKAGECONFIG[kmod] = "--with-kmod, --without-kmod, kmod"
-PACKAGECONFIG[manpages] = "--enable-man-pages, --disable-man-pages, asciidoc-native xmlto-native libxslt-native"
-
-SRC_URI = "https://lttng.org/files/lttng-tools/lttng-tools-${PV}.tar.bz2 \
- file://0001-tests-do-not-strip-a-helper-library.patch \
- file://run-ptest \
- file://lttng-sessiond.service \
- file://determinism.patch \
- file://disable-tests.patch \
- "
-
-SRC_URI[sha256sum] = "d17a02e8f178a7cf3403e3c9edfb90ad3a1628e20aa0b5131408ae47f722f08d"
-
-inherit autotools ptest pkgconfig useradd python3-dir manpages systemd
-
-CACHED_CONFIGUREVARS = "PGREP=/usr/bin/pgrep"
-
-SYSTEMD_SERVICE:${PN} = "lttng-sessiond.service"
-SYSTEMD_AUTO_ENABLE = "disable"
-
-USERADD_PACKAGES = "${PN}"
-GROUPADD_PARAM:${PN} = "tracing"
-
-FILES:${PN} += "${libdir}/lttng/libexec/* ${datadir}/xml/lttng \
- ${PYTHON_SITEPACKAGES_DIR}/*"
-FILES:${PN}-staticdev += "${PYTHON_SITEPACKAGES_DIR}/*.a"
-FILES:${PN}-dev += "${PYTHON_SITEPACKAGES_DIR}/*.la"
-
-# Since files are installed into ${libdir}/lttng/libexec we match
-# the libexec insane test so skip it.
-# Python module needs to keep _lttng.so
-INSANE_SKIP:${PN} = "libexec dev-so"
-INSANE_SKIP:${PN}-dbg = "libexec"
-
-PRIVATE_LIBS:${PN}-ptest = "libfoo.so"
-
-do_install:append () {
- # install systemd unit file
- install -d ${D}${systemd_system_unitdir}
- install -m 0644 ${WORKDIR}/lttng-sessiond.service ${D}${systemd_system_unitdir}
-}
-
-do_install_ptest () {
- for f in Makefile tests/Makefile tests/utils/utils.sh tests/regression/tools/save-load/*.lttng \
- tests/regression/tools/save-load/configuration/load-42*.lttng tests/regression/tools/health/test_health.sh \
- tests/regression/tools/metadata/utils.sh tests/regression/tools/rotation/rotate_utils.sh \
- tests/regression/tools/notification/util_event_generator.sh \
- tests/regression/tools/base-path/*.lttng; do
- install -D "${B}/$f" "${D}${PTEST_PATH}/$f"
- done
-
- for f in tests/utils/tap-driver.sh config/test-driver src/common/config/session.xsd src/common/mi-lttng-4.1.xsd; do
- install -D "${S}/$f" "${D}${PTEST_PATH}/$f"
- done
-
- # Patch in the correct path for the custom libraries a helper executable needs
- sed -i -e 's!FIXMEPTESTPATH!${PTEST_PATH}!' "${D}${PTEST_PATH}/run-ptest"
-
- # Prevent 'make check' from recursing into non-test subdirectories.
- sed -i -e 's!^SUBDIRS = .*!SUBDIRS = tests!' "${D}${PTEST_PATH}/Makefile"
-
- # We don't need these
- sed -i -e '/dist_noinst_SCRIPTS = /,/^$/d' "${D}${PTEST_PATH}/tests/Makefile"
-
- # We shouldn't need to build anything in tests/utils
- sed -i -e 's!am__append_1 = . utils!am__append_1 = . !' \
- "${D}${PTEST_PATH}/tests/Makefile"
-
- # Copy the tests directory tree and the executables and
- # Makefiles found within.
- for d in $(find "${B}/tests" -type d -not -name .libs -printf '%P ') ; do
- install -d "${D}${PTEST_PATH}/tests/$d"
- find "${B}/tests/$d" -maxdepth 1 -executable -type f \
- -exec install -t "${D}${PTEST_PATH}/tests/$d" {} +
- # Take all .py scripts for tests using the python bindings.
- find "${B}/tests/$d" -maxdepth 1 -type f -name "*.py" \
- -exec install -t "${D}${PTEST_PATH}/tests/$d" {} +
- test -r "${B}/tests/$d/Makefile" && \
- install -t "${D}${PTEST_PATH}/tests/$d" "${B}/tests/$d/Makefile"
- done
-
- for d in $(find "${B}/tests" -type d -name .libs -printf '%P ') ; do
- for f in $(find "${B}/tests/$d" -maxdepth 1 -executable -type f -printf '%P ') ; do
- cp ${B}/tests/$d/$f ${D}${PTEST_PATH}/tests/`dirname $d`/$f
- case $f in
- *.so|userspace-probe-elf-*)
- install -d ${D}${PTEST_PATH}/tests/$d/
- ln -s ../$f ${D}${PTEST_PATH}/tests/$d/$f
- # Remove any rpath/runpath to pass QA check.
- chrpath --delete ${D}${PTEST_PATH}/tests/$d/$f
- ;;
- esac
- done
- done
-
- chrpath --delete ${D}${PTEST_PATH}/tests/utils/testapp/userspace-probe-elf-binary/userspace-probe-elf-binary
- chrpath --delete ${D}${PTEST_PATH}/tests/utils/testapp/userspace-probe-elf-cxx-binary/userspace-probe-elf-cxx-binary
- chrpath --delete ${D}${PTEST_PATH}/tests/regression/ust/ust-dl/libbar.so
- chrpath --delete ${D}${PTEST_PATH}/tests/regression/ust/ust-dl/libfoo.so
-
- #
- # Use the versioned libs of liblttng-ust-dl.
- #
- ustdl="${D}${PTEST_PATH}/tests/regression/ust/ust-dl/test_ust-dl.py"
- if [ -e $ustdl ]; then
- sed -i -e 's!:liblttng-ust-dl.so!:liblttng-ust-dl.so.0!' $ustdl
- fi
-
- install ${B}/tests/unit/ini_config/sample.ini ${D}${PTEST_PATH}/tests/unit/ini_config/
-
- # We shouldn't need to build anything in tests/regression/tools
- sed -i -e 's!^SUBDIRS = tools !SUBDIRS = !' \
- "${D}${PTEST_PATH}/tests/regression/Makefile"
-
- # Prevent attempts to update Makefiles during test runs, and
- # silence "Making check in $SUBDIR" messages.
- find "${D}${PTEST_PATH}" -name Makefile -type f -exec \
- sed -i -e '/Makefile:/,/^$/d' -e '/%: %.in/,/^$/d' \
- -e '/echo "Making $$target in $$subdir"; \\/d' \
- -e 's/^srcdir = \(.*\)/srcdir = ./' \
- -e 's/^builddir = \(.*\)/builddir = ./' \
- -e 's/^all-am:.*/all-am:/' \
- {} +
-
- find "${D}${PTEST_PATH}" -name Makefile -type f -exec \
- touch -r "${B}/Makefile" {} +
-
- #
- # Need to stop generated binaries from rebuilding by removing their source dependencies
- #
- sed -e 's#\(^test.*OBJECTS.=\)#disable\1#g' \
- -e 's#\(^test.*DEPENDENCIES.=\)#disable\1#g' \
- -e 's#\(^test.*SOURCES.=\)#disable\1#g' \
- -e 's#\(^test.*LDADD.=\)#disable\1#g' \
- -i ${D}${PTEST_PATH}/tests/unit/Makefile
-
- # Fix hardcoded build path
- sed -e 's#TESTAPP_PATH=.*/tests/regression/#TESTAPP_PATH="${PTEST_PATH}/tests/regression/#' \
- -i ${D}${PTEST_PATH}/tests/regression/ust/python-logging/test_python_logging
-
- # Substitute links to installed binaries.
- for prog in lttng lttng-relayd lttng-sessiond lttng-consumerd lttng-crash; do
- exedir="${D}${PTEST_PATH}/src/bin/${prog}"
- install -d "$exedir"
- case "$prog" in
- lttng-consumerd)
- ln -s "${libdir}/lttng/libexec/$prog" "$exedir"
- ;;
- *)
- ln -s "${bindir}/$prog" "$exedir"
- ;;
- esac
- done
-}
-
-INHIBIT_PACKAGE_STRIP_FILES = "\
- ${PKGD}${PTEST_PATH}/tests/utils/testapp/userspace-probe-elf-binary/userspace-probe-elf-binary \
- ${PKGD}${PTEST_PATH}/tests/utils/testapp/userspace-probe-elf-binary/.libs/userspace-probe-elf-binary \
- ${PKGD}${PTEST_PATH}/tests/utils/testapp/userspace-probe-elf-cxx-binary/userspace-probe-elf-cxx-binary \
- ${PKGD}${PTEST_PATH}/tests/utils/testapp/userspace-probe-elf-cxx-binary/.libs/userspace-probe-elf-cxx-binary \
- ${PKGD}${PTEST_PATH}/tests/utils/testapp/gen-syscall-events/gen-syscall-events \
- ${PKGD}${PTEST_PATH}/tests/utils/testapp/gen-syscall-events/.libs/gen-syscall-events \
- ${PKGD}${PTEST_PATH}/tests/utils/testapp/gen-syscall-events-callstack/gen-syscall-events-callstack \
- ${PKGD}${PTEST_PATH}/tests/utils/testapp/gen-syscall-events-callstack/.libs/gen-syscall-events-callstack \
- "
diff --git a/meta/recipes-kernel/lttng/lttng-ust/0001-Makefile.am-update-rpath-link.patch b/meta/recipes-kernel/lttng/lttng-ust/0001-Makefile.am-update-rpath-link.patch
index cbbf1df812..6aca8f85fa 100644
--- a/meta/recipes-kernel/lttng/lttng-ust/0001-Makefile.am-update-rpath-link.patch
+++ b/meta/recipes-kernel/lttng/lttng-ust/0001-Makefile.am-update-rpath-link.patch
@@ -1,4 +1,4 @@
-From 06279f50e924d1d55b43eb3b299f6633ecb1f7a4 Mon Sep 17 00:00:00 2001
+From 7d053804ab3823d40ae10d90f4efc49dbfb4cb66 Mon Sep 17 00:00:00 2001
From: Changqing Li <changqing.li@windriver.com>
Date: Wed, 22 Sep 2021 16:33:10 +0800
Subject: [PATCH] Makefile.am: update rpath link
@@ -13,15 +13,16 @@ ld: warning: liblttng-ust-tracepoint.so.1, needed by ../../../src/lib/lttng-ust/
Upstream-Status: Submitted [https://github.com/lttng/lttng-ust/pull/61]
Signed-off-by: Changqing Li <changqing.li@windriver.com>
+
---
doc/examples/Makefile.am | 2 +-
- 1 file changed, 1 insertions(+), 1 deletions(-)
+ 1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/doc/examples/Makefile.am b/doc/examples/Makefile.am
-index 8ee0564..20d246c 100644
+index 57782cc..d46caa6 100644
--- a/doc/examples/Makefile.am
+++ b/doc/examples/Makefile.am
-@@ -142,7 +142,7 @@ all-local:
+@@ -167,7 +167,7 @@ all-local:
CFLAGS='$(CFLAGS)' \
AM_CFLAGS='$(AM_CFLAGS)' \
LDFLAGS="$(LDFLAGS)" \
@@ -30,6 +31,3 @@ index 8ee0564..20d246c 100644
LTTNG_GEN_TP_PATH="$$rel_src_subdir$(top_srcdir)/tools/" \
AM_V_P="$(AM_V_P)" \
AM_V_at="$(AM_V_at)" \
---
-2.17.1
-
diff --git a/meta/recipes-kernel/lttng/lttng-ust/0001-lttng-ust-common-link-with-liburcu-explicitly.patch b/meta/recipes-kernel/lttng/lttng-ust/0001-lttng-ust-common-link-with-liburcu-explicitly.patch
index d3c451fd2b..e85dbdb439 100644
--- a/meta/recipes-kernel/lttng/lttng-ust/0001-lttng-ust-common-link-with-liburcu-explicitly.patch
+++ b/meta/recipes-kernel/lttng/lttng-ust/0001-lttng-ust-common-link-with-liburcu-explicitly.patch
@@ -1,4 +1,4 @@
-From 2058584b7e87d6bd9d1765577766e0df7752232c Mon Sep 17 00:00:00 2001
+From 27402453f25fbdb5a9fb1a1b88d1c4d9852187d2 Mon Sep 17 00:00:00 2001
From: Alexander Kanavin <alex@linutronix.de>
Date: Sun, 5 Sep 2021 10:44:19 +0200
Subject: [PATCH] lttng-ust-common: link with liburcu explicitly
@@ -7,15 +7,16 @@ Otherwise linking errors are seen on x86-32.
Upstream-Status: Submitted [by email to lttng-dev, Francis, Jonathan]
Signed-off-by: Alexander Kanavin <alex@linutronix.de>
+
---
src/lib/lttng-ust-common/Makefile.am | 1 +
1 file changed, 1 insertion(+)
diff --git a/src/lib/lttng-ust-common/Makefile.am b/src/lib/lttng-ust-common/Makefile.am
-index caeea2b..30febf0 100644
+index 1ccc290..6531fa0 100644
--- a/src/lib/lttng-ust-common/Makefile.am
+++ b/src/lib/lttng-ust-common/Makefile.am
-@@ -15,6 +15,7 @@ liblttng_ust_common_la_SOURCES = \
+@@ -16,6 +16,7 @@ liblttng_ust_common_la_SOURCES = \
liblttng_ust_common_la_LIBADD = \
$(top_builddir)/src/common/libcommon.la \
diff --git a/meta/recipes-kernel/lttng/lttng-ust/0001-python-lttngust-Makefile.am-Add-install-lib-to-setup.patch b/meta/recipes-kernel/lttng/lttng-ust/0001-python-lttngust-Makefile.am-Add-install-lib-to-setup.patch
index fd9b6ea7ff..7c49583e0d 100644
--- a/meta/recipes-kernel/lttng/lttng-ust/0001-python-lttngust-Makefile.am-Add-install-lib-to-setup.patch
+++ b/meta/recipes-kernel/lttng/lttng-ust/0001-python-lttngust-Makefile.am-Add-install-lib-to-setup.patch
@@ -1,6 +1,6 @@
-From dd1fdc841d069dbd4e284f430a88af79de951124 Mon Sep 17 00:00:00 2001
-From: Robert Yang <liezhi.yang@windriver.com>
-Date: Thu, 26 Sep 2019 17:54:00 +0800
+From f65efd963e52de51d35e2e03fb773f33dd64f565 Mon Sep 17 00:00:00 2001
+From: Richard Purdie <richard.purdie@linuxfoundation.org>
+Date: Tue, 20 Feb 2024 12:19:06 +0000
Subject: [PATCH] python-lttngust/Makefile.am: Add --install-lib to setup.py
Otherwise it may install to /usr/lib, but should be /usr/lib64 when cross
@@ -15,15 +15,15 @@ Signed-off-by: Robert Yang <liezhi.yang@windriver.com>
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/src/python-lttngust/Makefile.am b/src/python-lttngust/Makefile.am
-index f95482d..f76d95b 100644
+index d53e21d..283901e 100644
--- a/src/python-lttngust/Makefile.am
+++ b/src/python-lttngust/Makefile.am
-@@ -32,7 +32,7 @@ install-exec-local: build-python-bindings.stamp
+@@ -45,7 +45,7 @@ install-exec-local: build-python-bindings.stamp
if [ "$(DESTDIR)" != "" ]; then \
opts="$$opts --root=$(DESTDIR)"; \
fi; \
-- $(PYTHON) $(builddir)/setup.py install $$opts;
-+ $(PYTHON) $(builddir)/setup.py install $$opts --install-lib=$(pythondir);
+- $(PYTHON) $(builddir)/setup.py install $(PY_INSTALL_OPTS) $$opts;
++ $(PYTHON) $(builddir)/setup.py install $(PY_INSTALL_OPTS) $$opts --install-lib=$(pythondir);
clean-local:
rm -rf $(builddir)/build
diff --git a/meta/recipes-kernel/lttng/lttng-ust_2.13.2.bb b/meta/recipes-kernel/lttng/lttng-ust_2.13.2.bb
deleted file mode 100644
index 516d90a196..0000000000
--- a/meta/recipes-kernel/lttng/lttng-ust_2.13.2.bb
+++ /dev/null
@@ -1,53 +0,0 @@
-SUMMARY = "Linux Trace Toolkit Userspace Tracer 2.x"
-DESCRIPTION = "The LTTng UST 2.x package contains the userspace tracer library to trace userspace codes."
-HOMEPAGE = "http://lttng.org/ust"
-BUGTRACKER = "https://bugs.lttng.org/projects/lttng-ust"
-
-LICENSE = "LGPL-2.1-or-later & MIT & GPL-2.0-only"
-LIC_FILES_CHKSUM = "file://LICENSE;md5=a46577a38ad0c36ff6ff43ccf40c480f"
-
-PYTHON_OPTION = "am_cv_python_pyexecdir='${PYTHON_SITEPACKAGES_DIR}' \
- am_cv_python_pythondir='${PYTHON_SITEPACKAGES_DIR}' \
- PYTHON_INCLUDE='-I${STAGING_INCDIR}/python${PYTHON_BASEVERSION}${PYTHON_ABI}' \
-"
-
-inherit autotools lib_package manpages python3native pkgconfig
-
-include lttng-platforms.inc
-
-EXTRA_OECONF = "--disable-numa"
-CPPFLAGS:append:arm = "${@oe.utils.vartrue('DEBUG_BUILD', '-DUATOMIC_NO_LINK_ERROR', '', d)}"
-
-DEPENDS = "liburcu util-linux"
-RDEPENDS:${PN}-bin = "python3-core"
-
-# For backwards compatibility after rename
-RPROVIDES:${PN} = "lttng2-ust"
-RREPLACES:${PN} = "lttng2-ust"
-RCONFLICTS:${PN} = "lttng2-ust"
-
-PE = "2"
-
-SRC_URI = "https://lttng.org/files/lttng-ust/lttng-ust-${PV}.tar.bz2 \
- file://0001-python-lttngust-Makefile.am-Add-install-lib-to-setup.patch \
- file://0001-lttng-ust-common-link-with-liburcu-explicitly.patch \
- file://0001-Makefile.am-update-rpath-link.patch \
- "
-
-SRC_URI[sha256sum] = "08679a1dfc2c9428885273861628aa5d828972f389a38bb08fdce39adee589b7"
-
-CVE_PRODUCT = "ust"
-
-PACKAGECONFIG[examples] = "--enable-examples, --disable-examples,"
-PACKAGECONFIG[manpages] = "--enable-man-pages, --disable-man-pages, asciidoc-native xmlto-native libxslt-native"
-PACKAGECONFIG[python3-agent] = "--enable-python-agent ${PYTHON_OPTION}, --disable-python-agent, python3, python3"
-
-FILES:${PN} += " ${PYTHON_SITEPACKAGES_DIR}/*"
-FILES:${PN}-staticdev += " ${PYTHON_SITEPACKAGES_DIR}/*.a"
-FILES:${PN}-dev += " ${PYTHON_SITEPACKAGES_DIR}/*.la"
-
-do_install:append() {
- # Patch python tools to use Python 3; they should be source compatible, but
- # still refer to Python 2 in the shebang
- sed -i -e '1s,#!.*python.*,#!${bindir}/python3,' ${D}${bindir}/lttng-gen-tp
-}
diff --git a/meta/recipes-kernel/lttng/lttng-ust_2.13.7.bb b/meta/recipes-kernel/lttng/lttng-ust_2.13.7.bb
new file mode 100644
index 0000000000..9509185bad
--- /dev/null
+++ b/meta/recipes-kernel/lttng/lttng-ust_2.13.7.bb
@@ -0,0 +1,53 @@
+SUMMARY = "Linux Trace Toolkit Userspace Tracer 2.x"
+DESCRIPTION = "The LTTng UST 2.x package contains the userspace tracer library to trace userspace codes."
+HOMEPAGE = "http://lttng.org/ust"
+BUGTRACKER = "https://bugs.lttng.org/projects/lttng-ust"
+
+LICENSE = "LGPL-2.1-or-later & MIT & GPL-2.0-only"
+LIC_FILES_CHKSUM = "file://LICENSE;md5=a46577a38ad0c36ff6ff43ccf40c480f"
+
+PYTHON_OPTION = "am_cv_python_pyexecdir='${PYTHON_SITEPACKAGES_DIR}' \
+ am_cv_python_pythondir='${PYTHON_SITEPACKAGES_DIR}' \
+ PYTHON_INCLUDE='-I${STAGING_INCDIR}/python${PYTHON_BASEVERSION}${PYTHON_ABI}' \
+"
+
+inherit autotools lib_package manpages python3native pkgconfig
+
+include lttng-platforms.inc
+
+EXTRA_OECONF = "--disable-numa"
+CPPFLAGS:append:arm = "${@oe.utils.vartrue('DEBUG_BUILD', '-DUATOMIC_NO_LINK_ERROR', '', d)}"
+
+DEPENDS = "liburcu util-linux"
+RDEPENDS:${PN}-bin = "python3-core"
+
+# For backwards compatibility after rename
+RPROVIDES:${PN} = "lttng2-ust"
+RREPLACES:${PN} = "lttng2-ust"
+RCONFLICTS:${PN} = "lttng2-ust"
+
+PE = "2"
+
+SRC_URI = "https://lttng.org/files/lttng-ust/lttng-ust-${PV}.tar.bz2 \
+ file://0001-python-lttngust-Makefile.am-Add-install-lib-to-setup.patch \
+ file://0001-lttng-ust-common-link-with-liburcu-explicitly.patch \
+ file://0001-Makefile.am-update-rpath-link.patch \
+ "
+
+SRC_URI[sha256sum] = "5fb4f17c307c8c1b79c68561e89be9562d07e7425bf40e728c4d66755342a5eb"
+
+CVE_PRODUCT = "ust"
+
+PACKAGECONFIG[examples] = "--enable-examples, --disable-examples,"
+PACKAGECONFIG[manpages] = "--enable-man-pages, --disable-man-pages, asciidoc-native xmlto-native libxslt-native"
+PACKAGECONFIG[python3-agent] = "--enable-python-agent ${PYTHON_OPTION}, --disable-python-agent, python3, python3"
+
+FILES:${PN} += " ${PYTHON_SITEPACKAGES_DIR}/*"
+FILES:${PN}-staticdev += " ${PYTHON_SITEPACKAGES_DIR}/*.a"
+FILES:${PN}-dev += " ${PYTHON_SITEPACKAGES_DIR}/*.la"
+
+do_install:append() {
+ # Patch python tools to use Python 3; they should be source compatible, but
+ # still refer to Python 2 in the shebang
+ sed -i -e '1s,#!.*python.*,#!${bindir}/python3,' ${D}${bindir}/lttng-gen-tp
+}
diff --git a/meta/recipes-kernel/make-mod-scripts/make-mod-scripts_1.0.bb b/meta/recipes-kernel/make-mod-scripts/make-mod-scripts_1.0.bb
index 0e420a25d9..a91680d497 100644
--- a/meta/recipes-kernel/make-mod-scripts/make-mod-scripts_1.0.bb
+++ b/meta/recipes-kernel/make-mod-scripts/make-mod-scripts_1.0.bb
@@ -3,7 +3,7 @@ HOMEPAGE = "https://www.yoctoproject.org/"
LICENSE = "GPL-2.0-only"
LIC_FILES_CHKSUM = "file://${COREBASE}/meta/files/common-licenses/GPL-2.0-only;md5=801f80980d171dd6425610833a22dbe6"
-inherit kernel-arch
+inherit kernel-arch linux-kernel-base
inherit pkgconfig
PACKAGE_ARCH = "${MACHINE_ARCH}"
@@ -13,7 +13,7 @@ S = "${WORKDIR}"
do_configure[depends] += "virtual/kernel:do_shared_workdir openssl-native:do_populate_sysroot"
do_compile[depends] += "virtual/kernel:do_compile_kernelmodules"
-RDEPENDS:${PN}-dev = ""
+DEV_PKG_DEPENDENCY = ""
DEPENDS += "bc-native bison-native"
DEPENDS += "gmp-native"
@@ -21,13 +21,18 @@ DEPENDS += "gmp-native"
EXTRA_OEMAKE = " HOSTCC="${BUILD_CC} ${BUILD_CFLAGS} ${BUILD_LDFLAGS}" HOSTCPP="${BUILD_CPP}""
EXTRA_OEMAKE += " HOSTCXX="${BUILD_CXX} ${BUILD_CXXFLAGS} ${BUILD_LDFLAGS}" CROSS_COMPILE=${TARGET_PREFIX}"
+KERNEL_LOCALVERSION = "${@get_kernellocalversion_file("${STAGING_KERNEL_BUILDDIR}")}"
+export LOCALVERSION="${KERNEL_LOCALVERSION}"
+
# Build some host tools under work-shared. CC, LD, and AR are probably
# not used, but this is the historical way of invoking "make scripts".
#
do_configure() {
unset CFLAGS CPPFLAGS CXXFLAGS LDFLAGS
for t in prepare scripts_basic scripts; do
- oe_runmake CC="${KERNEL_CC}" LD="${KERNEL_LD}" AR="${KERNEL_AR}" \
+ oe_runmake CC="${KERNEL_CC}" LD="${KERNEL_LD}" \
+ AR="${KERNEL_AR}" OBJCOPY="${KERNEL_OBJCOPY}" \
+ STRIP="${KERNEL_STRIP}" \
-C ${STAGING_KERNEL_DIR} O=${STAGING_KERNEL_BUILDDIR} $t
done
}
diff --git a/meta/recipes-kernel/modutils-initscripts/modutils-initscripts.bb b/meta/recipes-kernel/modutils-initscripts/modutils-initscripts.bb
index 8f4fdd76bf..b630a402ff 100644
--- a/meta/recipes-kernel/modutils-initscripts/modutils-initscripts.bb
+++ b/meta/recipes-kernel/modutils-initscripts/modutils-initscripts.bb
@@ -4,7 +4,6 @@ LICENSE = "MIT"
LIC_FILES_CHKSUM = "file://modutils.sh;beginline=3;endline=3;md5=b2dccaa94b3629a08bfb4f983cad6f89"
SRC_URI = "file://modutils.sh"
-PR = "r7"
S = "${WORKDIR}"
diff --git a/meta/recipes-kernel/perf/perf-perl.inc b/meta/recipes-kernel/perf/perf-perl.inc
index ae77319b20..491f54c328 100644
--- a/meta/recipes-kernel/perf/perf-perl.inc
+++ b/meta/recipes-kernel/perf/perf-perl.inc
@@ -2,6 +2,10 @@ inherit perlnative cpan-base
# Env var which tells perl if it should use host (no) or target (yes) settings
export PERLCONFIGTARGET = "${@is_target(d)}"
-export PERL_INC = "${STAGING_LIBDIR}${PERL_OWN_DIR}/perl/${@get_perl_version(d)}/CORE"
-export PERL_LIB = "${STAGING_LIBDIR}${PERL_OWN_DIR}/perl/${@get_perl_version(d)}"
-export PERL_ARCHLIB = "${STAGING_LIBDIR}${PERL_OWN_DIR}/perl/${@get_perl_version(d)}"
+export PERL_INC = "${STAGING_LIBDIR}${PERL_OWN_DIR}/perl5/${@get_perl_version(d)}/${@get_perl_arch(d)}/CORE"
+export PERL_LIB = "${STAGING_LIBDIR}${PERL_OWN_DIR}/perl5/${@get_perl_version(d)}"
+export PERL_ARCHLIB = "${STAGING_LIBDIR}${PERL_OWN_DIR}/perl5/${@get_perl_version(d)}/${@get_perl_arch(d)}"
+
+# The perl symbols CPPSTDIN and CPPRUN embed the sysroot into the
+# binaries, work needed to remove this
+INSANE_SKIP:${PN}-dbg += "buildpaths"
diff --git a/meta/recipes-kernel/perf/perf.bb b/meta/recipes-kernel/perf/perf.bb
index adefc44eaa..11fa917649 100644
--- a/meta/recipes-kernel/perf/perf.bb
+++ b/meta/recipes-kernel/perf/perf.bb
@@ -9,25 +9,32 @@ HOMEPAGE = "https://perf.wiki.kernel.org/index.php/Main_Page"
LICENSE = "GPL-2.0-only"
-PR = "r9"
-PACKAGECONFIG ??= "scripting tui libunwind"
+PACKAGECONFIG ??= "python tui libunwind libtraceevent"
PACKAGECONFIG[dwarf] = ",NO_DWARF=1"
-PACKAGECONFIG[scripting] = ",NO_LIBPERL=1 NO_LIBPYTHON=1,perl python3"
+PACKAGECONFIG[perl] = ",NO_LIBPERL=1,perl"
+PACKAGECONFIG[python] = ",NO_LIBPYTHON=1,python3 python3-setuptools-native"
# gui support was added with kernel 3.6.35
# since 3.10 libnewt was replaced by slang
# to cover a wide range of kernel we add both dependencies
PACKAGECONFIG[tui] = ",NO_NEWT=1,libnewt slang"
PACKAGECONFIG[libunwind] = ",NO_LIBUNWIND=1 NO_LIBDW_DWARF_UNWIND=1,libunwind"
PACKAGECONFIG[libnuma] = ",NO_LIBNUMA=1"
+PACKAGECONFIG[bfd] = ",NO_LIBBFD=1"
PACKAGECONFIG[systemtap] = ",NO_SDT=1,systemtap"
PACKAGECONFIG[jvmti] = ",NO_JVMTI=1"
# libaudit support would need scripting to be enabled
PACKAGECONFIG[audit] = ",NO_LIBAUDIT=1,audit"
PACKAGECONFIG[manpages] = ",,xmlto-native asciidoc-native"
PACKAGECONFIG[cap] = ",,libcap"
+PACKAGECONFIG[libtraceevent] = ",NO_LIBTRACEEVENT=1,libtraceevent"
+# jevents requires host python for generating a .c file, but is
+# unrelated to the python item.
+PACKAGECONFIG[jevents] = ",NO_JEVENTS=1,python3-native"
# Arm CoreSight
PACKAGECONFIG[coresight] = "CORESIGHT=1,,opencsd"
+PACKAGECONFIG[pfm4] = ",NO_LIBPFM4=1,libpfm4"
+PACKAGECONFIG[babeltrace] = ",NO_LIBBABELTRACE=1,babeltrace"
# libunwind is not yet ported for some architectures
PACKAGECONFIG:remove:arc = "libunwind"
@@ -47,7 +54,7 @@ PROVIDES = "virtual/perf"
inherit linux-kernel-base kernel-arch manpages
# needed for building the tools/perf Python bindings
-inherit ${@bb.utils.contains('PACKAGECONFIG', 'scripting', 'python3targetconfig', '', d)}
+inherit_defer ${@bb.utils.contains('PACKAGECONFIG', 'python', 'python3targetconfig', '', d)}
inherit python3-dir
export PYTHON_SITEPACKAGES_DIR
@@ -57,7 +64,7 @@ export WERROR = "0"
do_populate_lic[depends] += "virtual/kernel:do_shared_workdir"
# needed for building the tools/perf Perl binding
-include ${@bb.utils.contains('PACKAGECONFIG', 'scripting', 'perf-perl.inc', '', d)}
+include ${@bb.utils.contains('PACKAGECONFIG', 'perl', 'perf-perl.inc', '', d)}
inherit kernelsrc
@@ -69,8 +76,18 @@ SPDX_S = "${S}/tools/perf"
# supported kernel.
LDFLAGS="-ldl -lutil"
+# Perf's build system adds its own optimization flags for most TUs,
+# overriding the flags included here. But for some, perf does not add
+# any -O option, so ensure the distro's chosen optimization gets used
+# for those. Since ${SELECTED_OPTIMIZATION} always includes
+# ${DEBUG_FLAGS} which in turn includes ${DEBUG_PREFIX_MAP}, this also
+# ensures perf is built with appropriate -f*-prefix-map options,
+# avoiding the 'buildpaths' QA warning.
+TARGET_CC_ARCH += "${SELECTED_OPTIMIZATION}"
+
EXTRA_OEMAKE = '\
V=1 \
+ VF=1 \
-C ${S}/tools/perf \
O=${B} \
CROSS_COMPILE=${TARGET_PREFIX} \
@@ -80,12 +97,13 @@ EXTRA_OEMAKE = '\
LDSHARED="${CC} -shared" \
AR="${AR}" \
LD="${LD}" \
- EXTRA_CFLAGS="-ldw" \
- YFLAGS='-y --file-prefix-map=${WORKDIR}=/usr/src/debug/${PN}/${EXTENDPE}${PV}-${PR}' \
+ EXTRA_CFLAGS="-ldw -I${S}" \
+ YFLAGS='-y --file-prefix-map=${WORKDIR}=${TARGET_DBGSRC_DIR}' \
EXTRA_LDFLAGS="${PERF_EXTRA_LDFLAGS}" \
perfexecdir=${libexecdir} \
NO_GTK2=1 \
${PACKAGECONFIG_CONFARGS} \
+ PKG_CONFIG=pkg-config \
TMPDIR="${B}" \
LIBUNWIND_DIR=${STAGING_EXECPREFIXDIR} \
'
@@ -101,7 +119,7 @@ EXTRA_OEMAKE += "\
'sharedir=${@os.path.relpath(datadir, prefix)}' \
'mandir=${@os.path.relpath(mandir, prefix)}' \
'infodir=${@os.path.relpath(infodir, prefix)}' \
- ${@bb.utils.contains('PACKAGECONFIG', 'scripting', 'PYTHON=python3 PYTHON_CONFIG=python3-config', '', d)} \
+ ${@bb.utils.contains('PACKAGECONFIG', 'python', 'PYTHON=python3 PYTHON_CONFIG=python3-config', '', d)} \
"
# During do_configure, we might run a 'make clean'. That often breaks
@@ -111,6 +129,10 @@ EXTRA_OEMAKE += "\
# honour a JOBS variable.
EXTRA_OEMAKE:append:task-configure = " JOBS=1"
+# the architectures that need this file can be found in
+# tools/include/uapi/asm/bpf_perf_event.h
+# We are only listing supported arches at the moment
+PERF_BPF_EVENT_SRC ?= '${@bb.utils.contains_any("ARCH", [ "riscv", "arm64" ], "arch/${ARCH}/include/uapi/asm/bpf_perf_event.h", "", d)}'
PERF_SRC ?= "Makefile \
tools/arch \
tools/build \
@@ -120,6 +142,8 @@ PERF_SRC ?= "Makefile \
tools/perf \
tools/scripts \
scripts/ \
+ arch/arm64/tools \
+ ${PERF_BPF_EVENT_SRC} \
arch/${ARCH}/Makefile \
"
@@ -134,6 +158,10 @@ PERF_EXTRA_LDFLAGS:mipsarchn64el = "-m elf64ltsmip"
do_compile() {
# Linux kernel build system is expected to do the right thing
unset CFLAGS
+ test -e ${S}/tools/lib/traceevent/plugins/Makefile && \
+ sed -i -e 's|\$(libdir)/traceevent/plugins|\$(libdir)/traceevent_${KERNEL_VERSION}/plugins|g' ${S}/tools/lib/traceevent/plugins/Makefile
+ test -e ${S}/tools/perf/Makefile.config && \
+ sed -i -e 's|\$(libdir)/traceevent/plugins|\$(libdir)/traceevent_${KERNEL_VERSION}/plugins|g' ${S}/tools/perf/Makefile.config
oe_runmake all
}
@@ -142,8 +170,11 @@ do_install() {
unset CFLAGS
oe_runmake install
# we are checking for this make target to be compatible with older perf versions
- if ${@bb.utils.contains('PACKAGECONFIG', 'scripting', 'true', 'false', d)} && grep -q install-python_ext ${S}/tools/perf/Makefile*; then
+ if ${@bb.utils.contains('PACKAGECONFIG', 'python', 'true', 'false', d)} && grep -q install-python_ext ${S}/tools/perf/Makefile*; then
oe_runmake DESTDIR=${D} install-python_ext
+ if [ -e ${D}${libdir}/python*/site-packages/perf-*/SOURCES.txt ]; then
+ sed -i -e 's#${WORKDIR}##g' ${D}${libdir}/python*/site-packages/perf-*/SOURCES.txt
+ fi
fi
}
@@ -158,7 +189,8 @@ python copy_perf_source_from_kernel() {
src = oe.path.join(src_dir, s)
dest = oe.path.join(dest_dir, s)
if not os.path.exists(src):
- bb.fatal("Path does not exist: %s. Maybe PERF_SRC does not match the kernel version." % src)
+ bb.warn("Path does not exist: %s. Maybe PERF_SRC lists more files than what your kernel version provides and needs." % src)
+ continue
if os.path.isdir(src):
oe.path.copyhardlinktree(src, dest)
else:
@@ -203,14 +235,18 @@ do_configure:prepend () {
if [ -e "${S}/tools/perf/Makefile.perf" ]; then
sed -i -e 's,\ .config-detected, $(OUTPUT)/config-detected,g' \
${S}/tools/perf/Makefile.perf
- sed -i -e "s,prefix='\$(DESTDIR_SQ)/usr'$,prefix='\$(DESTDIR_SQ)/usr' --install-lib='\$(DESTDIR)\$(PYTHON_SITEPACKAGES_DIR)',g" \
+ # Variant with linux-yocto-specific patch
+ sed -i -e "s,prefix='\$(DESTDIR_SQ)/usr'$,prefix='\$(DESTDIR_SQ)/usr' --install-lib='\$(PYTHON_SITEPACKAGES_DIR)' --root='\$(DESTDIR)',g" \
+ ${S}/tools/perf/Makefile.perf
+ # Variant for mainline Linux
+ sed -i -e "s,root='/\$(DESTDIR_SQ)',prefix='\$(DESTDIR_SQ)/usr' --install-lib='\$(PYTHON_SITEPACKAGES_DIR)' --root='/\$(DESTDIR_SQ)',g" \
${S}/tools/perf/Makefile.perf
# backport https://github.com/torvalds/linux/commit/e4ffd066ff440a57097e9140fa9e16ceef905de8
sed -i -e 's,\($(Q)$(SHELL) .$(arch_errno_tbl).\) $(CC) $(arch_errno_hdr_dir),\1 $(firstword $(CC)) $(arch_errno_hdr_dir),g' \
${S}/tools/perf/Makefile.perf
fi
sed -i -e "s,--root='/\$(DESTDIR_SQ)',--prefix='\$(DESTDIR_SQ)/usr' --install-lib='\$(DESTDIR)\$(PYTHON_SITEPACKAGES_DIR)',g" \
- ${S}/tools/perf/Makefile*
+ ${S}/tools/perf/Makefile
if [ -e "${S}/tools/build/Makefile.build" ]; then
sed -i -e 's,\ .config-detected, $(OUTPUT)/config-detected,g' \
@@ -227,6 +263,15 @@ do_configure:prepend () {
# reproducible.
sed -i -e 's,$(call get-executable-or-default\,PYTHON\,$(PYTHON_AUTO)),$(notdir $(call get-executable-or-default\,PYTHON\,$(PYTHON_AUTO))),g' \
${S}/tools/perf/Makefile.config
+ # The same line is in older releases, but looking explicitly for Python 2
+ sed -i -e 's,$(call get-executable-or-default\,PYTHON\,$(PYTHON2)),$(notdir $(call get-executable-or-default\,PYTHON\,$(PYTHON2))),g' \
+ ${S}/tools/perf/Makefile.config
+
+ # likewise with this substitution. Kernels with commit 18f2967418d031a39
+ # [perf tools: Use Python devtools for version autodetection rather than runtime]
+ # need this substitution for reproducibility.
+ sed -i -e 's,$(call get-executable-or-default\,PYTHON\,$(subst -config\,\,$(PYTHON_AUTO))),$(notdir $(call get-executable-or-default\,PYTHON\,$(subst -config\,\,$(PYTHON_AUTO)))),g' \
+ ${S}/tools/perf/Makefile.config
# The following line:
# srcdir_SQ = $(patsubst %tools/perf,tools/perf,$(subst ','\'',$(srcdir))),
@@ -235,6 +280,9 @@ do_configure:prepend () {
# change the Makefile line to remove everything before 'tools/perf'
sed -i -e "s%srcdir_SQ = \$(subst ','\\\'',\$(srcdir))%srcdir_SQ = \$(patsubst \%tools/perf,tools/perf,\$(subst ','\\\'',\$(srcdir)))%g" \
${S}/tools/perf/Makefile.config
+ # Avoid hardcoded path to python-native
+ sed -i -e 's#\(PYTHON_WORD := \)$(call shell-wordify,$(PYTHON))#\1 python3#g' \
+ ${S}/tools/perf/Makefile.config
fi
if [ -e "${S}/tools/perf/tests/Build" ]; then
# OUTPUT is the full path, we have python on the path so we remove it from the
@@ -260,6 +308,20 @@ do_configure:prepend () {
sed -i -e "s,$target,$replacement1$replacement2$replacement3,g" \
"${S}/tools/perf/pmu-events/Build"
fi
+ if [ -e "${S}/tools/perf/pmu-events/jevents.py" ]; then
+ sed -i -e "s#os.scandir(path)#sorted(os.scandir(path), key=lambda e: e.name)#g" \
+ "${S}/tools/perf/pmu-events/jevents.py"
+ fi
+ if [ -e "${S}/tools/perf/arch/arm64/Makefile" ]; then
+ sed -i 's,sysdef := $(srctree)/,sysdef := ,' ${S}/tools/perf/arch/arm64/Makefile
+ sed -i 's,$(incpath) $(sysdef),$(incpath) $(srctree)/$(sysdef) $(sysdef),' ${S}/tools/perf/arch/arm64/Makefile
+ fi
+ if [ -e "${S}/tools/perf/arch/arm64/entry/syscalls/mksyscalltbl" ]; then
+ if ! grep -q input_rel ${S}/tools/perf/arch/arm64/entry/syscalls/mksyscalltbl; then
+ sed -i 's,input=$4,input=$4\ninput_rel=$5,' ${S}/tools/perf/arch/arm64/entry/syscalls/mksyscalltbl
+ fi
+ sed -i 's,#include \\"\$input\\",#include \\"\$input_rel\\",' ${S}/tools/perf/arch/arm64/entry/syscalls/mksyscalltbl
+ fi
# end reproducibility substitutions
# We need to ensure the --sysroot option in CC is preserved
@@ -267,6 +329,7 @@ do_configure:prepend () {
sed -i 's,CC = $(CROSS_COMPILE)gcc,#CC,' ${S}/tools/perf/Makefile.perf
sed -i 's,AR = $(CROSS_COMPILE)ar,#AR,' ${S}/tools/perf/Makefile.perf
sed -i 's,LD = $(CROSS_COMPILE)ld,#LD,' ${S}/tools/perf/Makefile.perf
+ sed -i 's,PKG_CONFIG = $(CROSS_COMPILE)pkg-config,#PKG_CONFIG,' ${S}/tools/perf/Makefile.perf
fi
if [ -e "${S}/tools/lib/api/Makefile" ]; then
sed -i 's,CC = $(CROSS_COMPILE)gcc,#CC,' ${S}/tools/lib/api/Makefile
@@ -283,6 +346,9 @@ do_configure:prepend () {
if [ -e "${S}/tools/build/Makefile.feature" ]; then
sed -i 's,CFLAGS=,CC="\$(CC)" CFLAGS=,' ${S}/tools/build/Makefile.feature
fi
+ # The libperl feature check produces fatal warnings due to -Werror being
+ # used, silence enough errors that the check passes.
+ sed -i 's/\(FLAGS_PERL_EMBED=.*\)/\1 -Wno-error=unused-function -Wno-error=attributes/' ${S}/tools/build/feature/Makefile
# 3.17-rc1+ has a include issue for arm/powerpc. Temporarily sed in the appropriate include
if [ -e "${S}/tools/perf/arch/$ARCH/util/skip-callchain-idx.c" ]; then
@@ -325,13 +391,14 @@ RDEPENDS:${PN} += "elfutils bash"
RDEPENDS:${PN}-archive =+ "bash"
RDEPENDS:${PN}-python =+ "bash python3 python3-modules ${@bb.utils.contains('PACKAGECONFIG', 'audit', 'audit-python', '', d)}"
RDEPENDS:${PN}-perl =+ "bash perl perl-modules"
-RDEPENDS:${PN}-tests =+ "python3 bash"
-
-RSUGGESTS_SCRIPTING = "${@bb.utils.contains('PACKAGECONFIG', 'scripting', '${PN}-perl ${PN}-python', '',d)}"
-RSUGGESTS:${PN} += "${PN}-archive ${PN}-tests ${RSUGGESTS_SCRIPTING}"
+RDEPENDS:${PN}-tests =+ "python3 bash perl"
+RSUGGESTS:${PN} += "${PN}-archive ${PN}-tests \
+ ${@bb.utils.contains('PACKAGECONFIG', 'perl', '${PN}-perl', '', d)} \
+ ${@bb.utils.contains('PACKAGECONFIG', 'python', '${PN}-python', '', d)} \
+ "
FILES_SOLIBSDEV = ""
-FILES:${PN} += "${libexecdir}/perf-core ${exec_prefix}/libexec/perf-core ${libdir}/traceevent ${libdir}/libperf-jvmti.so"
+FILES:${PN} += "${libexecdir}/perf-core ${exec_prefix}/libexec/perf-core ${libdir}/traceevent* ${libdir}/libperf-jvmti.so"
FILES:${PN}-archive = "${libdir}/perf/perf-core/perf-archive"
FILES:${PN}-tests = "${libdir}/perf/perf-core/tests ${libexecdir}/perf-core/tests"
FILES:${PN}-python = " \
@@ -340,6 +407,16 @@ FILES:${PN}-python = " \
"
FILES:${PN}-perl = "${libexecdir}/perf-core/scripts/perl"
-
-INHIBIT_PACKAGE_DEBUG_SPLIT="1"
DEBUG_OPTIMIZATION:append = " -Wno-error=maybe-uninitialized"
+
+PACKAGESPLITFUNCS =+ "perf_fix_sources"
+
+perf_fix_sources () {
+ for f in util/parse-events-flex.h util/parse-events-flex.c util/pmu-flex.c \
+ util/pmu-flex.h util/expr-flex.h util/expr-flex.c; do
+ f=${PKGD}${TARGET_DBGSRC_DIR}/$f
+ if [ -e $f ]; then
+ sed -i -e 's#${S}/##g' $f
+ fi
+ done
+}
diff --git a/meta/recipes-kernel/perf/perf/sort-pmuevents.py b/meta/recipes-kernel/perf/perf/sort-pmuevents.py
index 09ba3328a7..0a87e553ab 100755
--- a/meta/recipes-kernel/perf/perf/sort-pmuevents.py
+++ b/meta/recipes-kernel/perf/perf/sort-pmuevents.py
@@ -36,10 +36,10 @@ with open(infile, 'r') as file:
preamble_regex = re.compile( '^(.*?)^(struct|const struct|static struct|static const struct)', re.MULTILINE | re.DOTALL )
preamble = re.search( preamble_regex, data )
-struct_block_regex = re.compile( '^(struct|const struct|static struct|static const struct).*?(\w+) (.*?)\[\] = {(.*?)^};', re.MULTILINE | re.DOTALL )
-field_regex = re.compile( '{.*?},', re.MULTILINE | re.DOTALL )
-cpuid_regex = re.compile( '\.cpuid = (.*?),', re.MULTILINE | re.DOTALL )
-name_regex = re.compile( '\.name = (.*?),', re.MULTILINE | re.DOTALL )
+struct_block_regex = re.compile(r'^(struct|const struct|static struct|static const struct).*?(\w+) (.*?)\[\] = {(.*?)^};', re.MULTILINE | re.DOTALL )
+field_regex = re.compile(r'{.*?},', re.MULTILINE | re.DOTALL )
+cpuid_regex = re.compile(r'\.cpuid = (.*?),', re.MULTILINE | re.DOTALL )
+name_regex = re.compile(r'\.name = (.*?),', re.MULTILINE | re.DOTALL )
# create a dictionary structure to store all the structs, their
# types and then their fields.
@@ -62,7 +62,10 @@ for struct in re.findall( struct_block_regex, data ):
#print( " name found: %s" % name.group(1) )
entry_dict[struct[2]]['fields'][name.group(1)] = entry
- if not entry_dict[struct[2]]['fields']:
+ # unmatched entries are most likely array terminators and
+ # should end up as the last element in the sorted list, which
+ # is achieved by using '0' as the key
+ if not cpuid and not name:
entry_dict[struct[2]]['fields']['0'] = entry
# created ordered dictionaries from the captured values. These are ordered by
diff --git a/meta/recipes-kernel/powertop/powertop/0001-src-fix-compatibility-with-ncurses-6.3.patch b/meta/recipes-kernel/powertop/powertop/0001-src-fix-compatibility-with-ncurses-6.3.patch
deleted file mode 100644
index 84b05ac971..0000000000
--- a/meta/recipes-kernel/powertop/powertop/0001-src-fix-compatibility-with-ncurses-6.3.patch
+++ /dev/null
@@ -1,52 +0,0 @@
-From 76d3f0851520bc8488e432f423941f1e72cc7405 Mon Sep 17 00:00:00 2001
-From: Alexander Kanavin <alex@linutronix.de>
-Date: Mon, 25 Oct 2021 17:47:23 +0200
-Subject: [PATCH] src: fix compatibility with ncurses 6.3
-
-Upstream-Status: Submitted [https://github.com/fenrus75/powertop/pull/92]
-Signed-off-by: Alexander Kanavin <alex@linutronix.de>
----
- src/devices/devfreq.cpp | 2 +-
- src/display.cpp | 2 +-
- src/lib.cpp | 2 +-
- 3 files changed, 3 insertions(+), 3 deletions(-)
-
-diff --git a/src/devices/devfreq.cpp b/src/devices/devfreq.cpp
-index 0509d0f..b194ac4 100644
---- a/src/devices/devfreq.cpp
-+++ b/src/devices/devfreq.cpp
-@@ -297,7 +297,7 @@ void display_devfreq_devices(void)
- df->fill_freq_utilization(j, buf);
- strcat(fline, buf);
- strcat(fline, "\n");
-- wprintw(win, fline);
-+ wprintw(win, "%s", fline);
- }
- wprintw(win, "\n");
- }
-diff --git a/src/display.cpp b/src/display.cpp
-index 7131144..cc03919 100644
---- a/src/display.cpp
-+++ b/src/display.cpp
-@@ -125,7 +125,7 @@ void show_tab(unsigned int tab)
-
- c = bottom_lines[tab_names[tab]].c_str();
- if (c && strlen(c) > 0)
-- mvwprintw(bottom_line, 0,0, c);
-+ mvwprintw(bottom_line, 0,0, "%s", c);
- else
- mvwprintw(bottom_line, 0, 0,
- "<ESC> %s | <TAB> / <Shift + TAB> %s | ", _("Exit"),
-diff --git a/src/lib.cpp b/src/lib.cpp
-index 5e48f37..5cd1c4a 100644
---- a/src/lib.cpp
-+++ b/src/lib.cpp
-@@ -583,7 +583,7 @@ void ui_notify_user_ncurses(const char *frmt, ...)
- * buffer */
- vsnprintf(notify, UI_NOTIFY_BUFF_SZ - 1, frmt, list);
- va_end(list);
-- mvprintw(1, 0, notify);
-+ mvprintw(1, 0, "%s", notify);
- attroff(COLOR_PAIR(1));
- }
-
diff --git a/meta/recipes-kernel/powertop/powertop_2.14.bb b/meta/recipes-kernel/powertop/powertop_2.14.bb
deleted file mode 100644
index a2f30040b5..0000000000
--- a/meta/recipes-kernel/powertop/powertop_2.14.bb
+++ /dev/null
@@ -1,25 +0,0 @@
-SUMMARY = "Power usage tool"
-DESCRIPTION = "Linux tool to diagnose issues with power consumption and power management."
-HOMEPAGE = "https://01.org/powertop/"
-BUGTRACKER = "https://app.devzing.com/powertopbugs/bugzilla"
-DEPENDS = "ncurses libnl pciutils autoconf-archive"
-LICENSE = "GPL-2.0-only"
-LIC_FILES_CHKSUM = "file://COPYING;md5=12f884d2ae1ff87c09e5b7ccc2c4ca7e"
-
-SRC_URI = "git://github.com/fenrus75/powertop;protocol=https;branch=master \
- file://0001-wakeup_xxx.h-include-limits.h.patch \
- file://0001-src-fix-compatibility-with-ncurses-6.3.patch \
- "
-SRCREV = "52f022f9bbe6e060fba11701d657a8d9762702ba"
-
-S = "${WORKDIR}/git"
-
-LDFLAGS:append = " -pthread"
-
-inherit autotools gettext pkgconfig bash-completion
-
-inherit update-alternatives
-ALTERNATIVE:${PN} = "powertop"
-ALTERNATIVE_TARGET[powertop] = "${sbindir}/powertop"
-ALTERNATIVE_LINK_NAME[powertop] = "${sbindir}/powertop"
-ALTERNATIVE_PRIORITY = "100"
diff --git a/meta/recipes-kernel/powertop/powertop_2.15.bb b/meta/recipes-kernel/powertop/powertop_2.15.bb
new file mode 100644
index 0000000000..65c6d0fcd3
--- /dev/null
+++ b/meta/recipes-kernel/powertop/powertop_2.15.bb
@@ -0,0 +1,24 @@
+SUMMARY = "Power usage tool"
+DESCRIPTION = "Linux tool to diagnose issues with power consumption and power management."
+HOMEPAGE = "https://01.org/powertop/"
+BUGTRACKER = "https://app.devzing.com/powertopbugs/bugzilla"
+DEPENDS = "ncurses libnl pciutils autoconf-archive-native"
+LICENSE = "GPL-2.0-only"
+LIC_FILES_CHKSUM = "file://COPYING;md5=12f884d2ae1ff87c09e5b7ccc2c4ca7e"
+
+SRC_URI = "git://github.com/fenrus75/powertop;protocol=https;branch=master \
+ file://0001-wakeup_xxx.h-include-limits.h.patch \
+ "
+SRCREV = "d51ad395436d4d1dcc3ca46e1519ffeb475bf651"
+
+S = "${WORKDIR}/git"
+
+LDFLAGS:append = " -pthread"
+
+inherit autotools gettext pkgconfig bash-completion
+
+inherit update-alternatives
+ALTERNATIVE:${PN} = "powertop"
+ALTERNATIVE_TARGET[powertop] = "${sbindir}/powertop"
+ALTERNATIVE_LINK_NAME[powertop] = "${sbindir}/powertop"
+ALTERNATIVE_PRIORITY = "100"
diff --git a/meta/recipes-kernel/systemtap/systemtap-native_git.bb b/meta/recipes-kernel/systemtap/systemtap-native_git.bb
index 19cc1cf0f0..2690b259c8 100644
--- a/meta/recipes-kernel/systemtap/systemtap-native_git.bb
+++ b/meta/recipes-kernel/systemtap/systemtap-native_git.bb
@@ -1,6 +1,6 @@
require systemtap_git.bb
-inherit native
+inherit_defer native
addtask addto_recipe_sysroot after do_populate_sysroot before do_build
diff --git a/meta/recipes-kernel/systemtap/systemtap-uprobes_git.bb b/meta/recipes-kernel/systemtap/systemtap-uprobes_git.bb
deleted file mode 100644
index 2181e45a8d..0000000000
--- a/meta/recipes-kernel/systemtap/systemtap-uprobes_git.bb
+++ /dev/null
@@ -1,40 +0,0 @@
-SUMMARY = "UProbes kernel module for SystemTap"
-HOMEPAGE = "https://sourceware.org/systemtap/"
-require systemtap_git.inc
-
-DEPENDS = "systemtap virtual/kernel"
-
-# On systems without CONFIG_UTRACE, this package is empty.
-ALLOW_EMPTY:${PN} = "1"
-
-inherit module-base gettext
-
-FILESEXTRAPATHS =. "${FILE_DIRNAME}/systemtap:"
-
-FILES:${PN} += "${datadir}/systemtap/runtime/uprobes"
-
-# Compile and install the uprobes kernel module on machines with utrace
-# support. Note that staprun expects it in the systemtap/runtime directory,
-# not in /lib/modules.
-do_compile() {
- if grep -q "CONFIG_UTRACE=y" ${STAGING_KERNEL_BUILDDIR}/.config
- then
- unset CFLAGS CPPFLAGS CXXFLAGS LDFLAGS CC LD CPP
- oe_runmake CC="${KERNEL_CC}" LD="${KERNEL_LD}" \
- AR="${KERNEL_AR}" \
- -C ${STAGING_KERNEL_DIR} scripts
- oe_runmake KDIR=${STAGING_KERNEL_DIR} \
- M="${S}/runtime/uprobes/" \
- CC="${KERNEL_CC}" LD="${KERNEL_LD}" \
- AR="${KERNEL_AR}" \
- -C "${S}/runtime/uprobes/"
- fi
-}
-
-do_install() {
- if [ -e "${S}/runtime/uprobes/uprobes.ko" ]
- then
- install -d ${D}${datadir}/systemtap/runtime/uprobes/
- install -m 0644 ${S}/runtime/uprobes/uprobes.ko ${D}${datadir}/systemtap/runtime/uprobes/
- fi
-}
diff --git a/meta/recipes-kernel/systemtap/systemtap/0001-Makefile.am-remove-runtime-linux-uprobes-and-runtime.patch b/meta/recipes-kernel/systemtap/systemtap/0001-Makefile.am-remove-runtime-linux-uprobes-and-runtime.patch
new file mode 100644
index 0000000000..2d46a3962d
--- /dev/null
+++ b/meta/recipes-kernel/systemtap/systemtap/0001-Makefile.am-remove-runtime-linux-uprobes-and-runtime.patch
@@ -0,0 +1,40 @@
+From 5a01e28bd806326b2143e3e6bb28d4780c5d879d Mon Sep 17 00:00:00 2001
+From: Victor Kamensky <victor.kamensky7@gmail.com>
+Date: Sun, 3 Dec 2023 18:40:05 -0800
+Subject: [PATCH] Makefile.am: remove runtime/linux/uprobes and
+ runtime/linux/uprobes2 install
+
+"PR30434 continuation: Removed old uprobes, uprobes2 implementation,
+uprobes-inc.h & any mentions of CONFIG_UTRACE." commit removed uprobes,
+and uprobes2 sources and directories, but Makefile.am still tries to
+install them. In fact after failing to 'cd' into runtime/linux/uprobes
+directory it copies top level *.[ch] files into
+${prefix}/share/systemtap/runtime/linux/uprobes directory.
+
+The issue was caught by OpenEmbedded project do_package_qa checks.
+
+Signed-off-by: Victor Kamensky <victor.kamensky7@gmail.com>
+
+Upstream-Status: Submitted [https://sourceware.org/pipermail/systemtap/2023q4/027880.html]
+---
+ Makefile.am | 4 ----
+ 1 file changed, 4 deletions(-)
+
+diff --git a/Makefile.am b/Makefile.am
+index 5737c6b20..2ba896088 100644
+--- a/Makefile.am
++++ b/Makefile.am
+@@ -277,10 +277,6 @@ endif
+ do $(INSTALL_DATA) -D $$f $(DESTDIR)$(pkgdatadir)/runtime/softfloat/$$f; done)
+ (cd $(srcdir)/runtime/linux; for f in *.[ch]; \
+ do $(INSTALL_DATA) -D $$f $(DESTDIR)$(pkgdatadir)/runtime/linux/$$f; done)
+- (cd $(srcdir)/runtime/linux/uprobes; for f in Makefile *.[ch]; \
+- do $(INSTALL_DATA) -D $$f $(DESTDIR)$(pkgdatadir)/runtime/linux/uprobes/$$f; done)
+- (cd $(srcdir)/runtime/linux/uprobes2; for f in *.[ch]; \
+- do $(INSTALL_DATA) -D $$f $(DESTDIR)$(pkgdatadir)/runtime/linux/uprobes2/$$f; done)
+ (cd $(srcdir)/tapset; find . \( -name '*.stp' -o -name '*.stpm' -o -name README \) -print \
+ | while read f; do $(INSTALL_DATA) -D $$f $(DESTDIR)$(pkgdatadir)/tapset/$$f; done)
+ (cd $(srcdir)/testsuite/systemtap.examples; find . -type f -print \
+--
+2.31.1
+
diff --git a/meta/recipes-kernel/systemtap/systemtap/0001-bpf-translate.cxx-fix-build-against-upcoming-gcc-14-.patch b/meta/recipes-kernel/systemtap/systemtap/0001-bpf-translate.cxx-fix-build-against-upcoming-gcc-14-.patch
new file mode 100644
index 0000000000..e3d94d9312
--- /dev/null
+++ b/meta/recipes-kernel/systemtap/systemtap/0001-bpf-translate.cxx-fix-build-against-upcoming-gcc-14-.patch
@@ -0,0 +1,40 @@
+From d42139cf9cd26d0c0363fcfe007716baeb8de517 Mon Sep 17 00:00:00 2001
+From: Sergei Trofimovich <slyich@gmail.com>
+Date: Fri, 22 Dec 2023 19:42:38 +0000
+Subject: [PATCH] bpf-translate.cxx: fix build against upcoming `gcc-14`
+ (`-Werror=calloc-transposed-args`)
+
+`gcc-14` added a new `-Wcalloc-transposed-args` warning recently. It
+detected minor infelicity in `calloc()` API usage in `systemtap`:
+
+ bpf-translate.cxx: In function 'bpf::BPF_Section* bpf::output_probe(BPF_Output&, program&, const std::string&, unsigned int)':
+ bpf-translate.cxx:5044:39: error: 'void* calloc(size_t, size_t)' sizes specified with 'sizeof' in the earlier argument and not in the later argument [-Werror=calloc-transposed-args]
+ 5044 | bpf_insn *buf = (bpf_insn*) calloc (sizeof(bpf_insn), ninsns);
+ | ^~~~~~~~~~~~~~~~
+ bpf-translate.cxx:5044:39: note: earlier argument should specify number of elements, later size of each element
+
+Upstream-Status: Backport [https://sourceware.org/git/?p=systemtap.git;a=commit;h=d42139cf9cd26d0c0363fcfe007716baeb8de517]
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+---
+ bpf-translate.cxx | 4 ++--
+ 1 file changed, 2 insertions(+), 2 deletions(-)
+
+diff --git a/bpf-translate.cxx b/bpf-translate.cxx
+index 1a9302463..aa8ef65ce 100644
+--- a/bpf-translate.cxx
++++ b/bpf-translate.cxx
+@@ -5041,9 +5041,9 @@ output_probe(BPF_Output &eo, program &prog,
+ }
+ }
+
+- bpf_insn *buf = (bpf_insn*) calloc (sizeof(bpf_insn), ninsns);
++ bpf_insn *buf = (bpf_insn*) calloc (ninsns, sizeof(bpf_insn));
+ assert (buf);
+- Elf64_Rel *rel = (Elf64_Rel*) calloc (sizeof(Elf64_Rel), nreloc);
++ Elf64_Rel *rel = (Elf64_Rel*) calloc (nreloc, sizeof(Elf64_Rel));
+ assert (rel);
+
+ unsigned i = 0, r = 0;
+--
+2.43.0
+
diff --git a/meta/recipes-kernel/systemtap/systemtap/0001-configure.ac-fix-broken-libdebuginfod-library-auto-d.patch b/meta/recipes-kernel/systemtap/systemtap/0001-configure.ac-fix-broken-libdebuginfod-library-auto-d.patch
new file mode 100644
index 0000000000..98641826f6
--- /dev/null
+++ b/meta/recipes-kernel/systemtap/systemtap/0001-configure.ac-fix-broken-libdebuginfod-library-auto-d.patch
@@ -0,0 +1,51 @@
+From 3913ad3e28a19811e1b52338112344a487057e4f Mon Sep 17 00:00:00 2001
+From: Victor Kamensky <victor.kamensky7@gmail.com>
+Date: Mon, 18 Dec 2023 03:13:38 +0000
+Subject: [PATCH 1/2] configure.ac: fix broken libdebuginfod library auto
+ detection
+
+After 2e67b053e3796ee7cf29a39f9698729b52078406 "configury: rework debuginfod searches"
+commit, libdebuginfod.so library auto detection is broken. It was reported by Martin Jansa
+on openembedded-core mailing list [1].
+
+Currently configure.ac does "AC_DEFINE([HAVE_LIBDEBUGINFOD], [1] ..." as long as
+no --without-debuginfod option is passed, regardless PKG_CHECK_MODULES check result.
+It seems to be bad copy/paste. Address the issue by moving the AC_DEFINE back to
+PKG_CHECK_MODULES action-if-found block.
+
+To reproduce the issue on FC system, one can do the following
+"sudo dnf remove elfutils-debuginfod-client-devel" and then try to build SystemTap
+util.cxx will fail to compile because of missing elfutils/debuginfod.h because
+config.h will have "#define HAVE_LIBDEBUGINFOD 1", while config.log and configure
+output indicates that check for libdebuginfod library failed.
+
+[1] https://lists.openembedded.org/g/openembedded-core/message/192109?p=%2C%2C%2C20%2C0%2C0%2C0%3A%3Acreated%2C0%2Csystemtap%2C20%2C2%2C0%2C102987514
+
+Upstream-Status: Submitted [https://sourceware.org/pipermail/systemtap/2023q4/027914.html]
+Signed-off-by: Victor Kamensky <victor.kamensky7@gmail.com>
+---
+ configure.ac | 5 ++---
+ 1 file changed, 2 insertions(+), 3 deletions(-)
+
+diff --git a/configure.ac b/configure.ac
+index d9559c5c3..18cd7f84a 100644
+--- a/configure.ac
++++ b/configure.ac
+@@ -219,12 +219,11 @@ dnl take the user at his or her word
+ elif test "x$with_debuginfod" != xno; then
+ dnl check in the system pkgconfig
+ PKG_CHECK_MODULES([debuginfod], [libdebuginfod >= 0.179],
+- [have_debuginfod=1],
++ [have_debuginfod=1
++ AC_DEFINE([HAVE_LIBDEBUGINFOD], [1], [Define to 1 if debuginfod is enabled.])],
+ [if test "x$with_debuginfod" = xyes; then
+ AC_MSG_ERROR(["--with-debuginfod was given, but libdebuginfod is missing or unusable."])
+ fi])
+- AC_DEFINE([HAVE_LIBDEBUGINFOD], [1], [Define to 1 if debuginfod is enabled.])
+- AC_MSG_RESULT([yes])
+ else
+ AC_MSG_RESULT([no])
+ fi
+--
+2.31.1
+
diff --git a/meta/recipes-kernel/systemtap/systemtap/0001-prerelease-datestamp-fixes.patch b/meta/recipes-kernel/systemtap/systemtap/0001-prerelease-datestamp-fixes.patch
new file mode 100644
index 0000000000..afdc10a3fe
--- /dev/null
+++ b/meta/recipes-kernel/systemtap/systemtap/0001-prerelease-datestamp-fixes.patch
@@ -0,0 +1,23 @@
+From cfc2c1d53924face11e3fab78ded61c359778eb9 Mon Sep 17 00:00:00 2001
+From: "Frank Ch. Eigler" <fche@redhat.com>
+Date: Sat, 4 Nov 2023 12:19:59 -0400
+Subject: [PATCH] prerelease datestamp fixes
+
+Upstream-Status: Backport
+---
+ NEWS | 2 +-
+ 1 file changed, 1 insertion(+), 1 deletion(-)
+
+diff --git a/NEWS b/NEWS
+index c7f31e8b7..f0f9056c4 100644
+--- a/NEWS
++++ b/NEWS
+@@ -1,4 +1,4 @@
+-* What's new in version 5.0, 2023-11-03
++* What's new in version 5.0, 2023-11-04
+
+ - Performance improvements in uprobe registration and module startup.
+
+--
+2.31.1
+
diff --git a/meta/recipes-kernel/systemtap/systemtap/0001-staprun-fix-build-against-upcoming-gcc-14-Werror-cal.patch b/meta/recipes-kernel/systemtap/systemtap/0001-staprun-fix-build-against-upcoming-gcc-14-Werror-cal.patch
new file mode 100644
index 0000000000..22578fb3f6
--- /dev/null
+++ b/meta/recipes-kernel/systemtap/systemtap/0001-staprun-fix-build-against-upcoming-gcc-14-Werror-cal.patch
@@ -0,0 +1,36 @@
+From 52596f023652114642faba5726c99488529029ce Mon Sep 17 00:00:00 2001
+From: Sergei Trofimovich <slyich@gmail.com>
+Date: Thu, 21 Dec 2023 10:00:06 +0000
+Subject: [PATCH] staprun: fix build against upcoming `gcc-14`
+ (`-Werror=calloc-transposed-args`)
+
+`gcc-14` added a new `-Wcalloc-transposed-args` warning recently. It
+detected minor infelicity in `calloc()` API usage in `systemtap`:
+
+ staprun.c: In function 'main':
+ staprun.c:550:50: error: 'calloc' sizes specified with 'sizeof' in the earlier argument and not in the later argument [-Werror=calloc-transposed-args]
+ 550 | char ** new_argv = calloc(sizeof(char *),argc+2);
+ | ^~~~
+
+Upstream-Status: Backport [https://sourceware.org/git/?p=systemtap.git;a=commit;h=52596f023652114642faba5726c99488529029ce]
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+---
+ staprun/staprun.c | 2 +-
+ 1 file changed, 1 insertion(+), 1 deletion(-)
+
+diff --git a/staprun/staprun.c b/staprun/staprun.c
+index 8437f3af6..d1b0b221b 100644
+--- a/staprun/staprun.c
++++ b/staprun/staprun.c
+@@ -547,7 +547,7 @@ int main(int argc, char **argv)
+ us to extend argv[], with all the C fun that entails. */
+ #ifdef HAVE_OPENAT
+ if (relay_basedir_fd >= 0) {
+- char ** new_argv = calloc(sizeof(char *),argc+2);
++ char ** new_argv = calloc(argc+2, sizeof(char *));
+ const int new_Foption_size = 10; /* -FNNNNN */
+ char * new_Foption = malloc(new_Foption_size);
+ int i;
+--
+2.43.0
+
diff --git a/meta/recipes-kernel/systemtap/systemtap_git.bb b/meta/recipes-kernel/systemtap/systemtap_git.bb
index 072fcb310a..68f5c76428 100644
--- a/meta/recipes-kernel/systemtap/systemtap_git.bb
+++ b/meta/recipes-kernel/systemtap/systemtap_git.bb
@@ -24,15 +24,19 @@ STAP_DOCS ?= "--disable-docs --disable-publican --disable-refdocs"
EXTRA_OECONF += "${STAP_DOCS} "
-PACKAGECONFIG ??= "translator sqlite monitor python3-probes"
+PACKAGECONFIG ??= "translator sqlite monitor python3-probes ${@bb.utils.filter('DISTRO_FEATURES', 'debuginfod', d)}"
PACKAGECONFIG[translator] = "--enable-translator,--disable-translator,boost,bash"
PACKAGECONFIG[libvirt] = "--enable-libvirt,--disable-libvirt,libvirt"
PACKAGECONFIG[sqlite] = "--enable-sqlite,--disable-sqlite,sqlite3"
PACKAGECONFIG[monitor] = "--enable-monitor,--disable-monitor,ncurses json-c"
PACKAGECONFIG[python3-probes] = "--with-python3-probes,--without-python3-probes,python3-setuptools-native"
+PACKAGECONFIG[debuginfod] = "--with-debuginfod, --without-debuginfod"
inherit autotools gettext pkgconfig systemd
-inherit ${@bb.utils.contains('PACKAGECONFIG', 'python3-probes', 'setuptools3-base', '', d)}
+inherit_defer ${@bb.utils.contains('PACKAGECONFIG', 'python3-probes', 'setuptools3-base', '', d)}
+
+# | ../git/elaborate.cxx:2601:21: error: storing the address of local variable 'sym' in '*s.systemtap_session::symbol_resolver' [-Werror=dangling-pointer=]
+CXXFLAGS += "-Wno-dangling-pointer"
# exporter comes with python3-probes
PACKAGES =+ "${PN}-exporter"
diff --git a/meta/recipes-kernel/systemtap/systemtap_git.inc b/meta/recipes-kernel/systemtap/systemtap_git.inc
index 2b79aa8fca..c574bcb2ba 100644
--- a/meta/recipes-kernel/systemtap/systemtap_git.inc
+++ b/meta/recipes-kernel/systemtap/systemtap_git.inc
@@ -1,12 +1,17 @@
LICENSE = "GPL-2.0-only"
LIC_FILES_CHKSUM = "file://COPYING;md5=b234ee4d69f5fce4486a80fdaf4a4263"
-SRCREV = "0c335a75a789ff44b514e567d458881e15cc283d"
-PV = "4.7"
+SRCREV = "e72dc118e563c645d93a1a2d771e8d90e8fec1ae"
+PV = "5.0"
-SRC_URI = "git://sourceware.org/git/systemtap.git;branch=master \
+SRC_URI = "git://sourceware.org/git/systemtap.git;branch=master;protocol=https \
file://0001-Do-not-let-configure-write-a-python-location-into-th.patch \
file://0001-Install-python-modules-to-correct-library-dir.patch \
file://0001-staprun-stapbpf-don-t-support-installing-a-non-root.patch \
+ file://0001-Makefile.am-remove-runtime-linux-uprobes-and-runtime.patch \
+ file://0001-prerelease-datestamp-fixes.patch \
+ file://0001-configure.ac-fix-broken-libdebuginfod-library-auto-d.patch \
+ file://0001-bpf-translate.cxx-fix-build-against-upcoming-gcc-14-.patch \
+ file://0001-staprun-fix-build-against-upcoming-gcc-14-Werror-cal.patch \
"
COMPATIBLE_HOST = '(x86_64|i.86|powerpc|arm|aarch64|microblazeel|mips|riscv64).*-linux'
diff --git a/meta/recipes-kernel/wireless-regdb/wireless-regdb_2022.04.08.bb b/meta/recipes-kernel/wireless-regdb/wireless-regdb_2022.04.08.bb
deleted file mode 100644
index cd42039680..0000000000
--- a/meta/recipes-kernel/wireless-regdb/wireless-regdb_2022.04.08.bb
+++ /dev/null
@@ -1,43 +0,0 @@
-SUMMARY = "Wireless Central Regulatory Domain Database"
-HOMEPAGE = "https://wireless.wiki.kernel.org/en/developers/regulatory/crda"
-SECTION = "net"
-LICENSE = "ISC"
-LIC_FILES_CHKSUM = "file://LICENSE;md5=07c4f6dea3845b02a18dc00c8c87699c"
-
-SRC_URI = "https://www.kernel.org/pub/software/network/${BPN}/${BP}.tar.xz"
-SRC_URI[sha256sum] = "884ba2e3c1e8b98762b6dc25ff60b5ec75c8d33a39e019b3ed4aa615491460d3"
-
-inherit bin_package allarch
-
-do_install() {
- install -d -m0755 ${D}${nonarch_libdir}/crda
- install -d -m0755 ${D}${sysconfdir}/wireless-regdb/pubkeys
- install -m 0644 regulatory.bin ${D}${nonarch_libdir}/crda/regulatory.bin
- install -m 0644 sforshee.key.pub.pem ${D}${sysconfdir}/wireless-regdb/pubkeys/sforshee.key.pub.pem
-
- install -m 0644 -D regulatory.db ${D}${nonarch_base_libdir}/firmware/regulatory.db
- install -m 0644 regulatory.db.p7s ${D}${nonarch_base_libdir}/firmware/regulatory.db.p7s
-}
-
-# Install static regulatory DB in /lib/firmware for kernel to load.
-# This requires Linux kernel >= v4.15.
-# For kernel <= v4.14, inherit the kernel_wireless_regdb.bbclass
-# (in meta-networking) in kernel's recipe.
-PACKAGES = "${PN}-static ${PN}"
-RCONFLICTS:${PN} = "${PN}-static"
-
-FILES:${PN}-static = " \
- ${nonarch_base_libdir}/firmware/regulatory.db \
- ${nonarch_base_libdir}/firmware/regulatory.db.p7s \
-"
-
-# Native users might want to use the source of regulatory DB.
-# This is for example used by Linux kernel <= v4.14 and
-# kernel_wireless_regdb.bbclass in meta-networking.
-do_install:append:class-native() {
- install -m 0644 -D db.txt ${D}${libdir}/crda/db.txt
-}
-
-RSUGGESTS:${PN} = "crda"
-
-BBCLASSEXTEND = "native"
diff --git a/meta/recipes-kernel/wireless-regdb/wireless-regdb_2024.01.23.bb b/meta/recipes-kernel/wireless-regdb/wireless-regdb_2024.01.23.bb
new file mode 100644
index 0000000000..8fde236ab4
--- /dev/null
+++ b/meta/recipes-kernel/wireless-regdb/wireless-regdb_2024.01.23.bb
@@ -0,0 +1,43 @@
+SUMMARY = "Wireless Central Regulatory Domain Database"
+HOMEPAGE = "https://wireless.wiki.kernel.org/en/developers/regulatory/crda"
+SECTION = "net"
+LICENSE = "ISC"
+LIC_FILES_CHKSUM = "file://LICENSE;md5=07c4f6dea3845b02a18dc00c8c87699c"
+
+SRC_URI = "https://www.kernel.org/pub/software/network/${BPN}/${BP}.tar.xz"
+SRC_URI[sha256sum] = "c8a61c9acf76fa7eb4239e89f640dee3e87098d9f69b4d3518c9c60fc6d20c55"
+
+inherit bin_package allarch
+
+do_install() {
+ install -d -m0755 ${D}${nonarch_libdir}/crda
+ install -d -m0755 ${D}${sysconfdir}/wireless-regdb/pubkeys
+ install -m 0644 regulatory.bin ${D}${nonarch_libdir}/crda/regulatory.bin
+ install -m 0644 wens.key.pub.pem ${D}${sysconfdir}/wireless-regdb/pubkeys/wens.key.pub.pem
+
+ install -m 0644 -D regulatory.db ${D}${nonarch_base_libdir}/firmware/regulatory.db
+ install -m 0644 regulatory.db.p7s ${D}${nonarch_base_libdir}/firmware/regulatory.db.p7s
+}
+
+# Install static regulatory DB in /lib/firmware for kernel to load.
+# This requires Linux kernel >= v4.15.
+# For kernel <= v4.14, inherit the kernel_wireless_regdb.bbclass
+# (in meta-networking) in kernel's recipe.
+PACKAGES = "${PN}-static ${PN}"
+RCONFLICTS:${PN} = "${PN}-static"
+
+FILES:${PN}-static = " \
+ ${nonarch_base_libdir}/firmware/regulatory.db \
+ ${nonarch_base_libdir}/firmware/regulatory.db.p7s \
+"
+
+# Native users might want to use the source of regulatory DB.
+# This is for example used by Linux kernel <= v4.14 and
+# kernel_wireless_regdb.bbclass in meta-networking.
+do_install:append:class-native() {
+ install -m 0644 -D db.txt ${D}${libdir}/crda/db.txt
+}
+
+RSUGGESTS:${PN} = "crda"
+
+BBCLASSEXTEND = "native"
diff --git a/meta/recipes-multimedia/alsa/alsa-lib/0001-topology-correct-version-script-path.patch b/meta/recipes-multimedia/alsa/alsa-lib/0001-topology-correct-version-script-path.patch
new file mode 100644
index 0000000000..30aeef5817
--- /dev/null
+++ b/meta/recipes-multimedia/alsa/alsa-lib/0001-topology-correct-version-script-path.patch
@@ -0,0 +1,38 @@
+From 2a736a0d2543f206fd2653aaae8a08a4c42eb917 Mon Sep 17 00:00:00 2001
+From: Michael Opdenacker <michael.opdenacker@bootlin.com>
+Date: Tue, 30 Jan 2024 14:24:25 +0100
+Subject: [PATCH] topology: correct version script path
+
+From: Jan Palus <jpalus@fastmail.com>
+
+contrary to libasound, version script for libatopology is a regular
+source file. while it's often the case that $(builddir) and $(srcdir)
+point to the same directory, they don't always have to. therefore path
+needs to point explicitly to $(srcdir) for Versions script in topology
+
+Closes: https://github.com/alsa-project/alsa-lib/pull/383
+Fixes: GH-382
+Fixes: dc7da761f3a2 ("topology: separate Versions linker script")
+Signed-off-by: Jan Palus <jpalus@fastmail.com>
+Signed-off-by: Jaroslav Kysela <perex@perex.cz>
+Upstream-Status: Backport [https://git.alsa-project.org/?p=alsa-lib.git;a=commitdiff;h=2a736a0d2543f206fd2653aaae8a08a4c42eb917]
+---
+ src/topology/Makefile.am | 2 +-
+ 1 file changed, 1 insertion(+), 1 deletion(-)
+
+diff --git a/src/topology/Makefile.am b/src/topology/Makefile.am
+index 04299588..e0b78373 100644
+--- a/src/topology/Makefile.am
++++ b/src/topology/Makefile.am
+@@ -2,7 +2,7 @@ EXTRA_DIST = Versions
+ COMPATNUM=@LIBTOOL_VERSION_INFO@
+
+ if VERSIONED_SYMBOLS
+-VSYMS = -Wl,--version-script=Versions
++VSYMS = -Wl,--version-script=$(srcdir)/Versions
+ else
+ VSYMS =
+ endif
+--
+2.34.1
+
diff --git a/meta/recipes-multimedia/alsa/alsa-lib_1.2.11.bb b/meta/recipes-multimedia/alsa/alsa-lib_1.2.11.bb
new file mode 100644
index 0000000000..c212b17aa3
--- /dev/null
+++ b/meta/recipes-multimedia/alsa/alsa-lib_1.2.11.bb
@@ -0,0 +1,46 @@
+SUMMARY = "ALSA sound library"
+DESCRIPTION = "(Occasionally a.k.a. libasound) is a userspace library that \
+provides a level of abstraction over the /dev interfaces provided by the kernel modules."
+HOMEPAGE = "http://www.alsa-project.org"
+BUGTRACKER = "http://alsa-project.org/main/index.php/Bug_Tracking"
+SECTION = "libs/multimedia"
+LICENSE = "LGPL-2.1-only & GPL-2.0-or-later"
+LIC_FILES_CHKSUM = "file://COPYING;md5=a916467b91076e631dd8edb7424769c7 \
+ file://src/socket.c;md5=285675b45e83f571c6a957fe4ab79c93;beginline=9;endline=24 \
+ "
+
+SRC_URI = "https://www.alsa-project.org/files/pub/lib/${BP}.tar.bz2 \
+ file://0001-topology-correct-version-script-path.patch \
+ "
+SRC_URI[sha256sum] = "9f3f2f69b995f9ad37359072fbc69a3a88bfba081fc83e9be30e14662795bb4d"
+
+inherit autotools pkgconfig
+
+EXTRA_OECONF += " \
+ ${@bb.utils.contains('TARGET_FPU', 'soft', '--with-softfloat', '', d)} \
+ --disable-python --disable-old-symbols \
+"
+
+PACKAGES =+ "alsa-server alsa-conf libatopology"
+
+FILES:alsa-server = "${bindir}/*"
+FILES:alsa-conf = "${datadir}/alsa/"
+FILES:libatopology = "${libdir}/libatopology.so.*"
+
+RDEPENDS:${PN}:class-target = "alsa-conf alsa-ucm-conf"
+RDEPENDS:libatopology:class-target = "alsa-topology-conf"
+
+# upgrade path
+RPROVIDES:${PN} = "libasound"
+RREPLACES:${PN} = "libasound"
+RCONFLICTS:${PN} = "libasound"
+
+RPROVIDES:${PN}-dev = "alsa-dev"
+RREPLACES:${PN}-dev = "alsa-dev"
+RCONFLICTS:${PN}-dev = "alsa-dev"
+
+RPROVIDES:alsa-conf = "alsa-conf-base"
+RREPLACES:alsa-conf = "alsa-conf-base"
+RCONFLICTS:alsa-conf = "alsa-conf-base"
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-multimedia/alsa/alsa-lib_1.2.6.1.bb b/meta/recipes-multimedia/alsa/alsa-lib_1.2.6.1.bb
deleted file mode 100644
index ca6bedae97..0000000000
--- a/meta/recipes-multimedia/alsa/alsa-lib_1.2.6.1.bb
+++ /dev/null
@@ -1,44 +0,0 @@
-SUMMARY = "ALSA sound library"
-DESCRIPTION = "(Occasionally a.k.a. libasound) is a userspace library that \
-provides a level of abstraction over the /dev interfaces provided by the kernel modules."
-HOMEPAGE = "http://www.alsa-project.org"
-BUGTRACKER = "http://alsa-project.org/main/index.php/Bug_Tracking"
-SECTION = "libs/multimedia"
-LICENSE = "LGPL-2.1-only & GPL-2.0-or-later"
-LIC_FILES_CHKSUM = "file://COPYING;md5=a916467b91076e631dd8edb7424769c7 \
- file://src/socket.c;md5=285675b45e83f571c6a957fe4ab79c93;beginline=9;endline=24 \
- "
-
-SRC_URI = "https://www.alsa-project.org/files/pub/lib/${BP}.tar.bz2"
-SRC_URI[sha256sum] = "ad582993d52cdb5fb159a0beab60a6ac57eab0cc1bdf85dc4db6d6197f02333f"
-
-inherit autotools pkgconfig
-
-EXTRA_OECONF += " \
- ${@bb.utils.contains('TARGET_FPU', 'soft', '--with-softfloat', '', d)} \
- --disable-python \
-"
-
-PACKAGES =+ "alsa-server alsa-conf libatopology"
-
-FILES:alsa-server = "${bindir}/*"
-FILES:alsa-conf = "${datadir}/alsa/"
-FILES:libatopology = "${libdir}/libatopology.so.*"
-
-RDEPENDS:${PN}:class-target = "alsa-conf alsa-ucm-conf"
-RDEPENDS:libatopology:class-target = "alsa-topology-conf"
-
-# upgrade path
-RPROVIDES:${PN} = "libasound"
-RREPLACES:${PN} = "libasound"
-RCONFLICTS:${PN} = "libasound"
-
-RPROVIDES:${PN}-dev = "alsa-dev"
-RREPLACES:${PN}-dev = "alsa-dev"
-RCONFLICTS:${PN}-dev = "alsa-dev"
-
-RPROVIDES:alsa-conf = "alsa-conf-base"
-RREPLACES:alsa-conf = "alsa-conf-base"
-RCONFLICTS:alsa-conf = "alsa-conf-base"
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-multimedia/alsa/alsa-plugins/0001-arcam_av.c-Include-missing-string.h.patch b/meta/recipes-multimedia/alsa/alsa-plugins/0001-arcam_av.c-Include-missing-string.h.patch
new file mode 100644
index 0000000000..ff7745d637
--- /dev/null
+++ b/meta/recipes-multimedia/alsa/alsa-plugins/0001-arcam_av.c-Include-missing-string.h.patch
@@ -0,0 +1,25 @@
+From b01b176a665ba65979d74922955f51dc4888a713 Mon Sep 17 00:00:00 2001
+From: Khem Raj <raj.khem@gmail.com>
+Date: Tue, 23 Aug 2022 15:21:16 -0700
+Subject: [PATCH] arcam_av.c: Include missing string.h
+
+bzero() function needs this header to be included
+
+Upstream-Status: Submitted [https://github.com/alsa-project/alsa-plugins/pull/47]
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+---
+ arcam-av/arcam_av.c | 1 +
+ 1 file changed, 1 insertion(+)
+
+diff --git a/arcam-av/arcam_av.c b/arcam-av/arcam_av.c
+index 63f9b4e..29fc537 100644
+--- a/arcam-av/arcam_av.c
++++ b/arcam-av/arcam_av.c
+@@ -27,6 +27,7 @@
+ #include <signal.h>
+ #include <stddef.h>
+ #include <stdio.h>
++#include <string.h>
+ #include <termios.h>
+ #include <unistd.h>
+
diff --git a/meta/recipes-multimedia/alsa/alsa-plugins_1.2.6.bb b/meta/recipes-multimedia/alsa/alsa-plugins_1.2.6.bb
deleted file mode 100644
index c435f61d4c..0000000000
--- a/meta/recipes-multimedia/alsa/alsa-plugins_1.2.6.bb
+++ /dev/null
@@ -1,174 +0,0 @@
-SUMMARY = "ALSA Plugins"
-DESCRIPTION = "Used to create virtual devices that can be used like normal \
-hardware devices but cause extra processing of the sound stream to take place. \
-They are used while configuring ALSA in the .asoundrc file."
-HOMEPAGE = "http://alsa-project.org"
-BUGTRACKER = "http://alsa-project.org/main/index.php/Bug_Tracking"
-SECTION = "multimedia"
-
-# The primary license of alsa-plugins is LGPL-2.1-only.
-#
-# m4/attributes.m4 is licensed under GPL-2.0-or-later. m4/attributes.m4 is part
-# of the build system, and doesn't affect the licensing of the build result.
-#
-# The samplerate plugin source code is licensed under GPL-2.0-or-later to be
-# consistent with the libsamplerate license. However, if the licensee has a
-# commercial license for libsamplerate, the samplerate plugin may be used under
-# the terms of LGPL-2.1-only like the rest of the plugins.
-LICENSE = "LGPL-2.1-only & GPL-2.0-or-later"
-LIC_FILES_CHKSUM = "file://COPYING;md5=a916467b91076e631dd8edb7424769c7 \
- file://COPYING.GPL;md5=59530bdf33659b29e73d4adb9f9f6552 \
- file://m4/attributes.m4;endline=33;md5=bb8c6b2a67ac15156961e242fec33e50 \
- file://rate/rate_samplerate.c;endline=35;md5=fd77bce85f4a338c0e8ab18430b69fae \
- "
-
-SRC_URI = "https://www.alsa-project.org/files/pub/plugins/${BP}.tar.bz2"
-SRC_URI[sha256sum] = "068818a4b55d8c029daa00015d853d45113f56b224b7c64e1e117988c825b2a0"
-
-DEPENDS += "alsa-lib"
-
-inherit autotools pkgconfig
-
-PACKAGECONFIG ??= "\
- samplerate \
- speexdsp \
- ${@bb.utils.filter('DISTRO_FEATURES', 'pulseaudio', d)} \
-"
-PACKAGECONFIG[aaf] = "--enable-aaf,--disable-aaf,avtp"
-PACKAGECONFIG[jack] = "--enable-jack,--disable-jack,jack"
-PACKAGECONFIG[libav] = "--enable-libav,--disable-libav,libav"
-PACKAGECONFIG[maemo-plugin] = "--enable-maemo-plugin,--disable-maemo-plugin"
-PACKAGECONFIG[maemo-resource-manager] = "--enable-maemo-resource-manager,--disable-maemo-resource-manager,dbus"
-PACKAGECONFIG[pulseaudio] = "--enable-pulseaudio,--disable-pulseaudio,pulseaudio"
-PACKAGECONFIG[samplerate] = "--enable-samplerate,--disable-samplerate,libsamplerate0"
-PACKAGECONFIG[speexdsp] = "--with-speex=lib,--with-speex=no,speexdsp"
-
-PACKAGES += "${@bb.utils.contains('PACKAGECONFIG', 'pulseaudio', 'alsa-plugins-pulseaudio-conf', '', d)}"
-
-PACKAGES_DYNAMIC = "^libasound-module-.*"
-
-# The alsa-plugins package doesn't itself contain anything, it just depends on
-# all built plugins.
-FILES:${PN} = ""
-ALLOW_EMPTY:${PN} = "1"
-
-do_install:append() {
- rm -f ${D}${libdir}/alsa-lib/*.la
-
- if [ "${@bb.utils.contains('PACKAGECONFIG', 'pulseaudio', 'yes', 'no', d)}" = "yes" ]; then
- # We use the example as is. Upstream installs the file under
- # /etc, but we move it under /usr/share and add a symlink under
- # /etc to be consistent with other installed configuration
- # files.
- mv ${D}${sysconfdir}/alsa/conf.d/99-pulseaudio-default.conf.example ${D}${datadir}/alsa/alsa.conf.d/99-pulseaudio-default.conf
- ln -s ${datadir}/alsa/alsa.conf.d/99-pulseaudio-default.conf ${D}${sysconfdir}/alsa/conf.d/99-pulseaudio-default.conf
- fi
-}
-
-python populate_packages:prepend() {
- plugindir = d.expand('${libdir}/alsa-lib/')
- packages = " ".join(do_split_packages(d, plugindir, r'^libasound_module_(.*)\.so$', 'libasound-module-%s', 'Alsa plugin for %s', extra_depends=''))
- d.setVar("RDEPENDS:alsa-plugins", packages)
-}
-
-# Many plugins have a configuration file (plus a symlink in /etc) associated
-# with them. We put the plugin and it's configuration usually in the same
-# package, but that's problematic when the configuration file is related to
-# multiple plugins, as is the case with the pulse, oss and maemo plugins. In
-# case of the pulse plugins, we have a separate alsa-plugins-pulseaudio-conf
-# package that depends on all the pulse plugins, which ensures that all plugins
-# that the configuration references are installed. The oss and maemo
-# configuration files, on the other hand, are in the respective pcm plugin
-# packages. Therefore it's possible to install the configuration file without
-# the ctl plugin that the configuration file references. This is unlikely to
-# cause big problems, but some kind of improvement to the packaging could
-# probably be done here (at least it would be good to handle the different
-# plugins in a consistent way).
-FILES:${MLPREFIX}libasound-module-ctl-arcam-av += "\
- ${datadir}/alsa/alsa.conf.d/50-arcam-av-ctl.conf \
- ${sysconfdir}/alsa/conf.d/50-arcam-av-ctl.conf \
-"
-FILES:${MLPREFIX}libasound-module-pcm-a52 += "\
- ${datadir}/alsa/alsa.conf.d/60-a52-encoder.conf \
- ${sysconfdir}/alsa/conf.d/60-a52-encoder.conf \
-"
-FILES:${MLPREFIX}libasound-module-pcm-alsa-dsp += "\
- ${datadir}/alsa/alsa.conf.d/98-maemo.conf \
- ${sysconfdir}/alsa/conf.d/98-maemo.conf \
-"
-FILES:${MLPREFIX}libasound-module-pcm-jack += "\
- ${datadir}/alsa/alsa.conf.d/50-jack.conf \
- ${sysconfdir}/alsa/conf.d/50-jack.conf \
-"
-FILES:${MLPREFIX}libasound-module-pcm-oss += "\
- ${datadir}/alsa/alsa.conf.d/50-oss.conf \
- ${sysconfdir}/alsa/conf.d/50-oss.conf \
-"
-FILES:${MLPREFIX}libasound-module-pcm-speex += "\
- ${datadir}/alsa/alsa.conf.d/60-speex.conf \
- ${sysconfdir}/alsa/conf.d/60-speex.conf \
-"
-FILES:${MLPREFIX}libasound-module-pcm-upmix += "\
- ${datadir}/alsa/alsa.conf.d/60-upmix.conf \
- ${sysconfdir}/alsa/conf.d/60-upmix.conf \
-"
-FILES:${MLPREFIX}libasound-module-pcm-usb-stream += "\
- ${datadir}/alsa/alsa.conf.d/98-usb-stream.conf \
- ${sysconfdir}/alsa/conf.d/98-usb-stream.conf \
-"
-FILES:${MLPREFIX}libasound-module-pcm-vdownmix += "\
- ${datadir}/alsa/alsa.conf.d/60-vdownmix.conf \
- ${sysconfdir}/alsa/conf.d/60-vdownmix.conf \
-"
-FILES:${MLPREFIX}libasound-module-rate-lavrate += "\
- ${datadir}/alsa/alsa.conf.d/10-rate-lav.conf \
- ${sysconfdir}/alsa/conf.d/10-rate-lav.conf \
-"
-FILES:${MLPREFIX}libasound-module-rate-samplerate += "\
- ${datadir}/alsa/alsa.conf.d/10-samplerate.conf \
- ${sysconfdir}/alsa/conf.d/10-samplerate.conf \
-"
-FILES:${MLPREFIX}libasound-module-rate-speexrate += "\
- ${datadir}/alsa/alsa.conf.d/10-speexrate.conf \
- ${sysconfdir}/alsa/conf.d/10-speexrate.conf \
-"
-
-# The rate plugins create some symlinks. For example, the samplerate plugin
-# creates these links to the main plugin file:
-#
-# libasound_module_rate_samplerate_best.so
-# libasound_module_rate_samplerate_linear.so
-# libasound_module_rate_samplerate_medium.so
-# libasound_module_rate_samplerate_order.so
-#
-# The other rate plugins create similar links. We have to add the links to
-# FILES manually, because do_split_packages() skips the links (which is good,
-# because we wouldn't want do_split_packages() to create separate packages for
-# the symlinks).
-#
-# The symlinks cause QA errors, because usually it's a bug if a non
-# -dev/-dbg/-nativesdk package contains links to .so files, but in this case
-# the errors are false positives, so we disable the QA checks.
-FILES:${MLPREFIX}libasound-module-rate-lavrate += "${libdir}/alsa-lib/*rate_lavrate_*.so"
-FILES:${MLPREFIX}libasound-module-rate-samplerate += "${libdir}/alsa-lib/*rate_samplerate_*.so"
-FILES:${MLPREFIX}libasound-module-rate-speexrate += "${libdir}/alsa-lib/*rate_speexrate_*.so"
-INSANE_SKIP:${MLPREFIX}libasound-module-rate-lavrate = "dev-so"
-INSANE_SKIP:${MLPREFIX}libasound-module-rate-samplerate = "dev-so"
-INSANE_SKIP:${MLPREFIX}libasound-module-rate-speexrate = "dev-so"
-
-# 50-pulseaudio.conf defines a device named "pulse" that applications can use
-# if they explicitly want to use the PulseAudio plugin.
-# 99-pulseaudio-default.conf configures the "default" device to use the
-# PulseAudio plugin.
-FILES:${PN}-pulseaudio-conf += "\
- ${datadir}/alsa/alsa.conf.d/50-pulseaudio.conf \
- ${datadir}/alsa/alsa.conf.d/99-pulseaudio-default.conf \
- ${sysconfdir}/alsa/conf.d/50-pulseaudio.conf \
- ${sysconfdir}/alsa/conf.d/99-pulseaudio-default.conf \
-"
-
-RDEPENDS:${PN}-pulseaudio-conf += "\
- ${MLPREFIX}libasound-module-conf-pulse \
- ${MLPREFIX}libasound-module-ctl-pulse \
- ${MLPREFIX}libasound-module-pcm-pulse \
-"
diff --git a/meta/recipes-multimedia/alsa/alsa-plugins_1.2.7.1.bb b/meta/recipes-multimedia/alsa/alsa-plugins_1.2.7.1.bb
new file mode 100644
index 0000000000..9500462d5c
--- /dev/null
+++ b/meta/recipes-multimedia/alsa/alsa-plugins_1.2.7.1.bb
@@ -0,0 +1,176 @@
+SUMMARY = "ALSA Plugins"
+DESCRIPTION = "Used to create virtual devices that can be used like normal \
+hardware devices but cause extra processing of the sound stream to take place. \
+They are used while configuring ALSA in the .asoundrc file."
+HOMEPAGE = "http://alsa-project.org"
+BUGTRACKER = "http://alsa-project.org/main/index.php/Bug_Tracking"
+SECTION = "multimedia"
+
+# The primary license of alsa-plugins is LGPL-2.1-only.
+#
+# m4/attributes.m4 is licensed under GPL-2.0-or-later. m4/attributes.m4 is part
+# of the build system, and doesn't affect the licensing of the build result.
+#
+# The samplerate plugin source code is licensed under GPL-2.0-or-later to be
+# consistent with the libsamplerate license. However, if the licensee has a
+# commercial license for libsamplerate, the samplerate plugin may be used under
+# the terms of LGPL-2.1-only like the rest of the plugins.
+LICENSE = "LGPL-2.1-only & GPL-2.0-or-later"
+LIC_FILES_CHKSUM = "file://COPYING;md5=a916467b91076e631dd8edb7424769c7 \
+ file://COPYING.GPL;md5=59530bdf33659b29e73d4adb9f9f6552 \
+ file://m4/attributes.m4;endline=33;md5=bb8c6b2a67ac15156961e242fec33e50 \
+ file://rate/rate_samplerate.c;endline=35;md5=fd77bce85f4a338c0e8ab18430b69fae \
+ "
+
+SRC_URI = "https://www.alsa-project.org/files/pub/plugins/${BP}.tar.bz2 \
+ file://0001-arcam_av.c-Include-missing-string.h.patch \
+ "
+SRC_URI[sha256sum] = "8c337814954bb7c167456733a6046142a2931f12eccba3ec2a4ae618a3432511"
+
+DEPENDS += "alsa-lib"
+
+inherit autotools pkgconfig
+
+PACKAGECONFIG ??= "\
+ samplerate \
+ speexdsp \
+ ${@bb.utils.filter('DISTRO_FEATURES', 'pulseaudio', d)} \
+"
+PACKAGECONFIG[aaf] = "--enable-aaf,--disable-aaf,libavtp"
+PACKAGECONFIG[jack] = "--enable-jack,--disable-jack,jack"
+PACKAGECONFIG[libav] = "--enable-libav,--disable-libav,libav"
+PACKAGECONFIG[maemo-plugin] = "--enable-maemo-plugin,--disable-maemo-plugin"
+PACKAGECONFIG[maemo-resource-manager] = "--enable-maemo-resource-manager,--disable-maemo-resource-manager,dbus"
+PACKAGECONFIG[pulseaudio] = "--enable-pulseaudio,--disable-pulseaudio,pulseaudio"
+PACKAGECONFIG[samplerate] = "--enable-samplerate,--disable-samplerate,libsamplerate0"
+PACKAGECONFIG[speexdsp] = "--with-speex=lib,--with-speex=no,speexdsp"
+
+PACKAGES += "${@bb.utils.contains('PACKAGECONFIG', 'pulseaudio', 'alsa-plugins-pulseaudio-conf', '', d)}"
+
+PACKAGES_DYNAMIC = "^libasound-module-.*"
+
+# The alsa-plugins package doesn't itself contain anything, it just depends on
+# all built plugins.
+FILES:${PN} = ""
+ALLOW_EMPTY:${PN} = "1"
+
+do_install:append() {
+ rm -f ${D}${libdir}/alsa-lib/*.la
+
+ if [ "${@bb.utils.contains('PACKAGECONFIG', 'pulseaudio', 'yes', 'no', d)}" = "yes" ]; then
+ # We use the example as is. Upstream installs the file under
+ # /etc, but we move it under /usr/share and add a symlink under
+ # /etc to be consistent with other installed configuration
+ # files.
+ mv ${D}${sysconfdir}/alsa/conf.d/99-pulseaudio-default.conf.example ${D}${datadir}/alsa/alsa.conf.d/99-pulseaudio-default.conf
+ ln -s ${datadir}/alsa/alsa.conf.d/99-pulseaudio-default.conf ${D}${sysconfdir}/alsa/conf.d/99-pulseaudio-default.conf
+ fi
+}
+
+python populate_packages:prepend() {
+ plugindir = d.expand('${libdir}/alsa-lib/')
+ packages = " ".join(do_split_packages(d, plugindir, r'^libasound_module_(.*)\.so$', 'libasound-module-%s', 'Alsa plugin for %s', extra_depends=''))
+ d.setVar("RDEPENDS:alsa-plugins", packages)
+}
+
+# Many plugins have a configuration file (plus a symlink in /etc) associated
+# with them. We put the plugin and it's configuration usually in the same
+# package, but that's problematic when the configuration file is related to
+# multiple plugins, as is the case with the pulse, oss and maemo plugins. In
+# case of the pulse plugins, we have a separate alsa-plugins-pulseaudio-conf
+# package that depends on all the pulse plugins, which ensures that all plugins
+# that the configuration references are installed. The oss and maemo
+# configuration files, on the other hand, are in the respective pcm plugin
+# packages. Therefore it's possible to install the configuration file without
+# the ctl plugin that the configuration file references. This is unlikely to
+# cause big problems, but some kind of improvement to the packaging could
+# probably be done here (at least it would be good to handle the different
+# plugins in a consistent way).
+FILES:${MLPREFIX}libasound-module-ctl-arcam-av += "\
+ ${datadir}/alsa/alsa.conf.d/50-arcam-av-ctl.conf \
+ ${sysconfdir}/alsa/conf.d/50-arcam-av-ctl.conf \
+"
+FILES:${MLPREFIX}libasound-module-pcm-a52 += "\
+ ${datadir}/alsa/alsa.conf.d/60-a52-encoder.conf \
+ ${sysconfdir}/alsa/conf.d/60-a52-encoder.conf \
+"
+FILES:${MLPREFIX}libasound-module-pcm-alsa-dsp += "\
+ ${datadir}/alsa/alsa.conf.d/98-maemo.conf \
+ ${sysconfdir}/alsa/conf.d/98-maemo.conf \
+"
+FILES:${MLPREFIX}libasound-module-pcm-jack += "\
+ ${datadir}/alsa/alsa.conf.d/50-jack.conf \
+ ${sysconfdir}/alsa/conf.d/50-jack.conf \
+"
+FILES:${MLPREFIX}libasound-module-pcm-oss += "\
+ ${datadir}/alsa/alsa.conf.d/50-oss.conf \
+ ${sysconfdir}/alsa/conf.d/50-oss.conf \
+"
+FILES:${MLPREFIX}libasound-module-pcm-speex += "\
+ ${datadir}/alsa/alsa.conf.d/60-speex.conf \
+ ${sysconfdir}/alsa/conf.d/60-speex.conf \
+"
+FILES:${MLPREFIX}libasound-module-pcm-upmix += "\
+ ${datadir}/alsa/alsa.conf.d/60-upmix.conf \
+ ${sysconfdir}/alsa/conf.d/60-upmix.conf \
+"
+FILES:${MLPREFIX}libasound-module-pcm-usb-stream += "\
+ ${datadir}/alsa/alsa.conf.d/98-usb-stream.conf \
+ ${sysconfdir}/alsa/conf.d/98-usb-stream.conf \
+"
+FILES:${MLPREFIX}libasound-module-pcm-vdownmix += "\
+ ${datadir}/alsa/alsa.conf.d/60-vdownmix.conf \
+ ${sysconfdir}/alsa/conf.d/60-vdownmix.conf \
+"
+FILES:${MLPREFIX}libasound-module-rate-lavrate += "\
+ ${datadir}/alsa/alsa.conf.d/10-rate-lav.conf \
+ ${sysconfdir}/alsa/conf.d/10-rate-lav.conf \
+"
+FILES:${MLPREFIX}libasound-module-rate-samplerate += "\
+ ${datadir}/alsa/alsa.conf.d/10-samplerate.conf \
+ ${sysconfdir}/alsa/conf.d/10-samplerate.conf \
+"
+FILES:${MLPREFIX}libasound-module-rate-speexrate += "\
+ ${datadir}/alsa/alsa.conf.d/10-speexrate.conf \
+ ${sysconfdir}/alsa/conf.d/10-speexrate.conf \
+"
+
+# The rate plugins create some symlinks. For example, the samplerate plugin
+# creates these links to the main plugin file:
+#
+# libasound_module_rate_samplerate_best.so
+# libasound_module_rate_samplerate_linear.so
+# libasound_module_rate_samplerate_medium.so
+# libasound_module_rate_samplerate_order.so
+#
+# The other rate plugins create similar links. We have to add the links to
+# FILES manually, because do_split_packages() skips the links (which is good,
+# because we wouldn't want do_split_packages() to create separate packages for
+# the symlinks).
+#
+# The symlinks cause QA errors, because usually it's a bug if a non
+# -dev/-dbg/-nativesdk package contains links to .so files, but in this case
+# the errors are false positives, so we disable the QA checks.
+FILES:${MLPREFIX}libasound-module-rate-lavrate += "${libdir}/alsa-lib/*rate_lavrate_*.so"
+FILES:${MLPREFIX}libasound-module-rate-samplerate += "${libdir}/alsa-lib/*rate_samplerate_*.so"
+FILES:${MLPREFIX}libasound-module-rate-speexrate += "${libdir}/alsa-lib/*rate_speexrate_*.so"
+INSANE_SKIP:${MLPREFIX}libasound-module-rate-lavrate = "dev-so"
+INSANE_SKIP:${MLPREFIX}libasound-module-rate-samplerate = "dev-so"
+INSANE_SKIP:${MLPREFIX}libasound-module-rate-speexrate = "dev-so"
+
+# 50-pulseaudio.conf defines a device named "pulse" that applications can use
+# if they explicitly want to use the PulseAudio plugin.
+# 99-pulseaudio-default.conf configures the "default" device to use the
+# PulseAudio plugin.
+FILES:${PN}-pulseaudio-conf += "\
+ ${datadir}/alsa/alsa.conf.d/50-pulseaudio.conf \
+ ${datadir}/alsa/alsa.conf.d/99-pulseaudio-default.conf \
+ ${sysconfdir}/alsa/conf.d/50-pulseaudio.conf \
+ ${sysconfdir}/alsa/conf.d/99-pulseaudio-default.conf \
+"
+
+RDEPENDS:${PN}-pulseaudio-conf += "\
+ ${MLPREFIX}libasound-module-conf-pulse \
+ ${MLPREFIX}libasound-module-ctl-pulse \
+ ${MLPREFIX}libasound-module-pcm-pulse \
+"
diff --git a/meta/recipes-multimedia/alsa/alsa-tools_1.2.11.bb b/meta/recipes-multimedia/alsa/alsa-tools_1.2.11.bb
new file mode 100644
index 0000000000..53868041c0
--- /dev/null
+++ b/meta/recipes-multimedia/alsa/alsa-tools_1.2.11.bb
@@ -0,0 +1,89 @@
+SUMMARY = "Advanced tools for certain ALSA sound card drivers"
+DESCRIPTION = "Package containing a number of tools ranging from envy24control \
+which provides complete control over all devices with an envy24 chip, to \
+firmware loaders for pcmcia, USB and the hdsp devices."
+HOMEPAGE = "http://www.alsa-project.org"
+BUGTRACKER = "http://alsa-project.org/main/index.php/Bug_Tracking"
+SECTION = "console/utils"
+LICENSE = "GPL-2.0-only & LGPL-2.0-or-later"
+DEPENDS = "alsa-lib"
+
+LIC_FILES_CHKSUM = "file://hdsploader/COPYING;md5=59530bdf33659b29e73d4adb9f9f6552 \
+ file://ld10k1/COPYING.LIB;md5=a916467b91076e631dd8edb7424769c7 \
+ "
+
+SRC_URI = "https://www.alsa-project.org/files/pub/tools/${BP}.tar.bz2"
+
+SRC_URI[sha256sum] = "0915c9634a502fd3655ca9c574d259bc9e79983d91d45aeacff6f3c00f8ae3e9"
+
+inherit autotools-brokensep pkgconfig
+# brokensep as as10k1 (and probably more) fail out of tree
+CLEANBROKEN = "1"
+
+# Here we use PACKAGECONFIG options to pick which directories we configure/build.
+# Remember on upgrades to check that no new tools have been added.
+PACKAGECONFIG ??= "as10k1 hdajacksensetest hda-verb hdsploader ld10k1 mixartloader pcxhrloader \
+ sb16_csp seq--sbiload sscape_ctl us428control usx2yloader vxloader \
+ ${@bb.utils.contains_any('DISTRO_FEATURES', '${GTK3DISTROFEATURES}', 'hdajackretask', '', d)} \
+ "
+
+PACKAGECONFIG[as10k1] = ""
+PACKAGECONFIG[echomixer] = ",,gtk+"
+PACKAGECONFIG[envy24control] = ",,gtk+"
+PACKAGECONFIG[hda-verb] = ""
+PACKAGECONFIG[hdajackretask] = ",,gtk+3"
+PACKAGECONFIG[hdajacksensetest] = ",,glib-2.0"
+PACKAGECONFIG[hdspconf] = ",,fltk"
+PACKAGECONFIG[hdsploader] = ""
+PACKAGECONFIG[hdspmixer] = ",,fltk"
+PACKAGECONFIG[ld10k1] = ""
+PACKAGECONFIG[mixartloader] = ""
+PACKAGECONFIG[pcxhrloader] = ""
+PACKAGECONFIG[qlo10k1] = ",,qt-x11-free"
+PACKAGECONFIG[rmedigicontrol] = ",,gtk+"
+PACKAGECONFIG[sb16_csp] = ""
+PACKAGECONFIG[seq--sbiload] = ""
+PACKAGECONFIG[sscape_ctl] = ""
+PACKAGECONFIG[us428control] = ""
+PACKAGECONFIG[usx2yloader] = ""
+PACKAGECONFIG[vxloader] = ""
+
+# At the time of writing pyalsa is not packaged for OE, so this is not expected
+# to work.
+PACKAGECONFIG[hwmixvolume] = ",,,python-core python-pygobject pyalsa"
+
+python do_configure() {
+ for subdir in d.getVar("PACKAGECONFIG").split():
+ subdir = subdir.replace("--", "/")
+ bb.note("Configuring %s" % subdir)
+ dd = d.createCopy()
+ dd.setVar("S", os.path.join(d.getVar("S"), subdir))
+ bb.build.exec_func("autotools_do_configure", dd)
+}
+
+python do_compile() {
+ for subdir in d.getVar("PACKAGECONFIG").split():
+ subdir = subdir.replace("--", "/")
+ bb.note("Compiling %s" % subdir)
+ dd = d.createCopy()
+ dd.setVar("S", os.path.join(d.getVar("S"), subdir))
+ bb.build.exec_func("autotools_do_compile", dd)
+}
+
+python do_install() {
+ d.delVarFlag("autotools_do_install", "cleandirs")
+ for subdir in d.getVar("PACKAGECONFIG").split():
+ subdir = subdir.replace("--", "/")
+ bb.note("Installing %s" % subdir)
+ dd = d.createCopy()
+ dd.setVar("S", os.path.join(d.getVar("S"), subdir))
+ bb.build.exec_func("autotools_do_install", dd)
+
+ # Just remove bash-needing init script that isn't installed as an init script
+ try:
+ os.remove(oe.path.join(d.getVar("D"), d.getVar("sbindir"), "ld10k1d"))
+ except:
+ pass
+}
+
+FILES:${PN} += "${datadir}"
diff --git a/meta/recipes-multimedia/alsa/alsa-tools_1.2.5.bb b/meta/recipes-multimedia/alsa/alsa-tools_1.2.5.bb
deleted file mode 100644
index 56acc84559..0000000000
--- a/meta/recipes-multimedia/alsa/alsa-tools_1.2.5.bb
+++ /dev/null
@@ -1,89 +0,0 @@
-SUMMARY = "Advanced tools for certain ALSA sound card drivers"
-DESCRIPTION = "Package containing a number of tools ranging from envy24control \
-which provides complete control over all devices with an envy24 chip, to \
-firmware loaders for pcmcia, USB and the hdsp devices."
-HOMEPAGE = "http://www.alsa-project.org"
-BUGTRACKER = "http://alsa-project.org/main/index.php/Bug_Tracking"
-SECTION = "console/utils"
-LICENSE = "GPL-2.0-only & LGPL-2.0-or-later"
-DEPENDS = "alsa-lib"
-
-LIC_FILES_CHKSUM = "file://hdsploader/COPYING;md5=59530bdf33659b29e73d4adb9f9f6552 \
- file://ld10k1/COPYING.LIB;md5=a916467b91076e631dd8edb7424769c7 \
- "
-
-SRC_URI = "https://www.alsa-project.org/files/pub/tools/${BP}.tar.bz2"
-
-SRC_URI[sha256sum] = "35a71027a01f4d7de4722e223520e940de68b3c570b6c671691567ae28f9893e"
-
-inherit autotools-brokensep pkgconfig
-# brokensep as as10k1 (and probably more) fail out of tree
-CLEANBROKEN = "1"
-
-# Here we use PACKAGECONFIG options to pick which directories we configure/build.
-# Remember on upgrades to check that no new tools have been added.
-PACKAGECONFIG ??= "as10k1 hdajacksensetest hda-verb hdsploader ld10k1 mixartloader pcxhrloader \
- sb16_csp seq--sbiload sscape_ctl us428control usx2yloader vxloader \
- ${@bb.utils.contains_any('DISTRO_FEATURES', '${GTK3DISTROFEATURES}', 'hdajackretask', '', d)} \
- "
-
-PACKAGECONFIG[as10k1] = ""
-PACKAGECONFIG[echomixer] = ",,gtk+"
-PACKAGECONFIG[envy24control] = ",,gtk+"
-PACKAGECONFIG[hda-verb] = ""
-PACKAGECONFIG[hdajackretask] = ",,gtk+3"
-PACKAGECONFIG[hdajacksensetest] = ",,glib-2.0"
-PACKAGECONFIG[hdspconf] = ",,fltk"
-PACKAGECONFIG[hdsploader] = ""
-PACKAGECONFIG[hdspmixer] = ",,fltk"
-PACKAGECONFIG[ld10k1] = ""
-PACKAGECONFIG[mixartloader] = ""
-PACKAGECONFIG[pcxhrloader] = ""
-PACKAGECONFIG[qlo10k1] = ",,qt-x11-free"
-PACKAGECONFIG[rmedigicontrol] = ",,gtk+"
-PACKAGECONFIG[sb16_csp] = ""
-PACKAGECONFIG[seq--sbiload] = ""
-PACKAGECONFIG[sscape_ctl] = ""
-PACKAGECONFIG[us428control] = ""
-PACKAGECONFIG[usx2yloader] = ""
-PACKAGECONFIG[vxloader] = ""
-
-# At the time of writing pyalsa is not packaged for OE, so this is not expected
-# to work.
-PACKAGECONFIG[hwmixvolume] = ",,,python-core python-pygobject pyalsa"
-
-python do_configure() {
- for subdir in d.getVar("PACKAGECONFIG").split():
- subdir = subdir.replace("--", "/")
- bb.note("Configuring %s" % subdir)
- dd = d.createCopy()
- dd.setVar("S", os.path.join(d.getVar("S"), subdir))
- bb.build.exec_func("autotools_do_configure", dd)
-}
-
-python do_compile() {
- for subdir in d.getVar("PACKAGECONFIG").split():
- subdir = subdir.replace("--", "/")
- bb.note("Compiling %s" % subdir)
- dd = d.createCopy()
- dd.setVar("S", os.path.join(d.getVar("S"), subdir))
- bb.build.exec_func("autotools_do_compile", dd)
-}
-
-python do_install() {
- d.delVarFlag("autotools_do_install", "cleandirs")
- for subdir in d.getVar("PACKAGECONFIG").split():
- subdir = subdir.replace("--", "/")
- bb.note("Installing %s" % subdir)
- dd = d.createCopy()
- dd.setVar("S", os.path.join(d.getVar("S"), subdir))
- bb.build.exec_func("autotools_do_install", dd)
-
- # Just remove bash-needing init script that isn't installed as an init script
- try:
- os.remove(oe.path.join(d.getVar("D"), d.getVar("sbindir"), "ld10k1d"))
- except:
- pass
-}
-
-FILES:${PN} += "${datadir}"
diff --git a/meta/recipes-multimedia/alsa/alsa-ucm-conf_1.2.11.bb b/meta/recipes-multimedia/alsa/alsa-ucm-conf_1.2.11.bb
new file mode 100644
index 0000000000..ce1a42c9b8
--- /dev/null
+++ b/meta/recipes-multimedia/alsa/alsa-ucm-conf_1.2.11.bb
@@ -0,0 +1,24 @@
+SUMMARY = "ALSA Use Case Manager configuration"
+DESCRIPTION = "This package contains ALSA Use Case Manager configuration \
+of audio input/output names and routing for specific audio hardware. \
+They can be used with the alsaucm tool. "
+HOMEPAGE = "https://alsa-project.org"
+BUGTRACKER = "https://alsa-project.org/wiki/Bug_Tracking"
+LICENSE = "BSD-3-Clause"
+LIC_FILES_CHKSUM = "file://LICENSE;md5=20d74d74db9741697903372ad001d3b4"
+
+SRC_URI = "https://www.alsa-project.org/files/pub/lib/${BP}.tar.bz2"
+SRC_URI[sha256sum] = "387c01cf30e2a1676d7b8f72b2681cf219abca70dd1ec2a9e33add5bf3feae81"
+# Something went wrong at upstream tarballing
+
+inherit allarch
+
+do_install() {
+ install -d "${D}${datadir}/alsa"
+ cp -r "${S}/ucm" "${D}${datadir}/alsa"
+ cp -r "${S}/ucm2" "${D}${datadir}/alsa"
+}
+
+PACKAGES = "${PN}"
+
+FILES:${PN} = "*"
diff --git a/meta/recipes-multimedia/alsa/alsa-ucm-conf_1.2.6.3.bb b/meta/recipes-multimedia/alsa/alsa-ucm-conf_1.2.6.3.bb
deleted file mode 100644
index a493269063..0000000000
--- a/meta/recipes-multimedia/alsa/alsa-ucm-conf_1.2.6.3.bb
+++ /dev/null
@@ -1,24 +0,0 @@
-SUMMARY = "ALSA Use Case Manager configuration"
-DESCRIPTION = "This package contains ALSA Use Case Manager configuration \
-of audio input/output names and routing for specific audio hardware. \
-They can be used with the alsaucm tool. "
-HOMEPAGE = "https://alsa-project.org"
-BUGTRACKER = "https://alsa-project.org/wiki/Bug_Tracking"
-LICENSE = "BSD-3-Clause"
-LIC_FILES_CHKSUM = "file://LICENSE;md5=20d74d74db9741697903372ad001d3b4"
-
-SRC_URI = "https://www.alsa-project.org/files/pub/lib/${BP}.tar.bz2"
-SRC_URI[sha256sum] = "b8a03aa387a624a2f65edc201bf777421190b60529a92087646823afbd96c5cd"
-# Something went wrong at upstream tarballing
-
-inherit allarch
-
-do_install() {
- install -d "${D}${datadir}/alsa"
- cp -r "${S}/ucm" "${D}${datadir}/alsa"
- cp -r "${S}/ucm2" "${D}${datadir}/alsa"
-}
-
-PACKAGES = "${PN}"
-
-FILES:${PN} = "*"
diff --git a/meta/recipes-multimedia/alsa/alsa-utils-scripts_1.2.6.bb b/meta/recipes-multimedia/alsa/alsa-utils-scripts_1.2.6.bb
deleted file mode 100644
index 000e984a7d..0000000000
--- a/meta/recipes-multimedia/alsa/alsa-utils-scripts_1.2.6.bb
+++ /dev/null
@@ -1,25 +0,0 @@
-require alsa-utils.inc
-
-SUMMARY = "Shell scripts that show help info and create ALSA configuration files"
-PROVIDES = "alsa-utils-alsaconf"
-
-FILESEXTRAPATHS:prepend := "${THISDIR}/alsa-utils:"
-
-PACKAGES = "${PN}"
-RDEPENDS:${PN} += "bash"
-
-FILES:${PN} = "${sbindir}/alsaconf \
- ${sbindir}/alsa-info.sh \
- ${sbindir}/alsabat-test.sh \
- "
-
-S = "${WORKDIR}/alsa-utils-${PV}"
-
-do_install() {
- install -d ${D}${sbindir}
- install -m 0755 ${B}/alsaconf/alsaconf ${D}${sbindir}/
- install -m 0755 ${S}/alsa-info/alsa-info.sh ${D}${sbindir}/
- if ${@bb.utils.contains('PACKAGECONFIG', 'bat', 'true', 'false', d)}; then
- install -m 0755 ${S}/bat/alsabat-test.sh ${D}${sbindir}/
- fi
-}
diff --git a/meta/recipes-multimedia/alsa/alsa-utils.inc b/meta/recipes-multimedia/alsa/alsa-utils.inc
deleted file mode 100644
index bca7cdd3bd..0000000000
--- a/meta/recipes-multimedia/alsa/alsa-utils.inc
+++ /dev/null
@@ -1,108 +0,0 @@
-SUMMARY = "ALSA sound utilities"
-DESCRIPTION = "collection of small and often extremely powerful applications \
-designed to allow users to control the various parts of the ALSA system."
-HOMEPAGE = "http://www.alsa-project.org"
-BUGTRACKER = "http://alsa-project.org/main/index.php/Bug_Tracking"
-SECTION = "console/utils"
-# Some parts are GPL-2.0-or-later, some are GPL-2.0-only (e.g. axfer, alsactl)
-# so result is GPL-2.0-only
-LICENSE = "GPL-2.0-only"
-LIC_FILES_CHKSUM = "file://COPYING;md5=59530bdf33659b29e73d4adb9f9f6552 \
- file://alsactl/utils.c;beginline=3;endline=18;md5=96cc06a4cebe5eb7975688ffb0e65642"
-DEPENDS = "alsa-lib ncurses libsamplerate0"
-
-PACKAGECONFIG ??= "udev"
-
-# alsabat can be built also without fftw support (with reduced functionality).
-# It would be better to always enable alsabat, but provide an option for
-# enabling/disabling fftw. The configure script doesn't support that, however
-# (at least in any obvious way), so for now we only support alsabat with fftw
-# or no alsabat at all.
-PACKAGECONFIG[bat] = "--enable-bat,--disable-bat,fftwf"
-
-PACKAGECONFIG[udev] = "--with-udev-rules-dir=`pkg-config --variable=udevdir udev`/rules.d,--with-udev-rules-dir=/unwanted/rules.d,udev"
-PACKAGECONFIG[manpages] = "--enable-xmlto, --disable-xmlto, xmlto-native docbook-xml-dtd4-native docbook-xsl-stylesheets-native"
-
-# alsa-utils specified in SRC_URI due to alsa-utils-scripts recipe
-SRC_URI = "https://www.alsa-project.org/files/pub/utils/alsa-utils-${PV}.tar.bz2"
-SRC_URI[sha256sum] = "6a1efd8a1f1d9d38e489633eaec1fffa5c315663b316cab804be486887e6145d"
-
-# On build machines with python-docutils (not python3-docutils !!) installed
-# rst2man (not rst2man.py) is detected and compile fails with
-# | make[1]: *** No rule to make target 'alsaucm.1', needed by 'all-am'. Stop.
-# Avoid this by disabling expicitly
-EXTRA_OECONF = "--disable-rst2man"
-
-inherit autotools gettext pkgconfig manpages
-
-# This are all packages that we need to make. Also, the now empty alsa-utils
-# ipk depends on them.
-
-ALSA_UTILS_PKGS = "\
- ${@bb.utils.contains('PACKAGECONFIG', 'bat', 'alsa-utils-alsabat', '', d)} \
- alsa-utils-alsamixer \
- alsa-utils-alsatplg \
- alsa-utils-midi \
- alsa-utils-aplay \
- alsa-utils-amixer \
- alsa-utils-aconnect \
- alsa-utils-iecset \
- alsa-utils-speakertest \
- alsa-utils-aseqnet \
- alsa-utils-aseqdump \
- alsa-utils-alsactl \
- alsa-utils-alsaloop \
- alsa-utils-alsaucm \
- "
-
-PACKAGES += "${ALSA_UTILS_PKGS}"
-RDEPENDS:${PN} += "${ALSA_UTILS_PKGS}"
-
-FILES:${PN} = ""
-ALLOW_EMPTY:alsa-utils = "1"
-FILES:alsa-utils-alsabat = "${bindir}/alsabat"
-FILES:alsa-utils-alsatplg = "${bindir}/alsatplg"
-FILES:alsa-utils-aplay = "${bindir}/aplay ${bindir}/arecord ${bindir}/axfer"
-FILES:alsa-utils-amixer = "${bindir}/amixer"
-FILES:alsa-utils-alsamixer = "${bindir}/alsamixer"
-FILES:alsa-utils-speakertest = "${bindir}/speaker-test ${datadir}/sounds/alsa/ ${datadir}/alsa/speaker-test/"
-FILES:alsa-utils-midi = "${bindir}/aplaymidi ${bindir}/arecordmidi ${bindir}/amidi"
-FILES:alsa-utils-aconnect = "${bindir}/aconnect"
-FILES:alsa-utils-aseqnet = "${bindir}/aseqnet"
-FILES:alsa-utils-iecset = "${bindir}/iecset"
-FILES:alsa-utils-alsactl = "${sbindir}/alsactl */udev/rules.d/90-alsa-restore.rules */*/udev/rules.d/90-alsa-restore.rules ${systemd_unitdir} ${localstatedir}/lib/alsa ${datadir}/alsa/init/"
-FILES:alsa-utils-aseqdump = "${bindir}/aseqdump"
-FILES:alsa-utils-alsaloop = "${bindir}/alsaloop"
-FILES:alsa-utils-alsaucm = "${bindir}/alsaucm */udev/rules.d/89-alsa-ucm.rules */*/udev/rules.d/89-alsa-ucm.rules"
-
-SUMMARY:alsa-utils-alsabat = "Command-line sound tester for ALSA sound card driver"
-SUMMARY:alsa-utils-alsatplg = "Converts topology text files into binary format for kernel"
-SUMMARY:alsa-utils-aplay = "Play (and record) sound files using ALSA"
-SUMMARY:alsa-utils-amixer = "Command-line control for ALSA mixer and settings"
-SUMMARY:alsa-utils-alsamixer = "ncurses-based control for ALSA mixer and settings"
-SUMMARY:alsa-utils-speakertest = "ALSA surround speaker test utility"
-SUMMARY:alsa-utils-midi = "Miscellaneous MIDI utilities for ALSA"
-SUMMARY:alsa-utils-aconnect = "ALSA sequencer connection manager"
-SUMMARY:alsa-utils-aseqnet = "Network client/server for ALSA sequencer"
-SUMMARY:alsa-utils-iecset = "ALSA utility for setting/showing IEC958 (S/PDIF) status bits"
-SUMMARY:alsa-utils-alsactl = "Saves/restores ALSA-settings in /etc/asound.state"
-SUMMARY:alsa-utils-aseqdump = "Shows the events received at an ALSA sequencer port"
-SUMMARY:alsa-utils-alsaloop = "ALSA PCM loopback utility"
-SUMMARY:alsa-utils-alsaucm = "ALSA Use Case Manager"
-
-RRECOMMENDS:alsa-utils-alsactl = "alsa-states"
-
-do_install() {
- autotools_do_install
-
- # We don't ship this here because it requires a dependency on bash.
- # See alsa-utils-scripts_${PV}.bb
- rm ${D}${sbindir}/alsaconf
- rm ${D}${sbindir}/alsa-info.sh
- rm -f ${D}${sbindir}/alsabat-test.sh
-
- # If udev is disabled, we told configure to install the rules
- # in /unwanted, so we can remove them now. If udev is enabled,
- # then /unwanted won't exist and this will have no effect.
- rm -rf ${D}/unwanted
-}
diff --git a/meta/recipes-multimedia/alsa/alsa-utils_1.2.11.bb b/meta/recipes-multimedia/alsa/alsa-utils_1.2.11.bb
new file mode 100644
index 0000000000..e598fac9f8
--- /dev/null
+++ b/meta/recipes-multimedia/alsa/alsa-utils_1.2.11.bb
@@ -0,0 +1,124 @@
+SUMMARY = "ALSA sound utilities"
+DESCRIPTION = "collection of small and often extremely powerful applications \
+designed to allow users to control the various parts of the ALSA system."
+HOMEPAGE = "http://www.alsa-project.org"
+BUGTRACKER = "http://alsa-project.org/main/index.php/Bug_Tracking"
+SECTION = "console/utils"
+# Some parts are GPL-2.0-or-later, some are GPL-2.0-only (e.g. axfer, alsactl)
+# so result is GPL-2.0-only
+LICENSE = "GPL-2.0-only"
+LIC_FILES_CHKSUM = "file://COPYING;md5=59530bdf33659b29e73d4adb9f9f6552 \
+ file://alsactl/utils.c;beginline=3;endline=18;md5=96cc06a4cebe5eb7975688ffb0e65642"
+DEPENDS = "alsa-lib ncurses libsamplerate0"
+
+PACKAGECONFIG ??= "udev"
+
+# alsabat can be built also without fftw support (with reduced functionality).
+# It would be better to always enable alsabat, but provide an option for
+# enabling/disabling fftw. The configure script doesn't support that, however
+# (at least in any obvious way), so for now we only support alsabat with fftw
+# or no alsabat at all.
+PACKAGECONFIG[bat] = "--enable-bat,--disable-bat,fftwf"
+
+PACKAGECONFIG[udev] = "--with-udev-rules-dir=`pkg-config --variable=udevdir udev`/rules.d,--with-udev-rules-dir=/unwanted/rules.d,udev"
+PACKAGECONFIG[manpages] = "--enable-xmlto, --disable-xmlto, xmlto-native docbook-xml-dtd4-native docbook-xsl-stylesheets-native"
+
+# alsa-utils specified in SRC_URI due to alsa-utils-scripts recipe
+SRC_URI = "https://www.alsa-project.org/files/pub/utils/alsa-utils-${PV}.tar.bz2 \
+ "
+SRC_URI[sha256sum] = "9ac6ca3a883f151e568dcf979b8d2e5cbecc51b819bb0e6bb8a2e9b34cc428a7"
+
+# On build machines with python-docutils (not python3-docutils !!) installed
+# rst2man (not rst2man.py) is detected and compile fails with
+# | make[1]: *** No rule to make target 'alsaucm.1', needed by 'all-am'. Stop.
+# Avoid this by disabling expicitly
+EXTRA_OECONF = "--disable-rst2man"
+
+inherit autotools gettext pkgconfig manpages
+
+# This are all packages that we need to make. Also, the now empty alsa-utils
+# ipk depends on them.
+
+ALSA_UTILS_PKGS = "\
+ ${@bb.utils.contains('PACKAGECONFIG', 'bat', 'alsa-utils-alsabat', '', d)} \
+ alsa-utils-alsamixer \
+ alsa-utils-alsatplg \
+ alsa-utils-midi \
+ alsa-utils-aplay \
+ alsa-utils-amixer \
+ alsa-utils-aconnect \
+ alsa-utils-iecset \
+ alsa-utils-speakertest \
+ alsa-utils-aseqnet \
+ alsa-utils-aseqdump \
+ alsa-utils-alsactl \
+ alsa-utils-alsaloop \
+ alsa-utils-alsaucm \
+ alsa-utils-scripts \
+ alsa-utils-nhltdmicinfo \
+ "
+
+PACKAGES += "${ALSA_UTILS_PKGS}"
+RDEPENDS:${PN} += "${ALSA_UTILS_PKGS}"
+
+FILES:${PN} = ""
+ALLOW_EMPTY:alsa-utils = "1"
+FILES:alsa-utils-alsabat = "${bindir}/alsabat"
+FILES:alsa-utils-alsatplg = "${bindir}/alsatplg ${libdir}/alsa-topology"
+FILES:alsa-utils-aplay = "${bindir}/aplay ${bindir}/arecord ${bindir}/axfer"
+FILES:alsa-utils-amixer = "${bindir}/amixer"
+FILES:alsa-utils-alsamixer = "${bindir}/alsamixer"
+FILES:alsa-utils-speakertest = "${bindir}/speaker-test ${datadir}/sounds/alsa/ ${datadir}/alsa/speaker-test/"
+FILES:alsa-utils-midi = "${bindir}/aplaymidi ${bindir}/arecordmidi ${bindir}/amidi"
+FILES:alsa-utils-aconnect = "${bindir}/aconnect"
+FILES:alsa-utils-aseqnet = "${bindir}/aseqnet"
+FILES:alsa-utils-iecset = "${bindir}/iecset"
+FILES:alsa-utils-alsactl = "${sbindir}/alsactl */udev/rules.d/90-alsa-restore.rules */*/udev/rules.d/90-alsa-restore.rules ${systemd_unitdir} ${localstatedir}/lib/alsa ${datadir}/alsa/init/"
+FILES:alsa-utils-aseqdump = "${bindir}/aseqdump"
+FILES:alsa-utils-alsaloop = "${bindir}/alsaloop"
+FILES:alsa-utils-alsaucm = "${bindir}/alsaucm */udev/rules.d/89-alsa-ucm.rules */*/udev/rules.d/89-alsa-ucm.rules"
+FILES:alsa-utils-scripts = "${sbindir}/alsaconf \
+ ${sbindir}/alsa-info.sh \
+ ${sbindir}/alsabat-test.sh \
+ "
+FILES:alsa-utils-nhltdmicinfo = "${bindir}/nhlt-dmic-info"
+
+SUMMARY:alsa-utils-alsabat = "Command-line sound tester for ALSA sound card driver"
+SUMMARY:alsa-utils-alsatplg = "Converts topology text files into binary format for kernel"
+SUMMARY:alsa-utils-aplay = "Play (and record) sound files using ALSA"
+SUMMARY:alsa-utils-amixer = "Command-line control for ALSA mixer and settings"
+SUMMARY:alsa-utils-alsamixer = "ncurses-based control for ALSA mixer and settings"
+SUMMARY:alsa-utils-speakertest = "ALSA surround speaker test utility"
+SUMMARY:alsa-utils-midi = "Miscellaneous MIDI utilities for ALSA"
+SUMMARY:alsa-utils-aconnect = "ALSA sequencer connection manager"
+SUMMARY:alsa-utils-aseqnet = "Network client/server for ALSA sequencer"
+SUMMARY:alsa-utils-iecset = "ALSA utility for setting/showing IEC958 (S/PDIF) status bits"
+SUMMARY:alsa-utils-alsactl = "Saves/restores ALSA-settings in /etc/asound.state"
+SUMMARY:alsa-utils-aseqdump = "Shows the events received at an ALSA sequencer port"
+SUMMARY:alsa-utils-alsaloop = "ALSA PCM loopback utility"
+SUMMARY:alsa-utils-alsaucm = "ALSA Use Case Manager"
+SUMMARY:alsa-utils-scripts = "Shell scripts that show help info and create ALSA configuration files"
+SUMMARY:alsa-utils-nhltdmicinfo = "Dumps microphone array information from ACPI NHLT table"
+
+RRECOMMENDS:alsa-utils-alsactl = "alsa-states"
+
+do_install() {
+ autotools_do_install
+
+ install -d ${D}${sbindir}
+ install -m 0755 ${B}/alsaconf/alsaconf ${D}${sbindir}/
+ install -m 0755 ${S}/alsa-info/alsa-info.sh ${D}${sbindir}/
+ if ${@bb.utils.contains('PACKAGECONFIG', 'bat', 'true', 'false', d)}; then
+ install -m 0755 ${S}/bat/alsabat-test.sh ${D}${sbindir}/
+ fi
+
+ # If udev is disabled, we told configure to install the rules
+ # in /unwanted, so we can remove them now. If udev is enabled,
+ # then /unwanted won't exist and this will have no effect.
+ rm -rf ${D}/unwanted
+}
+
+
+PROVIDES = "alsa-utils-alsaconf alsa-utils-scripts"
+
+RDEPENDS:${PN}-scripts += "bash"
diff --git a/meta/recipes-multimedia/alsa/alsa-utils_1.2.6.bb b/meta/recipes-multimedia/alsa/alsa-utils_1.2.6.bb
deleted file mode 100644
index 3430288da4..0000000000
--- a/meta/recipes-multimedia/alsa/alsa-utils_1.2.6.bb
+++ /dev/null
@@ -1,2 +0,0 @@
-require alsa-utils.inc
-
diff --git a/meta/recipes-multimedia/ffmpeg/ffmpeg/0001-libavutil-include-assembly-with-full-path-from-sourc.patch b/meta/recipes-multimedia/ffmpeg/ffmpeg/0001-libavutil-include-assembly-with-full-path-from-sourc.patch
deleted file mode 100644
index 7d0a06f85b..0000000000
--- a/meta/recipes-multimedia/ffmpeg/ffmpeg/0001-libavutil-include-assembly-with-full-path-from-sourc.patch
+++ /dev/null
@@ -1,112 +0,0 @@
-From 4a891e1eddbf63f32fe769b5bff289f6748abf45 Mon Sep 17 00:00:00 2001
-From: Alexander Kanavin <alex.kanavin@gmail.com>
-Date: Tue, 10 Nov 2020 15:32:14 +0000
-Subject: [PATCH] libavutil: include assembly with full path from source root
-
-Otherwise nasm writes the full host-specific paths into .o
-output, which breaks binary reproducibility.
-
-Upstream-Status: Submitted [http://ffmpeg.org/pipermail/ffmpeg-devel/2022-January/291781.html]
-Signed-off-by: Alexander Kanavin <alex.kanavin@gmail.com>
-
----
- libavutil/x86/cpuid.asm | 2 +-
- libavutil/x86/emms.asm | 2 +-
- libavutil/x86/fixed_dsp.asm | 2 +-
- libavutil/x86/float_dsp.asm | 2 +-
- libavutil/x86/lls.asm | 2 +-
- libavutil/x86/pixelutils.asm | 2 +-
- libavutil/x86/tx_float.asm | 2 +-
- 7 files changed, 7 insertions(+), 7 deletions(-)
-
-diff --git a/libavutil/x86/cpuid.asm b/libavutil/x86/cpuid.asm
-index c3f7866..766f77f 100644
---- a/libavutil/x86/cpuid.asm
-+++ b/libavutil/x86/cpuid.asm
-@@ -21,7 +21,7 @@
- ;* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
- ;******************************************************************************
-
--%include "x86util.asm"
-+%include "libavutil/x86/x86util.asm"
-
- SECTION .text
-
-diff --git a/libavutil/x86/emms.asm b/libavutil/x86/emms.asm
-index 8611762..df84f22 100644
---- a/libavutil/x86/emms.asm
-+++ b/libavutil/x86/emms.asm
-@@ -18,7 +18,7 @@
- ;* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
- ;******************************************************************************
-
--%include "x86util.asm"
-+%include "libavutil/x86/x86util.asm"
-
- SECTION .text
-
-diff --git a/libavutil/x86/fixed_dsp.asm b/libavutil/x86/fixed_dsp.asm
-index 979dd5c..2f41185 100644
---- a/libavutil/x86/fixed_dsp.asm
-+++ b/libavutil/x86/fixed_dsp.asm
-@@ -20,7 +20,7 @@
- ;* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
- ;******************************************************************************
-
--%include "x86util.asm"
-+%include "libavutil/x86/x86util.asm"
-
- SECTION .text
-
-diff --git a/libavutil/x86/float_dsp.asm b/libavutil/x86/float_dsp.asm
-index 517fd63..b773e61 100644
---- a/libavutil/x86/float_dsp.asm
-+++ b/libavutil/x86/float_dsp.asm
-@@ -20,7 +20,7 @@
- ;* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
- ;******************************************************************************
-
--%include "x86util.asm"
-+%include "libavutil/x86/x86util.asm"
-
- SECTION_RODATA 32
- pd_reverse: dd 7, 6, 5, 4, 3, 2, 1, 0
-diff --git a/libavutil/x86/lls.asm b/libavutil/x86/lls.asm
-index 317fba6..d2526d1 100644
---- a/libavutil/x86/lls.asm
-+++ b/libavutil/x86/lls.asm
-@@ -20,7 +20,7 @@
- ;* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
- ;******************************************************************************
-
--%include "x86util.asm"
-+%include "libavutil/x86/x86util.asm"
-
- SECTION .text
-
-diff --git a/libavutil/x86/pixelutils.asm b/libavutil/x86/pixelutils.asm
-index 36c57c5..8b45ead 100644
---- a/libavutil/x86/pixelutils.asm
-+++ b/libavutil/x86/pixelutils.asm
-@@ -21,7 +21,7 @@
- ;* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
- ;******************************************************************************
-
--%include "x86util.asm"
-+%include "libavutil/x86/x86util.asm"
-
- SECTION .text
-
-diff --git a/libavutil/x86/tx_float.asm b/libavutil/x86/tx_float.asm
-index 4d2283f..ea39f21 100644
---- a/libavutil/x86/tx_float.asm
-+++ b/libavutil/x86/tx_float.asm
-@@ -29,7 +29,7 @@
- ; replace some shuffles with vblends?
- ; avx512 split-radix
-
--%include "x86util.asm"
-+%include "libavutil/x86/x86util.asm"
-
- %if ARCH_X86_64
- %define ptr resq
diff --git a/meta/recipes-multimedia/ffmpeg/ffmpeg_5.0.1.bb b/meta/recipes-multimedia/ffmpeg/ffmpeg_5.0.1.bb
deleted file mode 100644
index dd14f8df6f..0000000000
--- a/meta/recipes-multimedia/ffmpeg/ffmpeg_5.0.1.bb
+++ /dev/null
@@ -1,176 +0,0 @@
-SUMMARY = "A complete, cross-platform solution to record, convert and stream audio and video."
-DESCRIPTION = "FFmpeg is the leading multimedia framework, able to decode, encode, transcode, \
- mux, demux, stream, filter and play pretty much anything that humans and machines \
- have created. It supports the most obscure ancient formats up to the cutting edge."
-HOMEPAGE = "https://www.ffmpeg.org/"
-SECTION = "libs"
-
-LICENSE = "GPL-2.0-or-later & LGPL-2.1-or-later & ISC & MIT & BSD-2-Clause & BSD-3-Clause & IJG"
-LICENSE:${PN} = "GPL-2.0-or-later"
-LICENSE:libavcodec = "${@bb.utils.contains('PACKAGECONFIG', 'gpl', 'GPL-2.0-or-later', 'LGPL-2.1-or-later', d)}"
-LICENSE:libavdevice = "${@bb.utils.contains('PACKAGECONFIG', 'gpl', 'GPL-2.0-or-later', 'LGPL-2.1-or-later', d)}"
-LICENSE:libavfilter = "${@bb.utils.contains('PACKAGECONFIG', 'gpl', 'GPL-2.0-or-later', 'LGPL-2.1-or-later', d)}"
-LICENSE:libavformat = "${@bb.utils.contains('PACKAGECONFIG', 'gpl', 'GPL-2.0-or-later', 'LGPL-2.1-or-later', d)}"
-LICENSE:libavutil = "${@bb.utils.contains('PACKAGECONFIG', 'gpl', 'GPL-2.0-or-later', 'LGPL-2.1-or-later', d)}"
-LICENSE:libpostproc = "GPL-2.0-or-later"
-LICENSE:libswresample = "${@bb.utils.contains('PACKAGECONFIG', 'gpl', 'GPL-2.0-or-later', 'LGPL-2.1-or-later', d)}"
-LICENSE:libswscale = "${@bb.utils.contains('PACKAGECONFIG', 'gpl', 'GPL-2.0-or-later', 'LGPL-2.1-or-later', d)}"
-LICENSE_FLAGS = "commercial"
-
-LIC_FILES_CHKSUM = "file://COPYING.GPLv2;md5=b234ee4d69f5fce4486a80fdaf4a4263 \
- file://COPYING.GPLv3;md5=d32239bcb673463ab874e80d47fae504 \
- file://COPYING.LGPLv2.1;md5=bd7a443320af8c812e4c18d1b79df004 \
- file://COPYING.LGPLv3;md5=e6a600fd5e1d9cbde2d983680233ad02"
-
-SRC_URI = "https://www.ffmpeg.org/releases/${BP}.tar.xz \
- file://0001-libavutil-include-assembly-with-full-path-from-sourc.patch \
- "
-SRC_URI[sha256sum] = "ef2efae259ce80a240de48ec85ecb062cecca26e4352ffb3fda562c21a93007b"
-
-# Build fails when thumb is enabled: https://bugzilla.yoctoproject.org/show_bug.cgi?id=7717
-ARM_INSTRUCTION_SET:armv4 = "arm"
-ARM_INSTRUCTION_SET:armv5 = "arm"
-ARM_INSTRUCTION_SET:armv6 = "arm"
-
-# Should be API compatible with libav (which was a fork of ffmpeg)
-# libpostproc was previously packaged from a separate recipe
-PROVIDES = "libav libpostproc"
-
-DEPENDS = "nasm-native"
-
-inherit autotools pkgconfig
-
-PACKAGECONFIG ??= "avdevice avfilter avcodec avformat swresample swscale postproc \
- alsa bzlib lzma pic pthreads shared theora zlib \
- ${@bb.utils.contains('AVAILTUNES', 'mips32r2', 'mips32r2', '', d)} \
- ${@bb.utils.contains('DISTRO_FEATURES', 'x11', 'xv xcb', '', d)}"
-
-# libraries to build in addition to avutil
-PACKAGECONFIG[avdevice] = "--enable-avdevice,--disable-avdevice"
-PACKAGECONFIG[avfilter] = "--enable-avfilter,--disable-avfilter"
-PACKAGECONFIG[avcodec] = "--enable-avcodec,--disable-avcodec"
-PACKAGECONFIG[avformat] = "--enable-avformat,--disable-avformat"
-PACKAGECONFIG[swresample] = "--enable-swresample,--disable-swresample"
-PACKAGECONFIG[swscale] = "--enable-swscale,--disable-swscale"
-PACKAGECONFIG[postproc] = "--enable-postproc,--disable-postproc"
-
-# features to support
-PACKAGECONFIG[alsa] = "--enable-alsa,--disable-alsa,alsa-lib"
-PACKAGECONFIG[altivec] = "--enable-altivec,--disable-altivec,"
-PACKAGECONFIG[bzlib] = "--enable-bzlib,--disable-bzlib,bzip2"
-PACKAGECONFIG[fdk-aac] = "--enable-libfdk-aac --enable-nonfree,--disable-libfdk-aac,fdk-aac"
-PACKAGECONFIG[gpl] = "--enable-gpl,--disable-gpl"
-PACKAGECONFIG[gsm] = "--enable-libgsm,--disable-libgsm,libgsm"
-PACKAGECONFIG[jack] = "--enable-indev=jack,--disable-indev=jack,jack"
-PACKAGECONFIG[libopus] = "--enable-libopus,--disable-libopus,libopus"
-PACKAGECONFIG[libvorbis] = "--enable-libvorbis,--disable-libvorbis,libvorbis"
-PACKAGECONFIG[lzma] = "--enable-lzma,--disable-lzma,xz"
-PACKAGECONFIG[mfx] = "--enable-libmfx,--disable-libmfx,intel-mediasdk"
-PACKAGECONFIG[mp3lame] = "--enable-libmp3lame,--disable-libmp3lame,lame"
-PACKAGECONFIG[openssl] = "--enable-openssl,--disable-openssl,openssl"
-PACKAGECONFIG[sdl2] = "--enable-sdl2,--disable-sdl2,virtual/libsdl2"
-PACKAGECONFIG[speex] = "--enable-libspeex,--disable-libspeex,speex"
-PACKAGECONFIG[srt] = "--enable-libsrt,--disable-libsrt,srt"
-PACKAGECONFIG[theora] = "--enable-libtheora,--disable-libtheora,libtheora libogg"
-PACKAGECONFIG[vaapi] = "--enable-vaapi,--disable-vaapi,libva"
-PACKAGECONFIG[vdpau] = "--enable-vdpau,--disable-vdpau,libvdpau"
-PACKAGECONFIG[vpx] = "--enable-libvpx,--disable-libvpx,libvpx"
-PACKAGECONFIG[x264] = "--enable-libx264,--disable-libx264,x264"
-PACKAGECONFIG[x265] = "--enable-libx265,--disable-libx265,x265"
-PACKAGECONFIG[xcb] = "--enable-libxcb,--disable-libxcb,libxcb"
-PACKAGECONFIG[xv] = "--enable-outdev=xv,--disable-outdev=xv,libxv"
-PACKAGECONFIG[zlib] = "--enable-zlib,--disable-zlib,zlib"
-
-# other configuration options
-PACKAGECONFIG[mips32r2] = ",--disable-mipsdsp --disable-mipsdspr2"
-PACKAGECONFIG[pic] = "--enable-pic"
-PACKAGECONFIG[pthreads] = "--enable-pthreads,--disable-pthreads"
-PACKAGECONFIG[shared] = "--enable-shared"
-PACKAGECONFIG[strip] = ",--disable-stripping"
-
-# Check codecs that require --enable-nonfree
-USE_NONFREE = "${@bb.utils.contains_any('PACKAGECONFIG', [ 'openssl' ], 'yes', '', d)}"
-
-def cpu(d):
- for arg in (d.getVar('TUNE_CCARGS') or '').split():
- if arg.startswith('-mcpu='):
- return arg[6:]
- return 'generic'
-
-EXTRA_OECONF = " \
- ${@bb.utils.contains('USE_NONFREE', 'yes', '--enable-nonfree', '', d)} \
- \
- --cross-prefix=${TARGET_PREFIX} \
- \
- --ld='${CCLD}' \
- --cc='${CC}' \
- --cxx='${CXX}' \
- --arch=${TARGET_ARCH} \
- --target-os='linux' \
- --enable-cross-compile \
- --extra-cflags='${CFLAGS} ${HOST_CC_ARCH}${TOOLCHAIN_OPTIONS}' \
- --extra-ldflags='${LDFLAGS}' \
- --sysroot='${STAGING_DIR_TARGET}' \
- ${EXTRA_FFCONF} \
- --libdir=${libdir} \
- --shlibdir=${libdir} \
- --datadir=${datadir}/ffmpeg \
- --cpu=${@cpu(d)} \
- --pkg-config=pkg-config \
-"
-
-EXTRA_OECONF:append:linux-gnux32 = " --disable-asm"
-
-EXTRA_OECONF += "${@bb.utils.contains('TUNE_FEATURES', 'mipsisa64r6', '--disable-mips64r2 --disable-mips32r2', '', d)}"
-EXTRA_OECONF += "${@bb.utils.contains('TUNE_FEATURES', 'mipsisa64r2', '--disable-mips64r6 --disable-mips32r6', '', d)}"
-EXTRA_OECONF += "${@bb.utils.contains('TUNE_FEATURES', 'mips32r2', '--disable-mips64r6 --disable-mips32r6', '', d)}"
-EXTRA_OECONF += "${@bb.utils.contains('TUNE_FEATURES', 'mips32r6', '--disable-mips64r2 --disable-mips32r2', '', d)}"
-EXTRA_OECONF:append:mips = " --extra-libs=-latomic --disable-mips32r5 --disable-mipsdsp --disable-mipsdspr2 \
- --disable-loongson2 --disable-loongson3 --disable-mmi --disable-msa"
-EXTRA_OECONF:append:riscv32 = " --extra-libs=-latomic"
-EXTRA_OECONF:append:armv5 = " --extra-libs=-latomic"
-EXTRA_OECONF:append:powerpc = " --extra-libs=-latomic"
-
-# gold crashes on x86, another solution is to --disable-asm but thats more hacky
-# ld.gold: internal error in relocate_section, at ../../gold/i386.cc:3684
-
-LDFLAGS:append:x86 = "${@bb.utils.contains('DISTRO_FEATURES', 'ld-is-gold', ' -fuse-ld=bfd ', '', d)}"
-
-EXTRA_OEMAKE = "V=1"
-
-do_configure() {
- ${S}/configure ${EXTRA_OECONF}
-}
-
-# patch out build host paths for reproducibility
-do_compile:prepend:class-target() {
- sed -i -e "s,${WORKDIR},,g" ${B}/config.h
-}
-
-PACKAGES =+ "libavcodec \
- libavdevice \
- libavfilter \
- libavformat \
- libavutil \
- libpostproc \
- libswresample \
- libswscale"
-
-FILES:libavcodec = "${libdir}/libavcodec${SOLIBS}"
-FILES:libavdevice = "${libdir}/libavdevice${SOLIBS}"
-FILES:libavfilter = "${libdir}/libavfilter${SOLIBS}"
-FILES:libavformat = "${libdir}/libavformat${SOLIBS}"
-FILES:libavutil = "${libdir}/libavutil${SOLIBS}"
-FILES:libpostproc = "${libdir}/libpostproc${SOLIBS}"
-FILES:libswresample = "${libdir}/libswresample${SOLIBS}"
-FILES:libswscale = "${libdir}/libswscale${SOLIBS}"
-
-# ffmpeg disables PIC on some platforms (e.g. x86-32)
-INSANE_SKIP:${MLPREFIX}libavcodec = "textrel"
-INSANE_SKIP:${MLPREFIX}libavdevice = "textrel"
-INSANE_SKIP:${MLPREFIX}libavfilter = "textrel"
-INSANE_SKIP:${MLPREFIX}libavformat = "textrel"
-INSANE_SKIP:${MLPREFIX}libavutil = "textrel"
-INSANE_SKIP:${MLPREFIX}libswscale = "textrel"
-INSANE_SKIP:${MLPREFIX}libswresample = "textrel"
-INSANE_SKIP:${MLPREFIX}libpostproc = "textrel"
diff --git a/meta/recipes-multimedia/ffmpeg/ffmpeg_6.1.1.bb b/meta/recipes-multimedia/ffmpeg/ffmpeg_6.1.1.bb
new file mode 100644
index 0000000000..aa59755034
--- /dev/null
+++ b/meta/recipes-multimedia/ffmpeg/ffmpeg_6.1.1.bb
@@ -0,0 +1,186 @@
+SUMMARY = "A complete, cross-platform solution to record, convert and stream audio and video."
+DESCRIPTION = "FFmpeg is the leading multimedia framework, able to decode, encode, transcode, \
+ mux, demux, stream, filter and play pretty much anything that humans and machines \
+ have created. It supports the most obscure ancient formats up to the cutting edge."
+HOMEPAGE = "https://www.ffmpeg.org/"
+SECTION = "libs"
+
+LICENSE = "GPL-2.0-or-later & LGPL-2.1-or-later & ISC & MIT & BSD-2-Clause & BSD-3-Clause & IJG"
+LICENSE:${PN} = "GPL-2.0-or-later"
+LICENSE:libavcodec = "${@bb.utils.contains('PACKAGECONFIG', 'gpl', 'GPL-2.0-or-later', 'LGPL-2.1-or-later', d)}"
+LICENSE:libavdevice = "${@bb.utils.contains('PACKAGECONFIG', 'gpl', 'GPL-2.0-or-later', 'LGPL-2.1-or-later', d)}"
+LICENSE:libavfilter = "${@bb.utils.contains('PACKAGECONFIG', 'gpl', 'GPL-2.0-or-later', 'LGPL-2.1-or-later', d)}"
+LICENSE:libavformat = "${@bb.utils.contains('PACKAGECONFIG', 'gpl', 'GPL-2.0-or-later', 'LGPL-2.1-or-later', d)}"
+LICENSE:libavutil = "${@bb.utils.contains('PACKAGECONFIG', 'gpl', 'GPL-2.0-or-later', 'LGPL-2.1-or-later', d)}"
+LICENSE:libpostproc = "GPL-2.0-or-later"
+LICENSE:libswresample = "${@bb.utils.contains('PACKAGECONFIG', 'gpl', 'GPL-2.0-or-later', 'LGPL-2.1-or-later', d)}"
+LICENSE:libswscale = "${@bb.utils.contains('PACKAGECONFIG', 'gpl', 'GPL-2.0-or-later', 'LGPL-2.1-or-later', d)}"
+LICENSE_FLAGS = "commercial"
+
+LIC_FILES_CHKSUM = "file://COPYING.GPLv2;md5=b234ee4d69f5fce4486a80fdaf4a4263 \
+ file://COPYING.GPLv3;md5=d32239bcb673463ab874e80d47fae504 \
+ file://COPYING.LGPLv2.1;md5=bd7a443320af8c812e4c18d1b79df004 \
+ file://COPYING.LGPLv3;md5=e6a600fd5e1d9cbde2d983680233ad02"
+
+SRC_URI = "https://www.ffmpeg.org/releases/${BP}.tar.xz"
+
+SRC_URI[sha256sum] = "8684f4b00f94b85461884c3719382f1261f0d9eb3d59640a1f4ac0873616f968"
+
+# https://nvd.nist.gov/vuln/detail/CVE-2023-39018
+# https://github.com/bramp/ffmpeg-cli-wrapper/issues/291
+# https://security-tracker.debian.org/tracker/CVE-2023-39018
+# https://bugzilla.suse.com/show_bug.cgi?id=CVE-2023-39018
+CVE_STATUS[CVE-2023-39018] = "cpe-incorrect: This issue belongs to ffmpeg-cli-wrapper \
+(Java wrapper around the FFmpeg CLI) and not ffmepg itself."
+
+# Build fails when thumb is enabled: https://bugzilla.yoctoproject.org/show_bug.cgi?id=7717
+ARM_INSTRUCTION_SET:armv4 = "arm"
+ARM_INSTRUCTION_SET:armv5 = "arm"
+ARM_INSTRUCTION_SET:armv6 = "arm"
+
+# Should be API compatible with libav (which was a fork of ffmpeg)
+# libpostproc was previously packaged from a separate recipe
+PROVIDES = "libav libpostproc"
+
+DEPENDS = "nasm-native"
+
+inherit autotools pkgconfig
+
+PACKAGECONFIG ??= "avdevice avfilter avcodec avformat swresample swscale postproc \
+ alsa bzlib lzma theora zlib \
+ ${@bb.utils.contains('DISTRO_FEATURES', 'x11', 'xv xcb', '', d)}"
+
+# libraries to build in addition to avutil
+PACKAGECONFIG[avdevice] = "--enable-avdevice,--disable-avdevice"
+PACKAGECONFIG[avfilter] = "--enable-avfilter,--disable-avfilter"
+PACKAGECONFIG[avcodec] = "--enable-avcodec,--disable-avcodec"
+PACKAGECONFIG[avformat] = "--enable-avformat,--disable-avformat"
+PACKAGECONFIG[swresample] = "--enable-swresample,--disable-swresample"
+PACKAGECONFIG[swscale] = "--enable-swscale,--disable-swscale"
+PACKAGECONFIG[postproc] = "--enable-postproc,--disable-postproc"
+
+# features to support
+PACKAGECONFIG[alsa] = "--enable-alsa,--disable-alsa,alsa-lib"
+PACKAGECONFIG[altivec] = "--enable-altivec,--disable-altivec,"
+PACKAGECONFIG[bzlib] = "--enable-bzlib,--disable-bzlib,bzip2"
+PACKAGECONFIG[fdk-aac] = "--enable-libfdk-aac --enable-nonfree,--disable-libfdk-aac,fdk-aac"
+PACKAGECONFIG[gpl] = "--enable-gpl,--disable-gpl"
+PACKAGECONFIG[gsm] = "--enable-libgsm,--disable-libgsm,libgsm"
+PACKAGECONFIG[jack] = "--enable-indev=jack,--disable-indev=jack,jack"
+PACKAGECONFIG[libopus] = "--enable-libopus,--disable-libopus,libopus"
+PACKAGECONFIG[libvorbis] = "--enable-libvorbis,--disable-libvorbis,libvorbis"
+PACKAGECONFIG[lzma] = "--enable-lzma,--disable-lzma,xz"
+PACKAGECONFIG[mfx] = "--enable-libmfx,--disable-libmfx,intel-mediasdk"
+PACKAGECONFIG[mp3lame] = "--enable-libmp3lame,--disable-libmp3lame,lame"
+PACKAGECONFIG[openssl] = "--enable-openssl,--disable-openssl,openssl"
+PACKAGECONFIG[sdl2] = "--enable-sdl2,--disable-sdl2,virtual/libsdl2"
+PACKAGECONFIG[speex] = "--enable-libspeex,--disable-libspeex,speex"
+PACKAGECONFIG[srt] = "--enable-libsrt,--disable-libsrt,srt"
+PACKAGECONFIG[theora] = "--enable-libtheora,--disable-libtheora,libtheora libogg"
+PACKAGECONFIG[v4l2] = "--enable-libv4l2,--disable-libv4l2,v4l-utils"
+PACKAGECONFIG[vaapi] = "--enable-vaapi,--disable-vaapi,libva"
+PACKAGECONFIG[vdpau] = "--enable-vdpau,--disable-vdpau,libvdpau"
+PACKAGECONFIG[vpx] = "--enable-libvpx,--disable-libvpx,libvpx"
+PACKAGECONFIG[x264] = "--enable-libx264,--disable-libx264,x264"
+PACKAGECONFIG[x265] = "--enable-libx265,--disable-libx265,x265"
+PACKAGECONFIG[xcb] = "--enable-libxcb,--disable-libxcb,libxcb"
+PACKAGECONFIG[xv] = "--enable-outdev=xv,--disable-outdev=xv,libxv"
+PACKAGECONFIG[zlib] = "--enable-zlib,--disable-zlib,zlib"
+
+# Check codecs that require --enable-nonfree
+USE_NONFREE = "${@bb.utils.contains_any('PACKAGECONFIG', [ 'openssl' ], 'yes', '', d)}"
+
+def cpu(d):
+ for arg in (d.getVar('TUNE_CCARGS') or '').split():
+ if arg.startswith('-mcpu='):
+ return arg[6:]
+ return 'generic'
+
+EXTRA_OECONF = " \
+ --disable-stripping \
+ --enable-pic \
+ --enable-shared \
+ --enable-pthreads \
+ ${@bb.utils.contains('USE_NONFREE', 'yes', '--enable-nonfree', '', d)} \
+ \
+ --cross-prefix=${TARGET_PREFIX} \
+ \
+ --ld='${CCLD}' \
+ --cc='${CC}' \
+ --cxx='${CXX}' \
+ --arch=${TARGET_ARCH} \
+ --target-os='linux' \
+ --enable-cross-compile \
+ --extra-cflags='${CFLAGS} ${HOST_CC_ARCH}${TOOLCHAIN_OPTIONS}' \
+ --extra-ldflags='${LDFLAGS}' \
+ --sysroot='${STAGING_DIR_TARGET}' \
+ ${EXTRA_FFCONF} \
+ --libdir=${libdir} \
+ --shlibdir=${libdir} \
+ --datadir=${datadir}/ffmpeg \
+ ${@bb.utils.contains('AVAILTUNES', 'mips32r2', '', '--disable-mipsdsp --disable-mipsdspr2', d)} \
+ --cpu=${@cpu(d)} \
+ --pkg-config=pkg-config \
+"
+
+EXTRA_OECONF:append:linux-gnux32 = " --disable-asm"
+
+EXTRA_OECONF += "${@bb.utils.contains('TUNE_FEATURES', 'mipsisa64r6', '--disable-mips64r2 --disable-mips32r2', '', d)}"
+EXTRA_OECONF += "${@bb.utils.contains('TUNE_FEATURES', 'mipsisa64r2', '--disable-mips64r6 --disable-mips32r6', '', d)}"
+EXTRA_OECONF += "${@bb.utils.contains('TUNE_FEATURES', 'mips32r2', '--disable-mips64r6 --disable-mips32r6', '', d)}"
+EXTRA_OECONF += "${@bb.utils.contains('TUNE_FEATURES', 'mips32r6', '--disable-mips64r2 --disable-mips32r2', '', d)}"
+EXTRA_OECONF:append:mips = " --extra-libs=-latomic --disable-mips32r5 --disable-mipsdsp --disable-mipsdspr2 \
+ --disable-loongson2 --disable-loongson3 --disable-mmi --disable-msa"
+EXTRA_OECONF:append:riscv32 = " --extra-libs=-latomic --disable-rvv --disable-asm"
+EXTRA_OECONF:append:armv5 = " --extra-libs=-latomic"
+EXTRA_OECONF:append:powerpc = " --extra-libs=-latomic"
+EXTRA_OECONF:append:armv7a = "${@bb.utils.contains('TUNE_FEATURES','neon','',' --disable-neon',d)}"
+EXTRA_OECONF:append:armv7ve = "${@bb.utils.contains('TUNE_FEATURES','neon','',' --disable-neon',d)}"
+
+# gold crashes on x86, another solution is to --disable-asm but thats more hacky
+# ld.gold: internal error in relocate_section, at ../../gold/i386.cc:3684
+
+LDFLAGS:append:x86 = "${@bb.utils.contains('DISTRO_FEATURES', 'ld-is-gold', ' -fuse-ld=bfd ', '', d)}"
+LDFLAGS:append:x86 = "${@bb.utils.contains('DISTRO_FEATURES', 'ld-is-lld', ' -fuse-ld=bfd ', '', d)}"
+
+EXTRA_OEMAKE = "V=1"
+
+do_configure() {
+ export TMPDIR="${B}/tmp"
+ mkdir -p ${B}/tmp
+ ${S}/configure ${EXTRA_OECONF}
+ sed -i -e "s,^X86ASMFLAGS=.*,& --debug-prefix-map=${S}=${TARGET_DBGSRC_DIR} --debug-prefix-map=${B}=${TARGET_DBGSRC_DIR},g" ${B}/ffbuild/config.mak
+}
+
+# patch out build host paths for reproducibility
+do_compile:prepend:class-target() {
+ sed -i -e "s,${WORKDIR},,g" ${B}/config.h
+}
+
+PACKAGES =+ "libavcodec \
+ libavdevice \
+ libavfilter \
+ libavformat \
+ libavutil \
+ libpostproc \
+ libswresample \
+ libswscale"
+
+FILES:libavcodec = "${libdir}/libavcodec${SOLIBS}"
+FILES:libavdevice = "${libdir}/libavdevice${SOLIBS}"
+FILES:libavfilter = "${libdir}/libavfilter${SOLIBS}"
+FILES:libavformat = "${libdir}/libavformat${SOLIBS}"
+FILES:libavutil = "${libdir}/libavutil${SOLIBS}"
+FILES:libpostproc = "${libdir}/libpostproc${SOLIBS}"
+FILES:libswresample = "${libdir}/libswresample${SOLIBS}"
+FILES:libswscale = "${libdir}/libswscale${SOLIBS}"
+
+# ffmpeg disables PIC on some platforms (e.g. x86-32)
+INSANE_SKIP:${MLPREFIX}libavcodec = "textrel"
+INSANE_SKIP:${MLPREFIX}libavdevice = "textrel"
+INSANE_SKIP:${MLPREFIX}libavfilter = "textrel"
+INSANE_SKIP:${MLPREFIX}libavformat = "textrel"
+INSANE_SKIP:${MLPREFIX}libavutil = "textrel"
+INSANE_SKIP:${MLPREFIX}libswscale = "textrel"
+INSANE_SKIP:${MLPREFIX}libswresample = "textrel"
+INSANE_SKIP:${MLPREFIX}libpostproc = "textrel"
diff --git a/meta/recipes-multimedia/flac/flac_1.3.4.bb b/meta/recipes-multimedia/flac/flac_1.3.4.bb
deleted file mode 100644
index 012da0a0a0..0000000000
--- a/meta/recipes-multimedia/flac/flac_1.3.4.bb
+++ /dev/null
@@ -1,45 +0,0 @@
-SUMMARY = "Free Lossless Audio Codec"
-DESCRIPTION = "FLAC stands for Free Lossless Audio Codec, a lossless audio compression format."
-HOMEPAGE = "https://xiph.org/flac/"
-BUGTRACKER = "http://sourceforge.net/p/flac/bugs/"
-SECTION = "libs"
-LICENSE = "GFDL-1.2 & GPL-2.0-or-later & LGPL-2.1-or-later & BSD-3-Clause"
-LIC_FILES_CHKSUM = "file://COPYING.FDL;md5=ad1419ecc56e060eccf8184a87c4285f \
- file://src/Makefile.am;beginline=1;endline=17;md5=09501c864f89dfc7ead65553129817ca \
- file://COPYING.GPL;md5=b234ee4d69f5fce4486a80fdaf4a4263 \
- file://src/flac/main.c;beginline=1;endline=18;md5=09777e2934947a36f13568d0beb81199 \
- file://COPYING.LGPL;md5=fbc093901857fcd118f065f900982c24 \
- file://src/plugin_common/all.h;beginline=1;endline=18;md5=f56cb4ba9a3bc9ec6102e8df03215271 \
- file://COPYING.Xiph;md5=b59c1b6d7fc0fb7965f821a3d36505e3 \
- file://include/FLAC/all.h;beginline=65;endline=70;md5=64474f2b22e9e77b28d8b8b25c983a48"
-DEPENDS = "libogg"
-
-SRC_URI = "http://downloads.xiph.org/releases/flac/${BP}.tar.xz \
-"
-
-SRC_URI[sha256sum] = "8ff0607e75a322dd7cd6ec48f4f225471404ae2730d0ea945127b1355155e737"
-
-CVE_PRODUCT = "libflac flac"
-
-inherit autotools gettext
-
-EXTRA_OECONF = "--disable-oggtest \
- --with-ogg-libraries=${STAGING_LIBDIR} \
- --with-ogg-includes=${STAGING_INCDIR} \
- --disable-xmms-plugin \
- --without-libiconv-prefix \
- ac_cv_prog_NASM="" \
- "
-
-EXTRA_OECONF += "${@bb.utils.contains("TUNE_FEATURES", "altivec", " --enable-altivec", " --disable-altivec", d)}"
-EXTRA_OECONF += "${@bb.utils.contains("TUNE_FEATURES", "vsx", " --enable-vsx", " --disable-vsx", d)}"
-EXTRA_OECONF += "${@bb.utils.contains("TUNE_FEATURES", "core2", " --enable-sse", "", d)}"
-EXTRA_OECONF += "${@bb.utils.contains("TUNE_FEATURES", "corei7", " --enable-sse", "", d)}"
-
-PACKAGES += "libflac libflac++ liboggflac liboggflac++"
-FILES:${PN} = "${bindir}/*"
-FILES:libflac = "${libdir}/libFLAC.so.*"
-FILES:libflac++ = "${libdir}/libFLAC++.so.*"
-FILES:liboggflac = "${libdir}/libOggFLAC.so.*"
-FILES:liboggflac++ = "${libdir}/libOggFLAC++.so.*"
-
diff --git a/meta/recipes-multimedia/flac/flac_1.4.3.bb b/meta/recipes-multimedia/flac/flac_1.4.3.bb
new file mode 100644
index 0000000000..d4e463cda5
--- /dev/null
+++ b/meta/recipes-multimedia/flac/flac_1.4.3.bb
@@ -0,0 +1,36 @@
+SUMMARY = "Free Lossless Audio Codec"
+DESCRIPTION = "FLAC stands for Free Lossless Audio Codec, a lossless audio compression format."
+HOMEPAGE = "https://xiph.org/flac/"
+BUGTRACKER = "https://github.com/xiph/flac/issues"
+SECTION = "libs"
+LICENSE = "GFDL-1.2 & GPL-2.0-or-later & LGPL-2.1-or-later & BSD-3-Clause"
+LIC_FILES_CHKSUM = "file://COPYING.FDL;md5=ad1419ecc56e060eccf8184a87c4285f \
+ file://src/Makefile.am;beginline=1;endline=17;md5=b1dab2704be7f01bfbd9b7f6d5f000a9 \
+ file://COPYING.GPL;md5=b234ee4d69f5fce4486a80fdaf4a4263 \
+ file://src/flac/main.c;beginline=1;endline=18;md5=23099119c034d894bd1bf7ef5bd22101 \
+ file://COPYING.LGPL;md5=fbc093901857fcd118f065f900982c24 \
+ file://COPYING.Xiph;md5=0c90e41ab2fa7e69ca9391330d870221 \
+ file://include/FLAC/all.h;beginline=65;endline=70;md5=39aaf5e03c7364363884c8b8ddda8eea"
+
+SRC_URI = "http://downloads.xiph.org/releases/flac/${BP}.tar.xz"
+SRC_URI[sha256sum] = "6c58e69cd22348f441b861092b825e591d0b822e106de6eb0ee4d05d27205b70"
+
+CVE_PRODUCT = "libflac flac"
+
+inherit autotools gettext
+
+EXTRA_OECONF = "--disable-oggtest \
+ --without-libiconv-prefix \
+ ac_cv_prog_NASM="" \
+ "
+
+PACKAGECONFIG ??= " \
+ ogg \
+"
+PACKAGECONFIG[avx] = "--enable-avx,--disable-avx"
+PACKAGECONFIG[ogg] = "--enable-ogg --with-ogg-libraries=${STAGING_LIBDIR} --with-ogg-includes=${STAGING_INCDIR},--disable-ogg,libogg"
+
+PACKAGES += "libflac libflac++"
+FILES:${PN} = "${bindir}/*"
+FILES:libflac = "${libdir}/libFLAC.so.*"
+FILES:libflac++ = "${libdir}/libFLAC++.so.*"
diff --git a/meta/recipes-multimedia/gstreamer/gst-devtools/0001-connect-has-a-different-signature-on-musl.patch b/meta/recipes-multimedia/gstreamer/gst-devtools/0001-connect-has-a-different-signature-on-musl.patch
index c0e4581358..21e30d6d46 100644
--- a/meta/recipes-multimedia/gstreamer/gst-devtools/0001-connect-has-a-different-signature-on-musl.patch
+++ b/meta/recipes-multimedia/gstreamer/gst-devtools/0001-connect-has-a-different-signature-on-musl.patch
@@ -1,4 +1,4 @@
-From 73b1002eda17451db1f58431b42c25203f1d3097 Mon Sep 17 00:00:00 2001
+From 0c73b8131efba63c5cd37ea8c7551434c3b57304 Mon Sep 17 00:00:00 2001
From: Khem Raj <raj.khem@gmail.com>
Date: Sun, 9 Sep 2018 17:38:10 -0700
Subject: [PATCH] connect has a different signature on musl
@@ -11,6 +11,7 @@ typcasted to struct sockaddr_in* type inside the function before use
Upstream-Status: Pending
Signed-off-by: Khem Raj <raj.khem@gmail.com>
+
---
validate/plugins/fault_injection/socket_interposer.c | 7 ++++++-
1 file changed, 6 insertions(+), 1 deletion(-)
diff --git a/meta/recipes-multimedia/gstreamer/gst-devtools_1.20.2.bb b/meta/recipes-multimedia/gstreamer/gst-devtools_1.20.2.bb
deleted file mode 100644
index 4819a34b26..0000000000
--- a/meta/recipes-multimedia/gstreamer/gst-devtools_1.20.2.bb
+++ /dev/null
@@ -1,52 +0,0 @@
-SUMMARY = "Gstreamer validation tool"
-DESCRIPTION = "A Tool to test GStreamer components"
-HOMEPAGE = "https://gstreamer.freedesktop.org/documentation/gst-devtools/index.html"
-SECTION = "multimedia"
-
-LICENSE = "LGPL-2.1-or-later"
-LIC_FILES_CHKSUM = "file://validate/COPYING;md5=a6f89e2100d9b6cdffcea4f398e37343"
-
-#S = "${WORKDIR}/gst-devtools-${PV}"
-
-SRC_URI = "https://gstreamer.freedesktop.org/src/gst-devtools/gst-devtools-${PV}.tar.xz \
- file://0001-connect-has-a-different-signature-on-musl.patch \
- "
-
-SRC_URI[sha256sum] = "b28dba953a92532208b30467ff91076295e266f65364b1b3482b4c4372d44b2a"
-
-DEPENDS = "json-glib glib-2.0 glib-2.0-native gstreamer1.0 gstreamer1.0-plugins-base"
-RRECOMMENDS:${PN} = "git"
-
-FILES:${PN} += "${datadir}/gstreamer-1.0/* ${libdir}/gst-validate-launcher/* ${libdir}/gstreamer-1.0/*"
-
-inherit meson pkgconfig gettext upstream-version-is-even gobject-introspection
-
-# TODO: put this in a gettext.bbclass patch
-def gettext_oemeson(d):
- if d.getVar('USE_NLS') == 'no':
- return '-Dnls=disabled'
- # Remove the NLS bits if USE_NLS is no or INHIBIT_DEFAULT_DEPS is set
- if d.getVar('INHIBIT_DEFAULT_DEPS') and not oe.utils.inherits(d, 'cross-canadian'):
- return '-Dnls=disabled'
- return '-Dnls=enabled'
-
-# Build GstValidateVideo
-PACKAGECONFIG[cairo] = "-Dcairo=enabled,-Dcairo=disabled,cairo"
-
-EXTRA_OEMESON += " \
- -Ddoc=disabled \
- -Ddebug_viewer=disabled \
- -Dtests=disabled \
- -Dvalidate=enabled \
- ${@gettext_oemeson(d)} \
-"
-
-do_install:append () {
- for fn in ${bindir}/gst-validate-launcher \
- ${libdir}/gst-validate-launcher/python/launcher/config.py; do
- sed -i -e 's,${B},/usr/src/debug/${PN},g' -e 's,${S},/usr/src/debug/${PN},g' ${D}$fn
- done
-}
-
-GIR_MESON_ENABLE_FLAG = "enabled"
-GIR_MESON_DISABLE_FLAG = "disabled"
diff --git a/meta/recipes-multimedia/gstreamer/gst-devtools_1.22.11.bb b/meta/recipes-multimedia/gstreamer/gst-devtools_1.22.11.bb
new file mode 100644
index 0000000000..2be406192f
--- /dev/null
+++ b/meta/recipes-multimedia/gstreamer/gst-devtools_1.22.11.bb
@@ -0,0 +1,52 @@
+SUMMARY = "Gstreamer validation tool"
+DESCRIPTION = "A Tool to test GStreamer components"
+HOMEPAGE = "https://gstreamer.freedesktop.org/documentation/gst-devtools/index.html"
+SECTION = "multimedia"
+
+LICENSE = "LGPL-2.1-or-later"
+LIC_FILES_CHKSUM = "file://validate/COPYING;md5=a6f89e2100d9b6cdffcea4f398e37343"
+
+#S = "${WORKDIR}/gst-devtools-${PV}"
+
+SRC_URI = "https://gstreamer.freedesktop.org/src/gst-devtools/gst-devtools-${PV}.tar.xz \
+ file://0001-connect-has-a-different-signature-on-musl.patch \
+ "
+
+SRC_URI[sha256sum] = "07766425ecb5bf857ab5ad3962321c55cd89f9386b720843f9df71c0a455eb9b"
+
+DEPENDS = "json-glib glib-2.0 glib-2.0-native gstreamer1.0 gstreamer1.0-plugins-base"
+RRECOMMENDS:${PN} = "git"
+
+FILES:${PN} += "${datadir}/gstreamer-1.0/* ${libdir}/gst-validate-launcher/* ${libdir}/gstreamer-1.0/*"
+
+inherit meson pkgconfig gettext upstream-version-is-even gobject-introspection
+
+# TODO: put this in a gettext.bbclass patch
+def gettext_oemeson(d):
+ if d.getVar('USE_NLS') == 'no':
+ return '-Dnls=disabled'
+ # Remove the NLS bits if USE_NLS is no or INHIBIT_DEFAULT_DEPS is set
+ if d.getVar('INHIBIT_DEFAULT_DEPS') and not oe.utils.inherits(d, 'cross-canadian'):
+ return '-Dnls=disabled'
+ return '-Dnls=enabled'
+
+# Build GstValidateVideo
+PACKAGECONFIG[cairo] = "-Dcairo=enabled,-Dcairo=disabled,cairo"
+
+EXTRA_OEMESON += " \
+ -Ddoc=disabled \
+ -Ddebug_viewer=disabled \
+ -Dtests=disabled \
+ -Dvalidate=enabled \
+ ${@gettext_oemeson(d)} \
+"
+
+do_install:append () {
+ for fn in ${bindir}/gst-validate-launcher \
+ ${libdir}/gst-validate-launcher/python/launcher/config.py; do
+ sed -i -e 's,${B},/usr/src/debug/${PN},g' -e 's,${S},/usr/src/debug/${PN},g' ${D}$fn
+ done
+}
+
+GIR_MESON_ENABLE_FLAG = "enabled"
+GIR_MESON_DISABLE_FLAG = "disabled"
diff --git a/meta/recipes-multimedia/gstreamer/gst-examples/gst-player.desktop b/meta/recipes-multimedia/gstreamer/gst-examples/gst-player.desktop
index 7ddd456a1e..d165e5d910 100644
--- a/meta/recipes-multimedia/gstreamer/gst-examples/gst-player.desktop
+++ b/meta/recipes-multimedia/gstreamer/gst-examples/gst-player.desktop
@@ -1,7 +1,7 @@
[Desktop Entry]
Name=Media Player
Comment=Basic media player
-Icon=multimedia-player
+Icon=video-player
TryExec=gtk-play
Exec=gtk-play
StartupNotify=true
diff --git a/meta/recipes-multimedia/gstreamer/gstreamer1.0-libav_1.20.2.bb b/meta/recipes-multimedia/gstreamer/gstreamer1.0-libav_1.20.2.bb
deleted file mode 100644
index 4ef9755c07..0000000000
--- a/meta/recipes-multimedia/gstreamer/gstreamer1.0-libav_1.20.2.bb
+++ /dev/null
@@ -1,28 +0,0 @@
-SUMMARY = "Libav-based GStreamer 1.x plugin"
-DESCRIPTION = "Contains a GStreamer plugin for using the encoders, decoders, \
-muxers, and demuxers provided by FFmpeg."
-HOMEPAGE = "http://gstreamer.freedesktop.org/"
-SECTION = "multimedia"
-
-# ffmpeg has comercial license flags so add it as we need ffmpeg as a dependency
-LICENSE_FLAGS = "commercial"
-LICENSE = "LGPL-2.1-or-later"
-LIC_FILES_CHKSUM = "file://COPYING;md5=69333daa044cb77e486cc36129f7a770 \
- file://ext/libav/gstav.h;beginline=1;endline=18;md5=a752c35267d8276fd9ca3db6994fca9c \
- "
-
-SRC_URI = "https://gstreamer.freedesktop.org/src/gst-libav/gst-libav-${PV}.tar.xz"
-SRC_URI[sha256sum] = "b5c531dd8413bf771c79dab66b8e389f20b3991f745115133f0fa0b8e32809f9"
-
-S = "${WORKDIR}/gst-libav-${PV}"
-
-DEPENDS = "gstreamer1.0 gstreamer1.0-plugins-base ffmpeg"
-
-inherit meson pkgconfig upstream-version-is-even
-
-EXTRA_OEMESON += " \
- -Dtests=disabled \
-"
-
-FILES:${PN} += "${libdir}/gstreamer-1.0/*.so"
-FILES:${PN}-staticdev += "${libdir}/gstreamer-1.0/*.a"
diff --git a/meta/recipes-multimedia/gstreamer/gstreamer1.0-libav_1.22.11.bb b/meta/recipes-multimedia/gstreamer/gstreamer1.0-libav_1.22.11.bb
new file mode 100644
index 0000000000..f3287efa96
--- /dev/null
+++ b/meta/recipes-multimedia/gstreamer/gstreamer1.0-libav_1.22.11.bb
@@ -0,0 +1,28 @@
+SUMMARY = "Libav-based GStreamer 1.x plugin"
+DESCRIPTION = "Contains a GStreamer plugin for using the encoders, decoders, \
+muxers, and demuxers provided by FFmpeg."
+HOMEPAGE = "http://gstreamer.freedesktop.org/"
+SECTION = "multimedia"
+
+# ffmpeg has comercial license flags so add it as we need ffmpeg as a dependency
+LICENSE_FLAGS = "commercial"
+LICENSE = "LGPL-2.1-or-later"
+LIC_FILES_CHKSUM = "file://COPYING;md5=69333daa044cb77e486cc36129f7a770 \
+ file://ext/libav/gstav.h;beginline=1;endline=18;md5=a752c35267d8276fd9ca3db6994fca9c \
+ "
+
+SRC_URI = "https://gstreamer.freedesktop.org/src/gst-libav/gst-libav-${PV}.tar.xz"
+SRC_URI[sha256sum] = "6b13dcc9332ef27a7c1e7005c0196883874f91622f8aa6e52f218b05b15d2bf5"
+
+S = "${WORKDIR}/gst-libav-${PV}"
+
+DEPENDS = "gstreamer1.0 gstreamer1.0-plugins-base ffmpeg"
+
+inherit meson pkgconfig upstream-version-is-even
+
+EXTRA_OEMESON += " \
+ -Dtests=disabled \
+"
+
+FILES:${PN} += "${libdir}/gstreamer-1.0/*.so"
+FILES:${PN}-staticdev += "${libdir}/gstreamer-1.0/*.a"
diff --git a/meta/recipes-multimedia/gstreamer/gstreamer1.0-meta-base.bb b/meta/recipes-multimedia/gstreamer/gstreamer1.0-meta-base.bb
index 57a9adbaec..6cc11e1928 100644
--- a/meta/recipes-multimedia/gstreamer/gstreamer1.0-meta-base.bb
+++ b/meta/recipes-multimedia/gstreamer/gstreamer1.0-meta-base.bb
@@ -34,8 +34,7 @@ RDEPENDS:gstreamer1.0-meta-base = "\
gstreamer1.0-plugins-base-audioconvert \
gstreamer1.0-plugins-base-audioresample \
gstreamer1.0-plugins-base-typefindfunctions \
- gstreamer1.0-plugins-base-videoscale \
- gstreamer1.0-plugins-base-videoconvert \
+ gstreamer1.0-plugins-base-videoconvertscale \
gstreamer1.0-plugins-good-autodetect \
gstreamer1.0-plugins-good-soup"
diff --git a/meta/recipes-multimedia/gstreamer/gstreamer1.0-omx_1.20.2.bb b/meta/recipes-multimedia/gstreamer/gstreamer1.0-omx_1.20.2.bb
deleted file mode 100644
index c4f5d719bb..0000000000
--- a/meta/recipes-multimedia/gstreamer/gstreamer1.0-omx_1.20.2.bb
+++ /dev/null
@@ -1,47 +0,0 @@
-SUMMARY = "OpenMAX IL plugins for GStreamer"
-DESCRIPTION = "Wraps available OpenMAX IL components and makes them available as standard GStreamer elements."
-HOMEPAGE = "http://gstreamer.freedesktop.org/"
-SECTION = "multimedia"
-
-LICENSE = "LGPL-2.1-or-later"
-LICENSE_FLAGS = "commercial"
-LIC_FILES_CHKSUM = "file://COPYING;md5=4fbd65380cdd255951079008b364516c \
- file://omx/gstomx.h;beginline=1;endline=21;md5=5c8e1fca32704488e76d2ba9ddfa935f"
-
-SRC_URI = "https://gstreamer.freedesktop.org/src/gst-omx/gst-omx-${PV}.tar.xz"
-
-SRC_URI[sha256sum] = "7efed7cc5b0acf9a669e38c5360a7892430a4e86c858daac6faa1ade2b151668"
-
-S = "${WORKDIR}/gst-omx-${PV}"
-
-DEPENDS = "gstreamer1.0 gstreamer1.0-plugins-base gstreamer1.0-plugins-bad virtual/libomxil"
-
-inherit meson pkgconfig upstream-version-is-even
-
-GSTREAMER_1_0_OMX_TARGET ?= "bellagio"
-GSTREAMER_1_0_OMX_CORE_NAME ?= "${libdir}/libomxil-bellagio.so.0"
-
-EXTRA_OEMESON += "-Dtarget=${GSTREAMER_1_0_OMX_TARGET}"
-
-python __anonymous () {
- omx_target = d.getVar("GSTREAMER_1_0_OMX_TARGET")
- if omx_target in ['generic', 'bellagio']:
- # Bellagio headers are incomplete (they are missing the OMX_VERSION_MAJOR,#
- # OMX_VERSION_MINOR, OMX_VERSION_REVISION, and OMX_VERSION_STEP macros);
- # appending a directory path to gst-omx' internal OpenMAX IL headers fixes this
- d.appendVar("CFLAGS", " -I${S}/omx/openmax")
- elif omx_target == "rpi":
- # Dedicated Raspberry Pi OpenMAX IL support makes this package machine specific
- d.setVar("PACKAGE_ARCH", d.getVar("MACHINE_ARCH"))
-}
-
-set_omx_core_name() {
- sed -i -e "s;^core-name=.*;core-name=${GSTREAMER_1_0_OMX_CORE_NAME};" "${D}${sysconfdir}/xdg/gstomx.conf"
-}
-do_install[postfuncs] += " set_omx_core_name "
-
-FILES:${PN} += "${libdir}/gstreamer-1.0/*.so"
-FILES:${PN}-staticdev += "${libdir}/gstreamer-1.0/*.a"
-
-VIRTUAL-RUNTIME_libomxil ?= "libomxil"
-RDEPENDS:${PN} = "${VIRTUAL-RUNTIME_libomxil}"
diff --git a/meta/recipes-multimedia/gstreamer/gstreamer1.0-omx_1.22.11.bb b/meta/recipes-multimedia/gstreamer/gstreamer1.0-omx_1.22.11.bb
new file mode 100644
index 0000000000..97348fb398
--- /dev/null
+++ b/meta/recipes-multimedia/gstreamer/gstreamer1.0-omx_1.22.11.bb
@@ -0,0 +1,47 @@
+SUMMARY = "OpenMAX IL plugins for GStreamer"
+DESCRIPTION = "Wraps available OpenMAX IL components and makes them available as standard GStreamer elements."
+HOMEPAGE = "http://gstreamer.freedesktop.org/"
+SECTION = "multimedia"
+
+LICENSE = "LGPL-2.1-or-later"
+LICENSE_FLAGS = "commercial"
+LIC_FILES_CHKSUM = "file://COPYING;md5=4fbd65380cdd255951079008b364516c \
+ file://omx/gstomx.h;beginline=1;endline=21;md5=5c8e1fca32704488e76d2ba9ddfa935f"
+
+SRC_URI = "https://gstreamer.freedesktop.org/src/gst-omx/gst-omx-${PV}.tar.xz"
+
+SRC_URI[sha256sum] = "18dfdf5f6b773d67e62a315c6cf6247da320b83603a5819493f53c69ed2eeef6"
+
+S = "${WORKDIR}/gst-omx-${PV}"
+
+DEPENDS = "gstreamer1.0 gstreamer1.0-plugins-base gstreamer1.0-plugins-bad virtual/libomxil"
+
+inherit meson pkgconfig upstream-version-is-even
+
+GSTREAMER_1_0_OMX_TARGET ?= "bellagio"
+GSTREAMER_1_0_OMX_CORE_NAME ?= "${libdir}/libomxil-bellagio.so.0"
+
+EXTRA_OEMESON += "-Dtarget=${GSTREAMER_1_0_OMX_TARGET}"
+
+python __anonymous () {
+ omx_target = d.getVar("GSTREAMER_1_0_OMX_TARGET")
+ if omx_target in ['generic', 'bellagio']:
+ # Bellagio headers are incomplete (they are missing the OMX_VERSION_MAJOR,#
+ # OMX_VERSION_MINOR, OMX_VERSION_REVISION, and OMX_VERSION_STEP macros);
+ # appending a directory path to gst-omx' internal OpenMAX IL headers fixes this
+ d.appendVar("CFLAGS", " -I${S}/omx/openmax")
+ elif omx_target == "rpi":
+ # Dedicated Raspberry Pi OpenMAX IL support makes this package machine specific
+ d.setVar("PACKAGE_ARCH", d.getVar("MACHINE_ARCH"))
+}
+
+set_omx_core_name() {
+ sed -i -e "s;^core-name=.*;core-name=${GSTREAMER_1_0_OMX_CORE_NAME};" "${D}${sysconfdir}/xdg/gstomx.conf"
+}
+do_install[postfuncs] += " set_omx_core_name "
+
+FILES:${PN} += "${libdir}/gstreamer-1.0/*.so"
+FILES:${PN}-staticdev += "${libdir}/gstreamer-1.0/*.a"
+
+VIRTUAL-RUNTIME_libomxil ?= "libomxil"
+RDEPENDS:${PN} = "${VIRTUAL-RUNTIME_libomxil}"
diff --git a/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-bad/0001-fix-maybe-uninitialized-warnings-when-compiling-with.patch b/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-bad/0001-fix-maybe-uninitialized-warnings-when-compiling-with.patch
index 4b3451d321..a57fcd7d21 100644
--- a/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-bad/0001-fix-maybe-uninitialized-warnings-when-compiling-with.patch
+++ b/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-bad/0001-fix-maybe-uninitialized-warnings-when-compiling-with.patch
@@ -1,4 +1,4 @@
-From 6c3eeaf6435608f744303c17d52245f926cd28ac Mon Sep 17 00:00:00 2001
+From 8be0c6ac60c96e87d8aa443be9c431844dc3d92a Mon Sep 17 00:00:00 2001
From: Andre McCurdy <armccurdy@gmail.com>
Date: Tue, 26 Jan 2016 15:16:01 -0800
Subject: [PATCH] fix maybe-uninitialized warnings when compiling with -Os
diff --git a/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-bad/0002-avoid-including-sys-poll.h-directly.patch b/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-bad/0002-avoid-including-sys-poll.h-directly.patch
index e3e5d9fb60..6509a293b7 100644
--- a/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-bad/0002-avoid-including-sys-poll.h-directly.patch
+++ b/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-bad/0002-avoid-including-sys-poll.h-directly.patch
@@ -1,4 +1,4 @@
-From 701255c5797f222f0c47ace6437d213607b98d1e Mon Sep 17 00:00:00 2001
+From 426ee79adeecc24605426030a486d7c5a755048c Mon Sep 17 00:00:00 2001
From: Andre McCurdy <armccurdy@gmail.com>
Date: Wed, 3 Feb 2016 18:05:41 -0800
Subject: [PATCH] avoid including <sys/poll.h> directly
@@ -14,7 +14,7 @@ Signed-off-by: Andre McCurdy <armccurdy@gmail.com>
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/sys/dvb/gstdvbsrc.c b/sys/dvb/gstdvbsrc.c
-index 92c918c..5e27871 100644
+index b8e5b1a..5bca6e5 100644
--- a/sys/dvb/gstdvbsrc.c
+++ b/sys/dvb/gstdvbsrc.c
@@ -98,7 +98,7 @@
diff --git a/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-bad/0003-ensure-valid-sentinals-for-gst_structure_get-etc.patch b/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-bad/0003-ensure-valid-sentinals-for-gst_structure_get-etc.patch
deleted file mode 100644
index 280cbf984a..0000000000
--- a/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-bad/0003-ensure-valid-sentinals-for-gst_structure_get-etc.patch
+++ /dev/null
@@ -1,86 +0,0 @@
-From 001fa08542dd5fc79571f7c803b2d3dd59c04a06 Mon Sep 17 00:00:00 2001
-From: Andre McCurdy <armccurdy@gmail.com>
-Date: Tue, 9 Feb 2016 14:00:00 -0800
-Subject: [PATCH] ensure valid sentinals for gst_structure_get() etc
-
-For GStreamer functions declared with G_GNUC_NULL_TERMINATED,
-ie __attribute__((__sentinel__)), gcc will generate a warning if the
-last parameter passed to the function is not NULL (where a valid NULL
-in this context is defined as zero with any pointer type).
-
-The C callers to such functions within gst-plugins-bad use the C NULL
-definition (ie ((void*)0)), which is a valid sentinel.
-
-However the C++ NULL definition (ie 0L), is not a valid sentinel
-without an explicit cast to a pointer type.
-
-Upstream-Status: Pending
-
-Signed-off-by: Andre McCurdy <armccurdy@gmail.com>
-
----
- sys/decklink/gstdecklink.cpp | 10 +++++-----
- sys/decklink/gstdecklinkaudiosrc.cpp | 2 +-
- sys/decklink/gstdecklinkvideosink.cpp | 2 +-
- 3 files changed, 7 insertions(+), 7 deletions(-)
-
-diff --git a/sys/decklink/gstdecklink.cpp b/sys/decklink/gstdecklink.cpp
-index 3f79deb..96600c6 100644
---- a/sys/decklink/gstdecklink.cpp
-+++ b/sys/decklink/gstdecklink.cpp
-@@ -680,7 +680,7 @@ gst_decklink_mode_get_generic_structure (GstDecklinkModeEnum e)
- "pixel-aspect-ratio", GST_TYPE_FRACTION, mode->par_n, mode->par_d,
- "interlace-mode", G_TYPE_STRING,
- mode->interlaced ? "interleaved" : "progressive",
-- "framerate", GST_TYPE_FRACTION, mode->fps_n, mode->fps_d, NULL);
-+ "framerate", GST_TYPE_FRACTION, mode->fps_n, mode->fps_d, (void*)NULL);
-
- return s;
- }
-@@ -705,16 +705,16 @@ gst_decklink_mode_get_structure (GstDecklinkModeEnum e, BMDPixelFormat f,
- case bmdFormat8BitYUV: /* '2vuy' */
- gst_structure_set (s, "format", G_TYPE_STRING, "UYVY",
- "colorimetry", G_TYPE_STRING, mode->colorimetry,
-- "chroma-site", G_TYPE_STRING, "mpeg2", NULL);
-+ "chroma-site", G_TYPE_STRING, "mpeg2", (void*)NULL);
- break;
- case bmdFormat10BitYUV: /* 'v210' */
-- gst_structure_set (s, "format", G_TYPE_STRING, "v210", NULL);
-+ gst_structure_set (s, "format", G_TYPE_STRING, "v210", (void*)NULL);
- break;
- case bmdFormat8BitARGB: /* 'ARGB' */
-- gst_structure_set (s, "format", G_TYPE_STRING, "ARGB", NULL);
-+ gst_structure_set (s, "format", G_TYPE_STRING, "ARGB", (void*)NULL);
- break;
- case bmdFormat8BitBGRA: /* 'BGRA' */
-- gst_structure_set (s, "format", G_TYPE_STRING, "BGRA", NULL);
-+ gst_structure_set (s, "format", G_TYPE_STRING, "BGRA", (void*)NULL);
- break;
- case bmdFormat10BitRGB: /* 'r210' Big-endian RGB 10-bit per component with SMPTE video levels (64-960). Packed as 2:10:10:10 */
- case bmdFormat12BitRGB: /* 'R12B' Big-endian RGB 12-bit per component with full range (0-4095). Packed as 12-bit per component */
-diff --git a/sys/decklink/gstdecklinkaudiosrc.cpp b/sys/decklink/gstdecklinkaudiosrc.cpp
-index 50ad5cc..d209180 100644
---- a/sys/decklink/gstdecklinkaudiosrc.cpp
-+++ b/sys/decklink/gstdecklinkaudiosrc.cpp
-@@ -388,7 +388,7 @@ gst_decklink_audio_src_start (GstDecklinkAudioSrc * self)
- g_mutex_unlock (&self->input->lock);
-
- if (videosrc) {
-- g_object_get (videosrc, "connection", &vconn, NULL);
-+ g_object_get (videosrc, "connection", &vconn, (void *) NULL);
- gst_object_unref (videosrc);
-
- switch (vconn) {
-diff --git a/sys/decklink/gstdecklinkvideosink.cpp b/sys/decklink/gstdecklinkvideosink.cpp
-index a64c046..07a09e8 100644
---- a/sys/decklink/gstdecklinkvideosink.cpp
-+++ b/sys/decklink/gstdecklinkvideosink.cpp
-@@ -288,7 +288,7 @@ reset_framerate (GstCapsFeatures * features, GstStructure * structure,
- gpointer user_data)
- {
- gst_structure_set (structure, "framerate", GST_TYPE_FRACTION_RANGE, 0, 1,
-- G_MAXINT, 1, NULL);
-+ G_MAXINT, 1, (void *) NULL);
-
- return TRUE;
- }
diff --git a/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-bad/0004-opencv-resolve-missing-opencv-data-dir-in-yocto-buil.patch b/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-bad/0004-opencv-resolve-missing-opencv-data-dir-in-yocto-buil.patch
index 029b80e174..50a3143eca 100644
--- a/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-bad/0004-opencv-resolve-missing-opencv-data-dir-in-yocto-buil.patch
+++ b/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-bad/0004-opencv-resolve-missing-opencv-data-dir-in-yocto-buil.patch
@@ -1,4 +1,4 @@
-From 3bc5d48257032b6bbee532aad15062fbbcc43bfe Mon Sep 17 00:00:00 2001
+From e6eb7536fcfc2c75f7831f67b1f16f3f36ef2545 Mon Sep 17 00:00:00 2001
From: Andrey Zhizhikin <andrey.z@gmail.com>
Date: Mon, 27 Jan 2020 10:22:35 +0000
Subject: [PATCH] opencv: resolve missing opencv data dir in yocto build
@@ -18,7 +18,7 @@ Signed-off-by: Jose Quaresma <quaresma.jose@gmail.com>
1 file changed, 3 insertions(+)
diff --git a/ext/opencv/meson.build b/ext/opencv/meson.build
-index 1d86b90..b5c8b95 100644
+index 37e2015..326f737 100644
--- a/ext/opencv/meson.build
+++ b/ext/opencv/meson.build
@@ -87,6 +87,9 @@ if opencv_found
diff --git a/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-bad_1.20.2.bb b/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-bad_1.20.2.bb
deleted file mode 100644
index bb33e3822e..0000000000
--- a/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-bad_1.20.2.bb
+++ /dev/null
@@ -1,166 +0,0 @@
-require gstreamer1.0-plugins-common.inc
-require gstreamer1.0-plugins-license.inc
-
-DESCRIPTION = "'Bad' GStreamer plugins and helper libraries "
-HOMEPAGE = "https://gstreamer.freedesktop.org/"
-BUGTRACKER = "https://gitlab.freedesktop.org/gstreamer/gst-plugins-bad/-/issues"
-
-SRC_URI = "https://gstreamer.freedesktop.org/src/gst-plugins-bad/gst-plugins-bad-${PV}.tar.xz \
- file://0001-fix-maybe-uninitialized-warnings-when-compiling-with.patch \
- file://0002-avoid-including-sys-poll.h-directly.patch \
- file://0003-ensure-valid-sentinals-for-gst_structure_get-etc.patch \
- file://0004-opencv-resolve-missing-opencv-data-dir-in-yocto-buil.patch \
- "
-SRC_URI[sha256sum] = "4adc4c05f41051f8136b80cda99b0d049a34e777832f9fea7c5a70347658745b"
-
-S = "${WORKDIR}/gst-plugins-bad-${PV}"
-
-LICENSE = "LGPL-2.1-or-later & GPL-2.0-or-later"
-LIC_FILES_CHKSUM = "file://COPYING;md5=4fbd65380cdd255951079008b364516c"
-
-DEPENDS += "gstreamer1.0-plugins-base"
-
-inherit gobject-introspection
-
-PACKAGECONFIG ??= " \
- ${GSTREAMER_ORC} \
- ${@bb.utils.contains('DISTRO_FEATURES', 'bluetooth', 'bluez', '', d)} \
- ${@bb.utils.filter('DISTRO_FEATURES', 'directfb vulkan x11', d)} \
- ${@bb.utils.contains('DISTRO_FEATURES', 'wayland', 'wayland', '', d)} \
- ${@bb.utils.contains('DISTRO_FEATURES', 'opengl', 'gl', '', d)} \
- bz2 closedcaption curl dash dtls hls openssl sbc smoothstreaming \
- sndfile ttml uvch264 webp \
- ${@bb.utils.contains('TUNE_FEATURES', 'mx32', '', 'rsvg', d)} \
-"
-
-PACKAGECONFIG[aom] = "-Daom=enabled,-Daom=disabled,aom"
-PACKAGECONFIG[assrender] = "-Dassrender=enabled,-Dassrender=disabled,libass"
-PACKAGECONFIG[bluez] = "-Dbluez=enabled,-Dbluez=disabled,bluez5"
-PACKAGECONFIG[bz2] = "-Dbz2=enabled,-Dbz2=disabled,bzip2"
-PACKAGECONFIG[closedcaption] = "-Dclosedcaption=enabled,-Dclosedcaption=disabled,pango cairo"
-PACKAGECONFIG[curl] = "-Dcurl=enabled,-Dcurl=disabled,curl"
-PACKAGECONFIG[dash] = "-Ddash=enabled,-Ddash=disabled,libxml2"
-PACKAGECONFIG[dc1394] = "-Ddc1394=enabled,-Ddc1394=disabled,libdc1394"
-PACKAGECONFIG[directfb] = "-Ddirectfb=enabled,-Ddirectfb=disabled,directfb"
-PACKAGECONFIG[dtls] = "-Ddtls=enabled,-Ddtls=disabled,openssl"
-PACKAGECONFIG[faac] = "-Dfaac=enabled,-Dfaac=disabled,faac"
-PACKAGECONFIG[faad] = "-Dfaad=enabled,-Dfaad=disabled,faad2"
-PACKAGECONFIG[fluidsynth] = "-Dfluidsynth=enabled,-Dfluidsynth=disabled,fluidsynth"
-PACKAGECONFIG[hls] = "-Dhls=enabled,-Dhls=disabled,"
-# Pick atleast one crypto backend below when enabling hls
-PACKAGECONFIG[nettle] = "-Dhls-crypto=nettle,,nettle"
-PACKAGECONFIG[openssl] = "-Dhls-crypto=openssl,,openssl"
-PACKAGECONFIG[gcrypt] = "-Dhls-crypto=libgcrypt,,libgcrypt"
-# the gl packageconfig enables OpenGL elements that haven't been ported
-# to -base yet. They depend on the gstgl library in -base, so we do
-# not add GL dependencies here, since these are taken care of in -base.
-PACKAGECONFIG[gl] = "-Dgl=enabled,-Dgl=disabled,"
-PACKAGECONFIG[kms] = "-Dkms=enabled,-Dkms=disabled,libdrm"
-PACKAGECONFIG[libde265] = "-Dlibde265=enabled,-Dlibde265=disabled,libde265"
-PACKAGECONFIG[libssh2] = "-Dcurl-ssh2=enabled,-Dcurl-ssh2=disabled,libssh2"
-PACKAGECONFIG[lcms2] = "-Dcolormanagement=enabled,-Dcolormanagement=disabled,lcms"
-PACKAGECONFIG[modplug] = "-Dmodplug=enabled,-Dmodplug=disabled,libmodplug"
-PACKAGECONFIG[msdk] = "-Dmsdk=enabled,-Dmsdk=disabled,intel-mediasdk"
-PACKAGECONFIG[neon] = "-Dneon=enabled,-Dneon=disabled,neon"
-PACKAGECONFIG[openal] = "-Dopenal=enabled,-Dopenal=disabled,openal-soft"
-PACKAGECONFIG[opencv] = "-Dopencv=enabled,-Dopencv=disabled,opencv"
-PACKAGECONFIG[openh264] = "-Dopenh264=enabled,-Dopenh264=disabled,openh264"
-PACKAGECONFIG[openjpeg] = "-Dopenjpeg=enabled,-Dopenjpeg=disabled,openjpeg"
-PACKAGECONFIG[openmpt] = "-Dopenmpt=enabled,-Dopenmpt=disabled,libopenmpt"
-# the opus encoder/decoder elements are now in the -base package,
-# but the opus parser remains in -bad
-PACKAGECONFIG[opusparse] = "-Dopus=enabled,-Dopus=disabled,libopus"
-PACKAGECONFIG[resindvd] = "-Dresindvd=enabled,-Dresindvd=disabled,libdvdread libdvdnav"
-PACKAGECONFIG[rsvg] = "-Drsvg=enabled,-Drsvg=disabled,librsvg"
-PACKAGECONFIG[rtmp] = "-Drtmp=enabled,-Drtmp=disabled,rtmpdump"
-PACKAGECONFIG[sbc] = "-Dsbc=enabled,-Dsbc=disabled,sbc"
-PACKAGECONFIG[sctp] = "-Dsctp=enabled,-Dsctp=disabled"
-PACKAGECONFIG[smoothstreaming] = "-Dsmoothstreaming=enabled,-Dsmoothstreaming=disabled,libxml2"
-PACKAGECONFIG[sndfile] = "-Dsndfile=enabled,-Dsndfile=disabled,libsndfile1"
-PACKAGECONFIG[srt] = "-Dsrt=enabled,-Dsrt=disabled,srt"
-PACKAGECONFIG[srtp] = "-Dsrtp=enabled,-Dsrtp=disabled,libsrtp"
-PACKAGECONFIG[tinyalsa] = "-Dtinyalsa=enabled,-Dtinyalsa=disabled,tinyalsa"
-PACKAGECONFIG[ttml] = "-Dttml=enabled,-Dttml=disabled,libxml2 pango cairo"
-PACKAGECONFIG[uvch264] = "-Duvch264=enabled,-Duvch264=disabled,libusb1 libgudev"
-# this enables support for stateless V4L2 mem2mem codecs, which is a newer form of
-# V4L2 codec; the V4L2 code in -base supports the older stateful V4L2 mem2mem codecs
-PACKAGECONFIG[v4l2codecs] = "-Dv4l2codecs=enabled,-Dv4l2codecs=disabled,libgudev"
-PACKAGECONFIG[va] = "-Dva=enabled,-Dva=disabled,libva"
-PACKAGECONFIG[voaacenc] = "-Dvoaacenc=enabled,-Dvoaacenc=disabled,vo-aacenc"
-PACKAGECONFIG[voamrwbenc] = "-Dvoamrwbenc=enabled,-Dvoamrwbenc=disabled,vo-amrwbenc"
-PACKAGECONFIG[vulkan] = "-Dvulkan=enabled,-Dvulkan=disabled,vulkan-loader shaderc-native"
-PACKAGECONFIG[wayland] = "-Dwayland=enabled,-Dwayland=disabled,wayland-native wayland wayland-protocols libdrm"
-PACKAGECONFIG[webp] = "-Dwebp=enabled,-Dwebp=disabled,libwebp"
-PACKAGECONFIG[webrtc] = "-Dwebrtc=enabled,-Dwebrtc=disabled,libnice"
-PACKAGECONFIG[webrtcdsp] = "-Dwebrtcdsp=enabled,-Dwebrtcdsp=disabled,webrtc-audio-processing"
-PACKAGECONFIG[zbar] = "-Dzbar=enabled,-Dzbar=disabled,zbar"
-PACKAGECONFIG[x11] = "-Dx11=enabled,-Dx11=disabled,libxcb libxkbcommon"
-PACKAGECONFIG[x265] = "-Dx265=enabled,-Dx265=disabled,x265"
-
-GSTREAMER_GPL = "${@bb.utils.filter('PACKAGECONFIG', 'faad resindvd x265', d)}"
-
-EXTRA_OEMESON += " \
- -Ddoc=disabled \
- -Daes=enabled \
- -Dcodecalpha=enabled \
- -Ddecklink=enabled \
- -Ddvb=enabled \
- -Dfbdev=enabled \
- -Dipcpipeline=enabled \
- -Dshm=enabled \
- -Dtranscode=enabled \
- -Dandroidmedia=disabled \
- -Dapplemedia=disabled \
- -Dasio=disabled \
- -Davtp=disabled \
- -Dbs2b=disabled \
- -Dchromaprint=disabled \
- -Dd3dvideosink=disabled \
- -Dd3d11=disabled \
- -Ddirectsound=disabled \
- -Ddts=disabled \
- -Dfdkaac=disabled \
- -Dflite=disabled \
- -Dgme=disabled \
- -Dgs=disabled \
- -Dgsm=disabled \
- -Diqa=disabled \
- -Dkate=disabled \
- -Dladspa=disabled \
- -Dldac=disabled \
- -Dlv2=disabled \
- -Dmagicleap=disabled \
- -Dmediafoundation=disabled \
- -Dmicrodns=disabled \
- -Dmpeg2enc=disabled \
- -Dmplex=disabled \
- -Dmusepack=disabled \
- -Dnvcodec=disabled \
- -Dopenexr=disabled \
- -Dopenni2=disabled \
- -Dopenaptx=disabled \
- -Dopensles=disabled \
- -Donnx=disabled \
- -Dqroverlay=disabled \
- -Dsoundtouch=disabled \
- -Dspandsp=disabled \
- -Dsvthevcenc=disabled \
- -Dteletext=disabled \
- -Dwasapi=disabled \
- -Dwasapi2=disabled \
- -Dwildmidi=disabled \
- -Dwinks=disabled \
- -Dwinscreencap=disabled \
- -Dwpe=disabled \
- -Dzxing=disabled \
-"
-
-export OPENCV_PREFIX = "${STAGING_DIR_TARGET}${prefix}"
-
-ARM_INSTRUCTION_SET:armv4 = "arm"
-ARM_INSTRUCTION_SET:armv5 = "arm"
-
-FILES:${PN}-freeverb += "${datadir}/gstreamer-1.0/presets/GstFreeverb.prs"
-FILES:${PN}-opencv += "${datadir}/gst-plugins-bad/1.0/opencv*"
-FILES:${PN}-transcode += "${datadir}/gstreamer-1.0/encoding-profiles"
-FILES:${PN}-voamrwbenc += "${datadir}/gstreamer-1.0/presets/GstVoAmrwbEnc.prs"
diff --git a/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-bad_1.22.11.bb b/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-bad_1.22.11.bb
new file mode 100644
index 0000000000..523ee7a5ae
--- /dev/null
+++ b/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-bad_1.22.11.bb
@@ -0,0 +1,165 @@
+require gstreamer1.0-plugins-common.inc
+require gstreamer1.0-plugins-license.inc
+
+SUMMARY = "'Bad' GStreamer plugins and helper libraries "
+HOMEPAGE = "https://gstreamer.freedesktop.org/"
+BUGTRACKER = "https://gitlab.freedesktop.org/gstreamer/gst-plugins-bad/-/issues"
+
+SRC_URI = "https://gstreamer.freedesktop.org/src/gst-plugins-bad/gst-plugins-bad-${PV}.tar.xz \
+ file://0001-fix-maybe-uninitialized-warnings-when-compiling-with.patch \
+ file://0002-avoid-including-sys-poll.h-directly.patch \
+ file://0004-opencv-resolve-missing-opencv-data-dir-in-yocto-buil.patch \
+ "
+SRC_URI[sha256sum] = "808d3b33fc4c71aeb2561c364a87c2e8a3e2343319a83244c8391be4b09499c8"
+
+S = "${WORKDIR}/gst-plugins-bad-${PV}"
+
+LICENSE = "LGPL-2.1-or-later & GPL-2.0-or-later"
+LIC_FILES_CHKSUM = "file://COPYING;md5=4fbd65380cdd255951079008b364516c"
+
+DEPENDS += "gstreamer1.0-plugins-base"
+
+inherit gobject-introspection
+
+PACKAGECONFIG ??= " \
+ ${GSTREAMER_ORC} \
+ ${@bb.utils.contains('DISTRO_FEATURES', 'bluetooth', 'bluez', '', d)} \
+ ${@bb.utils.filter('DISTRO_FEATURES', 'directfb vulkan x11', d)} \
+ ${@bb.utils.contains('DISTRO_FEATURES', 'wayland', 'wayland', '', d)} \
+ ${@bb.utils.contains('DISTRO_FEATURES', 'opengl', 'gl', '', d)} \
+ bz2 closedcaption curl dash dtls hls openssl sbc smoothstreaming \
+ sndfile ttml uvch264 webp \
+ ${@bb.utils.contains('TUNE_FEATURES', 'mx32', '', 'rsvg', d)} \
+"
+
+PACKAGECONFIG[aom] = "-Daom=enabled,-Daom=disabled,aom"
+PACKAGECONFIG[assrender] = "-Dassrender=enabled,-Dassrender=disabled,libass"
+PACKAGECONFIG[avtp] = "-Davtp=enabled,-Davtp=disabled,libavtp"
+PACKAGECONFIG[bluez] = "-Dbluez=enabled,-Dbluez=disabled,bluez5"
+PACKAGECONFIG[bz2] = "-Dbz2=enabled,-Dbz2=disabled,bzip2"
+PACKAGECONFIG[closedcaption] = "-Dclosedcaption=enabled,-Dclosedcaption=disabled,pango cairo"
+PACKAGECONFIG[curl] = "-Dcurl=enabled,-Dcurl=disabled,curl"
+PACKAGECONFIG[dash] = "-Ddash=enabled,-Ddash=disabled,libxml2"
+PACKAGECONFIG[dc1394] = "-Ddc1394=enabled,-Ddc1394=disabled,libdc1394"
+PACKAGECONFIG[directfb] = "-Ddirectfb=enabled,-Ddirectfb=disabled,directfb"
+PACKAGECONFIG[dtls] = "-Ddtls=enabled,-Ddtls=disabled,openssl"
+PACKAGECONFIG[faac] = "-Dfaac=enabled,-Dfaac=disabled,faac"
+PACKAGECONFIG[faad] = "-Dfaad=enabled,-Dfaad=disabled,faad2"
+PACKAGECONFIG[fluidsynth] = "-Dfluidsynth=enabled,-Dfluidsynth=disabled,fluidsynth"
+PACKAGECONFIG[hls] = "-Dhls=enabled,-Dhls=disabled,"
+# Pick atleast one crypto backend below when enabling hls
+PACKAGECONFIG[nettle] = "-Dhls-crypto=nettle,,nettle"
+PACKAGECONFIG[openssl] = "-Dhls-crypto=openssl,,openssl"
+PACKAGECONFIG[gcrypt] = "-Dhls-crypto=libgcrypt,,libgcrypt"
+# the gl packageconfig enables OpenGL elements that haven't been ported
+# to -base yet. They depend on the gstgl library in -base, so we do
+# not add GL dependencies here, since these are taken care of in -base.
+PACKAGECONFIG[gl] = "-Dgl=enabled,-Dgl=disabled,"
+PACKAGECONFIG[kms] = "-Dkms=enabled,-Dkms=disabled,libdrm"
+PACKAGECONFIG[libde265] = "-Dlibde265=enabled,-Dlibde265=disabled,libde265"
+PACKAGECONFIG[libssh2] = "-Dcurl-ssh2=enabled,-Dcurl-ssh2=disabled,libssh2"
+PACKAGECONFIG[lcms2] = "-Dcolormanagement=enabled,-Dcolormanagement=disabled,lcms"
+PACKAGECONFIG[modplug] = "-Dmodplug=enabled,-Dmodplug=disabled,libmodplug"
+PACKAGECONFIG[msdk] = "-Dmsdk=enabled -Dmfx_api=oneVPL,-Dmsdk=disabled,onevpl-intel-gpu"
+PACKAGECONFIG[neon] = "-Dneon=enabled,-Dneon=disabled,neon"
+PACKAGECONFIG[openal] = "-Dopenal=enabled,-Dopenal=disabled,openal-soft"
+PACKAGECONFIG[opencv] = "-Dopencv=enabled,-Dopencv=disabled,opencv"
+PACKAGECONFIG[openh264] = "-Dopenh264=enabled,-Dopenh264=disabled,openh264"
+PACKAGECONFIG[openjpeg] = "-Dopenjpeg=enabled,-Dopenjpeg=disabled,openjpeg"
+PACKAGECONFIG[openmpt] = "-Dopenmpt=enabled,-Dopenmpt=disabled,libopenmpt"
+# the opus encoder/decoder elements are now in the -base package,
+# but the opus parser remains in -bad
+PACKAGECONFIG[opusparse] = "-Dopus=enabled,-Dopus=disabled,libopus"
+PACKAGECONFIG[resindvd] = "-Dresindvd=enabled,-Dresindvd=disabled,libdvdread libdvdnav"
+PACKAGECONFIG[rsvg] = "-Drsvg=enabled,-Drsvg=disabled,librsvg"
+PACKAGECONFIG[rtmp] = "-Drtmp=enabled,-Drtmp=disabled,rtmpdump"
+PACKAGECONFIG[sbc] = "-Dsbc=enabled,-Dsbc=disabled,sbc"
+PACKAGECONFIG[sctp] = "-Dsctp=enabled,-Dsctp=disabled"
+PACKAGECONFIG[smoothstreaming] = "-Dsmoothstreaming=enabled,-Dsmoothstreaming=disabled,libxml2"
+PACKAGECONFIG[sndfile] = "-Dsndfile=enabled,-Dsndfile=disabled,libsndfile1"
+PACKAGECONFIG[srt] = "-Dsrt=enabled,-Dsrt=disabled,srt"
+PACKAGECONFIG[srtp] = "-Dsrtp=enabled,-Dsrtp=disabled,libsrtp"
+PACKAGECONFIG[tinyalsa] = "-Dtinyalsa=enabled,-Dtinyalsa=disabled,tinyalsa"
+PACKAGECONFIG[ttml] = "-Dttml=enabled,-Dttml=disabled,libxml2 pango cairo"
+PACKAGECONFIG[uvch264] = "-Duvch264=enabled,-Duvch264=disabled,libusb1 libgudev"
+# this enables support for stateless V4L2 mem2mem codecs, which is a newer form of
+# V4L2 codec; the V4L2 code in -base supports the older stateful V4L2 mem2mem codecs
+PACKAGECONFIG[v4l2codecs] = "-Dv4l2codecs=enabled,-Dv4l2codecs=disabled,libgudev"
+PACKAGECONFIG[va] = "-Dva=enabled,-Dva=disabled,libva"
+PACKAGECONFIG[voaacenc] = "-Dvoaacenc=enabled,-Dvoaacenc=disabled,vo-aacenc"
+PACKAGECONFIG[voamrwbenc] = "-Dvoamrwbenc=enabled,-Dvoamrwbenc=disabled,vo-amrwbenc"
+PACKAGECONFIG[vulkan] = "-Dvulkan=enabled,-Dvulkan=disabled,vulkan-loader shaderc-native"
+PACKAGECONFIG[wayland] = "-Dwayland=enabled,-Dwayland=disabled,wayland-native wayland wayland-protocols libdrm"
+PACKAGECONFIG[webp] = "-Dwebp=enabled,-Dwebp=disabled,libwebp"
+PACKAGECONFIG[webrtc] = "-Dwebrtc=enabled,-Dwebrtc=disabled,libnice"
+PACKAGECONFIG[webrtcdsp] = "-Dwebrtcdsp=enabled,-Dwebrtcdsp=disabled,webrtc-audio-processing"
+PACKAGECONFIG[zbar] = "-Dzbar=enabled,-Dzbar=disabled,zbar"
+PACKAGECONFIG[x11] = "-Dx11=enabled,-Dx11=disabled,libxcb libxkbcommon"
+PACKAGECONFIG[x265] = "-Dx265=enabled,-Dx265=disabled,x265"
+
+GSTREAMER_GPL = "${@bb.utils.filter('PACKAGECONFIG', 'faad resindvd x265', d)}"
+
+EXTRA_OEMESON += " \
+ -Ddoc=disabled \
+ -Daes=enabled \
+ -Dcodecalpha=enabled \
+ -Ddecklink=enabled \
+ -Ddvb=enabled \
+ -Dfbdev=enabled \
+ -Dipcpipeline=enabled \
+ -Dshm=enabled \
+ -Dtranscode=enabled \
+ -Dandroidmedia=disabled \
+ -Dapplemedia=disabled \
+ -Dasio=disabled \
+ -Dbs2b=disabled \
+ -Dchromaprint=disabled \
+ -Dd3dvideosink=disabled \
+ -Dd3d11=disabled \
+ -Ddirectsound=disabled \
+ -Ddts=disabled \
+ -Dfdkaac=disabled \
+ -Dflite=disabled \
+ -Dgme=disabled \
+ -Dgs=disabled \
+ -Dgsm=disabled \
+ -Diqa=disabled \
+ -Dkate=disabled \
+ -Dladspa=disabled \
+ -Dldac=disabled \
+ -Dlv2=disabled \
+ -Dmagicleap=disabled \
+ -Dmediafoundation=disabled \
+ -Dmicrodns=disabled \
+ -Dmpeg2enc=disabled \
+ -Dmplex=disabled \
+ -Dmusepack=disabled \
+ -Dnvcodec=disabled \
+ -Dopenexr=disabled \
+ -Dopenni2=disabled \
+ -Dopenaptx=disabled \
+ -Dopensles=disabled \
+ -Donnx=disabled \
+ -Dqroverlay=disabled \
+ -Dsoundtouch=disabled \
+ -Dspandsp=disabled \
+ -Dsvthevcenc=disabled \
+ -Dteletext=disabled \
+ -Dwasapi=disabled \
+ -Dwasapi2=disabled \
+ -Dwildmidi=disabled \
+ -Dwinks=disabled \
+ -Dwinscreencap=disabled \
+ -Dwpe=disabled \
+ -Dzxing=disabled \
+"
+
+export OPENCV_PREFIX = "${STAGING_DIR_TARGET}${prefix}"
+
+ARM_INSTRUCTION_SET:armv4 = "arm"
+ARM_INSTRUCTION_SET:armv5 = "arm"
+
+FILES:${PN}-freeverb += "${datadir}/gstreamer-1.0/presets/GstFreeverb.prs"
+FILES:${PN}-opencv += "${datadir}/gst-plugins-bad/1.0/opencv*"
+FILES:${PN}-transcode += "${datadir}/gstreamer-1.0/encoding-profiles"
+FILES:${PN}-voamrwbenc += "${datadir}/gstreamer-1.0/presets/GstVoAmrwbEnc.prs"
diff --git a/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-base/0001-ENGR00312515-get-caps-from-src-pad-when-query-caps.patch b/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-base/0001-ENGR00312515-get-caps-from-src-pad-when-query-caps.patch
index d5d9838372..34c99ded42 100644
--- a/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-base/0001-ENGR00312515-get-caps-from-src-pad-when-query-caps.patch
+++ b/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-base/0001-ENGR00312515-get-caps-from-src-pad-when-query-caps.patch
@@ -1,13 +1,14 @@
-From f9d48cd85ee68207733b1b91a00453462c33524a Mon Sep 17 00:00:00 2001
+From 63fe5a7b4ef70e2c490bad3b0838329935a8d77c Mon Sep 17 00:00:00 2001
From: zhouming <b42586@freescale.com>
Date: Wed, 14 May 2014 10:16:20 +0800
-Subject: [PATCH 1/4] ENGR00312515: get caps from src pad when query caps
+Subject: [PATCH] ENGR00312515: get caps from src pad when query caps
https://bugzilla.gnome.org/show_bug.cgi?id=728312
Upstream-Status: Pending
Signed-off-by: zhouming <b42586@freescale.com>
+
---
gst-libs/gst/tag/gsttagdemux.c | 13 +++++++++++++
1 file changed, 13 insertions(+)
@@ -16,10 +17,10 @@ Signed-off-by: zhouming <b42586@freescale.com>
diff --git a/gst-libs/gst/tag/gsttagdemux.c b/gst-libs/gst/tag/gsttagdemux.c
old mode 100644
new mode 100755
-index f545857..62d10ef
+index 173da37..2b7f34c
--- a/gst-libs/gst/tag/gsttagdemux.c
+++ b/gst-libs/gst/tag/gsttagdemux.c
-@@ -1777,6 +1777,19 @@ gst_tag_demux_pad_query (GstPad * pad, GstObject * parent, GstQuery * query)
+@@ -1796,6 +1796,19 @@ gst_tag_demux_pad_query (GstPad * pad, GstObject * parent, GstQuery * query)
}
break;
}
@@ -39,6 +40,3 @@ index f545857..62d10ef
default:
res = gst_pad_query_default (pad, parent, query);
break;
---
-2.28.0
-
diff --git a/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-base/0002-ssaparse-enhance-SSA-text-lines-parsing.patch b/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-base/0002-ssaparse-enhance-SSA-text-lines-parsing.patch
index e453a500c9..2adeae93d6 100644
--- a/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-base/0002-ssaparse-enhance-SSA-text-lines-parsing.patch
+++ b/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-base/0002-ssaparse-enhance-SSA-text-lines-parsing.patch
@@ -1,14 +1,15 @@
-From f587861bbe12ad0b10370f835592746aafedbf56 Mon Sep 17 00:00:00 2001
+From 7bf9525528c8f4a47413d7f82214d76f95f0c5f6 Mon Sep 17 00:00:00 2001
From: Mingke Wang <mingke.wang@freescale.com>
Date: Thu, 19 Mar 2015 14:17:10 +0800
-Subject: [PATCH 2/4] ssaparse: enhance SSA text lines parsing.
+Subject: [PATCH] ssaparse: enhance SSA text lines parsing.
some parser will pass in the original ssa text line which starts with "Dialog:"
and there's are maybe multiple Dialog lines in one input buffer.
-Upstream-Status: Submitted [https://bugzilla.gnome.org/show_bug.cgi?id=747496]
+Upstream-Status: Submitted [https://gitlab.freedesktop.org/gstreamer/gst-plugins-base/-/issues/178]
Signed-off-by: Mingke Wang <mingke.wang@freescale.com>
+
---
gst/subparse/gstssaparse.c | 150 +++++++++++++++++++++++++++++++++----
1 file changed, 134 insertions(+), 16 deletions(-)
@@ -17,10 +18,10 @@ Signed-off-by: Mingke Wang <mingke.wang@freescale.com>
diff --git a/gst/subparse/gstssaparse.c b/gst/subparse/gstssaparse.c
old mode 100644
new mode 100755
-index c849c08..4b9636c
+index d6fdb9c..5ebe678
--- a/gst/subparse/gstssaparse.c
+++ b/gst/subparse/gstssaparse.c
-@@ -262,6 +262,7 @@ gst_ssa_parse_remove_override_codes (GstSsaParse * parse, gchar * txt)
+@@ -270,6 +270,7 @@ gst_ssa_parse_remove_override_codes (GstSsaParse * parse, gchar * txt)
* gst_ssa_parse_push_line:
* @parse: caller element
* @txt: text to push
@@ -28,7 +29,7 @@ index c849c08..4b9636c
* @start: timestamp for the buffer
* @duration: duration for the buffer
*
-@@ -271,27 +272,133 @@ gst_ssa_parse_remove_override_codes (GstSsaParse * parse, gchar * txt)
+@@ -279,27 +280,133 @@ gst_ssa_parse_remove_override_codes (GstSsaParse * parse, gchar * txt)
* Returns: result of the push of the created buffer
*/
static GstFlowReturn
@@ -173,7 +174,7 @@ index c849c08..4b9636c
GST_LOG_OBJECT (parse, "Text : %s", t);
if (gst_ssa_parse_remove_override_codes (parse, t)) {
-@@ -309,13 +416,22 @@ gst_ssa_parse_push_line (GstSsaParse * parse, gchar * txt,
+@@ -317,13 +424,22 @@ gst_ssa_parse_push_line (GstSsaParse * parse, gchar * txt,
gst_buffer_fill (buf, 0, escaped, len + 1);
gst_buffer_set_size (buf, len);
g_free (escaped);
@@ -200,7 +201,7 @@ index c849c08..4b9636c
ret = gst_pad_push (parse->srcpad, buf);
-@@ -335,6 +451,7 @@ gst_ssa_parse_chain (GstPad * sinkpad, GstObject * parent, GstBuffer * buf)
+@@ -343,6 +459,7 @@ gst_ssa_parse_chain (GstPad * sinkpad, GstObject * parent, GstBuffer * buf)
GstClockTime ts;
gchar *txt;
GstMapInfo map;
@@ -208,7 +209,7 @@ index c849c08..4b9636c
if (G_UNLIKELY (!parse->framed))
goto not_framed;
-@@ -352,13 +469,14 @@ gst_ssa_parse_chain (GstPad * sinkpad, GstObject * parent, GstBuffer * buf)
+@@ -360,13 +477,14 @@ gst_ssa_parse_chain (GstPad * sinkpad, GstObject * parent, GstBuffer * buf)
/* make double-sure it's 0-terminated and all */
gst_buffer_map (buf, &map, GST_MAP_READ);
txt = g_strndup ((gchar *) map.data, map.size);
@@ -224,6 +225,3 @@ index c849c08..4b9636c
if (ret != GST_FLOW_OK && GST_CLOCK_TIME_IS_VALID (ts)) {
GstSegment segment;
---
-2.28.0
-
diff --git a/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-base/0003-viv-fb-Make-sure-config.h-is-included.patch b/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-base/0003-viv-fb-Make-sure-config.h-is-included.patch
index 2af83ff8b9..a605533be8 100644
--- a/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-base/0003-viv-fb-Make-sure-config.h-is-included.patch
+++ b/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-base/0003-viv-fb-Make-sure-config.h-is-included.patch
@@ -1,19 +1,20 @@
-From 153f3b83a3fed77785bd1420bed8bbafa2d791b3 Mon Sep 17 00:00:00 2001
+From 2b07840122bc2e83bd23dad59aa80d9479f2e1e4 Mon Sep 17 00:00:00 2001
From: Carlos Rafael Giani <crg7475@mailbox.org>
Date: Tue, 21 May 2019 14:01:11 +0200
-Subject: [PATCH 3/4] viv-fb: Make sure config.h is included
+Subject: [PATCH] viv-fb: Make sure config.h is included
This prevents build errors due to missing GST_API_* symbols
Upstream-Status: Pending
Signed-off-by: Carlos Rafael Giani <crg7475@mailbox.org>
+
---
gst-libs/gst/gl/gl-prelude.h | 4 ++++
1 file changed, 4 insertions(+)
diff --git a/gst-libs/gst/gl/gl-prelude.h b/gst-libs/gst/gl/gl-prelude.h
-index 05e1f62..96ce5e6 100644
+index 85fca5a..946c729 100644
--- a/gst-libs/gst/gl/gl-prelude.h
+++ b/gst-libs/gst/gl/gl-prelude.h
@@ -22,6 +22,10 @@
@@ -27,6 +28,3 @@ index 05e1f62..96ce5e6 100644
#include <gst/gst.h>
#ifdef BUILDING_GST_GL
---
-2.28.0
-
diff --git a/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-base_1.20.2.bb b/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-base_1.20.2.bb
deleted file mode 100644
index e47851700a..0000000000
--- a/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-base_1.20.2.bb
+++ /dev/null
@@ -1,94 +0,0 @@
-require gstreamer1.0-plugins-common.inc
-
-DESCRIPTION = "'Base' GStreamer plugins and helper libraries"
-HOMEPAGE = "https://gstreamer.freedesktop.org/"
-BUGTRACKER = "https://gitlab.freedesktop.org/gstreamer/gst-plugins-base/-/issues"
-LICENSE = "LGPL-2.1-or-later"
-LIC_FILES_CHKSUM = "file://COPYING;md5=69333daa044cb77e486cc36129f7a770"
-
-SRC_URI = "https://gstreamer.freedesktop.org/src/gst-plugins-base/gst-plugins-base-${PV}.tar.xz \
- file://0001-ENGR00312515-get-caps-from-src-pad-when-query-caps.patch \
- file://0003-viv-fb-Make-sure-config.h-is-included.patch \
- file://0002-ssaparse-enhance-SSA-text-lines-parsing.patch \
- "
-SRC_URI[sha256sum] = "ab0656f2ad4d38292a803e0cb4ca090943a9b43c8063f650b4d3e3606c317f17"
-
-S = "${WORKDIR}/gst-plugins-base-${PV}"
-
-DEPENDS += "iso-codes util-linux zlib"
-
-inherit gobject-introspection
-
-# opengl packageconfig factored out to make it easy for distros
-# and BSP layers to choose OpenGL APIs/platforms/window systems
-PACKAGECONFIG_GL ?= "${@bb.utils.contains('DISTRO_FEATURES', 'opengl', 'gles2 egl', '', d)}"
-
-PACKAGECONFIG ??= " \
- ${GSTREAMER_ORC} \
- ${PACKAGECONFIG_GL} \
- ${@bb.utils.filter('DISTRO_FEATURES', 'alsa x11', d)} \
- jpeg ogg pango png theora vorbis \
- ${@bb.utils.contains('DISTRO_FEATURES', 'wayland', 'wayland egl', '', d)} \
-"
-
-OPENGL_APIS = 'opengl gles2'
-OPENGL_PLATFORMS = 'egl'
-
-X11DEPENDS = "virtual/libx11 libsm libxrender libxv"
-X11ENABLEOPTS = "-Dx11=enabled -Dxvideo=enabled -Dxshm=enabled"
-X11DISABLEOPTS = "-Dx11=disabled -Dxvideo=disabled -Dxshm=disabled"
-
-PACKAGECONFIG[alsa] = "-Dalsa=enabled,-Dalsa=disabled,alsa-lib"
-PACKAGECONFIG[cdparanoia] = "-Dcdparanoia=enabled,-Dcdparanoia=disabled,cdparanoia"
-PACKAGECONFIG[graphene] = "-Dgl-graphene=enabled,-Dgl-graphene=disabled,graphene"
-PACKAGECONFIG[jpeg] = "-Dgl-jpeg=enabled,-Dgl-jpeg=disabled,jpeg"
-PACKAGECONFIG[ogg] = "-Dogg=enabled,-Dogg=disabled,libogg"
-PACKAGECONFIG[opus] = "-Dopus=enabled,-Dopus=disabled,libopus"
-PACKAGECONFIG[pango] = "-Dpango=enabled,-Dpango=disabled,pango"
-PACKAGECONFIG[png] = "-Dgl-png=enabled,-Dgl-png=disabled,libpng"
-# This enables Qt5 QML examples in -base. The Qt5 GStreamer
-# qmlglsink and qmlglsrc plugins still exist in -good.
-PACKAGECONFIG[qt5] = "-Dqt5=enabled,-Dqt5=disabled,qtbase qtdeclarative qtbase-native"
-PACKAGECONFIG[theora] = "-Dtheora=enabled,-Dtheora=disabled,libtheora"
-PACKAGECONFIG[tremor] = "-Dtremor=enabled,-Dtremor=disabled,tremor"
-PACKAGECONFIG[visual] = "-Dlibvisual=enabled,-Dlibvisual=disabled,libvisual"
-PACKAGECONFIG[vorbis] = "-Dvorbis=enabled,-Dvorbis=disabled,libvorbis"
-PACKAGECONFIG[x11] = "${X11ENABLEOPTS},${X11DISABLEOPTS},${X11DEPENDS}"
-
-# OpenGL API packageconfigs
-PACKAGECONFIG[opengl] = ",,virtual/libgl libglu"
-PACKAGECONFIG[gles2] = ",,virtual/libgles2"
-
-# OpenGL platform packageconfigs
-PACKAGECONFIG[egl] = ",,virtual/egl"
-
-# OpenGL window systems (except for X11)
-PACKAGECONFIG[gbm] = ",,virtual/libgbm libgudev libdrm"
-PACKAGECONFIG[wayland] = ",,wayland-native wayland wayland-protocols libdrm"
-PACKAGECONFIG[dispmanx] = ",,virtual/libomxil"
-PACKAGECONFIG[viv-fb] = ",,virtual/libgles2 virtual/libg2d"
-
-OPENGL_WINSYS = "${@bb.utils.filter('PACKAGECONFIG', 'x11 gbm wayland dispmanx egl viv-fb', d)}"
-
-EXTRA_OEMESON += " \
- -Ddoc=disabled \
- ${@get_opengl_cmdline_list('gl_api', d.getVar('OPENGL_APIS'), d)} \
- ${@get_opengl_cmdline_list('gl_platform', d.getVar('OPENGL_PLATFORMS'), d)} \
- ${@get_opengl_cmdline_list('gl_winsys', d.getVar('OPENGL_WINSYS'), d)} \
-"
-
-FILES:${PN}-dev += "${libdir}/gstreamer-1.0/include/gst/gl/gstglconfig.h"
-FILES:${MLPREFIX}libgsttag-1.0 += "${datadir}/gst-plugins-base/1.0/license-translations.dict"
-
-def get_opengl_cmdline_list(switch_name, options, d):
- selected_options = []
- if bb.utils.contains('DISTRO_FEATURES', 'opengl', True, False, d):
- for option in options.split():
- if bb.utils.contains('PACKAGECONFIG', option, True, False, d):
- selected_options += [option]
- if selected_options:
- return '-D' + switch_name + '=' + ','.join(selected_options)
- else:
- return ''
-
-CVE_PRODUCT += "gst-plugins-base"
diff --git a/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-base_1.22.11.bb b/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-base_1.22.11.bb
new file mode 100644
index 0000000000..7aa10eb646
--- /dev/null
+++ b/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-base_1.22.11.bb
@@ -0,0 +1,96 @@
+require gstreamer1.0-plugins-common.inc
+
+SUMMARY = "'Base' GStreamer plugins and helper libraries"
+HOMEPAGE = "https://gstreamer.freedesktop.org/"
+BUGTRACKER = "https://gitlab.freedesktop.org/gstreamer/gst-plugins-base/-/issues"
+LICENSE = "LGPL-2.1-or-later"
+LIC_FILES_CHKSUM = "file://COPYING;md5=69333daa044cb77e486cc36129f7a770"
+
+SRC_URI = "https://gstreamer.freedesktop.org/src/gst-plugins-base/gst-plugins-base-${PV}.tar.xz \
+ file://0001-ENGR00312515-get-caps-from-src-pad-when-query-caps.patch \
+ file://0003-viv-fb-Make-sure-config.h-is-included.patch \
+ file://0002-ssaparse-enhance-SSA-text-lines-parsing.patch \
+ "
+SRC_URI[sha256sum] = "65eaf72296cc5edc985695a4d80affc931e64a79f4879d05615854f7a2cf5bd1"
+
+S = "${WORKDIR}/gst-plugins-base-${PV}"
+
+DEPENDS += "iso-codes util-linux zlib"
+
+inherit gobject-introspection
+
+# opengl packageconfig factored out to make it easy for distros
+# and BSP layers to choose OpenGL APIs/platforms/window systems
+PACKAGECONFIG_X11 = "${@bb.utils.contains('DISTRO_FEATURES', 'x11', 'opengl glx', '', d)}"
+PACKAGECONFIG_GL ?= "${@bb.utils.contains('DISTRO_FEATURES', 'opengl', 'gles2 egl ${PACKAGECONFIG_X11}', '', d)}"
+
+PACKAGECONFIG ??= " \
+ ${GSTREAMER_ORC} \
+ ${PACKAGECONFIG_GL} \
+ ${@bb.utils.filter('DISTRO_FEATURES', 'alsa x11', d)} \
+ jpeg ogg pango png theora vorbis \
+ ${@bb.utils.contains('DISTRO_FEATURES', 'wayland', 'wayland egl', '', d)} \
+"
+
+OPENGL_APIS = 'opengl gles2'
+OPENGL_PLATFORMS = 'egl glx'
+
+X11DEPENDS = "virtual/libx11 libsm libxrender libxv"
+X11ENABLEOPTS = "-Dx11=enabled -Dxvideo=enabled -Dxshm=enabled"
+X11DISABLEOPTS = "-Dx11=disabled -Dxvideo=disabled -Dxshm=disabled"
+
+PACKAGECONFIG[alsa] = "-Dalsa=enabled,-Dalsa=disabled,alsa-lib"
+PACKAGECONFIG[cdparanoia] = "-Dcdparanoia=enabled,-Dcdparanoia=disabled,cdparanoia"
+PACKAGECONFIG[graphene] = "-Dgl-graphene=enabled,-Dgl-graphene=disabled,graphene"
+PACKAGECONFIG[jpeg] = "-Dgl-jpeg=enabled,-Dgl-jpeg=disabled,jpeg"
+PACKAGECONFIG[ogg] = "-Dogg=enabled,-Dogg=disabled,libogg"
+PACKAGECONFIG[opus] = "-Dopus=enabled,-Dopus=disabled,libopus"
+PACKAGECONFIG[pango] = "-Dpango=enabled,-Dpango=disabled,pango"
+PACKAGECONFIG[png] = "-Dgl-png=enabled,-Dgl-png=disabled,libpng"
+# This enables Qt5 QML examples in -base. The Qt5 GStreamer
+# qmlglsink and qmlglsrc plugins still exist in -good.
+PACKAGECONFIG[qt5] = "-Dqt5=enabled,-Dqt5=disabled,qtbase qtdeclarative qtbase-native"
+PACKAGECONFIG[theora] = "-Dtheora=enabled,-Dtheora=disabled,libtheora"
+PACKAGECONFIG[tremor] = "-Dtremor=enabled,-Dtremor=disabled,tremor"
+PACKAGECONFIG[visual] = "-Dlibvisual=enabled,-Dlibvisual=disabled,libvisual"
+PACKAGECONFIG[vorbis] = "-Dvorbis=enabled,-Dvorbis=disabled,libvorbis"
+PACKAGECONFIG[x11] = "${X11ENABLEOPTS},${X11DISABLEOPTS},${X11DEPENDS}"
+
+# OpenGL API packageconfigs
+PACKAGECONFIG[opengl] = ",,virtual/libgl libglu"
+PACKAGECONFIG[gles2] = ",,virtual/libgles2"
+
+# OpenGL platform packageconfigs
+PACKAGECONFIG[egl] = ",,virtual/egl"
+PACKAGECONFIG[glx] = ",,virtual/libgl"
+
+# OpenGL window systems (except for X11)
+PACKAGECONFIG[gbm] = ",,virtual/libgbm libgudev libdrm"
+PACKAGECONFIG[wayland] = ",,wayland-native wayland wayland-protocols libdrm"
+PACKAGECONFIG[dispmanx] = ",,virtual/libomxil"
+PACKAGECONFIG[viv-fb] = ",,virtual/libgles2 virtual/libg2d"
+
+OPENGL_WINSYS = "${@bb.utils.filter('PACKAGECONFIG', 'x11 gbm wayland dispmanx egl viv-fb', d)}"
+
+EXTRA_OEMESON += " \
+ -Ddoc=disabled \
+ ${@get_opengl_cmdline_list('gl_api', d.getVar('OPENGL_APIS'), d)} \
+ ${@get_opengl_cmdline_list('gl_platform', d.getVar('OPENGL_PLATFORMS'), d)} \
+ ${@get_opengl_cmdline_list('gl_winsys', d.getVar('OPENGL_WINSYS'), d)} \
+"
+
+FILES:${PN}-dev += "${libdir}/gstreamer-1.0/include/gst/gl/gstglconfig.h"
+FILES:${MLPREFIX}libgsttag-1.0 += "${datadir}/gst-plugins-base/1.0/license-translations.dict"
+
+def get_opengl_cmdline_list(switch_name, options, d):
+ selected_options = []
+ if bb.utils.contains('DISTRO_FEATURES', 'opengl', True, False, d):
+ for option in options.split():
+ if bb.utils.contains('PACKAGECONFIG', option, True, False, d):
+ selected_options += [option]
+ if selected_options:
+ return '-D' + switch_name + '=' + ','.join(selected_options)
+ else:
+ return ''
+
+CVE_PRODUCT += "gst-plugins-base"
diff --git a/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-good/0001-qt-include-ext-qt-gstqtgl.h-instead-of-gst-gl-gstglf.patch b/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-good/0001-qt-include-ext-qt-gstqtgl.h-instead-of-gst-gl-gstglf.patch
index 788d752058..9703a3a889 100644
--- a/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-good/0001-qt-include-ext-qt-gstqtgl.h-instead-of-gst-gl-gstglf.patch
+++ b/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-good/0001-qt-include-ext-qt-gstqtgl.h-instead-of-gst-gl-gstglf.patch
@@ -1,4 +1,4 @@
-From bf8b2fa0f6870589d036f0f33c140a3f85b530a0 Mon Sep 17 00:00:00 2001
+From 99f48716051ce5ddb8c1b77292213af1e462549e Mon Sep 17 00:00:00 2001
From: Khem Raj <raj.khem@gmail.com>
Date: Tue, 31 Mar 2020 21:23:28 -0700
Subject: [PATCH] qt: include ext/qt/gstqtgl.h instead of gst/gl/gstglfuncs.h
@@ -20,13 +20,14 @@ In file included from /mnt/b/yoe/build/tmp/work/cortexa7t2hf-neon-vfpv4-yoe-linu
Upstream-Status: Pending
Signed-off-by: Khem Raj <raj.khem@gmail.com>
+
---
ext/qt/gstqsgtexture.cc | 2 +-
ext/qt/qtwindow.cc | 2 +-
2 files changed, 2 insertions(+), 2 deletions(-)
diff --git a/ext/qt/gstqsgtexture.cc b/ext/qt/gstqsgtexture.cc
-index a05d26e..4cc9fc6 100644
+index 663696b..36b17d4 100644
--- a/ext/qt/gstqsgtexture.cc
+++ b/ext/qt/gstqsgtexture.cc
@@ -27,7 +27,7 @@
@@ -39,7 +40,7 @@ index a05d26e..4cc9fc6 100644
#define GST_CAT_DEFAULT gst_qsg_texture_debug
diff --git a/ext/qt/qtwindow.cc b/ext/qt/qtwindow.cc
-index 9360c33..0dfd3f1 100644
+index 2872cb5..5a36be9 100644
--- a/ext/qt/qtwindow.cc
+++ b/ext/qt/qtwindow.cc
@@ -25,7 +25,7 @@
@@ -51,6 +52,3 @@ index 9360c33..0dfd3f1 100644
#include "qtwindow.h"
#include "gstqsgtexture.h"
#include "gstqtglutility.h"
---
-2.28.0
-
diff --git a/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-good/0001-v4l2-Define-ioctl_req_t-for-posix-linux-case.patch b/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-good/0001-v4l2-Define-ioctl_req_t-for-posix-linux-case.patch
new file mode 100644
index 0000000000..33bd4200f6
--- /dev/null
+++ b/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-good/0001-v4l2-Define-ioctl_req_t-for-posix-linux-case.patch
@@ -0,0 +1,38 @@
+From b77d4806fd5de50d0b017a3e6a19c5bfdef7b3e4 Mon Sep 17 00:00:00 2001
+From: Khem Raj <raj.khem@gmail.com>
+Date: Mon, 13 Feb 2023 12:47:31 -0800
+Subject: [PATCH] v4l2: Define ioctl_req_t for posix/linux case
+
+this is an issue seen with musl based linux distros e.g. alpine [1]
+musl is not going to change this since it breaks ABI/API interfaces
+Newer compilers are stringent ( e.g. clang16 ) which can now detect
+signature mismatches in function pointers too, existing code warned but
+did not error with older clang
+
+Fixes
+gstv4l2object.c:544:23: error: incompatible function pointer types assigning to 'gint (*)(gint, ioctl_req_t, ...)' (aka 'int (*)(int, unsigned long, ...)') from 'int (int, int, ...)' [-Wincompatible-function-pointer-types]
+ v4l2object->ioctl = ioctl;
+ ^ ~~~~~
+
+[1] https://gitlab.alpinelinux.org/alpine/aports/-/issues/7580
+
+Upstream-Status: Submitted [https://gitlab.freedesktop.org/gstreamer/gstreamer/-/merge_requests/3950]
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+
+---
+ sys/v4l2/gstv4l2object.h | 2 ++
+ 1 file changed, 2 insertions(+)
+
+diff --git a/sys/v4l2/gstv4l2object.h b/sys/v4l2/gstv4l2object.h
+index d95b375..5223cbb 100644
+--- a/sys/v4l2/gstv4l2object.h
++++ b/sys/v4l2/gstv4l2object.h
+@@ -76,6 +76,8 @@ typedef gboolean (*GstV4l2UpdateFpsFunction) (GstV4l2Object * v4l2object);
+ * 'unsigned long' for the 2nd parameter */
+ #ifdef __ANDROID__
+ typedef unsigned ioctl_req_t;
++#elif defined(__linux__) && !defined(__GLIBC__) /* musl/linux */
++typedef int ioctl_req_t;
+ #else
+ typedef gulong ioctl_req_t;
+ #endif
diff --git a/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-good_1.20.2.bb b/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-good_1.20.2.bb
deleted file mode 100644
index 6c52fb35b9..0000000000
--- a/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-good_1.20.2.bb
+++ /dev/null
@@ -1,81 +0,0 @@
-require gstreamer1.0-plugins-common.inc
-
-DESCRIPTION = "'Good' GStreamer plugins"
-HOMEPAGE = "https://gstreamer.freedesktop.org/"
-BUGTRACKER = "https://gitlab.freedesktop.org/gstreamer/gst-plugins-good/-/issues"
-
-SRC_URI = "https://gstreamer.freedesktop.org/src/gst-plugins-good/gst-plugins-good-${PV}.tar.xz \
- file://0001-qt-include-ext-qt-gstqtgl.h-instead-of-gst-gl-gstglf.patch \
- "
-
-SRC_URI[sha256sum] = "83589007bf002b8f9ef627718f308c16d83351905f0db8e85c3060f304143aae"
-
-S = "${WORKDIR}/gst-plugins-good-${PV}"
-
-LICENSE = "LGPL-2.1-or-later"
-LIC_FILES_CHKSUM = "file://COPYING;md5=a6f89e2100d9b6cdffcea4f398e37343 \
- file://gst/replaygain/rganalysis.c;beginline=1;endline=23;md5=b60ebefd5b2f5a8e0cab6bfee391a5fe"
-
-DEPENDS += "gstreamer1.0-plugins-base libcap zlib"
-RPROVIDES:${PN}-pulseaudio += "${PN}-pulse"
-RPROVIDES:${PN}-soup += "${PN}-souphttpsrc"
-
-PACKAGECONFIG ??= " \
- ${GSTREAMER_ORC} \
- ${@bb.utils.filter('DISTRO_FEATURES', 'pulseaudio x11', d)} \
- ${@bb.utils.contains('TUNE_FEATURES', 'm64', 'asm', '', d)} \
- bz2 cairo flac gdk-pixbuf gudev jpeg lame libpng mpg123 soup speex taglib v4l2 \
-"
-
-X11DEPENDS = "virtual/libx11 libsm libxrender libxfixes libxdamage"
-X11ENABLEOPTS = "-Dximagesrc=enabled -Dximagesrc-xshm=enabled -Dximagesrc-xfixes=enabled -Dximagesrc-xdamage=enabled"
-X11DISABLEOPTS = "-Dximagesrc=disabled -Dximagesrc-xshm=disabled -Dximagesrc-xfixes=disabled -Dximagesrc-xdamage=disabled"
-
-QT5WAYLANDDEPENDS = "${@bb.utils.contains("DISTRO_FEATURES", "wayland", "qtwayland", "", d)}"
-
-PACKAGECONFIG[asm] = "-Dasm=enabled,-Dasm=disabled,nasm-native"
-PACKAGECONFIG[bz2] = "-Dbz2=enabled,-Dbz2=disabled,bzip2"
-PACKAGECONFIG[cairo] = "-Dcairo=enabled,-Dcairo=disabled,cairo"
-PACKAGECONFIG[dv1394] = "-Ddv1394=enabled,-Ddv1394=disabled,libiec61883 libavc1394 libraw1394"
-PACKAGECONFIG[flac] = "-Dflac=enabled,-Dflac=disabled,flac"
-PACKAGECONFIG[gdk-pixbuf] = "-Dgdk-pixbuf=enabled,-Dgdk-pixbuf=disabled,gdk-pixbuf"
-PACKAGECONFIG[gtk] = "-Dgtk3=enabled,-Dgtk3=disabled,gtk+3"
-PACKAGECONFIG[gudev] = "-Dv4l2-gudev=enabled,-Dv4l2-gudev=disabled,libgudev"
-PACKAGECONFIG[jack] = "-Djack=enabled,-Djack=disabled,jack"
-PACKAGECONFIG[jpeg] = "-Djpeg=enabled,-Djpeg=disabled,jpeg"
-PACKAGECONFIG[lame] = "-Dlame=enabled,-Dlame=disabled,lame"
-PACKAGECONFIG[libpng] = "-Dpng=enabled,-Dpng=disabled,libpng"
-PACKAGECONFIG[libv4l2] = "-Dv4l2-libv4l2=enabled,-Dv4l2-libv4l2=disabled,v4l-utils"
-PACKAGECONFIG[mpg123] = "-Dmpg123=enabled,-Dmpg123=disabled,mpg123"
-PACKAGECONFIG[pulseaudio] = "-Dpulse=enabled,-Dpulse=disabled,pulseaudio"
-PACKAGECONFIG[qt5] = "-Dqt5=enabled,-Dqt5=disabled,qtbase qtdeclarative qtbase-native ${QT5WAYLANDDEPENDS}"
-# Starting with version 1.20, the GStreamer soup plugin loads libsoup with dlopen()
-# instead of linking to it. And instead of using the default libsoup C headers, it
-# uses its own stub header. Consequently, objdump will not show the libsoup .so as
-# a dependency, and libsoup won't be added to an image. Fix this by setting libsoup
-# as RDEPEND instead of DEPEND.
-PACKAGECONFIG[soup] = "-Dsoup=enabled,-Dsoup=disabled,,libsoup-2.4"
-PACKAGECONFIG[speex] = "-Dspeex=enabled,-Dspeex=disabled,speex"
-PACKAGECONFIG[rpi] = "-Drpicamsrc=enabled,-Drpicamsrc=disabled,userland"
-PACKAGECONFIG[taglib] = "-Dtaglib=enabled,-Dtaglib=disabled,taglib"
-PACKAGECONFIG[v4l2] = "-Dv4l2=enabled -Dv4l2-probe=true,-Dv4l2=disabled -Dv4l2-probe=false"
-PACKAGECONFIG[vpx] = "-Dvpx=enabled,-Dvpx=disabled,libvpx"
-PACKAGECONFIG[wavpack] = "-Dwavpack=enabled,-Dwavpack=disabled,wavpack"
-PACKAGECONFIG[x11] = "${X11ENABLEOPTS},${X11DISABLEOPTS},${X11DEPENDS}"
-
-EXTRA_OEMESON += " \
- -Ddoc=disabled \
- -Daalib=disabled \
- -Ddirectsound=disabled \
- -Ddv=disabled \
- -Dlibcaca=disabled \
- -Doss=enabled \
- -Doss4=disabled \
- -Dosxaudio=disabled \
- -Dosxvideo=disabled \
- -Dshout2=disabled \
- -Dtwolame=disabled \
- -Dwaveform=disabled \
-"
-
-FILES:${PN}-equalizer += "${datadir}/gstreamer-1.0/presets/*.prs"
diff --git a/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-good_1.22.11.bb b/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-good_1.22.11.bb
new file mode 100644
index 0000000000..edd8609b7c
--- /dev/null
+++ b/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-good_1.22.11.bb
@@ -0,0 +1,81 @@
+require gstreamer1.0-plugins-common.inc
+
+SUMMARY = "'Good' GStreamer plugins"
+HOMEPAGE = "https://gstreamer.freedesktop.org/"
+BUGTRACKER = "https://gitlab.freedesktop.org/gstreamer/gst-plugins-good/-/issues"
+
+SRC_URI = "https://gstreamer.freedesktop.org/src/gst-plugins-good/gst-plugins-good-${PV}.tar.xz \
+ file://0001-qt-include-ext-qt-gstqtgl.h-instead-of-gst-gl-gstglf.patch \
+ file://0001-v4l2-Define-ioctl_req_t-for-posix-linux-case.patch"
+
+SRC_URI[sha256sum] = "6ddd032381827d31820540735f0004b429436b0bdac19aaeab44fa22faad52e2"
+
+S = "${WORKDIR}/gst-plugins-good-${PV}"
+
+LICENSE = "LGPL-2.1-or-later"
+LIC_FILES_CHKSUM = "file://COPYING;md5=a6f89e2100d9b6cdffcea4f398e37343 \
+ file://gst/replaygain/rganalysis.c;beginline=1;endline=23;md5=b60ebefd5b2f5a8e0cab6bfee391a5fe"
+
+DEPENDS += "gstreamer1.0-plugins-base libcap zlib"
+RPROVIDES:${PN}-pulseaudio += "${PN}-pulse"
+RPROVIDES:${PN}-soup += "${PN}-souphttpsrc"
+RDEPENDS:${PN}-soup += "${MLPREFIX}${@bb.utils.contains('PACKAGECONFIG', 'soup2', 'libsoup-2.4', 'libsoup', d)}"
+
+PACKAGECONFIG_SOUP ?= "soup3"
+
+PACKAGECONFIG ??= " \
+ ${GSTREAMER_ORC} \
+ ${PACKAGECONFIG_SOUP} \
+ ${@bb.utils.filter('DISTRO_FEATURES', 'pulseaudio x11', d)} \
+ ${@bb.utils.contains('TUNE_FEATURES', 'm64', 'asm', '', d)} \
+ bz2 cairo flac gdk-pixbuf gudev jpeg lame libpng mpg123 speex taglib v4l2 \
+"
+
+X11DEPENDS = "virtual/libx11 libsm libxrender libxfixes libxdamage"
+X11ENABLEOPTS = "-Dximagesrc=enabled -Dximagesrc-xshm=enabled -Dximagesrc-xfixes=enabled -Dximagesrc-xdamage=enabled"
+X11DISABLEOPTS = "-Dximagesrc=disabled -Dximagesrc-xshm=disabled -Dximagesrc-xfixes=disabled -Dximagesrc-xdamage=disabled"
+
+QT5WAYLANDDEPENDS = "${@bb.utils.contains("DISTRO_FEATURES", "wayland", "qtwayland", "", d)}"
+
+PACKAGECONFIG[asm] = "-Dasm=enabled,-Dasm=disabled,nasm-native"
+PACKAGECONFIG[bz2] = "-Dbz2=enabled,-Dbz2=disabled,bzip2"
+PACKAGECONFIG[cairo] = "-Dcairo=enabled,-Dcairo=disabled,cairo"
+PACKAGECONFIG[dv1394] = "-Ddv1394=enabled,-Ddv1394=disabled,libiec61883 libavc1394 libraw1394"
+PACKAGECONFIG[flac] = "-Dflac=enabled,-Dflac=disabled,flac"
+PACKAGECONFIG[gdk-pixbuf] = "-Dgdk-pixbuf=enabled,-Dgdk-pixbuf=disabled,gdk-pixbuf"
+PACKAGECONFIG[gtk] = "-Dgtk3=enabled,-Dgtk3=disabled,gtk+3"
+PACKAGECONFIG[gudev] = "-Dv4l2-gudev=enabled,-Dv4l2-gudev=disabled,libgudev"
+PACKAGECONFIG[jack] = "-Djack=enabled,-Djack=disabled,jack"
+PACKAGECONFIG[jpeg] = "-Djpeg=enabled,-Djpeg=disabled,jpeg"
+PACKAGECONFIG[lame] = "-Dlame=enabled,-Dlame=disabled,lame"
+PACKAGECONFIG[libpng] = "-Dpng=enabled,-Dpng=disabled,libpng"
+PACKAGECONFIG[libv4l2] = "-Dv4l2-libv4l2=enabled,-Dv4l2-libv4l2=disabled,v4l-utils"
+PACKAGECONFIG[mpg123] = "-Dmpg123=enabled,-Dmpg123=disabled,mpg123"
+PACKAGECONFIG[pulseaudio] = "-Dpulse=enabled,-Dpulse=disabled,pulseaudio"
+PACKAGECONFIG[qt5] = "-Dqt5=enabled,-Dqt5=disabled,qtbase qtdeclarative qtbase-native ${QT5WAYLANDDEPENDS}"
+PACKAGECONFIG[soup2] = "-Dsoup=enabled,,libsoup-2.4,,,soup3"
+PACKAGECONFIG[soup3] = "-Dsoup=enabled,,libsoup,,,soup2"
+PACKAGECONFIG[speex] = "-Dspeex=enabled,-Dspeex=disabled,speex"
+PACKAGECONFIG[rpi] = "-Drpicamsrc=enabled,-Drpicamsrc=disabled,userland"
+PACKAGECONFIG[taglib] = "-Dtaglib=enabled,-Dtaglib=disabled,taglib"
+PACKAGECONFIG[v4l2] = "-Dv4l2=enabled -Dv4l2-probe=true,-Dv4l2=disabled -Dv4l2-probe=false"
+PACKAGECONFIG[vpx] = "-Dvpx=enabled,-Dvpx=disabled,libvpx"
+PACKAGECONFIG[wavpack] = "-Dwavpack=enabled,-Dwavpack=disabled,wavpack"
+PACKAGECONFIG[x11] = "${X11ENABLEOPTS},${X11DISABLEOPTS},${X11DEPENDS}"
+
+EXTRA_OEMESON += " \
+ -Ddoc=disabled \
+ -Daalib=disabled \
+ -Ddirectsound=disabled \
+ -Ddv=disabled \
+ -Dlibcaca=disabled \
+ -Doss=enabled \
+ -Doss4=disabled \
+ -Dosxaudio=disabled \
+ -Dosxvideo=disabled \
+ -Dshout2=disabled \
+ -Dtwolame=disabled \
+ -Dwaveform=disabled \
+"
+
+FILES:${PN}-equalizer += "${datadir}/gstreamer-1.0/presets/*.prs"
diff --git a/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-license.inc b/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-license.inc
index 6638f13540..8b1c001111 100644
--- a/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-license.inc
+++ b/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-license.inc
@@ -1,7 +1,7 @@
# This .inc file contains functionality for automatically setting
# the the license of all plugins according to the GSTREAMER_GPL.
-PACKAGESPLITFUNCS:append = " set_gstreamer_license "
+PACKAGESPLITFUNCS += "set_gstreamer_license"
python set_gstreamer_license () {
import oe.utils
diff --git a/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-packaging.inc b/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-packaging.inc
index afc7be40c8..d77aeed8a2 100644
--- a/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-packaging.inc
+++ b/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-packaging.inc
@@ -9,8 +9,8 @@
# Dynamically generate packages for all enabled plugins
PACKAGES_DYNAMIC = "^${PN}-.* ^libgst.*"
-PACKAGESPLITFUNCS:prepend = " split_gstreamer10_packages "
-PACKAGESPLITFUNCS:append = " set_gstreamer10_metapkg_rdepends "
+PACKAGESPLITFUNCS =+ "split_gstreamer10_packages"
+PACKAGESPLITFUNCS += "set_gstreamer10_metapkg_rdepends"
python split_gstreamer10_packages () {
gst_libdir = d.expand('${libdir}/gstreamer-1.0')
diff --git a/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-ugly_1.20.2.bb b/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-ugly_1.20.2.bb
deleted file mode 100644
index edc2ece979..0000000000
--- a/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-ugly_1.20.2.bb
+++ /dev/null
@@ -1,46 +0,0 @@
-require gstreamer1.0-plugins-common.inc
-require gstreamer1.0-plugins-license.inc
-
-DESCRIPTION = "'Ugly GStreamer plugins"
-HOMEPAGE = "https://gstreamer.freedesktop.org/"
-BUGTRACKER = "https://gitlab.freedesktop.org/gstreamer/gst-plugins-ugly/-/issues"
-
-LIC_FILES_CHKSUM = "file://COPYING;md5=a6f89e2100d9b6cdffcea4f398e37343 \
- file://tests/check/elements/xingmux.c;beginline=1;endline=21;md5=4c771b8af188724855cb99cadd390068"
-
-LICENSE = "LGPL-2.1-or-later & GPL-2.0-or-later"
-LICENSE_FLAGS = "commercial"
-
-SRC_URI = " \
- https://gstreamer.freedesktop.org/src/gst-plugins-ugly/gst-plugins-ugly-${PV}.tar.xz \
- "
-SRC_URI[sha256sum] = "b43fb4df94459afbf67ec22003ca58ffadcd19e763f276dca25b64c848adb7bf"
-
-S = "${WORKDIR}/gst-plugins-ugly-${PV}"
-
-DEPENDS += "gstreamer1.0-plugins-base"
-
-GST_PLUGIN_SET_HAS_EXAMPLES = "0"
-
-PACKAGECONFIG ??= " \
- ${GSTREAMER_ORC} \
- a52dec mpeg2dec \
-"
-
-PACKAGECONFIG[amrnb] = "-Damrnb=enabled,-Damrnb=disabled,opencore-amr"
-PACKAGECONFIG[amrwb] = "-Damrwbdec=enabled,-Damrwbdec=disabled,opencore-amr"
-PACKAGECONFIG[a52dec] = "-Da52dec=enabled,-Da52dec=disabled,liba52"
-PACKAGECONFIG[cdio] = "-Dcdio=enabled,-Dcdio=disabled,libcdio"
-PACKAGECONFIG[dvdread] = "-Ddvdread=enabled,-Ddvdread=disabled,libdvdread"
-PACKAGECONFIG[mpeg2dec] = "-Dmpeg2dec=enabled,-Dmpeg2dec=disabled,mpeg2dec"
-PACKAGECONFIG[x264] = "-Dx264=enabled,-Dx264=disabled,x264"
-
-GSTREAMER_GPL = "${@bb.utils.filter('PACKAGECONFIG', 'a52dec cdio dvdread mpeg2dec x264', d)}"
-
-EXTRA_OEMESON += " \
- -Ddoc=disabled \
- -Dsidplay=disabled \
-"
-
-FILES:${PN}-amrnb += "${datadir}/gstreamer-1.0/presets/GstAmrnbEnc.prs"
-FILES:${PN}-x264 += "${datadir}/gstreamer-1.0/presets/GstX264Enc.prs"
diff --git a/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-ugly_1.22.11.bb b/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-ugly_1.22.11.bb
new file mode 100644
index 0000000000..61f46fbf7e
--- /dev/null
+++ b/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-ugly_1.22.11.bb
@@ -0,0 +1,47 @@
+require gstreamer1.0-plugins-common.inc
+require gstreamer1.0-plugins-license.inc
+
+SUMMARY = "'Ugly GStreamer plugins"
+HOMEPAGE = "https://gstreamer.freedesktop.org/"
+BUGTRACKER = "https://gitlab.freedesktop.org/gstreamer/gst-plugins-ugly/-/issues"
+
+LIC_FILES_CHKSUM = "file://COPYING;md5=a6f89e2100d9b6cdffcea4f398e37343 \
+ "
+
+LICENSE = "LGPL-2.1-or-later & GPL-2.0-or-later"
+LICENSE_FLAGS = "commercial"
+
+SRC_URI = " \
+ https://gstreamer.freedesktop.org/src/gst-plugins-ugly/gst-plugins-ugly-${PV}.tar.xz \
+ "
+
+SRC_URI[sha256sum] = "7758b7decfd20c00cae5700822bcbbf03f98c723e33e17634db2e07ca1da60bf"
+
+S = "${WORKDIR}/gst-plugins-ugly-${PV}"
+
+DEPENDS += "gstreamer1.0-plugins-base"
+
+GST_PLUGIN_SET_HAS_EXAMPLES = "0"
+
+PACKAGECONFIG ??= " \
+ ${GSTREAMER_ORC} \
+ a52dec mpeg2dec \
+"
+
+PACKAGECONFIG[amrnb] = "-Damrnb=enabled,-Damrnb=disabled,opencore-amr"
+PACKAGECONFIG[amrwb] = "-Damrwbdec=enabled,-Damrwbdec=disabled,opencore-amr"
+PACKAGECONFIG[a52dec] = "-Da52dec=enabled,-Da52dec=disabled,liba52"
+PACKAGECONFIG[cdio] = "-Dcdio=enabled,-Dcdio=disabled,libcdio"
+PACKAGECONFIG[dvdread] = "-Ddvdread=enabled,-Ddvdread=disabled,libdvdread"
+PACKAGECONFIG[mpeg2dec] = "-Dmpeg2dec=enabled,-Dmpeg2dec=disabled,mpeg2dec"
+PACKAGECONFIG[x264] = "-Dx264=enabled,-Dx264=disabled,x264"
+
+GSTREAMER_GPL = "${@bb.utils.filter('PACKAGECONFIG', 'a52dec cdio dvdread mpeg2dec x264', d)}"
+
+EXTRA_OEMESON += " \
+ -Ddoc=disabled \
+ -Dsidplay=disabled \
+"
+
+FILES:${PN}-amrnb += "${datadir}/gstreamer-1.0/presets/GstAmrnbEnc.prs"
+FILES:${PN}-x264 += "${datadir}/gstreamer-1.0/presets/GstX264Enc.prs"
diff --git a/meta/recipes-multimedia/gstreamer/gstreamer1.0-python_1.20.2.bb b/meta/recipes-multimedia/gstreamer/gstreamer1.0-python_1.20.2.bb
deleted file mode 100644
index 34bc4bf4f4..0000000000
--- a/meta/recipes-multimedia/gstreamer/gstreamer1.0-python_1.20.2.bb
+++ /dev/null
@@ -1,30 +0,0 @@
-SUMMARY = "Python bindings for GStreamer 1.0"
-DESCRIPTION = "GStreamer Python binding overrides (complementing the bindings \
-provided by python-gi) "
-HOMEPAGE = "http://cgit.freedesktop.org/gstreamer/gst-python/"
-SECTION = "multimedia"
-
-LICENSE = "LGPL-2.1-or-later"
-LIC_FILES_CHKSUM = "file://COPYING;md5=c34deae4e395ca07e725ab0076a5f740"
-
-SRC_URI = "https://gstreamer.freedesktop.org/src/${PNREAL}/${PNREAL}-${PV}.tar.xz"
-SRC_URI[sha256sum] = "853ea35a1088c762fb703e5aea9c30031a19222b59786b6599956e154620fa2f"
-
-DEPENDS = "gstreamer1.0 gstreamer1.0-plugins-base python3-pygobject"
-RDEPENDS:${PN} += "gstreamer1.0 gstreamer1.0-plugins-base python3-pygobject"
-
-PNREAL = "gst-python"
-
-S = "${WORKDIR}/${PNREAL}-${PV}"
-
-EXTRA_OEMESON += "\
- -Dtests=disabled \
- -Dplugin=enabled \
- -Dlibpython-dir=${libdir} \
-"
-
-# gobject-introspection is mandatory and cannot be configured
-REQUIRED_DISTRO_FEATURES = "gobject-introspection-data"
-GIR_MESON_OPTION = ""
-
-inherit meson pkgconfig setuptools3-base upstream-version-is-even gobject-introspection features_check
diff --git a/meta/recipes-multimedia/gstreamer/gstreamer1.0-python_1.22.11.bb b/meta/recipes-multimedia/gstreamer/gstreamer1.0-python_1.22.11.bb
new file mode 100644
index 0000000000..0fbb03f757
--- /dev/null
+++ b/meta/recipes-multimedia/gstreamer/gstreamer1.0-python_1.22.11.bb
@@ -0,0 +1,30 @@
+SUMMARY = "Python bindings for GStreamer 1.0"
+DESCRIPTION = "GStreamer Python binding overrides (complementing the bindings \
+provided by python-gi) "
+HOMEPAGE = "http://cgit.freedesktop.org/gstreamer/gst-python/"
+SECTION = "multimedia"
+
+LICENSE = "LGPL-2.1-or-later"
+LIC_FILES_CHKSUM = "file://COPYING;md5=c34deae4e395ca07e725ab0076a5f740"
+
+SRC_URI = "https://gstreamer.freedesktop.org/src/${PNREAL}/${PNREAL}-${PV}.tar.xz"
+SRC_URI[sha256sum] = "f7a5450d93fd81bf46060dca7f4a048d095b6717961fec211731a11a994c99a7"
+
+DEPENDS = "gstreamer1.0 gstreamer1.0-plugins-base python3-pygobject"
+RDEPENDS:${PN} += "gstreamer1.0 gstreamer1.0-plugins-base python3-pygobject"
+
+PNREAL = "gst-python"
+
+S = "${WORKDIR}/${PNREAL}-${PV}"
+
+EXTRA_OEMESON += "\
+ -Dtests=disabled \
+ -Dplugin=enabled \
+ -Dlibpython-dir=${libdir} \
+"
+
+inherit meson pkgconfig setuptools3-base upstream-version-is-even features_check
+
+FILES:${PN} += "${libdir}/gstreamer-1.0"
+
+REQUIRED_DISTRO_FEATURES = "gobject-introspection-data"
diff --git a/meta/recipes-multimedia/gstreamer/gstreamer1.0-rtsp-server_1.20.2.bb b/meta/recipes-multimedia/gstreamer/gstreamer1.0-rtsp-server_1.20.2.bb
deleted file mode 100644
index fd6e16c2bf..0000000000
--- a/meta/recipes-multimedia/gstreamer/gstreamer1.0-rtsp-server_1.20.2.bb
+++ /dev/null
@@ -1,31 +0,0 @@
-SUMMARY = "A library on top of GStreamer for building an RTSP server"
-HOMEPAGE = "http://cgit.freedesktop.org/gstreamer/gst-rtsp-server/"
-SECTION = "multimedia"
-LICENSE = "LGPL-2.1-or-later"
-LIC_FILES_CHKSUM = "file://COPYING;md5=69333daa044cb77e486cc36129f7a770"
-
-DEPENDS = "gstreamer1.0 gstreamer1.0-plugins-base"
-
-PNREAL = "gst-rtsp-server"
-
-SRC_URI = "https://gstreamer.freedesktop.org/src/${PNREAL}/${PNREAL}-${PV}.tar.xz"
-
-SRC_URI[sha256sum] = "6a8e9d136bbee4fc03858a0680dd5cbf91e2e989c43da115858eb21fb1adbcab"
-
-S = "${WORKDIR}/${PNREAL}-${PV}"
-
-inherit meson pkgconfig upstream-version-is-even gobject-introspection
-
-EXTRA_OEMESON += " \
- -Ddoc=disabled \
- -Dexamples=disabled \
- -Dtests=disabled \
-"
-
-GIR_MESON_ENABLE_FLAG = "enabled"
-GIR_MESON_DISABLE_FLAG = "disabled"
-
-# Starting with 1.8.0 gst-rtsp-server includes dependency-less plugins as well
-require gstreamer1.0-plugins-packaging.inc
-
-CVE_PRODUCT += "gst-rtsp-server"
diff --git a/meta/recipes-multimedia/gstreamer/gstreamer1.0-rtsp-server_1.22.11.bb b/meta/recipes-multimedia/gstreamer/gstreamer1.0-rtsp-server_1.22.11.bb
new file mode 100644
index 0000000000..554ed9ec8f
--- /dev/null
+++ b/meta/recipes-multimedia/gstreamer/gstreamer1.0-rtsp-server_1.22.11.bb
@@ -0,0 +1,31 @@
+SUMMARY = "A library on top of GStreamer for building an RTSP server"
+HOMEPAGE = "http://cgit.freedesktop.org/gstreamer/gst-rtsp-server/"
+SECTION = "multimedia"
+LICENSE = "LGPL-2.1-or-later"
+LIC_FILES_CHKSUM = "file://COPYING;md5=69333daa044cb77e486cc36129f7a770"
+
+DEPENDS = "gstreamer1.0 gstreamer1.0-plugins-base"
+
+PNREAL = "gst-rtsp-server"
+
+SRC_URI = "https://gstreamer.freedesktop.org/src/${PNREAL}/${PNREAL}-${PV}.tar.xz"
+
+SRC_URI[sha256sum] = "ec49d474750a6ff6729c85b448abc607fb6840b21717ad7abc967e2adbf07a24"
+
+S = "${WORKDIR}/${PNREAL}-${PV}"
+
+inherit meson pkgconfig upstream-version-is-even gobject-introspection
+
+EXTRA_OEMESON += " \
+ -Ddoc=disabled \
+ -Dexamples=disabled \
+ -Dtests=disabled \
+"
+
+GIR_MESON_ENABLE_FLAG = "enabled"
+GIR_MESON_DISABLE_FLAG = "disabled"
+
+# Starting with 1.8.0 gst-rtsp-server includes dependency-less plugins as well
+require gstreamer1.0-plugins-packaging.inc
+
+CVE_PRODUCT += "gst-rtsp-server"
diff --git a/meta/recipes-multimedia/gstreamer/gstreamer1.0-vaapi_1.20.2.bb b/meta/recipes-multimedia/gstreamer/gstreamer1.0-vaapi_1.20.2.bb
deleted file mode 100644
index 40dc21cf45..0000000000
--- a/meta/recipes-multimedia/gstreamer/gstreamer1.0-vaapi_1.20.2.bb
+++ /dev/null
@@ -1,53 +0,0 @@
-SUMMARY = "VA-API support to GStreamer"
-HOMEPAGE = "https://gstreamer.freedesktop.org/"
-DESCRIPTION = "gstreamer-vaapi consists of a collection of VA-API \
-based plugins for GStreamer and helper libraries: `vaapidecode', \
-`vaapiconvert', and `vaapisink'."
-
-REALPN = "gstreamer-vaapi"
-
-LICENSE = "LGPL-2.1-or-later"
-LIC_FILES_CHKSUM = "file://COPYING.LIB;md5=4fbd65380cdd255951079008b364516c"
-
-SRC_URI = "https://gstreamer.freedesktop.org/src/${REALPN}/${REALPN}-${PV}.tar.xz"
-
-SRC_URI[sha256sum] = "30126ab6e3105dab8da76bd9951a68886149bcd70c7fee0bac68de564de41d3d"
-
-S = "${WORKDIR}/${REALPN}-${PV}"
-DEPENDS = "libva gstreamer1.0 gstreamer1.0-plugins-base gstreamer1.0-plugins-bad"
-
-inherit meson pkgconfig features_check upstream-version-is-even
-
-REQUIRED_DISTRO_FEATURES ?= "opengl"
-
-EXTRA_OEMESON += " \
- -Ddoc=disabled \
- -Dexamples=disabled \
- -Dtests=enabled \
-"
-
-PACKAGES =+ "${PN}-tests"
-
-# OpenGL packageconfig factored out to make it easy for distros
-# and BSP layers to pick either glx, egl, or no GL. By default,
-# try detecting X11 first, and if found (with OpenGL), use GLX,
-# otherwise try to check if EGL can be used.
-PACKAGECONFIG_GL ?= "${@bb.utils.contains('DISTRO_FEATURES', 'x11 opengl', 'glx', \
- bb.utils.contains('DISTRO_FEATURES', 'opengl', 'egl', \
- '', d), d)}"
-
-PACKAGECONFIG ??= "drm encoders \
- ${PACKAGECONFIG_GL} \
- ${@bb.utils.filter('DISTRO_FEATURES', 'wayland x11', d)}"
-
-PACKAGECONFIG[drm] = "-Dwith_drm=yes,-Dwith_drm=no,udev libdrm"
-PACKAGECONFIG[egl] = "-Dwith_egl=yes,-Dwith_egl=no,virtual/egl"
-PACKAGECONFIG[encoders] = "-Dwith_encoders=yes,-Dwith_encoders=no"
-PACKAGECONFIG[glx] = "-Dwith_glx=yes,-Dwith_glx=no,virtual/libgl"
-PACKAGECONFIG[wayland] = "-Dwith_wayland=yes,-Dwith_wayland=no,wayland-native wayland wayland-protocols"
-PACKAGECONFIG[x11] = "-Dwith_x11=yes,-Dwith_x11=no,virtual/libx11 libxrandr libxrender"
-
-FILES:${PN} += "${libdir}/gstreamer-*/*.so"
-FILES:${PN}-dbg += "${libdir}/gstreamer-*/.debug"
-FILES:${PN}-dev += "${libdir}/gstreamer-*/*.a"
-FILES:${PN}-tests = "${bindir}/*"
diff --git a/meta/recipes-multimedia/gstreamer/gstreamer1.0-vaapi_1.22.11.bb b/meta/recipes-multimedia/gstreamer/gstreamer1.0-vaapi_1.22.11.bb
new file mode 100644
index 0000000000..87eb8484a1
--- /dev/null
+++ b/meta/recipes-multimedia/gstreamer/gstreamer1.0-vaapi_1.22.11.bb
@@ -0,0 +1,53 @@
+SUMMARY = "VA-API support to GStreamer"
+HOMEPAGE = "https://gstreamer.freedesktop.org/"
+DESCRIPTION = "gstreamer-vaapi consists of a collection of VA-API \
+based plugins for GStreamer and helper libraries: `vaapidecode', \
+`vaapiconvert', and `vaapisink'."
+
+REALPN = "gstreamer-vaapi"
+
+LICENSE = "LGPL-2.1-or-later"
+LIC_FILES_CHKSUM = "file://COPYING.LIB;md5=4fbd65380cdd255951079008b364516c"
+
+SRC_URI = "https://gstreamer.freedesktop.org/src/${REALPN}/${REALPN}-${PV}.tar.xz"
+
+SRC_URI[sha256sum] = "6eae1360658302b9b512fa46b4d06f5b818dfce5f2f43d7d710ca8142719d8ad"
+
+S = "${WORKDIR}/${REALPN}-${PV}"
+DEPENDS = "libva gstreamer1.0 gstreamer1.0-plugins-base gstreamer1.0-plugins-bad"
+
+inherit meson pkgconfig features_check upstream-version-is-even
+
+REQUIRED_DISTRO_FEATURES ?= "opengl"
+
+EXTRA_OEMESON += " \
+ -Ddoc=disabled \
+ -Dexamples=disabled \
+ -Dtests=enabled \
+"
+
+PACKAGES =+ "${PN}-tests"
+
+# OpenGL packageconfig factored out to make it easy for distros
+# and BSP layers to pick either glx, egl, or no GL. By default,
+# try detecting X11 first, and if found (with OpenGL), use GLX,
+# otherwise try to check if EGL can be used.
+PACKAGECONFIG_GL ?= "${@bb.utils.contains('DISTRO_FEATURES', 'x11 opengl', 'glx', \
+ bb.utils.contains('DISTRO_FEATURES', 'opengl', 'egl', \
+ '', d), d)}"
+
+PACKAGECONFIG ??= "drm encoders \
+ ${PACKAGECONFIG_GL} \
+ ${@bb.utils.filter('DISTRO_FEATURES', 'wayland x11', d)}"
+
+PACKAGECONFIG[drm] = "-Ddrm=enabled,-Ddrm=disabled,udev libdrm"
+PACKAGECONFIG[egl] = "-Degl=enabled,-Degl=disabled,virtual/egl"
+PACKAGECONFIG[encoders] = "-Dencoders=enabled,-Dencoders=disabled"
+PACKAGECONFIG[glx] = "-Dglx=enabled,-Dglx=disabled,virtual/libgl"
+PACKAGECONFIG[wayland] = "-Dwayland=enabled,-Dwayland=disabled,wayland-native wayland wayland-protocols"
+PACKAGECONFIG[x11] = "-Dx11=enabled,-Dx11=disabled,virtual/libx11 libxrandr libxrender"
+
+FILES:${PN} += "${libdir}/gstreamer-*/*.so"
+FILES:${PN}-dbg += "${libdir}/gstreamer-*/.debug"
+FILES:${PN}-dev += "${libdir}/gstreamer-*/*.a"
+FILES:${PN}-tests = "${bindir}/*"
diff --git a/meta/recipes-multimedia/gstreamer/gstreamer1.0/0001-tests-respect-the-idententaion-used-in-meson.patch b/meta/recipes-multimedia/gstreamer/gstreamer1.0/0001-tests-respect-the-idententaion-used-in-meson.patch
index 312c04fbfd..0d839bd6c8 100644
--- a/meta/recipes-multimedia/gstreamer/gstreamer1.0/0001-tests-respect-the-idententaion-used-in-meson.patch
+++ b/meta/recipes-multimedia/gstreamer/gstreamer1.0/0001-tests-respect-the-idententaion-used-in-meson.patch
@@ -1,20 +1,21 @@
-From 1eb77a4ea5a3967c688d8f1192f99c605badc7e2 Mon Sep 17 00:00:00 2001
+From 5372cd5bf2a9dd247b9c5fc6e4fe248046dbb085 Mon Sep 17 00:00:00 2001
From: Jose Quaresma <quaresma.jose@gmail.com>
Date: Sun, 11 Apr 2021 19:48:13 +0100
-Subject: [PATCH 1/4] tests: respect the idententaion used in meson
+Subject: [PATCH] tests: respect the idententaion used in meson
Upstream-Status: Submitted [https://gitlab.freedesktop.org/gstreamer/gstreamer/-/merge_requests/789]
Signed-off-by: Jose Quaresma <quaresma.jose@gmail.com>
+
---
- subprojects/gstreamer/tests/check/meson.build | 10 +++++-----
+ tests/check/meson.build | 10 +++++-----
1 file changed, 5 insertions(+), 5 deletions(-)
-diff --git a/subprojects/gstreamer/tests/check/meson.build b/subprojects/gstreamer/tests/check/meson.build
-index 9c4228b6e4..506606684d 100644
---- a/subprojects/gstreamer/tests/check/meson.build
-+++ b/subprojects/gstreamer/tests/check/meson.build
-@@ -146,11 +146,11 @@ foreach t : core_tests
+diff --git a/tests/check/meson.build b/tests/check/meson.build
+index 9787b0a..16caac7 100644
+--- a/tests/check/meson.build
++++ b/tests/check/meson.build
+@@ -145,11 +145,11 @@ foreach t : core_tests
if not skip_test
exe = executable(test_name, fname,
@@ -22,15 +23,12 @@ index 9c4228b6e4..506606684d 100644
- cpp_args : gst_c_args + test_defines,
- include_directories : [configinc],
- link_with : link_with_libs,
-- dependencies : test_deps + glib_deps + gst_deps,
+- dependencies : gst_deps + test_deps,
+ c_args : gst_c_args + test_defines,
+ cpp_args : gst_c_args + test_defines,
+ include_directories : [configinc],
+ link_with : link_with_libs,
-+ dependencies : test_deps + glib_deps + gst_deps,
++ dependencies : gst_deps + test_deps,
)
env = environment()
---
-2.33.1
-
diff --git a/meta/recipes-multimedia/gstreamer/gstreamer1.0/0002-tests-add-support-for-install-the-tests.patch b/meta/recipes-multimedia/gstreamer/gstreamer1.0/0002-tests-add-support-for-install-the-tests.patch
index d18c19c397..64717e66c3 100644
--- a/meta/recipes-multimedia/gstreamer/gstreamer1.0/0002-tests-add-support-for-install-the-tests.patch
+++ b/meta/recipes-multimedia/gstreamer/gstreamer1.0/0002-tests-add-support-for-install-the-tests.patch
@@ -1,7 +1,7 @@
-From 62f5bb7bfc2498219df7280e73480d8ed2378ce5 Mon Sep 17 00:00:00 2001
+From 27e977d03b0f7c1d0bf19621ef0cec0585410e7b Mon Sep 17 00:00:00 2001
From: Jose Quaresma <quaresma.jose@gmail.com>
Date: Sun, 11 Apr 2021 19:48:13 +0100
-Subject: [PATCH 2/4] tests: add support for install the tests
+Subject: [PATCH] tests: add support for install the tests
This will provide to run the tests using the gnome-desktop-testing [1]
@@ -10,19 +10,20 @@ This will provide to run the tests using the gnome-desktop-testing [1]
Upstream-Status: Submitted [https://gitlab.freedesktop.org/gstreamer/gstreamer/-/merge_requests/789]
Signed-off-by: Jose Quaresma <quaresma.jose@gmail.com>
+
---
- subprojects/gstreamer/meson.build | 4 ++++
- subprojects/gstreamer/meson_options.txt | 1 +
- subprojects/gstreamer/tests/check/meson.build | 22 ++++++++++++++++++-
- .../gstreamer/tests/check/template.test.in | 3 +++
+ meson.build | 4 ++++
+ meson_options.txt | 1 +
+ tests/check/meson.build | 22 +++++++++++++++++++++-
+ tests/check/template.test.in | 3 +++
4 files changed, 29 insertions(+), 1 deletion(-)
- create mode 100644 subprojects/gstreamer/tests/check/template.test.in
+ create mode 100644 tests/check/template.test.in
-diff --git a/subprojects/gstreamer/meson.build b/subprojects/gstreamer/meson.build
-index d20fe0040f..b595d8f990 100644
---- a/subprojects/gstreamer/meson.build
-+++ b/subprojects/gstreamer/meson.build
-@@ -562,6 +562,10 @@ if bashcomp_dep.found()
+diff --git a/meson.build b/meson.build
+index 60c7bec..f7650b1 100644
+--- a/meson.build
++++ b/meson.build
+@@ -606,6 +606,10 @@ if bashcomp_dep.found()
endif
endif
@@ -33,10 +34,10 @@ index d20fe0040f..b595d8f990 100644
plugins_install_dir = join_paths(get_option('libdir'), 'gstreamer-1.0')
pkgconfig = import('pkgconfig')
-diff --git a/subprojects/gstreamer/meson_options.txt b/subprojects/gstreamer/meson_options.txt
-index 7363bdb7a1..a34ba37dad 100644
---- a/subprojects/gstreamer/meson_options.txt
-+++ b/subprojects/gstreamer/meson_options.txt
+diff --git a/meson_options.txt b/meson_options.txt
+index 7363bdb..a34ba37 100644
+--- a/meson_options.txt
++++ b/meson_options.txt
@@ -15,6 +15,7 @@ option('poisoning', type : 'boolean', value : false, description : 'Enable poiso
option('memory-alignment', type: 'combo',
choices : ['1', '2', '4', '8', '16', '32', '64', '128', '256', '512', '1024', '2048', '4096', '8192', 'malloc', 'pagesize'],
@@ -45,10 +46,10 @@ index 7363bdb7a1..a34ba37dad 100644
# Feature options
option('check', type : 'feature', value : 'auto', description : 'Build unit test libraries')
-diff --git a/subprojects/gstreamer/tests/check/meson.build b/subprojects/gstreamer/tests/check/meson.build
-index 506606684d..00a138a568 100644
---- a/subprojects/gstreamer/tests/check/meson.build
-+++ b/subprojects/gstreamer/tests/check/meson.build
+diff --git a/tests/check/meson.build b/tests/check/meson.build
+index 16caac7..f2d400f 100644
+--- a/tests/check/meson.build
++++ b/tests/check/meson.build
@@ -124,10 +124,16 @@ test_defines = [
'-UG_DISABLE_ASSERT',
'-UG_DISABLE_CAST_CHECKS',
@@ -67,16 +68,16 @@ index 506606684d..00a138a568 100644
# sanity checking
if get_option('check').disabled()
if get_option('tests').enabled()
-@@ -151,6 +157,8 @@ foreach t : core_tests
+@@ -150,6 +156,8 @@ foreach t : core_tests
include_directories : [configinc],
link_with : link_with_libs,
- dependencies : test_deps + glib_deps + gst_deps,
+ dependencies : gst_deps + test_deps,
+ install_dir: installed_tests_execdir,
+ install: installed_tests_enabled,
)
env = environment()
-@@ -162,6 +170,18 @@ foreach t : core_tests
+@@ -161,6 +169,18 @@ foreach t : core_tests
env.set('GST_PLUGIN_SCANNER_1_0', gst_scanner_dir + '/gst-plugin-scanner')
env.set('GST_PLUGIN_LOADING_WHITELIST', 'gstreamer')
@@ -95,15 +96,12 @@ index 506606684d..00a138a568 100644
test(test_name, exe, env: env, timeout : 3 * 60)
endif
endforeach
-diff --git a/subprojects/gstreamer/tests/check/template.test.in b/subprojects/gstreamer/tests/check/template.test.in
+diff --git a/tests/check/template.test.in b/tests/check/template.test.in
new file mode 100644
-index 0000000000..f701627f87
+index 0000000..f701627
--- /dev/null
-+++ b/subprojects/gstreamer/tests/check/template.test.in
++++ b/tests/check/template.test.in
@@ -0,0 +1,3 @@
+[Test]
+Type=session
+Exec=@installed_tests_dir@/@program@
---
-2.33.1
-
diff --git a/meta/recipes-multimedia/gstreamer/gstreamer1.0/0003-tests-use-a-dictionaries-for-environment.patch b/meta/recipes-multimedia/gstreamer/gstreamer1.0/0003-tests-use-a-dictionaries-for-environment.patch
index 10a6dcc619..0fd830f150 100644
--- a/meta/recipes-multimedia/gstreamer/gstreamer1.0/0003-tests-use-a-dictionaries-for-environment.patch
+++ b/meta/recipes-multimedia/gstreamer/gstreamer1.0/0003-tests-use-a-dictionaries-for-environment.patch
@@ -1,7 +1,7 @@
-From fe830f0f75f3b4b41e3dbef8d4cf6ee4103c9e06 Mon Sep 17 00:00:00 2001
+From 7041bc5adf9501beb1428d8bbae6b351a6bf07f9 Mon Sep 17 00:00:00 2001
From: Jose Quaresma <quaresma.jose@gmail.com>
Date: Sat, 24 Apr 2021 10:34:47 +0100
-Subject: [PATCH 3/4] tests: use a dictionaries for environment
+Subject: [PATCH] tests: use a dictionaries for environment
meson environment() can't be passed to configure_file and it is needed for installed_tests,
use a dictionary as this is simplest solution to install the environment.
@@ -9,15 +9,16 @@ use a dictionary as this is simplest solution to install the environment.
Upstream-Status: Submitted [https://gitlab.freedesktop.org/gstreamer/gstreamer/-/merge_requests/789]
Signed-off-by: Jose Quaresma <quaresma.jose@gmail.com>
+
---
- subprojects/gstreamer/tests/check/meson.build | 21 ++++++++++++-------
+ tests/check/meson.build | 21 +++++++++++++--------
1 file changed, 13 insertions(+), 8 deletions(-)
-diff --git a/subprojects/gstreamer/tests/check/meson.build b/subprojects/gstreamer/tests/check/meson.build
-index 00a138a568..48ec2532f8 100644
---- a/subprojects/gstreamer/tests/check/meson.build
-+++ b/subprojects/gstreamer/tests/check/meson.build
-@@ -161,14 +161,19 @@ foreach t : core_tests
+diff --git a/tests/check/meson.build b/tests/check/meson.build
+index f2d400f..50dff7f 100644
+--- a/tests/check/meson.build
++++ b/tests/check/meson.build
+@@ -160,14 +160,19 @@ foreach t : core_tests
install: installed_tests_enabled,
)
@@ -45,6 +46,3 @@ index 00a138a568..48ec2532f8 100644
if installed_tests_enabled
test_conf = configuration_data()
---
-2.33.1
-
diff --git a/meta/recipes-multimedia/gstreamer/gstreamer1.0/0004-tests-add-helper-script-to-run-the-installed_tests.patch b/meta/recipes-multimedia/gstreamer/gstreamer1.0/0004-tests-add-helper-script-to-run-the-installed_tests.patch
index efa004f8ce..5689dc9fbb 100644
--- a/meta/recipes-multimedia/gstreamer/gstreamer1.0/0004-tests-add-helper-script-to-run-the-installed_tests.patch
+++ b/meta/recipes-multimedia/gstreamer/gstreamer1.0/0004-tests-add-helper-script-to-run-the-installed_tests.patch
@@ -1,7 +1,7 @@
-From 4dcbabebca2ad6f1fdd59ee35a858082e87db7b6 Mon Sep 17 00:00:00 2001
+From 1b1d1ce4227b6bea7c7def5dac4a663486e070c2 Mon Sep 17 00:00:00 2001
From: Jose Quaresma <quaresma.jose@gmail.com>
Date: Sun, 2 May 2021 01:58:01 +0100
-Subject: [PATCH 4/4] tests: add helper script to run the installed_tests
+Subject: [PATCH] tests: add helper script to run the installed_tests
- this is a bash script that will run the installed_tests
with some of the environment variables used in the meson
@@ -10,18 +10,19 @@ testing framework.
Upstream-Status: Submitted [https://gitlab.freedesktop.org/gstreamer/gstreamer/-/merge_requests/789]
Signed-off-by: Jose Quaresma <quaresma.jose@gmail.com>
+
---
- subprojects/gstreamer/tests/check/meson.build | 17 +++++++++++++++++
- .../gstreamer/tests/check/template.sh.in | 9 +++++++++
- .../gstreamer/tests/check/template.test.in | 2 +-
+ tests/check/meson.build | 17 +++++++++++++++++
+ tests/check/template.sh.in | 9 +++++++++
+ tests/check/template.test.in | 2 +-
3 files changed, 27 insertions(+), 1 deletion(-)
- create mode 100755 subprojects/gstreamer/tests/check/template.sh.in
+ create mode 100755 tests/check/template.sh.in
-diff --git a/subprojects/gstreamer/tests/check/meson.build b/subprojects/gstreamer/tests/check/meson.build
-index 48ec2532f8..7dc4990d4e 100644
---- a/subprojects/gstreamer/tests/check/meson.build
-+++ b/subprojects/gstreamer/tests/check/meson.build
-@@ -185,6 +185,23 @@ foreach t : core_tests
+diff --git a/tests/check/meson.build b/tests/check/meson.build
+index 50dff7f..2b9e979 100644
+--- a/tests/check/meson.build
++++ b/tests/check/meson.build
+@@ -184,6 +184,23 @@ foreach t : core_tests
install_dir: installed_tests_metadir,
configuration: test_conf
)
@@ -45,11 +46,11 @@ index 48ec2532f8..7dc4990d4e 100644
endif
test(test_name, exe, env: env, timeout : 3 * 60)
-diff --git a/subprojects/gstreamer/tests/check/template.sh.in b/subprojects/gstreamer/tests/check/template.sh.in
+diff --git a/tests/check/template.sh.in b/tests/check/template.sh.in
new file mode 100755
-index 0000000000..cf7d31b0ea
+index 0000000..cf7d31b
--- /dev/null
-+++ b/subprojects/gstreamer/tests/check/template.sh.in
++++ b/tests/check/template.sh.in
@@ -0,0 +1,9 @@
+#!/bin/sh
+
@@ -60,15 +61,12 @@ index 0000000000..cf7d31b0ea
+GST_REGISTRY=@GST_REGISTRY@
+GST_STATE_IGNORE_ELEMENTS="@GST_STATE_IGNORE_ELEMENTS@"
+exec @TEST@
-diff --git a/subprojects/gstreamer/tests/check/template.test.in b/subprojects/gstreamer/tests/check/template.test.in
-index f701627f87..b74ef6ad73 100644
---- a/subprojects/gstreamer/tests/check/template.test.in
-+++ b/subprojects/gstreamer/tests/check/template.test.in
+diff --git a/tests/check/template.test.in b/tests/check/template.test.in
+index f701627..b74ef6a 100644
+--- a/tests/check/template.test.in
++++ b/tests/check/template.test.in
@@ -1,3 +1,3 @@
[Test]
Type=session
-Exec=@installed_tests_dir@/@program@
+Exec=@installed_tests_dir@/@program@.sh
---
-2.33.1
-
diff --git a/meta/recipes-multimedia/gstreamer/gstreamer1.0/0005-tests-remove-gstbin-test_watch_for_state_change-test.patch b/meta/recipes-multimedia/gstreamer/gstreamer1.0/0005-tests-remove-gstbin-test_watch_for_state_change-test.patch
deleted file mode 100644
index f51df6d20b..0000000000
--- a/meta/recipes-multimedia/gstreamer/gstreamer1.0/0005-tests-remove-gstbin-test_watch_for_state_change-test.patch
+++ /dev/null
@@ -1,107 +0,0 @@
-From b935abba3d8fa3ea1ce384c08e650afd8c20b78a Mon Sep 17 00:00:00 2001
-From: Claudius Heine <ch@denx.de>
-Date: Wed, 2 Feb 2022 13:47:02 +0100
-Subject: [PATCH] tests: remove gstbin:test_watch_for_state_change testcase
-
-This testcase seems to be flaky, and upstream marked it as such:
-https://gitlab.freedesktop.org/gstreamer/gstreamer/-/issues/778
-
-This patch removes the testcase to avoid it interfering with out ptest.
-
-Signed-off-by: Claudius Heine <ch@denx.de>
-
-Upstream-Status: Inappropriate [needs proper upstream fix]
----
- tests/check/gst/gstbin.c | 69 -------------------
- 1 file changed, 69 deletions(-)
-
-diff --git a/tests/check/gst/gstbin.c b/tests/check/gst/gstbin.c
-index e366d5fe20..ac29d81474 100644
---- a/tests/check/gst/gstbin.c
-+++ b/tests/check/gst/gstbin.c
-@@ -691,74 +691,6 @@ GST_START_TEST (test_message_state_changed_children)
-
- GST_END_TEST;
-
--GST_START_TEST (test_watch_for_state_change)
--{
-- GstElement *src, *sink, *bin;
-- GstBus *bus;
-- GstStateChangeReturn ret;
--
-- bin = gst_element_factory_make ("bin", NULL);
-- fail_unless (bin != NULL, "Could not create bin");
--
-- bus = g_object_new (gst_bus_get_type (), NULL);
-- gst_object_ref_sink (bus);
-- gst_element_set_bus (GST_ELEMENT_CAST (bin), bus);
--
-- src = gst_element_factory_make ("fakesrc", NULL);
-- fail_if (src == NULL, "Could not create fakesrc");
-- sink = gst_element_factory_make ("fakesink", NULL);
-- fail_if (sink == NULL, "Could not create fakesink");
--
-- gst_bin_add (GST_BIN (bin), sink);
-- gst_bin_add (GST_BIN (bin), src);
--
-- fail_unless (gst_element_link (src, sink), "could not link src and sink");
--
-- /* change state, spawning two times three messages */
-- ret = gst_element_set_state (GST_ELEMENT (bin), GST_STATE_PAUSED);
-- fail_unless (ret == GST_STATE_CHANGE_ASYNC);
-- ret =
-- gst_element_get_state (GST_ELEMENT (bin), NULL, NULL,
-- GST_CLOCK_TIME_NONE);
-- fail_unless (ret == GST_STATE_CHANGE_SUCCESS);
--
-- pop_state_changed (bus, 6);
-- pop_async_done (bus);
-- pop_latency (bus);
--
-- fail_unless (gst_bus_have_pending (bus) == FALSE,
-- "Unexpected messages on bus");
--
-- ret = gst_element_set_state (GST_ELEMENT (bin), GST_STATE_PLAYING);
-- fail_unless (ret == GST_STATE_CHANGE_SUCCESS);
--
-- pop_state_changed (bus, 3);
--
-- /* this one might return either SUCCESS or ASYNC, likely SUCCESS */
-- ret = gst_element_set_state (GST_ELEMENT (bin), GST_STATE_PAUSED);
-- gst_element_get_state (GST_ELEMENT (bin), NULL, NULL, GST_CLOCK_TIME_NONE);
--
-- pop_state_changed (bus, 3);
-- if (ret == GST_STATE_CHANGE_ASYNC) {
-- pop_async_done (bus);
-- pop_latency (bus);
-- }
--
-- fail_unless (gst_bus_have_pending (bus) == FALSE,
-- "Unexpected messages on bus");
--
-- gst_bus_set_flushing (bus, TRUE);
--
-- ret = gst_element_set_state (GST_ELEMENT (bin), GST_STATE_NULL);
-- fail_unless (ret == GST_STATE_CHANGE_SUCCESS);
--
-- /* clean up */
-- gst_object_unref (bus);
-- gst_object_unref (bin);
--}
--
--GST_END_TEST;
--
- GST_START_TEST (test_state_change_error_message)
- {
- GstElement *src, *sink, *bin;
-@@ -1956,7 +1888,6 @@ gst_bin_suite (void)
- tcase_add_test (tc_chain, test_message_state_changed);
- tcase_add_test (tc_chain, test_message_state_changed_child);
- tcase_add_test (tc_chain, test_message_state_changed_children);
-- tcase_add_test (tc_chain, test_watch_for_state_change);
- tcase_add_test (tc_chain, test_state_change_error_message);
- tcase_add_test (tc_chain, test_add_linked);
- tcase_add_test (tc_chain, test_add_self);
---
-2.33.1
-
diff --git a/meta/recipes-multimedia/gstreamer/gstreamer1.0/run-ptest b/meta/recipes-multimedia/gstreamer/gstreamer1.0/run-ptest
index 0cfa955f03..7d0312005f 100755
--- a/meta/recipes-multimedia/gstreamer/gstreamer1.0/run-ptest
+++ b/meta/recipes-multimedia/gstreamer/gstreamer1.0/run-ptest
@@ -1,2 +1,16 @@
-#!/usr/bin/env sh
+#! /bin/sh
+
+# Multiply all timeouts by ten so they're more likely to work
+# on a loaded system.
+export CK_TIMEOUT_MULTIPLIER=5
+
+# Skip some tests that we know are problematic
+export GST_CHECKS_IGNORE=""
+# gstnetclientclock.c:test_functioning is very sensitive to load
+GST_CHECKS_IGNORE="$GST_CHECKS_IGNORE,test_functioning"
+
+# aggregator.c:test_infinite_seek_50_src_live is known to be flaky
+# https://gitlab.freedesktop.org/gstreamer/gstreamer/-/issues/410
+GST_CHECKS_IGNORE="$GST_CHECKS_IGNORE,test_infinite_seek_50_src_live"
+
gnome-desktop-testing-runner gstreamer
diff --git a/meta/recipes-multimedia/gstreamer/gstreamer1.0_1.20.2.bb b/meta/recipes-multimedia/gstreamer/gstreamer1.0_1.20.2.bb
deleted file mode 100644
index 3aa9aa7048..0000000000
--- a/meta/recipes-multimedia/gstreamer/gstreamer1.0_1.20.2.bb
+++ /dev/null
@@ -1,73 +0,0 @@
-SUMMARY = "GStreamer 1.0 multimedia framework"
-DESCRIPTION = "GStreamer is a multimedia framework for encoding and decoding video and sound. \
-It supports a wide range of formats including mp3, ogg, avi, mpeg and quicktime."
-HOMEPAGE = "http://gstreamer.freedesktop.org/"
-BUGTRACKER = "https://bugzilla.gnome.org/enter_bug.cgi?product=Gstreamer"
-SECTION = "multimedia"
-LICENSE = "LGPL-2.1-or-later"
-
-DEPENDS = "glib-2.0 glib-2.0-native libxml2 bison-native flex-native"
-
-inherit meson pkgconfig gettext upstream-version-is-even gobject-introspection ptest-gnome
-
-LIC_FILES_CHKSUM = "file://COPYING;md5=69333daa044cb77e486cc36129f7a770 \
- file://gst/gst.h;beginline=1;endline=21;md5=e059138481205ee2c6fc1c079c016d0d"
-
-S = "${WORKDIR}/gstreamer-${PV}"
-
-SRC_URI = "https://gstreamer.freedesktop.org/src/gstreamer/gstreamer-${PV}.tar.xz \
- file://run-ptest \
- file://0001-tests-respect-the-idententaion-used-in-meson.patch;striplevel=3 \
- file://0002-tests-add-support-for-install-the-tests.patch;striplevel=3 \
- file://0003-tests-use-a-dictionaries-for-environment.patch;striplevel=3 \
- file://0004-tests-add-helper-script-to-run-the-installed_tests.patch;striplevel=3 \
- file://0005-tests-remove-gstbin-test_watch_for_state_change-test.patch \
- "
-SRC_URI[sha256sum] = "df24e8792691a02dfe003b3833a51f1dbc6c3331ae625d143b17da939ceb5e0a"
-
-PACKAGECONFIG ??= "${@bb.utils.contains('PTEST_ENABLED', '1', 'tests', '', d)} \
- check \
- debug \
- tools"
-
-PACKAGECONFIG[debug] = "-Dgst_debug=true,-Dgst_debug=false"
-PACKAGECONFIG[tracer-hooks] = "-Dtracer_hooks=true,-Dtracer_hooks=false"
-PACKAGECONFIG[coretracers] = "-Dcoretracers=enabled,-Dcoretracers=disabled"
-PACKAGECONFIG[check] = "-Dcheck=enabled,-Dcheck=disabled"
-PACKAGECONFIG[tests] = "-Dtests=enabled -Dinstalled_tests=true,-Dtests=disabled -Dinstalled_tests=false"
-PACKAGECONFIG[unwind] = "-Dlibunwind=enabled,-Dlibunwind=disabled,libunwind"
-PACKAGECONFIG[dw] = "-Dlibdw=enabled,-Dlibdw=disabled,elfutils"
-PACKAGECONFIG[bash-completion] = "-Dbash-completion=enabled,-Dbash-completion=disabled,bash-completion"
-PACKAGECONFIG[tools] = "-Dtools=enabled,-Dtools=disabled"
-PACKAGECONFIG[setcap] = "-Dptp-helper-permissions=capabilities,,libcap libcap-native"
-
-# TODO: put this in a gettext.bbclass patch
-def gettext_oemeson(d):
- if d.getVar('USE_NLS') == 'no':
- return '-Dnls=disabled'
- # Remove the NLS bits if USE_NLS is no or INHIBIT_DEFAULT_DEPS is set
- if d.getVar('INHIBIT_DEFAULT_DEPS') and not oe.utils.inherits(d, 'cross-canadian'):
- return '-Dnls=disabled'
- return '-Dnls=enabled'
-
-EXTRA_OEMESON += " \
- -Ddoc=disabled \
- -Dexamples=disabled \
- -Ddbghelp=disabled \
- ${@gettext_oemeson(d)} \
-"
-
-GIR_MESON_ENABLE_FLAG = "enabled"
-GIR_MESON_DISABLE_FLAG = "disabled"
-
-PACKAGES += "${PN}-bash-completion"
-
-# Add the core element plugins to the main package
-FILES:${PN} += "${libdir}/gstreamer-1.0/*.so"
-FILES:${PN}-dev += "${libdir}/gstreamer-1.0/*.a ${libdir}/gstreamer-1.0/include"
-FILES:${PN}-bash-completion += "${datadir}/bash-completion/completions/ ${datadir}/bash-completion/helpers/gst*"
-FILES:${PN}-dbg += "${datadir}/gdb ${datadir}/gstreamer-1.0/gdb"
-
-CVE_PRODUCT = "gstreamer"
-
-PTEST_BUILD_HOST_FILES = ""
diff --git a/meta/recipes-multimedia/gstreamer/gstreamer1.0_1.22.11.bb b/meta/recipes-multimedia/gstreamer/gstreamer1.0_1.22.11.bb
new file mode 100644
index 0000000000..8965497d01
--- /dev/null
+++ b/meta/recipes-multimedia/gstreamer/gstreamer1.0_1.22.11.bb
@@ -0,0 +1,74 @@
+SUMMARY = "GStreamer 1.0 multimedia framework"
+DESCRIPTION = "GStreamer is a multimedia framework for encoding and decoding video and sound. \
+It supports a wide range of formats including mp3, ogg, avi, mpeg and quicktime."
+HOMEPAGE = "http://gstreamer.freedesktop.org/"
+BUGTRACKER = "https://bugzilla.gnome.org/enter_bug.cgi?product=Gstreamer"
+SECTION = "multimedia"
+LICENSE = "LGPL-2.1-or-later"
+
+DEPENDS = "glib-2.0 glib-2.0-native libxml2 bison-native flex-native"
+
+inherit meson pkgconfig gettext upstream-version-is-even gobject-introspection ptest-gnome
+
+LIC_FILES_CHKSUM = "file://COPYING;md5=69333daa044cb77e486cc36129f7a770 \
+ file://gst/gst.h;beginline=1;endline=21;md5=e059138481205ee2c6fc1c079c016d0d"
+
+S = "${WORKDIR}/gstreamer-${PV}"
+
+SRC_URI = "https://gstreamer.freedesktop.org/src/gstreamer/gstreamer-${PV}.tar.xz \
+ file://run-ptest \
+ file://0001-tests-respect-the-idententaion-used-in-meson.patch \
+ file://0002-tests-add-support-for-install-the-tests.patch \
+ file://0003-tests-use-a-dictionaries-for-environment.patch \
+ file://0004-tests-add-helper-script-to-run-the-installed_tests.patch \
+ "
+SRC_URI[sha256sum] = "3d16259e9dab8b002c57ce208a09b350d8282f5b0197306c0cdba9a0d0799744"
+
+PACKAGECONFIG ??= "${@bb.utils.contains('PTEST_ENABLED', '1', 'tests', '', d)} \
+ check \
+ debug \
+ tools"
+
+PACKAGECONFIG[debug] = "-Dgst_debug=true,-Dgst_debug=false"
+PACKAGECONFIG[tracer-hooks] = "-Dtracer_hooks=true,-Dtracer_hooks=false"
+PACKAGECONFIG[coretracers] = "-Dcoretracers=enabled,-Dcoretracers=disabled"
+PACKAGECONFIG[check] = "-Dcheck=enabled,-Dcheck=disabled"
+PACKAGECONFIG[tests] = "-Dtests=enabled -Dinstalled_tests=true,-Dtests=disabled -Dinstalled_tests=false"
+PACKAGECONFIG[unwind] = "-Dlibunwind=enabled,-Dlibunwind=disabled,libunwind"
+PACKAGECONFIG[dw] = "-Dlibdw=enabled,-Dlibdw=disabled,elfutils"
+PACKAGECONFIG[bash-completion] = "-Dbash-completion=enabled,-Dbash-completion=disabled,bash-completion"
+PACKAGECONFIG[tools] = "-Dtools=enabled,-Dtools=disabled"
+PACKAGECONFIG[setcap] = "-Dptp-helper-permissions=capabilities,,libcap libcap-native"
+
+# TODO: put this in a gettext.bbclass patch
+def gettext_oemeson(d):
+ if d.getVar('USE_NLS') == 'no':
+ return '-Dnls=disabled'
+ # Remove the NLS bits if USE_NLS is no or INHIBIT_DEFAULT_DEPS is set
+ if d.getVar('INHIBIT_DEFAULT_DEPS') and not oe.utils.inherits(d, 'cross-canadian'):
+ return '-Dnls=disabled'
+ return '-Dnls=enabled'
+
+EXTRA_OEMESON += " \
+ -Ddoc=disabled \
+ -Dexamples=disabled \
+ -Ddbghelp=disabled \
+ ${@gettext_oemeson(d)} \
+"
+
+GIR_MESON_ENABLE_FLAG = "enabled"
+GIR_MESON_DISABLE_FLAG = "disabled"
+
+PACKAGES += "${PN}-bash-completion"
+
+# Add the core element plugins to the main package
+FILES:${PN} += "${libdir}/gstreamer-1.0/*.so"
+FILES:${PN}-dev += "${libdir}/gstreamer-1.0/*.a ${libdir}/gstreamer-1.0/include"
+FILES:${PN}-bash-completion += "${datadir}/bash-completion/completions/ ${datadir}/bash-completion/helpers/gst*"
+FILES:${PN}-dbg += "${datadir}/gdb ${datadir}/gstreamer-1.0/gdb"
+
+RDEPENDS:${PN}-ptest:append:libc-glibc = " glibc-gconv-iso8859-5"
+
+CVE_PRODUCT = "gstreamer"
+
+PTEST_BUILD_HOST_FILES = ""
diff --git a/meta/recipes-multimedia/liba52/liba52_0.7.4.bb b/meta/recipes-multimedia/liba52/liba52_0.7.4.bb
index f6bca4e730..7a3b4a43c8 100644
--- a/meta/recipes-multimedia/liba52/liba52_0.7.4.bb
+++ b/meta/recipes-multimedia/liba52/liba52_0.7.4.bb
@@ -7,7 +7,6 @@ LICENSE = "GPL-2.0-or-later"
LIC_FILES_CHKSUM = "file://COPYING;md5=0636e73ff0215e8d672dc4c32c317bb3 \
file://include/a52.h;beginline=1;endline=12;md5=81152ceb3562bf20a60d1b6018175dd1"
SECTION = "libs"
-PR = "r4"
inherit autotools
diff --git a/meta/recipes-multimedia/libpng/libpng_1.6.37.bb b/meta/recipes-multimedia/libpng/libpng_1.6.37.bb
deleted file mode 100644
index 61e3d92e95..0000000000
--- a/meta/recipes-multimedia/libpng/libpng_1.6.37.bb
+++ /dev/null
@@ -1,35 +0,0 @@
-SUMMARY = "PNG image format decoding library"
-DESCRIPTION = "An open source project to develop and maintain the reference \
-library for use in applications that read, create, and manipulate PNG \
-(Portable Network Graphics) raster image files. "
-HOMEPAGE = "http://www.libpng.org/"
-SECTION = "libs"
-LICENSE = "Libpng"
-LIC_FILES_CHKSUM = "file://LICENSE;md5=b0085051bf265bac2bfc38bc89f50000"
-DEPENDS = "zlib"
-
-LIBV = "16"
-
-SRC_URI = "${SOURCEFORGE_MIRROR}/${BPN}/${BPN}${LIBV}/${BP}.tar.xz"
-SRC_URI[md5sum] = "015e8e15db1eecde5f2eb9eb5b6e59e9"
-SRC_URI[sha256sum] = "505e70834d35383537b6491e7ae8641f1a4bed1876dbfe361201fc80868d88ca"
-
-MIRRORS += "${SOURCEFORGE_MIRROR}/${BPN}/${BPN}${LIBV}/ ${SOURCEFORGE_MIRROR}/${BPN}/${BPN}${LIBV}/older-releases/"
-
-UPSTREAM_CHECK_URI = "http://libpng.org/pub/png/libpng.html"
-
-BINCONFIG = "${bindir}/libpng-config ${bindir}/libpng16-config"
-
-inherit autotools binconfig-disabled pkgconfig
-
-# Work around missing symbols
-EXTRA_OECONF:append:class-target = " ${@bb.utils.contains("TUNE_FEATURES", "neon", "--enable-arm-neon=on", "--enable-arm-neon=off", d)}"
-
-PACKAGES =+ "${PN}-tools"
-
-FILES:${PN}-tools = "${bindir}/png-fix-itxt ${bindir}/pngfix ${bindir}/pngcp"
-
-BBCLASSEXTEND = "native nativesdk"
-
-# CVE-2019-17371 is actually a memory leak in gif2png 2.x
-CVE_CHECK_IGNORE += "CVE-2019-17371"
diff --git a/meta/recipes-multimedia/libpng/libpng_1.6.43.bb b/meta/recipes-multimedia/libpng/libpng_1.6.43.bb
new file mode 100644
index 0000000000..f1febd0a02
--- /dev/null
+++ b/meta/recipes-multimedia/libpng/libpng_1.6.43.bb
@@ -0,0 +1,33 @@
+SUMMARY = "PNG image format decoding library"
+DESCRIPTION = "An open source project to develop and maintain the reference \
+library for use in applications that read, create, and manipulate PNG \
+(Portable Network Graphics) raster image files. "
+HOMEPAGE = "http://www.libpng.org/"
+SECTION = "libs"
+LICENSE = "Libpng"
+LIC_FILES_CHKSUM = "file://LICENSE;md5=0fdbfbe10fc294a6fca24dc76134222a"
+DEPENDS = "zlib"
+
+LIBV = "16"
+
+SRC_URI = "${SOURCEFORGE_MIRROR}/${BPN}/${BPN}${LIBV}/${BP}.tar.xz"
+SRC_URI[sha256sum] = "6a5ca0652392a2d7c9db2ae5b40210843c0bbc081cbd410825ab00cc59f14a6c"
+
+MIRRORS += "${SOURCEFORGE_MIRROR}/project/${BPN}/${BPN}${LIBV}/ ${SOURCEFORGE_MIRROR}/project/${BPN}/${BPN}${LIBV}/older-releases/"
+
+UPSTREAM_CHECK_URI = "http://libpng.org/pub/png/libpng.html"
+
+BINCONFIG = "${bindir}/libpng-config ${bindir}/libpng16-config"
+
+inherit autotools binconfig-disabled pkgconfig
+
+# Work around missing symbols
+ARMNEON = "${@bb.utils.contains("TUNE_FEATURES", "neon", "--enable-arm-neon=on", "--enable-arm-neon=off", d)}"
+ARMNEON:aarch64 = "--enable-hardware-optimizations=on"
+EXTRA_OECONF += "${ARMNEON}"
+
+PACKAGES =+ "${PN}-tools"
+
+FILES:${PN}-tools = "${bindir}/png-fix-itxt ${bindir}/pngfix ${bindir}/pngcp"
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-multimedia/libsamplerate/libsamplerate0_0.2.2.bb b/meta/recipes-multimedia/libsamplerate/libsamplerate0_0.2.2.bb
index ed2b6437dd..29d48fd333 100644
--- a/meta/recipes-multimedia/libsamplerate/libsamplerate0_0.2.2.bb
+++ b/meta/recipes-multimedia/libsamplerate/libsamplerate0_0.2.2.bb
@@ -7,18 +7,18 @@ LIC_FILES_CHKSUM = "file://COPYING;md5=336d6faf40fb600bafb0061f4052f1f4 \
file://src/samplerate.c;beginline=1;endline=7;md5=7a4238289dc36bfb70968ccaa5bd0d4f"
DEPENDS = "libsndfile1"
-SRC_URI = "https://github.com/libsndfile/libsamplerate/releases/download/${PV}/libsamplerate-${PV}.tar.xz \
+SRC_URI = "${GITHUB_BASE_URI}/download/${PV}/libsamplerate-${PV}.tar.xz \
"
SRC_URI[sha256sum] = "3258da280511d24b49d6b08615bbe824d0cacc9842b0e4caf11c52cf2b043893"
CVE_PRODUCT = "libsamplerate"
-UPSTREAM_CHECK_URI = "https://github.com/libsndfile/libsamplerate/releases"
+GITHUB_BASE_URI = "https://github.com/libsndfile/libsamplerate/releases"
S = "${WORKDIR}/libsamplerate-${PV}"
-inherit autotools pkgconfig
+inherit autotools pkgconfig github-releases
# FFTW and ALSA are only used in tests and examples, so they don't affect
# normal builds. It should be safe to ignore these, but explicitly disabling
diff --git a/meta/recipes-multimedia/libsndfile/libsndfile1/cve-2022-33065.patch b/meta/recipes-multimedia/libsndfile/libsndfile1/cve-2022-33065.patch
new file mode 100644
index 0000000000..fa4b2fc08b
--- /dev/null
+++ b/meta/recipes-multimedia/libsndfile/libsndfile1/cve-2022-33065.patch
@@ -0,0 +1,739 @@
+From c7ce5b0ebeeb58934825077d1324960aa0747718 Mon Sep 17 00:00:00 2001
+From: Alex Stewart <alex.stewart@ni.com>
+Date: Tue, 10 Oct 2023 16:10:34 -0400
+Subject: [PATCH] mat4/mat5: fix int overflow in dataend calculation
+
+The clang sanitizer warns of a possible signed integer overflow when
+calculating the `dataend` value in `mat4_read_header()`.
+
+```
+src/mat4.c:323:41: runtime error: signed integer overflow: 205 * -100663296 cannot be represented in type 'int'
+SUMMARY: UndefinedBehaviorSanitizer: undefined-behavior src/mat4.c:323:41 in
+src/mat4.c:323:48: runtime error: signed integer overflow: 838860800 * 4 cannot be represented in type 'int'
+SUMMARY: UndefinedBehaviorSanitizer: undefined-behavior src/mat4.c:323:48 in
+```
+
+Cast the offending `rows` and `cols` ints to `sf_count_t` (the type of
+`dataend` before performing the calculation, to avoid the issue.
+
+CVE: CVE-2022-33065
+Fixes: https://github.com/libsndfile/libsndfile/issues/789
+Fixes: https://github.com/libsndfile/libsndfile/issues/833
+
+Upstream-Status: Backport [9a829113c88a51e57c1e46473e90609e4b7df151]
+
+Signed-off-by: Alex Stewart <alex.stewart@ni.com>
+---
+ src/mat4.c | 2 +-
+ 1 file changed, 1 insertion(+), 1 deletion(-)
+
+diff --git a/src/mat4.c b/src/mat4.c
+index 0b1b414b..575683ba 100644
+--- a/src/mat4.c
++++ b/src/mat4.c
+@@ -320,7 +320,7 @@ mat4_read_header (SF_PRIVATE *psf)
+ psf->filelength - psf->dataoffset, psf->sf.channels * psf->sf.frames * psf->bytewidth) ;
+ }
+ else if ((psf->filelength - psf->dataoffset) > psf->sf.channels * psf->sf.frames * psf->bytewidth)
+- psf->dataend = psf->dataoffset + rows * cols * psf->bytewidth ;
++ psf->dataend = psf->dataoffset + (sf_count_t) rows * (sf_count_t) cols * psf->bytewidth ;
+
+ psf->datalength = psf->filelength - psf->dataoffset - psf->dataend ;
+
+From 842303f984b2081481e74cb84a9a24ecbe3dec1a Mon Sep 17 00:00:00 2001
+From: Alex Stewart <alex.stewart@ni.com>
+Date: Wed, 11 Oct 2023 16:36:02 -0400
+Subject: [PATCH] au: avoid int overflow while calculating data_end
+
+At several points in au_read_header(), we calculate the functional end
+of the data segment by adding the (int)au_fmt.dataoffset and the
+(int)au_fmt.datasize. This can overflow the implicit int_32 return value
+and cause undefined behavior.
+
+Instead, precalculate the value and assign it to a 64-bit
+(sf_count_t)data_end variable.
+
+CVE: CVE-2022-33065
+Fixes: https://github.com/libsndfile/libsndfile/issues/833
+
+Signed-off-by: Alex Stewart <alex.stewart@ni.com>
+---
+ src/au.c | 10 ++++++----
+ 1 file changed, 6 insertions(+), 4 deletions(-)
+
+diff --git a/src/au.c b/src/au.c
+index 62bd691d..f68f2587 100644
+--- a/src/au.c
++++ b/src/au.c
+@@ -291,6 +291,7 @@ static int
+ au_read_header (SF_PRIVATE *psf)
+ { AU_FMT au_fmt ;
+ int marker, dword ;
++ sf_count_t data_end ;
+
+ memset (&au_fmt, 0, sizeof (au_fmt)) ;
+ psf_binheader_readf (psf, "pm", 0, &marker) ;
+@@ -317,14 +318,15 @@ au_read_header (SF_PRIVATE *psf)
+ return SFE_AU_EMBED_BAD_LEN ;
+ } ;
+
++ data_end = (sf_count_t) au_fmt.dataoffset + (sf_count_t) au_fmt.datasize ;
+ if (psf->fileoffset > 0)
+- { psf->filelength = au_fmt.dataoffset + au_fmt.datasize ;
++ { psf->filelength = data_end ;
+ psf_log_printf (psf, " Data Size : %d\n", au_fmt.datasize) ;
+ }
+- else if (au_fmt.datasize == -1 || au_fmt.dataoffset + au_fmt.datasize == psf->filelength)
++ else if (au_fmt.datasize == -1 || data_end == psf->filelength)
+ psf_log_printf (psf, " Data Size : %d\n", au_fmt.datasize) ;
+- else if (au_fmt.dataoffset + au_fmt.datasize < psf->filelength)
+- { psf->filelength = au_fmt.dataoffset + au_fmt.datasize ;
++ else if (data_end < psf->filelength)
++ { psf->filelength = data_end ;
+ psf_log_printf (psf, " Data Size : %d\n", au_fmt.datasize) ;
+ }
+ else
+From 0754d3380a54e3fbdde0f684b88955c80c79f58f Mon Sep 17 00:00:00 2001
+From: Alex Stewart <alex.stewart@ni.com>
+Date: Wed, 11 Oct 2023 16:46:29 -0400
+Subject: [PATCH] avr: fix int overflow in avr_read_header()
+
+Pre-cast hdr.frames to sf_count_t, to provide the calculation with
+enough numeric space to avoid an int-overflow.
+
+CVE: CVE-2022-33065
+Fixes: https://github.com/libsndfile/libsndfile/issues/833
+
+Signed-off-by: Alex Stewart <alex.stewart@ni.com>
+---
+ src/avr.c | 2 +-
+ 1 file changed, 1 insertion(+), 1 deletion(-)
+
+diff --git a/src/avr.c b/src/avr.c
+index 6c78ff69..1bc1ffc9 100644
+--- a/src/avr.c
++++ b/src/avr.c
+@@ -162,7 +162,7 @@ avr_read_header (SF_PRIVATE *psf)
+ psf->endian = SF_ENDIAN_BIG ;
+
+ psf->dataoffset = AVR_HDR_SIZE ;
+- psf->datalength = hdr.frames * (hdr.rez / 8) ;
++ psf->datalength = (sf_count_t) hdr.frames * (hdr.rez / 8) ;
+
+ if (psf->fileoffset > 0)
+ psf->filelength = AVR_HDR_SIZE + psf->datalength ;
+From 6ac31a68a614e2bba4a05b54e5558d6270c98376 Mon Sep 17 00:00:00 2001
+From: Alex Stewart <alex.stewart@ni.com>
+Date: Wed, 11 Oct 2023 16:54:21 -0400
+Subject: [PATCH] sds: fix int overflow warning in sample calculations
+
+The sds_*byte_read() functions compose their uint_32 sample buffers by
+shifting 7bit samples into a 32bit wide buffer, and adding them
+together. Because the 7bit samples are stored in 32bit ints, code
+fuzzers become concerned that the addition operation can overflow and
+cause undefined behavior.
+
+Instead, bitwise-OR the bytes together - which should accomplish the
+same arithmetic operation, without risking an int-overflow.
+
+CVE: CVE-2022-33065
+Fixes: https://github.com/libsndfile/libsndfile/issues/833
+
+Signed-off-by: Alex Stewart <alex.stewart@ni.com>
+
+Do the same for the 3byte and 4byte read functions.
+---
+ src/sds.c | 6 +++---
+ 1 file changed, 3 insertions(+), 3 deletions(-)
+
+diff --git a/src/sds.c b/src/sds.c
+index 6bc76171..2a0f164c 100644
+--- a/src/sds.c
++++ b/src/sds.c
+@@ -454,7 +454,7 @@ sds_2byte_read (SF_PRIVATE *psf, SDS_PRIVATE *psds)
+
+ ucptr = psds->read_data + 5 ;
+ for (k = 0 ; k < 120 ; k += 2)
+- { sample = arith_shift_left (ucptr [k], 25) + arith_shift_left (ucptr [k + 1], 18) ;
++ { sample = arith_shift_left (ucptr [k], 25) | arith_shift_left (ucptr [k + 1], 18) ;
+ psds->read_samples [k / 2] = (int) (sample - 0x80000000) ;
+ } ;
+
+@@ -498,7 +498,7 @@ sds_3byte_read (SF_PRIVATE *psf, SDS_PRIVATE *psds)
+
+ ucptr = psds->read_data + 5 ;
+ for (k = 0 ; k < 120 ; k += 3)
+- { sample = (((uint32_t) ucptr [k]) << 25) + (ucptr [k + 1] << 18) + (ucptr [k + 2] << 11) ;
++ { sample = (((uint32_t) ucptr [k]) << 25) | (ucptr [k + 1] << 18) | (ucptr [k + 2] << 11) ;
+ psds->read_samples [k / 3] = (int) (sample - 0x80000000) ;
+ } ;
+
+@@ -542,7 +542,7 @@ sds_4byte_read (SF_PRIVATE *psf, SDS_PRIVATE *psds)
+
+ ucptr = psds->read_data + 5 ;
+ for (k = 0 ; k < 120 ; k += 4)
+- { sample = (((uint32_t) ucptr [k]) << 25) + (ucptr [k + 1] << 18) + (ucptr [k + 2] << 11) + (ucptr [k + 3] << 4) ;
++ { sample = (((uint32_t) ucptr [k]) << 25) | (ucptr [k + 1] << 18) | (ucptr [k + 2] << 11) | (ucptr [k + 3] << 4) ;
+ psds->read_samples [k / 4] = (int) (sample - 0x80000000) ;
+ } ;
+
+From 96428e1dd4998f1cd47df24f8fe9b0da35d7b947 Mon Sep 17 00:00:00 2001
+From: Alex Stewart <alex.stewart@ni.com>
+Date: Wed, 11 Oct 2023 17:26:51 -0400
+Subject: [PATCH] aiff: fix int overflow when counting header elements
+
+aiff_read_basc_chunk() tries to count the AIFF header size by keeping
+track of the bytes returned by psf_binheader_readf(). Though improbable,
+it is technically possible for these added bytes to exceed the int-sized
+`count` accumulator.
+
+Use a 64-bit sf_count_t type for `count`, to ensure that it always has
+enough numeric space.
+
+CVE: CVE-2022-33065
+Fixes: https://github.com/libsndfile/libsndfile/issues/833
+
+Signed-off-by: Alex Stewart <alex.stewart@ni.com>
+---
+ src/aiff.c | 2 +-
+ 1 file changed, 1 insertion(+), 1 deletion(-)
+
+diff --git a/src/aiff.c b/src/aiff.c
+index a2bda8f4..6b244302 100644
+--- a/src/aiff.c
++++ b/src/aiff.c
+@@ -1702,7 +1702,7 @@ static int
+ aiff_read_basc_chunk (SF_PRIVATE * psf, int datasize)
+ { const char * type_str ;
+ basc_CHUNK bc ;
+- int count ;
++ sf_count_t count ;
+
+ count = psf_binheader_readf (psf, "E442", &bc.version, &bc.numBeats, &bc.rootNote) ;
+ count += psf_binheader_readf (psf, "E222", &bc.scaleType, &bc.sigNumerator, &bc.sigDenominator) ;
+From b352c350d35bf978e4d3a32e5d9df1f2284445f4 Mon Sep 17 00:00:00 2001
+From: Alex Stewart <alex.stewart@ni.com>
+Date: Wed, 11 Oct 2023 17:43:02 -0400
+Subject: [PATCH] ircam: fix int overflow in ircam_read_header()
+
+When reading the IRCAM header, it is possible for the calculated
+blockwidth to exceed the bounds of a signed int32.
+
+Use a 64bit sf_count_t to store the blockwidth.
+
+CVE: CVE-2022-33065
+Fixes: https://github.com/libsndfile/libsndfile/issues/833
+
+Signed-off-by: Alex Stewart <alex.stewart@ni.com>
+---
+ src/common.h | 2 +-
+ src/ircam.c | 10 +++++-----
+ 2 files changed, 6 insertions(+), 6 deletions(-)
+
+diff --git a/src/common.h b/src/common.h
+index d92eabde..5369cb67 100644
+--- a/src/common.h
++++ b/src/common.h
+@@ -439,7 +439,7 @@ typedef struct sf_private_tag
+ sf_count_t datalength ; /* Length in bytes of the audio data. */
+ sf_count_t dataend ; /* Offset to file tailer. */
+
+- int blockwidth ; /* Size in bytes of one set of interleaved samples. */
++ sf_count_t blockwidth ; /* Size in bytes of one set of interleaved samples. */
+ int bytewidth ; /* Size in bytes of one sample (one channel). */
+
+ void *dither ;
+diff --git a/src/ircam.c b/src/ircam.c
+index 8e7cdba8..3d73ba44 100644
+--- a/src/ircam.c
++++ b/src/ircam.c
+@@ -171,35 +171,35 @@ ircam_read_header (SF_PRIVATE *psf)
+ switch (encoding)
+ { case IRCAM_PCM_16 :
+ psf->bytewidth = 2 ;
+- psf->blockwidth = psf->sf.channels * psf->bytewidth ;
++ psf->blockwidth = (sf_count_t) psf->sf.channels * psf->bytewidth ;
+
+ psf->sf.format = SF_FORMAT_IRCAM | SF_FORMAT_PCM_16 ;
+ break ;
+
+ case IRCAM_PCM_32 :
+ psf->bytewidth = 4 ;
+- psf->blockwidth = psf->sf.channels * psf->bytewidth ;
++ psf->blockwidth = (sf_count_t) psf->sf.channels * psf->bytewidth ;
+
+ psf->sf.format = SF_FORMAT_IRCAM | SF_FORMAT_PCM_32 ;
+ break ;
+
+ case IRCAM_FLOAT :
+ psf->bytewidth = 4 ;
+- psf->blockwidth = psf->sf.channels * psf->bytewidth ;
++ psf->blockwidth = (sf_count_t) psf->sf.channels * psf->bytewidth ;
+
+ psf->sf.format = SF_FORMAT_IRCAM | SF_FORMAT_FLOAT ;
+ break ;
+
+ case IRCAM_ALAW :
+ psf->bytewidth = 1 ;
+- psf->blockwidth = psf->sf.channels * psf->bytewidth ;
++ psf->blockwidth = (sf_count_t) psf->sf.channels * psf->bytewidth ;
+
+ psf->sf.format = SF_FORMAT_IRCAM | SF_FORMAT_ALAW ;
+ break ;
+
+ case IRCAM_ULAW :
+ psf->bytewidth = 1 ;
+- psf->blockwidth = psf->sf.channels * psf->bytewidth ;
++ psf->blockwidth = (sf_count_t) psf->sf.channels * psf->bytewidth ;
+
+ psf->sf.format = SF_FORMAT_IRCAM | SF_FORMAT_ULAW ;
+ break ;
+From 3bcd291e57867f88f558fa6f80990e84311df78c Mon Sep 17 00:00:00 2001
+From: Alex Stewart <alex.stewart@ni.com>
+Date: Wed, 11 Oct 2023 16:12:22 -0400
+Subject: [PATCH] mat4/mat5: fix int overflow when calculating blockwidth
+
+Pre-cast the components of the blockwidth calculation to sf_count_t to
+avoid overflowing integers during calculation.
+
+CVE: CVE-2022-33065
+Fixes: https://github.com/libsndfile/libsndfile/issues/833
+
+Signed-off-by: Alex Stewart <alex.stewart@ni.com>
+---
+ src/mat4.c | 2 +-
+ src/mat5.c | 2 +-
+ 2 files changed, 2 insertions(+), 2 deletions(-)
+
+diff --git a/src/mat4.c b/src/mat4.c
+index 575683ba..9f046f0c 100644
+--- a/src/mat4.c
++++ b/src/mat4.c
+@@ -104,7 +104,7 @@ mat4_open (SF_PRIVATE *psf)
+
+ psf->container_close = mat4_close ;
+
+- psf->blockwidth = psf->bytewidth * psf->sf.channels ;
++ psf->blockwidth = (sf_count_t) psf->bytewidth * psf->sf.channels ;
+
+ switch (subformat)
+ { case SF_FORMAT_PCM_16 :
+diff --git a/src/mat5.c b/src/mat5.c
+index da5a6eca..20f0ea64 100644
+--- a/src/mat5.c
++++ b/src/mat5.c
+@@ -114,7 +114,7 @@ mat5_open (SF_PRIVATE *psf)
+
+ psf->container_close = mat5_close ;
+
+- psf->blockwidth = psf->bytewidth * psf->sf.channels ;
++ psf->blockwidth = (sf_count_t) psf->bytewidth * psf->sf.channels ;
+
+ switch (subformat)
+ { case SF_FORMAT_PCM_U8 :
+From c177e292d47ef73b1d3c1bb391320299a0ed2ff9 Mon Sep 17 00:00:00 2001
+From: Alex Stewart <alex.stewart@ni.com>
+Date: Mon, 16 Oct 2023 12:37:47 -0400
+Subject: [PATCH] common: fix int overflow in psf_binheader_readf()
+
+The psf_binheader_readf() function attempts to count and return the
+number of bytes traversed in the header. During this accumulation, it is
+possible to overflow the int-sized byte_count variable.
+
+Avoid this overflow by checking that the accumulated bytes do not exceed
+INT_MAX and throwing an error if they do. This implies that files with
+multi-gigabyte headers threaten to produce this error, but I imagine
+those files don't really exist - and this error is better than the
+undefined behavior which would have resulted previously.
+
+CVE: CVE-2022-33065
+Fixes: https://github.com/libsndfile/libsndfile/issues/833
+
+Signed-off-by: Alex Stewart <alex.stewart@ni.com>
+---
+ src/common.c | 36 ++++++++++++++++++++++++------------
+ 1 file changed, 24 insertions(+), 12 deletions(-)
+
+diff --git a/src/common.c b/src/common.c
+index 1c3d951d..7f6cceca 100644
+--- a/src/common.c
++++ b/src/common.c
+@@ -18,6 +18,7 @@
+
+ #include <config.h>
+
++#include <limits.h>
+ #include <stdarg.h>
+ #include <string.h>
+ #if HAVE_UNISTD_H
+@@ -990,6 +991,7 @@ psf_binheader_readf (SF_PRIVATE *psf, char const *format, ...)
+ double *doubleptr ;
+ char c ;
+ int byte_count = 0, count = 0 ;
++ int read_bytes = 0 ;
+
+ if (! format)
+ return psf_ftell (psf) ;
+@@ -998,6 +1000,7 @@ psf_binheader_readf (SF_PRIVATE *psf, char const *format, ...)
+
+ while ((c = *format++))
+ {
++ read_bytes = 0 ;
+ if (psf->header.indx + 16 >= psf->header.len && psf_bump_header_allocation (psf, 16))
+ break ;
+
+@@ -1014,7 +1017,7 @@ psf_binheader_readf (SF_PRIVATE *psf, char const *format, ...)
+ intptr = va_arg (argptr, unsigned int*) ;
+ *intptr = 0 ;
+ ucptr = (unsigned char*) intptr ;
+- byte_count += header_read (psf, ucptr, sizeof (int)) ;
++ read_bytes = header_read (psf, ucptr, sizeof (int)) ;
+ *intptr = GET_MARKER (ucptr) ;
+ break ;
+
+@@ -1022,7 +1025,7 @@ psf_binheader_readf (SF_PRIVATE *psf, char const *format, ...)
+ intptr = va_arg (argptr, unsigned int*) ;
+ *intptr = 0 ;
+ ucptr = (unsigned char*) intptr ;
+- byte_count += header_read (psf, sixteen_bytes, sizeof (sixteen_bytes)) ;
++ read_bytes = header_read (psf, sixteen_bytes, sizeof (sixteen_bytes)) ;
+ { int k ;
+ intdata = 0 ;
+ for (k = 0 ; k < 16 ; k++)
+@@ -1034,14 +1037,14 @@ psf_binheader_readf (SF_PRIVATE *psf, char const *format, ...)
+ case '1' :
+ charptr = va_arg (argptr, char*) ;
+ *charptr = 0 ;
+- byte_count += header_read (psf, charptr, sizeof (char)) ;
++ read_bytes = header_read (psf, charptr, sizeof (char)) ;
+ break ;
+
+ case '2' : /* 2 byte value with the current endian-ness */
+ shortptr = va_arg (argptr, unsigned short*) ;
+ *shortptr = 0 ;
+ ucptr = (unsigned char*) shortptr ;
+- byte_count += header_read (psf, ucptr, sizeof (short)) ;
++ read_bytes = header_read (psf, ucptr, sizeof (short)) ;
+ if (psf->rwf_endian == SF_ENDIAN_BIG)
+ *shortptr = GET_BE_SHORT (ucptr) ;
+ else
+@@ -1051,7 +1054,7 @@ psf_binheader_readf (SF_PRIVATE *psf, char const *format, ...)
+ case '3' : /* 3 byte value with the current endian-ness */
+ intptr = va_arg (argptr, unsigned int*) ;
+ *intptr = 0 ;
+- byte_count += header_read (psf, sixteen_bytes, 3) ;
++ read_bytes = header_read (psf, sixteen_bytes, 3) ;
+ if (psf->rwf_endian == SF_ENDIAN_BIG)
+ *intptr = GET_BE_3BYTE (sixteen_bytes) ;
+ else
+@@ -1062,7 +1065,7 @@ psf_binheader_readf (SF_PRIVATE *psf, char const *format, ...)
+ intptr = va_arg (argptr, unsigned int*) ;
+ *intptr = 0 ;
+ ucptr = (unsigned char*) intptr ;
+- byte_count += header_read (psf, ucptr, sizeof (int)) ;
++ read_bytes = header_read (psf, ucptr, sizeof (int)) ;
+ if (psf->rwf_endian == SF_ENDIAN_BIG)
+ *intptr = psf_get_be32 (ucptr, 0) ;
+ else
+@@ -1072,7 +1075,7 @@ psf_binheader_readf (SF_PRIVATE *psf, char const *format, ...)
+ case '8' : /* 8 byte value with the current endian-ness */
+ countptr = va_arg (argptr, sf_count_t *) ;
+ *countptr = 0 ;
+- byte_count += header_read (psf, sixteen_bytes, 8) ;
++ read_bytes = header_read (psf, sixteen_bytes, 8) ;
+ if (psf->rwf_endian == SF_ENDIAN_BIG)
+ countdata = psf_get_be64 (sixteen_bytes, 0) ;
+ else
+@@ -1083,7 +1086,7 @@ psf_binheader_readf (SF_PRIVATE *psf, char const *format, ...)
+ case 'f' : /* Float conversion */
+ floatptr = va_arg (argptr, float *) ;
+ *floatptr = 0.0 ;
+- byte_count += header_read (psf, floatptr, sizeof (float)) ;
++ read_bytes = header_read (psf, floatptr, sizeof (float)) ;
+ if (psf->rwf_endian == SF_ENDIAN_BIG)
+ *floatptr = float32_be_read ((unsigned char*) floatptr) ;
+ else
+@@ -1093,7 +1096,7 @@ psf_binheader_readf (SF_PRIVATE *psf, char const *format, ...)
+ case 'd' : /* double conversion */
+ doubleptr = va_arg (argptr, double *) ;
+ *doubleptr = 0.0 ;
+- byte_count += header_read (psf, doubleptr, sizeof (double)) ;
++ read_bytes = header_read (psf, doubleptr, sizeof (double)) ;
+ if (psf->rwf_endian == SF_ENDIAN_BIG)
+ *doubleptr = double64_be_read ((unsigned char*) doubleptr) ;
+ else
+@@ -1117,7 +1120,7 @@ psf_binheader_readf (SF_PRIVATE *psf, char const *format, ...)
+ charptr = va_arg (argptr, char*) ;
+ count = va_arg (argptr, size_t) ;
+ memset (charptr, 0, count) ;
+- byte_count += header_read (psf, charptr, count) ;
++ read_bytes = header_read (psf, charptr, count) ;
+ break ;
+
+ case 'G' :
+@@ -1128,7 +1131,7 @@ psf_binheader_readf (SF_PRIVATE *psf, char const *format, ...)
+ if (psf->header.indx + count >= psf->header.len && psf_bump_header_allocation (psf, count))
+ break ;
+
+- byte_count += header_gets (psf, charptr, count) ;
++ read_bytes = header_gets (psf, charptr, count) ;
+ break ;
+
+ case 'z' :
+@@ -1152,7 +1155,7 @@ psf_binheader_readf (SF_PRIVATE *psf, char const *format, ...)
+ case 'j' : /* Seek to position from current position. */
+ count = va_arg (argptr, size_t) ;
+ header_seek (psf, count, SEEK_CUR) ;
+- byte_count += count ;
++ read_bytes = count ;
+ break ;
+
+ case '!' : /* Clear buffer, forcing re-read. */
+@@ -1164,8 +1167,17 @@ psf_binheader_readf (SF_PRIVATE *psf, char const *format, ...)
+ psf->error = SFE_INTERNAL ;
+ break ;
+ } ;
++
++ if (read_bytes > 0 && byte_count > (INT_MAX - read_bytes))
++ { psf_log_printf (psf, "Header size exceeds INT_MAX. Aborting.", c) ;
++ psf->error = SFE_INTERNAL ;
++ break ;
++ } else
++ { byte_count += read_bytes ;
+ } ;
+
++ } ; /*end while*/
++
+ va_end (argptr) ;
+
+ return byte_count ;
+From a23d563386e7c8d93dcdbe7d5b1d63cad6009116 Mon Sep 17 00:00:00 2001
+From: Alex Stewart <alex.stewart@ni.com>
+Date: Thu, 19 Oct 2023 14:07:19 -0400
+Subject: [PATCH] nms_adpcm: fix int overflow in signal estimate
+
+It is possible (though functionally incorrect) for the signal estimate
+calculation in nms_adpcm_update() to overflow the int value of s_e,
+resulting in undefined behavior.
+
+Since adpcm state signal values are never practically larger than
+16 bits, use smaller numeric sizes throughout the file to avoid the
+overflow.
+
+CVE: CVE-2022-33065
+Fixes: https://github.com/libsndfile/libsndfile/issues/833
+
+Authored-by: Arthur Taylor <art@ified.ca>
+Signed-off-by: Alex Stewart <alex.stewart@ni.com>
+Rebased-by: Alex Stewart <alex.stewart@ni.com>
+---
+ src/nms_adpcm.c | 85 ++++++++++++++++++++++++-------------------------
+ 1 file changed, 42 insertions(+), 43 deletions(-)
+
+diff --git a/src/nms_adpcm.c b/src/nms_adpcm.c
+index 96d6ad26..460ea077 100644
+--- a/src/nms_adpcm.c
++++ b/src/nms_adpcm.c
+@@ -48,36 +48,36 @@
+ /* Variable names from ITU G.726 spec */
+ struct nms_adpcm_state
+ { /* Log of the step size multiplier. Operated on by codewords. */
+- int yl ;
++ short yl ;
+
+ /* Quantizer step size multiplier. Generated from yl. */
+- int y ;
++ short y ;
+
+- /* Coefficents of the pole predictor */
+- int a [2] ;
++ /* Coefficients of the pole predictor */
++ short a [2] ;
+
+- /* Coefficents of the zero predictor */
+- int b [6] ;
++ /* Coefficients of the zero predictor */
++ short b [6] ;
+
+ /* Previous quantized deltas (multiplied by 2^14) */
+- int d_q [7] ;
++ short d_q [7] ;
+
+ /* d_q [x] + s_ez [x], used by the pole-predictor for signs only. */
+- int p [3] ;
++ short p [3] ;
+
+ /* Previous reconstructed signal values. */
+- int s_r [2] ;
++ short s_r [2] ;
+
+ /* Zero predictor components of the signal estimate. */
+- int s_ez ;
++ short s_ez ;
+
+ /* Signal estimate, (including s_ez). */
+- int s_e ;
++ short s_e ;
+
+ /* The most recent codeword (enc:generated, dec:inputted) */
+- int Ik ;
++ char Ik ;
+
+- int parity ;
++ char parity ;
+
+ /*
+ ** Offset into code tables for the bitrate.
+@@ -109,7 +109,7 @@ typedef struct
+ } NMS_ADPCM_PRIVATE ;
+
+ /* Pre-computed exponential interval used in the antilog approximation. */
+-static unsigned int table_expn [] =
++static unsigned short table_expn [] =
+ { 0x4000, 0x4167, 0x42d5, 0x444c, 0x45cb, 0x4752, 0x48e2, 0x4a7a,
+ 0x4c1b, 0x4dc7, 0x4f7a, 0x5138, 0x52ff, 0x54d1, 0x56ac, 0x5892,
+ 0x5a82, 0x5c7e, 0x5e84, 0x6096, 0x62b4, 0x64dd, 0x6712, 0x6954,
+@@ -117,21 +117,21 @@ static unsigned int table_expn [] =
+ } ;
+
+ /* Table mapping codewords to scale factor deltas. */
+-static int table_scale_factor_step [] =
++static short table_scale_factor_step [] =
+ { 0x0, 0x0, 0x0, 0x0, 0x4b0, 0x0, 0x0, 0x0, /* 2-bit */
+ -0x3c, 0x0, 0x90, 0x0, 0x2ee, 0x0, 0x898, 0x0, /* 3-bit */
+ -0x30, 0x12, 0x6b, 0xc8, 0x188, 0x2e0, 0x551, 0x1150, /* 4-bit */
+ } ;
+
+ /* Table mapping codewords to quantized delta interval steps. */
+-static unsigned int table_step [] =
++static unsigned short table_step [] =
+ { 0x73F, 0, 0, 0, 0x1829, 0, 0, 0, /* 2-bit */
+ 0x3EB, 0, 0xC18, 0, 0x1581, 0, 0x226E, 0, /* 3-bit */
+ 0x20C, 0x635, 0xA83, 0xF12, 0x1418, 0x19E3, 0x211A, 0x2BBA, /* 4-bit */
+ } ;
+
+ /* Binary search lookup table for quantizing using table_step. */
+-static int table_step_search [] =
++static short table_step_search [] =
+ { 0, 0x1F6D, 0, -0x1F6D, 0, 0, 0, 0, /* 2-bit */
+ 0x1008, 0x1192, 0, -0x219A, 0x1656, -0x1656, 0, 0, /* 3-bit */
+ 0x872, 0x1277, -0x8E6, -0x232B, 0xD06, -0x17D7, -0x11D3, 0, /* 4-bit */
+@@ -179,23 +179,23 @@ static sf_count_t nms_adpcm_seek (SF_PRIVATE *psf, int mode, sf_count_t offset)
+ ** Maps [1,20480] to [1,1024] in an exponential relationship. This is
+ ** approximately ret = b^exp where b = e^(ln(1024)/ln(20480)) ~= 1.0003385
+ */
+-static inline int
+-nms_adpcm_antilog (int exp)
+-{ int ret ;
++static inline short
++nms_adpcm_antilog (short exp)
++{ int_fast32_t r ;
+
+- ret = 0x1000 ;
+- ret += (((exp & 0x3f) * 0x166b) >> 12) ;
+- ret *= table_expn [(exp & 0x7c0) >> 6] ;
+- ret >>= (26 - (exp >> 11)) ;
++ r = 0x1000 ;
++ r += (((int_fast32_t) (exp & 0x3f) * 0x166b) >> 12) ;
++ r *= table_expn [(exp & 0x7c0) >> 6] ;
++ r >>= (26 - (exp >> 11)) ;
+
+- return ret ;
++ return (short) r ;
+ } /* nms_adpcm_antilog */
+
+ static void
+ nms_adpcm_update (struct nms_adpcm_state *s)
+ { /* Variable names from ITU G.726 spec */
+- int a1ul ;
+- int fa1 ;
++ short a1ul, fa1 ;
++ int_fast32_t se ;
+ int i ;
+
+ /* Decay and Modify the scale factor in the log domain based on the codeword. */
+@@ -222,7 +222,7 @@ nms_adpcm_update (struct nms_adpcm_state *s)
+ else if (fa1 > 256)
+ fa1 = 256 ;
+
+- s->a [0] = (0xff * s->a [0]) >> 8 ;
++ s->a [0] = (s->a [0] * 0xff) >> 8 ;
+ if (s->p [0] != 0 && s->p [1] != 0 && ((s->p [0] ^ s->p [1]) < 0))
+ s->a [0] -= 192 ;
+ else
+@@ -230,7 +230,7 @@ nms_adpcm_update (struct nms_adpcm_state *s)
+ fa1 = -fa1 ;
+ }
+
+- s->a [1] = fa1 + ((0xfe * s->a [1]) >> 8) ;
++ s->a [1] = fa1 + ((s->a [1] * 0xfe) >> 8) ;
+ if (s->p [0] != 0 && s->p [2] != 0 && ((s->p [0] ^ s->p [2]) < 0))
+ s->a [1] -= 128 ;
+ else
+@@ -250,19 +250,18 @@ nms_adpcm_update (struct nms_adpcm_state *s)
+ s->a [0] = a1ul ;
+ } ;
+
+- /* Compute the zero predictor estimate. Rotate past deltas too. */
+- s->s_ez = 0 ;
++ /* Compute the zero predictor estimate and rotate past deltas. */
++ se = 0 ;
+ for (i = 5 ; i >= 0 ; i--)
+- { s->s_ez += s->d_q [i] * s->b [i] ;
++ { se += (int_fast32_t) s->d_q [i] * s->b [i] ;
+ s->d_q [i + 1] = s->d_q [i] ;
+ } ;
++ s->s_ez = se >> 14 ;
+
+- /* Compute the signal estimate. */
+- s->s_e = s->a [0] * s->s_r [0] + s->a [1] * s->s_r [1] + s->s_ez ;
+-
+- /* Return to scale */
+- s->s_ez >>= 14 ;
+- s->s_e >>= 14 ;
++ /* Complete the signal estimate. */
++ se += (int_fast32_t) s->a [0] * s->s_r [0] ;
++ se += (int_fast32_t) s->a [1] * s->s_r [1] ;
++ s->s_e = se >> 14 ;
+
+ /* Rotate members to prepare for next iteration. */
+ s->s_r [1] = s->s_r [0] ;
+@@ -274,7 +273,7 @@ nms_adpcm_update (struct nms_adpcm_state *s)
+ static int16_t
+ nms_adpcm_reconstruct_sample (struct nms_adpcm_state *s, uint8_t I)
+ { /* Variable names from ITU G.726 spec */
+- int dqx ;
++ int_fast32_t dqx ;
+
+ /*
+ ** The ordering of the 12-bit right-shift is a precision loss. It agrees
+@@ -308,17 +307,17 @@ nms_adpcm_codec_init (struct nms_adpcm_state *s, enum nms_enc_type type)
+ /*
+ ** nms_adpcm_encode_sample()
+ **
+-** Encode a linear 16-bit pcm sample into a 2,3, or 4 bit NMS-ADPCM codeword
++** Encode a linear 16-bit pcm sample into a 2, 3, or 4 bit NMS-ADPCM codeword
+ ** using and updating the predictor state.
+ */
+ static uint8_t
+ nms_adpcm_encode_sample (struct nms_adpcm_state *s, int16_t sl)
+ { /* Variable names from ITU G.726 spec */
+- int d ;
++ int_fast32_t d ;
+ uint8_t I ;
+
+ /* Down scale the sample from 16 => ~14 bits. */
+- sl = (sl * 0x1fdf) / 0x7fff ;
++ sl = ((int_fast32_t) sl * 0x1fdf) / 0x7fff ;
+
+ /* Compute estimate, and delta from actual value */
+ nms_adpcm_update (s) ;
+@@ -407,7 +406,7 @@ nms_adpcm_encode_sample (struct nms_adpcm_state *s, int16_t sl)
+ */
+ static int16_t
+ nms_adpcm_decode_sample (struct nms_adpcm_state *s, uint8_t I)
+-{ int sl ;
++{ int_fast32_t sl ;
+
+ nms_adpcm_update (s) ;
+ sl = nms_adpcm_reconstruct_sample (s, I) ;
diff --git a/meta/recipes-multimedia/libsndfile/libsndfile1_1.1.0.bb b/meta/recipes-multimedia/libsndfile/libsndfile1_1.1.0.bb
deleted file mode 100644
index 70626b3c16..0000000000
--- a/meta/recipes-multimedia/libsndfile/libsndfile1_1.1.0.bb
+++ /dev/null
@@ -1,32 +0,0 @@
-SUMMARY = "Audio format Conversion library"
-DESCRIPTION = "Library for reading and writing files containing sampled \
-sound (such as MS Windows WAV and the Apple/SGI AIFF format) through \
-one standard library interface."
-HOMEPAGE = "https://libsndfile.github.io/libsndfile/"
-AUTHOR = "Erik de Castro Lopo"
-DEPENDS = "flac libogg libvorbis"
-SECTION = "libs/multimedia"
-LICENSE = "LGPL-2.1-only"
-
-SRC_URI = "https://github.com/libsndfile/libsndfile/releases/download/${PV}/libsndfile-${PV}.tar.xz \
- file://noopus.patch \
- "
-UPSTREAM_CHECK_URI = "https://github.com/libsndfile/libsndfile/releases/"
-
-SRC_URI[sha256sum] = "0f98e101c0f7c850a71225fb5feaf33b106227b3d331333ddc9bacee190bcf41"
-
-LIC_FILES_CHKSUM = "file://COPYING;md5=e77fe93202736b47c07035910f47974a"
-
-CVE_PRODUCT = "libsndfile"
-
-S = "${WORKDIR}/libsndfile-${PV}"
-
-PACKAGECONFIG ??= "${@bb.utils.filter('DISTRO_FEATURES', 'alsa', d)}"
-PACKAGECONFIG[alsa] = "--enable-alsa,--disable-alsa,alsa-lib"
-PACKAGECONFIG[regtest] = "--enable-sqlite,--disable-sqlite,sqlite3"
-
-inherit autotools lib_package pkgconfig multilib_header
-
-do_install:append() {
- oe_multilib_header sndfile.h
-}
diff --git a/meta/recipes-multimedia/libsndfile/libsndfile1_1.2.2.bb b/meta/recipes-multimedia/libsndfile/libsndfile1_1.2.2.bb
new file mode 100644
index 0000000000..a9ee7c3575
--- /dev/null
+++ b/meta/recipes-multimedia/libsndfile/libsndfile1_1.2.2.bb
@@ -0,0 +1,32 @@
+SUMMARY = "Audio format Conversion library"
+DESCRIPTION = "Library for reading and writing files containing sampled \
+sound (such as MS Windows WAV and the Apple/SGI AIFF format) through \
+one standard library interface."
+HOMEPAGE = "https://libsndfile.github.io/libsndfile/"
+DEPENDS = "flac libogg libvorbis"
+SECTION = "libs/multimedia"
+LICENSE = "LGPL-2.1-only"
+
+SRC_URI = "${GITHUB_BASE_URI}/download/${PV}/libsndfile-${PV}.tar.xz \
+ file://noopus.patch \
+ file://cve-2022-33065.patch \
+ "
+GITHUB_BASE_URI = "https://github.com/libsndfile/libsndfile/releases/"
+
+SRC_URI[sha256sum] = "3799ca9924d3125038880367bf1468e53a1b7e3686a934f098b7e1d286cdb80e"
+
+LIC_FILES_CHKSUM = "file://COPYING;md5=e77fe93202736b47c07035910f47974a"
+
+CVE_PRODUCT = "libsndfile"
+
+S = "${WORKDIR}/libsndfile-${PV}"
+
+PACKAGECONFIG ??= "${@bb.utils.filter('DISTRO_FEATURES', 'alsa', d)}"
+PACKAGECONFIG[alsa] = "--enable-alsa,--disable-alsa,alsa-lib"
+PACKAGECONFIG[regtest] = "--enable-sqlite,--disable-sqlite,sqlite3"
+
+inherit autotools lib_package pkgconfig multilib_header github-releases
+
+do_install:append() {
+ oe_multilib_header sndfile.h
+}
diff --git a/meta/recipes-multimedia/libtheora/libtheora_1.1.1.bb b/meta/recipes-multimedia/libtheora/libtheora_1.1.1.bb
index 178e1a9240..11674af379 100644
--- a/meta/recipes-multimedia/libtheora/libtheora_1.1.1.bb
+++ b/meta/recipes-multimedia/libtheora/libtheora_1.1.1.bb
@@ -7,7 +7,6 @@ LICENSE = "BSD-3-Clause"
LIC_FILES_CHKSUM = "file://COPYING;md5=cf91718f59eb6a83d06dc7bcaf411132"
DEPENDS = "libogg"
-PR = "r1"
SRC_URI = "http://downloads.xiph.org/releases/theora/libtheora-${PV}.tar.bz2 \
file://no-docs.patch"
diff --git a/meta/recipes-multimedia/libtiff/tiff/0001-tif_jbig.c-fix-crash-when-reading-a-file-with-multip.patch b/meta/recipes-multimedia/libtiff/tiff/0001-tif_jbig.c-fix-crash-when-reading-a-file-with-multip.patch
deleted file mode 100644
index f1a4ab4251..0000000000
--- a/meta/recipes-multimedia/libtiff/tiff/0001-tif_jbig.c-fix-crash-when-reading-a-file-with-multip.patch
+++ /dev/null
@@ -1,38 +0,0 @@
-CVE: CVE-2022-0865
-Upstream-Status: Backport
-Signed-off-by: Ross Burton <ross.burton@arm.com>
-
-From 88da11ae3c4db527cb870fb1017456cc8fbac2e7 Mon Sep 17 00:00:00 2001
-From: Even Rouault <even.rouault@spatialys.com>
-Date: Thu, 24 Feb 2022 22:26:02 +0100
-Subject: [PATCH 1/6] tif_jbig.c: fix crash when reading a file with multiple
- IFD in memory-mapped mode and when bit reversal is needed (fixes #385)
-
----
- libtiff/tif_jbig.c | 10 ++++++++++
- 1 file changed, 10 insertions(+)
-
-diff --git a/libtiff/tif_jbig.c b/libtiff/tif_jbig.c
-index 74086338..8bfa4cef 100644
---- a/libtiff/tif_jbig.c
-+++ b/libtiff/tif_jbig.c
-@@ -209,6 +209,16 @@ int TIFFInitJBIG(TIFF* tif, int scheme)
- */
- tif->tif_flags |= TIFF_NOBITREV;
- tif->tif_flags &= ~TIFF_MAPPED;
-+ /* We may have read from a previous IFD and thus set TIFF_BUFFERMMAP and
-+ * cleared TIFF_MYBUFFER. It is necessary to restore them to their initial
-+ * value to be consistent with the state of a non-memory mapped file.
-+ */
-+ if (tif->tif_flags&TIFF_BUFFERMMAP) {
-+ tif->tif_rawdata = NULL;
-+ tif->tif_rawdatasize = 0;
-+ tif->tif_flags &= ~TIFF_BUFFERMMAP;
-+ tif->tif_flags |= TIFF_MYBUFFER;
-+ }
-
- /* Setup the function pointers for encode, decode, and cleanup. */
- tif->tif_setupdecode = JBIGSetupDecode;
---
-2.25.1
-
diff --git a/meta/recipes-multimedia/libtiff/tiff/0001-tiffset-fix-global-buffer-overflow-for-ASCII-tags-wh.patch b/meta/recipes-multimedia/libtiff/tiff/0001-tiffset-fix-global-buffer-overflow-for-ASCII-tags-wh.patch
deleted file mode 100644
index 72776f09ba..0000000000
--- a/meta/recipes-multimedia/libtiff/tiff/0001-tiffset-fix-global-buffer-overflow-for-ASCII-tags-wh.patch
+++ /dev/null
@@ -1,43 +0,0 @@
-CVE: CVE-2022-22844
-Upstream-Status: Backport
-Signed-off-by: Ross Burton <ross.burton@arm.com>
-
-From b12a0326e6064b6e0b051d1184a219877472f69b Mon Sep 17 00:00:00 2001
-From: 4ugustus <wangdw.augustus@qq.com>
-Date: Tue, 25 Jan 2022 16:25:28 +0000
-Subject: [PATCH] tiffset: fix global-buffer-overflow for ASCII tags where
- count is required (fixes #355)
-
----
- tools/tiffset.c | 16 +++++++++++++---
- 1 file changed, 13 insertions(+), 3 deletions(-)
-
-diff --git a/tools/tiffset.c b/tools/tiffset.c
-index 8c9e23c5..e7a88c09 100644
---- a/tools/tiffset.c
-+++ b/tools/tiffset.c
-@@ -146,9 +146,19 @@ main(int argc, char* argv[])
-
- arg_index++;
- if (TIFFFieldDataType(fip) == TIFF_ASCII) {
-- if (TIFFSetField(tiff, TIFFFieldTag(fip), argv[arg_index]) != 1)
-- fprintf( stderr, "Failed to set %s=%s\n",
-- TIFFFieldName(fip), argv[arg_index] );
-+ if(TIFFFieldPassCount( fip )) {
-+ size_t len;
-+ len = strlen(argv[arg_index]) + 1;
-+ if (len > UINT16_MAX || TIFFSetField(tiff, TIFFFieldTag(fip),
-+ (uint16_t)len, argv[arg_index]) != 1)
-+ fprintf( stderr, "Failed to set %s=%s\n",
-+ TIFFFieldName(fip), argv[arg_index] );
-+ } else {
-+ if (TIFFSetField(tiff, TIFFFieldTag(fip),
-+ argv[arg_index]) != 1)
-+ fprintf( stderr, "Failed to set %s=%s\n",
-+ TIFFFieldName(fip), argv[arg_index] );
-+ }
- } else if (TIFFFieldWriteCount(fip) > 0
- || TIFFFieldWriteCount(fip) == TIFF_VARIABLE) {
- int ret = 1;
---
-2.25.1
diff --git a/meta/recipes-multimedia/libtiff/tiff/0002-tiffcrop-fix-issue-380-and-382-heap-buffer-overflow-.patch b/meta/recipes-multimedia/libtiff/tiff/0002-tiffcrop-fix-issue-380-and-382-heap-buffer-overflow-.patch
deleted file mode 100644
index 812ffb232d..0000000000
--- a/meta/recipes-multimedia/libtiff/tiff/0002-tiffcrop-fix-issue-380-and-382-heap-buffer-overflow-.patch
+++ /dev/null
@@ -1,219 +0,0 @@
-CVE: CVE-2022-0891
-CVE: CVE-2022-1056
-Upstream-Status: Backport
-Signed-off-by: Ross Burton <ross.burton@arm.com>
-
-From e46b49e60fddb2e924302fb1751f79eb9cfb2253 Mon Sep 17 00:00:00 2001
-From: Su Laus <sulau@freenet.de>
-Date: Tue, 8 Mar 2022 17:02:44 +0000
-Subject: [PATCH 2/6] tiffcrop: fix issue #380 and #382 heap buffer overflow in
- extractImageSection
-
----
- tools/tiffcrop.c | 92 +++++++++++++++++++-----------------------------
- 1 file changed, 36 insertions(+), 56 deletions(-)
-
-diff --git a/tools/tiffcrop.c b/tools/tiffcrop.c
-index b85c2ce7..302a7e91 100644
---- a/tools/tiffcrop.c
-+++ b/tools/tiffcrop.c
-@@ -105,8 +105,8 @@
- * of messages to monitor progress without enabling dump logs.
- */
-
--static char tiffcrop_version_id[] = "2.4";
--static char tiffcrop_rev_date[] = "12-13-2010";
-+static char tiffcrop_version_id[] = "2.4.1";
-+static char tiffcrop_rev_date[] = "03-03-2010";
-
- #include "tif_config.h"
- #include "libport.h"
-@@ -6710,10 +6710,10 @@ extractImageSection(struct image_data *image, struct pageseg *section,
- #ifdef DEVELMODE
- uint32_t img_length;
- #endif
-- uint32_t j, shift1, shift2, trailing_bits;
-+ uint32_t j, shift1, trailing_bits;
- uint32_t row, first_row, last_row, first_col, last_col;
- uint32_t src_offset, dst_offset, row_offset, col_offset;
-- uint32_t offset1, offset2, full_bytes;
-+ uint32_t offset1, full_bytes;
- uint32_t sect_width;
- #ifdef DEVELMODE
- uint32_t sect_length;
-@@ -6723,7 +6723,6 @@ extractImageSection(struct image_data *image, struct pageseg *section,
- #ifdef DEVELMODE
- int k;
- unsigned char bitset;
-- static char *bitarray = NULL;
- #endif
-
- img_width = image->width;
-@@ -6741,17 +6740,12 @@ extractImageSection(struct image_data *image, struct pageseg *section,
- dst_offset = 0;
-
- #ifdef DEVELMODE
-- if (bitarray == NULL)
-- {
-- if ((bitarray = (char *)malloc(img_width)) == NULL)
-- {
-- TIFFError ("", "DEBUG: Unable to allocate debugging bitarray");
-- return (-1);
-- }
-- }
-+ char bitarray[39];
- #endif
-
-- /* rows, columns, width, length are expressed in pixels */
-+ /* rows, columns, width, length are expressed in pixels
-+ * first_row, last_row, .. are index into image array starting at 0 to width-1,
-+ * last_col shall be also extracted. */
- first_row = section->y1;
- last_row = section->y2;
- first_col = section->x1;
-@@ -6761,9 +6755,14 @@ extractImageSection(struct image_data *image, struct pageseg *section,
- #ifdef DEVELMODE
- sect_length = last_row - first_row + 1;
- #endif
-- img_rowsize = ((img_width * bps + 7) / 8) * spp;
-- full_bytes = (sect_width * spp * bps) / 8; /* number of COMPLETE bytes per row in section */
-- trailing_bits = (sect_width * bps) % 8;
-+ /* The read function loadImage() used copy separate plane data into a buffer as interleaved
-+ * samples rather than separate planes so the same logic works to extract regions
-+ * regardless of the way the data are organized in the input file.
-+ * Furthermore, bytes and bits are arranged in buffer according to COMPRESSION=1 and FILLORDER=1
-+ */
-+ img_rowsize = (((img_width * spp * bps) + 7) / 8); /* row size in full bytes of source image */
-+ full_bytes = (sect_width * spp * bps) / 8; /* number of COMPLETE bytes per row in section */
-+ trailing_bits = (sect_width * spp * bps) % 8; /* trailing bits within the last byte of destination buffer */
-
- #ifdef DEVELMODE
- TIFFError ("", "First row: %"PRIu32", last row: %"PRIu32", First col: %"PRIu32", last col: %"PRIu32"\n",
-@@ -6776,10 +6775,9 @@ extractImageSection(struct image_data *image, struct pageseg *section,
-
- if ((bps % 8) == 0)
- {
-- col_offset = first_col * spp * bps / 8;
-+ col_offset = (first_col * spp * bps) / 8;
- for (row = first_row; row <= last_row; row++)
- {
-- /* row_offset = row * img_width * spp * bps / 8; */
- row_offset = row * img_rowsize;
- src_offset = row_offset + col_offset;
-
-@@ -6792,14 +6790,12 @@ extractImageSection(struct image_data *image, struct pageseg *section,
- }
- else
- { /* bps != 8 */
-- shift1 = spp * ((first_col * bps) % 8);
-- shift2 = spp * ((last_col * bps) % 8);
-+ shift1 = ((first_col * spp * bps) % 8); /* shift1 = bits to skip in the first byte of source buffer*/
- for (row = first_row; row <= last_row; row++)
- {
- /* pull out the first byte */
- row_offset = row * img_rowsize;
-- offset1 = row_offset + (first_col * bps / 8);
-- offset2 = row_offset + (last_col * bps / 8);
-+ offset1 = row_offset + ((first_col * spp * bps) / 8); /* offset1 = offset into source of byte with first bits to be extracted */
-
- #ifdef DEVELMODE
- for (j = 0, k = 7; j < 8; j++, k--)
-@@ -6811,12 +6807,12 @@ extractImageSection(struct image_data *image, struct pageseg *section,
- sprintf(&bitarray[9], " ");
- for (j = 10, k = 7; j < 18; j++, k--)
- {
-- bitset = *(src_buff + offset2) & (((unsigned char)1 << k)) ? 1 : 0;
-+ bitset = *(src_buff + offset1 + full_bytes) & (((unsigned char)1 << k)) ? 1 : 0;
- sprintf(&bitarray[j], (bitset) ? "1" : "0");
- }
- bitarray[18] = '\0';
-- TIFFError ("", "Row: %3d Offset1: %"PRIu32", Shift1: %"PRIu32", Offset2: %"PRIu32", Shift2: %"PRIu32"\n",
-- row, offset1, shift1, offset2, shift2);
-+ TIFFError ("", "Row: %3d Offset1: %"PRIu32", Shift1: %"PRIu32", Offset2: %"PRIu32", Trailing_bits: %"PRIu32"\n",
-+ row, offset1, shift1, offset1+full_bytes, trailing_bits);
- #endif
-
- bytebuff1 = bytebuff2 = 0;
-@@ -6840,11 +6836,12 @@ extractImageSection(struct image_data *image, struct pageseg *section,
-
- if (trailing_bits != 0)
- {
-- bytebuff2 = src_buff[offset2] & ((unsigned char)255 << (7 - shift2));
-+ /* Only copy higher bits of samples and mask lower bits of not wanted column samples to zero */
-+ bytebuff2 = src_buff[offset1 + full_bytes] & ((unsigned char)255 << (8 - trailing_bits));
- sect_buff[dst_offset] = bytebuff2;
- #ifdef DEVELMODE
- TIFFError ("", " Trailing bits src offset: %8"PRIu32", Dst offset: %8"PRIu32"\n",
-- offset2, dst_offset);
-+ offset1 + full_bytes, dst_offset);
- for (j = 30, k = 7; j < 38; j++, k--)
- {
- bitset = *(sect_buff + dst_offset) & (((unsigned char)1 << k)) ? 1 : 0;
-@@ -6863,8 +6860,10 @@ extractImageSection(struct image_data *image, struct pageseg *section,
- #endif
- for (j = 0; j <= full_bytes; j++)
- {
-- bytebuff1 = src_buff[offset1 + j] & ((unsigned char)255 >> shift1);
-- bytebuff2 = src_buff[offset1 + j + 1] & ((unsigned char)255 << (7 - shift1));
-+ /* Skip the first shift1 bits and shift the source up by shift1 bits before save to destination.*/
-+ /* Attention: src_buff size needs to be some bytes larger than image size, because could read behind image here. */
-+ bytebuff1 = src_buff[offset1 + j] & ((unsigned char)255 >> shift1);
-+ bytebuff2 = src_buff[offset1 + j + 1] & ((unsigned char)255 << (8 - shift1));
- sect_buff[dst_offset + j] = (bytebuff1 << shift1) | (bytebuff2 >> (8 - shift1));
- }
- #ifdef DEVELMODE
-@@ -6880,36 +6879,17 @@ extractImageSection(struct image_data *image, struct pageseg *section,
- #endif
- dst_offset += full_bytes;
-
-+ /* Copy the trailing_bits for the last byte in the destination buffer.
-+ Could come from one ore two bytes of the source buffer. */
- if (trailing_bits != 0)
- {
- #ifdef DEVELMODE
-- TIFFError ("", " Trailing bits src offset: %8"PRIu32", Dst offset: %8"PRIu32"\n", offset1 + full_bytes, dst_offset);
--#endif
-- if (shift2 > shift1)
-- {
-- bytebuff1 = src_buff[offset1 + full_bytes] & ((unsigned char)255 << (7 - shift2));
-- bytebuff2 = bytebuff1 & ((unsigned char)255 << shift1);
-- sect_buff[dst_offset] = bytebuff2;
--#ifdef DEVELMODE
-- TIFFError ("", " Shift2 > Shift1\n");
-+ TIFFError("", " Trailing bits %4"PRIu32" src offset: %8"PRIu32", Dst offset: %8"PRIu32"\n", trailing_bits, offset1 + full_bytes, dst_offset);
- #endif
-+ /* More than necessary bits are already copied into last destination buffer,
-+ * only masking of last byte in destination buffer is necessary.*/
-+ sect_buff[dst_offset] &= ((uint8_t)0xFF << (8 - trailing_bits));
- }
-- else
-- {
-- if (shift2 < shift1)
-- {
-- bytebuff2 = ((unsigned char)255 << (shift1 - shift2 - 1));
-- sect_buff[dst_offset] &= bytebuff2;
--#ifdef DEVELMODE
-- TIFFError ("", " Shift2 < Shift1\n");
--#endif
-- }
--#ifdef DEVELMODE
-- else
-- TIFFError ("", " Shift2 == Shift1\n");
--#endif
-- }
-- }
- #ifdef DEVELMODE
- sprintf(&bitarray[28], " ");
- sprintf(&bitarray[29], " ");
-@@ -7062,7 +7042,7 @@ writeImageSections(TIFF *in, TIFF *out, struct image_data *image,
- width = sections[i].x2 - sections[i].x1 + 1;
- length = sections[i].y2 - sections[i].y1 + 1;
- sectsize = (uint32_t)
-- ceil((width * image->bps + 7) / (double)8) * image->spp * length;
-+ ceil((width * image->bps * image->spp + 7) / (double)8) * length;
- /* allocate a buffer if we don't have one already */
- if (createImageSection(sectsize, sect_buff_ptr))
- {
---
-2.25.1
-
diff --git a/meta/recipes-multimedia/libtiff/tiff/0003-add-checks-for-return-value-of-limitMalloc-392.patch b/meta/recipes-multimedia/libtiff/tiff/0003-add-checks-for-return-value-of-limitMalloc-392.patch
deleted file mode 100644
index a0b856b9e1..0000000000
--- a/meta/recipes-multimedia/libtiff/tiff/0003-add-checks-for-return-value-of-limitMalloc-392.patch
+++ /dev/null
@@ -1,93 +0,0 @@
-CVE: CVE-2022-0907
-Upstream-Status: Backport
-Signed-off-by: Ross Burton <ross.burton@arm.com>
-
-From a139191cc86f4dc44c74a0f22928e0fb38ed2485 Mon Sep 17 00:00:00 2001
-From: Augustus <wangdw.augustus@qq.com>
-Date: Mon, 7 Mar 2022 18:21:49 +0800
-Subject: [PATCH 3/6] add checks for return value of limitMalloc (#392)
-
----
- tools/tiffcrop.c | 33 +++++++++++++++++++++------------
- 1 file changed, 21 insertions(+), 12 deletions(-)
-
-diff --git a/tools/tiffcrop.c b/tools/tiffcrop.c
-index 302a7e91..e407bf51 100644
---- a/tools/tiffcrop.c
-+++ b/tools/tiffcrop.c
-@@ -7357,7 +7357,11 @@ createImageSection(uint32_t sectsize, unsigned char **sect_buff_ptr)
- if (!sect_buff)
- {
- sect_buff = (unsigned char *)limitMalloc(sectsize);
-- *sect_buff_ptr = sect_buff;
-+ if (!sect_buff)
-+ {
-+ TIFFError("createImageSection", "Unable to allocate/reallocate section buffer");
-+ return (-1);
-+ }
- _TIFFmemset(sect_buff, 0, sectsize);
- }
- else
-@@ -7373,15 +7377,15 @@ createImageSection(uint32_t sectsize, unsigned char **sect_buff_ptr)
- else
- sect_buff = new_buff;
-
-+ if (!sect_buff)
-+ {
-+ TIFFError("createImageSection", "Unable to allocate/reallocate section buffer");
-+ return (-1);
-+ }
- _TIFFmemset(sect_buff, 0, sectsize);
- }
- }
-
-- if (!sect_buff)
-- {
-- TIFFError("createImageSection", "Unable to allocate/reallocate section buffer");
-- return (-1);
-- }
- prev_sectsize = sectsize;
- *sect_buff_ptr = sect_buff;
-
-@@ -7648,7 +7652,11 @@ createCroppedImage(struct image_data *image, struct crop_mask *crop,
- if (!crop_buff)
- {
- crop_buff = (unsigned char *)limitMalloc(cropsize);
-- *crop_buff_ptr = crop_buff;
-+ if (!crop_buff)
-+ {
-+ TIFFError("createCroppedImage", "Unable to allocate/reallocate crop buffer");
-+ return (-1);
-+ }
- _TIFFmemset(crop_buff, 0, cropsize);
- prev_cropsize = cropsize;
- }
-@@ -7664,15 +7672,15 @@ createCroppedImage(struct image_data *image, struct crop_mask *crop,
- }
- else
- crop_buff = new_buff;
-+ if (!crop_buff)
-+ {
-+ TIFFError("createCroppedImage", "Unable to allocate/reallocate crop buffer");
-+ return (-1);
-+ }
- _TIFFmemset(crop_buff, 0, cropsize);
- }
- }
-
-- if (!crop_buff)
-- {
-- TIFFError("createCroppedImage", "Unable to allocate/reallocate crop buffer");
-- return (-1);
-- }
- *crop_buff_ptr = crop_buff;
-
- if (crop->crop_mode & CROP_INVERT)
-@@ -9231,3 +9239,4 @@ invertImage(uint16_t photometric, uint16_t spp, uint16_t bps, uint32_t width, ui
- * fill-column: 78
- * End:
- */
-+
---
-2.25.1
-
diff --git a/meta/recipes-multimedia/libtiff/tiff/0004-TIFFFetchNormalTag-avoid-calling-memcpy-with-a-null-.patch b/meta/recipes-multimedia/libtiff/tiff/0004-TIFFFetchNormalTag-avoid-calling-memcpy-with-a-null-.patch
deleted file mode 100644
index 719dabaecc..0000000000
--- a/meta/recipes-multimedia/libtiff/tiff/0004-TIFFFetchNormalTag-avoid-calling-memcpy-with-a-null-.patch
+++ /dev/null
@@ -1,33 +0,0 @@
-CVE: CVE-2022-0908
-Upstream-Status: Backport
-Signed-off-by: Ross Burton <ross.burton@arm.com>
-
-From ef5a0bf271823df168642444d051528a68205cb0 Mon Sep 17 00:00:00 2001
-From: Even Rouault <even.rouault@spatialys.com>
-Date: Thu, 17 Feb 2022 15:28:43 +0100
-Subject: [PATCH 4/6] TIFFFetchNormalTag(): avoid calling memcpy() with a null
- source pointer and size of zero (fixes #383)
-
----
- libtiff/tif_dirread.c | 5 ++++-
- 1 file changed, 4 insertions(+), 1 deletion(-)
-
-diff --git a/libtiff/tif_dirread.c b/libtiff/tif_dirread.c
-index d84147a0..4e8ce729 100644
---- a/libtiff/tif_dirread.c
-+++ b/libtiff/tif_dirread.c
-@@ -5079,7 +5079,10 @@ TIFFFetchNormalTag(TIFF* tif, TIFFDirEntry* dp, int recover)
- _TIFFfree(data);
- return(0);
- }
-- _TIFFmemcpy(o,data,(uint32_t)dp->tdir_count);
-+ if (dp->tdir_count > 0 )
-+ {
-+ _TIFFmemcpy(o,data,(uint32_t)dp->tdir_count);
-+ }
- o[(uint32_t)dp->tdir_count]=0;
- if (data!=0)
- _TIFFfree(data);
---
-2.25.1
-
diff --git a/meta/recipes-multimedia/libtiff/tiff/0005-fix-the-FPE-in-tiffcrop-393.patch b/meta/recipes-multimedia/libtiff/tiff/0005-fix-the-FPE-in-tiffcrop-393.patch
deleted file mode 100644
index 64dbe9ef92..0000000000
--- a/meta/recipes-multimedia/libtiff/tiff/0005-fix-the-FPE-in-tiffcrop-393.patch
+++ /dev/null
@@ -1,36 +0,0 @@
-CVE: CVE-2022-0909
-Upstream-Status: Backport
-Signed-off-by: Ross Burton <ross.burton@arm.com>
-
-From 4768355a074d562177e0a8b551c561d1af7eb74a Mon Sep 17 00:00:00 2001
-From: 4ugustus <wangdw.augustus@qq.com>
-Date: Tue, 8 Mar 2022 16:22:04 +0000
-Subject: [PATCH 5/6] fix the FPE in tiffcrop (#393)
-
----
- libtiff/tif_dir.c | 4 ++--
- 1 file changed, 2 insertions(+), 2 deletions(-)
-
-diff --git a/libtiff/tif_dir.c b/libtiff/tif_dir.c
-index a6c254fc..77da6ea4 100644
---- a/libtiff/tif_dir.c
-+++ b/libtiff/tif_dir.c
-@@ -335,13 +335,13 @@ _TIFFVSetField(TIFF* tif, uint32_t tag, va_list ap)
- break;
- case TIFFTAG_XRESOLUTION:
- dblval = va_arg(ap, double);
-- if( dblval < 0 )
-+ if( dblval != dblval || dblval < 0 )
- goto badvaluedouble;
- td->td_xresolution = _TIFFClampDoubleToFloat( dblval );
- break;
- case TIFFTAG_YRESOLUTION:
- dblval = va_arg(ap, double);
-- if( dblval < 0 )
-+ if( dblval != dblval || dblval < 0 )
- goto badvaluedouble;
- td->td_yresolution = _TIFFClampDoubleToFloat( dblval );
- break;
---
-2.25.1
-
diff --git a/meta/recipes-multimedia/libtiff/tiff/0006-fix-heap-buffer-overflow-in-tiffcp-278.patch b/meta/recipes-multimedia/libtiff/tiff/0006-fix-heap-buffer-overflow-in-tiffcp-278.patch
deleted file mode 100644
index afd5e59960..0000000000
--- a/meta/recipes-multimedia/libtiff/tiff/0006-fix-heap-buffer-overflow-in-tiffcp-278.patch
+++ /dev/null
@@ -1,57 +0,0 @@
-CVE: CVE-2022-0924
-Upstream-Status: Backport
-Signed-off-by: Ross Burton <ross.burton@arm.com>
-
-From 1074b9691322b1e3671cd8ea0b6b3509d08978fb Mon Sep 17 00:00:00 2001
-From: 4ugustus <wangdw.augustus@qq.com>
-Date: Thu, 10 Mar 2022 08:48:00 +0000
-Subject: [PATCH 6/6] fix heap buffer overflow in tiffcp (#278)
-
----
- tools/tiffcp.c | 17 ++++++++++++++++-
- 1 file changed, 16 insertions(+), 1 deletion(-)
-
-diff --git a/tools/tiffcp.c b/tools/tiffcp.c
-index 1f889516..552d8fad 100644
---- a/tools/tiffcp.c
-+++ b/tools/tiffcp.c
-@@ -1661,12 +1661,27 @@ DECLAREwriteFunc(writeBufferToSeparateStrips)
- tdata_t obuf;
- tstrip_t strip = 0;
- tsample_t s;
-+ uint16_t bps = 0, bytes_per_sample;
-
- obuf = limitMalloc(stripsize);
- if (obuf == NULL)
- return (0);
- _TIFFmemset(obuf, 0, stripsize);
- (void) TIFFGetFieldDefaulted(out, TIFFTAG_ROWSPERSTRIP, &rowsperstrip);
-+ (void) TIFFGetField(out, TIFFTAG_BITSPERSAMPLE, &bps);
-+ if( bps == 0 )
-+ {
-+ TIFFError(TIFFFileName(out), "Error, cannot read BitsPerSample");
-+ _TIFFfree(obuf);
-+ return 0;
-+ }
-+ if( (bps % 8) != 0 )
-+ {
-+ TIFFError(TIFFFileName(out), "Error, cannot handle BitsPerSample that is not a multiple of 8");
-+ _TIFFfree(obuf);
-+ return 0;
-+ }
-+ bytes_per_sample = bps/8;
- for (s = 0; s < spp; s++) {
- uint32_t row;
- for (row = 0; row < imagelength; row += rowsperstrip) {
-@@ -1676,7 +1691,7 @@ DECLAREwriteFunc(writeBufferToSeparateStrips)
-
- cpContigBufToSeparateBuf(
- obuf, (uint8_t*) buf + row * rowsize + s,
-- nrows, imagewidth, 0, 0, spp, 1);
-+ nrows, imagewidth, 0, 0, spp, bytes_per_sample);
- if (TIFFWriteEncodedStrip(out, strip++, obuf, stripsize) < 0) {
- TIFFError(TIFFFileName(out),
- "Error, can't write strip %"PRIu32,
---
-2.25.1
-
diff --git a/meta/recipes-multimedia/libtiff/tiff/561599c99f987dc32ae110370cfdd7df7975586b.patch b/meta/recipes-multimedia/libtiff/tiff/561599c99f987dc32ae110370cfdd7df7975586b.patch
deleted file mode 100644
index 0b41dde606..0000000000
--- a/meta/recipes-multimedia/libtiff/tiff/561599c99f987dc32ae110370cfdd7df7975586b.patch
+++ /dev/null
@@ -1,30 +0,0 @@
-From 561599c99f987dc32ae110370cfdd7df7975586b Mon Sep 17 00:00:00 2001
-From: Even Rouault <even.rouault@spatialys.com>
-Date: Sat, 5 Feb 2022 20:36:41 +0100
-Subject: [PATCH] TIFFReadDirectory(): avoid calling memcpy() with a null
- source pointer and size of zero (fixes #362)
-
-Upstream-Status: Backport
-CVE: CVE-2022-0562
-
----
- libtiff/tif_dirread.c | 3 ++-
- 1 file changed, 2 insertions(+), 1 deletion(-)
-
-diff --git a/libtiff/tif_dirread.c b/libtiff/tif_dirread.c
-index 2bbc4585..23194ced 100644
---- a/libtiff/tif_dirread.c
-+++ b/libtiff/tif_dirread.c
-@@ -4177,7 +4177,8 @@ TIFFReadDirectory(TIFF* tif)
- goto bad;
- }
-
-- memcpy(new_sampleinfo, tif->tif_dir.td_sampleinfo, old_extrasamples * sizeof(uint16_t));
-+ if (old_extrasamples > 0)
-+ memcpy(new_sampleinfo, tif->tif_dir.td_sampleinfo, old_extrasamples * sizeof(uint16_t));
- _TIFFsetShortArray(&tif->tif_dir.td_sampleinfo, new_sampleinfo, tif->tif_dir.td_extrasamples);
- _TIFFfree(new_sampleinfo);
- }
---
-GitLab
-
diff --git a/meta/recipes-multimedia/libtiff/tiff/CVE-2023-52355-0001.patch b/meta/recipes-multimedia/libtiff/tiff/CVE-2023-52355-0001.patch
new file mode 100644
index 0000000000..f5520fcafd
--- /dev/null
+++ b/meta/recipes-multimedia/libtiff/tiff/CVE-2023-52355-0001.patch
@@ -0,0 +1,238 @@
+From 335947359ce2dd3862cd9f7c49f92eba065dfed4 Mon Sep 17 00:00:00 2001
+From: Su_Laus <sulau@freenet.de>
+Date: Thu, 1 Feb 2024 13:06:08 +0000
+Subject: [PATCH] manpage: Update TIFF documentation about TIFFOpenOptions.rst
+ and TIFFOpenOptionsSetMaxSingleMemAlloc() usage and some other small fixes.
+
+CVE: CVE-2023-52355
+Upstream-Status: Backport [https://gitlab.com/libtiff/libtiff/-/commit/335947359ce2dd3862cd9f7c49f92eba065dfed4]
+
+Signed-off-by: Yogita Urade <yogita.urade@windriver.com>
+---
+ doc/functions/TIFFDeferStrileArrayWriting.rst | 5 +++
+ doc/functions/TIFFError.rst | 3 ++
+ doc/functions/TIFFOpen.rst | 13 +++---
+ doc/functions/TIFFOpenOptions.rst | 44 ++++++++++++++++++-
+ doc/functions/TIFFStrileQuery.rst | 5 +++
+ doc/libtiff.rst | 31 ++++++++++++-
+ 6 files changed, 91 insertions(+), 10 deletions(-)
+
+diff --git a/doc/functions/TIFFDeferStrileArrayWriting.rst b/doc/functions/TIFFDeferStrileArrayWriting.rst
+index 60ee746..705aebc 100644
+--- a/doc/functions/TIFFDeferStrileArrayWriting.rst
++++ b/doc/functions/TIFFDeferStrileArrayWriting.rst
+@@ -61,6 +61,11 @@ Diagnostics
+ All error messages are directed to the :c:func:`TIFFErrorExtR` routine.
+ Likewise, warning messages are directed to the :c:func:`TIFFWarningExtR` routine.
+
++Note
++----
++
++This functionality was introduced with libtiff 4.1.
++
+ See also
+ --------
+
+diff --git a/doc/functions/TIFFError.rst b/doc/functions/TIFFError.rst
+index 99924ad..cf4b37c 100644
+--- a/doc/functions/TIFFError.rst
++++ b/doc/functions/TIFFError.rst
+@@ -65,6 +65,9 @@ or :c:func:`TIFFClientOpenExt`.
+ Furthermore, a **custom defined data structure** *user_data* for the
+ error handler can be given along.
+
++Please refer to :doc:`/functions/TIFFOpenOptions` for how to setup the
++application-specific handler introduced with libtiff 4.5.
++
+ Note
+ ----
+
+diff --git a/doc/functions/TIFFOpen.rst b/doc/functions/TIFFOpen.rst
+index db79d7b..adc474f 100644
+--- a/doc/functions/TIFFOpen.rst
++++ b/doc/functions/TIFFOpen.rst
+@@ -94,8 +94,9 @@ TIFF structure without closing the file handle and afterwards the
+ file should be closed using its file descriptor *fd*.
+
+ :c:func:`TIFFOpenExt` (added in libtiff 4.5) is like :c:func:`TIFFOpen`,
+-but options, such as re-entrant error and warning handlers may be passed
+-with the *opts* argument. The *opts* argument may be NULL.
++but options, such as re-entrant error and warning handlers and a limit in byte
++that libtiff internal memory allocation functions are allowed to request per call
++may be passed with the *opts* argument. The *opts* argument may be NULL.
+ Refer to :doc:`TIFFOpenOptions` for allocating and filling the *opts* argument
+ parameters. The allocated memory for :c:type:`TIFFOpenOptions`
+ can be released straight after successful execution of the related
+@@ -105,9 +106,7 @@ can be released straight after successful execution of the related
+ but opens a TIFF file with a Unicode filename.
+
+ :c:func:`TIFFFdOpenExt` (added in libtiff 4.5) is like :c:func:`TIFFFdOpen`,
+-but options, such as re-entrant error and warning handlers may be passed
+-with the *opts* argument. The *opts* argument may be NULL.
+-Refer to :doc:`TIFFOpenOptions` for filling the *opts* argument.
++but options argument *opts* like for :c:func:`TIFFOpenExt` can be passed.
+
+ :c:func:`TIFFSetFileName` sets the file name in the tif-structure
+ and returns the old file name.
+@@ -326,5 +325,5 @@ See also
+
+ :doc:`libtiff` (3tiff),
+ :doc:`TIFFClose` (3tiff),
+-:doc:`TIFFStrileQuery`,
+-:doc:`TIFFOpenOptions`
+\ No newline at end of file
++:doc:`TIFFStrileQuery` (3tiff),
++:doc:`TIFFOpenOptions`
+diff --git a/doc/functions/TIFFOpenOptions.rst b/doc/functions/TIFFOpenOptions.rst
+index 5c67566..23f2975 100644
+--- a/doc/functions/TIFFOpenOptions.rst
++++ b/doc/functions/TIFFOpenOptions.rst
+@@ -38,12 +38,17 @@ opaque structure and returns a :c:type:`TIFFOpenOptions` pointer.
+ :c:func:`TIFFOpenOptionsFree` releases the allocated memory for
+ :c:type:`TIFFOpenOptions`. The allocated memory for :c:type:`TIFFOpenOptions`
+ can be released straight after successful execution of the related
+-TIFF open"Ext" functions like :c:func:`TIFFOpenExt`.
++TIFFOpen"Ext" functions like :c:func:`TIFFOpenExt`.
+
+ :c:func:`TIFFOpenOptionsSetMaxSingleMemAlloc` sets parameter for the
+ maximum single memory limit in byte that ``libtiff`` internal memory allocation
+ functions are allowed to request per call.
+
++.. note::
++ However, the ``libtiff`` external functions :c:func:`_TIFFmalloc`
++ and :c:func:`_TIFFrealloc` **do not apply** this internal memory
++ allocation limit set by :c:func:`TIFFOpenOptionsSetMaxSingleMemAlloc`!
++
+ :c:func:`TIFFOpenOptionsSetErrorHandlerExtR` sets the function pointer to
+ an application-specific and per-TIFF handle (re-entrant) error handler.
+ Furthermore, a pointer to a **custom defined data structure** *errorhandler_user_data*
+@@ -55,6 +60,43 @@ The *errorhandler_user_data* argument may be NULL.
+ :c:func:`TIFFOpenOptionsSetErrorHandlerExtR` but for the warning handler,
+ which is invoked through :c:func:`TIFFWarningExtR`
+
++Example
++-------
++
++::
++
++ #include "tiffio.h"
++
++ typedef struct MyErrorHandlerUserDataStruct
++ {
++ /* ... any user data structure ... */
++ } MyErrorHandlerUserDataStruct;
++
++ static int myErrorHandler(TIFF *tiff, void *user_data, const char *module,
++ const char *fmt, va_list ap)
++ {
++ MyErrorHandlerUserDataStruct *errorhandler_user_data =
++ (MyErrorHandlerUserDataStruct *)user_data;
++ /*... code of myErrorHandler ...*/
++ return 1;
++ }
++
++
++ main()
++ {
++ tmsize_t limit = (256 * 1024 * 1024);
++ MyErrorHandlerUserDataStruct user_data = { /* ... any data ... */};
++
++ TIFFOpenOptions *opts = TIFFOpenOptionsAlloc();
++ TIFFOpenOptionsSetMaxSingleMemAlloc(opts, limit);
++ TIFFOpenOptionsSetErrorHandlerExtR(opts, myErrorHandler, &user_data);
++ TIFF *tif = TIFFOpenExt("foo.tif", "r", opts);
++ TIFFOpenOptionsFree(opts);
++ /* ... go on here ... */
++
++ TIFFClose(tif);
++ }
++
+ Note
+ ----
+
+diff --git a/doc/functions/TIFFStrileQuery.rst b/doc/functions/TIFFStrileQuery.rst
+index f8631af..7931fe4 100644
+--- a/doc/functions/TIFFStrileQuery.rst
++++ b/doc/functions/TIFFStrileQuery.rst
+@@ -66,6 +66,11 @@ Diagnostics
+ All error messages are directed to the :c:func:`TIFFErrorExtR` routine.
+ Likewise, warning messages are directed to the :c:func:`TIFFWarningExtR` routine.
+
++Note
++----
++
++This functionality was introduced with libtiff 4.1.
++
+ See also
+ --------
+
+diff --git a/doc/libtiff.rst b/doc/libtiff.rst
+index 6a0054c..d96a860 100644
+--- a/doc/libtiff.rst
++++ b/doc/libtiff.rst
+@@ -90,11 +90,15 @@ compatibility on machines with a segmented architecture.
+ :c:func:`realloc`, and :c:func:`free` routines in the C library.)
+
+ To deal with segmented pointer issues ``libtiff`` also provides
+-:c:func:`_TIFFmemcpy`, :c:func:`_TIFFmemset`, and :c:func:`_TIFFmemmove`
++:c:func:`_TIFFmemcpy`, :c:func:`_TIFFmemset`, and :c:func:`_TIFFmemcmp`
+ routines that mimic the equivalent ANSI C routines, but that are
+ intended for use with memory allocated through :c:func:`_TIFFmalloc`
+ and :c:func:`_TIFFrealloc`.
+
++With ``libtiff`` 4.5 a method was introduced to limit the internal
++memory allocation that functions are allowed to request per call
++(see :c:func:`TIFFOpenOptionsSetMaxSingleMemAlloc` and :c:func:`TIFFOpenExt`).
++
+ Error Handling
+ --------------
+
+@@ -106,6 +110,10 @@ routine that can be specified with a call to :c:func:`TIFFSetErrorHandler`.
+ Likewise warning messages are directed to a single handler routine
+ that can be specified with a call to :c:func:`TIFFSetWarningHandler`
+
++Further application-specific and per-TIFF handle (re-entrant) error handler
++and warning handler can be set. Please refer to :doc:`/functions/TIFFError`
++and :doc:`/functions/TIFFOpenOptions`.
++
+ Basic File Handling
+ -------------------
+
+@@ -139,7 +147,7 @@ a ``"w"`` argument:
+ main()
+ {
+ TIFF* tif = TIFFOpen("foo.tif", "w");
+- ... do stuff ...
++ /* ... do stuff ... */
+ TIFFClose(tif);
+ }
+
+@@ -157,6 +165,25 @@ to always call :c:func:`TIFFClose` or :c:func:`TIFFFlush` to flush any
+ buffered information to a file. Note that if you call :c:func:`TIFFClose`
+ you do not need to call :c:func:`TIFFFlush`.
+
++.. warning::
++
++ In order to prevent out-of-memory issues when opening a TIFF file
++ :c:func:`TIFFOpenExt` can be used and then the maximum single memory
++ limit in byte that ``libtiff`` internal memory allocation functions
++ are allowed to request per call can be set with
++ :c:func:`TIFFOpenOptionsSetMaxSingleMemAlloc`.
++
++Example
++
++::
++
++ tmsize_t limit = (256 * 1024 * 1024);
++ TIFFOpenOptions *opts = TIFFOpenOptionsAlloc();
++ TIFFOpenOptionsSetMaxSingleMemAlloc(opts, limit);
++ TIFF *tif = TIFFOpenExt("foo.tif", "w", opts);
++ TIFFOpenOptionsFree(opts);
++ /* ... go on here ... */
++
+ TIFF Directories
+ ----------------
+
+--
+2.40.0
+
diff --git a/meta/recipes-multimedia/libtiff/tiff/CVE-2023-52355-0002.patch b/meta/recipes-multimedia/libtiff/tiff/CVE-2023-52355-0002.patch
new file mode 100644
index 0000000000..19a1ef727a
--- /dev/null
+++ b/meta/recipes-multimedia/libtiff/tiff/CVE-2023-52355-0002.patch
@@ -0,0 +1,28 @@
+From 16ab4a205cfc938c32686e8d697d048fabf97ed4 Mon Sep 17 00:00:00 2001
+From: Timothy Lyanguzov <theta682@gmail.com>
+Date: Thu, 1 Feb 2024 11:19:06 +0000
+Subject: [PATCH] Fix typo.
+
+CVE: CVE-2023-52355
+Upstream-Status: Backport [https://gitlab.com/libtiff/libtiff/-/commit/16ab4a205cfc938c32686e8d697d048fabf97ed4]
+
+Signed-off-by: Yogita Urade <yogita.urade@windriver.com>
+---
+ doc/libtiff.rst | 2 +-
+ 1 file changed, 1 insertion(+), 1 deletion(-)
+
+diff --git a/doc/libtiff.rst b/doc/libtiff.rst
+index d96a860..4fedc3e 100644
+--- a/doc/libtiff.rst
++++ b/doc/libtiff.rst
+@@ -169,7 +169,7 @@ you do not need to call :c:func:`TIFFFlush`.
+
+ In order to prevent out-of-memory issues when opening a TIFF file
+ :c:func:`TIFFOpenExt` can be used and then the maximum single memory
+- limit in byte that ``libtiff`` internal memory allocation functions
++ limit in bytes that ``libtiff`` internal memory allocation functions
+ are allowed to request per call can be set with
+ :c:func:`TIFFOpenOptionsSetMaxSingleMemAlloc`.
+
+--
+2.40.0
diff --git a/meta/recipes-multimedia/libtiff/tiff/CVE-2023-52356.patch b/meta/recipes-multimedia/libtiff/tiff/CVE-2023-52356.patch
new file mode 100644
index 0000000000..75f5d8946a
--- /dev/null
+++ b/meta/recipes-multimedia/libtiff/tiff/CVE-2023-52356.patch
@@ -0,0 +1,49 @@
+From 51558511bdbbcffdce534db21dbaf5d54b31638a Mon Sep 17 00:00:00 2001
+From: Even Rouault <even.rouault@spatialys.com>
+Date: Thu, 1 Feb 2024 11:38:14 +0000
+Subject: [PATCH] TIFFReadRGBAStrip/TIFFReadRGBATile: add more validation of
+ col/row (fixes #622)
+
+CVE: CVE-2023-52356
+Upstream-Status: Backport [https://gitlab.com/libtiff/libtiff/-/commit/51558511bdbbcffdce534db21dbaf5d54b31638a]
+
+Signed-off-by: Yogita Urade <yogita.urade@windriver.com>
+---
+ libtiff/tif_getimage.c | 15 +++++++++++++++
+ 1 file changed, 15 insertions(+)
+
+diff --git a/libtiff/tif_getimage.c b/libtiff/tif_getimage.c
+index 41f7dfd..9cd6eee 100644
+--- a/libtiff/tif_getimage.c
++++ b/libtiff/tif_getimage.c
+@@ -3224,6 +3224,13 @@ int TIFFReadRGBAStripExt(TIFF *tif, uint32_t row, uint32_t *raster,
+ if (TIFFRGBAImageOK(tif, emsg) &&
+ TIFFRGBAImageBegin(&img, tif, stop_on_error, emsg))
+ {
++ if (row >= img.height)
++ {
++ TIFFErrorExtR(tif, TIFFFileName(tif),
++ "Invalid row passed to TIFFReadRGBAStrip().");
++ TIFFRGBAImageEnd(&img);
++ return (0);
++ }
+
+ img.row_offset = row;
+ img.col_offset = 0;
+@@ -3301,6 +3308,14 @@ int TIFFReadRGBATileExt(TIFF *tif, uint32_t col, uint32_t row, uint32_t *raster,
+ return (0);
+ }
+
++ if (col >= img.width || row >= img.height)
++ {
++ TIFFErrorExtR(tif, TIFFFileName(tif),
++ "Invalid row/col passed to TIFFReadRGBATile().");
++ TIFFRGBAImageEnd(&img);
++ return (0);
++ }
++
+ /*
+ * The TIFFRGBAImageGet() function doesn't allow us to get off the
+ * edge of the image, even to fill an otherwise valid tile. So we
+--
+2.40.0
diff --git a/meta/recipes-multimedia/libtiff/tiff/CVE-2023-6228.patch b/meta/recipes-multimedia/libtiff/tiff/CVE-2023-6228.patch
new file mode 100644
index 0000000000..2020508fdf
--- /dev/null
+++ b/meta/recipes-multimedia/libtiff/tiff/CVE-2023-6228.patch
@@ -0,0 +1,31 @@
+From 1e7d217a323eac701b134afc4ae39b6bdfdbc96a Mon Sep 17 00:00:00 2001
+From: Su_Laus <sulau@freenet.de>
+Date: Wed, 17 Jan 2024 06:57:08 +0000
+Subject: [PATCH] codec of input image is available, independently from codec
+ check of output image and return with error if not.
+
+Fixes #606.
+
+CVE: CVE-2023-6228
+Upstream-Status: Backport [https://gitlab.com/libtiff/libtiff/-/commit/1e7d217a323eac701b134afc4ae39b6bdfdbc96a]
+
+Signed-off-by: Yogita Urade <yogita.urade@windriver.com>
+---
+ tools/tiffcp.c | 2 ++
+ 1 file changed, 2 insertions(+)
+
+diff --git a/tools/tiffcp.c b/tools/tiffcp.c
+index aff0626..a4f7f6b 100644
+--- a/tools/tiffcp.c
++++ b/tools/tiffcp.c
+@@ -846,6 +846,8 @@ static int tiffcp(TIFF *in, TIFF *out)
+ if (!TIFFIsCODECConfigured(compression))
+ return FALSE;
+ TIFFGetFieldDefaulted(in, TIFFTAG_COMPRESSION, &input_compression);
++ if (!TIFFIsCODECConfigured(input_compression))
++ return FALSE;
+ TIFFGetFieldDefaulted(in, TIFFTAG_PHOTOMETRIC, &input_photometric);
+ if (input_compression == COMPRESSION_JPEG)
+ {
+--
+2.40.0
diff --git a/meta/recipes-multimedia/libtiff/tiff/CVE-2023-6277-Apply-1-suggestion-s-to-1-file-s.patch b/meta/recipes-multimedia/libtiff/tiff/CVE-2023-6277-Apply-1-suggestion-s-to-1-file-s.patch
new file mode 100644
index 0000000000..5d15dff1d9
--- /dev/null
+++ b/meta/recipes-multimedia/libtiff/tiff/CVE-2023-6277-Apply-1-suggestion-s-to-1-file-s.patch
@@ -0,0 +1,27 @@
+From e1640519208121f916da1772a5efb6ca28971b86 Mon Sep 17 00:00:00 2001
+From: Even Rouault <even.rouault@spatialys.com>
+Date: Tue, 31 Oct 2023 15:04:37 +0000
+Subject: [PATCH 3/3] Apply 1 suggestion(s) to 1 file(s)
+
+CVE: CVE-2023-6277
+Upstream-Status: Backport [https://gitlab.com/libtiff/libtiff/-/merge_requests/545]
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+---
+ libtiff/tif_dirread.c | 1 -
+ 1 file changed, 1 deletion(-)
+
+diff --git a/libtiff/tif_dirread.c b/libtiff/tif_dirread.c
+index fe8d6f8..58a4276 100644
+--- a/libtiff/tif_dirread.c
++++ b/libtiff/tif_dirread.c
+@@ -5306,7 +5306,6 @@ static int EstimateStripByteCounts(TIFF *tif, TIFFDirEntry *dir,
+ {
+ uint64_t space;
+ uint16_t n;
+- filesize = TIFFGetFileSize(tif);
+ if (!(tif->tif_flags & TIFF_BIGTIFF))
+ space = sizeof(TIFFHeaderClassic) + 2 + dircount * 12 + 4;
+ else
+--
+2.43.0
+
diff --git a/meta/recipes-multimedia/libtiff/tiff/CVE-2023-6277-At-image-reading-compare-data-size-of-some-tags-data-2.patch b/meta/recipes-multimedia/libtiff/tiff/CVE-2023-6277-At-image-reading-compare-data-size-of-some-tags-data-2.patch
new file mode 100644
index 0000000000..9fc8182fef
--- /dev/null
+++ b/meta/recipes-multimedia/libtiff/tiff/CVE-2023-6277-At-image-reading-compare-data-size-of-some-tags-data-2.patch
@@ -0,0 +1,36 @@
+From f500facf7723f1cae725dd288b2daad15e45131c Mon Sep 17 00:00:00 2001
+From: Su_Laus <sulau@freenet.de>
+Date: Mon, 30 Oct 2023 21:21:57 +0100
+Subject: [PATCH 2/3] At image reading, compare data size of some tags / data
+ structures (StripByteCounts, StripOffsets, StripArray, TIFF directory) with
+ file size to prevent provoked out-of-memory attacks.
+MIME-Version: 1.0
+Content-Type: text/plain; charset=UTF-8
+Content-Transfer-Encoding: 8bit
+
+See issue #614.
+
+Correct declaration of ‘filesize’ shadows a previous local.
+
+CVE: CVE-2023-6277
+Upstream-Status: Backport [https://gitlab.com/libtiff/libtiff/-/merge_requests/545]
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+---
+ libtiff/tif_dirread.c | 1 -
+ 1 file changed, 1 deletion(-)
+
+diff --git a/libtiff/tif_dirread.c b/libtiff/tif_dirread.c
+index c52d41f..fe8d6f8 100644
+--- a/libtiff/tif_dirread.c
++++ b/libtiff/tif_dirread.c
+@@ -5305,7 +5305,6 @@ static int EstimateStripByteCounts(TIFF *tif, TIFFDirEntry *dir,
+ if (td->td_compression != COMPRESSION_NONE)
+ {
+ uint64_t space;
+- uint64_t filesize;
+ uint16_t n;
+ filesize = TIFFGetFileSize(tif);
+ if (!(tif->tif_flags & TIFF_BIGTIFF))
+--
+2.43.0
+
diff --git a/meta/recipes-multimedia/libtiff/tiff/CVE-2023-6277-At-image-reading-compare-data-size-of-some-tags-data.patch b/meta/recipes-multimedia/libtiff/tiff/CVE-2023-6277-At-image-reading-compare-data-size-of-some-tags-data.patch
new file mode 100644
index 0000000000..d5854a9059
--- /dev/null
+++ b/meta/recipes-multimedia/libtiff/tiff/CVE-2023-6277-At-image-reading-compare-data-size-of-some-tags-data.patch
@@ -0,0 +1,162 @@
+From b33baa5d9c6aac8ce49b5180dd48e39697ab7a11 Mon Sep 17 00:00:00 2001
+From: Su_Laus <sulau@freenet.de>
+Date: Fri, 27 Oct 2023 22:11:10 +0200
+Subject: [PATCH 1/3] At image reading, compare data size of some tags / data
+ structures (StripByteCounts, StripOffsets, StripArray, TIFF directory) with
+ file size to prevent provoked out-of-memory attacks.
+
+See issue #614.
+
+CVE: CVE-2023-6277
+Upstream-Status: Backport [https://gitlab.com/libtiff/libtiff/-/merge_requests/545]
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+---
+ libtiff/tif_dirread.c | 90 +++++++++++++++++++++++++++++++++++++++++++
+ 1 file changed, 90 insertions(+)
+
+diff --git a/libtiff/tif_dirread.c b/libtiff/tif_dirread.c
+index 2c49dc6..c52d41f 100644
+--- a/libtiff/tif_dirread.c
++++ b/libtiff/tif_dirread.c
+@@ -1308,6 +1308,21 @@ TIFFReadDirEntryArrayWithLimit(TIFF *tif, TIFFDirEntry *direntry,
+ datasize = (*count) * typesize;
+ assert((tmsize_t)datasize > 0);
+
++ /* Before allocating a huge amount of memory for corrupted files, check if
++ * size of requested memory is not greater than file size.
++ */
++ uint64_t filesize = TIFFGetFileSize(tif);
++ if (datasize > filesize)
++ {
++ TIFFWarningExtR(tif, "ReadDirEntryArray",
++ "Requested memory size for tag %d (0x%x) %" PRIu32
++ " is greather than filesize %" PRIu64
++ ". Memory not allocated, tag not read",
++ direntry->tdir_tag, direntry->tdir_tag, datasize,
++ filesize);
++ return (TIFFReadDirEntryErrAlloc);
++ }
++
+ if (isMapped(tif) && datasize > (uint64_t)tif->tif_size)
+ return TIFFReadDirEntryErrIo;
+
+@@ -5266,6 +5281,20 @@ static int EstimateStripByteCounts(TIFF *tif, TIFFDirEntry *dir,
+ if (!_TIFFFillStrilesInternal(tif, 0))
+ return -1;
+
++ /* Before allocating a huge amount of memory for corrupted files, check if
++ * size of requested memory is not greater than file size. */
++ uint64_t filesize = TIFFGetFileSize(tif);
++ uint64_t allocsize = (uint64_t)td->td_nstrips * sizeof(uint64_t);
++ if (allocsize > filesize)
++ {
++ TIFFWarningExtR(tif, module,
++ "Requested memory size for StripByteCounts of %" PRIu64
++ " is greather than filesize %" PRIu64
++ ". Memory not allocated",
++ allocsize, filesize);
++ return -1;
++ }
++
+ if (td->td_stripbytecount_p)
+ _TIFFfreeExt(tif, td->td_stripbytecount_p);
+ td->td_stripbytecount_p = (uint64_t *)_TIFFCheckMalloc(
+@@ -5807,6 +5836,20 @@ static uint16_t TIFFFetchDirectory(TIFF *tif, uint64_t diroff,
+ dircount16 = (uint16_t)dircount64;
+ dirsize = 20;
+ }
++ /* Before allocating a huge amount of memory for corrupted files, check
++ * if size of requested memory is not greater than file size. */
++ uint64_t filesize = TIFFGetFileSize(tif);
++ uint64_t allocsize = (uint64_t)dircount16 * dirsize;
++ if (allocsize > filesize)
++ {
++ TIFFWarningExtR(
++ tif, module,
++ "Requested memory size for TIFF directory of %" PRIu64
++ " is greather than filesize %" PRIu64
++ ". Memory not allocated, TIFF directory not read",
++ allocsize, filesize);
++ return 0;
++ }
+ origdir = _TIFFCheckMalloc(tif, dircount16, dirsize,
+ "to read TIFF directory");
+ if (origdir == NULL)
+@@ -5921,6 +5964,20 @@ static uint16_t TIFFFetchDirectory(TIFF *tif, uint64_t diroff,
+ "directories not supported");
+ return 0;
+ }
++ /* Before allocating a huge amount of memory for corrupted files, check
++ * if size of requested memory is not greater than file size. */
++ uint64_t filesize = TIFFGetFileSize(tif);
++ uint64_t allocsize = (uint64_t)dircount16 * dirsize;
++ if (allocsize > filesize)
++ {
++ TIFFWarningExtR(
++ tif, module,
++ "Requested memory size for TIFF directory of %" PRIu64
++ " is greather than filesize %" PRIu64
++ ". Memory not allocated, TIFF directory not read",
++ allocsize, filesize);
++ return 0;
++ }
+ origdir = _TIFFCheckMalloc(tif, dircount16, dirsize,
+ "to read TIFF directory");
+ if (origdir == NULL)
+@@ -5968,6 +6025,8 @@ static uint16_t TIFFFetchDirectory(TIFF *tif, uint64_t diroff,
+ }
+ }
+ }
++ /* No check against filesize needed here because "dir" should have same size
++ * than "origdir" checked above. */
+ dir = (TIFFDirEntry *)_TIFFCheckMalloc(
+ tif, dircount16, sizeof(TIFFDirEntry), "to read TIFF directory");
+ if (dir == 0)
+@@ -7164,6 +7223,20 @@ static int TIFFFetchStripThing(TIFF *tif, TIFFDirEntry *dir, uint32_t nstrips,
+ return (0);
+ }
+
++ /* Before allocating a huge amount of memory for corrupted files, check
++ * if size of requested memory is not greater than file size. */
++ uint64_t filesize = TIFFGetFileSize(tif);
++ uint64_t allocsize = (uint64_t)nstrips * sizeof(uint64_t);
++ if (allocsize > filesize)
++ {
++ TIFFWarningExtR(tif, module,
++ "Requested memory size for StripArray of %" PRIu64
++ " is greather than filesize %" PRIu64
++ ". Memory not allocated",
++ allocsize, filesize);
++ _TIFFfreeExt(tif, data);
++ return (0);
++ }
+ resizeddata = (uint64_t *)_TIFFCheckMalloc(
+ tif, nstrips, sizeof(uint64_t), "for strip array");
+ if (resizeddata == 0)
+@@ -7263,6 +7336,23 @@ static void allocChoppedUpStripArrays(TIFF *tif, uint32_t nstrips,
+ }
+ bytecount = last_offset + last_bytecount - offset;
+
++ /* Before allocating a huge amount of memory for corrupted files, check if
++ * size of StripByteCount and StripOffset tags is not greater than
++ * file size.
++ */
++ uint64_t allocsize = (uint64_t)nstrips * sizeof(uint64_t) * 2;
++ uint64_t filesize = TIFFGetFileSize(tif);
++ if (allocsize > filesize)
++ {
++ TIFFWarningExtR(tif, "allocChoppedUpStripArrays",
++ "Requested memory size for StripByteCount and "
++ "StripOffsets %" PRIu64
++ " is greather than filesize %" PRIu64
++ ". Memory not allocated",
++ allocsize, filesize);
++ return;
++ }
++
+ newcounts =
+ (uint64_t *)_TIFFCheckMalloc(tif, nstrips, sizeof(uint64_t),
+ "for chopped \"StripByteCounts\" array");
+--
+2.43.0
+
diff --git a/meta/recipes-multimedia/libtiff/tiff/eecb0712f4c3a5b449f70c57988260a667ddbdef.patch b/meta/recipes-multimedia/libtiff/tiff/eecb0712f4c3a5b449f70c57988260a667ddbdef.patch
deleted file mode 100644
index 74f9649fdf..0000000000
--- a/meta/recipes-multimedia/libtiff/tiff/eecb0712f4c3a5b449f70c57988260a667ddbdef.patch
+++ /dev/null
@@ -1,32 +0,0 @@
-From eecb0712f4c3a5b449f70c57988260a667ddbdef Mon Sep 17 00:00:00 2001
-From: Even Rouault <even.rouault@spatialys.com>
-Date: Sun, 6 Feb 2022 13:08:38 +0100
-Subject: [PATCH] TIFFFetchStripThing(): avoid calling memcpy() with a null
- source pointer and size of zero (fixes #362)
-
-Upstream-Status: Backport
-CVE: CVE-2022-0561
-
----
- libtiff/tif_dirread.c | 5 +++--
- 1 file changed, 3 insertions(+), 2 deletions(-)
-
-diff --git a/libtiff/tif_dirread.c b/libtiff/tif_dirread.c
-index 23194ced..50ebf8ac 100644
---- a/libtiff/tif_dirread.c
-+++ b/libtiff/tif_dirread.c
-@@ -5777,8 +5777,9 @@ TIFFFetchStripThing(TIFF* tif, TIFFDirEntry* dir, uint32_t nstrips, uint64_t** l
- _TIFFfree(data);
- return(0);
- }
-- _TIFFmemcpy(resizeddata,data, (uint32_t)dir->tdir_count * sizeof(uint64_t));
-- _TIFFmemset(resizeddata+(uint32_t)dir->tdir_count, 0, (nstrips - (uint32_t)dir->tdir_count) * sizeof(uint64_t));
-+ if( dir->tdir_count )
-+ _TIFFmemcpy(resizeddata,data, (uint32_t)dir->tdir_count * sizeof(uint64_t));
-+ _TIFFmemset(resizeddata+(uint32_t)dir->tdir_count, 0, (nstrips - (uint32_t)dir->tdir_count) * sizeof(uint64_t));
- _TIFFfree(data);
- data=resizeddata;
- }
---
-GitLab
-
diff --git a/meta/recipes-multimedia/libtiff/tiff_4.3.0.bb b/meta/recipes-multimedia/libtiff/tiff_4.3.0.bb
deleted file mode 100644
index 9c9108a6af..0000000000
--- a/meta/recipes-multimedia/libtiff/tiff_4.3.0.bb
+++ /dev/null
@@ -1,67 +0,0 @@
-SUMMARY = "Provides support for the Tag Image File Format (TIFF)"
-DESCRIPTION = "Library provides support for the Tag Image File Format \
-(TIFF), a widely used format for storing image data. This library \
-provide means to easily access and create TIFF image files."
-HOMEPAGE = "http://www.libtiff.org/"
-LICENSE = "BSD-2-Clause"
-LIC_FILES_CHKSUM = "file://COPYRIGHT;md5=34da3db46fab7501992f9615d7e158cf"
-
-CVE_PRODUCT = "libtiff"
-
-SRC_URI = "http://download.osgeo.org/libtiff/tiff-${PV}.tar.gz \
- file://0001-tiffset-fix-global-buffer-overflow-for-ASCII-tags-wh.patch \
- file://561599c99f987dc32ae110370cfdd7df7975586b.patch \
- file://eecb0712f4c3a5b449f70c57988260a667ddbdef.patch \
- file://0001-tif_jbig.c-fix-crash-when-reading-a-file-with-multip.patch \
- file://0002-tiffcrop-fix-issue-380-and-382-heap-buffer-overflow-.patch \
- file://0003-add-checks-for-return-value-of-limitMalloc-392.patch \
- file://0004-TIFFFetchNormalTag-avoid-calling-memcpy-with-a-null-.patch \
- file://0005-fix-the-FPE-in-tiffcrop-393.patch \
- file://0006-fix-heap-buffer-overflow-in-tiffcp-278.patch \
- "
-
-SRC_URI[sha256sum] = "0e46e5acb087ce7d1ac53cf4f56a09b221537fc86dfc5daaad1c2e89e1b37ac8"
-
-# exclude betas
-UPSTREAM_CHECK_REGEX = "tiff-(?P<pver>\d+(\.\d+)+).tar"
-
-# Tested with check from https://security-tracker.debian.org/tracker/CVE-2015-7313
-# and 4.3.0 doesn't have the issue
-CVE_CHECK_IGNORE += "CVE-2015-7313"
-
-inherit autotools multilib_header
-
-CACHED_CONFIGUREVARS = "ax_cv_check_gl_libgl=no"
-
-PACKAGECONFIG ?= "cxx jpeg zlib lzma \
- strip-chopping extrasample-as-alpha check-ycbcr-subsampling"
-
-PACKAGECONFIG[cxx] = "--enable-cxx,--disable-cxx,,"
-PACKAGECONFIG[jpeg] = "--enable-jpeg,--disable-jpeg,jpeg,"
-PACKAGECONFIG[zlib] = "--enable-zlib,--disable-zlib,zlib,"
-PACKAGECONFIG[lzma] = "--enable-lzma,--disable-lzma,xz,"
-
-# Convert single-strip uncompressed images to multiple strips of specified
-# size (default: 8192) to reduce memory usage
-PACKAGECONFIG[strip-chopping] = "--enable-strip-chopping,--disable-strip-chopping,,"
-
-# Treat a fourth sample with no EXTRASAMPLE_ value as being ASSOCALPHA
-PACKAGECONFIG[extrasample-as-alpha] = "--enable-extrasample-as-alpha,--disable-extrasample-as-alpha,,"
-
-# Control picking up YCbCr subsample info. Disable to support files lacking
-# the tag
-PACKAGECONFIG[check-ycbcr-subsampling] = "--enable-check-ycbcr-subsampling,--disable-check-ycbcr-subsampling,,"
-
-# Support a mechanism allowing reading large strips (usually one strip files)
-# in chunks when using TIFFReadScanline. Experimental 4.0+ feature
-PACKAGECONFIG[chunky-strip-read] = "--enable-chunky-strip-read,--disable-chunky-strip-read,,"
-
-PACKAGES =+ "tiffxx tiff-utils"
-FILES:tiffxx = "${libdir}/libtiffxx.so.*"
-FILES:tiff-utils = "${bindir}/*"
-
-do_install:append() {
- oe_multilib_header tiffconf.h
-}
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-multimedia/libtiff/tiff_4.6.0.bb b/meta/recipes-multimedia/libtiff/tiff_4.6.0.bb
new file mode 100644
index 0000000000..d42ea6a6e5
--- /dev/null
+++ b/meta/recipes-multimedia/libtiff/tiff_4.6.0.bb
@@ -0,0 +1,68 @@
+SUMMARY = "Provides support for the Tag Image File Format (TIFF)"
+DESCRIPTION = "Library provides support for the Tag Image File Format \
+(TIFF), a widely used format for storing image data. This library \
+provide means to easily access and create TIFF image files."
+HOMEPAGE = "http://www.libtiff.org/"
+LICENSE = "BSD-2-Clause"
+LIC_FILES_CHKSUM = "file://LICENSE.md;md5=a3e32d664d6db1386b4689c8121531c3"
+
+CVE_PRODUCT = "libtiff"
+
+SRC_URI = "http://download.osgeo.org/libtiff/tiff-${PV}.tar.gz \
+ file://CVE-2023-6277-At-image-reading-compare-data-size-of-some-tags-data.patch \
+ file://CVE-2023-6277-At-image-reading-compare-data-size-of-some-tags-data-2.patch \
+ file://CVE-2023-6277-Apply-1-suggestion-s-to-1-file-s.patch \
+ file://CVE-2023-6228.patch \
+ file://CVE-2023-52355-0001.patch \
+ file://CVE-2023-52355-0002.patch \
+ file://CVE-2023-52356.patch \
+ "
+
+SRC_URI[sha256sum] = "88b3979e6d5c7e32b50d7ec72fb15af724f6ab2cbf7e10880c360a77e4b5d99a"
+
+# exclude betas
+UPSTREAM_CHECK_REGEX = "tiff-(?P<pver>\d+(\.\d+)+).tar"
+
+CVE_STATUS[CVE-2015-7313] = "fixed-version: Tested with check from https://security-tracker.debian.org/tracker/CVE-2015-7313 and already 4.3.0 doesn't have the issue"
+CVE_STATUS[CVE-2023-3164] = "cpe-incorrect: Issue only affects the tiffcrop tool not compiled by default since 4.6.0"
+
+inherit autotools multilib_header
+
+CACHED_CONFIGUREVARS = "ax_cv_check_gl_libgl=no"
+
+PACKAGECONFIG ?= "cxx jpeg zlib lzma \
+ strip-chopping extrasample-as-alpha check-ycbcr-subsampling"
+
+PACKAGECONFIG[cxx] = "--enable-cxx,--disable-cxx,,"
+PACKAGECONFIG[jbig] = "--enable-jbig,--disable-jbig,jbig,"
+PACKAGECONFIG[jpeg] = "--enable-jpeg,--disable-jpeg,jpeg,"
+PACKAGECONFIG[zlib] = "--enable-zlib,--disable-zlib,zlib,"
+PACKAGECONFIG[lzma] = "--enable-lzma,--disable-lzma,xz,"
+PACKAGECONFIG[webp] = "--enable-webp,--disable-webp,libwebp,"
+PACKAGECONFIG[zstd] = "--enable-zstd,--disable-zstd,zstd,"
+PACKAGECONFIG[libdeflate] = "--enable-libdeflate,--disable-libdeflate,libdeflate,"
+
+# Convert single-strip uncompressed images to multiple strips of specified
+# size (default: 8192) to reduce memory usage
+PACKAGECONFIG[strip-chopping] = "--enable-strip-chopping,--disable-strip-chopping,,"
+
+# Treat a fourth sample with no EXTRASAMPLE_ value as being ASSOCALPHA
+PACKAGECONFIG[extrasample-as-alpha] = "--enable-extrasample-as-alpha,--disable-extrasample-as-alpha,,"
+
+# Control picking up YCbCr subsample info. Disable to support files lacking
+# the tag
+PACKAGECONFIG[check-ycbcr-subsampling] = "--enable-check-ycbcr-subsampling,--disable-check-ycbcr-subsampling,,"
+
+# Support a mechanism allowing reading large strips (usually one strip files)
+# in chunks when using TIFFReadScanline. Experimental 4.0+ feature
+PACKAGECONFIG[chunky-strip-read] = "--enable-chunky-strip-read,--disable-chunky-strip-read,,"
+
+PACKAGES =+ "tiffxx tiff-utils"
+FILES:tiffxx = "${libdir}/libtiffxx.so.*"
+FILES:tiff-utils = "${bindir}/*"
+
+do_install:append() {
+ oe_multilib_header tiffconf.h
+}
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-multimedia/mpg123/mpg123_1.29.3.bb b/meta/recipes-multimedia/mpg123/mpg123_1.29.3.bb
deleted file mode 100644
index 0baa7aa4a1..0000000000
--- a/meta/recipes-multimedia/mpg123/mpg123_1.29.3.bb
+++ /dev/null
@@ -1,52 +0,0 @@
-SUMMARY = "Audio decoder for MPEG-1 Layer 1/2/3"
-DESCRIPTION = "The core of mpg123 is an MPEG-1 Layer 1/2/3 decoding library, which can be used by other programs. \
-mpg123 also comes with a command-line tool which can playback using ALSA, PulseAudio, OSS, and several other APIs, \
-and also can write the decoded audio to WAV."
-HOMEPAGE = "http://mpg123.de/"
-BUGTRACKER = "http://sourceforge.net/p/mpg123/bugs/"
-SECTION = "multimedia"
-
-LICENSE = "LGPL-2.1-only"
-LIC_FILES_CHKSUM = "file://COPYING;md5=e7b9c15fcfb986abb4cc5e8400a24169"
-
-SRC_URI = "https://www.mpg123.de/download/${BP}.tar.bz2"
-SRC_URI[sha256sum] = "963885d8cc77262f28b77187c7d189e32195e64244de2530b798ddf32183e847"
-
-UPSTREAM_CHECK_REGEX = "mpg123-(?P<pver>\d+(\.\d+)+)\.tar"
-
-inherit autotools pkgconfig
-
-# The options should be mutually exclusive for configuration script.
-# If both alsa and pulseaudio are specified (as in the default distro features)
-# pulseaudio takes precedence.
-PACKAGECONFIG_ALSA = "${@bb.utils.filter('DISTRO_FEATURES', 'alsa', d)}"
-PACKAGECONFIG ??= "${@bb.utils.contains('DISTRO_FEATURES', 'pulseaudio', 'pulseaudio', '${PACKAGECONFIG_ALSA}', d)}"
-
-PACKAGECONFIG[alsa] = "--with-default-audio=alsa,,alsa-lib"
-PACKAGECONFIG[esd] = ",,esound"
-PACKAGECONFIG[jack] = ",,jack"
-PACKAGECONFIG[openal] = ",,openal-soft"
-PACKAGECONFIG[portaudio] = ",,portaudio-v19"
-PACKAGECONFIG[pulseaudio] = "--with-default-audio=pulse,,pulseaudio"
-PACKAGECONFIG[sdl] = ",,libsdl2"
-
-# Following are possible sound output modules:
-# alsa arts coreaudio dummy esd jack nas openal os2 oss portaudio pulse sdl sndio sun tinyalsa win32 win32_wasapi
-AUDIOMODS += "${@bb.utils.filter('PACKAGECONFIG', 'alsa esd jack openal portaudio sdl', d)}"
-AUDIOMODS += "${@bb.utils.contains('PACKAGECONFIG', 'pulseaudio', 'pulse', '', d)}"
-
-EXTRA_OECONF = " \
- --enable-shared \
- --with-audio='${AUDIOMODS}' \
- ${@bb.utils.contains('TUNE_FEATURES', 'neon', '--with-cpu=neon', '', d)} \
- ${@bb.utils.contains('TUNE_FEATURES', 'altivec', '--with-cpu=altivec', '', d)} \
- ${@bb.utils.contains('TARGET_FPU', 'soft', '--with-cpu=generic_nofpu', '', d)} \
-"
-# Fails to build with thumb-1 (qemuarm)
-#| {standard input}: Assembler messages:
-#| {standard input}:47: Error: selected processor does not support Thumb mode `smull r5,r6,r7,r4'
-#| {standard input}:48: Error: shifts in CMP/MOV instructions are only supported in unified syntax -- `mov r5,r5,lsr#24'
-#...
-#| make[3]: *** [equalizer.lo] Error 1
-ARM_INSTRUCTION_SET:armv4 = "arm"
-ARM_INSTRUCTION_SET:armv5 = "arm"
diff --git a/meta/recipes-multimedia/mpg123/mpg123_1.32.6.bb b/meta/recipes-multimedia/mpg123/mpg123_1.32.6.bb
new file mode 100644
index 0000000000..f7786e8588
--- /dev/null
+++ b/meta/recipes-multimedia/mpg123/mpg123_1.32.6.bb
@@ -0,0 +1,55 @@
+SUMMARY = "Audio decoder for MPEG-1 Layer 1/2/3"
+DESCRIPTION = "The core of mpg123 is an MPEG-1 Layer 1/2/3 decoding library, which can be used by other programs. \
+mpg123 also comes with a command-line tool which can playback using ALSA, PulseAudio, OSS, and several other APIs, \
+and also can write the decoded audio to WAV."
+HOMEPAGE = "http://mpg123.de/"
+BUGTRACKER = "http://sourceforge.net/p/mpg123/bugs/"
+SECTION = "multimedia"
+
+LICENSE = "LGPL-2.1-only"
+LIC_FILES_CHKSUM = "file://COPYING;md5=e7b9c15fcfb986abb4cc5e8400a24169"
+
+SRC_URI = "https://www.mpg123.de/download/${BP}.tar.bz2"
+SRC_URI[sha256sum] = "ccdd1d0abc31d73d8b435fc658c79049d0a905b30669b6a42a03ad169dc609e6"
+
+UPSTREAM_CHECK_REGEX = "mpg123-(?P<pver>\d+(\.\d+)+)\.tar"
+
+inherit autotools pkgconfig
+
+# The options should be mutually exclusive for configuration script.
+# If both alsa and pulseaudio are specified (as in the default distro features)
+# pulseaudio takes precedence.
+PACKAGECONFIG_ALSA = "${@bb.utils.filter('DISTRO_FEATURES', 'alsa', d)}"
+PACKAGECONFIG ??= "${@bb.utils.contains('DISTRO_FEATURES', 'pulseaudio', 'pulseaudio', '${PACKAGECONFIG_ALSA}', d)}"
+
+PACKAGECONFIG[alsa] = "--with-default-audio=alsa,,alsa-lib"
+PACKAGECONFIG[esd] = ",,esound"
+PACKAGECONFIG[jack] = ",,jack"
+PACKAGECONFIG[openal] = ",,openal-soft"
+PACKAGECONFIG[portaudio] = ",,portaudio-v19"
+PACKAGECONFIG[pulseaudio] = "--with-default-audio=pulse,,pulseaudio"
+PACKAGECONFIG[sdl] = ",,libsdl2"
+
+# Following are possible sound output modules:
+# alsa arts coreaudio dummy esd jack nas openal os2 oss portaudio pulse sdl sndio sun tinyalsa win32 win32_wasapi
+AUDIOMODS += "${@bb.utils.filter('PACKAGECONFIG', 'alsa esd jack openal portaudio sdl', d)}"
+AUDIOMODS += "${@bb.utils.contains('PACKAGECONFIG', 'pulseaudio', 'pulse', '', d)}"
+
+CACHED_CONFIGUREVARS:libc-musl = "ac_cv_sys_file_offset_bits=no"
+
+EXTRA_OECONF = " \
+ --enable-shared \
+ --enable-largefile \
+ --with-audio='${AUDIOMODS}' \
+ ${@bb.utils.contains('TUNE_FEATURES', 'neon', '--with-cpu=neon', '', d)} \
+ ${@bb.utils.contains('TUNE_FEATURES', 'altivec', '--with-cpu=altivec', '', d)} \
+ ${@bb.utils.contains('TARGET_FPU', 'soft', '--with-cpu=generic_nofpu', '', d)} \
+"
+# Fails to build with thumb-1 (qemuarm)
+#| {standard input}: Assembler messages:
+#| {standard input}:47: Error: selected processor does not support Thumb mode `smull r5,r6,r7,r4'
+#| {standard input}:48: Error: shifts in CMP/MOV instructions are only supported in unified syntax -- `mov r5,r5,lsr#24'
+#...
+#| make[3]: *** [equalizer.lo] Error 1
+ARM_INSTRUCTION_SET:armv4 = "arm"
+ARM_INSTRUCTION_SET:armv5 = "arm"
diff --git a/meta/recipes-multimedia/pulseaudio/pulseaudio.inc b/meta/recipes-multimedia/pulseaudio/pulseaudio.inc
index 821ce7d1df..ae16056d24 100644
--- a/meta/recipes-multimedia/pulseaudio/pulseaudio.inc
+++ b/meta/recipes-multimedia/pulseaudio/pulseaudio.inc
@@ -2,7 +2,6 @@ SUMMARY = "Sound server for Linux and Unix-like operating systems"
DESCRIPTION = "A general purpose sound server intended to run as a middleware \
between your applications and your hardware devices, either using ALSA or OSS."
HOMEPAGE = "http://www.pulseaudio.org"
-AUTHOR = "Lennart Poettering"
SECTION = "libs/multimedia"
# Most of PulseAudio code is under LGPL-2.1-or-later. There are a few
@@ -61,7 +60,7 @@ LIC_FILES_CHKSUM = "file://LICENSE;md5=0e5cd938de1a7a53ea5adac38cc10c39 \
"
# libtool is needed for libltdl, used in module loading.
-DEPENDS = "libatomic-ops libsndfile1 libtool"
+DEPENDS = "m4-native libatomic-ops libsndfile1 libtool"
# optional
DEPENDS += "udev alsa-lib glib-2.0"
DEPENDS += "speexdsp libxml-parser-perl-native libcap"
@@ -188,6 +187,7 @@ FILES:${PN}-dev += "${libdir}/pulse-${PV}/modules/*.la ${datadir}/vala"
FILES:${PN}-bin += "${sysconfdir}/default/volatiles/04_pulse"
FILES:${PN}-pa-info = "${bindir}/pa-info"
FILES:${PN}-server = "${bindir}/pulseaudio ${bindir}/start-* ${sysconfdir} ${bindir}/pactl */udev/rules.d/*.rules */*/udev/rules.d/*.rules ${systemd_user_unitdir}/*"
+FILES:${PN}-server += "${datadir}/dbus-1/system.d/pulseaudio-system.conf"
#SYSTEMD_PACKAGES = "${PN}-server"
SYSTEMD_SERVICE:${PN}-server = "pulseaudio.service"
@@ -213,7 +213,7 @@ pkg_postinst:${PN}-server() {
}
python populate_packages:prepend() {
- plugindir = d.expand('${libdir}/pulse-${PV}/modules/')
+ plugindir = d.expand('${libdir}/pulseaudio/modules/')
do_split_packages(d, plugindir, r'^module-(.*)\.so$', '${PN}-module-%s', 'PulseAudio module for %s', extra_depends='', prepend=True)
do_split_packages(d, plugindir, r'^lib(.*)\.so$', '${PN}-lib-%s', 'PulseAudio library for %s', extra_depends='', prepend=True)
}
@@ -258,7 +258,7 @@ RDEPENDS:pulseaudio-server += "${@bb.utils.contains('DISTRO_FEATURES', 'pulseaud
# pulseaudio-module-console-kit is built whenever dbus is enabled by PACKAGECONFIG
# but consolekit depends on libx11 and is available only for DISTRO with x11 in DISTRO_FEATURES
-RDEPENDS:pulseaudio-module-console-kit =+ "${@bb.utils.contains('DISTRO_FEATURES', 'x11', 'consolekit', '', d)}"
+RDEPENDS:pulseaudio-module-console-kit =+ "${@bb.utils.contains('DISTRO_FEATURES', 'sysvinit x11', 'consolekit', '', d)}"
RDEPENDS:pulseaudio-misc += "pulseaudio-module-cli-protocol-unix"
FILES:${PN}-module-alsa-card += "${datadir}/pulseaudio/alsa-mixer"
diff --git a/meta/recipes-multimedia/pulseaudio/pulseaudio/0001-meson-Check-for-__get_cpuid.patch b/meta/recipes-multimedia/pulseaudio/pulseaudio/0001-meson-Check-for-__get_cpuid.patch
deleted file mode 100644
index c9d8abcbf2..0000000000
--- a/meta/recipes-multimedia/pulseaudio/pulseaudio/0001-meson-Check-for-__get_cpuid.patch
+++ /dev/null
@@ -1,82 +0,0 @@
-From 9d0dc8aedd08d77797f90fa6075a59613f18bf0d Mon Sep 17 00:00:00 2001
-From: Khem Raj <raj.khem@gmail.com>
-Date: Sun, 20 Dec 2020 07:56:07 -0800
-Subject: [PATCH] meson: Check for __get_cpuid
-
-checking for presence of cpuid.h header alone is not sufficient in some case to use
-cpuid related functions. e.g. when using clang which is built for
-multiple targets will have cpuid.h header as part of compiler headers in
-distribution but one maybe compiling pulseaudion for non-x86 target. The
-current check in meson succeeds and then compile fails later because
-cpuid.h is x86-specific header. Therefore checking for symbol that is
-needed makes this robust, so even if header exist it will try to ensure
-the given symbol can be used
-
-Fixes
-src/pulsecore/core-util.c:113:
-| /mnt/b/yoe/master/build/tmp/work/riscv64-yoe-linux/pulseaudio/14.0-r0/recipe-sysroot-native/usr/lib/clang/11.0.1/include/cpuid.h:11:2: error: this header is for x86 only
-| #error this header is for x86 only
-| ^
-
-Upstream-Status: Pending
-
-Signed-off-by: Khem Raj <raj.khem@gmail.com>
-Cc: Tanu Kaskinen <tanuk@iki.fi>
----
- meson.build | 5 ++++-
- src/pulsecore/core-util.c | 2 +-
- src/pulsecore/cpu-x86.c | 2 +-
- 3 files changed, 6 insertions(+), 3 deletions(-)
-
-diff --git a/meson.build b/meson.build
-index 2589627..5f5127e 100644
---- a/meson.build
-+++ b/meson.build
-@@ -185,7 +185,6 @@ endif
- check_headers = [
- 'arpa/inet.h',
- 'byteswap.h',
-- 'cpuid.h',
- 'dlfcn.h',
- 'execinfo.h',
- 'grp.h',
-@@ -243,6 +242,10 @@ if cc.has_header_symbol('pthread.h', 'PTHREAD_PRIO_INHERIT')
- cdata.set('HAVE_PTHREAD_PRIO_INHERIT', 1)
- endif
-
-+if cc.has_header_symbol('cpuid.h', '__get_cpuid')
-+ cdata.set('HAVE_GET_CPUID', 1)
-+endif
-+
- # Functions
-
- check_functions = [
-diff --git a/src/pulsecore/core-util.c b/src/pulsecore/core-util.c
-index 601b1d1..6f34e7c 100644
---- a/src/pulsecore/core-util.c
-+++ b/src/pulsecore/core-util.c
-@@ -109,7 +109,7 @@
- #include <sys/personality.h>
- #endif
-
--#ifdef HAVE_CPUID_H
-+#ifdef HAVE_GET_CPUID
- #include <cpuid.h>
- #endif
-
-diff --git a/src/pulsecore/cpu-x86.c b/src/pulsecore/cpu-x86.c
-index 4e59e14..86595d4 100644
---- a/src/pulsecore/cpu-x86.c
-+++ b/src/pulsecore/cpu-x86.c
-@@ -24,7 +24,7 @@
-
- #include <stdint.h>
-
--#ifdef HAVE_CPUID_H
-+#ifdef HAVE_GET_CPUID
- #include <cpuid.h>
- #endif
-
---
-2.29.2
-
diff --git a/meta/recipes-multimedia/pulseaudio/pulseaudio_15.0.bb b/meta/recipes-multimedia/pulseaudio/pulseaudio_15.0.bb
deleted file mode 100644
index 239d31810b..0000000000
--- a/meta/recipes-multimedia/pulseaudio/pulseaudio_15.0.bb
+++ /dev/null
@@ -1,11 +0,0 @@
-require pulseaudio.inc
-
-SRC_URI = "http://freedesktop.org/software/pulseaudio/releases/${BP}.tar.xz \
- file://0001-client-conf-Add-allow-autospawn-for-root.patch \
- file://0002-do-not-display-CLFAGS-to-improve-reproducibility-bui.patch \
- file://0001-meson-Check-for-__get_cpuid.patch \
- file://volatiles.04_pulse \
- file://0001-doxygen-meson.build-remove-dependency-on-doxygen-bin.patch \
- "
-SRC_URI[sha256sum] = "a40b887a3ba98cc26976eb11bdb6613988f145b19024d1b6555c6a03c9cba1a0"
-UPSTREAM_CHECK_REGEX = "pulseaudio-(?P<pver>\d+(\.(?!99)\d+)+)\.tar"
diff --git a/meta/recipes-multimedia/pulseaudio/pulseaudio_17.0.bb b/meta/recipes-multimedia/pulseaudio/pulseaudio_17.0.bb
new file mode 100644
index 0000000000..54c79b4097
--- /dev/null
+++ b/meta/recipes-multimedia/pulseaudio/pulseaudio_17.0.bb
@@ -0,0 +1,10 @@
+require pulseaudio.inc
+
+SRC_URI = "http://freedesktop.org/software/pulseaudio/releases/${BP}.tar.xz \
+ file://0001-client-conf-Add-allow-autospawn-for-root.patch \
+ file://0002-do-not-display-CLFAGS-to-improve-reproducibility-bui.patch \
+ file://volatiles.04_pulse \
+ file://0001-doxygen-meson.build-remove-dependency-on-doxygen-bin.patch \
+ "
+SRC_URI[sha256sum] = "053794d6671a3e397d849e478a80b82a63cb9d8ca296bd35b73317bb5ceb87b5"
+UPSTREAM_CHECK_REGEX = "pulseaudio-(?P<pver>\d+(\.(?!99)\d+)+)\.tar"
diff --git a/meta/recipes-multimedia/sbc/sbc/0001-sbc_primitives-Fix-build-on-non-x86.patch b/meta/recipes-multimedia/sbc/sbc/0001-sbc_primitives-Fix-build-on-non-x86.patch
deleted file mode 100644
index 11cec74fbb..0000000000
--- a/meta/recipes-multimedia/sbc/sbc/0001-sbc_primitives-Fix-build-on-non-x86.patch
+++ /dev/null
@@ -1,45 +0,0 @@
-From f4a1224323e386090a44bf70ee0ac9877ba7fb0d Mon Sep 17 00:00:00 2001
-From: Marius Bakke <marius@gnu.org>
-Date: Tue, 22 Dec 2020 11:04:26 +0000
-Subject: [PATCH] sbc_primitives: Fix build on non-x86.
-MIME-Version: 1.0
-Content-Type: text/plain; charset=UTF-8
-Content-Transfer-Encoding: 8bit
-
-Don't call __builtin_cpu_init unless targeting i386 or x86_64.
-Otherwise we get an error at link time:
-
- CC sbc/sbc_primitives.lo
-sbc/sbc_primitives.c: In function ‘sbc_init_primitives_x86’:
-sbc/sbc_primitives.c:596:2: warning: implicit declaration of function ‘__builtin_cpu_init’; did you mean ‘__builtin_irint’? [-Wimplicit-function-declaration]
-[...]
- CCLD src/sbcdec
-ld: sbc/.libs/libsbc-private.a(sbc_primitives.o): in function `sbc_init_primitives':
-sbc_primitives.c:(.text+0x3a30): undefined reference to `__builtin_cpu_init'
-
-Upstream-Status: Backport
-Signed-off-by: Alexander Kanavin <alex.kanavin@gmail.com>
----
- sbc/sbc_primitives.c | 2 ++
- 1 file changed, 2 insertions(+)
-
-diff --git a/sbc/sbc_primitives.c b/sbc/sbc_primitives.c
-index 97a75be..09c214a 100644
---- a/sbc/sbc_primitives.c
-+++ b/sbc/sbc_primitives.c
-@@ -593,6 +593,7 @@ static int sbc_calc_scalefactors_j(
-
- static void sbc_init_primitives_x86(struct sbc_encoder_state *state)
- {
-+#if defined(__x86_64__) || defined(__i386__)
- __builtin_cpu_init();
-
- #ifdef SBC_BUILD_WITH_MMX_SUPPORT
-@@ -604,6 +605,7 @@ static void sbc_init_primitives_x86(struct sbc_encoder_state *state)
- if (__builtin_cpu_supports("sse4.2"))
- sbc_init_primitives_sse(state);
- #endif
-+#endif
- }
-
- /*
diff --git a/meta/recipes-multimedia/sbc/sbc_1.5.bb b/meta/recipes-multimedia/sbc/sbc_1.5.bb
deleted file mode 100644
index e5c72ec0b4..0000000000
--- a/meta/recipes-multimedia/sbc/sbc_1.5.bb
+++ /dev/null
@@ -1,24 +0,0 @@
-SUMMARY = "SBC Audio Codec"
-DESCRIPTION = "Bluetooth low-complexity, subband codec (SBC) library."
-HOMEPAGE = "https://www.bluez.org"
-SECTION = "libs"
-LICENSE = "GPL-2.0-or-later & LGPL-2.1-or-later"
-LICENSE:${PN} = "LGPL-2.1-or-later"
-LICENSE:${PN}-examples = "GPL-2.0-or-later"
-LIC_FILES_CHKSUM = "file://COPYING;md5=12f884d2ae1ff87c09e5b7ccc2c4ca7e \
- file://COPYING.LIB;md5=fb504b67c50331fc78734fed90fb0e09 \
- file://src/sbcenc.c;beginline=1;endline=24;md5=08e7a70b127f4100ff2cd7d629147d8d \
- file://sbc/sbc.h;beginline=1;endline=26;md5=0f57d0df22b0d40746bdd29805a4361b"
-
-DEPENDS = "libsndfile1"
-
-SRC_URI = "${KERNELORG_MIRROR}/linux/bluetooth/${BP}.tar.xz \
- file://0001-sbc_primitives-Fix-build-on-non-x86.patch \
- "
-
-SRC_URI[sha256sum] = "0cbad69823a99e8421fe0700e8cf9eeb8fa0c1ad28e8dbc2182b3353507931d2"
-
-inherit autotools pkgconfig
-
-PACKAGES =+ "${PN}-examples"
-FILES:${PN}-examples += "${bindir}/*"
diff --git a/meta/recipes-multimedia/sbc/sbc_2.0.bb b/meta/recipes-multimedia/sbc/sbc_2.0.bb
new file mode 100644
index 0000000000..d25be9e80c
--- /dev/null
+++ b/meta/recipes-multimedia/sbc/sbc_2.0.bb
@@ -0,0 +1,22 @@
+SUMMARY = "SBC Audio Codec"
+DESCRIPTION = "Bluetooth low-complexity, subband codec (SBC) library."
+HOMEPAGE = "https://www.bluez.org"
+SECTION = "libs"
+LICENSE = "GPL-2.0-or-later & LGPL-2.1-or-later"
+LICENSE:${PN} = "LGPL-2.1-or-later"
+LICENSE:${PN}-examples = "GPL-2.0-or-later"
+LIC_FILES_CHKSUM = "file://COPYING;md5=12f884d2ae1ff87c09e5b7ccc2c4ca7e \
+ file://COPYING.LIB;md5=fb504b67c50331fc78734fed90fb0e09 \
+ file://src/sbcenc.c;beginline=1;endline=24;md5=08e7a70b127f4100ff2cd7d629147d8d \
+ file://sbc/sbc.h;beginline=1;endline=26;md5=0f57d0df22b0d40746bdd29805a4361b"
+
+DEPENDS = "libsndfile1"
+
+SRC_URI = "${KERNELORG_MIRROR}/linux/bluetooth/${BP}.tar.xz"
+
+SRC_URI[sha256sum] = "8f12368e1dbbf55e14536520473cfb338c84b392939cc9b64298360fd4a07992"
+
+inherit autotools pkgconfig
+
+PACKAGES =+ "${PN}-examples"
+FILES:${PN}-examples += "${bindir}/*"
diff --git a/meta/recipes-multimedia/speex/speex/CVE-2020-23903.patch b/meta/recipes-multimedia/speex/speex/CVE-2020-23903.patch
deleted file mode 100644
index eb16e95ffc..0000000000
--- a/meta/recipes-multimedia/speex/speex/CVE-2020-23903.patch
+++ /dev/null
@@ -1,30 +0,0 @@
-Backport patch to fix CVE-2020-23903.
-
-CVE: CVE-2020-23903
-Upstream-Status: Backport [https://github.com/xiph/speex/commit/870ff84]
-
-Signed-off-by: Kai Kang <kai.kang@windriver.com>
-
-From 870ff845b32f314aec0036641ffe18aba4916887 Mon Sep 17 00:00:00 2001
-From: Tristan Matthews <tmatth@videolan.org>
-Date: Mon, 13 Jul 2020 23:25:03 -0400
-Subject: [PATCH] wav_io: guard against invalid channel numbers
-
-Fixes #13
----
- src/wav_io.c | 2 +-
- 1 file changed, 1 insertion(+), 1 deletion(-)
-
-diff --git a/src/wav_io.c b/src/wav_io.c
-index b5183015..09d62eb0 100644
---- a/src/wav_io.c
-+++ b/src/wav_io.c
-@@ -111,7 +111,7 @@ int read_wav_header(FILE *file, int *rate, int *channels, int *format, spx_int32
- stmp = le_short(stmp);
- *channels = stmp;
-
-- if (stmp>2)
-+ if (stmp>2 || stmp<1)
- {
- fprintf (stderr, "Only mono and (intensity) stereo supported\n");
- return -1;
diff --git a/meta/recipes-multimedia/speex/speex_1.2.0.bb b/meta/recipes-multimedia/speex/speex_1.2.0.bb
deleted file mode 100644
index ea475f0f1b..0000000000
--- a/meta/recipes-multimedia/speex/speex_1.2.0.bb
+++ /dev/null
@@ -1,22 +0,0 @@
-SUMMARY = "Speech Audio Codec"
-DESCRIPTION = "Speex is an Open Source/Free Software patent-free audio compression format designed for speech."
-HOMEPAGE = "http://www.speex.org"
-SECTION = "libs"
-LICENSE = "BSD-3-Clause"
-LIC_FILES_CHKSUM = "file://COPYING;md5=314649d8ba9dd7045dfb6683f298d0a8 \
- file://include/speex/speex.h;beginline=1;endline=34;md5=ef8c8ea4f7198d71cf3509c6ed05ea50"
-DEPENDS = "libogg speexdsp"
-
-SRC_URI = "http://downloads.xiph.org/releases/speex/speex-${PV}.tar.gz \
- file://CVE-2020-23903.patch \
- "
-UPSTREAM_CHECK_REGEX = "speex-(?P<pver>\d+(\.\d+)+)\.tar"
-
-SRC_URI[md5sum] = "8ab7bb2589110dfaf0ed7fa7757dc49c"
-SRC_URI[sha256sum] = "eaae8af0ac742dc7d542c9439ac72f1f385ce838392dc849cae4536af9210094"
-
-inherit autotools pkgconfig lib_package
-
-EXTRA_OECONF = "\
- ${@bb.utils.contains('TARGET_FPU', 'soft', '--enable-fixed-point --disable-float-api --disable-vbr', '', d)} \
-"
diff --git a/meta/recipes-multimedia/speex/speex_1.2.1.bb b/meta/recipes-multimedia/speex/speex_1.2.1.bb
new file mode 100644
index 0000000000..c40198fa8f
--- /dev/null
+++ b/meta/recipes-multimedia/speex/speex_1.2.1.bb
@@ -0,0 +1,20 @@
+SUMMARY = "Speech Audio Codec"
+DESCRIPTION = "Speex is an Open Source/Free Software patent-free audio compression format designed for speech."
+HOMEPAGE = "http://www.speex.org"
+SECTION = "libs"
+LICENSE = "BSD-3-Clause"
+LIC_FILES_CHKSUM = "file://COPYING;md5=eff3f76350f52a99a3df5eec6b79c02a \
+ file://include/speex/speex.h;beginline=1;endline=34;md5=ef8c8ea4f7198d71cf3509c6ed05ea50 \
+ "
+DEPENDS = "libogg speexdsp"
+
+SRC_URI = "http://downloads.xiph.org/releases/speex/speex-${PV}.tar.gz"
+UPSTREAM_CHECK_REGEX = "speex-(?P<pver>\d+(\.\d+)+)\.tar"
+
+SRC_URI[sha256sum] = "4b44d4f2b38a370a2d98a78329fefc56a0cf93d1c1be70029217baae6628feea"
+
+inherit autotools pkgconfig lib_package
+
+EXTRA_OECONF = "\
+ ${@bb.utils.contains('TARGET_FPU', 'soft', '--enable-fixed-point --disable-float-api --disable-vbr', '', d)} \
+"
diff --git a/meta/recipes-multimedia/speex/speexdsp_1.2.0.bb b/meta/recipes-multimedia/speex/speexdsp_1.2.0.bb
deleted file mode 100644
index bb7bc9aade..0000000000
--- a/meta/recipes-multimedia/speex/speexdsp_1.2.0.bb
+++ /dev/null
@@ -1,27 +0,0 @@
-SUMMARY = "A patent-free DSP library"
-DESCRIPTION = "SpeexDSP is a patent-free, Open Source/Free Software DSP library."
-HOMEPAGE = "http://www.speex.org"
-SECTION = "libs"
-LICENSE = "BSD-3-Clause"
-LIC_FILES_CHKSUM = "file://COPYING;md5=314649d8ba9dd7045dfb6683f298d0a8"
-
-SRC_URI = "http://downloads.xiph.org/releases/speex/speexdsp-${PV}.tar.gz"
-
-UPSTREAM_CHECK_REGEX = "speexdsp-(?P<pver>\d+(\.\d+)+)\.tar"
-
-SRC_URI[md5sum] = "b722df341576dc185d897131321008fc"
-SRC_URI[sha256sum] = "682042fc6f9bee6294ec453f470dadc26c6ff29b9c9e9ad2ffc1f4312fd64771"
-
-inherit autotools pkgconfig
-
-EXTRA_OECONF = "\
- --disable-examples \
- ${@bb.utils.contains('TARGET_FPU', 'soft', '--enable-fixed-point --disable-float-api', '', d)} \
-"
-
-# speexdsp was split off from speex in 1.2rc2. Older versions of speex can't
-# be installed together with speexdsp, since they contain overlapping files.
-RCONFLICTS:${PN} = "speex (< 1.2rc2)"
-RCONFLICTS:${PN}-dbg = "speex-dbg (< 1.2rc2)"
-RCONFLICTS:${PN}-dev = "speex-dev (< 1.2rc2)"
-RCONFLICTS:${PN}-staticdev = "speex-staticdev (< 1.2rc2)"
diff --git a/meta/recipes-multimedia/speex/speexdsp_1.2.1.bb b/meta/recipes-multimedia/speex/speexdsp_1.2.1.bb
new file mode 100644
index 0000000000..435b56bbb8
--- /dev/null
+++ b/meta/recipes-multimedia/speex/speexdsp_1.2.1.bb
@@ -0,0 +1,28 @@
+SUMMARY = "A patent-free DSP library"
+DESCRIPTION = "SpeexDSP is a patent-free, Open Source/Free Software DSP library."
+HOMEPAGE = "http://www.speex.org"
+SECTION = "libs"
+LICENSE = "BSD-3-Clause"
+LIC_FILES_CHKSUM = "file://COPYING;md5=eff3f76350f52a99a3df5eec6b79c02a"
+
+SRC_URI = "http://downloads.xiph.org/releases/speex/speexdsp-${PV}.tar.gz"
+
+UPSTREAM_CHECK_REGEX = "speexdsp-(?P<pver>\d+(\.\d+)+)\.tar"
+
+SRC_URI[sha256sum] = "8c777343e4a6399569c72abc38a95b24db56882c83dbdb6c6424a5f4aeb54d3d"
+
+inherit autotools pkgconfig
+
+EXTRA_OECONF = "\
+ --disable-examples \
+ ${@bb.utils.contains('TARGET_FPU', 'soft', '--enable-fixed-point --disable-float-api', '', d)} \
+"
+
+# speexdsp was split off from speex in 1.2rc2. Older versions of speex can't
+# be installed together with speexdsp, since they contain overlapping files.
+RCONFLICTS:${PN} = "speex (< 1.2rc2)"
+RCONFLICTS:${PN}-dbg = "speex-dbg (< 1.2rc2)"
+RCONFLICTS:${PN}-dev = "speex-dev (< 1.2rc2)"
+RCONFLICTS:${PN}-staticdev = "speex-staticdev (< 1.2rc2)"
+
+BBCLASSEXTEND = "native"
diff --git a/meta/recipes-multimedia/webp/libwebp_1.2.2.bb b/meta/recipes-multimedia/webp/libwebp_1.2.2.bb
deleted file mode 100644
index 281cff1bf2..0000000000
--- a/meta/recipes-multimedia/webp/libwebp_1.2.2.bb
+++ /dev/null
@@ -1,55 +0,0 @@
-SUMMARY = "WebP is an image format designed for the Web"
-DESCRIPTION = "WebP is a method of lossy and lossless compression that can be \
- used on a large variety of photographic, translucent and \
- graphical images found on the web. The degree of lossy \
- compression is adjustable so a user can choose the trade-off \
- between file size and image quality. WebP typically achieves \
- an average of 30% more compression than JPEG and JPEG 2000, \
- without loss of image quality."
-HOMEPAGE = "https://developers.google.com/speed/webp/"
-SECTION = "libs"
-
-LICENSE = "BSD-3-Clause"
-LIC_FILES_CHKSUM = "file://COPYING;md5=6e8dee932c26f2dab503abf70c96d8bb \
- file://PATENTS;md5=c6926d0cb07d296f886ab6e0cc5a85b7"
-
-SRC_URI = "http://downloads.webmproject.org/releases/webp/${BP}.tar.gz"
-SRC_URI[sha256sum] = "7656532f837af5f4cec3ff6bafe552c044dc39bf453587bd5b77450802f4aee6"
-
-UPSTREAM_CHECK_URI = "http://downloads.webmproject.org/releases/webp/index.html"
-
-EXTRA_OECONF = " \
- --disable-wic \
- --enable-libwebpmux \
- --enable-libwebpdemux \
- --enable-threading \
-"
-
-# Do not trust configure to determine if neon is available.
-#
-EXTRA_OECONF_ARM = " \
- ${@bb.utils.contains("TUNE_FEATURES","neon","--enable-neon","--disable-neon",d)} \
-"
-EXTRA_OECONF:append:arm = " ${EXTRA_OECONF_ARM}"
-EXTRA_OECONF:append:armeb = " ${EXTRA_OECONF_ARM}"
-
-inherit autotools lib_package
-
-PACKAGECONFIG ??= ""
-
-# libwebpdecoder is a subset of libwebp, don't build it unless requested
-PACKAGECONFIG[decoder] = "--enable-libwebpdecoder,--disable-libwebpdecoder"
-
-# Apply for examples programs: cwebp and dwebp
-PACKAGECONFIG[gif] = "--enable-gif,--disable-gif,giflib"
-PACKAGECONFIG[jpeg] = "--enable-jpeg,--disable-jpeg,jpeg"
-PACKAGECONFIG[png] = "--enable-png,--disable-png,,libpng"
-PACKAGECONFIG[tiff] = "--enable-tiff,--disable-tiff,tiff"
-
-# Apply only for example program vwebp
-PACKAGECONFIG[gl] = "--enable-gl,--disable-gl,mesa-glut"
-
-PACKAGES =+ "${PN}-gif2webp"
-
-DESCRIPTION:${PN}-gif2webp = "Simple tool to convert animated GIFs to WebP"
-FILES:${PN}-gif2webp = "${bindir}/gif2webp"
diff --git a/meta/recipes-multimedia/webp/libwebp_1.3.2.bb b/meta/recipes-multimedia/webp/libwebp_1.3.2.bb
new file mode 100644
index 0000000000..63b0fd9a6c
--- /dev/null
+++ b/meta/recipes-multimedia/webp/libwebp_1.3.2.bb
@@ -0,0 +1,57 @@
+SUMMARY = "WebP is an image format designed for the Web"
+DESCRIPTION = "WebP is a method of lossy and lossless compression that can be \
+ used on a large variety of photographic, translucent and \
+ graphical images found on the web. The degree of lossy \
+ compression is adjustable so a user can choose the trade-off \
+ between file size and image quality. WebP typically achieves \
+ an average of 30% more compression than JPEG and JPEG 2000, \
+ without loss of image quality."
+HOMEPAGE = "https://developers.google.com/speed/webp/"
+SECTION = "libs"
+
+LICENSE = "BSD-3-Clause"
+LIC_FILES_CHKSUM = "file://COPYING;md5=6e8dee932c26f2dab503abf70c96d8bb \
+ file://PATENTS;md5=c6926d0cb07d296f886ab6e0cc5a85b7"
+
+SRC_URI = "http://downloads.webmproject.org/releases/webp/${BP}.tar.gz"
+SRC_URI[sha256sum] = "2a499607df669e40258e53d0ade8035ba4ec0175244869d1025d460562aa09b4"
+
+UPSTREAM_CHECK_URI = "http://downloads.webmproject.org/releases/webp/index.html"
+
+EXTRA_OECONF = " \
+ --disable-wic \
+ --enable-libwebpmux \
+ --enable-libwebpdemux \
+ --enable-threading \
+"
+
+# Do not trust configure to determine if neon is available.
+#
+EXTRA_OECONF_ARM = " \
+ ${@bb.utils.contains("TUNE_FEATURES","neon","--enable-neon","--disable-neon",d)} \
+"
+EXTRA_OECONF:append:arm = " ${EXTRA_OECONF_ARM}"
+EXTRA_OECONF:append:armeb = " ${EXTRA_OECONF_ARM}"
+
+inherit autotools lib_package
+
+PACKAGECONFIG ??= ""
+
+# libwebpdecoder is a subset of libwebp, don't build it unless requested
+PACKAGECONFIG[decoder] = "--enable-libwebpdecoder,--disable-libwebpdecoder"
+
+# Apply for examples programs: cwebp and dwebp
+PACKAGECONFIG[gif] = "--enable-gif,--disable-gif,giflib"
+PACKAGECONFIG[jpeg] = "--enable-jpeg,--disable-jpeg,jpeg"
+PACKAGECONFIG[png] = "--enable-png,--disable-png,,libpng"
+PACKAGECONFIG[tiff] = "--enable-tiff,--disable-tiff,tiff"
+
+# Apply only for example program vwebp
+PACKAGECONFIG[gl] = "--enable-gl,--disable-gl,mesa-glut"
+
+PACKAGES =+ "${PN}-gif2webp"
+
+DESCRIPTION:${PN}-gif2webp = "Simple tool to convert animated GIFs to WebP"
+FILES:${PN}-gif2webp = "${bindir}/gif2webp"
+
+BBCLASSEXTEND += "native nativesdk"
diff --git a/meta/recipes-multimedia/x264/x264_git.bb b/meta/recipes-multimedia/x264/x264_git.bb
index 51ae9d1179..e7d9e75e8d 100644
--- a/meta/recipes-multimedia/x264/x264_git.bb
+++ b/meta/recipes-multimedia/x264/x264_git.bb
@@ -14,9 +14,9 @@ SRC_URI = "git://github.com/mirror/x264;branch=stable;protocol=https \
"
UPSTREAM_CHECK_COMMITS = "1"
-SRCREV = "5db6aa6cab1b146e07b60cc1736a01f21da01154"
+SRCREV = "baee400fa9ced6f5481a728138fed6e867b0ff7f"
-PV = "r3039+git${SRCPV}"
+PV = "r3039+git"
S = "${WORKDIR}/git"
diff --git a/meta/recipes-rt/README b/meta/recipes-rt/README
index 7d7558de49..0144d68b70 100644
--- a/meta/recipes-rt/README
+++ b/meta/recipes-rt/README
@@ -10,4 +10,4 @@ If you are creating a new BSP which should use linux-yocto-rt by default,
use the line above in the $MACHINE.conf in your BSP layer, and specify the
following in a linux-yocto-rt bbappend recipe:
-COMPATIBLE_MACHINE_$MACHINE = $MACHINE
+COMPATIBLE_MACHINE:$MACHINE = $MACHINE
diff --git a/meta/recipes-rt/rt-tests/files/0001-Makefile-Allow-for-CC-and-AR-to-be-overridden.patch b/meta/recipes-rt/rt-tests/files/0001-Makefile-Allow-for-CC-and-AR-to-be-overridden.patch
index 43c2a2f33b..a1fc9e26a0 100644
--- a/meta/recipes-rt/rt-tests/files/0001-Makefile-Allow-for-CC-and-AR-to-be-overridden.patch
+++ b/meta/recipes-rt/rt-tests/files/0001-Makefile-Allow-for-CC-and-AR-to-be-overridden.patch
@@ -1,4 +1,4 @@
-From 05ccf5045adf4248f29623ad0db2d2a55ae5cd06 Mon Sep 17 00:00:00 2001
+From 7cf4cf097895bd4c3f3d3a6f783c766b3173cf1a Mon Sep 17 00:00:00 2001
From: Randy Witt <randy.e.witt@intel.com>
Date: Fri, 16 Oct 2020 16:54:30 -0700
Subject: [PATCH] Makefile: Allow for CC and AR to be overridden
@@ -15,11 +15,11 @@ Upstream-Status: Inappropriate [other]
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/Makefile b/Makefile
-index ec5d693..ab8a8de 100644
+index 223a839..a735584 100644
--- a/Makefile
+++ b/Makefile
@@ -1,6 +1,6 @@
- VERSION = 2.3
+ VERSION = 2.6
-CC = $(CROSS_COMPILE)gcc
-AR = $(CROSS_COMPILE)ar
+CC ?= $(CROSS_COMPILE)gcc
diff --git a/meta/recipes-rt/rt-tests/files/rt_bmark.py b/meta/recipes-rt/rt-tests/files/rt_bmark.py
index 3b84447a0f..2a4eed412f 100755
--- a/meta/recipes-rt/rt-tests/files/rt_bmark.py
+++ b/meta/recipes-rt/rt-tests/files/rt_bmark.py
@@ -265,7 +265,7 @@ cmd = ("cyclictest",
"-d", str(interval_delta),
"-l", str(loop_count)
)
-rex = re.compile(b"C:\s*(\d+).*Min:\s*(\d+).*Avg:\s*(\d+).*Max:\s*(\d+)")
+rex = re.compile(r"C:\s*(\d+).*Min:\s*(\d+).*Avg:\s*(\d+).*Max:\s*(\d+)")
def run_cyclictest_once():
res = subprocess.check_output(cmd)
diff --git a/meta/recipes-rt/rt-tests/rt-tests.inc b/meta/recipes-rt/rt-tests/rt-tests.inc
index e519dfdc61..70315c4694 100644
--- a/meta/recipes-rt/rt-tests/rt-tests.inc
+++ b/meta/recipes-rt/rt-tests/rt-tests.inc
@@ -1,7 +1,7 @@
-SRCREV = "338843c1ee157ae5d990c6f544f66b447b24fa8b"
-PV = "2.3"
+SRCREV = "217cd8518c5f7777d490892aa9c765a6b2782cb5"
+PV = "2.6"
PE = "1"
-SRC_URI = "git://git.kernel.org/pub/scm/utils/rt-tests/rt-tests.git;branch=main"
+SRC_URI = "git://git.kernel.org/pub/scm/utils/rt-tests/rt-tests.git;branch=main;protocol=https"
S = "${WORKDIR}/git"
diff --git a/meta/recipes-sato/images/core-image-sato.bb b/meta/recipes-sato/images/core-image-sato.bb
index e63a229d12..9200ae5440 100644
--- a/meta/recipes-sato/images/core-image-sato.bb
+++ b/meta/recipes-sato/images/core-image-sato.bb
@@ -1,3 +1,4 @@
+SUMMARY = "Image with Sato, a mobile environment and visual style for mobile devices."
DESCRIPTION = "Image with Sato, a mobile environment and visual style for \
mobile devices. The image supports X11 with a Sato theme, Pimlico \
applications, and contains terminal, editor, and file manager."
diff --git a/meta/recipes-sato/l3afpad/l3afpad_git.bb b/meta/recipes-sato/l3afpad/l3afpad_git.bb
index 56cbe6bc8d..5540083928 100644
--- a/meta/recipes-sato/l3afpad/l3afpad_git.bb
+++ b/meta/recipes-sato/l3afpad/l3afpad_git.bb
@@ -15,7 +15,7 @@ LIC_FILES_CHKSUM = "file://COPYING;md5=4fbd65380cdd255951079008b364516c \
DEPENDS = "gtk+3 intltool-native gettext-native"
-PV = "0.8.18.1.11+git${SRCPV}"
+PV = "0.8.18.1.11+git"
SRC_URI = "git://github.com/stevenhoneyman/l3afpad.git;branch=master;protocol=https"
SRCREV ="3cdccdc9505643e50f8208171d9eee5de11a42ff"
diff --git a/meta/recipes-sato/matchbox-config-gtk/matchbox-config-gtk_0.2.bb b/meta/recipes-sato/matchbox-config-gtk/matchbox-config-gtk_0.2.bb
index d951baf361..caa34966cc 100644
--- a/meta/recipes-sato/matchbox-config-gtk/matchbox-config-gtk_0.2.bb
+++ b/meta/recipes-sato/matchbox-config-gtk/matchbox-config-gtk_0.2.bb
@@ -4,16 +4,16 @@ BUGTRACKER = "http://bugzilla.yoctoproject.org/"
LICENSE = "GPL-2.0-or-later"
LIC_FILES_CHKSUM = "file://COPYING;md5=94d55d512a9ba36caa9b7df079bae19f \
- file://appearance/mb-appearance.c;endline=25;md5=ea92333cf8a6802639d62d874c114a28"
+ file://appearance/mb-appearance.c;endline=25;md5=f49d7ae8b8634a94315410cd2e055bdf"
DEPENDS = "gconf gtk+3"
RDEPENDS:${PN} = "settings-daemon"
-# SRCREV tagged 0.2
-SRCREV = "ef2192ce98d9374ffdad5f78544c3f8f353c16aa"
-SRC_URI = "git://git.yoctoproject.org/${BPN};branch=master \
+SRCREV = "7182e603357250952aa24d90f6d89345f93da7ce"
+SRC_URI = "git://git.yoctoproject.org/${BPN};branch=master;protocol=https \
file://no-handed.patch"
UPSTREAM_CHECK_GITTAGREGEX = "(?P<pver>(\d+(\.\d+)+))"
+PV = "0.2+git"
S = "${WORKDIR}/git"
diff --git a/meta/recipes-sato/matchbox-desktop/matchbox-desktop_2.2.bb b/meta/recipes-sato/matchbox-desktop/matchbox-desktop_2.2.bb
deleted file mode 100644
index b05a9c48ad..0000000000
--- a/meta/recipes-sato/matchbox-desktop/matchbox-desktop_2.2.bb
+++ /dev/null
@@ -1,34 +0,0 @@
-SUMMARY = "Matchbox Window Manager Desktop"
-DESCRIPTION = "A lightweight windows manager for embedded systems. It uses the desktop background to provide an application launcher and allows modules to be loaded for additional functionality."
-HOMEPAGE = "http://matchbox-project.org/"
-BUGTRACKER = "http://bugzilla.yoctoproject.org/"
-
-LICENSE = "GPL-2.0-or-later & LGPL-2.0-or-later"
-LIC_FILES_CHKSUM = "file://COPYING;md5=94d55d512a9ba36caa9b7df079bae19f \
- file://src/desktop.c;endline=20;md5=36c9bf295e6007f3423095f405af5a2d \
- file://src/main.c;endline=19;md5=2044244f97a195c25b7dc602ac7e9a00"
-
-DEPENDS = "gtk+3 startup-notification dbus"
-SECTION = "x11/wm"
-
-# SRCREV tagged 2.2
-SRCREV = "6bc67d09da4147e5552fe30011a05a2c59d2f777"
-SRC_URI = "git://git.yoctoproject.org/${BPN}-2;branch=master \
- file://vfolders/ \
- "
-
-EXTRA_OECONF = "--enable-startup-notification --with-dbus"
-
-S = "${WORKDIR}/git"
-
-inherit autotools pkgconfig features_check
-
-# The startup-notification requires x11 in DISTRO_FEATURES
-REQUIRED_DISTRO_FEATURES = "x11"
-
-do_install:append() {
- install -d ${D}${datadir}/matchbox/vfolders/
- install -m 0644 ${WORKDIR}/vfolders/* ${D}${datadir}/matchbox/vfolders/
-}
-
-FILES:${PN} += "${datadir}/matchbox/vfolders/"
diff --git a/meta/recipes-sato/matchbox-desktop/matchbox-desktop_2.3.bb b/meta/recipes-sato/matchbox-desktop/matchbox-desktop_2.3.bb
new file mode 100644
index 0000000000..3777f05b5a
--- /dev/null
+++ b/meta/recipes-sato/matchbox-desktop/matchbox-desktop_2.3.bb
@@ -0,0 +1,33 @@
+SUMMARY = "Matchbox Window Manager Desktop"
+DESCRIPTION = "A lightweight windows manager for embedded systems. It uses the desktop background to provide an application launcher and allows modules to be loaded for additional functionality."
+HOMEPAGE = "http://matchbox-project.org/"
+BUGTRACKER = "http://bugzilla.yoctoproject.org/"
+
+LICENSE = "GPL-2.0-or-later & LGPL-2.0-or-later"
+LIC_FILES_CHKSUM = "file://COPYING;md5=94d55d512a9ba36caa9b7df079bae19f \
+ file://src/desktop.c;endline=20;md5=2e488557570c7dee53bfd0567e4273a9 \
+ file://src/main.c;endline=19;md5=5d2234b35efa927ab3ae36ebac52ba59"
+
+DEPENDS = "gtk+3 startup-notification dbus"
+SECTION = "x11/wm"
+
+SRCREV = "0fd6a0c3f3b7bbf4f4b46190d71f7aef35d6bbfd"
+SRC_URI = "git://git.yoctoproject.org/${BPN}-2;branch=master;protocol=https \
+ file://vfolders/ \
+ "
+
+EXTRA_OECONF = "--enable-startup-notification --with-dbus"
+
+S = "${WORKDIR}/git"
+
+inherit autotools pkgconfig features_check
+
+# The startup-notification requires x11 in DISTRO_FEATURES
+REQUIRED_DISTRO_FEATURES = "x11"
+
+do_install:append() {
+ install -d ${D}${datadir}/matchbox/vfolders/
+ install -m 0644 ${WORKDIR}/vfolders/* ${D}${datadir}/matchbox/vfolders/
+}
+
+FILES:${PN} += "${datadir}/matchbox/vfolders/"
diff --git a/meta/recipes-sato/matchbox-keyboard/matchbox-keyboard_0.1.1.bb b/meta/recipes-sato/matchbox-keyboard/matchbox-keyboard_0.1.1.bb
index b1724675ba..6b70948f16 100644
--- a/meta/recipes-sato/matchbox-keyboard/matchbox-keyboard_0.1.1.bb
+++ b/meta/recipes-sato/matchbox-keyboard/matchbox-keyboard_0.1.1.bb
@@ -13,7 +13,7 @@ DEPENDS = "libfakekey expat libxft"
#SRCREV for 0.1.1
SRCREV = "630d89068dc0a1e9199306d405cb32f892dfa4d3"
-SRC_URI = "git://git.yoctoproject.org/${BPN};branch=matchbox-keyboard-0-1 \
+SRC_URI = "git://git.yoctoproject.org/${BPN};branch=matchbox-keyboard-0-1;protocol=https \
file://0001-desktop-file-Hide-the-keyboard-from-app-list.patch \
file://80matchboxkeyboard.sh"
diff --git a/meta/recipes-sato/matchbox-panel-2/matchbox-panel-2_2.11.bb b/meta/recipes-sato/matchbox-panel-2/matchbox-panel-2_2.11.bb
deleted file mode 100644
index 3dec68b963..0000000000
--- a/meta/recipes-sato/matchbox-panel-2/matchbox-panel-2_2.11.bb
+++ /dev/null
@@ -1,42 +0,0 @@
-SUMMARY = "Simple GTK+ based panel for handheld devices"
-DESCRIPTION = "A flexible always present 'window bar' for holding application \
-launchers and small 'applet' style applications"
-HOMEPAGE = "http://matchbox-project.org"
-BUGTRACKER = "http://bugzilla.yoctoproject.org/"
-
-LICENSE = "GPL-2.0-or-later"
-LIC_FILES_CHKSUM = "file://COPYING;md5=94d55d512a9ba36caa9b7df079bae19f \
- file://matchbox-panel/mb-panel.h;endline=10;md5=0b7db28f4b6863fb853d0467e590019a \
- file://applets/startup/startup.c;endline=22;md5=7cbcea60b667f609495222faf3e07917"
-
-DEPENDS = "gtk+3 startup-notification dbus dbus-glib dbus-glib-native"
-DEPENDS += " ${@bb.utils.contains("MACHINE_FEATURES", "acpi", "libacpi", "",d)}"
-DEPENDS += " ${@bb.utils.contains("MACHINE_FEATURES", "apm", "apmd", "",d)}"
-
-# The startup-notification requires x11 in DISTRO_FEATURES
-REQUIRED_DISTRO_FEATURES = "x11"
-
-# SRCREV tagged 2.11 plus some autotools fixes
-SRCREV = "f82ca3f42510fb3ef10f598b393eb373a2c34ca7"
-
-RPROVIDES:${PN} = "matchbox-panel"
-RREPLACES:${PN} = "matchbox-panel"
-RCONFLICTS:${PN} = "matchbox-panel"
-
-SRC_URI = "git://git.yoctoproject.org/${BPN};branch=master \
- file://0001-applets-systray-Allow-icons-to-be-smaller.patch \
- "
-
-EXTRA_OECONF = "--enable-startup-notification --enable-dbus"
-EXTRA_OECONF += " ${@bb.utils.contains("MACHINE_FEATURES", "acpi", "--with-battery=acpi", "",d)}"
-EXTRA_OECONF += " ${@bb.utils.contains("MACHINE_FEATURES", "apm", "--with-battery=apm", "",d)}"
-
-S = "${WORKDIR}/git"
-
-FILES:${PN} += "${libdir}/matchbox-panel/*.so \
- ${datadir}/matchbox-panel/brightness/*.png \
- ${datadir}/matchbox-panel/startup/*.png \
- ${datadir}/icons/"
-FILES:${PN}-dev += "${libdir}/matchbox-panel/*.la"
-
-inherit autotools pkgconfig features_check gettext
diff --git a/meta/recipes-sato/matchbox-panel-2/matchbox-panel-2_2.12.bb b/meta/recipes-sato/matchbox-panel-2/matchbox-panel-2_2.12.bb
new file mode 100644
index 0000000000..83425f60fe
--- /dev/null
+++ b/meta/recipes-sato/matchbox-panel-2/matchbox-panel-2_2.12.bb
@@ -0,0 +1,39 @@
+SUMMARY = "Simple GTK+ based panel for handheld devices"
+DESCRIPTION = "A flexible always present 'window bar' for holding application \
+launchers and small 'applet' style applications"
+HOMEPAGE = "http://matchbox-project.org"
+BUGTRACKER = "http://bugzilla.yoctoproject.org/"
+
+LICENSE = "GPL-2.0-or-later"
+LIC_FILES_CHKSUM = "file://COPYING;md5=94d55d512a9ba36caa9b7df079bae19f \
+ file://matchbox-panel/mb-panel.h;endline=10;md5=f007add2f09d6a1920ee5a28645e9ce1 \
+ file://applets/startup/startup.c;endline=22;md5=9ee7a4192e53cc00041ef445d4ff6909"
+
+DEPENDS = "gtk+3 startup-notification dbus dbus-glib dbus-glib-native"
+DEPENDS += " ${@bb.utils.contains("MACHINE_FEATURES", "acpi", "libacpi", "",d)}"
+
+# The startup-notification requires x11 in DISTRO_FEATURES
+REQUIRED_DISTRO_FEATURES = "x11"
+
+SRCREV = "39fdbc40888c1c75cd7bfd9fd341e6ad58487fce"
+
+RPROVIDES:${PN} = "matchbox-panel"
+RREPLACES:${PN} = "matchbox-panel"
+RCONFLICTS:${PN} = "matchbox-panel"
+
+SRC_URI = "git://git.yoctoproject.org/${BPN};protocol=https;branch=master \
+ file://0001-applets-systray-Allow-icons-to-be-smaller.patch \
+ "
+
+EXTRA_OECONF = "--enable-startup-notification --enable-dbus"
+EXTRA_OECONF += " ${@bb.utils.contains("MACHINE_FEATURES", "acpi", "--with-battery=acpi", "",d)}"
+
+S = "${WORKDIR}/git"
+
+FILES:${PN} += "${libdir}/matchbox-panel/*.so \
+ ${datadir}/matchbox-panel/brightness/*.png \
+ ${datadir}/matchbox-panel/startup/*.png \
+ ${datadir}/icons/"
+FILES:${PN}-dev += "${libdir}/matchbox-panel/*.la"
+
+inherit autotools pkgconfig features_check gettext
diff --git a/meta/recipes-sato/matchbox-sato/matchbox-session-sato/session b/meta/recipes-sato/matchbox-sato/matchbox-session-sato/session
index 3a70574af9..61fea51054 100644
--- a/meta/recipes-sato/matchbox-sato/matchbox-session-sato/session
+++ b/meta/recipes-sato/matchbox-sato/matchbox-session-sato/session
@@ -22,7 +22,6 @@ matchbox-desktop &
START_APPLETS=showdesktop,windowselector
END_APPLETS=$KEYBOARD_APPLET,systray,startup-notify,notify
END_APPLETS=battery,$END_APPLETS # feature-acpi
-END_APPLETS=battery,$END_APPLETS # feature-apm
END_APPLETS=clock,$END_APPLETS
END_APPLETS=openmoko-panel-gsm,$END_APPLETS # feature-phone
diff --git a/meta/recipes-sato/matchbox-sato/matchbox-session-sato_0.1.bb b/meta/recipes-sato/matchbox-sato/matchbox-session-sato_0.1.bb
index 020e211ea5..90734b6bd0 100644
--- a/meta/recipes-sato/matchbox-sato/matchbox-session-sato_0.1.bb
+++ b/meta/recipes-sato/matchbox-sato/matchbox-session-sato_0.1.bb
@@ -7,7 +7,6 @@ LIC_FILES_CHKSUM = "file://session;endline=3;md5=f8a5c5b9c279e52dc094d10e11c2be6
SECTION = "x11"
RDEPENDS:${PN} = "formfactor matchbox-theme-sato matchbox-panel-2 matchbox-desktop matchbox-session gconf"
-PR = "r30"
# This package is architecture specific because the session script is modified
# based on the machine architecture.
@@ -26,7 +25,7 @@ FILES:${PN} += "${datadir}/themes/Sato/index.theme"
do_install() {
# This is the set of machine features that the script has markers for
- FEATURES="acpi apm phone"
+ FEATURES="acpi phone"
SCRIPT="${S}/sedder"
rm -f $SCRIPT
touch $SCRIPT
diff --git a/meta/recipes-sato/matchbox-terminal/matchbox-terminal_0.2.bb b/meta/recipes-sato/matchbox-terminal/matchbox-terminal_0.2.bb
index 802ffa971b..f91c593f78 100644
--- a/meta/recipes-sato/matchbox-terminal/matchbox-terminal_0.2.bb
+++ b/meta/recipes-sato/matchbox-terminal/matchbox-terminal_0.2.bb
@@ -4,15 +4,15 @@ BUGTRACKER = "http://bugzilla.yoctoproject.org/"
LICENSE = "GPL-2.0-or-later"
LIC_FILES_CHKSUM = "file://COPYING;md5=94d55d512a9ba36caa9b7df079bae19f \
- file://main.c;endline=20;md5=96e39176d9e355639a0b8b1c7a840820"
+ file://main.c;endline=20;md5=7d9d802a36298b5c74440a880e2f4817"
DEPENDS = "gtk+3 vte"
SECTION = "x11/utils"
-#SRCREV tagged 0.2
-SRCREV = "161276d0f5d1be8187010fd0d9581a6feca70ea5"
-SRC_URI = "git://git.yoctoproject.org/${BPN};branch=master"
+SRCREV = "99e6eb7db1b5fef110973d96194eec992a2515a2"
+SRC_URI = "git://git.yoctoproject.org/${BPN};branch=master;protocol=https"
UPSTREAM_CHECK_GITTAGREGEX = "(?P<pver>(\d+(\.\d+)+))"
+PV = "0.2+git"
S = "${WORKDIR}/git"
diff --git a/meta/recipes-sato/matchbox-theme-sato/matchbox-theme-sato_0.2.bb b/meta/recipes-sato/matchbox-theme-sato/matchbox-theme-sato_0.2.bb
index bc4024736f..c60a11c3c0 100644
--- a/meta/recipes-sato/matchbox-theme-sato/matchbox-theme-sato_0.2.bb
+++ b/meta/recipes-sato/matchbox-theme-sato/matchbox-theme-sato_0.2.bb
@@ -2,7 +2,7 @@ require matchbox-theme-sato.inc
# SRCREV tagged 0.2
SRCREV = "df085ba9cdaeaf2956890b0e29d7ea1779bf6c78"
-SRC_URI = "git://git.yoctoproject.org/matchbox-sato;branch=master"
+SRC_URI = "git://git.yoctoproject.org/matchbox-sato;branch=master;protocol=https"
UPSTREAM_CHECK_GITTAGREGEX = "(?P<pver>(\d+(\.\d+)+))"
S = "${WORKDIR}/git"
diff --git a/meta/recipes-sato/packagegroups/packagegroup-core-x11-sato.bb b/meta/recipes-sato/packagegroups/packagegroup-core-x11-sato.bb
index cfa301a58d..3f735e072b 100644
--- a/meta/recipes-sato/packagegroups/packagegroup-core-x11-sato.bb
+++ b/meta/recipes-sato/packagegroups/packagegroup-core-x11-sato.bb
@@ -5,7 +5,6 @@
SUMMARY = "Sato desktop"
DESCRIPTION = "Packagegroups provide a convenient mechanism of bundling a collection of packages."
HOMEPAGE = "https://www.yoctoproject.org/"
-PR = "r33"
PACKAGE_ARCH = "${MACHINE_ARCH}"
@@ -31,7 +30,7 @@ RDEPENDS:${PN}-base = "\
matchbox-keyboard-im \
matchbox-config-gtk \
xcursor-transparent-theme \
- adwaita-icon-theme \
+ sato-icon-theme \
settings-daemon \
shutdown-desktop \
${NETWORK_MANAGER} \
diff --git a/meta/recipes-sato/puzzles/puzzles_git.bb b/meta/recipes-sato/puzzles/puzzles_git.bb
index 0104f2672a..e9403ee130 100644
--- a/meta/recipes-sato/puzzles/puzzles_git.bb
+++ b/meta/recipes-sato/puzzles/puzzles_git.bb
@@ -2,17 +2,17 @@ SUMMARY = "Simon Tatham's Portable Puzzle Collection"
DESCRIPTION = "Collection of small computer programs which implement one-player puzzle games."
HOMEPAGE = "http://www.chiark.greenend.org.uk/~sgtatham/puzzles/"
LICENSE = "MIT"
-LIC_FILES_CHKSUM = "file://LICENCE;md5=6e7d24cf1c949887ee9447a1e2a4a24c"
+LIC_FILES_CHKSUM = "file://LICENCE;md5=191542b32377bde254e9799e0a46f18b"
# gtk support includes a bunch of x11 headers
REQUIRED_DISTRO_FEATURES = "x11"
-SRC_URI = "git://git.tartarus.org/simon/puzzles.git;branch=main"
+SRC_URI = "git://git.tartarus.org/simon/puzzles.git;branch=main;protocol=https"
UPSTREAM_CHECK_COMMITS = "1"
-SRCREV = "c43a34fbfe430d235bafc379595761880a19ed9f"
+SRCREV = "80aac3104096aee4057b675c53ece8e60793aa90"
PE = "2"
-PV = "0.0+git${SRCPV}"
+PV = "0.0+git"
S = "${WORKDIR}/git"
diff --git a/meta/recipes-sato/rxvt-unicode/rxvt-unicode.inc b/meta/recipes-sato/rxvt-unicode/rxvt-unicode.inc
index e7d520ebef..016614b19c 100644
--- a/meta/recipes-sato/rxvt-unicode/rxvt-unicode.inc
+++ b/meta/recipes-sato/rxvt-unicode/rxvt-unicode.inc
@@ -6,7 +6,7 @@ terminal emulator rxvt, modified to store text in Unicode \
output. It also supports mixing multiple fonts at the \
same time, including Xft fonts."
HOMEPAGE = "https://rxvt.org/"
-DEPENDS = "virtual/libx11 libxt libxft gdk-pixbuf libxmu libptytty"
+DEPENDS = "virtual/libx11 libxt libxft gdk-pixbuf libxmu libptytty ncurses-native"
SRC_URI = "http://dist.schmorp.de/rxvt-unicode/Attic/rxvt-unicode-${PV}.tar.bz2 \
file://xwc.patch \
@@ -53,6 +53,9 @@ do_install:append () {
install -m 0644 ${WORKDIR}/rxvt.png ${D}/${datadir}/pixmaps
install -m 0644 ${WORKDIR}/rxvt.desktop ${D}/${datadir}/applications
+
+ ${STAGING_BINDIR_NATIVE}/tic -x ${S}/doc/etc/rxvt-unicode.terminfo -o ${D}${datadir}/terminfo || \
+ ${STAGING_BINDIR_NATIVE}/tic ${S}/doc/etc/rxvt-unicode.terminfo -o ${D}${datadir}/terminfo
}
-FILES:${PN} += "${datadir}/applications/rxvt.desktop ${datadir}/pixmaps/rxvt.png"
+FILES:${PN} += "${datadir}/applications/rxvt.desktop ${datadir}/pixmaps/rxvt.png ${datadir}/terminfo"
diff --git a/meta/recipes-sato/rxvt-unicode/rxvt-unicode/rxvt.desktop b/meta/recipes-sato/rxvt-unicode/rxvt-unicode/rxvt.desktop
index 6b7978afc9..1ac3d99510 100644
--- a/meta/recipes-sato/rxvt-unicode/rxvt-unicode/rxvt.desktop
+++ b/meta/recipes-sato/rxvt-unicode/rxvt-unicode/rxvt.desktop
@@ -1,6 +1,6 @@
[Desktop Entry]
Version=1.0
-Name=Terminal
+Name=Rxvt Terminal
Comment=A unicode capable rxvt clone
Exec=rxvt
Icon=utilities-terminal
diff --git a/meta/recipes-sato/rxvt-unicode/rxvt-unicode_9.30.bb b/meta/recipes-sato/rxvt-unicode/rxvt-unicode_9.30.bb
deleted file mode 100644
index 5e3c84194a..0000000000
--- a/meta/recipes-sato/rxvt-unicode/rxvt-unicode_9.30.bb
+++ /dev/null
@@ -1,8 +0,0 @@
-require rxvt-unicode.inc
-
-LICENSE = "GPL-3.0-only"
-LIC_FILES_CHKSUM = "file://COPYING;md5=d32239bcb673463ab874e80d47fae504 \
- file://src/main.C;beginline=1;endline=31;md5=d3600d7ee1062667fcd1193fbe6485f6"
-
-SRC_URI[sha256sum] = "fe1c93d12f385876457a989fc3ae05c0915d2692efc59289d0f70fabe5b44d2d"
-
diff --git a/meta/recipes-sato/rxvt-unicode/rxvt-unicode_9.31.bb b/meta/recipes-sato/rxvt-unicode/rxvt-unicode_9.31.bb
new file mode 100644
index 0000000000..c127b9bbe3
--- /dev/null
+++ b/meta/recipes-sato/rxvt-unicode/rxvt-unicode_9.31.bb
@@ -0,0 +1,8 @@
+require rxvt-unicode.inc
+
+LICENSE = "GPL-3.0-only"
+LIC_FILES_CHKSUM = "file://COPYING;md5=d32239bcb673463ab874e80d47fae504 \
+ file://src/main.C;beginline=1;endline=31;md5=d3600d7ee1062667fcd1193fbe6485f6"
+
+SRC_URI[sha256sum] = "aaa13fcbc149fe0f3f391f933279580f74a96fd312d6ed06b8ff03c2d46672e8"
+
diff --git a/meta/recipes-sato/sato-icon-theme/icon-naming-utils_0.8.90.bb b/meta/recipes-sato/sato-icon-theme/icon-naming-utils_0.8.90.bb
new file mode 100644
index 0000000000..5502b66905
--- /dev/null
+++ b/meta/recipes-sato/sato-icon-theme/icon-naming-utils_0.8.90.bb
@@ -0,0 +1,29 @@
+HOMEPAGE = "http://tango.freedesktop.org/"
+SUMMARY = "Enables backwards compatibility with current desktop icon themes"
+DESCRIPTION = "A Perl script used for maintaining backwards \
+compatibility with current desktop icon themes, while migrating to the \
+names specified in the Icon Naming Specification. The Icon Naming \
+Utilities map the icon names used by the GNOME and KDE desktops to the \
+icon names proposed in the Icon Naming Specification, and generate the \
+icon files appropriate to the desktop environment you use. The Icon \
+Naming Specification provides a standard list of common icon contexts \
+and names that can be used to make icon themes work in a desktop \
+environment or application that implements the naming specification. "
+LICENSE = "GPL-2.0-only"
+DEPENDS = "libxml-simple-perl-native"
+
+LIC_FILES_CHKSUM = "file://COPYING;md5=94d55d512a9ba36caa9b7df079bae19f"
+
+SRC_URI = "${DEBIAN_MIRROR}/main/i/icon-naming-utils/icon-naming-utils_${PV}.orig.tar.gz"
+SRC_URI[sha256sum] = "044ab2199ed8c6a55ce36fd4fcd8b8021a5e21f5bab028c0a7cdcf52a5902e1c"
+
+inherit autotools allarch perlnative
+
+do_configure:append() {
+ # Make sure we use our nativeperl wrapper.
+ sed -i -e "1s:#!.*:#!/usr/bin/env nativeperl:" ${S}/icon-name-mapping.pl.in
+}
+
+FILES:${PN} += "${datadir}/dtds"
+
+BBCLASSEXTEND = "native"
diff --git a/meta/recipes-sato/sato-icon-theme/sato-icon-theme_git.bb b/meta/recipes-sato/sato-icon-theme/sato-icon-theme_git.bb
new file mode 100644
index 0000000000..1ebbf010f3
--- /dev/null
+++ b/meta/recipes-sato/sato-icon-theme/sato-icon-theme_git.bb
@@ -0,0 +1,29 @@
+SUMMARY = "Sato icon theme"
+HOMEPAGE = "https://www.yoctoproject.org/"
+BUGTRACKER = "http://bugzilla.yoctoproject.org/"
+
+LICENSE = "CC-BY-SA-3.0"
+LIC_FILES_CHKSUM = "file://COPYING;md5=56a830bbe6e4697fe6cbbae01bb7c2b2"
+SECTION = "x11"
+
+DEPENDS = "icon-naming-utils-native libxml-simple-perl-native"
+
+SRC_URI = "git://git.yoctoproject.org/sato-icon-theme.git;protocol=https;branch=master"
+SRCREV = "d23f04ecb0328f655bf195df8eb04c1b734d53a9"
+UPSTREAM_CHECK_COMMITS = "1"
+
+S = "${WORKDIR}/git"
+
+inherit autotools pkgconfig allarch gtk-icon-cache perlnative
+
+# The configure script uses pkg-config to find native binaries to execute, so
+# tell it to use our pkg-config-native wrapper.
+export PKG_CONFIG = "pkg-config-native"
+
+FILES:${PN} += "${datadir}/icons/Sato"
+
+do_install:append() {
+ find ${D}${datadir}/icons/Sato/ -maxdepth 1 -type d -exec ln -s preferences-system.png {}/apps/preferences-desktop.png \;
+ find ${D}${datadir}/icons/Sato/ -maxdepth 1 -type d -exec ln -s file-manager.png {}/apps/system-file-manager.png \;
+ find ${D}${datadir}/icons/Sato/ -maxdepth 1 -type d -exec ln -s ../apps/terminal.png {}/places/computer.png \;
+}
diff --git a/meta/recipes-sato/sato-screenshot/sato-screenshot_0.3.bb b/meta/recipes-sato/sato-screenshot/sato-screenshot_0.3.bb
index eea7025c8d..de4ab82484 100644
--- a/meta/recipes-sato/sato-screenshot/sato-screenshot_0.3.bb
+++ b/meta/recipes-sato/sato-screenshot/sato-screenshot_0.3.bb
@@ -11,7 +11,7 @@ DEPENDS = "matchbox-panel-2 gtk+3"
# SRCREV tagged 0.3
SRCREV = "9250fa5a012d84ff45984e8c4345ee7635227756"
-SRC_URI = "git://git.yoctoproject.org/screenshot;branch=master"
+SRC_URI = "git://git.yoctoproject.org/screenshot;branch=master;protocol=https"
UPSTREAM_CHECK_GITTAGREGEX = "(?P<pver>(\d+(\.\d+)+))"
S = "${WORKDIR}/git"
diff --git a/meta/recipes-sato/settings-daemon/settings-daemon_0.0.2.bb b/meta/recipes-sato/settings-daemon/settings-daemon_0.0.2.bb
index 4356930da0..4bdbefcb75 100644
--- a/meta/recipes-sato/settings-daemon/settings-daemon_0.0.2.bb
+++ b/meta/recipes-sato/settings-daemon/settings-daemon_0.0.2.bb
@@ -9,7 +9,7 @@ SECTION = "x11"
# SRCREV tagged 0.0.2
SRCREV = "b2e5da502f8c5ff75e9e6da771372ef8e40fd9a2"
-SRC_URI = "git://git.yoctoproject.org/xsettings-daemon;branch=master \
+SRC_URI = "git://git.yoctoproject.org/xsettings-daemon;branch=master;protocol=https \
file://addsoundkeys.patch \
file://70settings-daemon.sh \
"
diff --git a/meta/recipes-sato/shutdown-desktop/shutdown-desktop.bb b/meta/recipes-sato/shutdown-desktop/shutdown-desktop.bb
index 4bb896fc7b..766f574b25 100644
--- a/meta/recipes-sato/shutdown-desktop/shutdown-desktop.bb
+++ b/meta/recipes-sato/shutdown-desktop/shutdown-desktop.bb
@@ -4,7 +4,6 @@ LIC_FILES_CHKSUM = "file://${COREBASE}/meta/COPYING.MIT;md5=3da9cfbcb788c80a0384
SRC_URI = "file://shutdown.desktop"
-PR = "r1"
S = "${WORKDIR}"
diff --git a/meta/recipes-sato/webkit/libwpe_1.12.0.bb b/meta/recipes-sato/webkit/libwpe_1.12.0.bb
deleted file mode 100644
index ac4ee3eb23..0000000000
--- a/meta/recipes-sato/webkit/libwpe_1.12.0.bb
+++ /dev/null
@@ -1,18 +0,0 @@
-SUMMARY = "General-purpose library specifically developed for the WPE-flavored port of WebKit."
-HOMEPAGE = "https://github.com/WebPlatformForEmbedded/libwpe"
-BUGTRACKER = "https://github.com/WebPlatformForEmbedded/libwpe/issues"
-
-LICENSE = "BSD-2-Clause"
-LIC_FILES_CHKSUM = "file://COPYING;md5=371a616eb4903c6cb79e9893a5f615cc"
-DEPENDS = "virtual/egl libxkbcommon"
-
-inherit cmake features_check pkgconfig
-
-REQUIRED_DISTRO_FEATURES = "opengl"
-
-SRC_URI = "https://wpewebkit.org/releases/${BPN}-${PV}.tar.xz"
-SRC_URI[sha256sum] = "e8eeca228a6b4c36294cfb63f7d3ba9ada47a430904a5a973b3c99c96a44c18c"
-
-# This is a tweak of upstream-version-is-even needed because
-# ipstream directory contains tarballs for other components as well.
-UPSTREAM_CHECK_REGEX = "libwpe-(?P<pver>\d+\.(\d*[02468])+(\.\d+)+)\.tar"
diff --git a/meta/recipes-sato/webkit/libwpe_1.16.0.bb b/meta/recipes-sato/webkit/libwpe_1.16.0.bb
new file mode 100644
index 0000000000..57f91ce87e
--- /dev/null
+++ b/meta/recipes-sato/webkit/libwpe_1.16.0.bb
@@ -0,0 +1,18 @@
+SUMMARY = "General-purpose library specifically developed for the WPE-flavored port of WebKit."
+HOMEPAGE = "https://github.com/WebPlatformForEmbedded/libwpe"
+BUGTRACKER = "https://github.com/WebPlatformForEmbedded/libwpe/issues"
+
+LICENSE = "BSD-2-Clause"
+LIC_FILES_CHKSUM = "file://COPYING;md5=371a616eb4903c6cb79e9893a5f615cc"
+DEPENDS = "virtual/egl libxkbcommon"
+
+inherit cmake features_check pkgconfig
+
+REQUIRED_DISTRO_FEATURES = "opengl"
+
+SRC_URI = "https://wpewebkit.org/releases/${BPN}-${PV}.tar.xz"
+SRC_URI[sha256sum] = "c7f3a3c6b3d006790d486dc7cceda2b6d2e329de07f33bc47dfc53f00f334b2a"
+
+# This is a tweak of upstream-version-is-even needed because
+# ipstream directory contains tarballs for other components as well.
+UPSTREAM_CHECK_REGEX = "libwpe-(?P<pver>\d+\.(\d*[02468])+(\.\d+)+)\.tar"
diff --git a/meta/recipes-sato/webkit/webkitgtk/0001-CMake-Add-a-variable-to-control-macro-__PAS_ALWAYS_I.patch b/meta/recipes-sato/webkit/webkitgtk/0001-CMake-Add-a-variable-to-control-macro-__PAS_ALWAYS_I.patch
new file mode 100644
index 0000000000..a819e22127
--- /dev/null
+++ b/meta/recipes-sato/webkit/webkitgtk/0001-CMake-Add-a-variable-to-control-macro-__PAS_ALWAYS_I.patch
@@ -0,0 +1,73 @@
+From 575b848a3b3c14280679db80d0d518922c83d62a Mon Sep 17 00:00:00 2001
+From: Kai Kang <kai.kang@windriver.com>
+Date: Fri, 11 Aug 2023 14:20:48 +0800
+Subject: [PATCH] Add a variable to control macro
+ __PAS_ALWAYS_INLINE_BUT_NOT_INLINE
+ https://bugs.webkit.org/show_bug.cgi?id=260065
+
+Reviewed by NOBODY (OOPS!).
+
+It fails to compile webkitgtk with option `-Og` of gcc/g++:
+
+| In file included from Source/bmalloc/libpas/src/libpas/pas_heap_page_provider.h:30,
+| from Source/bmalloc/libpas/src/libpas/pas_bootstrap_heap_page_provider.h:29,
+| from Source/bmalloc/libpas/src/libpas/pas_large_heap_physical_page_sharing_cache.h:29,
+| from Source/bmalloc/libpas/src/libpas/pas_basic_heap_page_caches.h:29,
+| from Source/bmalloc/libpas/src/libpas/pas_heap_config_utils.h:32,
+| from Source/bmalloc/libpas/src/libpas/bmalloc_heap_config.h:34,
+| from Source/bmalloc/libpas/src/libpas/bmalloc_heap_inlines.h:34,
+| from Source/bmalloc/bmalloc/bmalloc.h:39,
+| from Source/bmalloc/bmalloc/bmalloc.cpp:26:
+| In function 'pas_allocation_result pas_local_allocator_try_allocate(pas_local_allocator*, size_t, size_t, pas_heap_config, pas_allocator_counts*, pas_allocation_result_filter)',
+| inlined from 'pas_allocation_result pas_try_allocate_common_impl_fast(pas_heap_config, pas_allocator_counts*, pas_allocation_result_filter, pas_local_allocator*, size_t, size_t)' at webkitgtk-2.40.2/Source/bmalloc/libpas/src/libpas/pas_try_allocate_common.h:85:46,
+| inlined from 'pas_allocation_result bmalloc_try_allocate_with_alignment_impl_impl_fast(pas_local_allocator*, size_t, size_t)' at webkitgtk-2.40.2/Source/bmalloc/libpas/src/libpas/bmalloc_heap_inlines.h:59:1,
+| inlined from 'pas_allocation_result pas_try_allocate_intrinsic_impl_casual_case(__pas_heap*, size_t, size_t, pas_intrinsic_heap_support*, pas_heap_config, pas_try_allocate_common_fast, pas_try_allocate_common_slow, pas_intrinsic_heap_designation_mode)' at webkitgtk-2.40.2/Source/bmalloc/libpas/src/libpas/pas_try_allocate_intrinsic.h:167:44,
+| inlined from 'pas_allocation_result bmalloc_try_allocate_with_alignment_impl_casual_case(size_t, size_t)' at webkitgtk-2.40.2/Source/bmalloc/libpas/src/libpas/bmalloc_heap_inlines.h:59:1:
+| webkitgtk-2.40.2/Source/bmalloc/libpas/src/libpas/pas_allocation_result.h:76:1: error: inlining failed in call to 'always_inline' 'pas_allocation_result pas_allocation_result_identity(pas_allocation_result)': function not considered for inlining
+| 76 | pas_allocation_result_identity(pas_allocation_result result)
+| | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Add an variable `WEBKIT_NO_INLINE_HINTS` to control macro
+__PAS_ALWAYS_INLINE_BUT_NOT_INLINE whether includes function attribute
+`always_inline`. It could set the variable to make compilation pass when
+gcc option `-Og` is used.
+
+* Source/bmalloc/libpas/src/libpas/pas_utils_prefix.h:
+* Source/cmake/WebKitCompilerFlags.cmake:
+
+Upstream-Status: Submitted [https://github.com/WebKit/WebKit/pull/16601]
+
+Signed-off-by: Kai Kang <kai.kang@windriver.com>
+---
+ Source/bmalloc/libpas/src/libpas/pas_utils_prefix.h | 2 +-
+ Source/cmake/WebKitCompilerFlags.cmake | 7 +++++++
+ 2 files changed, 8 insertions(+), 1 deletion(-)
+
+diff --git a/Source/bmalloc/libpas/src/libpas/pas_utils_prefix.h b/Source/bmalloc/libpas/src/libpas/pas_utils_prefix.h
+index 5d5fb38c..a554f700 100644
+--- a/Source/bmalloc/libpas/src/libpas/pas_utils_prefix.h
++++ b/Source/bmalloc/libpas/src/libpas/pas_utils_prefix.h
+@@ -44,7 +44,7 @@ __PAS_BEGIN_EXTERN_C;
+ #define __SUSPICIOUS__
+ #define __BROKEN__
+
+-#ifdef __OPTIMIZE__
++#if defined(__OPTIMIZE__) && !defined(WEBKIT_NO_INLINE_HINTS)
+ #define __PAS_ALWAYS_INLINE_BUT_NOT_INLINE __attribute__((__always_inline__))
+ #else
+ #define __PAS_ALWAYS_INLINE_BUT_NOT_INLINE
+diff --git a/Source/cmake/WebKitCompilerFlags.cmake b/Source/cmake/WebKitCompilerFlags.cmake
+index 9b2fecf9..7cdc2b6a 100644
+--- a/Source/cmake/WebKitCompilerFlags.cmake
++++ b/Source/cmake/WebKitCompilerFlags.cmake
+@@ -453,3 +453,10 @@ endif ()
+
+ # FIXME: Enable pre-compiled headers for all ports <https://webkit.org/b/139438>
+ set(CMAKE_DISABLE_PRECOMPILE_HEADERS ON)
++
++# It fails to compile with `gcc -Og`
++set(WEBKIT_NO_INLINE_HINTS OFF CACHE BOOL "Disable funtion attribute always_inline for WebKit")
++
++if (WEBKIT_NO_INLINE_HINTS)
++ add_definitions(-DWEBKIT_NO_INLINE_HINTS)
++endif ()
diff --git a/meta/recipes-sato/webkit/webkitgtk/0001-FindGObjectIntrospection.cmake-prefix-variables-obta.patch b/meta/recipes-sato/webkit/webkitgtk/0001-FindGObjectIntrospection.cmake-prefix-variables-obta.patch
index 2379924cc9..8e29ce17ed 100644
--- a/meta/recipes-sato/webkit/webkitgtk/0001-FindGObjectIntrospection.cmake-prefix-variables-obta.patch
+++ b/meta/recipes-sato/webkit/webkitgtk/0001-FindGObjectIntrospection.cmake-prefix-variables-obta.patch
@@ -1,4 +1,4 @@
-From e0bf97eecfa601e08b9578f00b1000890284241a Mon Sep 17 00:00:00 2001
+From 6348f91c29e2350ad3fec5264aa57dd4994d4583 Mon Sep 17 00:00:00 2001
From: Alexander Kanavin <alex.kanavin@gmail.com>
Date: Tue, 27 Oct 2015 16:02:19 +0200
Subject: [PATCH] FindGObjectIntrospection.cmake: prefix variables obtained
@@ -9,30 +9,21 @@ reasons why this is not approproiate for upstream submission.
Upstream-Status: Inappropriate [oe-core specific]
Signed-off-by: Alexander Kanavin <alex.kanavin@gmail.com>
-
---
- Source/cmake/FindGObjectIntrospection.cmake | 4 +++-
- 1 file changed, 3 insertions(+), 1 deletion(-)
+ Source/cmake/FindGI.cmake | 3 +++
+ 1 file changed, 3 insertions(+)
-diff --git a/Source/cmake/FindGObjectIntrospection.cmake b/Source/cmake/FindGObjectIntrospection.cmake
-index be96814a..ae67b593 100644
---- a/Source/cmake/FindGObjectIntrospection.cmake
-+++ b/Source/cmake/FindGObjectIntrospection.cmake
-@@ -16,7 +16,6 @@
- # Redistribution and use is allowed according to the terms of the BSD license.
+diff --git a/Source/cmake/FindGI.cmake b/Source/cmake/FindGI.cmake
+index fdc56b21..d42eca52 100644
+--- a/Source/cmake/FindGI.cmake
++++ b/Source/cmake/FindGI.cmake
+@@ -72,6 +72,9 @@ if (PKG_CONFIG_FOUND)
+ endif ()
+ endif ()
++set(_GI_SCANNER_EXE "$ENV{PKG_CONFIG_SYSROOT_DIR}${_GI_SCANNER_EXE}")
++set(_GI_COMPILER_EXE "$ENV{PKG_CONFIG_SYSROOT_DIR}${_GI_COMPILER_EXE}")
++
+ find_program(GI_SCANNER_EXE NAMES ${_GI_SCANNER_EXE} g-ir-scanner)
+ find_program(GI_COMPILER_EXE NAMES ${_GI_COMPILER_EXE} g-ir-compiler)
--
- find_package(PkgConfig QUIET)
- if (PKG_CONFIG_FOUND)
- if (PACKAGE_FIND_VERSION_COUNT GREATER 0)
-@@ -30,6 +29,9 @@ if (PKG_CONFIG_FOUND)
- pkg_get_variable(INTROSPECTION_GENERATE gobject-introspection-1.0 g_ir_generate)
- pkg_get_variable(INTROSPECTION_GIRDIR gobject-introspection-1.0 girdir)
- pkg_get_variable(INTROSPECTION_TYPELIBDIR gobject-introspection-1.0 typelibdir)
-+ set(INTROSPECTION_SCANNER "$ENV{PKG_CONFIG_SYSROOT_DIR}${INTROSPECTION_SCANNER}")
-+ set(INTROSPECTION_COMPILER "$ENV{PKG_CONFIG_SYSROOT_DIR}${INTROSPECTION_COMPILER}")
-+ set(INTROSPECTION_GENERATE "$ENV{PKG_CONFIG_SYSROOT_DIR}${INTROSPECTION_GENERATE}")
- set(INTROSPECTION_VERSION "${_pc_gir_VERSION}")
- if (${INTROSPECTION_VERSION} VERSION_GREATER_EQUAL "1.59.1")
- set(INTROSPECTION_HAVE_SOURCES_TOP_DIRS YES)
diff --git a/meta/recipes-sato/webkit/webkitgtk/0001-Fix-build-without-opengl-or-es.patch b/meta/recipes-sato/webkit/webkitgtk/0001-Fix-build-without-opengl-or-es.patch
deleted file mode 100644
index 09256951f4..0000000000
--- a/meta/recipes-sato/webkit/webkitgtk/0001-Fix-build-without-opengl-or-es.patch
+++ /dev/null
@@ -1,133 +0,0 @@
-From 646e347c173dbb9782492ac7cb4f54b65533ba90 Mon Sep 17 00:00:00 2001
-From: Martin Jansa <Martin.Jansa@gmail.com>
-Date: Sun, 17 Oct 2021 20:49:21 +0000
-Subject: [PATCH] Fix build without opengl-or-es
-
-* fix build failure when opengl-or-es is disabled:
- In file included from /OE/build/oe-core/tmp-glibc/work/core2-64-oe-linux/webkitgtk/2.34.0-r0/webkitgtk-2.34.0/Source/WebKit/WebProcess/WebPage/CoordinatedGraphics/LayerTreeHost.cpp:30,
- from /OE/build/oe-core/tmp-glibc/work/core2-64-oe-linux/webkitgtk/2.34.0-r0/build/DerivedSources/WebKit/unified-sources/UnifiedSource-54928a2b-36.cpp:1:
- /OE/build/oe-core/tmp-glibc/work/core2-64-oe-linux/webkitgtk/2.34.0-r0/webkitgtk-2.34.0/Source/WebKit/WebProcess/WebPage/CoordinatedGraphics/LayerTreeHost.h:41:10: fatal error: WebCore/CoordinatedGraphicsLayer.h: No such file or directory
- 41 | #include <WebCore/CoordinatedGraphicsLayer.h>
- | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
- compilation terminated.
-
-* the CoordinatedGraphicsLayer.h header installation is controled by
- USE_COORDINATED_GRAPHICS in webkitgtk-2.34.0/Source/WebCore/platform/TextureMapper.cmake
- but in Source/cmake/OptionsGTK.cmake USE_COORDINATED_GRAPHICS was enabled only inside
- if (USE_OPENGL_OR_ES)
-
-Upstream-Status: Submitted [https://bugs.webkit.org/show_bug.cgi?id=232934]
----
- .../DrawingAreaProxyCoordinatedGraphics.cpp | 2 +-
- .../DrawingAreaProxyCoordinatedGraphics.h | 2 +-
- .../CoordinatedGraphics/DrawingAreaCoordinatedGraphics.cpp | 2 +-
- .../CoordinatedGraphics/DrawingAreaCoordinatedGraphics.h | 2 +-
- .../WebPage/CoordinatedGraphics/LayerTreeHost.cpp | 4 ++--
- .../WebProcess/WebPage/CoordinatedGraphics/LayerTreeHost.h | 6 +++---
- 6 files changed, 9 insertions(+), 9 deletions(-)
-
-diff --git a/Source/WebKit/UIProcess/CoordinatedGraphics/DrawingAreaProxyCoordinatedGraphics.cpp b/Source/WebKit/UIProcess/CoordinatedGraphics/DrawingAreaProxyCoordinatedGraphics.cpp
-index 038d9ee2..5e828a10 100644
---- a/Source/WebKit/UIProcess/CoordinatedGraphics/DrawingAreaProxyCoordinatedGraphics.cpp
-+++ b/Source/WebKit/UIProcess/CoordinatedGraphics/DrawingAreaProxyCoordinatedGraphics.cpp
-@@ -152,7 +152,7 @@ void DrawingAreaProxyCoordinatedGraphics::setBackingStoreIsDiscardable(bool isBa
- #endif
- }
-
--#if PLATFORM(GTK)
-+#if PLATFORM(GTK) && USE(COORDINATED_GRAPHICS)
- void DrawingAreaProxyCoordinatedGraphics::adjustTransientZoom(double scale, FloatPoint origin)
- {
- send(Messages::DrawingArea::AdjustTransientZoom(scale, origin));
-diff --git a/Source/WebKit/UIProcess/CoordinatedGraphics/DrawingAreaProxyCoordinatedGraphics.h b/Source/WebKit/UIProcess/CoordinatedGraphics/DrawingAreaProxyCoordinatedGraphics.h
-index b23a45ff..cd263402 100644
---- a/Source/WebKit/UIProcess/CoordinatedGraphics/DrawingAreaProxyCoordinatedGraphics.h
-+++ b/Source/WebKit/UIProcess/CoordinatedGraphics/DrawingAreaProxyCoordinatedGraphics.h
-@@ -57,7 +57,7 @@ private:
- void waitForBackingStoreUpdateOnNextPaint() override;
- void setBackingStoreIsDiscardable(bool) override;
-
--#if PLATFORM(GTK)
-+#if PLATFORM(GTK) && USE(COORDINATED_GRAPHICS)
- void adjustTransientZoom(double scale, WebCore::FloatPoint origin) override;
- void commitTransientZoom(double scale, WebCore::FloatPoint origin) override;
- #endif
-diff --git a/Source/WebKit/WebProcess/WebPage/CoordinatedGraphics/DrawingAreaCoordinatedGraphics.cpp b/Source/WebKit/WebProcess/WebPage/CoordinatedGraphics/DrawingAreaCoordinatedGraphics.cpp
-index 33ac2e1d..42375784 100644
---- a/Source/WebKit/WebProcess/WebPage/CoordinatedGraphics/DrawingAreaCoordinatedGraphics.cpp
-+++ b/Source/WebKit/WebProcess/WebPage/CoordinatedGraphics/DrawingAreaCoordinatedGraphics.cpp
-@@ -486,7 +486,7 @@ void DrawingAreaCoordinatedGraphics::didUpdate()
- displayTimerFired();
- }
-
--#if PLATFORM(GTK)
-+#if PLATFORM(GTK) && USE(COORDINATED_GRAPHICS)
- void DrawingAreaCoordinatedGraphics::adjustTransientZoom(double scale, FloatPoint origin)
- {
- if (!m_transientZoom) {
-diff --git a/Source/WebKit/WebProcess/WebPage/CoordinatedGraphics/DrawingAreaCoordinatedGraphics.h b/Source/WebKit/WebProcess/WebPage/CoordinatedGraphics/DrawingAreaCoordinatedGraphics.h
-index d8dc6df7..c8322364 100644
---- a/Source/WebKit/WebProcess/WebPage/CoordinatedGraphics/DrawingAreaCoordinatedGraphics.h
-+++ b/Source/WebKit/WebProcess/WebPage/CoordinatedGraphics/DrawingAreaCoordinatedGraphics.h
-@@ -84,7 +84,7 @@ private:
- void updateBackingStoreState(uint64_t backingStoreStateID, bool respondImmediately, float deviceScaleFactor, const WebCore::IntSize&, const WebCore::IntSize& scrollOffset) override;
- void didUpdate() override;
-
--#if PLATFORM(GTK)
-+#if PLATFORM(GTK) && USE(COORDINATED_GRAPHICS)
- void adjustTransientZoom(double scale, WebCore::FloatPoint origin) override;
- void commitTransientZoom(double scale, WebCore::FloatPoint origin) override;
- #endif
-diff --git a/Source/WebKit/WebProcess/WebPage/CoordinatedGraphics/LayerTreeHost.cpp b/Source/WebKit/WebProcess/WebPage/CoordinatedGraphics/LayerTreeHost.cpp
-index f3304d10..ca0476ff 100644
---- a/Source/WebKit/WebProcess/WebPage/CoordinatedGraphics/LayerTreeHost.cpp
-+++ b/Source/WebKit/WebProcess/WebPage/CoordinatedGraphics/LayerTreeHost.cpp
-@@ -156,7 +156,7 @@ void LayerTreeHost::layerFlushTimerFired()
-
- bool didSync = m_coordinator.flushPendingLayerChanges(flags);
-
--#if PLATFORM(GTK)
-+#if PLATFORM(GTK) && USE(COORDINATED_GRAPHICS)
- // If we have an active transient zoom, we want the zoom to win over any changes
- // that WebCore makes to the relevant layers, so re-apply our changes after flushing.
- if (m_transientZoom)
-@@ -453,7 +453,7 @@ void LayerTreeHost::renderNextFrame(bool forceRepaint)
- }
- }
-
--#if PLATFORM(GTK)
-+#if PLATFORM(GTK) && USE(COORDINATED_GRAPHICS)
- FloatPoint LayerTreeHost::constrainTransientZoomOrigin(double scale, FloatPoint origin) const
- {
- FrameView& frameView = *m_webPage.mainFrameView();
-diff --git a/Source/WebKit/WebProcess/WebPage/CoordinatedGraphics/LayerTreeHost.h b/Source/WebKit/WebProcess/WebPage/CoordinatedGraphics/LayerTreeHost.h
-index 4f727e41..b070266e 100644
---- a/Source/WebKit/WebProcess/WebPage/CoordinatedGraphics/LayerTreeHost.h
-+++ b/Source/WebKit/WebProcess/WebPage/CoordinatedGraphics/LayerTreeHost.h
-@@ -37,7 +37,7 @@
- #include <wtf/Forward.h>
- #include <wtf/OptionSet.h>
- #include <wtf/RunLoop.h>
--#if PLATFORM(GTK)
-+#if PLATFORM(GTK) && USE(COORDINATED_GRAPHICS)
- #include <WebCore/CoordinatedGraphicsLayer.h>
- #endif
-
-@@ -100,7 +100,7 @@ public:
-
- WebCore::PlatformDisplayID displayID() const { return m_displayID; }
-
--#if PLATFORM(GTK)
-+#if PLATFORM(GTK) && USE(COORDINATED_GRAPHICS)
- void adjustTransientZoom(double, WebCore::FloatPoint);
- void commitTransientZoom(double, WebCore::FloatPoint);
- #endif
-@@ -213,7 +213,7 @@ private:
- #endif // USE(COORDINATED_GRAPHICS)
- WebCore::PlatformDisplayID m_displayID;
-
--#if PLATFORM(GTK)
-+#if PLATFORM(GTK) && USE(COORDINATED_GRAPHICS)
- bool m_transientZoom { false };
- double m_transientZoomScale { 1 };
- WebCore::FloatPoint m_transientZoomOrigin;
diff --git a/meta/recipes-sato/webkit/webkitgtk/0001-Tweak-gtkdoc-settings-so-that-gtkdoc-generation-work.patch b/meta/recipes-sato/webkit/webkitgtk/0001-Tweak-gtkdoc-settings-so-that-gtkdoc-generation-work.patch
deleted file mode 100644
index 866e9d9d09..0000000000
--- a/meta/recipes-sato/webkit/webkitgtk/0001-Tweak-gtkdoc-settings-so-that-gtkdoc-generation-work.patch
+++ /dev/null
@@ -1,47 +0,0 @@
-From 1c7e7a385387d7febf633bbb6d2b99ece523e719 Mon Sep 17 00:00:00 2001
-From: Alexander Kanavin <alex.kanavin@gmail.com>
-Date: Thu, 11 Aug 2016 17:13:51 +0300
-Subject: [PATCH] Tweak gtkdoc settings so that gtkdoc generation works under
- OpenEmbedded build system
-
-This requires setting a few environment variables so that the transient
-binary is build and linked correctly, and disabling the tweaks to RUN
-variable from gtkdoc.py script so that our qemu wrapper is taken into use.
-
-Upstream-Status: Inappropriate [oe-specific]
-Signed-off-by: Alexander Kanavin <alex.kanavin@gmail.com>
-
----
- Source/cmake/GtkDoc.cmake | 2 +-
- Tools/gtkdoc/gtkdoc.py | 4 ++--
- 2 files changed, 3 insertions(+), 3 deletions(-)
-
-diff --git a/Source/cmake/GtkDoc.cmake b/Source/cmake/GtkDoc.cmake
-index 18e86448..102c873a 100644
---- a/Source/cmake/GtkDoc.cmake
-+++ b/Source/cmake/GtkDoc.cmake
-@@ -4,7 +4,7 @@ macro(ADD_GTKDOC_GENERATOR _stamp_name _extra_args)
- add_custom_command(
- OUTPUT "${CMAKE_BINARY_DIR}/${_stamp_name}"
- DEPENDS ${DocumentationDependencies}
-- COMMAND ${CMAKE_COMMAND} -E env "CC=${CMAKE_C_COMPILER}" "CFLAGS=${CMAKE_C_FLAGS} -Wno-unused-parameter" "LDFLAGS=${CMAKE_EXE_LINKER_FLAGS}" ${PYTHON_EXECUTABLE} ${CMAKE_SOURCE_DIR}/Tools/gtkdoc/generate-gtkdoc ${_extra_args}
-+ COMMAND ${CMAKE_COMMAND} -E env "CC=${CMAKE_C_COMPILER}" "CFLAGS=${CMAKE_C_FLAGS} -Wno-unused-parameter" "LD=${CMAKE_C_COMPILER}" "LDFLAGS=${CMAKE_C_LINK_FLAGS}" "RUN=${CMAKE_BINARY_DIR}/gtkdoc-qemuwrapper" ${PYTHON_EXECUTABLE} ${CMAKE_SOURCE_DIR}/Tools/gtkdoc/generate-gtkdoc ${_extra_args}
- COMMAND touch ${_stamp_name}
- WORKING_DIRECTORY "${CMAKE_BINARY_DIR}"
- VERBATIM
-diff --git a/Tools/gtkdoc/gtkdoc.py b/Tools/gtkdoc/gtkdoc.py
-index 054cafa1..416de7d1 100644
---- a/Tools/gtkdoc/gtkdoc.py
-+++ b/Tools/gtkdoc/gtkdoc.py
-@@ -320,9 +320,9 @@ class GTKDoc(object):
- additional_ldflags = '%s %s' % (additional_ldflags, arg)
- ldflags = ' "-L%s" %s ' % (self.library_path, additional_ldflags) + ldflags
- current_ld_library_path = env.get('LD_LIBRARY_PATH')
-- if current_ld_library_path:
-+ if current_ld_library_path and 'RUN' not in env:
- env['LD_LIBRARY_PATH'] = '%s:%s' % (self.library_path, current_ld_library_path)
-- else:
-+ elif 'RUN' not in env:
- env['LD_LIBRARY_PATH'] = self.library_path
-
- if ldflags:
diff --git a/meta/recipes-sato/webkit/webkitgtk/0001-When-building-introspection-files-do-not-quote-CFLAG.patch b/meta/recipes-sato/webkit/webkitgtk/0001-When-building-introspection-files-do-not-quote-CFLAG.patch
deleted file mode 100644
index 3d02be9894..0000000000
--- a/meta/recipes-sato/webkit/webkitgtk/0001-When-building-introspection-files-do-not-quote-CFLAG.patch
+++ /dev/null
@@ -1,78 +0,0 @@
-From 01f5ab0edf3dd8f76d37fd36c2dcd3108be33a7b Mon Sep 17 00:00:00 2001
-From: Alexander Kanavin <alex@linutronix.de>
-Date: Mon, 4 Apr 2022 21:20:05 +0200
-Subject: [PATCH] When building introspection files, do not quote CFLAGS.
-
-This does not seem to be propagated to the compiler correctly:
-
-In file included from /srv/work/alex/poky/build-64-alt/tmp/work/core2-64-poky-linux/webkitgtk/2.36.0-r0/recipe-sysroot-native/usr/lib/x86_64-poky-linux/gcc/x86_64-poky-linux/11.2.0/include-fixed/syslimits.h:7,
- from /srv/work/alex/poky/build-64-alt/tmp/work/core2-64-poky-linux/webkitgtk/2.36.0-r0/recipe-sysroot-native/usr/lib/x86_64-poky-linux/gcc/x86_64-poky-linux/11.2.0/include-fixed/limits.h:34,
- from /srv/work/alex/poky/build-64-alt/tmp/work/core2-64-poky-linux/webkitgtk/2.36.0-r0/recipe-sysroot/usr/lib/glib-2.0/include/glibconfig.h:11,
- from /srv/work/alex/poky/build-64-alt/tmp/work/core2-64-poky-linux/webkitgtk/2.36.0-r0/recipe-sysroot/usr/include/glib-2.0/glib/gtypes.h:32,
- from /srv/work/alex/poky/build-64-alt/tmp/work/core2-64-poky-linux/webkitgtk/2.36.0-r0/recipe-sysroot/usr/include/glib-2.0/glib/galloca.h:32,
- from /srv/work/alex/poky/build-64-alt/tmp/work/core2-64-poky-linux/webkitgtk/2.36.0-r0/recipe-sysroot/usr/include/glib-2.0/glib.h:30,
- from /srv/work/alex/poky/build-64-alt/tmp/work/core2-64-poky-linux/webkitgtk/2.36.0-r0/build/Source/JavaScriptCore/tmp-introspectb51ks33n/JavaScriptCore-4.0.c:2:
-/srv/work/alex/poky/build-64-alt/tmp/work/core2-64-poky-linux/webkitgtk/2.36.0-r0/recipe-sysroot-native/usr/lib/x86_64-poky-linux/gcc/x86_64-poky-linux/11.2.0/include-fixed/limits.h:203:75: error: no include path in which to search for limits.h
- 203 | #include_next <limits.h> /* recurse down to the real one */
- | ^
-In file included from /srv/work/alex/poky/build-64-alt/tmp/work/core2-64-poky-linux/webkitgtk/2.36.0-r0/recipe-sysroot/usr/include/glib-2.0/glib/galloca.h:32,
- from /srv/work/alex/poky/build-64-alt/tmp/work/core2-64-poky-linux/webkitgtk/2.36.0-r0/recipe-sysroot/usr/include/glib-2.0/glib.h:30,
- from /srv/work/alex/poky/build-64-alt/tmp/work/core2-64-poky-linux/webkitgtk/2.36.0-r0/build/Source/JavaScriptCore/tmp-introspectb51ks33n/JavaScriptCore-4.0.c:2:
-/srv/work/alex/poky/build-64-alt/tmp/work/core2-64-poky-linux/webkitgtk/2.36.0-r0/recipe-sysroot/usr/include/glib-2.0/glib/gtypes.h:35:10: fatal error: time.h: No such file or directory
- 35 | #include <time.h>
- | ^~~~~~~~
-compilation terminated.
-Traceback (most recent call last):
- File "/srv/work/alex/poky/build-64-alt/tmp/work/core2-64-poky-linux/webkitgtk/2.36.0-r0/recipe-sysroot-native/usr/lib/python3.10/distutils/unixccompiler.py", line 117, in _compile
- self.spawn(compiler_so + cc_args + [src, '-o', obj] +
- File "/srv/work/alex/poky/build-64-alt/tmp/work/core2-64-poky-linux/webkitgtk/2.36.0-r0/recipe-sysroot-native/usr/lib/python3.10/distutils/ccompiler.py", line 910, in spawn
- spawn(cmd, dry_run=self.dry_run)
- File "/srv/work/alex/poky/build-64-alt/tmp/work/core2-64-poky-linux/webkitgtk/2.36.0-r0/recipe-sysroot-native/usr/lib/python3.10/distutils/spawn.py", line 91, in spawn
- raise DistutilsExecError(
-distutils.errors.DistutilsExecError: command '/srv/work/alex/poky/build-64-alt/tmp/work/core2-64-poky-linux/webkitgtk/2.36.0-r0/recipe-sysroot-native/usr/bin/x86_64-poky-linux/x86_64-poky-linux-gcc' failed with exit code 1
-
-Upstream-Status: Inappropriate [upstream has rewritten the whole thing as of https://github.com/WebKit/WebKit/commit/b0ae032850bb6b2672051bab8032fc9f9ef5eb97]
-Signed-off-by: Alexander Kanavin <alex@linutronix.de>
----
- Source/JavaScriptCore/PlatformGTK.cmake | 2 +-
- Source/WebKit/PlatformGTK.cmake | 4 ++--
- 2 files changed, 3 insertions(+), 3 deletions(-)
-
-diff --git a/Source/JavaScriptCore/PlatformGTK.cmake b/Source/JavaScriptCore/PlatformGTK.cmake
-index 1c3c8fb7..efd18807 100644
---- a/Source/JavaScriptCore/PlatformGTK.cmake
-+++ b/Source/JavaScriptCore/PlatformGTK.cmake
-@@ -71,7 +71,7 @@ if (ENABLE_INTROSPECTION)
- add_custom_command(
- OUTPUT ${CMAKE_BINARY_DIR}/JavaScriptCore-${WEBKITGTK_API_VERSION}.gir
- DEPENDS JavaScriptCore
-- COMMAND CC=${CMAKE_C_COMPILER} CFLAGS="-Wno-deprecated-declarations ${CMAKE_C_FLAGS}" LDFLAGS=
-+ COMMAND CC=${CMAKE_C_COMPILER} CFLAGS=-Wno-deprecated-declarations\ ${CMAKE_C_FLAGS} LDFLAGS=
- ${LOADER_LIBRARY_PATH_VAR}="${INTROSPECTION_ADDITIONAL_LIBRARY_PATH}"
- ${INTROSPECTION_SCANNER}
- --quiet
-diff --git a/Source/WebKit/PlatformGTK.cmake b/Source/WebKit/PlatformGTK.cmake
-index 48d1820d..ded05035 100644
---- a/Source/WebKit/PlatformGTK.cmake
-+++ b/Source/WebKit/PlatformGTK.cmake
-@@ -675,7 +675,7 @@ if (ENABLE_INTROSPECTION)
- add_custom_target(WebKit2-${WEBKITGTK_API_VERSION}-gir
- DEPENDS WebKit
- DEPENDS ${CMAKE_BINARY_DIR}/JavaScriptCore-${WEBKITGTK_API_VERSION}.gir
-- COMMAND CC=${CMAKE_C_COMPILER} CFLAGS="-Wno-deprecated-declarations ${CMAKE_C_FLAGS}" LDFLAGS=
-+ COMMAND CC=${CMAKE_C_COMPILER} CFLAGS=-Wno-deprecated-declarations\ ${CMAKE_C_FLAGS} LDFLAGS=
- ${LOADER_LIBRARY_PATH_VAR}="${INTROSPECTION_ADDITIONAL_LIBRARY_PATH}"
- ${INTROSPECTION_SCANNER}
- --quiet
-@@ -721,7 +721,7 @@ if (ENABLE_INTROSPECTION)
- OUTPUT ${CMAKE_BINARY_DIR}/WebKit2WebExtension-${WEBKITGTK_API_VERSION}.gir
- DEPENDS ${CMAKE_BINARY_DIR}/JavaScriptCore-${WEBKITGTK_API_VERSION}.gir
- DEPENDS WebKit2-${WEBKITGTK_API_VERSION}-gir
-- COMMAND CC=${CMAKE_C_COMPILER} CFLAGS="-Wno-deprecated-declarations ${CMAKE_C_FLAGS}"
-+ COMMAND CC=${CMAKE_C_COMPILER} CFLAGS=-Wno-deprecated-declarations\ ${CMAKE_C_FLAGS}
- LDFLAGS="${INTROSPECTION_ADDITIONAL_LDFLAGS}"
- ${LOADER_LIBRARY_PATH_VAR}="${INTROSPECTION_ADDITIONAL_LIBRARY_PATH}"
- ${INTROSPECTION_SCANNER}
---
-2.30.2
-
diff --git a/meta/recipes-sato/webkit/webkitgtk/2922af379dc70b4b1a63b01d67179eb431f03ac4.patch b/meta/recipes-sato/webkit/webkitgtk/2922af379dc70b4b1a63b01d67179eb431f03ac4.patch
new file mode 100644
index 0000000000..3067500447
--- /dev/null
+++ b/meta/recipes-sato/webkit/webkitgtk/2922af379dc70b4b1a63b01d67179eb431f03ac4.patch
@@ -0,0 +1,38 @@
+From 2922af379dc70b4b1a63b01d67179eb431f03ac4 Mon Sep 17 00:00:00 2001
+From: Michael Catanzaro <mcatanzaro@redhat.com>
+Date: Mon, 18 Mar 2024 11:14:54 -0700
+Subject: [PATCH] REGRESSION(274077@main): failure to build on i586 (and likely
+ other 32bit arches): static assertion failed: Timer should stay small
+ https://bugs.webkit.org/show_bug.cgi?id=271108
+
+Unreviewed build fix. This changes SameSizeOfTimer to ensure it matches
+the size of Timer on 32-bit platforms.
+
+* Source/WebCore/platform/Timer.cpp:
+
+Canonical link: https://commits.webkit.org/276282@main
+
+Upstream-Status: Backport [https://github.com/WebKit/WebKit/commit/2922af379dc70b4b1a63b01d67179eb431f03ac4]
+
+Signed-off-by: Markus Volk <f_l_k@t-online.de>
+---
+ Source/WebCore/platform/Timer.cpp | 6 +++++-
+ 1 file changed, 5 insertions(+), 1 deletion(-)
+
+diff --git a/Source/WebCore/platform/Timer.cpp b/Source/WebCore/platform/Timer.cpp
+index 4f7c0f5c39ca9..0f3734cca2474 100644
+--- a/Source/WebCore/platform/Timer.cpp
++++ b/Source/WebCore/platform/Timer.cpp
+@@ -263,7 +263,11 @@ struct SameSizeAsTimer {
+
+ WeakPtr<TimerAlignment> timerAlignment;
+ double times[2];
+- void* pointers[3];
++ void* pointers[2];
++#if CPU(ADDRESS32)
++ uint8_t bitfields;
++#endif
++ void* pointer;
+ };
+
+ static_assert(sizeof(Timer) == sizeof(SameSizeAsTimer), "Timer should stay small");
diff --git a/meta/recipes-sato/webkit/webkitgtk/30e1d5e22213fdaca2a29ec3400c927d710a37a8.patch b/meta/recipes-sato/webkit/webkitgtk/30e1d5e22213fdaca2a29ec3400c927d710a37a8.patch
new file mode 100644
index 0000000000..76bcb3df99
--- /dev/null
+++ b/meta/recipes-sato/webkit/webkitgtk/30e1d5e22213fdaca2a29ec3400c927d710a37a8.patch
@@ -0,0 +1,67 @@
+From 1523e00a2a76e285262c8aa3721b5d99f3f2d612 Mon Sep 17 00:00:00 2001
+From: Thomas Devoogdt <thomas.devoogdt@barco.com>
+Date: Mon, 16 Jan 2023 17:03:30 +0100
+Subject: [PATCH] REGRESSION(257865@main): B3Validate.cpp: fix
+
+ !ENABLE(WEBASSEMBLY_B3JIT)
+
+https://bugs.webkit.org/show_bug.cgi?id=250681
+
+Reviewed by NOBODY (OOPS!).
+
+WasmTypeDefinition.h isn't included if not ENABLE(WEBASSEMBLY_B3JIT).
+Also, toB3Type and simdScalarType are not defined if it is included.
+
+Signed-off-by: Thomas Devoogdt <thomas.devoogdt@barco.com>
+
+Upstream-Status: Inappropriate [https://bugs.launchpad.net/ubuntu/+source/webkit2gtk/+bug/2008798]
+
+Signed-off-by: Markus Volk <f_l_k@t-online.de>
+---
+ Source/JavaScriptCore/b3/B3Validate.cpp | 12 +++++++++---
+ 1 file changed, 9 insertions(+), 3 deletions(-)
+
+diff --git a/Source/JavaScriptCore/b3/B3Validate.cpp b/Source/JavaScriptCore/b3/B3Validate.cpp
+index eaaa3749..1d089783 100644
+--- a/Source/JavaScriptCore/b3/B3Validate.cpp
++++ b/Source/JavaScriptCore/b3/B3Validate.cpp
+@@ -47,6 +47,12 @@
+ #include <wtf/StringPrintStream.h>
+ #include <wtf/text/CString.h>
+
++#if ENABLE(WEBASSEMBLY) && ENABLE(WEBASSEMBLY_B3JIT)
++#define simdScalarTypeToB3Type(type) toB3Type(Wasm::simdScalarType(type))
++#else
++#define simdScalarTypeToB3Type(type) B3::Type()
++#endif
++
+ namespace JSC { namespace B3 {
+
+ namespace {
+@@ -454,7 +460,7 @@ public:
+ case VectorExtractLane:
+ VALIDATE(!value->kind().hasExtraBits(), ("At ", *value));
+ VALIDATE(value->numChildren() == 1, ("At ", *value));
+- VALIDATE(value->type() == toB3Type(Wasm::simdScalarType(value->asSIMDValue()->simdLane())), ("At ", *value));
++ VALIDATE(value->type() == simdScalarTypeToB3Type(value->asSIMDValue()->simdLane()), ("At ", *value));
+ VALIDATE(value->child(0)->type() == V128, ("At ", *value));
+ break;
+ case VectorReplaceLane:
+@@ -462,7 +468,7 @@ public:
+ VALIDATE(value->numChildren() == 2, ("At ", *value));
+ VALIDATE(value->type() == V128, ("At ", *value));
+ VALIDATE(value->child(0)->type() == V128, ("At ", *value));
+- VALIDATE(value->child(1)->type() == toB3Type(Wasm::simdScalarType(value->asSIMDValue()->simdLane())), ("At ", *value));
++ VALIDATE(value->child(1)->type() == simdScalarTypeToB3Type(value->asSIMDValue()->simdLane()), ("At ", *value));
+ break;
+ case VectorDupElement:
+ VALIDATE(!value->kind().hasExtraBits(), ("At ", *value));
+@@ -484,7 +490,7 @@ public:
+ VALIDATE(!value->kind().hasExtraBits(), ("At ", *value));
+ VALIDATE(value->numChildren() == 1, ("At ", *value));
+ VALIDATE(value->type() == V128, ("At ", *value));
+- VALIDATE(value->child(0)->type() == toB3Type(Wasm::simdScalarType(value->asSIMDValue()->simdLane())), ("At ", *value));
++ VALIDATE(value->child(0)->type() == simdScalarTypeToB3Type(value->asSIMDValue()->simdLane()), ("At ", *value));
+ break;
+
+ case VectorPopcnt:
diff --git a/meta/recipes-sato/webkit/webkitgtk/no-musttail-arm.patch b/meta/recipes-sato/webkit/webkitgtk/no-musttail-arm.patch
new file mode 100644
index 0000000000..8ce37a01cc
--- /dev/null
+++ b/meta/recipes-sato/webkit/webkitgtk/no-musttail-arm.patch
@@ -0,0 +1,30 @@
+From a9c874f7418cefbe78f7cd26505ae495cb59bbcf Mon Sep 17 00:00:00 2001
+From: Khem Raj <raj.khem@gmail.com>
+Date: Fri, 12 Jan 2024 09:21:39 -0800
+Subject: [PATCH] clang/arm: Do not use MUST_TAIL_CALL
+
+This causes clang-17 to crash see [1]
+this code is new in webkit 2.42[2] thats why we do not see the crash in older webkit
+
+[1] https://github.com/llvm/llvm-project/issues/67767
+[2] https://github.com/WebKit/WebKit/commit/4d816460b765acd8aef90ab474615850b91ecc35
+
+Upstream-Status: Inappropriate [work around to avoid clang compiler crash]
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+---
+ Source/WTF/wtf/Compiler.h | 2 +-
+ 1 file changed, 1 insertion(+), 1 deletion(-)
+
+diff --git a/Source/WTF/wtf/Compiler.h b/Source/WTF/wtf/Compiler.h
+index 0ea5cb76..c5480dbc 100644
+--- a/Source/WTF/wtf/Compiler.h
++++ b/Source/WTF/wtf/Compiler.h
+@@ -284,7 +284,7 @@
+ /* MUST_TAIL_CALL */
+
+ #if !defined(MUST_TAIL_CALL) && defined(__cplusplus) && defined(__has_cpp_attribute)
+-#if __has_cpp_attribute(clang::musttail)
++#if __has_cpp_attribute(clang::musttail) && !defined(__arm__)
+ #define MUST_TAIL_CALL [[clang::musttail]]
+ #endif
+ #endif
diff --git a/meta/recipes-sato/webkit/webkitgtk/reproducibility.patch b/meta/recipes-sato/webkit/webkitgtk/reproducibility.patch
index e866a1a193..93a431a0b1 100644
--- a/meta/recipes-sato/webkit/webkitgtk/reproducibility.patch
+++ b/meta/recipes-sato/webkit/webkitgtk/reproducibility.patch
@@ -1,3 +1,8 @@
+From d096b945113ddecaf33062296e20b6d5a007cab3 Mon Sep 17 00:00:00 2001
+From: Richard Purdie <richard.purdie@linuxfoundation.org>
+Date: Mon, 3 Jan 2022 14:18:34 +0000
+Subject: [PATCH] webkitgtk: Add reproducibility fix
+
Injection a year based on the current date isn't reproducible. Hack this
to a specific year for now for reproducibilty and to avoid autobuilder failures.
@@ -6,11 +11,14 @@ then this could be submitted upstream, sadly my ruby isn't up to that.
Upstream-Status: Pending [could be reworked]
Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
+---
+ Source/JavaScriptCore/generator/GeneratedFile.rb | 2 +-
+ 1 file changed, 1 insertion(+), 1 deletion(-)
-Index: webkitgtk-2.34.2/Source/JavaScriptCore/generator/GeneratedFile.rb
-===================================================================
---- webkitgtk-2.34.2.orig/Source/JavaScriptCore/generator/GeneratedFile.rb
-+++ webkitgtk-2.34.2/Source/JavaScriptCore/generator/GeneratedFile.rb
+diff --git a/Source/JavaScriptCore/generator/GeneratedFile.rb b/Source/JavaScriptCore/generator/GeneratedFile.rb
+index 6ed2b6e4..86a28286 100644
+--- a/Source/JavaScriptCore/generator/GeneratedFile.rb
++++ b/Source/JavaScriptCore/generator/GeneratedFile.rb
@@ -25,7 +25,7 @@ require 'date'
require 'digest'
diff --git a/meta/recipes-sato/webkit/webkitgtk/t6-not-declared.patch b/meta/recipes-sato/webkit/webkitgtk/t6-not-declared.patch
new file mode 100644
index 0000000000..d4720e4f28
--- /dev/null
+++ b/meta/recipes-sato/webkit/webkitgtk/t6-not-declared.patch
@@ -0,0 +1,37 @@
+From 3d5373575695b293b8559155431d0079a6153aff Mon Sep 17 00:00:00 2001
+From: Michael Catanzaro <mcatanzaro@redhat.com>
+Date: Mon, 5 Feb 2024 11:00:49 -0600
+Subject: [PATCH] =?UTF-8?q?[GTK]=20[2.42.5]=20LowLevelInterpreter.cpp:339:?=
+ =?UTF-8?q?21:=20error:=20=E2=80=98t6=E2=80=99=20was=20not=20declared=20in?=
+ =?UTF-8?q?=20this=20scope=20https://bugs.webkit.org/show=5Fbug.cgi=3Fid?=
+ =?UTF-8?q?=3D268739?=
+MIME-Version: 1.0
+Content-Type: text/plain; charset=UTF-8
+Content-Transfer-Encoding: 8bit
+
+Unreviewed build fix. Seems a backport went badly, and we didn't notice
+because the code is architecture-specific.
+
+* Source/JavaScriptCore/llint/LowLevelInterpreter.cpp:
+(JSC::CLoop::execute):
+
+Upstream-Status: Backport [https://github.com/WebKit/WebKit/commit/3d5373575695b293b8559155431d0079a6153aff]
+Signed-off-by: Alexander Kanavin <alex.kanavin@gmail.com>
+---
+ Source/JavaScriptCore/llint/LowLevelInterpreter.cpp | 2 --
+ 1 file changed, 2 deletions(-)
+
+diff --git a/Source/JavaScriptCore/llint/LowLevelInterpreter.cpp b/Source/JavaScriptCore/llint/LowLevelInterpreter.cpp
+index 5064ead6cd2e7..9a2e2653b1219 100644
+--- a/Source/JavaScriptCore/llint/LowLevelInterpreter.cpp
++++ b/Source/JavaScriptCore/llint/LowLevelInterpreter.cpp
+@@ -336,8 +336,6 @@ JSValue CLoop::execute(OpcodeID entryOpcodeID, void* executableAddress, VM* vm,
+ UNUSED_VARIABLE(t2);
+ UNUSED_VARIABLE(t3);
+ UNUSED_VARIABLE(t5);
+- UNUSED_VARIABLE(t6);
+- UNUSED_VARIABLE(t7);
+
+ struct StackPointerScope {
+ StackPointerScope(CLoopStack& stack)
+
diff --git a/meta/recipes-sato/webkit/webkitgtk_2.36.1.bb b/meta/recipes-sato/webkit/webkitgtk_2.36.1.bb
deleted file mode 100644
index 65757c36a7..0000000000
--- a/meta/recipes-sato/webkit/webkitgtk_2.36.1.bb
+++ /dev/null
@@ -1,167 +0,0 @@
-SUMMARY = "WebKit web rendering engine for the GTK+ platform"
-HOMEPAGE = "https://www.webkitgtk.org/"
-BUGTRACKER = "https://bugs.webkit.org/"
-
-LICENSE = "BSD-2-Clause & LGPL-2.0-or-later"
-LIC_FILES_CHKSUM = "file://Source/JavaScriptCore/COPYING.LIB;md5=d0c6d6397a5d84286dda758da57bd691 \
- file://Source/WebCore/LICENSE-APPLE;md5=4646f90082c40bcf298c285f8bab0b12 \
- file://Source/WebCore/LICENSE-LGPL-2;md5=36357ffde2b64ae177b2494445b79d21 \
- file://Source/WebCore/LICENSE-LGPL-2.1;md5=a778a33ef338abbaf8b8a7c36b6eec80 \
- "
-
-SRC_URI = "https://www.webkitgtk.org/releases/${BPN}-${PV}.tar.xz \
- file://0001-FindGObjectIntrospection.cmake-prefix-variables-obta.patch \
- file://0001-Tweak-gtkdoc-settings-so-that-gtkdoc-generation-work.patch \
- file://0001-Fix-build-without-opengl-or-es.patch \
- file://reproducibility.patch \
- file://0001-When-building-introspection-files-do-not-quote-CFLAG.patch \
- "
-
-SRC_URI[sha256sum] = "0149ea5fb1d20f2a9981677d45c952a047330001ea24a8dc29035239f12c0c8f"
-
-inherit cmake pkgconfig gobject-introspection perlnative features_check upstream-version-is-even gtk-doc
-
-ANY_OF_DISTRO_FEATURES = "${GTK3DISTROFEATURES}"
-REQUIRED_DISTRO_FEATURES = "${@bb.utils.contains('DISTRO_FEATURES', 'wayland', 'opengl', '', d)}"
-
-CVE_PRODUCT = "webkitgtk webkitgtk\+"
-
-DEPENDS = " \
- ruby-native \
- gperf-native \
- cairo \
- harfbuzz \
- jpeg \
- atk \
- libwebp \
- gtk+3 \
- libxslt \
- libtasn1 \
- libnotify \
- gstreamer1.0 \
- gstreamer1.0-plugins-base \
- "
-
-PACKAGECONFIG_SOUP ?= "soup2"
-PACKAGECONFIG ??= "${@bb.utils.filter('DISTRO_FEATURES', 'systemd wayland x11', d)} \
- ${@bb.utils.contains('DISTRO_FEATURES', 'x11 opengl', 'webgl opengl', '', d)} \
- ${@bb.utils.contains('DISTRO_FEATURES', 'x11', '', 'webgl gles2', d)} \
- ${@bb.utils.contains('DISTRO_FEATURES', 'opengl', 'opengl-or-es', '', d)} \
- enchant \
- libsecret \
- ${PACKAGECONFIG_SOUP} \
- "
-
-PACKAGECONFIG[wayland] = "-DENABLE_WAYLAND_TARGET=ON,-DENABLE_WAYLAND_TARGET=OFF,wayland libwpe wpebackend-fdo wayland-native"
-PACKAGECONFIG[angle] = "-DUSE_ANGLE_WEBGL=ON,-DUSE_ANGLE_WEBGL=OFF"
-PACKAGECONFIG[x11] = "-DENABLE_X11_TARGET=ON,-DENABLE_X11_TARGET=OFF,virtual/libx11 libxcomposite libxdamage libxrender libxt"
-PACKAGECONFIG[geoclue] = "-DENABLE_GEOLOCATION=ON,-DENABLE_GEOLOCATION=OFF,geoclue"
-PACKAGECONFIG[enchant] = "-DENABLE_SPELLCHECK=ON,-DENABLE_SPELLCHECK=OFF,enchant2"
-PACKAGECONFIG[gles2] = "-DENABLE_GLES2=ON,-DENABLE_GLES2=OFF,virtual/libgles2"
-PACKAGECONFIG[webgl] = "-DENABLE_WEBGL=ON,-DENABLE_WEBGL=OFF,virtual/egl"
-PACKAGECONFIG[opengl] = "-DENABLE_GRAPHICS_CONTEXT_GL=ON,-DENABLE_GRAPHICS_CONTEXT_GL=OFF,virtual/egl"
-PACKAGECONFIG[opengl-or-es] = "-DUSE_OPENGL_OR_ES=ON,-DUSE_OPENGL_OR_ES=OFF"
-PACKAGECONFIG[libsecret] = "-DUSE_LIBSECRET=ON,-DUSE_LIBSECRET=OFF,libsecret"
-PACKAGECONFIG[libhyphen] = "-DUSE_LIBHYPHEN=ON,-DUSE_LIBHYPHEN=OFF,libhyphen"
-PACKAGECONFIG[woff2] = "-DUSE_WOFF2=ON,-DUSE_WOFF2=OFF,woff2"
-PACKAGECONFIG[openjpeg] = "-DUSE_OPENJPEG=ON,-DUSE_OPENJPEG=OFF,openjpeg"
-PACKAGECONFIG[systemd] = "-DUSE_SYSTEMD=ON,-DUSE_SYSTEMD=off,systemd"
-PACKAGECONFIG[reduce-size] = "-DCMAKE_BUILD_TYPE=MinSizeRel,-DCMAKE_BUILD_TYPE=Release,,"
-PACKAGECONFIG[lcms] = "-DUSE_LCMS=ON,-DUSE_LCMS=OFF,lcms"
-PACKAGECONFIG[soup2] = "-DUSE_SOUP2=ON,-DUSE_SOUP2=OFF,libsoup-2.4,,,soup3"
-PACKAGECONFIG[soup3] = ",,libsoup,,,soup2"
-PACKAGECONFIG[journald] = "-DENABLE_JOURNALD_LOG=ON,-DENABLE_JOURNALD_LOG=OFF,systemd"
-
-# webkitgtk is full of /usr/bin/env python, particular for generating docs
-do_configure[postfuncs] += "setup_python_link"
-setup_python_link() {
- if [ ! -e ${STAGING_BINDIR_NATIVE}/python ]; then
- ln -s `which python3` ${STAGING_BINDIR_NATIVE}/python
- fi
-}
-
-EXTRA_OECMAKE = " \
- -DPORT=GTK \
- ${@bb.utils.contains('GI_DATA_ENABLED', 'True', '-DENABLE_INTROSPECTION=ON', '-DENABLE_INTROSPECTION=OFF', d)} \
- ${@bb.utils.contains('GTKDOC_ENABLED', 'True', '-DENABLE_GTKDOC=ON', '-DENABLE_GTKDOC=OFF', d)} \
- -DENABLE_MINIBROWSER=ON \
- -DPYTHON_EXECUTABLE=`which python3` \
- -DENABLE_BUBBLEWRAP_SANDBOX=OFF \
- -DENABLE_GAMEPAD=OFF \
- "
-
-# Javascript JIT is not supported on ARC
-EXTRA_OECMAKE:append:arc = " -DENABLE_JIT=OFF "
-# By default 25-bit "medium" calls are used on ARC
-# which is not enough for binaries larger than 32 MiB
-CFLAGS:append:arc = " -mlong-calls"
-CXXFLAGS:append:arc = " -mlong-calls"
-
-# Needed for non-mesa graphics stacks when x11 is disabled
-CXXFLAGS += "${@bb.utils.contains('DISTRO_FEATURES', 'x11', '', '-DEGL_NO_X11=1', d)}"
-
-# Javascript JIT is not supported on powerpc
-EXTRA_OECMAKE:append:powerpc = " -DENABLE_JIT=OFF "
-EXTRA_OECMAKE:append:powerpc64 = " -DENABLE_JIT=OFF "
-
-# ARM JIT code does not build on ARMv4/5/6 anymore
-EXTRA_OECMAKE:append:armv5 = " -DENABLE_JIT=OFF "
-EXTRA_OECMAKE:append:armv6 = " -DENABLE_JIT=OFF "
-EXTRA_OECMAKE:append:armv4 = " -DENABLE_JIT=OFF "
-
-EXTRA_OECMAKE:append:mipsarch = " -DUSE_LD_GOLD=OFF "
-EXTRA_OECMAKE:append:powerpc = " -DUSE_LD_GOLD=OFF "
-
-# JIT and gold linker does not work on RISCV
-EXTRA_OECMAKE:append:riscv32 = " -DUSE_LD_GOLD=OFF -DENABLE_JIT=OFF"
-EXTRA_OECMAKE:append:riscv64 = " -DUSE_LD_GOLD=OFF -DENABLE_JIT=OFF"
-
-# JIT not supported on MIPS either
-EXTRA_OECMAKE:append:mipsarch = " -DENABLE_JIT=OFF -DENABLE_C_LOOP=ON "
-
-# JIT not supported on X32
-# An attempt was made to upstream JIT support for x32 in
-# https://bugs.webkit.org/show_bug.cgi?id=100450, but this was closed as
-# unresolved due to limited X32 adoption.
-EXTRA_OECMAKE:append:x86-x32 = " -DENABLE_JIT=OFF "
-
-SECURITY_CFLAGS:remove:aarch64 = "-fpie"
-SECURITY_CFLAGS:append:aarch64 = " -fPIE"
-
-FILES:${PN} += "${libdir}/webkit2gtk-4.*/injected-bundle/libwebkit2gtkinjectedbundle.so"
-
-RRECOMMENDS:${PN} += "ca-certificates shared-mime-info"
-
-# http://errors.yoctoproject.org/Errors/Details/20370/
-ARM_INSTRUCTION_SET:armv4 = "arm"
-ARM_INSTRUCTION_SET:armv5 = "arm"
-ARM_INSTRUCTION_SET:armv6 = "arm"
-
-# https://bugzilla.yoctoproject.org/show_bug.cgi?id=9474
-# https://bugs.webkit.org/show_bug.cgi?id=159880
-# JSC JIT can build on ARMv7 with -marm, but doesn't work on runtime.
-# Upstream only tests regularly the JSC JIT on ARMv7 with Thumb2 (-mthumb).
-ARM_INSTRUCTION_SET:armv7a = "thumb"
-ARM_INSTRUCTION_SET:armv7r = "thumb"
-ARM_INSTRUCTION_SET:armv7ve = "thumb"
-
-# introspection inside qemu-arm hangs forever on musl/arm builds
-# therefore disable GI_DATA
-GI_DATA_ENABLED:libc-musl:armv7a = "False"
-GI_DATA_ENABLED:libc-musl:armv7ve = "False"
-
-# Can't be built with ccache
-CCACHE_DISABLE = "1"
-
-PACKAGE_PREPROCESS_FUNCS += "src_package_preprocess"
-src_package_preprocess () {
- # Trim build paths from comments in generated sources to ensure reproducibility
- sed -i -e "s,${WORKDIR},,g" \
- ${B}/JavaScriptCore/DerivedSources/*.h \
- ${B}/JavaScriptCore/DerivedSources/yarr/*.h \
- ${B}/JavaScriptCore/PrivateHeaders/JavaScriptCore/*.h \
- ${B}/WebKit2Gtk/DerivedSources/webkit2/*.cpp \
- ${B}/WebKit2Gtk/DerivedSources/webkit2/*.h
-
-}
-
diff --git a/meta/recipes-sato/webkit/webkitgtk_2.44.0.bb b/meta/recipes-sato/webkit/webkitgtk_2.44.0.bb
new file mode 100644
index 0000000000..0819f6de0d
--- /dev/null
+++ b/meta/recipes-sato/webkit/webkitgtk_2.44.0.bb
@@ -0,0 +1,187 @@
+SUMMARY = "WebKit web rendering engine for the GTK+ platform"
+HOMEPAGE = "https://www.webkitgtk.org/"
+BUGTRACKER = "https://bugs.webkit.org/"
+
+LICENSE = "BSD-2-Clause & LGPL-2.0-or-later"
+LIC_FILES_CHKSUM = "file://Source/JavaScriptCore/COPYING.LIB;md5=d0c6d6397a5d84286dda758da57bd691 \
+ file://Source/WebCore/LICENSE-APPLE;md5=4646f90082c40bcf298c285f8bab0b12 \
+ file://Source/WebCore/LICENSE-LGPL-2;md5=36357ffde2b64ae177b2494445b79d21 \
+ file://Source/WebCore/LICENSE-LGPL-2.1;md5=a778a33ef338abbaf8b8a7c36b6eec80 \
+ "
+
+SRC_URI = "https://www.webkitgtk.org/releases/${BPN}-${PV}.tar.xz \
+ file://0001-FindGObjectIntrospection.cmake-prefix-variables-obta.patch \
+ file://reproducibility.patch \
+ file://0001-CMake-Add-a-variable-to-control-macro-__PAS_ALWAYS_I.patch \
+ file://no-musttail-arm.patch \
+ file://t6-not-declared.patch \
+ file://30e1d5e22213fdaca2a29ec3400c927d710a37a8.patch \
+ file://2922af379dc70b4b1a63b01d67179eb431f03ac4.patch \
+ "
+SRC_URI[sha256sum] = "c66530e41ba59b1edba4ee89ef20b2188e273bed0497e95084729e3cfbe30c87"
+
+inherit cmake pkgconfig gobject-introspection perlnative features_check upstream-version-is-even gi-docgen
+
+ANY_OF_DISTRO_FEATURES = "${GTK3DISTROFEATURES}"
+REQUIRED_DISTRO_FEATURES = "opengl"
+
+CVE_PRODUCT = "webkitgtk webkitgtk\+"
+
+DEPENDS += " \
+ ruby-native \
+ gperf-native \
+ unifdef-native \
+ cairo \
+ harfbuzz \
+ jpeg \
+ atk \
+ libwebp \
+ gtk4 \
+ libxslt \
+ libtasn1 \
+ libnotify \
+ gstreamer1.0 \
+ gstreamer1.0-plugins-base \
+ glib-2.0-native \
+ gettext-native \
+ "
+
+PACKAGECONFIG_SOUP ?= "soup3"
+PACKAGECONFIG ??= "${@bb.utils.filter('DISTRO_FEATURES', 'systemd wayland x11', d)} \
+ ${@bb.utils.contains('DISTRO_FEATURES', 'x11 opengl', 'webgl opengl', '', d)} \
+ ${@bb.utils.contains('DISTRO_FEATURES', 'x11', '', 'webgl gles2', d)} \
+ ${@bb.utils.contains('DISTRO_FEATURES', 'opengl', 'opengl-or-es', '', d)} \
+ enchant \
+ libsecret \
+ ${PACKAGECONFIG_SOUP} \
+ "
+
+PACKAGECONFIG[wayland] = "-DENABLE_WAYLAND_TARGET=ON,-DENABLE_WAYLAND_TARGET=OFF,wayland libwpe wpebackend-fdo wayland-native"
+PACKAGECONFIG[angle] = "-DUSE_ANGLE_WEBGL=ON,-DUSE_ANGLE_WEBGL=OFF"
+PACKAGECONFIG[x11] = "-DENABLE_X11_TARGET=ON,-DENABLE_X11_TARGET=OFF,virtual/libx11 libxcomposite libxdamage libxrender libxt"
+PACKAGECONFIG[geoclue] = "-DENABLE_GEOLOCATION=ON,-DENABLE_GEOLOCATION=OFF,geoclue"
+PACKAGECONFIG[enchant] = "-DENABLE_SPELLCHECK=ON,-DENABLE_SPELLCHECK=OFF,enchant2"
+PACKAGECONFIG[gles2] = "-DENABLE_GLES2=ON,-DENABLE_GLES2=OFF,virtual/libgles2"
+PACKAGECONFIG[webgl] = "-DENABLE_WEBGL=ON,-DENABLE_WEBGL=OFF,virtual/egl"
+PACKAGECONFIG[opengl] = "-DENABLE_GRAPHICS_CONTEXT_GL=ON,-DENABLE_GRAPHICS_CONTEXT_GL=OFF,virtual/egl"
+PACKAGECONFIG[opengl-or-es] = "-DUSE_OPENGL_OR_ES=ON,-DUSE_OPENGL_OR_ES=OFF"
+PACKAGECONFIG[libsecret] = "-DUSE_LIBSECRET=ON,-DUSE_LIBSECRET=OFF,libsecret"
+PACKAGECONFIG[libhyphen] = "-DUSE_LIBHYPHEN=ON,-DUSE_LIBHYPHEN=OFF,libhyphen"
+PACKAGECONFIG[woff2] = "-DUSE_WOFF2=ON,-DUSE_WOFF2=OFF,woff2"
+PACKAGECONFIG[openjpeg] = "-DUSE_OPENJPEG=ON,-DUSE_OPENJPEG=OFF,openjpeg"
+PACKAGECONFIG[systemd] = "-DUSE_SYSTEMD=ON,-DUSE_SYSTEMD=off,systemd"
+PACKAGECONFIG[reduce-size] = "-DCMAKE_BUILD_TYPE=MinSizeRel,-DCMAKE_BUILD_TYPE=Release,,"
+PACKAGECONFIG[lcms] = "-DUSE_LCMS=ON,-DUSE_LCMS=OFF,lcms"
+PACKAGECONFIG[soup2] = "-DUSE_SOUP2=ON,-DUSE_SOUP2=OFF,libsoup-2.4,,,soup3"
+PACKAGECONFIG[soup3] = ",,libsoup,,,soup2"
+PACKAGECONFIG[journald] = "-DENABLE_JOURNALD_LOG=ON,-DENABLE_JOURNALD_LOG=OFF,systemd"
+PACKAGECONFIG[avif] = "-DUSE_AVIF_LOG=ON,-DUSE_AVIF=OFF,libavif"
+PACKAGECONFIG[media-recorder] = "-DENABLE_MEDIA_RECORDER=ON,-DENABLE_MEDIA_RECORDER=OFF,gstreamer1.0-plugins-bad"
+PACKAGECONFIG[jpegxl] = "-DUSE_JPEGXL=ON,-DUSE_JPEGXL=OFF,libjxl"
+PACKAGECONFIG[backtrace] = "-DUSE_LIBBACKTRACE=ON,-DUSE_LIBBACKTRACE=OFF,libbacktrace"
+PACKAGECONFIG[gamepad] = "-DENABLE_GAMEPAD=ON,-DENABLE_GAMEPAD=OFF,libmanette"
+
+EXTRA_OECMAKE = " \
+ -DPORT=GTK \
+ ${@oe.utils.vartrue('GI_DATA_ENABLED', '-DENABLE_INTROSPECTION=ON', '-DENABLE_INTROSPECTION=OFF', d)} \
+ ${@oe.utils.vartrue('GIDOCGEN_ENABLED', '-DENABLE_DOCUMENTATION=ON', '-DENABLE_DOCUMENTATION=OFF', d)} \
+ ${@oe.utils.vartrue('DEBUG_BUILD', '-DWEBKIT_NO_INLINE_HINTS=ON', '-DWEBKIT_NO_INLINE_HINTS=OFFF', d)} \
+ -DENABLE_MINIBROWSER=ON \
+ -DENABLE_BUBBLEWRAP_SANDBOX=OFF \
+ -DUSE_GTK4=ON \
+ -DCMAKE_EXPORT_COMPILE_COMMANDS=OFF \
+ "
+
+# Unless DEBUG_BUILD is enabled, pass -g1 to massively reduce the size of the
+# debug symbols (4.3GB to 700M at time of writing)
+DEBUG_FLAGS:append = "${@oe.utils.vartrue('DEBUG_BUILD', '', ' -g1', d)}"
+
+# Javascript JIT is not supported on ARC
+EXTRA_OECMAKE:append:arc = " -DENABLE_JIT=OFF "
+# By default 25-bit "medium" calls are used on ARC
+# which is not enough for binaries larger than 32 MiB
+CFLAGS:append:arc = " -mlong-calls"
+CXXFLAGS:append:arc = " -mlong-calls"
+
+# Needed for non-mesa graphics stacks when x11 is disabled
+CXXFLAGS += "${@bb.utils.contains('DISTRO_FEATURES', 'x11', '', '-DEGL_NO_X11=1', d)}"
+
+# Javascript JIT is not supported on powerpc
+EXTRA_OECMAKE:append:powerpc = " -DENABLE_JIT=OFF "
+EXTRA_OECMAKE:append:powerpc64 = " -DENABLE_JIT=OFF "
+
+# ARM JIT code does not build on ARMv4/5/6 anymore
+EXTRA_OECMAKE:append:armv4 = " -DENABLE_JIT=OFF "
+EXTRA_OECMAKE:append:armv5 = " -DENABLE_JIT=OFF "
+EXTRA_OECMAKE:append:armv6 = " -DENABLE_JIT=OFF "
+
+# And for armv7* don't enable it for softfp, because after:
+# https://github.com/WebKit/WebKit/commit/a2ec4ef1997d6fafa6ffc607bffb54e76168a918
+# https://bugs.webkit.org/show_bug.cgi?id=242172
+# softfp armv7* fails because WEBASSEMBLY is left enabled by default and JIT gets
+# explicitly disabled causing:
+# http://errors.yoctoproject.org/Errors/Details/734587/
+# PR was sent upstream, but the end result is the same both JIT and WEBASSEMBLY disabled
+# https://github.com/WebKit/WebKit/pull/17447
+EXTRA_OECMAKE:append:armv7a = " -DENABLE_JIT=${@bb.utils.contains('TUNE_FEATURES', 'callconvention-hard', 'ON', 'OFF', d)}"
+EXTRA_OECMAKE:append:armv7r = " -DENABLE_JIT=${@bb.utils.contains('TUNE_FEATURES', 'callconvention-hard', 'ON', 'OFF', d)}"
+EXTRA_OECMAKE:append:armv7ve = " -DENABLE_JIT=${@bb.utils.contains('TUNE_FEATURES', 'callconvention-hard', 'ON', 'OFF', d)}"
+
+EXTRA_OECMAKE:append:mipsarch = " -DUSE_LD_GOLD=OFF "
+EXTRA_OECMAKE:append:powerpc = " -DUSE_LD_GOLD=OFF "
+
+# JIT and gold linker does not work on RISCV
+EXTRA_OECMAKE:append:riscv32 = " -DUSE_LD_GOLD=OFF -DENABLE_JIT=OFF"
+EXTRA_OECMAKE:append:riscv64 = " -DUSE_LD_GOLD=OFF"
+
+# JIT not supported on MIPS either
+EXTRA_OECMAKE:append:mipsarch = " -DENABLE_JIT=OFF -DENABLE_C_LOOP=ON "
+
+# JIT not supported on X32
+# An attempt was made to upstream JIT support for x32 in
+# https://bugs.webkit.org/show_bug.cgi?id=100450, but this was closed as
+# unresolved due to limited X32 adoption.
+EXTRA_OECMAKE:append:x86-x32 = " -DENABLE_JIT=OFF "
+
+SECURITY_CFLAGS:remove:aarch64 = "-fpie"
+SECURITY_CFLAGS:append:aarch64 = " -fPIE"
+
+FILES:${PN} += "${libdir}/webkitgtk-*/injected-bundle/*.so"
+
+RRECOMMENDS:${PN} += "ca-certificates shared-mime-info"
+
+# http://errors.yoctoproject.org/Errors/Details/20370/
+ARM_INSTRUCTION_SET:armv4 = "arm"
+ARM_INSTRUCTION_SET:armv5 = "arm"
+ARM_INSTRUCTION_SET:armv6 = "arm"
+
+# https://bugzilla.yoctoproject.org/show_bug.cgi?id=9474
+# https://bugs.webkit.org/show_bug.cgi?id=159880
+# JSC JIT can build on ARMv7 with -marm, but doesn't work on runtime.
+# Upstream only tests regularly the JSC JIT on ARMv7 with Thumb2 (-mthumb).
+ARM_INSTRUCTION_SET:armv7a = "thumb"
+ARM_INSTRUCTION_SET:armv7r = "thumb"
+ARM_INSTRUCTION_SET:armv7ve = "thumb"
+
+# ANGLE requires SSE support as of webkit 2.40.x on 32 bit x86
+COMPATIBLE_HOST:x86 = "${@bb.utils.contains_any('TUNE_FEATURES', 'core2 corei7', '.*', 'null', d)}"
+
+# introspection inside qemu-arm hangs forever on musl/arm builds
+# therefore disable GI_DATA
+GI_DATA_ENABLED:libc-musl:armv7a = "False"
+GI_DATA_ENABLED:libc-musl:armv7ve = "False"
+
+# Can't be built with ccache
+CCACHE_DISABLE = "1"
+
+PACKAGE_PREPROCESS_FUNCS += "src_package_preprocess"
+src_package_preprocess () {
+ # Trim build paths from comments in generated sources to ensure reproducibility
+ sed -i -e "s,${WORKDIR},,g" \
+ ${B}/JavaScriptCore/DerivedSources/*.h \
+ ${B}/JavaScriptCore/DerivedSources/yarr/*.h \
+ ${B}/JavaScriptCore/PrivateHeaders/JavaScriptCore/*.h \
+ ${B}/WebCore/DerivedSources/*.cpp \
+ ${B}/WebKitGTK/DerivedSources/webkit/*.cpp
+}
+
diff --git a/meta/recipes-sato/webkit/wpebackend-fdo_1.12.0.bb b/meta/recipes-sato/webkit/wpebackend-fdo_1.12.0.bb
deleted file mode 100644
index 4a18467ea4..0000000000
--- a/meta/recipes-sato/webkit/wpebackend-fdo_1.12.0.bb
+++ /dev/null
@@ -1,24 +0,0 @@
-SUMMARY = "WPE's backend based on a freedesktop.org stack."
-HOMEPAGE = "https://github.com/Igalia/WPEBackend-fdo"
-BUGTRACKER = "https://github.com/Igalia/WPEBackend-fdo/issues"
-
-LICENSE = "BSD-2-Clause"
-LIC_FILES_CHKSUM = "file://COPYING;md5=1f62cef2e3645e3e74eb05fd389d7a66"
-DEPENDS = "glib-2.0 libxkbcommon wayland virtual/egl libwpe libepoxy"
-
-DEPENDS:append:class-target = " wayland-native"
-
-inherit meson features_check pkgconfig
-
-REQUIRED_DISTRO_FEATURES = "opengl"
-
-SRC_URI = "https://wpewebkit.org/releases/${BPN}-${PV}.tar.xz"
-SRC_URI[sha256sum] = "6239c9c15523410798d66315de6b491712ab30009ba180f3e0dd076d9b0074ac"
-
-# Especially helps compiling with clang which enable this as error when
-# using c++11
-CXXFLAGS += "-Wno-c++11-narrowing"
-
-# This is a tweak of upstream-version-is-even needed because
-# ipstream directory contains tarballs for other components as well.
-UPSTREAM_CHECK_REGEX = "wpebackend-fdo-(?P<pver>\d+\.(\d*[02468])+(\.\d+)+)\.tar"
diff --git a/meta/recipes-sato/webkit/wpebackend-fdo_1.14.2.bb b/meta/recipes-sato/webkit/wpebackend-fdo_1.14.2.bb
new file mode 100644
index 0000000000..b3d7b229c8
--- /dev/null
+++ b/meta/recipes-sato/webkit/wpebackend-fdo_1.14.2.bb
@@ -0,0 +1,24 @@
+SUMMARY = "WPE's backend based on a freedesktop.org stack."
+HOMEPAGE = "https://github.com/Igalia/WPEBackend-fdo"
+BUGTRACKER = "https://github.com/Igalia/WPEBackend-fdo/issues"
+
+LICENSE = "BSD-2-Clause"
+LIC_FILES_CHKSUM = "file://COPYING;md5=1f62cef2e3645e3e74eb05fd389d7a66"
+DEPENDS = "glib-2.0 libxkbcommon wayland virtual/egl libwpe libepoxy"
+
+DEPENDS:append:class-target = " wayland-native"
+
+inherit meson features_check pkgconfig
+
+REQUIRED_DISTRO_FEATURES = "opengl"
+
+SRC_URI = "https://wpewebkit.org/releases/${BPN}-${PV}.tar.xz"
+SRC_URI[sha256sum] = "93c9766ae9864eeaeaee2b0a74f22cbca08df42c1a1bdb55b086f2528e380d38"
+
+# Especially helps compiling with clang which enable this as error when
+# using c++11
+CXXFLAGS += "-Wno-c++11-narrowing"
+
+# This is a tweak of upstream-version-is-even needed because
+# ipstream directory contains tarballs for other components as well.
+UPSTREAM_CHECK_REGEX = "wpebackend-fdo-(?P<pver>\d+\.(\d*[02468])+(\.\d+)+)\.tar"
diff --git a/meta/recipes-support/appstream/appstream/0001-remove-hardcoded-path.patch b/meta/recipes-support/appstream/appstream/0001-remove-hardcoded-path.patch
new file mode 100644
index 0000000000..bc811d67b0
--- /dev/null
+++ b/meta/recipes-support/appstream/appstream/0001-remove-hardcoded-path.patch
@@ -0,0 +1,32 @@
+From fe9ebb8c0fa7b84a9c5c667386b8fbb3baea2fad Mon Sep 17 00:00:00 2001
+From: Markus Volk <f_l_k@t-online.de>
+Date: Mon, 12 Dec 2022 15:42:42 +0100
+Subject: [PATCH] remove hardcoded path
+
+Signed-off-by: Markus Volk <f_l_k@t-online.de>
+
+Dont include hardcoded path. This fixes:
+| cc1: error: include location "/usr/include" is unsafe for cross-compilation [-Werror=poison-system-directories]
+
+Upstream-Status: Inappropriate [oe-specific]
+---
+ meson.build | 4 ++--
+ 1 file changed, 2 insertions(+), 2 deletions(-)
+
+diff --git a/meson.build b/meson.build
+index fa171d5..79d5168 100644
+--- a/meson.build
++++ b/meson.build
+@@ -185,10 +185,10 @@ endif
+ stemmer_inc_dirs = include_directories()
+ if get_option('stemming')
+ stemmer_lib = cc.find_library('stemmer', required: true)
+- stemmer_inc_dirs = include_directories(['/usr/include'])
++ stemmer_inc_dirs = include_directories([''])
+ if not cc.has_header('libstemmer.h')
+ if cc.has_header('libstemmer/libstemmer.h')
+- stemmer_inc_dirs = include_directories('/usr/include/libstemmer')
++ stemmer_inc_dirs = include_directories('')
+ else
+ error('Unable to find Snowball header "libstemmer.h". Please ensure libstemmer/Snowball is installed properly in order to continue.')
+ endif
diff --git a/meta/recipes-support/appstream/appstream_1.0.2.bb b/meta/recipes-support/appstream/appstream_1.0.2.bb
new file mode 100644
index 0000000000..7eb12a04c5
--- /dev/null
+++ b/meta/recipes-support/appstream/appstream_1.0.2.bb
@@ -0,0 +1,44 @@
+SUMMARY = "AppStream is a collaborative effort for making machine-readable software metadata easily available."
+HOMEPAGE = "https://github.com/ximion/appstream"
+LICENSE = "LGPL-2.1-only"
+LIC_FILES_CHKSUM = "file://COPYING;md5=435ed639f84d4585d93824e7da3d85da"
+
+DEPENDS = " \
+ appstream-native \
+ curl-native \
+ curl \
+ docbook-xml-dtd4-native \
+ gperf-native \
+ glib-2.0 \
+ libyaml \
+ libxml2 \
+ libxmlb \
+ libxslt-native \
+ itstool-native \
+ docbook-xsl-stylesheets-native \
+ python3-pygments-native \
+"
+
+inherit meson gobject-introspection gettext gi-docgen pkgconfig vala
+
+GIR_MESON_OPTION = "gir"
+GIDOCGEN_MESON_OPTION = "apidocs"
+
+SRC_URI = " \
+ https://www.freedesktop.org/software/appstream/releases/AppStream-${PV}.tar.xz \
+ file://0001-remove-hardcoded-path.patch \
+"
+SRC_URI[sha256sum] = "1a5148ca97dcbf5eb6e9c380278bb0d20938569292ea8652df1b3cac8bd2736b"
+
+S = "${WORKDIR}/AppStream-${PV}"
+
+PACKAGECONFIG ?= "${@bb.utils.filter('DISTRO_FEATURES', 'systemd', d)}"
+
+PACKAGECONFIG[systemd] = "-Dsystemd=true,-Dsystemd=false,systemd"
+PACKAGECONFIG[stemming] = "-Dstemming=true,-Dstemming=false,libstemmer"
+
+FILES:${PN} += "${datadir}"
+
+EXTRA_OEMESON += "${@bb.utils.contains('GI_DATA_ENABLED', 'True', '-Dvapi=true', '-Dvapi=false', d)}"
+
+BBCLASSEXTEND = "native"
diff --git a/meta/recipes-support/apr/apr-util/0001-Fix-error-handling-in-gdbm.patch b/meta/recipes-support/apr/apr-util/0001-Fix-error-handling-in-gdbm.patch
deleted file mode 100644
index 6f27876a7f..0000000000
--- a/meta/recipes-support/apr/apr-util/0001-Fix-error-handling-in-gdbm.patch
+++ /dev/null
@@ -1,134 +0,0 @@
-From 6b638fa9afbeb54dfa19378e391465a5284ce1ad Mon Sep 17 00:00:00 2001
-From: Changqing Li <changqing.li@windriver.com>
-Date: Wed, 12 Sep 2018 17:16:36 +0800
-Subject: [PATCH] Fix error handling in gdbm
-
-Only check for gdbm_errno if the return value of the called gdbm_*
-function says so. This fixes apr-util with gdbm 1.14, which does not
-seem to always reset gdbm_errno.
-
-Also make the gdbm driver return error codes starting with
-APR_OS_START_USEERR instead of always returning APR_EGENERAL. This is
-what the berkleydb driver already does.
-
-Also ensure that dsize is 0 if dptr == NULL.
-
-Upstream-Status: Backport [https://svn.apache.org/viewvc?view=revision&amp;revision=1825311]
-
-Signed-off-by: Changqing Li <changqing.li@windriver.com>
----
- dbm/apr_dbm_gdbm.c | 47 +++++++++++++++++++++++++++++------------------
- 1 file changed, 29 insertions(+), 18 deletions(-)
-
-diff --git a/dbm/apr_dbm_gdbm.c b/dbm/apr_dbm_gdbm.c
-index 749447a..1c86327 100644
---- a/dbm/apr_dbm_gdbm.c
-+++ b/dbm/apr_dbm_gdbm.c
-@@ -36,13 +36,25 @@
- static apr_status_t g2s(int gerr)
- {
- if (gerr == -1) {
-- /* ### need to fix this */
-- return APR_EGENERAL;
-+ if (gdbm_errno == GDBM_NO_ERROR)
-+ return APR_SUCCESS;
-+ return APR_OS_START_USEERR + gdbm_errno;
- }
-
- return APR_SUCCESS;
- }
-
-+static apr_status_t gdat2s(datum d)
-+{
-+ if (d.dptr == NULL) {
-+ if (gdbm_errno == GDBM_NO_ERROR || gdbm_errno == GDBM_ITEM_NOT_FOUND)
-+ return APR_SUCCESS;
-+ return APR_OS_START_USEERR + gdbm_errno;
-+ }
-+
-+ return APR_SUCCESS;
-+}
-+
- static apr_status_t datum_cleanup(void *dptr)
- {
- if (dptr)
-@@ -53,22 +65,15 @@ static apr_status_t datum_cleanup(void *dptr)
-
- static apr_status_t set_error(apr_dbm_t *dbm, apr_status_t dbm_said)
- {
-- apr_status_t rv = APR_SUCCESS;
-
-- /* ### ignore whatever the DBM said (dbm_said); ask it explicitly */
-+ dbm->errcode = dbm_said;
-
-- if ((dbm->errcode = gdbm_errno) == GDBM_NO_ERROR) {
-+ if (dbm_said == APR_SUCCESS)
- dbm->errmsg = NULL;
-- }
-- else {
-- dbm->errmsg = gdbm_strerror(gdbm_errno);
-- rv = APR_EGENERAL; /* ### need something better */
-- }
--
-- /* captured it. clear it now. */
-- gdbm_errno = GDBM_NO_ERROR;
-+ else
-+ dbm->errmsg = gdbm_strerror(dbm_said - APR_OS_START_USEERR);
-
-- return rv;
-+ return dbm_said;
- }
-
- /* --------------------------------------------------------------------------
-@@ -107,7 +112,7 @@ static apr_status_t vt_gdbm_open(apr_dbm_t **pdb, const char *pathname,
- NULL);
-
- if (file == NULL)
-- return APR_EGENERAL; /* ### need a better error */
-+ return APR_OS_START_USEERR + gdbm_errno; /* ### need a better error */
-
- /* we have an open database... return it */
- *pdb = apr_pcalloc(pool, sizeof(**pdb));
-@@ -141,10 +146,12 @@ static apr_status_t vt_gdbm_fetch(apr_dbm_t *dbm, apr_datum_t key,
- if (pvalue->dptr)
- apr_pool_cleanup_register(dbm->pool, pvalue->dptr, datum_cleanup,
- apr_pool_cleanup_null);
-+ else
-+ pvalue->dsize = 0;
-
- /* store the error info into DBM, and return a status code. Also, note
- that *pvalue should have been cleared on error. */
-- return set_error(dbm, APR_SUCCESS);
-+ return set_error(dbm, gdat2s(rd));
- }
-
- static apr_status_t vt_gdbm_store(apr_dbm_t *dbm, apr_datum_t key,
-@@ -201,9 +208,11 @@ static apr_status_t vt_gdbm_firstkey(apr_dbm_t *dbm, apr_datum_t *pkey)
- if (pkey->dptr)
- apr_pool_cleanup_register(dbm->pool, pkey->dptr, datum_cleanup,
- apr_pool_cleanup_null);
-+ else
-+ pkey->dsize = 0;
-
- /* store any error info into DBM, and return a status code. */
-- return set_error(dbm, APR_SUCCESS);
-+ return set_error(dbm, gdat2s(rd));
- }
-
- static apr_status_t vt_gdbm_nextkey(apr_dbm_t *dbm, apr_datum_t *pkey)
-@@ -221,9 +230,11 @@ static apr_status_t vt_gdbm_nextkey(apr_dbm_t *dbm, apr_datum_t *pkey)
- if (pkey->dptr)
- apr_pool_cleanup_register(dbm->pool, pkey->dptr, datum_cleanup,
- apr_pool_cleanup_null);
-+ else
-+ pkey->dsize = 0;
-
- /* store any error info into DBM, and return a status code. */
-- return set_error(dbm, APR_SUCCESS);
-+ return set_error(dbm, gdat2s(rd));
- }
-
- static void vt_gdbm_freedatum(apr_dbm_t *dbm, apr_datum_t data)
---
-2.7.4
-
diff --git a/meta/recipes-support/apr/apr-util/0001-test_transformation-Check-if-transform-is-supported-.patch b/meta/recipes-support/apr/apr-util/0001-test_transformation-Check-if-transform-is-supported-.patch
new file mode 100644
index 0000000000..261b78736f
--- /dev/null
+++ b/meta/recipes-support/apr/apr-util/0001-test_transformation-Check-if-transform-is-supported-.patch
@@ -0,0 +1,37 @@
+From 3a97f58cfb40fc1911bbfd067e8457a472613d75 Mon Sep 17 00:00:00 2001
+From: Khem Raj <raj.khem@gmail.com>
+Date: Tue, 18 Apr 2023 22:58:00 -0700
+Subject: [PATCH] test_transformation: Check if transform is supported before
+ using it
+
+This helps in excluding these tests on systems where these are not
+available e.g. musl
+
+Upstream-Status: Submitted [https://bz.apache.org/bugzilla/show_bug.cgi?id=66570]
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+---
+ test/testxlate.c | 8 ++++++--
+ 1 file changed, 6 insertions(+), 2 deletions(-)
+
+diff --git a/test/testxlate.c b/test/testxlate.c
+index 6981eff..de00fa4 100644
+--- a/test/testxlate.c
++++ b/test/testxlate.c
+@@ -116,8 +116,12 @@ static void test_transformation(abts_case *tc, void *data)
+ }
+
+ /* 4. Transformation using charset aliases */
+- one_test(tc, "UTF-8", "UTF-7", test_utf8, test_utf7, p);
+- one_test(tc, "UTF-7", "UTF-8", test_utf7, test_utf8, p);
++ if (is_transform_supported(tc, "UTF-8", "UTF-7", p)) {
++ one_test(tc, "UTF-8", "UTF-7", test_utf8, test_utf7, p);
++ }
++ if (is_transform_supported(tc, "UTF-7", "UTF-8", p)) {
++ one_test(tc, "UTF-7", "UTF-8", test_utf7, test_utf8, p);
++ }
+ }
+
+ #endif /* APR_HAS_XLATE */
+--
+2.40.0
+
diff --git a/meta/recipes-support/apr/apr-util_1.6.1.bb b/meta/recipes-support/apr/apr-util_1.6.1.bb
deleted file mode 100644
index b851d46351..0000000000
--- a/meta/recipes-support/apr/apr-util_1.6.1.bb
+++ /dev/null
@@ -1,98 +0,0 @@
-SUMMARY = "Apache Portable Runtime (APR) companion library"
-HOMEPAGE = "http://apr.apache.org/"
-SECTION = "libs"
-DEPENDS = "apr expat"
-
-BBCLASSEXTEND = "native nativesdk"
-
-LICENSE = "Apache-2.0"
-LIC_FILES_CHKSUM = "file://LICENSE;md5=158aa0b1efe0c12f23d4b007ddb9a5db \
- file://include/apu_version.h;endline=15;md5=823b3d1a7225df8f7b68a69c3c2b4c71"
-
-SRC_URI = "${APACHE_MIRROR}/apr/${BPN}-${PV}.tar.gz \
- file://configfix.patch \
- file://configure_fixes.patch \
- file://run-ptest \
- file://0001-Fix-error-handling-in-gdbm.patch \
-"
-
-SRC_URI[md5sum] = "bd502b9a8670a8012c4d90c31a84955f"
-SRC_URI[sha256sum] = "b65e40713da57d004123b6319828be7f1273fbc6490e145874ee1177e112c459"
-
-EXTRA_OECONF = "--with-apr=${STAGING_BINDIR_CROSS}/apr-1-config \
- --without-odbc \
- --without-pgsql \
- --without-sqlite2 \
- --with-expat=${STAGING_DIR_HOST}${prefix}"
-
-
-inherit autotools lib_package binconfig multilib_script
-
-MULTILIB_SCRIPTS = "${PN}-dev:${bindir}/apu-1-config"
-
-OE_BINCONFIG_EXTRA_MANGLE = " -e 's:location=source:location=installed:'"
-
-do_configure:append() {
- if [ "${CLASSOVERRIDE}" = "class-target" ]; then
- cp ${STAGING_DATADIR}/apr/apr_rules.mk ${B}/build/rules.mk
- sed -i -e 's#^CFLAGS=.*#CFLAGS=${TARGET_CFLAGS}#g' ${B}/build/rules.mk
- fi
-}
-do_configure:prepend:class-native() {
- mkdir ${B}/build
- cp ${STAGING_DATADIR_NATIVE}/apr/apr_rules.mk ${B}/build/rules.mk
-}
-do_configure:append:class-native() {
- sed -i "s#LIBTOOL=\$(SHELL) \$(apr_builddir)#LIBTOOL=\$(SHELL) ${STAGING_BINDIR_NATIVE}#" ${B}/build/rules.mk
- # sometimes there isn't SHELL
- sed -i "s#LIBTOOL=\$(apr_builddir)#LIBTOOL=${STAGING_BINDIR_NATIVE}#" ${B}/build/rules.mk
-}
-
-do_configure:prepend:class-nativesdk() {
- cp ${STAGING_DATADIR}/apr/apr_rules.mk ${S}/build/rules.mk
- sed -i -e 's#^CFLAGS=.*#CFLAGS=${TARGET_CFLAGS}#g' ${S}/build/rules.mk
-}
-
-do_configure:append:class-nativesdk() {
- sed -i "s#\(apr_builddir\)=.*#\1=${STAGING_DATADIR}/build-1#" ${B}/build/rules.mk
- sed -i "s#\(apr_builders\)=.*#\1=${STAGING_DATADIR}/build-1#" ${B}/build/rules.mk
- sed -i "s#\(top_builddir\)=.*#\1=${STAGING_DATADIR}/build-1#" ${B}/build/rules.mk
- sed -i "s#\(LIBTOOL=\$(apr_builddir)\).*#\1/libtool#" ${B}/build/rules.mk
-}
-
-do_install:append:class-target() {
- sed -i -e 's,${STAGING_DIR_HOST},,g' \
- -e 's,APU_SOURCE_DIR=.*,APR_SOURCE_DIR=,g' \
- -e 's,APU_BUILD_DIR=.*,APR_BUILD_DIR=,g' ${D}${bindir}/apu-1-config
-}
-
-PACKAGECONFIG ??= "crypto gdbm"
-PACKAGECONFIG[ldap] = "--with-ldap,--without-ldap,openldap"
-PACKAGECONFIG[crypto] = "--with-openssl=${STAGING_DIR_HOST}${prefix} --with-crypto,--without-crypto,openssl"
-PACKAGECONFIG[sqlite3] = "--with-sqlite3=${STAGING_DIR_HOST}${prefix},--without-sqlite3,sqlite3"
-PACKAGECONFIG[gdbm] = "--with-dbm=gdbm --with-gdbm=${STAGING_DIR_HOST}${prefix},--without-gdbm,gdbm"
-
-#files ${libdir}/apr-util-1/*.so are not symlinks but loadable modules thus they are packaged in ${PN}
-FILES:${PN} += "${libdir}/apr-util-1/apr*${SOLIBS} ${libdir}/apr-util-1/apr*${SOLIBSDEV}"
-FILES:${PN}-dev += "${libdir}/aprutil.exp ${libdir}/apr-util-1/*.la"
-FILES:${PN}-staticdev += "${libdir}/apr-util-1/*.a"
-
-INSANE_SKIP:${PN} += "dev-so"
-
-inherit ptest
-
-RDEPENDS:${PN}-ptest:append:libc-glibc = " glibc-gconv-iso8859-1 glibc-gconv-iso8859-2 glibc-gconv-utf-7"
-RDEPENDS:${PN}-ptest += "libgcc"
-
-do_compile_ptest() {
- cd ${B}/test
- oe_runmake
-}
-
-do_install_ptest() {
- t=${D}${PTEST_PATH}/test
- mkdir $t
- for i in testall data; do \
- cp -r ${B}/test/$i $t; \
- done
-}
diff --git a/meta/recipes-support/apr/apr-util_1.6.3.bb b/meta/recipes-support/apr/apr-util_1.6.3.bb
new file mode 100644
index 0000000000..1371e262dd
--- /dev/null
+++ b/meta/recipes-support/apr/apr-util_1.6.3.bb
@@ -0,0 +1,97 @@
+SUMMARY = "Apache Portable Runtime (APR) companion library"
+HOMEPAGE = "http://apr.apache.org/"
+SECTION = "libs"
+DEPENDS = "apr expat"
+
+BBCLASSEXTEND = "native nativesdk"
+
+LICENSE = "Apache-2.0"
+LIC_FILES_CHKSUM = "file://LICENSE;md5=158aa0b1efe0c12f23d4b007ddb9a5db \
+ file://include/apu_version.h;endline=15;md5=823b3d1a7225df8f7b68a69c3c2b4c71"
+
+SRC_URI = "${APACHE_MIRROR}/apr/${BPN}-${PV}.tar.gz \
+ file://configfix.patch \
+ file://configure_fixes.patch \
+ file://0001-test_transformation-Check-if-transform-is-supported-.patch \
+ file://run-ptest \
+ "
+
+SRC_URI[sha256sum] = "2b74d8932703826862ca305b094eef2983c27b39d5c9414442e9976a9acf1983"
+
+EXTRA_OECONF = "--with-apr=${STAGING_BINDIR_CROSS}/apr-1-config \
+ --without-odbc \
+ --without-pgsql \
+ --without-sqlite2 \
+ --with-expat=${STAGING_DIR_HOST}${prefix}"
+
+
+inherit autotools lib_package binconfig multilib_script
+
+MULTILIB_SCRIPTS = "${PN}-dev:${bindir}/apu-1-config"
+
+OE_BINCONFIG_EXTRA_MANGLE = " -e 's:location=source:location=installed:'"
+
+do_configure:append() {
+ if [ "${CLASSOVERRIDE}" = "class-target" ]; then
+ cp ${STAGING_DATADIR}/apr/apr_rules.mk ${B}/build/rules.mk
+ sed -i -e 's#^CFLAGS=.*#CFLAGS=${TARGET_CFLAGS}#g' ${B}/build/rules.mk
+ fi
+}
+do_configure:prepend:class-native() {
+ mkdir ${B}/build
+ cp ${STAGING_DATADIR_NATIVE}/apr/apr_rules.mk ${B}/build/rules.mk
+}
+do_configure:append:class-native() {
+ sed -i "s#LIBTOOL=\$(SHELL) \$(apr_builddir)#LIBTOOL=\$(SHELL) ${STAGING_BINDIR_NATIVE}#" ${B}/build/rules.mk
+ # sometimes there isn't SHELL
+ sed -i "s#LIBTOOL=\$(apr_builddir)#LIBTOOL=${STAGING_BINDIR_NATIVE}#" ${B}/build/rules.mk
+}
+
+do_configure:prepend:class-nativesdk() {
+ cp ${STAGING_DATADIR}/apr/apr_rules.mk ${S}/build/rules.mk
+ sed -i -e 's#^CFLAGS=.*#CFLAGS=${TARGET_CFLAGS}#g' ${S}/build/rules.mk
+}
+
+do_configure:append:class-nativesdk() {
+ sed -i "s#\(apr_builddir\)=.*#\1=${STAGING_DATADIR}/build-1#" ${B}/build/rules.mk
+ sed -i "s#\(apr_builders\)=.*#\1=${STAGING_DATADIR}/build-1#" ${B}/build/rules.mk
+ sed -i "s#\(top_builddir\)=.*#\1=${STAGING_DATADIR}/build-1#" ${B}/build/rules.mk
+ sed -i "s#\(LIBTOOL=\$(apr_builddir)\).*#\1/libtool#" ${B}/build/rules.mk
+}
+
+do_install:append:class-target() {
+ sed -i -e 's,${STAGING_DIR_HOST},,g' \
+ -e 's,APU_SOURCE_DIR=.*,APR_SOURCE_DIR=,g' \
+ -e 's,APU_BUILD_DIR=.*,APR_BUILD_DIR=,g' ${D}${bindir}/apu-1-config
+}
+
+PACKAGECONFIG ??= "crypto gdbm"
+PACKAGECONFIG[ldap] = "--with-ldap,--without-ldap,openldap"
+PACKAGECONFIG[crypto] = "--with-openssl=${STAGING_DIR_HOST}${prefix} --with-crypto,--without-crypto,openssl"
+PACKAGECONFIG[sqlite3] = "--with-sqlite3=${STAGING_DIR_HOST}${prefix},--without-sqlite3,sqlite3"
+PACKAGECONFIG[gdbm] = "--with-dbm=gdbm --with-gdbm=${STAGING_DIR_HOST}${prefix},--without-gdbm,gdbm"
+
+#files ${libdir}/apr-util-1/*.so are not symlinks but loadable modules thus they are packaged in ${PN}
+FILES:${PN} += "${libdir}/apr-util-1/apr*${SOLIBS} ${libdir}/apr-util-1/apr*${SOLIBSDEV}"
+FILES:${PN}-dev += "${libdir}/aprutil.exp ${libdir}/apr-util-1/*.la"
+FILES:${PN}-staticdev += "${libdir}/apr-util-1/*.a"
+
+INSANE_SKIP:${PN} += "dev-so"
+
+inherit ptest
+
+RDEPENDS:${PN}-ptest:append:libc-glibc = " glibc-gconv-iso8859-1 glibc-gconv-iso8859-2 glibc-gconv-utf-7"
+RDEPENDS:${PN}-ptest += "libgcc"
+
+do_compile_ptest() {
+ cd ${B}/test
+ oe_runmake
+}
+
+do_install_ptest() {
+ t=${D}${PTEST_PATH}/test
+ mkdir $t
+ for i in testall data; do \
+ cp -r ${B}/test/$i $t; \
+ done
+}
diff --git a/meta/recipes-support/apr/apr/0001-Add-option-to-disable-timed-dependant-tests.patch b/meta/recipes-support/apr/apr/0001-Add-option-to-disable-timed-dependant-tests.patch
index abff4e9331..a274f3a16e 100644
--- a/meta/recipes-support/apr/apr/0001-Add-option-to-disable-timed-dependant-tests.patch
+++ b/meta/recipes-support/apr/apr/0001-Add-option-to-disable-timed-dependant-tests.patch
@@ -1,14 +1,15 @@
-From 2bbe20b4f69e84e7a18bc79d382486953f479328 Mon Sep 17 00:00:00 2001
+From 225abf37cd0b49960664b59f08e515a4c4ea5ad0 Mon Sep 17 00:00:00 2001
From: Jeremy Puhlman <jpuhlman@mvista.com>
Date: Thu, 26 Mar 2020 18:30:36 +0000
Subject: [PATCH] Add option to disable timed dependant tests
-The disabled tests rely on timing to pass correctly. On a virtualized
+The disabled tests rely on timing to pass correctly. On a virtualized
system under heavy load, these tests randomly fail because they miss
a timer or other timing related issues.
Upstream-Status: Pending
Signed-off-by: Jeremy Puhlman <jpuhlman@mvista.com>
+
---
configure.in | 6 ++++++
include/apr.h.in | 1 +
@@ -16,10 +17,10 @@ Signed-off-by: Jeremy Puhlman <jpuhlman@mvista.com>
3 files changed, 9 insertions(+), 2 deletions(-)
diff --git a/configure.in b/configure.in
-index d9f32d6..f0c5661 100644
+index bfd488b..3663220 100644
--- a/configure.in
+++ b/configure.in
-@@ -2886,6 +2886,12 @@ AC_ARG_ENABLE(timedlocks,
+@@ -3023,6 +3023,12 @@ AC_ARG_ENABLE(timedlocks,
)
AC_SUBST(apr_has_timedlocks)
@@ -45,10 +46,10 @@ index ee99def..c46a5f4 100644
#define APR_PROCATTR_USER_SET_REQUIRES_PASSWORD @apr_procattr_user_set_requires_password@
diff --git a/test/testlock.c b/test/testlock.c
-index a43f477..6233d0b 100644
+index e3437c1..04e01b9 100644
--- a/test/testlock.c
+++ b/test/testlock.c
-@@ -396,13 +396,13 @@ abts_suite *testlock(abts_suite *suite)
+@@ -535,7 +535,7 @@ abts_suite *testlock(abts_suite *suite)
abts_run_test(suite, threads_not_impl, NULL);
#else
abts_run_test(suite, test_thread_mutex, NULL);
@@ -56,6 +57,8 @@ index a43f477..6233d0b 100644
+#if APR_HAS_TIMEDLOCKS && APR_HAVE_TIME_DEPENDANT_TESTS
abts_run_test(suite, test_thread_timedmutex, NULL);
#endif
+ abts_run_test(suite, test_thread_nestedmutex, NULL);
+@@ -543,7 +543,7 @@ abts_suite *testlock(abts_suite *suite)
abts_run_test(suite, test_thread_rwlock, NULL);
abts_run_test(suite, test_cond, NULL);
abts_run_test(suite, test_timeoutcond, NULL);
@@ -63,7 +66,4 @@ index a43f477..6233d0b 100644
+#if APR_HAS_TIMEDLOCKS && APR_HAVE_TIME_DEPENDANT_TESTS
abts_run_test(suite, test_timeoutmutex, NULL);
#endif
- #endif
---
-2.23.0
-
+ #ifdef WIN32
diff --git a/meta/recipes-support/apr/apr/0001-configure-Remove-runtime-test-for-mmap-that-can-map-.patch b/meta/recipes-support/apr/apr/0001-configure-Remove-runtime-test-for-mmap-that-can-map-.patch
new file mode 100644
index 0000000000..a78b16284f
--- /dev/null
+++ b/meta/recipes-support/apr/apr/0001-configure-Remove-runtime-test-for-mmap-that-can-map-.patch
@@ -0,0 +1,58 @@
+From 316b81c462f065927d7fec56aadd5c8cb94d1cf0 Mon Sep 17 00:00:00 2001
+From: Khem Raj <raj.khem@gmail.com>
+Date: Fri, 26 Aug 2022 00:28:08 -0700
+Subject: [PATCH] configure: Remove runtime test for mmap that can map
+ /dev/zero
+
+This never works for cross-compile moreover it ends up disabling
+ac_cv_file__dev_zero which then results in compiler errors in shared
+mutexes
+
+Upstream-Status: Inappropriate [Cross-compile specific]
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+
+---
+ configure.in | 30 ------------------------------
+ 1 file changed, 30 deletions(-)
+
+diff --git a/configure.in b/configure.in
+index 3663220..dce9789 100644
+--- a/configure.in
++++ b/configure.in
+@@ -1303,36 +1303,6 @@ AC_CHECK_FUNCS([mmap munmap shm_open shm_unlink shmget shmat shmdt shmctl \
+ APR_CHECK_DEFINE(MAP_ANON, sys/mman.h)
+ AC_CHECK_FILE(/dev/zero)
+
+-# Not all systems can mmap /dev/zero (such as HP-UX). Check for that.
+-if test "$ac_cv_func_mmap" = "yes" &&
+- test "$ac_cv_file__dev_zero" = "yes"; then
+- AC_CACHE_CHECK([for mmap that can map /dev/zero],
+- [ac_cv_mmap__dev_zero],
+- [AC_TRY_RUN([#include <sys/types.h>
+-#include <sys/stat.h>
+-#include <fcntl.h>
+-#ifdef HAVE_SYS_MMAN_H
+-#include <sys/mman.h>
+-#endif
+- int main()
+- {
+- int fd;
+- void *m;
+- fd = open("/dev/zero", O_RDWR);
+- if (fd < 0) {
+- return 1;
+- }
+- m = mmap(0, sizeof(void*), PROT_READ|PROT_WRITE, MAP_SHARED, fd, 0);
+- if (m == (void *)-1) { /* aka MAP_FAILED */
+- return 2;
+- }
+- if (munmap(m, sizeof(void*)) < 0) {
+- return 3;
+- }
+- return 0;
+- }], [], [ac_cv_file__dev_zero=no], [ac_cv_file__dev_zero=no])])
+-fi
+-
+ # Now we determine which one is our anonymous shmem preference.
+ haveshmgetanon="0"
+ havemmapzero="0"
diff --git a/meta/recipes-support/apr/apr/0001-dso-Check-for-NULL-handle-in-apr_dso_sym.patch b/meta/recipes-support/apr/apr/0001-dso-Check-for-NULL-handle-in-apr_dso_sym.patch
new file mode 100644
index 0000000000..8ba181b887
--- /dev/null
+++ b/meta/recipes-support/apr/apr/0001-dso-Check-for-NULL-handle-in-apr_dso_sym.patch
@@ -0,0 +1,37 @@
+From a25be1aaa92a6d2e7f4cc3fdfbb92e5a10b63035 Mon Sep 17 00:00:00 2001
+From: Greg Beard <gmbeard@googlemail.com>
+Date: Sat, 25 Mar 2023 08:31:36 +0000
+Subject: [PATCH] dso: Check for NULL handle in apr_dso_sym
+
+Upstream-Status: Backport [https://github.com/apache/apr/pull/40/commits/0efce00093b1ba405d91c7f0eab9755c8527eead]
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+---
+ dso/unix/dso.c | 12 ++++++++++++
+ 1 file changed, 12 insertions(+)
+
+diff --git a/dso/unix/dso.c b/dso/unix/dso.c
+index fdd56f1..583d5de 100644
+--- a/dso/unix/dso.c
++++ b/dso/unix/dso.c
+@@ -173,6 +173,18 @@ APR_DECLARE(apr_status_t) apr_dso_sym(apr_dso_handle_sym_t *ressym,
+ apr_dso_handle_t *handle,
+ const char *symname)
+ {
++ /* This is necessary for `testdso.c`. For some reason, musl
++ * builds fail the `test_unload_library` test if the below
++ * check isn't in place. `test_unload_library` unloads the
++ * library and then immediately calls this function. Maybe
++ * musl's `dlsym()` assumes the handle is never NULL and
++ * some UB is being invoked here...
++ */
++ if (handle->handle == NULL) {
++ handle->errormsg = "library not loaded";
++ return APR_ESYMNOTFOUND;
++ }
++
+ #if defined(DSO_USE_SHL)
+ void *symaddr = NULL;
+ int status;
+--
+2.42.0
+
diff --git a/meta/recipes-support/apr/apr/0002-apr-Remove-workdir-path-references-from-installed-ap.patch b/meta/recipes-support/apr/apr/0002-apr-Remove-workdir-path-references-from-installed-ap.patch
index 72e706f966..d63423f3a1 100644
--- a/meta/recipes-support/apr/apr/0002-apr-Remove-workdir-path-references-from-installed-ap.patch
+++ b/meta/recipes-support/apr/apr/0002-apr-Remove-workdir-path-references-from-installed-ap.patch
@@ -1,8 +1,7 @@
-From 5925b20da8bbc34d9bf5a5dca123ef38864d43c6 Mon Sep 17 00:00:00 2001
+From 689a8db96a6d1e1cae9cbfb35d05ac82140a6555 Mon Sep 17 00:00:00 2001
From: Hongxu Jia <hongxu.jia@windriver.com>
Date: Tue, 30 Jan 2018 09:39:06 +0800
-Subject: [PATCH 2/7] apr: Remove workdir path references from installed apr
- files
+Subject: [PATCH] apr: Remove workdir path references from installed apr files
Upstream-Status: Inappropriate [configuration]
@@ -14,20 +13,23 @@ packages at target run time, the workdir path caused confusion.
Rebase to 1.6.3
Signed-off-by: Hongxu Jia <hongxu.jia@windriver.com>
+
---
- apr-config.in | 26 ++------------------------
- 1 file changed, 2 insertions(+), 24 deletions(-)
+ apr-config.in | 32 ++------------------------------
+ 1 file changed, 2 insertions(+), 30 deletions(-)
diff --git a/apr-config.in b/apr-config.in
-index 84b4073..bbbf651 100644
+index bed47ca..47874e5 100644
--- a/apr-config.in
+++ b/apr-config.in
-@@ -152,14 +152,7 @@ while test $# -gt 0; do
+@@ -164,16 +164,7 @@ while test $# -gt 0; do
flags="$flags $LDFLAGS"
;;
--includes)
- if test "$location" = "installed"; then
flags="$flags -I$includedir $EXTRA_INCLUDES"
+- elif test "$location" = "crosscompile"; then
+- flags="$flags -I$APR_TARGET_DIR/$includedir $EXTRA_INCLUDES"
- elif test "$location" = "source"; then
- flags="$flags -I$APR_SOURCE_DIR/include $EXTRA_INCLUDES"
- else
@@ -37,13 +39,15 @@ index 84b4073..bbbf651 100644
;;
--srcdir)
echo $APR_SOURCE_DIR
-@@ -181,29 +174,14 @@ while test $# -gt 0; do
+@@ -197,33 +188,14 @@ while test $# -gt 0; do
exit 0
;;
--link-ld)
- if test "$location" = "installed"; then
- ### avoid using -L if libdir is a "standard" location like /usr/lib
- flags="$flags -L$libdir -l${APR_LIBNAME}"
+- elif test "$location" = "crosscompile"; then
+- flags="$flags -L$APR_TARGET_DIR/$libdir -l${APR_LIBNAME}"
- else
- ### this surely can't work since the library is in .libs?
- flags="$flags -L$APR_BUILD_DIR -l${APR_LIBNAME}"
@@ -62,6 +66,8 @@ index 84b4073..bbbf651 100644
- # Since the user is specifying they are linking with libtool, we
- # *know* that -R will be recognized by libtool.
- flags="$flags -L$libdir -R$libdir -l${APR_LIBNAME}"
+- elif test "$location" = "crosscompile"; then
+- flags="$flags -L${APR_TARGET_DIR}/$libdir -l${APR_LIBNAME}"
- else
- flags="$flags $LA_FILE"
- fi
@@ -69,6 +75,3 @@ index 84b4073..bbbf651 100644
;;
--shlib-path-var)
echo "$SHLIBPATH_VAR"
---
-1.8.3.1
-
diff --git a/meta/recipes-support/apr/apr/0003-Makefile.in-configure.in-support-cross-compiling.patch b/meta/recipes-support/apr/apr/0003-Makefile.in-configure.in-support-cross-compiling.patch
deleted file mode 100644
index 4dd53bd8eb..0000000000
--- a/meta/recipes-support/apr/apr/0003-Makefile.in-configure.in-support-cross-compiling.patch
+++ /dev/null
@@ -1,63 +0,0 @@
-From d5028c10f156c224475b340cfb1ba025d6797243 Mon Sep 17 00:00:00 2001
-From: Hongxu Jia <hongxu.jia@windriver.com>
-Date: Fri, 2 Feb 2018 15:51:42 +0800
-Subject: [PATCH 3/7] Makefile.in/configure.in: support cross compiling
-
-While cross compiling, the tools/gen_test_char could not
-be executed at build time, use AX_PROG_CC_FOR_BUILD to
-build native tools/gen_test_char
-
-Upstream-Status: Submitted [https://github.com/apache/apr/pull/8]
-
-Signed-off-by: Hongxu Jia <hongxu.jia@windriver.com>
----
- Makefile.in | 10 +++-------
- configure.in | 3 +++
- 2 files changed, 6 insertions(+), 7 deletions(-)
-
-diff --git a/Makefile.in b/Makefile.in
-index 5fb760e..8675f90 100644
---- a/Makefile.in
-+++ b/Makefile.in
-@@ -46,7 +46,7 @@ LT_VERSION = @LT_VERSION@
-
- CLEAN_TARGETS = apr-config.out apr.exp exports.c export_vars.c .make.dirs \
- build/apr_rules.out tools/gen_test_char@EXEEXT@ \
-- tools/gen_test_char.o tools/gen_test_char.lo \
-+ tools/gen_test_char.o \
- include/private/apr_escape_test_char.h
- DISTCLEAN_TARGETS = config.cache config.log config.status \
- include/apr.h include/arch/unix/apr_private.h \
-@@ -131,13 +131,9 @@ check: $(TARGET_LIB)
- etags:
- etags `find . -name '*.[ch]'`
-
--OBJECTS_gen_test_char = tools/gen_test_char.lo $(LOCAL_LIBS)
--tools/gen_test_char.lo: tools/gen_test_char.c
-+tools/gen_test_char@EXEEXT@: tools/gen_test_char.c
- $(APR_MKDIR) tools
-- $(LT_COMPILE)
--
--tools/gen_test_char@EXEEXT@: $(OBJECTS_gen_test_char)
-- $(LINK_PROG) $(OBJECTS_gen_test_char) $(ALL_LIBS)
-+ $(CC_FOR_BUILD) $(CFLAGS_FOR_BUILD) $< -o $@
-
- include/private/apr_escape_test_char.h: tools/gen_test_char@EXEEXT@
- $(APR_MKDIR) include/private
-diff --git a/configure.in b/configure.in
-index 719f331..361120f 100644
---- a/configure.in
-+++ b/configure.in
-@@ -183,6 +183,9 @@ dnl can only be used once within a configure script, so this prevents a
- dnl preload section from invoking the macro to get compiler info.
- AC_PROG_CC
-
-+dnl Check build CC for gen_test_char compiling which is executed at build time.
-+AX_PROG_CC_FOR_BUILD
-+
- dnl AC_PROG_SED is only avaliable in recent autoconf versions.
- dnl Use AC_CHECK_PROG instead if AC_PROG_SED is not present.
- ifdef([AC_PROG_SED],
---
-1.8.3.1
-
diff --git a/meta/recipes-support/apr/apr/0006-apr-fix-off_t-size-doesn-t-match-in-glibc-when-cross.patch b/meta/recipes-support/apr/apr/0006-apr-fix-off_t-size-doesn-t-match-in-glibc-when-cross.patch
deleted file mode 100644
index d1a2ebe881..0000000000
--- a/meta/recipes-support/apr/apr/0006-apr-fix-off_t-size-doesn-t-match-in-glibc-when-cross.patch
+++ /dev/null
@@ -1,76 +0,0 @@
-From 49661ea3858cf8494926cccf57d3e8c6dcb47117 Mon Sep 17 00:00:00 2001
-From: Dengke Du <dengke.du@windriver.com>
-Date: Wed, 14 Dec 2016 18:13:08 +0800
-Subject: [PATCH] apr: fix off_t size doesn't match in glibc when cross
- compiling
-
-In configure.in, it contains the following:
-
- APR_CHECK_SIZEOF_EXTENDED([#include <sys/types.h>], off_t, 8)
-
-the macro "APR_CHECK_SIZEOF_EXTENDED" was defined in build/apr_common.m4,
-it use the "AC_TRY_RUN" macro, this macro let the off_t to 8, when cross
-compiling enable.
-
-So it was hardcoded for cross compiling, we should detect it dynamic based on
-the sysroot's glibc. We change it to the following:
-
- AC_CHECK_SIZEOF(off_t)
-
-The same for the following hardcoded types for cross compiling:
-
- pid_t 8
- ssize_t 8
- size_t 8
- off_t 8
-
-Change the above correspondingly.
-
-Signed-off-by: Dengke Du <dengke.du@windriver.com>
-
-Upstream-Status: Pending
-
----
- configure.in | 8 ++++----
- 1 file changed, 4 insertions(+), 4 deletions(-)
-
-diff --git a/configure.in b/configure.in
-index 27b8539..fb408d1 100644
---- a/configure.in
-+++ b/configure.in
-@@ -1801,7 +1801,7 @@ else
- socklen_t_value="int"
- fi
-
--APR_CHECK_SIZEOF_EXTENDED([#include <sys/types.h>], pid_t, 8)
-+AC_CHECK_SIZEOF(pid_t)
-
- if test "$ac_cv_sizeof_pid_t" = "$ac_cv_sizeof_short"; then
- pid_t_fmt='#define APR_PID_T_FMT "hd"'
-@@ -1873,7 +1873,7 @@ APR_CHECK_TYPES_FMT_COMPATIBLE(size_t, unsigned long, lu, [size_t_fmt="lu"], [
- APR_CHECK_TYPES_FMT_COMPATIBLE(size_t, unsigned int, u, [size_t_fmt="u"])
- ])
-
--APR_CHECK_SIZEOF_EXTENDED([#include <sys/types.h>], ssize_t, 8)
-+AC_CHECK_SIZEOF(ssize_t)
-
- dnl the else cases below should no longer occur;
- AC_MSG_CHECKING([which format to use for apr_ssize_t])
-@@ -1891,7 +1891,7 @@ fi
-
- ssize_t_fmt="#define APR_SSIZE_T_FMT \"$ssize_t_fmt\""
-
--APR_CHECK_SIZEOF_EXTENDED([#include <stddef.h>], size_t, 8)
-+AC_CHECK_SIZEOF(size_t)
-
- # else cases below should no longer occur;
- AC_MSG_CHECKING([which format to use for apr_size_t])
-@@ -1909,7 +1909,7 @@ fi
-
- size_t_fmt="#define APR_SIZE_T_FMT \"$size_t_fmt\""
-
--APR_CHECK_SIZEOF_EXTENDED([#include <sys/types.h>], off_t, 8)
-+AC_CHECK_SIZEOF(off_t)
-
- if test "${ac_cv_sizeof_off_t}${apr_cv_use_lfs64}" = "4yes"; then
- # Enable LFS
diff --git a/meta/recipes-support/apr/apr/CVE-2021-35940.patch b/meta/recipes-support/apr/apr/CVE-2021-35940.patch
deleted file mode 100644
index 00befdacee..0000000000
--- a/meta/recipes-support/apr/apr/CVE-2021-35940.patch
+++ /dev/null
@@ -1,58 +0,0 @@
-
-SECURITY: CVE-2021-35940 (cve.mitre.org)
-
-Restore fix for CVE-2017-12613 which was missing in 1.7.x branch, though
-was addressed in 1.6.x in 1.6.3 and later via r1807976.
-
-The fix was merged back to 1.7.x in r1891198.
-
-Since this was a regression in 1.7.0, a new CVE name has been assigned
-to track this, CVE-2021-35940.
-
-Thanks to Iveta Cesalova <icesalov redhat.com> for reporting this issue.
-
-https://svn.apache.org/viewvc?view=revision&revision=1891198
-
-Upstream-Status: Backport
-CVE: CVE-2021-35940
-Signed-off-by: Armin Kuster <akuster@mvista.com>
-
-
-Index: time/unix/time.c
-===================================================================
---- a/time/unix/time.c (revision 1891197)
-+++ b/time/unix/time.c (revision 1891198)
-@@ -142,6 +142,9 @@
- static const int dayoffset[12] =
- {306, 337, 0, 31, 61, 92, 122, 153, 184, 214, 245, 275};
-
-+ if (xt->tm_mon < 0 || xt->tm_mon >= 12)
-+ return APR_EBADDATE;
-+
- /* shift new year to 1st March in order to make leap year calc easy */
-
- if (xt->tm_mon < 2)
-Index: time/win32/time.c
-===================================================================
---- a/time/win32/time.c (revision 1891197)
-+++ b/time/win32/time.c (revision 1891198)
-@@ -54,6 +54,9 @@
- static const int dayoffset[12] =
- {0, 31, 59, 90, 120, 151, 181, 212, 243, 273, 304, 334};
-
-+ if (tm->wMonth < 1 || tm->wMonth > 12)
-+ return APR_EBADDATE;
-+
- /* Note; the caller is responsible for filling in detailed tm_usec,
- * tm_gmtoff and tm_isdst data when applicable.
- */
-@@ -228,6 +231,9 @@
- static const int dayoffset[12] =
- {306, 337, 0, 31, 61, 92, 122, 153, 184, 214, 245, 275};
-
-+ if (xt->tm_mon < 0 || xt->tm_mon >= 12)
-+ return APR_EBADDATE;
-+
- /* shift new year to 1st March in order to make leap year calc easy */
-
- if (xt->tm_mon < 2)
diff --git a/meta/recipes-support/apr/apr/autoconf-2.73.patch b/meta/recipes-support/apr/apr/autoconf-2.73.patch
new file mode 100644
index 0000000000..a8b7a77566
--- /dev/null
+++ b/meta/recipes-support/apr/apr/autoconf-2.73.patch
@@ -0,0 +1,26 @@
+To work with autoconf 2.73, tweak the macro ordering in configure.in.
+
+Upstream-Status: Pending
+Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
+
+Index: apr-1.7.2/configure.in
+===================================================================
+--- apr-1.7.2.orig/configure.in
++++ apr-1.7.2/configure.in
+@@ -430,6 +430,8 @@ if test "$host" = "i586-pc-beos"; then
+ ) dnl
+ fi
+
++APR_CHECK_DEFINE(LOCK_EX, sys/file.h)
++
+ # this is the place to put specific options for platform/compiler
+ # combinations
+ case "$host:$CC" in
+@@ -2384,7 +2386,6 @@ AC_MSG_RESULT([$msg])
+ AC_SUBST(have_union_semun)
+
+ dnl Checks for libraries.
+-APR_CHECK_DEFINE(LOCK_EX, sys/file.h)
+ APR_CHECK_DEFINE(F_SETLK, fcntl.h)
+ APR_CHECK_DEFINE(SEM_UNDO, sys/sem.h)
+
diff --git a/meta/recipes-support/apr/apr/autoconf270.patch b/meta/recipes-support/apr/apr/autoconf270.patch
deleted file mode 100644
index 9f7b5c624c..0000000000
--- a/meta/recipes-support/apr/apr/autoconf270.patch
+++ /dev/null
@@ -1,22 +0,0 @@
-With autoconf 2.70 confdefs.h is already included. Including it twice generates
-compiler warnings and since this macros is to error on warnings, it breaks.
-
-Fix by not including the file.
-
-Upstream-Status: Pending
-RP - 2021/1/28
-
-Index: apr-1.7.0/build/apr_common.m4
-===================================================================
---- apr-1.7.0.orig/build/apr_common.m4
-+++ apr-1.7.0/build/apr_common.m4
-@@ -505,8 +505,7 @@ AC_DEFUN([APR_TRY_COMPILE_NO_WARNING],
- fi
- AC_COMPILE_IFELSE(
- [AC_LANG_SOURCE(
-- [#include "confdefs.h"
-- ]
-+ []
- [[$1]]
- [int main(int argc, const char *const *argv) {]
- [[$2]]
diff --git a/meta/recipes-support/apr/apr/libtoolize_check.patch b/meta/recipes-support/apr/apr/libtoolize_check.patch
index 740792e6b0..80ce43caa4 100644
--- a/meta/recipes-support/apr/apr/libtoolize_check.patch
+++ b/meta/recipes-support/apr/apr/libtoolize_check.patch
@@ -1,6 +1,7 @@
+From 17835709bc55657b7af1f7c99b3f572b819cf97e Mon Sep 17 00:00:00 2001
From: Helmut Grohne <helmut@subdivi.de>
-Subject: check for libtoolize rather than libtool
-Last-Update: 2014-09-19
+Date: Tue, 7 Feb 2023 07:04:00 +0000
+Subject: [PATCH] check for libtoolize rather than libtool
libtool is now in package libtool-bin, but apr only needs libtoolize.
@@ -8,14 +9,22 @@ Upstream-Status: Pending [ from debian: https://sources.debian.org/data/main/a/a
Signed-off-by: Robert Yang <liezhi.yang@windriver.com>
---- apr.orig/build/buildcheck.sh
-+++ apr/build/buildcheck.sh
-@@ -39,11 +39,11 @@ fi
+---
+ build/buildcheck.sh | 10 ++++------
+ 1 file changed, 4 insertions(+), 6 deletions(-)
+
+diff --git a/build/buildcheck.sh b/build/buildcheck.sh
+index 44921b5..08bc8a8 100755
+--- a/build/buildcheck.sh
++++ b/build/buildcheck.sh
+@@ -39,13 +39,11 @@ fi
# ltmain.sh (GNU libtool 1.1361 2004/01/02 23:10:52) 1.5a
# output is multiline from 1.5 onwards
-# Require libtool 1.4 or newer
--libtool=`build/PrintPath glibtool1 glibtool libtool libtool15 libtool14`
+-if test -z "$libtool"; then
+- libtool=`build/PrintPath glibtool1 glibtool libtool libtool15 libtool14`
+-fi
-lt_pversion=`$libtool --version 2>/dev/null|sed -e 's/([^)]*)//g;s/^[^0-9]*//;s/[- ].*//g;q'`
+# Require libtoolize 1.4 or newer
+libtoolize=`build/PrintPath glibtoolize1 glibtoolize libtoolize libtoolize15 libtoolize14`
diff --git a/meta/recipes-support/apr/apr_1.7.0.bb b/meta/recipes-support/apr/apr_1.7.0.bb
deleted file mode 100644
index 9c826d4380..0000000000
--- a/meta/recipes-support/apr/apr_1.7.0.bb
+++ /dev/null
@@ -1,128 +0,0 @@
-SUMMARY = "Apache Portable Runtime (APR) library"
-
-DESCRIPTION = "Create and maintain software libraries that provide a predictable \
-and consistent interface to underlying platform-specific implementations."
-
-HOMEPAGE = "http://apr.apache.org/"
-SECTION = "libs"
-DEPENDS = "util-linux"
-
-LICENSE = "Apache-2.0"
-LIC_FILES_CHKSUM = "file://LICENSE;md5=4dfd4cd216828c8cae5de5a12f3844c8 \
- file://include/apr_lib.h;endline=15;md5=823b3d1a7225df8f7b68a69c3c2b4c71"
-
-BBCLASSEXTEND = "native nativesdk"
-
-SRC_URI = "${APACHE_MIRROR}/apr/${BPN}-${PV}.tar.bz2 \
- file://run-ptest \
- file://0002-apr-Remove-workdir-path-references-from-installed-ap.patch \
- file://0003-Makefile.in-configure.in-support-cross-compiling.patch \
- file://0004-Fix-packet-discards-HTTP-redirect.patch \
- file://0005-configure.in-fix-LTFLAGS-to-make-it-work-with-ccache.patch \
- file://0006-apr-fix-off_t-size-doesn-t-match-in-glibc-when-cross.patch \
- file://0007-explicitly-link-libapr-against-phtread-to-make-gold-.patch \
- file://libtoolize_check.patch \
- file://0001-Add-option-to-disable-timed-dependant-tests.patch \
- file://autoconf270.patch \
- file://CVE-2021-35940.patch \
- "
-
-SRC_URI[md5sum] = "7a14a83d664e87599ea25ff4432e48a7"
-SRC_URI[sha256sum] = "e2e148f0b2e99b8e5c6caa09f6d4fb4dd3e83f744aa72a952f94f5a14436f7ea"
-
-inherit autotools-brokensep lib_package binconfig multilib_header ptest multilib_script
-
-OE_BINCONFIG_EXTRA_MANGLE = " -e 's:location=source:location=installed:'"
-
-# Added to fix some issues with cmake. Refer to https://github.com/bmwcarit/meta-ros/issues/68#issuecomment-19896928
-CACHED_CONFIGUREVARS += "apr_cv_mutex_recursive=yes"
-
-# Also suppress trying to use sctp.
-#
-CACHED_CONFIGUREVARS += "ac_cv_header_netinet_sctp_h=no ac_cv_header_netinet_sctp_uio_h=no"
-
-CACHED_CONFIGUREVARS += "ac_cv_sizeof_struct_iovec=yes"
-CACHED_CONFIGUREVARS += "ac_cv_file__dev_zero=yes"
-
-PACKAGECONFIG ??= "${@bb.utils.filter('DISTRO_FEATURES', 'ipv6', d)}"
-PACKAGECONFIG[ipv6] = "--enable-ipv6,--disable-ipv6,"
-PACKAGECONFIG[timed-tests] = "--enable-timed-tests,--disable-timed-tests,"
-
-do_configure:prepend() {
- # Avoid absolute paths for grep since it causes failures
- # when using sstate between different hosts with different
- # install paths for grep.
- export GREP="grep"
-
- cd ${S}
- # The "2" means libtool version 2.
- ./buildconf 2
-}
-
-MULTILIB_SCRIPTS = "${PN}-dev:${bindir}/apr-1-config \
- ${PN}-dev:${datadir}/build-1/apr_rules.mk"
-
-FILES:${PN}-dev += "${libdir}/apr.exp ${datadir}/build-1/*"
-RDEPENDS:${PN}-dev += "bash libtool"
-
-RDEPENDS:${PN}-ptest += "libgcc"
-
-#for some reason, build/libtool.m4 handled by buildconf still be overwritten
-#when autoconf, so handle it again.
-do_configure:append() {
- sed -i -e 's/LIBTOOL=\(.*\)top_build/LIBTOOL=\1apr_build/' ${S}/build/libtool.m4
- sed -i -e 's/LIBTOOL=\(.*\)top_build/LIBTOOL=\1apr_build/' ${S}/build/apr_rules.mk
-}
-
-do_install:append() {
- oe_multilib_header apr.h
- install -d ${D}${datadir}/apr
-}
-
-do_install:append:class-target() {
- rm -f ${D}${datadir}/build-1/libtool
- sed -i s,LIBTOOL=.*,LIBTOOL=libtool,g ${D}${datadir}/build-1/apr_rules.mk
- sed -i -e 's,${DEBUG_PREFIX_MAP},,g' \
- -e 's,${STAGING_DIR_HOST},,g' ${D}${datadir}/build-1/apr_rules.mk
- sed -i -e 's,${STAGING_DIR_HOST},,g' \
- -e 's,APR_SOURCE_DIR=.*,APR_SOURCE_DIR=,g' \
- -e 's,APR_BUILD_DIR=.*,APR_BUILD_DIR=,g' ${D}${bindir}/apr-1-config
-}
-
-SSTATE_SCAN_FILES += "apr_rules.mk libtool"
-
-SYSROOT_PREPROCESS_FUNCS += "apr_sysroot_preprocess"
-
-apr_sysroot_preprocess () {
- d=${SYSROOT_DESTDIR}${datadir}/apr
- install -d $d/
- cp ${S}/build/apr_rules.mk $d/
- sed -i s,apr_builddir=.*,apr_builddir=,g $d/apr_rules.mk
- sed -i s,apr_builders=.*,apr_builders=,g $d/apr_rules.mk
- sed -i s,LIBTOOL=.*,LIBTOOL=libtool,g $d/apr_rules.mk
- sed -i s,\$\(apr_builders\),${STAGING_DATADIR}/apr/,g $d/apr_rules.mk
- cp ${S}/build/mkdir.sh $d/
- cp ${S}/build/make_exports.awk $d/
- cp ${S}/build/make_var_export.awk $d/
- cp ${S}/libtool ${SYSROOT_DESTDIR}${datadir}/build-1/libtool
-}
-
-do_compile_ptest() {
- cd ${S}/test
- oe_runmake
-}
-
-do_install_ptest() {
- t=${D}${PTEST_PATH}/test
- mkdir -p $t/.libs
- cp -r ${S}/test/data $t/
- cp -r ${S}/test/.libs/*.so $t/.libs/
- cp ${S}/test/proc_child $t/
- cp ${S}/test/readchild $t/
- cp ${S}/test/sockchild $t/
- cp ${S}/test/sockperf $t/
- cp ${S}/test/testall $t/
- cp ${S}/test/tryread $t/
-}
-
-export CONFIG_SHELL="/bin/bash"
diff --git a/meta/recipes-support/apr/apr_1.7.4.bb b/meta/recipes-support/apr/apr_1.7.4.bb
new file mode 100644
index 0000000000..d322629b66
--- /dev/null
+++ b/meta/recipes-support/apr/apr_1.7.4.bb
@@ -0,0 +1,139 @@
+SUMMARY = "Apache Portable Runtime (APR) library"
+
+DESCRIPTION = "Create and maintain software libraries that provide a predictable \
+and consistent interface to underlying platform-specific implementations."
+
+HOMEPAGE = "http://apr.apache.org/"
+SECTION = "libs"
+DEPENDS = "util-linux"
+
+LICENSE = "Apache-2.0"
+LIC_FILES_CHKSUM = "file://LICENSE;md5=4dfd4cd216828c8cae5de5a12f3844c8 \
+ file://include/apr_lib.h;endline=15;md5=823b3d1a7225df8f7b68a69c3c2b4c71"
+
+BBCLASSEXTEND = "native nativesdk"
+
+SRC_URI = "${APACHE_MIRROR}/apr/${BPN}-${PV}.tar.bz2 \
+ file://run-ptest \
+ file://0002-apr-Remove-workdir-path-references-from-installed-ap.patch \
+ file://0004-Fix-packet-discards-HTTP-redirect.patch \
+ file://0005-configure.in-fix-LTFLAGS-to-make-it-work-with-ccache.patch \
+ file://0007-explicitly-link-libapr-against-phtread-to-make-gold-.patch \
+ file://libtoolize_check.patch \
+ file://0001-Add-option-to-disable-timed-dependant-tests.patch \
+ file://0001-configure-Remove-runtime-test-for-mmap-that-can-map-.patch \
+ file://autoconf-2.73.patch \
+ file://0001-dso-Check-for-NULL-handle-in-apr_dso_sym.patch \
+ "
+
+SRC_URI[sha256sum] = "fc648de983f3a2a6c9e78dea1f180639bd2fad6c06d556d4367a701fe5c35577"
+
+inherit autotools-brokensep lib_package binconfig multilib_header ptest multilib_script
+
+OE_BINCONFIG_EXTRA_MANGLE = " -e 's:location=source:location=installed:'"
+
+# Added to fix some issues with cmake. Refer to https://github.com/bmwcarit/meta-ros/issues/68#issuecomment-19896928
+CACHED_CONFIGUREVARS += "apr_cv_mutex_recursive=yes"
+# Enable largefile
+CACHED_CONFIGUREVARS += "apr_cv_use_lfs64=yes"
+# Additional AC_TRY_RUN tests which will need to be cached for cross compile
+CACHED_CONFIGUREVARS += "apr_cv_epoll=yes epoll_create1=yes apr_cv_sock_cloexec=yes \
+ ac_cv_struct_rlimit=yes \
+ ac_cv_func_sem_open=yes \
+ apr_cv_process_shared_works=yes \
+ apr_cv_mutex_robust_shared=yes \
+ "
+# Also suppress trying to use sctp.
+#
+CACHED_CONFIGUREVARS += "ac_cv_header_netinet_sctp_h=no ac_cv_header_netinet_sctp_uio_h=no"
+
+# ac_cv_sizeof_struct_iovec is deduced using runtime check which will fail during cross-compile
+CACHED_CONFIGUREVARS += "${@['ac_cv_sizeof_struct_iovec=16','ac_cv_sizeof_struct_iovec=8'][d.getVar('SITEINFO_BITS') != '32']}"
+
+CACHED_CONFIGUREVARS += "ac_cv_file__dev_zero=yes"
+
+CACHED_CONFIGUREVARS:append:libc-musl = " ac_cv_strerror_r_rc_int=yes"
+PACKAGECONFIG ??= "${@bb.utils.filter('DISTRO_FEATURES', 'ipv6', d)}"
+PACKAGECONFIG:append:libc-musl = " xsi-strerror"
+PACKAGECONFIG[ipv6] = "--enable-ipv6,--disable-ipv6,"
+PACKAGECONFIG[timed-tests] = "--enable-timed-tests,--disable-timed-tests,"
+PACKAGECONFIG[xsi-strerror] = "ac_cv_strerror_r_rc_int=yes,ac_cv_strerror_r_rc_int=no,"
+
+do_configure:prepend() {
+ # Avoid absolute paths for grep since it causes failures
+ # when using sstate between different hosts with different
+ # install paths for grep.
+ export GREP="grep"
+
+ cd ${S}
+ # The "2" means libtool version 2.
+ ./buildconf 2
+}
+
+MULTILIB_SCRIPTS = "${PN}-dev:${bindir}/apr-1-config \
+ ${PN}-dev:${datadir}/build-1/apr_rules.mk"
+
+FILES:${PN}-dev += "${libdir}/apr.exp ${datadir}/build-1/*"
+RDEPENDS:${PN}-dev += "bash libtool"
+
+RDEPENDS:${PN}-ptest += "libgcc"
+
+#for some reason, build/libtool.m4 handled by buildconf still be overwritten
+#when autoconf, so handle it again.
+do_configure:append() {
+ sed -i -e 's/LIBTOOL=\(.*\)top_build/LIBTOOL=\1apr_build/' ${S}/build/libtool.m4
+ sed -i -e 's/LIBTOOL=\(.*\)top_build/LIBTOOL=\1apr_build/' ${S}/build/apr_rules.mk
+}
+
+do_install:append() {
+ oe_multilib_header apr.h
+ install -d ${D}${datadir}/apr
+}
+
+do_install:append:class-target() {
+ rm -f ${D}${datadir}/build-1/libtool
+ sed -i s,LIBTOOL=.*,LIBTOOL=libtool,g ${D}${datadir}/build-1/apr_rules.mk
+ sed -i -e 's,${DEBUG_PREFIX_MAP},,g' \
+ -e 's,${STAGING_DIR_HOST},,g' ${D}${datadir}/build-1/apr_rules.mk
+ sed -i -e 's,${STAGING_DIR_HOST},,g' \
+ -e 's,APR_SOURCE_DIR=.*,APR_SOURCE_DIR=,g' \
+ -e 's,APR_BUILD_DIR=.*,APR_BUILD_DIR=,g' ${D}${bindir}/apr-1-config
+}
+
+SSTATE_SCAN_FILES += "apr_rules.mk libtool"
+
+SYSROOT_PREPROCESS_FUNCS += "apr_sysroot_preprocess"
+
+apr_sysroot_preprocess () {
+ d=${SYSROOT_DESTDIR}${datadir}/apr
+ install -d $d/
+ cp ${S}/build/apr_rules.mk $d/
+ sed -i s,apr_builddir=.*,apr_builddir=,g $d/apr_rules.mk
+ sed -i s,apr_builders=.*,apr_builders=,g $d/apr_rules.mk
+ sed -i s,LIBTOOL=.*,LIBTOOL=libtool,g $d/apr_rules.mk
+ sed -i s,\$\(apr_builders\),${STAGING_DATADIR}/apr/,g $d/apr_rules.mk
+ cp ${S}/build/mkdir.sh $d/
+ cp ${S}/build/make_exports.awk $d/
+ cp ${S}/build/make_var_export.awk $d/
+ cp ${S}/libtool ${SYSROOT_DESTDIR}${datadir}/build-1/libtool
+}
+
+do_compile_ptest() {
+ cd ${S}/test
+ oe_runmake
+}
+
+do_install_ptest() {
+ t=${D}${PTEST_PATH}/test
+ mkdir -p $t/.libs
+ cp -r ${S}/test/data $t/
+ cp -r ${S}/test/.libs/*.so $t/.libs/
+ cp ${S}/test/proc_child $t/
+ cp ${S}/test/readchild $t/
+ cp ${S}/test/sockchild $t/
+ cp ${S}/test/sockperf $t/
+ cp ${S}/test/testall $t/
+ cp ${S}/test/tryread $t/
+}
+
+export CONFIG_SHELL="/bin/bash"
diff --git a/meta/recipes-support/argp-standalone/argp-standalone_1.3.bb b/meta/recipes-support/argp-standalone/argp-standalone_1.3.bb
deleted file mode 100644
index e7599d69d6..0000000000
--- a/meta/recipes-support/argp-standalone/argp-standalone_1.3.bb
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright (C) 2015 Khem Raj <raj.khem@gmail.com>
-# Released under the MIT license (see COPYING.MIT for the terms)
-
-SUMMARY = "Glibc hierarchical argument parsing standalone library"
-DESCRIPTION = "Standalone version of arguments parsing functions from GLIBC"
-HOMEPAGE = "http://www.lysator.liu.se/~nisse/misc/"
-LICENSE = "LGPL-2.1-only"
-LIC_FILES_CHKSUM = "file://argp.h;beginline=1;endline=20;md5=008b7e53dea6f9e1d9fdef0d9cf3184a"
-SECTION = "libs"
-
-SRC_URI = "http://www.lysator.liu.se/~nisse/misc/argp-standalone-${PV}.tar.gz \
- file://0001-throw-in-funcdef.patch \
- file://0002-isprint.patch \
- file://out_of_tree_build.patch \
- "
-SRC_URI[md5sum] = "720704bac078d067111b32444e24ba69"
-SRC_URI[sha256sum] = "dec79694da1319acd2238ce95df57f3680fea2482096e483323fddf3d818d8be"
-
-inherit autotools
-
-CFLAGS += "-fPIC -U__OPTIMIZE__"
-
-RDEPENDS:${PN}-dev = ""
-RDEPENDS:${PN}-staticdev = ""
-
-do_install() {
- install -D -m 0644 ${B}/libargp.a ${D}${libdir}/libargp.a
- install -D -m 0644 ${S}/argp.h ${D}${includedir}/argp.h
-}
-#
-# We will skip parsing for non-musl systems
-#
-COMPATIBLE_HOST = ".*-musl.*"
diff --git a/meta/recipes-support/argp-standalone/argp-standalone_1.4.1.bb b/meta/recipes-support/argp-standalone/argp-standalone_1.4.1.bb
new file mode 100644
index 0000000000..00b6036502
--- /dev/null
+++ b/meta/recipes-support/argp-standalone/argp-standalone_1.4.1.bb
@@ -0,0 +1,31 @@
+# Copyright (C) 2015 Khem Raj <raj.khem@gmail.com>
+# Released under the MIT license (see COPYING.MIT for the terms)
+
+SUMMARY = "Glibc hierarchical argument parsing standalone library"
+DESCRIPTION = "Standalone version of arguments parsing functions from GLIBC"
+HOMEPAGE = "https://github.com/ericonr/argp-standalone"
+LICENSE = "LGPL-2.1-only"
+LIC_FILES_CHKSUM = "file://argp.h;beginline=1;endline=20;md5=464f2cfb1c35a5123f9e309d7afd79f8"
+SECTION = "libs"
+
+SRC_URI = "git://github.com/ericonr/argp-standalone;branch=master;protocol=https \
+ file://out_of_tree_build.patch \
+ "
+SRCREV = "e5fe9ad9e83e6765cf8fa787f903d4c6792338b5"
+S = "${WORKDIR}/git"
+
+inherit autotools
+
+CFLAGS += "-fPIC -U__OPTIMIZE__"
+
+DEV_PKG_DEPENDENCY = ""
+RDEPENDS:${PN}-staticdev = ""
+
+do_install() {
+ install -D -m 0644 ${B}/libargp.a ${D}${libdir}/libargp.a
+ install -D -m 0644 ${S}/argp.h ${D}${includedir}/argp.h
+}
+#
+# We will skip parsing for non-musl systems
+#
+COMPATIBLE_HOST = ".*-musl.*"
diff --git a/meta/recipes-support/argp-standalone/files/0001-throw-in-funcdef.patch b/meta/recipes-support/argp-standalone/files/0001-throw-in-funcdef.patch
deleted file mode 100644
index a6e2759c5d..0000000000
--- a/meta/recipes-support/argp-standalone/files/0001-throw-in-funcdef.patch
+++ /dev/null
@@ -1,84 +0,0 @@
-# --- T2-COPYRIGHT-NOTE-BEGIN ---
-# This copyright note is auto-generated by ./scripts/Create-CopyPatch.
-#
-# T2 SDE: package/.../rng-tools/throw-in-funcdef.patch.argp-standalone
-# Copyright (C) 2006 The T2 SDE Project
-#
-# More information can be found in the files COPYING and README.
-#
-# This patch file is dual-licensed. It is available under the license the
-# patched project is licensed under, as long as it is an OpenSource license
-# as defined at http://www.opensource.org/ (e.g. BSD, X11) or under the terms
-# of the GNU General Public License as published by the Free Software
-# Foundation; either version 2 of the License, or (at your option) any later
-# version.
-# --- T2-COPYRIGHT-NOTE-END ---
-
-
-No __THROW in function implementation.
- --jsaw
-
-Taken from buildroot
-
-Upstream-Status: Pending
-Signed-off-by: Khem Raj <raj.khem@gmail.com>
-
---- argp-standalone-1.4-test2/argp.h.orig 2006-01-06 02:29:59.000000000 +0100
-+++ argp-standalone-1.4-test2/argp.h 2006-01-06 02:41:10.000000000 +0100
-@@ -560,17 +560,17 @@
- # endif
-
- # ifndef ARGP_EI
--# define ARGP_EI extern __inline__
-+# define ARGP_EI extern inline
- # endif
-
- ARGP_EI void
--__argp_usage (__const struct argp_state *__state) __THROW
-+__argp_usage (__const struct argp_state *__state)
- {
- __argp_state_help (__state, stderr, ARGP_HELP_STD_USAGE);
- }
-
- ARGP_EI int
--__option_is_short (__const struct argp_option *__opt) __THROW
-+__option_is_short (__const struct argp_option *__opt)
- {
- if (__opt->flags & OPTION_DOC)
- return 0;
-@@ -582,7 +582,7 @@
- }
-
- ARGP_EI int
--__option_is_end (__const struct argp_option *__opt) __THROW
-+__option_is_end (__const struct argp_option *__opt)
- {
- return !__opt->key && !__opt->name && !__opt->doc && !__opt->group;
- }
---- argp-standalone-1.4-test2/argp-parse.c.orig 2006-01-06 02:47:48.000000000 +0100
-+++ argp-standalone-1.4-test2/argp-parse.c 2006-01-06 02:48:16.000000000 +0100
-@@ -1290,13 +1290,13 @@
- /* Defined here, in case a user is not inlining the definitions in
- * argp.h */
- void
--__argp_usage (__const struct argp_state *__state) __THROW
-+__argp_usage (__const struct argp_state *__state)
- {
- __argp_state_help (__state, stderr, ARGP_HELP_STD_USAGE);
- }
-
- int
--__option_is_short (__const struct argp_option *__opt) __THROW
-+__option_is_short (__const struct argp_option *__opt)
- {
- if (__opt->flags & OPTION_DOC)
- return 0;
-@@ -1310,7 +1310,7 @@
- }
-
- int
--__option_is_end (__const struct argp_option *__opt) __THROW
-+__option_is_end (__const struct argp_option *__opt)
- {
- return !__opt->key && !__opt->name && !__opt->doc && !__opt->group;
- }
diff --git a/meta/recipes-support/argp-standalone/files/0002-isprint.patch b/meta/recipes-support/argp-standalone/files/0002-isprint.patch
deleted file mode 100644
index 1c07eea3c1..0000000000
--- a/meta/recipes-support/argp-standalone/files/0002-isprint.patch
+++ /dev/null
@@ -1,51 +0,0 @@
-Subject: restrict value range passed to isprint function
-
-According to C standards isprint argument shall be representable as an
-unsigned char or be equal to EOF, otherwise the behaviour is undefined.
-
-Passing arbitrary ints leads to segfault in nm program from elfutils.
-
-Restrict isprint argument range to values representable by unsigned char.
-
-Signed-off-by: Max Filippov <jcmvbkbc@gmail.com>
-
-Taken from buildroot
-
-Upstream-Status: Pending
-Signed-off-by: Khem Raj <raj.khem@gmail.com>
-
----
-Index: b/argp.h
-===================================================================
---- a/argp.h
-+++ b/argp.h
-@@ -23,6 +23,7 @@
-
- #include <stdio.h>
- #include <ctype.h>
-+#include <limits.h>
-
- #define __need_error_t
- #include <errno.h>
-@@ -577,7 +578,7 @@
- else
- {
- int __key = __opt->key;
-- return __key > 0 && isprint (__key);
-+ return __key > 0 && __key <= UCHAR_MAX && isprint (__key);
- }
- }
-
-Index: b/argp-parse.c
-===================================================================
---- a/argp-parse.c
-+++ b/argp-parse.c
-@@ -1292,7 +1292,7 @@
- int __key = __opt->key;
- /* FIXME: whether or not a particular key implies a short option
- * ought not to be locale dependent. */
-- return __key > 0 && isprint (__key);
-+ return __key > 0 && __key <= UCHAR_MAX && isprint (__key);
- }
- }
-
diff --git a/meta/recipes-support/argp-standalone/files/out_of_tree_build.patch b/meta/recipes-support/argp-standalone/files/out_of_tree_build.patch
index b7777cbd91..c863104430 100644
--- a/meta/recipes-support/argp-standalone/files/out_of_tree_build.patch
+++ b/meta/recipes-support/argp-standalone/files/out_of_tree_build.patch
@@ -2,7 +2,7 @@ Fix the testsuite to built out of tree
in OE S != B
Signed-off-by: Khem Raj <raj.khem@gmail.com>
-Upstream-Status: Pending
+Upstream-Status: Submitted [https://github.com/ericonr/argp-standalone/pull/9]
Index: argp-standalone-1.3/testsuite/Makefile.am
===================================================================
--- argp-standalone-1.3.orig/testsuite/Makefile.am
diff --git a/meta/recipes-support/aspell/aspell_0.60.8.1.bb b/meta/recipes-support/aspell/aspell_0.60.8.1.bb
new file mode 100644
index 0000000000..0ea9b063e0
--- /dev/null
+++ b/meta/recipes-support/aspell/aspell_0.60.8.1.bb
@@ -0,0 +1,38 @@
+SUMMARY = "GNU Aspell spell-checker"
+
+DESCRIPTION = "Spell checker designed to eventually replace Ispell. \
+It can either be used as a library or as an independent spell checker. \
+Its main feature is that it does a superior job of suggesting possible \
+replacements for a misspelled word than just about any other spell \
+checker out there for the English language."
+
+SECTION = "console/utils"
+
+HOMEPAGE = "http://aspell.net/"
+
+LICENSE = "LGPL-2.0-only | LGPL-2.1-only"
+LIC_FILES_CHKSUM = "file://COPYING;md5=7fbc338309ac38fefcd64b04bb903e34"
+
+SRC_URI = "${GNU_MIRROR}/aspell/aspell-${PV}.tar.gz"
+SRC_URI[sha256sum] = "d6da12b34d42d457fa604e435ad484a74b2effcd120ff40acd6bb3fb2887d21b"
+
+PACKAGECONFIG ??= ""
+PACKAGECONFIG[curses] = "--enable-curses,--disable-curses,ncurses"
+
+PACKAGES += "libaspell libpspell aspell-utils"
+
+RDEPENDS:${PN}-utils += "perl"
+
+FILES:libaspell = "${libdir}/libaspell.so.* ${libdir}/aspell*"
+FILES:aspell-utils = "${bindir}/word-list-compress ${bindir}/aspell-import ${bindir}/run-with-aspell ${bindir}/pre*"
+FILES:${PN} = "${bindir}/aspell"
+FILES:libpspell = "${libdir}/libpspell.so.*"
+FILES:${PN}-dev += "${bindir}/pspell-config"
+
+ARM_INSTRUCTION_SET:armv4 = "arm"
+ARM_INSTRUCTION_SET:armv5 = "arm"
+ARM_INSTRUCTION_SET:armv6 = "arm"
+
+inherit autotools-brokensep gettext texinfo binconfig-disabled
+
+BINCONFIG = "${bindir}/pspell-config"
diff --git a/meta/recipes-support/aspell/aspell_0.60.8.bb b/meta/recipes-support/aspell/aspell_0.60.8.bb
deleted file mode 100644
index 39b55f4ff2..0000000000
--- a/meta/recipes-support/aspell/aspell_0.60.8.bb
+++ /dev/null
@@ -1,41 +0,0 @@
-SUMMARY = "GNU Aspell spell-checker"
-
-DESCRIPTION = "Spell checker designed to eventually replace Ispell. \
-It can either be used as a library or as an independent spell checker. \
-Its main feature is that it does a superior job of suggesting possible \
-replacements for a misspelled word than just about any other spell \
-checker out there for the English language."
-
-SECTION = "console/utils"
-
-HOMEPAGE = "http://aspell.net/"
-
-LICENSE = "LGPL-2.0-only | LGPL-2.1-only"
-LIC_FILES_CHKSUM = "file://COPYING;md5=7fbc338309ac38fefcd64b04bb903e34"
-
-SRC_URI = "${GNU_MIRROR}/aspell/aspell-${PV}.tar.gz \
- file://CVE-2019-25051.patch \
-"
-SRC_URI[md5sum] = "012fa9209203ae4e5a61c2a668fd10e3"
-SRC_URI[sha256sum] = "f9b77e515334a751b2e60daab5db23499e26c9209f5e7b7443b05235ad0226f2"
-
-PACKAGECONFIG ??= ""
-PACKAGECONFIG[curses] = "--enable-curses,--disable-curses,ncurses"
-
-PACKAGES += "libaspell libpspell aspell-utils"
-
-RDEPENDS:${PN}-utils += "perl"
-
-FILES:libaspell = "${libdir}/libaspell.so.* ${libdir}/aspell*"
-FILES:aspell-utils = "${bindir}/word-list-compress ${bindir}/aspell-import ${bindir}/run-with-aspell ${bindir}/pre*"
-FILES:${PN} = "${bindir}/aspell"
-FILES:libpspell = "${libdir}/libpspell.so.*"
-FILES:${PN}-dev += "${bindir}/pspell-config"
-
-ARM_INSTRUCTION_SET:armv4 = "arm"
-ARM_INSTRUCTION_SET:armv5 = "arm"
-ARM_INSTRUCTION_SET:armv6 = "arm"
-
-inherit autotools-brokensep gettext texinfo binconfig-disabled
-
-BINCONFIG = "${bindir}/pspell-config"
diff --git a/meta/recipes-support/aspell/files/CVE-2019-25051.patch b/meta/recipes-support/aspell/files/CVE-2019-25051.patch
deleted file mode 100644
index 8513f6de79..0000000000
--- a/meta/recipes-support/aspell/files/CVE-2019-25051.patch
+++ /dev/null
@@ -1,101 +0,0 @@
-From 0718b375425aad8e54e1150313b862e4c6fd324a Mon Sep 17 00:00:00 2001
-From: Kevin Atkinson <kevina@gnu.org>
-Date: Sat, 21 Dec 2019 20:32:47 +0000
-Subject: [PATCH] objstack: assert that the alloc size will fit within a chunk
- to prevent a buffer overflow
-
-Bug found using OSS-Fuze.
-
-Upstream-Status: Backport
-[https://github.com/gnuaspell/aspell/commit/0718b375425aad8e54e1150313b862e4c6fd324a]
-CVE: CVE-2019-25051
-Signed-off-by: Chee Yang Lee <chee.yang.lee@intel.com>
----
- common/objstack.hpp | 18 ++++++++++++++----
- 1 file changed, 14 insertions(+), 4 deletions(-)
-
-diff --git a/common/objstack.hpp b/common/objstack.hpp
-index 3997bf7..bd97ccd 100644
---- a/common/objstack.hpp
-+++ b/common/objstack.hpp
-@@ -5,6 +5,7 @@
- #include "parm_string.hpp"
- #include <stdlib.h>
- #include <assert.h>
-+#include <stddef.h>
-
- namespace acommon {
-
-@@ -26,6 +27,12 @@ class ObjStack
- byte * temp_end;
- void setup_chunk();
- void new_chunk();
-+ bool will_overflow(size_t sz) const {
-+ return offsetof(Node,data) + sz > chunk_size;
-+ }
-+ void check_size(size_t sz) {
-+ assert(!will_overflow(sz));
-+ }
-
- ObjStack(const ObjStack &);
- void operator=(const ObjStack &);
-@@ -56,7 +63,7 @@ class ObjStack
- void * alloc_bottom(size_t size) {
- byte * tmp = bottom;
- bottom += size;
-- if (bottom > top) {new_chunk(); tmp = bottom; bottom += size;}
-+ if (bottom > top) {check_size(size); new_chunk(); tmp = bottom; bottom += size;}
- return tmp;
- }
- // This alloc_bottom will insure that the object is aligned based on the
-@@ -66,7 +73,7 @@ class ObjStack
- align_bottom(align);
- byte * tmp = bottom;
- bottom += size;
-- if (bottom > top) {new_chunk(); goto loop;}
-+ if (bottom > top) {check_size(size); new_chunk(); goto loop;}
- return tmp;
- }
- char * dup_bottom(ParmString str) {
-@@ -79,7 +86,7 @@ class ObjStack
- // always be aligned as such.
- void * alloc_top(size_t size) {
- top -= size;
-- if (top < bottom) {new_chunk(); top -= size;}
-+ if (top < bottom) {check_size(size); new_chunk(); top -= size;}
- return top;
- }
- // This alloc_top will insure that the object is aligned based on
-@@ -88,7 +95,7 @@ class ObjStack
- {loop:
- top -= size;
- align_top(align);
-- if (top < bottom) {new_chunk(); goto loop;}
-+ if (top < bottom) {check_size(size); new_chunk(); goto loop;}
- return top;
- }
- char * dup_top(ParmString str) {
-@@ -117,6 +124,7 @@ class ObjStack
- void * alloc_temp(size_t size) {
- temp_end = bottom + size;
- if (temp_end > top) {
-+ check_size(size);
- new_chunk();
- temp_end = bottom + size;
- }
-@@ -131,6 +139,7 @@ class ObjStack
- } else {
- size_t s = temp_end - bottom;
- byte * p = bottom;
-+ check_size(size);
- new_chunk();
- memcpy(bottom, p, s);
- temp_end = bottom + size;
-@@ -150,6 +159,7 @@ class ObjStack
- } else {
- size_t s = temp_end - bottom;
- byte * p = bottom;
-+ check_size(size);
- new_chunk();
- memcpy(bottom, p, s);
- temp_end = bottom + size;
diff --git a/meta/recipes-support/atk/at-spi2-atk_2.38.0.bb b/meta/recipes-support/atk/at-spi2-atk_2.38.0.bb
deleted file mode 100644
index 87f7b83c7d..0000000000
--- a/meta/recipes-support/atk/at-spi2-atk_2.38.0.bb
+++ /dev/null
@@ -1,20 +0,0 @@
-SUMMARY = "AT-SPI 2 Toolkit Bridge"
-DESCRIPTION = "Contains a library that bridges ATK to At-Spi2 D-Bus service. Toolkit widgets use it to provide their content to screen readers such as Orca."
-HOMEPAGE = "https://wiki.linuxfoundation.org/accessibility/d-bus"
-BUGTRACKER = "http://bugzilla.gnome.org/"
-LICENSE = "LGPL-2.1-or-later"
-LIC_FILES_CHKSUM = "file://COPYING;md5=4fbd65380cdd255951079008b364516c"
-
-SRC_URI[archive.sha256sum] = "cfa008a5af822b36ae6287f18182c40c91dd699c55faa38605881ed175ca464f"
-
-DEPENDS = "dbus glib-2.0 glib-2.0-native atk at-spi2-core libxml2"
-
-GNOMEBASEBUILDCLASS = "meson"
-inherit gnomebase upstream-version-is-even
-
-PACKAGES =+ "${PN}-gnome ${PN}-gtk2"
-
-FILES:${PN}-gnome = "${libdir}/gnome-settings-daemon-3.0/gtk-modules"
-FILES:${PN}-gtk2 = "${libdir}/gtk-2.0/modules/libatk-bridge.*"
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-support/atk/at-spi2-core_2.44.1.bb b/meta/recipes-support/atk/at-spi2-core_2.44.1.bb
deleted file mode 100644
index 95a6c619f0..0000000000
--- a/meta/recipes-support/atk/at-spi2-core_2.44.1.bb
+++ /dev/null
@@ -1,39 +0,0 @@
-SUMMARY = "Assistive Technology Service Provider Interface (dbus core)"
-
-DESCRIPTION = "It provides a Service Provider Interface for the Assistive Technologies available on the GNOME platform and a library against which applications can be linked."
-
-HOMEPAGE = "https://wiki.linuxfoundation.org/accessibility/d-bus"
-BUGTRACKER = "http://bugzilla.gnome.org/"
-LICENSE = "LGPL-2.1-or-later"
-LIC_FILES_CHKSUM = "file://COPYING;md5=4fbd65380cdd255951079008b364516c"
-
-MAJ_VER = "${@oe.utils.trim_version("${PV}", 2)}"
-
-SRC_URI = "${GNOME_MIRROR}/${BPN}/${MAJ_VER}/${BPN}-${PV}.tar.xz"
-
-SRC_URI[sha256sum] = "4beb23270ba6cf7caf20b597354d75194d89afb69d2efcf15f4271688ba6f746"
-
-X11DEPENDS = "virtual/libx11 libxi libxtst"
-
-DEPENDS = "dbus glib-2.0"
-DEPENDS += "${@bb.utils.contains('DISTRO_FEATURES', 'x11', '${X11DEPENDS}', '', d)}"
-
-inherit meson gtk-doc gettext systemd pkgconfig upstream-version-is-even gobject-introspection
-
-EXTRA_OEMESON = " -Dsystemd_user_dir=${systemd_user_unitdir} \
- -Ddbus_daemon=${bindir}/dbus-daemon \
- ${@bb.utils.contains('DISTRO_FEATURES', 'x11', '-Dx11=yes', '-Dx11=no', d)} \
-"
-
-GTKDOC_MESON_OPTION = "docs"
-
-GIR_MESON_OPTION = 'introspection'
-GIR_MESON_ENABLE_FLAG = 'yes'
-GIR_MESON_DISABLE_FLAG = 'no'
-
-FILES:${PN} += "${datadir}/dbus-1/services/*.service \
- ${datadir}/dbus-1/accessibility-services/*.service \
- ${datadir}/defaults/at-spi2 \
- ${systemd_user_unitdir}/at-spi-dbus-bus.service \
- "
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-support/atk/at-spi2-core_2.52.0.bb b/meta/recipes-support/atk/at-spi2-core_2.52.0.bb
new file mode 100644
index 0000000000..cf221e0389
--- /dev/null
+++ b/meta/recipes-support/atk/at-spi2-core_2.52.0.bb
@@ -0,0 +1,48 @@
+SUMMARY = "Assistive Technology Service Provider Interface (dbus core)"
+
+DESCRIPTION = "It provides a Service Provider Interface for the Assistive Technologies available on the GNOME platform and a library against which applications can be linked."
+
+HOMEPAGE = "https://wiki.linuxfoundation.org/accessibility/d-bus"
+BUGTRACKER = "http://bugzilla.gnome.org/"
+LICENSE = "LGPL-2.1-or-later"
+LIC_FILES_CHKSUM = "file://COPYING;md5=4fbd65380cdd255951079008b364516c"
+
+MAJ_VER = "${@oe.utils.trim_version("${PV}", 2)}"
+
+SRC_URI = "${GNOME_MIRROR}/${BPN}/${MAJ_VER}/${BPN}-${PV}.tar.xz"
+
+SRC_URI[sha256sum] = "0ac3fc8320c8d01fa147c272ba7fa03806389c6b03d3c406d0823e30e35ff5ab"
+
+DEPENDS = " \
+ dbus \
+ glib-2.0 \
+ glib-2.0-native \
+ libxml2 \
+ ${@'python3-sphinx-native' if d.getVar('GIDOCGEN_ENABLED') == 'True' else ''} \
+"
+
+# For backwards compatibility
+PROVIDES += "atk at-spi2-atk"
+RPROVIDES:${PN} += "atk at-spi2-atk"
+
+inherit meson gi-docgen gettext systemd pkgconfig upstream-version-is-even gobject-introspection
+
+EXTRA_OEMESON = " -Dsystemd_user_dir=${systemd_user_unitdir} \
+ -Ddbus_daemon=${bindir}/dbus-daemon"
+
+PACKAGECONFIG ??= "${@bb.utils.filter('DISTRO_FEATURES', 'x11', d)}"
+PACKAGECONFIG[x11] = "-Dx11=enabled,-Dx11=disabled,virtual/libx11 libxi libxtst"
+
+GIDOCGEN_MESON_OPTION = "docs"
+GIR_MESON_OPTION = 'introspection'
+GIR_MESON_ENABLE_FLAG = 'enabled'
+GIR_MESON_DISABLE_FLAG = 'disabled'
+
+FILES:${PN} += "${libdir}/gnome-settings-daemon-3.0/gtk-modules/at-spi2-atk.desktop \
+ ${libdir}/gtk-2.0/modules/libatk-bridge.so \
+ ${datadir}/dbus-1/services/*.service \
+ ${datadir}/dbus-1/accessibility-services/*.service \
+ ${datadir}/defaults/at-spi2 \
+ ${systemd_user_unitdir}/at-spi-dbus-bus.service \
+ "
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-support/atk/atk_2.38.0.bb b/meta/recipes-support/atk/atk_2.38.0.bb
deleted file mode 100644
index 62fec9abf8..0000000000
--- a/meta/recipes-support/atk/atk_2.38.0.bb
+++ /dev/null
@@ -1,20 +0,0 @@
-SUMMARY = "Accessibility toolkit for GNOME"
-DESCRIPTION = "Provides application programming interfaces (APIs) for implementing accessibility support in software."
-HOMEPAGE = "https://wiki.gnome.org/Accessibility"
-BUGTRACKER = "https://gitlab.gnome.org/GNOME/atk/-/issues"
-SECTION = "x11/libs"
-
-LICENSE = "GPL-2.0-or-later & LGPL-2.0-or-later"
-LIC_FILES_CHKSUM = "file://COPYING;md5=3bf50002aefd002f49e7bb854063f7e7 \
- file://atk/atkutil.c;endline=18;md5=6fd31cd2fdc9b30f619ca8d819bc12d3 \
- file://atk/atk.h;endline=18;md5=fcd7710187e0eae485e356c30d1b0c3b"
-
-# Need gettext-native as Meson can't turn off i18n
-DEPENDS = "gettext-native glib-2.0"
-
-GNOMEBASEBUILDCLASS = "meson"
-inherit gnomebase gtk-doc gettext upstream-version-is-even gobject-introspection
-
-SRC_URI[archive.sha256sum] = "ac4de2a4ef4bd5665052952fe169657e65e895c5057dffb3c2a810f6191a0c36"
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-support/attr/acl/0001-test-patch-out-failing-bits.patch b/meta/recipes-support/attr/acl/0001-test-patch-out-failing-bits.patch
index ba2ffee5d2..219feaccd0 100644
--- a/meta/recipes-support/attr/acl/0001-test-patch-out-failing-bits.patch
+++ b/meta/recipes-support/attr/acl/0001-test-patch-out-failing-bits.patch
@@ -1,4 +1,4 @@
-From 9e08219e0e99ee2589cf35fa8d52cef3515accce Mon Sep 17 00:00:00 2001
+From 7dec6fa3b3494a55120402ff1ea3eb96b67138e8 Mon Sep 17 00:00:00 2001
From: Alexander Kanavin <alex.kanavin@gmail.com>
Date: Thu, 12 Dec 2019 15:47:49 +0100
Subject: [PATCH] test: patch out failing bits
@@ -58,6 +58,3 @@ index 8f8f825..21e8a95 100644
$ : > f
$ : <> f
>~ .*f: Permission denied$
---
-2.17.1
-
diff --git a/meta/recipes-support/attr/acl/0001-tests-do-not-hardcode-the-build-path-into-a-helper-l.patch b/meta/recipes-support/attr/acl/0001-tests-do-not-hardcode-the-build-path-into-a-helper-l.patch
index 57ef0bb728..748f37f3e7 100644
--- a/meta/recipes-support/attr/acl/0001-tests-do-not-hardcode-the-build-path-into-a-helper-l.patch
+++ b/meta/recipes-support/attr/acl/0001-tests-do-not-hardcode-the-build-path-into-a-helper-l.patch
@@ -1,4 +1,4 @@
-From 39d332a8801de5d9ef09dacb3dba85c208b7b2ad Mon Sep 17 00:00:00 2001
+From 42ae3f8a5e32ba0681ccd1552a203ddad8748a6e Mon Sep 17 00:00:00 2001
From: Alexander Kanavin <alex.kanavin@gmail.com>
Date: Thu, 12 Dec 2019 13:45:52 +0100
Subject: [PATCH] tests: do not hardcode the build path into a helper library
@@ -10,7 +10,7 @@ Signed-off-by: Alexander Kanavin <alex.kanavin@gmail.com>
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/test/Makemodule.am b/test/Makemodule.am
-index 17d4927..015de7f 100644
+index e1d715d..cffe732 100644
--- a/test/Makemodule.am
+++ b/test/Makemodule.am
@@ -30,7 +30,7 @@ EXTRA_DIST += \
@@ -21,4 +21,4 @@ index 17d4927..015de7f 100644
+libtestlookup_la_CFLAGS = -DBASEDIR=\"/tmp/acl-ptest\"
libtestlookup_la_LDFLAGS = -rpath $(abs_builddir)
- AM_TESTS_ENVIRONMENT = PATH="$(abs_top_builddir):$$PATH";
+ # Make sure translations don't break tests when matching output.
diff --git a/meta/recipes-support/attr/acl/run-ptest b/meta/recipes-support/attr/acl/run-ptest
index 4312823365..3af75c84fe 100644
--- a/meta/recipes-support/attr/acl/run-ptest
+++ b/meta/recipes-support/attr/acl/run-ptest
@@ -7,4 +7,10 @@
mkdir -p /tmp/acl-ptest/test
cp test/test.* /tmp/acl-ptest/test
+set +e
make test-suite.log
+exitcode=$?
+if [ $exitcode -ne 0 -a -e test-suite.log ]; then
+ cat test-suite.log
+fi
+exit $exitcode
diff --git a/meta/recipes-support/attr/acl_2.3.1.bb b/meta/recipes-support/attr/acl_2.3.1.bb
deleted file mode 100644
index aca04a9aac..0000000000
--- a/meta/recipes-support/attr/acl_2.3.1.bb
+++ /dev/null
@@ -1,78 +0,0 @@
-SUMMARY = "Utilities for managing POSIX Access Control Lists"
-DESCRIPTION = "ACL allows you to provide different levels of access to files \
-and folders for different users."
-
-HOMEPAGE = "http://savannah.nongnu.org/projects/acl/"
-BUGTRACKER = "http://savannah.nongnu.org/bugs/?group=acl"
-
-SECTION = "libs"
-
-LICENSE = "LGPL-2.1-or-later & GPL-2.0-or-later"
-LICENSE:${PN} = "GPL-2.0-or-later"
-LICENSE:lib${BPN} = "LGPL-2.1-or-later"
-LIC_FILES_CHKSUM = "file://doc/COPYING;md5=c781d70ed2b4d48995b790403217a249 \
- file://doc/COPYING.LGPL;md5=9e9a206917f8af112da634ce3ab41764"
-
-DEPENDS = "attr"
-
-SRC_URI = "${SAVANNAH_GNU_MIRROR}/acl/${BP}.tar.gz \
- file://run-ptest \
- file://0001-tests-do-not-hardcode-the-build-path-into-a-helper-l.patch \
- file://0001-test-patch-out-failing-bits.patch \
- "
-
-SRC_URI[sha256sum] = "760c61c68901b37fdd5eefeeaf4c0c7a26bdfdd8ac747a1edff1ce0e243c11af"
-
-inherit autotools gettext ptest
-
-PACKAGES =+ "lib${BPN}"
-
-FILES:lib${BPN} = "${libdir}/lib*${SOLIBS}"
-
-PTEST_BUILD_HOST_FILES = "builddefs"
-PTEST_BUILD_HOST_PATTERN = "^RPM"
-
-do_compile_ptest() {
- oe_runmake libtestlookup.la
-}
-
-do_install_ptest() {
- cp -rf ${S}/test/ ${D}${PTEST_PATH}
- cp -rf ${S}/build-aux/ ${D}${PTEST_PATH}
- mkdir -p ${D}${PTEST_PATH}/.libs
- cp -rf ${B}/.libs/libtestlookup* ${D}${PTEST_PATH}/.libs
- cp ${B}/Makefile ${D}${PTEST_PATH}
-
- sed -e 's,--sysroot=${STAGING_DIR_TARGET},,g' \
- -e 's|${DEBUG_PREFIX_MAP}||g' \
- -e 's:${HOSTTOOLS_DIR}/::g' \
- -e 's:${RECIPE_SYSROOT_NATIVE}::g' \
- -e 's:${BASE_WORKDIR}/${MULTIMACH_TARGET_SYS}::g' \
- -i ${D}${PTEST_PATH}/Makefile
-
- sed -i "s|^srcdir =.*|srcdir = \.|g" ${D}${PTEST_PATH}/Makefile
- sed -i "s|^abs_srcdir =.*|abs_srcdir = \.|g" ${D}${PTEST_PATH}/Makefile
- sed -i "s|^abs_top_srcdir =.*|abs_top_srcdir = \.\.|g" ${D}${PTEST_PATH}/Makefile
- sed -i "s|^Makefile:.*|Makefile:|g" ${D}${PTEST_PATH}/Makefile
-
- rm ${D}${PTEST_PATH}/.libs/libtestlookup.lai
-}
-
-RDEPENDS:${PN}-ptest = "acl \
- bash \
- coreutils \
- perl \
- perl-module-filehandle \
- perl-module-getopt-std \
- perl-module-posix \
- shadow \
- make \
- gawk \
- e2fsprogs-mke2fs \
- perl-module-cwd \
- perl-module-file-basename \
- perl-module-file-path \
- perl-module-file-spec \
- "
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-support/attr/acl_2.3.2.bb b/meta/recipes-support/attr/acl_2.3.2.bb
new file mode 100644
index 0000000000..6178473873
--- /dev/null
+++ b/meta/recipes-support/attr/acl_2.3.2.bb
@@ -0,0 +1,85 @@
+SUMMARY = "Utilities for managing POSIX Access Control Lists"
+DESCRIPTION = "ACL allows you to provide different levels of access to files \
+and folders for different users."
+
+HOMEPAGE = "http://savannah.nongnu.org/projects/acl/"
+BUGTRACKER = "http://savannah.nongnu.org/bugs/?group=acl"
+
+SECTION = "libs"
+
+LICENSE = "LGPL-2.1-or-later & GPL-2.0-or-later"
+LICENSE:${PN} = "GPL-2.0-or-later"
+LICENSE:lib${BPN} = "LGPL-2.1-or-later"
+LIC_FILES_CHKSUM = "file://doc/COPYING;md5=c781d70ed2b4d48995b790403217a249 \
+ file://doc/COPYING.LGPL;md5=9e9a206917f8af112da634ce3ab41764"
+
+DEPENDS = "attr"
+
+SRC_URI = "${SAVANNAH_GNU_MIRROR}/acl/${BP}.tar.gz \
+ file://run-ptest \
+ file://0001-tests-do-not-hardcode-the-build-path-into-a-helper-l.patch \
+ file://0001-test-patch-out-failing-bits.patch \
+ "
+
+SRC_URI[sha256sum] = "5f2bdbad629707aa7d85c623f994aa8a1d2dec55a73de5205bac0bf6058a2f7c"
+
+inherit autotools gettext ptest
+
+EXTRA_OECONF += "--enable-largefile"
+
+PACKAGES =+ "lib${BPN}"
+
+FILES:lib${BPN} = "${libdir}/lib*${SOLIBS}"
+
+PTEST_BUILD_HOST_FILES = "builddefs"
+PTEST_BUILD_HOST_PATTERN = "^RPM"
+
+do_compile_ptest() {
+ oe_runmake libtestlookup.la
+}
+
+do_install_ptest() {
+ cp -rf ${S}/test/ ${D}${PTEST_PATH}
+ cp -rf ${S}/build-aux/ ${D}${PTEST_PATH}
+ mkdir -p ${D}${PTEST_PATH}/.libs
+ cp -rf ${B}/.libs/libtestlookup* ${D}${PTEST_PATH}/.libs
+ cp ${B}/Makefile ${D}${PTEST_PATH}
+
+ sed -e 's,--sysroot=${STAGING_DIR_TARGET},,g' \
+ -e 's|${DEBUG_PREFIX_MAP}||g' \
+ -e 's:${HOSTTOOLS_DIR}/::g' \
+ -e 's:${RECIPE_SYSROOT_NATIVE}::g' \
+ -e 's:${BASE_WORKDIR}/${MULTIMACH_TARGET_SYS}::g' \
+ -i ${D}${PTEST_PATH}/Makefile
+
+ sed -i "s|^srcdir =.*|srcdir = \.|g" ${D}${PTEST_PATH}/Makefile
+ sed -i "s|^abs_srcdir =.*|abs_srcdir = \.|g" ${D}${PTEST_PATH}/Makefile
+ sed -i "s|^abs_top_srcdir =.*|abs_top_srcdir = \.\.|g" ${D}${PTEST_PATH}/Makefile
+ sed -i "s|^Makefile:.*|Makefile:|g" ${D}${PTEST_PATH}/Makefile
+
+ rm ${D}${PTEST_PATH}/.libs/libtestlookup.lai
+}
+
+do_install_ptest:append:libc-musl() {
+ sed -i -e '/test\/misc.test/d' ${D}${PTEST_PATH}/Makefile
+}
+
+RDEPENDS:${PN}-ptest = "acl \
+ bash \
+ coreutils \
+ perl \
+ perl-module-constant \
+ perl-module-filehandle \
+ perl-module-getopt-std \
+ perl-module-posix \
+ shadow \
+ make \
+ gawk \
+ e2fsprogs-mke2fs \
+ perl-module-cwd \
+ perl-module-file-basename \
+ perl-module-file-path \
+ perl-module-file-spec \
+ "
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-support/attr/attr.inc b/meta/recipes-support/attr/attr.inc
index a4e38f2b19..75d616893a 100644
--- a/meta/recipes-support/attr/attr.inc
+++ b/meta/recipes-support/attr/attr.inc
@@ -16,13 +16,14 @@ LIC_FILES_CHKSUM = "file://doc/COPYING;md5=2d0aa14b3fce4694e4f615e30186335f \
SRC_URI = "${SAVANNAH_GNU_MIRROR}/attr/${BP}.tar.gz \
file://run-ptest \
+ file://0001-attr.c-Include-libgen.h-for-posix-version-of-basenam.patch \
"
inherit ptest update-alternatives autotools gettext
PACKAGES =+ "lib${BPN}"
-FILES:lib${BPN} = "${libdir}/lib*${SOLIBS}"
+FILES:lib${BPN} = "${libdir}/lib*${SOLIBS} ${sysconfdir}"
ALTERNATIVE_PRIORITY = "100"
ALTERNATIVE:${PN} = "setfattr"
@@ -48,8 +49,14 @@ do_install_ptest() {
cp -rf ${S}/test/ ${D}${PTEST_PATH}
}
+do_install_ptest:append:libc-musl() {
+ sed -i -e 's|f: Operation n|f: N|g' ${D}${PTEST_PATH}/test/attr.test
+}
+
RDEPENDS:${PN}-ptest = "attr \
+ bash \
coreutils \
+ perl-module-constant \
perl-module-filehandle \
perl-module-getopt-std \
perl-module-posix \
diff --git a/meta/recipes-support/attr/attr/0001-attr.c-Include-libgen.h-for-posix-version-of-basenam.patch b/meta/recipes-support/attr/attr/0001-attr.c-Include-libgen.h-for-posix-version-of-basenam.patch
new file mode 100644
index 0000000000..1e2bea5067
--- /dev/null
+++ b/meta/recipes-support/attr/attr/0001-attr.c-Include-libgen.h-for-posix-version-of-basenam.patch
@@ -0,0 +1,35 @@
+From 6d9e827bcacf387bb3cfae64bd4fe520168ccad4 Mon Sep 17 00:00:00 2001
+From: Khem Raj <raj.khem@gmail.com>
+Date: Sun, 3 Dec 2023 19:29:27 -0800
+Subject: [PATCH] attr.c: Include libgen.h for posix version of basename API
+
+Musl has removed the definition from string.h [1] which results in
+compile failures with clang
+
+| ../attr-2.5.1/tools/attr.c:69:13: error: call to undeclared function 'basename'; ISO C99 and later do not support implicit function declarations [-Wimplicit-function-declaration]
+| 69 | progname = basename(argv[0]);
+| | ^
+
+[1] https://git.musl-libc.org/cgit/musl/commit/?id=725e17ed6dff4d0cd22487bb64470881e86a92e7
+
+Upstream-Status: Submitted [https://savannah.nongnu.org/bugs/index.php?64972]
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+---
+ tools/attr.c | 1 +
+ 1 file changed, 1 insertion(+)
+
+diff --git a/tools/attr.c b/tools/attr.c
+index 312aef1..90dab83 100644
+--- a/tools/attr.c
++++ b/tools/attr.c
+@@ -26,6 +26,7 @@
+ #include <stdlib.h>
+ #include <unistd.h>
+ #include <errno.h>
++#include <libgen.h>
+ #include <string.h>
+ #include <locale.h>
+
+--
+2.43.0
+
diff --git a/meta/recipes-support/attr/attr/run-ptest b/meta/recipes-support/attr/attr/run-ptest
index f64244f239..3e7a3a17a0 100644
--- a/meta/recipes-support/attr/attr/run-ptest
+++ b/meta/recipes-support/attr/attr/run-ptest
@@ -1,3 +1,10 @@
#!/bin/sh
+set +e
make test-suite.log
+exitcode=$?
+if [ $exitcode -ne 0 -a -e test-suite.log ]; then
+ cat test-suite.log
+fi
+exit $exitcode
+
diff --git a/meta/recipes-support/attr/attr_2.5.1.bb b/meta/recipes-support/attr/attr_2.5.1.bb
deleted file mode 100644
index 5c5f7bcffb..0000000000
--- a/meta/recipes-support/attr/attr_2.5.1.bb
+++ /dev/null
@@ -1,5 +0,0 @@
-require attr.inc
-
-SRC_URI[sha256sum] = "bae1c6949b258a0d68001367ce0c741cebdacdd3b62965d17e5eb23cd78adaf8"
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-support/attr/attr_2.5.2.bb b/meta/recipes-support/attr/attr_2.5.2.bb
new file mode 100644
index 0000000000..2110c6d885
--- /dev/null
+++ b/meta/recipes-support/attr/attr_2.5.2.bb
@@ -0,0 +1,5 @@
+require attr.inc
+
+SRC_URI[sha256sum] = "39bf67452fa41d0948c2197601053f48b3d78a029389734332a6309a680c6c87"
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-support/bash-completion/bash-completion_2.11.bb b/meta/recipes-support/bash-completion/bash-completion_2.11.bb
deleted file mode 100644
index d981dd0c3a..0000000000
--- a/meta/recipes-support/bash-completion/bash-completion_2.11.bb
+++ /dev/null
@@ -1,40 +0,0 @@
-SUMMARY = "Programmable Completion for Bash 4"
-DESCRIPTION = "Collection of command line command completions for the Bash shell, \
-collection of helper functions to assist in creating new completions, \
-and set of facilities for loading completions automatically on demand, as well \
-as installing them."
-
-HOMEPAGE = "https://github.com/scop/bash-completion"
-BUGTRACKER = "https://github.com/scop/bash-completion/issues"
-
-LICENSE = "GPL-2.0-only"
-LIC_FILES_CHKSUM = "file://COPYING;md5=751419260aa954499f7abaabaa882bbe"
-
-SECTION = "console/utils"
-
-SRC_URI = "https://github.com/scop/bash-completion/releases/download/${PV}/${BPN}-${PV}.tar.xz"
-
-SRC_URI[md5sum] = "2514c6772d0de6254758b98c53f91861"
-SRC_URI[sha256sum] = "73a8894bad94dee83ab468fa09f628daffd567e8bef1a24277f1e9a0daf911ac"
-UPSTREAM_CHECK_REGEX = "bash-completion-(?P<pver>(?!2008).+)\.tar"
-UPSTREAM_CHECK_URI = "https://github.com/scop/bash-completion/releases"
-
-PARALLEL_MAKE = ""
-
-inherit autotools
-
-do_install:append() {
- # compatdir
- install -d ${D}${sysconfdir}/bash_completion.d/
- echo '. ${datadir}/${BPN}/bash_completion' >${D}${sysconfdir}/bash_completion
-
-}
-
-RDEPENDS:${PN} = "bash"
-
-# Some recipes are providing ${PN}-bash-completion packages
-PACKAGES =+ "${PN}-extra"
-FILES:${PN}-extra = "${datadir}/${BPN}/completions/ \
- ${datadir}/${BPN}/helpers/"
-
-BBCLASSEXTEND = "nativesdk"
diff --git a/meta/recipes-support/bash-completion/bash-completion_2.13.0.bb b/meta/recipes-support/bash-completion/bash-completion_2.13.0.bb
new file mode 100644
index 0000000000..f75d61e219
--- /dev/null
+++ b/meta/recipes-support/bash-completion/bash-completion_2.13.0.bb
@@ -0,0 +1,38 @@
+SUMMARY = "Programmable Completion for Bash 4"
+DESCRIPTION = "Collection of command line command completions for the Bash shell, \
+collection of helper functions to assist in creating new completions, \
+and set of facilities for loading completions automatically on demand, as well \
+as installing them."
+
+HOMEPAGE = "https://github.com/scop/bash-completion"
+BUGTRACKER = "https://github.com/scop/bash-completion/issues"
+
+LICENSE = "GPL-2.0-only"
+LIC_FILES_CHKSUM = "file://COPYING;md5=b234ee4d69f5fce4486a80fdaf4a4263"
+
+SECTION = "console/utils"
+
+SRC_URI = "${GITHUB_BASE_URI}/download/${PV}/${BPN}-${PV}.tar.xz"
+
+SRC_URI[sha256sum] = "c5f99a39e40f0d154c03ff15438e87ece1f5ac666336a4459899e2ff4bedf3d1"
+GITHUB_BASE_URI = "https://github.com/scop/bash-completion/releases"
+
+PARALLEL_MAKE = ""
+
+inherit autotools github-releases
+
+do_install:append() {
+ # compatdir
+ install -d ${D}${sysconfdir}/bash_completion.d/
+ echo '. ${datadir}/${BPN}/bash_completion' >${D}${sysconfdir}/bash_completion
+
+}
+
+RDEPENDS:${PN} = "bash"
+
+# Some recipes are providing ${PN}-bash-completion packages
+PACKAGES =+ "${PN}-extra"
+FILES:${PN}-extra = "${datadir}/${BPN}/completions/ \
+ ${datadir}/${BPN}/helpers/"
+
+BBCLASSEXTEND = "nativesdk"
diff --git a/meta/recipes-support/bmap-tools/bmap-tools_git.bb b/meta/recipes-support/bmap-tools/bmap-tools_git.bb
deleted file mode 100644
index 78c51e7731..0000000000
--- a/meta/recipes-support/bmap-tools/bmap-tools_git.bb
+++ /dev/null
@@ -1,26 +0,0 @@
-SUMMARY = "Tools to generate block map (AKA bmap) and flash images using bmap"
-DESCRIPTION = "Bmap-tools - tools to generate block map (AKA bmap) and flash images using \
-bmap. Bmaptool is a generic tool for creating the block map (bmap) for a file, \
-and copying files using the block map. The idea is that large file containing \
-unused blocks, like raw system image files, can be copied or flashed a lot \
-faster with bmaptool than with traditional tools like "dd" or "cp"."
-HOMEPAGE = "https://github.com/01org/bmap-tools"
-SECTION = "console/utils"
-LICENSE = "GPL-2.0-only"
-LIC_FILES_CHKSUM = "file://COPYING;md5=b234ee4d69f5fce4486a80fdaf4a4263"
-
-SRC_URI = "git://github.com/intel/${BPN};branch=master;protocol=https"
-
-SRCREV = "c0673962a8ec1624b5189dc1d24f33fe4f06785a"
-S = "${WORKDIR}/git"
-BASEVER = "3.6"
-PV = "${BASEVER}+git${SRCPV}"
-
-UPSTREAM_CHECK_GITTAGREGEX = "v(?P<pver>\d+(\.\d+)+)"
-
-# Need df from coreutils
-RDEPENDS:${PN} = "python3-core python3-compression python3-mmap python3-setuptools python3-fcntl python3-six coreutils"
-
-inherit setuptools3
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-support/bmaptool/bmaptool_git.bb b/meta/recipes-support/bmaptool/bmaptool_git.bb
new file mode 100644
index 0000000000..fd53c21c06
--- /dev/null
+++ b/meta/recipes-support/bmaptool/bmaptool_git.bb
@@ -0,0 +1,29 @@
+SUMMARY = "Tools to generate block map (AKA bmap) and flash images using bmap"
+DESCRIPTION = "bmaptool - tools to generate block map (AKA bmap) and flash images using \
+bmap. bmaptool is a generic tool for creating the block map (bmap) for a file, \
+and copying files using the block map. The idea is that large file containing \
+unused blocks, like raw system image files, can be copied or flashed a lot \
+faster with bmaptool than with traditional tools like "dd" or "cp"."
+HOMEPAGE = "https://github.com/yoctoproject/bmaptool"
+SECTION = "console/utils"
+LICENSE = "GPL-2.0-only"
+LIC_FILES_CHKSUM = "file://LICENSE;md5=b234ee4d69f5fce4486a80fdaf4a4263"
+
+SRC_URI = "git://github.com/yoctoproject/${BPN};branch=main;protocol=https"
+SRCREV = "2ff5750b8a3e0b36a9993c20e2ea10a07bc62085"
+S = "${WORKDIR}/git"
+BASEVER = "3.8.0"
+PV = "${BASEVER}+git"
+
+UPSTREAM_CHECK_GITTAGREGEX = "v(?P<pver>\d+(\.\d+)+)"
+
+# Need df from coreutils
+RDEPENDS:${PN} = "python3-core python3-compression python3-misc python3-mmap python3-setuptools python3-fcntl python3-six coreutils"
+
+inherit setuptools3
+
+# For compatibility with layers before scarthgap
+RREPLACES:${PN} = "bmap-tools"
+RCONFLICTS:${PN} = "bmap-tools"
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-support/boost/boost-1.79.0.inc b/meta/recipes-support/boost/boost-1.79.0.inc
deleted file mode 100644
index f90c463931..0000000000
--- a/meta/recipes-support/boost/boost-1.79.0.inc
+++ /dev/null
@@ -1,20 +0,0 @@
-# The Boost web site provides free peer-reviewed portable
-# C++ source libraries. The emphasis is on libraries which
-# work well with the C++ Standard Library. The libraries are
-# intended to be widely useful, and are in regular use by
-# thousands of programmers across a broad spectrum of applications.
-HOMEPAGE = "http://www.boost.org/"
-LICENSE = "BSL-1.0 & MIT & Python-2.0"
-LIC_FILES_CHKSUM = "file://LICENSE_1_0.txt;md5=e4224ccaecb14d942c71d31bef20d78c"
-
-BOOST_VER = "${@"_".join(d.getVar("PV").split("."))}"
-BOOST_MAJ = "${@"_".join(d.getVar("PV").split(".")[0:2])}"
-BOOST_P = "boost_${BOOST_VER}"
-
-SRC_URI = "https://boostorg.jfrog.io/artifactory/main/release/${PV}/source/${BOOST_P}.tar.bz2"
-SRC_URI[sha256sum] = "475d589d51a7f8b3ba2ba4eda022b170e562ca3b760ee922c146b6c65856ef39"
-
-UPSTREAM_CHECK_URI = "http://www.boost.org/users/download/"
-UPSTREAM_CHECK_REGEX = "release/(?P<pver>.*)/source/"
-
-S = "${WORKDIR}/${BOOST_P}"
diff --git a/meta/recipes-support/boost/boost-1.84.0.inc b/meta/recipes-support/boost/boost-1.84.0.inc
new file mode 100644
index 0000000000..5bbea2ba5b
--- /dev/null
+++ b/meta/recipes-support/boost/boost-1.84.0.inc
@@ -0,0 +1,20 @@
+# The Boost web site provides free peer-reviewed portable
+# C++ source libraries. The emphasis is on libraries which
+# work well with the C++ Standard Library. The libraries are
+# intended to be widely useful, and are in regular use by
+# thousands of programmers across a broad spectrum of applications.
+HOMEPAGE = "http://www.boost.org/"
+LICENSE = "BSL-1.0 & MIT & Python-2.0"
+LIC_FILES_CHKSUM = "file://LICENSE_1_0.txt;md5=e4224ccaecb14d942c71d31bef20d78c"
+
+BOOST_VER = "${@"_".join(d.getVar("PV").split("."))}"
+BOOST_MAJ = "${@"_".join(d.getVar("PV").split(".")[0:2])}"
+BOOST_P = "boost_${BOOST_VER}"
+
+SRC_URI = "https://boostorg.jfrog.io/artifactory/main/release/${PV}/source/${BOOST_P}.tar.bz2"
+SRC_URI[sha256sum] = "cc4b893acf645c9d4b698e9a0f08ca8846aa5d6c68275c14c3e7949c24109454"
+
+UPSTREAM_CHECK_URI = "http://www.boost.org/users/download/"
+UPSTREAM_CHECK_REGEX = "release/(?P<pver>.*)/source/"
+
+S = "${WORKDIR}/${BOOST_P}"
diff --git a/meta/recipes-support/boost/boost-build-native_1.84.0.bb b/meta/recipes-support/boost/boost-build-native_1.84.0.bb
new file mode 100644
index 0000000000..a345bac499
--- /dev/null
+++ b/meta/recipes-support/boost/boost-build-native_1.84.0.bb
@@ -0,0 +1,28 @@
+SUMMARY = "Boost.Build"
+DESCRIPTION = "B2 makes it easy to build C++ projects, everywhere."
+HOMEPAGE = "https://github.com/boostorg/build"
+SECTION = "devel"
+
+LICENSE = "BSL-1.0"
+LIC_FILES_CHKSUM = "file://LICENSE.txt;md5=e4224ccaecb14d942c71d31bef20d78c"
+
+SRC_URI = "git://github.com/boostorg/build;protocol=https;branch=master"
+SRCREV = "8d86b9a85407d73d6e8c631771f18c2a237d2d71"
+PE = "1"
+
+UPSTREAM_CHECK_GITTAGREGEX = "boost-(?P<pver>(\d+(\.\d+)+))"
+
+inherit native
+
+S = "${WORKDIR}/git"
+
+do_compile() {
+ ./bootstrap.sh
+}
+
+do_install() {
+ HOME=/var/run ./b2 install --prefix=${prefix} staging-prefix=${D}${prefix}
+}
+
+# The build is either release mode (pre-stripped) or debug (-O0).
+INSANE_SKIP:${PN} = "already-stripped"
diff --git a/meta/recipes-support/boost/boost-build-native_4.4.1.bb b/meta/recipes-support/boost/boost-build-native_4.4.1.bb
deleted file mode 100644
index de566eeb82..0000000000
--- a/meta/recipes-support/boost/boost-build-native_4.4.1.bb
+++ /dev/null
@@ -1,27 +0,0 @@
-SUMMARY = "Boost.Build"
-DESCRIPTION = "B2 makes it easy to build C++ projects, everywhere."
-HOMEPAGE = "https://github.com/boostorg/build"
-SECTION = "devel"
-
-LICENSE = "BSL-1.0"
-LIC_FILES_CHKSUM = "file://LICENSE.txt;md5=e4224ccaecb14d942c71d31bef20d78c"
-
-SRC_URI = "git://github.com/boostorg/build;protocol=https;branch=master"
-SRCREV = "76da80f33187a3d9e5336157cdfae12ce82e37eb"
-
-UPSTREAM_CHECK_GITTAGREGEX = "(?P<pver>(\d+(\.\d+){2,}))"
-
-inherit native
-
-S = "${WORKDIR}/git"
-
-do_compile() {
- ./bootstrap.sh
-}
-
-do_install() {
- HOME=/var/run ./b2 install --prefix=${prefix} staging-prefix=${D}${prefix}
-}
-
-# The build is either release mode (pre-stripped) or debug (-O0).
-INSANE_SKIP:${PN} = "already-stripped"
diff --git a/meta/recipes-support/boost/boost.inc b/meta/recipes-support/boost/boost.inc
index 402f1592cf..77105aa1fa 100644
--- a/meta/recipes-support/boost/boost.inc
+++ b/meta/recipes-support/boost/boost.inc
@@ -40,6 +40,7 @@ BOOST_LIBS = "\
thread \
timer \
type_erasure \
+ url \
wave \
"
@@ -151,6 +152,7 @@ BOOST_PARALLEL_MAKE = "${@oe.utils.parallel_make_argument(d, '-j%d')}"
BJAM_OPTS = '${BOOST_PARALLEL_MAKE} -d+2 -q \
${BJAM_TOOLS} \
-sBOOST_BUILD_USER_CONFIG=${WORKDIR}/user-config.jam \
+ -sICU_PATH=${STAGING_EXECPREFIXDIR} \
--build-dir=${B} \
--disable-icu \
${BJAM_EXTRA}'
diff --git a/meta/recipes-support/boost/boost/0001-Don-t-set-up-arch-instruction-set-flags-we-do-that-o.patch b/meta/recipes-support/boost/boost/0001-Don-t-set-up-arch-instruction-set-flags-we-do-that-o.patch
index 67d5dff125..9ba5d5cc33 100644
--- a/meta/recipes-support/boost/boost/0001-Don-t-set-up-arch-instruction-set-flags-we-do-that-o.patch
+++ b/meta/recipes-support/boost/boost/0001-Don-t-set-up-arch-instruction-set-flags-we-do-that-o.patch
@@ -1,4 +1,4 @@
-From 4d2a8fc8117e56bc283349e5f7f889ebbfc55c71 Mon Sep 17 00:00:00 2001
+From 18185e939ca121335a6172c8e50872af8122a759 Mon Sep 17 00:00:00 2001
From: Alexander Kanavin <alex.kanavin@gmail.com>
Date: Tue, 18 Dec 2018 15:42:57 +0100
Subject: [PATCH] Don't set up arch/instruction-set flags, we do that
@@ -8,16 +8,15 @@ ourselves
Upstream-Status: Inappropriate
Signed-off-by: Christopher Larson <chris_larson@mentor.com>
Signed-off-by: Alexander Kanavin <alex.kanavin@gmail.com>
-
---
- tools/build/src/tools/gcc.jam | 144 ----------------------------------
- 1 file changed, 144 deletions(-)
+ tools/build/src/tools/gcc.jam | 153 ----------------------------------
+ 1 file changed, 153 deletions(-)
diff --git a/tools/build/src/tools/gcc.jam b/tools/build/src/tools/gcc.jam
-index 47a113223..d77525724 100644
+index 834f5e1bf..493a43e6d 100644
--- a/tools/build/src/tools/gcc.jam
+++ b/tools/build/src/tools/gcc.jam
-@@ -1122,147 +1122,3 @@ local rule cpu-flags ( toolset variable : architecture : instruction-set + :
+@@ -1113,156 +1113,3 @@ local rule cpu-flags ( toolset variable : architecture : instruction-set + :
<architecture>$(architecture)/<instruction-set>$(instruction-set)
: $(values) ;
}
@@ -72,6 +71,9 @@ index 47a113223..d77525724 100644
-cpu-flags gcc OPTIONS : x86 : cascadelake : -march=skylake-avx512 -mavx512vnni ;
-cpu-flags gcc OPTIONS : x86 : cooperlake : -march=cooperlake ;
-cpu-flags gcc OPTIONS : x86 : tigerlake : -march=tigerlake ;
+-cpu-flags gcc OPTIONS : x86 : rocketlake : -march=rocketlake ;
+-cpu-flags gcc OPTIONS : x86 : alderlake : -march=alderlake ;
+-cpu-flags gcc OPTIONS : x86 : sapphirerapids : -march=sapphirerapids ;
-cpu-flags gcc OPTIONS : x86 : k6 : -march=k6 ;
-cpu-flags gcc OPTIONS : x86 : k6-2 : -march=k6-2 ;
-cpu-flags gcc OPTIONS : x86 : k6-3 : -march=k6-3 ;
@@ -98,6 +100,7 @@ index 47a113223..d77525724 100644
-cpu-flags gcc OPTIONS : x86 : btver2 : -march=btver2 ;
-cpu-flags gcc OPTIONS : x86 : znver1 : -march=znver1 ;
-cpu-flags gcc OPTIONS : x86 : znver2 : -march=znver2 ;
+-cpu-flags gcc OPTIONS : x86 : znver3 : -march=znver3 ;
-cpu-flags gcc OPTIONS : x86 : winchip-c6 : -march=winchip-c6 ;
-cpu-flags gcc OPTIONS : x86 : winchip2 : -march=winchip2 ;
-cpu-flags gcc OPTIONS : x86 : c3 : -march=c3 ;
@@ -165,3 +168,8 @@ index 47a113223..d77525724 100644
-cpu-flags gcc OPTIONS : arm : cortex-r5+vfpv3-d16 : -mcpu=cortex-r5 -mfpu=vfpv3-d16 -mfloat-abi=hard ;
-# AIX variant of RS/6000 & PowerPC
-toolset.flags gcc AROPTIONS <address-model>64/<target-os>aix : "-X64" ;
+-
+-# Enable response file control
+-toolset.flags gcc RESPONSE_FILE_SUB <response-file>auto : a ;
+-toolset.flags gcc RESPONSE_FILE_SUB <response-file>file : f ;
+-toolset.flags gcc RESPONSE_FILE_SUB <response-file>contents : c ;
diff --git a/meta/recipes-support/boost/boost/0001-dont-setup-compiler-flags-m32-m64.patch b/meta/recipes-support/boost/boost/0001-dont-setup-compiler-flags-m32-m64.patch
index 78b19225d4..913810a340 100644
--- a/meta/recipes-support/boost/boost/0001-dont-setup-compiler-flags-m32-m64.patch
+++ b/meta/recipes-support/boost/boost/0001-dont-setup-compiler-flags-m32-m64.patch
@@ -1,4 +1,4 @@
-From 59402e3a61d14eb7ce8c2019ea1a87ad4bd28605 Mon Sep 17 00:00:00 2001
+From bbb0845c0a79238fb2e84cca41826a0944b6ce7e Mon Sep 17 00:00:00 2001
From: Anuj Mittal <anuj.mittal@intel.com>
Date: Thu, 14 Nov 2019 10:13:53 +0800
Subject: [PATCH] dont setup compiler flags -m32/-m64
@@ -13,13 +13,14 @@ Signed-off-by: Anuj Mittal <anuj.mittal@intel.com>
1 file changed, 14 deletions(-)
diff --git a/tools/build/src/tools/gcc.jam b/tools/build/src/tools/gcc.jam
-index c7e3cf3..24486e0 100644
+index 493a43e6d..42dccbdfe 100644
--- a/tools/build/src/tools/gcc.jam
+++ b/tools/build/src/tools/gcc.jam
-@@ -430,20 +430,6 @@ local rule compile-link-flags ( * )
+@@ -360,20 +360,6 @@ local rule compile-link-flags ( * )
+ compile-link-flags <link>shared/<target-os>$(non-windows) : -fPIC ;
}
- {
+-{
- # Handle address-model
- compile-link-flags <target-os>aix/<address-model>32 : -maix32 ;
- compile-link-flags <target-os>aix/<address-model>64 : -maix64 ;
@@ -33,10 +34,6 @@ index c7e3cf3..24486e0 100644
- compile-link-flags <target-os>$(generic-os)/<architecture>$(arch)/<address-model>64 : -m64 ;
-}
-
--{
+ {
# Handle threading
local rule threading-flags ( * )
- {
---
-2.7.4
-
diff --git a/meta/recipes-support/boost/boost/boost-CVE-2012-2677.patch b/meta/recipes-support/boost/boost/boost-CVE-2012-2677.patch
deleted file mode 100644
index 917617a044..0000000000
--- a/meta/recipes-support/boost/boost/boost-CVE-2012-2677.patch
+++ /dev/null
@@ -1,112 +0,0 @@
-Reference
-
-https://svn.boost.org/trac/boost/changeset/78326
-
-Upstream-Status: Backport
-CVE: CVE-2012-2677
-Signed-off-by: Yue Tao <yue.tao@windriver.com>
-
-diff --git a/boost/pool/pool.hpp.old b/boost/pool/pool.hpp
-index c47b11f..417a1e0 100644
---- a/boost/pool/pool.hpp.old
-+++ b/boost/pool/pool.hpp
-@@ -26,6 +26,8 @@
-
- #include <boost/pool/poolfwd.hpp>
-
-+// std::numeric_limits
-+#include <boost/limits.hpp>
- // boost::integer::static_lcm
- #include <boost/integer/common_factor_ct.hpp>
- // boost::simple_segregated_storage
-@@ -355,6 +357,15 @@ class pool: protected simple_segregated_storage < typename UserAllocator::size_t
- return s;
- }
-
-+ size_type max_chunks() const
-+ { //! Calculated maximum number of memory chunks that can be allocated in a single call by this Pool.
-+ size_type partition_size = alloc_size();
-+ size_type POD_size = integer::static_lcm<sizeof(size_type), sizeof(void *)>::value + sizeof(size_type);
-+ size_type max_chunks = (std::numeric_limits<size_type>::max() - POD_size) / alloc_size();
-+
-+ return max_chunks;
-+ }
-+
- static void * & nextof(void * const ptr)
- { //! \returns Pointer dereferenced.
- //! (Provided and used for the sake of code readability :)
-@@ -375,6 +386,8 @@ class pool: protected simple_segregated_storage < typename UserAllocator::size_t
- //! the first time that object needs to allocate system memory.
- //! The default is 32. This parameter may not be 0.
- //! \param nmax_size is the maximum number of chunks to allocate in one block.
-+ set_next_size(nnext_size);
-+ set_max_size(nmax_size);
- }
-
- ~pool()
-@@ -398,8 +411,8 @@ class pool: protected simple_segregated_storage < typename UserAllocator::size_t
- }
- void set_next_size(const size_type nnext_size)
- { //! Set number of chunks to request from the system the next time that object needs to allocate system memory. This value should never be set to 0.
-- //! \returns nnext_size.
-- next_size = start_size = nnext_size;
-+ BOOST_USING_STD_MIN();
-+ next_size = start_size = min BOOST_PREVENT_MACRO_SUBSTITUTION(nnext_size, max_chunks());
- }
- size_type get_max_size() const
- { //! \returns max_size.
-@@ -407,7 +420,8 @@ class pool: protected simple_segregated_storage < typename UserAllocator::size_t
- }
- void set_max_size(const size_type nmax_size)
- { //! Set max_size.
-- max_size = nmax_size;
-+ BOOST_USING_STD_MIN();
-+ max_size = min BOOST_PREVENT_MACRO_SUBSTITUTION(nmax_size, max_chunks());
- }
- size_type get_requested_size() const
- { //! \returns the requested size passed into the constructor.
-@@ -708,9 +722,9 @@ void * pool<UserAllocator>::malloc_need_resize()
-
- BOOST_USING_STD_MIN();
- if(!max_size)
-- next_size <<= 1;
-+ set_next_size(next_size << 1);
- else if( next_size*partition_size/requested_size < max_size)
-- next_size = min BOOST_PREVENT_MACRO_SUBSTITUTION(next_size << 1, max_size*requested_size/ partition_size);
-+ set_next_size(min BOOST_PREVENT_MACRO_SUBSTITUTION(next_size << 1, max_size * requested_size / partition_size));
-
- // initialize it,
- store().add_block(node.begin(), node.element_size(), partition_size);
-@@ -748,9 +762,9 @@ void * pool<UserAllocator>::ordered_malloc_need_resize()
-
- BOOST_USING_STD_MIN();
- if(!max_size)
-- next_size <<= 1;
-+ set_next_size(next_size << 1);
- else if( next_size*partition_size/requested_size < max_size)
-- next_size = min BOOST_PREVENT_MACRO_SUBSTITUTION(next_size << 1, max_size*requested_size/ partition_size);
-+ set_next_size(min BOOST_PREVENT_MACRO_SUBSTITUTION(next_size << 1, max_size * requested_size / partition_size));
-
- // initialize it,
- // (we can use "add_block" here because we know that
-@@ -792,6 +806,8 @@ void * pool<UserAllocator>::ordered_malloc(const size_type n)
- { //! Gets address of a chunk n, allocating new memory if not already available.
- //! \returns Address of chunk n if allocated ok.
- //! \returns 0 if not enough memory for n chunks.
-+ if (n > max_chunks())
-+ return 0;
-
- const size_type partition_size = alloc_size();
- const size_type total_req_size = n * requested_size;
-@@ -840,9 +856,9 @@ void * pool<UserAllocator>::ordered_malloc(const size_type n)
-
- BOOST_USING_STD_MIN();
- if(!max_size)
-- next_size <<= 1;
-+ set_next_size(next_size << 1);
- else if( next_size*partition_size/requested_size < max_size)
-- next_size = min BOOST_PREVENT_MACRO_SUBSTITUTION(next_size << 1, max_size*requested_size/ partition_size);
-+ set_next_size(min BOOST_PREVENT_MACRO_SUBSTITUTION(next_size << 1, max_size * requested_size / partition_size));
-
- // insert it into the list,
- // handle border case.
diff --git a/meta/recipes-support/boost/boost/boost-math-disable-pch-for-gcc.patch b/meta/recipes-support/boost/boost/boost-math-disable-pch-for-gcc.patch
index 595ba17865..9b6fcc6358 100644
--- a/meta/recipes-support/boost/boost/boost-math-disable-pch-for-gcc.patch
+++ b/meta/recipes-support/boost/boost/boost-math-disable-pch-for-gcc.patch
@@ -1,4 +1,4 @@
-From cabfcba1ff7511ffd6b91ca244288d44f585aad2 Mon Sep 17 00:00:00 2001
+From d6864bd78c01c5e8578805a7b67555c70a0e99a2 Mon Sep 17 00:00:00 2001
From: Jackie Huang <jackie.huang@windriver.com>
Date: Fri, 23 Sep 2016 01:04:50 -0700
Subject: [PATCH] boost-math: disable pch for gcc
@@ -14,10 +14,10 @@ Signed-off-by: Jackie Huang <jackie.huang@windriver.com>
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/libs/math/build/Jamfile.v2 b/libs/math/build/Jamfile.v2
-index e19fb2e..82472a7 100644
+index fd840287e..5aff7d724 100644
--- a/libs/math/build/Jamfile.v2
+++ b/libs/math/build/Jamfile.v2
-@@ -13,7 +13,7 @@ project
+@@ -14,7 +14,7 @@ project
#<toolset>intel-linux:<pch>off
<toolset>intel-darwin:<pch>off
<toolset>msvc-7.1:<pch>off
@@ -26,6 +26,3 @@ index e19fb2e..82472a7 100644
#<toolset>gcc:<cxxflags>-fvisibility=hidden
<toolset>intel-linux:<cxxflags>-fvisibility=hidden
#<toolset>sun:<cxxflags>-xldscope=hidden
---
-2.7.4
-
diff --git a/meta/recipes-support/boost/boost_1.79.0.bb b/meta/recipes-support/boost/boost_1.79.0.bb
deleted file mode 100644
index dd5d6eadd6..0000000000
--- a/meta/recipes-support/boost/boost_1.79.0.bb
+++ /dev/null
@@ -1,8 +0,0 @@
-require boost-${PV}.inc
-require boost.inc
-
-SRC_URI += "file://boost-CVE-2012-2677.patch \
- file://boost-math-disable-pch-for-gcc.patch \
- file://0001-Don-t-set-up-arch-instruction-set-flags-we-do-that-o.patch \
- file://0001-dont-setup-compiler-flags-m32-m64.patch \
- "
diff --git a/meta/recipes-support/boost/boost_1.84.0.bb b/meta/recipes-support/boost/boost_1.84.0.bb
new file mode 100644
index 0000000000..4b580d078b
--- /dev/null
+++ b/meta/recipes-support/boost/boost_1.84.0.bb
@@ -0,0 +1,7 @@
+require boost-${PV}.inc
+require boost.inc
+
+SRC_URI += "file://boost-math-disable-pch-for-gcc.patch \
+ file://0001-Don-t-set-up-arch-instruction-set-flags-we-do-that-o.patch \
+ file://0001-dont-setup-compiler-flags-m32-m64.patch \
+ "
diff --git a/meta/recipes-support/curl/curl/disable-tests b/meta/recipes-support/curl/curl/disable-tests
new file mode 100644
index 0000000000..259576fd01
--- /dev/null
+++ b/meta/recipes-support/curl/curl/disable-tests
@@ -0,0 +1,41 @@
+# Intermittently fails e.g. https://autobuilder.yocto.io/pub/non-release/20231220-28/testresults/qemux86-64-ptest/curl.log
+# https://autobuilder.yocto.io/pub/non-release/20231220-27/testresults/qemux86-64-ptest/curl.log
+337
+# These CRL test (alt-avc) are failing
+356
+412
+413
+# These CRL tests are scanning docs
+971
+# Intermittently hangs e.g http://autobuilder.yocto.io/pub/non-release/20231228-18/testresults/qemux86-64-ptest/curl.log
+1091
+# Intermittently hangs e.g https://autobuilder.yocto.io/pub/non-release/20231220-27/testresults/qemux86-64-ptest/curl.log
+1096
+# These CRL tests are scanning docs
+1119
+1132
+1135
+1478
+# These CRL tests are scanning headers
+1167
+1477
+# These CRL tests are scanning man pages
+1139
+1140
+1173
+1177
+# This CRL test is looking for m4 files
+1165
+# This CRL test is looking for src files
+1185
+# This test is scanning the source tree
+1222
+# These CRL tests need --libcurl option to be enabled
+1279
+1400
+1401
+1402
+1403
+1404
+1405
+1465
diff --git a/meta/recipes-support/curl/curl/no-test-timeout.patch b/meta/recipes-support/curl/curl/no-test-timeout.patch
new file mode 100644
index 0000000000..7122b6f043
--- /dev/null
+++ b/meta/recipes-support/curl/curl/no-test-timeout.patch
@@ -0,0 +1,25 @@
+From 42cddb52e821cfc2f09f1974742714e5f2f1856e Mon Sep 17 00:00:00 2001
+From: Ross Burton <ross.burton@arm.com>
+Date: Fri, 15 Mar 2024 14:37:37 +0000
+Subject: [PATCH] Set the max-time timeout to 600 so the timeout is 10 minutes
+ instead of 13 seconds.
+
+Upstream-Status: Inappropriate
+Signed-off-by: Ross Burton <ross.burton@arm.com>
+---
+ tests/servers.pm | 2 +-
+ 1 file changed, 1 insertion(+), 1 deletion(-)
+
+diff --git a/tests/servers.pm b/tests/servers.pm
+index d4472d5..9999938 100644
+--- a/tests/servers.pm
++++ b/tests/servers.pm
+@@ -120,7 +120,7 @@ my $sshdverstr; # for socks server, ssh daemon version string
+ my $sshderror; # for socks server, ssh daemon version error
+ my %doesntrun; # servers that don't work, identified by pidfile
+ my %PORT = (nolisten => 47); # port we use for a local non-listening service
+-my $server_response_maxtime=13;
++my $server_response_maxtime=600;
+ my $httptlssrv = find_httptlssrv();
+ my %run; # running server
+ my %runcert; # cert file currently in use by an ssl running server
diff --git a/meta/recipes-support/curl/curl/run-ptest b/meta/recipes-support/curl/curl/run-ptest
new file mode 100644
index 0000000000..3d25f3d90b
--- /dev/null
+++ b/meta/recipes-support/curl/curl/run-ptest
@@ -0,0 +1,11 @@
+#!/bin/sh
+
+cd tests
+
+# Run all tests, don't stop on first failure
+# Don't use valgrind if it is found
+# Use automake-style output
+# Run four tests in parallel
+# Print log output on failure
+# Don't run the flaky or timing dependent tests
+./runtests.pl -a -n -am -j4 -p !flaky !timing-dependent
diff --git a/meta/recipes-support/curl/curl_7.83.1.bb b/meta/recipes-support/curl/curl_7.83.1.bb
deleted file mode 100644
index 1d9de29a65..0000000000
--- a/meta/recipes-support/curl/curl_7.83.1.bb
+++ /dev/null
@@ -1,93 +0,0 @@
-SUMMARY = "Command line tool and library for client-side URL transfers"
-DESCRIPTION = "It uses URL syntax to transfer data to and from servers. \
-curl is a widely used because of its ability to be flexible and complete \
-complex tasks. For example, you can use curl for things like user authentication, \
-HTTP post, SSL connections, proxy support, FTP uploads, and more!"
-HOMEPAGE = "https://curl.se/"
-BUGTRACKER = "https://github.com/curl/curl/issues"
-SECTION = "console/network"
-LICENSE = "MIT-open-group"
-LIC_FILES_CHKSUM = "file://COPYING;md5=190c514872597083303371684954f238"
-
-SRC_URI = "https://curl.se/download/${BP}.tar.xz"
-SRC_URI[sha256sum] = "2cb9c2356e7263a1272fd1435ef7cdebf2cd21400ec287b068396deb705c22c4"
-
-# Curl has used many names over the years...
-CVE_PRODUCT = "haxx:curl haxx:libcurl curl:curl curl:libcurl libcurl:libcurl daniel_stenberg:curl"
-
-inherit autotools pkgconfig binconfig multilib_header
-
-# Entropy source for random PACKAGECONFIG option
-RANDOM ?= "/dev/urandom"
-
-PACKAGECONFIG ??= "${@bb.utils.filter('DISTRO_FEATURES', 'ipv6', d)} libidn openssl proxy random threaded-resolver verbose zlib"
-PACKAGECONFIG:class-native = "ipv6 openssl proxy random threaded-resolver verbose zlib"
-PACKAGECONFIG:class-nativesdk = "ipv6 openssl proxy random threaded-resolver verbose zlib"
-
-# 'ares' and 'threaded-resolver' are mutually exclusive
-PACKAGECONFIG[ares] = "--enable-ares,--disable-ares,c-ares,,,threaded-resolver"
-PACKAGECONFIG[brotli] = "--with-brotli,--without-brotli,brotli"
-PACKAGECONFIG[builtinmanual] = "--enable-manual,--disable-manual"
-PACKAGECONFIG[dict] = "--enable-dict,--disable-dict,"
-PACKAGECONFIG[gnutls] = "--with-gnutls,--without-gnutls,gnutls"
-PACKAGECONFIG[gopher] = "--enable-gopher,--disable-gopher,"
-PACKAGECONFIG[imap] = "--enable-imap,--disable-imap,"
-PACKAGECONFIG[ipv6] = "--enable-ipv6,--disable-ipv6,"
-PACKAGECONFIG[krb5] = "--with-gssapi,--without-gssapi,krb5"
-PACKAGECONFIG[ldap] = "--enable-ldap,--disable-ldap,"
-PACKAGECONFIG[ldaps] = "--enable-ldaps,--disable-ldaps,"
-PACKAGECONFIG[libgsasl] = "--with-libgsasl,--without-libgsasl,libgsasl"
-PACKAGECONFIG[libidn] = "--with-libidn2,--without-libidn2,libidn2"
-PACKAGECONFIG[libssh2] = "--with-libssh2,--without-libssh2,libssh2"
-PACKAGECONFIG[mbedtls] = "--with-mbedtls=${STAGING_DIR_TARGET},--without-mbedtls,mbedtls"
-PACKAGECONFIG[mqtt] = "--enable-mqtt,--disable-mqtt,"
-PACKAGECONFIG[nghttp2] = "--with-nghttp2,--without-nghttp2,nghttp2"
-PACKAGECONFIG[openssl] = "--with-openssl,--without-openssl,openssl"
-PACKAGECONFIG[pop3] = "--enable-pop3,--disable-pop3,"
-PACKAGECONFIG[proxy] = "--enable-proxy,--disable-proxy,"
-PACKAGECONFIG[random] = "--with-random=${RANDOM},--without-random"
-PACKAGECONFIG[rtmpdump] = "--with-librtmp,--without-librtmp,rtmpdump"
-PACKAGECONFIG[rtsp] = "--enable-rtsp,--disable-rtsp,"
-PACKAGECONFIG[smb] = "--enable-smb,--disable-smb,"
-PACKAGECONFIG[smtp] = "--enable-smtp,--disable-smtp,"
-PACKAGECONFIG[nss] = "--with-nss,--without-nss,nss"
-PACKAGECONFIG[telnet] = "--enable-telnet,--disable-telnet,"
-PACKAGECONFIG[tftp] = "--enable-tftp,--disable-tftp,"
-PACKAGECONFIG[threaded-resolver] = "--enable-threaded-resolver,--disable-threaded-resolver,,,,ares"
-PACKAGECONFIG[verbose] = "--enable-verbose,--disable-verbose"
-PACKAGECONFIG[zlib] = "--with-zlib=${STAGING_LIBDIR}/../,--without-zlib,zlib"
-PACKAGECONFIG[zstd] = "--with-zstd,--without-zstd,zstd"
-
-EXTRA_OECONF = " \
- --disable-libcurl-option \
- --disable-ntlm-wb \
- --enable-crypto-auth \
- --with-ca-bundle=${sysconfdir}/ssl/certs/ca-certificates.crt \
- --without-libpsl \
- --enable-debug \
- --enable-optimize \
- --disable-curldebug \
- ${@'--without-ssl' if (bb.utils.filter('PACKAGECONFIG', 'gnutls mbedtls nss openssl', d) == '') else ''} \
-"
-
-do_install:append:class-target() {
- # cleanup buildpaths from curl-config
- sed -i \
- -e 's,--sysroot=${STAGING_DIR_TARGET},,g' \
- -e 's,--with-libtool-sysroot=${STAGING_DIR_TARGET},,g' \
- -e 's|${DEBUG_PREFIX_MAP}||g' \
- -e 's|${@" ".join(d.getVar("DEBUG_PREFIX_MAP").split())}||g' \
- ${D}${bindir}/curl-config
-}
-
-PACKAGES =+ "lib${BPN}"
-
-FILES:lib${BPN} = "${libdir}/lib*.so.*"
-RRECOMMENDS:lib${BPN} += "ca-certificates"
-
-FILES:${PN} += "${datadir}/zsh"
-
-inherit multilib_script
-MULTILIB_SCRIPTS = "${PN}-dev:${bindir}/curl-config"
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-support/curl/curl_8.7.1.bb b/meta/recipes-support/curl/curl_8.7.1.bb
new file mode 100644
index 0000000000..c6654bbad6
--- /dev/null
+++ b/meta/recipes-support/curl/curl_8.7.1.bb
@@ -0,0 +1,149 @@
+SUMMARY = "Command line tool and library for client-side URL transfers"
+DESCRIPTION = "It uses URL syntax to transfer data to and from servers. \
+curl is a widely used because of its ability to be flexible and complete \
+complex tasks. For example, you can use curl for things like user authentication, \
+HTTP post, SSL connections, proxy support, FTP uploads, and more!"
+HOMEPAGE = "https://curl.se/"
+BUGTRACKER = "https://github.com/curl/curl/issues"
+SECTION = "console/network"
+LICENSE = "curl"
+LIC_FILES_CHKSUM = "file://COPYING;md5=eed2e5088e1ac619c9a1c747da291d75"
+
+SRC_URI = " \
+ https://curl.se/download/${BP}.tar.xz \
+ file://run-ptest \
+ file://disable-tests \
+ file://no-test-timeout.patch \
+"
+SRC_URI[sha256sum] = "6fea2aac6a4610fbd0400afb0bcddbe7258a64c63f1f68e5855ebc0c659710cd"
+
+# Curl has used many names over the years...
+CVE_PRODUCT = "haxx:curl haxx:libcurl curl:curl curl:libcurl libcurl:libcurl daniel_stenberg:curl"
+
+inherit autotools pkgconfig binconfig multilib_header ptest
+
+# Entropy source for random PACKAGECONFIG option
+RANDOM ?= "/dev/urandom"
+
+PACKAGECONFIG ??= "${@bb.utils.filter('DISTRO_FEATURES', 'ipv6', d)} aws basic-auth bearer-auth digest-auth negotiate-auth libidn openssl proxy random threaded-resolver verbose zlib"
+PACKAGECONFIG:class-native = "ipv6 openssl proxy random threaded-resolver verbose zlib"
+PACKAGECONFIG:class-nativesdk = "ipv6 openssl proxy random threaded-resolver verbose zlib"
+
+# 'ares' and 'threaded-resolver' are mutually exclusive
+PACKAGECONFIG[ares] = "--enable-ares,--disable-ares,c-ares,,,threaded-resolver"
+PACKAGECONFIG[aws] = "--enable-aws,--disable-aws"
+PACKAGECONFIG[basic-auth] = "--enable-basic-auth,--disable-basic-auth"
+PACKAGECONFIG[bearer-auth] = "--enable-bearer-auth,--disable-bearer-auth"
+PACKAGECONFIG[brotli] = "--with-brotli,--without-brotli,brotli"
+PACKAGECONFIG[builtinmanual] = "--enable-manual,--disable-manual"
+# Don't use this in production
+PACKAGECONFIG[debug] = "--enable-debug,--disable-debug"
+PACKAGECONFIG[dict] = "--enable-dict,--disable-dict,"
+PACKAGECONFIG[digest-auth] = "--enable-digest-auth,--disable-digest-auth"
+PACKAGECONFIG[gnutls] = "--with-gnutls,--without-gnutls,gnutls"
+PACKAGECONFIG[gopher] = "--enable-gopher,--disable-gopher,"
+PACKAGECONFIG[imap] = "--enable-imap,--disable-imap,"
+PACKAGECONFIG[ipv6] = "--enable-ipv6,--disable-ipv6,"
+PACKAGECONFIG[kerberos-auth] = "--enable-kerberos-auth,--disable-kerberos-auth"
+PACKAGECONFIG[krb5] = "--with-gssapi,--without-gssapi,krb5"
+PACKAGECONFIG[ldap] = "--enable-ldap,--disable-ldap,openldap"
+PACKAGECONFIG[ldaps] = "--enable-ldaps,--disable-ldaps,openldap"
+PACKAGECONFIG[libgsasl] = "--with-libgsasl,--without-libgsasl,libgsasl"
+PACKAGECONFIG[libidn] = "--with-libidn2,--without-libidn2,libidn2"
+PACKAGECONFIG[libssh2] = "--with-libssh2,--without-libssh2,libssh2"
+PACKAGECONFIG[mbedtls] = "--with-mbedtls=${STAGING_DIR_TARGET},--without-mbedtls,mbedtls"
+PACKAGECONFIG[mqtt] = "--enable-mqtt,--disable-mqtt,"
+PACKAGECONFIG[negotiate-auth] = "--enable-negotiate-auth,--disable-negotiate-auth"
+PACKAGECONFIG[nghttp2] = "--with-nghttp2,--without-nghttp2,nghttp2"
+PACKAGECONFIG[openssl] = "--with-openssl,--without-openssl,openssl"
+PACKAGECONFIG[pop3] = "--enable-pop3,--disable-pop3,"
+PACKAGECONFIG[proxy] = "--enable-proxy,--disable-proxy,"
+PACKAGECONFIG[random] = "--with-random=${RANDOM},--without-random"
+PACKAGECONFIG[rtmpdump] = "--with-librtmp,--without-librtmp,rtmpdump"
+PACKAGECONFIG[rtsp] = "--enable-rtsp,--disable-rtsp,"
+PACKAGECONFIG[smb] = "--enable-smb,--disable-smb,"
+PACKAGECONFIG[smtp] = "--enable-smtp,--disable-smtp,"
+PACKAGECONFIG[telnet] = "--enable-telnet,--disable-telnet,"
+PACKAGECONFIG[tftp] = "--enable-tftp,--disable-tftp,"
+PACKAGECONFIG[threaded-resolver] = "--enable-threaded-resolver,--disable-threaded-resolver,,,,ares"
+PACKAGECONFIG[verbose] = "--enable-verbose,--disable-verbose"
+PACKAGECONFIG[zlib] = "--with-zlib=${STAGING_LIBDIR}/../,--without-zlib,zlib"
+PACKAGECONFIG[zstd] = "--with-zstd,--without-zstd,zstd"
+
+EXTRA_OECONF = " \
+ --disable-libcurl-option \
+ --disable-ntlm-wb \
+ --with-ca-bundle=${sysconfdir}/ssl/certs/ca-certificates.crt \
+ --without-libpsl \
+ --enable-optimize \
+ ${@'--without-ssl' if (bb.utils.filter('PACKAGECONFIG', 'gnutls mbedtls openssl', d) == '') else ''} \
+"
+
+fix_absolute_paths () {
+ # cleanup buildpaths from curl-config
+ sed -i \
+ -e 's,--sysroot=${STAGING_DIR_TARGET},,g' \
+ -e 's,--with-libtool-sysroot=${STAGING_DIR_TARGET},,g' \
+ -e 's|${DEBUG_PREFIX_MAP}||g' \
+ -e 's|${@" ".join(d.getVar("DEBUG_PREFIX_MAP").split())}||g' \
+ ${D}${bindir}/curl-config
+}
+
+do_install:append:class-target() {
+ fix_absolute_paths
+}
+
+do_install:append:class-nativesdk() {
+ fix_absolute_paths
+}
+
+do_compile_ptest() {
+ oe_runmake -C ${B}/tests
+}
+
+do_install_ptest() {
+ cat ${WORKDIR}/disable-tests >> ${S}/tests/data/DISABLED
+ rm -f ${B}/tests/configurehelp.pm
+ cp -rf ${B}/tests ${D}${PTEST_PATH}
+ rm -f ${D}${PTEST_PATH}/tests/libtest/.libs/libhostname.la
+ rm -f ${D}${PTEST_PATH}/tests/libtest/libhostname.la
+ mv ${D}${PTEST_PATH}/tests/libtest/.libs/* ${D}${PTEST_PATH}/tests/libtest/
+ mv ${D}${PTEST_PATH}/tests/libtest/libhostname.so ${D}${PTEST_PATH}/tests/libtest/.libs/
+ mv ${D}${PTEST_PATH}/tests/http/clients/.libs/* ${D}${PTEST_PATH}/tests/http/clients/
+ cp -rf ${S}/tests ${D}${PTEST_PATH}
+ find ${D}${PTEST_PATH}/ -type f -name Makefile.am -o -name Makefile.in -o -name Makefile -delete
+ install -d ${D}${PTEST_PATH}/src
+ ln -sf ${bindir}/curl ${D}${PTEST_PATH}/src/curl
+ cp -rf ${D}${bindir}/curl-config ${D}${PTEST_PATH}
+}
+
+RDEPENDS:${PN}-ptest += " \
+ bash \
+ perl-module-b \
+ perl-module-base \
+ perl-module-cwd \
+ perl-module-digest \
+ perl-module-digest-md5 \
+ perl-module-file-basename \
+ perl-module-file-spec \
+ perl-module-file-temp \
+ perl-module-io-socket \
+ perl-module-ipc-open2 \
+ perl-module-list-util \
+ perl-module-memoize \
+ perl-module-storable \
+ perl-module-time-hires \
+"
+RDEPENDS:${PN}-ptest:append:libc-glibc = " locale-base-en-us"
+
+PACKAGES =+ "lib${BPN}"
+
+FILES:lib${BPN} = "${libdir}/lib*.so.*"
+RRECOMMENDS:lib${BPN} += "ca-certificates"
+
+FILES:${PN} += "${datadir}/zsh"
+
+inherit multilib_script
+MULTILIB_SCRIPTS = "${PN}-dev:${bindir}/curl-config"
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-support/db/db_5.3.28.bb b/meta/recipes-support/db/db_5.3.28.bb
index c5427f54eb..a99d5cea62 100644
--- a/meta/recipes-support/db/db_5.3.28.bb
+++ b/meta/recipes-support/db/db_5.3.28.bb
@@ -18,7 +18,6 @@ RCONFLICTS:${PN} = "db3"
CVE_PRODUCT = "oracle_berkeley_db berkeley_db"
CVE_VERSION = "11.2.${PV}"
-PR = "r1"
PE = "1"
SRC_URI = "https://download.oracle.com/berkeley-db/db-${PV}.tar.gz"
diff --git a/meta/recipes-support/debianutils/debianutils_5.17.bb b/meta/recipes-support/debianutils/debianutils_5.17.bb
new file mode 100644
index 0000000000..c5f25bae40
--- /dev/null
+++ b/meta/recipes-support/debianutils/debianutils_5.17.bb
@@ -0,0 +1,58 @@
+SUMMARY = "Miscellaneous utilities specific to Debian"
+DESCRIPTION = "Provides a number of small utilities which are used \
+primarily by the installation scripts of Debian packages, although \
+you may use them directly. "
+HOMEPAGE = "https://packages.debian.org/sid/debianutils"
+BUGTRACKER = "https://bugs.debian.org/cgi-bin/pkgreport.cgi?pkg=debianutils;dist=unstable"
+SECTION = "base"
+LICENSE = "GPL-2.0-only & SMAIL_GPL"
+LIC_FILES_CHKSUM = "file://debian/copyright;md5=4b667f30411d21bc8fd7db85d502a8e9"
+
+SRC_URI = "git://salsa.debian.org/debian/debianutils.git;protocol=https;branch=master \
+ "
+
+SRCREV = "baf12e98a02883d1b76081e32f2185ee3497570b"
+
+inherit autotools update-alternatives
+
+S = "${WORKDIR}/git"
+
+# Disable po4a (translated manpages) sub-directory, as that requires po4a to build
+do_configure:prepend() {
+ sed -i -e 's:po4a::g' ${S}/Makefile.am
+}
+
+
+do_install:append() {
+ if [ "${base_bindir}" != "${bindir}" ]; then
+ # Debian places some utils into ${base_bindir} as does busybox
+ install -d ${D}${base_bindir}
+ for app in run-parts; do
+ mv ${D}${bindir}/$app ${D}${base_bindir}/$app
+ done
+ fi
+ mv ${D}${bindir}/which.debianutils ${D}${bindir}/which
+}
+
+# Note that we package the update-alternatives name.
+#
+PACKAGES =+ "${PN}-run-parts"
+FILES:${PN}-run-parts = "${base_bindir}/run-parts.debianutils"
+
+RDEPENDS:${PN} += "${PN}-run-parts"
+RDEPENDS:${PN}:class-native = ""
+
+ALTERNATIVE_PRIORITY = "30"
+ALTERNATIVE:${PN} = "add-shell installkernel remove-shell savelog which"
+
+ALTERNATIVE_PRIORITY_${PN}-run-parts = "60"
+ALTERNATIVE:${PN}-run-parts = "run-parts"
+
+ALTERNATIVE_LINK_NAME[add-shell] = "${sbindir}/add-shell"
+ALTERNATIVE_LINK_NAME[installkernel] = "${sbindir}/installkernel"
+ALTERNATIVE_LINK_NAME[remove-shell] = "${sbindir}/remove-shell"
+ALTERNATIVE_LINK_NAME[run-parts] = "${base_bindir}/run-parts"
+ALTERNATIVE_LINK_NAME[savelog] = "${bindir}/savelog"
+ALTERNATIVE_LINK_NAME[which] = "${bindir}/which"
+
+BBCLASSEXTEND = "native"
diff --git a/meta/recipes-support/debianutils/debianutils_5.7.bb b/meta/recipes-support/debianutils/debianutils_5.7.bb
deleted file mode 100644
index 7d705c6ff4..0000000000
--- a/meta/recipes-support/debianutils/debianutils_5.7.bb
+++ /dev/null
@@ -1,60 +0,0 @@
-SUMMARY = "Miscellaneous utilities specific to Debian"
-DESCRIPTION = "Provides a number of small utilities which are used \
-primarily by the installation scripts of Debian packages, although \
-you may use them directly. "
-HOMEPAGE = "https://packages.debian.org/sid/debianutils"
-BUGTRACKER = "https://bugs.debian.org/cgi-bin/pkgreport.cgi?pkg=debianutils;dist=unstable"
-SECTION = "base"
-LICENSE = "GPL-2.0-only & SMAIL_GPL"
-LIC_FILES_CHKSUM = "file://debian/copyright;md5=9b912cd0cc654134c0ef3424a0705b94"
-
-SRC_URI = "git://salsa.debian.org/debian/debianutils.git;protocol=https;branch=master \
- "
-
-SRCREV = "de14223e5bffe15e374a441302c528ffc1cbed57"
-
-inherit autotools update-alternatives
-
-S = "${WORKDIR}/git"
-
-# Disable po4a (translated manpages) sub-directory, as that requires po4a to build
-do_configure:prepend() {
- sed -i -e 's:po4a::g' ${S}/Makefile.am
-}
-
-
-do_install:append() {
- if [ "${base_bindir}" != "${bindir}" ]; then
- # Debian places some utils into ${base_bindir} as does busybox
- install -d ${D}${base_bindir}
- for app in run-parts; do
- mv ${D}${bindir}/$app ${D}${base_bindir}/$app
- done
- fi
-}
-
-# Note that we package the update-alternatives name.
-#
-PACKAGES =+ "${PN}-run-parts"
-FILES:${PN}-run-parts = "${base_bindir}/run-parts.debianutils"
-
-RDEPENDS:${PN} += "${PN}-run-parts"
-RDEPENDS:${PN}:class-native = ""
-
-ALTERNATIVE_PRIORITY = "30"
-ALTERNATIVE:${PN} = "add-shell installkernel remove-shell savelog which"
-
-ALTERNATIVE_PRIORITY_${PN}-run-parts = "60"
-ALTERNATIVE:${PN}-run-parts = "run-parts"
-
-ALTERNATIVE:${PN}-doc = "which.1"
-ALTERNATIVE_LINK_NAME[which.1] = "${mandir}/man1/which.1"
-
-ALTERNATIVE_LINK_NAME[add-shell] = "${sbindir}/add-shell"
-ALTERNATIVE_LINK_NAME[installkernel] = "${sbindir}/installkernel"
-ALTERNATIVE_LINK_NAME[remove-shell] = "${sbindir}/remove-shell"
-ALTERNATIVE_LINK_NAME[run-parts] = "${base_bindir}/run-parts"
-ALTERNATIVE_LINK_NAME[savelog] = "${bindir}/savelog"
-ALTERNATIVE_LINK_NAME[which] = "${bindir}/which"
-
-BBCLASSEXTEND = "native"
diff --git a/meta/recipes-support/diffoscope/diffoscope_212.bb b/meta/recipes-support/diffoscope/diffoscope_212.bb
deleted file mode 100644
index 41f896d74f..0000000000
--- a/meta/recipes-support/diffoscope/diffoscope_212.bb
+++ /dev/null
@@ -1,30 +0,0 @@
-SUMMARY = "in-depth comparison of files, archives, and directories"
-DESCRIPTION = "Tries to get to the bottom of what makes files or directories \
-different. It will recursively unpack archives of many kinds and transform \
-various binary formats into more human-readable form to compare them. \
-It can compare two tarballs, ISO images, or PDF just as easily."
-HOMEPAGE = "https://diffoscope.org/"
-BUGTRACKER = "https://salsa.debian.org/reproducible-builds/diffoscope/-/issues"
-LICENSE = "GPL-3.0-or-later"
-LIC_FILES_CHKSUM = "file://COPYING;md5=d32239bcb673463ab874e80d47fae504"
-
-PYPI_PACKAGE = "diffoscope"
-
-inherit pypi setuptools3
-
-SRC_URI[sha256sum] = "744260ccf2bb869c58c50dfab68957dd494b12e446520993d0925b2394ea1db7"
-
-RDEPENDS:${PN} += "binutils vim squashfs-tools python3-libarchive-c python3-magic python3-rpm"
-
-# Dependencies don't build for musl
-COMPATIBLE_HOST:libc-musl = 'null'
-
-do_install:append:class-native() {
- create_wrapper ${D}${bindir}/diffoscope \
- MAGIC=${STAGING_DIR_NATIVE}${datadir_native}/misc/magic.mgc \
- RPM_CONFIGDIR=${STAGING_LIBDIR_NATIVE}/rpm \
- LD_LIBRARY_PATH=${STAGING_LIBDIR_NATIVE} \
- RPM_ETCCONFIGDIR=${STAGING_DIR_NATIVE}
-}
-
-BBCLASSEXTEND = "native"
diff --git a/meta/recipes-support/diffoscope/diffoscope_260.bb b/meta/recipes-support/diffoscope/diffoscope_260.bb
new file mode 100644
index 0000000000..297a53fb53
--- /dev/null
+++ b/meta/recipes-support/diffoscope/diffoscope_260.bb
@@ -0,0 +1,43 @@
+SUMMARY = "in-depth comparison of files, archives, and directories"
+DESCRIPTION = "Tries to get to the bottom of what makes files or directories \
+different. It will recursively unpack archives of many kinds and transform \
+various binary formats into more human-readable form to compare them. \
+It can compare two tarballs, ISO images, or PDF just as easily."
+HOMEPAGE = "https://diffoscope.org/"
+BUGTRACKER = "https://salsa.debian.org/reproducible-builds/diffoscope/-/issues"
+LICENSE = "GPL-3.0-or-later"
+LIC_FILES_CHKSUM = "file://COPYING;md5=d32239bcb673463ab874e80d47fae504"
+
+PYPI_PACKAGE = "diffoscope"
+
+inherit pypi setuptools3
+
+SRC_URI[sha256sum] = "405a55502c8b2c988e46c0800d6a93e8e4e7632c1542b0a540dda50aeea41dac"
+
+RDEPENDS:${PN} += "\
+ binutils \
+ python3-curses \
+ python3-difflib \
+ python3-fcntl \
+ python3-json \
+ python3-libarchive-c \
+ python3-magic \
+ python3-multiprocessing \
+ python3-pprint \
+ python3-rpm \
+ squashfs-tools \
+ vim \
+ "
+
+# Dependencies don't build for musl
+COMPATIBLE_HOST:libc-musl = 'null'
+
+do_install:append:class-native() {
+ create_wrapper ${D}${bindir}/diffoscope \
+ MAGIC=${STAGING_DIR_NATIVE}${datadir_native}/misc/magic.mgc \
+ RPM_CONFIGDIR=${STAGING_LIBDIR_NATIVE}/rpm \
+ LD_LIBRARY_PATH=${STAGING_LIBDIR_NATIVE} \
+ RPM_ETCCONFIGDIR=${STAGING_DIR_NATIVE}
+}
+
+BBCLASSEXTEND = "native"
diff --git a/meta/recipes-support/dos2unix/dos2unix_7.4.2.bb b/meta/recipes-support/dos2unix/dos2unix_7.4.2.bb
deleted file mode 100644
index 509a0a0ddc..0000000000
--- a/meta/recipes-support/dos2unix/dos2unix_7.4.2.bb
+++ /dev/null
@@ -1,34 +0,0 @@
-SUMMARY = "Convert text file line endings between CRLF and LF"
-DESCRIPTION = "The Dos2unix package includes utilities dos2unix and \
-unix2dos to convert plain text files in DOS or Mac format to Unix \
-format and vice versa."
-HOMEPAGE = "http://waterlan.home.xs4all.nl/dos2unix.html"
-SECTION = "support"
-
-LICENSE = "BSD-2-Clause"
-LIC_FILES_CHKSUM = "file://COPYING.txt;md5=8a7c3499a1142df819e727253cd53a12"
-
-SRC_URI = "git://git.code.sf.net/p/dos2unix/dos2unix;branch=master"
-UPSTREAM_CHECK_GITTAGREGEX = "dos2unix-(?P<pver>(\d+(\.\d+)+))"
-
-SRCREV = "72596f0ae21faa25a07a872d4843bc885475115d"
-
-S = "${WORKDIR}/git/dos2unix"
-
-inherit gettext perlnative
-
-# The dos2unix NLS relies on po4a-native, while po4a recipe is
-# provided by meta-perl layer, so make it optional here, you
-# need have meta-perl in bblayers.conf before enabling nls in
-# PACKAGECONFIG.
-PACKAGECONFIG ??= ""
-PACKAGECONFIG[nls] = "ENABLE_NLS=1,ENABLE_NLS=,po4a-native"
-
-EXTRA_OEMAKE = "${PACKAGECONFIG_CONFARGS} LDFLAGS_USER='${LDFLAGS}'"
-EXTRA_OEMAKE:class-native = "ENABLE_NLS="
-
-do_install () {
- oe_runmake DESTDIR="${D}${base_prefix}" install
-}
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-support/dos2unix/dos2unix_7.5.2.bb b/meta/recipes-support/dos2unix/dos2unix_7.5.2.bb
new file mode 100644
index 0000000000..06338dd62f
--- /dev/null
+++ b/meta/recipes-support/dos2unix/dos2unix_7.5.2.bb
@@ -0,0 +1,34 @@
+SUMMARY = "Convert text file line endings between CRLF and LF"
+DESCRIPTION = "The Dos2unix package includes utilities dos2unix and \
+unix2dos to convert plain text files in DOS or Mac format to Unix \
+format and vice versa."
+HOMEPAGE = "http://waterlan.home.xs4all.nl/dos2unix.html"
+SECTION = "support"
+
+LICENSE = "BSD-2-Clause"
+LIC_FILES_CHKSUM = "file://COPYING.txt;md5=49ec74e0139712398b9ddb06d824929a"
+
+SRC_URI = "git://git.code.sf.net/p/dos2unix/dos2unix;branch=master;protocol=https"
+UPSTREAM_CHECK_GITTAGREGEX = "dos2unix-(?P<pver>(\d+(\.\d+)+))"
+
+SRCREV = "c16a0b31a64c7eb6b2d76960e5144732b57f2993"
+
+S = "${WORKDIR}/git/dos2unix"
+
+inherit gettext perlnative
+
+# The dos2unix NLS relies on po4a-native, while po4a recipe is
+# provided by meta-perl layer, so make it optional here, you
+# need have meta-perl in bblayers.conf before enabling nls in
+# PACKAGECONFIG.
+PACKAGECONFIG ??= ""
+PACKAGECONFIG[nls] = "ENABLE_NLS=1,ENABLE_NLS=,po4a-native"
+
+EXTRA_OEMAKE = "${PACKAGECONFIG_CONFARGS} LDFLAGS_USER='${LDFLAGS}'"
+EXTRA_OEMAKE:class-native = "ENABLE_NLS="
+
+do_install () {
+ oe_runmake DESTDIR="${D}${base_prefix}" install
+}
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-support/enchant/enchant2_2.3.3.bb b/meta/recipes-support/enchant/enchant2_2.3.3.bb
deleted file mode 100644
index 96d56c2ea4..0000000000
--- a/meta/recipes-support/enchant/enchant2_2.3.3.bb
+++ /dev/null
@@ -1,31 +0,0 @@
-SUMMARY = "Enchant Spell checker API Library"
-DESCRIPTION = "A library (and command-line program) that wraps a number of \
-different spelling libraries and programs with a consistent interface."
-SECTION = "libs"
-HOMEPAGE = "https://abiword.github.io/enchant/"
-BUGTRACKER = "https://github.com/AbiWord/enchant/issues/"
-LICENSE = "LGPL-2.1-or-later"
-LIC_FILES_CHKSUM = "file://COPYING.LIB;md5=4fbd65380cdd255951079008b364516c"
-
-DEPENDS = "glib-2.0"
-
-inherit autotools pkgconfig
-
-SRC_URI = "https://github.com/AbiWord/enchant/releases/download/v${PV}/enchant-${PV}.tar.gz"
-SRC_URI[sha256sum] = "3da12103f11cf49c3cf2fd2ce3017575c5321a489e5b9bfa81dd91ec413f3891"
-
-UPSTREAM_CHECK_URI = "https://github.com/AbiWord/enchant/releases"
-
-S = "${WORKDIR}/enchant-${PV}"
-
-EXTRA_OEMAKE = "pkgdatadir=${datadir}/enchant-2"
-
-PACKAGECONFIG ??= "aspell"
-PACKAGECONFIG[aspell] = "--with-aspell,--without-aspell,aspell,aspell"
-PACKAGECONFIG[hunspell] = "--with-hunspell,--without-hunspell,hunspell,hunspell"
-
-FILES:${PN} += " \
- ${datadir}/enchant-2 \
- ${libdir}/enchant-2 \
-"
-FILES:${PN}-staticdev += "${libdir}/enchant-2/*.a"
diff --git a/meta/recipes-support/enchant/enchant2_2.6.9.bb b/meta/recipes-support/enchant/enchant2_2.6.9.bb
new file mode 100644
index 0000000000..aaf3525ad8
--- /dev/null
+++ b/meta/recipes-support/enchant/enchant2_2.6.9.bb
@@ -0,0 +1,29 @@
+SUMMARY = "Enchant Spell checker API Library"
+DESCRIPTION = "A library (and command-line program) that wraps a number of \
+different spelling libraries and programs with a consistent interface."
+SECTION = "libs"
+HOMEPAGE = "https://abiword.github.io/enchant/"
+BUGTRACKER = "https://github.com/AbiWord/enchant/issues/"
+LICENSE = "LGPL-2.1-or-later"
+LIC_FILES_CHKSUM = "file://COPYING.LIB;md5=4fbd65380cdd255951079008b364516c"
+
+DEPENDS = "glib-2.0 groff-native"
+
+inherit autotools pkgconfig github-releases
+
+SRC_URI = "${GITHUB_BASE_URI}/download/v${PV}/enchant-${PV}.tar.gz"
+SRC_URI[sha256sum] = "d9a5a10dc9b38a43b3a0fa22c76ed6ebb7e09eb535aff62954afcdbd40efff6b"
+
+GITHUB_BASE_URI = "https://github.com/AbiWord/enchant/releases"
+
+S = "${WORKDIR}/enchant-${PV}"
+
+PACKAGECONFIG ??= "aspell"
+PACKAGECONFIG[aspell] = "--with-aspell,--without-aspell,aspell,aspell"
+PACKAGECONFIG[hunspell] = "--with-hunspell,--without-hunspell,hunspell,hunspell"
+
+FILES:${PN} += " \
+ ${datadir}/enchant-2 \
+ ${libdir}/enchant-2 \
+"
+FILES:${PN}-staticdev += "${libdir}/enchant-2/*.a"
diff --git a/meta/recipes-support/fribidi/fribidi_1.0.12.bb b/meta/recipes-support/fribidi/fribidi_1.0.12.bb
deleted file mode 100644
index b29c47822f..0000000000
--- a/meta/recipes-support/fribidi/fribidi_1.0.12.bb
+++ /dev/null
@@ -1,22 +0,0 @@
-SUMMARY = "Free Implementation of the Unicode Bidirectional Algorithm"
-DESCRIPTION = "It provides utility functions to aid in the development \
-of interactive editors and widgets that implement BiDi functionality. \
-The BiDi algorithm is a prerequisite for supporting right-to-left scripts such \
-as Hebrew, Arabic, Syriac, and Thaana. "
-SECTION = "libs"
-HOMEPAGE = "http://fribidi.org/"
-BUGTRACKER = "https://github.com/fribidi/fribidi/issues"
-LICENSE = "LGPL-2.1-or-later"
-LIC_FILES_CHKSUM = "file://COPYING;md5=a916467b91076e631dd8edb7424769c7"
-
-SRC_URI = "https://github.com/${BPN}/${BPN}/releases/download/v${PV}/${BP}.tar.xz \
- "
-SRC_URI[sha256sum] = "0cd233f97fc8c67bb3ac27ce8440def5d3ffacf516765b91c2cc654498293495"
-
-UPSTREAM_CHECK_URI = "https://github.com/${BPN}/${BPN}/releases"
-
-inherit meson lib_package pkgconfig
-
-CVE_PRODUCT = "gnu_fribidi fribidi"
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-support/fribidi/fribidi_1.0.13.bb b/meta/recipes-support/fribidi/fribidi_1.0.13.bb
new file mode 100644
index 0000000000..5d0476a375
--- /dev/null
+++ b/meta/recipes-support/fribidi/fribidi_1.0.13.bb
@@ -0,0 +1,20 @@
+SUMMARY = "Free Implementation of the Unicode Bidirectional Algorithm"
+DESCRIPTION = "It provides utility functions to aid in the development \
+of interactive editors and widgets that implement BiDi functionality. \
+The BiDi algorithm is a prerequisite for supporting right-to-left scripts such \
+as Hebrew, Arabic, Syriac, and Thaana. "
+SECTION = "libs"
+HOMEPAGE = "http://fribidi.org/"
+BUGTRACKER = "https://github.com/fribidi/fribidi/issues"
+LICENSE = "LGPL-2.1-or-later"
+LIC_FILES_CHKSUM = "file://COPYING;md5=a916467b91076e631dd8edb7424769c7"
+
+SRC_URI = "${GITHUB_BASE_URI}/download/v${PV}/${BP}.tar.xz \
+ "
+SRC_URI[sha256sum] = "7fa16c80c81bd622f7b198d31356da139cc318a63fc7761217af4130903f54a2"
+
+inherit meson lib_package pkgconfig github-releases
+
+CVE_PRODUCT = "gnu_fribidi fribidi"
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-support/gdbm/files/run-ptest b/meta/recipes-support/gdbm/files/run-ptest
index 615da8444b..8d6481185d 100755
--- a/meta/recipes-support/gdbm/files/run-ptest
+++ b/meta/recipes-support/gdbm/files/run-ptest
@@ -4,4 +4,4 @@
cd tests
-./testsuite AUTOTEST_PATH=$PWD abs_builddir=$PWD COMPAT=1 | sed 's/^[^0-9]*\([0-9]\+\): \(.*\)\(ok\|pass\|skipped\|fail\|FAILED\)\(.*\)$/\3: \2 \4/;s/ \+/ /g;s/^ok/PASS/;s/^skipped/SKIP/;s/^fail/FAIL/;s/^FAILED/FAIL/'
+./testsuite --am-fmt AUTOTEST_PATH=$PWD abs_builddir=$PWD COMPAT=1
diff --git a/meta/recipes-support/gmp/gmp/cve-2021-43618.patch b/meta/recipes-support/gmp/gmp/cve-2021-43618.patch
deleted file mode 100644
index 095fb21eaa..0000000000
--- a/meta/recipes-support/gmp/gmp/cve-2021-43618.patch
+++ /dev/null
@@ -1,27 +0,0 @@
-CVE: CVE-2021-43618
-Upstream-Status: Backport
-Signed-off-by: Ross Burton <ross.burton@arm.com>
-
-# HG changeset patch
-# User Marco Bodrato <bodrato@mail.dm.unipi.it>
-# Date 1634836009 -7200
-# Node ID 561a9c25298e17bb01896801ff353546c6923dbd
-# Parent e1fd9db13b475209a864577237ea4b9105b3e96e
-mpz/inp_raw.c: Avoid bit size overflows
-
-diff -r e1fd9db13b47 -r 561a9c25298e mpz/inp_raw.c
---- a/mpz/inp_raw.c Tue Dec 22 23:49:51 2020 +0100
-+++ b/mpz/inp_raw.c Thu Oct 21 19:06:49 2021 +0200
-@@ -88,8 +88,11 @@
-
- abs_csize = ABS (csize);
-
-+ if (UNLIKELY (abs_csize > ~(mp_bitcnt_t) 0 / 8))
-+ return 0; /* Bit size overflows */
-+
- /* round up to a multiple of limbs */
-- abs_xsize = BITS_TO_LIMBS (abs_csize*8);
-+ abs_xsize = BITS_TO_LIMBS ((mp_bitcnt_t) abs_csize * 8);
-
- if (abs_xsize != 0)
- {
diff --git a/meta/recipes-support/gmp/gmp_6.2.1.bb b/meta/recipes-support/gmp/gmp_6.2.1.bb
deleted file mode 100644
index 29055492c2..0000000000
--- a/meta/recipes-support/gmp/gmp_6.2.1.bb
+++ /dev/null
@@ -1,47 +0,0 @@
-require gmp.inc
-
-LICENSE = "GPL-2.0-or-later | LGPL-3.0-or-later"
-
-LIC_FILES_CHKSUM = "\
- file://COPYING;md5=d32239bcb673463ab874e80d47fae504 \
- file://COPYING.LESSERv3;md5=6a6a8e020838b23406c81b19c1d46df6 \
- file://COPYINGv2;md5=b234ee4d69f5fce4486a80fdaf4a4263 \
- file://COPYINGv3;md5=11cc2d3ee574f9d6b7ee797bdce4d423 \
-"
-
-REVISION = ""
-SRC_URI = "https://gmplib.org/download/${BPN}/${BP}${REVISION}.tar.bz2 \
- file://use-includedir.patch \
- file://0001-Append-the-user-provided-flags-to-the-auto-detected-.patch \
- file://0001-confiure.ac-Believe-the-cflags-from-environment.patch \
- file://cve-2021-43618.patch \
- "
-SRC_URI[md5sum] = "28971fc21cf028042d4897f02fd355ea"
-SRC_URI[sha256sum] = "eae9326beb4158c386e39a356818031bd28f3124cf915f8c5b1dc4c7a36b4d7c"
-
-acpaths = ""
-
-EXTRA_OECONF += " --enable-cxx=detect"
-EXTRA_OECONF:append:mipsarchr6 = " --disable-assembly"
-
-PACKAGES =+ "libgmpxx"
-FILES:libgmpxx = "${libdir}/libgmpxx${SOLIBS}"
-
-do_install:append() {
- oe_multilib_header gmp.h
-}
-
-do_install:prepend:class-target() {
- sed -i \
- -e "s|--sysroot=${STAGING_DIR_HOST}||g" \
- -e "s|${DEBUG_PREFIX_MAP}||g" \
- ${B}/gmp.h
-}
-
-SSTATE_SCAN_FILES += "gmp.h"
-
-# Doesn't compile in MIPS16e mode due to use of hand-written
-# assembly
-MIPS_INSTRUCTION_SET = "mips"
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-support/gmp/gmp_6.3.0.bb b/meta/recipes-support/gmp/gmp_6.3.0.bb
new file mode 100644
index 0000000000..fd4aec92fd
--- /dev/null
+++ b/meta/recipes-support/gmp/gmp_6.3.0.bb
@@ -0,0 +1,53 @@
+require gmp.inc
+
+LICENSE = "GPL-2.0-or-later | LGPL-3.0-or-later"
+
+LIC_FILES_CHKSUM = "\
+ file://COPYING;md5=d32239bcb673463ab874e80d47fae504 \
+ file://COPYING.LESSERv3;md5=6a6a8e020838b23406c81b19c1d46df6 \
+ file://COPYINGv2;md5=b234ee4d69f5fce4486a80fdaf4a4263 \
+ file://COPYINGv3;md5=11cc2d3ee574f9d6b7ee797bdce4d423 \
+"
+
+REVISION = ""
+SRC_URI = "https://gmplib.org/download/${BPN}/${BP}${REVISION}.tar.bz2 \
+ file://use-includedir.patch \
+ file://0001-Append-the-user-provided-flags-to-the-auto-detected-.patch \
+ file://0001-confiure.ac-Believe-the-cflags-from-environment.patch \
+ "
+SRC_URI[sha256sum] = "ac28211a7cfb609bae2e2c8d6058d66c8fe96434f740cf6fe2e47b000d1c20cb"
+
+acpaths = ""
+
+EXTRA_OECONF += " --enable-cxx=detect"
+EXTRA_OECONF:append:mipsarchr6 = " --disable-assembly"
+
+PACKAGES =+ "libgmpxx"
+FILES:libgmpxx = "${libdir}/libgmpxx${SOLIBS}"
+
+do_install:append() {
+ oe_multilib_header gmp.h
+}
+
+fix_absolute_paths () {
+ sed -i \
+ -e "s|--sysroot=${STAGING_DIR_HOST}||g" \
+ -e "s|${DEBUG_PREFIX_MAP}||g" \
+ ${B}/gmp.h
+}
+
+do_install:prepend:class-target() {
+ fix_absolute_paths
+}
+
+do_install:prepend:class-nativesdk() {
+ fix_absolute_paths
+}
+
+SSTATE_SCAN_FILES += "gmp.h"
+
+# Doesn't compile in MIPS16e mode due to use of hand-written
+# assembly
+MIPS_INSTRUCTION_SET = "mips"
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-support/gnome-desktop-testing/gnome-desktop-testing_2021.1.bb b/meta/recipes-support/gnome-desktop-testing/gnome-desktop-testing_2021.1.bb
index 0cc7cd7b67..1173dd4af3 100644
--- a/meta/recipes-support/gnome-desktop-testing/gnome-desktop-testing_2021.1.bb
+++ b/meta/recipes-support/gnome-desktop-testing/gnome-desktop-testing_2021.1.bb
@@ -9,7 +9,7 @@ LICENSE = "LGPL-2.0-or-later"
LIC_FILES_CHKSUM = "file://COPYING;md5=3bf50002aefd002f49e7bb854063f7e7 \
file://src/gnome-desktop-testing-runner.c;beginline=1;endline=20;md5=7ef3ad9da2ffcf7707dc11151fe007f4"
-SRC_URI = "git://gitlab.gnome.org/GNOME/gnome-desktop-testing.git;protocol=http;branch=master \
+SRC_URI = "git://gitlab.gnome.org/GNOME/gnome-desktop-testing.git;protocol=https;branch=master \
file://0001-fix-non-literal-format-string-issue-with-clang.patch \
"
SRCREV = "e346cd4ed2e2102c9b195b614f3c642d23f5f6e7"
diff --git a/meta/recipes-support/gnupg/gnupg/0001-configure.ac-use-a-custom-value-for-the-location-of-.patch b/meta/recipes-support/gnupg/gnupg/0001-configure.ac-use-a-custom-value-for-the-location-of-.patch
index c4ede9ea5e..f957f6b55e 100644
--- a/meta/recipes-support/gnupg/gnupg/0001-configure.ac-use-a-custom-value-for-the-location-of-.patch
+++ b/meta/recipes-support/gnupg/gnupg/0001-configure.ac-use-a-custom-value-for-the-location-of-.patch
@@ -1,4 +1,4 @@
-From 89b98553084fbefe1ef2c7cbff9e72cf43144c49 Mon Sep 17 00:00:00 2001
+From 6b581c43bd01f815db78a410fd3814fc5994171e Mon Sep 17 00:00:00 2001
From: Alexander Kanavin <alex.kanavin@gmail.com>
Date: Mon, 22 Jan 2018 18:00:21 +0200
Subject: [PATCH] configure.ac: use a custom value for the location of
@@ -14,10 +14,10 @@ Signed-off-by: Alexander Kanavin <alex.kanavin@gmail.com>
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/configure.ac b/configure.ac
-index d86c60e..65c22b2 100644
+index 26d7f7b..e953c2e 100644
--- a/configure.ac
+++ b/configure.ac
-@@ -1955,7 +1955,7 @@ AC_DEFINE_UNQUOTED(GPGCONF_DISP_NAME, "GPGConf",
+@@ -1921,7 +1921,7 @@ AC_DEFINE_UNQUOTED(GPGCONF_DISP_NAME, "GPGConf",
AC_DEFINE_UNQUOTED(GPGTAR_NAME, "gpgtar", [The name of the gpgtar tool])
diff --git a/meta/recipes-support/gnupg/gnupg/0002-use-pkgconfig-instead-of-npth-config.patch b/meta/recipes-support/gnupg/gnupg/0002-use-pkgconfig-instead-of-npth-config.patch
index d3790ac782..0e58fd4c4d 100644
--- a/meta/recipes-support/gnupg/gnupg/0002-use-pkgconfig-instead-of-npth-config.patch
+++ b/meta/recipes-support/gnupg/gnupg/0002-use-pkgconfig-instead-of-npth-config.patch
@@ -1,4 +1,4 @@
-From 15668161c351aee5f29152db2972b1648da11210 Mon Sep 17 00:00:00 2001
+From d9048788d906774b1475c3bb1b17e22455c2add4 Mon Sep 17 00:00:00 2001
From: Saul Wold <sgw@linux.intel.com>
Date: Wed, 16 Aug 2017 11:16:30 +0800
Subject: [PATCH] use pkgconfig instead of npth config
@@ -15,7 +15,7 @@ Signed-off-by: Hongxu Jia <hongxu.jia@windriver.com>
1 file changed, 8 insertions(+), 45 deletions(-)
diff --git a/m4/npth.m4 b/m4/npth.m4
-index 06cdaee..4adda01 100644
+index 619ed89..0cb354d 100644
--- a/m4/npth.m4
+++ b/m4/npth.m4
@@ -19,25 +19,10 @@ AC_DEFUN([_AM_PATH_NPTH_CONFIG],
@@ -25,7 +25,7 @@ index 06cdaee..4adda01 100644
+ AC_PATH_PROG(PKGCONFIG, pkg-config, no)
- use_gpgrt_config=""
-- if test x"$NPTH_CONFIG" = x -a x"$GPGRT_CONFIG" != x -a "$GPGRT_CONFIG" != "no"; then
+- if test x"$GPGRT_CONFIG" != x -a "$GPGRT_CONFIG" != "no"; then
- if $GPGRT_CONFIG npth --exists; then
- NPTH_CONFIG="$GPGRT_CONFIG npth"
- AC_MSG_NOTICE([Use gpgrt-config as npth-config])
diff --git a/meta/recipes-support/gnupg/gnupg/0004-autogen.sh-fix-find-version-for-beta-checking.patch b/meta/recipes-support/gnupg/gnupg/0004-autogen.sh-fix-find-version-for-beta-checking.patch
index dcd8582ca6..d664c36a1b 100644
--- a/meta/recipes-support/gnupg/gnupg/0004-autogen.sh-fix-find-version-for-beta-checking.patch
+++ b/meta/recipes-support/gnupg/gnupg/0004-autogen.sh-fix-find-version-for-beta-checking.patch
@@ -1,7 +1,7 @@
-From 914ae4a3f7529fb069467bf0ded57dd24ee2e763 Mon Sep 17 00:00:00 2001
+From 6a7f9b71d936847dcaeeac7d1b69d8299be4dd85 Mon Sep 17 00:00:00 2001
From: Wenzong Fan <wenzong.fan@windriver.com>
Date: Wed, 16 Aug 2017 11:23:22 +0800
-Subject: [PATCH 4/4] autogen.sh: fix find-version for beta checking
+Subject: [PATCH] autogen.sh: fix find-version for beta checking
find-version always assumes that gnupg is beta if autogen.sh is run
out of git-repo. This doesn't work for users whom just take release
@@ -13,15 +13,16 @@ Signed-off-by: Wenzong Fan <wenzong.fan@windriver.com>
Rebase to 2.1.23
Signed-off-by: Hongxu Jia <hongxu.jia@windriver.com>
+
---
autogen.sh | 1 -
1 file changed, 1 deletion(-)
diff --git a/autogen.sh b/autogen.sh
-index e5ba5bf..05e0e11 100755
+index 0abf103..78753b5 100755
--- a/autogen.sh
+++ b/autogen.sh
-@@ -245,7 +245,6 @@ if [ "$myhost" = "find-version" ]; then
+@@ -237,7 +237,6 @@ if [ "$myhost" = "find-version" ]; then
rvd=$((0x$(echo ${rev} | dd bs=1 count=4 2>/dev/null)))
else
ingit=no
@@ -29,6 +30,3 @@ index e5ba5bf..05e0e11 100755
tmp="-unknown"
rev="0000000"
rvd="0"
---
-1.8.3.1
-
diff --git a/meta/recipes-support/gnupg/gnupg/relocate.patch b/meta/recipes-support/gnupg/gnupg/relocate.patch
index 43999b8a6d..ea0252026a 100644
--- a/meta/recipes-support/gnupg/gnupg/relocate.patch
+++ b/meta/recipes-support/gnupg/gnupg/relocate.patch
@@ -1,4 +1,4 @@
-From 89ae4f03307104689e1857d9857d452af6b35ac4 Mon Sep 17 00:00:00 2001
+From c50d0a95fcf8f96c272fadd4ba85f3eeac39fcaf Mon Sep 17 00:00:00 2001
From: Ross Burton <ross.burton@intel.com>
Date: Wed, 19 Sep 2018 14:44:40 +0100
Subject: [PATCH] Allow the environment to override where gnupg looks for its
@@ -14,10 +14,10 @@ Signed-off-by: Alexander Kanavin <alex@linutronix.de>
1 file changed, 7 insertions(+), 7 deletions(-)
diff --git a/common/homedir.c b/common/homedir.c
-index 260aeb2..1aeb08d 100644
+index 6f99f3e..f22aa9e 100644
--- a/common/homedir.c
+++ b/common/homedir.c
-@@ -1143,7 +1143,7 @@ gnupg_socketdir (void)
+@@ -1284,7 +1284,7 @@ gnupg_socketdir (void)
if (!name)
{
unsigned int dummy;
@@ -26,7 +26,7 @@ index 260aeb2..1aeb08d 100644
gpgrt_annotate_leaked_object (name);
}
-@@ -1175,7 +1175,7 @@ gnupg_sysconfdir (void)
+@@ -1316,7 +1316,7 @@ gnupg_sysconfdir (void)
if (dir)
return dir;
else
@@ -35,7 +35,7 @@ index 260aeb2..1aeb08d 100644
#endif /*!HAVE_W32_SYSTEM*/
}
-@@ -1211,7 +1211,7 @@ gnupg_bindir (void)
+@@ -1352,7 +1352,7 @@ gnupg_bindir (void)
return name;
}
else
@@ -44,7 +44,7 @@ index 260aeb2..1aeb08d 100644
#endif /*!HAVE_W32_SYSTEM*/
}
-@@ -1238,7 +1238,7 @@ gnupg_libexecdir (void)
+@@ -1379,7 +1379,7 @@ gnupg_libexecdir (void)
return name;
}
else
@@ -53,7 +53,7 @@ index 260aeb2..1aeb08d 100644
#endif /*!HAVE_W32_SYSTEM*/
}
-@@ -1268,7 +1268,7 @@ gnupg_libdir (void)
+@@ -1409,7 +1409,7 @@ gnupg_libdir (void)
return name;
}
else
@@ -62,7 +62,7 @@ index 260aeb2..1aeb08d 100644
#endif /*!HAVE_W32_SYSTEM*/
}
-@@ -1299,7 +1299,7 @@ gnupg_datadir (void)
+@@ -1440,7 +1440,7 @@ gnupg_datadir (void)
return name;
}
else
@@ -71,7 +71,7 @@ index 260aeb2..1aeb08d 100644
#endif /*!HAVE_W32_SYSTEM*/
}
-@@ -1331,7 +1331,7 @@ gnupg_localedir (void)
+@@ -1472,7 +1472,7 @@ gnupg_localedir (void)
return name;
}
else
diff --git a/meta/recipes-support/gnupg/gnupg_2.3.6.bb b/meta/recipes-support/gnupg/gnupg_2.3.6.bb
deleted file mode 100644
index f35eb8c75a..0000000000
--- a/meta/recipes-support/gnupg/gnupg_2.3.6.bb
+++ /dev/null
@@ -1,87 +0,0 @@
-SUMMARY = "GNU Privacy Guard - encryption and signing tools (2.x)"
-DESCRIPTION = "A complete and free implementation of the OpenPGP standard \
-as defined by RFC4880 (also known as PGP). GnuPG allows you to encrypt \
-and sign your data and communications; it features a versatile key \
-management system, along with access modules for all kinds of public \
-key directories."
-HOMEPAGE = "http://www.gnupg.org/"
-LICENSE = "GPL-3.0-only & LGPL-3.0-only"
-LIC_FILES_CHKSUM = "file://COPYING;md5=189af8afca6d6075ba6c9e0aa8077626 \
- file://COPYING.LGPL3;md5=a2b6bf2cb38ee52619e60f30a1fc7257"
-
-DEPENDS = "npth libassuan libksba zlib bzip2 readline libgcrypt"
-
-inherit autotools gettext texinfo pkgconfig
-
-UPSTREAM_CHECK_URI = "https://gnupg.org/download/index.html"
-SRC_URI = "${GNUPG_MIRROR}/${BPN}/${BPN}-${PV}.tar.bz2 \
- file://0002-use-pkgconfig-instead-of-npth-config.patch \
- file://0004-autogen.sh-fix-find-version-for-beta-checking.patch \
- file://0001-Woverride-init-is-not-needed-with-gcc-9.patch \
- "
-SRC_URI:append:class-native = " file://0001-configure.ac-use-a-custom-value-for-the-location-of-.patch \
- file://relocate.patch"
-SRC_URI:append:class-nativesdk = " file://relocate.patch"
-
-SRC_URI[sha256sum] = "21f7fe2fc5c2f214184ab050977ec7a8e304e58bfae2ab098fec69f8fabda9c1"
-
-EXTRA_OECONF = "--disable-ldap \
- --disable-ccid-driver \
- --with-zlib=${STAGING_LIBDIR}/.. \
- --with-bzip2=${STAGING_LIBDIR}/.. \
- --with-readline=${STAGING_LIBDIR}/.. \
- --with-mailprog=${sbindir}/sendmail \
- --enable-gpg-is-gpg2 \
- "
-
-# A minimal package containing just enough to run gpg+gpgagent (E.g. use gpgme in opkg)
-PACKAGES =+ "${PN}-gpg"
-FILES:${PN}-gpg = " \
- ${bindir}/gpg \
- ${bindir}/gpg2 \
- ${bindir}/gpg-agent \
-"
-
-# Normal package (gnupg) should depend on minimal package (gnupg-gpg)
-# to ensure all tools are included. This is done only in non-native
-# builds. Native builds don't have sub-packages, so appending RDEPENDS
-# in this case breaks recipe parsing.
-RDEPENDS:${PN} += "${@ "" if ("native" in d.getVar("PN")) else (d.getVar("PN") + "-gpg")}"
-
-RRECOMMENDS:${PN} = "pinentry"
-
-do_configure:prepend () {
- # Else these could be used in prefernce to those in aclocal-copy
- rm -f ${S}/m4/gpg-error.m4
- rm -f ${S}/m4/libassuan.m4
- rm -f ${S}/m4/ksba.m4
- rm -f ${S}/m4/libgcrypt.m4
-}
-
-do_install:append() {
- ln -sf gpg2 ${D}${bindir}/gpg
- ln -sf gpgv2 ${D}${bindir}/gpgv
-}
-
-do_install:append:class-native() {
- create_wrappers ${STAGING_BINDIR_NATIVE}
-}
-
-do_install:append:class-nativesdk() {
- create_wrappers ${SDKPATHNATIVE}${bindir_nativesdk}
-}
-
-create_wrappers() {
- for i in gpg2 gpgconf gpg-agent gpg-connect-agent; do
- create_wrapper ${D}${bindir}/$i GNUPG_BINDIR=$1
- done
-}
-
-PACKAGECONFIG ??= "gnutls"
-PACKAGECONFIG[gnutls] = "--enable-gnutls, --disable-gnutls, gnutls"
-PACKAGECONFIG[sqlite3] = "--enable-sqlite, --disable-sqlite, sqlite3"
-
-BBCLASSEXTEND = "native nativesdk"
-
-lcl_maybe_fortify:mipsarch = ""
-
diff --git a/meta/recipes-support/gnupg/gnupg_2.4.5.bb b/meta/recipes-support/gnupg/gnupg_2.4.5.bb
new file mode 100644
index 0000000000..99996968b1
--- /dev/null
+++ b/meta/recipes-support/gnupg/gnupg_2.4.5.bb
@@ -0,0 +1,90 @@
+SUMMARY = "GNU Privacy Guard - encryption and signing tools (2.x)"
+DESCRIPTION = "A complete and free implementation of the OpenPGP standard \
+as defined by RFC4880 (also known as PGP). GnuPG allows you to encrypt \
+and sign your data and communications; it features a versatile key \
+management system, along with access modules for all kinds of public \
+key directories."
+HOMEPAGE = "http://www.gnupg.org/"
+LICENSE = "GPL-3.0-only & LGPL-3.0-only"
+LIC_FILES_CHKSUM = "file://COPYING;md5=189af8afca6d6075ba6c9e0aa8077626 \
+ file://COPYING.LGPL3;md5=a2b6bf2cb38ee52619e60f30a1fc7257"
+
+DEPENDS = "npth libassuan libksba zlib bzip2 readline libgcrypt"
+
+inherit autotools gettext texinfo pkgconfig
+
+UPSTREAM_CHECK_URI = "https://gnupg.org/download/index.html"
+SRC_URI = "${GNUPG_MIRROR}/${BPN}/${BPN}-${PV}.tar.bz2 \
+ file://0002-use-pkgconfig-instead-of-npth-config.patch \
+ file://0004-autogen.sh-fix-find-version-for-beta-checking.patch \
+ file://0001-Woverride-init-is-not-needed-with-gcc-9.patch \
+ "
+SRC_URI:append:class-native = " file://0001-configure.ac-use-a-custom-value-for-the-location-of-.patch \
+ file://relocate.patch"
+SRC_URI:append:class-nativesdk = " file://relocate.patch"
+
+SRC_URI[sha256sum] = "f68f7d75d06cb1635c336d34d844af97436c3f64ea14bcb7c869782f96f44277"
+
+EXTRA_OECONF = "--disable-ldap \
+ --disable-ccid-driver \
+ --with-zlib=${STAGING_LIBDIR}/.. \
+ --with-bzip2=${STAGING_LIBDIR}/.. \
+ --with-readline=${STAGING_LIBDIR}/.. \
+ --with-mailprog=${sbindir}/sendmail \
+ --enable-gpg-is-gpg2 \
+ --disable-tests \
+ "
+# yat2m can be found from recipe-sysroot-native non-deterministically with different versioning otherwise
+CACHED_CONFIGUREVARS += "ac_cv_path_YAT2M=./yat2m"
+
+# A minimal package containing just enough to run gpg+gpgagent (E.g. use gpgme in opkg)
+PACKAGES =+ "${PN}-gpg"
+FILES:${PN}-gpg = " \
+ ${bindir}/gpg \
+ ${bindir}/gpg2 \
+ ${bindir}/gpg-agent \
+"
+
+# Normal package (gnupg) should depend on minimal package (gnupg-gpg)
+# to ensure all tools are included. This is done only in non-native
+# builds. Native builds don't have sub-packages, so appending RDEPENDS
+# in this case breaks recipe parsing.
+RDEPENDS:${PN} += "${@ "" if ("native" in d.getVar("PN")) else (d.getVar("PN") + "-gpg")}"
+
+RRECOMMENDS:${PN} = "pinentry"
+
+do_configure:prepend () {
+ # Else these could be used in prefernce to those in aclocal-copy
+ rm -f ${S}/m4/gpg-error.m4
+ rm -f ${S}/m4/libassuan.m4
+ rm -f ${S}/m4/ksba.m4
+ rm -f ${S}/m4/libgcrypt.m4
+}
+
+do_install:append() {
+ ln -sf gpg2 ${D}${bindir}/gpg
+ ln -sf gpgv2 ${D}${bindir}/gpgv
+}
+
+do_install:append:class-native() {
+ create_wrappers ${STAGING_BINDIR_NATIVE}
+}
+
+do_install:append:class-nativesdk() {
+ create_wrappers ${SDKPATHNATIVE}${bindir_nativesdk}
+}
+
+create_wrappers() {
+ for i in gpg2 gpgconf gpg-agent gpg-connect-agent; do
+ create_wrapper ${D}${bindir}/$i GNUPG_BINDIR=$1
+ done
+}
+
+PACKAGECONFIG ??= "gnutls"
+PACKAGECONFIG[gnutls] = "--enable-gnutls, --disable-gnutls, gnutls"
+PACKAGECONFIG[sqlite3] = "--enable-sqlite, --disable-sqlite, sqlite3"
+
+BBCLASSEXTEND = "native nativesdk"
+
+lcl_maybe_fortify:mipsarch = ""
+
diff --git a/meta/recipes-support/gnutls/gnutls/0001-Creating-.hmac-file-should-be-excuted-in-target-envi.patch b/meta/recipes-support/gnutls/gnutls/0001-Creating-.hmac-file-should-be-excuted-in-target-envi.patch
new file mode 100644
index 0000000000..d13bfee8ef
--- /dev/null
+++ b/meta/recipes-support/gnutls/gnutls/0001-Creating-.hmac-file-should-be-excuted-in-target-envi.patch
@@ -0,0 +1,26 @@
+From 7be8ec59a53e93c2bd453b3ba2d63d1b300ef11f Mon Sep 17 00:00:00 2001
+From: Lei Maohui <leimaohui@fujitsu.com>
+Date: Mon, 23 May 2022 10:44:43 +0900
+Subject: [PATCH] Creating .hmac file should be excuted in target environment,
+ so deleted it from build process.
+
+Upstream-Status: Inappropriate [https://gitlab.com/gnutls/gnutls/-/issues/1373]
+Signed-off-by: Lei Maohui <leimaohui@fujitsu.com>
+---
+ lib/Makefile.am | 3 +--
+ 1 file changed, 1 insertion(+), 2 deletions(-)
+
+diff --git a/lib/Makefile.am b/lib/Makefile.am
+index a50d311..193ea19 100644
+--- a/lib/Makefile.am
++++ b/lib/Makefile.am
+@@ -198,8 +198,7 @@ hmac_file = .libs/.$(gnutls_so).hmac
+
+ all-local: $(hmac_file)
+
+-$(hmac_file): libgnutls.la fipshmac
+- $(AM_V_GEN) $(builddir)/fipshmac > $@-t && mv $@-t $@
++.libs/.$(gnutls_so).hmac:
+
+ CLEANFILES = $(hmac_file)
+ endif
diff --git a/meta/recipes-support/gnutls/gnutls/Add-ptest-support.patch b/meta/recipes-support/gnutls/gnutls/Add-ptest-support.patch
new file mode 100644
index 0000000000..1152d3797f
--- /dev/null
+++ b/meta/recipes-support/gnutls/gnutls/Add-ptest-support.patch
@@ -0,0 +1,57 @@
+From ff6a345235b2585c261752e47a749228672b07dc Mon Sep 17 00:00:00 2001
+From: Ravineet Singh <ravineet.a.singh@est.tech>
+Date: Tue, 10 Jan 2023 16:11:10 +0100
+Subject: [PATCH] gnutls: add ptest support
+
+Upstream-Status: Inappropriate [embedded specific]
+Signed-off-by: Ravineet Singh <ravineet.a.singh@est.tech>
+---
+ Makefile.am | 3 +++
+ configure.ac | 2 ++
+ tests/Makefile.am | 6 ++++++
+ 3 files changed, 11 insertions(+)
+
+diff --git a/Makefile.am b/Makefile.am
+index 843193f..816b09f 100644
+--- a/Makefile.am
++++ b/Makefile.am
+@@ -191,6 +191,9 @@ dist-hook:
+ mv ChangeLog $(distdir)
+ touch -c $(distdir)/doc/*.html $(distdir)/doc/*.pdf $(distdir)/doc/*.info
+
++install-ptest:
++ $(MAKE) -C tests DESTDIR=$(DESTDIR)/tests $@
++
+ .PHONY: abi-check abi-dump-versioned abi-dump-latest pic-check symbol-check local-code-coverage-output files-update AUTHORS
+
+ include $(top_srcdir)/cligen/cligen.mk
+diff --git a/configure.ac b/configure.ac
+index d6e03cf..e3f15fb 100644
+--- a/configure.ac
++++ b/configure.ac
+@@ -1213,6 +1213,8 @@ AC_SUBST(LIBGNUTLS_CFLAGS)
+
+ AM_CONDITIONAL(NEEDS_LIBRT, test "$gnutls_needs_librt" = "yes")
+
++AM_EXTRA_RECURSIVE_TARGETS([buildtest-TESTS])
++
+ AC_DEFINE([GNUTLS_INTERNAL_BUILD], 1, [We allow temporarily usage of deprecated functions - until they are removed.])
+
+ hw_features=
+diff --git a/tests/Makefile.am b/tests/Makefile.am
+index fb9e55a..c2d226a 100644
+--- a/tests/Makefile.am
++++ b/tests/Makefile.am
+@@ -658,6 +658,12 @@ SH_LOG_COMPILER = $(SHELL)
+ AM_VALGRINDFLAGS = --suppressions=$(srcdir)/suppressions.valgrind
+ LOG_COMPILER = $(LOG_VALGRIND)
+
++install-ptest: $(check_PROGRAMS)
++ @$(INSTALL) -d $(DESTDIR)
++ @for file in $^; do \
++ $(INSTALL_PROGRAM) $$file $(DESTDIR) ; \
++ done
++
+ distclean-local:
+ rm -rf softhsm-*.db softhsm-*.config *.tmp tmp-* x509-crt-list-import-url.config.db port.lock.d
+
diff --git a/meta/recipes-support/gnutls/gnutls/arm_eabi.patch b/meta/recipes-support/gnutls/gnutls/arm_eabi.patch
index 6eb1edbdb1..883d0123db 100644
--- a/meta/recipes-support/gnutls/gnutls/arm_eabi.patch
+++ b/meta/recipes-support/gnutls/gnutls/arm_eabi.patch
@@ -1,4 +1,4 @@
-From 8a5c96057cf305bbeac0d6e0e59ee24fbb9497fe Mon Sep 17 00:00:00 2001
+From d17ae0ef31c3c186766a338e8c40c87d1b98820e Mon Sep 17 00:00:00 2001
From: Joe Slater <jslater@windriver.com>
Date: Wed, 25 Jan 2017 13:52:59 -0800
Subject: [PATCH] gnutls: account for ARM_EABI
@@ -9,16 +9,15 @@ reference to them.
Upstream-Status: Pending
Signed-off-by: Joe Slater <jslater@windriver.com>
-
---
tests/seccomp.c | 2 ++
1 file changed, 2 insertions(+)
diff --git a/tests/seccomp.c b/tests/seccomp.c
-index ed14d00..3c5b726 100644
+index 881f0bb..5f9204a 100644
--- a/tests/seccomp.c
+++ b/tests/seccomp.c
-@@ -53,7 +53,9 @@ int disable_system_calls(void)
+@@ -55,7 +55,9 @@ int disable_system_calls(void)
ADD_SYSCALL(nanosleep, 0);
ADD_SYSCALL(clock_nanosleep, 0);
diff --git a/meta/recipes-support/gnutls/gnutls/run-ptest b/meta/recipes-support/gnutls/gnutls/run-ptest
new file mode 100644
index 0000000000..17e26eae70
--- /dev/null
+++ b/meta/recipes-support/gnutls/gnutls/run-ptest
@@ -0,0 +1,100 @@
+#!/bin/sh
+
+rjob() {
+ local job=$1
+ local log=$2
+
+ # TODO: Output will be garbled
+ ./${job} >> ${log} 2>&1
+
+ ret=$?
+ case $ret in
+ 0)
+ echo "PASS: $t" >> ${log}
+ echo "PASS: $t"
+ ;;
+ 77)
+ echo "SKIP: $t" >> ${log}
+ echo "SKIP: $t"
+ ;;
+ *)
+ echo "FAIL: $t" >> ${log}
+ echo "FAIL: $t"
+ ;;
+ esac
+}
+
+is_disallowed() {
+ local key=$1
+ $(echo ${test_disallowlist} | grep -w -q ${key})
+ return $?
+}
+
+# TODO
+# This list should probably be in a external file
+# Testcases defined here either take very long time (dtls-stress)
+# or are dependent on local files (certs, etc) in local file system
+# currently not exported to target.
+
+test_disallowlist=""
+test_disallowlist="${test_disallowlist} dtls-stress"
+test_disallowlist="${test_disallowlist} handshake-large-cert"
+test_disallowlist="${test_disallowlist} id-on-xmppAddr"
+test_disallowlist="${test_disallowlist} mini-x509-cas"
+test_disallowlist="${test_disallowlist} pkcs12_simple"
+test_disallowlist="${test_disallowlist} protocol-set-allowlist"
+test_disallowlist="${test_disallowlist} psk-file"
+test_disallowlist="${test_disallowlist} rawpk-api"
+test_disallowlist="${test_disallowlist} set_pkcs12_cred"
+test_disallowlist="${test_disallowlist} system-override-curves-allowlist"
+test_disallowlist="${test_disallowlist} system-override-hash"
+test_disallowlist="${test_disallowlist} system-override-sig"
+test_disallowlist="${test_disallowlist} system-override-sig-tls"
+test_disallowlist="${test_disallowlist} system-prio-file"
+test_disallowlist="${test_disallowlist} x509cert-tl"
+
+LOG=${PWD}/tests.log
+cd tests
+max_njobs=$(grep -c ^processor /proc/cpuinfo)
+njobs=0
+
+set +e
+
+for t in *; do
+ [ -x $t ] || continue
+ [ -f $t ] || continue
+
+ is_disallowed ${t}
+ [ $? -eq 0 ] && continue
+
+ rjob ${t} ${LOG} &
+ one=1
+ njobs=$(expr ${njobs} + ${one})
+ if [ ${njobs} -eq ${max_njobs} ]; then
+ wait
+ njobs=0
+ fi
+done
+wait
+
+skipped=$(grep -c SKIP ${LOG})
+passed=$(grep -c PASS ${LOG})
+failed=$(grep -c FAIL ${LOG})
+total=$(expr ${passed} + ${failed} + ${skipped})
+
+if [ ${failed} -ne 0 ]; then
+ echo
+ echo "Tests failed for gnutls, log is:"
+ echo "--------------------"
+ cat ${LOG}
+ echo
+fi
+
+echo
+echo "gnutls test summary:"
+echo "--------------------"
+echo "total: ${total}"
+echo "pass : ${passed}"
+echo "fail : ${failed}"
+echo "skip : ${skipped}"
+echo
diff --git a/meta/recipes-support/gnutls/gnutls_3.7.5.bb b/meta/recipes-support/gnutls/gnutls_3.7.5.bb
deleted file mode 100644
index 5feedb7fdc..0000000000
--- a/meta/recipes-support/gnutls/gnutls_3.7.5.bb
+++ /dev/null
@@ -1,68 +0,0 @@
-SUMMARY = "GNU Transport Layer Security Library"
-DESCRIPTION = "a secure communications library implementing the SSL, \
-TLS and DTLS protocols and technologies around them."
-HOMEPAGE = "https://gnutls.org/"
-BUGTRACKER = "https://savannah.gnu.org/support/?group=gnutls"
-
-LICENSE = "GPL-3.0-or-later & LGPL-2.1-or-later"
-LICENSE:${PN} = "LGPL-2.1-or-later"
-LICENSE:${PN}-xx = "LGPL-2.1-or-later"
-LICENSE:${PN}-bin = "GPL-3.0-or-later"
-LICENSE:${PN}-OpenSSL = "GPL-3.0-or-later"
-
-LIC_FILES_CHKSUM = "file://LICENSE;md5=71391c8e0c1cfe68077e7fce3b586283 \
- file://doc/COPYING;md5=c678957b0c8e964aa6c70fd77641a71e \
- file://doc/COPYING.LESSER;md5=a6f89e2100d9b6cdffcea4f398e37343"
-
-DEPENDS = "nettle gmp virtual/libiconv libunistring"
-DEPENDS:append:libc-musl = " argp-standalone"
-
-SHRT_VER = "${@d.getVar('PV').split('.')[0]}.${@d.getVar('PV').split('.')[1]}"
-
-SRC_URI = "https://www.gnupg.org/ftp/gcrypt/gnutls/v${SHRT_VER}/gnutls-${PV}.tar.xz \
- file://arm_eabi.patch \
- "
-
-SRC_URI[sha256sum] = "1f85028475b4f255cc5b480af0c37e61eab43024c1507c8b75d6be506c0553ad"
-
-inherit autotools texinfo pkgconfig gettext lib_package gtk-doc
-
-PACKAGECONFIG ??= "libidn ${@bb.utils.filter('DISTRO_FEATURES', 'seccomp', d)}"
-
-# You must also have CONFIG_SECCOMP enabled in the kernel for
-# seccomp to work.
-PACKAGECONFIG[seccomp] = "--with-libseccomp-prefix=${STAGING_EXECPREFIXDIR},ac_cv_libseccomp=no,libseccomp"
-PACKAGECONFIG[libidn] = "--with-idn,--without-idn,libidn2"
-PACKAGECONFIG[libtasn1] = "--with-included-libtasn1=no,--with-included-libtasn1,libtasn1"
-PACKAGECONFIG[p11-kit] = "--with-p11-kit,--without-p11-kit,p11-kit"
-PACKAGECONFIG[tpm] = "--with-tpm,--without-tpm,trousers"
-
-EXTRA_OECONF = " \
- --enable-doc \
- --disable-libdane \
- --disable-guile \
- --disable-rpath \
- --enable-openssl-compatibility \
- --with-libpthread-prefix=${STAGING_DIR_HOST}${prefix} \
- --with-librt-prefix=${STAGING_DIR_HOST}${prefix} \
- --with-default-trust-store-file=${sysconfdir}/ssl/certs/ca-certificates.crt \
-"
-
-# Otherwise the tools try and use HOSTTOOLS_DIR/bash as a shell.
-export POSIX_SHELL="${base_bindir}/sh"
-
-LDFLAGS:append:libc-musl = " -largp"
-
-do_configure:prepend() {
- for dir in . lib; do
- rm -f ${dir}/aclocal.m4 ${dir}/m4/libtool.m4 ${dir}/m4/lt*.m4
- done
-}
-
-PACKAGES =+ "${PN}-openssl ${PN}-xx"
-
-FILES:${PN}-dev += "${bindir}/gnutls-cli-debug"
-FILES:${PN}-openssl = "${libdir}/libgnutls-openssl.so.*"
-FILES:${PN}-xx = "${libdir}/libgnutlsxx.so.*"
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-support/gnutls/gnutls_3.8.4.bb b/meta/recipes-support/gnutls/gnutls_3.8.4.bb
new file mode 100644
index 0000000000..20139b4dd4
--- /dev/null
+++ b/meta/recipes-support/gnutls/gnutls_3.8.4.bb
@@ -0,0 +1,100 @@
+SUMMARY = "GNU Transport Layer Security Library"
+DESCRIPTION = "a secure communications library implementing the SSL, \
+TLS and DTLS protocols and technologies around them."
+HOMEPAGE = "https://gnutls.org/"
+BUGTRACKER = "https://savannah.gnu.org/support/?group=gnutls"
+
+LICENSE = "GPL-3.0-or-later & LGPL-2.1-or-later"
+LICENSE:${PN} = "LGPL-2.1-or-later"
+LICENSE:${PN}-xx = "LGPL-2.1-or-later"
+LICENSE:${PN}-bin = "GPL-3.0-or-later"
+LICENSE:${PN}-openssl = "GPL-3.0-or-later"
+
+LIC_FILES_CHKSUM = "file://LICENSE;md5=71391c8e0c1cfe68077e7fce3b586283 \
+ file://doc/COPYING;md5=1ebbd3e34237af26da5dc08a4e440464 \
+ file://doc/COPYING.LESSER;md5=4fbd65380cdd255951079008b364516c"
+
+DEPENDS = "nettle gmp virtual/libiconv libunistring"
+
+SHRT_VER = "${@d.getVar('PV').split('.')[0]}.${@d.getVar('PV').split('.')[1]}"
+
+SRC_URI = "https://www.gnupg.org/ftp/gcrypt/gnutls/v${SHRT_VER}/gnutls-${PV}.tar.xz \
+ file://arm_eabi.patch \
+ file://0001-Creating-.hmac-file-should-be-excuted-in-target-envi.patch \
+ file://run-ptest \
+ file://Add-ptest-support.patch \
+ "
+
+SRC_URI[sha256sum] = "2bea4e154794f3f00180fa2a5c51fe8b005ac7a31cd58bd44cdfa7f36ebc3a9b"
+
+inherit autotools texinfo pkgconfig gettext lib_package gtk-doc ptest
+
+PACKAGECONFIG ??= "libidn libtasn1 ${@bb.utils.filter('DISTRO_FEATURES', 'seccomp', d)}"
+
+# You must also have CONFIG_SECCOMP enabled in the kernel for
+# seccomp to work.
+PACKAGECONFIG[seccomp] = "--with-libseccomp-prefix=${STAGING_EXECPREFIXDIR},ac_cv_libseccomp=no,libseccomp"
+PACKAGECONFIG[libidn] = "--with-idn,--without-idn,libidn2"
+PACKAGECONFIG[libtasn1] = "--without-included-libtasn1,--with-included-libtasn1,libtasn1"
+PACKAGECONFIG[p11-kit] = "--with-p11-kit,--without-p11-kit,p11-kit"
+PACKAGECONFIG[tpm] = "--with-tpm,--without-tpm,trousers"
+PACKAGECONFIG[fips] = "--enable-fips140-mode --with-libdl-prefix=${STAGING_BASELIBDIR}"
+PACKAGECONFIG[dane] = "--enable-libdane,--disable-libdane,unbound"
+# Certificate compression
+PACKAGECONFIG[brotli] = "--with-brotli,--without-brotli,brotli"
+PACKAGECONFIG[zlib] = "--with-zlib,--without-zlib,zlib"
+PACKAGECONFIG[zstd] = "--with-zstd,--without-zstd,zstd"
+
+EXTRA_OECONF = " \
+ --enable-doc \
+ --disable-rpath \
+ --enable-openssl-compatibility \
+ --with-libpthread-prefix=${STAGING_DIR_HOST}${prefix} \
+ --with-librt-prefix=${STAGING_DIR_HOST}${prefix} \
+ --with-default-trust-store-file=${sysconfdir}/ssl/certs/ca-certificates.crt \
+"
+
+# Otherwise the tools try and use HOSTTOOLS_DIR/bash as a shell.
+export POSIX_SHELL="${base_bindir}/sh"
+
+do_configure:prepend() {
+ for dir in . lib; do
+ rm -f ${dir}/aclocal.m4 ${dir}/m4/libtool.m4 ${dir}/m4/lt*.m4
+ done
+}
+
+do_compile_ptest() {
+ oe_runmake -C tests buildtest-TESTS
+}
+
+do_install:append:class-target() {
+ if ${@bb.utils.contains('PACKAGECONFIG', 'fips', 'true', 'false', d)}; then
+ install -d ${D}${bindir}/bin
+ install -m 0755 ${B}/lib/.libs/fipshmac ${D}/${bindir}/
+ fi
+}
+
+PACKAGES =+ "${PN}-dane ${PN}-openssl ${PN}-xx ${PN}-fips"
+
+FILES:${PN}-dev += "${bindir}/gnutls-cli-debug"
+
+FILES:${PN}-dane = "${libdir}/libgnutls-dane.so.*"
+FILES:${PN}-openssl = "${libdir}/libgnutls-openssl.so.*"
+FILES:${PN}-xx = "${libdir}/libgnutlsxx.so.*"
+FILES:${PN}-fips = "${bindir}/fipshmac"
+
+RDEPENDS:${PN}-ptest += "python3"
+
+BBCLASSEXTEND = "native nativesdk"
+
+pkg_postinst_ontarget:${PN}-fips () {
+ if test -x ${bindir}/fipshmac
+ then
+ mkdir ${sysconfdir}/gnutls
+ touch ${sysconfdir}/gnutls/config
+ ${bindir}/fipshmac ${libdir}/libgnutls.so.30.*.* > ${libdir}/.libgnutls.so.30.hmac
+ ${bindir}/fipshmac ${libdir}/libnettle.so.8.* > ${libdir}/.libnettle.so.8.hmac
+ ${bindir}/fipshmac ${libdir}/libgmp.so.10.*.* > ${libdir}/.libgmp.so.10.hmac
+ ${bindir}/fipshmac ${libdir}/libhogweed.so.6.* > ${libdir}/.libhogweed.so.6.hmac
+ fi
+}
diff --git a/meta/recipes-support/gnutls/libtasn1_4.18.0.bb b/meta/recipes-support/gnutls/libtasn1_4.18.0.bb
deleted file mode 100644
index db49adc1c2..0000000000
--- a/meta/recipes-support/gnutls/libtasn1_4.18.0.bb
+++ /dev/null
@@ -1,23 +0,0 @@
-SUMMARY = "Library for ASN.1 and DER manipulation"
-DESCRIPTION = "A highly portable C library that encodes and decodes \
-DER/BER data following an ASN.1 schema. "
-HOMEPAGE = "http://www.gnu.org/software/libtasn1/"
-
-LICENSE = "GPL-3.0-or-later & LGPL-2.1-or-later"
-LICENSE:${PN}-bin = "GPL-3.0-or-later"
-LICENSE:${PN} = "LGPL-2.1-or-later"
-LIC_FILES_CHKSUM = "file://doc/COPYING;md5=d32239bcb673463ab874e80d47fae504 \
- file://doc/COPYING.LESSER;md5=4fbd65380cdd255951079008b364516c \
- file://COPYING;md5=75ac100ec923f959898182307970c360"
-
-SRC_URI = "${GNU_MIRROR}/libtasn1/libtasn1-${PV}.tar.gz \
- file://dont-depend-on-help2man.patch \
- "
-
-DEPENDS = "bison-native"
-
-SRC_URI[sha256sum] = "4365c154953563d64c67a024b607d1ee75c6db76e0d0f65709ea80a334cd1898"
-
-inherit autotools texinfo lib_package gtk-doc
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-support/gnutls/libtasn1_4.19.0.bb b/meta/recipes-support/gnutls/libtasn1_4.19.0.bb
new file mode 100644
index 0000000000..5fb8b54c06
--- /dev/null
+++ b/meta/recipes-support/gnutls/libtasn1_4.19.0.bb
@@ -0,0 +1,23 @@
+SUMMARY = "Library for ASN.1 and DER manipulation"
+DESCRIPTION = "A highly portable C library that encodes and decodes \
+DER/BER data following an ASN.1 schema. "
+HOMEPAGE = "http://www.gnu.org/software/libtasn1/"
+
+LICENSE = "GPL-3.0-or-later & LGPL-2.1-or-later"
+LICENSE:${PN}-bin = "GPL-3.0-or-later"
+LICENSE:${PN} = "LGPL-2.1-or-later"
+LIC_FILES_CHKSUM = "file://doc/COPYING;md5=d32239bcb673463ab874e80d47fae504 \
+ file://doc/COPYING.LESSER;md5=4fbd65380cdd255951079008b364516c \
+ file://COPYING;md5=75ac100ec923f959898182307970c360"
+
+SRC_URI = "${GNU_MIRROR}/libtasn1/libtasn1-${PV}.tar.gz \
+ file://dont-depend-on-help2man.patch \
+ "
+
+DEPENDS = "bison-native"
+
+SRC_URI[sha256sum] = "1613f0ac1cf484d6ec0ce3b8c06d56263cc7242f1c23b30d82d23de345a63f7a"
+
+inherit autotools texinfo lib_package gtk-doc
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-support/gpgme/gpgme/0001-Revert-build-Make-gpgme.m4-use-gpgrt-config-with-.pc.patch b/meta/recipes-support/gpgme/gpgme/0001-Revert-build-Make-gpgme.m4-use-gpgrt-config-with-.pc.patch
index 0c15cc7c38..8ed39280ca 100644
--- a/meta/recipes-support/gpgme/gpgme/0001-Revert-build-Make-gpgme.m4-use-gpgrt-config-with-.pc.patch
+++ b/meta/recipes-support/gpgme/gpgme/0001-Revert-build-Make-gpgme.m4-use-gpgrt-config-with-.pc.patch
@@ -1,4 +1,4 @@
-From a001b3c23bf80fd752044615b9bba6b926ff9666 Mon Sep 17 00:00:00 2001
+From ee1421f7a7a9f31400ba992a5be52b88d20170c9 Mon Sep 17 00:00:00 2001
From: Hongxu Jia <hongxu.jia@windriver.com>
Date: Fri, 10 May 2019 14:18:04 +0800
Subject: [PATCH] Revert "build: Make gpgme.m4 use gpgrt-config with *.pc."
@@ -10,36 +10,21 @@ The oe-core does not support gpgrt-config, so revert it
Upstream-Status: Inappropriate [oe-core specific]
Signed-off-by: Hongxu Jia <hongxu.jia@windriver.com>
+
---
- src/gpgme.m4 | 58 +++++++++-------------------------------------------
- 1 file changed, 10 insertions(+), 48 deletions(-)
+ src/gpgme.m4 | 54 ++++++++--------------------------------------------
+ 1 file changed, 8 insertions(+), 46 deletions(-)
diff --git a/src/gpgme.m4 b/src/gpgme.m4
-index c749a5d..8579146 100644
+index f2906c1..8cc2898 100644
--- a/src/gpgme.m4
+++ b/src/gpgme.m4
-@@ -1,5 +1,5 @@
- # gpgme.m4 - autoconf macro to detect GPGME.
--# Copyright (C) 2002, 2003, 2004, 2014, 2018 g10 Code GmbH
-+# Copyright (C) 2002, 2003, 2004, 2014 g10 Code GmbH
- #
- # This file is free software; as a special exception the author gives
- # unlimited permission to copy and/or distribute it, with or without
-@@ -9,7 +9,7 @@
- # WITHOUT ANY WARRANTY, to the extent permitted by law; without even the
- # implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
- #
--# Last-changed: 2020-11-20
-+# Last-changed: 2014-10-02
-
-
- AC_DEFUN([_AM_PATH_GPGME_CONFIG],
-@@ -36,24 +36,10 @@ AC_DEFUN([_AM_PATH_GPGME_CONFIG],
+@@ -125,24 +125,10 @@ AC_REQUIRE([_AM_PATH_GPGRT_CONFIG])dnl
fi
fi
- use_gpgrt_config=""
-- if test x"${GPGME_CONFIG}" = x -a x"$GPGRT_CONFIG" != x -a "$GPGRT_CONFIG" != "no"; then
+- if test x"$GPGRT_CONFIG" != x -a "$GPGRT_CONFIG" != "no"; then
- if $GPGRT_CONFIG gpgme --exists; then
- GPGME_CONFIG="$GPGRT_CONFIG gpgme"
- AC_MSG_NOTICE([Use gpgrt-config as gpgme-config])
@@ -61,7 +46,7 @@ index c749a5d..8579146 100644
fi
gpgme_version_major=`echo $gpgme_version | \
sed 's/\([[0-9]]*\)\.\([[0-9]]*\)\.\([[0-9]]*\).*/\1/'`
-@@ -66,16 +52,12 @@ AC_DEFUN([_AM_PATH_GPGME_CONFIG],
+@@ -155,16 +141,12 @@ AC_REQUIRE([_AM_PATH_GPGRT_CONFIG])dnl
AC_DEFUN([_AM_PATH_GPGME_CONFIG_HOST_CHECK],
[
@@ -80,7 +65,7 @@ index c749a5d..8579146 100644
*** built for $gpgme_config_host and thus may not match the
*** used host $host.
*** You may want to use the configure option --with-gpgme-prefix
-@@ -136,11 +118,7 @@ AC_DEFUN([AM_PATH_GPGME],
+@@ -225,11 +207,7 @@ AC_DEFUN([AM_PATH_GPGME],
# If we have a recent GPGME, we should also check that the
# API is compatible.
if test "$req_gpgme_api" -gt 0 ; then
@@ -93,7 +78,7 @@ index c749a5d..8579146 100644
if test "$tmp" -gt 0 ; then
if test "$req_gpgme_api" -ne "$tmp" ; then
ok=no
-@@ -280,11 +258,7 @@ AC_DEFUN([AM_PATH_GPGME_GLIB],
+@@ -372,11 +350,7 @@ AC_DEFUN([AM_PATH_GPGME_GLIB],
# If we have a recent GPGME, we should also check that the
# API is compatible.
if test "$req_gpgme_api" -gt 0 ; then
@@ -106,7 +91,7 @@ index c749a5d..8579146 100644
if test "$tmp" -gt 0 ; then
if test "$req_gpgme_api" -ne "$tmp" ; then
ok=no
-@@ -293,20 +267,8 @@ AC_DEFUN([AM_PATH_GPGME_GLIB],
+@@ -385,20 +359,8 @@ AC_DEFUN([AM_PATH_GPGME_GLIB],
fi
fi
if test $ok = yes; then
@@ -129,6 +114,3 @@ index c749a5d..8579146 100644
AC_MSG_RESULT(yes)
ifelse([$2], , :, [$2])
_AM_PATH_GPGME_CONFIG_HOST_CHECK
---
-2.25.1
-
diff --git a/meta/recipes-support/gpgme/gpgme/0001-autogen.sh-remove-unknown-in-version.patch b/meta/recipes-support/gpgme/gpgme/0001-autogen.sh-remove-unknown-in-version.patch
new file mode 100644
index 0000000000..6d67e6d6c9
--- /dev/null
+++ b/meta/recipes-support/gpgme/gpgme/0001-autogen.sh-remove-unknown-in-version.patch
@@ -0,0 +1,32 @@
+From acc4750ffa79a5c2ef3c95cf31ef51352a68837b Mon Sep 17 00:00:00 2001
+From: Chen Qi <Qi.Chen@windriver.com>
+Date: Tue, 28 Feb 2023 13:43:51 +0800
+Subject: [PATCH] autogen.sh: remove '-unknown' in version
+
+python setuptools >=66.0.0 treats '-unknown' as an invalid version.
+The error message is as below:
+ pkg_resources.extern.packaging.version.InvalidVersion: Invalid version: '1.18.0-unknown'
+
+Remove the '-unknown' suffix to fix this issue.
+
+Upstream-Status: Submitted [https://lists.gnupg.org/pipermail/gnupg-devel/2023-February/035293.html]
+
+Signed-off-by: Chen Qi <Qi.Chen@windriver.com>
+
+---
+ autogen.sh | 2 +-
+ 1 file changed, 1 insertion(+), 1 deletion(-)
+
+diff --git a/autogen.sh b/autogen.sh
+index 4e1665b..a55326d 100755
+--- a/autogen.sh
++++ b/autogen.sh
+@@ -269,7 +269,7 @@ if [ "$myhost" = "find-version" ]; then
+ else
+ ingit=no
+ beta=yes
+- tmp="-unknown"
++ tmp=""
+ rev="0000000"
+ rvd="0"
+ fi
diff --git a/meta/recipes-support/gpgme/gpgme/0001-pkgconfig.patch b/meta/recipes-support/gpgme/gpgme/0001-pkgconfig.patch
index 35c6b4056c..831771ca2d 100644
--- a/meta/recipes-support/gpgme/gpgme/0001-pkgconfig.patch
+++ b/meta/recipes-support/gpgme/gpgme/0001-pkgconfig.patch
@@ -1,4 +1,4 @@
-From 98ce65902b197faa8f660564613ca2e504c2f8f8 Mon Sep 17 00:00:00 2001
+From 7b3a00cb0fc27c896dd85c9afc474bd9c2aea30d Mon Sep 17 00:00:00 2001
From: Richard Purdie <richard.purdie@linuxfoundation.org>
Date: Fri, 10 May 2019 14:23:55 +0800
Subject: [PATCH] pkgconfig
@@ -15,20 +15,21 @@ Rebase to 1.13.0
Signed-off-by: Hongxu Jia <hongxu.jia@windriver.com>
Rebase to 1.17.0
Signed-off-by: Wang Mingyu <wangmy@fujitsu.com>
+
---
configure.ac | 1 +
src/Makefile.am | 4 +-
- src/gpgme-pthread.pc.in | 15 ++++++
- src/gpgme.m4 | 114 +++-------------------------------------
+ src/gpgme-pthread.pc.in | 15 +++++
+ src/gpgme.m4 | 119 +++-------------------------------------
src/gpgme.pc.in | 4 +-
- 5 files changed, 28 insertions(+), 110 deletions(-)
+ 5 files changed, 29 insertions(+), 114 deletions(-)
create mode 100644 src/gpgme-pthread.pc.in
diff --git a/configure.ac b/configure.ac
-index 80ce79c..d7c0ac1 100644
+index ae4c7da..df892c9 100644
--- a/configure.ac
+++ b/configure.ac
-@@ -905,6 +905,7 @@ AC_CONFIG_FILES(Makefile src/Makefile
+@@ -1112,6 +1112,7 @@ AC_CONFIG_FILES(Makefile src/Makefile
src/gpgme-glib.pc
src/gpgme.h)
AC_CONFIG_FILES(src/gpgme-config, chmod +x src/gpgme-config)
@@ -37,7 +38,7 @@ index 80ce79c..d7c0ac1 100644
AC_CONFIG_FILES(lang/cpp/tests/Makefile)
AC_CONFIG_FILES(lang/cpp/src/GpgmeppConfig-w32.cmake.in)
diff --git a/src/Makefile.am b/src/Makefile.am
-index 39c341f..3aca716 100644
+index ca70950..bda5498 100644
--- a/src/Makefile.am
+++ b/src/Makefile.am
@@ -20,11 +20,11 @@
@@ -52,8 +53,8 @@ index 39c341f..3aca716 100644
- gpgme.pc.in gpgme-glib.pc.in
+ gpgme.pc.in gpgme-glib.pc.in gpgme-pthread.pc.in
- bin_SCRIPTS = gpgme-config
- m4datadir = $(datadir)/aclocal
+ if USE_GPGRT_CONFIG
+ noinst_SCRIPTS = gpgme-config
diff --git a/src/gpgme-pthread.pc.in b/src/gpgme-pthread.pc.in
new file mode 100644
index 0000000..074bbf6
@@ -76,10 +77,10 @@ index 0000000..074bbf6
+Cflags: -I${includedir}
+Requires: libassuan gpg-error
diff --git a/src/gpgme.m4 b/src/gpgme.m4
-index 71b0010..30ec151 100644
+index 8cc2898..0ff550e 100644
--- a/src/gpgme.m4
+++ b/src/gpgme.m4
-@@ -79,7 +79,7 @@ dnl config script does not match the host specification the script
+@@ -168,7 +168,7 @@ dnl config script does not match the host specification the script
dnl is added to the gpg_config_script_warn variable.
dnl
AC_DEFUN([AM_PATH_GPGME],
@@ -88,7 +89,7 @@ index 71b0010..30ec151 100644
tmp=ifelse([$1], ,1:0.4.2,$1)
if echo "$tmp" | grep ':' >/dev/null 2>/dev/null ; then
req_gpgme_api=`echo "$tmp" | sed 's/\(.*\):\(.*\)/\1/'`
-@@ -89,36 +89,12 @@ AC_DEFUN([AM_PATH_GPGME],
+@@ -178,36 +178,12 @@ AC_DEFUN([AM_PATH_GPGME],
min_gpgme_version="$tmp"
fi
@@ -127,7 +128,7 @@ index 71b0010..30ec151 100644
if test "$tmp" -gt 0 ; then
if test "$req_gpgme_api" -ne "$tmp" ; then
ok=no
-@@ -127,19 +103,11 @@ AC_DEFUN([AM_PATH_GPGME],
+@@ -216,19 +192,11 @@ AC_DEFUN([AM_PATH_GPGME],
fi
fi
if test $ok = yes; then
@@ -147,44 +148,45 @@ index 71b0010..30ec151 100644
])
dnl AM_PATH_GPGME_PTHREAD([MINIMUM-VERSION,
-@@ -148,7 +116,7 @@ dnl Test for libgpgme and define GPGME_PTHREAD_CFLAGS
+@@ -236,9 +204,8 @@ dnl [ACTION-IF-FOUND [, ACTION-IF-NOT-FOUND ]]])
+ dnl Test for libgpgme and define GPGME_PTHREAD_CFLAGS
dnl and GPGME_PTHREAD_LIBS.
dnl
- AC_DEFUN([AM_PATH_GPGME_PTHREAD],
--[ AC_REQUIRE([_AM_PATH_GPGME_CONFIG])dnl
+-AC_DEFUN([AM_PATH_GPGME_PTHREAD],[
+- AC_OBSOLETE([$0], [; use AM_PATH_GPGME instead to use GPGME_CFLAGS and GPGME_LIBS])dnl
+- AC_REQUIRE([_AM_PATH_GPGME_CONFIG])dnl
++AC_DEFUN([AM_PATH_GPGME_PTHREAD],
+[
tmp=ifelse([$1], ,1:0.4.2,$1)
if echo "$tmp" | grep ':' >/dev/null 2>/dev/null ; then
req_gpgme_api=`echo "$tmp" | sed 's/\(.*\):\(.*\)/\1/'`
-@@ -158,38 +126,12 @@ AC_DEFUN([AM_PATH_GPGME_PTHREAD],
+@@ -248,40 +215,12 @@ AC_DEFUN([AM_PATH_GPGME_PTHREAD],[
min_gpgme_version="$tmp"
fi
- AC_MSG_CHECKING(for GPGME pthread - version >= $min_gpgme_version)
- ok=no
- if test "$GPGME_CONFIG" != "no" ; then
-- if `$GPGME_CONFIG --thread=pthread 2> /dev/null` ; then
-- req_major=`echo $min_gpgme_version | \
+- req_major=`echo $min_gpgme_version | \
- sed 's/\([[0-9]]*\)\.\([[0-9]]*\)\.\([[0-9]]*\)/\1/'`
-- req_minor=`echo $min_gpgme_version | \
+- req_minor=`echo $min_gpgme_version | \
- sed 's/\([[0-9]]*\)\.\([[0-9]]*\)\.\([[0-9]]*\)/\2/'`
-- req_micro=`echo $min_gpgme_version | \
+- req_micro=`echo $min_gpgme_version | \
- sed 's/\([[0-9]]*\)\.\([[0-9]]*\)\.\([[0-9]]*\)/\3/'`
-- if test "$gpgme_version_major" -gt "$req_major"; then
+- if test "$gpgme_version_major" -gt "$req_major"; then
- ok=yes
-- else
+- else
- if test "$gpgme_version_major" -eq "$req_major"; then
-- if test "$gpgme_version_minor" -gt "$req_minor"; then
-- ok=yes
-- else
-- if test "$gpgme_version_minor" -eq "$req_minor"; then
-- if test "$gpgme_version_micro" -ge "$req_micro"; then
-- ok=yes
-- fi
+- if test "$gpgme_version_minor" -gt "$req_minor"; then
+- ok=yes
+- else
+- if test "$gpgme_version_minor" -eq "$req_minor"; then
+- if test "$gpgme_version_micro" -ge "$req_micro"; then
+- ok=yes
+- fi
+- fi
- fi
-- fi
- fi
-- fi
- fi
- fi
+ PKG_CHECK_MODULES(GPGME_PTHREAD, [gpgme-pthread >= $min_gpgme_version], [ok=yes], [ok=no])
@@ -192,17 +194,21 @@ index 71b0010..30ec151 100644
# If we have a recent GPGME, we should also check that the
# API is compatible.
if test "$req_gpgme_api" -gt 0 ; then
-- tmp=`$GPGME_CONFIG --api-version 2>/dev/null || echo 0`
+- if test -z "$use_gpgrt_config"; then
+- tmp=`$GPGME_CONFIG --api-version 2>/dev/null || echo 0`
+- else
+- tmp=`$GPGME_CONFIG --variable=api_version 2>/dev/null || echo 0`
+- fi
+ tmp=`$PKG_CONFIG --variable=api_version gpgme-pthread 2>/dev/null || echo 0`
if test "$tmp" -gt 0 ; then
if test "$req_gpgme_api" -ne "$tmp" ; then
ok=no
-@@ -198,19 +140,11 @@ AC_DEFUN([AM_PATH_GPGME_PTHREAD],
+@@ -290,19 +229,11 @@ AC_DEFUN([AM_PATH_GPGME_PTHREAD],[
fi
fi
if test $ok = yes; then
-- GPGME_PTHREAD_CFLAGS=`$GPGME_CONFIG --thread=pthread --cflags`
-- GPGME_PTHREAD_LIBS=`$GPGME_CONFIG --thread=pthread --libs`
+- GPGME_PTHREAD_CFLAGS=`$GPGME_CONFIG --cflags`
+- GPGME_PTHREAD_LIBS=`$GPGME_CONFIG --libs`
- AC_MSG_RESULT(yes)
ifelse([$2], , :, [$2])
_AM_PATH_GPGME_CONFIG_HOST_CHECK
@@ -217,7 +223,7 @@ index 71b0010..30ec151 100644
])
-@@ -229,36 +163,12 @@ AC_DEFUN([AM_PATH_GPGME_GLIB],
+@@ -321,36 +252,12 @@ AC_DEFUN([AM_PATH_GPGME_GLIB],
min_gpgme_version="$tmp"
fi
@@ -256,7 +262,7 @@ index 71b0010..30ec151 100644
if test "$tmp" -gt 0 ; then
if test "$req_gpgme_api" -ne "$tmp" ; then
ok=no
-@@ -267,17 +177,9 @@ AC_DEFUN([AM_PATH_GPGME_GLIB],
+@@ -359,17 +266,9 @@ AC_DEFUN([AM_PATH_GPGME_GLIB],
fi
fi
if test $ok = yes; then
@@ -275,18 +281,15 @@ index 71b0010..30ec151 100644
- AC_SUBST(GPGME_GLIB_LIBS)
])
diff --git a/src/gpgme.pc.in b/src/gpgme.pc.in
-index 80d59de..932645b 100644
+index 9ddef5c..932645b 100644
--- a/src/gpgme.pc.in
+++ b/src/gpgme.pc.in
@@ -9,6 +9,6 @@ Name: gpgme
Description: GnuPG Made Easy to access GnuPG
Requires.private: gpg-error, libassuan
Version: @PACKAGE_VERSION@
--Cflags: -I${includedir} @GPGME_CONFIG_CFLAGS@
--Libs: -L${libdir} @GPGME_CONFIG_LIBS@
+-Cflags: @GPGME_CONFIG_CFLAGS@
+-Libs: @GPGME_CONFIG_LIBS@
+Cflags: -I${includedir}
+Libs: -L${libdir} -lgpgme
URL: https://www.gnupg.org/software/gpgme/index.html
---
-2.25.1
-
diff --git a/meta/recipes-support/gpgme/gpgme/0001-posix-io.c-Use-off_t-instead-of-off64_t.patch b/meta/recipes-support/gpgme/gpgme/0001-posix-io.c-Use-off_t-instead-of-off64_t.patch
new file mode 100644
index 0000000000..be58c42128
--- /dev/null
+++ b/meta/recipes-support/gpgme/gpgme/0001-posix-io.c-Use-off_t-instead-of-off64_t.patch
@@ -0,0 +1,42 @@
+From be33cf1812b4c2a3ed85fb2532992bfb2b27b3be Mon Sep 17 00:00:00 2001
+From: Khem Raj <raj.khem@gmail.com>
+Date: Thu, 15 Dec 2022 08:44:13 -0800
+Subject: [PATCH] posix-io.c: Use off_t instead of off64_t
+
+configure.ac checks for largefile support via AC_SYS_LARGEFILE already,
+therefore use off_t and ino_t instead of 64bit variants. Musl e.g. does
+not define them without _LARGEFILE64_SOURCE and error is not seen on
+glibc because _GNU_SOURCE defines _LARGEFILE64_SOURCE
+
+Fixes errors like below on 32-bit musl systems
+
+../../../../../../../../workspace/sources/gpgme/src/posix-io.c:77:3: error: unknown type name 'ino64_t'; did you mean 'int64_t'?
+ ino64_t d_ino;
+ ^~~~~~~
+ int64_t
+ ^
+../../../../../../../../workspace/sources/gpgme/src/posix-io.c:78:3: error: unknown type name 'off64_t'; did you mean 'off_t'?
+ off64_t d_off;
+ ^~~~~~~
+
+Upstream-Status: Submitted [https://lists.gnupg.org/pipermail/gnupg-devel/2022-December/035222.html]
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+---
+ src/posix-io.c | 4 ++--
+ 1 file changed, 2 insertions(+), 2 deletions(-)
+
+diff --git a/src/posix-io.c b/src/posix-io.c
+index 0d0a98b..286a626 100644
+--- a/src/posix-io.c
++++ b/src/posix-io.c
+@@ -74,8 +74,8 @@
+ * define it ourselves. */
+ struct linux_dirent64
+ {
+- ino64_t d_ino;
+- off64_t d_off;
++ ino_t d_ino;
++ off_t d_off;
+ unsigned short d_reclen;
+ unsigned char d_type;
+ char d_name[];
diff --git a/meta/recipes-support/gpgme/gpgme/0001-use-closefrom-on-linux-and-glibc-2.34.patch b/meta/recipes-support/gpgme/gpgme/0001-use-closefrom-on-linux-and-glibc-2.34.patch
index f3de55f7b3..3d05808d63 100644
--- a/meta/recipes-support/gpgme/gpgme/0001-use-closefrom-on-linux-and-glibc-2.34.patch
+++ b/meta/recipes-support/gpgme/gpgme/0001-use-closefrom-on-linux-and-glibc-2.34.patch
@@ -1,4 +1,4 @@
-From adb1d4e5498a19e9d591ac8f42f9ddfdb23a1354 Mon Sep 17 00:00:00 2001
+From e1fc4b06dcb62e1c2d85ae99f39ef1a8860570e1 Mon Sep 17 00:00:00 2001
From: Khem Raj <raj.khem@gmail.com>
Date: Thu, 15 Jul 2021 12:33:13 -0700
Subject: [PATCH] use closefrom() on linux and glibc 2.34+
@@ -8,15 +8,16 @@ Signed-off-by: Khem Raj <raj.khem@gmail.com>
Rebase to 1.17.0
Signed-off-by: Wang Mingyu <wangmy@fujitsu.com>
+
---
src/posix-io.c | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/src/posix-io.c b/src/posix-io.c
-index 5c6cf1d..52e513a 100644
+index a422d8f..0d0a98b 100644
--- a/src/posix-io.c
+++ b/src/posix-io.c
-@@ -570,7 +570,7 @@ _gpgme_io_spawn (const char *path, char *const argv[], unsigned int flags,
+@@ -577,7 +577,7 @@ _gpgme_io_spawn (const char *path, char *const argv[], unsigned int flags,
if (fd_list[i].fd > fd)
fd = fd_list[i].fd;
fd++;
@@ -25,6 +26,3 @@ index 5c6cf1d..52e513a 100644
closefrom (fd);
max_fds = fd;
#else /*!__sun */
---
-2.25.1
-
diff --git a/meta/recipes-support/gpgme/gpgme/0002-gpgme-lang-python-gpg-error-config-should-not-be-use.patch b/meta/recipes-support/gpgme/gpgme/0002-gpgme-lang-python-gpg-error-config-should-not-be-use.patch
index ecd1793ab9..0d2692912f 100644
--- a/meta/recipes-support/gpgme/gpgme/0002-gpgme-lang-python-gpg-error-config-should-not-be-use.patch
+++ b/meta/recipes-support/gpgme/gpgme/0002-gpgme-lang-python-gpg-error-config-should-not-be-use.patch
@@ -1,7 +1,7 @@
-From 52f8fd2010b900c7d382a7c4e6c1a317d8160585 Mon Sep 17 00:00:00 2001
+From 9fce0689bc04506e5758444efa24ea99fe1ff5ef Mon Sep 17 00:00:00 2001
From: Hongxu Jia <hongxu.jia@windriver.com>
Date: Fri, 10 May 2019 14:30:36 +0800
-Subject: [PATCH 2/7] gpgme/lang/python: gpg-error-config should not be used
+Subject: [PATCH] gpgme/lang/python: gpg-error-config should not be used
gpg-error-config was modified by OE to always return an error.
So we want to find an alternative way to retrieve whatever it
@@ -14,39 +14,23 @@ Signed-off-by: Mark Hatle <mark.hatle@windriver.com>
Rebase to 1.13.0
Signed-off-by: Hongxu Jia <hongxu.jia@windriver.com>
+
---
- lang/python/setup.py.in | 9 +--------
- 1 file changed, 1 insertion(+), 8 deletions(-)
+ lang/python/setup.py.in | 3 +--
+ 1 file changed, 1 insertion(+), 2 deletions(-)
diff --git a/lang/python/setup.py.in b/lang/python/setup.py.in
-index 9785a28..006216d 100755
+index 6f36861..1d9058b 100755
--- a/lang/python/setup.py.in
+++ b/lang/python/setup.py.in
-@@ -30,7 +30,6 @@ import subprocess
- import sys
-
- # Out-of-tree build of the gpg bindings.
--gpg_error_config = ['gpg-error-config']
- gpgme_config_flags = ['--thread=pthread']
- gpgme_config = ['gpgme-config'] + gpgme_config_flags
- gpgme_h = ''
-@@ -182,15 +181,9 @@ class BuildExtFirstHack(build):
+@@ -169,9 +169,8 @@ class BuildExtFirstHack(build):
def _generate_errors_i(self):
-- try:
-- subprocess.check_call(
-- gpg_error_config + ['--version'], stdout=devnull)
-- except:
-- sys.exit('Could not find gpg-error-config. ' +
-- 'Please install the libgpg-error development package.')
-
+- ge_cflags='@GPG_ERROR_CFLAGS@'
gpg_error_content = self._read_header(
-- 'gpg-error.h', getconfig('cflags', config=gpg_error_config))
+- 'gpg-error.h', ge_cflags.split(' ') if ge_cflags else [])
+ "gpg-error.h", os.environ.get('CFLAGS').split())
filter_re = re.compile(r'GPG_ERR_[^ ]* =')
rewrite_re = re.compile(r' *(.*) = .*')
---
-2.7.4
-
diff --git a/meta/recipes-support/gpgme/gpgme/0003-Correctly-install-python-modules.patch b/meta/recipes-support/gpgme/gpgme/0003-Correctly-install-python-modules.patch
index de1689ec05..204fe5934d 100644
--- a/meta/recipes-support/gpgme/gpgme/0003-Correctly-install-python-modules.patch
+++ b/meta/recipes-support/gpgme/gpgme/0003-Correctly-install-python-modules.patch
@@ -1,7 +1,7 @@
-From f632148fcc8757bb9a9601a6dab275e88cd309d2 Mon Sep 17 00:00:00 2001
+From 8c850aa109c42d0c193bccc7af45dbc2689b23f6 Mon Sep 17 00:00:00 2001
From: Hongxu Jia <hongxu.jia@windriver.com>
Date: Tue, 30 Jan 2018 15:28:49 +0800
-Subject: [PATCH 3/7] Correctly install python modules
+Subject: [PATCH] Correctly install python modules
Upstream-Status: Inappropriate [oe-core specific]
Signed-off-by: Alexander Kanavin <alex.kanavin@gmail.com>
@@ -12,15 +12,16 @@ Signed-off-by: Hongxu Jia <hongxu.jia@windriver.com>
Rebase to 1.10.0
Signed-off-by: Hongxu Jia <hongxu.jia@windriver.com>
+
---
lang/python/Makefile.am | 1 +
1 file changed, 1 insertion(+)
diff --git a/lang/python/Makefile.am b/lang/python/Makefile.am
-index 6988faf..36c6f7b 100644
+index 68b98e8..bbb9111 100644
--- a/lang/python/Makefile.am
+++ b/lang/python/Makefile.am
-@@ -93,6 +93,7 @@ install-exec-local:
+@@ -97,6 +97,7 @@ install-exec-local:
--build-base="$$(basename "$${PYTHON}")-gpg" \
install \
--prefix "$(DESTDIR)$(prefix)" \
@@ -28,6 +29,3 @@ index 6988faf..36c6f7b 100644
--verbose ; \
done
---
-2.7.4
-
diff --git a/meta/recipes-support/gpgme/gpgme/0005-gpgme-config-skip-all-lib-or-usr-lib-directories-in-.patch b/meta/recipes-support/gpgme/gpgme/0005-gpgme-config-skip-all-lib-or-usr-lib-directories-in-.patch
index 50bf21463f..7fe0c9538a 100644
--- a/meta/recipes-support/gpgme/gpgme/0005-gpgme-config-skip-all-lib-or-usr-lib-directories-in-.patch
+++ b/meta/recipes-support/gpgme/gpgme/0005-gpgme-config-skip-all-lib-or-usr-lib-directories-in-.patch
@@ -1,7 +1,7 @@
-From 8d9613c34ae495bbcbd725a2e7ac48138ba53c30 Mon Sep 17 00:00:00 2001
+From 57475742b0288b4ee53c01c59c3ab03c1ef7932e Mon Sep 17 00:00:00 2001
From: Alexander Kanavin <alex.kanavin@gmail.com>
Date: Thu, 13 Apr 2017 16:40:27 +0300
-Subject: [PATCH 5/7] gpgme-config: skip all /lib* or /usr/lib* directories in
+Subject: [PATCH] gpgme-config: skip all /lib* or /usr/lib* directories in
output
The logic was not working in multilib setups which use other
@@ -9,15 +9,16 @@ directory names than plain /lib or /usr/lib.
Upstream-Status: Inappropriate [oe-core specific]
Signed-off-by: Alexander Kanavin <alex.kanavin@gmail.com>
+
---
src/gpgme-config.in | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/src/gpgme-config.in b/src/gpgme-config.in
-index a4d152e..8342865 100644
+index 56b98f8..e96f3c3 100644
--- a/src/gpgme-config.in
+++ b/src/gpgme-config.in
-@@ -154,7 +154,7 @@ while test $# -gt 0; do
+@@ -157,7 +157,7 @@ while test $# -gt 0; do
for i in $libs $tmp_l $assuan_libs $gpg_error_libs $tmp_x; do
skip=no
case $i in
@@ -26,6 +27,3 @@ index a4d152e..8342865 100644
skip=yes
;;
-L*|-l*)
---
-2.7.4
-
diff --git a/meta/recipes-support/gpgme/gpgme/0006-fix-build-path-issue.patch b/meta/recipes-support/gpgme/gpgme/0006-fix-build-path-issue.patch
index 1471184d67..8d48db2feb 100644
--- a/meta/recipes-support/gpgme/gpgme/0006-fix-build-path-issue.patch
+++ b/meta/recipes-support/gpgme/gpgme/0006-fix-build-path-issue.patch
@@ -1,7 +1,7 @@
-From ef920688bfe1c7328c9e97229d62ccd35304ad84 Mon Sep 17 00:00:00 2001
+From 7938a5a760827aec737ea49d2f8719051abda188 Mon Sep 17 00:00:00 2001
From: Hongxu Jia <hongxu.jia@windriver.com>
Date: Wed, 31 Jan 2018 11:01:09 +0800
-Subject: [PATCH 6/7] fix build path issue
+Subject: [PATCH] fix build path issue
Get the "--root" directory supplied to the "install" command,
and use it as a prefix to strip off the purported filename
@@ -12,15 +12,16 @@ to use relative path.
Upstream-Status: Submitted [gnupg-devel@gnupg.org]
Signed-off-by: Hongxu Jia <hongxu.jia@windriver.com>
+
---
lang/python/Makefile.am | 5 +++--
1 file changed, 3 insertions(+), 2 deletions(-)
diff --git a/lang/python/Makefile.am b/lang/python/Makefile.am
-index 36c6f7b..ce9a108 100644
+index bbb9111..cee499e 100644
--- a/lang/python/Makefile.am
+++ b/lang/python/Makefile.am
-@@ -92,8 +92,9 @@ install-exec-local:
+@@ -96,8 +96,9 @@ install-exec-local:
build \
--build-base="$$(basename "$${PYTHON}")-gpg" \
install \
@@ -32,6 +33,3 @@ index 36c6f7b..ce9a108 100644
--verbose ; \
done
---
-2.7.4
-
diff --git a/meta/recipes-support/gpgme/gpgme/0007-python-Add-variables-to-tests.patch b/meta/recipes-support/gpgme/gpgme/0007-python-Add-variables-to-tests.patch
deleted file mode 100644
index 4c97f63cc2..0000000000
--- a/meta/recipes-support/gpgme/gpgme/0007-python-Add-variables-to-tests.patch
+++ /dev/null
@@ -1,52 +0,0 @@
-From 093c88817397425ee4c2333c469467229a46c9e1 Mon Sep 17 00:00:00 2001
-From: Yuan Chao <yuanc.fnst@cn.fujitsu.com>
-Date: Mon, 5 Aug 2019 01:00:58 +0900
-Subject: [PATCH 7/7] python: Add variables to tests
-
-* configure.ac, lang/python/Makefile.am:
- New variable to `lang/python', set to `lang/python' if RUN_LANG_PYTHON_TESTS
-
-Upstream-Status: Submitted [gnupg-devel@gnupg.org]
-
-Signed-off-by: Hongxu Jia <hongxu.jia@windriver.com>
-
-Signed-off-by: Yuan Chao <yuanc.fnst@cn.fujitsu.com>
----
- configure.ac | 5 +++++
- lang/python/Makefile.am | 3 +++
- 2 files changed, 8 insertions(+)
-
-diff --git a/configure.ac b/configure.ac
-index bd85886..b5ae42e 100644
---- a/configure.ac
-+++ b/configure.ac
-@@ -533,6 +533,11 @@ AC_ARG_ENABLE(g13-test,
- run_g13_test=$enableval)
- AM_CONDITIONAL(RUN_G13_TESTS, test "$run_g13_test" = "yes")
-
-+run_lang_python_test="yes"
-+AC_ARG_ENABLE(lang-python-test,
-+ AC_HELP_STRING([--disable-lang-python-test], [disable Python regression test]),
-+ run_lang_python_test=$enableval)
-+AM_CONDITIONAL(RUN_LANG_PYTHON_TESTS, test "$run_lang_python_test" = "yes")
-
- # Checks for header files.
- AC_CHECK_HEADERS_ONCE([locale.h sys/select.h sys/uio.h argp.h stdint.h
-diff --git a/lang/python/Makefile.am b/lang/python/Makefile.am
-index 551deee..0fd555e 100644
---- a/lang/python/Makefile.am
-+++ b/lang/python/Makefile.am
-@@ -23,7 +23,10 @@ EXTRA_DIST = \
- gpgme.i \
- helpers.c helpers.h private.h
-
-+SUBDIRS = . examples doc src
-+if RUN_LANG_PYTHON_TESTS
- SUBDIRS = . tests examples doc src
-+endif
-
- .PHONY: prepare
- prepare: copystamp
---
-2.7.4
-
diff --git a/meta/recipes-support/gpgme/gpgme/0008-do-not-auto-check-var-PYTHON.patch b/meta/recipes-support/gpgme/gpgme/0008-do-not-auto-check-var-PYTHON.patch
index dfea1bf78a..a11621ed64 100644
--- a/meta/recipes-support/gpgme/gpgme/0008-do-not-auto-check-var-PYTHON.patch
+++ b/meta/recipes-support/gpgme/gpgme/0008-do-not-auto-check-var-PYTHON.patch
@@ -1,4 +1,4 @@
-From 5bbf7a048b6d81d23186340e839f9f65b5b514b6 Mon Sep 17 00:00:00 2001
+From 84c389705e7742d2b68e144a5733e618441d293e Mon Sep 17 00:00:00 2001
From: Hongxu Jia <hongxu.jia@windriver.com>
Date: Fri, 10 May 2019 16:19:54 +0800
Subject: [PATCH] do not auto check var-PYTHON
@@ -14,18 +14,19 @@ Upstream-Status: Inappropriate [oe-core specific]
Signed-off-by: Hongxu Jia <hongxu.jia@windriver.com>
---
- configure.ac | 4 ++--
- 1 file changed, 2 insertions(+), 2 deletions(-)
+ configure.ac | 5 ++---
+ 1 file changed, 2 insertions(+), 3 deletions(-)
diff --git a/configure.ac b/configure.ac
-index 5ef00c0..bbcff93 100644
+index df892c9..8c6194e 100644
--- a/configure.ac
+++ b/configure.ac
-@@ -425,8 +425,8 @@ if test "$found_py" = "1"; then
+@@ -590,9 +590,8 @@ if test "$found_py" = "1"; then
if test "$found_py" = "1" -o "$found_py3" = "1"; then
# Reset everything, so that we can look for another Python.
m4_foreach([mym4pythonver],
-- [[2.7],[3.4],[3.5],[3.6],[3.7],[3.8],[3.9],[all]],
+- [[2.7],[3.4],[3.5],[3.6],[3.7],[3.8],[3.9],[3.10],
+- [3.11],[3.12],[all]],
- [unset PYTHON
+ [[2.7],[3.7]],
+ [
diff --git a/meta/recipes-support/gpgme/gpgme_1.17.1.bb b/meta/recipes-support/gpgme/gpgme_1.17.1.bb
deleted file mode 100644
index d95ed6c299..0000000000
--- a/meta/recipes-support/gpgme/gpgme_1.17.1.bb
+++ /dev/null
@@ -1,87 +0,0 @@
-SUMMARY = "High-level GnuPG encryption/signing API"
-DESCRIPTION = "GnuPG Made Easy (GPGME) is a library designed to make access to GnuPG easier for applications. It provides a High-Level Crypto API for encryption, decryption, signing, signature verification and key management"
-HOMEPAGE = "http://www.gnupg.org/gpgme.html"
-BUGTRACKER = "https://bugs.g10code.com/gnupg/index"
-
-LICENSE = "GPL-2.0-or-later & LGPL-2.1-or-later"
-LIC_FILES_CHKSUM = "file://COPYING;md5=94d55d512a9ba36caa9b7df079bae19f \
- file://COPYING.LESSER;md5=bbb461211a33b134d42ed5ee802b37ff \
- file://src/gpgme.h.in;endline=23;md5=2f0bf06d1c7dcb28532a9d0f94a7ca1d \
- file://src/engine.h;endline=22;md5=4b6d8ba313d9b564cc4d4cfb1640af9d"
-
-UPSTREAM_CHECK_URI = "https://gnupg.org/download/index.html"
-SRC_URI = "${GNUPG_MIRROR}/gpgme/${BP}.tar.bz2 \
- file://0001-Revert-build-Make-gpgme.m4-use-gpgrt-config-with-.pc.patch \
- file://0001-pkgconfig.patch \
- file://0002-gpgme-lang-python-gpg-error-config-should-not-be-use.patch \
- file://0003-Correctly-install-python-modules.patch \
- file://0004-python-import.patch \
- file://0005-gpgme-config-skip-all-lib-or-usr-lib-directories-in-.patch \
- file://0006-fix-build-path-issue.patch \
- file://0007-python-Add-variables-to-tests.patch \
- file://0008-do-not-auto-check-var-PYTHON.patch \
- file://0001-use-closefrom-on-linux-and-glibc-2.34.patch \
- "
-
-SRC_URI[sha256sum] = "711eabf5dd661b9b04be9edc9ace2a7bc031f6bd9d37a768d02d0efdef108f5f"
-
-DEPENDS = "libgpg-error libassuan"
-RDEPENDS:${PN}-cpp += "libstdc++"
-
-RDEPENDS:python2-gpg += "python-unixadmin"
-RDEPENDS:python3-gpg += "python3-unixadmin"
-
-BINCONFIG = "${bindir}/gpgme-config"
-
-# Note select python2 or python3, but you can't select both at the same time
-PACKAGECONFIG ??= "python3"
-PACKAGECONFIG[python2] = ",,python swig-native,"
-PACKAGECONFIG[python3] = ",,python3 swig-native,"
-
-# Default in configure.ac: "cl cpp python qt"
-# Supported: "cl cpp python python2 python3 qt"
-# python says 'search and find python2 or python3'
-
-# Building the C++ bindings for native requires a C++ compiler with C++11
-# support. Since these bindings are currently not needed, we can disable them.
-DEFAULT_LANGUAGES = ""
-DEFAULT_LANGUAGES:class-target = "cpp"
-LANGUAGES ?= "${DEFAULT_LANGUAGES} python"
-
-PYTHON_INHERIT = "${@bb.utils.contains('PACKAGECONFIG', 'python2', 'pythonnative', '', d)}"
-PYTHON_INHERIT .= "${@bb.utils.contains('PACKAGECONFIG', 'python3', 'python3native python3targetconfig', '', d)}"
-
-EXTRA_OECONF += '--enable-languages="${LANGUAGES}" \
- --disable-gpgconf-test \
- --disable-gpg-test \
- --disable-gpgsm-test \
- --disable-g13-test \
- --disable-lang-python-test \
-'
-
-inherit autotools texinfo binconfig-disabled pkgconfig setuptools3-base ${PYTHON_INHERIT} multilib_header
-
-export PKG_CONFIG='pkg-config'
-
-BBCLASSEXTEND = "native nativesdk"
-
-PACKAGES =+ "${PN}-cpp"
-PACKAGES =. "${@bb.utils.contains('PACKAGECONFIG', 'python2', 'python2-gpg ', '', d)}"
-PACKAGES =. "${@bb.utils.contains('PACKAGECONFIG', 'python3', 'python3-gpg ', '', d)}"
-
-FILES:${PN}-cpp = "${libdir}/libgpgmepp.so.*"
-FILES:python2-gpg = "${PYTHON_SITEPACKAGES_DIR}/*"
-FILES:python3-gpg = "${PYTHON_SITEPACKAGES_DIR}/*"
-FILES:${PN}-dev += "${datadir}/common-lisp/source/gpgme/*"
-
-CFLAGS:append:libc-musl = " -D__error_t_defined "
-do_configure:prepend () {
- # Else these could be used in preference to those in aclocal-copy
- rm -f ${S}/m4/gpg-error.m4
- rm -f ${S}/m4/libassuan.m4
- rm -f ${S}/m4/python.m4
-}
-
-do_install:append() {
- oe_multilib_header gpgme.h
-}
diff --git a/meta/recipes-support/gpgme/gpgme_1.23.2.bb b/meta/recipes-support/gpgme/gpgme_1.23.2.bb
new file mode 100644
index 0000000000..d8807b3af2
--- /dev/null
+++ b/meta/recipes-support/gpgme/gpgme_1.23.2.bb
@@ -0,0 +1,82 @@
+SUMMARY = "High-level GnuPG encryption/signing API"
+DESCRIPTION = "GnuPG Made Easy (GPGME) is a library designed to make access to GnuPG easier for applications. It provides a High-Level Crypto API for encryption, decryption, signing, signature verification and key management"
+HOMEPAGE = "http://www.gnupg.org/gpgme.html"
+BUGTRACKER = "https://bugs.g10code.com/gnupg/index"
+
+LICENSE = "GPL-2.0-or-later & LGPL-2.1-or-later"
+LIC_FILES_CHKSUM = "file://COPYING;md5=94d55d512a9ba36caa9b7df079bae19f \
+ file://COPYING.LESSER;md5=bbb461211a33b134d42ed5ee802b37ff \
+ file://src/gpgme.h.in;endline=23;md5=2f0bf06d1c7dcb28532a9d0f94a7ca1d \
+ file://src/engine.h;endline=22;md5=4b6d8ba313d9b564cc4d4cfb1640af9d"
+
+UPSTREAM_CHECK_URI = "https://gnupg.org/download/index.html"
+SRC_URI = "${GNUPG_MIRROR}/gpgme/${BP}.tar.bz2 \
+ file://0001-Revert-build-Make-gpgme.m4-use-gpgrt-config-with-.pc.patch \
+ file://0001-pkgconfig.patch \
+ file://0002-gpgme-lang-python-gpg-error-config-should-not-be-use.patch \
+ file://0003-Correctly-install-python-modules.patch \
+ file://0004-python-import.patch \
+ file://0005-gpgme-config-skip-all-lib-or-usr-lib-directories-in-.patch \
+ file://0006-fix-build-path-issue.patch \
+ file://0008-do-not-auto-check-var-PYTHON.patch \
+ file://0001-use-closefrom-on-linux-and-glibc-2.34.patch \
+ file://0001-posix-io.c-Use-off_t-instead-of-off64_t.patch \
+ file://0001-autogen.sh-remove-unknown-in-version.patch \
+ "
+
+SRC_URI[sha256sum] = "9499e8b1f33cccb6815527a1bc16049d35a6198a6c5fae0185f2bd561bce5224"
+
+PYTHON_DEPS = "${@bb.utils.contains('LANGUAGES', 'python', 'swig-native', '', d)}"
+
+DEPENDS = "libgpg-error libassuan ${PYTHON_DEPS}"
+RDEPENDS:${PN}-cpp += "libstdc++"
+
+RDEPENDS:python3-gpg += "python3-unixadmin"
+
+BINCONFIG = "${bindir}/gpgme-config"
+
+# Default in configure.ac: "cl cpp python qt"
+# Supported: "cl cpp python python2 python3 qt"
+# python says 'search and find python2 or python3'
+
+# Building the C++ bindings for native requires a C++ compiler with C++11
+# support. Since these bindings are currently not needed, we can disable them.
+DEFAULT_LANGUAGES = ""
+DEFAULT_LANGUAGES:class-target = "cpp"
+LANGUAGES ?= "${DEFAULT_LANGUAGES}"
+
+PYTHON_INHERIT = "${@bb.utils.contains('LANGUAGES', 'python', 'setuptools3-base', '', d)}"
+
+EXTRA_OECONF += '--enable-languages="${LANGUAGES}" \
+ --disable-gpgconf-test \
+ --disable-gpg-test \
+ --disable-gpgsm-test \
+ --disable-g13-test \
+'
+
+inherit autotools texinfo binconfig-disabled pkgconfig multilib_header
+inherit_defer ${PYTHON_INHERIT} python3native
+
+export PKG_CONFIG='pkg-config'
+
+BBCLASSEXTEND = "native nativesdk"
+
+PACKAGES =+ "${PN}-cpp python3-gpg"
+
+FILES:${PN}-cpp = "${libdir}/libgpgmepp.so.*"
+FILES:python3-gpg = "${PYTHON_SITEPACKAGES_DIR}/*"
+FILES:${PN}-dev += "${datadir}/common-lisp/source/gpgme/*"
+
+CFLAGS:append:libc-musl = " -D__error_t_defined "
+CACHED_CONFIGUREVARS:libc-musl = "ac_cv_sys_file_offset_bits=no"
+
+do_configure:prepend () {
+ # Else these could be used in preference to those in aclocal-copy
+ rm -f ${S}/m4/gpg-error.m4
+ rm -f ${S}/m4/libassuan.m4
+ rm -f ${S}/m4/python.m4
+}
+
+do_install:append() {
+ oe_multilib_header gpgme.h
+}
diff --git a/meta/recipes-support/icu/icu/0001-Disable-LDFLAGSICUDT-for-Linux.patch b/meta/recipes-support/icu/icu/0001-Disable-LDFLAGSICUDT-for-Linux.patch
index 2968d571bb..c0e9e2806b 100644
--- a/meta/recipes-support/icu/icu/0001-Disable-LDFLAGSICUDT-for-Linux.patch
+++ b/meta/recipes-support/icu/icu/0001-Disable-LDFLAGSICUDT-for-Linux.patch
@@ -1,4 +1,4 @@
-From 0c82d6aa02c08e41b13c83b14782bd7024e25d59 Mon Sep 17 00:00:00 2001
+From 0f4e9eee64bd4220aa4fae1ab4f7bbf10b87cd69 Mon Sep 17 00:00:00 2001
From: Khem Raj <raj.khem@gmail.com>
Date: Sat, 15 Feb 2014 21:06:42 +0000
Subject: [PATCH] Disable LDFLAGSICUDT for Linux
@@ -7,14 +7,14 @@ Upstream-Status: Inappropriate [ OE Configuration ]
Signed-off-by: Khem Raj <raj.khem@gmail.com>
---
- source/config/mh-linux | 2 +-
+ config/mh-linux | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/config/mh-linux b/config/mh-linux
-index 366f0cc..2689aab 100644
+index 53d6780..b622513 100644
--- a/config/mh-linux
+++ b/config/mh-linux
-@@ -21,7 +21,7 @@ LD_RPATH= -Wl,-zorigin,-rpath,'$$'ORIGIN
+@@ -23,7 +23,7 @@ LD_RPATH= -Wl,-zorigin,-rpath,'$$'ORIGIN
LD_RPATH_PRE = -Wl,-rpath,
## These are the library specific LDFLAGS
@@ -23,6 +23,3 @@ index 366f0cc..2689aab 100644
## Compiler switch to embed a library name
# The initial tab in the next line is to prevent icu-config from reading it.
---
-1.7.10.4
-
diff --git a/meta/recipes-support/icu/icu/0001-icu-Added-armeb-support.patch b/meta/recipes-support/icu/icu/0001-icu-Added-armeb-support.patch
index 578517b57f..a25285fb54 100644
--- a/meta/recipes-support/icu/icu/0001-icu-Added-armeb-support.patch
+++ b/meta/recipes-support/icu/icu/0001-icu-Added-armeb-support.patch
@@ -1,4 +1,4 @@
-From e3c25af4b40d64f098e7b8ae5a91fdc3c90163c4 Mon Sep 17 00:00:00 2001
+From cf55c077b77b7a219502364eaadf12c074ab005e Mon Sep 17 00:00:00 2001
From: Lei Maohui <leimaohui@cn.fujitsu.com>
Date: Wed, 8 May 2019 14:42:30 +0900
Subject: [PATCH] icu: Added armeb support.
@@ -8,16 +8,15 @@ Make icu support arm32 BE.
Upstream-Status: Pending
Signed-off-by: Lei Maohui <leimaohui@cn.fujitsu.com>
-
---
i18n/double-conversion-utils.h | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
-diff --git a/source/i18n/double-conversion-utils.h b/source/i18n/double-conversion-utils.h
-index 7f23e0a..7f89094 100644
+diff --git a/i18n/double-conversion-utils.h b/i18n/double-conversion-utils.h
+index 303668f..7d859dc 100644
--- a/i18n/double-conversion-utils.h
+++ b/i18n/double-conversion-utils.h
-@@ -115,7 +115,7 @@ int main(int argc, char** argv) {
+@@ -146,7 +146,7 @@ int main(int argc, char** argv) {
//
// If it prints "correct" then the architecture should be here, in the "correct" section.
#if defined(_M_X64) || defined(__x86_64__) || \
diff --git a/meta/recipes-support/icu/icu/fix-install-manx.patch b/meta/recipes-support/icu/icu/fix-install-manx.patch
index 925b064ebd..7526bde370 100644
--- a/meta/recipes-support/icu/icu/fix-install-manx.patch
+++ b/meta/recipes-support/icu/icu/fix-install-manx.patch
@@ -1,4 +1,4 @@
-From a6ddabc8cadb76bfe2d2e374a6702442cfe51cce Mon Sep 17 00:00:00 2001
+From 2d544cac238eccbfc32cafc9502ddf6e00994211 Mon Sep 17 00:00:00 2001
From: Ross Burton <ross.burton@intel.com>
Date: Fri, 9 Oct 2015 17:50:41 +0100
Subject: [PATCH] icu: fix install race
@@ -13,13 +13,12 @@ in the install command.
Upstream-Status: Pending
Signed-off-by: Ross Burton <ross.burton@intel.com>
-
---
- source/Makefile.in | 8 ++++----
+ Makefile.in | 8 ++++----
1 file changed, 4 insertions(+), 4 deletions(-)
diff --git a/Makefile.in b/Makefile.in
-index be9435b..ada20d7 100644
+index 8366f01..79b91c9 100644
--- a/Makefile.in
+++ b/Makefile.in
@@ -77,7 +77,7 @@ EXTRA_DATA =
diff --git a/meta/recipes-support/icu/icu_71.1.bb b/meta/recipes-support/icu/icu_71.1.bb
deleted file mode 100644
index d8ef2a3746..0000000000
--- a/meta/recipes-support/icu/icu_71.1.bb
+++ /dev/null
@@ -1,150 +0,0 @@
-SUMMARY = "International Component for Unicode libraries"
-DESCRIPTION = "The International Component for Unicode (ICU) is a mature, \
-portable set of C/C++ and Java libraries for Unicode support, software \
-internationalization (I18N) and globalization (G11N), giving applications the \
-same results on all platforms."
-HOMEPAGE = "http://site.icu-project.org/"
-
-LICENSE = "ICU"
-DEPENDS = "icu-native autoconf-archive"
-DEPENDS:class-native = "autoconf-archive-native"
-
-CVE_PRODUCT = "international_components_for_unicode"
-
-S = "${WORKDIR}/icu/source"
-SPDX_S = "${WORKDIR}/icu"
-STAGING_ICU_DIR_NATIVE = "${STAGING_DATADIR_NATIVE}/${BPN}/${PV}"
-
-BINCONFIG = "${bindir}/icu-config"
-
-ICU_MAJOR_VER = "${@d.getVar('PV').split('.')[0]}"
-
-inherit autotools pkgconfig binconfig multilib_script
-
-MULTILIB_SCRIPTS = "${PN}-dev:${bindir}/icu-config"
-
-# ICU needs the native build directory as an argument to its --with-cross-build option when
-# cross-compiling. Taken the situation that different builds may share a common sstate-cache
-# into consideration, the native build directory needs to be staged.
-EXTRA_OECONF = "--with-cross-build=${STAGING_ICU_DIR_NATIVE}"
-EXTRA_OECONF:class-native = ""
-EXTRA_OECONF:class-nativesdk = "--with-cross-build=${STAGING_ICU_DIR_NATIVE}"
-
-EXTRA_OECONF:append:class-target = "${@oe.utils.conditional('SITEINFO_ENDIANNESS', 'be', ' --with-data-packaging=archive', '', d)}"
-TARGET_CXXFLAGS:append = "${@oe.utils.conditional('SITEINFO_ENDIANNESS', 'be', ' -DICU_DATA_DIR=\\""${datadir}/${BPN}/${PV}\\""', '', d)}"
-
-ASNEEDED = ""
-
-do_compile:prepend:class-target () {
- # Make sure certain build host references do not end up being compiled
- # in the image. This only affects libicutu and icu-dbg
- sed \
- -e 's,DU_BUILD=,DU_BUILD_unused=,g' \
- -e '/^CPPFLAGS.*/ s,--sysroot=${STAGING_DIR_TARGET},,g' \
- -i ${B}/tools/toolutil/Makefile
-}
-
-PREPROCESS_RELOCATE_DIRS = "${datadir}/${BPN}/${PV}"
-do_install:append:class-native() {
- mkdir -p ${D}/${STAGING_ICU_DIR_NATIVE}/config
- cp -r ${B}/config/icucross.mk ${D}/${STAGING_ICU_DIR_NATIVE}/config
- cp -r ${B}/config/icucross.inc ${D}/${STAGING_ICU_DIR_NATIVE}/config
- cp -r ${B}/lib ${D}/${STAGING_ICU_DIR_NATIVE}
- cp -r ${B}/bin ${D}/${STAGING_ICU_DIR_NATIVE}
- cp -r ${B}/tools ${D}/${STAGING_ICU_DIR_NATIVE}
-}
-
-do_install:append:class-target() {
- # The native pkgdata can not generate the correct data file.
- # Use icupkg to re-generate it.
- if [ "${SITEINFO_ENDIANNESS}" = "be" ] ; then
- rm -f ${D}/${datadir}/${BPN}/${PV}/icudt${ICU_MAJOR_VER}b.dat
- icupkg -tb ${S}/data/in/icudt${ICU_MAJOR_VER}l.dat ${D}/${datadir}/${BPN}/${PV}/icudt${ICU_MAJOR_VER}b.dat
- fi
-
- # Remove build host references...
- sed -i \
- -e 's,--sysroot=${STAGING_DIR_TARGET},,g' \
- -e 's|${DEBUG_PREFIX_MAP}||g' \
- -e 's:${HOSTTOOLS_DIR}/::g' \
- ${D}/${bindir}/icu-config ${D}/${libdir}/${BPN}/${PV}/Makefile.inc \
- ${D}/${libdir}/${BPN}/${PV}/pkgdata.inc
-}
-
-PACKAGES =+ "libicudata libicuuc libicui18n libicutu libicuio"
-
-FILES:${PN}-dev += "${libdir}/${BPN}/"
-
-FILES:libicudata = "${libdir}/libicudata.so.*"
-FILES:libicuuc = "${libdir}/libicuuc.so.*"
-FILES:libicui18n = "${libdir}/libicui18n.so.*"
-FILES:libicutu = "${libdir}/libicutu.so.*"
-FILES:libicuio = "${libdir}/libicuio.so.*"
-
-BBCLASSEXTEND = "native nativesdk"
-
-LIC_FILES_CHKSUM = "file://../LICENSE;md5=a89d03060ff9c46552434dbd1fe3ed1f"
-
-def icu_download_version(d):
- pvsplit = d.getVar('PV').split('.')
- return pvsplit[0] + "_" + pvsplit[1]
-
-def icu_download_folder(d):
- pvsplit = d.getVar('PV').split('.')
- return pvsplit[0] + "-" + pvsplit[1]
-
-ICU_PV = "${@icu_download_version(d)}"
-ICU_FOLDER = "${@icu_download_folder(d)}"
-
-# http://errors.yoctoproject.org/Errors/Details/20486/
-ARM_INSTRUCTION_SET:armv4 = "arm"
-ARM_INSTRUCTION_SET:armv5 = "arm"
-
-BASE_SRC_URI = "https://github.com/unicode-org/icu/releases/download/release-${ICU_FOLDER}/icu4c-${ICU_PV}-src.tgz"
-DATA_SRC_URI = "https://github.com/unicode-org/icu/releases/download/release-${ICU_FOLDER}/icu4c-${ICU_PV}-data.zip"
-SRC_URI = "${BASE_SRC_URI};name=code \
- ${DATA_SRC_URI};name=data \
- file://filter.json \
- file://fix-install-manx.patch \
- file://0001-icu-Added-armeb-support.patch \
- "
-
-SRC_URI:append:class-target = "\
- file://0001-Disable-LDFLAGSICUDT-for-Linux.patch \
- "
-SRC_URI[code.sha256sum] = "67a7e6e51f61faf1306b6935333e13b2c48abd8da6d2f46ce6adca24b1e21ebf"
-SRC_URI[data.sha256sum] = "e3882b4fece6e5e039f22c3189b7ba224180fd26fdbfa9db284617455b93e804"
-
-UPSTREAM_CHECK_REGEX = "icu4c-(?P<pver>\d+(_\d+)+)-src"
-UPSTREAM_CHECK_URI = "https://github.com/unicode-org/icu/releases"
-
-EXTRA_OECONF:append:libc-musl = " ac_cv_func_strtod_l=no"
-
-PACKAGECONFIG ?= ""
-PACKAGECONFIG[make-icudata] = ",,,"
-
-do_make_icudata:class-target () {
- ${@bb.utils.contains('PACKAGECONFIG', 'make-icudata', '', 'exit 0', d)}
- cd ${S}
- rm -rf data
- cp -a ${WORKDIR}/data .
- AR='${BUILD_AR}' \
- CC='${BUILD_CC}' \
- CPP='${BUILD_CPP}' \
- CXX='${BUILD_CXX}' \
- RANLIB='${BUILD_RANLIB}' \
- CFLAGS='${BUILD_CFLAGS}' \
- CPPFLAGS='${BUILD_CPPFLAGS}' \
- CXXFLAGS='${BUILD_CXXFLAGS}' \
- LDFLAGS='${BUILD_LDFLAGS}' \
- ICU_DATA_FILTER_FILE=${WORKDIR}/filter.json \
- ./runConfigureICU Linux --with-data-packaging=archive
- oe_runmake
- install -Dm644 ${S}/data/out/icudt${ICU_MAJOR_VER}l.dat ${S}/data/in/icudt${ICU_MAJOR_VER}l.dat
-}
-
-do_make_icudata() {
- :
-}
-
-addtask make_icudata before do_configure after do_patch do_prepare_recipe_sysroot
diff --git a/meta/recipes-support/icu/icu_74-2.bb b/meta/recipes-support/icu/icu_74-2.bb
new file mode 100644
index 0000000000..8352bf2a5b
--- /dev/null
+++ b/meta/recipes-support/icu/icu_74-2.bb
@@ -0,0 +1,149 @@
+SUMMARY = "International Component for Unicode libraries"
+DESCRIPTION = "The International Component for Unicode (ICU) is a mature, \
+portable set of C/C++ and Java libraries for Unicode support, software \
+internationalization (I18N) and globalization (G11N), giving applications the \
+same results on all platforms."
+HOMEPAGE = "http://site.icu-project.org/"
+
+LICENSE = "ICU"
+DEPENDS = "icu-native autoconf-archive-native"
+
+CVE_PRODUCT = "international_components_for_unicode"
+
+S = "${WORKDIR}/icu/source"
+SPDX_S = "${WORKDIR}/icu"
+STAGING_ICU_DIR_NATIVE = "${STAGING_DATADIR_NATIVE}/${BPN}/${PV}"
+
+ICU_MAJOR_VER = "${@d.getVar('PV').split('-')[0]}"
+
+inherit autotools pkgconfig github-releases
+
+# ICU needs the native build directory as an argument to its --with-cross-build option when
+# cross-compiling. Taken the situation that different builds may share a common sstate-cache
+# into consideration, the native build directory needs to be staged.
+EXTRA_OECONF = "--with-cross-build=${STAGING_ICU_DIR_NATIVE} --disable-icu-config"
+EXTRA_OECONF:class-native = "--disable-icu-config"
+EXTRA_OECONF:class-nativesdk = "--with-cross-build=${STAGING_ICU_DIR_NATIVE} --disable-icu-config"
+
+EXTRA_OECONF:append:class-target = "${@oe.utils.conditional('SITEINFO_ENDIANNESS', 'be', ' --with-data-packaging=archive', '', d)}"
+TARGET_CXXFLAGS:append = "${@oe.utils.conditional('SITEINFO_ENDIANNESS', 'be', ' -DICU_DATA_DIR=\\""${datadir}/${BPN}/${PV}\\""', '', d)}"
+
+ASNEEDED = ""
+
+do_compile:prepend:class-target () {
+ # Make sure certain build host references do not end up being compiled
+ # in the image. This only affects libicutu and icu-dbg
+ sed \
+ -e 's,DU_BUILD=,DU_BUILD_unused=,g' \
+ -e '/^CPPFLAGS.*/ s,--sysroot=${STAGING_DIR_TARGET},,g' \
+ -i ${B}/tools/toolutil/Makefile
+}
+
+PREPROCESS_RELOCATE_DIRS = "${datadir}/${BPN}/${PV}"
+do_install:append:class-native() {
+ mkdir -p ${D}/${STAGING_ICU_DIR_NATIVE}/config
+ cp -r ${B}/config/icucross.mk ${D}/${STAGING_ICU_DIR_NATIVE}/config
+ cp -r ${B}/config/icucross.inc ${D}/${STAGING_ICU_DIR_NATIVE}/config
+ cp -r ${B}/lib ${D}/${STAGING_ICU_DIR_NATIVE}
+ cp -r ${B}/bin ${D}/${STAGING_ICU_DIR_NATIVE}
+ cp -r ${B}/tools ${D}/${STAGING_ICU_DIR_NATIVE}
+}
+
+do_install:append:class-target() {
+ # The native pkgdata can not generate the correct data file.
+ # Use icupkg to re-generate it.
+ if [ "${SITEINFO_ENDIANNESS}" = "be" ] ; then
+ rm -f ${D}/${datadir}/${BPN}/${@icu_install_folder(d)}/icudt${ICU_MAJOR_VER}b.dat
+ icupkg -tb ${S}/data/in/icudt${ICU_MAJOR_VER}l.dat ${D}/${datadir}/${BPN}/${@icu_install_folder(d)}/icudt${ICU_MAJOR_VER}b.dat
+ fi
+
+ # Remove build host references...
+ sed -i \
+ -e 's,--sysroot=${STAGING_DIR_TARGET},,g' \
+ -e 's|${DEBUG_PREFIX_MAP}||g' \
+ -e 's:${HOSTTOOLS_DIR}/::g' \
+ ${D}/${libdir}/${BPN}/${@icu_install_folder(d)}/Makefile.inc \
+ ${D}/${libdir}/${BPN}/${@icu_install_folder(d)}/pkgdata.inc
+}
+
+PACKAGES =+ "libicudata libicuuc libicui18n libicutu libicuio"
+
+FILES:${PN}-dev += "${libdir}/${BPN}/"
+
+FILES:libicudata = "${libdir}/libicudata.so.*"
+FILES:libicuuc = "${libdir}/libicuuc.so.*"
+FILES:libicui18n = "${libdir}/libicui18n.so.*"
+FILES:libicutu = "${libdir}/libicutu.so.*"
+FILES:libicuio = "${libdir}/libicuio.so.*"
+
+BBCLASSEXTEND = "native nativesdk"
+
+LIC_FILES_CHKSUM = "file://../LICENSE;md5=08dc3852df8fffa807301902ad899ff8"
+
+def icu_download_version(d):
+ pvsplit = d.getVar('PV').split('-')
+ return pvsplit[0] + "_" + pvsplit[1]
+
+def icu_download_folder(d):
+ pvsplit = d.getVar('PV').split('-')
+ return pvsplit[0] + "-" + pvsplit[1]
+
+def icu_install_folder(d):
+ pvsplit = d.getVar('PV').split('-')
+ return pvsplit[0] + "." + pvsplit[1]
+
+ICU_PV = "${@icu_download_version(d)}"
+ICU_FOLDER = "${@icu_download_folder(d)}"
+
+# http://errors.yoctoproject.org/Errors/Details/20486/
+ARM_INSTRUCTION_SET:armv4 = "arm"
+ARM_INSTRUCTION_SET:armv5 = "arm"
+
+BASE_SRC_URI = "${GITHUB_BASE_URI}/download/release-${ICU_FOLDER}/icu4c-${ICU_PV}-src.tgz"
+DATA_SRC_URI = "${GITHUB_BASE_URI}/download/release-${ICU_FOLDER}/icu4c-${ICU_PV}-data.zip"
+SRC_URI = "${BASE_SRC_URI};name=code \
+ ${DATA_SRC_URI};name=data \
+ file://filter.json \
+ file://fix-install-manx.patch \
+ file://0001-icu-Added-armeb-support.patch \
+ "
+
+SRC_URI:append:class-target = "\
+ file://0001-Disable-LDFLAGSICUDT-for-Linux.patch \
+ "
+SRC_URI[code.sha256sum] = "68db082212a96d6f53e35d60f47d38b962e9f9d207a74cfac78029ae8ff5e08c"
+SRC_URI[data.sha256sum] = "c28c3ca5f4ba3384781797138a294ca360988d4322674ad4d51e52f5d9b0a2b6"
+
+UPSTREAM_CHECK_REGEX = "releases/tag/release-(?P<pver>(?!.+rc).+)"
+GITHUB_BASE_URI = "https://github.com/unicode-org/icu/releases"
+
+EXTRA_OECONF:append:libc-musl = " ac_cv_func_strtod_l=no"
+
+PACKAGECONFIG ?= ""
+PACKAGECONFIG[make-icudata] = ",,,"
+
+do_make_icudata:class-target () {
+ ${@bb.utils.contains('PACKAGECONFIG', 'make-icudata', '', 'exit 0', d)}
+ cd ${S}
+ rm -rf data
+ cp -a ${WORKDIR}/data .
+ AR='${BUILD_AR}' \
+ CC='${BUILD_CC}' \
+ CPP='${BUILD_CPP}' \
+ CXX='${BUILD_CXX}' \
+ RANLIB='${BUILD_RANLIB}' \
+ CFLAGS='${BUILD_CFLAGS}' \
+ CPPFLAGS='${BUILD_CPPFLAGS}' \
+ CXXFLAGS='${BUILD_CXXFLAGS}' \
+ LDFLAGS='${BUILD_LDFLAGS}' \
+ ICU_DATA_FILTER_FILE=${WORKDIR}/filter.json \
+ ./runConfigureICU Linux --with-data-packaging=archive
+ oe_runmake
+ install -Dm644 ${S}/data/out/icudt${ICU_MAJOR_VER}l.dat ${S}/data/in/icudt${ICU_MAJOR_VER}l.dat
+}
+
+do_make_icudata() {
+ :
+}
+
+addtask make_icudata before do_configure after do_patch do_prepare_recipe_sysroot
diff --git a/meta/recipes-support/iso-codes/iso-codes_4.16.0.bb b/meta/recipes-support/iso-codes/iso-codes_4.16.0.bb
new file mode 100644
index 0000000000..f33cce1a9d
--- /dev/null
+++ b/meta/recipes-support/iso-codes/iso-codes_4.16.0.bb
@@ -0,0 +1,24 @@
+SUMMARY = "ISO language, territory, currency, script codes and their translations"
+DESCRIPTION = "Provides lists of various ISO standards (e.g. country, \
+language, language scripts, and currency names) in one place, rather \
+than repeated in many programs throughout the system."
+HOMEPAGE = "https://salsa.debian.org/iso-codes-team/iso-codes"
+BUGTRACKER = "https://salsa.debian.org/iso-codes-team/iso-codes/issues"
+
+LICENSE = "LGPL-2.1-only"
+LIC_FILES_CHKSUM = "file://COPYING;md5=4fbd65380cdd255951079008b364516c"
+
+SRC_URI = "git://salsa.debian.org/iso-codes-team/iso-codes.git;protocol=https;branch=main;"
+SRCREV = "c2fcaadc832ed9f858950a43994973442d85ef4f"
+
+# inherit gettext cannot be used, because it adds gettext-native to BASEDEPENDS which
+# are inhibited by allarch
+DEPENDS = "gettext-native"
+
+S = "${WORKDIR}/git"
+
+inherit allarch autotools
+
+FILES:${PN} += "${datadir}/xml/"
+
+BBCLASSEXTEND += "native"
diff --git a/meta/recipes-support/iso-codes/iso-codes_4.9.0.bb b/meta/recipes-support/iso-codes/iso-codes_4.9.0.bb
deleted file mode 100644
index f51edf09a4..0000000000
--- a/meta/recipes-support/iso-codes/iso-codes_4.9.0.bb
+++ /dev/null
@@ -1,22 +0,0 @@
-SUMMARY = "ISO language, territory, currency, script codes and their translations"
-DESCRIPTION = "Provides lists of various ISO standards (e.g. country, \
-language, language scripts, and currency names) in one place, rather \
-than repeated in many programs throughout the system."
-HOMEPAGE = "https://salsa.debian.org/iso-codes-team/iso-codes"
-BUGTRACKER = "https://salsa.debian.org/iso-codes-team/iso-codes/issues"
-
-LICENSE = "LGPL-2.1-only"
-LIC_FILES_CHKSUM = "file://COPYING;md5=4fbd65380cdd255951079008b364516c"
-
-SRC_URI = "git://salsa.debian.org/iso-codes-team/iso-codes.git;protocol=https;branch=main;"
-SRCREV = "35ae2024024eb8e6603d3034dbbc406594f3874c"
-
-# inherit gettext cannot be used, because it adds gettext-native to BASEDEPENDS which
-# are inhibited by allarch
-DEPENDS = "gettext-native"
-
-S = "${WORKDIR}/git"
-
-inherit allarch autotools
-
-FILES:${PN} += "${datadir}/xml/"
diff --git a/meta/recipes-support/libassuan/libassuan_2.5.5.bb b/meta/recipes-support/libassuan/libassuan_2.5.5.bb
deleted file mode 100644
index 2bab3ac955..0000000000
--- a/meta/recipes-support/libassuan/libassuan_2.5.5.bb
+++ /dev/null
@@ -1,38 +0,0 @@
-SUMMARY = "IPC library used by GnuPG and GPGME"
-DESCRIPTION = "A small library implementing the so-called Assuan protocol. \
-This protocol is used for IPC between most newer GnuPG components. \
-Both, server and client side functions are provided. "
-HOMEPAGE = "http://www.gnupg.org/related_software/libassuan/"
-BUGTRACKER = "https://bugs.g10code.com/gnupg/index"
-
-LICENSE = "GPL-3.0-or-later & LGPL-2.1-or-later"
-LICENSE:${PN} = "LGPL-2.1-or-later"
-LICENSE:${PN}-doc = "GPL-3.0-or-later"
-LIC_FILES_CHKSUM = "file://COPYING;md5=f27defe1e96c2e1ecd4e0c9be8967949 \
- file://COPYING.LIB;md5=2d5025d4aa3495befef8f17206a5b0a1 \
- file://src/assuan.c;endline=20;md5=ab92143a5a2adabd06d7994d1467ea5c\
- file://src/assuan-defs.h;endline=20;md5=15d950c83e82978e35b35e790d7e4d39"
-
-DEPENDS = "libgpg-error"
-
-UPSTREAM_CHECK_URI = "https://gnupg.org/download/index.html"
-SRC_URI = "${GNUPG_MIRROR}/libassuan/libassuan-${PV}.tar.bz2 \
- file://libassuan-add-pkgconfig-support.patch \
- "
-
-SRC_URI[sha256sum] = "8e8c2fcc982f9ca67dcbb1d95e2dc746b1739a4668bc20b3a3c5be632edb34e4"
-
-BINCONFIG = "${bindir}/libassuan-config"
-
-inherit autotools texinfo binconfig-disabled pkgconfig multilib_header
-
-do_configure:prepend () {
- # Else these could be used in preference to those in aclocal-copy
- rm -f ${S}/m4/*.m4
-}
-
-do_install:append () {
- oe_multilib_header assuan.h
-}
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-support/libassuan/libassuan_2.5.7.bb b/meta/recipes-support/libassuan/libassuan_2.5.7.bb
new file mode 100644
index 0000000000..d1e622635f
--- /dev/null
+++ b/meta/recipes-support/libassuan/libassuan_2.5.7.bb
@@ -0,0 +1,38 @@
+SUMMARY = "IPC library used by GnuPG and GPGME"
+DESCRIPTION = "A small library implementing the so-called Assuan protocol. \
+This protocol is used for IPC between most newer GnuPG components. \
+Both, server and client side functions are provided. "
+HOMEPAGE = "http://www.gnupg.org/related_software/libassuan/"
+BUGTRACKER = "https://bugs.g10code.com/gnupg/index"
+
+LICENSE = "GPL-3.0-or-later & LGPL-2.1-or-later"
+LICENSE:${PN} = "LGPL-2.1-or-later"
+LICENSE:${PN}-doc = "GPL-3.0-or-later"
+LIC_FILES_CHKSUM = "file://COPYING;md5=f27defe1e96c2e1ecd4e0c9be8967949 \
+ file://COPYING.LIB;md5=2d5025d4aa3495befef8f17206a5b0a1 \
+ file://src/assuan.c;endline=20;md5=ab92143a5a2adabd06d7994d1467ea5c\
+ file://src/assuan-defs.h;endline=20;md5=15d950c83e82978e35b35e790d7e4d39"
+
+DEPENDS = "libgpg-error"
+
+UPSTREAM_CHECK_URI = "https://gnupg.org/download/index.html"
+SRC_URI = "${GNUPG_MIRROR}/libassuan/libassuan-${PV}.tar.bz2 \
+ file://libassuan-add-pkgconfig-support.patch \
+ "
+
+SRC_URI[sha256sum] = "0103081ffc27838a2e50479153ca105e873d3d65d8a9593282e9c94c7e6afb76"
+
+BINCONFIG = "${bindir}/libassuan-config"
+
+inherit autotools texinfo binconfig-disabled pkgconfig multilib_header
+
+do_configure:prepend () {
+ # Else these could be used in preference to those in aclocal-copy
+ rm -f ${S}/m4/*.m4
+}
+
+do_install:append () {
+ oe_multilib_header assuan.h
+}
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-support/libatomic-ops/libatomic-ops_7.6.12.bb b/meta/recipes-support/libatomic-ops/libatomic-ops_7.6.12.bb
deleted file mode 100644
index 8ea8436977..0000000000
--- a/meta/recipes-support/libatomic-ops/libatomic-ops_7.6.12.bb
+++ /dev/null
@@ -1,22 +0,0 @@
-SUMMARY = "A library for atomic integer operations"
-DESCRIPTION = "Package provides semi-portable access to hardware-provided atomic memory update operations on a number of architectures."
-HOMEPAGE = "https://github.com/ivmai/libatomic_ops/"
-SECTION = "optional"
-PROVIDES += "libatomics-ops"
-LICENSE = "GPL-2.0-only & MIT"
-LIC_FILES_CHKSUM = "file://COPYING;md5=b234ee4d69f5fce4486a80fdaf4a4263 \
- file://doc/LICENSING.txt;md5=e00dd5c8ac03a14c5ae5225a4525fa2d \
- "
-
-SRC_URI = "https://github.com/ivmai/libatomic_ops/releases/download/v${PV}/libatomic_ops-${PV}.tar.gz"
-UPSTREAM_CHECK_URI = "https://github.com/ivmai/libatomic_ops/releases"
-
-SRC_URI[sha256sum] = "f0ab566e25fce08b560e1feab6a3db01db4a38e5bc687804334ef3920c549f3e"
-
-S = "${WORKDIR}/libatomic_ops-${PV}"
-
-ALLOW_EMPTY:${PN} = "1"
-
-inherit autotools pkgconfig
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-support/libatomic-ops/libatomic-ops_7.8.2.bb b/meta/recipes-support/libatomic-ops/libatomic-ops_7.8.2.bb
new file mode 100644
index 0000000000..824400e743
--- /dev/null
+++ b/meta/recipes-support/libatomic-ops/libatomic-ops_7.8.2.bb
@@ -0,0 +1,22 @@
+SUMMARY = "A library for atomic integer operations"
+DESCRIPTION = "Package provides semi-portable access to hardware-provided atomic memory update operations on a number of architectures."
+HOMEPAGE = "https://github.com/ivmai/libatomic_ops/"
+SECTION = "optional"
+PROVIDES += "libatomics-ops"
+LICENSE = "GPL-2.0-only & MIT"
+LIC_FILES_CHKSUM = "file://COPYING;md5=b234ee4d69f5fce4486a80fdaf4a4263 \
+ file://LICENSE;md5=5700d28353dfa2f191ca9b1bd707865e \
+ "
+
+SRC_URI = "${GITHUB_BASE_URI}/download/v${PV}/libatomic_ops-${PV}.tar.gz"
+GITHUB_BASE_URI = "https://github.com/ivmai/libatomic_ops/releases"
+
+SRC_URI[sha256sum] = "d305207fe207f2b3fb5cb4c019da12b44ce3fcbc593dfd5080d867b1a2419b51"
+
+S = "${WORKDIR}/libatomic_ops-${PV}"
+
+ALLOW_EMPTY:${PN} = "1"
+
+inherit autotools pkgconfig github-releases
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-support/libbsd/libbsd_0.11.6.bb b/meta/recipes-support/libbsd/libbsd_0.11.6.bb
deleted file mode 100644
index 74fc8ef4e5..0000000000
--- a/meta/recipes-support/libbsd/libbsd_0.11.6.bb
+++ /dev/null
@@ -1,43 +0,0 @@
-# Copyright (C) 2013 Khem Raj <raj.khem@gmail.com>
-# Released under the MIT license (see COPYING.MIT for the terms)
-
-SUMMARY = "Library of utility functions from BSD systems"
-DESCRIPTION = "This library provides useful functions commonly found on BSD systems, \
- and lacking on others like GNU systems, thus making it easier to port \
- projects with strong BSD origins, without needing to embed the same \
- code over and over again on each project."
-
-HOMEPAGE = "https://libbsd.freedesktop.org/wiki/"
-# There seems to be more licenses used in the code, I don't think we want to list them all here, complete list:
-# OE @ ~/projects/libbsd $ grep ^License: COPYING | sort -u
-# License: BSD-2-clause
-# License: BSD-2-clause-NetBSD
-# License: BSD-2-clause-author
-# License: BSD-2-clause-verbatim
-# License: BSD-3-clause
-# License: BSD-3-clause-author
-# License: BSD-3-clause-John-Birrell
-# License: BSD-3-clause-Regents
-# License: BSD-4-clause-Christopher-G-Demetriou
-# License: BSD-4-clause-Niels-Provos
-# License: BSD-5-clause-Peter-Wemm
-# License: Beerware
-# License: Expat
-# License: ISC
-# License: ISC-Original
-# License: public-domain
-# License: public-domain-Colin-Plumb
-LICENSE = "BSD-3-Clause & BSD-4-Clause & ISC & PD"
-LICENSE:${PN} = "BSD-3-Clause & ISC & PD"
-LIC_FILES_CHKSUM = "file://COPYING;md5=c2c635b94c8dcd3593f53e10fa8a499e"
-SECTION = "libs"
-
-SRC_URI = "https://libbsd.freedesktop.org/releases/${BPN}-${PV}.tar.xz"
-
-SRC_URI[sha256sum] = "19b38f3172eaf693e6e1c68714636190c7e48851e45224d720b3b5bc0499b5df"
-
-inherit autotools pkgconfig
-
-DEPENDS += "libmd"
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-support/libbsd/libbsd_0.12.2.bb b/meta/recipes-support/libbsd/libbsd_0.12.2.bb
new file mode 100644
index 0000000000..7d5e88f293
--- /dev/null
+++ b/meta/recipes-support/libbsd/libbsd_0.12.2.bb
@@ -0,0 +1,47 @@
+SUMMARY = "Library of utility functions from BSD systems"
+DESCRIPTION = "This library provides useful functions commonly found on BSD systems, \
+ and lacking on others like GNU systems, thus making it easier to port \
+ projects with strong BSD origins, without needing to embed the same \
+ code over and over again on each project."
+
+HOMEPAGE = "https://libbsd.freedesktop.org/wiki/"
+# There seems to be more licenses used in the code, I don't think we want to list them all here, complete list:
+# OE @ ~/projects/libbsd $ grep ^License: COPYING | sort -u
+# License: BSD-2-clause
+# License: BSD-2-clause-NetBSD
+# License: BSD-2-clause-author
+# License: BSD-2-clause-verbatim
+# License: BSD-3-clause
+# License: BSD-3-clause-author
+# License: BSD-3-clause-John-Birrell
+# License: BSD-3-clause-Regents
+# License: BSD-4-clause-Christopher-G-Demetriou
+# License: BSD-4-clause-Niels-Provos
+# License: BSD-5-clause-Peter-Wemm
+# License: Beerware
+# License: Expat
+# License: ISC
+# License: ISC-Original
+# License: public-domain
+# License: public-domain-Colin-Plumb
+LICENSE = "BSD-3-Clause & BSD-4-Clause & ISC & PD"
+LICENSE:${PN} = "BSD-3-Clause & ISC & PD"
+LICENSE:${PN}-dbg = "BSD-3-Clause & ISC & PD"
+LICENSE:${PN}-dev = "BSD-3-Clause & ISC & PD"
+LICENSE:${PN}-doc = "BSD-3-Clause & BSD-4-Clause & ISC & PD"
+LICENSE:${PN}-locale = "BSD-3-Clause & ISC & PD"
+LICENSE:${PN}-src = "BSD-3-Clause & ISC & PD"
+LICENSE:${PN}-staticdev = "BSD-3-Clause & ISC & PD"
+
+LIC_FILES_CHKSUM = "file://COPYING;md5=9b087a0981a1fcad42efbba6d4925a0f"
+SECTION = "libs"
+
+SRC_URI = "https://libbsd.freedesktop.org/releases/${BPN}-${PV}.tar.xz"
+
+SRC_URI[sha256sum] = "b88cc9163d0c652aaf39a99991d974ddba1c3a9711db8f1b5838af2a14731014"
+
+inherit autotools pkgconfig
+
+DEPENDS += "libmd"
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-support/libcap-ng/files/fix-issues-with-swig-4-2.patch b/meta/recipes-support/libcap-ng/files/fix-issues-with-swig-4-2.patch
new file mode 100644
index 0000000000..fb424fe725
--- /dev/null
+++ b/meta/recipes-support/libcap-ng/files/fix-issues-with-swig-4-2.patch
@@ -0,0 +1,32 @@
+From 355eada2d20886287cffc16e304087dd6f66ae37 Mon Sep 17 00:00:00 2001
+From: Steve Grubb <ausearch.1@gmail.com>
+Date: Thu, 4 Jan 2024 15:06:29 -0500
+Subject: [PATCH] Remove python global exception handler since its deprecated
+
+Upstream-Status: Backport [https://github.com/stevegrubb/libcap-ng/commit/30453b6553948cd05c438f9f509013e3bb84f25b]
+Signed-off-by: Anuj Mittal <anuj.mittal@intel.com>
+---
+ bindings/src/capng_swig.i | 7 -------
+ 1 file changed, 7 deletions(-)
+
+diff --git a/bindings/src/capng_swig.i b/bindings/src/capng_swig.i
+index fcdaf18..fa85e13 100644
+--- a/bindings/src/capng_swig.i
++++ b/bindings/src/capng_swig.i
+@@ -30,13 +30,6 @@
+
+ %varargs(16, signed capability = 0) capng_updatev;
+
+-%except(python) {
+- $action
+- if (result < 0) {
+- PyErr_SetFromErrno(PyExc_OSError);
+- return NULL;
+- }
+-}
+ #endif
+
+ %define __signed__
+--
+2.43.2
+
diff --git a/meta/recipes-support/libcap-ng/libcap-ng-python_0.8.3.bb b/meta/recipes-support/libcap-ng/libcap-ng-python_0.8.3.bb
deleted file mode 100644
index 1240589d00..0000000000
--- a/meta/recipes-support/libcap-ng/libcap-ng-python_0.8.3.bb
+++ /dev/null
@@ -1,28 +0,0 @@
-require libcap-ng.inc
-
-FILESEXTRAPATHS:prepend := "${THISDIR}/libcap-ng:"
-
-SUMMARY .= " - python"
-
-inherit lib_package autotools python3targetconfig
-
-DEPENDS += "libcap-ng python3 swig-native"
-
-S = "${WORKDIR}/libcap-ng-${PV}"
-
-EXTRA_OECONF += "--with-python --with-python3"
-
-do_install:append() {
- rm -rf ${D}${bindir}
- rm -rf ${D}${libdir}/.debug
- rm -f ${D}${libdir}/lib*
- rm -rf ${D}${libdir}/pkgconfig
- rm -rf ${D}${datadir}
- rm -rf ${D}${includedir}
-}
-
-# PACKAGES = "${PN}"
-
-FILES:${PN} = "${libdir}/python${PYTHON_BASEVERSION}"
-FILES:${PN}-dbg =+ "${PYTHON_SITEPACKAGES_DIR}/.debug/_capng.so"
-
diff --git a/meta/recipes-support/libcap-ng/libcap-ng-python_0.8.4.bb b/meta/recipes-support/libcap-ng/libcap-ng-python_0.8.4.bb
new file mode 100644
index 0000000000..4790134ae9
--- /dev/null
+++ b/meta/recipes-support/libcap-ng/libcap-ng-python_0.8.4.bb
@@ -0,0 +1,29 @@
+require libcap-ng.inc
+
+FILESEXTRAPATHS:prepend := "${THISDIR}/libcap-ng:"
+
+SUMMARY .= " - python"
+
+inherit lib_package autotools python3targetconfig
+
+# drop setuptools when version > 0.8.3 is released; it's needed only for distutils
+DEPENDS += "libcap-ng python3 swig-native python3-setuptools-native"
+
+S = "${WORKDIR}/libcap-ng-${PV}"
+
+EXTRA_OECONF += "--with-python3"
+
+do_install:append() {
+ rm -rf ${D}${bindir}
+ rm -rf ${D}${libdir}/.debug
+ rm -f ${D}${libdir}/lib*
+ rm -rf ${D}${libdir}/pkgconfig
+ rm -rf ${D}${datadir}
+ rm -rf ${D}${includedir}
+}
+
+# PACKAGES = "${PN}"
+
+FILES:${PN} = "${libdir}/python${PYTHON_BASEVERSION}"
+FILES:${PN}-dbg =+ "${PYTHON_SITEPACKAGES_DIR}/.debug/_capng.so"
+
diff --git a/meta/recipes-support/libcap-ng/libcap-ng.inc b/meta/recipes-support/libcap-ng/libcap-ng.inc
index 55e3287396..845b7c2f0a 100644
--- a/meta/recipes-support/libcap-ng/libcap-ng.inc
+++ b/meta/recipes-support/libcap-ng/libcap-ng.inc
@@ -7,9 +7,11 @@ LICENSE = "GPL-2.0-or-later & LGPL-2.1-or-later"
LIC_FILES_CHKSUM = "file://COPYING;md5=94d55d512a9ba36caa9b7df079bae19f \
file://COPYING.LIB;md5=e3eda01d9815f8d24aae2dbd89b68b06"
-SRC_URI = "https://people.redhat.com/sgrubb/libcap-ng/libcap-ng-${PV}.tar.gz"
+SRC_URI = "https://people.redhat.com/sgrubb/libcap-ng/libcap-ng-${PV}.tar.gz \
+ file://fix-issues-with-swig-4-2.patch \
+ "
-SRC_URI[sha256sum] = "bed6f6848e22bb2f83b5f764b2aef0ed393054e803a8e3a8711cb2a39e6b492d"
+SRC_URI[sha256sum] = "68581d3b38e7553cb6f6ddf7813b1fc99e52856f21421f7b477ce5abd2605a8a"
EXTRA_OECONF:append:class-target = " --with-capability_header=${STAGING_INCDIR}/linux/capability.h"
EXTRA_OECONF:append:class-nativesdk = " --with-capability_header=${STAGING_INCDIR}/linux/capability.h"
diff --git a/meta/recipes-support/libcap-ng/libcap-ng_0.8.3.bb b/meta/recipes-support/libcap-ng/libcap-ng_0.8.3.bb
deleted file mode 100644
index 6e16e886b9..0000000000
--- a/meta/recipes-support/libcap-ng/libcap-ng_0.8.3.bb
+++ /dev/null
@@ -1,17 +0,0 @@
-require libcap-ng.inc
-
-inherit lib_package autotools
-
-EXTRA_OECONF += "--without-python --without-python3"
-
-BBCLASSEXTEND = "native nativesdk"
-
-do_install:append() {
- # Moving libcap-ng to base_libdir
- if [ ! ${D}${libdir} -ef ${D}${base_libdir} ]; then
- mkdir -p ${D}/${base_libdir}/
- mv -f ${D}${libdir}/libcap-ng.so.* ${D}${base_libdir}/
- relpath=${@os.path.relpath("${base_libdir}", "${libdir}")}
- ln -sf ${relpath}/libcap-ng.so.0.0.0 ${D}${libdir}/libcap-ng.so
- fi
-}
diff --git a/meta/recipes-support/libcap-ng/libcap-ng_0.8.4.bb b/meta/recipes-support/libcap-ng/libcap-ng_0.8.4.bb
new file mode 100644
index 0000000000..3dbe3e2ffd
--- /dev/null
+++ b/meta/recipes-support/libcap-ng/libcap-ng_0.8.4.bb
@@ -0,0 +1,17 @@
+require libcap-ng.inc
+
+inherit lib_package autotools
+
+EXTRA_OECONF += "--without-python3"
+
+BBCLASSEXTEND = "native nativesdk"
+
+do_install:append() {
+ # Moving libcap-ng to base_libdir
+ if [ ! ${D}${libdir} -ef ${D}${base_libdir} ]; then
+ mkdir -p ${D}/${base_libdir}/
+ mv -f ${D}${libdir}/libcap-ng.so.* ${D}${base_libdir}/
+ relpath=${@os.path.relpath("${base_libdir}", "${libdir}")}
+ ln -sf ${relpath}/libcap-ng.so.0.0.0 ${D}${libdir}/libcap-ng.so
+ fi
+}
diff --git a/meta/recipes-support/libcap/files/0001-nativesdk-libcap-Raise-the-size-of-arrays-containing.patch b/meta/recipes-support/libcap/files/0001-nativesdk-libcap-Raise-the-size-of-arrays-containing.patch
index 9884fb5641..2ac1e6e560 100644
--- a/meta/recipes-support/libcap/files/0001-nativesdk-libcap-Raise-the-size-of-arrays-containing.patch
+++ b/meta/recipes-support/libcap/files/0001-nativesdk-libcap-Raise-the-size-of-arrays-containing.patch
@@ -1,4 +1,4 @@
-From fc60e000169618a4adced845b9462d36ced1efdd Mon Sep 17 00:00:00 2001
+From 709aa8e156415215b0bb034d05b2aa2f44be044e Mon Sep 17 00:00:00 2001
From: Hongxu Jia <hongxu.jia@windriver.com>
Date: Thu, 14 Oct 2021 15:57:36 +0800
Subject: [PATCH] nativesdk-libcap: Raise the size of arrays containing dl
diff --git a/meta/recipes-support/libcap/libcap_2.64.bb b/meta/recipes-support/libcap/libcap_2.64.bb
deleted file mode 100644
index 7690d3e9a5..0000000000
--- a/meta/recipes-support/libcap/libcap_2.64.bb
+++ /dev/null
@@ -1,80 +0,0 @@
-SUMMARY = "Library for getting/setting POSIX.1e capabilities"
-DESCRIPTION = "A library providing the API to access POSIX capabilities. \
-These allow giving various kinds of specific privileges to individual \
-users, without giving them full root permissions."
-HOMEPAGE = "http://sites.google.com/site/fullycapable/"
-# no specific GPL version required
-LICENSE = "BSD-3-Clause | GPL-2.0-only"
-LIC_FILES_CHKSUM_PAM = "file://pam_cap/License;md5=0ad4c9c052b9719ee4fce1bfc7c7dee4"
-LIC_FILES_CHKSUM = "\
- file://License;md5=e2370ba375efe9e1a095c26d37e483b8 \
- ${@bb.utils.contains('PACKAGECONFIG', 'pam', '${LIC_FILES_CHKSUM_PAM}', '', d)} \
-"
-
-DEPENDS = "hostperl-runtime-native gperf-native"
-
-SRC_URI = "${KERNELORG_MIRROR}/linux/libs/security/linux-privs/${BPN}2/${BPN}-${PV}.tar.xz \
- file://0001-ensure-the-XATTR_NAME_CAPS-is-defined-when-it-is-use.patch \
- file://0002-tests-do-not-run-target-executables.patch \
- "
-SRC_URI:append:class-nativesdk = " \
- file://0001-nativesdk-libcap-Raise-the-size-of-arrays-containing.patch \
- "
-SRC_URI[sha256sum] = "c8465e1f0b068d5fc06199231135ccac7adb56d662b1de93589252e8cd071e13"
-
-UPSTREAM_CHECK_URI = "https://www.kernel.org/pub/linux/libs/security/linux-privs/${BPN}2/"
-
-inherit lib_package
-
-PACKAGECONFIG ??= "${@bb.utils.filter('DISTRO_FEATURES', 'pam', d)}"
-PACKAGECONFIG:class-native ??= ""
-
-PACKAGECONFIG[pam] = "PAM_CAP=yes,PAM_CAP=no,libpam"
-
-EXTRA_OEMAKE = " \
- INDENT= \
- lib='${baselib}' \
- RAISE_SETFCAP=no \
- DYNAMIC=yes \
- USE_GPERF=yes \
-"
-
-EXTRA_OEMAKE:append:class-target = " SYSTEM_HEADERS=${STAGING_INCDIR}"
-
-do_compile() {
- unset CFLAGS BUILD_CFLAGS
- oe_runmake \
- ${PACKAGECONFIG_CONFARGS} \
- AR="${AR}" \
- CC="${CC}" \
- RANLIB="${RANLIB}" \
- OBJCOPY="${OBJCOPY}" \
- COPTS="${CFLAGS}" \
- BUILD_COPTS="${BUILD_CFLAGS}"
-}
-
-do_install() {
- oe_runmake install \
- ${PACKAGECONFIG_CONFARGS} \
- DESTDIR="${D}" \
- prefix="${prefix}" \
- SBINDIR="${sbindir}"
-}
-
-do_install:append() {
- # Move the library to base_libdir
- install -d ${D}${base_libdir}
- if [ ! ${D}${libdir} -ef ${D}${base_libdir} ]; then
- mv ${D}${libdir}/libcap* ${D}${base_libdir}
- if [ -d ${D}${libdir}/security ]; then
- mv ${D}${libdir}/security ${D}${base_libdir}
- fi
- fi
-}
-
-FILES:${PN}-dev += "${base_libdir}/*.so"
-
-# pam files
-FILES:${PN} += "${base_libdir}/security/*.so"
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-support/libcap/libcap_2.69.bb b/meta/recipes-support/libcap/libcap_2.69.bb
new file mode 100644
index 0000000000..92fa766d37
--- /dev/null
+++ b/meta/recipes-support/libcap/libcap_2.69.bb
@@ -0,0 +1,79 @@
+SUMMARY = "Library for getting/setting POSIX.1e capabilities"
+DESCRIPTION = "A library providing the API to access POSIX capabilities. \
+These allow giving various kinds of specific privileges to individual \
+users, without giving them full root permissions."
+HOMEPAGE = "http://sites.google.com/site/fullycapable/"
+# no specific GPL version required
+LICENSE = "BSD-3-Clause | GPL-2.0-only"
+LIC_FILES_CHKSUM_PAM = "file://pam_cap/License;md5=905326f41d3d1f8df21943f9a4ed6b50"
+LIC_FILES_CHKSUM = "file://License;md5=2965a646645b72ecee859b43c592dcaa \
+ ${@bb.utils.contains('PACKAGECONFIG', 'pam', '${LIC_FILES_CHKSUM_PAM}', '', d)} \
+ "
+
+DEPENDS = "hostperl-runtime-native gperf-native"
+
+SRC_URI = "${KERNELORG_MIRROR}/linux/libs/security/linux-privs/${BPN}2/${BPN}-${PV}.tar.xz \
+ file://0001-ensure-the-XATTR_NAME_CAPS-is-defined-when-it-is-use.patch \
+ file://0002-tests-do-not-run-target-executables.patch \
+ "
+SRC_URI:append:class-nativesdk = " \
+ file://0001-nativesdk-libcap-Raise-the-size-of-arrays-containing.patch \
+ "
+SRC_URI[sha256sum] = "f311f8f3dad84699d0566d1d6f7ec943a9298b28f714cae3c931dfd57492d7eb"
+
+UPSTREAM_CHECK_URI = "https://www.kernel.org/pub/linux/libs/security/linux-privs/${BPN}2/"
+
+inherit lib_package
+
+PACKAGECONFIG ??= "${@bb.utils.filter('DISTRO_FEATURES', 'pam', d)}"
+PACKAGECONFIG:class-native ??= ""
+
+PACKAGECONFIG[pam] = "PAM_CAP=yes,PAM_CAP=no,libpam"
+
+EXTRA_OEMAKE = " \
+ INDENT= \
+ lib='${baselib}' \
+ RAISE_SETFCAP=no \
+ DYNAMIC=yes \
+ USE_GPERF=yes \
+"
+
+EXTRA_OEMAKE:append:class-target = " SYSTEM_HEADERS=${STAGING_INCDIR}"
+
+do_compile() {
+ unset CFLAGS BUILD_CFLAGS
+ oe_runmake \
+ ${PACKAGECONFIG_CONFARGS} \
+ AR="${AR}" \
+ CC="${CC}" \
+ RANLIB="${RANLIB}" \
+ OBJCOPY="${OBJCOPY}" \
+ COPTS="${CFLAGS}" \
+ BUILD_COPTS="${BUILD_CFLAGS}"
+}
+
+do_install() {
+ oe_runmake install \
+ ${PACKAGECONFIG_CONFARGS} \
+ DESTDIR="${D}" \
+ prefix="${prefix}" \
+ SBINDIR="${sbindir}"
+}
+
+do_install:append() {
+ # Move the library to base_libdir
+ install -d ${D}${base_libdir}
+ if [ ! ${D}${libdir} -ef ${D}${base_libdir} ]; then
+ mv ${D}${libdir}/libcap* ${D}${base_libdir}
+ if [ -d ${D}${libdir}/security ]; then
+ mv ${D}${libdir}/security ${D}${base_libdir}
+ fi
+ fi
+}
+
+FILES:${PN}-dev += "${base_libdir}/*.so"
+
+# pam files
+FILES:${PN} += "${base_libdir}/security/*.so"
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-support/libcheck/libcheck/automake-output.patch b/meta/recipes-support/libcheck/libcheck/automake-output.patch
new file mode 100644
index 0000000000..c860f0cc0f
--- /dev/null
+++ b/meta/recipes-support/libcheck/libcheck/automake-output.patch
@@ -0,0 +1,82 @@
+Add optional output in automake style, for integration with ptest.
+Export CK_AUTOMAKE=1 when running a test suite and you'll get
+PASS/FAIL lines on standard output.
+
+Marking this as Inappropriate right now as it's a little rough on the
+edges. Filed https://github.com/libcheck/check/issues/349 to discuss
+with upstream.
+
+Upstream-Status: Inappropriate
+Signed-off-by: Ross Burton <ross.burton@arm.com>
+
+diff --git a/src/check_log.c b/src/check_log.c
+index 0844661..ad23c65 100644
+--- a/src/check_log.c
++++ b/src/check_log.c
+@@ -26,6 +26,7 @@
+ #if ENABLE_SUBUNIT
+ #include <subunit/child.h>
+ #endif
++#include <libgen.h>
+
+ #include "check_error.h"
+ #include "check_list.h"
+@@ -381,6 +382,34 @@ void tap_lfun(SRunner * sr CK_ATTRIBUTE_UNUSED, FILE * file,
+ }
+ }
+
++void am_lfun(SRunner * sr CK_ATTRIBUTE_UNUSED, FILE * file,
++ enum print_output printmode CK_ATTRIBUTE_UNUSED, void *obj,
++ enum cl_event evt)
++{
++ TestResult *tr;
++ const char* types[] = { "INVALID", "PASS", "FAIL", "ERROR"};
++
++ switch (evt)
++ {
++ case CLINITLOG_SR:
++ case CLENDLOG_SR:
++ case CLSTART_SR:
++ case CLSTART_S:
++ case CLEND_SR:
++ case CLEND_S:
++ case CLSTART_T:
++ break;
++ case CLEND_T:
++ tr = (TestResult *)obj;
++ fprintf(file, "%s: %s:%s:%s %s\n",
++ types[tr->rtype], basename(tr->file), tr->tcname, tr->tname, tr->msg);
++ fflush(file);
++ break;
++ default:
++ eprintf("Bad event type received in am_lfun", __FILE__, __LINE__);
++ }
++}
++
+ #if ENABLE_SUBUNIT
+ void subunit_lfun(SRunner * sr, FILE * file, enum print_output printmode,
+ void *obj, enum cl_event evt)
+@@ -527,6 +556,9 @@ void srunner_init_logging(SRunner * sr, enum print_output print_mode)
+ {
+ srunner_register_lfun(sr, f, f != stdout, tap_lfun, print_mode);
+ }
++ if (getenv("CK_AUTOMAKE"))
++ srunner_register_lfun(sr, stdout, 0, am_lfun, print_mode);
++
+ srunner_send_evt(sr, NULL, CLINITLOG_SR);
+ }
+
+diff --git a/src/check_log.h b/src/check_log.h
+index 7223b98..bfe1de3 100644
+--- a/src/check_log.h
++++ b/src/check_log.h
+@@ -40,6 +40,9 @@ void xml_lfun(SRunner * sr, FILE * file, enum print_output,
+ void tap_lfun(SRunner * sr, FILE * file, enum print_output,
+ void *obj, enum cl_event evt);
+
++void am_lfun(SRunner * sr, FILE * file, enum print_output,
++ void *obj, enum cl_event evt);
++
+ void subunit_lfun(SRunner * sr, FILE * file, enum print_output,
+ void *obj, enum cl_event evt);
+
diff --git a/meta/recipes-support/libcheck/libcheck_0.15.2.bb b/meta/recipes-support/libcheck/libcheck_0.15.2.bb
index 188d689cc3..5ab67b8728 100644
--- a/meta/recipes-support/libcheck/libcheck_0.15.2.bb
+++ b/meta/recipes-support/libcheck/libcheck_0.15.2.bb
@@ -10,19 +10,23 @@ SECTION = "devel"
LICENSE = "LGPL-2.1-or-later"
LIC_FILES_CHKSUM = "file://COPYING.LESSER;md5=2d5025d4aa3495befef8f17206a5b0a1"
-SRC_URI = "https://github.com/${BPN}/check/releases/download/${PV}/check-${PV}.tar.gz \
+SRC_URI = "${GITHUB_BASE_URI}/download/${PV}/check-${PV}.tar.gz \
+ file://automake-output.patch \
file://not-echo-compiler-info-to-check_stdint.h.patch"
SRC_URI[sha256sum] = "a8de4e0bacfb4d76dd1c618ded263523b53b85d92a146d8835eb1a52932fa20a"
-UPSTREAM_CHECK_URI = "https://github.com/libcheck/check/releases/"
+GITHUB_BASE_URI = "https://github.com/libcheck/check/releases/"
S = "${WORKDIR}/check-${PV}"
-inherit autotools pkgconfig texinfo
+inherit autotools pkgconfig texinfo github-releases
CACHED_CONFIGUREVARS += "ac_cv_path_AWK_PATH=${bindir}/gawk"
RREPLACES:${PN} = "check (<= 0.9.5)"
+do_install:append:class-native() {
+ create_cmdline_shebang_wrapper ${D}${bindir}/checkmk
+}
BBCLASSEXTEND = "native nativesdk"
PACKAGES =+ "checkmk"
@@ -30,4 +34,3 @@ PACKAGES =+ "checkmk"
FILES:checkmk = "${bindir}/checkmk"
RDEPENDS:checkmk = "gawk"
-
diff --git a/meta/recipes-support/libcroco/files/CVE-2020-12825.patch b/meta/recipes-support/libcroco/files/CVE-2020-12825.patch
deleted file mode 100644
index 42f92e3607..0000000000
--- a/meta/recipes-support/libcroco/files/CVE-2020-12825.patch
+++ /dev/null
@@ -1,192 +0,0 @@
-From fdf78a4877afa987ba646a8779b513f258e6d04c Mon Sep 17 00:00:00 2001
-From: Michael Catanzaro <mcatanzaro@gnome.org>
-Date: Fri, 31 Jul 2020 15:21:53 -0500
-Subject: [PATCH] libcroco: Limit recursion in block and any productions
-
- (CVE-2020-12825)
-
-If we don't have any limits, we can recurse forever and overflow the
-stack.
-
-Fixes #8
-This is per https://gitlab.gnome.org/Archive/libcroco/-/issues/8
-
-https://gitlab.gnome.org/GNOME/gnome-shell/-/merge_requests/1404
-
-CVE: CVE-2020-12825
-Upstream-Status: Backport [https://gitlab.gnome.org/Archive/libcroco/-/commit/6eb257e5c731c691eb137fca94e916ca73941a5a]
-Comment: No refreshing changes done.
-Signed-off-by: Saloni Jain <Saloni.Jain@kpit.com>
-
----
- src/cr-parser.c | 44 +++++++++++++++++++++++++++++---------------
- 1 file changed, 29 insertions(+), 15 deletions(-)
-
-diff --git a/src/cr-parser.c b/src/cr-parser.c
-index 18c9a01..f4a62e3 100644
---- a/src/cr-parser.c
-+++ b/src/cr-parser.c
-@@ -136,6 +136,8 @@ struct _CRParserPriv {
-
- #define CHARS_TAB_SIZE 12
-
-+#define RECURSIVE_CALLERS_LIMIT 100
-+
- /**
- * IS_NUM:
- *@a_char: the char to test.
-@@ -344,9 +346,11 @@ static enum CRStatus cr_parser_parse_selector_core (CRParser * a_this);
-
- static enum CRStatus cr_parser_parse_declaration_core (CRParser * a_this);
-
--static enum CRStatus cr_parser_parse_any_core (CRParser * a_this);
-+static enum CRStatus cr_parser_parse_any_core (CRParser * a_this,
-+ guint n_calls);
-
--static enum CRStatus cr_parser_parse_block_core (CRParser * a_this);
-+static enum CRStatus cr_parser_parse_block_core (CRParser * a_this,
-+ guint n_calls);
-
- static enum CRStatus cr_parser_parse_value_core (CRParser * a_this);
-
-@@ -784,7 +788,7 @@ cr_parser_parse_atrule_core (CRParser * a_this)
- cr_parser_try_to_skip_spaces_and_comments (a_this);
-
- do {
-- status = cr_parser_parse_any_core (a_this);
-+ status = cr_parser_parse_any_core (a_this, 0);
- } while (status == CR_OK);
-
- status = cr_tknzr_get_next_token (PRIVATE (a_this)->tknzr,
-@@ -795,7 +799,7 @@ cr_parser_parse_atrule_core (CRParser * a_this)
- cr_tknzr_unget_token (PRIVATE (a_this)->tknzr,
- token);
- token = NULL;
-- status = cr_parser_parse_block_core (a_this);
-+ status = cr_parser_parse_block_core (a_this, 0);
- CHECK_PARSING_STATUS (status,
- FALSE);
- goto done;
-@@ -930,11 +934,11 @@ cr_parser_parse_selector_core (CRParser * a_this)
-
- RECORD_INITIAL_POS (a_this, &init_pos);
-
-- status = cr_parser_parse_any_core (a_this);
-+ status = cr_parser_parse_any_core (a_this, 0);
- CHECK_PARSING_STATUS (status, FALSE);
-
- do {
-- status = cr_parser_parse_any_core (a_this);
-+ status = cr_parser_parse_any_core (a_this, 0);
-
- } while (status == CR_OK);
-
-@@ -956,10 +960,12 @@ cr_parser_parse_selector_core (CRParser * a_this)
- *in chapter 4.1 of the css2 spec.
- *block ::= '{' S* [ any | block | ATKEYWORD S* | ';' ]* '}' S*;
- *@param a_this the current instance of #CRParser.
-+ *@param n_calls used to limit recursion depth
- *FIXME: code this function.
- */
- static enum CRStatus
--cr_parser_parse_block_core (CRParser * a_this)
-+cr_parser_parse_block_core (CRParser * a_this,
-+ guint n_calls)
- {
- CRToken *token = NULL;
- CRInputPos init_pos;
-@@ -967,6 +973,9 @@ cr_parser_parse_block_core (CRParser * a_this)
-
- g_return_val_if_fail (a_this && PRIVATE (a_this), CR_BAD_PARAM_ERROR);
-
-+ if (n_calls > RECURSIVE_CALLERS_LIMIT)
-+ return CR_ERROR;
-+
- RECORD_INITIAL_POS (a_this, &init_pos);
-
- status = cr_tknzr_get_next_token (PRIVATE (a_this)->tknzr, &token);
-@@ -996,13 +1005,13 @@ cr_parser_parse_block_core (CRParser * a_this)
- } else if (token->type == CBO_TK) {
- cr_tknzr_unget_token (PRIVATE (a_this)->tknzr, token);
- token = NULL;
-- status = cr_parser_parse_block_core (a_this);
-+ status = cr_parser_parse_block_core (a_this, n_calls + 1);
- CHECK_PARSING_STATUS (status, FALSE);
- goto parse_block_content;
- } else {
- cr_tknzr_unget_token (PRIVATE (a_this)->tknzr, token);
- token = NULL;
-- status = cr_parser_parse_any_core (a_this);
-+ status = cr_parser_parse_any_core (a_this, n_calls + 1);
- CHECK_PARSING_STATUS (status, FALSE);
- goto parse_block_content;
- }
-@@ -1109,7 +1118,7 @@ cr_parser_parse_value_core (CRParser * a_this)
- status = cr_tknzr_unget_token (PRIVATE (a_this)->tknzr,
- token);
- token = NULL;
-- status = cr_parser_parse_block_core (a_this);
-+ status = cr_parser_parse_block_core (a_this, 0);
- CHECK_PARSING_STATUS (status, FALSE);
- ref++;
- goto continue_parsing;
-@@ -1123,7 +1132,7 @@ cr_parser_parse_value_core (CRParser * a_this)
- status = cr_tknzr_unget_token (PRIVATE (a_this)->tknzr,
- token);
- token = NULL;
-- status = cr_parser_parse_any_core (a_this);
-+ status = cr_parser_parse_any_core (a_this, 0);
- if (status == CR_OK) {
- ref++;
- goto continue_parsing;
-@@ -1162,10 +1171,12 @@ cr_parser_parse_value_core (CRParser * a_this)
- * | FUNCTION | DASHMATCH | '(' any* ')' | '[' any* ']' ] S*;
- *
- *@param a_this the current instance of #CRParser.
-+ *@param n_calls used to limit recursion depth
- *@return CR_OK upon successfull completion, an error code otherwise.
- */
- static enum CRStatus
--cr_parser_parse_any_core (CRParser * a_this)
-+cr_parser_parse_any_core (CRParser * a_this,
-+ guint n_calls)
- {
- CRToken *token1 = NULL,
- *token2 = NULL;
-@@ -1174,6 +1185,9 @@ cr_parser_parse_any_core (CRParser * a_this)
-
- g_return_val_if_fail (a_this, CR_BAD_PARAM_ERROR);
-
-+ if (n_calls > RECURSIVE_CALLERS_LIMIT)
-+ return CR_ERROR;
-+
- RECORD_INITIAL_POS (a_this, &init_pos);
-
- status = cr_tknzr_get_next_token (PRIVATE (a_this)->tknzr, &token1);
-@@ -1212,7 +1226,7 @@ cr_parser_parse_any_core (CRParser * a_this)
- *We consider parameter as being an "any*" production.
- */
- do {
-- status = cr_parser_parse_any_core (a_this);
-+ status = cr_parser_parse_any_core (a_this, n_calls + 1);
- } while (status == CR_OK);
-
- ENSURE_PARSING_COND (status == CR_PARSING_ERROR);
-@@ -1237,7 +1251,7 @@ cr_parser_parse_any_core (CRParser * a_this)
- }
-
- do {
-- status = cr_parser_parse_any_core (a_this);
-+ status = cr_parser_parse_any_core (a_this, n_calls + 1);
- } while (status == CR_OK);
-
- ENSURE_PARSING_COND (status == CR_PARSING_ERROR);
-@@ -1265,7 +1279,7 @@ cr_parser_parse_any_core (CRParser * a_this)
- }
-
- do {
-- status = cr_parser_parse_any_core (a_this);
-+ status = cr_parser_parse_any_core (a_this, n_calls + 1);
- } while (status == CR_OK);
-
- ENSURE_PARSING_COND (status == CR_PARSING_ERROR);
diff --git a/meta/recipes-support/libcroco/libcroco_0.6.13.bb b/meta/recipes-support/libcroco/libcroco_0.6.13.bb
deleted file mode 100644
index 419c962657..0000000000
--- a/meta/recipes-support/libcroco/libcroco_0.6.13.bb
+++ /dev/null
@@ -1,26 +0,0 @@
-SUMMARY = "Cascading Style Sheet (CSS) parsing and manipulation toolkit"
-DESCRIPTION = "The Libcroco project is an effort to build a generic \
-Cascading Style Sheet (CSS) parsing and manipulation toolkit that can be \
-used by GNOME applications in need of CSS support."
-HOMEPAGE = "http://www.gnome.org/"
-BUGTRACKER = "https://bugzilla.gnome.org/"
-
-LICENSE = "LGPL-2.0-only & LGPL-2.1-only"
-LIC_FILES_CHKSUM = "file://COPYING;md5=55ca817ccb7d5b5b66355690e9abc605 \
- file://src/cr-rgb.c;endline=22;md5=31d5f0944d556c8589d04ea6055fcc66 \
- file://tests/cr-test-utils.c;endline=21;md5=2382c27934cae1d3792fcb17a6142c4e"
-
-SECTION = "x11/utils"
-DEPENDS = "glib-2.0 libxml2 zlib"
-BBCLASSEXTEND = "native nativesdk"
-EXTRA_OECONF += "--enable-Bsymbolic=auto"
-
-BINCONFIG = "${bindir}/croco-0.6-config"
-
-inherit gnomebase gtk-doc binconfig-disabled
-
-SRC_URI[archive.md5sum] = "c80c5a8385011a0260dce6bd0da93dce"
-SRC_URI[archive.sha256sum] = "767ec234ae7aa684695b3a735548224888132e063f92db585759b422570621d4"
-
-SRC_URI +="file://CVE-2020-12825.patch \
-"
diff --git a/meta/recipes-support/libdaemon/libdaemon_0.14.bb b/meta/recipes-support/libdaemon/libdaemon_0.14.bb
index de6ac388a3..089f19d1cd 100644
--- a/meta/recipes-support/libdaemon/libdaemon_0.14.bb
+++ b/meta/recipes-support/libdaemon/libdaemon_0.14.bb
@@ -4,7 +4,6 @@ facilities for logging and a signal handler to enable graceful shutdown, \
as well as file locking to ensure that only a single copy of a given daemon \
is running at a time."
SECTION = "libs"
-AUTHOR = "Lennart Poettering <lennart@poettering.net>"
HOMEPAGE = "http://0pointer.de/lennart/projects/libdaemon/"
LICENSE = "LGPL-2.1-or-later"
LIC_FILES_CHKSUM = "file://LICENSE;md5=2d5025d4aa3495befef8f17206a5b0a1 \
diff --git a/meta/recipes-support/libevdev/libevdev_1.12.1.bb b/meta/recipes-support/libevdev/libevdev_1.12.1.bb
deleted file mode 100644
index bdca64f357..0000000000
--- a/meta/recipes-support/libevdev/libevdev_1.12.1.bb
+++ /dev/null
@@ -1,17 +0,0 @@
-SUMMARY = "Wrapper library for evdev devices"
-DESCRIPTION = "A library for handling evdev kernel devices. It abstracts \
-the evdev ioctls through type-safe interfaces and provides functions \
-to change the appearance of the device."
-HOMEPAGE = "http://www.freedesktop.org/wiki/Software/libevdev/"
-SECTION = "libs"
-
-LICENSE = "MIT"
-LIC_FILES_CHKSUM = "file://COPYING;md5=80c550b3197bcb8da7d7557ebcc3fc46 \
- "
-
-SRC_URI = "http://www.freedesktop.org/software/libevdev/${BP}.tar.xz"
-SRC_URI[sha256sum] = "1dbba41bc516d3ca7abc0da5b862efe3ea8a7018fa6e9b97ce9d39401b22426c"
-
-inherit autotools pkgconfig
-
-UPSTREAM_CHECK_REGEX = "libevdev-(?P<pver>(\d+\.)+(?!90\d+)\d+)"
diff --git a/meta/recipes-support/libevdev/libevdev_1.13.1.bb b/meta/recipes-support/libevdev/libevdev_1.13.1.bb
new file mode 100644
index 0000000000..85cfd78507
--- /dev/null
+++ b/meta/recipes-support/libevdev/libevdev_1.13.1.bb
@@ -0,0 +1,17 @@
+SUMMARY = "Wrapper library for evdev devices"
+DESCRIPTION = "A library for handling evdev kernel devices. It abstracts \
+the evdev ioctls through type-safe interfaces and provides functions \
+to change the appearance of the device."
+HOMEPAGE = "http://www.freedesktop.org/wiki/Software/libevdev/"
+SECTION = "libs"
+
+LICENSE = "MIT"
+LIC_FILES_CHKSUM = "file://COPYING;md5=80c550b3197bcb8da7d7557ebcc3fc46 \
+ "
+
+SRC_URI = "http://www.freedesktop.org/software/libevdev/${BP}.tar.xz"
+SRC_URI[sha256sum] = "06a77bf2ac5c993305882bc1641017f5bec1592d6d1b64787bad492ab34f2f36"
+
+inherit autotools pkgconfig
+
+UPSTREAM_CHECK_REGEX = "libevdev-(?P<pver>(\d+\.)+(?!90\d+)\d+)"
diff --git a/meta/recipes-support/libevent/libevent/0004-test-retriable-tests-are-marked-failed-only-when-all-a.patch b/meta/recipes-support/libevent/libevent/0004-test-retriable-tests-are-marked-failed-only-when-all-a.patch
index ea17e876ea..26b707ad31 100644
--- a/meta/recipes-support/libevent/libevent/0004-test-retriable-tests-are-marked-failed-only-when-all-a.patch
+++ b/meta/recipes-support/libevent/libevent/0004-test-retriable-tests-are-marked-failed-only-when-all-a.patch
@@ -6,7 +6,7 @@ Subject: [PATCH] test: retriable tests are marked failed only when all
Fixes: #1193
-Upstream-Status: Accepted
+Upstream-Status: Backport [https://github.com/libevent/libevent/commit/3daebf308a01b4b2d3fb867be3d6631f7b5a2dbb]
Signed-off-by: Thomas Perrot <thomas.perrot@bootlin.com>
---
diff --git a/meta/recipes-support/libevent/libevent_2.1.12.bb b/meta/recipes-support/libevent/libevent_2.1.12.bb
index e26e8a9b57..25388fb4d7 100644
--- a/meta/recipes-support/libevent/libevent_2.1.12.bb
+++ b/meta/recipes-support/libevent/libevent_2.1.12.bb
@@ -11,7 +11,7 @@ SECTION = "libs"
LICENSE = "BSD-3-Clause & MIT"
LIC_FILES_CHKSUM = "file://LICENSE;md5=17f20574c0b154d12236d5fbe964f549"
-SRC_URI = "https://github.com/libevent/libevent/releases/download/release-${PV}-stable/${BP}-stable.tar.gz \
+SRC_URI = "${GITHUB_BASE_URI}/download/release-${PV}-stable/${BP}-stable.tar.gz \
file://Makefile-missing-test-dir.patch \
file://run-ptest \
file://0001-test-regress_dns.c-patch-out-tests-that-require-a-wo.patch \
@@ -21,15 +21,14 @@ SRC_URI = "https://github.com/libevent/libevent/releases/download/release-${PV}-
"
SRC_URI[sha256sum] = "92e6de1be9ec176428fd2367677e61ceffc2ee1cb119035037a27d346b0403bb"
-
-UPSTREAM_CHECK_URI = "http://libevent.org/"
+UPSTREAM_CHECK_REGEX = "releases/tag/release-(?P<pver>.+)-stable"
S = "${WORKDIR}/${BPN}-${PV}-stable"
PACKAGECONFIG ??= ""
PACKAGECONFIG[openssl] = "--enable-openssl,--disable-openssl,openssl"
-inherit autotools
+inherit autotools github-releases
# Needed for Debian packaging
LEAD_SONAME = "libevent-2.1.so"
@@ -42,7 +41,7 @@ PACKAGES_DYNAMIC = "^${PN}-.*$"
python split_libevent_libs () {
do_split_packages(d, '${libdir}', r'^libevent_([a-z]*)-.*\.so\..*', '${PN}-%s', '${SUMMARY} (%s)', prepend=True, allow_links=True)
}
-PACKAGESPLITFUNCS:prepend = "split_libevent_libs "
+PACKAGESPLITFUNCS =+ "split_libevent_libs"
BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-support/libexif/libexif/0001-Add-serial-tests-config-needed-by-ptest.patch b/meta/recipes-support/libexif/libexif/0001-Add-serial-tests-config-needed-by-ptest.patch
new file mode 100644
index 0000000000..505aa07330
--- /dev/null
+++ b/meta/recipes-support/libexif/libexif/0001-Add-serial-tests-config-needed-by-ptest.patch
@@ -0,0 +1,26 @@
+From 1ee7217c8ae724d793f9a9876c3608057a2ccbf8 Mon Sep 17 00:00:00 2001
+From: Julien Stephan <jstephan@baylibre.com>
+Date: Tue, 11 Jul 2023 16:07:54 +0200
+Subject: [PATCH] Add serial-tests config needed by ptest
+
+Add serial-tests support, ptest needs it.
+
+Upstream-Status: Inappropriate [oe specific]
+
+Signed-off-by: Julien Stephan <jstephan@baylibre.com>
+---
+ configure.ac | 1 +
+ 1 file changed, 1 insertion(+)
+
+diff --git a/configure.ac b/configure.ac
+index cd48047..5413907 100644
+--- a/configure.ac
++++ b/configure.ac
+@@ -16,6 +16,7 @@ AM_INIT_AUTOMAKE([
+ dist-zip
+ check-news
+ subdir-objects
++ serial-tests
+ ])
+ AM_MAINTAINER_MODE
+
diff --git a/meta/recipes-support/libexif/libexif/run-ptest b/meta/recipes-support/libexif/libexif/run-ptest
new file mode 100644
index 0000000000..2d23159eb0
--- /dev/null
+++ b/meta/recipes-support/libexif/libexif/run-ptest
@@ -0,0 +1,3 @@
+#!/bin/sh
+
+make -o Makefile runtest-TESTS
diff --git a/meta/recipes-support/libexif/libexif_0.6.24.bb b/meta/recipes-support/libexif/libexif_0.6.24.bb
index 20ba0ef282..b407ee52de 100644
--- a/meta/recipes-support/libexif/libexif_0.6.24.bb
+++ b/meta/recipes-support/libexif/libexif_0.6.24.bb
@@ -7,18 +7,41 @@ SECTION = "libs"
LICENSE = "LGPL-2.1-only"
LIC_FILES_CHKSUM = "file://COPYING;md5=243b725d71bb5df4a1e5920b344b86ad"
-def version_underscore(v):
- return "_".join(v.split("."))
-
-SRC_URI = "https://github.com/libexif/libexif/releases/download/v${PV}/libexif-${PV}.tar.bz2 \
+SRC_URI = "${GITHUB_BASE_URI}/download/v${PV}/libexif-${PV}.tar.bz2 \
+ file://0001-Add-serial-tests-config-needed-by-ptest.patch \
+ file://run-ptest \
"
SRC_URI[sha256sum] = "d47564c433b733d83b6704c70477e0a4067811d184ec565258ac563d8223f6ae"
-UPSTREAM_CHECK_URI = "https://github.com/libexif/libexif/releases/"
-
-inherit autotools gettext
+inherit autotools gettext github-releases ptest
EXTRA_OECONF += "--disable-docs"
+do_compile_ptest() {
+ oe_runmake -C test buildtest-TESTS
+}
+
+do_install_ptest() {
+ install ${B}/test/test*[!\.o] ${D}${PTEST_PATH}
+ for f in ${D}${PTEST_PATH}/test*; do
+ sed -i "s/\(LD_LIBRARY_PATH=\).*\(:\$LD_LIBRARY_PATH\)\"/\1.\2/" $f
+ done
+
+ install ${B}/test/Makefile ${D}${PTEST_PATH}
+ sed -i -e "/^srcdir/c srcdir = \$\{PWD\}" ${D}${PTEST_PATH}/Makefile
+
+ install -d ${D}${PTEST_PATH}/nls
+ install ${B}/test/nls/*[!\.o] ${D}${PTEST_PATH}/nls
+ install -d ${D}${PTEST_PATH}/.libs
+ install ${B}/test/.libs/* ${D}${PTEST_PATH}/.libs
+
+ install ${S}/test/*.sh ${D}${PTEST_PATH}
+
+ install -d ${D}${PTEST_PATH}/testdata
+ install ${S}/test/testdata/* ${D}${PTEST_PATH}/testdata
+}
+
+RDEPENDS:${PN}-ptest += "make bash"
+
BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-support/libffi/libffi/0001-arm-sysv-reverted-clang-VFP-mitigation.patch b/meta/recipes-support/libffi/libffi/0001-arm-sysv-reverted-clang-VFP-mitigation.patch
deleted file mode 100644
index 5e529d1ce7..0000000000
--- a/meta/recipes-support/libffi/libffi/0001-arm-sysv-reverted-clang-VFP-mitigation.patch
+++ /dev/null
@@ -1,104 +0,0 @@
-From 501a6b55853af549fae72723e74271f2a4ec7cf6 Mon Sep 17 00:00:00 2001
-From: Brett Warren <brett.warren@arm.com>
-Date: Fri, 27 Nov 2020 15:28:42 +0000
-Subject: [PATCH] arm/sysv: reverted clang VFP mitigation
-
-Since commit e3d2812ce43940aacae5bab2d0e965278cb1e7ea,
-seperate instructions were used when compiling under clang,
-as clang didn't allow the directives at the time. This mitigation
-now causes compilation to fail under clang 10, as described by
-https://github.com/libffi/libffi/issues/607. Now that
-clang supports the LDC and SDC instructions, this mitigation
-has been reverted.
-
-Upstream-Status: Pending
-Signed-off-by: Brett Warren <brett.warren@arm.com>
----
- src/arm/sysv.S | 33 ---------------------------------
- 1 file changed, 33 deletions(-)
-
-diff --git a/src/arm/sysv.S b/src/arm/sysv.S
-index fb36213..e4272a1 100644
---- a/src/arm/sysv.S
-+++ b/src/arm/sysv.S
-@@ -142,13 +142,8 @@ ARM_FUNC_START(ffi_call_VFP)
-
- cmp r3, #3 @ load only d0 if possible
- ite le
--#ifdef __clang__
-- vldrle d0, [r0]
-- vldmgt r0, {d0-d7}
--#else
- ldcle p11, cr0, [r0] @ vldrle d0, [r0]
- ldcgt p11, cr0, [r0], {16} @ vldmgt r0, {d0-d7}
--#endif
- add r0, r0, #64 @ discard the vfp register args
- /* FALLTHRU */
- ARM_FUNC_END(ffi_call_VFP)
-@@ -193,25 +188,13 @@ ARM_FUNC_START(ffi_call_SYSV)
- #endif
- 0:
- E(ARM_TYPE_VFP_S)
--#ifdef __clang__
-- vstr s0, [r2]
--#else
- stc p10, cr0, [r2] @ vstr s0, [r2]
--#endif
- pop {fp,pc}
- E(ARM_TYPE_VFP_D)
--#ifdef __clang__
-- vstr d0, [r2]
--#else
- stc p11, cr0, [r2] @ vstr d0, [r2]
--#endif
- pop {fp,pc}
- E(ARM_TYPE_VFP_N)
--#ifdef __clang__
-- vstm r2, {d0-d3}
--#else
- stc p11, cr0, [r2], {8} @ vstm r2, {d0-d3}
--#endif
- pop {fp,pc}
- E(ARM_TYPE_INT64)
- str r1, [r2, #4]
-@@ -320,11 +303,7 @@ ARM_FUNC_START(ffi_closure_VFP)
- add ip, sp, #16
- sub sp, sp, #64+32 @ allocate frame
- cfi_adjust_cfa_offset(64+32)
--#ifdef __clang__
-- vstm sp, {d0-d7}
--#else
- stc p11, cr0, [sp], {16} @ vstm sp, {d0-d7}
--#endif
- stmdb sp!, {ip,lr}
-
- /* See above. */
-@@ -358,25 +337,13 @@ ARM_FUNC_START_LOCAL(ffi_closure_ret)
- cfi_rel_offset(lr, 4)
- 0:
- E(ARM_TYPE_VFP_S)
--#ifdef __clang__
-- vldr s0, [r2]
--#else
- ldc p10, cr0, [r2] @ vldr s0, [r2]
--#endif
- b call_epilogue
- E(ARM_TYPE_VFP_D)
--#ifdef __clang__
-- vldr d0, [r2]
--#else
- ldc p11, cr0, [r2] @ vldr d0, [r2]
--#endif
- b call_epilogue
- E(ARM_TYPE_VFP_N)
--#ifdef __clang__
-- vldm r2, {d0-d3}
--#else
- ldc p11, cr0, [r2], {8} @ vldm r2, {d0-d3}
--#endif
- b call_epilogue
- E(ARM_TYPE_INT64)
- ldr r1, [r2, #4]
---
-2.25.1
-
diff --git a/meta/recipes-support/libffi/libffi/not-win32.patch b/meta/recipes-support/libffi/libffi/not-win32.patch
index 62daaf4b38..f4ed839ed8 100644
--- a/meta/recipes-support/libffi/libffi/not-win32.patch
+++ b/meta/recipes-support/libffi/libffi/not-win32.patch
@@ -1,4 +1,4 @@
-From 306719369a0d3608b4ff2737de74ae284788a14b Mon Sep 17 00:00:00 2001
+From be9d4cc110c159812699780348686da371296b94 Mon Sep 17 00:00:00 2001
From: Ross Burton <ross.burton@intel.com>
Date: Thu, 4 Feb 2016 16:22:50 +0000
Subject: [PATCH] libffi: ensure sysroot paths are not in libffi.pc
@@ -15,17 +15,16 @@ As this block is generally pointless, disable it.
Upstream-Status: Inappropriate
Signed-off-by: Ross Burton <ross.burton@intel.com>
-
---
configure.ac | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/configure.ac b/configure.ac
-index b764368..d51ce91 100644
+index 816bfd6..e37c5a8 100644
--- a/configure.ac
+++ b/configure.ac
-@@ -354,7 +354,7 @@ AC_ARG_ENABLE(multi-os-directory,
-
+@@ -401,7 +401,7 @@ AC_ARG_ENABLE(multi-os-directory,
+
# These variables are only ever used when we cross-build to X86_WIN32.
# And we only support this with GCC, so...
-if test "x$GCC" = "xyes"; then
diff --git a/meta/recipes-support/libffi/libffi_3.4.2.bb b/meta/recipes-support/libffi/libffi_3.4.2.bb
deleted file mode 100644
index 733fcc5e6c..0000000000
--- a/meta/recipes-support/libffi/libffi_3.4.2.bb
+++ /dev/null
@@ -1,36 +0,0 @@
-SUMMARY = "A portable foreign function interface library"
-HOMEPAGE = "http://sourceware.org/libffi/"
-DESCRIPTION = "The `libffi' library provides a portable, high level programming interface to various calling \
-conventions. This allows a programmer to call any function specified by a call interface description at run \
-time. FFI stands for Foreign Function Interface. A foreign function interface is the popular name for the \
-interface that allows code written in one language to call code written in another language. The `libffi' \
-library really only provides the lowest, machine dependent layer of a fully featured foreign function interface. \
-A layer must exist above `libffi' that handles type conversions for values passed between the two languages."
-
-LICENSE = "MIT"
-LIC_FILES_CHKSUM = "file://LICENSE;md5=679b5c9bdc79a2b93ee574e193e7a7bc"
-
-SRC_URI = "https://github.com/libffi/libffi/releases/download/v${PV}/${BPN}-${PV}.tar.gz \
- file://not-win32.patch \
- file://0001-arm-sysv-reverted-clang-VFP-mitigation.patch \
- "
-SRC_URI[sha256sum] = "540fb721619a6aba3bdeef7d940d8e9e0e6d2c193595bc243241b77ff9e93620"
-UPSTREAM_CHECK_URI = "https://github.com/libffi/libffi/releases/"
-UPSTREAM_CHECK_REGEX = "libffi-(?P<pver>\d+(\.\d+)+)\.tar"
-
-EXTRA_OECONF += "--disable-builddir --disable-exec-static-tramp"
-EXTRA_OEMAKE:class-target = "LIBTOOLFLAGS='--tag=CC'"
-inherit autotools texinfo multilib_header
-
-do_install:append() {
- oe_multilib_header ffi.h ffitarget.h
-}
-
-FILES:${PN}-dev += "${libdir}/libffi-${PV}"
-
-# Doesn't compile in MIPS16e mode due to use of hand-written
-# assembly
-MIPS_INSTRUCTION_SET = "mips"
-
-BBCLASSEXTEND = "native nativesdk"
-
diff --git a/meta/recipes-support/libffi/libffi_3.4.6.bb b/meta/recipes-support/libffi/libffi_3.4.6.bb
new file mode 100644
index 0000000000..a6857a3981
--- /dev/null
+++ b/meta/recipes-support/libffi/libffi_3.4.6.bb
@@ -0,0 +1,32 @@
+SUMMARY = "A portable foreign function interface library"
+HOMEPAGE = "http://sourceware.org/libffi/"
+DESCRIPTION = "The `libffi' library provides a portable, high level programming interface to various calling \
+conventions. This allows a programmer to call any function specified by a call interface description at run \
+time. FFI stands for Foreign Function Interface. A foreign function interface is the popular name for the \
+interface that allows code written in one language to call code written in another language. The `libffi' \
+library really only provides the lowest, machine dependent layer of a fully featured foreign function interface. \
+A layer must exist above `libffi' that handles type conversions for values passed between the two languages."
+
+LICENSE = "MIT"
+LIC_FILES_CHKSUM = "file://LICENSE;md5=1db54c9fd307a12218766c3c7f650ca7"
+
+SRC_URI = "${GITHUB_BASE_URI}/download/v${PV}/${BPN}-${PV}.tar.gz \
+ file://not-win32.patch \
+ "
+SRC_URI[sha256sum] = "b0dea9df23c863a7a50e825440f3ebffabd65df1497108e5d437747843895a4e"
+
+EXTRA_OECONF += "--disable-builddir --disable-exec-static-tramp"
+EXTRA_OECONF:class-native += "--with-gcc-arch=generic"
+EXTRA_OEMAKE:class-target = "LIBTOOLFLAGS='--tag=CC'"
+
+inherit autotools texinfo multilib_header github-releases
+
+do_install:append() {
+ oe_multilib_header ffi.h ffitarget.h
+}
+
+# Doesn't compile in MIPS16e mode due to use of hand-written
+# assembly
+MIPS_INSTRUCTION_SET = "mips"
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-support/libgcrypt/files/0001-libgcrypt-fix-m4-file-for-oe-core.patch b/meta/recipes-support/libgcrypt/files/0001-libgcrypt-fix-m4-file-for-oe-core.patch
index c873e24ed5..cbfa30f0ad 100644
--- a/meta/recipes-support/libgcrypt/files/0001-libgcrypt-fix-m4-file-for-oe-core.patch
+++ b/meta/recipes-support/libgcrypt/files/0001-libgcrypt-fix-m4-file-for-oe-core.patch
@@ -1,4 +1,4 @@
-From 839f38e5ecc22b7f1b837284bbbffac8cb32ab1e Mon Sep 17 00:00:00 2001
+From bcfd89abdb5110b93314297120412d4c7f2da313 Mon Sep 17 00:00:00 2001
From: Trevor Gamblin <trevor.gamblin@windriver.com>
Date: Tue, 29 Oct 2019 14:08:32 -0400
Subject: [PATCH] libgcrypt: fix m4 file for oe-core
@@ -17,7 +17,7 @@ Signed-off-by: Trevor Gamblin <trevor.gamblin@windriver.com>
1 file changed, 4 insertions(+), 86 deletions(-)
diff --git a/src/libgcrypt.m4 b/src/libgcrypt.m4
-index 19d514f..21125c7 100644
+index cd4249e..e53a36f 100644
--- a/src/libgcrypt.m4
+++ b/src/libgcrypt.m4
@@ -29,41 +29,6 @@ dnl is added to the gpg_config_script_warn variable.
@@ -35,7 +35,7 @@ index 19d514f..21125c7 100644
- fi
-
- use_gpgrt_config=""
-- if test x"${LIBGCRYPT_CONFIG}" = x -a x"$GPGRT_CONFIG" != x -a "$GPGRT_CONFIG" != "no"; then
+- if test x"$GPGRT_CONFIG" != x -a "$GPGRT_CONFIG" != "no"; then
- if $GPGRT_CONFIG libgcrypt --exists; then
- LIBGCRYPT_CONFIG="$GPGRT_CONFIG libgcrypt"
- AC_MSG_NOTICE([Use gpgrt-config as libgcrypt-config])
diff --git a/meta/recipes-support/libgcrypt/files/0002-libgcrypt-fix-building-error-with-O2-in-sysroot-path.patch b/meta/recipes-support/libgcrypt/files/0002-libgcrypt-fix-building-error-with-O2-in-sysroot-path.patch
index f9c362431c..cf9ebfb3e6 100644
--- a/meta/recipes-support/libgcrypt/files/0002-libgcrypt-fix-building-error-with-O2-in-sysroot-path.patch
+++ b/meta/recipes-support/libgcrypt/files/0002-libgcrypt-fix-building-error-with-O2-in-sysroot-path.patch
@@ -1,4 +1,4 @@
-From 3c6c10eae0993c8ca60879494c6650f7b8f54ebe Mon Sep 17 00:00:00 2001
+From 0f66e796a8522e1043dda03b88d5f6feae839d16 Mon Sep 17 00:00:00 2001
From: Chen Qi <Qi.Chen@windriver.com>
Date: Wed, 16 Aug 2017 10:44:41 +0800
Subject: [PATCH] libgcrypt: fix building error with '-O2' in sysroot path
@@ -25,15 +25,15 @@ Signed-off-by: Hongxu Jia <hongxu.jia@windriver.com>
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/cipher/Makefile.am b/cipher/Makefile.am
-index d644005..1cf5072 100644
+index c3d642b..88c883a 100644
--- a/cipher/Makefile.am
+++ b/cipher/Makefile.am
-@@ -147,7 +147,7 @@ gost-s-box: gost-s-box.c
+@@ -153,7 +153,7 @@ gost-s-box: gost-s-box.c
if ENABLE_O_FLAG_MUNGING
--o_flag_munging = sed -e 's/-O\([2-9sg][2-9sg]*\)/-O1/' -e 's/-Ofast/-O1/g'
-+o_flag_munging = sed -e 's/ -O\([2-9sg][2-9sg]*\) / -O1 /' -e 's/ -Ofast / -O1 /g'
+-o_flag_munging = sed -e 's/-O\([2-9sgz][2-9sgz]*\)/-O1/' -e 's/-Ofast/-O1/g'
++o_flag_munging = sed -e 's/ -O\([2-9sgz][2-9sgz]*\) / -O1 /' -e 's/ -Ofast / -O1 /g'
else
o_flag_munging = cat
endif
diff --git a/meta/recipes-support/libgcrypt/files/0003-tests-bench-slope.c-workaround-ICE-failure-on-mips-w.patch b/meta/recipes-support/libgcrypt/files/0003-tests-bench-slope.c-workaround-ICE-failure-on-mips-w.patch
deleted file mode 100644
index 105df2957e..0000000000
--- a/meta/recipes-support/libgcrypt/files/0003-tests-bench-slope.c-workaround-ICE-failure-on-mips-w.patch
+++ /dev/null
@@ -1,79 +0,0 @@
-From 7cc702c7b5a1ccc2b0091f3effa1391b6c3030fd Mon Sep 17 00:00:00 2001
-From: Hongxu Jia <hongxu.jia@windriver.com>
-Date: Wed, 16 Aug 2017 10:46:28 +0800
-Subject: [PATCH 3/4] tests/bench-slope.c: workaround ICE failure on mips with
- '-O -g'
-
-Hit a ICE and could reduce it to the following minimal example:
-
-1. Only the size of array assigned with 2 caused the issue:
-$ cat > mipgcc-test.c << END
-
-int main (int argc, char **argv)
-{
- char *pStrArry[ARRAY_SIZE_MAX] = {"hello"};
- int i = 0;
-
- while(pStrArry[i] && i<ARRAY_SIZE_MAX)
- {
- printf("%s\n", pStrArry[i]);
- i++;
- }
-
- return 0;
-}
-
-END
-
-2. Only -O1 and -g on mips caused the issue:
-$ mips-poky-linux-gcc -O1 -g -o mipgcc-test mipgcc-test.c
-mipgcc-test.c: In function 'main':
-mipgcc-test.c:18:1: internal compiler error: in dwarf2out_var_location,
-at dwarf2out.c:20810
- }
- ^
-Please submit a full bug report,
-with preprocessed source if appropriate.
-See <http://gcc.gnu.org/bugs.html> for instructions
-
-3. The quick workround is trying to enlarge the size of array with
-larger
-than 2.
-
-4. File a bug to GNU, but it could not be reproduced on there
-environment.
-http://gcc.gnu.org/bugzilla/show_bug.cgi?id=60643
-
-Upstream-Status: Inappropriate [oe specific]
-
-Rebase to 1.8.0
-Signed-off-by: Hongxu Jia <hongxu.jia@windriver.com>
----
- tests/bench-slope.c | 4 ++--
- 1 file changed, 2 insertions(+), 2 deletions(-)
-
-diff --git a/tests/bench-slope.c b/tests/bench-slope.c
-index 75e6e43..4e70842 100644
---- a/tests/bench-slope.c
-+++ b/tests/bench-slope.c
-@@ -1463,7 +1463,7 @@ static struct bench_ops hash_ops = {
- };
-
-
--static struct bench_hash_mode hash_modes[] = {
-+static struct bench_hash_mode hash_modes[3] = {
- {"", &hash_ops},
- {0},
- };
-@@ -1629,7 +1629,7 @@ static struct bench_ops mac_ops = {
- };
-
-
--static struct bench_mac_mode mac_modes[] = {
-+static struct bench_mac_mode mac_modes[3] = {
- {"", &mac_ops},
- {0},
- };
---
-1.8.3.1
-
diff --git a/meta/recipes-support/libgcrypt/files/no-bench-slope.patch b/meta/recipes-support/libgcrypt/files/no-bench-slope.patch
new file mode 100644
index 0000000000..8de3c6713b
--- /dev/null
+++ b/meta/recipes-support/libgcrypt/files/no-bench-slope.patch
@@ -0,0 +1,20 @@
+The bench-slope test appears to be aborting fairly frequently, which causes
+failures on the autobuilder.
+
+Until this has been root-caused, disable the test.
+
+Upstream-Status: Inappropriate
+Signed-off-by: Ross Burton <ross.burton@arm.com>
+
+diff --git a/tests/testdrv.c b/tests/testdrv.c
+index 0ccde326..d3455186 100644
+--- a/tests/testdrv.c
++++ b/tests/testdrv.c
+@@ -77,7 +77,6 @@ static struct {
+ { "t-x448" },
+ { "t-ed448" },
+ { "benchmark" },
+- { "bench-slope" },
+ { "hashtest-256g", "hashtest", "--gigs 256 SHA1 SHA256 SHA512 SM3",
+ LONG_RUNNING },
+ { NULL }
diff --git a/meta/recipes-support/libgcrypt/files/no-native-gpg-error.patch b/meta/recipes-support/libgcrypt/files/no-native-gpg-error.patch
new file mode 100644
index 0000000000..b9a607863d
--- /dev/null
+++ b/meta/recipes-support/libgcrypt/files/no-native-gpg-error.patch
@@ -0,0 +1,18 @@
+Don't depend on a native libgpg-error to build the test driver, as it's
+an optional dependency for some C annotations.
+
+Upstream-Status: Inappropriate
+Signed-off-by: Ross Burton <ross.burton@arm.com>
+
+diff --git a/tests/testdrv.c b/tests/testdrv.c
+index 0ccde326..6d6abd57 100644
+--- a/tests/testdrv.c
++++ b/tests/testdrv.c
+@@ -32,7 +32,6 @@
+ # include <fcntl.h>
+ # include <sys/wait.h>
+ #endif
+-#include <gpg-error.h> /* For some macros. */
+
+ #include "stopwatch.h"
+
diff --git a/meta/recipes-support/libgcrypt/files/run-ptest b/meta/recipes-support/libgcrypt/files/run-ptest
index 4818a061b4..c349ae1944 100644
--- a/meta/recipes-support/libgcrypt/files/run-ptest
+++ b/meta/recipes-support/libgcrypt/files/run-ptest
@@ -1,3 +1,9 @@
#!/bin/sh
-make -C build/tests runtest-TESTS
+# Run the tests in regression mode so they are quicker
+export GCRYPT_IN_REGRESSION_TEST=1
+# The 'random' test invokes itself, so we need to be sure that the test
+# directory is on PATH.
+export PATH=$PATH:.
+
+./testdrv --verbose
diff --git a/meta/recipes-support/libgcrypt/libgcrypt_1.10.1.bb b/meta/recipes-support/libgcrypt/libgcrypt_1.10.1.bb
deleted file mode 100644
index f7108013d3..0000000000
--- a/meta/recipes-support/libgcrypt/libgcrypt_1.10.1.bb
+++ /dev/null
@@ -1,75 +0,0 @@
-SUMMARY = "General purpose cryptographic library based on the code from GnuPG"
-DESCRIPTION = "A cryptography library developed as a separated module of GnuPG. \
-It can also be used independently of GnuPG, but depends on its error-reporting \
-library Libgpg-error."
-HOMEPAGE = "http://directory.fsf.org/project/libgcrypt/"
-BUGTRACKER = "https://bugs.g10code.com/gnupg/index"
-SECTION = "libs"
-
-# helper program gcryptrnd and getrandom are under GPL, rest LGPL
-LICENSE = "GPL-2.0-or-later & LGPL-2.1-or-later"
-LICENSE:${PN} = "LGPL-2.1-or-later"
-LICENSE:${PN}-dev = "GPL-2.0-or-later & LGPL-2.1-or-later"
-
-LIC_FILES_CHKSUM = "file://COPYING;md5=94d55d512a9ba36caa9b7df079bae19f \
- file://COPYING.LIB;md5=bbb461211a33b134d42ed5ee802b37ff \
- file://LICENSES;md5=ef545b6cc717747072616519a1256d69 \
- "
-
-DEPENDS = "libgpg-error"
-RDEPENDS:${PN}-ptest = "bash make"
-
-UPSTREAM_CHECK_URI = "https://gnupg.org/download/index.html"
-SRC_URI = "${GNUPG_MIRROR}/libgcrypt/libgcrypt-${PV}.tar.bz2 \
- file://0001-libgcrypt-fix-m4-file-for-oe-core.patch \
- file://0003-tests-bench-slope.c-workaround-ICE-failure-on-mips-w.patch \
- file://0002-libgcrypt-fix-building-error-with-O2-in-sysroot-path.patch \
- file://0004-tests-Makefile.am-fix-undefined-reference-to-pthread.patch \
- file://run-ptest \
- "
-SRC_URI[sha256sum] = "ef14ae546b0084cd84259f61a55e07a38c3b53afc0f546bffcef2f01baffe9de"
-
-# Below whitelisted CVEs are disputed and not affecting crypto libraries for any distro.
-CVE_CHECK_IGNORE += "CVE-2018-12433 CVE-2018-12438"
-
-BINCONFIG = "${bindir}/libgcrypt-config"
-
-inherit autotools texinfo binconfig-disabled pkgconfig ptest
-
-EXTRA_OECONF = "--disable-asm"
-EXTRA_OEMAKE:class-target = "LIBTOOLFLAGS='--tag=CC'"
-
-PRIVATE_LIBS:${PN}-ptest:append = " libgcrypt.so.20"
-
-PACKAGECONFIG ??= "capabilities"
-PACKAGECONFIG[capabilities] = "--with-capabilities,--without-capabilities,libcap"
-
-do_configure:prepend () {
- # Else this could be used in preference to the one in aclocal-copy
- rm -f ${S}/m4/gpg-error.m4
-}
-
-# libgcrypt.pc is added locally and thus installed here
-do_install:append() {
- install -d ${D}/${libdir}/pkgconfig
- install -m 0644 ${B}/src/libgcrypt.pc ${D}/${libdir}/pkgconfig/
-}
-
-do_install_ptest() {
- cp -r --preserve=mode,links -v ${S} ${D}${PTEST_PATH}
- cp -r --preserve=mode,links -v ${B} ${D}${PTEST_PATH}
- rm ${D}${PTEST_PATH}/build/cipher/gost-s-box
- rm ${D}${PTEST_PATH}/build/doc/yat2m
- rm ${D}${PTEST_PATH}/build/libtool
- rm ${D}${PTEST_PATH}/build/config.status
- rm ${D}${PTEST_PATH}/build/config.log
- rm ${D}${PTEST_PATH}/build/src/mpicalc
- rm ${D}${PTEST_PATH}/${BP}/autom4te* -rf
- sed -i -e 's/Makefile:.*/Makefile-disabled:/' ${D}${PTEST_PATH}/build/Makefile
- find ${D}/${PTEST_PATH}/build -name "*.cmake" -or -name "Makefile" \
- | xargs sed -e "s|${WORKDIR}|${PTEST_PATH}|g" -e "s|${WORKDIR}/recipe-sysroot-native||g" -i
-}
-
-FILES:${PN}-dev += "${bindir}/hmac256 ${bindir}/dumpsexp"
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-support/libgcrypt/libgcrypt_1.10.3.bb b/meta/recipes-support/libgcrypt/libgcrypt_1.10.3.bb
new file mode 100644
index 0000000000..5a76201ab5
--- /dev/null
+++ b/meta/recipes-support/libgcrypt/libgcrypt_1.10.3.bb
@@ -0,0 +1,55 @@
+SUMMARY = "General purpose cryptographic library based on the code from GnuPG"
+DESCRIPTION = "A cryptography library developed as a separated module of GnuPG. \
+It can also be used independently of GnuPG, but depends on its error-reporting \
+library Libgpg-error."
+HOMEPAGE = "http://directory.fsf.org/project/libgcrypt/"
+BUGTRACKER = "https://bugs.g10code.com/gnupg/index"
+SECTION = "libs"
+
+# helper program gcryptrnd and getrandom are under GPL, rest LGPL
+LICENSE = "GPL-2.0-or-later & LGPL-2.1-or-later"
+LICENSE:${PN} = "LGPL-2.1-or-later"
+LICENSE:${PN}-dev = "GPL-2.0-or-later & LGPL-2.1-or-later"
+
+LIC_FILES_CHKSUM = "file://COPYING;md5=94d55d512a9ba36caa9b7df079bae19f \
+ file://COPYING.LIB;md5=bbb461211a33b134d42ed5ee802b37ff \
+ file://LICENSES;md5=ef545b6cc717747072616519a1256d69 \
+ "
+
+DEPENDS = "libgpg-error"
+
+UPSTREAM_CHECK_URI = "https://gnupg.org/download/index.html"
+SRC_URI = "${GNUPG_MIRROR}/libgcrypt/libgcrypt-${PV}.tar.bz2 \
+ file://0001-libgcrypt-fix-m4-file-for-oe-core.patch \
+ file://0002-libgcrypt-fix-building-error-with-O2-in-sysroot-path.patch \
+ file://0004-tests-Makefile.am-fix-undefined-reference-to-pthread.patch \
+ file://no-native-gpg-error.patch \
+ file://no-bench-slope.patch \
+ file://run-ptest \
+ "
+SRC_URI[sha256sum] = "8b0870897ac5ac67ded568dcfadf45969cfa8a6beb0fd60af2a9eadc2a3272aa"
+
+BINCONFIG = "${bindir}/libgcrypt-config"
+
+inherit autotools texinfo binconfig-disabled pkgconfig ptest
+
+EXTRA_OECONF = "--disable-asm"
+EXTRA_OEMAKE:class-target = "LIBTOOLFLAGS='--tag=CC'"
+
+PACKAGECONFIG ??= "capabilities"
+PACKAGECONFIG[capabilities] = "--with-capabilities,--without-capabilities,libcap"
+
+do_configure:prepend () {
+ # Else this could be used in preference to the one in aclocal-copy
+ rm -f ${S}/m4/gpg-error.m4
+}
+
+do_install_ptest() {
+ cd tests
+ oe_runmake testdrv-build testdrv
+ install testdrv $(srcdir=${S}/tests ./testdrv-build --files | sort | uniq) ${D}${PTEST_PATH}
+}
+
+FILES:${PN}-dev += "${bindir}/hmac256 ${bindir}/dumpsexp"
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-support/libgit2/libgit2_1.4.3.bb b/meta/recipes-support/libgit2/libgit2_1.4.3.bb
deleted file mode 100644
index 7e27b5b018..0000000000
--- a/meta/recipes-support/libgit2/libgit2_1.4.3.bb
+++ /dev/null
@@ -1,22 +0,0 @@
-SUMMARY = "the Git linkable library"
-HOMEPAGE = "http://libgit2.github.com/"
-LICENSE = "GPL-2.0-with-GCC-exception & MIT & OpenSSL"
-LIC_FILES_CHKSUM = "file://COPYING;md5=e5a9227de4cb6afb5d35ed7b0fdf480d"
-
-DEPENDS = "curl openssl zlib libssh2 libgcrypt libpcre2"
-
-SRC_URI = "git://github.com/libgit2/libgit2.git;branch=maint/v1.4;protocol=https"
-SRCREV = "465bbf88ea939a965fbcbade72870c61f815e457"
-
-S = "${WORKDIR}/git"
-
-inherit cmake
-
-EXTRA_OECMAKE = "\
- -DBUILD_CLAR=OFF \
- -DCMAKE_BUILD_TYPE=RelWithDebInfo \
- -DLIB_INSTALL_DIR=${libdir} \
- -DREGEX_BACKEND='pcre2' \
-"
-
-BBCLASSEXTEND = "native"
diff --git a/meta/recipes-support/libgit2/libgit2_1.7.2.bb b/meta/recipes-support/libgit2/libgit2_1.7.2.bb
new file mode 100644
index 0000000000..d1629c0c0b
--- /dev/null
+++ b/meta/recipes-support/libgit2/libgit2_1.7.2.bb
@@ -0,0 +1,21 @@
+SUMMARY = "the Git linkable library"
+HOMEPAGE = "http://libgit2.github.com/"
+LICENSE = "GPL-2.0-with-GCC-exception & MIT & OpenSSL & BSD-3-Clause & Zlib & ISC & LGPL-2.1-or-later & CC0-1.0 & BSD-2-Clause"
+LIC_FILES_CHKSUM = "file://COPYING;md5=5bdf47bbc9a39dc6ce076d59e322dc17"
+
+DEPENDS = "curl openssl zlib libssh2 libgcrypt libpcre2"
+
+SRC_URI = "git://github.com/libgit2/libgit2.git;branch=maint/v1.7;protocol=https"
+SRCREV = "a418d9d4ab87bae16b87d8f37143a4687ae0e4b2"
+
+S = "${WORKDIR}/git"
+
+inherit cmake
+
+EXTRA_OECMAKE = "\
+ -DBUILD_TESTS=OFF \
+ -DCMAKE_BUILD_TYPE=RelWithDebInfo \
+ -DREGEX_BACKEND='pcre2' \
+"
+
+BBCLASSEXTEND = "native"
diff --git a/meta/recipes-support/libgpg-error/libgpg-error/0001-Do-not-fail-when-testing-config-scripts.patch b/meta/recipes-support/libgpg-error/libgpg-error/0001-Do-not-fail-when-testing-config-scripts.patch
index a0ab4f23ff..d72838bb05 100644
--- a/meta/recipes-support/libgpg-error/libgpg-error/0001-Do-not-fail-when-testing-config-scripts.patch
+++ b/meta/recipes-support/libgpg-error/libgpg-error/0001-Do-not-fail-when-testing-config-scripts.patch
@@ -1,4 +1,4 @@
-From 137805890951b6b585c1d23097e0a51e8c7a8a50 Mon Sep 17 00:00:00 2001
+From 75622f8b21459eb556b36edc3537fdaded7f04bf Mon Sep 17 00:00:00 2001
From: Alexander Kanavin <alex.kanavin@gmail.com>
Date: Fri, 28 Feb 2020 09:19:56 +0000
Subject: [PATCH] Do not fail when testing config scripts
@@ -14,14 +14,13 @@ Signed-off-by: Alexander Kanavin <alex.kanavin@gmail.com>
1 file changed, 1 deletion(-)
diff --git a/src/Makefile.am b/src/Makefile.am
-index 9303954..7630540 100644
+index 9389e99..6ded263 100644
--- a/src/Makefile.am
+++ b/src/Makefile.am
-@@ -344,7 +344,6 @@ gpg-error-config: gpgrt-config gpg-error-config-old
+@@ -324,6 +324,5 @@ gpg-error-config: gpgrt-config gpg-error-config-old gpg-error-config-test.sh
else \
echo "no"; \
echo "*** Please report to <https://bugs.gnupg.org> with gpg-error-config-test.log"; \
- exit 1; \
fi
cp gpg-error-config-old $@
-
diff --git a/meta/recipes-support/libgpg-error/libgpg-error/pkgconfig.patch b/meta/recipes-support/libgpg-error/libgpg-error/pkgconfig.patch
index 82bb4cdb47..3d886bcaa4 100644
--- a/meta/recipes-support/libgpg-error/libgpg-error/pkgconfig.patch
+++ b/meta/recipes-support/libgpg-error/libgpg-error/pkgconfig.patch
@@ -1,4 +1,4 @@
-From 0214d886d0153f7288f81213c47dc50f3b9fde20 Mon Sep 17 00:00:00 2001
+From b0df1cb468264a9bb9113524f0e1318c456b2348 Mon Sep 17 00:00:00 2001
From: Hongxu Jia <hongxu.jia@windriver.com>
Date: Thu, 29 Mar 2018 15:12:17 +0800
Subject: [PATCH] support pkgconfig
@@ -14,17 +14,19 @@ Signed-off-by: Armin Kuster <akuster808@gmail.com>
Signed-off-by: Zheng Ruoqin <zhengrq.fnst@cn.fujitsu.com>
---
- src/gpg-error.m4 | 161 +----------------------------------------------
- 1 file changed, 3 insertions(+), 158 deletions(-)
+ src/gpg-error.m4 | 192 ++---------------------------------------------
+ 1 file changed, 5 insertions(+), 187 deletions(-)
diff --git a/src/gpg-error.m4 b/src/gpg-error.m4
-index 4b5cd40..c9978d6 100644
+index 7fa52b1..15a0859 100644
--- a/src/gpg-error.m4
+++ b/src/gpg-error.m4
-@@ -27,158 +27,12 @@ dnl is added to the gpg_config_script_warn variable.
+@@ -15,187 +15,14 @@
dnl
- AC_DEFUN([AM_PATH_GPG_ERROR],
- [ AC_REQUIRE([AC_CANONICAL_HOST])
+ dnl Find gpg-error-config, for backward compatibility
+ dnl
+-dnl _AM_PATH_POSSIBLE_GPG_ERROR_CONFIG
+-AC_DEFUN([_AM_PATH_POSSIBLE_GPG_ERROR_CONFIG],[dnl
- gpg_error_config_prefix=""
- dnl --with-libgpg-error-prefix=PFX is the preferred name for this option,
- dnl since that is consistent with how our three siblings use the directory/
@@ -60,9 +62,14 @@ index 4b5cd40..c9978d6 100644
- fi
-
- AC_PATH_PROG(GPG_ERROR_CONFIG, gpg-error-config, no)
-- min_gpg_error_version=ifelse([$1], ,1.33,$1)
-- ok=no
+-])
-
+-dnl
+-dnl Find gpgrt-config, which uses .pc file
+-dnl (minimum pkg-config functionality, supporting cross build)
+-dnl
+-dnl _AM_PATH_GPGRT_CONFIG
+-AC_DEFUN([_AM_PATH_GPGRT_CONFIG],[dnl
- AC_PATH_PROG(GPGRT_CONFIG, gpgrt-config, no, [$prefix/bin:$PATH])
- if test "$GPGRT_CONFIG" != "no"; then
- # Determine gpgrt_libdir
@@ -118,8 +125,9 @@ index 4b5cd40..c9978d6 100644
- fi
- if test -n "$gpgrt_libdir"; then break; fi
- done
-- else
-- # When we cannot determine system libdir-format, use this:
+- fi
+- if test -z "$gpgrt_libdir"; then
+- # No valid pkgconfig dir in any of the system directories, fallback
- gpgrt_libdir=${possible_libdir1}
- fi
- else
@@ -133,12 +141,33 @@ index 4b5cd40..c9978d6 100644
- AC_MSG_NOTICE([Use gpgrt-config with $gpgrt_libdir as gpg-error-config])
- gpg_error_config_version=`$GPG_ERROR_CONFIG --modversion`
- else
+- gpg_error_config_version=`$GPG_ERROR_CONFIG --version`
- unset GPGRT_CONFIG
- fi
- elif test "$GPG_ERROR_CONFIG" != "no"; then
- gpg_error_config_version=`$GPG_ERROR_CONFIG --version`
- unset GPGRT_CONFIG
- fi
+-])
+-
+-dnl AM_PATH_GPG_ERROR([MINIMUM-VERSION,
+-dnl [ACTION-IF-FOUND [, ACTION-IF-NOT-FOUND ]]])
+-dnl
+-dnl Test for libgpg-error and define GPG_ERROR_CFLAGS, GPG_ERROR_LIBS,
+-dnl GPG_ERROR_MT_CFLAGS, and GPG_ERROR_MT_LIBS. The _MT_ variants are
+-dnl used for programs requireing real multi thread support.
+-dnl
+-dnl If a prefix option is not used, the config script is first
+-dnl searched in $SYSROOT/bin and then along $PATH. If the used
+-dnl config script does not match the host specification the script
+-dnl is added to the gpg_config_script_warn variable.
+-dnl
+-AC_DEFUN([AM_PATH_GPG_ERROR],[dnl
+-AC_REQUIRE([AC_CANONICAL_HOST])dnl
+-AC_REQUIRE([_AM_PATH_POSSIBLE_GPG_ERROR_CONFIG])dnl
+-AC_REQUIRE([_AM_PATH_GPGRT_CONFIG])dnl
+- min_gpg_error_version=ifelse([$1], ,1.33,$1)
+- ok=no
- if test "$GPG_ERROR_CONFIG" != "no"; then
- req_major=`echo $min_gpg_error_version | \
- sed 's/\([[0-9]]*\)\.\([[0-9]]*\)/\1/'`
@@ -159,6 +188,8 @@ index 4b5cd40..c9978d6 100644
- fi
- fi
- AC_MSG_CHECKING(for GPG Error - version >= $min_gpg_error_version)
++AC_DEFUN([AM_PATH_GPG_ERROR],
++[ AC_REQUIRE([AC_CANONICAL_HOST])
+ min_gpg_error_version=ifelse([$1], ,0.0,$1)
+ PKG_CHECK_MODULES(GPG_ERROR, [gpg-error >= $min_gpg_error_version], [ok=yes], [ok=no])
if test $ok = yes; then
@@ -183,7 +214,7 @@ index 4b5cd40..c9978d6 100644
fi
if test x"$gpg_error_config_host" != xnone ; then
if test x"$gpg_error_config_host" != x"$host" ; then
-@@ -194,15 +48,6 @@ AC_DEFUN([AM_PATH_GPG_ERROR],
+@@ -211,15 +38,6 @@ AC_REQUIRE([_AM_PATH_GPGRT_CONFIG])dnl
fi
fi
else
diff --git a/meta/recipes-support/libgpg-error/libgpg-error_1.45.bb b/meta/recipes-support/libgpg-error/libgpg-error_1.45.bb
deleted file mode 100644
index ff3a673df2..0000000000
--- a/meta/recipes-support/libgpg-error/libgpg-error_1.45.bb
+++ /dev/null
@@ -1,53 +0,0 @@
-SUMMARY = "Small library that defines common error values for all GnuPG components"
-DESCRIPTION = "Contains common error codes and error handling functions used by GnuPG, Libgcrypt, GPGME and more packages. "
-HOMEPAGE = "http://www.gnupg.org/related_software/libgpg-error/"
-BUGTRACKER = "https://bugs.g10code.com/gnupg/index"
-
-LICENSE = "GPL-2.0-or-later & LGPL-2.1-or-later"
-LIC_FILES_CHKSUM = "file://COPYING;md5=59530bdf33659b29e73d4adb9f9f6552 \
- file://COPYING.LIB;md5=2d5025d4aa3495befef8f17206a5b0a1 \
- file://src/gpg-error.h.in;beginline=2;endline=18;md5=d82591bc81561f617da71e00ff4a9d79 \
- file://src/init.c;beginline=2;endline=17;md5=f01cdfcf747af5380590cfd9bbfeaaf7 \
- "
-
-
-SECTION = "libs"
-
-UPSTREAM_CHECK_URI = "https://gnupg.org/download/index.html"
-SRC_URI = "${GNUPG_MIRROR}/libgpg-error/libgpg-error-${PV}.tar.bz2 \
- file://pkgconfig.patch \
- file://0001-Do-not-fail-when-testing-config-scripts.patch \
- file://run-ptest \
- "
-
-SRC_URI[sha256sum] = "570f8ee4fb4bff7b7495cff920c275002aea2147e9a1d220c068213267f80a26"
-
-BINCONFIG = "${bindir}/gpg-error-config"
-
-inherit autotools binconfig-disabled pkgconfig gettext multilib_header multilib_script ptest
-
-RDEPENDS:${PN}-ptest:append = " make"
-
-MULTILIB_SCRIPTS = "${PN}-dev:${bindir}/gpgrt-config"
-
-CPPFLAGS += "-P"
-
-do_install:append() {
- # we don't have common lisp in OE
- rm -rf "${D}${datadir}/common-lisp/"
- oe_multilib_header gpg-error.h gpgrt.h
-}
-
-do_compile_ptest() {
- oe_runmake -C tests buildtest-TESTS
-}
-
-do_install_ptest() {
- install ${B}/tests/t-*[!\.o] ${D}${PTEST_PATH}
- install ${B}/tests/Makefile ${D}${PTEST_PATH}
-}
-
-FILES:${PN}-dev += "${bindir}/gpg-error"
-FILES:${PN}-doc += "${datadir}/libgpg-error/errorref.txt"
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-support/libgpg-error/libgpg-error_1.48.bb b/meta/recipes-support/libgpg-error/libgpg-error_1.48.bb
new file mode 100644
index 0000000000..9f2b49209d
--- /dev/null
+++ b/meta/recipes-support/libgpg-error/libgpg-error_1.48.bb
@@ -0,0 +1,53 @@
+SUMMARY = "Small library that defines common error values for all GnuPG components"
+DESCRIPTION = "Contains common error codes and error handling functions used by GnuPG, Libgcrypt, GPGME and more packages. "
+HOMEPAGE = "http://www.gnupg.org/related_software/libgpg-error/"
+BUGTRACKER = "https://bugs.g10code.com/gnupg/index"
+
+LICENSE = "GPL-2.0-or-later & LGPL-2.1-or-later"
+LIC_FILES_CHKSUM = "file://COPYING;md5=59530bdf33659b29e73d4adb9f9f6552 \
+ file://COPYING.LIB;md5=2d5025d4aa3495befef8f17206a5b0a1 \
+ file://src/gpg-error.h.in;beginline=2;endline=18;md5=20f4cf5d81ea2fad18f6297500018654 \
+ file://src/init.c;beginline=2;endline=17;md5=f01cdfcf747af5380590cfd9bbfeaaf7 \
+ "
+
+
+SECTION = "libs"
+
+UPSTREAM_CHECK_URI = "https://gnupg.org/download/index.html"
+SRC_URI = "${GNUPG_MIRROR}/libgpg-error/libgpg-error-${PV}.tar.bz2 \
+ file://pkgconfig.patch \
+ file://0001-Do-not-fail-when-testing-config-scripts.patch \
+ file://run-ptest \
+ "
+
+SRC_URI[sha256sum] = "89ce1ae893e122924b858de84dc4f67aae29ffa610ebf668d5aa539045663d6f"
+
+BINCONFIG = "${bindir}/gpg-error-config"
+
+inherit autotools binconfig-disabled pkgconfig gettext multilib_header multilib_script ptest
+
+RDEPENDS:${PN}-ptest:append = " make bash"
+
+MULTILIB_SCRIPTS = "${PN}-dev:${bindir}/gpgrt-config"
+
+CPPFLAGS += "-P"
+
+do_install:append() {
+ # we don't have common lisp in OE
+ rm -rf "${D}${datadir}/common-lisp/"
+ oe_multilib_header gpg-error.h gpgrt.h
+}
+
+do_compile_ptest() {
+ oe_runmake -C tests buildtest-TESTS
+}
+
+do_install_ptest() {
+ install ${B}/tests/t-*[!\.o] ${D}${PTEST_PATH}
+ install ${B}/tests/Makefile ${D}${PTEST_PATH}
+}
+
+FILES:${PN}-dev += "${bindir}/gpg-error"
+FILES:${PN}-doc += "${datadir}/libgpg-error/errorref.txt"
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-support/libical/libical_3.0.14.bb b/meta/recipes-support/libical/libical_3.0.14.bb
deleted file mode 100644
index 58baf3f32f..0000000000
--- a/meta/recipes-support/libical/libical_3.0.14.bb
+++ /dev/null
@@ -1,55 +0,0 @@
-SUMMARY = "iCal and scheduling (RFC 2445, 2446, 2447) library"
-DESCRIPTION = "An Open Source implementation of the iCalendar protocols \
-and protocol data units. The iCalendar specification describes how \
-calendar clients can communicate with calendar servers so users can store \
-their calendar data and arrange meetings with other users. "
-HOMEPAGE = "https://github.com/libical/libical"
-BUGTRACKER = "https://github.com/libical/libical/issues"
-LICENSE = "LGPL-2.1-only | MPL-2.0"
-LIC_FILES_CHKSUM = "file://LICENSE;md5=1910a2a76ddf6a9ba369182494170d87 \
- file://LICENSE.LGPL21.txt;md5=933adb561f159e7c3da079536f0ed871 \
- file://LICENSE.MPL2.txt;md5=f75d2927d3c1ed2414ef72048f5ad640 \
- "
-SECTION = "libs"
-
-SRC_URI = "https://github.com/${BPN}/${BPN}/releases/download/v${PV}/${BP}.tar.gz \
- file://0001-cmake-Do-not-export-CC-into-gir-compiler.patch \
- "
-SRC_URI[sha256sum] = "4284b780356f1dc6a01f16083e7b836e63d3815e27ed0eaaad684712357ccc8f"
-UPSTREAM_CHECK_URI = "https://github.com/libical/libical/releases"
-
-inherit cmake pkgconfig gobject-introspection vala
-
-DEPENDS += "libical-native"
-
-PACKAGECONFIG ??= "icu glib"
-PACKAGECONFIG[bdb] = ",-DCMAKE_DISABLE_FIND_PACKAGE_BDB=True,db"
-PACKAGECONFIG[glib] = "-DICAL_GLIB=True,-DICAL_GLIB=False,glib-2.0-native libxml2-native glib-2.0 libxml2"
-# ICU is used for RSCALE (RFC7529) support
-PACKAGECONFIG[icu] = ",-DCMAKE_DISABLE_FIND_PACKAGE_ICU=True,icu"
-
-# No need to use perl-native, the host perl is sufficient.
-EXTRA_OECMAKE += "-DPERL_EXECUTABLE=${HOSTTOOLS_DIR}/perl"
-# Disable the test suite as we can't install it
-EXTRA_OECMAKE += "-DLIBICAL_BUILD_TESTING=false"
-# doc build fails with linker error (??) for libical-glib so disable it
-EXTRA_OECMAKE += "-DICAL_BUILD_DOCS=false"
-# gobject-introspection
-EXTRA_OECMAKE:append:class-target = " -DGObjectIntrospection_COMPILER=${STAGING_BINDIR}/g-ir-compiler-wrapper"
-EXTRA_OECMAKE:append:class-target = " -DGObjectIntrospection_SCANNER=${STAGING_BINDIR}/g-ir-scanner-wrapper"
-EXTRA_OECMAKE += "-DVAPIGEN=${STAGING_BINDIR_NATIVE}/vapigen"
-EXTRA_OECMAKE += "${@bb.utils.contains('GI_DATA_ENABLED', 'True', '-DGOBJECT_INTROSPECTION=ON -DICAL_GLIB_VAPI=ON', '-DGOBJECT_INTROSPECTION=OFF', d)}"
-
-# Tell the cross-libical where the tool it needs to build is
-EXTRA_OECMAKE:append:class-target = " -DIMPORT_ICAL_GLIB_SRC_GENERATOR=${STAGING_LIBDIR_NATIVE}/cmake/LibIcal/IcalGlibSrcGenerator.cmake"
-
-do_install:append () {
- # Remove build host references (https://github.com/libical/libical/issues/532)
- sed -i \
- -e 's,${STAGING_LIBDIR},${libdir},g' \
- -e 's,${STAGING_INCDIR},${includedir},g' \
- ${D}${libdir}/cmake/LibIcal/LibIcal*.cmake \
- ${D}${libdir}/cmake/LibIcal/Ical*.cmake
-}
-
-BBCLASSEXTEND = "native"
diff --git a/meta/recipes-support/libical/libical_3.0.18.bb b/meta/recipes-support/libical/libical_3.0.18.bb
new file mode 100644
index 0000000000..040d23b69d
--- /dev/null
+++ b/meta/recipes-support/libical/libical_3.0.18.bb
@@ -0,0 +1,54 @@
+SUMMARY = "iCal and scheduling (RFC 2445, 2446, 2447) library"
+DESCRIPTION = "An Open Source implementation of the iCalendar protocols \
+and protocol data units. The iCalendar specification describes how \
+calendar clients can communicate with calendar servers so users can store \
+their calendar data and arrange meetings with other users. "
+HOMEPAGE = "https://github.com/libical/libical"
+BUGTRACKER = "https://github.com/libical/libical/issues"
+LICENSE = "LGPL-2.1-only | MPL-2.0"
+LIC_FILES_CHKSUM = "file://LICENSE;md5=1910a2a76ddf6a9ba369182494170d87 \
+ file://LICENSE.LGPL21.txt;md5=8f690bb538f4b301d931374a6eb864d0 \
+ file://LICENSE.MPL2.txt;md5=f75d2927d3c1ed2414ef72048f5ad640 \
+ "
+SECTION = "libs"
+
+SRC_URI = "${GITHUB_BASE_URI}/download/v${PV}/${BP}.tar.gz \
+ file://0001-cmake-Do-not-export-CC-into-gir-compiler.patch \
+ "
+SRC_URI[sha256sum] = "72b7dc1a5937533aee5a2baefc990983b66b141dd80d43b51f80aced4aae219c"
+
+inherit cmake pkgconfig gobject-introspection vala github-releases
+
+DEPENDS += "libical-native"
+
+PACKAGECONFIG ??= "icu glib"
+PACKAGECONFIG[bdb] = ",-DCMAKE_DISABLE_FIND_PACKAGE_BDB=True,db"
+PACKAGECONFIG[glib] = "-DICAL_GLIB=True,-DICAL_GLIB=False,glib-2.0-native libxml2-native glib-2.0 libxml2"
+# ICU is used for RSCALE (RFC7529) support
+PACKAGECONFIG[icu] = ",-DCMAKE_DISABLE_FIND_PACKAGE_ICU=True,icu"
+
+# No need to use perl-native, the host perl is sufficient.
+EXTRA_OECMAKE += "-DPERL_EXECUTABLE=${HOSTTOOLS_DIR}/perl"
+# Disable the test suite as we can't install it
+EXTRA_OECMAKE += "-DLIBICAL_BUILD_TESTING=false"
+# doc build fails with linker error (??) for libical-glib so disable it
+EXTRA_OECMAKE += "-DICAL_BUILD_DOCS=false"
+# gobject-introspection
+EXTRA_OECMAKE:append:class-target = " -DGObjectIntrospection_COMPILER=${STAGING_BINDIR}/g-ir-compiler-wrapper"
+EXTRA_OECMAKE:append:class-target = " -DGObjectIntrospection_SCANNER=${STAGING_BINDIR}/g-ir-scanner-wrapper"
+EXTRA_OECMAKE += "-DVAPIGEN=${STAGING_BINDIR_NATIVE}/vapigen"
+EXTRA_OECMAKE += "${@bb.utils.contains('GI_DATA_ENABLED', 'True', '-DGOBJECT_INTROSPECTION=ON -DICAL_GLIB_VAPI=ON', '-DGOBJECT_INTROSPECTION=OFF', d)}"
+
+# Tell the cross-libical where the tool it needs to build is
+EXTRA_OECMAKE:append:class-target = " -DIMPORT_ICAL_GLIB_SRC_GENERATOR=${STAGING_LIBDIR_NATIVE}/cmake/LibIcal/IcalGlibSrcGenerator.cmake"
+
+do_install:append () {
+ # Remove build host references (https://github.com/libical/libical/issues/532)
+ sed -i \
+ -e 's,${STAGING_LIBDIR},${libdir},g' \
+ -e 's,${STAGING_INCDIR},${includedir},g' \
+ ${D}${libdir}/cmake/LibIcal/LibIcal*.cmake \
+ ${D}${libdir}/cmake/LibIcal/Ical*.cmake
+}
+
+BBCLASSEXTEND = "native"
diff --git a/meta/recipes-support/libjitterentropy/libjitterentropy_3.4.0.bb b/meta/recipes-support/libjitterentropy/libjitterentropy_3.4.0.bb
deleted file mode 100644
index 1d291cc357..0000000000
--- a/meta/recipes-support/libjitterentropy/libjitterentropy_3.4.0.bb
+++ /dev/null
@@ -1,30 +0,0 @@
-SUMMARY = "Hardware RNG based on CPU timing jitter"
-DESCRIPTION = "The Jitter RNG provides a noise source using the CPU execution timing jitter. \
-It does not depend on any system resource other than a high-resolution time \
-stamp. It is a small-scale, yet fast entropy source that is viable in almost \
-all environments and on a lot of CPU architectures."
-HOMEPAGE = "http://www.chronox.de/jent.html"
-LICENSE = "GPL-2.0-or-later | BSD-3-Clause"
-LIC_FILES_CHKSUM = "file://LICENSE;md5=64a87180908540620ce364b5e69b3b03 \
- file://LICENSE.gplv2;md5=eb723b61539feef013de476e68b5c50a \
- file://LICENSE.bsd;md5=66a5cedaf62c4b2637025f049f9b826f \
- "
-SRC_URI = "git://github.com/smuellerDD/jitterentropy-library.git;branch=master;protocol=https"
-SRCREV = "2e5019cfe63038faaa405ce53715effe4ea580e4"
-S = "${WORKDIR}/git"
-
-# remove at next version upgrade or when output changes
-HASHEQUIV_HASH_VERSION .= ".2"
-
-do_configure[noexec] = "1"
-
-LDFLAGS += "-Wl,-O0"
-
-do_install () {
- oe_runmake install INCDIR="/include" \
- DESTDIR="${D}" \
- PREFIX="${exec_prefix}" \
- LIBDIR="${baselib}" \
- INSTALL_STRIP="install"
-}
-
diff --git a/meta/recipes-support/libjitterentropy/libjitterentropy_3.4.1.bb b/meta/recipes-support/libjitterentropy/libjitterentropy_3.4.1.bb
new file mode 100644
index 0000000000..6cef8c068c
--- /dev/null
+++ b/meta/recipes-support/libjitterentropy/libjitterentropy_3.4.1.bb
@@ -0,0 +1,27 @@
+SUMMARY = "Hardware RNG based on CPU timing jitter"
+DESCRIPTION = "The Jitter RNG provides a noise source using the CPU execution timing jitter. \
+It does not depend on any system resource other than a high-resolution time \
+stamp. It is a small-scale, yet fast entropy source that is viable in almost \
+all environments and on a lot of CPU architectures."
+HOMEPAGE = "http://www.chronox.de/jent.html"
+LICENSE = "GPL-2.0-or-later | BSD-3-Clause"
+LIC_FILES_CHKSUM = "file://LICENSE;md5=64a87180908540620ce364b5e69b3b03 \
+ file://LICENSE.gplv2;md5=eb723b61539feef013de476e68b5c50a \
+ file://LICENSE.bsd;md5=66a5cedaf62c4b2637025f049f9b826f \
+ "
+SRC_URI = "git://github.com/smuellerDD/jitterentropy-library.git;branch=master;protocol=https"
+SRCREV = "4544e11320138ac02797af81766f4476a71bb09f"
+S = "${WORKDIR}/git"
+
+do_configure[noexec] = "1"
+
+LDFLAGS += "-Wl,-O0"
+
+do_install () {
+ oe_runmake install INCDIR="/include" \
+ DESTDIR="${D}" \
+ PREFIX="${exec_prefix}" \
+ LIBDIR="${baselib}" \
+ INSTALL_STRIP="install"
+}
+
diff --git a/meta/recipes-support/libksba/libksba/ksba-add-pkgconfig-support.patch b/meta/recipes-support/libksba/libksba/ksba-add-pkgconfig-support.patch
index af96bd57cd..bdb80ff34d 100644
--- a/meta/recipes-support/libksba/libksba/ksba-add-pkgconfig-support.patch
+++ b/meta/recipes-support/libksba/libksba/ksba-add-pkgconfig-support.patch
@@ -1,4 +1,4 @@
-From 6081640895b6d566fa21123e2de7d111eeab5c4c Mon Sep 17 00:00:00 2001
+From ca8174aa81d7bf364b33f7254a9e887735c4996d Mon Sep 17 00:00:00 2001
From: Chen Qi <Qi.Chen@windriver.com>
Date: Mon, 3 Dec 2012 18:17:31 +0800
Subject: [PATCH] libksba: add pkgconfig support
@@ -16,7 +16,7 @@ Signed-off-by: Chen Qi <Qi.Chen@windriver.com>
1 file changed, 4 insertions(+), 86 deletions(-)
diff --git a/src/ksba.m4 b/src/ksba.m4
-index 6b55bb8..6e7336f 100644
+index 452c245..aa96255 100644
--- a/src/ksba.m4
+++ b/src/ksba.m4
@@ -23,37 +23,6 @@ dnl with a changed API.
@@ -44,7 +44,7 @@ index 6b55bb8..6e7336f 100644
- fi
-
- use_gpgrt_config=""
-- if test x"$KSBA_CONFIG" = x -a x"$GPGRT_CONFIG" != x -a "$GPGRT_CONFIG" != "no"; then
+- if test x"$GPGRT_CONFIG" != x -a "$GPGRT_CONFIG" != "no"; then
- if $GPGRT_CONFIG ksba --exists; then
- KSBA_CONFIG="$GPGRT_CONFIG ksba"
- AC_MSG_NOTICE([Use gpgrt-config as ksba-config])
diff --git a/meta/recipes-support/libksba/libksba_1.6.0.bb b/meta/recipes-support/libksba/libksba_1.6.0.bb
deleted file mode 100644
index f9e83681dd..0000000000
--- a/meta/recipes-support/libksba/libksba_1.6.0.bb
+++ /dev/null
@@ -1,34 +0,0 @@
-SUMMARY = "Easy API to create and parse X.509 and CMS related objects"
-DESCRIPTION = "A library to make the tasks of working with X.509 certificates, \
-CMS data and related objects more easy. It provides a highlevel interface to \
-the implemented protocols and presents the data in a consistent way. The \
-library does not rely on another cryptographic library but provides \
-hooks for easy integration with Libgcrypt. "
-HOMEPAGE = "http://www.gnupg.org/related_software/libksba/"
-LICENSE = "GPL-3.0-or-later & (GPL-2.0-or-later | LGPL-3.0-or-later)"
-LICENSE:${PN} = "GPL-2.0-or-later | LGPL-3.0-or-later"
-LICENSE:${PN}-doc = "GPL-3.0-or-later"
-LIC_FILES_CHKSUM = "file://COPYING;md5=fd541d83f75d038c4e0617b672ed8bda \
- file://COPYING.GPLv2;md5=b234ee4d69f5fce4486a80fdaf4a4263 \
- file://COPYING.GPLv3;md5=2f31b266d3440dd7ee50f92cf67d8e6c \
- file://COPYING.LGPLv3;md5=e6a600fd5e1d9cbde2d983680233ad02 \
- "
-
-DEPENDS = "libgpg-error"
-
-BINCONFIG = "${bindir}/ksba-config"
-
-inherit autotools binconfig-disabled pkgconfig texinfo
-
-UPSTREAM_CHECK_URI = "https://gnupg.org/download/index.html"
-SRC_URI = "${GNUPG_MIRROR}/${BPN}/${BPN}-${PV}.tar.bz2 \
- file://ksba-add-pkgconfig-support.patch"
-
-SRC_URI[sha256sum] = "dad683e6f2d915d880aa4bed5cea9a115690b8935b78a1bbe01669189307a48b"
-
-do_configure:prepend () {
- # Else these could be used in preference to those in aclocal-copy
- rm -f ${S}/m4/gpg-error.m4
-}
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-support/libksba/libksba_1.6.6.bb b/meta/recipes-support/libksba/libksba_1.6.6.bb
new file mode 100644
index 0000000000..2230a032f6
--- /dev/null
+++ b/meta/recipes-support/libksba/libksba_1.6.6.bb
@@ -0,0 +1,34 @@
+SUMMARY = "Easy API to create and parse X.509 and CMS related objects"
+DESCRIPTION = "A library to make the tasks of working with X.509 certificates, \
+CMS data and related objects more easy. It provides a highlevel interface to \
+the implemented protocols and presents the data in a consistent way. The \
+library does not rely on another cryptographic library but provides \
+hooks for easy integration with Libgcrypt. "
+HOMEPAGE = "http://www.gnupg.org/related_software/libksba/"
+LICENSE = "GPL-3.0-or-later & (GPL-2.0-or-later | LGPL-3.0-or-later)"
+LICENSE:${PN} = "GPL-2.0-or-later | LGPL-3.0-or-later"
+LICENSE:${PN}-doc = "GPL-3.0-or-later"
+LIC_FILES_CHKSUM = "file://COPYING;md5=fd541d83f75d038c4e0617b672ed8bda \
+ file://COPYING.GPLv2;md5=b234ee4d69f5fce4486a80fdaf4a4263 \
+ file://COPYING.GPLv3;md5=2f31b266d3440dd7ee50f92cf67d8e6c \
+ file://COPYING.LGPLv3;md5=e6a600fd5e1d9cbde2d983680233ad02 \
+ "
+
+DEPENDS = "libgpg-error"
+
+BINCONFIG = "${bindir}/ksba-config"
+
+inherit autotools binconfig-disabled pkgconfig texinfo
+
+UPSTREAM_CHECK_URI = "https://gnupg.org/download/index.html"
+SRC_URI = "${GNUPG_MIRROR}/${BPN}/${BPN}-${PV}.tar.bz2 \
+ file://ksba-add-pkgconfig-support.patch"
+
+SRC_URI[sha256sum] = "5dec033d211559338838c0c4957c73dfdc3ee86f73977d6279640c9cd08ce6a4"
+
+do_configure:prepend () {
+ # Else these could be used in preference to those in aclocal-copy
+ rm -f ${S}/m4/gpg-error.m4
+}
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-support/libmd/libmd_1.0.4.bb b/meta/recipes-support/libmd/libmd_1.0.4.bb
deleted file mode 100644
index b93dc2d78d..0000000000
--- a/meta/recipes-support/libmd/libmd_1.0.4.bb
+++ /dev/null
@@ -1,16 +0,0 @@
-SUMMARY = "Message Digest functions from BSD systems"
-DESCRIPTION = "This library provides message digest functions \
-found on BSD systems either on their libc (NetBSD, OpenBSD) or \
-libmd (FreeBSD, DragonflyBSD, macOS, Solaris) libraries and \
-lacking on others like GNU systems."
-HOMEPAGE = "https://www.hadrons.org/software/libmd/"
-
-LICENSE = "BSD-3-Clause & BSD-2-Clause"
-LIC_FILES_CHKSUM = "file://COPYING;md5=0436d4fb62a71f661d6e8b7812f9e1df"
-
-SRC_URI = "https://archive.hadrons.org/software/libmd/libmd-${PV}.tar.xz"
-SRC_URI[sha256sum] = "f51c921042e34beddeded4b75557656559cf5b1f2448033b4c1eec11c07e530f"
-
-inherit autotools
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-support/libmd/libmd_1.1.0.bb b/meta/recipes-support/libmd/libmd_1.1.0.bb
new file mode 100644
index 0000000000..dc588a0f95
--- /dev/null
+++ b/meta/recipes-support/libmd/libmd_1.1.0.bb
@@ -0,0 +1,16 @@
+SUMMARY = "Message Digest functions from BSD systems"
+DESCRIPTION = "This library provides message digest functions \
+found on BSD systems either on their libc (NetBSD, OpenBSD) or \
+libmd (FreeBSD, DragonflyBSD, macOS, Solaris) libraries and \
+lacking on others like GNU systems."
+HOMEPAGE = "https://www.hadrons.org/software/libmd/"
+
+LICENSE = "BSD-3-Clause & BSD-2-Clause"
+LIC_FILES_CHKSUM = "file://COPYING;md5=0436d4fb62a71f661d6e8b7812f9e1df"
+
+SRC_URI = "https://archive.hadrons.org/software/libmd/libmd-${PV}.tar.xz"
+SRC_URI[sha256sum] = "1bd6aa42275313af3141c7cf2e5b964e8b1fd488025caf2f971f43b00776b332"
+
+inherit autotools
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-support/libmicrohttpd/libmicrohttpd_0.9.75.bb b/meta/recipes-support/libmicrohttpd/libmicrohttpd_0.9.75.bb
deleted file mode 100644
index 9c99af7c91..0000000000
--- a/meta/recipes-support/libmicrohttpd/libmicrohttpd_0.9.75.bb
+++ /dev/null
@@ -1,30 +0,0 @@
-DESCRIPTION = "A small C library that is supposed to make it easy to run an HTTP server as part of another application"
-HOMEPAGE = "http://www.gnu.org/software/libmicrohttpd/"
-LICENSE = "LGPL-2.1-or-later"
-LIC_FILES_CHKSUM = "file://COPYING;md5=57d09285feac8a64efa878e692b14f36"
-SECTION = "net"
-
-DEPENDS = "file"
-
-SRC_URI = "${GNU_MIRROR}/libmicrohttpd/${BPN}-${PV}.tar.gz"
-SRC_URI[sha256sum] = "9278907a6f571b391aab9644fd646a5108ed97311ec66f6359cebbedb0a4e3bb"
-
-inherit autotools lib_package pkgconfig gettext
-
-CFLAGS += "-pthread -D_REENTRANT"
-
-EXTRA_OECONF += "--disable-static --with-gnutls=${STAGING_LIBDIR}/../"
-
-PACKAGECONFIG ?= "curl https"
-PACKAGECONFIG:append:class-target = "\
- ${@bb.utils.filter('DISTRO_FEATURES', 'largefile', d)} \
-"
-PACKAGECONFIG[largefile] = "--enable-largefile,--disable-largefile,,"
-PACKAGECONFIG[curl] = "--enable-curl,--disable-curl,curl,"
-PACKAGECONFIG[https] = "--enable-https,--disable-https,libgcrypt gnutls,"
-
-do_compile:append() {
- sed -i s:-L${STAGING_LIBDIR}::g libmicrohttpd.pc
-}
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-support/libmicrohttpd/libmicrohttpd_1.0.1.bb b/meta/recipes-support/libmicrohttpd/libmicrohttpd_1.0.1.bb
new file mode 100644
index 0000000000..0628ee71b5
--- /dev/null
+++ b/meta/recipes-support/libmicrohttpd/libmicrohttpd_1.0.1.bb
@@ -0,0 +1,27 @@
+SUMMARY = "A small C library that is supposed to make it easy to run an HTTP server as part of another application"
+HOMEPAGE = "http://www.gnu.org/software/libmicrohttpd/"
+LICENSE = "LGPL-2.1-or-later"
+LIC_FILES_CHKSUM = "file://COPYING;md5=57d09285feac8a64efa878e692b14f36"
+SECTION = "net"
+
+DEPENDS = "file"
+
+SRC_URI = "${GNU_MIRROR}/libmicrohttpd/${BPN}-${PV}.tar.gz"
+SRC_URI[sha256sum] = "a89e09fc9b4de34dde19f4fcb4faaa1ce10299b9908db1132bbfa1de47882b94"
+
+inherit autotools lib_package pkgconfig gettext
+
+CFLAGS += "-pthread -D_REENTRANT"
+
+EXTRA_OECONF += "--disable-static --with-gnutls=${STAGING_LIBDIR}/../ --enable-largefile"
+
+PACKAGECONFIG ?= "curl https"
+
+PACKAGECONFIG[curl] = "--enable-curl,--disable-curl,curl,"
+PACKAGECONFIG[https] = "--enable-https,--disable-https,libgcrypt gnutls,"
+
+do_compile:append() {
+ sed -i s:-L${STAGING_LIBDIR}::g libmicrohttpd.pc
+}
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-support/libmpc/libmpc_1.2.1.bb b/meta/recipes-support/libmpc/libmpc_1.2.1.bb
deleted file mode 100644
index 15d382c4ad..0000000000
--- a/meta/recipes-support/libmpc/libmpc_1.2.1.bb
+++ /dev/null
@@ -1,12 +0,0 @@
-require libmpc.inc
-
-DEPENDS = "gmp mpfr"
-
-LIC_FILES_CHKSUM = "file://COPYING.LESSER;md5=e6a600fd5e1d9cbde2d983680233ad02"
-SRC_URI = "${GNU_MIRROR}/mpc/mpc-${PV}.tar.gz"
-
-SRC_URI[sha256sum] = "17503d2c395dfcf106b622dc142683c1199431d095367c6aacba6eec30340459"
-
-S = "${WORKDIR}/mpc-${PV}"
-BBCLASSEXTEND = "native nativesdk"
-
diff --git a/meta/recipes-support/libmpc/libmpc_1.3.1.bb b/meta/recipes-support/libmpc/libmpc_1.3.1.bb
new file mode 100644
index 0000000000..ed57396c0a
--- /dev/null
+++ b/meta/recipes-support/libmpc/libmpc_1.3.1.bb
@@ -0,0 +1,12 @@
+require libmpc.inc
+
+DEPENDS = "gmp mpfr"
+
+LIC_FILES_CHKSUM = "file://COPYING.LESSER;md5=e6a600fd5e1d9cbde2d983680233ad02"
+SRC_URI = "${GNU_MIRROR}/mpc/mpc-${PV}.tar.gz"
+
+SRC_URI[sha256sum] = "ab642492f5cf882b74aa0cb730cd410a81edcdbec895183ce930e706c1c759b8"
+
+S = "${WORKDIR}/mpc-${PV}"
+BBCLASSEXTEND = "native nativesdk"
+
diff --git a/meta/recipes-support/libnl/files/enable-serial-tests.patch b/meta/recipes-support/libnl/files/enable-serial-tests.patch
deleted file mode 100644
index db774ad821..0000000000
--- a/meta/recipes-support/libnl/files/enable-serial-tests.patch
+++ /dev/null
@@ -1,29 +0,0 @@
-From b1476d89bf7a0bc6062438731ee4e9026696328b Mon Sep 17 00:00:00 2001
-From: Eric Xu <chi.xu@windriver.com>
-Date: Fri, 9 Mar 2018 03:38:49 +0000
-Subject: [PATCH] Add ptest for libnl
-
-serial-tests is required to generate those targets.
-
-Upstream-Status: Inappropriate [oe-specific]
-Signed-off-by: Eric Xu <chi.xu@windriver.com>
----
- configure.ac | 2 +-
- 1 files changed, 1 insertion(+), 1 deletions(-)
-
-diff --git a/configure.ac b/configure.ac
-index dfead98..2cc8257 100644
---- a/configure.ac
-+++ b/configure.ac
-@@ -46,7 +46,7 @@ AC_INIT(libnl, [libnl_version], [], [], [http://www.infradead.org/~tgr/libnl/])
- AC_CONFIG_HEADERS([lib/defs.h])
- AC_CONFIG_AUX_DIR([build-aux])
- AC_CONFIG_MACRO_DIR([m4])
--AM_INIT_AUTOMAKE([-Wall foreign subdir-objects])
-+AM_INIT_AUTOMAKE([-Wall foreign subdir-objects serial-tests])
- m4_ifdef([AM_SILENT_RULES], [AM_SILENT_RULES(yes)], [])
- m4_ifdef([AM_PROG_AR], [AM_PROG_AR])
-
---
-2.13.3
-
diff --git a/meta/recipes-support/libnl/files/fa7f97f8982544c4fcb403893bae6701230d5165.patch b/meta/recipes-support/libnl/files/fa7f97f8982544c4fcb403893bae6701230d5165.patch
deleted file mode 100644
index 02662c939e..0000000000
--- a/meta/recipes-support/libnl/files/fa7f97f8982544c4fcb403893bae6701230d5165.patch
+++ /dev/null
@@ -1,48 +0,0 @@
-From fa7f97f8982544c4fcb403893bae6701230d5165 Mon Sep 17 00:00:00 2001
-From: Thomas Haller <thaller@redhat.com>
-Date: Fri, 15 Apr 2022 13:29:49 +0200
-Subject: [PATCH] build: avoid building check-direct with --disable-static
-
-"check-direct" needs to statically link with the libraries, because
-it wants to test internal ABI, which is hidden in the share libraries.
-When configuring with "--disable-static", static libs are not build
-and the test tool cannot be build.
-
-Just skip the test in that case.
-
-https://github.com/thom311/libnl/issues/306
-Upstream-Status: Backport [https://github.com/thom311/libnl/commit/fa7f97f8982544c4fcb403893bae6701230d5165]
-Signed-off-by: Alexander Kanavin <alex@linutronix.de>
----
- Makefile.am | 2 ++
- configure.ac | 2 ++
- 2 files changed, 4 insertions(+)
-
-diff --git a/Makefile.am b/Makefile.am
-index a6bcf553..2f5e0dfc 100644
---- a/Makefile.am
-+++ b/Makefile.am
-@@ -984,8 +984,10 @@ tests_check_all_LDADD = \
- $(NULL)
-
- if WITH_CHECK
-+if ENABLE_STATIC
- check_programs += tests/check-direct
- endif
-+endif
-
- tests_check_direct_SOURCES = \
- tests/check-direct.c \
-diff --git a/configure.ac b/configure.ac
-index 1f9ad0eb..0fd1cc0f 100644
---- a/configure.ac
-+++ b/configure.ac
-@@ -107,6 +107,8 @@ else
- AC_CHECK_LIB([pthread], [pthread_mutex_lock], [], AC_MSG_ERROR([libpthread is required]))
- fi
-
-+AM_CONDITIONAL([ENABLE_STATIC], [test "$enable_static" != "no"])
-+
- AC_ARG_ENABLE([debug],
- AS_HELP_STRING([--disable-debug], [Do not include debugging statements]),
- [enable_debug="$enableval"], [enable_debug="yes"])
diff --git a/meta/recipes-support/libnl/files/run-ptest b/meta/recipes-support/libnl/files/run-ptest
index e5c9eb0a32..0d0c665cd2 100755
--- a/meta/recipes-support/libnl/files/run-ptest
+++ b/meta/recipes-support/libnl/files/run-ptest
@@ -1,16 +1,3 @@
-#!/bin/sh
+#! /bin/sh
-num_fail=0
-
-for test in check*
-do
- ./"$test" \
- && echo "PASS: $test" \
- || {
- echo "FAIL: $test"
- num_fail=$(( ${num_fail} + 1))
- }
-
-done
-
-exit $num_fail
+CK_AUTOMAKE=1 ./check-all
diff --git a/meta/recipes-support/libnl/libnl_3.6.0.bb b/meta/recipes-support/libnl/libnl_3.6.0.bb
deleted file mode 100644
index af3aa41040..0000000000
--- a/meta/recipes-support/libnl/libnl_3.6.0.bb
+++ /dev/null
@@ -1,78 +0,0 @@
-SUMMARY = "A library for applications dealing with netlink sockets"
-DESCRIPTION = "The libnl suite is a collection of libraries providing \
-APIs to netlink protocol based Linux kernel interfaces. libnl is the core \
-library implementing the fundamentals required to use the netlink protocol \
-such as socket handling, message construction and parsing, and sending \
-and receiving of data."
-HOMEPAGE = "http://www.infradead.org/~tgr/libnl/"
-SECTION = "libs/network"
-
-PE = "1"
-
-LICENSE = "LGPL-2.1-only"
-LIC_FILES_CHKSUM = "file://COPYING;md5=4fbd65380cdd255951079008b364516c"
-
-DEPENDS = "flex-native bison-native"
-
-SRC_URI = " \
- https://github.com/thom311/${BPN}/releases/download/${BPN}${@d.getVar('PV').replace('.','_')}/${BP}.tar.gz \
- file://enable-serial-tests.patch \
- file://run-ptest \
- file://fa7f97f8982544c4fcb403893bae6701230d5165.patch \
- "
-
-SRC_URI[sha256sum] = "532155fd011e5a805bd67121b87a01c757e2bb24112ac17e69cb86013b970009"
-
-
-UPSTREAM_CHECK_URI = "https://github.com/thom311/${BPN}/releases"
-
-inherit autotools pkgconfig ptest
-
-FILES:${PN} = "${libdir}/libnl-3.so.* \
- ${libdir}/libnl.so.* \
- ${sysconfdir}"
-RREPLACES:${PN} = "libnl2"
-RCONFLICTS:${PN} = "libnl2"
-
-FILES:${PN}-dev += "${libdir}/libnl/cli/*/*.la"
-FILES:${PN}-staticdev += "${libdir}/libnl/cli/*/*.a"
-
-PACKAGES += "${PN}-cli ${PN}-genl ${PN}-idiag ${PN}-nf ${PN}-route ${PN}-xfrm"
-FILES:${PN}-cli = "${libdir}/libnl-cli-3.so.* \
- ${libdir}/libnl/cli/*/*.so \
- ${bindir}/genl-ctrl-list \
- ${bindir}/idiag-socket-details \
- ${bindir}/nf-* \
- ${bindir}/nl-*"
-FILES:${PN}-genl = "${libdir}/libnl-genl-3.so.* \
- ${libdir}/libnl-genl.so.*"
-FILES:${PN}-idiag = "${libdir}/libnl-idiag-3.so.*"
-FILES:${PN}-nf = "${libdir}/libnl-nf-3.so.*"
-FILES:${PN}-route = "${libdir}/libnl-route-3.so.*"
-FILES:${PN}-xfrm = "${libdir}/libnl-xfrm-3.so.*"
-RREPLACES:${PN}-genl = "libnl-genl2"
-RCONFLICTS:${PN}-genl = "libnl-genl2"
-
-RDEPENDS:${PN}-ptest += "libcheck"
-RRECOMMENDS:${PN}-ptest += "kernel-module-dummy kernel-module-bonding"
-DEPENDS += "${@bb.utils.contains('PTEST_ENABLED', '1', 'libcheck', '', d)}"
-
-# make sure the tests don't link against wrong so file
-EXTRA_OECONF += "${@bb.utils.contains('PTEST_ENABLED', '1', '--disable-rpath', '', d)}"
-
-do_compile_ptest() {
- # hack to remove the call to `make runtest-TESTS`
- sed -i 's/$(MAKE) $(AM_MAKEFLAGS) runtest-TESTS//g' Makefile
- oe_runmake check
-}
-
-do_install_ptest(){
- # legacy? tests are also installed, but ptest-runner will not run them
- # upstream are not running these tests in their CI pipeline
- # issue opened https://github.com/thom311/libnl/issues/270
- install -m 0755 tests/.libs/* ${D}${PTEST_PATH}/
- # contains build paths
- rm ${D}${PTEST_PATH}/*.la
-}
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-support/libnl/libnl_3.9.0.bb b/meta/recipes-support/libnl/libnl_3.9.0.bb
new file mode 100644
index 0000000000..db9d93e8cb
--- /dev/null
+++ b/meta/recipes-support/libnl/libnl_3.9.0.bb
@@ -0,0 +1,64 @@
+SUMMARY = "A library for applications dealing with netlink sockets"
+DESCRIPTION = "The libnl suite is a collection of libraries providing \
+APIs to netlink protocol based Linux kernel interfaces. libnl is the core \
+library implementing the fundamentals required to use the netlink protocol \
+such as socket handling, message construction and parsing, and sending \
+and receiving of data."
+HOMEPAGE = "http://www.infradead.org/~tgr/libnl/"
+SECTION = "libs/network"
+
+PE = "1"
+
+LICENSE = "LGPL-2.1-only"
+LIC_FILES_CHKSUM = "file://COPYING;md5=4fbd65380cdd255951079008b364516c"
+
+DEPENDS = "flex-native bison-native"
+
+SRC_URI = "${GITHUB_BASE_URI}/download/${BPN}${@d.getVar('PV').replace('.','_')}/${BP}.tar.gz \
+ file://run-ptest \
+ "
+
+SRC_URI[sha256sum] = "aed507004d728a5cf11eab48ca4bf9e6e1874444e33939b9d3dfed25018ee9bb"
+
+GITHUB_BASE_URI = "https://github.com/thom311/${BPN}/releases"
+UPSTREAM_CHECK_REGEX = "releases/tag/libnl(?P<pver>.+)"
+
+inherit autotools pkgconfig ptest github-releases
+
+FILES:${PN} = "${libdir}/libnl-3.so.* \
+ ${libdir}/libnl.so.* \
+ ${sysconfdir}"
+RREPLACES:${PN} = "libnl2"
+RCONFLICTS:${PN} = "libnl2"
+
+FILES:${PN}-dev += "${libdir}/libnl/cli/*/*.la"
+FILES:${PN}-staticdev += "${libdir}/libnl/cli/*/*.a"
+
+PACKAGES += "${PN}-cli ${PN}-genl ${PN}-idiag ${PN}-nf ${PN}-route ${PN}-xfrm"
+FILES:${PN}-cli = "${libdir}/libnl-cli-3.so.* \
+ ${libdir}/libnl/cli/*/*.so \
+ ${bindir}/genl-ctrl-list \
+ ${bindir}/idiag-socket-details \
+ ${bindir}/nf-* \
+ ${bindir}/nl-*"
+FILES:${PN}-genl = "${libdir}/libnl-genl-3.so.* \
+ ${libdir}/libnl-genl.so.*"
+FILES:${PN}-idiag = "${libdir}/libnl-idiag-3.so.*"
+FILES:${PN}-nf = "${libdir}/libnl-nf-3.so.*"
+FILES:${PN}-route = "${libdir}/libnl-route-3.so.*"
+FILES:${PN}-xfrm = "${libdir}/libnl-xfrm-3.so.*"
+RREPLACES:${PN}-genl = "libnl-genl2"
+RCONFLICTS:${PN}-genl = "libnl-genl2"
+
+DEPENDS += "${@bb.utils.contains('PTEST_ENABLED', '1', 'libcheck', '', d)}"
+RRECOMMENDS:${PN}-ptest += "kernel-module-dummy kernel-module-bonding"
+
+do_compile_ptest() {
+ oe_runmake ./tests/check-all
+}
+
+do_install_ptest() {
+ ./libtool install install ./tests/check-all ${D}${PTEST_PATH}/
+}
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-support/libpcre/libpcre2_10.40.bb b/meta/recipes-support/libpcre/libpcre2_10.40.bb
deleted file mode 100644
index 8c2cadfe2b..0000000000
--- a/meta/recipes-support/libpcre/libpcre2_10.40.bb
+++ /dev/null
@@ -1,54 +0,0 @@
-DESCRIPTION = "There are two major versions of the PCRE library. The \
-newest version is PCRE2, which is a re-working of the original PCRE \
-library to provide an entirely new API. The original, very widely \
-deployed PCRE library's API and feature are stable, future releases \
- will be for bugfixes only. All new future features will be to PCRE2, \
-not the original PCRE 8.x series."
-SUMMARY = "Perl Compatible Regular Expressions version 2"
-HOMEPAGE = "http://www.pcre.org"
-SECTION = "devel"
-LICENSE = "BSD-3-Clause"
-LIC_FILES_CHKSUM = "file://LICENCE;md5=41bfb977e4933c506588724ce69bf5d2"
-
-SRC_URI = "https://github.com/PhilipHazel/pcre2/releases/download/pcre2-${PV}/pcre2-${PV}.tar.bz2"
-
-UPSTREAM_CHECK_URI = "https://github.com/PhilipHazel/pcre2/releases"
-
-SRC_URI[sha256sum] = "14e4b83c4783933dc17e964318e6324f7cae1bc75d8f3c79bc6969f00c159d68"
-
-CVE_PRODUCT = "pcre2"
-
-S = "${WORKDIR}/pcre2-${PV}"
-
-PROVIDES += "pcre2"
-DEPENDS += "bzip2 zlib"
-
-BINCONFIG = "${bindir}/pcre2-config"
-
-inherit autotools binconfig-disabled
-
-EXTRA_OECONF = "\
- --enable-newline-is-lf \
- --with-link-size=2 \
- --with-match-limit=10000000 \
- --enable-pcre2-16 \
- --enable-pcre2-32 \
-"
-CFLAGS += "-D_REENTRANT"
-CXXFLAGS:append:powerpc = " -lstdc++"
-
-PACKAGES =+ "libpcre2-16 libpcre2-32 pcre2grep pcre2grep-doc pcre2test pcre2test-doc"
-
-SUMMARY:pcre2grep = "grep utility that uses perl 5 compatible regexes"
-SUMMARY:pcre2grep-doc = "grep utility that uses perl 5 compatible regexes - docs"
-SUMMARY:pcre2test = "program for testing Perl-comatible regular expressions"
-SUMMARY:pcre2test-doc = "program for testing Perl-comatible regular expressions - docs"
-
-FILES:libpcre2-16 = "${libdir}/libpcre2-16.so.*"
-FILES:libpcre2-32 = "${libdir}/libpcre2-32.so.*"
-FILES:pcre2grep = "${bindir}/pcre2grep"
-FILES:pcre2grep-doc = "${mandir}/man1/pcre2grep.1"
-FILES:pcre2test = "${bindir}/pcre2test"
-FILES:pcre2test-doc = "${mandir}/man1/pcre2test.1"
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-support/libpcre/libpcre2_10.43.bb b/meta/recipes-support/libpcre/libpcre2_10.43.bb
new file mode 100644
index 0000000000..fd0bd79212
--- /dev/null
+++ b/meta/recipes-support/libpcre/libpcre2_10.43.bb
@@ -0,0 +1,55 @@
+DESCRIPTION = "There are two major versions of the PCRE library. The \
+newest version is PCRE2, which is a re-working of the original PCRE \
+library to provide an entirely new API. The original, very widely \
+deployed PCRE library's API and feature are stable, future releases \
+ will be for bugfixes only. All new future features will be to PCRE2, \
+not the original PCRE 8.x series."
+SUMMARY = "Perl Compatible Regular Expressions version 2"
+HOMEPAGE = "http://www.pcre.org"
+SECTION = "devel"
+LICENSE = "BSD-3-Clause"
+LIC_FILES_CHKSUM = "file://LICENCE;md5=321a5eb46acae6b6c1ff2c7a866d836a"
+
+SRC_URI = "${GITHUB_BASE_URI}/download/pcre2-${PV}/pcre2-${PV}.tar.bz2"
+
+GITHUB_BASE_URI = "https://github.com/PhilipHazel/pcre2/releases"
+UPSTREAM_CHECK_REGEX = "releases/tag/pcre2-(?P<pver>\d+(\.\d+)+)$"
+
+SRC_URI[sha256sum] = "e2a53984ff0b07dfdb5ae4486bbb9b21cca8e7df2434096cc9bf1b728c350bcb"
+
+CVE_PRODUCT = "pcre2"
+
+S = "${WORKDIR}/pcre2-${PV}"
+
+PROVIDES += "pcre2"
+DEPENDS += "bzip2 zlib"
+
+BINCONFIG = "${bindir}/pcre2-config"
+
+inherit autotools binconfig-disabled github-releases
+
+EXTRA_OECONF = "\
+ --enable-newline-is-lf \
+ --with-link-size=2 \
+ --with-match-limit=10000000 \
+ --enable-pcre2-16 \
+ --enable-pcre2-32 \
+"
+CFLAGS += "-D_REENTRANT"
+CXXFLAGS:append:powerpc = " -lstdc++"
+
+PACKAGES =+ "libpcre2-16 libpcre2-32 pcre2grep pcre2grep-doc pcre2test pcre2test-doc"
+
+SUMMARY:pcre2grep = "grep utility that uses perl 5 compatible regexes"
+SUMMARY:pcre2grep-doc = "grep utility that uses perl 5 compatible regexes - docs"
+SUMMARY:pcre2test = "program for testing Perl-comatible regular expressions"
+SUMMARY:pcre2test-doc = "program for testing Perl-comatible regular expressions - docs"
+
+FILES:libpcre2-16 = "${libdir}/libpcre2-16.so.*"
+FILES:libpcre2-32 = "${libdir}/libpcre2-32.so.*"
+FILES:pcre2grep = "${bindir}/pcre2grep"
+FILES:pcre2grep-doc = "${mandir}/man1/pcre2grep.1"
+FILES:pcre2test = "${bindir}/pcre2test"
+FILES:pcre2test-doc = "${mandir}/man1/pcre2test.1"
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-support/libproxy/libproxy/determinism.patch b/meta/recipes-support/libproxy/libproxy/determinism.patch
deleted file mode 100644
index 09770f68f0..0000000000
--- a/meta/recipes-support/libproxy/libproxy/determinism.patch
+++ /dev/null
@@ -1,26 +0,0 @@
-[PATCH] cmake/modules/config_sysconfig: Allow configuration of sysconfig module
-
-Checking whether the host system has /etc/sysconfig is not deterministic
-when cross compiling. Allow this to be disabled by adding a configure
-option for it. OpenEmbedded can set this and have deterministic build
-output independent of the host OS.
-
-Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
-Upstream-Status: Submitted [https://github.com/libproxy/libproxy/pull/160]
-
-Index: libproxy-0.4.17/libproxy/cmake/modules/config_sysconfig.cmk
-===================================================================
---- libproxy-0.4.17.orig/libproxy/cmake/modules/config_sysconfig.cmk
-+++ libproxy-0.4.17/libproxy/cmake/modules/config_sysconfig.cmk
-@@ -1,5 +1,8 @@
- if (NOT WIN32 AND NOT APPLE)
-- if (EXISTS "/etc/sysconfig" AND IS_DIRECTORY "/etc/sysconfig")
-- set(SYSCONFIG_FOUND 1)
-+ option(WITH_SYSCONFIG "Build sysconfig module for /etc/sysconfig" ON)
-+ if(WITH_SYSCONFIG)
-+ if (EXISTS "/etc/sysconfig" AND IS_DIRECTORY "/etc/sysconfig")
-+ set(SYSCONFIG_FOUND 1)
-+ endif()
- endif()
- endif()
-\ No newline at end of file
diff --git a/meta/recipes-support/libproxy/libproxy_0.4.17.bb b/meta/recipes-support/libproxy/libproxy_0.4.17.bb
deleted file mode 100644
index e121681ae8..0000000000
--- a/meta/recipes-support/libproxy/libproxy_0.4.17.bb
+++ /dev/null
@@ -1,42 +0,0 @@
-SUMMARY = "Library providing automatic proxy configuration management"
-DESCRIPTION = "libproxy provides interfaces to get the proxy that will be \
-used to access network resources. It uses various plugins to get proxy \
-configuration via different mechanisms (e.g. environment variables or \
-desktop settings)."
-HOMEPAGE = "https://github.com/libproxy/libproxy"
-BUGTRACKER = "https://github.com/libproxy/libproxy/issues"
-SECTION = "libs"
-LICENSE = "LGPL-2.1-or-later"
-LIC_FILES_CHKSUM = "file://COPYING;md5=4fbd65380cdd255951079008b364516c \
- file://utils/proxy.c;beginline=1;endline=18;md5=55152a1006d7dafbef32baf9c30a99c0"
-
-DEPENDS = "glib-2.0"
-
-SRC_URI = "https://github.com/${BPN}/${BPN}/releases/download/${PV}/${BP}.tar.xz \
- file://determinism.patch"
-SRC_URI[sha256sum] = "bc89f842f654ee1985a31c0ba56dc7e2ce8044a0264ddca84e650f46cd7f8b05"
-
-UPSTREAM_CHECK_URI = "https://github.com/libproxy/libproxy/releases"
-UPSTREAM_CHECK_REGEX = "libproxy-(?P<pver>.*)\.tar"
-
-inherit cmake pkgconfig
-
-PACKAGECONFIG ?= "${@bb.utils.contains('DISTRO_FEATURES', 'x11', 'gnome', '', d)} gnome3"
-PACKAGECONFIG[gnome] = "-DWITH_GNOME=yes,-DWITH_GNOME=no,gconf"
-PACKAGECONFIG[gnome3] = "-DWITH_GNOME3=yes,-DWITH_GNOME3=no"
-
-EXTRA_OECMAKE += " \
- -DWITH_KDE=no \
- -DWITH_MOZJS=no \
- -DWITH_NM=no \
- -DWITH_PERL=no \
- -DWITH_PYTHON2=no \
- -DWITH_PYTHON3=no \
- -DWITH_WEBKIT=no \
- -DWITH_SYSCONFIG=no \
- -DLIB_INSTALL_DIR=${libdir} \
- -DLIBEXEC_INSTALL_DIR=${libexecdir} \
-"
-SECURITY_PIE_CFLAGS:remove = "-fPIE -pie"
-
-FILES:${PN} += "${libdir}/${BPN}/${PV}/modules"
diff --git a/meta/recipes-support/libproxy/libproxy_0.5.4.bb b/meta/recipes-support/libproxy/libproxy_0.5.4.bb
new file mode 100644
index 0000000000..b4f4c6d254
--- /dev/null
+++ b/meta/recipes-support/libproxy/libproxy_0.5.4.bb
@@ -0,0 +1,28 @@
+SUMMARY = "Library providing automatic proxy configuration management"
+DESCRIPTION = "libproxy provides interfaces to get the proxy that will be \
+used to access network resources. It uses various plugins to get proxy \
+configuration via different mechanisms (e.g. environment variables or \
+desktop settings)."
+HOMEPAGE = "https://github.com/libproxy/libproxy"
+BUGTRACKER = "https://github.com/libproxy/libproxy/issues"
+SECTION = "libs"
+LICENSE = "LGPL-2.1-or-later"
+LIC_FILES_CHKSUM = "file://COPYING;md5=4fbd65380cdd255951079008b364516c \
+ file://src/libproxy/proxy.c;beginline=1;endline=20;md5=bb9a177ef1c995311070f34c5638a402 \
+ "
+
+DEPENDS = "glib-2.0"
+
+SRC_URI = "git://github.com/libproxy/libproxy;protocol=https;branch=main"
+SRCREV = "dff9a603d823dcc740ec966cd27960daa6b891b1"
+S = "${WORKDIR}/git"
+
+inherit meson pkgconfig gobject-introspection vala gi-docgen
+GIDOCGEN_MESON_OPTION = 'docs'
+
+PACKAGECONFIG ?= ""
+PACKAGECONFIG[curl] = "-Dcurl=true,-Dcurl=false,curl"
+PACKAGECONFIG[config-gnome] = "-Dconfig-gnome=true,-Dconfig-gnome=false,gsettings-desktop-schemas"
+PACKAGECONFIG[pacrunner-duktape] = "-Dpacrunner-duktape=true,-Dpacrunner-duktape=false,duktape"
+
+FILES:${PN} += "${libdir}/${BPN}/${PV}/modules"
diff --git a/meta/recipes-support/libpsl/libpsl_0.21.1.bb b/meta/recipes-support/libpsl/libpsl_0.21.1.bb
deleted file mode 100644
index 4fc0ad8acb..0000000000
--- a/meta/recipes-support/libpsl/libpsl_0.21.1.bb
+++ /dev/null
@@ -1,25 +0,0 @@
-SUMMARY = "Public Suffix List library"
-DESCRIPTION = "The libpsl package provides a library for accessing and \
-resolving information from the Public Suffix List (PSL). The PSL is a set of \
-domain names beyond the standard suffixes, such as .com."
-
-HOMEPAGE = "https://rockdaboot.github.io/libpsl/"
-BUGTRACKER = "https://github.com/rockdaboot/libpsl/issues"
-
-LICENSE = "MIT"
-LIC_FILES_CHKSUM = "file://LICENSE;md5=5437030d9e4fbe7267ced058ddb8a7f5 \
- file://COPYING;md5=f41d10997a12da5ee3c24ceeb0148d18"
-
-SRC_URI = "https://github.com/rockdaboot/${BPN}/releases/download/${PV}/${BP}.tar.gz \
- "
-SRC_URI[sha256sum] = "ac6ce1e1fbd4d0254c4ddb9d37f1fa99dec83619c1253328155206b896210d4c"
-
-UPSTREAM_CHECK_URI = "https://github.com/rockdaboot/libpsl/releases"
-
-inherit autotools gettext gtk-doc manpages pkgconfig lib_package
-
-PACKAGECONFIG ?= "icu"
-PACKAGECONFIG[manpages] = "--enable-man,--disable-man,libxslt-native"
-PACKAGECONFIG[icu] = "--enable-runtime=libicu --enable-builtin=libicu,,icu"
-PACKAGECONFIG[idn2] = "--enable-runtime=libidn2 --enable-builtin=libidn2,,libidn2 libunistring"
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-support/libpsl/libpsl_0.21.5.bb b/meta/recipes-support/libpsl/libpsl_0.21.5.bb
new file mode 100644
index 0000000000..b9341a986e
--- /dev/null
+++ b/meta/recipes-support/libpsl/libpsl_0.21.5.bb
@@ -0,0 +1,26 @@
+SUMMARY = "Public Suffix List library"
+DESCRIPTION = "The libpsl package provides a library for accessing and \
+resolving information from the Public Suffix List (PSL). The PSL is a set of \
+domain names beyond the standard suffixes, such as .com."
+
+HOMEPAGE = "https://rockdaboot.github.io/libpsl/"
+BUGTRACKER = "https://github.com/rockdaboot/libpsl/issues"
+
+LICENSE = "MIT"
+LIC_FILES_CHKSUM = "file://LICENSE;md5=9f9e317096db2a598fc44237c5b8a4f7 \
+ file://COPYING;md5=9f9e317096db2a598fc44237c5b8a4f7 \
+ "
+
+SRC_URI = "${GITHUB_BASE_URI}/download/${PV}/${BP}.tar.gz \
+ "
+SRC_URI[sha256sum] = "1dcc9ceae8b128f3c0b3f654decd0e1e891afc6ff81098f227ef260449dae208"
+
+GITHUB_BASE_URI = "https://github.com/rockdaboot/libpsl/releases"
+
+inherit autotools gettext gtk-doc manpages pkgconfig lib_package github-releases
+
+PACKAGECONFIG ?= "icu"
+PACKAGECONFIG[manpages] = "--enable-man,--disable-man,libxslt-native"
+PACKAGECONFIG[icu] = "--enable-runtime=libicu --enable-builtin=libicu,,icu"
+PACKAGECONFIG[idn2] = "--enable-runtime=libidn2 --enable-builtin=libidn2,,libidn2 libunistring"
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-support/libseccomp/files/run-ptest b/meta/recipes-support/libseccomp/files/run-ptest
index 54b4a63cd2..63c79f09c4 100644
--- a/meta/recipes-support/libseccomp/files/run-ptest
+++ b/meta/recipes-support/libseccomp/files/run-ptest
@@ -1,4 +1,7 @@
#!/bin/sh
cd tests
+sed -i 's/SUCCESS/PASS/g; s/FAILURE/FAIL/g; s/SKIPPED/SKIP/g' regression
+sed -i 's/"Test %s result: %s\\n" "$1" "$2"/"%s: %s\\n" "$2" "$1"/g' regression
+sed -i 's/"Test %s result: %s %s\\n" "$1" "$2" "$3"/"%s: %s %s\\n" "$2" "$1" "$3"/g' regression
./regression -a
diff --git a/meta/recipes-support/libseccomp/libseccomp_2.5.4.bb b/meta/recipes-support/libseccomp/libseccomp_2.5.4.bb
deleted file mode 100644
index c76f3c3573..0000000000
--- a/meta/recipes-support/libseccomp/libseccomp_2.5.4.bb
+++ /dev/null
@@ -1,54 +0,0 @@
-SUMMARY = "interface to seccomp filtering mechanism"
-DESCRIPTION = "The libseccomp library provides and easy to use, platform independent,interface to the Linux Kernel's syscall filtering mechanism: seccomp."
-HOMEPAGE = "https://github.com/seccomp/libseccomp"
-SECTION = "security"
-LICENSE = "LGPL-2.1-only"
-LIC_FILES_CHKSUM = "file://LICENSE;beginline=0;endline=1;md5=8eac08d22113880357ceb8e7c37f989f"
-
-DEPENDS += "gperf-native"
-
-SRCREV = "f33f95014b36f97b42f0c2290e96d5c31647ed10"
-
-SRC_URI = "git://github.com/seccomp/libseccomp.git;branch=release-2.5;protocol=https \
- file://run-ptest \
- "
-
-S = "${WORKDIR}/git"
-
-inherit autotools-brokensep pkgconfig ptest features_check
-
-REQUIRED_DISTRO_FEATURES = "seccomp"
-
-PACKAGECONFIG ??= ""
-PACKAGECONFIG[python] = "--enable-python, --disable-python, python3"
-
-DISABLE_STATIC = ""
-
-do_compile_ptest() {
- oe_runmake -C tests check-build
-}
-
-do_install_ptest() {
- install -d ${D}${PTEST_PATH}/tests
- install -d ${D}${PTEST_PATH}/tools
- for file in $(find tests/* -executable -type f); do
- install -m 744 ${S}/${file} ${D}/${PTEST_PATH}/tests
- done
- for file in $(find tests/*.tests -type f); do
- install -m 744 ${S}/${file} ${D}/${PTEST_PATH}/tests
- done
- for file in $(find tools/* -executable -type f); do
- install -m 744 ${S}/${file} ${D}/${PTEST_PATH}/tools
- done
- # Overwrite libtool wrappers with real executables
- for file in $(find tools/.libs/* -executable -type f); do
- install -m 744 ${S}/${file} ${D}/${PTEST_PATH}/tools
- done
-}
-
-FILES:${PN} = "${bindir} ${libdir}/${BPN}.so*"
-FILES:${PN}-dbg += "${libdir}/${PN}/tests/.debug/* ${libdir}/${PN}/tools/.debug"
-
-RDEPENDS:${PN}-ptest = "coreutils bash"
-
-PV = "2.5.4"
diff --git a/meta/recipes-support/libseccomp/libseccomp_2.5.5.bb b/meta/recipes-support/libseccomp/libseccomp_2.5.5.bb
new file mode 100644
index 0000000000..478e5f4289
--- /dev/null
+++ b/meta/recipes-support/libseccomp/libseccomp_2.5.5.bb
@@ -0,0 +1,62 @@
+SUMMARY = "interface to seccomp filtering mechanism"
+DESCRIPTION = "The libseccomp library provides an easy to use, platform independent, interface to the Linux Kernel's syscall filtering mechanism: seccomp."
+HOMEPAGE = "https://github.com/seccomp/libseccomp"
+SECTION = "security"
+LICENSE = "LGPL-2.1-only"
+LIC_FILES_CHKSUM = "file://LICENSE;md5=7c13b3376cea0ce68d2d2da0a1b3a72c"
+
+DEPENDS += "gperf-native"
+
+SRCREV = "f0b04ab0b4fc0bc2cde6da1f407b4a487fe6d78f"
+
+SRC_URI = "git://github.com/seccomp/libseccomp.git;branch=release-2.5;protocol=https \
+ file://run-ptest \
+ "
+
+S = "${WORKDIR}/git"
+
+inherit autotools-brokensep pkgconfig ptest features_check
+
+inherit_defer ${@bb.utils.contains('PACKAGECONFIG', 'python', 'python3native', '', d)}
+
+REQUIRED_DISTRO_FEATURES = "seccomp"
+
+PACKAGECONFIG ??= ""
+PACKAGECONFIG[python] = "--enable-python, --disable-python, python3-cython-native"
+
+DISABLE_STATIC = ""
+
+do_compile_ptest() {
+ oe_runmake -C tests check-build
+}
+
+do_install_ptest() {
+ install -d ${D}${PTEST_PATH}/tests
+ install -d ${D}${PTEST_PATH}/tools
+ for file in $(find tests/* -executable -type f); do
+ install -m 744 ${S}/${file} ${D}/${PTEST_PATH}/tests
+ done
+ for file in $(find tests/*.tests -type f); do
+ install -m 744 ${S}/${file} ${D}/${PTEST_PATH}/tests
+ done
+ for file in $(find tests/*.pfc -type f); do
+ install -m 644 ${S}/${file} ${D}/${PTEST_PATH}/tests
+ done
+ install -m 644 ${S}/tests/valgrind_test.supp ${D}/${PTEST_PATH}/tests
+ for file in $(find tools/* -executable -type f); do
+ install -m 744 ${S}/${file} ${D}/${PTEST_PATH}/tools
+ done
+ # Overwrite libtool wrappers with real executables
+ for file in $(find tools/.libs/* -executable -type f); do
+ install -m 744 ${S}/${file} ${D}/${PTEST_PATH}/tools
+ done
+ # fix python shebang
+ sed -i -e 's@cmd /usr/bin/env python @cmd /usr/bin/env python3 @' ${D}/${PTEST_PATH}/tests/regression
+ sed -i -e 's@^#!/usr/bin/env python$@#!/usr/bin/env python3@' ${D}/${PTEST_PATH}/tests/*.py
+}
+
+FILES:${PN} = "${bindir} ${libdir}/${BPN}.so* ${PYTHON_SITEPACKAGES_DIR}/"
+FILES:${PN}-dbg += "${libdir}/${PN}/tests/.debug/* ${libdir}/${PN}/tools/.debug"
+
+RDEPENDS:${PN} += "${@bb.utils.contains('PACKAGECONFIG', 'python', 'python3', '', d)}"
+RDEPENDS:${PN}-ptest = "coreutils bash"
diff --git a/meta/recipes-support/libsoup/libsoup-2.4/0001-Fix-build-with-libxml2-2.12.0-and-clang-17.patch b/meta/recipes-support/libsoup/libsoup-2.4/0001-Fix-build-with-libxml2-2.12.0-and-clang-17.patch
new file mode 100644
index 0000000000..d867e5bc17
--- /dev/null
+++ b/meta/recipes-support/libsoup/libsoup-2.4/0001-Fix-build-with-libxml2-2.12.0-and-clang-17.patch
@@ -0,0 +1,44 @@
+From ced3c5d8cad0177b297666343f1561799dfefb0d Mon Sep 17 00:00:00 2001
+From: Khem Raj <raj.khem@gmail.com>
+Date: Wed, 22 Nov 2023 18:49:10 -0800
+Subject: [PATCH] Fix build with libxml2-2.12.0 and clang-17
+
+Fixes build errors about missing function prototypes with clang-17
+
+Fixes
+| ../libsoup-2.74.3/libsoup/soup-xmlrpc-old.c:512:8: error: call to undeclared function 'xmlParseMemory'; ISO C99 and later do not support implicit function declarations
+
+Upstream-Status: Submitted [https://gitlab.gnome.org/GNOME/libsoup/-/merge_requests/385]
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+---
+ libsoup/soup-xmlrpc-old.c | 1 +
+ libsoup/soup-xmlrpc.c | 1 +
+ 2 files changed, 2 insertions(+)
+
+diff --git a/libsoup/soup-xmlrpc-old.c b/libsoup/soup-xmlrpc-old.c
+index c57086b6..527e3b23 100644
+--- a/libsoup/soup-xmlrpc-old.c
++++ b/libsoup/soup-xmlrpc-old.c
+@@ -11,6 +11,7 @@
+
+ #include <string.h>
+
++#include <libxml/parser.h>
+ #include <libxml/tree.h>
+
+ #include "soup-xmlrpc-old.h"
+diff --git a/libsoup/soup-xmlrpc.c b/libsoup/soup-xmlrpc.c
+index 42dcda9c..e991cbf0 100644
+--- a/libsoup/soup-xmlrpc.c
++++ b/libsoup/soup-xmlrpc.c
+@@ -17,6 +17,7 @@
+
+ #include <string.h>
+ #include <errno.h>
++#include <libxml/parser.h>
+ #include <libxml/tree.h>
+ #include "soup-xmlrpc.h"
+ #include "soup.h"
+--
+2.43.0
+
diff --git a/meta/recipes-support/libsoup/libsoup-2.4_2.74.2.bb b/meta/recipes-support/libsoup/libsoup-2.4_2.74.2.bb
deleted file mode 100644
index 6ff393c9a3..0000000000
--- a/meta/recipes-support/libsoup/libsoup-2.4_2.74.2.bb
+++ /dev/null
@@ -1,46 +0,0 @@
-SUMMARY = "An HTTP library implementation in C"
-DESCRIPTION = "libsoup is an HTTP client/server library for GNOME. It uses GObjects \
-and the glib main loop, to integrate well with GNOME applications."
-HOMEPAGE = "https://wiki.gnome.org/Projects/libsoup"
-BUGTRACKER = "https://bugzilla.gnome.org/"
-SECTION = "x11/gnome/libs"
-LICENSE = "LGPL-2.0-only"
-LIC_FILES_CHKSUM = "file://COPYING;md5=5f30f0716dfdd0d91eb439ebec522ec2"
-
-DEPENDS = "glib-2.0 glib-2.0-native libxml2 sqlite3 libpsl"
-
-SHRT_VER = "${@d.getVar('PV').split('.')[0]}.${@d.getVar('PV').split('.')[1]}"
-
-SRC_URI = "${GNOME_MIRROR}/libsoup/${SHRT_VER}/libsoup-${PV}.tar.xz"
-SRC_URI[sha256sum] = "f0a427656e5fe19e1df71c107e88dfa1b2e673c25c547b7823b6018b40d01159"
-
-CVE_PRODUCT = "libsoup"
-
-S = "${WORKDIR}/libsoup-${PV}"
-
-inherit meson gettext pkgconfig upstream-version-is-even gobject-introspection gtk-doc
-
-UPSTREAM_CHECK_REGEX = "libsoup-(?P<pver>2(\.(?!99)\d+)+)\.tar"
-
-GIR_MESON_ENABLE_FLAG = 'enabled'
-GIR_MESON_DISABLE_FLAG = 'disabled'
-
-# libsoup-gnome is entirely deprecated and just stubs in 2.42 onwards. Disable by default.
-PACKAGECONFIG ??= ""
-PACKAGECONFIG[gnome] = "-Dgnome=true,-Dgnome=false"
-PACKAGECONFIG[gssapi] = "-Dgssapi=enabled,-Dgssapi=disabled,krb5"
-
-EXTRA_OEMESON:append = " -Dvapi=disabled -Dtls_check=false"
-
-GTKDOC_MESON_OPTION = "gtk_doc"
-
-# When built without gnome support, libsoup-2.4 will contain only one shared lib
-# and will therefore become subject to renaming by debian.bbclass. Prevent
-# renaming in order to keep the package name consistent regardless of whether
-# gnome support is enabled or disabled.
-DEBIAN_NOAUTONAME:${PN} = "1"
-
-# glib-networking is needed for SSL, proxies, etc.
-RRECOMMENDS:${PN} = "glib-networking"
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-support/libsoup/libsoup-2.4_2.74.3.bb b/meta/recipes-support/libsoup/libsoup-2.4_2.74.3.bb
new file mode 100644
index 0000000000..ee20530b64
--- /dev/null
+++ b/meta/recipes-support/libsoup/libsoup-2.4_2.74.3.bb
@@ -0,0 +1,59 @@
+SUMMARY = "An HTTP library implementation in C"
+DESCRIPTION = "libsoup is an HTTP client/server library for GNOME. It uses GObjects \
+and the glib main loop, to integrate well with GNOME applications."
+HOMEPAGE = "https://wiki.gnome.org/Projects/libsoup"
+BUGTRACKER = "https://bugzilla.gnome.org/"
+SECTION = "x11/gnome/libs"
+LICENSE = "LGPL-2.0-only"
+LIC_FILES_CHKSUM = "file://COPYING;md5=5f30f0716dfdd0d91eb439ebec522ec2"
+
+DEPENDS = "glib-2.0 glib-2.0-native libxml2 sqlite3 libpsl"
+
+SHRT_VER = "${@d.getVar('PV').split('.')[0]}.${@d.getVar('PV').split('.')[1]}"
+
+SRC_URI = "${GNOME_MIRROR}/libsoup/${SHRT_VER}/libsoup-${PV}.tar.xz \
+ file://0001-Fix-build-with-libxml2-2.12.0-and-clang-17.patch"
+SRC_URI[sha256sum] = "e4b77c41cfc4c8c5a035fcdc320c7bc6cfb75ef7c5a034153df1413fa1d92f13"
+
+CVE_PRODUCT = "libsoup"
+
+S = "${WORKDIR}/libsoup-${PV}"
+
+inherit meson gettext pkgconfig upstream-version-is-even gobject-introspection gtk-doc
+
+UPSTREAM_CHECK_REGEX = "libsoup-(?P<pver>2(\.(?!99)\d+)+)\.tar"
+
+GIR_MESON_ENABLE_FLAG = 'enabled'
+GIR_MESON_DISABLE_FLAG = 'disabled'
+
+PACKAGECONFIG ??= ""
+PACKAGECONFIG[brotli] = "-Dbrotli=enabled,-Dbrotli=disabled,brotli"
+# libsoup-gnome is entirely deprecated and just stubs in 2.42 onwards
+PACKAGECONFIG[gnome] = "-Dgnome=true,-Dgnome=false"
+PACKAGECONFIG[gssapi] = "-Dgssapi=enabled,-Dgssapi=disabled,krb5"
+PACKAGECONFIG[ntlm] = "-Dntlm=enabled,-Dntlm=disabled"
+PACKAGECONFIG[sysprof] = "-Dsysprof=enabled,-Dsysprof=disabled,sysprof"
+
+# Tell libsoup where the target ntlm_auth is installed
+do_write_config:append:class-target() {
+ cat >${WORKDIR}/soup.cross <<EOF
+[binaries]
+ntlm_auth = '${bindir}/ntlm_auth'
+EOF
+}
+EXTRA_OEMESON:append:class-target = " --cross-file ${WORKDIR}/soup.cross"
+
+EXTRA_OEMESON += "-Dvapi=disabled -Dtls_check=false"
+
+GTKDOC_MESON_OPTION = "gtk_doc"
+
+# When built without gnome support, libsoup-2.4 will contain only one shared lib
+# and will therefore become subject to renaming by debian.bbclass. Prevent
+# renaming in order to keep the package name consistent regardless of whether
+# gnome support is enabled or disabled.
+DEBIAN_NOAUTONAME:${PN} = "1"
+
+# glib-networking is needed for SSL, proxies, etc.
+RRECOMMENDS:${PN} = "glib-networking"
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-support/libsoup/libsoup_3.0.6.bb b/meta/recipes-support/libsoup/libsoup_3.0.6.bb
deleted file mode 100644
index 17825ae6a4..0000000000
--- a/meta/recipes-support/libsoup/libsoup_3.0.6.bb
+++ /dev/null
@@ -1,44 +0,0 @@
-SUMMARY = "An HTTP library implementation in C"
-DESCRIPTION = "libsoup is an HTTP client/server library for GNOME. It uses GObjects \
-and the glib main loop, to integrate well with GNOME applications."
-HOMEPAGE = "https://wiki.gnome.org/Projects/libsoup"
-BUGTRACKER = "https://bugzilla.gnome.org/"
-SECTION = "x11/gnome/libs"
-LICENSE = "LGPL-2.0-only"
-LIC_FILES_CHKSUM = "file://COPYING;md5=5f30f0716dfdd0d91eb439ebec522ec2"
-
-DEPENDS = "glib-2.0 glib-2.0-native libxml2 sqlite3 libpsl nghttp2"
-
-SHRT_VER = "${@d.getVar('PV').split('.')[0]}.${@d.getVar('PV').split('.')[1]}"
-
-SRC_URI = "${GNOME_MIRROR}/libsoup/${SHRT_VER}/libsoup-${PV}.tar.xz"
-SRC_URI[sha256sum] = "b45d59f840b9acf9bb45fd45854e3ef672f57e3ab957401c3ad8d7502ac23da6"
-
-PROVIDES = "libsoup-3.0"
-CVE_PRODUCT = "libsoup"
-
-S = "${WORKDIR}/libsoup-${PV}"
-
-inherit meson gettext pkgconfig upstream-version-is-even gobject-introspection gtk-doc
-
-GIR_MESON_ENABLE_FLAG = 'enabled'
-GIR_MESON_DISABLE_FLAG = 'disabled'
-
-# libsoup-gnome is entirely deprecated and just stubs in 2.42 onwards. Disable by default.
-PACKAGECONFIG ??= ""
-PACKAGECONFIG[gssapi] = "-Dgssapi=enabled,-Dgssapi=disabled,krb5"
-
-EXTRA_OEMESON:append = " -Dvapi=disabled -Dtls_check=false"
-
-GTKDOC_MESON_OPTION = "gtk_doc"
-
-# When built without gnome support, libsoup will contain only one shared lib
-# and will therefore become subject to renaming by debian.bbclass. Prevent
-# renaming in order to keep the package name consistent regardless of whether
-# gnome support is enabled or disabled.
-DEBIAN_NOAUTONAME:${PN} = "1"
-
-# glib-networking is needed for SSL, proxies, etc.
-RRECOMMENDS:${PN} = "glib-networking"
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-support/libsoup/libsoup_3.4.4.bb b/meta/recipes-support/libsoup/libsoup_3.4.4.bb
new file mode 100644
index 0000000000..6f7cac4cf8
--- /dev/null
+++ b/meta/recipes-support/libsoup/libsoup_3.4.4.bb
@@ -0,0 +1,59 @@
+SUMMARY = "An HTTP library implementation in C"
+DESCRIPTION = "libsoup is an HTTP client/server library for GNOME. It uses GObjects \
+and the glib main loop, to integrate well with GNOME applications."
+HOMEPAGE = "https://wiki.gnome.org/Projects/libsoup"
+BUGTRACKER = "https://bugzilla.gnome.org/"
+SECTION = "x11/gnome/libs"
+LICENSE = "LGPL-2.0-only"
+LIC_FILES_CHKSUM = "file://COPYING;md5=5f30f0716dfdd0d91eb439ebec522ec2"
+
+DEPENDS = "glib-2.0 glib-2.0-native libxml2 sqlite3 libpsl nghttp2"
+
+SHRT_VER = "${@d.getVar('PV').split('.')[0]}.${@d.getVar('PV').split('.')[1]}"
+
+SRC_URI = "${GNOME_MIRROR}/libsoup/${SHRT_VER}/libsoup-${PV}.tar.xz"
+SRC_URI[sha256sum] = "291c67725f36ed90ea43efff25064b69c5a2d1981488477c05c481a3b4b0c5aa"
+
+PROVIDES = "libsoup-3.0"
+CVE_PRODUCT = "libsoup"
+
+S = "${WORKDIR}/libsoup-${PV}"
+
+inherit meson gettext pkgconfig upstream-version-is-even gobject-introspection gi-docgen vala
+
+GIR_MESON_ENABLE_FLAG = 'enabled'
+GIR_MESON_DISABLE_FLAG = 'disabled'
+
+PACKAGECONFIG ??= ""
+PACKAGECONFIG[brotli] = "-Dbrotli=enabled,-Dbrotli=disabled,brotli"
+PACKAGECONFIG[gssapi] = "-Dgssapi=enabled,-Dgssapi=disabled,krb5"
+PACKAGECONFIG[ntlm] = "-Dntlm=enabled,-Dntlm=disabled"
+PACKAGECONFIG[sysprof] = "-Dsysprof=enabled,-Dsysprof=disabled,sysprof"
+
+# Tell libsoup where the target ntlm_auth is installed
+do_write_config:append:class-target() {
+ cat >${WORKDIR}/soup.cross <<EOF
+[binaries]
+ntlm_auth = '${bindir}/ntlm_auth'
+EOF
+}
+EXTRA_OEMESON:append:class-target = " --cross-file ${WORKDIR}/soup.cross"
+
+EXTRA_OEMESON += "-Dtls_check=false"
+# Disable the test suites
+EXTRA_OEMESON += "-Dtests=false -Dautobahn=disabled -Dpkcs11_tests=disabled"
+
+GIDOCGEN_MESON_OPTION = 'docs'
+GIDOCGEN_MESON_ENABLE_FLAG = 'enabled'
+GIDOCGEN_MESON_DISABLE_FLAG = 'disabled'
+
+# When built without gnome support, libsoup will contain only one shared lib
+# and will therefore become subject to renaming by debian.bbclass. Prevent
+# renaming in order to keep the package name consistent regardless of whether
+# gnome support is enabled or disabled.
+DEBIAN_NOAUTONAME:${PN} = "1"
+
+# glib-networking is needed for SSL, proxies, etc.
+RRECOMMENDS:${PN} = "glib-networking"
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-support/libssh2/files/0001-Don-t-let-host-enviroment-to-decide-if-a-test-is-bui.patch b/meta/recipes-support/libssh2/files/0001-Don-t-let-host-enviroment-to-decide-if-a-test-is-bui.patch
deleted file mode 100644
index b1204e49eb..0000000000
--- a/meta/recipes-support/libssh2/files/0001-Don-t-let-host-enviroment-to-decide-if-a-test-is-bui.patch
+++ /dev/null
@@ -1,44 +0,0 @@
-From f6abce5ba41a412a247250dcd80e387e53474466 Mon Sep 17 00:00:00 2001
-From: Your Name <you@example.com>
-Date: Mon, 28 Dec 2020 02:08:03 +0000
-Subject: [PATCH] Don't let host enviroment to decide if a test is build
-
-test ssh2.sh need sshd, for cross compile, we need it on target, so
-don't use SSHD on host to decide weither to build a test
-
-Upstream-Status: Inappropriate[oe specific]
-
-Signed-off-by: Changqing Li <changqing.li@windriver.com>
-
----
- tests/Makefile.am | 6 +-----
- 1 file changed, 1 insertion(+), 5 deletions(-)
-
-diff --git a/tests/Makefile.am b/tests/Makefile.am
-index dc0922f..6cbc35d 100644
---- a/tests/Makefile.am
-+++ b/tests/Makefile.am
-@@ -1,16 +1,12 @@
- AM_CPPFLAGS = -I$(top_srcdir)/src -I$(top_srcdir)/include -I$(top_builddir)/src
- LDADD = ../src/libssh2.la
-
--if SSHD
- noinst_PROGRAMS = ssh2
- ssh2_SOURCES = ssh2.c
--endif
-
- ctests = simple$(EXEEXT)
- TESTS = $(ctests) mansyntax.sh
--if SSHD
- TESTS += ssh2.sh
--endif
- check_PROGRAMS = $(ctests)
-
- TESTS_ENVIRONMENT = SSHD=$(SSHD) EXEEXT=$(EXEEXT)
-@@ -38,4 +34,4 @@ if OPENSSL
- # EXTRA_DIST += test_public_key_auth_succeeds_with_correct_encrypted_ed25519_key.c
- # EXTRA_DIST += test_public_key_auth_succeeds_with_correct_ed25519_key_from_mem.c
- EXTRA_DIST += test_public_key_auth_succeeds_with_correct_rsa_openssh_key.c
--endif
-\ No newline at end of file
-+endif
diff --git a/meta/recipes-support/libssh2/files/run-ptest b/meta/recipes-support/libssh2/files/run-ptest
deleted file mode 100644
index 9e2fce2d24..0000000000
--- a/meta/recipes-support/libssh2/files/run-ptest
+++ /dev/null
@@ -1,9 +0,0 @@
-#!/bin/sh
-
-ptestdir=$(dirname "$(readlink -f "$0")")
-cd tests
-# omit ssh2.sh until https://github.com/libssh2/libssh2/issues/630 is fixed
-for test in simple mansyntax.sh
-do
- ./../test-driver --test-name $test --log-file ../$test.log --trs-file ../$test.trs --color-tests no --enable-hard-errors yes --expect-failure no -- ./$test
-done
diff --git a/meta/recipes-support/libssh2/libssh2/CVE-2023-48795.patch b/meta/recipes-support/libssh2/libssh2/CVE-2023-48795.patch
new file mode 100644
index 0000000000..ab0f419ac5
--- /dev/null
+++ b/meta/recipes-support/libssh2/libssh2/CVE-2023-48795.patch
@@ -0,0 +1,466 @@
+From d4634630432594b139b3af6b9f254b890c0f275d Mon Sep 17 00:00:00 2001
+From: Michael Buckley <michael@buckleyisms.com>
+Date: Thu, 30 Nov 2023 15:08:02 -0800
+Subject: [PATCH] src: add 'strict KEX' to fix CVE-2023-48795 "Terrapin Attack"
+
+Refs:
+https://terrapin-attack.com/
+https://seclists.org/oss-sec/2023/q4/292
+https://osv.dev/list?ecosystem=&q=CVE-2023-48795
+https://github.com/advisories/GHSA-45x7-px36-x8w8
+https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2023-48795
+
+Fixes #1290
+Closes #1291
+
+CVE: CVE-2023-48795
+Upstream-Status: Backport
+Signed-off-by: Ross Burton <ross.burton@arm.com>
+---
+ src/kex.c | 63 +++++++++++++++++++++++------------
+ src/libssh2_priv.h | 18 +++++++---
+ src/packet.c | 83 +++++++++++++++++++++++++++++++++++++++++++---
+ src/packet.h | 2 +-
+ src/session.c | 3 ++
+ src/transport.c | 12 ++++++-
+ 6 files changed, 149 insertions(+), 32 deletions(-)
+
+diff --git a/src/kex.c b/src/kex.c
+index d4034a0a..b4b748ca 100644
+--- a/src/kex.c
++++ b/src/kex.c
+@@ -3037,6 +3037,13 @@ kex_method_extension_negotiation = {
+ 0,
+ };
+
++static const LIBSSH2_KEX_METHOD
++kex_method_strict_client_extension = {
++ "kex-strict-c-v00@openssh.com",
++ NULL,
++ 0,
++};
++
+ static const LIBSSH2_KEX_METHOD *libssh2_kex_methods[] = {
+ #if LIBSSH2_ED25519
+ &kex_method_ssh_curve25519_sha256,
+@@ -3055,6 +3062,7 @@ static const LIBSSH2_KEX_METHOD *libssh2_kex_methods[] = {
+ &kex_method_diffie_helman_group1_sha1,
+ &kex_method_diffie_helman_group_exchange_sha1,
+ &kex_method_extension_negotiation,
++ &kex_method_strict_client_extension,
+ NULL
+ };
+
+@@ -3307,13 +3315,13 @@ static int kexinit(LIBSSH2_SESSION * session)
+ return 0;
+ }
+
+-/* kex_agree_instr
++/* _libssh2_kex_agree_instr
+ * Kex specific variant of strstr()
+ * Needle must be preceded by BOL or ',', and followed by ',' or EOL
+ */
+-static unsigned char *
+-kex_agree_instr(unsigned char *haystack, size_t haystack_len,
+- const unsigned char *needle, size_t needle_len)
++unsigned char *
++_libssh2_kex_agree_instr(unsigned char *haystack, size_t haystack_len,
++ const unsigned char *needle, size_t needle_len)
+ {
+ unsigned char *s;
+ unsigned char *end_haystack;
+@@ -3398,7 +3406,7 @@ static int kex_agree_hostkey(LIBSSH2_SESSION * session,
+ while(s && *s) {
+ unsigned char *p = (unsigned char *) strchr((char *) s, ',');
+ size_t method_len = (p ? (size_t)(p - s) : strlen((char *) s));
+- if(kex_agree_instr(hostkey, hostkey_len, s, method_len)) {
++ if(_libssh2_kex_agree_instr(hostkey, hostkey_len, s, method_len)) {
+ const LIBSSH2_HOSTKEY_METHOD *method =
+ (const LIBSSH2_HOSTKEY_METHOD *)
+ kex_get_method_by_name((char *) s, method_len,
+@@ -3432,9 +3440,9 @@ static int kex_agree_hostkey(LIBSSH2_SESSION * session,
+ }
+
+ while(hostkeyp && (*hostkeyp) && (*hostkeyp)->name) {
+- s = kex_agree_instr(hostkey, hostkey_len,
+- (unsigned char *) (*hostkeyp)->name,
+- strlen((*hostkeyp)->name));
++ s = _libssh2_kex_agree_instr(hostkey, hostkey_len,
++ (unsigned char *) (*hostkeyp)->name,
++ strlen((*hostkeyp)->name));
+ if(s) {
+ /* So far so good, but does it suit our purposes? (Encrypting vs
+ Signing) */
+@@ -3468,6 +3476,12 @@ static int kex_agree_kex_hostkey(LIBSSH2_SESSION * session, unsigned char *kex,
+ {
+ const LIBSSH2_KEX_METHOD **kexp = libssh2_kex_methods;
+ unsigned char *s;
++ const unsigned char *strict =
++ (unsigned char *)"kex-strict-s-v00@openssh.com";
++
++ if(_libssh2_kex_agree_instr(kex, kex_len, strict, 28)) {
++ session->kex_strict = 1;
++ }
+
+ if(session->kex_prefs) {
+ s = (unsigned char *) session->kex_prefs;
+@@ -3475,7 +3489,7 @@ static int kex_agree_kex_hostkey(LIBSSH2_SESSION * session, unsigned char *kex,
+ while(s && *s) {
+ unsigned char *q, *p = (unsigned char *) strchr((char *) s, ',');
+ size_t method_len = (p ? (size_t)(p - s) : strlen((char *) s));
+- q = kex_agree_instr(kex, kex_len, s, method_len);
++ q = _libssh2_kex_agree_instr(kex, kex_len, s, method_len);
+ if(q) {
+ const LIBSSH2_KEX_METHOD *method = (const LIBSSH2_KEX_METHOD *)
+ kex_get_method_by_name((char *) s, method_len,
+@@ -3509,9 +3523,9 @@ static int kex_agree_kex_hostkey(LIBSSH2_SESSION * session, unsigned char *kex,
+ }
+
+ while(*kexp && (*kexp)->name) {
+- s = kex_agree_instr(kex, kex_len,
+- (unsigned char *) (*kexp)->name,
+- strlen((*kexp)->name));
++ s = _libssh2_kex_agree_instr(kex, kex_len,
++ (unsigned char *) (*kexp)->name,
++ strlen((*kexp)->name));
+ if(s) {
+ /* We've agreed on a key exchange method,
+ * Can we agree on a hostkey that works with this kex?
+@@ -3555,7 +3569,7 @@ static int kex_agree_crypt(LIBSSH2_SESSION * session,
+ unsigned char *p = (unsigned char *) strchr((char *) s, ',');
+ size_t method_len = (p ? (size_t)(p - s) : strlen((char *) s));
+
+- if(kex_agree_instr(crypt, crypt_len, s, method_len)) {
++ if(_libssh2_kex_agree_instr(crypt, crypt_len, s, method_len)) {
+ const LIBSSH2_CRYPT_METHOD *method =
+ (const LIBSSH2_CRYPT_METHOD *)
+ kex_get_method_by_name((char *) s, method_len,
+@@ -3577,9 +3591,9 @@ static int kex_agree_crypt(LIBSSH2_SESSION * session,
+ }
+
+ while(*cryptp && (*cryptp)->name) {
+- s = kex_agree_instr(crypt, crypt_len,
+- (unsigned char *) (*cryptp)->name,
+- strlen((*cryptp)->name));
++ s = _libssh2_kex_agree_instr(crypt, crypt_len,
++ (unsigned char *) (*cryptp)->name,
++ strlen((*cryptp)->name));
+ if(s) {
+ endpoint->crypt = *cryptp;
+ return 0;
+@@ -3619,7 +3633,7 @@ static int kex_agree_mac(LIBSSH2_SESSION * session,
+ unsigned char *p = (unsigned char *) strchr((char *) s, ',');
+ size_t method_len = (p ? (size_t)(p - s) : strlen((char *) s));
+
+- if(kex_agree_instr(mac, mac_len, s, method_len)) {
++ if(_libssh2_kex_agree_instr(mac, mac_len, s, method_len)) {
+ const LIBSSH2_MAC_METHOD *method = (const LIBSSH2_MAC_METHOD *)
+ kex_get_method_by_name((char *) s, method_len,
+ (const LIBSSH2_COMMON_METHOD **)
+@@ -3640,8 +3654,9 @@ static int kex_agree_mac(LIBSSH2_SESSION * session,
+ }
+
+ while(*macp && (*macp)->name) {
+- s = kex_agree_instr(mac, mac_len, (unsigned char *) (*macp)->name,
+- strlen((*macp)->name));
++ s = _libssh2_kex_agree_instr(mac, mac_len,
++ (unsigned char *) (*macp)->name,
++ strlen((*macp)->name));
+ if(s) {
+ endpoint->mac = *macp;
+ return 0;
+@@ -3672,7 +3687,7 @@ static int kex_agree_comp(LIBSSH2_SESSION *session,
+ unsigned char *p = (unsigned char *) strchr((char *) s, ',');
+ size_t method_len = (p ? (size_t)(p - s) : strlen((char *) s));
+
+- if(kex_agree_instr(comp, comp_len, s, method_len)) {
++ if(_libssh2_kex_agree_instr(comp, comp_len, s, method_len)) {
+ const LIBSSH2_COMP_METHOD *method =
+ (const LIBSSH2_COMP_METHOD *)
+ kex_get_method_by_name((char *) s, method_len,
+@@ -3694,8 +3709,9 @@ static int kex_agree_comp(LIBSSH2_SESSION *session,
+ }
+
+ while(*compp && (*compp)->name) {
+- s = kex_agree_instr(comp, comp_len, (unsigned char *) (*compp)->name,
+- strlen((*compp)->name));
++ s = _libssh2_kex_agree_instr(comp, comp_len,
++ (unsigned char *) (*compp)->name,
++ strlen((*compp)->name));
+ if(s) {
+ endpoint->comp = *compp;
+ return 0;
+@@ -3876,6 +3892,7 @@ _libssh2_kex_exchange(LIBSSH2_SESSION * session, int reexchange,
+ session->local.kexinit = key_state->oldlocal;
+ session->local.kexinit_len = key_state->oldlocal_len;
+ key_state->state = libssh2_NB_state_idle;
++ session->state &= ~LIBSSH2_STATE_INITIAL_KEX;
+ session->state &= ~LIBSSH2_STATE_KEX_ACTIVE;
+ session->state &= ~LIBSSH2_STATE_EXCHANGING_KEYS;
+ return -1;
+@@ -3901,6 +3918,7 @@ _libssh2_kex_exchange(LIBSSH2_SESSION * session, int reexchange,
+ session->local.kexinit = key_state->oldlocal;
+ session->local.kexinit_len = key_state->oldlocal_len;
+ key_state->state = libssh2_NB_state_idle;
++ session->state &= ~LIBSSH2_STATE_INITIAL_KEX;
+ session->state &= ~LIBSSH2_STATE_KEX_ACTIVE;
+ session->state &= ~LIBSSH2_STATE_EXCHANGING_KEYS;
+ return -1;
+@@ -3949,6 +3967,7 @@ _libssh2_kex_exchange(LIBSSH2_SESSION * session, int reexchange,
+ session->remote.kexinit = NULL;
+ }
+
++ session->state &= ~LIBSSH2_STATE_INITIAL_KEX;
+ session->state &= ~LIBSSH2_STATE_KEX_ACTIVE;
+ session->state &= ~LIBSSH2_STATE_EXCHANGING_KEYS;
+
+diff --git a/src/libssh2_priv.h b/src/libssh2_priv.h
+index 82c3afe2..ee1d8b5c 100644
+--- a/src/libssh2_priv.h
++++ b/src/libssh2_priv.h
+@@ -699,6 +699,9 @@ struct _LIBSSH2_SESSION
+ /* key signing algorithm preferences -- NULL yields server order */
+ char *sign_algo_prefs;
+
++ /* Whether to use the OpenSSH Strict KEX extension */
++ int kex_strict;
++
+ /* (remote as source of data -- packet_read ) */
+ libssh2_endpoint_data remote;
+
+@@ -870,6 +873,7 @@ struct _LIBSSH2_SESSION
+ int fullpacket_macstate;
+ size_t fullpacket_payload_len;
+ int fullpacket_packet_type;
++ uint32_t fullpacket_required_type;
+
+ /* State variables used in libssh2_sftp_init() */
+ libssh2_nonblocking_states sftpInit_state;
+@@ -910,10 +914,11 @@ struct _LIBSSH2_SESSION
+ };
+
+ /* session.state bits */
+-#define LIBSSH2_STATE_EXCHANGING_KEYS 0x00000001
+-#define LIBSSH2_STATE_NEWKEYS 0x00000002
+-#define LIBSSH2_STATE_AUTHENTICATED 0x00000004
+-#define LIBSSH2_STATE_KEX_ACTIVE 0x00000008
++#define LIBSSH2_STATE_INITIAL_KEX 0x00000001
++#define LIBSSH2_STATE_EXCHANGING_KEYS 0x00000002
++#define LIBSSH2_STATE_NEWKEYS 0x00000004
++#define LIBSSH2_STATE_AUTHENTICATED 0x00000008
++#define LIBSSH2_STATE_KEX_ACTIVE 0x00000010
+
+ /* session.flag helpers */
+ #ifdef MSG_NOSIGNAL
+@@ -1144,6 +1149,11 @@ ssize_t _libssh2_send(libssh2_socket_t socket, const void *buffer,
+ int _libssh2_kex_exchange(LIBSSH2_SESSION * session, int reexchange,
+ key_exchange_state_t * state);
+
++unsigned char *_libssh2_kex_agree_instr(unsigned char *haystack,
++ size_t haystack_len,
++ const unsigned char *needle,
++ size_t needle_len);
++
+ /* Let crypt.c/hostkey.c expose their method structs */
+ const LIBSSH2_CRYPT_METHOD **libssh2_crypt_methods(void);
+ const LIBSSH2_HOSTKEY_METHOD **libssh2_hostkey_methods(void);
+diff --git a/src/packet.c b/src/packet.c
+index b5b41981..35d4d39e 100644
+--- a/src/packet.c
++++ b/src/packet.c
+@@ -605,14 +605,13 @@ authagent_exit:
+ * layer when it has received a packet.
+ *
+ * The input pointer 'data' is pointing to allocated data that this function
+- * is asked to deal with so on failure OR success, it must be freed fine.
+- * The only exception is when the return code is LIBSSH2_ERROR_EAGAIN.
++ * will be freed unless return the code is LIBSSH2_ERROR_EAGAIN.
+ *
+ * This function will always be called with 'datalen' greater than zero.
+ */
+ int
+ _libssh2_packet_add(LIBSSH2_SESSION * session, unsigned char *data,
+- size_t datalen, int macstate)
++ size_t datalen, int macstate, uint32_t seq)
+ {
+ int rc = 0;
+ unsigned char *message = NULL;
+@@ -657,6 +656,70 @@ _libssh2_packet_add(LIBSSH2_SESSION * session, unsigned char *data,
+ break;
+ }
+
++ if(session->state & LIBSSH2_STATE_INITIAL_KEX) {
++ if(msg == SSH_MSG_KEXINIT) {
++ if(!session->kex_strict) {
++ if(datalen < 17) {
++ LIBSSH2_FREE(session, data);
++ session->packAdd_state = libssh2_NB_state_idle;
++ return _libssh2_error(session,
++ LIBSSH2_ERROR_BUFFER_TOO_SMALL,
++ "Data too short extracting kex");
++ }
++ else {
++ const unsigned char *strict =
++ (unsigned char *)"kex-strict-s-v00@openssh.com";
++ struct string_buf buf;
++ unsigned char *algs = NULL;
++ size_t algs_len = 0;
++
++ buf.data = (unsigned char *)data;
++ buf.dataptr = buf.data;
++ buf.len = datalen;
++ buf.dataptr += 17; /* advance past type and cookie */
++
++ if(_libssh2_get_string(&buf, &algs, &algs_len)) {
++ LIBSSH2_FREE(session, data);
++ session->packAdd_state = libssh2_NB_state_idle;
++ return _libssh2_error(session,
++ LIBSSH2_ERROR_BUFFER_TOO_SMALL,
++ "Algs too short");
++ }
++
++ if(algs_len == 0 ||
++ _libssh2_kex_agree_instr(algs, algs_len, strict, 28)) {
++ session->kex_strict = 1;
++ }
++ }
++ }
++
++ if(session->kex_strict && seq) {
++ LIBSSH2_FREE(session, data);
++ session->socket_state = LIBSSH2_SOCKET_DISCONNECTED;
++ session->packAdd_state = libssh2_NB_state_idle;
++ libssh2_session_disconnect(session, "strict KEX violation: "
++ "KEXINIT was not the first packet");
++
++ return _libssh2_error(session, LIBSSH2_ERROR_SOCKET_DISCONNECT,
++ "strict KEX violation: "
++ "KEXINIT was not the first packet");
++ }
++ }
++
++ if(session->kex_strict && session->fullpacket_required_type &&
++ session->fullpacket_required_type != msg) {
++ LIBSSH2_FREE(session, data);
++ session->socket_state = LIBSSH2_SOCKET_DISCONNECTED;
++ session->packAdd_state = libssh2_NB_state_idle;
++ libssh2_session_disconnect(session, "strict KEX violation: "
++ "unexpected packet type");
++
++ return _libssh2_error(session, LIBSSH2_ERROR_SOCKET_DISCONNECT,
++ "strict KEX violation: "
++ "unexpected packet type");
++ }
++ }
++
+ if(session->packAdd_state == libssh2_NB_state_allocated) {
+ /* A couple exceptions to the packet adding rule: */
+ switch(msg) {
+@@ -1341,6 +1404,15 @@ _libssh2_packet_ask(LIBSSH2_SESSION * session, unsigned char packet_type,
+
+ return 0;
+ }
++ else if(session->kex_strict &&
++ (session->state & LIBSSH2_STATE_INITIAL_KEX)) {
++ libssh2_session_disconnect(session, "strict KEX violation: "
++ "unexpected packet type");
++
++ return _libssh2_error(session, LIBSSH2_ERROR_SOCKET_DISCONNECT,
++ "strict KEX violation: "
++ "unexpected packet type");
++ }
+ packet = _libssh2_list_next(&packet->node);
+ }
+ return -1;
+@@ -1402,7 +1474,10 @@ _libssh2_packet_require(LIBSSH2_SESSION * session, unsigned char packet_type,
+ }
+
+ while(session->socket_state == LIBSSH2_SOCKET_CONNECTED) {
+- int ret = _libssh2_transport_read(session);
++ int ret;
++ session->fullpacket_required_type = packet_type;
++ ret = _libssh2_transport_read(session);
++ session->fullpacket_required_type = 0;
+ if(ret == LIBSSH2_ERROR_EAGAIN)
+ return ret;
+ else if(ret < 0) {
+diff --git a/src/packet.h b/src/packet.h
+index 79018bcf..6ea100a5 100644
+--- a/src/packet.h
++++ b/src/packet.h
+@@ -71,6 +71,6 @@ int _libssh2_packet_burn(LIBSSH2_SESSION * session,
+ int _libssh2_packet_write(LIBSSH2_SESSION * session, unsigned char *data,
+ unsigned long data_len);
+ int _libssh2_packet_add(LIBSSH2_SESSION * session, unsigned char *data,
+- size_t datalen, int macstate);
++ size_t datalen, int macstate, uint32_t seq);
+
+ #endif /* __LIBSSH2_PACKET_H */
+diff --git a/src/session.c b/src/session.c
+index a4d602ba..f4bafb57 100644
+--- a/src/session.c
++++ b/src/session.c
+@@ -464,6 +464,8 @@ libssh2_session_init_ex(LIBSSH2_ALLOC_FUNC((*my_alloc)),
+ session->abstract = abstract;
+ session->api_timeout = 0; /* timeout-free API by default */
+ session->api_block_mode = 1; /* blocking API by default */
++ session->state = LIBSSH2_STATE_INITIAL_KEX;
++ session->fullpacket_required_type = 0;
+ session->packet_read_timeout = LIBSSH2_DEFAULT_READ_TIMEOUT;
+ session->flag.quote_paths = 1; /* default behavior is to quote paths
+ for the scp subsystem */
+@@ -1186,6 +1188,7 @@ libssh2_session_disconnect_ex(LIBSSH2_SESSION *session, int reason,
+ const char *desc, const char *lang)
+ {
+ int rc;
++ session->state &= ~LIBSSH2_STATE_INITIAL_KEX;
+ session->state &= ~LIBSSH2_STATE_EXCHANGING_KEYS;
+ BLOCK_ADJUST(rc, session,
+ session_disconnect(session, reason, desc, lang));
+diff --git a/src/transport.c b/src/transport.c
+index 6d902d33..3b30ff84 100644
+--- a/src/transport.c
++++ b/src/transport.c
+@@ -187,6 +187,7 @@ fullpacket(LIBSSH2_SESSION * session, int encrypted /* 1 or 0 */ )
+ struct transportpacket *p = &session->packet;
+ int rc;
+ int compressed;
++ uint32_t seq = session->remote.seqno;
+
+ if(session->fullpacket_state == libssh2_NB_state_idle) {
+ session->fullpacket_macstate = LIBSSH2_MAC_CONFIRMED;
+@@ -318,7 +319,7 @@ fullpacket(LIBSSH2_SESSION * session, int encrypted /* 1 or 0 */ )
+ if(session->fullpacket_state == libssh2_NB_state_created) {
+ rc = _libssh2_packet_add(session, p->payload,
+ session->fullpacket_payload_len,
+- session->fullpacket_macstate);
++ session->fullpacket_macstate, seq);
+ if(rc == LIBSSH2_ERROR_EAGAIN)
+ return rc;
+ if(rc) {
+@@ -329,6 +330,11 @@ fullpacket(LIBSSH2_SESSION * session, int encrypted /* 1 or 0 */ )
+
+ session->fullpacket_state = libssh2_NB_state_idle;
+
++ if(session->kex_strict &&
++ session->fullpacket_packet_type == SSH_MSG_NEWKEYS) {
++ session->remote.seqno = 0;
++ }
++
+ return session->fullpacket_packet_type;
+ }
+
+@@ -1091,6 +1097,10 @@ int _libssh2_transport_send(LIBSSH2_SESSION *session,
+
+ session->local.seqno++;
+
++ if(session->kex_strict && data[0] == SSH_MSG_NEWKEYS) {
++ session->local.seqno = 0;
++ }
++
+ ret = LIBSSH2_SEND(session, p->outbuf, total_length,
+ LIBSSH2_SOCKET_SEND_FLAGS(session));
+ if(ret < 0)
+--
+2.34.1
+
diff --git a/meta/recipes-support/libssh2/libssh2/run-ptest b/meta/recipes-support/libssh2/libssh2/run-ptest
new file mode 100644
index 0000000000..0f5526e316
--- /dev/null
+++ b/meta/recipes-support/libssh2/libssh2/run-ptest
@@ -0,0 +1,8 @@
+#!/bin/sh
+
+ptestdir=$(dirname "$(readlink -f "$0")")
+cd tests
+for test in mansyntax.sh test_simple test_sshd.test
+do
+ ./../test-driver --test-name $test --log-file ../$test.log --trs-file ../$test.trs --color-tests no --enable-hard-errors yes --expect-failure no -- ./$test
+done
diff --git a/meta/recipes-support/libssh2/libssh2_1.10.0.bb b/meta/recipes-support/libssh2/libssh2_1.10.0.bb
deleted file mode 100644
index 072d6819c0..0000000000
--- a/meta/recipes-support/libssh2/libssh2_1.10.0.bb
+++ /dev/null
@@ -1,50 +0,0 @@
-SUMMARY = "A client-side C library implementing the SSH2 protocol"
-HOMEPAGE = "http://www.libssh2.org/"
-SECTION = "libs"
-
-DEPENDS = "zlib"
-
-LICENSE = "BSD-3-Clause"
-LIC_FILES_CHKSUM = "file://COPYING;md5=3e089ad0cf27edf1e7f261dfcd06acc7"
-
-SRC_URI = "http://www.libssh2.org/download/${BP}.tar.gz \
- file://run-ptest \
- "
-
-SRC_URI:append:ptest = " file://0001-Don-t-let-host-enviroment-to-decide-if-a-test-is-bui.patch"
-
-SRC_URI[sha256sum] = "2d64e90f3ded394b91d3a2e774ca203a4179f69aebee03003e5a6fa621e41d51"
-
-inherit autotools pkgconfig ptest
-
-EXTRA_OECONF += "\
- --with-libz \
- --with-libz-prefix=${STAGING_LIBDIR} \
- "
-
-# only one of openssl and gcrypt could be set
-PACKAGECONFIG ??= "openssl"
-PACKAGECONFIG[openssl] = "--with-crypto=openssl --with-libssl-prefix=${STAGING_LIBDIR}, , openssl"
-PACKAGECONFIG[gcrypt] = "--with-crypto=libgcrypt --with-libgcrypt-prefix=${STAGING_EXECPREFIXDIR}, , libgcrypt"
-
-BBCLASSEXTEND = "native nativesdk"
-
-# required for ptest on documentation
-RDEPENDS:${PN}-ptest = "man-db openssh util-linux-col"
-RDEPENDS:${PN}-ptest:append:libc-glibc = " locale-base-en-us"
-
-do_compile_ptest() {
- sed -i "/\$(MAKE) \$(AM_MAKEFLAGS) check-TESTS/d" tests/Makefile
- oe_runmake check
-}
-
-do_install_ptest() {
- install -d ${D}${PTEST_PATH}/tests
- install -m 0755 ${S}/test-driver ${D}${PTEST_PATH}/
- cp -rf ${B}/tests/.libs/* ${D}${PTEST_PATH}/tests/
- cp -rf ${S}/tests/mansyntax.sh ${D}${PTEST_PATH}/tests/
- cp -rf ${S}/tests/ssh2.sh ${D}${PTEST_PATH}/tests/
- cp -rf ${S}/tests/etc ${D}${PTEST_PATH}/tests/
- mkdir -p ${D}${PTEST_PATH}/docs
- cp -r ${S}/docs/* ${D}${PTEST_PATH}/docs/
-}
diff --git a/meta/recipes-support/libssh2/libssh2_1.11.0.bb b/meta/recipes-support/libssh2/libssh2_1.11.0.bb
new file mode 100644
index 0000000000..5100e6f7f9
--- /dev/null
+++ b/meta/recipes-support/libssh2/libssh2_1.11.0.bb
@@ -0,0 +1,53 @@
+SUMMARY = "A client-side C library implementing the SSH2 protocol"
+HOMEPAGE = "http://www.libssh2.org/"
+SECTION = "libs"
+
+DEPENDS = "zlib"
+
+LICENSE = "BSD-3-Clause"
+LIC_FILES_CHKSUM = "file://COPYING;md5=24a33237426720395ebb1dd1349ca225"
+
+SRC_URI = "http://www.libssh2.org/download/${BP}.tar.gz \
+ file://run-ptest \
+ file://CVE-2023-48795.patch \
+ "
+
+SRC_URI[sha256sum] = "3736161e41e2693324deb38c26cfdc3efe6209d634ba4258db1cecff6a5ad461"
+
+inherit autotools pkgconfig ptest
+
+EXTRA_OECONF += "\
+ --with-libz \
+ --with-libz-prefix=${STAGING_LIBDIR} \
+ --disable-rpath \
+ "
+DISABLE_STATIC = ""
+
+# only one of openssl and gcrypt could be set
+PACKAGECONFIG ??= "openssl"
+PACKAGECONFIG[openssl] = "--with-crypto=openssl --with-libssl-prefix=${STAGING_LIBDIR}, , openssl"
+PACKAGECONFIG[gcrypt] = "--with-crypto=libgcrypt --with-libgcrypt-prefix=${STAGING_EXECPREFIXDIR}, , libgcrypt"
+
+BBCLASSEXTEND = "native nativesdk"
+
+# required for ptest on documentation
+RDEPENDS:${PN}-ptest = "bash man-db openssh util-linux-col"
+RDEPENDS:${PN}-ptest:append:libc-glibc = " locale-base-en-us"
+
+do_compile_ptest() {
+ sed -i "/\$(MAKE) \$(AM_MAKEFLAGS) check-TESTS/d" tests/Makefile
+ oe_runmake check
+}
+
+do_install_ptest() {
+ install -d ${D}${PTEST_PATH}/tests
+ install -m 0755 ${S}/test-driver ${D}${PTEST_PATH}/
+ cp -rf ${B}/tests/.libs/* ${D}${PTEST_PATH}/tests/
+ cp -rf ${B}/tests/test_simple ${D}${PTEST_PATH}/tests/
+ cp -rf ${S}/tests/mansyntax.sh ${D}${PTEST_PATH}/tests/
+ cp -rf ${S}/tests/key* ${D}${PTEST_PATH}/tests/
+ cp -rf ${S}/tests/openssh_server/ ${D}${PTEST_PATH}/tests/
+ cp -rf ${S}/tests/*.test ${D}${PTEST_PATH}/tests/
+ mkdir -p ${D}${PTEST_PATH}/docs
+ cp -r ${S}/docs/* ${D}${PTEST_PATH}/docs/
+}
diff --git a/meta/recipes-support/libunistring/libunistring_1.0.bb b/meta/recipes-support/libunistring/libunistring_1.0.bb
deleted file mode 100644
index b6f5100ae8..0000000000
--- a/meta/recipes-support/libunistring/libunistring_1.0.bb
+++ /dev/null
@@ -1,27 +0,0 @@
-SUMMARY = "Library for manipulating C and Unicode strings"
-
-DESCRIPTION = "Text files are nowadays usually encoded in Unicode, and may\
- consist of very different scripts from Latin letters to Chinese Hanzi\
- with many kinds of special characters accents, right-to-left writing\
- marks, hyphens, Roman numbers, and much more. But the POSIX platform\
- APIs for text do not contain adequate functions for dealing with\
- particular properties of many Unicode characters. In fact, the POSIX\
- APIs for text have several assumptions at their base which don't hold\
- for Unicode text. This library provides functions for manipulating\
- Unicode strings and for manipulating C strings according to the Unicode\
- standard. This package contains documentation."
-
-HOMEPAGE = "http://www.gnu.org/software/libunistring/"
-SECTION = "devel"
-LICENSE = "LGPL-3.0-or-later | GPL-2.0-or-later"
-LIC_FILES_CHKSUM = "file://COPYING.LIB;md5=6a6a8e020838b23406c81b19c1d46df6 \
- file://README;beginline=45;endline=65;md5=3a896a943b4da2c551e6be1af27eff8d \
- file://doc/libunistring.texi;md5=266e4297d7c18f197be3d9622ba99685 \
- "
-DEPENDS = "gperf-native"
-
-SRC_URI = "${GNU_MIRROR}/libunistring/libunistring-${PV}.tar.gz"
-SRC_URI[sha256sum] = "3c0184c0e492d7c208ce31d25dd1d2c58f0c3ed6cbbe032c5b248cddad318544"
-
-inherit autotools texinfo
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-support/libunistring/libunistring_1.2.bb b/meta/recipes-support/libunistring/libunistring_1.2.bb
new file mode 100644
index 0000000000..3018dbfdfb
--- /dev/null
+++ b/meta/recipes-support/libunistring/libunistring_1.2.bb
@@ -0,0 +1,27 @@
+SUMMARY = "Library for manipulating C and Unicode strings"
+
+DESCRIPTION = "Text files are nowadays usually encoded in Unicode, and may\
+ consist of very different scripts from Latin letters to Chinese Hanzi\
+ with many kinds of special characters accents, right-to-left writing\
+ marks, hyphens, Roman numbers, and much more. But the POSIX platform\
+ APIs for text do not contain adequate functions for dealing with\
+ particular properties of many Unicode characters. In fact, the POSIX\
+ APIs for text have several assumptions at their base which don't hold\
+ for Unicode text. This library provides functions for manipulating\
+ Unicode strings and for manipulating C strings according to the Unicode\
+ standard. This package contains documentation."
+
+HOMEPAGE = "http://www.gnu.org/software/libunistring/"
+SECTION = "devel"
+LICENSE = "LGPL-3.0-or-later | GPL-2.0-or-later"
+LIC_FILES_CHKSUM = "file://COPYING.LIB;md5=6a6a8e020838b23406c81b19c1d46df6 \
+ file://README;beginline=45;endline=65;md5=3a896a943b4da2c551e6be1af27eff8d \
+ file://doc/libunistring.texi;md5=36b7d20daef7fbcc032333ae2429aa94 \
+ "
+DEPENDS = "gperf-native"
+
+SRC_URI = "${GNU_MIRROR}/libunistring/libunistring-${PV}.tar.gz"
+SRC_URI[sha256sum] = "fd6d5662fa706487c48349a758b57bc149ce94ec6c30624ec9fdc473ceabbc8e"
+
+inherit autotools texinfo
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-support/libunwind/libunwind.inc b/meta/recipes-support/libunwind/libunwind.inc
deleted file mode 100644
index bf74f9fa3c..0000000000
--- a/meta/recipes-support/libunwind/libunwind.inc
+++ /dev/null
@@ -1,23 +0,0 @@
-SUMMARY = "Library for obtaining the call-chain of a program"
-DESCRIPTION = "a portable and efficient C programming interface (API) to determine the call-chain of a program"
-HOMEPAGE = "http://www.nongnu.org/libunwind"
-LICENSE = "MIT"
-LIC_FILES_CHKSUM = "file://COPYING;md5=2d80c8ed4062b8339b715f90fa68cc9f"
-DEPENDS += "libatomic-ops"
-DEPENDS:append:libc-musl = " libucontext"
-
-inherit autotools multilib_header
-
-PACKAGECONFIG ??= ""
-PACKAGECONFIG[lzma] = "--enable-minidebuginfo,--disable-minidebuginfo,xz"
-PACKAGECONFIG[latexdocs] = "--enable-documentation, --disable-documentation, latex2man-native"
-
-EXTRA_OECONF:arm = "--enable-debug-frame"
-EXTRA_OECONF:armeb = "--enable-debug-frame"
-EXTRA_OECONF:aarch64 = "--enable-debug-frame"
-
-do_install:append () {
- oe_multilib_header libunwind.h
-}
-
-BBCLASSEXTEND = "native"
diff --git a/meta/recipes-support/libunwind/libunwind/0001-src-Gtrace-remove-unguarded-print-calls.patch b/meta/recipes-support/libunwind/libunwind/0001-src-Gtrace-remove-unguarded-print-calls.patch
new file mode 100644
index 0000000000..5840c2b4f6
--- /dev/null
+++ b/meta/recipes-support/libunwind/libunwind/0001-src-Gtrace-remove-unguarded-print-calls.patch
@@ -0,0 +1,52 @@
+From 9b27fa9bcd5cadd4c841c42710f41a090377e531 Mon Sep 17 00:00:00 2001
+From: Ross Burton <ross.burton@arm.com>
+Date: Fri, 24 Mar 2023 16:18:44 +0000
+Subject: [PATCH] src/Gtrace: remove unguarded print() calls
+
+There is a use of printf() without #include stdio.h in src/arm/Gtrace.c,
+which results in a compiler error if clang 16 is used:
+
+src/arm/Gtrace.c:529:7: error: call to undeclared library function
+'printf' with type 'int (const char *, ...)'; ISO C99 and later do not
+support implicit function declarations [-Wimplicit-function-declaration]
+
+Replace the printf("XXX") with a Dprintf, so it doesn't pull stdio in
+unless in a debug build, and reword the message to be clearer.
+
+Also there is another printf("XXX") inside a FreeBSD-specific block in
+the UNW_ARM_FRAME_SIGRETURN case, replace this with a #error as the code
+needs to be implemented.
+
+Fixes #482.
+
+Upstream-Status: Backport [9b27fa9bcd5cadd4c841c42710f41a090377e531]
+Signed-off-by: Ross Burton <ross.burton@arm.com>
+---
+ src/arm/Gtrace.c | 4 ++--
+ 1 file changed, 2 insertions(+), 2 deletions(-)
+
+diff --git a/src/arm/Gtrace.c b/src/arm/Gtrace.c
+index 51fc281d..9e0f25af 100644
+--- a/src/arm/Gtrace.c
++++ b/src/arm/Gtrace.c
+@@ -514,7 +514,7 @@ tdep_trace (unw_cursor_t *cursor, void **buffer, int *size)
+ if (likely(ret >= 0))
+ ACCESS_MEM_FAST(ret, c->validate, d, cfa + LINUX_SC_LR_OFF, lr);
+ #elif defined(__FreeBSD__)
+- printf("XXX\n");
++ #error implement UNW_ARM_FRAME_SIGRETURN on FreeBSD
+ #endif
+
+ /* Resume stack at signal restoration point. The stack is not
+@@ -526,7 +526,7 @@ tdep_trace (unw_cursor_t *cursor, void **buffer, int *size)
+ break;
+
+ case UNW_ARM_FRAME_SYSCALL:
+- printf("XXX1\n");
++ Dprintf ("%s: implement me\n", __FUNCTION__);
+ break;
+
+ default:
+--
+2.34.1
+
diff --git a/meta/recipes-support/libunwind/libunwind/0003-x86-Stub-out-x86_local_resume.patch b/meta/recipes-support/libunwind/libunwind/0003-x86-Stub-out-x86_local_resume.patch
deleted file mode 100644
index f02f13f79c..0000000000
--- a/meta/recipes-support/libunwind/libunwind/0003-x86-Stub-out-x86_local_resume.patch
+++ /dev/null
@@ -1,55 +0,0 @@
-From 0b2243f19d4ea12a2a68478a5aed503947a800af Mon Sep 17 00:00:00 2001
-From: Khem Raj <raj.khem@gmail.com>
-Date: Tue, 22 Mar 2016 16:19:29 +0000
-Subject: [PATCH] x86: Stub out x86_local_resume()
-
-its purpose seems
-to be unwinding across signal handler boundaries, which cannot happen
-in correct programs anyway. Replacing the whole function with
-something like *(volatile char *)0=0; (i.e. crash), gets a working
-libunwind
-
-Upstream-Status: Pending
-Signed-off-by: Khem Raj <raj.khem@gmail.com>
-
----
- src/x86/Gos-linux.c | 26 +-------------------------
- 1 file changed, 1 insertion(+), 25 deletions(-)
-
-diff --git a/src/x86/Gos-linux.c b/src/x86/Gos-linux.c
-index d448dce..c25ae0c 100644
---- a/src/x86/Gos-linux.c
-+++ b/src/x86/Gos-linux.c
-@@ -284,31 +284,7 @@ x86_r_uc_addr (ucontext_t *uc, int reg)
- HIDDEN int
- x86_local_resume (unw_addr_space_t as, unw_cursor_t *cursor, void *arg)
- {
-- struct cursor *c = (struct cursor *) cursor;
-- ucontext_t *uc = c->uc;
--
-- /* Ensure c->pi is up-to-date. On x86, it's relatively common to be
-- missing DWARF unwind info. We don't want to fail in that case,
-- because the frame-chain still would let us do a backtrace at
-- least. */
-- dwarf_make_proc_info (&c->dwarf);
--
-- if (unlikely (c->sigcontext_format != X86_SCF_NONE))
-- {
-- struct sigcontext *sc = (struct sigcontext *) c->sigcontext_addr;
--
-- Debug (8, "resuming at ip=%x via sigreturn(%p)\n", c->dwarf.ip, sc);
--#if !defined(__ANDROID__)
-- x86_sigreturn (sc);
--#endif
-- }
-- else
-- {
-- Debug (8, "resuming at ip=%x via setcontext()\n", c->dwarf.ip);
--#if !defined(__ANDROID__)
-- setcontext (uc);
--#endif
-- }
-+ *(volatile char *)0=0;
- return -UNW_EINVAL;
- }
-
diff --git a/meta/recipes-support/libunwind/libunwind/0004-Fix-build-on-mips-musl.patch b/meta/recipes-support/libunwind/libunwind/0004-Fix-build-on-mips-musl.patch
deleted file mode 100644
index 124d0e00b1..0000000000
--- a/meta/recipes-support/libunwind/libunwind/0004-Fix-build-on-mips-musl.patch
+++ /dev/null
@@ -1,84 +0,0 @@
-From 6bdab5cc8f1e2ec5f84fc9f59f1699a726980709 Mon Sep 17 00:00:00 2001
-From: Khem Raj <raj.khem@gmail.com>
-Date: Wed, 23 Mar 2016 06:08:59 +0000
-Subject: [PATCH 4/6] Fix build on mips/musl
-
-Do not include endian.h on musl it includes
-further headers which can not be compiled in __ASSEMBLER__
- mode
-
-Upstream-Status: Pending
-Signed-off-by: Khem Raj <raj.khem@gmail.com>
-
----
- src/coredump/_UCD_internal.h | 35 +++++++++++++++++++++++++++++++++++
- src/mips/getcontext.S | 3 +--
- 2 files changed, 36 insertions(+), 2 deletions(-)
-
-diff --git a/src/coredump/_UCD_internal.h b/src/coredump/_UCD_internal.h
-index 3c95a2a..21ed1c3 100644
---- a/src/coredump/_UCD_internal.h
-+++ b/src/coredump/_UCD_internal.h
-@@ -44,6 +44,41 @@ WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
-
- #include "libunwind_i.h"
-
-+#ifndef __GLIBC__
-+#include <sys/reg.h>
-+
-+#define EF_REG0 6
-+#define EF_REG1 7
-+#define EF_REG2 8
-+#define EF_REG3 9
-+#define EF_REG4 10
-+#define EF_REG5 11
-+#define EF_REG6 12
-+#define EF_REG7 13
-+#define EF_REG8 14
-+#define EF_REG9 15
-+#define EF_REG10 16
-+#define EF_REG11 17
-+#define EF_REG12 18
-+#define EF_REG13 19
-+#define EF_REG14 20
-+#define EF_REG15 21
-+#define EF_REG16 22
-+#define EF_REG17 23
-+#define EF_REG18 24
-+#define EF_REG19 25
-+#define EF_REG20 26
-+#define EF_REG21 27
-+#define EF_REG22 28
-+#define EF_REG23 29
-+#define EF_REG24 30
-+#define EF_REG25 31
-+#define EF_REG28 34
-+#define EF_REG29 35
-+#define EF_REG30 36
-+#define EF_REG31 37
-+#endif
-+
-
- #if SIZEOF_OFF_T == 4
- typedef uint32_t uoff_t;
-diff --git a/src/mips/getcontext.S b/src/mips/getcontext.S
-index d1dbd57..de9b681 100644
---- a/src/mips/getcontext.S
-+++ b/src/mips/getcontext.S
-@@ -24,12 +24,11 @@ OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
- WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
-
- #include "offsets.h"
--#include <endian.h>
-
- .text
-
- #if _MIPS_SIM == _ABIO32
--# if __BYTE_ORDER == __BIG_ENDIAN
-+# if __BYTE_ORDER__ == __ORDER_BIG_ENDIAN__
- # define OFFSET 4
- # else
- # define OFFSET 0
---
-2.20.1
-
diff --git a/meta/recipes-support/libunwind/libunwind/0006-Fix-for-X32.patch b/meta/recipes-support/libunwind/libunwind/0006-Fix-for-X32.patch
deleted file mode 100644
index 9941612387..0000000000
--- a/meta/recipes-support/libunwind/libunwind/0006-Fix-for-X32.patch
+++ /dev/null
@@ -1,29 +0,0 @@
-From f2eae0af620925b3686410470fc6fbc66ec1dc52 Mon Sep 17 00:00:00 2001
-From: Christopher Larson <chris_larson@mentor.com>
-Date: Tue, 13 Dec 2016 09:50:34 -0700
-Subject: [PATCH] Fix for X32
-
-Apply patch to fix the X32 build from https://github.com/sjnewbury/x32.
-
-Upstream-Status: Pending
-Signed-off-by: Christopher Larson <chris_larson@mentor.com>
-
----
- src/x86_64/Gos-linux.c | 4 ++--
- 1 file changed, 2 insertions(+), 2 deletions(-)
-
-diff --git a/src/x86_64/Gos-linux.c b/src/x86_64/Gos-linux.c
-index b489329..0550005 100644
---- a/src/x86_64/Gos-linux.c
-+++ b/src/x86_64/Gos-linux.c
-@@ -146,8 +146,8 @@ x86_64_sigreturn (unw_cursor_t *cursor)
-
- Debug (8, "resuming at ip=%llx via sigreturn(%p)\n",
- (unsigned long long) c->dwarf.ip, sc);
-- __asm__ __volatile__ ("mov %0, %%rsp;"
-- "mov %1, %%rax;"
-+ __asm__ __volatile__ ("mov %q0, %%rsp;"
-+ "mov %q1, %%rax;"
- "syscall"
- :: "r"((uint64_t)sc), "i"(SYS_rt_sigreturn)
- : "memory");
diff --git a/meta/recipes-support/libunwind/libunwind/mips-byte-order.patch b/meta/recipes-support/libunwind/libunwind/mips-byte-order.patch
new file mode 100644
index 0000000000..8848780fd1
--- /dev/null
+++ b/meta/recipes-support/libunwind/libunwind/mips-byte-order.patch
@@ -0,0 +1,35 @@
+From dbbf8110ed3fd2cbac20a8ec2ac769e13c67bab1 Mon Sep 17 00:00:00 2001
+From: Ross Burton <ross.burton@arm.com>
+Date: Tue, 16 Jan 2024 18:22:38 +0000
+Subject: [PATCH 2/2] byte order
+
+endian.h on musl/mips can't be included in __ASSEMBLER__ mode,
+so use the __BYTE_ORDER__ symbol instead.
+
+Upstream-Status: Pending
+Signed-off-by: Ross Burton <ross.burton@arm.com>
+---
+ src/mips/getcontext.S | 3 +--
+ 1 file changed, 1 insertion(+), 2 deletions(-)
+
+diff --git a/src/mips/getcontext.S b/src/mips/getcontext.S
+index d1dbd579..de9b6818 100644
+--- a/src/mips/getcontext.S
++++ b/src/mips/getcontext.S
+@@ -24,12 +24,11 @@ OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+ WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
+
+ #include "offsets.h"
+-#include <endian.h>
+
+ .text
+
+ #if _MIPS_SIM == _ABIO32
+-# if __BYTE_ORDER == __BIG_ENDIAN
++# if __BYTE_ORDER__ == __ORDER_BIG_ENDIAN__
+ # define OFFSET 4
+ # else
+ # define OFFSET 0
+--
+2.34.1
+
diff --git a/meta/recipes-support/libunwind/libunwind/mips-coredump-register.patch b/meta/recipes-support/libunwind/libunwind/mips-coredump-register.patch
new file mode 100644
index 0000000000..68adcd1d71
--- /dev/null
+++ b/meta/recipes-support/libunwind/libunwind/mips-coredump-register.patch
@@ -0,0 +1,100 @@
+From 7750e2a29b084ee033acc82abab410035e220d3f Mon Sep 17 00:00:00 2001
+From: Ross Burton <ross.burton@arm.com>
+Date: Tue, 16 Jan 2024 18:21:26 +0000
+Subject: [PATCH 1/2] coredump-mips-register
+
+glibc and musl have different names for the registers, add a
+macro that generates the names appropriately.
+
+Upstream-Status: Pending
+Signed-off-by: Ross Burton <ross.burton@arm.com>
+
+---
+ src/coredump/_UCD_access_reg_linux.c | 69 ++++++++++++++++------------
+ 1 file changed, 39 insertions(+), 30 deletions(-)
+
+diff --git a/src/coredump/_UCD_access_reg_linux.c b/src/coredump/_UCD_access_reg_linux.c
+index 27eef123..beefdb47 100644
+--- a/src/coredump/_UCD_access_reg_linux.c
++++ b/src/coredump/_UCD_access_reg_linux.c
+@@ -67,38 +67,47 @@ _UCD_access_reg (unw_addr_space_t as,
+ goto badreg;
+ #else
+ #if defined(UNW_TARGET_MIPS)
++
++/* glibc and musl use different names */
++#ifdef __GLIBC__
++#define EF_REG(x) EF_REG ## x
++#else
++#include <sys/reg.h>
++#define EF_REG(x) EF_R ## x
++#endif
++
+ static const uint8_t remap_regs[] =
+ {
+- [UNW_MIPS_R0] = EF_REG0,
+- [UNW_MIPS_R1] = EF_REG1,
+- [UNW_MIPS_R2] = EF_REG2,
+- [UNW_MIPS_R3] = EF_REG3,
+- [UNW_MIPS_R4] = EF_REG4,
+- [UNW_MIPS_R5] = EF_REG5,
+- [UNW_MIPS_R6] = EF_REG6,
+- [UNW_MIPS_R7] = EF_REG7,
+- [UNW_MIPS_R8] = EF_REG8,
+- [UNW_MIPS_R9] = EF_REG9,
+- [UNW_MIPS_R10] = EF_REG10,
+- [UNW_MIPS_R11] = EF_REG11,
+- [UNW_MIPS_R12] = EF_REG12,
+- [UNW_MIPS_R13] = EF_REG13,
+- [UNW_MIPS_R14] = EF_REG14,
+- [UNW_MIPS_R15] = EF_REG15,
+- [UNW_MIPS_R16] = EF_REG16,
+- [UNW_MIPS_R17] = EF_REG17,
+- [UNW_MIPS_R18] = EF_REG18,
+- [UNW_MIPS_R19] = EF_REG19,
+- [UNW_MIPS_R20] = EF_REG20,
+- [UNW_MIPS_R21] = EF_REG21,
+- [UNW_MIPS_R22] = EF_REG22,
+- [UNW_MIPS_R23] = EF_REG23,
+- [UNW_MIPS_R24] = EF_REG24,
+- [UNW_MIPS_R25] = EF_REG25,
+- [UNW_MIPS_R28] = EF_REG28,
+- [UNW_MIPS_R29] = EF_REG29,
+- [UNW_MIPS_R30] = EF_REG30,
+- [UNW_MIPS_R31] = EF_REG31,
++ [UNW_MIPS_R0] = EF_REG(0),
++ [UNW_MIPS_R1] = EF_REG(1),
++ [UNW_MIPS_R2] = EF_REG(2),
++ [UNW_MIPS_R3] = EF_REG(3),
++ [UNW_MIPS_R4] = EF_REG(4),
++ [UNW_MIPS_R5] = EF_REG(5),
++ [UNW_MIPS_R6] = EF_REG(6),
++ [UNW_MIPS_R7] = EF_REG(7),
++ [UNW_MIPS_R8] = EF_REG(8),
++ [UNW_MIPS_R9] = EF_REG(9),
++ [UNW_MIPS_R10] = EF_REG(10),
++ [UNW_MIPS_R11] = EF_REG(11),
++ [UNW_MIPS_R12] = EF_REG(12),
++ [UNW_MIPS_R13] = EF_REG(13),
++ [UNW_MIPS_R14] = EF_REG(14),
++ [UNW_MIPS_R15] = EF_REG(15),
++ [UNW_MIPS_R16] = EF_REG(16),
++ [UNW_MIPS_R17] = EF_REG(17),
++ [UNW_MIPS_R18] = EF_REG(18),
++ [UNW_MIPS_R19] = EF_REG(19),
++ [UNW_MIPS_R20] = EF_REG(20),
++ [UNW_MIPS_R21] = EF_REG(21),
++ [UNW_MIPS_R22] = EF_REG(22),
++ [UNW_MIPS_R23] = EF_REG(23),
++ [UNW_MIPS_R24] = EF_REG(24),
++ [UNW_MIPS_R25] = EF_REG(25),
++ [UNW_MIPS_R28] = EF_REG(28),
++ [UNW_MIPS_R29] = EF_REG(29),
++ [UNW_MIPS_R30] = EF_REG(30),
++ [UNW_MIPS_R31] = EF_REG(31),
+ [UNW_MIPS_PC] = EF_CP0_EPC,
+ };
+ #elif defined(UNW_TARGET_X86)
+--
+2.34.1
+
diff --git a/meta/recipes-support/libunwind/libunwind/musl-header-conflict.patch b/meta/recipes-support/libunwind/libunwind/musl-header-conflict.patch
deleted file mode 100644
index 49985b5b3c..0000000000
--- a/meta/recipes-support/libunwind/libunwind/musl-header-conflict.patch
+++ /dev/null
@@ -1,44 +0,0 @@
-From e1de5a5b42062dc02769f320c7785928b2ee0c57 Mon Sep 17 00:00:00 2001
-From: Richard Purdie <richard.purdie@linuxfoundation.org>
-Date: Thu, 18 Aug 2016 14:46:32 +0100
-Subject: [PATCH] If you:
-
-TCLIBC=musl bitbake unwind
-TCLIBC=musl bitbake gcc-runtime -c cleansstate
-TCLIBC=musl bitbake gcc-runtime
-
-you will see libstdc++ fail to build due to finding libunwind's header file.
-
-Khem: "When we build any of gcc components they expect to use internal version
-and that works with glibc based gcc since the search headers first look into gcc
-headers, however with musl the gcc headers are searched after the standard
-headers ( which is by design the right thing )."
-
-This patch hacks around the issue by looking for a define used during gcc-runtime's
-build and skipping to the internal header in that case.
-
-[YOCTO #10129]
-
-RP 2016/8/18
-
-Upstream-Status: Inappropriate [really need to fix gcc]
-
----
- include/unwind.h | 4 ++++
- 1 file changed, 4 insertions(+)
-
-diff --git a/include/unwind.h b/include/unwind.h
-index 93780fa..c812414 100644
---- a/include/unwind.h
-+++ b/include/unwind.h
-@@ -23,6 +23,10 @@ LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
- OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
- WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
-
-+#ifdef _GLIBCXX_SHARED
-+#include_next <unwind.h>
-+#endif
-+
- #ifndef _UNWIND_H
- #define _UNWIND_H
-
diff --git a/meta/recipes-support/libunwind/libunwind_1.6.2.bb b/meta/recipes-support/libunwind/libunwind_1.6.2.bb
index 716be9a452..3208785124 100644
--- a/meta/recipes-support/libunwind/libunwind_1.6.2.bb
+++ b/meta/recipes-support/libunwind/libunwind_1.6.2.bb
@@ -1,24 +1,42 @@
-require libunwind.inc
+SUMMARY = "Library for obtaining the call-chain of a program"
+DESCRIPTION = "a portable and efficient C programming interface (API) to determine the call-chain of a program"
+HOMEPAGE = "http://www.nongnu.org/libunwind"
+LICENSE = "MIT"
+LIC_FILES_CHKSUM = "file://COPYING;md5=2d80c8ed4062b8339b715f90fa68cc9f"
+DEPENDS += "libatomic-ops"
+DEPENDS:append:libc-musl = " libucontext"
SRC_URI = "http://download.savannah.nongnu.org/releases/libunwind/libunwind-${PV}.tar.gz \
- file://0003-x86-Stub-out-x86_local_resume.patch \
- file://0004-Fix-build-on-mips-musl.patch \
+ file://mips-byte-order.patch \
+ file://mips-coredump-register.patch \
file://0005-ppc32-Consider-ucontext-mismatches-between-glibc-and.patch \
- file://0006-Fix-for-X32.patch \
+ file://0001-src-Gtrace-remove-unguarded-print-calls.patch \
"
-SRC_URI:append:libc-musl = " file://musl-header-conflict.patch"
SRC_URI[sha256sum] = "4a6aec666991fb45d0889c44aede8ad6eb108071c3554fcdff671f9c94794976"
-EXTRA_OECONF:append:libc-musl = " --disable-documentation --disable-tests --enable-static"
+inherit autotools multilib_header
+
+COMPATIBLE_HOST:riscv32 = "null"
+
+PACKAGECONFIG ??= ""
+PACKAGECONFIG[lzma] = "--enable-minidebuginfo,--disable-minidebuginfo,xz"
+PACKAGECONFIG[zlib] = "--enable-zlibdebuginfo,--disable-zlibdebuginfo,zlib"
+PACKAGECONFIG[latexdocs] = "--enable-documentation, --disable-documentation, latex2man-native"
+
+EXTRA_OECONF = "--enable-static"
# http://errors.yoctoproject.org/Errors/Details/20487/
ARM_INSTRUCTION_SET:armv4 = "arm"
ARM_INSTRUCTION_SET:armv5 = "arm"
-COMPATIBLE_HOST:riscv32 = "null"
-
LDFLAGS += "-Wl,-z,relro,-z,now ${@bb.utils.contains('DISTRO_FEATURES', 'ld-is-gold', ' -fuse-ld=bfd ', '', d)}"
SECURITY_LDFLAGS:append:libc-musl = " -lssp_nonshared"
CACHED_CONFIGUREVARS:append:libc-musl = " LDFLAGS='${LDFLAGS} -lucontext'"
+
+do_install:append () {
+ oe_multilib_header libunwind.h
+}
+
+BBCLASSEXTEND = "native"
diff --git a/meta/recipes-support/liburcu/liburcu_0.13.1.bb b/meta/recipes-support/liburcu/liburcu_0.13.1.bb
deleted file mode 100644
index 66763349d2..0000000000
--- a/meta/recipes-support/liburcu/liburcu_0.13.1.bb
+++ /dev/null
@@ -1,24 +0,0 @@
-SUMMARY = "Userspace RCU (read-copy-update) library"
-DESCRIPTION = "A userspace RCU (read-copy-update) library. This data \
-synchronization library provides read-side access which scales linearly \
-with the number of cores. "
-HOMEPAGE = "http://lttng.org/urcu"
-BUGTRACKER = "http://lttng.org/project/issues"
-
-LICENSE = "LGPL-2.1-or-later & MIT"
-LIC_FILES_CHKSUM = "file://LICENSE;md5=e548d28737289d75a8f1e01ba2fd7825 \
- file://include/urcu/urcu.h;beginline=4;endline=32;md5=4de0d68d3a997643715036d2209ae1d9 \
- file://include/urcu/uatomic/x86.h;beginline=4;endline=21;md5=58e50bbd8a2f073bb5500e6554af0d0b"
-
-SRC_URI = "http://lttng.org/files/urcu/userspace-rcu-${PV}.tar.bz2"
-
-SRC_URI[sha256sum] = "3213f33d2b8f710eb920eb1abb279ec04bf8ae6361f44f2513c28c20d3363083"
-
-S = "${WORKDIR}/userspace-rcu-${PV}"
-inherit autotools multilib_header
-
-CPPFLAGS:append:riscv64 = " -pthread -D_REENTRANT"
-
-do_install:append() {
- oe_multilib_header urcu/config.h
-}
diff --git a/meta/recipes-support/liburcu/liburcu_0.14.0.bb b/meta/recipes-support/liburcu/liburcu_0.14.0.bb
new file mode 100644
index 0000000000..f03c990632
--- /dev/null
+++ b/meta/recipes-support/liburcu/liburcu_0.14.0.bb
@@ -0,0 +1,24 @@
+SUMMARY = "Userspace RCU (read-copy-update) library"
+DESCRIPTION = "A userspace RCU (read-copy-update) library. This data \
+synchronization library provides read-side access which scales linearly \
+with the number of cores. "
+HOMEPAGE = "http://lttng.org/urcu"
+BUGTRACKER = "http://lttng.org/project/issues"
+
+LICENSE = "LGPL-2.1-or-later & MIT"
+LIC_FILES_CHKSUM = "file://LICENSE;md5=e548d28737289d75a8f1e01ba2fd7825 \
+ file://include/urcu/urcu.h;beginline=4;endline=32;md5=4de0d68d3a997643715036d2209ae1d9 \
+ file://include/urcu/uatomic/x86.h;beginline=4;endline=21;md5=58e50bbd8a2f073bb5500e6554af0d0b"
+
+SRC_URI = "http://lttng.org/files/urcu/userspace-rcu-${PV}.tar.bz2"
+
+SRC_URI[sha256sum] = "ca43bf261d4d392cff20dfae440836603bf009fce24fdc9b2697d837a2239d4f"
+
+S = "${WORKDIR}/userspace-rcu-${PV}"
+inherit autotools multilib_header
+
+CPPFLAGS:append:riscv64 = " -pthread -D_REENTRANT"
+
+do_install:append() {
+ oe_multilib_header urcu/config.h
+}
diff --git a/meta/recipes-support/libusb/libusb1_1.0.26.bb b/meta/recipes-support/libusb/libusb1_1.0.26.bb
deleted file mode 100644
index fd63e7adc2..0000000000
--- a/meta/recipes-support/libusb/libusb1_1.0.26.bb
+++ /dev/null
@@ -1,47 +0,0 @@
-SUMMARY = "Userspace library to access USB (version 1.0)"
-DESCRIPTION = "A cross-platform library to access USB devices from Linux, \
-macOS, Windows, OpenBSD/NetBSD, Haiku and Solaris userspace."
-HOMEPAGE = "https://libusb.info"
-BUGTRACKER = "http://www.libusb.org/report"
-SECTION = "libs"
-
-LICENSE = "LGPL-2.1-or-later"
-LIC_FILES_CHKSUM = "file://COPYING;md5=fbc093901857fcd118f065f900982c24"
-
-BBCLASSEXTEND = "native nativesdk"
-
-SRC_URI = "https://github.com/libusb/libusb/releases/download/v${PV}/libusb-${PV}.tar.bz2 \
- file://run-ptest \
- "
-
-UPSTREAM_CHECK_URI = "https://github.com/libusb/libusb/releases"
-
-SRC_URI[sha256sum] = "12ce7a61fc9854d1d2a1ffe095f7b5fac19ddba095c259e6067a46500381b5a5"
-
-S = "${WORKDIR}/libusb-${PV}"
-
-inherit autotools pkgconfig ptest
-
-PACKAGECONFIG:class-target ??= "udev"
-PACKAGECONFIG[udev] = "--enable-udev,--disable-udev,udev"
-
-EXTRA_OECONF = "--libdir=${base_libdir}"
-
-do_install:append() {
- install -d ${D}${libdir}
- if [ ! ${D}${libdir} -ef ${D}${base_libdir} ]; then
- mv ${D}${base_libdir}/pkgconfig ${D}${libdir}
- fi
-}
-
-do_compile_ptest() {
- oe_runmake -C tests stress
-}
-
-do_install_ptest() {
- install -m 755 ${B}/tests/.libs/stress ${D}${PTEST_PATH}
-}
-
-FILES:${PN} += "${base_libdir}/*.so.*"
-
-FILES:${PN}-dev += "${base_libdir}/*.so ${base_libdir}/*.la"
diff --git a/meta/recipes-support/libusb/libusb1_1.0.27.bb b/meta/recipes-support/libusb/libusb1_1.0.27.bb
new file mode 100644
index 0000000000..f2431d75c8
--- /dev/null
+++ b/meta/recipes-support/libusb/libusb1_1.0.27.bb
@@ -0,0 +1,48 @@
+SUMMARY = "Userspace library to access USB (version 1.0)"
+DESCRIPTION = "A cross-platform library to access USB devices from Linux, \
+macOS, Windows, OpenBSD/NetBSD, Haiku and Solaris userspace."
+HOMEPAGE = "https://libusb.info"
+BUGTRACKER = "http://www.libusb.org/report"
+SECTION = "libs"
+
+LICENSE = "LGPL-2.1-or-later"
+LIC_FILES_CHKSUM = "file://COPYING;md5=fbc093901857fcd118f065f900982c24"
+
+BBCLASSEXTEND = "native nativesdk"
+
+SRC_URI = "${GITHUB_BASE_URI}/download/v${PV}/libusb-${PV}.tar.bz2 \
+ file://run-ptest \
+ "
+
+GITHUB_BASE_URI = "https://github.com/libusb/libusb/releases"
+UPSTREAM_CHECK_REGEX = "releases/tag/v?(?P<pver>\d+(\.\d+)+)$"
+
+SRC_URI[sha256sum] = "ffaa41d741a8a3bee244ac8e54a72ea05bf2879663c098c82fc5757853441575"
+
+S = "${WORKDIR}/libusb-${PV}"
+
+inherit autotools pkgconfig ptest github-releases
+
+PACKAGECONFIG:class-target ??= "udev"
+PACKAGECONFIG[udev] = "--enable-udev,--disable-udev,udev"
+
+EXTRA_OECONF = "--libdir=${base_libdir}"
+
+do_install:append() {
+ install -d ${D}${libdir}
+ if [ ! ${D}${libdir} -ef ${D}${base_libdir} ]; then
+ mv ${D}${base_libdir}/pkgconfig ${D}${libdir}
+ fi
+}
+
+do_compile_ptest() {
+ oe_runmake -C tests stress
+}
+
+do_install_ptest() {
+ install -m 755 ${B}/tests/stress ${D}${PTEST_PATH}
+}
+
+FILES:${PN} += "${base_libdir}/*.so.*"
+
+FILES:${PN}-dev += "${base_libdir}/*.so ${base_libdir}/*.la"
diff --git a/meta/recipes-support/libxslt/libxslt_1.1.35.bb b/meta/recipes-support/libxslt/libxslt_1.1.35.bb
deleted file mode 100644
index 51cfb2e281..0000000000
--- a/meta/recipes-support/libxslt/libxslt_1.1.35.bb
+++ /dev/null
@@ -1,56 +0,0 @@
-SUMMARY = "GNOME XSLT library"
-DESCRIPTION = "libxslt is the XSLT C parser and toolkit developed for the Gnome project. \
-XSLT itself is a an XML language to define transformation for XML. Libxslt is based on \
-libxml2 the XML C library developed for the GNOME project. It also implements most of \
-the EXSLT set of processor-portable extensions functions and some of Saxon's evaluate \
-and expressions extensions."
-HOMEPAGE = "http://xmlsoft.org/XSLT/"
-BUGTRACKER = "https://bugzilla.gnome.org/"
-
-LICENSE = "MIT"
-LIC_FILES_CHKSUM = "file://Copyright;md5=0cd9a07afbeb24026c9b03aecfeba458"
-
-SECTION = "libs"
-DEPENDS = "libxml2"
-
-SRC_URI = "https://download.gnome.org/sources/libxslt/1.1/libxslt-${PV}.tar.xz"
-
-SRC_URI[sha256sum] = "8247f33e9a872c6ac859aa45018bc4c4d00b97e2feac9eebc10c93ce1f34dd79"
-
-UPSTREAM_CHECK_REGEX = "libxslt-(?P<pver>\d+(\.\d+)+)\.tar"
-
-S = "${WORKDIR}/libxslt-${PV}"
-
-BINCONFIG = "${bindir}/xslt-config"
-
-inherit autotools pkgconfig binconfig-disabled lib_package multilib_header
-
-do_configure:prepend () {
- # We don't DEPEND on binutils for ansidecl.h so ensure we don't use the header.
- # This can be removed when upgrading to 1.1.34.
- sed -i -e 's/ansidecl.h//' ${S}/configure.ac
-
- # The timestamps in the 1.1.28 tarball are messed up causing this file to
- # appear out of date. Touch it so that we don't try to regenerate it.
- touch ${S}/doc/xsltproc.1
-}
-
-EXTRA_OECONF = "--without-python --without-debug --without-mem-debug --without-crypto --with-html-subdir=${BPN}"
-# older versions of this recipe had ${PN}-utils
-RPROVIDES:${PN}-bin += "${PN}-utils"
-RCONFLICTS:${PN}-bin += "${PN}-utils"
-RREPLACES:${PN}-bin += "${PN}-utils"
-
-# This is only needed until libxml can load the relocated catalog itself
-do_install:append:class-native () {
- create_wrapper ${D}/${bindir}/xsltproc XML_CATALOG_FILES=${sysconfdir}/xml/catalog
-}
-
-do_install:append () {
- oe_multilib_header libxslt/xsltconfig.h
-}
-
-FILES:${PN} += "${libdir}/libxslt-plugins"
-FILES:${PN}-dev += "${libdir}/xsltConf.sh"
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-support/libxslt/libxslt_1.1.39.bb b/meta/recipes-support/libxslt/libxslt_1.1.39.bb
new file mode 100644
index 0000000000..2cc0c84bec
--- /dev/null
+++ b/meta/recipes-support/libxslt/libxslt_1.1.39.bb
@@ -0,0 +1,58 @@
+SUMMARY = "GNOME XSLT library"
+DESCRIPTION = "libxslt is the XSLT C parser and toolkit developed for the Gnome project. \
+XSLT itself is a an XML language to define transformation for XML. Libxslt is based on \
+libxml2 the XML C library developed for the GNOME project. It also implements most of \
+the EXSLT set of processor-portable extensions functions and some of Saxon's evaluate \
+and expressions extensions."
+HOMEPAGE = "http://xmlsoft.org/XSLT/"
+BUGTRACKER = "https://bugzilla.gnome.org/"
+
+LICENSE = "MIT"
+LIC_FILES_CHKSUM = "file://Copyright;md5=0cd9a07afbeb24026c9b03aecfeba458"
+
+SECTION = "libs"
+DEPENDS = "libxml2"
+
+SRC_URI = "https://download.gnome.org/sources/libxslt/1.1/libxslt-${PV}.tar.xz"
+
+SRC_URI[sha256sum] = "2a20ad621148339b0759c4d4e96719362dee64c9a096dbba625ba053846349f0"
+
+UPSTREAM_CHECK_REGEX = "libxslt-(?P<pver>\d+(\.\d+)+)\.tar"
+
+CVE_STATUS[CVE-2022-29824] = "not-applicable-config: Static linking to libxml2 is not enabled."
+
+S = "${WORKDIR}/libxslt-${PV}"
+
+BINCONFIG = "${bindir}/xslt-config"
+
+inherit autotools pkgconfig binconfig-disabled lib_package multilib_header
+
+do_configure:prepend () {
+ # We don't DEPEND on binutils for ansidecl.h so ensure we don't use the header.
+ # This can be removed when upgrading to 1.1.34.
+ sed -i -e 's/ansidecl.h//' ${S}/configure.ac
+
+ # The timestamps in the 1.1.28 tarball are messed up causing this file to
+ # appear out of date. Touch it so that we don't try to regenerate it.
+ touch ${S}/doc/xsltproc.1
+}
+
+EXTRA_OECONF = "--without-python --without-debug --without-mem-debug --without-crypto"
+# older versions of this recipe had ${PN}-utils
+RPROVIDES:${PN}-bin += "${PN}-utils"
+RCONFLICTS:${PN}-bin += "${PN}-utils"
+RREPLACES:${PN}-bin += "${PN}-utils"
+
+# This is only needed until libxml can load the relocated catalog itself
+do_install:append:class-native () {
+ create_wrapper ${D}/${bindir}/xsltproc XML_CATALOG_FILES=${sysconfdir}/xml/catalog
+}
+
+do_install:append () {
+ oe_multilib_header libxslt/xsltconfig.h
+}
+
+FILES:${PN} += "${libdir}/libxslt-plugins"
+FILES:${PN}-dev += "${libdir}/xsltConf.sh"
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-support/lz4/files/CVE-2021-3520.patch b/meta/recipes-support/lz4/files/CVE-2021-3520.patch
deleted file mode 100644
index 5ac8f6691f..0000000000
--- a/meta/recipes-support/lz4/files/CVE-2021-3520.patch
+++ /dev/null
@@ -1,27 +0,0 @@
-From 8301a21773ef61656225e264f4f06ae14462bca7 Mon Sep 17 00:00:00 2001
-From: Jasper Lievisse Adriaanse <j@jasper.la>
-Date: Fri, 26 Feb 2021 15:21:20 +0100
-Subject: [PATCH] Fix potential memory corruption with negative memmove() size
-
-Upstream-Status: Backport
-https://github.com/lz4/lz4/commit/8301a21773ef61656225e264f4f06ae14462bca7#diff-7055e9cf14c488aea9837aaf9f528b58ee3c22988d7d0d81d172ec62d94a88a7
-CVE: CVE-2021-3520
-Signed-off-by: Armin Kuster <akuster@mvista.com>
-
----
- lib/lz4.c | 2 +-
- 1 file changed, 1 insertion(+), 1 deletion(-)
-
-Index: git/lib/lz4.c
-===================================================================
---- git.orig/lib/lz4.c
-+++ git/lib/lz4.c
-@@ -1665,7 +1665,7 @@ LZ4_decompress_generic(
- const size_t dictSize /* note : = 0 if noDict */
- )
- {
-- if (src == NULL) { return -1; }
-+ if ((src == NULL) || (outputSize < 0)) { return -1; }
-
- { const BYTE* ip = (const BYTE*) src;
- const BYTE* const iend = ip + srcSize;
diff --git a/meta/recipes-support/lz4/files/run-ptest b/meta/recipes-support/lz4/files/run-ptest
new file mode 100644
index 0000000000..f4a46a4f2c
--- /dev/null
+++ b/meta/recipes-support/lz4/files/run-ptest
@@ -0,0 +1,17 @@
+#!/bin/sh
+
+# Define test work dir
+WORKDIR=@PTEST_PATH@/tests/
+
+# Run test
+cd ${WORKDIR}
+
+case_name="test_frame"
+./frametest
+ret=$?
+if [[ ${ret} == "0" ]]; then
+ echo "PASS: lz4/$case_name"
+else
+ echo "FAIL: lz4/$case_name"
+fi
+
diff --git a/meta/recipes-support/lz4/lz4_1.9.3.bb b/meta/recipes-support/lz4/lz4_1.9.3.bb
deleted file mode 100644
index 129a86b681..0000000000
--- a/meta/recipes-support/lz4/lz4_1.9.3.bb
+++ /dev/null
@@ -1,31 +0,0 @@
-SUMMARY = "Extremely Fast Compression algorithm"
-DESCRIPTION = "LZ4 is a very fast lossless compression algorithm, providing compression speed at 400 MB/s per core, scalable with multi-cores CPU. It also features an extremely fast decoder, with speed in multiple GB/s per core, typically reaching RAM speed limits on multi-core systems."
-HOMEPAGE = "https://github.com/lz4/lz4"
-
-LICENSE = "BSD-2-Clause | GPL-2.0-only"
-LIC_FILES_CHKSUM = "file://lib/LICENSE;md5=ebc2ea4814a64de7708f1571904b32cc \
- file://programs/COPYING;md5=b234ee4d69f5fce4486a80fdaf4a4263 \
- file://LICENSE;md5=d57c0d21cb917fb4e0af2454aa48b956 \
- "
-
-PE = "1"
-
-SRCREV = "d44371841a2f1728a3f36839fd4b7e872d0927d3"
-
-SRC_URI = "git://github.com/lz4/lz4.git;branch=release;protocol=https \
- file://CVE-2021-3520.patch \
- "
-UPSTREAM_CHECK_GITTAGREGEX = "v(?P<pver>.*)"
-
-S = "${WORKDIR}/git"
-
-# Fixed in r118, which is larger than the current version.
-CVE_CHECK_IGNORE += "CVE-2014-4715"
-
-EXTRA_OEMAKE = "PREFIX=${prefix} CC='${CC}' CFLAGS='${CFLAGS}' DESTDIR=${D} LIBDIR=${libdir} INCLUDEDIR=${includedir} BUILD_STATIC=no"
-
-do_install() {
- oe_runmake install
-}
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-support/lz4/lz4_1.9.4.bb b/meta/recipes-support/lz4/lz4_1.9.4.bb
new file mode 100644
index 0000000000..51a854d44a
--- /dev/null
+++ b/meta/recipes-support/lz4/lz4_1.9.4.bb
@@ -0,0 +1,46 @@
+SUMMARY = "Extremely Fast Compression algorithm"
+DESCRIPTION = "LZ4 is a very fast lossless compression algorithm, providing compression speed at 400 MB/s per core, scalable with multi-cores CPU. It also features an extremely fast decoder, with speed in multiple GB/s per core, typically reaching RAM speed limits on multi-core systems."
+HOMEPAGE = "https://github.com/lz4/lz4"
+
+LICENSE = "BSD-2-Clause | GPL-2.0-only"
+LIC_FILES_CHKSUM = "file://lib/LICENSE;md5=5cd5f851b52ec832b10eedb3f01f885a \
+ file://programs/COPYING;md5=b234ee4d69f5fce4486a80fdaf4a4263 \
+ file://LICENSE;md5=c5cc3cd6f9274b4d32988096df9c3ec3 \
+ "
+
+PE = "1"
+
+SRCREV = "5ff839680134437dbf4678f3d0c7b371d84f4964"
+
+SRC_URI = "git://github.com/lz4/lz4.git;branch=release;protocol=https \
+ file://run-ptest \
+ "
+UPSTREAM_CHECK_GITTAGREGEX = "v(?P<pver>.*)"
+
+S = "${WORKDIR}/git"
+
+inherit ptest
+
+CVE_STATUS[CVE-2014-4715] = "fixed-version: Fixed in r118, which is larger than the current version."
+
+EXTRA_OEMAKE = "PREFIX=${prefix} CC='${CC}' CFLAGS='${CFLAGS}' DESTDIR=${D} LIBDIR=${libdir} INCLUDEDIR=${includedir} BUILD_STATIC=no"
+
+do_install() {
+ oe_runmake install
+}
+
+BBCLASSEXTEND = "native nativesdk"
+
+RDEPENDS:${PN}-ptest += "bash"
+
+do_compile_ptest() {
+ oe_runmake -C ${B}/tests/
+}
+
+do_install_ptest() {
+ install -d ${D}${PTEST_PATH}/tests/
+ install --mode=755 ${B}/tests/frametest ${D}${PTEST_PATH}/tests/
+ sed -i "s#@PTEST_PATH@#${PTEST_PATH}#g" ${D}${PTEST_PATH}/run-ptest
+
+}
+
diff --git a/meta/recipes-support/lzo/lzo/0001-Use-memcpy-instead-of-reinventing-it.patch b/meta/recipes-support/lzo/lzo/0001-Use-memcpy-instead-of-reinventing-it.patch
index db3a70e803..a0d2502d24 100644
--- a/meta/recipes-support/lzo/lzo/0001-Use-memcpy-instead-of-reinventing-it.patch
+++ b/meta/recipes-support/lzo/lzo/0001-Use-memcpy-instead-of-reinventing-it.patch
@@ -10,7 +10,15 @@ Change suggested by Julian Taylor.
Bug-Debian: https://bugs.debian.org/cgi-bin/bugreport.cgi?bug=757037
-Upstream-Status: Pending
+RP: Patch is still in debian as of 20220524 in a revised form:
+https://sources.debian.org/patches/lzo2/2.10-2/
+https://sources.debian.org/patches/lzo2/2.10-2/0001-Conditionally-replace-reinvention-of-memcpy-with-cal.patch/
+It was submitted in 2015, no reply to an email from RP in 2022 either.
+
+We likely need this in OE to prevent against unaligned accesses
+on systems such as armv5.
+
+Upstream-Status: Inactive-Upstream
Signed-off-by: Saul Wold <sgw@linux.intel.com>
---
minilzo/minilzo.c | 14 ++++++++++++++
diff --git a/meta/recipes-support/mpfr/mpfr_4.1.0.bb b/meta/recipes-support/mpfr/mpfr_4.1.0.bb
deleted file mode 100644
index 2121dad57c..0000000000
--- a/meta/recipes-support/mpfr/mpfr_4.1.0.bb
+++ /dev/null
@@ -1,19 +0,0 @@
-SUMMARY = "C library for multiple-precision floating-point computations with exact rounding"
-DESCRIPTION = "The GNU Multiple Precision Floating-Point Reliable Library (GNU MPFR) is a GNU portable C library for arbitrary-precision binary floating-point computation with correct rounding, based on GNU Multi-Precision Library. MPFR's computation is both efficient and has a well-defined semantics: the functions are completely specified on all the possible operands and the results do not depend on the platform."
-HOMEPAGE = "https://www.mpfr.org/"
-LICENSE = "LGPL-3.0-or-later"
-SECTION = "devel"
-
-inherit autotools texinfo
-
-LIC_FILES_CHKSUM = "file://COPYING;md5=1ebbd3e34237af26da5dc08a4e440464 \
- file://COPYING.LESSER;md5=3000208d539ec061b899bce1d9ce9404 \
- "
-DEPENDS = "gmp autoconf-archive"
-
-SRC_URI = "https://www.mpfr.org/mpfr-${PV}/mpfr-${PV}.tar.xz"
-SRC_URI[sha256sum] = "0c98a3f1732ff6ca4ea690552079da9c597872d30e96ec28414ee23c95558a7f"
-
-UPSTREAM_CHECK_URI = "http://www.mpfr.org/mpfr-current/"
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-support/mpfr/mpfr_4.2.1.bb b/meta/recipes-support/mpfr/mpfr_4.2.1.bb
new file mode 100644
index 0000000000..a2067e1036
--- /dev/null
+++ b/meta/recipes-support/mpfr/mpfr_4.2.1.bb
@@ -0,0 +1,19 @@
+SUMMARY = "C library for multiple-precision floating-point computations with exact rounding"
+DESCRIPTION = "The GNU Multiple Precision Floating-Point Reliable Library (GNU MPFR) is a GNU portable C library for arbitrary-precision binary floating-point computation with correct rounding, based on GNU Multi-Precision Library. MPFR's computation is both efficient and has a well-defined semantics: the functions are completely specified on all the possible operands and the results do not depend on the platform."
+HOMEPAGE = "https://www.mpfr.org/"
+LICENSE = "LGPL-3.0-or-later"
+SECTION = "devel"
+
+inherit autotools texinfo
+
+LIC_FILES_CHKSUM = "file://COPYING;md5=1ebbd3e34237af26da5dc08a4e440464 \
+ file://COPYING.LESSER;md5=3000208d539ec061b899bce1d9ce9404 \
+ "
+DEPENDS = "gmp autoconf-archive-native"
+
+SRC_URI = "https://www.mpfr.org/mpfr-${PV}/mpfr-${PV}.tar.xz"
+SRC_URI[sha256sum] = "277807353a6726978996945af13e52829e3abd7a9a5b7fb2793894e18f1fcbb2"
+
+UPSTREAM_CHECK_URI = "http://www.mpfr.org/mpfr-current/"
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-support/nettle/nettle/dlopen-test.patch b/meta/recipes-support/nettle/nettle/dlopen-test.patch
deleted file mode 100644
index ab9b91f88b..0000000000
--- a/meta/recipes-support/nettle/nettle/dlopen-test.patch
+++ /dev/null
@@ -1,29 +0,0 @@
-Remove the relative path for libnettle.so so the test
-program can find it.
-Relative paths are not suitable, as the folder strucure for ptest
-is different from the one expected by the nettle testsuite.
-
-Upstream-Status: Inappropriate [embedded specific]
-
-Signed-off-by: Juro Bystricky <juro.bystricky@intel.com>
-Signed-off-by: Mingli Yu <mingli.yu@windriver.com>
----
- testsuite/dlopen-test.c | 2 +-
- 1 file changed, 1 insertion(+), 1 deletion(-)
-
-diff --git a/testsuite/dlopen-test.c b/testsuite/dlopen-test.c
-index 4265bf7..1a25d17 100644
---- a/testsuite/dlopen-test.c
-+++ b/testsuite/dlopen-test.c
-@@ -15,7 +15,7 @@ int
- main (int argc UNUSED, char **argv UNUSED)
- {
- #if HAVE_LIBDL
-- void *handle = dlopen ("../libnettle." SO_EXT, RTLD_NOW);
-+ void *handle = dlopen ("libnettle.so", RTLD_NOW);
- int (*get_version)(void);
- if (!handle)
- {
---
-2.17.1
-
diff --git a/meta/recipes-support/nettle/nettle/run-ptest b/meta/recipes-support/nettle/nettle/run-ptest
index b90bed66d2..61a43c78f1 100644
--- a/meta/recipes-support/nettle/nettle/run-ptest
+++ b/meta/recipes-support/nettle/nettle/run-ptest
@@ -6,31 +6,35 @@ failed=0
all=0
for f in *-test; do
- if [ "$f" = "sha1-huge-test" ] ; then
- echo "SKIP: $f (skipped for ludicrous run time)"
- continue
- fi
-
- "./$f"
- case "$?" in
- 0)
- echo "PASS: $f"
- all=$((all + 1))
+ case "$f" in
+ "sha1-huge-test")
+ echo "SKIP: $f (long run time)"
;;
- 77)
- echo "SKIP: $f"
+ "symbols-test")
+ echo "SKIP: $f (needs static libraries)"
;;
*)
- echo "FAIL: $f"
- failed=$((failed + 1))
- all=$((all + 1))
+ "./$f"
+ case "$?" in
+ 0)
+ echo "PASS: $f"
+ all=$((all + 1))
+ ;;
+ 77)
+ echo "SKIP: $f"
+ ;;
+ *)
+ echo "FAIL: $f"
+ failed=$((failed + 1))
+ all=$((all + 1))
+ ;;
+ esac
;;
esac
done
if [ "$failed" -eq 0 ] ; then
- echo "All $all tests passed"
+ echo "All $all tests passed"
else
- echo "$failed of $all tests failed"
+ echo "$failed of $all tests failed"
fi
-
diff --git a/meta/recipes-support/nettle/nettle_3.7.3.bb b/meta/recipes-support/nettle/nettle_3.7.3.bb
deleted file mode 100644
index 889dc74667..0000000000
--- a/meta/recipes-support/nettle/nettle_3.7.3.bb
+++ /dev/null
@@ -1,57 +0,0 @@
-SUMMARY = "A low level cryptographic library"
-DESCRIPTION = "Nettle is a cryptographic library that is designed to fit easily in more or less any context: In crypto toolkits for object-oriented languages (C++, Python, Pike, ...), in applications like LSH or GNUPG, or even in kernel space."
-HOMEPAGE = "http://www.lysator.liu.se/~nisse/nettle/"
-DESCRIPTION = "It tries to solve a problem of providing a common set of \
-cryptographic algorithms for higher-level applications by implementing a \
-context-independent set of cryptographic algorithms"
-SECTION = "libs"
-LICENSE = "LGPL-3.0-or-later | GPL-2.0-or-later"
-
-LIC_FILES_CHKSUM = "file://COPYING.LESSERv3;md5=6a6a8e020838b23406c81b19c1d46df6 \
- file://COPYINGv2;md5=b234ee4d69f5fce4486a80fdaf4a4263 \
- file://serpent-decrypt.c;beginline=14;endline=36;md5=ca0d220bc413e1842ecc507690ce416e \
- file://serpent-set-key.c;beginline=14;endline=36;md5=ca0d220bc413e1842ecc507690ce416e"
-
-DEPENDS += "gmp"
-
-SRC_URI = "${GNU_MIRROR}/${BPN}/${BP}.tar.gz \
- file://Add-target-to-only-build-tests-not-run-them.patch \
- file://run-ptest \
- file://check-header-files-of-openssl-only-if-enable_.patch \
- "
-
-SRC_URI:append:class-target = "\
- file://dlopen-test.patch \
- "
-
-SRC_URI[sha256sum] = "661f5eb03f048a3b924c3a8ad2515d4068e40f67e774e8a26827658007e3bcf0"
-
-UPSTREAM_CHECK_REGEX = "nettle-(?P<pver>\d+(\.\d+)+)\.tar"
-
-inherit autotools ptest multilib_header
-
-EXTRA_AUTORECONF += "--exclude=aclocal"
-
-EXTRA_OECONF = "--disable-openssl"
-
-do_compile_ptest() {
- oe_runmake buildtest
-}
-
-do_install:append() {
- oe_multilib_header nettle/version.h
-}
-
-do_install_ptest() {
- install -d ${D}${PTEST_PATH}/testsuite/
- install ${S}/testsuite/gold-bug.txt ${D}${PTEST_PATH}/testsuite/
- install ${S}/testsuite/*-test ${D}${PTEST_PATH}/testsuite/
- # tools can be found in PATH, not in ../tools/
- sed -i -e 's|../tools/||' ${D}${PTEST_PATH}/testsuite/*-test
- install ${B}/testsuite/*-test ${D}${PTEST_PATH}/testsuite/
-}
-
-RDEPENDS:${PN}-ptest += "${PN}-dev"
-INSANE_SKIP:${PN}-ptest += "dev-deps"
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-support/nettle/nettle_3.9.1.bb b/meta/recipes-support/nettle/nettle_3.9.1.bb
new file mode 100644
index 0000000000..396708cf87
--- /dev/null
+++ b/meta/recipes-support/nettle/nettle_3.9.1.bb
@@ -0,0 +1,61 @@
+SUMMARY = "A low level cryptographic library"
+DESCRIPTION = "Nettle is a cryptographic library that is designed to fit easily in more or less any context: In crypto toolkits for object-oriented languages (C++, Python, Pike, ...), in applications like LSH or GNUPG, or even in kernel space."
+HOMEPAGE = "http://www.lysator.liu.se/~nisse/nettle/"
+DESCRIPTION = "It tries to solve a problem of providing a common set of \
+cryptographic algorithms for higher-level applications by implementing a \
+context-independent set of cryptographic algorithms"
+SECTION = "libs"
+LICENSE = "LGPL-3.0-or-later | GPL-2.0-or-later"
+
+LIC_FILES_CHKSUM = "file://COPYING.LESSERv3;md5=6a6a8e020838b23406c81b19c1d46df6 \
+ file://COPYINGv2;md5=b234ee4d69f5fce4486a80fdaf4a4263 \
+ file://serpent-decrypt.c;beginline=14;endline=36;md5=ca0d220bc413e1842ecc507690ce416e \
+ file://serpent-set-key.c;beginline=14;endline=36;md5=ca0d220bc413e1842ecc507690ce416e"
+
+DEPENDS += "gmp"
+
+SRC_URI = "${GNU_MIRROR}/${BPN}/${BP}.tar.gz \
+ file://Add-target-to-only-build-tests-not-run-them.patch \
+ file://run-ptest \
+ file://check-header-files-of-openssl-only-if-enable_.patch \
+ "
+
+SRC_URI[sha256sum] = "ccfeff981b0ca71bbd6fbcb054f407c60ffb644389a5be80d6716d5b550c6ce3"
+
+UPSTREAM_CHECK_REGEX = "nettle-(?P<pver>\d+(\.\d+)+)\.tar"
+
+inherit autotools ptest multilib_header lib_package
+
+EXTRA_AUTORECONF += "--exclude=aclocal"
+
+EXTRA_OECONF = "--disable-openssl"
+
+EXTRA_OECONF:append:armv7a = "${@bb.utils.contains("TUNE_FEATURES","neon",""," --disable-arm-neon --disable-fat",d)}"
+EXTRA_OECONF:append:armv7ve = "${@bb.utils.contains("TUNE_FEATURES","neon",""," --disable-arm-neon --disable-fat",d)}"
+
+do_compile_ptest() {
+ oe_runmake buildtest
+}
+
+do_install:append() {
+ oe_multilib_header nettle/version.h
+}
+
+do_install_ptest() {
+ install -d ${D}${PTEST_PATH}/testsuite/
+ install ${B}/testsuite/*-test ${D}${PTEST_PATH}/testsuite/
+ install ${S}/testsuite/*-test ${D}${PTEST_PATH}/testsuite/
+ install ${S}/testsuite/gold-bug.txt ${D}${PTEST_PATH}/testsuite/
+
+ # Install a symlink for dlopen-test
+ ln -sr ${D}${libdir}/libnettle.so.*.* ${D}${PTEST_PATH}/libnettle.so
+ # These examples are needed for pkcs1-conv-test
+ install ${B}/examples/rsa-sign ${B}/examples/rsa-verify ${D}${PTEST_PATH}/testsuite/
+ # Fix build-time relative paths
+ sed -i -e 's|../tools/|${bindir}/|g' ${D}${PTEST_PATH}/testsuite/*-test
+ sed -i -e 's|../examples/|./|g' ${D}${PTEST_PATH}/testsuite/*-test
+}
+
+RDEPENDS:${PN}-ptest += "${PN}-bin"
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-support/nghttp2/nghttp2/0001-fetch-ocsp-response-use-python3.patch b/meta/recipes-support/nghttp2/nghttp2/0001-fetch-ocsp-response-use-python3.patch
deleted file mode 100644
index e4db09638f..0000000000
--- a/meta/recipes-support/nghttp2/nghttp2/0001-fetch-ocsp-response-use-python3.patch
+++ /dev/null
@@ -1,27 +0,0 @@
-From 73ec79432fc557a8be4f1500982b1c0f5fdf12a9 Mon Sep 17 00:00:00 2001
-From: =?UTF-8?q?Andr=C3=A9=20Draszik?= <andre.draszik@jci.com>
-Date: Thu, 7 Nov 2019 09:58:52 +0000
-Subject: [PATCH] fetch-ocsp-response: use python3
-MIME-Version: 1.0
-Content-Type: text/plain; charset=UTF-8
-Content-Transfer-Encoding: 8bit
-
-Upstream-Status: Inappropriate [oe specific]
-Signed-off-by: André Draszik <git@andred.net>
----
- script/fetch-ocsp-response | 2 +-
- 1 file changed, 1 insertion(+), 1 deletion(-)
-
-diff --git a/script/fetch-ocsp-response b/script/fetch-ocsp-response
-index 0ff7461..185116b 100755
---- a/script/fetch-ocsp-response
-+++ b/script/fetch-ocsp-response
-@@ -1,4 +1,4 @@
--#!/usr/bin/env python
-+#!/usr/bin/env python3
- # -*- coding: utf-8 -*-
-
- # nghttp2 - HTTP/2 C Library
---
-2.23.0.rc1
-
diff --git a/meta/recipes-support/nghttp2/nghttp2_1.47.0.bb b/meta/recipes-support/nghttp2/nghttp2_1.47.0.bb
deleted file mode 100644
index 58ce08084d..0000000000
--- a/meta/recipes-support/nghttp2/nghttp2_1.47.0.bb
+++ /dev/null
@@ -1,35 +0,0 @@
-SUMMARY = "HTTP/2 C Library and tools"
-HOMEPAGE = "https://nghttp2.org/"
-SECTION = "libs"
-LICENSE = "MIT"
-LIC_FILES_CHKSUM = "file://COPYING;md5=764abdf30b2eadd37ce47dcbce0ea1ec"
-
-UPSTREAM_CHECK_URI = "https://github.com/nghttp2/nghttp2/releases"
-
-SRC_URI = "\
- https://github.com/nghttp2/nghttp2/releases/download/v${PV}/nghttp2-${PV}.tar.xz \
- file://0001-fetch-ocsp-response-use-python3.patch \
-"
-SRC_URI[sha256sum] = "68271951324554c34501b85190f22f2221056db69f493afc3bbac8e7be21e7cc"
-
-inherit cmake manpages python3native
-PACKAGECONFIG[manpages] = ""
-
-# examples are never installed, and don't need to be built in the
-# first place
-EXTRA_OECMAKE = "-DENABLE_EXAMPLES=OFF -DENABLE_APP=OFF -DENABLE_HPACK_TOOLS=OFF"
-
-PACKAGES =+ "lib${BPN} ${PN}-client ${PN}-proxy ${PN}-server"
-
-RDEPENDS:${PN} = "${PN}-client (>= ${PV}) ${PN}-proxy (>= ${PV}) ${PN}-server (>= ${PV})"
-RDEPENDS:${PN}:class-native = ""
-RDEPENDS:${PN}-proxy = "openssl python3-core python3-io python3-shell"
-
-ALLOW_EMPTY:${PN} = "1"
-FILES:${PN} = ""
-FILES:lib${BPN} = "${libdir}/*${SOLIBS}"
-FILES:${PN}-client = "${bindir}/h2load ${bindir}/nghttp"
-FILES:${PN}-proxy = "${bindir}/nghttpx ${datadir}/${BPN}/fetch-ocsp-response"
-FILES:${PN}-server = "${bindir}/nghttpd"
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-support/nghttp2/nghttp2_1.61.0.bb b/meta/recipes-support/nghttp2/nghttp2_1.61.0.bb
new file mode 100644
index 0000000000..ad85576dcb
--- /dev/null
+++ b/meta/recipes-support/nghttp2/nghttp2_1.61.0.bb
@@ -0,0 +1,28 @@
+SUMMARY = "HTTP/2 C Library and tools"
+HOMEPAGE = "https://nghttp2.org/"
+SECTION = "libs"
+LICENSE = "MIT"
+LIC_FILES_CHKSUM = "file://COPYING;md5=764abdf30b2eadd37ce47dcbce0ea1ec"
+
+SRC_URI = "${GITHUB_BASE_URI}/download/v${PV}/nghttp2-${PV}.tar.xz"
+SRC_URI[sha256sum] = "c0e660175b9dc429f11d25b9507a834fb752eea9135ab420bb7cb7e9dbcc9654"
+
+inherit cmake manpages python3native github-releases
+PACKAGECONFIG[manpages] = ""
+
+# examples are never installed, and don't need to be built in the
+# first place
+EXTRA_OECMAKE = "-DENABLE_EXAMPLES=OFF -DENABLE_APP=OFF -DENABLE_HPACK_TOOLS=OFF -DENABLE_PYTHON_BINDINGS=OFF"
+
+PACKAGES =+ "lib${BPN} ${PN}-proxy "
+
+RDEPENDS:${PN} = "${PN}-proxy (>= ${PV})"
+RDEPENDS:${PN}:class-native = ""
+RDEPENDS:${PN}-proxy = "openssl python3-core python3-io python3-shell"
+
+ALLOW_EMPTY:${PN} = "1"
+FILES:${PN} = ""
+FILES:lib${BPN} = "${libdir}/*${SOLIBS}"
+FILES:${PN}-proxy = "${bindir}/nghttpx ${datadir}/${BPN}/fetch-ocsp-response"
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-support/npth/npth/0001-Revert-Fix-problem-with-regression-tests-on-recent-g.patch b/meta/recipes-support/npth/npth/0001-Revert-Fix-problem-with-regression-tests-on-recent-g.patch
deleted file mode 100644
index 47c426b4a5..0000000000
--- a/meta/recipes-support/npth/npth/0001-Revert-Fix-problem-with-regression-tests-on-recent-g.patch
+++ /dev/null
@@ -1,43 +0,0 @@
-From e43524868bb4901703d63876f9d49f73ca75b3ab Mon Sep 17 00:00:00 2001
-From: Khem Raj <raj.khem@gmail.com>
-Date: Wed, 12 May 2021 20:27:52 -0700
-Subject: [PATCH] Revert "Fix problem with regression tests on recent glibc."
-
-This reverts commit 3a9d32eb59194b989656548755066ccd9feb36ac.
-
-Upstream-Status: Pending
-Signed-off-by: Khem Raj <raj.khem@gmail.com>
----
- configure.ac | 8 +++-----
- 1 file changed, 3 insertions(+), 5 deletions(-)
-
-diff --git a/configure.ac b/configure.ac
-index 8a9373c..8cda28d 100644
---- a/configure.ac
-+++ b/configure.ac
-@@ -284,11 +284,9 @@ AC_TYPE_SSIZE_T
- #
- # Checks for libraries and functions.
- #
--# We test for pthread_detach because glibc 2.22 includes
--# pthread_create but not pthread_detach.
- if test "$have_w32_system" = no; then
-- AC_SEARCH_LIBS([pthread_detach],[pthread])
-- case "x$ac_cv_search_pthread_detach" in
-+ AC_SEARCH_LIBS([pthread_create],[pthread])
-+ case "x$ac_cv_search_pthread_create" in
- xno)
- have_pthread=no
- ;;
-@@ -297,7 +295,7 @@ if test "$have_w32_system" = no; then
- ;;
- *)
- have_pthread=yes
-- config_libs="$config_libs $ac_cv_search_pthread_detach"
-+ config_libs="$config_libs $ac_cv_search_pthread_create"
- ;;
- esac
- if test "$have_pthread" != no; then
---
-2.31.1
-
diff --git a/meta/recipes-support/npth/npth/musl-fix.patch b/meta/recipes-support/npth/npth/musl-fix.patch
new file mode 100644
index 0000000000..fabe78b14c
--- /dev/null
+++ b/meta/recipes-support/npth/npth/musl-fix.patch
@@ -0,0 +1,37 @@
+From 417abd56fd7bf45cd4948414050615cb1ad59134 Mon Sep 17 00:00:00 2001
+From: NIIBE Yutaka <gniibe@fsij.org>
+Date: Fri, 1 Mar 2024 13:53:52 +0900
+Subject: [PATCH] Fix INSERT_EXPOSE_RWLOCK_API for musl C library.
+
+* configure.ac: Add a case for musl system.
+
+Upstream-Status: Backport [https://git.gnupg.org/cgi-bin/gitweb.cgi?p=npth.git;a=commit;h=417abd56fd7bf45cd4948414050615cb1ad59134]
+Signed-off-by: Alexander Kanavin <alex@linutronix.de>
+--
+
+GnuPG-bug-id: 5664
+Signed-off-by: NIIBE Yutaka <gniibe@fsij.org>
+---
+ configure.ac | 5 ++++-
+ 1 file changed, 4 insertions(+), 1 deletion(-)
+
+diff --git a/configure.ac b/configure.ac
+index c1091b1..576a26e 100644
+--- a/configure.ac
++++ b/configure.ac
+@@ -381,7 +381,10 @@ fi
+ AC_SUBST(INSERT_NO_RWLOCK)
+
+ case "${host}" in
+- *-*-linux*|*-*-gnu*)
++ *-*-linux-musl*)
++ INSERT_EXPOSE_RWLOCK_API="1"
++ ;;
++ *-*-linux-gnu*|*-*-gnu*)
+ INSERT_EXPOSE_RWLOCK_API="defined(__USE_UNIX98) || defined(__USE_XOPEN2K)"
+ ;;
+ *)
+--
+2.30.2
+
+
diff --git a/meta/recipes-support/npth/npth/pkgconfig.patch b/meta/recipes-support/npth/npth/pkgconfig.patch
index b6a12e7309..e736921b43 100644
--- a/meta/recipes-support/npth/npth/pkgconfig.patch
+++ b/meta/recipes-support/npth/npth/pkgconfig.patch
@@ -1,13 +1,51 @@
-Added npth pkgconfig file
+From ff19a9648f1c7d93087e2c33ca64bb881d53ea5a Mon Sep 17 00:00:00 2001
+From: Saul Wold <sgw@linux.intel.com>
+Date: Mon, 10 Nov 2014 13:59:03 -0800
+Subject: [PATCH] Added npth pkgconfig file
Upstream-Status: Pending
Signed-off-by: Saul Wold <sgw@linux.intel.com>
+---
+ configure.ac | 1 +
+ src/Makefile.am | 4 +++-
+ src/npth.pc.in | 10 ++++++++++
+ 3 files changed, 14 insertions(+), 1 deletion(-)
+ create mode 100644 src/npth.pc.in
-Index: npth-1.1/src/npth.pc.in
-===================================================================
+diff --git a/configure.ac b/configure.ac
+index 10f3629..65b76a1 100644
+--- a/configure.ac
++++ b/configure.ac
+@@ -476,6 +476,7 @@ src/Makefile
+ w32/Makefile
+ tests/Makefile])
+ AC_CONFIG_FILES(npth-config, chmod +x npth-config)
++AC_CONFIG_FILES([src/npth.pc])
+ AC_OUTPUT
+
+ echo "
+diff --git a/src/Makefile.am b/src/Makefile.am
+index 7070118..6f01c64 100644
+--- a/src/Makefile.am
++++ b/src/Makefile.am
+@@ -17,8 +17,10 @@
+ # License along with this program; if not, see <http://www.gnu.org/licenses/>.
+
+ ## Process this file with automake to produce Makefile.in
++pkgconfigdir = $(libdir)/pkgconfig
++pkgconfig_DATA = npth.pc
+
+-EXTRA_DIST = libnpth.vers
++EXTRA_DIST = libnpth.vers npth.pc
+ # versioninfo.rc.in
+ nodist_include_HEADERS = npth.h
+
+diff --git a/src/npth.pc.in b/src/npth.pc.in
+new file mode 100644
+index 0000000..db091e8
--- /dev/null
-+++ npth-1.1/src/npth.pc.in
++++ b/src/npth.pc.in
@@ -0,0 +1,10 @@
+prefix=@prefix@
+exec_prefix=@exec_prefix@
@@ -19,31 +57,3 @@ Index: npth-1.1/src/npth.pc.in
+Version: @VERSION@
+Libs: -L${libdir} -lnpth -lpthread
+Cflags: -I${includedir}
-Index: npth-1.1/src/Makefile.am
-===================================================================
---- npth-1.1.orig/src/Makefile.am
-+++ npth-1.1/src/Makefile.am
-@@ -27,8 +27,10 @@
- # License along with this program; if not, see <http://www.gnu.org/licenses/>.
-
- ## Process this file with automake to produce Makefile.in
-+pkgconfigdir = $(libdir)/pkgconfig
-+pkgconfig_DATA = npth.pc
-
--EXTRA_DIST = libnpth.vers
-+EXTRA_DIST = libnpth.vers npth.pc
- # versioninfo.rc.in
- nodist_include_HEADERS = npth.h
-
-Index: npth-1.1/configure.ac
-===================================================================
---- npth-1.1.orig/configure.ac
-+++ npth-1.1/configure.ac
-@@ -337,6 +337,7 @@ src/Makefile
- w32/Makefile
- tests/Makefile])
- AC_CONFIG_FILES(npth-config, chmod +x npth-config)
-+AC_CONFIG_FILES([src/npth.pc])
- AC_OUTPUT
-
- echo "
diff --git a/meta/recipes-support/npth/npth_1.6.bb b/meta/recipes-support/npth/npth_1.6.bb
deleted file mode 100644
index ef863d39b0..0000000000
--- a/meta/recipes-support/npth/npth_1.6.bb
+++ /dev/null
@@ -1,29 +0,0 @@
-SUMMARY = "New GNU Portable Threads library"
-DESCRIPTION = "nPth is a library to provide the GNU Pth API and thus a non-preemptive threads implementation. "
-HOMEPAGE = "https://www.gnu.org/software/pth/"
-SECTION = "libs"
-LICENSE = "LGPL-2.0-or-later"
-LIC_FILES_CHKSUM = "\
- file://COPYING.LIB;md5=2caced0b25dfefd4c601d92bd15116de\
- "
-UPSTREAM_CHECK_URI = "https://gnupg.org/download/index.html"
-SRC_URI = "${GNUPG_MIRROR}/npth/npth-${PV}.tar.bz2 \
- file://pkgconfig.patch \
- file://0001-Revert-Fix-problem-with-regression-tests-on-recent-g.patch \
- "
-
-SRC_URI[md5sum] = "375d1a15ad969f32d25f1a7630929854"
-SRC_URI[sha256sum] = "1393abd9adcf0762d34798dc34fdcf4d0d22a8410721e76f1e3afcd1daa4e2d1"
-
-BINCONFIG = "${bindir}/npth-config"
-
-inherit autotools binconfig-disabled multilib_header
-
-FILES:${PN} = "${libdir}/libnpth.so.*"
-FILES:${PN}-dev += "${bindir}/npth-config"
-
-do_install:append() {
- oe_multilib_header npth.h
-}
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-support/npth/npth_1.7.bb b/meta/recipes-support/npth/npth_1.7.bb
new file mode 100644
index 0000000000..f02a731f85
--- /dev/null
+++ b/meta/recipes-support/npth/npth_1.7.bb
@@ -0,0 +1,25 @@
+SUMMARY = "New GNU Portable Threads library"
+DESCRIPTION = "nPth is a library to provide the GNU Pth API and thus a non-preemptive threads implementation. "
+HOMEPAGE = "https://www.gnu.org/software/pth/"
+SECTION = "libs"
+LICENSE = "LGPL-2.0-or-later"
+LIC_FILES_CHKSUM = "\
+ file://COPYING.LIB;md5=2caced0b25dfefd4c601d92bd15116de\
+ "
+UPSTREAM_CHECK_URI = "https://gnupg.org/download/index.html"
+SRC_URI = "${GNUPG_MIRROR}/npth/npth-${PV}.tar.bz2 \
+ file://pkgconfig.patch \
+ file://musl-fix.patch \
+ "
+
+SRC_URI[sha256sum] = "8589f56937b75ce33b28d312fccbf302b3b71ec3f3945fde6aaa74027914ad05"
+
+inherit autotools binconfig-disabled multilib_header
+
+FILES:${PN} = "${libdir}/libnpth.so.*"
+
+do_install:append() {
+ oe_multilib_header npth.h
+}
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-support/numactl/numactl/0001-define-run-test-target.patch b/meta/recipes-support/numactl/numactl/0001-define-run-test-target.patch
index 9e65a45133..68b3e2491f 100644
--- a/meta/recipes-support/numactl/numactl/0001-define-run-test-target.patch
+++ b/meta/recipes-support/numactl/numactl/0001-define-run-test-target.patch
@@ -1,4 +1,7 @@
-rename test target as run-test
+From a5e1fa272f294e739d6caaf629968478796ca53a Mon Sep 17 00:00:00 2001
+From: Roy Li <rongqing.li@windriver.com>
+Date: Mon, 19 Oct 2020 14:42:11 -0700
+Subject: [PATCH] rename test target as run-test
Upstream-Status: Pending
@@ -8,15 +11,16 @@ not suitable for cross-compile environment, so rename it as run-test.
and define test target to compile the test files.
Signed-off-by: Roy Li <rongqing.li@windriver.com>
+
---
Makefile.am | 4 +++-
1 file changed, 3 insertions(+), 1 deletion(-)
diff --git a/Makefile.am b/Makefile.am
-index b6db339..de176c4 100644
+index da01d27..c738b93 100644
--- a/Makefile.am
+++ b/Makefile.am
-@@ -124,7 +124,9 @@ regress2: $(check_PROGRAMS)
+@@ -126,7 +126,9 @@ regress2: $(check_PROGRAMS)
test_numademo: numademo
./numademo -t -e 10M
@@ -27,6 +31,3 @@ index b6db339..de176c4 100644
TESTS_ENVIRONMENT = builddir='$(builddir)'; export builddir;
---
-1.9.1
-
diff --git a/meta/recipes-support/numactl/numactl/Fix-the-test-output-format.patch b/meta/recipes-support/numactl/numactl/Fix-the-test-output-format.patch
index 9812ecc8b3..8345f71d72 100644
--- a/meta/recipes-support/numactl/numactl/Fix-the-test-output-format.patch
+++ b/meta/recipes-support/numactl/numactl/Fix-the-test-output-format.patch
@@ -1,4 +1,4 @@
-From 59fd750a84bbe5874dec936d2bee9ef11a1b6505 Mon Sep 17 00:00:00 2001
+From 29f811d9d381f2ab54b3c8cef77334e32970ef09 Mon Sep 17 00:00:00 2001
From: Li xin <lixin.fnst@cn.fujitsu.com>
Date: Tue, 21 Jul 2015 02:01:22 +0900
Subject: [PATCH] Fix the test output format
@@ -7,24 +7,26 @@ Upstream-Status: Pending
Signed-off-by: Roy Li <rongqing.li@windriver.com>
Signed-off-by: Li Xin <lixin.fnst@cn.fujitsu.com>
+Signed-off-by: Xiangyu Chen <xiangyu.chen@windriver.com>
+
---
test/regress | 6 +++---
test/regress2 | 11 +++++------
2 files changed, 8 insertions(+), 9 deletions(-)
diff --git a/test/regress b/test/regress
-index 2ce1705..d086a47 100755
+index f06b22f..2fdfacb 100755
--- a/test/regress
+++ b/test/regress
-@@ -74,6 +74,7 @@ probe_hardware()
+@@ -78,6 +78,7 @@ probe_hardware()
if [ $numnodes -lt 2 ] ; then
echo "need at least two nodes with at least $NEEDPAGES each of"
echo "free memory for mempolicy regression tests"
-+ echo "FAIL: numa regress"
- exit 77 # Skip test
++ echo "SKIP: numa regress"
+ exit 77 # Skip test
fi
}
-@@ -207,10 +208,9 @@ main()
+@@ -209,10 +210,9 @@ main()
rm A B
if [ "$EXIT" = 0 ] ; then
@@ -59,6 +61,3 @@ index aa6ea41..450c510 100755
}
# still broken
---
-1.8.4.2
-
diff --git a/meta/recipes-support/numactl/numactl/run-ptest b/meta/recipes-support/numactl/numactl/run-ptest
index bf269da755..e019b0d364 100755
--- a/meta/recipes-support/numactl/numactl/run-ptest
+++ b/meta/recipes-support/numactl/numactl/run-ptest
@@ -8,7 +8,11 @@ if ! numactl -s | grep -q "No NUMA support available on this system."; then
if numademo -t -e 10M; then
echo "PASS: numademo"
else
- echo "FAIL: numademo"
+ if [ "$?" = 77 ] ; then
+ echo "SKIP: numademo"
+ else
+ echo "FAIL: numademo"
+ fi
fi
else
echo "SKIP: ./../test/bind_range"
diff --git a/meta/recipes-support/numactl/numactl_git.bb b/meta/recipes-support/numactl/numactl_git.bb
index 93547ea239..bd16df91ec 100644
--- a/meta/recipes-support/numactl/numactl_git.bb
+++ b/meta/recipes-support/numactl/numactl_git.bb
@@ -8,10 +8,10 @@ SECTION = "apps"
inherit autotools-brokensep ptest
-LIC_FILES_CHKSUM = "file://README.md;beginline=19;endline=32;md5=f8ff2391624f28e481299f3f677b21bb"
+LIC_FILES_CHKSUM = "file://README.md;beginline=19;endline=32;md5=9f34c3af4ed6f3f5df0da5f3c0835a43"
-SRCREV = "dd6de072c92c892a86e18c0fd0dfa1ba57a9a05d"
-PV = "2.0.14"
+SRCREV = "3871b1c42fc71bceadafd745d2eff5dddfc2d67e"
+PV = "2.0.18"
SRC_URI = "git://github.com/numactl/numactl;branch=master;protocol=https \
file://Fix-the-test-output-format.patch \
diff --git a/meta/recipes-support/p11-kit/files/fix-parallel-build-failures.patch b/meta/recipes-support/p11-kit/files/fix-parallel-build-failures.patch
new file mode 100644
index 0000000000..47df027106
--- /dev/null
+++ b/meta/recipes-support/p11-kit/files/fix-parallel-build-failures.patch
@@ -0,0 +1,33 @@
+It fails occasionally with missing generated header files:
+
+| ../git/common/asn1.c:42:10: fatal error: openssl.asn.h: No such file or directory
+| 42 | #include "openssl.asn.h"
+| | ^~~~~~~~~~~~~~~
+| compilation terminated.
+
+According to meson manual page:
+
+https://mesonbuild.com/Wrap-best-practices-and-tips.html#declare-generated-headers-explicitly
+
+'asn_h_dep' should be a dependency of static_library target 'libp11_asn1' to
+make sure that required header files generated before compile common/asn1.c.
+
+Upstream-Status: Submitted [https://github.com/p11-glue/p11-kit/pull/619]
+
+Signed-off-by: Kai Kang <kai.kang@windriver.com>
+---
+ common/meson.build | 1 +
+ 1 file changed, 1 insertion(+)
+
+diff --git a/common/meson.build b/common/meson.build
+index dc86d7b..cc3ec48 100644
+--- a/common/meson.build
++++ b/common/meson.build
+@@ -113,6 +113,7 @@ if with_asn1
+ 'p11-asn1', libp11_asn1_sources,
+ gnu_symbol_visibility: 'hidden',
+ include_directories: configinc,
++ dependencies: asn_h_dep,
+ )
+
+ libp11_asn1_dep = declare_dependency(
diff --git a/meta/recipes-support/p11-kit/p11-kit_0.24.1.bb b/meta/recipes-support/p11-kit/p11-kit_0.24.1.bb
deleted file mode 100644
index 59cbb67961..0000000000
--- a/meta/recipes-support/p11-kit/p11-kit_0.24.1.bb
+++ /dev/null
@@ -1,32 +0,0 @@
-SUMMARY = "Provides a way to load and enumerate PKCS#11 modules"
-DESCRIPTION = " Provides a standard configuration setup for installing PKCS#11 modules in such a way that they're discoverable. Also solves problems with coordinating the use of PKCS#11 by different components or libraries living in the same process."
-HOMEPAGE = "https://p11-glue.github.io/p11-glue/p11-kit.html"
-LICENSE = "BSD-3-Clause"
-LIC_FILES_CHKSUM = "file://COPYING;md5=02933887f609807fbb57aa4237d14a50"
-
-inherit meson gettext pkgconfig gtk-doc bash-completion manpages
-
-DEPENDS = "libtasn1 libtasn1-native libffi"
-
-DEPENDS:append = "${@' glib-2.0' if d.getVar('GTKDOC_ENABLED') == 'True' else ''}"
-
-SRC_URI = "git://github.com/p11-glue/p11-kit;branch=master;protocol=https"
-SRCREV = "dd0590d4e583f107e3e9fafe9ed754149da335d0"
-S = "${WORKDIR}/git"
-
-PACKAGECONFIG ??= ""
-PACKAGECONFIG[manpages] = "-Dman=true,-Dman=false,libxslt-native"
-PACKAGECONFIG[trust-paths] = "-Dtrust_paths=/etc/ssl/certs/ca-certificates.crt,,,ca-certificates"
-
-GTKDOC_MESON_OPTION = 'gtk_doc'
-
-FILES:${PN} += " \
- ${libdir}/p11-kit-proxy.so \
- ${libdir}/pkcs11/*.so \
- ${libdir}/pkcs11/*.la \
- ${systemd_user_unitdir}/*"
-
-# PN contains p11-kit-proxy.so, a symlink to a loadable module
-INSANE_SKIP:${PN} = "dev-so"
-
-BBCLASSEXTEND = "nativesdk"
diff --git a/meta/recipes-support/p11-kit/p11-kit_0.25.3.bb b/meta/recipes-support/p11-kit/p11-kit_0.25.3.bb
new file mode 100644
index 0000000000..b7ebd44abc
--- /dev/null
+++ b/meta/recipes-support/p11-kit/p11-kit_0.25.3.bb
@@ -0,0 +1,34 @@
+SUMMARY = "Provides a way to load and enumerate PKCS#11 modules"
+DESCRIPTION = " Provides a standard configuration setup for installing PKCS#11 modules in such a way that they're discoverable. Also solves problems with coordinating the use of PKCS#11 by different components or libraries living in the same process."
+HOMEPAGE = "https://p11-glue.github.io/p11-glue/p11-kit.html"
+LICENSE = "BSD-3-Clause"
+LIC_FILES_CHKSUM = "file://COPYING;md5=02933887f609807fbb57aa4237d14a50"
+
+inherit meson gettext pkgconfig gtk-doc bash-completion manpages
+
+DEPENDS = "libtasn1 libtasn1-native libffi"
+
+DEPENDS:append = "${@' glib-2.0' if d.getVar('GTKDOC_ENABLED') == 'True' else ''}"
+
+SRC_URI = "gitsm://github.com/p11-glue/p11-kit;branch=master;protocol=https \
+ file://fix-parallel-build-failures.patch \
+ "
+SRCREV = "917e02a3211dabbdea4b079cb598581dce84fda1"
+S = "${WORKDIR}/git"
+
+PACKAGECONFIG ??= ""
+PACKAGECONFIG[manpages] = "-Dman=true,-Dman=false,libxslt-native"
+PACKAGECONFIG[trust-paths] = "-Dtrust_paths=/etc/ssl/certs/ca-certificates.crt,,,ca-certificates"
+
+GTKDOC_MESON_OPTION = 'gtk_doc'
+
+FILES:${PN} += " \
+ ${libdir}/p11-kit-proxy.so \
+ ${libdir}/pkcs11/*.so \
+ ${libdir}/pkcs11/*.la \
+ ${systemd_user_unitdir}/*"
+
+# PN contains p11-kit-proxy.so, a symlink to a loadable module
+INSANE_SKIP:${PN} = "dev-so"
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-support/pinentry/pinentry-1.2.0/gpg-error_pkconf.patch b/meta/recipes-support/pinentry/pinentry-1.2.1/gpg-error_pkconf.patch
index 507c0c3917..bb7e43b1e7 100644
--- a/meta/recipes-support/pinentry/pinentry-1.2.0/gpg-error_pkconf.patch
+++ b/meta/recipes-support/pinentry/pinentry-1.2.1/gpg-error_pkconf.patch
@@ -1,4 +1,4 @@
-From 54a4c9d3e5f1897ed4b978d5cdee646ca7a4f637 Mon Sep 17 00:00:00 2001
+From 785777dc0bc6b69ff68c91547ec6b6634049662f Mon Sep 17 00:00:00 2001
From: Armin Kuster <akuster@mvista.com>
Date: Fri, 2 Sep 2005 11:50:01 +0000
Subject: [PATCH] Add gtk+, avahi, dbus-0.34 (.36 coming soon) and
@@ -11,14 +11,14 @@ Upstream-Status: Inappropriate [OE specific]
Signed-off-by: Armin Kuster <akuster@mvista.com>
---
- m4/gpg-error.m4 | 159 ++----------------------------------------------
- 1 file changed, 4 insertions(+), 155 deletions(-)
+ m4/gpg-error.m4 | 160 ++----------------------------------------------
+ 1 file changed, 4 insertions(+), 156 deletions(-)
diff --git a/m4/gpg-error.m4 b/m4/gpg-error.m4
-index 56a5d07..c0bec1f 100644
+index 4b5cd40..7dfbb83 100644
--- a/m4/gpg-error.m4
+++ b/m4/gpg-error.m4
-@@ -26,159 +26,12 @@ dnl config script does not match the host specification the script
+@@ -26,160 +26,12 @@ dnl config script does not match the host specification the script
dnl is added to the gpg_config_script_warn variable.
dnl
AC_DEFUN([AM_PATH_GPG_ERROR],
@@ -135,6 +135,7 @@ index 56a5d07..c0bec1f 100644
- fi
- elif test "$GPG_ERROR_CONFIG" != "no"; then
- gpg_error_config_version=`$GPG_ERROR_CONFIG --version`
+- unset GPGRT_CONFIG
- fi
- if test "$GPG_ERROR_CONFIG" != "no"; then
- req_major=`echo $min_gpg_error_version | \
@@ -182,7 +183,7 @@ index 56a5d07..c0bec1f 100644
if test x"$gpg_error_config_host" != xnone ; then
if test x"$gpg_error_config_host" != x"$host" ; then
AC_MSG_WARN([[
-@@ -193,10 +46,6 @@ AC_DEFUN([AM_PATH_GPG_ERROR],
+@@ -194,10 +46,6 @@ AC_DEFUN([AM_PATH_GPG_ERROR],
fi
fi
else
diff --git a/meta/recipes-support/pinentry/pinentry-1.2.0/libassuan_pkgconf.patch b/meta/recipes-support/pinentry/pinentry-1.2.1/libassuan_pkgconf.patch
index f4aec2d1c3..f4aec2d1c3 100644
--- a/meta/recipes-support/pinentry/pinentry-1.2.0/libassuan_pkgconf.patch
+++ b/meta/recipes-support/pinentry/pinentry-1.2.1/libassuan_pkgconf.patch
diff --git a/meta/recipes-support/pinentry/pinentry_1.2.0.bb b/meta/recipes-support/pinentry/pinentry_1.2.0.bb
deleted file mode 100644
index 169cac8965..0000000000
--- a/meta/recipes-support/pinentry/pinentry_1.2.0.bb
+++ /dev/null
@@ -1,36 +0,0 @@
-SUMMARY = "Collection of simple PIN or passphrase entry dialogs"
-DESCRIPTION = "\
- Pinentry is a collection of simple PIN or passphrase entry dialogs which \
- utilize the Assuan protocol as described by the aegypten project; see \
- http://www.gnupg.org/aegypten/ for details."
-
-HOMEPAGE = "http://www.gnupg.org/related_software/pinentry/index.en.html"
-LICENSE = "GPL-2.0-only"
-LIC_FILES_CHKSUM = "file://COPYING;md5=cbbd794e2a0a289b9dfcc9f513d1996e"
-
-DEPENDS = "gettext-native libassuan libgpg-error"
-
-UPSTREAM_CHECK_URI = "https://gnupg.org/download/index.html"
-SRC_URI = "${GNUPG_MIRROR}/${BPN}/${BPN}-${PV}.tar.bz2 \
- file://libassuan_pkgconf.patch \
- file://gpg-error_pkconf.patch \
-"
-
-SRC_URI[sha256sum] = "10072045a3e043d0581f91cd5676fcac7ffee957a16636adedaa4f583a616470"
-
-inherit autotools pkgconfig
-
-PACKAGECONFIG ??= "ncurses libcap"
-
-PACKAGECONFIG[ncurses] = "--enable-ncurses --with-ncurses-include-dir=${STAGING_INCDIR}, --disable-ncurses, ncurses"
-PACKAGECONFIG[libcap] = "--with-libcap, --without-libcap, libcap"
-PACKAGECONFIG[qt] = "--enable-pinentry-qt, --disable-pinentry-qt, qtbase-native qtbase"
-PACKAGECONFIG[gtk2] = "--enable-pinentry-gtk2, --disable-pinentry-gtk2, gtk+ glib-2.0"
-
-PACKAGECONFIG[secret] = "--enable-libsecret, --disable-libsecret, libsecret"
-
-EXTRA_OECONF = " \
- --disable-rpath \
-"
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-support/pinentry/pinentry_1.2.1.bb b/meta/recipes-support/pinentry/pinentry_1.2.1.bb
new file mode 100644
index 0000000000..7daf80f36e
--- /dev/null
+++ b/meta/recipes-support/pinentry/pinentry_1.2.1.bb
@@ -0,0 +1,38 @@
+SUMMARY = "Collection of simple PIN or passphrase entry dialogs"
+DESCRIPTION = "\
+ Pinentry is a collection of simple PIN or passphrase entry dialogs which \
+ utilize the Assuan protocol as described by the aegypten project; see \
+ http://www.gnupg.org/aegypten/ for details."
+
+HOMEPAGE = "http://www.gnupg.org/related_software/pinentry/index.en.html"
+LICENSE = "GPL-2.0-only"
+LIC_FILES_CHKSUM = "file://COPYING;md5=cbbd794e2a0a289b9dfcc9f513d1996e"
+
+DEPENDS = "gettext-native libassuan libgpg-error"
+
+UPSTREAM_CHECK_URI = "https://gnupg.org/download/index.html"
+SRC_URI = "${GNUPG_MIRROR}/${BPN}/${BPN}-${PV}.tar.bz2 \
+ file://libassuan_pkgconf.patch \
+ file://gpg-error_pkconf.patch \
+"
+
+SRC_URI[sha256sum] = "457a185e5a85238fb945a955dc6352ab962dc8b48720b62fc9fa48c7540a4067"
+
+inherit autotools pkgconfig
+
+PACKAGECONFIG ??= "ncurses"
+
+PACKAGECONFIG[ncurses] = "--enable-ncurses --with-ncurses-include-dir=${STAGING_INCDIR}, --disable-ncurses, ncurses"
+PACKAGECONFIG[qt] = "--enable-pinentry-qt, --disable-pinentry-qt, qtbase-native qtbase"
+PACKAGECONFIG[gtk2] = "--enable-pinentry-gtk2, --disable-pinentry-gtk2, gtk+ glib-2.0"
+
+PACKAGECONFIG[secret] = "--enable-libsecret, --disable-libsecret, libsecret"
+
+EXTRA_OECONF = " \
+ --disable-rpath \
+"
+EXTRA_OECONF:append:libc-musl = " \
+ ac_cv_should_define__xopen_source=yes \
+"
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-support/popt/popt/0001-popt-test-output-format-for-ptest.patch b/meta/recipes-support/popt/popt/0001-popt-test-output-format-for-ptest.patch
deleted file mode 100644
index 020949cc34..0000000000
--- a/meta/recipes-support/popt/popt/0001-popt-test-output-format-for-ptest.patch
+++ /dev/null
@@ -1,68 +0,0 @@
-From 330b77081c3a4c7ae76cb08602db385b40fa7ff7 Mon Sep 17 00:00:00 2001
-From: Simone Weiss <simone.weiss@elektrobit.com>
-Date: Mon, 13 Dec 2021 09:29:53 +0000
-Subject: [PATCH] popt test output format for ptest
-
-patch test output format to match simple-test as it is used in ptests.
-
-Upstream-Status: Pending
-
-Signed-off-by: Simone Weiss <simone.weiss@elektrobit.com>
-Signed-off-by: David Niederpruem <david.niederpruem@elektrobit.com>
----
- tests/testit.sh | 20 ++++++++------------
- 1 file changed, 8 insertions(+), 12 deletions(-)
-
-diff --git a/tests/testit.sh b/tests/testit.sh
-index 2a7b4aa..50b42e2 100755
---- a/tests/testit.sh
-+++ b/tests/testit.sh
-@@ -5,12 +5,11 @@ run() {
- name=$1; shift
- answer=$1; shift
-
-- echo Running test $name.
--
- result=`HOME=$builddir $builddir/$prog $*`
- if [ "$answer" != "$result" ]; then
-- echo "Test \"$prog $*\" failed with: \"$result\" != \"$answer\" "
-- exit 2
-+ echo "FAIL: $name"
-+ else
-+ echo "PASS: $name"
- fi
- }
-
-@@ -23,17 +22,17 @@ run_diff() {
- out=$builddir/tmp.out
- diff_file=$builddir/tmp.diff
-
-- echo Running test $name.
--
- $builddir/$prog $in_file > $out
- ret=$?
- diff $out $answer_file > $diff_file
- diff_ret=$?
-
- if [ "$diff_ret" != "0" ]; then
-- echo "Test \"$name\" failed output is in $out, diff is:"
-- cat $diff_file
-- exit 2
-+ echo "FAIL: $name"
-+ echo "diff is:"
-+ cat $diff_file
-+ else
-+ echo "PASS: $name"
- fi
- rm $out $diff_file
- }
-@@ -174,6 +173,3 @@ Help options:
- #run_diff test3 "test3 - 51" test3-data/01.input test3-data/01.answer
- #run_diff test3 "test3 - 52" test3-data/02.input test3-data/02.answer
- #run_diff test3 "test3 - 53" test3-data/03.input test3-data/03.answer
--
--echo ""
--echo "Passed."
---
-2.17.1
-
diff --git a/meta/recipes-support/popt/popt_1.18.bb b/meta/recipes-support/popt/popt_1.18.bb
deleted file mode 100644
index af8add4ad6..0000000000
--- a/meta/recipes-support/popt/popt_1.18.bb
+++ /dev/null
@@ -1,33 +0,0 @@
-SUMMARY = "Library for parsing command line options"
-DESCRIPTION = "Popt is a C library for parsing command line parameters. Popt was heavily influenced by the getopt() and getopt_long() functions, but it improves on them by allowing more powerful argument expansion. Popt can parse arbitrary argv[] style arrays and automatically set variables based on command line arguments."
-HOMEPAGE = "https://www.rpm.org/"
-SECTION = "libs"
-
-LICENSE = "MIT"
-LIC_FILES_CHKSUM = "file://COPYING;md5=cb0613c30af2a8249b8dcc67d3edb06d"
-
-DEPENDS = "virtual/libiconv"
-
-SRC_URI = "\
- http://ftp.rpm.org/popt/releases/popt-1.x/${BP}.tar.gz \
- file://0001-popt-test-output-format-for-ptest.patch \
- file://run-ptest \
-"
-SRC_URI[sha256sum] = "5159bc03a20b28ce363aa96765f37df99ea4d8850b1ece17d1e6ad5c24fdc5d1"
-
-inherit autotools gettext ptest
-
-RDEPENDS_${PN}-ptest += "bash"
-
-do_compile_ptest() {
- sed 's#lt-test1#test1#g' ${S}/tests/testit.sh > ${B}/tests/testit.sh
-}
-
-do_install_ptest() {
- install ${B}/tests/.libs/test* ${D}/${PTEST_PATH}
- install ${B}/tests/.libs/tdict ${D}/${PTEST_PATH}
- install ${B}/tests/testit.sh ${D}/${PTEST_PATH}
- install ${B}/tests/test-poptrc ${D}/${PTEST_PATH}
-}
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-support/popt/popt_1.19.bb b/meta/recipes-support/popt/popt_1.19.bb
new file mode 100644
index 0000000000..b1d8bea790
--- /dev/null
+++ b/meta/recipes-support/popt/popt_1.19.bb
@@ -0,0 +1,31 @@
+SUMMARY = "Library for parsing command line options"
+DESCRIPTION = "Popt is a C library for parsing command line parameters. Popt was heavily influenced by the getopt() and getopt_long() functions, but it improves on them by allowing more powerful argument expansion. Popt can parse arbitrary argv[] style arrays and automatically set variables based on command line arguments."
+HOMEPAGE = "https://www.rpm.org/"
+SECTION = "libs"
+
+LICENSE = "MIT"
+LIC_FILES_CHKSUM = "file://COPYING;md5=e0206ac9471d06667e076212db20c5f4"
+
+DEPENDS = "virtual/libiconv"
+
+SRC_URI = "http://ftp.rpm.org/popt/releases/popt-1.x/${BP}.tar.gz \
+ file://run-ptest \
+ "
+SRC_URI[sha256sum] = "c25a4838fc8e4c1c8aacb8bd620edb3084a3d63bf8987fdad3ca2758c63240f9"
+
+inherit autotools gettext ptest
+
+RDEPENDS:${PN}-ptest += "bash"
+
+do_compile_ptest() {
+ sed 's#lt-test1#test1#g' ${S}/tests/testit.sh > ${B}/tests/testit.sh
+}
+
+do_install_ptest() {
+ install ${B}/tests/.libs/test* ${D}/${PTEST_PATH}
+ install ${B}/tests/.libs/tdict ${D}/${PTEST_PATH}
+ install ${B}/tests/testit.sh ${D}/${PTEST_PATH}
+ install ${B}/tests/test-poptrc ${D}/${PTEST_PATH}
+}
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-support/ptest-runner/ptest-runner_2.4.2.bb b/meta/recipes-support/ptest-runner/ptest-runner_2.4.2.bb
deleted file mode 100644
index 5a1d329993..0000000000
--- a/meta/recipes-support/ptest-runner/ptest-runner_2.4.2.bb
+++ /dev/null
@@ -1,34 +0,0 @@
-SUMMARY = "A C program to run all installed ptests"
-DESCRIPTION = "The ptest-runner2 package installs a ptest-runner \
-program which loops through all installed ptest test suites and \
-runs them in sequence."
-HOMEPAGE = "http://git.yoctoproject.org/cgit/cgit.cgi/ptest-runner2/about/"
-
-LICENSE = "GPL-2.0-or-later"
-LIC_FILES_CHKSUM = "file://LICENSE;md5=751419260aa954499f7abaabaa882bbe"
-
-SRCREV = "bcb82804daa8f725b6add259dcef2067e61a75aa"
-PV .= "+git${SRCPV}"
-
-SRC_URI = "git://git.yoctoproject.org/ptest-runner2;branch=master \
-"
-
-S = "${WORKDIR}/git"
-
-FILES:${PN} = "${bindir}/ptest-runner ${bindir}/ptest-runner-collect-system-data"
-
-EXTRA_OEMAKE = "-e MAKEFLAGS= CFLAGS="${CFLAGS} -DDEFAULT_DIRECTORY=\\\"${libdir}\\\"""
-
-do_compile () {
- oe_runmake
-}
-
-do_install () {
- install -D -m 0755 ${S}/ptest-runner ${D}${bindir}/ptest-runner
- install -D -m 0755 ${S}/ptest-runner-collect-system-data ${D}${bindir}/ptest-runner-collect-system-data
-}
-
-RDEPENDS:${PN}:append:libc-glibc = " libgcc"
-
-# pstree is called by ptest-runner-collect-system-data
-RDEPENDS:${PN}:append = " pstree"
diff --git a/meta/recipes-support/ptest-runner/ptest-runner_2.4.3.bb b/meta/recipes-support/ptest-runner/ptest-runner_2.4.3.bb
new file mode 100644
index 0000000000..e6668da01f
--- /dev/null
+++ b/meta/recipes-support/ptest-runner/ptest-runner_2.4.3.bb
@@ -0,0 +1,39 @@
+SUMMARY = "A C program to run all installed ptests"
+DESCRIPTION = "The ptest-runner2 package installs a ptest-runner \
+program which loops through all installed ptest test suites and \
+runs them in sequence."
+HOMEPAGE = "http://git.yoctoproject.org/cgit/cgit.cgi/ptest-runner2/about/"
+
+LICENSE = "GPL-2.0-or-later"
+LIC_FILES_CHKSUM = "file://LICENSE;md5=751419260aa954499f7abaabaa882bbe"
+
+SRCREV = "92c1b97bfdb4a94acc1cabcaf97eef52dc29144c"
+PV .= "+git"
+
+SRC_URI = "git://git.yoctoproject.org/ptest-runner2;branch=master;protocol=https \
+"
+
+S = "${WORKDIR}/git"
+
+FILES:${PN} = "${bindir}/ptest-runner ${bindir}/ptest-runner-collect-system-data"
+
+EXTRA_OEMAKE = "-e MAKEFLAGS= CFLAGS="${CFLAGS} -DDEFAULT_DIRECTORY=\\\"${libdir}\\\"""
+
+do_compile () {
+ oe_runmake
+}
+
+do_install () {
+ install -D -m 0755 ${S}/ptest-runner ${D}${bindir}/ptest-runner
+ install -D -m 0755 ${S}/ptest-runner-collect-system-data ${D}${bindir}/ptest-runner-collect-system-data
+}
+
+RDEPENDS:${PN}:append:libc-glibc = " libgcc"
+
+# pstree is called by ptest-runner-collect-system-data
+RDEPENDS:${PN}:append = " pstree"
+
+# Create a non-root user that test suites can use easily
+inherit useradd
+USERADD_PACKAGES = "${PN}"
+USERADD_PARAM:${PN} = "--system --no-create-home --home / --user-group ptest"
diff --git a/meta/recipes-support/re2c/re2c_3.0.bb b/meta/recipes-support/re2c/re2c_3.0.bb
deleted file mode 100644
index ab047601ae..0000000000
--- a/meta/recipes-support/re2c/re2c_3.0.bb
+++ /dev/null
@@ -1,16 +0,0 @@
-SUMMARY = "Tool for writing very fast and very flexible scanners"
-DESCRIPTION = "A free and open-source lexer generator for C, C++ and Go. It compiles regular expressions to determinisitic finite automata and encodes the automata in the form of a program in the target language. Unlike any other such tool, re2c focuses on generating high efficient code for regular expression matching. As a result this allows a much broader range of use than any traditional lexer."
-HOMEPAGE = "http://re2c.org/"
-BUGTRACKER = "https://github.com/skvadrik/re2c/issues"
-AUTHOR = "Marcus Börger <helly@users.sourceforge.net>"
-SECTION = "devel"
-LICENSE = "PD"
-LIC_FILES_CHKSUM = "file://LICENSE;md5=64eca4d8a3b67f9dc7656094731a2c8d"
-
-SRC_URI = "https://github.com/skvadrik/re2c/releases/download/${PV}/${BPN}-${PV}.tar.xz"
-SRC_URI[sha256sum] = "b3babbbb1461e13fe22c630a40c43885efcfbbbb585830c6f4c0d791cf82ba0b"
-UPSTREAM_CHECK_URI = "https://github.com/skvadrik/re2c/releases"
-
-BBCLASSEXTEND = "native nativesdk"
-
-inherit autotools
diff --git a/meta/recipes-support/re2c/re2c_3.1.bb b/meta/recipes-support/re2c/re2c_3.1.bb
new file mode 100644
index 0000000000..a4b20422f6
--- /dev/null
+++ b/meta/recipes-support/re2c/re2c_3.1.bb
@@ -0,0 +1,15 @@
+SUMMARY = "Tool for writing very fast and very flexible scanners"
+DESCRIPTION = "A free and open-source lexer generator for C, C++ and Go. It compiles regular expressions to determinisitic finite automata and encodes the automata in the form of a program in the target language. Unlike any other such tool, re2c focuses on generating high efficient code for regular expression matching. As a result this allows a much broader range of use than any traditional lexer."
+HOMEPAGE = "http://re2c.org/"
+BUGTRACKER = "https://github.com/skvadrik/re2c/issues"
+SECTION = "devel"
+LICENSE = "PD"
+LIC_FILES_CHKSUM = "file://LICENSE;md5=64eca4d8a3b67f9dc7656094731a2c8d"
+
+SRC_URI = "${GITHUB_BASE_URI}/download/${PV}/${BPN}-${PV}.tar.xz"
+SRC_URI[sha256sum] = "0ac299ad359e3f512b06a99397d025cfff81d3be34464ded0656f8a96676c029"
+GITHUB_BASE_URI = "https://github.com/skvadrik/re2c/releases"
+
+BBCLASSEXTEND = "native nativesdk"
+
+inherit autotools github-releases
diff --git a/meta/recipes-support/rng-tools/rng-tools/rng-tools.service b/meta/recipes-support/rng-tools/rng-tools/rng-tools.service
new file mode 100644
index 0000000000..5ae2fba215
--- /dev/null
+++ b/meta/recipes-support/rng-tools/rng-tools/rng-tools.service
@@ -0,0 +1,32 @@
+[Unit]
+Description=Hardware RNG Entropy Gatherer Daemon
+DefaultDependencies=no
+Conflicts=shutdown.target
+Before=sysinit.target shutdown.target
+ConditionVirtualization=!container
+
+[Service]
+EnvironmentFile=-@SYSCONFDIR@/default/rng-tools
+ExecStart=@SBINDIR@/rngd -f $EXTRA_ARGS
+CapabilityBoundingSet=CAP_SYS_ADMIN
+IPAddressDeny=any
+LockPersonality=yes
+MemoryDenyWriteExecute=yes
+NoNewPrivileges=yes
+PrivateTmp=yes
+ProtectControlGroups=yes
+ProtectHome=yes
+ProtectHostname=yes
+ProtectKernelModules=yes
+ProtectKernelLogs=yes
+ProtectSystem=strict
+RestrictAddressFamilies=AF_UNIX
+RestrictNamespaces=yes
+RestrictRealtime=yes
+RestrictSUIDSGID=yes
+SystemCallArchitectures=native
+SystemCallErrorNumber=EPERM
+SystemCallFilter=@system-service
+
+[Install]
+WantedBy=sysinit.target
diff --git a/meta/recipes-support/rng-tools/rng-tools/rngd.service b/meta/recipes-support/rng-tools/rng-tools/rngd.service
deleted file mode 100644
index 568686e80e..0000000000
--- a/meta/recipes-support/rng-tools/rng-tools/rngd.service
+++ /dev/null
@@ -1,33 +0,0 @@
-[Unit]
-Description=Hardware RNG Entropy Gatherer Daemon
-DefaultDependencies=no
-After=systemd-udev-settle.service
-Before=sysinit.target shutdown.target
-Wants=systemd-udev-settle.service
-Conflicts=shutdown.target
-
-[Service]
-EnvironmentFile=-@SYSCONFDIR@/default/rng-tools
-ExecStart=@SBINDIR@/rngd -f $EXTRA_ARGS
-CapabilityBoundingSet=CAP_SYS_ADMIN
-IPAddressDeny=any
-LockPersonality=yes
-MemoryDenyWriteExecute=yes
-NoNewPrivileges=yes
-PrivateTmp=yes
-ProtectControlGroups=yes
-ProtectHome=yes
-ProtectHostname=yes
-ProtectKernelModules=yes
-ProtectKernelLogs=yes
-ProtectSystem=strict
-RestrictAddressFamilies=AF_UNIX
-RestrictNamespaces=yes
-RestrictRealtime=yes
-RestrictSUIDSGID=yes
-SystemCallArchitectures=native
-SystemCallErrorNumber=EPERM
-SystemCallFilter=@system-service
-
-[Install]
-WantedBy=sysinit.target
diff --git a/meta/recipes-support/rng-tools/rng-tools_6.15.bb b/meta/recipes-support/rng-tools/rng-tools_6.15.bb
deleted file mode 100644
index 0696351903..0000000000
--- a/meta/recipes-support/rng-tools/rng-tools_6.15.bb
+++ /dev/null
@@ -1,61 +0,0 @@
-SUMMARY = "Random number generator daemon"
-DESCRIPTION = "Check and feed random data from hardware device to kernel"
-AUTHOR = "Philipp Rumpf, Jeff Garzik <jgarzik@pobox.com>, \
- Henrique de Moraes Holschuh <hmh@debian.org>"
-HOMEPAGE = "https://github.com/nhorman/rng-tools"
-BUGTRACKER = "https://github.com/nhorman/rng-tools/issues"
-LICENSE = "GPL-2.0-only"
-LIC_FILES_CHKSUM = "file://COPYING;md5=b234ee4d69f5fce4486a80fdaf4a4263"
-DEPENDS = "sysfsutils openssl"
-
-SRC_URI = "git://github.com/nhorman/rng-tools.git;branch=master;protocol=https \
- file://init \
- file://default \
- file://rngd.service \
- "
-SRCREV = "381f69828b782afda574f259c1b7549f48f9bb77"
-
-S = "${WORKDIR}/git"
-
-inherit autotools update-rc.d systemd pkgconfig
-
-EXTRA_OECONF = "--without-rtlsdr"
-
-PACKAGECONFIG ??= "libjitterentropy"
-PACKAGECONFIG:libc-musl = "libargp libjitterentropy"
-
-PACKAGECONFIG[libargp] = "--with-libargp,--without-libargp,argp-standalone,"
-PACKAGECONFIG[libjitterentropy] = "--enable-jitterentropy,--disable-jitterentropy,libjitterentropy"
-PACKAGECONFIG[libp11] = "--with-pkcs11,--without-pkcs11,libp11 openssl"
-PACKAGECONFIG[nistbeacon] = "--with-nistbeacon,--without-nistbeacon,curl libxml2 openssl"
-
-INITSCRIPT_NAME = "rng-tools"
-INITSCRIPT_PARAMS = "start 03 2 3 4 5 . stop 30 0 6 1 ."
-
-SYSTEMD_SERVICE:${PN} = "rngd.service"
-
-CFLAGS += " -DJENT_CONF_ENABLE_INTERNAL_TIMER "
-
-# Refer autogen.sh in rng-tools
-do_configure:prepend() {
- cp ${S}/README.md ${S}/README
-}
-
-do_install:append() {
- install -Dm 0644 ${WORKDIR}/default ${D}${sysconfdir}/default/rng-tools
- install -Dm 0755 ${WORKDIR}/init ${D}${sysconfdir}/init.d/rng-tools
- install -Dm 0644 ${WORKDIR}/rngd.service \
- ${D}${systemd_system_unitdir}/rngd.service
- sed -i \
- -e 's,@SYSCONFDIR@,${sysconfdir},g' \
- -e 's,@SBINDIR@,${sbindir},g' \
- ${D}${sysconfdir}/init.d/rng-tools \
- ${D}${systemd_system_unitdir}/rngd.service
-
- if [ "${@bb.utils.contains('PACKAGECONFIG', 'nistbeacon', 'yes', 'no', d)}" = "yes" ]; then
- sed -i \
- -e '/^IPAddressDeny=any/d' \
- -e '/^RestrictAddressFamilies=/ s/$/ AF_INET AF_INET6/' \
- ${D}${systemd_system_unitdir}/rngd.service
- fi
-}
diff --git a/meta/recipes-support/rng-tools/rng-tools_6.16.bb b/meta/recipes-support/rng-tools/rng-tools_6.16.bb
new file mode 100644
index 0000000000..f0aa3ff93f
--- /dev/null
+++ b/meta/recipes-support/rng-tools/rng-tools_6.16.bb
@@ -0,0 +1,69 @@
+SUMMARY = "Random number generator daemon"
+DESCRIPTION = "Check and feed random data from hardware device to kernel"
+HOMEPAGE = "https://github.com/nhorman/rng-tools"
+BUGTRACKER = "https://github.com/nhorman/rng-tools/issues"
+LICENSE = "GPL-2.0-only"
+LIC_FILES_CHKSUM = "file://COPYING;md5=b234ee4d69f5fce4486a80fdaf4a4263"
+DEPENDS = "openssl libcap"
+
+SRC_URI = "git://github.com/nhorman/rng-tools.git;branch=master;protocol=https \
+ file://init \
+ file://default \
+ file://rng-tools.service \
+ "
+SRCREV = "e061c313b95890eb5fa0ada0cd6eec619dafdfe2"
+
+S = "${WORKDIR}/git"
+
+inherit autotools update-rc.d systemd pkgconfig
+
+EXTRA_OECONF = "--without-rtlsdr"
+
+PACKAGECONFIG ??= "libjitterentropy"
+PACKAGECONFIG:libc-musl = "libargp libjitterentropy"
+
+PACKAGECONFIG[libargp] = "--with-libargp,--without-libargp,argp-standalone,"
+PACKAGECONFIG[libjitterentropy] = "--enable-jitterentropy,--disable-jitterentropy,libjitterentropy"
+PACKAGECONFIG[libp11] = "--with-pkcs11,--without-pkcs11,libp11 openssl"
+PACKAGECONFIG[nistbeacon] = "--with-nistbeacon,--without-nistbeacon,curl libxml2"
+PACKAGECONFIG[qrypt] = "--with-qrypt,--without-qrypt,curl"
+
+INITSCRIPT_PACKAGES = "${PN}-service"
+INITSCRIPT_NAME:${PN}-service = "rng-tools"
+INITSCRIPT_PARAMS:${PN}-service = "start 03 2 3 4 5 . stop 30 0 6 1 ."
+
+SYSTEMD_PACKAGES = "${PN}-service"
+SYSTEMD_SERVICE:${PN}-service = "rng-tools.service"
+
+CFLAGS += " -DJENT_CONF_ENABLE_INTERNAL_TIMER "
+
+PACKAGES =+ "${PN}-service"
+
+FILES:${PN}-service += " \
+ ${sysconfdir}/init.d/rng-tools \
+ ${sysconfdir}/default/rng-tools \
+"
+
+# Refer autogen.sh in rng-tools
+do_configure:prepend() {
+ cp ${S}/README.md ${S}/README
+}
+
+do_install:append() {
+ install -Dm 0644 ${WORKDIR}/default ${D}${sysconfdir}/default/rng-tools
+ install -Dm 0755 ${WORKDIR}/init ${D}${sysconfdir}/init.d/rng-tools
+ install -Dm 0644 ${WORKDIR}/rng-tools.service \
+ ${D}${systemd_system_unitdir}/rng-tools.service
+ sed -i \
+ -e 's,@SYSCONFDIR@,${sysconfdir},g' \
+ -e 's,@SBINDIR@,${sbindir},g' \
+ ${D}${sysconfdir}/init.d/rng-tools \
+ ${D}${systemd_system_unitdir}/rng-tools.service
+
+ if [ "${@bb.utils.contains('PACKAGECONFIG', 'nistbeacon', 'yes', 'no', d)}" = "yes" ]; then
+ sed -i \
+ -e '/^IPAddressDeny=any/d' \
+ -e '/^RestrictAddressFamilies=/ s/$/ AF_INET AF_INET6/' \
+ ${D}${systemd_system_unitdir}/rng-tools.service
+ fi
+}
diff --git a/meta/recipes-support/serf/serf/0001-Fix-syntax-of-a-print-in-the-scons-file-to-unbreak-b.patch b/meta/recipes-support/serf/serf/0001-Fix-syntax-of-a-print-in-the-scons-file-to-unbreak-b.patch
deleted file mode 100644
index 4a5832ac1a..0000000000
--- a/meta/recipes-support/serf/serf/0001-Fix-syntax-of-a-print-in-the-scons-file-to-unbreak-b.patch
+++ /dev/null
@@ -1,29 +0,0 @@
-From 99f6e1b0d68281b63218d6adfe68cd9e331ac5be Mon Sep 17 00:00:00 2001
-From: Khem Raj <raj.khem@gmail.com>
-Date: Mon, 3 Sep 2018 10:50:08 -0700
-Subject: [PATCH] Fix syntax of a print() in the scons file to unbreak building
- with most recent scons version.
-
-* SConstruct Use Python 3.0 valid syntax to make Scons 3.0.0 happy on both python
- 3.0 and 2.7.
-
-Upstream-Status: Backport
-[https://svn.apache.org/viewvc/serf/trunk/SConstruct?r1=1809132&r2=1811083&diff_format=h]
-Signed-off-by: Khem Raj <raj.khem@gmail.com>
----
- SConstruct | 2 +-
- 1 file changed, 1 insertion(+), 1 deletion(-)
-
-diff --git a/SConstruct b/SConstruct
-index 1670459..18a45fa 100644
---- a/SConstruct
-+++ b/SConstruct
-@@ -184,7 +184,7 @@ CALLOUT_OKAY = not (env.GetOption('clean') or env.GetOption('help'))
-
- unknown = opts.UnknownVariables()
- if unknown:
-- print 'Warning: Used unknown variables:', ', '.join(unknown.keys())
-+ print('Warning: Used unknown variables:', ', '.join(unknown.keys()))
-
- apr = str(env['APR'])
- apu = str(env['APU'])
diff --git a/meta/recipes-support/serf/serf/0001-buckets-ssl_buckets.c-do-not-use-ERR_GET_FUNC.patch b/meta/recipes-support/serf/serf/0001-buckets-ssl_buckets.c-do-not-use-ERR_GET_FUNC.patch
deleted file mode 100644
index 91ccc8a474..0000000000
--- a/meta/recipes-support/serf/serf/0001-buckets-ssl_buckets.c-do-not-use-ERR_GET_FUNC.patch
+++ /dev/null
@@ -1,28 +0,0 @@
-From 2f45711a66ff99886b6e4a5708e2db01a63e5af4 Mon Sep 17 00:00:00 2001
-From: Alexander Kanavin <alex@linutronix.de>
-Date: Fri, 10 Sep 2021 11:05:10 +0200
-Subject: [PATCH] buckets/ssl_buckets.c: do not use ERR_GET_FUNC
-
-Upstream removed it in
-https://github.com/openssl/openssl/pull/16004
-
-Upstream-Status: Inactive-Upstream [lastrelease: 2015, lastcommit: 2019]
-Signed-off-by: Alexander Kanavin <alex@linutronix.de>
----
- buckets/ssl_buckets.c | 3 +--
- 1 file changed, 1 insertion(+), 2 deletions(-)
-
-diff --git a/buckets/ssl_buckets.c b/buckets/ssl_buckets.c
-index b01e535..9801f87 100644
---- a/buckets/ssl_buckets.c
-+++ b/buckets/ssl_buckets.c
-@@ -1325,8 +1325,7 @@ static int ssl_need_client_cert(SSL *ssl, X509 **cert, EVP_PKEY **pkey)
- return 0;
- }
- else {
-- printf("OpenSSL cert error: %d %d %d\n", ERR_GET_LIB(err),
-- ERR_GET_FUNC(err),
-+ printf("OpenSSL cert error: %d %d\n", ERR_GET_LIB(err),
- ERR_GET_REASON(err));
- PKCS12_free(p12);
- bio_meth_free(biom);
diff --git a/meta/recipes-support/serf/serf/0004-Follow-up-to-r1811083-fix-building-with-scons-3.0.0-.patch b/meta/recipes-support/serf/serf/0004-Follow-up-to-r1811083-fix-building-with-scons-3.0.0-.patch
deleted file mode 100644
index 02fa9e3a06..0000000000
--- a/meta/recipes-support/serf/serf/0004-Follow-up-to-r1811083-fix-building-with-scons-3.0.0-.patch
+++ /dev/null
@@ -1,29 +0,0 @@
-From 565211fd082ef653ca9c44a345350fc1451f5a0f Mon Sep 17 00:00:00 2001
-From: Khem Raj <raj.khem@gmail.com>
-Date: Mon, 3 Sep 2018 11:12:38 -0700
-Subject: [PATCH] Follow-up to r1811083 fix building with scons 3.0.0 and
- Python3
-
-* SConstruct: Append decode('utf-8) to FILE.get_contents() to avoid
- TypeError: cannot use a string pattern on a bytes-like object
-
-Upstream-Status: Backport
-[https://svn.apache.org/viewvc/serf/trunk/SConstruct?r1=1811088&r2=1814604]
-Signed-off-by: Khem Raj <raj.khem@gmail.com>
----
- SConstruct | 2 +-
- 1 file changed, 1 insertion(+), 1 deletion(-)
-
-diff --git a/SConstruct b/SConstruct
-index 877731e..7678bb1 100644
---- a/SConstruct
-+++ b/SConstruct
-@@ -169,7 +169,7 @@ env.Append(BUILDERS = {
- match = re.search('SERF_MAJOR_VERSION ([0-9]+).*'
- 'SERF_MINOR_VERSION ([0-9]+).*'
- 'SERF_PATCH_VERSION ([0-9]+)',
-- env.File('serf.h').get_contents(),
-+ env.File('serf.h').get_contents().decode('utf-8'),
- re.DOTALL)
- MAJOR, MINOR, PATCH = [int(x) for x in match.groups()]
- env.Append(MAJOR=str(MAJOR))
diff --git a/meta/recipes-support/serf/serf/SConstruct.stop.creating.directories.without.sandbox-install.prefix.patch b/meta/recipes-support/serf/serf/SConstruct.stop.creating.directories.without.sandbox-install.prefix.patch
index 4105868a7e..91640d6044 100644
--- a/meta/recipes-support/serf/serf/SConstruct.stop.creating.directories.without.sandbox-install.prefix.patch
+++ b/meta/recipes-support/serf/serf/SConstruct.stop.creating.directories.without.sandbox-install.prefix.patch
@@ -31,7 +31,7 @@ ERROR: scons install execution failed.
and the installed paths (including the paths inside libserf*.pc)
look correct
-Upstream-Status: Inactive-Upstream [lastrelease: 2015, lastcommit: 2019]
+Upstream-Status: Pending
Signed-off-by: Martin Jansa <Martin.Jansa@gmail.com>
diff --git a/meta/recipes-support/serf/serf_1.3.10.bb b/meta/recipes-support/serf/serf_1.3.10.bb
new file mode 100644
index 0000000000..c6b51452aa
--- /dev/null
+++ b/meta/recipes-support/serf/serf_1.3.10.bb
@@ -0,0 +1,40 @@
+SUMMARY = "High-Performance Asynchronous HTTP Client Library"
+DESCRIPTION = "The Apache Serf library is a C-based HTTP client library built upon the Apache \
+Portable Runtime (APR) library. It multiplexes connections, running the \
+read/write communication asynchronously. Memory copies and transformations are \
+kept to a minimum to provide high performance operation."
+HOMEPAGE = "http://serf.apache.org/"
+SRC_URI = "${APACHE_MIRROR}/${BPN}/${BPN}-${PV}.tar.bz2 \
+ file://norpath.patch \
+ file://env.patch \
+ file://0002-SConstruct-Fix-path-quoting-for-.def-generator.patch \
+ file://0003-gen_def.patch \
+ file://SConstruct.stop.creating.directories.without.sandbox-install.prefix.patch \
+ "
+
+SRC_URI[sha256sum] = "be81ef08baa2516ecda76a77adf7def7bc3227eeb578b9a33b45f7b41dc064e6"
+
+LICENSE = "Apache-2.0"
+LIC_FILES_CHKSUM = "file://LICENSE;md5=86d3f3a95c324c9479bd8986968f4327"
+
+inherit scons
+
+DEPENDS += " openssl apr apr-util util-linux expat"
+
+EXTRA_OESCONS = " \
+ LIBDIR=${libdir} \
+ --install-sandbox=${D} \
+ CC="${CC}" \
+ CFLAGS="${CFLAGS}" \
+ LINKFLAGS="${LDFLAGS}" \
+ APR=`which apr-1-config` \
+ APU=`which apu-1-config` \
+ OPENSSL="${STAGING_EXECPREFIXDIR}" \
+ "
+
+# scons creates non-reproducible archives
+do_install:append() {
+ rm ${D}/${libdir}/*.a
+}
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-support/serf/serf_1.3.9.bb b/meta/recipes-support/serf/serf_1.3.9.bb
deleted file mode 100644
index 669f42b8e7..0000000000
--- a/meta/recipes-support/serf/serf_1.3.9.bb
+++ /dev/null
@@ -1,44 +0,0 @@
-SUMMARY = "High-Performance Asynchronous HTTP Client Library"
-DESCRIPTION = "The Apache Serf library is a C-based HTTP client library built upon the Apache \
-Portable Runtime (APR) library. It multiplexes connections, running the \
-read/write communication asynchronously. Memory copies and transformations are \
-kept to a minimum to provide high performance operation."
-HOMEPAGE = "http://serf.apache.org/"
-SRC_URI = "${APACHE_MIRROR}/${BPN}/${BPN}-${PV}.tar.bz2 \
- file://norpath.patch \
- file://env.patch \
- file://0001-Fix-syntax-of-a-print-in-the-scons-file-to-unbreak-b.patch \
- file://0002-SConstruct-Fix-path-quoting-for-.def-generator.patch \
- file://0003-gen_def.patch \
- file://0004-Follow-up-to-r1811083-fix-building-with-scons-3.0.0-.patch \
- file://SConstruct.stop.creating.directories.without.sandbox-install.prefix.patch \
- file://0001-buckets-ssl_buckets.c-do-not-use-ERR_GET_FUNC.patch \
- "
-
-SRC_URI[md5sum] = "370a6340ff20366ab088012cd13f2b57"
-SRC_URI[sha256sum] = "549c2d21c577a8a9c0450facb5cca809f26591f048e466552240947bdf7a87cc"
-
-LICENSE = "Apache-2.0"
-LIC_FILES_CHKSUM = "file://LICENSE;md5=86d3f3a95c324c9479bd8986968f4327"
-
-inherit scons
-
-DEPENDS += " openssl apr apr-util util-linux expat"
-
-EXTRA_OESCONS = " \
- LIBDIR=${libdir} \
- --install-sandbox=${D} \
- CC="${CC}" \
- CFLAGS="${CFLAGS}" \
- LINKFLAGS="${LDFLAGS}" \
- APR=`which apr-1-config` \
- APU=`which apu-1-config` \
- OPENSSL="${STAGING_EXECPREFIXDIR}" \
- "
-
-# scons creates non-reproducible archives
-do_install:append() {
- rm ${D}/${libdir}/*.a
-}
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-support/shared-mime-info/shared-mime-info/0001-Fix-build-with-libxml2-2.12.0-and-clang-17.patch b/meta/recipes-support/shared-mime-info/shared-mime-info/0001-Fix-build-with-libxml2-2.12.0-and-clang-17.patch
new file mode 100644
index 0000000000..936f72ccf8
--- /dev/null
+++ b/meta/recipes-support/shared-mime-info/shared-mime-info/0001-Fix-build-with-libxml2-2.12.0-and-clang-17.patch
@@ -0,0 +1,26 @@
+From 665383306c725f299a1b373f947cda01949d49e4 Mon Sep 17 00:00:00 2001
+From: David Faure <faure@kde.org>
+Date: Sun, 19 Nov 2023 11:18:11 +0100
+Subject: [PATCH] Fix build with libxml2-2.12.0 and clang-17
+
+Fixes #219
+
+Upstream-Status: Backport [https://gitlab.freedesktop.org/xdg/shared-mime-info/-/commit/c918fe77e255150938e83a6aec259f153d303573]
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+---
+ src/test-subclassing.c | 1 +
+ 1 file changed, 1 insertion(+)
+
+diff --git a/src/test-subclassing.c b/src/test-subclassing.c
+index dd099e4..0758164 100644
+--- a/src/test-subclassing.c
++++ b/src/test-subclassing.c
+@@ -1,4 +1,5 @@
+ #include <libxml/tree.h>
++#include <libxml/parser.h>
+ #include <stdio.h>
+ #include <string.h>
+
+--
+2.43.0
+
diff --git a/meta/recipes-support/shared-mime-info/shared-mime-info/0002-Handle-build-with-older-versions-of-GCC.patch b/meta/recipes-support/shared-mime-info/shared-mime-info/0002-Handle-build-with-older-versions-of-GCC.patch
new file mode 100644
index 0000000000..2af6b461b6
--- /dev/null
+++ b/meta/recipes-support/shared-mime-info/shared-mime-info/0002-Handle-build-with-older-versions-of-GCC.patch
@@ -0,0 +1,54 @@
+From 461d00fcd5c5842b9a56f7462d55d46bf21163cc Mon Sep 17 00:00:00 2001
+From: Patrick Williams <patrick@stwcx.xyz>
+Date: Fri, 1 Dec 2023 11:19:02 -0600
+Subject: [PATCH] Handle build with older versions of GCC
+
+Older versions of GCC (prior to 9.1) did not put the `std::filesystem`
+support directly into libstdcpp, but in a separate `libstdc++fs`. Add
+meson logic to detect if an extra linker flag is necessary.
+
+Fixes #223.
+
+Tested on AlmaLinux 8 which uses GCC-8.
+
+Signed-off-by: Patrick Williams <patrick@stwcx.xyz>
+Upstream-Status: Submitted [https://gitlab.freedesktop.org/xdg/shared-mime-info/-/merge_requests/278]
+---
+ meson.build | 14 ++++++++++++++
+ 1 file changed, 14 insertions(+)
+
+diff --git a/meson.build b/meson.build
+index ecc012f..93acd40 100644
+--- a/meson.build
++++ b/meson.build
+@@ -11,6 +11,7 @@ config = configuration_data()
+ i18n = import('i18n')
+
+ cc = meson.get_compiler('c')
++cxx = meson.get_compiler('cpp')
+
+ ###############################################################################
+ # Project configuration
+@@ -46,6 +47,19 @@ if not xdgmime_found
+ ''')
+ endif
+
++###############################################################################
++# Check if GCC needs -lstdc++fs (before 9.1)
++
++if not cxx.links('''
++ #include <filesystem>
++ int main() {
++ return std::filesystem::is_directory(
++ std::filesystem::status("/tmp")) ? 0 : 1;
++ }
++ ''', name: 'std++fs-check')
++ add_project_link_arguments('-lstdc++fs', language : 'cpp')
++endif
++
+ ###############################################################################
+ # Dependencies
+
+--
+2.41.0
+
diff --git a/meta/recipes-support/shared-mime-info/shared-mime-info_2.4.bb b/meta/recipes-support/shared-mime-info/shared-mime-info_2.4.bb
new file mode 100644
index 0000000000..ef5df44ad6
--- /dev/null
+++ b/meta/recipes-support/shared-mime-info/shared-mime-info_2.4.bb
@@ -0,0 +1,31 @@
+SUMMARY = "Shared MIME type database and specification"
+DESCRIPTION = "The shared-mime-info package contains the core database of common types."
+HOMEPAGE = "http://freedesktop.org/wiki/Software/shared-mime-info"
+SECTION = "base"
+
+LICENSE = "GPL-2.0-only"
+LIC_FILES_CHKSUM = "file://COPYING;md5=b234ee4d69f5fce4486a80fdaf4a4263"
+
+DEPENDS = "libxml2 itstool-native glib-2.0 shared-mime-info-native xmlto-native"
+
+SRC_URI = "git://gitlab.freedesktop.org/xdg/shared-mime-info.git;protocol=https;branch=master \
+ file://0001-Fix-build-with-libxml2-2.12.0-and-clang-17.patch \
+ file://0002-Handle-build-with-older-versions-of-GCC.patch"
+SRCREV = "9a6d6b8e963935f145f3a1ef446552de6996dada"
+
+S = "${WORKDIR}/git"
+
+inherit meson pkgconfig gettext python3native mime
+
+EXTRA_OEMESON = "-Dupdate-mimedb=true"
+
+FILES:${PN} += "${datadir}/mime"
+FILES:${PN}-dev += "${datadir}/pkgconfig/shared-mime-info.pc ${datadir}/gettext/its"
+
+# freedesktop.org.xml is only required when updating the mime database,
+# package it separately
+PACKAGES =+ "shared-mime-info-data"
+FILES:shared-mime-info-data = "${datadir}/mime/packages/freedesktop.org.xml"
+RDEPENDS:shared-mime-info-data = "shared-mime-info"
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-support/shared-mime-info/shared-mime-info_git.bb b/meta/recipes-support/shared-mime-info/shared-mime-info_git.bb
deleted file mode 100644
index 387dd7bbad..0000000000
--- a/meta/recipes-support/shared-mime-info/shared-mime-info_git.bb
+++ /dev/null
@@ -1,29 +0,0 @@
-SUMMARY = "Shared MIME type database and specification"
-DESCRIPTION = "The shared-mime-info package contains the core database of common types."
-HOMEPAGE = "http://freedesktop.org/wiki/Software/shared-mime-info"
-SECTION = "base"
-
-LICENSE = "GPL-2.0-only"
-LIC_FILES_CHKSUM = "file://COPYING;md5=b234ee4d69f5fce4486a80fdaf4a4263"
-
-DEPENDS = "libxml2 itstool-native glib-2.0 shared-mime-info-native xmlto-native"
-
-SRC_URI = "git://gitlab.freedesktop.org/xdg/shared-mime-info.git;protocol=https;branch=master"
-SRCREV = "13695c7225c2f525a435e72739c33ac32bbfbbb9"
-PV = "2.2"
-S = "${WORKDIR}/git"
-
-inherit meson pkgconfig gettext python3native mime
-
-EXTRA_OEMESON = "-Dupdate-mimedb=true"
-
-FILES:${PN} += "${datadir}/mime"
-FILES:${PN}-dev += "${datadir}/pkgconfig/shared-mime-info.pc ${datadir}/gettext/its"
-
-# freedesktop.org.xml is only required when updating the mime database,
-# package it separately
-PACKAGES =+ "shared-mime-info-data"
-FILES:shared-mime-info-data = "${datadir}/mime/packages/freedesktop.org.xml"
-RDEPENDS:shared-mime-info-data = "shared-mime-info"
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-support/sqlite/sqlite3_3.38.5.bb b/meta/recipes-support/sqlite/sqlite3_3.38.5.bb
deleted file mode 100644
index d56a3a0209..0000000000
--- a/meta/recipes-support/sqlite/sqlite3_3.38.5.bb
+++ /dev/null
@@ -1,14 +0,0 @@
-require sqlite3.inc
-
-LICENSE = "PD"
-LIC_FILES_CHKSUM = "file://sqlite3.h;endline=11;md5=786d3dc581eff03f4fd9e4a77ed00c66"
-
-SRC_URI = "http://www.sqlite.org/2022/sqlite-autoconf-${SQLITE_PV}.tar.gz"
-SRC_URI[sha256sum] = "5af07de982ba658fd91a03170c945f99c971f6955bc79df3266544373e39869c"
-
-# -19242 is only an issue in specific development branch commits
-CVE_CHECK_IGNORE += "CVE-2019-19242"
-# This is believed to be iOS specific (https://groups.google.com/g/sqlite-dev/c/U7OjAbZO6LA)
-CVE_CHECK_IGNORE += "CVE-2015-3717"
-# Issue in an experimental extension we don't have/use. Fixed by https://sqlite.org/src/info/b1e0c22ec981cf5f
-CVE_CHECK_IGNORE += "CVE-2021-36690"
diff --git a/meta/recipes-support/sqlite/sqlite3_3.45.1.bb b/meta/recipes-support/sqlite/sqlite3_3.45.1.bb
new file mode 100644
index 0000000000..50612feb25
--- /dev/null
+++ b/meta/recipes-support/sqlite/sqlite3_3.45.1.bb
@@ -0,0 +1,8 @@
+require sqlite3.inc
+
+LICENSE = "PD"
+LIC_FILES_CHKSUM = "file://sqlite3.h;endline=11;md5=786d3dc581eff03f4fd9e4a77ed00c66"
+
+SRC_URI = "http://www.sqlite.org/2024/sqlite-autoconf-${SQLITE_PV}.tar.gz"
+SRC_URI[sha256sum] = "cd9c27841b7a5932c9897651e20b86c701dd740556989b01ca596fcfa3d49a0a"
+
diff --git a/meta/recipes-support/taglib/taglib_1.12.bb b/meta/recipes-support/taglib/taglib_1.12.bb
deleted file mode 100644
index 47ad8aacb6..0000000000
--- a/meta/recipes-support/taglib/taglib_1.12.bb
+++ /dev/null
@@ -1,42 +0,0 @@
-SUMMARY = "Library for reading and editing the meta-data of popular audio formats"
-DESCRIPTION = "Platform-independent library (tested on Windows/Linux) for reading and writing metadata in media files, including video, audio, and photo formats. This is a convenient one-stop-shop to present or tag all your media collection, regardless of which format/container these might use. You can read/write the standard or more common tags/properties of a media, or you can also create and retrieve your own custom tags."
-SECTION = "libs/multimedia"
-HOMEPAGE = "http://taglib.github.io/"
-LICENSE = "LGPL-2.1-only | MPL-1.1"
-LIC_FILES_CHKSUM = "file://COPYING.LGPL;md5=4fbd65380cdd255951079008b364516c \
- file://COPYING.MPL;md5=bfe1f75d606912a4111c90743d6c7325 \
- file://taglib/audioproperties.h;beginline=1;endline=24;md5=9df2c7399519b7310568a7c55042ecee"
-
-DEPENDS = "zlib"
-
-SRC_URI = "http://taglib.github.io/releases/${BP}.tar.gz"
-
-SRC_URI[md5sum] = "4313ed2671234e029b7af8f97c84e9af"
-SRC_URI[sha256sum] = "7fccd07669a523b07a15bd24c8da1bbb92206cb19e9366c3692af3d79253b703"
-
-UPSTREAM_CHECK_URI = "http://github.com/taglib/taglib/releases/"
-
-BINCONFIG = "${bindir}/taglib-config"
-
-inherit cmake pkgconfig binconfig-disabled
-
-PACKAGES =+ "${PN}-c"
-FILES:${PN}-c = "${libdir}/libtag_c.so.*"
-
-EXTRA_OECMAKE = "-DBUILD_SHARED_LIBS=ON \
- -DCMAKE_DISABLE_FIND_PACKAGE_Boost=TRUE \
- -DHAVE_BOOST_BYTESWAP=FALSE \
- -DCMAKE_CXX_STANDARD=11 \
- -DCMAKE_CXX_STANDARD_REQUIRED=OFF \
- -DLIB_SUFFIX=${@d.getVar('baselib').replace('lib', '')} \
-"
-CXXFLAGS += "-std=c++11"
-
-do_configure:prepend () {
- rm -f ${S}/admin/ltmain.sh
- rm -f ${S}/admin/libtool.m4.in
-}
-
-# without -fPIC depending packages failed with many error like:
-# | <...>/ld: error: <...>/usr/lib/libtag.a(modfilebase.cpp.o): requires unsupported dynamic reloc R_ARM_THM_MOVW_ABS_NC; recompile with -fPIC
-CXXFLAGS += "-fPIC"
diff --git a/meta/recipes-support/taglib/taglib_2.0.bb b/meta/recipes-support/taglib/taglib_2.0.bb
new file mode 100644
index 0000000000..4bf9be15ae
--- /dev/null
+++ b/meta/recipes-support/taglib/taglib_2.0.bb
@@ -0,0 +1,42 @@
+SUMMARY = "Library for reading and editing the meta-data of popular audio formats"
+DESCRIPTION = "Platform-independent library (tested on Windows/Linux) for reading and writing metadata in media files, including video, audio, and photo formats. This is a convenient one-stop-shop to present or tag all your media collection, regardless of which format/container these might use. You can read/write the standard or more common tags/properties of a media, or you can also create and retrieve your own custom tags."
+SECTION = "libs/multimedia"
+HOMEPAGE = "http://taglib.github.io/"
+LICENSE = "LGPL-2.1-only | MPL-1.1"
+LIC_FILES_CHKSUM = "file://COPYING.LGPL;md5=4fbd65380cdd255951079008b364516c \
+ file://COPYING.MPL;md5=bfe1f75d606912a4111c90743d6c7325 \
+ file://taglib/audioproperties.h;beginline=1;endline=24;md5=9df2c7399519b7310568a7c55042ecee"
+
+DEPENDS = "zlib utfcpp"
+
+SRC_URI = "http://taglib.github.io/releases/${BP}.tar.gz"
+
+SRC_URI[sha256sum] = "e36ea877a6370810b97d84cf8f72b1e4ed205149ab3ac8232d44c850f38a2859"
+
+UPSTREAM_CHECK_URI = "https://taglib.org/"
+UPSTREAM_CHECK_REGEX = "taglib-(?P<pver>\d+(\.\d+)+)\.tar"
+
+BINCONFIG = "${bindir}/taglib-config"
+
+inherit cmake pkgconfig binconfig-disabled
+
+PACKAGES =+ "${PN}-c"
+FILES:${PN}-c = "${libdir}/libtag_c.so.*"
+
+EXTRA_OECMAKE = "-DBUILD_SHARED_LIBS=ON \
+ -DCMAKE_DISABLE_FIND_PACKAGE_Boost=TRUE \
+ -DHAVE_BOOST_BYTESWAP=FALSE \
+ -DCMAKE_CXX_STANDARD=11 \
+ -DCMAKE_CXX_STANDARD_REQUIRED=OFF \
+ -DLIB_SUFFIX=${@d.getVar('baselib').replace('lib', '')} \
+"
+CXXFLAGS += "-std=c++11"
+
+do_configure:prepend () {
+ rm -f ${S}/admin/ltmain.sh
+ rm -f ${S}/admin/libtool.m4.in
+}
+
+# without -fPIC depending packages failed with many error like:
+# | <...>/ld: error: <...>/usr/lib/libtag.a(modfilebase.cpp.o): requires unsupported dynamic reloc R_ARM_THM_MOVW_ABS_NC; recompile with -fPIC
+CXXFLAGS += "-fPIC"
diff --git a/meta/recipes-support/utfcpp/utfcpp_4.0.5.bb b/meta/recipes-support/utfcpp/utfcpp_4.0.5.bb
new file mode 100644
index 0000000000..5ac6fd369a
--- /dev/null
+++ b/meta/recipes-support/utfcpp/utfcpp_4.0.5.bb
@@ -0,0 +1,16 @@
+SUMMARY = " UTF-8 with C++ in a Portable Way"
+HOMEPAGE = "https://github.com/nemtrif/utfcpp"
+
+LICENSE = "BSL-1.0 & BSD-3-Clause"
+LIC_FILES_CHKSUM = "file://LICENSE;md5=e4224ccaecb14d942c71d31bef20d78c \
+ file://extern/ftest/LICENSE;md5=d33c6488d3b003723a5f17ac984db030"
+
+SRC_URI = "gitsm://github.com/nemtrif/utfcpp;protocol=https;branch=master"
+
+SRCREV = "6be08bbea14ffa0a5c594257fb6285a054395cd7"
+
+S = "${WORKDIR}/git"
+
+inherit cmake
+
+FILES:${PN}-dev += "${datadir}/utf8cpp/cmake"
diff --git a/meta/recipes-support/vim/files/no-path-adjust.patch b/meta/recipes-support/vim/files/no-path-adjust.patch
index 9d6da80913..908459a95e 100644
--- a/meta/recipes-support/vim/files/no-path-adjust.patch
+++ b/meta/recipes-support/vim/files/no-path-adjust.patch
@@ -1,4 +1,7 @@
-vim: do not adjust script pathnames
+From 4125a1ccb82fd53d003acdc34e462f238f0c4f0d Mon Sep 17 00:00:00 2001
+From: Joe Slater <joe.slater@windriver.com>
+Date: Fri, 8 Jul 2022 11:03:22 +0800
+Subject: [PATCH] vim: do not adjust script pathnames
When cross-compiling, we do not want to reference the host versions of
things like perl and awk.
@@ -6,24 +9,28 @@ things like perl and awk.
Upstream-Status: Pending
Signed-off-by: Joe Slater <joe.slater@windriver.com>
+Signed-off-by: Mingli Yu <mingli.yu@windriver.com>
+---
+ src/Makefile | 6 +-----
+ 1 file changed, 1 insertion(+), 5 deletions(-)
-Index: git/src/Makefile
-===================================================================
---- git.orig/src/Makefile
-+++ git/src/Makefile
-@@ -2565,11 +2565,14 @@ installtools: $(TOOLS) $(DESTDIR)$(exec_
+diff --git a/src/Makefile b/src/Makefile
+index c9513a632..7a7cbdc43 100644
+--- a/src/Makefile
++++ b/src/Makefile
+@@ -2534,11 +2534,7 @@ installtools: $(TOOLS) $(DESTDIR)$(exec_prefix) $(DEST_BIN) \
rm -rf $$cvs; \
fi
-chmod $(FILEMOD) $(DEST_TOOLS)/*
-# replace the path in some tools
-+
-+# replace the path in some tools, but not when cross-compiling
-+ifneq ($(CROSS_COMPILING),1)
- perlpath=`./which.sh perl` && sed -e "s+/usr/bin/perl+$$perlpath+" $(TOOLSSOURCE)/efm_perl.pl >$(DEST_TOOLS)/efm_perl.pl
- awkpath=`./which.sh nawk` && sed -e "s+/usr/bin/nawk+$$awkpath+" $(TOOLSSOURCE)/mve.awk >$(DEST_TOOLS)/mve.awk; if test -z "$$awkpath"; then \
- awkpath=`./which.sh gawk` && sed -e "s+/usr/bin/nawk+$$awkpath+" $(TOOLSSOURCE)/mve.awk >$(DEST_TOOLS)/mve.awk; if test -z "$$awkpath"; then \
- awkpath=`./which.sh awk` && sed -e "s+/usr/bin/nawk+$$awkpath+" $(TOOLSSOURCE)/mve.awk >$(DEST_TOOLS)/mve.awk; fi; fi
-+endif
+- perlpath=`./which.sh perl` && sed -e "s+/usr/bin/perl+$$perlpath+" $(TOOLSSOURCE)/efm_perl.pl >$(DEST_TOOLS)/efm_perl.pl
+- awkpath=`./which.sh nawk` && sed -e "s+/usr/bin/nawk+$$awkpath+" $(TOOLSSOURCE)/mve.awk >$(DEST_TOOLS)/mve.awk; if test -z "$$awkpath"; then \
+- awkpath=`./which.sh gawk` && sed -e "s+/usr/bin/nawk+$$awkpath+" $(TOOLSSOURCE)/mve.awk >$(DEST_TOOLS)/mve.awk; if test -z "$$awkpath"; then \
+- awkpath=`./which.sh awk` && sed -e "s+/usr/bin/nawk+$$awkpath+" $(TOOLSSOURCE)/mve.awk >$(DEST_TOOLS)/mve.awk; fi; fi
++# not replace the path in some tools
-chmod $(SCRIPTMOD) `grep -l "^#!" $(DEST_TOOLS)/*`
# install the language specific files for tools, if they were unpacked
+--
+2.25.1
+
diff --git a/meta/recipes-support/vim/files/racefix.patch b/meta/recipes-support/vim/files/racefix.patch
deleted file mode 100644
index 1cb8fb442f..0000000000
--- a/meta/recipes-support/vim/files/racefix.patch
+++ /dev/null
@@ -1,33 +0,0 @@
-The creation of the LINGUAS file is duplicated for each desktop file
-which can lead the commands to race against each other. Rework
-the makefile to avoid this as the expense of leaving the file on disk.
-
-Upstream-Status: Pending
-RP 2021/2/15
-
-Index: git/src/po/Makefile
-===================================================================
---- git.orig/src/po/Makefile
-+++ git/src/po/Makefile
-@@ -207,17 +207,16 @@ $(PACKAGE).pot: $(PO_INPUTLIST) $(PO_VIM
- # Delete the temporary files
- rm *.js
-
--vim.desktop: vim.desktop.in $(POFILES)
-+LINGUAS:
- echo $(LANGUAGES) | tr " " "\n" |sed -e '/\./d' | sort > LINGUAS
-+
-+vim.desktop: vim.desktop.in $(POFILES) LINGUAS
- $(MSGFMT) --desktop -d . --template vim.desktop.in -o tmp_vim.desktop
-- rm -f LINGUAS
- if command -v desktop-file-validate; then desktop-file-validate tmp_vim.desktop; fi
- mv tmp_vim.desktop vim.desktop
-
--gvim.desktop: gvim.desktop.in $(POFILES)
-- echo $(LANGUAGES) | tr " " "\n" |sed -e '/\./d' | sort > LINGUAS
-+gvim.desktop: gvim.desktop.in $(POFILES) LINGUAS
- $(MSGFMT) --desktop -d . --template gvim.desktop.in -o tmp_gvim.desktop
-- rm -f LINGUAS
- if command -v desktop-file-validate; then desktop-file-validate tmp_gvim.desktop; fi
- mv tmp_gvim.desktop gvim.desktop
-
diff --git a/meta/recipes-support/vim/vim-tiny_8.2.bb b/meta/recipes-support/vim/vim-tiny_9.1.bb
index e4c26d23f6..e4c26d23f6 100644
--- a/meta/recipes-support/vim/vim-tiny_8.2.bb
+++ b/meta/recipes-support/vim/vim-tiny_9.1.bb
diff --git a/meta/recipes-support/vim/vim.inc b/meta/recipes-support/vim/vim.inc
index c5922b7fcd..071deed338 100644
--- a/meta/recipes-support/vim/vim.inc
+++ b/meta/recipes-support/vim/vim.inc
@@ -10,31 +10,28 @@ DEPENDS = "ncurses gettext-native"
RSUGGESTS:${PN} = "diffutils"
LICENSE = "Vim"
-LIC_FILES_CHKSUM = "file://LICENSE;md5=6b30ea4fa660c483b619924bc709ef99 \
- file://runtime/doc/uganda.txt;md5=daf48235bb824c77fe8ae88d5f575f74"
+LIC_FILES_CHKSUM = "file://LICENSE;md5=d1a651ab770b45d41c0f8cb5a8ca930e"
SRC_URI = "git://github.com/vim/vim.git;branch=master;protocol=https \
file://disable_acl_header_check.patch \
file://vim-add-knob-whether-elf.h-are-checked.patch \
file://0001-src-Makefile-improve-reproducibility.patch \
file://no-path-adjust.patch \
- file://racefix.patch \
"
-PV .= ".4912"
-SRCREV = "a7583c42cd6b64fd276a5d7bb0db5ce7bfafa730"
-
-# Remove when 8.3 is out
-UPSTREAM_VERSION_UNKNOWN = "1"
+PV .= ".0114"
+SRCREV = "fcaed6a70faf73bff3e5405ada556d726024f866"
# Do not consider .z in x.y.z, as that is updated with every commit
UPSTREAM_CHECK_GITTAGREGEX = "(?P<pver>\d+\.\d+)\.0"
+# Ignore that the upstream version .z in x.y.z is always newer
+UPSTREAM_VERSION_UNKNOWN = "1"
S = "${WORKDIR}/git"
VIMDIR = "vim${@d.getVar('PV').split('.')[0]}${@d.getVar('PV').split('.')[1]}"
-inherit autotools-brokensep update-alternatives mime-xdg
+inherit autotools-brokensep update-alternatives mime-xdg pkgconfig
CLEANBROKEN = "1"
@@ -43,22 +40,18 @@ do_configure () {
cd src
rm -f auto/*
touch auto/config.mk
+ # git timestamps aren't reliable, so touch the shipped .po files so they aren't regenerated
+ touch -c po/cs.cp1250.po po/ja.euc-jp.po po/ja.sjis.po po/ko.po po/pl.UTF-8.po po/pl.cp1250.po po/ru.cp1251.po po/sk.cp1250.po po/uk.cp1251.po po/zh_CN.po po/zh_CN.cp936.po po/zh_TW.po
+ # ru.cp1251.po uses CP1251 rather than cp1251, fix that
+ sed -i -e s/CP1251/cp1251/ po/ru.cp1251.po
aclocal
autoconf
cd ..
oe_runconf
touch src/auto/configure
touch src/auto/config.mk src/auto/config.h
-}
-
-do_compile() {
- # We do not support fully / correctly the following locales. Attempting
- # to use these with msgfmt in order to update the ".desktop" files exposes
- # this problem and leads to the compile failing.
- for LOCALE in cs fr ko pl sk zh_CN zh_TW;do
- echo -n > src/po/${LOCALE}.po
- done
- autotools_do_compile
+ # need a native tool, not a target one
+ ${BUILD_CC} src/po/sjiscorr.c -o src/po/sjiscorr
}
PACKAGECONFIG ??= "\
@@ -83,6 +76,7 @@ EXTRA_OECONF = " \
--disable-netbeans \
--disable-desktop-database-update \
--with-tlib=ncurses \
+ --with-modified-by='${MAINTAINER}' \
ac_cv_small_wchar_t=no \
ac_cv_path_GLIB_COMPILE_RESOURCES=no \
vim_cv_getcwd_broken=no \
@@ -95,6 +89,10 @@ EXTRA_OECONF = " \
STRIP=/bin/true \
"
+# Some host distros don't have it, disable consistently
+EXTRA_OECONF:append:class-native = " vim_cv_timer_create=no"
+EXTRA_OECONF:append:class-target = " vim_cv_timer_create=yes"
+
do_install() {
autotools_do_install
diff --git a/meta/recipes-support/vim/vim_8.2.bb b/meta/recipes-support/vim/vim_8.2.bb
deleted file mode 100644
index f358e61132..0000000000
--- a/meta/recipes-support/vim/vim_8.2.bb
+++ /dev/null
@@ -1,19 +0,0 @@
-require vim.inc
-
-PROVIDES = "xxd"
-
-RDEPENDS:${PN} = "ncurses-terminfo-base"
-# Recommend that runtime data is installed along with vim
-RRECOMMENDS:${PN} = "${PN}-syntax ${PN}-help ${PN}-tutor ${PN}-vimrc ${PN}-common"
-
-PACKAGECONFIG:class-native = ""
-BBCLASSEXTEND = "native nativesdk"
-
-ALTERNATIVE:${PN}:append = " xxd"
-ALTERNATIVE_TARGET[xxd] = "${bindir}/xxd"
-ALTERNATIVE_LINK_NAME[xxd] = "${bindir}/xxd"
-
-# We override the default in security_flags.inc because vim (not vim-tiny!) will abort
-# in many places for _FORTIFY_SOURCE=2. Security flags become part of CC.
-#
-lcl_maybe_fortify = "${@oe.utils.conditional('DEBUG_BUILD','1','','-D_FORTIFY_SOURCE=1',d)}"
diff --git a/meta/recipes-support/vim/vim_9.1.bb b/meta/recipes-support/vim/vim_9.1.bb
new file mode 100644
index 0000000000..fee9f055e9
--- /dev/null
+++ b/meta/recipes-support/vim/vim_9.1.bb
@@ -0,0 +1,23 @@
+require vim.inc
+
+PROVIDES = "xxd"
+
+RDEPENDS:${PN} = "ncurses-terminfo-base ${PN}-xxd"
+# Recommend that runtime data is installed along with vim
+RRECOMMENDS:${PN} = "${PN}-syntax ${PN}-help ${PN}-tutor ${PN}-vimrc ${PN}-common"
+
+PACKAGECONFIG:class-native = ""
+BBCLASSEXTEND = "native nativesdk"
+
+PACKAGES =+ "${PN}-xxd"
+FILES:${PN}-xxd = "${bindir}/xxd"
+RPROVIDES:${PN}-xxd = "xxd"
+
+ALTERNATIVE:${PN}-xxd = "xxd"
+ALTERNATIVE_TARGET[xxd] = "${bindir}/xxd"
+ALTERNATIVE_LINK_NAME[xxd] = "${bindir}/xxd"
+
+# We override the default in security_flags.inc because vim (not vim-tiny!) will abort
+# in many places for _FORTIFY_SOURCE=2. Security flags become part of CC.
+#
+lcl_maybe_fortify = "${@oe.utils.conditional('DEBUG_BUILD','1','','-D_FORTIFY_SOURCE=1',d)}"
diff --git a/meta/recipes-support/vte/vte/0001-Add-W_EXITCODE-macro-for-non-glibc-systems.patch b/meta/recipes-support/vte/vte/0001-Add-W_EXITCODE-macro-for-non-glibc-systems.patch
index b4100fc381..8934d5f80a 100644
--- a/meta/recipes-support/vte/vte/0001-Add-W_EXITCODE-macro-for-non-glibc-systems.patch
+++ b/meta/recipes-support/vte/vte/0001-Add-W_EXITCODE-macro-for-non-glibc-systems.patch
@@ -11,32 +11,25 @@ Upstream-Status: Submitted [1]
Signed-off-by: Andreas Müller <schnitzeltony@gmail.com>
[1] https://gitlab.gnome.org/GNOME/vte/issues/72
-
---
- src/missing.hh | 4 ++++
- src/widget.cc | 1 +
- 2 files changed, 5 insertions(+)
+ src/widget.cc | 4 +++
+ 1 files changed, 4 insertions(+)
---- a/src/missing.hh
-+++ b/src/missing.hh
-@@ -24,6 +24,10 @@
- #define NSIG (8 * sizeof(sigset_t))
- #endif
+diff --git a/src/widget.cc b/src/widget.cc
+index 07f7cabf..31a77f68 100644
+--- a/src/widget.cc
++++ b/src/widget.cc
+@@ -16,6 +16,10 @@
+ * along with this library. If not, see <https://www.gnu.org/licenses/>.
+ */
+#ifndef W_EXITCODE
+#define W_EXITCODE(ret, sig) ((ret) << 8 | (sig))
+#endif
+
- #ifndef HAVE_FDWALK
- int fdwalk(int (*cb)(void* data, int fd),
- void* data);
---- a/src/widget.cc
-+++ b/src/widget.cc
-@@ -21,6 +21,7 @@
- #include "widget.hh"
-
- #include <sys/wait.h> // for W_EXITCODE
-+#include "missing.hh" // for W_EXITCODE on non-glibc systems
+ #include "config.h"
- #include <exception>
- #include <new>
+ #include "widget.hh"
+--
+2.42.0
+
diff --git a/meta/recipes-support/vte/vte/0001-Makefile.docs-correctly-substitute-gtkdoc-qemu-wrapp.patch b/meta/recipes-support/vte/vte/0001-Makefile.docs-correctly-substitute-gtkdoc-qemu-wrapp.patch
deleted file mode 100644
index c69a52e5d0..0000000000
--- a/meta/recipes-support/vte/vte/0001-Makefile.docs-correctly-substitute-gtkdoc-qemu-wrapp.patch
+++ /dev/null
@@ -1,24 +0,0 @@
-From daa30d0039397a735d49ea535305ed0bc5f9d73b Mon Sep 17 00:00:00 2001
-From: Alexander Kanavin <alex@linutronix.de>
-Date: Tue, 26 Oct 2021 09:38:42 +0200
-Subject: [PATCH] Makefile.docs: correctly substitute gtkdoc qemu wrapper
-
-Upstream-Status: Inappropriate [oe-core specific]
-Signed-off-by: Alexander Kanavin <alex@linutronix.de>
----
- doc/reference/Makefile.docs | 2 +-
- 1 file changed, 1 insertion(+), 1 deletion(-)
-
-diff --git a/doc/reference/Makefile.docs b/doc/reference/Makefile.docs
-index b18f0a4..da18440 100644
---- a/doc/reference/Makefile.docs
-+++ b/doc/reference/Makefile.docs
-@@ -233,7 +233,7 @@ INSTALL_DATA = $(INSTALL) -m 644
-
- GTKDOC_CC = $(CC) $(INCLUDES) $(GTKDOC_DEPS_CFLAGS) $(CPPFLAGS) $(CFLAGS)
- GTKDOC_LD = $(CC) $(GTKDOC_DEPS_LIBS) $(CFLAGS) $(LDFLAGS)
--GTKDOC_RUN =
-+GTKDOC_RUN = $(top_builddir)/gtkdoc-qemuwrapper
-
- GTKDOC_CHECK_PATH = gtkdoc-check
- GTKDOC_REBASE = gtkdoc-rebase
diff --git a/meta/recipes-support/vte/vte_0.68.0.bb b/meta/recipes-support/vte/vte_0.68.0.bb
deleted file mode 100644
index fc4324872d..0000000000
--- a/meta/recipes-support/vte/vte_0.68.0.bb
+++ /dev/null
@@ -1,62 +0,0 @@
-SUMMARY = "Virtual terminal emulator GTK+ widget library"
-DESCRIPTION = "VTE provides a virtual terminal widget for GTK applications."
-HOMEPAGE = "https://wiki.gnome.org/Apps/Terminal/VTE"
-BUGTRACKER = "https://bugzilla.gnome.org/buglist.cgi?product=vte"
-LICENSE = "GPL-3.0-only & LGPL-3.0-or-later & MIT"
-LICENSE:libvte = "LGPL-3.0-or-later"
-
-LIC_FILES_CHKSUM = " \
- file://COPYING.GPL3;md5=cc702cf3444d1f19680c794cc61948f9 \
- file://COPYING.LGPL3;md5=b52f2d57d10c4f7ee67a7eb9615d5d24 \
- file://COPYING.XTERM;md5=d7fc3a23c16c039afafe2e042030f057 \
-"
-
-DEPENDS = "glib-2.0 gtk+3 libpcre2 libxml2-native gperf-native icu"
-
-GNOMEBASEBUILDCLASS = "meson"
-GIR_MESON_OPTION = 'gir'
-
-inherit gnomebase gtk-doc features_check upstream-version-is-even gobject-introspection
-
-# vapigen.m4 is required when vala is not present (but the one from vala should be used normally)
-SRC_URI += "file://0001-Add-W_EXITCODE-macro-for-non-glibc-systems.patch \
- file://0001-Makefile.docs-correctly-substitute-gtkdoc-qemu-wrapp.patch"
-SRC_URI[archive.sha256sum] = "13e7d4789ca216a33780030d246c9b13ddbfd04094c6316eea7ff92284dd1749"
-
-ANY_OF_DISTRO_FEATURES = "${GTK3DISTROFEATURES}"
-
-# Instead of "inherit vala" we do the relevant settings here so we can
-# set DEPENDS based on PACKAGECONFIG.
-
-# Our patched version of Vala looks in STAGING_DATADIR for .vapi files
-export STAGING_DATADIR
-# Upstream Vala >= 0.11 looks in XDG_DATA_DIRS for .vapi files
-export XDG_DATA_DIRS = "${STAGING_DATADIR}"
-
-# Help g-ir-scanner find the .so for linking
-do_compile:prepend() {
- export GIR_EXTRA_LIBS_PATH="${B}/src/.libs"
-}
-
-# Package additional files
-FILES:${PN}-dev += "${datadir}/vala/vapi/*"
-
-PACKAGECONFIG ??= "gnutls"
-PACKAGECONFIG[vala] = "-Dvapi=true,-Dvapi=false,vala-native vala"
-PACKAGECONFIG[gnutls] = "-Dgnutls=true,-Dgnutls=false,gnutls"
-PACKAGECONFIG[systemd] = "-D_systemd=true,-D_systemd=false,systemd"
-# vala requires gir
-PACKAGECONFIG:remove:class-native = "vala"
-
-CFLAGS += "-D_GNU_SOURCE"
-
-PACKAGES =+ "libvte ${PN}-prompt"
-FILES:libvte = "${libdir}/*.so.* ${libdir}/girepository-1.0/*"
-FILES:${PN}-prompt = " \
- ${sysconfdir}/profile.d \
- ${libexecdir}/vte-urlencode-cwd \
-"
-
-FILES:${PN}-dev += "${datadir}/glade/"
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-support/vte/vte_0.74.2.bb b/meta/recipes-support/vte/vte_0.74.2.bb
new file mode 100644
index 0000000000..d8eafde2fb
--- /dev/null
+++ b/meta/recipes-support/vte/vte_0.74.2.bb
@@ -0,0 +1,56 @@
+SUMMARY = "Virtual terminal emulator GTK+ widget library"
+DESCRIPTION = "VTE provides a virtual terminal widget for GTK applications."
+HOMEPAGE = "https://wiki.gnome.org/Apps/Terminal/VTE"
+BUGTRACKER = "https://bugzilla.gnome.org/buglist.cgi?product=vte"
+LICENSE = "GPL-3.0-only & LGPL-3.0-or-later & MIT"
+LICENSE:libvte = "LGPL-3.0-or-later"
+
+LIC_FILES_CHKSUM = " \
+ file://COPYING.GPL3;md5=cc702cf3444d1f19680c794cc61948f9 \
+ file://COPYING.LGPL3;md5=b52f2d57d10c4f7ee67a7eb9615d5d24 \
+ file://COPYING.XTERM;md5=d7fc3a23c16c039afafe2e042030f057 \
+"
+
+DEPENDS = "glib-2.0 glib-2.0-native gtk+3 libpcre2 libxml2-native gperf-native icu"
+
+GIR_MESON_OPTION = 'gir'
+GIDOCGEN_MESON_OPTION = "docs"
+
+inherit gnomebase gi-docgen features_check upstream-version-is-even gobject-introspection systemd vala
+
+SRC_URI += "file://0001-Add-W_EXITCODE-macro-for-non-glibc-systems.patch"
+SRC_URI[archive.sha256sum] = "a535fb2a98fea8a2449cd1a02cccf5190131dddff52e715afdace3feb536eae7"
+
+ANY_OF_DISTRO_FEATURES = "${GTK3DISTROFEATURES}"
+
+EXTRA_OEMESON += "${@bb.utils.contains('GI_DATA_ENABLED', 'True', '-Dvapi=true', '-Dvapi=false', d)}"
+EXTRA_OEMESON:append = " ${@bb.utils.contains('GI_DATA_ENABLED', 'False', '-Ddocs=false', '', d)}"
+
+PACKAGECONFIG ??= " \
+ gnutls \
+ ${@bb.utils.filter('DISTRO_FEATURES', 'systemd', d)} \
+ ${@bb.utils.contains('DISTRO_FEATURES', 'opengl', 'gtk4', '', d)} \
+"
+PACKAGECONFIG[fribidi] = "-Dfribidi=true,-Dfribidi=false,fribidi"
+PACKAGECONFIG[gtk4] = "-Dgtk4=true,-Dgtk4=false,gtk4"
+PACKAGECONFIG[gnutls] = "-Dgnutls=true,-Dgnutls=false,gnutls"
+PACKAGECONFIG[systemd] = "-D_systemd=true,-D_systemd=false,systemd"
+
+PACKAGES =+ "libvte-gtk4 ${PN}-gtk4 ${PN}-gtk4-dev libvte ${PN}-prompt"
+FILES:libvte-gtk4 = "${libdir}/lib*gtk4.so.* ${libdir}/girepository-1.0/Vte-3.91.typelib"
+FILES:${PN}-gtk4 ="${bindir}/vte-2.91-gtk4"
+FILES:${PN}-gtk4-dev = "${libdir}/lib*gtk4.so \
+ ${libdir}/pkgconfig/vte-2.91-gtk4.pc \
+ ${datadir}/gir-1.0/Vte-3.91.gir \
+ ${datadir}/vala/vapi/vte-2.91-gtk4.deps \
+ ${datadir}/vala/vapi/vte-2.91-gtk4.vapi \
+ ${includedir}/vte-2.91-gtk4 \
+ "
+FILES:${PN} +="${systemd_user_unitdir}"
+FILES:libvte = "${libdir}/*.so.* ${libdir}/girepository-1.0/*"
+FILES:${PN}-prompt = " \
+ ${sysconfdir}/profile.d \
+ ${libexecdir}/vte-urlencode-cwd \
+"
+
+FILES:${PN}-dev += "${datadir}/glade/"
diff --git a/meta/recipes-support/xxhash/xxhash_0.8.1.bb b/meta/recipes-support/xxhash/xxhash_0.8.1.bb
deleted file mode 100644
index b3b9702598..0000000000
--- a/meta/recipes-support/xxhash/xxhash_0.8.1.bb
+++ /dev/null
@@ -1,25 +0,0 @@
-SUMMARY = "Extremely fast non-cryptographic hash algorithm"
-DESCRIPTION = "xxHash is an extremely fast non-cryptographic hash algorithm, \
-working at speeds close to RAM limits."
-HOMEPAGE = "http://www.xxhash.com/"
-LICENSE = "BSD-2-Clause & GPL-2.0-only"
-LIC_FILES_CHKSUM = "file://LICENSE;md5=cdfe7764d5685d8e08b3df302885d7f3 \
- file://cli/COPYING;md5=b234ee4d69f5fce4486a80fdaf4a4263 \
- "
-
-SRC_URI = "git://github.com/Cyan4973/xxHash.git;branch=release;protocol=https"
-UPSTREAM_CHECK_GITTAGREGEX = "v(?P<pver>\d+(\.\d+)+)"
-
-SRCREV = "35b0373c697b5f160d3db26b1cbb45a0d5ba788c"
-
-S = "${WORKDIR}/git"
-
-do_compile () {
- oe_runmake all
-}
-
-do_install () {
- oe_runmake DESTDIR=${D} install
-}
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-support/xxhash/xxhash_0.8.2.bb b/meta/recipes-support/xxhash/xxhash_0.8.2.bb
new file mode 100644
index 0000000000..f16b183c95
--- /dev/null
+++ b/meta/recipes-support/xxhash/xxhash_0.8.2.bb
@@ -0,0 +1,27 @@
+SUMMARY = "Extremely fast non-cryptographic hash algorithm"
+DESCRIPTION = "xxHash is an extremely fast non-cryptographic hash algorithm, \
+working at speeds close to RAM limits."
+HOMEPAGE = "http://www.xxhash.com/"
+LICENSE = "BSD-2-Clause & GPL-2.0-only"
+LIC_FILES_CHKSUM = "file://LICENSE;md5=13be6b481ff5616f77dda971191bb29b \
+ file://cli/COPYING;md5=b234ee4d69f5fce4486a80fdaf4a4263 \
+ "
+
+SRC_URI = "git://github.com/Cyan4973/xxHash.git;branch=release;protocol=https"
+UPSTREAM_CHECK_GITTAGREGEX = "v(?P<pver>\d+(\.\d+)+)"
+
+SRCREV = "bbb27a5efb85b92a0486cf361a8635715a53f6ba"
+
+S = "${WORKDIR}/git"
+
+CFLAGS += "${@bb.utils.contains('SELECTED_OPTIMIZATION', '-Og', '-DXXH_NO_INLINE_HINTS', '', d)}"
+
+do_compile () {
+ oe_runmake all
+}
+
+do_install () {
+ oe_runmake DESTDIR=${D} install
+}
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/site/arc-common b/meta/site/arc-common
index d3c721a8bb..5cdad0b07b 100644
--- a/meta/site/arc-common
+++ b/meta/site/arc-common
@@ -3,9 +3,5 @@ ac_cv_alignof_guint32=4
ac_cv_alignof_guint64=4
ac_cv_alignof_unsigned_long=4
-# glib-2.0
-glib_cv_stack_grows=${glib_cv_stack_grows=no}
-glib_cv_uscore=${glib_cv_uscore=no}
-
# startup-notification
lf_cv_sane_realloc=${lf_cv_sane_realloc=yes}
diff --git a/meta/site/arm-32 b/meta/site/arm-32
index 60ada2e974..be741617b6 100644
--- a/meta/site/arm-32
+++ b/meta/site/arm-32
@@ -4,30 +4,5 @@
ac_cv_sizeof_size_t=${ac_cv_sizeof_size_t=4}
ac_cv_sizeof_ssize_t=${ac_cv_sizeof_ssize_t=4}
-# glib
-glib_cv_sizeof_gmutex=${glib_cv_sizeof_gmutex=24}
-glib_cv_sizeof_system_thread=${glib_cv_sizeof_system_thread=4}
-glib_cv_sizeof_gmutex=${glib_cv_sizeof_gmutex=24}
-
-# glib-2.0
-glib_cv_sizeof_gmutex=${glib_cv_sizeof_gmutex=24}
-glib_cv_sizeof_intmax_t=${glib_cv_sizeof_intmax_t=8}
-glib_cv_sizeof_ptrdiff_t=${glib_cv_sizeof_ptrdiff_t=4}
-glib_cv_sizeof_size_t=${glib_cv_sizeof_size_t=4}
-glib_cv_sizeof_system_thread=${glib_cv_sizeof_system_thread=4}
-ac_cv_alignof_guint32=4
-ac_cv_alignof_guint64=8
-ac_cv_alignof_unsigned_long=4
-
# jikes
ac_cv_sizeof_wchar_t=4
-
-# at-spi2-core
-ac_cv_alignof_char=1
-ac_cv_alignof_dbind_pointer=4
-ac_cv_alignof_dbind_struct=1
-ac_cv_alignof_dbus_bool_t=4
-ac_cv_alignof_dbus_int16_t=2
-ac_cv_alignof_dbus_int32_t=4
-ac_cv_alignof_dbus_int64_t=8
-ac_cv_alignof_double=8
diff --git a/meta/site/arm-64 b/meta/site/arm-64
index 40763fb059..f4eae88471 100644
--- a/meta/site/arm-64
+++ b/meta/site/arm-64
@@ -6,27 +6,3 @@ ac_cv_sizeof_size_t=8
ac_cv_sizeof_ssize_t=8
ac_cv_alignof_char=1
ac_cv_alignof_double=8
-
-# glib
-#glib_cv_sizeof_gmutex=${glib_cv_sizeof_gmutex=24}
-#glib_cv_sizeof_system_thread=${glib_cv_sizeof_system_thread=4}
-#glib_cv_sizeof_gmutex=${glib_cv_sizeof_gmutex=24}
-
-# glib-2.0
-#glib_cv_sizeof_gmutex=${glib_cv_sizeof_gmutex=24}
-#glib_cv_sizeof_intmax_t=${glib_cv_sizeof_intmax_t=8}
-#glib_cv_sizeof_ptrdiff_t=${glib_cv_sizeof_ptrdiff_t=4}
-#glib_cv_sizeof_size_t=${glib_cv_sizeof_size_t=4}
-#glib_cv_sizeof_system_thread=${glib_cv_sizeof_system_thread=4}
-ac_cv_alignof_guint32=4
-ac_cv_alignof_guint64=8
-ac_cv_alignof_unsigned_long=8
-
-# at-spi2-core
-ac_cv_alignof_dbind_pointer=8
-ac_cv_alignof_dbind_struct=1
-ac_cv_alignof_dbus_bool_t=4
-ac_cv_alignof_dbus_int16_t=2
-ac_cv_alignof_dbus_int32_t=4
-ac_cv_alignof_dbus_int64_t=8
-
diff --git a/meta/site/arm-common b/meta/site/arm-common
index 4c25c82be9..18c6d42200 100644
--- a/meta/site/arm-common
+++ b/meta/site/arm-common
@@ -41,25 +41,6 @@ ac_cv_func_fnmatch_works=${ac_cv_func_fnmatch_works=yes}
# gettext
am_cv_func_working_getline=${am_cv_func_working_getline=yes}
-# glib
-glib_cv_stack_grows=${glib_cv_stack_grows=no}
-glib_cv_uscore=${glib_cv_uscore=no}
-glib_cv_use_pid_surrogate=${glib_cv_use_pid_surrogate=yes}
-glib_cv_has__inline=${glib_cv_has__inline=yes}
-glib_cv_has__inline__=${glib_cv_has__inline__=yes}
-glib_cv_hasinline=${glib_cv_hasinline=yes}
-glib_cv_sane_realloc=${glib_cv_sane_realloc=yes}
-glib_cv_uscore=${glib_cv_uscore=no}
-glib_cv_va_val_copy=${glib_cv_va_val_copy=yes}
-glib_cv_rtldglobal_broken=${glib_cv_rtldglobal_broken=no}
-glib_cv_sys_pthread_mutex_trylock_posix=${glib_cv_sys_pthread_mutex_trylock_posix=yes}
-glib_cv_sys_pthread_getspecific_posix=${glib_cv_sys_pthread_getspecific_posix=yes}
-glib_cv_sys_pthread_cond_timedwait_posix=${glib_cv_sys_pthread_cond_timedwait_posix=yes}
-
-# glib-2.0
-glib_cv_long_long_format=${glib_cv_long_long_format=ll}
-glib_cv_sys_use_pid_niceness_surrogate=${glib_cv_sys_use_pid_niceness_surrogate=yes}
-
#gstreamer
as_cv_unaligned_access=${as_cv_unaligned_access=no}
diff --git a/meta/site/arm-darwin b/meta/site/arm-darwin
index 09ff097aa0..00d9a64aa7 100644
--- a/meta/site/arm-darwin
+++ b/meta/site/arm-darwin
@@ -1,7 +1,5 @@
ac_cv_lib_m_sin=${ac_cv_lib_m_sin=yes}
ac_cv_func_posix_spawn=${ac_cv_func_posix_spawn=no}
-glib_cv_stack_grows=${glib_cv_stack_grows=no}
-glib_cv_uscore=${glib_cv_uscore=no}
ac_cv_func_posix_getpwnam_r=${ac_cv_func_posix_getpwnam_r=no}
ac_cv_func_posix_getpwuid_r=${ac_cv_func_posix_getpwuid_r=no}
ac_cv_func_posix_getgrgid_r=${ac_cv_func_posix_getgrgid_r=no}
diff --git a/meta/site/arm-darwin8 b/meta/site/arm-darwin8
index 09ff097aa0..00d9a64aa7 100644
--- a/meta/site/arm-darwin8
+++ b/meta/site/arm-darwin8
@@ -1,7 +1,5 @@
ac_cv_lib_m_sin=${ac_cv_lib_m_sin=yes}
ac_cv_func_posix_spawn=${ac_cv_func_posix_spawn=no}
-glib_cv_stack_grows=${glib_cv_stack_grows=no}
-glib_cv_uscore=${glib_cv_uscore=no}
ac_cv_func_posix_getpwnam_r=${ac_cv_func_posix_getpwnam_r=no}
ac_cv_func_posix_getpwuid_r=${ac_cv_func_posix_getpwuid_r=no}
ac_cv_func_posix_getgrgid_r=${ac_cv_func_posix_getgrgid_r=no}
diff --git a/meta/site/arm-linux b/meta/site/arm-linux
index fb3c81d0e9..c4205a4a2e 100644
--- a/meta/site/arm-linux
+++ b/meta/site/arm-linux
@@ -17,7 +17,7 @@ ac_cv_sizeof_long_p=${ac_cv_sizeof_long_p=4}
ac_cv_sizeof_float=${ac_cv_sizeof_float=4}
ac_cv_sizeof_double=${ac_cv_sizeof_double=8}
ac_cv_sizeof_long_double=${ac_cv_sizeof_long_double=8}
-ac_cv_sizeof_ptrdiff_t=${glib_cv_sizeof_ptrdiff_t=4}
+ac_cv_sizeof_ptrdiff_t=${ac_cv_sizeof_ptrdiff_t=4}
ac_cv_sizeof_unsigned_short=${ac_cv_sizeof_unsigned_short=2}
ac_cv_sizeof_unsigned=${ac_cv_sizeof_unsigned=4}
ac_cv_sizeof_unsigned_int=${ac_cv_sizeof_unsigned_int=4}
diff --git a/meta/site/armeb-linux b/meta/site/armeb-linux
index 6521ff584a..3e57e1843c 100644
--- a/meta/site/armeb-linux
+++ b/meta/site/armeb-linux
@@ -12,7 +12,7 @@ ac_cv_sizeof_void_p=${ac_cv_sizeof_void_p=4}
ac_cv_sizeof_long_double=${ac_cv_sizeof_long_double=8}
ac_cv_sizeof_char_p=${ac_cv_sizeof_char_p=4}
ac_cv_sizeof_unsigned=${ac_cv_sizeof_unsigned=4}
-ac_cv_sizeof_ptrdiff_t=${glib_cv_sizeof_ptrdiff_t=4}
+ac_cv_sizeof_ptrdiff_t=${ac_cv_sizeof_ptrdiff_t=4}
ac_cv_uchar=${ac_cv_uchar=no}
ac_cv_uint=${ac_cv_uint=yes}
diff --git a/meta/site/common-glibc b/meta/site/common-glibc
index 0a665658fb..958699b143 100644
--- a/meta/site/common-glibc
+++ b/meta/site/common-glibc
@@ -12,7 +12,6 @@ ac_cv_func_getgrgid_r=${ac_cv_func_getgrgid_r=yes}
ac_cv_func_getpwuid_r=${ac_cv_func_getpwuid_r=yes}
ac_cv_func_posix_getpwuid_r=${ac_cv_func_posix_getpwuid_r=yes}
ac_cv_func_posix_getgrgid_r=${ac_cv_func_posix_getgrgid_r=yes}
-ac_cv_type_uid_t={ac_cv_type_uid_t=yes}
ac_cv_func_getaddrinfo=${ac_cv_func_getaddrinfo=yes}
ac_cv_func_strtod=${ac_cv_func_strtod=yes}
@@ -36,19 +35,6 @@ gl_cv_func_working_mkstemp=${gl_cv_func_working_mkstemp=yes}
gl_cv_func_working_mktime=${gl_cv_func_working_mktime=yes}
gl_cv_func_working_utimes=${gl_cv_func_working_utimes=yes}
-# glib
-glib_cv_strlcpy=${glib_cv_strlcpy=no}
-ac_cv_func_printf_unix98=${ac_cv_func_printf_unix98=yes}
-ac_cv_func_snprintf_c99=${ac_cv_func_snprintf_c99=yes}
-ac_cv_func_vsnprintf_c99=${ac_cv_func_vsnprintf_c99=yes}
-glib_cv_compliant_posix_memalign=${glib_cv_compliant_posix_memalign=1}
-glib_cv_long_long_format=${glib_cv_long_long_format=ll}
-# glib-1.x requires this and pkgconfig-native / pkgconfig-nativesdk use
-# that to avoid a dependency loop.
-glib_cv___va_copy=${glib_cv___va_copy=yes}
-glib_cv_va_copy=${glib_cv_va_copy=yes}
-glib_cv_have_qsort_r=${glib_cv_have_qsort_r=yes}
-
#dbus-glib
ac_cv_have_abstract_sockets=${ac_cv_have_abstract_sockets=yes}
diff --git a/meta/site/common-musl b/meta/site/common-musl
index 86aed4a38c..0fd0413819 100644
--- a/meta/site/common-musl
+++ b/meta/site/common-musl
@@ -15,15 +15,8 @@ ac_cv_func_posix_getgrgid_r=${ac_cv_func_posix_getgrgid_r=yes}
ac_cv_func_getaddrinfo=${ac_cv_func_getaddrinfo=yes}
ac_cv_func_mmap_fixed_mapped=${ac_cv_func_mmap_fixed_mapped=yes}
ac_cv_func_strtod=${ac_cv_func_strtod=yes}
-
-# glib
-glib_cv_strlcpy=${glib_cv_strlcpy=no}
-ac_cv_func_printf_unix98=${ac_cv_func_printf_unix98=yes}
-ac_cv_func_snprintf_c99=${ac_cv_func_snprintf_c99=yes}
-ac_cv_func_vsnprintf_c99=${ac_cv_func_vsnprintf_c99=yes}
-glib_cv_compliant_posix_memalign=${glib_cv_compliant_posix_memalign=1}
-glib_cv_long_long_format=${glib_cv_long_long_format=ll}
-glib_cv_have_qsort_r=${glib_cv_have_qsort_r=yes}
+# off_t is always 64bit on musl systems
+ac_cv_sys_file_offset_bits=${ac_cv_sys_file_offset_bits=64}
#dbus-glib
ac_cv_have_abstract_sockets=${ac_cv_have_abstract_sockets=yes}
diff --git a/meta/site/ix86-common b/meta/site/ix86-common
index d98ae5265b..eae5670d93 100644
--- a/meta/site/ix86-common
+++ b/meta/site/ix86-common
@@ -69,28 +69,6 @@ ettercap_cv_type_socklen_t=${ettercap_cv_type_socklen_t=yes}
# gettext
am_cv_func_working_getline=${am_cv_func_working_getline=yes}
-# glib
-glib_cv_sizeof_system_thread=${glib_cv_sizeof_system_thread=4}
-glib_cv_has__inline=${glib_cv_has__inline=yes}
-glib_cv_has__inline__=${glib_cv_has__inline__=yes}
-glib_cv_hasinline=${glib_cv_hasinline=yes}
-glib_cv_sane_realloc=${glib_cv_sane_realloc=yes}
-glib_cv_sizeof_gmutex=${glib_cv_sizeof_gmutex=24}
-glib_cv_sys_pthread_cond_timedwait_posix=${glib_cv_sys_pthread_cond_timedwait_posix=yes}
-glib_cv_sys_pthread_getspecific_posix=${glib_cv_sys_pthread_getspecific_posix=yes}
-glib_cv_sys_pthread_mutex_trylock_posix=${glib_cv_sys_pthread_mutex_trylock_posix=yes}
-glib_cv_uscore=${glib_cv_uscore=no}
-glib_cv_va_val_copy=${glib_cv_va_val_copy=yes}
-glib_cv_rtldglobal_broken=${glib_cv_rtldglobal_broken=yes}
-
-# glib-2.0
-glib_cv_stack_grows=${glib_cv_stack_grows=no}
-utils_cv_sys_open_max=${utils_cv_sys_open_max=1015}
-glib_cv_use_pid_surrogate=${glib_cv_use_pid_surrogate=yes}
-ac_cv_alignof_guint32=4
-ac_cv_alignof_guint64=4
-ac_cv_alignof_unsigned_long=4
-
# guile
ac_cv_uchar=${ac_cv_uchar=no}
ac_cv_uint=${ac_cv_uint=yes}
@@ -192,13 +170,3 @@ ac_cv_lib_c_signalfd=${ac_cv_lib_c_signalfd=yes}
nfsd_cv_broken_setfsuid=${nfsd_cv_broken_setfsuid=0}
nfsd_cv_func_statfs=${nfsd_cv_func_statfs=statfs2_bsize}
nfsd_cv_bsd_signals=${nfsd_cv_bsd_signals=yes}
-
-# at-spi2-core
-ac_cv_alignof_char=1
-ac_cv_alignof_dbind_pointer=4
-ac_cv_alignof_dbind_struct=1
-ac_cv_alignof_dbus_bool_t=4
-ac_cv_alignof_dbus_int16_t=2
-ac_cv_alignof_dbus_int32_t=4
-ac_cv_alignof_dbus_int64_t=4
-ac_cv_alignof_double=4
diff --git a/meta/site/loongarch64-linux b/meta/site/loongarch64-linux
new file mode 100644
index 0000000000..384d3bd877
--- /dev/null
+++ b/meta/site/loongarch64-linux
@@ -0,0 +1,7 @@
+# general
+ac_cv_alignof_guint32=4
+ac_cv_alignof_guint64=8
+ac_cv_alignof_unsigned_long=8
+
+# startup-notification
+lf_cv_sane_realloc=${lf_cv_sane_realloc=yes}
diff --git a/meta/site/microblaze-linux b/meta/site/microblaze-linux
index c6a40c6bec..61dcd43e8c 100644
--- a/meta/site/microblaze-linux
+++ b/meta/site/microblaze-linux
@@ -26,29 +26,6 @@ ac_cv_uint=${ac_cv_uint=yes}
ac_cv_ulong=${ac_cv_ulong=yes}
ac_cv_ushort=${ac_cv_ushort=yes}
-# glib
-glib_cv_stack_grows=${glib_cv_stack_grows=no}
-glib_cv_has__inline=${glib_cv_has__inline=yes}
-glib_cv_has__inline__=${glib_cv_has__inline__=yes}
-glib_cv_hasinline=${glib_cv_hasinline=yes}
-glib_cv_sane_realloc=${glib_cv_sane_realloc=yes}
-glib_cv_uscore=${glib_cv_uscore=no}
-glib_cv_va_copy=${glib_cv_va_copy=yes}
-glib_cv_va_val_copy=${glib_cv_va_val_copy=yes}
-glib_cv___va_copy=${glib_cv___va_copy=yes}
-
-# glib-2.0
-glib_cv_long_long_format=${glib_cv_long_long_format=ll}
-ac_cv_alignof_guint32=4
-ac_cv_alignof_guint64=8
-ac_cv_alignof_unsigned_long=4
-glib_cv_sizeof_gmutex=${glib_cv_sizeof_gmutex=24}
-glib_cv_sizeof_intmax_t=${glib_cv_sizeof_intmax_t=8}
-glib_cv_sizeof_ptrdiff_t=${glib_cv_sizeof_ptrdiff_t=4}
-glib_cv_sizeof_size_t=${glib_cv_sizeof_size_t=4}
-glib_cv_sizeof_system_thread=${glib_cv_sizeof_system_thread=4}
-glib_cv_sys_use_pid_niceness_surrogate=${glib_cv_sys_use_pid_niceness_surrogate=yes}
-
# startup-notification
lf_cv_sane_realloc=yes
diff --git a/meta/site/mips-common b/meta/site/mips-common
index 892817b108..55dc50beb1 100644
--- a/meta/site/mips-common
+++ b/meta/site/mips-common
@@ -18,22 +18,5 @@ screen_cv_sys_terminfo_used=${screen_cv_sys_terminfo_used=yes}
# apache
ac_cv_sizeof_ssize_t=${ac_cv_sizeof_ssize_t=4}
-# glib
-glib_cv_va_val_copy=${glib_cv_va_val_copy=yes}
-ac_cv_alignof_guint32=4
-ac_cv_alignof_guint64=8
-ac_cv_alignof_unsigned_long=4
-
-
# slrn
slrn_cv_va_val_copy=${slrn_cv_va_val_copy=yes}
-
-# at-spi2-core
-ac_cv_alignof_char=1
-ac_cv_alignof_dbind_pointer=4
-ac_cv_alignof_dbind_struct=1
-ac_cv_alignof_dbus_bool_t=4
-ac_cv_alignof_dbus_int16_t=2
-ac_cv_alignof_dbus_int32_t=4
-ac_cv_alignof_dbus_int64_t=8
-ac_cv_alignof_double=8
diff --git a/meta/site/mips-linux b/meta/site/mips-linux
index b2f15eee18..91570ec309 100644
--- a/meta/site/mips-linux
+++ b/meta/site/mips-linux
@@ -15,24 +15,6 @@ ac_cv_have_space_d_name_in_struct_dirent=${ac_cv_have_space_d_name_in_struct_dir
# fget
compat_cv_func_snprintf_works=${compat_cv_func_snprintf_works=yes}
-# glib
-glib_cv___va_copy=${glib_cv___va_copy=yes}
-glib_cv_has__inline=${glib_cv_has__inline=yes}
-glib_cv_has__inline__=${glib_cv_has__inline__=yes}
-glib_cv_hasinline=${glib_cv_hasinline=yes}
-glib_cv_long_long_format=${glib_cv_long_long_format=ll}
-glib_cv_rtldglobal_broken=${glib_cv_rtldglobal_broken=no}
-glib_cv_sane_realloc=${glib_cv_sane_realloc=yes}
-glib_cv_sizeof_gmutex=${glib_cv_sizeof_gmutex=24}
-glib_cv_sizeof_system_thread=${glib_cv_sizeof_system_thread=4}
-glib_cv_stack_grows=${glib_cv_stack_grows=no}
-glib_cv_uscore=${glib_cv_uscore=no}
-
-# glib-2.0
-glib_cv_stack_grows=${glib_cv_stack_grows=no}
-utils_cv_sys_open_max=${utils_cv_sys_open_max=1015}
-glib_cv_use_pid_surrogate=${glib_cv_use_pid_surrogate=yes}
-
# libpcap
ac_cv_linux_vers=${ac_cv_linux_vers=2}
diff --git a/meta/site/mips64-linux b/meta/site/mips64-linux
index e7c16405eb..7e05b94a09 100644
--- a/meta/site/mips64-linux
+++ b/meta/site/mips64-linux
@@ -15,28 +15,6 @@ ac_cv_have_space_d_name_in_struct_dirent=${ac_cv_have_space_d_name_in_struct_dir
# fget
compat_cv_func_snprintf_works=${compat_cv_func_snprintf_works=yes}
-# glib
-glib_cv___va_copy=${glib_cv___va_copy=yes}
-glib_cv_has__inline=${glib_cv_has__inline=yes}
-glib_cv_has__inline__=${glib_cv_has__inline__=yes}
-glib_cv_hasinline=${glib_cv_hasinline=yes}
-glib_cv_long_long_format=${glib_cv_long_long_format=ll}
-glib_cv_rtldglobal_broken=${glib_cv_rtldglobal_broken=no}
-glib_cv_sane_realloc=${glib_cv_sane_realloc=yes}
-glib_cv_sizeof_gmutex=${glib_cv_sizeof_gmutex=24}
-glib_cv_sizeof_system_thread=${glib_cv_sizeof_system_thread=4}
-glib_cv_stack_grows=${glib_cv_stack_grows=no}
-glib_cv_uscore=${glib_cv_uscore=no}
-
-# glib-2.0
-glib_cv_stack_grows=${glib_cv_stack_grows=no}
-utils_cv_sys_open_max=${utils_cv_sys_open_max=1015}
-glib_cv_use_pid_surrogate=${glib_cv_use_pid_surrogate=yes}
-ac_cv_alignof_guint32=4
-ac_cv_alignof_guint64=8
-ac_cv_alignof_unsigned_long=8
-ac_cv_sizeof_ssize_t=8
-
# libpcap
ac_cv_linux_vers=${ac_cv_linux_vers=2}
diff --git a/meta/site/mips64el-linux b/meta/site/mips64el-linux
index 406ffd66c5..1e1d7687c2 100644
--- a/meta/site/mips64el-linux
+++ b/meta/site/mips64el-linux
@@ -15,28 +15,6 @@ ac_cv_have_space_d_name_in_struct_dirent=${ac_cv_have_space_d_name_in_struct_dir
# fget
compat_cv_func_snprintf_works=${compat_cv_func_snprintf_works=yes}
-# glib
-glib_cv___va_copy=${glib_cv___va_copy=yes}
-glib_cv_has__inline=${glib_cv_has__inline=yes}
-glib_cv_has__inline__=${glib_cv_has__inline__=yes}
-glib_cv_hasinline=${glib_cv_hasinline=yes}
-glib_cv_long_long_format=${glib_cv_long_long_format=ll}
-glib_cv_rtldglobal_broken=${glib_cv_rtldglobal_broken=no}
-glib_cv_sane_realloc=${glib_cv_sane_realloc=yes}
-glib_cv_sizeof_gmutex=${glib_cv_sizeof_gmutex=24}
-glib_cv_sizeof_system_thread=${glib_cv_sizeof_system_thread=4}
-glib_cv_stack_grows=${glib_cv_stack_grows=no}
-glib_cv_uscore=${glib_cv_uscore=no}
-
-# glib-2.0
-glib_cv_stack_grows=${glib_cv_stack_grows=no}
-utils_cv_sys_open_max=${utils_cv_sys_open_max=1015}
-glib_cv_use_pid_surrogate=${glib_cv_use_pid_surrogate=yes}
-ac_cv_alignof_guint32=4
-ac_cv_alignof_guint64=8
-ac_cv_alignof_unsigned_long=8
-ac_cv_sizeof_ssize_t=8
-
# libpcap
ac_cv_linux_vers=${ac_cv_linux_vers=2}
diff --git a/meta/site/mipsel-linux b/meta/site/mipsel-linux
index e9cf193cec..a3077f2161 100644
--- a/meta/site/mipsel-linux
+++ b/meta/site/mipsel-linux
@@ -15,24 +15,6 @@ ac_cv_have_space_d_name_in_struct_dirent=${ac_cv_have_space_d_name_in_struct_dir
# fget
compat_cv_func_snprintf_works=${compat_cv_func_snprintf_works=yes}
-# glib
-glib_cv___va_copy=${glib_cv___va_copy=yes}
-glib_cv_has__inline=${glib_cv_has__inline=yes}
-glib_cv_has__inline__=${glib_cv_has__inline__=yes}
-glib_cv_hasinline=${glib_cv_hasinline=yes}
-glib_cv_long_long_format=${glib_cv_long_long_format=ll}
-glib_cv_rtldglobal_broken=${glib_cv_rtldglobal_broken=no}
-glib_cv_sane_realloc=${glib_cv_sane_realloc=yes}
-glib_cv_sizeof_gmutex=${glib_cv_sizeof_gmutex=24}
-glib_cv_sizeof_system_thread=${glib_cv_sizeof_system_thread=4}
-glib_cv_stack_grows=${glib_cv_stack_grows=no}
-glib_cv_uscore=${glib_cv_uscore=no}
-
-# glib-2.0
-glib_cv_stack_grows=${glib_cv_stack_grows=no}
-utils_cv_sys_open_max=${utils_cv_sys_open_max=1015}
-glib_cv_use_pid_surrogate=${glib_cv_use_pid_surrogate=yes}
-
# libpcap
ac_cv_linux_vers=${ac_cv_linux_vers=2}
diff --git a/meta/site/mipsisa32r6-linux b/meta/site/mipsisa32r6-linux
index b2f15eee18..91570ec309 100644
--- a/meta/site/mipsisa32r6-linux
+++ b/meta/site/mipsisa32r6-linux
@@ -15,24 +15,6 @@ ac_cv_have_space_d_name_in_struct_dirent=${ac_cv_have_space_d_name_in_struct_dir
# fget
compat_cv_func_snprintf_works=${compat_cv_func_snprintf_works=yes}
-# glib
-glib_cv___va_copy=${glib_cv___va_copy=yes}
-glib_cv_has__inline=${glib_cv_has__inline=yes}
-glib_cv_has__inline__=${glib_cv_has__inline__=yes}
-glib_cv_hasinline=${glib_cv_hasinline=yes}
-glib_cv_long_long_format=${glib_cv_long_long_format=ll}
-glib_cv_rtldglobal_broken=${glib_cv_rtldglobal_broken=no}
-glib_cv_sane_realloc=${glib_cv_sane_realloc=yes}
-glib_cv_sizeof_gmutex=${glib_cv_sizeof_gmutex=24}
-glib_cv_sizeof_system_thread=${glib_cv_sizeof_system_thread=4}
-glib_cv_stack_grows=${glib_cv_stack_grows=no}
-glib_cv_uscore=${glib_cv_uscore=no}
-
-# glib-2.0
-glib_cv_stack_grows=${glib_cv_stack_grows=no}
-utils_cv_sys_open_max=${utils_cv_sys_open_max=1015}
-glib_cv_use_pid_surrogate=${glib_cv_use_pid_surrogate=yes}
-
# libpcap
ac_cv_linux_vers=${ac_cv_linux_vers=2}
diff --git a/meta/site/mipsisa32r6el-linux b/meta/site/mipsisa32r6el-linux
index e9cf193cec..a3077f2161 100644
--- a/meta/site/mipsisa32r6el-linux
+++ b/meta/site/mipsisa32r6el-linux
@@ -15,24 +15,6 @@ ac_cv_have_space_d_name_in_struct_dirent=${ac_cv_have_space_d_name_in_struct_dir
# fget
compat_cv_func_snprintf_works=${compat_cv_func_snprintf_works=yes}
-# glib
-glib_cv___va_copy=${glib_cv___va_copy=yes}
-glib_cv_has__inline=${glib_cv_has__inline=yes}
-glib_cv_has__inline__=${glib_cv_has__inline__=yes}
-glib_cv_hasinline=${glib_cv_hasinline=yes}
-glib_cv_long_long_format=${glib_cv_long_long_format=ll}
-glib_cv_rtldglobal_broken=${glib_cv_rtldglobal_broken=no}
-glib_cv_sane_realloc=${glib_cv_sane_realloc=yes}
-glib_cv_sizeof_gmutex=${glib_cv_sizeof_gmutex=24}
-glib_cv_sizeof_system_thread=${glib_cv_sizeof_system_thread=4}
-glib_cv_stack_grows=${glib_cv_stack_grows=no}
-glib_cv_uscore=${glib_cv_uscore=no}
-
-# glib-2.0
-glib_cv_stack_grows=${glib_cv_stack_grows=no}
-utils_cv_sys_open_max=${utils_cv_sys_open_max=1015}
-glib_cv_use_pid_surrogate=${glib_cv_use_pid_surrogate=yes}
-
# libpcap
ac_cv_linux_vers=${ac_cv_linux_vers=2}
diff --git a/meta/site/mipsisa64r6-linux b/meta/site/mipsisa64r6-linux
index e7c16405eb..7e05b94a09 100644
--- a/meta/site/mipsisa64r6-linux
+++ b/meta/site/mipsisa64r6-linux
@@ -15,28 +15,6 @@ ac_cv_have_space_d_name_in_struct_dirent=${ac_cv_have_space_d_name_in_struct_dir
# fget
compat_cv_func_snprintf_works=${compat_cv_func_snprintf_works=yes}
-# glib
-glib_cv___va_copy=${glib_cv___va_copy=yes}
-glib_cv_has__inline=${glib_cv_has__inline=yes}
-glib_cv_has__inline__=${glib_cv_has__inline__=yes}
-glib_cv_hasinline=${glib_cv_hasinline=yes}
-glib_cv_long_long_format=${glib_cv_long_long_format=ll}
-glib_cv_rtldglobal_broken=${glib_cv_rtldglobal_broken=no}
-glib_cv_sane_realloc=${glib_cv_sane_realloc=yes}
-glib_cv_sizeof_gmutex=${glib_cv_sizeof_gmutex=24}
-glib_cv_sizeof_system_thread=${glib_cv_sizeof_system_thread=4}
-glib_cv_stack_grows=${glib_cv_stack_grows=no}
-glib_cv_uscore=${glib_cv_uscore=no}
-
-# glib-2.0
-glib_cv_stack_grows=${glib_cv_stack_grows=no}
-utils_cv_sys_open_max=${utils_cv_sys_open_max=1015}
-glib_cv_use_pid_surrogate=${glib_cv_use_pid_surrogate=yes}
-ac_cv_alignof_guint32=4
-ac_cv_alignof_guint64=8
-ac_cv_alignof_unsigned_long=8
-ac_cv_sizeof_ssize_t=8
-
# libpcap
ac_cv_linux_vers=${ac_cv_linux_vers=2}
diff --git a/meta/site/mipsisa64r6el-linux b/meta/site/mipsisa64r6el-linux
index 406ffd66c5..1e1d7687c2 100644
--- a/meta/site/mipsisa64r6el-linux
+++ b/meta/site/mipsisa64r6el-linux
@@ -15,28 +15,6 @@ ac_cv_have_space_d_name_in_struct_dirent=${ac_cv_have_space_d_name_in_struct_dir
# fget
compat_cv_func_snprintf_works=${compat_cv_func_snprintf_works=yes}
-# glib
-glib_cv___va_copy=${glib_cv___va_copy=yes}
-glib_cv_has__inline=${glib_cv_has__inline=yes}
-glib_cv_has__inline__=${glib_cv_has__inline__=yes}
-glib_cv_hasinline=${glib_cv_hasinline=yes}
-glib_cv_long_long_format=${glib_cv_long_long_format=ll}
-glib_cv_rtldglobal_broken=${glib_cv_rtldglobal_broken=no}
-glib_cv_sane_realloc=${glib_cv_sane_realloc=yes}
-glib_cv_sizeof_gmutex=${glib_cv_sizeof_gmutex=24}
-glib_cv_sizeof_system_thread=${glib_cv_sizeof_system_thread=4}
-glib_cv_stack_grows=${glib_cv_stack_grows=no}
-glib_cv_uscore=${glib_cv_uscore=no}
-
-# glib-2.0
-glib_cv_stack_grows=${glib_cv_stack_grows=no}
-utils_cv_sys_open_max=${utils_cv_sys_open_max=1015}
-glib_cv_use_pid_surrogate=${glib_cv_use_pid_surrogate=yes}
-ac_cv_alignof_guint32=4
-ac_cv_alignof_guint64=8
-ac_cv_alignof_unsigned_long=8
-ac_cv_sizeof_ssize_t=8
-
# libpcap
ac_cv_linux_vers=${ac_cv_linux_vers=2}
diff --git a/meta/site/nios2-linux b/meta/site/nios2-linux
index 656da9fde4..58ffcbc7e2 100644
--- a/meta/site/nios2-linux
+++ b/meta/site/nios2-linux
@@ -53,39 +53,6 @@ ac_cv_func_fnmatch_works=${ac_cv_func_fnmatch_works=yes}
# gettext
am_cv_func_working_getline=${am_cv_func_working_getline=yes}
-# gio
-ac_cv_func_getgrgid_r=${ac_cv_func_getgrgid_r=yes}
-
-# glib
-glib_cv_sizeof_gmutex=${glib_cv_sizeof_gmutex=24}
-glib_cv_sizeof_system_thread=${glib_cv_sizeof_system_thread=4}
-glib_cv_stack_grows=${glib_cv_stack_grows=no}
-glib_cv_uscore=${glib_cv_uscore=no}
-glib_cv_use_pid_surrogate=${glib_cv_use_pid_surrogate=yes}
-glib_cv_has__inline=${glib_cv_has__inline=yes}
-glib_cv_has__inline__=${glib_cv_has__inline__=yes}
-glib_cv_hasinline=${glib_cv_hasinline=yes}
-glib_cv_sane_realloc=${glib_cv_sane_realloc=yes}
-glib_cv_sizeof_gmutex=${glib_cv_sizeof_gmutex=24}
-glib_cv_uscore=${glib_cv_uscore=no}
-glib_cv_va_copy=${glib_cv_va_copy=yes}
-glib_cv_va_val_copy=${glib_cv_va_val_copy=yes}
-glib_cv___va_copy=${glib_cv___va_copy=yes}
-glib_cv_rtldglobal_broken=${glib_cv_rtldglobal_broken=no}
-ac_cv_func_getpwuid_r=${ac_cv_func_getpwuid_r=yes}
-glib_cv_sys_pthread_mutex_trylock_posix=${glib_cv_sys_pthread_mutex_trylock_posix=yes}
-glib_cv_sys_pthread_getspecific_posix=${glib_cv_sys_pthread_getspecific_posix=yes}
-glib_cv_sys_pthread_cond_timedwait_posix=${glib_cv_sys_pthread_cond_timedwait_posix=yes}
-
-# glib-2.0
-glib_cv_long_long_format=${glib_cv_long_long_format=ll}
-glib_cv_sizeof_gmutex=${glib_cv_sizeof_gmutex=24}
-glib_cv_sizeof_intmax_t=${glib_cv_sizeof_intmax_t=8}
-glib_cv_sizeof_ptrdiff_t=${glib_cv_sizeof_ptrdiff_t=4}
-glib_cv_sizeof_size_t=${glib_cv_sizeof_size_t=4}
-glib_cv_sizeof_system_thread=${glib_cv_sizeof_system_thread=4}
-glib_cv_sys_use_pid_niceness_surrogate=${glib_cv_sys_use_pid_niceness_surrogate=yes}
-
#gstreamer
as_cv_unaligned_access=${as_cv_unaligned_access=no}
@@ -225,7 +192,7 @@ ac_cv_sizeof_long_p=${ac_cv_sizeof_long_p=4}
ac_cv_sizeof_float=${ac_cv_sizeof_float=4}
ac_cv_sizeof_double=${ac_cv_sizeof_double=8}
ac_cv_sizeof_long_double=${ac_cv_sizeof_long_double=8}
-ac_cv_sizeof_ptrdiff_t=${glib_cv_sizeof_ptrdiff_t=4}
+ac_cv_sizeof_ptrdiff_t=${ac_cv_sizeof_ptrdiff_t=4}
ac_cv_sizeof_unsigned_short=${ac_cv_sizeof_unsigned_short=2}
ac_cv_sizeof_unsigned=${ac_cv_sizeof_unsigned=4}
ac_cv_sizeof_unsigned_int=${ac_cv_sizeof_unsigned_int=4}
diff --git a/meta/site/powerpc-common b/meta/site/powerpc-common
deleted file mode 100644
index efa299c664..0000000000
--- a/meta/site/powerpc-common
+++ /dev/null
@@ -1,14 +0,0 @@
-# glib
-ac_cv_alignof_guint32=4
-ac_cv_alignof_guint64=8
-ac_cv_alignof_unsigned_long=4
-
-# at-spi2-core
-ac_cv_alignof_char=1
-ac_cv_alignof_dbind_pointer=4
-ac_cv_alignof_dbind_struct=1
-ac_cv_alignof_dbus_bool_t=4
-ac_cv_alignof_dbus_int16_t=2
-ac_cv_alignof_dbus_int32_t=4
-ac_cv_alignof_dbus_int64_t=8
-ac_cv_alignof_double=8
diff --git a/meta/site/powerpc-linux b/meta/site/powerpc-linux
index a9f89cb5d4..0aa5732fc0 100644
--- a/meta/site/powerpc-linux
+++ b/meta/site/powerpc-linux
@@ -4,11 +4,6 @@ ac_cv_va_val_copy=${ac_cv_va_val_copy=yes}
# startup-notification
lf_cv_sane_realloc=yes
-# glib
-glib_cv_stack_grows=${glib_cv_stack_grows=no}
-glib_cv_uscore=${glib_cv_uscore=no}
-glib_cv_va_val_copy=${glib_cv_va_val_copy=yes}
-
# lftp
lftp_cv_va_val_copy=${lftp_cv_va_val_copy=yes}
diff --git a/meta/site/powerpc32-linux b/meta/site/powerpc32-linux
index 501e5b605d..c2f870b432 100644
--- a/meta/site/powerpc32-linux
+++ b/meta/site/powerpc32-linux
@@ -97,21 +97,6 @@ php_cv_lib_cookie_io_functions_use_off64_t=${php_cv_lib_cookie_io_functions_use_
# gettext
am_cv_func_working_getline=${am_cv_func_working_getline=yes}
-# glib
-glib_cv_sizeof_gmutex=${glib_cv_sizeof_gmutex=24}
-glib_cv_sizeof_system_thread=${glib_cv_sizeof_system_thread=4}
-glib_cv_use_pid_surrogate=${glib_cv_use_pid_surrogate=yes}
-glib_cv_has__inline=${glib_cv_has__inline=yes}
-glib_cv_has__inline__=${glib_cv_has__inline__=yes}
-glib_cv_hasinline=${glib_cv_hasinline=yes}
-glib_cv_sane_realloc=${glib_cv_sane_realloc=yes}
-glib_cv_sizeof_gmutex=${glib_cv_sizeof_gmutex=24}
-glib_cv_va_val_copy=${glib_cv_va_val_copy=yes}
-glib_cv_rtldglobal_broken=${glib_cv_rtldglobal_broken=no}
-glib_cv_sys_pthread_mutex_trylock_posix=${glib_cv_sys_pthread_mutex_trylock_posix=yes}
-glib_cv_sys_pthread_getspecific_posix=${glib_cv_sys_pthread_getspecific_posix=yes}
-glib_cv_sys_pthread_cond_timedwait_posix=${glib_cv_sys_pthread_cond_timedwait_posix=yes}
-
# ettercap
ettercap_cv_type_socklen_t=${ettercap_cv_type_socklen_t=yes}
@@ -121,17 +106,6 @@ acx_working_snprintf=${acx_working_snprintf=yes}
# D-BUS
ac_cv_func_posix_getpwnam_r=${ac_cv_func_posix_getpwnam_r=yes}
-# glib 2.0
-glib_cv_long_long_format=${glib_cv_long_long_format=ll}
-glib_cv_sizeof_gmutex=${glib_cv_sizeof_gmutex=24}
-glib_cv_sizeof_intmax_t=${glib_cv_sizeof_intmax_t=8}
-glib_cv_sizeof_ptrdiff_t=${glib_cv_sizeof_ptrdiff_t=4}
-glib_cv_sizeof_size_t=${glib_cv_sizeof_size_t=4}
-glib_cv_sizeof_system_thread=${glib_cv_sizeof_system_thread=4}
-glib_cv_sys_use_pid_niceness_surrogate=${glib_cv_sys_use_pid_niceness_surrogate=yes}
-
-glib_cv_strlcpy=${glib_cv_strlcpy=no}
-
# httppc
ac_cv_strerror_r_SUSv3=${ac_cv_strerror_r_SUSv3=no}
@@ -195,7 +169,6 @@ liberty_cv_uint64=${liberty_cv_uint64=uint64_t}
#samba
ac_cv_func_memcmp_working=${ac_cv_func_memcmp_working=yes}
-fu_cv_sys_stat_statvfs64=${fu_cv_sys_stat_statvfs64=yes}
samba_cv_HAVE_BROKEN_FCNTL64_LOCKS=${samba_cv_HAVE_BROKEN_FCNTL64_LOCKS=no}
samba_cv_HAVE_BROKEN_GETGROUPS=${samba_cv_HAVE_BROKEN_GETGROUPS=no}
samba_cv_HAVE_BROKEN_READDIR=${samba_cv_HAVE_BROKEN_READDIR=yes}
diff --git a/meta/site/riscv32-linux b/meta/site/riscv32-linux
index 97de628e22..2cc26f3b93 100644
--- a/meta/site/riscv32-linux
+++ b/meta/site/riscv32-linux
@@ -3,9 +3,5 @@ ac_cv_alignof_guint32=4
ac_cv_alignof_guint64=8
ac_cv_alignof_unsigned_long=4
-# glib-2.0
-glib_cv_stack_grows=${glib_cv_stack_grows=no}
-glib_cv_uscore=${glib_cv_uscore=no}
-
# startup-notification
lf_cv_sane_realloc=${lf_cv_sane_realloc=yes}
diff --git a/meta/site/riscv64-linux b/meta/site/riscv64-linux
index 38bc5f044d..384d3bd877 100644
--- a/meta/site/riscv64-linux
+++ b/meta/site/riscv64-linux
@@ -3,9 +3,5 @@ ac_cv_alignof_guint32=4
ac_cv_alignof_guint64=8
ac_cv_alignof_unsigned_long=8
-# glib-2.0
-glib_cv_stack_grows=${glib_cv_stack_grows=no}
-glib_cv_uscore=${glib_cv_uscore=no}
-
# startup-notification
lf_cv_sane_realloc=${lf_cv_sane_realloc=yes}
diff --git a/meta/site/sh-common b/meta/site/sh-common
index d269a17db9..720d454351 100644
--- a/meta/site/sh-common
+++ b/meta/site/sh-common
@@ -103,24 +103,6 @@ ac_cv_pread=${ac_cv_pread=no}
ac_cv_pwrite=${ac_cv_pwrite=no}
php_cv_lib_cookie_io_functions_use_off64_t=${php_cv_lib_cookie_io_functions_use_off64_t=yes}
-# glib
-glib_cv_sizeof_gmutex=${glib_cv_sizeof_gmutex=24}
-glib_cv_sizeof_system_thread=${glib_cv_sizeof_system_thread=4}
-glib_cv_stack_grows=${glib_cv_stack_grows=no}
-glib_cv_uscore=${glib_cv_uscore=no}
-glib_cv_use_pid_surrogate=${glib_cv_use_pid_surrogate=yes}
-glib_cv_has__inline=${glib_cv_has__inline=yes}
-glib_cv_has__inline__=${glib_cv_has__inline__=yes}
-glib_cv_hasinline=${glib_cv_hasinline=yes}
-glib_cv_sane_realloc=${glib_cv_sane_realloc=yes}
-glib_cv_sizeof_gmutex=${glib_cv_sizeof_gmutex=24}
-glib_cv_uscore=${glib_cv_uscore=no}
-glib_cv_va_val_copy=${glib_cv_va_val_copy=yes}
-glib_cv_rtldglobal_broken=${glib_cv_rtldglobal_broken=no}
-glib_cv_sys_pthread_mutex_trylock_posix=${glib_cv_sys_pthread_mutex_trylock_posix=yes}
-glib_cv_sys_pthread_getspecific_posix=${glib_cv_sys_pthread_getspecific_posix=yes}
-glib_cv_sys_pthread_cond_timedwait_posix=${glib_cv_sys_pthread_cond_timedwait_posix=yes}
-
# ettercap
ettercap_cv_type_socklen_t=${ettercap_cv_type_socklen_t=yes}
@@ -130,21 +112,6 @@ acx_working_snprintf=${acx_working_snprintf=yes}
# D-BUS
ac_cv_func_posix_getpwnam_r=${ac_cv_func_posix_getpwnam_r=yes}
-# glib 2.0
-glib_cv_long_long_format=${glib_cv_long_long_format=ll}
-glib_cv_sizeof_gmutex=${glib_cv_sizeof_gmutex=24}
-glib_cv_sizeof_intmax_t=${glib_cv_sizeof_intmax_t=8}
-glib_cv_sizeof_ptrdiff_t=${glib_cv_sizeof_ptrdiff_t=4}
-glib_cv_sizeof_size_t=${glib_cv_sizeof_size_t=4}
-glib_cv_sizeof_system_thread=${glib_cv_sizeof_system_thread=4}
-glib_cv_sys_use_pid_niceness_surrogate=${glib_cv_sys_use_pid_niceness_surrogate=yes}
-
-glib_cv_strlcpy=${glib_cv_strlcpy=no}
-glib_cv_va_val_copy=${glib_cv_va_val_copy=yes}
-ac_cv_alignof_guint32=4
-ac_cv_alignof_guint64=8
-ac_cv_alignof_unsigned_long=4
-
# httppc
ac_cv_strerror_r_SUSv3=${ac_cv_strerror_r_SUSv3=no}
diff --git a/meta/site/sparc-linux b/meta/site/sparc-linux
index 49d0d9a3d6..c87eaae5ec 100644
--- a/meta/site/sparc-linux
+++ b/meta/site/sparc-linux
@@ -38,11 +38,5 @@ am_cv_sys_posix_termios=${am_cv_sys_posix_termios=yes}
# gettext
am_cv_func_working_getline=${am_cv_func_working_getline=yes}
-#glib
-glib_cv_sizeof_system_thread=${glib_cv_sizeof_system_thread=4}
-glib_cv_sizeof_gmutex=${glib_cv_sizeof_gmutex=24}
-glib_cv_uscore=${glib_cv_uscore=no}
-glib_cv_stack_grows=${glib_cv_stack_grows=no}
-
#linux
ac_cv_linux_vers=${ac_cv_linux_vers=2}
diff --git a/meta/site/x32-linux b/meta/site/x32-linux
index 4b70422ed2..2e120480c3 100644
--- a/meta/site/x32-linux
+++ b/meta/site/x32-linux
@@ -1,8 +1,4 @@
# general
ac_cv_sizeof_long_double=${ac_cv_sizeof_long_double=16}
ac_cv_sizeof_dev_t=${ac_cv_sizeof_dev_t=8}
-ac_cv_sys_file_offset_bits=${ac_cv_sys_file_offset_bits=64}
ac_cv_alignof_double=8
-
-# glib
-glib_cv_sizeof_gmutex=${glib_cv_sizeof_gmutex=32}
diff --git a/meta/site/x86_64-linux b/meta/site/x86_64-linux
index ea3176da93..b9ee141fd5 100644
--- a/meta/site/x86_64-linux
+++ b/meta/site/x86_64-linux
@@ -32,28 +32,6 @@ db_cv_align_t=${db_cv_align_t='unsigned long long'}
db_cv_fcntl_f_setfd=${db_cv_fcntl_f_setfd=yes}
db_cv_sprintf_count=${db_cv_sprintf_count=yes}
-# glib
-glib_cv_hasinline=${glib_cv_hasinline=yes}
-glib_cv_has__inline=${glib_cv_has__inline=yes}
-glib_cv_has__inline__=${glib_cv_has__inline__=yes}
-glib_cv_long_long_format=${glib_cv_long_long_format=ll}
-glib_cv_rtldglobal_broken=${glib_cv_rtldglobal_broken=yes}
-glib_cv_sane_realloc=${glib_cv_sane_realloc=yes}
-glib_cv_sizeof_gmutex=${glib_cv_sizeof_gmutex=40}
-glib_cv_sizeof_intmax_t=${glib_cv_sizeof_intmax_t=8}
-glib_cv_sizeof_ptrdiff_t=${glib_cv_sizeof_ptrdiff_t=8}
-glib_cv_sizeof_size_t=${glib_cv_sizeof_size_t=8}
-glib_cv_sizeof_system_thread=${glib_cv_sizeof_system_thread=8}
-glib_cv_stack_grows=${glib_cv_stack_grows=no}
-glib_cv_sys_pthread_cond_timedwait_posix=${glib_cv_sys_pthread_cond_timedwait_posix=yes}
-glib_cv_sys_pthread_getspecific_posix=${glib_cv_sys_pthread_getspecific_posix=yes}
-glib_cv_sys_pthread_mutex_trylock_posix=${glib_cv_sys_pthread_mutex_trylock_posix=yes}
-glib_cv_uscore=${glib_cv_uscore=no}
-glib_cv_va_val_copy=${glib_cv_va_val_copy=no}
-ac_cv_alignof_guint32=4
-ac_cv_alignof_guint64=8
-ac_cv_alignof_unsigned_long=8
-
nano_cv_func_regexec_segv_emptystr=${nano_cv_func_regexec_segv_emptystr=no}
samba_cv_HAVE_VA_COPY=${samba_cv_HAVE_VA_COPY=yes}
screen_cv_sys_bcopy_overlap=${screen_cv_sys_bcopy_overlap=no}
@@ -105,13 +83,3 @@ ac_cv_func__restgpr_14_x=no
# cvs
cvs_cv_func_printf_ptr=${cvs_cv_func_printf_ptr=yes}
-
-# at-spi2-core
-ac_cv_alignof_char=1
-ac_cv_alignof_dbind_pointer=8
-ac_cv_alignof_dbind_struct=1
-ac_cv_alignof_dbus_bool_t=4
-ac_cv_alignof_dbus_int16_t=2
-ac_cv_alignof_dbus_int32_t=4
-ac_cv_alignof_dbus_int64_t=8
-ac_cv_alignof_double=8
diff --git a/oe-init-build-env b/oe-init-build-env
index 38333ab858..82382f2707 100755
--- a/oe-init-build-env
+++ b/oe-init-build-env
@@ -47,6 +47,12 @@ export OEROOT
unset OEROOT
return 1
}
+
+# Generate an initial configuration for VSCode and the yocto-bitbake plugin.
+if command -v code > /dev/null && [ ! -d "$OEROOT/.vscode" ]; then
+ oe-setup-vscode "$OEROOT" "$BUILDDIR"
+fi
+
unset OEROOT
[ -z "$BUILDDIR" ] || cd "$BUILDDIR"
diff --git a/scripts/.oe-layers.json b/scripts/.oe-layers.json
new file mode 100644
index 0000000000..1b00a84b54
--- /dev/null
+++ b/scripts/.oe-layers.json
@@ -0,0 +1,7 @@
+{
+ "layers": [
+ "../meta-poky",
+ "../meta"
+ ],
+ "version": "1.0"
+}
diff --git a/scripts/autobuilder-worker-prereq-tests b/scripts/autobuilder-worker-prereq-tests
index 572227dccd..54fd3c1004 100755
--- a/scripts/autobuilder-worker-prereq-tests
+++ b/scripts/autobuilder-worker-prereq-tests
@@ -1,5 +1,7 @@
#!/bin/bash
#
+# Copyright OpenEmbedded Contributors
+#
# Script which can be run on new autobuilder workers to check all needed configuration is present.
# Designed to be run in a repo where bitbake/oe-core are already present.
#
diff --git a/scripts/bblock b/scripts/bblock
new file mode 100755
index 0000000000..0082059af8
--- /dev/null
+++ b/scripts/bblock
@@ -0,0 +1,184 @@
+#!/usr/bin/env python3
+# bblock
+# lock/unlock task to latest signature
+#
+# Copyright (c) 2023 BayLibre, SAS
+# Author: Julien Stepahn <jstephan@baylibre.com>
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
+import os
+import sys
+import logging
+
+scripts_path = os.path.dirname(os.path.realpath(__file__))
+lib_path = scripts_path + "/lib"
+sys.path = sys.path + [lib_path]
+
+import scriptpath
+
+scriptpath.add_bitbake_lib_path()
+
+import bb.tinfoil
+import bb.msg
+
+import argparse_oe
+
+myname = os.path.basename(sys.argv[0])
+logger = bb.msg.logger_create(myname)
+
+
+def getTaskSignatures(tinfoil, pn, tasks):
+ tinfoil.set_event_mask(
+ [
+ "bb.event.GetTaskSignatureResult",
+ "logging.LogRecord",
+ "bb.command.CommandCompleted",
+ "bb.command.CommandFailed",
+ ]
+ )
+ ret = tinfoil.run_command("getTaskSignatures", pn, tasks)
+ if ret:
+ while True:
+ event = tinfoil.wait_event(1)
+ if event:
+ if isinstance(event, bb.command.CommandCompleted):
+ break
+ elif isinstance(event, bb.command.CommandFailed):
+ logger.error(str(event))
+ sys.exit(2)
+ elif isinstance(event, bb.event.GetTaskSignatureResult):
+ sig = event.sig
+ elif isinstance(event, logging.LogRecord):
+ logger.handle(event)
+ else:
+ logger.error("No result returned from getTaskSignatures command")
+ sys.exit(2)
+ return sig
+
+
+def parseRecipe(tinfoil, recipe):
+ try:
+ tinfoil.parse_recipes()
+ d = tinfoil.parse_recipe(recipe)
+ except Exception:
+ logger.error("Failed to get recipe info for: %s" % recipe)
+ sys.exit(1)
+ return d
+
+
+def bblockDump(lockfile):
+ try:
+ with open(lockfile, "r") as lockfile:
+ for line in lockfile:
+ print(line.strip())
+ except IOError:
+ return 1
+ return 0
+
+
+def bblockReset(lockfile, pns, package_archs, tasks):
+ if not pns:
+ logger.info("Unlocking all recipes")
+ try:
+ os.remove(lockfile)
+ except FileNotFoundError:
+ pass
+ else:
+ logger.info("Unlocking {pns}".format(pns=pns))
+ tmp_lockfile = lockfile + ".tmp"
+ with open(lockfile, "r") as infile, open(tmp_lockfile, "w") as outfile:
+ for line in infile:
+ if not (
+ any(element in line for element in pns)
+ and any(element in line for element in package_archs.split())
+ ):
+ outfile.write(line)
+ else:
+ if tasks and not any(element in line for element in tasks):
+ outfile.write(line)
+ os.remove(lockfile)
+ os.rename(tmp_lockfile, lockfile)
+
+
+def main():
+ parser = argparse_oe.ArgumentParser(description="Lock and unlock a recipe")
+ parser.add_argument("pn", nargs="*", help="Space separated list of recipe to lock")
+ parser.add_argument(
+ "-t",
+ "--tasks",
+ help="Comma separated list of tasks",
+ type=lambda s: [
+ task if task.startswith("do_") else "do_" + task for task in s.split(",")
+ ],
+ )
+ parser.add_argument(
+ "-r",
+ "--reset",
+ action="store_true",
+ help="Unlock pn recipes, or all recipes if pn is empty",
+ )
+ parser.add_argument(
+ "-d",
+ "--dump",
+ action="store_true",
+ help="Dump generated bblock.conf file",
+ )
+
+ global_args, unparsed_args = parser.parse_known_args()
+
+ with bb.tinfoil.Tinfoil() as tinfoil:
+ tinfoil.prepare(config_only=True)
+
+ package_archs = tinfoil.config_data.getVar("PACKAGE_ARCHS")
+ builddir = tinfoil.config_data.getVar("TOPDIR")
+ lockfile = "{builddir}/conf/bblock.conf".format(builddir=builddir)
+
+ if global_args.dump:
+ bblockDump(lockfile)
+ return 0
+
+ if global_args.reset:
+ bblockReset(lockfile, global_args.pn, package_archs, global_args.tasks)
+ return 0
+
+ with open(lockfile, "a") as lockfile:
+ s = ""
+ if lockfile.tell() == 0:
+ s = "# Generated by bblock\n"
+ s += 'SIGGEN_LOCKEDSIGS_TASKSIG_CHECK = "info"\n'
+ s += 'SIGGEN_LOCKEDSIGS_TYPES += "${PACKAGE_ARCHS}"\n'
+ s += "\n"
+
+ for pn in global_args.pn:
+ d = parseRecipe(tinfoil, pn)
+ package_arch = d.getVar("PACKAGE_ARCH")
+ siggen_locked_sigs_package_arch = d.getVar(
+ "SIGGEN_LOCKEDSIGS_{package_arch}".format(package_arch=package_arch)
+ )
+ sigs = getTaskSignatures(tinfoil, [pn], global_args.tasks)
+ for sig in sigs:
+ new_entry = "{pn}:{taskname}:{sig}".format(
+ pn=sig[0], taskname=sig[1], sig=sig[2]
+ )
+ if (
+ siggen_locked_sigs_package_arch
+ and not new_entry in siggen_locked_sigs_package_arch
+ ) or not siggen_locked_sigs_package_arch:
+ s += 'SIGGEN_LOCKEDSIGS_{package_arch} += "{new_entry}"\n'.format(
+ package_arch=package_arch, new_entry=new_entry
+ )
+ lockfile.write(s)
+ return 0
+
+
+if __name__ == "__main__":
+ try:
+ ret = main()
+ except Exception:
+ ret = 1
+ import traceback
+
+ traceback.print_exc()
+ sys.exit(ret)
diff --git a/scripts/bitbake-prserv-tool b/scripts/bitbake-prserv-tool
index e55d98c72e..80028342b1 100755
--- a/scripts/bitbake-prserv-tool
+++ b/scripts/bitbake-prserv-tool
@@ -1,5 +1,7 @@
#!/usr/bin/env bash
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: GPL-2.0-only
#
@@ -15,8 +17,11 @@ help ()
clean_cache()
{
s=`bitbake -e | grep ^CACHE= | cut -f2 -d\"`
+ # Stop any active memory resident server
+ bitbake -m
+ # Remove cache entries since we want to trigger a full reparse
if [ "x${s}" != "x" ]; then
- rm -rf ${s}
+ rm -f ${s}/bb_cache*.dat.*
fi
}
@@ -60,7 +65,7 @@ do_migrate_localcount ()
return 1
fi
- rm -rf $df
+ rm -f $df
clean_cache
echo "Exporting LOCALCOUNT to AUTOINCs..."
bitbake -R conf/migrate_localcount.conf -p
diff --git a/scripts/bitbake-whatchanged b/scripts/bitbake-whatchanged
deleted file mode 100755
index 6f4b268119..0000000000
--- a/scripts/bitbake-whatchanged
+++ /dev/null
@@ -1,320 +0,0 @@
-#!/usr/bin/env python3
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-
-# Copyright (c) 2013 Wind River Systems, Inc.
-#
-# SPDX-License-Identifier: GPL-2.0-only
-#
-
-import os
-import sys
-import getopt
-import shutil
-import re
-import warnings
-import subprocess
-import argparse
-
-scripts_path = os.path.abspath(os.path.dirname(os.path.abspath(sys.argv[0])))
-lib_path = scripts_path + '/lib'
-sys.path = sys.path + [lib_path]
-
-import scriptpath
-
-# Figure out where is the bitbake/lib/bb since we need bb.siggen and bb.process
-bitbakepath = scriptpath.add_bitbake_lib_path()
-if not bitbakepath:
- sys.stderr.write("Unable to find bitbake by searching parent directory of this script or PATH\n")
- sys.exit(1)
-scriptpath.add_oe_lib_path()
-import argparse_oe
-
-import bb.siggen
-import bb.process
-
-# Match the stamp's filename
-# group(1): PE_PV (may no PE)
-# group(2): PR
-# group(3): TASK
-# group(4): HASH
-stamp_re = re.compile("(?P<pv>.*)-(?P<pr>r\d+)\.(?P<task>do_\w+)\.(?P<hash>[^\.]*)")
-sigdata_re = re.compile(".*\.sigdata\..*")
-
-def gen_dict(stamps):
- """
- Generate the dict from the stamps dir.
- The output dict format is:
- {fake_f: {pn: PN, pv: PV, pr: PR, task: TASK, path: PATH}}
- Where:
- fake_f: pv + task + hash
- path: the path to the stamp file
- """
- # The member of the sub dict (A "path" will be appended below)
- sub_mem = ("pv", "pr", "task")
- d = {}
- for dirpath, _, files in os.walk(stamps):
- for f in files:
- # The "bitbake -S" would generate ".sigdata", but no "_setscene".
- fake_f = re.sub('_setscene.', '.', f)
- fake_f = re.sub('.sigdata', '', fake_f)
- subdict = {}
- tmp = stamp_re.match(fake_f)
- if tmp:
- for i in sub_mem:
- subdict[i] = tmp.group(i)
- if len(subdict) != 0:
- pn = os.path.basename(dirpath)
- subdict['pn'] = pn
- # The path will be used by os.stat() and bb.siggen
- subdict['path'] = dirpath + "/" + f
- fake_f = tmp.group('pv') + tmp.group('task') + tmp.group('hash')
- d[fake_f] = subdict
- return d
-
-# Re-construct the dict
-def recon_dict(dict_in):
- """
- The output dict format is:
- {pn_task: {pv: PV, pr: PR, path: PATH}}
- """
- dict_out = {}
- for k in dict_in.keys():
- subdict = {}
- # The key
- pn_task = "%s_%s" % (dict_in.get(k).get('pn'), dict_in.get(k).get('task'))
- # If more than one stamps are found, use the latest one.
- if pn_task in dict_out:
- full_path_pre = dict_out.get(pn_task).get('path')
- full_path_cur = dict_in.get(k).get('path')
- if os.stat(full_path_pre).st_mtime > os.stat(full_path_cur).st_mtime:
- continue
- subdict['pv'] = dict_in.get(k).get('pv')
- subdict['pr'] = dict_in.get(k).get('pr')
- subdict['path'] = dict_in.get(k).get('path')
- dict_out[pn_task] = subdict
-
- return dict_out
-
-def split_pntask(s):
- """
- Split the pn_task in to (pn, task) and return it
- """
- tmp = re.match("(.*)_(do_.*)", s)
- return (tmp.group(1), tmp.group(2))
-
-
-def print_added(d_new = None, d_old = None):
- """
- Print the newly added tasks
- """
- added = {}
- for k in list(d_new.keys()):
- if k not in d_old:
- # Add the new one to added dict, and remove it from
- # d_new, so the remaining ones are the changed ones
- added[k] = d_new.get(k)
- del(d_new[k])
-
- if not added:
- return 0
-
- # Format the output, the dict format is:
- # {pn: task1, task2 ...}
- added_format = {}
- counter = 0
- for k in added.keys():
- pn, task = split_pntask(k)
- if pn in added_format:
- # Append the value
- added_format[pn] = "%s %s" % (added_format.get(pn), task)
- else:
- added_format[pn] = task
- counter += 1
- print("=== Newly added tasks: (%s tasks)" % counter)
- for k in added_format.keys():
- print(" %s: %s" % (k, added_format.get(k)))
-
- return counter
-
-def print_vrchanged(d_new = None, d_old = None, vr = None):
- """
- Print the pv or pr changed tasks.
- The arg "vr" is "pv" or "pr"
- """
- pvchanged = {}
- counter = 0
- for k in list(d_new.keys()):
- if d_new.get(k).get(vr) != d_old.get(k).get(vr):
- counter += 1
- pn, task = split_pntask(k)
- if pn not in pvchanged:
- # Format the output, we only print pn (no task) since
- # all the tasks would be changed when pn or pr changed,
- # the dict format is:
- # {pn: pv/pr_old -> pv/pr_new}
- pvchanged[pn] = "%s -> %s" % (d_old.get(k).get(vr), d_new.get(k).get(vr))
- del(d_new[k])
-
- if not pvchanged:
- return 0
-
- print("\n=== %s changed: (%s tasks)" % (vr.upper(), counter))
- for k in pvchanged.keys():
- print(" %s: %s" % (k, pvchanged.get(k)))
-
- return counter
-
-def print_depchanged(d_new = None, d_old = None, verbose = False):
- """
- Print the dependency changes
- """
- depchanged = {}
- counter = 0
- for k in d_new.keys():
- counter += 1
- pn, task = split_pntask(k)
- if (verbose):
- full_path_old = d_old.get(k).get("path")
- full_path_new = d_new.get(k).get("path")
- # No counter since it is not ready here
- if sigdata_re.match(full_path_old) and sigdata_re.match(full_path_new):
- output = bb.siggen.compare_sigfiles(full_path_old, full_path_new)
- if output:
- print("\n=== The verbose changes of %s.%s:" % (pn, task))
- print('\n'.join(output))
- else:
- # Format the output, the format is:
- # {pn: task1, task2, ...}
- if pn in depchanged:
- depchanged[pn] = "%s %s" % (depchanged.get(pn), task)
- else:
- depchanged[pn] = task
-
- if len(depchanged) > 0:
- print("\n=== Dependencies changed: (%s tasks)" % counter)
- for k in depchanged.keys():
- print(" %s: %s" % (k, depchanged[k]))
-
- return counter
-
-
-def main():
- """
- Print what will be done between the current and last builds:
- 1) Run "STAMPS_DIR=<path> bitbake -S recipe" to re-generate the stamps
- 2) Figure out what are newly added and changed, can't figure out
- what are removed since we can't know the previous stamps
- clearly, for example, if there are several builds, we can't know
- which stamps the last build has used exactly.
- 3) Use bb.siggen.compare_sigfiles to diff the old and new stamps
- """
-
- parser = argparse_oe.ArgumentParser(usage = """%(prog)s [options] [package ...]
-print what will be done between the current and last builds, for example:
-
- $ bitbake core-image-sato
- # Edit the recipes
- $ bitbake-whatchanged core-image-sato
-
-The changes will be printed.
-
-Note:
- The amount of tasks is not accurate when the task is "do_build" since
- it usually depends on other tasks.
- The "nostamp" task is not included.
-"""
-)
- parser.add_argument("recipe", help="recipe to check")
- parser.add_argument("-v", "--verbose", help = "print the verbose changes", action = "store_true")
- args = parser.parse_args()
-
- # Get the STAMPS_DIR
- print("Figuring out the STAMPS_DIR ...")
- cmdline = "bitbake -e | sed -ne 's/^STAMPS_DIR=\"\(.*\)\"/\\1/p'"
- try:
- stampsdir, err = bb.process.run(cmdline)
- except:
- raise
- if not stampsdir:
- print("ERROR: No STAMPS_DIR found for '%s'" % args.recipe, file=sys.stderr)
- return 2
- stampsdir = stampsdir.rstrip("\n")
- if not os.path.isdir(stampsdir):
- print("ERROR: stamps directory \"%s\" not found!" % stampsdir, file=sys.stderr)
- return 2
-
- # The new stamps dir
- new_stampsdir = stampsdir + ".bbs"
- if os.path.exists(new_stampsdir):
- print("ERROR: %s already exists!" % new_stampsdir, file=sys.stderr)
- return 2
-
- try:
- # Generate the new stamps dir
- print("Generating the new stamps ... (need several minutes)")
- cmdline = "STAMPS_DIR=%s bitbake -S none %s" % (new_stampsdir, args.recipe)
- # FIXME
- # The "bitbake -S" may fail, not fatal error, the stamps will still
- # be generated, this might be a bug of "bitbake -S".
- try:
- bb.process.run(cmdline)
- except Exception as exc:
- print(exc)
-
- # The dict for the new and old stamps.
- old_dict = gen_dict(stampsdir)
- new_dict = gen_dict(new_stampsdir)
-
- # Remove the same one from both stamps.
- cnt_unchanged = 0
- for k in list(new_dict.keys()):
- if k in old_dict:
- cnt_unchanged += 1
- del(new_dict[k])
- del(old_dict[k])
-
- # Re-construct the dict to easily find out what is added or changed.
- # The dict format is:
- # {pn_task: {pv: PV, pr: PR, path: PATH}}
- new_recon = recon_dict(new_dict)
- old_recon = recon_dict(old_dict)
-
- del new_dict
- del old_dict
-
- # Figure out what are changed, the new_recon would be changed
- # by the print_xxx function.
- # Newly added
- cnt_added = print_added(new_recon, old_recon)
-
- # PV (including PE) and PR changed
- # Let the bb.siggen handle them if verbose
- cnt_rv = {}
- if not args.verbose:
- for i in ('pv', 'pr'):
- cnt_rv[i] = print_vrchanged(new_recon, old_recon, i)
-
- # Dependencies changed (use bitbake-diffsigs)
- cnt_dep = print_depchanged(new_recon, old_recon, args.verbose)
-
- total_changed = cnt_added + (cnt_rv.get('pv') or 0) + (cnt_rv.get('pr') or 0) + cnt_dep
-
- print("\n=== Summary: (%s changed, %s unchanged)" % (total_changed, cnt_unchanged))
- if args.verbose:
- print("Newly added: %s\nDependencies changed: %s\n" % \
- (cnt_added, cnt_dep))
- else:
- print("Newly added: %s\nPV changed: %s\nPR changed: %s\nDependencies changed: %s\n" % \
- (cnt_added, cnt_rv.get('pv') or 0, cnt_rv.get('pr') or 0, cnt_dep))
- except:
- print("ERROR occurred!")
- raise
- finally:
- # Remove the newly generated stamps dir
- if os.path.exists(new_stampsdir):
- print("Removing the newly generated stamps dir ...")
- shutil.rmtree(new_stampsdir)
-
-if __name__ == "__main__":
- sys.exit(main())
diff --git a/scripts/buildstats-diff b/scripts/buildstats-diff
index 2f6498ab67..c9aa76a8fa 100755
--- a/scripts/buildstats-diff
+++ b/scripts/buildstats-diff
@@ -1,4 +1,4 @@
-#!/usr/bin/python3
+#!/usr/bin/env python3
#
# Script for comparing buildstats from two different builds
#
diff --git a/scripts/buildstats-summary b/scripts/buildstats-summary
new file mode 100755
index 0000000000..b10c671b29
--- /dev/null
+++ b/scripts/buildstats-summary
@@ -0,0 +1,126 @@
+#!/usr/bin/env python3
+#
+# Dump a summary of the specified buildstats to the terminal, filtering and
+# sorting by walltime.
+#
+# SPDX-License-Identifier: GPL-2.0-only
+
+import argparse
+import dataclasses
+import datetime
+import enum
+import os
+import pathlib
+import sys
+
+scripts_path = os.path.dirname(os.path.realpath(__file__))
+sys.path.append(os.path.join(scripts_path, "lib"))
+import buildstats
+
+
+@dataclasses.dataclass
+class Task:
+ recipe: str
+ task: str
+ start: datetime.datetime
+ duration: datetime.timedelta
+
+
+class Sorting(enum.Enum):
+ start = 1
+ duration = 2
+
+ # argparse integration
+ def __str__(self) -> str:
+ return self.name
+
+ def __repr__(self) -> str:
+ return self.name
+
+ @staticmethod
+ def from_string(s: str):
+ try:
+ return Sorting[s]
+ except KeyError:
+ return s
+
+
+def read_buildstats(path: pathlib.Path) -> buildstats.BuildStats:
+ if not path.exists():
+ raise Exception(f"No such file or directory: {path}")
+ if path.is_file():
+ return buildstats.BuildStats.from_file_json(path)
+ if (path / "build_stats").is_file():
+ return buildstats.BuildStats.from_dir(path)
+ raise Exception(f"Cannot find buildstats in {path}")
+
+
+def dump_buildstats(args, bs: buildstats.BuildStats):
+ tasks = []
+ for recipe in bs.values():
+ for task, stats in recipe.tasks.items():
+ t = Task(
+ recipe.name,
+ task,
+ datetime.datetime.fromtimestamp(stats["start_time"]),
+ datetime.timedelta(seconds=int(stats.walltime)),
+ )
+ tasks.append(t)
+
+ tasks.sort(key=lambda t: getattr(t, args.sort.name))
+
+ minimum = datetime.timedelta(seconds=args.shortest)
+ highlight = datetime.timedelta(seconds=args.highlight)
+
+ for t in tasks:
+ if t.duration >= minimum:
+ line = f"{t.duration} {t.recipe}:{t.task}"
+ if args.highlight and t.duration >= highlight:
+ print(f"\033[1m{line}\033[0m")
+ else:
+ print(line)
+
+
+def main(argv=None) -> int:
+ parser = argparse.ArgumentParser(
+ formatter_class=argparse.ArgumentDefaultsHelpFormatter
+ )
+
+ parser.add_argument(
+ "buildstats", metavar="BUILDSTATS", help="Buildstats file", type=pathlib.Path
+ )
+ parser.add_argument(
+ "--sort",
+ "-s",
+ type=Sorting.from_string,
+ choices=list(Sorting),
+ default=Sorting.start,
+ help="Sort tasks",
+ )
+ parser.add_argument(
+ "--shortest",
+ "-t",
+ type=int,
+ default=1,
+ metavar="SECS",
+ help="Hide tasks shorter than SECS seconds",
+ )
+ parser.add_argument(
+ "--highlight",
+ "-g",
+ type=int,
+ default=60,
+ metavar="SECS",
+ help="Highlight tasks longer than SECS seconds (0 disabled)",
+ )
+
+ args = parser.parse_args(argv)
+
+ bs = read_buildstats(args.buildstats)
+ dump_buildstats(args, bs)
+
+ return 0
+
+
+if __name__ == "__main__":
+ sys.exit(main())
diff --git a/scripts/combo-layer b/scripts/combo-layer
index 045de65642..4a715914af 100755
--- a/scripts/combo-layer
+++ b/scripts/combo-layer
@@ -19,9 +19,8 @@ import tempfile
import configparser
import re
import copy
-import pipes
+import shlex
import shutil
-from collections import OrderedDict
from string import Template
from functools import reduce
@@ -192,6 +191,23 @@ def runcmd(cmd,destdir=None,printerr=True,out=None,env=None):
logger.debug("output: %s" % output.replace(chr(0), '\\0'))
return output
+def action_sync_revs(conf, args):
+ """
+ Update the last_revision config option for each repo with the latest
+ revision in the remote's branch. Useful if multiple people are using
+ combo-layer.
+ """
+ repos = get_repos(conf, args[1:])
+
+ for name in repos:
+ repo = conf.repos[name]
+ ldir = repo['local_repo_dir']
+ branch = repo.get('branch', "master")
+ runcmd("git fetch", ldir)
+ lastrev = runcmd('git rev-parse origin/%s' % branch, ldir).strip()
+ print("Updating %s to %s" % (name, lastrev))
+ conf.update(name, "last_revision", lastrev)
+
def action_init(conf, args):
"""
Clone component repositories
@@ -467,7 +483,7 @@ def check_repo_clean(repodir):
exit if repo is dirty
"""
output=runcmd("git status --porcelain", repodir)
- r = re.compile('\?\? patch-.*/')
+ r = re.compile(r'\?\? patch-.*/')
dirtyout = [item for item in output.splitlines() if not r.match(item)]
if dirtyout:
logger.error("git repo %s is dirty, please fix it first", repodir)
@@ -508,7 +524,7 @@ def check_patch(patchfile):
f.close()
if of:
of.close()
- bb.utils.rename(patchfile + '.tmp', patchfile)
+ os.rename(of.name, patchfile)
def drop_to_shell(workdir=None):
if not sys.stdin.isatty():
@@ -1259,7 +1275,7 @@ def apply_commit(parent, rev, largs, wargs, dest_dir, file_filter=None):
target = os.path.join(wargs["destdir"], dest_dir)
if not os.path.isdir(target):
os.makedirs(target)
- quoted_target = pipes.quote(target)
+ quoted_target = shlex.quote(target)
# os.sysconf('SC_ARG_MAX') is lying: running a command with
# string length 629343 already failed with "Argument list too
# long" although SC_ARG_MAX = 2097152. "man execve" explains
@@ -1271,7 +1287,7 @@ def apply_commit(parent, rev, largs, wargs, dest_dir, file_filter=None):
unquoted_args = []
cmdsize = 100 + len(quoted_target)
while update:
- quoted_next = pipes.quote(update[0])
+ quoted_next = shlex.quote(update[0])
size_next = len(quoted_next) + len(dest_dir) + 1
logger.debug('cmdline length %d + %d < %d?' % (cmdsize, size_next, os.sysconf('SC_ARG_MAX')))
if cmdsize + size_next < max_cmdsize:
@@ -1302,6 +1318,7 @@ actions = {
"update": action_update,
"pull": action_pull,
"splitpatch": action_splitpatch,
+ "sync-revs": action_sync_revs,
}
def main():
@@ -1312,10 +1329,11 @@ def main():
Create and update a combination layer repository from multiple component repositories.
Action:
- init initialise the combo layer repo
- update [components] get patches from component repos and apply them to the combo repo
- pull [components] just pull component repos only
- splitpatch [commit] generate commit patch and split per component, default commit is HEAD""")
+ init initialise the combo layer repo
+ update [components] get patches from component repos and apply them to the combo repo
+ pull [components] just pull component repos only
+ sync-revs [components] update the config file's last_revision for each repository
+ splitpatch [commit] generate commit patch and split per component, default commit is HEAD""")
parser.add_option("-c", "--conf", help = "specify the config file (conf/combo-layer.conf is the default).",
action = "store", dest = "conffile", default = "conf/combo-layer.conf")
diff --git a/scripts/combo-layer-hook-default.sh b/scripts/combo-layer-hook-default.sh
index 11547a9826..fb9651b31f 100755
--- a/scripts/combo-layer-hook-default.sh
+++ b/scripts/combo-layer-hook-default.sh
@@ -1,5 +1,7 @@
#!/bin/sh
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: GPL-2.0-only
#
# Hook to add source component/revision info to commit message
diff --git a/scripts/contrib/bb-perf/bb-matrix-plot.sh b/scripts/contrib/bb-perf/bb-matrix-plot.sh
index e7bd129e9e..6672189c95 100755
--- a/scripts/contrib/bb-perf/bb-matrix-plot.sh
+++ b/scripts/contrib/bb-perf/bb-matrix-plot.sh
@@ -16,8 +16,8 @@
# Setup the defaults
DATFILE="bb-matrix.dat"
-XLABEL="BB_NUMBER_THREADS"
-YLABEL="PARALLEL_MAKE"
+XLABEL="BB\\\\_NUMBER\\\\_THREADS"
+YLABEL="PARALLEL\\\\_MAKE"
FIELD=3
DEF_TITLE="Elapsed Time (seconds)"
PM3D_FRAGMENT="unset surface; set pm3d at s hidden3d 100"
diff --git a/scripts/contrib/bbvars.py b/scripts/contrib/bbvars.py
index 090133600b..a9cdf082ab 100755
--- a/scripts/contrib/bbvars.py
+++ b/scripts/contrib/bbvars.py
@@ -36,8 +36,8 @@ def bbvar_is_documented(var, documented_vars):
def collect_documented_vars(docfiles):
''' Walk the docfiles and collect the documented variables '''
documented_vars = []
- prog = re.compile(".*($|[^A-Z_])<glossentry id=\'var-")
- var_prog = re.compile('<glossentry id=\'var-(.*)\'>')
+ prog = re.compile(r".*($|[^A-Z_])<glossentry id=\'var-")
+ var_prog = re.compile(r'<glossentry id=\'var-(.*)\'>')
for d in docfiles:
with open(d) as f:
documented_vars += var_prog.findall(f.read())
@@ -45,7 +45,7 @@ def collect_documented_vars(docfiles):
return documented_vars
def bbvar_doctag(var, docconf):
- prog = re.compile('^%s\[doc\] *= *"(.*)"' % (var))
+ prog = re.compile(r'^%s\[doc\] *= *"(.*)"' % (var))
if docconf == "":
return "?"
diff --git a/scripts/contrib/convert-overrides.py b/scripts/contrib/convert-overrides.py
index 4d41a4c475..c69acb4095 100755
--- a/scripts/contrib/convert-overrides.py
+++ b/scripts/contrib/convert-overrides.py
@@ -22,66 +22,78 @@ import sys
import tempfile
import shutil
import mimetypes
+import argparse
-if len(sys.argv) < 2:
- print("Please specify a directory to run the conversion script against.")
- sys.exit(1)
+parser = argparse.ArgumentParser(description="Convert override syntax")
+parser.add_argument("--override", "-o", action="append", default=[], help="Add additional strings to consider as an override (e.g. custom machines/distros")
+parser.add_argument("--skip", "-s", action="append", default=[], help="Add additional string to skip and not consider an override")
+parser.add_argument("--skip-ext", "-e", action="append", default=[], help="Additional file suffixes to skip when processing (e.g. '.foo')")
+parser.add_argument("--package-vars", action="append", default=[], help="Additional variables to treat as package variables")
+parser.add_argument("--image-vars", action="append", default=[], help="Additional variables to treat as image variables")
+parser.add_argument("--short-override", action="append", default=[], help="Additional strings to treat as short overrides")
+parser.add_argument("path", nargs="+", help="Paths to convert")
+
+args = parser.parse_args()
# List of strings to treat as overrides
-vars = ["append", "prepend", "remove"]
-vars = vars + ["qemuarm", "qemux86", "qemumips", "qemuppc", "qemuriscv", "qemuall"]
-vars = vars + ["genericx86", "edgerouter", "beaglebone-yocto"]
-vars = vars + ["armeb", "arm", "armv5", "armv6", "armv4", "powerpc64", "aarch64", "riscv32", "riscv64", "x86", "mips64", "powerpc"]
-vars = vars + ["mipsarch", "x86-x32", "mips16e", "microblaze", "e5500-64b", "mipsisa32", "mipsisa64"]
-vars = vars + ["class-native", "class-target", "class-cross-canadian", "class-cross", "class-devupstream"]
-vars = vars + ["tune-", "pn-", "forcevariable"]
-vars = vars + ["libc-musl", "libc-glibc", "libc-newlib","libc-baremetal"]
-vars = vars + ["task-configure", "task-compile", "task-install", "task-clean", "task-image-qa", "task-rm_work", "task-image-complete", "task-populate-sdk"]
-vars = vars + ["toolchain-clang", "mydistro", "nios2", "sdkmingw32", "overrideone", "overridetwo"]
-vars = vars + ["linux-gnux32", "linux-muslx32", "linux-gnun32", "mingw32", "poky", "darwin", "linuxstdbase"]
-vars = vars + ["linux-gnueabi", "eabi"]
-vars = vars + ["virtclass-multilib", "virtclass-mcextend"]
+vars = args.override
+vars += ["append", "prepend", "remove"]
+vars += ["qemuarm", "qemux86", "qemumips", "qemuppc", "qemuriscv", "qemuall"]
+vars += ["genericx86", "edgerouter", "beaglebone-yocto"]
+vars += ["armeb", "arm", "armv5", "armv6", "armv4", "powerpc64", "aarch64", "riscv32", "riscv64", "x86", "mips64", "powerpc"]
+vars += ["mipsarch", "x86-x32", "mips16e", "microblaze", "e5500-64b", "mipsisa32", "mipsisa64"]
+vars += ["class-native", "class-target", "class-cross-canadian", "class-cross", "class-devupstream"]
+vars += ["tune-", "pn-", "forcevariable"]
+vars += ["libc-musl", "libc-glibc", "libc-newlib","libc-baremetal"]
+vars += ["task-configure", "task-compile", "task-install", "task-clean", "task-image-qa", "task-rm_work", "task-image-complete", "task-populate-sdk"]
+vars += ["toolchain-clang", "mydistro", "nios2", "sdkmingw32", "overrideone", "overridetwo"]
+vars += ["linux-gnux32", "linux-muslx32", "linux-gnun32", "mingw32", "poky", "darwin", "linuxstdbase"]
+vars += ["linux-gnueabi", "eabi"]
+vars += ["virtclass-multilib", "virtclass-mcextend"]
# List of strings to treat as overrides but only with whitespace following or another override (more restricted matching).
# Handles issues with arc matching arch.
-shortvars = ["arc", "mips", "mipsel", "sh4"]
+shortvars = ["arc", "mips", "mipsel", "sh4"] + args.short_override
# Variables which take packagenames as an override
packagevars = ["FILES", "RDEPENDS", "RRECOMMENDS", "SUMMARY", "DESCRIPTION", "RSUGGESTS", "RPROVIDES", "RCONFLICTS", "PKG", "ALLOW_EMPTY",
"pkg_postrm", "pkg_postinst_ontarget", "pkg_postinst", "INITSCRIPT_NAME", "INITSCRIPT_PARAMS", "DEBIAN_NOAUTONAME", "ALTERNATIVE",
"PKGE", "PKGV", "PKGR", "USERADD_PARAM", "GROUPADD_PARAM", "CONFFILES", "SYSTEMD_SERVICE", "LICENSE", "SECTION", "pkg_preinst",
"pkg_prerm", "RREPLACES", "GROUPMEMS_PARAM", "SYSTEMD_AUTO_ENABLE", "SKIP_FILEDEPS", "PRIVATE_LIBS", "PACKAGE_ADD_METADATA",
- "INSANE_SKIP", "DEBIANNAME", "SYSTEMD_SERVICE_ESCAPED"]
+ "INSANE_SKIP", "DEBIANNAME", "SYSTEMD_SERVICE_ESCAPED"] + args.package_vars
# Expressions to skip if encountered, these are not overrides
-skips = ["parser_append", "recipe_to_append", "extra_append", "to_remove", "show_appends", "applied_appends", "file_appends", "handle_remove"]
-skips = skips + ["expanded_removes", "color_remove", "test_remove", "empty_remove", "toaster_prepend", "num_removed", "licfiles_append", "_write_append"]
-skips = skips + ["no_report_remove", "test_prepend", "test_append", "multiple_append", "test_remove", "shallow_remove", "do_remove_layer", "first_append"]
-skips = skips + ["parser_remove", "to_append", "no_remove", "bblayers_add_remove", "bblayers_remove", "apply_append", "is_x86", "base_dep_prepend"]
-skips = skips + ["autotools_dep_prepend", "go_map_arm", "alt_remove_links", "systemd_append_file", "file_append", "process_file_darwin"]
-skips = skips + ["run_loaddata_poky", "determine_if_poky_env", "do_populate_poky_src", "libc_cv_include_x86_isa_level", "test_rpm_remove", "do_install_armmultilib"]
-skips = skips + ["get_appends_for_files", "test_doubleref_remove", "test_bitbakelayers_add_remove", "elf32_x86_64", "colour_remove", "revmap_remove"]
-skips = skips + ["test_rpm_remove", "test_bitbakelayers_add_remove", "recipe_append_file", "log_data_removed", "recipe_append", "systemd_machine_unit_append"]
-skips = skips + ["recipetool_append", "changetype_remove", "try_appendfile_wc", "test_qemux86_directdisk", "test_layer_appends", "tgz_removed"]
-
-imagevars = ["IMAGE_CMD", "EXTRA_IMAGECMD", "IMAGE_TYPEDEP", "CONVERSION_CMD", "COMPRESS_CMD"]
-packagevars = packagevars + imagevars
+skips = args.skip
+skips += ["parser_append", "recipe_to_append", "extra_append", "to_remove", "show_appends", "applied_appends", "file_appends", "handle_remove"]
+skips += ["expanded_removes", "color_remove", "test_remove", "empty_remove", "toaster_prepend", "num_removed", "licfiles_append", "_write_append"]
+skips += ["no_report_remove", "test_prepend", "test_append", "multiple_append", "test_remove", "shallow_remove", "do_remove_layer", "first_append"]
+skips += ["parser_remove", "to_append", "no_remove", "bblayers_add_remove", "bblayers_remove", "apply_append", "is_x86", "base_dep_prepend"]
+skips += ["autotools_dep_prepend", "go_map_arm", "alt_remove_links", "systemd_append_file", "file_append", "process_file_darwin"]
+skips += ["run_loaddata_poky", "determine_if_poky_env", "do_populate_poky_src", "libc_cv_include_x86_isa_level", "test_rpm_remove", "do_install_armmultilib"]
+skips += ["get_appends_for_files", "test_doubleref_remove", "test_bitbakelayers_add_remove", "elf32_x86_64", "colour_remove", "revmap_remove"]
+skips += ["test_rpm_remove", "test_bitbakelayers_add_remove", "recipe_append_file", "log_data_removed", "recipe_append", "systemd_machine_unit_append"]
+skips += ["recipetool_append", "changetype_remove", "try_appendfile_wc", "test_qemux86_directdisk", "test_layer_appends", "tgz_removed"]
+
+imagevars = ["IMAGE_CMD", "EXTRA_IMAGECMD", "IMAGE_TYPEDEP", "CONVERSION_CMD", "COMPRESS_CMD"] + args.image_vars
+packagevars += imagevars
+
+skip_ext = [".html", ".patch", ".m4", ".diff"] + args.skip_ext
vars_re = {}
for exp in vars:
- vars_re[exp] = (re.compile('((^|[#\'"\s\-\+])[A-Za-z0-9_\-:${}\.]+)_' + exp), r"\1:" + exp)
+ vars_re[exp] = (re.compile(r'((^|[#\'"\s\-\+])[A-Za-z0-9_\-:${}\.]+)_' + exp), r"\1:" + exp)
shortvars_re = {}
for exp in shortvars:
- shortvars_re[exp] = (re.compile('((^|[#\'"\s\-\+])[A-Za-z0-9_\-:${}\.]+)_' + exp + '([\(\'"\s:])'), r"\1:" + exp + r"\3")
+ shortvars_re[exp] = (re.compile(r'((^|[#\'"\s\-\+])[A-Za-z0-9_\-:${}\.]+)_' + exp + r'([\(\'"\s:])'), r"\1:" + exp + r"\3")
package_re = {}
for exp in packagevars:
- package_re[exp] = (re.compile('(^|[#\'"\s\-\+]+)' + exp + '_' + '([$a-z"\'\s%\[<{\\\*].)'), r"\1" + exp + r":\2")
+ package_re[exp] = (re.compile(r'(^|[#\'"\s\-\+]+)' + exp + r'_' + r'([$a-z"\'\s%\[<{\\\*].)'), r"\1" + exp + r":\2")
# Other substitutions to make
subs = {
- 'r = re.compile("([^:]+):\s*(.*)")' : 'r = re.compile("(^.+?):\s+(.*)")',
+ 'r = re.compile(r"([^:]+):\s*(.*)")' : 'r = re.compile(r"(^.+?):\s+(.*)")',
"val = d.getVar('%s_%s' % (var, pkg))" : "val = d.getVar('%s:%s' % (var, pkg))",
"f.write('%s_%s: %s\\n' % (var, pkg, encode(val)))" : "f.write('%s:%s: %s\\n' % (var, pkg, encode(val)))",
"d.getVar('%s_%s' % (scriptlet_name, pkg))" : "d.getVar('%s:%s' % (scriptlet_name, pkg))",
@@ -124,21 +136,20 @@ def processfile(fn):
ourname = os.path.basename(sys.argv[0])
ourversion = "0.9.3"
-if os.path.isfile(sys.argv[1]):
- processfile(sys.argv[1])
- sys.exit(0)
-
-for targetdir in sys.argv[1:]:
- print("processing directory '%s'" % targetdir)
- for root, dirs, files in os.walk(targetdir):
- for name in files:
- if name == ourname:
- continue
- fn = os.path.join(root, name)
- if os.path.islink(fn):
- continue
- if "/.git/" in fn or fn.endswith(".html") or fn.endswith(".patch") or fn.endswith(".m4") or fn.endswith(".diff"):
- continue
- processfile(fn)
+for p in args.path:
+ if os.path.isfile(p):
+ processfile(p)
+ else:
+ print("processing directory '%s'" % p)
+ for root, dirs, files in os.walk(p):
+ for name in files:
+ if name == ourname:
+ continue
+ fn = os.path.join(root, name)
+ if os.path.islink(fn):
+ continue
+ if "/.git/" in fn or any(fn.endswith(ext) for ext in skip_ext):
+ continue
+ processfile(fn)
print("All files processed with version %s" % ourversion)
diff --git a/scripts/contrib/ddimage b/scripts/contrib/ddimage
index 7f2ad112a6..70eee8ebea 100755
--- a/scripts/contrib/ddimage
+++ b/scripts/contrib/ddimage
@@ -1,5 +1,7 @@
#!/bin/sh
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: GPL-2.0-only
#
diff --git a/scripts/contrib/dialog-power-control b/scripts/contrib/dialog-power-control
index ad6070c369..82c84baa1d 100755
--- a/scripts/contrib/dialog-power-control
+++ b/scripts/contrib/dialog-power-control
@@ -1,5 +1,7 @@
#!/bin/sh
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: GPL-2.0-only
#
# Simple script to show a manual power prompt for when you want to use
diff --git a/scripts/contrib/documentation-audit.sh b/scripts/contrib/documentation-audit.sh
index 36f7f3287c..7197a2fcea 100755
--- a/scripts/contrib/documentation-audit.sh
+++ b/scripts/contrib/documentation-audit.sh
@@ -1,5 +1,7 @@
#!/bin/bash
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: GPL-2.0-only
#
# Perform an audit of which packages provide documentation and which
diff --git a/scripts/contrib/image-manifest b/scripts/contrib/image-manifest
index 3c07a73a4e..4d65a99258 100755
--- a/scripts/contrib/image-manifest
+++ b/scripts/contrib/image-manifest
@@ -392,7 +392,7 @@ def export_manifest_info(args):
for key in rd.getVarFlags('PACKAGECONFIG').keys():
if key == 'doc':
continue
- rvalues[pn]['packageconfig_opts'][key] = rd.getVarFlag('PACKAGECONFIG', key, True)
+ rvalues[pn]['packageconfig_opts'][key] = rd.getVarFlag('PACKAGECONFIG', key)
if config['patches'] == 'yes':
patches = oe.recipeutils.get_recipe_patches(rd)
diff --git a/scripts/contrib/patchreview.py b/scripts/contrib/patchreview.py
index dc417b4c55..bceae06561 100755
--- a/scripts/contrib/patchreview.py
+++ b/scripts/contrib/patchreview.py
@@ -1,8 +1,19 @@
#! /usr/bin/env python3
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: GPL-2.0-only
#
+import argparse
+import collections
+import json
+import os
+import os.path
+import pathlib
+import re
+import subprocess
+
# TODO
# - option to just list all broken files
# - test suite
@@ -33,14 +44,12 @@ def blame_patch(patch):
From a patch filename, return a list of "commit summary (author name <author
email>)" strings representing the history.
"""
- import subprocess
return subprocess.check_output(("git", "log",
"--follow", "--find-renames", "--diff-filter=A",
"--format=%s (%aN <%aE>)",
"--", patch)).decode("utf-8").splitlines()
-def patchreview(path, patches):
- import re, os.path
+def patchreview(patches):
# General pattern: start of line, optional whitespace, tag with optional
# hyphen or spaces, maybe a colon, some whitespace, then the value, all case
@@ -54,11 +63,10 @@ def patchreview(path, patches):
for patch in patches:
- fullpath = os.path.join(path, patch)
result = Result()
- results[fullpath] = result
+ results[patch] = result
- content = open(fullpath, encoding='ascii', errors='ignore').read()
+ content = open(patch, encoding='ascii', errors='ignore').read()
# Find the Signed-off-by tag
match = sob_re.search(content)
@@ -191,29 +199,56 @@ Patches in Pending state: %s""" % (total_patches,
def histogram(results):
from toolz import recipes, dicttoolz
import math
+
counts = recipes.countby(lambda r: r.upstream_status, results.values())
bars = dicttoolz.valmap(lambda v: "#" * int(math.ceil(float(v) / len(results) * 100)), counts)
for k in bars:
print("%-20s %s (%d)" % (k.capitalize() if k else "No status", bars[k], counts[k]))
+def find_layers(candidate):
+ # candidate can either be the path to a layer directly (eg meta-intel), or a
+ # repository that contains other layers (meta-arm). We can determine what by
+ # looking for a conf/layer.conf file. If that file exists then it's a layer,
+ # otherwise its a repository of layers and we can assume they're called
+ # meta-*.
+
+ if (candidate / "conf" / "layer.conf").exists():
+ return [candidate.absolute()]
+ else:
+ return [d.absolute() for d in candidate.iterdir() if d.is_dir() and (d.name == "meta" or d.name.startswith("meta-"))]
+
+# TODO these don't actually handle dynamic-layers/
+
+def gather_patches(layers):
+ patches = []
+ for directory in layers:
+ filenames = subprocess.check_output(("git", "-C", directory, "ls-files", "recipes-*/**/*.patch", "recipes-*/**/*.diff"), universal_newlines=True).split()
+ patches += [os.path.join(directory, f) for f in filenames]
+ return patches
+
+def count_recipes(layers):
+ count = 0
+ for directory in layers:
+ output = subprocess.check_output(["git", "-C", directory, "ls-files", "recipes-*/**/*.bb"], universal_newlines=True)
+ count += len(output.splitlines())
+ return count
if __name__ == "__main__":
- import argparse, subprocess, os
-
args = argparse.ArgumentParser(description="Patch Review Tool")
args.add_argument("-b", "--blame", action="store_true", help="show blame for malformed patches")
args.add_argument("-v", "--verbose", action="store_true", help="show per-patch results")
args.add_argument("-g", "--histogram", action="store_true", help="show patch histogram")
args.add_argument("-j", "--json", help="update JSON")
- args.add_argument("directory", help="directory to scan")
+ args.add_argument("directory", type=pathlib.Path, metavar="DIRECTORY", help="directory to scan (layer, or repository of layers)")
args = args.parse_args()
- patches = subprocess.check_output(("git", "-C", args.directory, "ls-files", "recipes-*/**/*.patch", "recipes-*/**/*.diff")).decode("utf-8").split()
- results = patchreview(args.directory, patches)
+ layers = find_layers(args.directory)
+ print(f"Found layers {' '.join((d.name for d in layers))}")
+ patches = gather_patches(layers)
+ results = patchreview(patches)
analyse(results, want_blame=args.blame, verbose=args.verbose)
if args.json:
- import json, os.path, collections
if os.path.isfile(args.json):
data = json.load(open(args.json))
else:
@@ -221,7 +256,11 @@ if __name__ == "__main__":
row = collections.Counter()
row["total"] = len(results)
- row["date"] = subprocess.check_output(["git", "-C", args.directory, "show", "-s", "--pretty=format:%cd", "--date=format:%s"]).decode("utf-8").strip()
+ row["date"] = subprocess.check_output(["git", "-C", args.directory, "show", "-s", "--pretty=format:%cd", "--date=format:%s"], universal_newlines=True).strip()
+ row["commit"] = subprocess.check_output(["git", "-C", args.directory, "rev-parse", "HEAD"], universal_newlines=True).strip()
+ row['commit_count'] = subprocess.check_output(["git", "-C", args.directory, "rev-list", "--count", "HEAD"], universal_newlines=True).strip()
+ row['recipe_count'] = count_recipes(layers)
+
for r in results.values():
if r.upstream_status in status_values:
row[r.upstream_status] += 1
@@ -231,7 +270,7 @@ if __name__ == "__main__":
row['malformed-sob'] += 1
data.append(row)
- json.dump(data, open(args.json, "w"))
+ json.dump(data, open(args.json, "w"), sort_keys=True, indent="\t")
if args.histogram:
print()
diff --git a/scripts/contrib/test_build_time_worker.sh b/scripts/contrib/test_build_time_worker.sh
index 478e8b0d03..a2879d2336 100755
--- a/scripts/contrib/test_build_time_worker.sh
+++ b/scripts/contrib/test_build_time_worker.sh
@@ -1,5 +1,7 @@
#!/bin/bash
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: GPL-2.0-only
#
# This is an example script to be used in conjunction with test_build_time.sh
diff --git a/scripts/contrib/verify-homepage.py b/scripts/contrib/verify-homepage.py
index 7bffa78e23..a90b5010bc 100755
--- a/scripts/contrib/verify-homepage.py
+++ b/scripts/contrib/verify-homepage.py
@@ -1,5 +1,7 @@
#!/usr/bin/env python3
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: GPL-2.0-only
#
# This script can be used to verify HOMEPAGE values for all recipes in
diff --git a/scripts/cp-noerror b/scripts/cp-noerror
index ab617c5d35..13a098eee0 100755
--- a/scripts/cp-noerror
+++ b/scripts/cp-noerror
@@ -1,5 +1,7 @@
#!/usr/bin/env python3
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: GPL-2.0-only
#
# Allow copying of $1 to $2 but if files in $1 disappear during the copy operation,
diff --git a/scripts/create-pull-request b/scripts/create-pull-request
index 8eefcf63a5..885105fab3 100755
--- a/scripts/create-pull-request
+++ b/scripts/create-pull-request
@@ -128,7 +128,7 @@ PROTO_RE="[a-z][a-z+]*://"
GIT_RE="\(^\($PROTO_RE\)\?\)\($USER_RE@\)\?\([^:/]*\)[:/]\(.*\)"
REMOTE_URL=${REMOTE_URL%.git}
REMOTE_REPO=$(echo $REMOTE_URL | sed "s#$GIT_RE#\5#")
-REMOTE_URL=$(echo $REMOTE_URL | sed "s#$GIT_RE#git://\4/\5#")
+REMOTE_URL=$(echo $REMOTE_URL | sed "s#$GIT_RE#https://\4/\5#")
if [ -z "$BRANCH" ]; then
BRANCH=$(git branch | grep -e "^\* " | cut -d' ' -f2)
@@ -149,13 +149,10 @@ fi
WEB_URL=""
case "$REMOTE_URL" in
*git.yoctoproject.org*)
- WEB_URL="http://git.yoctoproject.org/cgit.cgi/$REMOTE_REPO/log/?h=$BRANCH"
- ;;
- *git.pokylinux.org*)
- WEB_URL="http://git.pokylinux.org/cgit.cgi/$REMOTE_REPO/log/?h=$BRANCH"
+ WEB_URL="https://git.yoctoproject.org/$REMOTE_REPO/log/?h=$BRANCH"
;;
*git.openembedded.org*)
- WEB_URL="http://cgit.openembedded.org/$REMOTE_REPO/log/?h=$BRANCH"
+ WEB_URL="https://git.openembedded.org/$REMOTE_REPO/log/?h=$BRANCH"
;;
*github.com*)
WEB_URL="https://github.com/$REMOTE_REPO/tree/$BRANCH"
diff --git a/scripts/devtool b/scripts/devtool
index af4811b922..60ea3e8298 100755
--- a/scripts/devtool
+++ b/scripts/devtool
@@ -104,6 +104,7 @@ def read_workspace():
for fn in glob.glob(os.path.join(config.workspace_path, 'appends', '*.bbappend')):
with open(fn, 'r') as f:
pnvalues = {}
+ pn = None
for line in f:
res = externalsrc_re.match(line.rstrip())
if res:
@@ -123,6 +124,9 @@ def read_workspace():
elif line.startswith('# srctreebase: '):
pnvalues['srctreebase'] = line.split(':', 1)[1].strip()
if pnvalues:
+ if not pn:
+ raise DevtoolError("Found *.bbappend in %s, but could not determine EXTERNALSRC:pn-*. "
+ "Maybe still using old syntax?" % config.workspace_path)
if not pnvalues.get('srctreebase', None):
pnvalues['srctreebase'] = pnvalues['srctree']
logger.debug('Found recipe %s' % pnvalues)
@@ -133,17 +137,27 @@ def create_workspace(args, config, basepath, workspace):
workspacedir = os.path.abspath(args.layerpath)
else:
workspacedir = os.path.abspath(os.path.join(basepath, 'workspace'))
- _create_workspace(workspacedir, config, basepath)
+ layerseries = None
+ if args.layerseries:
+ layerseries = args.layerseries
+ _create_workspace(workspacedir, config, basepath, layerseries)
if not args.create_only:
_enable_workspace_layer(workspacedir, config, basepath)
-def _create_workspace(workspacedir, config, basepath):
+def _create_workspace(workspacedir, config, basepath, layerseries=None):
import bb
confdir = os.path.join(workspacedir, 'conf')
if os.path.exists(os.path.join(confdir, 'layer.conf')):
logger.info('Specified workspace already set up, leaving as-is')
else:
+ if not layerseries:
+ tinfoil = setup_tinfoil(config_only=True, basepath=basepath)
+ try:
+ layerseries = tinfoil.config_data.getVar('LAYERSERIES_CORENAMES')
+ finally:
+ tinfoil.shutdown()
+
# Add a config file
bb.utils.mkdirhier(confdir)
with open(os.path.join(confdir, 'layer.conf'), 'w') as f:
@@ -155,7 +169,7 @@ def _create_workspace(workspacedir, config, basepath):
f.write('BBFILE_PATTERN_workspacelayer = "^$' + '{LAYERDIR}/"\n')
f.write('BBFILE_PATTERN_IGNORE_EMPTY_workspacelayer = "1"\n')
f.write('BBFILE_PRIORITY_workspacelayer = "99"\n')
- f.write('LAYERSERIES_COMPAT_workspacelayer = "${LAYERSERIES_COMPAT_core}"\n')
+ f.write('LAYERSERIES_COMPAT_workspacelayer = "%s"\n' % layerseries)
# Add a README file
with open(os.path.join(workspacedir, 'README'), 'w') as f:
f.write('This layer was created by the OpenEmbedded devtool utility in order to\n')
@@ -285,8 +299,9 @@ def main():
return 2
# Search BBPATH first to allow layers to override plugins in scripts_path
- for path in global_args.bbpath.split(':') + [scripts_path]:
- pluginpath = os.path.join(path, 'lib', 'devtool')
+ pluginpaths = [os.path.join(path, 'lib', 'devtool') for path in global_args.bbpath.split(':') + [scripts_path]]
+ context.pluginpaths = pluginpaths
+ for pluginpath in pluginpaths:
scriptutils.load_plugins(logger, plugins, pluginpath)
subparsers = parser.add_subparsers(dest="subparser_name", title='subcommands', metavar='<subcommand>')
@@ -305,6 +320,7 @@ def main():
description='Sets up a new workspace. NOTE: other devtool subcommands will create a workspace automatically as needed, so you only need to use %(prog)s if you want to specify where the workspace should be located.',
group='advanced')
parser_create_workspace.add_argument('layerpath', nargs='?', help='Path in which the workspace layer should be created')
+ parser_create_workspace.add_argument('--layerseries', help='Layer series the workspace should be set to be compatible with')
parser_create_workspace.add_argument('--create-only', action="store_true", help='Only create the workspace layer, do not alter configuration')
parser_create_workspace.set_defaults(func=create_workspace, no_workspace=True)
@@ -314,10 +330,10 @@ def main():
args = parser.parse_args(unparsed_args, namespace=global_args)
- if not getattr(args, 'no_workspace', False):
- read_workspace()
-
try:
+ if not getattr(args, 'no_workspace', False):
+ read_workspace()
+
ret = args.func(args, config, basepath, workspace)
except DevtoolError as err:
if str(err):
diff --git a/scripts/esdk-tools/devtool b/scripts/esdk-tools/devtool
new file mode 120000
index 0000000000..176a01ca68
--- /dev/null
+++ b/scripts/esdk-tools/devtool
@@ -0,0 +1 @@
+../devtool \ No newline at end of file
diff --git a/scripts/esdk-tools/oe-find-native-sysroot b/scripts/esdk-tools/oe-find-native-sysroot
new file mode 120000
index 0000000000..d3493f3310
--- /dev/null
+++ b/scripts/esdk-tools/oe-find-native-sysroot
@@ -0,0 +1 @@
+../oe-find-native-sysroot \ No newline at end of file
diff --git a/scripts/esdk-tools/recipetool b/scripts/esdk-tools/recipetool
new file mode 120000
index 0000000000..60a95dd936
--- /dev/null
+++ b/scripts/esdk-tools/recipetool
@@ -0,0 +1 @@
+../recipetool \ No newline at end of file
diff --git a/scripts/esdk-tools/runqemu b/scripts/esdk-tools/runqemu
new file mode 120000
index 0000000000..ae7e7ad7c2
--- /dev/null
+++ b/scripts/esdk-tools/runqemu
@@ -0,0 +1 @@
+../runqemu \ No newline at end of file
diff --git a/scripts/esdk-tools/runqemu-addptable2image b/scripts/esdk-tools/runqemu-addptable2image
new file mode 120000
index 0000000000..afcd00e79d
--- /dev/null
+++ b/scripts/esdk-tools/runqemu-addptable2image
@@ -0,0 +1 @@
+../runqemu-addptable2image \ No newline at end of file
diff --git a/scripts/esdk-tools/runqemu-export-rootfs b/scripts/esdk-tools/runqemu-export-rootfs
new file mode 120000
index 0000000000..a26fcf6110
--- /dev/null
+++ b/scripts/esdk-tools/runqemu-export-rootfs
@@ -0,0 +1 @@
+../runqemu-export-rootfs \ No newline at end of file
diff --git a/scripts/esdk-tools/runqemu-extract-sdk b/scripts/esdk-tools/runqemu-extract-sdk
new file mode 120000
index 0000000000..cc858aaad5
--- /dev/null
+++ b/scripts/esdk-tools/runqemu-extract-sdk
@@ -0,0 +1 @@
+../runqemu-extract-sdk \ No newline at end of file
diff --git a/scripts/esdk-tools/runqemu-gen-tapdevs b/scripts/esdk-tools/runqemu-gen-tapdevs
new file mode 120000
index 0000000000..dbdf79134c
--- /dev/null
+++ b/scripts/esdk-tools/runqemu-gen-tapdevs
@@ -0,0 +1 @@
+../runqemu-gen-tapdevs \ No newline at end of file
diff --git a/scripts/esdk-tools/runqemu-ifdown b/scripts/esdk-tools/runqemu-ifdown
new file mode 120000
index 0000000000..0097693ca3
--- /dev/null
+++ b/scripts/esdk-tools/runqemu-ifdown
@@ -0,0 +1 @@
+../runqemu-ifdown \ No newline at end of file
diff --git a/scripts/esdk-tools/runqemu-ifup b/scripts/esdk-tools/runqemu-ifup
new file mode 120000
index 0000000000..41026d2c0a
--- /dev/null
+++ b/scripts/esdk-tools/runqemu-ifup
@@ -0,0 +1 @@
+../runqemu-ifup \ No newline at end of file
diff --git a/scripts/esdk-tools/wic b/scripts/esdk-tools/wic
new file mode 120000
index 0000000000..a9d908aa25
--- /dev/null
+++ b/scripts/esdk-tools/wic
@@ -0,0 +1 @@
+../wic \ No newline at end of file
diff --git a/scripts/gen-lockedsig-cache b/scripts/gen-lockedsig-cache
index cc674f9c1b..023015ec41 100755
--- a/scripts/gen-lockedsig-cache
+++ b/scripts/gen-lockedsig-cache
@@ -1,5 +1,8 @@
#!/usr/bin/env python3
#
+#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: GPL-2.0-only
#
diff --git a/scripts/git b/scripts/git
index 644055e540..689adbf9dd 100755
--- a/scripts/git
+++ b/scripts/git
@@ -1,5 +1,9 @@
#!/usr/bin/env python3
#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
# Wrapper around 'git' that doesn't think we are root
import os
diff --git a/scripts/install-buildtools b/scripts/install-buildtools
index 10c3d043de..2218f3ffac 100755
--- a/scripts/install-buildtools
+++ b/scripts/install-buildtools
@@ -57,8 +57,8 @@ logger = scriptutils.logger_create(PROGNAME, stream=sys.stdout)
DEFAULT_INSTALL_DIR = os.path.join(os.path.split(scripts_path)[0],'buildtools')
DEFAULT_BASE_URL = 'http://downloads.yoctoproject.org/releases/yocto'
-DEFAULT_RELEASE = 'yocto-3.4'
-DEFAULT_INSTALLER_VERSION = '3.4'
+DEFAULT_RELEASE = 'yocto-4.1'
+DEFAULT_INSTALLER_VERSION = '4.1'
DEFAULT_BUILDDATE = '202110XX'
# Python version sanity check
@@ -154,6 +154,8 @@ def main():
group.add_argument('--without-extended-buildtools', action='store_false',
dest='with_extended_buildtools',
help='disable extended buildtools (traditional buildtools tarball)')
+ group.add_argument('--make-only', action='store_true',
+ help='only install make tarball')
group = parser.add_mutually_exclusive_group()
group.add_argument('-c', '--check', help='enable checksum validation',
default=True, action='store_true')
@@ -170,6 +172,9 @@ def main():
args = parser.parse_args()
+ if args.make_only:
+ args.with_extended_buildtools = False
+
if args.debug:
logger.setLevel(logging.DEBUG)
elif args.quiet:
@@ -197,7 +202,10 @@ def main():
if not args.build_date:
logger.error("Milestone installers require --build-date")
else:
- if args.with_extended_buildtools:
+ if args.make_only:
+ filename = "%s-buildtools-make-nativesdk-standalone-%s-%s.sh" % (
+ arch, args.installer_version, args.build_date)
+ elif args.with_extended_buildtools:
filename = "%s-buildtools-extended-nativesdk-standalone-%s-%s.sh" % (
arch, args.installer_version, args.build_date)
else:
@@ -207,6 +215,8 @@ def main():
buildtools_url = "%s/milestones/%s/buildtools/%s" % (base_url, args.release, safe_filename)
# regular release SDK
else:
+ if args.make_only:
+ filename = "%s-buildtools-make-nativesdk-standalone-%s.sh" % (arch, args.installer_version)
if args.with_extended_buildtools:
filename = "%s-buildtools-extended-nativesdk-standalone-%s.sh" % (arch, args.installer_version)
else:
@@ -303,7 +313,9 @@ def main():
if args.with_extended_buildtools and not m:
logger.info("Ignoring --with-extended-buildtools as filename "
"does not contain 'extended'")
- if args.with_extended_buildtools and m:
+ if args.make_only:
+ tool = 'make'
+ elif args.with_extended_buildtools and m:
tool = 'gcc'
else:
tool = 'tar'
diff --git a/scripts/lib/argparse_oe.py b/scripts/lib/argparse_oe.py
index 94a4ac5011..176b732bbc 100644
--- a/scripts/lib/argparse_oe.py
+++ b/scripts/lib/argparse_oe.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: GPL-2.0-only
#
diff --git a/scripts/lib/buildstats.py b/scripts/lib/buildstats.py
index c69b5bf4d7..6db60d5bcf 100644
--- a/scripts/lib/buildstats.py
+++ b/scripts/lib/buildstats.py
@@ -8,7 +8,7 @@ import json
import logging
import os
import re
-from collections import namedtuple,OrderedDict
+from collections import namedtuple
from statistics import mean
@@ -79,8 +79,8 @@ class BSTask(dict):
return self['rusage']['ru_oublock']
@classmethod
- def from_file(cls, buildstat_file):
- """Read buildstat text file"""
+ def from_file(cls, buildstat_file, fallback_end=0):
+ """Read buildstat text file. fallback_end is an optional end time for tasks that are not recorded as finishing."""
bs_task = cls()
log.debug("Reading task buildstats from %s", buildstat_file)
end_time = None
@@ -108,7 +108,10 @@ class BSTask(dict):
bs_task[ru_type][ru_key] = val
elif key == 'Status':
bs_task['status'] = val
- if end_time is not None and start_time is not None:
+ # If the task didn't finish, fill in the fallback end time if specified
+ if start_time and not end_time and fallback_end:
+ end_time = fallback_end
+ if start_time and end_time:
bs_task['elapsed_time'] = end_time - start_time
else:
raise BSError("{} looks like a invalid buildstats file".format(buildstat_file))
@@ -226,25 +229,44 @@ class BuildStats(dict):
epoch = match.group('epoch')
return name, epoch, version, revision
+ @staticmethod
+ def parse_top_build_stats(path):
+ """
+ Parse the top-level build_stats file for build-wide start and duration.
+ """
+ start = elapsed = 0
+ with open(path) as fobj:
+ for line in fobj.readlines():
+ key, val = line.split(':', 1)
+ val = val.strip()
+ if key == 'Build Started':
+ start = float(val)
+ elif key == "Elapsed time":
+ elapsed = float(val.split()[0])
+ return start, elapsed
+
@classmethod
def from_dir(cls, path):
"""Load buildstats from a buildstats directory"""
- if not os.path.isfile(os.path.join(path, 'build_stats')):
+ top_stats = os.path.join(path, 'build_stats')
+ if not os.path.isfile(top_stats):
raise BSError("{} does not look like a buildstats directory".format(path))
log.debug("Reading buildstats directory %s", path)
-
buildstats = cls()
+ build_started, build_elapsed = buildstats.parse_top_build_stats(top_stats)
+ build_end = build_started + build_elapsed
+
subdirs = os.listdir(path)
for dirname in subdirs:
recipe_dir = os.path.join(path, dirname)
- if not os.path.isdir(recipe_dir):
+ if dirname == "reduced_proc_pressure" or not os.path.isdir(recipe_dir):
continue
name, epoch, version, revision = cls.split_nevr(dirname)
bsrecipe = BSRecipe(name, epoch, version, revision)
for task in os.listdir(recipe_dir):
bsrecipe.tasks[task] = BSTask.from_file(
- os.path.join(recipe_dir, task))
+ os.path.join(recipe_dir, task), build_end)
if name in buildstats:
raise BSError("Cannot handle multiple versions of the same "
"package ({})".format(name))
diff --git a/scripts/lib/checklayer/__init__.py b/scripts/lib/checklayer/__init__.py
index aa946f3036..62ecdfe390 100644
--- a/scripts/lib/checklayer/__init__.py
+++ b/scripts/lib/checklayer/__init__.py
@@ -16,6 +16,7 @@ class LayerType(Enum):
BSP = 0
DISTRO = 1
SOFTWARE = 2
+ CORE = 3
ERROR_NO_LAYER_CONF = 98
ERROR_BSP_DISTRO = 99
@@ -43,7 +44,7 @@ def _get_layer_collections(layer_path, lconf=None, data=None):
ldata.setVar('LAYERDIR', layer_path)
try:
- ldata = bb.parse.handle(lconf, ldata, include=True)
+ ldata = bb.parse.handle(lconf, ldata, include=True, baseconfig=True)
except:
raise RuntimeError("Parsing of layer.conf from layer: %s failed" % layer_path)
ldata.expandVarref('LAYERDIR')
@@ -106,7 +107,13 @@ def _detect_layer(layer_path):
if distros:
is_distro = True
- if is_bsp and is_distro:
+ layer['collections'] = _get_layer_collections(layer['path'])
+
+ if layer_name == "meta" and "core" in layer['collections']:
+ layer['type'] = LayerType.CORE
+ layer['conf']['machines'] = machines
+ layer['conf']['distros'] = distros
+ elif is_bsp and is_distro:
layer['type'] = LayerType.ERROR_BSP_DISTRO
elif is_bsp:
layer['type'] = LayerType.BSP
@@ -117,8 +124,6 @@ def _detect_layer(layer_path):
else:
layer['type'] = LayerType.SOFTWARE
- layer['collections'] = _get_layer_collections(layer['path'])
-
return layer
def detect_layers(layer_directories, no_auto):
@@ -302,7 +307,7 @@ def get_signatures(builddir, failsafe=False, machine=None, extravars=None):
cmd += 'bitbake '
if failsafe:
cmd += '-k '
- cmd += '-S none world'
+ cmd += '-S lockedsigs world'
sigs_file = os.path.join(builddir, 'locked-sigs.inc')
if os.path.exists(sigs_file):
os.unlink(sigs_file)
@@ -319,8 +324,8 @@ def get_signatures(builddir, failsafe=False, machine=None, extravars=None):
else:
raise
- sig_regex = re.compile("^(?P<task>.*:.*):(?P<hash>.*) .$")
- tune_regex = re.compile("(^|\s)SIGGEN_LOCKEDSIGS_t-(?P<tune>\S*)\s*=\s*")
+ sig_regex = re.compile(r"^(?P<task>.*:.*):(?P<hash>.*) .$")
+ tune_regex = re.compile(r"(^|\s)SIGGEN_LOCKEDSIGS_t-(?P<tune>\S*)\s*=\s*")
current_tune = None
with open(sigs_file, 'r') as f:
for line in f.readlines():
diff --git a/scripts/lib/checklayer/cases/bsp.py b/scripts/lib/checklayer/cases/bsp.py
index a80a5844da..b76163fb56 100644
--- a/scripts/lib/checklayer/cases/bsp.py
+++ b/scripts/lib/checklayer/cases/bsp.py
@@ -11,7 +11,7 @@ from checklayer.case import OECheckLayerTestCase
class BSPCheckLayer(OECheckLayerTestCase):
@classmethod
def setUpClass(self):
- if self.tc.layer['type'] != LayerType.BSP:
+ if self.tc.layer['type'] not in (LayerType.BSP, LayerType.CORE):
raise unittest.SkipTest("BSPCheckLayer: Layer %s isn't BSP one." %\
self.tc.layer['name'])
diff --git a/scripts/lib/checklayer/cases/common.py b/scripts/lib/checklayer/cases/common.py
index 491a13953c..97b16f78c8 100644
--- a/scripts/lib/checklayer/cases/common.py
+++ b/scripts/lib/checklayer/cases/common.py
@@ -12,6 +12,9 @@ from checklayer.case import OECheckLayerTestCase
class CommonCheckLayer(OECheckLayerTestCase):
def test_readme(self):
+ if self.tc.layer['type'] == LayerType.CORE:
+ raise unittest.SkipTest("Core layer's README is top level")
+
# The top-level README file may have a suffix (like README.rst or README.txt).
readme_files = glob.glob(os.path.join(self.tc.layer['path'], '[Rr][Ee][Aa][Dd][Mm][Ee]*'))
self.assertTrue(len(readme_files) > 0,
@@ -69,6 +72,21 @@ class CommonCheckLayer(OECheckLayerTestCase):
self.tc.layer['name'])
self.fail('\n'.join(msg))
+ @unittest.expectedFailure
+ def test_patches_upstream_status(self):
+ import sys
+ sys.path.append(os.path.join(sys.path[0], '../../../../meta/lib/'))
+ import oe.qa
+ patches = []
+ for dirpath, dirs, files in os.walk(self.tc.layer['path']):
+ for filename in files:
+ if filename.endswith(".patch"):
+ ppath = os.path.join(dirpath, filename)
+ if oe.qa.check_upstream_status(ppath):
+ patches.append(ppath)
+ self.assertEqual(len(patches), 0 , \
+ msg="Found following patches with malformed or missing upstream status:\n%s" % '\n'.join([str(patch) for patch in patches]))
+
def test_signatures(self):
if self.tc.layer['type'] == LayerType.SOFTWARE and \
not self.tc.test_software_layer_signatures:
diff --git a/scripts/lib/checklayer/cases/distro.py b/scripts/lib/checklayer/cases/distro.py
index f0bee5493c..a35332451c 100644
--- a/scripts/lib/checklayer/cases/distro.py
+++ b/scripts/lib/checklayer/cases/distro.py
@@ -11,7 +11,7 @@ from checklayer.case import OECheckLayerTestCase
class DistroCheckLayer(OECheckLayerTestCase):
@classmethod
def setUpClass(self):
- if self.tc.layer['type'] != LayerType.DISTRO:
+ if self.tc.layer['type'] not in (LayerType.DISTRO, LayerType.CORE):
raise unittest.SkipTest("DistroCheckLayer: Layer %s isn't Distro one." %\
self.tc.layer['name'])
diff --git a/scripts/lib/devtool/__init__.py b/scripts/lib/devtool/__init__.py
index 702db669de..6133c1c5b4 100644
--- a/scripts/lib/devtool/__init__.py
+++ b/scripts/lib/devtool/__init__.py
@@ -78,12 +78,15 @@ def exec_fakeroot(d, cmd, **kwargs):
"""Run a command under fakeroot (pseudo, in fact) so that it picks up the appropriate file permissions"""
# Grab the command and check it actually exists
fakerootcmd = d.getVar('FAKEROOTCMD')
+ fakerootenv = d.getVar('FAKEROOTENV')
+ exec_fakeroot_no_d(fakerootcmd, fakerootenv, cmd, kwargs)
+
+def exec_fakeroot_no_d(fakerootcmd, fakerootenv, cmd, **kwargs):
if not os.path.exists(fakerootcmd):
logger.error('pseudo executable %s could not be found - have you run a build yet? pseudo-native should install this and if you have run any build then that should have been built')
return 2
# Set up the appropriate environment
newenv = dict(os.environ)
- fakerootenv = d.getVar('FAKEROOTENV')
for varvalue in fakerootenv.split():
if '=' in varvalue:
splitval = varvalue.split('=', 1)
@@ -233,6 +236,28 @@ def setup_git_repo(repodir, version, devbranch, basetag='devtool-base', d=None):
bb.process.run('git checkout -b %s' % devbranch, cwd=repodir)
bb.process.run('git tag -f %s' % basetag, cwd=repodir)
+ # if recipe unpacks another git repo inside S, we need to declare it as a regular git submodule now,
+ # so we will be able to tag branches on it and extract patches when doing finish/update on the recipe
+ stdout, _ = bb.process.run("git status --porcelain", cwd=repodir)
+ found = False
+ for line in stdout.splitlines():
+ if line.endswith("/"):
+ new_dir = line.split()[1]
+ for root, dirs, files in os.walk(os.path.join(repodir, new_dir)):
+ if ".git" in dirs + files:
+ (stdout, _) = bb.process.run('git remote', cwd=root)
+ remote = stdout.splitlines()[0]
+ (stdout, _) = bb.process.run('git remote get-url %s' % remote, cwd=root)
+ remote_url = stdout.splitlines()[0]
+ logger.error(os.path.relpath(os.path.join(root, ".."), root))
+ bb.process.run('git submodule add %s %s' % (remote_url, os.path.relpath(root, os.path.join(root, ".."))), cwd=os.path.join(root, ".."))
+ found = True
+ if found:
+ oe.patch.GitApplyTree.commitIgnored("Add additional submodule from SRC_URI", dir=os.path.join(root, ".."), d=d)
+ found = False
+ if os.path.exists(os.path.join(repodir, '.gitmodules')):
+ bb.process.run('git submodule foreach --recursive "git tag -f %s"' % basetag, cwd=repodir)
+
def recipe_to_append(recipefile, config, wildcard=False):
"""
Convert a recipe file to a bbappend file path within the workspace.
diff --git a/scripts/lib/devtool/build_sdk.py b/scripts/lib/devtool/build_sdk.py
index 6fe02fff2a..1cd4831d2b 100644
--- a/scripts/lib/devtool/build_sdk.py
+++ b/scripts/lib/devtool/build_sdk.py
@@ -13,7 +13,7 @@ import shutil
import errno
import sys
import tempfile
-from devtool import exec_build_env_command, setup_tinfoil, parse_recipe, DevtoolError
+from devtool import DevtoolError
from devtool import build_image
logger = logging.getLogger('devtool')
diff --git a/scripts/lib/devtool/deploy.py b/scripts/lib/devtool/deploy.py
index e14a587417..b5ca8f2c2f 100644
--- a/scripts/lib/devtool/deploy.py
+++ b/scripts/lib/devtool/deploy.py
@@ -16,7 +16,7 @@ import bb.utils
import argparse_oe
import oe.types
-from devtool import exec_fakeroot, setup_tinfoil, check_workspace_recipe, DevtoolError
+from devtool import exec_fakeroot_no_d, setup_tinfoil, check_workspace_recipe, DevtoolError
logger = logging.getLogger('devtool')
@@ -133,16 +133,38 @@ def _prepare_remote_script(deploy, verbose=False, dryrun=False, undeployall=Fals
return '\n'.join(lines)
-
-
def deploy(args, config, basepath, workspace):
"""Entry point for the devtool 'deploy' subcommand"""
- import math
- import oe.recipeutils
- import oe.package
+ import oe.utils
check_workspace_recipe(workspace, args.recipename, checksrc=False)
+ tinfoil = setup_tinfoil(basepath=basepath)
+ try:
+ try:
+ rd = tinfoil.parse_recipe(args.recipename)
+ except Exception as e:
+ raise DevtoolError('Exception parsing recipe %s: %s' %
+ (args.recipename, e))
+
+ srcdir = rd.getVar('D')
+ workdir = rd.getVar('WORKDIR')
+ path = rd.getVar('PATH')
+ strip_cmd = rd.getVar('STRIP')
+ libdir = rd.getVar('libdir')
+ base_libdir = rd.getVar('base_libdir')
+ max_process = oe.utils.get_bb_number_threads(rd)
+ fakerootcmd = rd.getVar('FAKEROOTCMD')
+ fakerootenv = rd.getVar('FAKEROOTENV')
+ finally:
+ tinfoil.shutdown()
+
+ return deploy_no_d(srcdir, workdir, path, strip_cmd, libdir, base_libdir, max_process, fakerootcmd, fakerootenv, args)
+
+def deploy_no_d(srcdir, workdir, path, strip_cmd, libdir, base_libdir, max_process, fakerootcmd, fakerootenv, args):
+ import math
+ import oe.package
+
try:
host, destdir = args.target.split(':')
except ValueError:
@@ -152,118 +174,108 @@ def deploy(args, config, basepath, workspace):
if not destdir.endswith('/'):
destdir += '/'
- tinfoil = setup_tinfoil(basepath=basepath)
- try:
- try:
- rd = tinfoil.parse_recipe(args.recipename)
- except Exception as e:
- raise DevtoolError('Exception parsing recipe %s: %s' %
- (args.recipename, e))
- recipe_outdir = rd.getVar('D')
- if not os.path.exists(recipe_outdir) or not os.listdir(recipe_outdir):
- raise DevtoolError('No files to deploy - have you built the %s '
- 'recipe? If so, the install step has not installed '
- 'any files.' % args.recipename)
-
- if args.strip and not args.dry_run:
- # Fakeroot copy to new destination
- srcdir = recipe_outdir
- recipe_outdir = os.path.join(rd.getVar('WORKDIR'), 'devtool-deploy-target-stripped')
- if os.path.isdir(recipe_outdir):
- exec_fakeroot(rd, "rm -rf %s" % recipe_outdir, shell=True)
- exec_fakeroot(rd, "cp -af %s %s" % (os.path.join(srcdir, '.'), recipe_outdir), shell=True)
- os.environ['PATH'] = ':'.join([os.environ['PATH'], rd.getVar('PATH') or ''])
- oe.package.strip_execs(args.recipename, recipe_outdir, rd.getVar('STRIP'), rd.getVar('libdir'),
- rd.getVar('base_libdir'), rd)
-
- filelist = []
- inodes = set({})
- ftotalsize = 0
- for root, _, files in os.walk(recipe_outdir):
- for fn in files:
- fstat = os.lstat(os.path.join(root, fn))
- # Get the size in kiB (since we'll be comparing it to the output of du -k)
- # MUST use lstat() here not stat() or getfilesize() since we don't want to
- # dereference symlinks
- if fstat.st_ino in inodes:
- fsize = 0
- else:
- fsize = int(math.ceil(float(fstat.st_size)/1024))
- inodes.add(fstat.st_ino)
- ftotalsize += fsize
- # The path as it would appear on the target
- fpath = os.path.join(destdir, os.path.relpath(root, recipe_outdir), fn)
- filelist.append((fpath, fsize))
-
- if args.dry_run:
- print('Files to be deployed for %s on target %s:' % (args.recipename, args.target))
- for item, _ in filelist:
- print(' %s' % item)
- return 0
-
- extraoptions = ''
- if args.no_host_check:
- extraoptions += '-o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no'
- if not args.show_status:
- extraoptions += ' -q'
-
- scp_sshexec = ''
- ssh_sshexec = 'ssh'
- if args.ssh_exec:
- scp_sshexec = "-S %s" % args.ssh_exec
- ssh_sshexec = args.ssh_exec
- scp_port = ''
- ssh_port = ''
- if args.port:
- scp_port = "-P %s" % args.port
- ssh_port = "-p %s" % args.port
-
- if args.key:
- extraoptions += ' -i %s' % args.key
-
- # In order to delete previously deployed files and have the manifest file on
- # the target, we write out a shell script and then copy it to the target
- # so we can then run it (piping tar output to it).
- # (We cannot use scp here, because it doesn't preserve symlinks.)
- tmpdir = tempfile.mkdtemp(prefix='devtool')
- try:
- tmpscript = '/tmp/devtool_deploy.sh'
- tmpfilelist = os.path.join(os.path.dirname(tmpscript), 'devtool_deploy.list')
- shellscript = _prepare_remote_script(deploy=True,
- verbose=args.show_status,
- nopreserve=args.no_preserve,
- nocheckspace=args.no_check_space)
- # Write out the script to a file
- with open(os.path.join(tmpdir, os.path.basename(tmpscript)), 'w') as f:
- f.write(shellscript)
- # Write out the file list
- with open(os.path.join(tmpdir, os.path.basename(tmpfilelist)), 'w') as f:
- f.write('%d\n' % ftotalsize)
- for fpath, fsize in filelist:
- f.write('%s %d\n' % (fpath, fsize))
- # Copy them to the target
- ret = subprocess.call("scp %s %s %s %s/* %s:%s" % (scp_sshexec, scp_port, extraoptions, tmpdir, args.target, os.path.dirname(tmpscript)), shell=True)
- if ret != 0:
- raise DevtoolError('Failed to copy script to %s - rerun with -s to '
- 'get a complete error message' % args.target)
- finally:
- shutil.rmtree(tmpdir)
+ recipe_outdir = srcdir
+ if not os.path.exists(recipe_outdir) or not os.listdir(recipe_outdir):
+ raise DevtoolError('No files to deploy - have you built the %s '
+ 'recipe? If so, the install step has not installed '
+ 'any files.' % args.recipename)
+
+ if args.strip and not args.dry_run:
+ # Fakeroot copy to new destination
+ srcdir = recipe_outdir
+ recipe_outdir = os.path.join(workdir, 'devtool-deploy-target-stripped')
+ if os.path.isdir(recipe_outdir):
+ exec_fakeroot_no_d(fakerootcmd, fakerootenv, "rm -rf %s" % recipe_outdir, shell=True)
+ exec_fakeroot_no_d(fakerootcmd, fakerootenv, "cp -af %s %s" % (os.path.join(srcdir, '.'), recipe_outdir), shell=True)
+ os.environ['PATH'] = ':'.join([os.environ['PATH'], path or ''])
+ oe.package.strip_execs(args.recipename, recipe_outdir, strip_cmd, libdir, base_libdir, max_process)
+
+ filelist = []
+ inodes = set({})
+ ftotalsize = 0
+ for root, _, files in os.walk(recipe_outdir):
+ for fn in files:
+ fstat = os.lstat(os.path.join(root, fn))
+ # Get the size in kiB (since we'll be comparing it to the output of du -k)
+ # MUST use lstat() here not stat() or getfilesize() since we don't want to
+ # dereference symlinks
+ if fstat.st_ino in inodes:
+ fsize = 0
+ else:
+ fsize = int(math.ceil(float(fstat.st_size)/1024))
+ inodes.add(fstat.st_ino)
+ ftotalsize += fsize
+ # The path as it would appear on the target
+ fpath = os.path.join(destdir, os.path.relpath(root, recipe_outdir), fn)
+ filelist.append((fpath, fsize))
+
+ if args.dry_run:
+ print('Files to be deployed for %s on target %s:' % (args.recipename, args.target))
+ for item, _ in filelist:
+ print(' %s' % item)
+ return 0
- # Now run the script
- ret = exec_fakeroot(rd, 'tar cf - . | %s %s %s %s \'sh %s %s %s %s\'' % (ssh_sshexec, ssh_port, extraoptions, args.target, tmpscript, args.recipename, destdir, tmpfilelist), cwd=recipe_outdir, shell=True)
- if ret != 0:
- raise DevtoolError('Deploy failed - rerun with -s to get a complete '
- 'error message')
+ extraoptions = ''
+ if args.no_host_check:
+ extraoptions += '-o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no'
+ if not args.show_status:
+ extraoptions += ' -q'
- logger.info('Successfully deployed %s' % recipe_outdir)
+ scp_sshexec = ''
+ ssh_sshexec = 'ssh'
+ if args.ssh_exec:
+ scp_sshexec = "-S %s" % args.ssh_exec
+ ssh_sshexec = args.ssh_exec
+ scp_port = ''
+ ssh_port = ''
+ if args.port:
+ scp_port = "-P %s" % args.port
+ ssh_port = "-p %s" % args.port
+
+ if args.key:
+ extraoptions += ' -i %s' % args.key
- files_list = []
- for root, _, files in os.walk(recipe_outdir):
- for filename in files:
- filename = os.path.relpath(os.path.join(root, filename), recipe_outdir)
- files_list.append(os.path.join(destdir, filename))
+ # In order to delete previously deployed files and have the manifest file on
+ # the target, we write out a shell script and then copy it to the target
+ # so we can then run it (piping tar output to it).
+ # (We cannot use scp here, because it doesn't preserve symlinks.)
+ tmpdir = tempfile.mkdtemp(prefix='devtool')
+ try:
+ tmpscript = '/tmp/devtool_deploy.sh'
+ tmpfilelist = os.path.join(os.path.dirname(tmpscript), 'devtool_deploy.list')
+ shellscript = _prepare_remote_script(deploy=True,
+ verbose=args.show_status,
+ nopreserve=args.no_preserve,
+ nocheckspace=args.no_check_space)
+ # Write out the script to a file
+ with open(os.path.join(tmpdir, os.path.basename(tmpscript)), 'w') as f:
+ f.write(shellscript)
+ # Write out the file list
+ with open(os.path.join(tmpdir, os.path.basename(tmpfilelist)), 'w') as f:
+ f.write('%d\n' % ftotalsize)
+ for fpath, fsize in filelist:
+ f.write('%s %d\n' % (fpath, fsize))
+ # Copy them to the target
+ ret = subprocess.call("scp %s %s %s %s/* %s:%s" % (scp_sshexec, scp_port, extraoptions, tmpdir, args.target, os.path.dirname(tmpscript)), shell=True)
+ if ret != 0:
+ raise DevtoolError('Failed to copy script to %s - rerun with -s to '
+ 'get a complete error message' % args.target)
finally:
- tinfoil.shutdown()
+ shutil.rmtree(tmpdir)
+
+ # Now run the script
+ ret = exec_fakeroot_no_d(fakerootcmd, fakerootenv, 'tar cf - . | %s %s %s %s \'sh %s %s %s %s\'' % (ssh_sshexec, ssh_port, extraoptions, args.target, tmpscript, args.recipename, destdir, tmpfilelist), cwd=recipe_outdir, shell=True)
+ if ret != 0:
+ raise DevtoolError('Deploy failed - rerun with -s to get a complete '
+ 'error message')
+
+ logger.info('Successfully deployed %s' % recipe_outdir)
+
+ files_list = []
+ for root, _, files in os.walk(recipe_outdir):
+ for filename in files:
+ filename = os.path.relpath(os.path.join(root, filename), recipe_outdir)
+ files_list.append(os.path.join(destdir, filename))
return 0
diff --git a/scripts/lib/devtool/ide_plugins/__init__.py b/scripts/lib/devtool/ide_plugins/__init__.py
new file mode 100644
index 0000000000..19c2f61c5f
--- /dev/null
+++ b/scripts/lib/devtool/ide_plugins/__init__.py
@@ -0,0 +1,282 @@
+#
+# Copyright (C) 2023-2024 Siemens AG
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+"""Devtool ide-sdk IDE plugin interface definition and helper functions"""
+
+import errno
+import json
+import logging
+import os
+import stat
+from enum import Enum, auto
+from devtool import DevtoolError
+from bb.utils import mkdirhier
+
+logger = logging.getLogger('devtool')
+
+
+class BuildTool(Enum):
+ UNDEFINED = auto()
+ CMAKE = auto()
+ MESON = auto()
+
+ @property
+ def is_c_ccp(self):
+ if self is BuildTool.CMAKE:
+ return True
+ if self is BuildTool.MESON:
+ return True
+ return False
+
+
+class GdbCrossConfig:
+ """Base class defining the GDB configuration generator interface
+
+ Generate a GDB configuration for a binary on the target device.
+ Only one instance per binary is allowed. This allows to assign unique port
+ numbers for all gdbserver instances.
+ """
+ _gdbserver_port_next = 1234
+ _binaries = []
+
+ def __init__(self, image_recipe, modified_recipe, binary, gdbserver_multi=True):
+ self.image_recipe = image_recipe
+ self.modified_recipe = modified_recipe
+ self.gdb_cross = modified_recipe.gdb_cross
+ self.binary = binary
+ if binary in GdbCrossConfig._binaries:
+ raise DevtoolError(
+ "gdbserver config for binary %s is already generated" % binary)
+ GdbCrossConfig._binaries.append(binary)
+ self.script_dir = modified_recipe.ide_sdk_scripts_dir
+ self.gdbinit_dir = os.path.join(self.script_dir, 'gdbinit')
+ self.gdbserver_multi = gdbserver_multi
+ self.binary_pretty = self.binary.replace(os.sep, '-').lstrip('-')
+ self.gdbserver_port = GdbCrossConfig._gdbserver_port_next
+ GdbCrossConfig._gdbserver_port_next += 1
+ self.id_pretty = "%d_%s" % (self.gdbserver_port, self.binary_pretty)
+ # gdbserver start script
+ gdbserver_script_file = 'gdbserver_' + self.id_pretty
+ if self.gdbserver_multi:
+ gdbserver_script_file += "_m"
+ self.gdbserver_script = os.path.join(
+ self.script_dir, gdbserver_script_file)
+ # gdbinit file
+ self.gdbinit = os.path.join(
+ self.gdbinit_dir, 'gdbinit_' + self.id_pretty)
+ # gdb start script
+ self.gdb_script = os.path.join(
+ self.script_dir, 'gdb_' + self.id_pretty)
+
+ def _gen_gdbserver_start_script(self):
+ """Generate a shell command starting the gdbserver on the remote device via ssh
+
+ GDB supports two modes:
+ multi: gdbserver remains running over several debug sessions
+ once: gdbserver terminates after the debugged process terminates
+ """
+ cmd_lines = ['#!/bin/sh']
+ if self.gdbserver_multi:
+ temp_dir = "TEMP_DIR=/tmp/gdbserver_%s; " % self.id_pretty
+ gdbserver_cmd_start = temp_dir
+ gdbserver_cmd_start += "test -f \\$TEMP_DIR/pid && exit 0; "
+ gdbserver_cmd_start += "mkdir -p \\$TEMP_DIR; "
+ gdbserver_cmd_start += "%s --multi :%s > \\$TEMP_DIR/log 2>&1 & " % (
+ self.gdb_cross.gdbserver_path, self.gdbserver_port)
+ gdbserver_cmd_start += "echo \\$! > \\$TEMP_DIR/pid;"
+
+ gdbserver_cmd_stop = temp_dir
+ gdbserver_cmd_stop += "test -f \\$TEMP_DIR/pid && kill \\$(cat \\$TEMP_DIR/pid); "
+ gdbserver_cmd_stop += "rm -rf \\$TEMP_DIR; "
+
+ gdbserver_cmd_l = []
+ gdbserver_cmd_l.append('if [ "$1" = "stop" ]; then')
+ gdbserver_cmd_l.append(' shift')
+ gdbserver_cmd_l.append(" %s %s %s %s 'sh -c \"%s\"'" % (
+ self.gdb_cross.target_device.ssh_sshexec, self.gdb_cross.target_device.ssh_port, self.gdb_cross.target_device.extraoptions, self.gdb_cross.target_device.target, gdbserver_cmd_stop))
+ gdbserver_cmd_l.append('else')
+ gdbserver_cmd_l.append(" %s %s %s %s 'sh -c \"%s\"'" % (
+ self.gdb_cross.target_device.ssh_sshexec, self.gdb_cross.target_device.ssh_port, self.gdb_cross.target_device.extraoptions, self.gdb_cross.target_device.target, gdbserver_cmd_start))
+ gdbserver_cmd_l.append('fi')
+ gdbserver_cmd = os.linesep.join(gdbserver_cmd_l)
+ else:
+ gdbserver_cmd_start = "%s --once :%s %s" % (
+ self.gdb_cross.gdbserver_path, self.gdbserver_port, self.binary)
+ gdbserver_cmd = "%s %s %s %s 'sh -c \"%s\"'" % (
+ self.gdb_cross.target_device.ssh_sshexec, self.gdb_cross.target_device.ssh_port, self.gdb_cross.target_device.extraoptions, self.gdb_cross.target_device.target, gdbserver_cmd_start)
+ cmd_lines.append(gdbserver_cmd)
+ GdbCrossConfig.write_file(self.gdbserver_script, cmd_lines, True)
+
+ def _gen_gdbinit_config(self):
+ """Generate a gdbinit file for this binary and the corresponding gdbserver configuration"""
+ gdbinit_lines = ['# This file is generated by devtool ide-sdk']
+ if self.gdbserver_multi:
+ target_help = '# gdbserver --multi :%d' % self.gdbserver_port
+ remote_cmd = 'target extended-remote'
+ else:
+ target_help = '# gdbserver :%d %s' % (
+ self.gdbserver_port, self.binary)
+ remote_cmd = 'target remote'
+ gdbinit_lines.append('# On the remote target:')
+ gdbinit_lines.append(target_help)
+ gdbinit_lines.append('# On the build machine:')
+ gdbinit_lines.append('# cd ' + self.modified_recipe.real_srctree)
+ gdbinit_lines.append(
+ '# ' + self.gdb_cross.gdb + ' -ix ' + self.gdbinit)
+
+ gdbinit_lines.append('set sysroot ' + self.modified_recipe.d)
+ gdbinit_lines.append('set substitute-path "/usr/include" "' +
+ os.path.join(self.modified_recipe.recipe_sysroot, 'usr', 'include') + '"')
+ # Disable debuginfod for now, the IDE configuration uses rootfs-dbg from the image workdir.
+ gdbinit_lines.append('set debuginfod enabled off')
+ if self.image_recipe.rootfs_dbg:
+ gdbinit_lines.append(
+ 'set solib-search-path "' + self.modified_recipe.solib_search_path_str(self.image_recipe) + '"')
+ # First: Search for sources of this recipe in the workspace folder
+ if self.modified_recipe.pn in self.modified_recipe.target_dbgsrc_dir:
+ gdbinit_lines.append('set substitute-path "%s" "%s"' %
+ (self.modified_recipe.target_dbgsrc_dir, self.modified_recipe.real_srctree))
+ else:
+ logger.error(
+ "TARGET_DBGSRC_DIR must contain the recipe name PN.")
+ # Second: Search for sources of other recipes in the rootfs-dbg
+ if self.modified_recipe.target_dbgsrc_dir.startswith("/usr/src/debug"):
+ gdbinit_lines.append('set substitute-path "/usr/src/debug" "%s"' % os.path.join(
+ self.image_recipe.rootfs_dbg, "usr", "src", "debug"))
+ else:
+ logger.error(
+ "TARGET_DBGSRC_DIR must start with /usr/src/debug.")
+ else:
+ logger.warning(
+ "Cannot setup debug symbols configuration for GDB. IMAGE_GEN_DEBUGFS is not enabled.")
+ gdbinit_lines.append(
+ '%s %s:%d' % (remote_cmd, self.gdb_cross.host, self.gdbserver_port))
+ gdbinit_lines.append('set remote exec-file ' + self.binary)
+ gdbinit_lines.append(
+ 'run ' + os.path.join(self.modified_recipe.d, self.binary))
+
+ GdbCrossConfig.write_file(self.gdbinit, gdbinit_lines)
+
+ def _gen_gdb_start_script(self):
+ """Generate a script starting GDB with the corresponding gdbinit configuration."""
+ cmd_lines = ['#!/bin/sh']
+ cmd_lines.append('cd ' + self.modified_recipe.real_srctree)
+ cmd_lines.append(self.gdb_cross.gdb + ' -ix ' +
+ self.gdbinit + ' "$@"')
+ GdbCrossConfig.write_file(self.gdb_script, cmd_lines, True)
+
+ def initialize(self):
+ self._gen_gdbserver_start_script()
+ self._gen_gdbinit_config()
+ self._gen_gdb_start_script()
+
+ @staticmethod
+ def write_file(script_file, cmd_lines, executable=False):
+ script_dir = os.path.dirname(script_file)
+ mkdirhier(script_dir)
+ with open(script_file, 'w') as script_f:
+ script_f.write(os.linesep.join(cmd_lines))
+ script_f.write(os.linesep)
+ if executable:
+ st = os.stat(script_file)
+ os.chmod(script_file, st.st_mode | stat.S_IEXEC)
+ logger.info("Created: %s" % script_file)
+
+
+class IdeBase:
+ """Base class defining the interface for IDE plugins"""
+
+ def __init__(self):
+ self.ide_name = 'undefined'
+ self.gdb_cross_configs = []
+
+ @classmethod
+ def ide_plugin_priority(cls):
+ """Used to find the default ide handler if --ide is not passed"""
+ return 10
+
+ def setup_shared_sysroots(self, shared_env):
+ logger.warn("Shared sysroot mode is not supported for IDE %s" %
+ self.ide_name)
+
+ def setup_modified_recipe(self, args, image_recipe, modified_recipe):
+ logger.warn("Modified recipe mode is not supported for IDE %s" %
+ self.ide_name)
+
+ def initialize_gdb_cross_configs(self, image_recipe, modified_recipe, gdb_cross_config_class=GdbCrossConfig):
+ binaries = modified_recipe.find_installed_binaries()
+ for binary in binaries:
+ gdb_cross_config = gdb_cross_config_class(
+ image_recipe, modified_recipe, binary)
+ gdb_cross_config.initialize()
+ self.gdb_cross_configs.append(gdb_cross_config)
+
+ @staticmethod
+ def gen_oe_scrtips_sym_link(modified_recipe):
+ # create a sym-link from sources to the scripts directory
+ if os.path.isdir(modified_recipe.ide_sdk_scripts_dir):
+ IdeBase.symlink_force(modified_recipe.ide_sdk_scripts_dir,
+ os.path.join(modified_recipe.real_srctree, 'oe-scripts'))
+
+ @staticmethod
+ def update_json_file(json_dir, json_file, update_dict):
+ """Update a json file
+
+ By default it uses the dict.update function. If this is not sutiable
+ the update function might be passed via update_func parameter.
+ """
+ json_path = os.path.join(json_dir, json_file)
+ logger.info("Updating IDE config file: %s (%s)" %
+ (json_file, json_path))
+ if not os.path.exists(json_dir):
+ os.makedirs(json_dir)
+ try:
+ with open(json_path) as f:
+ orig_dict = json.load(f)
+ except json.decoder.JSONDecodeError:
+ logger.info(
+ "Decoding %s failed. Probably because of comments in the json file" % json_path)
+ orig_dict = {}
+ except FileNotFoundError:
+ orig_dict = {}
+ orig_dict.update(update_dict)
+ with open(json_path, 'w') as f:
+ json.dump(orig_dict, f, indent=4)
+
+ @staticmethod
+ def symlink_force(tgt, dst):
+ try:
+ os.symlink(tgt, dst)
+ except OSError as err:
+ if err.errno == errno.EEXIST:
+ if os.readlink(dst) != tgt:
+ os.remove(dst)
+ os.symlink(tgt, dst)
+ else:
+ raise err
+
+
+def get_devtool_deploy_opts(args):
+ """Filter args for devtool deploy-target args"""
+ if not args.target:
+ return None
+ devtool_deploy_opts = [args.target]
+ if args.no_host_check:
+ devtool_deploy_opts += ["-c"]
+ if args.show_status:
+ devtool_deploy_opts += ["-s"]
+ if args.no_preserve:
+ devtool_deploy_opts += ["-p"]
+ if args.no_check_space:
+ devtool_deploy_opts += ["--no-check-space"]
+ if args.ssh_exec:
+ devtool_deploy_opts += ["-e", args.ssh.exec]
+ if args.port:
+ devtool_deploy_opts += ["-P", args.port]
+ if args.key:
+ devtool_deploy_opts += ["-I", args.key]
+ if args.strip is False:
+ devtool_deploy_opts += ["--no-strip"]
+ return devtool_deploy_opts
diff --git a/scripts/lib/devtool/ide_plugins/ide_code.py b/scripts/lib/devtool/ide_plugins/ide_code.py
new file mode 100644
index 0000000000..a62b93224e
--- /dev/null
+++ b/scripts/lib/devtool/ide_plugins/ide_code.py
@@ -0,0 +1,463 @@
+#
+# Copyright (C) 2023-2024 Siemens AG
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+"""Devtool ide-sdk IDE plugin for VSCode and VSCodium"""
+
+import json
+import logging
+import os
+import shutil
+from devtool.ide_plugins import BuildTool, IdeBase, GdbCrossConfig, get_devtool_deploy_opts
+
+logger = logging.getLogger('devtool')
+
+
+class GdbCrossConfigVSCode(GdbCrossConfig):
+ def __init__(self, image_recipe, modified_recipe, binary):
+ super().__init__(image_recipe, modified_recipe, binary, False)
+
+ def initialize(self):
+ self._gen_gdbserver_start_script()
+
+
+class IdeVSCode(IdeBase):
+ """Manage IDE configurations for VSCode
+
+ Modified recipe mode:
+ - cmake: use the cmake-preset generated by devtool ide-sdk
+ - meson: meson is called via a wrapper script generated by devtool ide-sdk
+
+ Shared sysroot mode:
+ In shared sysroot mode, the cross tool-chain is exported to the user's global configuration.
+ A workspace cannot be created because there is no recipe that defines how a workspace could
+ be set up.
+ - cmake: adds a cmake-kit to .local/share/CMakeTools/cmake-tools-kits.json
+ The cmake-kit uses the environment script and the tool-chain file
+ generated by meta-ide-support.
+ - meson: Meson needs manual workspace configuration.
+ """
+
+ @classmethod
+ def ide_plugin_priority(cls):
+ """If --ide is not passed this is the default plugin"""
+ if shutil.which('code'):
+ return 100
+ return 0
+
+ def setup_shared_sysroots(self, shared_env):
+ """Expose the toolchain of the shared sysroots SDK"""
+ datadir = shared_env.ide_support.datadir
+ deploy_dir_image = shared_env.ide_support.deploy_dir_image
+ real_multimach_target_sys = shared_env.ide_support.real_multimach_target_sys
+ standalone_sysroot_native = shared_env.build_sysroots.standalone_sysroot_native
+ vscode_ws_path = os.path.join(
+ os.environ['HOME'], '.local', 'share', 'CMakeTools')
+ cmake_kits_path = os.path.join(vscode_ws_path, 'cmake-tools-kits.json')
+ oecmake_generator = "Ninja"
+ env_script = os.path.join(
+ deploy_dir_image, 'environment-setup-' + real_multimach_target_sys)
+
+ if not os.path.isdir(vscode_ws_path):
+ os.makedirs(vscode_ws_path)
+ cmake_kits_old = []
+ if os.path.exists(cmake_kits_path):
+ with open(cmake_kits_path, 'r', encoding='utf-8') as cmake_kits_file:
+ cmake_kits_old = json.load(cmake_kits_file)
+ cmake_kits = cmake_kits_old.copy()
+
+ cmake_kit_new = {
+ "name": "OE " + real_multimach_target_sys,
+ "environmentSetupScript": env_script,
+ "toolchainFile": standalone_sysroot_native + datadir + "/cmake/OEToolchainConfig.cmake",
+ "preferredGenerator": {
+ "name": oecmake_generator
+ }
+ }
+
+ def merge_kit(cmake_kits, cmake_kit_new):
+ i = 0
+ while i < len(cmake_kits):
+ if 'environmentSetupScript' in cmake_kits[i] and \
+ cmake_kits[i]['environmentSetupScript'] == cmake_kit_new['environmentSetupScript']:
+ cmake_kits[i] = cmake_kit_new
+ return
+ i += 1
+ cmake_kits.append(cmake_kit_new)
+ merge_kit(cmake_kits, cmake_kit_new)
+
+ if cmake_kits != cmake_kits_old:
+ logger.info("Updating: %s" % cmake_kits_path)
+ with open(cmake_kits_path, 'w', encoding='utf-8') as cmake_kits_file:
+ json.dump(cmake_kits, cmake_kits_file, indent=4)
+ else:
+ logger.info("Already up to date: %s" % cmake_kits_path)
+
+ cmake_native = os.path.join(
+ shared_env.build_sysroots.standalone_sysroot_native, 'usr', 'bin', 'cmake')
+ if os.path.isfile(cmake_native):
+ logger.info('cmake-kits call cmake by default. If the cmake provided by this SDK should be used, please add the following line to ".vscode/settings.json" file: "cmake.cmakePath": "%s"' % cmake_native)
+ else:
+ logger.error("Cannot find cmake native at: %s" % cmake_native)
+
+ def dot_code_dir(self, modified_recipe):
+ return os.path.join(modified_recipe.srctree, '.vscode')
+
+ def __vscode_settings_meson(self, settings_dict, modified_recipe):
+ if modified_recipe.build_tool is not BuildTool.MESON:
+ return
+ settings_dict["mesonbuild.mesonPath"] = modified_recipe.meson_wrapper
+
+ confopts = modified_recipe.mesonopts.split()
+ confopts += modified_recipe.meson_cross_file.split()
+ confopts += modified_recipe.extra_oemeson.split()
+ settings_dict["mesonbuild.configureOptions"] = confopts
+ settings_dict["mesonbuild.buildFolder"] = modified_recipe.b
+
+ def __vscode_settings_cmake(self, settings_dict, modified_recipe):
+ """Add cmake specific settings to settings.json.
+
+ Note: most settings are passed to the cmake preset.
+ """
+ if modified_recipe.build_tool is not BuildTool.CMAKE:
+ return
+ settings_dict["cmake.configureOnOpen"] = True
+ settings_dict["cmake.sourceDirectory"] = modified_recipe.real_srctree
+
+ def vscode_settings(self, modified_recipe, image_recipe):
+ files_excludes = {
+ "**/.git/**": True,
+ "**/oe-logs/**": True,
+ "**/oe-workdir/**": True,
+ "**/source-date-epoch/**": True
+ }
+ python_exclude = [
+ "**/.git/**",
+ "**/oe-logs/**",
+ "**/oe-workdir/**",
+ "**/source-date-epoch/**"
+ ]
+ files_readonly = {
+ modified_recipe.recipe_sysroot + '/**': True,
+ modified_recipe.recipe_sysroot_native + '/**': True,
+ }
+ if image_recipe.rootfs_dbg is not None:
+ files_readonly[image_recipe.rootfs_dbg + '/**'] = True
+ settings_dict = {
+ "files.watcherExclude": files_excludes,
+ "files.exclude": files_excludes,
+ "files.readonlyInclude": files_readonly,
+ "python.analysis.exclude": python_exclude
+ }
+ self.__vscode_settings_cmake(settings_dict, modified_recipe)
+ self.__vscode_settings_meson(settings_dict, modified_recipe)
+
+ settings_file = 'settings.json'
+ IdeBase.update_json_file(
+ self.dot_code_dir(modified_recipe), settings_file, settings_dict)
+
+ def __vscode_extensions_cmake(self, modified_recipe, recommendations):
+ if modified_recipe.build_tool is not BuildTool.CMAKE:
+ return
+ recommendations += [
+ "twxs.cmake",
+ "ms-vscode.cmake-tools",
+ "ms-vscode.cpptools",
+ "ms-vscode.cpptools-extension-pack",
+ "ms-vscode.cpptools-themes"
+ ]
+
+ def __vscode_extensions_meson(self, modified_recipe, recommendations):
+ if modified_recipe.build_tool is not BuildTool.MESON:
+ return
+ recommendations += [
+ 'mesonbuild.mesonbuild',
+ "ms-vscode.cpptools",
+ "ms-vscode.cpptools-extension-pack",
+ "ms-vscode.cpptools-themes"
+ ]
+
+ def vscode_extensions(self, modified_recipe):
+ recommendations = []
+ self.__vscode_extensions_cmake(modified_recipe, recommendations)
+ self.__vscode_extensions_meson(modified_recipe, recommendations)
+ extensions_file = 'extensions.json'
+ IdeBase.update_json_file(
+ self.dot_code_dir(modified_recipe), extensions_file, {"recommendations": recommendations})
+
+ def vscode_c_cpp_properties(self, modified_recipe):
+ properties_dict = {
+ "name": modified_recipe.recipe_id_pretty,
+ }
+ if modified_recipe.build_tool is BuildTool.CMAKE:
+ properties_dict["configurationProvider"] = "ms-vscode.cmake-tools"
+ elif modified_recipe.build_tool is BuildTool.MESON:
+ properties_dict["configurationProvider"] = "mesonbuild.mesonbuild"
+ properties_dict["compilerPath"] = os.path.join(modified_recipe.staging_bindir_toolchain, modified_recipe.cxx.split()[0])
+ else: # no C/C++ build
+ return
+
+ properties_dicts = {
+ "configurations": [
+ properties_dict
+ ],
+ "version": 4
+ }
+ prop_file = 'c_cpp_properties.json'
+ IdeBase.update_json_file(
+ self.dot_code_dir(modified_recipe), prop_file, properties_dicts)
+
+ def vscode_launch_bin_dbg(self, gdb_cross_config):
+ modified_recipe = gdb_cross_config.modified_recipe
+
+ launch_config = {
+ "name": gdb_cross_config.id_pretty,
+ "type": "cppdbg",
+ "request": "launch",
+ "program": os.path.join(modified_recipe.d, gdb_cross_config.binary.lstrip('/')),
+ "stopAtEntry": True,
+ "cwd": "${workspaceFolder}",
+ "environment": [],
+ "externalConsole": False,
+ "MIMode": "gdb",
+ "preLaunchTask": gdb_cross_config.id_pretty,
+ "miDebuggerPath": modified_recipe.gdb_cross.gdb,
+ "miDebuggerServerAddress": "%s:%d" % (modified_recipe.gdb_cross.host, gdb_cross_config.gdbserver_port)
+ }
+
+ # Search for header files in recipe-sysroot.
+ src_file_map = {
+ "/usr/include": os.path.join(modified_recipe.recipe_sysroot, "usr", "include")
+ }
+ # First of all search for not stripped binaries in the image folder.
+ # These binaries are copied (and optionally stripped) by deploy-target
+ setup_commands = [
+ {
+ "description": "sysroot",
+ "text": "set sysroot " + modified_recipe.d
+ }
+ ]
+
+ if gdb_cross_config.image_recipe.rootfs_dbg:
+ launch_config['additionalSOLibSearchPath'] = modified_recipe.solib_search_path_str(
+ gdb_cross_config.image_recipe)
+ # First: Search for sources of this recipe in the workspace folder
+ if modified_recipe.pn in modified_recipe.target_dbgsrc_dir:
+ src_file_map[modified_recipe.target_dbgsrc_dir] = "${workspaceFolder}"
+ else:
+ logger.error(
+ "TARGET_DBGSRC_DIR must contain the recipe name PN.")
+ # Second: Search for sources of other recipes in the rootfs-dbg
+ if modified_recipe.target_dbgsrc_dir.startswith("/usr/src/debug"):
+ src_file_map["/usr/src/debug"] = os.path.join(
+ gdb_cross_config.image_recipe.rootfs_dbg, "usr", "src", "debug")
+ else:
+ logger.error(
+ "TARGET_DBGSRC_DIR must start with /usr/src/debug.")
+ else:
+ logger.warning(
+ "Cannot setup debug symbols configuration for GDB. IMAGE_GEN_DEBUGFS is not enabled.")
+
+ launch_config['sourceFileMap'] = src_file_map
+ launch_config['setupCommands'] = setup_commands
+ return launch_config
+
+ def vscode_launch(self, modified_recipe):
+ """GDB Launch configuration for binaries (elf files)"""
+
+ configurations = []
+ for gdb_cross_config in self.gdb_cross_configs:
+ if gdb_cross_config.modified_recipe is modified_recipe:
+ configurations.append(self.vscode_launch_bin_dbg(gdb_cross_config))
+ launch_dict = {
+ "version": "0.2.0",
+ "configurations": configurations
+ }
+ launch_file = 'launch.json'
+ IdeBase.update_json_file(
+ self.dot_code_dir(modified_recipe), launch_file, launch_dict)
+
+ def vscode_tasks_cpp(self, args, modified_recipe):
+ run_install_deploy = modified_recipe.gen_install_deploy_script(args)
+ install_task_name = "install && deploy-target %s" % modified_recipe.recipe_id_pretty
+ tasks_dict = {
+ "version": "2.0.0",
+ "tasks": [
+ {
+ "label": install_task_name,
+ "type": "shell",
+ "command": run_install_deploy,
+ "problemMatcher": []
+ }
+ ]
+ }
+ for gdb_cross_config in self.gdb_cross_configs:
+ if gdb_cross_config.modified_recipe is not modified_recipe:
+ continue
+ tasks_dict['tasks'].append(
+ {
+ "label": gdb_cross_config.id_pretty,
+ "type": "shell",
+ "isBackground": True,
+ "dependsOn": [
+ install_task_name
+ ],
+ "command": gdb_cross_config.gdbserver_script,
+ "problemMatcher": [
+ {
+ "pattern": [
+ {
+ "regexp": ".",
+ "file": 1,
+ "location": 2,
+ "message": 3
+ }
+ ],
+ "background": {
+ "activeOnStart": True,
+ "beginsPattern": ".",
+ "endsPattern": ".",
+ }
+ }
+ ]
+ })
+ tasks_file = 'tasks.json'
+ IdeBase.update_json_file(
+ self.dot_code_dir(modified_recipe), tasks_file, tasks_dict)
+
+ def vscode_tasks_fallback(self, args, modified_recipe):
+ oe_init_dir = modified_recipe.oe_init_dir
+ oe_init = ". %s %s > /dev/null && " % (modified_recipe.oe_init_build_env, modified_recipe.topdir)
+ dt_build = "devtool build "
+ dt_build_label = dt_build + modified_recipe.recipe_id_pretty
+ dt_build_cmd = dt_build + modified_recipe.bpn
+ clean_opt = " --clean"
+ dt_build_clean_label = dt_build + modified_recipe.recipe_id_pretty + clean_opt
+ dt_build_clean_cmd = dt_build + modified_recipe.bpn + clean_opt
+ dt_deploy = "devtool deploy-target "
+ dt_deploy_label = dt_deploy + modified_recipe.recipe_id_pretty
+ dt_deploy_cmd = dt_deploy + modified_recipe.bpn
+ dt_build_deploy_label = "devtool build & deploy-target %s" % modified_recipe.recipe_id_pretty
+ deploy_opts = ' '.join(get_devtool_deploy_opts(args))
+ tasks_dict = {
+ "version": "2.0.0",
+ "tasks": [
+ {
+ "label": dt_build_label,
+ "type": "shell",
+ "command": "bash",
+ "linux": {
+ "options": {
+ "cwd": oe_init_dir
+ }
+ },
+ "args": [
+ "--login",
+ "-c",
+ "%s%s" % (oe_init, dt_build_cmd)
+ ],
+ "problemMatcher": []
+ },
+ {
+ "label": dt_deploy_label,
+ "type": "shell",
+ "command": "bash",
+ "linux": {
+ "options": {
+ "cwd": oe_init_dir
+ }
+ },
+ "args": [
+ "--login",
+ "-c",
+ "%s%s %s" % (
+ oe_init, dt_deploy_cmd, deploy_opts)
+ ],
+ "problemMatcher": []
+ },
+ {
+ "label": dt_build_deploy_label,
+ "dependsOrder": "sequence",
+ "dependsOn": [
+ dt_build_label,
+ dt_deploy_label
+ ],
+ "problemMatcher": [],
+ "group": {
+ "kind": "build",
+ "isDefault": True
+ }
+ },
+ {
+ "label": dt_build_clean_label,
+ "type": "shell",
+ "command": "bash",
+ "linux": {
+ "options": {
+ "cwd": oe_init_dir
+ }
+ },
+ "args": [
+ "--login",
+ "-c",
+ "%s%s" % (oe_init, dt_build_clean_cmd)
+ ],
+ "problemMatcher": []
+ }
+ ]
+ }
+ if modified_recipe.gdb_cross:
+ for gdb_cross_config in self.gdb_cross_configs:
+ if gdb_cross_config.modified_recipe is not modified_recipe:
+ continue
+ tasks_dict['tasks'].append(
+ {
+ "label": gdb_cross_config.id_pretty,
+ "type": "shell",
+ "isBackground": True,
+ "dependsOn": [
+ dt_build_deploy_label
+ ],
+ "command": gdb_cross_config.gdbserver_script,
+ "problemMatcher": [
+ {
+ "pattern": [
+ {
+ "regexp": ".",
+ "file": 1,
+ "location": 2,
+ "message": 3
+ }
+ ],
+ "background": {
+ "activeOnStart": True,
+ "beginsPattern": ".",
+ "endsPattern": ".",
+ }
+ }
+ ]
+ })
+ tasks_file = 'tasks.json'
+ IdeBase.update_json_file(
+ self.dot_code_dir(modified_recipe), tasks_file, tasks_dict)
+
+ def vscode_tasks(self, args, modified_recipe):
+ if modified_recipe.build_tool.is_c_ccp:
+ self.vscode_tasks_cpp(args, modified_recipe)
+ else:
+ self.vscode_tasks_fallback(args, modified_recipe)
+
+ def setup_modified_recipe(self, args, image_recipe, modified_recipe):
+ self.vscode_settings(modified_recipe, image_recipe)
+ self.vscode_extensions(modified_recipe)
+ self.vscode_c_cpp_properties(modified_recipe)
+ if args.target:
+ self.initialize_gdb_cross_configs(
+ image_recipe, modified_recipe, gdb_cross_config_class=GdbCrossConfigVSCode)
+ self.vscode_launch(modified_recipe)
+ self.vscode_tasks(args, modified_recipe)
+
+
+def register_ide_plugin(ide_plugins):
+ ide_plugins['code'] = IdeVSCode
diff --git a/scripts/lib/devtool/ide_plugins/ide_none.py b/scripts/lib/devtool/ide_plugins/ide_none.py
new file mode 100644
index 0000000000..f106c5a026
--- /dev/null
+++ b/scripts/lib/devtool/ide_plugins/ide_none.py
@@ -0,0 +1,53 @@
+#
+# Copyright (C) 2023-2024 Siemens AG
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+"""Devtool ide-sdk generic IDE plugin"""
+
+import os
+import logging
+from devtool.ide_plugins import IdeBase, GdbCrossConfig
+
+logger = logging.getLogger('devtool')
+
+
+class IdeNone(IdeBase):
+ """Generate some generic helpers for other IDEs
+
+ Modified recipe mode:
+ Generate some helper scripts for remote debugging with GDB
+
+ Shared sysroot mode:
+ A wrapper for bitbake meta-ide-support and bitbake build-sysroots
+ """
+
+ def __init__(self):
+ super().__init__()
+
+ def setup_shared_sysroots(self, shared_env):
+ real_multimach_target_sys = shared_env.ide_support.real_multimach_target_sys
+ deploy_dir_image = shared_env.ide_support.deploy_dir_image
+ env_script = os.path.join(
+ deploy_dir_image, 'environment-setup-' + real_multimach_target_sys)
+ logger.info(
+ "To use this SDK please source this: %s" % env_script)
+
+ def setup_modified_recipe(self, args, image_recipe, modified_recipe):
+ """generate some helper scripts and config files
+
+ - Execute the do_install task
+ - Execute devtool deploy-target
+ - Generate a gdbinit file per executable
+ - Generate the oe-scripts sym-link
+ """
+ script_path = modified_recipe.gen_install_deploy_script(args)
+ logger.info("Created: %s" % script_path)
+
+ self.initialize_gdb_cross_configs(image_recipe, modified_recipe)
+
+ IdeBase.gen_oe_scrtips_sym_link(modified_recipe)
+
+
+def register_ide_plugin(ide_plugins):
+ ide_plugins['none'] = IdeNone
diff --git a/scripts/lib/devtool/ide_sdk.py b/scripts/lib/devtool/ide_sdk.py
new file mode 100755
index 0000000000..7807b322b3
--- /dev/null
+++ b/scripts/lib/devtool/ide_sdk.py
@@ -0,0 +1,1070 @@
+# Development tool - ide-sdk command plugin
+#
+# Copyright (C) 2023-2024 Siemens AG
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+"""Devtool ide-sdk plugin"""
+
+import json
+import logging
+import os
+import re
+import shutil
+import stat
+import subprocess
+import sys
+from argparse import RawTextHelpFormatter
+from enum import Enum
+
+import scriptutils
+import bb
+from devtool import exec_build_env_command, setup_tinfoil, check_workspace_recipe, DevtoolError, parse_recipe
+from devtool.standard import get_real_srctree
+from devtool.ide_plugins import BuildTool
+
+
+logger = logging.getLogger('devtool')
+
+# dict of classes derived from IdeBase
+ide_plugins = {}
+
+
+class DevtoolIdeMode(Enum):
+ """Different modes are supported by the ide-sdk plugin.
+
+ The enum might be extended by more advanced modes in the future. Some ideas:
+ - auto: modified if all recipes are modified, shared if none of the recipes is modified.
+ - mixed: modified mode for modified recipes, shared mode for all other recipes.
+ """
+
+ modified = 'modified'
+ shared = 'shared'
+
+
+class TargetDevice:
+ """SSH remote login parameters"""
+
+ def __init__(self, args):
+ self.extraoptions = ''
+ if args.no_host_check:
+ self.extraoptions += '-o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no'
+ self.ssh_sshexec = 'ssh'
+ if args.ssh_exec:
+ self.ssh_sshexec = args.ssh_exec
+ self.ssh_port = ''
+ if args.port:
+ self.ssh_port = "-p %s" % args.port
+ if args.key:
+ self.extraoptions += ' -i %s' % args.key
+
+ self.target = args.target
+ target_sp = args.target.split('@')
+ if len(target_sp) == 1:
+ self.login = ""
+ self.host = target_sp[0]
+ elif len(target_sp) == 2:
+ self.login = target_sp[0]
+ self.host = target_sp[1]
+ else:
+ logger.error("Invalid target argument: %s" % args.target)
+
+
+class RecipeNative:
+ """Base class for calling bitbake to provide a -native recipe"""
+
+ def __init__(self, name, target_arch=None):
+ self.name = name
+ self.target_arch = target_arch
+ self.bootstrap_tasks = [self.name + ':do_addto_recipe_sysroot']
+ self.staging_bindir_native = None
+ self.target_sys = None
+ self.__native_bin = None
+
+ def _initialize(self, config, workspace, tinfoil):
+ """Get the parsed recipe"""
+ recipe_d = parse_recipe(
+ config, tinfoil, self.name, appends=True, filter_workspace=False)
+ if not recipe_d:
+ raise DevtoolError("Parsing %s recipe failed" % self.name)
+ self.staging_bindir_native = os.path.realpath(
+ recipe_d.getVar('STAGING_BINDIR_NATIVE'))
+ self.target_sys = recipe_d.getVar('TARGET_SYS')
+ return recipe_d
+
+ def initialize(self, config, workspace, tinfoil):
+ """Basic initialization that can be overridden by a derived class"""
+ self._initialize(config, workspace, tinfoil)
+
+ @property
+ def native_bin(self):
+ if not self.__native_bin:
+ raise DevtoolError("native binary name is not defined.")
+ return self.__native_bin
+
+
+class RecipeGdbCross(RecipeNative):
+ """Handle handle gdb-cross on the host and the gdbserver on the target device"""
+
+ def __init__(self, args, target_arch, target_device):
+ super().__init__('gdb-cross-' + target_arch, target_arch)
+ self.target_device = target_device
+ self.gdb = None
+ self.gdbserver_port_next = int(args.gdbserver_port_start)
+ self.config_db = {}
+
+ def __find_gdbserver(self, config, tinfoil):
+ """Absolute path of the gdbserver"""
+ recipe_d_gdb = parse_recipe(
+ config, tinfoil, 'gdb', appends=True, filter_workspace=False)
+ if not recipe_d_gdb:
+ raise DevtoolError("Parsing gdb recipe failed")
+ return os.path.join(recipe_d_gdb.getVar('bindir'), 'gdbserver')
+
+ def initialize(self, config, workspace, tinfoil):
+ super()._initialize(config, workspace, tinfoil)
+ gdb_bin = self.target_sys + '-gdb'
+ gdb_path = os.path.join(
+ self.staging_bindir_native, self.target_sys, gdb_bin)
+ self.gdb = gdb_path
+ self.gdbserver_path = self.__find_gdbserver(config, tinfoil)
+
+ @property
+ def host(self):
+ return self.target_device.host
+
+
+class RecipeImage:
+ """Handle some image recipe related properties
+
+ Most workflows require firmware that runs on the target device.
+ This firmware must be consistent with the setup of the host system.
+ In particular, the debug symbols must be compatible. For this, the
+ rootfs must be created as part of the SDK.
+ """
+
+ def __init__(self, name):
+ self.combine_dbg_image = False
+ self.gdbserver_missing = False
+ self.name = name
+ self.rootfs = None
+ self.__rootfs_dbg = None
+ self.bootstrap_tasks = [self.name + ':do_build']
+
+ def initialize(self, config, tinfoil):
+ image_d = parse_recipe(
+ config, tinfoil, self.name, appends=True, filter_workspace=False)
+ if not image_d:
+ raise DevtoolError(
+ "Parsing image recipe %s failed" % self.name)
+
+ self.combine_dbg_image = bb.data.inherits_class(
+ 'image-combined-dbg', image_d)
+
+ workdir = image_d.getVar('WORKDIR')
+ self.rootfs = os.path.join(workdir, 'rootfs')
+ if image_d.getVar('IMAGE_GEN_DEBUGFS') == "1":
+ self.__rootfs_dbg = os.path.join(workdir, 'rootfs-dbg')
+
+ self.gdbserver_missing = 'gdbserver' not in image_d.getVar(
+ 'IMAGE_INSTALL')
+
+ @property
+ def debug_support(self):
+ return bool(self.rootfs_dbg)
+
+ @property
+ def rootfs_dbg(self):
+ if self.__rootfs_dbg and os.path.isdir(self.__rootfs_dbg):
+ return self.__rootfs_dbg
+ return None
+
+
+class RecipeMetaIdeSupport:
+ """For the shared sysroots mode meta-ide-support is needed
+
+ For use cases where just a cross tool-chain is required but
+ no recipe is used, devtool ide-sdk abstracts calling bitbake meta-ide-support
+ and bitbake build-sysroots. This also allows to expose the cross-toolchains
+ to IDEs. For example VSCode support different tool-chains with e.g. cmake-kits.
+ """
+
+ def __init__(self):
+ self.bootstrap_tasks = ['meta-ide-support:do_build']
+ self.topdir = None
+ self.datadir = None
+ self.deploy_dir_image = None
+ self.build_sys = None
+ # From toolchain-scripts
+ self.real_multimach_target_sys = None
+
+ def initialize(self, config, tinfoil):
+ meta_ide_support_d = parse_recipe(
+ config, tinfoil, 'meta-ide-support', appends=True, filter_workspace=False)
+ if not meta_ide_support_d:
+ raise DevtoolError("Parsing meta-ide-support recipe failed")
+
+ self.topdir = meta_ide_support_d.getVar('TOPDIR')
+ self.datadir = meta_ide_support_d.getVar('datadir')
+ self.deploy_dir_image = meta_ide_support_d.getVar(
+ 'DEPLOY_DIR_IMAGE')
+ self.build_sys = meta_ide_support_d.getVar('BUILD_SYS')
+ self.real_multimach_target_sys = meta_ide_support_d.getVar(
+ 'REAL_MULTIMACH_TARGET_SYS')
+
+
+class RecipeBuildSysroots:
+ """For the shared sysroots mode build-sysroots is needed"""
+
+ def __init__(self):
+ self.standalone_sysroot = None
+ self.standalone_sysroot_native = None
+ self.bootstrap_tasks = [
+ 'build-sysroots:do_build_target_sysroot',
+ 'build-sysroots:do_build_native_sysroot'
+ ]
+
+ def initialize(self, config, tinfoil):
+ build_sysroots_d = parse_recipe(
+ config, tinfoil, 'build-sysroots', appends=True, filter_workspace=False)
+ if not build_sysroots_d:
+ raise DevtoolError("Parsing build-sysroots recipe failed")
+ self.standalone_sysroot = build_sysroots_d.getVar(
+ 'STANDALONE_SYSROOT')
+ self.standalone_sysroot_native = build_sysroots_d.getVar(
+ 'STANDALONE_SYSROOT_NATIVE')
+
+
+class SharedSysrootsEnv:
+ """Handle the shared sysroots based workflow
+
+ Support the workflow with just a tool-chain without a recipe.
+ It's basically like:
+ bitbake some-dependencies
+ bitbake meta-ide-support
+ bitbake build-sysroots
+ Use the environment-* file found in the deploy folder
+ """
+
+ def __init__(self):
+ self.ide_support = None
+ self.build_sysroots = None
+
+ def initialize(self, ide_support, build_sysroots):
+ self.ide_support = ide_support
+ self.build_sysroots = build_sysroots
+
+ def setup_ide(self, ide):
+ ide.setup(self)
+
+
+class RecipeNotModified:
+ """Handling of recipes added to the Direct DSK shared sysroots."""
+
+ def __init__(self, name):
+ self.name = name
+ self.bootstrap_tasks = [name + ':do_populate_sysroot']
+
+
+class RecipeModified:
+ """Handling of recipes in the workspace created by devtool modify"""
+ OE_INIT_BUILD_ENV = 'oe-init-build-env'
+
+ VALID_BASH_ENV_NAME_CHARS = re.compile(r"^[a-zA-Z0-9_]*$")
+
+ def __init__(self, name):
+ self.name = name
+ self.bootstrap_tasks = [name + ':do_install']
+ self.gdb_cross = None
+ # workspace
+ self.real_srctree = None
+ self.srctree = None
+ self.ide_sdk_dir = None
+ self.ide_sdk_scripts_dir = None
+ self.bbappend = None
+ # recipe variables from d.getVar
+ self.b = None
+ self.base_libdir = None
+ self.bblayers = None
+ self.bpn = None
+ self.d = None
+ self.fakerootcmd = None
+ self.fakerootenv = None
+ self.libdir = None
+ self.max_process = None
+ self.package_arch = None
+ self.package_debug_split_style = None
+ self.path = None
+ self.pn = None
+ self.recipe_sysroot = None
+ self.recipe_sysroot_native = None
+ self.staging_incdir = None
+ self.strip_cmd = None
+ self.target_arch = None
+ self.target_dbgsrc_dir = None
+ self.topdir = None
+ self.workdir = None
+ self.recipe_id = None
+ # replicate bitbake build environment
+ self.exported_vars = None
+ self.cmd_compile = None
+ self.__oe_init_dir = None
+ # main build tool used by this recipe
+ self.build_tool = BuildTool.UNDEFINED
+ # build_tool = cmake
+ self.oecmake_generator = None
+ self.cmake_cache_vars = None
+ # build_tool = meson
+ self.meson_buildtype = None
+ self.meson_wrapper = None
+ self.mesonopts = None
+ self.extra_oemeson = None
+ self.meson_cross_file = None
+
+ def initialize(self, config, workspace, tinfoil):
+ recipe_d = parse_recipe(
+ config, tinfoil, self.name, appends=True, filter_workspace=False)
+ if not recipe_d:
+ raise DevtoolError("Parsing %s recipe failed" % self.name)
+
+ # Verify this recipe is built as externalsrc setup by devtool modify
+ workspacepn = check_workspace_recipe(
+ workspace, self.name, bbclassextend=True)
+ self.srctree = workspace[workspacepn]['srctree']
+ # Need to grab this here in case the source is within a subdirectory
+ self.real_srctree = get_real_srctree(
+ self.srctree, recipe_d.getVar('S'), recipe_d.getVar('WORKDIR'))
+ self.bbappend = workspace[workspacepn]['bbappend']
+
+ self.ide_sdk_dir = os.path.join(
+ config.workspace_path, 'ide-sdk', self.name)
+ if os.path.exists(self.ide_sdk_dir):
+ shutil.rmtree(self.ide_sdk_dir)
+ self.ide_sdk_scripts_dir = os.path.join(self.ide_sdk_dir, 'scripts')
+
+ self.b = recipe_d.getVar('B')
+ self.base_libdir = recipe_d.getVar('base_libdir')
+ self.bblayers = recipe_d.getVar('BBLAYERS').split()
+ self.bpn = recipe_d.getVar('BPN')
+ self.cxx = recipe_d.getVar('CXX')
+ self.d = recipe_d.getVar('D')
+ self.fakerootcmd = recipe_d.getVar('FAKEROOTCMD')
+ self.fakerootenv = recipe_d.getVar('FAKEROOTENV')
+ self.libdir = recipe_d.getVar('libdir')
+ self.max_process = int(recipe_d.getVar(
+ "BB_NUMBER_THREADS") or os.cpu_count() or 1)
+ self.package_arch = recipe_d.getVar('PACKAGE_ARCH')
+ self.package_debug_split_style = recipe_d.getVar(
+ 'PACKAGE_DEBUG_SPLIT_STYLE')
+ self.path = recipe_d.getVar('PATH')
+ self.pn = recipe_d.getVar('PN')
+ self.recipe_sysroot = os.path.realpath(
+ recipe_d.getVar('RECIPE_SYSROOT'))
+ self.recipe_sysroot_native = os.path.realpath(
+ recipe_d.getVar('RECIPE_SYSROOT_NATIVE'))
+ self.staging_bindir_toolchain = os.path.realpath(
+ recipe_d.getVar('STAGING_BINDIR_TOOLCHAIN'))
+ self.staging_incdir = os.path.realpath(
+ recipe_d.getVar('STAGING_INCDIR'))
+ self.strip_cmd = recipe_d.getVar('STRIP')
+ self.target_arch = recipe_d.getVar('TARGET_ARCH')
+ self.target_dbgsrc_dir = recipe_d.getVar('TARGET_DBGSRC_DIR')
+ self.topdir = recipe_d.getVar('TOPDIR')
+ self.workdir = os.path.realpath(recipe_d.getVar('WORKDIR'))
+
+ self.__init_exported_variables(recipe_d)
+
+ if bb.data.inherits_class('cmake', recipe_d):
+ self.oecmake_generator = recipe_d.getVar('OECMAKE_GENERATOR')
+ self.__init_cmake_preset_cache(recipe_d)
+ self.build_tool = BuildTool.CMAKE
+ elif bb.data.inherits_class('meson', recipe_d):
+ self.meson_buildtype = recipe_d.getVar('MESON_BUILDTYPE')
+ self.mesonopts = recipe_d.getVar('MESONOPTS')
+ self.extra_oemeson = recipe_d.getVar('EXTRA_OEMESON')
+ self.meson_cross_file = recipe_d.getVar('MESON_CROSS_FILE')
+ self.build_tool = BuildTool.MESON
+
+ # Recipe ID is the identifier for IDE config sections
+ self.recipe_id = self.bpn + "-" + self.package_arch
+ self.recipe_id_pretty = self.bpn + ": " + self.package_arch
+
+ def append_to_bbappend(self, append_text):
+ with open(self.bbappend, 'a') as bbap:
+ bbap.write(append_text)
+
+ def remove_from_bbappend(self, append_text):
+ with open(self.bbappend, 'r') as bbap:
+ text = bbap.read()
+ new_text = text.replace(append_text, '')
+ with open(self.bbappend, 'w') as bbap:
+ bbap.write(new_text)
+
+ @staticmethod
+ def is_valid_shell_variable(var):
+ """Skip strange shell variables like systemd
+
+ prevent from strange bugs because of strange variables which
+ are not used in this context but break various tools.
+ """
+ if RecipeModified.VALID_BASH_ENV_NAME_CHARS.match(var):
+ bb.debug(1, "ignoring variable: %s" % var)
+ return True
+ return False
+
+ def debug_build_config(self, args):
+ """Explicitely set for example CMAKE_BUILD_TYPE to Debug if not defined otherwise"""
+ if self.build_tool is BuildTool.CMAKE:
+ append_text = os.linesep + \
+ 'OECMAKE_ARGS:append = " -DCMAKE_BUILD_TYPE:STRING=Debug"' + os.linesep
+ if args.debug_build_config and not 'CMAKE_BUILD_TYPE' in self.cmake_cache_vars:
+ self.cmake_cache_vars['CMAKE_BUILD_TYPE'] = {
+ "type": "STRING",
+ "value": "Debug",
+ }
+ self.append_to_bbappend(append_text)
+ elif 'CMAKE_BUILD_TYPE' in self.cmake_cache_vars:
+ del self.cmake_cache_vars['CMAKE_BUILD_TYPE']
+ self.remove_from_bbappend(append_text)
+ elif self.build_tool is BuildTool.MESON:
+ append_text = os.linesep + 'MESON_BUILDTYPE = "debug"' + os.linesep
+ if args.debug_build_config and self.meson_buildtype != "debug":
+ self.mesonopts.replace(
+ '--buildtype ' + self.meson_buildtype, '--buildtype debug')
+ self.append_to_bbappend(append_text)
+ elif self.meson_buildtype == "debug":
+ self.mesonopts.replace(
+ '--buildtype debug', '--buildtype plain')
+ self.remove_from_bbappend(append_text)
+ elif args.debug_build_config:
+ logger.warn(
+ "--debug-build-config is not implemented for this build tool yet.")
+
+ def solib_search_path(self, image):
+ """Search for debug symbols in the rootfs and rootfs-dbg
+
+ The debug symbols of shared libraries which are provided by other packages
+ are grabbed from the -dbg packages in the rootfs-dbg.
+
+ But most cross debugging tools like gdb, perf, and systemtap need to find
+ executable/library first and through it debuglink note find corresponding
+ symbols file. Therefore the library paths from the rootfs are added as well.
+
+ Note: For the devtool modified recipe compiled from the IDE, the debug
+ symbols are taken from the unstripped binaries in the image folder.
+ Also, devtool deploy-target takes the files from the image folder.
+ debug symbols in the image folder refer to the corresponding source files
+ with absolute paths of the build machine. Debug symbols found in the
+ rootfs-dbg are relocated and contain paths which refer to the source files
+ installed on the target device e.g. /usr/src/...
+ """
+ base_libdir = self.base_libdir.lstrip('/')
+ libdir = self.libdir.lstrip('/')
+ so_paths = [
+ # debug symbols for package_debug_split_style: debug-with-srcpkg or .debug
+ os.path.join(image.rootfs_dbg, base_libdir, ".debug"),
+ os.path.join(image.rootfs_dbg, libdir, ".debug"),
+ # debug symbols for package_debug_split_style: debug-file-directory
+ os.path.join(image.rootfs_dbg, "usr", "lib", "debug"),
+
+ # The binaries are required as well, the debug packages are not enough
+ # With image-combined-dbg.bbclass the binaries are copied into rootfs-dbg
+ os.path.join(image.rootfs_dbg, base_libdir),
+ os.path.join(image.rootfs_dbg, libdir),
+ # Without image-combined-dbg.bbclass the binaries are only in rootfs.
+ # Note: Stepping into source files located in rootfs-dbg does not
+ # work without image-combined-dbg.bbclass yet.
+ os.path.join(image.rootfs, base_libdir),
+ os.path.join(image.rootfs, libdir)
+ ]
+ return so_paths
+
+ def solib_search_path_str(self, image):
+ """Return a : separated list of paths usable by GDB's set solib-search-path"""
+ return ':'.join(self.solib_search_path(image))
+
+ def __init_exported_variables(self, d):
+ """Find all variables with export flag set.
+
+ This allows to generate IDE configurations which compile with the same
+ environment as bitbake does. That's at least a reasonable default behavior.
+ """
+ exported_vars = {}
+
+ vars = (key for key in d.keys() if not key.startswith(
+ "__") and not d.getVarFlag(key, "func", False))
+ for var in vars:
+ func = d.getVarFlag(var, "func", False)
+ if d.getVarFlag(var, 'python', False) and func:
+ continue
+ export = d.getVarFlag(var, "export", False)
+ unexport = d.getVarFlag(var, "unexport", False)
+ if not export and not unexport and not func:
+ continue
+ if unexport:
+ continue
+
+ val = d.getVar(var)
+ if val is None:
+ continue
+ if set(var) & set("-.{}+"):
+ logger.warn(
+ "Warning: Found invalid character in variable name %s", str(var))
+ continue
+ varExpanded = d.expand(var)
+ val = str(val)
+
+ if not RecipeModified.is_valid_shell_variable(varExpanded):
+ continue
+
+ if func:
+ code_line = "line: {0}, file: {1}\n".format(
+ d.getVarFlag(var, "lineno", False),
+ d.getVarFlag(var, "filename", False))
+ val = val.rstrip('\n')
+ logger.warn("Warning: exported shell function %s() is not exported (%s)" %
+ (varExpanded, code_line))
+ continue
+
+ if export:
+ exported_vars[varExpanded] = val.strip()
+ continue
+
+ self.exported_vars = exported_vars
+
+ def __init_cmake_preset_cache(self, d):
+ """Get the arguments passed to cmake
+
+ Replicate the cmake configure arguments with all details to
+ share on build folder between bitbake and SDK.
+ """
+ site_file = os.path.join(self.workdir, 'site-file.cmake')
+ if os.path.exists(site_file):
+ print("Warning: site-file.cmake is not supported")
+
+ cache_vars = {}
+ oecmake_args = d.getVar('OECMAKE_ARGS').split()
+ extra_oecmake = d.getVar('EXTRA_OECMAKE').split()
+ for param in oecmake_args + extra_oecmake:
+ d_pref = "-D"
+ if param.startswith(d_pref):
+ param = param[len(d_pref):]
+ else:
+ print("Error: expected a -D")
+ param_s = param.split('=', 1)
+ param_nt = param_s[0].split(':', 1)
+
+ def handle_undefined_variable(var):
+ if var.startswith('${') and var.endswith('}'):
+ return ''
+ else:
+ return var
+ # Example: FOO=ON
+ if len(param_nt) == 1:
+ cache_vars[param_s[0]] = handle_undefined_variable(param_s[1])
+ # Example: FOO:PATH=/tmp
+ elif len(param_nt) == 2:
+ cache_vars[param_nt[0]] = {
+ "type": param_nt[1],
+ "value": handle_undefined_variable(param_s[1]),
+ }
+ else:
+ print("Error: cannot parse %s" % param)
+ self.cmake_cache_vars = cache_vars
+
+ def cmake_preset(self):
+ """Create a preset for cmake that mimics how bitbake calls cmake"""
+ toolchain_file = os.path.join(self.workdir, 'toolchain.cmake')
+ cmake_executable = os.path.join(
+ self.recipe_sysroot_native, 'usr', 'bin', 'cmake')
+ self.cmd_compile = cmake_executable + " --build --preset " + self.recipe_id
+
+ preset_dict_configure = {
+ "name": self.recipe_id,
+ "displayName": self.recipe_id_pretty,
+ "description": "Bitbake build environment for the recipe %s compiled for %s" % (self.bpn, self.package_arch),
+ "binaryDir": self.b,
+ "generator": self.oecmake_generator,
+ "toolchainFile": toolchain_file,
+ "cacheVariables": self.cmake_cache_vars,
+ "environment": self.exported_vars,
+ "cmakeExecutable": cmake_executable
+ }
+
+ preset_dict_build = {
+ "name": self.recipe_id,
+ "displayName": self.recipe_id_pretty,
+ "description": "Bitbake build environment for the recipe %s compiled for %s" % (self.bpn, self.package_arch),
+ "configurePreset": self.recipe_id,
+ "inheritConfigureEnvironment": True
+ }
+
+ preset_dict_test = {
+ "name": self.recipe_id,
+ "displayName": self.recipe_id_pretty,
+ "description": "Bitbake build environment for the recipe %s compiled for %s" % (self.bpn, self.package_arch),
+ "configurePreset": self.recipe_id,
+ "inheritConfigureEnvironment": True
+ }
+
+ preset_dict = {
+ "version": 3, # cmake 3.21, backward compatible with kirkstone
+ "configurePresets": [preset_dict_configure],
+ "buildPresets": [preset_dict_build],
+ "testPresets": [preset_dict_test]
+ }
+
+ # Finally write the json file
+ json_file = 'CMakeUserPresets.json'
+ json_path = os.path.join(self.real_srctree, json_file)
+ logger.info("Updating CMake preset: %s (%s)" % (json_file, json_path))
+ if not os.path.exists(self.real_srctree):
+ os.makedirs(self.real_srctree)
+ try:
+ with open(json_path) as f:
+ orig_dict = json.load(f)
+ except json.decoder.JSONDecodeError:
+ logger.info(
+ "Decoding %s failed. Probably because of comments in the json file" % json_path)
+ orig_dict = {}
+ except FileNotFoundError:
+ orig_dict = {}
+
+ # Add or update the presets for the recipe and keep other presets
+ for k, v in preset_dict.items():
+ if isinstance(v, list):
+ update_preset = v[0]
+ preset_added = False
+ if k in orig_dict:
+ for index, orig_preset in enumerate(orig_dict[k]):
+ if 'name' in orig_preset:
+ if orig_preset['name'] == update_preset['name']:
+ logger.debug("Updating preset: %s" %
+ orig_preset['name'])
+ orig_dict[k][index] = update_preset
+ preset_added = True
+ break
+ else:
+ logger.debug("keeping preset: %s" %
+ orig_preset['name'])
+ else:
+ logger.warn("preset without a name found")
+ if not preset_added:
+ if not k in orig_dict:
+ orig_dict[k] = []
+ orig_dict[k].append(update_preset)
+ logger.debug("Added preset: %s" %
+ update_preset['name'])
+ else:
+ orig_dict[k] = v
+
+ with open(json_path, 'w') as f:
+ json.dump(orig_dict, f, indent=4)
+
+ def gen_meson_wrapper(self):
+ """Generate a wrapper script to call meson with the cross environment"""
+ bb.utils.mkdirhier(self.ide_sdk_scripts_dir)
+ meson_wrapper = os.path.join(self.ide_sdk_scripts_dir, 'meson')
+ meson_real = os.path.join(
+ self.recipe_sysroot_native, 'usr', 'bin', 'meson.real')
+ with open(meson_wrapper, 'w') as mwrap:
+ mwrap.write("#!/bin/sh" + os.linesep)
+ for var, val in self.exported_vars.items():
+ mwrap.write('export %s="%s"' % (var, val) + os.linesep)
+ mwrap.write("unset CC CXX CPP LD AR NM STRIP" + os.linesep)
+ private_temp = os.path.join(self.b, "meson-private", "tmp")
+ mwrap.write('mkdir -p "%s"' % private_temp + os.linesep)
+ mwrap.write('export TMPDIR="%s"' % private_temp + os.linesep)
+ mwrap.write('exec "%s" "$@"' % meson_real + os.linesep)
+ st = os.stat(meson_wrapper)
+ os.chmod(meson_wrapper, st.st_mode | stat.S_IEXEC)
+ self.meson_wrapper = meson_wrapper
+ self.cmd_compile = meson_wrapper + " compile -C " + self.b
+
+ def which(self, executable):
+ bin_path = shutil.which(executable, path=self.path)
+ if not bin_path:
+ raise DevtoolError(
+ 'Cannot find %s. Probably the recipe %s is not built yet.' % (executable, self.bpn))
+ return bin_path
+
+ @staticmethod
+ def is_elf_file(file_path):
+ with open(file_path, "rb") as f:
+ data = f.read(4)
+ if data == b'\x7fELF':
+ return True
+ return False
+
+ def find_installed_binaries(self):
+ """find all executable elf files in the image directory"""
+ binaries = []
+ d_len = len(self.d)
+ re_so = re.compile(r'.*\.so[.0-9]*$')
+ for root, _, files in os.walk(self.d, followlinks=False):
+ for file in files:
+ if os.path.islink(file):
+ continue
+ if re_so.match(file):
+ continue
+ abs_name = os.path.join(root, file)
+ if os.access(abs_name, os.X_OK) and RecipeModified.is_elf_file(abs_name):
+ binaries.append(abs_name[d_len:])
+ return sorted(binaries)
+
+ def gen_delete_package_dirs(self):
+ """delete folders of package tasks
+
+ This is a workaround for and issue with recipes having their sources
+ downloaded as file://
+ This likely breaks pseudo like:
+ path mismatch [3 links]: ino 79147802 db
+ .../build/tmp/.../cmake-example/1.0/package/usr/src/debug/
+ cmake-example/1.0-r0/oe-local-files/cpp-example-lib.cpp
+ .../build/workspace/sources/cmake-example/oe-local-files/cpp-example-lib.cpp
+ Since the files are anyway outdated lets deleted them (also from pseudo's db) to workaround this issue.
+ """
+ cmd_lines = ['#!/bin/sh']
+
+ # Set up the appropriate environment
+ newenv = dict(os.environ)
+ for varvalue in self.fakerootenv.split():
+ if '=' in varvalue:
+ splitval = varvalue.split('=', 1)
+ newenv[splitval[0]] = splitval[1]
+
+ # Replicate the environment variables from bitbake
+ for var, val in newenv.items():
+ if not RecipeModified.is_valid_shell_variable(var):
+ continue
+ cmd_lines.append('%s="%s"' % (var, val))
+ cmd_lines.append('export %s' % var)
+
+ # Delete the folders
+ pkg_dirs = ' '.join([os.path.join(self.workdir, d) for d in [
+ "package", "packages-split", "pkgdata", "sstate-install-package", "debugsources.list", "*.spec"]])
+ cmd = "%s rm -rf %s" % (self.fakerootcmd, pkg_dirs)
+ cmd_lines.append('%s || { "%s failed"; exit 1; }' % (cmd, cmd))
+
+ return self.write_script(cmd_lines, 'delete_package_dirs')
+
+ def gen_deploy_target_script(self, args):
+ """Generate a script which does what devtool deploy-target does
+
+ This script is much quicker than devtool target-deploy. Because it
+ does not need to start a bitbake server. All information from tinfoil
+ is hard-coded in the generated script.
+ """
+ cmd_lines = ['#!%s' % str(sys.executable)]
+ cmd_lines.append('import sys')
+ cmd_lines.append('devtool_sys_path = %s' % str(sys.path))
+ cmd_lines.append('devtool_sys_path.reverse()')
+ cmd_lines.append('for p in devtool_sys_path:')
+ cmd_lines.append(' if p not in sys.path:')
+ cmd_lines.append(' sys.path.insert(0, p)')
+ cmd_lines.append('from devtool.deploy import deploy_no_d')
+ args_filter = ['debug', 'dry_run', 'key', 'no_check_space', 'no_host_check',
+ 'no_preserve', 'port', 'show_status', 'ssh_exec', 'strip', 'target']
+ filtered_args_dict = {key: value for key, value in vars(
+ args).items() if key in args_filter}
+ cmd_lines.append('filtered_args_dict = %s' % str(filtered_args_dict))
+ cmd_lines.append('class Dict2Class(object):')
+ cmd_lines.append(' def __init__(self, my_dict):')
+ cmd_lines.append(' for key in my_dict:')
+ cmd_lines.append(' setattr(self, key, my_dict[key])')
+ cmd_lines.append('filtered_args = Dict2Class(filtered_args_dict)')
+ cmd_lines.append(
+ 'setattr(filtered_args, "recipename", "%s")' % self.bpn)
+ cmd_lines.append('deploy_no_d("%s", "%s", "%s", "%s", "%s", "%s", %d, "%s", "%s", filtered_args)' %
+ (self.d, self.workdir, self.path, self.strip_cmd,
+ self.libdir, self.base_libdir, self.max_process,
+ self.fakerootcmd, self.fakerootenv))
+ return self.write_script(cmd_lines, 'deploy_target')
+
+ def gen_install_deploy_script(self, args):
+ """Generate a script which does install and deploy"""
+ cmd_lines = ['#!/bin/bash']
+
+ cmd_lines.append(self.gen_delete_package_dirs())
+
+ # . oe-init-build-env $BUILDDIR
+ # Note: Sourcing scripts with arguments requires bash
+ cmd_lines.append('cd "%s" || { echo "cd %s failed"; exit 1; }' % (
+ self.oe_init_dir, self.oe_init_dir))
+ cmd_lines.append('. "%s" "%s" || { echo ". %s %s failed"; exit 1; }' % (
+ self.oe_init_build_env, self.topdir, self.oe_init_build_env, self.topdir))
+
+ # bitbake -c install
+ cmd_lines.append(
+ 'bitbake %s -c install --force || { echo "bitbake %s -c install --force failed"; exit 1; }' % (self.bpn, self.bpn))
+
+ # Self contained devtool deploy-target
+ cmd_lines.append(self.gen_deploy_target_script(args))
+
+ return self.write_script(cmd_lines, 'install_and_deploy')
+
+ def write_script(self, cmd_lines, script_name):
+ bb.utils.mkdirhier(self.ide_sdk_scripts_dir)
+ script_name_arch = script_name + '_' + self.recipe_id
+ script_file = os.path.join(self.ide_sdk_scripts_dir, script_name_arch)
+ with open(script_file, 'w') as script_f:
+ script_f.write(os.linesep.join(cmd_lines))
+ st = os.stat(script_file)
+ os.chmod(script_file, st.st_mode | stat.S_IEXEC)
+ return script_file
+
+ @property
+ def oe_init_build_env(self):
+ """Find the oe-init-build-env used for this setup"""
+ oe_init_dir = self.oe_init_dir
+ if oe_init_dir:
+ return os.path.join(oe_init_dir, RecipeModified.OE_INIT_BUILD_ENV)
+ return None
+
+ @property
+ def oe_init_dir(self):
+ """Find the directory where the oe-init-build-env is located
+
+ Assumption: There might be a layer with higher priority than poky
+ which provides to oe-init-build-env in the layer's toplevel folder.
+ """
+ if not self.__oe_init_dir:
+ for layer in reversed(self.bblayers):
+ result = subprocess.run(
+ ['git', 'rev-parse', '--show-toplevel'], cwd=layer, capture_output=True)
+ if result.returncode == 0:
+ oe_init_dir = result.stdout.decode('utf-8').strip()
+ oe_init_path = os.path.join(
+ oe_init_dir, RecipeModified.OE_INIT_BUILD_ENV)
+ if os.path.exists(oe_init_path):
+ logger.debug("Using %s from: %s" % (
+ RecipeModified.OE_INIT_BUILD_ENV, oe_init_path))
+ self.__oe_init_dir = oe_init_dir
+ break
+ if not self.__oe_init_dir:
+ logger.error("Cannot find the bitbake top level folder")
+ return self.__oe_init_dir
+
+
+def ide_setup(args, config, basepath, workspace):
+ """Generate the IDE configuration for the workspace"""
+
+ # Explicitely passing some special recipes does not make sense
+ for recipe in args.recipenames:
+ if recipe in ['meta-ide-support', 'build-sysroots']:
+ raise DevtoolError("Invalid recipe: %s." % recipe)
+
+ # Collect information about tasks which need to be bitbaked
+ bootstrap_tasks = []
+ bootstrap_tasks_late = []
+ tinfoil = setup_tinfoil(config_only=False, basepath=basepath)
+ try:
+ # define mode depending on recipes which need to be processed
+ recipes_image_names = []
+ recipes_modified_names = []
+ recipes_other_names = []
+ for recipe in args.recipenames:
+ try:
+ check_workspace_recipe(
+ workspace, recipe, bbclassextend=True)
+ recipes_modified_names.append(recipe)
+ except DevtoolError:
+ recipe_d = parse_recipe(
+ config, tinfoil, recipe, appends=True, filter_workspace=False)
+ if not recipe_d:
+ raise DevtoolError("Parsing recipe %s failed" % recipe)
+ if bb.data.inherits_class('image', recipe_d):
+ recipes_image_names.append(recipe)
+ else:
+ recipes_other_names.append(recipe)
+
+ invalid_params = False
+ if args.mode == DevtoolIdeMode.shared:
+ if len(recipes_modified_names):
+ logger.error("In shared sysroots mode modified recipes %s cannot be handled." % str(
+ recipes_modified_names))
+ invalid_params = True
+ if args.mode == DevtoolIdeMode.modified:
+ if len(recipes_other_names):
+ logger.error("Only in shared sysroots mode not modified recipes %s can be handled." % str(
+ recipes_other_names))
+ invalid_params = True
+ if len(recipes_image_names) != 1:
+ logger.error(
+ "One image recipe is required as the rootfs for the remote development.")
+ invalid_params = True
+ for modified_recipe_name in recipes_modified_names:
+ if modified_recipe_name.startswith('nativesdk-') or modified_recipe_name.endswith('-native'):
+ logger.error(
+ "Only cross compiled recipes are support. %s is not cross." % modified_recipe_name)
+ invalid_params = True
+
+ if invalid_params:
+ raise DevtoolError("Invalid parameters are passed.")
+
+ # For the shared sysroots mode, add all dependencies of all the images to the sysroots
+ # For the modified mode provide one rootfs and the corresponding debug symbols via rootfs-dbg
+ recipes_images = []
+ for recipes_image_name in recipes_image_names:
+ logger.info("Using image: %s" % recipes_image_name)
+ recipe_image = RecipeImage(recipes_image_name)
+ recipe_image.initialize(config, tinfoil)
+ bootstrap_tasks += recipe_image.bootstrap_tasks
+ recipes_images.append(recipe_image)
+
+ # Provide a Direct SDK with shared sysroots
+ recipes_not_modified = []
+ if args.mode == DevtoolIdeMode.shared:
+ ide_support = RecipeMetaIdeSupport()
+ ide_support.initialize(config, tinfoil)
+ bootstrap_tasks += ide_support.bootstrap_tasks
+
+ logger.info("Adding %s to the Direct SDK sysroots." %
+ str(recipes_other_names))
+ for recipe_name in recipes_other_names:
+ recipe_not_modified = RecipeNotModified(recipe_name)
+ bootstrap_tasks += recipe_not_modified.bootstrap_tasks
+ recipes_not_modified.append(recipe_not_modified)
+
+ build_sysroots = RecipeBuildSysroots()
+ build_sysroots.initialize(config, tinfoil)
+ bootstrap_tasks_late += build_sysroots.bootstrap_tasks
+ shared_env = SharedSysrootsEnv()
+ shared_env.initialize(ide_support, build_sysroots)
+
+ recipes_modified = []
+ if args.mode == DevtoolIdeMode.modified:
+ logger.info("Setting up workspaces for modified recipe: %s" %
+ str(recipes_modified_names))
+ gdbs_cross = {}
+ for recipe_name in recipes_modified_names:
+ recipe_modified = RecipeModified(recipe_name)
+ recipe_modified.initialize(config, workspace, tinfoil)
+ bootstrap_tasks += recipe_modified.bootstrap_tasks
+ recipes_modified.append(recipe_modified)
+
+ if recipe_modified.target_arch not in gdbs_cross:
+ target_device = TargetDevice(args)
+ gdb_cross = RecipeGdbCross(
+ args, recipe_modified.target_arch, target_device)
+ gdb_cross.initialize(config, workspace, tinfoil)
+ bootstrap_tasks += gdb_cross.bootstrap_tasks
+ gdbs_cross[recipe_modified.target_arch] = gdb_cross
+ recipe_modified.gdb_cross = gdbs_cross[recipe_modified.target_arch]
+
+ finally:
+ tinfoil.shutdown()
+
+ if not args.skip_bitbake:
+ bb_cmd = 'bitbake '
+ if args.bitbake_k:
+ bb_cmd += "-k "
+ bb_cmd_early = bb_cmd + ' '.join(bootstrap_tasks)
+ exec_build_env_command(
+ config.init_path, basepath, bb_cmd_early, watch=True)
+ if bootstrap_tasks_late:
+ bb_cmd_late = bb_cmd + ' '.join(bootstrap_tasks_late)
+ exec_build_env_command(
+ config.init_path, basepath, bb_cmd_late, watch=True)
+
+ for recipe_image in recipes_images:
+ if (recipe_image.gdbserver_missing):
+ logger.warning(
+ "gdbserver not installed in image %s. Remote debugging will not be available" % recipe_image)
+
+ if recipe_image.combine_dbg_image is False:
+ logger.warning(
+ 'IMAGE_CLASSES += "image-combined-dbg" is missing for image %s. Remote debugging will not find debug symbols from rootfs-dbg.' % recipe_image)
+
+ # Instantiate the active IDE plugin
+ ide = ide_plugins[args.ide]()
+ if args.mode == DevtoolIdeMode.shared:
+ ide.setup_shared_sysroots(shared_env)
+ elif args.mode == DevtoolIdeMode.modified:
+ for recipe_modified in recipes_modified:
+ if recipe_modified.build_tool is BuildTool.CMAKE:
+ recipe_modified.cmake_preset()
+ if recipe_modified.build_tool is BuildTool.MESON:
+ recipe_modified.gen_meson_wrapper()
+ ide.setup_modified_recipe(
+ args, recipe_image, recipe_modified)
+ else:
+ raise DevtoolError("Must not end up here.")
+
+
+def register_commands(subparsers, context):
+ """Register devtool subcommands from this plugin"""
+
+ global ide_plugins
+
+ # Search for IDE plugins in all sub-folders named ide_plugins where devtool seraches for plugins.
+ pluginpaths = [os.path.join(path, 'ide_plugins')
+ for path in context.pluginpaths]
+ ide_plugin_modules = []
+ for pluginpath in pluginpaths:
+ scriptutils.load_plugins(logger, ide_plugin_modules, pluginpath)
+
+ for ide_plugin_module in ide_plugin_modules:
+ if hasattr(ide_plugin_module, 'register_ide_plugin'):
+ ide_plugin_module.register_ide_plugin(ide_plugins)
+ # Sort plugins according to their priority. The first entry is the default IDE plugin.
+ ide_plugins = dict(sorted(ide_plugins.items(),
+ key=lambda p: p[1].ide_plugin_priority(), reverse=True))
+
+ parser_ide_sdk = subparsers.add_parser('ide-sdk', group='working', order=50, formatter_class=RawTextHelpFormatter,
+ help='Setup the SDK and configure the IDE')
+ parser_ide_sdk.add_argument(
+ 'recipenames', nargs='+', help='Generate an IDE configuration suitable to work on the given recipes.\n'
+ 'Depending on the --mode paramter different types of SDKs and IDE configurations are generated.')
+ parser_ide_sdk.add_argument(
+ '-m', '--mode', type=DevtoolIdeMode, default=DevtoolIdeMode.modified,
+ help='Different SDK types are supported:\n'
+ '- "' + DevtoolIdeMode.modified.name + '" (default):\n'
+ ' devtool modify creates a workspace to work on the source code of a recipe.\n'
+ ' devtool ide-sdk builds the SDK and generates the IDE configuration(s) in the workspace directorie(s)\n'
+ ' Usage example:\n'
+ ' devtool modify cmake-example\n'
+ ' devtool ide-sdk cmake-example core-image-minimal\n'
+ ' Start the IDE in the workspace folder\n'
+ ' At least one devtool modified recipe plus one image recipe are required:\n'
+ ' The image recipe is used to generate the target image and the remote debug configuration.\n'
+ '- "' + DevtoolIdeMode.shared.name + '":\n'
+ ' Usage example:\n'
+ ' devtool ide-sdk -m ' + DevtoolIdeMode.shared.name + ' recipe(s)\n'
+ ' This command generates a cross-toolchain as well as the corresponding shared sysroot directories.\n'
+ ' To use this tool-chain the environment-* file found in the deploy..image folder needs to be sourced into a shell.\n'
+ ' In case of VSCode and cmake the tool-chain is also exposed as a cmake-kit')
+ default_ide = list(ide_plugins.keys())[0]
+ parser_ide_sdk.add_argument(
+ '-i', '--ide', choices=ide_plugins.keys(), default=default_ide,
+ help='Setup the configuration for this IDE (default: %s)' % default_ide)
+ parser_ide_sdk.add_argument(
+ '-t', '--target', default='root@192.168.7.2',
+ help='Live target machine running an ssh server: user@hostname.')
+ parser_ide_sdk.add_argument(
+ '-G', '--gdbserver-port-start', default="1234", help='port where gdbserver is listening.')
+ parser_ide_sdk.add_argument(
+ '-c', '--no-host-check', help='Disable ssh host key checking', action='store_true')
+ parser_ide_sdk.add_argument(
+ '-e', '--ssh-exec', help='Executable to use in place of ssh')
+ parser_ide_sdk.add_argument(
+ '-P', '--port', help='Specify ssh port to use for connection to the target')
+ parser_ide_sdk.add_argument(
+ '-I', '--key', help='Specify ssh private key for connection to the target')
+ parser_ide_sdk.add_argument(
+ '--skip-bitbake', help='Generate IDE configuration but skip calling bibtake to update the SDK.', action='store_true')
+ parser_ide_sdk.add_argument(
+ '-k', '--bitbake-k', help='Pass -k parameter to bitbake', action='store_true')
+ parser_ide_sdk.add_argument(
+ '--no-strip', help='Do not strip executables prior to deploy', dest='strip', action='store_false')
+ parser_ide_sdk.add_argument(
+ '-n', '--dry-run', help='List files to be undeployed only', action='store_true')
+ parser_ide_sdk.add_argument(
+ '-s', '--show-status', help='Show progress/status output', action='store_true')
+ parser_ide_sdk.add_argument(
+ '-p', '--no-preserve', help='Do not preserve existing files', action='store_true')
+ parser_ide_sdk.add_argument(
+ '--no-check-space', help='Do not check for available space before deploying', action='store_true')
+ parser_ide_sdk.add_argument(
+ '--debug-build-config', help='Use debug build flags, for example set CMAKE_BUILD_TYPE=Debug', action='store_true')
+ parser_ide_sdk.set_defaults(func=ide_setup)
diff --git a/scripts/lib/devtool/menuconfig.py b/scripts/lib/devtool/menuconfig.py
index 95384c5333..18daef30c3 100644
--- a/scripts/lib/devtool/menuconfig.py
+++ b/scripts/lib/devtool/menuconfig.py
@@ -3,6 +3,8 @@
# Copyright (C) 2018 Xilinx
# Written by: Chandana Kalluri <ckalluri@xilinx.com>
#
+# SPDX-License-Identifier: MIT
+#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
@@ -43,7 +45,7 @@ def menuconfig(args, config, basepath, workspace):
return 1
check_workspace_recipe(workspace, args.component)
- pn = rd.getVar('PN', True)
+ pn = rd.getVar('PN')
if not rd.getVarFlag('do_menuconfig','task'):
raise DevtoolError("This recipe does not support menuconfig option")
diff --git a/scripts/lib/devtool/sdk.py b/scripts/lib/devtool/sdk.py
index d717b6c2b8..9aefd7e354 100644
--- a/scripts/lib/devtool/sdk.py
+++ b/scripts/lib/devtool/sdk.py
@@ -300,7 +300,8 @@ def sdk_install(args, config, basepath, workspace):
return 2
try:
- exec_build_env_command(config.init_path, basepath, 'bitbake build-sysroots', watch=True)
+ exec_build_env_command(config.init_path, basepath, 'bitbake build-sysroots -c build_native_sysroot', watch=True)
+ exec_build_env_command(config.init_path, basepath, 'bitbake build-sysroots -c build_target_sysroot', watch=True)
except bb.process.ExecutionError as e:
raise DevtoolError('Failed to bitbake build-sysroots:\n%s' % (str(e)))
diff --git a/scripts/lib/devtool/standard.py b/scripts/lib/devtool/standard.py
index 01fb5ad96f..6674e67267 100644
--- a/scripts/lib/devtool/standard.py
+++ b/scripts/lib/devtool/standard.py
@@ -147,6 +147,8 @@ def add(args, config, basepath, workspace):
extracmdopts += ' -a'
if args.npm_dev:
extracmdopts += ' --npm-dev'
+ if args.no_pypi:
+ extracmdopts += ' --no-pypi'
if args.mirrors:
extracmdopts += ' --mirrors'
if args.srcrev:
@@ -234,10 +236,14 @@ def add(args, config, basepath, workspace):
if args.fetchuri and not args.no_git:
setup_git_repo(srctree, args.version, 'devtool', d=tinfoil.config_data)
- initial_rev = None
+ initial_rev = {}
if os.path.exists(os.path.join(srctree, '.git')):
(stdout, _) = bb.process.run('git rev-parse HEAD', cwd=srctree)
- initial_rev = stdout.rstrip()
+ initial_rev["."] = stdout.rstrip()
+ (stdout, _) = bb.process.run('git submodule --quiet foreach --recursive \'echo `git rev-parse HEAD` $PWD\'', cwd=srctree)
+ for line in stdout.splitlines():
+ (rev, submodule) = line.split()
+ initial_rev[os.path.relpath(submodule, srctree)] = rev
if args.src_subdir:
srctree = os.path.join(srctree, args.src_subdir)
@@ -251,7 +257,8 @@ def add(args, config, basepath, workspace):
if b_is_s:
f.write('EXTERNALSRC_BUILD = "%s"\n' % srctree)
if initial_rev:
- f.write('\n# initial_rev: %s\n' % initial_rev)
+ for key, value in initial_rev.items():
+ f.write('\n# initial_rev %s: %s\n' % (key, value))
if args.binary:
f.write('do_install:append() {\n')
@@ -353,7 +360,7 @@ def _move_file(src, dst, dry_run_outdir=None, base_outdir=None):
bb.utils.mkdirhier(dst_d)
shutil.move(src, dst)
-def _copy_file(src, dst, dry_run_outdir=None):
+def _copy_file(src, dst, dry_run_outdir=None, base_outdir=None):
"""Copy a file. Creates all the directory components of destination path."""
dry_run_suffix = ' (dry-run)' if dry_run_outdir else ''
logger.debug('Copying %s to %s%s' % (src, dst, dry_run_suffix))
@@ -453,7 +460,7 @@ def sync(args, config, basepath, workspace):
finally:
tinfoil.shutdown()
-def symlink_oelocal_files_srctree(rd,srctree):
+def symlink_oelocal_files_srctree(rd, srctree):
import oe.patch
if os.path.abspath(rd.getVar('S')) == os.path.abspath(rd.getVar('WORKDIR')):
# If recipe extracts to ${WORKDIR}, symlink the files into the srctree
@@ -477,11 +484,7 @@ def symlink_oelocal_files_srctree(rd,srctree):
os.symlink('oe-local-files/%s' % fn, destpth)
addfiles.append(os.path.join(relpth, fn))
if addfiles:
- bb.process.run('git add %s' % ' '.join(addfiles), cwd=srctree)
- useroptions = []
- oe.patch.GitApplyTree.gitCommandUserOptions(useroptions, d=rd)
- bb.process.run('git %s commit -m "Committing local file symlinks\n\n%s"' % (' '.join(useroptions), oe.patch.GitApplyTree.ignore_commit_prefix), cwd=srctree)
-
+ oe.patch.GitApplyTree.commitIgnored("Add local file symlinks", dir=srctree, files=addfiles, d=rd)
def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, workspace, fixed_setup, d, tinfoil, no_overrides=False):
"""Extract sources of a recipe"""
@@ -520,7 +523,9 @@ def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, works
for event in history:
if not 'flag' in event:
if event['op'].startswith((':append[', ':prepend[')):
- extra_overrides.append(event['op'].split('[')[1].split(']')[0])
+ override = event['op'].split('[')[1].split(']')[0]
+ if not override.startswith('pn-'):
+ extra_overrides.append(override)
# We want to remove duplicate overrides. If a recipe had multiple
# SRC_URI_override += values it would cause mulitple instances of
# overrides. This doesn't play nicely with things like creating a
@@ -565,6 +570,9 @@ def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, works
logger.debug('writing append file %s' % appendfile)
with open(appendfile, 'a') as f:
f.write('###--- _extract_source\n')
+ f.write('deltask do_recipe_qa\n')
+ f.write('deltask do_recipe_qa_setscene\n')
+ f.write('ERROR_QA:remove = "patch-fuzz"\n')
f.write('DEVTOOL_TEMPDIR = "%s"\n' % tempdir)
f.write('DEVTOOL_DEVBRANCH = "%s"\n' % devbranch)
if not is_kernel_yocto:
@@ -582,6 +590,7 @@ def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, works
preservestampfile = os.path.join(sstate_manifests, 'preserve-stamps')
with open(preservestampfile, 'w') as f:
f.write(d.getVar('STAMP'))
+ tinfoil.modified_files()
try:
if is_kernel_yocto:
# We need to generate the kernel config
@@ -644,9 +653,9 @@ def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, works
if os.path.exists(workshareddir) and (not os.listdir(workshareddir) or kernelVersion != staging_kerVer):
shutil.rmtree(workshareddir)
- oe.path.copyhardlinktree(srcsubdir,workshareddir)
+ oe.path.copyhardlinktree(srcsubdir, workshareddir)
elif not os.path.exists(workshareddir):
- oe.path.copyhardlinktree(srcsubdir,workshareddir)
+ oe.path.copyhardlinktree(srcsubdir, workshareddir)
tempdir_localdir = os.path.join(tempdir, 'oe-local-files')
srctree_localdir = os.path.join(srctree, 'oe-local-files')
@@ -654,13 +663,13 @@ def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, works
if sync:
bb.process.run('git fetch file://' + srcsubdir + ' ' + devbranch + ':' + devbranch, cwd=srctree)
- # Move oe-local-files directory to srctree
- # As the oe-local-files is not part of the constructed git tree,
- # remove them directly during the synchrounizating might surprise
- # the users. Instead, we move it to oe-local-files.bak and remind
- # user in the log message.
+ # Move the oe-local-files directory to srctree.
+ # As oe-local-files is not part of the constructed git tree,
+ # removing it directly during the synchronization might surprise
+ # the user. Instead, we move it to oe-local-files.bak and remind
+ # the user in the log message.
if os.path.exists(srctree_localdir + '.bak'):
- shutil.rmtree(srctree_localdir, srctree_localdir + '.bak')
+ shutil.rmtree(srctree_localdir + '.bak')
if os.path.exists(srctree_localdir):
logger.info('Backing up current local file directory %s' % srctree_localdir)
@@ -676,7 +685,7 @@ def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, works
shutil.move(tempdir_localdir, srcsubdir)
shutil.move(srcsubdir, srctree)
- symlink_oelocal_files_srctree(d,srctree)
+ symlink_oelocal_files_srctree(d, srctree)
if is_kernel_yocto:
logger.info('Copying kernel config to srctree')
@@ -749,7 +758,7 @@ def get_staging_kver(srcdir):
kerver = []
staging_kerVer=""
if os.path.exists(srcdir) and os.listdir(srcdir):
- with open(os.path.join(srcdir,"Makefile")) as f:
+ with open(os.path.join(srcdir, "Makefile")) as f:
version = [next(f) for x in range(5)][1:4]
for word in version:
kerver.append(word.split('= ')[1].split('\n')[0])
@@ -759,10 +768,20 @@ def get_staging_kver(srcdir):
def get_staging_kbranch(srcdir):
staging_kbranch = ""
if os.path.exists(srcdir) and os.listdir(srcdir):
- (branch, _) = bb.process.run('git branch | grep \* | cut -d \' \' -f2', cwd=srcdir)
+ (branch, _) = bb.process.run('git branch | grep \\* | cut -d \' \' -f2', cwd=srcdir)
staging_kbranch = "".join(branch.split('\n')[0])
return staging_kbranch
+def get_real_srctree(srctree, s, workdir):
+ # Check that recipe isn't using a shared workdir
+ s = os.path.abspath(s)
+ workdir = os.path.abspath(workdir)
+ if s.startswith(workdir) and s != workdir and os.path.dirname(s) != workdir:
+ # Handle if S is set to a subdirectory of the source
+ srcsubdir = os.path.relpath(s, workdir).split(os.sep, 1)[1]
+ srctree = os.path.join(srctree, srcsubdir)
+ return srctree
+
def modify(args, config, basepath, workspace):
"""Entry point for the devtool 'modify' subcommand"""
import bb
@@ -807,8 +826,8 @@ def modify(args, config, basepath, workspace):
_check_compatible_recipe(pn, rd)
- initial_rev = None
- commits = []
+ initial_revs = {}
+ commits = {}
check_commits = False
if bb.data.inherits_class('kernel-yocto', rd):
@@ -820,10 +839,10 @@ def modify(args, config, basepath, workspace):
staging_kerVer = get_staging_kver(srcdir)
staging_kbranch = get_staging_kbranch(srcdir)
if (os.path.exists(srcdir) and os.listdir(srcdir)) and (kernelVersion in staging_kerVer and staging_kbranch == kbranch):
- oe.path.copyhardlinktree(srcdir,srctree)
+ oe.path.copyhardlinktree(srcdir, srctree)
workdir = rd.getVar('WORKDIR')
srcsubdir = rd.getVar('S')
- localfilesdir = os.path.join(srctree,'oe-local-files')
+ localfilesdir = os.path.join(srctree, 'oe-local-files')
# Move local source files into separate subdir
recipe_patches = [os.path.basename(patch) for patch in oe.recipeutils.get_recipe_patches(rd)]
local_files = oe.recipeutils.get_recipe_local_files(rd)
@@ -847,9 +866,9 @@ def modify(args, config, basepath, workspace):
for fname in local_files:
_move_file(os.path.join(workdir, fname), os.path.join(srctree, 'oe-local-files', fname))
with open(os.path.join(srctree, 'oe-local-files', '.gitignore'), 'w') as f:
- f.write('# Ignore local files, by default. Remove this file ''if you want to commit the directory to Git\n*\n')
+ f.write('# Ignore local files, by default. Remove this file if you want to commit the directory to Git\n*\n')
- symlink_oelocal_files_srctree(rd,srctree)
+ symlink_oelocal_files_srctree(rd, srctree)
task = 'do_configure'
res = tinfoil.build_targets(pn, task, handle_events=True)
@@ -857,22 +876,30 @@ def modify(args, config, basepath, workspace):
# Copy .config to workspace
kconfpath = rd.getVar('B')
logger.info('Copying kernel config to workspace')
- shutil.copy2(os.path.join(kconfpath, '.config'),srctree)
+ shutil.copy2(os.path.join(kconfpath, '.config'), srctree)
# Set this to true, we still need to get initial_rev
# by parsing the git repo
args.no_extract = True
if not args.no_extract:
- initial_rev, _ = _extract_source(srctree, args.keep_temp, args.branch, False, config, basepath, workspace, args.fixed_setup, rd, tinfoil, no_overrides=args.no_overrides)
- if not initial_rev:
+ initial_revs["."], _ = _extract_source(srctree, args.keep_temp, args.branch, False, config, basepath, workspace, args.fixed_setup, rd, tinfoil, no_overrides=args.no_overrides)
+ if not initial_revs["."]:
return 1
logger.info('Source tree extracted to %s' % srctree)
+
if os.path.exists(os.path.join(srctree, '.git')):
# Get list of commits since this revision
- (stdout, _) = bb.process.run('git rev-list --reverse %s..HEAD' % initial_rev, cwd=srctree)
- commits = stdout.split()
+ (stdout, _) = bb.process.run('git rev-list --reverse %s..HEAD' % initial_revs["."], cwd=srctree)
+ commits["."] = stdout.split()
check_commits = True
+ (stdout, _) = bb.process.run('git submodule --quiet foreach --recursive \'echo `git rev-parse devtool-base` $PWD\'', cwd=srctree)
+ for line in stdout.splitlines():
+ (rev, submodule_path) = line.split()
+ submodule = os.path.relpath(submodule_path, srctree)
+ initial_revs[submodule] = rev
+ (stdout, _) = bb.process.run('git rev-list --reverse devtool-base..HEAD', cwd=submodule_path)
+ commits[submodule] = stdout.split()
else:
if os.path.exists(os.path.join(srctree, '.git')):
# Check if it's a tree previously extracted by us. This is done
@@ -889,11 +916,11 @@ def modify(args, config, basepath, workspace):
for line in stdout.splitlines():
if line.startswith('*'):
(stdout, _) = bb.process.run('git rev-parse devtool-base', cwd=srctree)
- initial_rev = stdout.rstrip()
- if not initial_rev:
+ initial_revs["."] = stdout.rstrip()
+ if "." not in initial_revs:
# Otherwise, just grab the head revision
(stdout, _) = bb.process.run('git rev-parse HEAD', cwd=srctree)
- initial_rev = stdout.rstrip()
+ initial_revs["."] = stdout.rstrip()
branch_patches = {}
if check_commits:
@@ -910,28 +937,40 @@ def modify(args, config, basepath, workspace):
seen_patches = []
for branch in branches:
branch_patches[branch] = []
- (stdout, _) = bb.process.run('git log devtool-base..%s' % branch, cwd=srctree)
- for line in stdout.splitlines():
- line = line.strip()
- if line.startswith(oe.patch.GitApplyTree.patch_line_prefix):
- origpatch = line[len(oe.patch.GitApplyTree.patch_line_prefix):].split(':', 1)[-1].strip()
- if not origpatch in seen_patches:
- seen_patches.append(origpatch)
- branch_patches[branch].append(origpatch)
+ (stdout, _) = bb.process.run('git rev-list devtool-base..%s' % branch, cwd=srctree)
+ for sha1 in stdout.splitlines():
+ notes = oe.patch.GitApplyTree.getNotes(srctree, sha1.strip())
+ origpatch = notes.get(oe.patch.GitApplyTree.original_patch)
+ if origpatch and origpatch not in seen_patches:
+ seen_patches.append(origpatch)
+ branch_patches[branch].append(origpatch)
# Need to grab this here in case the source is within a subdirectory
srctreebase = srctree
-
- # Check that recipe isn't using a shared workdir
- s = os.path.abspath(rd.getVar('S'))
- workdir = os.path.abspath(rd.getVar('WORKDIR'))
- if s.startswith(workdir) and s != workdir and os.path.dirname(s) != workdir:
- # Handle if S is set to a subdirectory of the source
- srcsubdir = os.path.relpath(s, workdir).split(os.sep, 1)[1]
- srctree = os.path.join(srctree, srcsubdir)
+ srctree = get_real_srctree(srctree, rd.getVar('S'), rd.getVar('WORKDIR'))
bb.utils.mkdirhier(os.path.dirname(appendfile))
with open(appendfile, 'w') as f:
+ # if not present, add type=git-dependency to the secondary sources
+ # (non local files) so they can be extracted correctly when building a recipe after
+ # doing a devtool modify on it
+ src_uri = rd.getVar('SRC_URI').split()
+ src_uri_append = []
+ src_uri_remove = []
+
+ # Assume first entry is main source extracted in ${S} so skip it
+ src_uri = src_uri[1::]
+
+ # Add "type=git-dependency" to all non local sources
+ for url in src_uri:
+ if not url.startswith('file://') and not 'type=' in url:
+ src_uri_remove.append(url)
+ src_uri_append.append('%s;type=git-dependency' % url)
+
+ if src_uri_remove:
+ f.write('SRC_URI:remove = "%s"\n' % ' '.join(src_uri_remove))
+ f.write('SRC_URI:append = " %s"\n\n' % ' '.join(src_uri_append))
+
f.write('FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n')
# Local files can be modified/tracked in separate subdir under srctree
# Mostly useful for packages with S != WORKDIR
@@ -960,17 +999,19 @@ def modify(args, config, basepath, workspace):
' mv ${S}/.config ${S}/.config.old\n'
' fi\n'
'}\n')
- if rd.getVarFlag('do_menuconfig','task'):
+ if rd.getVarFlag('do_menuconfig', 'task'):
f.write('\ndo_configure:append() {\n'
- ' if [ ! ${DEVTOOL_DISABLE_MENUCONFIG} ]; then\n'
- ' cp ${B}/.config ${S}/.config.baseline\n'
- ' ln -sfT ${B}/.config ${S}/.config.new\n'
+ ' if [ ${@oe.types.boolean(d.getVar("KCONFIG_CONFIG_ENABLE_MENUCONFIG"))} = True ]; then\n'
+ ' cp ${KCONFIG_CONFIG_ROOTDIR}/.config ${S}/.config.baseline\n'
+ ' ln -sfT ${KCONFIG_CONFIG_ROOTDIR}/.config ${S}/.config.new\n'
' fi\n'
'}\n')
- if initial_rev:
- f.write('\n# initial_rev: %s\n' % initial_rev)
- for commit in commits:
- f.write('# commit: %s\n' % commit)
+ if initial_revs:
+ for name, rev in initial_revs.items():
+ f.write('\n# initial_rev %s: %s\n' % (name, rev))
+ if name in commits:
+ for commit in commits[name]:
+ f.write('# commit %s: %s\n' % (name, commit))
if branch_patches:
for branch in branch_patches:
if branch == args.branch:
@@ -1193,44 +1234,56 @@ def _get_patchset_revs(srctree, recipe_path, initial_rev=None, force_patch_refre
branchname = stdout.rstrip()
# Parse initial rev from recipe if not specified
- commits = []
+ commits = {}
patches = []
+ initial_revs = {}
with open(recipe_path, 'r') as f:
for line in f:
- if line.startswith('# initial_rev:'):
- if not initial_rev:
- initial_rev = line.split(':')[-1].strip()
- elif line.startswith('# commit:') and not force_patch_refresh:
- commits.append(line.split(':')[-1].strip())
- elif line.startswith('# patches_%s:' % branchname):
- patches = line.split(':')[-1].strip().split(',')
-
- update_rev = initial_rev
- changed_revs = None
- if initial_rev:
+ pattern = r'^#\s.*\s(.*):\s([0-9a-fA-F]+)$'
+ match = re.search(pattern, line)
+ if match:
+ name = match.group(1)
+ rev = match.group(2)
+ if line.startswith('# initial_rev'):
+ if not (name == "." and initial_rev):
+ initial_revs[name] = rev
+ elif line.startswith('# commit') and not force_patch_refresh:
+ if name not in commits:
+ commits[name] = [rev]
+ else:
+ commits[name].append(rev)
+ elif line.startswith('# patches_%s:' % branchname):
+ patches = line.split(':')[-1].strip().split(',')
+
+ update_revs = dict(initial_revs)
+ changed_revs = {}
+ for name, rev in initial_revs.items():
# Find first actually changed revision
stdout, _ = bb.process.run('git rev-list --reverse %s..HEAD' %
- initial_rev, cwd=srctree)
+ rev, cwd=os.path.join(srctree, name))
newcommits = stdout.split()
- for i in range(min(len(commits), len(newcommits))):
- if newcommits[i] == commits[i]:
- update_rev = commits[i]
+ if name in commits:
+ for i in range(min(len(commits[name]), len(newcommits))):
+ if newcommits[i] == commits[name][i]:
+ update_revs[name] = commits[name][i]
try:
stdout, _ = bb.process.run('git cherry devtool-patched',
- cwd=srctree)
+ cwd=os.path.join(srctree, name))
except bb.process.ExecutionError as err:
stdout = None
if stdout is not None and not force_patch_refresh:
- changed_revs = []
for line in stdout.splitlines():
if line.startswith('+ '):
rev = line.split()[1]
if rev in newcommits:
- changed_revs.append(rev)
+ if name not in changed_revs:
+ changed_revs[name] = [rev]
+ else:
+ changed_revs[name].append(rev)
- return initial_rev, update_rev, changed_revs, patches
+ return initial_revs, update_revs, changed_revs, patches
def _remove_file_entries(srcuri, filelist):
"""Remove file:// entries from SRC_URI"""
@@ -1285,14 +1338,17 @@ def _remove_source_files(append, files, destpath, no_report_remove=False, dry_ru
raise
-def _export_patches(srctree, rd, start_rev, destdir, changed_revs=None):
+def _export_patches(srctree, rd, start_revs, destdir, changed_revs=None):
"""Export patches from srctree to given location.
Returns three-tuple of dicts:
1. updated - patches that already exist in SRCURI
2. added - new patches that don't exist in SRCURI
3 removed - patches that exist in SRCURI but not in exported patches
- In each dict the key is the 'basepath' of the URI and value is the
- absolute path to the existing file in recipe space (if any).
+ In each dict the key is the 'basepath' of the URI and value is:
+ - for updated and added dicts, a dict with 2 optionnal keys:
+ - 'path': the absolute path to the existing file in recipe space (if any)
+ - 'patchdir': the directory in wich the patch should be applied (if any)
+ - for removed dict, the absolute path to the existing file in recipe space
"""
import oe.recipeutils
from oe.patch import GitApplyTree
@@ -1306,54 +1362,60 @@ def _export_patches(srctree, rd, start_rev, destdir, changed_revs=None):
# Generate patches from Git, exclude local files directory
patch_pathspec = _git_exclude_path(srctree, 'oe-local-files')
- GitApplyTree.extractPatches(srctree, start_rev, destdir, patch_pathspec)
-
- new_patches = sorted(os.listdir(destdir))
- for new_patch in new_patches:
- # Strip numbering from patch names. If it's a git sequence named patch,
- # the numbers might not match up since we are starting from a different
- # revision This does assume that people are using unique shortlog
- # values, but they ought to be anyway...
- new_basename = seqpatch_re.match(new_patch).group(2)
- match_name = None
- for old_patch in existing_patches:
- old_basename = seqpatch_re.match(old_patch).group(2)
- old_basename_splitext = os.path.splitext(old_basename)
- if old_basename.endswith(('.gz', '.bz2', '.Z')) and old_basename_splitext[0] == new_basename:
- old_patch_noext = os.path.splitext(old_patch)[0]
- match_name = old_patch_noext
- break
- elif new_basename == old_basename:
- match_name = old_patch
- break
- if match_name:
- # Rename patch files
- if new_patch != match_name:
- bb.utils.rename(os.path.join(destdir, new_patch),
- os.path.join(destdir, match_name))
- # Need to pop it off the list now before checking changed_revs
- oldpath = existing_patches.pop(old_patch)
- if changed_revs is not None:
- # Avoid updating patches that have not actually changed
- with open(os.path.join(destdir, match_name), 'r') as f:
- firstlineitems = f.readline().split()
- # Looking for "From <hash>" line
- if len(firstlineitems) > 1 and len(firstlineitems[1]) == 40:
- if not firstlineitems[1] in changed_revs:
- continue
- # Recompress if necessary
- if oldpath.endswith(('.gz', '.Z')):
- bb.process.run(['gzip', match_name], cwd=destdir)
- if oldpath.endswith('.gz'):
- match_name += '.gz'
- else:
- match_name += '.Z'
- elif oldpath.endswith('.bz2'):
- bb.process.run(['bzip2', match_name], cwd=destdir)
- match_name += '.bz2'
- updated[match_name] = oldpath
- else:
- added[new_patch] = None
+ GitApplyTree.extractPatches(srctree, start_revs, destdir, patch_pathspec)
+ for dirpath, dirnames, filenames in os.walk(destdir):
+ new_patches = filenames
+ reldirpath = os.path.relpath(dirpath, destdir)
+ for new_patch in new_patches:
+ # Strip numbering from patch names. If it's a git sequence named patch,
+ # the numbers might not match up since we are starting from a different
+ # revision This does assume that people are using unique shortlog
+ # values, but they ought to be anyway...
+ new_basename = seqpatch_re.match(new_patch).group(2)
+ match_name = None
+ for old_patch in existing_patches:
+ old_basename = seqpatch_re.match(old_patch).group(2)
+ old_basename_splitext = os.path.splitext(old_basename)
+ if old_basename.endswith(('.gz', '.bz2', '.Z')) and old_basename_splitext[0] == new_basename:
+ old_patch_noext = os.path.splitext(old_patch)[0]
+ match_name = old_patch_noext
+ break
+ elif new_basename == old_basename:
+ match_name = old_patch
+ break
+ if match_name:
+ # Rename patch files
+ if new_patch != match_name:
+ bb.utils.rename(os.path.join(destdir, new_patch),
+ os.path.join(destdir, match_name))
+ # Need to pop it off the list now before checking changed_revs
+ oldpath = existing_patches.pop(old_patch)
+ if changed_revs is not None and dirpath in changed_revs:
+ # Avoid updating patches that have not actually changed
+ with open(os.path.join(dirpath, match_name), 'r') as f:
+ firstlineitems = f.readline().split()
+ # Looking for "From <hash>" line
+ if len(firstlineitems) > 1 and len(firstlineitems[1]) == 40:
+ if not firstlineitems[1] in changed_revs[dirpath]:
+ continue
+ # Recompress if necessary
+ if oldpath.endswith(('.gz', '.Z')):
+ bb.process.run(['gzip', match_name], cwd=destdir)
+ if oldpath.endswith('.gz'):
+ match_name += '.gz'
+ else:
+ match_name += '.Z'
+ elif oldpath.endswith('.bz2'):
+ bb.process.run(['bzip2', match_name], cwd=destdir)
+ match_name += '.bz2'
+ updated[match_name] = {'path' : oldpath}
+ if reldirpath != ".":
+ updated[match_name]['patchdir'] = reldirpath
+ else:
+ added[new_patch] = {}
+ if reldirpath != ".":
+ added[new_patch]['patchdir'] = reldirpath
+
return (updated, added, existing_patches)
@@ -1404,6 +1466,18 @@ def _export_local_files(srctree, rd, destdir, srctreebase):
updated = OrderedDict()
added = OrderedDict()
removed = OrderedDict()
+
+ # Get current branch and return early with empty lists
+ # if on one of the override branches
+ # (local files are provided only for the main branch and processing
+ # them against lists from recipe overrides will result in mismatches
+ # and broken modifications to recipes).
+ stdout, _ = bb.process.run('git rev-parse --abbrev-ref HEAD',
+ cwd=srctree)
+ branchname = stdout.rstrip()
+ if branchname.startswith(override_branch_prefix):
+ return (updated, added, removed)
+
local_files_dir = os.path.join(srctreebase, 'oe-local-files')
git_files = _git_ls_tree(srctree)
if 'oe-local-files' in git_files:
@@ -1509,6 +1583,12 @@ def _update_recipe_srcrev(recipename, workspace, srctree, rd, appendlayerdir, wi
recipedir = os.path.basename(recipefile)
logger.info('Updating SRCREV in recipe %s%s' % (recipedir, dry_run_suffix))
+ # Get original SRCREV
+ old_srcrev = rd.getVar('SRCREV') or ''
+ if old_srcrev == "INVALID":
+ raise DevtoolError('Update mode srcrev is only valid for recipe fetched from an SCM repository')
+ old_srcrev = {'.': old_srcrev}
+
# Get HEAD revision
try:
stdout, _ = bb.process.run('git rev-parse HEAD', cwd=srctree)
@@ -1535,13 +1615,12 @@ def _update_recipe_srcrev(recipename, workspace, srctree, rd, appendlayerdir, wi
if not no_remove:
# Find list of existing patches in recipe file
patches_dir = tempfile.mkdtemp(dir=tempdir)
- old_srcrev = rd.getVar('SRCREV') or ''
upd_p, new_p, del_p = _export_patches(srctree, rd, old_srcrev,
patches_dir)
logger.debug('Patches: update %s, new %s, delete %s' % (dict(upd_p), dict(new_p), dict(del_p)))
# Remove deleted local files and "overlapping" patches
- remove_files = list(del_f.values()) + list(upd_p.values()) + list(del_p.values())
+ remove_files = list(del_f.values()) + [value["path"] for value in upd_p.values() if "path" in value] + [value["path"] for value in del_p.values() if "path" in value]
if remove_files:
removedentries = _remove_file_entries(srcuri, remove_files)[0]
update_srcuri = True
@@ -1555,11 +1634,10 @@ def _update_recipe_srcrev(recipename, workspace, srctree, rd, appendlayerdir, wi
patchfields['SRC_URI'] = '\\\n '.join(srcuri)
if dry_run_outdir:
logger.info('Creating bbappend (dry-run)')
- else:
- appendfile, destpath = oe.recipeutils.bbappend_recipe(
- rd, appendlayerdir, files, wildcardver=wildcard_version,
- extralines=patchfields, removevalues=removevalues,
- redirect_output=dry_run_outdir)
+ appendfile, destpath = oe.recipeutils.bbappend_recipe(
+ rd, appendlayerdir, files, wildcardver=wildcard_version,
+ extralines=patchfields, removevalues=removevalues,
+ redirect_output=dry_run_outdir)
else:
files_dir = _determine_files_dir(rd)
for basepath, path in upd_f.items():
@@ -1604,9 +1682,22 @@ def _update_recipe_patch(recipename, workspace, srctree, rd, appendlayerdir, wil
if not os.path.exists(append):
raise DevtoolError('unable to find workspace bbappend for recipe %s' %
recipename)
+ srctreebase = workspace[recipename]['srctreebase']
+ relpatchdir = os.path.relpath(srctreebase, srctree)
+ if relpatchdir == '.':
+ patchdir_params = {}
+ else:
+ patchdir_params = {'patchdir': relpatchdir}
- initial_rev, update_rev, changed_revs, filter_patches = _get_patchset_revs(srctree, append, initial_rev, force_patch_refresh)
- if not initial_rev:
+ def srcuri_entry(basepath, patchdir_params):
+ if patchdir_params:
+ paramstr = ';' + ';'.join('%s=%s' % (k,v) for k,v in patchdir_params.items())
+ else:
+ paramstr = ''
+ return 'file://%s%s' % (basepath, paramstr)
+
+ initial_revs, update_revs, changed_revs, filter_patches = _get_patchset_revs(srctree, append, initial_rev, force_patch_refresh)
+ if not initial_revs:
raise DevtoolError('Unable to find initial revision - please specify '
'it with --initial-rev')
@@ -1620,55 +1711,62 @@ def _update_recipe_patch(recipename, workspace, srctree, rd, appendlayerdir, wil
tempdir = tempfile.mkdtemp(prefix='devtool')
try:
local_files_dir = tempfile.mkdtemp(dir=tempdir)
- if filter_patches:
- upd_f = {}
- new_f = {}
- del_f = {}
- else:
- srctreebase = workspace[recipename]['srctreebase']
- upd_f, new_f, del_f = _export_local_files(srctree, rd, local_files_dir, srctreebase)
-
- remove_files = []
- if not no_remove:
- # Get all patches from source tree and check if any should be removed
- all_patches_dir = tempfile.mkdtemp(dir=tempdir)
- _, _, del_p = _export_patches(srctree, rd, initial_rev,
- all_patches_dir)
- # Remove deleted local files and patches
- remove_files = list(del_f.values()) + list(del_p.values())
+ upd_f, new_f, del_f = _export_local_files(srctree, rd, local_files_dir, srctreebase)
# Get updated patches from source tree
patches_dir = tempfile.mkdtemp(dir=tempdir)
- upd_p, new_p, _ = _export_patches(srctree, rd, update_rev,
+ upd_p, new_p, _ = _export_patches(srctree, rd, update_revs,
patches_dir, changed_revs)
+ # Get all patches from source tree and check if any should be removed
+ all_patches_dir = tempfile.mkdtemp(dir=tempdir)
+ _, _, del_p = _export_patches(srctree, rd, initial_revs,
+ all_patches_dir)
logger.debug('Pre-filtering: update: %s, new: %s' % (dict(upd_p), dict(new_p)))
if filter_patches:
new_p = OrderedDict()
upd_p = OrderedDict((k,v) for k,v in upd_p.items() if k in filter_patches)
- remove_files = [f for f in remove_files if f in filter_patches]
+ del_p = OrderedDict((k,v) for k,v in del_p.items() if k in filter_patches)
+ remove_files = []
+ if not no_remove:
+ # Remove deleted local files and patches
+ remove_files = list(del_f.values()) + list(del_p.values())
updatefiles = False
updaterecipe = False
destpath = None
srcuri = (rd.getVar('SRC_URI', False) or '').split()
+
if appendlayerdir:
files = OrderedDict((os.path.join(local_files_dir, key), val) for
key, val in list(upd_f.items()) + list(new_f.items()))
files.update(OrderedDict((os.path.join(patches_dir, key), val) for
key, val in list(upd_p.items()) + list(new_p.items())))
+
+ params = []
+ for file, param in files.items():
+ patchdir_param = dict(patchdir_params)
+ patchdir = param.get('patchdir', ".")
+ if patchdir != "." :
+ if patchdir_param:
+ patchdir_param['patchdir'] += patchdir
+ else:
+ patchdir_param['patchdir'] = patchdir
+ params.append(patchdir_param)
+
if files or remove_files:
removevalues = None
if remove_files:
removedentries, remaining = _remove_file_entries(
srcuri, remove_files)
if removedentries or remaining:
- remaining = ['file://' + os.path.basename(item) for
+ remaining = [srcuri_entry(os.path.basename(item), patchdir_params) for
item in remaining]
removevalues = {'SRC_URI': removedentries + remaining}
appendfile, destpath = oe.recipeutils.bbappend_recipe(
rd, appendlayerdir, files,
wildcardver=wildcard_version,
removevalues=removevalues,
- redirect_output=dry_run_outdir)
+ redirect_output=dry_run_outdir,
+ params=params)
else:
logger.info('No patches or local source files needed updating')
else:
@@ -1685,14 +1783,22 @@ def _update_recipe_patch(recipename, workspace, srctree, rd, appendlayerdir, wil
_move_file(os.path.join(local_files_dir, basepath), path,
dry_run_outdir=dry_run_outdir, base_outdir=recipedir)
updatefiles = True
- for basepath, path in upd_p.items():
- patchfn = os.path.join(patches_dir, basepath)
+ for basepath, param in upd_p.items():
+ path = param['path']
+ patchdir = param.get('patchdir', ".")
+ if patchdir != "." :
+ patchdir_param = dict(patchdir_params)
+ if patchdir_param:
+ patchdir_param['patchdir'] += patchdir
+ else:
+ patchdir_param['patchdir'] = patchdir
+ patchfn = os.path.join(patches_dir, patchdir, basepath)
if os.path.dirname(path) + '/' == dl_dir:
# This is a a downloaded patch file - we now need to
# replace the entry in SRC_URI with our local version
logger.info('Replacing remote patch %s with updated local version' % basepath)
path = os.path.join(files_dir, basepath)
- _replace_srcuri_entry(srcuri, basepath, 'file://%s' % basepath)
+ _replace_srcuri_entry(srcuri, basepath, srcuri_entry(basepath, patchdir_param))
updaterecipe = True
else:
logger.info('Updating patch %s%s' % (basepath, dry_run_suffix))
@@ -1706,15 +1812,23 @@ def _update_recipe_patch(recipename, workspace, srctree, rd, appendlayerdir, wil
os.path.join(files_dir, basepath),
dry_run_outdir=dry_run_outdir,
base_outdir=recipedir)
- srcuri.append('file://%s' % basepath)
+ srcuri.append(srcuri_entry(basepath, patchdir_params))
updaterecipe = True
- for basepath, path in new_p.items():
+ for basepath, param in new_p.items():
+ patchdir = param.get('patchdir', ".")
logger.info('Adding new patch %s%s' % (basepath, dry_run_suffix))
- _move_file(os.path.join(patches_dir, basepath),
+ _move_file(os.path.join(patches_dir, patchdir, basepath),
os.path.join(files_dir, basepath),
dry_run_outdir=dry_run_outdir,
base_outdir=recipedir)
- srcuri.append('file://%s' % basepath)
+ params = dict(patchdir_params)
+ if patchdir != "." :
+ if params:
+ params['patchdir'] += patchdir
+ else:
+ params['patchdir'] = patchdir
+
+ srcuri.append(srcuri_entry(basepath, params))
updaterecipe = True
# Update recipe, if needed
if _remove_file_entries(srcuri, remove_files)[0]:
@@ -1771,6 +1885,8 @@ def _update_recipe(recipename, workspace, rd, mode, appendlayerdir, wildcard_ver
for line in stdout.splitlines():
branchname = line[2:]
if line.startswith('* '):
+ if 'HEAD' in line:
+ raise DevtoolError('Detached HEAD - please check out a branch, e.g., "devtool"')
startbranch = branchname
if branchname.startswith(override_branch_prefix):
override_branches.append(branchname)
@@ -1960,9 +2076,19 @@ def _reset(recipes, no_clean, remove_work, config, basepath, workspace):
shutil.rmtree(srctreebase)
else:
# We don't want to risk wiping out any work in progress
- logger.info('Leaving source tree %s as-is; if you no '
- 'longer need it then please delete it manually'
- % srctreebase)
+ if srctreebase.startswith(os.path.join(config.workspace_path, 'sources')):
+ from datetime import datetime
+ preservesrc = os.path.join(config.workspace_path, 'attic', 'sources', "{}.{}".format(pn, datetime.now().strftime("%Y%m%d%H%M%S")))
+ logger.info('Preserving source tree in %s\nIf you no '
+ 'longer need it then please delete it manually.\n'
+ 'It is also possible to reuse it via devtool source tree argument.'
+ % preservesrc)
+ bb.utils.mkdirhier(os.path.dirname(preservesrc))
+ shutil.move(srctreebase, preservesrc)
+ else:
+ logger.info('Leaving source tree %s as-is; if you no '
+ 'longer need it then please delete it manually'
+ % srctreebase)
else:
# This is unlikely, but if it's empty we can just remove it
os.rmdir(srctreebase)
@@ -2222,6 +2348,7 @@ def register_commands(subparsers, context):
group.add_argument('--no-same-dir', help='Force build in a separate build directory', action="store_true")
parser_add.add_argument('--fetch', '-f', help='Fetch the specified URI and extract it to create the source tree (deprecated - pass as positional argument instead)', metavar='URI')
parser_add.add_argument('--npm-dev', help='For npm, also fetch devDependencies', action="store_true")
+ parser_add.add_argument('--no-pypi', help='Do not inherit pypi class', action="store_true")
parser_add.add_argument('--version', '-V', help='Version to use within recipe (PV)')
parser_add.add_argument('--no-git', '-g', help='If fetching source, do not set up source tree as a git repository', action="store_true")
group = parser_add.add_mutually_exclusive_group()
diff --git a/scripts/lib/devtool/upgrade.py b/scripts/lib/devtool/upgrade.py
index 0357ec07bf..fa5b8ef3c7 100644
--- a/scripts/lib/devtool/upgrade.py
+++ b/scripts/lib/devtool/upgrade.py
@@ -35,6 +35,8 @@ def _get_srctree(tmpdir):
dirs = scriptutils.filter_src_subdirs(tmpdir)
if len(dirs) == 1:
srctree = os.path.join(tmpdir, dirs[0])
+ else:
+ raise DevtoolError("Cannot determine where the source tree is after unpacking in {}: {}".format(tmpdir,dirs))
return srctree
def _copy_source_code(orig, dest):
@@ -88,7 +90,7 @@ def _rename_recipe_files(oldrecipe, bpn, oldpv, newpv, path):
_rename_recipe_dirs(oldpv, newpv, path)
return _rename_recipe_file(oldrecipe, bpn, oldpv, newpv, path)
-def _write_append(rc, srctree, same_dir, no_same_dir, rev, copied, workspace, d):
+def _write_append(rc, srctreebase, srctree, same_dir, no_same_dir, revs, copied, workspace, d):
"""Writes an append file"""
if not os.path.exists(rc):
raise DevtoolError("bbappend not created because %s does not exist" % rc)
@@ -104,6 +106,11 @@ def _write_append(rc, srctree, same_dir, no_same_dir, rev, copied, workspace, d)
af = os.path.join(appendpath, '%s.bbappend' % brf)
with open(af, 'w') as f:
f.write('FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n\n')
+ # Local files can be modified/tracked in separate subdir under srctree
+ # Mostly useful for packages with S != WORKDIR
+ f.write('FILESPATH:prepend := "%s:"\n' %
+ os.path.join(srctreebase, 'oe-local-files'))
+ f.write('# srctreebase: %s\n' % srctreebase)
f.write('inherit externalsrc\n')
f.write(('# NOTE: We use pn- overrides here to avoid affecting'
'multiple variants in the case where the recipe uses BBCLASSEXTEND\n'))
@@ -112,27 +119,24 @@ def _write_append(rc, srctree, same_dir, no_same_dir, rev, copied, workspace, d)
if b_is_s:
f.write('EXTERNALSRC_BUILD:pn-%s = "%s"\n' % (pn, srctree))
f.write('\n')
- if rev:
- f.write('# initial_rev: %s\n' % rev)
+ if revs:
+ for name, rev in revs.items():
+ f.write('# initial_rev %s: %s\n' % (name, rev))
if copied:
f.write('# original_path: %s\n' % os.path.dirname(d.getVar('FILE')))
f.write('# original_files: %s\n' % ' '.join(copied))
return af
-def _cleanup_on_error(rf, srctree):
- rfp = os.path.split(rf)[0] # recipe folder
- rfpp = os.path.split(rfp)[0] # recipes folder
- if os.path.exists(rfp):
- shutil.rmtree(rfp)
- if not len(os.listdir(rfpp)):
- os.rmdir(rfpp)
+def _cleanup_on_error(rd, srctree):
+ if os.path.exists(rd):
+ shutil.rmtree(rd)
srctree = os.path.abspath(srctree)
if os.path.exists(srctree):
shutil.rmtree(srctree)
-def _upgrade_error(e, rf, srctree, keep_failure=False, extramsg=None):
- if rf and not keep_failure:
- _cleanup_on_error(rf, srctree)
+def _upgrade_error(e, rd, srctree, keep_failure=False, extramsg=None):
+ if not keep_failure:
+ _cleanup_on_error(rd, srctree)
logger.error(e)
if extramsg:
logger.error(extramsg)
@@ -179,12 +183,16 @@ def _extract_new_source(newpv, srctree, no_patch, srcrev, srcbranch, branch, kee
uri, rev = _get_uri(crd)
if srcrev:
rev = srcrev
+ paths = [srctree]
if uri.startswith('git://') or uri.startswith('gitsm://'):
__run('git fetch')
__run('git checkout %s' % rev)
__run('git tag -f devtool-base-new')
- md5 = None
- sha256 = None
+ __run('git submodule update --recursive')
+ __run('git submodule foreach \'git tag -f devtool-base-new\'')
+ (stdout, _) = __run('git submodule --quiet foreach \'echo $sm_path\'')
+ paths += [os.path.join(srctree, p) for p in stdout.splitlines()]
+ checksums = {}
_, _, _, _, _, params = bb.fetch2.decodeurl(uri)
srcsubdir_rel = params.get('destsuffix', 'git')
if not srcbranch:
@@ -217,9 +225,6 @@ def _extract_new_source(newpv, srctree, no_patch, srcrev, srcbranch, branch, kee
if ftmpdir and keep_temp:
logger.info('Fetch temp directory is %s' % ftmpdir)
- md5 = checksums['md5sum']
- sha256 = checksums['sha256sum']
-
tmpsrctree = _get_srctree(tmpdir)
srctree = os.path.abspath(srctree)
srcsubdir_rel = os.path.relpath(tmpsrctree, tmpdir)
@@ -253,29 +258,50 @@ def _extract_new_source(newpv, srctree, no_patch, srcrev, srcbranch, branch, kee
__run('git %s commit -q -m "Commit of upstream changes at version %s" --allow-empty' % (' '.join(useroptions), newpv))
__run('git tag -f devtool-base-%s' % newpv)
- (stdout, _) = __run('git rev-parse HEAD')
- rev = stdout.rstrip()
+ revs = {}
+ for path in paths:
+ (stdout, _) = _run('git rev-parse HEAD', cwd=path)
+ revs[os.path.relpath(path, srctree)] = stdout.rstrip()
if no_patch:
patches = oe.recipeutils.get_recipe_patches(crd)
if patches:
logger.warning('By user choice, the following patches will NOT be applied to the new source tree:\n %s' % '\n '.join([os.path.basename(patch) for patch in patches]))
else:
- __run('git checkout devtool-patched -b %s' % branch)
- (stdout, _) = __run('git branch --list devtool-override-*')
- branches_to_rebase = [branch] + stdout.split()
- for b in branches_to_rebase:
- logger.info("Rebasing {} onto {}".format(b, rev))
- __run('git checkout %s' % b)
- try:
- __run('git rebase %s' % rev)
- except bb.process.ExecutionError as e:
- if 'conflict' in e.stdout:
- logger.warning('Command \'%s\' failed:\n%s\n\nYou will need to resolve conflicts in order to complete the upgrade.' % (e.command, e.stdout.rstrip()))
- __run('git rebase --abort')
- else:
- logger.warning('Command \'%s\' failed:\n%s' % (e.command, e.stdout))
- __run('git checkout %s' % branch)
+ for path in paths:
+ _run('git checkout devtool-patched -b %s' % branch, cwd=path)
+ (stdout, _) = _run('git branch --list devtool-override-*', cwd=path)
+ branches_to_rebase = [branch] + stdout.split()
+ target_branch = revs[os.path.relpath(path, srctree)]
+
+ # There is a bug (or feature?) in git rebase where if a commit with
+ # a note is fully rebased away by being part of an old commit, the
+ # note is still attached to the old commit. Avoid this by making
+ # sure all old devtool related commits have a note attached to them
+ # (this assumes git config notes.rewriteMode is set to ignore).
+ (stdout, _) = __run('git rev-list devtool-base..%s' % target_branch)
+ for rev in stdout.splitlines():
+ if not oe.patch.GitApplyTree.getNotes(path, rev):
+ oe.patch.GitApplyTree.addNote(path, rev, "dummy")
+
+ for b in branches_to_rebase:
+ logger.info("Rebasing {} onto {}".format(b, target_branch))
+ _run('git checkout %s' % b, cwd=path)
+ try:
+ _run('git rebase %s' % target_branch, cwd=path)
+ except bb.process.ExecutionError as e:
+ if 'conflict' in e.stdout:
+ logger.warning('Command \'%s\' failed:\n%s\n\nYou will need to resolve conflicts in order to complete the upgrade.' % (e.command, e.stdout.rstrip()))
+ _run('git rebase --abort', cwd=path)
+ else:
+ logger.warning('Command \'%s\' failed:\n%s' % (e.command, e.stdout))
+
+ # Remove any dummy notes added above.
+ (stdout, _) = __run('git rev-list devtool-base..%s' % target_branch)
+ for rev in stdout.splitlines():
+ oe.patch.GitApplyTree.removeNote(path, rev, "dummy")
+
+ _run('git checkout %s' % branch, cwd=path)
if tmpsrctree:
if keep_temp:
@@ -285,7 +311,7 @@ def _extract_new_source(newpv, srctree, no_patch, srcrev, srcbranch, branch, kee
if tmpdir != tmpsrctree:
shutil.rmtree(tmpdir)
- return (rev, md5, sha256, srcbranch, srcsubdir_rel)
+ return (revs, checksums, srcbranch, srcsubdir_rel)
def _add_license_diff_to_recipe(path, diff):
notice_text = """# FIXME: the LIC_FILES_CHKSUM values have been updated by 'devtool upgrade'.
@@ -306,7 +332,7 @@ def _add_license_diff_to_recipe(path, diff):
f.write("\n#\n\n".encode())
f.write(orig_content)
-def _create_new_recipe(newpv, md5, sha256, srcrev, srcbranch, srcsubdir_old, srcsubdir_new, workspace, tinfoil, rd, license_diff, new_licenses, srctree, keep_failure):
+def _create_new_recipe(newpv, checksums, srcrev, srcbranch, srcsubdir_old, srcsubdir_new, workspace, tinfoil, rd, license_diff, new_licenses, srctree, keep_failure):
"""Creates the new recipe under workspace"""
bpn = rd.getVar('BPN')
@@ -337,7 +363,10 @@ def _create_new_recipe(newpv, md5, sha256, srcrev, srcbranch, srcsubdir_old, src
replacing = True
new_src_uri = []
for entry in src_uri:
- scheme, network, path, user, passwd, params = bb.fetch2.decodeurl(entry)
+ try:
+ scheme, network, path, user, passwd, params = bb.fetch2.decodeurl(entry)
+ except bb.fetch2.MalformedUrl as e:
+ raise DevtoolError("Could not decode SRC_URI: {}".format(e))
if replacing and scheme in ['git', 'gitsm']:
branch = params.get('branch', 'master')
if rd.expand(branch) != srcbranch:
@@ -375,30 +404,39 @@ def _create_new_recipe(newpv, md5, sha256, srcrev, srcbranch, srcsubdir_old, src
addnames.append(params['name'])
# Find what's been set in the original recipe
oldnames = []
+ oldsums = []
noname = False
for varflag in rd.getVarFlags('SRC_URI'):
- if varflag.endswith(('.md5sum', '.sha256sum')):
- name = varflag.rsplit('.', 1)[0]
- if name not in oldnames:
- oldnames.append(name)
- elif varflag in ['md5sum', 'sha256sum']:
- noname = True
+ for checksum in checksums:
+ if varflag.endswith('.' + checksum):
+ name = varflag.rsplit('.', 1)[0]
+ if name not in oldnames:
+ oldnames.append(name)
+ oldsums.append(checksum)
+ elif varflag == checksum:
+ noname = True
+ oldsums.append(checksum)
# Even if SRC_URI has named entries it doesn't have to actually use the name
if noname and addnames and addnames[0] not in oldnames:
addnames = []
# Drop any old names (the name actually might include ${PV})
for name in oldnames:
if name not in newnames:
- newvalues['SRC_URI[%s.md5sum]' % name] = None
- newvalues['SRC_URI[%s.sha256sum]' % name] = None
+ for checksum in oldsums:
+ newvalues['SRC_URI[%s.%s]' % (name, checksum)] = None
- if sha256:
- if addnames:
- nameprefix = '%s.' % addnames[0]
- else:
- nameprefix = ''
+ nameprefix = '%s.' % addnames[0] if addnames else ''
+
+ # md5sum is deprecated, remove any traces of it. If it was the only old
+ # checksum, then replace it with the default checksums.
+ if 'md5sum' in oldsums:
newvalues['SRC_URI[%smd5sum]' % nameprefix] = None
- newvalues['SRC_URI[%ssha256sum]' % nameprefix] = sha256
+ oldsums.remove('md5sum')
+ if not oldsums:
+ oldsums = ["%ssum" % s for s in bb.fetch2.SHOWN_CHECKSUM_LIST]
+
+ for checksum in oldsums:
+ newvalues['SRC_URI[%s%s]' % (nameprefix, checksum)] = checksums[checksum]
if srcsubdir_new != srcsubdir_old:
s_subdir_old = os.path.relpath(os.path.abspath(rd.getVar('S')), rd.getVar('WORKDIR'))
@@ -423,10 +461,11 @@ def _create_new_recipe(newpv, md5, sha256, srcrev, srcbranch, srcsubdir_old, src
newvalues["LIC_FILES_CHKSUM"] = newlicchksum
_add_license_diff_to_recipe(fullpath, license_diff)
+ tinfoil.modified_files()
try:
rd = tinfoil.parse_recipe_file(fullpath, False)
except bb.tinfoil.TinfoilCommandFailed as e:
- _upgrade_error(e, fullpath, srctree, keep_failure, 'Parsing of upgraded recipe failed')
+ _upgrade_error(e, os.path.dirname(fullpath), srctree, keep_failure, 'Parsing of upgraded recipe failed')
oe.recipeutils.patch_recipe(rd, fullpath, newvalues)
return fullpath, copied
@@ -435,7 +474,7 @@ def _create_new_recipe(newpv, md5, sha256, srcrev, srcbranch, srcsubdir_old, src
def _check_git_config():
def getconfig(name):
try:
- value = bb.process.run('git config --global %s' % name)[0].strip()
+ value = bb.process.run('git config %s' % name)[0].strip()
except bb.process.ExecutionError as e:
if e.exitcode == 1:
value = None
@@ -522,14 +561,7 @@ def upgrade(args, config, basepath, workspace):
else:
srctree = standard.get_default_srctree(config, pn)
- # Check that recipe isn't using a shared workdir
- s = os.path.abspath(rd.getVar('S'))
- workdir = os.path.abspath(rd.getVar('WORKDIR'))
- srctree_s = srctree
- if s.startswith(workdir) and s != workdir and os.path.dirname(s) != workdir:
- # Handle if S is set to a subdirectory of the source
- srcsubdir = os.path.relpath(s, workdir).split(os.sep, 1)[1]
- srctree_s = os.path.join(srctree, srcsubdir)
+ srctree_s = standard.get_real_srctree(srctree, rd.getVar('S'), rd.getVar('WORKDIR'))
# try to automatically discover latest version and revision if not provided on command line
if not args.version and not args.srcrev:
@@ -562,19 +594,18 @@ def upgrade(args, config, basepath, workspace):
rev1, srcsubdir1 = standard._extract_source(srctree, False, 'devtool-orig', False, config, basepath, workspace, args.fixed_setup, rd, tinfoil, no_overrides=args.no_overrides)
old_licenses = _extract_licenses(srctree_s, (rd.getVar('LIC_FILES_CHKSUM') or ""))
logger.info('Extracting upgraded version source...')
- rev2, md5, sha256, srcbranch, srcsubdir2 = _extract_new_source(args.version, srctree, args.no_patch,
+ rev2, checksums, srcbranch, srcsubdir2 = _extract_new_source(args.version, srctree, args.no_patch,
args.srcrev, args.srcbranch, args.branch, args.keep_temp,
tinfoil, rd)
new_licenses = _extract_licenses(srctree_s, (rd.getVar('LIC_FILES_CHKSUM') or ""))
license_diff = _generate_license_diff(old_licenses, new_licenses)
- rf, copied = _create_new_recipe(args.version, md5, sha256, args.srcrev, srcbranch, srcsubdir1, srcsubdir2, config.workspace_path, tinfoil, rd, license_diff, new_licenses, srctree, args.keep_failure)
- except bb.process.CmdError as e:
- _upgrade_error(e, rf, srctree, args.keep_failure)
- except DevtoolError as e:
- _upgrade_error(e, rf, srctree, args.keep_failure)
+ rf, copied = _create_new_recipe(args.version, checksums, args.srcrev, srcbranch, srcsubdir1, srcsubdir2, config.workspace_path, tinfoil, rd, license_diff, new_licenses, srctree, args.keep_failure)
+ except (bb.process.CmdError, DevtoolError) as e:
+ recipedir = os.path.join(config.workspace_path, 'recipes', rd.getVar('BPN'))
+ _upgrade_error(e, recipedir, srctree, args.keep_failure)
standard._add_md5(config, pn, os.path.dirname(rf))
- af = _write_append(rf, srctree_s, args.same_dir, args.no_same_dir, rev2,
+ af = _write_append(rf, srctree, srctree_s, args.same_dir, args.no_same_dir, rev2,
copied, config.workspace_path, rd)
standard._add_md5(config, pn, af)
@@ -618,7 +649,7 @@ def check_upgrade_status(args, config, basepath, workspace):
for result in results:
# pn, update_status, current, latest, maintainer, latest_commit, no_update_reason
if args.all or result[1] != 'MATCH':
- logger.info("{:25} {:15} {:15} {} {} {}".format( result[0],
+ print("{:25} {:15} {:15} {} {} {}".format( result[0],
result[2],
result[1] if result[1] != 'UPDATE' else (result[3] if not result[3].endswith("new-commits-available") else "new commits"),
result[4],
diff --git a/scripts/lib/recipetool/append.py b/scripts/lib/recipetool/append.py
index 88ed8c5f01..341e893305 100644
--- a/scripts/lib/recipetool/append.py
+++ b/scripts/lib/recipetool/append.py
@@ -18,6 +18,7 @@ import shutil
import scriptutils
import errno
from collections import defaultdict
+import difflib
logger = logging.getLogger('recipetool')
@@ -299,7 +300,10 @@ def appendfile(args):
if st.st_mode & stat.S_IXUSR:
perms = '0755'
install = {args.newfile: (args.targetpath, perms)}
- oe.recipeutils.bbappend_recipe(rd, args.destlayer, {args.newfile: sourcepath}, install, wildcardver=args.wildcard_version, machine=args.machine)
+ if sourcepath:
+ sourcepath = os.path.basename(sourcepath)
+ oe.recipeutils.bbappend_recipe(rd, args.destlayer, {args.newfile: {'newname' : sourcepath}}, install, wildcardver=args.wildcard_version, machine=args.machine)
+ tinfoil.modified_files()
return 0
else:
if alternative_pns:
@@ -327,6 +331,7 @@ def appendsrc(args, files, rd, extralines=None):
copyfiles = {}
extralines = extralines or []
+ params = []
for newfile, srcfile in files.items():
src_destdir = os.path.dirname(srcfile)
if not args.use_workdir:
@@ -337,25 +342,46 @@ def appendsrc(args, files, rd, extralines=None):
src_destdir = os.path.join(os.path.relpath(srcdir, workdir), src_destdir)
src_destdir = os.path.normpath(src_destdir)
- source_uri = 'file://{0}'.format(os.path.basename(srcfile))
if src_destdir and src_destdir != '.':
- source_uri += ';subdir={0}'.format(src_destdir)
-
- simple = bb.fetch.URI(source_uri)
- simple.params = {}
- simple_str = str(simple)
- if simple_str in simplified:
- existing = simplified[simple_str]
- if source_uri != existing:
- logger.warning('{0!r} is already in SRC_URI, with different parameters: {1!r}, not adding'.format(source_uri, existing))
- else:
- logger.warning('{0!r} is already in SRC_URI, not adding'.format(source_uri))
+ params.append({'subdir': src_destdir})
else:
- extralines.append('SRC_URI += {0}'.format(source_uri))
- copyfiles[newfile] = srcfile
-
- oe.recipeutils.bbappend_recipe(rd, args.destlayer, copyfiles, None, wildcardver=args.wildcard_version, machine=args.machine, extralines=extralines)
-
+ params.append({})
+
+ copyfiles[newfile] = {'newname' : os.path.basename(srcfile)}
+
+ dry_run_output = None
+ dry_run_outdir = None
+ if args.dry_run:
+ import tempfile
+ dry_run_output = tempfile.TemporaryDirectory(prefix='devtool')
+ dry_run_outdir = dry_run_output.name
+
+ appendfile, _ = oe.recipeutils.bbappend_recipe(rd, args.destlayer, copyfiles, None, wildcardver=args.wildcard_version, machine=args.machine, extralines=extralines, params=params,
+ redirect_output=dry_run_outdir, update_original_recipe=args.update_recipe)
+ if not appendfile:
+ return
+ if args.dry_run:
+ output = ''
+ appendfilename = os.path.basename(appendfile)
+ newappendfile = appendfile
+ if appendfile and os.path.exists(appendfile):
+ with open(appendfile, 'r') as f:
+ oldlines = f.readlines()
+ else:
+ appendfile = '/dev/null'
+ oldlines = []
+
+ with open(os.path.join(dry_run_outdir, appendfilename), 'r') as f:
+ newlines = f.readlines()
+ diff = difflib.unified_diff(oldlines, newlines, appendfile, newappendfile)
+ difflines = list(diff)
+ if difflines:
+ output += ''.join(difflines)
+ if output:
+ logger.info('Diff of changed files:\n%s' % output)
+ else:
+ logger.info('No changed files')
+ tinfoil.modified_files()
def appendsrcfiles(parser, args):
recipedata = _parse_recipe(args.recipe, tinfoil)
@@ -435,6 +461,8 @@ def register_commands(subparsers):
help='Create/update a bbappend to add or replace source files',
description='Creates a bbappend (or updates an existing one) to add or replace the specified file in the recipe sources, either those in WORKDIR or those in the source tree. This command lets you specify multiple files with a destination directory, so cannot specify the destination filename. See the `appendsrcfile` command for the other behavior.')
parser.add_argument('-D', '--destdir', help='Destination directory (relative to S or WORKDIR, defaults to ".")', default='', type=destination_path)
+ parser.add_argument('-u', '--update-recipe', help='Update recipe instead of creating (or updating) a bbapend file. DESTLAYER must contains the recipe to update', action='store_true')
+ parser.add_argument('-n', '--dry-run', help='Dry run mode', action='store_true')
parser.add_argument('files', nargs='+', metavar='FILE', help='File(s) to be added to the recipe sources (WORKDIR or S)', type=existing_path)
parser.set_defaults(func=lambda a: appendsrcfiles(parser, a), parserecipes=True)
@@ -442,6 +470,8 @@ def register_commands(subparsers):
parents=[common_src],
help='Create/update a bbappend to add or replace a source file',
description='Creates a bbappend (or updates an existing one) to add or replace the specified files in the recipe sources, either those in WORKDIR or those in the source tree. This command lets you specify the destination filename, not just destination directory, but only works for one file. See the `appendsrcfiles` command for the other behavior.')
+ parser.add_argument('-u', '--update-recipe', help='Update recipe instead of creating (or updating) a bbapend file. DESTLAYER must contains the recipe to update', action='store_true')
+ parser.add_argument('-n', '--dry-run', help='Dry run mode', action='store_true')
parser.add_argument('file', metavar='FILE', help='File to be added to the recipe sources (WORKDIR or S)', type=existing_path)
parser.add_argument('destfile', metavar='DESTFILE', nargs='?', help='Destination path (relative to S or WORKDIR, optional)', type=destination_path)
parser.set_defaults(func=lambda a: appendsrcfile(parser, a), parserecipes=True)
diff --git a/scripts/lib/recipetool/create.py b/scripts/lib/recipetool/create.py
index 824ac6350d..8e9ff38db6 100644
--- a/scripts/lib/recipetool/create.py
+++ b/scripts/lib/recipetool/create.py
@@ -423,6 +423,36 @@ def create_recipe(args):
storeTagName = ''
pv_srcpv = False
+ handled = []
+ classes = []
+
+ # Find all plugins that want to register handlers
+ logger.debug('Loading recipe handlers')
+ raw_handlers = []
+ for plugin in plugins:
+ if hasattr(plugin, 'register_recipe_handlers'):
+ plugin.register_recipe_handlers(raw_handlers)
+ # Sort handlers by priority
+ handlers = []
+ for i, handler in enumerate(raw_handlers):
+ if isinstance(handler, tuple):
+ handlers.append((handler[0], handler[1], i))
+ else:
+ handlers.append((handler, 0, i))
+ handlers.sort(key=lambda item: (item[1], -item[2]), reverse=True)
+ for handler, priority, _ in handlers:
+ logger.debug('Handler: %s (priority %d)' % (handler.__class__.__name__, priority))
+ setattr(handler, '_devtool', args.devtool)
+ handlers = [item[0] for item in handlers]
+
+ fetchuri = None
+ for handler in handlers:
+ if hasattr(handler, 'process_url'):
+ ret = handler.process_url(args, classes, handled, extravalues)
+ if 'url' in handled and ret:
+ fetchuri = ret
+ break
+
if os.path.isfile(source):
source = 'file://%s' % os.path.abspath(source)
@@ -431,7 +461,8 @@ def create_recipe(args):
if re.match(r'https?://github.com/[^/]+/[^/]+/archive/.+(\.tar\..*|\.zip)$', source):
logger.warning('github archive files are not guaranteed to be stable and may be re-generated over time. If the latter occurs, the checksums will likely change and the recipe will fail at do_fetch. It is recommended that you point to an actual commit or tag in the repository instead (using the repository URL in conjunction with the -S/--srcrev option).')
# Fetch a URL
- fetchuri = reformat_git_uri(urldefrag(source)[0])
+ if not fetchuri:
+ fetchuri = reformat_git_uri(urldefrag(source)[0])
if args.binary:
# Assume the archive contains the directory structure verbatim
# so we need to extract to a subdirectory
@@ -638,8 +669,6 @@ def create_recipe(args):
# We'll come back and replace this later in handle_license_vars()
lines_before.append('##LICENSE_PLACEHOLDER##')
- handled = []
- classes = []
# FIXME This is kind of a hack, we probably ought to be using bitbake to do this
pn = None
@@ -677,8 +706,10 @@ def create_recipe(args):
if not srcuri:
lines_before.append('# No information for SRC_URI yet (only an external source tree was specified)')
lines_before.append('SRC_URI = "%s"' % srcuri)
+ shown_checksums = ["%ssum" % s for s in bb.fetch2.SHOWN_CHECKSUM_LIST]
for key, value in sorted(checksums.items()):
- lines_before.append('SRC_URI[%s] = "%s"' % (key, value))
+ if key in shown_checksums:
+ lines_before.append('SRC_URI[%s] = "%s"' % (key, value))
if srcuri and supports_srcrev(srcuri):
lines_before.append('')
lines_before.append('# Modify these as desired')
@@ -690,7 +721,7 @@ def create_recipe(args):
srcpvprefix = 'svnr'
else:
srcpvprefix = scheme
- lines_before.append('PV = "%s+%s${SRCPV}"' % (realpv or '1.0', srcpvprefix))
+ lines_before.append('PV = "%s+%s"' % (realpv or '1.0', srcpvprefix))
pv_srcpv = True
if not args.autorev and srcrev == '${AUTOREV}':
if os.path.exists(os.path.join(srctree, '.git')):
@@ -718,25 +749,6 @@ def create_recipe(args):
if args.npm_dev:
extravalues['NPM_INSTALL_DEV'] = 1
- # Find all plugins that want to register handlers
- logger.debug('Loading recipe handlers')
- raw_handlers = []
- for plugin in plugins:
- if hasattr(plugin, 'register_recipe_handlers'):
- plugin.register_recipe_handlers(raw_handlers)
- # Sort handlers by priority
- handlers = []
- for i, handler in enumerate(raw_handlers):
- if isinstance(handler, tuple):
- handlers.append((handler[0], handler[1], i))
- else:
- handlers.append((handler, 0, i))
- handlers.sort(key=lambda item: (item[1], -item[2]), reverse=True)
- for handler, priority, _ in handlers:
- logger.debug('Handler: %s (priority %d)' % (handler.__class__.__name__, priority))
- setattr(handler, '_devtool', args.devtool)
- handlers = [item[0] for item in handlers]
-
# Apply the handlers
if args.binary:
classes.append('bin_package')
@@ -745,6 +757,10 @@ def create_recipe(args):
for handler in handlers:
handler.process(srctree_use, classes, lines_before, lines_after, handled, extravalues)
+ # native and nativesdk classes are special and must be inherited last
+ # If present, put them at the end of the classes list
+ classes.sort(key=lambda c: c in ("native", "nativesdk"))
+
extrafiles = extravalues.pop('extrafiles', {})
extra_pn = extravalues.pop('PN', None)
extra_pv = extravalues.pop('PV', None)
@@ -869,8 +885,10 @@ def create_recipe(args):
outlines.append('')
outlines.extend(lines_after)
+ outlines = [ line.rstrip('\n') +"\n" for line in outlines]
+
if extravalues:
- _, outlines = oe.recipeutils.patch_recipe_lines(outlines, extravalues, trailing_newline=False)
+ _, outlines = oe.recipeutils.patch_recipe_lines(outlines, extravalues, trailing_newline=True)
if args.extract_to:
scriptutils.git_convert_standalone_clone(srctree)
@@ -886,7 +904,7 @@ def create_recipe(args):
log_info_cond('Source extracted to %s' % args.extract_to, args.devtool)
if outfile == '-':
- sys.stdout.write('\n'.join(outlines) + '\n')
+ sys.stdout.write(''.join(outlines) + '\n')
else:
with open(outfile, 'w') as f:
lastline = None
@@ -894,9 +912,10 @@ def create_recipe(args):
if not lastline and not line:
# Skip extra blank lines
continue
- f.write('%s\n' % line)
+ f.write('%s' % line)
lastline = line
log_info_cond('Recipe %s has been created; further editing may be required to make it fully functional' % outfile, args.devtool)
+ tinfoil.modified_files()
if tempsrc:
if args.keep_temp:
@@ -1054,54 +1073,18 @@ def get_license_md5sums(d, static_only=False, linenumbers=False):
return md5sums
-def crunch_license(licfile):
+def crunch_known_licenses(d):
'''
- Remove non-material text from a license file and then check
- its md5sum against a known list. This works well for licenses
- which contain a copyright statement, but is also a useful way
- to handle people's insistence upon reformatting the license text
- slightly (with no material difference to the text of the
- license).
+ Calculate the MD5 checksums for the crunched versions of all common
+ licenses. Also add additional known checksums.
'''
-
- import oe.utils
-
- # Note: these are carefully constructed!
- license_title_re = re.compile(r'^#*\(? *(This is )?([Tt]he )?.{0,15} ?[Ll]icen[sc]e( \(.{1,10}\))?\)?[:\.]? ?#*$')
- license_statement_re = re.compile(r'^((This (project|software)|.{1,10}) is( free software)? (released|licen[sc]ed)|(Released|Licen[cs]ed)) under the .{1,10} [Ll]icen[sc]e:?$')
- copyright_re = re.compile('^ *[#\*]* *(Modified work |MIT LICENSED )?Copyright ?(\([cC]\))? .*$')
- disclaimer_re = re.compile('^ *\*? ?All [Rr]ights [Rr]eserved\.$')
- email_re = re.compile('^.*<[\w\.-]*@[\w\.\-]*>$')
- header_re = re.compile('^(\/\**!?)? ?[\-=\*]* ?(\*\/)?$')
- tag_re = re.compile('^ *@?\(?([Ll]icense|MIT)\)?$')
- url_re = re.compile('^ *[#\*]* *https?:\/\/[\w\.\/\-]+$')
-
+
crunched_md5sums = {}
# common licenses
- crunched_md5sums['89f3bf322f30a1dcfe952e09945842f0'] = 'Apache-2.0'
- crunched_md5sums['13b6fe3075f8f42f2270a748965bf3a1'] = '0BSD'
- crunched_md5sums['ba87a7d7c20719c8df4b8beed9b78c43'] = 'BSD-2-Clause'
- crunched_md5sums['7f8892c03b72de419c27be4ebfa253f8'] = 'BSD-3-Clause'
- crunched_md5sums['21128c0790b23a8a9f9e260d5f6b3619'] = 'BSL-1.0'
- crunched_md5sums['975742a59ae1b8abdea63a97121f49f4'] = 'EDL-1.0'
- crunched_md5sums['5322cee4433d84fb3aafc9e253116447'] = 'EPL-1.0'
- crunched_md5sums['6922352e87de080f42419bed93063754'] = 'EPL-2.0'
- crunched_md5sums['793475baa22295cae1d3d4046a3a0ceb'] = 'GPL-2.0-only'
- crunched_md5sums['ff9047f969b02c20f0559470df5cb433'] = 'GPL-2.0-or-later'
- crunched_md5sums['ea6de5453fcadf534df246e6cdafadcd'] = 'GPL-3.0-only'
- crunched_md5sums['b419257d4d153a6fde92ddf96acf5b67'] = 'GPL-3.0-or-later'
- crunched_md5sums['228737f4c49d3ee75b8fb3706b090b84'] = 'ISC'
- crunched_md5sums['c6a782e826ca4e85bf7f8b89435a677d'] = 'LGPL-2.0-only'
- crunched_md5sums['32d8f758a066752f0db09bd7624b8090'] = 'LGPL-2.0-or-later'
- crunched_md5sums['4820937eb198b4f84c52217ed230be33'] = 'LGPL-2.1-only'
- crunched_md5sums['db13fe9f3a13af7adab2dc7a76f9e44a'] = 'LGPL-2.1-or-later'
- crunched_md5sums['d7a0f2e4e0950e837ac3eabf5bd1d246'] = 'LGPL-3.0-only'
- crunched_md5sums['abbf328e2b434f9153351f06b9f79d02'] = 'LGPL-3.0-or-later'
- crunched_md5sums['eecf6429523cbc9693547cf2db790b5c'] = 'MIT'
- crunched_md5sums['b218b0e94290b9b818c4be67c8e1cc82'] = 'MIT-0'
- crunched_md5sums['ddc18131d6748374f0f35a621c245b49'] = 'Unlicense'
- crunched_md5sums['51f9570ff32571fc0a443102285c5e33'] = 'WTFPL'
+ crunched_md5sums['ad4e9d34a2e966dfe9837f18de03266d'] = 'GFDL-1.1-only'
+ crunched_md5sums['d014fb11a34eb67dc717fdcfc97e60ed'] = 'GFDL-1.2-only'
+ crunched_md5sums['e020ca655b06c112def28e597ab844f1'] = 'GFDL-1.3-only'
# The following two were gleaned from the "forever" npm package
crunched_md5sums['0a97f8e4cbaf889d6fa51f84b89a79f6'] = 'ISC'
@@ -1157,6 +1140,39 @@ def crunch_license(licfile):
# https://raw.githubusercontent.com/stackgl/gl-mat3/v2.0.0/LICENSE.md
crunched_md5sums['75512892d6f59dddb6d1c7e191957e9c'] = 'Zlib'
+ commonlicdir = d.getVar('COMMON_LICENSE_DIR')
+ for fn in sorted(os.listdir(commonlicdir)):
+ md5value, lictext = crunch_license(os.path.join(commonlicdir, fn))
+ if md5value not in crunched_md5sums:
+ crunched_md5sums[md5value] = fn
+ elif fn != crunched_md5sums[md5value]:
+ bb.debug(2, "crunched_md5sums['%s'] is already set to '%s' rather than '%s'" % (md5value, crunched_md5sums[md5value], fn))
+ else:
+ bb.debug(2, "crunched_md5sums['%s'] is already set to '%s'" % (md5value, crunched_md5sums[md5value]))
+
+ return crunched_md5sums
+
+def crunch_license(licfile):
+ '''
+ Remove non-material text from a license file and then calculate its
+ md5sum. This works well for licenses that contain a copyright statement,
+ but is also a useful way to handle people's insistence upon reformatting
+ the license text slightly (with no material difference to the text of the
+ license).
+ '''
+
+ import oe.utils
+
+ # Note: these are carefully constructed!
+ license_title_re = re.compile(r'^#*\(? *(This is )?([Tt]he )?.{0,15} ?[Ll]icen[sc]e( \(.{1,10}\))?\)?[:\.]? ?#*$')
+ license_statement_re = re.compile(r'^((This (project|software)|.{1,10}) is( free software)? (released|licen[sc]ed)|(Released|Licen[cs]ed)) under the .{1,10} [Ll]icen[sc]e:?$')
+ copyright_re = re.compile(r'^ *[#\*]* *(Modified work |MIT LICENSED )?Copyright ?(\([cC]\))? .*$')
+ disclaimer_re = re.compile(r'^ *\*? ?All [Rr]ights [Rr]eserved\.$')
+ email_re = re.compile(r'^.*<[\w\.-]*@[\w\.\-]*>$')
+ header_re = re.compile(r'^(\/\**!?)? ?[\-=\*]* ?(\*\/)?$')
+ tag_re = re.compile(r'^ *@?\(?([Ll]icense|MIT)\)?$')
+ url_re = re.compile(r'^ *[#\*]* *https?:\/\/[\w\.\/\-]+$')
+
lictext = []
with open(licfile, 'r', errors='surrogateescape') as f:
for line in f:
@@ -1198,16 +1214,17 @@ def crunch_license(licfile):
except UnicodeEncodeError:
md5val = None
lictext = ''
- license = crunched_md5sums.get(md5val, None)
- return license, md5val, lictext
+ return md5val, lictext
def guess_license(srctree, d):
import bb
md5sums = get_license_md5sums(d)
+ crunched_md5sums = crunch_known_licenses(d)
+
licenses = []
licspecs = ['*LICEN[CS]E*', 'COPYING*', '*[Ll]icense*', 'LEGAL*', '[Ll]egal*', '*GPL*', 'README.lic*', 'COPYRIGHT*', '[Cc]opyright*', 'e[dp]l-v10']
- skip_extensions = (".html", ".js", ".json", ".svg", ".ts")
+ skip_extensions = (".html", ".js", ".json", ".svg", ".ts", ".go")
licfiles = []
for root, dirs, files in os.walk(srctree):
for fn in files:
@@ -1222,7 +1239,8 @@ def guess_license(srctree, d):
md5value = bb.utils.md5_file(licfile)
license = md5sums.get(md5value, None)
if not license:
- license, crunched_md5, lictext = crunch_license(licfile)
+ crunched_md5, lictext = crunch_license(licfile)
+ license = crunched_md5sums.get(crunched_md5, None)
if lictext and not license:
license = 'Unknown'
logger.info("Please add the following line for '%s' to a 'lib/recipetool/licenses.csv' " \
@@ -1396,6 +1414,7 @@ def register_commands(subparsers):
parser_create.add_argument('-B', '--srcbranch', help='Branch in source repository if fetching from an SCM such as git (default master)')
parser_create.add_argument('--keep-temp', action="store_true", help='Keep temporary directory (for debugging)')
parser_create.add_argument('--npm-dev', action="store_true", help='For npm, also fetch devDependencies')
+ parser_create.add_argument('--no-pypi', action="store_true", help='Do not inherit pypi class')
parser_create.add_argument('--devtool', action="store_true", help=argparse.SUPPRESS)
parser_create.add_argument('--mirrors', action="store_true", help='Enable PREMIRRORS and MIRRORS for source tree fetching (disabled by default).')
parser_create.set_defaults(func=create_recipe)
diff --git a/scripts/lib/recipetool/create_buildsys.py b/scripts/lib/recipetool/create_buildsys.py
index 5015634476..ec9d510e23 100644
--- a/scripts/lib/recipetool/create_buildsys.py
+++ b/scripts/lib/recipetool/create_buildsys.py
@@ -5,9 +5,9 @@
# SPDX-License-Identifier: GPL-2.0-only
#
+import os
import re
import logging
-import glob
from recipetool.create import RecipeHandler, validate_pv
logger = logging.getLogger('recipetool')
@@ -137,15 +137,15 @@ class CmakeRecipeHandler(RecipeHandler):
deps = []
unmappedpkgs = []
- proj_re = re.compile('project\s*\(([^)]*)\)', re.IGNORECASE)
- pkgcm_re = re.compile('pkg_check_modules\s*\(\s*[a-zA-Z0-9-_]+\s*(REQUIRED)?\s+([^)\s]+)\s*\)', re.IGNORECASE)
- pkgsm_re = re.compile('pkg_search_module\s*\(\s*[a-zA-Z0-9-_]+\s*(REQUIRED)?((\s+[^)\s]+)+)\s*\)', re.IGNORECASE)
- findpackage_re = re.compile('find_package\s*\(\s*([a-zA-Z0-9-_]+)\s*.*', re.IGNORECASE)
- findlibrary_re = re.compile('find_library\s*\(\s*[a-zA-Z0-9-_]+\s*(NAMES\s+)?([a-zA-Z0-9-_ ]+)\s*.*')
- checklib_re = re.compile('check_library_exists\s*\(\s*([^\s)]+)\s*.*', re.IGNORECASE)
- include_re = re.compile('include\s*\(\s*([^)\s]*)\s*\)', re.IGNORECASE)
- subdir_re = re.compile('add_subdirectory\s*\(\s*([^)\s]*)\s*([^)\s]*)\s*\)', re.IGNORECASE)
- dep_re = re.compile('([^ ><=]+)( *[<>=]+ *[^ ><=]+)?')
+ proj_re = re.compile(r'project\s*\(([^)]*)\)', re.IGNORECASE)
+ pkgcm_re = re.compile(r'pkg_check_modules\s*\(\s*[a-zA-Z0-9-_]+\s*(REQUIRED)?\s+([^)\s]+)\s*\)', re.IGNORECASE)
+ pkgsm_re = re.compile(r'pkg_search_module\s*\(\s*[a-zA-Z0-9-_]+\s*(REQUIRED)?((\s+[^)\s]+)+)\s*\)', re.IGNORECASE)
+ findpackage_re = re.compile(r'find_package\s*\(\s*([a-zA-Z0-9-_]+)\s*.*', re.IGNORECASE)
+ findlibrary_re = re.compile(r'find_library\s*\(\s*[a-zA-Z0-9-_]+\s*(NAMES\s+)?([a-zA-Z0-9-_ ]+)\s*.*')
+ checklib_re = re.compile(r'check_library_exists\s*\(\s*([^\s)]+)\s*.*', re.IGNORECASE)
+ include_re = re.compile(r'include\s*\(\s*([^)\s]*)\s*\)', re.IGNORECASE)
+ subdir_re = re.compile(r'add_subdirectory\s*\(\s*([^)\s]*)\s*([^)\s]*)\s*\)', re.IGNORECASE)
+ dep_re = re.compile(r'([^ ><=]+)( *[<>=]+ *[^ ><=]+)?')
def find_cmake_package(pkg):
RecipeHandler.load_devel_filemap(tinfoil.config_data)
@@ -423,16 +423,16 @@ class AutotoolsRecipeHandler(RecipeHandler):
'makeinfo': 'texinfo',
}
- pkg_re = re.compile('PKG_CHECK_MODULES\(\s*\[?[a-zA-Z0-9_]*\]?,\s*\[?([^,\]]*)\]?[),].*')
- pkgce_re = re.compile('PKG_CHECK_EXISTS\(\s*\[?([^,\]]*)\]?[),].*')
- lib_re = re.compile('AC_CHECK_LIB\(\s*\[?([^,\]]*)\]?,.*')
- libx_re = re.compile('AX_CHECK_LIBRARY\(\s*\[?[^,\]]*\]?,\s*\[?([^,\]]*)\]?,\s*\[?([a-zA-Z0-9-]*)\]?,.*')
- progs_re = re.compile('_PROGS?\(\s*\[?[a-zA-Z0-9_]*\]?,\s*\[?([^,\]]*)\]?[),].*')
- dep_re = re.compile('([^ ><=]+)( [<>=]+ [^ ><=]+)?')
- ac_init_re = re.compile('AC_INIT\(\s*([^,]+),\s*([^,]+)[,)].*')
- am_init_re = re.compile('AM_INIT_AUTOMAKE\(\s*([^,]+),\s*([^,]+)[,)].*')
- define_re = re.compile('\s*(m4_)?define\(\s*([^,]+),\s*([^,]+)\)')
- version_re = re.compile('([0-9.]+)')
+ pkg_re = re.compile(r'PKG_CHECK_MODULES\(\s*\[?[a-zA-Z0-9_]*\]?,\s*\[?([^,\]]*)\]?[),].*')
+ pkgce_re = re.compile(r'PKG_CHECK_EXISTS\(\s*\[?([^,\]]*)\]?[),].*')
+ lib_re = re.compile(r'AC_CHECK_LIB\(\s*\[?([^,\]]*)\]?,.*')
+ libx_re = re.compile(r'AX_CHECK_LIBRARY\(\s*\[?[^,\]]*\]?,\s*\[?([^,\]]*)\]?,\s*\[?([a-zA-Z0-9-]*)\]?,.*')
+ progs_re = re.compile(r'_PROGS?\(\s*\[?[a-zA-Z0-9_]*\]?,\s*\[?([^,\]]*)\]?[),].*')
+ dep_re = re.compile(r'([^ ><=]+)( [<>=]+ [^ ><=]+)?')
+ ac_init_re = re.compile(r'AC_INIT\(\s*([^,]+),\s*([^,]+)[,)].*')
+ am_init_re = re.compile(r'AM_INIT_AUTOMAKE\(\s*([^,]+),\s*([^,]+)[,)].*')
+ define_re = re.compile(r'\s*(m4_)?define\(\s*([^,]+),\s*([^,]+)\)')
+ version_re = re.compile(r'([0-9.]+)')
defines = {}
def subst_defines(value):
diff --git a/scripts/lib/recipetool/create_buildsys_python.py b/scripts/lib/recipetool/create_buildsys_python.py
index f4f51c88b4..a807dafae5 100644
--- a/scripts/lib/recipetool/create_buildsys_python.py
+++ b/scripts/lib/recipetool/create_buildsys_python.py
@@ -10,7 +10,7 @@ import codecs
import collections
import setuptools.command.build_py
import email
-import imp
+import importlib
import glob
import itertools
import logging
@@ -18,7 +18,11 @@ import os
import re
import sys
import subprocess
+import json
+import urllib.request
from recipetool.create import RecipeHandler
+from urllib.parse import urldefrag
+from recipetool.create import determine_from_url
logger = logging.getLogger('recipetool')
@@ -37,63 +41,8 @@ class PythonRecipeHandler(RecipeHandler):
assume_provided = ['builtins', 'os.path']
# Assumes that the host python3 builtin_module_names is sane for target too
assume_provided = assume_provided + list(sys.builtin_module_names)
+ excluded_fields = []
- bbvar_map = {
- 'Name': 'PN',
- 'Version': 'PV',
- 'Home-page': 'HOMEPAGE',
- 'Summary': 'SUMMARY',
- 'Description': 'DESCRIPTION',
- 'License': 'LICENSE',
- 'Requires': 'RDEPENDS:${PN}',
- 'Provides': 'RPROVIDES:${PN}',
- 'Obsoletes': 'RREPLACES:${PN}',
- }
- # PN/PV are already set by recipetool core & desc can be extremely long
- excluded_fields = [
- 'Description',
- ]
- setup_parse_map = {
- 'Url': 'Home-page',
- 'Classifiers': 'Classifier',
- 'Description': 'Summary',
- }
- setuparg_map = {
- 'Home-page': 'url',
- 'Classifier': 'classifiers',
- 'Summary': 'description',
- 'Description': 'long-description',
- }
- # Values which are lists, used by the setup.py argument based metadata
- # extraction method, to determine how to process the setup.py output.
- setuparg_list_fields = [
- 'Classifier',
- 'Requires',
- 'Provides',
- 'Obsoletes',
- 'Platform',
- 'Supported-Platform',
- ]
- setuparg_multi_line_values = ['Description']
- replacements = [
- ('License', r' +$', ''),
- ('License', r'^ +', ''),
- ('License', r' ', '-'),
- ('License', r'^GNU-', ''),
- ('License', r'-[Ll]icen[cs]e(,?-[Vv]ersion)?', ''),
- ('License', r'^UNKNOWN$', ''),
-
- # Remove currently unhandled version numbers from these variables
- ('Requires', r' *\([^)]*\)', ''),
- ('Provides', r' *\([^)]*\)', ''),
- ('Obsoletes', r' *\([^)]*\)', ''),
- ('Install-requires', r'^([^><= ]+).*', r'\1'),
- ('Extras-require', r'^([^><= ]+).*', r'\1'),
- ('Tests-require', r'^([^><= ]+).*', r'\1'),
-
- # Remove unhandled dependency on particular features (e.g. foo[PDF])
- ('Install-requires', r'\[[^\]]+\]$', ''),
- ]
classifier_license_map = {
'License :: OSI Approved :: Academic Free License (AFL)': 'AFL',
@@ -166,110 +115,97 @@ class PythonRecipeHandler(RecipeHandler):
def __init__(self):
pass
- def process(self, srctree, classes, lines_before, lines_after, handled, extravalues):
- if 'buildsystem' in handled:
- return False
-
- # Check for non-zero size setup.py files
- setupfiles = RecipeHandler.checkfiles(srctree, ['setup.py'])
- for fn in setupfiles:
- if os.path.getsize(fn):
- break
- else:
- return False
-
- # setup.py is always parsed to get at certain required information, such as
- # distutils vs setuptools
- #
- # If egg info is available, we use it for both its PKG-INFO metadata
- # and for its requires.txt for install_requires.
- # If PKG-INFO is available but no egg info is, we use that for metadata in preference to
- # the parsed setup.py, but use the install_requires info from the
- # parsed setup.py.
-
- setupscript = os.path.join(srctree, 'setup.py')
- try:
- setup_info, uses_setuptools, setup_non_literals, extensions = self.parse_setup_py(setupscript)
- except Exception:
- logger.exception("Failed to parse setup.py")
- setup_info, uses_setuptools, setup_non_literals, extensions = {}, True, [], []
-
- egginfo = glob.glob(os.path.join(srctree, '*.egg-info'))
- if egginfo:
- info = self.get_pkginfo(os.path.join(egginfo[0], 'PKG-INFO'))
- requires_txt = os.path.join(egginfo[0], 'requires.txt')
- if os.path.exists(requires_txt):
- with codecs.open(requires_txt) as f:
- inst_req = []
- extras_req = collections.defaultdict(list)
- current_feature = None
- for line in f.readlines():
- line = line.rstrip()
- if not line:
- continue
-
- if line.startswith('['):
- current_feature = line[1:-1]
- elif current_feature:
- extras_req[current_feature].append(line)
- else:
- inst_req.append(line)
- info['Install-requires'] = inst_req
- info['Extras-require'] = extras_req
- elif RecipeHandler.checkfiles(srctree, ['PKG-INFO']):
- info = self.get_pkginfo(os.path.join(srctree, 'PKG-INFO'))
-
- if setup_info:
- if 'Install-requires' in setup_info:
- info['Install-requires'] = setup_info['Install-requires']
- if 'Extras-require' in setup_info:
- info['Extras-require'] = setup_info['Extras-require']
- else:
- if setup_info:
- info = setup_info
+ def process_url(self, args, classes, handled, extravalues):
+ """
+ Convert any pypi url https://pypi.org/project/<package>/<version> into https://files.pythonhosted.org/packages/source/...
+ which corresponds to the archive location, and add pypi class
+ """
+
+ if 'url' in handled:
+ return None
+
+ fetch_uri = None
+ source = args.source
+ required_version = args.version if args.version else None
+ match = re.match(r'https?://pypi.org/project/([^/]+)(?:/([^/]+))?/?$', urldefrag(source)[0])
+ if match:
+ package = match.group(1)
+ version = match.group(2) if match.group(2) else required_version
+
+ json_url = f"https://pypi.org/pypi/%s/json" % package
+ response = urllib.request.urlopen(json_url)
+ if response.status == 200:
+ data = json.loads(response.read())
+ if not version:
+ # grab latest version
+ version = data["info"]["version"]
+ pypi_package = data["info"]["name"]
+ for release in reversed(data["releases"][version]):
+ if release["packagetype"] == "sdist":
+ fetch_uri = release["url"]
+ break
else:
- info = self.get_setup_args_info(setupscript)
-
- # Grab the license value before applying replacements
- license_str = info.get('License', '').strip()
-
- self.apply_info_replacements(info)
-
- if uses_setuptools:
- classes.append('setuptools3')
+ logger.warning("Cannot handle pypi url %s: cannot fetch package information using %s", source, json_url)
+ return None
else:
- classes.append('distutils3')
-
- if license_str:
- for i, line in enumerate(lines_before):
- if line.startswith('LICENSE = '):
- lines_before.insert(i, '# NOTE: License in setup.py/PKGINFO is: %s' % license_str)
- break
-
- if 'Classifier' in info:
- existing_licenses = info.get('License', '')
- licenses = []
- for classifier in info['Classifier']:
- if classifier in self.classifier_license_map:
- license = self.classifier_license_map[classifier]
- if license == 'Apache' and 'Apache-2.0' in existing_licenses:
- license = 'Apache-2.0'
- elif license == 'GPL':
- if 'GPL-2.0' in existing_licenses or 'GPLv2' in existing_licenses:
- license = 'GPL-2.0'
- elif 'GPL-3.0' in existing_licenses or 'GPLv3' in existing_licenses:
- license = 'GPL-3.0'
- elif license == 'LGPL':
- if 'LGPL-2.1' in existing_licenses or 'LGPLv2.1' in existing_licenses:
- license = 'LGPL-2.1'
- elif 'LGPL-2.0' in existing_licenses or 'LGPLv2' in existing_licenses:
- license = 'LGPL-2.0'
- elif 'LGPL-3.0' in existing_licenses or 'LGPLv3' in existing_licenses:
- license = 'LGPL-3.0'
- licenses.append(license)
-
- if licenses:
- info['License'] = ' & '.join(licenses)
+ match = re.match(r'^https?://files.pythonhosted.org/packages.*/(.*)-.*$', source)
+ if match:
+ fetch_uri = source
+ pypi_package = match.group(1)
+ _, version = determine_from_url(fetch_uri)
+
+ if match and not args.no_pypi:
+ if required_version and version != required_version:
+ raise Exception("Version specified using --version/-V (%s) and version specified in the url (%s) do not match" % (required_version, version))
+ # This is optionnal if BPN looks like "python-<pypi_package>" or "python3-<pypi_package>" (see pypi.bbclass)
+ # but at this point we cannot know because because user can specify the output name of the recipe on the command line
+ extravalues["PYPI_PACKAGE"] = pypi_package
+ # If the tarball extension is not 'tar.gz' (default value in pypi.bblcass) whe should set PYPI_PACKAGE_EXT in the recipe
+ pypi_package_ext = re.match(r'.*%s-%s\.(.*)$' % (pypi_package, version), fetch_uri)
+ if pypi_package_ext:
+ pypi_package_ext = pypi_package_ext.group(1)
+ if pypi_package_ext != "tar.gz":
+ extravalues["PYPI_PACKAGE_EXT"] = pypi_package_ext
+
+ # Pypi class will handle S and SRC_URI variables, so remove them
+ # TODO: allow oe.recipeutils.patch_recipe_lines() to accept regexp so we can simplify the following to:
+ # extravalues['SRC_URI(?:\[.*?\])?'] = None
+ extravalues['S'] = None
+ extravalues['SRC_URI'] = None
+
+ classes.append('pypi')
+
+ handled.append('url')
+ return fetch_uri
+
+ def handle_classifier_license(self, classifiers, existing_licenses=""):
+
+ licenses = []
+ for classifier in classifiers:
+ if classifier in self.classifier_license_map:
+ license = self.classifier_license_map[classifier]
+ if license == 'Apache' and 'Apache-2.0' in existing_licenses:
+ license = 'Apache-2.0'
+ elif license == 'GPL':
+ if 'GPL-2.0' in existing_licenses or 'GPLv2' in existing_licenses:
+ license = 'GPL-2.0'
+ elif 'GPL-3.0' in existing_licenses or 'GPLv3' in existing_licenses:
+ license = 'GPL-3.0'
+ elif license == 'LGPL':
+ if 'LGPL-2.1' in existing_licenses or 'LGPLv2.1' in existing_licenses:
+ license = 'LGPL-2.1'
+ elif 'LGPL-2.0' in existing_licenses or 'LGPLv2' in existing_licenses:
+ license = 'LGPL-2.0'
+ elif 'LGPL-3.0' in existing_licenses or 'LGPLv3' in existing_licenses:
+ license = 'LGPL-3.0'
+ licenses.append(license)
+
+ if licenses:
+ return ' & '.join(licenses)
+
+ return None
+
+ def map_info_to_bbvar(self, info, extravalues):
# Map PKG-INFO & setup.py fields to bitbake variables
for field, values in info.items():
@@ -285,73 +221,214 @@ class PythonRecipeHandler(RecipeHandler):
value = ' '.join(str(v) for v in values if v)
bbvar = self.bbvar_map[field]
+ if bbvar == "PN":
+ # by convention python recipes start with "python3-"
+ if not value.startswith('python'):
+ value = 'python3-' + value
+
if bbvar not in extravalues and value:
extravalues[bbvar] = value
- mapped_deps, unmapped_deps = self.scan_setup_python_deps(srctree, setup_info, setup_non_literals)
+ def apply_info_replacements(self, info):
+ if not self.replacements:
+ return
- extras_req = set()
- if 'Extras-require' in info:
- extras_req = info['Extras-require']
- if extras_req:
- lines_after.append('# The following configs & dependencies are from setuptools extras_require.')
- lines_after.append('# These dependencies are optional, hence can be controlled via PACKAGECONFIG.')
- lines_after.append('# The upstream names may not correspond exactly to bitbake package names.')
- lines_after.append('#')
- lines_after.append('# Uncomment this line to enable all the optional features.')
- lines_after.append('#PACKAGECONFIG ?= "{}"'.format(' '.join(k.lower() for k in extras_req)))
- for feature, feature_reqs in extras_req.items():
- unmapped_deps.difference_update(feature_reqs)
+ for variable, search, replace in self.replacements:
+ if variable not in info:
+ continue
- feature_req_deps = ('python3-' + r.replace('.', '-').lower() for r in sorted(feature_reqs))
- lines_after.append('PACKAGECONFIG[{}] = ",,,{}"'.format(feature.lower(), ' '.join(feature_req_deps)))
+ def replace_value(search, replace, value):
+ if replace is None:
+ if re.search(search, value):
+ return None
+ else:
+ new_value = re.sub(search, replace, value)
+ if value != new_value:
+ return new_value
+ return value
- inst_reqs = set()
- if 'Install-requires' in info:
- if extras_req:
- lines_after.append('')
- inst_reqs = info['Install-requires']
- if inst_reqs:
- unmapped_deps.difference_update(inst_reqs)
+ value = info[variable]
+ if isinstance(value, str):
+ new_value = replace_value(search, replace, value)
+ if new_value is None:
+ del info[variable]
+ elif new_value != value:
+ info[variable] = new_value
+ elif hasattr(value, 'items'):
+ for dkey, dvalue in list(value.items()):
+ new_list = []
+ for pos, a_value in enumerate(dvalue):
+ new_value = replace_value(search, replace, a_value)
+ if new_value is not None and new_value != value:
+ new_list.append(new_value)
- inst_req_deps = ('python3-' + r.replace('.', '-').lower() for r in sorted(inst_reqs))
- lines_after.append('# WARNING: the following rdepends are from setuptools install_requires. These')
- lines_after.append('# upstream names may not correspond exactly to bitbake package names.')
- lines_after.append('RDEPENDS:${{PN}} += "{}"'.format(' '.join(inst_req_deps)))
+ if value != new_list:
+ value[dkey] = new_list
+ else:
+ new_list = []
+ for pos, a_value in enumerate(value):
+ new_value = replace_value(search, replace, a_value)
+ if new_value is not None and new_value != value:
+ new_list.append(new_value)
- if mapped_deps:
- name = info.get('Name')
- if name and name[0] in mapped_deps:
- # Attempt to avoid self-reference
- mapped_deps.remove(name[0])
- mapped_deps -= set(self.excluded_pkgdeps)
- if inst_reqs or extras_req:
- lines_after.append('')
- lines_after.append('# WARNING: the following rdepends are determined through basic analysis of the')
- lines_after.append('# python sources, and might not be 100% accurate.')
- lines_after.append('RDEPENDS:${{PN}} += "{}"'.format(' '.join(sorted(mapped_deps))))
+ if value != new_list:
+ info[variable] = new_list
- unmapped_deps -= set(extensions)
- unmapped_deps -= set(self.assume_provided)
- if unmapped_deps:
- if mapped_deps:
- lines_after.append('')
- lines_after.append('# WARNING: We were unable to map the following python package/module')
- lines_after.append('# dependencies to the bitbake packages which include them:')
- lines_after.extend('# {}'.format(d) for d in sorted(unmapped_deps))
- handled.append('buildsystem')
+ def scan_python_dependencies(self, paths):
+ deps = set()
+ try:
+ dep_output = self.run_command(['pythondeps', '-d'] + paths)
+ except (OSError, subprocess.CalledProcessError):
+ pass
+ else:
+ for line in dep_output.splitlines():
+ line = line.rstrip()
+ dep, filename = line.split('\t', 1)
+ if filename.endswith('/setup.py'):
+ continue
+ deps.add(dep)
- def get_pkginfo(self, pkginfo_fn):
- msg = email.message_from_file(open(pkginfo_fn, 'r'))
- msginfo = {}
- for field in msg.keys():
- values = msg.get_all(field)
- if len(values) == 1:
- msginfo[field] = values[0]
- else:
- msginfo[field] = values
- return msginfo
+ try:
+ provides_output = self.run_command(['pythondeps', '-p'] + paths)
+ except (OSError, subprocess.CalledProcessError):
+ pass
+ else:
+ provides_lines = (l.rstrip() for l in provides_output.splitlines())
+ provides = set(l for l in provides_lines if l and l != 'setup')
+ deps -= provides
+
+ return deps
+
+ def parse_pkgdata_for_python_packages(self):
+ pkgdata_dir = tinfoil.config_data.getVar('PKGDATA_DIR')
+
+ ldata = tinfoil.config_data.createCopy()
+ bb.parse.handle('classes-recipe/python3-dir.bbclass', ldata, True)
+ python_sitedir = ldata.getVar('PYTHON_SITEPACKAGES_DIR')
+
+ dynload_dir = os.path.join(os.path.dirname(python_sitedir), 'lib-dynload')
+ python_dirs = [python_sitedir + os.sep,
+ os.path.join(os.path.dirname(python_sitedir), 'dist-packages') + os.sep,
+ os.path.dirname(python_sitedir) + os.sep]
+ packages = {}
+ for pkgdatafile in glob.glob('{}/runtime/*'.format(pkgdata_dir)):
+ files_info = None
+ with open(pkgdatafile, 'r') as f:
+ for line in f.readlines():
+ field, value = line.split(': ', 1)
+ if field.startswith('FILES_INFO'):
+ files_info = ast.literal_eval(value)
+ break
+ else:
+ continue
+
+ for fn in files_info:
+ for suffix in importlib.machinery.all_suffixes():
+ if fn.endswith(suffix):
+ break
+ else:
+ continue
+
+ if fn.startswith(dynload_dir + os.sep):
+ if '/.debug/' in fn:
+ continue
+ base = os.path.basename(fn)
+ provided = base.split('.', 1)[0]
+ packages[provided] = os.path.basename(pkgdatafile)
+ continue
+
+ for python_dir in python_dirs:
+ if fn.startswith(python_dir):
+ relpath = fn[len(python_dir):]
+ relstart, _, relremaining = relpath.partition(os.sep)
+ if relstart.endswith('.egg'):
+ relpath = relremaining
+ base, _ = os.path.splitext(relpath)
+
+ if '/.debug/' in base:
+ continue
+ if os.path.basename(base) == '__init__':
+ base = os.path.dirname(base)
+ base = base.replace(os.sep + os.sep, os.sep)
+ provided = base.replace(os.sep, '.')
+ packages[provided] = os.path.basename(pkgdatafile)
+ return packages
+
+ @classmethod
+ def run_command(cls, cmd, **popenargs):
+ if 'stderr' not in popenargs:
+ popenargs['stderr'] = subprocess.STDOUT
+ try:
+ return subprocess.check_output(cmd, **popenargs).decode('utf-8')
+ except OSError as exc:
+ logger.error('Unable to run `{}`: {}', ' '.join(cmd), exc)
+ raise
+ except subprocess.CalledProcessError as exc:
+ logger.error('Unable to run `{}`: {}', ' '.join(cmd), exc.output)
+ raise
+
+class PythonSetupPyRecipeHandler(PythonRecipeHandler):
+ bbvar_map = {
+ 'Name': 'PN',
+ 'Version': 'PV',
+ 'Home-page': 'HOMEPAGE',
+ 'Summary': 'SUMMARY',
+ 'Description': 'DESCRIPTION',
+ 'License': 'LICENSE',
+ 'Requires': 'RDEPENDS:${PN}',
+ 'Provides': 'RPROVIDES:${PN}',
+ 'Obsoletes': 'RREPLACES:${PN}',
+ }
+ # PN/PV are already set by recipetool core & desc can be extremely long
+ excluded_fields = [
+ 'Description',
+ ]
+ setup_parse_map = {
+ 'Url': 'Home-page',
+ 'Classifiers': 'Classifier',
+ 'Description': 'Summary',
+ }
+ setuparg_map = {
+ 'Home-page': 'url',
+ 'Classifier': 'classifiers',
+ 'Summary': 'description',
+ 'Description': 'long-description',
+ }
+ # Values which are lists, used by the setup.py argument based metadata
+ # extraction method, to determine how to process the setup.py output.
+ setuparg_list_fields = [
+ 'Classifier',
+ 'Requires',
+ 'Provides',
+ 'Obsoletes',
+ 'Platform',
+ 'Supported-Platform',
+ ]
+ setuparg_multi_line_values = ['Description']
+
+ replacements = [
+ ('License', r' +$', ''),
+ ('License', r'^ +', ''),
+ ('License', r' ', '-'),
+ ('License', r'^GNU-', ''),
+ ('License', r'-[Ll]icen[cs]e(,?-[Vv]ersion)?', ''),
+ ('License', r'^UNKNOWN$', ''),
+
+ # Remove currently unhandled version numbers from these variables
+ ('Requires', r' *\([^)]*\)', ''),
+ ('Provides', r' *\([^)]*\)', ''),
+ ('Obsoletes', r' *\([^)]*\)', ''),
+ ('Install-requires', r'^([^><= ]+).*', r'\1'),
+ ('Extras-require', r'^([^><= ]+).*', r'\1'),
+ ('Tests-require', r'^([^><= ]+).*', r'\1'),
+
+ # Remove unhandled dependency on particular features (e.g. foo[PDF])
+ ('Install-requires', r'\[[^\]]+\]$', ''),
+ ]
+
+ def __init__(self):
+ pass
def parse_setup_py(self, setupscript='./setup.py'):
with codecs.open(setupscript) as f:
@@ -427,47 +504,16 @@ class PythonRecipeHandler(RecipeHandler):
info[fields[lineno]] = line
return info
- def apply_info_replacements(self, info):
- for variable, search, replace in self.replacements:
- if variable not in info:
- continue
-
- def replace_value(search, replace, value):
- if replace is None:
- if re.search(search, value):
- return None
- else:
- new_value = re.sub(search, replace, value)
- if value != new_value:
- return new_value
- return value
-
- value = info[variable]
- if isinstance(value, str):
- new_value = replace_value(search, replace, value)
- if new_value is None:
- del info[variable]
- elif new_value != value:
- info[variable] = new_value
- elif hasattr(value, 'items'):
- for dkey, dvalue in list(value.items()):
- new_list = []
- for pos, a_value in enumerate(dvalue):
- new_value = replace_value(search, replace, a_value)
- if new_value is not None and new_value != value:
- new_list.append(new_value)
-
- if value != new_list:
- value[dkey] = new_list
+ def get_pkginfo(self, pkginfo_fn):
+ msg = email.message_from_file(open(pkginfo_fn, 'r'))
+ msginfo = {}
+ for field in msg.keys():
+ values = msg.get_all(field)
+ if len(values) == 1:
+ msginfo[field] = values[0]
else:
- new_list = []
- for pos, a_value in enumerate(value):
- new_value = replace_value(search, replace, a_value)
- if new_value is not None and new_value != value:
- new_list.append(new_value)
-
- if value != new_list:
- info[variable] = new_list
+ msginfo[field] = values
+ return msginfo
def scan_setup_python_deps(self, srctree, setup_info, setup_non_literals):
if 'Package-dir' in setup_info:
@@ -522,99 +568,441 @@ class PythonRecipeHandler(RecipeHandler):
unmapped_deps.add(dep)
return mapped_deps, unmapped_deps
- def scan_python_dependencies(self, paths):
- deps = set()
- try:
- dep_output = self.run_command(['pythondeps', '-d'] + paths)
- except (OSError, subprocess.CalledProcessError):
- pass
+ def process(self, srctree, classes, lines_before, lines_after, handled, extravalues):
+
+ if 'buildsystem' in handled:
+ return False
+
+ logger.debug("Trying setup.py parser")
+
+ # Check for non-zero size setup.py files
+ setupfiles = RecipeHandler.checkfiles(srctree, ['setup.py'])
+ for fn in setupfiles:
+ if os.path.getsize(fn):
+ break
else:
- for line in dep_output.splitlines():
- line = line.rstrip()
- dep, filename = line.split('\t', 1)
- if filename.endswith('/setup.py'):
- continue
- deps.add(dep)
+ logger.debug("No setup.py found")
+ return False
+ # setup.py is always parsed to get at certain required information, such as
+ # distutils vs setuptools
+ #
+ # If egg info is available, we use it for both its PKG-INFO metadata
+ # and for its requires.txt for install_requires.
+ # If PKG-INFO is available but no egg info is, we use that for metadata in preference to
+ # the parsed setup.py, but use the install_requires info from the
+ # parsed setup.py.
+
+ setupscript = os.path.join(srctree, 'setup.py')
try:
- provides_output = self.run_command(['pythondeps', '-p'] + paths)
- except (OSError, subprocess.CalledProcessError):
- pass
+ setup_info, uses_setuptools, setup_non_literals, extensions = self.parse_setup_py(setupscript)
+ except Exception:
+ logger.exception("Failed to parse setup.py")
+ setup_info, uses_setuptools, setup_non_literals, extensions = {}, True, [], []
+
+ egginfo = glob.glob(os.path.join(srctree, '*.egg-info'))
+ if egginfo:
+ info = self.get_pkginfo(os.path.join(egginfo[0], 'PKG-INFO'))
+ requires_txt = os.path.join(egginfo[0], 'requires.txt')
+ if os.path.exists(requires_txt):
+ with codecs.open(requires_txt) as f:
+ inst_req = []
+ extras_req = collections.defaultdict(list)
+ current_feature = None
+ for line in f.readlines():
+ line = line.rstrip()
+ if not line:
+ continue
+
+ if line.startswith('['):
+ # PACKAGECONFIG must not contain expressions or whitespace
+ line = line.replace(" ", "")
+ line = line.replace(':', "")
+ line = line.replace('.', "-dot-")
+ line = line.replace('"', "")
+ line = line.replace('<', "-smaller-")
+ line = line.replace('>', "-bigger-")
+ line = line.replace('_', "-")
+ line = line.replace('(', "")
+ line = line.replace(')', "")
+ line = line.replace('!', "-not-")
+ line = line.replace('=', "-equals-")
+ current_feature = line[1:-1]
+ elif current_feature:
+ extras_req[current_feature].append(line)
+ else:
+ inst_req.append(line)
+ info['Install-requires'] = inst_req
+ info['Extras-require'] = extras_req
+ elif RecipeHandler.checkfiles(srctree, ['PKG-INFO']):
+ info = self.get_pkginfo(os.path.join(srctree, 'PKG-INFO'))
+
+ if setup_info:
+ if 'Install-requires' in setup_info:
+ info['Install-requires'] = setup_info['Install-requires']
+ if 'Extras-require' in setup_info:
+ info['Extras-require'] = setup_info['Extras-require']
else:
- provides_lines = (l.rstrip() for l in provides_output.splitlines())
- provides = set(l for l in provides_lines if l and l != 'setup')
- deps -= provides
+ if setup_info:
+ info = setup_info
+ else:
+ info = self.get_setup_args_info(setupscript)
- return deps
+ # Grab the license value before applying replacements
+ license_str = info.get('License', '').strip()
- def parse_pkgdata_for_python_packages(self):
- suffixes = [t[0] for t in imp.get_suffixes()]
- pkgdata_dir = tinfoil.config_data.getVar('PKGDATA_DIR')
+ self.apply_info_replacements(info)
- ldata = tinfoil.config_data.createCopy()
- bb.parse.handle('classes/python3-dir.bbclass', ldata, True)
- python_sitedir = ldata.getVar('PYTHON_SITEPACKAGES_DIR')
+ if uses_setuptools:
+ classes.append('setuptools3')
+ else:
+ classes.append('distutils3')
- dynload_dir = os.path.join(os.path.dirname(python_sitedir), 'lib-dynload')
- python_dirs = [python_sitedir + os.sep,
- os.path.join(os.path.dirname(python_sitedir), 'dist-packages') + os.sep,
- os.path.dirname(python_sitedir) + os.sep]
- packages = {}
- for pkgdatafile in glob.glob('{}/runtime/*'.format(pkgdata_dir)):
- files_info = None
- with open(pkgdatafile, 'r') as f:
- for line in f.readlines():
- field, value = line.split(': ', 1)
- if field.startswith('FILES_INFO'):
- files_info = ast.literal_eval(value)
- break
- else:
- continue
+ if license_str:
+ for i, line in enumerate(lines_before):
+ if line.startswith('##LICENSE_PLACEHOLDER##'):
+ lines_before.insert(i, '# NOTE: License in setup.py/PKGINFO is: %s' % license_str)
+ break
- for fn in files_info:
- for suffix in suffixes:
- if fn.endswith(suffix):
- break
- else:
- continue
+ if 'Classifier' in info:
+ license = self.handle_classifier_license(info['Classifier'], info.get('License', ''))
+ if license:
+ info['License'] = license
- if fn.startswith(dynload_dir + os.sep):
- if '/.debug/' in fn:
- continue
- base = os.path.basename(fn)
- provided = base.split('.', 1)[0]
- packages[provided] = os.path.basename(pkgdatafile)
- continue
+ self.map_info_to_bbvar(info, extravalues)
- for python_dir in python_dirs:
- if fn.startswith(python_dir):
- relpath = fn[len(python_dir):]
- relstart, _, relremaining = relpath.partition(os.sep)
- if relstart.endswith('.egg'):
- relpath = relremaining
- base, _ = os.path.splitext(relpath)
+ mapped_deps, unmapped_deps = self.scan_setup_python_deps(srctree, setup_info, setup_non_literals)
- if '/.debug/' in base:
- continue
- if os.path.basename(base) == '__init__':
- base = os.path.dirname(base)
- base = base.replace(os.sep + os.sep, os.sep)
- provided = base.replace(os.sep, '.')
- packages[provided] = os.path.basename(pkgdatafile)
- return packages
+ extras_req = set()
+ if 'Extras-require' in info:
+ extras_req = info['Extras-require']
+ if extras_req:
+ lines_after.append('# The following configs & dependencies are from setuptools extras_require.')
+ lines_after.append('# These dependencies are optional, hence can be controlled via PACKAGECONFIG.')
+ lines_after.append('# The upstream names may not correspond exactly to bitbake package names.')
+ lines_after.append('# The configs are might not correct, since PACKAGECONFIG does not support expressions as may used in requires.txt - they are just replaced by text.')
+ lines_after.append('#')
+ lines_after.append('# Uncomment this line to enable all the optional features.')
+ lines_after.append('#PACKAGECONFIG ?= "{}"'.format(' '.join(k.lower() for k in extras_req)))
+ for feature, feature_reqs in extras_req.items():
+ unmapped_deps.difference_update(feature_reqs)
+
+ feature_req_deps = ('python3-' + r.replace('.', '-').lower() for r in sorted(feature_reqs))
+ lines_after.append('PACKAGECONFIG[{}] = ",,,{}"'.format(feature.lower(), ' '.join(feature_req_deps)))
+
+ inst_reqs = set()
+ if 'Install-requires' in info:
+ if extras_req:
+ lines_after.append('')
+ inst_reqs = info['Install-requires']
+ if inst_reqs:
+ unmapped_deps.difference_update(inst_reqs)
+
+ inst_req_deps = ('python3-' + r.replace('.', '-').lower() for r in sorted(inst_reqs))
+ lines_after.append('# WARNING: the following rdepends are from setuptools install_requires. These')
+ lines_after.append('# upstream names may not correspond exactly to bitbake package names.')
+ lines_after.append('RDEPENDS:${{PN}} += "{}"'.format(' '.join(inst_req_deps)))
+
+ if mapped_deps:
+ name = info.get('Name')
+ if name and name[0] in mapped_deps:
+ # Attempt to avoid self-reference
+ mapped_deps.remove(name[0])
+ mapped_deps -= set(self.excluded_pkgdeps)
+ if inst_reqs or extras_req:
+ lines_after.append('')
+ lines_after.append('# WARNING: the following rdepends are determined through basic analysis of the')
+ lines_after.append('# python sources, and might not be 100% accurate.')
+ lines_after.append('RDEPENDS:${{PN}} += "{}"'.format(' '.join(sorted(mapped_deps))))
+
+ unmapped_deps -= set(extensions)
+ unmapped_deps -= set(self.assume_provided)
+ if unmapped_deps:
+ if mapped_deps:
+ lines_after.append('')
+ lines_after.append('# WARNING: We were unable to map the following python package/module')
+ lines_after.append('# dependencies to the bitbake packages which include them:')
+ lines_after.extend('# {}'.format(d) for d in sorted(unmapped_deps))
+
+ handled.append('buildsystem')
+
+class PythonPyprojectTomlRecipeHandler(PythonRecipeHandler):
+ """Base class to support PEP517 and PEP518
+
+ PEP517 https://peps.python.org/pep-0517/#source-trees
+ PEP518 https://peps.python.org/pep-0518/#build-system-table
+ """
+ # bitbake currently supports the 4 following backends
+ build_backend_map = {
+ "setuptools.build_meta": "python_setuptools_build_meta",
+ "poetry.core.masonry.api": "python_poetry_core",
+ "flit_core.buildapi": "python_flit_core",
+ "hatchling.build": "python_hatchling",
+ "maturin": "python_maturin",
+ "mesonpy": "python_mesonpy",
+ }
+
+ # setuptools.build_meta and flit declare project metadata into the "project" section of pyproject.toml
+ # according to PEP-621: https://packaging.python.org/en/latest/specifications/declaring-project-metadata/#declaring-project-metadata
+ # while poetry uses the "tool.poetry" section according to its official documentation: https://python-poetry.org/docs/pyproject/
+ # keys from "project" and "tool.poetry" sections are almost the same except for the HOMEPAGE which is "homepage" for tool.poetry
+ # and "Homepage" for "project" section. So keep both
+ bbvar_map = {
+ "name": "PN",
+ "version": "PV",
+ "Homepage": "HOMEPAGE",
+ "homepage": "HOMEPAGE",
+ "description": "SUMMARY",
+ "license": "LICENSE",
+ "dependencies": "RDEPENDS:${PN}",
+ "requires": "DEPENDS",
+ }
+
+ replacements = [
+ ("license", r" +$", ""),
+ ("license", r"^ +", ""),
+ ("license", r" ", "-"),
+ ("license", r"^GNU-", ""),
+ ("license", r"-[Ll]icen[cs]e(,?-[Vv]ersion)?", ""),
+ ("license", r"^UNKNOWN$", ""),
+ # Remove currently unhandled version numbers from these variables
+ ("requires", r"\[[^\]]+\]$", ""),
+ ("requires", r"^([^><= ]+).*", r"\1"),
+ ("dependencies", r"\[[^\]]+\]$", ""),
+ ("dependencies", r"^([^><= ]+).*", r"\1"),
+ ]
+
+ excluded_native_pkgdeps = [
+ # already provided by python_setuptools_build_meta.bbclass
+ "python3-setuptools-native",
+ "python3-wheel-native",
+ # already provided by python_poetry_core.bbclass
+ "python3-poetry-core-native",
+ # already provided by python_flit_core.bbclass
+ "python3-flit-core-native",
+ # already provided by python_mesonpy
+ "python3-meson-python-native",
+ ]
+
+ # add here a list of known and often used packages and the corresponding bitbake package
+ known_deps_map = {
+ "setuptools": "python3-setuptools",
+ "wheel": "python3-wheel",
+ "poetry-core": "python3-poetry-core",
+ "flit_core": "python3-flit-core",
+ "setuptools-scm": "python3-setuptools-scm",
+ "hatchling": "python3-hatchling",
+ "hatch-vcs": "python3-hatch-vcs",
+ "meson-python" : "python3-meson-python",
+ }
+
+ def __init__(self):
+ pass
+
+ def process(self, srctree, classes, lines_before, lines_after, handled, extravalues):
+ info = {}
+ metadata = {}
+
+ if 'buildsystem' in handled:
+ return False
+
+ logger.debug("Trying pyproject.toml parser")
+
+ # Check for non-zero size setup.py files
+ setupfiles = RecipeHandler.checkfiles(srctree, ["pyproject.toml"])
+ for fn in setupfiles:
+ if os.path.getsize(fn):
+ break
+ else:
+ logger.debug("No pyproject.toml found")
+ return False
+
+ setupscript = os.path.join(srctree, "pyproject.toml")
- @classmethod
- def run_command(cls, cmd, **popenargs):
- if 'stderr' not in popenargs:
- popenargs['stderr'] = subprocess.STDOUT
try:
- return subprocess.check_output(cmd, **popenargs).decode('utf-8')
- except OSError as exc:
- logger.error('Unable to run `{}`: {}', ' '.join(cmd), exc)
- raise
- except subprocess.CalledProcessError as exc:
- logger.error('Unable to run `{}`: {}', ' '.join(cmd), exc.output)
- raise
+ try:
+ import tomllib
+ except ImportError:
+ try:
+ import tomli as tomllib
+ except ImportError:
+ logger.error("Neither 'tomllib' nor 'tomli' could be imported, cannot scan pyproject.toml.")
+ return False
+
+ try:
+ with open(setupscript, "rb") as f:
+ config = tomllib.load(f)
+ except Exception:
+ logger.exception("Failed to parse pyproject.toml")
+ return False
+
+ build_backend = config["build-system"]["build-backend"]
+ if build_backend in self.build_backend_map:
+ classes.append(self.build_backend_map[build_backend])
+ else:
+ logger.error(
+ "Unsupported build-backend: %s, cannot use pyproject.toml. Will try to use legacy setup.py"
+ % build_backend
+ )
+ return False
+
+ licfile = ""
+
+ if build_backend == "poetry.core.masonry.api":
+ if "tool" in config and "poetry" in config["tool"]:
+ metadata = config["tool"]["poetry"]
+ else:
+ if "project" in config:
+ metadata = config["project"]
+
+ if metadata:
+ for field, values in metadata.items():
+ if field == "license":
+ # For setuptools.build_meta and flit, licence is a table
+ # but for poetry licence is a string
+ # for hatchling, both table (jsonschema) and string (iniconfig) have been used
+ if build_backend == "poetry.core.masonry.api":
+ value = values
+ else:
+ value = values.get("text", "")
+ if not value:
+ licfile = values.get("file", "")
+ continue
+ elif field == "dependencies" and build_backend == "poetry.core.masonry.api":
+ # For poetry backend, "dependencies" section looks like:
+ # [tool.poetry.dependencies]
+ # requests = "^2.13.0"
+ # requests = { version = "^2.13.0", source = "private" }
+ # See https://python-poetry.org/docs/master/pyproject/#dependencies-and-dependency-groups for more details
+ # This class doesn't handle versions anyway, so we just get the dependencies name here and construct a list
+ value = []
+ for k in values.keys():
+ value.append(k)
+ elif isinstance(values, dict):
+ for k, v in values.items():
+ info[k] = v
+ continue
+ else:
+ value = values
+
+ info[field] = value
+
+ # Grab the license value before applying replacements
+ license_str = info.get("license", "").strip()
+
+ if license_str:
+ for i, line in enumerate(lines_before):
+ if line.startswith("##LICENSE_PLACEHOLDER##"):
+ lines_before.insert(
+ i, "# NOTE: License in pyproject.toml is: %s" % license_str
+ )
+ break
+
+ info["requires"] = config["build-system"]["requires"]
+
+ self.apply_info_replacements(info)
+
+ if "classifiers" in info:
+ license = self.handle_classifier_license(
+ info["classifiers"], info.get("license", "")
+ )
+ if license:
+ if licfile:
+ lines = []
+ md5value = bb.utils.md5_file(os.path.join(srctree, licfile))
+ lines.append('LICENSE = "%s"' % license)
+ lines.append(
+ 'LIC_FILES_CHKSUM = "file://%s;md5=%s"'
+ % (licfile, md5value)
+ )
+ lines.append("")
+
+ # Replace the placeholder so we get the values in the right place in the recipe file
+ try:
+ pos = lines_before.index("##LICENSE_PLACEHOLDER##")
+ except ValueError:
+ pos = -1
+ if pos == -1:
+ lines_before.extend(lines)
+ else:
+ lines_before[pos : pos + 1] = lines
+
+ handled.append(("license", [license, licfile, md5value]))
+ else:
+ info["license"] = license
+
+ provided_packages = self.parse_pkgdata_for_python_packages()
+ provided_packages.update(self.known_deps_map)
+ native_mapped_deps, native_unmapped_deps = set(), set()
+ mapped_deps, unmapped_deps = set(), set()
+
+ if "requires" in info:
+ for require in info["requires"]:
+ mapped = provided_packages.get(require)
+
+ if mapped:
+ logger.debug("Mapped %s to %s" % (require, mapped))
+ native_mapped_deps.add(mapped)
+ else:
+ logger.debug("Could not map %s" % require)
+ native_unmapped_deps.add(require)
+
+ info.pop("requires")
+
+ if native_mapped_deps != set():
+ native_mapped_deps = {
+ item + "-native" for item in native_mapped_deps
+ }
+ native_mapped_deps -= set(self.excluded_native_pkgdeps)
+ if native_mapped_deps != set():
+ info["requires"] = " ".join(sorted(native_mapped_deps))
+
+ if native_unmapped_deps:
+ lines_after.append("")
+ lines_after.append(
+ "# WARNING: We were unable to map the following python package/module"
+ )
+ lines_after.append(
+ "# dependencies to the bitbake packages which include them:"
+ )
+ lines_after.extend(
+ "# {}".format(d) for d in sorted(native_unmapped_deps)
+ )
+
+ if "dependencies" in info:
+ for dependency in info["dependencies"]:
+ mapped = provided_packages.get(dependency)
+ if mapped:
+ logger.debug("Mapped %s to %s" % (dependency, mapped))
+ mapped_deps.add(mapped)
+ else:
+ logger.debug("Could not map %s" % dependency)
+ unmapped_deps.add(dependency)
+
+ info.pop("dependencies")
+
+ if mapped_deps != set():
+ if mapped_deps != set():
+ info["dependencies"] = " ".join(sorted(mapped_deps))
+
+ if unmapped_deps:
+ lines_after.append("")
+ lines_after.append(
+ "# WARNING: We were unable to map the following python package/module"
+ )
+ lines_after.append(
+ "# runtime dependencies to the bitbake packages which include them:"
+ )
+ lines_after.extend(
+ "# {}".format(d) for d in sorted(unmapped_deps)
+ )
+
+ self.map_info_to_bbvar(info, extravalues)
+
+ handled.append("buildsystem")
+ except Exception:
+ logger.exception("Failed to correctly handle pyproject.toml, falling back to another method")
+ return False
def gather_setup_info(fileobj):
@@ -730,5 +1118,7 @@ def has_non_literals(value):
def register_recipe_handlers(handlers):
- # We need to make sure this is ahead of the makefile fallback handler
- handlers.append((PythonRecipeHandler(), 70))
+ # We need to make sure these are ahead of the makefile fallback handler
+ # and the pyproject.toml handler ahead of the setup.py handler
+ handlers.append((PythonPyprojectTomlRecipeHandler(), 75))
+ handlers.append((PythonSetupPyRecipeHandler(), 70))
diff --git a/scripts/lib/recipetool/create_go.py b/scripts/lib/recipetool/create_go.py
new file mode 100644
index 0000000000..c560831442
--- /dev/null
+++ b/scripts/lib/recipetool/create_go.py
@@ -0,0 +1,779 @@
+# Recipe creation tool - go support plugin
+#
+# The code is based on golang internals. See the afftected
+# methods for further reference and information.
+#
+# Copyright (C) 2023 Weidmueller GmbH & Co KG
+# Author: Lukas Funke <lukas.funke@weidmueller.com>
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
+
+from collections import namedtuple
+from enum import Enum
+from html.parser import HTMLParser
+from recipetool.create import RecipeHandler, handle_license_vars
+from recipetool.create import guess_license, tidy_licenses, fixup_license
+from recipetool.create import determine_from_url
+from urllib.error import URLError
+
+import bb.utils
+import json
+import logging
+import os
+import re
+import subprocess
+import sys
+import shutil
+import tempfile
+import urllib.parse
+import urllib.request
+
+
+GoImport = namedtuple('GoImport', 'root vcs url suffix')
+logger = logging.getLogger('recipetool')
+CodeRepo = namedtuple(
+ 'CodeRepo', 'path codeRoot codeDir pathMajor pathPrefix pseudoMajor')
+
+tinfoil = None
+
+# Regular expression to parse pseudo semantic version
+# see https://go.dev/ref/mod#pseudo-versions
+re_pseudo_semver = re.compile(
+ r"^v[0-9]+\.(0\.0-|\d+\.\d+-([^+]*\.)?0\.)(?P<utc>\d{14})-(?P<commithash>[A-Za-z0-9]+)(\+[0-9A-Za-z-]+(\.[0-9A-Za-z-]+)*)?$")
+# Regular expression to parse semantic version
+re_semver = re.compile(
+ r"^v(?P<major>0|[1-9]\d*)\.(?P<minor>0|[1-9]\d*)\.(?P<patch>0|[1-9]\d*)(?:-(?P<prerelease>(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+(?P<buildmetadata>[0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$")
+
+
+def tinfoil_init(instance):
+ global tinfoil
+ tinfoil = instance
+
+
+class GoRecipeHandler(RecipeHandler):
+ """Class to handle the go recipe creation"""
+
+ @staticmethod
+ def __ensure_go():
+ """Check if the 'go' command is available in the recipes"""
+ recipe = "go-native"
+ if not tinfoil.recipes_parsed:
+ tinfoil.parse_recipes()
+ try:
+ rd = tinfoil.parse_recipe(recipe)
+ except bb.providers.NoProvider:
+ bb.error(
+ "Nothing provides '%s' which is required for the build" % (recipe))
+ bb.note(
+ "You will likely need to add a layer that provides '%s'" % (recipe))
+ return None
+
+ bindir = rd.getVar('STAGING_BINDIR_NATIVE')
+ gopath = os.path.join(bindir, 'go')
+
+ if not os.path.exists(gopath):
+ tinfoil.build_targets(recipe, 'addto_recipe_sysroot')
+
+ if not os.path.exists(gopath):
+ logger.error(
+ '%s required to process specified source, but %s did not seem to populate it' % 'go', recipe)
+ return None
+
+ return bindir
+
+ def __resolve_repository_static(self, modulepath):
+ """Resolve the repository in a static manner
+
+ The method is based on the go implementation of
+ `repoRootFromVCSPaths` in
+ https://github.com/golang/go/blob/master/src/cmd/go/internal/vcs/vcs.go
+ """
+
+ url = urllib.parse.urlparse("https://" + modulepath)
+ req = urllib.request.Request(url.geturl())
+
+ try:
+ resp = urllib.request.urlopen(req)
+ # Some modulepath are just redirects to github (or some other vcs
+ # hoster). Therefore, we check if this modulepath redirects to
+ # somewhere else
+ if resp.geturl() != url.geturl():
+ bb.debug(1, "%s is redirectred to %s" %
+ (url.geturl(), resp.geturl()))
+ url = urllib.parse.urlparse(resp.geturl())
+ modulepath = url.netloc + url.path
+
+ except URLError as url_err:
+ # This is probably because the module path
+ # contains the subdir and major path. Thus,
+ # we ignore this error for now
+ logger.debug(
+ 1, "Failed to fetch page from [%s]: %s" % (url, str(url_err)))
+
+ host, _, _ = modulepath.partition('/')
+
+ class vcs(Enum):
+ pathprefix = "pathprefix"
+ regexp = "regexp"
+ type = "type"
+ repo = "repo"
+ check = "check"
+ schemelessRepo = "schemelessRepo"
+
+ # GitHub
+ vcsGitHub = {}
+ vcsGitHub[vcs.pathprefix] = "github.com"
+ vcsGitHub[vcs.regexp] = re.compile(
+ r'^(?P<root>github\.com/[A-Za-z0-9_.\-]+/[A-Za-z0-9_.\-]+)(/(?P<suffix>[A-Za-z0-9_.\-]+))*$')
+ vcsGitHub[vcs.type] = "git"
+ vcsGitHub[vcs.repo] = "https://\\g<root>"
+
+ # Bitbucket
+ vcsBitbucket = {}
+ vcsBitbucket[vcs.pathprefix] = "bitbucket.org"
+ vcsBitbucket[vcs.regexp] = re.compile(
+ r'^(?P<root>bitbucket\.org/(?P<bitname>[A-Za-z0-9_.\-]+/[A-Za-z0-9_.\-]+))(/(?P<suffix>[A-Za-z0-9_.\-]+))*$')
+ vcsBitbucket[vcs.type] = "git"
+ vcsBitbucket[vcs.repo] = "https://\\g<root>"
+
+ # IBM DevOps Services (JazzHub)
+ vcsIBMDevOps = {}
+ vcsIBMDevOps[vcs.pathprefix] = "hub.jazz.net/git"
+ vcsIBMDevOps[vcs.regexp] = re.compile(
+ r'^(?P<root>hub\.jazz\.net/git/[a-z0-9]+/[A-Za-z0-9_.\-]+)(/(?P<suffix>[A-Za-z0-9_.\-]+))*$')
+ vcsIBMDevOps[vcs.type] = "git"
+ vcsIBMDevOps[vcs.repo] = "https://\\g<root>"
+
+ # Git at Apache
+ vcsApacheGit = {}
+ vcsApacheGit[vcs.pathprefix] = "git.apache.org"
+ vcsApacheGit[vcs.regexp] = re.compile(
+ r'^(?P<root>git\.apache\.org/[a-z0-9_.\-]+\.git)(/(?P<suffix>[A-Za-z0-9_.\-]+))*$')
+ vcsApacheGit[vcs.type] = "git"
+ vcsApacheGit[vcs.repo] = "https://\\g<root>"
+
+ # Git at OpenStack
+ vcsOpenStackGit = {}
+ vcsOpenStackGit[vcs.pathprefix] = "git.openstack.org"
+ vcsOpenStackGit[vcs.regexp] = re.compile(
+ r'^(?P<root>git\.openstack\.org/[A-Za-z0-9_.\-]+/[A-Za-z0-9_.\-]+)(\.git)?(/(?P<suffix>[A-Za-z0-9_.\-]+))*$')
+ vcsOpenStackGit[vcs.type] = "git"
+ vcsOpenStackGit[vcs.repo] = "https://\\g<root>"
+
+ # chiselapp.com for fossil
+ vcsChiselapp = {}
+ vcsChiselapp[vcs.pathprefix] = "chiselapp.com"
+ vcsChiselapp[vcs.regexp] = re.compile(
+ r'^(?P<root>chiselapp\.com/user/[A-Za-z0-9]+/repository/[A-Za-z0-9_.\-]+)$')
+ vcsChiselapp[vcs.type] = "fossil"
+ vcsChiselapp[vcs.repo] = "https://\\g<root>"
+
+ # General syntax for any server.
+ # Must be last.
+ vcsGeneralServer = {}
+ vcsGeneralServer[vcs.regexp] = re.compile(
+ "(?P<root>(?P<repo>([a-z0-9.\\-]+\\.)+[a-z0-9.\\-]+(:[0-9]+)?(/~?[A-Za-z0-9_.\\-]+)+?)\\.(?P<vcs>bzr|fossil|git|hg|svn))(/~?(?P<suffix>[A-Za-z0-9_.\\-]+))*$")
+ vcsGeneralServer[vcs.schemelessRepo] = True
+
+ vcsPaths = [vcsGitHub, vcsBitbucket, vcsIBMDevOps,
+ vcsApacheGit, vcsOpenStackGit, vcsChiselapp,
+ vcsGeneralServer]
+
+ if modulepath.startswith("example.net") or modulepath == "rsc.io":
+ logger.warning("Suspicious module path %s" % modulepath)
+ return None
+ if modulepath.startswith("http:") or modulepath.startswith("https:"):
+ logger.warning("Import path should not start with %s %s" %
+ ("http", "https"))
+ return None
+
+ rootpath = None
+ vcstype = None
+ repourl = None
+ suffix = None
+
+ for srv in vcsPaths:
+ m = srv[vcs.regexp].match(modulepath)
+ if vcs.pathprefix in srv:
+ if host == srv[vcs.pathprefix]:
+ rootpath = m.group('root')
+ vcstype = srv[vcs.type]
+ repourl = m.expand(srv[vcs.repo])
+ suffix = m.group('suffix')
+ break
+ elif m and srv[vcs.schemelessRepo]:
+ rootpath = m.group('root')
+ vcstype = m[vcs.type]
+ repourl = m[vcs.repo]
+ suffix = m.group('suffix')
+ break
+
+ return GoImport(rootpath, vcstype, repourl, suffix)
+
+ def __resolve_repository_dynamic(self, modulepath):
+ """Resolve the repository root in a dynamic manner.
+
+ The method is based on the go implementation of
+ `repoRootForImportDynamic` in
+ https://github.com/golang/go/blob/master/src/cmd/go/internal/vcs/vcs.go
+ """
+ url = urllib.parse.urlparse("https://" + modulepath)
+
+ class GoImportHTMLParser(HTMLParser):
+
+ def __init__(self):
+ super().__init__()
+ self.__srv = []
+
+ def handle_starttag(self, tag, attrs):
+ if tag == 'meta' and list(
+ filter(lambda a: (a[0] == 'name' and a[1] == 'go-import'), attrs)):
+ content = list(
+ filter(lambda a: (a[0] == 'content'), attrs))
+ if content:
+ self.__srv = content[0][1].split()
+
+ @property
+ def import_prefix(self):
+ return self.__srv[0] if len(self.__srv) else None
+
+ @property
+ def vcs(self):
+ return self.__srv[1] if len(self.__srv) else None
+
+ @property
+ def repourl(self):
+ return self.__srv[2] if len(self.__srv) else None
+
+ url = url.geturl() + "?go-get=1"
+ req = urllib.request.Request(url)
+
+ try:
+ resp = urllib.request.urlopen(req)
+
+ except URLError as url_err:
+ logger.warning(
+ "Failed to fetch page from [%s]: %s", url, str(url_err))
+ return None
+
+ parser = GoImportHTMLParser()
+ parser.feed(resp.read().decode('utf-8'))
+ parser.close()
+
+ return GoImport(parser.import_prefix, parser.vcs, parser.repourl, None)
+
+ def __resolve_from_golang_proxy(self, modulepath, version):
+ """
+ Resolves repository data from golang proxy
+ """
+ url = urllib.parse.urlparse("https://proxy.golang.org/"
+ + modulepath
+ + "/@v/"
+ + version
+ + ".info")
+
+ # Transform url to lower case, golang proxy doesn't like mixed case
+ req = urllib.request.Request(url.geturl().lower())
+
+ try:
+ resp = urllib.request.urlopen(req)
+ except URLError as url_err:
+ logger.warning(
+ "Failed to fetch page from [%s]: %s", url, str(url_err))
+ return None
+
+ golang_proxy_res = resp.read().decode('utf-8')
+ modinfo = json.loads(golang_proxy_res)
+
+ if modinfo and 'Origin' in modinfo:
+ origin = modinfo['Origin']
+ _root_url = urllib.parse.urlparse(origin['URL'])
+
+ # We normalize the repo URL since we don't want the scheme in it
+ _subdir = origin['Subdir'] if 'Subdir' in origin else None
+ _root, _, _ = self.__split_path_version(modulepath)
+ if _subdir:
+ _root = _root[:-len(_subdir)].strip('/')
+
+ _commit = origin['Hash']
+ _vcs = origin['VCS']
+ return (GoImport(_root, _vcs, _root_url.geturl(), None), _commit)
+
+ return None
+
+ def __resolve_repository(self, modulepath):
+ """
+ Resolves src uri from go module-path
+ """
+ repodata = self.__resolve_repository_static(modulepath)
+ if not repodata or not repodata.url:
+ repodata = self.__resolve_repository_dynamic(modulepath)
+ if not repodata or not repodata.url:
+ logger.error(
+ "Could not resolve repository for module path '%s'" % modulepath)
+ # There is no way to recover from this
+ sys.exit(14)
+ if repodata:
+ logger.debug(1, "Resolved download path for import '%s' => %s" % (
+ modulepath, repodata.url))
+ return repodata
+
+ def __split_path_version(self, path):
+ i = len(path)
+ dot = False
+ for j in range(i, 0, -1):
+ if path[j - 1] < '0' or path[j - 1] > '9':
+ break
+ if path[j - 1] == '.':
+ dot = True
+ break
+ i = j - 1
+
+ if i <= 1 or i == len(
+ path) or path[i - 1] != 'v' or path[i - 2] != '/':
+ return path, "", True
+
+ prefix, pathMajor = path[:i - 2], path[i - 2:]
+ if dot or len(
+ pathMajor) <= 2 or pathMajor[2] == '0' or pathMajor == "/v1":
+ return path, "", False
+
+ return prefix, pathMajor, True
+
+ def __get_path_major(self, pathMajor):
+ if not pathMajor:
+ return ""
+
+ if pathMajor[0] != '/' and pathMajor[0] != '.':
+ logger.error(
+ "pathMajor suffix %s passed to PathMajorPrefix lacks separator", pathMajor)
+
+ if pathMajor.startswith(".v") and pathMajor.endswith("-unstable"):
+ pathMajor = pathMajor[:len("-unstable") - 2]
+
+ return pathMajor[1:]
+
+ def __build_coderepo(self, repo, path):
+ codedir = ""
+ pathprefix, pathMajor, _ = self.__split_path_version(path)
+ if repo.root == path:
+ pathprefix = path
+ elif path.startswith(repo.root):
+ codedir = pathprefix[len(repo.root):].strip('/')
+
+ pseudoMajor = self.__get_path_major(pathMajor)
+
+ logger.debug("root='%s', codedir='%s', prefix='%s', pathMajor='%s', pseudoMajor='%s'",
+ repo.root, codedir, pathprefix, pathMajor, pseudoMajor)
+
+ return CodeRepo(path, repo.root, codedir,
+ pathMajor, pathprefix, pseudoMajor)
+
+ def __resolve_version(self, repo, path, version):
+ hash = None
+ coderoot = self.__build_coderepo(repo, path)
+
+ def vcs_fetch_all():
+ tmpdir = tempfile.mkdtemp()
+ clone_cmd = "%s clone --bare %s %s" % ('git', repo.url, tmpdir)
+ bb.process.run(clone_cmd)
+ log_cmd = "git log --all --pretty='%H %d' --decorate=short"
+ output, _ = bb.process.run(
+ log_cmd, shell=True, stderr=subprocess.PIPE, cwd=tmpdir)
+ bb.utils.prunedir(tmpdir)
+ return output.strip().split('\n')
+
+ def vcs_fetch_remote(tag):
+ # add * to grab ^{}
+ refs = {}
+ ls_remote_cmd = "git ls-remote -q --tags {} {}*".format(
+ repo.url, tag)
+ output, _ = bb.process.run(ls_remote_cmd)
+ output = output.strip().split('\n')
+ for line in output:
+ f = line.split(maxsplit=1)
+ if len(f) != 2:
+ continue
+
+ for prefix in ["HEAD", "refs/heads/", "refs/tags/"]:
+ if f[1].startswith(prefix):
+ refs[f[1][len(prefix):]] = f[0]
+
+ for key, hash in refs.items():
+ if key.endswith(r"^{}"):
+ refs[key.strip(r"^{}")] = hash
+
+ return refs[tag]
+
+ m_pseudo_semver = re_pseudo_semver.match(version)
+
+ if m_pseudo_semver:
+ remote_refs = vcs_fetch_all()
+ short_commit = m_pseudo_semver.group('commithash')
+ for l in remote_refs:
+ r = l.split(maxsplit=1)
+ sha1 = r[0] if len(r) else None
+ if not sha1:
+ logger.error(
+ "Ups: could not resolve abbref commit for %s" % short_commit)
+
+ elif sha1.startswith(short_commit):
+ hash = sha1
+ break
+ else:
+ m_semver = re_semver.match(version)
+ if m_semver:
+
+ def get_sha1_remote(re):
+ rsha1 = None
+ for line in remote_refs:
+ # Split lines of the following format:
+ # 22e90d9b964610628c10f673ca5f85b8c2a2ca9a (tag: sometag)
+ lineparts = line.split(maxsplit=1)
+ sha1 = lineparts[0] if len(lineparts) else None
+ refstring = lineparts[1] if len(
+ lineparts) == 2 else None
+ if refstring:
+ # Normalize tag string and split in case of multiple
+ # regs e.g. (tag: speech/v1.10.0, tag: orchestration/v1.5.0 ...)
+ refs = refstring.strip('(), ').split(',')
+ for ref in refs:
+ if re.match(ref.strip()):
+ rsha1 = sha1
+ return rsha1
+
+ semver = "v" + m_semver.group('major') + "."\
+ + m_semver.group('minor') + "."\
+ + m_semver.group('patch') \
+ + (("-" + m_semver.group('prerelease'))
+ if m_semver.group('prerelease') else "")
+
+ tag = os.path.join(
+ coderoot.codeDir, semver) if coderoot.codeDir else semver
+
+ # probe tag using 'ls-remote', which is faster than fetching
+ # complete history
+ hash = vcs_fetch_remote(tag)
+ if not hash:
+ # backup: fetch complete history
+ remote_refs = vcs_fetch_all()
+ hash = get_sha1_remote(
+ re.compile(fr"(tag:|HEAD ->) ({tag})"))
+
+ logger.debug(
+ "Resolving commit for tag '%s' -> '%s'", tag, hash)
+ return hash
+
+ def __generate_srcuri_inline_fcn(self, path, version, replaces=None):
+ """Generate SRC_URI functions for go imports"""
+
+ logger.info("Resolving repository for module %s", path)
+ # First try to resolve repo and commit from golang proxy
+ # Most info is already there and we don't have to go through the
+ # repository or even perform the version resolve magic
+ golang_proxy_info = self.__resolve_from_golang_proxy(path, version)
+ if golang_proxy_info:
+ repo = golang_proxy_info[0]
+ commit = golang_proxy_info[1]
+ else:
+ # Fallback
+ # Resolve repository by 'hand'
+ repo = self.__resolve_repository(path)
+ commit = self.__resolve_version(repo, path, version)
+
+ url = urllib.parse.urlparse(repo.url)
+ repo_url = url.netloc + url.path
+
+ coderoot = self.__build_coderepo(repo, path)
+
+ inline_fcn = "${@go_src_uri("
+ inline_fcn += f"'{repo_url}','{version}'"
+ if repo_url != path:
+ inline_fcn += f",path='{path}'"
+ if coderoot.codeDir:
+ inline_fcn += f",subdir='{coderoot.codeDir}'"
+ if repo.vcs != 'git':
+ inline_fcn += f",vcs='{repo.vcs}'"
+ if replaces:
+ inline_fcn += f",replaces='{replaces}'"
+ if coderoot.pathMajor:
+ inline_fcn += f",pathmajor='{coderoot.pathMajor}'"
+ inline_fcn += ")}"
+
+ return inline_fcn, commit
+
+ def __go_handle_dependencies(self, go_mod, srctree, localfilesdir, extravalues, d):
+
+ import re
+ src_uris = []
+ src_revs = []
+
+ def generate_src_rev(path, version, commithash):
+ src_rev = f"# {path}@{version} => {commithash}\n"
+ # Ups...maybe someone manipulated the source repository and the
+ # version or commit could not be resolved. This is a sign of
+ # a) the supply chain was manipulated (bad)
+ # b) the implementation for the version resolving didn't work
+ # anymore (less bad)
+ if not commithash:
+ src_rev += f"#!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n"
+ src_rev += f"#!!! Could not resolve version !!!\n"
+ src_rev += f"#!!! Possible supply chain attack !!!\n"
+ src_rev += f"#!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n"
+ src_rev += f"SRCREV_{path.replace('/', '.')} = \"{commithash}\""
+
+ return src_rev
+
+ # we first go over replacement list, because we are essentialy
+ # interested only in the replaced path
+ if go_mod['Replace']:
+ for replacement in go_mod['Replace']:
+ oldpath = replacement['Old']['Path']
+ path = replacement['New']['Path']
+ version = ''
+ if 'Version' in replacement['New']:
+ version = replacement['New']['Version']
+
+ if os.path.exists(os.path.join(srctree, path)):
+ # the module refers to the local path, remove it from requirement list
+ # because it's a local module
+ go_mod['Require'][:] = [v for v in go_mod['Require'] if v.get('Path') != oldpath]
+ else:
+ # Replace the path and the version, so we don't iterate replacement list anymore
+ for require in go_mod['Require']:
+ if require['Path'] == oldpath:
+ require.update({'Path': path, 'Version': version})
+ break
+
+ for require in go_mod['Require']:
+ path = require['Path']
+ version = require['Version']
+
+ inline_fcn, commithash = self.__generate_srcuri_inline_fcn(
+ path, version)
+ src_uris.append(inline_fcn)
+ src_revs.append(generate_src_rev(path, version, commithash))
+
+ # strip version part from module URL /vXX
+ baseurl = re.sub(r'/v(\d+)$', '', go_mod['Module']['Path'])
+ pn, _ = determine_from_url(baseurl)
+ go_mods_basename = "%s-modules.inc" % pn
+
+ go_mods_filename = os.path.join(localfilesdir, go_mods_basename)
+ with open(go_mods_filename, "w") as f:
+ # We introduce this indirection to make the tests a little easier
+ f.write("SRC_URI += \"${GO_DEPENDENCIES_SRC_URI}\"\n")
+ f.write("GO_DEPENDENCIES_SRC_URI = \"\\\n")
+ for uri in src_uris:
+ f.write(" " + uri + " \\\n")
+ f.write("\"\n\n")
+ for rev in src_revs:
+ f.write(rev + "\n")
+
+ extravalues['extrafiles'][go_mods_basename] = go_mods_filename
+
+ def __go_run_cmd(self, cmd, cwd, d):
+ return bb.process.run(cmd, env=dict(os.environ, PATH=d.getVar('PATH')),
+ shell=True, cwd=cwd)
+
+ def __go_native_version(self, d):
+ stdout, _ = self.__go_run_cmd("go version", None, d)
+ m = re.match(r".*\sgo((\d+).(\d+).(\d+))\s([\w\/]*)", stdout)
+ major = int(m.group(2))
+ minor = int(m.group(3))
+ patch = int(m.group(4))
+
+ return major, minor, patch
+
+ def __go_mod_patch(self, srctree, localfilesdir, extravalues, d):
+
+ patchfilename = "go.mod.patch"
+ go_native_version_major, go_native_version_minor, _ = self.__go_native_version(
+ d)
+ self.__go_run_cmd("go mod tidy -go=%d.%d" %
+ (go_native_version_major, go_native_version_minor), srctree, d)
+ stdout, _ = self.__go_run_cmd("go mod edit -json", srctree, d)
+
+ # Create patch in order to upgrade go version
+ self.__go_run_cmd("git diff go.mod > %s" % (patchfilename), srctree, d)
+ # Restore original state
+ self.__go_run_cmd("git checkout HEAD go.mod go.sum", srctree, d)
+
+ go_mod = json.loads(stdout)
+ tmpfile = os.path.join(localfilesdir, patchfilename)
+ shutil.move(os.path.join(srctree, patchfilename), tmpfile)
+
+ extravalues['extrafiles'][patchfilename] = tmpfile
+
+ return go_mod, patchfilename
+
+ def __go_mod_vendor(self, go_mod, srctree, localfilesdir, extravalues, d):
+ # Perform vendoring to retrieve the correct modules.txt
+ tmp_vendor_dir = tempfile.mkdtemp()
+
+ # -v causes to go to print modules.txt to stderr
+ _, stderr = self.__go_run_cmd(
+ "go mod vendor -v -o %s" % (tmp_vendor_dir), srctree, d)
+
+ modules_txt_basename = "modules.txt"
+ modules_txt_filename = os.path.join(localfilesdir, modules_txt_basename)
+ with open(modules_txt_filename, "w") as f:
+ f.write(stderr)
+
+ extravalues['extrafiles'][modules_txt_basename] = modules_txt_filename
+
+ licenses = []
+ lic_files_chksum = []
+ licvalues = guess_license(tmp_vendor_dir, d)
+ shutil.rmtree(tmp_vendor_dir)
+
+ if licvalues:
+ for licvalue in licvalues:
+ license = licvalue[0]
+ lics = tidy_licenses(fixup_license(license))
+ lics = [lic for lic in lics if lic not in licenses]
+ if len(lics):
+ licenses.extend(lics)
+ lic_files_chksum.append(
+ 'file://src/${GO_IMPORT}/vendor/%s;md5=%s' % (licvalue[1], licvalue[2]))
+
+ # strip version part from module URL /vXX
+ baseurl = re.sub(r'/v(\d+)$', '', go_mod['Module']['Path'])
+ pn, _ = determine_from_url(baseurl)
+ licenses_basename = "%s-licenses.inc" % pn
+
+ licenses_filename = os.path.join(localfilesdir, licenses_basename)
+ with open(licenses_filename, "w") as f:
+ f.write("GO_MOD_LICENSES = \"%s\"\n\n" %
+ ' & '.join(sorted(licenses, key=str.casefold)))
+ # We introduce this indirection to make the tests a little easier
+ f.write("LIC_FILES_CHKSUM += \"${VENDORED_LIC_FILES_CHKSUM}\"\n")
+ f.write("VENDORED_LIC_FILES_CHKSUM = \"\\\n")
+ for lic in lic_files_chksum:
+ f.write(" " + lic + " \\\n")
+ f.write("\"\n")
+
+ extravalues['extrafiles'][licenses_basename] = licenses_filename
+
+ def process(self, srctree, classes, lines_before,
+ lines_after, handled, extravalues):
+
+ if 'buildsystem' in handled:
+ return False
+
+ files = RecipeHandler.checkfiles(srctree, ['go.mod'])
+ if not files:
+ return False
+
+ d = bb.data.createCopy(tinfoil.config_data)
+ go_bindir = self.__ensure_go()
+ if not go_bindir:
+ sys.exit(14)
+
+ d.prependVar('PATH', '%s:' % go_bindir)
+ handled.append('buildsystem')
+ classes.append("go-vendor")
+
+ stdout, _ = self.__go_run_cmd("go mod edit -json", srctree, d)
+
+ go_mod = json.loads(stdout)
+ go_import = go_mod['Module']['Path']
+ go_version_match = re.match("([0-9]+).([0-9]+)", go_mod['Go'])
+ go_version_major = int(go_version_match.group(1))
+ go_version_minor = int(go_version_match.group(2))
+ src_uris = []
+
+ localfilesdir = tempfile.mkdtemp(prefix='recipetool-go-')
+ extravalues.setdefault('extrafiles', {})
+
+ # Use an explicit name determined from the module name because it
+ # might differ from the actual URL for replaced modules
+ # strip version part from module URL /vXX
+ baseurl = re.sub(r'/v(\d+)$', '', go_mod['Module']['Path'])
+ pn, _ = determine_from_url(baseurl)
+
+ # go.mod files with version < 1.17 may not include all indirect
+ # dependencies. Thus, we have to upgrade the go version.
+ if go_version_major == 1 and go_version_minor < 17:
+ logger.warning(
+ "go.mod files generated by Go < 1.17 might have incomplete indirect dependencies.")
+ go_mod, patchfilename = self.__go_mod_patch(srctree, localfilesdir,
+ extravalues, d)
+ src_uris.append(
+ "file://%s;patchdir=src/${GO_IMPORT}" % (patchfilename))
+
+ # Check whether the module is vendored. If so, we have nothing to do.
+ # Otherwise we gather all dependencies and add them to the recipe
+ if not os.path.exists(os.path.join(srctree, "vendor")):
+
+ # Write additional $BPN-modules.inc file
+ self.__go_mod_vendor(go_mod, srctree, localfilesdir, extravalues, d)
+ lines_before.append("LICENSE += \" & ${GO_MOD_LICENSES}\"")
+ lines_before.append("require %s-licenses.inc" % (pn))
+
+ self.__rewrite_src_uri(lines_before, ["file://modules.txt"])
+
+ self.__go_handle_dependencies(go_mod, srctree, localfilesdir, extravalues, d)
+ lines_before.append("require %s-modules.inc" % (pn))
+
+ # Do generic license handling
+ handle_license_vars(srctree, lines_before, handled, extravalues, d)
+ self.__rewrite_lic_uri(lines_before)
+
+ lines_before.append("GO_IMPORT = \"{}\"".format(baseurl))
+ lines_before.append("SRCREV_FORMAT = \"${BPN}\"")
+
+ def __update_lines_before(self, updated, newlines, lines_before):
+ if updated:
+ del lines_before[:]
+ for line in newlines:
+ # Hack to avoid newlines that edit_metadata inserts
+ if line.endswith('\n'):
+ line = line[:-1]
+ lines_before.append(line)
+ return updated
+
+ def __rewrite_lic_uri(self, lines_before):
+
+ def varfunc(varname, origvalue, op, newlines):
+ if varname == 'LIC_FILES_CHKSUM':
+ new_licenses = []
+ licenses = origvalue.split('\\')
+ for license in licenses:
+ if not license:
+ logger.warning("No license file was detected for the main module!")
+ # the license list of the main recipe must be empty
+ # this can happen for example in case of CLOSED license
+ # Fall through to complete recipe generation
+ continue
+ license = license.strip()
+ uri, chksum = license.split(';', 1)
+ url = urllib.parse.urlparse(uri)
+ new_uri = os.path.join(
+ url.scheme + "://", "src", "${GO_IMPORT}", url.netloc + url.path) + ";" + chksum
+ new_licenses.append(new_uri)
+
+ return new_licenses, None, -1, True
+ return origvalue, None, 0, True
+
+ updated, newlines = bb.utils.edit_metadata(
+ lines_before, ['LIC_FILES_CHKSUM'], varfunc)
+ return self.__update_lines_before(updated, newlines, lines_before)
+
+ def __rewrite_src_uri(self, lines_before, additional_uris = []):
+
+ def varfunc(varname, origvalue, op, newlines):
+ if varname == 'SRC_URI':
+ src_uri = ["git://${GO_IMPORT};destsuffix=git/src/${GO_IMPORT};nobranch=1;name=${BPN};protocol=https"]
+ src_uri.extend(additional_uris)
+ return src_uri, None, -1, True
+ return origvalue, None, 0, True
+
+ updated, newlines = bb.utils.edit_metadata(lines_before, ['SRC_URI'], varfunc)
+ return self.__update_lines_before(updated, newlines, lines_before)
+
+
+def register_recipe_handlers(handlers):
+ handlers.append((GoRecipeHandler(), 60))
diff --git a/scripts/lib/recipetool/create_npm.py b/scripts/lib/recipetool/create_npm.py
index 3394a89970..113a89f6a6 100644
--- a/scripts/lib/recipetool/create_npm.py
+++ b/scripts/lib/recipetool/create_npm.py
@@ -13,6 +13,7 @@ import sys
import tempfile
import bb
from bb.fetch2.npm import NpmEnvironment
+from bb.fetch2.npm import npm_package
from bb.fetch2.npmsw import foreach_dependencies
from recipetool.create import RecipeHandler
from recipetool.create import get_license_md5sums
@@ -31,15 +32,6 @@ class NpmRecipeHandler(RecipeHandler):
"""Class to handle the npm recipe creation"""
@staticmethod
- def _npm_name(name):
- """Generate a Yocto friendly npm name"""
- name = re.sub("/", "-", name)
- name = name.lower()
- name = re.sub(r"[^\-a-z0-9]", "", name)
- name = name.strip("-")
- return name
-
- @staticmethod
def _get_registry(lines):
"""Get the registry value from the 'npm://registry' url"""
registry = None
@@ -142,11 +134,10 @@ class NpmRecipeHandler(RecipeHandler):
licfiles.append(os.path.relpath(readme, srctree))
# Handle the dependencies
- def _handle_dependency(name, params, deptree):
- suffix = "-".join([self._npm_name(dep) for dep in deptree])
- destdirs = [os.path.join("node_modules", dep) for dep in deptree]
- destdir = os.path.join(*destdirs)
- packages["${PN}-" + suffix] = destdir
+ def _handle_dependency(name, params, destdir):
+ deptree = destdir.split('node_modules/')
+ suffix = "-".join([npm_package(dep) for dep in deptree])
+ packages["${PN}" + suffix] = destdir
_licfiles_append_fallback_readme_files(destdir)
with open(shrinkwrap_file, "r") as f:
@@ -155,6 +146,23 @@ class NpmRecipeHandler(RecipeHandler):
foreach_dependencies(shrinkwrap, _handle_dependency, dev)
return licfiles, packages
+
+ # Handle the peer dependencies
+ def _handle_peer_dependency(self, shrinkwrap_file):
+ """Check if package has peer dependencies and show warning if it is the case"""
+ with open(shrinkwrap_file, "r") as f:
+ shrinkwrap = json.load(f)
+
+ packages = shrinkwrap.get("packages", {})
+ peer_deps = packages.get("", {}).get("peerDependencies", {})
+
+ for peer_dep in peer_deps:
+ peer_dep_yocto_name = npm_package(peer_dep)
+ bb.warn(peer_dep + " is a peer dependencie of the actual package. " +
+ "Please add this peer dependencie to the RDEPENDS variable as %s and generate its recipe with devtool"
+ % peer_dep_yocto_name)
+
+
def process(self, srctree, classes, lines_before, lines_after, handled, extravalues):
"""Handle the npm recipe creation"""
@@ -173,7 +181,7 @@ class NpmRecipeHandler(RecipeHandler):
if "name" not in data or "version" not in data:
return False
- extravalues["PN"] = self._npm_name(data["name"])
+ extravalues["PN"] = npm_package(data["name"])
extravalues["PV"] = data["version"]
if "description" in data:
@@ -242,7 +250,7 @@ class NpmRecipeHandler(RecipeHandler):
value = origvalue.replace("version=" + data["version"], "version=${PV}")
value = value.replace("version=latest", "version=${PV}")
values = [line.strip() for line in value.strip('\n').splitlines()]
- if "dependencies" in shrinkwrap:
+ if "dependencies" in shrinkwrap.get("packages", {}).get("", {}):
values.append(url_recipe)
return values, None, 4, False
@@ -292,6 +300,9 @@ class NpmRecipeHandler(RecipeHandler):
classes.append("npm")
handled.append("buildsystem")
+ # Check if package has peer dependencies and inform the user
+ self._handle_peer_dependency(shrinkwrap_file)
+
return True
def register_recipe_handlers(handlers):
diff --git a/scripts/lib/recipetool/setvar.py b/scripts/lib/recipetool/setvar.py
index f8e2ee75fb..b5ad335cae 100644
--- a/scripts/lib/recipetool/setvar.py
+++ b/scripts/lib/recipetool/setvar.py
@@ -49,6 +49,7 @@ def setvar(args):
for patch in patches:
for line in patch:
sys.stdout.write(line)
+ tinfoil.modified_files()
return 0
diff --git a/scripts/lib/resulttool/log.py b/scripts/lib/resulttool/log.py
index eb3927ec82..15148ca288 100644
--- a/scripts/lib/resulttool/log.py
+++ b/scripts/lib/resulttool/log.py
@@ -28,12 +28,10 @@ def show_reproducible(result, reproducible, logger):
def log(args, logger):
results = resultutils.load_resultsdata(args.source)
- ptest_count = sum(1 for _, _, _, r in resultutils.test_run_results(results) if 'ptestresult.sections' in r)
- if ptest_count > 1 and not args.prepend_run:
- print("%i ptest sections found. '--prepend-run' is required" % ptest_count)
- return 1
-
for _, run_name, _, r in resultutils.test_run_results(results):
+ if args.list_ptest:
+ print('\n'.join(sorted(r['ptestresult.sections'].keys())))
+
if args.dump_ptest:
for sectname in ['ptestresult.sections', 'ltpposixresult.sections', 'ltpresult.sections']:
if sectname in r:
@@ -48,6 +46,9 @@ def log(args, logger):
os.makedirs(dest_dir, exist_ok=True)
dest = os.path.join(dest_dir, '%s.log' % name)
+ if os.path.exists(dest):
+ print("Overlapping ptest logs found, skipping %s. The '--prepend-run' option would avoid this" % name)
+ continue
print(dest)
with open(dest, 'w') as f:
f.write(logdata)
@@ -86,6 +87,8 @@ def register_commands(subparsers):
parser.set_defaults(func=log)
parser.add_argument('source',
help='the results file/directory/URL to import')
+ parser.add_argument('--list-ptest', action='store_true',
+ help='list the ptest test names')
parser.add_argument('--ptest', action='append', default=[],
help='show logs for a ptest')
parser.add_argument('--dump-ptest', metavar='DIR',
diff --git a/scripts/lib/resulttool/regression.py b/scripts/lib/resulttool/regression.py
index 9f952951b3..10e7d13841 100644
--- a/scripts/lib/resulttool/regression.py
+++ b/scripts/lib/resulttool/regression.py
@@ -7,15 +7,209 @@
#
import resulttool.resultutils as resultutils
-import json
from oeqa.utils.git import GitRepo
import oeqa.utils.gitarchive as gitarchive
-def compare_result(logger, base_name, target_name, base_result, target_result):
+METADATA_MATCH_TABLE = {
+ "oeselftest": "OESELFTEST_METADATA"
+}
+
+OESELFTEST_METADATA_GUESS_TABLE={
+ "trigger-build-posttrigger": {
+ "run_all_tests": False,
+ "run_tests":["buildoptions.SourceMirroring.test_yocto_source_mirror"],
+ "skips": None,
+ "machine": None,
+ "select_tags":None,
+ "exclude_tags": None
+ },
+ "reproducible": {
+ "run_all_tests": False,
+ "run_tests":["reproducible"],
+ "skips": None,
+ "machine": None,
+ "select_tags":None,
+ "exclude_tags": None
+ },
+ "arch-qemu-quick": {
+ "run_all_tests": True,
+ "run_tests":None,
+ "skips": None,
+ "machine": None,
+ "select_tags":["machine"],
+ "exclude_tags": None
+ },
+ "arch-qemu-full-x86-or-x86_64": {
+ "run_all_tests": True,
+ "run_tests":None,
+ "skips": None,
+ "machine": None,
+ "select_tags":["machine", "toolchain-system"],
+ "exclude_tags": None
+ },
+ "arch-qemu-full-others": {
+ "run_all_tests": True,
+ "run_tests":None,
+ "skips": None,
+ "machine": None,
+ "select_tags":["machine", "toolchain-user"],
+ "exclude_tags": None
+ },
+ "selftest": {
+ "run_all_tests": True,
+ "run_tests":None,
+ "skips": ["distrodata.Distrodata.test_checkpkg", "buildoptions.SourceMirroring.test_yocto_source_mirror", "reproducible"],
+ "machine": None,
+ "select_tags":None,
+ "exclude_tags": ["machine", "toolchain-system", "toolchain-user"]
+ },
+ "bringup": {
+ "run_all_tests": True,
+ "run_tests":None,
+ "skips": ["distrodata.Distrodata.test_checkpkg", "buildoptions.SourceMirroring.test_yocto_source_mirror"],
+ "machine": None,
+ "select_tags":None,
+ "exclude_tags": ["machine", "toolchain-system", "toolchain-user"]
+ }
+}
+
+STATUS_STRINGS = {
+ "None": "No matching test result"
+}
+
+REGRESSIONS_DISPLAY_LIMIT=50
+
+MISSING_TESTS_BANNER = "-------------------------- Missing tests --------------------------"
+ADDITIONAL_DATA_BANNER = "--------------------- Matches and improvements --------------------"
+
+def test_has_at_least_one_matching_tag(test, tag_list):
+ return "oetags" in test and any(oetag in tag_list for oetag in test["oetags"])
+
+def all_tests_have_at_least_one_matching_tag(results, tag_list):
+ return all(test_has_at_least_one_matching_tag(test_result, tag_list) or test_name.startswith("ptestresult") for (test_name, test_result) in results.items())
+
+def any_test_have_any_matching_tag(results, tag_list):
+ return any(test_has_at_least_one_matching_tag(test, tag_list) for test in results.values())
+
+def have_skipped_test(result, test_prefix):
+ return all( result[test]['status'] == "SKIPPED" for test in result if test.startswith(test_prefix))
+
+def have_all_tests_skipped(result, test_prefixes_list):
+ return all(have_skipped_test(result, test_prefix) for test_prefix in test_prefixes_list)
+
+def guess_oeselftest_metadata(results):
+ """
+ When an oeselftest test result is lacking OESELFTEST_METADATA, we can try to guess it based on results content.
+ Check results for specific values (absence/presence of oetags, number and name of executed tests...),
+ and if it matches one of known configuration from autobuilder configuration, apply guessed OSELFTEST_METADATA
+ to it to allow proper test filtering.
+ This guessing process is tightly coupled to config.json in autobuilder. It should trigger less and less,
+ as new tests will have OESELFTEST_METADATA properly appended at test reporting time
+ """
+
+ if len(results) == 1 and "buildoptions.SourceMirroring.test_yocto_source_mirror" in results:
+ return OESELFTEST_METADATA_GUESS_TABLE['trigger-build-posttrigger']
+ elif all(result.startswith("reproducible") for result in results):
+ return OESELFTEST_METADATA_GUESS_TABLE['reproducible']
+ elif all_tests_have_at_least_one_matching_tag(results, ["machine"]):
+ return OESELFTEST_METADATA_GUESS_TABLE['arch-qemu-quick']
+ elif all_tests_have_at_least_one_matching_tag(results, ["machine", "toolchain-system"]):
+ return OESELFTEST_METADATA_GUESS_TABLE['arch-qemu-full-x86-or-x86_64']
+ elif all_tests_have_at_least_one_matching_tag(results, ["machine", "toolchain-user"]):
+ return OESELFTEST_METADATA_GUESS_TABLE['arch-qemu-full-others']
+ elif not any_test_have_any_matching_tag(results, ["machine", "toolchain-user", "toolchain-system"]):
+ if have_all_tests_skipped(results, ["distrodata.Distrodata.test_checkpkg", "buildoptions.SourceMirroring.test_yocto_source_mirror", "reproducible"]):
+ return OESELFTEST_METADATA_GUESS_TABLE['selftest']
+ elif have_all_tests_skipped(results, ["distrodata.Distrodata.test_checkpkg", "buildoptions.SourceMirroring.test_yocto_source_mirror"]):
+ return OESELFTEST_METADATA_GUESS_TABLE['bringup']
+
+ return None
+
+
+def metadata_matches(base_configuration, target_configuration):
+ """
+ For passed base and target, check test type. If test type matches one of
+ properties described in METADATA_MATCH_TABLE, compare metadata if it is
+ present in base. Return true if metadata matches, or if base lacks some
+ data (either TEST_TYPE or the corresponding metadata)
+ """
+ test_type = base_configuration.get('TEST_TYPE')
+ if test_type not in METADATA_MATCH_TABLE:
+ return True
+
+ metadata_key = METADATA_MATCH_TABLE.get(test_type)
+ if target_configuration.get(metadata_key) != base_configuration.get(metadata_key):
+ return False
+
+ return True
+
+
+def machine_matches(base_configuration, target_configuration):
+ return base_configuration.get('MACHINE') == target_configuration.get('MACHINE')
+
+
+def can_be_compared(logger, base, target):
+ """
+ Some tests are not relevant to be compared, for example some oeselftest
+ run with different tests sets or parameters. Return true if tests can be
+ compared
+ """
+ ret = True
+ base_configuration = base['configuration']
+ target_configuration = target['configuration']
+
+ # Older test results lack proper OESELFTEST_METADATA: if not present, try to guess it based on tests results.
+ if base_configuration.get('TEST_TYPE') == 'oeselftest' and 'OESELFTEST_METADATA' not in base_configuration:
+ guess = guess_oeselftest_metadata(base['result'])
+ if guess is None:
+ logger.error(f"ERROR: did not manage to guess oeselftest metadata for {base_configuration['STARTTIME']}")
+ else:
+ logger.debug(f"Enriching {base_configuration['STARTTIME']} with {guess}")
+ base_configuration['OESELFTEST_METADATA'] = guess
+ if target_configuration.get('TEST_TYPE') == 'oeselftest' and 'OESELFTEST_METADATA' not in target_configuration:
+ guess = guess_oeselftest_metadata(target['result'])
+ if guess is None:
+ logger.error(f"ERROR: did not manage to guess oeselftest metadata for {target_configuration['STARTTIME']}")
+ else:
+ logger.debug(f"Enriching {target_configuration['STARTTIME']} with {guess}")
+ target_configuration['OESELFTEST_METADATA'] = guess
+
+ # Test runs with LTP results in should only be compared with other runs with LTP tests in them
+ if base_configuration.get('TEST_TYPE') == 'runtime' and any(result.startswith("ltpresult") for result in base['result']):
+ ret = target_configuration.get('TEST_TYPE') == 'runtime' and any(result.startswith("ltpresult") for result in target['result'])
+
+ return ret and metadata_matches(base_configuration, target_configuration) \
+ and machine_matches(base_configuration, target_configuration)
+
+def get_status_str(raw_status):
+ raw_status_lower = raw_status.lower() if raw_status else "None"
+ return STATUS_STRINGS.get(raw_status_lower, raw_status)
+
+def get_additional_info_line(new_pass_count, new_tests):
+ result=[]
+ if new_tests:
+ result.append(f'+{new_tests} test(s) present')
+ if new_pass_count:
+ result.append(f'+{new_pass_count} test(s) now passing')
+
+ if not result:
+ return ""
+
+ return ' -> ' + ', '.join(result) + '\n'
+
+def compare_result(logger, base_name, target_name, base_result, target_result, display_limit=None):
base_result = base_result.get('result')
target_result = target_result.get('result')
result = {}
+ new_tests = 0
+ regressions = {}
+ resultstring = ""
+ new_tests = 0
+ new_pass_count = 0
+
+ display_limit = int(display_limit) if display_limit else REGRESSIONS_DISPLAY_LIMIT
+
if base_result and target_result:
for k in base_result:
base_testcase = base_result[k]
@@ -27,12 +221,47 @@ def compare_result(logger, base_name, target_name, base_result, target_result):
result[k] = {'base': base_status, 'target': target_status}
else:
logger.error('Failed to retrieved base test case status: %s' % k)
+
+ # Also count new tests that were not present in base results: it
+ # could be newly added tests, but it could also highlights some tests
+ # renames or fixed faulty ptests
+ for k in target_result:
+ if k not in base_result:
+ new_tests += 1
if result:
- resultstring = "Regression: %s\n %s\n" % (base_name, target_name)
- for k in sorted(result):
- resultstring += ' %s: %s -> %s\n' % (k, result[k]['base'], result[k]['target'])
+ new_pass_count = sum(test['target'] is not None and test['target'].startswith("PASS") for test in result.values())
+ # Print a regression report only if at least one test has a regression status (FAIL, SKIPPED, absent...)
+ if new_pass_count < len(result):
+ resultstring = "Regression: %s\n %s\n" % (base_name, target_name)
+ for k in sorted(result):
+ if not result[k]['target'] or not result[k]['target'].startswith("PASS"):
+ # Differentiate each ptest kind when listing regressions
+ key_parts = k.split('.')
+ key = '.'.join(key_parts[:2]) if k.startswith('ptest') else key_parts[0]
+ # Append new regression to corresponding test family
+ regressions[key] = regressions.setdefault(key, []) + [' %s: %s -> %s\n' % (k, get_status_str(result[k]['base']), get_status_str(result[k]['target']))]
+ resultstring += f" Total: {sum([len(regressions[r]) for r in regressions])} new regression(s):\n"
+ for k in regressions:
+ resultstring += f" {len(regressions[k])} regression(s) for {k}\n"
+ count_to_print=min([display_limit, len(regressions[k])]) if display_limit > 0 else len(regressions[k])
+ resultstring += ''.join(regressions[k][:count_to_print])
+ if count_to_print < len(regressions[k]):
+ resultstring+=' [...]\n'
+ if new_pass_count > 0:
+ resultstring += f' Additionally, {new_pass_count} previously failing test(s) is/are now passing\n'
+ if new_tests > 0:
+ resultstring += f' Additionally, {new_tests} new test(s) is/are present\n'
+ else:
+ resultstring = "%s\n%s\n" % (base_name, target_name)
+ result = None
else:
- resultstring = "Match: %s\n %s" % (base_name, target_name)
+ resultstring = "%s\n%s\n" % (base_name, target_name)
+
+ if not result:
+ additional_info = get_additional_info_line(new_pass_count, new_tests)
+ if additional_info:
+ resultstring += additional_info
+
return result, resultstring
def get_results(logger, source):
@@ -44,12 +273,38 @@ def regression(args, logger):
regression_common(args, logger, base_results, target_results)
+# Some test case naming is poor and contains random strings, particularly lttng/babeltrace.
+# Truncating the test names works since they contain file and line number identifiers
+# which allows us to match them without the random components.
+def fixup_ptest_names(results, logger):
+ for r in results:
+ for i in results[r]:
+ tests = list(results[r][i]['result'].keys())
+ for test in tests:
+ new = None
+ if test.startswith(("ptestresult.lttng-tools.", "ptestresult.babeltrace.", "ptestresult.babeltrace2")) and "_-_" in test:
+ new = test.split("_-_")[0]
+ elif test.startswith(("ptestresult.curl.")) and "__" in test:
+ new = test.split("__")[0]
+ elif test.startswith(("ptestresult.dbus.")) and "__" in test:
+ new = test.split("__")[0]
+ elif test.startswith("ptestresult.binutils") and "build-st-" in test:
+ new = test.split(" ")[0]
+ elif test.startswith("ptestresult.gcc") and "/tmp/runtest." in test:
+ new = ".".join(test.split(".")[:2])
+ if new:
+ results[r][i]['result'][new] = results[r][i]['result'][test]
+ del results[r][i]['result'][test]
+
def regression_common(args, logger, base_results, target_results):
if args.base_result_id:
base_results = resultutils.filter_resultsdata(base_results, args.base_result_id)
if args.target_result_id:
target_results = resultutils.filter_resultsdata(target_results, args.target_result_id)
+ fixup_ptest_names(base_results, logger)
+ fixup_ptest_names(target_results, logger)
+
matches = []
regressions = []
notfound = []
@@ -62,7 +317,9 @@ def regression_common(args, logger, base_results, target_results):
# removing any pairs which match
for c in base.copy():
for b in target.copy():
- res, resstr = compare_result(logger, c, b, base_results[a][c], target_results[a][b])
+ if not can_be_compared(logger, base_results[a][c], target_results[a][b]):
+ continue
+ res, resstr = compare_result(logger, c, b, base_results[a][c], target_results[a][b], args.limit)
if not res:
matches.append(resstr)
base.remove(c)
@@ -71,15 +328,18 @@ def regression_common(args, logger, base_results, target_results):
# Should only now see regressions, we may not be able to match multiple pairs directly
for c in base:
for b in target:
- res, resstr = compare_result(logger, c, b, base_results[a][c], target_results[a][b])
+ if not can_be_compared(logger, base_results[a][c], target_results[a][b]):
+ continue
+ res, resstr = compare_result(logger, c, b, base_results[a][c], target_results[a][b], args.limit)
if res:
regressions.append(resstr)
else:
notfound.append("%s not found in target" % a)
- print("\n".join(sorted(matches)))
print("\n".join(sorted(regressions)))
+ print("\n" + MISSING_TESTS_BANNER + "\n")
print("\n".join(sorted(notfound)))
-
+ print("\n" + ADDITIONAL_DATA_BANNER + "\n")
+ print("\n".join(sorted(matches)))
return 0
def regression_git(args, logger):
@@ -183,4 +443,5 @@ def register_commands(subparsers):
parser_build.add_argument('--commit-number', help="Revision number to search for, redundant if --commit is specified")
parser_build.add_argument('--commit2', help="Revision to compare with")
parser_build.add_argument('--commit-number2', help="Revision number to compare with, redundant if --commit2 is specified")
+ parser_build.add_argument('-l', '--limit', default=REGRESSIONS_DISPLAY_LIMIT, help="Maximum number of changes to display per test. Can be set to 0 to print all changes")
diff --git a/scripts/lib/resulttool/report.py b/scripts/lib/resulttool/report.py
index f0ca50ebe2..a349510ab8 100644
--- a/scripts/lib/resulttool/report.py
+++ b/scripts/lib/resulttool/report.py
@@ -176,7 +176,10 @@ class ResultsTextReport(object):
vals['sort'] = line['testseries'] + "_" + line['result_id']
vals['failed_testcases'] = line['failed_testcases']
for k in cols:
- vals[k] = "%d (%s%%)" % (line[k], format(line[k] / total_tested * 100, '.0f'))
+ if total_tested:
+ vals[k] = "%d (%s%%)" % (line[k], format(line[k] / total_tested * 100, '.0f'))
+ else:
+ vals[k] = "0 (0%)"
for k in maxlen:
if k in vals and len(vals[k]) > maxlen[k]:
maxlen[k] = len(vals[k])
diff --git a/scripts/lib/resulttool/resultutils.py b/scripts/lib/resulttool/resultutils.py
index 8917022d36..c5521d81bd 100644
--- a/scripts/lib/resulttool/resultutils.py
+++ b/scripts/lib/resulttool/resultutils.py
@@ -58,7 +58,11 @@ def append_resultsdata(results, f, configmap=store_map, configvars=extra_configv
testseries = posixpath.basename(posixpath.dirname(url.path))
else:
with open(f, "r") as filedata:
- data = json.load(filedata)
+ try:
+ data = json.load(filedata)
+ except json.decoder.JSONDecodeError:
+ print("Cannot decode {}. Possible corruption. Skipping.".format(f))
+ data = ""
testseries = os.path.basename(os.path.dirname(f))
else:
data = f
@@ -142,7 +146,7 @@ def generic_get_log(sectionname, results, section):
return decode_log(ptest['log'])
def ptestresult_get_log(results, section):
- return generic_get_log('ptestresuls.sections', results, section)
+ return generic_get_log('ptestresult.sections', results, section)
def generic_get_rawlogs(sectname, results):
if sectname not in results:
diff --git a/scripts/lib/scriptutils.py b/scripts/lib/scriptutils.py
index adf81476f0..f23e53cba9 100644
--- a/scripts/lib/scriptutils.py
+++ b/scripts/lib/scriptutils.py
@@ -5,7 +5,6 @@
# SPDX-License-Identifier: GPL-2.0-only
#
-import argparse
import glob
import logging
import os
@@ -25,7 +24,7 @@ class KeepAliveStreamHandler(logging.StreamHandler):
def __init__(self, keepalive=True, **kwargs):
super().__init__(**kwargs)
if keepalive is True:
- keepalive = 5000 # default timeout
+ keepalive = 5000 # default timeout
self._timeout = threading.Condition()
self._stop = False
@@ -36,9 +35,9 @@ class KeepAliveStreamHandler(logging.StreamHandler):
with self._timeout:
if not self._timeout.wait(keepalive):
self.emit(logging.LogRecord("keepalive", logging.INFO,
- None, None, "Keepalive message", None, None))
+ None, None, "Keepalive message", None, None))
- self._thread = threading.Thread(target = thread, daemon = True)
+ self._thread = threading.Thread(target=thread, daemon=True)
self._thread.start()
def close(self):
@@ -72,16 +71,15 @@ def logger_setup_color(logger, color='auto'):
for handler in logger.handlers:
if (isinstance(handler, logging.StreamHandler) and
- isinstance(handler.formatter, BBLogFormatter)):
+ isinstance(handler.formatter, BBLogFormatter)):
if color == 'always' or (color == 'auto' and handler.stream.isatty()):
handler.formatter.enable_color()
def load_plugins(logger, plugins, pluginpath):
-
def load_plugin(name):
logger.debug('Loading plugin %s' % name)
- spec = importlib.machinery.PathFinder.find_spec(name, path=[pluginpath] )
+ spec = importlib.machinery.PathFinder.find_spec(name, path=[pluginpath])
if spec:
mod = importlib.util.module_from_spec(spec)
spec.loader.exec_module(mod)
@@ -179,7 +177,7 @@ def fetch_url(tinfoil, srcuri, srcrev, destdir, logger, preserve_tmp=False, mirr
f.write('BB_STRICT_CHECKSUM = "ignore"\n')
f.write('SRC_URI = "%s"\n' % srcuri)
f.write('SRCREV = "%s"\n' % srcrev)
- f.write('PV = "0.0+${SRCPV}"\n')
+ f.write('PV = "0.0+"\n')
f.write('WORKDIR = "%s"\n' % tmpworkdir)
# Set S out of the way so it doesn't get created under the workdir
f.write('S = "%s"\n' % os.path.join(tmpdir, 'emptysrc'))
@@ -279,6 +277,6 @@ def filter_src_subdirs(pth):
Used by devtool and recipetool.
"""
dirlist = os.listdir(pth)
- filterout = ['git.indirectionsymlink', 'source-date-epoch']
+ filterout = ['git.indirectionsymlink', 'source-date-epoch', 'sstate-install-recipe_qa']
dirlist = [x for x in dirlist if x not in filterout]
return dirlist
diff --git a/scripts/lib/wic/canned-wks/efi-bootdisk.wks.in b/scripts/lib/wic/canned-wks/efi-bootdisk.wks.in
index 7300e65e32..2fd286ff98 100644
--- a/scripts/lib/wic/canned-wks/efi-bootdisk.wks.in
+++ b/scripts/lib/wic/canned-wks/efi-bootdisk.wks.in
@@ -1,3 +1,3 @@
bootloader --ptable gpt
-part /boot --source rootfs --rootfs-dir=${IMAGE_ROOTFS}/boot --fstype=vfat --label boot --active --align 1024 --use-uuid --overhead-factor 1.0
+part /boot --source rootfs --rootfs-dir=${IMAGE_ROOTFS}/boot --fstype=vfat --label boot --active --align 1024 --use-uuid --overhead-factor 1.1
part / --source rootfs --fstype=ext4 --label root --align 1024 --exclude-path boot/
diff --git a/scripts/lib/wic/canned-wks/qemuloongarch.wks b/scripts/lib/wic/canned-wks/qemuloongarch.wks
new file mode 100644
index 0000000000..8465c7a8c0
--- /dev/null
+++ b/scripts/lib/wic/canned-wks/qemuloongarch.wks
@@ -0,0 +1,3 @@
+# short-description: Create qcow2 image for LoongArch QEMU machines
+
+part / --source rootfs --fstype=ext4 --label root --align 4096 --size 5G
diff --git a/scripts/lib/wic/canned-wks/qemux86-directdisk.wks b/scripts/lib/wic/canned-wks/qemux86-directdisk.wks
index 22b45217f1..808997611a 100644
--- a/scripts/lib/wic/canned-wks/qemux86-directdisk.wks
+++ b/scripts/lib/wic/canned-wks/qemux86-directdisk.wks
@@ -4,5 +4,5 @@
include common.wks.inc
-bootloader --timeout=0 --append="rw oprofile.timer=1 rootfstype=ext4 "
+bootloader --timeout=0 --append="rw oprofile.timer=1 rootfstype=ext4 console=tty console=ttyS0 "
diff --git a/scripts/lib/wic/filemap.py b/scripts/lib/wic/filemap.py
index 4d9da28172..85b39d5d74 100644
--- a/scripts/lib/wic/filemap.py
+++ b/scripts/lib/wic/filemap.py
@@ -46,6 +46,13 @@ def get_block_size(file_obj):
bsize = stat.st_blksize
else:
raise IOError("Unable to determine block size")
+
+ # The logic in this script only supports a maximum of a 4KB
+ # block size
+ max_block_size = 4 * 1024
+ if bsize > max_block_size:
+ bsize = max_block_size
+
return bsize
class ErrorNotSupp(Exception):
diff --git a/scripts/lib/wic/help.py b/scripts/lib/wic/help.py
index 73e3380cde..163535e431 100644
--- a/scripts/lib/wic/help.py
+++ b/scripts/lib/wic/help.py
@@ -1118,7 +1118,7 @@ COMMAND:
TOPIC:
overview - Presents an overall overview of Wic
plugins - Presents an overview and API for Wic plugins
- kickstart - Presents a Wic kicstart file reference
+ kickstart - Presents a Wic kickstart file reference
Examples:
diff --git a/scripts/lib/wic/ksparser.py b/scripts/lib/wic/ksparser.py
index a49b7b97c4..7ef3dc83dd 100644
--- a/scripts/lib/wic/ksparser.py
+++ b/scripts/lib/wic/ksparser.py
@@ -159,7 +159,7 @@ class KickStart():
part.add_argument('--fstype', default='vfat',
choices=('ext2', 'ext3', 'ext4', 'btrfs',
'squashfs', 'vfat', 'msdos', 'erofs',
- 'swap'))
+ 'swap', 'none'))
part.add_argument('--mkfs-extraopts', default='')
part.add_argument('--label')
part.add_argument('--use-label', action='store_true')
@@ -171,6 +171,7 @@ class KickStart():
part.add_argument('--rootfs-dir')
part.add_argument('--type', default='primary',
choices = ('primary', 'logical'))
+ part.add_argument('--hidden', action='store_true')
# --size and --fixed-size cannot be specified together; options
# ----extra-space and --overhead-factor should also raise a parser
@@ -187,11 +188,12 @@ class KickStart():
part.add_argument('--uuid')
part.add_argument('--fsuuid')
part.add_argument('--no-fstab-update', action='store_true')
+ part.add_argument('--mbr', action='store_true')
bootloader = subparsers.add_parser('bootloader')
bootloader.add_argument('--append')
bootloader.add_argument('--configfile')
- bootloader.add_argument('--ptable', choices=('msdos', 'gpt'),
+ bootloader.add_argument('--ptable', choices=('msdos', 'gpt', 'gpt-hybrid'),
default='msdos')
bootloader.add_argument('--timeout', type=int)
bootloader.add_argument('--source')
diff --git a/scripts/lib/wic/misc.py b/scripts/lib/wic/misc.py
index 3bc165fd7d..1a7c140fa6 100644
--- a/scripts/lib/wic/misc.py
+++ b/scripts/lib/wic/misc.py
@@ -25,7 +25,7 @@ from wic import WicError
logger = logging.getLogger('wic')
# executable -> recipe pairs for exec_native_cmd
-NATIVE_RECIPES = {"bmaptool": "bmap-tools",
+NATIVE_RECIPES = {"bmaptool": "bmaptool",
"dumpe2fs": "e2fsprogs",
"grub-mkimage": "grub-efi",
"isohybrid": "syslinux",
@@ -141,11 +141,12 @@ def exec_native_cmd(cmd_and_args, native_sysroot, pseudo=""):
cmd_and_args = pseudo + cmd_and_args
hosttools_dir = get_bitbake_var("HOSTTOOLS_DIR")
+ target_sys = get_bitbake_var("TARGET_SYS")
- native_paths = "%s/sbin:%s/usr/sbin:%s/usr/bin:%s/bin:%s" % \
+ native_paths = "%s/sbin:%s/usr/sbin:%s/usr/bin:%s/usr/bin/%s:%s/bin:%s" % \
(native_sysroot, native_sysroot,
- native_sysroot, native_sysroot,
- hosttools_dir)
+ native_sysroot, native_sysroot, target_sys,
+ native_sysroot, hosttools_dir)
native_cmd_and_args = "export PATH=%s:$PATH;%s" % \
(native_paths, cmd_and_args)
diff --git a/scripts/lib/wic/partition.py b/scripts/lib/wic/partition.py
index e50871b8d7..795707ec5d 100644
--- a/scripts/lib/wic/partition.py
+++ b/scripts/lib/wic/partition.py
@@ -59,6 +59,8 @@ class Partition():
self.updated_fstab_path = None
self.has_fstab = False
self.update_fstab_in_rootfs = False
+ self.hidden = args.hidden
+ self.mbr = args.mbr
self.lineno = lineno
self.source_file = ""
@@ -133,6 +135,8 @@ class Partition():
self.update_fstab_in_rootfs = True
if not self.source:
+ if self.fstype == "none" or self.no_table:
+ return
if not self.size and not self.fixed_size:
raise WicError("The %s partition has a size of zero. Please "
"specify a non-zero --size/--fixed-size for that "
@@ -280,6 +284,20 @@ class Partition():
extraopts = self.mkfs_extraopts or "-F -i 8192"
+ if os.getenv('SOURCE_DATE_EPOCH'):
+ sde_time = int(os.getenv('SOURCE_DATE_EPOCH'))
+ if pseudo:
+ pseudo = "export E2FSPROGS_FAKE_TIME=%s;%s " % (sde_time, pseudo)
+ else:
+ pseudo = "export E2FSPROGS_FAKE_TIME=%s; " % sde_time
+
+ # Set hash_seed to generate deterministic directory indexes
+ namespace = uuid.UUID("e7429877-e7b3-4a68-a5c9-2f2fdf33d460")
+ if self.fsuuid:
+ namespace = uuid.UUID(self.fsuuid)
+ hash_seed = str(uuid.uuid5(namespace, str(sde_time)))
+ extraopts += " -E hash_seed=%s" % hash_seed
+
label_str = ""
if self.label:
label_str = "-L %s" % self.label
@@ -300,6 +318,30 @@ class Partition():
mkfs_cmd = "fsck.%s -pvfD %s" % (self.fstype, rootfs)
exec_native_cmd(mkfs_cmd, native_sysroot, pseudo=pseudo)
+ if os.getenv('SOURCE_DATE_EPOCH'):
+ sde_time = hex(int(os.getenv('SOURCE_DATE_EPOCH')))
+ debugfs_script_path = os.path.join(cr_workdir, "debugfs_script")
+ files = []
+ for root, dirs, others in os.walk(rootfs_dir):
+ base = root.replace(rootfs_dir, "").rstrip(os.sep)
+ files += [ "/" if base == "" else base ]
+ files += [ base + "/" + n for n in dirs + others ]
+ with open(debugfs_script_path, "w") as f:
+ f.write("set_current_time %s\n" % (sde_time))
+ if self.updated_fstab_path and self.has_fstab and not self.no_fstab_update:
+ f.write("set_inode_field /etc/fstab mtime %s\n" % (sde_time))
+ f.write("set_inode_field /etc/fstab mtime_extra 0\n")
+ for file in set(files):
+ for time in ["atime", "ctime", "crtime"]:
+ f.write("set_inode_field \"%s\" %s %s\n" % (file, time, sde_time))
+ f.write("set_inode_field \"%s\" %s_extra 0\n" % (file, time))
+ for time in ["wtime", "mkfs_time", "lastcheck"]:
+ f.write("set_super_value %s %s\n" % (time, sde_time))
+ for time in ["mtime", "first_error_time", "last_error_time"]:
+ f.write("set_super_value %s 0\n" % (time))
+ debugfs_cmd = "debugfs -w -f %s %s" % (debugfs_script_path, rootfs)
+ exec_native_cmd(debugfs_cmd, native_sysroot)
+
self.check_for_Y2038_problem(rootfs, native_sysroot)
def prepare_rootfs_btrfs(self, rootfs, cr_workdir, oe_builddir, rootfs_dir,
@@ -353,7 +395,7 @@ class Partition():
exec_native_cmd(mcopy_cmd, native_sysroot)
if self.updated_fstab_path and self.has_fstab and not self.no_fstab_update:
- mcopy_cmd = "mcopy -i %s %s ::/etc/fstab" % (rootfs, self.updated_fstab_path)
+ mcopy_cmd = "mcopy -m -i %s %s ::/etc/fstab" % (rootfs, self.updated_fstab_path)
exec_native_cmd(mcopy_cmd, native_sysroot)
chmod_cmd = "chmod 644 %s" % rootfs
@@ -381,6 +423,9 @@ class Partition():
(extraopts, self.fsuuid, rootfs, rootfs_dir)
exec_native_cmd(erofs_cmd, native_sysroot, pseudo=pseudo)
+ def prepare_empty_partition_none(self, rootfs, oe_builddir, native_sysroot):
+ pass
+
def prepare_empty_partition_ext(self, rootfs, oe_builddir,
native_sysroot):
"""
diff --git a/scripts/lib/wic/plugins/imager/direct.py b/scripts/lib/wic/plugins/imager/direct.py
index da483daed5..a1d152659b 100644
--- a/scripts/lib/wic/plugins/imager/direct.py
+++ b/scripts/lib/wic/plugins/imager/direct.py
@@ -117,7 +117,7 @@ class DirectPlugin(ImagerPlugin):
updated = False
for part in self.parts:
if not part.realnum or not part.mountpoint \
- or part.mountpoint == "/" or not part.mountpoint.startswith('/'):
+ or part.mountpoint == "/" or not (part.mountpoint.startswith('/') or part.mountpoint == "swap"):
continue
if part.use_uuid:
@@ -149,6 +149,9 @@ class DirectPlugin(ImagerPlugin):
self.updated_fstab_path = os.path.join(self.workdir, "fstab")
with open(self.updated_fstab_path, "w") as f:
f.writelines(fstab_lines)
+ if os.getenv('SOURCE_DATE_EPOCH'):
+ fstab_time = int(os.getenv('SOURCE_DATE_EPOCH'))
+ os.utime(self.updated_fstab_path, (fstab_time, fstab_time))
def _full_path(self, path, name, extention):
""" Construct full file path to a file we generate. """
@@ -310,7 +313,10 @@ class PartitionedImage():
# all partitions (in bytes)
self.ptable_format = ptable_format # Partition table format
# Disk system identifier
- self.identifier = random.SystemRandom().randint(1, 0xffffffff)
+ if os.getenv('SOURCE_DATE_EPOCH'):
+ self.identifier = random.Random(int(os.getenv('SOURCE_DATE_EPOCH'))).randint(1, 0xffffffff)
+ else:
+ self.identifier = random.SystemRandom().randint(1, 0xffffffff)
self.partitions = partitions
self.partimages = []
@@ -336,7 +342,7 @@ class PartitionedImage():
# generate parition and filesystem UUIDs
for part in self.partitions:
if not part.uuid and part.use_uuid:
- if self.ptable_format == 'gpt':
+ if self.ptable_format in ('gpt', 'gpt-hybrid'):
part.uuid = str(uuid.uuid4())
else: # msdos partition table
part.uuid = '%08x-%02d' % (self.identifier, part.realnum)
@@ -392,6 +398,10 @@ class PartitionedImage():
raise WicError("setting custom partition type is not " \
"implemented for msdos partitions")
+ if part.mbr and self.ptable_format != 'gpt-hybrid':
+ raise WicError("Partition may only be included in MBR with " \
+ "a gpt-hybrid partition table")
+
# Get the disk where the partition is located
self.numpart += 1
if not part.no_table:
@@ -400,7 +410,7 @@ class PartitionedImage():
if self.numpart == 1:
if self.ptable_format == "msdos":
overhead = MBR_OVERHEAD
- elif self.ptable_format == "gpt":
+ elif self.ptable_format in ("gpt", "gpt-hybrid"):
overhead = GPT_OVERHEAD
# Skip one sector required for the partitioning scheme overhead
@@ -484,7 +494,7 @@ class PartitionedImage():
# Once all the partitions have been layed out, we can calculate the
# minumim disk size
self.min_size = self.offset
- if self.ptable_format == "gpt":
+ if self.ptable_format in ("gpt", "gpt-hybrid"):
self.min_size += GPT_OVERHEAD
self.min_size *= self.sector_size
@@ -505,22 +515,49 @@ class PartitionedImage():
return exec_native_cmd(cmd, self.native_sysroot)
+ def _write_identifier(self, device, identifier):
+ logger.debug("Set disk identifier %x", identifier)
+ with open(device, 'r+b') as img:
+ img.seek(0x1B8)
+ img.write(identifier.to_bytes(4, 'little'))
+
+ def _make_disk(self, device, ptable_format, min_size):
+ logger.debug("Creating sparse file %s", device)
+ with open(device, 'w') as sparse:
+ os.ftruncate(sparse.fileno(), min_size)
+
+ logger.debug("Initializing partition table for %s", device)
+ exec_native_cmd("parted -s %s mklabel %s" % (device, ptable_format),
+ self.native_sysroot)
+
+ def _write_disk_guid(self):
+ if self.ptable_format in ('gpt', 'gpt-hybrid'):
+ if os.getenv('SOURCE_DATE_EPOCH'):
+ self.disk_guid = uuid.UUID(int=int(os.getenv('SOURCE_DATE_EPOCH')))
+ else:
+ self.disk_guid = uuid.uuid4()
+
+ logger.debug("Set disk guid %s", self.disk_guid)
+ sfdisk_cmd = "sfdisk --disk-id %s %s" % (self.path, self.disk_guid)
+ exec_native_cmd(sfdisk_cmd, self.native_sysroot)
+
def create(self):
- logger.debug("Creating sparse file %s", self.path)
- with open(self.path, 'w') as sparse:
- os.ftruncate(sparse.fileno(), self.min_size)
+ self._make_disk(self.path,
+ "gpt" if self.ptable_format == "gpt-hybrid" else self.ptable_format,
+ self.min_size)
- logger.debug("Initializing partition table for %s", self.path)
- exec_native_cmd("parted -s %s mklabel %s" %
- (self.path, self.ptable_format), self.native_sysroot)
+ self._write_identifier(self.path, self.identifier)
+ self._write_disk_guid()
- logger.debug("Set disk identifier %x", self.identifier)
- with open(self.path, 'r+b') as img:
- img.seek(0x1B8)
- img.write(self.identifier.to_bytes(4, 'little'))
+ if self.ptable_format == "gpt-hybrid":
+ mbr_path = self.path + ".mbr"
+ self._make_disk(mbr_path, "msdos", self.min_size)
+ self._write_identifier(mbr_path, self.identifier)
logger.debug("Creating partitions")
+ hybrid_mbr_part_num = 0
+
for part in self.partitions:
if part.num == 0:
continue
@@ -565,11 +602,19 @@ class PartitionedImage():
self._create_partition(self.path, part.type,
parted_fs_type, part.start, part.size_sec)
- if part.part_name:
+ if self.ptable_format == "gpt-hybrid" and part.mbr:
+ hybrid_mbr_part_num += 1
+ if hybrid_mbr_part_num > 4:
+ raise WicError("Extended MBR partitions are not supported in hybrid MBR")
+ self._create_partition(mbr_path, "primary",
+ parted_fs_type, part.start, part.size_sec)
+
+ if self.ptable_format in ("gpt", "gpt-hybrid") and (part.part_name or part.label):
+ partition_label = part.part_name if part.part_name else part.label
logger.debug("partition %d: set name to %s",
- part.num, part.part_name)
+ part.num, partition_label)
exec_native_cmd("sgdisk --change-name=%d:%s %s" % \
- (part.num, part.part_name,
+ (part.num, partition_label,
self.path), self.native_sysroot)
if part.part_type:
@@ -579,32 +624,55 @@ class PartitionedImage():
(part.num, part.part_type,
self.path), self.native_sysroot)
- if part.uuid and self.ptable_format == "gpt":
+ if part.uuid and self.ptable_format in ("gpt", "gpt-hybrid"):
logger.debug("partition %d: set UUID to %s",
part.num, part.uuid)
exec_native_cmd("sgdisk --partition-guid=%d:%s %s" % \
(part.num, part.uuid, self.path),
self.native_sysroot)
- if part.label and self.ptable_format == "gpt":
- logger.debug("partition %d: set name to %s",
- part.num, part.label)
- exec_native_cmd("parted -s %s name %d %s" % \
- (self.path, part.num, part.label),
- self.native_sysroot)
-
if part.active:
- flag_name = "legacy_boot" if self.ptable_format == 'gpt' else "boot"
+ flag_name = "legacy_boot" if self.ptable_format in ('gpt', 'gpt-hybrid') else "boot"
logger.debug("Set '%s' flag for partition '%s' on disk '%s'",
flag_name, part.num, self.path)
exec_native_cmd("parted -s %s set %d %s on" % \
(self.path, part.num, flag_name),
self.native_sysroot)
+ if self.ptable_format == 'gpt-hybrid' and part.mbr:
+ exec_native_cmd("parted -s %s set %d %s on" % \
+ (mbr_path, hybrid_mbr_part_num, "boot"),
+ self.native_sysroot)
if part.system_id:
exec_native_cmd("sfdisk --part-type %s %s %s" % \
(self.path, part.num, part.system_id),
self.native_sysroot)
+ if part.hidden and self.ptable_format == "gpt":
+ logger.debug("Set hidden attribute for partition '%s' on disk '%s'",
+ part.num, self.path)
+ exec_native_cmd("sfdisk --part-attrs %s %s RequiredPartition" % \
+ (self.path, part.num),
+ self.native_sysroot)
+
+ if self.ptable_format == "gpt-hybrid":
+ # Write a protective GPT partition
+ hybrid_mbr_part_num += 1
+ if hybrid_mbr_part_num > 4:
+ raise WicError("Extended MBR partitions are not supported in hybrid MBR")
+
+ # parted cannot directly create a protective GPT partition, so
+ # create with an arbitrary type, then change it to the correct type
+ # with sfdisk
+ self._create_partition(mbr_path, "primary", "fat32", 1, GPT_OVERHEAD)
+ exec_native_cmd("sfdisk --part-type %s %d 0xee" % (mbr_path, hybrid_mbr_part_num),
+ self.native_sysroot)
+
+ # Copy hybrid MBR
+ with open(mbr_path, "rb") as mbr_file:
+ with open(self.path, "r+b") as image_file:
+ mbr = mbr_file.read(512)
+ image_file.write(mbr)
+
def cleanup(self):
pass
diff --git a/scripts/lib/wic/plugins/source/bootimg-efi.py b/scripts/lib/wic/plugins/source/bootimg-efi.py
index 0391aebdc8..13a9cddf4e 100644
--- a/scripts/lib/wic/plugins/source/bootimg-efi.py
+++ b/scripts/lib/wic/plugins/source/bootimg-efi.py
@@ -35,6 +35,26 @@ class BootimgEFIPlugin(SourcePlugin):
name = 'bootimg-efi'
@classmethod
+ def _copy_additional_files(cls, hdddir, initrd, dtb):
+ bootimg_dir = get_bitbake_var("DEPLOY_DIR_IMAGE")
+ if not bootimg_dir:
+ raise WicError("Couldn't find DEPLOY_DIR_IMAGE, exiting")
+
+ if initrd:
+ initrds = initrd.split(';')
+ for rd in initrds:
+ cp_cmd = "cp %s/%s %s" % (bootimg_dir, rd, hdddir)
+ exec_cmd(cp_cmd, True)
+ else:
+ logger.debug("Ignoring missing initrd")
+
+ if dtb:
+ if ';' in dtb:
+ raise WicError("Only one DTB supported, exiting")
+ cp_cmd = "cp %s/%s %s" % (bootimg_dir, dtb, hdddir)
+ exec_cmd(cp_cmd, True)
+
+ @classmethod
def do_configure_grubefi(cls, hdddir, creator, cr_workdir, source_params):
"""
Create loader-specific (grub-efi) config
@@ -53,18 +73,9 @@ class BootimgEFIPlugin(SourcePlugin):
"get it from %s." % configfile)
initrd = source_params.get('initrd')
+ dtb = source_params.get('dtb')
- if initrd:
- bootimg_dir = get_bitbake_var("DEPLOY_DIR_IMAGE")
- if not bootimg_dir:
- raise WicError("Couldn't find DEPLOY_DIR_IMAGE, exiting")
-
- initrds = initrd.split(';')
- for rd in initrds:
- cp_cmd = "cp %s/%s %s" % (bootimg_dir, rd, hdddir)
- exec_cmd(cp_cmd, True)
- else:
- logger.debug("Ignoring missing initrd")
+ cls._copy_additional_files(hdddir, initrd, dtb)
if not custom_cfg:
# Create grub configuration using parameters from wks file
@@ -98,6 +109,9 @@ class BootimgEFIPlugin(SourcePlugin):
grubefi_conf += " /%s" % rd
grubefi_conf += "\n"
+ if dtb:
+ grubefi_conf += "devicetree /%s\n" % dtb
+
grubefi_conf += "}\n"
logger.debug("Writing grubefi config %s/hdd/boot/EFI/BOOT/grub.cfg",
@@ -119,25 +133,18 @@ class BootimgEFIPlugin(SourcePlugin):
bootloader = creator.ks.bootloader
+ unified_image = source_params.get('create-unified-kernel-image') == "true"
+
loader_conf = ""
- if source_params.get('create-unified-kernel-image') != "true":
+ if not unified_image:
loader_conf += "default boot\n"
loader_conf += "timeout %d\n" % bootloader.timeout
initrd = source_params.get('initrd')
+ dtb = source_params.get('dtb')
- if initrd and source_params.get('create-unified-kernel-image') != "true":
- # obviously we need to have a common common deploy var
- bootimg_dir = get_bitbake_var("DEPLOY_DIR_IMAGE")
- if not bootimg_dir:
- raise WicError("Couldn't find DEPLOY_DIR_IMAGE, exiting")
-
- initrds = initrd.split(';')
- for rd in initrds:
- cp_cmd = "cp %s/%s %s" % (bootimg_dir, rd, hdddir)
- exec_cmd(cp_cmd, True)
- else:
- logger.debug("Ignoring missing initrd")
+ if not unified_image:
+ cls._copy_additional_files(hdddir, initrd, dtb)
logger.debug("Writing systemd-boot config "
"%s/hdd/boot/loader/loader.conf", cr_workdir)
@@ -185,7 +192,10 @@ class BootimgEFIPlugin(SourcePlugin):
for rd in initrds:
boot_conf += "initrd /%s\n" % rd
- if source_params.get('create-unified-kernel-image') != "true":
+ if dtb:
+ boot_conf += "devicetree /%s\n" % dtb
+
+ if not unified_image:
logger.debug("Writing systemd-boot config "
"%s/hdd/boot/loader/entries/boot.conf", cr_workdir)
cfg = open("%s/hdd/boot/loader/entries/boot.conf" % cr_workdir, "w")
@@ -210,6 +220,8 @@ class BootimgEFIPlugin(SourcePlugin):
cls.do_configure_grubefi(hdddir, creator, cr_workdir, source_params)
elif source_params['loader'] == 'systemd-boot':
cls.do_configure_systemdboot(hdddir, creator, cr_workdir, source_params)
+ elif source_params['loader'] == 'uefi-kernel':
+ pass
else:
raise WicError("unrecognized bootimg-efi loader: %s" % source_params['loader'])
except KeyError:
@@ -326,25 +338,72 @@ class BootimgEFIPlugin(SourcePlugin):
exec_cmd(install_cmd)
staging_dir_host = get_bitbake_var("STAGING_DIR_HOST")
+ target_sys = get_bitbake_var("TARGET_SYS")
+
+ objdump_cmd = "%s-objdump" % target_sys
+ objdump_cmd += " -p %s" % efi_stub
+ objdump_cmd += " | awk '{ if ($1 == \"SectionAlignment\"){print $2} }'"
+
+ ret, align_str = exec_native_cmd(objdump_cmd, native_sysroot)
+ align = int(align_str, 16)
+
+ objdump_cmd = "%s-objdump" % target_sys
+ objdump_cmd += " -h %s | tail -2" % efi_stub
+ ret, output = exec_native_cmd(objdump_cmd, native_sysroot)
+
+ offset = int(output.split()[2], 16) + int(output.split()[3], 16)
+
+ osrel_off = offset + align - offset % align
+ osrel_path = "%s/usr/lib/os-release" % staging_dir_host
+ osrel_sz = os.stat(osrel_path).st_size
+
+ cmdline_off = osrel_off + osrel_sz
+ cmdline_off = cmdline_off + align - cmdline_off % align
+ cmdline_sz = os.stat(cmdline.name).st_size
+
+ dtb_off = cmdline_off + cmdline_sz
+ dtb_off = dtb_off + align - dtb_off % align
+
+ dtb = source_params.get('dtb')
+ if dtb:
+ if ';' in dtb:
+ raise WicError("Only one DTB supported, exiting")
+ dtb_path = "%s/%s" % (deploy_dir, dtb)
+ dtb_params = '--add-section .dtb=%s --change-section-vma .dtb=0x%x' % \
+ (dtb_path, dtb_off)
+ linux_off = dtb_off + os.stat(dtb_path).st_size
+ linux_off = linux_off + align - linux_off % align
+ else:
+ dtb_params = ''
+ linux_off = dtb_off
+
+ linux_path = "%s/%s" % (staging_kernel_dir, kernel)
+ linux_sz = os.stat(linux_path).st_size
+
+ initrd_off = linux_off + linux_sz
+ initrd_off = initrd_off + align - initrd_off % align
# https://www.freedesktop.org/software/systemd/man/systemd-stub.html
- objcopy_cmd = "objcopy \
- --add-section .osrel=%s --change-section-vma .osrel=0x20000 \
- --add-section .cmdline=%s --change-section-vma .cmdline=0x30000 \
- --add-section .linux=%s --change-section-vma .linux=0x2000000 \
- --add-section .initrd=%s --change-section-vma .initrd=0x3000000 \
- %s %s" % \
- ("%s/usr/lib/os-release" % staging_dir_host,
- cmdline.name,
- "%s/%s" % (staging_kernel_dir, kernel),
- initrd.name,
- efi_stub,
- "%s/EFI/Linux/linux.efi" % hdddir)
- exec_cmd(objcopy_cmd)
+ objcopy_cmd = "%s-objcopy" % target_sys
+ objcopy_cmd += " --enable-deterministic-archives"
+ objcopy_cmd += " --preserve-dates"
+ objcopy_cmd += " --add-section .osrel=%s" % osrel_path
+ objcopy_cmd += " --change-section-vma .osrel=0x%x" % osrel_off
+ objcopy_cmd += " --add-section .cmdline=%s" % cmdline.name
+ objcopy_cmd += " --change-section-vma .cmdline=0x%x" % cmdline_off
+ objcopy_cmd += dtb_params
+ objcopy_cmd += " --add-section .linux=%s" % linux_path
+ objcopy_cmd += " --change-section-vma .linux=0x%x" % linux_off
+ objcopy_cmd += " --add-section .initrd=%s" % initrd.name
+ objcopy_cmd += " --change-section-vma .initrd=0x%x" % initrd_off
+ objcopy_cmd += " %s %s/EFI/Linux/linux.efi" % (efi_stub, hdddir)
+
+ exec_native_cmd(objcopy_cmd, native_sysroot)
else:
- install_cmd = "install -m 0644 %s/%s %s/%s" % \
- (staging_kernel_dir, kernel, hdddir, kernel)
- exec_cmd(install_cmd)
+ if source_params.get('install-kernel-into-boot-dir') != 'false':
+ install_cmd = "install -m 0644 %s/%s %s/%s" % \
+ (staging_kernel_dir, kernel, hdddir, kernel)
+ exec_cmd(install_cmd)
if get_bitbake_var("IMAGE_EFI_BOOT_FILES"):
for src_path, dst_path in cls.install_task:
@@ -366,6 +425,28 @@ class BootimgEFIPlugin(SourcePlugin):
for mod in [x for x in os.listdir(kernel_dir) if x.startswith("systemd-")]:
cp_cmd = "cp %s/%s %s/EFI/BOOT/%s" % (kernel_dir, mod, hdddir, mod[8:])
exec_cmd(cp_cmd, True)
+ elif source_params['loader'] == 'uefi-kernel':
+ kernel = get_bitbake_var("KERNEL_IMAGETYPE")
+ if not kernel:
+ raise WicError("Empty KERNEL_IMAGETYPE %s\n" % target)
+ target = get_bitbake_var("TARGET_SYS")
+ if not target:
+ raise WicError("Unknown arch (TARGET_SYS) %s\n" % target)
+
+ if re.match("x86_64", target):
+ kernel_efi_image = "bootx64.efi"
+ elif re.match('i.86', target):
+ kernel_efi_image = "bootia32.efi"
+ elif re.match('aarch64', target):
+ kernel_efi_image = "bootaa64.efi"
+ elif re.match('arm', target):
+ kernel_efi_image = "bootarm.efi"
+ else:
+ raise WicError("UEFI stub kernel is incompatible with target %s" % target)
+
+ for mod in [x for x in os.listdir(kernel_dir) if x.startswith(kernel)]:
+ cp_cmd = "cp %s/%s %s/EFI/BOOT/%s" % (kernel_dir, mod, hdddir, kernel_efi_image)
+ exec_cmd(cp_cmd, True)
else:
raise WicError("unrecognized bootimg-efi loader: %s" %
source_params['loader'])
@@ -377,6 +458,11 @@ class BootimgEFIPlugin(SourcePlugin):
cp_cmd = "cp %s %s/" % (startup, hdddir)
exec_cmd(cp_cmd, True)
+ for paths in part.include_path or []:
+ for path in paths:
+ cp_cmd = "cp -r %s %s/" % (path, hdddir)
+ exec_cmd(cp_cmd, True)
+
du_cmd = "du -bks %s" % hdddir
out = exec_cmd(du_cmd)
blocks = int(out.split()[0])
@@ -391,6 +477,13 @@ class BootimgEFIPlugin(SourcePlugin):
logger.debug("Added %d extra blocks to %s to get to %d total blocks",
extra_blocks, part.mountpoint, blocks)
+ # required for compatibility with certain devices expecting file system
+ # block count to be equal to partition block count
+ if blocks < part.fixed_size:
+ blocks = part.fixed_size
+ logger.debug("Overriding %s to %d total blocks for compatibility",
+ part.mountpoint, blocks)
+
# dosfs image, created by mkdosfs
bootimg = "%s/boot.img" % cr_workdir
diff --git a/scripts/lib/wic/plugins/source/bootimg-partition.py b/scripts/lib/wic/plugins/source/bootimg-partition.py
index 5dbe2558d2..1071d1af3f 100644
--- a/scripts/lib/wic/plugins/source/bootimg-partition.py
+++ b/scripts/lib/wic/plugins/source/bootimg-partition.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: GPL-2.0-only
#
# DESCRIPTION
@@ -30,6 +32,7 @@ class BootimgPartitionPlugin(SourcePlugin):
"""
name = 'bootimg-partition'
+ image_boot_files_var_name = 'IMAGE_BOOT_FILES'
@classmethod
def do_configure_partition(cls, part, source_params, cr, cr_workdir,
@@ -54,12 +57,12 @@ class BootimgPartitionPlugin(SourcePlugin):
else:
var = ""
- boot_files = get_bitbake_var("IMAGE_BOOT_FILES" + var)
+ boot_files = get_bitbake_var(cls.image_boot_files_var_name + var)
if boot_files is not None:
break
if boot_files is None:
- raise WicError('No boot files defined, IMAGE_BOOT_FILES unset for entry #%d' % part.lineno)
+ raise WicError('No boot files defined, %s unset for entry #%d' % (cls.image_boot_files_var_name, part.lineno))
logger.debug('Boot files: %s', boot_files)
@@ -110,7 +113,7 @@ class BootimgPartitionPlugin(SourcePlugin):
# Use a custom configuration for extlinux.conf
extlinux_conf = custom_cfg
logger.debug("Using custom configuration file "
- "%s for extlinux.cfg", configfile)
+ "%s for extlinux.conf", configfile)
else:
raise WicError("configfile is specified but failed to "
"get it from %s." % configfile)
diff --git a/scripts/lib/wic/plugins/source/bootimg-pcbios.py b/scripts/lib/wic/plugins/source/bootimg-pcbios.py
index 32e47f1831..a207a83530 100644
--- a/scripts/lib/wic/plugins/source/bootimg-pcbios.py
+++ b/scripts/lib/wic/plugins/source/bootimg-pcbios.py
@@ -122,7 +122,7 @@ class BootimgPcbiosPlugin(SourcePlugin):
syslinux_conf += "DEFAULT boot\n"
syslinux_conf += "LABEL boot\n"
- kernel = "/vmlinuz"
+ kernel = "/" + get_bitbake_var("KERNEL_IMAGETYPE")
syslinux_conf += "KERNEL " + kernel + "\n"
syslinux_conf += "APPEND label=boot root=%s %s\n" % \
@@ -155,8 +155,8 @@ class BootimgPcbiosPlugin(SourcePlugin):
kernel = "%s-%s.bin" % \
(get_bitbake_var("KERNEL_IMAGETYPE"), get_bitbake_var("INITRAMFS_LINK_NAME"))
- cmds = ("install -m 0644 %s/%s %s/vmlinuz" %
- (staging_kernel_dir, kernel, hdddir),
+ cmds = ("install -m 0644 %s/%s %s/%s" %
+ (staging_kernel_dir, kernel, hdddir, get_bitbake_var("KERNEL_IMAGETYPE")),
"install -m 444 %s/syslinux/ldlinux.sys %s/ldlinux.sys" %
(bootimg_dir, hdddir),
"install -m 0644 %s/syslinux/vesamenu.c32 %s/vesamenu.c32" %
diff --git a/scripts/lib/wic/plugins/source/empty.py b/scripts/lib/wic/plugins/source/empty.py
index 041617d648..4178912377 100644
--- a/scripts/lib/wic/plugins/source/empty.py
+++ b/scripts/lib/wic/plugins/source/empty.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
@@ -7,9 +9,19 @@
# To use it you must pass "empty" as argument for the "--source" parameter in
# the wks file. For example:
# part foo --source empty --ondisk sda --size="1024" --align 1024
+#
+# The plugin supports writing zeros to the start of the
+# partition. This is useful to overwrite old content like
+# filesystem signatures which may be re-recognized otherwise.
+# This feature can be enabled with
+# '--sourceparams="[fill|size=<N>[S|s|K|k|M|G]][,][bs=<N>[S|s|K|k|M|G]]"'
+# Conflicting or missing options throw errors.
import logging
+import os
+from wic import WicError
+from wic.ksparser import sizetype
from wic.pluginbase import SourcePlugin
logger = logging.getLogger('wic')
@@ -17,6 +29,16 @@ logger = logging.getLogger('wic')
class EmptyPartitionPlugin(SourcePlugin):
"""
Populate unformatted empty partition.
+
+ The following sourceparams are supported:
+ - fill
+ Fill the entire partition with zeros. Requires '--fixed-size' option
+ to be set.
+ - size=<N>[S|s|K|k|M|G]
+ Set the first N bytes of the partition to zero. Default unit is 'K'.
+ - bs=<N>[S|s|K|k|M|G]
+ Write at most N bytes at a time during source file creation.
+ Defaults to '1M'. Default unit is 'K'.
"""
name = 'empty'
@@ -29,4 +51,39 @@ class EmptyPartitionPlugin(SourcePlugin):
Called to do the actual content population for a partition i.e. it
'prepares' the partition to be incorporated into the image.
"""
- return
+ get_byte_count = sizetype('K', True)
+ size = 0
+
+ if 'fill' in source_params and 'size' in source_params:
+ raise WicError("Conflicting source parameters 'fill' and 'size' specified, exiting.")
+
+ # Set the size of the zeros to be written to the partition
+ if 'fill' in source_params:
+ if part.fixed_size == 0:
+ raise WicError("Source parameter 'fill' only works with the '--fixed-size' option, exiting.")
+ size = get_byte_count(part.fixed_size)
+ elif 'size' in source_params:
+ size = get_byte_count(source_params['size'])
+
+ if size == 0:
+ # Nothing to do, create empty partition
+ return
+
+ if 'bs' in source_params:
+ bs = get_byte_count(source_params['bs'])
+ else:
+ bs = get_byte_count('1M')
+
+ # Create a binary file of the requested size filled with zeros
+ source_file = os.path.join(cr_workdir, 'empty-plugin-zeros%s.bin' % part.lineno)
+ if not os.path.exists(os.path.dirname(source_file)):
+ os.makedirs(os.path.dirname(source_file))
+
+ quotient, remainder = divmod(size, bs)
+ with open(source_file, 'wb') as file:
+ for _ in range(quotient):
+ file.write(bytearray(bs))
+ file.write(bytearray(remainder))
+
+ part.size = (size + 1024 - 1) // 1024 # size in KB rounded up
+ part.source_file = source_file
diff --git a/scripts/lib/wic/plugins/source/isoimage-isohybrid.py b/scripts/lib/wic/plugins/source/isoimage-isohybrid.py
index afc9ea0f8f..607356ad13 100644
--- a/scripts/lib/wic/plugins/source/isoimage-isohybrid.py
+++ b/scripts/lib/wic/plugins/source/isoimage-isohybrid.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: GPL-2.0-only
#
# DESCRIPTION
diff --git a/scripts/lib/wic/plugins/source/rawcopy.py b/scripts/lib/wic/plugins/source/rawcopy.py
index 7c90cd3cf8..21903c2f23 100644
--- a/scripts/lib/wic/plugins/source/rawcopy.py
+++ b/scripts/lib/wic/plugins/source/rawcopy.py
@@ -1,4 +1,6 @@
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: GPL-2.0-only
#
@@ -23,6 +25,10 @@ class RawCopyPlugin(SourcePlugin):
@staticmethod
def do_image_label(fstype, dst, label):
+ # don't create label when fstype is none
+ if fstype == 'none':
+ return
+
if fstype.startswith('ext'):
cmd = 'tune2fs -L %s %s' % (label, dst)
elif fstype in ('msdos', 'vfat'):
@@ -52,7 +58,8 @@ class RawCopyPlugin(SourcePlugin):
decompressor = {
".bz2": "bzip2",
".gz": "gzip",
- ".xz": "xz"
+ ".xz": "xz",
+ ".zst": "zstd -f",
}.get(extension)
if not decompressor:
raise WicError("Not supported compressor filename extension: %s" % extension)
diff --git a/scripts/lib/wic/plugins/source/rootfs.py b/scripts/lib/wic/plugins/source/rootfs.py
index 25bb41dd70..e29f3a4c2f 100644
--- a/scripts/lib/wic/plugins/source/rootfs.py
+++ b/scripts/lib/wic/plugins/source/rootfs.py
@@ -35,7 +35,7 @@ class RootfsPlugin(SourcePlugin):
@staticmethod
def __validate_path(cmd, rootfs_dir, path):
if os.path.isabs(path):
- logger.error("%s: Must be relative: %s" % (cmd, orig_path))
+ logger.error("%s: Must be relative: %s" % (cmd, path))
sys.exit(1)
# Disallow climbing outside of parent directory using '..',
@@ -224,7 +224,7 @@ class RootfsPlugin(SourcePlugin):
if part.update_fstab_in_rootfs and part.has_fstab and not part.no_fstab_update:
fstab_path = os.path.join(new_rootfs, "etc/fstab")
# Assume that fstab should always be owned by root with fixed permissions
- install_cmd = "install -m 0644 %s %s" % (part.updated_fstab_path, fstab_path)
+ install_cmd = "install -m 0644 -p %s %s" % (part.updated_fstab_path, fstab_path)
if new_pseudo:
pseudo = cls.__get_pseudo(native_sysroot, new_rootfs, new_pseudo)
else:
diff --git a/scripts/nativesdk-intercept/chgrp b/scripts/nativesdk-intercept/chgrp
index 30cc417d3a..f8ae84b8b3 100755
--- a/scripts/nativesdk-intercept/chgrp
+++ b/scripts/nativesdk-intercept/chgrp
@@ -14,7 +14,10 @@ real_chgrp = shutil.which('chgrp', path=path)
args = list()
found = False
-for i in sys.argv:
+
+args.append(real_chgrp)
+
+for i in sys.argv[1:]:
if i.startswith("-"):
args.append(i)
continue
diff --git a/scripts/nativesdk-intercept/chown b/scripts/nativesdk-intercept/chown
index 3914b3e384..0805ceb70a 100755
--- a/scripts/nativesdk-intercept/chown
+++ b/scripts/nativesdk-intercept/chown
@@ -14,7 +14,10 @@ real_chown = shutil.which('chown', path=path)
args = list()
found = False
-for i in sys.argv:
+
+args.append(real_chown)
+
+for i in sys.argv[1:]:
if i.startswith("-"):
args.append(i)
continue
diff --git a/scripts/oe-buildenv-internal b/scripts/oe-buildenv-internal
index 485d4c52e1..2fdb19565a 100755
--- a/scripts/oe-buildenv-internal
+++ b/scripts/oe-buildenv-internal
@@ -32,12 +32,12 @@ fi
# We potentially have code that doesn't parse correctly with older versions
# of Python, and rather than fixing that and being eternally vigilant for
# any other new feature use, just check the version here.
-py_v35_check=$(python3 -c 'import sys; print(sys.version_info >= (3,5,0))')
-if [ "$py_v35_check" != "True" ]; then
- echo >&2 "BitBake requires Python 3.5.0 or later as 'python3 (scripts/install-buildtools can be used if needed)'"
+py_v38_check=$(python3 -c 'import sys; print(sys.version_info >= (3,8,0))')
+if [ "$py_v38_check" != "True" ]; then
+ echo >&2 "BitBake requires Python 3.8.0 or later as 'python3' (scripts/install-buildtools can be used if needed)"
return 1
fi
-unset py_v35_check
+unset py_v38_check
if [ -z "$BDIR" ]; then
if [ -z "$1" ]; then
@@ -92,19 +92,20 @@ fi
PYTHONPATH=$BITBAKEDIR/lib:$PYTHONPATH
export PYTHONPATH
+# Remove any paths added by sourcing this script before
+[ -n "$OE_ADDED_PATHS" ] && PATH=$(echo $PATH | sed -e "s#$OE_ADDED_PATHS##") ||
+ PATH=$(echo $PATH | sed -e "s#$OEROOT/scripts:$BITBAKEDIR/bin:##")
+
# Make sure our paths are at the beginning of $PATH
-for newpath in "$BITBAKEDIR/bin" "$OEROOT/scripts"; do
- # Remove any existences of $newpath from $PATH
- PATH=$(echo $PATH | sed -re "s#(^|:)$newpath(:|$)#\2#g;s#^:##")
+OE_ADDED_PATHS="$OEROOT/scripts:$BITBAKEDIR/bin:"
+PATH="$OE_ADDED_PATHS$PATH"
+export OE_ADDED_PATHS
- # Add $newpath to $PATH
- PATH="$newpath:$PATH"
-done
-unset BITBAKEDIR newpath
+# This is not needed anymore
+unset BITBAKEDIR
# Used by the runqemu script
export BUILDDIR
-export PATH
BB_ENV_PASSTHROUGH_ADDITIONS_OE="MACHINE DISTRO TCMODE TCLIBC HTTP_PROXY http_proxy \
HTTPS_PROXY https_proxy FTP_PROXY ftp_proxy FTPS_PROXY ftps_proxy ALL_PROXY \
diff --git a/scripts/oe-check-sstate b/scripts/oe-check-sstate
index f4cc5869de..0d171c4463 100755
--- a/scripts/oe-check-sstate
+++ b/scripts/oe-check-sstate
@@ -18,7 +18,6 @@ import re
scripts_path = os.path.dirname(os.path.realpath(__file__))
lib_path = scripts_path + '/lib'
sys.path = sys.path + [lib_path]
-import scriptutils
import scriptpath
scriptpath.add_bitbake_lib_path()
import argparse_oe
@@ -51,13 +50,10 @@ def check(args):
env['TMPDIR:forcevariable'] = tmpdir
try:
- output = subprocess.check_output(
- 'bitbake -n %s' % ' '.join(args.target),
- stderr=subprocess.STDOUT,
- env=env,
- shell=True)
+ cmd = ['bitbake', '--dry-run', '--runall=build'] + args.target
+ output = subprocess.check_output(cmd, stderr=subprocess.STDOUT, env=env)
- task_re = re.compile('NOTE: Running setscene task [0-9]+ of [0-9]+ \(([^)]+)\)')
+ task_re = re.compile(r'NOTE: Running setscene task [0-9]+ of [0-9]+ \(([^)]+)\)')
tasks = []
for line in output.decode('utf-8').splitlines():
res = task_re.match(line)
diff --git a/scripts/oe-debuginfod b/scripts/oe-debuginfod
index 9e5482d869..b525310225 100755
--- a/scripts/oe-debuginfod
+++ b/scripts/oe-debuginfod
@@ -1,5 +1,7 @@
#!/usr/bin/env python3
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: MIT
#
diff --git a/scripts/oe-depends-dot b/scripts/oe-depends-dot
index 5eb3e12769..d02ee455f6 100755
--- a/scripts/oe-depends-dot
+++ b/scripts/oe-depends-dot
@@ -14,8 +14,8 @@ import re
class Dot(object):
def __init__(self):
parser = argparse.ArgumentParser(
- description="Analyse recipe-depends.dot generated by bitbake -g",
- epilog="Use %(prog)s --help to get help")
+ description="Analyse task-depends.dot generated by bitbake -g",
+ formatter_class=argparse.RawDescriptionHelpFormatter)
parser.add_argument("dotfile",
help = "Specify the dotfile", nargs = 1, action='store', default='')
parser.add_argument("-k", "--key",
@@ -32,6 +32,21 @@ class Dot(object):
" For example, A->B, B->C, A->C, then A->C can be removed.",
action="store_true", default=False)
+ parser.epilog = """
+Examples:
+First generate the .dot file:
+ bitbake -g core-image-minimal
+
+To find out why a package is being built:
+ %(prog)s -k <package> -w ./task-depends.dot
+
+To find out what a package depends on:
+ %(prog)s -k <package> -d ./task-depends.dot
+
+Reduce the .dot file packages only, no tasks:
+ %(prog)s -r ./task-depends.dot
+"""
+
self.args = parser.parse_args()
if len(sys.argv) != 3 and len(sys.argv) < 5:
@@ -99,6 +114,10 @@ class Dot(object):
if key == "meta-world-pkgdata":
continue
dep = m.group(2)
+ key = key.split('.')[0]
+ dep = dep.split('.')[0]
+ if key == dep:
+ continue
if key in depends:
if not key in depends[key]:
depends[key].add(dep)
@@ -140,9 +159,14 @@ class Dot(object):
reverse_deps = []
if self.args.why:
- for k, v in depends.items():
- if self.args.key in v and not k in reverse_deps:
- reverse_deps.append(k)
+ key_list = [self.args.key]
+ current_key = self.args.key
+ while (len(key_list) != 0):
+ current_key = key_list.pop()
+ for k, v in depends.items():
+ if current_key in v and not k in reverse_deps:
+ reverse_deps.append(k)
+ key_list.append(k)
print('Because: %s' % ' '.join(reverse_deps))
Dot.print_dep_chains(self.args.key, reverse_deps, depends)
diff --git a/scripts/oe-find-native-sysroot b/scripts/oe-find-native-sysroot
index 5146bbf999..6228efcbee 100755
--- a/scripts/oe-find-native-sysroot
+++ b/scripts/oe-find-native-sysroot
@@ -36,20 +36,9 @@ if [ "$1" = '--help' -o "$1" = '-h' -o $# -ne 1 ] ; then
fi
# Global vars
-BITBAKE_E=""
set_oe_native_sysroot(){
- echo "Running bitbake -e $1"
- BITBAKE_E="`bitbake -e $1`"
- OECORE_NATIVE_SYSROOT=`echo "$BITBAKE_E" | grep ^STAGING_DIR_NATIVE= | cut -d '"' -f2`
-
- if [ "x$OECORE_NATIVE_SYSROOT" = "x" ]; then
- # This indicates that there was an error running bitbake -e that
- # the user needs to be informed of
- echo "There was an error running bitbake to determine STAGING_DIR_NATIVE"
- echo "Here is the output from bitbake -e $1"
- echo $BITBAKE_E
- exit 1
- fi
+ echo "Getting sysroot..."
+ OECORE_NATIVE_SYSROOT=$(bitbake-getvar -r $1 --value STAGING_DIR_NATIVE)
}
if [ "x$OECORE_NATIVE_SYSROOT" = "x" ]; then
diff --git a/scripts/oe-gnome-terminal-phonehome b/scripts/oe-gnome-terminal-phonehome
index b6b9a3867b..1352a9872b 100755
--- a/scripts/oe-gnome-terminal-phonehome
+++ b/scripts/oe-gnome-terminal-phonehome
@@ -1,5 +1,7 @@
#!/bin/sh
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: GPL-2.0-only
#
# Gnome terminal won't tell us which PID a given command is run as
diff --git a/scripts/oe-pkgdata-browser b/scripts/oe-pkgdata-browser
index a3a381923b..c152c82b25 100755
--- a/scripts/oe-pkgdata-browser
+++ b/scripts/oe-pkgdata-browser
@@ -1,4 +1,9 @@
#! /usr/bin/env python3
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
import os, sys, enum, ast
diff --git a/scripts/oe-pkgdata-util b/scripts/oe-pkgdata-util
index 7412cc1f47..44ae40549a 100755
--- a/scripts/oe-pkgdata-util
+++ b/scripts/oe-pkgdata-util
@@ -296,7 +296,7 @@ def package_info(args):
extra = ''
for line in f:
for var in vars:
- m = re.match(var + '(?::\S+)?:\s*(.+?)\s*$', line)
+ m = re.match(var + r'(?::\S+)?:\s*(.+?)\s*$', line)
if m:
vals[var] = m.group(1)
pkg_version = vals['PKGV'] or ''
diff --git a/scripts/oe-pylint b/scripts/oe-pylint
index 7cc1ccb010..5ad72838e9 100755
--- a/scripts/oe-pylint
+++ b/scripts/oe-pylint
@@ -1,5 +1,7 @@
#!/bin/bash
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: GPL-2.0-only
#
# Run the pylint3 against our common python module spaces and print a report of potential issues
diff --git a/scripts/oe-setup-build b/scripts/oe-setup-build
new file mode 100755
index 0000000000..5364f2b481
--- /dev/null
+++ b/scripts/oe-setup-build
@@ -0,0 +1,122 @@
+#!/usr/bin/env python3
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+import argparse
+import json
+import os
+import subprocess
+
+def defaultlayers():
+ return os.path.abspath(os.path.join(os.path.dirname(__file__), '.oe-layers.json'))
+
+def makebuildpath(topdir, template):
+ return os.path.join(topdir, "build-{}".format(template))
+
+def discover_templates(layers_file):
+ if not os.path.exists(layers_file):
+ print("List of layers {} does not exist; were the layers set up using the setup-layers script?".format(layers_file))
+ return None
+
+ templates = []
+ layers_list = json.load(open(layers_file))["layers"]
+ for layer in layers_list:
+ template_dir = os.path.join(os.path.dirname(layers_file), layer, 'conf','templates')
+ if os.path.exists(template_dir):
+ for d in sorted(os.listdir(template_dir)):
+ templatepath = os.path.join(template_dir,d)
+ if not os.path.isfile(os.path.join(templatepath,'local.conf.sample')):
+ continue
+ layer_base = os.path.basename(layer)
+ templatename = "{}-{}".format(layer_base[5:] if layer_base.startswith("meta-") else layer_base, d)
+ buildpath = makebuildpath(os.getcwd(), templatename)
+ notespath = os.path.join(template_dir, d, 'conf-notes.txt')
+ try: notes = open(notespath).read()
+ except: notes = None
+ try: summary = open(os.path.join(template_dir, d, 'conf-summary.txt')).read()
+ except: summary = None
+ templates.append({"templatename":templatename,"templatepath":templatepath,"buildpath":buildpath,"notespath":notespath,"notes":notes,"summary":summary})
+
+ return templates
+
+def print_templates(templates, verbose):
+ print("Available build configurations:\n")
+
+ for i in range(len(templates)):
+ t = templates[i]
+ print("{}. {}".format(i+1, t["templatename"]))
+ print("{}".format(t["summary"].strip() if t["summary"] else "This configuration does not have a summary."))
+ if verbose:
+ print("Configuration template path:", t["templatepath"])
+ print("Build path:", t["buildpath"])
+ print("Usage notes:", t["notespath"] if t["notes"] else "This configuration does not have usage notes.")
+ print("")
+ if not verbose:
+ print("Re-run with 'list -v' to see additional information.")
+
+def list_templates(args):
+ templates = discover_templates(args.layerlist)
+ if not templates:
+ return
+
+ verbose = args.v
+ print_templates(templates, verbose)
+
+def find_template(template_name, templates):
+ print_templates(templates, False)
+ if not template_name:
+ n_s = input("Please choose a configuration by its number: ")
+ try: return templates[int(n_s) - 1]
+ except:
+ print("Invalid selection, please try again.")
+ return None
+ else:
+ for t in templates:
+ if t["templatename"] == template_name:
+ return t
+ print("Configuration {} is not one of {}, please try again.".format(tempalte_name, [t["templatename"] for t in templates]))
+ return None
+
+def setup_build_env(args):
+ templates = discover_templates(args.layerlist)
+ if not templates:
+ return
+
+ template = find_template(args.c, templates)
+ if not template:
+ return
+ builddir = args.b if args.b else template["buildpath"]
+ no_shell = args.no_shell
+ coredir = os.path.abspath(os.path.join(os.path.dirname(os.path.realpath(__file__)), '..'))
+ cmd = "TEMPLATECONF={} . {} {}".format(template["templatepath"], os.path.join(coredir, 'oe-init-build-env'), builddir)
+ if not no_shell:
+ cmd = cmd + " && {}".format(os.environ['SHELL'])
+ print("Running:", cmd)
+ subprocess.run(cmd, shell=True, executable=os.environ['SHELL'])
+
+parser = argparse.ArgumentParser(description="A script that discovers available build configurations and sets up a build environment based on one of them. Run without arguments to choose one interactively.")
+parser.add_argument("--layerlist", default=defaultlayers(), help='Where to look for available layers (as written out by setup-layers script) (default is {}).'.format(defaultlayers()))
+
+subparsers = parser.add_subparsers()
+parser_list_templates = subparsers.add_parser('list', help='List available configurations')
+parser_list_templates.add_argument('-v', action='store_true',
+ help='Print detailed information and usage notes for each available build configuration.')
+parser_list_templates.set_defaults(func=list_templates)
+
+parser_setup_env = subparsers.add_parser('setup', help='Set up a build environment and open a shell session with it, ready to run builds.')
+parser_setup_env.add_argument('-c', metavar='configuration_name', help="Use a build configuration configuration_name to set up a build environment (run this script with 'list' to see what is available)")
+parser_setup_env.add_argument('-b', metavar='build_path', help="Set up a build directory in build_path (run this script with 'list -v' to see where it would be by default)")
+parser_setup_env.add_argument('--no-shell', action='store_true',
+ help='Create a build directory but do not start a shell session with the build environment from it.')
+parser_setup_env.set_defaults(func=setup_build_env)
+
+args = parser.parse_args()
+
+if 'func' in args:
+ args.func(args)
+else:
+ from argparse import Namespace
+ setup_build_env(Namespace(layerlist=args.layerlist, c=None, b=None, no_shell=False))
diff --git a/scripts/oe-setup-builddir b/scripts/oe-setup-builddir
index 54048e62ec..dcb384c33a 100755
--- a/scripts/oe-setup-builddir
+++ b/scripts/oe-setup-builddir
@@ -7,12 +7,14 @@
# SPDX-License-Identifier: GPL-2.0-or-later
#
-if [ -z "$BUILDDIR" ]; then
- echo >&2 "Error: The build directory (BUILDDIR) must be set!"
+die() {
+ echo Error: "$@" >&2
exit 1
-fi
+}
+
+[ -n "$BUILDDIR" ] || die "The build directory (BUILDDIR) must be set!"
-if [ "$1" = '--help' -o "$1" = '-h' ]; then
+if [ "$1" = '--help' ] || [ "$1" = '-h' ]; then
echo 'Usage: oe-setup-builddir'
echo ''
echo "OpenEmbedded setup-builddir - setup build directory $BUILDDIR"
@@ -22,33 +24,22 @@ fi
mkdir -p "$BUILDDIR/conf"
-if [ ! -d "$BUILDDIR" ]; then
- echo >&2 "Error: The builddir ($BUILDDIR) does not exist!"
- exit 1
-fi
-
-if [ ! -w "$BUILDDIR" ]; then
- echo >&2 "Error: Cannot write to $BUILDDIR, perhaps try sourcing with a writable path? i.e. . oe-init-build-env ~/my-build"
- exit 1
-fi
+[ -d "$BUILDDIR" ] || die "The build directory ($BUILDDIR) does not exist!"
+[ -w "$BUILDDIR" ] ||
+ die "Cannot write to $BUILDDIR, perhaps try sourcing with a writable path? i.e. . oe-init-build-env ~/my-build"
# Attempting removal of sticky,setuid bits from BUILDDIR, BUILDDIR/conf
chmod -st "$BUILDDIR" 2>/dev/null || echo "WARNING: unable to chmod $BUILDDIR"
chmod -st "$BUILDDIR/conf" 2>/dev/null || echo "WARNING: unable to chmod $BUILDDIR/conf"
-cd "$BUILDDIR"
+cd "$BUILDDIR" || die "Failed to change directory to $BUILDDIR!"
-if [ -f "$BUILDDIR/conf/templateconf.cfg" ]; then
- TEMPLATECONF=$(cat "$BUILDDIR/conf/templateconf.cfg")
-fi
-
-. "$OEROOT"/.templateconf
+. "$OEROOT/.templateconf"
-if [ ! -f "$BUILDDIR/conf/templateconf.cfg" ]; then
- echo "$TEMPLATECONF" >"$BUILDDIR/conf/templateconf.cfg"
-fi
+# Keep the original TEMPLATECONF before possibly prefixing it with $OEROOT below.
+ORG_TEMPLATECONF=$TEMPLATECONF
-#
+#
# $TEMPLATECONF can point to a directory for the template local.conf & bblayers.conf
#
if [ -n "$TEMPLATECONF" ]; then
@@ -57,26 +48,29 @@ if [ -n "$TEMPLATECONF" ]; then
if [ -d "$OEROOT/$TEMPLATECONF" ]; then
TEMPLATECONF="$OEROOT/$TEMPLATECONF"
fi
- if [ ! -d "$TEMPLATECONF" ]; then
- echo >&2 "Error: TEMPLATECONF value points to nonexistent directory '$TEMPLATECONF'"
- exit 1
- fi
+ [ -d "$TEMPLATECONF" ] ||
+ die "TEMPLATECONF value points to nonexistent directory '$TEMPLATECONF'"
+ fi
+ templatesdir=$(python3 -c "import sys; print(sys.argv[1].strip('/').split('/')[-2])" "$TEMPLATECONF")
+ if [ "$templatesdir" != templates ] || [ ! -f "$TEMPLATECONF/../../layer.conf" ]; then
+ die "TEMPLATECONF value (which is $TEMPLATECONF) must point to meta-some-layer/conf/templates/template-name"
fi
OECORELAYERCONF="$TEMPLATECONF/bblayers.conf.sample"
OECORELOCALCONF="$TEMPLATECONF/local.conf.sample"
+ OECORESUMMARYCONF="$TEMPLATECONF/conf-summary.txt"
OECORENOTESCONF="$TEMPLATECONF/conf-notes.txt"
fi
unset SHOWYPDOC
if [ -z "$OECORELOCALCONF" ]; then
- OECORELOCALCONF="$OEROOT/meta/conf/local.conf.sample"
+ OECORELOCALCONF="$OEROOT/meta/conf/templates/default/local.conf.sample"
fi
if [ ! -r "$BUILDDIR/conf/local.conf" ]; then
cat <<EOM
You had no conf/local.conf file. This configuration file has therefore been
-created for you with some default values. You may wish to edit it to, for
-example, select a different MACHINE (target hardware). See conf/local.conf
-for more information as common configuration options are commented.
+created for you from $OECORELOCALCONF
+You may wish to edit it to, for example, select a different MACHINE (target
+hardware).
EOM
cp -f "$OECORELOCALCONF" "$BUILDDIR/conf/local.conf"
@@ -84,13 +78,14 @@ EOM
fi
if [ -z "$OECORELAYERCONF" ]; then
- OECORELAYERCONF="$OEROOT/meta/conf/bblayers.conf.sample"
+ OECORELAYERCONF="$OEROOT/meta/conf/templates/default/bblayers.conf.sample"
fi
if [ ! -r "$BUILDDIR/conf/bblayers.conf" ]; then
cat <<EOM
You had no conf/bblayers.conf file. This configuration file has therefore been
-created for you with some default values. To add additional metadata layers
-into your configuration please add entries to conf/bblayers.conf.
+created for you from $OECORELAYERCONF
+To add additional metadata layers into your configuration please add entries
+to conf/bblayers.conf.
EOM
@@ -104,12 +99,28 @@ EOM
SHOWYPDOC=yes
fi
+if [ -z "$OECORESUMMARYCONF" ]; then
+ OECORESUMMARYCONF="$OEROOT/meta/conf/templates/default/conf-summary.txt"
+fi
+if [ ! -r "$BUILDDIR/conf/conf-summary.txt" ]; then
+ [ ! -r "$OECORESUMMARYCONF" ] || cp "$OECORESUMMARYCONF" "$BUILDDIR/conf/conf-summary.txt"
+fi
+
+if [ -z "$OECORENOTESCONF" ]; then
+ OECORENOTESCONF="$OEROOT/meta/conf/templates/default/conf-notes.txt"
+fi
+if [ ! -r "$BUILDDIR/conf/conf-notes.txt" ]; then
+ [ ! -r "$OECORENOTESCONF" ] || cp "$OECORENOTESCONF" "$BUILDDIR/conf/conf-notes.txt"
+fi
+
# Prevent disturbing a new GIT clone in same console
unset OECORELOCALCONF
unset OECORELAYERCONF
+unset OECORESUMMARYCONF
+unset OECORENOTESCONF
# Ending the first-time run message. Show the YP Documentation banner.
-if [ ! -z "$SHOWYPDOC" ]; then
+if [ -n "$SHOWYPDOC" ]; then
cat <<EOM
The Yocto Project has extensive documentation about OE including a reference
manual which can be found at:
@@ -122,8 +133,9 @@ EOM
# unset SHOWYPDOC
fi
-if [ -z "$OECORENOTESCONF" ]; then
- OECORENOTESCONF="$OEROOT/meta/conf/conf-notes.txt"
+[ ! -r "$BUILDDIR/conf/conf-summary.txt" ] || cat "$BUILDDIR/conf/conf-summary.txt"
+[ ! -r "$BUILDDIR/conf/conf-notes.txt" ] || cat "$BUILDDIR/conf/conf-notes.txt"
+
+if [ ! -f "$BUILDDIR/conf/templateconf.cfg" ]; then
+ echo "$ORG_TEMPLATECONF" >"$BUILDDIR/conf/templateconf.cfg"
fi
-[ ! -r "$OECORENOTESCONF" ] || cat "$OECORENOTESCONF"
-unset OECORENOTESCONF
diff --git a/scripts/oe-setup-layers b/scripts/oe-setup-layers
new file mode 100755
index 0000000000..6fbfefd656
--- /dev/null
+++ b/scripts/oe-setup-layers
@@ -0,0 +1,146 @@
+#!/usr/bin/env python3
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+# This file was copied from poky(or oe-core)/scripts/oe-setup-layers by running
+#
+# bitbake-layers create-layers-setup destdir
+#
+# It is recommended that you do not modify this file directly, but rather re-run the above command to get the freshest upstream copy.
+#
+# This script is idempotent. Subsequent runs only change what is necessary to
+# ensure your layers match your configuration.
+
+import argparse
+import json
+import os
+import subprocess
+
+def _is_repo_git_repo(repodir):
+ try:
+ curr_toplevel = subprocess.check_output("git -C %s rev-parse --show-toplevel" % repodir, shell=True, stderr=subprocess.DEVNULL)
+ if curr_toplevel.strip().decode("utf-8") == repodir:
+ return True
+ except subprocess.CalledProcessError:
+ pass
+ return False
+
+def _is_repo_at_rev(repodir, rev):
+ try:
+ curr_rev = subprocess.check_output("git -C %s rev-parse HEAD" % repodir, shell=True, stderr=subprocess.DEVNULL)
+ if curr_rev.strip().decode("utf-8") == rev:
+ return True
+ except subprocess.CalledProcessError:
+ pass
+ return False
+
+def _is_repo_at_remote_uri(repodir, remote, uri):
+ try:
+ curr_uri = subprocess.check_output("git -C %s remote get-url %s" % (repodir, remote), shell=True, stderr=subprocess.DEVNULL)
+ if curr_uri.strip().decode("utf-8") == uri:
+ return True
+ except subprocess.CalledProcessError:
+ pass
+ return False
+
+def _contains_submodules(repodir):
+ return os.path.exists(os.path.join(repodir,".gitmodules"))
+
+def _write_layer_list(dest, repodirs):
+ layers = []
+ for r in repodirs:
+ for root, dirs, files in os.walk(r):
+ if os.path.basename(root) == 'conf' and 'layer.conf' in files:
+ layers.append(os.path.relpath(os.path.dirname(root), dest))
+ layers_f = os.path.join(dest, ".oe-layers.json")
+ print("Writing list of layers into {}".format(layers_f))
+ with open(layers_f, 'w') as f:
+ json.dump({"version":"1.0","layers":layers}, f, sort_keys=True, indent=4)
+
+def _do_checkout(args, json):
+ repos = json['sources']
+ repodirs = []
+ oesetupbuild = None
+ for r_name in repos:
+ r_data = repos[r_name]
+ repodir = os.path.abspath(os.path.join(args['destdir'], r_data['path']))
+ repodirs.append(repodir)
+
+ if 'contains_this_file' in r_data.keys():
+ force_arg = 'force_bootstraplayer_checkout'
+ if not args[force_arg]:
+ print('Note: not checking out source {repo}, use {repoflag} to override.'.format(repo=r_name, repoflag='--force-bootstraplayer-checkout'))
+ continue
+ r_remote = r_data['git-remote']
+ rev = r_remote['rev']
+ desc = r_remote['describe']
+ if not desc:
+ desc = rev[:10]
+ branch = r_remote['branch']
+ remotes = r_remote['remotes']
+
+ print('\nSetting up source {}, revision {}, branch {}'.format(r_name, desc, branch))
+ if not _is_repo_git_repo(repodir):
+ cmd = 'git init -q {}'.format(repodir)
+ print("Running '{}'".format(cmd))
+ subprocess.check_output(cmd, shell=True)
+
+ for remote in remotes:
+ if not _is_repo_at_remote_uri(repodir, remote, remotes[remote]['uri']):
+ cmd = "git remote remove {} > /dev/null 2>&1; git remote add {} {}".format(remote, remote, remotes[remote]['uri'])
+ print("Running '{}' in {}".format(cmd, repodir))
+ subprocess.check_output(cmd, shell=True, cwd=repodir)
+
+ cmd = "git fetch -q {} || true".format(remote)
+ print("Running '{}' in {}".format(cmd, repodir))
+ subprocess.check_output(cmd, shell=True, cwd=repodir)
+
+ if not _is_repo_at_rev(repodir, rev):
+ cmd = "git fetch -q --all || true"
+ print("Running '{}' in {}".format(cmd, repodir))
+ subprocess.check_output(cmd, shell=True, cwd=repodir)
+
+ cmd = 'git checkout -q {}'.format(rev)
+ print("Running '{}' in {}".format(cmd, repodir))
+ subprocess.check_output(cmd, shell=True, cwd=repodir)
+
+ if _contains_submodules(repodir):
+ print("Repo {} contains submodules, use 'git submodule update' to ensure they are up to date".format(repodir))
+ if os.path.exists(os.path.join(repodir, 'scripts/oe-setup-build')):
+ oesetupbuild = os.path.join(repodir, 'scripts/oe-setup-build')
+
+ _write_layer_list(args['destdir'], repodirs)
+
+ if oesetupbuild:
+ oesetupbuild_symlink = os.path.join(args['destdir'], 'setup-build')
+ if os.path.exists(oesetupbuild_symlink):
+ os.remove(oesetupbuild_symlink)
+ os.symlink(os.path.relpath(oesetupbuild,args['destdir']),oesetupbuild_symlink)
+ print("\nRun '{}' to list available build configuration templates and set up a build from one of them.".format(oesetupbuild_symlink))
+
+parser = argparse.ArgumentParser(description="A self contained python script that fetches all the needed layers and sets them to correct revisions using data in a json format from a separate file. The json data can be created from an active build directory with 'bitbake-layers create-layers-setup destdir' and there's a sample file and a schema in meta/files/")
+
+parser.add_argument('--force-bootstraplayer-checkout', action='store_true',
+ help='Force the checkout of the layer containing this file (by default it is presumed that as this script is in it, the layer is already in place).')
+
+try:
+ defaultdest = os.path.dirname(subprocess.check_output('git rev-parse --show-toplevel', universal_newlines=True, shell=True, cwd=os.path.dirname(__file__)))
+except subprocess.CalledProcessError as e:
+ defaultdest = os.path.abspath(".")
+
+parser.add_argument('--destdir', default=defaultdest, help='Where to check out the layers (default is {defaultdest}).'.format(defaultdest=defaultdest))
+parser.add_argument('--jsondata', default=__file__+".json", help='File containing the layer data in json format (default is {defaultjson}).'.format(defaultjson=__file__+".json"))
+
+args = parser.parse_args()
+
+with open(args.jsondata) as f:
+ json_f = json.load(f)
+
+supported_versions = ["1.0"]
+if json_f["version"] not in supported_versions:
+ raise Exception("File {} has version {}, which is not in supported versions: {}".format(args.jsondata, json_f["version"], supported_versions))
+
+_do_checkout(vars(args), json_f)
diff --git a/scripts/oe-setup-vscode b/scripts/oe-setup-vscode
new file mode 100755
index 0000000000..b8642780d5
--- /dev/null
+++ b/scripts/oe-setup-vscode
@@ -0,0 +1,93 @@
+#!/bin/sh
+
+usage() {
+ echo "$0 <OEINIT> <BUILDDIR>"
+ echo " OEINIT: path to directory where the .vscode folder is"
+ echo " BUILDDIR: directory passed to the oe-init-setup-env script"
+}
+
+if [ "$#" -ne 2 ]; then
+ usage
+ exit 1
+fi
+
+OEINIT=$(readlink -f "$1")
+BUILDDIR=$(readlink -f "$2")
+VSCODEDIR=$OEINIT/.vscode
+
+if [ ! -d "$OEINIT" ] || [ ! -d "$BUILDDIR" ]; then
+ echo "$OEINIT and/or $BUILDDIR directories are not present."
+ exit 1
+fi
+
+VSCODE_SETTINGS=$VSCODEDIR/settings.json
+ws_builddir="$(echo "$BUILDDIR" | sed -e "s|$OEINIT|\${workspaceFolder}|g")"
+
+# If BUILDDIR is in scope of VSCode ensure VSCode does not try to index the build folder.
+# This would lead to a busy CPU and finally to an OOM exception.
+mkdir -p "$VSCODEDIR"
+cat <<EOMsettings > "$VSCODE_SETTINGS"
+{
+ "bitbake.pathToBitbakeFolder": "\${workspaceFolder}/bitbake",
+ "bitbake.pathToEnvScript": "\${workspaceFolder}/oe-init-build-env",
+ "bitbake.pathToBuildFolder": "$ws_builddir",
+ "bitbake.commandWrapper": "",
+ "bitbake.workingDirectory": "\${workspaceFolder}",
+ "files.exclude": {
+ "**/.git/**": true,
+ "**/_build/**": true,
+ "**/buildhistory/**": true,
+ "**/cache/**": true,
+ "**/downloads/**": true,
+ "**/node_modules/**": true,
+ "**/oe-logs/**": true,
+ "**/oe-workdir/**": true,
+ "**/sstate-cache/**": true,
+ "**/tmp*/**": true,
+ "**/workspace/attic/**": true,
+ "**/workspace/sources/**": true
+ },
+ "files.watcherExclude": {
+ "**/.git/**": true,
+ "**/_build/**": true,
+ "**/buildhistory/**": true,
+ "**/cache/**": true,
+ "**/downloads/**": true,
+ "**/node_modules/**": true,
+ "**/oe-logs/**": true,
+ "**/oe-workdir/**": true,
+ "**/sstate-cache/**": true,
+ "**/tmp*/**": true,
+ "**/workspace/attic/**": true,
+ "**/workspace/sources/**": true
+ },
+ "python.analysis.exclude": [
+ "**/_build/**",
+ "**/.git/**",
+ "**/buildhistory/**",
+ "**/cache/**",
+ "**/downloads/**",
+ "**/node_modules/**",
+ "**/oe-logs/**",
+ "**/oe-workdir/**",
+ "**/sstate-cache/**",
+ "**/tmp*/**",
+ "**/workspace/attic/**",
+ "**/workspace/sources/**"
+ ]
+}
+EOMsettings
+
+
+# Ask the user if the yocto-bitbake extension should be installed
+VSCODE_EXTENSIONS=$VSCODEDIR/extensions.json
+cat <<EOMextensions > "$VSCODE_EXTENSIONS"
+{
+ "recommendations": [
+ "yocto-project.yocto-bitbake"
+ ]
+}
+EOMextensions
+
+echo "You had no $VSCODEDIR configuration."
+echo "These configuration files have therefore been created for you."
diff --git a/scripts/oe-time-dd-test.sh b/scripts/oe-time-dd-test.sh
index 386de83dce..81748b8c9e 100755
--- a/scripts/oe-time-dd-test.sh
+++ b/scripts/oe-time-dd-test.sh
@@ -1,5 +1,9 @@
#!/bin/bash
#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
# oe-time-dd-test records how much time it takes to
# write <count> number of kilobytes to the filesystem.
# It also records the number of processes that are in
diff --git a/scripts/oe-trim-schemas b/scripts/oe-trim-schemas
index bf77c8cf64..e3b26e273e 100755
--- a/scripts/oe-trim-schemas
+++ b/scripts/oe-trim-schemas
@@ -1,5 +1,7 @@
#! /usr/bin/env python3
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: GPL-2.0-only
#
diff --git a/scripts/oepydevshell-internal.py b/scripts/oepydevshell-internal.py
index e3c35bbe2c..3bf7df1114 100755
--- a/scripts/oepydevshell-internal.py
+++ b/scripts/oepydevshell-internal.py
@@ -1,5 +1,7 @@
#!/usr/bin/env python3
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: GPL-2.0-only
#
diff --git a/scripts/opkg-query-helper.py b/scripts/opkg-query-helper.py
index bc3ab43823..084d9ef684 100755
--- a/scripts/opkg-query-helper.py
+++ b/scripts/opkg-query-helper.py
@@ -29,7 +29,7 @@ for arg in sys.argv[1:]:
args.append(arg)
# Regex for removing version specs after dependency items
-verregex = re.compile(' \([=<>]* [^ )]*\)')
+verregex = re.compile(r' \([=<>]* [^ )]*\)')
pkg = ""
ver = ""
diff --git a/scripts/patchtest b/scripts/patchtest
new file mode 100755
index 0000000000..0be7062dc2
--- /dev/null
+++ b/scripts/patchtest
@@ -0,0 +1,232 @@
+#!/usr/bin/env python3
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+#
+# patchtest: execute all unittest test cases discovered for a single patch
+#
+# Copyright (C) 2016 Intel Corporation
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
+import sys
+import os
+import unittest
+import logging
+import traceback
+import json
+
+# Include current path so test cases can see it
+sys.path.insert(0, os.path.dirname(os.path.realpath(__file__)))
+
+# Include patchtest library
+sys.path.insert(0, os.path.join(os.path.dirname(os.path.realpath(__file__)), '../meta/lib/patchtest'))
+
+from data import PatchTestInput
+from repo import PatchTestRepo
+
+import utils
+logger = utils.logger_create('patchtest')
+info = logger.info
+error = logger.error
+
+import repo
+
+def getResult(patch, mergepatch, logfile=None):
+
+ class PatchTestResult(unittest.TextTestResult):
+ """ Patchtest TextTestResult """
+ shouldStop = True
+ longMessage = False
+
+ success = 'PASS'
+ fail = 'FAIL'
+ skip = 'SKIP'
+
+ def startTestRun(self):
+ # let's create the repo already, it can be used later on
+ repoargs = {
+ 'repodir': PatchTestInput.repodir,
+ 'commit' : PatchTestInput.basecommit,
+ 'branch' : PatchTestInput.basebranch,
+ 'patch' : patch,
+ }
+
+ self.repo_error = False
+ self.test_error = False
+ self.test_failure = False
+
+ try:
+ self.repo = PatchTestInput.repo = PatchTestRepo(**repoargs)
+ except:
+ logger.error(traceback.print_exc())
+ self.repo_error = True
+ self.stop()
+ return
+
+ if mergepatch:
+ self.repo.merge()
+
+ def addError(self, test, err):
+ self.test_error = True
+ (ty, va, trace) = err
+ logger.error(traceback.print_exc())
+
+ def addFailure(self, test, err):
+ test_description = test.id().split('.')[-1].replace('_', ' ').replace("cve", "CVE").replace("signed off by",
+ "Signed-off-by").replace("upstream status",
+ "Upstream-Status").replace("non auh",
+ "non-AUH").replace("presence format", "presence")
+ self.test_failure = True
+ fail_str = '{}: {}: {} ({})'.format(self.fail,
+ test_description, json.loads(str(err[1]))["issue"],
+ test.id())
+ print(fail_str)
+ if logfile:
+ with open(logfile, "a") as f:
+ f.write(fail_str + "\n")
+
+ def addSuccess(self, test):
+ test_description = test.id().split('.')[-1].replace('_', ' ').replace("cve", "CVE").replace("signed off by",
+ "Signed-off-by").replace("upstream status",
+ "Upstream-Status").replace("non auh",
+ "non-AUH").replace("presence format", "presence")
+ success_str = '{}: {} ({})'.format(self.success,
+ test_description, test.id())
+ print(success_str)
+ if logfile:
+ with open(logfile, "a") as f:
+ f.write(success_str + "\n")
+
+ def addSkip(self, test, reason):
+ test_description = test.id().split('.')[-1].replace('_', ' ').replace("cve", "CVE").replace("signed off by",
+ "Signed-off-by").replace("upstream status",
+ "Upstream-Status").replace("non auh",
+ "non-AUH").replace("presence format", "presence")
+ skip_str = '{}: {}: {} ({})'.format(self.skip,
+ test_description, json.loads(str(reason))["issue"],
+ test.id())
+ print(skip_str)
+ if logfile:
+ with open(logfile, "a") as f:
+ f.write(skip_str + "\n")
+
+ def stopTestRun(self):
+
+ # in case there was an error on repo object creation, just return
+ if self.repo_error:
+ return
+
+ self.repo.clean()
+
+ return PatchTestResult
+
+def _runner(resultklass, prefix=None):
+ # load test with the corresponding prefix
+ loader = unittest.TestLoader()
+ if prefix:
+ loader.testMethodPrefix = prefix
+
+ # create the suite with discovered tests and the corresponding runner
+ suite = loader.discover(start_dir=PatchTestInput.testdir, pattern=PatchTestInput.pattern, top_level_dir=PatchTestInput.topdir)
+ ntc = suite.countTestCases()
+
+ # if there are no test cases, just quit
+ if not ntc:
+ return 2
+ runner = unittest.TextTestRunner(resultclass=resultklass, verbosity=0)
+
+ try:
+ result = runner.run(suite)
+ except:
+ logger.error(traceback.print_exc())
+ logger.error('patchtest: something went wrong')
+ return 1
+ if result.test_failure or result.test_error:
+ return 1
+
+ return 0
+
+def run(patch, logfile=None):
+ """ Load, setup and run pre and post-merge tests """
+ # Get the result class and install the control-c handler
+ unittest.installHandler()
+
+ # run pre-merge tests, meaning those methods with 'pretest' as prefix
+ premerge_resultklass = getResult(patch, False, logfile)
+ premerge_result = _runner(premerge_resultklass, 'pretest')
+
+ # run post-merge tests, meaning those methods with 'test' as prefix
+ postmerge_resultklass = getResult(patch, True, logfile)
+ postmerge_result = _runner(postmerge_resultklass, 'test')
+
+ print('----------------------------------------------------------------------\n')
+ if premerge_result == 2 and postmerge_result == 2:
+ logger.error('patchtest: No test cases found - did you specify the correct suite directory?')
+ if premerge_result == 1 or postmerge_result == 1:
+ logger.error('WARNING: patchtest: At least one patchtest caused a failure or an error - please check https://wiki.yoctoproject.org/wiki/Patchtest for further guidance')
+ else:
+ logger.info('OK: patchtest: All patchtests passed')
+ print('----------------------------------------------------------------------\n')
+ return premerge_result or postmerge_result
+
+def main():
+ tmp_patch = False
+ patch_path = PatchTestInput.patch_path
+ log_results = PatchTestInput.log_results
+ log_path = None
+ patch_list = None
+
+ git_status = os.popen("(cd %s && git status)" % PatchTestInput.repodir).read()
+ status_matches = ["Changes not staged for commit", "Changes to be committed"]
+ if any([match in git_status for match in status_matches]):
+ logger.error("patchtest: there are uncommitted changes in the target repo that would be overwritten. Please commit or restore them before running patchtest")
+ return 1
+
+ if os.path.isdir(patch_path):
+ patch_list = [os.path.join(patch_path, filename) for filename in sorted(os.listdir(patch_path))]
+ else:
+ patch_list = [patch_path]
+
+ for patch in patch_list:
+ if os.path.getsize(patch) == 0:
+ logger.error('patchtest: patch is empty')
+ return 1
+
+ logger.info('Testing patch %s' % patch)
+
+ if log_results:
+ log_path = patch + ".testresult"
+ with open(log_path, "a") as f:
+ f.write("Patchtest results for patch '%s':\n\n" % patch)
+
+ try:
+ if log_path:
+ run(patch, log_path)
+ else:
+ run(patch)
+ finally:
+ if tmp_patch:
+ os.remove(patch)
+
+if __name__ == '__main__':
+ ret = 1
+
+ # Parse the command line arguments and store it on the PatchTestInput namespace
+ PatchTestInput.set_namespace()
+
+ # set debugging level
+ if PatchTestInput.debug:
+ logger.setLevel(logging.DEBUG)
+
+ # if topdir not define, default it to testdir
+ if not PatchTestInput.topdir:
+ PatchTestInput.topdir = PatchTestInput.testdir
+
+ try:
+ ret = main()
+ except Exception:
+ import traceback
+ traceback.print_exc(5)
+
+ sys.exit(ret)
diff --git a/scripts/patchtest-get-branch b/scripts/patchtest-get-branch
new file mode 100755
index 0000000000..c6e242f8b6
--- /dev/null
+++ b/scripts/patchtest-get-branch
@@ -0,0 +1,81 @@
+#!/usr/bin/env python3
+
+# Get target branch from the corresponding mbox
+#
+# NOTE: this script was based on patches coming to the openembedded-core
+# where target branch is defined inside brackets as subject prefix
+# i.e. [master], [rocko], etc.
+#
+# Copyright (C) 2016 Intel Corporation
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
+import mailbox
+import argparse
+import re
+import git
+
+re_prefix = re.compile(r"(\[.*\])", re.DOTALL)
+
+def get_branch(filepath_repo, filepath_mbox, default_branch):
+ branch = None
+
+ # get all remotes branches
+ gitbranches = git.Git(filepath_repo).branch('-a').splitlines()
+
+ # from gitbranches, just get the names
+ branches = [b.split('/')[-1] for b in gitbranches]
+
+ subject = ' '.join(mailbox.mbox(filepath_mbox)[0]['subject'].splitlines())
+
+ # we expect that patches will have somewhere between one and three
+ # consecutive sets of square brackets with tokens inside, e.g.:
+ # 1. [PATCH]
+ # 2. [OE-core][PATCH]
+ # 3. [OE-core][kirkstone][PATCH]
+ # Some of them may also be part of a series, in which case the PATCH
+ # token will be formatted like:
+ # [PATCH 1/4]
+ # or they will be revisions to previous patches, where it will be:
+ # [PATCH v2]
+ # Or they may contain both:
+ # [PATCH v2 3/4]
+ # In any case, we want mprefix to contain all of these tokens so
+ # that we can search for branch names within them.
+ mprefix = re.findall(r'\[.*?\]', subject)
+ found_branch = None
+ if mprefix:
+ # Iterate over the tokens and compare against the branch list to
+ # figure out which one the patch is targeting
+ for token in mprefix:
+ stripped = token.lower().strip('[]')
+ if default_branch in stripped:
+ found_branch = default_branch
+ break
+ else:
+ for branch in branches:
+ # ignore branches named "core"
+ if branch != "core" and stripped.rfind(branch) != -1:
+ found_branch = token.split(' ')[0].strip('[]')
+ break
+
+ # if there's no mprefix content or no known branches were found in
+ # the tokens, assume the target is master
+ if found_branch is None:
+ found_branch = "master"
+
+ return (subject, found_branch)
+
+if __name__ == '__main__':
+
+ parser = argparse.ArgumentParser()
+ parser.add_argument('repo', metavar='REPO', help='Main repository')
+ parser.add_argument('mbox', metavar='MBOX', help='mbox filename')
+ parser.add_argument('--default-branch', metavar='DEFAULT_BRANCH', default='master', help='Use this branch if no one is found')
+ parser.add_argument('--separator', '-s', metavar='SEPARATOR', default=' ', help='Char separator for output data')
+ args = parser.parse_args()
+
+ subject, branch = get_branch(args.repo, args.mbox, args.default_branch)
+ print("branch: %s" % branch)
+
diff --git a/scripts/patchtest-get-series b/scripts/patchtest-get-series
new file mode 100755
index 0000000000..908442089f
--- /dev/null
+++ b/scripts/patchtest-get-series
@@ -0,0 +1,115 @@
+#!/bin/bash -e
+#
+# get-latest-series: Download latest patch series from Patchwork
+#
+# Copyright (C) 2023 BayLibre Inc.
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
+# the interval into the past which we want to check for new series, in minutes
+INTERVAL_MINUTES=30
+
+# Maximum number of series to retrieve. the Patchwork API can support up to 250
+# at once
+SERIES_LIMIT=250
+
+# Location to save patches
+DOWNLOAD_PATH="."
+
+# Name of the file to use/check as a log of previously-tested series IDs
+SERIES_TEST_LOG=".series_test.log"
+
+# Patchwork project to pull series patches from
+PROJECT="oe-core"
+
+# The Patchwork server to pull from
+SERVER="https://patchwork.yoctoproject.org/api/1.2/"
+
+help()
+{
+ echo "Usage: get-latest-series [ -i | --interval MINUTES ]
+ [ -d | --directory DIRECTORY ]
+ [ -l | --limit COUNT ]
+ [ -h | --help ]
+ [ -t | --tested-series LOGFILE]
+ [ -p | --project PROJECT ]
+ [ -s | --server SERVER ]"
+ exit 2
+}
+
+while [ "$1" != "" ]; do
+ case $1 in
+ -i|--interval)
+ INTERVAL_MINUTES=$2
+ shift 2
+ ;;
+ -l|--limit)
+ SERIES_LIMIT=$2
+ shift 2
+ ;;
+ -d|--directory)
+ DOWNLOAD_PATH=$2
+ shift 2
+ ;;
+ -p|--project)
+ PROJECT=$2
+ shift 2
+ ;;
+ -s|--server)
+ SERVER=$2
+ shift 2
+ ;;
+ -t|--tested-series)
+ SERIES_TEST_LOG=$2
+ shift 2
+ ;;
+ -h|--help)
+ help
+ ;;
+ *)
+ echo "Unknown option $1"
+ help
+ ;;
+ esac
+done
+
+# The time this script is running at
+START_TIME=$(date --date "now" +"%Y-%m-%dT%H:%M:%S")
+
+# the corresponding timestamp we want to check against for new patch series
+SERIES_CHECK_LIMIT=$(date --date "now - ${INTERVAL_MINUTES} minutes" +"%Y-%m-%dT%H:%M:%S")
+
+echo "Start time is $START_TIME"
+echo "Series check limit is $SERIES_CHECK_LIMIT"
+
+# Create DOWNLOAD_PATH if it doesn't exist
+if [ ! -d "$DOWNLOAD_PATH" ]; then
+ mkdir "${DOWNLOAD_PATH}"
+fi
+
+# Create SERIES_TEST_LOG if it doesn't exist
+if [ ! -f "$SERIES_TEST_LOG" ]; then
+ touch "${SERIES_TEST_LOG}"
+fi
+
+# Retrieve a list of series IDs from the 'git-pw series list' output. The API
+# supports a maximum of 250 results, so make sure we allow that when required
+SERIES_LIST=$(git-pw --project "${PROJECT}" --server "${SERVER}" series list --since "${SERIES_CHECK_LIMIT}" --limit "${SERIES_LIMIT}" | awk '{print $2}' | xargs | sed -e 's/[^0-9 ]//g')
+
+if [ -z "$SERIES_LIST" ]; then
+ echo "No new series for project ${PROJECT} since ${SERIES_CHECK_LIMIT}"
+ exit 0
+fi
+
+# Check each series ID
+for SERIES in $SERIES_LIST; do
+ # Download the series only if it's not found in the SERIES_TEST_LOG
+ if ! grep -w --quiet "${SERIES}" "${SERIES_TEST_LOG}"; then
+ echo "Downloading $SERIES..."
+ git-pw series download --separate "${SERIES}" "${DOWNLOAD_PATH}"
+ echo "${SERIES}" >> "${SERIES_TEST_LOG}"
+ else
+ echo "Already tested ${SERIES}. Skipping..."
+ fi
+done
diff --git a/scripts/patchtest-send-results b/scripts/patchtest-send-results
new file mode 100755
index 0000000000..8a3dadbd11
--- /dev/null
+++ b/scripts/patchtest-send-results
@@ -0,0 +1,110 @@
+#!/usr/bin/env python3
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+#
+# patchtest: execute all unittest test cases discovered for a single patch
+# Note that this script is currently under development and has been
+# hard-coded with default values for testing purposes. This script
+# should not be used without changing the default recipient, at minimum.
+#
+# Copyright (C) 2023 BayLibre Inc.
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
+import argparse
+import boto3
+import configparser
+import mailbox
+import os
+import re
+import sys
+
+greeting = """Thank you for your submission. Patchtest identified one
+or more issues with the patch. Please see the log below for
+more information:\n\n---\n"""
+
+suggestions = """\n---\n\nPlease address the issues identified and
+submit a new revision of the patch, or alternatively, reply to this
+email with an explanation of why the patch should be accepted. If you
+believe these results are due to an error in patchtest, please submit a
+bug at https://bugzilla.yoctoproject.org/ (use the 'Patchtest' category
+under 'Yocto Project Subprojects'). For more information on specific
+failures, see: https://wiki.yoctoproject.org/wiki/Patchtest. Thank
+you!"""
+
+def has_a_failed_test(raw_results):
+ return any(raw_result.split(':')[0] == "FAIL" for raw_result in raw_results.splitlines())
+
+parser = argparse.ArgumentParser(description="Send patchtest results to a submitter for a given patch")
+parser.add_argument("-p", "--patch", dest="patch", required=True, help="The patch file to summarize")
+parser.add_argument("-d", "--debug", dest="debug", required=False, action='store_true', help="Print raw email headers and content, but don't actually send it")
+args = parser.parse_args()
+
+if not os.path.exists(args.patch):
+ print(f"Patch '{args.patch}' not found - did you provide the right path?")
+ sys.exit(1)
+elif not os.path.exists(args.patch + ".testresult"):
+ print(f"Found patch '{args.patch}' but '{args.patch}.testresult' was not present. Have you run patchtest on the patch?")
+ sys.exit(1)
+
+result_file = args.patch + ".testresult"
+testresult = None
+
+with open(result_file, "r") as f:
+ testresult = f.read()
+
+# we know these patch files will only contain a single patch, so only
+# worry about the first element for getting the subject
+mbox = mailbox.mbox(args.patch)
+mbox_subject = mbox[0]['subject']
+subject_line = f"Patchtest results for {mbox_subject}"
+
+# extract the submitter email address and use it as the reply address
+# for the results
+reply_address = mbox[0]['from']
+
+# extract the message ID and use that as the in-reply-to address
+# TODO: This will need to change again when patchtest can handle a whole
+# series at once
+in_reply_to = mbox[0]['Message-ID']
+
+# the address the results email is sent from
+from_address = "patchtest@automation.yoctoproject.org"
+
+# mailing list to CC
+cc_address = "openembedded-core@lists.openembedded.org"
+
+if has_a_failed_test(testresult):
+ reply_contents = None
+ if len(max(open(result_file, 'r'), key=len)) > 220:
+ warning = "Tests failed for the patch, but the results log could not be processed due to excessive result line length."
+ reply_contents = greeting + warning + suggestions
+ else:
+ reply_contents = greeting + testresult + suggestions
+
+ ses_client = boto3.client('ses', region_name='us-west-2')
+
+ # Construct the headers for the email. We only want to reply
+ # directly to the tested patch, so make In-Reply-To and References
+ # the same value.
+ raw_data = 'From: ' + from_address + '\nTo: ' + reply_address + \
+ '\nCC: ' + cc_address + '\nSubject:' + subject_line + \
+ '\nIn-Reply-To:' + in_reply_to + \
+ '\nReferences:' + in_reply_to + \
+ '\nMIME-Version: 1.0" + \
+ "\nContent-type: Multipart/Mixed;boundary="NextPart"\n\n--NextPart\nContent-Type: text/plain\n\n' + \
+ reply_contents + '\n\n--NextPart'
+
+ if args.debug:
+ print(f"RawMessage: \n\n{raw_data}")
+ else:
+ response = ses_client.send_raw_email(
+ Source="patchtest@automation.yoctoproject.org",
+ RawMessage={
+ "Data": raw_data,
+ },
+ )
+
+else:
+ print(f"No failures identified for {args.patch}.")
diff --git a/scripts/patchtest-setup-sharedir b/scripts/patchtest-setup-sharedir
new file mode 100755
index 0000000000..277677e527
--- /dev/null
+++ b/scripts/patchtest-setup-sharedir
@@ -0,0 +1,83 @@
+#!/bin/bash -e
+#
+# patchtest-setup-sharedir: Setup a directory for storing mboxes and
+# repositories to be shared with the guest machine, including updates to
+# the repos if the directory already exists
+#
+# Copyright (C) 2023 BayLibre Inc.
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
+# poky repository
+POKY_REPO="https://git.yoctoproject.org/poky"
+
+# patchtest repository
+PATCHTEST_REPO="https://git.yoctoproject.org/patchtest"
+
+# the name of the directory
+SHAREDIR="patchtest_share"
+
+help()
+{
+ echo "Usage: patchtest-setup-sharedir [ -d | --directory SHAREDIR ]
+ [ -p | --patchtest PATCHTEST_REPO ]
+ [ -y | --poky POKY_REPO ]"
+ exit 2
+}
+
+while [ "$1" != "" ]; do
+ case $1 in
+ -d|--directory)
+ SHAREDIR=$2
+ shift 2
+ ;;
+ -p|--patchtest)
+ PATCHTEST_REPO=$2
+ shift 2
+ ;;
+ -y|--poky)
+ POKY_REPO=$2
+ shift 2
+ ;;
+ -h|--help)
+ help
+ ;;
+ *)
+ echo "Unknown option $1"
+ help
+ ;;
+ esac
+done
+
+# define MBOX_DIR where the patch series will be stored by
+# get-latest-series
+MBOX_DIR="${SHAREDIR}/mboxes"
+
+# Create SHAREDIR if it doesn't exist
+if [ ! -d "$SHAREDIR" ]; then
+ mkdir -p "${SHAREDIR}"
+ echo "Created ${SHAREDIR}"
+fi
+
+# Create the mboxes directory if it doesn't exist
+if [ ! -d "$MBOX_DIR" ]; then
+ mkdir -p "${MBOX_DIR}"
+ echo "Created ${MBOX_DIR}"
+fi
+
+# clone poky if it's not already present; otherwise, update it
+if [ ! -d "$POKY_REPO" ]; then
+ BASENAME=$(basename ${POKY_REPO})
+ git clone "${POKY_REPO}" "${SHAREDIR}/${BASENAME}"
+else
+ (cd "${SHAREDIR}/$BASENAME" && git pull)
+fi
+
+# clone patchtest if it's not already present; otherwise, update it
+if [ ! -d "$PATCHTEST_REPO" ]; then
+ BASENAME=$(basename ${PATCHTEST_REPO})
+ git clone "${PATCHTEST_REPO}" "${SHAREDIR}/${BASENAME}"
+else
+ (cd "${SHAREDIR}/$BASENAME" && git pull)
+fi
diff --git a/scripts/patchtest.README b/scripts/patchtest.README
new file mode 100644
index 0000000000..76b5fcdb6d
--- /dev/null
+++ b/scripts/patchtest.README
@@ -0,0 +1,153 @@
+# Patchtest
+
+## Introduction
+
+Patchtest is a test framework for community patches based on the standard
+unittest python module. As input, it needs tree elements to work properly:
+a patch in mbox format (either created with `git format-patch` or fetched
+from 'patchwork'), a test suite and a target repository.
+
+The first test suite intended to be used with patchtest is found in the
+openembedded-core repository [1] targeted for patches that get into the
+openembedded-core mailing list [2]. This suite is also intended as a
+baseline for development of similar suites for other layers as needed.
+
+Patchtest can either run on a host or a guest machine, depending on which
+environment the execution needs to be done. If you plan to test your own patches
+(a good practice before these are sent to the mailing list), the easiest way is
+to install and execute on your local host; in the other hand, if automatic
+testing is intended, the guest method is strongly recommended. The guest
+method requires the use of the patchtest layer, in addition to the tools
+available in oe-core: https://git.yoctoproject.org/patchtest/
+
+## Installation
+
+As a tool for use with the Yocto Project, the [quick start guide](https://docs.yoctoproject.org/brief-yoctoprojectqs/index.html)
+contains the necessary prerequisites for a basic project. In addition,
+patchtest relies on the following Python modules:
+
+- boto3 (for sending automated results emails only)
+- git-pw>=2.5.0
+- jinja2
+- pylint
+- pyparsing>=3.0.9
+- unidiff
+
+These can be installed by running `pip install -r
+meta/lib/patchtest/requirements.txt`. Note that git-pw is not
+automatically added to the user's PATH; by default, it is installed at
+~/.local/bin/git-pw.
+
+For git-pw (and therefore scripts such as patchtest-get--series) to work, you need
+to provide a Patchwork instance in your user's .gitconfig, like so (the project
+can be specified using the --project argument):
+
+ git config --global pw.server "https://patchwork.yoctoproject.org/api/1.2/"
+
+To work with patchtest, you should have the following repositories cloned:
+
+1. https://git.openembedded.org/openembedded-core/ (or https://git.yoctoproject.org/poky/)
+2. https://git.openembedded.org/bitbake/ (if not using poky)
+3. https://git.yoctoproject.org/patchtest (if using guest mode)
+
+## Usage
+
+### Obtaining Patches
+
+Patch files can be obtained directly from cloned repositories using `git
+format-patch -N` (where N is the number of patches starting from HEAD to
+generate). git-pw can also be used with filters for users, patch/series IDs,
+and timeboxes if specific patches are desired. For more information, see the
+git-pw [documentation](https://patchwork.readthedocs.io/projects/git-pw/en/latest/).
+
+Alternatively, `scripts/patchtest-get-series` can be used to pull mbox files from
+the Patchwork instance configured previously in .gitconfig. It uses a log file
+called ".series_test.log" to store and compare series IDs so that the same
+versions of a patch are not tested multiple times unintentionally. By default,
+it will pull up to five patch series from the last 30 minutes using oe-core as
+the target project, but these parameters can be configured using the `--limit`,
+`--interval`, and `--project` arguments respectively. For more information, run
+`patchtest-get-series -h`.
+
+### Host Mode
+
+To run patchtest on the host, do the following:
+
+1. In openembedded-core/poky, do `source oe-init-build-env`
+2. Generate patch files from the target repository by doing `git-format patch -N`,
+ where N is the number of patches starting at HEAD, or by using git-pw
+ or patchtest-get-series
+3. Run patchtest on a patch file by doing the following:
+
+ patchtest --patch /path/to/patch/file
+
+ or, if you have stored the patch files in a directory, do:
+
+ patchtest --directory /path/to/patch/directory
+
+ For example, to test `master-gcc-Fix--fstack-protector-issue-on-aarch64.patch` against the oe-core test suite:
+
+ patchtest --patch master-gcc-Fix--fstack-protector-issue-on-aarch64.patch
+
+ If you want to use a different test suite or target repository, you can use the --testdir and --repodir flags:
+
+ patchtest --patch /path/to/patch/file --repodir /path/to/repo --testdir /path/to/test/dir
+
+### Guest Mode
+
+Patchtest's guest mode has been refactored to more closely mirror the
+typical Yocto Project image build workflow, but there are still some key
+differences to keep in mind. The primary objective is to provide a level
+of isolation from the host when testing patches pulled automatically
+from the mailing lists. When executed this way, the test process is
+essentially running random code from the internet and could be
+catastrophic if malicious bits or even poorly-handled edge cases aren't
+protected against. In order to use this mode, the
+https://git.yoctoproject.org/patchtest/ repository must be cloned and
+the meta-patchtest layer added to bblayers.conf.
+
+The general flow of guest mode is:
+
+1. Run patchtest-setup-sharedir --directory <dirname> to create a
+ directory for mounting
+2. Collect patches via patchtest-get-series (or other manual step) into the
+ <dirname>/mboxes path
+3. Ensure that a user with ID 1200 has appropriate read/write
+ permissions to <dirname> and <dirname>/mboxes, so that the
+ "patchtest" user in the core-image-patchtest image can function
+4. Build the core-image-patchtest image
+5. Run the core-image-patchtest image with the mounted sharedir, like
+ so:
+ `runqemu kvm nographic qemuparams="-snapshot -fsdev
+ local,id=test_mount,path=/workspace/yocto/poky/build/patchtestdir,security_model=mapped
+ -device virtio-9p-pci,fsdev=test_mount,mount_tag=test_mount -smp 4 -m
+ 2048"`
+
+Patchtest runs as an initscript for the core-image-patchtest image and
+shuts down after completion, so there is no input required from a user
+during operation. Unlike in host mode, the guest is designed to
+automatically generate test result files, in the same directory as the
+targeted patch files but with .testresult as an extension. These contain
+the entire output of the patchtest run for each respective pass,
+including the PASS, FAIL, and SKIP indicators for each test run.
+
+## Contributing
+
+The yocto mailing list (openembedded-core@lists.openembedded.org) is used for questions,
+comments and patch review. It is subscriber only, so please register before
+posting.
+
+When sending single patches, please use something like:
+
+ git send-email -M -1 --to=openembedded-core@lists.openembedded.org --subject-prefix=OE-core][PATCH
+
+## Maintenance
+-----------
+
+Maintainers:
+ Trevor Gamblin <tgamblin@baylibre.com>
+
+## Links
+-----
+[1] https://git.openembedded.org/openembedded-core/
+[2] https://www.yoctoproject.org/community/mailing-lists/
diff --git a/scripts/postinst-intercepts/update_gtk_icon_cache b/scripts/postinst-intercepts/update_gtk_icon_cache
index 99367a2855..a92bd840c6 100644
--- a/scripts/postinst-intercepts/update_gtk_icon_cache
+++ b/scripts/postinst-intercepts/update_gtk_icon_cache
@@ -11,7 +11,11 @@ $STAGING_DIR_NATIVE/${libdir_native}/gdk-pixbuf-2.0/gdk-pixbuf-query-loaders --u
for icondir in $D/usr/share/icons/*/ ; do
if [ -d $icondir ] ; then
- gtk-update-icon-cache -fqt $icondir
+ for gtkuic_cmd in gtk-update-icon-cache gtk4-update-icon-cache ; do
+ if [ -n "$(which $gtkuic_cmd)" ]; then
+ $gtkuic_cmd -fqt $icondir
+ fi
+ done
fi
done
diff --git a/scripts/postinst-intercepts/update_mandb b/scripts/postinst-intercepts/update_mandb
new file mode 100644
index 0000000000..f91bafdb11
--- /dev/null
+++ b/scripts/postinst-intercepts/update_mandb
@@ -0,0 +1,18 @@
+#!/bin/sh
+#
+# SPDX-License-Identifier: MIT
+#
+
+set -eu
+
+# Create a temporary man_db.conf with paths to the rootfs, as mandb needs absolute paths
+CONFIG=$(mktemp --tmpdir update-mandb.XXXXX)
+sed "s:\(\s\)/:\1$D/:g" $D${sysconfdir}/man_db.conf > $CONFIG
+
+mkdir -p $D${localstatedir}/cache/man/
+
+PSEUDO_UNLOAD=1 ${binprefix}qemuwrapper -L $D $D${bindir}/mandb --config-file $CONFIG --create
+
+rm -f $CONFIG
+
+chown -R man:man $D${localstatedir}/cache/man/
diff --git a/scripts/pybootchartgui/pybootchartgui/draw.py b/scripts/pybootchartgui/pybootchartgui/draw.py
index fc708b55c3..c6e67833ab 100644
--- a/scripts/pybootchartgui/pybootchartgui/draw.py
+++ b/scripts/pybootchartgui/pybootchartgui/draw.py
@@ -80,6 +80,22 @@ MEM_BUFFERS_COLOR = (0.4, 0.4, 0.4, 0.3)
# Swap color
MEM_SWAP_COLOR = DISK_TPUT_COLOR
+# avg10 CPU pressure color
+CPU_PRESSURE_AVG10_COLOR = (0.0, 0.0, 0.0, 1.0)
+# delta total CPU pressure color
+CPU_PRESSURE_TOTAL_COLOR = CPU_COLOR
+# avg10 IO pressure color
+IO_PRESSURE_AVG10_COLOR = (0.0, 0.0, 0.0, 1.0)
+# delta total IO pressure color
+IO_PRESSURE_TOTAL_COLOR = IO_COLOR
+# avg10 memory pressure color
+MEM_PRESSURE_AVG10_COLOR = (0.0, 0.0, 0.0, 1.0)
+# delta total memory pressure color
+MEM_PRESSURE_TOTAL_COLOR = DISK_TPUT_COLOR
+
+
+
+
# Process border color.
PROC_BORDER_COLOR = (0.71, 0.71, 0.71, 1.0)
# Waiting process color.
@@ -340,6 +356,12 @@ def extents(options, xscale, trace):
h += 30 + bar_h
if trace.disk_stats:
h += 30 + bar_h
+ if trace.cpu_pressure:
+ h += 30 + bar_h
+ if trace.io_pressure:
+ h += 30 + bar_h
+ if trace.mem_pressure:
+ h += 30 + bar_h
if trace.monitor_disk:
h += 30 + bar_h
if trace.mem_stats:
@@ -415,6 +437,108 @@ def render_charts(ctx, options, clip, trace, curr_y, w, h, sec_w):
curr_y = curr_y + 30 + bar_h
+ # render CPU pressure chart
+ if trace.cpu_pressure:
+ max_sample_avg = max (trace.cpu_pressure, key = lambda s: s.avg10)
+ max_sample_total = max (trace.cpu_pressure, key = lambda s: s.deltaTotal)
+ draw_legend_line(ctx, "avg10 CPU Pressure (max %d%%)" % (max_sample_avg.avg10), CPU_PRESSURE_AVG10_COLOR, off_x, curr_y+20, leg_s)
+ draw_legend_box(ctx, "delta total CPU Pressure (max %d)" % (max_sample_total.deltaTotal), CPU_PRESSURE_TOTAL_COLOR, off_x + 240, curr_y+20, leg_s)
+
+ # render delta total cpu
+ chart_rect = (off_x, curr_y+30, w, bar_h)
+ if clip_visible (clip, chart_rect):
+ draw_box_ticks (ctx, chart_rect, sec_w)
+ draw_annotations (ctx, proc_tree, trace.times, chart_rect)
+ draw_chart (ctx, CPU_PRESSURE_TOTAL_COLOR, True, chart_rect, \
+ [(sample.time, sample.deltaTotal) for sample in trace.cpu_pressure], \
+ proc_tree, None)
+
+ # render avg10 cpu
+ if clip_visible (clip, chart_rect):
+ draw_chart (ctx, CPU_PRESSURE_AVG10_COLOR, False, chart_rect, \
+ [(sample.time, sample.avg10) for sample in trace.cpu_pressure], \
+ proc_tree, None)
+
+ pos_x = off_x + ((max_sample_avg.time - proc_tree.start_time) * w / proc_tree.duration)
+
+ shift_x, shift_y = -20, 20
+ if (pos_x < off_x + 245):
+ shift_x, shift_y = 5, 40
+
+
+ label = "%d%%" % (max_sample_avg.avg10)
+ draw_text (ctx, label, CPU_PRESSURE_AVG10_COLOR, pos_x + shift_x, curr_y + shift_y)
+
+ curr_y = curr_y + 30 + bar_h
+
+ # render I/O pressure chart
+ if trace.io_pressure:
+ max_sample_avg = max (trace.io_pressure, key = lambda s: s.avg10)
+ max_sample_total = max (trace.io_pressure, key = lambda s: s.deltaTotal)
+ draw_legend_line(ctx, "avg10 I/O Pressure (max %d%%)" % (max_sample_avg.avg10), IO_PRESSURE_AVG10_COLOR, off_x, curr_y+20, leg_s)
+ draw_legend_box(ctx, "delta total I/O Pressure (max %d)" % (max_sample_total.deltaTotal), IO_PRESSURE_TOTAL_COLOR, off_x + 240, curr_y+20, leg_s)
+
+ # render delta total io
+ chart_rect = (off_x, curr_y+30, w, bar_h)
+ if clip_visible (clip, chart_rect):
+ draw_box_ticks (ctx, chart_rect, sec_w)
+ draw_annotations (ctx, proc_tree, trace.times, chart_rect)
+ draw_chart (ctx, IO_PRESSURE_TOTAL_COLOR, True, chart_rect, \
+ [(sample.time, sample.deltaTotal) for sample in trace.io_pressure], \
+ proc_tree, None)
+
+ # render avg10 io
+ if clip_visible (clip, chart_rect):
+ draw_chart (ctx, IO_PRESSURE_AVG10_COLOR, False, chart_rect, \
+ [(sample.time, sample.avg10) for sample in trace.io_pressure], \
+ proc_tree, None)
+
+ pos_x = off_x + ((max_sample_avg.time - proc_tree.start_time) * w / proc_tree.duration)
+
+ shift_x, shift_y = -20, 20
+ if (pos_x < off_x + 245):
+ shift_x, shift_y = 5, 40
+
+
+ label = "%d%%" % (max_sample_avg.avg10)
+ draw_text (ctx, label, IO_PRESSURE_AVG10_COLOR, pos_x + shift_x, curr_y + shift_y)
+
+ curr_y = curr_y + 30 + bar_h
+
+ # render MEM pressure chart
+ if trace.mem_pressure:
+ max_sample_avg = max (trace.mem_pressure, key = lambda s: s.avg10)
+ max_sample_total = max (trace.mem_pressure, key = lambda s: s.deltaTotal)
+ draw_legend_line(ctx, "avg10 MEM Pressure (max %d%%)" % (max_sample_avg.avg10), MEM_PRESSURE_AVG10_COLOR, off_x, curr_y+20, leg_s)
+ draw_legend_box(ctx, "delta total MEM Pressure (max %d)" % (max_sample_total.deltaTotal), MEM_PRESSURE_TOTAL_COLOR, off_x + 240, curr_y+20, leg_s)
+
+ # render delta total mem
+ chart_rect = (off_x, curr_y+30, w, bar_h)
+ if clip_visible (clip, chart_rect):
+ draw_box_ticks (ctx, chart_rect, sec_w)
+ draw_annotations (ctx, proc_tree, trace.times, chart_rect)
+ draw_chart (ctx, MEM_PRESSURE_TOTAL_COLOR, True, chart_rect, \
+ [(sample.time, sample.deltaTotal) for sample in trace.mem_pressure], \
+ proc_tree, None)
+
+ # render avg10 mem
+ if clip_visible (clip, chart_rect):
+ draw_chart (ctx, MEM_PRESSURE_AVG10_COLOR, False, chart_rect, \
+ [(sample.time, sample.avg10) for sample in trace.mem_pressure], \
+ proc_tree, None)
+
+ pos_x = off_x + ((max_sample_avg.time - proc_tree.start_time) * w / proc_tree.duration)
+
+ shift_x, shift_y = -20, 20
+ if (pos_x < off_x + 245):
+ shift_x, shift_y = 5, 40
+
+
+ label = "%d%%" % (max_sample_avg.avg10)
+ draw_text (ctx, label, MEM_PRESSURE_AVG10_COLOR, pos_x + shift_x, curr_y + shift_y)
+
+ curr_y = curr_y + 30 + bar_h
+
# render disk space usage
#
# Draws the amount of disk space used on each volume relative to the
@@ -496,8 +620,8 @@ def render_charts(ctx, options, clip, trace, curr_y, w, h, sec_w):
return curr_y
-def render_processes_chart(ctx, options, trace, curr_y, w, h, sec_w):
- chart_rect = [off_x, curr_y+header_h, w, h - curr_y - 1 * off_y - header_h ]
+def render_processes_chart(ctx, options, trace, curr_y, width, h, sec_w):
+ chart_rect = [off_x, curr_y+header_h, width, h - curr_y - 1 * off_y - header_h ]
draw_legend_box (ctx, "Configure", \
TASK_COLOR_CONFIGURE, off_x , curr_y + 45, leg_s)
@@ -522,8 +646,9 @@ def render_processes_chart(ctx, options, trace, curr_y, w, h, sec_w):
offset = trace.min or min(trace.start.keys())
for start in sorted(trace.start.keys()):
for process in sorted(trace.start[start]):
+ elapsed_time = trace.processes[process][1] - start
if not options.app_options.show_all and \
- trace.processes[process][1] - start < options.app_options.mintime:
+ elapsed_time < options.app_options.mintime:
continue
task = process.split(":")[1]
@@ -532,14 +657,23 @@ def render_processes_chart(ctx, options, trace, curr_y, w, h, sec_w):
#print(s)
x = chart_rect[0] + (start - offset) * sec_w
- w = ((trace.processes[process][1] - start) * sec_w)
+ w = elapsed_time * sec_w
+
+ def set_alfa(color, alfa):
+ clist = list(color)
+ clist[-1] = alfa
+ return tuple(clist)
#print("proc at %s %s %s %s" % (x, y, w, proc_h))
col = None
if task == "do_compile":
col = TASK_COLOR_COMPILE
+ elif "do_compile" in task:
+ col = set_alfa(TASK_COLOR_COMPILE, 0.25)
elif task == "do_configure":
col = TASK_COLOR_CONFIGURE
+ elif "do_configure" in task:
+ col = set_alfa(TASK_COLOR_CONFIGURE, 0.25)
elif task == "do_install":
col = TASK_COLOR_INSTALL
elif task == "do_populate_sysroot":
@@ -557,7 +691,10 @@ def render_processes_chart(ctx, options, trace, curr_y, w, h, sec_w):
draw_fill_rect(ctx, col, (x, y, w, proc_h))
draw_rect(ctx, PROC_BORDER_COLOR, (x, y, w, proc_h))
- draw_label_in_box(ctx, PROC_TEXT_COLOR, process, x, y + proc_h - 4, w, proc_h)
+ # Show elapsed time for each task
+ process = "%ds %s" % (elapsed_time, process)
+ draw_label_in_box(ctx, PROC_TEXT_COLOR, process, x, y + proc_h - 4, w, width)
+
y = y + proc_h
return curr_y
@@ -698,7 +835,7 @@ def draw_processes_recursively(ctx, proc, proc_tree, y, proc_h, rect, clip) :
cmdString = proc.cmd
else:
cmdString = ''
- if (OPTIONS.show_pid or OPTIONS.show_all) and ipid is not 0:
+ if (OPTIONS.show_pid or OPTIONS.show_all) and ipid != 0:
cmdString = cmdString + " [" + str(ipid // 1000) + "]"
if OPTIONS.show_all:
if proc.args:
@@ -796,7 +933,7 @@ class CumlSample:
if self.color is None:
i = self.next() % HSV_MAX_MOD
h = 0.0
- if i is not 0:
+ if i != 0:
h = (1.0 * i) / HSV_MAX_MOD
s = 0.5
v = 1.0
diff --git a/scripts/pybootchartgui/pybootchartgui/parsing.py b/scripts/pybootchartgui/pybootchartgui/parsing.py
index b42dac6b88..63a53b6b88 100644
--- a/scripts/pybootchartgui/pybootchartgui/parsing.py
+++ b/scripts/pybootchartgui/pybootchartgui/parsing.py
@@ -49,6 +49,9 @@ class Trace:
self.parent_map = None
self.mem_stats = []
self.monitor_disk = None
+ self.cpu_pressure = []
+ self.io_pressure = []
+ self.mem_pressure = []
self.times = [] # Always empty, but expected by draw.py when drawing system charts.
if len(paths):
@@ -128,7 +131,7 @@ class Trace:
def compile(self, writer):
def find_parent_id_for(pid):
- if pid is 0:
+ if pid == 0:
return 0
ppid = self.parent_map.get(pid)
if ppid:
@@ -554,6 +557,29 @@ def _parse_monitor_disk_log(file):
return disk_stats
+def _parse_pressure_logs(file, filename):
+ """
+ Parse file for "some" pressure with 'avg10', 'avg60' 'avg300' and delta total values
+ (in that order) directly stored on one line for both CPU and IO, based on filename.
+ """
+ pressure_stats = []
+ if filename == "cpu.log":
+ SamplingClass = CPUPressureSample
+ elif filename == "memory.log":
+ SamplingClass = MemPressureSample
+ else:
+ SamplingClass = IOPressureSample
+ for time, lines in _parse_timed_blocks(file):
+ for line in lines:
+ if not line: continue
+ tokens = line.split()
+ avg10 = float(tokens[0])
+ avg60 = float(tokens[1])
+ avg300 = float(tokens[2])
+ delta = float(tokens[3])
+ pressure_stats.append(SamplingClass(time, avg10, avg60, avg300, delta))
+
+ return pressure_stats
# if we boot the kernel with: initcall_debug printk.time=1 we can
# get all manner of interesting data from the dmesg output
@@ -741,6 +767,13 @@ def _do_parse(writer, state, filename, file):
state.cmdline = _parse_cmdline_log(writer, file)
elif name == "monitor_disk.log":
state.monitor_disk = _parse_monitor_disk_log(file)
+ #pressure logs are in a subdirectory
+ elif name == "cpu.log":
+ state.cpu_pressure = _parse_pressure_logs(file, name)
+ elif name == "io.log":
+ state.io_pressure = _parse_pressure_logs(file, name)
+ elif name == "memory.log":
+ state.mem_pressure = _parse_pressure_logs(file, name)
elif not filename.endswith('.log'):
_parse_bitbake_buildstats(writer, state, filename, file)
t2 = time.process_time()
diff --git a/scripts/pybootchartgui/pybootchartgui/samples.py b/scripts/pybootchartgui/pybootchartgui/samples.py
index 9fc309b3ab..a70d8a5a28 100644
--- a/scripts/pybootchartgui/pybootchartgui/samples.py
+++ b/scripts/pybootchartgui/pybootchartgui/samples.py
@@ -37,6 +37,31 @@ class CPUSample:
return str(self.time) + "\t" + str(self.user) + "\t" + \
str(self.sys) + "\t" + str(self.io) + "\t" + str (self.swap)
+class CPUPressureSample:
+ def __init__(self, time, avg10, avg60, avg300, deltaTotal):
+ self.time = time
+ self.avg10 = avg10
+ self.avg60 = avg60
+ self.avg300 = avg300
+ self.deltaTotal = deltaTotal
+
+class IOPressureSample:
+ def __init__(self, time, avg10, avg60, avg300, deltaTotal):
+ self.time = time
+ self.avg10 = avg10
+ self.avg60 = avg60
+ self.avg300 = avg300
+ self.deltaTotal = deltaTotal
+
+class MemPressureSample:
+ def __init__(self, time, avg10, avg60, avg300, deltaTotal):
+ self.time = time
+ self.avg10 = avg10
+ self.avg60 = avg60
+ self.avg300 = avg300
+ self.deltaTotal = deltaTotal
+
+
class MemSample:
used_values = ('MemTotal', 'MemFree', 'Buffers', 'Cached', 'SwapTotal', 'SwapFree',)
diff --git a/scripts/pythondeps b/scripts/pythondeps
index be21dd84eb..48277ec28a 100755
--- a/scripts/pythondeps
+++ b/scripts/pythondeps
@@ -1,5 +1,7 @@
#!/usr/bin/env python3
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: GPL-2.0-only
#
# Determine dependencies of python scripts or available python modules in a search path.
diff --git a/scripts/relocate_sdk.py b/scripts/relocate_sdk.py
index 4ed8bfc0d1..8a728720ba 100755
--- a/scripts/relocate_sdk.py
+++ b/scripts/relocate_sdk.py
@@ -104,11 +104,12 @@ def change_interpreter(elf_file_name):
if (len(new_dl_path) >= p_filesz):
print("ERROR: could not relocate %s, interp size = %i and %i is needed." \
% (elf_file_name, p_memsz, len(new_dl_path) + 1))
- break
+ return False
dl_path = new_dl_path + b("\0") * (p_filesz - len(new_dl_path))
f.seek(p_offset)
f.write(dl_path)
break
+ return True
def change_dl_sysdirs(elf_file_name):
if arch == 32:
@@ -222,6 +223,7 @@ else:
executables_list = sys.argv[3:]
+errors = False
for e in executables_list:
perms = os.stat(e)[stat.ST_MODE]
if os.access(e, os.W_OK|os.R_OK):
@@ -247,7 +249,8 @@ for e in executables_list:
arch = get_arch()
if arch:
parse_elf_header()
- change_interpreter(e)
+ if not change_interpreter(e):
+ errors = True
change_dl_sysdirs(e)
""" change permissions back """
@@ -260,3 +263,6 @@ for e in executables_list:
print("New file size for %s is different. Looks like a relocation error!", e)
sys.exit(-1)
+if errors:
+ print("Relocation of one or more executables failed.")
+ sys.exit(-1)
diff --git a/scripts/rpm2cpio.sh b/scripts/rpm2cpio.sh
index 7cd771bbe7..8199b43784 100755
--- a/scripts/rpm2cpio.sh
+++ b/scripts/rpm2cpio.sh
@@ -7,7 +7,7 @@ fatal() {
}
pkg="$1"
-[ -n "$pkg" -a -e "$pkg" ] ||
+[ -n "$pkg" ] && [ -e "$pkg" ] ||
fatal "No package supplied"
_dd() {
@@ -16,14 +16,23 @@ _dd() {
}
calcsize() {
+
+ case "$(_dd $1 bs=4 count=1 | tr -d '\0')" in
+ "$(printf '\216\255\350')"*) ;; # '\x8e\xad\xe8'
+ *) fatal "File doesn't look like rpm: $pkg" ;;
+ esac
+
offset=$(($1 + 8))
local i b b0 b1 b2 b3 b4 b5 b6 b7
i=0
while [ $i -lt 8 ]; do
- b=$(_dd $(($offset + $i)) bs=1 count=1; echo X)
- b=${b%X}
+ # add . to not loose \n
+ # strip \0 as it gets dropped with warning otherwise
+ b="$(_dd $(($offset + $i)) bs=1 count=1 | tr -d '\0' ; echo .)"
+ b=${b%.} # strip . again
+
[ -z "$b" ] &&
b="0" ||
b="$(exec printf '%u\n' "'$b")"
@@ -35,7 +44,7 @@ calcsize() {
offset=$(($offset + $rsize))
}
-case "$(_dd 0 bs=8 count=1)" in
+case "$(_dd 0 bs=4 count=1 | tr -d '\0')" in
"$(printf '\355\253\356\333')"*) ;; # '\xed\xab\xee\xdb'
*) fatal "File doesn't look like rpm: $pkg" ;;
esac
@@ -46,10 +55,11 @@ sigsize=$rsize
calcsize $(($offset + (8 - ($sigsize % 8)) % 8))
hdrsize=$rsize
-case "$(_dd $offset bs=3 count=1)" in
- "$(printf '\102\132')"*) _dd $offset | bunzip2 ;; # '\x42\x5a'
- "$(printf '\037\213')"*) _dd $offset | gunzip ;; # '\x1f\x8b'
- "$(printf '\375\067')"*) _dd $offset | xzcat ;; # '\xfd\x37'
- "$(printf '\135\000')"*) _dd $offset | unlzma ;; # '\x5d\x00'
- *) fatal "Unrecognized rpm file: $pkg" ;;
+case "$(_dd $offset bs=2 count=1 | tr -d '\0')" in
+ "$(printf '\102\132')") _dd $offset | bunzip2 ;; # '\x42\x5a'
+ "$(printf '\037\213')") _dd $offset | gunzip ;; # '\x1f\x8b'
+ "$(printf '\375\067')") _dd $offset | xzcat ;; # '\xfd\x37'
+ "$(printf '\135')") _dd $offset | unlzma ;; # '\x5d\x00'
+ "$(printf '\050\265')") _dd $offset | unzstd ;; # '\x28\xb5'
+ *) fatal "Unrecognized payload compression format in rpm file: $pkg" ;;
esac
diff --git a/scripts/runqemu b/scripts/runqemu
index 6e1f073ed2..69cd44864e 100755
--- a/scripts/runqemu
+++ b/scripts/runqemu
@@ -66,6 +66,7 @@ of the following environment variables (in any order):
MACHINE - the machine name (optional, autodetected from KERNEL filename if unspecified)
Simplified QEMU command-line options can be passed with:
nographic - disable video console
+ nonetwork - disable network connectivity
novga - Disable VGA emulation completely
sdl - choose the SDL UI frontend
gtk - choose the Gtk UI frontend
@@ -82,6 +83,8 @@ of the following environment variables (in any order):
kvm-vhost - enable KVM with vhost when running x86/x86_64 (VT-capable CPU required)
publicvnc - enable a VNC server open to all hosts
audio - enable audio
+ guestagent - enable guest agent communication
+ qmp=<path> - create a QMP socket (defaults to unix:qmp.sock if unspecified)
[*/]ovmf* - OVMF firmware file or base name for booting with UEFI
tcpserial=<port> - specify tcp serial port number
qemuparams=<xyz> - specify custom parameters to QEMU
@@ -116,10 +119,10 @@ def check_tun():
if not os.access(dev_tun, os.W_OK):
raise RunQemuError("TUN control device %s is not writable, please fix (e.g. sudo chmod 666 %s)" % (dev_tun, dev_tun))
-def get_first_file(cmds):
- """Return first file found in wildcard cmds"""
- for cmd in cmds:
- all_files = glob.glob(cmd)
+def get_first_file(globs):
+ """Return first file found in wildcard globs"""
+ for g in globs:
+ all_files = glob.glob(g)
if all_files:
for f in all_files:
if not os.path.isdir(f):
@@ -177,6 +180,7 @@ class BaseConfig(object):
self.serialconsole = False
self.serialstdio = False
self.nographic = False
+ self.nonetwork = False
self.sdl = False
self.gtk = False
self.gl = False
@@ -195,12 +199,14 @@ class BaseConfig(object):
self.snapshot = False
self.wictypes = ('wic', 'wic.vmdk', 'wic.qcow2', 'wic.vdi', "wic.vhd", "wic.vhdx")
self.fstypes = ('ext2', 'ext3', 'ext4', 'jffs2', 'nfs', 'btrfs',
- 'cpio.gz', 'cpio', 'ramfs', 'tar.bz2', 'tar.gz')
+ 'cpio.gz', 'cpio', 'ramfs', 'tar.bz2', 'tar.gz',
+ 'squashfs', 'squashfs-xz', 'squashfs-lzo',
+ 'squashfs-lz4', 'squashfs-zst')
self.vmtypes = ('hddimg', 'iso')
self.fsinfo = {}
self.network_device = "-device e1000,netdev=net0,mac=@MAC@"
self.cmdline_ip_slirp = "ip=dhcp"
- self.cmdline_ip_tap = "ip=192.168.7.@CLIENT@::192.168.7.@GATEWAY@:255.255.255.0::eth0:off:8.8.8.8"
+ self.cmdline_ip_tap = "ip=192.168.7.@CLIENT@::192.168.7.@GATEWAY@:255.255.255.0::eth0:off:8.8.8.8 net.ifnames=0"
# Use different mac section for tap and slirp to avoid
# conflicts, e.g., when one is running with tap, the other is
# running with slirp.
@@ -210,11 +216,15 @@ class BaseConfig(object):
self.mac_tap = "52:54:00:12:34:"
self.mac_slirp = "52:54:00:12:35:"
# pid of the actual qemu process
- self.qemupid = None
+ self.qemu_environ = os.environ.copy()
+ self.qemuprocess = None
# avoid cleanup twice
self.cleaned = False
# Files to cleanup after run
self.cleanup_files = []
+ self.qmp = None
+ self.guest_agent = False
+ self.guest_agent_sockpath = '/tmp/qga.sock'
def acquire_taplock(self, error=True):
logger.debug("Acquiring lockfile %s..." % self.taplock)
@@ -361,11 +371,11 @@ class BaseConfig(object):
if p.endswith('.qemuboot.conf'):
self.qemuboot = p
self.qbconfload = True
- elif re.search('\.bin$', p) or re.search('bzImage', p) or \
+ elif re.search('\\.bin$', p) or re.search('bzImage', p) or \
re.search('zImage', p) or re.search('vmlinux', p) or \
re.search('fitImage', p) or re.search('uImage', p):
self.kernel = p
- elif os.path.exists(p) and (not os.path.isdir(p)) and '-image-' in os.path.basename(p):
+ elif os.path.isfile(p) and ('-image-' in os.path.basename(p) or '.rootfs.' in os.path.basename(p)):
self.rootfs = p
# Check filename against self.fstypes can handle <file>.cpio.gz,
# otherwise, its type would be "gz", which is incorrect.
@@ -375,18 +385,24 @@ class BaseConfig(object):
fst = t
break
if not fst:
- m = re.search('.*\.(.*)$', self.rootfs)
+ m = re.search('.*\\.(.*)$', self.rootfs)
if m:
fst = m.group(1)
if fst:
self.check_arg_fstype(fst)
- qb = re.sub('\.' + fst + "$", '', self.rootfs)
- qb = '%s%s' % (re.sub('\.rootfs$', '', qb), '.qemuboot.conf')
+ qb = re.sub('\\.' + fst + "$", '.qemuboot.conf', self.rootfs)
if os.path.exists(qb):
self.qemuboot = qb
self.qbconfload = True
else:
- logger.warning("%s doesn't exist" % qb)
+ logger.warning("%s doesn't exist, will try to remove '.rootfs' from filename" % qb)
+ # They to remove .rootfs (IMAGE_NAME_SUFFIX) as well
+ qb = re.sub('\\.rootfs.qemuboot.conf$', '.qemuboot.conf', qb)
+ if os.path.exists(qb):
+ self.qemuboot = qb
+ self.qbconfload = True
+ else:
+ logger.warning("%s doesn't exist" % qb)
else:
raise RunQemuError("Can't find FSTYPE from: %s" % p)
@@ -420,6 +436,7 @@ class BaseConfig(object):
# are there other scenarios in which we need to support being
# invoked by bitbake?
deploy = self.get('DEPLOY_DIR_IMAGE')
+ image_link_name = self.get('IMAGE_LINK_NAME')
bbchild = deploy and self.get('OE_TMPDIR')
if bbchild:
self.set_machine_deploy_dir(arg, deploy)
@@ -444,31 +461,24 @@ class BaseConfig(object):
else:
logger.error("%s not a directory valid DEPLOY_DIR_IMAGE" % deploy_dir_image)
self.set("MACHINE", arg)
+ if not image_link_name:
+ s = re.search('^IMAGE_LINK_NAME="(.*)"', self.bitbake_e, re.M)
+ if s:
+ image_link_name = s.group(1)
+ self.set("IMAGE_LINK_NAME", image_link_name)
+ logger.debug('Using IMAGE_LINK_NAME = "%s"' % image_link_name)
def set_dri_path(self):
- # As runqemu can be run within bitbake (when using testimage, for example),
- # we need to ensure that we run host pkg-config, and that it does not
- # get mis-directed to native build paths set by bitbake.
- try:
- del os.environ['PKG_CONFIG_PATH']
- del os.environ['PKG_CONFIG_DIR']
- del os.environ['PKG_CONFIG_LIBDIR']
- del os.environ['PKG_CONFIG_SYSROOT_DIR']
- except KeyError:
- pass
- try:
- dripath = subprocess.check_output("PATH=/bin:/usr/bin:$PATH pkg-config --variable=dridriverdir dri", shell=True)
- except subprocess.CalledProcessError as e:
- raise RunQemuError("Could not determine the path to dri drivers on the host via pkg-config.\nPlease install Mesa development files (particularly, dri.pc) on the host machine.")
- os.environ['LIBGL_DRIVERS_PATH'] = dripath.decode('utf-8').strip()
-
- # This preloads uninative libc pieces and therefore ensures that RPATH/RUNPATH
- # in host mesa drivers doesn't trick uninative into loading host libc.
- preload_items = ['libdl.so.2', 'librt.so.1', 'libpthread.so.0']
- uninative_path = os.path.dirname(self.get("UNINATIVE_LOADER"))
- if os.path.exists(uninative_path):
- preload_paths = [os.path.join(uninative_path, i) for i in preload_items]
- os.environ['LD_PRELOAD'] = " ".join(preload_paths)
+ drivers_path = os.path.join(self.bindir_native, '../lib/dri')
+ if not os.path.exists(drivers_path) or not os.listdir(drivers_path):
+ raise RunQemuError("""
+qemu has been built without opengl support and accelerated graphics support is not available.
+To enable it, add:
+DISTRO_FEATURES_NATIVE:append = " opengl"
+DISTRO_FEATURES_NATIVESDK:append = " opengl"
+to your build configuration.
+""")
+ self.qemu_environ['LIBGL_DRIVERS_PATH'] = drivers_path
def check_args(self):
for debug in ("-d", "--debug"):
@@ -482,8 +492,8 @@ class BaseConfig(object):
sys.argv.remove(quiet)
if 'gl' not in sys.argv[1:] and 'gl-es' not in sys.argv[1:]:
- os.environ['SDL_RENDER_DRIVER'] = 'software'
- os.environ['SDL_FRAMEBUFFER_ACCELERATION'] = 'false'
+ self.qemu_environ['SDL_RENDER_DRIVER'] = 'software'
+ self.qemu_environ['SDL_FRAMEBUFFER_ACCELERATION'] = 'false'
unknown_arg = ""
for arg in sys.argv[1:]:
@@ -491,13 +501,15 @@ class BaseConfig(object):
self.check_arg_fstype(arg)
elif arg == 'nographic':
self.nographic = True
+ elif arg == "nonetwork":
+ self.nonetwork = True
elif arg == 'sdl':
self.sdl = True
elif arg == 'gtk':
self.gtk = True
elif arg == 'gl':
self.gl = True
- elif 'gl-es' in sys.argv[1:]:
+ elif arg == 'gl-es':
self.gl_es = True
elif arg == 'egl-headless':
self.egl_headless = True
@@ -524,6 +536,14 @@ class BaseConfig(object):
elif arg == 'publicvnc':
self.publicvnc = True
self.qemu_opt_script += ' -vnc :0'
+ elif arg == 'guestagent':
+ self.guest_agent = True
+ elif arg == "qmp":
+ self.qmp = "unix:qmp.sock"
+ elif arg.startswith("qmp="):
+ self.qmp = arg[len('qmp='):]
+ elif arg.startswith('guestagent-sockpath='):
+ self.guest_agent_sockpath = '%s' % arg[len('guestagent-sockpath='):]
elif arg.startswith('tcpserial='):
self.tcpserial_portnum = '%s' % arg[len('tcpserial='):]
elif arg.startswith('qemuparams='):
@@ -555,11 +575,18 @@ class BaseConfig(object):
self.check_arg_machine(unknown_arg)
if not (self.get('DEPLOY_DIR_IMAGE') or self.qbconfload):
- self.load_bitbake_env()
+ self.load_bitbake_env(target=self.rootfs)
s = re.search('^DEPLOY_DIR_IMAGE="(.*)"', self.bitbake_e, re.M)
if s:
self.set("DEPLOY_DIR_IMAGE", s.group(1))
+ if not self.get('IMAGE_LINK_NAME') and self.rootfs:
+ s = re.search('^IMAGE_LINK_NAME="(.*)"', self.bitbake_e, re.M)
+ if s:
+ image_link_name = s.group(1)
+ self.set("IMAGE_LINK_NAME", image_link_name)
+ logger.debug('Using IMAGE_LINK_NAME = "%s"' % image_link_name)
+
def check_kvm(self):
"""Check kvm and kvm-host"""
if not (self.kvm_enabled or self.vhost_enabled):
@@ -589,11 +616,6 @@ class BaseConfig(object):
if os.access(dev_kvm, os.W_OK|os.R_OK):
self.qemu_opt_script += ' -enable-kvm'
- if self.get('MACHINE') == "qemux86":
- # Workaround for broken APIC window on pre 4.15 host kernels which causes boot hangs
- # See YOCTO #12301
- # On 64 bit we use x2apic
- self.kernel_cmdline_script += " clocksource=kvm-clock hpet=disable noapic nolapic"
else:
logger.error("You have no read or write permission on /dev/kvm.")
logger.error("Please change the ownership of this file as described at:")
@@ -634,10 +656,10 @@ class BaseConfig(object):
elif fsflag == 'kernel-in-fs':
wic_fs = False
else:
- logger.warn('Unknown flag "%s:%s" in QB_FSINFO', fstype, fsflag)
+ logger.warning('Unknown flag "%s:%s" in QB_FSINFO', fstype, fsflag)
continue
else:
- logger.warn('QB_FSINFO is not supported for image type "%s"', fstype)
+ logger.warning('QB_FSINFO is not supported for image type "%s"', fstype)
continue
if fstype in self.fsinfo:
@@ -670,16 +692,16 @@ class BaseConfig(object):
if self.rootfs and not os.path.exists(self.rootfs):
# Lazy rootfs
- self.rootfs = "%s/%s-%s.%s" % (self.get('DEPLOY_DIR_IMAGE'),
- self.rootfs, self.get('MACHINE'),
+ self.rootfs = "%s/%s.%s" % (self.get('DEPLOY_DIR_IMAGE'),
+ self.get('IMAGE_LINK_NAME'),
self.fstype)
elif not self.rootfs:
- cmd_name = '%s/%s*.%s' % (self.get('DEPLOY_DIR_IMAGE'), self.get('IMAGE_NAME'), self.fstype)
- cmd_link = '%s/%s*.%s' % (self.get('DEPLOY_DIR_IMAGE'), self.get('IMAGE_LINK_NAME'), self.fstype)
- cmds = (cmd_name, cmd_link)
- self.rootfs = get_first_file(cmds)
+ glob_name = '%s/%s*.%s' % (self.get('DEPLOY_DIR_IMAGE'), self.get('IMAGE_NAME'), self.fstype)
+ glob_link = '%s/%s*.%s' % (self.get('DEPLOY_DIR_IMAGE'), self.get('IMAGE_LINK_NAME'), self.fstype)
+ globs = (glob_name, glob_link)
+ self.rootfs = get_first_file(globs)
if not self.rootfs:
- raise RunQemuError("Failed to find rootfs: %s or %s" % cmds)
+ raise RunQemuError("Failed to find rootfs: %s or %s" % globs)
if not os.path.exists(self.rootfs):
raise RunQemuError("Can't find rootfs: %s" % self.rootfs)
@@ -739,10 +761,10 @@ class BaseConfig(object):
kernel_match_name = "%s/%s" % (deploy_dir_image, kernel_name)
kernel_match_link = "%s/%s" % (deploy_dir_image, self.get('KERNEL_IMAGETYPE'))
kernel_startswith = "%s/%s*" % (deploy_dir_image, self.get('KERNEL_IMAGETYPE'))
- cmds = (kernel_match_name, kernel_match_link, kernel_startswith)
- self.kernel = get_first_file(cmds)
+ globs = (kernel_match_name, kernel_match_link, kernel_startswith)
+ self.kernel = get_first_file(globs)
if not self.kernel:
- raise RunQemuError('KERNEL not found: %s, %s or %s' % cmds)
+ raise RunQemuError('KERNEL not found: %s, %s or %s' % globs)
if not os.path.exists(self.kernel):
raise RunQemuError("KERNEL %s not found" % self.kernel)
@@ -759,13 +781,13 @@ class BaseConfig(object):
dtb = self.get('QB_DTB')
if dtb:
deploy_dir_image = self.get('DEPLOY_DIR_IMAGE')
- cmd_match = "%s/%s" % (deploy_dir_image, dtb)
- cmd_startswith = "%s/%s*" % (deploy_dir_image, dtb)
- cmd_wild = "%s/*.dtb" % deploy_dir_image
- cmds = (cmd_match, cmd_startswith, cmd_wild)
- self.dtb = get_first_file(cmds)
+ glob_match = "%s/%s" % (deploy_dir_image, dtb)
+ glob_startswith = "%s/%s*" % (deploy_dir_image, dtb)
+ glob_wild = "%s/*.dtb" % deploy_dir_image
+ globs = (glob_match, glob_startswith, glob_wild)
+ self.dtb = get_first_file(globs)
if not os.path.exists(self.dtb):
- raise RunQemuError('DTB not found: %s, %s or %s' % cmds)
+ raise RunQemuError('DTB not found: %s, %s or %s' % globs)
def check_bios(self):
"""Check and set bios"""
@@ -816,7 +838,7 @@ class BaseConfig(object):
self.set('QB_MEM', qb_mem)
mach = self.get('MACHINE')
- if not mach.startswith(('qemumips', 'qemux86')):
+ if not mach.startswith(('qemumips', 'qemux86', 'qemuloongarch64')):
self.kernel_cmdline_script += ' mem=%s' % self.get('QB_MEM').replace('-m','').strip() + 'M'
self.qemu_opt_script += ' %s' % self.get('QB_MEM')
@@ -828,11 +850,11 @@ class BaseConfig(object):
if self.get('QB_TCPSERIAL_OPT'):
self.qemu_opt_script += ' ' + self.get('QB_TCPSERIAL_OPT').replace('@PORT@', port)
else:
- self.qemu_opt_script += ' -serial tcp:127.0.0.1:%s' % port
+ self.qemu_opt_script += ' -serial tcp:127.0.0.1:%s,nodelay=on' % port
if len(ports) > 1:
for port in ports[1:]:
- self.qemu_opt_script += ' -serial tcp:127.0.0.1:%s' % port
+ self.qemu_opt_script += ' -serial tcp:127.0.0.1:%s,nodelay=on' % port
def check_and_set(self):
"""Check configs sanity and set when needed"""
@@ -875,8 +897,10 @@ class BaseConfig(object):
machine = self.get('MACHINE')
if not machine:
machine = os.path.basename(deploy_dir_image)
- self.qemuboot = "%s/%s-%s.qemuboot.conf" % (deploy_dir_image,
- self.rootfs, machine)
+ if not self.get('IMAGE_LINK_NAME'):
+ raise RunQemuError("IMAGE_LINK_NAME wasn't set to find corresponding .qemuboot.conf file")
+ self.qemuboot = "%s/%s.qemuboot.conf" % (deploy_dir_image,
+ self.get('IMAGE_LINK_NAME'))
else:
cmd = 'ls -t %s/*.qemuboot.conf' % deploy_dir_image
logger.debug('Running %s...' % cmd)
@@ -997,19 +1021,16 @@ class BaseConfig(object):
if self.slirp_enabled:
self.nfs_server = '10.0.2.2'
else:
- self.nfs_server = '192.168.7.1'
-
- # Figure out a new nfs_instance to allow multiple qemus running.
- ps = subprocess.check_output(("ps", "auxww")).decode('utf-8')
- pattern = '/bin/unfsd .* -i .*\.pid -e .*/exports([0-9]+) '
- all_instances = re.findall(pattern, ps, re.M)
- if all_instances:
- all_instances.sort(key=int)
- self.nfs_instance = int(all_instances.pop()) + 1
+ self.nfs_server = '192.168.7.@GATEWAY@'
- nfsd_port = 3049 + 2 * self.nfs_instance
- mountd_port = 3048 + 2 * self.nfs_instance
+ nfsd_port = 3048 + self.nfs_instance
+ lockdir = "/tmp/qemu-port-locks"
+ self.make_lock_dir(lockdir)
+ while not self.check_free_port('localhost', nfsd_port, lockdir):
+ self.nfs_instance += 1
+ nfsd_port += 1
+ mountd_port = nfsd_port
# Export vars for runqemu-export-rootfs
export_dict = {
'NFS_INSTANCE': self.nfs_instance,
@@ -1020,7 +1041,11 @@ class BaseConfig(object):
# Use '%s' since they are integers
os.putenv(k, '%s' % v)
- self.unfs_opts="nfsvers=3,port=%s,tcp,mountport=%s" % (nfsd_port, mountd_port)
+ qb_nfsrootfs_extra_opt = self.get("QB_NFSROOTFS_EXTRA_OPT")
+ if qb_nfsrootfs_extra_opt and not qb_nfsrootfs_extra_opt.startswith(","):
+ qb_nfsrootfs_extra_opt = "," + qb_nfsrootfs_extra_opt
+
+ self.unfs_opts="nfsvers=3,port=%s,tcp,mountport=%s%s" % (nfsd_port, mountd_port, qb_nfsrootfs_extra_opt)
# Extract .tar.bz2 or .tar.bz if no nfs dir
if not (self.rootfs and os.path.isdir(self.rootfs)):
@@ -1043,7 +1068,7 @@ class BaseConfig(object):
cmd = ('runqemu-extract-sdk', src, dest)
logger.info('Running %s...' % str(cmd))
if subprocess.call(cmd) != 0:
- raise RunQemuError('Failed to run %s' % cmd)
+ raise RunQemuError('Failed to run %s' % str(cmd))
self.rootfs = dest
self.cleanup_files.append(self.rootfs)
self.cleanup_files.append('%s.pseudo_state' % self.rootfs)
@@ -1052,14 +1077,32 @@ class BaseConfig(object):
cmd = ('runqemu-export-rootfs', 'start', self.rootfs)
logger.info('Running %s...' % str(cmd))
if subprocess.call(cmd) != 0:
- raise RunQemuError('Failed to run %s' % cmd)
+ raise RunQemuError('Failed to run %s' % str(cmd))
self.nfs_running = True
+ def setup_cmd(self):
+ cmd = self.get('QB_SETUP_CMD')
+ if cmd != '':
+ logger.info('Running setup command %s' % str(cmd))
+ if subprocess.call(cmd, shell=True) != 0:
+ raise RunQemuError('Failed to run %s' % str(cmd))
+
def setup_net_bridge(self):
self.set('NETWORK_CMD', '-netdev bridge,br=%s,id=net0,helper=%s -device virtio-net-pci,netdev=net0 ' % (
self.net_bridge, os.path.join(self.bindir_native, 'qemu-oe-bridge-helper')))
+ def make_lock_dir(self, lockdir):
+ if not os.path.exists(lockdir):
+ # There might be a race issue when multi runqemu processess are
+ # running at the same time.
+ try:
+ os.mkdir(lockdir)
+ os.chmod(lockdir, 0o777)
+ except FileExistsError:
+ pass
+ return
+
def setup_slirp(self):
"""Setup user networking"""
@@ -1069,7 +1112,7 @@ class BaseConfig(object):
logger.info("Network configuration:%s", netconf)
self.kernel_cmdline_script += netconf
# Port mapping
- hostfwd = ",hostfwd=tcp::2222-:22,hostfwd=tcp::2323-:23"
+ hostfwd = ",hostfwd=tcp:127.0.0.1:2222-:22,hostfwd=tcp:127.0.0.1:2323-:23"
qb_slirp_opt_default = "-netdev user,id=net0%s,tftp=%s" % (hostfwd, self.get('DEPLOY_DIR_IMAGE'))
qb_slirp_opt = self.get('QB_SLIRP_OPT') or qb_slirp_opt_default
# Figure out the port
@@ -1078,14 +1121,7 @@ class BaseConfig(object):
mac = 2
lockdir = "/tmp/qemu-port-locks"
- if not os.path.exists(lockdir):
- # There might be a race issue when multi runqemu processess are
- # running at the same time.
- try:
- os.mkdir(lockdir)
- os.chmod(lockdir, 0o777)
- except FileExistsError:
- pass
+ self.make_lock_dir(lockdir)
# Find a free port to avoid conflicts
for p in ports[:]:
@@ -1125,20 +1161,17 @@ class BaseConfig(object):
logger.error("ip: %s" % ip)
raise OEPathError("runqemu-ifup, runqemu-ifdown or ip not found")
- if not os.path.exists(lockdir):
- # There might be a race issue when multi runqemu processess are
- # running at the same time.
- try:
- os.mkdir(lockdir)
- os.chmod(lockdir, 0o777)
- except FileExistsError:
- pass
+ self.make_lock_dir(lockdir)
cmd = (ip, 'link')
logger.debug('Running %s...' % str(cmd))
ip_link = subprocess.check_output(cmd).decode('utf-8')
# Matches line like: 6: tap0: <foo>
- possibles = re.findall('^[0-9]+: +(tap[0-9]+): <.*', ip_link, re.M)
+ oe_tap_name = 'tap'
+ if 'OE_TAP_NAME' in os.environ:
+ oe_tap_name = os.environ['OE_TAP_NAME']
+ tap_re = '^[0-9]+: +(' + oe_tap_name + '[0-9]+): <.*'
+ possibles = re.findall(tap_re, ip_link, re.M)
tap = ""
for p in possibles:
lockfile = os.path.join(lockdir, p)
@@ -1161,7 +1194,7 @@ class BaseConfig(object):
gid = os.getgid()
uid = os.getuid()
logger.info("Setting up tap interface under sudo")
- cmd = ('sudo', self.qemuifup, str(uid), str(gid), self.bindir_native)
+ cmd = ('sudo', self.qemuifup, str(gid))
try:
tap = subprocess.check_output(cmd).decode('utf-8').strip()
except subprocess.CalledProcessError as e:
@@ -1177,7 +1210,7 @@ class BaseConfig(object):
logger.error("Failed to setup tap device. Run runqemu-gen-tapdevs to manually create.")
sys.exit(1)
self.tap = tap
- tapnum = int(tap[3:])
+ tapnum = int(tap[len(oe_tap_name):])
gateway = tapnum * 2 + 1
client = gateway + 1
if self.fstype == 'nfs':
@@ -1185,6 +1218,7 @@ class BaseConfig(object):
netconf = " " + self.cmdline_ip_tap
netconf = netconf.replace('@CLIENT@', str(client))
netconf = netconf.replace('@GATEWAY@', str(gateway))
+ self.nfs_server = self.nfs_server.replace('@GATEWAY@', str(gateway))
logger.info("Network configuration:%s", netconf)
self.kernel_cmdline_script += netconf
mac = "%s%02x" % (self.mac_tap, client)
@@ -1200,7 +1234,8 @@ class BaseConfig(object):
self.set('NETWORK_CMD', '%s %s' % (self.network_device.replace('@MAC@', mac), qemu_tap_opt))
def setup_network(self):
- if self.get('QB_NET') == 'none':
+ if self.nonetwork or self.get('QB_NET') == 'none':
+ self.set('NETWORK_CMD', '-nic none')
return
if sys.stdin.isatty():
self.saved_stty = subprocess.check_output(("stty", "-g")).decode('utf-8').strip()
@@ -1299,7 +1334,7 @@ class BaseConfig(object):
"""attempt to determine the appropriate qemu-system binary"""
mach = self.get('MACHINE')
if not mach:
- search = '.*(qemux86-64|qemux86|qemuarm64|qemuarm|qemumips64|qemumips64el|qemumipsel|qemumips|qemuppc).*'
+ search = '.*(qemux86-64|qemux86|qemuarm64|qemuarm|qemuloongarch64|qemumips64|qemumips64el|qemumipsel|qemumips|qemuppc).*'
if self.rootfs:
match = re.match(search, self.rootfs)
if match:
@@ -1322,6 +1357,8 @@ class BaseConfig(object):
qbsys = 'x86_64'
elif mach == 'qemuppc':
qbsys = 'ppc'
+ elif mach == 'qemuloongarch64':
+ qbsys = 'loongarch64'
elif mach == 'qemumips':
qbsys = 'mips'
elif mach == 'qemumips64':
@@ -1350,6 +1387,35 @@ class BaseConfig(object):
raise RunQemuError("Failed to boot, QB_SYSTEM_NAME is NULL!")
self.qemu_system = qemu_system
+ def check_render_nodes(self):
+ render_hint = """If /dev/dri/renderD* is absent due to lack of suitable GPU, 'modprobe vgem' will create one suitable for mesa llvmpipe software renderer."""
+ try:
+ content = os.listdir("/dev/dri")
+ nodes = [i for i in content if i.startswith('renderD')]
+ if len(nodes) == 0:
+ raise RunQemuError("No render nodes found in /dev/dri/: %s. %s" %(content, render_hint))
+ for n in nodes:
+ try:
+ with open(os.path.join("/dev/dri", n), "w") as f:
+ f.close()
+ break
+ except IOError:
+ pass
+ else:
+ raise RunQemuError("None of the render nodes in /dev/dri/ are accessible: %s; you may need to add yourself to 'render' group or otherwise ensure you have read-write permissions on one of them." %(nodes))
+ except FileNotFoundError:
+ raise RunQemuError("/dev/dri directory does not exist; no render nodes available on this machine. %s" %(render_hint))
+
+ def setup_guest_agent(self):
+ if self.guest_agent == True:
+ self.qemu_opt += ' -chardev socket,path=' + self.guest_agent_sockpath + ',server,nowait,id=qga0 '
+ self.qemu_opt += ' -device virtio-serial '
+ self.qemu_opt += ' -device virtserialport,chardev=qga0,name=org.qemu.guest_agent.0 '
+
+ def setup_qmp(self):
+ if self.qmp:
+ self.qemu_opt += " -qmp %s,server,nowait" % self.qmp
+
def setup_vga(self):
if self.nographic == True:
if self.sdl == True:
@@ -1369,13 +1435,13 @@ class BaseConfig(object):
# need our font setup and show-cusor below so we need to see what qemu --help says
# is supported so we can pass our correct config in.
if not self.nographic and not self.sdl and not self.gtk and not self.publicvnc and not self.egl_headless == True:
- output = subprocess.check_output([self.qemu_bin, "--help"], universal_newlines=True)
+ output = subprocess.check_output([self.qemu_bin, "--help"], universal_newlines=True, env=self.qemu_environ)
if "-display gtk" in output:
self.gtk = True
elif "-display sdl" in output:
self.sdl = True
else:
- self.qemu_opt += '-display none'
+ self.qemu_opt += ' -display none'
if self.sdl == True or self.gtk == True or self.egl_headless == True:
@@ -1387,13 +1453,14 @@ class BaseConfig(object):
self.qemu_opt += ' -display '
if self.egl_headless == True:
+ self.check_render_nodes()
self.set_dri_path()
self.qemu_opt += 'egl-headless,'
else:
if self.sdl == True:
self.qemu_opt += 'sdl,'
elif self.gtk == True:
- os.environ['FONTCONFIG_PATH'] = '/etc/fonts'
+ self.qemu_environ['FONTCONFIG_PATH'] = '/etc/fonts'
self.qemu_opt += 'gtk,'
if self.gl == True:
@@ -1412,6 +1479,19 @@ class BaseConfig(object):
for entry in self.get('SERIAL_CONSOLES').split(' '):
self.kernel_cmdline_script += ' console=%s' %entry.split(';')[1]
+ # We always wants ttyS0 and ttyS1 in qemu machines (see SERIAL_CONSOLES).
+ # If no serial or serialtcp options were specified, only ttyS0 is created
+ # and sysvinit shows an error trying to enable ttyS1:
+ # INIT: Id "S1" respawning too fast: disabled for 5 minutes
+ serial_num = len(re.findall("-serial", self.qemu_opt))
+
+ # Assume if the user passed serial options, they know what they want
+ # and pad to two devices
+ if serial_num == 1:
+ self.qemu_opt += " -serial null"
+ elif serial_num >= 2:
+ return
+
if self.serialstdio == True or self.nographic == True:
self.qemu_opt += " -serial mon:stdio"
else:
@@ -1423,10 +1503,6 @@ class BaseConfig(object):
self.qemu_opt += " %s" % self.get("QB_SERIAL_OPT")
- # We always wants ttyS0 and ttyS1 in qemu machines (see SERIAL_CONSOLES).
- # If no serial or serialtcp options were specified, only ttyS0 is created
- # and sysvinit shows an error trying to enable ttyS1:
- # INIT: Id "S1" respawning too fast: disabled for 5 minutes
serial_num = len(re.findall("-serial", self.qemu_opt))
if serial_num < 2:
self.qemu_opt += " -serial null"
@@ -1480,13 +1556,20 @@ class BaseConfig(object):
if self.snapshot:
self.qemu_opt += " -snapshot"
+ self.setup_guest_agent()
+ self.setup_qmp()
self.setup_serial()
self.setup_vga()
def start_qemu(self):
import shlex
if self.kernel:
- kernel_opts = "-kernel %s -append '%s %s %s %s'" % (self.kernel, self.kernel_cmdline,
+ kernel_opts = "-kernel %s" % (self.kernel)
+ if self.get('QB_KERNEL_CMDLINE') == "none":
+ if self.bootparams:
+ kernel_opts += " -append '%s'" % (self.bootparams)
+ else:
+ kernel_opts += " -append '%s %s %s %s'" % (self.kernel_cmdline,
self.kernel_cmdline_script, self.get('QB_KERNEL_CMDLINE_APPEND'),
self.bootparams)
if self.dtb:
@@ -1500,14 +1583,17 @@ class BaseConfig(object):
cmd = "%s %s" % (self.qemu_opt, kernel_opts)
cmds = shlex.split(cmd)
logger.info('Running %s\n' % cmd)
+ with open('/proc/uptime', 'r') as f:
+ uptime_seconds = f.readline().split()[0]
+ logger.info('Host uptime: %s\n' % uptime_seconds)
pass_fds = []
if self.taplock_descriptor:
pass_fds = [self.taplock_descriptor.fileno()]
if len(self.portlocks):
for descriptor in self.portlocks.values():
pass_fds.append(descriptor.fileno())
- process = subprocess.Popen(cmds, stderr=subprocess.PIPE, pass_fds=pass_fds)
- self.qemupid = process.pid
+ process = subprocess.Popen(cmds, stderr=subprocess.PIPE, pass_fds=pass_fds, env=self.qemu_environ)
+ self.qemuprocess = process
retcode = process.wait()
if retcode:
if retcode == -signal.SIGTERM:
@@ -1515,6 +1601,13 @@ class BaseConfig(object):
else:
logger.error("Failed to run qemu: %s", process.stderr.read().decode())
+ def cleanup_cmd(self):
+ cmd = self.get('QB_CLEANUP_CMD')
+ if cmd != '':
+ logger.info('Running cleanup command %s' % str(cmd))
+ if subprocess.call(cmd, shell=True) != 0:
+ raise RunQemuError('Failed to run %s' % str(cmd))
+
def cleanup(self):
if self.cleaned:
return
@@ -1523,18 +1616,30 @@ class BaseConfig(object):
signal.signal(signal.SIGTERM, signal.SIG_IGN)
logger.info("Cleaning up")
+
+ if self.qemuprocess:
+ try:
+ # give it some time to shut down, ignore return values and output
+ self.qemuprocess.send_signal(signal.SIGTERM)
+ self.qemuprocess.communicate(timeout=5)
+ except subprocess.TimeoutExpired:
+ self.qemuprocess.kill()
+
+ with open('/proc/uptime', 'r') as f:
+ uptime_seconds = f.readline().split()[0]
+ logger.info('Host uptime: %s\n' % uptime_seconds)
if self.cleantap:
- cmd = ('sudo', self.qemuifdown, self.tap, self.bindir_native)
+ cmd = ('sudo', self.qemuifdown, self.tap)
logger.debug('Running %s' % str(cmd))
subprocess.check_call(cmd)
self.release_taplock()
- self.release_portlock()
if self.nfs_running:
logger.info("Shutting down the userspace NFS server...")
cmd = ("runqemu-export-rootfs", "stop", self.rootfs)
logger.debug('Running %s' % str(cmd))
subprocess.check_call(cmd)
+ self.release_portlock()
if self.saved_stty:
subprocess.check_call(("stty", self.saved_stty))
@@ -1547,9 +1652,12 @@ class BaseConfig(object):
else:
shutil.rmtree(ent)
+ # Deliberately ignore the return code of 'tput smam'.
+ subprocess.call(["tput", "smam"])
+
self.cleaned = True
- def run_bitbake_env(self, mach=None):
+ def run_bitbake_env(self, mach=None, target=''):
bitbake = shutil.which('bitbake')
if not bitbake:
return
@@ -1562,22 +1670,33 @@ class BaseConfig(object):
multiconfig = "mc:%s" % multiconfig
if mach:
- cmd = 'MACHINE=%s bitbake -e %s' % (mach, multiconfig)
+ cmd = 'MACHINE=%s bitbake -e %s %s' % (mach, multiconfig, target)
else:
- cmd = 'bitbake -e %s' % multiconfig
+ cmd = 'bitbake -e %s %s' % (multiconfig, target)
logger.info('Running %s...' % cmd)
- return subprocess.check_output(cmd, shell=True).decode('utf-8')
+ try:
+ return subprocess.check_output(cmd, shell=True).decode('utf-8')
+ except subprocess.CalledProcessError as err:
+ logger.warning("Couldn't run '%s' to gather environment information, maybe the target wasn't an image name, will retry with virtual/kernel as a target:\n%s" % (cmd, err.output.decode('utf-8')))
+ # need something with IMAGE_NAME_SUFFIX/IMAGE_LINK_NAME defined (kernel also inherits image-artifact-names.bbclass)
+ target = 'virtual/kernel'
+ if mach:
+ cmd = 'MACHINE=%s bitbake -e %s %s' % (mach, multiconfig, target)
+ else:
+ cmd = 'bitbake -e %s %s' % (multiconfig, target)
+ try:
+ return subprocess.check_output(cmd, shell=True).decode('utf-8')
+ except subprocess.CalledProcessError as err:
+ logger.warning("Couldn't run '%s' to gather environment information, giving up with 'bitbake -e':\n%s" % (cmd, err.output.decode('utf-8')))
+ return ''
+
- def load_bitbake_env(self, mach=None):
+ def load_bitbake_env(self, mach=None, target=None):
if self.bitbake_e:
return
- try:
- self.bitbake_e = self.run_bitbake_env(mach=mach)
- except subprocess.CalledProcessError as err:
- self.bitbake_e = ''
- logger.warning("Couldn't run 'bitbake -e' to gather environment information:\n%s" % err.output.decode('utf-8'))
+ self.bitbake_e = self.run_bitbake_env(mach=mach, target=target)
def validate_combos(self):
if (self.fstype in self.vmtypes) and self.kernel:
@@ -1607,7 +1726,7 @@ class BaseConfig(object):
return result
raise RunQemuError("Native sysroot directory %s doesn't exist" % result)
else:
- raise RunQemuError("Can't find STAGING_BINDIR_NATIVE in '%s' output" % cmd)
+ raise RunQemuError("Can't find STAGING_BINDIR_NATIVE in '%s' output" % str(cmd))
def main():
@@ -1623,12 +1742,8 @@ def main():
subprocess.check_call([renice, str(os.getpid())])
def sigterm_handler(signum, frame):
- logger.info("SIGTERM received")
- if config.qemupid:
- os.kill(config.qemupid, signal.SIGTERM)
+ logger.info("Received signal: %s" % (signum))
config.cleanup()
- # Deliberately ignore the return code of 'tput smam'.
- subprocess.call(["tput", "smam"])
signal.signal(signal.SIGTERM, sigterm_handler)
config.check_args()
@@ -1640,6 +1755,7 @@ def main():
config.setup_network()
config.setup_rootfs()
config.setup_final()
+ config.setup_cmd()
config.start_qemu()
except RunQemuError as err:
logger.error(err)
@@ -1649,9 +1765,8 @@ def main():
traceback.print_exc()
return 1
finally:
+ config.cleanup_cmd()
config.cleanup()
- # Deliberately ignore the return code of 'tput smam'.
- subprocess.call(["tput", "smam"])
if __name__ == "__main__":
sys.exit(main())
diff --git a/scripts/runqemu-export-rootfs b/scripts/runqemu-export-rootfs
index 384c091713..6a8acd0d5a 100755
--- a/scripts/runqemu-export-rootfs
+++ b/scripts/runqemu-export-rootfs
@@ -34,16 +34,12 @@ if [ -z "$SYSROOT_SETUP_SCRIPT" ]; then
echo "Did you forget to source your build environment setup script?"
exit 1
fi
-. $SYSROOT_SETUP_SCRIPT meta-ide-support
+. $SYSROOT_SETUP_SCRIPT qemu-helper-native
if [ ! -e "$OECORE_NATIVE_SYSROOT/usr/bin/unfsd" ]; then
echo "Error: Unable to find unfsd binary in $OECORE_NATIVE_SYSROOT/usr/bin/"
- if [ "x$OECORE_DISTRO_VERSION" = "x" ]; then
- echo "Have you run 'bitbake meta-ide-support'?"
- else
- echo "This shouldn't happen - something is missing from your toolchain installation"
- fi
+ echo "This shouldn't happen - something is missing from your toolchain installation"
exit 1
fi
@@ -74,26 +70,11 @@ MOUNTD_PORT=${MOUNTD_PORT:=$[ 3048 + 2 * $NFS_INSTANCE ]}
## For debugging you would additionally add
## --debug all
-UNFSD_OPTS="-p -N -i $NFSPID -e $EXPORTS -n $NFSD_PORT -m $MOUNTD_PORT"
+UNFSD_OPTS="-p -i $NFSPID -e $EXPORTS -n $NFSD_PORT -m $MOUNTD_PORT"
# See how we were called.
case "$1" in
start)
- PORTMAP_RUNNING=`ps -ef | grep portmap | grep -v grep`
- RPCBIND_RUNNING=`ps -ef | grep rpcbind | grep -v grep`
- if [[ "x$PORTMAP_RUNNING" = "x" && "x$RPCBIND_RUNNING" = "x" ]]; then
- echo "======================================================="
- echo "Error: neither rpcbind nor portmap appear to be running"
- echo "Please install and start one of these services first"
- echo "======================================================="
- echo "Tip: for recent Ubuntu hosts, run:"
- echo " sudo apt-get install rpcbind"
- echo "Then add OPTIONS=\"-i -w\" to /etc/default/rpcbind and run"
- echo " sudo service portmap restart"
-
- exit 1
- fi
-
echo "Creating exports file..."
echo "$NFS_EXPORT_DIR (rw,no_root_squash,no_all_squash,insecure)" > $EXPORTS
diff --git a/scripts/runqemu-extract-sdk b/scripts/runqemu-extract-sdk
index 9bc0c07fb8..db05da25f2 100755
--- a/scripts/runqemu-extract-sdk
+++ b/scripts/runqemu-extract-sdk
@@ -25,7 +25,7 @@ if [ -z "$SYSROOT_SETUP_SCRIPT" ]; then
echo "Did you forget to source your build system environment setup script?"
exit 1
fi
-. $SYSROOT_SETUP_SCRIPT meta-ide-support
+. $SYSROOT_SETUP_SCRIPT qemu-helper-native
PSEUDO_OPTS="-P $OECORE_NATIVE_SYSROOT/usr"
ROOTFS_TARBALL=$1
diff --git a/scripts/runqemu-gen-tapdevs b/scripts/runqemu-gen-tapdevs
index a6ee4517da..a00c79c442 100755
--- a/scripts/runqemu-gen-tapdevs
+++ b/scripts/runqemu-gen-tapdevs
@@ -1,53 +1,58 @@
#!/bin/bash
#
# Create a "bank" of tap network devices that can be used by the
-# runqemu script. This script needs to be run as root, and will
-# use the tunctl binary from the build system sysroot. Note: many Linux
-# distros these days still use an older version of tunctl which does not
-# support the group permissions option, hence the need to use the build
-# system provided version.
+# runqemu script. This script needs to be run as root
#
# Copyright (C) 2010 Intel Corp.
#
# SPDX-License-Identifier: GPL-2.0-only
#
-uid=`id -u`
gid=`id -g`
-if [ -n "$SUDO_UID" ]; then
- uid=$SUDO_UID
-fi
if [ -n "$SUDO_GID" ]; then
gid=$SUDO_GID
fi
usage() {
- echo "Usage: sudo $0 <uid> <gid> <num> <staging_bindir_native>"
- echo "Where <uid> is the numeric user id the tap devices will be owned by"
+ echo "Usage: sudo $0 <gid> <num>"
echo "Where <gid> is the numeric group id the tap devices will be owned by"
echo "<num> is the number of tap devices to create (0 to remove all)"
- echo "<native-sysroot-basedir> is the path to the build system's native sysroot"
echo "For example:"
echo "$ bitbake qemu-helper-native"
- echo "$ sudo $0 $uid $gid 4 tmp/sysroots-components/x86_64/qemu-helper-native/usr/bin"
+ echo "$ sudo $0 $gid 4"
echo ""
exit 1
}
-if [ $# -ne 4 ]; then
+# Allow passing 4 arguments for backward compatibility with warning
+if [ $# -gt 4 ]; then
+ echo "Error: Incorrect number of arguments"
+ usage
+fi
+if [ $# -gt 3 ]; then
+ echo "Warning: Ignoring the <native-sysroot-basedir> parameter. It is no longer needed."
+fi
+if [ $# -gt 2 ]; then
+ echo "Warning: Ignoring the <uid> parameter. It is no longer needed."
+ GID=$2
+ COUNT=$3
+elif [ $# -eq 2 ]; then
+ GID=$1
+ COUNT=$2
+else
echo "Error: Incorrect number of arguments"
usage
fi
-TUID=$1
-GID=$2
-COUNT=$3
-STAGING_BINDIR_NATIVE=$4
-TUNCTL=$STAGING_BINDIR_NATIVE/tunctl
-if [[ ! -x "$TUNCTL" || -d "$TUNCTL" ]]; then
- echo "Error: $TUNCTL is not an executable"
- usage
+if [ -z "$OE_TAP_NAME" ]; then
+ OE_TAP_NAME=tap
+fi
+
+# check if COUNT is a number and >= 0
+if ! [ $COUNT -ge 0 ]; then
+ echo "Error: Incorrect count: $COUNT"
+ exit 1
fi
if [ $EUID -ne 0 ]; then
@@ -62,48 +67,41 @@ if [ ! -x "$RUNQEMU_IFUP" ]; then
exit 1
fi
-IFCONFIG=`which ip 2> /dev/null`
-if [ -z "$IFCONFIG" ]; then
- # Is it ever anywhere else?
- IFCONFIG=/sbin/ip
-fi
-if [ ! -x "$IFCONFIG" ]; then
- echo "$IFCONFIG cannot be executed"
- exit 1
-fi
-
-if [ $COUNT -ge 0 ]; then
- # Ensure we start with a clean slate
- for tap in `$IFCONFIG link | grep tap | awk '{ print \$2 }' | sed s/://`; do
- echo "Note: Destroying pre-existing tap interface $tap..."
- $TUNCTL -d $tap
- done
- rm -f /etc/runqemu-nosudo
+if interfaces=`ip tuntap list` 2>/dev/null; then
+ interfaces=`echo "$interfaces" |cut -f1 -d: |grep -E "^$OE_TAP_NAME.*"`
else
- echo "Error: Incorrect count: $COUNT"
+ echo "Failed to call 'ip tuntap list'" >&2
exit 1
fi
-if [ $COUNT -gt 0 ]; then
- echo "Creating $COUNT tap devices for UID: $TUID GID: $GID..."
- for ((index=0; index < $COUNT; index++)); do
- echo "Creating tap$index"
- ifup=`$RUNQEMU_IFUP $TUID $GID $STAGING_BINDIR_NATIVE 2>&1`
- if [ $? -ne 0 ]; then
- echo "Error running tunctl: $ifup"
- exit 1
- fi
- done
+# Ensure we start with a clean slate
+for tap in $interfaces; do
+ echo "Note: Destroying pre-existing tap interface $tap..."
+ ip tuntap del $tap mode tap
+done
+rm -f /etc/runqemu-nosudo
- echo "Note: For systems running NetworkManager, it's recommended"
- echo "Note: that the tap devices be set as unmanaged in the"
- echo "Note: NetworkManager.conf file. Add the following lines to"
- echo "Note: /etc/NetworkManager/NetworkManager.conf"
- echo "[keyfile]"
- echo "unmanaged-devices=interface-name:tap*"
-
- # The runqemu script will check for this file, and if it exists,
- # will use the existing bank of tap devices without creating
- # additional ones via sudo.
- touch /etc/runqemu-nosudo
+if [ $COUNT -eq 0 ]; then
+ exit 0
fi
+
+echo "Creating $COUNT tap devices for GID: $GID..."
+for ((index=0; index < $COUNT; index++)); do
+ echo "Creating $OE_TAP_NAME$index"
+ if ! ifup=`$RUNQEMU_IFUP $GID 2>&1`; then
+ echo "Error bringing up interface: $ifup"
+ exit 1
+ fi
+done
+
+echo "Note: For systems running NetworkManager, it's recommended"
+echo "Note: that the tap devices be set as unmanaged in the"
+echo "Note: NetworkManager.conf file. Add the following lines to"
+echo "Note: /etc/NetworkManager/NetworkManager.conf"
+echo "[keyfile]"
+echo "unmanaged-devices=interface-name:$OE_TAP_NAME*"
+
+# The runqemu script will check for this file, and if it exists,
+# will use the existing bank of tap devices without creating
+# additional ones via sudo.
+touch /etc/runqemu-nosudo
diff --git a/scripts/runqemu-ifdown b/scripts/runqemu-ifdown
index e0eb5344c6..822a2a39b9 100755
--- a/scripts/runqemu-ifdown
+++ b/scripts/runqemu-ifdown
@@ -1,8 +1,7 @@
#!/bin/bash
#
# QEMU network configuration script to bring down tap devices. This
-# utility needs to be run as root, and will use the tunctl binary
-# from the native sysroot.
+# utility needs to be run as root, and will use the ip utility
#
# If you find yourself calling this script a lot, you can add the
# the following to your /etc/sudoers file to be able to run this
@@ -17,7 +16,7 @@
#
usage() {
- echo "sudo $(basename $0) <tap-dev> <native-sysroot-basedir>"
+ echo "sudo $(basename $0) <tap-dev>"
}
if [ $EUID -ne 0 ]; then
@@ -25,30 +24,31 @@ if [ $EUID -ne 0 ]; then
exit 1
fi
-if [ $# -ne 2 ]; then
+if [ $# -gt 2 ] || [ $# -lt 1 ]; then
usage
exit 1
fi
+# backward compatibility
+if [ $# -eq 2 ] ; then
+ echo "Warning: native-sysroot-basedir parameter is ignored. It is no longer needed." >&2
+fi
+
TAP=$1
-STAGING_BINDIR_NATIVE=$2
-TUNCTL=$STAGING_BINDIR_NATIVE/tunctl
-if [ ! -e "$TUNCTL" ]; then
- echo "Error: Unable to find tunctl binary in '$STAGING_BINDIR_NATIVE', please bitbake qemu-helper-native"
+if ! ip tuntap del $TAP mode tap 2>/dev/null; then
+ echo "Error: Unable to run up tuntap del"
exit 1
fi
-$TUNCTL -d $TAP
-
-IFCONFIG=`which ip 2> /dev/null`
-if [ "x$IFCONFIG" = "x" ]; then
+IPTOOL=`which ip 2> /dev/null`
+if [ "x$IPTOOL" = "x" ]; then
# better than nothing...
- IFCONFIG=/sbin/ip
+ IPTOOL=/sbin/ip
fi
-if [ -x "$IFCONFIG" ]; then
- if `$IFCONFIG link show $TAP > /dev/null 2>&1`; then
- $IFCONFIG link del $TAP
+if [ -x "$IPTOOL" ]; then
+ if `$IPTOOL link show $TAP > /dev/null 2>&1`; then
+ $IPTOOL link del $TAP
fi
fi
# cleanup the remaining iptables rules
@@ -60,8 +60,13 @@ if [ ! -x "$IPTABLES" ]; then
echo "$IPTABLES cannot be executed"
exit 1
fi
-n=$[ (`echo $TAP | sed 's/tap//'` * 2) + 1 ]
-dest=$[ (`echo $TAP | sed 's/tap//'` * 2) + 2 ]
+
+if [ -z "$OE_TAP_NAME" ]; then
+ OE_TAP_NAME=tap
+fi
+
+n=$[ (`echo $TAP | sed "s/$OE_TAP_NAME//"` * 2) + 1 ]
+dest=$[ (`echo $TAP | sed "s/$OE_TAP_NAME//"` * 2) + 2 ]
$IPTABLES -D POSTROUTING -t nat -j MASQUERADE -s 192.168.7.$n/32
$IPTABLES -D POSTROUTING -t nat -j MASQUERADE -s 192.168.7.$dest/32
true
diff --git a/scripts/runqemu-ifup b/scripts/runqemu-ifup
index bb661740c5..05c9325b6b 100755
--- a/scripts/runqemu-ifup
+++ b/scripts/runqemu-ifup
@@ -1,10 +1,7 @@
#!/bin/bash
#
# QEMU network interface configuration script. This utility needs to
-# be run as root, and will use the tunctl binary from a native sysroot.
-# Note: many Linux distros these days still use an older version of
-# tunctl which does not support the group permissions option, hence
-# the need to use build system's version.
+# be run as root, and will use the ip utility
#
# If you find yourself calling this script a lot, you can add the
# the following to your /etc/sudoers file to be able to run this
@@ -24,7 +21,7 @@
#
usage() {
- echo "sudo $(basename $0) <uid> <gid> <native-sysroot-basedir>"
+ echo "sudo $(basename $0) <gid>"
}
if [ $EUID -ne 0 ]; then
@@ -32,41 +29,43 @@ if [ $EUID -ne 0 ]; then
exit 1
fi
-if [ $# -ne 3 ]; then
+if [ $# -eq 2 ]; then
+ echo "Warning: uid parameter is ignored. It is no longer needed." >&2
+ GROUP="$2"
+elif [ $# -eq 1 ]; then
+ GROUP="$1"
+else
usage
exit 1
fi
-USERID="-u $1"
-GROUP="-g $2"
-STAGING_BINDIR_NATIVE=$3
-TUNCTL=$STAGING_BINDIR_NATIVE/tunctl
-if [ ! -x "$TUNCTL" ]; then
- echo "Error: Unable to find tunctl binary in '$STAGING_BINDIR_NATIVE', please bitbake qemu-helper-native"
- exit 1
+if [ -z "$OE_TAP_NAME" ]; then
+ OE_TAP_NAME=tap
fi
-TAP=`$TUNCTL -b $GROUP 2>&1`
-STATUS=$?
-if [ $STATUS -ne 0 ]; then
-# If tunctl -g fails, try using tunctl -u, for older host kernels
-# which do not support the TUNSETGROUP ioctl
- TAP=`$TUNCTL -b $USERID 2>&1`
- STATUS=$?
- if [ $STATUS -ne 0 ]; then
- echo "tunctl failed:"
- exit 1
+if taps=$(ip tuntap list 2>/dev/null); then
+ tap_no_last=$(echo "$taps" |cut -f 1 -d ":" |grep -E "^$OE_TAP_NAME.*" |sed "s/$OE_TAP_NAME//g" | sort -rn | head -n 1)
+ if [ -z "$tap_no_last" ]; then
+ tap_no=0
+ else
+ tap_no=$(("$tap_no_last" + 1))
fi
+ ip tuntap add "$OE_TAP_NAME$tap_no" mode tap group "$GROUP" && TAP=$OE_TAP_NAME$tap_no
+fi
+
+if [ -z "$TAP" ]; then
+ echo "Error: Unable to find a tap device to use"
+ exit 1
fi
-IFCONFIG=`which ip 2> /dev/null`
-if [ "x$IFCONFIG" = "x" ]; then
+IPTOOL=`which ip 2> /dev/null`
+if [ "x$IPTOOL" = "x" ]; then
# better than nothing...
- IFCONFIG=/sbin/ip
+ IPTOOL=/sbin/ip
fi
-if [ ! -x "$IFCONFIG" ]; then
- echo "$IFCONFIG cannot be executed"
+if [ ! -x "$IPTOOL" ]; then
+ echo "$IPTOOL cannot be executed"
exit 1
fi
@@ -79,22 +78,22 @@ if [ ! -x "$IPTABLES" ]; then
exit 1
fi
-n=$[ (`echo $TAP | sed 's/tap//'` * 2) + 1 ]
-$IFCONFIG addr add 192.168.7.$n/32 broadcast 192.168.7.255 dev $TAP
+n=$[ (`echo $TAP | sed "s/$OE_TAP_NAME//"` * 2) + 1 ]
+$IPTOOL addr add 192.168.7.$n/32 broadcast 192.168.7.255 dev $TAP
STATUS=$?
if [ $STATUS -ne 0 ]; then
echo "Failed to set up IP addressing on $TAP"
exit 1
fi
-$IFCONFIG link set dev $TAP up
+$IPTOOL link set dev $TAP up
STATUS=$?
if [ $STATUS -ne 0 ]; then
echo "Failed to bring up $TAP"
exit 1
fi
-dest=$[ (`echo $TAP | sed 's/tap//'` * 2) + 2 ]
-$IFCONFIG route add to 192.168.7.$dest dev $TAP
+dest=$[ (`echo $TAP | sed "s/$OE_TAP_NAME//"` * 2) + 2 ]
+$IPTOOL route add to 192.168.7.$dest dev $TAP
STATUS=$?
if [ $STATUS -ne 0 ]; then
echo "Failed to add route to 192.168.7.$dest using $TAP"
diff --git a/scripts/runqemu.README b/scripts/runqemu.README
index da9abd7dfb..e5f4b4634c 100644
--- a/scripts/runqemu.README
+++ b/scripts/runqemu.README
@@ -1,12 +1,12 @@
Using OE images with QEMU
=========================
-OE-Core can generate qemu bootable kernels and images with can be used
+OE-Core can generate qemu bootable kernels and images which can be used
on a desktop system. The scripts currently support booting ARM, MIPS, PowerPC
-and x86 (32 and 64 bit) images. The scripts can be used within the OE build
-system or externaly.
+and x86 (32 and 64 bit) images. The scripts can be used within the OE build
+system or externally.
-The runqemu script is run as:
+The runqemu script is run as:
runqemu <machine> <zimage> <filesystem>
@@ -15,13 +15,13 @@ where:
<machine> is the machine/architecture to use (qemuarm/qemumips/qemuppc/qemux86/qemux86-64)
<zimage> is the path to a kernel (e.g. zimage-qemuarm.bin)
<filesystem> is the path to an ext2 image (e.g. filesystem-qemuarm.ext2) or an nfs directory
-
-If <machine> isn't specified, the script will try to detect the machine name
+
+If <machine> isn't specified, the script will try to detect the machine name
from the name of the <zimage> file.
If <filesystem> isn't specified, nfs booting will be assumed.
-When used within the build system, it will default to qemuarm, ext2 and the last kernel and
+When used within the build system, it will default to qemuarm, ext2 and the last kernel and
core-image-sato-sdk image built by the build system. If an sdk image isn't present it will look
for sato and minimal images.
@@ -31,7 +31,7 @@ Full usage instructions can be seen by running the command with no options speci
Notes
=====
- - The scripts run qemu using sudo. Change perms on /dev/net/tun to
+ - The scripts run qemu using sudo. Change perms on /dev/net/tun to
run as non root. The runqemu-gen-tapdevs script can also be used by
root to prepopulate the appropriate network devices.
- You can access the host computer at 192.168.7.1 within the image.
diff --git a/scripts/sstate-cache-management.py b/scripts/sstate-cache-management.py
new file mode 100755
index 0000000000..d3f600bd28
--- /dev/null
+++ b/scripts/sstate-cache-management.py
@@ -0,0 +1,329 @@
+#!/usr/bin/env python3
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+import argparse
+import os
+import re
+import sys
+
+from collections import defaultdict
+from concurrent.futures import ThreadPoolExecutor
+from dataclasses import dataclass
+from pathlib import Path
+
+if sys.version_info < (3, 8, 0):
+ raise RuntimeError("Sorry, python 3.8.0 or later is required for this script.")
+
+SSTATE_PREFIX = "sstate:"
+SSTATE_EXTENSION = ".tar.zst"
+# SSTATE_EXTENSION = ".tgz"
+# .siginfo.done files are mentioned in the original script?
+SSTATE_SUFFIXES = (
+ SSTATE_EXTENSION,
+ f"{SSTATE_EXTENSION}.siginfo",
+ f"{SSTATE_EXTENSION}.done",
+)
+
+RE_SSTATE_PKGSPEC = re.compile(
+ rf"""sstate:(?P<pn>[^:]*):
+ (?P<package_target>[^:]*):
+ (?P<pv>[^:]*):
+ (?P<pr>[^:]*):
+ (?P<sstate_pkgarch>[^:]*):
+ (?P<sstate_version>[^_]*):
+ (?P<bb_unihash>[^_]*)_
+ (?P<bb_task>[^:]*)
+ (?P<ext>({"|".join([re.escape(s) for s in SSTATE_SUFFIXES])}))$""",
+ re.X,
+)
+
+
+# Really we'd like something like a Path subclass which implements a stat
+# cache here, unfortunately there's no good way to do that transparently
+# (yet); see:
+#
+# https://github.com/python/cpython/issues/70219
+# https://discuss.python.org/t/make-pathlib-extensible/3428/77
+@dataclass
+class SstateEntry:
+ """Class for keeping track of an entry in sstate-cache."""
+
+ path: Path
+ match: re.Match
+ stat_result: os.stat_result = None
+
+ def __hash__(self):
+ return self.path.__hash__()
+
+ def __getattr__(self, name):
+ return self.match.group(name)
+
+
+# this is what's in the original script; as far as I can tell, it's an
+# implementation artefact which we don't need?
+def find_archs():
+ # all_archs
+ builder_arch = os.uname().machine
+
+ # FIXME
+ layer_paths = [Path("../..")]
+
+ tune_archs = set()
+ re_tune = re.compile(r'AVAILTUNES .*=.*"(.*)"')
+ for path in layer_paths:
+ for tunefile in [
+ p for p in path.glob("meta*/conf/machine/include/**/*") if p.is_file()
+ ]:
+ with open(tunefile) as f:
+ for line in f:
+ m = re_tune.match(line)
+ if m:
+ tune_archs.update(m.group(1).split())
+
+ # all_machines
+ machine_archs = set()
+ for path in layer_paths:
+ for machine_file in path.glob("meta*/conf/machine/*.conf"):
+ machine_archs.add(machine_file.parts[-1][:-5])
+
+ extra_archs = set()
+ all_archs = (
+ set(
+ arch.replace("-", "_")
+ for arch in machine_archs | tune_archs | set(["allarch", builder_arch])
+ )
+ | extra_archs
+ )
+
+ print(all_archs)
+
+
+# again, not needed?
+def find_tasks():
+ print(set([p.bb_task for p in paths]))
+
+
+def collect_sstate_paths(args):
+ def scandir(path, paths):
+ # Assume everything is a directory; by not checking we avoid needing an
+ # additional stat which is potentially a synchronous roundtrip over NFS
+ try:
+ for p in path.iterdir():
+ filename = p.parts[-1]
+ if filename.startswith(SSTATE_PREFIX):
+ if filename.endswith(SSTATE_SUFFIXES):
+ m = RE_SSTATE_PKGSPEC.match(p.parts[-1])
+ assert m
+ paths.add(SstateEntry(p, m))
+ # ignore other things (includes things like lockfiles)
+ else:
+ scandir(p, paths)
+
+ except NotADirectoryError:
+ pass
+
+ paths = set()
+ # TODO: parellise scandir
+ scandir(Path(args.cache_dir), paths)
+
+ def path_stat(p):
+ p.stat_result = p.path.lstat()
+
+ if args.remove_duplicated:
+ # This is probably slightly performance negative on a local filesystem
+ # when we interact with the GIL; over NFS it's a massive win.
+ with ThreadPoolExecutor(max_workers=args.jobs) as executor:
+ executor.map(path_stat, paths)
+
+ return paths
+
+
+def remove_by_stamps(args, paths):
+ all_sums = set()
+ for stamps_dir in args.stamps_dir:
+ stamps_path = Path(stamps_dir)
+ assert stamps_path.is_dir()
+ re_sigdata = re.compile(r"do_.*\.sigdata\.([^.]*)")
+ all_sums |= set(
+ [
+ re_sigdata.search(x.parts[-1]).group(1)
+ for x in stamps_path.glob("*/*/*.do_*.sigdata.*")
+ ]
+ )
+ re_setscene = re.compile(r"do_.*_setscene\.([^.]*)")
+ all_sums |= set(
+ [
+ re_setscene.search(x.parts[-1]).group(1)
+ for x in stamps_path.glob("*/*/*.do_*_setscene.*")
+ ]
+ )
+ return [p for p in paths if p.bb_unihash not in all_sums]
+
+
+def remove_duplicated(args, paths):
+ # Skip populate_lic as it produces duplicates in a normal build
+ #
+ # 9ae16469e707 sstate-cache-management: skip populate_lic archives when removing duplicates
+ valid_paths = [p for p in paths if p.bb_task != "populate_lic"]
+
+ keep = dict()
+ remove = list()
+ for p in valid_paths:
+ sstate_sig = ":".join([p.pn, p.sstate_pkgarch, p.bb_task, p.ext])
+ if sstate_sig not in keep:
+ keep[sstate_sig] = p
+ elif p.stat_result.st_mtime > keep[sstate_sig].stat_result.st_mtime:
+ remove.append(keep[sstate_sig])
+ keep[sstate_sig] = p
+ else:
+ remove.append(p)
+
+ return remove
+
+
+def remove_orphans(args, paths):
+ remove = list()
+ pathsigs = defaultdict(list)
+ for p in paths:
+ sstate_sig = ":".join([p.pn, p.sstate_pkgarch, p.bb_task])
+ pathsigs[sstate_sig].append(p)
+ for k, v in pathsigs.items():
+ if len([p for p in v if p.ext == SSTATE_EXTENSION]) == 0:
+ remove.extend(v)
+ return remove
+
+
+def parse_arguments():
+ parser = argparse.ArgumentParser(description="sstate cache management utility.")
+
+ parser.add_argument(
+ "--cache-dir",
+ default=os.environ.get("SSTATE_CACHE_DIR"),
+ help="""Specify sstate cache directory, will use the environment
+ variable SSTATE_CACHE_DIR if it is not specified.""",
+ )
+
+ # parser.add_argument(
+ # "--extra-archs",
+ # help="""Specify list of architectures which should be tested, this list
+ # will be extended with native arch, allarch and empty arch. The
+ # script won't be trying to generate list of available archs from
+ # AVAILTUNES in tune files.""",
+ # )
+
+ # parser.add_argument(
+ # "--extra-layer",
+ # help="""Specify the layer which will be used for searching the archs,
+ # it will search the meta and meta-* layers in the top dir by
+ # default, and will search meta, meta-*, <layer1>, <layer2>,
+ # ...<layern> when specified. Use "," as the separator.
+ #
+ # This is useless for --stamps-dir or when --extra-archs is used.""",
+ # )
+
+ parser.add_argument(
+ "-d",
+ "--remove-duplicated",
+ action="store_true",
+ help="""Remove the duplicated sstate cache files of one package, only
+ the newest one will be kept. The duplicated sstate cache files
+ of one package must have the same arch, which means sstate cache
+ files with multiple archs are not considered duplicate.
+
+ Conflicts with --stamps-dir.""",
+ )
+
+ parser.add_argument(
+ "--remove-orphans",
+ action="store_true",
+ help=f"""Remove orphan siginfo files from the sstate cache, i.e. those
+ where this is no {SSTATE_EXTENSION} file but there are associated
+ tracking files.""",
+ )
+
+ parser.add_argument(
+ "--stamps-dir",
+ action="append",
+ help="""Specify the build directory's stamps directories, the sstate
+ cache file which IS USED by these build diretories will be KEPT,
+ other sstate cache files in cache-dir will be removed. Can be
+ specified multiple times for several directories.
+
+ Conflicts with --remove-duplicated.""",
+ )
+
+ parser.add_argument(
+ "-j", "--jobs", default=8, type=int, help="Run JOBS jobs in parallel."
+ )
+
+ # parser.add_argument(
+ # "-L",
+ # "--follow-symlink",
+ # action="store_true",
+ # help="Remove both the symbol link and the destination file, default: no.",
+ # )
+
+ parser.add_argument(
+ "-y",
+ "--yes",
+ action="store_true",
+ help="""Automatic yes to prompts; assume "yes" as answer to all prompts
+ and run non-interactively.""",
+ )
+
+ parser.add_argument(
+ "-v", "--verbose", action="store_true", help="Explain what is being done."
+ )
+
+ parser.add_argument(
+ "-D",
+ "--debug",
+ action="count",
+ default=0,
+ help="Show debug info, repeat for more debug info.",
+ )
+
+ args = parser.parse_args()
+ if args.cache_dir is None or (
+ not args.remove_duplicated and not args.stamps_dir and not args.remove_orphans
+ ):
+ parser.print_usage()
+ sys.exit(1)
+
+ return args
+
+
+def main():
+ args = parse_arguments()
+
+ paths = collect_sstate_paths(args)
+ if args.remove_duplicated:
+ remove = remove_duplicated(args, paths)
+ elif args.stamps_dir:
+ remove = remove_by_stamps(args, paths)
+ else:
+ remove = list()
+
+ if args.remove_orphans:
+ remove = set(remove) | set(remove_orphans(args, paths))
+
+ if args.debug >= 1:
+ print("\n".join([str(p.path) for p in remove]))
+ print(f"{len(remove)} out of {len(paths)} files will be removed!")
+ if not args.yes:
+ print("Do you want to continue (y/n)?")
+ confirm = input() in ("y", "Y")
+ else:
+ confirm = True
+ if confirm:
+ # TODO: parallelise remove
+ for p in remove:
+ p.path.unlink()
+
+
+if __name__ == "__main__":
+ main()
diff --git a/scripts/sstate-cache-management.sh b/scripts/sstate-cache-management.sh
deleted file mode 100755
index d39671f7c6..0000000000
--- a/scripts/sstate-cache-management.sh
+++ /dev/null
@@ -1,458 +0,0 @@
-#!/bin/bash
-
-# Copyright (c) 2012 Wind River Systems, Inc.
-#
-# SPDX-License-Identifier: GPL-2.0-only
-#
-
-# Global vars
-cache_dir=
-confirm=
-fsym=
-total_deleted=0
-verbose=
-debug=0
-
-usage () {
- cat << EOF
-Welcome to sstate cache management utilities.
-sstate-cache-management.sh <OPTION>
-
-Options:
- -h, --help
- Display this help and exit.
-
- --cache-dir=<sstate cache dir>
- Specify sstate cache directory, will use the environment
- variable SSTATE_CACHE_DIR if it is not specified.
-
- --extra-archs=<arch1>,<arch2>...<archn>
- Specify list of architectures which should be tested, this list
- will be extended with native arch, allarch and empty arch. The
- script won't be trying to generate list of available archs from
- AVAILTUNES in tune files.
-
- --extra-layer=<layer1>,<layer2>...<layern>
- Specify the layer which will be used for searching the archs,
- it will search the meta and meta-* layers in the top dir by
- default, and will search meta, meta-*, <layer1>, <layer2>,
- ...<layern> when specified. Use "," as the separator.
-
- This is useless for --stamps-dir or when --extra-archs is used.
-
- -d, --remove-duplicated
- Remove the duplicated sstate cache files of one package, only
- the newest one will be kept. The duplicated sstate cache files
- of one package must have the same arch, which means sstate cache
- files with multiple archs are not considered duplicate.
-
- Conflicts with --stamps-dir.
-
- --stamps-dir=<dir1>,<dir2>...<dirn>
- Specify the build directory's stamps directories, the sstate
- cache file which IS USED by these build diretories will be KEPT,
- other sstate cache files in cache-dir will be removed. Use ","
- as the separator. For example:
- --stamps-dir=build1/tmp/stamps,build2/tmp/stamps
-
- Conflicts with --remove-duplicated.
-
- -L, --follow-symlink
- Remove both the symbol link and the destination file, default: no.
-
- -y, --yes
- Automatic yes to prompts; assume "yes" as answer to all prompts
- and run non-interactively.
-
- -v, --verbose
- Explain what is being done.
-
- -D, --debug
- Show debug info, repeat for more debug info.
-
-EOF
-}
-
-if [ $# -lt 1 ]; then
- usage
- exit 0
-fi
-
-# Echo no files to remove
-no_files () {
- echo No files to remove
-}
-
-# Echo nothing to do
-do_nothing () {
- echo Nothing to do
-}
-
-# Read the input "y"
-read_confirm () {
- echo "$total_deleted out of $total_files files will be removed! "
- if [ "$confirm" != "y" ]; then
- echo "Do you want to continue (y/n)? "
- while read confirm; do
- [ "$confirm" = "Y" -o "$confirm" = "y" -o "$confirm" = "n" \
- -o "$confirm" = "N" ] && break
- echo "Invalid input \"$confirm\", please input 'y' or 'n': "
- done
- else
- echo
- fi
-}
-
-# Print error information and exit.
-echo_error () {
- echo "ERROR: $1" >&2
- exit 1
-}
-
-# Generate the remove list:
-#
-# * Add .done/.siginfo to the remove list
-# * Add destination of symlink to the remove list
-#
-# $1: output file, others: sstate cache file (.tar.zst)
-gen_rmlist (){
- local rmlist_file="$1"
- shift
- local files="$@"
- for i in $files; do
- echo $i >> $rmlist_file
- # Add the ".siginfo"
- if [ -e $i.siginfo ]; then
- echo $i.siginfo >> $rmlist_file
- fi
- # Add the destination of symlink
- if [ -L "$i" ]; then
- if [ "$fsym" = "y" ]; then
- dest="`readlink -e $i`"
- if [ -n "$dest" ]; then
- echo $dest >> $rmlist_file
- # Remove the .siginfo when .tar.zst is removed
- if [ -f "$dest.siginfo" ]; then
- echo $dest.siginfo >> $rmlist_file
- fi
- fi
- fi
- # Add the ".tar.zst.done" and ".siginfo.done" (may exist in the future)
- base_fn="${i##/*/}"
- t_fn="$base_fn.done"
- s_fn="$base_fn.siginfo.done"
- for d in $t_fn $s_fn; do
- if [ -f $cache_dir/$d ]; then
- echo $cache_dir/$d >> $rmlist_file
- fi
- done
- fi
- done
-}
-
-# Remove the duplicated cache files for the pkg, keep the newest one
-remove_duplicated () {
-
- local topdir
- local oe_core_dir
- local tunedirs
- local all_archs
- local all_machines
- local ava_archs
- local arch
- local file_names
- local sstate_files_list
- local fn_tmp
- local list_suffix=`mktemp` || exit 1
-
- if [ -z "$extra_archs" ] ; then
- # Find out the archs in all the layers
- echo "Figuring out the archs in the layers ... "
- oe_core_dir=$(dirname $(dirname $(readlink -e $0)))
- topdir=$(dirname $oe_core_dir)
- tunedirs="`find $topdir/meta* ${oe_core_dir}/meta* $layers -path '*/meta*/conf/machine/include' 2>/dev/null`"
- [ -n "$tunedirs" ] || echo_error "Can't find the tune directory"
- all_machines="`find $topdir/meta* ${oe_core_dir}/meta* $layers -path '*/meta*/conf/machine/*' -name '*.conf' 2>/dev/null | sed -e 's/.*\///' -e 's/.conf$//'`"
- all_archs=`grep -r -h "^AVAILTUNES .*=" $tunedirs | sed -e 's/.*=//' -e 's/\"//g'`
- fi
-
- # Use the "_" to substitute "-", e.g., x86-64 to x86_64, but not for extra_archs which can be something like cortexa9t2-vfp-neon
- # Sort to remove the duplicated ones
- # Add allarch and builder arch (native)
- builder_arch=$(uname -m)
- all_archs="$(echo allarch $all_archs $all_machines $builder_arch \
- | sed -e 's/-/_/g' -e 's/ /\n/g' | sort -u) $extra_archs"
- echo "Done"
-
- # Total number of files including sstate-, .siginfo and .done files
- total_files=`find $cache_dir -name 'sstate*' | wc -l`
- # Save all the sstate files in a file
- sstate_files_list=`mktemp` || exit 1
- find $cache_dir -iname 'sstate:*:*:*:*:*:*:*.tar.zst*' >$sstate_files_list
-
- echo "Figuring out the suffixes in the sstate cache dir ... "
- sstate_suffixes="`sed 's%.*/sstate:[^:]*:[^:]*:[^:]*:[^:]*:[^:]*:[^:]*:[^_]*_\([^:]*\)\.tar\.zst.*%\1%g' $sstate_files_list | sort -u`"
- echo "Done"
- echo "The following suffixes have been found in the cache dir:"
- echo $sstate_suffixes
-
- echo "Figuring out the archs in the sstate cache dir ... "
- # Using this SSTATE_PKGSPEC definition it's 6th colon separated field
- # SSTATE_PKGSPEC = "sstate:${PN}:${PACKAGE_ARCH}${TARGET_VENDOR}-${TARGET_OS}:${PV}:${PR}:${SSTATE_PKGARCH}:${SSTATE_VERSION}:"
- for arch in $all_archs; do
- grep -q ".*/sstate:[^:]*:[^:]*:[^:]*:[^:]*:$arch:[^:]*:[^:]*\.tar\.zst$" $sstate_files_list
- [ $? -eq 0 ] && ava_archs="$ava_archs $arch"
- # ${builder_arch}_$arch used by toolchain sstate
- grep -q ".*/sstate:[^:]*:[^:]*:[^:]*:[^:]*:${builder_arch}_$arch:[^:]*:[^:]*\.tar\.zst$" $sstate_files_list
- [ $? -eq 0 ] && ava_archs="$ava_archs ${builder_arch}_$arch"
- done
- echo "Done"
- echo "The following archs have been found in the cache dir:"
- echo $ava_archs
- echo ""
-
- # Save the file list which needs to be removed
- local remove_listdir=`mktemp -d` || exit 1
- for suffix in $sstate_suffixes; do
- if [ "$suffix" = "populate_lic" ] ; then
- echo "Skipping populate_lic, because removing duplicates doesn't work correctly for them (use --stamps-dir instead)"
- continue
- fi
- # Total number of files including .siginfo and .done files
- total_files_suffix=`grep ".*/sstate:[^:]*:[^:]*:[^:]*:[^:]*:[^:]*:[^:]*:[^:_]*_$suffix\.tar\.zst.*" $sstate_files_list | wc -l 2>/dev/null`
- total_archive_suffix=`grep ".*/sstate:[^:]*:[^:]*:[^:]*:[^:]*:[^:]*:[^:]*:[^:_]*_$suffix\.tar\.zst$" $sstate_files_list | wc -l 2>/dev/null`
- # Save the file list to a file, some suffix's file may not exist
- grep ".*/sstate:[^:]*:[^:]*:[^:]*:[^:]*:[^:]*:[^:]*:[^:_]*_$suffix\.tar\.zst.*" $sstate_files_list >$list_suffix 2>/dev/null
- local deleted_archives=0
- local deleted_files=0
- for ext in tar.zst tar.zst.siginfo tar.zst.done; do
- echo "Figuring out the sstate:xxx_$suffix.$ext ... "
- # Uniq BPNs
- file_names=`for arch in $ava_archs ""; do
- sed -ne "s%.*/sstate:\([^:]*\):[^:]*:[^:]*:[^:]*:$arch:[^:]*:[^:]*\.${ext}$%\1%p" $list_suffix
- done | sort -u`
-
- fn_tmp=`mktemp` || exit 1
- rm_list="$remove_listdir/sstate:xxx_$suffix"
- for fn in $file_names; do
- [ -z "$verbose" ] || echo "Analyzing sstate:$fn-xxx_$suffix.${ext}"
- for arch in $ava_archs ""; do
- grep -h ".*/sstate:$fn:[^:]*:[^:]*:[^:]*:$arch:[^:]*:[^:]*\.${ext}$" $list_suffix >$fn_tmp
- if [ -s $fn_tmp ] ; then
- [ $debug -gt 1 ] && echo "Available files for $fn-$arch- with suffix $suffix.${ext}:" && cat $fn_tmp
- # Use the modification time
- to_del=$(ls -t $(cat $fn_tmp) | sed -n '1!p')
- [ $debug -gt 2 ] && echo "Considering to delete: $to_del"
- # The sstate file which is downloaded from the SSTATE_MIRROR is
- # put in SSTATE_DIR, and there is a symlink in SSTATE_DIR/??/ to
- # it, so filter it out from the remove list if it should not be
- # removed.
- to_keep=$(ls -t $(cat $fn_tmp) | sed -n '1p')
- [ $debug -gt 2 ] && echo "Considering to keep: $to_keep"
- for k in $to_keep; do
- if [ -L "$k" ]; then
- # The symlink's destination
- k_dest="`readlink -e $k`"
- # Maybe it is the one in cache_dir
- k_maybe="$cache_dir/${k##/*/}"
- # Remove it from the remove list if they are the same.
- if [ "$k_dest" = "$k_maybe" ]; then
- to_del="`echo $to_del | sed 's#'\"$k_maybe\"'##g'`"
- fi
- fi
- done
- rm -f $fn_tmp
- [ $debug -gt 2 ] && echo "Decided to delete: $to_del"
- gen_rmlist $rm_list.$ext "$to_del"
- fi
- done
- done
- done
- deleted_archives=`cat $rm_list.* 2>/dev/null | grep "\.tar\.zst$" | wc -l`
- deleted_files=`cat $rm_list.* 2>/dev/null | wc -l`
- [ "$deleted_files" -gt 0 -a $debug -gt 0 ] && cat $rm_list.*
- echo "($deleted_archives out of $total_archives_suffix .tar.zst files for $suffix suffix will be removed or $deleted_files out of $total_files_suffix when counting also .siginfo and .done files)"
- let total_deleted=$total_deleted+$deleted_files
- done
- deleted_archives=0
- rm_old_list=$remove_listdir/sstate-old-filenames
- find $cache_dir -name 'sstate-*.tar.zst' >$rm_old_list
- [ -s "$rm_old_list" ] && deleted_archives=`cat $rm_old_list | grep "\.tar\.zst$" | wc -l`
- [ -s "$rm_old_list" ] && deleted_files=`cat $rm_old_list | wc -l`
- [ -s "$rm_old_list" -a $debug -gt 0 ] && cat $rm_old_list
- echo "($deleted_archives or .tar.zst files with old sstate-* filenames will be removed or $deleted_files when counting also .siginfo and .done files)"
- let total_deleted=$total_deleted+$deleted_files
-
- rm -f $list_suffix
- rm -f $sstate_files_list
- if [ $total_deleted -gt 0 ]; then
- read_confirm
- if [ "$confirm" = "y" -o "$confirm" = "Y" ]; then
- for list in `ls $remove_listdir/`; do
- echo "Removing $list.tar.zst archive (`cat $remove_listdir/$list | wc -w` files) ... "
- # Remove them one by one to avoid the argument list too long error
- for i in `cat $remove_listdir/$list`; do
- rm -f $verbose $i
- done
- echo "Done"
- done
- echo "$total_deleted files have been removed!"
- else
- do_nothing
- fi
- else
- no_files
- fi
- [ -d $remove_listdir ] && rm -fr $remove_listdir
-}
-
-# Remove the sstate file by stamps dir, the file not used by the stamps dir
-# will be removed.
-rm_by_stamps (){
-
- local cache_list=`mktemp` || exit 1
- local keep_list=`mktemp` || exit 1
- local rm_list=`mktemp` || exit 1
- local sums
- local all_sums
-
- # Total number of files including sstate-, .siginfo and .done files
- total_files=`find $cache_dir -type f -name 'sstate*' | wc -l`
- # Save all the state file list to a file
- find $cache_dir -type f -name 'sstate*' | sort -u -o $cache_list
-
- echo "Figuring out the suffixes in the sstate cache dir ... "
- local sstate_suffixes="`sed 's%.*/sstate:[^:]*:[^:]*:[^:]*:[^:]*:[^:]*:[^:]*:[^_]*_\([^:]*\)\.tar\.zst.*%\1%g' $cache_list | sort -u`"
- echo "Done"
- echo "The following suffixes have been found in the cache dir:"
- echo $sstate_suffixes
-
- # Figure out all the md5sums in the stamps dir.
- echo "Figuring out all the md5sums in stamps dir ... "
- for i in $sstate_suffixes; do
- # There is no "\.sigdata" but "_setcene" when it is mirrored
- # from the SSTATE_MIRRORS, use them to figure out the sum.
- sums=`find $stamps -maxdepth 3 -name "*.do_$i.*" \
- -o -name "*.do_${i}_setscene.*" | \
- sed -ne 's#.*_setscene\.##p' -e 's#.*\.sigdata\.##p' | \
- sed -e 's#\..*##' | sort -u`
- all_sums="$all_sums $sums"
- done
- echo "Done"
-
- echo "Figuring out the files which will be removed ... "
- for i in $all_sums; do
- grep ".*/sstate:[^:]*:[^:]*:[^:]*:[^:]*:[^:]*:[^:]*:${i}_.*" $cache_list >>$keep_list
- done
- echo "Done"
-
- if [ -s $keep_list ]; then
- sort -u $keep_list -o $keep_list
- to_del=`comm -1 -3 $keep_list $cache_list`
- gen_rmlist $rm_list "$to_del"
- let total_deleted=`cat $rm_list | sort -u | wc -w`
- if [ $total_deleted -gt 0 ]; then
- [ $debug -gt 0 ] && cat $rm_list | sort -u
- read_confirm
- if [ "$confirm" = "y" -o "$confirm" = "Y" ]; then
- echo "Removing sstate cache files ... ($total_deleted files)"
- # Remove them one by one to avoid the argument list too long error
- for i in `cat $rm_list | sort -u`; do
- rm -f $verbose $i
- done
- echo "$total_deleted files have been removed"
- else
- do_nothing
- fi
- else
- no_files
- fi
- else
- echo_error "All files in cache dir will be removed! Abort!"
- fi
-
- rm -f $cache_list
- rm -f $keep_list
- rm -f $rm_list
-}
-
-# Parse arguments
-while [ -n "$1" ]; do
- case $1 in
- --cache-dir=*)
- cache_dir=`echo $1 | sed -e 's#^--cache-dir=##' | xargs readlink -e`
- [ -d "$cache_dir" ] || echo_error "Invalid argument to --cache-dir"
- shift
- ;;
- --remove-duplicated|-d)
- rm_duplicated="y"
- shift
- ;;
- --yes|-y)
- confirm="y"
- shift
- ;;
- --follow-symlink|-L)
- fsym="y"
- shift
- ;;
- --extra-archs=*)
- extra_archs=`echo $1 | sed -e 's#^--extra-archs=##' -e 's#,# #g'`
- [ -n "$extra_archs" ] || echo_error "Invalid extra arch parameter"
- shift
- ;;
- --extra-layer=*)
- extra_layers=`echo $1 | sed -e 's#^--extra-layer=##' -e 's#,# #g'`
- [ -n "$extra_layers" ] || echo_error "Invalid extra layer parameter"
- for i in $extra_layers; do
- l=`readlink -e $i`
- if [ -d "$l" ]; then
- layers="$layers $l"
- else
- echo_error "Can't find layer $i"
- fi
- done
- shift
- ;;
- --stamps-dir=*)
- stamps=`echo $1 | sed -e 's#^--stamps-dir=##' -e 's#,# #g'`
- [ -n "$stamps" ] || echo_error "Invalid stamps dir $i"
- for i in $stamps; do
- [ -d "$i" ] || echo_error "Invalid stamps dir $i"
- done
- shift
- ;;
- --verbose|-v)
- verbose="-v"
- shift
- ;;
- --debug|-D)
- debug=`expr $debug + 1`
- echo "Debug level $debug"
- shift
- ;;
- --help|-h)
- usage
- exit 0
- ;;
- *)
- echo "Invalid arguments $*"
- echo_error "Try 'sstate-cache-management.sh -h' for more information."
- ;;
- esac
-done
-
-# sstate cache directory, use environment variable SSTATE_CACHE_DIR
-# if it was not specified, otherwise, error.
-[ -n "$cache_dir" ] || cache_dir=$SSTATE_CACHE_DIR
-[ -n "$cache_dir" ] || echo_error "No cache dir found!"
-[ -d "$cache_dir" ] || echo_error "Invalid cache directory \"$cache_dir\""
-
-[ -n "$rm_duplicated" -a -n "$stamps" ] && \
- echo_error "Can not use both --remove-duplicated and --stamps-dir"
-
-[ "$rm_duplicated" = "y" ] && remove_duplicated
-[ -n "$stamps" ] && rm_by_stamps
-[ -z "$rm_duplicated" -a -z "$stamps" ] && \
- echo "What do you want to do?"
-exit 0
diff --git a/scripts/sstate-diff-machines.sh b/scripts/sstate-diff-machines.sh
index 8b64e11be1..5ed413b2ee 100755
--- a/scripts/sstate-diff-machines.sh
+++ b/scripts/sstate-diff-machines.sh
@@ -1,5 +1,7 @@
#!/bin/bash
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: GPL-2.0-only
#
# Used to compare sstate checksums between MACHINES.
diff --git a/scripts/sstate-sysroot-cruft.sh b/scripts/sstate-sysroot-cruft.sh
index 9c948e932d..b2002badfb 100755
--- a/scripts/sstate-sysroot-cruft.sh
+++ b/scripts/sstate-sysroot-cruft.sh
@@ -1,5 +1,7 @@
#!/bin/sh
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: GPL-2.0-only
#
# Used to find files installed in sysroot which are not tracked by sstate manifest
diff --git a/scripts/sysroot-relativelinks.py b/scripts/sysroot-relativelinks.py
index 56e36f3ad5..ccb3c867f0 100755
--- a/scripts/sysroot-relativelinks.py
+++ b/scripts/sysroot-relativelinks.py
@@ -1,5 +1,7 @@
#!/usr/bin/env python3
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: GPL-2.0-only
#
diff --git a/scripts/task-time b/scripts/task-time
index bcd1e25817..8f71b29b77 100755
--- a/scripts/task-time
+++ b/scripts/task-time
@@ -1,5 +1,7 @@
#!/usr/bin/env python3
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: GPL-2.0-only
#
diff --git a/scripts/verify-bashisms b/scripts/verify-bashisms
index ec2374f183..fc3677c6ed 100755
--- a/scripts/verify-bashisms
+++ b/scripts/verify-bashisms
@@ -1,5 +1,7 @@
#!/usr/bin/env python3
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: GPL-2.0-only
#
diff --git a/scripts/wic b/scripts/wic
index aee63a45aa..06e0b48db0 100755
--- a/scripts/wic
+++ b/scripts/wic
@@ -209,7 +209,7 @@ def wic_create_subcommand(options, usage_str):
logger.info(" (Please check that the build artifacts for the machine")
logger.info(" selected in local.conf actually exist and that they")
logger.info(" are the correct artifacts for the image (.wks file)).\n")
- raise WicError("The artifact that couldn't be found was %s:\n %s", not_found, not_found_dir)
+ raise WicError("The artifact that couldn't be found was %s:\n %s" % (not_found, not_found_dir))
krootfs_dir = options.rootfs_dir
if krootfs_dir is None:
diff --git a/scripts/yocto-check-layer b/scripts/yocto-check-layer
index 0e5b75b1f7..67cc71950f 100755
--- a/scripts/yocto-check-layer
+++ b/scripts/yocto-check-layer
@@ -168,14 +168,13 @@ def main():
layers_tested = 0
for layer in layers:
- if layer['type'] == LayerType.ERROR_NO_LAYER_CONF or \
- layer['type'] == LayerType.ERROR_BSP_DISTRO:
+ if layer['type'] in (LayerType.ERROR_NO_LAYER_CONF, LayerType.ERROR_BSP_DISTRO):
continue
# Reset to a clean backup copy for each run
shutil.copyfile(bblayersconf + '.backup', bblayersconf)
- if check_bblayers(bblayersconf, layer['path'], logger):
+ if layer['type'] not in (LayerType.CORE, ) and check_bblayers(bblayersconf, layer['path'], logger):
logger.info("%s already in %s. To capture initial signatures, layer under test should not present "
"in BBLAYERS. Please remove %s from BBLAYERS." % (layer['name'], bblayersconf, layer['name']))
results[layer['name']] = None
diff --git a/scripts/yocto_testresults_query.py b/scripts/yocto_testresults_query.py
new file mode 100755
index 0000000000..521ead8473
--- /dev/null
+++ b/scripts/yocto_testresults_query.py
@@ -0,0 +1,131 @@
+#!/usr/bin/env python3
+
+# Yocto Project test results management tool
+# This script is an thin layer over resulttool to manage tes results and regression reports.
+# Its main feature is to translate tags or branch names to revisions SHA1, and then to run resulttool
+# with those computed revisions
+#
+# Copyright (C) 2023 OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+import sys
+import os
+import argparse
+import subprocess
+import tempfile
+import lib.scriptutils as scriptutils
+
+script_path = os.path.dirname(os.path.realpath(__file__))
+poky_path = os.path.abspath(os.path.join(script_path, ".."))
+resulttool = os.path.abspath(os.path.join(script_path, "resulttool"))
+logger = scriptutils.logger_create(sys.argv[0])
+testresults_default_url="git://git.yoctoproject.org/yocto-testresults"
+
+def create_workdir():
+ workdir = tempfile.mkdtemp(prefix='yocto-testresults-query.')
+ logger.info(f"Shallow-cloning testresults in {workdir}")
+ subprocess.check_call(["git", "clone", testresults_default_url, workdir, "--depth", "1"])
+ return workdir
+
+def get_sha1(pokydir, revision):
+ try:
+ rev = subprocess.check_output(["git", "rev-list", "-n", "1", revision], cwd=pokydir).decode('utf-8').strip()
+ logger.info(f"SHA-1 revision for {revision} in {pokydir} is {rev}")
+ return rev
+ except subprocess.CalledProcessError:
+ logger.error(f"Can not find SHA-1 for {revision} in {pokydir}")
+ return None
+
+def get_branch(tag):
+ # The tags in test results repository, as returned by git rev-list, have the following form:
+ # refs/tags/<branch>/<count>-g<sha1>/<num>
+ return '/'.join(tag.split("/")[2:-2])
+
+def fetch_testresults(workdir, sha1):
+ logger.info(f"Fetching test results for {sha1} in {workdir}")
+ rawtags = subprocess.check_output(["git", "ls-remote", "--refs", "--tags", "origin", f"*{sha1}*"], cwd=workdir).decode('utf-8').strip()
+ if not rawtags:
+ raise Exception(f"No reference found for commit {sha1} in {workdir}")
+ branch = ""
+ for rev in [rawtag.split()[1] for rawtag in rawtags.splitlines()]:
+ if not branch:
+ branch = get_branch(rev)
+ logger.info(f"Fetching matching revision: {rev}")
+ subprocess.check_call(["git", "fetch", "--depth", "1", "origin", f"{rev}:{rev}"], cwd=workdir)
+ return branch
+
+def compute_regression_report(workdir, basebranch, baserevision, targetbranch, targetrevision, args):
+ logger.info(f"Running resulttool regression between SHA1 {baserevision} and {targetrevision}")
+ command = [resulttool, "regression-git", "--branch", basebranch, "--commit", baserevision, "--branch2", targetbranch, "--commit2", targetrevision, workdir]
+ if args.limit:
+ command.extend(["-l", args.limit])
+ report = subprocess.check_output(command).decode("utf-8")
+ return report
+
+def print_report_with_header(report, baseversion, baserevision, targetversion, targetrevision):
+ print("========================== Regression report ==============================")
+ print(f'{"=> Target:": <16}{targetversion: <16}({targetrevision})')
+ print(f'{"=> Base:": <16}{baseversion: <16}({baserevision})')
+ print("===========================================================================\n")
+ print(report, end='')
+
+def regression(args):
+ logger.info(f"Compute regression report between {args.base} and {args.target}")
+ if args.testresultsdir:
+ workdir = args.testresultsdir
+ else:
+ workdir = create_workdir()
+
+ try:
+ baserevision = get_sha1(poky_path, args.base)
+ targetrevision = get_sha1(poky_path, args.target)
+ if not baserevision or not targetrevision:
+ logger.error("One or more revision(s) missing. You might be targeting nonexistant tags/branches, or are in wrong repository (you must use Poky and not oe-core)")
+ if not args.testresultsdir:
+ subprocess.check_call(["rm", "-rf", workdir])
+ sys.exit(1)
+ basebranch = fetch_testresults(workdir, baserevision)
+ targetbranch = fetch_testresults(workdir, targetrevision)
+ report = compute_regression_report(workdir, basebranch, baserevision, targetbranch, targetrevision, args)
+ print_report_with_header(report, args.base, baserevision, args.target, targetrevision)
+ finally:
+ if not args.testresultsdir:
+ subprocess.check_call(["rm", "-rf", workdir])
+
+def main():
+ parser = argparse.ArgumentParser(description="Yocto Project test results helper")
+ subparsers = parser.add_subparsers(
+ help="Supported commands for test results helper",
+ required=True)
+ parser_regression_report = subparsers.add_parser(
+ "regression-report",
+ help="Generate regression report between two fixed revisions. Revisions can be branch name or tag")
+ parser_regression_report.add_argument(
+ 'base',
+ help="Revision or tag against which to compare results (i.e: the older)")
+ parser_regression_report.add_argument(
+ 'target',
+ help="Revision or tag to compare against the base (i.e: the newer)")
+ parser_regression_report.add_argument(
+ '-t',
+ '--testresultsdir',
+ help=f"An existing test results directory. {sys.argv[0]} will automatically clone it and use default branch if not provided")
+ parser_regression_report.add_argument(
+ '-l',
+ '--limit',
+ help=f"Maximum number of changes to display per test. Can be set to 0 to print all changes")
+ parser_regression_report.set_defaults(func=regression)
+
+ args = parser.parse_args()
+ args.func(args)
+
+if __name__ == '__main__':
+ try:
+ ret = main()
+ except Exception:
+ ret = 1
+ import traceback
+ traceback.print_exc()
+ sys.exit(ret)